diff --git a/.gitignore b/.gitignore index 0bfcbb2..11be616 100644 --- a/.gitignore +++ b/.gitignore @@ -37,5 +37,8 @@ next-env.d.ts # amplify .amplify -amplify_outputs* -amplifyconfiguration* \ No newline at end of file +amplifyconfiguration* + +# AWS CDK +cdk.out/ +cdk.out \ No newline at end of file diff --git a/README.md b/README.md index 5ba35f1..ee697a5 100644 --- a/README.md +++ b/README.md @@ -1,25 +1,144 @@ -## AWS Amplify Next.js (App Router) Starter Template +# Real‑Time Document Editor (RTDE) +# by: TitanTechs2.0(Anthony,Kyle,Jaytee,Michael,Christopher) -This repository provides a starter template for creating applications using Next.js (App Router) and AWS Amplify, emphasizing easy setup for authentication, API, and database capabilities. +A starter template for building a real‑time collaborative text editor with Next.js (App Router) and AWS Amplify. +Users can sign in (Cognito + social providers), read & update a shared document via AppSync GraphQL & DynamoDB, and receive live updates via GraphQL subscriptions. -## Overview +--- -This template equips you with a foundational Next.js application integrated with AWS Amplify, streamlined for scalability and performance. It is ideal for developers looking to jumpstart their project with pre-configured AWS services like Cognito, AppSync, and DynamoDB. +## 🚀 Features -## Features +- **Authentication** + - Email/password and Sign in with Google + - Secure routes with Amplify’s `` component +- **API & Database** + - GraphQL CRUD API powered by AWS AppSync + - DynamoDB table for persistence + - Realtime updates with GraphQL subscriptions +- **Framework** + - Next.js 14 (App Router + “use client” components) + - TypeScript, Tailwind CSS +- **Infrastructure as Code** + - CDK / Terraform modules under `infra/` + - Local sandbox support via `npx ampx sandbox` -- **Authentication**: Setup with Amazon Cognito for secure user authentication. -- **API**: Ready-to-use GraphQL endpoint with AWS AppSync. 
-- **Database**: Real-time database powered by Amazon DynamoDB. +--- -## Deploying to AWS +## 📁 Repo Structure -For detailed instructions on deploying your application, refer to the [deployment section](https://docs.amplify.aws/nextjs/start/quickstart/nextjs-app-router-client-components/#deploy-a-fullstack-app-to-aws) of our documentation. +``` +/ +├── amplify/ # Amplify‑generated backend config & stubs +├── app/ # Next.js “app” directory (pages & client components) +├── infra/terraform_security # IaC for AWS resources (Cognito, AppSync, DynamoDB) +├── lib/ # Shared React/utility code +├── public/ # Static assets +├── amplify.yml # Amplify Console build settings +├── next.config.js # Next.js configuration +├── package.json +├── tsconfig.json +├── .gitignore +├── README.md +├── CONTRIBUTING.md +├── CODE_OF_CONDUCT.md +├── LICENSE (MIT‑0) +└── SECURITY.md +``` -## Security -See [CONTRIBUTING](CONTRIBUTING.md#security-issue-notifications) for more information. -## License +--- -This library is licensed under the MIT-0 License. See the LICENSE file. 
\ No newline at end of file +## 🔧 Prerequisites + +- [Node.js 20+](https://nodejs.org) +- [Yarn](https://yarnpkg.com) or npm +- AWS account & [Amplify CLI](https://docs.amplify.aws/cli/) (for sandbox) +- (Optional) Join the free Amplify “sandbox” for zero‑config AWS resources: + ```bash + npm install -g @aws-amplify/cli + npx ampx sandbox --once \ + --outputs-format json \ + --outputs-version 1 \ + --outputs-out-dir amplify + + +📥 Getting Started +Clone this repo + +bash +Copy +Edit +git clone https://github.com/KyleParato/rtde.git +cd rtde +Install dependencies + +bash +Copy +Edit +npm install +or +yarn install +Bootstrap AWS backend + +If using the Amplify sandbox: + +bash +Copy +Edit +npx ampx sandbox --once \ + --outputs-format json \ + --outputs-version 1 \ + --outputs-out-dir amplify +Otherwise, configure your Amplify project and run: + +bash +Copy +Edit +amplify init +amplify push --yes +Configure your frontend + +Confirm amplify/amplify_outputs.json is committed (it contains your API endpoints, Cognito settings, etc.) + +In amplifyConfig.ts, import and pass it to Amplify.configure(). + +Run the development server + +bash +Copy +Edit +npm run dev +or +yarn dev +Open http://localhost:3000 to view the editor. + +📦 Available Scripts +dev: Next.js local dev server + +build: Compile for production + +start: Run the compiled app + +amplify: Amplify CLI shortcut for local sandbox & push + +☁️ Deployment +This repo is preconfigured for AWS Amplify Hosting. Simply connect your GitHub repo to Amplify Console and it will: + +Install dependencies + +Run amplify pull or npm run amplify:sandbox + +Build & deploy both backend (AppSync, Cognito, DynamoDB) and frontend in one pipeline + +See amplify.yml for the exact steps. + +🤝 Contributing +Please read CONTRIBUTING.md for guidelines on code style, branch naming, and pull requests. + +📜 License +This project is released under the MIT‑0 License. See LICENSE for details. 
+ +RTDE is maintained by TitanTechs2.0 + +**Feel free to iterate** on any section—rearrange or rename things to fit your workflow and naming conventions. \ No newline at end of file diff --git a/amplify.yml b/amplify.yml index e69489b..ca35f82 100644 --- a/amplify.yml +++ b/amplify.yml @@ -1,12 +1,36 @@ version: 1 +env: + variables: + CDK_DEFAULT_REGION: us-west-1 + CDK_DEFAULT_ACCOUNT: 464672142928 + backend: phases: + preBuild: + commands: + - nvm install 20 + - nvm use 20 + - npm ci + + - unset AWS_PROFILE + + - export AWS_CONTAINER_CREDENTIALS_FULL_URI="http://169.254.170.2${AWS_CONTAINER_CREDENTIALS_RELATIVE_URI}" + - export AWS_REGION=$CDK_DEFAULT_REGION build: commands: + - echo "CRED URI → $AWS_CONTAINER_CREDENTIALS_FULL_URI" + - aws sts get-caller-identity - npm ci --cache .npm --prefer-offline - - npx ampx pipeline-deploy --branch $AWS_BRANCH --app-id $AWS_APP_ID + - npx cdk bootstrap aws://$CDK_DEFAULT_ACCOUNT/$CDK_DEFAULT_REGION + - npx cdk deploy --require-approval never + frontend: phases: + preBuild: + commands: + - nvm install 20 + - nvm use 20 + - npm ci build: commands: - npm run build @@ -18,4 +42,4 @@ frontend: paths: - .next/cache/**/* - .npm/**/* - - node_modules/**/* \ No newline at end of file + - node_modules/**/*xs \ No newline at end of file diff --git a/amplify/amplify_outputs.json b/amplify/amplify_outputs.json new file mode 100644 index 0000000..e1c9871 --- /dev/null +++ b/amplify/amplify_outputs.json @@ -0,0 +1,114 @@ +{ + "auth": { + "user_pool_id": "us-east-1_gajpEI5Ox", + "aws_region": "us-east-1", + "user_pool_client_id": "7sl2rmbk0m7hh96c6g1il3q9it", + "identity_pool_id": "us-east-1:20079a08-6076-4ed1-a947-0bd5bd108cc7", + "mfa_methods": [], + "standard_required_attributes": [ + "email" + ], + "username_attributes": [ + "email" + ], + "user_verification_types": [ + "email" + ], + "mfa_configuration": "NONE", + "password_policy": { + "min_length": 8, + "require_lowercase": true, + "require_numbers": true, + "require_symbols": 
true, + "require_uppercase": true + }, + "unauthenticated_identities_enabled": true + }, + "data": { + "url": "https://et67qcynanbafjpd4rd5y7s4sm.appsync-api.us-east-1.amazonaws.com/graphql", + "aws_region": "us-east-1", + "default_authorization_type": "AMAZON_COGNITO_USER_POOLS", + "authorization_types": [ + "AWS_IAM" + ], + "model_introspection": { + "version": 1, + "models": { + "Document": { + "name": "Document", + "fields": { + "id": { + "name": "id", + "isArray": false, + "type": "ID", + "isRequired": true, + "attributes": [] + }, + "title": { + "name": "title", + "isArray": false, + "type": "String", + "isRequired": false, + "attributes": [] + }, + "content": { + "name": "content", + "isArray": false, + "type": "String", + "isRequired": false, + "attributes": [] + }, + "createdAt": { + "name": "createdAt", + "isArray": false, + "type": "AWSDateTime", + "isRequired": false, + "attributes": [], + "isReadOnly": true + }, + "updatedAt": { + "name": "updatedAt", + "isArray": false, + "type": "AWSDateTime", + "isRequired": false, + "attributes": [], + "isReadOnly": true + } + }, + "syncable": true, + "pluralName": "Documents", + "attributes": [ + { + "type": "model", + "properties": {} + }, + { + "type": "auth", + "properties": { + "rules": [ + { + "allow": "private", + "operations": [ + "create", + "update", + "delete", + "read" + ] + } + ] + } + } + ], + "primaryKeyInfo": { + "isCustomPrimaryKey": false, + "primaryKeyFieldName": "id", + "sortKeyFieldNames": [] + } + } + }, + "enums": {}, + "nonModels": {} + } + }, + "version": "1" +} \ No newline at end of file diff --git a/amplify/data/resource.ts b/amplify/data/resource.ts index 5cead7a..72887d0 100644 --- a/amplify/data/resource.ts +++ b/amplify/data/resource.ts @@ -7,7 +7,7 @@ const schema = a.schema({ content: a.string(), }) .authorization((allow) => [allow.authenticated()]), // or add allow.apiKey() here if needed -}); + }); export const data = defineData({ schema, // ✅ This now has a value diff --git 
a/amplify/functions/deleteDocument/node_modules/.bin/fxparser b/amplify/functions/deleteDocument/node_modules/.bin/fxparser new file mode 120000 index 0000000..75327ed --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/.bin/fxparser @@ -0,0 +1 @@ +../fast-xml-parser/src/cli/cli.js \ No newline at end of file diff --git a/amplify/functions/deleteDocument/node_modules/.bin/uuid b/amplify/functions/deleteDocument/node_modules/.bin/uuid new file mode 120000 index 0000000..588f70e --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/.bin/uuid @@ -0,0 +1 @@ +../uuid/dist/bin/uuid \ No newline at end of file diff --git a/amplify/functions/deleteDocument/node_modules/.package-lock.json b/amplify/functions/deleteDocument/node_modules/.package-lock.json new file mode 100644 index 0000000..e50c178 --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/.package-lock.json @@ -0,0 +1,1340 @@ +{ + "name": "deletedocument", + "version": "1.0.0", + "lockfileVersion": 3, + "requires": true, + "packages": { + "node_modules/@aws-crypto/sha256-browser": { + "version": "5.2.0", + "resolved": "https://registry.npmjs.org/@aws-crypto/sha256-browser/-/sha256-browser-5.2.0.tgz", + "integrity": "sha512-AXfN/lGotSQwu6HNcEsIASo7kWXZ5HYWvfOmSNKDsEqC4OashTp8alTmaz+F7TC2L083SFv5RdB+qU3Vs1kZqw==", + "license": "Apache-2.0", + "dependencies": { + "@aws-crypto/sha256-js": "^5.2.0", + "@aws-crypto/supports-web-crypto": "^5.2.0", + "@aws-crypto/util": "^5.2.0", + "@aws-sdk/types": "^3.222.0", + "@aws-sdk/util-locate-window": "^3.0.0", + "@smithy/util-utf8": "^2.0.0", + "tslib": "^2.6.2" + } + }, + "node_modules/@aws-crypto/sha256-browser/node_modules/@smithy/is-array-buffer": { + "version": "2.2.0", + "resolved": "https://registry.npmjs.org/@smithy/is-array-buffer/-/is-array-buffer-2.2.0.tgz", + "integrity": "sha512-GGP3O9QFD24uGeAXYUjwSTXARoqpZykHadOmA8G5vfJPK0/DC67qa//0qvqrJzL1xc8WQWX7/yc7fwudjPHPhA==", + "license": "Apache-2.0", + "dependencies": { + "tslib": 
"^2.6.2" + }, + "engines": { + "node": ">=14.0.0" + } + }, + "node_modules/@aws-crypto/sha256-browser/node_modules/@smithy/util-buffer-from": { + "version": "2.2.0", + "resolved": "https://registry.npmjs.org/@smithy/util-buffer-from/-/util-buffer-from-2.2.0.tgz", + "integrity": "sha512-IJdWBbTcMQ6DA0gdNhh/BwrLkDR+ADW5Kr1aZmd4k3DIF6ezMV4R2NIAmT08wQJ3yUK82thHWmC/TnK/wpMMIA==", + "license": "Apache-2.0", + "dependencies": { + "@smithy/is-array-buffer": "^2.2.0", + "tslib": "^2.6.2" + }, + "engines": { + "node": ">=14.0.0" + } + }, + "node_modules/@aws-crypto/sha256-browser/node_modules/@smithy/util-utf8": { + "version": "2.3.0", + "resolved": "https://registry.npmjs.org/@smithy/util-utf8/-/util-utf8-2.3.0.tgz", + "integrity": "sha512-R8Rdn8Hy72KKcebgLiv8jQcQkXoLMOGGv5uI1/k0l+snqkOzQ1R0ChUBCxWMlBsFMekWjq0wRudIweFs7sKT5A==", + "license": "Apache-2.0", + "dependencies": { + "@smithy/util-buffer-from": "^2.2.0", + "tslib": "^2.6.2" + }, + "engines": { + "node": ">=14.0.0" + } + }, + "node_modules/@aws-crypto/sha256-js": { + "version": "5.2.0", + "resolved": "https://registry.npmjs.org/@aws-crypto/sha256-js/-/sha256-js-5.2.0.tgz", + "integrity": "sha512-FFQQyu7edu4ufvIZ+OadFpHHOt+eSTBaYaki44c+akjg7qZg9oOQeLlk77F6tSYqjDAFClrHJk9tMf0HdVyOvA==", + "license": "Apache-2.0", + "dependencies": { + "@aws-crypto/util": "^5.2.0", + "@aws-sdk/types": "^3.222.0", + "tslib": "^2.6.2" + }, + "engines": { + "node": ">=16.0.0" + } + }, + "node_modules/@aws-crypto/supports-web-crypto": { + "version": "5.2.0", + "resolved": "https://registry.npmjs.org/@aws-crypto/supports-web-crypto/-/supports-web-crypto-5.2.0.tgz", + "integrity": "sha512-iAvUotm021kM33eCdNfwIN//F77/IADDSs58i+MDaOqFrVjZo9bAal0NK7HurRuWLLpF1iLX7gbWrjHjeo+YFg==", + "license": "Apache-2.0", + "dependencies": { + "tslib": "^2.6.2" + } + }, + "node_modules/@aws-crypto/util": { + "version": "5.2.0", + "resolved": "https://registry.npmjs.org/@aws-crypto/util/-/util-5.2.0.tgz", + "integrity": 
"sha512-4RkU9EsI6ZpBve5fseQlGNUWKMa1RLPQ1dnjnQoe07ldfIzcsGb5hC5W0Dm7u423KWzawlrpbjXBrXCEv9zazQ==", + "license": "Apache-2.0", + "dependencies": { + "@aws-sdk/types": "^3.222.0", + "@smithy/util-utf8": "^2.0.0", + "tslib": "^2.6.2" + } + }, + "node_modules/@aws-crypto/util/node_modules/@smithy/is-array-buffer": { + "version": "2.2.0", + "resolved": "https://registry.npmjs.org/@smithy/is-array-buffer/-/is-array-buffer-2.2.0.tgz", + "integrity": "sha512-GGP3O9QFD24uGeAXYUjwSTXARoqpZykHadOmA8G5vfJPK0/DC67qa//0qvqrJzL1xc8WQWX7/yc7fwudjPHPhA==", + "license": "Apache-2.0", + "dependencies": { + "tslib": "^2.6.2" + }, + "engines": { + "node": ">=14.0.0" + } + }, + "node_modules/@aws-crypto/util/node_modules/@smithy/util-buffer-from": { + "version": "2.2.0", + "resolved": "https://registry.npmjs.org/@smithy/util-buffer-from/-/util-buffer-from-2.2.0.tgz", + "integrity": "sha512-IJdWBbTcMQ6DA0gdNhh/BwrLkDR+ADW5Kr1aZmd4k3DIF6ezMV4R2NIAmT08wQJ3yUK82thHWmC/TnK/wpMMIA==", + "license": "Apache-2.0", + "dependencies": { + "@smithy/is-array-buffer": "^2.2.0", + "tslib": "^2.6.2" + }, + "engines": { + "node": ">=14.0.0" + } + }, + "node_modules/@aws-crypto/util/node_modules/@smithy/util-utf8": { + "version": "2.3.0", + "resolved": "https://registry.npmjs.org/@smithy/util-utf8/-/util-utf8-2.3.0.tgz", + "integrity": "sha512-R8Rdn8Hy72KKcebgLiv8jQcQkXoLMOGGv5uI1/k0l+snqkOzQ1R0ChUBCxWMlBsFMekWjq0wRudIweFs7sKT5A==", + "license": "Apache-2.0", + "dependencies": { + "@smithy/util-buffer-from": "^2.2.0", + "tslib": "^2.6.2" + }, + "engines": { + "node": ">=14.0.0" + } + }, + "node_modules/@aws-sdk/client-dynamodb": { + "version": "3.803.0", + "resolved": "https://registry.npmjs.org/@aws-sdk/client-dynamodb/-/client-dynamodb-3.803.0.tgz", + "integrity": "sha512-rJPidxfyTQHz/1Naq3FukSoIt40GwXfv3npVR15bCBFpqx9TXEt7GoIUbiqm+Ftx8sx9hqJ6XNhf80FIa243gw==", + "license": "Apache-2.0", + "dependencies": { + "@aws-crypto/sha256-browser": "5.2.0", + "@aws-crypto/sha256-js": "5.2.0", + "@aws-sdk/core": 
"3.799.0", + "@aws-sdk/credential-provider-node": "3.803.0", + "@aws-sdk/middleware-endpoint-discovery": "3.775.0", + "@aws-sdk/middleware-host-header": "3.775.0", + "@aws-sdk/middleware-logger": "3.775.0", + "@aws-sdk/middleware-recursion-detection": "3.775.0", + "@aws-sdk/middleware-user-agent": "3.799.0", + "@aws-sdk/region-config-resolver": "3.775.0", + "@aws-sdk/types": "3.775.0", + "@aws-sdk/util-endpoints": "3.787.0", + "@aws-sdk/util-user-agent-browser": "3.775.0", + "@aws-sdk/util-user-agent-node": "3.799.0", + "@smithy/config-resolver": "^4.1.0", + "@smithy/core": "^3.3.0", + "@smithy/fetch-http-handler": "^5.0.2", + "@smithy/hash-node": "^4.0.2", + "@smithy/invalid-dependency": "^4.0.2", + "@smithy/middleware-content-length": "^4.0.2", + "@smithy/middleware-endpoint": "^4.1.1", + "@smithy/middleware-retry": "^4.1.2", + "@smithy/middleware-serde": "^4.0.3", + "@smithy/middleware-stack": "^4.0.2", + "@smithy/node-config-provider": "^4.0.2", + "@smithy/node-http-handler": "^4.0.4", + "@smithy/protocol-http": "^5.1.0", + "@smithy/smithy-client": "^4.2.1", + "@smithy/types": "^4.2.0", + "@smithy/url-parser": "^4.0.2", + "@smithy/util-base64": "^4.0.0", + "@smithy/util-body-length-browser": "^4.0.0", + "@smithy/util-body-length-node": "^4.0.0", + "@smithy/util-defaults-mode-browser": "^4.0.9", + "@smithy/util-defaults-mode-node": "^4.0.9", + "@smithy/util-endpoints": "^3.0.2", + "@smithy/util-middleware": "^4.0.2", + "@smithy/util-retry": "^4.0.3", + "@smithy/util-utf8": "^4.0.0", + "@smithy/util-waiter": "^4.0.3", + "@types/uuid": "^9.0.1", + "tslib": "^2.6.2", + "uuid": "^9.0.1" + }, + "engines": { + "node": ">=18.0.0" + } + }, + "node_modules/@aws-sdk/client-sso": { + "version": "3.803.0", + "resolved": "https://registry.npmjs.org/@aws-sdk/client-sso/-/client-sso-3.803.0.tgz", + "integrity": "sha512-TT3BRD1yiL3IGXBKfq560vvEdyOJtJr8bp+R82dD6P0IoS8aFcNtF822BOJy7CqvxksOc3hQKLaPVzE82gE8Ow==", + "license": "Apache-2.0", + "dependencies": { + 
"@aws-crypto/sha256-browser": "5.2.0", + "@aws-crypto/sha256-js": "5.2.0", + "@aws-sdk/core": "3.799.0", + "@aws-sdk/middleware-host-header": "3.775.0", + "@aws-sdk/middleware-logger": "3.775.0", + "@aws-sdk/middleware-recursion-detection": "3.775.0", + "@aws-sdk/middleware-user-agent": "3.799.0", + "@aws-sdk/region-config-resolver": "3.775.0", + "@aws-sdk/types": "3.775.0", + "@aws-sdk/util-endpoints": "3.787.0", + "@aws-sdk/util-user-agent-browser": "3.775.0", + "@aws-sdk/util-user-agent-node": "3.799.0", + "@smithy/config-resolver": "^4.1.0", + "@smithy/core": "^3.3.0", + "@smithy/fetch-http-handler": "^5.0.2", + "@smithy/hash-node": "^4.0.2", + "@smithy/invalid-dependency": "^4.0.2", + "@smithy/middleware-content-length": "^4.0.2", + "@smithy/middleware-endpoint": "^4.1.1", + "@smithy/middleware-retry": "^4.1.2", + "@smithy/middleware-serde": "^4.0.3", + "@smithy/middleware-stack": "^4.0.2", + "@smithy/node-config-provider": "^4.0.2", + "@smithy/node-http-handler": "^4.0.4", + "@smithy/protocol-http": "^5.1.0", + "@smithy/smithy-client": "^4.2.1", + "@smithy/types": "^4.2.0", + "@smithy/url-parser": "^4.0.2", + "@smithy/util-base64": "^4.0.0", + "@smithy/util-body-length-browser": "^4.0.0", + "@smithy/util-body-length-node": "^4.0.0", + "@smithy/util-defaults-mode-browser": "^4.0.9", + "@smithy/util-defaults-mode-node": "^4.0.9", + "@smithy/util-endpoints": "^3.0.2", + "@smithy/util-middleware": "^4.0.2", + "@smithy/util-retry": "^4.0.3", + "@smithy/util-utf8": "^4.0.0", + "tslib": "^2.6.2" + }, + "engines": { + "node": ">=18.0.0" + } + }, + "node_modules/@aws-sdk/core": { + "version": "3.799.0", + "resolved": "https://registry.npmjs.org/@aws-sdk/core/-/core-3.799.0.tgz", + "integrity": "sha512-hkKF3Zpc6+H8GI1rlttYVRh9uEE77cqAzLmLpY3iu7sql8cZgPERRBfaFct8p1SaDyrksLNiboD1vKW58mbsYg==", + "license": "Apache-2.0", + "dependencies": { + "@aws-sdk/types": "3.775.0", + "@smithy/core": "^3.3.0", + "@smithy/node-config-provider": "^4.0.2", + "@smithy/property-provider": 
"^4.0.2", + "@smithy/protocol-http": "^5.1.0", + "@smithy/signature-v4": "^5.1.0", + "@smithy/smithy-client": "^4.2.1", + "@smithy/types": "^4.2.0", + "@smithy/util-middleware": "^4.0.2", + "fast-xml-parser": "4.4.1", + "tslib": "^2.6.2" + }, + "engines": { + "node": ">=18.0.0" + } + }, + "node_modules/@aws-sdk/credential-provider-env": { + "version": "3.799.0", + "resolved": "https://registry.npmjs.org/@aws-sdk/credential-provider-env/-/credential-provider-env-3.799.0.tgz", + "integrity": "sha512-vT/SSWtbUIOW/U21qgEySmmO44SFWIA7WeQPX1OrI8WJ5n7OEI23JWLHjLvHTkYmuZK6z1rPcv7HzRgmuGRibA==", + "license": "Apache-2.0", + "dependencies": { + "@aws-sdk/core": "3.799.0", + "@aws-sdk/types": "3.775.0", + "@smithy/property-provider": "^4.0.2", + "@smithy/types": "^4.2.0", + "tslib": "^2.6.2" + }, + "engines": { + "node": ">=18.0.0" + } + }, + "node_modules/@aws-sdk/credential-provider-http": { + "version": "3.799.0", + "resolved": "https://registry.npmjs.org/@aws-sdk/credential-provider-http/-/credential-provider-http-3.799.0.tgz", + "integrity": "sha512-2CjBpOWmhaPAExOgHnIB5nOkS5ef+mfRlJ1JC4nsnjAx0nrK4tk0XRE0LYz11P3+ue+a86cU8WTmBo+qjnGxPQ==", + "license": "Apache-2.0", + "dependencies": { + "@aws-sdk/core": "3.799.0", + "@aws-sdk/types": "3.775.0", + "@smithy/fetch-http-handler": "^5.0.2", + "@smithy/node-http-handler": "^4.0.4", + "@smithy/property-provider": "^4.0.2", + "@smithy/protocol-http": "^5.1.0", + "@smithy/smithy-client": "^4.2.1", + "@smithy/types": "^4.2.0", + "@smithy/util-stream": "^4.2.0", + "tslib": "^2.6.2" + }, + "engines": { + "node": ">=18.0.0" + } + }, + "node_modules/@aws-sdk/credential-provider-ini": { + "version": "3.803.0", + "resolved": "https://registry.npmjs.org/@aws-sdk/credential-provider-ini/-/credential-provider-ini-3.803.0.tgz", + "integrity": "sha512-XtbFftJex18GobpRWJxg5V7stVwvmV2gdBYW+zRM0YW6NZAR4NP/4vcc9ktM3++BWW5OF4Kvl7Nu7N4mAzRHmw==", + "license": "Apache-2.0", + "dependencies": { + "@aws-sdk/core": "3.799.0", + 
"@aws-sdk/credential-provider-env": "3.799.0", + "@aws-sdk/credential-provider-http": "3.799.0", + "@aws-sdk/credential-provider-process": "3.799.0", + "@aws-sdk/credential-provider-sso": "3.803.0", + "@aws-sdk/credential-provider-web-identity": "3.803.0", + "@aws-sdk/nested-clients": "3.803.0", + "@aws-sdk/types": "3.775.0", + "@smithy/credential-provider-imds": "^4.0.2", + "@smithy/property-provider": "^4.0.2", + "@smithy/shared-ini-file-loader": "^4.0.2", + "@smithy/types": "^4.2.0", + "tslib": "^2.6.2" + }, + "engines": { + "node": ">=18.0.0" + } + }, + "node_modules/@aws-sdk/credential-provider-node": { + "version": "3.803.0", + "resolved": "https://registry.npmjs.org/@aws-sdk/credential-provider-node/-/credential-provider-node-3.803.0.tgz", + "integrity": "sha512-lPdRYbjxwmv7gRqbaEe1Y1Yl5fD4c43AuK3P31eKjf1j41hZEQ0dg9a9KLk7i6ehEoVsxewnJrvbC2pVoYrCmQ==", + "license": "Apache-2.0", + "dependencies": { + "@aws-sdk/credential-provider-env": "3.799.0", + "@aws-sdk/credential-provider-http": "3.799.0", + "@aws-sdk/credential-provider-ini": "3.803.0", + "@aws-sdk/credential-provider-process": "3.799.0", + "@aws-sdk/credential-provider-sso": "3.803.0", + "@aws-sdk/credential-provider-web-identity": "3.803.0", + "@aws-sdk/types": "3.775.0", + "@smithy/credential-provider-imds": "^4.0.2", + "@smithy/property-provider": "^4.0.2", + "@smithy/shared-ini-file-loader": "^4.0.2", + "@smithy/types": "^4.2.0", + "tslib": "^2.6.2" + }, + "engines": { + "node": ">=18.0.0" + } + }, + "node_modules/@aws-sdk/credential-provider-process": { + "version": "3.799.0", + "resolved": "https://registry.npmjs.org/@aws-sdk/credential-provider-process/-/credential-provider-process-3.799.0.tgz", + "integrity": "sha512-g8jmNs2k98WNHMYcea1YKA+7ao2Ma4w0P42Dz4YpcI155pQHxHx25RwbOG+rsAKuo3bKwkW53HVE/ZTKhcWFgw==", + "license": "Apache-2.0", + "dependencies": { + "@aws-sdk/core": "3.799.0", + "@aws-sdk/types": "3.775.0", + "@smithy/property-provider": "^4.0.2", + "@smithy/shared-ini-file-loader": 
"^4.0.2", + "@smithy/types": "^4.2.0", + "tslib": "^2.6.2" + }, + "engines": { + "node": ">=18.0.0" + } + }, + "node_modules/@aws-sdk/credential-provider-sso": { + "version": "3.803.0", + "resolved": "https://registry.npmjs.org/@aws-sdk/credential-provider-sso/-/credential-provider-sso-3.803.0.tgz", + "integrity": "sha512-HEAcxSHrHxVekGnZqjFrkqdYAf4jFiZIMhuh0jqiqY6A4udEyXy1V623HVcTz/XXj6UBRnyD+zmOmlbzBvkfQg==", + "license": "Apache-2.0", + "dependencies": { + "@aws-sdk/client-sso": "3.803.0", + "@aws-sdk/core": "3.799.0", + "@aws-sdk/token-providers": "3.803.0", + "@aws-sdk/types": "3.775.0", + "@smithy/property-provider": "^4.0.2", + "@smithy/shared-ini-file-loader": "^4.0.2", + "@smithy/types": "^4.2.0", + "tslib": "^2.6.2" + }, + "engines": { + "node": ">=18.0.0" + } + }, + "node_modules/@aws-sdk/credential-provider-web-identity": { + "version": "3.803.0", + "resolved": "https://registry.npmjs.org/@aws-sdk/credential-provider-web-identity/-/credential-provider-web-identity-3.803.0.tgz", + "integrity": "sha512-oChnEpwI25OW4GPvhI1VnXM3IQEkDhESGFZd5JHzJDHyvSF2NU58V86jkJyaa4H4X25IbGaThuulNI5xCOngjw==", + "license": "Apache-2.0", + "dependencies": { + "@aws-sdk/core": "3.799.0", + "@aws-sdk/nested-clients": "3.803.0", + "@aws-sdk/types": "3.775.0", + "@smithy/property-provider": "^4.0.2", + "@smithy/types": "^4.2.0", + "tslib": "^2.6.2" + }, + "engines": { + "node": ">=18.0.0" + } + }, + "node_modules/@aws-sdk/endpoint-cache": { + "version": "3.723.0", + "resolved": "https://registry.npmjs.org/@aws-sdk/endpoint-cache/-/endpoint-cache-3.723.0.tgz", + "integrity": "sha512-2+a4WXRc+07uiPR+zJiPGKSOWaNJQNqitkks+6Hhm/haTLJqNVTgY2OWDh2PXvwMNpKB+AlGdhE65Oy6NzUgXg==", + "license": "Apache-2.0", + "dependencies": { + "mnemonist": "0.38.3", + "tslib": "^2.6.2" + }, + "engines": { + "node": ">=18.0.0" + } + }, + "node_modules/@aws-sdk/lib-dynamodb": { + "version": "3.803.0", + "resolved": "https://registry.npmjs.org/@aws-sdk/lib-dynamodb/-/lib-dynamodb-3.803.0.tgz", + 
"integrity": "sha512-J9oeaKnF0vfw1ixUc0Bu1GTcYwp/riiGCst/MSLPHeGqoFiYzyox/im1Pbuv2Ipx7/0QI7w5PxYdxOpwvqMCFg==", + "license": "Apache-2.0", + "dependencies": { + "@aws-sdk/core": "3.799.0", + "@aws-sdk/util-dynamodb": "3.803.0", + "@smithy/core": "^3.3.0", + "@smithy/smithy-client": "^4.2.1", + "@smithy/types": "^4.2.0", + "tslib": "^2.6.2" + }, + "engines": { + "node": ">=18.0.0" + }, + "peerDependencies": { + "@aws-sdk/client-dynamodb": "^3.803.0" + } + }, + "node_modules/@aws-sdk/middleware-endpoint-discovery": { + "version": "3.775.0", + "resolved": "https://registry.npmjs.org/@aws-sdk/middleware-endpoint-discovery/-/middleware-endpoint-discovery-3.775.0.tgz", + "integrity": "sha512-L0PmjSg7t+wovRo/Lin1kpei3e7wBhrENWb1Bbccu3PWUIfxolGeWplOmNhSlXjuQe9GXjf3z8kJRYOGBMFOvw==", + "license": "Apache-2.0", + "dependencies": { + "@aws-sdk/endpoint-cache": "3.723.0", + "@aws-sdk/types": "3.775.0", + "@smithy/node-config-provider": "^4.0.2", + "@smithy/protocol-http": "^5.1.0", + "@smithy/types": "^4.2.0", + "tslib": "^2.6.2" + }, + "engines": { + "node": ">=18.0.0" + } + }, + "node_modules/@aws-sdk/middleware-host-header": { + "version": "3.775.0", + "resolved": "https://registry.npmjs.org/@aws-sdk/middleware-host-header/-/middleware-host-header-3.775.0.tgz", + "integrity": "sha512-tkSegM0Z6WMXpLB8oPys/d+umYIocvO298mGvcMCncpRl77L9XkvSLJIFzaHes+o7djAgIduYw8wKIMStFss2w==", + "license": "Apache-2.0", + "dependencies": { + "@aws-sdk/types": "3.775.0", + "@smithy/protocol-http": "^5.1.0", + "@smithy/types": "^4.2.0", + "tslib": "^2.6.2" + }, + "engines": { + "node": ">=18.0.0" + } + }, + "node_modules/@aws-sdk/middleware-logger": { + "version": "3.775.0", + "resolved": "https://registry.npmjs.org/@aws-sdk/middleware-logger/-/middleware-logger-3.775.0.tgz", + "integrity": "sha512-FaxO1xom4MAoUJsldmR92nT1G6uZxTdNYOFYtdHfd6N2wcNaTuxgjIvqzg5y7QIH9kn58XX/dzf1iTjgqUStZw==", + "license": "Apache-2.0", + "dependencies": { + "@aws-sdk/types": "3.775.0", + "@smithy/types": "^4.2.0", + 
"tslib": "^2.6.2" + }, + "engines": { + "node": ">=18.0.0" + } + }, + "node_modules/@aws-sdk/middleware-recursion-detection": { + "version": "3.775.0", + "resolved": "https://registry.npmjs.org/@aws-sdk/middleware-recursion-detection/-/middleware-recursion-detection-3.775.0.tgz", + "integrity": "sha512-GLCzC8D0A0YDG5u3F5U03Vb9j5tcOEFhr8oc6PDk0k0vm5VwtZOE6LvK7hcCSoAB4HXyOUM0sQuXrbaAh9OwXA==", + "license": "Apache-2.0", + "dependencies": { + "@aws-sdk/types": "3.775.0", + "@smithy/protocol-http": "^5.1.0", + "@smithy/types": "^4.2.0", + "tslib": "^2.6.2" + }, + "engines": { + "node": ">=18.0.0" + } + }, + "node_modules/@aws-sdk/middleware-user-agent": { + "version": "3.799.0", + "resolved": "https://registry.npmjs.org/@aws-sdk/middleware-user-agent/-/middleware-user-agent-3.799.0.tgz", + "integrity": "sha512-TropQZanbOTxa+p+Nl4fWkzlRhgFwDfW+Wb6TR3jZN7IXHNlPpgGFpdrgvBExhW/RBhqr+94OsR8Ou58lp3hhA==", + "license": "Apache-2.0", + "dependencies": { + "@aws-sdk/core": "3.799.0", + "@aws-sdk/types": "3.775.0", + "@aws-sdk/util-endpoints": "3.787.0", + "@smithy/core": "^3.3.0", + "@smithy/protocol-http": "^5.1.0", + "@smithy/types": "^4.2.0", + "tslib": "^2.6.2" + }, + "engines": { + "node": ">=18.0.0" + } + }, + "node_modules/@aws-sdk/nested-clients": { + "version": "3.803.0", + "resolved": "https://registry.npmjs.org/@aws-sdk/nested-clients/-/nested-clients-3.803.0.tgz", + "integrity": "sha512-wiWiYaFQxK2u37G9IOXuWkHelEbU8ulLxdHpoPf0TSu/1boqLW7fcofuZATAvFcvigQx3oebwO8G4c/mmixTTw==", + "license": "Apache-2.0", + "dependencies": { + "@aws-crypto/sha256-browser": "5.2.0", + "@aws-crypto/sha256-js": "5.2.0", + "@aws-sdk/core": "3.799.0", + "@aws-sdk/middleware-host-header": "3.775.0", + "@aws-sdk/middleware-logger": "3.775.0", + "@aws-sdk/middleware-recursion-detection": "3.775.0", + "@aws-sdk/middleware-user-agent": "3.799.0", + "@aws-sdk/region-config-resolver": "3.775.0", + "@aws-sdk/types": "3.775.0", + "@aws-sdk/util-endpoints": "3.787.0", + 
"@aws-sdk/util-user-agent-browser": "3.775.0", + "@aws-sdk/util-user-agent-node": "3.799.0", + "@smithy/config-resolver": "^4.1.0", + "@smithy/core": "^3.3.0", + "@smithy/fetch-http-handler": "^5.0.2", + "@smithy/hash-node": "^4.0.2", + "@smithy/invalid-dependency": "^4.0.2", + "@smithy/middleware-content-length": "^4.0.2", + "@smithy/middleware-endpoint": "^4.1.1", + "@smithy/middleware-retry": "^4.1.2", + "@smithy/middleware-serde": "^4.0.3", + "@smithy/middleware-stack": "^4.0.2", + "@smithy/node-config-provider": "^4.0.2", + "@smithy/node-http-handler": "^4.0.4", + "@smithy/protocol-http": "^5.1.0", + "@smithy/smithy-client": "^4.2.1", + "@smithy/types": "^4.2.0", + "@smithy/url-parser": "^4.0.2", + "@smithy/util-base64": "^4.0.0", + "@smithy/util-body-length-browser": "^4.0.0", + "@smithy/util-body-length-node": "^4.0.0", + "@smithy/util-defaults-mode-browser": "^4.0.9", + "@smithy/util-defaults-mode-node": "^4.0.9", + "@smithy/util-endpoints": "^3.0.2", + "@smithy/util-middleware": "^4.0.2", + "@smithy/util-retry": "^4.0.3", + "@smithy/util-utf8": "^4.0.0", + "tslib": "^2.6.2" + }, + "engines": { + "node": ">=18.0.0" + } + }, + "node_modules/@aws-sdk/region-config-resolver": { + "version": "3.775.0", + "resolved": "https://registry.npmjs.org/@aws-sdk/region-config-resolver/-/region-config-resolver-3.775.0.tgz", + "integrity": "sha512-40iH3LJjrQS3LKUJAl7Wj0bln7RFPEvUYKFxtP8a+oKFDO0F65F52xZxIJbPn6sHkxWDAnZlGgdjZXM3p2g5wQ==", + "license": "Apache-2.0", + "dependencies": { + "@aws-sdk/types": "3.775.0", + "@smithy/node-config-provider": "^4.0.2", + "@smithy/types": "^4.2.0", + "@smithy/util-config-provider": "^4.0.0", + "@smithy/util-middleware": "^4.0.2", + "tslib": "^2.6.2" + }, + "engines": { + "node": ">=18.0.0" + } + }, + "node_modules/@aws-sdk/token-providers": { + "version": "3.803.0", + "resolved": "https://registry.npmjs.org/@aws-sdk/token-providers/-/token-providers-3.803.0.tgz", + "integrity": 
"sha512-lDbMgVjWWEPT7a6lLaAEPPljwOeLTjPX2sJ7MoDICpQotg4Yd8cQfX3nqScSyLAGSc7Rq/21UPnPoij/E0K3lg==", + "license": "Apache-2.0", + "dependencies": { + "@aws-sdk/nested-clients": "3.803.0", + "@aws-sdk/types": "3.775.0", + "@smithy/property-provider": "^4.0.2", + "@smithy/shared-ini-file-loader": "^4.0.2", + "@smithy/types": "^4.2.0", + "tslib": "^2.6.2" + }, + "engines": { + "node": ">=18.0.0" + } + }, + "node_modules/@aws-sdk/types": { + "version": "3.775.0", + "resolved": "https://registry.npmjs.org/@aws-sdk/types/-/types-3.775.0.tgz", + "integrity": "sha512-ZoGKwa4C9fC9Av6bdfqcW6Ix5ot05F/S4VxWR2nHuMv7hzfmAjTOcUiWT7UR4hM/U0whf84VhDtXN/DWAk52KA==", + "license": "Apache-2.0", + "dependencies": { + "@smithy/types": "^4.2.0", + "tslib": "^2.6.2" + }, + "engines": { + "node": ">=18.0.0" + } + }, + "node_modules/@aws-sdk/util-dynamodb": { + "version": "3.803.0", + "resolved": "https://registry.npmjs.org/@aws-sdk/util-dynamodb/-/util-dynamodb-3.803.0.tgz", + "integrity": "sha512-QiXvurnve8xIm41Zf/jNXwcYotDX3KZbHcsTaJ7ILhyFomqCjJ6bjLcCRdfndG600N5ua6AEK2XGw1luyBQxig==", + "license": "Apache-2.0", + "dependencies": { + "tslib": "^2.6.2" + }, + "engines": { + "node": ">=18.0.0" + }, + "peerDependencies": { + "@aws-sdk/client-dynamodb": "^3.803.0" + } + }, + "node_modules/@aws-sdk/util-endpoints": { + "version": "3.787.0", + "resolved": "https://registry.npmjs.org/@aws-sdk/util-endpoints/-/util-endpoints-3.787.0.tgz", + "integrity": "sha512-fd3zkiOkwnbdbN0Xp9TsP5SWrmv0SpT70YEdbb8wAj2DWQwiCmFszaSs+YCvhoCdmlR3Wl9Spu0pGpSAGKeYvQ==", + "license": "Apache-2.0", + "dependencies": { + "@aws-sdk/types": "3.775.0", + "@smithy/types": "^4.2.0", + "@smithy/util-endpoints": "^3.0.2", + "tslib": "^2.6.2" + }, + "engines": { + "node": ">=18.0.0" + } + }, + "node_modules/@aws-sdk/util-locate-window": { + "version": "3.723.0", + "resolved": "https://registry.npmjs.org/@aws-sdk/util-locate-window/-/util-locate-window-3.723.0.tgz", + "integrity": 
"sha512-Yf2CS10BqK688DRsrKI/EO6B8ff5J86NXe4C+VCysK7UOgN0l1zOTeTukZ3H8Q9tYYX3oaF1961o8vRkFm7Nmw==", + "license": "Apache-2.0", + "dependencies": { + "tslib": "^2.6.2" + }, + "engines": { + "node": ">=18.0.0" + } + }, + "node_modules/@aws-sdk/util-user-agent-browser": { + "version": "3.775.0", + "resolved": "https://registry.npmjs.org/@aws-sdk/util-user-agent-browser/-/util-user-agent-browser-3.775.0.tgz", + "integrity": "sha512-txw2wkiJmZKVdDbscK7VBK+u+TJnRtlUjRTLei+elZg2ADhpQxfVAQl436FUeIv6AhB/oRHW6/K/EAGXUSWi0A==", + "license": "Apache-2.0", + "dependencies": { + "@aws-sdk/types": "3.775.0", + "@smithy/types": "^4.2.0", + "bowser": "^2.11.0", + "tslib": "^2.6.2" + } + }, + "node_modules/@aws-sdk/util-user-agent-node": { + "version": "3.799.0", + "resolved": "https://registry.npmjs.org/@aws-sdk/util-user-agent-node/-/util-user-agent-node-3.799.0.tgz", + "integrity": "sha512-iXBk38RbIWPF5Nq9O4AnktORAzXovSVqWYClvS1qbE7ILsnTLJbagU9HlU25O2iV5COVh1qZkwuP5NHQ2yTEyw==", + "license": "Apache-2.0", + "dependencies": { + "@aws-sdk/middleware-user-agent": "3.799.0", + "@aws-sdk/types": "3.775.0", + "@smithy/node-config-provider": "^4.0.2", + "@smithy/types": "^4.2.0", + "tslib": "^2.6.2" + }, + "engines": { + "node": ">=18.0.0" + }, + "peerDependencies": { + "aws-crt": ">=1.0.0" + }, + "peerDependenciesMeta": { + "aws-crt": { + "optional": true + } + } + }, + "node_modules/@smithy/abort-controller": { + "version": "4.0.2", + "resolved": "https://registry.npmjs.org/@smithy/abort-controller/-/abort-controller-4.0.2.tgz", + "integrity": "sha512-Sl/78VDtgqKxN2+1qduaVE140XF+Xg+TafkncspwM4jFP/LHr76ZHmIY/y3V1M0mMLNk+Je6IGbzxy23RSToMw==", + "license": "Apache-2.0", + "dependencies": { + "@smithy/types": "^4.2.0", + "tslib": "^2.6.2" + }, + "engines": { + "node": ">=18.0.0" + } + }, + "node_modules/@smithy/config-resolver": { + "version": "4.1.0", + "resolved": "https://registry.npmjs.org/@smithy/config-resolver/-/config-resolver-4.1.0.tgz", + "integrity": 
"sha512-8smPlwhga22pwl23fM5ew4T9vfLUCeFXlcqNOCD5M5h8VmNPNUE9j6bQSuRXpDSV11L/E/SwEBQuW8hr6+nS1A==", + "license": "Apache-2.0", + "dependencies": { + "@smithy/node-config-provider": "^4.0.2", + "@smithy/types": "^4.2.0", + "@smithy/util-config-provider": "^4.0.0", + "@smithy/util-middleware": "^4.0.2", + "tslib": "^2.6.2" + }, + "engines": { + "node": ">=18.0.0" + } + }, + "node_modules/@smithy/core": { + "version": "3.3.1", + "resolved": "https://registry.npmjs.org/@smithy/core/-/core-3.3.1.tgz", + "integrity": "sha512-W7AppgQD3fP1aBmo8wWo0id5zeR2/aYRy067vZsDVaa6v/mdhkg6DxXwEVuSPjZl+ZnvWAQbUMCd5ckw38+tHQ==", + "license": "Apache-2.0", + "dependencies": { + "@smithy/middleware-serde": "^4.0.3", + "@smithy/protocol-http": "^5.1.0", + "@smithy/types": "^4.2.0", + "@smithy/util-body-length-browser": "^4.0.0", + "@smithy/util-middleware": "^4.0.2", + "@smithy/util-stream": "^4.2.0", + "@smithy/util-utf8": "^4.0.0", + "tslib": "^2.6.2" + }, + "engines": { + "node": ">=18.0.0" + } + }, + "node_modules/@smithy/credential-provider-imds": { + "version": "4.0.2", + "resolved": "https://registry.npmjs.org/@smithy/credential-provider-imds/-/credential-provider-imds-4.0.2.tgz", + "integrity": "sha512-32lVig6jCaWBHnY+OEQ6e6Vnt5vDHaLiydGrwYMW9tPqO688hPGTYRamYJ1EptxEC2rAwJrHWmPoKRBl4iTa8w==", + "license": "Apache-2.0", + "dependencies": { + "@smithy/node-config-provider": "^4.0.2", + "@smithy/property-provider": "^4.0.2", + "@smithy/types": "^4.2.0", + "@smithy/url-parser": "^4.0.2", + "tslib": "^2.6.2" + }, + "engines": { + "node": ">=18.0.0" + } + }, + "node_modules/@smithy/fetch-http-handler": { + "version": "5.0.2", + "resolved": "https://registry.npmjs.org/@smithy/fetch-http-handler/-/fetch-http-handler-5.0.2.tgz", + "integrity": "sha512-+9Dz8sakS9pe7f2cBocpJXdeVjMopUDLgZs1yWeu7h++WqSbjUYv/JAJwKwXw1HV6gq1jyWjxuyn24E2GhoEcQ==", + "license": "Apache-2.0", + "dependencies": { + "@smithy/protocol-http": "^5.1.0", + "@smithy/querystring-builder": "^4.0.2", + "@smithy/types": 
"^4.2.0", + "@smithy/util-base64": "^4.0.0", + "tslib": "^2.6.2" + }, + "engines": { + "node": ">=18.0.0" + } + }, + "node_modules/@smithy/hash-node": { + "version": "4.0.2", + "resolved": "https://registry.npmjs.org/@smithy/hash-node/-/hash-node-4.0.2.tgz", + "integrity": "sha512-VnTpYPnRUE7yVhWozFdlxcYknv9UN7CeOqSrMH+V877v4oqtVYuoqhIhtSjmGPvYrYnAkaM61sLMKHvxL138yg==", + "license": "Apache-2.0", + "dependencies": { + "@smithy/types": "^4.2.0", + "@smithy/util-buffer-from": "^4.0.0", + "@smithy/util-utf8": "^4.0.0", + "tslib": "^2.6.2" + }, + "engines": { + "node": ">=18.0.0" + } + }, + "node_modules/@smithy/invalid-dependency": { + "version": "4.0.2", + "resolved": "https://registry.npmjs.org/@smithy/invalid-dependency/-/invalid-dependency-4.0.2.tgz", + "integrity": "sha512-GatB4+2DTpgWPday+mnUkoumP54u/MDM/5u44KF9hIu8jF0uafZtQLcdfIKkIcUNuF/fBojpLEHZS/56JqPeXQ==", + "license": "Apache-2.0", + "dependencies": { + "@smithy/types": "^4.2.0", + "tslib": "^2.6.2" + }, + "engines": { + "node": ">=18.0.0" + } + }, + "node_modules/@smithy/is-array-buffer": { + "version": "4.0.0", + "resolved": "https://registry.npmjs.org/@smithy/is-array-buffer/-/is-array-buffer-4.0.0.tgz", + "integrity": "sha512-saYhF8ZZNoJDTvJBEWgeBccCg+yvp1CX+ed12yORU3NilJScfc6gfch2oVb4QgxZrGUx3/ZJlb+c/dJbyupxlw==", + "license": "Apache-2.0", + "dependencies": { + "tslib": "^2.6.2" + }, + "engines": { + "node": ">=18.0.0" + } + }, + "node_modules/@smithy/middleware-content-length": { + "version": "4.0.2", + "resolved": "https://registry.npmjs.org/@smithy/middleware-content-length/-/middleware-content-length-4.0.2.tgz", + "integrity": "sha512-hAfEXm1zU+ELvucxqQ7I8SszwQ4znWMbNv6PLMndN83JJN41EPuS93AIyh2N+gJ6x8QFhzSO6b7q2e6oClDI8A==", + "license": "Apache-2.0", + "dependencies": { + "@smithy/protocol-http": "^5.1.0", + "@smithy/types": "^4.2.0", + "tslib": "^2.6.2" + }, + "engines": { + "node": ">=18.0.0" + } + }, + "node_modules/@smithy/middleware-endpoint": { + "version": "4.1.2", + "resolved": 
"https://registry.npmjs.org/@smithy/middleware-endpoint/-/middleware-endpoint-4.1.2.tgz", + "integrity": "sha512-EqOy3xaEGQpsKxLlzYstDRJ8eY90CbyBP4cl+w7r45mE60S8YliyL9AgWsdWcyNiB95E2PMqHBEv67nNl1zLfg==", + "license": "Apache-2.0", + "dependencies": { + "@smithy/core": "^3.3.1", + "@smithy/middleware-serde": "^4.0.3", + "@smithy/node-config-provider": "^4.0.2", + "@smithy/shared-ini-file-loader": "^4.0.2", + "@smithy/types": "^4.2.0", + "@smithy/url-parser": "^4.0.2", + "@smithy/util-middleware": "^4.0.2", + "tslib": "^2.6.2" + }, + "engines": { + "node": ">=18.0.0" + } + }, + "node_modules/@smithy/middleware-retry": { + "version": "4.1.3", + "resolved": "https://registry.npmjs.org/@smithy/middleware-retry/-/middleware-retry-4.1.3.tgz", + "integrity": "sha512-AsJtI9KiFoEGAhcEKZyzzPfrszAQGcf4HSYKmenz0WGx/6YNvoPPv4OSGfZTCsDmgPHv4pXzxE+7QV7jcGWNKw==", + "license": "Apache-2.0", + "dependencies": { + "@smithy/node-config-provider": "^4.0.2", + "@smithy/protocol-http": "^5.1.0", + "@smithy/service-error-classification": "^4.0.3", + "@smithy/smithy-client": "^4.2.2", + "@smithy/types": "^4.2.0", + "@smithy/util-middleware": "^4.0.2", + "@smithy/util-retry": "^4.0.3", + "tslib": "^2.6.2", + "uuid": "^9.0.1" + }, + "engines": { + "node": ">=18.0.0" + } + }, + "node_modules/@smithy/middleware-serde": { + "version": "4.0.3", + "resolved": "https://registry.npmjs.org/@smithy/middleware-serde/-/middleware-serde-4.0.3.tgz", + "integrity": "sha512-rfgDVrgLEVMmMn0BI8O+8OVr6vXzjV7HZj57l0QxslhzbvVfikZbVfBVthjLHqib4BW44QhcIgJpvebHlRaC9A==", + "license": "Apache-2.0", + "dependencies": { + "@smithy/types": "^4.2.0", + "tslib": "^2.6.2" + }, + "engines": { + "node": ">=18.0.0" + } + }, + "node_modules/@smithy/middleware-stack": { + "version": "4.0.2", + "resolved": "https://registry.npmjs.org/@smithy/middleware-stack/-/middleware-stack-4.0.2.tgz", + "integrity": "sha512-eSPVcuJJGVYrFYu2hEq8g8WWdJav3sdrI4o2c6z/rjnYDd3xH9j9E7deZQCzFn4QvGPouLngH3dQ+QVTxv5bOQ==", + "license": "Apache-2.0", 
+ "dependencies": { + "@smithy/types": "^4.2.0", + "tslib": "^2.6.2" + }, + "engines": { + "node": ">=18.0.0" + } + }, + "node_modules/@smithy/node-config-provider": { + "version": "4.0.2", + "resolved": "https://registry.npmjs.org/@smithy/node-config-provider/-/node-config-provider-4.0.2.tgz", + "integrity": "sha512-WgCkILRZfJwJ4Da92a6t3ozN/zcvYyJGUTmfGbgS/FkCcoCjl7G4FJaCDN1ySdvLvemnQeo25FdkyMSTSwulsw==", + "license": "Apache-2.0", + "dependencies": { + "@smithy/property-provider": "^4.0.2", + "@smithy/shared-ini-file-loader": "^4.0.2", + "@smithy/types": "^4.2.0", + "tslib": "^2.6.2" + }, + "engines": { + "node": ">=18.0.0" + } + }, + "node_modules/@smithy/node-http-handler": { + "version": "4.0.4", + "resolved": "https://registry.npmjs.org/@smithy/node-http-handler/-/node-http-handler-4.0.4.tgz", + "integrity": "sha512-/mdqabuAT3o/ihBGjL94PUbTSPSRJ0eeVTdgADzow0wRJ0rN4A27EOrtlK56MYiO1fDvlO3jVTCxQtQmK9dZ1g==", + "license": "Apache-2.0", + "dependencies": { + "@smithy/abort-controller": "^4.0.2", + "@smithy/protocol-http": "^5.1.0", + "@smithy/querystring-builder": "^4.0.2", + "@smithy/types": "^4.2.0", + "tslib": "^2.6.2" + }, + "engines": { + "node": ">=18.0.0" + } + }, + "node_modules/@smithy/property-provider": { + "version": "4.0.2", + "resolved": "https://registry.npmjs.org/@smithy/property-provider/-/property-provider-4.0.2.tgz", + "integrity": "sha512-wNRoQC1uISOuNc2s4hkOYwYllmiyrvVXWMtq+TysNRVQaHm4yoafYQyjN/goYZS+QbYlPIbb/QRjaUZMuzwQ7A==", + "license": "Apache-2.0", + "dependencies": { + "@smithy/types": "^4.2.0", + "tslib": "^2.6.2" + }, + "engines": { + "node": ">=18.0.0" + } + }, + "node_modules/@smithy/protocol-http": { + "version": "5.1.0", + "resolved": "https://registry.npmjs.org/@smithy/protocol-http/-/protocol-http-5.1.0.tgz", + "integrity": "sha512-KxAOL1nUNw2JTYrtviRRjEnykIDhxc84qMBzxvu1MUfQfHTuBlCG7PA6EdVwqpJjH7glw7FqQoFxUJSyBQgu7g==", + "license": "Apache-2.0", + "dependencies": { + "@smithy/types": "^4.2.0", + "tslib": "^2.6.2" + }, + 
"engines": { + "node": ">=18.0.0" + } + }, + "node_modules/@smithy/querystring-builder": { + "version": "4.0.2", + "resolved": "https://registry.npmjs.org/@smithy/querystring-builder/-/querystring-builder-4.0.2.tgz", + "integrity": "sha512-NTOs0FwHw1vimmQM4ebh+wFQvOwkEf/kQL6bSM1Lock+Bv4I89B3hGYoUEPkmvYPkDKyp5UdXJYu+PoTQ3T31Q==", + "license": "Apache-2.0", + "dependencies": { + "@smithy/types": "^4.2.0", + "@smithy/util-uri-escape": "^4.0.0", + "tslib": "^2.6.2" + }, + "engines": { + "node": ">=18.0.0" + } + }, + "node_modules/@smithy/querystring-parser": { + "version": "4.0.2", + "resolved": "https://registry.npmjs.org/@smithy/querystring-parser/-/querystring-parser-4.0.2.tgz", + "integrity": "sha512-v6w8wnmZcVXjfVLjxw8qF7OwESD9wnpjp0Dqry/Pod0/5vcEA3qxCr+BhbOHlxS8O+29eLpT3aagxXGwIoEk7Q==", + "license": "Apache-2.0", + "dependencies": { + "@smithy/types": "^4.2.0", + "tslib": "^2.6.2" + }, + "engines": { + "node": ">=18.0.0" + } + }, + "node_modules/@smithy/service-error-classification": { + "version": "4.0.3", + "resolved": "https://registry.npmjs.org/@smithy/service-error-classification/-/service-error-classification-4.0.3.tgz", + "integrity": "sha512-FTbcajmltovWMjj3tksDQdD23b2w6gH+A0DYA1Yz3iSpjDj8fmkwy62UnXcWMy4d5YoMoSyLFHMfkEVEzbiN8Q==", + "license": "Apache-2.0", + "dependencies": { + "@smithy/types": "^4.2.0" + }, + "engines": { + "node": ">=18.0.0" + } + }, + "node_modules/@smithy/shared-ini-file-loader": { + "version": "4.0.2", + "resolved": "https://registry.npmjs.org/@smithy/shared-ini-file-loader/-/shared-ini-file-loader-4.0.2.tgz", + "integrity": "sha512-J9/gTWBGVuFZ01oVA6vdb4DAjf1XbDhK6sLsu3OS9qmLrS6KB5ygpeHiM3miIbj1qgSJ96GYszXFWv6ErJ8QEw==", + "license": "Apache-2.0", + "dependencies": { + "@smithy/types": "^4.2.0", + "tslib": "^2.6.2" + }, + "engines": { + "node": ">=18.0.0" + } + }, + "node_modules/@smithy/signature-v4": { + "version": "5.1.0", + "resolved": "https://registry.npmjs.org/@smithy/signature-v4/-/signature-v4-5.1.0.tgz", + "integrity": 
"sha512-4t5WX60sL3zGJF/CtZsUQTs3UrZEDO2P7pEaElrekbLqkWPYkgqNW1oeiNYC6xXifBnT9dVBOnNQRvOE9riU9w==", + "license": "Apache-2.0", + "dependencies": { + "@smithy/is-array-buffer": "^4.0.0", + "@smithy/protocol-http": "^5.1.0", + "@smithy/types": "^4.2.0", + "@smithy/util-hex-encoding": "^4.0.0", + "@smithy/util-middleware": "^4.0.2", + "@smithy/util-uri-escape": "^4.0.0", + "@smithy/util-utf8": "^4.0.0", + "tslib": "^2.6.2" + }, + "engines": { + "node": ">=18.0.0" + } + }, + "node_modules/@smithy/smithy-client": { + "version": "4.2.2", + "resolved": "https://registry.npmjs.org/@smithy/smithy-client/-/smithy-client-4.2.2.tgz", + "integrity": "sha512-3AnHfsMdq9Wg7+3BeR1HuLWI9+DMA/SoHVpCWq6xSsa52ikNd6nlF/wFzdpHyGtVa+Aji6lMgvwOF4sGcVA7SA==", + "license": "Apache-2.0", + "dependencies": { + "@smithy/core": "^3.3.1", + "@smithy/middleware-endpoint": "^4.1.2", + "@smithy/middleware-stack": "^4.0.2", + "@smithy/protocol-http": "^5.1.0", + "@smithy/types": "^4.2.0", + "@smithy/util-stream": "^4.2.0", + "tslib": "^2.6.2" + }, + "engines": { + "node": ">=18.0.0" + } + }, + "node_modules/@smithy/types": { + "version": "4.2.0", + "resolved": "https://registry.npmjs.org/@smithy/types/-/types-4.2.0.tgz", + "integrity": "sha512-7eMk09zQKCO+E/ivsjQv+fDlOupcFUCSC/L2YUPgwhvowVGWbPQHjEFcmjt7QQ4ra5lyowS92SV53Zc6XD4+fg==", + "license": "Apache-2.0", + "dependencies": { + "tslib": "^2.6.2" + }, + "engines": { + "node": ">=18.0.0" + } + }, + "node_modules/@smithy/url-parser": { + "version": "4.0.2", + "resolved": "https://registry.npmjs.org/@smithy/url-parser/-/url-parser-4.0.2.tgz", + "integrity": "sha512-Bm8n3j2ScqnT+kJaClSVCMeiSenK6jVAzZCNewsYWuZtnBehEz4r2qP0riZySZVfzB+03XZHJeqfmJDkeeSLiQ==", + "license": "Apache-2.0", + "dependencies": { + "@smithy/querystring-parser": "^4.0.2", + "@smithy/types": "^4.2.0", + "tslib": "^2.6.2" + }, + "engines": { + "node": ">=18.0.0" + } + }, + "node_modules/@smithy/util-base64": { + "version": "4.0.0", + "resolved": 
"https://registry.npmjs.org/@smithy/util-base64/-/util-base64-4.0.0.tgz", + "integrity": "sha512-CvHfCmO2mchox9kjrtzoHkWHxjHZzaFojLc8quxXY7WAAMAg43nuxwv95tATVgQFNDwd4M9S1qFzj40Ul41Kmg==", + "license": "Apache-2.0", + "dependencies": { + "@smithy/util-buffer-from": "^4.0.0", + "@smithy/util-utf8": "^4.0.0", + "tslib": "^2.6.2" + }, + "engines": { + "node": ">=18.0.0" + } + }, + "node_modules/@smithy/util-body-length-browser": { + "version": "4.0.0", + "resolved": "https://registry.npmjs.org/@smithy/util-body-length-browser/-/util-body-length-browser-4.0.0.tgz", + "integrity": "sha512-sNi3DL0/k64/LO3A256M+m3CDdG6V7WKWHdAiBBMUN8S3hK3aMPhwnPik2A/a2ONN+9doY9UxaLfgqsIRg69QA==", + "license": "Apache-2.0", + "dependencies": { + "tslib": "^2.6.2" + }, + "engines": { + "node": ">=18.0.0" + } + }, + "node_modules/@smithy/util-body-length-node": { + "version": "4.0.0", + "resolved": "https://registry.npmjs.org/@smithy/util-body-length-node/-/util-body-length-node-4.0.0.tgz", + "integrity": "sha512-q0iDP3VsZzqJyje8xJWEJCNIu3lktUGVoSy1KB0UWym2CL1siV3artm+u1DFYTLejpsrdGyCSWBdGNjJzfDPjg==", + "license": "Apache-2.0", + "dependencies": { + "tslib": "^2.6.2" + }, + "engines": { + "node": ">=18.0.0" + } + }, + "node_modules/@smithy/util-buffer-from": { + "version": "4.0.0", + "resolved": "https://registry.npmjs.org/@smithy/util-buffer-from/-/util-buffer-from-4.0.0.tgz", + "integrity": "sha512-9TOQ7781sZvddgO8nxueKi3+yGvkY35kotA0Y6BWRajAv8jjmigQ1sBwz0UX47pQMYXJPahSKEKYFgt+rXdcug==", + "license": "Apache-2.0", + "dependencies": { + "@smithy/is-array-buffer": "^4.0.0", + "tslib": "^2.6.2" + }, + "engines": { + "node": ">=18.0.0" + } + }, + "node_modules/@smithy/util-config-provider": { + "version": "4.0.0", + "resolved": "https://registry.npmjs.org/@smithy/util-config-provider/-/util-config-provider-4.0.0.tgz", + "integrity": "sha512-L1RBVzLyfE8OXH+1hsJ8p+acNUSirQnWQ6/EgpchV88G6zGBTDPdXiiExei6Z1wR2RxYvxY/XLw6AMNCCt8H3w==", + "license": "Apache-2.0", + "dependencies": { + "tslib": 
"^2.6.2" + }, + "engines": { + "node": ">=18.0.0" + } + }, + "node_modules/@smithy/util-defaults-mode-browser": { + "version": "4.0.10", + "resolved": "https://registry.npmjs.org/@smithy/util-defaults-mode-browser/-/util-defaults-mode-browser-4.0.10.tgz", + "integrity": "sha512-2k6fgUNOZ1Rn0gEjvGPGrDEINLG8qSBHsN7xlkkbO+fnHJ36BQPDzhFfMmYSDS8AgzoygqQiDOQ+6Hp2vBTUdA==", + "license": "Apache-2.0", + "dependencies": { + "@smithy/property-provider": "^4.0.2", + "@smithy/smithy-client": "^4.2.2", + "@smithy/types": "^4.2.0", + "bowser": "^2.11.0", + "tslib": "^2.6.2" + }, + "engines": { + "node": ">=18.0.0" + } + }, + "node_modules/@smithy/util-defaults-mode-node": { + "version": "4.0.10", + "resolved": "https://registry.npmjs.org/@smithy/util-defaults-mode-node/-/util-defaults-mode-node-4.0.10.tgz", + "integrity": "sha512-2XR1WRglLVmoIFts7bODUTgBdVyvkfKNkydHrlsI5VxW9q3s1hnJCuY+f1OHzvj5ue23q4vydM2fjrMjf2HSdQ==", + "license": "Apache-2.0", + "dependencies": { + "@smithy/config-resolver": "^4.1.0", + "@smithy/credential-provider-imds": "^4.0.2", + "@smithy/node-config-provider": "^4.0.2", + "@smithy/property-provider": "^4.0.2", + "@smithy/smithy-client": "^4.2.2", + "@smithy/types": "^4.2.0", + "tslib": "^2.6.2" + }, + "engines": { + "node": ">=18.0.0" + } + }, + "node_modules/@smithy/util-endpoints": { + "version": "3.0.2", + "resolved": "https://registry.npmjs.org/@smithy/util-endpoints/-/util-endpoints-3.0.2.tgz", + "integrity": "sha512-6QSutU5ZyrpNbnd51zRTL7goojlcnuOB55+F9VBD+j8JpRY50IGamsjlycrmpn8PQkmJucFW8A0LSfXj7jjtLQ==", + "license": "Apache-2.0", + "dependencies": { + "@smithy/node-config-provider": "^4.0.2", + "@smithy/types": "^4.2.0", + "tslib": "^2.6.2" + }, + "engines": { + "node": ">=18.0.0" + } + }, + "node_modules/@smithy/util-hex-encoding": { + "version": "4.0.0", + "resolved": "https://registry.npmjs.org/@smithy/util-hex-encoding/-/util-hex-encoding-4.0.0.tgz", + "integrity": 
"sha512-Yk5mLhHtfIgW2W2WQZWSg5kuMZCVbvhFmC7rV4IO2QqnZdbEFPmQnCcGMAX2z/8Qj3B9hYYNjZOhWym+RwhePw==", + "license": "Apache-2.0", + "dependencies": { + "tslib": "^2.6.2" + }, + "engines": { + "node": ">=18.0.0" + } + }, + "node_modules/@smithy/util-middleware": { + "version": "4.0.2", + "resolved": "https://registry.npmjs.org/@smithy/util-middleware/-/util-middleware-4.0.2.tgz", + "integrity": "sha512-6GDamTGLuBQVAEuQ4yDQ+ti/YINf/MEmIegrEeg7DdB/sld8BX1lqt9RRuIcABOhAGTA50bRbPzErez7SlDtDQ==", + "license": "Apache-2.0", + "dependencies": { + "@smithy/types": "^4.2.0", + "tslib": "^2.6.2" + }, + "engines": { + "node": ">=18.0.0" + } + }, + "node_modules/@smithy/util-retry": { + "version": "4.0.3", + "resolved": "https://registry.npmjs.org/@smithy/util-retry/-/util-retry-4.0.3.tgz", + "integrity": "sha512-DPuYjZQDXmKr/sNvy9Spu8R/ESa2e22wXZzSAY6NkjOLj6spbIje/Aq8rT97iUMdDj0qHMRIe+bTxvlU74d9Ng==", + "license": "Apache-2.0", + "dependencies": { + "@smithy/service-error-classification": "^4.0.3", + "@smithy/types": "^4.2.0", + "tslib": "^2.6.2" + }, + "engines": { + "node": ">=18.0.0" + } + }, + "node_modules/@smithy/util-stream": { + "version": "4.2.0", + "resolved": "https://registry.npmjs.org/@smithy/util-stream/-/util-stream-4.2.0.tgz", + "integrity": "sha512-Vj1TtwWnuWqdgQI6YTUF5hQ/0jmFiOYsc51CSMgj7QfyO+RF4EnT2HNjoviNlOOmgzgvf3f5yno+EiC4vrnaWQ==", + "license": "Apache-2.0", + "dependencies": { + "@smithy/fetch-http-handler": "^5.0.2", + "@smithy/node-http-handler": "^4.0.4", + "@smithy/types": "^4.2.0", + "@smithy/util-base64": "^4.0.0", + "@smithy/util-buffer-from": "^4.0.0", + "@smithy/util-hex-encoding": "^4.0.0", + "@smithy/util-utf8": "^4.0.0", + "tslib": "^2.6.2" + }, + "engines": { + "node": ">=18.0.0" + } + }, + "node_modules/@smithy/util-uri-escape": { + "version": "4.0.0", + "resolved": "https://registry.npmjs.org/@smithy/util-uri-escape/-/util-uri-escape-4.0.0.tgz", + "integrity": 
"sha512-77yfbCbQMtgtTylO9itEAdpPXSog3ZxMe09AEhm0dU0NLTalV70ghDZFR+Nfi1C60jnJoh/Re4090/DuZh2Omg==", + "license": "Apache-2.0", + "dependencies": { + "tslib": "^2.6.2" + }, + "engines": { + "node": ">=18.0.0" + } + }, + "node_modules/@smithy/util-utf8": { + "version": "4.0.0", + "resolved": "https://registry.npmjs.org/@smithy/util-utf8/-/util-utf8-4.0.0.tgz", + "integrity": "sha512-b+zebfKCfRdgNJDknHCob3O7FpeYQN6ZG6YLExMcasDHsCXlsXCEuiPZeLnJLpwa5dvPetGlnGCiMHuLwGvFow==", + "license": "Apache-2.0", + "dependencies": { + "@smithy/util-buffer-from": "^4.0.0", + "tslib": "^2.6.2" + }, + "engines": { + "node": ">=18.0.0" + } + }, + "node_modules/@smithy/util-waiter": { + "version": "4.0.3", + "resolved": "https://registry.npmjs.org/@smithy/util-waiter/-/util-waiter-4.0.3.tgz", + "integrity": "sha512-JtaY3FxmD+te+KSI2FJuEcfNC9T/DGGVf551babM7fAaXhjJUt7oSYurH1Devxd2+BOSUACCgt3buinx4UnmEA==", + "license": "Apache-2.0", + "dependencies": { + "@smithy/abort-controller": "^4.0.2", + "@smithy/types": "^4.2.0", + "tslib": "^2.6.2" + }, + "engines": { + "node": ">=18.0.0" + } + }, + "node_modules/@types/uuid": { + "version": "9.0.8", + "resolved": "https://registry.npmjs.org/@types/uuid/-/uuid-9.0.8.tgz", + "integrity": "sha512-jg+97EGIcY9AGHJJRaaPVgetKDsrTgbRjQ5Msgjh/DQKEFl0DtyRr/VCOyD1T2R1MNeWPK/u7JoGhlDZnKBAfA==", + "license": "MIT" + }, + "node_modules/bowser": { + "version": "2.11.0", + "resolved": "https://registry.npmjs.org/bowser/-/bowser-2.11.0.tgz", + "integrity": "sha512-AlcaJBi/pqqJBIQ8U9Mcpc9i8Aqxn88Skv5d+xBX006BY5u8N3mGLHa5Lgppa7L/HfwgwLgZ6NYs+Ag6uUmJRA==", + "license": "MIT" + }, + "node_modules/fast-xml-parser": { + "version": "4.4.1", + "resolved": "https://registry.npmjs.org/fast-xml-parser/-/fast-xml-parser-4.4.1.tgz", + "integrity": "sha512-xkjOecfnKGkSsOwtZ5Pz7Us/T6mrbPQrq0nh+aCO5V9nk5NLWmasAHumTKjiPJPWANe+kAZ84Jc8ooJkzZ88Sw==", + "funding": [ + { + "type": "github", + "url": "https://github.com/sponsors/NaturalIntelligence" + }, + { + "type": "paypal", + 
"url": "https://paypal.me/naturalintelligence" + } + ], + "license": "MIT", + "dependencies": { + "strnum": "^1.0.5" + }, + "bin": { + "fxparser": "src/cli/cli.js" + } + }, + "node_modules/mnemonist": { + "version": "0.38.3", + "resolved": "https://registry.npmjs.org/mnemonist/-/mnemonist-0.38.3.tgz", + "integrity": "sha512-2K9QYubXx/NAjv4VLq1d1Ly8pWNC5L3BrixtdkyTegXWJIqY+zLNDhhX/A+ZwWt70tB1S8H4BE8FLYEFyNoOBw==", + "license": "MIT", + "dependencies": { + "obliterator": "^1.6.1" + } + }, + "node_modules/obliterator": { + "version": "1.6.1", + "resolved": "https://registry.npmjs.org/obliterator/-/obliterator-1.6.1.tgz", + "integrity": "sha512-9WXswnqINnnhOG/5SLimUlzuU1hFJUc8zkwyD59Sd+dPOMf05PmnYG/d6Q7HZ+KmgkZJa1PxRso6QdM3sTNHig==", + "license": "MIT" + }, + "node_modules/strnum": { + "version": "1.1.2", + "resolved": "https://registry.npmjs.org/strnum/-/strnum-1.1.2.tgz", + "integrity": "sha512-vrN+B7DBIoTTZjnPNewwhx6cBA/H+IS7rfW68n7XxC1y7uoiGQBxaKzqucGUgavX15dJgiGztLJ8vxuEzwqBdA==", + "funding": [ + { + "type": "github", + "url": "https://github.com/sponsors/NaturalIntelligence" + } + ], + "license": "MIT" + }, + "node_modules/tslib": { + "version": "2.8.1", + "resolved": "https://registry.npmjs.org/tslib/-/tslib-2.8.1.tgz", + "integrity": "sha512-oJFu94HQb+KVduSUQL7wnpmqnfmLsOA/nAh6b6EH0wCEoK0/mPeXU6c3wKDV83MkOuHPRHtSXKKU99IBazS/2w==", + "license": "0BSD" + }, + "node_modules/uuid": { + "version": "9.0.1", + "resolved": "https://registry.npmjs.org/uuid/-/uuid-9.0.1.tgz", + "integrity": "sha512-b+1eJOlsR9K8HJpow9Ok3fiWOWSIcIzXodvv0rQjVoOVNpWMpxf1wZNpt4y9h10odCNrqnYp1OBzRktckBe3sA==", + "funding": [ + "https://github.com/sponsors/broofa", + "https://github.com/sponsors/ctavan" + ], + "license": "MIT", + "bin": { + "uuid": "dist/bin/uuid" + } + } + } +} diff --git a/amplify/functions/deleteDocument/node_modules/@aws-crypto/sha256-browser/CHANGELOG.md b/amplify/functions/deleteDocument/node_modules/@aws-crypto/sha256-browser/CHANGELOG.md new file mode 100644 index 
0000000..e6036f8 --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@aws-crypto/sha256-browser/CHANGELOG.md @@ -0,0 +1,118 @@ +# Change Log + +All notable changes to this project will be documented in this file. +See [Conventional Commits](https://conventionalcommits.org) for commit guidelines. + +# [5.2.0](https://github.com/aws/aws-sdk-js-crypto-helpers/compare/v5.1.0...v5.2.0) (2023-10-16) + +### Features + +- support ESM artifacts in all packages ([#752](https://github.com/aws/aws-sdk-js-crypto-helpers/issues/752)) ([e930ffb](https://github.com/aws/aws-sdk-js-crypto-helpers/commit/e930ffba5cfef66dd242049e7d514ced232c1e3b)) + +# [5.1.0](https://github.com/aws/aws-sdk-js-crypto-helpers/compare/v5.0.0...v5.1.0) (2023-09-22) + +### Bug Fixes + +- Update tsc to 2.x ([#735](https://github.com/aws/aws-sdk-js-crypto-helpers/issues/735)) ([782e0de](https://github.com/aws/aws-sdk-js-crypto-helpers/commit/782e0de9f5fef41f694130580a69d940894b6b8c)) + +### Features + +- Use @smithy/util-utf8 ([#730](https://github.com/aws/aws-sdk-js-crypto-helpers/issues/730)) ([00fb851](https://github.com/aws/aws-sdk-js-crypto-helpers/commit/00fb851ca3559d5a1f370f9256814de1210826b8)), closes [#699](https://github.com/aws/aws-sdk-js-crypto-helpers/issues/699) + +# [5.0.0](https://github.com/aws/aws-sdk-js-crypto-helpers/compare/v4.0.1...v5.0.0) (2023-07-13) + +- feat!: drop support for IE 11 (#629) ([6c49fb6](https://github.com/aws/aws-sdk-js-crypto-helpers/commit/6c49fb6c1b1f18bbff02dbd77a37a21bdb40c959)), closes [#629](https://github.com/aws/aws-sdk-js-crypto-helpers/issues/629) + +### BREAKING CHANGES + +- Remove support for IE11 + +Co-authored-by: texastony <5892063+texastony@users.noreply.github.com> + +# [4.0.0](https://github.com/aws/aws-sdk-js-crypto-helpers/compare/v3.0.0...v4.0.0) (2023-02-20) + +**Note:** Version bump only for package @aws-crypto/sha256-browser + +# [3.0.0](https://github.com/aws/aws-sdk-js-crypto-helpers/compare/v2.0.2...v3.0.0) (2023-01-12) + 
+### Bug Fixes + +- **docs:** sha256 packages, clarify hmac support ([#455](https://github.com/aws/aws-sdk-js-crypto-helpers/issues/455)) ([1be5043](https://github.com/aws/aws-sdk-js-crypto-helpers/commit/1be5043325991f3f5ccb52a8dd928f004b4d442e)) + +- feat!: replace Hash implementations with Checksum interface (#492) ([da43dc0](https://github.com/aws/aws-sdk-js-crypto-helpers/commit/da43dc0fdf669d9ebb5bfb1b1f7c79e46c4aaae1)), closes [#492](https://github.com/aws/aws-sdk-js-crypto-helpers/issues/492) + +### BREAKING CHANGES + +- All classes that implemented `Hash` now implement `Checksum`. + +## [2.0.2](https://github.com/aws/aws-sdk-js-crypto-helpers/compare/v2.0.1...v2.0.2) (2022-09-07) + +### Bug Fixes + +- **#337:** update @aws-sdk/types ([#373](https://github.com/aws/aws-sdk-js-crypto-helpers/issues/373)) ([b26a811](https://github.com/aws/aws-sdk-js-crypto-helpers/commit/b26a811a392f5209c7ec7e57251500d4d78f97ff)), closes [#337](https://github.com/aws/aws-sdk-js-crypto-helpers/issues/337) + +## [2.0.1](https://github.com/aws/aws-sdk-js-crypto-helpers/compare/v2.0.0...v2.0.1) (2021-12-09) + +**Note:** Version bump only for package @aws-crypto/sha256-browser + +# [2.0.0](https://github.com/aws/aws-sdk-js-crypto-helpers/compare/v1.2.2...v2.0.0) (2021-10-25) + +**Note:** Version bump only for package @aws-crypto/sha256-browser + +## [1.2.2](https://github.com/aws/aws-sdk-js-crypto-helpers/compare/v1.2.1...v1.2.2) (2021-10-12) + +**Note:** Version bump only for package @aws-crypto/sha256-browser + +## [1.2.1](https://github.com/aws/aws-sdk-js-crypto-helpers/compare/v1.2.0...v1.2.1) (2021-09-17) + +**Note:** Version bump only for package @aws-crypto/sha256-browser + +# [1.2.0](https://github.com/aws/aws-sdk-js-crypto-helpers/compare/v1.1.1...v1.2.0) (2021-09-17) + +### Features + +- add @aws-crypto/util ([8f489cb](https://github.com/aws/aws-sdk-js-crypto-helpers/commit/8f489cbe4c0e134f826bac66f1bf5172597048b9)) + +## 
[1.1.1](https://github.com/aws/aws-sdk-js-crypto-helpers/compare/@aws-crypto/sha256-browser@1.1.0...@aws-crypto/sha256-browser@1.1.1) (2021-07-13) + +### Bug Fixes + +- **sha256-browser:** throw errors not string ([#194](https://github.com/aws/aws-sdk-js-crypto-helpers/issues/194)) ([7fa7ac4](https://github.com/aws/aws-sdk-js-crypto-helpers/commit/7fa7ac445ef7a04dfb1ff479e7114aba045b2b2c)) + +# [1.1.0](https://github.com/aws/aws-sdk-js-crypto-helpers/compare/@aws-crypto/sha256-browser@1.0.0...@aws-crypto/sha256-browser@1.1.0) (2021-01-13) + +### Bug Fixes + +- remove package lock ([6002a5a](https://github.com/aws/aws-sdk-js-crypto-helpers/commit/6002a5ab9218dc8798c19dc205d3eebd3bec5b43)) +- **aws-crypto:** export explicit dependencies on [@aws-types](https://github.com/aws-types) ([6a1873a](https://github.com/aws/aws-sdk-js-crypto-helpers/commit/6a1873a4dcc2aaa4a1338595703cfa7099f17b8c)) +- **deps-dev:** move @aws-sdk/types to devDependencies ([#188](https://github.com/aws/aws-sdk-js-crypto-helpers/issues/188)) ([08efdf4](https://github.com/aws/aws-sdk-js-crypto-helpers/commit/08efdf46dcc612d88c441e29945d787f253ee77d)) + +# [1.0.0](https://github.com/aws/aws-sdk-js-crypto-helpers/compare/@aws-crypto/sha256-browser@1.0.0-alpha.0...@aws-crypto/sha256-browser@1.0.0) (2020-10-22) + +**Note:** Version bump only for package @aws-crypto/sha256-browser + +# [1.0.0-alpha.0](https://github.com/aws/aws-sdk-js-crypto-helpers/compare/@aws-crypto/sha256-browser@0.1.0-preview.4...@aws-crypto/sha256-browser@1.0.0-alpha.0) (2020-02-07) + +**Note:** Version bump only for package @aws-crypto/sha256-browser + +# [0.1.0-preview.4](https://github.com/aws/aws-sdk-js-crypto-helpers/compare/@aws-crypto/sha256-browser@0.1.0-preview.2...@aws-crypto/sha256-browser@0.1.0-preview.4) (2020-01-16) + +### Bug Fixes + +- Changed package.json files to point to the right Git repo ([#9](https://github.com/aws/aws-sdk-js-crypto-helpers/issues/9)) 
([028245d](https://github.com/aws/aws-sdk-js-crypto-helpers/commit/028245d72e642ca98d82226afb300eb154503c4a)), closes [#8](https://github.com/aws/aws-sdk-js-crypto-helpers/issues/8) +- es2015.iterable required ([#10](https://github.com/aws/aws-sdk-js-crypto-helpers/issues/10)) ([6e08d83](https://github.com/aws/aws-sdk-js-crypto-helpers/commit/6e08d83c33667ad8cbeeaaa7cedf1bbe05f79ed8)) +- lerna version maintains package-lock ([#14](https://github.com/aws/aws-sdk-js-crypto-helpers/issues/14)) ([2ef29e1](https://github.com/aws/aws-sdk-js-crypto-helpers/commit/2ef29e13779703a5c9b32e93d18918fcb33b7272)), closes [#13](https://github.com/aws/aws-sdk-js-crypto-helpers/issues/13) + +# [0.1.0-preview.3](https://github.com/aws/aws-sdk-js-crypto-helpers/compare/@aws-crypto/sha256-browser@0.1.0-preview.2...@aws-crypto/sha256-browser@0.1.0-preview.3) (2019-11-15) + +### Bug Fixes + +- Changed package.json files to point to the right Git repo ([#9](https://github.com/aws/aws-sdk-js-crypto-helpers/issues/9)) ([028245d](https://github.com/aws/aws-sdk-js-crypto-helpers/commit/028245d72e642ca98d82226afb300eb154503c4a)), closes [#8](https://github.com/aws/aws-sdk-js-crypto-helpers/issues/8) +- es2015.iterable required ([#10](https://github.com/aws/aws-sdk-js-crypto-helpers/issues/10)) ([6e08d83](https://github.com/aws/aws-sdk-js-crypto-helpers/commit/6e08d83c33667ad8cbeeaaa7cedf1bbe05f79ed8)) +- lerna version maintains package-lock ([#14](https://github.com/aws/aws-sdk-js-crypto-helpers/issues/14)) ([2ef29e1](https://github.com/aws/aws-sdk-js-crypto-helpers/commit/2ef29e13779703a5c9b32e93d18918fcb33b7272)), closes [#13](https://github.com/aws/aws-sdk-js-crypto-helpers/issues/13) + +# [0.1.0-preview.2](https://github.com/aws/aws-javascript-crypto-helpers/compare/@aws-crypto/sha256-browser@0.1.0-preview.1...@aws-crypto/sha256-browser@0.1.0-preview.2) (2019-10-30) + +### Bug Fixes + +- remove /src/ from .npmignore (for sourcemaps) 
([#5](https://github.com/aws/aws-javascript-crypto-helpers/issues/5)) ([ec52056](https://github.com/aws/aws-javascript-crypto-helpers/commit/ec52056)) diff --git a/amplify/functions/deleteDocument/node_modules/@aws-crypto/sha256-browser/LICENSE b/amplify/functions/deleteDocument/node_modules/@aws-crypto/sha256-browser/LICENSE new file mode 100644 index 0000000..d645695 --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@aws-crypto/sha256-browser/LICENSE @@ -0,0 +1,202 @@ + + Apache License + Version 2.0, January 2004 + http://www.apache.org/licenses/ + + TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION + + 1. Definitions. + + "License" shall mean the terms and conditions for use, reproduction, + and distribution as defined by Sections 1 through 9 of this document. + + "Licensor" shall mean the copyright owner or entity authorized by + the copyright owner that is granting the License. + + "Legal Entity" shall mean the union of the acting entity and all + other entities that control, are controlled by, or are under common + control with that entity. For the purposes of this definition, + "control" means (i) the power, direct or indirect, to cause the + direction or management of such entity, whether by contract or + otherwise, or (ii) ownership of fifty percent (50%) or more of the + outstanding shares, or (iii) beneficial ownership of such entity. + + "You" (or "Your") shall mean an individual or Legal Entity + exercising permissions granted by this License. + + "Source" form shall mean the preferred form for making modifications, + including but not limited to software source code, documentation + source, and configuration files. + + "Object" form shall mean any form resulting from mechanical + transformation or translation of a Source form, including but + not limited to compiled object code, generated documentation, + and conversions to other media types. 
+ + "Work" shall mean the work of authorship, whether in Source or + Object form, made available under the License, as indicated by a + copyright notice that is included in or attached to the work + (an example is provided in the Appendix below). + + "Derivative Works" shall mean any work, whether in Source or Object + form, that is based on (or derived from) the Work and for which the + editorial revisions, annotations, elaborations, or other modifications + represent, as a whole, an original work of authorship. For the purposes + of this License, Derivative Works shall not include works that remain + separable from, or merely link (or bind by name) to the interfaces of, + the Work and Derivative Works thereof. + + "Contribution" shall mean any work of authorship, including + the original version of the Work and any modifications or additions + to that Work or Derivative Works thereof, that is intentionally + submitted to Licensor for inclusion in the Work by the copyright owner + or by an individual or Legal Entity authorized to submit on behalf of + the copyright owner. For the purposes of this definition, "submitted" + means any form of electronic, verbal, or written communication sent + to the Licensor or its representatives, including but not limited to + communication on electronic mailing lists, source code control systems, + and issue tracking systems that are managed by, or on behalf of, the + Licensor for the purpose of discussing and improving the Work, but + excluding communication that is conspicuously marked or otherwise + designated in writing by the copyright owner as "Not a Contribution." + + "Contributor" shall mean Licensor and any individual or Legal Entity + on behalf of whom a Contribution has been received by Licensor and + subsequently incorporated within the Work. + + 2. Grant of Copyright License. 
Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + copyright license to reproduce, prepare Derivative Works of, + publicly display, publicly perform, sublicense, and distribute the + Work and such Derivative Works in Source or Object form. + + 3. Grant of Patent License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + (except as stated in this section) patent license to make, have made, + use, offer to sell, sell, import, and otherwise transfer the Work, + where such license applies only to those patent claims licensable + by such Contributor that are necessarily infringed by their + Contribution(s) alone or by combination of their Contribution(s) + with the Work to which such Contribution(s) was submitted. If You + institute patent litigation against any entity (including a + cross-claim or counterclaim in a lawsuit) alleging that the Work + or a Contribution incorporated within the Work constitutes direct + or contributory patent infringement, then any patent licenses + granted to You under this License for that Work shall terminate + as of the date such litigation is filed. + + 4. Redistribution. 
You may reproduce and distribute copies of the + Work or Derivative Works thereof in any medium, with or without + modifications, and in Source or Object form, provided that You + meet the following conditions: + + (a) You must give any other recipients of the Work or + Derivative Works a copy of this License; and + + (b) You must cause any modified files to carry prominent notices + stating that You changed the files; and + + (c) You must retain, in the Source form of any Derivative Works + that You distribute, all copyright, patent, trademark, and + attribution notices from the Source form of the Work, + excluding those notices that do not pertain to any part of + the Derivative Works; and + + (d) If the Work includes a "NOTICE" text file as part of its + distribution, then any Derivative Works that You distribute must + include a readable copy of the attribution notices contained + within such NOTICE file, excluding those notices that do not + pertain to any part of the Derivative Works, in at least one + of the following places: within a NOTICE text file distributed + as part of the Derivative Works; within the Source form or + documentation, if provided along with the Derivative Works; or, + within a display generated by the Derivative Works, if and + wherever such third-party notices normally appear. The contents + of the NOTICE file are for informational purposes only and + do not modify the License. You may add Your own attribution + notices within Derivative Works that You distribute, alongside + or as an addendum to the NOTICE text from the Work, provided + that such additional attribution notices cannot be construed + as modifying the License. 
+ + You may add Your own copyright statement to Your modifications and + may provide additional or different license terms and conditions + for use, reproduction, or distribution of Your modifications, or + for any such Derivative Works as a whole, provided Your use, + reproduction, and distribution of the Work otherwise complies with + the conditions stated in this License. + + 5. Submission of Contributions. Unless You explicitly state otherwise, + any Contribution intentionally submitted for inclusion in the Work + by You to the Licensor shall be under the terms and conditions of + this License, without any additional terms or conditions. + Notwithstanding the above, nothing herein shall supersede or modify + the terms of any separate license agreement you may have executed + with Licensor regarding such Contributions. + + 6. Trademarks. This License does not grant permission to use the trade + names, trademarks, service marks, or product names of the Licensor, + except as required for reasonable and customary use in describing the + origin of the Work and reproducing the content of the NOTICE file. + + 7. Disclaimer of Warranty. Unless required by applicable law or + agreed to in writing, Licensor provides the Work (and each + Contributor provides its Contributions) on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or + implied, including, without limitation, any warranties or conditions + of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A + PARTICULAR PURPOSE. You are solely responsible for determining the + appropriateness of using or redistributing the Work and assume any + risks associated with Your exercise of permissions under this License. + + 8. Limitation of Liability. 
In no event and under no legal theory, + whether in tort (including negligence), contract, or otherwise, + unless required by applicable law (such as deliberate and grossly + negligent acts) or agreed to in writing, shall any Contributor be + liable to You for damages, including any direct, indirect, special, + incidental, or consequential damages of any character arising as a + result of this License or out of the use or inability to use the + Work (including but not limited to damages for loss of goodwill, + work stoppage, computer failure or malfunction, or any and all + other commercial damages or losses), even if such Contributor + has been advised of the possibility of such damages. + + 9. Accepting Warranty or Additional Liability. While redistributing + the Work or Derivative Works thereof, You may choose to offer, + and charge a fee for, acceptance of support, warranty, indemnity, + or other liability obligations and/or rights consistent with this + License. However, in accepting such obligations, You may act only + on Your own behalf and on Your sole responsibility, not on behalf + of any other Contributor, and only if You agree to indemnify, + defend, and hold each Contributor harmless for any liability + incurred by, or claims asserted against, such Contributor by reason + of your accepting any such warranty or additional liability. + + END OF TERMS AND CONDITIONS + + APPENDIX: How to apply the Apache License to your work. + + To apply the Apache License to your work, attach the following + boilerplate notice, with the fields enclosed by brackets "[]" + replaced with your own identifying information. (Don't include + the brackets!) The text should be enclosed in the appropriate + comment syntax for the file format. We also recommend that a + file or class name and description of purpose be included on the + same "printed page" as the copyright notice for easier + identification within third-party archives. 
+ + Copyright [yyyy] [name of copyright owner] + + Licensed under the Apache License, Version 2.0 (the "License"); + you may not use this file except in compliance with the License. + You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + + Unless required by applicable law or agreed to in writing, software + distributed under the License is distributed on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + See the License for the specific language governing permissions and + limitations under the License. diff --git a/amplify/functions/deleteDocument/node_modules/@aws-crypto/sha256-browser/README.md b/amplify/functions/deleteDocument/node_modules/@aws-crypto/sha256-browser/README.md new file mode 100644 index 0000000..75bf105 --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@aws-crypto/sha256-browser/README.md @@ -0,0 +1,31 @@ +# @aws-crypto/sha256-browser + +SHA256 wrapper for browsers that prefers `window.crypto.subtle` but will +fall back to a pure JS implementation in @aws-crypto/sha256-js +to provide a consistent interface for SHA256. 
+ +## Usage + +- To hash "some data" +``` +import {Sha256} from '@aws-crypto/sha256-browser' + +const hash = new Sha256(); +hash.update('some data'); +const result = await hash.digest(); + +``` + +- To hmac "some data" with "a key" +``` +import {Sha256} from '@aws-crypto/sha256-browser' + +const hash = new Sha256('a key'); +hash.update('some data'); +const result = await hash.digest(); + +``` + +## Test + +`npm test` diff --git a/amplify/functions/deleteDocument/node_modules/@aws-crypto/sha256-browser/build/main/constants.d.ts b/amplify/functions/deleteDocument/node_modules/@aws-crypto/sha256-browser/build/main/constants.d.ts new file mode 100644 index 0000000..fe8def7 --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@aws-crypto/sha256-browser/build/main/constants.d.ts @@ -0,0 +1,10 @@ +export declare const SHA_256_HASH: { + name: "SHA-256"; +}; +export declare const SHA_256_HMAC_ALGO: { + name: "HMAC"; + hash: { + name: "SHA-256"; + }; +}; +export declare const EMPTY_DATA_SHA_256: Uint8Array; diff --git a/amplify/functions/deleteDocument/node_modules/@aws-crypto/sha256-browser/build/main/constants.js b/amplify/functions/deleteDocument/node_modules/@aws-crypto/sha256-browser/build/main/constants.js new file mode 100644 index 0000000..acb5c55 --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@aws-crypto/sha256-browser/build/main/constants.js @@ -0,0 +1,43 @@ +"use strict"; +Object.defineProperty(exports, "__esModule", { value: true }); +exports.EMPTY_DATA_SHA_256 = exports.SHA_256_HMAC_ALGO = exports.SHA_256_HASH = void 0; +exports.SHA_256_HASH = { name: "SHA-256" }; +exports.SHA_256_HMAC_ALGO = { + name: "HMAC", + hash: exports.SHA_256_HASH +}; +exports.EMPTY_DATA_SHA_256 = new Uint8Array([ + 227, + 176, + 196, + 66, + 152, + 252, + 28, + 20, + 154, + 251, + 244, + 200, + 153, + 111, + 185, + 36, + 39, + 174, + 65, + 228, + 100, + 155, + 147, + 76, + 164, + 149, + 153, + 27, + 120, + 82, + 184, + 85 +]); +//# 
sourceMappingURL=constants.js.map \ No newline at end of file diff --git a/amplify/functions/deleteDocument/node_modules/@aws-crypto/sha256-browser/build/main/constants.js.map b/amplify/functions/deleteDocument/node_modules/@aws-crypto/sha256-browser/build/main/constants.js.map new file mode 100644 index 0000000..217561a --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@aws-crypto/sha256-browser/build/main/constants.js.map @@ -0,0 +1 @@ +{"version":3,"file":"constants.js","sourceRoot":"","sources":["../../src/constants.ts"],"names":[],"mappings":";;;AAAa,QAAA,YAAY,GAAwB,EAAE,IAAI,EAAE,SAAS,EAAE,CAAC;AAExD,QAAA,iBAAiB,GAAgD;IAC5E,IAAI,EAAE,MAAM;IACZ,IAAI,EAAE,oBAAY;CACnB,CAAC;AAEW,QAAA,kBAAkB,GAAG,IAAI,UAAU,CAAC;IAC/C,GAAG;IACH,GAAG;IACH,GAAG;IACH,EAAE;IACF,GAAG;IACH,GAAG;IACH,EAAE;IACF,EAAE;IACF,GAAG;IACH,GAAG;IACH,GAAG;IACH,GAAG;IACH,GAAG;IACH,GAAG;IACH,GAAG;IACH,EAAE;IACF,EAAE;IACF,GAAG;IACH,EAAE;IACF,GAAG;IACH,GAAG;IACH,GAAG;IACH,GAAG;IACH,EAAE;IACF,GAAG;IACH,GAAG;IACH,GAAG;IACH,EAAE;IACF,GAAG;IACH,EAAE;IACF,GAAG;IACH,EAAE;CACH,CAAC,CAAC"} \ No newline at end of file diff --git a/amplify/functions/deleteDocument/node_modules/@aws-crypto/sha256-browser/build/main/crossPlatformSha256.d.ts b/amplify/functions/deleteDocument/node_modules/@aws-crypto/sha256-browser/build/main/crossPlatformSha256.d.ts new file mode 100644 index 0000000..055d3ef --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@aws-crypto/sha256-browser/build/main/crossPlatformSha256.d.ts @@ -0,0 +1,8 @@ +import { Checksum, SourceData } from "@aws-sdk/types"; +export declare class Sha256 implements Checksum { + private hash; + constructor(secret?: SourceData); + update(data: SourceData, encoding?: "utf8" | "ascii" | "latin1"): void; + digest(): Promise; + reset(): void; +} diff --git a/amplify/functions/deleteDocument/node_modules/@aws-crypto/sha256-browser/build/main/crossPlatformSha256.js 
b/amplify/functions/deleteDocument/node_modules/@aws-crypto/sha256-browser/build/main/crossPlatformSha256.js new file mode 100644 index 0000000..cde2a42 --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@aws-crypto/sha256-browser/build/main/crossPlatformSha256.js @@ -0,0 +1,30 @@ +"use strict"; +Object.defineProperty(exports, "__esModule", { value: true }); +exports.Sha256 = void 0; +var webCryptoSha256_1 = require("./webCryptoSha256"); +var sha256_js_1 = require("@aws-crypto/sha256-js"); +var supports_web_crypto_1 = require("@aws-crypto/supports-web-crypto"); +var util_locate_window_1 = require("@aws-sdk/util-locate-window"); +var util_1 = require("@aws-crypto/util"); +var Sha256 = /** @class */ (function () { + function Sha256(secret) { + if ((0, supports_web_crypto_1.supportsWebCrypto)((0, util_locate_window_1.locateWindow)())) { + this.hash = new webCryptoSha256_1.Sha256(secret); + } + else { + this.hash = new sha256_js_1.Sha256(secret); + } + } + Sha256.prototype.update = function (data, encoding) { + this.hash.update((0, util_1.convertToBuffer)(data)); + }; + Sha256.prototype.digest = function () { + return this.hash.digest(); + }; + Sha256.prototype.reset = function () { + this.hash.reset(); + }; + return Sha256; +}()); +exports.Sha256 = Sha256; +//# sourceMappingURL=crossPlatformSha256.js.map \ No newline at end of file diff --git a/amplify/functions/deleteDocument/node_modules/@aws-crypto/sha256-browser/build/main/crossPlatformSha256.js.map b/amplify/functions/deleteDocument/node_modules/@aws-crypto/sha256-browser/build/main/crossPlatformSha256.js.map new file mode 100644 index 0000000..9a177dc --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@aws-crypto/sha256-browser/build/main/crossPlatformSha256.js.map @@ -0,0 +1 @@ 
+{"version":3,"file":"crossPlatformSha256.js","sourceRoot":"","sources":["../../src/crossPlatformSha256.ts"],"names":[],"mappings":";;;AAAA,qDAA8D;AAC9D,mDAA2D;AAE3D,uEAAoE;AACpE,kEAA2D;AAC3D,yCAAmD;AAEnD;IAGE,gBAAY,MAAmB;QAC7B,IAAI,IAAA,uCAAiB,EAAC,IAAA,iCAAY,GAAE,CAAC,EAAE;YACrC,IAAI,CAAC,IAAI,GAAG,IAAI,wBAAe,CAAC,MAAM,CAAC,CAAC;SACzC;aAAM;YACL,IAAI,CAAC,IAAI,GAAG,IAAI,kBAAQ,CAAC,MAAM,CAAC,CAAC;SAClC;IACH,CAAC;IAED,uBAAM,GAAN,UAAO,IAAgB,EAAE,QAAsC;QAC7D,IAAI,CAAC,IAAI,CAAC,MAAM,CAAC,IAAA,sBAAe,EAAC,IAAI,CAAC,CAAC,CAAC;IAC1C,CAAC;IAED,uBAAM,GAAN;QACE,OAAO,IAAI,CAAC,IAAI,CAAC,MAAM,EAAE,CAAC;IAC5B,CAAC;IAED,sBAAK,GAAL;QACE,IAAI,CAAC,IAAI,CAAC,KAAK,EAAE,CAAC;IACpB,CAAC;IACH,aAAC;AAAD,CAAC,AAtBD,IAsBC;AAtBY,wBAAM"} \ No newline at end of file diff --git a/amplify/functions/deleteDocument/node_modules/@aws-crypto/sha256-browser/build/main/index.d.ts b/amplify/functions/deleteDocument/node_modules/@aws-crypto/sha256-browser/build/main/index.d.ts new file mode 100644 index 0000000..60ab397 --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@aws-crypto/sha256-browser/build/main/index.d.ts @@ -0,0 +1,2 @@ +export * from "./crossPlatformSha256"; +export { Sha256 as WebCryptoSha256 } from "./webCryptoSha256"; diff --git a/amplify/functions/deleteDocument/node_modules/@aws-crypto/sha256-browser/build/main/index.js b/amplify/functions/deleteDocument/node_modules/@aws-crypto/sha256-browser/build/main/index.js new file mode 100644 index 0000000..a270349 --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@aws-crypto/sha256-browser/build/main/index.js @@ -0,0 +1,8 @@ +"use strict"; +Object.defineProperty(exports, "__esModule", { value: true }); +exports.WebCryptoSha256 = void 0; +var tslib_1 = require("tslib"); +tslib_1.__exportStar(require("./crossPlatformSha256"), exports); +var webCryptoSha256_1 = require("./webCryptoSha256"); +Object.defineProperty(exports, "WebCryptoSha256", { enumerable: true, get: function () { return webCryptoSha256_1.Sha256; 
} }); +//# sourceMappingURL=index.js.map \ No newline at end of file diff --git a/amplify/functions/deleteDocument/node_modules/@aws-crypto/sha256-browser/build/main/index.js.map b/amplify/functions/deleteDocument/node_modules/@aws-crypto/sha256-browser/build/main/index.js.map new file mode 100644 index 0000000..64b19eb --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@aws-crypto/sha256-browser/build/main/index.js.map @@ -0,0 +1 @@ +{"version":3,"file":"index.js","sourceRoot":"","sources":["../../src/index.ts"],"names":[],"mappings":";;;;AAAA,gEAAsC;AACtC,qDAA8D;AAArD,kHAAA,MAAM,OAAmB"} \ No newline at end of file diff --git a/amplify/functions/deleteDocument/node_modules/@aws-crypto/sha256-browser/build/main/isEmptyData.d.ts b/amplify/functions/deleteDocument/node_modules/@aws-crypto/sha256-browser/build/main/isEmptyData.d.ts new file mode 100644 index 0000000..43ae4a7 --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@aws-crypto/sha256-browser/build/main/isEmptyData.d.ts @@ -0,0 +1,2 @@ +import { SourceData } from "@aws-sdk/types"; +export declare function isEmptyData(data: SourceData): boolean; diff --git a/amplify/functions/deleteDocument/node_modules/@aws-crypto/sha256-browser/build/main/isEmptyData.js b/amplify/functions/deleteDocument/node_modules/@aws-crypto/sha256-browser/build/main/isEmptyData.js new file mode 100644 index 0000000..fe91548 --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@aws-crypto/sha256-browser/build/main/isEmptyData.js @@ -0,0 +1,11 @@ +"use strict"; +Object.defineProperty(exports, "__esModule", { value: true }); +exports.isEmptyData = void 0; +function isEmptyData(data) { + if (typeof data === "string") { + return data.length === 0; + } + return data.byteLength === 0; +} +exports.isEmptyData = isEmptyData; +//# sourceMappingURL=isEmptyData.js.map \ No newline at end of file diff --git 
a/amplify/functions/deleteDocument/node_modules/@aws-crypto/sha256-browser/build/main/isEmptyData.js.map b/amplify/functions/deleteDocument/node_modules/@aws-crypto/sha256-browser/build/main/isEmptyData.js.map new file mode 100644 index 0000000..20ccfd6 --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@aws-crypto/sha256-browser/build/main/isEmptyData.js.map @@ -0,0 +1 @@ +{"version":3,"file":"isEmptyData.js","sourceRoot":"","sources":["../../src/isEmptyData.ts"],"names":[],"mappings":";;;AAEA,SAAgB,WAAW,CAAC,IAAgB;IAC1C,IAAI,OAAO,IAAI,KAAK,QAAQ,EAAE;QAC5B,OAAO,IAAI,CAAC,MAAM,KAAK,CAAC,CAAC;KAC1B;IAED,OAAO,IAAI,CAAC,UAAU,KAAK,CAAC,CAAC;AAC/B,CAAC;AAND,kCAMC"} \ No newline at end of file diff --git a/amplify/functions/deleteDocument/node_modules/@aws-crypto/sha256-browser/build/main/webCryptoSha256.d.ts b/amplify/functions/deleteDocument/node_modules/@aws-crypto/sha256-browser/build/main/webCryptoSha256.d.ts new file mode 100644 index 0000000..ec0e214 --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@aws-crypto/sha256-browser/build/main/webCryptoSha256.d.ts @@ -0,0 +1,10 @@ +import { Checksum, SourceData } from "@aws-sdk/types"; +export declare class Sha256 implements Checksum { + private readonly secret?; + private key; + private toHash; + constructor(secret?: SourceData); + update(data: SourceData): void; + digest(): Promise; + reset(): void; +} diff --git a/amplify/functions/deleteDocument/node_modules/@aws-crypto/sha256-browser/build/main/webCryptoSha256.js b/amplify/functions/deleteDocument/node_modules/@aws-crypto/sha256-browser/build/main/webCryptoSha256.js new file mode 100644 index 0000000..778fdd9 --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@aws-crypto/sha256-browser/build/main/webCryptoSha256.js @@ -0,0 +1,56 @@ +"use strict"; +Object.defineProperty(exports, "__esModule", { value: true }); +exports.Sha256 = void 0; +var util_1 = require("@aws-crypto/util"); +var constants_1 = require("./constants"); 
+var util_locate_window_1 = require("@aws-sdk/util-locate-window"); +var Sha256 = /** @class */ (function () { + function Sha256(secret) { + this.toHash = new Uint8Array(0); + this.secret = secret; + this.reset(); + } + Sha256.prototype.update = function (data) { + if ((0, util_1.isEmptyData)(data)) { + return; + } + var update = (0, util_1.convertToBuffer)(data); + var typedArray = new Uint8Array(this.toHash.byteLength + update.byteLength); + typedArray.set(this.toHash, 0); + typedArray.set(update, this.toHash.byteLength); + this.toHash = typedArray; + }; + Sha256.prototype.digest = function () { + var _this = this; + if (this.key) { + return this.key.then(function (key) { + return (0, util_locate_window_1.locateWindow)() + .crypto.subtle.sign(constants_1.SHA_256_HMAC_ALGO, key, _this.toHash) + .then(function (data) { return new Uint8Array(data); }); + }); + } + if ((0, util_1.isEmptyData)(this.toHash)) { + return Promise.resolve(constants_1.EMPTY_DATA_SHA_256); + } + return Promise.resolve() + .then(function () { + return (0, util_locate_window_1.locateWindow)().crypto.subtle.digest(constants_1.SHA_256_HASH, _this.toHash); + }) + .then(function (data) { return Promise.resolve(new Uint8Array(data)); }); + }; + Sha256.prototype.reset = function () { + var _this = this; + this.toHash = new Uint8Array(0); + if (this.secret && this.secret !== void 0) { + this.key = new Promise(function (resolve, reject) { + (0, util_locate_window_1.locateWindow)() + .crypto.subtle.importKey("raw", (0, util_1.convertToBuffer)(_this.secret), constants_1.SHA_256_HMAC_ALGO, false, ["sign"]) + .then(resolve, reject); + }); + this.key.catch(function () { }); + } + }; + return Sha256; +}()); +exports.Sha256 = Sha256; +//# sourceMappingURL=webCryptoSha256.js.map \ No newline at end of file diff --git a/amplify/functions/deleteDocument/node_modules/@aws-crypto/sha256-browser/build/main/webCryptoSha256.js.map 
b/amplify/functions/deleteDocument/node_modules/@aws-crypto/sha256-browser/build/main/webCryptoSha256.js.map new file mode 100644 index 0000000..7b55a07 --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@aws-crypto/sha256-browser/build/main/webCryptoSha256.js.map @@ -0,0 +1 @@ +{"version":3,"file":"webCryptoSha256.js","sourceRoot":"","sources":["../../src/webCryptoSha256.ts"],"names":[],"mappings":";;;AACA,yCAAgE;AAChE,yCAIqB;AACrB,kEAA2D;AAE3D;IAKE,gBAAY,MAAmB;QAFvB,WAAM,GAAe,IAAI,UAAU,CAAC,CAAC,CAAC,CAAC;QAG7C,IAAI,CAAC,MAAM,GAAG,MAAM,CAAC;QACrB,IAAI,CAAC,KAAK,EAAE,CAAC;IACf,CAAC;IAED,uBAAM,GAAN,UAAO,IAAgB;QACrB,IAAI,IAAA,kBAAW,EAAC,IAAI,CAAC,EAAE;YACrB,OAAO;SACR;QAED,IAAM,MAAM,GAAG,IAAA,sBAAe,EAAC,IAAI,CAAC,CAAC;QACrC,IAAM,UAAU,GAAG,IAAI,UAAU,CAC/B,IAAI,CAAC,MAAM,CAAC,UAAU,GAAG,MAAM,CAAC,UAAU,CAC3C,CAAC;QACF,UAAU,CAAC,GAAG,CAAC,IAAI,CAAC,MAAM,EAAE,CAAC,CAAC,CAAC;QAC/B,UAAU,CAAC,GAAG,CAAC,MAAM,EAAE,IAAI,CAAC,MAAM,CAAC,UAAU,CAAC,CAAC;QAC/C,IAAI,CAAC,MAAM,GAAG,UAAU,CAAC;IAC3B,CAAC;IAED,uBAAM,GAAN;QAAA,iBAkBC;QAjBC,IAAI,IAAI,CAAC,GAAG,EAAE;YACZ,OAAO,IAAI,CAAC,GAAG,CAAC,IAAI,CAAC,UAAC,GAAG;gBACvB,OAAA,IAAA,iCAAY,GAAE;qBACX,MAAM,CAAC,MAAM,CAAC,IAAI,CAAC,6BAAiB,EAAE,GAAG,EAAE,KAAI,CAAC,MAAM,CAAC;qBACvD,IAAI,CAAC,UAAC,IAAI,IAAK,OAAA,IAAI,UAAU,CAAC,IAAI,CAAC,EAApB,CAAoB,CAAC;YAFvC,CAEuC,CACxC,CAAC;SACH;QAED,IAAI,IAAA,kBAAW,EAAC,IAAI,CAAC,MAAM,CAAC,EAAE;YAC5B,OAAO,OAAO,CAAC,OAAO,CAAC,8BAAkB,CAAC,CAAC;SAC5C;QAED,OAAO,OAAO,CAAC,OAAO,EAAE;aACrB,IAAI,CAAC;YACJ,OAAA,IAAA,iCAAY,GAAE,CAAC,MAAM,CAAC,MAAM,CAAC,MAAM,CAAC,wBAAY,EAAE,KAAI,CAAC,MAAM,CAAC;QAA9D,CAA8D,CAC/D;aACA,IAAI,CAAC,UAAC,IAAI,IAAK,OAAA,OAAO,CAAC,OAAO,CAAC,IAAI,UAAU,CAAC,IAAI,CAAC,CAAC,EAArC,CAAqC,CAAC,CAAC;IAC3D,CAAC;IAED,sBAAK,GAAL;QAAA,iBAgBC;QAfC,IAAI,CAAC,MAAM,GAAG,IAAI,UAAU,CAAC,CAAC,CAAC,CAAC;QAChC,IAAI,IAAI,CAAC,MAAM,IAAI,IAAI,CAAC,MAAM,KAAK,KAAK,CAAC,EAAE;YACzC,IAAI,CAAC,GAAG,GAAG,IAAI,OAAO,CAAC,UAAC,OAAO,EAAE,MAAM;gBACrC,IAAA,iCAAY,GAAE;qBACT,MAAM,CAAC,MAAM,CAAC,SAAS,CACxB,KAAK,EACL,IAAA,sB
AAe,EAAC,KAAI,CAAC,MAAoB,CAAC,EAC1C,6BAAiB,EACjB,KAAK,EACL,CAAC,MAAM,CAAC,CACX;qBACI,IAAI,CAAC,OAAO,EAAE,MAAM,CAAC,CAAC;YAC7B,CAAC,CAAC,CAAC;YACH,IAAI,CAAC,GAAG,CAAC,KAAK,CAAC,cAAO,CAAC,CAAC,CAAC;SAC1B;IACH,CAAC;IACH,aAAC;AAAD,CAAC,AA7DD,IA6DC;AA7DY,wBAAM"} \ No newline at end of file diff --git a/amplify/functions/deleteDocument/node_modules/@aws-crypto/sha256-browser/build/module/constants.d.ts b/amplify/functions/deleteDocument/node_modules/@aws-crypto/sha256-browser/build/module/constants.d.ts new file mode 100644 index 0000000..fe8def7 --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@aws-crypto/sha256-browser/build/module/constants.d.ts @@ -0,0 +1,10 @@ +export declare const SHA_256_HASH: { + name: "SHA-256"; +}; +export declare const SHA_256_HMAC_ALGO: { + name: "HMAC"; + hash: { + name: "SHA-256"; + }; +}; +export declare const EMPTY_DATA_SHA_256: Uint8Array; diff --git a/amplify/functions/deleteDocument/node_modules/@aws-crypto/sha256-browser/build/module/constants.js b/amplify/functions/deleteDocument/node_modules/@aws-crypto/sha256-browser/build/module/constants.js new file mode 100644 index 0000000..7fb1613 --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@aws-crypto/sha256-browser/build/module/constants.js @@ -0,0 +1,40 @@ +export var SHA_256_HASH = { name: "SHA-256" }; +export var SHA_256_HMAC_ALGO = { + name: "HMAC", + hash: SHA_256_HASH +}; +export var EMPTY_DATA_SHA_256 = new Uint8Array([ + 227, + 176, + 196, + 66, + 152, + 252, + 28, + 20, + 154, + 251, + 244, + 200, + 153, + 111, + 185, + 36, + 39, + 174, + 65, + 228, + 100, + 155, + 147, + 76, + 164, + 149, + 153, + 27, + 120, + 82, + 184, + 85 +]); +//# sourceMappingURL=constants.js.map \ No newline at end of file diff --git a/amplify/functions/deleteDocument/node_modules/@aws-crypto/sha256-browser/build/module/constants.js.map b/amplify/functions/deleteDocument/node_modules/@aws-crypto/sha256-browser/build/module/constants.js.map new file mode 100644 index 
0000000..09ed9a3 --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@aws-crypto/sha256-browser/build/module/constants.js.map @@ -0,0 +1 @@ +{"version":3,"file":"constants.js","sourceRoot":"","sources":["../../src/constants.ts"],"names":[],"mappings":"AAAA,MAAM,CAAC,IAAM,YAAY,GAAwB,EAAE,IAAI,EAAE,SAAS,EAAE,CAAC;AAErE,MAAM,CAAC,IAAM,iBAAiB,GAAgD;IAC5E,IAAI,EAAE,MAAM;IACZ,IAAI,EAAE,YAAY;CACnB,CAAC;AAEF,MAAM,CAAC,IAAM,kBAAkB,GAAG,IAAI,UAAU,CAAC;IAC/C,GAAG;IACH,GAAG;IACH,GAAG;IACH,EAAE;IACF,GAAG;IACH,GAAG;IACH,EAAE;IACF,EAAE;IACF,GAAG;IACH,GAAG;IACH,GAAG;IACH,GAAG;IACH,GAAG;IACH,GAAG;IACH,GAAG;IACH,EAAE;IACF,EAAE;IACF,GAAG;IACH,EAAE;IACF,GAAG;IACH,GAAG;IACH,GAAG;IACH,GAAG;IACH,EAAE;IACF,GAAG;IACH,GAAG;IACH,GAAG;IACH,EAAE;IACF,GAAG;IACH,EAAE;IACF,GAAG;IACH,EAAE;CACH,CAAC,CAAC"} \ No newline at end of file diff --git a/amplify/functions/deleteDocument/node_modules/@aws-crypto/sha256-browser/build/module/crossPlatformSha256.d.ts b/amplify/functions/deleteDocument/node_modules/@aws-crypto/sha256-browser/build/module/crossPlatformSha256.d.ts new file mode 100644 index 0000000..055d3ef --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@aws-crypto/sha256-browser/build/module/crossPlatformSha256.d.ts @@ -0,0 +1,8 @@ +import { Checksum, SourceData } from "@aws-sdk/types"; +export declare class Sha256 implements Checksum { + private hash; + constructor(secret?: SourceData); + update(data: SourceData, encoding?: "utf8" | "ascii" | "latin1"): void; + digest(): Promise; + reset(): void; +} diff --git a/amplify/functions/deleteDocument/node_modules/@aws-crypto/sha256-browser/build/module/crossPlatformSha256.js b/amplify/functions/deleteDocument/node_modules/@aws-crypto/sha256-browser/build/module/crossPlatformSha256.js new file mode 100644 index 0000000..5ae82ea --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@aws-crypto/sha256-browser/build/module/crossPlatformSha256.js @@ -0,0 +1,27 @@ +import { Sha256 as WebCryptoSha256 } from 
"./webCryptoSha256"; +import { Sha256 as JsSha256 } from "@aws-crypto/sha256-js"; +import { supportsWebCrypto } from "@aws-crypto/supports-web-crypto"; +import { locateWindow } from "@aws-sdk/util-locate-window"; +import { convertToBuffer } from "@aws-crypto/util"; +var Sha256 = /** @class */ (function () { + function Sha256(secret) { + if (supportsWebCrypto(locateWindow())) { + this.hash = new WebCryptoSha256(secret); + } + else { + this.hash = new JsSha256(secret); + } + } + Sha256.prototype.update = function (data, encoding) { + this.hash.update(convertToBuffer(data)); + }; + Sha256.prototype.digest = function () { + return this.hash.digest(); + }; + Sha256.prototype.reset = function () { + this.hash.reset(); + }; + return Sha256; +}()); +export { Sha256 }; +//# sourceMappingURL=crossPlatformSha256.js.map \ No newline at end of file diff --git a/amplify/functions/deleteDocument/node_modules/@aws-crypto/sha256-browser/build/module/crossPlatformSha256.js.map b/amplify/functions/deleteDocument/node_modules/@aws-crypto/sha256-browser/build/module/crossPlatformSha256.js.map new file mode 100644 index 0000000..4a83c57 --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@aws-crypto/sha256-browser/build/module/crossPlatformSha256.js.map @@ -0,0 +1 @@ 
+{"version":3,"file":"crossPlatformSha256.js","sourceRoot":"","sources":["../../src/crossPlatformSha256.ts"],"names":[],"mappings":"AAAA,OAAO,EAAE,MAAM,IAAI,eAAe,EAAE,MAAM,mBAAmB,CAAC;AAC9D,OAAO,EAAE,MAAM,IAAI,QAAQ,EAAE,MAAM,uBAAuB,CAAC;AAE3D,OAAO,EAAE,iBAAiB,EAAE,MAAM,iCAAiC,CAAC;AACpE,OAAO,EAAE,YAAY,EAAE,MAAM,6BAA6B,CAAC;AAC3D,OAAO,EAAE,eAAe,EAAE,MAAM,kBAAkB,CAAC;AAEnD;IAGE,gBAAY,MAAmB;QAC7B,IAAI,iBAAiB,CAAC,YAAY,EAAE,CAAC,EAAE;YACrC,IAAI,CAAC,IAAI,GAAG,IAAI,eAAe,CAAC,MAAM,CAAC,CAAC;SACzC;aAAM;YACL,IAAI,CAAC,IAAI,GAAG,IAAI,QAAQ,CAAC,MAAM,CAAC,CAAC;SAClC;IACH,CAAC;IAED,uBAAM,GAAN,UAAO,IAAgB,EAAE,QAAsC;QAC7D,IAAI,CAAC,IAAI,CAAC,MAAM,CAAC,eAAe,CAAC,IAAI,CAAC,CAAC,CAAC;IAC1C,CAAC;IAED,uBAAM,GAAN;QACE,OAAO,IAAI,CAAC,IAAI,CAAC,MAAM,EAAE,CAAC;IAC5B,CAAC;IAED,sBAAK,GAAL;QACE,IAAI,CAAC,IAAI,CAAC,KAAK,EAAE,CAAC;IACpB,CAAC;IACH,aAAC;AAAD,CAAC,AAtBD,IAsBC"} \ No newline at end of file diff --git a/amplify/functions/deleteDocument/node_modules/@aws-crypto/sha256-browser/build/module/index.d.ts b/amplify/functions/deleteDocument/node_modules/@aws-crypto/sha256-browser/build/module/index.d.ts new file mode 100644 index 0000000..60ab397 --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@aws-crypto/sha256-browser/build/module/index.d.ts @@ -0,0 +1,2 @@ +export * from "./crossPlatformSha256"; +export { Sha256 as WebCryptoSha256 } from "./webCryptoSha256"; diff --git a/amplify/functions/deleteDocument/node_modules/@aws-crypto/sha256-browser/build/module/index.js b/amplify/functions/deleteDocument/node_modules/@aws-crypto/sha256-browser/build/module/index.js new file mode 100644 index 0000000..94ffb63 --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@aws-crypto/sha256-browser/build/module/index.js @@ -0,0 +1,3 @@ +export * from "./crossPlatformSha256"; +export { Sha256 as WebCryptoSha256 } from "./webCryptoSha256"; +//# sourceMappingURL=index.js.map \ No newline at end of file diff --git 
a/amplify/functions/deleteDocument/node_modules/@aws-crypto/sha256-browser/build/module/index.js.map b/amplify/functions/deleteDocument/node_modules/@aws-crypto/sha256-browser/build/module/index.js.map new file mode 100644 index 0000000..01d20bc --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@aws-crypto/sha256-browser/build/module/index.js.map @@ -0,0 +1 @@ +{"version":3,"file":"index.js","sourceRoot":"","sources":["../../src/index.ts"],"names":[],"mappings":"AAAA,cAAc,uBAAuB,CAAC;AACtC,OAAO,EAAE,MAAM,IAAI,eAAe,EAAE,MAAM,mBAAmB,CAAC"} \ No newline at end of file diff --git a/amplify/functions/deleteDocument/node_modules/@aws-crypto/sha256-browser/build/module/isEmptyData.d.ts b/amplify/functions/deleteDocument/node_modules/@aws-crypto/sha256-browser/build/module/isEmptyData.d.ts new file mode 100644 index 0000000..43ae4a7 --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@aws-crypto/sha256-browser/build/module/isEmptyData.d.ts @@ -0,0 +1,2 @@ +import { SourceData } from "@aws-sdk/types"; +export declare function isEmptyData(data: SourceData): boolean; diff --git a/amplify/functions/deleteDocument/node_modules/@aws-crypto/sha256-browser/build/module/isEmptyData.js b/amplify/functions/deleteDocument/node_modules/@aws-crypto/sha256-browser/build/module/isEmptyData.js new file mode 100644 index 0000000..4f31a61 --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@aws-crypto/sha256-browser/build/module/isEmptyData.js @@ -0,0 +1,7 @@ +export function isEmptyData(data) { + if (typeof data === "string") { + return data.length === 0; + } + return data.byteLength === 0; +} +//# sourceMappingURL=isEmptyData.js.map \ No newline at end of file diff --git a/amplify/functions/deleteDocument/node_modules/@aws-crypto/sha256-browser/build/module/isEmptyData.js.map b/amplify/functions/deleteDocument/node_modules/@aws-crypto/sha256-browser/build/module/isEmptyData.js.map new file mode 100644 index 0000000..776ce2b --- /dev/null +++ 
b/amplify/functions/deleteDocument/node_modules/@aws-crypto/sha256-browser/build/module/isEmptyData.js.map @@ -0,0 +1 @@ +{"version":3,"file":"isEmptyData.js","sourceRoot":"","sources":["../../src/isEmptyData.ts"],"names":[],"mappings":"AAEA,MAAM,UAAU,WAAW,CAAC,IAAgB;IAC1C,IAAI,OAAO,IAAI,KAAK,QAAQ,EAAE;QAC5B,OAAO,IAAI,CAAC,MAAM,KAAK,CAAC,CAAC;KAC1B;IAED,OAAO,IAAI,CAAC,UAAU,KAAK,CAAC,CAAC;AAC/B,CAAC"} \ No newline at end of file diff --git a/amplify/functions/deleteDocument/node_modules/@aws-crypto/sha256-browser/build/module/webCryptoSha256.d.ts b/amplify/functions/deleteDocument/node_modules/@aws-crypto/sha256-browser/build/module/webCryptoSha256.d.ts new file mode 100644 index 0000000..ec0e214 --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@aws-crypto/sha256-browser/build/module/webCryptoSha256.d.ts @@ -0,0 +1,10 @@ +import { Checksum, SourceData } from "@aws-sdk/types"; +export declare class Sha256 implements Checksum { + private readonly secret?; + private key; + private toHash; + constructor(secret?: SourceData); + update(data: SourceData): void; + digest(): Promise; + reset(): void; +} diff --git a/amplify/functions/deleteDocument/node_modules/@aws-crypto/sha256-browser/build/module/webCryptoSha256.js b/amplify/functions/deleteDocument/node_modules/@aws-crypto/sha256-browser/build/module/webCryptoSha256.js new file mode 100644 index 0000000..d12acd0 --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@aws-crypto/sha256-browser/build/module/webCryptoSha256.js @@ -0,0 +1,53 @@ +import { isEmptyData, convertToBuffer } from "@aws-crypto/util"; +import { EMPTY_DATA_SHA_256, SHA_256_HASH, SHA_256_HMAC_ALGO, } from "./constants"; +import { locateWindow } from "@aws-sdk/util-locate-window"; +var Sha256 = /** @class */ (function () { + function Sha256(secret) { + this.toHash = new Uint8Array(0); + this.secret = secret; + this.reset(); + } + Sha256.prototype.update = function (data) { + if (isEmptyData(data)) { + return; + } + var 
update = convertToBuffer(data); + var typedArray = new Uint8Array(this.toHash.byteLength + update.byteLength); + typedArray.set(this.toHash, 0); + typedArray.set(update, this.toHash.byteLength); + this.toHash = typedArray; + }; + Sha256.prototype.digest = function () { + var _this = this; + if (this.key) { + return this.key.then(function (key) { + return locateWindow() + .crypto.subtle.sign(SHA_256_HMAC_ALGO, key, _this.toHash) + .then(function (data) { return new Uint8Array(data); }); + }); + } + if (isEmptyData(this.toHash)) { + return Promise.resolve(EMPTY_DATA_SHA_256); + } + return Promise.resolve() + .then(function () { + return locateWindow().crypto.subtle.digest(SHA_256_HASH, _this.toHash); + }) + .then(function (data) { return Promise.resolve(new Uint8Array(data)); }); + }; + Sha256.prototype.reset = function () { + var _this = this; + this.toHash = new Uint8Array(0); + if (this.secret && this.secret !== void 0) { + this.key = new Promise(function (resolve, reject) { + locateWindow() + .crypto.subtle.importKey("raw", convertToBuffer(_this.secret), SHA_256_HMAC_ALGO, false, ["sign"]) + .then(resolve, reject); + }); + this.key.catch(function () { }); + } + }; + return Sha256; +}()); +export { Sha256 }; +//# sourceMappingURL=webCryptoSha256.js.map \ No newline at end of file diff --git a/amplify/functions/deleteDocument/node_modules/@aws-crypto/sha256-browser/build/module/webCryptoSha256.js.map b/amplify/functions/deleteDocument/node_modules/@aws-crypto/sha256-browser/build/module/webCryptoSha256.js.map new file mode 100644 index 0000000..c7b31c0 --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@aws-crypto/sha256-browser/build/module/webCryptoSha256.js.map @@ -0,0 +1 @@ 
+{"version":3,"file":"webCryptoSha256.js","sourceRoot":"","sources":["../../src/webCryptoSha256.ts"],"names":[],"mappings":"AACA,OAAO,EAAE,WAAW,EAAE,eAAe,EAAE,MAAM,kBAAkB,CAAC;AAChE,OAAO,EACL,kBAAkB,EAClB,YAAY,EACZ,iBAAiB,GAClB,MAAM,aAAa,CAAC;AACrB,OAAO,EAAE,YAAY,EAAE,MAAM,6BAA6B,CAAC;AAE3D;IAKE,gBAAY,MAAmB;QAFvB,WAAM,GAAe,IAAI,UAAU,CAAC,CAAC,CAAC,CAAC;QAG7C,IAAI,CAAC,MAAM,GAAG,MAAM,CAAC;QACrB,IAAI,CAAC,KAAK,EAAE,CAAC;IACf,CAAC;IAED,uBAAM,GAAN,UAAO,IAAgB;QACrB,IAAI,WAAW,CAAC,IAAI,CAAC,EAAE;YACrB,OAAO;SACR;QAED,IAAM,MAAM,GAAG,eAAe,CAAC,IAAI,CAAC,CAAC;QACrC,IAAM,UAAU,GAAG,IAAI,UAAU,CAC/B,IAAI,CAAC,MAAM,CAAC,UAAU,GAAG,MAAM,CAAC,UAAU,CAC3C,CAAC;QACF,UAAU,CAAC,GAAG,CAAC,IAAI,CAAC,MAAM,EAAE,CAAC,CAAC,CAAC;QAC/B,UAAU,CAAC,GAAG,CAAC,MAAM,EAAE,IAAI,CAAC,MAAM,CAAC,UAAU,CAAC,CAAC;QAC/C,IAAI,CAAC,MAAM,GAAG,UAAU,CAAC;IAC3B,CAAC;IAED,uBAAM,GAAN;QAAA,iBAkBC;QAjBC,IAAI,IAAI,CAAC,GAAG,EAAE;YACZ,OAAO,IAAI,CAAC,GAAG,CAAC,IAAI,CAAC,UAAC,GAAG;gBACvB,OAAA,YAAY,EAAE;qBACX,MAAM,CAAC,MAAM,CAAC,IAAI,CAAC,iBAAiB,EAAE,GAAG,EAAE,KAAI,CAAC,MAAM,CAAC;qBACvD,IAAI,CAAC,UAAC,IAAI,IAAK,OAAA,IAAI,UAAU,CAAC,IAAI,CAAC,EAApB,CAAoB,CAAC;YAFvC,CAEuC,CACxC,CAAC;SACH;QAED,IAAI,WAAW,CAAC,IAAI,CAAC,MAAM,CAAC,EAAE;YAC5B,OAAO,OAAO,CAAC,OAAO,CAAC,kBAAkB,CAAC,CAAC;SAC5C;QAED,OAAO,OAAO,CAAC,OAAO,EAAE;aACrB,IAAI,CAAC;YACJ,OAAA,YAAY,EAAE,CAAC,MAAM,CAAC,MAAM,CAAC,MAAM,CAAC,YAAY,EAAE,KAAI,CAAC,MAAM,CAAC;QAA9D,CAA8D,CAC/D;aACA,IAAI,CAAC,UAAC,IAAI,IAAK,OAAA,OAAO,CAAC,OAAO,CAAC,IAAI,UAAU,CAAC,IAAI,CAAC,CAAC,EAArC,CAAqC,CAAC,CAAC;IAC3D,CAAC;IAED,sBAAK,GAAL;QAAA,iBAgBC;QAfC,IAAI,CAAC,MAAM,GAAG,IAAI,UAAU,CAAC,CAAC,CAAC,CAAC;QAChC,IAAI,IAAI,CAAC,MAAM,IAAI,IAAI,CAAC,MAAM,KAAK,KAAK,CAAC,EAAE;YACzC,IAAI,CAAC,GAAG,GAAG,IAAI,OAAO,CAAC,UAAC,OAAO,EAAE,MAAM;gBACrC,YAAY,EAAE;qBACT,MAAM,CAAC,MAAM,CAAC,SAAS,CACxB,KAAK,EACL,eAAe,CAAC,KAAI,CAAC,MAAoB,CAAC,EAC1C,iBAAiB,EACjB,KAAK,EACL,CAAC,MAAM,CAAC,CACX;qBACI,IAAI,CAAC,OAAO,EAAE,MAAM,CAAC,CAAC;YAC7B,CAAC,CAAC,CAAC;YACH,IAAI,CAAC,GAAG,CAAC,KAAK,CAAC,cAAO,CAAC,CAAC,CAAC;SAC1B;IACH,CAAC;IACH
,aAAC;AAAD,CAAC,AA7DD,IA6DC"} \ No newline at end of file diff --git a/amplify/functions/deleteDocument/node_modules/@aws-crypto/sha256-browser/node_modules/@smithy/is-array-buffer/LICENSE b/amplify/functions/deleteDocument/node_modules/@aws-crypto/sha256-browser/node_modules/@smithy/is-array-buffer/LICENSE new file mode 100644 index 0000000..7b6491b --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@aws-crypto/sha256-browser/node_modules/@smithy/is-array-buffer/LICENSE @@ -0,0 +1,201 @@ +Apache License + Version 2.0, January 2004 + http://www.apache.org/licenses/ + + TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION + + 1. Definitions. + + "License" shall mean the terms and conditions for use, reproduction, + and distribution as defined by Sections 1 through 9 of this document. + + "Licensor" shall mean the copyright owner or entity authorized by + the copyright owner that is granting the License. + + "Legal Entity" shall mean the union of the acting entity and all + other entities that control, are controlled by, or are under common + control with that entity. For the purposes of this definition, + "control" means (i) the power, direct or indirect, to cause the + direction or management of such entity, whether by contract or + otherwise, or (ii) ownership of fifty percent (50%) or more of the + outstanding shares, or (iii) beneficial ownership of such entity. + + "You" (or "Your") shall mean an individual or Legal Entity + exercising permissions granted by this License. + + "Source" form shall mean the preferred form for making modifications, + including but not limited to software source code, documentation + source, and configuration files. + + "Object" form shall mean any form resulting from mechanical + transformation or translation of a Source form, including but + not limited to compiled object code, generated documentation, + and conversions to other media types. 
+ + "Work" shall mean the work of authorship, whether in Source or + Object form, made available under the License, as indicated by a + copyright notice that is included in or attached to the work + (an example is provided in the Appendix below). + + "Derivative Works" shall mean any work, whether in Source or Object + form, that is based on (or derived from) the Work and for which the + editorial revisions, annotations, elaborations, or other modifications + represent, as a whole, an original work of authorship. For the purposes + of this License, Derivative Works shall not include works that remain + separable from, or merely link (or bind by name) to the interfaces of, + the Work and Derivative Works thereof. + + "Contribution" shall mean any work of authorship, including + the original version of the Work and any modifications or additions + to that Work or Derivative Works thereof, that is intentionally + submitted to Licensor for inclusion in the Work by the copyright owner + or by an individual or Legal Entity authorized to submit on behalf of + the copyright owner. For the purposes of this definition, "submitted" + means any form of electronic, verbal, or written communication sent + to the Licensor or its representatives, including but not limited to + communication on electronic mailing lists, source code control systems, + and issue tracking systems that are managed by, or on behalf of, the + Licensor for the purpose of discussing and improving the Work, but + excluding communication that is conspicuously marked or otherwise + designated in writing by the copyright owner as "Not a Contribution." + + "Contributor" shall mean Licensor and any individual or Legal Entity + on behalf of whom a Contribution has been received by Licensor and + subsequently incorporated within the Work. + + 2. Grant of Copyright License. 
Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + copyright license to reproduce, prepare Derivative Works of, + publicly display, publicly perform, sublicense, and distribute the + Work and such Derivative Works in Source or Object form. + + 3. Grant of Patent License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + (except as stated in this section) patent license to make, have made, + use, offer to sell, sell, import, and otherwise transfer the Work, + where such license applies only to those patent claims licensable + by such Contributor that are necessarily infringed by their + Contribution(s) alone or by combination of their Contribution(s) + with the Work to which such Contribution(s) was submitted. If You + institute patent litigation against any entity (including a + cross-claim or counterclaim in a lawsuit) alleging that the Work + or a Contribution incorporated within the Work constitutes direct + or contributory patent infringement, then any patent licenses + granted to You under this License for that Work shall terminate + as of the date such litigation is filed. + + 4. Redistribution. 
You may reproduce and distribute copies of the + Work or Derivative Works thereof in any medium, with or without + modifications, and in Source or Object form, provided that You + meet the following conditions: + + (a) You must give any other recipients of the Work or + Derivative Works a copy of this License; and + + (b) You must cause any modified files to carry prominent notices + stating that You changed the files; and + + (c) You must retain, in the Source form of any Derivative Works + that You distribute, all copyright, patent, trademark, and + attribution notices from the Source form of the Work, + excluding those notices that do not pertain to any part of + the Derivative Works; and + + (d) If the Work includes a "NOTICE" text file as part of its + distribution, then any Derivative Works that You distribute must + include a readable copy of the attribution notices contained + within such NOTICE file, excluding those notices that do not + pertain to any part of the Derivative Works, in at least one + of the following places: within a NOTICE text file distributed + as part of the Derivative Works; within the Source form or + documentation, if provided along with the Derivative Works; or, + within a display generated by the Derivative Works, if and + wherever such third-party notices normally appear. The contents + of the NOTICE file are for informational purposes only and + do not modify the License. You may add Your own attribution + notices within Derivative Works that You distribute, alongside + or as an addendum to the NOTICE text from the Work, provided + that such additional attribution notices cannot be construed + as modifying the License. 
+ + You may add Your own copyright statement to Your modifications and + may provide additional or different license terms and conditions + for use, reproduction, or distribution of Your modifications, or + for any such Derivative Works as a whole, provided Your use, + reproduction, and distribution of the Work otherwise complies with + the conditions stated in this License. + + 5. Submission of Contributions. Unless You explicitly state otherwise, + any Contribution intentionally submitted for inclusion in the Work + by You to the Licensor shall be under the terms and conditions of + this License, without any additional terms or conditions. + Notwithstanding the above, nothing herein shall supersede or modify + the terms of any separate license agreement you may have executed + with Licensor regarding such Contributions. + + 6. Trademarks. This License does not grant permission to use the trade + names, trademarks, service marks, or product names of the Licensor, + except as required for reasonable and customary use in describing the + origin of the Work and reproducing the content of the NOTICE file. + + 7. Disclaimer of Warranty. Unless required by applicable law or + agreed to in writing, Licensor provides the Work (and each + Contributor provides its Contributions) on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or + implied, including, without limitation, any warranties or conditions + of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A + PARTICULAR PURPOSE. You are solely responsible for determining the + appropriateness of using or redistributing the Work and assume any + risks associated with Your exercise of permissions under this License. + + 8. Limitation of Liability. 
In no event and under no legal theory, + whether in tort (including negligence), contract, or otherwise, + unless required by applicable law (such as deliberate and grossly + negligent acts) or agreed to in writing, shall any Contributor be + liable to You for damages, including any direct, indirect, special, + incidental, or consequential damages of any character arising as a + result of this License or out of the use or inability to use the + Work (including but not limited to damages for loss of goodwill, + work stoppage, computer failure or malfunction, or any and all + other commercial damages or losses), even if such Contributor + has been advised of the possibility of such damages. + + 9. Accepting Warranty or Additional Liability. While redistributing + the Work or Derivative Works thereof, You may choose to offer, + and charge a fee for, acceptance of support, warranty, indemnity, + or other liability obligations and/or rights consistent with this + License. However, in accepting such obligations, You may act only + on Your own behalf and on Your sole responsibility, not on behalf + of any other Contributor, and only if You agree to indemnify, + defend, and hold each Contributor harmless for any liability + incurred by, or claims asserted against, such Contributor by reason + of your accepting any such warranty or additional liability. + + END OF TERMS AND CONDITIONS + + APPENDIX: How to apply the Apache License to your work. + + To apply the Apache License to your work, attach the following + boilerplate notice, with the fields enclosed by brackets "{}" + replaced with your own identifying information. (Don't include + the brackets!) The text should be enclosed in the appropriate + comment syntax for the file format. We also recommend that a + file or class name and description of purpose be included on the + same "printed page" as the copyright notice for easier + identification within third-party archives. + + Copyright 2018-2020 Amazon.com, Inc. 
or its affiliates. All Rights Reserved. + + Licensed under the Apache License, Version 2.0 (the "License"); + you may not use this file except in compliance with the License. + You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + + Unless required by applicable law or agreed to in writing, software + distributed under the License is distributed on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + See the License for the specific language governing permissions and + limitations under the License. \ No newline at end of file diff --git a/amplify/functions/deleteDocument/node_modules/@aws-crypto/sha256-browser/node_modules/@smithy/is-array-buffer/README.md b/amplify/functions/deleteDocument/node_modules/@aws-crypto/sha256-browser/node_modules/@smithy/is-array-buffer/README.md new file mode 100644 index 0000000..31853f2 --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@aws-crypto/sha256-browser/node_modules/@smithy/is-array-buffer/README.md @@ -0,0 +1,10 @@ +# @smithy/is-array-buffer + +[![NPM version](https://img.shields.io/npm/v/@smithy/is-array-buffer/latest.svg)](https://www.npmjs.com/package/@smithy/is-array-buffer) +[![NPM downloads](https://img.shields.io/npm/dm/@smithy/is-array-buffer.svg)](https://www.npmjs.com/package/@smithy/is-array-buffer) + +> An internal package + +## Usage + +You probably shouldn't, at least directly. 
diff --git a/amplify/functions/deleteDocument/node_modules/@aws-crypto/sha256-browser/node_modules/@smithy/is-array-buffer/dist-cjs/index.js b/amplify/functions/deleteDocument/node_modules/@aws-crypto/sha256-browser/node_modules/@smithy/is-array-buffer/dist-cjs/index.js new file mode 100644 index 0000000..5d792e7 --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@aws-crypto/sha256-browser/node_modules/@smithy/is-array-buffer/dist-cjs/index.js @@ -0,0 +1,32 @@ +var __defProp = Object.defineProperty; +var __getOwnPropDesc = Object.getOwnPropertyDescriptor; +var __getOwnPropNames = Object.getOwnPropertyNames; +var __hasOwnProp = Object.prototype.hasOwnProperty; +var __name = (target, value) => __defProp(target, "name", { value, configurable: true }); +var __export = (target, all) => { + for (var name in all) + __defProp(target, name, { get: all[name], enumerable: true }); +}; +var __copyProps = (to, from, except, desc) => { + if (from && typeof from === "object" || typeof from === "function") { + for (let key of __getOwnPropNames(from)) + if (!__hasOwnProp.call(to, key) && key !== except) + __defProp(to, key, { get: () => from[key], enumerable: !(desc = __getOwnPropDesc(from, key)) || desc.enumerable }); + } + return to; +}; +var __toCommonJS = (mod) => __copyProps(__defProp({}, "__esModule", { value: true }), mod); + +// src/index.ts +var src_exports = {}; +__export(src_exports, { + isArrayBuffer: () => isArrayBuffer +}); +module.exports = __toCommonJS(src_exports); +var isArrayBuffer = /* @__PURE__ */ __name((arg) => typeof ArrayBuffer === "function" && arg instanceof ArrayBuffer || Object.prototype.toString.call(arg) === "[object ArrayBuffer]", "isArrayBuffer"); +// Annotate the CommonJS export names for ESM import in node: + +0 && (module.exports = { + isArrayBuffer +}); + diff --git a/amplify/functions/deleteDocument/node_modules/@aws-crypto/sha256-browser/node_modules/@smithy/is-array-buffer/dist-es/index.js 
b/amplify/functions/deleteDocument/node_modules/@aws-crypto/sha256-browser/node_modules/@smithy/is-array-buffer/dist-es/index.js new file mode 100644 index 0000000..8096cca --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@aws-crypto/sha256-browser/node_modules/@smithy/is-array-buffer/dist-es/index.js @@ -0,0 +1,2 @@ +export const isArrayBuffer = (arg) => (typeof ArrayBuffer === "function" && arg instanceof ArrayBuffer) || + Object.prototype.toString.call(arg) === "[object ArrayBuffer]"; diff --git a/amplify/functions/deleteDocument/node_modules/@aws-crypto/sha256-browser/node_modules/@smithy/is-array-buffer/dist-types/index.d.ts b/amplify/functions/deleteDocument/node_modules/@aws-crypto/sha256-browser/node_modules/@smithy/is-array-buffer/dist-types/index.d.ts new file mode 100644 index 0000000..64f452e --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@aws-crypto/sha256-browser/node_modules/@smithy/is-array-buffer/dist-types/index.d.ts @@ -0,0 +1,4 @@ +/** + * @internal + */ +export declare const isArrayBuffer: (arg: any) => arg is ArrayBuffer; diff --git a/amplify/functions/deleteDocument/node_modules/@aws-crypto/sha256-browser/node_modules/@smithy/is-array-buffer/dist-types/ts3.4/index.d.ts b/amplify/functions/deleteDocument/node_modules/@aws-crypto/sha256-browser/node_modules/@smithy/is-array-buffer/dist-types/ts3.4/index.d.ts new file mode 100644 index 0000000..ca8fd6b --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@aws-crypto/sha256-browser/node_modules/@smithy/is-array-buffer/dist-types/ts3.4/index.d.ts @@ -0,0 +1,4 @@ +/** + * @internal + */ +export declare const isArrayBuffer: (arg: any) => arg is ArrayBuffer; diff --git a/amplify/functions/deleteDocument/node_modules/@aws-crypto/sha256-browser/node_modules/@smithy/is-array-buffer/package.json b/amplify/functions/deleteDocument/node_modules/@aws-crypto/sha256-browser/node_modules/@smithy/is-array-buffer/package.json new file mode 100644 index 
0000000..ed8affc --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@aws-crypto/sha256-browser/node_modules/@smithy/is-array-buffer/package.json @@ -0,0 +1,60 @@ +{ + "name": "@smithy/is-array-buffer", + "version": "2.2.0", + "description": "Provides a function for detecting if an argument is an ArrayBuffer", + "scripts": { + "build": "concurrently 'yarn:build:cjs' 'yarn:build:es' 'yarn:build:types && yarn build:types:downlevel'", + "build:cjs": "node ../../scripts/inline is-array-buffer", + "build:es": "yarn g:tsc -p tsconfig.es.json", + "build:types": "yarn g:tsc -p tsconfig.types.json", + "build:types:downlevel": "downlevel-dts dist-types dist-types/ts3.4", + "stage-release": "rimraf ./.release && yarn pack && mkdir ./.release && tar zxvf ./package.tgz --directory ./.release && rm ./package.tgz", + "clean": "rimraf ./dist-* && rimraf *.tsbuildinfo || exit 0", + "lint": "eslint -c ../../.eslintrc.js \"src/**/*.ts\"", + "format": "prettier --config ../../prettier.config.js --ignore-path ../.prettierignore --write \"**/*.{ts,md,json}\"", + "test": "yarn g:jest" + }, + "author": { + "name": "AWS SDK for JavaScript Team", + "url": "https://aws.amazon.com/javascript/" + }, + "license": "Apache-2.0", + "main": "./dist-cjs/index.js", + "module": "./dist-es/index.js", + "types": "./dist-types/index.d.ts", + "dependencies": { + "tslib": "^2.6.2" + }, + "engines": { + "node": ">=14.0.0" + }, + "typesVersions": { + "<4.0": { + "dist-types/*": [ + "dist-types/ts3.4/*" + ] + } + }, + "files": [ + "dist-*/**" + ], + "homepage": "https://github.com/awslabs/smithy-typescript/tree/main/packages/is-array-buffer", + "repository": { + "type": "git", + "url": "https://github.com/awslabs/smithy-typescript.git", + "directory": "packages/is-array-buffer" + }, + "devDependencies": { + "@tsconfig/recommended": "1.0.1", + "concurrently": "7.0.0", + "downlevel-dts": "0.10.1", + "rimraf": "3.0.2", + "typedoc": "0.23.23" + }, + "typedoc": { + "entryPoint": "src/index.ts" + }, + 
"publishConfig": { + "directory": ".release/package" + } +} \ No newline at end of file diff --git a/amplify/functions/deleteDocument/node_modules/@aws-crypto/sha256-browser/node_modules/@smithy/util-buffer-from/LICENSE b/amplify/functions/deleteDocument/node_modules/@aws-crypto/sha256-browser/node_modules/@smithy/util-buffer-from/LICENSE new file mode 100644 index 0000000..7b6491b --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@aws-crypto/sha256-browser/node_modules/@smithy/util-buffer-from/LICENSE @@ -0,0 +1,201 @@ +Apache License + Version 2.0, January 2004 + http://www.apache.org/licenses/ + + TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION + + 1. Definitions. + + "License" shall mean the terms and conditions for use, reproduction, + and distribution as defined by Sections 1 through 9 of this document. + + "Licensor" shall mean the copyright owner or entity authorized by + the copyright owner that is granting the License. + + "Legal Entity" shall mean the union of the acting entity and all + other entities that control, are controlled by, or are under common + control with that entity. For the purposes of this definition, + "control" means (i) the power, direct or indirect, to cause the + direction or management of such entity, whether by contract or + otherwise, or (ii) ownership of fifty percent (50%) or more of the + outstanding shares, or (iii) beneficial ownership of such entity. + + "You" (or "Your") shall mean an individual or Legal Entity + exercising permissions granted by this License. + + "Source" form shall mean the preferred form for making modifications, + including but not limited to software source code, documentation + source, and configuration files. + + "Object" form shall mean any form resulting from mechanical + transformation or translation of a Source form, including but + not limited to compiled object code, generated documentation, + and conversions to other media types. 
+ + "Work" shall mean the work of authorship, whether in Source or + Object form, made available under the License, as indicated by a + copyright notice that is included in or attached to the work + (an example is provided in the Appendix below). + + "Derivative Works" shall mean any work, whether in Source or Object + form, that is based on (or derived from) the Work and for which the + editorial revisions, annotations, elaborations, or other modifications + represent, as a whole, an original work of authorship. For the purposes + of this License, Derivative Works shall not include works that remain + separable from, or merely link (or bind by name) to the interfaces of, + the Work and Derivative Works thereof. + + "Contribution" shall mean any work of authorship, including + the original version of the Work and any modifications or additions + to that Work or Derivative Works thereof, that is intentionally + submitted to Licensor for inclusion in the Work by the copyright owner + or by an individual or Legal Entity authorized to submit on behalf of + the copyright owner. For the purposes of this definition, "submitted" + means any form of electronic, verbal, or written communication sent + to the Licensor or its representatives, including but not limited to + communication on electronic mailing lists, source code control systems, + and issue tracking systems that are managed by, or on behalf of, the + Licensor for the purpose of discussing and improving the Work, but + excluding communication that is conspicuously marked or otherwise + designated in writing by the copyright owner as "Not a Contribution." + + "Contributor" shall mean Licensor and any individual or Legal Entity + on behalf of whom a Contribution has been received by Licensor and + subsequently incorporated within the Work. + + 2. Grant of Copyright License. 
Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + copyright license to reproduce, prepare Derivative Works of, + publicly display, publicly perform, sublicense, and distribute the + Work and such Derivative Works in Source or Object form. + + 3. Grant of Patent License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + (except as stated in this section) patent license to make, have made, + use, offer to sell, sell, import, and otherwise transfer the Work, + where such license applies only to those patent claims licensable + by such Contributor that are necessarily infringed by their + Contribution(s) alone or by combination of their Contribution(s) + with the Work to which such Contribution(s) was submitted. If You + institute patent litigation against any entity (including a + cross-claim or counterclaim in a lawsuit) alleging that the Work + or a Contribution incorporated within the Work constitutes direct + or contributory patent infringement, then any patent licenses + granted to You under this License for that Work shall terminate + as of the date such litigation is filed. + + 4. Redistribution. 
You may reproduce and distribute copies of the + Work or Derivative Works thereof in any medium, with or without + modifications, and in Source or Object form, provided that You + meet the following conditions: + + (a) You must give any other recipients of the Work or + Derivative Works a copy of this License; and + + (b) You must cause any modified files to carry prominent notices + stating that You changed the files; and + + (c) You must retain, in the Source form of any Derivative Works + that You distribute, all copyright, patent, trademark, and + attribution notices from the Source form of the Work, + excluding those notices that do not pertain to any part of + the Derivative Works; and + + (d) If the Work includes a "NOTICE" text file as part of its + distribution, then any Derivative Works that You distribute must + include a readable copy of the attribution notices contained + within such NOTICE file, excluding those notices that do not + pertain to any part of the Derivative Works, in at least one + of the following places: within a NOTICE text file distributed + as part of the Derivative Works; within the Source form or + documentation, if provided along with the Derivative Works; or, + within a display generated by the Derivative Works, if and + wherever such third-party notices normally appear. The contents + of the NOTICE file are for informational purposes only and + do not modify the License. You may add Your own attribution + notices within Derivative Works that You distribute, alongside + or as an addendum to the NOTICE text from the Work, provided + that such additional attribution notices cannot be construed + as modifying the License. 
+ + You may add Your own copyright statement to Your modifications and + may provide additional or different license terms and conditions + for use, reproduction, or distribution of Your modifications, or + for any such Derivative Works as a whole, provided Your use, + reproduction, and distribution of the Work otherwise complies with + the conditions stated in this License. + + 5. Submission of Contributions. Unless You explicitly state otherwise, + any Contribution intentionally submitted for inclusion in the Work + by You to the Licensor shall be under the terms and conditions of + this License, without any additional terms or conditions. + Notwithstanding the above, nothing herein shall supersede or modify + the terms of any separate license agreement you may have executed + with Licensor regarding such Contributions. + + 6. Trademarks. This License does not grant permission to use the trade + names, trademarks, service marks, or product names of the Licensor, + except as required for reasonable and customary use in describing the + origin of the Work and reproducing the content of the NOTICE file. + + 7. Disclaimer of Warranty. Unless required by applicable law or + agreed to in writing, Licensor provides the Work (and each + Contributor provides its Contributions) on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or + implied, including, without limitation, any warranties or conditions + of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A + PARTICULAR PURPOSE. You are solely responsible for determining the + appropriateness of using or redistributing the Work and assume any + risks associated with Your exercise of permissions under this License. + + 8. Limitation of Liability. 
In no event and under no legal theory, + whether in tort (including negligence), contract, or otherwise, + unless required by applicable law (such as deliberate and grossly + negligent acts) or agreed to in writing, shall any Contributor be + liable to You for damages, including any direct, indirect, special, + incidental, or consequential damages of any character arising as a + result of this License or out of the use or inability to use the + Work (including but not limited to damages for loss of goodwill, + work stoppage, computer failure or malfunction, or any and all + other commercial damages or losses), even if such Contributor + has been advised of the possibility of such damages. + + 9. Accepting Warranty or Additional Liability. While redistributing + the Work or Derivative Works thereof, You may choose to offer, + and charge a fee for, acceptance of support, warranty, indemnity, + or other liability obligations and/or rights consistent with this + License. However, in accepting such obligations, You may act only + on Your own behalf and on Your sole responsibility, not on behalf + of any other Contributor, and only if You agree to indemnify, + defend, and hold each Contributor harmless for any liability + incurred by, or claims asserted against, such Contributor by reason + of your accepting any such warranty or additional liability. + + END OF TERMS AND CONDITIONS + + APPENDIX: How to apply the Apache License to your work. + + To apply the Apache License to your work, attach the following + boilerplate notice, with the fields enclosed by brackets "{}" + replaced with your own identifying information. (Don't include + the brackets!) The text should be enclosed in the appropriate + comment syntax for the file format. We also recommend that a + file or class name and description of purpose be included on the + same "printed page" as the copyright notice for easier + identification within third-party archives. + + Copyright 2018-2020 Amazon.com, Inc. 
or its affiliates. All Rights Reserved. + + Licensed under the Apache License, Version 2.0 (the "License"); + you may not use this file except in compliance with the License. + You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + + Unless required by applicable law or agreed to in writing, software + distributed under the License is distributed on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + See the License for the specific language governing permissions and + limitations under the License. \ No newline at end of file diff --git a/amplify/functions/deleteDocument/node_modules/@aws-crypto/sha256-browser/node_modules/@smithy/util-buffer-from/README.md b/amplify/functions/deleteDocument/node_modules/@aws-crypto/sha256-browser/node_modules/@smithy/util-buffer-from/README.md new file mode 100644 index 0000000..c896b04 --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@aws-crypto/sha256-browser/node_modules/@smithy/util-buffer-from/README.md @@ -0,0 +1,10 @@ +# @smithy/util-buffer-from + +[![NPM version](https://img.shields.io/npm/v/@smithy/util-buffer-from/latest.svg)](https://www.npmjs.com/package/@smithy/util-buffer-from) +[![NPM downloads](https://img.shields.io/npm/dm/@smithy/util-buffer-from.svg)](https://www.npmjs.com/package/@smithy/util-buffer-from) + +> An internal package + +## Usage + +You probably shouldn't, at least directly. 
diff --git a/amplify/functions/deleteDocument/node_modules/@aws-crypto/sha256-browser/node_modules/@smithy/util-buffer-from/dist-cjs/index.js b/amplify/functions/deleteDocument/node_modules/@aws-crypto/sha256-browser/node_modules/@smithy/util-buffer-from/dist-cjs/index.js new file mode 100644 index 0000000..c6738d9 --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@aws-crypto/sha256-browser/node_modules/@smithy/util-buffer-from/dist-cjs/index.js @@ -0,0 +1,47 @@ +var __defProp = Object.defineProperty; +var __getOwnPropDesc = Object.getOwnPropertyDescriptor; +var __getOwnPropNames = Object.getOwnPropertyNames; +var __hasOwnProp = Object.prototype.hasOwnProperty; +var __name = (target, value) => __defProp(target, "name", { value, configurable: true }); +var __export = (target, all) => { + for (var name in all) + __defProp(target, name, { get: all[name], enumerable: true }); +}; +var __copyProps = (to, from, except, desc) => { + if (from && typeof from === "object" || typeof from === "function") { + for (let key of __getOwnPropNames(from)) + if (!__hasOwnProp.call(to, key) && key !== except) + __defProp(to, key, { get: () => from[key], enumerable: !(desc = __getOwnPropDesc(from, key)) || desc.enumerable }); + } + return to; +}; +var __toCommonJS = (mod) => __copyProps(__defProp({}, "__esModule", { value: true }), mod); + +// src/index.ts +var src_exports = {}; +__export(src_exports, { + fromArrayBuffer: () => fromArrayBuffer, + fromString: () => fromString +}); +module.exports = __toCommonJS(src_exports); +var import_is_array_buffer = require("@smithy/is-array-buffer"); +var import_buffer = require("buffer"); +var fromArrayBuffer = /* @__PURE__ */ __name((input, offset = 0, length = input.byteLength - offset) => { + if (!(0, import_is_array_buffer.isArrayBuffer)(input)) { + throw new TypeError(`The "input" argument must be ArrayBuffer. 
Received type ${typeof input} (${input})`); + } + return import_buffer.Buffer.from(input, offset, length); +}, "fromArrayBuffer"); +var fromString = /* @__PURE__ */ __name((input, encoding) => { + if (typeof input !== "string") { + throw new TypeError(`The "input" argument must be of type string. Received type ${typeof input} (${input})`); + } + return encoding ? import_buffer.Buffer.from(input, encoding) : import_buffer.Buffer.from(input); +}, "fromString"); +// Annotate the CommonJS export names for ESM import in node: + +0 && (module.exports = { + fromArrayBuffer, + fromString +}); + diff --git a/amplify/functions/deleteDocument/node_modules/@aws-crypto/sha256-browser/node_modules/@smithy/util-buffer-from/dist-es/index.js b/amplify/functions/deleteDocument/node_modules/@aws-crypto/sha256-browser/node_modules/@smithy/util-buffer-from/dist-es/index.js new file mode 100644 index 0000000..718f831 --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@aws-crypto/sha256-browser/node_modules/@smithy/util-buffer-from/dist-es/index.js @@ -0,0 +1,14 @@ +import { isArrayBuffer } from "@smithy/is-array-buffer"; +import { Buffer } from "buffer"; +export const fromArrayBuffer = (input, offset = 0, length = input.byteLength - offset) => { + if (!isArrayBuffer(input)) { + throw new TypeError(`The "input" argument must be ArrayBuffer. Received type ${typeof input} (${input})`); + } + return Buffer.from(input, offset, length); +}; +export const fromString = (input, encoding) => { + if (typeof input !== "string") { + throw new TypeError(`The "input" argument must be of type string. Received type ${typeof input} (${input})`); + } + return encoding ? 
Buffer.from(input, encoding) : Buffer.from(input); +}; diff --git a/amplify/functions/deleteDocument/node_modules/@aws-crypto/sha256-browser/node_modules/@smithy/util-buffer-from/dist-types/index.d.ts b/amplify/functions/deleteDocument/node_modules/@aws-crypto/sha256-browser/node_modules/@smithy/util-buffer-from/dist-types/index.d.ts new file mode 100644 index 0000000..a523134 --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@aws-crypto/sha256-browser/node_modules/@smithy/util-buffer-from/dist-types/index.d.ts @@ -0,0 +1,13 @@ +import { Buffer } from "buffer"; +/** + * @internal + */ +export declare const fromArrayBuffer: (input: ArrayBuffer, offset?: number, length?: number) => Buffer; +/** + * @internal + */ +export type StringEncoding = "ascii" | "utf8" | "utf16le" | "ucs2" | "base64" | "latin1" | "binary" | "hex"; +/** + * @internal + */ +export declare const fromString: (input: string, encoding?: StringEncoding) => Buffer; diff --git a/amplify/functions/deleteDocument/node_modules/@aws-crypto/sha256-browser/node_modules/@smithy/util-buffer-from/dist-types/ts3.4/index.d.ts b/amplify/functions/deleteDocument/node_modules/@aws-crypto/sha256-browser/node_modules/@smithy/util-buffer-from/dist-types/ts3.4/index.d.ts new file mode 100644 index 0000000..f9173f7 --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@aws-crypto/sha256-browser/node_modules/@smithy/util-buffer-from/dist-types/ts3.4/index.d.ts @@ -0,0 +1,13 @@ +import { Buffer } from "buffer"; +/** + * @internal + */ +export declare const fromArrayBuffer: (input: ArrayBuffer, offset?: number, length?: number) => Buffer; +/** + * @internal + */ +export type StringEncoding = "ascii" | "utf8" | "utf16le" | "ucs2" | "base64" | "latin1" | "binary" | "hex"; +/** + * @internal + */ +export declare const fromString: (input: string, encoding?: StringEncoding) => Buffer; diff --git 
a/amplify/functions/deleteDocument/node_modules/@aws-crypto/sha256-browser/node_modules/@smithy/util-buffer-from/package.json b/amplify/functions/deleteDocument/node_modules/@aws-crypto/sha256-browser/node_modules/@smithy/util-buffer-from/package.json new file mode 100644 index 0000000..a12e51c --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@aws-crypto/sha256-browser/node_modules/@smithy/util-buffer-from/package.json @@ -0,0 +1,61 @@ +{ + "name": "@smithy/util-buffer-from", + "version": "2.2.0", + "scripts": { + "build": "concurrently 'yarn:build:cjs' 'yarn:build:es' 'yarn:build:types && yarn build:types:downlevel'", + "build:cjs": "node ../../scripts/inline util-buffer-from", + "build:es": "yarn g:tsc -p tsconfig.es.json", + "build:types": "yarn g:tsc -p tsconfig.types.json", + "build:types:downlevel": "downlevel-dts dist-types dist-types/ts3.4", + "stage-release": "rimraf ./.release && yarn pack && mkdir ./.release && tar zxvf ./package.tgz --directory ./.release && rm ./package.tgz", + "clean": "rimraf ./dist-* && rimraf *.tsbuildinfo || exit 0", + "lint": "eslint -c ../../.eslintrc.js \"src/**/*.ts\"", + "format": "prettier --config ../../prettier.config.js --ignore-path ../.prettierignore --write \"**/*.{ts,md,json}\"", + "test": "yarn g:jest" + }, + "author": { + "name": "AWS SDK for JavaScript Team", + "url": "https://aws.amazon.com/javascript/" + }, + "license": "Apache-2.0", + "dependencies": { + "@smithy/is-array-buffer": "^2.2.0", + "tslib": "^2.6.2" + }, + "devDependencies": { + "@tsconfig/recommended": "1.0.1", + "@types/node": "^14.14.31", + "concurrently": "7.0.0", + "downlevel-dts": "0.10.1", + "rimraf": "3.0.2", + "typedoc": "0.23.23" + }, + "main": "./dist-cjs/index.js", + "module": "./dist-es/index.js", + "types": "./dist-types/index.d.ts", + "engines": { + "node": ">=14.0.0" + }, + "typesVersions": { + "<4.0": { + "dist-types/*": [ + "dist-types/ts3.4/*" + ] + } + }, + "files": [ + "dist-*/**" + ], + "homepage": 
"https://github.com/awslabs/smithy-typescript/tree/main/packages/util-buffer-from", + "repository": { + "type": "git", + "url": "https://github.com/awslabs/smithy-typescript.git", + "directory": "packages/util-buffer-from" + }, + "typedoc": { + "entryPoint": "src/index.ts" + }, + "publishConfig": { + "directory": ".release/package" + } +} \ No newline at end of file diff --git a/amplify/functions/deleteDocument/node_modules/@aws-crypto/sha256-browser/node_modules/@smithy/util-utf8/LICENSE b/amplify/functions/deleteDocument/node_modules/@aws-crypto/sha256-browser/node_modules/@smithy/util-utf8/LICENSE new file mode 100644 index 0000000..7b6491b --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@aws-crypto/sha256-browser/node_modules/@smithy/util-utf8/LICENSE @@ -0,0 +1,201 @@ +Apache License + Version 2.0, January 2004 + http://www.apache.org/licenses/ + + TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION + + 1. Definitions. + + "License" shall mean the terms and conditions for use, reproduction, + and distribution as defined by Sections 1 through 9 of this document. + + "Licensor" shall mean the copyright owner or entity authorized by + the copyright owner that is granting the License. + + "Legal Entity" shall mean the union of the acting entity and all + other entities that control, are controlled by, or are under common + control with that entity. For the purposes of this definition, + "control" means (i) the power, direct or indirect, to cause the + direction or management of such entity, whether by contract or + otherwise, or (ii) ownership of fifty percent (50%) or more of the + outstanding shares, or (iii) beneficial ownership of such entity. + + "You" (or "Your") shall mean an individual or Legal Entity + exercising permissions granted by this License. + + "Source" form shall mean the preferred form for making modifications, + including but not limited to software source code, documentation + source, and configuration files. 
+ + "Object" form shall mean any form resulting from mechanical + transformation or translation of a Source form, including but + not limited to compiled object code, generated documentation, + and conversions to other media types. + + "Work" shall mean the work of authorship, whether in Source or + Object form, made available under the License, as indicated by a + copyright notice that is included in or attached to the work + (an example is provided in the Appendix below). + + "Derivative Works" shall mean any work, whether in Source or Object + form, that is based on (or derived from) the Work and for which the + editorial revisions, annotations, elaborations, or other modifications + represent, as a whole, an original work of authorship. For the purposes + of this License, Derivative Works shall not include works that remain + separable from, or merely link (or bind by name) to the interfaces of, + the Work and Derivative Works thereof. + + "Contribution" shall mean any work of authorship, including + the original version of the Work and any modifications or additions + to that Work or Derivative Works thereof, that is intentionally + submitted to Licensor for inclusion in the Work by the copyright owner + or by an individual or Legal Entity authorized to submit on behalf of + the copyright owner. For the purposes of this definition, "submitted" + means any form of electronic, verbal, or written communication sent + to the Licensor or its representatives, including but not limited to + communication on electronic mailing lists, source code control systems, + and issue tracking systems that are managed by, or on behalf of, the + Licensor for the purpose of discussing and improving the Work, but + excluding communication that is conspicuously marked or otherwise + designated in writing by the copyright owner as "Not a Contribution." 
+ + "Contributor" shall mean Licensor and any individual or Legal Entity + on behalf of whom a Contribution has been received by Licensor and + subsequently incorporated within the Work. + + 2. Grant of Copyright License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + copyright license to reproduce, prepare Derivative Works of, + publicly display, publicly perform, sublicense, and distribute the + Work and such Derivative Works in Source or Object form. + + 3. Grant of Patent License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + (except as stated in this section) patent license to make, have made, + use, offer to sell, sell, import, and otherwise transfer the Work, + where such license applies only to those patent claims licensable + by such Contributor that are necessarily infringed by their + Contribution(s) alone or by combination of their Contribution(s) + with the Work to which such Contribution(s) was submitted. If You + institute patent litigation against any entity (including a + cross-claim or counterclaim in a lawsuit) alleging that the Work + or a Contribution incorporated within the Work constitutes direct + or contributory patent infringement, then any patent licenses + granted to You under this License for that Work shall terminate + as of the date such litigation is filed. + + 4. Redistribution. 
You may reproduce and distribute copies of the + Work or Derivative Works thereof in any medium, with or without + modifications, and in Source or Object form, provided that You + meet the following conditions: + + (a) You must give any other recipients of the Work or + Derivative Works a copy of this License; and + + (b) You must cause any modified files to carry prominent notices + stating that You changed the files; and + + (c) You must retain, in the Source form of any Derivative Works + that You distribute, all copyright, patent, trademark, and + attribution notices from the Source form of the Work, + excluding those notices that do not pertain to any part of + the Derivative Works; and + + (d) If the Work includes a "NOTICE" text file as part of its + distribution, then any Derivative Works that You distribute must + include a readable copy of the attribution notices contained + within such NOTICE file, excluding those notices that do not + pertain to any part of the Derivative Works, in at least one + of the following places: within a NOTICE text file distributed + as part of the Derivative Works; within the Source form or + documentation, if provided along with the Derivative Works; or, + within a display generated by the Derivative Works, if and + wherever such third-party notices normally appear. The contents + of the NOTICE file are for informational purposes only and + do not modify the License. You may add Your own attribution + notices within Derivative Works that You distribute, alongside + or as an addendum to the NOTICE text from the Work, provided + that such additional attribution notices cannot be construed + as modifying the License. 
+ + You may add Your own copyright statement to Your modifications and + may provide additional or different license terms and conditions + for use, reproduction, or distribution of Your modifications, or + for any such Derivative Works as a whole, provided Your use, + reproduction, and distribution of the Work otherwise complies with + the conditions stated in this License. + + 5. Submission of Contributions. Unless You explicitly state otherwise, + any Contribution intentionally submitted for inclusion in the Work + by You to the Licensor shall be under the terms and conditions of + this License, without any additional terms or conditions. + Notwithstanding the above, nothing herein shall supersede or modify + the terms of any separate license agreement you may have executed + with Licensor regarding such Contributions. + + 6. Trademarks. This License does not grant permission to use the trade + names, trademarks, service marks, or product names of the Licensor, + except as required for reasonable and customary use in describing the + origin of the Work and reproducing the content of the NOTICE file. + + 7. Disclaimer of Warranty. Unless required by applicable law or + agreed to in writing, Licensor provides the Work (and each + Contributor provides its Contributions) on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or + implied, including, without limitation, any warranties or conditions + of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A + PARTICULAR PURPOSE. You are solely responsible for determining the + appropriateness of using or redistributing the Work and assume any + risks associated with Your exercise of permissions under this License. + + 8. Limitation of Liability. 
In no event and under no legal theory, + whether in tort (including negligence), contract, or otherwise, + unless required by applicable law (such as deliberate and grossly + negligent acts) or agreed to in writing, shall any Contributor be + liable to You for damages, including any direct, indirect, special, + incidental, or consequential damages of any character arising as a + result of this License or out of the use or inability to use the + Work (including but not limited to damages for loss of goodwill, + work stoppage, computer failure or malfunction, or any and all + other commercial damages or losses), even if such Contributor + has been advised of the possibility of such damages. + + 9. Accepting Warranty or Additional Liability. While redistributing + the Work or Derivative Works thereof, You may choose to offer, + and charge a fee for, acceptance of support, warranty, indemnity, + or other liability obligations and/or rights consistent with this + License. However, in accepting such obligations, You may act only + on Your own behalf and on Your sole responsibility, not on behalf + of any other Contributor, and only if You agree to indemnify, + defend, and hold each Contributor harmless for any liability + incurred by, or claims asserted against, such Contributor by reason + of your accepting any such warranty or additional liability. + + END OF TERMS AND CONDITIONS + + APPENDIX: How to apply the Apache License to your work. + + To apply the Apache License to your work, attach the following + boilerplate notice, with the fields enclosed by brackets "{}" + replaced with your own identifying information. (Don't include + the brackets!) The text should be enclosed in the appropriate + comment syntax for the file format. We also recommend that a + file or class name and description of purpose be included on the + same "printed page" as the copyright notice for easier + identification within third-party archives. + + Copyright 2018-2020 Amazon.com, Inc. 
or its affiliates. All Rights Reserved. + + Licensed under the Apache License, Version 2.0 (the "License"); + you may not use this file except in compliance with the License. + You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + + Unless required by applicable law or agreed to in writing, software + distributed under the License is distributed on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + See the License for the specific language governing permissions and + limitations under the License. \ No newline at end of file diff --git a/amplify/functions/deleteDocument/node_modules/@aws-crypto/sha256-browser/node_modules/@smithy/util-utf8/README.md b/amplify/functions/deleteDocument/node_modules/@aws-crypto/sha256-browser/node_modules/@smithy/util-utf8/README.md new file mode 100644 index 0000000..fc5db6d --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@aws-crypto/sha256-browser/node_modules/@smithy/util-utf8/README.md @@ -0,0 +1,4 @@ +# @smithy/util-utf8 + +[![NPM version](https://img.shields.io/npm/v/@smithy/util-utf8/latest.svg)](https://www.npmjs.com/package/@smithy/util-utf8) +[![NPM downloads](https://img.shields.io/npm/dm/@smithy/util-utf8.svg)](https://www.npmjs.com/package/@smithy/util-utf8) diff --git a/amplify/functions/deleteDocument/node_modules/@aws-crypto/sha256-browser/node_modules/@smithy/util-utf8/dist-cjs/fromUtf8.browser.js b/amplify/functions/deleteDocument/node_modules/@aws-crypto/sha256-browser/node_modules/@smithy/util-utf8/dist-cjs/fromUtf8.browser.js new file mode 100644 index 0000000..532e610 --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@aws-crypto/sha256-browser/node_modules/@smithy/util-utf8/dist-cjs/fromUtf8.browser.js @@ -0,0 +1 @@ +module.exports = require("./index.js"); \ No newline at end of file diff --git 
a/amplify/functions/deleteDocument/node_modules/@aws-crypto/sha256-browser/node_modules/@smithy/util-utf8/dist-cjs/fromUtf8.js b/amplify/functions/deleteDocument/node_modules/@aws-crypto/sha256-browser/node_modules/@smithy/util-utf8/dist-cjs/fromUtf8.js new file mode 100644 index 0000000..532e610 --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@aws-crypto/sha256-browser/node_modules/@smithy/util-utf8/dist-cjs/fromUtf8.js @@ -0,0 +1 @@ +module.exports = require("./index.js"); \ No newline at end of file diff --git a/amplify/functions/deleteDocument/node_modules/@aws-crypto/sha256-browser/node_modules/@smithy/util-utf8/dist-cjs/index.js b/amplify/functions/deleteDocument/node_modules/@aws-crypto/sha256-browser/node_modules/@smithy/util-utf8/dist-cjs/index.js new file mode 100644 index 0000000..0b22680 --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@aws-crypto/sha256-browser/node_modules/@smithy/util-utf8/dist-cjs/index.js @@ -0,0 +1,65 @@ +var __defProp = Object.defineProperty; +var __getOwnPropDesc = Object.getOwnPropertyDescriptor; +var __getOwnPropNames = Object.getOwnPropertyNames; +var __hasOwnProp = Object.prototype.hasOwnProperty; +var __name = (target, value) => __defProp(target, "name", { value, configurable: true }); +var __export = (target, all) => { + for (var name in all) + __defProp(target, name, { get: all[name], enumerable: true }); +}; +var __copyProps = (to, from, except, desc) => { + if (from && typeof from === "object" || typeof from === "function") { + for (let key of __getOwnPropNames(from)) + if (!__hasOwnProp.call(to, key) && key !== except) + __defProp(to, key, { get: () => from[key], enumerable: !(desc = __getOwnPropDesc(from, key)) || desc.enumerable }); + } + return to; +}; +var __toCommonJS = (mod) => __copyProps(__defProp({}, "__esModule", { value: true }), mod); + +// src/index.ts +var src_exports = {}; +__export(src_exports, { + fromUtf8: () => fromUtf8, + toUint8Array: () => toUint8Array, + toUtf8: 
() => toUtf8 +}); +module.exports = __toCommonJS(src_exports); + +// src/fromUtf8.ts +var import_util_buffer_from = require("@smithy/util-buffer-from"); +var fromUtf8 = /* @__PURE__ */ __name((input) => { + const buf = (0, import_util_buffer_from.fromString)(input, "utf8"); + return new Uint8Array(buf.buffer, buf.byteOffset, buf.byteLength / Uint8Array.BYTES_PER_ELEMENT); +}, "fromUtf8"); + +// src/toUint8Array.ts +var toUint8Array = /* @__PURE__ */ __name((data) => { + if (typeof data === "string") { + return fromUtf8(data); + } + if (ArrayBuffer.isView(data)) { + return new Uint8Array(data.buffer, data.byteOffset, data.byteLength / Uint8Array.BYTES_PER_ELEMENT); + } + return new Uint8Array(data); +}, "toUint8Array"); + +// src/toUtf8.ts + +var toUtf8 = /* @__PURE__ */ __name((input) => { + if (typeof input === "string") { + return input; + } + if (typeof input !== "object" || typeof input.byteOffset !== "number" || typeof input.byteLength !== "number") { + throw new Error("@smithy/util-utf8: toUtf8 encoder function only accepts string | Uint8Array."); + } + return (0, import_util_buffer_from.fromArrayBuffer)(input.buffer, input.byteOffset, input.byteLength).toString("utf8"); +}, "toUtf8"); +// Annotate the CommonJS export names for ESM import in node: + +0 && (module.exports = { + fromUtf8, + toUint8Array, + toUtf8 +}); + diff --git a/amplify/functions/deleteDocument/node_modules/@aws-crypto/sha256-browser/node_modules/@smithy/util-utf8/dist-cjs/toUint8Array.js b/amplify/functions/deleteDocument/node_modules/@aws-crypto/sha256-browser/node_modules/@smithy/util-utf8/dist-cjs/toUint8Array.js new file mode 100644 index 0000000..532e610 --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@aws-crypto/sha256-browser/node_modules/@smithy/util-utf8/dist-cjs/toUint8Array.js @@ -0,0 +1 @@ +module.exports = require("./index.js"); \ No newline at end of file diff --git 
a/amplify/functions/deleteDocument/node_modules/@aws-crypto/sha256-browser/node_modules/@smithy/util-utf8/dist-cjs/toUtf8.browser.js b/amplify/functions/deleteDocument/node_modules/@aws-crypto/sha256-browser/node_modules/@smithy/util-utf8/dist-cjs/toUtf8.browser.js new file mode 100644 index 0000000..532e610 --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@aws-crypto/sha256-browser/node_modules/@smithy/util-utf8/dist-cjs/toUtf8.browser.js @@ -0,0 +1 @@ +module.exports = require("./index.js"); \ No newline at end of file diff --git a/amplify/functions/deleteDocument/node_modules/@aws-crypto/sha256-browser/node_modules/@smithy/util-utf8/dist-cjs/toUtf8.js b/amplify/functions/deleteDocument/node_modules/@aws-crypto/sha256-browser/node_modules/@smithy/util-utf8/dist-cjs/toUtf8.js new file mode 100644 index 0000000..532e610 --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@aws-crypto/sha256-browser/node_modules/@smithy/util-utf8/dist-cjs/toUtf8.js @@ -0,0 +1 @@ +module.exports = require("./index.js"); \ No newline at end of file diff --git a/amplify/functions/deleteDocument/node_modules/@aws-crypto/sha256-browser/node_modules/@smithy/util-utf8/dist-es/fromUtf8.browser.js b/amplify/functions/deleteDocument/node_modules/@aws-crypto/sha256-browser/node_modules/@smithy/util-utf8/dist-es/fromUtf8.browser.js new file mode 100644 index 0000000..7344190 --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@aws-crypto/sha256-browser/node_modules/@smithy/util-utf8/dist-es/fromUtf8.browser.js @@ -0,0 +1 @@ +export const fromUtf8 = (input) => new TextEncoder().encode(input); diff --git a/amplify/functions/deleteDocument/node_modules/@aws-crypto/sha256-browser/node_modules/@smithy/util-utf8/dist-es/fromUtf8.js b/amplify/functions/deleteDocument/node_modules/@aws-crypto/sha256-browser/node_modules/@smithy/util-utf8/dist-es/fromUtf8.js new file mode 100644 index 0000000..6dc438b --- /dev/null +++ 
b/amplify/functions/deleteDocument/node_modules/@aws-crypto/sha256-browser/node_modules/@smithy/util-utf8/dist-es/fromUtf8.js @@ -0,0 +1,5 @@ +import { fromString } from "@smithy/util-buffer-from"; +export const fromUtf8 = (input) => { + const buf = fromString(input, "utf8"); + return new Uint8Array(buf.buffer, buf.byteOffset, buf.byteLength / Uint8Array.BYTES_PER_ELEMENT); +}; diff --git a/amplify/functions/deleteDocument/node_modules/@aws-crypto/sha256-browser/node_modules/@smithy/util-utf8/dist-es/index.js b/amplify/functions/deleteDocument/node_modules/@aws-crypto/sha256-browser/node_modules/@smithy/util-utf8/dist-es/index.js new file mode 100644 index 0000000..00ba465 --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@aws-crypto/sha256-browser/node_modules/@smithy/util-utf8/dist-es/index.js @@ -0,0 +1,3 @@ +export * from "./fromUtf8"; +export * from "./toUint8Array"; +export * from "./toUtf8"; diff --git a/amplify/functions/deleteDocument/node_modules/@aws-crypto/sha256-browser/node_modules/@smithy/util-utf8/dist-es/toUint8Array.js b/amplify/functions/deleteDocument/node_modules/@aws-crypto/sha256-browser/node_modules/@smithy/util-utf8/dist-es/toUint8Array.js new file mode 100644 index 0000000..2cd36f7 --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@aws-crypto/sha256-browser/node_modules/@smithy/util-utf8/dist-es/toUint8Array.js @@ -0,0 +1,10 @@ +import { fromUtf8 } from "./fromUtf8"; +export const toUint8Array = (data) => { + if (typeof data === "string") { + return fromUtf8(data); + } + if (ArrayBuffer.isView(data)) { + return new Uint8Array(data.buffer, data.byteOffset, data.byteLength / Uint8Array.BYTES_PER_ELEMENT); + } + return new Uint8Array(data); +}; diff --git a/amplify/functions/deleteDocument/node_modules/@aws-crypto/sha256-browser/node_modules/@smithy/util-utf8/dist-es/toUtf8.browser.js 
b/amplify/functions/deleteDocument/node_modules/@aws-crypto/sha256-browser/node_modules/@smithy/util-utf8/dist-es/toUtf8.browser.js new file mode 100644 index 0000000..c292127 --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@aws-crypto/sha256-browser/node_modules/@smithy/util-utf8/dist-es/toUtf8.browser.js @@ -0,0 +1,9 @@ +export const toUtf8 = (input) => { + if (typeof input === "string") { + return input; + } + if (typeof input !== "object" || typeof input.byteOffset !== "number" || typeof input.byteLength !== "number") { + throw new Error("@smithy/util-utf8: toUtf8 encoder function only accepts string | Uint8Array."); + } + return new TextDecoder("utf-8").decode(input); +}; diff --git a/amplify/functions/deleteDocument/node_modules/@aws-crypto/sha256-browser/node_modules/@smithy/util-utf8/dist-es/toUtf8.js b/amplify/functions/deleteDocument/node_modules/@aws-crypto/sha256-browser/node_modules/@smithy/util-utf8/dist-es/toUtf8.js new file mode 100644 index 0000000..7be8745 --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@aws-crypto/sha256-browser/node_modules/@smithy/util-utf8/dist-es/toUtf8.js @@ -0,0 +1,10 @@ +import { fromArrayBuffer } from "@smithy/util-buffer-from"; +export const toUtf8 = (input) => { + if (typeof input === "string") { + return input; + } + if (typeof input !== "object" || typeof input.byteOffset !== "number" || typeof input.byteLength !== "number") { + throw new Error("@smithy/util-utf8: toUtf8 encoder function only accepts string | Uint8Array."); + } + return fromArrayBuffer(input.buffer, input.byteOffset, input.byteLength).toString("utf8"); +}; diff --git a/amplify/functions/deleteDocument/node_modules/@aws-crypto/sha256-browser/node_modules/@smithy/util-utf8/dist-types/fromUtf8.browser.d.ts b/amplify/functions/deleteDocument/node_modules/@aws-crypto/sha256-browser/node_modules/@smithy/util-utf8/dist-types/fromUtf8.browser.d.ts new file mode 100644 index 0000000..dd91981 --- /dev/null +++ 
b/amplify/functions/deleteDocument/node_modules/@aws-crypto/sha256-browser/node_modules/@smithy/util-utf8/dist-types/fromUtf8.browser.d.ts @@ -0,0 +1 @@ +export declare const fromUtf8: (input: string) => Uint8Array; diff --git a/amplify/functions/deleteDocument/node_modules/@aws-crypto/sha256-browser/node_modules/@smithy/util-utf8/dist-types/fromUtf8.d.ts b/amplify/functions/deleteDocument/node_modules/@aws-crypto/sha256-browser/node_modules/@smithy/util-utf8/dist-types/fromUtf8.d.ts new file mode 100644 index 0000000..dd91981 --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@aws-crypto/sha256-browser/node_modules/@smithy/util-utf8/dist-types/fromUtf8.d.ts @@ -0,0 +1 @@ +export declare const fromUtf8: (input: string) => Uint8Array; diff --git a/amplify/functions/deleteDocument/node_modules/@aws-crypto/sha256-browser/node_modules/@smithy/util-utf8/dist-types/index.d.ts b/amplify/functions/deleteDocument/node_modules/@aws-crypto/sha256-browser/node_modules/@smithy/util-utf8/dist-types/index.d.ts new file mode 100644 index 0000000..00ba465 --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@aws-crypto/sha256-browser/node_modules/@smithy/util-utf8/dist-types/index.d.ts @@ -0,0 +1,3 @@ +export * from "./fromUtf8"; +export * from "./toUint8Array"; +export * from "./toUtf8"; diff --git a/amplify/functions/deleteDocument/node_modules/@aws-crypto/sha256-browser/node_modules/@smithy/util-utf8/dist-types/toUint8Array.d.ts b/amplify/functions/deleteDocument/node_modules/@aws-crypto/sha256-browser/node_modules/@smithy/util-utf8/dist-types/toUint8Array.d.ts new file mode 100644 index 0000000..11b6342 --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@aws-crypto/sha256-browser/node_modules/@smithy/util-utf8/dist-types/toUint8Array.d.ts @@ -0,0 +1 @@ +export declare const toUint8Array: (data: string | ArrayBuffer | ArrayBufferView) => Uint8Array; diff --git 
a/amplify/functions/deleteDocument/node_modules/@aws-crypto/sha256-browser/node_modules/@smithy/util-utf8/dist-types/toUtf8.browser.d.ts b/amplify/functions/deleteDocument/node_modules/@aws-crypto/sha256-browser/node_modules/@smithy/util-utf8/dist-types/toUtf8.browser.d.ts new file mode 100644 index 0000000..8494acd --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@aws-crypto/sha256-browser/node_modules/@smithy/util-utf8/dist-types/toUtf8.browser.d.ts @@ -0,0 +1,7 @@ +/** + * + * This does not convert non-utf8 strings to utf8, it only passes through strings if + * a string is received instead of a Uint8Array. + * + */ +export declare const toUtf8: (input: Uint8Array | string) => string; diff --git a/amplify/functions/deleteDocument/node_modules/@aws-crypto/sha256-browser/node_modules/@smithy/util-utf8/dist-types/toUtf8.d.ts b/amplify/functions/deleteDocument/node_modules/@aws-crypto/sha256-browser/node_modules/@smithy/util-utf8/dist-types/toUtf8.d.ts new file mode 100644 index 0000000..8494acd --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@aws-crypto/sha256-browser/node_modules/@smithy/util-utf8/dist-types/toUtf8.d.ts @@ -0,0 +1,7 @@ +/** + * + * This does not convert non-utf8 strings to utf8, it only passes through strings if + * a string is received instead of a Uint8Array. 
+ * + */ +export declare const toUtf8: (input: Uint8Array | string) => string; diff --git a/amplify/functions/deleteDocument/node_modules/@aws-crypto/sha256-browser/node_modules/@smithy/util-utf8/dist-types/ts3.4/fromUtf8.browser.d.ts b/amplify/functions/deleteDocument/node_modules/@aws-crypto/sha256-browser/node_modules/@smithy/util-utf8/dist-types/ts3.4/fromUtf8.browser.d.ts new file mode 100644 index 0000000..39f3d6d --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@aws-crypto/sha256-browser/node_modules/@smithy/util-utf8/dist-types/ts3.4/fromUtf8.browser.d.ts @@ -0,0 +1 @@ +export declare const fromUtf8: (input: string) => Uint8Array; diff --git a/amplify/functions/deleteDocument/node_modules/@aws-crypto/sha256-browser/node_modules/@smithy/util-utf8/dist-types/ts3.4/fromUtf8.d.ts b/amplify/functions/deleteDocument/node_modules/@aws-crypto/sha256-browser/node_modules/@smithy/util-utf8/dist-types/ts3.4/fromUtf8.d.ts new file mode 100644 index 0000000..39f3d6d --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@aws-crypto/sha256-browser/node_modules/@smithy/util-utf8/dist-types/ts3.4/fromUtf8.d.ts @@ -0,0 +1 @@ +export declare const fromUtf8: (input: string) => Uint8Array; diff --git a/amplify/functions/deleteDocument/node_modules/@aws-crypto/sha256-browser/node_modules/@smithy/util-utf8/dist-types/ts3.4/index.d.ts b/amplify/functions/deleteDocument/node_modules/@aws-crypto/sha256-browser/node_modules/@smithy/util-utf8/dist-types/ts3.4/index.d.ts new file mode 100644 index 0000000..ef9761d --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@aws-crypto/sha256-browser/node_modules/@smithy/util-utf8/dist-types/ts3.4/index.d.ts @@ -0,0 +1,3 @@ +export * from "./fromUtf8"; +export * from "./toUint8Array"; +export * from "./toUtf8"; diff --git a/amplify/functions/deleteDocument/node_modules/@aws-crypto/sha256-browser/node_modules/@smithy/util-utf8/dist-types/ts3.4/toUint8Array.d.ts 
b/amplify/functions/deleteDocument/node_modules/@aws-crypto/sha256-browser/node_modules/@smithy/util-utf8/dist-types/ts3.4/toUint8Array.d.ts new file mode 100644 index 0000000..562fe10 --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@aws-crypto/sha256-browser/node_modules/@smithy/util-utf8/dist-types/ts3.4/toUint8Array.d.ts @@ -0,0 +1 @@ +export declare const toUint8Array: (data: string | ArrayBuffer | ArrayBufferView) => Uint8Array; diff --git a/amplify/functions/deleteDocument/node_modules/@aws-crypto/sha256-browser/node_modules/@smithy/util-utf8/dist-types/ts3.4/toUtf8.browser.d.ts b/amplify/functions/deleteDocument/node_modules/@aws-crypto/sha256-browser/node_modules/@smithy/util-utf8/dist-types/ts3.4/toUtf8.browser.d.ts new file mode 100644 index 0000000..33511ad --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@aws-crypto/sha256-browser/node_modules/@smithy/util-utf8/dist-types/ts3.4/toUtf8.browser.d.ts @@ -0,0 +1,7 @@ +/** + * + * This does not convert non-utf8 strings to utf8, it only passes through strings if + * a string is received instead of a Uint8Array. + * + */ +export declare const toUtf8: (input: Uint8Array | string) => string; diff --git a/amplify/functions/deleteDocument/node_modules/@aws-crypto/sha256-browser/node_modules/@smithy/util-utf8/dist-types/ts3.4/toUtf8.d.ts b/amplify/functions/deleteDocument/node_modules/@aws-crypto/sha256-browser/node_modules/@smithy/util-utf8/dist-types/ts3.4/toUtf8.d.ts new file mode 100644 index 0000000..33511ad --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@aws-crypto/sha256-browser/node_modules/@smithy/util-utf8/dist-types/ts3.4/toUtf8.d.ts @@ -0,0 +1,7 @@ +/** + * + * This does not convert non-utf8 strings to utf8, it only passes through strings if + * a string is received instead of a Uint8Array. 
+ * + */ +export declare const toUtf8: (input: Uint8Array | string) => string; diff --git a/amplify/functions/deleteDocument/node_modules/@aws-crypto/sha256-browser/node_modules/@smithy/util-utf8/package.json b/amplify/functions/deleteDocument/node_modules/@aws-crypto/sha256-browser/node_modules/@smithy/util-utf8/package.json new file mode 100644 index 0000000..78bfb4d --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@aws-crypto/sha256-browser/node_modules/@smithy/util-utf8/package.json @@ -0,0 +1,66 @@ +{ + "name": "@smithy/util-utf8", + "version": "2.3.0", + "description": "A UTF-8 string <-> UInt8Array converter", + "main": "./dist-cjs/index.js", + "module": "./dist-es/index.js", + "scripts": { + "build": "concurrently 'yarn:build:cjs' 'yarn:build:es' 'yarn:build:types && yarn build:types:downlevel'", + "build:cjs": "node ../../scripts/inline util-utf8", + "build:es": "yarn g:tsc -p tsconfig.es.json", + "build:types": "yarn g:tsc -p tsconfig.types.json", + "build:types:downlevel": "downlevel-dts dist-types dist-types/ts3.4", + "stage-release": "rimraf ./.release && yarn pack && mkdir ./.release && tar zxvf ./package.tgz --directory ./.release && rm ./package.tgz", + "clean": "rimraf ./dist-* && rimraf *.tsbuildinfo || exit 0", + "lint": "eslint -c ../../.eslintrc.js \"src/**/*.ts\"", + "format": "prettier --config ../../prettier.config.js --ignore-path ../.prettierignore --write \"**/*.{ts,md,json}\"", + "test": "yarn g:jest" + }, + "author": { + "name": "AWS SDK for JavaScript Team", + "url": "https://aws.amazon.com/javascript/" + }, + "license": "Apache-2.0", + "dependencies": { + "@smithy/util-buffer-from": "^2.2.0", + "tslib": "^2.6.2" + }, + "devDependencies": { + "@tsconfig/recommended": "1.0.1", + "concurrently": "7.0.0", + "downlevel-dts": "0.10.1", + "rimraf": "3.0.2", + "typedoc": "0.23.23" + }, + "types": "./dist-types/index.d.ts", + "engines": { + "node": ">=14.0.0" + }, + "typesVersions": { + "<4.0": { + "dist-types/*": [ + 
"dist-types/ts3.4/*" + ] + } + }, + "files": [ + "dist-*/**" + ], + "browser": { + "./dist-es/fromUtf8": "./dist-es/fromUtf8.browser", + "./dist-es/toUtf8": "./dist-es/toUtf8.browser" + }, + "react-native": {}, + "homepage": "https://github.com/awslabs/smithy-typescript/tree/main/packages/util-utf8", + "repository": { + "type": "git", + "url": "https://github.com/awslabs/smithy-typescript.git", + "directory": "packages/util-utf8" + }, + "typedoc": { + "entryPoint": "src/index.ts" + }, + "publishConfig": { + "directory": ".release/package" + } +} \ No newline at end of file diff --git a/amplify/functions/deleteDocument/node_modules/@aws-crypto/sha256-browser/package.json b/amplify/functions/deleteDocument/node_modules/@aws-crypto/sha256-browser/package.json new file mode 100644 index 0000000..2688ecf --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@aws-crypto/sha256-browser/package.json @@ -0,0 +1,33 @@ +{ + "name": "@aws-crypto/sha256-browser", + "version": "5.2.0", + "scripts": { + "prepublishOnly": "tsc -p tsconfig.json && tsc -p tsconfig.module.json", + "pretest": "tsc -p tsconfig.test.json", + "test": "mocha --require ts-node/register test/**/*test.ts" + }, + "repository": { + "type": "git", + "url": "git@github.com:aws/aws-sdk-js-crypto-helpers.git" + }, + "author": { + "name": "AWS Crypto Tools Team", + "email": "aws-cryptools@amazon.com", + "url": "https://docs.aws.amazon.com/aws-crypto-tools/index.html?id=docs_gateway#lang/en_us" + }, + "homepage": "https://github.com/aws/aws-sdk-js-crypto-helpers/tree/master/packages/sha256-browser", + "license": "Apache-2.0", + "dependencies": { + "@aws-crypto/sha256-js": "^5.2.0", + "@aws-crypto/supports-web-crypto": "^5.2.0", + "@aws-crypto/util": "^5.2.0", + "@aws-sdk/types": "^3.222.0", + "@aws-sdk/util-locate-window": "^3.0.0", + "@smithy/util-utf8": "^2.0.0", + "tslib": "^2.6.2" + }, + "main": "./build/main/index.js", + "module": "./build/module/index.js", + "types": "./build/main/index.d.ts", + 
"gitHead": "c11b171b35ec5c093364f0e0d8dc4ab1af68e748" +} diff --git a/amplify/functions/deleteDocument/node_modules/@aws-crypto/sha256-browser/src/constants.ts b/amplify/functions/deleteDocument/node_modules/@aws-crypto/sha256-browser/src/constants.ts new file mode 100644 index 0000000..7f68e2a --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@aws-crypto/sha256-browser/src/constants.ts @@ -0,0 +1,41 @@ +export const SHA_256_HASH: { name: "SHA-256" } = { name: "SHA-256" }; + +export const SHA_256_HMAC_ALGO: { name: "HMAC"; hash: { name: "SHA-256" } } = { + name: "HMAC", + hash: SHA_256_HASH +}; + +export const EMPTY_DATA_SHA_256 = new Uint8Array([ + 227, + 176, + 196, + 66, + 152, + 252, + 28, + 20, + 154, + 251, + 244, + 200, + 153, + 111, + 185, + 36, + 39, + 174, + 65, + 228, + 100, + 155, + 147, + 76, + 164, + 149, + 153, + 27, + 120, + 82, + 184, + 85 +]); diff --git a/amplify/functions/deleteDocument/node_modules/@aws-crypto/sha256-browser/src/crossPlatformSha256.ts b/amplify/functions/deleteDocument/node_modules/@aws-crypto/sha256-browser/src/crossPlatformSha256.ts new file mode 100644 index 0000000..8cb9ff0 --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@aws-crypto/sha256-browser/src/crossPlatformSha256.ts @@ -0,0 +1,30 @@ +import { Sha256 as WebCryptoSha256 } from "./webCryptoSha256"; +import { Sha256 as JsSha256 } from "@aws-crypto/sha256-js"; +import { Checksum, SourceData } from "@aws-sdk/types"; +import { supportsWebCrypto } from "@aws-crypto/supports-web-crypto"; +import { locateWindow } from "@aws-sdk/util-locate-window"; +import { convertToBuffer } from "@aws-crypto/util"; + +export class Sha256 implements Checksum { + private hash: Checksum; + + constructor(secret?: SourceData) { + if (supportsWebCrypto(locateWindow())) { + this.hash = new WebCryptoSha256(secret); + } else { + this.hash = new JsSha256(secret); + } + } + + update(data: SourceData, encoding?: "utf8" | "ascii" | "latin1"): void { + 
this.hash.update(convertToBuffer(data)); + } + + digest(): Promise { + return this.hash.digest(); + } + + reset(): void { + this.hash.reset(); + } +} diff --git a/amplify/functions/deleteDocument/node_modules/@aws-crypto/sha256-browser/src/index.ts b/amplify/functions/deleteDocument/node_modules/@aws-crypto/sha256-browser/src/index.ts new file mode 100644 index 0000000..60ab397 --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@aws-crypto/sha256-browser/src/index.ts @@ -0,0 +1,2 @@ +export * from "./crossPlatformSha256"; +export { Sha256 as WebCryptoSha256 } from "./webCryptoSha256"; diff --git a/amplify/functions/deleteDocument/node_modules/@aws-crypto/sha256-browser/src/isEmptyData.ts b/amplify/functions/deleteDocument/node_modules/@aws-crypto/sha256-browser/src/isEmptyData.ts new file mode 100644 index 0000000..538971f --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@aws-crypto/sha256-browser/src/isEmptyData.ts @@ -0,0 +1,9 @@ +import { SourceData } from "@aws-sdk/types"; + +export function isEmptyData(data: SourceData): boolean { + if (typeof data === "string") { + return data.length === 0; + } + + return data.byteLength === 0; +} diff --git a/amplify/functions/deleteDocument/node_modules/@aws-crypto/sha256-browser/src/webCryptoSha256.ts b/amplify/functions/deleteDocument/node_modules/@aws-crypto/sha256-browser/src/webCryptoSha256.ts new file mode 100644 index 0000000..fe4db57 --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@aws-crypto/sha256-browser/src/webCryptoSha256.ts @@ -0,0 +1,71 @@ +import { Checksum, SourceData } from "@aws-sdk/types"; +import { isEmptyData, convertToBuffer } from "@aws-crypto/util"; +import { + EMPTY_DATA_SHA_256, + SHA_256_HASH, + SHA_256_HMAC_ALGO, +} from "./constants"; +import { locateWindow } from "@aws-sdk/util-locate-window"; + +export class Sha256 implements Checksum { + private readonly secret?: SourceData; + private key: Promise | undefined; + private toHash: Uint8Array = 
new Uint8Array(0); + + constructor(secret?: SourceData) { + this.secret = secret; + this.reset(); + } + + update(data: SourceData): void { + if (isEmptyData(data)) { + return; + } + + const update = convertToBuffer(data); + const typedArray = new Uint8Array( + this.toHash.byteLength + update.byteLength + ); + typedArray.set(this.toHash, 0); + typedArray.set(update, this.toHash.byteLength); + this.toHash = typedArray; + } + + digest(): Promise { + if (this.key) { + return this.key.then((key) => + locateWindow() + .crypto.subtle.sign(SHA_256_HMAC_ALGO, key, this.toHash) + .then((data) => new Uint8Array(data)) + ); + } + + if (isEmptyData(this.toHash)) { + return Promise.resolve(EMPTY_DATA_SHA_256); + } + + return Promise.resolve() + .then(() => + locateWindow().crypto.subtle.digest(SHA_256_HASH, this.toHash) + ) + .then((data) => Promise.resolve(new Uint8Array(data))); + } + + reset(): void { + this.toHash = new Uint8Array(0); + if (this.secret && this.secret !== void 0) { + this.key = new Promise((resolve, reject) => { + locateWindow() + .crypto.subtle.importKey( + "raw", + convertToBuffer(this.secret as SourceData), + SHA_256_HMAC_ALGO, + false, + ["sign"] + ) + .then(resolve, reject); + }); + this.key.catch(() => {}); + } + } +} diff --git a/amplify/functions/deleteDocument/node_modules/@aws-crypto/sha256-browser/tsconfig.json b/amplify/functions/deleteDocument/node_modules/@aws-crypto/sha256-browser/tsconfig.json new file mode 100644 index 0000000..fb9aa95 --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@aws-crypto/sha256-browser/tsconfig.json @@ -0,0 +1,10 @@ +{ + "extends": "../tsconfig.json", + "compilerOptions": { + "rootDir": "./src", + "outDir": "./build/main", + "lib": ["dom"], + }, + "include": ["src/**/*.ts"], + "exclude": ["node_modules/**"] +} diff --git a/amplify/functions/deleteDocument/node_modules/@aws-crypto/sha256-browser/tsconfig.module.json 
b/amplify/functions/deleteDocument/node_modules/@aws-crypto/sha256-browser/tsconfig.module.json new file mode 100644 index 0000000..7d0cfdd --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@aws-crypto/sha256-browser/tsconfig.module.json @@ -0,0 +1,7 @@ +{ + "extends": "./tsconfig", + "compilerOptions": { + "outDir": "build/module", + "module": "esnext", + } +} diff --git a/amplify/functions/deleteDocument/node_modules/@aws-crypto/sha256-js/CHANGELOG.md b/amplify/functions/deleteDocument/node_modules/@aws-crypto/sha256-js/CHANGELOG.md new file mode 100644 index 0000000..97c1f60 --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@aws-crypto/sha256-js/CHANGELOG.md @@ -0,0 +1,106 @@ +# Change Log + +All notable changes to this project will be documented in this file. +See [Conventional Commits](https://conventionalcommits.org) for commit guidelines. + +# [5.2.0](https://github.com/aws/aws-sdk-js-crypto-helpers/compare/v5.1.0...v5.2.0) (2023-10-16) + +### Features + +- support ESM artifacts in all packages ([#752](https://github.com/aws/aws-sdk-js-crypto-helpers/issues/752)) ([e930ffb](https://github.com/aws/aws-sdk-js-crypto-helpers/commit/e930ffba5cfef66dd242049e7d514ced232c1e3b)) + +# [5.1.0](https://github.com/aws/aws-sdk-js-crypto-helpers/compare/v5.0.0...v5.1.0) (2023-09-22) + +### Bug Fixes + +- Update tsc to 2.x ([#735](https://github.com/aws/aws-sdk-js-crypto-helpers/issues/735)) ([782e0de](https://github.com/aws/aws-sdk-js-crypto-helpers/commit/782e0de9f5fef41f694130580a69d940894b6b8c)) + +# [5.0.0](https://github.com/aws/aws-sdk-js-crypto-helpers/compare/v4.0.1...v5.0.0) (2023-07-13) + +**Note:** Version bump only for package @aws-crypto/sha256-js + +# [4.0.0](https://github.com/aws/aws-sdk-js-crypto-helpers/compare/v3.0.0...v4.0.0) (2023-02-20) + +**Note:** Version bump only for package @aws-crypto/sha256-js + +# [3.0.0](https://github.com/aws/aws-sdk-js-crypto-helpers/compare/v2.0.2...v3.0.0) (2023-01-12) + +### Bug Fixes + 
+- **docs:** sha256 packages, clarify hmac support ([#455](https://github.com/aws/aws-sdk-js-crypto-helpers/issues/455)) ([1be5043](https://github.com/aws/aws-sdk-js-crypto-helpers/commit/1be5043325991f3f5ccb52a8dd928f004b4d442e)) + +- feat!: replace Hash implementations with Checksum interface (#492) ([da43dc0](https://github.com/aws/aws-sdk-js-crypto-helpers/commit/da43dc0fdf669d9ebb5bfb1b1f7c79e46c4aaae1)), closes [#492](https://github.com/aws/aws-sdk-js-crypto-helpers/issues/492) + +### BREAKING CHANGES + +- All classes that implemented `Hash` now implement `Checksum`. + +## [2.0.2](https://github.com/aws/aws-sdk-js-crypto-helpers/compare/v2.0.1...v2.0.2) (2022-09-07) + +### Bug Fixes + +- **#337:** update @aws-sdk/types ([#373](https://github.com/aws/aws-sdk-js-crypto-helpers/issues/373)) ([b26a811](https://github.com/aws/aws-sdk-js-crypto-helpers/commit/b26a811a392f5209c7ec7e57251500d4d78f97ff)), closes [#337](https://github.com/aws/aws-sdk-js-crypto-helpers/issues/337) + +## [2.0.1](https://github.com/aws/aws-sdk-js-crypto-helpers/compare/v2.0.0...v2.0.1) (2021-12-09) + +**Note:** Version bump only for package @aws-crypto/sha256-js + +# [2.0.0](https://github.com/aws/aws-sdk-js-crypto-helpers/compare/v1.2.2...v2.0.0) (2021-10-25) + +**Note:** Version bump only for package @aws-crypto/sha256-js + +## [1.2.2](https://github.com/aws/aws-sdk-js-crypto-helpers/compare/v1.2.1...v1.2.2) (2021-10-12) + +**Note:** Version bump only for package @aws-crypto/sha256-js + +## [1.2.1](https://github.com/aws/aws-sdk-js-crypto-helpers/compare/v1.2.0...v1.2.1) (2021-09-17) + +**Note:** Version bump only for package @aws-crypto/sha256-js + +# [1.2.0](https://github.com/aws/aws-sdk-js-crypto-helpers/compare/v1.1.1...v1.2.0) (2021-09-17) + +### Features + +- add @aws-crypto/util ([8f489cb](https://github.com/aws/aws-sdk-js-crypto-helpers/commit/8f489cbe4c0e134f826bac66f1bf5172597048b9)) + +# 
[1.1.0](https://github.com/aws/aws-sdk-js-crypto-helpers/compare/@aws-crypto/sha256-js@1.0.0...@aws-crypto/sha256-js@1.1.0) (2021-01-13) + +### Bug Fixes + +- remove package lock ([6002a5a](https://github.com/aws/aws-sdk-js-crypto-helpers/commit/6002a5ab9218dc8798c19dc205d3eebd3bec5b43)) +- **aws-crypto:** export explicit dependencies on [@aws-types](https://github.com/aws-types) ([6a1873a](https://github.com/aws/aws-sdk-js-crypto-helpers/commit/6a1873a4dcc2aaa4a1338595703cfa7099f17b8c)) +- **deps-dev:** move @aws-sdk/types to devDependencies ([#188](https://github.com/aws/aws-sdk-js-crypto-helpers/issues/188)) ([08efdf4](https://github.com/aws/aws-sdk-js-crypto-helpers/commit/08efdf46dcc612d88c441e29945d787f253ee77d)) + +# [1.0.0](https://github.com/aws/aws-sdk-js-crypto-helpers/compare/@aws-crypto/sha256-js@1.0.0-alpha.0...@aws-crypto/sha256-js@1.0.0) (2020-10-22) + +**Note:** Version bump only for package @aws-crypto/sha256-js + +# [1.0.0-alpha.0](https://github.com/aws/aws-sdk-js-crypto-helpers/compare/@aws-crypto/sha256-js@0.1.0-preview.4...@aws-crypto/sha256-js@1.0.0-alpha.0) (2020-02-07) + +**Note:** Version bump only for package @aws-crypto/sha256-js + +# [0.1.0-preview.4](https://github.com/aws/aws-sdk-js-crypto-helpers/compare/@aws-crypto/sha256-js@0.1.0-preview.2...@aws-crypto/sha256-js@0.1.0-preview.4) (2020-01-16) + +### Bug Fixes + +- Changed package.json files to point to the right Git repo ([#9](https://github.com/aws/aws-sdk-js-crypto-helpers/issues/9)) ([028245d](https://github.com/aws/aws-sdk-js-crypto-helpers/commit/028245d72e642ca98d82226afb300eb154503c4a)), closes [#8](https://github.com/aws/aws-sdk-js-crypto-helpers/issues/8) +- es2015.iterable required ([#10](https://github.com/aws/aws-sdk-js-crypto-helpers/issues/10)) ([6e08d83](https://github.com/aws/aws-sdk-js-crypto-helpers/commit/6e08d83c33667ad8cbeeaaa7cedf1bbe05f79ed8)) +- lerna version maintains package-lock ([#14](https://github.com/aws/aws-sdk-js-crypto-helpers/issues/14)) 
([2ef29e1](https://github.com/aws/aws-sdk-js-crypto-helpers/commit/2ef29e13779703a5c9b32e93d18918fcb33b7272)), closes [#13](https://github.com/aws/aws-sdk-js-crypto-helpers/issues/13) + +# [0.1.0-preview.3](https://github.com/aws/aws-sdk-js-crypto-helpers/compare/@aws-crypto/sha256-js@0.1.0-preview.2...@aws-crypto/sha256-js@0.1.0-preview.3) (2019-11-15) + +### Bug Fixes + +- Changed package.json files to point to the right Git repo ([#9](https://github.com/aws/aws-sdk-js-crypto-helpers/issues/9)) ([028245d](https://github.com/aws/aws-sdk-js-crypto-helpers/commit/028245d72e642ca98d82226afb300eb154503c4a)), closes [#8](https://github.com/aws/aws-sdk-js-crypto-helpers/issues/8) +- es2015.iterable required ([#10](https://github.com/aws/aws-sdk-js-crypto-helpers/issues/10)) ([6e08d83](https://github.com/aws/aws-sdk-js-crypto-helpers/commit/6e08d83c33667ad8cbeeaaa7cedf1bbe05f79ed8)) +- lerna version maintains package-lock ([#14](https://github.com/aws/aws-sdk-js-crypto-helpers/issues/14)) ([2ef29e1](https://github.com/aws/aws-sdk-js-crypto-helpers/commit/2ef29e13779703a5c9b32e93d18918fcb33b7272)), closes [#13](https://github.com/aws/aws-sdk-js-crypto-helpers/issues/13) + +# [0.1.0-preview.2](https://github.com/aws/aws-javascript-crypto-helpers/compare/@aws-crypto/sha256-js@0.1.0-preview.1...@aws-crypto/sha256-js@0.1.0-preview.2) (2019-10-30) + +### Bug Fixes + +- remove /src/ from .npmignore (for sourcemaps) ([#5](https://github.com/aws/aws-javascript-crypto-helpers/issues/5)) ([ec52056](https://github.com/aws/aws-javascript-crypto-helpers/commit/ec52056)) + +### Features + +- **sha256-js:** expose synchronous digest ([#7](https://github.com/aws/aws-javascript-crypto-helpers/issues/7)) ([9edaef7](https://github.com/aws/aws-javascript-crypto-helpers/commit/9edaef7)), closes [#6](https://github.com/aws/aws-javascript-crypto-helpers/issues/6) diff --git a/amplify/functions/deleteDocument/node_modules/@aws-crypto/sha256-js/LICENSE 
b/amplify/functions/deleteDocument/node_modules/@aws-crypto/sha256-js/LICENSE new file mode 100644 index 0000000..ad410e1 --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@aws-crypto/sha256-js/LICENSE @@ -0,0 +1,201 @@ +Apache License + Version 2.0, January 2004 + http://www.apache.org/licenses/ + + TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION + + 1. Definitions. + + "License" shall mean the terms and conditions for use, reproduction, + and distribution as defined by Sections 1 through 9 of this document. + + "Licensor" shall mean the copyright owner or entity authorized by + the copyright owner that is granting the License. + + "Legal Entity" shall mean the union of the acting entity and all + other entities that control, are controlled by, or are under common + control with that entity. For the purposes of this definition, + "control" means (i) the power, direct or indirect, to cause the + direction or management of such entity, whether by contract or + otherwise, or (ii) ownership of fifty percent (50%) or more of the + outstanding shares, or (iii) beneficial ownership of such entity. + + "You" (or "Your") shall mean an individual or Legal Entity + exercising permissions granted by this License. + + "Source" form shall mean the preferred form for making modifications, + including but not limited to software source code, documentation + source, and configuration files. + + "Object" form shall mean any form resulting from mechanical + transformation or translation of a Source form, including but + not limited to compiled object code, generated documentation, + and conversions to other media types. + + "Work" shall mean the work of authorship, whether in Source or + Object form, made available under the License, as indicated by a + copyright notice that is included in or attached to the work + (an example is provided in the Appendix below). 
+ + "Derivative Works" shall mean any work, whether in Source or Object + form, that is based on (or derived from) the Work and for which the + editorial revisions, annotations, elaborations, or other modifications + represent, as a whole, an original work of authorship. For the purposes + of this License, Derivative Works shall not include works that remain + separable from, or merely link (or bind by name) to the interfaces of, + the Work and Derivative Works thereof. + + "Contribution" shall mean any work of authorship, including + the original version of the Work and any modifications or additions + to that Work or Derivative Works thereof, that is intentionally + submitted to Licensor for inclusion in the Work by the copyright owner + or by an individual or Legal Entity authorized to submit on behalf of + the copyright owner. For the purposes of this definition, "submitted" + means any form of electronic, verbal, or written communication sent + to the Licensor or its representatives, including but not limited to + communication on electronic mailing lists, source code control systems, + and issue tracking systems that are managed by, or on behalf of, the + Licensor for the purpose of discussing and improving the Work, but + excluding communication that is conspicuously marked or otherwise + designated in writing by the copyright owner as "Not a Contribution." + + "Contributor" shall mean Licensor and any individual or Legal Entity + on behalf of whom a Contribution has been received by Licensor and + subsequently incorporated within the Work. + + 2. Grant of Copyright License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + copyright license to reproduce, prepare Derivative Works of, + publicly display, publicly perform, sublicense, and distribute the + Work and such Derivative Works in Source or Object form. + + 3. 
Grant of Patent License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + (except as stated in this section) patent license to make, have made, + use, offer to sell, sell, import, and otherwise transfer the Work, + where such license applies only to those patent claims licensable + by such Contributor that are necessarily infringed by their + Contribution(s) alone or by combination of their Contribution(s) + with the Work to which such Contribution(s) was submitted. If You + institute patent litigation against any entity (including a + cross-claim or counterclaim in a lawsuit) alleging that the Work + or a Contribution incorporated within the Work constitutes direct + or contributory patent infringement, then any patent licenses + granted to You under this License for that Work shall terminate + as of the date such litigation is filed. + + 4. Redistribution. You may reproduce and distribute copies of the + Work or Derivative Works thereof in any medium, with or without + modifications, and in Source or Object form, provided that You + meet the following conditions: + + (a) You must give any other recipients of the Work or + Derivative Works a copy of this License; and + + (b) You must cause any modified files to carry prominent notices + stating that You changed the files; and + + (c) You must retain, in the Source form of any Derivative Works + that You distribute, all copyright, patent, trademark, and + attribution notices from the Source form of the Work, + excluding those notices that do not pertain to any part of + the Derivative Works; and + + (d) If the Work includes a "NOTICE" text file as part of its + distribution, then any Derivative Works that You distribute must + include a readable copy of the attribution notices contained + within such NOTICE file, excluding those notices that do not + pertain to any part of the Derivative 
Works, in at least one + of the following places: within a NOTICE text file distributed + as part of the Derivative Works; within the Source form or + documentation, if provided along with the Derivative Works; or, + within a display generated by the Derivative Works, if and + wherever such third-party notices normally appear. The contents + of the NOTICE file are for informational purposes only and + do not modify the License. You may add Your own attribution + notices within Derivative Works that You distribute, alongside + or as an addendum to the NOTICE text from the Work, provided + that such additional attribution notices cannot be construed + as modifying the License. + + You may add Your own copyright statement to Your modifications and + may provide additional or different license terms and conditions + for use, reproduction, or distribution of Your modifications, or + for any such Derivative Works as a whole, provided Your use, + reproduction, and distribution of the Work otherwise complies with + the conditions stated in this License. + + 5. Submission of Contributions. Unless You explicitly state otherwise, + any Contribution intentionally submitted for inclusion in the Work + by You to the Licensor shall be under the terms and conditions of + this License, without any additional terms or conditions. + Notwithstanding the above, nothing herein shall supersede or modify + the terms of any separate license agreement you may have executed + with Licensor regarding such Contributions. + + 6. Trademarks. This License does not grant permission to use the trade + names, trademarks, service marks, or product names of the Licensor, + except as required for reasonable and customary use in describing the + origin of the Work and reproducing the content of the NOTICE file. + + 7. Disclaimer of Warranty. 
Unless required by applicable law or + agreed to in writing, Licensor provides the Work (and each + Contributor provides its Contributions) on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or + implied, including, without limitation, any warranties or conditions + of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A + PARTICULAR PURPOSE. You are solely responsible for determining the + appropriateness of using or redistributing the Work and assume any + risks associated with Your exercise of permissions under this License. + + 8. Limitation of Liability. In no event and under no legal theory, + whether in tort (including negligence), contract, or otherwise, + unless required by applicable law (such as deliberate and grossly + negligent acts) or agreed to in writing, shall any Contributor be + liable to You for damages, including any direct, indirect, special, + incidental, or consequential damages of any character arising as a + result of this License or out of the use or inability to use the + Work (including but not limited to damages for loss of goodwill, + work stoppage, computer failure or malfunction, or any and all + other commercial damages or losses), even if such Contributor + has been advised of the possibility of such damages. + + 9. Accepting Warranty or Additional Liability. While redistributing + the Work or Derivative Works thereof, You may choose to offer, + and charge a fee for, acceptance of support, warranty, indemnity, + or other liability obligations and/or rights consistent with this + License. However, in accepting such obligations, You may act only + on Your own behalf and on Your sole responsibility, not on behalf + of any other Contributor, and only if You agree to indemnify, + defend, and hold each Contributor harmless for any liability + incurred by, or claims asserted against, such Contributor by reason + of your accepting any such warranty or additional liability. 
+ + END OF TERMS AND CONDITIONS + + APPENDIX: How to apply the Apache License to your work. + + To apply the Apache License to your work, attach the following + boilerplate notice, with the fields enclosed by brackets "{}" + replaced with your own identifying information. (Don't include + the brackets!) The text should be enclosed in the appropriate + comment syntax for the file format. We also recommend that a + file or class name and description of purpose be included on the + same "printed page" as the copyright notice for easier + identification within third-party archives. + + Copyright {yyyy} {name of copyright owner} + + Licensed under the Apache License, Version 2.0 (the "License"); + you may not use this file except in compliance with the License. + You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + + Unless required by applicable law or agreed to in writing, software + distributed under the License is distributed on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + See the License for the specific language governing permissions and + limitations under the License. \ No newline at end of file diff --git a/amplify/functions/deleteDocument/node_modules/@aws-crypto/sha256-js/README.md b/amplify/functions/deleteDocument/node_modules/@aws-crypto/sha256-js/README.md new file mode 100644 index 0000000..f769f5b --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@aws-crypto/sha256-js/README.md @@ -0,0 +1,29 @@ +# crypto-sha256-js + +A pure JS implementation SHA256. 
+ +## Usage + +- To hash "some data" +``` +import {Sha256} from '@aws-crypto/sha256-js'; + +const hash = new Sha256(); +hash.update('some data'); +const result = await hash.digest(); + +``` + +- To hmac "some data" with "a key" +``` +import {Sha256} from '@aws-crypto/sha256-js'; + +const hash = new Sha256('a key'); +hash.update('some data'); +const result = await hash.digest(); + +``` + +## Test + +`npm test` diff --git a/amplify/functions/deleteDocument/node_modules/@aws-crypto/sha256-js/build/main/RawSha256.d.ts b/amplify/functions/deleteDocument/node_modules/@aws-crypto/sha256-js/build/main/RawSha256.d.ts new file mode 100644 index 0000000..1f580b2 --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@aws-crypto/sha256-js/build/main/RawSha256.d.ts @@ -0,0 +1,17 @@ +/** + * @internal + */ +export declare class RawSha256 { + private state; + private temp; + private buffer; + private bufferLength; + private bytesHashed; + /** + * @internal + */ + finished: boolean; + update(data: Uint8Array): void; + digest(): Uint8Array; + private hashBuffer; +} diff --git a/amplify/functions/deleteDocument/node_modules/@aws-crypto/sha256-js/build/main/RawSha256.js b/amplify/functions/deleteDocument/node_modules/@aws-crypto/sha256-js/build/main/RawSha256.js new file mode 100644 index 0000000..68ceacc --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@aws-crypto/sha256-js/build/main/RawSha256.js @@ -0,0 +1,124 @@ +"use strict"; +Object.defineProperty(exports, "__esModule", { value: true }); +exports.RawSha256 = void 0; +var constants_1 = require("./constants"); +/** + * @internal + */ +var RawSha256 = /** @class */ (function () { + function RawSha256() { + this.state = Int32Array.from(constants_1.INIT); + this.temp = new Int32Array(64); + this.buffer = new Uint8Array(64); + this.bufferLength = 0; + this.bytesHashed = 0; + /** + * @internal + */ + this.finished = false; + } + RawSha256.prototype.update = function (data) { + if (this.finished) { + throw 
new Error("Attempted to update an already finished hash."); + } + var position = 0; + var byteLength = data.byteLength; + this.bytesHashed += byteLength; + if (this.bytesHashed * 8 > constants_1.MAX_HASHABLE_LENGTH) { + throw new Error("Cannot hash more than 2^53 - 1 bits"); + } + while (byteLength > 0) { + this.buffer[this.bufferLength++] = data[position++]; + byteLength--; + if (this.bufferLength === constants_1.BLOCK_SIZE) { + this.hashBuffer(); + this.bufferLength = 0; + } + } + }; + RawSha256.prototype.digest = function () { + if (!this.finished) { + var bitsHashed = this.bytesHashed * 8; + var bufferView = new DataView(this.buffer.buffer, this.buffer.byteOffset, this.buffer.byteLength); + var undecoratedLength = this.bufferLength; + bufferView.setUint8(this.bufferLength++, 0x80); + // Ensure the final block has enough room for the hashed length + if (undecoratedLength % constants_1.BLOCK_SIZE >= constants_1.BLOCK_SIZE - 8) { + for (var i = this.bufferLength; i < constants_1.BLOCK_SIZE; i++) { + bufferView.setUint8(i, 0); + } + this.hashBuffer(); + this.bufferLength = 0; + } + for (var i = this.bufferLength; i < constants_1.BLOCK_SIZE - 8; i++) { + bufferView.setUint8(i, 0); + } + bufferView.setUint32(constants_1.BLOCK_SIZE - 8, Math.floor(bitsHashed / 0x100000000), true); + bufferView.setUint32(constants_1.BLOCK_SIZE - 4, bitsHashed); + this.hashBuffer(); + this.finished = true; + } + // The value in state is little-endian rather than big-endian, so flip + // each word into a new Uint8Array + var out = new Uint8Array(constants_1.DIGEST_LENGTH); + for (var i = 0; i < 8; i++) { + out[i * 4] = (this.state[i] >>> 24) & 0xff; + out[i * 4 + 1] = (this.state[i] >>> 16) & 0xff; + out[i * 4 + 2] = (this.state[i] >>> 8) & 0xff; + out[i * 4 + 3] = (this.state[i] >>> 0) & 0xff; + } + return out; + }; + RawSha256.prototype.hashBuffer = function () { + var _a = this, buffer = _a.buffer, state = _a.state; + var state0 = state[0], state1 = state[1], state2 = state[2], state3 
= state[3], state4 = state[4], state5 = state[5], state6 = state[6], state7 = state[7]; + for (var i = 0; i < constants_1.BLOCK_SIZE; i++) { + if (i < 16) { + this.temp[i] = + ((buffer[i * 4] & 0xff) << 24) | + ((buffer[i * 4 + 1] & 0xff) << 16) | + ((buffer[i * 4 + 2] & 0xff) << 8) | + (buffer[i * 4 + 3] & 0xff); + } + else { + var u = this.temp[i - 2]; + var t1_1 = ((u >>> 17) | (u << 15)) ^ ((u >>> 19) | (u << 13)) ^ (u >>> 10); + u = this.temp[i - 15]; + var t2_1 = ((u >>> 7) | (u << 25)) ^ ((u >>> 18) | (u << 14)) ^ (u >>> 3); + this.temp[i] = + ((t1_1 + this.temp[i - 7]) | 0) + ((t2_1 + this.temp[i - 16]) | 0); + } + var t1 = ((((((state4 >>> 6) | (state4 << 26)) ^ + ((state4 >>> 11) | (state4 << 21)) ^ + ((state4 >>> 25) | (state4 << 7))) + + ((state4 & state5) ^ (~state4 & state6))) | + 0) + + ((state7 + ((constants_1.KEY[i] + this.temp[i]) | 0)) | 0)) | + 0; + var t2 = ((((state0 >>> 2) | (state0 << 30)) ^ + ((state0 >>> 13) | (state0 << 19)) ^ + ((state0 >>> 22) | (state0 << 10))) + + ((state0 & state1) ^ (state0 & state2) ^ (state1 & state2))) | + 0; + state7 = state6; + state6 = state5; + state5 = state4; + state4 = (state3 + t1) | 0; + state3 = state2; + state2 = state1; + state1 = state0; + state0 = (t1 + t2) | 0; + } + state[0] += state0; + state[1] += state1; + state[2] += state2; + state[3] += state3; + state[4] += state4; + state[5] += state5; + state[6] += state6; + state[7] += state7; + }; + return RawSha256; +}()); +exports.RawSha256 = RawSha256; +//# sourceMappingURL=RawSha256.js.map \ No newline at end of file diff --git a/amplify/functions/deleteDocument/node_modules/@aws-crypto/sha256-js/build/main/RawSha256.js.map b/amplify/functions/deleteDocument/node_modules/@aws-crypto/sha256-js/build/main/RawSha256.js.map new file mode 100644 index 0000000..81659f5 --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@aws-crypto/sha256-js/build/main/RawSha256.js.map @@ -0,0 +1 @@ 
+{"version":3,"file":"RawSha256.js","sourceRoot":"","sources":["../../src/RawSha256.ts"],"names":[],"mappings":";;;AAAA,yCAMqB;AAErB;;GAEG;AACH;IAAA;QACU,UAAK,GAAe,UAAU,CAAC,IAAI,CAAC,gBAAI,CAAC,CAAC;QAC1C,SAAI,GAAe,IAAI,UAAU,CAAC,EAAE,CAAC,CAAC;QACtC,WAAM,GAAe,IAAI,UAAU,CAAC,EAAE,CAAC,CAAC;QACxC,iBAAY,GAAW,CAAC,CAAC;QACzB,gBAAW,GAAW,CAAC,CAAC;QAEhC;;WAEG;QACH,aAAQ,GAAY,KAAK,CAAC;IA8I5B,CAAC;IA5IC,0BAAM,GAAN,UAAO,IAAgB;QACrB,IAAI,IAAI,CAAC,QAAQ,EAAE;YACjB,MAAM,IAAI,KAAK,CAAC,+CAA+C,CAAC,CAAC;SAClE;QAED,IAAI,QAAQ,GAAG,CAAC,CAAC;QACX,IAAA,UAAU,GAAK,IAAI,WAAT,CAAU;QAC1B,IAAI,CAAC,WAAW,IAAI,UAAU,CAAC;QAE/B,IAAI,IAAI,CAAC,WAAW,GAAG,CAAC,GAAG,+BAAmB,EAAE;YAC9C,MAAM,IAAI,KAAK,CAAC,qCAAqC,CAAC,CAAC;SACxD;QAED,OAAO,UAAU,GAAG,CAAC,EAAE;YACrB,IAAI,CAAC,MAAM,CAAC,IAAI,CAAC,YAAY,EAAE,CAAC,GAAG,IAAI,CAAC,QAAQ,EAAE,CAAC,CAAC;YACpD,UAAU,EAAE,CAAC;YAEb,IAAI,IAAI,CAAC,YAAY,KAAK,sBAAU,EAAE;gBACpC,IAAI,CAAC,UAAU,EAAE,CAAC;gBAClB,IAAI,CAAC,YAAY,GAAG,CAAC,CAAC;aACvB;SACF;IACH,CAAC;IAED,0BAAM,GAAN;QACE,IAAI,CAAC,IAAI,CAAC,QAAQ,EAAE;YAClB,IAAM,UAAU,GAAG,IAAI,CAAC,WAAW,GAAG,CAAC,CAAC;YACxC,IAAM,UAAU,GAAG,IAAI,QAAQ,CAC7B,IAAI,CAAC,MAAM,CAAC,MAAM,EAClB,IAAI,CAAC,MAAM,CAAC,UAAU,EACtB,IAAI,CAAC,MAAM,CAAC,UAAU,CACvB,CAAC;YAEF,IAAM,iBAAiB,GAAG,IAAI,CAAC,YAAY,CAAC;YAC5C,UAAU,CAAC,QAAQ,CAAC,IAAI,CAAC,YAAY,EAAE,EAAE,IAAI,CAAC,CAAC;YAE/C,+DAA+D;YAC/D,IAAI,iBAAiB,GAAG,sBAAU,IAAI,sBAAU,GAAG,CAAC,EAAE;gBACpD,KAAK,IAAI,CAAC,GAAG,IAAI,CAAC,YAAY,EAAE,CAAC,GAAG,sBAAU,EAAE,CAAC,EAAE,EAAE;oBACnD,UAAU,CAAC,QAAQ,CAAC,CAAC,EAAE,CAAC,CAAC,CAAC;iBAC3B;gBACD,IAAI,CAAC,UAAU,EAAE,CAAC;gBAClB,IAAI,CAAC,YAAY,GAAG,CAAC,CAAC;aACvB;YAED,KAAK,IAAI,CAAC,GAAG,IAAI,CAAC,YAAY,EAAE,CAAC,GAAG,sBAAU,GAAG,CAAC,EAAE,CAAC,EAAE,EAAE;gBACvD,UAAU,CAAC,QAAQ,CAAC,CAAC,EAAE,CAAC,CAAC,CAAC;aAC3B;YACD,UAAU,CAAC,SAAS,CAClB,sBAAU,GAAG,CAAC,EACd,IAAI,CAAC,KAAK,CAAC,UAAU,GAAG,WAAW,CAAC,EACpC,IAAI,CACL,CAAC;YACF,UAAU,CAAC,SAAS,CAAC,sBAAU,GAAG,CAAC,EAAE,UAAU,CAAC,CAAC;YAEjD,IAAI,CAAC,UAAU,EAAE,CAAC;YAElB,IAAI,CAAC,QAAQ,GAAG,IAAI,CAAC;SACtB;QAED,s
EAAsE;QACtE,kCAAkC;QAClC,IAAM,GAAG,GAAG,IAAI,UAAU,CAAC,yBAAa,CAAC,CAAC;QAC1C,KAAK,IAAI,CAAC,GAAG,CAAC,EAAE,CAAC,GAAG,CAAC,EAAE,CAAC,EAAE,EAAE;YAC1B,GAAG,CAAC,CAAC,GAAG,CAAC,CAAC,GAAG,CAAC,IAAI,CAAC,KAAK,CAAC,CAAC,CAAC,KAAK,EAAE,CAAC,GAAG,IAAI,CAAC;YAC3C,GAAG,CAAC,CAAC,GAAG,CAAC,GAAG,CAAC,CAAC,GAAG,CAAC,IAAI,CAAC,KAAK,CAAC,CAAC,CAAC,KAAK,EAAE,CAAC,GAAG,IAAI,CAAC;YAC/C,GAAG,CAAC,CAAC,GAAG,CAAC,GAAG,CAAC,CAAC,GAAG,CAAC,IAAI,CAAC,KAAK,CAAC,CAAC,CAAC,KAAK,CAAC,CAAC,GAAG,IAAI,CAAC;YAC9C,GAAG,CAAC,CAAC,GAAG,CAAC,GAAG,CAAC,CAAC,GAAG,CAAC,IAAI,CAAC,KAAK,CAAC,CAAC,CAAC,KAAK,CAAC,CAAC,GAAG,IAAI,CAAC;SAC/C;QAED,OAAO,GAAG,CAAC;IACb,CAAC;IAEO,8BAAU,GAAlB;QACQ,IAAA,KAAoB,IAAI,EAAtB,MAAM,YAAA,EAAE,KAAK,WAAS,CAAC;QAE/B,IAAI,MAAM,GAAG,KAAK,CAAC,CAAC,CAAC,EACnB,MAAM,GAAG,KAAK,CAAC,CAAC,CAAC,EACjB,MAAM,GAAG,KAAK,CAAC,CAAC,CAAC,EACjB,MAAM,GAAG,KAAK,CAAC,CAAC,CAAC,EACjB,MAAM,GAAG,KAAK,CAAC,CAAC,CAAC,EACjB,MAAM,GAAG,KAAK,CAAC,CAAC,CAAC,EACjB,MAAM,GAAG,KAAK,CAAC,CAAC,CAAC,EACjB,MAAM,GAAG,KAAK,CAAC,CAAC,CAAC,CAAC;QAEpB,KAAK,IAAI,CAAC,GAAG,CAAC,EAAE,CAAC,GAAG,sBAAU,EAAE,CAAC,EAAE,EAAE;YACnC,IAAI,CAAC,GAAG,EAAE,EAAE;gBACV,IAAI,CAAC,IAAI,CAAC,CAAC,CAAC;oBACV,CAAC,CAAC,MAAM,CAAC,CAAC,GAAG,CAAC,CAAC,GAAG,IAAI,CAAC,IAAI,EAAE,CAAC;wBAC9B,CAAC,CAAC,MAAM,CAAC,CAAC,GAAG,CAAC,GAAG,CAAC,CAAC,GAAG,IAAI,CAAC,IAAI,EAAE,CAAC;wBAClC,CAAC,CAAC,MAAM,CAAC,CAAC,GAAG,CAAC,GAAG,CAAC,CAAC,GAAG,IAAI,CAAC,IAAI,CAAC,CAAC;wBACjC,CAAC,MAAM,CAAC,CAAC,GAAG,CAAC,GAAG,CAAC,CAAC,GAAG,IAAI,CAAC,CAAC;aAC9B;iBAAM;gBACL,IAAI,CAAC,GAAG,IAAI,CAAC,IAAI,CAAC,CAAC,GAAG,CAAC,CAAC,CAAC;gBACzB,IAAM,IAAE,GACN,CAAC,CAAC,CAAC,KAAK,EAAE,CAAC,GAAG,CAAC,CAAC,IAAI,EAAE,CAAC,CAAC,GAAG,CAAC,CAAC,CAAC,KAAK,EAAE,CAAC,GAAG,CAAC,CAAC,IAAI,EAAE,CAAC,CAAC,GAAG,CAAC,CAAC,KAAK,EAAE,CAAC,CAAC;gBAEnE,CAAC,GAAG,IAAI,CAAC,IAAI,CAAC,CAAC,GAAG,EAAE,CAAC,CAAC;gBACtB,IAAM,IAAE,GACN,CAAC,CAAC,CAAC,KAAK,CAAC,CAAC,GAAG,CAAC,CAAC,IAAI,EAAE,CAAC,CAAC,GAAG,CAAC,CAAC,CAAC,KAAK,EAAE,CAAC,GAAG,CAAC,CAAC,IAAI,EAAE,CAAC,CAAC,GAAG,CAAC,CAAC,KAAK,CAAC,CAAC,CAAC;gBAEjE,IAAI
,CAAC,IAAI,CAAC,CAAC,CAAC;oBACV,CAAC,CAAC,IAAE,GAAG,IAAI,CAAC,IAAI,CAAC,CAAC,GAAG,CAAC,CAAC,CAAC,GAAG,CAAC,CAAC,GAAG,CAAC,CAAC,IAAE,GAAG,IAAI,CAAC,IAAI,CAAC,CAAC,GAAG,EAAE,CAAC,CAAC,GAAG,CAAC,CAAC,CAAC;aAClE;YAED,IAAM,EAAE,GACN,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,MAAM,KAAK,CAAC,CAAC,GAAG,CAAC,MAAM,IAAI,EAAE,CAAC,CAAC;gBACnC,CAAC,CAAC,MAAM,KAAK,EAAE,CAAC,GAAG,CAAC,MAAM,IAAI,EAAE,CAAC,CAAC;gBAClC,CAAC,CAAC,MAAM,KAAK,EAAE,CAAC,GAAG,CAAC,MAAM,IAAI,CAAC,CAAC,CAAC,CAAC;gBAClC,CAAC,CAAC,MAAM,GAAG,MAAM,CAAC,GAAG,CAAC,CAAC,MAAM,GAAG,MAAM,CAAC,CAAC,CAAC;gBACzC,CAAC,CAAC;gBACF,CAAC,CAAC,MAAM,GAAG,CAAC,CAAC,eAAG,CAAC,CAAC,CAAC,GAAG,IAAI,CAAC,IAAI,CAAC,CAAC,CAAC,CAAC,GAAG,CAAC,CAAC,CAAC,GAAG,CAAC,CAAC,CAAC;gBACjD,CAAC,CAAC;YAEJ,IAAM,EAAE,GACN,CAAC,CAAC,CAAC,CAAC,MAAM,KAAK,CAAC,CAAC,GAAG,CAAC,MAAM,IAAI,EAAE,CAAC,CAAC;gBACjC,CAAC,CAAC,MAAM,KAAK,EAAE,CAAC,GAAG,CAAC,MAAM,IAAI,EAAE,CAAC,CAAC;gBAClC,CAAC,CAAC,MAAM,KAAK,EAAE,CAAC,GAAG,CAAC,MAAM,IAAI,EAAE,CAAC,CAAC,CAAC;gBACnC,CAAC,CAAC,MAAM,GAAG,MAAM,CAAC,GAAG,CAAC,MAAM,GAAG,MAAM,CAAC,GAAG,CAAC,MAAM,GAAG,MAAM,CAAC,CAAC,CAAC;gBAC9D,CAAC,CAAC;YAEJ,MAAM,GAAG,MAAM,CAAC;YAChB,MAAM,GAAG,MAAM,CAAC;YAChB,MAAM,GAAG,MAAM,CAAC;YAChB,MAAM,GAAG,CAAC,MAAM,GAAG,EAAE,CAAC,GAAG,CAAC,CAAC;YAC3B,MAAM,GAAG,MAAM,CAAC;YAChB,MAAM,GAAG,MAAM,CAAC;YAChB,MAAM,GAAG,MAAM,CAAC;YAChB,MAAM,GAAG,CAAC,EAAE,GAAG,EAAE,CAAC,GAAG,CAAC,CAAC;SACxB;QAED,KAAK,CAAC,CAAC,CAAC,IAAI,MAAM,CAAC;QACnB,KAAK,CAAC,CAAC,CAAC,IAAI,MAAM,CAAC;QACnB,KAAK,CAAC,CAAC,CAAC,IAAI,MAAM,CAAC;QACnB,KAAK,CAAC,CAAC,CAAC,IAAI,MAAM,CAAC;QACnB,KAAK,CAAC,CAAC,CAAC,IAAI,MAAM,CAAC;QACnB,KAAK,CAAC,CAAC,CAAC,IAAI,MAAM,CAAC;QACnB,KAAK,CAAC,CAAC,CAAC,IAAI,MAAM,CAAC;QACnB,KAAK,CAAC,CAAC,CAAC,IAAI,MAAM,CAAC;IACrB,CAAC;IACH,gBAAC;AAAD,CAAC,AAxJD,IAwJC;AAxJY,8BAAS"} \ No newline at end of file diff --git a/amplify/functions/deleteDocument/node_modules/@aws-crypto/sha256-js/build/main/constants.d.ts b/amplify/functions/deleteDocument/node_modules/@aws-crypto/sha256-js/build/main/constants.d.ts new file mode 100644 index 
0000000..63bd764 --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@aws-crypto/sha256-js/build/main/constants.d.ts @@ -0,0 +1,20 @@ +/** + * @internal + */ +export declare const BLOCK_SIZE: number; +/** + * @internal + */ +export declare const DIGEST_LENGTH: number; +/** + * @internal + */ +export declare const KEY: Uint32Array; +/** + * @internal + */ +export declare const INIT: number[]; +/** + * @internal + */ +export declare const MAX_HASHABLE_LENGTH: number; diff --git a/amplify/functions/deleteDocument/node_modules/@aws-crypto/sha256-js/build/main/constants.js b/amplify/functions/deleteDocument/node_modules/@aws-crypto/sha256-js/build/main/constants.js new file mode 100644 index 0000000..c83aa09 --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@aws-crypto/sha256-js/build/main/constants.js @@ -0,0 +1,98 @@ +"use strict"; +Object.defineProperty(exports, "__esModule", { value: true }); +exports.MAX_HASHABLE_LENGTH = exports.INIT = exports.KEY = exports.DIGEST_LENGTH = exports.BLOCK_SIZE = void 0; +/** + * @internal + */ +exports.BLOCK_SIZE = 64; +/** + * @internal + */ +exports.DIGEST_LENGTH = 32; +/** + * @internal + */ +exports.KEY = new Uint32Array([ + 0x428a2f98, + 0x71374491, + 0xb5c0fbcf, + 0xe9b5dba5, + 0x3956c25b, + 0x59f111f1, + 0x923f82a4, + 0xab1c5ed5, + 0xd807aa98, + 0x12835b01, + 0x243185be, + 0x550c7dc3, + 0x72be5d74, + 0x80deb1fe, + 0x9bdc06a7, + 0xc19bf174, + 0xe49b69c1, + 0xefbe4786, + 0x0fc19dc6, + 0x240ca1cc, + 0x2de92c6f, + 0x4a7484aa, + 0x5cb0a9dc, + 0x76f988da, + 0x983e5152, + 0xa831c66d, + 0xb00327c8, + 0xbf597fc7, + 0xc6e00bf3, + 0xd5a79147, + 0x06ca6351, + 0x14292967, + 0x27b70a85, + 0x2e1b2138, + 0x4d2c6dfc, + 0x53380d13, + 0x650a7354, + 0x766a0abb, + 0x81c2c92e, + 0x92722c85, + 0xa2bfe8a1, + 0xa81a664b, + 0xc24b8b70, + 0xc76c51a3, + 0xd192e819, + 0xd6990624, + 0xf40e3585, + 0x106aa070, + 0x19a4c116, + 0x1e376c08, + 0x2748774c, + 0x34b0bcb5, + 0x391c0cb3, + 0x4ed8aa4a, + 0x5b9cca4f, + 0x682e6ff3, + 
0x748f82ee, + 0x78a5636f, + 0x84c87814, + 0x8cc70208, + 0x90befffa, + 0xa4506ceb, + 0xbef9a3f7, + 0xc67178f2 +]); +/** + * @internal + */ +exports.INIT = [ + 0x6a09e667, + 0xbb67ae85, + 0x3c6ef372, + 0xa54ff53a, + 0x510e527f, + 0x9b05688c, + 0x1f83d9ab, + 0x5be0cd19 +]; +/** + * @internal + */ +exports.MAX_HASHABLE_LENGTH = Math.pow(2, 53) - 1; +//# sourceMappingURL=constants.js.map \ No newline at end of file diff --git a/amplify/functions/deleteDocument/node_modules/@aws-crypto/sha256-js/build/main/constants.js.map b/amplify/functions/deleteDocument/node_modules/@aws-crypto/sha256-js/build/main/constants.js.map new file mode 100644 index 0000000..1132c12 --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@aws-crypto/sha256-js/build/main/constants.js.map @@ -0,0 +1 @@ +{"version":3,"file":"constants.js","sourceRoot":"","sources":["../../src/constants.ts"],"names":[],"mappings":";;;AAAA;;GAEG;AACU,QAAA,UAAU,GAAW,EAAE,CAAC;AAErC;;GAEG;AACU,QAAA,aAAa,GAAW,EAAE,CAAC;AAExC;;GAEG;AACU,QAAA,GAAG,GAAG,IAAI,WAAW,CAAC;IACjC,UAAU;IACV,UAAU;IACV,UAAU;IACV,UAAU;IACV,UAAU;IACV,UAAU;IACV,UAAU;IACV,UAAU;IACV,UAAU;IACV,UAAU;IACV,UAAU;IACV,UAAU;IACV,UAAU;IACV,UAAU;IACV,UAAU;IACV,UAAU;IACV,UAAU;IACV,UAAU;IACV,UAAU;IACV,UAAU;IACV,UAAU;IACV,UAAU;IACV,UAAU;IACV,UAAU;IACV,UAAU;IACV,UAAU;IACV,UAAU;IACV,UAAU;IACV,UAAU;IACV,UAAU;IACV,UAAU;IACV,UAAU;IACV,UAAU;IACV,UAAU;IACV,UAAU;IACV,UAAU;IACV,UAAU;IACV,UAAU;IACV,UAAU;IACV,UAAU;IACV,UAAU;IACV,UAAU;IACV,UAAU;IACV,UAAU;IACV,UAAU;IACV,UAAU;IACV,UAAU;IACV,UAAU;IACV,UAAU;IACV,UAAU;IACV,UAAU;IACV,UAAU;IACV,UAAU;IACV,UAAU;IACV,UAAU;IACV,UAAU;IACV,UAAU;IACV,UAAU;IACV,UAAU;IACV,UAAU;IACV,UAAU;IACV,UAAU;IACV,UAAU;IACV,UAAU;CACX,CAAC,CAAC;AAEH;;GAEG;AACU,QAAA,IAAI,GAAG;IAClB,UAAU;IACV,UAAU;IACV,UAAU;IACV,UAAU;IACV,UAAU;IACV,UAAU;IACV,UAAU;IACV,UAAU;CACX,CAAC;AAEF;;GAEG;AACU,QAAA,mBAAmB,GAAG,SAAA,CAAC,EAAI,EAAE,CAAA,GAAG,CAAC,CAAC"} \ No newline at end of file diff --git 
a/amplify/functions/deleteDocument/node_modules/@aws-crypto/sha256-js/build/main/index.d.ts b/amplify/functions/deleteDocument/node_modules/@aws-crypto/sha256-js/build/main/index.d.ts new file mode 100644 index 0000000..4554d8a --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@aws-crypto/sha256-js/build/main/index.d.ts @@ -0,0 +1 @@ +export * from "./jsSha256"; diff --git a/amplify/functions/deleteDocument/node_modules/@aws-crypto/sha256-js/build/main/index.js b/amplify/functions/deleteDocument/node_modules/@aws-crypto/sha256-js/build/main/index.js new file mode 100644 index 0000000..4329f10 --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@aws-crypto/sha256-js/build/main/index.js @@ -0,0 +1,5 @@ +"use strict"; +Object.defineProperty(exports, "__esModule", { value: true }); +var tslib_1 = require("tslib"); +tslib_1.__exportStar(require("./jsSha256"), exports); +//# sourceMappingURL=index.js.map \ No newline at end of file diff --git a/amplify/functions/deleteDocument/node_modules/@aws-crypto/sha256-js/build/main/index.js.map b/amplify/functions/deleteDocument/node_modules/@aws-crypto/sha256-js/build/main/index.js.map new file mode 100644 index 0000000..9f97d54 --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@aws-crypto/sha256-js/build/main/index.js.map @@ -0,0 +1 @@ +{"version":3,"file":"index.js","sourceRoot":"","sources":["../../src/index.ts"],"names":[],"mappings":";;;AAAA,qDAA2B"} \ No newline at end of file diff --git a/amplify/functions/deleteDocument/node_modules/@aws-crypto/sha256-js/build/main/jsSha256.d.ts b/amplify/functions/deleteDocument/node_modules/@aws-crypto/sha256-js/build/main/jsSha256.d.ts new file mode 100644 index 0000000..d813b25 --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@aws-crypto/sha256-js/build/main/jsSha256.d.ts @@ -0,0 +1,12 @@ +import { Checksum, SourceData } from "@aws-sdk/types"; +export declare class Sha256 implements Checksum { + private readonly secret?; + 
private hash; + private outer?; + private error; + constructor(secret?: SourceData); + update(toHash: SourceData): void; + digestSync(): Uint8Array; + digest(): Promise; + reset(): void; +} diff --git a/amplify/functions/deleteDocument/node_modules/@aws-crypto/sha256-js/build/main/jsSha256.js b/amplify/functions/deleteDocument/node_modules/@aws-crypto/sha256-js/build/main/jsSha256.js new file mode 100644 index 0000000..2a4f2f1 --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@aws-crypto/sha256-js/build/main/jsSha256.js @@ -0,0 +1,85 @@ +"use strict"; +Object.defineProperty(exports, "__esModule", { value: true }); +exports.Sha256 = void 0; +var tslib_1 = require("tslib"); +var constants_1 = require("./constants"); +var RawSha256_1 = require("./RawSha256"); +var util_1 = require("@aws-crypto/util"); +var Sha256 = /** @class */ (function () { + function Sha256(secret) { + this.secret = secret; + this.hash = new RawSha256_1.RawSha256(); + this.reset(); + } + Sha256.prototype.update = function (toHash) { + if ((0, util_1.isEmptyData)(toHash) || this.error) { + return; + } + try { + this.hash.update((0, util_1.convertToBuffer)(toHash)); + } + catch (e) { + this.error = e; + } + }; + /* This synchronous method keeps compatibility + * with the v2 aws-sdk. + */ + Sha256.prototype.digestSync = function () { + if (this.error) { + throw this.error; + } + if (this.outer) { + if (!this.outer.finished) { + this.outer.update(this.hash.digest()); + } + return this.outer.digest(); + } + return this.hash.digest(); + }; + /* The underlying digest method here is synchronous. + * To keep the same interface with the other hash functions + * the default is to expose this as an async method. + * However, it can sometimes be useful to have a sync method. 
+ */ + Sha256.prototype.digest = function () { + return tslib_1.__awaiter(this, void 0, void 0, function () { + return tslib_1.__generator(this, function (_a) { + return [2 /*return*/, this.digestSync()]; + }); + }); + }; + Sha256.prototype.reset = function () { + this.hash = new RawSha256_1.RawSha256(); + if (this.secret) { + this.outer = new RawSha256_1.RawSha256(); + var inner = bufferFromSecret(this.secret); + var outer = new Uint8Array(constants_1.BLOCK_SIZE); + outer.set(inner); + for (var i = 0; i < constants_1.BLOCK_SIZE; i++) { + inner[i] ^= 0x36; + outer[i] ^= 0x5c; + } + this.hash.update(inner); + this.outer.update(outer); + // overwrite the copied key in memory + for (var i = 0; i < inner.byteLength; i++) { + inner[i] = 0; + } + } + }; + return Sha256; +}()); +exports.Sha256 = Sha256; +function bufferFromSecret(secret) { + var input = (0, util_1.convertToBuffer)(secret); + if (input.byteLength > constants_1.BLOCK_SIZE) { + var bufferHash = new RawSha256_1.RawSha256(); + bufferHash.update(input); + input = bufferHash.digest(); + } + var buffer = new Uint8Array(constants_1.BLOCK_SIZE); + buffer.set(input); + return buffer; +} +//# sourceMappingURL=jsSha256.js.map \ No newline at end of file diff --git a/amplify/functions/deleteDocument/node_modules/@aws-crypto/sha256-js/build/main/jsSha256.js.map b/amplify/functions/deleteDocument/node_modules/@aws-crypto/sha256-js/build/main/jsSha256.js.map new file mode 100644 index 0000000..c34eb36 --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@aws-crypto/sha256-js/build/main/jsSha256.js.map @@ -0,0 +1 @@ 
+{"version":3,"file":"jsSha256.js","sourceRoot":"","sources":["../../src/jsSha256.ts"],"names":[],"mappings":";;;;AAAA,yCAAyC;AACzC,yCAAwC;AAExC,yCAAgE;AAEhE;IAME,gBAAY,MAAmB;QAC7B,IAAI,CAAC,MAAM,GAAG,MAAM,CAAC;QACrB,IAAI,CAAC,IAAI,GAAG,IAAI,qBAAS,EAAE,CAAC;QAC5B,IAAI,CAAC,KAAK,EAAE,CAAC;IACf,CAAC;IAED,uBAAM,GAAN,UAAO,MAAkB;QACvB,IAAI,IAAA,kBAAW,EAAC,MAAM,CAAC,IAAI,IAAI,CAAC,KAAK,EAAE;YACrC,OAAO;SACR;QAED,IAAI;YACF,IAAI,CAAC,IAAI,CAAC,MAAM,CAAC,IAAA,sBAAe,EAAC,MAAM,CAAC,CAAC,CAAC;SAC3C;QAAC,OAAO,CAAC,EAAE;YACV,IAAI,CAAC,KAAK,GAAG,CAAC,CAAC;SAChB;IACH,CAAC;IAED;;OAEG;IACH,2BAAU,GAAV;QACE,IAAI,IAAI,CAAC,KAAK,EAAE;YACd,MAAM,IAAI,CAAC,KAAK,CAAC;SAClB;QAED,IAAI,IAAI,CAAC,KAAK,EAAE;YACd,IAAI,CAAC,IAAI,CAAC,KAAK,CAAC,QAAQ,EAAE;gBACxB,IAAI,CAAC,KAAK,CAAC,MAAM,CAAC,IAAI,CAAC,IAAI,CAAC,MAAM,EAAE,CAAC,CAAC;aACvC;YAED,OAAO,IAAI,CAAC,KAAK,CAAC,MAAM,EAAE,CAAC;SAC5B;QAED,OAAO,IAAI,CAAC,IAAI,CAAC,MAAM,EAAE,CAAC;IAC5B,CAAC;IAED;;;;OAIG;IACG,uBAAM,GAAZ;;;gBACE,sBAAO,IAAI,CAAC,UAAU,EAAE,EAAC;;;KAC1B;IAED,sBAAK,GAAL;QACE,IAAI,CAAC,IAAI,GAAG,IAAI,qBAAS,EAAE,CAAC;QAC5B,IAAI,IAAI,CAAC,MAAM,EAAE;YACf,IAAI,CAAC,KAAK,GAAG,IAAI,qBAAS,EAAE,CAAC;YAC7B,IAAM,KAAK,GAAG,gBAAgB,CAAC,IAAI,CAAC,MAAM,CAAC,CAAC;YAC5C,IAAM,KAAK,GAAG,IAAI,UAAU,CAAC,sBAAU,CAAC,CAAC;YACzC,KAAK,CAAC,GAAG,CAAC,KAAK,CAAC,CAAC;YAEjB,KAAK,IAAI,CAAC,GAAG,CAAC,EAAE,CAAC,GAAG,sBAAU,EAAE,CAAC,EAAE,EAAE;gBACnC,KAAK,CAAC,CAAC,CAAC,IAAI,IAAI,CAAC;gBACjB,KAAK,CAAC,CAAC,CAAC,IAAI,IAAI,CAAC;aAClB;YAED,IAAI,CAAC,IAAI,CAAC,MAAM,CAAC,KAAK,CAAC,CAAC;YACxB,IAAI,CAAC,KAAK,CAAC,MAAM,CAAC,KAAK,CAAC,CAAC;YAEzB,qCAAqC;YACrC,KAAK,IAAI,CAAC,GAAG,CAAC,EAAE,CAAC,GAAG,KAAK,CAAC,UAAU,EAAE,CAAC,EAAE,EAAE;gBACzC,KAAK,CAAC,CAAC,CAAC,GAAG,CAAC,CAAC;aACd;SACF;IACH,CAAC;IACH,aAAC;AAAD,CAAC,AA1ED,IA0EC;AA1EY,wBAAM;AA4EnB,SAAS,gBAAgB,CAAC,MAAkB;IAC1C,IAAI,KAAK,GAAG,IAAA,sBAAe,EAAC,MAAM,CAAC,CAAC;IAEpC,IAAI,KAAK,CAAC,UAAU,GAAG,sBAAU,EAAE;QACjC,IAAM,UAAU,GAAG,IAAI,qBAAS,EAAE,CAAC;QACnC,UAAU,CAAC,MAAM,CAAC,KAAK,CAAC,CAAC;QACzB,KAAK,GAAG,UAAU,CAAC,MAAM,EAAE,CAAC;KAC
7B;IAED,IAAM,MAAM,GAAG,IAAI,UAAU,CAAC,sBAAU,CAAC,CAAC;IAC1C,MAAM,CAAC,GAAG,CAAC,KAAK,CAAC,CAAC;IAClB,OAAO,MAAM,CAAC;AAChB,CAAC"} \ No newline at end of file diff --git a/amplify/functions/deleteDocument/node_modules/@aws-crypto/sha256-js/build/main/knownHashes.fixture.d.ts b/amplify/functions/deleteDocument/node_modules/@aws-crypto/sha256-js/build/main/knownHashes.fixture.d.ts new file mode 100644 index 0000000..d880343 --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@aws-crypto/sha256-js/build/main/knownHashes.fixture.d.ts @@ -0,0 +1,5 @@ +export declare const hashTestVectors: Array<[Uint8Array, Uint8Array]>; +/** + * @see https://tools.ietf.org/html/rfc4231 + */ +export declare const hmacTestVectors: Array<[Uint8Array, Uint8Array, Uint8Array]>; diff --git a/amplify/functions/deleteDocument/node_modules/@aws-crypto/sha256-js/build/main/knownHashes.fixture.js b/amplify/functions/deleteDocument/node_modules/@aws-crypto/sha256-js/build/main/knownHashes.fixture.js new file mode 100644 index 0000000..3f0dd2f --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@aws-crypto/sha256-js/build/main/knownHashes.fixture.js @@ -0,0 +1,322 @@ +"use strict"; +Object.defineProperty(exports, "__esModule", { value: true }); +exports.hmacTestVectors = exports.hashTestVectors = void 0; +var util_hex_encoding_1 = require("@aws-sdk/util-hex-encoding"); +var millionChars = new Uint8Array(1000000); +for (var i = 0; i < 1000000; i++) { + millionChars[i] = 97; +} +exports.hashTestVectors = [ + [ + Uint8Array.from([97, 98, 99]), + (0, util_hex_encoding_1.fromHex)("ba7816bf8f01cfea414140de5dae2223b00361a396177a9cb410ff61f20015ad") + ], + [ + new Uint8Array(0), + (0, util_hex_encoding_1.fromHex)("e3b0c44298fc1c149afbf4c8996fb92427ae41e4649b934ca495991b7852b855") + ], + [ + (0, util_hex_encoding_1.fromHex)("61"), + (0, util_hex_encoding_1.fromHex)("ca978112ca1bbdcafac231b39a23dc4da786eff8147c4e72b9807785afee48bb") + ], + [ + (0, 
util_hex_encoding_1.fromHex)("6161"), + (0, util_hex_encoding_1.fromHex)("961b6dd3ede3cb8ecbaacbd68de040cd78eb2ed5889130cceb4c49268ea4d506") + ], + [ + (0, util_hex_encoding_1.fromHex)("616161"), + (0, util_hex_encoding_1.fromHex)("9834876dcfb05cb167a5c24953eba58c4ac89b1adf57f28f2f9d09af107ee8f0") + ], + [ + (0, util_hex_encoding_1.fromHex)("61616161"), + (0, util_hex_encoding_1.fromHex)("61be55a8e2f6b4e172338bddf184d6dbee29c98853e0a0485ecee7f27b9af0b4") + ], + [ + (0, util_hex_encoding_1.fromHex)("6161616161"), + (0, util_hex_encoding_1.fromHex)("ed968e840d10d2d313a870bc131a4e2c311d7ad09bdf32b3418147221f51a6e2") + ], + [ + (0, util_hex_encoding_1.fromHex)("616161616161"), + (0, util_hex_encoding_1.fromHex)("ed02457b5c41d964dbd2f2a609d63fe1bb7528dbe55e1abf5b52c249cd735797") + ], + [ + (0, util_hex_encoding_1.fromHex)("61616161616161"), + (0, util_hex_encoding_1.fromHex)("e46240714b5db3a23eee60479a623efba4d633d27fe4f03c904b9e219a7fbe60") + ], + [ + (0, util_hex_encoding_1.fromHex)("6161616161616161"), + (0, util_hex_encoding_1.fromHex)("1f3ce40415a2081fa3eee75fc39fff8e56c22270d1a978a7249b592dcebd20b4") + ], + [ + (0, util_hex_encoding_1.fromHex)("616161616161616161"), + (0, util_hex_encoding_1.fromHex)("f2aca93b80cae681221f0445fa4e2cae8a1f9f8fa1e1741d9639caad222f537d") + ], + [ + (0, util_hex_encoding_1.fromHex)("61616161616161616161"), + (0, util_hex_encoding_1.fromHex)("bf2cb58a68f684d95a3b78ef8f661c9a4e5b09e82cc8f9cc88cce90528caeb27") + ], + [ + (0, util_hex_encoding_1.fromHex)("6161616161616161616161"), + (0, util_hex_encoding_1.fromHex)("28cb017dfc99073aa1b47c1b30f413e3ce774c4991eb4158de50f9dbb36d8043") + ], + [ + (0, util_hex_encoding_1.fromHex)("616161616161616161616161"), + (0, util_hex_encoding_1.fromHex)("f24abc34b13fade76e805799f71187da6cd90b9cac373ae65ed57f143bd664e5") + ], + [ + (0, util_hex_encoding_1.fromHex)("61616161616161616161616161"), + (0, util_hex_encoding_1.fromHex)("a689d786e81340e45511dec6c7ab2d978434e5db123362450fe10cfac70d19d0") + ], + [ + 
(0, util_hex_encoding_1.fromHex)("6161616161616161616161616161"), + (0, util_hex_encoding_1.fromHex)("82cab7df0abfb9d95dca4e5937ce2968c798c726fea48c016bf9763221efda13") + ], + [ + (0, util_hex_encoding_1.fromHex)("616161616161616161616161616161"), + (0, util_hex_encoding_1.fromHex)("ef2df0b539c6c23de0f4cbe42648c301ae0e22e887340a4599fb4ef4e2678e48") + ], + [ + (0, util_hex_encoding_1.fromHex)("61616161616161616161616161616161"), + (0, util_hex_encoding_1.fromHex)("0c0beacef8877bbf2416eb00f2b5dc96354e26dd1df5517320459b1236860f8c") + ], + [ + (0, util_hex_encoding_1.fromHex)("6161616161616161616161616161616161"), + (0, util_hex_encoding_1.fromHex)("b860666ee2966dd8f903be44ee605c6e1366f926d9f17a8f49937d11624eb99d") + ], + [ + (0, util_hex_encoding_1.fromHex)("616161616161616161616161616161616161"), + (0, util_hex_encoding_1.fromHex)("c926defaaa3d13eda2fc63a553bb7fb7326bece6e7cb67ca5296e4727d89bab4") + ], + [ + (0, util_hex_encoding_1.fromHex)("61616161616161616161616161616161616161"), + (0, util_hex_encoding_1.fromHex)("a0b4aaab8a966e2193ba172d68162c4656860197f256b5f45f0203397ff3f99c") + ], + [ + (0, util_hex_encoding_1.fromHex)("6161616161616161616161616161616161616161"), + (0, util_hex_encoding_1.fromHex)("42492da06234ad0ac76f5d5debdb6d1ae027cffbe746a1c13b89bb8bc0139137") + ], + [ + (0, util_hex_encoding_1.fromHex)("616161616161616161616161616161616161616161"), + (0, util_hex_encoding_1.fromHex)("7df8e299c834de198e264c3e374bc58ecd9382252a705c183beb02f275571e3b") + ], + [ + (0, util_hex_encoding_1.fromHex)("61616161616161616161616161616161616161616161"), + (0, util_hex_encoding_1.fromHex)("ec7c494df6d2a7ea36668d656e6b8979e33641bfea378c15038af3964db057a3") + ], + [ + (0, util_hex_encoding_1.fromHex)("6161616161616161616161616161616161616161616161"), + (0, util_hex_encoding_1.fromHex)("897d3e95b65f26676081f8b9f3a98b6ee4424566303e8d4e7c7522ebae219eab") + ], + [ + (0, util_hex_encoding_1.fromHex)("616161616161616161616161616161616161616161616161"), + (0, 
util_hex_encoding_1.fromHex)("09f61f8d9cd65e6a0c258087c485b6293541364e42bd97b2d7936580c8aa3c54") + ], + [ + (0, util_hex_encoding_1.fromHex)("61616161616161616161616161616161616161616161616161"), + (0, util_hex_encoding_1.fromHex)("2f521e2a7d0bd812cbc035f4ed6806eb8d851793b04ba147e8f66b72f5d1f20f") + ], + [ + (0, util_hex_encoding_1.fromHex)("6161616161616161616161616161616161616161616161616161"), + (0, util_hex_encoding_1.fromHex)("9976d549a25115dab4e36d0c1fb8f31cb07da87dd83275977360eb7dc09e88de") + ], + [ + (0, util_hex_encoding_1.fromHex)("616161616161616161616161616161616161616161616161616161"), + (0, util_hex_encoding_1.fromHex)("cc0616e61cbd6e8e5e34e9fb2d320f37de915820206f5696c31f1fbd24aa16de") + ], + [ + (0, util_hex_encoding_1.fromHex)("61616161616161616161616161616161616161616161616161616161"), + (0, util_hex_encoding_1.fromHex)("9c547cb8115a44883b9f70ba68f75117cd55359c92611875e386f8af98c172ab") + ], + [ + (0, util_hex_encoding_1.fromHex)("6161616161616161616161616161616161616161616161616161616161"), + (0, util_hex_encoding_1.fromHex)("6913c9c7fd42fe23df8b6bcd4dbaf1c17748948d97f2980b432319c39eddcf6c") + ], + [ + (0, util_hex_encoding_1.fromHex)("616161616161616161616161616161616161616161616161616161616161"), + (0, util_hex_encoding_1.fromHex)("3a54fc0cbc0b0ef48b6507b7788096235d10292dd3ae24e22f5aa062d4f9864a") + ], + [ + (0, util_hex_encoding_1.fromHex)("61616161616161616161616161616161616161616161616161616161616161"), + (0, util_hex_encoding_1.fromHex)("61c60b487d1a921e0bcc9bf853dda0fb159b30bf57b2e2d2c753b00be15b5a09") + ], + [ + (0, util_hex_encoding_1.fromHex)("6161616161616161616161616161616161616161616161616161616161616161"), + (0, util_hex_encoding_1.fromHex)("3ba3f5f43b92602683c19aee62a20342b084dd5971ddd33808d81a328879a547") + ], + [ + (0, util_hex_encoding_1.fromHex)("616161616161616161616161616161616161616161616161616161616161616161"), + (0, util_hex_encoding_1.fromHex)("852785c805c77e71a22340a54e9d95933ed49121e7d2bf3c2d358854bc1359ea") + ], + [ + 
(0, util_hex_encoding_1.fromHex)("61616161616161616161616161616161616161616161616161616161616161616161"), + (0, util_hex_encoding_1.fromHex)("a27c896c4859204843166af66f0e902b9c3b3ed6d2fd13d435abc020065c526f") + ], + [ + (0, util_hex_encoding_1.fromHex)("6161616161616161616161616161616161616161616161616161616161616161616161"), + (0, util_hex_encoding_1.fromHex)("629362afc62c74497caed2272e30f8125ecd0965f8d8d7cfc4e260f7f8dd319d") + ], + [ + (0, util_hex_encoding_1.fromHex)("616161616161616161616161616161616161616161616161616161616161616161616161"), + (0, util_hex_encoding_1.fromHex)("22c1d24bcd03e9aee9832efccd6da613fc702793178e5f12c945c7b67ddda933") + ], + [ + (0, util_hex_encoding_1.fromHex)("61616161616161616161616161616161616161616161616161616161616161616161616161"), + (0, util_hex_encoding_1.fromHex)("21ec055b38ce759cd4d0f477e9bdec2c5b8199945db4439bae334a964df6246c") + ], + [ + (0, util_hex_encoding_1.fromHex)("6161616161616161616161616161616161616161616161616161616161616161616161616161"), + (0, util_hex_encoding_1.fromHex)("365a9c3e2c2af0a56e47a9dac51c2c5381bf8f41273bad3175e0e619126ad087") + ], + [ + (0, util_hex_encoding_1.fromHex)("616161616161616161616161616161616161616161616161616161616161616161616161616161"), + (0, util_hex_encoding_1.fromHex)("b4d5e56e929ba4cda349e9274e3603d0be246b82016bca20f363963c5f2d6845") + ], + [ + (0, util_hex_encoding_1.fromHex)("61616161616161616161616161616161616161616161616161616161616161616161616161616161"), + (0, util_hex_encoding_1.fromHex)("e33cdf9c7f7120b98e8c78408953e07f2ecd183006b5606df349b4c212acf43e") + ], + [ + (0, util_hex_encoding_1.fromHex)("6161616161616161616161616161616161616161616161616161616161616161616161616161616161"), + (0, util_hex_encoding_1.fromHex)("c0f8bd4dbc2b0c03107c1c37913f2a7501f521467f45dd0fef6958e9a4692719") + ], + [ + (0, util_hex_encoding_1.fromHex)("616161616161616161616161616161616161616161616161616161616161616161616161616161616161"), + (0, 
util_hex_encoding_1.fromHex)("7a538607fdaab9296995929f451565bbb8142e1844117322aafd2b3d76b01aff") + ], + [ + (0, util_hex_encoding_1.fromHex)("61616161616161616161616161616161616161616161616161616161616161616161616161616161616161"), + (0, util_hex_encoding_1.fromHex)("66d34fba71f8f450f7e45598853e53bfc23bbd129027cbb131a2f4ffd7878cd0") + ], + [ + (0, util_hex_encoding_1.fromHex)("6161616161616161616161616161616161616161616161616161616161616161616161616161616161616161"), + (0, util_hex_encoding_1.fromHex)("16849877c6c21ef0bfa68e4f6747300ddb171b170b9f00e189edc4c2fc4db93e") + ], + [ + (0, util_hex_encoding_1.fromHex)("616161616161616161616161616161616161616161616161616161616161616161616161616161616161616161"), + (0, util_hex_encoding_1.fromHex)("52789e3423b72beeb898456a4f49662e46b0cbb960784c5ef4b1399d327e7c27") + ], + [ + (0, util_hex_encoding_1.fromHex)("61616161616161616161616161616161616161616161616161616161616161616161616161616161616161616161"), + (0, util_hex_encoding_1.fromHex)("6643110c5628fff59edf76d82d5bf573bf800f16a4d65dfb1e5d6f1a46296d0b") + ], + [ + (0, util_hex_encoding_1.fromHex)("6161616161616161616161616161616161616161616161616161616161616161616161616161616161616161616161"), + (0, util_hex_encoding_1.fromHex)("11eaed932c6c6fddfc2efc394e609facf4abe814fc6180d03b14fce13a07d0e5") + ], + [ + (0, util_hex_encoding_1.fromHex)("616161616161616161616161616161616161616161616161616161616161616161616161616161616161616161616161"), + (0, util_hex_encoding_1.fromHex)("97daac0ee9998dfcad6c9c0970da5ca411c86233a944c25b47566f6a7bc1ddd5") + ], + [ + (0, util_hex_encoding_1.fromHex)("61616161616161616161616161616161616161616161616161616161616161616161616161616161616161616161616161"), + (0, util_hex_encoding_1.fromHex)("8f9bec6a62dd28ebd36d1227745592de6658b36974a3bb98a4c582f683ea6c42") + ], + [ + (0, util_hex_encoding_1.fromHex)("6161616161616161616161616161616161616161616161616161616161616161616161616161616161616161616161616161"), + (0, 
util_hex_encoding_1.fromHex)("160b4e433e384e05e537dc59b467f7cb2403f0214db15c5db58862a3f1156d2e") + ], + [ + (0, util_hex_encoding_1.fromHex)("616161616161616161616161616161616161616161616161616161616161616161616161616161616161616161616161616161"), + (0, util_hex_encoding_1.fromHex)("bfc5fe0e360152ca98c50fab4ed7e3078c17debc2917740d5000913b686ca129") + ], + [ + (0, util_hex_encoding_1.fromHex)("61616161616161616161616161616161616161616161616161616161616161616161616161616161616161616161616161616161"), + (0, util_hex_encoding_1.fromHex)("6c1b3dc7a706b9dc81352a6716b9c666c608d8626272c64b914ab05572fc6e84") + ], + [ + (0, util_hex_encoding_1.fromHex)("6161616161616161616161616161616161616161616161616161616161616161616161616161616161616161616161616161616161"), + (0, util_hex_encoding_1.fromHex)("abe346a7259fc90b4c27185419628e5e6af6466b1ae9b5446cac4bfc26cf05c4") + ], + [ + (0, util_hex_encoding_1.fromHex)("616161616161616161616161616161616161616161616161616161616161616161616161616161616161616161616161616161616161"), + (0, util_hex_encoding_1.fromHex)("a3f01b6939256127582ac8ae9fb47a382a244680806a3f613a118851c1ca1d47") + ], + [ + (0, util_hex_encoding_1.fromHex)("61616161616161616161616161616161616161616161616161616161616161616161616161616161616161616161616161616161616161"), + (0, util_hex_encoding_1.fromHex)("9f4390f8d30c2dd92ec9f095b65e2b9ae9b0a925a5258e241c9f1e910f734318") + ], + [ + (0, util_hex_encoding_1.fromHex)("6161616161616161616161616161616161616161616161616161616161616161616161616161616161616161616161616161616161616161"), + (0, util_hex_encoding_1.fromHex)("b35439a4ac6f0948b6d6f9e3c6af0f5f590ce20f1bde7090ef7970686ec6738a") + ], + [ + (0, util_hex_encoding_1.fromHex)("616161616161616161616161616161616161616161616161616161616161616161616161616161616161616161616161616161616161616161"), + (0, util_hex_encoding_1.fromHex)("f13b2d724659eb3bf47f2dd6af1accc87b81f09f59f2b75e5c0bed6589dfe8c6") + ], + [ + (0, 
util_hex_encoding_1.fromHex)("61616161616161616161616161616161616161616161616161616161616161616161616161616161616161616161616161616161616161616161"), + (0, util_hex_encoding_1.fromHex)("d5c039b748aa64665782974ec3dc3025c042edf54dcdc2b5de31385b094cb678") + ], + [ + (0, util_hex_encoding_1.fromHex)("6161616161616161616161616161616161616161616161616161616161616161616161616161616161616161616161616161616161616161616161"), + (0, util_hex_encoding_1.fromHex)("111bb261277afd65f0744b247cd3e47d386d71563d0ed995517807d5ebd4fba3") + ], + [ + (0, util_hex_encoding_1.fromHex)("616161616161616161616161616161616161616161616161616161616161616161616161616161616161616161616161616161616161616161616161"), + (0, util_hex_encoding_1.fromHex)("11ee391211c6256460b6ed375957fadd8061cafbb31daf967db875aebd5aaad4") + ], + [ + (0, util_hex_encoding_1.fromHex)("61616161616161616161616161616161616161616161616161616161616161616161616161616161616161616161616161616161616161616161616161"), + (0, util_hex_encoding_1.fromHex)("35d5fc17cfbbadd00f5e710ada39f194c5ad7c766ad67072245f1fad45f0f530") + ], + [ + (0, util_hex_encoding_1.fromHex)("6161616161616161616161616161616161616161616161616161616161616161616161616161616161616161616161616161616161616161616161616161"), + (0, util_hex_encoding_1.fromHex)("f506898cc7c2e092f9eb9fadae7ba50383f5b46a2a4fe5597dbb553a78981268") + ], + [ + (0, util_hex_encoding_1.fromHex)("616161616161616161616161616161616161616161616161616161616161616161616161616161616161616161616161616161616161616161616161616161"), + (0, util_hex_encoding_1.fromHex)("7d3e74a05d7db15bce4ad9ec0658ea98e3f06eeecf16b4c6fff2da457ddc2f34") + ], + [ + (0, util_hex_encoding_1.fromHex)("61616161616161616161616161616161616161616161616161616161616161616161616161616161616161616161616161616161616161616161616161616161"), + (0, util_hex_encoding_1.fromHex)("ffe054fe7ae0cb6dc65c3af9b61d5209f439851db43d0ba5997337df154668eb") + ], + [ + (0, 
util_hex_encoding_1.fromHex)("de188941a3375d3a8a061e67576e926dc71a7fa3f0cceb97452b4d3227965f9ea8cc75076d9fb9c5417aa5cb30fc22198b34982dbb629e"), + (0, util_hex_encoding_1.fromHex)("038051e9c324393bd1ca1978dd0952c2aa3742ca4f1bd5cd4611cea83892d382") + ], + [ + millionChars, + (0, util_hex_encoding_1.fromHex)("cdc76e5c9914fb9281a1c7e284d73e67f1809a48a497200e046d39ccc7112cd0") + ], + [ + (0, util_hex_encoding_1.fromHex)("aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa"), + (0, util_hex_encoding_1.fromHex)("45ad4b37c6e2fc0a2cfcc1b5da524132ec707615c2cae1dbbc43c97aa521db81") + ] +]; +/** + * @see https://tools.ietf.org/html/rfc4231 + */ +exports.hmacTestVectors = [ + [ + (0, util_hex_encoding_1.fromHex)("0b0b0b0b0b0b0b0b0b0b0b0b0b0b0b0b0b0b0b0b"), + (0, util_hex_encoding_1.fromHex)("4869205468657265"), + (0, util_hex_encoding_1.fromHex)("b0344c61d8db38535ca8afceaf0bf12b881dc200c9833da726e9376c2e32cff7") + ], + [ + (0, util_hex_encoding_1.fromHex)("4a656665"), + (0, util_hex_encoding_1.fromHex)("7768617420646f2079612077616e7420666f72206e6f7468696e673f"), + (0, util_hex_encoding_1.fromHex)("5bdcc146bf60754e6a042426089575c75a003f089d2739839dec58b964ec3843") + ], + [ + (0, util_hex_encoding_1.fromHex)("aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa"), + (0, util_hex_encoding_1.fromHex)("dddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddd"), + (0, util_hex_encoding_1.fromHex)("773ea91e36800e46854db8ebd09181a72959098b3ef8c122d9635514ced565fe") + ], + [ + (0, util_hex_encoding_1.fromHex)("0102030405060708090a0b0c0d0e0f10111213141516171819"), + (0, util_hex_encoding_1.fromHex)("cdcdcdcdcdcdcdcdcdcdcdcdcdcdcdcdcdcdcdcdcdcdcdcdcdcdcdcdcdcdcdcdcdcdcdcdcdcdcdcdcdcdcdcdcdcdcdcdcdcd"), + (0, 
util_hex_encoding_1.fromHex)("82558a389a443c0ea4cc819899f2083a85f0faa3e578f8077a2e3ff46729665b") + ], + [ + (0, util_hex_encoding_1.fromHex)("aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa"), + (0, util_hex_encoding_1.fromHex)("54657374205573696e67204c6172676572205468616e20426c6f636b2d53697a65204b6579202d2048617368204b6579204669727374"), + (0, util_hex_encoding_1.fromHex)("60e431591ee0b67f0d8a26aacbf5b77f8e0bc6213728c5140546040f0ee37f54") + ], + [ + (0, util_hex_encoding_1.fromHex)("aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa"), + (0, util_hex_encoding_1.fromHex)("5468697320697320612074657374207573696e672061206c6172676572207468616e20626c6f636b2d73697a65206b657920616e642061206c6172676572207468616e20626c6f636b2d73697a6520646174612e20546865206b6579206e6565647320746f20626520686173686564206265666f7265206265696e6720757365642062792074686520484d414320616c676f726974686d2e"), + (0, util_hex_encoding_1.fromHex)("9b09ffa71b942fcb27635fbcd5b0e944bfdc63644f0713938a7f51535c3a35e2") + ] +]; +//# sourceMappingURL=knownHashes.fixture.js.map \ No newline at end of file diff --git a/amplify/functions/deleteDocument/node_modules/@aws-crypto/sha256-js/build/main/knownHashes.fixture.js.map b/amplify/functions/deleteDocument/node_modules/@aws-crypto/sha256-js/build/main/knownHashes.fixture.js.map new file mode 100644 index 0000000..8ffc02e --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@aws-crypto/sha256-js/build/main/knownHashes.fixture.js.map @@ -0,0 +1 @@ 
+{"version":3,"file":"knownHashes.fixture.js","sourceRoot":"","sources":["../../src/knownHashes.fixture.ts"],"names":[],"mappings":";;;AAAA,gEAAqD;AAErD,IAAM,YAAY,GAAG,IAAI,UAAU,CAAC,OAAO,CAAC,CAAC;AAC7C,KAAK,IAAI,CAAC,GAAG,CAAC,EAAE,CAAC,GAAG,OAAO,EAAE,CAAC,EAAE,EAAE;IAChC,YAAY,CAAC,CAAC,CAAC,GAAG,EAAE,CAAC;CACtB;AAEY,QAAA,eAAe,GAAoC;IAC9D;QACE,UAAU,CAAC,IAAI,CAAC,CAAC,EAAE,EAAE,EAAE,EAAE,EAAE,CAAC,CAAC;QAC7B,IAAA,2BAAO,EAAC,kEAAkE,CAAC;KAC5E;IACD;QACE,IAAI,UAAU,CAAC,CAAC,CAAC;QACjB,IAAA,2BAAO,EAAC,kEAAkE,CAAC;KAC5E;IACD;QACE,IAAA,2BAAO,EAAC,IAAI,CAAC;QACb,IAAA,2BAAO,EAAC,kEAAkE,CAAC;KAC5E;IACD;QACE,IAAA,2BAAO,EAAC,MAAM,CAAC;QACf,IAAA,2BAAO,EAAC,kEAAkE,CAAC;KAC5E;IACD;QACE,IAAA,2BAAO,EAAC,QAAQ,CAAC;QACjB,IAAA,2BAAO,EAAC,kEAAkE,CAAC;KAC5E;IACD;QACE,IAAA,2BAAO,EAAC,UAAU,CAAC;QACnB,IAAA,2BAAO,EAAC,kEAAkE,CAAC;KAC5E;IACD;QACE,IAAA,2BAAO,EAAC,YAAY,CAAC;QACrB,IAAA,2BAAO,EAAC,kEAAkE,CAAC;KAC5E;IACD;QACE,IAAA,2BAAO,EAAC,cAAc,CAAC;QACvB,IAAA,2BAAO,EAAC,kEAAkE,CAAC;KAC5E;IACD;QACE,IAAA,2BAAO,EAAC,gBAAgB,CAAC;QACzB,IAAA,2BAAO,EAAC,kEAAkE,CAAC;KAC5E;IACD;QACE,IAAA,2BAAO,EAAC,kBAAkB,CAAC;QAC3B,IAAA,2BAAO,EAAC,kEAAkE,CAAC;KAC5E;IACD;QACE,IAAA,2BAAO,EAAC,oBAAoB,CAAC;QAC7B,IAAA,2BAAO,EAAC,kEAAkE,CAAC;KAC5E;IACD;QACE,IAAA,2BAAO,EAAC,sBAAsB,CAAC;QAC/B,IAAA,2BAAO,EAAC,kEAAkE,CAAC;KAC5E;IACD;QACE,IAAA,2BAAO,EAAC,wBAAwB,CAAC;QACjC,IAAA,2BAAO,EAAC,kEAAkE,CAAC;KAC5E;IACD;QACE,IAAA,2BAAO,EAAC,0BAA0B,CAAC;QACnC,IAAA,2BAAO,EAAC,kEAAkE,CAAC;KAC5E;IACD;QACE,IAAA,2BAAO,EAAC,4BAA4B,CAAC;QACrC,IAAA,2BAAO,EAAC,kEAAkE,CAAC;KAC5E;IACD;QACE,IAAA,2BAAO,EAAC,8BAA8B,CAAC;QACvC,IAAA,2BAAO,EAAC,kEAAkE,CAAC;KAC5E;IACD;QACE,IAAA,2BAAO,EAAC,gCAAgC,CAAC;QACzC,IAAA,2BAAO,EAAC,kEAAkE,CAAC;KAC5E;IACD;QACE,IAAA,2BAAO,EAAC,kCAAkC,CAAC;QAC3C,IAAA,2BAAO,EAAC,kEAAkE,CAAC;KAC5E;IACD;QACE,IAAA,2BAAO,EAAC,oCAAoC,CAAC;QAC7C,IAAA,2BAAO,EAAC,kEAAkE,CAAC;KAC5E;IACD;QACE,IAAA,2BAAO,EAAC,sCAAsC,CAAC;QAC/C,IAAA,2BAAO,EAAC,kEAAkE,CAAC;KAC5E;IACD;QACE,IAAA,2BAAO,EAAC,wCAAwC,CAAC;QACjD,IAAA,2BAAO,EAAC,kEAAkE,CAAC;KAC5E;IACD;QACE
,IAAA,2BAAO,EAAC,0CAA0C,CAAC;QACnD,IAAA,2BAAO,EAAC,kEAAkE,CAAC;KAC5E;IACD;QACE,IAAA,2BAAO,EAAC,4CAA4C,CAAC;QACrD,IAAA,2BAAO,EAAC,kEAAkE,CAAC;KAC5E;IACD;QACE,IAAA,2BAAO,EAAC,8CAA8C,CAAC;QACvD,IAAA,2BAAO,EAAC,kEAAkE,CAAC;KAC5E;IACD;QACE,IAAA,2BAAO,EAAC,gDAAgD,CAAC;QACzD,IAAA,2BAAO,EAAC,kEAAkE,CAAC;KAC5E;IACD;QACE,IAAA,2BAAO,EAAC,kDAAkD,CAAC;QAC3D,IAAA,2BAAO,EAAC,kEAAkE,CAAC;KAC5E;IACD;QACE,IAAA,2BAAO,EAAC,oDAAoD,CAAC;QAC7D,IAAA,2BAAO,EAAC,kEAAkE,CAAC;KAC5E;IACD;QACE,IAAA,2BAAO,EAAC,sDAAsD,CAAC;QAC/D,IAAA,2BAAO,EAAC,kEAAkE,CAAC;KAC5E;IACD;QACE,IAAA,2BAAO,EAAC,wDAAwD,CAAC;QACjE,IAAA,2BAAO,EAAC,kEAAkE,CAAC;KAC5E;IACD;QACE,IAAA,2BAAO,EAAC,0DAA0D,CAAC;QACnE,IAAA,2BAAO,EAAC,kEAAkE,CAAC;KAC5E;IACD;QACE,IAAA,2BAAO,EAAC,4DAA4D,CAAC;QACrE,IAAA,2BAAO,EAAC,kEAAkE,CAAC;KAC5E;IACD;QACE,IAAA,2BAAO,EAAC,8DAA8D,CAAC;QACvE,IAAA,2BAAO,EAAC,kEAAkE,CAAC;KAC5E;IACD;QACE,IAAA,2BAAO,EAAC,gEAAgE,CAAC;QACzE,IAAA,2BAAO,EAAC,kEAAkE,CAAC;KAC5E;IACD;QACE,IAAA,2BAAO,EAAC,kEAAkE,CAAC;QAC3E,IAAA,2BAAO,EAAC,kEAAkE,CAAC;KAC5E;IACD;QACE,IAAA,2BAAO,EACL,oEAAoE,CACrE;QACD,IAAA,2BAAO,EAAC,kEAAkE,CAAC;KAC5E;IACD;QACE,IAAA,2BAAO,EACL,sEAAsE,CACvE;QACD,IAAA,2BAAO,EAAC,kEAAkE,CAAC;KAC5E;IACD;QACE,IAAA,2BAAO,EACL,wEAAwE,CACzE;QACD,IAAA,2BAAO,EAAC,kEAAkE,CAAC;KAC5E;IACD;QACE,IAAA,2BAAO,EACL,0EAA0E,CAC3E;QACD,IAAA,2BAAO,EAAC,kEAAkE,CAAC;KAC5E;IACD;QACE,IAAA,2BAAO,EACL,4EAA4E,CAC7E;QACD,IAAA,2BAAO,EAAC,kEAAkE,CAAC;KAC5E;IACD;QACE,IAAA,2BAAO,EACL,8EAA8E,CAC/E;QACD,IAAA,2BAAO,EAAC,kEAAkE,CAAC;KAC5E;IACD;QACE,IAAA,2BAAO,EACL,gFAAgF,CACjF;QACD,IAAA,2BAAO,EAAC,kEAAkE,CAAC;KAC5E;IACD;QACE,IAAA,2BAAO,EACL,kFAAkF,CACnF;QACD,IAAA,2BAAO,EAAC,kEAAkE,CAAC;KAC5E;IACD;QACE,IAAA,2BAAO,EACL,oFAAoF,CACrF;QACD,IAAA,2BAAO,EAAC,kEAAkE,CAAC;KAC5E;IACD;QACE,IAAA,2BAAO,EACL,sFAAsF,CACvF;QACD,IAAA,2BAAO,EAAC,kEAAkE,CAAC;KAC5E;IACD;QACE,IAAA,2BAAO,EACL,wFAAwF,CACzF;QACD,IAAA,2BAAO,EAAC,kEAAkE,CAAC;KAC5E;IACD;QACE,IAAA,2BAAO,EACL,0FAA0F,CAC3F;QACD,IAAA,2BAAO,EAAC,kEAAkE,CAAC;KAC5E;IACD;QACE,IAAA,2BAAO,EACL,4FAA4F,CAC7F;QACD,IAAA,2BAAO,EAAC
,kEAAkE,CAAC;KAC5E;IACD;QACE,IAAA,2BAAO,EACL,8FAA8F,CAC/F;QACD,IAAA,2BAAO,EAAC,kEAAkE,CAAC;KAC5E;IACD;QACE,IAAA,2BAAO,EACL,gGAAgG,CACjG;QACD,IAAA,2BAAO,EAAC,kEAAkE,CAAC;KAC5E;IACD;QACE,IAAA,2BAAO,EACL,kGAAkG,CACnG;QACD,IAAA,2BAAO,EAAC,kEAAkE,CAAC;KAC5E;IACD;QACE,IAAA,2BAAO,EACL,oGAAoG,CACrG;QACD,IAAA,2BAAO,EAAC,kEAAkE,CAAC;KAC5E;IACD;QACE,IAAA,2BAAO,EACL,sGAAsG,CACvG;QACD,IAAA,2BAAO,EAAC,kEAAkE,CAAC;KAC5E;IACD;QACE,IAAA,2BAAO,EACL,wGAAwG,CACzG;QACD,IAAA,2BAAO,EAAC,kEAAkE,CAAC;KAC5E;IACD;QACE,IAAA,2BAAO,EACL,0GAA0G,CAC3G;QACD,IAAA,2BAAO,EAAC,kEAAkE,CAAC;KAC5E;IACD;QACE,IAAA,2BAAO,EACL,4GAA4G,CAC7G;QACD,IAAA,2BAAO,EAAC,kEAAkE,CAAC;KAC5E;IACD;QACE,IAAA,2BAAO,EACL,8GAA8G,CAC/G;QACD,IAAA,2BAAO,EAAC,kEAAkE,CAAC;KAC5E;IACD;QACE,IAAA,2BAAO,EACL,gHAAgH,CACjH;QACD,IAAA,2BAAO,EAAC,kEAAkE,CAAC;KAC5E;IACD;QACE,IAAA,2BAAO,EACL,kHAAkH,CACnH;QACD,IAAA,2BAAO,EAAC,kEAAkE,CAAC;KAC5E;IACD;QACE,IAAA,2BAAO,EACL,oHAAoH,CACrH;QACD,IAAA,2BAAO,EAAC,kEAAkE,CAAC;KAC5E;IACD;QACE,IAAA,2BAAO,EACL,sHAAsH,CACvH;QACD,IAAA,2BAAO,EAAC,kEAAkE,CAAC;KAC5E;IACD;QACE,IAAA,2BAAO,EACL,wHAAwH,CACzH;QACD,IAAA,2BAAO,EAAC,kEAAkE,CAAC;KAC5E;IACD;QACE,IAAA,2BAAO,EACL,0HAA0H,CAC3H;QACD,IAAA,2BAAO,EAAC,kEAAkE,CAAC;KAC5E;IACD;QACE,IAAA,2BAAO,EACL,4HAA4H,CAC7H;QACD,IAAA,2BAAO,EAAC,kEAAkE,CAAC;KAC5E;IACD;QACE,IAAA,2BAAO,EACL,8HAA8H,CAC/H;QACD,IAAA,2BAAO,EAAC,kEAAkE,CAAC;KAC5E;IACD;QACE,IAAA,2BAAO,EACL,gIAAgI,CACjI;QACD,IAAA,2BAAO,EAAC,kEAAkE,CAAC;KAC5E;IACD;QACE,IAAA,2BAAO,EACL,kIAAkI,CACnI;QACD,IAAA,2BAAO,EAAC,kEAAkE,CAAC;KAC5E;IACD;QACE,IAAA,2BAAO,EACL,gHAAgH,CACjH;QACD,IAAA,2BAAO,EAAC,kEAAkE,CAAC;KAC5E;IACD;QACE,YAAY;QACZ,IAAA,2BAAO,EAAC,kEAAkE,CAAC;KAC5E;IACD;QACE,IAAA,2BAAO,EACL,wQAAwQ,CACzQ;QACD,IAAA,2BAAO,EAAC,kEAAkE,CAAC;KAC5E;CACF,CAAC;AAEF;;GAEG;AACU,QAAA,eAAe,GAAgD;IAC1E;QACE,IAAA,2BAAO,EAAC,0CAA0C,CAAC;QACnD,IAAA,2BAAO,EAAC,kBAAkB,CAAC;QAC3B,IAAA,2BAAO,EAAC,kEAAkE,CAAC;KAC5E;IACD;QACE,IAAA,2BAAO,EAAC,UAAU,CAAC;QACnB,IAAA,2BAAO,EAAC,0DAA0D,CAAC;QACnE,IAAA,2BAAO,EAAC,kEAAkE,CAAC;KAC5E;IACD;QACE,IAAA,2BAAO,EAA
C,0CAA0C,CAAC;QACnD,IAAA,2BAAO,EACL,sGAAsG,CACvG;QACD,IAAA,2BAAO,EAAC,kEAAkE,CAAC;KAC5E;IACD;QACE,IAAA,2BAAO,EAAC,oDAAoD,CAAC;QAC7D,IAAA,2BAAO,EACL,sGAAsG,CACvG;QACD,IAAA,2BAAO,EAAC,kEAAkE,CAAC;KAC5E;IACD;QACE,IAAA,2BAAO,EACL,wQAAwQ,CACzQ;QACD,IAAA,2BAAO,EACL,8GAA8G,CAC/G;QACD,IAAA,2BAAO,EAAC,kEAAkE,CAAC;KAC5E;IACD;QACE,IAAA,2BAAO,EACL,wQAAwQ,CACzQ;QACD,IAAA,2BAAO,EACL,kTAAkT,CACnT;QACD,IAAA,2BAAO,EAAC,kEAAkE,CAAC;KAC5E;CACF,CAAC"} \ No newline at end of file diff --git a/amplify/functions/deleteDocument/node_modules/@aws-crypto/sha256-js/build/module/RawSha256.d.ts b/amplify/functions/deleteDocument/node_modules/@aws-crypto/sha256-js/build/module/RawSha256.d.ts new file mode 100644 index 0000000..1f580b2 --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@aws-crypto/sha256-js/build/module/RawSha256.d.ts @@ -0,0 +1,17 @@ +/** + * @internal + */ +export declare class RawSha256 { + private state; + private temp; + private buffer; + private bufferLength; + private bytesHashed; + /** + * @internal + */ + finished: boolean; + update(data: Uint8Array): void; + digest(): Uint8Array; + private hashBuffer; +} diff --git a/amplify/functions/deleteDocument/node_modules/@aws-crypto/sha256-js/build/module/RawSha256.js b/amplify/functions/deleteDocument/node_modules/@aws-crypto/sha256-js/build/module/RawSha256.js new file mode 100644 index 0000000..f799acd --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@aws-crypto/sha256-js/build/module/RawSha256.js @@ -0,0 +1,121 @@ +import { BLOCK_SIZE, DIGEST_LENGTH, INIT, KEY, MAX_HASHABLE_LENGTH } from "./constants"; +/** + * @internal + */ +var RawSha256 = /** @class */ (function () { + function RawSha256() { + this.state = Int32Array.from(INIT); + this.temp = new Int32Array(64); + this.buffer = new Uint8Array(64); + this.bufferLength = 0; + this.bytesHashed = 0; + /** + * @internal + */ + this.finished = false; + } + RawSha256.prototype.update = function (data) { + if (this.finished) { + throw new 
Error("Attempted to update an already finished hash."); + } + var position = 0; + var byteLength = data.byteLength; + this.bytesHashed += byteLength; + if (this.bytesHashed * 8 > MAX_HASHABLE_LENGTH) { + throw new Error("Cannot hash more than 2^53 - 1 bits"); + } + while (byteLength > 0) { + this.buffer[this.bufferLength++] = data[position++]; + byteLength--; + if (this.bufferLength === BLOCK_SIZE) { + this.hashBuffer(); + this.bufferLength = 0; + } + } + }; + RawSha256.prototype.digest = function () { + if (!this.finished) { + var bitsHashed = this.bytesHashed * 8; + var bufferView = new DataView(this.buffer.buffer, this.buffer.byteOffset, this.buffer.byteLength); + var undecoratedLength = this.bufferLength; + bufferView.setUint8(this.bufferLength++, 0x80); + // Ensure the final block has enough room for the hashed length + if (undecoratedLength % BLOCK_SIZE >= BLOCK_SIZE - 8) { + for (var i = this.bufferLength; i < BLOCK_SIZE; i++) { + bufferView.setUint8(i, 0); + } + this.hashBuffer(); + this.bufferLength = 0; + } + for (var i = this.bufferLength; i < BLOCK_SIZE - 8; i++) { + bufferView.setUint8(i, 0); + } + bufferView.setUint32(BLOCK_SIZE - 8, Math.floor(bitsHashed / 0x100000000), true); + bufferView.setUint32(BLOCK_SIZE - 4, bitsHashed); + this.hashBuffer(); + this.finished = true; + } + // The value in state is little-endian rather than big-endian, so flip + // each word into a new Uint8Array + var out = new Uint8Array(DIGEST_LENGTH); + for (var i = 0; i < 8; i++) { + out[i * 4] = (this.state[i] >>> 24) & 0xff; + out[i * 4 + 1] = (this.state[i] >>> 16) & 0xff; + out[i * 4 + 2] = (this.state[i] >>> 8) & 0xff; + out[i * 4 + 3] = (this.state[i] >>> 0) & 0xff; + } + return out; + }; + RawSha256.prototype.hashBuffer = function () { + var _a = this, buffer = _a.buffer, state = _a.state; + var state0 = state[0], state1 = state[1], state2 = state[2], state3 = state[3], state4 = state[4], state5 = state[5], state6 = state[6], state7 = state[7]; + for (var i = 0; i < 
BLOCK_SIZE; i++) { + if (i < 16) { + this.temp[i] = + ((buffer[i * 4] & 0xff) << 24) | + ((buffer[i * 4 + 1] & 0xff) << 16) | + ((buffer[i * 4 + 2] & 0xff) << 8) | + (buffer[i * 4 + 3] & 0xff); + } + else { + var u = this.temp[i - 2]; + var t1_1 = ((u >>> 17) | (u << 15)) ^ ((u >>> 19) | (u << 13)) ^ (u >>> 10); + u = this.temp[i - 15]; + var t2_1 = ((u >>> 7) | (u << 25)) ^ ((u >>> 18) | (u << 14)) ^ (u >>> 3); + this.temp[i] = + ((t1_1 + this.temp[i - 7]) | 0) + ((t2_1 + this.temp[i - 16]) | 0); + } + var t1 = ((((((state4 >>> 6) | (state4 << 26)) ^ + ((state4 >>> 11) | (state4 << 21)) ^ + ((state4 >>> 25) | (state4 << 7))) + + ((state4 & state5) ^ (~state4 & state6))) | + 0) + + ((state7 + ((KEY[i] + this.temp[i]) | 0)) | 0)) | + 0; + var t2 = ((((state0 >>> 2) | (state0 << 30)) ^ + ((state0 >>> 13) | (state0 << 19)) ^ + ((state0 >>> 22) | (state0 << 10))) + + ((state0 & state1) ^ (state0 & state2) ^ (state1 & state2))) | + 0; + state7 = state6; + state6 = state5; + state5 = state4; + state4 = (state3 + t1) | 0; + state3 = state2; + state2 = state1; + state1 = state0; + state0 = (t1 + t2) | 0; + } + state[0] += state0; + state[1] += state1; + state[2] += state2; + state[3] += state3; + state[4] += state4; + state[5] += state5; + state[6] += state6; + state[7] += state7; + }; + return RawSha256; +}()); +export { RawSha256 }; +//# sourceMappingURL=RawSha256.js.map \ No newline at end of file diff --git a/amplify/functions/deleteDocument/node_modules/@aws-crypto/sha256-js/build/module/RawSha256.js.map b/amplify/functions/deleteDocument/node_modules/@aws-crypto/sha256-js/build/module/RawSha256.js.map new file mode 100644 index 0000000..c4d50a9 --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@aws-crypto/sha256-js/build/module/RawSha256.js.map @@ -0,0 +1 @@ 
+{"version":3,"file":"RawSha256.js","sourceRoot":"","sources":["../../src/RawSha256.ts"],"names":[],"mappings":"AAAA,OAAO,EACL,UAAU,EACV,aAAa,EACb,IAAI,EACJ,GAAG,EACH,mBAAmB,EACpB,MAAM,aAAa,CAAC;AAErB;;GAEG;AACH;IAAA;QACU,UAAK,GAAe,UAAU,CAAC,IAAI,CAAC,IAAI,CAAC,CAAC;QAC1C,SAAI,GAAe,IAAI,UAAU,CAAC,EAAE,CAAC,CAAC;QACtC,WAAM,GAAe,IAAI,UAAU,CAAC,EAAE,CAAC,CAAC;QACxC,iBAAY,GAAW,CAAC,CAAC;QACzB,gBAAW,GAAW,CAAC,CAAC;QAEhC;;WAEG;QACH,aAAQ,GAAY,KAAK,CAAC;IA8I5B,CAAC;IA5IC,0BAAM,GAAN,UAAO,IAAgB;QACrB,IAAI,IAAI,CAAC,QAAQ,EAAE;YACjB,MAAM,IAAI,KAAK,CAAC,+CAA+C,CAAC,CAAC;SAClE;QAED,IAAI,QAAQ,GAAG,CAAC,CAAC;QACX,IAAA,UAAU,GAAK,IAAI,WAAT,CAAU;QAC1B,IAAI,CAAC,WAAW,IAAI,UAAU,CAAC;QAE/B,IAAI,IAAI,CAAC,WAAW,GAAG,CAAC,GAAG,mBAAmB,EAAE;YAC9C,MAAM,IAAI,KAAK,CAAC,qCAAqC,CAAC,CAAC;SACxD;QAED,OAAO,UAAU,GAAG,CAAC,EAAE;YACrB,IAAI,CAAC,MAAM,CAAC,IAAI,CAAC,YAAY,EAAE,CAAC,GAAG,IAAI,CAAC,QAAQ,EAAE,CAAC,CAAC;YACpD,UAAU,EAAE,CAAC;YAEb,IAAI,IAAI,CAAC,YAAY,KAAK,UAAU,EAAE;gBACpC,IAAI,CAAC,UAAU,EAAE,CAAC;gBAClB,IAAI,CAAC,YAAY,GAAG,CAAC,CAAC;aACvB;SACF;IACH,CAAC;IAED,0BAAM,GAAN;QACE,IAAI,CAAC,IAAI,CAAC,QAAQ,EAAE;YAClB,IAAM,UAAU,GAAG,IAAI,CAAC,WAAW,GAAG,CAAC,CAAC;YACxC,IAAM,UAAU,GAAG,IAAI,QAAQ,CAC7B,IAAI,CAAC,MAAM,CAAC,MAAM,EAClB,IAAI,CAAC,MAAM,CAAC,UAAU,EACtB,IAAI,CAAC,MAAM,CAAC,UAAU,CACvB,CAAC;YAEF,IAAM,iBAAiB,GAAG,IAAI,CAAC,YAAY,CAAC;YAC5C,UAAU,CAAC,QAAQ,CAAC,IAAI,CAAC,YAAY,EAAE,EAAE,IAAI,CAAC,CAAC;YAE/C,+DAA+D;YAC/D,IAAI,iBAAiB,GAAG,UAAU,IAAI,UAAU,GAAG,CAAC,EAAE;gBACpD,KAAK,IAAI,CAAC,GAAG,IAAI,CAAC,YAAY,EAAE,CAAC,GAAG,UAAU,EAAE,CAAC,EAAE,EAAE;oBACnD,UAAU,CAAC,QAAQ,CAAC,CAAC,EAAE,CAAC,CAAC,CAAC;iBAC3B;gBACD,IAAI,CAAC,UAAU,EAAE,CAAC;gBAClB,IAAI,CAAC,YAAY,GAAG,CAAC,CAAC;aACvB;YAED,KAAK,IAAI,CAAC,GAAG,IAAI,CAAC,YAAY,EAAE,CAAC,GAAG,UAAU,GAAG,CAAC,EAAE,CAAC,EAAE,EAAE;gBACvD,UAAU,CAAC,QAAQ,CAAC,CAAC,EAAE,CAAC,CAAC,CAAC;aAC3B;YACD,UAAU,CAAC,SAAS,CAClB,UAAU,GAAG,CAAC,EACd,IAAI,CAAC,KAAK,CAAC,UAAU,GAAG,WAAW,CAAC,EACpC,IAAI,CACL,CAAC;YACF,UAAU,CAAC,SAAS,CAAC,UAAU,GAAG,CAAC,EAAE,UAAU,CAAC,CAAC;YAEjD,IAAI,CAAC,UAA
U,EAAE,CAAC;YAElB,IAAI,CAAC,QAAQ,GAAG,IAAI,CAAC;SACtB;QAED,sEAAsE;QACtE,kCAAkC;QAClC,IAAM,GAAG,GAAG,IAAI,UAAU,CAAC,aAAa,CAAC,CAAC;QAC1C,KAAK,IAAI,CAAC,GAAG,CAAC,EAAE,CAAC,GAAG,CAAC,EAAE,CAAC,EAAE,EAAE;YAC1B,GAAG,CAAC,CAAC,GAAG,CAAC,CAAC,GAAG,CAAC,IAAI,CAAC,KAAK,CAAC,CAAC,CAAC,KAAK,EAAE,CAAC,GAAG,IAAI,CAAC;YAC3C,GAAG,CAAC,CAAC,GAAG,CAAC,GAAG,CAAC,CAAC,GAAG,CAAC,IAAI,CAAC,KAAK,CAAC,CAAC,CAAC,KAAK,EAAE,CAAC,GAAG,IAAI,CAAC;YAC/C,GAAG,CAAC,CAAC,GAAG,CAAC,GAAG,CAAC,CAAC,GAAG,CAAC,IAAI,CAAC,KAAK,CAAC,CAAC,CAAC,KAAK,CAAC,CAAC,GAAG,IAAI,CAAC;YAC9C,GAAG,CAAC,CAAC,GAAG,CAAC,GAAG,CAAC,CAAC,GAAG,CAAC,IAAI,CAAC,KAAK,CAAC,CAAC,CAAC,KAAK,CAAC,CAAC,GAAG,IAAI,CAAC;SAC/C;QAED,OAAO,GAAG,CAAC;IACb,CAAC;IAEO,8BAAU,GAAlB;QACQ,IAAA,KAAoB,IAAI,EAAtB,MAAM,YAAA,EAAE,KAAK,WAAS,CAAC;QAE/B,IAAI,MAAM,GAAG,KAAK,CAAC,CAAC,CAAC,EACnB,MAAM,GAAG,KAAK,CAAC,CAAC,CAAC,EACjB,MAAM,GAAG,KAAK,CAAC,CAAC,CAAC,EACjB,MAAM,GAAG,KAAK,CAAC,CAAC,CAAC,EACjB,MAAM,GAAG,KAAK,CAAC,CAAC,CAAC,EACjB,MAAM,GAAG,KAAK,CAAC,CAAC,CAAC,EACjB,MAAM,GAAG,KAAK,CAAC,CAAC,CAAC,EACjB,MAAM,GAAG,KAAK,CAAC,CAAC,CAAC,CAAC;QAEpB,KAAK,IAAI,CAAC,GAAG,CAAC,EAAE,CAAC,GAAG,UAAU,EAAE,CAAC,EAAE,EAAE;YACnC,IAAI,CAAC,GAAG,EAAE,EAAE;gBACV,IAAI,CAAC,IAAI,CAAC,CAAC,CAAC;oBACV,CAAC,CAAC,MAAM,CAAC,CAAC,GAAG,CAAC,CAAC,GAAG,IAAI,CAAC,IAAI,EAAE,CAAC;wBAC9B,CAAC,CAAC,MAAM,CAAC,CAAC,GAAG,CAAC,GAAG,CAAC,CAAC,GAAG,IAAI,CAAC,IAAI,EAAE,CAAC;wBAClC,CAAC,CAAC,MAAM,CAAC,CAAC,GAAG,CAAC,GAAG,CAAC,CAAC,GAAG,IAAI,CAAC,IAAI,CAAC,CAAC;wBACjC,CAAC,MAAM,CAAC,CAAC,GAAG,CAAC,GAAG,CAAC,CAAC,GAAG,IAAI,CAAC,CAAC;aAC9B;iBAAM;gBACL,IAAI,CAAC,GAAG,IAAI,CAAC,IAAI,CAAC,CAAC,GAAG,CAAC,CAAC,CAAC;gBACzB,IAAM,IAAE,GACN,CAAC,CAAC,CAAC,KAAK,EAAE,CAAC,GAAG,CAAC,CAAC,IAAI,EAAE,CAAC,CAAC,GAAG,CAAC,CAAC,CAAC,KAAK,EAAE,CAAC,GAAG,CAAC,CAAC,IAAI,EAAE,CAAC,CAAC,GAAG,CAAC,CAAC,KAAK,EAAE,CAAC,CAAC;gBAEnE,CAAC,GAAG,IAAI,CAAC,IAAI,CAAC,CAAC,GAAG,EAAE,CAAC,CAAC;gBACtB,IAAM,IAAE,GACN,CAAC,CAAC,CAAC,KAAK,CAAC,CAAC,GAAG,CAAC,CAAC,IAAI,EAAE,CAAC,CAAC,GAAG,CAAC,CAAC,CAAC,KAAK,EAAE,CAAC,GAAG,CAAC,CAAC,IAAI,EAA
E,CAAC,CAAC,GAAG,CAAC,CAAC,KAAK,CAAC,CAAC,CAAC;gBAEjE,IAAI,CAAC,IAAI,CAAC,CAAC,CAAC;oBACV,CAAC,CAAC,IAAE,GAAG,IAAI,CAAC,IAAI,CAAC,CAAC,GAAG,CAAC,CAAC,CAAC,GAAG,CAAC,CAAC,GAAG,CAAC,CAAC,IAAE,GAAG,IAAI,CAAC,IAAI,CAAC,CAAC,GAAG,EAAE,CAAC,CAAC,GAAG,CAAC,CAAC,CAAC;aAClE;YAED,IAAM,EAAE,GACN,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,MAAM,KAAK,CAAC,CAAC,GAAG,CAAC,MAAM,IAAI,EAAE,CAAC,CAAC;gBACnC,CAAC,CAAC,MAAM,KAAK,EAAE,CAAC,GAAG,CAAC,MAAM,IAAI,EAAE,CAAC,CAAC;gBAClC,CAAC,CAAC,MAAM,KAAK,EAAE,CAAC,GAAG,CAAC,MAAM,IAAI,CAAC,CAAC,CAAC,CAAC;gBAClC,CAAC,CAAC,MAAM,GAAG,MAAM,CAAC,GAAG,CAAC,CAAC,MAAM,GAAG,MAAM,CAAC,CAAC,CAAC;gBACzC,CAAC,CAAC;gBACF,CAAC,CAAC,MAAM,GAAG,CAAC,CAAC,GAAG,CAAC,CAAC,CAAC,GAAG,IAAI,CAAC,IAAI,CAAC,CAAC,CAAC,CAAC,GAAG,CAAC,CAAC,CAAC,GAAG,CAAC,CAAC,CAAC;gBACjD,CAAC,CAAC;YAEJ,IAAM,EAAE,GACN,CAAC,CAAC,CAAC,CAAC,MAAM,KAAK,CAAC,CAAC,GAAG,CAAC,MAAM,IAAI,EAAE,CAAC,CAAC;gBACjC,CAAC,CAAC,MAAM,KAAK,EAAE,CAAC,GAAG,CAAC,MAAM,IAAI,EAAE,CAAC,CAAC;gBAClC,CAAC,CAAC,MAAM,KAAK,EAAE,CAAC,GAAG,CAAC,MAAM,IAAI,EAAE,CAAC,CAAC,CAAC;gBACnC,CAAC,CAAC,MAAM,GAAG,MAAM,CAAC,GAAG,CAAC,MAAM,GAAG,MAAM,CAAC,GAAG,CAAC,MAAM,GAAG,MAAM,CAAC,CAAC,CAAC;gBAC9D,CAAC,CAAC;YAEJ,MAAM,GAAG,MAAM,CAAC;YAChB,MAAM,GAAG,MAAM,CAAC;YAChB,MAAM,GAAG,MAAM,CAAC;YAChB,MAAM,GAAG,CAAC,MAAM,GAAG,EAAE,CAAC,GAAG,CAAC,CAAC;YAC3B,MAAM,GAAG,MAAM,CAAC;YAChB,MAAM,GAAG,MAAM,CAAC;YAChB,MAAM,GAAG,MAAM,CAAC;YAChB,MAAM,GAAG,CAAC,EAAE,GAAG,EAAE,CAAC,GAAG,CAAC,CAAC;SACxB;QAED,KAAK,CAAC,CAAC,CAAC,IAAI,MAAM,CAAC;QACnB,KAAK,CAAC,CAAC,CAAC,IAAI,MAAM,CAAC;QACnB,KAAK,CAAC,CAAC,CAAC,IAAI,MAAM,CAAC;QACnB,KAAK,CAAC,CAAC,CAAC,IAAI,MAAM,CAAC;QACnB,KAAK,CAAC,CAAC,CAAC,IAAI,MAAM,CAAC;QACnB,KAAK,CAAC,CAAC,CAAC,IAAI,MAAM,CAAC;QACnB,KAAK,CAAC,CAAC,CAAC,IAAI,MAAM,CAAC;QACnB,KAAK,CAAC,CAAC,CAAC,IAAI,MAAM,CAAC;IACrB,CAAC;IACH,gBAAC;AAAD,CAAC,AAxJD,IAwJC"} \ No newline at end of file diff --git a/amplify/functions/deleteDocument/node_modules/@aws-crypto/sha256-js/build/module/constants.d.ts 
b/amplify/functions/deleteDocument/node_modules/@aws-crypto/sha256-js/build/module/constants.d.ts new file mode 100644 index 0000000..63bd764 --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@aws-crypto/sha256-js/build/module/constants.d.ts @@ -0,0 +1,20 @@ +/** + * @internal + */ +export declare const BLOCK_SIZE: number; +/** + * @internal + */ +export declare const DIGEST_LENGTH: number; +/** + * @internal + */ +export declare const KEY: Uint32Array; +/** + * @internal + */ +export declare const INIT: number[]; +/** + * @internal + */ +export declare const MAX_HASHABLE_LENGTH: number; diff --git a/amplify/functions/deleteDocument/node_modules/@aws-crypto/sha256-js/build/module/constants.js b/amplify/functions/deleteDocument/node_modules/@aws-crypto/sha256-js/build/module/constants.js new file mode 100644 index 0000000..68037b3 --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@aws-crypto/sha256-js/build/module/constants.js @@ -0,0 +1,95 @@ +/** + * @internal + */ +export var BLOCK_SIZE = 64; +/** + * @internal + */ +export var DIGEST_LENGTH = 32; +/** + * @internal + */ +export var KEY = new Uint32Array([ + 0x428a2f98, + 0x71374491, + 0xb5c0fbcf, + 0xe9b5dba5, + 0x3956c25b, + 0x59f111f1, + 0x923f82a4, + 0xab1c5ed5, + 0xd807aa98, + 0x12835b01, + 0x243185be, + 0x550c7dc3, + 0x72be5d74, + 0x80deb1fe, + 0x9bdc06a7, + 0xc19bf174, + 0xe49b69c1, + 0xefbe4786, + 0x0fc19dc6, + 0x240ca1cc, + 0x2de92c6f, + 0x4a7484aa, + 0x5cb0a9dc, + 0x76f988da, + 0x983e5152, + 0xa831c66d, + 0xb00327c8, + 0xbf597fc7, + 0xc6e00bf3, + 0xd5a79147, + 0x06ca6351, + 0x14292967, + 0x27b70a85, + 0x2e1b2138, + 0x4d2c6dfc, + 0x53380d13, + 0x650a7354, + 0x766a0abb, + 0x81c2c92e, + 0x92722c85, + 0xa2bfe8a1, + 0xa81a664b, + 0xc24b8b70, + 0xc76c51a3, + 0xd192e819, + 0xd6990624, + 0xf40e3585, + 0x106aa070, + 0x19a4c116, + 0x1e376c08, + 0x2748774c, + 0x34b0bcb5, + 0x391c0cb3, + 0x4ed8aa4a, + 0x5b9cca4f, + 0x682e6ff3, + 0x748f82ee, + 0x78a5636f, + 0x84c87814, + 0x8cc70208, + 
0x90befffa, + 0xa4506ceb, + 0xbef9a3f7, + 0xc67178f2 +]); +/** + * @internal + */ +export var INIT = [ + 0x6a09e667, + 0xbb67ae85, + 0x3c6ef372, + 0xa54ff53a, + 0x510e527f, + 0x9b05688c, + 0x1f83d9ab, + 0x5be0cd19 +]; +/** + * @internal + */ +export var MAX_HASHABLE_LENGTH = Math.pow(2, 53) - 1; +//# sourceMappingURL=constants.js.map \ No newline at end of file diff --git a/amplify/functions/deleteDocument/node_modules/@aws-crypto/sha256-js/build/module/constants.js.map b/amplify/functions/deleteDocument/node_modules/@aws-crypto/sha256-js/build/module/constants.js.map new file mode 100644 index 0000000..6c93089 --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@aws-crypto/sha256-js/build/module/constants.js.map @@ -0,0 +1 @@ +{"version":3,"file":"constants.js","sourceRoot":"","sources":["../../src/constants.ts"],"names":[],"mappings":"AAAA;;GAEG;AACH,MAAM,CAAC,IAAM,UAAU,GAAW,EAAE,CAAC;AAErC;;GAEG;AACH,MAAM,CAAC,IAAM,aAAa,GAAW,EAAE,CAAC;AAExC;;GAEG;AACH,MAAM,CAAC,IAAM,GAAG,GAAG,IAAI,WAAW,CAAC;IACjC,UAAU;IACV,UAAU;IACV,UAAU;IACV,UAAU;IACV,UAAU;IACV,UAAU;IACV,UAAU;IACV,UAAU;IACV,UAAU;IACV,UAAU;IACV,UAAU;IACV,UAAU;IACV,UAAU;IACV,UAAU;IACV,UAAU;IACV,UAAU;IACV,UAAU;IACV,UAAU;IACV,UAAU;IACV,UAAU;IACV,UAAU;IACV,UAAU;IACV,UAAU;IACV,UAAU;IACV,UAAU;IACV,UAAU;IACV,UAAU;IACV,UAAU;IACV,UAAU;IACV,UAAU;IACV,UAAU;IACV,UAAU;IACV,UAAU;IACV,UAAU;IACV,UAAU;IACV,UAAU;IACV,UAAU;IACV,UAAU;IACV,UAAU;IACV,UAAU;IACV,UAAU;IACV,UAAU;IACV,UAAU;IACV,UAAU;IACV,UAAU;IACV,UAAU;IACV,UAAU;IACV,UAAU;IACV,UAAU;IACV,UAAU;IACV,UAAU;IACV,UAAU;IACV,UAAU;IACV,UAAU;IACV,UAAU;IACV,UAAU;IACV,UAAU;IACV,UAAU;IACV,UAAU;IACV,UAAU;IACV,UAAU;IACV,UAAU;IACV,UAAU;IACV,UAAU;CACX,CAAC,CAAC;AAEH;;GAEG;AACH,MAAM,CAAC,IAAM,IAAI,GAAG;IAClB,UAAU;IACV,UAAU;IACV,UAAU;IACV,UAAU;IACV,UAAU;IACV,UAAU;IACV,UAAU;IACV,UAAU;CACX,CAAC;AAEF;;GAEG;AACH,MAAM,CAAC,IAAM,mBAAmB,GAAG,SAAA,CAAC,EAAI,EAAE,CAAA,GAAG,CAAC,CAAC"} \ No newline at end of file diff --git 
a/amplify/functions/deleteDocument/node_modules/@aws-crypto/sha256-js/build/module/index.d.ts b/amplify/functions/deleteDocument/node_modules/@aws-crypto/sha256-js/build/module/index.d.ts new file mode 100644 index 0000000..4554d8a --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@aws-crypto/sha256-js/build/module/index.d.ts @@ -0,0 +1 @@ +export * from "./jsSha256"; diff --git a/amplify/functions/deleteDocument/node_modules/@aws-crypto/sha256-js/build/module/index.js b/amplify/functions/deleteDocument/node_modules/@aws-crypto/sha256-js/build/module/index.js new file mode 100644 index 0000000..a8f73a0 --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@aws-crypto/sha256-js/build/module/index.js @@ -0,0 +1,2 @@ +export * from "./jsSha256"; +//# sourceMappingURL=index.js.map \ No newline at end of file diff --git a/amplify/functions/deleteDocument/node_modules/@aws-crypto/sha256-js/build/module/index.js.map b/amplify/functions/deleteDocument/node_modules/@aws-crypto/sha256-js/build/module/index.js.map new file mode 100644 index 0000000..030d795 --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@aws-crypto/sha256-js/build/module/index.js.map @@ -0,0 +1 @@ +{"version":3,"file":"index.js","sourceRoot":"","sources":["../../src/index.ts"],"names":[],"mappings":"AAAA,cAAc,YAAY,CAAC"} \ No newline at end of file diff --git a/amplify/functions/deleteDocument/node_modules/@aws-crypto/sha256-js/build/module/jsSha256.d.ts b/amplify/functions/deleteDocument/node_modules/@aws-crypto/sha256-js/build/module/jsSha256.d.ts new file mode 100644 index 0000000..d813b25 --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@aws-crypto/sha256-js/build/module/jsSha256.d.ts @@ -0,0 +1,12 @@ +import { Checksum, SourceData } from "@aws-sdk/types"; +export declare class Sha256 implements Checksum { + private readonly secret?; + private hash; + private outer?; + private error; + constructor(secret?: SourceData); + update(toHash: 
SourceData): void; + digestSync(): Uint8Array; + digest(): Promise; + reset(): void; +} diff --git a/amplify/functions/deleteDocument/node_modules/@aws-crypto/sha256-js/build/module/jsSha256.js b/amplify/functions/deleteDocument/node_modules/@aws-crypto/sha256-js/build/module/jsSha256.js new file mode 100644 index 0000000..fa40899 --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@aws-crypto/sha256-js/build/module/jsSha256.js @@ -0,0 +1,82 @@ +import { __awaiter, __generator } from "tslib"; +import { BLOCK_SIZE } from "./constants"; +import { RawSha256 } from "./RawSha256"; +import { isEmptyData, convertToBuffer } from "@aws-crypto/util"; +var Sha256 = /** @class */ (function () { + function Sha256(secret) { + this.secret = secret; + this.hash = new RawSha256(); + this.reset(); + } + Sha256.prototype.update = function (toHash) { + if (isEmptyData(toHash) || this.error) { + return; + } + try { + this.hash.update(convertToBuffer(toHash)); + } + catch (e) { + this.error = e; + } + }; + /* This synchronous method keeps compatibility + * with the v2 aws-sdk. + */ + Sha256.prototype.digestSync = function () { + if (this.error) { + throw this.error; + } + if (this.outer) { + if (!this.outer.finished) { + this.outer.update(this.hash.digest()); + } + return this.outer.digest(); + } + return this.hash.digest(); + }; + /* The underlying digest method here is synchronous. + * To keep the same interface with the other hash functions + * the default is to expose this as an async method. + * However, it can sometimes be useful to have a sync method. 
+ */ + Sha256.prototype.digest = function () { + return __awaiter(this, void 0, void 0, function () { + return __generator(this, function (_a) { + return [2 /*return*/, this.digestSync()]; + }); + }); + }; + Sha256.prototype.reset = function () { + this.hash = new RawSha256(); + if (this.secret) { + this.outer = new RawSha256(); + var inner = bufferFromSecret(this.secret); + var outer = new Uint8Array(BLOCK_SIZE); + outer.set(inner); + for (var i = 0; i < BLOCK_SIZE; i++) { + inner[i] ^= 0x36; + outer[i] ^= 0x5c; + } + this.hash.update(inner); + this.outer.update(outer); + // overwrite the copied key in memory + for (var i = 0; i < inner.byteLength; i++) { + inner[i] = 0; + } + } + }; + return Sha256; +}()); +export { Sha256 }; +function bufferFromSecret(secret) { + var input = convertToBuffer(secret); + if (input.byteLength > BLOCK_SIZE) { + var bufferHash = new RawSha256(); + bufferHash.update(input); + input = bufferHash.digest(); + } + var buffer = new Uint8Array(BLOCK_SIZE); + buffer.set(input); + return buffer; +} +//# sourceMappingURL=jsSha256.js.map \ No newline at end of file diff --git a/amplify/functions/deleteDocument/node_modules/@aws-crypto/sha256-js/build/module/jsSha256.js.map b/amplify/functions/deleteDocument/node_modules/@aws-crypto/sha256-js/build/module/jsSha256.js.map new file mode 100644 index 0000000..94fa401 --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@aws-crypto/sha256-js/build/module/jsSha256.js.map @@ -0,0 +1 @@ 
+{"version":3,"file":"jsSha256.js","sourceRoot":"","sources":["../../src/jsSha256.ts"],"names":[],"mappings":";AAAA,OAAO,EAAE,UAAU,EAAE,MAAM,aAAa,CAAC;AACzC,OAAO,EAAE,SAAS,EAAE,MAAM,aAAa,CAAC;AAExC,OAAO,EAAE,WAAW,EAAE,eAAe,EAAE,MAAM,kBAAkB,CAAC;AAEhE;IAME,gBAAY,MAAmB;QAC7B,IAAI,CAAC,MAAM,GAAG,MAAM,CAAC;QACrB,IAAI,CAAC,IAAI,GAAG,IAAI,SAAS,EAAE,CAAC;QAC5B,IAAI,CAAC,KAAK,EAAE,CAAC;IACf,CAAC;IAED,uBAAM,GAAN,UAAO,MAAkB;QACvB,IAAI,WAAW,CAAC,MAAM,CAAC,IAAI,IAAI,CAAC,KAAK,EAAE;YACrC,OAAO;SACR;QAED,IAAI;YACF,IAAI,CAAC,IAAI,CAAC,MAAM,CAAC,eAAe,CAAC,MAAM,CAAC,CAAC,CAAC;SAC3C;QAAC,OAAO,CAAC,EAAE;YACV,IAAI,CAAC,KAAK,GAAG,CAAC,CAAC;SAChB;IACH,CAAC;IAED;;OAEG;IACH,2BAAU,GAAV;QACE,IAAI,IAAI,CAAC,KAAK,EAAE;YACd,MAAM,IAAI,CAAC,KAAK,CAAC;SAClB;QAED,IAAI,IAAI,CAAC,KAAK,EAAE;YACd,IAAI,CAAC,IAAI,CAAC,KAAK,CAAC,QAAQ,EAAE;gBACxB,IAAI,CAAC,KAAK,CAAC,MAAM,CAAC,IAAI,CAAC,IAAI,CAAC,MAAM,EAAE,CAAC,CAAC;aACvC;YAED,OAAO,IAAI,CAAC,KAAK,CAAC,MAAM,EAAE,CAAC;SAC5B;QAED,OAAO,IAAI,CAAC,IAAI,CAAC,MAAM,EAAE,CAAC;IAC5B,CAAC;IAED;;;;OAIG;IACG,uBAAM,GAAZ;;;gBACE,sBAAO,IAAI,CAAC,UAAU,EAAE,EAAC;;;KAC1B;IAED,sBAAK,GAAL;QACE,IAAI,CAAC,IAAI,GAAG,IAAI,SAAS,EAAE,CAAC;QAC5B,IAAI,IAAI,CAAC,MAAM,EAAE;YACf,IAAI,CAAC,KAAK,GAAG,IAAI,SAAS,EAAE,CAAC;YAC7B,IAAM,KAAK,GAAG,gBAAgB,CAAC,IAAI,CAAC,MAAM,CAAC,CAAC;YAC5C,IAAM,KAAK,GAAG,IAAI,UAAU,CAAC,UAAU,CAAC,CAAC;YACzC,KAAK,CAAC,GAAG,CAAC,KAAK,CAAC,CAAC;YAEjB,KAAK,IAAI,CAAC,GAAG,CAAC,EAAE,CAAC,GAAG,UAAU,EAAE,CAAC,EAAE,EAAE;gBACnC,KAAK,CAAC,CAAC,CAAC,IAAI,IAAI,CAAC;gBACjB,KAAK,CAAC,CAAC,CAAC,IAAI,IAAI,CAAC;aAClB;YAED,IAAI,CAAC,IAAI,CAAC,MAAM,CAAC,KAAK,CAAC,CAAC;YACxB,IAAI,CAAC,KAAK,CAAC,MAAM,CAAC,KAAK,CAAC,CAAC;YAEzB,qCAAqC;YACrC,KAAK,IAAI,CAAC,GAAG,CAAC,EAAE,CAAC,GAAG,KAAK,CAAC,UAAU,EAAE,CAAC,EAAE,EAAE;gBACzC,KAAK,CAAC,CAAC,CAAC,GAAG,CAAC,CAAC;aACd;SACF;IACH,CAAC;IACH,aAAC;AAAD,CAAC,AA1ED,IA0EC;;AAED,SAAS,gBAAgB,CAAC,MAAkB;IAC1C,IAAI,KAAK,GAAG,eAAe,CAAC,MAAM,CAAC,CAAC;IAEpC,IAAI,KAAK,CAAC,UAAU,GAAG,UAAU,EAAE;QACjC,IAAM,UAAU,GAAG,IAAI,SAAS,EAAE,CAAC;QACnC,UAAU,CAAC,MAAM,CAAC,KAAK
,CAAC,CAAC;QACzB,KAAK,GAAG,UAAU,CAAC,MAAM,EAAE,CAAC;KAC7B;IAED,IAAM,MAAM,GAAG,IAAI,UAAU,CAAC,UAAU,CAAC,CAAC;IAC1C,MAAM,CAAC,GAAG,CAAC,KAAK,CAAC,CAAC;IAClB,OAAO,MAAM,CAAC;AAChB,CAAC"} \ No newline at end of file diff --git a/amplify/functions/deleteDocument/node_modules/@aws-crypto/sha256-js/build/module/knownHashes.fixture.d.ts b/amplify/functions/deleteDocument/node_modules/@aws-crypto/sha256-js/build/module/knownHashes.fixture.d.ts new file mode 100644 index 0000000..d880343 --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@aws-crypto/sha256-js/build/module/knownHashes.fixture.d.ts @@ -0,0 +1,5 @@ +export declare const hashTestVectors: Array<[Uint8Array, Uint8Array]>; +/** + * @see https://tools.ietf.org/html/rfc4231 + */ +export declare const hmacTestVectors: Array<[Uint8Array, Uint8Array, Uint8Array]>; diff --git a/amplify/functions/deleteDocument/node_modules/@aws-crypto/sha256-js/build/module/knownHashes.fixture.js b/amplify/functions/deleteDocument/node_modules/@aws-crypto/sha256-js/build/module/knownHashes.fixture.js new file mode 100644 index 0000000..c2d2663 --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@aws-crypto/sha256-js/build/module/knownHashes.fixture.js @@ -0,0 +1,319 @@ +import { fromHex } from "@aws-sdk/util-hex-encoding"; +var millionChars = new Uint8Array(1000000); +for (var i = 0; i < 1000000; i++) { + millionChars[i] = 97; +} +export var hashTestVectors = [ + [ + Uint8Array.from([97, 98, 99]), + fromHex("ba7816bf8f01cfea414140de5dae2223b00361a396177a9cb410ff61f20015ad") + ], + [ + new Uint8Array(0), + fromHex("e3b0c44298fc1c149afbf4c8996fb92427ae41e4649b934ca495991b7852b855") + ], + [ + fromHex("61"), + fromHex("ca978112ca1bbdcafac231b39a23dc4da786eff8147c4e72b9807785afee48bb") + ], + [ + fromHex("6161"), + fromHex("961b6dd3ede3cb8ecbaacbd68de040cd78eb2ed5889130cceb4c49268ea4d506") + ], + [ + fromHex("616161"), + fromHex("9834876dcfb05cb167a5c24953eba58c4ac89b1adf57f28f2f9d09af107ee8f0") + ], + [ + 
fromHex("61616161"), + fromHex("61be55a8e2f6b4e172338bddf184d6dbee29c98853e0a0485ecee7f27b9af0b4") + ], + [ + fromHex("6161616161"), + fromHex("ed968e840d10d2d313a870bc131a4e2c311d7ad09bdf32b3418147221f51a6e2") + ], + [ + fromHex("616161616161"), + fromHex("ed02457b5c41d964dbd2f2a609d63fe1bb7528dbe55e1abf5b52c249cd735797") + ], + [ + fromHex("61616161616161"), + fromHex("e46240714b5db3a23eee60479a623efba4d633d27fe4f03c904b9e219a7fbe60") + ], + [ + fromHex("6161616161616161"), + fromHex("1f3ce40415a2081fa3eee75fc39fff8e56c22270d1a978a7249b592dcebd20b4") + ], + [ + fromHex("616161616161616161"), + fromHex("f2aca93b80cae681221f0445fa4e2cae8a1f9f8fa1e1741d9639caad222f537d") + ], + [ + fromHex("61616161616161616161"), + fromHex("bf2cb58a68f684d95a3b78ef8f661c9a4e5b09e82cc8f9cc88cce90528caeb27") + ], + [ + fromHex("6161616161616161616161"), + fromHex("28cb017dfc99073aa1b47c1b30f413e3ce774c4991eb4158de50f9dbb36d8043") + ], + [ + fromHex("616161616161616161616161"), + fromHex("f24abc34b13fade76e805799f71187da6cd90b9cac373ae65ed57f143bd664e5") + ], + [ + fromHex("61616161616161616161616161"), + fromHex("a689d786e81340e45511dec6c7ab2d978434e5db123362450fe10cfac70d19d0") + ], + [ + fromHex("6161616161616161616161616161"), + fromHex("82cab7df0abfb9d95dca4e5937ce2968c798c726fea48c016bf9763221efda13") + ], + [ + fromHex("616161616161616161616161616161"), + fromHex("ef2df0b539c6c23de0f4cbe42648c301ae0e22e887340a4599fb4ef4e2678e48") + ], + [ + fromHex("61616161616161616161616161616161"), + fromHex("0c0beacef8877bbf2416eb00f2b5dc96354e26dd1df5517320459b1236860f8c") + ], + [ + fromHex("6161616161616161616161616161616161"), + fromHex("b860666ee2966dd8f903be44ee605c6e1366f926d9f17a8f49937d11624eb99d") + ], + [ + fromHex("616161616161616161616161616161616161"), + fromHex("c926defaaa3d13eda2fc63a553bb7fb7326bece6e7cb67ca5296e4727d89bab4") + ], + [ + fromHex("61616161616161616161616161616161616161"), + fromHex("a0b4aaab8a966e2193ba172d68162c4656860197f256b5f45f0203397ff3f99c") + ], + [ + 
fromHex("6161616161616161616161616161616161616161"), + fromHex("42492da06234ad0ac76f5d5debdb6d1ae027cffbe746a1c13b89bb8bc0139137") + ], + [ + fromHex("616161616161616161616161616161616161616161"), + fromHex("7df8e299c834de198e264c3e374bc58ecd9382252a705c183beb02f275571e3b") + ], + [ + fromHex("61616161616161616161616161616161616161616161"), + fromHex("ec7c494df6d2a7ea36668d656e6b8979e33641bfea378c15038af3964db057a3") + ], + [ + fromHex("6161616161616161616161616161616161616161616161"), + fromHex("897d3e95b65f26676081f8b9f3a98b6ee4424566303e8d4e7c7522ebae219eab") + ], + [ + fromHex("616161616161616161616161616161616161616161616161"), + fromHex("09f61f8d9cd65e6a0c258087c485b6293541364e42bd97b2d7936580c8aa3c54") + ], + [ + fromHex("61616161616161616161616161616161616161616161616161"), + fromHex("2f521e2a7d0bd812cbc035f4ed6806eb8d851793b04ba147e8f66b72f5d1f20f") + ], + [ + fromHex("6161616161616161616161616161616161616161616161616161"), + fromHex("9976d549a25115dab4e36d0c1fb8f31cb07da87dd83275977360eb7dc09e88de") + ], + [ + fromHex("616161616161616161616161616161616161616161616161616161"), + fromHex("cc0616e61cbd6e8e5e34e9fb2d320f37de915820206f5696c31f1fbd24aa16de") + ], + [ + fromHex("61616161616161616161616161616161616161616161616161616161"), + fromHex("9c547cb8115a44883b9f70ba68f75117cd55359c92611875e386f8af98c172ab") + ], + [ + fromHex("6161616161616161616161616161616161616161616161616161616161"), + fromHex("6913c9c7fd42fe23df8b6bcd4dbaf1c17748948d97f2980b432319c39eddcf6c") + ], + [ + fromHex("616161616161616161616161616161616161616161616161616161616161"), + fromHex("3a54fc0cbc0b0ef48b6507b7788096235d10292dd3ae24e22f5aa062d4f9864a") + ], + [ + fromHex("61616161616161616161616161616161616161616161616161616161616161"), + fromHex("61c60b487d1a921e0bcc9bf853dda0fb159b30bf57b2e2d2c753b00be15b5a09") + ], + [ + fromHex("6161616161616161616161616161616161616161616161616161616161616161"), + fromHex("3ba3f5f43b92602683c19aee62a20342b084dd5971ddd33808d81a328879a547") + ], + [ 
+ fromHex("616161616161616161616161616161616161616161616161616161616161616161"), + fromHex("852785c805c77e71a22340a54e9d95933ed49121e7d2bf3c2d358854bc1359ea") + ], + [ + fromHex("61616161616161616161616161616161616161616161616161616161616161616161"), + fromHex("a27c896c4859204843166af66f0e902b9c3b3ed6d2fd13d435abc020065c526f") + ], + [ + fromHex("6161616161616161616161616161616161616161616161616161616161616161616161"), + fromHex("629362afc62c74497caed2272e30f8125ecd0965f8d8d7cfc4e260f7f8dd319d") + ], + [ + fromHex("616161616161616161616161616161616161616161616161616161616161616161616161"), + fromHex("22c1d24bcd03e9aee9832efccd6da613fc702793178e5f12c945c7b67ddda933") + ], + [ + fromHex("61616161616161616161616161616161616161616161616161616161616161616161616161"), + fromHex("21ec055b38ce759cd4d0f477e9bdec2c5b8199945db4439bae334a964df6246c") + ], + [ + fromHex("6161616161616161616161616161616161616161616161616161616161616161616161616161"), + fromHex("365a9c3e2c2af0a56e47a9dac51c2c5381bf8f41273bad3175e0e619126ad087") + ], + [ + fromHex("616161616161616161616161616161616161616161616161616161616161616161616161616161"), + fromHex("b4d5e56e929ba4cda349e9274e3603d0be246b82016bca20f363963c5f2d6845") + ], + [ + fromHex("61616161616161616161616161616161616161616161616161616161616161616161616161616161"), + fromHex("e33cdf9c7f7120b98e8c78408953e07f2ecd183006b5606df349b4c212acf43e") + ], + [ + fromHex("6161616161616161616161616161616161616161616161616161616161616161616161616161616161"), + fromHex("c0f8bd4dbc2b0c03107c1c37913f2a7501f521467f45dd0fef6958e9a4692719") + ], + [ + fromHex("616161616161616161616161616161616161616161616161616161616161616161616161616161616161"), + fromHex("7a538607fdaab9296995929f451565bbb8142e1844117322aafd2b3d76b01aff") + ], + [ + fromHex("61616161616161616161616161616161616161616161616161616161616161616161616161616161616161"), + fromHex("66d34fba71f8f450f7e45598853e53bfc23bbd129027cbb131a2f4ffd7878cd0") + ], + [ + 
fromHex("6161616161616161616161616161616161616161616161616161616161616161616161616161616161616161"), + fromHex("16849877c6c21ef0bfa68e4f6747300ddb171b170b9f00e189edc4c2fc4db93e") + ], + [ + fromHex("616161616161616161616161616161616161616161616161616161616161616161616161616161616161616161"), + fromHex("52789e3423b72beeb898456a4f49662e46b0cbb960784c5ef4b1399d327e7c27") + ], + [ + fromHex("61616161616161616161616161616161616161616161616161616161616161616161616161616161616161616161"), + fromHex("6643110c5628fff59edf76d82d5bf573bf800f16a4d65dfb1e5d6f1a46296d0b") + ], + [ + fromHex("6161616161616161616161616161616161616161616161616161616161616161616161616161616161616161616161"), + fromHex("11eaed932c6c6fddfc2efc394e609facf4abe814fc6180d03b14fce13a07d0e5") + ], + [ + fromHex("616161616161616161616161616161616161616161616161616161616161616161616161616161616161616161616161"), + fromHex("97daac0ee9998dfcad6c9c0970da5ca411c86233a944c25b47566f6a7bc1ddd5") + ], + [ + fromHex("61616161616161616161616161616161616161616161616161616161616161616161616161616161616161616161616161"), + fromHex("8f9bec6a62dd28ebd36d1227745592de6658b36974a3bb98a4c582f683ea6c42") + ], + [ + fromHex("6161616161616161616161616161616161616161616161616161616161616161616161616161616161616161616161616161"), + fromHex("160b4e433e384e05e537dc59b467f7cb2403f0214db15c5db58862a3f1156d2e") + ], + [ + fromHex("616161616161616161616161616161616161616161616161616161616161616161616161616161616161616161616161616161"), + fromHex("bfc5fe0e360152ca98c50fab4ed7e3078c17debc2917740d5000913b686ca129") + ], + [ + fromHex("61616161616161616161616161616161616161616161616161616161616161616161616161616161616161616161616161616161"), + fromHex("6c1b3dc7a706b9dc81352a6716b9c666c608d8626272c64b914ab05572fc6e84") + ], + [ + fromHex("6161616161616161616161616161616161616161616161616161616161616161616161616161616161616161616161616161616161"), + fromHex("abe346a7259fc90b4c27185419628e5e6af6466b1ae9b5446cac4bfc26cf05c4") + ], + [ + 
fromHex("616161616161616161616161616161616161616161616161616161616161616161616161616161616161616161616161616161616161"), + fromHex("a3f01b6939256127582ac8ae9fb47a382a244680806a3f613a118851c1ca1d47") + ], + [ + fromHex("61616161616161616161616161616161616161616161616161616161616161616161616161616161616161616161616161616161616161"), + fromHex("9f4390f8d30c2dd92ec9f095b65e2b9ae9b0a925a5258e241c9f1e910f734318") + ], + [ + fromHex("6161616161616161616161616161616161616161616161616161616161616161616161616161616161616161616161616161616161616161"), + fromHex("b35439a4ac6f0948b6d6f9e3c6af0f5f590ce20f1bde7090ef7970686ec6738a") + ], + [ + fromHex("616161616161616161616161616161616161616161616161616161616161616161616161616161616161616161616161616161616161616161"), + fromHex("f13b2d724659eb3bf47f2dd6af1accc87b81f09f59f2b75e5c0bed6589dfe8c6") + ], + [ + fromHex("61616161616161616161616161616161616161616161616161616161616161616161616161616161616161616161616161616161616161616161"), + fromHex("d5c039b748aa64665782974ec3dc3025c042edf54dcdc2b5de31385b094cb678") + ], + [ + fromHex("6161616161616161616161616161616161616161616161616161616161616161616161616161616161616161616161616161616161616161616161"), + fromHex("111bb261277afd65f0744b247cd3e47d386d71563d0ed995517807d5ebd4fba3") + ], + [ + fromHex("616161616161616161616161616161616161616161616161616161616161616161616161616161616161616161616161616161616161616161616161"), + fromHex("11ee391211c6256460b6ed375957fadd8061cafbb31daf967db875aebd5aaad4") + ], + [ + fromHex("61616161616161616161616161616161616161616161616161616161616161616161616161616161616161616161616161616161616161616161616161"), + fromHex("35d5fc17cfbbadd00f5e710ada39f194c5ad7c766ad67072245f1fad45f0f530") + ], + [ + fromHex("6161616161616161616161616161616161616161616161616161616161616161616161616161616161616161616161616161616161616161616161616161"), + fromHex("f506898cc7c2e092f9eb9fadae7ba50383f5b46a2a4fe5597dbb553a78981268") + ], + [ + 
fromHex("616161616161616161616161616161616161616161616161616161616161616161616161616161616161616161616161616161616161616161616161616161"), + fromHex("7d3e74a05d7db15bce4ad9ec0658ea98e3f06eeecf16b4c6fff2da457ddc2f34") + ], + [ + fromHex("61616161616161616161616161616161616161616161616161616161616161616161616161616161616161616161616161616161616161616161616161616161"), + fromHex("ffe054fe7ae0cb6dc65c3af9b61d5209f439851db43d0ba5997337df154668eb") + ], + [ + fromHex("de188941a3375d3a8a061e67576e926dc71a7fa3f0cceb97452b4d3227965f9ea8cc75076d9fb9c5417aa5cb30fc22198b34982dbb629e"), + fromHex("038051e9c324393bd1ca1978dd0952c2aa3742ca4f1bd5cd4611cea83892d382") + ], + [ + millionChars, + fromHex("cdc76e5c9914fb9281a1c7e284d73e67f1809a48a497200e046d39ccc7112cd0") + ], + [ + fromHex("aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa"), + fromHex("45ad4b37c6e2fc0a2cfcc1b5da524132ec707615c2cae1dbbc43c97aa521db81") + ] +]; +/** + * @see https://tools.ietf.org/html/rfc4231 + */ +export var hmacTestVectors = [ + [ + fromHex("0b0b0b0b0b0b0b0b0b0b0b0b0b0b0b0b0b0b0b0b"), + fromHex("4869205468657265"), + fromHex("b0344c61d8db38535ca8afceaf0bf12b881dc200c9833da726e9376c2e32cff7") + ], + [ + fromHex("4a656665"), + fromHex("7768617420646f2079612077616e7420666f72206e6f7468696e673f"), + fromHex("5bdcc146bf60754e6a042426089575c75a003f089d2739839dec58b964ec3843") + ], + [ + fromHex("aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa"), + fromHex("dddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddd"), + fromHex("773ea91e36800e46854db8ebd09181a72959098b3ef8c122d9635514ced565fe") + ], + [ + fromHex("0102030405060708090a0b0c0d0e0f10111213141516171819"), + 
fromHex("cdcdcdcdcdcdcdcdcdcdcdcdcdcdcdcdcdcdcdcdcdcdcdcdcdcdcdcdcdcdcdcdcdcdcdcdcdcdcdcdcdcdcdcdcdcdcdcdcdcd"), + fromHex("82558a389a443c0ea4cc819899f2083a85f0faa3e578f8077a2e3ff46729665b") + ], + [ + fromHex("aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa"), + fromHex("54657374205573696e67204c6172676572205468616e20426c6f636b2d53697a65204b6579202d2048617368204b6579204669727374"), + fromHex("60e431591ee0b67f0d8a26aacbf5b77f8e0bc6213728c5140546040f0ee37f54") + ], + [ + fromHex("aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa"), + fromHex("5468697320697320612074657374207573696e672061206c6172676572207468616e20626c6f636b2d73697a65206b657920616e642061206c6172676572207468616e20626c6f636b2d73697a6520646174612e20546865206b6579206e6565647320746f20626520686173686564206265666f7265206265696e6720757365642062792074686520484d414320616c676f726974686d2e"), + fromHex("9b09ffa71b942fcb27635fbcd5b0e944bfdc63644f0713938a7f51535c3a35e2") + ] +]; +//# sourceMappingURL=knownHashes.fixture.js.map \ No newline at end of file diff --git a/amplify/functions/deleteDocument/node_modules/@aws-crypto/sha256-js/build/module/knownHashes.fixture.js.map b/amplify/functions/deleteDocument/node_modules/@aws-crypto/sha256-js/build/module/knownHashes.fixture.js.map new file mode 100644 index 0000000..1232159 --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@aws-crypto/sha256-js/build/module/knownHashes.fixture.js.map @@ -0,0 +1 @@ 
+{"version":3,"file":"knownHashes.fixture.js","sourceRoot":"","sources":["../../src/knownHashes.fixture.ts"],"names":[],"mappings":"AAAA,OAAO,EAAE,OAAO,EAAE,MAAM,4BAA4B,CAAC;AAErD,IAAM,YAAY,GAAG,IAAI,UAAU,CAAC,OAAO,CAAC,CAAC;AAC7C,KAAK,IAAI,CAAC,GAAG,CAAC,EAAE,CAAC,GAAG,OAAO,EAAE,CAAC,EAAE,EAAE;IAChC,YAAY,CAAC,CAAC,CAAC,GAAG,EAAE,CAAC;CACtB;AAED,MAAM,CAAC,IAAM,eAAe,GAAoC;IAC9D;QACE,UAAU,CAAC,IAAI,CAAC,CAAC,EAAE,EAAE,EAAE,EAAE,EAAE,CAAC,CAAC;QAC7B,OAAO,CAAC,kEAAkE,CAAC;KAC5E;IACD;QACE,IAAI,UAAU,CAAC,CAAC,CAAC;QACjB,OAAO,CAAC,kEAAkE,CAAC;KAC5E;IACD;QACE,OAAO,CAAC,IAAI,CAAC;QACb,OAAO,CAAC,kEAAkE,CAAC;KAC5E;IACD;QACE,OAAO,CAAC,MAAM,CAAC;QACf,OAAO,CAAC,kEAAkE,CAAC;KAC5E;IACD;QACE,OAAO,CAAC,QAAQ,CAAC;QACjB,OAAO,CAAC,kEAAkE,CAAC;KAC5E;IACD;QACE,OAAO,CAAC,UAAU,CAAC;QACnB,OAAO,CAAC,kEAAkE,CAAC;KAC5E;IACD;QACE,OAAO,CAAC,YAAY,CAAC;QACrB,OAAO,CAAC,kEAAkE,CAAC;KAC5E;IACD;QACE,OAAO,CAAC,cAAc,CAAC;QACvB,OAAO,CAAC,kEAAkE,CAAC;KAC5E;IACD;QACE,OAAO,CAAC,gBAAgB,CAAC;QACzB,OAAO,CAAC,kEAAkE,CAAC;KAC5E;IACD;QACE,OAAO,CAAC,kBAAkB,CAAC;QAC3B,OAAO,CAAC,kEAAkE,CAAC;KAC5E;IACD;QACE,OAAO,CAAC,oBAAoB,CAAC;QAC7B,OAAO,CAAC,kEAAkE,CAAC;KAC5E;IACD;QACE,OAAO,CAAC,sBAAsB,CAAC;QAC/B,OAAO,CAAC,kEAAkE,CAAC;KAC5E;IACD;QACE,OAAO,CAAC,wBAAwB,CAAC;QACjC,OAAO,CAAC,kEAAkE,CAAC;KAC5E;IACD;QACE,OAAO,CAAC,0BAA0B,CAAC;QACnC,OAAO,CAAC,kEAAkE,CAAC;KAC5E;IACD;QACE,OAAO,CAAC,4BAA4B,CAAC;QACrC,OAAO,CAAC,kEAAkE,CAAC;KAC5E;IACD;QACE,OAAO,CAAC,8BAA8B,CAAC;QACvC,OAAO,CAAC,kEAAkE,CAAC;KAC5E;IACD;QACE,OAAO,CAAC,gCAAgC,CAAC;QACzC,OAAO,CAAC,kEAAkE,CAAC;KAC5E;IACD;QACE,OAAO,CAAC,kCAAkC,CAAC;QAC3C,OAAO,CAAC,kEAAkE,CAAC;KAC5E;IACD;QACE,OAAO,CAAC,oCAAoC,CAAC;QAC7C,OAAO,CAAC,kEAAkE,CAAC;KAC5E;IACD;QACE,OAAO,CAAC,sCAAsC,CAAC;QAC/C,OAAO,CAAC,kEAAkE,CAAC;KAC5E;IACD;QACE,OAAO,CAAC,wCAAwC,CAAC;QACjD,OAAO,CAAC,kEAAkE,CAAC;KAC5E;IACD;QACE,OAAO,CAAC,0CAA0C,CAAC;QACnD,OAAO,CAAC,kEAAkE,CAAC;KAC5E;IACD;QACE,OAAO,CAAC,4CAA4C,CAAC;QACrD,OAAO,CAAC,kEAAkE,CAAC;KAC5E;IACD;QACE,OAAO,CAAC,8CAA8C,CAAC;QACvD,OAAO,CAAC,kEAAkE,CAAC;KAC5E;IACD;QACE,OAAO
,CAAC,gDAAgD,CAAC;QACzD,OAAO,CAAC,kEAAkE,CAAC;KAC5E;IACD;QACE,OAAO,CAAC,kDAAkD,CAAC;QAC3D,OAAO,CAAC,kEAAkE,CAAC;KAC5E;IACD;QACE,OAAO,CAAC,oDAAoD,CAAC;QAC7D,OAAO,CAAC,kEAAkE,CAAC;KAC5E;IACD;QACE,OAAO,CAAC,sDAAsD,CAAC;QAC/D,OAAO,CAAC,kEAAkE,CAAC;KAC5E;IACD;QACE,OAAO,CAAC,wDAAwD,CAAC;QACjE,OAAO,CAAC,kEAAkE,CAAC;KAC5E;IACD;QACE,OAAO,CAAC,0DAA0D,CAAC;QACnE,OAAO,CAAC,kEAAkE,CAAC;KAC5E;IACD;QACE,OAAO,CAAC,4DAA4D,CAAC;QACrE,OAAO,CAAC,kEAAkE,CAAC;KAC5E;IACD;QACE,OAAO,CAAC,8DAA8D,CAAC;QACvE,OAAO,CAAC,kEAAkE,CAAC;KAC5E;IACD;QACE,OAAO,CAAC,gEAAgE,CAAC;QACzE,OAAO,CAAC,kEAAkE,CAAC;KAC5E;IACD;QACE,OAAO,CAAC,kEAAkE,CAAC;QAC3E,OAAO,CAAC,kEAAkE,CAAC;KAC5E;IACD;QACE,OAAO,CACL,oEAAoE,CACrE;QACD,OAAO,CAAC,kEAAkE,CAAC;KAC5E;IACD;QACE,OAAO,CACL,sEAAsE,CACvE;QACD,OAAO,CAAC,kEAAkE,CAAC;KAC5E;IACD;QACE,OAAO,CACL,wEAAwE,CACzE;QACD,OAAO,CAAC,kEAAkE,CAAC;KAC5E;IACD;QACE,OAAO,CACL,0EAA0E,CAC3E;QACD,OAAO,CAAC,kEAAkE,CAAC;KAC5E;IACD;QACE,OAAO,CACL,4EAA4E,CAC7E;QACD,OAAO,CAAC,kEAAkE,CAAC;KAC5E;IACD;QACE,OAAO,CACL,8EAA8E,CAC/E;QACD,OAAO,CAAC,kEAAkE,CAAC;KAC5E;IACD;QACE,OAAO,CACL,gFAAgF,CACjF;QACD,OAAO,CAAC,kEAAkE,CAAC;KAC5E;IACD;QACE,OAAO,CACL,kFAAkF,CACnF;QACD,OAAO,CAAC,kEAAkE,CAAC;KAC5E;IACD;QACE,OAAO,CACL,oFAAoF,CACrF;QACD,OAAO,CAAC,kEAAkE,CAAC;KAC5E;IACD;QACE,OAAO,CACL,sFAAsF,CACvF;QACD,OAAO,CAAC,kEAAkE,CAAC;KAC5E;IACD;QACE,OAAO,CACL,wFAAwF,CACzF;QACD,OAAO,CAAC,kEAAkE,CAAC;KAC5E;IACD;QACE,OAAO,CACL,0FAA0F,CAC3F;QACD,OAAO,CAAC,kEAAkE,CAAC;KAC5E;IACD;QACE,OAAO,CACL,4FAA4F,CAC7F;QACD,OAAO,CAAC,kEAAkE,CAAC;KAC5E;IACD;QACE,OAAO,CACL,8FAA8F,CAC/F;QACD,OAAO,CAAC,kEAAkE,CAAC;KAC5E;IACD;QACE,OAAO,CACL,gGAAgG,CACjG;QACD,OAAO,CAAC,kEAAkE,CAAC;KAC5E;IACD;QACE,OAAO,CACL,kGAAkG,CACnG;QACD,OAAO,CAAC,kEAAkE,CAAC;KAC5E;IACD;QACE,OAAO,CACL,oGAAoG,CACrG;QACD,OAAO,CAAC,kEAAkE,CAAC;KAC5E;IACD;QACE,OAAO,CACL,sGAAsG,CACvG;QACD,OAAO,CAAC,kEAAkE,CAAC;KAC5E;IACD;QACE,OAAO,CACL,wGAAwG,CACzG;QACD,OAAO,CAAC,kEAAkE,CAAC;KAC5E;IACD;QACE,OAAO,CACL,0GAA0G,CAC3G;QACD,OAAO,CAAC,kEAAkE,CAAC;KAC5E;IACD;QACE,OAAO,CACL,4GAA4G,CAC7G;Q
ACD,OAAO,CAAC,kEAAkE,CAAC;KAC5E;IACD;QACE,OAAO,CACL,8GAA8G,CAC/G;QACD,OAAO,CAAC,kEAAkE,CAAC;KAC5E;IACD;QACE,OAAO,CACL,gHAAgH,CACjH;QACD,OAAO,CAAC,kEAAkE,CAAC;KAC5E;IACD;QACE,OAAO,CACL,kHAAkH,CACnH;QACD,OAAO,CAAC,kEAAkE,CAAC;KAC5E;IACD;QACE,OAAO,CACL,oHAAoH,CACrH;QACD,OAAO,CAAC,kEAAkE,CAAC;KAC5E;IACD;QACE,OAAO,CACL,sHAAsH,CACvH;QACD,OAAO,CAAC,kEAAkE,CAAC;KAC5E;IACD;QACE,OAAO,CACL,wHAAwH,CACzH;QACD,OAAO,CAAC,kEAAkE,CAAC;KAC5E;IACD;QACE,OAAO,CACL,0HAA0H,CAC3H;QACD,OAAO,CAAC,kEAAkE,CAAC;KAC5E;IACD;QACE,OAAO,CACL,4HAA4H,CAC7H;QACD,OAAO,CAAC,kEAAkE,CAAC;KAC5E;IACD;QACE,OAAO,CACL,8HAA8H,CAC/H;QACD,OAAO,CAAC,kEAAkE,CAAC;KAC5E;IACD;QACE,OAAO,CACL,gIAAgI,CACjI;QACD,OAAO,CAAC,kEAAkE,CAAC;KAC5E;IACD;QACE,OAAO,CACL,kIAAkI,CACnI;QACD,OAAO,CAAC,kEAAkE,CAAC;KAC5E;IACD;QACE,OAAO,CACL,gHAAgH,CACjH;QACD,OAAO,CAAC,kEAAkE,CAAC;KAC5E;IACD;QACE,YAAY;QACZ,OAAO,CAAC,kEAAkE,CAAC;KAC5E;IACD;QACE,OAAO,CACL,wQAAwQ,CACzQ;QACD,OAAO,CAAC,kEAAkE,CAAC;KAC5E;CACF,CAAC;AAEF;;GAEG;AACH,MAAM,CAAC,IAAM,eAAe,GAAgD;IAC1E;QACE,OAAO,CAAC,0CAA0C,CAAC;QACnD,OAAO,CAAC,kBAAkB,CAAC;QAC3B,OAAO,CAAC,kEAAkE,CAAC;KAC5E;IACD;QACE,OAAO,CAAC,UAAU,CAAC;QACnB,OAAO,CAAC,0DAA0D,CAAC;QACnE,OAAO,CAAC,kEAAkE,CAAC;KAC5E;IACD;QACE,OAAO,CAAC,0CAA0C,CAAC;QACnD,OAAO,CACL,sGAAsG,CACvG;QACD,OAAO,CAAC,kEAAkE,CAAC;KAC5E;IACD;QACE,OAAO,CAAC,oDAAoD,CAAC;QAC7D,OAAO,CACL,sGAAsG,CACvG;QACD,OAAO,CAAC,kEAAkE,CAAC;KAC5E;IACD;QACE,OAAO,CACL,wQAAwQ,CACzQ;QACD,OAAO,CACL,8GAA8G,CAC/G;QACD,OAAO,CAAC,kEAAkE,CAAC;KAC5E;IACD;QACE,OAAO,CACL,wQAAwQ,CACzQ;QACD,OAAO,CACL,kTAAkT,CACnT;QACD,OAAO,CAAC,kEAAkE,CAAC;KAC5E;CACF,CAAC"} \ No newline at end of file diff --git a/amplify/functions/deleteDocument/node_modules/@aws-crypto/sha256-js/package.json b/amplify/functions/deleteDocument/node_modules/@aws-crypto/sha256-js/package.json new file mode 100644 index 0000000..e8ef52d --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@aws-crypto/sha256-js/package.json @@ -0,0 +1,32 @@ +{ + "name": "@aws-crypto/sha256-js", + "version": "5.2.0", + 
"scripts": { + "prepublishOnly": "tsc -p tsconfig.json && tsc -p tsconfig.module.json", + "pretest": "tsc -p tsconfig.test.json", + "test": "mocha --require ts-node/register test/**/*test.ts" + }, + "main": "./build/main/index.js", + "module": "./build/module/index.js", + "types": "./build/main/index.d.ts", + "repository": { + "type": "git", + "url": "git@github.com:aws/aws-sdk-js-crypto-helpers.git" + }, + "author": { + "name": "AWS Crypto Tools Team", + "email": "aws-cryptools@amazon.com", + "url": "https://docs.aws.amazon.com/aws-crypto-tools/index.html?id=docs_gateway#lang/en_us" + }, + "homepage": "https://github.com/aws/aws-sdk-js-crypto-helpers/tree/master/packages/sha256-js", + "license": "Apache-2.0", + "dependencies": { + "@aws-crypto/util": "^5.2.0", + "@aws-sdk/types": "^3.222.0", + "tslib": "^2.6.2" + }, + "engines": { + "node": ">=16.0.0" + }, + "gitHead": "c11b171b35ec5c093364f0e0d8dc4ab1af68e748" +} diff --git a/amplify/functions/deleteDocument/node_modules/@aws-crypto/sha256-js/src/RawSha256.ts b/amplify/functions/deleteDocument/node_modules/@aws-crypto/sha256-js/src/RawSha256.ts new file mode 100644 index 0000000..f4a385c --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@aws-crypto/sha256-js/src/RawSha256.ts @@ -0,0 +1,164 @@ +import { + BLOCK_SIZE, + DIGEST_LENGTH, + INIT, + KEY, + MAX_HASHABLE_LENGTH +} from "./constants"; + +/** + * @internal + */ +export class RawSha256 { + private state: Int32Array = Int32Array.from(INIT); + private temp: Int32Array = new Int32Array(64); + private buffer: Uint8Array = new Uint8Array(64); + private bufferLength: number = 0; + private bytesHashed: number = 0; + + /** + * @internal + */ + finished: boolean = false; + + update(data: Uint8Array): void { + if (this.finished) { + throw new Error("Attempted to update an already finished hash."); + } + + let position = 0; + let { byteLength } = data; + this.bytesHashed += byteLength; + + if (this.bytesHashed * 8 > MAX_HASHABLE_LENGTH) { + throw new 
Error("Cannot hash more than 2^53 - 1 bits"); + } + + while (byteLength > 0) { + this.buffer[this.bufferLength++] = data[position++]; + byteLength--; + + if (this.bufferLength === BLOCK_SIZE) { + this.hashBuffer(); + this.bufferLength = 0; + } + } + } + + digest(): Uint8Array { + if (!this.finished) { + const bitsHashed = this.bytesHashed * 8; + const bufferView = new DataView( + this.buffer.buffer, + this.buffer.byteOffset, + this.buffer.byteLength + ); + + const undecoratedLength = this.bufferLength; + bufferView.setUint8(this.bufferLength++, 0x80); + + // Ensure the final block has enough room for the hashed length + if (undecoratedLength % BLOCK_SIZE >= BLOCK_SIZE - 8) { + for (let i = this.bufferLength; i < BLOCK_SIZE; i++) { + bufferView.setUint8(i, 0); + } + this.hashBuffer(); + this.bufferLength = 0; + } + + for (let i = this.bufferLength; i < BLOCK_SIZE - 8; i++) { + bufferView.setUint8(i, 0); + } + bufferView.setUint32( + BLOCK_SIZE - 8, + Math.floor(bitsHashed / 0x100000000), + true + ); + bufferView.setUint32(BLOCK_SIZE - 4, bitsHashed); + + this.hashBuffer(); + + this.finished = true; + } + + // The value in state is little-endian rather than big-endian, so flip + // each word into a new Uint8Array + const out = new Uint8Array(DIGEST_LENGTH); + for (let i = 0; i < 8; i++) { + out[i * 4] = (this.state[i] >>> 24) & 0xff; + out[i * 4 + 1] = (this.state[i] >>> 16) & 0xff; + out[i * 4 + 2] = (this.state[i] >>> 8) & 0xff; + out[i * 4 + 3] = (this.state[i] >>> 0) & 0xff; + } + + return out; + } + + private hashBuffer(): void { + const { buffer, state } = this; + + let state0 = state[0], + state1 = state[1], + state2 = state[2], + state3 = state[3], + state4 = state[4], + state5 = state[5], + state6 = state[6], + state7 = state[7]; + + for (let i = 0; i < BLOCK_SIZE; i++) { + if (i < 16) { + this.temp[i] = + ((buffer[i * 4] & 0xff) << 24) | + ((buffer[i * 4 + 1] & 0xff) << 16) | + ((buffer[i * 4 + 2] & 0xff) << 8) | + (buffer[i * 4 + 3] & 0xff); + } else { + 
let u = this.temp[i - 2]; + const t1 = + ((u >>> 17) | (u << 15)) ^ ((u >>> 19) | (u << 13)) ^ (u >>> 10); + + u = this.temp[i - 15]; + const t2 = + ((u >>> 7) | (u << 25)) ^ ((u >>> 18) | (u << 14)) ^ (u >>> 3); + + this.temp[i] = + ((t1 + this.temp[i - 7]) | 0) + ((t2 + this.temp[i - 16]) | 0); + } + + const t1 = + ((((((state4 >>> 6) | (state4 << 26)) ^ + ((state4 >>> 11) | (state4 << 21)) ^ + ((state4 >>> 25) | (state4 << 7))) + + ((state4 & state5) ^ (~state4 & state6))) | + 0) + + ((state7 + ((KEY[i] + this.temp[i]) | 0)) | 0)) | + 0; + + const t2 = + ((((state0 >>> 2) | (state0 << 30)) ^ + ((state0 >>> 13) | (state0 << 19)) ^ + ((state0 >>> 22) | (state0 << 10))) + + ((state0 & state1) ^ (state0 & state2) ^ (state1 & state2))) | + 0; + + state7 = state6; + state6 = state5; + state5 = state4; + state4 = (state3 + t1) | 0; + state3 = state2; + state2 = state1; + state1 = state0; + state0 = (t1 + t2) | 0; + } + + state[0] += state0; + state[1] += state1; + state[2] += state2; + state[3] += state3; + state[4] += state4; + state[5] += state5; + state[6] += state6; + state[7] += state7; + } +} diff --git a/amplify/functions/deleteDocument/node_modules/@aws-crypto/sha256-js/src/constants.ts b/amplify/functions/deleteDocument/node_modules/@aws-crypto/sha256-js/src/constants.ts new file mode 100644 index 0000000..8cede57 --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@aws-crypto/sha256-js/src/constants.ts @@ -0,0 +1,98 @@ +/** + * @internal + */ +export const BLOCK_SIZE: number = 64; + +/** + * @internal + */ +export const DIGEST_LENGTH: number = 32; + +/** + * @internal + */ +export const KEY = new Uint32Array([ + 0x428a2f98, + 0x71374491, + 0xb5c0fbcf, + 0xe9b5dba5, + 0x3956c25b, + 0x59f111f1, + 0x923f82a4, + 0xab1c5ed5, + 0xd807aa98, + 0x12835b01, + 0x243185be, + 0x550c7dc3, + 0x72be5d74, + 0x80deb1fe, + 0x9bdc06a7, + 0xc19bf174, + 0xe49b69c1, + 0xefbe4786, + 0x0fc19dc6, + 0x240ca1cc, + 0x2de92c6f, + 0x4a7484aa, + 0x5cb0a9dc, + 0x76f988da, + 
0x983e5152, + 0xa831c66d, + 0xb00327c8, + 0xbf597fc7, + 0xc6e00bf3, + 0xd5a79147, + 0x06ca6351, + 0x14292967, + 0x27b70a85, + 0x2e1b2138, + 0x4d2c6dfc, + 0x53380d13, + 0x650a7354, + 0x766a0abb, + 0x81c2c92e, + 0x92722c85, + 0xa2bfe8a1, + 0xa81a664b, + 0xc24b8b70, + 0xc76c51a3, + 0xd192e819, + 0xd6990624, + 0xf40e3585, + 0x106aa070, + 0x19a4c116, + 0x1e376c08, + 0x2748774c, + 0x34b0bcb5, + 0x391c0cb3, + 0x4ed8aa4a, + 0x5b9cca4f, + 0x682e6ff3, + 0x748f82ee, + 0x78a5636f, + 0x84c87814, + 0x8cc70208, + 0x90befffa, + 0xa4506ceb, + 0xbef9a3f7, + 0xc67178f2 +]); + +/** + * @internal + */ +export const INIT = [ + 0x6a09e667, + 0xbb67ae85, + 0x3c6ef372, + 0xa54ff53a, + 0x510e527f, + 0x9b05688c, + 0x1f83d9ab, + 0x5be0cd19 +]; + +/** + * @internal + */ +export const MAX_HASHABLE_LENGTH = 2 ** 53 - 1; diff --git a/amplify/functions/deleteDocument/node_modules/@aws-crypto/sha256-js/src/index.ts b/amplify/functions/deleteDocument/node_modules/@aws-crypto/sha256-js/src/index.ts new file mode 100644 index 0000000..4554d8a --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@aws-crypto/sha256-js/src/index.ts @@ -0,0 +1 @@ +export * from "./jsSha256"; diff --git a/amplify/functions/deleteDocument/node_modules/@aws-crypto/sha256-js/src/jsSha256.ts b/amplify/functions/deleteDocument/node_modules/@aws-crypto/sha256-js/src/jsSha256.ts new file mode 100644 index 0000000..f7bd993 --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@aws-crypto/sha256-js/src/jsSha256.ts @@ -0,0 +1,94 @@ +import { BLOCK_SIZE } from "./constants"; +import { RawSha256 } from "./RawSha256"; +import { Checksum, SourceData } from "@aws-sdk/types"; +import { isEmptyData, convertToBuffer } from "@aws-crypto/util"; + +export class Sha256 implements Checksum { + private readonly secret?: SourceData; + private hash: RawSha256; + private outer?: RawSha256; + private error: any; + + constructor(secret?: SourceData) { + this.secret = secret; + this.hash = new RawSha256(); + this.reset(); + } + 
+ update(toHash: SourceData): void { + if (isEmptyData(toHash) || this.error) { + return; + } + + try { + this.hash.update(convertToBuffer(toHash)); + } catch (e) { + this.error = e; + } + } + + /* This synchronous method keeps compatibility + * with the v2 aws-sdk. + */ + digestSync(): Uint8Array { + if (this.error) { + throw this.error; + } + + if (this.outer) { + if (!this.outer.finished) { + this.outer.update(this.hash.digest()); + } + + return this.outer.digest(); + } + + return this.hash.digest(); + } + + /* The underlying digest method here is synchronous. + * To keep the same interface with the other hash functions + * the default is to expose this as an async method. + * However, it can sometimes be useful to have a sync method. + */ + async digest(): Promise { + return this.digestSync(); + } + + reset(): void { + this.hash = new RawSha256(); + if (this.secret) { + this.outer = new RawSha256(); + const inner = bufferFromSecret(this.secret); + const outer = new Uint8Array(BLOCK_SIZE); + outer.set(inner); + + for (let i = 0; i < BLOCK_SIZE; i++) { + inner[i] ^= 0x36; + outer[i] ^= 0x5c; + } + + this.hash.update(inner); + this.outer.update(outer); + + // overwrite the copied key in memory + for (let i = 0; i < inner.byteLength; i++) { + inner[i] = 0; + } + } + } +} + +function bufferFromSecret(secret: SourceData): Uint8Array { + let input = convertToBuffer(secret); + + if (input.byteLength > BLOCK_SIZE) { + const bufferHash = new RawSha256(); + bufferHash.update(input); + input = bufferHash.digest(); + } + + const buffer = new Uint8Array(BLOCK_SIZE); + buffer.set(input); + return buffer; +} diff --git a/amplify/functions/deleteDocument/node_modules/@aws-crypto/sha256-js/src/knownHashes.fixture.ts b/amplify/functions/deleteDocument/node_modules/@aws-crypto/sha256-js/src/knownHashes.fixture.ts new file mode 100644 index 0000000..c83dae2 --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@aws-crypto/sha256-js/src/knownHashes.fixture.ts @@ -0,0 
+1,401 @@ +import { fromHex } from "@aws-sdk/util-hex-encoding"; + +const millionChars = new Uint8Array(1000000); +for (let i = 0; i < 1000000; i++) { + millionChars[i] = 97; +} + +export const hashTestVectors: Array<[Uint8Array, Uint8Array]> = [ + [ + Uint8Array.from([97, 98, 99]), + fromHex("ba7816bf8f01cfea414140de5dae2223b00361a396177a9cb410ff61f20015ad") + ], + [ + new Uint8Array(0), + fromHex("e3b0c44298fc1c149afbf4c8996fb92427ae41e4649b934ca495991b7852b855") + ], + [ + fromHex("61"), + fromHex("ca978112ca1bbdcafac231b39a23dc4da786eff8147c4e72b9807785afee48bb") + ], + [ + fromHex("6161"), + fromHex("961b6dd3ede3cb8ecbaacbd68de040cd78eb2ed5889130cceb4c49268ea4d506") + ], + [ + fromHex("616161"), + fromHex("9834876dcfb05cb167a5c24953eba58c4ac89b1adf57f28f2f9d09af107ee8f0") + ], + [ + fromHex("61616161"), + fromHex("61be55a8e2f6b4e172338bddf184d6dbee29c98853e0a0485ecee7f27b9af0b4") + ], + [ + fromHex("6161616161"), + fromHex("ed968e840d10d2d313a870bc131a4e2c311d7ad09bdf32b3418147221f51a6e2") + ], + [ + fromHex("616161616161"), + fromHex("ed02457b5c41d964dbd2f2a609d63fe1bb7528dbe55e1abf5b52c249cd735797") + ], + [ + fromHex("61616161616161"), + fromHex("e46240714b5db3a23eee60479a623efba4d633d27fe4f03c904b9e219a7fbe60") + ], + [ + fromHex("6161616161616161"), + fromHex("1f3ce40415a2081fa3eee75fc39fff8e56c22270d1a978a7249b592dcebd20b4") + ], + [ + fromHex("616161616161616161"), + fromHex("f2aca93b80cae681221f0445fa4e2cae8a1f9f8fa1e1741d9639caad222f537d") + ], + [ + fromHex("61616161616161616161"), + fromHex("bf2cb58a68f684d95a3b78ef8f661c9a4e5b09e82cc8f9cc88cce90528caeb27") + ], + [ + fromHex("6161616161616161616161"), + fromHex("28cb017dfc99073aa1b47c1b30f413e3ce774c4991eb4158de50f9dbb36d8043") + ], + [ + fromHex("616161616161616161616161"), + fromHex("f24abc34b13fade76e805799f71187da6cd90b9cac373ae65ed57f143bd664e5") + ], + [ + fromHex("61616161616161616161616161"), + fromHex("a689d786e81340e45511dec6c7ab2d978434e5db123362450fe10cfac70d19d0") + ], + [ + 
fromHex("6161616161616161616161616161"), + fromHex("82cab7df0abfb9d95dca4e5937ce2968c798c726fea48c016bf9763221efda13") + ], + [ + fromHex("616161616161616161616161616161"), + fromHex("ef2df0b539c6c23de0f4cbe42648c301ae0e22e887340a4599fb4ef4e2678e48") + ], + [ + fromHex("61616161616161616161616161616161"), + fromHex("0c0beacef8877bbf2416eb00f2b5dc96354e26dd1df5517320459b1236860f8c") + ], + [ + fromHex("6161616161616161616161616161616161"), + fromHex("b860666ee2966dd8f903be44ee605c6e1366f926d9f17a8f49937d11624eb99d") + ], + [ + fromHex("616161616161616161616161616161616161"), + fromHex("c926defaaa3d13eda2fc63a553bb7fb7326bece6e7cb67ca5296e4727d89bab4") + ], + [ + fromHex("61616161616161616161616161616161616161"), + fromHex("a0b4aaab8a966e2193ba172d68162c4656860197f256b5f45f0203397ff3f99c") + ], + [ + fromHex("6161616161616161616161616161616161616161"), + fromHex("42492da06234ad0ac76f5d5debdb6d1ae027cffbe746a1c13b89bb8bc0139137") + ], + [ + fromHex("616161616161616161616161616161616161616161"), + fromHex("7df8e299c834de198e264c3e374bc58ecd9382252a705c183beb02f275571e3b") + ], + [ + fromHex("61616161616161616161616161616161616161616161"), + fromHex("ec7c494df6d2a7ea36668d656e6b8979e33641bfea378c15038af3964db057a3") + ], + [ + fromHex("6161616161616161616161616161616161616161616161"), + fromHex("897d3e95b65f26676081f8b9f3a98b6ee4424566303e8d4e7c7522ebae219eab") + ], + [ + fromHex("616161616161616161616161616161616161616161616161"), + fromHex("09f61f8d9cd65e6a0c258087c485b6293541364e42bd97b2d7936580c8aa3c54") + ], + [ + fromHex("61616161616161616161616161616161616161616161616161"), + fromHex("2f521e2a7d0bd812cbc035f4ed6806eb8d851793b04ba147e8f66b72f5d1f20f") + ], + [ + fromHex("6161616161616161616161616161616161616161616161616161"), + fromHex("9976d549a25115dab4e36d0c1fb8f31cb07da87dd83275977360eb7dc09e88de") + ], + [ + fromHex("616161616161616161616161616161616161616161616161616161"), + fromHex("cc0616e61cbd6e8e5e34e9fb2d320f37de915820206f5696c31f1fbd24aa16de") + ], + [ 
+ fromHex("61616161616161616161616161616161616161616161616161616161"), + fromHex("9c547cb8115a44883b9f70ba68f75117cd55359c92611875e386f8af98c172ab") + ], + [ + fromHex("6161616161616161616161616161616161616161616161616161616161"), + fromHex("6913c9c7fd42fe23df8b6bcd4dbaf1c17748948d97f2980b432319c39eddcf6c") + ], + [ + fromHex("616161616161616161616161616161616161616161616161616161616161"), + fromHex("3a54fc0cbc0b0ef48b6507b7788096235d10292dd3ae24e22f5aa062d4f9864a") + ], + [ + fromHex("61616161616161616161616161616161616161616161616161616161616161"), + fromHex("61c60b487d1a921e0bcc9bf853dda0fb159b30bf57b2e2d2c753b00be15b5a09") + ], + [ + fromHex("6161616161616161616161616161616161616161616161616161616161616161"), + fromHex("3ba3f5f43b92602683c19aee62a20342b084dd5971ddd33808d81a328879a547") + ], + [ + fromHex( + "616161616161616161616161616161616161616161616161616161616161616161" + ), + fromHex("852785c805c77e71a22340a54e9d95933ed49121e7d2bf3c2d358854bc1359ea") + ], + [ + fromHex( + "61616161616161616161616161616161616161616161616161616161616161616161" + ), + fromHex("a27c896c4859204843166af66f0e902b9c3b3ed6d2fd13d435abc020065c526f") + ], + [ + fromHex( + "6161616161616161616161616161616161616161616161616161616161616161616161" + ), + fromHex("629362afc62c74497caed2272e30f8125ecd0965f8d8d7cfc4e260f7f8dd319d") + ], + [ + fromHex( + "616161616161616161616161616161616161616161616161616161616161616161616161" + ), + fromHex("22c1d24bcd03e9aee9832efccd6da613fc702793178e5f12c945c7b67ddda933") + ], + [ + fromHex( + "61616161616161616161616161616161616161616161616161616161616161616161616161" + ), + fromHex("21ec055b38ce759cd4d0f477e9bdec2c5b8199945db4439bae334a964df6246c") + ], + [ + fromHex( + "6161616161616161616161616161616161616161616161616161616161616161616161616161" + ), + fromHex("365a9c3e2c2af0a56e47a9dac51c2c5381bf8f41273bad3175e0e619126ad087") + ], + [ + fromHex( + "616161616161616161616161616161616161616161616161616161616161616161616161616161" + ), + 
fromHex("b4d5e56e929ba4cda349e9274e3603d0be246b82016bca20f363963c5f2d6845") + ], + [ + fromHex( + "61616161616161616161616161616161616161616161616161616161616161616161616161616161" + ), + fromHex("e33cdf9c7f7120b98e8c78408953e07f2ecd183006b5606df349b4c212acf43e") + ], + [ + fromHex( + "6161616161616161616161616161616161616161616161616161616161616161616161616161616161" + ), + fromHex("c0f8bd4dbc2b0c03107c1c37913f2a7501f521467f45dd0fef6958e9a4692719") + ], + [ + fromHex( + "616161616161616161616161616161616161616161616161616161616161616161616161616161616161" + ), + fromHex("7a538607fdaab9296995929f451565bbb8142e1844117322aafd2b3d76b01aff") + ], + [ + fromHex( + "61616161616161616161616161616161616161616161616161616161616161616161616161616161616161" + ), + fromHex("66d34fba71f8f450f7e45598853e53bfc23bbd129027cbb131a2f4ffd7878cd0") + ], + [ + fromHex( + "6161616161616161616161616161616161616161616161616161616161616161616161616161616161616161" + ), + fromHex("16849877c6c21ef0bfa68e4f6747300ddb171b170b9f00e189edc4c2fc4db93e") + ], + [ + fromHex( + "616161616161616161616161616161616161616161616161616161616161616161616161616161616161616161" + ), + fromHex("52789e3423b72beeb898456a4f49662e46b0cbb960784c5ef4b1399d327e7c27") + ], + [ + fromHex( + "61616161616161616161616161616161616161616161616161616161616161616161616161616161616161616161" + ), + fromHex("6643110c5628fff59edf76d82d5bf573bf800f16a4d65dfb1e5d6f1a46296d0b") + ], + [ + fromHex( + "6161616161616161616161616161616161616161616161616161616161616161616161616161616161616161616161" + ), + fromHex("11eaed932c6c6fddfc2efc394e609facf4abe814fc6180d03b14fce13a07d0e5") + ], + [ + fromHex( + "616161616161616161616161616161616161616161616161616161616161616161616161616161616161616161616161" + ), + fromHex("97daac0ee9998dfcad6c9c0970da5ca411c86233a944c25b47566f6a7bc1ddd5") + ], + [ + fromHex( + "61616161616161616161616161616161616161616161616161616161616161616161616161616161616161616161616161" + ), + 
fromHex("8f9bec6a62dd28ebd36d1227745592de6658b36974a3bb98a4c582f683ea6c42") + ], + [ + fromHex( + "6161616161616161616161616161616161616161616161616161616161616161616161616161616161616161616161616161" + ), + fromHex("160b4e433e384e05e537dc59b467f7cb2403f0214db15c5db58862a3f1156d2e") + ], + [ + fromHex( + "616161616161616161616161616161616161616161616161616161616161616161616161616161616161616161616161616161" + ), + fromHex("bfc5fe0e360152ca98c50fab4ed7e3078c17debc2917740d5000913b686ca129") + ], + [ + fromHex( + "61616161616161616161616161616161616161616161616161616161616161616161616161616161616161616161616161616161" + ), + fromHex("6c1b3dc7a706b9dc81352a6716b9c666c608d8626272c64b914ab05572fc6e84") + ], + [ + fromHex( + "6161616161616161616161616161616161616161616161616161616161616161616161616161616161616161616161616161616161" + ), + fromHex("abe346a7259fc90b4c27185419628e5e6af6466b1ae9b5446cac4bfc26cf05c4") + ], + [ + fromHex( + "616161616161616161616161616161616161616161616161616161616161616161616161616161616161616161616161616161616161" + ), + fromHex("a3f01b6939256127582ac8ae9fb47a382a244680806a3f613a118851c1ca1d47") + ], + [ + fromHex( + "61616161616161616161616161616161616161616161616161616161616161616161616161616161616161616161616161616161616161" + ), + fromHex("9f4390f8d30c2dd92ec9f095b65e2b9ae9b0a925a5258e241c9f1e910f734318") + ], + [ + fromHex( + "6161616161616161616161616161616161616161616161616161616161616161616161616161616161616161616161616161616161616161" + ), + fromHex("b35439a4ac6f0948b6d6f9e3c6af0f5f590ce20f1bde7090ef7970686ec6738a") + ], + [ + fromHex( + "616161616161616161616161616161616161616161616161616161616161616161616161616161616161616161616161616161616161616161" + ), + fromHex("f13b2d724659eb3bf47f2dd6af1accc87b81f09f59f2b75e5c0bed6589dfe8c6") + ], + [ + fromHex( + "61616161616161616161616161616161616161616161616161616161616161616161616161616161616161616161616161616161616161616161" + ), + 
fromHex("d5c039b748aa64665782974ec3dc3025c042edf54dcdc2b5de31385b094cb678") + ], + [ + fromHex( + "6161616161616161616161616161616161616161616161616161616161616161616161616161616161616161616161616161616161616161616161" + ), + fromHex("111bb261277afd65f0744b247cd3e47d386d71563d0ed995517807d5ebd4fba3") + ], + [ + fromHex( + "616161616161616161616161616161616161616161616161616161616161616161616161616161616161616161616161616161616161616161616161" + ), + fromHex("11ee391211c6256460b6ed375957fadd8061cafbb31daf967db875aebd5aaad4") + ], + [ + fromHex( + "61616161616161616161616161616161616161616161616161616161616161616161616161616161616161616161616161616161616161616161616161" + ), + fromHex("35d5fc17cfbbadd00f5e710ada39f194c5ad7c766ad67072245f1fad45f0f530") + ], + [ + fromHex( + "6161616161616161616161616161616161616161616161616161616161616161616161616161616161616161616161616161616161616161616161616161" + ), + fromHex("f506898cc7c2e092f9eb9fadae7ba50383f5b46a2a4fe5597dbb553a78981268") + ], + [ + fromHex( + "616161616161616161616161616161616161616161616161616161616161616161616161616161616161616161616161616161616161616161616161616161" + ), + fromHex("7d3e74a05d7db15bce4ad9ec0658ea98e3f06eeecf16b4c6fff2da457ddc2f34") + ], + [ + fromHex( + "61616161616161616161616161616161616161616161616161616161616161616161616161616161616161616161616161616161616161616161616161616161" + ), + fromHex("ffe054fe7ae0cb6dc65c3af9b61d5209f439851db43d0ba5997337df154668eb") + ], + [ + fromHex( + "de188941a3375d3a8a061e67576e926dc71a7fa3f0cceb97452b4d3227965f9ea8cc75076d9fb9c5417aa5cb30fc22198b34982dbb629e" + ), + fromHex("038051e9c324393bd1ca1978dd0952c2aa3742ca4f1bd5cd4611cea83892d382") + ], + [ + millionChars, + fromHex("cdc76e5c9914fb9281a1c7e284d73e67f1809a48a497200e046d39ccc7112cd0") + ], + [ + fromHex( + 
"aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa" + ), + fromHex("45ad4b37c6e2fc0a2cfcc1b5da524132ec707615c2cae1dbbc43c97aa521db81") + ] +]; + +/** + * @see https://tools.ietf.org/html/rfc4231 + */ +export const hmacTestVectors: Array<[Uint8Array, Uint8Array, Uint8Array]> = [ + [ + fromHex("0b0b0b0b0b0b0b0b0b0b0b0b0b0b0b0b0b0b0b0b"), + fromHex("4869205468657265"), + fromHex("b0344c61d8db38535ca8afceaf0bf12b881dc200c9833da726e9376c2e32cff7") + ], + [ + fromHex("4a656665"), + fromHex("7768617420646f2079612077616e7420666f72206e6f7468696e673f"), + fromHex("5bdcc146bf60754e6a042426089575c75a003f089d2739839dec58b964ec3843") + ], + [ + fromHex("aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa"), + fromHex( + "dddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddd" + ), + fromHex("773ea91e36800e46854db8ebd09181a72959098b3ef8c122d9635514ced565fe") + ], + [ + fromHex("0102030405060708090a0b0c0d0e0f10111213141516171819"), + fromHex( + "cdcdcdcdcdcdcdcdcdcdcdcdcdcdcdcdcdcdcdcdcdcdcdcdcdcdcdcdcdcdcdcdcdcdcdcdcdcdcdcdcdcdcdcdcdcdcdcdcdcd" + ), + fromHex("82558a389a443c0ea4cc819899f2083a85f0faa3e578f8077a2e3ff46729665b") + ], + [ + fromHex( + "aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa" + ), + fromHex( + "54657374205573696e67204c6172676572205468616e20426c6f636b2d53697a65204b6579202d2048617368204b6579204669727374" + ), + fromHex("60e431591ee0b67f0d8a26aacbf5b77f8e0bc6213728c5140546040f0ee37f54") + ], + [ + fromHex( + 
"aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa" + ), + fromHex( + "5468697320697320612074657374207573696e672061206c6172676572207468616e20626c6f636b2d73697a65206b657920616e642061206c6172676572207468616e20626c6f636b2d73697a6520646174612e20546865206b6579206e6565647320746f20626520686173686564206265666f7265206265696e6720757365642062792074686520484d414320616c676f726974686d2e" + ), + fromHex("9b09ffa71b942fcb27635fbcd5b0e944bfdc63644f0713938a7f51535c3a35e2") + ] +]; diff --git a/amplify/functions/deleteDocument/node_modules/@aws-crypto/sha256-js/tsconfig.json b/amplify/functions/deleteDocument/node_modules/@aws-crypto/sha256-js/tsconfig.json new file mode 100644 index 0000000..fb9aa95 --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@aws-crypto/sha256-js/tsconfig.json @@ -0,0 +1,10 @@ +{ + "extends": "../tsconfig.json", + "compilerOptions": { + "rootDir": "./src", + "outDir": "./build/main", + "lib": ["dom"], + }, + "include": ["src/**/*.ts"], + "exclude": ["node_modules/**"] +} diff --git a/amplify/functions/deleteDocument/node_modules/@aws-crypto/sha256-js/tsconfig.module.json b/amplify/functions/deleteDocument/node_modules/@aws-crypto/sha256-js/tsconfig.module.json new file mode 100644 index 0000000..7d0cfdd --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@aws-crypto/sha256-js/tsconfig.module.json @@ -0,0 +1,7 @@ +{ + "extends": "./tsconfig", + "compilerOptions": { + "outDir": "build/module", + "module": "esnext", + } +} diff --git a/amplify/functions/deleteDocument/node_modules/@aws-crypto/supports-web-crypto/CHANGELOG.md b/amplify/functions/deleteDocument/node_modules/@aws-crypto/supports-web-crypto/CHANGELOG.md new file mode 100644 index 0000000..13023ad --- /dev/null +++ 
b/amplify/functions/deleteDocument/node_modules/@aws-crypto/supports-web-crypto/CHANGELOG.md @@ -0,0 +1,66 @@ +# Change Log + +All notable changes to this project will be documented in this file. +See [Conventional Commits](https://conventionalcommits.org) for commit guidelines. + +# [5.2.0](https://github.com/aws/aws-sdk-js-crypto-helpers/compare/v5.1.0...v5.2.0) (2023-10-16) + +### Features + +- support ESM artifacts in all packages ([#752](https://github.com/aws/aws-sdk-js-crypto-helpers/issues/752)) ([e930ffb](https://github.com/aws/aws-sdk-js-crypto-helpers/commit/e930ffba5cfef66dd242049e7d514ced232c1e3b)) + +# [5.1.0](https://github.com/aws/aws-sdk-js-crypto-helpers/compare/v5.0.0...v5.1.0) (2023-09-22) + +### Bug Fixes + +- Update tsc to 2.x ([#735](https://github.com/aws/aws-sdk-js-crypto-helpers/issues/735)) ([782e0de](https://github.com/aws/aws-sdk-js-crypto-helpers/commit/782e0de9f5fef41f694130580a69d940894b6b8c)) + +# [5.0.0](https://github.com/aws/aws-sdk-js-crypto-helpers/compare/v4.0.1...v5.0.0) (2023-07-13) + +**Note:** Version bump only for package @aws-crypto/supports-web-crypto + +# [4.0.0](https://github.com/aws/aws-sdk-js-crypto-helpers/compare/v3.0.0...v4.0.0) (2023-02-20) + +**Note:** Version bump only for package @aws-crypto/supports-web-crypto + +# [3.0.0](https://github.com/aws/aws-sdk-js-crypto-helpers/compare/v2.0.2...v3.0.0) (2023-01-12) + +**Note:** Version bump only for package @aws-crypto/supports-web-crypto + +## [2.0.2](https://github.com/aws/aws-sdk-js-crypto-helpers/compare/v2.0.1...v2.0.2) (2022-09-07) + +**Note:** Version bump only for package @aws-crypto/supports-web-crypto + +# [2.0.0](https://github.com/aws/aws-sdk-js-crypto-helpers/compare/v1.2.2...v2.0.0) (2021-10-25) + +**Note:** Version bump only for package @aws-crypto/supports-web-crypto + +# [1.0.0](https://github.com/aws/aws-sdk-js-crypto-helpers/compare/@aws-crypto/supports-web-crypto@1.0.0-alpha.0...@aws-crypto/supports-web-crypto@1.0.0) (2020-10-22) + +### Bug 
Fixes + +- replace `sourceRoot` -> `rootDir` in tsconfig ([#169](https://github.com/aws/aws-sdk-js-crypto-helpers/issues/169)) ([d437167](https://github.com/aws/aws-sdk-js-crypto-helpers/commit/d437167b51d1c56a4fcc2bb8a446b74a7e3b7e06)) + +# [1.0.0-alpha.0](https://github.com/aws/aws-sdk-js-crypto-helpers/compare/@aws-crypto/supports-web-crypto@0.1.0-preview.4...@aws-crypto/supports-web-crypto@1.0.0-alpha.0) (2020-02-07) + +**Note:** Version bump only for package @aws-crypto/supports-web-crypto + +# [0.1.0-preview.4](https://github.com/aws/aws-sdk-js-crypto-helpers/compare/@aws-crypto/supports-web-crypto@0.1.0-preview.2...@aws-crypto/supports-web-crypto@0.1.0-preview.4) (2020-01-16) + +### Bug Fixes + +- Changed package.json files to point to the right Git repo ([#9](https://github.com/aws/aws-sdk-js-crypto-helpers/issues/9)) ([028245d](https://github.com/aws/aws-sdk-js-crypto-helpers/commit/028245d72e642ca98d82226afb300eb154503c4a)), closes [#8](https://github.com/aws/aws-sdk-js-crypto-helpers/issues/8) +- lerna version maintains package-lock ([#14](https://github.com/aws/aws-sdk-js-crypto-helpers/issues/14)) ([2ef29e1](https://github.com/aws/aws-sdk-js-crypto-helpers/commit/2ef29e13779703a5c9b32e93d18918fcb33b7272)), closes [#13](https://github.com/aws/aws-sdk-js-crypto-helpers/issues/13) + +# [0.1.0-preview.3](https://github.com/aws/aws-sdk-js-crypto-helpers/compare/@aws-crypto/supports-web-crypto@0.1.0-preview.2...@aws-crypto/supports-web-crypto@0.1.0-preview.3) (2019-11-15) + +### Bug Fixes + +- Changed package.json files to point to the right Git repo ([#9](https://github.com/aws/aws-sdk-js-crypto-helpers/issues/9)) ([028245d](https://github.com/aws/aws-sdk-js-crypto-helpers/commit/028245d72e642ca98d82226afb300eb154503c4a)), closes [#8](https://github.com/aws/aws-sdk-js-crypto-helpers/issues/8) +- lerna version maintains package-lock ([#14](https://github.com/aws/aws-sdk-js-crypto-helpers/issues/14)) 
([2ef29e1](https://github.com/aws/aws-sdk-js-crypto-helpers/commit/2ef29e13779703a5c9b32e93d18918fcb33b7272)), closes [#13](https://github.com/aws/aws-sdk-js-crypto-helpers/issues/13) + +# [0.1.0-preview.2](https://github.com/aws/aws-javascript-crypto-helpers/compare/@aws-crypto/supports-web-crypto@0.1.0-preview.1...@aws-crypto/supports-web-crypto@0.1.0-preview.2) (2019-10-30) + +### Bug Fixes + +- remove /src/ from .npmignore (for sourcemaps) ([#5](https://github.com/aws/aws-javascript-crypto-helpers/issues/5)) ([ec52056](https://github.com/aws/aws-javascript-crypto-helpers/commit/ec52056)) diff --git a/amplify/functions/deleteDocument/node_modules/@aws-crypto/supports-web-crypto/LICENSE b/amplify/functions/deleteDocument/node_modules/@aws-crypto/supports-web-crypto/LICENSE new file mode 100644 index 0000000..d645695 --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@aws-crypto/supports-web-crypto/LICENSE @@ -0,0 +1,202 @@ + + Apache License + Version 2.0, January 2004 + http://www.apache.org/licenses/ + + TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION + + 1. Definitions. + + "License" shall mean the terms and conditions for use, reproduction, + and distribution as defined by Sections 1 through 9 of this document. + + "Licensor" shall mean the copyright owner or entity authorized by + the copyright owner that is granting the License. + + "Legal Entity" shall mean the union of the acting entity and all + other entities that control, are controlled by, or are under common + control with that entity. For the purposes of this definition, + "control" means (i) the power, direct or indirect, to cause the + direction or management of such entity, whether by contract or + otherwise, or (ii) ownership of fifty percent (50%) or more of the + outstanding shares, or (iii) beneficial ownership of such entity. + + "You" (or "Your") shall mean an individual or Legal Entity + exercising permissions granted by this License. 
+ + "Source" form shall mean the preferred form for making modifications, + including but not limited to software source code, documentation + source, and configuration files. + + "Object" form shall mean any form resulting from mechanical + transformation or translation of a Source form, including but + not limited to compiled object code, generated documentation, + and conversions to other media types. + + "Work" shall mean the work of authorship, whether in Source or + Object form, made available under the License, as indicated by a + copyright notice that is included in or attached to the work + (an example is provided in the Appendix below). + + "Derivative Works" shall mean any work, whether in Source or Object + form, that is based on (or derived from) the Work and for which the + editorial revisions, annotations, elaborations, or other modifications + represent, as a whole, an original work of authorship. For the purposes + of this License, Derivative Works shall not include works that remain + separable from, or merely link (or bind by name) to the interfaces of, + the Work and Derivative Works thereof. + + "Contribution" shall mean any work of authorship, including + the original version of the Work and any modifications or additions + to that Work or Derivative Works thereof, that is intentionally + submitted to Licensor for inclusion in the Work by the copyright owner + or by an individual or Legal Entity authorized to submit on behalf of + the copyright owner. 
For the purposes of this definition, "submitted" + means any form of electronic, verbal, or written communication sent + to the Licensor or its representatives, including but not limited to + communication on electronic mailing lists, source code control systems, + and issue tracking systems that are managed by, or on behalf of, the + Licensor for the purpose of discussing and improving the Work, but + excluding communication that is conspicuously marked or otherwise + designated in writing by the copyright owner as "Not a Contribution." + + "Contributor" shall mean Licensor and any individual or Legal Entity + on behalf of whom a Contribution has been received by Licensor and + subsequently incorporated within the Work. + + 2. Grant of Copyright License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + copyright license to reproduce, prepare Derivative Works of, + publicly display, publicly perform, sublicense, and distribute the + Work and such Derivative Works in Source or Object form. + + 3. Grant of Patent License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + (except as stated in this section) patent license to make, have made, + use, offer to sell, sell, import, and otherwise transfer the Work, + where such license applies only to those patent claims licensable + by such Contributor that are necessarily infringed by their + Contribution(s) alone or by combination of their Contribution(s) + with the Work to which such Contribution(s) was submitted. 
If You + institute patent litigation against any entity (including a + cross-claim or counterclaim in a lawsuit) alleging that the Work + or a Contribution incorporated within the Work constitutes direct + or contributory patent infringement, then any patent licenses + granted to You under this License for that Work shall terminate + as of the date such litigation is filed. + + 4. Redistribution. You may reproduce and distribute copies of the + Work or Derivative Works thereof in any medium, with or without + modifications, and in Source or Object form, provided that You + meet the following conditions: + + (a) You must give any other recipients of the Work or + Derivative Works a copy of this License; and + + (b) You must cause any modified files to carry prominent notices + stating that You changed the files; and + + (c) You must retain, in the Source form of any Derivative Works + that You distribute, all copyright, patent, trademark, and + attribution notices from the Source form of the Work, + excluding those notices that do not pertain to any part of + the Derivative Works; and + + (d) If the Work includes a "NOTICE" text file as part of its + distribution, then any Derivative Works that You distribute must + include a readable copy of the attribution notices contained + within such NOTICE file, excluding those notices that do not + pertain to any part of the Derivative Works, in at least one + of the following places: within a NOTICE text file distributed + as part of the Derivative Works; within the Source form or + documentation, if provided along with the Derivative Works; or, + within a display generated by the Derivative Works, if and + wherever such third-party notices normally appear. The contents + of the NOTICE file are for informational purposes only and + do not modify the License. 
You may add Your own attribution + notices within Derivative Works that You distribute, alongside + or as an addendum to the NOTICE text from the Work, provided + that such additional attribution notices cannot be construed + as modifying the License. + + You may add Your own copyright statement to Your modifications and + may provide additional or different license terms and conditions + for use, reproduction, or distribution of Your modifications, or + for any such Derivative Works as a whole, provided Your use, + reproduction, and distribution of the Work otherwise complies with + the conditions stated in this License. + + 5. Submission of Contributions. Unless You explicitly state otherwise, + any Contribution intentionally submitted for inclusion in the Work + by You to the Licensor shall be under the terms and conditions of + this License, without any additional terms or conditions. + Notwithstanding the above, nothing herein shall supersede or modify + the terms of any separate license agreement you may have executed + with Licensor regarding such Contributions. + + 6. Trademarks. This License does not grant permission to use the trade + names, trademarks, service marks, or product names of the Licensor, + except as required for reasonable and customary use in describing the + origin of the Work and reproducing the content of the NOTICE file. + + 7. Disclaimer of Warranty. Unless required by applicable law or + agreed to in writing, Licensor provides the Work (and each + Contributor provides its Contributions) on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or + implied, including, without limitation, any warranties or conditions + of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A + PARTICULAR PURPOSE. You are solely responsible for determining the + appropriateness of using or redistributing the Work and assume any + risks associated with Your exercise of permissions under this License. + + 8. 
Limitation of Liability. In no event and under no legal theory, + whether in tort (including negligence), contract, or otherwise, + unless required by applicable law (such as deliberate and grossly + negligent acts) or agreed to in writing, shall any Contributor be + liable to You for damages, including any direct, indirect, special, + incidental, or consequential damages of any character arising as a + result of this License or out of the use or inability to use the + Work (including but not limited to damages for loss of goodwill, + work stoppage, computer failure or malfunction, or any and all + other commercial damages or losses), even if such Contributor + has been advised of the possibility of such damages. + + 9. Accepting Warranty or Additional Liability. While redistributing + the Work or Derivative Works thereof, You may choose to offer, + and charge a fee for, acceptance of support, warranty, indemnity, + or other liability obligations and/or rights consistent with this + License. However, in accepting such obligations, You may act only + on Your own behalf and on Your sole responsibility, not on behalf + of any other Contributor, and only if You agree to indemnify, + defend, and hold each Contributor harmless for any liability + incurred by, or claims asserted against, such Contributor by reason + of your accepting any such warranty or additional liability. + + END OF TERMS AND CONDITIONS + + APPENDIX: How to apply the Apache License to your work. + + To apply the Apache License to your work, attach the following + boilerplate notice, with the fields enclosed by brackets "[]" + replaced with your own identifying information. (Don't include + the brackets!) The text should be enclosed in the appropriate + comment syntax for the file format. We also recommend that a + file or class name and description of purpose be included on the + same "printed page" as the copyright notice for easier + identification within third-party archives. 
+ + Copyright [yyyy] [name of copyright owner] + + Licensed under the Apache License, Version 2.0 (the "License"); + you may not use this file except in compliance with the License. + You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + + Unless required by applicable law or agreed to in writing, software + distributed under the License is distributed on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + See the License for the specific language governing permissions and + limitations under the License. diff --git a/amplify/functions/deleteDocument/node_modules/@aws-crypto/supports-web-crypto/README.md b/amplify/functions/deleteDocument/node_modules/@aws-crypto/supports-web-crypto/README.md new file mode 100644 index 0000000..7891357 --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@aws-crypto/supports-web-crypto/README.md @@ -0,0 +1,32 @@ +# @aws-crypto/supports-web-crypto + +Functions to check web crypto support for browsers. + +## Usage + +``` +import {supportsWebCrypto} from '@aws-crypto/supports-web-crypto'; + +if (supportsWebCrypto(window)) { + // window.crypto.subtle.encrypt will exist +} + +``` + +## supportsWebCrypto + +Used to make sure `window.crypto.subtle` exists and implements crypto functions +as well as a cryptographic secure random source exists. + +## supportsSecureRandom + +Used to make sure that a cryptographic secure random source exists. +Does not check for `window.crypto.subtle`. 
+ +## supportsSubtleCrypto + +## supportsZeroByteGCM + +## Test + +`npm test` diff --git a/amplify/functions/deleteDocument/node_modules/@aws-crypto/supports-web-crypto/build/main/index.d.ts b/amplify/functions/deleteDocument/node_modules/@aws-crypto/supports-web-crypto/build/main/index.d.ts new file mode 100644 index 0000000..9725c9c --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@aws-crypto/supports-web-crypto/build/main/index.d.ts @@ -0,0 +1 @@ +export * from "./supportsWebCrypto"; diff --git a/amplify/functions/deleteDocument/node_modules/@aws-crypto/supports-web-crypto/build/main/index.js b/amplify/functions/deleteDocument/node_modules/@aws-crypto/supports-web-crypto/build/main/index.js new file mode 100644 index 0000000..cc4c93f --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@aws-crypto/supports-web-crypto/build/main/index.js @@ -0,0 +1,5 @@ +"use strict"; +Object.defineProperty(exports, "__esModule", { value: true }); +var tslib_1 = require("tslib"); +tslib_1.__exportStar(require("./supportsWebCrypto"), exports); +//# sourceMappingURL=index.js.map \ No newline at end of file diff --git a/amplify/functions/deleteDocument/node_modules/@aws-crypto/supports-web-crypto/build/main/index.js.map b/amplify/functions/deleteDocument/node_modules/@aws-crypto/supports-web-crypto/build/main/index.js.map new file mode 100644 index 0000000..df0dd2c --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@aws-crypto/supports-web-crypto/build/main/index.js.map @@ -0,0 +1 @@ +{"version":3,"file":"index.js","sourceRoot":"","sources":["../../src/index.ts"],"names":[],"mappings":";;;AAAA,8DAAoC"} \ No newline at end of file diff --git a/amplify/functions/deleteDocument/node_modules/@aws-crypto/supports-web-crypto/build/main/supportsWebCrypto.d.ts b/amplify/functions/deleteDocument/node_modules/@aws-crypto/supports-web-crypto/build/main/supportsWebCrypto.d.ts new file mode 100644 index 0000000..f2723dc --- /dev/null +++ 
b/amplify/functions/deleteDocument/node_modules/@aws-crypto/supports-web-crypto/build/main/supportsWebCrypto.d.ts @@ -0,0 +1,4 @@ +export declare function supportsWebCrypto(window: Window): boolean; +export declare function supportsSecureRandom(window: Window): boolean; +export declare function supportsSubtleCrypto(subtle: SubtleCrypto): boolean; +export declare function supportsZeroByteGCM(subtle: SubtleCrypto): Promise; diff --git a/amplify/functions/deleteDocument/node_modules/@aws-crypto/supports-web-crypto/build/main/supportsWebCrypto.js b/amplify/functions/deleteDocument/node_modules/@aws-crypto/supports-web-crypto/build/main/supportsWebCrypto.js new file mode 100644 index 0000000..378f31e --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@aws-crypto/supports-web-crypto/build/main/supportsWebCrypto.js @@ -0,0 +1,69 @@ +"use strict"; +Object.defineProperty(exports, "__esModule", { value: true }); +exports.supportsZeroByteGCM = exports.supportsSubtleCrypto = exports.supportsSecureRandom = exports.supportsWebCrypto = void 0; +var tslib_1 = require("tslib"); +var subtleCryptoMethods = [ + "decrypt", + "digest", + "encrypt", + "exportKey", + "generateKey", + "importKey", + "sign", + "verify" +]; +function supportsWebCrypto(window) { + if (supportsSecureRandom(window) && + typeof window.crypto.subtle === "object") { + var subtle = window.crypto.subtle; + return supportsSubtleCrypto(subtle); + } + return false; +} +exports.supportsWebCrypto = supportsWebCrypto; +function supportsSecureRandom(window) { + if (typeof window === "object" && typeof window.crypto === "object") { + var getRandomValues = window.crypto.getRandomValues; + return typeof getRandomValues === "function"; + } + return false; +} +exports.supportsSecureRandom = supportsSecureRandom; +function supportsSubtleCrypto(subtle) { + return (subtle && + subtleCryptoMethods.every(function (methodName) { return typeof subtle[methodName] === "function"; })); +} +exports.supportsSubtleCrypto = 
supportsSubtleCrypto; +function supportsZeroByteGCM(subtle) { + return tslib_1.__awaiter(this, void 0, void 0, function () { + var key, zeroByteAuthTag, _a; + return tslib_1.__generator(this, function (_b) { + switch (_b.label) { + case 0: + if (!supportsSubtleCrypto(subtle)) + return [2 /*return*/, false]; + _b.label = 1; + case 1: + _b.trys.push([1, 4, , 5]); + return [4 /*yield*/, subtle.generateKey({ name: "AES-GCM", length: 128 }, false, ["encrypt"])]; + case 2: + key = _b.sent(); + return [4 /*yield*/, subtle.encrypt({ + name: "AES-GCM", + iv: new Uint8Array(Array(12)), + additionalData: new Uint8Array(Array(16)), + tagLength: 128 + }, key, new Uint8Array(0))]; + case 3: + zeroByteAuthTag = _b.sent(); + return [2 /*return*/, zeroByteAuthTag.byteLength === 16]; + case 4: + _a = _b.sent(); + return [2 /*return*/, false]; + case 5: return [2 /*return*/]; + } + }); + }); +} +exports.supportsZeroByteGCM = supportsZeroByteGCM; +//# sourceMappingURL=supportsWebCrypto.js.map \ No newline at end of file diff --git a/amplify/functions/deleteDocument/node_modules/@aws-crypto/supports-web-crypto/build/main/supportsWebCrypto.js.map b/amplify/functions/deleteDocument/node_modules/@aws-crypto/supports-web-crypto/build/main/supportsWebCrypto.js.map new file mode 100644 index 0000000..1cc0ea3 --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@aws-crypto/supports-web-crypto/build/main/supportsWebCrypto.js.map @@ -0,0 +1 @@ 
+{"version":3,"file":"supportsWebCrypto.js","sourceRoot":"","sources":["../../src/supportsWebCrypto.ts"],"names":[],"mappings":";;;;AAUA,IAAM,mBAAmB,GAA8B;IACrD,SAAS;IACT,QAAQ;IACR,SAAS;IACT,WAAW;IACX,aAAa;IACb,WAAW;IACX,MAAM;IACN,QAAQ;CACT,CAAC;AAEF,SAAgB,iBAAiB,CAAC,MAAc;IAC9C,IACE,oBAAoB,CAAC,MAAM,CAAC;QAC5B,OAAO,MAAM,CAAC,MAAM,CAAC,MAAM,KAAK,QAAQ,EACxC;QACQ,IAAA,MAAM,GAAK,MAAM,CAAC,MAAM,OAAlB,CAAmB;QAEjC,OAAO,oBAAoB,CAAC,MAAM,CAAC,CAAC;KACrC;IAED,OAAO,KAAK,CAAC;AACf,CAAC;AAXD,8CAWC;AAED,SAAgB,oBAAoB,CAAC,MAAc;IACjD,IAAI,OAAO,MAAM,KAAK,QAAQ,IAAI,OAAO,MAAM,CAAC,MAAM,KAAK,QAAQ,EAAE;QAC3D,IAAA,eAAe,GAAK,MAAM,CAAC,MAAM,gBAAlB,CAAmB;QAE1C,OAAO,OAAO,eAAe,KAAK,UAAU,CAAC;KAC9C;IAED,OAAO,KAAK,CAAC;AACf,CAAC;AARD,oDAQC;AAED,SAAgB,oBAAoB,CAAC,MAAoB;IACvD,OAAO,CACL,MAAM;QACN,mBAAmB,CAAC,KAAK,CACvB,UAAA,UAAU,IAAI,OAAA,OAAO,MAAM,CAAC,UAAU,CAAC,KAAK,UAAU,EAAxC,CAAwC,CACvD,CACF,CAAC;AACJ,CAAC;AAPD,oDAOC;AAED,SAAsB,mBAAmB,CAAC,MAAoB;;;;;;oBAC5D,IAAI,CAAC,oBAAoB,CAAC,MAAM,CAAC;wBAAE,sBAAO,KAAK,EAAC;;;;oBAElC,qBAAM,MAAM,CAAC,WAAW,CAClC,EAAE,IAAI,EAAE,SAAS,EAAE,MAAM,EAAE,GAAG,EAAE,EAChC,KAAK,EACL,CAAC,SAAS,CAAC,CACZ,EAAA;;oBAJK,GAAG,GAAG,SAIX;oBACuB,qBAAM,MAAM,CAAC,OAAO,CAC1C;4BACE,IAAI,EAAE,SAAS;4BACf,EAAE,EAAE,IAAI,UAAU,CAAC,KAAK,CAAC,EAAE,CAAC,CAAC;4BAC7B,cAAc,EAAE,IAAI,UAAU,CAAC,KAAK,CAAC,EAAE,CAAC,CAAC;4BACzC,SAAS,EAAE,GAAG;yBACf,EACD,GAAG,EACH,IAAI,UAAU,CAAC,CAAC,CAAC,CAClB,EAAA;;oBATK,eAAe,GAAG,SASvB;oBACD,sBAAO,eAAe,CAAC,UAAU,KAAK,EAAE,EAAC;;;oBAEzC,sBAAO,KAAK,EAAC;;;;;CAEhB;AAtBD,kDAsBC"} \ No newline at end of file diff --git a/amplify/functions/deleteDocument/node_modules/@aws-crypto/supports-web-crypto/build/module/index.d.ts b/amplify/functions/deleteDocument/node_modules/@aws-crypto/supports-web-crypto/build/module/index.d.ts new file mode 100644 index 0000000..9725c9c --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@aws-crypto/supports-web-crypto/build/module/index.d.ts @@ -0,0 +1 @@ +export * from "./supportsWebCrypto"; diff --git 
a/amplify/functions/deleteDocument/node_modules/@aws-crypto/supports-web-crypto/build/module/index.js b/amplify/functions/deleteDocument/node_modules/@aws-crypto/supports-web-crypto/build/module/index.js new file mode 100644 index 0000000..f5527ea --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@aws-crypto/supports-web-crypto/build/module/index.js @@ -0,0 +1,2 @@ +export * from "./supportsWebCrypto"; +//# sourceMappingURL=index.js.map \ No newline at end of file diff --git a/amplify/functions/deleteDocument/node_modules/@aws-crypto/supports-web-crypto/build/module/index.js.map b/amplify/functions/deleteDocument/node_modules/@aws-crypto/supports-web-crypto/build/module/index.js.map new file mode 100644 index 0000000..b2df430 --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@aws-crypto/supports-web-crypto/build/module/index.js.map @@ -0,0 +1 @@ +{"version":3,"file":"index.js","sourceRoot":"","sources":["../../src/index.ts"],"names":[],"mappings":"AAAA,cAAc,qBAAqB,CAAC"} \ No newline at end of file diff --git a/amplify/functions/deleteDocument/node_modules/@aws-crypto/supports-web-crypto/build/module/supportsWebCrypto.d.ts b/amplify/functions/deleteDocument/node_modules/@aws-crypto/supports-web-crypto/build/module/supportsWebCrypto.d.ts new file mode 100644 index 0000000..f2723dc --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@aws-crypto/supports-web-crypto/build/module/supportsWebCrypto.d.ts @@ -0,0 +1,4 @@ +export declare function supportsWebCrypto(window: Window): boolean; +export declare function supportsSecureRandom(window: Window): boolean; +export declare function supportsSubtleCrypto(subtle: SubtleCrypto): boolean; +export declare function supportsZeroByteGCM(subtle: SubtleCrypto): Promise; diff --git a/amplify/functions/deleteDocument/node_modules/@aws-crypto/supports-web-crypto/build/module/supportsWebCrypto.js 
b/amplify/functions/deleteDocument/node_modules/@aws-crypto/supports-web-crypto/build/module/supportsWebCrypto.js new file mode 100644 index 0000000..70b46e6 --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@aws-crypto/supports-web-crypto/build/module/supportsWebCrypto.js @@ -0,0 +1,62 @@ +import { __awaiter, __generator } from "tslib"; +var subtleCryptoMethods = [ + "decrypt", + "digest", + "encrypt", + "exportKey", + "generateKey", + "importKey", + "sign", + "verify" +]; +export function supportsWebCrypto(window) { + if (supportsSecureRandom(window) && + typeof window.crypto.subtle === "object") { + var subtle = window.crypto.subtle; + return supportsSubtleCrypto(subtle); + } + return false; +} +export function supportsSecureRandom(window) { + if (typeof window === "object" && typeof window.crypto === "object") { + var getRandomValues = window.crypto.getRandomValues; + return typeof getRandomValues === "function"; + } + return false; +} +export function supportsSubtleCrypto(subtle) { + return (subtle && + subtleCryptoMethods.every(function (methodName) { return typeof subtle[methodName] === "function"; })); +} +export function supportsZeroByteGCM(subtle) { + return __awaiter(this, void 0, void 0, function () { + var key, zeroByteAuthTag, _a; + return __generator(this, function (_b) { + switch (_b.label) { + case 0: + if (!supportsSubtleCrypto(subtle)) + return [2 /*return*/, false]; + _b.label = 1; + case 1: + _b.trys.push([1, 4, , 5]); + return [4 /*yield*/, subtle.generateKey({ name: "AES-GCM", length: 128 }, false, ["encrypt"])]; + case 2: + key = _b.sent(); + return [4 /*yield*/, subtle.encrypt({ + name: "AES-GCM", + iv: new Uint8Array(Array(12)), + additionalData: new Uint8Array(Array(16)), + tagLength: 128 + }, key, new Uint8Array(0))]; + case 3: + zeroByteAuthTag = _b.sent(); + return [2 /*return*/, zeroByteAuthTag.byteLength === 16]; + case 4: + _a = _b.sent(); + return [2 /*return*/, false]; + case 5: return [2 /*return*/]; + } + }); + 
}); +} +//# sourceMappingURL=supportsWebCrypto.js.map \ No newline at end of file diff --git a/amplify/functions/deleteDocument/node_modules/@aws-crypto/supports-web-crypto/build/module/supportsWebCrypto.js.map b/amplify/functions/deleteDocument/node_modules/@aws-crypto/supports-web-crypto/build/module/supportsWebCrypto.js.map new file mode 100644 index 0000000..967fc19 --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@aws-crypto/supports-web-crypto/build/module/supportsWebCrypto.js.map @@ -0,0 +1 @@ +{"version":3,"file":"supportsWebCrypto.js","sourceRoot":"","sources":["../../src/supportsWebCrypto.ts"],"names":[],"mappings":";AAUA,IAAM,mBAAmB,GAA8B;IACrD,SAAS;IACT,QAAQ;IACR,SAAS;IACT,WAAW;IACX,aAAa;IACb,WAAW;IACX,MAAM;IACN,QAAQ;CACT,CAAC;AAEF,MAAM,UAAU,iBAAiB,CAAC,MAAc;IAC9C,IACE,oBAAoB,CAAC,MAAM,CAAC;QAC5B,OAAO,MAAM,CAAC,MAAM,CAAC,MAAM,KAAK,QAAQ,EACxC;QACQ,IAAA,MAAM,GAAK,MAAM,CAAC,MAAM,OAAlB,CAAmB;QAEjC,OAAO,oBAAoB,CAAC,MAAM,CAAC,CAAC;KACrC;IAED,OAAO,KAAK,CAAC;AACf,CAAC;AAED,MAAM,UAAU,oBAAoB,CAAC,MAAc;IACjD,IAAI,OAAO,MAAM,KAAK,QAAQ,IAAI,OAAO,MAAM,CAAC,MAAM,KAAK,QAAQ,EAAE;QAC3D,IAAA,eAAe,GAAK,MAAM,CAAC,MAAM,gBAAlB,CAAmB;QAE1C,OAAO,OAAO,eAAe,KAAK,UAAU,CAAC;KAC9C;IAED,OAAO,KAAK,CAAC;AACf,CAAC;AAED,MAAM,UAAU,oBAAoB,CAAC,MAAoB;IACvD,OAAO,CACL,MAAM;QACN,mBAAmB,CAAC,KAAK,CACvB,UAAA,UAAU,IAAI,OAAA,OAAO,MAAM,CAAC,UAAU,CAAC,KAAK,UAAU,EAAxC,CAAwC,CACvD,CACF,CAAC;AACJ,CAAC;AAED,MAAM,UAAgB,mBAAmB,CAAC,MAAoB;;;;;;oBAC5D,IAAI,CAAC,oBAAoB,CAAC,MAAM,CAAC;wBAAE,sBAAO,KAAK,EAAC;;;;oBAElC,qBAAM,MAAM,CAAC,WAAW,CAClC,EAAE,IAAI,EAAE,SAAS,EAAE,MAAM,EAAE,GAAG,EAAE,EAChC,KAAK,EACL,CAAC,SAAS,CAAC,CACZ,EAAA;;oBAJK,GAAG,GAAG,SAIX;oBACuB,qBAAM,MAAM,CAAC,OAAO,CAC1C;4BACE,IAAI,EAAE,SAAS;4BACf,EAAE,EAAE,IAAI,UAAU,CAAC,KAAK,CAAC,EAAE,CAAC,CAAC;4BAC7B,cAAc,EAAE,IAAI,UAAU,CAAC,KAAK,CAAC,EAAE,CAAC,CAAC;4BACzC,SAAS,EAAE,GAAG;yBACf,EACD,GAAG,EACH,IAAI,UAAU,CAAC,CAAC,CAAC,CAClB,EAAA;;oBATK,eAAe,GAAG,SASvB;oBACD,sBAAO,eAAe,CAAC,UAAU,KAAK,EAAE,EAAC;;;oBAEzC,sBAAO,KAAK,EAAC;;;;;CAEhB"} \ 
No newline at end of file diff --git a/amplify/functions/deleteDocument/node_modules/@aws-crypto/supports-web-crypto/package.json b/amplify/functions/deleteDocument/node_modules/@aws-crypto/supports-web-crypto/package.json new file mode 100644 index 0000000..a97bf01 --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@aws-crypto/supports-web-crypto/package.json @@ -0,0 +1,28 @@ +{ + "name": "@aws-crypto/supports-web-crypto", + "version": "5.2.0", + "description": "Provides functions for detecting if the host environment supports the WebCrypto API", + "scripts": { + "prepublishOnly": "tsc -p tsconfig.json && tsc -p tsconfig.module.json", + "pretest": "tsc -p tsconfig.test.json", + "test": "mocha --require ts-node/register test/**/*test.ts" + }, + "repository": { + "type": "git", + "url": "git@github.com:aws/aws-sdk-js-crypto-helpers.git" + }, + "author": { + "name": "AWS Crypto Tools Team", + "email": "aws-cryptools@amazon.com", + "url": "https://docs.aws.amazon.com/aws-crypto-tools/index.html?id=docs_gateway#lang/en_us" + }, + "homepage": "https://github.com/aws/aws-sdk-js-crypto-helpers/tree/master/packages/supports-web-crypto", + "license": "Apache-2.0", + "main": "./build/main/index.js", + "module": "./build/module/index.js", + "types": "./build/main/index.d.ts", + "dependencies": { + "tslib": "^2.6.2" + }, + "gitHead": "c11b171b35ec5c093364f0e0d8dc4ab1af68e748" +} diff --git a/amplify/functions/deleteDocument/node_modules/@aws-crypto/supports-web-crypto/src/index.ts b/amplify/functions/deleteDocument/node_modules/@aws-crypto/supports-web-crypto/src/index.ts new file mode 100644 index 0000000..9725c9c --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@aws-crypto/supports-web-crypto/src/index.ts @@ -0,0 +1 @@ +export * from "./supportsWebCrypto"; diff --git a/amplify/functions/deleteDocument/node_modules/@aws-crypto/supports-web-crypto/src/supportsWebCrypto.ts 
b/amplify/functions/deleteDocument/node_modules/@aws-crypto/supports-web-crypto/src/supportsWebCrypto.ts new file mode 100644 index 0000000..7eef629 --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@aws-crypto/supports-web-crypto/src/supportsWebCrypto.ts @@ -0,0 +1,76 @@ +type SubtleCryptoMethod = + | "decrypt" + | "digest" + | "encrypt" + | "exportKey" + | "generateKey" + | "importKey" + | "sign" + | "verify"; + +const subtleCryptoMethods: Array = [ + "decrypt", + "digest", + "encrypt", + "exportKey", + "generateKey", + "importKey", + "sign", + "verify" +]; + +export function supportsWebCrypto(window: Window): boolean { + if ( + supportsSecureRandom(window) && + typeof window.crypto.subtle === "object" + ) { + const { subtle } = window.crypto; + + return supportsSubtleCrypto(subtle); + } + + return false; +} + +export function supportsSecureRandom(window: Window): boolean { + if (typeof window === "object" && typeof window.crypto === "object") { + const { getRandomValues } = window.crypto; + + return typeof getRandomValues === "function"; + } + + return false; +} + +export function supportsSubtleCrypto(subtle: SubtleCrypto) { + return ( + subtle && + subtleCryptoMethods.every( + methodName => typeof subtle[methodName] === "function" + ) + ); +} + +export async function supportsZeroByteGCM(subtle: SubtleCrypto) { + if (!supportsSubtleCrypto(subtle)) return false; + try { + const key = await subtle.generateKey( + { name: "AES-GCM", length: 128 }, + false, + ["encrypt"] + ); + const zeroByteAuthTag = await subtle.encrypt( + { + name: "AES-GCM", + iv: new Uint8Array(Array(12)), + additionalData: new Uint8Array(Array(16)), + tagLength: 128 + }, + key, + new Uint8Array(0) + ); + return zeroByteAuthTag.byteLength === 16; + } catch { + return false; + } +} diff --git a/amplify/functions/deleteDocument/node_modules/@aws-crypto/supports-web-crypto/tsconfig.json b/amplify/functions/deleteDocument/node_modules/@aws-crypto/supports-web-crypto/tsconfig.json new 
file mode 100644 index 0000000..efca6de --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@aws-crypto/supports-web-crypto/tsconfig.json @@ -0,0 +1,10 @@ +{ + "extends": "../tsconfig.json", + "compilerOptions": { + "lib": ["dom"], + "rootDir": "./src", + "outDir": "./build/main", + }, + "include": ["src/**/*.ts"], + "exclude": ["node_modules/**"] +} diff --git a/amplify/functions/deleteDocument/node_modules/@aws-crypto/supports-web-crypto/tsconfig.module.json b/amplify/functions/deleteDocument/node_modules/@aws-crypto/supports-web-crypto/tsconfig.module.json new file mode 100644 index 0000000..7d0cfdd --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@aws-crypto/supports-web-crypto/tsconfig.module.json @@ -0,0 +1,7 @@ +{ + "extends": "./tsconfig", + "compilerOptions": { + "outDir": "build/module", + "module": "esnext", + } +} diff --git a/amplify/functions/deleteDocument/node_modules/@aws-crypto/util/CHANGELOG.md b/amplify/functions/deleteDocument/node_modules/@aws-crypto/util/CHANGELOG.md new file mode 100644 index 0000000..df2cecb --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@aws-crypto/util/CHANGELOG.md @@ -0,0 +1,71 @@ +# Change Log + +All notable changes to this project will be documented in this file. +See [Conventional Commits](https://conventionalcommits.org) for commit guidelines. 
+ +# [5.2.0](https://github.com/aws/aws-sdk-js-crypto-helpers/compare/v5.1.0...v5.2.0) (2023-10-16) + +### Features + +- support ESM artifacts in all packages ([#752](https://github.com/aws/aws-sdk-js-crypto-helpers/issues/752)) ([e930ffb](https://github.com/aws/aws-sdk-js-crypto-helpers/commit/e930ffba5cfef66dd242049e7d514ced232c1e3b)) + +# [5.1.0](https://github.com/aws/aws-sdk-js-crypto-helpers/compare/v5.0.0...v5.1.0) (2023-09-22) + +### Bug Fixes + +- Update tsc to 2.x ([#735](https://github.com/aws/aws-sdk-js-crypto-helpers/issues/735)) ([782e0de](https://github.com/aws/aws-sdk-js-crypto-helpers/commit/782e0de9f5fef41f694130580a69d940894b6b8c)) + +### Features + +- Use @smithy/util-utf8 ([#730](https://github.com/aws/aws-sdk-js-crypto-helpers/issues/730)) ([00fb851](https://github.com/aws/aws-sdk-js-crypto-helpers/commit/00fb851ca3559d5a1f370f9256814de1210826b8)), closes [#699](https://github.com/aws/aws-sdk-js-crypto-helpers/issues/699) + +# [5.0.0](https://github.com/aws/aws-sdk-js-crypto-helpers/compare/v4.0.1...v5.0.0) (2023-07-13) + +**Note:** Version bump only for package @aws-crypto/util + +# [4.0.0](https://github.com/aws/aws-sdk-js-crypto-helpers/compare/v3.0.0...v4.0.0) (2023-02-20) + +**Note:** Version bump only for package @aws-crypto/util + +# [3.0.0](https://github.com/aws/aws-sdk-js-crypto-helpers/compare/v2.0.2...v3.0.0) (2023-01-12) + +- feat!: replace Hash implementations with Checksum interface (#492) ([da43dc0](https://github.com/aws/aws-sdk-js-crypto-helpers/commit/da43dc0fdf669d9ebb5bfb1b1f7c79e46c4aaae1)), closes [#492](https://github.com/aws/aws-sdk-js-crypto-helpers/issues/492) + +### BREAKING CHANGES + +- All classes that implemented `Hash` now implement `Checksum`. 
+ +## [2.0.2](https://github.com/aws/aws-sdk-js-crypto-helpers/compare/v2.0.1...v2.0.2) (2022-09-07) + +### Bug Fixes + +- **#337:** update @aws-sdk/types ([#373](https://github.com/aws/aws-sdk-js-crypto-helpers/issues/373)) ([b26a811](https://github.com/aws/aws-sdk-js-crypto-helpers/commit/b26a811a392f5209c7ec7e57251500d4d78f97ff)), closes [#337](https://github.com/aws/aws-sdk-js-crypto-helpers/issues/337) +- **docs:** update README for packages/util ([#382](https://github.com/aws/aws-sdk-js-crypto-helpers/issues/382)) ([f3e650e](https://github.com/aws/aws-sdk-js-crypto-helpers/commit/f3e650e1b4792ffbea2e8a1a015fd55fb951a3a4)) + +## [2.0.1](https://github.com/aws/aws-sdk-js-crypto-helpers/compare/v2.0.0...v2.0.1) (2021-12-09) + +### Bug Fixes + +- **uint32ArrayFrom:** increment index & polyfill for Uint32Array ([#270](https://github.com/aws/aws-sdk-js-crypto-helpers/issues/270)) ([a70d603](https://github.com/aws/aws-sdk-js-crypto-helpers/commit/a70d603f3ba7600d3c1213f297d4160a4b3793bd)) + +# [2.0.0](https://github.com/aws/aws-sdk-js-crypto-helpers/compare/v1.2.2...v2.0.0) (2021-10-25) + +**Note:** Version bump only for package @aws-crypto/util + +## [1.2.2](https://github.com/aws/aws-sdk-js-crypto-helpers/compare/v1.2.1...v1.2.2) (2021-10-12) + +### Bug Fixes + +- **crc32c:** ie11 does not support Array.from ([#221](https://github.com/aws/aws-sdk-js-crypto-helpers/issues/221)) ([5f49547](https://github.com/aws/aws-sdk-js-crypto-helpers/commit/5f495472ab8988cf203e0f2a70a51f7e1fcd7e60)) + +## [1.2.1](https://github.com/aws/aws-sdk-js-crypto-helpers/compare/v1.2.0...v1.2.1) (2021-09-17) + +### Bug Fixes + +- better pollyfill check for Buffer ([#217](https://github.com/aws/aws-sdk-js-crypto-helpers/issues/217)) ([bc97da2](https://github.com/aws/aws-sdk-js-crypto-helpers/commit/bc97da29aaf473943e4407c9a29cc30f74f15723)) + +# [1.2.0](https://github.com/aws/aws-sdk-js-crypto-helpers/compare/v1.1.1...v1.2.0) (2021-09-17) + +### Features + +- add @aws-crypto/util 
([8f489cb](https://github.com/aws/aws-sdk-js-crypto-helpers/commit/8f489cbe4c0e134f826bac66f1bf5172597048b9)) diff --git a/amplify/functions/deleteDocument/node_modules/@aws-crypto/util/LICENSE b/amplify/functions/deleteDocument/node_modules/@aws-crypto/util/LICENSE new file mode 100644 index 0000000..980a15a --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@aws-crypto/util/LICENSE @@ -0,0 +1,201 @@ + Apache License + Version 2.0, January 2004 + http://www.apache.org/licenses/ + + TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION + + 1. Definitions. + + "License" shall mean the terms and conditions for use, reproduction, + and distribution as defined by Sections 1 through 9 of this document. + + "Licensor" shall mean the copyright owner or entity authorized by + the copyright owner that is granting the License. + + "Legal Entity" shall mean the union of the acting entity and all + other entities that control, are controlled by, or are under common + control with that entity. For the purposes of this definition, + "control" means (i) the power, direct or indirect, to cause the + direction or management of such entity, whether by contract or + otherwise, or (ii) ownership of fifty percent (50%) or more of the + outstanding shares, or (iii) beneficial ownership of such entity. + + "You" (or "Your") shall mean an individual or Legal Entity + exercising permissions granted by this License. + + "Source" form shall mean the preferred form for making modifications, + including but not limited to software source code, documentation + source, and configuration files. + + "Object" form shall mean any form resulting from mechanical + transformation or translation of a Source form, including but + not limited to compiled object code, generated documentation, + and conversions to other media types. 
+ + "Work" shall mean the work of authorship, whether in Source or + Object form, made available under the License, as indicated by a + copyright notice that is included in or attached to the work + (an example is provided in the Appendix below). + + "Derivative Works" shall mean any work, whether in Source or Object + form, that is based on (or derived from) the Work and for which the + editorial revisions, annotations, elaborations, or other modifications + represent, as a whole, an original work of authorship. For the purposes + of this License, Derivative Works shall not include works that remain + separable from, or merely link (or bind by name) to the interfaces of, + the Work and Derivative Works thereof. + + "Contribution" shall mean any work of authorship, including + the original version of the Work and any modifications or additions + to that Work or Derivative Works thereof, that is intentionally + submitted to Licensor for inclusion in the Work by the copyright owner + or by an individual or Legal Entity authorized to submit on behalf of + the copyright owner. For the purposes of this definition, "submitted" + means any form of electronic, verbal, or written communication sent + to the Licensor or its representatives, including but not limited to + communication on electronic mailing lists, source code control systems, + and issue tracking systems that are managed by, or on behalf of, the + Licensor for the purpose of discussing and improving the Work, but + excluding communication that is conspicuously marked or otherwise + designated in writing by the copyright owner as "Not a Contribution." + + "Contributor" shall mean Licensor and any individual or Legal Entity + on behalf of whom a Contribution has been received by Licensor and + subsequently incorporated within the Work. + + 2. Grant of Copyright License. 
Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + copyright license to reproduce, prepare Derivative Works of, + publicly display, publicly perform, sublicense, and distribute the + Work and such Derivative Works in Source or Object form. + + 3. Grant of Patent License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + (except as stated in this section) patent license to make, have made, + use, offer to sell, sell, import, and otherwise transfer the Work, + where such license applies only to those patent claims licensable + by such Contributor that are necessarily infringed by their + Contribution(s) alone or by combination of their Contribution(s) + with the Work to which such Contribution(s) was submitted. If You + institute patent litigation against any entity (including a + cross-claim or counterclaim in a lawsuit) alleging that the Work + or a Contribution incorporated within the Work constitutes direct + or contributory patent infringement, then any patent licenses + granted to You under this License for that Work shall terminate + as of the date such litigation is filed. + + 4. Redistribution. 
You may reproduce and distribute copies of the + Work or Derivative Works thereof in any medium, with or without + modifications, and in Source or Object form, provided that You + meet the following conditions: + + (a) You must give any other recipients of the Work or + Derivative Works a copy of this License; and + + (b) You must cause any modified files to carry prominent notices + stating that You changed the files; and + + (c) You must retain, in the Source form of any Derivative Works + that You distribute, all copyright, patent, trademark, and + attribution notices from the Source form of the Work, + excluding those notices that do not pertain to any part of + the Derivative Works; and + + (d) If the Work includes a "NOTICE" text file as part of its + distribution, then any Derivative Works that You distribute must + include a readable copy of the attribution notices contained + within such NOTICE file, excluding those notices that do not + pertain to any part of the Derivative Works, in at least one + of the following places: within a NOTICE text file distributed + as part of the Derivative Works; within the Source form or + documentation, if provided along with the Derivative Works; or, + within a display generated by the Derivative Works, if and + wherever such third-party notices normally appear. The contents + of the NOTICE file are for informational purposes only and + do not modify the License. You may add Your own attribution + notices within Derivative Works that You distribute, alongside + or as an addendum to the NOTICE text from the Work, provided + that such additional attribution notices cannot be construed + as modifying the License. 
+ + You may add Your own copyright statement to Your modifications and + may provide additional or different license terms and conditions + for use, reproduction, or distribution of Your modifications, or + for any such Derivative Works as a whole, provided Your use, + reproduction, and distribution of the Work otherwise complies with + the conditions stated in this License. + + 5. Submission of Contributions. Unless You explicitly state otherwise, + any Contribution intentionally submitted for inclusion in the Work + by You to the Licensor shall be under the terms and conditions of + this License, without any additional terms or conditions. + Notwithstanding the above, nothing herein shall supersede or modify + the terms of any separate license agreement you may have executed + with Licensor regarding such Contributions. + + 6. Trademarks. This License does not grant permission to use the trade + names, trademarks, service marks, or product names of the Licensor, + except as required for reasonable and customary use in describing the + origin of the Work and reproducing the content of the NOTICE file. + + 7. Disclaimer of Warranty. Unless required by applicable law or + agreed to in writing, Licensor provides the Work (and each + Contributor provides its Contributions) on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or + implied, including, without limitation, any warranties or conditions + of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A + PARTICULAR PURPOSE. You are solely responsible for determining the + appropriateness of using or redistributing the Work and assume any + risks associated with Your exercise of permissions under this License. + + 8. Limitation of Liability. 
In no event and under no legal theory, + whether in tort (including negligence), contract, or otherwise, + unless required by applicable law (such as deliberate and grossly + negligent acts) or agreed to in writing, shall any Contributor be + liable to You for damages, including any direct, indirect, special, + incidental, or consequential damages of any character arising as a + result of this License or out of the use or inability to use the + Work (including but not limited to damages for loss of goodwill, + work stoppage, computer failure or malfunction, or any and all + other commercial damages or losses), even if such Contributor + has been advised of the possibility of such damages. + + 9. Accepting Warranty or Additional Liability. While redistributing + the Work or Derivative Works thereof, You may choose to offer, + and charge a fee for, acceptance of support, warranty, indemnity, + or other liability obligations and/or rights consistent with this + License. However, in accepting such obligations, You may act only + on Your own behalf and on Your sole responsibility, not on behalf + of any other Contributor, and only if You agree to indemnify, + defend, and hold each Contributor harmless for any liability + incurred by, or claims asserted against, such Contributor by reason + of your accepting any such warranty or additional liability. + + END OF TERMS AND CONDITIONS + + APPENDIX: How to apply the Apache License to your work. + + To apply the Apache License to your work, attach the following + boilerplate notice, with the fields enclosed by brackets "{}" + replaced with your own identifying information. (Don't include + the brackets!) The text should be enclosed in the appropriate + comment syntax for the file format. We also recommend that a + file or class name and description of purpose be included on the + same "printed page" as the copyright notice for easier + identification within third-party archives. 
+ + Copyright {yyyy} {name of copyright owner} + + Licensed under the Apache License, Version 2.0 (the "License"); + you may not use this file except in compliance with the License. + You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + + Unless required by applicable law or agreed to in writing, software + distributed under the License is distributed on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + See the License for the specific language governing permissions and + limitations under the License. diff --git a/amplify/functions/deleteDocument/node_modules/@aws-crypto/util/README.md b/amplify/functions/deleteDocument/node_modules/@aws-crypto/util/README.md new file mode 100644 index 0000000..4c1c8aa --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@aws-crypto/util/README.md @@ -0,0 +1,16 @@ +# @aws-crypto/util + +Helper functions + +## Usage + +``` +import { convertToBuffer } from '@aws-crypto/util'; + +const data = "asdf"; +const utf8EncodedUint8Array = convertToBuffer(data); +``` + +## Test + +`npm test` diff --git a/amplify/functions/deleteDocument/node_modules/@aws-crypto/util/build/main/convertToBuffer.d.ts b/amplify/functions/deleteDocument/node_modules/@aws-crypto/util/build/main/convertToBuffer.d.ts new file mode 100644 index 0000000..697a5cd --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@aws-crypto/util/build/main/convertToBuffer.d.ts @@ -0,0 +1,2 @@ +import { SourceData } from "@aws-sdk/types"; +export declare function convertToBuffer(data: SourceData): Uint8Array; diff --git a/amplify/functions/deleteDocument/node_modules/@aws-crypto/util/build/main/convertToBuffer.js b/amplify/functions/deleteDocument/node_modules/@aws-crypto/util/build/main/convertToBuffer.js new file mode 100644 index 0000000..85bc8af --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@aws-crypto/util/build/main/convertToBuffer.js @@ -0,0 +1,24 @@ 
+"use strict"; +// Copyright Amazon.com Inc. or its affiliates. All Rights Reserved. +// SPDX-License-Identifier: Apache-2.0 +Object.defineProperty(exports, "__esModule", { value: true }); +exports.convertToBuffer = void 0; +var util_utf8_1 = require("@smithy/util-utf8"); +// Quick polyfill +var fromUtf8 = typeof Buffer !== "undefined" && Buffer.from + ? function (input) { return Buffer.from(input, "utf8"); } + : util_utf8_1.fromUtf8; +function convertToBuffer(data) { + // Already a Uint8, do nothing + if (data instanceof Uint8Array) + return data; + if (typeof data === "string") { + return fromUtf8(data); + } + if (ArrayBuffer.isView(data)) { + return new Uint8Array(data.buffer, data.byteOffset, data.byteLength / Uint8Array.BYTES_PER_ELEMENT); + } + return new Uint8Array(data); +} +exports.convertToBuffer = convertToBuffer; +//# sourceMappingURL=convertToBuffer.js.map \ No newline at end of file diff --git a/amplify/functions/deleteDocument/node_modules/@aws-crypto/util/build/main/convertToBuffer.js.map b/amplify/functions/deleteDocument/node_modules/@aws-crypto/util/build/main/convertToBuffer.js.map new file mode 100644 index 0000000..916d787 --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@aws-crypto/util/build/main/convertToBuffer.js.map @@ -0,0 +1 @@ 
+{"version":3,"file":"convertToBuffer.js","sourceRoot":"","sources":["../../src/convertToBuffer.ts"],"names":[],"mappings":";AAAA,oEAAoE;AACpE,sCAAsC;;;AAGtC,+CAAgE;AAEhE,iBAAiB;AACjB,IAAM,QAAQ,GACZ,OAAO,MAAM,KAAK,WAAW,IAAI,MAAM,CAAC,IAAI;IAC1C,CAAC,CAAC,UAAC,KAAa,IAAK,OAAA,MAAM,CAAC,IAAI,CAAC,KAAK,EAAE,MAAM,CAAC,EAA1B,CAA0B;IAC/C,CAAC,CAAC,oBAAe,CAAC;AAEtB,SAAgB,eAAe,CAAC,IAAgB;IAC9C,8BAA8B;IAC9B,IAAI,IAAI,YAAY,UAAU;QAAE,OAAO,IAAI,CAAC;IAE5C,IAAI,OAAO,IAAI,KAAK,QAAQ,EAAE;QAC5B,OAAO,QAAQ,CAAC,IAAI,CAAC,CAAC;KACvB;IAED,IAAI,WAAW,CAAC,MAAM,CAAC,IAAI,CAAC,EAAE;QAC5B,OAAO,IAAI,UAAU,CACnB,IAAI,CAAC,MAAM,EACX,IAAI,CAAC,UAAU,EACf,IAAI,CAAC,UAAU,GAAG,UAAU,CAAC,iBAAiB,CAC/C,CAAC;KACH;IAED,OAAO,IAAI,UAAU,CAAC,IAAI,CAAC,CAAC;AAC9B,CAAC;AAjBD,0CAiBC"} \ No newline at end of file diff --git a/amplify/functions/deleteDocument/node_modules/@aws-crypto/util/build/main/index.d.ts b/amplify/functions/deleteDocument/node_modules/@aws-crypto/util/build/main/index.d.ts new file mode 100644 index 0000000..783c73c --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@aws-crypto/util/build/main/index.d.ts @@ -0,0 +1,4 @@ +export { convertToBuffer } from "./convertToBuffer"; +export { isEmptyData } from "./isEmptyData"; +export { numToUint8 } from "./numToUint8"; +export { uint32ArrayFrom } from './uint32ArrayFrom'; diff --git a/amplify/functions/deleteDocument/node_modules/@aws-crypto/util/build/main/index.js b/amplify/functions/deleteDocument/node_modules/@aws-crypto/util/build/main/index.js new file mode 100644 index 0000000..94e1ca9 --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@aws-crypto/util/build/main/index.js @@ -0,0 +1,14 @@ +"use strict"; +// Copyright Amazon.com Inc. or its affiliates. All Rights Reserved. 
+// SPDX-License-Identifier: Apache-2.0 +Object.defineProperty(exports, "__esModule", { value: true }); +exports.uint32ArrayFrom = exports.numToUint8 = exports.isEmptyData = exports.convertToBuffer = void 0; +var convertToBuffer_1 = require("./convertToBuffer"); +Object.defineProperty(exports, "convertToBuffer", { enumerable: true, get: function () { return convertToBuffer_1.convertToBuffer; } }); +var isEmptyData_1 = require("./isEmptyData"); +Object.defineProperty(exports, "isEmptyData", { enumerable: true, get: function () { return isEmptyData_1.isEmptyData; } }); +var numToUint8_1 = require("./numToUint8"); +Object.defineProperty(exports, "numToUint8", { enumerable: true, get: function () { return numToUint8_1.numToUint8; } }); +var uint32ArrayFrom_1 = require("./uint32ArrayFrom"); +Object.defineProperty(exports, "uint32ArrayFrom", { enumerable: true, get: function () { return uint32ArrayFrom_1.uint32ArrayFrom; } }); +//# sourceMappingURL=index.js.map \ No newline at end of file diff --git a/amplify/functions/deleteDocument/node_modules/@aws-crypto/util/build/main/index.js.map b/amplify/functions/deleteDocument/node_modules/@aws-crypto/util/build/main/index.js.map new file mode 100644 index 0000000..a170172 --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@aws-crypto/util/build/main/index.js.map @@ -0,0 +1 @@ +{"version":3,"file":"index.js","sourceRoot":"","sources":["../../src/index.ts"],"names":[],"mappings":";AAAA,oEAAoE;AACpE,sCAAsC;;;AAEtC,qDAAoD;AAA3C,kHAAA,eAAe,OAAA;AACxB,6CAA4C;AAAnC,0GAAA,WAAW,OAAA;AACpB,2CAA0C;AAAjC,wGAAA,UAAU,OAAA;AACnB,qDAAkD;AAA1C,kHAAA,eAAe,OAAA"} \ No newline at end of file diff --git a/amplify/functions/deleteDocument/node_modules/@aws-crypto/util/build/main/isEmptyData.d.ts b/amplify/functions/deleteDocument/node_modules/@aws-crypto/util/build/main/isEmptyData.d.ts new file mode 100644 index 0000000..43ae4a7 --- /dev/null +++ 
b/amplify/functions/deleteDocument/node_modules/@aws-crypto/util/build/main/isEmptyData.d.ts @@ -0,0 +1,2 @@ +import { SourceData } from "@aws-sdk/types"; +export declare function isEmptyData(data: SourceData): boolean; diff --git a/amplify/functions/deleteDocument/node_modules/@aws-crypto/util/build/main/isEmptyData.js b/amplify/functions/deleteDocument/node_modules/@aws-crypto/util/build/main/isEmptyData.js new file mode 100644 index 0000000..6af1e89 --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@aws-crypto/util/build/main/isEmptyData.js @@ -0,0 +1,13 @@ +"use strict"; +// Copyright Amazon.com Inc. or its affiliates. All Rights Reserved. +// SPDX-License-Identifier: Apache-2.0 +Object.defineProperty(exports, "__esModule", { value: true }); +exports.isEmptyData = void 0; +function isEmptyData(data) { + if (typeof data === "string") { + return data.length === 0; + } + return data.byteLength === 0; +} +exports.isEmptyData = isEmptyData; +//# sourceMappingURL=isEmptyData.js.map \ No newline at end of file diff --git a/amplify/functions/deleteDocument/node_modules/@aws-crypto/util/build/main/isEmptyData.js.map b/amplify/functions/deleteDocument/node_modules/@aws-crypto/util/build/main/isEmptyData.js.map new file mode 100644 index 0000000..e1eaa02 --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@aws-crypto/util/build/main/isEmptyData.js.map @@ -0,0 +1 @@ +{"version":3,"file":"isEmptyData.js","sourceRoot":"","sources":["../../src/isEmptyData.ts"],"names":[],"mappings":";AAAA,oEAAoE;AACpE,sCAAsC;;;AAItC,SAAgB,WAAW,CAAC,IAAgB;IAC1C,IAAI,OAAO,IAAI,KAAK,QAAQ,EAAE;QAC5B,OAAO,IAAI,CAAC,MAAM,KAAK,CAAC,CAAC;KAC1B;IAED,OAAO,IAAI,CAAC,UAAU,KAAK,CAAC,CAAC;AAC/B,CAAC;AAND,kCAMC"} \ No newline at end of file diff --git a/amplify/functions/deleteDocument/node_modules/@aws-crypto/util/build/main/numToUint8.d.ts b/amplify/functions/deleteDocument/node_modules/@aws-crypto/util/build/main/numToUint8.d.ts new file mode 100644 index 0000000..5b702e8 
--- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@aws-crypto/util/build/main/numToUint8.d.ts @@ -0,0 +1 @@ +export declare function numToUint8(num: number): Uint8Array; diff --git a/amplify/functions/deleteDocument/node_modules/@aws-crypto/util/build/main/numToUint8.js b/amplify/functions/deleteDocument/node_modules/@aws-crypto/util/build/main/numToUint8.js new file mode 100644 index 0000000..2f070e1 --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@aws-crypto/util/build/main/numToUint8.js @@ -0,0 +1,15 @@ +"use strict"; +// Copyright Amazon.com Inc. or its affiliates. All Rights Reserved. +// SPDX-License-Identifier: Apache-2.0 +Object.defineProperty(exports, "__esModule", { value: true }); +exports.numToUint8 = void 0; +function numToUint8(num) { + return new Uint8Array([ + (num & 0xff000000) >> 24, + (num & 0x00ff0000) >> 16, + (num & 0x0000ff00) >> 8, + num & 0x000000ff, + ]); +} +exports.numToUint8 = numToUint8; +//# sourceMappingURL=numToUint8.js.map \ No newline at end of file diff --git a/amplify/functions/deleteDocument/node_modules/@aws-crypto/util/build/main/numToUint8.js.map b/amplify/functions/deleteDocument/node_modules/@aws-crypto/util/build/main/numToUint8.js.map new file mode 100644 index 0000000..fea3aca --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@aws-crypto/util/build/main/numToUint8.js.map @@ -0,0 +1 @@ +{"version":3,"file":"numToUint8.js","sourceRoot":"","sources":["../../src/numToUint8.ts"],"names":[],"mappings":";AAAA,oEAAoE;AACpE,sCAAsC;;;AAEtC,SAAgB,UAAU,CAAC,GAAW;IACpC,OAAO,IAAI,UAAU,CAAC;QACpB,CAAC,GAAG,GAAG,UAAU,CAAC,IAAI,EAAE;QACxB,CAAC,GAAG,GAAG,UAAU,CAAC,IAAI,EAAE;QACxB,CAAC,GAAG,GAAG,UAAU,CAAC,IAAI,CAAC;QACvB,GAAG,GAAG,UAAU;KACjB,CAAC,CAAC;AACL,CAAC;AAPD,gCAOC"} \ No newline at end of file diff --git a/amplify/functions/deleteDocument/node_modules/@aws-crypto/util/build/main/uint32ArrayFrom.d.ts 
b/amplify/functions/deleteDocument/node_modules/@aws-crypto/util/build/main/uint32ArrayFrom.d.ts new file mode 100644 index 0000000..fea6607 --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@aws-crypto/util/build/main/uint32ArrayFrom.d.ts @@ -0,0 +1 @@ +export declare function uint32ArrayFrom(a_lookUpTable: Array): Uint32Array; diff --git a/amplify/functions/deleteDocument/node_modules/@aws-crypto/util/build/main/uint32ArrayFrom.js b/amplify/functions/deleteDocument/node_modules/@aws-crypto/util/build/main/uint32ArrayFrom.js new file mode 100644 index 0000000..226cdc3 --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@aws-crypto/util/build/main/uint32ArrayFrom.js @@ -0,0 +1,20 @@ +"use strict"; +// Copyright Amazon.com Inc. or its affiliates. All Rights Reserved. +// SPDX-License-Identifier: Apache-2.0 +Object.defineProperty(exports, "__esModule", { value: true }); +exports.uint32ArrayFrom = void 0; +// IE 11 does not support Array.from, so we do it manually +function uint32ArrayFrom(a_lookUpTable) { + if (!Uint32Array.from) { + var return_array = new Uint32Array(a_lookUpTable.length); + var a_index = 0; + while (a_index < a_lookUpTable.length) { + return_array[a_index] = a_lookUpTable[a_index]; + a_index += 1; + } + return return_array; + } + return Uint32Array.from(a_lookUpTable); +} +exports.uint32ArrayFrom = uint32ArrayFrom; +//# sourceMappingURL=uint32ArrayFrom.js.map \ No newline at end of file diff --git a/amplify/functions/deleteDocument/node_modules/@aws-crypto/util/build/main/uint32ArrayFrom.js.map b/amplify/functions/deleteDocument/node_modules/@aws-crypto/util/build/main/uint32ArrayFrom.js.map new file mode 100644 index 0000000..fe016e1 --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@aws-crypto/util/build/main/uint32ArrayFrom.js.map @@ -0,0 +1 @@ 
+{"version":3,"file":"uint32ArrayFrom.js","sourceRoot":"","sources":["../../src/uint32ArrayFrom.ts"],"names":[],"mappings":";AAAA,oEAAoE;AACpE,sCAAsC;;;AAEtC,0DAA0D;AAC1D,SAAgB,eAAe,CAAC,aAA4B;IAC1D,IAAI,CAAC,WAAW,CAAC,IAAI,EAAE;QACrB,IAAM,YAAY,GAAG,IAAI,WAAW,CAAC,aAAa,CAAC,MAAM,CAAC,CAAA;QAC1D,IAAI,OAAO,GAAG,CAAC,CAAA;QACf,OAAO,OAAO,GAAG,aAAa,CAAC,MAAM,EAAE;YACrC,YAAY,CAAC,OAAO,CAAC,GAAG,aAAa,CAAC,OAAO,CAAC,CAAA;YAC9C,OAAO,IAAI,CAAC,CAAA;SACb;QACD,OAAO,YAAY,CAAA;KACpB;IACD,OAAO,WAAW,CAAC,IAAI,CAAC,aAAa,CAAC,CAAA;AACxC,CAAC;AAXD,0CAWC"} \ No newline at end of file diff --git a/amplify/functions/deleteDocument/node_modules/@aws-crypto/util/build/module/convertToBuffer.d.ts b/amplify/functions/deleteDocument/node_modules/@aws-crypto/util/build/module/convertToBuffer.d.ts new file mode 100644 index 0000000..697a5cd --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@aws-crypto/util/build/module/convertToBuffer.d.ts @@ -0,0 +1,2 @@ +import { SourceData } from "@aws-sdk/types"; +export declare function convertToBuffer(data: SourceData): Uint8Array; diff --git a/amplify/functions/deleteDocument/node_modules/@aws-crypto/util/build/module/convertToBuffer.js b/amplify/functions/deleteDocument/node_modules/@aws-crypto/util/build/module/convertToBuffer.js new file mode 100644 index 0000000..c700d1e --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@aws-crypto/util/build/module/convertToBuffer.js @@ -0,0 +1,20 @@ +// Copyright Amazon.com Inc. or its affiliates. All Rights Reserved. +// SPDX-License-Identifier: Apache-2.0 +import { fromUtf8 as fromUtf8Browser } from "@smithy/util-utf8"; +// Quick polyfill +var fromUtf8 = typeof Buffer !== "undefined" && Buffer.from + ? 
function (input) { return Buffer.from(input, "utf8"); } + : fromUtf8Browser; +export function convertToBuffer(data) { + // Already a Uint8, do nothing + if (data instanceof Uint8Array) + return data; + if (typeof data === "string") { + return fromUtf8(data); + } + if (ArrayBuffer.isView(data)) { + return new Uint8Array(data.buffer, data.byteOffset, data.byteLength / Uint8Array.BYTES_PER_ELEMENT); + } + return new Uint8Array(data); +} +//# sourceMappingURL=convertToBuffer.js.map \ No newline at end of file diff --git a/amplify/functions/deleteDocument/node_modules/@aws-crypto/util/build/module/convertToBuffer.js.map b/amplify/functions/deleteDocument/node_modules/@aws-crypto/util/build/module/convertToBuffer.js.map new file mode 100644 index 0000000..92694a4 --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@aws-crypto/util/build/module/convertToBuffer.js.map @@ -0,0 +1 @@ +{"version":3,"file":"convertToBuffer.js","sourceRoot":"","sources":["../../src/convertToBuffer.ts"],"names":[],"mappings":"AAAA,oEAAoE;AACpE,sCAAsC;AAGtC,OAAO,EAAE,QAAQ,IAAI,eAAe,EAAE,MAAM,mBAAmB,CAAC;AAEhE,iBAAiB;AACjB,IAAM,QAAQ,GACZ,OAAO,MAAM,KAAK,WAAW,IAAI,MAAM,CAAC,IAAI;IAC1C,CAAC,CAAC,UAAC,KAAa,IAAK,OAAA,MAAM,CAAC,IAAI,CAAC,KAAK,EAAE,MAAM,CAAC,EAA1B,CAA0B;IAC/C,CAAC,CAAC,eAAe,CAAC;AAEtB,MAAM,UAAU,eAAe,CAAC,IAAgB;IAC9C,8BAA8B;IAC9B,IAAI,IAAI,YAAY,UAAU;QAAE,OAAO,IAAI,CAAC;IAE5C,IAAI,OAAO,IAAI,KAAK,QAAQ,EAAE;QAC5B,OAAO,QAAQ,CAAC,IAAI,CAAC,CAAC;KACvB;IAED,IAAI,WAAW,CAAC,MAAM,CAAC,IAAI,CAAC,EAAE;QAC5B,OAAO,IAAI,UAAU,CACnB,IAAI,CAAC,MAAM,EACX,IAAI,CAAC,UAAU,EACf,IAAI,CAAC,UAAU,GAAG,UAAU,CAAC,iBAAiB,CAC/C,CAAC;KACH;IAED,OAAO,IAAI,UAAU,CAAC,IAAI,CAAC,CAAC;AAC9B,CAAC"} \ No newline at end of file diff --git a/amplify/functions/deleteDocument/node_modules/@aws-crypto/util/build/module/index.d.ts b/amplify/functions/deleteDocument/node_modules/@aws-crypto/util/build/module/index.d.ts new file mode 100644 index 0000000..783c73c --- /dev/null +++ 
b/amplify/functions/deleteDocument/node_modules/@aws-crypto/util/build/module/index.d.ts @@ -0,0 +1,4 @@ +export { convertToBuffer } from "./convertToBuffer"; +export { isEmptyData } from "./isEmptyData"; +export { numToUint8 } from "./numToUint8"; +export { uint32ArrayFrom } from './uint32ArrayFrom'; diff --git a/amplify/functions/deleteDocument/node_modules/@aws-crypto/util/build/module/index.js b/amplify/functions/deleteDocument/node_modules/@aws-crypto/util/build/module/index.js new file mode 100644 index 0000000..077e8b6 --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@aws-crypto/util/build/module/index.js @@ -0,0 +1,7 @@ +// Copyright Amazon.com Inc. or its affiliates. All Rights Reserved. +// SPDX-License-Identifier: Apache-2.0 +export { convertToBuffer } from "./convertToBuffer"; +export { isEmptyData } from "./isEmptyData"; +export { numToUint8 } from "./numToUint8"; +export { uint32ArrayFrom } from './uint32ArrayFrom'; +//# sourceMappingURL=index.js.map \ No newline at end of file diff --git a/amplify/functions/deleteDocument/node_modules/@aws-crypto/util/build/module/index.js.map b/amplify/functions/deleteDocument/node_modules/@aws-crypto/util/build/module/index.js.map new file mode 100644 index 0000000..4ddb12d --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@aws-crypto/util/build/module/index.js.map @@ -0,0 +1 @@ +{"version":3,"file":"index.js","sourceRoot":"","sources":["../../src/index.ts"],"names":[],"mappings":"AAAA,oEAAoE;AACpE,sCAAsC;AAEtC,OAAO,EAAE,eAAe,EAAE,MAAM,mBAAmB,CAAC;AACpD,OAAO,EAAE,WAAW,EAAE,MAAM,eAAe,CAAC;AAC5C,OAAO,EAAE,UAAU,EAAE,MAAM,cAAc,CAAC;AAC1C,OAAO,EAAC,eAAe,EAAC,MAAM,mBAAmB,CAAC"} \ No newline at end of file diff --git a/amplify/functions/deleteDocument/node_modules/@aws-crypto/util/build/module/isEmptyData.d.ts b/amplify/functions/deleteDocument/node_modules/@aws-crypto/util/build/module/isEmptyData.d.ts new file mode 100644 index 0000000..43ae4a7 --- /dev/null +++ 
b/amplify/functions/deleteDocument/node_modules/@aws-crypto/util/build/module/isEmptyData.d.ts @@ -0,0 +1,2 @@ +import { SourceData } from "@aws-sdk/types"; +export declare function isEmptyData(data: SourceData): boolean; diff --git a/amplify/functions/deleteDocument/node_modules/@aws-crypto/util/build/module/isEmptyData.js b/amplify/functions/deleteDocument/node_modules/@aws-crypto/util/build/module/isEmptyData.js new file mode 100644 index 0000000..13841c7 --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@aws-crypto/util/build/module/isEmptyData.js @@ -0,0 +1,9 @@ +// Copyright Amazon.com Inc. or its affiliates. All Rights Reserved. +// SPDX-License-Identifier: Apache-2.0 +export function isEmptyData(data) { + if (typeof data === "string") { + return data.length === 0; + } + return data.byteLength === 0; +} +//# sourceMappingURL=isEmptyData.js.map \ No newline at end of file diff --git a/amplify/functions/deleteDocument/node_modules/@aws-crypto/util/build/module/isEmptyData.js.map b/amplify/functions/deleteDocument/node_modules/@aws-crypto/util/build/module/isEmptyData.js.map new file mode 100644 index 0000000..fe0fa02 --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@aws-crypto/util/build/module/isEmptyData.js.map @@ -0,0 +1 @@ +{"version":3,"file":"isEmptyData.js","sourceRoot":"","sources":["../../src/isEmptyData.ts"],"names":[],"mappings":"AAAA,oEAAoE;AACpE,sCAAsC;AAItC,MAAM,UAAU,WAAW,CAAC,IAAgB;IAC1C,IAAI,OAAO,IAAI,KAAK,QAAQ,EAAE;QAC5B,OAAO,IAAI,CAAC,MAAM,KAAK,CAAC,CAAC;KAC1B;IAED,OAAO,IAAI,CAAC,UAAU,KAAK,CAAC,CAAC;AAC/B,CAAC"} \ No newline at end of file diff --git a/amplify/functions/deleteDocument/node_modules/@aws-crypto/util/build/module/numToUint8.d.ts b/amplify/functions/deleteDocument/node_modules/@aws-crypto/util/build/module/numToUint8.d.ts new file mode 100644 index 0000000..5b702e8 --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@aws-crypto/util/build/module/numToUint8.d.ts @@ -0,0 +1 @@ +export 
declare function numToUint8(num: number): Uint8Array; diff --git a/amplify/functions/deleteDocument/node_modules/@aws-crypto/util/build/module/numToUint8.js b/amplify/functions/deleteDocument/node_modules/@aws-crypto/util/build/module/numToUint8.js new file mode 100644 index 0000000..0ca6e47 --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@aws-crypto/util/build/module/numToUint8.js @@ -0,0 +1,11 @@ +// Copyright Amazon.com Inc. or its affiliates. All Rights Reserved. +// SPDX-License-Identifier: Apache-2.0 +export function numToUint8(num) { + return new Uint8Array([ + (num & 0xff000000) >> 24, + (num & 0x00ff0000) >> 16, + (num & 0x0000ff00) >> 8, + num & 0x000000ff, + ]); +} +//# sourceMappingURL=numToUint8.js.map \ No newline at end of file diff --git a/amplify/functions/deleteDocument/node_modules/@aws-crypto/util/build/module/numToUint8.js.map b/amplify/functions/deleteDocument/node_modules/@aws-crypto/util/build/module/numToUint8.js.map new file mode 100644 index 0000000..ac53e33 --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@aws-crypto/util/build/module/numToUint8.js.map @@ -0,0 +1 @@ +{"version":3,"file":"numToUint8.js","sourceRoot":"","sources":["../../src/numToUint8.ts"],"names":[],"mappings":"AAAA,oEAAoE;AACpE,sCAAsC;AAEtC,MAAM,UAAU,UAAU,CAAC,GAAW;IACpC,OAAO,IAAI,UAAU,CAAC;QACpB,CAAC,GAAG,GAAG,UAAU,CAAC,IAAI,EAAE;QACxB,CAAC,GAAG,GAAG,UAAU,CAAC,IAAI,EAAE;QACxB,CAAC,GAAG,GAAG,UAAU,CAAC,IAAI,CAAC;QACvB,GAAG,GAAG,UAAU;KACjB,CAAC,CAAC;AACL,CAAC"} \ No newline at end of file diff --git a/amplify/functions/deleteDocument/node_modules/@aws-crypto/util/build/module/uint32ArrayFrom.d.ts b/amplify/functions/deleteDocument/node_modules/@aws-crypto/util/build/module/uint32ArrayFrom.d.ts new file mode 100644 index 0000000..fea6607 --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@aws-crypto/util/build/module/uint32ArrayFrom.d.ts @@ -0,0 +1 @@ +export declare function uint32ArrayFrom(a_lookUpTable: Array): 
Uint32Array; diff --git a/amplify/functions/deleteDocument/node_modules/@aws-crypto/util/build/module/uint32ArrayFrom.js b/amplify/functions/deleteDocument/node_modules/@aws-crypto/util/build/module/uint32ArrayFrom.js new file mode 100644 index 0000000..c69435e --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@aws-crypto/util/build/module/uint32ArrayFrom.js @@ -0,0 +1,16 @@ +// Copyright Amazon.com Inc. or its affiliates. All Rights Reserved. +// SPDX-License-Identifier: Apache-2.0 +// IE 11 does not support Array.from, so we do it manually +export function uint32ArrayFrom(a_lookUpTable) { + if (!Uint32Array.from) { + var return_array = new Uint32Array(a_lookUpTable.length); + var a_index = 0; + while (a_index < a_lookUpTable.length) { + return_array[a_index] = a_lookUpTable[a_index]; + a_index += 1; + } + return return_array; + } + return Uint32Array.from(a_lookUpTable); +} +//# sourceMappingURL=uint32ArrayFrom.js.map \ No newline at end of file diff --git a/amplify/functions/deleteDocument/node_modules/@aws-crypto/util/build/module/uint32ArrayFrom.js.map b/amplify/functions/deleteDocument/node_modules/@aws-crypto/util/build/module/uint32ArrayFrom.js.map new file mode 100644 index 0000000..7384b0a --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@aws-crypto/util/build/module/uint32ArrayFrom.js.map @@ -0,0 +1 @@ +{"version":3,"file":"uint32ArrayFrom.js","sourceRoot":"","sources":["../../src/uint32ArrayFrom.ts"],"names":[],"mappings":"AAAA,oEAAoE;AACpE,sCAAsC;AAEtC,0DAA0D;AAC1D,MAAM,UAAU,eAAe,CAAC,aAA4B;IAC1D,IAAI,CAAC,WAAW,CAAC,IAAI,EAAE;QACrB,IAAM,YAAY,GAAG,IAAI,WAAW,CAAC,aAAa,CAAC,MAAM,CAAC,CAAA;QAC1D,IAAI,OAAO,GAAG,CAAC,CAAA;QACf,OAAO,OAAO,GAAG,aAAa,CAAC,MAAM,EAAE;YACrC,YAAY,CAAC,OAAO,CAAC,GAAG,aAAa,CAAC,OAAO,CAAC,CAAA;YAC9C,OAAO,IAAI,CAAC,CAAA;SACb;QACD,OAAO,YAAY,CAAA;KACpB;IACD,OAAO,WAAW,CAAC,IAAI,CAAC,aAAa,CAAC,CAAA;AACxC,CAAC"} \ No newline at end of file diff --git 
a/amplify/functions/deleteDocument/node_modules/@aws-crypto/util/node_modules/@smithy/is-array-buffer/LICENSE b/amplify/functions/deleteDocument/node_modules/@aws-crypto/util/node_modules/@smithy/is-array-buffer/LICENSE new file mode 100644 index 0000000..7b6491b --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@aws-crypto/util/node_modules/@smithy/is-array-buffer/LICENSE @@ -0,0 +1,201 @@ +Apache License + Version 2.0, January 2004 + http://www.apache.org/licenses/ + + TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION + + 1. Definitions. + + "License" shall mean the terms and conditions for use, reproduction, + and distribution as defined by Sections 1 through 9 of this document. + + "Licensor" shall mean the copyright owner or entity authorized by + the copyright owner that is granting the License. + + "Legal Entity" shall mean the union of the acting entity and all + other entities that control, are controlled by, or are under common + control with that entity. For the purposes of this definition, + "control" means (i) the power, direct or indirect, to cause the + direction or management of such entity, whether by contract or + otherwise, or (ii) ownership of fifty percent (50%) or more of the + outstanding shares, or (iii) beneficial ownership of such entity. + + "You" (or "Your") shall mean an individual or Legal Entity + exercising permissions granted by this License. + + "Source" form shall mean the preferred form for making modifications, + including but not limited to software source code, documentation + source, and configuration files. + + "Object" form shall mean any form resulting from mechanical + transformation or translation of a Source form, including but + not limited to compiled object code, generated documentation, + and conversions to other media types. 
+ + "Work" shall mean the work of authorship, whether in Source or + Object form, made available under the License, as indicated by a + copyright notice that is included in or attached to the work + (an example is provided in the Appendix below). + + "Derivative Works" shall mean any work, whether in Source or Object + form, that is based on (or derived from) the Work and for which the + editorial revisions, annotations, elaborations, or other modifications + represent, as a whole, an original work of authorship. For the purposes + of this License, Derivative Works shall not include works that remain + separable from, or merely link (or bind by name) to the interfaces of, + the Work and Derivative Works thereof. + + "Contribution" shall mean any work of authorship, including + the original version of the Work and any modifications or additions + to that Work or Derivative Works thereof, that is intentionally + submitted to Licensor for inclusion in the Work by the copyright owner + or by an individual or Legal Entity authorized to submit on behalf of + the copyright owner. For the purposes of this definition, "submitted" + means any form of electronic, verbal, or written communication sent + to the Licensor or its representatives, including but not limited to + communication on electronic mailing lists, source code control systems, + and issue tracking systems that are managed by, or on behalf of, the + Licensor for the purpose of discussing and improving the Work, but + excluding communication that is conspicuously marked or otherwise + designated in writing by the copyright owner as "Not a Contribution." + + "Contributor" shall mean Licensor and any individual or Legal Entity + on behalf of whom a Contribution has been received by Licensor and + subsequently incorporated within the Work. + + 2. Grant of Copyright License. 
Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + copyright license to reproduce, prepare Derivative Works of, + publicly display, publicly perform, sublicense, and distribute the + Work and such Derivative Works in Source or Object form. + + 3. Grant of Patent License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + (except as stated in this section) patent license to make, have made, + use, offer to sell, sell, import, and otherwise transfer the Work, + where such license applies only to those patent claims licensable + by such Contributor that are necessarily infringed by their + Contribution(s) alone or by combination of their Contribution(s) + with the Work to which such Contribution(s) was submitted. If You + institute patent litigation against any entity (including a + cross-claim or counterclaim in a lawsuit) alleging that the Work + or a Contribution incorporated within the Work constitutes direct + or contributory patent infringement, then any patent licenses + granted to You under this License for that Work shall terminate + as of the date such litigation is filed. + + 4. Redistribution. 
You may reproduce and distribute copies of the + Work or Derivative Works thereof in any medium, with or without + modifications, and in Source or Object form, provided that You + meet the following conditions: + + (a) You must give any other recipients of the Work or + Derivative Works a copy of this License; and + + (b) You must cause any modified files to carry prominent notices + stating that You changed the files; and + + (c) You must retain, in the Source form of any Derivative Works + that You distribute, all copyright, patent, trademark, and + attribution notices from the Source form of the Work, + excluding those notices that do not pertain to any part of + the Derivative Works; and + + (d) If the Work includes a "NOTICE" text file as part of its + distribution, then any Derivative Works that You distribute must + include a readable copy of the attribution notices contained + within such NOTICE file, excluding those notices that do not + pertain to any part of the Derivative Works, in at least one + of the following places: within a NOTICE text file distributed + as part of the Derivative Works; within the Source form or + documentation, if provided along with the Derivative Works; or, + within a display generated by the Derivative Works, if and + wherever such third-party notices normally appear. The contents + of the NOTICE file are for informational purposes only and + do not modify the License. You may add Your own attribution + notices within Derivative Works that You distribute, alongside + or as an addendum to the NOTICE text from the Work, provided + that such additional attribution notices cannot be construed + as modifying the License. 
+ + You may add Your own copyright statement to Your modifications and + may provide additional or different license terms and conditions + for use, reproduction, or distribution of Your modifications, or + for any such Derivative Works as a whole, provided Your use, + reproduction, and distribution of the Work otherwise complies with + the conditions stated in this License. + + 5. Submission of Contributions. Unless You explicitly state otherwise, + any Contribution intentionally submitted for inclusion in the Work + by You to the Licensor shall be under the terms and conditions of + this License, without any additional terms or conditions. + Notwithstanding the above, nothing herein shall supersede or modify + the terms of any separate license agreement you may have executed + with Licensor regarding such Contributions. + + 6. Trademarks. This License does not grant permission to use the trade + names, trademarks, service marks, or product names of the Licensor, + except as required for reasonable and customary use in describing the + origin of the Work and reproducing the content of the NOTICE file. + + 7. Disclaimer of Warranty. Unless required by applicable law or + agreed to in writing, Licensor provides the Work (and each + Contributor provides its Contributions) on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or + implied, including, without limitation, any warranties or conditions + of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A + PARTICULAR PURPOSE. You are solely responsible for determining the + appropriateness of using or redistributing the Work and assume any + risks associated with Your exercise of permissions under this License. + + 8. Limitation of Liability. 
In no event and under no legal theory, + whether in tort (including negligence), contract, or otherwise, + unless required by applicable law (such as deliberate and grossly + negligent acts) or agreed to in writing, shall any Contributor be + liable to You for damages, including any direct, indirect, special, + incidental, or consequential damages of any character arising as a + result of this License or out of the use or inability to use the + Work (including but not limited to damages for loss of goodwill, + work stoppage, computer failure or malfunction, or any and all + other commercial damages or losses), even if such Contributor + has been advised of the possibility of such damages. + + 9. Accepting Warranty or Additional Liability. While redistributing + the Work or Derivative Works thereof, You may choose to offer, + and charge a fee for, acceptance of support, warranty, indemnity, + or other liability obligations and/or rights consistent with this + License. However, in accepting such obligations, You may act only + on Your own behalf and on Your sole responsibility, not on behalf + of any other Contributor, and only if You agree to indemnify, + defend, and hold each Contributor harmless for any liability + incurred by, or claims asserted against, such Contributor by reason + of your accepting any such warranty or additional liability. + + END OF TERMS AND CONDITIONS + + APPENDIX: How to apply the Apache License to your work. + + To apply the Apache License to your work, attach the following + boilerplate notice, with the fields enclosed by brackets "{}" + replaced with your own identifying information. (Don't include + the brackets!) The text should be enclosed in the appropriate + comment syntax for the file format. We also recommend that a + file or class name and description of purpose be included on the + same "printed page" as the copyright notice for easier + identification within third-party archives. + + Copyright 2018-2020 Amazon.com, Inc. 
or its affiliates. All Rights Reserved. + + Licensed under the Apache License, Version 2.0 (the "License"); + you may not use this file except in compliance with the License. + You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + + Unless required by applicable law or agreed to in writing, software + distributed under the License is distributed on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + See the License for the specific language governing permissions and + limitations under the License. \ No newline at end of file diff --git a/amplify/functions/deleteDocument/node_modules/@aws-crypto/util/node_modules/@smithy/is-array-buffer/README.md b/amplify/functions/deleteDocument/node_modules/@aws-crypto/util/node_modules/@smithy/is-array-buffer/README.md new file mode 100644 index 0000000..31853f2 --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@aws-crypto/util/node_modules/@smithy/is-array-buffer/README.md @@ -0,0 +1,10 @@ +# @smithy/is-array-buffer + +[![NPM version](https://img.shields.io/npm/v/@smithy/is-array-buffer/latest.svg)](https://www.npmjs.com/package/@smithy/is-array-buffer) +[![NPM downloads](https://img.shields.io/npm/dm/@smithy/is-array-buffer.svg)](https://www.npmjs.com/package/@smithy/is-array-buffer) + +> An internal package + +## Usage + +You probably shouldn't, at least directly. 
diff --git a/amplify/functions/deleteDocument/node_modules/@aws-crypto/util/node_modules/@smithy/is-array-buffer/dist-cjs/index.js b/amplify/functions/deleteDocument/node_modules/@aws-crypto/util/node_modules/@smithy/is-array-buffer/dist-cjs/index.js new file mode 100644 index 0000000..5d792e7 --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@aws-crypto/util/node_modules/@smithy/is-array-buffer/dist-cjs/index.js @@ -0,0 +1,32 @@ +var __defProp = Object.defineProperty; +var __getOwnPropDesc = Object.getOwnPropertyDescriptor; +var __getOwnPropNames = Object.getOwnPropertyNames; +var __hasOwnProp = Object.prototype.hasOwnProperty; +var __name = (target, value) => __defProp(target, "name", { value, configurable: true }); +var __export = (target, all) => { + for (var name in all) + __defProp(target, name, { get: all[name], enumerable: true }); +}; +var __copyProps = (to, from, except, desc) => { + if (from && typeof from === "object" || typeof from === "function") { + for (let key of __getOwnPropNames(from)) + if (!__hasOwnProp.call(to, key) && key !== except) + __defProp(to, key, { get: () => from[key], enumerable: !(desc = __getOwnPropDesc(from, key)) || desc.enumerable }); + } + return to; +}; +var __toCommonJS = (mod) => __copyProps(__defProp({}, "__esModule", { value: true }), mod); + +// src/index.ts +var src_exports = {}; +__export(src_exports, { + isArrayBuffer: () => isArrayBuffer +}); +module.exports = __toCommonJS(src_exports); +var isArrayBuffer = /* @__PURE__ */ __name((arg) => typeof ArrayBuffer === "function" && arg instanceof ArrayBuffer || Object.prototype.toString.call(arg) === "[object ArrayBuffer]", "isArrayBuffer"); +// Annotate the CommonJS export names for ESM import in node: + +0 && (module.exports = { + isArrayBuffer +}); + diff --git a/amplify/functions/deleteDocument/node_modules/@aws-crypto/util/node_modules/@smithy/is-array-buffer/dist-es/index.js 
b/amplify/functions/deleteDocument/node_modules/@aws-crypto/util/node_modules/@smithy/is-array-buffer/dist-es/index.js new file mode 100644 index 0000000..8096cca --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@aws-crypto/util/node_modules/@smithy/is-array-buffer/dist-es/index.js @@ -0,0 +1,2 @@ +export const isArrayBuffer = (arg) => (typeof ArrayBuffer === "function" && arg instanceof ArrayBuffer) || + Object.prototype.toString.call(arg) === "[object ArrayBuffer]"; diff --git a/amplify/functions/deleteDocument/node_modules/@aws-crypto/util/node_modules/@smithy/is-array-buffer/dist-types/index.d.ts b/amplify/functions/deleteDocument/node_modules/@aws-crypto/util/node_modules/@smithy/is-array-buffer/dist-types/index.d.ts new file mode 100644 index 0000000..64f452e --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@aws-crypto/util/node_modules/@smithy/is-array-buffer/dist-types/index.d.ts @@ -0,0 +1,4 @@ +/** + * @internal + */ +export declare const isArrayBuffer: (arg: any) => arg is ArrayBuffer; diff --git a/amplify/functions/deleteDocument/node_modules/@aws-crypto/util/node_modules/@smithy/is-array-buffer/dist-types/ts3.4/index.d.ts b/amplify/functions/deleteDocument/node_modules/@aws-crypto/util/node_modules/@smithy/is-array-buffer/dist-types/ts3.4/index.d.ts new file mode 100644 index 0000000..ca8fd6b --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@aws-crypto/util/node_modules/@smithy/is-array-buffer/dist-types/ts3.4/index.d.ts @@ -0,0 +1,4 @@ +/** + * @internal + */ +export declare const isArrayBuffer: (arg: any) => arg is ArrayBuffer; diff --git a/amplify/functions/deleteDocument/node_modules/@aws-crypto/util/node_modules/@smithy/is-array-buffer/package.json b/amplify/functions/deleteDocument/node_modules/@aws-crypto/util/node_modules/@smithy/is-array-buffer/package.json new file mode 100644 index 0000000..ed8affc --- /dev/null +++ 
b/amplify/functions/deleteDocument/node_modules/@aws-crypto/util/node_modules/@smithy/is-array-buffer/package.json @@ -0,0 +1,60 @@ +{ + "name": "@smithy/is-array-buffer", + "version": "2.2.0", + "description": "Provides a function for detecting if an argument is an ArrayBuffer", + "scripts": { + "build": "concurrently 'yarn:build:cjs' 'yarn:build:es' 'yarn:build:types && yarn build:types:downlevel'", + "build:cjs": "node ../../scripts/inline is-array-buffer", + "build:es": "yarn g:tsc -p tsconfig.es.json", + "build:types": "yarn g:tsc -p tsconfig.types.json", + "build:types:downlevel": "downlevel-dts dist-types dist-types/ts3.4", + "stage-release": "rimraf ./.release && yarn pack && mkdir ./.release && tar zxvf ./package.tgz --directory ./.release && rm ./package.tgz", + "clean": "rimraf ./dist-* && rimraf *.tsbuildinfo || exit 0", + "lint": "eslint -c ../../.eslintrc.js \"src/**/*.ts\"", + "format": "prettier --config ../../prettier.config.js --ignore-path ../.prettierignore --write \"**/*.{ts,md,json}\"", + "test": "yarn g:jest" + }, + "author": { + "name": "AWS SDK for JavaScript Team", + "url": "https://aws.amazon.com/javascript/" + }, + "license": "Apache-2.0", + "main": "./dist-cjs/index.js", + "module": "./dist-es/index.js", + "types": "./dist-types/index.d.ts", + "dependencies": { + "tslib": "^2.6.2" + }, + "engines": { + "node": ">=14.0.0" + }, + "typesVersions": { + "<4.0": { + "dist-types/*": [ + "dist-types/ts3.4/*" + ] + } + }, + "files": [ + "dist-*/**" + ], + "homepage": "https://github.com/awslabs/smithy-typescript/tree/main/packages/is-array-buffer", + "repository": { + "type": "git", + "url": "https://github.com/awslabs/smithy-typescript.git", + "directory": "packages/is-array-buffer" + }, + "devDependencies": { + "@tsconfig/recommended": "1.0.1", + "concurrently": "7.0.0", + "downlevel-dts": "0.10.1", + "rimraf": "3.0.2", + "typedoc": "0.23.23" + }, + "typedoc": { + "entryPoint": "src/index.ts" + }, + "publishConfig": { + "directory": 
".release/package" + } +} \ No newline at end of file diff --git a/amplify/functions/deleteDocument/node_modules/@aws-crypto/util/node_modules/@smithy/util-buffer-from/LICENSE b/amplify/functions/deleteDocument/node_modules/@aws-crypto/util/node_modules/@smithy/util-buffer-from/LICENSE new file mode 100644 index 0000000..7b6491b --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@aws-crypto/util/node_modules/@smithy/util-buffer-from/LICENSE @@ -0,0 +1,201 @@ +Apache License + Version 2.0, January 2004 + http://www.apache.org/licenses/ + + TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION + + 1. Definitions. + + "License" shall mean the terms and conditions for use, reproduction, + and distribution as defined by Sections 1 through 9 of this document. + + "Licensor" shall mean the copyright owner or entity authorized by + the copyright owner that is granting the License. + + "Legal Entity" shall mean the union of the acting entity and all + other entities that control, are controlled by, or are under common + control with that entity. For the purposes of this definition, + "control" means (i) the power, direct or indirect, to cause the + direction or management of such entity, whether by contract or + otherwise, or (ii) ownership of fifty percent (50%) or more of the + outstanding shares, or (iii) beneficial ownership of such entity. + + "You" (or "Your") shall mean an individual or Legal Entity + exercising permissions granted by this License. + + "Source" form shall mean the preferred form for making modifications, + including but not limited to software source code, documentation + source, and configuration files. + + "Object" form shall mean any form resulting from mechanical + transformation or translation of a Source form, including but + not limited to compiled object code, generated documentation, + and conversions to other media types. 
+ + "Work" shall mean the work of authorship, whether in Source or + Object form, made available under the License, as indicated by a + copyright notice that is included in or attached to the work + (an example is provided in the Appendix below). + + "Derivative Works" shall mean any work, whether in Source or Object + form, that is based on (or derived from) the Work and for which the + editorial revisions, annotations, elaborations, or other modifications + represent, as a whole, an original work of authorship. For the purposes + of this License, Derivative Works shall not include works that remain + separable from, or merely link (or bind by name) to the interfaces of, + the Work and Derivative Works thereof. + + "Contribution" shall mean any work of authorship, including + the original version of the Work and any modifications or additions + to that Work or Derivative Works thereof, that is intentionally + submitted to Licensor for inclusion in the Work by the copyright owner + or by an individual or Legal Entity authorized to submit on behalf of + the copyright owner. For the purposes of this definition, "submitted" + means any form of electronic, verbal, or written communication sent + to the Licensor or its representatives, including but not limited to + communication on electronic mailing lists, source code control systems, + and issue tracking systems that are managed by, or on behalf of, the + Licensor for the purpose of discussing and improving the Work, but + excluding communication that is conspicuously marked or otherwise + designated in writing by the copyright owner as "Not a Contribution." + + "Contributor" shall mean Licensor and any individual or Legal Entity + on behalf of whom a Contribution has been received by Licensor and + subsequently incorporated within the Work. + + 2. Grant of Copyright License. 
Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + copyright license to reproduce, prepare Derivative Works of, + publicly display, publicly perform, sublicense, and distribute the + Work and such Derivative Works in Source or Object form. + + 3. Grant of Patent License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + (except as stated in this section) patent license to make, have made, + use, offer to sell, sell, import, and otherwise transfer the Work, + where such license applies only to those patent claims licensable + by such Contributor that are necessarily infringed by their + Contribution(s) alone or by combination of their Contribution(s) + with the Work to which such Contribution(s) was submitted. If You + institute patent litigation against any entity (including a + cross-claim or counterclaim in a lawsuit) alleging that the Work + or a Contribution incorporated within the Work constitutes direct + or contributory patent infringement, then any patent licenses + granted to You under this License for that Work shall terminate + as of the date such litigation is filed. + + 4. Redistribution. 
You may reproduce and distribute copies of the + Work or Derivative Works thereof in any medium, with or without + modifications, and in Source or Object form, provided that You + meet the following conditions: + + (a) You must give any other recipients of the Work or + Derivative Works a copy of this License; and + + (b) You must cause any modified files to carry prominent notices + stating that You changed the files; and + + (c) You must retain, in the Source form of any Derivative Works + that You distribute, all copyright, patent, trademark, and + attribution notices from the Source form of the Work, + excluding those notices that do not pertain to any part of + the Derivative Works; and + + (d) If the Work includes a "NOTICE" text file as part of its + distribution, then any Derivative Works that You distribute must + include a readable copy of the attribution notices contained + within such NOTICE file, excluding those notices that do not + pertain to any part of the Derivative Works, in at least one + of the following places: within a NOTICE text file distributed + as part of the Derivative Works; within the Source form or + documentation, if provided along with the Derivative Works; or, + within a display generated by the Derivative Works, if and + wherever such third-party notices normally appear. The contents + of the NOTICE file are for informational purposes only and + do not modify the License. You may add Your own attribution + notices within Derivative Works that You distribute, alongside + or as an addendum to the NOTICE text from the Work, provided + that such additional attribution notices cannot be construed + as modifying the License. 
+ + You may add Your own copyright statement to Your modifications and + may provide additional or different license terms and conditions + for use, reproduction, or distribution of Your modifications, or + for any such Derivative Works as a whole, provided Your use, + reproduction, and distribution of the Work otherwise complies with + the conditions stated in this License. + + 5. Submission of Contributions. Unless You explicitly state otherwise, + any Contribution intentionally submitted for inclusion in the Work + by You to the Licensor shall be under the terms and conditions of + this License, without any additional terms or conditions. + Notwithstanding the above, nothing herein shall supersede or modify + the terms of any separate license agreement you may have executed + with Licensor regarding such Contributions. + + 6. Trademarks. This License does not grant permission to use the trade + names, trademarks, service marks, or product names of the Licensor, + except as required for reasonable and customary use in describing the + origin of the Work and reproducing the content of the NOTICE file. + + 7. Disclaimer of Warranty. Unless required by applicable law or + agreed to in writing, Licensor provides the Work (and each + Contributor provides its Contributions) on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or + implied, including, without limitation, any warranties or conditions + of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A + PARTICULAR PURPOSE. You are solely responsible for determining the + appropriateness of using or redistributing the Work and assume any + risks associated with Your exercise of permissions under this License. + + 8. Limitation of Liability. 
In no event and under no legal theory, + whether in tort (including negligence), contract, or otherwise, + unless required by applicable law (such as deliberate and grossly + negligent acts) or agreed to in writing, shall any Contributor be + liable to You for damages, including any direct, indirect, special, + incidental, or consequential damages of any character arising as a + result of this License or out of the use or inability to use the + Work (including but not limited to damages for loss of goodwill, + work stoppage, computer failure or malfunction, or any and all + other commercial damages or losses), even if such Contributor + has been advised of the possibility of such damages. + + 9. Accepting Warranty or Additional Liability. While redistributing + the Work or Derivative Works thereof, You may choose to offer, + and charge a fee for, acceptance of support, warranty, indemnity, + or other liability obligations and/or rights consistent with this + License. However, in accepting such obligations, You may act only + on Your own behalf and on Your sole responsibility, not on behalf + of any other Contributor, and only if You agree to indemnify, + defend, and hold each Contributor harmless for any liability + incurred by, or claims asserted against, such Contributor by reason + of your accepting any such warranty or additional liability. + + END OF TERMS AND CONDITIONS + + APPENDIX: How to apply the Apache License to your work. + + To apply the Apache License to your work, attach the following + boilerplate notice, with the fields enclosed by brackets "{}" + replaced with your own identifying information. (Don't include + the brackets!) The text should be enclosed in the appropriate + comment syntax for the file format. We also recommend that a + file or class name and description of purpose be included on the + same "printed page" as the copyright notice for easier + identification within third-party archives. + + Copyright 2018-2020 Amazon.com, Inc. 
or its affiliates. All Rights Reserved. + + Licensed under the Apache License, Version 2.0 (the "License"); + you may not use this file except in compliance with the License. + You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + + Unless required by applicable law or agreed to in writing, software + distributed under the License is distributed on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + See the License for the specific language governing permissions and + limitations under the License. \ No newline at end of file diff --git a/amplify/functions/deleteDocument/node_modules/@aws-crypto/util/node_modules/@smithy/util-buffer-from/README.md b/amplify/functions/deleteDocument/node_modules/@aws-crypto/util/node_modules/@smithy/util-buffer-from/README.md new file mode 100644 index 0000000..c896b04 --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@aws-crypto/util/node_modules/@smithy/util-buffer-from/README.md @@ -0,0 +1,10 @@ +# @smithy/util-buffer-from + +[![NPM version](https://img.shields.io/npm/v/@smithy/util-buffer-from/latest.svg)](https://www.npmjs.com/package/@smithy/util-buffer-from) +[![NPM downloads](https://img.shields.io/npm/dm/@smithy/util-buffer-from.svg)](https://www.npmjs.com/package/@smithy/util-buffer-from) + +> An internal package + +## Usage + +You probably shouldn't, at least directly. 
diff --git a/amplify/functions/deleteDocument/node_modules/@aws-crypto/util/node_modules/@smithy/util-buffer-from/dist-cjs/index.js b/amplify/functions/deleteDocument/node_modules/@aws-crypto/util/node_modules/@smithy/util-buffer-from/dist-cjs/index.js new file mode 100644 index 0000000..c6738d9 --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@aws-crypto/util/node_modules/@smithy/util-buffer-from/dist-cjs/index.js @@ -0,0 +1,47 @@ +var __defProp = Object.defineProperty; +var __getOwnPropDesc = Object.getOwnPropertyDescriptor; +var __getOwnPropNames = Object.getOwnPropertyNames; +var __hasOwnProp = Object.prototype.hasOwnProperty; +var __name = (target, value) => __defProp(target, "name", { value, configurable: true }); +var __export = (target, all) => { + for (var name in all) + __defProp(target, name, { get: all[name], enumerable: true }); +}; +var __copyProps = (to, from, except, desc) => { + if (from && typeof from === "object" || typeof from === "function") { + for (let key of __getOwnPropNames(from)) + if (!__hasOwnProp.call(to, key) && key !== except) + __defProp(to, key, { get: () => from[key], enumerable: !(desc = __getOwnPropDesc(from, key)) || desc.enumerable }); + } + return to; +}; +var __toCommonJS = (mod) => __copyProps(__defProp({}, "__esModule", { value: true }), mod); + +// src/index.ts +var src_exports = {}; +__export(src_exports, { + fromArrayBuffer: () => fromArrayBuffer, + fromString: () => fromString +}); +module.exports = __toCommonJS(src_exports); +var import_is_array_buffer = require("@smithy/is-array-buffer"); +var import_buffer = require("buffer"); +var fromArrayBuffer = /* @__PURE__ */ __name((input, offset = 0, length = input.byteLength - offset) => { + if (!(0, import_is_array_buffer.isArrayBuffer)(input)) { + throw new TypeError(`The "input" argument must be ArrayBuffer. 
Received type ${typeof input} (${input})`); + } + return import_buffer.Buffer.from(input, offset, length); +}, "fromArrayBuffer"); +var fromString = /* @__PURE__ */ __name((input, encoding) => { + if (typeof input !== "string") { + throw new TypeError(`The "input" argument must be of type string. Received type ${typeof input} (${input})`); + } + return encoding ? import_buffer.Buffer.from(input, encoding) : import_buffer.Buffer.from(input); +}, "fromString"); +// Annotate the CommonJS export names for ESM import in node: + +0 && (module.exports = { + fromArrayBuffer, + fromString +}); + diff --git a/amplify/functions/deleteDocument/node_modules/@aws-crypto/util/node_modules/@smithy/util-buffer-from/dist-es/index.js b/amplify/functions/deleteDocument/node_modules/@aws-crypto/util/node_modules/@smithy/util-buffer-from/dist-es/index.js new file mode 100644 index 0000000..718f831 --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@aws-crypto/util/node_modules/@smithy/util-buffer-from/dist-es/index.js @@ -0,0 +1,14 @@ +import { isArrayBuffer } from "@smithy/is-array-buffer"; +import { Buffer } from "buffer"; +export const fromArrayBuffer = (input, offset = 0, length = input.byteLength - offset) => { + if (!isArrayBuffer(input)) { + throw new TypeError(`The "input" argument must be ArrayBuffer. Received type ${typeof input} (${input})`); + } + return Buffer.from(input, offset, length); +}; +export const fromString = (input, encoding) => { + if (typeof input !== "string") { + throw new TypeError(`The "input" argument must be of type string. Received type ${typeof input} (${input})`); + } + return encoding ? 
Buffer.from(input, encoding) : Buffer.from(input); +}; diff --git a/amplify/functions/deleteDocument/node_modules/@aws-crypto/util/node_modules/@smithy/util-buffer-from/dist-types/index.d.ts b/amplify/functions/deleteDocument/node_modules/@aws-crypto/util/node_modules/@smithy/util-buffer-from/dist-types/index.d.ts new file mode 100644 index 0000000..a523134 --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@aws-crypto/util/node_modules/@smithy/util-buffer-from/dist-types/index.d.ts @@ -0,0 +1,13 @@ +import { Buffer } from "buffer"; +/** + * @internal + */ +export declare const fromArrayBuffer: (input: ArrayBuffer, offset?: number, length?: number) => Buffer; +/** + * @internal + */ +export type StringEncoding = "ascii" | "utf8" | "utf16le" | "ucs2" | "base64" | "latin1" | "binary" | "hex"; +/** + * @internal + */ +export declare const fromString: (input: string, encoding?: StringEncoding) => Buffer; diff --git a/amplify/functions/deleteDocument/node_modules/@aws-crypto/util/node_modules/@smithy/util-buffer-from/dist-types/ts3.4/index.d.ts b/amplify/functions/deleteDocument/node_modules/@aws-crypto/util/node_modules/@smithy/util-buffer-from/dist-types/ts3.4/index.d.ts new file mode 100644 index 0000000..f9173f7 --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@aws-crypto/util/node_modules/@smithy/util-buffer-from/dist-types/ts3.4/index.d.ts @@ -0,0 +1,13 @@ +import { Buffer } from "buffer"; +/** + * @internal + */ +export declare const fromArrayBuffer: (input: ArrayBuffer, offset?: number, length?: number) => Buffer; +/** + * @internal + */ +export type StringEncoding = "ascii" | "utf8" | "utf16le" | "ucs2" | "base64" | "latin1" | "binary" | "hex"; +/** + * @internal + */ +export declare const fromString: (input: string, encoding?: StringEncoding) => Buffer; diff --git a/amplify/functions/deleteDocument/node_modules/@aws-crypto/util/node_modules/@smithy/util-buffer-from/package.json 
b/amplify/functions/deleteDocument/node_modules/@aws-crypto/util/node_modules/@smithy/util-buffer-from/package.json new file mode 100644 index 0000000..a12e51c --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@aws-crypto/util/node_modules/@smithy/util-buffer-from/package.json @@ -0,0 +1,61 @@ +{ + "name": "@smithy/util-buffer-from", + "version": "2.2.0", + "scripts": { + "build": "concurrently 'yarn:build:cjs' 'yarn:build:es' 'yarn:build:types && yarn build:types:downlevel'", + "build:cjs": "node ../../scripts/inline util-buffer-from", + "build:es": "yarn g:tsc -p tsconfig.es.json", + "build:types": "yarn g:tsc -p tsconfig.types.json", + "build:types:downlevel": "downlevel-dts dist-types dist-types/ts3.4", + "stage-release": "rimraf ./.release && yarn pack && mkdir ./.release && tar zxvf ./package.tgz --directory ./.release && rm ./package.tgz", + "clean": "rimraf ./dist-* && rimraf *.tsbuildinfo || exit 0", + "lint": "eslint -c ../../.eslintrc.js \"src/**/*.ts\"", + "format": "prettier --config ../../prettier.config.js --ignore-path ../.prettierignore --write \"**/*.{ts,md,json}\"", + "test": "yarn g:jest" + }, + "author": { + "name": "AWS SDK for JavaScript Team", + "url": "https://aws.amazon.com/javascript/" + }, + "license": "Apache-2.0", + "dependencies": { + "@smithy/is-array-buffer": "^2.2.0", + "tslib": "^2.6.2" + }, + "devDependencies": { + "@tsconfig/recommended": "1.0.1", + "@types/node": "^14.14.31", + "concurrently": "7.0.0", + "downlevel-dts": "0.10.1", + "rimraf": "3.0.2", + "typedoc": "0.23.23" + }, + "main": "./dist-cjs/index.js", + "module": "./dist-es/index.js", + "types": "./dist-types/index.d.ts", + "engines": { + "node": ">=14.0.0" + }, + "typesVersions": { + "<4.0": { + "dist-types/*": [ + "dist-types/ts3.4/*" + ] + } + }, + "files": [ + "dist-*/**" + ], + "homepage": "https://github.com/awslabs/smithy-typescript/tree/main/packages/util-buffer-from", + "repository": { + "type": "git", + "url": 
"https://github.com/awslabs/smithy-typescript.git", + "directory": "packages/util-buffer-from" + }, + "typedoc": { + "entryPoint": "src/index.ts" + }, + "publishConfig": { + "directory": ".release/package" + } +} \ No newline at end of file diff --git a/amplify/functions/deleteDocument/node_modules/@aws-crypto/util/node_modules/@smithy/util-utf8/LICENSE b/amplify/functions/deleteDocument/node_modules/@aws-crypto/util/node_modules/@smithy/util-utf8/LICENSE new file mode 100644 index 0000000..7b6491b --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@aws-crypto/util/node_modules/@smithy/util-utf8/LICENSE @@ -0,0 +1,201 @@ +Apache License + Version 2.0, January 2004 + http://www.apache.org/licenses/ + + TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION + + 1. Definitions. + + "License" shall mean the terms and conditions for use, reproduction, + and distribution as defined by Sections 1 through 9 of this document. + + "Licensor" shall mean the copyright owner or entity authorized by + the copyright owner that is granting the License. + + "Legal Entity" shall mean the union of the acting entity and all + other entities that control, are controlled by, or are under common + control with that entity. For the purposes of this definition, + "control" means (i) the power, direct or indirect, to cause the + direction or management of such entity, whether by contract or + otherwise, or (ii) ownership of fifty percent (50%) or more of the + outstanding shares, or (iii) beneficial ownership of such entity. + + "You" (or "Your") shall mean an individual or Legal Entity + exercising permissions granted by this License. + + "Source" form shall mean the preferred form for making modifications, + including but not limited to software source code, documentation + source, and configuration files. 
+ + "Object" form shall mean any form resulting from mechanical + transformation or translation of a Source form, including but + not limited to compiled object code, generated documentation, + and conversions to other media types. + + "Work" shall mean the work of authorship, whether in Source or + Object form, made available under the License, as indicated by a + copyright notice that is included in or attached to the work + (an example is provided in the Appendix below). + + "Derivative Works" shall mean any work, whether in Source or Object + form, that is based on (or derived from) the Work and for which the + editorial revisions, annotations, elaborations, or other modifications + represent, as a whole, an original work of authorship. For the purposes + of this License, Derivative Works shall not include works that remain + separable from, or merely link (or bind by name) to the interfaces of, + the Work and Derivative Works thereof. + + "Contribution" shall mean any work of authorship, including + the original version of the Work and any modifications or additions + to that Work or Derivative Works thereof, that is intentionally + submitted to Licensor for inclusion in the Work by the copyright owner + or by an individual or Legal Entity authorized to submit on behalf of + the copyright owner. For the purposes of this definition, "submitted" + means any form of electronic, verbal, or written communication sent + to the Licensor or its representatives, including but not limited to + communication on electronic mailing lists, source code control systems, + and issue tracking systems that are managed by, or on behalf of, the + Licensor for the purpose of discussing and improving the Work, but + excluding communication that is conspicuously marked or otherwise + designated in writing by the copyright owner as "Not a Contribution." 
+ + "Contributor" shall mean Licensor and any individual or Legal Entity + on behalf of whom a Contribution has been received by Licensor and + subsequently incorporated within the Work. + + 2. Grant of Copyright License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + copyright license to reproduce, prepare Derivative Works of, + publicly display, publicly perform, sublicense, and distribute the + Work and such Derivative Works in Source or Object form. + + 3. Grant of Patent License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + (except as stated in this section) patent license to make, have made, + use, offer to sell, sell, import, and otherwise transfer the Work, + where such license applies only to those patent claims licensable + by such Contributor that are necessarily infringed by their + Contribution(s) alone or by combination of their Contribution(s) + with the Work to which such Contribution(s) was submitted. If You + institute patent litigation against any entity (including a + cross-claim or counterclaim in a lawsuit) alleging that the Work + or a Contribution incorporated within the Work constitutes direct + or contributory patent infringement, then any patent licenses + granted to You under this License for that Work shall terminate + as of the date such litigation is filed. + + 4. Redistribution. 
You may reproduce and distribute copies of the + Work or Derivative Works thereof in any medium, with or without + modifications, and in Source or Object form, provided that You + meet the following conditions: + + (a) You must give any other recipients of the Work or + Derivative Works a copy of this License; and + + (b) You must cause any modified files to carry prominent notices + stating that You changed the files; and + + (c) You must retain, in the Source form of any Derivative Works + that You distribute, all copyright, patent, trademark, and + attribution notices from the Source form of the Work, + excluding those notices that do not pertain to any part of + the Derivative Works; and + + (d) If the Work includes a "NOTICE" text file as part of its + distribution, then any Derivative Works that You distribute must + include a readable copy of the attribution notices contained + within such NOTICE file, excluding those notices that do not + pertain to any part of the Derivative Works, in at least one + of the following places: within a NOTICE text file distributed + as part of the Derivative Works; within the Source form or + documentation, if provided along with the Derivative Works; or, + within a display generated by the Derivative Works, if and + wherever such third-party notices normally appear. The contents + of the NOTICE file are for informational purposes only and + do not modify the License. You may add Your own attribution + notices within Derivative Works that You distribute, alongside + or as an addendum to the NOTICE text from the Work, provided + that such additional attribution notices cannot be construed + as modifying the License. 
+ + You may add Your own copyright statement to Your modifications and + may provide additional or different license terms and conditions + for use, reproduction, or distribution of Your modifications, or + for any such Derivative Works as a whole, provided Your use, + reproduction, and distribution of the Work otherwise complies with + the conditions stated in this License. + + 5. Submission of Contributions. Unless You explicitly state otherwise, + any Contribution intentionally submitted for inclusion in the Work + by You to the Licensor shall be under the terms and conditions of + this License, without any additional terms or conditions. + Notwithstanding the above, nothing herein shall supersede or modify + the terms of any separate license agreement you may have executed + with Licensor regarding such Contributions. + + 6. Trademarks. This License does not grant permission to use the trade + names, trademarks, service marks, or product names of the Licensor, + except as required for reasonable and customary use in describing the + origin of the Work and reproducing the content of the NOTICE file. + + 7. Disclaimer of Warranty. Unless required by applicable law or + agreed to in writing, Licensor provides the Work (and each + Contributor provides its Contributions) on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or + implied, including, without limitation, any warranties or conditions + of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A + PARTICULAR PURPOSE. You are solely responsible for determining the + appropriateness of using or redistributing the Work and assume any + risks associated with Your exercise of permissions under this License. + + 8. Limitation of Liability. 
In no event and under no legal theory, + whether in tort (including negligence), contract, or otherwise, + unless required by applicable law (such as deliberate and grossly + negligent acts) or agreed to in writing, shall any Contributor be + liable to You for damages, including any direct, indirect, special, + incidental, or consequential damages of any character arising as a + result of this License or out of the use or inability to use the + Work (including but not limited to damages for loss of goodwill, + work stoppage, computer failure or malfunction, or any and all + other commercial damages or losses), even if such Contributor + has been advised of the possibility of such damages. + + 9. Accepting Warranty or Additional Liability. While redistributing + the Work or Derivative Works thereof, You may choose to offer, + and charge a fee for, acceptance of support, warranty, indemnity, + or other liability obligations and/or rights consistent with this + License. However, in accepting such obligations, You may act only + on Your own behalf and on Your sole responsibility, not on behalf + of any other Contributor, and only if You agree to indemnify, + defend, and hold each Contributor harmless for any liability + incurred by, or claims asserted against, such Contributor by reason + of your accepting any such warranty or additional liability. + + END OF TERMS AND CONDITIONS + + APPENDIX: How to apply the Apache License to your work. + + To apply the Apache License to your work, attach the following + boilerplate notice, with the fields enclosed by brackets "{}" + replaced with your own identifying information. (Don't include + the brackets!) The text should be enclosed in the appropriate + comment syntax for the file format. We also recommend that a + file or class name and description of purpose be included on the + same "printed page" as the copyright notice for easier + identification within third-party archives. + + Copyright 2018-2020 Amazon.com, Inc. 
or its affiliates. All Rights Reserved. + + Licensed under the Apache License, Version 2.0 (the "License"); + you may not use this file except in compliance with the License. + You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + + Unless required by applicable law or agreed to in writing, software + distributed under the License is distributed on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + See the License for the specific language governing permissions and + limitations under the License. \ No newline at end of file diff --git a/amplify/functions/deleteDocument/node_modules/@aws-crypto/util/node_modules/@smithy/util-utf8/README.md b/amplify/functions/deleteDocument/node_modules/@aws-crypto/util/node_modules/@smithy/util-utf8/README.md new file mode 100644 index 0000000..fc5db6d --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@aws-crypto/util/node_modules/@smithy/util-utf8/README.md @@ -0,0 +1,4 @@ +# @smithy/util-utf8 + +[![NPM version](https://img.shields.io/npm/v/@smithy/util-utf8/latest.svg)](https://www.npmjs.com/package/@smithy/util-utf8) +[![NPM downloads](https://img.shields.io/npm/dm/@smithy/util-utf8.svg)](https://www.npmjs.com/package/@smithy/util-utf8) diff --git a/amplify/functions/deleteDocument/node_modules/@aws-crypto/util/node_modules/@smithy/util-utf8/dist-cjs/fromUtf8.browser.js b/amplify/functions/deleteDocument/node_modules/@aws-crypto/util/node_modules/@smithy/util-utf8/dist-cjs/fromUtf8.browser.js new file mode 100644 index 0000000..532e610 --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@aws-crypto/util/node_modules/@smithy/util-utf8/dist-cjs/fromUtf8.browser.js @@ -0,0 +1 @@ +module.exports = require("./index.js"); \ No newline at end of file diff --git a/amplify/functions/deleteDocument/node_modules/@aws-crypto/util/node_modules/@smithy/util-utf8/dist-cjs/fromUtf8.js 
b/amplify/functions/deleteDocument/node_modules/@aws-crypto/util/node_modules/@smithy/util-utf8/dist-cjs/fromUtf8.js new file mode 100644 index 0000000..532e610 --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@aws-crypto/util/node_modules/@smithy/util-utf8/dist-cjs/fromUtf8.js @@ -0,0 +1 @@ +module.exports = require("./index.js"); \ No newline at end of file diff --git a/amplify/functions/deleteDocument/node_modules/@aws-crypto/util/node_modules/@smithy/util-utf8/dist-cjs/index.js b/amplify/functions/deleteDocument/node_modules/@aws-crypto/util/node_modules/@smithy/util-utf8/dist-cjs/index.js new file mode 100644 index 0000000..0b22680 --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@aws-crypto/util/node_modules/@smithy/util-utf8/dist-cjs/index.js @@ -0,0 +1,65 @@ +var __defProp = Object.defineProperty; +var __getOwnPropDesc = Object.getOwnPropertyDescriptor; +var __getOwnPropNames = Object.getOwnPropertyNames; +var __hasOwnProp = Object.prototype.hasOwnProperty; +var __name = (target, value) => __defProp(target, "name", { value, configurable: true }); +var __export = (target, all) => { + for (var name in all) + __defProp(target, name, { get: all[name], enumerable: true }); +}; +var __copyProps = (to, from, except, desc) => { + if (from && typeof from === "object" || typeof from === "function") { + for (let key of __getOwnPropNames(from)) + if (!__hasOwnProp.call(to, key) && key !== except) + __defProp(to, key, { get: () => from[key], enumerable: !(desc = __getOwnPropDesc(from, key)) || desc.enumerable }); + } + return to; +}; +var __toCommonJS = (mod) => __copyProps(__defProp({}, "__esModule", { value: true }), mod); + +// src/index.ts +var src_exports = {}; +__export(src_exports, { + fromUtf8: () => fromUtf8, + toUint8Array: () => toUint8Array, + toUtf8: () => toUtf8 +}); +module.exports = __toCommonJS(src_exports); + +// src/fromUtf8.ts +var import_util_buffer_from = require("@smithy/util-buffer-from"); +var fromUtf8 = /* 
@__PURE__ */ __name((input) => { + const buf = (0, import_util_buffer_from.fromString)(input, "utf8"); + return new Uint8Array(buf.buffer, buf.byteOffset, buf.byteLength / Uint8Array.BYTES_PER_ELEMENT); +}, "fromUtf8"); + +// src/toUint8Array.ts +var toUint8Array = /* @__PURE__ */ __name((data) => { + if (typeof data === "string") { + return fromUtf8(data); + } + if (ArrayBuffer.isView(data)) { + return new Uint8Array(data.buffer, data.byteOffset, data.byteLength / Uint8Array.BYTES_PER_ELEMENT); + } + return new Uint8Array(data); +}, "toUint8Array"); + +// src/toUtf8.ts + +var toUtf8 = /* @__PURE__ */ __name((input) => { + if (typeof input === "string") { + return input; + } + if (typeof input !== "object" || typeof input.byteOffset !== "number" || typeof input.byteLength !== "number") { + throw new Error("@smithy/util-utf8: toUtf8 encoder function only accepts string | Uint8Array."); + } + return (0, import_util_buffer_from.fromArrayBuffer)(input.buffer, input.byteOffset, input.byteLength).toString("utf8"); +}, "toUtf8"); +// Annotate the CommonJS export names for ESM import in node: + +0 && (module.exports = { + fromUtf8, + toUint8Array, + toUtf8 +}); + diff --git a/amplify/functions/deleteDocument/node_modules/@aws-crypto/util/node_modules/@smithy/util-utf8/dist-cjs/toUint8Array.js b/amplify/functions/deleteDocument/node_modules/@aws-crypto/util/node_modules/@smithy/util-utf8/dist-cjs/toUint8Array.js new file mode 100644 index 0000000..532e610 --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@aws-crypto/util/node_modules/@smithy/util-utf8/dist-cjs/toUint8Array.js @@ -0,0 +1 @@ +module.exports = require("./index.js"); \ No newline at end of file diff --git a/amplify/functions/deleteDocument/node_modules/@aws-crypto/util/node_modules/@smithy/util-utf8/dist-cjs/toUtf8.browser.js b/amplify/functions/deleteDocument/node_modules/@aws-crypto/util/node_modules/@smithy/util-utf8/dist-cjs/toUtf8.browser.js new file mode 100644 index 0000000..532e610 --- 
/dev/null +++ b/amplify/functions/deleteDocument/node_modules/@aws-crypto/util/node_modules/@smithy/util-utf8/dist-cjs/toUtf8.browser.js @@ -0,0 +1 @@ +module.exports = require("./index.js"); \ No newline at end of file diff --git a/amplify/functions/deleteDocument/node_modules/@aws-crypto/util/node_modules/@smithy/util-utf8/dist-cjs/toUtf8.js b/amplify/functions/deleteDocument/node_modules/@aws-crypto/util/node_modules/@smithy/util-utf8/dist-cjs/toUtf8.js new file mode 100644 index 0000000..532e610 --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@aws-crypto/util/node_modules/@smithy/util-utf8/dist-cjs/toUtf8.js @@ -0,0 +1 @@ +module.exports = require("./index.js"); \ No newline at end of file diff --git a/amplify/functions/deleteDocument/node_modules/@aws-crypto/util/node_modules/@smithy/util-utf8/dist-es/fromUtf8.browser.js b/amplify/functions/deleteDocument/node_modules/@aws-crypto/util/node_modules/@smithy/util-utf8/dist-es/fromUtf8.browser.js new file mode 100644 index 0000000..7344190 --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@aws-crypto/util/node_modules/@smithy/util-utf8/dist-es/fromUtf8.browser.js @@ -0,0 +1 @@ +export const fromUtf8 = (input) => new TextEncoder().encode(input); diff --git a/amplify/functions/deleteDocument/node_modules/@aws-crypto/util/node_modules/@smithy/util-utf8/dist-es/fromUtf8.js b/amplify/functions/deleteDocument/node_modules/@aws-crypto/util/node_modules/@smithy/util-utf8/dist-es/fromUtf8.js new file mode 100644 index 0000000..6dc438b --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@aws-crypto/util/node_modules/@smithy/util-utf8/dist-es/fromUtf8.js @@ -0,0 +1,5 @@ +import { fromString } from "@smithy/util-buffer-from"; +export const fromUtf8 = (input) => { + const buf = fromString(input, "utf8"); + return new Uint8Array(buf.buffer, buf.byteOffset, buf.byteLength / Uint8Array.BYTES_PER_ELEMENT); +}; diff --git 
a/amplify/functions/deleteDocument/node_modules/@aws-crypto/util/node_modules/@smithy/util-utf8/dist-es/index.js b/amplify/functions/deleteDocument/node_modules/@aws-crypto/util/node_modules/@smithy/util-utf8/dist-es/index.js new file mode 100644 index 0000000..00ba465 --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@aws-crypto/util/node_modules/@smithy/util-utf8/dist-es/index.js @@ -0,0 +1,3 @@ +export * from "./fromUtf8"; +export * from "./toUint8Array"; +export * from "./toUtf8"; diff --git a/amplify/functions/deleteDocument/node_modules/@aws-crypto/util/node_modules/@smithy/util-utf8/dist-es/toUint8Array.js b/amplify/functions/deleteDocument/node_modules/@aws-crypto/util/node_modules/@smithy/util-utf8/dist-es/toUint8Array.js new file mode 100644 index 0000000..2cd36f7 --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@aws-crypto/util/node_modules/@smithy/util-utf8/dist-es/toUint8Array.js @@ -0,0 +1,10 @@ +import { fromUtf8 } from "./fromUtf8"; +export const toUint8Array = (data) => { + if (typeof data === "string") { + return fromUtf8(data); + } + if (ArrayBuffer.isView(data)) { + return new Uint8Array(data.buffer, data.byteOffset, data.byteLength / Uint8Array.BYTES_PER_ELEMENT); + } + return new Uint8Array(data); +}; diff --git a/amplify/functions/deleteDocument/node_modules/@aws-crypto/util/node_modules/@smithy/util-utf8/dist-es/toUtf8.browser.js b/amplify/functions/deleteDocument/node_modules/@aws-crypto/util/node_modules/@smithy/util-utf8/dist-es/toUtf8.browser.js new file mode 100644 index 0000000..c292127 --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@aws-crypto/util/node_modules/@smithy/util-utf8/dist-es/toUtf8.browser.js @@ -0,0 +1,9 @@ +export const toUtf8 = (input) => { + if (typeof input === "string") { + return input; + } + if (typeof input !== "object" || typeof input.byteOffset !== "number" || typeof input.byteLength !== "number") { + throw new Error("@smithy/util-utf8: toUtf8 encoder function 
only accepts string | Uint8Array."); + } + return new TextDecoder("utf-8").decode(input); +}; diff --git a/amplify/functions/deleteDocument/node_modules/@aws-crypto/util/node_modules/@smithy/util-utf8/dist-es/toUtf8.js b/amplify/functions/deleteDocument/node_modules/@aws-crypto/util/node_modules/@smithy/util-utf8/dist-es/toUtf8.js new file mode 100644 index 0000000..7be8745 --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@aws-crypto/util/node_modules/@smithy/util-utf8/dist-es/toUtf8.js @@ -0,0 +1,10 @@ +import { fromArrayBuffer } from "@smithy/util-buffer-from"; +export const toUtf8 = (input) => { + if (typeof input === "string") { + return input; + } + if (typeof input !== "object" || typeof input.byteOffset !== "number" || typeof input.byteLength !== "number") { + throw new Error("@smithy/util-utf8: toUtf8 encoder function only accepts string | Uint8Array."); + } + return fromArrayBuffer(input.buffer, input.byteOffset, input.byteLength).toString("utf8"); +}; diff --git a/amplify/functions/deleteDocument/node_modules/@aws-crypto/util/node_modules/@smithy/util-utf8/dist-types/fromUtf8.browser.d.ts b/amplify/functions/deleteDocument/node_modules/@aws-crypto/util/node_modules/@smithy/util-utf8/dist-types/fromUtf8.browser.d.ts new file mode 100644 index 0000000..dd91981 --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@aws-crypto/util/node_modules/@smithy/util-utf8/dist-types/fromUtf8.browser.d.ts @@ -0,0 +1 @@ +export declare const fromUtf8: (input: string) => Uint8Array; diff --git a/amplify/functions/deleteDocument/node_modules/@aws-crypto/util/node_modules/@smithy/util-utf8/dist-types/fromUtf8.d.ts b/amplify/functions/deleteDocument/node_modules/@aws-crypto/util/node_modules/@smithy/util-utf8/dist-types/fromUtf8.d.ts new file mode 100644 index 0000000..dd91981 --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@aws-crypto/util/node_modules/@smithy/util-utf8/dist-types/fromUtf8.d.ts @@ -0,0 +1 @@ +export declare 
const fromUtf8: (input: string) => Uint8Array; diff --git a/amplify/functions/deleteDocument/node_modules/@aws-crypto/util/node_modules/@smithy/util-utf8/dist-types/index.d.ts b/amplify/functions/deleteDocument/node_modules/@aws-crypto/util/node_modules/@smithy/util-utf8/dist-types/index.d.ts new file mode 100644 index 0000000..00ba465 --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@aws-crypto/util/node_modules/@smithy/util-utf8/dist-types/index.d.ts @@ -0,0 +1,3 @@ +export * from "./fromUtf8"; +export * from "./toUint8Array"; +export * from "./toUtf8"; diff --git a/amplify/functions/deleteDocument/node_modules/@aws-crypto/util/node_modules/@smithy/util-utf8/dist-types/toUint8Array.d.ts b/amplify/functions/deleteDocument/node_modules/@aws-crypto/util/node_modules/@smithy/util-utf8/dist-types/toUint8Array.d.ts new file mode 100644 index 0000000..11b6342 --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@aws-crypto/util/node_modules/@smithy/util-utf8/dist-types/toUint8Array.d.ts @@ -0,0 +1 @@ +export declare const toUint8Array: (data: string | ArrayBuffer | ArrayBufferView) => Uint8Array; diff --git a/amplify/functions/deleteDocument/node_modules/@aws-crypto/util/node_modules/@smithy/util-utf8/dist-types/toUtf8.browser.d.ts b/amplify/functions/deleteDocument/node_modules/@aws-crypto/util/node_modules/@smithy/util-utf8/dist-types/toUtf8.browser.d.ts new file mode 100644 index 0000000..8494acd --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@aws-crypto/util/node_modules/@smithy/util-utf8/dist-types/toUtf8.browser.d.ts @@ -0,0 +1,7 @@ +/** + * + * This does not convert non-utf8 strings to utf8, it only passes through strings if + * a string is received instead of a Uint8Array. 
+ * + */ +export declare const toUtf8: (input: Uint8Array | string) => string; diff --git a/amplify/functions/deleteDocument/node_modules/@aws-crypto/util/node_modules/@smithy/util-utf8/dist-types/toUtf8.d.ts b/amplify/functions/deleteDocument/node_modules/@aws-crypto/util/node_modules/@smithy/util-utf8/dist-types/toUtf8.d.ts new file mode 100644 index 0000000..8494acd --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@aws-crypto/util/node_modules/@smithy/util-utf8/dist-types/toUtf8.d.ts @@ -0,0 +1,7 @@ +/** + * + * This does not convert non-utf8 strings to utf8, it only passes through strings if + * a string is received instead of a Uint8Array. + * + */ +export declare const toUtf8: (input: Uint8Array | string) => string; diff --git a/amplify/functions/deleteDocument/node_modules/@aws-crypto/util/node_modules/@smithy/util-utf8/dist-types/ts3.4/fromUtf8.browser.d.ts b/amplify/functions/deleteDocument/node_modules/@aws-crypto/util/node_modules/@smithy/util-utf8/dist-types/ts3.4/fromUtf8.browser.d.ts new file mode 100644 index 0000000..39f3d6d --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@aws-crypto/util/node_modules/@smithy/util-utf8/dist-types/ts3.4/fromUtf8.browser.d.ts @@ -0,0 +1 @@ +export declare const fromUtf8: (input: string) => Uint8Array; diff --git a/amplify/functions/deleteDocument/node_modules/@aws-crypto/util/node_modules/@smithy/util-utf8/dist-types/ts3.4/fromUtf8.d.ts b/amplify/functions/deleteDocument/node_modules/@aws-crypto/util/node_modules/@smithy/util-utf8/dist-types/ts3.4/fromUtf8.d.ts new file mode 100644 index 0000000..39f3d6d --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@aws-crypto/util/node_modules/@smithy/util-utf8/dist-types/ts3.4/fromUtf8.d.ts @@ -0,0 +1 @@ +export declare const fromUtf8: (input: string) => Uint8Array; diff --git a/amplify/functions/deleteDocument/node_modules/@aws-crypto/util/node_modules/@smithy/util-utf8/dist-types/ts3.4/index.d.ts 
b/amplify/functions/deleteDocument/node_modules/@aws-crypto/util/node_modules/@smithy/util-utf8/dist-types/ts3.4/index.d.ts new file mode 100644 index 0000000..ef9761d --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@aws-crypto/util/node_modules/@smithy/util-utf8/dist-types/ts3.4/index.d.ts @@ -0,0 +1,3 @@ +export * from "./fromUtf8"; +export * from "./toUint8Array"; +export * from "./toUtf8"; diff --git a/amplify/functions/deleteDocument/node_modules/@aws-crypto/util/node_modules/@smithy/util-utf8/dist-types/ts3.4/toUint8Array.d.ts b/amplify/functions/deleteDocument/node_modules/@aws-crypto/util/node_modules/@smithy/util-utf8/dist-types/ts3.4/toUint8Array.d.ts new file mode 100644 index 0000000..562fe10 --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@aws-crypto/util/node_modules/@smithy/util-utf8/dist-types/ts3.4/toUint8Array.d.ts @@ -0,0 +1 @@ +export declare const toUint8Array: (data: string | ArrayBuffer | ArrayBufferView) => Uint8Array; diff --git a/amplify/functions/deleteDocument/node_modules/@aws-crypto/util/node_modules/@smithy/util-utf8/dist-types/ts3.4/toUtf8.browser.d.ts b/amplify/functions/deleteDocument/node_modules/@aws-crypto/util/node_modules/@smithy/util-utf8/dist-types/ts3.4/toUtf8.browser.d.ts new file mode 100644 index 0000000..33511ad --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@aws-crypto/util/node_modules/@smithy/util-utf8/dist-types/ts3.4/toUtf8.browser.d.ts @@ -0,0 +1,7 @@ +/** + * + * This does not convert non-utf8 strings to utf8, it only passes through strings if + * a string is received instead of a Uint8Array. 
+ * + */ +export declare const toUtf8: (input: Uint8Array | string) => string; diff --git a/amplify/functions/deleteDocument/node_modules/@aws-crypto/util/node_modules/@smithy/util-utf8/dist-types/ts3.4/toUtf8.d.ts b/amplify/functions/deleteDocument/node_modules/@aws-crypto/util/node_modules/@smithy/util-utf8/dist-types/ts3.4/toUtf8.d.ts new file mode 100644 index 0000000..33511ad --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@aws-crypto/util/node_modules/@smithy/util-utf8/dist-types/ts3.4/toUtf8.d.ts @@ -0,0 +1,7 @@ +/** + * + * This does not convert non-utf8 strings to utf8, it only passes through strings if + * a string is received instead of a Uint8Array. + * + */ +export declare const toUtf8: (input: Uint8Array | string) => string; diff --git a/amplify/functions/deleteDocument/node_modules/@aws-crypto/util/node_modules/@smithy/util-utf8/package.json b/amplify/functions/deleteDocument/node_modules/@aws-crypto/util/node_modules/@smithy/util-utf8/package.json new file mode 100644 index 0000000..78bfb4d --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@aws-crypto/util/node_modules/@smithy/util-utf8/package.json @@ -0,0 +1,66 @@ +{ + "name": "@smithy/util-utf8", + "version": "2.3.0", + "description": "A UTF-8 string <-> UInt8Array converter", + "main": "./dist-cjs/index.js", + "module": "./dist-es/index.js", + "scripts": { + "build": "concurrently 'yarn:build:cjs' 'yarn:build:es' 'yarn:build:types && yarn build:types:downlevel'", + "build:cjs": "node ../../scripts/inline util-utf8", + "build:es": "yarn g:tsc -p tsconfig.es.json", + "build:types": "yarn g:tsc -p tsconfig.types.json", + "build:types:downlevel": "downlevel-dts dist-types dist-types/ts3.4", + "stage-release": "rimraf ./.release && yarn pack && mkdir ./.release && tar zxvf ./package.tgz --directory ./.release && rm ./package.tgz", + "clean": "rimraf ./dist-* && rimraf *.tsbuildinfo || exit 0", + "lint": "eslint -c ../../.eslintrc.js \"src/**/*.ts\"", + "format": 
"prettier --config ../../prettier.config.js --ignore-path ../.prettierignore --write \"**/*.{ts,md,json}\"", + "test": "yarn g:jest" + }, + "author": { + "name": "AWS SDK for JavaScript Team", + "url": "https://aws.amazon.com/javascript/" + }, + "license": "Apache-2.0", + "dependencies": { + "@smithy/util-buffer-from": "^2.2.0", + "tslib": "^2.6.2" + }, + "devDependencies": { + "@tsconfig/recommended": "1.0.1", + "concurrently": "7.0.0", + "downlevel-dts": "0.10.1", + "rimraf": "3.0.2", + "typedoc": "0.23.23" + }, + "types": "./dist-types/index.d.ts", + "engines": { + "node": ">=14.0.0" + }, + "typesVersions": { + "<4.0": { + "dist-types/*": [ + "dist-types/ts3.4/*" + ] + } + }, + "files": [ + "dist-*/**" + ], + "browser": { + "./dist-es/fromUtf8": "./dist-es/fromUtf8.browser", + "./dist-es/toUtf8": "./dist-es/toUtf8.browser" + }, + "react-native": {}, + "homepage": "https://github.com/awslabs/smithy-typescript/tree/main/packages/util-utf8", + "repository": { + "type": "git", + "url": "https://github.com/awslabs/smithy-typescript.git", + "directory": "packages/util-utf8" + }, + "typedoc": { + "entryPoint": "src/index.ts" + }, + "publishConfig": { + "directory": ".release/package" + } +} \ No newline at end of file diff --git a/amplify/functions/deleteDocument/node_modules/@aws-crypto/util/package.json b/amplify/functions/deleteDocument/node_modules/@aws-crypto/util/package.json new file mode 100644 index 0000000..431107a --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@aws-crypto/util/package.json @@ -0,0 +1,32 @@ +{ + "name": "@aws-crypto/util", + "version": "5.2.0", + "scripts": { + "prepublishOnly": "tsc -p tsconfig.json && tsc -p tsconfig.module.json", + "pretest": "tsc -p tsconfig.test.json", + "test": "mocha --require ts-node/register test/**/*test.ts" + }, + "main": "./build/main/index.js", + "module": "./build/module/index.js", + "types": "./build/main/index.d.ts", + "repository": { + "type": "git", + "url": 
"git@github.com:aws/aws-sdk-js-crypto-helpers.git" + }, + "author": { + "name": "AWS Crypto Tools Team", + "email": "aws-cryptools@amazon.com", + "url": "https://docs.aws.amazon.com/aws-crypto-tools/index.html?id=docs_gateway#lang/en_us" + }, + "homepage": "https://github.com/aws/aws-sdk-js-crypto-helpers/tree/master/packages/util", + "license": "Apache-2.0", + "dependencies": { + "@aws-sdk/types": "^3.222.0", + "@smithy/util-utf8": "^2.0.0", + "tslib": "^2.6.2" + }, + "publishConfig": { + "access": "public" + }, + "gitHead": "c11b171b35ec5c093364f0e0d8dc4ab1af68e748" +} diff --git a/amplify/functions/deleteDocument/node_modules/@aws-crypto/util/src/convertToBuffer.ts b/amplify/functions/deleteDocument/node_modules/@aws-crypto/util/src/convertToBuffer.ts new file mode 100644 index 0000000..f9f163e --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@aws-crypto/util/src/convertToBuffer.ts @@ -0,0 +1,30 @@ +// Copyright Amazon.com Inc. or its affiliates. All Rights Reserved. +// SPDX-License-Identifier: Apache-2.0 + +import { SourceData } from "@aws-sdk/types"; +import { fromUtf8 as fromUtf8Browser } from "@smithy/util-utf8"; + +// Quick polyfill +const fromUtf8 = + typeof Buffer !== "undefined" && Buffer.from + ? 
(input: string) => Buffer.from(input, "utf8") + : fromUtf8Browser; + +export function convertToBuffer(data: SourceData): Uint8Array { + // Already a Uint8, do nothing + if (data instanceof Uint8Array) return data; + + if (typeof data === "string") { + return fromUtf8(data); + } + + if (ArrayBuffer.isView(data)) { + return new Uint8Array( + data.buffer, + data.byteOffset, + data.byteLength / Uint8Array.BYTES_PER_ELEMENT + ); + } + + return new Uint8Array(data); +} diff --git a/amplify/functions/deleteDocument/node_modules/@aws-crypto/util/src/index.ts b/amplify/functions/deleteDocument/node_modules/@aws-crypto/util/src/index.ts new file mode 100644 index 0000000..2f6c62a --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@aws-crypto/util/src/index.ts @@ -0,0 +1,7 @@ +// Copyright Amazon.com Inc. or its affiliates. All Rights Reserved. +// SPDX-License-Identifier: Apache-2.0 + +export { convertToBuffer } from "./convertToBuffer"; +export { isEmptyData } from "./isEmptyData"; +export { numToUint8 } from "./numToUint8"; +export {uint32ArrayFrom} from './uint32ArrayFrom'; diff --git a/amplify/functions/deleteDocument/node_modules/@aws-crypto/util/src/isEmptyData.ts b/amplify/functions/deleteDocument/node_modules/@aws-crypto/util/src/isEmptyData.ts new file mode 100644 index 0000000..089764d --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@aws-crypto/util/src/isEmptyData.ts @@ -0,0 +1,12 @@ +// Copyright Amazon.com Inc. or its affiliates. All Rights Reserved. 
+// SPDX-License-Identifier: Apache-2.0 + +import { SourceData } from "@aws-sdk/types"; + +export function isEmptyData(data: SourceData): boolean { + if (typeof data === "string") { + return data.length === 0; + } + + return data.byteLength === 0; +} diff --git a/amplify/functions/deleteDocument/node_modules/@aws-crypto/util/src/numToUint8.ts b/amplify/functions/deleteDocument/node_modules/@aws-crypto/util/src/numToUint8.ts new file mode 100644 index 0000000..2f40ace --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@aws-crypto/util/src/numToUint8.ts @@ -0,0 +1,11 @@ +// Copyright Amazon.com Inc. or its affiliates. All Rights Reserved. +// SPDX-License-Identifier: Apache-2.0 + +export function numToUint8(num: number) { + return new Uint8Array([ + (num & 0xff000000) >> 24, + (num & 0x00ff0000) >> 16, + (num & 0x0000ff00) >> 8, + num & 0x000000ff, + ]); +} diff --git a/amplify/functions/deleteDocument/node_modules/@aws-crypto/util/src/uint32ArrayFrom.ts b/amplify/functions/deleteDocument/node_modules/@aws-crypto/util/src/uint32ArrayFrom.ts new file mode 100644 index 0000000..b9b6d88 --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@aws-crypto/util/src/uint32ArrayFrom.ts @@ -0,0 +1,16 @@ +// Copyright Amazon.com Inc. or its affiliates. All Rights Reserved. 
+// SPDX-License-Identifier: Apache-2.0 + +// IE 11 does not support Array.from, so we do it manually +export function uint32ArrayFrom(a_lookUpTable: Array): Uint32Array { + if (!Uint32Array.from) { + const return_array = new Uint32Array(a_lookUpTable.length) + let a_index = 0 + while (a_index < a_lookUpTable.length) { + return_array[a_index] = a_lookUpTable[a_index] + a_index += 1 + } + return return_array + } + return Uint32Array.from(a_lookUpTable) +} diff --git a/amplify/functions/deleteDocument/node_modules/@aws-crypto/util/tsconfig.json b/amplify/functions/deleteDocument/node_modules/@aws-crypto/util/tsconfig.json new file mode 100644 index 0000000..2b996d0 --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@aws-crypto/util/tsconfig.json @@ -0,0 +1,9 @@ +{ + "extends": "../tsconfig.json", + "compilerOptions": { + "rootDir": "./src", + "outDir": "./build/main", + }, + "include": ["src/**/*.ts"], + "exclude": ["node_modules/**"] +} diff --git a/amplify/functions/deleteDocument/node_modules/@aws-crypto/util/tsconfig.module.json b/amplify/functions/deleteDocument/node_modules/@aws-crypto/util/tsconfig.module.json new file mode 100644 index 0000000..7d0cfdd --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@aws-crypto/util/tsconfig.module.json @@ -0,0 +1,7 @@ +{ + "extends": "./tsconfig", + "compilerOptions": { + "outDir": "build/module", + "module": "esnext", + } +} diff --git a/amplify/functions/deleteDocument/node_modules/@aws-sdk/client-dynamodb/LICENSE b/amplify/functions/deleteDocument/node_modules/@aws-sdk/client-dynamodb/LICENSE new file mode 100644 index 0000000..dd65ae0 --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@aws-sdk/client-dynamodb/LICENSE @@ -0,0 +1,201 @@ + Apache License + Version 2.0, January 2004 + http://www.apache.org/licenses/ + + TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION + + 1. Definitions. 
+ + "License" shall mean the terms and conditions for use, reproduction, + and distribution as defined by Sections 1 through 9 of this document. + + "Licensor" shall mean the copyright owner or entity authorized by + the copyright owner that is granting the License. + + "Legal Entity" shall mean the union of the acting entity and all + other entities that control, are controlled by, or are under common + control with that entity. For the purposes of this definition, + "control" means (i) the power, direct or indirect, to cause the + direction or management of such entity, whether by contract or + otherwise, or (ii) ownership of fifty percent (50%) or more of the + outstanding shares, or (iii) beneficial ownership of such entity. + + "You" (or "Your") shall mean an individual or Legal Entity + exercising permissions granted by this License. + + "Source" form shall mean the preferred form for making modifications, + including but not limited to software source code, documentation + source, and configuration files. + + "Object" form shall mean any form resulting from mechanical + transformation or translation of a Source form, including but + not limited to compiled object code, generated documentation, + and conversions to other media types. + + "Work" shall mean the work of authorship, whether in Source or + Object form, made available under the License, as indicated by a + copyright notice that is included in or attached to the work + (an example is provided in the Appendix below). + + "Derivative Works" shall mean any work, whether in Source or Object + form, that is based on (or derived from) the Work and for which the + editorial revisions, annotations, elaborations, or other modifications + represent, as a whole, an original work of authorship. For the purposes + of this License, Derivative Works shall not include works that remain + separable from, or merely link (or bind by name) to the interfaces of, + the Work and Derivative Works thereof. 
+ + "Contribution" shall mean any work of authorship, including + the original version of the Work and any modifications or additions + to that Work or Derivative Works thereof, that is intentionally + submitted to Licensor for inclusion in the Work by the copyright owner + or by an individual or Legal Entity authorized to submit on behalf of + the copyright owner. For the purposes of this definition, "submitted" + means any form of electronic, verbal, or written communication sent + to the Licensor or its representatives, including but not limited to + communication on electronic mailing lists, source code control systems, + and issue tracking systems that are managed by, or on behalf of, the + Licensor for the purpose of discussing and improving the Work, but + excluding communication that is conspicuously marked or otherwise + designated in writing by the copyright owner as "Not a Contribution." + + "Contributor" shall mean Licensor and any individual or Legal Entity + on behalf of whom a Contribution has been received by Licensor and + subsequently incorporated within the Work. + + 2. Grant of Copyright License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + copyright license to reproduce, prepare Derivative Works of, + publicly display, publicly perform, sublicense, and distribute the + Work and such Derivative Works in Source or Object form. + + 3. Grant of Patent License. 
Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + (except as stated in this section) patent license to make, have made, + use, offer to sell, sell, import, and otherwise transfer the Work, + where such license applies only to those patent claims licensable + by such Contributor that are necessarily infringed by their + Contribution(s) alone or by combination of their Contribution(s) + with the Work to which such Contribution(s) was submitted. If You + institute patent litigation against any entity (including a + cross-claim or counterclaim in a lawsuit) alleging that the Work + or a Contribution incorporated within the Work constitutes direct + or contributory patent infringement, then any patent licenses + granted to You under this License for that Work shall terminate + as of the date such litigation is filed. + + 4. Redistribution. You may reproduce and distribute copies of the + Work or Derivative Works thereof in any medium, with or without + modifications, and in Source or Object form, provided that You + meet the following conditions: + + (a) You must give any other recipients of the Work or + Derivative Works a copy of this License; and + + (b) You must cause any modified files to carry prominent notices + stating that You changed the files; and + + (c) You must retain, in the Source form of any Derivative Works + that You distribute, all copyright, patent, trademark, and + attribution notices from the Source form of the Work, + excluding those notices that do not pertain to any part of + the Derivative Works; and + + (d) If the Work includes a "NOTICE" text file as part of its + distribution, then any Derivative Works that You distribute must + include a readable copy of the attribution notices contained + within such NOTICE file, excluding those notices that do not + pertain to any part of the Derivative Works, in at least one + of 
the following places: within a NOTICE text file distributed + as part of the Derivative Works; within the Source form or + documentation, if provided along with the Derivative Works; or, + within a display generated by the Derivative Works, if and + wherever such third-party notices normally appear. The contents + of the NOTICE file are for informational purposes only and + do not modify the License. You may add Your own attribution + notices within Derivative Works that You distribute, alongside + or as an addendum to the NOTICE text from the Work, provided + that such additional attribution notices cannot be construed + as modifying the License. + + You may add Your own copyright statement to Your modifications and + may provide additional or different license terms and conditions + for use, reproduction, or distribution of Your modifications, or + for any such Derivative Works as a whole, provided Your use, + reproduction, and distribution of the Work otherwise complies with + the conditions stated in this License. + + 5. Submission of Contributions. Unless You explicitly state otherwise, + any Contribution intentionally submitted for inclusion in the Work + by You to the Licensor shall be under the terms and conditions of + this License, without any additional terms or conditions. + Notwithstanding the above, nothing herein shall supersede or modify + the terms of any separate license agreement you may have executed + with Licensor regarding such Contributions. + + 6. Trademarks. This License does not grant permission to use the trade + names, trademarks, service marks, or product names of the Licensor, + except as required for reasonable and customary use in describing the + origin of the Work and reproducing the content of the NOTICE file. + + 7. Disclaimer of Warranty. 
Unless required by applicable law or + agreed to in writing, Licensor provides the Work (and each + Contributor provides its Contributions) on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or + implied, including, without limitation, any warranties or conditions + of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A + PARTICULAR PURPOSE. You are solely responsible for determining the + appropriateness of using or redistributing the Work and assume any + risks associated with Your exercise of permissions under this License. + + 8. Limitation of Liability. In no event and under no legal theory, + whether in tort (including negligence), contract, or otherwise, + unless required by applicable law (such as deliberate and grossly + negligent acts) or agreed to in writing, shall any Contributor be + liable to You for damages, including any direct, indirect, special, + incidental, or consequential damages of any character arising as a + result of this License or out of the use or inability to use the + Work (including but not limited to damages for loss of goodwill, + work stoppage, computer failure or malfunction, or any and all + other commercial damages or losses), even if such Contributor + has been advised of the possibility of such damages. + + 9. Accepting Warranty or Additional Liability. While redistributing + the Work or Derivative Works thereof, You may choose to offer, + and charge a fee for, acceptance of support, warranty, indemnity, + or other liability obligations and/or rights consistent with this + License. However, in accepting such obligations, You may act only + on Your own behalf and on Your sole responsibility, not on behalf + of any other Contributor, and only if You agree to indemnify, + defend, and hold each Contributor harmless for any liability + incurred by, or claims asserted against, such Contributor by reason + of your accepting any such warranty or additional liability. 
+ + END OF TERMS AND CONDITIONS + + APPENDIX: How to apply the Apache License to your work. + + To apply the Apache License to your work, attach the following + boilerplate notice, with the fields enclosed by brackets "{}" + replaced with your own identifying information. (Don't include + the brackets!) The text should be enclosed in the appropriate + comment syntax for the file format. We also recommend that a + file or class name and description of purpose be included on the + same "printed page" as the copyright notice for easier + identification within third-party archives. + + Copyright 2018-2020 Amazon.com, Inc. or its affiliates. All Rights Reserved. + + Licensed under the Apache License, Version 2.0 (the "License"); + you may not use this file except in compliance with the License. + You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + + Unless required by applicable law or agreed to in writing, software + distributed under the License is distributed on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + See the License for the specific language governing permissions and + limitations under the License. diff --git a/amplify/functions/deleteDocument/node_modules/@aws-sdk/client-dynamodb/README.md b/amplify/functions/deleteDocument/node_modules/@aws-sdk/client-dynamodb/README.md new file mode 100644 index 0000000..c1da3dc --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@aws-sdk/client-dynamodb/README.md @@ -0,0 +1,678 @@ + + +# @aws-sdk/client-dynamodb + +## Description + +AWS SDK for JavaScript DynamoDB Client for Node.js, Browser and React Native. + +Amazon DynamoDB + +

Amazon DynamoDB is a fully managed NoSQL database service that provides fast +and predictable performance with seamless scalability. DynamoDB lets you +offload the administrative burdens of operating and scaling a distributed database, so +that you don't have to worry about hardware provisioning, setup and configuration, +replication, software patching, or cluster scaling.

+

With DynamoDB, you can create database tables that can store and retrieve +any amount of data, and serve any level of request traffic. You can scale up or scale +down your tables' throughput capacity without downtime or performance degradation, and +use the Amazon Web Services Management Console to monitor resource utilization and performance +metrics.

+

DynamoDB automatically spreads the data and traffic for your tables over +a sufficient number of servers to handle your throughput and storage requirements, while +maintaining consistent and fast performance. All of your data is stored on solid state +disks (SSDs) and automatically replicated across multiple Availability Zones in an +Amazon Web Services Region, providing built-in high availability and data +durability.

+ +## Installing + +To install this package, simply type add or install @aws-sdk/client-dynamodb +using your favorite package manager: + +- `npm install @aws-sdk/client-dynamodb` +- `yarn add @aws-sdk/client-dynamodb` +- `pnpm add @aws-sdk/client-dynamodb` + +## Getting Started + +### Import + +The AWS SDK is modulized by clients and commands. +To send a request, you only need to import the `DynamoDBClient` and +the commands you need, for example `ListBackupsCommand`: + +```js +// ES5 example +const { DynamoDBClient, ListBackupsCommand } = require("@aws-sdk/client-dynamodb"); +``` + +```ts +// ES6+ example +import { DynamoDBClient, ListBackupsCommand } from "@aws-sdk/client-dynamodb"; +``` + +### Usage + +To send a request, you: + +- Initiate client with configuration (e.g. credentials, region). +- Initiate command with input parameters. +- Call `send` operation on client with command object as input. +- If you are using a custom http handler, you may call `destroy()` to close open connections. + +```js +// a client can be shared by different commands. +const client = new DynamoDBClient({ region: "REGION" }); + +const params = { + /** input parameters */ +}; +const command = new ListBackupsCommand(params); +``` + +#### Async/await + +We recommend using [await](https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Operators/await) +operator to wait for the promise returned by send operation as follows: + +```js +// async/await. +try { + const data = await client.send(command); + // process data. +} catch (error) { + // error handling. +} finally { + // finally. +} +``` + +Async-await is clean, concise, intuitive, easy to debug and has better error handling +as compared to using Promise chains or callbacks. + +#### Promises + +You can also use [Promise chaining](https://developer.mozilla.org/en-US/docs/Web/JavaScript/Guide/Using_promises#chaining) +to execute send operation. + +```js +client.send(command).then( + (data) => { + // process data. 
+ }, + (error) => { + // error handling. + } +); +``` + +Promises can also be called using `.catch()` and `.finally()` as follows: + +```js +client + .send(command) + .then((data) => { + // process data. + }) + .catch((error) => { + // error handling. + }) + .finally(() => { + // finally. + }); +``` + +#### Callbacks + +We do not recommend using callbacks because of [callback hell](http://callbackhell.com/), +but they are supported by the send operation. + +```js +// callbacks. +client.send(command, (err, data) => { + // process err and data. +}); +``` + +#### v2 compatible style + +The client can also send requests using v2 compatible style. +However, it results in a bigger bundle size and may be dropped in next major version. More details in the blog post +on [modular packages in AWS SDK for JavaScript](https://aws.amazon.com/blogs/developer/modular-packages-in-aws-sdk-for-javascript/) + +```ts +import * as AWS from "@aws-sdk/client-dynamodb"; +const client = new AWS.DynamoDB({ region: "REGION" }); + +// async/await. +try { + const data = await client.listBackups(params); + // process data. +} catch (error) { + // error handling. +} + +// Promises. +client + .listBackups(params) + .then((data) => { + // process data. + }) + .catch((error) => { + // error handling. + }); + +// callbacks. +client.listBackups(params, (err, data) => { + // process err and data. +}); +``` + +### Troubleshooting + +When the service returns an exception, the error will include the exception information, +as well as response metadata (e.g. request id). + +```js +try { + const data = await client.send(command); + // process data. +} catch (error) { + const { requestId, cfId, extendedRequestId } = error.$metadata; + console.log({ requestId, cfId, extendedRequestId }); + /** + * The keys within exceptions are also parsed. 
+ * You can access them by specifying exception names: + * if (error.name === 'SomeServiceException') { + * const value = error.specialKeyInException; + * } + */ +} +``` + +## Getting Help + +Please use these community resources for getting help. +We use the GitHub issues for tracking bugs and feature requests, but have limited bandwidth to address them. + +- Visit [Developer Guide](https://docs.aws.amazon.com/sdk-for-javascript/v3/developer-guide/welcome.html) + or [API Reference](https://docs.aws.amazon.com/AWSJavaScriptSDK/v3/latest/index.html). +- Check out the blog posts tagged with [`aws-sdk-js`](https://aws.amazon.com/blogs/developer/tag/aws-sdk-js/) + on AWS Developer Blog. +- Ask a question on [StackOverflow](https://stackoverflow.com/questions/tagged/aws-sdk-js) and tag it with `aws-sdk-js`. +- Join the AWS JavaScript community on [gitter](https://gitter.im/aws/aws-sdk-js-v3). +- If it turns out that you may have found a bug, please [open an issue](https://github.com/aws/aws-sdk-js-v3/issues/new/choose). + +To test your universal JavaScript code in Node.js, browser and react-native environments, +visit our [code samples repo](https://github.com/aws-samples/aws-sdk-js-tests). + +## Contributing + +This client code is generated automatically. Any modifications will be overwritten the next time the `@aws-sdk/client-dynamodb` package is updated. +To contribute to client you can check our [generate clients scripts](https://github.com/aws/aws-sdk-js-v3/tree/main/scripts/generate-clients). + +## License + +This SDK is distributed under the +[Apache License, Version 2.0](http://www.apache.org/licenses/LICENSE-2.0), +see LICENSE for more information. + +## Client Commands (Operations List) + +
+ +BatchExecuteStatement + + +[Command API Reference](https://docs.aws.amazon.com/AWSJavaScriptSDK/v3/latest/client/dynamodb/command/BatchExecuteStatementCommand/) / [Input](https://docs.aws.amazon.com/AWSJavaScriptSDK/v3/latest/Package/-aws-sdk-client-dynamodb/Interface/BatchExecuteStatementCommandInput/) / [Output](https://docs.aws.amazon.com/AWSJavaScriptSDK/v3/latest/Package/-aws-sdk-client-dynamodb/Interface/BatchExecuteStatementCommandOutput/) + +
+
+ +BatchGetItem + + +[Command API Reference](https://docs.aws.amazon.com/AWSJavaScriptSDK/v3/latest/client/dynamodb/command/BatchGetItemCommand/) / [Input](https://docs.aws.amazon.com/AWSJavaScriptSDK/v3/latest/Package/-aws-sdk-client-dynamodb/Interface/BatchGetItemCommandInput/) / [Output](https://docs.aws.amazon.com/AWSJavaScriptSDK/v3/latest/Package/-aws-sdk-client-dynamodb/Interface/BatchGetItemCommandOutput/) + +
+
+ +BatchWriteItem + + +[Command API Reference](https://docs.aws.amazon.com/AWSJavaScriptSDK/v3/latest/client/dynamodb/command/BatchWriteItemCommand/) / [Input](https://docs.aws.amazon.com/AWSJavaScriptSDK/v3/latest/Package/-aws-sdk-client-dynamodb/Interface/BatchWriteItemCommandInput/) / [Output](https://docs.aws.amazon.com/AWSJavaScriptSDK/v3/latest/Package/-aws-sdk-client-dynamodb/Interface/BatchWriteItemCommandOutput/) + +
+
+ +CreateBackup + + +[Command API Reference](https://docs.aws.amazon.com/AWSJavaScriptSDK/v3/latest/client/dynamodb/command/CreateBackupCommand/) / [Input](https://docs.aws.amazon.com/AWSJavaScriptSDK/v3/latest/Package/-aws-sdk-client-dynamodb/Interface/CreateBackupCommandInput/) / [Output](https://docs.aws.amazon.com/AWSJavaScriptSDK/v3/latest/Package/-aws-sdk-client-dynamodb/Interface/CreateBackupCommandOutput/) + +
+
+ +CreateGlobalTable + + +[Command API Reference](https://docs.aws.amazon.com/AWSJavaScriptSDK/v3/latest/client/dynamodb/command/CreateGlobalTableCommand/) / [Input](https://docs.aws.amazon.com/AWSJavaScriptSDK/v3/latest/Package/-aws-sdk-client-dynamodb/Interface/CreateGlobalTableCommandInput/) / [Output](https://docs.aws.amazon.com/AWSJavaScriptSDK/v3/latest/Package/-aws-sdk-client-dynamodb/Interface/CreateGlobalTableCommandOutput/) + +
+
+ +CreateTable + + +[Command API Reference](https://docs.aws.amazon.com/AWSJavaScriptSDK/v3/latest/client/dynamodb/command/CreateTableCommand/) / [Input](https://docs.aws.amazon.com/AWSJavaScriptSDK/v3/latest/Package/-aws-sdk-client-dynamodb/Interface/CreateTableCommandInput/) / [Output](https://docs.aws.amazon.com/AWSJavaScriptSDK/v3/latest/Package/-aws-sdk-client-dynamodb/Interface/CreateTableCommandOutput/) + +
+
+ +DeleteBackup + + +[Command API Reference](https://docs.aws.amazon.com/AWSJavaScriptSDK/v3/latest/client/dynamodb/command/DeleteBackupCommand/) / [Input](https://docs.aws.amazon.com/AWSJavaScriptSDK/v3/latest/Package/-aws-sdk-client-dynamodb/Interface/DeleteBackupCommandInput/) / [Output](https://docs.aws.amazon.com/AWSJavaScriptSDK/v3/latest/Package/-aws-sdk-client-dynamodb/Interface/DeleteBackupCommandOutput/) + +
+
+ +DeleteItem + + +[Command API Reference](https://docs.aws.amazon.com/AWSJavaScriptSDK/v3/latest/client/dynamodb/command/DeleteItemCommand/) / [Input](https://docs.aws.amazon.com/AWSJavaScriptSDK/v3/latest/Package/-aws-sdk-client-dynamodb/Interface/DeleteItemCommandInput/) / [Output](https://docs.aws.amazon.com/AWSJavaScriptSDK/v3/latest/Package/-aws-sdk-client-dynamodb/Interface/DeleteItemCommandOutput/) + +
+
+ +DeleteResourcePolicy + + +[Command API Reference](https://docs.aws.amazon.com/AWSJavaScriptSDK/v3/latest/client/dynamodb/command/DeleteResourcePolicyCommand/) / [Input](https://docs.aws.amazon.com/AWSJavaScriptSDK/v3/latest/Package/-aws-sdk-client-dynamodb/Interface/DeleteResourcePolicyCommandInput/) / [Output](https://docs.aws.amazon.com/AWSJavaScriptSDK/v3/latest/Package/-aws-sdk-client-dynamodb/Interface/DeleteResourcePolicyCommandOutput/) + +
+
+ +DeleteTable + + +[Command API Reference](https://docs.aws.amazon.com/AWSJavaScriptSDK/v3/latest/client/dynamodb/command/DeleteTableCommand/) / [Input](https://docs.aws.amazon.com/AWSJavaScriptSDK/v3/latest/Package/-aws-sdk-client-dynamodb/Interface/DeleteTableCommandInput/) / [Output](https://docs.aws.amazon.com/AWSJavaScriptSDK/v3/latest/Package/-aws-sdk-client-dynamodb/Interface/DeleteTableCommandOutput/) + +
+
+ +DescribeBackup + + +[Command API Reference](https://docs.aws.amazon.com/AWSJavaScriptSDK/v3/latest/client/dynamodb/command/DescribeBackupCommand/) / [Input](https://docs.aws.amazon.com/AWSJavaScriptSDK/v3/latest/Package/-aws-sdk-client-dynamodb/Interface/DescribeBackupCommandInput/) / [Output](https://docs.aws.amazon.com/AWSJavaScriptSDK/v3/latest/Package/-aws-sdk-client-dynamodb/Interface/DescribeBackupCommandOutput/) + +
+
+ +DescribeContinuousBackups + + +[Command API Reference](https://docs.aws.amazon.com/AWSJavaScriptSDK/v3/latest/client/dynamodb/command/DescribeContinuousBackupsCommand/) / [Input](https://docs.aws.amazon.com/AWSJavaScriptSDK/v3/latest/Package/-aws-sdk-client-dynamodb/Interface/DescribeContinuousBackupsCommandInput/) / [Output](https://docs.aws.amazon.com/AWSJavaScriptSDK/v3/latest/Package/-aws-sdk-client-dynamodb/Interface/DescribeContinuousBackupsCommandOutput/) + +
+
+ +DescribeContributorInsights + + +[Command API Reference](https://docs.aws.amazon.com/AWSJavaScriptSDK/v3/latest/client/dynamodb/command/DescribeContributorInsightsCommand/) / [Input](https://docs.aws.amazon.com/AWSJavaScriptSDK/v3/latest/Package/-aws-sdk-client-dynamodb/Interface/DescribeContributorInsightsCommandInput/) / [Output](https://docs.aws.amazon.com/AWSJavaScriptSDK/v3/latest/Package/-aws-sdk-client-dynamodb/Interface/DescribeContributorInsightsCommandOutput/) + +
+
+ +DescribeEndpoints + + +[Command API Reference](https://docs.aws.amazon.com/AWSJavaScriptSDK/v3/latest/client/dynamodb/command/DescribeEndpointsCommand/) / [Input](https://docs.aws.amazon.com/AWSJavaScriptSDK/v3/latest/Package/-aws-sdk-client-dynamodb/Interface/DescribeEndpointsCommandInput/) / [Output](https://docs.aws.amazon.com/AWSJavaScriptSDK/v3/latest/Package/-aws-sdk-client-dynamodb/Interface/DescribeEndpointsCommandOutput/) + +
+
+ +DescribeExport + + +[Command API Reference](https://docs.aws.amazon.com/AWSJavaScriptSDK/v3/latest/client/dynamodb/command/DescribeExportCommand/) / [Input](https://docs.aws.amazon.com/AWSJavaScriptSDK/v3/latest/Package/-aws-sdk-client-dynamodb/Interface/DescribeExportCommandInput/) / [Output](https://docs.aws.amazon.com/AWSJavaScriptSDK/v3/latest/Package/-aws-sdk-client-dynamodb/Interface/DescribeExportCommandOutput/) + +
+
+ +DescribeGlobalTable + + +[Command API Reference](https://docs.aws.amazon.com/AWSJavaScriptSDK/v3/latest/client/dynamodb/command/DescribeGlobalTableCommand/) / [Input](https://docs.aws.amazon.com/AWSJavaScriptSDK/v3/latest/Package/-aws-sdk-client-dynamodb/Interface/DescribeGlobalTableCommandInput/) / [Output](https://docs.aws.amazon.com/AWSJavaScriptSDK/v3/latest/Package/-aws-sdk-client-dynamodb/Interface/DescribeGlobalTableCommandOutput/) + +
+
+ +DescribeGlobalTableSettings + + +[Command API Reference](https://docs.aws.amazon.com/AWSJavaScriptSDK/v3/latest/client/dynamodb/command/DescribeGlobalTableSettingsCommand/) / [Input](https://docs.aws.amazon.com/AWSJavaScriptSDK/v3/latest/Package/-aws-sdk-client-dynamodb/Interface/DescribeGlobalTableSettingsCommandInput/) / [Output](https://docs.aws.amazon.com/AWSJavaScriptSDK/v3/latest/Package/-aws-sdk-client-dynamodb/Interface/DescribeGlobalTableSettingsCommandOutput/) + +
+
+ +DescribeImport + + +[Command API Reference](https://docs.aws.amazon.com/AWSJavaScriptSDK/v3/latest/client/dynamodb/command/DescribeImportCommand/) / [Input](https://docs.aws.amazon.com/AWSJavaScriptSDK/v3/latest/Package/-aws-sdk-client-dynamodb/Interface/DescribeImportCommandInput/) / [Output](https://docs.aws.amazon.com/AWSJavaScriptSDK/v3/latest/Package/-aws-sdk-client-dynamodb/Interface/DescribeImportCommandOutput/) + +
+
+ +DescribeKinesisStreamingDestination + + +[Command API Reference](https://docs.aws.amazon.com/AWSJavaScriptSDK/v3/latest/client/dynamodb/command/DescribeKinesisStreamingDestinationCommand/) / [Input](https://docs.aws.amazon.com/AWSJavaScriptSDK/v3/latest/Package/-aws-sdk-client-dynamodb/Interface/DescribeKinesisStreamingDestinationCommandInput/) / [Output](https://docs.aws.amazon.com/AWSJavaScriptSDK/v3/latest/Package/-aws-sdk-client-dynamodb/Interface/DescribeKinesisStreamingDestinationCommandOutput/) + +
+
+ +DescribeLimits + + +[Command API Reference](https://docs.aws.amazon.com/AWSJavaScriptSDK/v3/latest/client/dynamodb/command/DescribeLimitsCommand/) / [Input](https://docs.aws.amazon.com/AWSJavaScriptSDK/v3/latest/Package/-aws-sdk-client-dynamodb/Interface/DescribeLimitsCommandInput/) / [Output](https://docs.aws.amazon.com/AWSJavaScriptSDK/v3/latest/Package/-aws-sdk-client-dynamodb/Interface/DescribeLimitsCommandOutput/) + +
+
+ +DescribeTable + + +[Command API Reference](https://docs.aws.amazon.com/AWSJavaScriptSDK/v3/latest/client/dynamodb/command/DescribeTableCommand/) / [Input](https://docs.aws.amazon.com/AWSJavaScriptSDK/v3/latest/Package/-aws-sdk-client-dynamodb/Interface/DescribeTableCommandInput/) / [Output](https://docs.aws.amazon.com/AWSJavaScriptSDK/v3/latest/Package/-aws-sdk-client-dynamodb/Interface/DescribeTableCommandOutput/) + +
+
+ +DescribeTableReplicaAutoScaling + + +[Command API Reference](https://docs.aws.amazon.com/AWSJavaScriptSDK/v3/latest/client/dynamodb/command/DescribeTableReplicaAutoScalingCommand/) / [Input](https://docs.aws.amazon.com/AWSJavaScriptSDK/v3/latest/Package/-aws-sdk-client-dynamodb/Interface/DescribeTableReplicaAutoScalingCommandInput/) / [Output](https://docs.aws.amazon.com/AWSJavaScriptSDK/v3/latest/Package/-aws-sdk-client-dynamodb/Interface/DescribeTableReplicaAutoScalingCommandOutput/) + +
+
+ +DescribeTimeToLive + + +[Command API Reference](https://docs.aws.amazon.com/AWSJavaScriptSDK/v3/latest/client/dynamodb/command/DescribeTimeToLiveCommand/) / [Input](https://docs.aws.amazon.com/AWSJavaScriptSDK/v3/latest/Package/-aws-sdk-client-dynamodb/Interface/DescribeTimeToLiveCommandInput/) / [Output](https://docs.aws.amazon.com/AWSJavaScriptSDK/v3/latest/Package/-aws-sdk-client-dynamodb/Interface/DescribeTimeToLiveCommandOutput/) + +
+
+ +DisableKinesisStreamingDestination + + +[Command API Reference](https://docs.aws.amazon.com/AWSJavaScriptSDK/v3/latest/client/dynamodb/command/DisableKinesisStreamingDestinationCommand/) / [Input](https://docs.aws.amazon.com/AWSJavaScriptSDK/v3/latest/Package/-aws-sdk-client-dynamodb/Interface/DisableKinesisStreamingDestinationCommandInput/) / [Output](https://docs.aws.amazon.com/AWSJavaScriptSDK/v3/latest/Package/-aws-sdk-client-dynamodb/Interface/DisableKinesisStreamingDestinationCommandOutput/) + +
+
+ +EnableKinesisStreamingDestination + + +[Command API Reference](https://docs.aws.amazon.com/AWSJavaScriptSDK/v3/latest/client/dynamodb/command/EnableKinesisStreamingDestinationCommand/) / [Input](https://docs.aws.amazon.com/AWSJavaScriptSDK/v3/latest/Package/-aws-sdk-client-dynamodb/Interface/EnableKinesisStreamingDestinationCommandInput/) / [Output](https://docs.aws.amazon.com/AWSJavaScriptSDK/v3/latest/Package/-aws-sdk-client-dynamodb/Interface/EnableKinesisStreamingDestinationCommandOutput/) + +
+
+ +ExecuteStatement + + +[Command API Reference](https://docs.aws.amazon.com/AWSJavaScriptSDK/v3/latest/client/dynamodb/command/ExecuteStatementCommand/) / [Input](https://docs.aws.amazon.com/AWSJavaScriptSDK/v3/latest/Package/-aws-sdk-client-dynamodb/Interface/ExecuteStatementCommandInput/) / [Output](https://docs.aws.amazon.com/AWSJavaScriptSDK/v3/latest/Package/-aws-sdk-client-dynamodb/Interface/ExecuteStatementCommandOutput/) + +
+
+ +ExecuteTransaction + + +[Command API Reference](https://docs.aws.amazon.com/AWSJavaScriptSDK/v3/latest/client/dynamodb/command/ExecuteTransactionCommand/) / [Input](https://docs.aws.amazon.com/AWSJavaScriptSDK/v3/latest/Package/-aws-sdk-client-dynamodb/Interface/ExecuteTransactionCommandInput/) / [Output](https://docs.aws.amazon.com/AWSJavaScriptSDK/v3/latest/Package/-aws-sdk-client-dynamodb/Interface/ExecuteTransactionCommandOutput/) + +
+
+ +ExportTableToPointInTime + + +[Command API Reference](https://docs.aws.amazon.com/AWSJavaScriptSDK/v3/latest/client/dynamodb/command/ExportTableToPointInTimeCommand/) / [Input](https://docs.aws.amazon.com/AWSJavaScriptSDK/v3/latest/Package/-aws-sdk-client-dynamodb/Interface/ExportTableToPointInTimeCommandInput/) / [Output](https://docs.aws.amazon.com/AWSJavaScriptSDK/v3/latest/Package/-aws-sdk-client-dynamodb/Interface/ExportTableToPointInTimeCommandOutput/) + +
+
+ +GetItem + + +[Command API Reference](https://docs.aws.amazon.com/AWSJavaScriptSDK/v3/latest/client/dynamodb/command/GetItemCommand/) / [Input](https://docs.aws.amazon.com/AWSJavaScriptSDK/v3/latest/Package/-aws-sdk-client-dynamodb/Interface/GetItemCommandInput/) / [Output](https://docs.aws.amazon.com/AWSJavaScriptSDK/v3/latest/Package/-aws-sdk-client-dynamodb/Interface/GetItemCommandOutput/) + +
+
+ +GetResourcePolicy + + +[Command API Reference](https://docs.aws.amazon.com/AWSJavaScriptSDK/v3/latest/client/dynamodb/command/GetResourcePolicyCommand/) / [Input](https://docs.aws.amazon.com/AWSJavaScriptSDK/v3/latest/Package/-aws-sdk-client-dynamodb/Interface/GetResourcePolicyCommandInput/) / [Output](https://docs.aws.amazon.com/AWSJavaScriptSDK/v3/latest/Package/-aws-sdk-client-dynamodb/Interface/GetResourcePolicyCommandOutput/) + +
+
+ +ImportTable + + +[Command API Reference](https://docs.aws.amazon.com/AWSJavaScriptSDK/v3/latest/client/dynamodb/command/ImportTableCommand/) / [Input](https://docs.aws.amazon.com/AWSJavaScriptSDK/v3/latest/Package/-aws-sdk-client-dynamodb/Interface/ImportTableCommandInput/) / [Output](https://docs.aws.amazon.com/AWSJavaScriptSDK/v3/latest/Package/-aws-sdk-client-dynamodb/Interface/ImportTableCommandOutput/) + +
+
+ +ListBackups + + +[Command API Reference](https://docs.aws.amazon.com/AWSJavaScriptSDK/v3/latest/client/dynamodb/command/ListBackupsCommand/) / [Input](https://docs.aws.amazon.com/AWSJavaScriptSDK/v3/latest/Package/-aws-sdk-client-dynamodb/Interface/ListBackupsCommandInput/) / [Output](https://docs.aws.amazon.com/AWSJavaScriptSDK/v3/latest/Package/-aws-sdk-client-dynamodb/Interface/ListBackupsCommandOutput/) + +
+
+ +ListContributorInsights + + +[Command API Reference](https://docs.aws.amazon.com/AWSJavaScriptSDK/v3/latest/client/dynamodb/command/ListContributorInsightsCommand/) / [Input](https://docs.aws.amazon.com/AWSJavaScriptSDK/v3/latest/Package/-aws-sdk-client-dynamodb/Interface/ListContributorInsightsCommandInput/) / [Output](https://docs.aws.amazon.com/AWSJavaScriptSDK/v3/latest/Package/-aws-sdk-client-dynamodb/Interface/ListContributorInsightsCommandOutput/) + +
+
+ +ListExports + + +[Command API Reference](https://docs.aws.amazon.com/AWSJavaScriptSDK/v3/latest/client/dynamodb/command/ListExportsCommand/) / [Input](https://docs.aws.amazon.com/AWSJavaScriptSDK/v3/latest/Package/-aws-sdk-client-dynamodb/Interface/ListExportsCommandInput/) / [Output](https://docs.aws.amazon.com/AWSJavaScriptSDK/v3/latest/Package/-aws-sdk-client-dynamodb/Interface/ListExportsCommandOutput/) + +
+
+ +ListGlobalTables + + +[Command API Reference](https://docs.aws.amazon.com/AWSJavaScriptSDK/v3/latest/client/dynamodb/command/ListGlobalTablesCommand/) / [Input](https://docs.aws.amazon.com/AWSJavaScriptSDK/v3/latest/Package/-aws-sdk-client-dynamodb/Interface/ListGlobalTablesCommandInput/) / [Output](https://docs.aws.amazon.com/AWSJavaScriptSDK/v3/latest/Package/-aws-sdk-client-dynamodb/Interface/ListGlobalTablesCommandOutput/) + +
+
+ +ListImports + + +[Command API Reference](https://docs.aws.amazon.com/AWSJavaScriptSDK/v3/latest/client/dynamodb/command/ListImportsCommand/) / [Input](https://docs.aws.amazon.com/AWSJavaScriptSDK/v3/latest/Package/-aws-sdk-client-dynamodb/Interface/ListImportsCommandInput/) / [Output](https://docs.aws.amazon.com/AWSJavaScriptSDK/v3/latest/Package/-aws-sdk-client-dynamodb/Interface/ListImportsCommandOutput/) + +
+
+ +ListTables + + +[Command API Reference](https://docs.aws.amazon.com/AWSJavaScriptSDK/v3/latest/client/dynamodb/command/ListTablesCommand/) / [Input](https://docs.aws.amazon.com/AWSJavaScriptSDK/v3/latest/Package/-aws-sdk-client-dynamodb/Interface/ListTablesCommandInput/) / [Output](https://docs.aws.amazon.com/AWSJavaScriptSDK/v3/latest/Package/-aws-sdk-client-dynamodb/Interface/ListTablesCommandOutput/) + +
+
+ +ListTagsOfResource + + +[Command API Reference](https://docs.aws.amazon.com/AWSJavaScriptSDK/v3/latest/client/dynamodb/command/ListTagsOfResourceCommand/) / [Input](https://docs.aws.amazon.com/AWSJavaScriptSDK/v3/latest/Package/-aws-sdk-client-dynamodb/Interface/ListTagsOfResourceCommandInput/) / [Output](https://docs.aws.amazon.com/AWSJavaScriptSDK/v3/latest/Package/-aws-sdk-client-dynamodb/Interface/ListTagsOfResourceCommandOutput/) + +
+
+ +PutItem + + +[Command API Reference](https://docs.aws.amazon.com/AWSJavaScriptSDK/v3/latest/client/dynamodb/command/PutItemCommand/) / [Input](https://docs.aws.amazon.com/AWSJavaScriptSDK/v3/latest/Package/-aws-sdk-client-dynamodb/Interface/PutItemCommandInput/) / [Output](https://docs.aws.amazon.com/AWSJavaScriptSDK/v3/latest/Package/-aws-sdk-client-dynamodb/Interface/PutItemCommandOutput/) + +
+
+ +PutResourcePolicy + + +[Command API Reference](https://docs.aws.amazon.com/AWSJavaScriptSDK/v3/latest/client/dynamodb/command/PutResourcePolicyCommand/) / [Input](https://docs.aws.amazon.com/AWSJavaScriptSDK/v3/latest/Package/-aws-sdk-client-dynamodb/Interface/PutResourcePolicyCommandInput/) / [Output](https://docs.aws.amazon.com/AWSJavaScriptSDK/v3/latest/Package/-aws-sdk-client-dynamodb/Interface/PutResourcePolicyCommandOutput/) + +
+
+ +Query + + +[Command API Reference](https://docs.aws.amazon.com/AWSJavaScriptSDK/v3/latest/client/dynamodb/command/QueryCommand/) / [Input](https://docs.aws.amazon.com/AWSJavaScriptSDK/v3/latest/Package/-aws-sdk-client-dynamodb/Interface/QueryCommandInput/) / [Output](https://docs.aws.amazon.com/AWSJavaScriptSDK/v3/latest/Package/-aws-sdk-client-dynamodb/Interface/QueryCommandOutput/) + +
+
+ +RestoreTableFromBackup + + +[Command API Reference](https://docs.aws.amazon.com/AWSJavaScriptSDK/v3/latest/client/dynamodb/command/RestoreTableFromBackupCommand/) / [Input](https://docs.aws.amazon.com/AWSJavaScriptSDK/v3/latest/Package/-aws-sdk-client-dynamodb/Interface/RestoreTableFromBackupCommandInput/) / [Output](https://docs.aws.amazon.com/AWSJavaScriptSDK/v3/latest/Package/-aws-sdk-client-dynamodb/Interface/RestoreTableFromBackupCommandOutput/) + +
+
+ +RestoreTableToPointInTime + + +[Command API Reference](https://docs.aws.amazon.com/AWSJavaScriptSDK/v3/latest/client/dynamodb/command/RestoreTableToPointInTimeCommand/) / [Input](https://docs.aws.amazon.com/AWSJavaScriptSDK/v3/latest/Package/-aws-sdk-client-dynamodb/Interface/RestoreTableToPointInTimeCommandInput/) / [Output](https://docs.aws.amazon.com/AWSJavaScriptSDK/v3/latest/Package/-aws-sdk-client-dynamodb/Interface/RestoreTableToPointInTimeCommandOutput/) + +
+
+ +Scan + + +[Command API Reference](https://docs.aws.amazon.com/AWSJavaScriptSDK/v3/latest/client/dynamodb/command/ScanCommand/) / [Input](https://docs.aws.amazon.com/AWSJavaScriptSDK/v3/latest/Package/-aws-sdk-client-dynamodb/Interface/ScanCommandInput/) / [Output](https://docs.aws.amazon.com/AWSJavaScriptSDK/v3/latest/Package/-aws-sdk-client-dynamodb/Interface/ScanCommandOutput/) + +
+
+ +TagResource + + +[Command API Reference](https://docs.aws.amazon.com/AWSJavaScriptSDK/v3/latest/client/dynamodb/command/TagResourceCommand/) / [Input](https://docs.aws.amazon.com/AWSJavaScriptSDK/v3/latest/Package/-aws-sdk-client-dynamodb/Interface/TagResourceCommandInput/) / [Output](https://docs.aws.amazon.com/AWSJavaScriptSDK/v3/latest/Package/-aws-sdk-client-dynamodb/Interface/TagResourceCommandOutput/) + +
+
+ +TransactGetItems + + +[Command API Reference](https://docs.aws.amazon.com/AWSJavaScriptSDK/v3/latest/client/dynamodb/command/TransactGetItemsCommand/) / [Input](https://docs.aws.amazon.com/AWSJavaScriptSDK/v3/latest/Package/-aws-sdk-client-dynamodb/Interface/TransactGetItemsCommandInput/) / [Output](https://docs.aws.amazon.com/AWSJavaScriptSDK/v3/latest/Package/-aws-sdk-client-dynamodb/Interface/TransactGetItemsCommandOutput/) + +
+
+ +TransactWriteItems + + +[Command API Reference](https://docs.aws.amazon.com/AWSJavaScriptSDK/v3/latest/client/dynamodb/command/TransactWriteItemsCommand/) / [Input](https://docs.aws.amazon.com/AWSJavaScriptSDK/v3/latest/Package/-aws-sdk-client-dynamodb/Interface/TransactWriteItemsCommandInput/) / [Output](https://docs.aws.amazon.com/AWSJavaScriptSDK/v3/latest/Package/-aws-sdk-client-dynamodb/Interface/TransactWriteItemsCommandOutput/) + +
+
+ +UntagResource + + +[Command API Reference](https://docs.aws.amazon.com/AWSJavaScriptSDK/v3/latest/client/dynamodb/command/UntagResourceCommand/) / [Input](https://docs.aws.amazon.com/AWSJavaScriptSDK/v3/latest/Package/-aws-sdk-client-dynamodb/Interface/UntagResourceCommandInput/) / [Output](https://docs.aws.amazon.com/AWSJavaScriptSDK/v3/latest/Package/-aws-sdk-client-dynamodb/Interface/UntagResourceCommandOutput/) + +
+
+ +UpdateContinuousBackups + + +[Command API Reference](https://docs.aws.amazon.com/AWSJavaScriptSDK/v3/latest/client/dynamodb/command/UpdateContinuousBackupsCommand/) / [Input](https://docs.aws.amazon.com/AWSJavaScriptSDK/v3/latest/Package/-aws-sdk-client-dynamodb/Interface/UpdateContinuousBackupsCommandInput/) / [Output](https://docs.aws.amazon.com/AWSJavaScriptSDK/v3/latest/Package/-aws-sdk-client-dynamodb/Interface/UpdateContinuousBackupsCommandOutput/) + +
+
+ +UpdateContributorInsights + + +[Command API Reference](https://docs.aws.amazon.com/AWSJavaScriptSDK/v3/latest/client/dynamodb/command/UpdateContributorInsightsCommand/) / [Input](https://docs.aws.amazon.com/AWSJavaScriptSDK/v3/latest/Package/-aws-sdk-client-dynamodb/Interface/UpdateContributorInsightsCommandInput/) / [Output](https://docs.aws.amazon.com/AWSJavaScriptSDK/v3/latest/Package/-aws-sdk-client-dynamodb/Interface/UpdateContributorInsightsCommandOutput/) + +
+
+ +UpdateGlobalTable + + +[Command API Reference](https://docs.aws.amazon.com/AWSJavaScriptSDK/v3/latest/client/dynamodb/command/UpdateGlobalTableCommand/) / [Input](https://docs.aws.amazon.com/AWSJavaScriptSDK/v3/latest/Package/-aws-sdk-client-dynamodb/Interface/UpdateGlobalTableCommandInput/) / [Output](https://docs.aws.amazon.com/AWSJavaScriptSDK/v3/latest/Package/-aws-sdk-client-dynamodb/Interface/UpdateGlobalTableCommandOutput/) + +
+
+ +UpdateGlobalTableSettings + + +[Command API Reference](https://docs.aws.amazon.com/AWSJavaScriptSDK/v3/latest/client/dynamodb/command/UpdateGlobalTableSettingsCommand/) / [Input](https://docs.aws.amazon.com/AWSJavaScriptSDK/v3/latest/Package/-aws-sdk-client-dynamodb/Interface/UpdateGlobalTableSettingsCommandInput/) / [Output](https://docs.aws.amazon.com/AWSJavaScriptSDK/v3/latest/Package/-aws-sdk-client-dynamodb/Interface/UpdateGlobalTableSettingsCommandOutput/) + +
+
+ +UpdateItem + + +[Command API Reference](https://docs.aws.amazon.com/AWSJavaScriptSDK/v3/latest/client/dynamodb/command/UpdateItemCommand/) / [Input](https://docs.aws.amazon.com/AWSJavaScriptSDK/v3/latest/Package/-aws-sdk-client-dynamodb/Interface/UpdateItemCommandInput/) / [Output](https://docs.aws.amazon.com/AWSJavaScriptSDK/v3/latest/Package/-aws-sdk-client-dynamodb/Interface/UpdateItemCommandOutput/) + +
+
+ +UpdateKinesisStreamingDestination + + +[Command API Reference](https://docs.aws.amazon.com/AWSJavaScriptSDK/v3/latest/client/dynamodb/command/UpdateKinesisStreamingDestinationCommand/) / [Input](https://docs.aws.amazon.com/AWSJavaScriptSDK/v3/latest/Package/-aws-sdk-client-dynamodb/Interface/UpdateKinesisStreamingDestinationCommandInput/) / [Output](https://docs.aws.amazon.com/AWSJavaScriptSDK/v3/latest/Package/-aws-sdk-client-dynamodb/Interface/UpdateKinesisStreamingDestinationCommandOutput/) + +
+
+ +UpdateTable + + +[Command API Reference](https://docs.aws.amazon.com/AWSJavaScriptSDK/v3/latest/client/dynamodb/command/UpdateTableCommand/) / [Input](https://docs.aws.amazon.com/AWSJavaScriptSDK/v3/latest/Package/-aws-sdk-client-dynamodb/Interface/UpdateTableCommandInput/) / [Output](https://docs.aws.amazon.com/AWSJavaScriptSDK/v3/latest/Package/-aws-sdk-client-dynamodb/Interface/UpdateTableCommandOutput/) + +
+
+ +UpdateTableReplicaAutoScaling + + +[Command API Reference](https://docs.aws.amazon.com/AWSJavaScriptSDK/v3/latest/client/dynamodb/command/UpdateTableReplicaAutoScalingCommand/) / [Input](https://docs.aws.amazon.com/AWSJavaScriptSDK/v3/latest/Package/-aws-sdk-client-dynamodb/Interface/UpdateTableReplicaAutoScalingCommandInput/) / [Output](https://docs.aws.amazon.com/AWSJavaScriptSDK/v3/latest/Package/-aws-sdk-client-dynamodb/Interface/UpdateTableReplicaAutoScalingCommandOutput/) + +
+
+ +UpdateTimeToLive + + +[Command API Reference](https://docs.aws.amazon.com/AWSJavaScriptSDK/v3/latest/client/dynamodb/command/UpdateTimeToLiveCommand/) / [Input](https://docs.aws.amazon.com/AWSJavaScriptSDK/v3/latest/Package/-aws-sdk-client-dynamodb/Interface/UpdateTimeToLiveCommandInput/) / [Output](https://docs.aws.amazon.com/AWSJavaScriptSDK/v3/latest/Package/-aws-sdk-client-dynamodb/Interface/UpdateTimeToLiveCommandOutput/) + +
diff --git a/amplify/functions/deleteDocument/node_modules/@aws-sdk/client-dynamodb/dist-cjs/auth/httpAuthSchemeProvider.js b/amplify/functions/deleteDocument/node_modules/@aws-sdk/client-dynamodb/dist-cjs/auth/httpAuthSchemeProvider.js new file mode 100644 index 0000000..db59164 --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@aws-sdk/client-dynamodb/dist-cjs/auth/httpAuthSchemeProvider.js @@ -0,0 +1,47 @@ +"use strict"; +Object.defineProperty(exports, "__esModule", { value: true }); +exports.resolveHttpAuthSchemeConfig = exports.defaultDynamoDBHttpAuthSchemeProvider = exports.defaultDynamoDBHttpAuthSchemeParametersProvider = void 0; +const core_1 = require("@aws-sdk/core"); +const util_middleware_1 = require("@smithy/util-middleware"); +const defaultDynamoDBHttpAuthSchemeParametersProvider = async (config, context, input) => { + return { + operation: (0, util_middleware_1.getSmithyContext)(context).operation, + region: (await (0, util_middleware_1.normalizeProvider)(config.region)()) || + (() => { + throw new Error("expected `region` to be configured for `aws.auth#sigv4`"); + })(), + }; +}; +exports.defaultDynamoDBHttpAuthSchemeParametersProvider = defaultDynamoDBHttpAuthSchemeParametersProvider; +function createAwsAuthSigv4HttpAuthOption(authParameters) { + return { + schemeId: "aws.auth#sigv4", + signingProperties: { + name: "dynamodb", + region: authParameters.region, + }, + propertiesExtractor: (config, context) => ({ + signingProperties: { + config, + context, + }, + }), + }; +} +const defaultDynamoDBHttpAuthSchemeProvider = (authParameters) => { + const options = []; + switch (authParameters.operation) { + default: { + options.push(createAwsAuthSigv4HttpAuthOption(authParameters)); + } + } + return options; +}; +exports.defaultDynamoDBHttpAuthSchemeProvider = defaultDynamoDBHttpAuthSchemeProvider; +const resolveHttpAuthSchemeConfig = (config) => { + const config_0 = (0, core_1.resolveAwsSdkSigV4Config)(config); + return 
Object.assign(config_0, { + authSchemePreference: (0, util_middleware_1.normalizeProvider)(config.authSchemePreference ?? []), + }); +}; +exports.resolveHttpAuthSchemeConfig = resolveHttpAuthSchemeConfig; diff --git a/amplify/functions/deleteDocument/node_modules/@aws-sdk/client-dynamodb/dist-cjs/endpoint/endpointResolver.js b/amplify/functions/deleteDocument/node_modules/@aws-sdk/client-dynamodb/dist-cjs/endpoint/endpointResolver.js new file mode 100644 index 0000000..1df276c --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@aws-sdk/client-dynamodb/dist-cjs/endpoint/endpointResolver.js @@ -0,0 +1,27 @@ +"use strict"; +Object.defineProperty(exports, "__esModule", { value: true }); +exports.defaultEndpointResolver = void 0; +const util_endpoints_1 = require("@aws-sdk/util-endpoints"); +const util_endpoints_2 = require("@smithy/util-endpoints"); +const ruleset_1 = require("./ruleset"); +const cache = new util_endpoints_2.EndpointCache({ + size: 50, + params: [ + "AccountId", + "AccountIdEndpointMode", + "Endpoint", + "Region", + "ResourceArn", + "ResourceArnList", + "UseDualStack", + "UseFIPS", + ], +}); +const defaultEndpointResolver = (endpointParams, context = {}) => { + return cache.get(endpointParams, () => (0, util_endpoints_2.resolveEndpoint)(ruleset_1.ruleSet, { + endpointParams: endpointParams, + logger: context.logger, + })); +}; +exports.defaultEndpointResolver = defaultEndpointResolver; +util_endpoints_2.customEndpointFunctions.aws = util_endpoints_1.awsEndpointFunctions; diff --git a/amplify/functions/deleteDocument/node_modules/@aws-sdk/client-dynamodb/dist-cjs/endpoint/ruleset.js b/amplify/functions/deleteDocument/node_modules/@aws-sdk/client-dynamodb/dist-cjs/endpoint/ruleset.js new file mode 100644 index 0000000..0a9f993 --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@aws-sdk/client-dynamodb/dist-cjs/endpoint/ruleset.js @@ -0,0 +1,7 @@ +"use strict"; +Object.defineProperty(exports, "__esModule", { value: true }); 
+exports.ruleSet = void 0; +const S = "required", T = "type", U = "fn", V = "argv", W = "ref", X = "properties", Y = "headers"; +const a = false, b = "isSet", c = "error", d = "endpoint", e = "tree", f = "PartitionResult", g = "stringEquals", h = "dynamodb", i = "getAttr", j = "aws.parseArn", k = "ParsedArn", l = "isValidHostLabel", m = "FirstArn", n = { [S]: false, [T]: "String" }, o = { [S]: true, "default": false, [T]: "Boolean" }, p = { [U]: "booleanEquals", [V]: [{ [W]: "UseFIPS" }, true] }, q = { [U]: "booleanEquals", [V]: [{ [W]: "UseDualStack" }, true] }, r = {}, s = { [W]: "Region" }, t = { [U]: "booleanEquals", [V]: [{ [U]: i, [V]: [{ [W]: f }, "supportsFIPS"] }, true] }, u = { [U]: "booleanEquals", [V]: [{ [U]: i, [V]: [{ [W]: f }, "supportsDualStack"] }, true] }, v = { "conditions": [{ [U]: b, [V]: [{ [W]: "AccountIdEndpointMode" }] }, { [U]: g, [V]: [{ [W]: "AccountIdEndpointMode" }, "required"] }], "rules": [{ [c]: "Invalid Configuration: AccountIdEndpointMode is required and FIPS is enabled, but FIPS account endpoints are not supported", [T]: c }], [T]: e }, w = { [U]: b, [V]: [{ [W]: "AccountIdEndpointMode" }] }, x = { [c]: "Invalid Configuration: AccountIdEndpointMode is required and FIPS is enabled, but FIPS account endpoints are not supported", [T]: c }, y = { [U]: i, [V]: [{ [W]: f }, "name"] }, z = { [d]: { "url": "https://dynamodb.{Region}.{PartitionResult#dnsSuffix}", [X]: {}, [Y]: {} }, [T]: d }, A = { [U]: "not", [V]: [p] }, B = { [c]: "Invalid Configuration: AccountIdEndpointMode is required and DualStack is enabled, but DualStack account endpoints are not supported", [T]: c }, C = { [U]: "not", [V]: [{ [U]: g, [V]: [{ [W]: "AccountIdEndpointMode" }, "disabled"] }] }, D = { [U]: g, [V]: [y, "aws"] }, E = { [U]: "not", [V]: [q] }, F = { [U]: g, [V]: [{ [U]: i, [V]: [{ [W]: k }, "service"] }, h] }, G = { [U]: l, [V]: [{ [U]: i, [V]: [{ [W]: k }, "region"] }, false] }, H = { [U]: g, [V]: [{ [U]: i, [V]: [{ [W]: k }, "region"] }, "{Region}"] 
}, I = { [U]: l, [V]: [{ [U]: i, [V]: [{ [W]: k }, "accountId"] }, false] }, J = { "url": "https://{ParsedArn#accountId}.ddb.{Region}.{PartitionResult#dnsSuffix}", [X]: {}, [Y]: {} }, K = { [W]: "ResourceArnList" }, L = { [W]: "AccountId" }, M = [p], N = [q], O = [s], P = [w, { [U]: g, [V]: [{ [W]: "AccountIdEndpointMode" }, "required"] }], Q = [A], R = [{ [W]: "ResourceArn" }]; +const _data = { version: "1.0", parameters: { Region: n, UseDualStack: o, UseFIPS: o, Endpoint: n, AccountId: n, AccountIdEndpointMode: n, ResourceArn: n, ResourceArnList: { [S]: a, [T]: "stringArray" } }, rules: [{ conditions: [{ [U]: b, [V]: [{ [W]: "Endpoint" }] }], rules: [{ conditions: M, error: "Invalid Configuration: FIPS and custom endpoint are not supported", [T]: c }, { conditions: N, error: "Invalid Configuration: Dualstack and custom endpoint are not supported", [T]: c }, { endpoint: { url: "{Endpoint}", [X]: r, [Y]: r }, [T]: d }], [T]: e }, { conditions: [{ [U]: b, [V]: O }], rules: [{ conditions: [{ [U]: "aws.partition", [V]: O, assign: f }], rules: [{ conditions: [{ [U]: g, [V]: [s, "local"] }], rules: [{ conditions: M, error: "Invalid Configuration: FIPS and local endpoint are not supported", [T]: c }, { conditions: N, error: "Invalid Configuration: Dualstack and local endpoint are not supported", [T]: c }, { endpoint: { url: "http://localhost:8000", [X]: { authSchemes: [{ signingRegion: "us-east-1", name: "sigv4", signingName: h }] }, [Y]: r }, [T]: d }], [T]: e }, { conditions: [p, q], rules: [{ conditions: [t, u], rules: [v, { endpoint: { url: "https://dynamodb-fips.{Region}.{PartitionResult#dualStackDnsSuffix}", [X]: r, [Y]: r }, [T]: d }], [T]: e }, { error: "FIPS and DualStack are enabled, but this partition does not support one or both", [T]: c }], [T]: e }, { conditions: M, rules: [{ conditions: [t], rules: [{ conditions: [{ [U]: g, [V]: [y, "aws-us-gov"] }], rules: [v, z], [T]: e }, v, { endpoint: { url: 
"https://dynamodb-fips.{Region}.{PartitionResult#dnsSuffix}", [X]: r, [Y]: r }, [T]: d }], [T]: e }, { error: "FIPS is enabled but this partition does not support FIPS", [T]: c }], [T]: e }, { conditions: N, rules: [{ conditions: [u], rules: [{ conditions: P, rules: [{ conditions: Q, rules: [B], [T]: e }, x], [T]: e }, { endpoint: { url: "https://dynamodb.{Region}.{PartitionResult#dualStackDnsSuffix}", [X]: r, [Y]: r }, [T]: d }], [T]: e }, { error: "DualStack is enabled but this partition does not support DualStack", [T]: c }], [T]: e }, { conditions: [w, C, D, A, E, { [U]: b, [V]: R }, { [U]: j, [V]: R, assign: k }, F, G, H, I], endpoint: J, [T]: d }, { conditions: [w, C, D, A, E, { [U]: b, [V]: [K] }, { [U]: i, [V]: [K, "[0]"], assign: m }, { [U]: j, [V]: [{ [W]: m }], assign: k }, F, G, H, I], endpoint: J, [T]: d }, { conditions: [w, C, D, A, E, { [U]: b, [V]: [L] }], rules: [{ conditions: [{ [U]: l, [V]: [L, a] }], rules: [{ endpoint: { url: "https://{AccountId}.ddb.{Region}.{PartitionResult#dnsSuffix}", [X]: r, [Y]: r }, [T]: d }], [T]: e }, { error: "Credentials-sourced account ID parameter is invalid", [T]: c }], [T]: e }, { conditions: P, rules: [{ conditions: Q, rules: [{ conditions: [E], rules: [{ conditions: [D], rules: [{ error: "AccountIdEndpointMode is required but no AccountID was provided or able to be loaded", [T]: c }], [T]: e }, { error: "Invalid Configuration: AccountIdEndpointMode is required but account endpoints are not supported in this partition", [T]: c }], [T]: e }, B], [T]: e }, x], [T]: e }, z], [T]: e }], [T]: e }, { error: "Invalid Configuration: Missing Region", [T]: c }] }; +exports.ruleSet = _data; diff --git a/amplify/functions/deleteDocument/node_modules/@aws-sdk/client-dynamodb/dist-cjs/index.js b/amplify/functions/deleteDocument/node_modules/@aws-sdk/client-dynamodb/dist-cjs/index.js new file mode 100644 index 0000000..b29fd11 --- /dev/null +++ 
b/amplify/functions/deleteDocument/node_modules/@aws-sdk/client-dynamodb/dist-cjs/index.js @@ -0,0 +1,5561 @@ +"use strict"; +var __defProp = Object.defineProperty; +var __getOwnPropDesc = Object.getOwnPropertyDescriptor; +var __getOwnPropNames = Object.getOwnPropertyNames; +var __hasOwnProp = Object.prototype.hasOwnProperty; +var __name = (target, value) => __defProp(target, "name", { value, configurable: true }); +var __export = (target, all) => { + for (var name in all) + __defProp(target, name, { get: all[name], enumerable: true }); +}; +var __copyProps = (to, from, except, desc) => { + if (from && typeof from === "object" || typeof from === "function") { + for (let key of __getOwnPropNames(from)) + if (!__hasOwnProp.call(to, key) && key !== except) + __defProp(to, key, { get: () => from[key], enumerable: !(desc = __getOwnPropDesc(from, key)) || desc.enumerable }); + } + return to; +}; +var __toCommonJS = (mod) => __copyProps(__defProp({}, "__esModule", { value: true }), mod); + +// src/index.ts +var index_exports = {}; +__export(index_exports, { + ApproximateCreationDateTimePrecision: () => ApproximateCreationDateTimePrecision, + AttributeAction: () => AttributeAction, + AttributeValue: () => AttributeValue, + BackupInUseException: () => BackupInUseException, + BackupNotFoundException: () => BackupNotFoundException, + BackupStatus: () => BackupStatus, + BackupType: () => BackupType, + BackupTypeFilter: () => BackupTypeFilter, + BatchExecuteStatementCommand: () => BatchExecuteStatementCommand, + BatchGetItemCommand: () => BatchGetItemCommand, + BatchStatementErrorCodeEnum: () => BatchStatementErrorCodeEnum, + BatchWriteItemCommand: () => BatchWriteItemCommand, + BillingMode: () => BillingMode, + ComparisonOperator: () => ComparisonOperator, + ConditionalCheckFailedException: () => ConditionalCheckFailedException, + ConditionalOperator: () => ConditionalOperator, + ContinuousBackupsStatus: () => ContinuousBackupsStatus, + ContinuousBackupsUnavailableException: 
() => ContinuousBackupsUnavailableException, + ContributorInsightsAction: () => ContributorInsightsAction, + ContributorInsightsStatus: () => ContributorInsightsStatus, + CreateBackupCommand: () => CreateBackupCommand, + CreateGlobalTableCommand: () => CreateGlobalTableCommand, + CreateTableCommand: () => CreateTableCommand, + DeleteBackupCommand: () => DeleteBackupCommand, + DeleteItemCommand: () => DeleteItemCommand, + DeleteResourcePolicyCommand: () => DeleteResourcePolicyCommand, + DeleteTableCommand: () => DeleteTableCommand, + DescribeBackupCommand: () => DescribeBackupCommand, + DescribeContinuousBackupsCommand: () => DescribeContinuousBackupsCommand, + DescribeContributorInsightsCommand: () => DescribeContributorInsightsCommand, + DescribeEndpointsCommand: () => DescribeEndpointsCommand, + DescribeExportCommand: () => DescribeExportCommand, + DescribeGlobalTableCommand: () => DescribeGlobalTableCommand, + DescribeGlobalTableSettingsCommand: () => DescribeGlobalTableSettingsCommand, + DescribeImportCommand: () => DescribeImportCommand, + DescribeKinesisStreamingDestinationCommand: () => DescribeKinesisStreamingDestinationCommand, + DescribeLimitsCommand: () => DescribeLimitsCommand, + DescribeTableCommand: () => DescribeTableCommand, + DescribeTableReplicaAutoScalingCommand: () => DescribeTableReplicaAutoScalingCommand, + DescribeTimeToLiveCommand: () => DescribeTimeToLiveCommand, + DestinationStatus: () => DestinationStatus, + DisableKinesisStreamingDestinationCommand: () => DisableKinesisStreamingDestinationCommand, + DuplicateItemException: () => DuplicateItemException, + DynamoDB: () => DynamoDB, + DynamoDBClient: () => DynamoDBClient, + DynamoDBServiceException: () => DynamoDBServiceException, + EnableKinesisStreamingDestinationCommand: () => EnableKinesisStreamingDestinationCommand, + ExecuteStatementCommand: () => ExecuteStatementCommand, + ExecuteTransactionCommand: () => ExecuteTransactionCommand, + ExportConflictException: () => 
ExportConflictException, + ExportFormat: () => ExportFormat, + ExportNotFoundException: () => ExportNotFoundException, + ExportStatus: () => ExportStatus, + ExportTableToPointInTimeCommand: () => ExportTableToPointInTimeCommand, + ExportType: () => ExportType, + ExportViewType: () => ExportViewType, + GetItemCommand: () => GetItemCommand, + GetResourcePolicyCommand: () => GetResourcePolicyCommand, + GlobalTableAlreadyExistsException: () => GlobalTableAlreadyExistsException, + GlobalTableNotFoundException: () => GlobalTableNotFoundException, + GlobalTableStatus: () => GlobalTableStatus, + IdempotentParameterMismatchException: () => IdempotentParameterMismatchException, + ImportConflictException: () => ImportConflictException, + ImportNotFoundException: () => ImportNotFoundException, + ImportStatus: () => ImportStatus, + ImportTableCommand: () => ImportTableCommand, + IndexNotFoundException: () => IndexNotFoundException, + IndexStatus: () => IndexStatus, + InputCompressionType: () => InputCompressionType, + InputFormat: () => InputFormat, + InternalServerError: () => InternalServerError, + InvalidEndpointException: () => InvalidEndpointException, + InvalidExportTimeException: () => InvalidExportTimeException, + InvalidRestoreTimeException: () => InvalidRestoreTimeException, + ItemCollectionSizeLimitExceededException: () => ItemCollectionSizeLimitExceededException, + KeyType: () => KeyType, + LimitExceededException: () => LimitExceededException, + ListBackupsCommand: () => ListBackupsCommand, + ListContributorInsightsCommand: () => ListContributorInsightsCommand, + ListExportsCommand: () => ListExportsCommand, + ListGlobalTablesCommand: () => ListGlobalTablesCommand, + ListImportsCommand: () => ListImportsCommand, + ListTablesCommand: () => ListTablesCommand, + ListTagsOfResourceCommand: () => ListTagsOfResourceCommand, + MultiRegionConsistency: () => MultiRegionConsistency, + PointInTimeRecoveryStatus: () => PointInTimeRecoveryStatus, + 
PointInTimeRecoveryUnavailableException: () => PointInTimeRecoveryUnavailableException, + PolicyNotFoundException: () => PolicyNotFoundException, + ProjectionType: () => ProjectionType, + ProvisionedThroughputExceededException: () => ProvisionedThroughputExceededException, + PutItemCommand: () => PutItemCommand, + PutResourcePolicyCommand: () => PutResourcePolicyCommand, + QueryCommand: () => QueryCommand, + ReplicaAlreadyExistsException: () => ReplicaAlreadyExistsException, + ReplicaNotFoundException: () => ReplicaNotFoundException, + ReplicaStatus: () => ReplicaStatus, + ReplicatedWriteConflictException: () => ReplicatedWriteConflictException, + RequestLimitExceeded: () => RequestLimitExceeded, + ResourceInUseException: () => ResourceInUseException, + ResourceNotFoundException: () => ResourceNotFoundException, + RestoreTableFromBackupCommand: () => RestoreTableFromBackupCommand, + RestoreTableToPointInTimeCommand: () => RestoreTableToPointInTimeCommand, + ReturnConsumedCapacity: () => ReturnConsumedCapacity, + ReturnItemCollectionMetrics: () => ReturnItemCollectionMetrics, + ReturnValue: () => ReturnValue, + ReturnValuesOnConditionCheckFailure: () => ReturnValuesOnConditionCheckFailure, + S3SseAlgorithm: () => S3SseAlgorithm, + SSEStatus: () => SSEStatus, + SSEType: () => SSEType, + ScalarAttributeType: () => ScalarAttributeType, + ScanCommand: () => ScanCommand, + Select: () => Select, + StreamViewType: () => StreamViewType, + TableAlreadyExistsException: () => TableAlreadyExistsException, + TableClass: () => TableClass, + TableInUseException: () => TableInUseException, + TableNotFoundException: () => TableNotFoundException, + TableStatus: () => TableStatus, + TagResourceCommand: () => TagResourceCommand, + TimeToLiveStatus: () => TimeToLiveStatus, + TransactGetItemsCommand: () => TransactGetItemsCommand, + TransactWriteItemsCommand: () => TransactWriteItemsCommand, + TransactionCanceledException: () => TransactionCanceledException, + 
TransactionConflictException: () => TransactionConflictException, + TransactionInProgressException: () => TransactionInProgressException, + UntagResourceCommand: () => UntagResourceCommand, + UpdateContinuousBackupsCommand: () => UpdateContinuousBackupsCommand, + UpdateContributorInsightsCommand: () => UpdateContributorInsightsCommand, + UpdateGlobalTableCommand: () => UpdateGlobalTableCommand, + UpdateGlobalTableSettingsCommand: () => UpdateGlobalTableSettingsCommand, + UpdateItemCommand: () => UpdateItemCommand, + UpdateKinesisStreamingDestinationCommand: () => UpdateKinesisStreamingDestinationCommand, + UpdateTableCommand: () => UpdateTableCommand, + UpdateTableReplicaAutoScalingCommand: () => UpdateTableReplicaAutoScalingCommand, + UpdateTimeToLiveCommand: () => UpdateTimeToLiveCommand, + __Client: () => import_smithy_client.Client, + paginateListContributorInsights: () => paginateListContributorInsights, + paginateListExports: () => paginateListExports, + paginateListImports: () => paginateListImports, + paginateListTables: () => paginateListTables, + paginateQuery: () => paginateQuery, + paginateScan: () => paginateScan, + waitForTableExists: () => waitForTableExists, + waitForTableNotExists: () => waitForTableNotExists, + waitUntilTableExists: () => waitUntilTableExists, + waitUntilTableNotExists: () => waitUntilTableNotExists +}); +module.exports = __toCommonJS(index_exports); + +// src/DynamoDBClient.ts +var import_account_id_endpoint = require("@aws-sdk/core/account-id-endpoint"); +var import_middleware_endpoint_discovery = require("@aws-sdk/middleware-endpoint-discovery"); +var import_middleware_host_header = require("@aws-sdk/middleware-host-header"); +var import_middleware_logger = require("@aws-sdk/middleware-logger"); +var import_middleware_recursion_detection = require("@aws-sdk/middleware-recursion-detection"); +var import_middleware_user_agent = require("@aws-sdk/middleware-user-agent"); +var import_config_resolver = 
require("@smithy/config-resolver"); +var import_core2 = require("@smithy/core"); +var import_middleware_content_length = require("@smithy/middleware-content-length"); + +var import_middleware_retry = require("@smithy/middleware-retry"); + +var import_httpAuthSchemeProvider = require("./auth/httpAuthSchemeProvider"); + +// src/commands/DescribeEndpointsCommand.ts +var import_middleware_endpoint = require("@smithy/middleware-endpoint"); +var import_middleware_serde = require("@smithy/middleware-serde"); + + +// src/endpoint/EndpointParameters.ts +var resolveClientEndpointParameters = /* @__PURE__ */ __name((options) => { + return Object.assign(options, { + useDualstackEndpoint: options.useDualstackEndpoint ?? false, + useFipsEndpoint: options.useFipsEndpoint ?? false, + defaultSigningName: "dynamodb" + }); +}, "resolveClientEndpointParameters"); +var commonParams = { + UseFIPS: { type: "builtInParams", name: "useFipsEndpoint" }, + AccountId: { type: "builtInParams", name: "accountId" }, + Endpoint: { type: "builtInParams", name: "endpoint" }, + Region: { type: "builtInParams", name: "region" }, + UseDualStack: { type: "builtInParams", name: "useDualstackEndpoint" }, + AccountIdEndpointMode: { type: "builtInParams", name: "accountIdEndpointMode" } +}; + +// src/protocols/Aws_json1_0.ts +var import_core = require("@aws-sdk/core"); +var import_protocol_http = require("@smithy/protocol-http"); + +var import_uuid = require("uuid"); + +// src/models/DynamoDBServiceException.ts +var import_smithy_client = require("@smithy/smithy-client"); +var DynamoDBServiceException = class _DynamoDBServiceException extends import_smithy_client.ServiceException { + static { + __name(this, "DynamoDBServiceException"); + } + /** + * @internal + */ + constructor(options) { + super(options); + Object.setPrototypeOf(this, _DynamoDBServiceException.prototype); + } +}; + +// src/models/models_0.ts +var ApproximateCreationDateTimePrecision = { + MICROSECOND: "MICROSECOND", + MILLISECOND: 
"MILLISECOND" +}; +var AttributeAction = { + ADD: "ADD", + DELETE: "DELETE", + PUT: "PUT" +}; +var ScalarAttributeType = { + B: "B", + N: "N", + S: "S" +}; +var BackupStatus = { + AVAILABLE: "AVAILABLE", + CREATING: "CREATING", + DELETED: "DELETED" +}; +var BackupType = { + AWS_BACKUP: "AWS_BACKUP", + SYSTEM: "SYSTEM", + USER: "USER" +}; +var BillingMode = { + PAY_PER_REQUEST: "PAY_PER_REQUEST", + PROVISIONED: "PROVISIONED" +}; +var KeyType = { + HASH: "HASH", + RANGE: "RANGE" +}; +var ProjectionType = { + ALL: "ALL", + INCLUDE: "INCLUDE", + KEYS_ONLY: "KEYS_ONLY" +}; +var SSEType = { + AES256: "AES256", + KMS: "KMS" +}; +var SSEStatus = { + DISABLED: "DISABLED", + DISABLING: "DISABLING", + ENABLED: "ENABLED", + ENABLING: "ENABLING", + UPDATING: "UPDATING" +}; +var StreamViewType = { + KEYS_ONLY: "KEYS_ONLY", + NEW_AND_OLD_IMAGES: "NEW_AND_OLD_IMAGES", + NEW_IMAGE: "NEW_IMAGE", + OLD_IMAGE: "OLD_IMAGE" +}; +var TimeToLiveStatus = { + DISABLED: "DISABLED", + DISABLING: "DISABLING", + ENABLED: "ENABLED", + ENABLING: "ENABLING" +}; +var BackupInUseException = class _BackupInUseException extends DynamoDBServiceException { + static { + __name(this, "BackupInUseException"); + } + name = "BackupInUseException"; + $fault = "client"; + /** + * @internal + */ + constructor(opts) { + super({ + name: "BackupInUseException", + $fault: "client", + ...opts + }); + Object.setPrototypeOf(this, _BackupInUseException.prototype); + } +}; +var BackupNotFoundException = class _BackupNotFoundException extends DynamoDBServiceException { + static { + __name(this, "BackupNotFoundException"); + } + name = "BackupNotFoundException"; + $fault = "client"; + /** + * @internal + */ + constructor(opts) { + super({ + name: "BackupNotFoundException", + $fault: "client", + ...opts + }); + Object.setPrototypeOf(this, _BackupNotFoundException.prototype); + } +}; +var BackupTypeFilter = { + ALL: "ALL", + AWS_BACKUP: "AWS_BACKUP", + SYSTEM: "SYSTEM", + USER: "USER" +}; +var ReturnConsumedCapacity = { + 
INDEXES: "INDEXES", + NONE: "NONE", + TOTAL: "TOTAL" +}; +var ReturnValuesOnConditionCheckFailure = { + ALL_OLD: "ALL_OLD", + NONE: "NONE" +}; +var BatchStatementErrorCodeEnum = { + AccessDenied: "AccessDenied", + ConditionalCheckFailed: "ConditionalCheckFailed", + DuplicateItem: "DuplicateItem", + InternalServerError: "InternalServerError", + ItemCollectionSizeLimitExceeded: "ItemCollectionSizeLimitExceeded", + ProvisionedThroughputExceeded: "ProvisionedThroughputExceeded", + RequestLimitExceeded: "RequestLimitExceeded", + ResourceNotFound: "ResourceNotFound", + ThrottlingError: "ThrottlingError", + TransactionConflict: "TransactionConflict", + ValidationError: "ValidationError" +}; +var InternalServerError = class _InternalServerError extends DynamoDBServiceException { + static { + __name(this, "InternalServerError"); + } + name = "InternalServerError"; + $fault = "server"; + /** + * @internal + */ + constructor(opts) { + super({ + name: "InternalServerError", + $fault: "server", + ...opts + }); + Object.setPrototypeOf(this, _InternalServerError.prototype); + } +}; +var RequestLimitExceeded = class _RequestLimitExceeded extends DynamoDBServiceException { + static { + __name(this, "RequestLimitExceeded"); + } + name = "RequestLimitExceeded"; + $fault = "client"; + /** + * @internal + */ + constructor(opts) { + super({ + name: "RequestLimitExceeded", + $fault: "client", + ...opts + }); + Object.setPrototypeOf(this, _RequestLimitExceeded.prototype); + } +}; +var InvalidEndpointException = class _InvalidEndpointException extends DynamoDBServiceException { + static { + __name(this, "InvalidEndpointException"); + } + name = "InvalidEndpointException"; + $fault = "client"; + Message; + /** + * @internal + */ + constructor(opts) { + super({ + name: "InvalidEndpointException", + $fault: "client", + ...opts + }); + Object.setPrototypeOf(this, _InvalidEndpointException.prototype); + this.Message = opts.Message; + } +}; +var ProvisionedThroughputExceededException = class 
_ProvisionedThroughputExceededException extends DynamoDBServiceException { + static { + __name(this, "ProvisionedThroughputExceededException"); + } + name = "ProvisionedThroughputExceededException"; + $fault = "client"; + /** + * @internal + */ + constructor(opts) { + super({ + name: "ProvisionedThroughputExceededException", + $fault: "client", + ...opts + }); + Object.setPrototypeOf(this, _ProvisionedThroughputExceededException.prototype); + } +}; +var ResourceNotFoundException = class _ResourceNotFoundException extends DynamoDBServiceException { + static { + __name(this, "ResourceNotFoundException"); + } + name = "ResourceNotFoundException"; + $fault = "client"; + /** + * @internal + */ + constructor(opts) { + super({ + name: "ResourceNotFoundException", + $fault: "client", + ...opts + }); + Object.setPrototypeOf(this, _ResourceNotFoundException.prototype); + } +}; +var ReturnItemCollectionMetrics = { + NONE: "NONE", + SIZE: "SIZE" +}; +var ItemCollectionSizeLimitExceededException = class _ItemCollectionSizeLimitExceededException extends DynamoDBServiceException { + static { + __name(this, "ItemCollectionSizeLimitExceededException"); + } + name = "ItemCollectionSizeLimitExceededException"; + $fault = "client"; + /** + * @internal + */ + constructor(opts) { + super({ + name: "ItemCollectionSizeLimitExceededException", + $fault: "client", + ...opts + }); + Object.setPrototypeOf(this, _ItemCollectionSizeLimitExceededException.prototype); + } +}; +var ComparisonOperator = { + BEGINS_WITH: "BEGINS_WITH", + BETWEEN: "BETWEEN", + CONTAINS: "CONTAINS", + EQ: "EQ", + GE: "GE", + GT: "GT", + IN: "IN", + LE: "LE", + LT: "LT", + NE: "NE", + NOT_CONTAINS: "NOT_CONTAINS", + NOT_NULL: "NOT_NULL", + NULL: "NULL" +}; +var ConditionalOperator = { + AND: "AND", + OR: "OR" +}; +var ContinuousBackupsStatus = { + DISABLED: "DISABLED", + ENABLED: "ENABLED" +}; +var PointInTimeRecoveryStatus = { + DISABLED: "DISABLED", + ENABLED: "ENABLED" +}; +var ContinuousBackupsUnavailableException 
= class _ContinuousBackupsUnavailableException extends DynamoDBServiceException { + static { + __name(this, "ContinuousBackupsUnavailableException"); + } + name = "ContinuousBackupsUnavailableException"; + $fault = "client"; + /** + * @internal + */ + constructor(opts) { + super({ + name: "ContinuousBackupsUnavailableException", + $fault: "client", + ...opts + }); + Object.setPrototypeOf(this, _ContinuousBackupsUnavailableException.prototype); + } +}; +var ContributorInsightsAction = { + DISABLE: "DISABLE", + ENABLE: "ENABLE" +}; +var ContributorInsightsStatus = { + DISABLED: "DISABLED", + DISABLING: "DISABLING", + ENABLED: "ENABLED", + ENABLING: "ENABLING", + FAILED: "FAILED" +}; +var LimitExceededException = class _LimitExceededException extends DynamoDBServiceException { + static { + __name(this, "LimitExceededException"); + } + name = "LimitExceededException"; + $fault = "client"; + /** + * @internal + */ + constructor(opts) { + super({ + name: "LimitExceededException", + $fault: "client", + ...opts + }); + Object.setPrototypeOf(this, _LimitExceededException.prototype); + } +}; +var TableInUseException = class _TableInUseException extends DynamoDBServiceException { + static { + __name(this, "TableInUseException"); + } + name = "TableInUseException"; + $fault = "client"; + /** + * @internal + */ + constructor(opts) { + super({ + name: "TableInUseException", + $fault: "client", + ...opts + }); + Object.setPrototypeOf(this, _TableInUseException.prototype); + } +}; +var TableNotFoundException = class _TableNotFoundException extends DynamoDBServiceException { + static { + __name(this, "TableNotFoundException"); + } + name = "TableNotFoundException"; + $fault = "client"; + /** + * @internal + */ + constructor(opts) { + super({ + name: "TableNotFoundException", + $fault: "client", + ...opts + }); + Object.setPrototypeOf(this, _TableNotFoundException.prototype); + } +}; +var GlobalTableStatus = { + ACTIVE: "ACTIVE", + CREATING: "CREATING", + DELETING: "DELETING", + 
UPDATING: "UPDATING" +}; +var IndexStatus = { + ACTIVE: "ACTIVE", + CREATING: "CREATING", + DELETING: "DELETING", + UPDATING: "UPDATING" +}; +var ReplicaStatus = { + ACTIVE: "ACTIVE", + CREATING: "CREATING", + CREATION_FAILED: "CREATION_FAILED", + DELETING: "DELETING", + INACCESSIBLE_ENCRYPTION_CREDENTIALS: "INACCESSIBLE_ENCRYPTION_CREDENTIALS", + REGION_DISABLED: "REGION_DISABLED", + UPDATING: "UPDATING" +}; +var TableClass = { + STANDARD: "STANDARD", + STANDARD_INFREQUENT_ACCESS: "STANDARD_INFREQUENT_ACCESS" +}; +var TableStatus = { + ACTIVE: "ACTIVE", + ARCHIVED: "ARCHIVED", + ARCHIVING: "ARCHIVING", + CREATING: "CREATING", + DELETING: "DELETING", + INACCESSIBLE_ENCRYPTION_CREDENTIALS: "INACCESSIBLE_ENCRYPTION_CREDENTIALS", + UPDATING: "UPDATING" +}; +var GlobalTableAlreadyExistsException = class _GlobalTableAlreadyExistsException extends DynamoDBServiceException { + static { + __name(this, "GlobalTableAlreadyExistsException"); + } + name = "GlobalTableAlreadyExistsException"; + $fault = "client"; + /** + * @internal + */ + constructor(opts) { + super({ + name: "GlobalTableAlreadyExistsException", + $fault: "client", + ...opts + }); + Object.setPrototypeOf(this, _GlobalTableAlreadyExistsException.prototype); + } +}; +var MultiRegionConsistency = { + EVENTUAL: "EVENTUAL", + STRONG: "STRONG" +}; +var ResourceInUseException = class _ResourceInUseException extends DynamoDBServiceException { + static { + __name(this, "ResourceInUseException"); + } + name = "ResourceInUseException"; + $fault = "client"; + /** + * @internal + */ + constructor(opts) { + super({ + name: "ResourceInUseException", + $fault: "client", + ...opts + }); + Object.setPrototypeOf(this, _ResourceInUseException.prototype); + } +}; +var ReturnValue = { + ALL_NEW: "ALL_NEW", + ALL_OLD: "ALL_OLD", + NONE: "NONE", + UPDATED_NEW: "UPDATED_NEW", + UPDATED_OLD: "UPDATED_OLD" +}; +var ReplicatedWriteConflictException = class _ReplicatedWriteConflictException extends DynamoDBServiceException { + static { + 
__name(this, "ReplicatedWriteConflictException"); + } + name = "ReplicatedWriteConflictException"; + $fault = "client"; + /** + * @internal + */ + constructor(opts) { + super({ + name: "ReplicatedWriteConflictException", + $fault: "client", + ...opts + }); + Object.setPrototypeOf(this, _ReplicatedWriteConflictException.prototype); + } +}; +var TransactionConflictException = class _TransactionConflictException extends DynamoDBServiceException { + static { + __name(this, "TransactionConflictException"); + } + name = "TransactionConflictException"; + $fault = "client"; + /** + * @internal + */ + constructor(opts) { + super({ + name: "TransactionConflictException", + $fault: "client", + ...opts + }); + Object.setPrototypeOf(this, _TransactionConflictException.prototype); + } +}; +var PolicyNotFoundException = class _PolicyNotFoundException extends DynamoDBServiceException { + static { + __name(this, "PolicyNotFoundException"); + } + name = "PolicyNotFoundException"; + $fault = "client"; + /** + * @internal + */ + constructor(opts) { + super({ + name: "PolicyNotFoundException", + $fault: "client", + ...opts + }); + Object.setPrototypeOf(this, _PolicyNotFoundException.prototype); + } +}; +var ExportFormat = { + DYNAMODB_JSON: "DYNAMODB_JSON", + ION: "ION" +}; +var ExportStatus = { + COMPLETED: "COMPLETED", + FAILED: "FAILED", + IN_PROGRESS: "IN_PROGRESS" +}; +var ExportType = { + FULL_EXPORT: "FULL_EXPORT", + INCREMENTAL_EXPORT: "INCREMENTAL_EXPORT" +}; +var ExportViewType = { + NEW_AND_OLD_IMAGES: "NEW_AND_OLD_IMAGES", + NEW_IMAGE: "NEW_IMAGE" +}; +var S3SseAlgorithm = { + AES256: "AES256", + KMS: "KMS" +}; +var ExportNotFoundException = class _ExportNotFoundException extends DynamoDBServiceException { + static { + __name(this, "ExportNotFoundException"); + } + name = "ExportNotFoundException"; + $fault = "client"; + /** + * @internal + */ + constructor(opts) { + super({ + name: "ExportNotFoundException", + $fault: "client", + ...opts + }); + Object.setPrototypeOf(this, 
_ExportNotFoundException.prototype); + } +}; +var GlobalTableNotFoundException = class _GlobalTableNotFoundException extends DynamoDBServiceException { + static { + __name(this, "GlobalTableNotFoundException"); + } + name = "GlobalTableNotFoundException"; + $fault = "client"; + /** + * @internal + */ + constructor(opts) { + super({ + name: "GlobalTableNotFoundException", + $fault: "client", + ...opts + }); + Object.setPrototypeOf(this, _GlobalTableNotFoundException.prototype); + } +}; +var ImportStatus = { + CANCELLED: "CANCELLED", + CANCELLING: "CANCELLING", + COMPLETED: "COMPLETED", + FAILED: "FAILED", + IN_PROGRESS: "IN_PROGRESS" +}; +var InputCompressionType = { + GZIP: "GZIP", + NONE: "NONE", + ZSTD: "ZSTD" +}; +var InputFormat = { + CSV: "CSV", + DYNAMODB_JSON: "DYNAMODB_JSON", + ION: "ION" +}; +var ImportNotFoundException = class _ImportNotFoundException extends DynamoDBServiceException { + static { + __name(this, "ImportNotFoundException"); + } + name = "ImportNotFoundException"; + $fault = "client"; + /** + * @internal + */ + constructor(opts) { + super({ + name: "ImportNotFoundException", + $fault: "client", + ...opts + }); + Object.setPrototypeOf(this, _ImportNotFoundException.prototype); + } +}; +var DestinationStatus = { + ACTIVE: "ACTIVE", + DISABLED: "DISABLED", + DISABLING: "DISABLING", + ENABLE_FAILED: "ENABLE_FAILED", + ENABLING: "ENABLING", + UPDATING: "UPDATING" +}; +var DuplicateItemException = class _DuplicateItemException extends DynamoDBServiceException { + static { + __name(this, "DuplicateItemException"); + } + name = "DuplicateItemException"; + $fault = "client"; + /** + * @internal + */ + constructor(opts) { + super({ + name: "DuplicateItemException", + $fault: "client", + ...opts + }); + Object.setPrototypeOf(this, _DuplicateItemException.prototype); + } +}; +var IdempotentParameterMismatchException = class _IdempotentParameterMismatchException extends DynamoDBServiceException { + static { + __name(this, 
"IdempotentParameterMismatchException"); + } + name = "IdempotentParameterMismatchException"; + $fault = "client"; + Message; + /** + * @internal + */ + constructor(opts) { + super({ + name: "IdempotentParameterMismatchException", + $fault: "client", + ...opts + }); + Object.setPrototypeOf(this, _IdempotentParameterMismatchException.prototype); + this.Message = opts.Message; + } +}; +var TransactionInProgressException = class _TransactionInProgressException extends DynamoDBServiceException { + static { + __name(this, "TransactionInProgressException"); + } + name = "TransactionInProgressException"; + $fault = "client"; + Message; + /** + * @internal + */ + constructor(opts) { + super({ + name: "TransactionInProgressException", + $fault: "client", + ...opts + }); + Object.setPrototypeOf(this, _TransactionInProgressException.prototype); + this.Message = opts.Message; + } +}; +var ExportConflictException = class _ExportConflictException extends DynamoDBServiceException { + static { + __name(this, "ExportConflictException"); + } + name = "ExportConflictException"; + $fault = "client"; + /** + * @internal + */ + constructor(opts) { + super({ + name: "ExportConflictException", + $fault: "client", + ...opts + }); + Object.setPrototypeOf(this, _ExportConflictException.prototype); + } +}; +var InvalidExportTimeException = class _InvalidExportTimeException extends DynamoDBServiceException { + static { + __name(this, "InvalidExportTimeException"); + } + name = "InvalidExportTimeException"; + $fault = "client"; + /** + * @internal + */ + constructor(opts) { + super({ + name: "InvalidExportTimeException", + $fault: "client", + ...opts + }); + Object.setPrototypeOf(this, _InvalidExportTimeException.prototype); + } +}; +var PointInTimeRecoveryUnavailableException = class _PointInTimeRecoveryUnavailableException extends DynamoDBServiceException { + static { + __name(this, "PointInTimeRecoveryUnavailableException"); + } + name = "PointInTimeRecoveryUnavailableException"; + $fault = 
"client"; + /** + * @internal + */ + constructor(opts) { + super({ + name: "PointInTimeRecoveryUnavailableException", + $fault: "client", + ...opts + }); + Object.setPrototypeOf(this, _PointInTimeRecoveryUnavailableException.prototype); + } +}; +var ImportConflictException = class _ImportConflictException extends DynamoDBServiceException { + static { + __name(this, "ImportConflictException"); + } + name = "ImportConflictException"; + $fault = "client"; + /** + * @internal + */ + constructor(opts) { + super({ + name: "ImportConflictException", + $fault: "client", + ...opts + }); + Object.setPrototypeOf(this, _ImportConflictException.prototype); + } +}; +var Select = { + ALL_ATTRIBUTES: "ALL_ATTRIBUTES", + ALL_PROJECTED_ATTRIBUTES: "ALL_PROJECTED_ATTRIBUTES", + COUNT: "COUNT", + SPECIFIC_ATTRIBUTES: "SPECIFIC_ATTRIBUTES" +}; +var TableAlreadyExistsException = class _TableAlreadyExistsException extends DynamoDBServiceException { + static { + __name(this, "TableAlreadyExistsException"); + } + name = "TableAlreadyExistsException"; + $fault = "client"; + /** + * @internal + */ + constructor(opts) { + super({ + name: "TableAlreadyExistsException", + $fault: "client", + ...opts + }); + Object.setPrototypeOf(this, _TableAlreadyExistsException.prototype); + } +}; +var InvalidRestoreTimeException = class _InvalidRestoreTimeException extends DynamoDBServiceException { + static { + __name(this, "InvalidRestoreTimeException"); + } + name = "InvalidRestoreTimeException"; + $fault = "client"; + /** + * @internal + */ + constructor(opts) { + super({ + name: "InvalidRestoreTimeException", + $fault: "client", + ...opts + }); + Object.setPrototypeOf(this, _InvalidRestoreTimeException.prototype); + } +}; +var ReplicaAlreadyExistsException = class _ReplicaAlreadyExistsException extends DynamoDBServiceException { + static { + __name(this, "ReplicaAlreadyExistsException"); + } + name = "ReplicaAlreadyExistsException"; + $fault = "client"; + /** + * @internal + */ + constructor(opts) { + 
super({ + name: "ReplicaAlreadyExistsException", + $fault: "client", + ...opts + }); + Object.setPrototypeOf(this, _ReplicaAlreadyExistsException.prototype); + } +}; +var ReplicaNotFoundException = class _ReplicaNotFoundException extends DynamoDBServiceException { + static { + __name(this, "ReplicaNotFoundException"); + } + name = "ReplicaNotFoundException"; + $fault = "client"; + /** + * @internal + */ + constructor(opts) { + super({ + name: "ReplicaNotFoundException", + $fault: "client", + ...opts + }); + Object.setPrototypeOf(this, _ReplicaNotFoundException.prototype); + } +}; +var IndexNotFoundException = class _IndexNotFoundException extends DynamoDBServiceException { + static { + __name(this, "IndexNotFoundException"); + } + name = "IndexNotFoundException"; + $fault = "client"; + /** + * @internal + */ + constructor(opts) { + super({ + name: "IndexNotFoundException", + $fault: "client", + ...opts + }); + Object.setPrototypeOf(this, _IndexNotFoundException.prototype); + } +}; +var AttributeValue; +((AttributeValue2) => { + AttributeValue2.visit = /* @__PURE__ */ __name((value, visitor) => { + if (value.S !== void 0) return visitor.S(value.S); + if (value.N !== void 0) return visitor.N(value.N); + if (value.B !== void 0) return visitor.B(value.B); + if (value.SS !== void 0) return visitor.SS(value.SS); + if (value.NS !== void 0) return visitor.NS(value.NS); + if (value.BS !== void 0) return visitor.BS(value.BS); + if (value.M !== void 0) return visitor.M(value.M); + if (value.L !== void 0) return visitor.L(value.L); + if (value.NULL !== void 0) return visitor.NULL(value.NULL); + if (value.BOOL !== void 0) return visitor.BOOL(value.BOOL); + return visitor._(value.$unknown[0], value.$unknown[1]); + }, "visit"); +})(AttributeValue || (AttributeValue = {})); +var ConditionalCheckFailedException = class _ConditionalCheckFailedException extends DynamoDBServiceException { + static { + __name(this, "ConditionalCheckFailedException"); + } + name = 
"ConditionalCheckFailedException"; + $fault = "client"; + /** + *

Item which caused the ConditionalCheckFailedException.

+ * @public + */ + Item; + /** + * @internal + */ + constructor(opts) { + super({ + name: "ConditionalCheckFailedException", + $fault: "client", + ...opts + }); + Object.setPrototypeOf(this, _ConditionalCheckFailedException.prototype); + this.Item = opts.Item; + } +}; +var TransactionCanceledException = class _TransactionCanceledException extends DynamoDBServiceException { + static { + __name(this, "TransactionCanceledException"); + } + name = "TransactionCanceledException"; + $fault = "client"; + Message; + /** + *

A list of cancellation reasons.

+ * @public + */ + CancellationReasons; + /** + * @internal + */ + constructor(opts) { + super({ + name: "TransactionCanceledException", + $fault: "client", + ...opts + }); + Object.setPrototypeOf(this, _TransactionCanceledException.prototype); + this.Message = opts.Message; + this.CancellationReasons = opts.CancellationReasons; + } +}; + +// src/protocols/Aws_json1_0.ts +var se_BatchExecuteStatementCommand = /* @__PURE__ */ __name(async (input, context) => { + const headers = sharedHeaders("BatchExecuteStatement"); + let body; + body = JSON.stringify(se_BatchExecuteStatementInput(input, context)); + return buildHttpRpcRequest(context, headers, "/", void 0, body); +}, "se_BatchExecuteStatementCommand"); +var se_BatchGetItemCommand = /* @__PURE__ */ __name(async (input, context) => { + const headers = sharedHeaders("BatchGetItem"); + let body; + body = JSON.stringify(se_BatchGetItemInput(input, context)); + return buildHttpRpcRequest(context, headers, "/", void 0, body); +}, "se_BatchGetItemCommand"); +var se_BatchWriteItemCommand = /* @__PURE__ */ __name(async (input, context) => { + const headers = sharedHeaders("BatchWriteItem"); + let body; + body = JSON.stringify(se_BatchWriteItemInput(input, context)); + return buildHttpRpcRequest(context, headers, "/", void 0, body); +}, "se_BatchWriteItemCommand"); +var se_CreateBackupCommand = /* @__PURE__ */ __name(async (input, context) => { + const headers = sharedHeaders("CreateBackup"); + let body; + body = JSON.stringify((0, import_smithy_client._json)(input)); + return buildHttpRpcRequest(context, headers, "/", void 0, body); +}, "se_CreateBackupCommand"); +var se_CreateGlobalTableCommand = /* @__PURE__ */ __name(async (input, context) => { + const headers = sharedHeaders("CreateGlobalTable"); + let body; + body = JSON.stringify((0, import_smithy_client._json)(input)); + return buildHttpRpcRequest(context, headers, "/", void 0, body); +}, "se_CreateGlobalTableCommand"); +var se_CreateTableCommand = /* @__PURE__ */ 
__name(async (input, context) => { + const headers = sharedHeaders("CreateTable"); + let body; + body = JSON.stringify((0, import_smithy_client._json)(input)); + return buildHttpRpcRequest(context, headers, "/", void 0, body); +}, "se_CreateTableCommand"); +var se_DeleteBackupCommand = /* @__PURE__ */ __name(async (input, context) => { + const headers = sharedHeaders("DeleteBackup"); + let body; + body = JSON.stringify((0, import_smithy_client._json)(input)); + return buildHttpRpcRequest(context, headers, "/", void 0, body); +}, "se_DeleteBackupCommand"); +var se_DeleteItemCommand = /* @__PURE__ */ __name(async (input, context) => { + const headers = sharedHeaders("DeleteItem"); + let body; + body = JSON.stringify(se_DeleteItemInput(input, context)); + return buildHttpRpcRequest(context, headers, "/", void 0, body); +}, "se_DeleteItemCommand"); +var se_DeleteResourcePolicyCommand = /* @__PURE__ */ __name(async (input, context) => { + const headers = sharedHeaders("DeleteResourcePolicy"); + let body; + body = JSON.stringify((0, import_smithy_client._json)(input)); + return buildHttpRpcRequest(context, headers, "/", void 0, body); +}, "se_DeleteResourcePolicyCommand"); +var se_DeleteTableCommand = /* @__PURE__ */ __name(async (input, context) => { + const headers = sharedHeaders("DeleteTable"); + let body; + body = JSON.stringify((0, import_smithy_client._json)(input)); + return buildHttpRpcRequest(context, headers, "/", void 0, body); +}, "se_DeleteTableCommand"); +var se_DescribeBackupCommand = /* @__PURE__ */ __name(async (input, context) => { + const headers = sharedHeaders("DescribeBackup"); + let body; + body = JSON.stringify((0, import_smithy_client._json)(input)); + return buildHttpRpcRequest(context, headers, "/", void 0, body); +}, "se_DescribeBackupCommand"); +var se_DescribeContinuousBackupsCommand = /* @__PURE__ */ __name(async (input, context) => { + const headers = sharedHeaders("DescribeContinuousBackups"); + let body; + body = JSON.stringify((0, 
import_smithy_client._json)(input)); + return buildHttpRpcRequest(context, headers, "/", void 0, body); +}, "se_DescribeContinuousBackupsCommand"); +var se_DescribeContributorInsightsCommand = /* @__PURE__ */ __name(async (input, context) => { + const headers = sharedHeaders("DescribeContributorInsights"); + let body; + body = JSON.stringify((0, import_smithy_client._json)(input)); + return buildHttpRpcRequest(context, headers, "/", void 0, body); +}, "se_DescribeContributorInsightsCommand"); +var se_DescribeEndpointsCommand = /* @__PURE__ */ __name(async (input, context) => { + const headers = sharedHeaders("DescribeEndpoints"); + let body; + body = JSON.stringify((0, import_smithy_client._json)(input)); + return buildHttpRpcRequest(context, headers, "/", void 0, body); +}, "se_DescribeEndpointsCommand"); +var se_DescribeExportCommand = /* @__PURE__ */ __name(async (input, context) => { + const headers = sharedHeaders("DescribeExport"); + let body; + body = JSON.stringify((0, import_smithy_client._json)(input)); + return buildHttpRpcRequest(context, headers, "/", void 0, body); +}, "se_DescribeExportCommand"); +var se_DescribeGlobalTableCommand = /* @__PURE__ */ __name(async (input, context) => { + const headers = sharedHeaders("DescribeGlobalTable"); + let body; + body = JSON.stringify((0, import_smithy_client._json)(input)); + return buildHttpRpcRequest(context, headers, "/", void 0, body); +}, "se_DescribeGlobalTableCommand"); +var se_DescribeGlobalTableSettingsCommand = /* @__PURE__ */ __name(async (input, context) => { + const headers = sharedHeaders("DescribeGlobalTableSettings"); + let body; + body = JSON.stringify((0, import_smithy_client._json)(input)); + return buildHttpRpcRequest(context, headers, "/", void 0, body); +}, "se_DescribeGlobalTableSettingsCommand"); +var se_DescribeImportCommand = /* @__PURE__ */ __name(async (input, context) => { + const headers = sharedHeaders("DescribeImport"); + let body; + body = JSON.stringify((0, 
import_smithy_client._json)(input)); + return buildHttpRpcRequest(context, headers, "/", void 0, body); +}, "se_DescribeImportCommand"); +var se_DescribeKinesisStreamingDestinationCommand = /* @__PURE__ */ __name(async (input, context) => { + const headers = sharedHeaders("DescribeKinesisStreamingDestination"); + let body; + body = JSON.stringify((0, import_smithy_client._json)(input)); + return buildHttpRpcRequest(context, headers, "/", void 0, body); +}, "se_DescribeKinesisStreamingDestinationCommand"); +var se_DescribeLimitsCommand = /* @__PURE__ */ __name(async (input, context) => { + const headers = sharedHeaders("DescribeLimits"); + let body; + body = JSON.stringify((0, import_smithy_client._json)(input)); + return buildHttpRpcRequest(context, headers, "/", void 0, body); +}, "se_DescribeLimitsCommand"); +var se_DescribeTableCommand = /* @__PURE__ */ __name(async (input, context) => { + const headers = sharedHeaders("DescribeTable"); + let body; + body = JSON.stringify((0, import_smithy_client._json)(input)); + return buildHttpRpcRequest(context, headers, "/", void 0, body); +}, "se_DescribeTableCommand"); +var se_DescribeTableReplicaAutoScalingCommand = /* @__PURE__ */ __name(async (input, context) => { + const headers = sharedHeaders("DescribeTableReplicaAutoScaling"); + let body; + body = JSON.stringify((0, import_smithy_client._json)(input)); + return buildHttpRpcRequest(context, headers, "/", void 0, body); +}, "se_DescribeTableReplicaAutoScalingCommand"); +var se_DescribeTimeToLiveCommand = /* @__PURE__ */ __name(async (input, context) => { + const headers = sharedHeaders("DescribeTimeToLive"); + let body; + body = JSON.stringify((0, import_smithy_client._json)(input)); + return buildHttpRpcRequest(context, headers, "/", void 0, body); +}, "se_DescribeTimeToLiveCommand"); +var se_DisableKinesisStreamingDestinationCommand = /* @__PURE__ */ __name(async (input, context) => { + const headers = sharedHeaders("DisableKinesisStreamingDestination"); + let 
body; + body = JSON.stringify((0, import_smithy_client._json)(input)); + return buildHttpRpcRequest(context, headers, "/", void 0, body); +}, "se_DisableKinesisStreamingDestinationCommand"); +var se_EnableKinesisStreamingDestinationCommand = /* @__PURE__ */ __name(async (input, context) => { + const headers = sharedHeaders("EnableKinesisStreamingDestination"); + let body; + body = JSON.stringify((0, import_smithy_client._json)(input)); + return buildHttpRpcRequest(context, headers, "/", void 0, body); +}, "se_EnableKinesisStreamingDestinationCommand"); +var se_ExecuteStatementCommand = /* @__PURE__ */ __name(async (input, context) => { + const headers = sharedHeaders("ExecuteStatement"); + let body; + body = JSON.stringify(se_ExecuteStatementInput(input, context)); + return buildHttpRpcRequest(context, headers, "/", void 0, body); +}, "se_ExecuteStatementCommand"); +var se_ExecuteTransactionCommand = /* @__PURE__ */ __name(async (input, context) => { + const headers = sharedHeaders("ExecuteTransaction"); + let body; + body = JSON.stringify(se_ExecuteTransactionInput(input, context)); + return buildHttpRpcRequest(context, headers, "/", void 0, body); +}, "se_ExecuteTransactionCommand"); +var se_ExportTableToPointInTimeCommand = /* @__PURE__ */ __name(async (input, context) => { + const headers = sharedHeaders("ExportTableToPointInTime"); + let body; + body = JSON.stringify(se_ExportTableToPointInTimeInput(input, context)); + return buildHttpRpcRequest(context, headers, "/", void 0, body); +}, "se_ExportTableToPointInTimeCommand"); +var se_GetItemCommand = /* @__PURE__ */ __name(async (input, context) => { + const headers = sharedHeaders("GetItem"); + let body; + body = JSON.stringify(se_GetItemInput(input, context)); + return buildHttpRpcRequest(context, headers, "/", void 0, body); +}, "se_GetItemCommand"); +var se_GetResourcePolicyCommand = /* @__PURE__ */ __name(async (input, context) => { + const headers = sharedHeaders("GetResourcePolicy"); + let body; + body = 
JSON.stringify((0, import_smithy_client._json)(input)); + return buildHttpRpcRequest(context, headers, "/", void 0, body); +}, "se_GetResourcePolicyCommand"); +var se_ImportTableCommand = /* @__PURE__ */ __name(async (input, context) => { + const headers = sharedHeaders("ImportTable"); + let body; + body = JSON.stringify(se_ImportTableInput(input, context)); + return buildHttpRpcRequest(context, headers, "/", void 0, body); +}, "se_ImportTableCommand"); +var se_ListBackupsCommand = /* @__PURE__ */ __name(async (input, context) => { + const headers = sharedHeaders("ListBackups"); + let body; + body = JSON.stringify(se_ListBackupsInput(input, context)); + return buildHttpRpcRequest(context, headers, "/", void 0, body); +}, "se_ListBackupsCommand"); +var se_ListContributorInsightsCommand = /* @__PURE__ */ __name(async (input, context) => { + const headers = sharedHeaders("ListContributorInsights"); + let body; + body = JSON.stringify((0, import_smithy_client._json)(input)); + return buildHttpRpcRequest(context, headers, "/", void 0, body); +}, "se_ListContributorInsightsCommand"); +var se_ListExportsCommand = /* @__PURE__ */ __name(async (input, context) => { + const headers = sharedHeaders("ListExports"); + let body; + body = JSON.stringify((0, import_smithy_client._json)(input)); + return buildHttpRpcRequest(context, headers, "/", void 0, body); +}, "se_ListExportsCommand"); +var se_ListGlobalTablesCommand = /* @__PURE__ */ __name(async (input, context) => { + const headers = sharedHeaders("ListGlobalTables"); + let body; + body = JSON.stringify((0, import_smithy_client._json)(input)); + return buildHttpRpcRequest(context, headers, "/", void 0, body); +}, "se_ListGlobalTablesCommand"); +var se_ListImportsCommand = /* @__PURE__ */ __name(async (input, context) => { + const headers = sharedHeaders("ListImports"); + let body; + body = JSON.stringify((0, import_smithy_client._json)(input)); + return buildHttpRpcRequest(context, headers, "/", void 0, body); +}, 
"se_ListImportsCommand"); +var se_ListTablesCommand = /* @__PURE__ */ __name(async (input, context) => { + const headers = sharedHeaders("ListTables"); + let body; + body = JSON.stringify((0, import_smithy_client._json)(input)); + return buildHttpRpcRequest(context, headers, "/", void 0, body); +}, "se_ListTablesCommand"); +var se_ListTagsOfResourceCommand = /* @__PURE__ */ __name(async (input, context) => { + const headers = sharedHeaders("ListTagsOfResource"); + let body; + body = JSON.stringify((0, import_smithy_client._json)(input)); + return buildHttpRpcRequest(context, headers, "/", void 0, body); +}, "se_ListTagsOfResourceCommand"); +var se_PutItemCommand = /* @__PURE__ */ __name(async (input, context) => { + const headers = sharedHeaders("PutItem"); + let body; + body = JSON.stringify(se_PutItemInput(input, context)); + return buildHttpRpcRequest(context, headers, "/", void 0, body); +}, "se_PutItemCommand"); +var se_PutResourcePolicyCommand = /* @__PURE__ */ __name(async (input, context) => { + const headers = sharedHeaders("PutResourcePolicy"); + let body; + body = JSON.stringify((0, import_smithy_client._json)(input)); + return buildHttpRpcRequest(context, headers, "/", void 0, body); +}, "se_PutResourcePolicyCommand"); +var se_QueryCommand = /* @__PURE__ */ __name(async (input, context) => { + const headers = sharedHeaders("Query"); + let body; + body = JSON.stringify(se_QueryInput(input, context)); + return buildHttpRpcRequest(context, headers, "/", void 0, body); +}, "se_QueryCommand"); +var se_RestoreTableFromBackupCommand = /* @__PURE__ */ __name(async (input, context) => { + const headers = sharedHeaders("RestoreTableFromBackup"); + let body; + body = JSON.stringify((0, import_smithy_client._json)(input)); + return buildHttpRpcRequest(context, headers, "/", void 0, body); +}, "se_RestoreTableFromBackupCommand"); +var se_RestoreTableToPointInTimeCommand = /* @__PURE__ */ __name(async (input, context) => { + const headers = 
sharedHeaders("RestoreTableToPointInTime"); + let body; + body = JSON.stringify(se_RestoreTableToPointInTimeInput(input, context)); + return buildHttpRpcRequest(context, headers, "/", void 0, body); +}, "se_RestoreTableToPointInTimeCommand"); +var se_ScanCommand = /* @__PURE__ */ __name(async (input, context) => { + const headers = sharedHeaders("Scan"); + let body; + body = JSON.stringify(se_ScanInput(input, context)); + return buildHttpRpcRequest(context, headers, "/", void 0, body); +}, "se_ScanCommand"); +var se_TagResourceCommand = /* @__PURE__ */ __name(async (input, context) => { + const headers = sharedHeaders("TagResource"); + let body; + body = JSON.stringify((0, import_smithy_client._json)(input)); + return buildHttpRpcRequest(context, headers, "/", void 0, body); +}, "se_TagResourceCommand"); +var se_TransactGetItemsCommand = /* @__PURE__ */ __name(async (input, context) => { + const headers = sharedHeaders("TransactGetItems"); + let body; + body = JSON.stringify(se_TransactGetItemsInput(input, context)); + return buildHttpRpcRequest(context, headers, "/", void 0, body); +}, "se_TransactGetItemsCommand"); +var se_TransactWriteItemsCommand = /* @__PURE__ */ __name(async (input, context) => { + const headers = sharedHeaders("TransactWriteItems"); + let body; + body = JSON.stringify(se_TransactWriteItemsInput(input, context)); + return buildHttpRpcRequest(context, headers, "/", void 0, body); +}, "se_TransactWriteItemsCommand"); +var se_UntagResourceCommand = /* @__PURE__ */ __name(async (input, context) => { + const headers = sharedHeaders("UntagResource"); + let body; + body = JSON.stringify((0, import_smithy_client._json)(input)); + return buildHttpRpcRequest(context, headers, "/", void 0, body); +}, "se_UntagResourceCommand"); +var se_UpdateContinuousBackupsCommand = /* @__PURE__ */ __name(async (input, context) => { + const headers = sharedHeaders("UpdateContinuousBackups"); + let body; + body = JSON.stringify((0, import_smithy_client._json)(input)); 
+ return buildHttpRpcRequest(context, headers, "/", void 0, body); +}, "se_UpdateContinuousBackupsCommand"); +var se_UpdateContributorInsightsCommand = /* @__PURE__ */ __name(async (input, context) => { + const headers = sharedHeaders("UpdateContributorInsights"); + let body; + body = JSON.stringify((0, import_smithy_client._json)(input)); + return buildHttpRpcRequest(context, headers, "/", void 0, body); +}, "se_UpdateContributorInsightsCommand"); +var se_UpdateGlobalTableCommand = /* @__PURE__ */ __name(async (input, context) => { + const headers = sharedHeaders("UpdateGlobalTable"); + let body; + body = JSON.stringify((0, import_smithy_client._json)(input)); + return buildHttpRpcRequest(context, headers, "/", void 0, body); +}, "se_UpdateGlobalTableCommand"); +var se_UpdateGlobalTableSettingsCommand = /* @__PURE__ */ __name(async (input, context) => { + const headers = sharedHeaders("UpdateGlobalTableSettings"); + let body; + body = JSON.stringify(se_UpdateGlobalTableSettingsInput(input, context)); + return buildHttpRpcRequest(context, headers, "/", void 0, body); +}, "se_UpdateGlobalTableSettingsCommand"); +var se_UpdateItemCommand = /* @__PURE__ */ __name(async (input, context) => { + const headers = sharedHeaders("UpdateItem"); + let body; + body = JSON.stringify(se_UpdateItemInput(input, context)); + return buildHttpRpcRequest(context, headers, "/", void 0, body); +}, "se_UpdateItemCommand"); +var se_UpdateKinesisStreamingDestinationCommand = /* @__PURE__ */ __name(async (input, context) => { + const headers = sharedHeaders("UpdateKinesisStreamingDestination"); + let body; + body = JSON.stringify((0, import_smithy_client._json)(input)); + return buildHttpRpcRequest(context, headers, "/", void 0, body); +}, "se_UpdateKinesisStreamingDestinationCommand"); +var se_UpdateTableCommand = /* @__PURE__ */ __name(async (input, context) => { + const headers = sharedHeaders("UpdateTable"); + let body; + body = JSON.stringify((0, import_smithy_client._json)(input)); + 
return buildHttpRpcRequest(context, headers, "/", void 0, body); +}, "se_UpdateTableCommand"); +var se_UpdateTableReplicaAutoScalingCommand = /* @__PURE__ */ __name(async (input, context) => { + const headers = sharedHeaders("UpdateTableReplicaAutoScaling"); + let body; + body = JSON.stringify(se_UpdateTableReplicaAutoScalingInput(input, context)); + return buildHttpRpcRequest(context, headers, "/", void 0, body); +}, "se_UpdateTableReplicaAutoScalingCommand"); +var se_UpdateTimeToLiveCommand = /* @__PURE__ */ __name(async (input, context) => { + const headers = sharedHeaders("UpdateTimeToLive"); + let body; + body = JSON.stringify((0, import_smithy_client._json)(input)); + return buildHttpRpcRequest(context, headers, "/", void 0, body); +}, "se_UpdateTimeToLiveCommand"); +var de_BatchExecuteStatementCommand = /* @__PURE__ */ __name(async (output, context) => { + if (output.statusCode >= 300) { + return de_CommandError(output, context); + } + const data = await (0, import_core.parseJsonBody)(output.body, context); + let contents = {}; + contents = de_BatchExecuteStatementOutput(data, context); + const response = { + $metadata: deserializeMetadata(output), + ...contents + }; + return response; +}, "de_BatchExecuteStatementCommand"); +var de_BatchGetItemCommand = /* @__PURE__ */ __name(async (output, context) => { + if (output.statusCode >= 300) { + return de_CommandError(output, context); + } + const data = await (0, import_core.parseJsonBody)(output.body, context); + let contents = {}; + contents = de_BatchGetItemOutput(data, context); + const response = { + $metadata: deserializeMetadata(output), + ...contents + }; + return response; +}, "de_BatchGetItemCommand"); +var de_BatchWriteItemCommand = /* @__PURE__ */ __name(async (output, context) => { + if (output.statusCode >= 300) { + return de_CommandError(output, context); + } + const data = await (0, import_core.parseJsonBody)(output.body, context); + let contents = {}; + contents = de_BatchWriteItemOutput(data, 
context); + const response = { + $metadata: deserializeMetadata(output), + ...contents + }; + return response; +}, "de_BatchWriteItemCommand"); +var de_CreateBackupCommand = /* @__PURE__ */ __name(async (output, context) => { + if (output.statusCode >= 300) { + return de_CommandError(output, context); + } + const data = await (0, import_core.parseJsonBody)(output.body, context); + let contents = {}; + contents = de_CreateBackupOutput(data, context); + const response = { + $metadata: deserializeMetadata(output), + ...contents + }; + return response; +}, "de_CreateBackupCommand"); +var de_CreateGlobalTableCommand = /* @__PURE__ */ __name(async (output, context) => { + if (output.statusCode >= 300) { + return de_CommandError(output, context); + } + const data = await (0, import_core.parseJsonBody)(output.body, context); + let contents = {}; + contents = de_CreateGlobalTableOutput(data, context); + const response = { + $metadata: deserializeMetadata(output), + ...contents + }; + return response; +}, "de_CreateGlobalTableCommand"); +var de_CreateTableCommand = /* @__PURE__ */ __name(async (output, context) => { + if (output.statusCode >= 300) { + return de_CommandError(output, context); + } + const data = await (0, import_core.parseJsonBody)(output.body, context); + let contents = {}; + contents = de_CreateTableOutput(data, context); + const response = { + $metadata: deserializeMetadata(output), + ...contents + }; + return response; +}, "de_CreateTableCommand"); +var de_DeleteBackupCommand = /* @__PURE__ */ __name(async (output, context) => { + if (output.statusCode >= 300) { + return de_CommandError(output, context); + } + const data = await (0, import_core.parseJsonBody)(output.body, context); + let contents = {}; + contents = de_DeleteBackupOutput(data, context); + const response = { + $metadata: deserializeMetadata(output), + ...contents + }; + return response; +}, "de_DeleteBackupCommand"); +var de_DeleteItemCommand = /* @__PURE__ */ __name(async (output, context) 
=> { + if (output.statusCode >= 300) { + return de_CommandError(output, context); + } + const data = await (0, import_core.parseJsonBody)(output.body, context); + let contents = {}; + contents = de_DeleteItemOutput(data, context); + const response = { + $metadata: deserializeMetadata(output), + ...contents + }; + return response; +}, "de_DeleteItemCommand"); +var de_DeleteResourcePolicyCommand = /* @__PURE__ */ __name(async (output, context) => { + if (output.statusCode >= 300) { + return de_CommandError(output, context); + } + const data = await (0, import_core.parseJsonBody)(output.body, context); + let contents = {}; + contents = (0, import_smithy_client._json)(data); + const response = { + $metadata: deserializeMetadata(output), + ...contents + }; + return response; +}, "de_DeleteResourcePolicyCommand"); +var de_DeleteTableCommand = /* @__PURE__ */ __name(async (output, context) => { + if (output.statusCode >= 300) { + return de_CommandError(output, context); + } + const data = await (0, import_core.parseJsonBody)(output.body, context); + let contents = {}; + contents = de_DeleteTableOutput(data, context); + const response = { + $metadata: deserializeMetadata(output), + ...contents + }; + return response; +}, "de_DeleteTableCommand"); +var de_DescribeBackupCommand = /* @__PURE__ */ __name(async (output, context) => { + if (output.statusCode >= 300) { + return de_CommandError(output, context); + } + const data = await (0, import_core.parseJsonBody)(output.body, context); + let contents = {}; + contents = de_DescribeBackupOutput(data, context); + const response = { + $metadata: deserializeMetadata(output), + ...contents + }; + return response; +}, "de_DescribeBackupCommand"); +var de_DescribeContinuousBackupsCommand = /* @__PURE__ */ __name(async (output, context) => { + if (output.statusCode >= 300) { + return de_CommandError(output, context); + } + const data = await (0, import_core.parseJsonBody)(output.body, context); + let contents = {}; + contents = 
de_DescribeContinuousBackupsOutput(data, context); + const response = { + $metadata: deserializeMetadata(output), + ...contents + }; + return response; +}, "de_DescribeContinuousBackupsCommand"); +var de_DescribeContributorInsightsCommand = /* @__PURE__ */ __name(async (output, context) => { + if (output.statusCode >= 300) { + return de_CommandError(output, context); + } + const data = await (0, import_core.parseJsonBody)(output.body, context); + let contents = {}; + contents = de_DescribeContributorInsightsOutput(data, context); + const response = { + $metadata: deserializeMetadata(output), + ...contents + }; + return response; +}, "de_DescribeContributorInsightsCommand"); +var de_DescribeEndpointsCommand = /* @__PURE__ */ __name(async (output, context) => { + if (output.statusCode >= 300) { + return de_CommandError(output, context); + } + const data = await (0, import_core.parseJsonBody)(output.body, context); + let contents = {}; + contents = (0, import_smithy_client._json)(data); + const response = { + $metadata: deserializeMetadata(output), + ...contents + }; + return response; +}, "de_DescribeEndpointsCommand"); +var de_DescribeExportCommand = /* @__PURE__ */ __name(async (output, context) => { + if (output.statusCode >= 300) { + return de_CommandError(output, context); + } + const data = await (0, import_core.parseJsonBody)(output.body, context); + let contents = {}; + contents = de_DescribeExportOutput(data, context); + const response = { + $metadata: deserializeMetadata(output), + ...contents + }; + return response; +}, "de_DescribeExportCommand"); +var de_DescribeGlobalTableCommand = /* @__PURE__ */ __name(async (output, context) => { + if (output.statusCode >= 300) { + return de_CommandError(output, context); + } + const data = await (0, import_core.parseJsonBody)(output.body, context); + let contents = {}; + contents = de_DescribeGlobalTableOutput(data, context); + const response = { + $metadata: deserializeMetadata(output), + ...contents + }; + return 
response; +}, "de_DescribeGlobalTableCommand"); +var de_DescribeGlobalTableSettingsCommand = /* @__PURE__ */ __name(async (output, context) => { + if (output.statusCode >= 300) { + return de_CommandError(output, context); + } + const data = await (0, import_core.parseJsonBody)(output.body, context); + let contents = {}; + contents = de_DescribeGlobalTableSettingsOutput(data, context); + const response = { + $metadata: deserializeMetadata(output), + ...contents + }; + return response; +}, "de_DescribeGlobalTableSettingsCommand"); +var de_DescribeImportCommand = /* @__PURE__ */ __name(async (output, context) => { + if (output.statusCode >= 300) { + return de_CommandError(output, context); + } + const data = await (0, import_core.parseJsonBody)(output.body, context); + let contents = {}; + contents = de_DescribeImportOutput(data, context); + const response = { + $metadata: deserializeMetadata(output), + ...contents + }; + return response; +}, "de_DescribeImportCommand"); +var de_DescribeKinesisStreamingDestinationCommand = /* @__PURE__ */ __name(async (output, context) => { + if (output.statusCode >= 300) { + return de_CommandError(output, context); + } + const data = await (0, import_core.parseJsonBody)(output.body, context); + let contents = {}; + contents = (0, import_smithy_client._json)(data); + const response = { + $metadata: deserializeMetadata(output), + ...contents + }; + return response; +}, "de_DescribeKinesisStreamingDestinationCommand"); +var de_DescribeLimitsCommand = /* @__PURE__ */ __name(async (output, context) => { + if (output.statusCode >= 300) { + return de_CommandError(output, context); + } + const data = await (0, import_core.parseJsonBody)(output.body, context); + let contents = {}; + contents = (0, import_smithy_client._json)(data); + const response = { + $metadata: deserializeMetadata(output), + ...contents + }; + return response; +}, "de_DescribeLimitsCommand"); +var de_DescribeTableCommand = /* @__PURE__ */ __name(async (output, context) => 
{ + if (output.statusCode >= 300) { + return de_CommandError(output, context); + } + const data = await (0, import_core.parseJsonBody)(output.body, context); + let contents = {}; + contents = de_DescribeTableOutput(data, context); + const response = { + $metadata: deserializeMetadata(output), + ...contents + }; + return response; +}, "de_DescribeTableCommand"); +var de_DescribeTableReplicaAutoScalingCommand = /* @__PURE__ */ __name(async (output, context) => { + if (output.statusCode >= 300) { + return de_CommandError(output, context); + } + const data = await (0, import_core.parseJsonBody)(output.body, context); + let contents = {}; + contents = de_DescribeTableReplicaAutoScalingOutput(data, context); + const response = { + $metadata: deserializeMetadata(output), + ...contents + }; + return response; +}, "de_DescribeTableReplicaAutoScalingCommand"); +var de_DescribeTimeToLiveCommand = /* @__PURE__ */ __name(async (output, context) => { + if (output.statusCode >= 300) { + return de_CommandError(output, context); + } + const data = await (0, import_core.parseJsonBody)(output.body, context); + let contents = {}; + contents = (0, import_smithy_client._json)(data); + const response = { + $metadata: deserializeMetadata(output), + ...contents + }; + return response; +}, "de_DescribeTimeToLiveCommand"); +var de_DisableKinesisStreamingDestinationCommand = /* @__PURE__ */ __name(async (output, context) => { + if (output.statusCode >= 300) { + return de_CommandError(output, context); + } + const data = await (0, import_core.parseJsonBody)(output.body, context); + let contents = {}; + contents = (0, import_smithy_client._json)(data); + const response = { + $metadata: deserializeMetadata(output), + ...contents + }; + return response; +}, "de_DisableKinesisStreamingDestinationCommand"); +var de_EnableKinesisStreamingDestinationCommand = /* @__PURE__ */ __name(async (output, context) => { + if (output.statusCode >= 300) { + return de_CommandError(output, context); + } + const 
data = await (0, import_core.parseJsonBody)(output.body, context); + let contents = {}; + contents = (0, import_smithy_client._json)(data); + const response = { + $metadata: deserializeMetadata(output), + ...contents + }; + return response; +}, "de_EnableKinesisStreamingDestinationCommand"); +var de_ExecuteStatementCommand = /* @__PURE__ */ __name(async (output, context) => { + if (output.statusCode >= 300) { + return de_CommandError(output, context); + } + const data = await (0, import_core.parseJsonBody)(output.body, context); + let contents = {}; + contents = de_ExecuteStatementOutput(data, context); + const response = { + $metadata: deserializeMetadata(output), + ...contents + }; + return response; +}, "de_ExecuteStatementCommand"); +var de_ExecuteTransactionCommand = /* @__PURE__ */ __name(async (output, context) => { + if (output.statusCode >= 300) { + return de_CommandError(output, context); + } + const data = await (0, import_core.parseJsonBody)(output.body, context); + let contents = {}; + contents = de_ExecuteTransactionOutput(data, context); + const response = { + $metadata: deserializeMetadata(output), + ...contents + }; + return response; +}, "de_ExecuteTransactionCommand"); +var de_ExportTableToPointInTimeCommand = /* @__PURE__ */ __name(async (output, context) => { + if (output.statusCode >= 300) { + return de_CommandError(output, context); + } + const data = await (0, import_core.parseJsonBody)(output.body, context); + let contents = {}; + contents = de_ExportTableToPointInTimeOutput(data, context); + const response = { + $metadata: deserializeMetadata(output), + ...contents + }; + return response; +}, "de_ExportTableToPointInTimeCommand"); +var de_GetItemCommand = /* @__PURE__ */ __name(async (output, context) => { + if (output.statusCode >= 300) { + return de_CommandError(output, context); + } + const data = await (0, import_core.parseJsonBody)(output.body, context); + let contents = {}; + contents = de_GetItemOutput(data, context); + const 
response = { + $metadata: deserializeMetadata(output), + ...contents + }; + return response; +}, "de_GetItemCommand"); +var de_GetResourcePolicyCommand = /* @__PURE__ */ __name(async (output, context) => { + if (output.statusCode >= 300) { + return de_CommandError(output, context); + } + const data = await (0, import_core.parseJsonBody)(output.body, context); + let contents = {}; + contents = (0, import_smithy_client._json)(data); + const response = { + $metadata: deserializeMetadata(output), + ...contents + }; + return response; +}, "de_GetResourcePolicyCommand"); +var de_ImportTableCommand = /* @__PURE__ */ __name(async (output, context) => { + if (output.statusCode >= 300) { + return de_CommandError(output, context); + } + const data = await (0, import_core.parseJsonBody)(output.body, context); + let contents = {}; + contents = de_ImportTableOutput(data, context); + const response = { + $metadata: deserializeMetadata(output), + ...contents + }; + return response; +}, "de_ImportTableCommand"); +var de_ListBackupsCommand = /* @__PURE__ */ __name(async (output, context) => { + if (output.statusCode >= 300) { + return de_CommandError(output, context); + } + const data = await (0, import_core.parseJsonBody)(output.body, context); + let contents = {}; + contents = de_ListBackupsOutput(data, context); + const response = { + $metadata: deserializeMetadata(output), + ...contents + }; + return response; +}, "de_ListBackupsCommand"); +var de_ListContributorInsightsCommand = /* @__PURE__ */ __name(async (output, context) => { + if (output.statusCode >= 300) { + return de_CommandError(output, context); + } + const data = await (0, import_core.parseJsonBody)(output.body, context); + let contents = {}; + contents = (0, import_smithy_client._json)(data); + const response = { + $metadata: deserializeMetadata(output), + ...contents + }; + return response; +}, "de_ListContributorInsightsCommand"); +var de_ListExportsCommand = /* @__PURE__ */ __name(async (output, context) => { + 
if (output.statusCode >= 300) { + return de_CommandError(output, context); + } + const data = await (0, import_core.parseJsonBody)(output.body, context); + let contents = {}; + contents = (0, import_smithy_client._json)(data); + const response = { + $metadata: deserializeMetadata(output), + ...contents + }; + return response; +}, "de_ListExportsCommand"); +var de_ListGlobalTablesCommand = /* @__PURE__ */ __name(async (output, context) => { + if (output.statusCode >= 300) { + return de_CommandError(output, context); + } + const data = await (0, import_core.parseJsonBody)(output.body, context); + let contents = {}; + contents = (0, import_smithy_client._json)(data); + const response = { + $metadata: deserializeMetadata(output), + ...contents + }; + return response; +}, "de_ListGlobalTablesCommand"); +var de_ListImportsCommand = /* @__PURE__ */ __name(async (output, context) => { + if (output.statusCode >= 300) { + return de_CommandError(output, context); + } + const data = await (0, import_core.parseJsonBody)(output.body, context); + let contents = {}; + contents = de_ListImportsOutput(data, context); + const response = { + $metadata: deserializeMetadata(output), + ...contents + }; + return response; +}, "de_ListImportsCommand"); +var de_ListTablesCommand = /* @__PURE__ */ __name(async (output, context) => { + if (output.statusCode >= 300) { + return de_CommandError(output, context); + } + const data = await (0, import_core.parseJsonBody)(output.body, context); + let contents = {}; + contents = (0, import_smithy_client._json)(data); + const response = { + $metadata: deserializeMetadata(output), + ...contents + }; + return response; +}, "de_ListTablesCommand"); +var de_ListTagsOfResourceCommand = /* @__PURE__ */ __name(async (output, context) => { + if (output.statusCode >= 300) { + return de_CommandError(output, context); + } + const data = await (0, import_core.parseJsonBody)(output.body, context); + let contents = {}; + contents = (0, 
import_smithy_client._json)(data); + const response = { + $metadata: deserializeMetadata(output), + ...contents + }; + return response; +}, "de_ListTagsOfResourceCommand"); +var de_PutItemCommand = /* @__PURE__ */ __name(async (output, context) => { + if (output.statusCode >= 300) { + return de_CommandError(output, context); + } + const data = await (0, import_core.parseJsonBody)(output.body, context); + let contents = {}; + contents = de_PutItemOutput(data, context); + const response = { + $metadata: deserializeMetadata(output), + ...contents + }; + return response; +}, "de_PutItemCommand"); +var de_PutResourcePolicyCommand = /* @__PURE__ */ __name(async (output, context) => { + if (output.statusCode >= 300) { + return de_CommandError(output, context); + } + const data = await (0, import_core.parseJsonBody)(output.body, context); + let contents = {}; + contents = (0, import_smithy_client._json)(data); + const response = { + $metadata: deserializeMetadata(output), + ...contents + }; + return response; +}, "de_PutResourcePolicyCommand"); +var de_QueryCommand = /* @__PURE__ */ __name(async (output, context) => { + if (output.statusCode >= 300) { + return de_CommandError(output, context); + } + const data = await (0, import_core.parseJsonBody)(output.body, context); + let contents = {}; + contents = de_QueryOutput(data, context); + const response = { + $metadata: deserializeMetadata(output), + ...contents + }; + return response; +}, "de_QueryCommand"); +var de_RestoreTableFromBackupCommand = /* @__PURE__ */ __name(async (output, context) => { + if (output.statusCode >= 300) { + return de_CommandError(output, context); + } + const data = await (0, import_core.parseJsonBody)(output.body, context); + let contents = {}; + contents = de_RestoreTableFromBackupOutput(data, context); + const response = { + $metadata: deserializeMetadata(output), + ...contents + }; + return response; +}, "de_RestoreTableFromBackupCommand"); +var de_RestoreTableToPointInTimeCommand = /* 
@__PURE__ */ __name(async (output, context) => { + if (output.statusCode >= 300) { + return de_CommandError(output, context); + } + const data = await (0, import_core.parseJsonBody)(output.body, context); + let contents = {}; + contents = de_RestoreTableToPointInTimeOutput(data, context); + const response = { + $metadata: deserializeMetadata(output), + ...contents + }; + return response; +}, "de_RestoreTableToPointInTimeCommand"); +var de_ScanCommand = /* @__PURE__ */ __name(async (output, context) => { + if (output.statusCode >= 300) { + return de_CommandError(output, context); + } + const data = await (0, import_core.parseJsonBody)(output.body, context); + let contents = {}; + contents = de_ScanOutput(data, context); + const response = { + $metadata: deserializeMetadata(output), + ...contents + }; + return response; +}, "de_ScanCommand"); +var de_TagResourceCommand = /* @__PURE__ */ __name(async (output, context) => { + if (output.statusCode >= 300) { + return de_CommandError(output, context); + } + await (0, import_smithy_client.collectBody)(output.body, context); + const response = { + $metadata: deserializeMetadata(output) + }; + return response; +}, "de_TagResourceCommand"); +var de_TransactGetItemsCommand = /* @__PURE__ */ __name(async (output, context) => { + if (output.statusCode >= 300) { + return de_CommandError(output, context); + } + const data = await (0, import_core.parseJsonBody)(output.body, context); + let contents = {}; + contents = de_TransactGetItemsOutput(data, context); + const response = { + $metadata: deserializeMetadata(output), + ...contents + }; + return response; +}, "de_TransactGetItemsCommand"); +var de_TransactWriteItemsCommand = /* @__PURE__ */ __name(async (output, context) => { + if (output.statusCode >= 300) { + return de_CommandError(output, context); + } + const data = await (0, import_core.parseJsonBody)(output.body, context); + let contents = {}; + contents = de_TransactWriteItemsOutput(data, context); + const response = { + 
$metadata: deserializeMetadata(output), + ...contents + }; + return response; +}, "de_TransactWriteItemsCommand"); +var de_UntagResourceCommand = /* @__PURE__ */ __name(async (output, context) => { + if (output.statusCode >= 300) { + return de_CommandError(output, context); + } + await (0, import_smithy_client.collectBody)(output.body, context); + const response = { + $metadata: deserializeMetadata(output) + }; + return response; +}, "de_UntagResourceCommand"); +var de_UpdateContinuousBackupsCommand = /* @__PURE__ */ __name(async (output, context) => { + if (output.statusCode >= 300) { + return de_CommandError(output, context); + } + const data = await (0, import_core.parseJsonBody)(output.body, context); + let contents = {}; + contents = de_UpdateContinuousBackupsOutput(data, context); + const response = { + $metadata: deserializeMetadata(output), + ...contents + }; + return response; +}, "de_UpdateContinuousBackupsCommand"); +var de_UpdateContributorInsightsCommand = /* @__PURE__ */ __name(async (output, context) => { + if (output.statusCode >= 300) { + return de_CommandError(output, context); + } + const data = await (0, import_core.parseJsonBody)(output.body, context); + let contents = {}; + contents = (0, import_smithy_client._json)(data); + const response = { + $metadata: deserializeMetadata(output), + ...contents + }; + return response; +}, "de_UpdateContributorInsightsCommand"); +var de_UpdateGlobalTableCommand = /* @__PURE__ */ __name(async (output, context) => { + if (output.statusCode >= 300) { + return de_CommandError(output, context); + } + const data = await (0, import_core.parseJsonBody)(output.body, context); + let contents = {}; + contents = de_UpdateGlobalTableOutput(data, context); + const response = { + $metadata: deserializeMetadata(output), + ...contents + }; + return response; +}, "de_UpdateGlobalTableCommand"); +var de_UpdateGlobalTableSettingsCommand = /* @__PURE__ */ __name(async (output, context) => { + if (output.statusCode >= 300) { + 
return de_CommandError(output, context); + } + const data = await (0, import_core.parseJsonBody)(output.body, context); + let contents = {}; + contents = de_UpdateGlobalTableSettingsOutput(data, context); + const response = { + $metadata: deserializeMetadata(output), + ...contents + }; + return response; +}, "de_UpdateGlobalTableSettingsCommand"); +var de_UpdateItemCommand = /* @__PURE__ */ __name(async (output, context) => { + if (output.statusCode >= 300) { + return de_CommandError(output, context); + } + const data = await (0, import_core.parseJsonBody)(output.body, context); + let contents = {}; + contents = de_UpdateItemOutput(data, context); + const response = { + $metadata: deserializeMetadata(output), + ...contents + }; + return response; +}, "de_UpdateItemCommand"); +var de_UpdateKinesisStreamingDestinationCommand = /* @__PURE__ */ __name(async (output, context) => { + if (output.statusCode >= 300) { + return de_CommandError(output, context); + } + const data = await (0, import_core.parseJsonBody)(output.body, context); + let contents = {}; + contents = (0, import_smithy_client._json)(data); + const response = { + $metadata: deserializeMetadata(output), + ...contents + }; + return response; +}, "de_UpdateKinesisStreamingDestinationCommand"); +var de_UpdateTableCommand = /* @__PURE__ */ __name(async (output, context) => { + if (output.statusCode >= 300) { + return de_CommandError(output, context); + } + const data = await (0, import_core.parseJsonBody)(output.body, context); + let contents = {}; + contents = de_UpdateTableOutput(data, context); + const response = { + $metadata: deserializeMetadata(output), + ...contents + }; + return response; +}, "de_UpdateTableCommand"); +var de_UpdateTableReplicaAutoScalingCommand = /* @__PURE__ */ __name(async (output, context) => { + if (output.statusCode >= 300) { + return de_CommandError(output, context); + } + const data = await (0, import_core.parseJsonBody)(output.body, context); + let contents = {}; + contents = 
de_UpdateTableReplicaAutoScalingOutput(data, context); + const response = { + $metadata: deserializeMetadata(output), + ...contents + }; + return response; +}, "de_UpdateTableReplicaAutoScalingCommand"); +var de_UpdateTimeToLiveCommand = /* @__PURE__ */ __name(async (output, context) => { + if (output.statusCode >= 300) { + return de_CommandError(output, context); + } + const data = await (0, import_core.parseJsonBody)(output.body, context); + let contents = {}; + contents = (0, import_smithy_client._json)(data); + const response = { + $metadata: deserializeMetadata(output), + ...contents + }; + return response; +}, "de_UpdateTimeToLiveCommand"); +var de_CommandError = /* @__PURE__ */ __name(async (output, context) => { + const parsedOutput = { + ...output, + body: await (0, import_core.parseJsonErrorBody)(output.body, context) + }; + const errorCode = (0, import_core.loadRestJsonErrorCode)(output, parsedOutput.body); + switch (errorCode) { + case "InternalServerError": + case "com.amazonaws.dynamodb#InternalServerError": + throw await de_InternalServerErrorRes(parsedOutput, context); + case "RequestLimitExceeded": + case "com.amazonaws.dynamodb#RequestLimitExceeded": + throw await de_RequestLimitExceededRes(parsedOutput, context); + case "InvalidEndpointException": + case "com.amazonaws.dynamodb#InvalidEndpointException": + throw await de_InvalidEndpointExceptionRes(parsedOutput, context); + case "ProvisionedThroughputExceededException": + case "com.amazonaws.dynamodb#ProvisionedThroughputExceededException": + throw await de_ProvisionedThroughputExceededExceptionRes(parsedOutput, context); + case "ResourceNotFoundException": + case "com.amazonaws.dynamodb#ResourceNotFoundException": + throw await de_ResourceNotFoundExceptionRes(parsedOutput, context); + case "ItemCollectionSizeLimitExceededException": + case "com.amazonaws.dynamodb#ItemCollectionSizeLimitExceededException": + throw await de_ItemCollectionSizeLimitExceededExceptionRes(parsedOutput, context); + case 
"BackupInUseException": + case "com.amazonaws.dynamodb#BackupInUseException": + throw await de_BackupInUseExceptionRes(parsedOutput, context); + case "ContinuousBackupsUnavailableException": + case "com.amazonaws.dynamodb#ContinuousBackupsUnavailableException": + throw await de_ContinuousBackupsUnavailableExceptionRes(parsedOutput, context); + case "LimitExceededException": + case "com.amazonaws.dynamodb#LimitExceededException": + throw await de_LimitExceededExceptionRes(parsedOutput, context); + case "TableInUseException": + case "com.amazonaws.dynamodb#TableInUseException": + throw await de_TableInUseExceptionRes(parsedOutput, context); + case "TableNotFoundException": + case "com.amazonaws.dynamodb#TableNotFoundException": + throw await de_TableNotFoundExceptionRes(parsedOutput, context); + case "GlobalTableAlreadyExistsException": + case "com.amazonaws.dynamodb#GlobalTableAlreadyExistsException": + throw await de_GlobalTableAlreadyExistsExceptionRes(parsedOutput, context); + case "ResourceInUseException": + case "com.amazonaws.dynamodb#ResourceInUseException": + throw await de_ResourceInUseExceptionRes(parsedOutput, context); + case "BackupNotFoundException": + case "com.amazonaws.dynamodb#BackupNotFoundException": + throw await de_BackupNotFoundExceptionRes(parsedOutput, context); + case "ConditionalCheckFailedException": + case "com.amazonaws.dynamodb#ConditionalCheckFailedException": + throw await de_ConditionalCheckFailedExceptionRes(parsedOutput, context); + case "ReplicatedWriteConflictException": + case "com.amazonaws.dynamodb#ReplicatedWriteConflictException": + throw await de_ReplicatedWriteConflictExceptionRes(parsedOutput, context); + case "TransactionConflictException": + case "com.amazonaws.dynamodb#TransactionConflictException": + throw await de_TransactionConflictExceptionRes(parsedOutput, context); + case "PolicyNotFoundException": + case "com.amazonaws.dynamodb#PolicyNotFoundException": + throw await de_PolicyNotFoundExceptionRes(parsedOutput, 
context); + case "ExportNotFoundException": + case "com.amazonaws.dynamodb#ExportNotFoundException": + throw await de_ExportNotFoundExceptionRes(parsedOutput, context); + case "GlobalTableNotFoundException": + case "com.amazonaws.dynamodb#GlobalTableNotFoundException": + throw await de_GlobalTableNotFoundExceptionRes(parsedOutput, context); + case "ImportNotFoundException": + case "com.amazonaws.dynamodb#ImportNotFoundException": + throw await de_ImportNotFoundExceptionRes(parsedOutput, context); + case "DuplicateItemException": + case "com.amazonaws.dynamodb#DuplicateItemException": + throw await de_DuplicateItemExceptionRes(parsedOutput, context); + case "IdempotentParameterMismatchException": + case "com.amazonaws.dynamodb#IdempotentParameterMismatchException": + throw await de_IdempotentParameterMismatchExceptionRes(parsedOutput, context); + case "TransactionCanceledException": + case "com.amazonaws.dynamodb#TransactionCanceledException": + throw await de_TransactionCanceledExceptionRes(parsedOutput, context); + case "TransactionInProgressException": + case "com.amazonaws.dynamodb#TransactionInProgressException": + throw await de_TransactionInProgressExceptionRes(parsedOutput, context); + case "ExportConflictException": + case "com.amazonaws.dynamodb#ExportConflictException": + throw await de_ExportConflictExceptionRes(parsedOutput, context); + case "InvalidExportTimeException": + case "com.amazonaws.dynamodb#InvalidExportTimeException": + throw await de_InvalidExportTimeExceptionRes(parsedOutput, context); + case "PointInTimeRecoveryUnavailableException": + case "com.amazonaws.dynamodb#PointInTimeRecoveryUnavailableException": + throw await de_PointInTimeRecoveryUnavailableExceptionRes(parsedOutput, context); + case "ImportConflictException": + case "com.amazonaws.dynamodb#ImportConflictException": + throw await de_ImportConflictExceptionRes(parsedOutput, context); + case "TableAlreadyExistsException": + case 
"com.amazonaws.dynamodb#TableAlreadyExistsException": + throw await de_TableAlreadyExistsExceptionRes(parsedOutput, context); + case "InvalidRestoreTimeException": + case "com.amazonaws.dynamodb#InvalidRestoreTimeException": + throw await de_InvalidRestoreTimeExceptionRes(parsedOutput, context); + case "ReplicaAlreadyExistsException": + case "com.amazonaws.dynamodb#ReplicaAlreadyExistsException": + throw await de_ReplicaAlreadyExistsExceptionRes(parsedOutput, context); + case "ReplicaNotFoundException": + case "com.amazonaws.dynamodb#ReplicaNotFoundException": + throw await de_ReplicaNotFoundExceptionRes(parsedOutput, context); + case "IndexNotFoundException": + case "com.amazonaws.dynamodb#IndexNotFoundException": + throw await de_IndexNotFoundExceptionRes(parsedOutput, context); + default: + const parsedBody = parsedOutput.body; + return throwDefaultError({ + output, + parsedBody, + errorCode + }); + } +}, "de_CommandError"); +var de_BackupInUseExceptionRes = /* @__PURE__ */ __name(async (parsedOutput, context) => { + const body = parsedOutput.body; + const deserialized = (0, import_smithy_client._json)(body); + const exception = new BackupInUseException({ + $metadata: deserializeMetadata(parsedOutput), + ...deserialized + }); + return (0, import_smithy_client.decorateServiceException)(exception, body); +}, "de_BackupInUseExceptionRes"); +var de_BackupNotFoundExceptionRes = /* @__PURE__ */ __name(async (parsedOutput, context) => { + const body = parsedOutput.body; + const deserialized = (0, import_smithy_client._json)(body); + const exception = new BackupNotFoundException({ + $metadata: deserializeMetadata(parsedOutput), + ...deserialized + }); + return (0, import_smithy_client.decorateServiceException)(exception, body); +}, "de_BackupNotFoundExceptionRes"); +var de_ConditionalCheckFailedExceptionRes = /* @__PURE__ */ __name(async (parsedOutput, context) => { + const body = parsedOutput.body; + const deserialized = de_ConditionalCheckFailedException(body, 
context); + const exception = new ConditionalCheckFailedException({ + $metadata: deserializeMetadata(parsedOutput), + ...deserialized + }); + return (0, import_smithy_client.decorateServiceException)(exception, body); +}, "de_ConditionalCheckFailedExceptionRes"); +var de_ContinuousBackupsUnavailableExceptionRes = /* @__PURE__ */ __name(async (parsedOutput, context) => { + const body = parsedOutput.body; + const deserialized = (0, import_smithy_client._json)(body); + const exception = new ContinuousBackupsUnavailableException({ + $metadata: deserializeMetadata(parsedOutput), + ...deserialized + }); + return (0, import_smithy_client.decorateServiceException)(exception, body); +}, "de_ContinuousBackupsUnavailableExceptionRes"); +var de_DuplicateItemExceptionRes = /* @__PURE__ */ __name(async (parsedOutput, context) => { + const body = parsedOutput.body; + const deserialized = (0, import_smithy_client._json)(body); + const exception = new DuplicateItemException({ + $metadata: deserializeMetadata(parsedOutput), + ...deserialized + }); + return (0, import_smithy_client.decorateServiceException)(exception, body); +}, "de_DuplicateItemExceptionRes"); +var de_ExportConflictExceptionRes = /* @__PURE__ */ __name(async (parsedOutput, context) => { + const body = parsedOutput.body; + const deserialized = (0, import_smithy_client._json)(body); + const exception = new ExportConflictException({ + $metadata: deserializeMetadata(parsedOutput), + ...deserialized + }); + return (0, import_smithy_client.decorateServiceException)(exception, body); +}, "de_ExportConflictExceptionRes"); +var de_ExportNotFoundExceptionRes = /* @__PURE__ */ __name(async (parsedOutput, context) => { + const body = parsedOutput.body; + const deserialized = (0, import_smithy_client._json)(body); + const exception = new ExportNotFoundException({ + $metadata: deserializeMetadata(parsedOutput), + ...deserialized + }); + return (0, import_smithy_client.decorateServiceException)(exception, body); +}, 
"de_ExportNotFoundExceptionRes"); +var de_GlobalTableAlreadyExistsExceptionRes = /* @__PURE__ */ __name(async (parsedOutput, context) => { + const body = parsedOutput.body; + const deserialized = (0, import_smithy_client._json)(body); + const exception = new GlobalTableAlreadyExistsException({ + $metadata: deserializeMetadata(parsedOutput), + ...deserialized + }); + return (0, import_smithy_client.decorateServiceException)(exception, body); +}, "de_GlobalTableAlreadyExistsExceptionRes"); +var de_GlobalTableNotFoundExceptionRes = /* @__PURE__ */ __name(async (parsedOutput, context) => { + const body = parsedOutput.body; + const deserialized = (0, import_smithy_client._json)(body); + const exception = new GlobalTableNotFoundException({ + $metadata: deserializeMetadata(parsedOutput), + ...deserialized + }); + return (0, import_smithy_client.decorateServiceException)(exception, body); +}, "de_GlobalTableNotFoundExceptionRes"); +var de_IdempotentParameterMismatchExceptionRes = /* @__PURE__ */ __name(async (parsedOutput, context) => { + const body = parsedOutput.body; + const deserialized = (0, import_smithy_client._json)(body); + const exception = new IdempotentParameterMismatchException({ + $metadata: deserializeMetadata(parsedOutput), + ...deserialized + }); + return (0, import_smithy_client.decorateServiceException)(exception, body); +}, "de_IdempotentParameterMismatchExceptionRes"); +var de_ImportConflictExceptionRes = /* @__PURE__ */ __name(async (parsedOutput, context) => { + const body = parsedOutput.body; + const deserialized = (0, import_smithy_client._json)(body); + const exception = new ImportConflictException({ + $metadata: deserializeMetadata(parsedOutput), + ...deserialized + }); + return (0, import_smithy_client.decorateServiceException)(exception, body); +}, "de_ImportConflictExceptionRes"); +var de_ImportNotFoundExceptionRes = /* @__PURE__ */ __name(async (parsedOutput, context) => { + const body = parsedOutput.body; + const deserialized = (0, 
import_smithy_client._json)(body); + const exception = new ImportNotFoundException({ + $metadata: deserializeMetadata(parsedOutput), + ...deserialized + }); + return (0, import_smithy_client.decorateServiceException)(exception, body); +}, "de_ImportNotFoundExceptionRes"); +var de_IndexNotFoundExceptionRes = /* @__PURE__ */ __name(async (parsedOutput, context) => { + const body = parsedOutput.body; + const deserialized = (0, import_smithy_client._json)(body); + const exception = new IndexNotFoundException({ + $metadata: deserializeMetadata(parsedOutput), + ...deserialized + }); + return (0, import_smithy_client.decorateServiceException)(exception, body); +}, "de_IndexNotFoundExceptionRes"); +var de_InternalServerErrorRes = /* @__PURE__ */ __name(async (parsedOutput, context) => { + const body = parsedOutput.body; + const deserialized = (0, import_smithy_client._json)(body); + const exception = new InternalServerError({ + $metadata: deserializeMetadata(parsedOutput), + ...deserialized + }); + return (0, import_smithy_client.decorateServiceException)(exception, body); +}, "de_InternalServerErrorRes"); +var de_InvalidEndpointExceptionRes = /* @__PURE__ */ __name(async (parsedOutput, context) => { + const body = parsedOutput.body; + const deserialized = (0, import_smithy_client._json)(body); + const exception = new InvalidEndpointException({ + $metadata: deserializeMetadata(parsedOutput), + ...deserialized + }); + return (0, import_smithy_client.decorateServiceException)(exception, body); +}, "de_InvalidEndpointExceptionRes"); +var de_InvalidExportTimeExceptionRes = /* @__PURE__ */ __name(async (parsedOutput, context) => { + const body = parsedOutput.body; + const deserialized = (0, import_smithy_client._json)(body); + const exception = new InvalidExportTimeException({ + $metadata: deserializeMetadata(parsedOutput), + ...deserialized + }); + return (0, import_smithy_client.decorateServiceException)(exception, body); +}, "de_InvalidExportTimeExceptionRes"); +var 
de_InvalidRestoreTimeExceptionRes = /* @__PURE__ */ __name(async (parsedOutput, context) => { + const body = parsedOutput.body; + const deserialized = (0, import_smithy_client._json)(body); + const exception = new InvalidRestoreTimeException({ + $metadata: deserializeMetadata(parsedOutput), + ...deserialized + }); + return (0, import_smithy_client.decorateServiceException)(exception, body); +}, "de_InvalidRestoreTimeExceptionRes"); +var de_ItemCollectionSizeLimitExceededExceptionRes = /* @__PURE__ */ __name(async (parsedOutput, context) => { + const body = parsedOutput.body; + const deserialized = (0, import_smithy_client._json)(body); + const exception = new ItemCollectionSizeLimitExceededException({ + $metadata: deserializeMetadata(parsedOutput), + ...deserialized + }); + return (0, import_smithy_client.decorateServiceException)(exception, body); +}, "de_ItemCollectionSizeLimitExceededExceptionRes"); +var de_LimitExceededExceptionRes = /* @__PURE__ */ __name(async (parsedOutput, context) => { + const body = parsedOutput.body; + const deserialized = (0, import_smithy_client._json)(body); + const exception = new LimitExceededException({ + $metadata: deserializeMetadata(parsedOutput), + ...deserialized + }); + return (0, import_smithy_client.decorateServiceException)(exception, body); +}, "de_LimitExceededExceptionRes"); +var de_PointInTimeRecoveryUnavailableExceptionRes = /* @__PURE__ */ __name(async (parsedOutput, context) => { + const body = parsedOutput.body; + const deserialized = (0, import_smithy_client._json)(body); + const exception = new PointInTimeRecoveryUnavailableException({ + $metadata: deserializeMetadata(parsedOutput), + ...deserialized + }); + return (0, import_smithy_client.decorateServiceException)(exception, body); +}, "de_PointInTimeRecoveryUnavailableExceptionRes"); +var de_PolicyNotFoundExceptionRes = /* @__PURE__ */ __name(async (parsedOutput, context) => { + const body = parsedOutput.body; + const deserialized = (0, 
import_smithy_client._json)(body); + const exception = new PolicyNotFoundException({ + $metadata: deserializeMetadata(parsedOutput), + ...deserialized + }); + return (0, import_smithy_client.decorateServiceException)(exception, body); +}, "de_PolicyNotFoundExceptionRes"); +var de_ProvisionedThroughputExceededExceptionRes = /* @__PURE__ */ __name(async (parsedOutput, context) => { + const body = parsedOutput.body; + const deserialized = (0, import_smithy_client._json)(body); + const exception = new ProvisionedThroughputExceededException({ + $metadata: deserializeMetadata(parsedOutput), + ...deserialized + }); + return (0, import_smithy_client.decorateServiceException)(exception, body); +}, "de_ProvisionedThroughputExceededExceptionRes"); +var de_ReplicaAlreadyExistsExceptionRes = /* @__PURE__ */ __name(async (parsedOutput, context) => { + const body = parsedOutput.body; + const deserialized = (0, import_smithy_client._json)(body); + const exception = new ReplicaAlreadyExistsException({ + $metadata: deserializeMetadata(parsedOutput), + ...deserialized + }); + return (0, import_smithy_client.decorateServiceException)(exception, body); +}, "de_ReplicaAlreadyExistsExceptionRes"); +var de_ReplicaNotFoundExceptionRes = /* @__PURE__ */ __name(async (parsedOutput, context) => { + const body = parsedOutput.body; + const deserialized = (0, import_smithy_client._json)(body); + const exception = new ReplicaNotFoundException({ + $metadata: deserializeMetadata(parsedOutput), + ...deserialized + }); + return (0, import_smithy_client.decorateServiceException)(exception, body); +}, "de_ReplicaNotFoundExceptionRes"); +var de_ReplicatedWriteConflictExceptionRes = /* @__PURE__ */ __name(async (parsedOutput, context) => { + const body = parsedOutput.body; + const deserialized = (0, import_smithy_client._json)(body); + const exception = new ReplicatedWriteConflictException({ + $metadata: deserializeMetadata(parsedOutput), + ...deserialized + }); + return (0, 
import_smithy_client.decorateServiceException)(exception, body); +}, "de_ReplicatedWriteConflictExceptionRes"); +var de_RequestLimitExceededRes = /* @__PURE__ */ __name(async (parsedOutput, context) => { + const body = parsedOutput.body; + const deserialized = (0, import_smithy_client._json)(body); + const exception = new RequestLimitExceeded({ + $metadata: deserializeMetadata(parsedOutput), + ...deserialized + }); + return (0, import_smithy_client.decorateServiceException)(exception, body); +}, "de_RequestLimitExceededRes"); +var de_ResourceInUseExceptionRes = /* @__PURE__ */ __name(async (parsedOutput, context) => { + const body = parsedOutput.body; + const deserialized = (0, import_smithy_client._json)(body); + const exception = new ResourceInUseException({ + $metadata: deserializeMetadata(parsedOutput), + ...deserialized + }); + return (0, import_smithy_client.decorateServiceException)(exception, body); +}, "de_ResourceInUseExceptionRes"); +var de_ResourceNotFoundExceptionRes = /* @__PURE__ */ __name(async (parsedOutput, context) => { + const body = parsedOutput.body; + const deserialized = (0, import_smithy_client._json)(body); + const exception = new ResourceNotFoundException({ + $metadata: deserializeMetadata(parsedOutput), + ...deserialized + }); + return (0, import_smithy_client.decorateServiceException)(exception, body); +}, "de_ResourceNotFoundExceptionRes"); +var de_TableAlreadyExistsExceptionRes = /* @__PURE__ */ __name(async (parsedOutput, context) => { + const body = parsedOutput.body; + const deserialized = (0, import_smithy_client._json)(body); + const exception = new TableAlreadyExistsException({ + $metadata: deserializeMetadata(parsedOutput), + ...deserialized + }); + return (0, import_smithy_client.decorateServiceException)(exception, body); +}, "de_TableAlreadyExistsExceptionRes"); +var de_TableInUseExceptionRes = /* @__PURE__ */ __name(async (parsedOutput, context) => { + const body = parsedOutput.body; + const deserialized = (0, 
import_smithy_client._json)(body); + const exception = new TableInUseException({ + $metadata: deserializeMetadata(parsedOutput), + ...deserialized + }); + return (0, import_smithy_client.decorateServiceException)(exception, body); +}, "de_TableInUseExceptionRes"); +var de_TableNotFoundExceptionRes = /* @__PURE__ */ __name(async (parsedOutput, context) => { + const body = parsedOutput.body; + const deserialized = (0, import_smithy_client._json)(body); + const exception = new TableNotFoundException({ + $metadata: deserializeMetadata(parsedOutput), + ...deserialized + }); + return (0, import_smithy_client.decorateServiceException)(exception, body); +}, "de_TableNotFoundExceptionRes"); +var de_TransactionCanceledExceptionRes = /* @__PURE__ */ __name(async (parsedOutput, context) => { + const body = parsedOutput.body; + const deserialized = de_TransactionCanceledException(body, context); + const exception = new TransactionCanceledException({ + $metadata: deserializeMetadata(parsedOutput), + ...deserialized + }); + return (0, import_smithy_client.decorateServiceException)(exception, body); +}, "de_TransactionCanceledExceptionRes"); +var de_TransactionConflictExceptionRes = /* @__PURE__ */ __name(async (parsedOutput, context) => { + const body = parsedOutput.body; + const deserialized = (0, import_smithy_client._json)(body); + const exception = new TransactionConflictException({ + $metadata: deserializeMetadata(parsedOutput), + ...deserialized + }); + return (0, import_smithy_client.decorateServiceException)(exception, body); +}, "de_TransactionConflictExceptionRes"); +var de_TransactionInProgressExceptionRes = /* @__PURE__ */ __name(async (parsedOutput, context) => { + const body = parsedOutput.body; + const deserialized = (0, import_smithy_client._json)(body); + const exception = new TransactionInProgressException({ + $metadata: deserializeMetadata(parsedOutput), + ...deserialized + }); + return (0, import_smithy_client.decorateServiceException)(exception, body); +}, 
"de_TransactionInProgressExceptionRes"); +var se_AttributeUpdates = /* @__PURE__ */ __name((input, context) => { + return Object.entries(input).reduce((acc, [key, value]) => { + if (value === null) { + return acc; + } + acc[key] = se_AttributeValueUpdate(value, context); + return acc; + }, {}); +}, "se_AttributeUpdates"); +var se_AttributeValue = /* @__PURE__ */ __name((input, context) => { + return AttributeValue.visit(input, { + B: /* @__PURE__ */ __name((value) => ({ B: context.base64Encoder(value) }), "B"), + BOOL: /* @__PURE__ */ __name((value) => ({ BOOL: value }), "BOOL"), + BS: /* @__PURE__ */ __name((value) => ({ BS: se_BinarySetAttributeValue(value, context) }), "BS"), + L: /* @__PURE__ */ __name((value) => ({ L: se_ListAttributeValue(value, context) }), "L"), + M: /* @__PURE__ */ __name((value) => ({ M: se_MapAttributeValue(value, context) }), "M"), + N: /* @__PURE__ */ __name((value) => ({ N: value }), "N"), + NS: /* @__PURE__ */ __name((value) => ({ NS: (0, import_smithy_client._json)(value) }), "NS"), + NULL: /* @__PURE__ */ __name((value) => ({ NULL: value }), "NULL"), + S: /* @__PURE__ */ __name((value) => ({ S: value }), "S"), + SS: /* @__PURE__ */ __name((value) => ({ SS: (0, import_smithy_client._json)(value) }), "SS"), + _: /* @__PURE__ */ __name((name, value) => ({ [name]: value }), "_") + }); +}, "se_AttributeValue"); +var se_AttributeValueList = /* @__PURE__ */ __name((input, context) => { + return input.filter((e) => e != null).map((entry) => { + return se_AttributeValue(entry, context); + }); +}, "se_AttributeValueList"); +var se_AttributeValueUpdate = /* @__PURE__ */ __name((input, context) => { + return (0, import_smithy_client.take)(input, { + Action: [], + Value: /* @__PURE__ */ __name((_) => se_AttributeValue(_, context), "Value") + }); +}, "se_AttributeValueUpdate"); +var se_AutoScalingPolicyUpdate = /* @__PURE__ */ __name((input, context) => { + return (0, import_smithy_client.take)(input, { + PolicyName: [], + 
TargetTrackingScalingPolicyConfiguration: /* @__PURE__ */ __name((_) => se_AutoScalingTargetTrackingScalingPolicyConfigurationUpdate(_, context), "TargetTrackingScalingPolicyConfiguration") + }); +}, "se_AutoScalingPolicyUpdate"); +var se_AutoScalingSettingsUpdate = /* @__PURE__ */ __name((input, context) => { + return (0, import_smithy_client.take)(input, { + AutoScalingDisabled: [], + AutoScalingRoleArn: [], + MaximumUnits: [], + MinimumUnits: [], + ScalingPolicyUpdate: /* @__PURE__ */ __name((_) => se_AutoScalingPolicyUpdate(_, context), "ScalingPolicyUpdate") + }); +}, "se_AutoScalingSettingsUpdate"); +var se_AutoScalingTargetTrackingScalingPolicyConfigurationUpdate = /* @__PURE__ */ __name((input, context) => { + return (0, import_smithy_client.take)(input, { + DisableScaleIn: [], + ScaleInCooldown: [], + ScaleOutCooldown: [], + TargetValue: import_smithy_client.serializeFloat + }); +}, "se_AutoScalingTargetTrackingScalingPolicyConfigurationUpdate"); +var se_BatchExecuteStatementInput = /* @__PURE__ */ __name((input, context) => { + return (0, import_smithy_client.take)(input, { + ReturnConsumedCapacity: [], + Statements: /* @__PURE__ */ __name((_) => se_PartiQLBatchRequest(_, context), "Statements") + }); +}, "se_BatchExecuteStatementInput"); +var se_BatchGetItemInput = /* @__PURE__ */ __name((input, context) => { + return (0, import_smithy_client.take)(input, { + RequestItems: /* @__PURE__ */ __name((_) => se_BatchGetRequestMap(_, context), "RequestItems"), + ReturnConsumedCapacity: [] + }); +}, "se_BatchGetItemInput"); +var se_BatchGetRequestMap = /* @__PURE__ */ __name((input, context) => { + return Object.entries(input).reduce((acc, [key, value]) => { + if (value === null) { + return acc; + } + acc[key] = se_KeysAndAttributes(value, context); + return acc; + }, {}); +}, "se_BatchGetRequestMap"); +var se_BatchStatementRequest = /* @__PURE__ */ __name((input, context) => { + return (0, import_smithy_client.take)(input, { + ConsistentRead: [], + Parameters: 
/* @__PURE__ */ __name((_) => se_PreparedStatementParameters(_, context), "Parameters"), + ReturnValuesOnConditionCheckFailure: [], + Statement: [] + }); +}, "se_BatchStatementRequest"); +var se_BatchWriteItemInput = /* @__PURE__ */ __name((input, context) => { + return (0, import_smithy_client.take)(input, { + RequestItems: /* @__PURE__ */ __name((_) => se_BatchWriteItemRequestMap(_, context), "RequestItems"), + ReturnConsumedCapacity: [], + ReturnItemCollectionMetrics: [] + }); +}, "se_BatchWriteItemInput"); +var se_BatchWriteItemRequestMap = /* @__PURE__ */ __name((input, context) => { + return Object.entries(input).reduce((acc, [key, value]) => { + if (value === null) { + return acc; + } + acc[key] = se_WriteRequests(value, context); + return acc; + }, {}); +}, "se_BatchWriteItemRequestMap"); +var se_BinarySetAttributeValue = /* @__PURE__ */ __name((input, context) => { + return input.filter((e) => e != null).map((entry) => { + return context.base64Encoder(entry); + }); +}, "se_BinarySetAttributeValue"); +var se_Condition = /* @__PURE__ */ __name((input, context) => { + return (0, import_smithy_client.take)(input, { + AttributeValueList: /* @__PURE__ */ __name((_) => se_AttributeValueList(_, context), "AttributeValueList"), + ComparisonOperator: [] + }); +}, "se_Condition"); +var se_ConditionCheck = /* @__PURE__ */ __name((input, context) => { + return (0, import_smithy_client.take)(input, { + ConditionExpression: [], + ExpressionAttributeNames: import_smithy_client._json, + ExpressionAttributeValues: /* @__PURE__ */ __name((_) => se_ExpressionAttributeValueMap(_, context), "ExpressionAttributeValues"), + Key: /* @__PURE__ */ __name((_) => se_Key(_, context), "Key"), + ReturnValuesOnConditionCheckFailure: [], + TableName: [] + }); +}, "se_ConditionCheck"); +var se_Delete = /* @__PURE__ */ __name((input, context) => { + return (0, import_smithy_client.take)(input, { + ConditionExpression: [], + ExpressionAttributeNames: import_smithy_client._json, + 
ExpressionAttributeValues: /* @__PURE__ */ __name((_) => se_ExpressionAttributeValueMap(_, context), "ExpressionAttributeValues"), + Key: /* @__PURE__ */ __name((_) => se_Key(_, context), "Key"), + ReturnValuesOnConditionCheckFailure: [], + TableName: [] + }); +}, "se_Delete"); +var se_DeleteItemInput = /* @__PURE__ */ __name((input, context) => { + return (0, import_smithy_client.take)(input, { + ConditionExpression: [], + ConditionalOperator: [], + Expected: /* @__PURE__ */ __name((_) => se_ExpectedAttributeMap(_, context), "Expected"), + ExpressionAttributeNames: import_smithy_client._json, + ExpressionAttributeValues: /* @__PURE__ */ __name((_) => se_ExpressionAttributeValueMap(_, context), "ExpressionAttributeValues"), + Key: /* @__PURE__ */ __name((_) => se_Key(_, context), "Key"), + ReturnConsumedCapacity: [], + ReturnItemCollectionMetrics: [], + ReturnValues: [], + ReturnValuesOnConditionCheckFailure: [], + TableName: [] + }); +}, "se_DeleteItemInput"); +var se_DeleteRequest = /* @__PURE__ */ __name((input, context) => { + return (0, import_smithy_client.take)(input, { + Key: /* @__PURE__ */ __name((_) => se_Key(_, context), "Key") + }); +}, "se_DeleteRequest"); +var se_ExecuteStatementInput = /* @__PURE__ */ __name((input, context) => { + return (0, import_smithy_client.take)(input, { + ConsistentRead: [], + Limit: [], + NextToken: [], + Parameters: /* @__PURE__ */ __name((_) => se_PreparedStatementParameters(_, context), "Parameters"), + ReturnConsumedCapacity: [], + ReturnValuesOnConditionCheckFailure: [], + Statement: [] + }); +}, "se_ExecuteStatementInput"); +var se_ExecuteTransactionInput = /* @__PURE__ */ __name((input, context) => { + return (0, import_smithy_client.take)(input, { + ClientRequestToken: [true, (_) => _ ?? 
(0, import_uuid.v4)()], + ReturnConsumedCapacity: [], + TransactStatements: /* @__PURE__ */ __name((_) => se_ParameterizedStatements(_, context), "TransactStatements") + }); +}, "se_ExecuteTransactionInput"); +var se_ExpectedAttributeMap = /* @__PURE__ */ __name((input, context) => { + return Object.entries(input).reduce((acc, [key, value]) => { + if (value === null) { + return acc; + } + acc[key] = se_ExpectedAttributeValue(value, context); + return acc; + }, {}); +}, "se_ExpectedAttributeMap"); +var se_ExpectedAttributeValue = /* @__PURE__ */ __name((input, context) => { + return (0, import_smithy_client.take)(input, { + AttributeValueList: /* @__PURE__ */ __name((_) => se_AttributeValueList(_, context), "AttributeValueList"), + ComparisonOperator: [], + Exists: [], + Value: /* @__PURE__ */ __name((_) => se_AttributeValue(_, context), "Value") + }); +}, "se_ExpectedAttributeValue"); +var se_ExportTableToPointInTimeInput = /* @__PURE__ */ __name((input, context) => { + return (0, import_smithy_client.take)(input, { + ClientToken: [true, (_) => _ ?? 
(0, import_uuid.v4)()], + ExportFormat: [], + ExportTime: /* @__PURE__ */ __name((_) => _.getTime() / 1e3, "ExportTime"), + ExportType: [], + IncrementalExportSpecification: /* @__PURE__ */ __name((_) => se_IncrementalExportSpecification(_, context), "IncrementalExportSpecification"), + S3Bucket: [], + S3BucketOwner: [], + S3Prefix: [], + S3SseAlgorithm: [], + S3SseKmsKeyId: [], + TableArn: [] + }); +}, "se_ExportTableToPointInTimeInput"); +var se_ExpressionAttributeValueMap = /* @__PURE__ */ __name((input, context) => { + return Object.entries(input).reduce((acc, [key, value]) => { + if (value === null) { + return acc; + } + acc[key] = se_AttributeValue(value, context); + return acc; + }, {}); +}, "se_ExpressionAttributeValueMap"); +var se_FilterConditionMap = /* @__PURE__ */ __name((input, context) => { + return Object.entries(input).reduce((acc, [key, value]) => { + if (value === null) { + return acc; + } + acc[key] = se_Condition(value, context); + return acc; + }, {}); +}, "se_FilterConditionMap"); +var se_Get = /* @__PURE__ */ __name((input, context) => { + return (0, import_smithy_client.take)(input, { + ExpressionAttributeNames: import_smithy_client._json, + Key: /* @__PURE__ */ __name((_) => se_Key(_, context), "Key"), + ProjectionExpression: [], + TableName: [] + }); +}, "se_Get"); +var se_GetItemInput = /* @__PURE__ */ __name((input, context) => { + return (0, import_smithy_client.take)(input, { + AttributesToGet: import_smithy_client._json, + ConsistentRead: [], + ExpressionAttributeNames: import_smithy_client._json, + Key: /* @__PURE__ */ __name((_) => se_Key(_, context), "Key"), + ProjectionExpression: [], + ReturnConsumedCapacity: [], + TableName: [] + }); +}, "se_GetItemInput"); +var se_GlobalSecondaryIndexAutoScalingUpdate = /* @__PURE__ */ __name((input, context) => { + return (0, import_smithy_client.take)(input, { + IndexName: [], + ProvisionedWriteCapacityAutoScalingUpdate: /* @__PURE__ */ __name((_) => se_AutoScalingSettingsUpdate(_, context), 
"ProvisionedWriteCapacityAutoScalingUpdate") + }); +}, "se_GlobalSecondaryIndexAutoScalingUpdate"); +var se_GlobalSecondaryIndexAutoScalingUpdateList = /* @__PURE__ */ __name((input, context) => { + return input.filter((e) => e != null).map((entry) => { + return se_GlobalSecondaryIndexAutoScalingUpdate(entry, context); + }); +}, "se_GlobalSecondaryIndexAutoScalingUpdateList"); +var se_GlobalTableGlobalSecondaryIndexSettingsUpdate = /* @__PURE__ */ __name((input, context) => { + return (0, import_smithy_client.take)(input, { + IndexName: [], + ProvisionedWriteCapacityAutoScalingSettingsUpdate: /* @__PURE__ */ __name((_) => se_AutoScalingSettingsUpdate(_, context), "ProvisionedWriteCapacityAutoScalingSettingsUpdate"), + ProvisionedWriteCapacityUnits: [] + }); +}, "se_GlobalTableGlobalSecondaryIndexSettingsUpdate"); +var se_GlobalTableGlobalSecondaryIndexSettingsUpdateList = /* @__PURE__ */ __name((input, context) => { + return input.filter((e) => e != null).map((entry) => { + return se_GlobalTableGlobalSecondaryIndexSettingsUpdate(entry, context); + }); +}, "se_GlobalTableGlobalSecondaryIndexSettingsUpdateList"); +var se_ImportTableInput = /* @__PURE__ */ __name((input, context) => { + return (0, import_smithy_client.take)(input, { + ClientToken: [true, (_) => _ ?? 
(0, import_uuid.v4)()], + InputCompressionType: [], + InputFormat: [], + InputFormatOptions: import_smithy_client._json, + S3BucketSource: import_smithy_client._json, + TableCreationParameters: import_smithy_client._json + }); +}, "se_ImportTableInput"); +var se_IncrementalExportSpecification = /* @__PURE__ */ __name((input, context) => { + return (0, import_smithy_client.take)(input, { + ExportFromTime: /* @__PURE__ */ __name((_) => _.getTime() / 1e3, "ExportFromTime"), + ExportToTime: /* @__PURE__ */ __name((_) => _.getTime() / 1e3, "ExportToTime"), + ExportViewType: [] + }); +}, "se_IncrementalExportSpecification"); +var se_Key = /* @__PURE__ */ __name((input, context) => { + return Object.entries(input).reduce((acc, [key, value]) => { + if (value === null) { + return acc; + } + acc[key] = se_AttributeValue(value, context); + return acc; + }, {}); +}, "se_Key"); +var se_KeyConditions = /* @__PURE__ */ __name((input, context) => { + return Object.entries(input).reduce((acc, [key, value]) => { + if (value === null) { + return acc; + } + acc[key] = se_Condition(value, context); + return acc; + }, {}); +}, "se_KeyConditions"); +var se_KeyList = /* @__PURE__ */ __name((input, context) => { + return input.filter((e) => e != null).map((entry) => { + return se_Key(entry, context); + }); +}, "se_KeyList"); +var se_KeysAndAttributes = /* @__PURE__ */ __name((input, context) => { + return (0, import_smithy_client.take)(input, { + AttributesToGet: import_smithy_client._json, + ConsistentRead: [], + ExpressionAttributeNames: import_smithy_client._json, + Keys: /* @__PURE__ */ __name((_) => se_KeyList(_, context), "Keys"), + ProjectionExpression: [] + }); +}, "se_KeysAndAttributes"); +var se_ListAttributeValue = /* @__PURE__ */ __name((input, context) => { + return input.filter((e) => e != null).map((entry) => { + return se_AttributeValue(entry, context); + }); +}, "se_ListAttributeValue"); +var se_ListBackupsInput = /* @__PURE__ */ __name((input, context) => { + return (0, 
import_smithy_client.take)(input, { + BackupType: [], + ExclusiveStartBackupArn: [], + Limit: [], + TableName: [], + TimeRangeLowerBound: /* @__PURE__ */ __name((_) => _.getTime() / 1e3, "TimeRangeLowerBound"), + TimeRangeUpperBound: /* @__PURE__ */ __name((_) => _.getTime() / 1e3, "TimeRangeUpperBound") + }); +}, "se_ListBackupsInput"); +var se_MapAttributeValue = /* @__PURE__ */ __name((input, context) => { + return Object.entries(input).reduce((acc, [key, value]) => { + if (value === null) { + return acc; + } + acc[key] = se_AttributeValue(value, context); + return acc; + }, {}); +}, "se_MapAttributeValue"); +var se_ParameterizedStatement = /* @__PURE__ */ __name((input, context) => { + return (0, import_smithy_client.take)(input, { + Parameters: /* @__PURE__ */ __name((_) => se_PreparedStatementParameters(_, context), "Parameters"), + ReturnValuesOnConditionCheckFailure: [], + Statement: [] + }); +}, "se_ParameterizedStatement"); +var se_ParameterizedStatements = /* @__PURE__ */ __name((input, context) => { + return input.filter((e) => e != null).map((entry) => { + return se_ParameterizedStatement(entry, context); + }); +}, "se_ParameterizedStatements"); +var se_PartiQLBatchRequest = /* @__PURE__ */ __name((input, context) => { + return input.filter((e) => e != null).map((entry) => { + return se_BatchStatementRequest(entry, context); + }); +}, "se_PartiQLBatchRequest"); +var se_PreparedStatementParameters = /* @__PURE__ */ __name((input, context) => { + return input.filter((e) => e != null).map((entry) => { + return se_AttributeValue(entry, context); + }); +}, "se_PreparedStatementParameters"); +var se_Put = /* @__PURE__ */ __name((input, context) => { + return (0, import_smithy_client.take)(input, { + ConditionExpression: [], + ExpressionAttributeNames: import_smithy_client._json, + ExpressionAttributeValues: /* @__PURE__ */ __name((_) => se_ExpressionAttributeValueMap(_, context), "ExpressionAttributeValues"), + Item: /* @__PURE__ */ __name((_) => 
se_PutItemInputAttributeMap(_, context), "Item"), + ReturnValuesOnConditionCheckFailure: [], + TableName: [] + }); +}, "se_Put"); +var se_PutItemInput = /* @__PURE__ */ __name((input, context) => { + return (0, import_smithy_client.take)(input, { + ConditionExpression: [], + ConditionalOperator: [], + Expected: /* @__PURE__ */ __name((_) => se_ExpectedAttributeMap(_, context), "Expected"), + ExpressionAttributeNames: import_smithy_client._json, + ExpressionAttributeValues: /* @__PURE__ */ __name((_) => se_ExpressionAttributeValueMap(_, context), "ExpressionAttributeValues"), + Item: /* @__PURE__ */ __name((_) => se_PutItemInputAttributeMap(_, context), "Item"), + ReturnConsumedCapacity: [], + ReturnItemCollectionMetrics: [], + ReturnValues: [], + ReturnValuesOnConditionCheckFailure: [], + TableName: [] + }); +}, "se_PutItemInput"); +var se_PutItemInputAttributeMap = /* @__PURE__ */ __name((input, context) => { + return Object.entries(input).reduce((acc, [key, value]) => { + if (value === null) { + return acc; + } + acc[key] = se_AttributeValue(value, context); + return acc; + }, {}); +}, "se_PutItemInputAttributeMap"); +var se_PutRequest = /* @__PURE__ */ __name((input, context) => { + return (0, import_smithy_client.take)(input, { + Item: /* @__PURE__ */ __name((_) => se_PutItemInputAttributeMap(_, context), "Item") + }); +}, "se_PutRequest"); +var se_QueryInput = /* @__PURE__ */ __name((input, context) => { + return (0, import_smithy_client.take)(input, { + AttributesToGet: import_smithy_client._json, + ConditionalOperator: [], + ConsistentRead: [], + ExclusiveStartKey: /* @__PURE__ */ __name((_) => se_Key(_, context), "ExclusiveStartKey"), + ExpressionAttributeNames: import_smithy_client._json, + ExpressionAttributeValues: /* @__PURE__ */ __name((_) => se_ExpressionAttributeValueMap(_, context), "ExpressionAttributeValues"), + FilterExpression: [], + IndexName: [], + KeyConditionExpression: [], + KeyConditions: /* @__PURE__ */ __name((_) => se_KeyConditions(_, 
context), "KeyConditions"), + Limit: [], + ProjectionExpression: [], + QueryFilter: /* @__PURE__ */ __name((_) => se_FilterConditionMap(_, context), "QueryFilter"), + ReturnConsumedCapacity: [], + ScanIndexForward: [], + Select: [], + TableName: [] + }); +}, "se_QueryInput"); +var se_ReplicaAutoScalingUpdate = /* @__PURE__ */ __name((input, context) => { + return (0, import_smithy_client.take)(input, { + RegionName: [], + ReplicaGlobalSecondaryIndexUpdates: /* @__PURE__ */ __name((_) => se_ReplicaGlobalSecondaryIndexAutoScalingUpdateList(_, context), "ReplicaGlobalSecondaryIndexUpdates"), + ReplicaProvisionedReadCapacityAutoScalingUpdate: /* @__PURE__ */ __name((_) => se_AutoScalingSettingsUpdate(_, context), "ReplicaProvisionedReadCapacityAutoScalingUpdate") + }); +}, "se_ReplicaAutoScalingUpdate"); +var se_ReplicaAutoScalingUpdateList = /* @__PURE__ */ __name((input, context) => { + return input.filter((e) => e != null).map((entry) => { + return se_ReplicaAutoScalingUpdate(entry, context); + }); +}, "se_ReplicaAutoScalingUpdateList"); +var se_ReplicaGlobalSecondaryIndexAutoScalingUpdate = /* @__PURE__ */ __name((input, context) => { + return (0, import_smithy_client.take)(input, { + IndexName: [], + ProvisionedReadCapacityAutoScalingUpdate: /* @__PURE__ */ __name((_) => se_AutoScalingSettingsUpdate(_, context), "ProvisionedReadCapacityAutoScalingUpdate") + }); +}, "se_ReplicaGlobalSecondaryIndexAutoScalingUpdate"); +var se_ReplicaGlobalSecondaryIndexAutoScalingUpdateList = /* @__PURE__ */ __name((input, context) => { + return input.filter((e) => e != null).map((entry) => { + return se_ReplicaGlobalSecondaryIndexAutoScalingUpdate(entry, context); + }); +}, "se_ReplicaGlobalSecondaryIndexAutoScalingUpdateList"); +var se_ReplicaGlobalSecondaryIndexSettingsUpdate = /* @__PURE__ */ __name((input, context) => { + return (0, import_smithy_client.take)(input, { + IndexName: [], + ProvisionedReadCapacityAutoScalingSettingsUpdate: /* @__PURE__ */ __name((_) => 
se_AutoScalingSettingsUpdate(_, context), "ProvisionedReadCapacityAutoScalingSettingsUpdate"), + ProvisionedReadCapacityUnits: [] + }); +}, "se_ReplicaGlobalSecondaryIndexSettingsUpdate"); +var se_ReplicaGlobalSecondaryIndexSettingsUpdateList = /* @__PURE__ */ __name((input, context) => { + return input.filter((e) => e != null).map((entry) => { + return se_ReplicaGlobalSecondaryIndexSettingsUpdate(entry, context); + }); +}, "se_ReplicaGlobalSecondaryIndexSettingsUpdateList"); +var se_ReplicaSettingsUpdate = /* @__PURE__ */ __name((input, context) => { + return (0, import_smithy_client.take)(input, { + RegionName: [], + ReplicaGlobalSecondaryIndexSettingsUpdate: /* @__PURE__ */ __name((_) => se_ReplicaGlobalSecondaryIndexSettingsUpdateList(_, context), "ReplicaGlobalSecondaryIndexSettingsUpdate"), + ReplicaProvisionedReadCapacityAutoScalingSettingsUpdate: /* @__PURE__ */ __name((_) => se_AutoScalingSettingsUpdate(_, context), "ReplicaProvisionedReadCapacityAutoScalingSettingsUpdate"), + ReplicaProvisionedReadCapacityUnits: [], + ReplicaTableClass: [] + }); +}, "se_ReplicaSettingsUpdate"); +var se_ReplicaSettingsUpdateList = /* @__PURE__ */ __name((input, context) => { + return input.filter((e) => e != null).map((entry) => { + return se_ReplicaSettingsUpdate(entry, context); + }); +}, "se_ReplicaSettingsUpdateList"); +var se_RestoreTableToPointInTimeInput = /* @__PURE__ */ __name((input, context) => { + return (0, import_smithy_client.take)(input, { + BillingModeOverride: [], + GlobalSecondaryIndexOverride: import_smithy_client._json, + LocalSecondaryIndexOverride: import_smithy_client._json, + OnDemandThroughputOverride: import_smithy_client._json, + ProvisionedThroughputOverride: import_smithy_client._json, + RestoreDateTime: /* @__PURE__ */ __name((_) => _.getTime() / 1e3, "RestoreDateTime"), + SSESpecificationOverride: import_smithy_client._json, + SourceTableArn: [], + SourceTableName: [], + TargetTableName: [], + UseLatestRestorableTime: [] + }); +}, 
"se_RestoreTableToPointInTimeInput"); +var se_ScanInput = /* @__PURE__ */ __name((input, context) => { + return (0, import_smithy_client.take)(input, { + AttributesToGet: import_smithy_client._json, + ConditionalOperator: [], + ConsistentRead: [], + ExclusiveStartKey: /* @__PURE__ */ __name((_) => se_Key(_, context), "ExclusiveStartKey"), + ExpressionAttributeNames: import_smithy_client._json, + ExpressionAttributeValues: /* @__PURE__ */ __name((_) => se_ExpressionAttributeValueMap(_, context), "ExpressionAttributeValues"), + FilterExpression: [], + IndexName: [], + Limit: [], + ProjectionExpression: [], + ReturnConsumedCapacity: [], + ScanFilter: /* @__PURE__ */ __name((_) => se_FilterConditionMap(_, context), "ScanFilter"), + Segment: [], + Select: [], + TableName: [], + TotalSegments: [] + }); +}, "se_ScanInput"); +var se_TransactGetItem = /* @__PURE__ */ __name((input, context) => { + return (0, import_smithy_client.take)(input, { + Get: /* @__PURE__ */ __name((_) => se_Get(_, context), "Get") + }); +}, "se_TransactGetItem"); +var se_TransactGetItemList = /* @__PURE__ */ __name((input, context) => { + return input.filter((e) => e != null).map((entry) => { + return se_TransactGetItem(entry, context); + }); +}, "se_TransactGetItemList"); +var se_TransactGetItemsInput = /* @__PURE__ */ __name((input, context) => { + return (0, import_smithy_client.take)(input, { + ReturnConsumedCapacity: [], + TransactItems: /* @__PURE__ */ __name((_) => se_TransactGetItemList(_, context), "TransactItems") + }); +}, "se_TransactGetItemsInput"); +var se_TransactWriteItem = /* @__PURE__ */ __name((input, context) => { + return (0, import_smithy_client.take)(input, { + ConditionCheck: /* @__PURE__ */ __name((_) => se_ConditionCheck(_, context), "ConditionCheck"), + Delete: /* @__PURE__ */ __name((_) => se_Delete(_, context), "Delete"), + Put: /* @__PURE__ */ __name((_) => se_Put(_, context), "Put"), + Update: /* @__PURE__ */ __name((_) => se_Update(_, context), "Update") + }); +}, 
"se_TransactWriteItem"); +var se_TransactWriteItemList = /* @__PURE__ */ __name((input, context) => { + return input.filter((e) => e != null).map((entry) => { + return se_TransactWriteItem(entry, context); + }); +}, "se_TransactWriteItemList"); +var se_TransactWriteItemsInput = /* @__PURE__ */ __name((input, context) => { + return (0, import_smithy_client.take)(input, { + ClientRequestToken: [true, (_) => _ ?? (0, import_uuid.v4)()], + ReturnConsumedCapacity: [], + ReturnItemCollectionMetrics: [], + TransactItems: /* @__PURE__ */ __name((_) => se_TransactWriteItemList(_, context), "TransactItems") + }); +}, "se_TransactWriteItemsInput"); +var se_Update = /* @__PURE__ */ __name((input, context) => { + return (0, import_smithy_client.take)(input, { + ConditionExpression: [], + ExpressionAttributeNames: import_smithy_client._json, + ExpressionAttributeValues: /* @__PURE__ */ __name((_) => se_ExpressionAttributeValueMap(_, context), "ExpressionAttributeValues"), + Key: /* @__PURE__ */ __name((_) => se_Key(_, context), "Key"), + ReturnValuesOnConditionCheckFailure: [], + TableName: [], + UpdateExpression: [] + }); +}, "se_Update"); +var se_UpdateGlobalTableSettingsInput = /* @__PURE__ */ __name((input, context) => { + return (0, import_smithy_client.take)(input, { + GlobalTableBillingMode: [], + GlobalTableGlobalSecondaryIndexSettingsUpdate: /* @__PURE__ */ __name((_) => se_GlobalTableGlobalSecondaryIndexSettingsUpdateList(_, context), "GlobalTableGlobalSecondaryIndexSettingsUpdate"), + GlobalTableName: [], + GlobalTableProvisionedWriteCapacityAutoScalingSettingsUpdate: /* @__PURE__ */ __name((_) => se_AutoScalingSettingsUpdate(_, context), "GlobalTableProvisionedWriteCapacityAutoScalingSettingsUpdate"), + GlobalTableProvisionedWriteCapacityUnits: [], + ReplicaSettingsUpdate: /* @__PURE__ */ __name((_) => se_ReplicaSettingsUpdateList(_, context), "ReplicaSettingsUpdate") + }); +}, "se_UpdateGlobalTableSettingsInput"); +var se_UpdateItemInput = /* @__PURE__ */ 
__name((input, context) => { + return (0, import_smithy_client.take)(input, { + AttributeUpdates: /* @__PURE__ */ __name((_) => se_AttributeUpdates(_, context), "AttributeUpdates"), + ConditionExpression: [], + ConditionalOperator: [], + Expected: /* @__PURE__ */ __name((_) => se_ExpectedAttributeMap(_, context), "Expected"), + ExpressionAttributeNames: import_smithy_client._json, + ExpressionAttributeValues: /* @__PURE__ */ __name((_) => se_ExpressionAttributeValueMap(_, context), "ExpressionAttributeValues"), + Key: /* @__PURE__ */ __name((_) => se_Key(_, context), "Key"), + ReturnConsumedCapacity: [], + ReturnItemCollectionMetrics: [], + ReturnValues: [], + ReturnValuesOnConditionCheckFailure: [], + TableName: [], + UpdateExpression: [] + }); +}, "se_UpdateItemInput"); +var se_UpdateTableReplicaAutoScalingInput = /* @__PURE__ */ __name((input, context) => { + return (0, import_smithy_client.take)(input, { + GlobalSecondaryIndexUpdates: /* @__PURE__ */ __name((_) => se_GlobalSecondaryIndexAutoScalingUpdateList(_, context), "GlobalSecondaryIndexUpdates"), + ProvisionedWriteCapacityAutoScalingUpdate: /* @__PURE__ */ __name((_) => se_AutoScalingSettingsUpdate(_, context), "ProvisionedWriteCapacityAutoScalingUpdate"), + ReplicaUpdates: /* @__PURE__ */ __name((_) => se_ReplicaAutoScalingUpdateList(_, context), "ReplicaUpdates"), + TableName: [] + }); +}, "se_UpdateTableReplicaAutoScalingInput"); +var se_WriteRequest = /* @__PURE__ */ __name((input, context) => { + return (0, import_smithy_client.take)(input, { + DeleteRequest: /* @__PURE__ */ __name((_) => se_DeleteRequest(_, context), "DeleteRequest"), + PutRequest: /* @__PURE__ */ __name((_) => se_PutRequest(_, context), "PutRequest") + }); +}, "se_WriteRequest"); +var se_WriteRequests = /* @__PURE__ */ __name((input, context) => { + return input.filter((e) => e != null).map((entry) => { + return se_WriteRequest(entry, context); + }); +}, "se_WriteRequests"); +var de_ArchivalSummary = /* @__PURE__ */ __name((output, 
context) => { + return (0, import_smithy_client.take)(output, { + ArchivalBackupArn: import_smithy_client.expectString, + ArchivalDateTime: /* @__PURE__ */ __name((_) => (0, import_smithy_client.expectNonNull)((0, import_smithy_client.parseEpochTimestamp)((0, import_smithy_client.expectNumber)(_))), "ArchivalDateTime"), + ArchivalReason: import_smithy_client.expectString + }); +}, "de_ArchivalSummary"); +var de_AttributeMap = /* @__PURE__ */ __name((output, context) => { + return Object.entries(output).reduce((acc, [key, value]) => { + if (value === null) { + return acc; + } + acc[key] = de_AttributeValue((0, import_core.awsExpectUnion)(value), context); + return acc; + }, {}); +}, "de_AttributeMap"); +var de_AttributeValue = /* @__PURE__ */ __name((output, context) => { + if (output.B != null) { + return { + B: context.base64Decoder(output.B) + }; + } + if ((0, import_smithy_client.expectBoolean)(output.BOOL) !== void 0) { + return { BOOL: (0, import_smithy_client.expectBoolean)(output.BOOL) }; + } + if (output.BS != null) { + return { + BS: de_BinarySetAttributeValue(output.BS, context) + }; + } + if (output.L != null) { + return { + L: de_ListAttributeValue(output.L, context) + }; + } + if (output.M != null) { + return { + M: de_MapAttributeValue(output.M, context) + }; + } + if ((0, import_smithy_client.expectString)(output.N) !== void 0) { + return { N: (0, import_smithy_client.expectString)(output.N) }; + } + if (output.NS != null) { + return { + NS: (0, import_smithy_client._json)(output.NS) + }; + } + if ((0, import_smithy_client.expectBoolean)(output.NULL) !== void 0) { + return { NULL: (0, import_smithy_client.expectBoolean)(output.NULL) }; + } + if ((0, import_smithy_client.expectString)(output.S) !== void 0) { + return { S: (0, import_smithy_client.expectString)(output.S) }; + } + if (output.SS != null) { + return { + SS: (0, import_smithy_client._json)(output.SS) + }; + } + return { $unknown: Object.entries(output)[0] }; +}, "de_AttributeValue"); +var 
de_AutoScalingPolicyDescription = /* @__PURE__ */ __name((output, context) => { + return (0, import_smithy_client.take)(output, { + PolicyName: import_smithy_client.expectString, + TargetTrackingScalingPolicyConfiguration: /* @__PURE__ */ __name((_) => de_AutoScalingTargetTrackingScalingPolicyConfigurationDescription(_, context), "TargetTrackingScalingPolicyConfiguration") + }); +}, "de_AutoScalingPolicyDescription"); +var de_AutoScalingPolicyDescriptionList = /* @__PURE__ */ __name((output, context) => { + const retVal = (output || []).filter((e) => e != null).map((entry) => { + return de_AutoScalingPolicyDescription(entry, context); + }); + return retVal; +}, "de_AutoScalingPolicyDescriptionList"); +var de_AutoScalingSettingsDescription = /* @__PURE__ */ __name((output, context) => { + return (0, import_smithy_client.take)(output, { + AutoScalingDisabled: import_smithy_client.expectBoolean, + AutoScalingRoleArn: import_smithy_client.expectString, + MaximumUnits: import_smithy_client.expectLong, + MinimumUnits: import_smithy_client.expectLong, + ScalingPolicies: /* @__PURE__ */ __name((_) => de_AutoScalingPolicyDescriptionList(_, context), "ScalingPolicies") + }); +}, "de_AutoScalingSettingsDescription"); +var de_AutoScalingTargetTrackingScalingPolicyConfigurationDescription = /* @__PURE__ */ __name((output, context) => { + return (0, import_smithy_client.take)(output, { + DisableScaleIn: import_smithy_client.expectBoolean, + ScaleInCooldown: import_smithy_client.expectInt32, + ScaleOutCooldown: import_smithy_client.expectInt32, + TargetValue: import_smithy_client.limitedParseDouble + }); +}, "de_AutoScalingTargetTrackingScalingPolicyConfigurationDescription"); +var de_BackupDescription = /* @__PURE__ */ __name((output, context) => { + return (0, import_smithy_client.take)(output, { + BackupDetails: /* @__PURE__ */ __name((_) => de_BackupDetails(_, context), "BackupDetails"), + SourceTableDetails: /* @__PURE__ */ __name((_) => de_SourceTableDetails(_, context), 
"SourceTableDetails"), + SourceTableFeatureDetails: /* @__PURE__ */ __name((_) => de_SourceTableFeatureDetails(_, context), "SourceTableFeatureDetails") + }); +}, "de_BackupDescription"); +var de_BackupDetails = /* @__PURE__ */ __name((output, context) => { + return (0, import_smithy_client.take)(output, { + BackupArn: import_smithy_client.expectString, + BackupCreationDateTime: /* @__PURE__ */ __name((_) => (0, import_smithy_client.expectNonNull)((0, import_smithy_client.parseEpochTimestamp)((0, import_smithy_client.expectNumber)(_))), "BackupCreationDateTime"), + BackupExpiryDateTime: /* @__PURE__ */ __name((_) => (0, import_smithy_client.expectNonNull)((0, import_smithy_client.parseEpochTimestamp)((0, import_smithy_client.expectNumber)(_))), "BackupExpiryDateTime"), + BackupName: import_smithy_client.expectString, + BackupSizeBytes: import_smithy_client.expectLong, + BackupStatus: import_smithy_client.expectString, + BackupType: import_smithy_client.expectString + }); +}, "de_BackupDetails"); +var de_BackupSummaries = /* @__PURE__ */ __name((output, context) => { + const retVal = (output || []).filter((e) => e != null).map((entry) => { + return de_BackupSummary(entry, context); + }); + return retVal; +}, "de_BackupSummaries"); +var de_BackupSummary = /* @__PURE__ */ __name((output, context) => { + return (0, import_smithy_client.take)(output, { + BackupArn: import_smithy_client.expectString, + BackupCreationDateTime: /* @__PURE__ */ __name((_) => (0, import_smithy_client.expectNonNull)((0, import_smithy_client.parseEpochTimestamp)((0, import_smithy_client.expectNumber)(_))), "BackupCreationDateTime"), + BackupExpiryDateTime: /* @__PURE__ */ __name((_) => (0, import_smithy_client.expectNonNull)((0, import_smithy_client.parseEpochTimestamp)((0, import_smithy_client.expectNumber)(_))), "BackupExpiryDateTime"), + BackupName: import_smithy_client.expectString, + BackupSizeBytes: import_smithy_client.expectLong, + BackupStatus: import_smithy_client.expectString, + 
BackupType: import_smithy_client.expectString, + TableArn: import_smithy_client.expectString, + TableId: import_smithy_client.expectString, + TableName: import_smithy_client.expectString + }); +}, "de_BackupSummary"); +var de_BatchExecuteStatementOutput = /* @__PURE__ */ __name((output, context) => { + return (0, import_smithy_client.take)(output, { + ConsumedCapacity: /* @__PURE__ */ __name((_) => de_ConsumedCapacityMultiple(_, context), "ConsumedCapacity"), + Responses: /* @__PURE__ */ __name((_) => de_PartiQLBatchResponse(_, context), "Responses") + }); +}, "de_BatchExecuteStatementOutput"); +var de_BatchGetItemOutput = /* @__PURE__ */ __name((output, context) => { + return (0, import_smithy_client.take)(output, { + ConsumedCapacity: /* @__PURE__ */ __name((_) => de_ConsumedCapacityMultiple(_, context), "ConsumedCapacity"), + Responses: /* @__PURE__ */ __name((_) => de_BatchGetResponseMap(_, context), "Responses"), + UnprocessedKeys: /* @__PURE__ */ __name((_) => de_BatchGetRequestMap(_, context), "UnprocessedKeys") + }); +}, "de_BatchGetItemOutput"); +var de_BatchGetRequestMap = /* @__PURE__ */ __name((output, context) => { + return Object.entries(output).reduce((acc, [key, value]) => { + if (value === null) { + return acc; + } + acc[key] = de_KeysAndAttributes(value, context); + return acc; + }, {}); +}, "de_BatchGetRequestMap"); +var de_BatchGetResponseMap = /* @__PURE__ */ __name((output, context) => { + return Object.entries(output).reduce( + (acc, [key, value]) => { + if (value === null) { + return acc; + } + acc[key] = de_ItemList(value, context); + return acc; + }, + {} + ); +}, "de_BatchGetResponseMap"); +var de_BatchStatementError = /* @__PURE__ */ __name((output, context) => { + return (0, import_smithy_client.take)(output, { + Code: import_smithy_client.expectString, + Item: /* @__PURE__ */ __name((_) => de_AttributeMap(_, context), "Item"), + Message: import_smithy_client.expectString + }); +}, "de_BatchStatementError"); +var 
de_BatchStatementResponse = /* @__PURE__ */ __name((output, context) => { + return (0, import_smithy_client.take)(output, { + Error: /* @__PURE__ */ __name((_) => de_BatchStatementError(_, context), "Error"), + Item: /* @__PURE__ */ __name((_) => de_AttributeMap(_, context), "Item"), + TableName: import_smithy_client.expectString + }); +}, "de_BatchStatementResponse"); +var de_BatchWriteItemOutput = /* @__PURE__ */ __name((output, context) => { + return (0, import_smithy_client.take)(output, { + ConsumedCapacity: /* @__PURE__ */ __name((_) => de_ConsumedCapacityMultiple(_, context), "ConsumedCapacity"), + ItemCollectionMetrics: /* @__PURE__ */ __name((_) => de_ItemCollectionMetricsPerTable(_, context), "ItemCollectionMetrics"), + UnprocessedItems: /* @__PURE__ */ __name((_) => de_BatchWriteItemRequestMap(_, context), "UnprocessedItems") + }); +}, "de_BatchWriteItemOutput"); +var de_BatchWriteItemRequestMap = /* @__PURE__ */ __name((output, context) => { + return Object.entries(output).reduce((acc, [key, value]) => { + if (value === null) { + return acc; + } + acc[key] = de_WriteRequests(value, context); + return acc; + }, {}); +}, "de_BatchWriteItemRequestMap"); +var de_BillingModeSummary = /* @__PURE__ */ __name((output, context) => { + return (0, import_smithy_client.take)(output, { + BillingMode: import_smithy_client.expectString, + LastUpdateToPayPerRequestDateTime: /* @__PURE__ */ __name((_) => (0, import_smithy_client.expectNonNull)((0, import_smithy_client.parseEpochTimestamp)((0, import_smithy_client.expectNumber)(_))), "LastUpdateToPayPerRequestDateTime") + }); +}, "de_BillingModeSummary"); +var de_BinarySetAttributeValue = /* @__PURE__ */ __name((output, context) => { + const retVal = (output || []).filter((e) => e != null).map((entry) => { + return context.base64Decoder(entry); + }); + return retVal; +}, "de_BinarySetAttributeValue"); +var de_CancellationReason = /* @__PURE__ */ __name((output, context) => { + return (0, 
import_smithy_client.take)(output, { + Code: import_smithy_client.expectString, + Item: /* @__PURE__ */ __name((_) => de_AttributeMap(_, context), "Item"), + Message: import_smithy_client.expectString + }); +}, "de_CancellationReason"); +var de_CancellationReasonList = /* @__PURE__ */ __name((output, context) => { + const retVal = (output || []).filter((e) => e != null).map((entry) => { + return de_CancellationReason(entry, context); + }); + return retVal; +}, "de_CancellationReasonList"); +var de_Capacity = /* @__PURE__ */ __name((output, context) => { + return (0, import_smithy_client.take)(output, { + CapacityUnits: import_smithy_client.limitedParseDouble, + ReadCapacityUnits: import_smithy_client.limitedParseDouble, + WriteCapacityUnits: import_smithy_client.limitedParseDouble + }); +}, "de_Capacity"); +var de_ConditionalCheckFailedException = /* @__PURE__ */ __name((output, context) => { + return (0, import_smithy_client.take)(output, { + Item: /* @__PURE__ */ __name((_) => de_AttributeMap(_, context), "Item"), + message: import_smithy_client.expectString + }); +}, "de_ConditionalCheckFailedException"); +var de_ConsumedCapacity = /* @__PURE__ */ __name((output, context) => { + return (0, import_smithy_client.take)(output, { + CapacityUnits: import_smithy_client.limitedParseDouble, + GlobalSecondaryIndexes: /* @__PURE__ */ __name((_) => de_SecondaryIndexesCapacityMap(_, context), "GlobalSecondaryIndexes"), + LocalSecondaryIndexes: /* @__PURE__ */ __name((_) => de_SecondaryIndexesCapacityMap(_, context), "LocalSecondaryIndexes"), + ReadCapacityUnits: import_smithy_client.limitedParseDouble, + Table: /* @__PURE__ */ __name((_) => de_Capacity(_, context), "Table"), + TableName: import_smithy_client.expectString, + WriteCapacityUnits: import_smithy_client.limitedParseDouble + }); +}, "de_ConsumedCapacity"); +var de_ConsumedCapacityMultiple = /* @__PURE__ */ __name((output, context) => { + const retVal = (output || []).filter((e) => e != null).map((entry) => { + 
return de_ConsumedCapacity(entry, context); + }); + return retVal; +}, "de_ConsumedCapacityMultiple"); +var de_ContinuousBackupsDescription = /* @__PURE__ */ __name((output, context) => { + return (0, import_smithy_client.take)(output, { + ContinuousBackupsStatus: import_smithy_client.expectString, + PointInTimeRecoveryDescription: /* @__PURE__ */ __name((_) => de_PointInTimeRecoveryDescription(_, context), "PointInTimeRecoveryDescription") + }); +}, "de_ContinuousBackupsDescription"); +var de_CreateBackupOutput = /* @__PURE__ */ __name((output, context) => { + return (0, import_smithy_client.take)(output, { + BackupDetails: /* @__PURE__ */ __name((_) => de_BackupDetails(_, context), "BackupDetails") + }); +}, "de_CreateBackupOutput"); +var de_CreateGlobalTableOutput = /* @__PURE__ */ __name((output, context) => { + return (0, import_smithy_client.take)(output, { + GlobalTableDescription: /* @__PURE__ */ __name((_) => de_GlobalTableDescription(_, context), "GlobalTableDescription") + }); +}, "de_CreateGlobalTableOutput"); +var de_CreateTableOutput = /* @__PURE__ */ __name((output, context) => { + return (0, import_smithy_client.take)(output, { + TableDescription: /* @__PURE__ */ __name((_) => de_TableDescription(_, context), "TableDescription") + }); +}, "de_CreateTableOutput"); +var de_DeleteBackupOutput = /* @__PURE__ */ __name((output, context) => { + return (0, import_smithy_client.take)(output, { + BackupDescription: /* @__PURE__ */ __name((_) => de_BackupDescription(_, context), "BackupDescription") + }); +}, "de_DeleteBackupOutput"); +var de_DeleteItemOutput = /* @__PURE__ */ __name((output, context) => { + return (0, import_smithy_client.take)(output, { + Attributes: /* @__PURE__ */ __name((_) => de_AttributeMap(_, context), "Attributes"), + ConsumedCapacity: /* @__PURE__ */ __name((_) => de_ConsumedCapacity(_, context), "ConsumedCapacity"), + ItemCollectionMetrics: /* @__PURE__ */ __name((_) => de_ItemCollectionMetrics(_, context), "ItemCollectionMetrics") 
+ }); +}, "de_DeleteItemOutput"); +var de_DeleteRequest = /* @__PURE__ */ __name((output, context) => { + return (0, import_smithy_client.take)(output, { + Key: /* @__PURE__ */ __name((_) => de_Key(_, context), "Key") + }); +}, "de_DeleteRequest"); +var de_DeleteTableOutput = /* @__PURE__ */ __name((output, context) => { + return (0, import_smithy_client.take)(output, { + TableDescription: /* @__PURE__ */ __name((_) => de_TableDescription(_, context), "TableDescription") + }); +}, "de_DeleteTableOutput"); +var de_DescribeBackupOutput = /* @__PURE__ */ __name((output, context) => { + return (0, import_smithy_client.take)(output, { + BackupDescription: /* @__PURE__ */ __name((_) => de_BackupDescription(_, context), "BackupDescription") + }); +}, "de_DescribeBackupOutput"); +var de_DescribeContinuousBackupsOutput = /* @__PURE__ */ __name((output, context) => { + return (0, import_smithy_client.take)(output, { + ContinuousBackupsDescription: /* @__PURE__ */ __name((_) => de_ContinuousBackupsDescription(_, context), "ContinuousBackupsDescription") + }); +}, "de_DescribeContinuousBackupsOutput"); +var de_DescribeContributorInsightsOutput = /* @__PURE__ */ __name((output, context) => { + return (0, import_smithy_client.take)(output, { + ContributorInsightsRuleList: import_smithy_client._json, + ContributorInsightsStatus: import_smithy_client.expectString, + FailureException: import_smithy_client._json, + IndexName: import_smithy_client.expectString, + LastUpdateDateTime: /* @__PURE__ */ __name((_) => (0, import_smithy_client.expectNonNull)((0, import_smithy_client.parseEpochTimestamp)((0, import_smithy_client.expectNumber)(_))), "LastUpdateDateTime"), + TableName: import_smithy_client.expectString + }); +}, "de_DescribeContributorInsightsOutput"); +var de_DescribeExportOutput = /* @__PURE__ */ __name((output, context) => { + return (0, import_smithy_client.take)(output, { + ExportDescription: /* @__PURE__ */ __name((_) => de_ExportDescription(_, context), 
"ExportDescription") + }); +}, "de_DescribeExportOutput"); +var de_DescribeGlobalTableOutput = /* @__PURE__ */ __name((output, context) => { + return (0, import_smithy_client.take)(output, { + GlobalTableDescription: /* @__PURE__ */ __name((_) => de_GlobalTableDescription(_, context), "GlobalTableDescription") + }); +}, "de_DescribeGlobalTableOutput"); +var de_DescribeGlobalTableSettingsOutput = /* @__PURE__ */ __name((output, context) => { + return (0, import_smithy_client.take)(output, { + GlobalTableName: import_smithy_client.expectString, + ReplicaSettings: /* @__PURE__ */ __name((_) => de_ReplicaSettingsDescriptionList(_, context), "ReplicaSettings") + }); +}, "de_DescribeGlobalTableSettingsOutput"); +var de_DescribeImportOutput = /* @__PURE__ */ __name((output, context) => { + return (0, import_smithy_client.take)(output, { + ImportTableDescription: /* @__PURE__ */ __name((_) => de_ImportTableDescription(_, context), "ImportTableDescription") + }); +}, "de_DescribeImportOutput"); +var de_DescribeTableOutput = /* @__PURE__ */ __name((output, context) => { + return (0, import_smithy_client.take)(output, { + Table: /* @__PURE__ */ __name((_) => de_TableDescription(_, context), "Table") + }); +}, "de_DescribeTableOutput"); +var de_DescribeTableReplicaAutoScalingOutput = /* @__PURE__ */ __name((output, context) => { + return (0, import_smithy_client.take)(output, { + TableAutoScalingDescription: /* @__PURE__ */ __name((_) => de_TableAutoScalingDescription(_, context), "TableAutoScalingDescription") + }); +}, "de_DescribeTableReplicaAutoScalingOutput"); +var de_ExecuteStatementOutput = /* @__PURE__ */ __name((output, context) => { + return (0, import_smithy_client.take)(output, { + ConsumedCapacity: /* @__PURE__ */ __name((_) => de_ConsumedCapacity(_, context), "ConsumedCapacity"), + Items: /* @__PURE__ */ __name((_) => de_ItemList(_, context), "Items"), + LastEvaluatedKey: /* @__PURE__ */ __name((_) => de_Key(_, context), "LastEvaluatedKey"), + NextToken: 
import_smithy_client.expectString + }); +}, "de_ExecuteStatementOutput"); +var de_ExecuteTransactionOutput = /* @__PURE__ */ __name((output, context) => { + return (0, import_smithy_client.take)(output, { + ConsumedCapacity: /* @__PURE__ */ __name((_) => de_ConsumedCapacityMultiple(_, context), "ConsumedCapacity"), + Responses: /* @__PURE__ */ __name((_) => de_ItemResponseList(_, context), "Responses") + }); +}, "de_ExecuteTransactionOutput"); +var de_ExportDescription = /* @__PURE__ */ __name((output, context) => { + return (0, import_smithy_client.take)(output, { + BilledSizeBytes: import_smithy_client.expectLong, + ClientToken: import_smithy_client.expectString, + EndTime: /* @__PURE__ */ __name((_) => (0, import_smithy_client.expectNonNull)((0, import_smithy_client.parseEpochTimestamp)((0, import_smithy_client.expectNumber)(_))), "EndTime"), + ExportArn: import_smithy_client.expectString, + ExportFormat: import_smithy_client.expectString, + ExportManifest: import_smithy_client.expectString, + ExportStatus: import_smithy_client.expectString, + ExportTime: /* @__PURE__ */ __name((_) => (0, import_smithy_client.expectNonNull)((0, import_smithy_client.parseEpochTimestamp)((0, import_smithy_client.expectNumber)(_))), "ExportTime"), + ExportType: import_smithy_client.expectString, + FailureCode: import_smithy_client.expectString, + FailureMessage: import_smithy_client.expectString, + IncrementalExportSpecification: /* @__PURE__ */ __name((_) => de_IncrementalExportSpecification(_, context), "IncrementalExportSpecification"), + ItemCount: import_smithy_client.expectLong, + S3Bucket: import_smithy_client.expectString, + S3BucketOwner: import_smithy_client.expectString, + S3Prefix: import_smithy_client.expectString, + S3SseAlgorithm: import_smithy_client.expectString, + S3SseKmsKeyId: import_smithy_client.expectString, + StartTime: /* @__PURE__ */ __name((_) => (0, import_smithy_client.expectNonNull)((0, import_smithy_client.parseEpochTimestamp)((0, 
import_smithy_client.expectNumber)(_))), "StartTime"), + TableArn: import_smithy_client.expectString, + TableId: import_smithy_client.expectString + }); +}, "de_ExportDescription"); +var de_ExportTableToPointInTimeOutput = /* @__PURE__ */ __name((output, context) => { + return (0, import_smithy_client.take)(output, { + ExportDescription: /* @__PURE__ */ __name((_) => de_ExportDescription(_, context), "ExportDescription") + }); +}, "de_ExportTableToPointInTimeOutput"); +var de_GetItemOutput = /* @__PURE__ */ __name((output, context) => { + return (0, import_smithy_client.take)(output, { + ConsumedCapacity: /* @__PURE__ */ __name((_) => de_ConsumedCapacity(_, context), "ConsumedCapacity"), + Item: /* @__PURE__ */ __name((_) => de_AttributeMap(_, context), "Item") + }); +}, "de_GetItemOutput"); +var de_GlobalSecondaryIndexDescription = /* @__PURE__ */ __name((output, context) => { + return (0, import_smithy_client.take)(output, { + Backfilling: import_smithy_client.expectBoolean, + IndexArn: import_smithy_client.expectString, + IndexName: import_smithy_client.expectString, + IndexSizeBytes: import_smithy_client.expectLong, + IndexStatus: import_smithy_client.expectString, + ItemCount: import_smithy_client.expectLong, + KeySchema: import_smithy_client._json, + OnDemandThroughput: import_smithy_client._json, + Projection: import_smithy_client._json, + ProvisionedThroughput: /* @__PURE__ */ __name((_) => de_ProvisionedThroughputDescription(_, context), "ProvisionedThroughput"), + WarmThroughput: import_smithy_client._json + }); +}, "de_GlobalSecondaryIndexDescription"); +var de_GlobalSecondaryIndexDescriptionList = /* @__PURE__ */ __name((output, context) => { + const retVal = (output || []).filter((e) => e != null).map((entry) => { + return de_GlobalSecondaryIndexDescription(entry, context); + }); + return retVal; +}, "de_GlobalSecondaryIndexDescriptionList"); +var de_GlobalTableDescription = /* @__PURE__ */ __name((output, context) => { + return (0, 
import_smithy_client.take)(output, { + CreationDateTime: /* @__PURE__ */ __name((_) => (0, import_smithy_client.expectNonNull)((0, import_smithy_client.parseEpochTimestamp)((0, import_smithy_client.expectNumber)(_))), "CreationDateTime"), + GlobalTableArn: import_smithy_client.expectString, + GlobalTableName: import_smithy_client.expectString, + GlobalTableStatus: import_smithy_client.expectString, + ReplicationGroup: /* @__PURE__ */ __name((_) => de_ReplicaDescriptionList(_, context), "ReplicationGroup") + }); +}, "de_GlobalTableDescription"); +var de_ImportSummary = /* @__PURE__ */ __name((output, context) => { + return (0, import_smithy_client.take)(output, { + CloudWatchLogGroupArn: import_smithy_client.expectString, + EndTime: /* @__PURE__ */ __name((_) => (0, import_smithy_client.expectNonNull)((0, import_smithy_client.parseEpochTimestamp)((0, import_smithy_client.expectNumber)(_))), "EndTime"), + ImportArn: import_smithy_client.expectString, + ImportStatus: import_smithy_client.expectString, + InputFormat: import_smithy_client.expectString, + S3BucketSource: import_smithy_client._json, + StartTime: /* @__PURE__ */ __name((_) => (0, import_smithy_client.expectNonNull)((0, import_smithy_client.parseEpochTimestamp)((0, import_smithy_client.expectNumber)(_))), "StartTime"), + TableArn: import_smithy_client.expectString + }); +}, "de_ImportSummary"); +var de_ImportSummaryList = /* @__PURE__ */ __name((output, context) => { + const retVal = (output || []).filter((e) => e != null).map((entry) => { + return de_ImportSummary(entry, context); + }); + return retVal; +}, "de_ImportSummaryList"); +var de_ImportTableDescription = /* @__PURE__ */ __name((output, context) => { + return (0, import_smithy_client.take)(output, { + ClientToken: import_smithy_client.expectString, + CloudWatchLogGroupArn: import_smithy_client.expectString, + EndTime: /* @__PURE__ */ __name((_) => (0, import_smithy_client.expectNonNull)((0, import_smithy_client.parseEpochTimestamp)((0, 
import_smithy_client.expectNumber)(_))), "EndTime"), + ErrorCount: import_smithy_client.expectLong, + FailureCode: import_smithy_client.expectString, + FailureMessage: import_smithy_client.expectString, + ImportArn: import_smithy_client.expectString, + ImportStatus: import_smithy_client.expectString, + ImportedItemCount: import_smithy_client.expectLong, + InputCompressionType: import_smithy_client.expectString, + InputFormat: import_smithy_client.expectString, + InputFormatOptions: import_smithy_client._json, + ProcessedItemCount: import_smithy_client.expectLong, + ProcessedSizeBytes: import_smithy_client.expectLong, + S3BucketSource: import_smithy_client._json, + StartTime: /* @__PURE__ */ __name((_) => (0, import_smithy_client.expectNonNull)((0, import_smithy_client.parseEpochTimestamp)((0, import_smithy_client.expectNumber)(_))), "StartTime"), + TableArn: import_smithy_client.expectString, + TableCreationParameters: import_smithy_client._json, + TableId: import_smithy_client.expectString + }); +}, "de_ImportTableDescription"); +var de_ImportTableOutput = /* @__PURE__ */ __name((output, context) => { + return (0, import_smithy_client.take)(output, { + ImportTableDescription: /* @__PURE__ */ __name((_) => de_ImportTableDescription(_, context), "ImportTableDescription") + }); +}, "de_ImportTableOutput"); +var de_IncrementalExportSpecification = /* @__PURE__ */ __name((output, context) => { + return (0, import_smithy_client.take)(output, { + ExportFromTime: /* @__PURE__ */ __name((_) => (0, import_smithy_client.expectNonNull)((0, import_smithy_client.parseEpochTimestamp)((0, import_smithy_client.expectNumber)(_))), "ExportFromTime"), + ExportToTime: /* @__PURE__ */ __name((_) => (0, import_smithy_client.expectNonNull)((0, import_smithy_client.parseEpochTimestamp)((0, import_smithy_client.expectNumber)(_))), "ExportToTime"), + ExportViewType: import_smithy_client.expectString + }); +}, "de_IncrementalExportSpecification"); +var de_ItemCollectionKeyAttributeMap = /* 
@__PURE__ */ __name((output, context) => { + return Object.entries(output).reduce((acc, [key, value]) => { + if (value === null) { + return acc; + } + acc[key] = de_AttributeValue((0, import_core.awsExpectUnion)(value), context); + return acc; + }, {}); +}, "de_ItemCollectionKeyAttributeMap"); +var de_ItemCollectionMetrics = /* @__PURE__ */ __name((output, context) => { + return (0, import_smithy_client.take)(output, { + ItemCollectionKey: /* @__PURE__ */ __name((_) => de_ItemCollectionKeyAttributeMap(_, context), "ItemCollectionKey"), + SizeEstimateRangeGB: /* @__PURE__ */ __name((_) => de_ItemCollectionSizeEstimateRange(_, context), "SizeEstimateRangeGB") + }); +}, "de_ItemCollectionMetrics"); +var de_ItemCollectionMetricsMultiple = /* @__PURE__ */ __name((output, context) => { + const retVal = (output || []).filter((e) => e != null).map((entry) => { + return de_ItemCollectionMetrics(entry, context); + }); + return retVal; +}, "de_ItemCollectionMetricsMultiple"); +var de_ItemCollectionMetricsPerTable = /* @__PURE__ */ __name((output, context) => { + return Object.entries(output).reduce((acc, [key, value]) => { + if (value === null) { + return acc; + } + acc[key] = de_ItemCollectionMetricsMultiple(value, context); + return acc; + }, {}); +}, "de_ItemCollectionMetricsPerTable"); +var de_ItemCollectionSizeEstimateRange = /* @__PURE__ */ __name((output, context) => { + const retVal = (output || []).filter((e) => e != null).map((entry) => { + return (0, import_smithy_client.limitedParseDouble)(entry); + }); + return retVal; +}, "de_ItemCollectionSizeEstimateRange"); +var de_ItemList = /* @__PURE__ */ __name((output, context) => { + const retVal = (output || []).filter((e) => e != null).map((entry) => { + return de_AttributeMap(entry, context); + }); + return retVal; +}, "de_ItemList"); +var de_ItemResponse = /* @__PURE__ */ __name((output, context) => { + return (0, import_smithy_client.take)(output, { + Item: /* @__PURE__ */ __name((_) => de_AttributeMap(_, context), 
"Item") + }); +}, "de_ItemResponse"); +var de_ItemResponseList = /* @__PURE__ */ __name((output, context) => { + const retVal = (output || []).filter((e) => e != null).map((entry) => { + return de_ItemResponse(entry, context); + }); + return retVal; +}, "de_ItemResponseList"); +var de_Key = /* @__PURE__ */ __name((output, context) => { + return Object.entries(output).reduce((acc, [key, value]) => { + if (value === null) { + return acc; + } + acc[key] = de_AttributeValue((0, import_core.awsExpectUnion)(value), context); + return acc; + }, {}); +}, "de_Key"); +var de_KeyList = /* @__PURE__ */ __name((output, context) => { + const retVal = (output || []).filter((e) => e != null).map((entry) => { + return de_Key(entry, context); + }); + return retVal; +}, "de_KeyList"); +var de_KeysAndAttributes = /* @__PURE__ */ __name((output, context) => { + return (0, import_smithy_client.take)(output, { + AttributesToGet: import_smithy_client._json, + ConsistentRead: import_smithy_client.expectBoolean, + ExpressionAttributeNames: import_smithy_client._json, + Keys: /* @__PURE__ */ __name((_) => de_KeyList(_, context), "Keys"), + ProjectionExpression: import_smithy_client.expectString + }); +}, "de_KeysAndAttributes"); +var de_ListAttributeValue = /* @__PURE__ */ __name((output, context) => { + const retVal = (output || []).filter((e) => e != null).map((entry) => { + return de_AttributeValue((0, import_core.awsExpectUnion)(entry), context); + }); + return retVal; +}, "de_ListAttributeValue"); +var de_ListBackupsOutput = /* @__PURE__ */ __name((output, context) => { + return (0, import_smithy_client.take)(output, { + BackupSummaries: /* @__PURE__ */ __name((_) => de_BackupSummaries(_, context), "BackupSummaries"), + LastEvaluatedBackupArn: import_smithy_client.expectString + }); +}, "de_ListBackupsOutput"); +var de_ListImportsOutput = /* @__PURE__ */ __name((output, context) => { + return (0, import_smithy_client.take)(output, { + ImportSummaryList: /* @__PURE__ */ __name((_) => 
de_ImportSummaryList(_, context), "ImportSummaryList"), + NextToken: import_smithy_client.expectString + }); +}, "de_ListImportsOutput"); +var de_MapAttributeValue = /* @__PURE__ */ __name((output, context) => { + return Object.entries(output).reduce((acc, [key, value]) => { + if (value === null) { + return acc; + } + acc[key] = de_AttributeValue((0, import_core.awsExpectUnion)(value), context); + return acc; + }, {}); +}, "de_MapAttributeValue"); +var de_PartiQLBatchResponse = /* @__PURE__ */ __name((output, context) => { + const retVal = (output || []).filter((e) => e != null).map((entry) => { + return de_BatchStatementResponse(entry, context); + }); + return retVal; +}, "de_PartiQLBatchResponse"); +var de_PointInTimeRecoveryDescription = /* @__PURE__ */ __name((output, context) => { + return (0, import_smithy_client.take)(output, { + EarliestRestorableDateTime: /* @__PURE__ */ __name((_) => (0, import_smithy_client.expectNonNull)((0, import_smithy_client.parseEpochTimestamp)((0, import_smithy_client.expectNumber)(_))), "EarliestRestorableDateTime"), + LatestRestorableDateTime: /* @__PURE__ */ __name((_) => (0, import_smithy_client.expectNonNull)((0, import_smithy_client.parseEpochTimestamp)((0, import_smithy_client.expectNumber)(_))), "LatestRestorableDateTime"), + PointInTimeRecoveryStatus: import_smithy_client.expectString, + RecoveryPeriodInDays: import_smithy_client.expectInt32 + }); +}, "de_PointInTimeRecoveryDescription"); +var de_ProvisionedThroughputDescription = /* @__PURE__ */ __name((output, context) => { + return (0, import_smithy_client.take)(output, { + LastDecreaseDateTime: /* @__PURE__ */ __name((_) => (0, import_smithy_client.expectNonNull)((0, import_smithy_client.parseEpochTimestamp)((0, import_smithy_client.expectNumber)(_))), "LastDecreaseDateTime"), + LastIncreaseDateTime: /* @__PURE__ */ __name((_) => (0, import_smithy_client.expectNonNull)((0, import_smithy_client.parseEpochTimestamp)((0, import_smithy_client.expectNumber)(_))), 
"LastIncreaseDateTime"), + NumberOfDecreasesToday: import_smithy_client.expectLong, + ReadCapacityUnits: import_smithy_client.expectLong, + WriteCapacityUnits: import_smithy_client.expectLong + }); +}, "de_ProvisionedThroughputDescription"); +var de_PutItemInputAttributeMap = /* @__PURE__ */ __name((output, context) => { + return Object.entries(output).reduce((acc, [key, value]) => { + if (value === null) { + return acc; + } + acc[key] = de_AttributeValue((0, import_core.awsExpectUnion)(value), context); + return acc; + }, {}); +}, "de_PutItemInputAttributeMap"); +var de_PutItemOutput = /* @__PURE__ */ __name((output, context) => { + return (0, import_smithy_client.take)(output, { + Attributes: /* @__PURE__ */ __name((_) => de_AttributeMap(_, context), "Attributes"), + ConsumedCapacity: /* @__PURE__ */ __name((_) => de_ConsumedCapacity(_, context), "ConsumedCapacity"), + ItemCollectionMetrics: /* @__PURE__ */ __name((_) => de_ItemCollectionMetrics(_, context), "ItemCollectionMetrics") + }); +}, "de_PutItemOutput"); +var de_PutRequest = /* @__PURE__ */ __name((output, context) => { + return (0, import_smithy_client.take)(output, { + Item: /* @__PURE__ */ __name((_) => de_PutItemInputAttributeMap(_, context), "Item") + }); +}, "de_PutRequest"); +var de_QueryOutput = /* @__PURE__ */ __name((output, context) => { + return (0, import_smithy_client.take)(output, { + ConsumedCapacity: /* @__PURE__ */ __name((_) => de_ConsumedCapacity(_, context), "ConsumedCapacity"), + Count: import_smithy_client.expectInt32, + Items: /* @__PURE__ */ __name((_) => de_ItemList(_, context), "Items"), + LastEvaluatedKey: /* @__PURE__ */ __name((_) => de_Key(_, context), "LastEvaluatedKey"), + ScannedCount: import_smithy_client.expectInt32 + }); +}, "de_QueryOutput"); +var de_ReplicaAutoScalingDescription = /* @__PURE__ */ __name((output, context) => { + return (0, import_smithy_client.take)(output, { + GlobalSecondaryIndexes: /* @__PURE__ */ __name((_) => 
de_ReplicaGlobalSecondaryIndexAutoScalingDescriptionList(_, context), "GlobalSecondaryIndexes"), + RegionName: import_smithy_client.expectString, + ReplicaProvisionedReadCapacityAutoScalingSettings: /* @__PURE__ */ __name((_) => de_AutoScalingSettingsDescription(_, context), "ReplicaProvisionedReadCapacityAutoScalingSettings"), + ReplicaProvisionedWriteCapacityAutoScalingSettings: /* @__PURE__ */ __name((_) => de_AutoScalingSettingsDescription(_, context), "ReplicaProvisionedWriteCapacityAutoScalingSettings"), + ReplicaStatus: import_smithy_client.expectString + }); +}, "de_ReplicaAutoScalingDescription"); +var de_ReplicaAutoScalingDescriptionList = /* @__PURE__ */ __name((output, context) => { + const retVal = (output || []).filter((e) => e != null).map((entry) => { + return de_ReplicaAutoScalingDescription(entry, context); + }); + return retVal; +}, "de_ReplicaAutoScalingDescriptionList"); +var de_ReplicaDescription = /* @__PURE__ */ __name((output, context) => { + return (0, import_smithy_client.take)(output, { + GlobalSecondaryIndexes: import_smithy_client._json, + KMSMasterKeyId: import_smithy_client.expectString, + OnDemandThroughputOverride: import_smithy_client._json, + ProvisionedThroughputOverride: import_smithy_client._json, + RegionName: import_smithy_client.expectString, + ReplicaInaccessibleDateTime: /* @__PURE__ */ __name((_) => (0, import_smithy_client.expectNonNull)((0, import_smithy_client.parseEpochTimestamp)((0, import_smithy_client.expectNumber)(_))), "ReplicaInaccessibleDateTime"), + ReplicaStatus: import_smithy_client.expectString, + ReplicaStatusDescription: import_smithy_client.expectString, + ReplicaStatusPercentProgress: import_smithy_client.expectString, + ReplicaTableClassSummary: /* @__PURE__ */ __name((_) => de_TableClassSummary(_, context), "ReplicaTableClassSummary"), + WarmThroughput: import_smithy_client._json + }); +}, "de_ReplicaDescription"); +var de_ReplicaDescriptionList = /* @__PURE__ */ __name((output, context) => { + const 
retVal = (output || []).filter((e) => e != null).map((entry) => { + return de_ReplicaDescription(entry, context); + }); + return retVal; +}, "de_ReplicaDescriptionList"); +var de_ReplicaGlobalSecondaryIndexAutoScalingDescription = /* @__PURE__ */ __name((output, context) => { + return (0, import_smithy_client.take)(output, { + IndexName: import_smithy_client.expectString, + IndexStatus: import_smithy_client.expectString, + ProvisionedReadCapacityAutoScalingSettings: /* @__PURE__ */ __name((_) => de_AutoScalingSettingsDescription(_, context), "ProvisionedReadCapacityAutoScalingSettings"), + ProvisionedWriteCapacityAutoScalingSettings: /* @__PURE__ */ __name((_) => de_AutoScalingSettingsDescription(_, context), "ProvisionedWriteCapacityAutoScalingSettings") + }); +}, "de_ReplicaGlobalSecondaryIndexAutoScalingDescription"); +var de_ReplicaGlobalSecondaryIndexAutoScalingDescriptionList = /* @__PURE__ */ __name((output, context) => { + const retVal = (output || []).filter((e) => e != null).map((entry) => { + return de_ReplicaGlobalSecondaryIndexAutoScalingDescription(entry, context); + }); + return retVal; +}, "de_ReplicaGlobalSecondaryIndexAutoScalingDescriptionList"); +var de_ReplicaGlobalSecondaryIndexSettingsDescription = /* @__PURE__ */ __name((output, context) => { + return (0, import_smithy_client.take)(output, { + IndexName: import_smithy_client.expectString, + IndexStatus: import_smithy_client.expectString, + ProvisionedReadCapacityAutoScalingSettings: /* @__PURE__ */ __name((_) => de_AutoScalingSettingsDescription(_, context), "ProvisionedReadCapacityAutoScalingSettings"), + ProvisionedReadCapacityUnits: import_smithy_client.expectLong, + ProvisionedWriteCapacityAutoScalingSettings: /* @__PURE__ */ __name((_) => de_AutoScalingSettingsDescription(_, context), "ProvisionedWriteCapacityAutoScalingSettings"), + ProvisionedWriteCapacityUnits: import_smithy_client.expectLong + }); +}, "de_ReplicaGlobalSecondaryIndexSettingsDescription"); +var 
de_ReplicaGlobalSecondaryIndexSettingsDescriptionList = /* @__PURE__ */ __name((output, context) => { + const retVal = (output || []).filter((e) => e != null).map((entry) => { + return de_ReplicaGlobalSecondaryIndexSettingsDescription(entry, context); + }); + return retVal; +}, "de_ReplicaGlobalSecondaryIndexSettingsDescriptionList"); +var de_ReplicaSettingsDescription = /* @__PURE__ */ __name((output, context) => { + return (0, import_smithy_client.take)(output, { + RegionName: import_smithy_client.expectString, + ReplicaBillingModeSummary: /* @__PURE__ */ __name((_) => de_BillingModeSummary(_, context), "ReplicaBillingModeSummary"), + ReplicaGlobalSecondaryIndexSettings: /* @__PURE__ */ __name((_) => de_ReplicaGlobalSecondaryIndexSettingsDescriptionList(_, context), "ReplicaGlobalSecondaryIndexSettings"), + ReplicaProvisionedReadCapacityAutoScalingSettings: /* @__PURE__ */ __name((_) => de_AutoScalingSettingsDescription(_, context), "ReplicaProvisionedReadCapacityAutoScalingSettings"), + ReplicaProvisionedReadCapacityUnits: import_smithy_client.expectLong, + ReplicaProvisionedWriteCapacityAutoScalingSettings: /* @__PURE__ */ __name((_) => de_AutoScalingSettingsDescription(_, context), "ReplicaProvisionedWriteCapacityAutoScalingSettings"), + ReplicaProvisionedWriteCapacityUnits: import_smithy_client.expectLong, + ReplicaStatus: import_smithy_client.expectString, + ReplicaTableClassSummary: /* @__PURE__ */ __name((_) => de_TableClassSummary(_, context), "ReplicaTableClassSummary") + }); +}, "de_ReplicaSettingsDescription"); +var de_ReplicaSettingsDescriptionList = /* @__PURE__ */ __name((output, context) => { + const retVal = (output || []).filter((e) => e != null).map((entry) => { + return de_ReplicaSettingsDescription(entry, context); + }); + return retVal; +}, "de_ReplicaSettingsDescriptionList"); +var de_RestoreSummary = /* @__PURE__ */ __name((output, context) => { + return (0, import_smithy_client.take)(output, { + RestoreDateTime: /* @__PURE__ */ __name((_) 
=> (0, import_smithy_client.expectNonNull)((0, import_smithy_client.parseEpochTimestamp)((0, import_smithy_client.expectNumber)(_))), "RestoreDateTime"), + RestoreInProgress: import_smithy_client.expectBoolean, + SourceBackupArn: import_smithy_client.expectString, + SourceTableArn: import_smithy_client.expectString + }); +}, "de_RestoreSummary"); +var de_RestoreTableFromBackupOutput = /* @__PURE__ */ __name((output, context) => { + return (0, import_smithy_client.take)(output, { + TableDescription: /* @__PURE__ */ __name((_) => de_TableDescription(_, context), "TableDescription") + }); +}, "de_RestoreTableFromBackupOutput"); +var de_RestoreTableToPointInTimeOutput = /* @__PURE__ */ __name((output, context) => { + return (0, import_smithy_client.take)(output, { + TableDescription: /* @__PURE__ */ __name((_) => de_TableDescription(_, context), "TableDescription") + }); +}, "de_RestoreTableToPointInTimeOutput"); +var de_ScanOutput = /* @__PURE__ */ __name((output, context) => { + return (0, import_smithy_client.take)(output, { + ConsumedCapacity: /* @__PURE__ */ __name((_) => de_ConsumedCapacity(_, context), "ConsumedCapacity"), + Count: import_smithy_client.expectInt32, + Items: /* @__PURE__ */ __name((_) => de_ItemList(_, context), "Items"), + LastEvaluatedKey: /* @__PURE__ */ __name((_) => de_Key(_, context), "LastEvaluatedKey"), + ScannedCount: import_smithy_client.expectInt32 + }); +}, "de_ScanOutput"); +var de_SecondaryIndexesCapacityMap = /* @__PURE__ */ __name((output, context) => { + return Object.entries(output).reduce((acc, [key, value]) => { + if (value === null) { + return acc; + } + acc[key] = de_Capacity(value, context); + return acc; + }, {}); +}, "de_SecondaryIndexesCapacityMap"); +var de_SourceTableDetails = /* @__PURE__ */ __name((output, context) => { + return (0, import_smithy_client.take)(output, { + BillingMode: import_smithy_client.expectString, + ItemCount: import_smithy_client.expectLong, + KeySchema: import_smithy_client._json, + 
OnDemandThroughput: import_smithy_client._json, + ProvisionedThroughput: import_smithy_client._json, + TableArn: import_smithy_client.expectString, + TableCreationDateTime: /* @__PURE__ */ __name((_) => (0, import_smithy_client.expectNonNull)((0, import_smithy_client.parseEpochTimestamp)((0, import_smithy_client.expectNumber)(_))), "TableCreationDateTime"), + TableId: import_smithy_client.expectString, + TableName: import_smithy_client.expectString, + TableSizeBytes: import_smithy_client.expectLong + }); +}, "de_SourceTableDetails"); +var de_SourceTableFeatureDetails = /* @__PURE__ */ __name((output, context) => { + return (0, import_smithy_client.take)(output, { + GlobalSecondaryIndexes: import_smithy_client._json, + LocalSecondaryIndexes: import_smithy_client._json, + SSEDescription: /* @__PURE__ */ __name((_) => de_SSEDescription(_, context), "SSEDescription"), + StreamDescription: import_smithy_client._json, + TimeToLiveDescription: import_smithy_client._json + }); +}, "de_SourceTableFeatureDetails"); +var de_SSEDescription = /* @__PURE__ */ __name((output, context) => { + return (0, import_smithy_client.take)(output, { + InaccessibleEncryptionDateTime: /* @__PURE__ */ __name((_) => (0, import_smithy_client.expectNonNull)((0, import_smithy_client.parseEpochTimestamp)((0, import_smithy_client.expectNumber)(_))), "InaccessibleEncryptionDateTime"), + KMSMasterKeyArn: import_smithy_client.expectString, + SSEType: import_smithy_client.expectString, + Status: import_smithy_client.expectString + }); +}, "de_SSEDescription"); +var de_TableAutoScalingDescription = /* @__PURE__ */ __name((output, context) => { + return (0, import_smithy_client.take)(output, { + Replicas: /* @__PURE__ */ __name((_) => de_ReplicaAutoScalingDescriptionList(_, context), "Replicas"), + TableName: import_smithy_client.expectString, + TableStatus: import_smithy_client.expectString + }); +}, "de_TableAutoScalingDescription"); +var de_TableClassSummary = /* @__PURE__ */ __name((output, context) 
=> { + return (0, import_smithy_client.take)(output, { + LastUpdateDateTime: /* @__PURE__ */ __name((_) => (0, import_smithy_client.expectNonNull)((0, import_smithy_client.parseEpochTimestamp)((0, import_smithy_client.expectNumber)(_))), "LastUpdateDateTime"), + TableClass: import_smithy_client.expectString + }); +}, "de_TableClassSummary"); +var de_TableDescription = /* @__PURE__ */ __name((output, context) => { + return (0, import_smithy_client.take)(output, { + ArchivalSummary: /* @__PURE__ */ __name((_) => de_ArchivalSummary(_, context), "ArchivalSummary"), + AttributeDefinitions: import_smithy_client._json, + BillingModeSummary: /* @__PURE__ */ __name((_) => de_BillingModeSummary(_, context), "BillingModeSummary"), + CreationDateTime: /* @__PURE__ */ __name((_) => (0, import_smithy_client.expectNonNull)((0, import_smithy_client.parseEpochTimestamp)((0, import_smithy_client.expectNumber)(_))), "CreationDateTime"), + DeletionProtectionEnabled: import_smithy_client.expectBoolean, + GlobalSecondaryIndexes: /* @__PURE__ */ __name((_) => de_GlobalSecondaryIndexDescriptionList(_, context), "GlobalSecondaryIndexes"), + GlobalTableVersion: import_smithy_client.expectString, + ItemCount: import_smithy_client.expectLong, + KeySchema: import_smithy_client._json, + LatestStreamArn: import_smithy_client.expectString, + LatestStreamLabel: import_smithy_client.expectString, + LocalSecondaryIndexes: import_smithy_client._json, + MultiRegionConsistency: import_smithy_client.expectString, + OnDemandThroughput: import_smithy_client._json, + ProvisionedThroughput: /* @__PURE__ */ __name((_) => de_ProvisionedThroughputDescription(_, context), "ProvisionedThroughput"), + Replicas: /* @__PURE__ */ __name((_) => de_ReplicaDescriptionList(_, context), "Replicas"), + RestoreSummary: /* @__PURE__ */ __name((_) => de_RestoreSummary(_, context), "RestoreSummary"), + SSEDescription: /* @__PURE__ */ __name((_) => de_SSEDescription(_, context), "SSEDescription"), + StreamSpecification: 
import_smithy_client._json, + TableArn: import_smithy_client.expectString, + TableClassSummary: /* @__PURE__ */ __name((_) => de_TableClassSummary(_, context), "TableClassSummary"), + TableId: import_smithy_client.expectString, + TableName: import_smithy_client.expectString, + TableSizeBytes: import_smithy_client.expectLong, + TableStatus: import_smithy_client.expectString, + WarmThroughput: import_smithy_client._json + }); +}, "de_TableDescription"); +var de_TransactGetItemsOutput = /* @__PURE__ */ __name((output, context) => { + return (0, import_smithy_client.take)(output, { + ConsumedCapacity: /* @__PURE__ */ __name((_) => de_ConsumedCapacityMultiple(_, context), "ConsumedCapacity"), + Responses: /* @__PURE__ */ __name((_) => de_ItemResponseList(_, context), "Responses") + }); +}, "de_TransactGetItemsOutput"); +var de_TransactionCanceledException = /* @__PURE__ */ __name((output, context) => { + return (0, import_smithy_client.take)(output, { + CancellationReasons: /* @__PURE__ */ __name((_) => de_CancellationReasonList(_, context), "CancellationReasons"), + Message: import_smithy_client.expectString + }); +}, "de_TransactionCanceledException"); +var de_TransactWriteItemsOutput = /* @__PURE__ */ __name((output, context) => { + return (0, import_smithy_client.take)(output, { + ConsumedCapacity: /* @__PURE__ */ __name((_) => de_ConsumedCapacityMultiple(_, context), "ConsumedCapacity"), + ItemCollectionMetrics: /* @__PURE__ */ __name((_) => de_ItemCollectionMetricsPerTable(_, context), "ItemCollectionMetrics") + }); +}, "de_TransactWriteItemsOutput"); +var de_UpdateContinuousBackupsOutput = /* @__PURE__ */ __name((output, context) => { + return (0, import_smithy_client.take)(output, { + ContinuousBackupsDescription: /* @__PURE__ */ __name((_) => de_ContinuousBackupsDescription(_, context), "ContinuousBackupsDescription") + }); +}, "de_UpdateContinuousBackupsOutput"); +var de_UpdateGlobalTableOutput = /* @__PURE__ */ __name((output, context) => { + return (0, 
import_smithy_client.take)(output, { + GlobalTableDescription: /* @__PURE__ */ __name((_) => de_GlobalTableDescription(_, context), "GlobalTableDescription") + }); +}, "de_UpdateGlobalTableOutput"); +var de_UpdateGlobalTableSettingsOutput = /* @__PURE__ */ __name((output, context) => { + return (0, import_smithy_client.take)(output, { + GlobalTableName: import_smithy_client.expectString, + ReplicaSettings: /* @__PURE__ */ __name((_) => de_ReplicaSettingsDescriptionList(_, context), "ReplicaSettings") + }); +}, "de_UpdateGlobalTableSettingsOutput"); +var de_UpdateItemOutput = /* @__PURE__ */ __name((output, context) => { + return (0, import_smithy_client.take)(output, { + Attributes: /* @__PURE__ */ __name((_) => de_AttributeMap(_, context), "Attributes"), + ConsumedCapacity: /* @__PURE__ */ __name((_) => de_ConsumedCapacity(_, context), "ConsumedCapacity"), + ItemCollectionMetrics: /* @__PURE__ */ __name((_) => de_ItemCollectionMetrics(_, context), "ItemCollectionMetrics") + }); +}, "de_UpdateItemOutput"); +var de_UpdateTableOutput = /* @__PURE__ */ __name((output, context) => { + return (0, import_smithy_client.take)(output, { + TableDescription: /* @__PURE__ */ __name((_) => de_TableDescription(_, context), "TableDescription") + }); +}, "de_UpdateTableOutput"); +var de_UpdateTableReplicaAutoScalingOutput = /* @__PURE__ */ __name((output, context) => { + return (0, import_smithy_client.take)(output, { + TableAutoScalingDescription: /* @__PURE__ */ __name((_) => de_TableAutoScalingDescription(_, context), "TableAutoScalingDescription") + }); +}, "de_UpdateTableReplicaAutoScalingOutput"); +var de_WriteRequest = /* @__PURE__ */ __name((output, context) => { + return (0, import_smithy_client.take)(output, { + DeleteRequest: /* @__PURE__ */ __name((_) => de_DeleteRequest(_, context), "DeleteRequest"), + PutRequest: /* @__PURE__ */ __name((_) => de_PutRequest(_, context), "PutRequest") + }); +}, "de_WriteRequest"); +var de_WriteRequests = /* @__PURE__ */ __name((output, 
context) => { + const retVal = (output || []).filter((e) => e != null).map((entry) => { + return de_WriteRequest(entry, context); + }); + return retVal; +}, "de_WriteRequests"); +var deserializeMetadata = /* @__PURE__ */ __name((output) => ({ + httpStatusCode: output.statusCode, + requestId: output.headers["x-amzn-requestid"] ?? output.headers["x-amzn-request-id"] ?? output.headers["x-amz-request-id"], + extendedRequestId: output.headers["x-amz-id-2"], + cfId: output.headers["x-amz-cf-id"] +}), "deserializeMetadata"); +var throwDefaultError = (0, import_smithy_client.withBaseException)(DynamoDBServiceException); +var buildHttpRpcRequest = /* @__PURE__ */ __name(async (context, headers, path, resolvedHostname, body) => { + const { hostname, protocol = "https", port, path: basePath } = await context.endpoint(); + const contents = { + protocol, + hostname, + port, + method: "POST", + path: basePath.endsWith("/") ? basePath.slice(0, -1) + path : basePath + path, + headers + }; + if (resolvedHostname !== void 0) { + contents.hostname = resolvedHostname; + } + if (body !== void 0) { + contents.body = body; + } + return new import_protocol_http.HttpRequest(contents); +}, "buildHttpRpcRequest"); +function sharedHeaders(operation) { + return { + "content-type": "application/x-amz-json-1.0", + "x-amz-target": `DynamoDB_20120810.${operation}` + }; +} +__name(sharedHeaders, "sharedHeaders"); + +// src/commands/DescribeEndpointsCommand.ts +var DescribeEndpointsCommand = class extends import_smithy_client.Command.classBuilder().ep(commonParams).m(function(Command, cs, config, o) { + return [ + (0, import_middleware_serde.getSerdePlugin)(config, this.serialize, this.deserialize), + (0, import_middleware_endpoint.getEndpointPlugin)(config, Command.getEndpointParameterInstructions()) + ]; +}).s("DynamoDB_20120810", "DescribeEndpoints", {}).n("DynamoDBClient", "DescribeEndpointsCommand").f(void 0, void 0).ser(se_DescribeEndpointsCommand).de(de_DescribeEndpointsCommand).build() { + 
static { + __name(this, "DescribeEndpointsCommand"); + } +}; + +// src/DynamoDBClient.ts +var import_runtimeConfig = require("././runtimeConfig"); + +// src/runtimeExtensions.ts +var import_region_config_resolver = require("@aws-sdk/region-config-resolver"); + + + +// src/auth/httpAuthExtensionConfiguration.ts +var getHttpAuthExtensionConfiguration = /* @__PURE__ */ __name((runtimeConfig) => { + const _httpAuthSchemes = runtimeConfig.httpAuthSchemes; + let _httpAuthSchemeProvider = runtimeConfig.httpAuthSchemeProvider; + let _credentials = runtimeConfig.credentials; + return { + setHttpAuthScheme(httpAuthScheme) { + const index = _httpAuthSchemes.findIndex((scheme) => scheme.schemeId === httpAuthScheme.schemeId); + if (index === -1) { + _httpAuthSchemes.push(httpAuthScheme); + } else { + _httpAuthSchemes.splice(index, 1, httpAuthScheme); + } + }, + httpAuthSchemes() { + return _httpAuthSchemes; + }, + setHttpAuthSchemeProvider(httpAuthSchemeProvider) { + _httpAuthSchemeProvider = httpAuthSchemeProvider; + }, + httpAuthSchemeProvider() { + return _httpAuthSchemeProvider; + }, + setCredentials(credentials) { + _credentials = credentials; + }, + credentials() { + return _credentials; + } + }; +}, "getHttpAuthExtensionConfiguration"); +var resolveHttpAuthRuntimeConfig = /* @__PURE__ */ __name((config) => { + return { + httpAuthSchemes: config.httpAuthSchemes(), + httpAuthSchemeProvider: config.httpAuthSchemeProvider(), + credentials: config.credentials() + }; +}, "resolveHttpAuthRuntimeConfig"); + +// src/runtimeExtensions.ts +var resolveRuntimeExtensions = /* @__PURE__ */ __name((runtimeConfig, extensions) => { + const extensionConfiguration = Object.assign( + (0, import_region_config_resolver.getAwsRegionExtensionConfiguration)(runtimeConfig), + (0, import_smithy_client.getDefaultExtensionConfiguration)(runtimeConfig), + (0, import_protocol_http.getHttpHandlerExtensionConfiguration)(runtimeConfig), + getHttpAuthExtensionConfiguration(runtimeConfig) + ); + 
extensions.forEach((extension) => extension.configure(extensionConfiguration)); + return Object.assign( + runtimeConfig, + (0, import_region_config_resolver.resolveAwsRegionExtensionConfiguration)(extensionConfiguration), + (0, import_smithy_client.resolveDefaultRuntimeConfig)(extensionConfiguration), + (0, import_protocol_http.resolveHttpHandlerRuntimeConfig)(extensionConfiguration), + resolveHttpAuthRuntimeConfig(extensionConfiguration) + ); +}, "resolveRuntimeExtensions"); + +// src/DynamoDBClient.ts +var DynamoDBClient = class extends import_smithy_client.Client { + static { + __name(this, "DynamoDBClient"); + } + /** + * The resolved configuration of DynamoDBClient class. This is resolved and normalized from the {@link DynamoDBClientConfig | constructor configuration interface}. + */ + config; + constructor(...[configuration]) { + const _config_0 = (0, import_runtimeConfig.getRuntimeConfig)(configuration || {}); + super(_config_0); + this.initConfig = _config_0; + const _config_1 = resolveClientEndpointParameters(_config_0); + const _config_2 = (0, import_account_id_endpoint.resolveAccountIdEndpointModeConfig)(_config_1); + const _config_3 = (0, import_middleware_user_agent.resolveUserAgentConfig)(_config_2); + const _config_4 = (0, import_middleware_retry.resolveRetryConfig)(_config_3); + const _config_5 = (0, import_config_resolver.resolveRegionConfig)(_config_4); + const _config_6 = (0, import_middleware_host_header.resolveHostHeaderConfig)(_config_5); + const _config_7 = (0, import_middleware_endpoint.resolveEndpointConfig)(_config_6); + const _config_8 = (0, import_httpAuthSchemeProvider.resolveHttpAuthSchemeConfig)(_config_7); + const _config_9 = (0, import_middleware_endpoint_discovery.resolveEndpointDiscoveryConfig)(_config_8, { + endpointDiscoveryCommandCtor: DescribeEndpointsCommand + }); + const _config_10 = resolveRuntimeExtensions(_config_9, configuration?.extensions || []); + this.config = _config_10; + this.middlewareStack.use((0, 
import_middleware_user_agent.getUserAgentPlugin)(this.config)); + this.middlewareStack.use((0, import_middleware_retry.getRetryPlugin)(this.config)); + this.middlewareStack.use((0, import_middleware_content_length.getContentLengthPlugin)(this.config)); + this.middlewareStack.use((0, import_middleware_host_header.getHostHeaderPlugin)(this.config)); + this.middlewareStack.use((0, import_middleware_logger.getLoggerPlugin)(this.config)); + this.middlewareStack.use((0, import_middleware_recursion_detection.getRecursionDetectionPlugin)(this.config)); + this.middlewareStack.use( + (0, import_core2.getHttpAuthSchemeEndpointRuleSetPlugin)(this.config, { + httpAuthSchemeParametersProvider: import_httpAuthSchemeProvider.defaultDynamoDBHttpAuthSchemeParametersProvider, + identityProviderConfigProvider: /* @__PURE__ */ __name(async (config) => new import_core2.DefaultIdentityProviderConfig({ + "aws.auth#sigv4": config.credentials + }), "identityProviderConfigProvider") + }) + ); + this.middlewareStack.use((0, import_core2.getHttpSigningPlugin)(this.config)); + } + /** + * Destroy underlying resources, like sockets. It's usually not necessary to do this. + * However in Node.js, it's best to explicitly shut down the client's agent when it is no longer needed. + * Otherwise, sockets might stay open for quite a long time before the server terminates them. 
+ */ + destroy() { + super.destroy(); + } +}; + +// src/DynamoDB.ts + + +// src/commands/BatchExecuteStatementCommand.ts + + + +var BatchExecuteStatementCommand = class extends import_smithy_client.Command.classBuilder().ep(commonParams).m(function(Command, cs, config, o) { + return [ + (0, import_middleware_serde.getSerdePlugin)(config, this.serialize, this.deserialize), + (0, import_middleware_endpoint.getEndpointPlugin)(config, Command.getEndpointParameterInstructions()) + ]; +}).s("DynamoDB_20120810", "BatchExecuteStatement", {}).n("DynamoDBClient", "BatchExecuteStatementCommand").f(void 0, void 0).ser(se_BatchExecuteStatementCommand).de(de_BatchExecuteStatementCommand).build() { + static { + __name(this, "BatchExecuteStatementCommand"); + } +}; + +// src/commands/BatchGetItemCommand.ts + + + +var BatchGetItemCommand = class extends import_smithy_client.Command.classBuilder().ep({ + ...commonParams, + ResourceArnList: { type: "operationContextParams", get: /* @__PURE__ */ __name((input) => Object.keys(input?.RequestItems ?? {}), "get") } +}).m(function(Command, cs, config, o) { + return [ + (0, import_middleware_serde.getSerdePlugin)(config, this.serialize, this.deserialize), + (0, import_middleware_endpoint.getEndpointPlugin)(config, Command.getEndpointParameterInstructions()) + ]; +}).s("DynamoDB_20120810", "BatchGetItem", {}).n("DynamoDBClient", "BatchGetItemCommand").f(void 0, void 0).ser(se_BatchGetItemCommand).de(de_BatchGetItemCommand).build() { + static { + __name(this, "BatchGetItemCommand"); + } +}; + +// src/commands/BatchWriteItemCommand.ts + + + +var BatchWriteItemCommand = class extends import_smithy_client.Command.classBuilder().ep({ + ...commonParams, + ResourceArnList: { type: "operationContextParams", get: /* @__PURE__ */ __name((input) => Object.keys(input?.RequestItems ?? 
{}), "get") } +}).m(function(Command, cs, config, o) { + return [ + (0, import_middleware_serde.getSerdePlugin)(config, this.serialize, this.deserialize), + (0, import_middleware_endpoint.getEndpointPlugin)(config, Command.getEndpointParameterInstructions()) + ]; +}).s("DynamoDB_20120810", "BatchWriteItem", {}).n("DynamoDBClient", "BatchWriteItemCommand").f(void 0, void 0).ser(se_BatchWriteItemCommand).de(de_BatchWriteItemCommand).build() { + static { + __name(this, "BatchWriteItemCommand"); + } +}; + +// src/commands/CreateBackupCommand.ts + + + +var CreateBackupCommand = class extends import_smithy_client.Command.classBuilder().ep({ + ...commonParams, + ResourceArn: { type: "contextParams", name: "TableName" } +}).m(function(Command, cs, config, o) { + return [ + (0, import_middleware_serde.getSerdePlugin)(config, this.serialize, this.deserialize), + (0, import_middleware_endpoint.getEndpointPlugin)(config, Command.getEndpointParameterInstructions()) + ]; +}).s("DynamoDB_20120810", "CreateBackup", {}).n("DynamoDBClient", "CreateBackupCommand").f(void 0, void 0).ser(se_CreateBackupCommand).de(de_CreateBackupCommand).build() { + static { + __name(this, "CreateBackupCommand"); + } +}; + +// src/commands/CreateGlobalTableCommand.ts + + + +var CreateGlobalTableCommand = class extends import_smithy_client.Command.classBuilder().ep({ + ...commonParams, + ResourceArn: { type: "contextParams", name: "GlobalTableName" } +}).m(function(Command, cs, config, o) { + return [ + (0, import_middleware_serde.getSerdePlugin)(config, this.serialize, this.deserialize), + (0, import_middleware_endpoint.getEndpointPlugin)(config, Command.getEndpointParameterInstructions()) + ]; +}).s("DynamoDB_20120810", "CreateGlobalTable", {}).n("DynamoDBClient", "CreateGlobalTableCommand").f(void 0, void 0).ser(se_CreateGlobalTableCommand).de(de_CreateGlobalTableCommand).build() { + static { + __name(this, "CreateGlobalTableCommand"); + } +}; + +// src/commands/CreateTableCommand.ts + + + +var 
CreateTableCommand = class extends import_smithy_client.Command.classBuilder().ep({ + ...commonParams, + ResourceArn: { type: "contextParams", name: "TableName" } +}).m(function(Command, cs, config, o) { + return [ + (0, import_middleware_serde.getSerdePlugin)(config, this.serialize, this.deserialize), + (0, import_middleware_endpoint.getEndpointPlugin)(config, Command.getEndpointParameterInstructions()) + ]; +}).s("DynamoDB_20120810", "CreateTable", {}).n("DynamoDBClient", "CreateTableCommand").f(void 0, void 0).ser(se_CreateTableCommand).de(de_CreateTableCommand).build() { + static { + __name(this, "CreateTableCommand"); + } +}; + +// src/commands/DeleteBackupCommand.ts + + + +var DeleteBackupCommand = class extends import_smithy_client.Command.classBuilder().ep({ + ...commonParams, + ResourceArn: { type: "contextParams", name: "BackupArn" } +}).m(function(Command, cs, config, o) { + return [ + (0, import_middleware_serde.getSerdePlugin)(config, this.serialize, this.deserialize), + (0, import_middleware_endpoint.getEndpointPlugin)(config, Command.getEndpointParameterInstructions()) + ]; +}).s("DynamoDB_20120810", "DeleteBackup", {}).n("DynamoDBClient", "DeleteBackupCommand").f(void 0, void 0).ser(se_DeleteBackupCommand).de(de_DeleteBackupCommand).build() { + static { + __name(this, "DeleteBackupCommand"); + } +}; + +// src/commands/DeleteItemCommand.ts + + + +var DeleteItemCommand = class extends import_smithy_client.Command.classBuilder().ep({ + ...commonParams, + ResourceArn: { type: "contextParams", name: "TableName" } +}).m(function(Command, cs, config, o) { + return [ + (0, import_middleware_serde.getSerdePlugin)(config, this.serialize, this.deserialize), + (0, import_middleware_endpoint.getEndpointPlugin)(config, Command.getEndpointParameterInstructions()) + ]; +}).s("DynamoDB_20120810", "DeleteItem", {}).n("DynamoDBClient", "DeleteItemCommand").f(void 0, void 0).ser(se_DeleteItemCommand).de(de_DeleteItemCommand).build() { + static { + __name(this, 
"DeleteItemCommand"); + } +}; + +// src/commands/DeleteResourcePolicyCommand.ts + + + +var DeleteResourcePolicyCommand = class extends import_smithy_client.Command.classBuilder().ep({ + ...commonParams, + ResourceArn: { type: "contextParams", name: "ResourceArn" } +}).m(function(Command, cs, config, o) { + return [ + (0, import_middleware_serde.getSerdePlugin)(config, this.serialize, this.deserialize), + (0, import_middleware_endpoint.getEndpointPlugin)(config, Command.getEndpointParameterInstructions()) + ]; +}).s("DynamoDB_20120810", "DeleteResourcePolicy", {}).n("DynamoDBClient", "DeleteResourcePolicyCommand").f(void 0, void 0).ser(se_DeleteResourcePolicyCommand).de(de_DeleteResourcePolicyCommand).build() { + static { + __name(this, "DeleteResourcePolicyCommand"); + } +}; + +// src/commands/DeleteTableCommand.ts + + + +var DeleteTableCommand = class extends import_smithy_client.Command.classBuilder().ep({ + ...commonParams, + ResourceArn: { type: "contextParams", name: "TableName" } +}).m(function(Command, cs, config, o) { + return [ + (0, import_middleware_serde.getSerdePlugin)(config, this.serialize, this.deserialize), + (0, import_middleware_endpoint.getEndpointPlugin)(config, Command.getEndpointParameterInstructions()) + ]; +}).s("DynamoDB_20120810", "DeleteTable", {}).n("DynamoDBClient", "DeleteTableCommand").f(void 0, void 0).ser(se_DeleteTableCommand).de(de_DeleteTableCommand).build() { + static { + __name(this, "DeleteTableCommand"); + } +}; + +// src/commands/DescribeBackupCommand.ts + + + +var DescribeBackupCommand = class extends import_smithy_client.Command.classBuilder().ep({ + ...commonParams, + ResourceArn: { type: "contextParams", name: "BackupArn" } +}).m(function(Command, cs, config, o) { + return [ + (0, import_middleware_serde.getSerdePlugin)(config, this.serialize, this.deserialize), + (0, import_middleware_endpoint.getEndpointPlugin)(config, Command.getEndpointParameterInstructions()) + ]; +}).s("DynamoDB_20120810", "DescribeBackup", 
{}).n("DynamoDBClient", "DescribeBackupCommand").f(void 0, void 0).ser(se_DescribeBackupCommand).de(de_DescribeBackupCommand).build() { + static { + __name(this, "DescribeBackupCommand"); + } +}; + +// src/commands/DescribeContinuousBackupsCommand.ts + + + +var DescribeContinuousBackupsCommand = class extends import_smithy_client.Command.classBuilder().ep({ + ...commonParams, + ResourceArn: { type: "contextParams", name: "TableName" } +}).m(function(Command, cs, config, o) { + return [ + (0, import_middleware_serde.getSerdePlugin)(config, this.serialize, this.deserialize), + (0, import_middleware_endpoint.getEndpointPlugin)(config, Command.getEndpointParameterInstructions()) + ]; +}).s("DynamoDB_20120810", "DescribeContinuousBackups", {}).n("DynamoDBClient", "DescribeContinuousBackupsCommand").f(void 0, void 0).ser(se_DescribeContinuousBackupsCommand).de(de_DescribeContinuousBackupsCommand).build() { + static { + __name(this, "DescribeContinuousBackupsCommand"); + } +}; + +// src/commands/DescribeContributorInsightsCommand.ts + + + +var DescribeContributorInsightsCommand = class extends import_smithy_client.Command.classBuilder().ep({ + ...commonParams, + ResourceArn: { type: "contextParams", name: "TableName" } +}).m(function(Command, cs, config, o) { + return [ + (0, import_middleware_serde.getSerdePlugin)(config, this.serialize, this.deserialize), + (0, import_middleware_endpoint.getEndpointPlugin)(config, Command.getEndpointParameterInstructions()) + ]; +}).s("DynamoDB_20120810", "DescribeContributorInsights", {}).n("DynamoDBClient", "DescribeContributorInsightsCommand").f(void 0, void 0).ser(se_DescribeContributorInsightsCommand).de(de_DescribeContributorInsightsCommand).build() { + static { + __name(this, "DescribeContributorInsightsCommand"); + } +}; + +// src/commands/DescribeExportCommand.ts + + + +var DescribeExportCommand = class extends import_smithy_client.Command.classBuilder().ep({ + ...commonParams, + ResourceArn: { type: "contextParams", name: 
"ExportArn" } +}).m(function(Command, cs, config, o) { + return [ + (0, import_middleware_serde.getSerdePlugin)(config, this.serialize, this.deserialize), + (0, import_middleware_endpoint.getEndpointPlugin)(config, Command.getEndpointParameterInstructions()) + ]; +}).s("DynamoDB_20120810", "DescribeExport", {}).n("DynamoDBClient", "DescribeExportCommand").f(void 0, void 0).ser(se_DescribeExportCommand).de(de_DescribeExportCommand).build() { + static { + __name(this, "DescribeExportCommand"); + } +}; + +// src/commands/DescribeGlobalTableCommand.ts + + + +var DescribeGlobalTableCommand = class extends import_smithy_client.Command.classBuilder().ep({ + ...commonParams, + ResourceArn: { type: "contextParams", name: "GlobalTableName" } +}).m(function(Command, cs, config, o) { + return [ + (0, import_middleware_serde.getSerdePlugin)(config, this.serialize, this.deserialize), + (0, import_middleware_endpoint.getEndpointPlugin)(config, Command.getEndpointParameterInstructions()) + ]; +}).s("DynamoDB_20120810", "DescribeGlobalTable", {}).n("DynamoDBClient", "DescribeGlobalTableCommand").f(void 0, void 0).ser(se_DescribeGlobalTableCommand).de(de_DescribeGlobalTableCommand).build() { + static { + __name(this, "DescribeGlobalTableCommand"); + } +}; + +// src/commands/DescribeGlobalTableSettingsCommand.ts + + + +var DescribeGlobalTableSettingsCommand = class extends import_smithy_client.Command.classBuilder().ep({ + ...commonParams, + ResourceArn: { type: "contextParams", name: "GlobalTableName" } +}).m(function(Command, cs, config, o) { + return [ + (0, import_middleware_serde.getSerdePlugin)(config, this.serialize, this.deserialize), + (0, import_middleware_endpoint.getEndpointPlugin)(config, Command.getEndpointParameterInstructions()) + ]; +}).s("DynamoDB_20120810", "DescribeGlobalTableSettings", {}).n("DynamoDBClient", "DescribeGlobalTableSettingsCommand").f(void 0, void 0).ser(se_DescribeGlobalTableSettingsCommand).de(de_DescribeGlobalTableSettingsCommand).build() { + 
static { + __name(this, "DescribeGlobalTableSettingsCommand"); + } +}; + +// src/commands/DescribeImportCommand.ts + + + +var DescribeImportCommand = class extends import_smithy_client.Command.classBuilder().ep({ + ...commonParams, + ResourceArn: { type: "contextParams", name: "ImportArn" } +}).m(function(Command, cs, config, o) { + return [ + (0, import_middleware_serde.getSerdePlugin)(config, this.serialize, this.deserialize), + (0, import_middleware_endpoint.getEndpointPlugin)(config, Command.getEndpointParameterInstructions()) + ]; +}).s("DynamoDB_20120810", "DescribeImport", {}).n("DynamoDBClient", "DescribeImportCommand").f(void 0, void 0).ser(se_DescribeImportCommand).de(de_DescribeImportCommand).build() { + static { + __name(this, "DescribeImportCommand"); + } +}; + +// src/commands/DescribeKinesisStreamingDestinationCommand.ts + + + +var DescribeKinesisStreamingDestinationCommand = class extends import_smithy_client.Command.classBuilder().ep({ + ...commonParams, + ResourceArn: { type: "contextParams", name: "TableName" } +}).m(function(Command, cs, config, o) { + return [ + (0, import_middleware_serde.getSerdePlugin)(config, this.serialize, this.deserialize), + (0, import_middleware_endpoint.getEndpointPlugin)(config, Command.getEndpointParameterInstructions()) + ]; +}).s("DynamoDB_20120810", "DescribeKinesisStreamingDestination", {}).n("DynamoDBClient", "DescribeKinesisStreamingDestinationCommand").f(void 0, void 0).ser(se_DescribeKinesisStreamingDestinationCommand).de(de_DescribeKinesisStreamingDestinationCommand).build() { + static { + __name(this, "DescribeKinesisStreamingDestinationCommand"); + } +}; + +// src/commands/DescribeLimitsCommand.ts + + + +var DescribeLimitsCommand = class extends import_smithy_client.Command.classBuilder().ep(commonParams).m(function(Command, cs, config, o) { + return [ + (0, import_middleware_serde.getSerdePlugin)(config, this.serialize, this.deserialize), + (0, import_middleware_endpoint.getEndpointPlugin)(config, 
Command.getEndpointParameterInstructions()) + ]; +}).s("DynamoDB_20120810", "DescribeLimits", {}).n("DynamoDBClient", "DescribeLimitsCommand").f(void 0, void 0).ser(se_DescribeLimitsCommand).de(de_DescribeLimitsCommand).build() { + static { + __name(this, "DescribeLimitsCommand"); + } +}; + +// src/commands/DescribeTableCommand.ts + + + +var DescribeTableCommand = class extends import_smithy_client.Command.classBuilder().ep({ + ...commonParams, + ResourceArn: { type: "contextParams", name: "TableName" } +}).m(function(Command, cs, config, o) { + return [ + (0, import_middleware_serde.getSerdePlugin)(config, this.serialize, this.deserialize), + (0, import_middleware_endpoint.getEndpointPlugin)(config, Command.getEndpointParameterInstructions()) + ]; +}).s("DynamoDB_20120810", "DescribeTable", {}).n("DynamoDBClient", "DescribeTableCommand").f(void 0, void 0).ser(se_DescribeTableCommand).de(de_DescribeTableCommand).build() { + static { + __name(this, "DescribeTableCommand"); + } +}; + +// src/commands/DescribeTableReplicaAutoScalingCommand.ts + + + +var DescribeTableReplicaAutoScalingCommand = class extends import_smithy_client.Command.classBuilder().ep({ + ...commonParams, + ResourceArn: { type: "contextParams", name: "TableName" } +}).m(function(Command, cs, config, o) { + return [ + (0, import_middleware_serde.getSerdePlugin)(config, this.serialize, this.deserialize), + (0, import_middleware_endpoint.getEndpointPlugin)(config, Command.getEndpointParameterInstructions()) + ]; +}).s("DynamoDB_20120810", "DescribeTableReplicaAutoScaling", {}).n("DynamoDBClient", "DescribeTableReplicaAutoScalingCommand").f(void 0, void 0).ser(se_DescribeTableReplicaAutoScalingCommand).de(de_DescribeTableReplicaAutoScalingCommand).build() { + static { + __name(this, "DescribeTableReplicaAutoScalingCommand"); + } +}; + +// src/commands/DescribeTimeToLiveCommand.ts + + + +var DescribeTimeToLiveCommand = class extends import_smithy_client.Command.classBuilder().ep({ + ...commonParams, + 
ResourceArn: { type: "contextParams", name: "TableName" } +}).m(function(Command, cs, config, o) { + return [ + (0, import_middleware_serde.getSerdePlugin)(config, this.serialize, this.deserialize), + (0, import_middleware_endpoint.getEndpointPlugin)(config, Command.getEndpointParameterInstructions()) + ]; +}).s("DynamoDB_20120810", "DescribeTimeToLive", {}).n("DynamoDBClient", "DescribeTimeToLiveCommand").f(void 0, void 0).ser(se_DescribeTimeToLiveCommand).de(de_DescribeTimeToLiveCommand).build() { + static { + __name(this, "DescribeTimeToLiveCommand"); + } +}; + +// src/commands/DisableKinesisStreamingDestinationCommand.ts + + + +var DisableKinesisStreamingDestinationCommand = class extends import_smithy_client.Command.classBuilder().ep({ + ...commonParams, + ResourceArn: { type: "contextParams", name: "TableName" } +}).m(function(Command, cs, config, o) { + return [ + (0, import_middleware_serde.getSerdePlugin)(config, this.serialize, this.deserialize), + (0, import_middleware_endpoint.getEndpointPlugin)(config, Command.getEndpointParameterInstructions()) + ]; +}).s("DynamoDB_20120810", "DisableKinesisStreamingDestination", {}).n("DynamoDBClient", "DisableKinesisStreamingDestinationCommand").f(void 0, void 0).ser(se_DisableKinesisStreamingDestinationCommand).de(de_DisableKinesisStreamingDestinationCommand).build() { + static { + __name(this, "DisableKinesisStreamingDestinationCommand"); + } +}; + +// src/commands/EnableKinesisStreamingDestinationCommand.ts + + + +var EnableKinesisStreamingDestinationCommand = class extends import_smithy_client.Command.classBuilder().ep({ + ...commonParams, + ResourceArn: { type: "contextParams", name: "TableName" } +}).m(function(Command, cs, config, o) { + return [ + (0, import_middleware_serde.getSerdePlugin)(config, this.serialize, this.deserialize), + (0, import_middleware_endpoint.getEndpointPlugin)(config, Command.getEndpointParameterInstructions()) + ]; +}).s("DynamoDB_20120810", "EnableKinesisStreamingDestination", 
{}).n("DynamoDBClient", "EnableKinesisStreamingDestinationCommand").f(void 0, void 0).ser(se_EnableKinesisStreamingDestinationCommand).de(de_EnableKinesisStreamingDestinationCommand).build() { + static { + __name(this, "EnableKinesisStreamingDestinationCommand"); + } +}; + +// src/commands/ExecuteStatementCommand.ts + + + +var ExecuteStatementCommand = class extends import_smithy_client.Command.classBuilder().ep(commonParams).m(function(Command, cs, config, o) { + return [ + (0, import_middleware_serde.getSerdePlugin)(config, this.serialize, this.deserialize), + (0, import_middleware_endpoint.getEndpointPlugin)(config, Command.getEndpointParameterInstructions()) + ]; +}).s("DynamoDB_20120810", "ExecuteStatement", {}).n("DynamoDBClient", "ExecuteStatementCommand").f(void 0, void 0).ser(se_ExecuteStatementCommand).de(de_ExecuteStatementCommand).build() { + static { + __name(this, "ExecuteStatementCommand"); + } +}; + +// src/commands/ExecuteTransactionCommand.ts + + + +var ExecuteTransactionCommand = class extends import_smithy_client.Command.classBuilder().ep(commonParams).m(function(Command, cs, config, o) { + return [ + (0, import_middleware_serde.getSerdePlugin)(config, this.serialize, this.deserialize), + (0, import_middleware_endpoint.getEndpointPlugin)(config, Command.getEndpointParameterInstructions()) + ]; +}).s("DynamoDB_20120810", "ExecuteTransaction", {}).n("DynamoDBClient", "ExecuteTransactionCommand").f(void 0, void 0).ser(se_ExecuteTransactionCommand).de(de_ExecuteTransactionCommand).build() { + static { + __name(this, "ExecuteTransactionCommand"); + } +}; + +// src/commands/ExportTableToPointInTimeCommand.ts + + + +var ExportTableToPointInTimeCommand = class extends import_smithy_client.Command.classBuilder().ep({ + ...commonParams, + ResourceArn: { type: "contextParams", name: "TableArn" } +}).m(function(Command, cs, config, o) { + return [ + (0, import_middleware_serde.getSerdePlugin)(config, this.serialize, this.deserialize), + (0, 
import_middleware_endpoint.getEndpointPlugin)(config, Command.getEndpointParameterInstructions()) + ]; +}).s("DynamoDB_20120810", "ExportTableToPointInTime", {}).n("DynamoDBClient", "ExportTableToPointInTimeCommand").f(void 0, void 0).ser(se_ExportTableToPointInTimeCommand).de(de_ExportTableToPointInTimeCommand).build() { + static { + __name(this, "ExportTableToPointInTimeCommand"); + } +}; + +// src/commands/GetItemCommand.ts + + + +var GetItemCommand = class extends import_smithy_client.Command.classBuilder().ep({ + ...commonParams, + ResourceArn: { type: "contextParams", name: "TableName" } +}).m(function(Command, cs, config, o) { + return [ + (0, import_middleware_serde.getSerdePlugin)(config, this.serialize, this.deserialize), + (0, import_middleware_endpoint.getEndpointPlugin)(config, Command.getEndpointParameterInstructions()) + ]; +}).s("DynamoDB_20120810", "GetItem", {}).n("DynamoDBClient", "GetItemCommand").f(void 0, void 0).ser(se_GetItemCommand).de(de_GetItemCommand).build() { + static { + __name(this, "GetItemCommand"); + } +}; + +// src/commands/GetResourcePolicyCommand.ts + + + +var GetResourcePolicyCommand = class extends import_smithy_client.Command.classBuilder().ep({ + ...commonParams, + ResourceArn: { type: "contextParams", name: "ResourceArn" } +}).m(function(Command, cs, config, o) { + return [ + (0, import_middleware_serde.getSerdePlugin)(config, this.serialize, this.deserialize), + (0, import_middleware_endpoint.getEndpointPlugin)(config, Command.getEndpointParameterInstructions()) + ]; +}).s("DynamoDB_20120810", "GetResourcePolicy", {}).n("DynamoDBClient", "GetResourcePolicyCommand").f(void 0, void 0).ser(se_GetResourcePolicyCommand).de(de_GetResourcePolicyCommand).build() { + static { + __name(this, "GetResourcePolicyCommand"); + } +}; + +// src/commands/ImportTableCommand.ts + + + +var ImportTableCommand = class extends import_smithy_client.Command.classBuilder().ep({ + ...commonParams, + ResourceArn: { type: "operationContextParams", 
get: /* @__PURE__ */ __name((input) => input?.TableCreationParameters?.TableName, "get") } +}).m(function(Command, cs, config, o) { + return [ + (0, import_middleware_serde.getSerdePlugin)(config, this.serialize, this.deserialize), + (0, import_middleware_endpoint.getEndpointPlugin)(config, Command.getEndpointParameterInstructions()) + ]; +}).s("DynamoDB_20120810", "ImportTable", {}).n("DynamoDBClient", "ImportTableCommand").f(void 0, void 0).ser(se_ImportTableCommand).de(de_ImportTableCommand).build() { + static { + __name(this, "ImportTableCommand"); + } +}; + +// src/commands/ListBackupsCommand.ts + + + +var ListBackupsCommand = class extends import_smithy_client.Command.classBuilder().ep({ + ...commonParams, + ResourceArn: { type: "contextParams", name: "TableName" } +}).m(function(Command, cs, config, o) { + return [ + (0, import_middleware_serde.getSerdePlugin)(config, this.serialize, this.deserialize), + (0, import_middleware_endpoint.getEndpointPlugin)(config, Command.getEndpointParameterInstructions()) + ]; +}).s("DynamoDB_20120810", "ListBackups", {}).n("DynamoDBClient", "ListBackupsCommand").f(void 0, void 0).ser(se_ListBackupsCommand).de(de_ListBackupsCommand).build() { + static { + __name(this, "ListBackupsCommand"); + } +}; + +// src/commands/ListContributorInsightsCommand.ts + + + +var ListContributorInsightsCommand = class extends import_smithy_client.Command.classBuilder().ep({ + ...commonParams, + ResourceArn: { type: "contextParams", name: "TableName" } +}).m(function(Command, cs, config, o) { + return [ + (0, import_middleware_serde.getSerdePlugin)(config, this.serialize, this.deserialize), + (0, import_middleware_endpoint.getEndpointPlugin)(config, Command.getEndpointParameterInstructions()) + ]; +}).s("DynamoDB_20120810", "ListContributorInsights", {}).n("DynamoDBClient", "ListContributorInsightsCommand").f(void 0, void 0).ser(se_ListContributorInsightsCommand).de(de_ListContributorInsightsCommand).build() { + static { + __name(this, 
"ListContributorInsightsCommand"); + } +}; + +// src/commands/ListExportsCommand.ts + + + +var ListExportsCommand = class extends import_smithy_client.Command.classBuilder().ep({ + ...commonParams, + ResourceArn: { type: "contextParams", name: "TableArn" } +}).m(function(Command, cs, config, o) { + return [ + (0, import_middleware_serde.getSerdePlugin)(config, this.serialize, this.deserialize), + (0, import_middleware_endpoint.getEndpointPlugin)(config, Command.getEndpointParameterInstructions()) + ]; +}).s("DynamoDB_20120810", "ListExports", {}).n("DynamoDBClient", "ListExportsCommand").f(void 0, void 0).ser(se_ListExportsCommand).de(de_ListExportsCommand).build() { + static { + __name(this, "ListExportsCommand"); + } +}; + +// src/commands/ListGlobalTablesCommand.ts + + + +var ListGlobalTablesCommand = class extends import_smithy_client.Command.classBuilder().ep(commonParams).m(function(Command, cs, config, o) { + return [ + (0, import_middleware_serde.getSerdePlugin)(config, this.serialize, this.deserialize), + (0, import_middleware_endpoint.getEndpointPlugin)(config, Command.getEndpointParameterInstructions()) + ]; +}).s("DynamoDB_20120810", "ListGlobalTables", {}).n("DynamoDBClient", "ListGlobalTablesCommand").f(void 0, void 0).ser(se_ListGlobalTablesCommand).de(de_ListGlobalTablesCommand).build() { + static { + __name(this, "ListGlobalTablesCommand"); + } +}; + +// src/commands/ListImportsCommand.ts + + + +var ListImportsCommand = class extends import_smithy_client.Command.classBuilder().ep({ + ...commonParams, + ResourceArn: { type: "contextParams", name: "TableArn" } +}).m(function(Command, cs, config, o) { + return [ + (0, import_middleware_serde.getSerdePlugin)(config, this.serialize, this.deserialize), + (0, import_middleware_endpoint.getEndpointPlugin)(config, Command.getEndpointParameterInstructions()) + ]; +}).s("DynamoDB_20120810", "ListImports", {}).n("DynamoDBClient", "ListImportsCommand").f(void 0, void 
0).ser(se_ListImportsCommand).de(de_ListImportsCommand).build() { + static { + __name(this, "ListImportsCommand"); + } +}; + +// src/commands/ListTablesCommand.ts + + + +var ListTablesCommand = class extends import_smithy_client.Command.classBuilder().ep(commonParams).m(function(Command, cs, config, o) { + return [ + (0, import_middleware_serde.getSerdePlugin)(config, this.serialize, this.deserialize), + (0, import_middleware_endpoint.getEndpointPlugin)(config, Command.getEndpointParameterInstructions()) + ]; +}).s("DynamoDB_20120810", "ListTables", {}).n("DynamoDBClient", "ListTablesCommand").f(void 0, void 0).ser(se_ListTablesCommand).de(de_ListTablesCommand).build() { + static { + __name(this, "ListTablesCommand"); + } +}; + +// src/commands/ListTagsOfResourceCommand.ts + + + +var ListTagsOfResourceCommand = class extends import_smithy_client.Command.classBuilder().ep({ + ...commonParams, + ResourceArn: { type: "contextParams", name: "ResourceArn" } +}).m(function(Command, cs, config, o) { + return [ + (0, import_middleware_serde.getSerdePlugin)(config, this.serialize, this.deserialize), + (0, import_middleware_endpoint.getEndpointPlugin)(config, Command.getEndpointParameterInstructions()) + ]; +}).s("DynamoDB_20120810", "ListTagsOfResource", {}).n("DynamoDBClient", "ListTagsOfResourceCommand").f(void 0, void 0).ser(se_ListTagsOfResourceCommand).de(de_ListTagsOfResourceCommand).build() { + static { + __name(this, "ListTagsOfResourceCommand"); + } +}; + +// src/commands/PutItemCommand.ts + + + +var PutItemCommand = class extends import_smithy_client.Command.classBuilder().ep({ + ...commonParams, + ResourceArn: { type: "contextParams", name: "TableName" } +}).m(function(Command, cs, config, o) { + return [ + (0, import_middleware_serde.getSerdePlugin)(config, this.serialize, this.deserialize), + (0, import_middleware_endpoint.getEndpointPlugin)(config, Command.getEndpointParameterInstructions()) + ]; +}).s("DynamoDB_20120810", "PutItem", {}).n("DynamoDBClient", 
"PutItemCommand").f(void 0, void 0).ser(se_PutItemCommand).de(de_PutItemCommand).build() { + static { + __name(this, "PutItemCommand"); + } +}; + +// src/commands/PutResourcePolicyCommand.ts + + + +var PutResourcePolicyCommand = class extends import_smithy_client.Command.classBuilder().ep({ + ...commonParams, + ResourceArn: { type: "contextParams", name: "ResourceArn" } +}).m(function(Command, cs, config, o) { + return [ + (0, import_middleware_serde.getSerdePlugin)(config, this.serialize, this.deserialize), + (0, import_middleware_endpoint.getEndpointPlugin)(config, Command.getEndpointParameterInstructions()) + ]; +}).s("DynamoDB_20120810", "PutResourcePolicy", {}).n("DynamoDBClient", "PutResourcePolicyCommand").f(void 0, void 0).ser(se_PutResourcePolicyCommand).de(de_PutResourcePolicyCommand).build() { + static { + __name(this, "PutResourcePolicyCommand"); + } +}; + +// src/commands/QueryCommand.ts + + + +var QueryCommand = class extends import_smithy_client.Command.classBuilder().ep({ + ...commonParams, + ResourceArn: { type: "contextParams", name: "TableName" } +}).m(function(Command, cs, config, o) { + return [ + (0, import_middleware_serde.getSerdePlugin)(config, this.serialize, this.deserialize), + (0, import_middleware_endpoint.getEndpointPlugin)(config, Command.getEndpointParameterInstructions()) + ]; +}).s("DynamoDB_20120810", "Query", {}).n("DynamoDBClient", "QueryCommand").f(void 0, void 0).ser(se_QueryCommand).de(de_QueryCommand).build() { + static { + __name(this, "QueryCommand"); + } +}; + +// src/commands/RestoreTableFromBackupCommand.ts + + + +var RestoreTableFromBackupCommand = class extends import_smithy_client.Command.classBuilder().ep({ + ...commonParams, + ResourceArn: { type: "contextParams", name: "TargetTableName" } +}).m(function(Command, cs, config, o) { + return [ + (0, import_middleware_serde.getSerdePlugin)(config, this.serialize, this.deserialize), + (0, import_middleware_endpoint.getEndpointPlugin)(config, 
Command.getEndpointParameterInstructions()) + ]; +}).s("DynamoDB_20120810", "RestoreTableFromBackup", {}).n("DynamoDBClient", "RestoreTableFromBackupCommand").f(void 0, void 0).ser(se_RestoreTableFromBackupCommand).de(de_RestoreTableFromBackupCommand).build() { + static { + __name(this, "RestoreTableFromBackupCommand"); + } +}; + +// src/commands/RestoreTableToPointInTimeCommand.ts + + + +var RestoreTableToPointInTimeCommand = class extends import_smithy_client.Command.classBuilder().ep({ + ...commonParams, + ResourceArn: { type: "contextParams", name: "TargetTableName" } +}).m(function(Command, cs, config, o) { + return [ + (0, import_middleware_serde.getSerdePlugin)(config, this.serialize, this.deserialize), + (0, import_middleware_endpoint.getEndpointPlugin)(config, Command.getEndpointParameterInstructions()) + ]; +}).s("DynamoDB_20120810", "RestoreTableToPointInTime", {}).n("DynamoDBClient", "RestoreTableToPointInTimeCommand").f(void 0, void 0).ser(se_RestoreTableToPointInTimeCommand).de(de_RestoreTableToPointInTimeCommand).build() { + static { + __name(this, "RestoreTableToPointInTimeCommand"); + } +}; + +// src/commands/ScanCommand.ts + + + +var ScanCommand = class extends import_smithy_client.Command.classBuilder().ep({ + ...commonParams, + ResourceArn: { type: "contextParams", name: "TableName" } +}).m(function(Command, cs, config, o) { + return [ + (0, import_middleware_serde.getSerdePlugin)(config, this.serialize, this.deserialize), + (0, import_middleware_endpoint.getEndpointPlugin)(config, Command.getEndpointParameterInstructions()) + ]; +}).s("DynamoDB_20120810", "Scan", {}).n("DynamoDBClient", "ScanCommand").f(void 0, void 0).ser(se_ScanCommand).de(de_ScanCommand).build() { + static { + __name(this, "ScanCommand"); + } +}; + +// src/commands/TagResourceCommand.ts + + + +var TagResourceCommand = class extends import_smithy_client.Command.classBuilder().ep({ + ...commonParams, + ResourceArn: { type: "contextParams", name: "ResourceArn" } 
+}).m(function(Command, cs, config, o) { + return [ + (0, import_middleware_serde.getSerdePlugin)(config, this.serialize, this.deserialize), + (0, import_middleware_endpoint.getEndpointPlugin)(config, Command.getEndpointParameterInstructions()) + ]; +}).s("DynamoDB_20120810", "TagResource", {}).n("DynamoDBClient", "TagResourceCommand").f(void 0, void 0).ser(se_TagResourceCommand).de(de_TagResourceCommand).build() { + static { + __name(this, "TagResourceCommand"); + } +}; + +// src/commands/TransactGetItemsCommand.ts + + + +var TransactGetItemsCommand = class extends import_smithy_client.Command.classBuilder().ep({ + ...commonParams, + ResourceArnList: { + type: "operationContextParams", + get: /* @__PURE__ */ __name((input) => input?.TransactItems?.map((obj) => obj?.Get?.TableName), "get") + } +}).m(function(Command, cs, config, o) { + return [ + (0, import_middleware_serde.getSerdePlugin)(config, this.serialize, this.deserialize), + (0, import_middleware_endpoint.getEndpointPlugin)(config, Command.getEndpointParameterInstructions()) + ]; +}).s("DynamoDB_20120810", "TransactGetItems", {}).n("DynamoDBClient", "TransactGetItemsCommand").f(void 0, void 0).ser(se_TransactGetItemsCommand).de(de_TransactGetItemsCommand).build() { + static { + __name(this, "TransactGetItemsCommand"); + } +}; + +// src/commands/TransactWriteItemsCommand.ts + + + +var TransactWriteItemsCommand = class extends import_smithy_client.Command.classBuilder().ep({ + ...commonParams, + ResourceArnList: { + type: "operationContextParams", + get: /* @__PURE__ */ __name((input) => input?.TransactItems?.map( + (obj) => [obj?.ConditionCheck?.TableName, obj?.Put?.TableName, obj?.Delete?.TableName, obj?.Update?.TableName].filter( + (i) => i + ) + ).flat(), "get") + } +}).m(function(Command, cs, config, o) { + return [ + (0, import_middleware_serde.getSerdePlugin)(config, this.serialize, this.deserialize), + (0, import_middleware_endpoint.getEndpointPlugin)(config, 
Command.getEndpointParameterInstructions()) + ]; +}).s("DynamoDB_20120810", "TransactWriteItems", {}).n("DynamoDBClient", "TransactWriteItemsCommand").f(void 0, void 0).ser(se_TransactWriteItemsCommand).de(de_TransactWriteItemsCommand).build() { + static { + __name(this, "TransactWriteItemsCommand"); + } +}; + +// src/commands/UntagResourceCommand.ts + + + +var UntagResourceCommand = class extends import_smithy_client.Command.classBuilder().ep({ + ...commonParams, + ResourceArn: { type: "contextParams", name: "ResourceArn" } +}).m(function(Command, cs, config, o) { + return [ + (0, import_middleware_serde.getSerdePlugin)(config, this.serialize, this.deserialize), + (0, import_middleware_endpoint.getEndpointPlugin)(config, Command.getEndpointParameterInstructions()) + ]; +}).s("DynamoDB_20120810", "UntagResource", {}).n("DynamoDBClient", "UntagResourceCommand").f(void 0, void 0).ser(se_UntagResourceCommand).de(de_UntagResourceCommand).build() { + static { + __name(this, "UntagResourceCommand"); + } +}; + +// src/commands/UpdateContinuousBackupsCommand.ts + + + +var UpdateContinuousBackupsCommand = class extends import_smithy_client.Command.classBuilder().ep({ + ...commonParams, + ResourceArn: { type: "contextParams", name: "TableName" } +}).m(function(Command, cs, config, o) { + return [ + (0, import_middleware_serde.getSerdePlugin)(config, this.serialize, this.deserialize), + (0, import_middleware_endpoint.getEndpointPlugin)(config, Command.getEndpointParameterInstructions()) + ]; +}).s("DynamoDB_20120810", "UpdateContinuousBackups", {}).n("DynamoDBClient", "UpdateContinuousBackupsCommand").f(void 0, void 0).ser(se_UpdateContinuousBackupsCommand).de(de_UpdateContinuousBackupsCommand).build() { + static { + __name(this, "UpdateContinuousBackupsCommand"); + } +}; + +// src/commands/UpdateContributorInsightsCommand.ts + + + +var UpdateContributorInsightsCommand = class extends import_smithy_client.Command.classBuilder().ep({ + ...commonParams, + ResourceArn: { type: 
"contextParams", name: "TableName" } +}).m(function(Command, cs, config, o) { + return [ + (0, import_middleware_serde.getSerdePlugin)(config, this.serialize, this.deserialize), + (0, import_middleware_endpoint.getEndpointPlugin)(config, Command.getEndpointParameterInstructions()) + ]; +}).s("DynamoDB_20120810", "UpdateContributorInsights", {}).n("DynamoDBClient", "UpdateContributorInsightsCommand").f(void 0, void 0).ser(se_UpdateContributorInsightsCommand).de(de_UpdateContributorInsightsCommand).build() { + static { + __name(this, "UpdateContributorInsightsCommand"); + } +}; + +// src/commands/UpdateGlobalTableCommand.ts + + + +var UpdateGlobalTableCommand = class extends import_smithy_client.Command.classBuilder().ep({ + ...commonParams, + ResourceArn: { type: "contextParams", name: "GlobalTableName" } +}).m(function(Command, cs, config, o) { + return [ + (0, import_middleware_serde.getSerdePlugin)(config, this.serialize, this.deserialize), + (0, import_middleware_endpoint.getEndpointPlugin)(config, Command.getEndpointParameterInstructions()) + ]; +}).s("DynamoDB_20120810", "UpdateGlobalTable", {}).n("DynamoDBClient", "UpdateGlobalTableCommand").f(void 0, void 0).ser(se_UpdateGlobalTableCommand).de(de_UpdateGlobalTableCommand).build() { + static { + __name(this, "UpdateGlobalTableCommand"); + } +}; + +// src/commands/UpdateGlobalTableSettingsCommand.ts + + + +var UpdateGlobalTableSettingsCommand = class extends import_smithy_client.Command.classBuilder().ep({ + ...commonParams, + ResourceArn: { type: "contextParams", name: "GlobalTableName" } +}).m(function(Command, cs, config, o) { + return [ + (0, import_middleware_serde.getSerdePlugin)(config, this.serialize, this.deserialize), + (0, import_middleware_endpoint.getEndpointPlugin)(config, Command.getEndpointParameterInstructions()) + ]; +}).s("DynamoDB_20120810", "UpdateGlobalTableSettings", {}).n("DynamoDBClient", "UpdateGlobalTableSettingsCommand").f(void 0, void 
0).ser(se_UpdateGlobalTableSettingsCommand).de(de_UpdateGlobalTableSettingsCommand).build() { + static { + __name(this, "UpdateGlobalTableSettingsCommand"); + } +}; + +// src/commands/UpdateItemCommand.ts + + + +var UpdateItemCommand = class extends import_smithy_client.Command.classBuilder().ep({ + ...commonParams, + ResourceArn: { type: "contextParams", name: "TableName" } +}).m(function(Command, cs, config, o) { + return [ + (0, import_middleware_serde.getSerdePlugin)(config, this.serialize, this.deserialize), + (0, import_middleware_endpoint.getEndpointPlugin)(config, Command.getEndpointParameterInstructions()) + ]; +}).s("DynamoDB_20120810", "UpdateItem", {}).n("DynamoDBClient", "UpdateItemCommand").f(void 0, void 0).ser(se_UpdateItemCommand).de(de_UpdateItemCommand).build() { + static { + __name(this, "UpdateItemCommand"); + } +}; + +// src/commands/UpdateKinesisStreamingDestinationCommand.ts + + + +var UpdateKinesisStreamingDestinationCommand = class extends import_smithy_client.Command.classBuilder().ep({ + ...commonParams, + ResourceArn: { type: "contextParams", name: "TableName" } +}).m(function(Command, cs, config, o) { + return [ + (0, import_middleware_serde.getSerdePlugin)(config, this.serialize, this.deserialize), + (0, import_middleware_endpoint.getEndpointPlugin)(config, Command.getEndpointParameterInstructions()) + ]; +}).s("DynamoDB_20120810", "UpdateKinesisStreamingDestination", {}).n("DynamoDBClient", "UpdateKinesisStreamingDestinationCommand").f(void 0, void 0).ser(se_UpdateKinesisStreamingDestinationCommand).de(de_UpdateKinesisStreamingDestinationCommand).build() { + static { + __name(this, "UpdateKinesisStreamingDestinationCommand"); + } +}; + +// src/commands/UpdateTableCommand.ts + + + +var UpdateTableCommand = class extends import_smithy_client.Command.classBuilder().ep({ + ...commonParams, + ResourceArn: { type: "contextParams", name: "TableName" } +}).m(function(Command, cs, config, o) { + return [ + (0, 
import_middleware_serde.getSerdePlugin)(config, this.serialize, this.deserialize), + (0, import_middleware_endpoint.getEndpointPlugin)(config, Command.getEndpointParameterInstructions()) + ]; +}).s("DynamoDB_20120810", "UpdateTable", {}).n("DynamoDBClient", "UpdateTableCommand").f(void 0, void 0).ser(se_UpdateTableCommand).de(de_UpdateTableCommand).build() { + static { + __name(this, "UpdateTableCommand"); + } +}; + +// src/commands/UpdateTableReplicaAutoScalingCommand.ts + + + +var UpdateTableReplicaAutoScalingCommand = class extends import_smithy_client.Command.classBuilder().ep({ + ...commonParams, + ResourceArn: { type: "contextParams", name: "TableName" } +}).m(function(Command, cs, config, o) { + return [ + (0, import_middleware_serde.getSerdePlugin)(config, this.serialize, this.deserialize), + (0, import_middleware_endpoint.getEndpointPlugin)(config, Command.getEndpointParameterInstructions()) + ]; +}).s("DynamoDB_20120810", "UpdateTableReplicaAutoScaling", {}).n("DynamoDBClient", "UpdateTableReplicaAutoScalingCommand").f(void 0, void 0).ser(se_UpdateTableReplicaAutoScalingCommand).de(de_UpdateTableReplicaAutoScalingCommand).build() { + static { + __name(this, "UpdateTableReplicaAutoScalingCommand"); + } +}; + +// src/commands/UpdateTimeToLiveCommand.ts + + + +var UpdateTimeToLiveCommand = class extends import_smithy_client.Command.classBuilder().ep({ + ...commonParams, + ResourceArn: { type: "contextParams", name: "TableName" } +}).m(function(Command, cs, config, o) { + return [ + (0, import_middleware_serde.getSerdePlugin)(config, this.serialize, this.deserialize), + (0, import_middleware_endpoint.getEndpointPlugin)(config, Command.getEndpointParameterInstructions()) + ]; +}).s("DynamoDB_20120810", "UpdateTimeToLive", {}).n("DynamoDBClient", "UpdateTimeToLiveCommand").f(void 0, void 0).ser(se_UpdateTimeToLiveCommand).de(de_UpdateTimeToLiveCommand).build() { + static { + __name(this, "UpdateTimeToLiveCommand"); + } +}; + +// src/DynamoDB.ts +var commands = 
{ + BatchExecuteStatementCommand, + BatchGetItemCommand, + BatchWriteItemCommand, + CreateBackupCommand, + CreateGlobalTableCommand, + CreateTableCommand, + DeleteBackupCommand, + DeleteItemCommand, + DeleteResourcePolicyCommand, + DeleteTableCommand, + DescribeBackupCommand, + DescribeContinuousBackupsCommand, + DescribeContributorInsightsCommand, + DescribeEndpointsCommand, + DescribeExportCommand, + DescribeGlobalTableCommand, + DescribeGlobalTableSettingsCommand, + DescribeImportCommand, + DescribeKinesisStreamingDestinationCommand, + DescribeLimitsCommand, + DescribeTableCommand, + DescribeTableReplicaAutoScalingCommand, + DescribeTimeToLiveCommand, + DisableKinesisStreamingDestinationCommand, + EnableKinesisStreamingDestinationCommand, + ExecuteStatementCommand, + ExecuteTransactionCommand, + ExportTableToPointInTimeCommand, + GetItemCommand, + GetResourcePolicyCommand, + ImportTableCommand, + ListBackupsCommand, + ListContributorInsightsCommand, + ListExportsCommand, + ListGlobalTablesCommand, + ListImportsCommand, + ListTablesCommand, + ListTagsOfResourceCommand, + PutItemCommand, + PutResourcePolicyCommand, + QueryCommand, + RestoreTableFromBackupCommand, + RestoreTableToPointInTimeCommand, + ScanCommand, + TagResourceCommand, + TransactGetItemsCommand, + TransactWriteItemsCommand, + UntagResourceCommand, + UpdateContinuousBackupsCommand, + UpdateContributorInsightsCommand, + UpdateGlobalTableCommand, + UpdateGlobalTableSettingsCommand, + UpdateItemCommand, + UpdateKinesisStreamingDestinationCommand, + UpdateTableCommand, + UpdateTableReplicaAutoScalingCommand, + UpdateTimeToLiveCommand +}; +var DynamoDB = class extends DynamoDBClient { + static { + __name(this, "DynamoDB"); + } +}; +(0, import_smithy_client.createAggregatedClient)(commands, DynamoDB); + +// src/pagination/ListContributorInsightsPaginator.ts +var import_core3 = require("@smithy/core"); +var paginateListContributorInsights = (0, import_core3.createPaginator)(DynamoDBClient, 
ListContributorInsightsCommand, "NextToken", "NextToken", "MaxResults"); + +// src/pagination/ListExportsPaginator.ts +var import_core4 = require("@smithy/core"); +var paginateListExports = (0, import_core4.createPaginator)(DynamoDBClient, ListExportsCommand, "NextToken", "NextToken", "MaxResults"); + +// src/pagination/ListImportsPaginator.ts +var import_core5 = require("@smithy/core"); +var paginateListImports = (0, import_core5.createPaginator)(DynamoDBClient, ListImportsCommand, "NextToken", "NextToken", "PageSize"); + +// src/pagination/ListTablesPaginator.ts +var import_core6 = require("@smithy/core"); +var paginateListTables = (0, import_core6.createPaginator)(DynamoDBClient, ListTablesCommand, "ExclusiveStartTableName", "LastEvaluatedTableName", "Limit"); + +// src/pagination/QueryPaginator.ts +var import_core7 = require("@smithy/core"); +var paginateQuery = (0, import_core7.createPaginator)(DynamoDBClient, QueryCommand, "ExclusiveStartKey", "LastEvaluatedKey", "Limit"); + +// src/pagination/ScanPaginator.ts +var import_core8 = require("@smithy/core"); +var paginateScan = (0, import_core8.createPaginator)(DynamoDBClient, ScanCommand, "ExclusiveStartKey", "LastEvaluatedKey", "Limit"); + +// src/waiters/waitForTableExists.ts +var import_util_waiter = require("@smithy/util-waiter"); +var checkState = /* @__PURE__ */ __name(async (client, input) => { + let reason; + try { + const result = await client.send(new DescribeTableCommand(input)); + reason = result; + try { + const returnComparator = /* @__PURE__ */ __name(() => { + return result.Table.TableStatus; + }, "returnComparator"); + if (returnComparator() === "ACTIVE") { + return { state: import_util_waiter.WaiterState.SUCCESS, reason }; + } + } catch (e) { + } + } catch (exception) { + reason = exception; + if (exception.name && exception.name == "ResourceNotFoundException") { + return { state: import_util_waiter.WaiterState.RETRY, reason }; + } + } + return { state: import_util_waiter.WaiterState.RETRY, 
reason }; +}, "checkState"); +var waitForTableExists = /* @__PURE__ */ __name(async (params, input) => { + const serviceDefaults = { minDelay: 20, maxDelay: 120 }; + return (0, import_util_waiter.createWaiter)({ ...serviceDefaults, ...params }, input, checkState); +}, "waitForTableExists"); +var waitUntilTableExists = /* @__PURE__ */ __name(async (params, input) => { + const serviceDefaults = { minDelay: 20, maxDelay: 120 }; + const result = await (0, import_util_waiter.createWaiter)({ ...serviceDefaults, ...params }, input, checkState); + return (0, import_util_waiter.checkExceptions)(result); +}, "waitUntilTableExists"); + +// src/waiters/waitForTableNotExists.ts + +var checkState2 = /* @__PURE__ */ __name(async (client, input) => { + let reason; + try { + const result = await client.send(new DescribeTableCommand(input)); + reason = result; + } catch (exception) { + reason = exception; + if (exception.name && exception.name == "ResourceNotFoundException") { + return { state: import_util_waiter.WaiterState.SUCCESS, reason }; + } + } + return { state: import_util_waiter.WaiterState.RETRY, reason }; +}, "checkState"); +var waitForTableNotExists = /* @__PURE__ */ __name(async (params, input) => { + const serviceDefaults = { minDelay: 20, maxDelay: 120 }; + return (0, import_util_waiter.createWaiter)({ ...serviceDefaults, ...params }, input, checkState2); +}, "waitForTableNotExists"); +var waitUntilTableNotExists = /* @__PURE__ */ __name(async (params, input) => { + const serviceDefaults = { minDelay: 20, maxDelay: 120 }; + const result = await (0, import_util_waiter.createWaiter)({ ...serviceDefaults, ...params }, input, checkState2); + return (0, import_util_waiter.checkExceptions)(result); +}, "waitUntilTableNotExists"); +// Annotate the CommonJS export names for ESM import in node: + +0 && (module.exports = { + DynamoDBServiceException, + __Client, + DynamoDBClient, + DynamoDB, + $Command, + BatchExecuteStatementCommand, + BatchGetItemCommand, + 
BatchWriteItemCommand, + CreateBackupCommand, + CreateGlobalTableCommand, + CreateTableCommand, + DeleteBackupCommand, + DeleteItemCommand, + DeleteResourcePolicyCommand, + DeleteTableCommand, + DescribeBackupCommand, + DescribeContinuousBackupsCommand, + DescribeContributorInsightsCommand, + DescribeEndpointsCommand, + DescribeExportCommand, + DescribeGlobalTableCommand, + DescribeGlobalTableSettingsCommand, + DescribeImportCommand, + DescribeKinesisStreamingDestinationCommand, + DescribeLimitsCommand, + DescribeTableCommand, + DescribeTableReplicaAutoScalingCommand, + DescribeTimeToLiveCommand, + DisableKinesisStreamingDestinationCommand, + EnableKinesisStreamingDestinationCommand, + ExecuteStatementCommand, + ExecuteTransactionCommand, + ExportTableToPointInTimeCommand, + GetItemCommand, + GetResourcePolicyCommand, + ImportTableCommand, + ListBackupsCommand, + ListContributorInsightsCommand, + ListExportsCommand, + ListGlobalTablesCommand, + ListImportsCommand, + ListTablesCommand, + ListTagsOfResourceCommand, + PutItemCommand, + PutResourcePolicyCommand, + QueryCommand, + RestoreTableFromBackupCommand, + RestoreTableToPointInTimeCommand, + ScanCommand, + TagResourceCommand, + TransactGetItemsCommand, + TransactWriteItemsCommand, + UntagResourceCommand, + UpdateContinuousBackupsCommand, + UpdateContributorInsightsCommand, + UpdateGlobalTableCommand, + UpdateGlobalTableSettingsCommand, + UpdateItemCommand, + UpdateKinesisStreamingDestinationCommand, + UpdateTableCommand, + UpdateTableReplicaAutoScalingCommand, + UpdateTimeToLiveCommand, + paginateListContributorInsights, + paginateListExports, + paginateListImports, + paginateListTables, + paginateQuery, + paginateScan, + waitForTableExists, + waitUntilTableExists, + waitForTableNotExists, + waitUntilTableNotExists, + ApproximateCreationDateTimePrecision, + AttributeAction, + ScalarAttributeType, + BackupStatus, + BackupType, + BillingMode, + KeyType, + ProjectionType, + SSEType, + SSEStatus, + StreamViewType, + 
TimeToLiveStatus, + BackupInUseException, + BackupNotFoundException, + BackupTypeFilter, + ReturnConsumedCapacity, + ReturnValuesOnConditionCheckFailure, + BatchStatementErrorCodeEnum, + InternalServerError, + RequestLimitExceeded, + InvalidEndpointException, + ProvisionedThroughputExceededException, + ResourceNotFoundException, + ReturnItemCollectionMetrics, + ItemCollectionSizeLimitExceededException, + ComparisonOperator, + ConditionalOperator, + ContinuousBackupsStatus, + PointInTimeRecoveryStatus, + ContinuousBackupsUnavailableException, + ContributorInsightsAction, + ContributorInsightsStatus, + LimitExceededException, + TableInUseException, + TableNotFoundException, + GlobalTableStatus, + IndexStatus, + ReplicaStatus, + TableClass, + TableStatus, + GlobalTableAlreadyExistsException, + MultiRegionConsistency, + ResourceInUseException, + ReturnValue, + ReplicatedWriteConflictException, + TransactionConflictException, + PolicyNotFoundException, + ExportFormat, + ExportStatus, + ExportType, + ExportViewType, + S3SseAlgorithm, + ExportNotFoundException, + GlobalTableNotFoundException, + ImportStatus, + InputCompressionType, + InputFormat, + ImportNotFoundException, + DestinationStatus, + DuplicateItemException, + IdempotentParameterMismatchException, + TransactionInProgressException, + ExportConflictException, + InvalidExportTimeException, + PointInTimeRecoveryUnavailableException, + ImportConflictException, + Select, + TableAlreadyExistsException, + InvalidRestoreTimeException, + ReplicaAlreadyExistsException, + ReplicaNotFoundException, + IndexNotFoundException, + AttributeValue, + ConditionalCheckFailedException, + TransactionCanceledException +}); + diff --git a/amplify/functions/deleteDocument/node_modules/@aws-sdk/client-dynamodb/dist-cjs/runtimeConfig.browser.js b/amplify/functions/deleteDocument/node_modules/@aws-sdk/client-dynamodb/dist-cjs/runtimeConfig.browser.js new file mode 100644 index 0000000..be381dc --- /dev/null +++ 
b/amplify/functions/deleteDocument/node_modules/@aws-sdk/client-dynamodb/dist-cjs/runtimeConfig.browser.js @@ -0,0 +1,42 @@ +"use strict"; +Object.defineProperty(exports, "__esModule", { value: true }); +exports.getRuntimeConfig = void 0; +const tslib_1 = require("tslib"); +const package_json_1 = tslib_1.__importDefault(require("../package.json")); +const sha256_browser_1 = require("@aws-crypto/sha256-browser"); +const account_id_endpoint_1 = require("@aws-sdk/core/account-id-endpoint"); +const util_user_agent_browser_1 = require("@aws-sdk/util-user-agent-browser"); +const config_resolver_1 = require("@smithy/config-resolver"); +const fetch_http_handler_1 = require("@smithy/fetch-http-handler"); +const invalid_dependency_1 = require("@smithy/invalid-dependency"); +const util_body_length_browser_1 = require("@smithy/util-body-length-browser"); +const util_retry_1 = require("@smithy/util-retry"); +const runtimeConfig_shared_1 = require("./runtimeConfig.shared"); +const smithy_client_1 = require("@smithy/smithy-client"); +const util_defaults_mode_browser_1 = require("@smithy/util-defaults-mode-browser"); +const getRuntimeConfig = (config) => { + const defaultsMode = (0, util_defaults_mode_browser_1.resolveDefaultsModeConfig)(config); + const defaultConfigProvider = () => defaultsMode().then(smithy_client_1.loadConfigsForDefaultMode); + const clientSharedValues = (0, runtimeConfig_shared_1.getRuntimeConfig)(config); + return { + ...clientSharedValues, + ...config, + runtime: "browser", + defaultsMode, + accountIdEndpointMode: config?.accountIdEndpointMode ?? (() => Promise.resolve(account_id_endpoint_1.DEFAULT_ACCOUNT_ID_ENDPOINT_MODE)), + bodyLengthChecker: config?.bodyLengthChecker ?? util_body_length_browser_1.calculateBodyLength, + credentialDefaultProvider: config?.credentialDefaultProvider ?? ((_) => () => Promise.reject(new Error("Credential is missing"))), + defaultUserAgentProvider: config?.defaultUserAgentProvider ?? 
+ (0, util_user_agent_browser_1.createDefaultUserAgentProvider)({ serviceId: clientSharedValues.serviceId, clientVersion: package_json_1.default.version }), + endpointDiscoveryEnabledProvider: config?.endpointDiscoveryEnabledProvider ?? (() => Promise.resolve(undefined)), + maxAttempts: config?.maxAttempts ?? util_retry_1.DEFAULT_MAX_ATTEMPTS, + region: config?.region ?? (0, invalid_dependency_1.invalidProvider)("Region is missing"), + requestHandler: fetch_http_handler_1.FetchHttpHandler.create(config?.requestHandler ?? defaultConfigProvider), + retryMode: config?.retryMode ?? (async () => (await defaultConfigProvider()).retryMode || util_retry_1.DEFAULT_RETRY_MODE), + sha256: config?.sha256 ?? sha256_browser_1.Sha256, + streamCollector: config?.streamCollector ?? fetch_http_handler_1.streamCollector, + useDualstackEndpoint: config?.useDualstackEndpoint ?? (() => Promise.resolve(config_resolver_1.DEFAULT_USE_DUALSTACK_ENDPOINT)), + useFipsEndpoint: config?.useFipsEndpoint ?? (() => Promise.resolve(config_resolver_1.DEFAULT_USE_FIPS_ENDPOINT)), + }; +}; +exports.getRuntimeConfig = getRuntimeConfig; diff --git a/amplify/functions/deleteDocument/node_modules/@aws-sdk/client-dynamodb/dist-cjs/runtimeConfig.js b/amplify/functions/deleteDocument/node_modules/@aws-sdk/client-dynamodb/dist-cjs/runtimeConfig.js new file mode 100644 index 0000000..a07d8b8 --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@aws-sdk/client-dynamodb/dist-cjs/runtimeConfig.js @@ -0,0 +1,57 @@ +"use strict"; +Object.defineProperty(exports, "__esModule", { value: true }); +exports.getRuntimeConfig = void 0; +const tslib_1 = require("tslib"); +const package_json_1 = tslib_1.__importDefault(require("../package.json")); +const core_1 = require("@aws-sdk/core"); +const account_id_endpoint_1 = require("@aws-sdk/core/account-id-endpoint"); +const credential_provider_node_1 = require("@aws-sdk/credential-provider-node"); +const middleware_endpoint_discovery_1 = 
require("@aws-sdk/middleware-endpoint-discovery"); +const util_user_agent_node_1 = require("@aws-sdk/util-user-agent-node"); +const config_resolver_1 = require("@smithy/config-resolver"); +const hash_node_1 = require("@smithy/hash-node"); +const middleware_retry_1 = require("@smithy/middleware-retry"); +const node_config_provider_1 = require("@smithy/node-config-provider"); +const node_http_handler_1 = require("@smithy/node-http-handler"); +const util_body_length_node_1 = require("@smithy/util-body-length-node"); +const util_retry_1 = require("@smithy/util-retry"); +const runtimeConfig_shared_1 = require("./runtimeConfig.shared"); +const smithy_client_1 = require("@smithy/smithy-client"); +const util_defaults_mode_node_1 = require("@smithy/util-defaults-mode-node"); +const smithy_client_2 = require("@smithy/smithy-client"); +const getRuntimeConfig = (config) => { + (0, smithy_client_2.emitWarningIfUnsupportedVersion)(process.version); + const defaultsMode = (0, util_defaults_mode_node_1.resolveDefaultsModeConfig)(config); + const defaultConfigProvider = () => defaultsMode().then(smithy_client_1.loadConfigsForDefaultMode); + const clientSharedValues = (0, runtimeConfig_shared_1.getRuntimeConfig)(config); + (0, core_1.emitWarningIfUnsupportedVersion)(process.version); + const profileConfig = { profile: config?.profile }; + return { + ...clientSharedValues, + ...config, + runtime: "node", + defaultsMode, + accountIdEndpointMode: config?.accountIdEndpointMode ?? (0, node_config_provider_1.loadConfig)(account_id_endpoint_1.NODE_ACCOUNT_ID_ENDPOINT_MODE_CONFIG_OPTIONS, profileConfig), + authSchemePreference: config?.authSchemePreference ?? (0, node_config_provider_1.loadConfig)(core_1.NODE_AUTH_SCHEME_PREFERENCE_OPTIONS, profileConfig), + bodyLengthChecker: config?.bodyLengthChecker ?? util_body_length_node_1.calculateBodyLength, + credentialDefaultProvider: config?.credentialDefaultProvider ?? 
credential_provider_node_1.defaultProvider, + defaultUserAgentProvider: config?.defaultUserAgentProvider ?? + (0, util_user_agent_node_1.createDefaultUserAgentProvider)({ serviceId: clientSharedValues.serviceId, clientVersion: package_json_1.default.version }), + endpointDiscoveryEnabledProvider: config?.endpointDiscoveryEnabledProvider ?? (0, node_config_provider_1.loadConfig)(middleware_endpoint_discovery_1.NODE_ENDPOINT_DISCOVERY_CONFIG_OPTIONS, profileConfig), + maxAttempts: config?.maxAttempts ?? (0, node_config_provider_1.loadConfig)(middleware_retry_1.NODE_MAX_ATTEMPT_CONFIG_OPTIONS, config), + region: config?.region ?? + (0, node_config_provider_1.loadConfig)(config_resolver_1.NODE_REGION_CONFIG_OPTIONS, { ...config_resolver_1.NODE_REGION_CONFIG_FILE_OPTIONS, ...profileConfig }), + requestHandler: node_http_handler_1.NodeHttpHandler.create(config?.requestHandler ?? defaultConfigProvider), + retryMode: config?.retryMode ?? + (0, node_config_provider_1.loadConfig)({ + ...middleware_retry_1.NODE_RETRY_MODE_CONFIG_OPTIONS, + default: async () => (await defaultConfigProvider()).retryMode || util_retry_1.DEFAULT_RETRY_MODE, + }, config), + sha256: config?.sha256 ?? hash_node_1.Hash.bind(null, "sha256"), + streamCollector: config?.streamCollector ?? node_http_handler_1.streamCollector, + useDualstackEndpoint: config?.useDualstackEndpoint ?? (0, node_config_provider_1.loadConfig)(config_resolver_1.NODE_USE_DUALSTACK_ENDPOINT_CONFIG_OPTIONS, profileConfig), + useFipsEndpoint: config?.useFipsEndpoint ?? (0, node_config_provider_1.loadConfig)(config_resolver_1.NODE_USE_FIPS_ENDPOINT_CONFIG_OPTIONS, profileConfig), + userAgentAppId: config?.userAgentAppId ?? 
(0, node_config_provider_1.loadConfig)(util_user_agent_node_1.NODE_APP_ID_CONFIG_OPTIONS, profileConfig), + }; +}; +exports.getRuntimeConfig = getRuntimeConfig; diff --git a/amplify/functions/deleteDocument/node_modules/@aws-sdk/client-dynamodb/dist-cjs/runtimeConfig.native.js b/amplify/functions/deleteDocument/node_modules/@aws-sdk/client-dynamodb/dist-cjs/runtimeConfig.native.js new file mode 100644 index 0000000..34c5f8e --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@aws-sdk/client-dynamodb/dist-cjs/runtimeConfig.native.js @@ -0,0 +1,15 @@ +"use strict"; +Object.defineProperty(exports, "__esModule", { value: true }); +exports.getRuntimeConfig = void 0; +const sha256_js_1 = require("@aws-crypto/sha256-js"); +const runtimeConfig_browser_1 = require("./runtimeConfig.browser"); +const getRuntimeConfig = (config) => { + const browserDefaults = (0, runtimeConfig_browser_1.getRuntimeConfig)(config); + return { + ...browserDefaults, + ...config, + runtime: "react-native", + sha256: config?.sha256 ?? 
sha256_js_1.Sha256, + }; +}; +exports.getRuntimeConfig = getRuntimeConfig; diff --git a/amplify/functions/deleteDocument/node_modules/@aws-sdk/client-dynamodb/dist-cjs/runtimeConfig.shared.js b/amplify/functions/deleteDocument/node_modules/@aws-sdk/client-dynamodb/dist-cjs/runtimeConfig.shared.js new file mode 100644 index 0000000..817ba14 --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@aws-sdk/client-dynamodb/dist-cjs/runtimeConfig.shared.js @@ -0,0 +1,34 @@ +"use strict"; +Object.defineProperty(exports, "__esModule", { value: true }); +exports.getRuntimeConfig = void 0; +const core_1 = require("@aws-sdk/core"); +const smithy_client_1 = require("@smithy/smithy-client"); +const url_parser_1 = require("@smithy/url-parser"); +const util_base64_1 = require("@smithy/util-base64"); +const util_utf8_1 = require("@smithy/util-utf8"); +const httpAuthSchemeProvider_1 = require("./auth/httpAuthSchemeProvider"); +const endpointResolver_1 = require("./endpoint/endpointResolver"); +const getRuntimeConfig = (config) => { + return { + apiVersion: "2012-08-10", + base64Decoder: config?.base64Decoder ?? util_base64_1.fromBase64, + base64Encoder: config?.base64Encoder ?? util_base64_1.toBase64, + disableHostPrefix: config?.disableHostPrefix ?? false, + endpointProvider: config?.endpointProvider ?? endpointResolver_1.defaultEndpointResolver, + extensions: config?.extensions ?? [], + httpAuthSchemeProvider: config?.httpAuthSchemeProvider ?? httpAuthSchemeProvider_1.defaultDynamoDBHttpAuthSchemeProvider, + httpAuthSchemes: config?.httpAuthSchemes ?? [ + { + schemeId: "aws.auth#sigv4", + identityProvider: (ipc) => ipc.getIdentityProvider("aws.auth#sigv4"), + signer: new core_1.AwsSdkSigV4Signer(), + }, + ], + logger: config?.logger ?? new smithy_client_1.NoOpLogger(), + serviceId: config?.serviceId ?? "DynamoDB", + urlParser: config?.urlParser ?? url_parser_1.parseUrl, + utf8Decoder: config?.utf8Decoder ?? util_utf8_1.fromUtf8, + utf8Encoder: config?.utf8Encoder ?? 
util_utf8_1.toUtf8, + }; +}; +exports.getRuntimeConfig = getRuntimeConfig; diff --git a/amplify/functions/deleteDocument/node_modules/@aws-sdk/client-dynamodb/dist-es/DynamoDB.js b/amplify/functions/deleteDocument/node_modules/@aws-sdk/client-dynamodb/dist-es/DynamoDB.js new file mode 100644 index 0000000..e7f892c --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@aws-sdk/client-dynamodb/dist-es/DynamoDB.js @@ -0,0 +1,121 @@ +import { createAggregatedClient } from "@smithy/smithy-client"; +import { BatchExecuteStatementCommand, } from "./commands/BatchExecuteStatementCommand"; +import { BatchGetItemCommand, } from "./commands/BatchGetItemCommand"; +import { BatchWriteItemCommand, } from "./commands/BatchWriteItemCommand"; +import { CreateBackupCommand, } from "./commands/CreateBackupCommand"; +import { CreateGlobalTableCommand, } from "./commands/CreateGlobalTableCommand"; +import { CreateTableCommand } from "./commands/CreateTableCommand"; +import { DeleteBackupCommand, } from "./commands/DeleteBackupCommand"; +import { DeleteItemCommand } from "./commands/DeleteItemCommand"; +import { DeleteResourcePolicyCommand, } from "./commands/DeleteResourcePolicyCommand"; +import { DeleteTableCommand } from "./commands/DeleteTableCommand"; +import { DescribeBackupCommand, } from "./commands/DescribeBackupCommand"; +import { DescribeContinuousBackupsCommand, } from "./commands/DescribeContinuousBackupsCommand"; +import { DescribeContributorInsightsCommand, } from "./commands/DescribeContributorInsightsCommand"; +import { DescribeEndpointsCommand, } from "./commands/DescribeEndpointsCommand"; +import { DescribeExportCommand, } from "./commands/DescribeExportCommand"; +import { DescribeGlobalTableCommand, } from "./commands/DescribeGlobalTableCommand"; +import { DescribeGlobalTableSettingsCommand, } from "./commands/DescribeGlobalTableSettingsCommand"; +import { DescribeImportCommand, } from "./commands/DescribeImportCommand"; +import { 
DescribeKinesisStreamingDestinationCommand, } from "./commands/DescribeKinesisStreamingDestinationCommand"; +import { DescribeLimitsCommand, } from "./commands/DescribeLimitsCommand"; +import { DescribeTableCommand, } from "./commands/DescribeTableCommand"; +import { DescribeTableReplicaAutoScalingCommand, } from "./commands/DescribeTableReplicaAutoScalingCommand"; +import { DescribeTimeToLiveCommand, } from "./commands/DescribeTimeToLiveCommand"; +import { DisableKinesisStreamingDestinationCommand, } from "./commands/DisableKinesisStreamingDestinationCommand"; +import { EnableKinesisStreamingDestinationCommand, } from "./commands/EnableKinesisStreamingDestinationCommand"; +import { ExecuteStatementCommand, } from "./commands/ExecuteStatementCommand"; +import { ExecuteTransactionCommand, } from "./commands/ExecuteTransactionCommand"; +import { ExportTableToPointInTimeCommand, } from "./commands/ExportTableToPointInTimeCommand"; +import { GetItemCommand } from "./commands/GetItemCommand"; +import { GetResourcePolicyCommand, } from "./commands/GetResourcePolicyCommand"; +import { ImportTableCommand } from "./commands/ImportTableCommand"; +import { ListBackupsCommand } from "./commands/ListBackupsCommand"; +import { ListContributorInsightsCommand, } from "./commands/ListContributorInsightsCommand"; +import { ListExportsCommand } from "./commands/ListExportsCommand"; +import { ListGlobalTablesCommand, } from "./commands/ListGlobalTablesCommand"; +import { ListImportsCommand } from "./commands/ListImportsCommand"; +import { ListTablesCommand } from "./commands/ListTablesCommand"; +import { ListTagsOfResourceCommand, } from "./commands/ListTagsOfResourceCommand"; +import { PutItemCommand } from "./commands/PutItemCommand"; +import { PutResourcePolicyCommand, } from "./commands/PutResourcePolicyCommand"; +import { QueryCommand } from "./commands/QueryCommand"; +import { RestoreTableFromBackupCommand, } from "./commands/RestoreTableFromBackupCommand"; +import { 
RestoreTableToPointInTimeCommand, } from "./commands/RestoreTableToPointInTimeCommand"; +import { ScanCommand } from "./commands/ScanCommand"; +import { TagResourceCommand } from "./commands/TagResourceCommand"; +import { TransactGetItemsCommand, } from "./commands/TransactGetItemsCommand"; +import { TransactWriteItemsCommand, } from "./commands/TransactWriteItemsCommand"; +import { UntagResourceCommand, } from "./commands/UntagResourceCommand"; +import { UpdateContinuousBackupsCommand, } from "./commands/UpdateContinuousBackupsCommand"; +import { UpdateContributorInsightsCommand, } from "./commands/UpdateContributorInsightsCommand"; +import { UpdateGlobalTableCommand, } from "./commands/UpdateGlobalTableCommand"; +import { UpdateGlobalTableSettingsCommand, } from "./commands/UpdateGlobalTableSettingsCommand"; +import { UpdateItemCommand } from "./commands/UpdateItemCommand"; +import { UpdateKinesisStreamingDestinationCommand, } from "./commands/UpdateKinesisStreamingDestinationCommand"; +import { UpdateTableCommand } from "./commands/UpdateTableCommand"; +import { UpdateTableReplicaAutoScalingCommand, } from "./commands/UpdateTableReplicaAutoScalingCommand"; +import { UpdateTimeToLiveCommand, } from "./commands/UpdateTimeToLiveCommand"; +import { DynamoDBClient } from "./DynamoDBClient"; +const commands = { + BatchExecuteStatementCommand, + BatchGetItemCommand, + BatchWriteItemCommand, + CreateBackupCommand, + CreateGlobalTableCommand, + CreateTableCommand, + DeleteBackupCommand, + DeleteItemCommand, + DeleteResourcePolicyCommand, + DeleteTableCommand, + DescribeBackupCommand, + DescribeContinuousBackupsCommand, + DescribeContributorInsightsCommand, + DescribeEndpointsCommand, + DescribeExportCommand, + DescribeGlobalTableCommand, + DescribeGlobalTableSettingsCommand, + DescribeImportCommand, + DescribeKinesisStreamingDestinationCommand, + DescribeLimitsCommand, + DescribeTableCommand, + DescribeTableReplicaAutoScalingCommand, + DescribeTimeToLiveCommand, + 
DisableKinesisStreamingDestinationCommand, + EnableKinesisStreamingDestinationCommand, + ExecuteStatementCommand, + ExecuteTransactionCommand, + ExportTableToPointInTimeCommand, + GetItemCommand, + GetResourcePolicyCommand, + ImportTableCommand, + ListBackupsCommand, + ListContributorInsightsCommand, + ListExportsCommand, + ListGlobalTablesCommand, + ListImportsCommand, + ListTablesCommand, + ListTagsOfResourceCommand, + PutItemCommand, + PutResourcePolicyCommand, + QueryCommand, + RestoreTableFromBackupCommand, + RestoreTableToPointInTimeCommand, + ScanCommand, + TagResourceCommand, + TransactGetItemsCommand, + TransactWriteItemsCommand, + UntagResourceCommand, + UpdateContinuousBackupsCommand, + UpdateContributorInsightsCommand, + UpdateGlobalTableCommand, + UpdateGlobalTableSettingsCommand, + UpdateItemCommand, + UpdateKinesisStreamingDestinationCommand, + UpdateTableCommand, + UpdateTableReplicaAutoScalingCommand, + UpdateTimeToLiveCommand, +}; +export class DynamoDB extends DynamoDBClient { +} +createAggregatedClient(commands, DynamoDB); diff --git a/amplify/functions/deleteDocument/node_modules/@aws-sdk/client-dynamodb/dist-es/DynamoDBClient.js b/amplify/functions/deleteDocument/node_modules/@aws-sdk/client-dynamodb/dist-es/DynamoDBClient.js new file mode 100644 index 0000000..b77d943 --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@aws-sdk/client-dynamodb/dist-es/DynamoDBClient.js @@ -0,0 +1,55 @@ +import { resolveAccountIdEndpointModeConfig, } from "@aws-sdk/core/account-id-endpoint"; +import { resolveEndpointDiscoveryConfig, } from "@aws-sdk/middleware-endpoint-discovery"; +import { getHostHeaderPlugin, resolveHostHeaderConfig, } from "@aws-sdk/middleware-host-header"; +import { getLoggerPlugin } from "@aws-sdk/middleware-logger"; +import { getRecursionDetectionPlugin } from "@aws-sdk/middleware-recursion-detection"; +import { getUserAgentPlugin, resolveUserAgentConfig, } from "@aws-sdk/middleware-user-agent"; +import { 
resolveRegionConfig } from "@smithy/config-resolver"; +import { DefaultIdentityProviderConfig, getHttpAuthSchemeEndpointRuleSetPlugin, getHttpSigningPlugin, } from "@smithy/core"; +import { getContentLengthPlugin } from "@smithy/middleware-content-length"; +import { resolveEndpointConfig } from "@smithy/middleware-endpoint"; +import { getRetryPlugin, resolveRetryConfig } from "@smithy/middleware-retry"; +import { Client as __Client, } from "@smithy/smithy-client"; +import { defaultDynamoDBHttpAuthSchemeParametersProvider, resolveHttpAuthSchemeConfig, } from "./auth/httpAuthSchemeProvider"; +import { DescribeEndpointsCommand, } from "./commands/DescribeEndpointsCommand"; +import { resolveClientEndpointParameters, } from "./endpoint/EndpointParameters"; +import { getRuntimeConfig as __getRuntimeConfig } from "./runtimeConfig"; +import { resolveRuntimeExtensions } from "./runtimeExtensions"; +export { __Client }; +export class DynamoDBClient extends __Client { + config; + constructor(...[configuration]) { + const _config_0 = __getRuntimeConfig(configuration || {}); + super(_config_0); + this.initConfig = _config_0; + const _config_1 = resolveClientEndpointParameters(_config_0); + const _config_2 = resolveAccountIdEndpointModeConfig(_config_1); + const _config_3 = resolveUserAgentConfig(_config_2); + const _config_4 = resolveRetryConfig(_config_3); + const _config_5 = resolveRegionConfig(_config_4); + const _config_6 = resolveHostHeaderConfig(_config_5); + const _config_7 = resolveEndpointConfig(_config_6); + const _config_8 = resolveHttpAuthSchemeConfig(_config_7); + const _config_9 = resolveEndpointDiscoveryConfig(_config_8, { + endpointDiscoveryCommandCtor: DescribeEndpointsCommand, + }); + const _config_10 = resolveRuntimeExtensions(_config_9, configuration?.extensions || []); + this.config = _config_10; + this.middlewareStack.use(getUserAgentPlugin(this.config)); + this.middlewareStack.use(getRetryPlugin(this.config)); + 
this.middlewareStack.use(getContentLengthPlugin(this.config)); + this.middlewareStack.use(getHostHeaderPlugin(this.config)); + this.middlewareStack.use(getLoggerPlugin(this.config)); + this.middlewareStack.use(getRecursionDetectionPlugin(this.config)); + this.middlewareStack.use(getHttpAuthSchemeEndpointRuleSetPlugin(this.config, { + httpAuthSchemeParametersProvider: defaultDynamoDBHttpAuthSchemeParametersProvider, + identityProviderConfigProvider: async (config) => new DefaultIdentityProviderConfig({ + "aws.auth#sigv4": config.credentials, + }), + })); + this.middlewareStack.use(getHttpSigningPlugin(this.config)); + } + destroy() { + super.destroy(); + } +} diff --git a/amplify/functions/deleteDocument/node_modules/@aws-sdk/client-dynamodb/dist-es/auth/httpAuthExtensionConfiguration.js b/amplify/functions/deleteDocument/node_modules/@aws-sdk/client-dynamodb/dist-es/auth/httpAuthExtensionConfiguration.js new file mode 100644 index 0000000..2ba1d48 --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@aws-sdk/client-dynamodb/dist-es/auth/httpAuthExtensionConfiguration.js @@ -0,0 +1,38 @@ +export const getHttpAuthExtensionConfiguration = (runtimeConfig) => { + const _httpAuthSchemes = runtimeConfig.httpAuthSchemes; + let _httpAuthSchemeProvider = runtimeConfig.httpAuthSchemeProvider; + let _credentials = runtimeConfig.credentials; + return { + setHttpAuthScheme(httpAuthScheme) { + const index = _httpAuthSchemes.findIndex((scheme) => scheme.schemeId === httpAuthScheme.schemeId); + if (index === -1) { + _httpAuthSchemes.push(httpAuthScheme); + } + else { + _httpAuthSchemes.splice(index, 1, httpAuthScheme); + } + }, + httpAuthSchemes() { + return _httpAuthSchemes; + }, + setHttpAuthSchemeProvider(httpAuthSchemeProvider) { + _httpAuthSchemeProvider = httpAuthSchemeProvider; + }, + httpAuthSchemeProvider() { + return _httpAuthSchemeProvider; + }, + setCredentials(credentials) { + _credentials = credentials; + }, + credentials() { + return _credentials; + }, + 
}; +}; +export const resolveHttpAuthRuntimeConfig = (config) => { + return { + httpAuthSchemes: config.httpAuthSchemes(), + httpAuthSchemeProvider: config.httpAuthSchemeProvider(), + credentials: config.credentials(), + }; +}; diff --git a/amplify/functions/deleteDocument/node_modules/@aws-sdk/client-dynamodb/dist-es/auth/httpAuthSchemeProvider.js b/amplify/functions/deleteDocument/node_modules/@aws-sdk/client-dynamodb/dist-es/auth/httpAuthSchemeProvider.js new file mode 100644 index 0000000..6a9e23e --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@aws-sdk/client-dynamodb/dist-es/auth/httpAuthSchemeProvider.js @@ -0,0 +1,41 @@ +import { resolveAwsSdkSigV4Config, } from "@aws-sdk/core"; +import { getSmithyContext, normalizeProvider } from "@smithy/util-middleware"; +export const defaultDynamoDBHttpAuthSchemeParametersProvider = async (config, context, input) => { + return { + operation: getSmithyContext(context).operation, + region: (await normalizeProvider(config.region)()) || + (() => { + throw new Error("expected `region` to be configured for `aws.auth#sigv4`"); + })(), + }; +}; +function createAwsAuthSigv4HttpAuthOption(authParameters) { + return { + schemeId: "aws.auth#sigv4", + signingProperties: { + name: "dynamodb", + region: authParameters.region, + }, + propertiesExtractor: (config, context) => ({ + signingProperties: { + config, + context, + }, + }), + }; +} +export const defaultDynamoDBHttpAuthSchemeProvider = (authParameters) => { + const options = []; + switch (authParameters.operation) { + default: { + options.push(createAwsAuthSigv4HttpAuthOption(authParameters)); + } + } + return options; +}; +export const resolveHttpAuthSchemeConfig = (config) => { + const config_0 = resolveAwsSdkSigV4Config(config); + return Object.assign(config_0, { + authSchemePreference: normalizeProvider(config.authSchemePreference ?? 
[]), + }); +}; diff --git a/amplify/functions/deleteDocument/node_modules/@aws-sdk/client-dynamodb/dist-es/commands/BatchExecuteStatementCommand.js b/amplify/functions/deleteDocument/node_modules/@aws-sdk/client-dynamodb/dist-es/commands/BatchExecuteStatementCommand.js new file mode 100644 index 0000000..4cbd251 --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@aws-sdk/client-dynamodb/dist-es/commands/BatchExecuteStatementCommand.js @@ -0,0 +1,22 @@ +import { getEndpointPlugin } from "@smithy/middleware-endpoint"; +import { getSerdePlugin } from "@smithy/middleware-serde"; +import { Command as $Command } from "@smithy/smithy-client"; +import { commonParams } from "../endpoint/EndpointParameters"; +import { de_BatchExecuteStatementCommand, se_BatchExecuteStatementCommand } from "../protocols/Aws_json1_0"; +export { $Command }; +export class BatchExecuteStatementCommand extends $Command + .classBuilder() + .ep(commonParams) + .m(function (Command, cs, config, o) { + return [ + getSerdePlugin(config, this.serialize, this.deserialize), + getEndpointPlugin(config, Command.getEndpointParameterInstructions()), + ]; +}) + .s("DynamoDB_20120810", "BatchExecuteStatement", {}) + .n("DynamoDBClient", "BatchExecuteStatementCommand") + .f(void 0, void 0) + .ser(se_BatchExecuteStatementCommand) + .de(de_BatchExecuteStatementCommand) + .build() { +} diff --git a/amplify/functions/deleteDocument/node_modules/@aws-sdk/client-dynamodb/dist-es/commands/BatchGetItemCommand.js b/amplify/functions/deleteDocument/node_modules/@aws-sdk/client-dynamodb/dist-es/commands/BatchGetItemCommand.js new file mode 100644 index 0000000..88dbf81 --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@aws-sdk/client-dynamodb/dist-es/commands/BatchGetItemCommand.js @@ -0,0 +1,25 @@ +import { getEndpointPlugin } from "@smithy/middleware-endpoint"; +import { getSerdePlugin } from "@smithy/middleware-serde"; +import { Command as $Command } from "@smithy/smithy-client"; +import { 
commonParams } from "../endpoint/EndpointParameters"; +import { de_BatchGetItemCommand, se_BatchGetItemCommand } from "../protocols/Aws_json1_0"; +export { $Command }; +export class BatchGetItemCommand extends $Command + .classBuilder() + .ep({ + ...commonParams, + ResourceArnList: { type: "operationContextParams", get: (input) => Object.keys(input?.RequestItems ?? {}) }, +}) + .m(function (Command, cs, config, o) { + return [ + getSerdePlugin(config, this.serialize, this.deserialize), + getEndpointPlugin(config, Command.getEndpointParameterInstructions()), + ]; +}) + .s("DynamoDB_20120810", "BatchGetItem", {}) + .n("DynamoDBClient", "BatchGetItemCommand") + .f(void 0, void 0) + .ser(se_BatchGetItemCommand) + .de(de_BatchGetItemCommand) + .build() { +} diff --git a/amplify/functions/deleteDocument/node_modules/@aws-sdk/client-dynamodb/dist-es/commands/BatchWriteItemCommand.js b/amplify/functions/deleteDocument/node_modules/@aws-sdk/client-dynamodb/dist-es/commands/BatchWriteItemCommand.js new file mode 100644 index 0000000..c27df1a --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@aws-sdk/client-dynamodb/dist-es/commands/BatchWriteItemCommand.js @@ -0,0 +1,25 @@ +import { getEndpointPlugin } from "@smithy/middleware-endpoint"; +import { getSerdePlugin } from "@smithy/middleware-serde"; +import { Command as $Command } from "@smithy/smithy-client"; +import { commonParams } from "../endpoint/EndpointParameters"; +import { de_BatchWriteItemCommand, se_BatchWriteItemCommand } from "../protocols/Aws_json1_0"; +export { $Command }; +export class BatchWriteItemCommand extends $Command + .classBuilder() + .ep({ + ...commonParams, + ResourceArnList: { type: "operationContextParams", get: (input) => Object.keys(input?.RequestItems ?? 
{}) }, +}) + .m(function (Command, cs, config, o) { + return [ + getSerdePlugin(config, this.serialize, this.deserialize), + getEndpointPlugin(config, Command.getEndpointParameterInstructions()), + ]; +}) + .s("DynamoDB_20120810", "BatchWriteItem", {}) + .n("DynamoDBClient", "BatchWriteItemCommand") + .f(void 0, void 0) + .ser(se_BatchWriteItemCommand) + .de(de_BatchWriteItemCommand) + .build() { +} diff --git a/amplify/functions/deleteDocument/node_modules/@aws-sdk/client-dynamodb/dist-es/commands/CreateBackupCommand.js b/amplify/functions/deleteDocument/node_modules/@aws-sdk/client-dynamodb/dist-es/commands/CreateBackupCommand.js new file mode 100644 index 0000000..c932b5c --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@aws-sdk/client-dynamodb/dist-es/commands/CreateBackupCommand.js @@ -0,0 +1,25 @@ +import { getEndpointPlugin } from "@smithy/middleware-endpoint"; +import { getSerdePlugin } from "@smithy/middleware-serde"; +import { Command as $Command } from "@smithy/smithy-client"; +import { commonParams } from "../endpoint/EndpointParameters"; +import { de_CreateBackupCommand, se_CreateBackupCommand } from "../protocols/Aws_json1_0"; +export { $Command }; +export class CreateBackupCommand extends $Command + .classBuilder() + .ep({ + ...commonParams, + ResourceArn: { type: "contextParams", name: "TableName" }, +}) + .m(function (Command, cs, config, o) { + return [ + getSerdePlugin(config, this.serialize, this.deserialize), + getEndpointPlugin(config, Command.getEndpointParameterInstructions()), + ]; +}) + .s("DynamoDB_20120810", "CreateBackup", {}) + .n("DynamoDBClient", "CreateBackupCommand") + .f(void 0, void 0) + .ser(se_CreateBackupCommand) + .de(de_CreateBackupCommand) + .build() { +} diff --git a/amplify/functions/deleteDocument/node_modules/@aws-sdk/client-dynamodb/dist-es/commands/CreateGlobalTableCommand.js b/amplify/functions/deleteDocument/node_modules/@aws-sdk/client-dynamodb/dist-es/commands/CreateGlobalTableCommand.js new file 
mode 100644 index 0000000..49b8b7a --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@aws-sdk/client-dynamodb/dist-es/commands/CreateGlobalTableCommand.js @@ -0,0 +1,25 @@ +import { getEndpointPlugin } from "@smithy/middleware-endpoint"; +import { getSerdePlugin } from "@smithy/middleware-serde"; +import { Command as $Command } from "@smithy/smithy-client"; +import { commonParams } from "../endpoint/EndpointParameters"; +import { de_CreateGlobalTableCommand, se_CreateGlobalTableCommand } from "../protocols/Aws_json1_0"; +export { $Command }; +export class CreateGlobalTableCommand extends $Command + .classBuilder() + .ep({ + ...commonParams, + ResourceArn: { type: "contextParams", name: "GlobalTableName" }, +}) + .m(function (Command, cs, config, o) { + return [ + getSerdePlugin(config, this.serialize, this.deserialize), + getEndpointPlugin(config, Command.getEndpointParameterInstructions()), + ]; +}) + .s("DynamoDB_20120810", "CreateGlobalTable", {}) + .n("DynamoDBClient", "CreateGlobalTableCommand") + .f(void 0, void 0) + .ser(se_CreateGlobalTableCommand) + .de(de_CreateGlobalTableCommand) + .build() { +} diff --git a/amplify/functions/deleteDocument/node_modules/@aws-sdk/client-dynamodb/dist-es/commands/CreateTableCommand.js b/amplify/functions/deleteDocument/node_modules/@aws-sdk/client-dynamodb/dist-es/commands/CreateTableCommand.js new file mode 100644 index 0000000..89f3586 --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@aws-sdk/client-dynamodb/dist-es/commands/CreateTableCommand.js @@ -0,0 +1,25 @@ +import { getEndpointPlugin } from "@smithy/middleware-endpoint"; +import { getSerdePlugin } from "@smithy/middleware-serde"; +import { Command as $Command } from "@smithy/smithy-client"; +import { commonParams } from "../endpoint/EndpointParameters"; +import { de_CreateTableCommand, se_CreateTableCommand } from "../protocols/Aws_json1_0"; +export { $Command }; +export class CreateTableCommand extends $Command + .classBuilder() + 
.ep({ + ...commonParams, + ResourceArn: { type: "contextParams", name: "TableName" }, +}) + .m(function (Command, cs, config, o) { + return [ + getSerdePlugin(config, this.serialize, this.deserialize), + getEndpointPlugin(config, Command.getEndpointParameterInstructions()), + ]; +}) + .s("DynamoDB_20120810", "CreateTable", {}) + .n("DynamoDBClient", "CreateTableCommand") + .f(void 0, void 0) + .ser(se_CreateTableCommand) + .de(de_CreateTableCommand) + .build() { +} diff --git a/amplify/functions/deleteDocument/node_modules/@aws-sdk/client-dynamodb/dist-es/commands/DeleteBackupCommand.js b/amplify/functions/deleteDocument/node_modules/@aws-sdk/client-dynamodb/dist-es/commands/DeleteBackupCommand.js new file mode 100644 index 0000000..d420225 --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@aws-sdk/client-dynamodb/dist-es/commands/DeleteBackupCommand.js @@ -0,0 +1,25 @@ +import { getEndpointPlugin } from "@smithy/middleware-endpoint"; +import { getSerdePlugin } from "@smithy/middleware-serde"; +import { Command as $Command } from "@smithy/smithy-client"; +import { commonParams } from "../endpoint/EndpointParameters"; +import { de_DeleteBackupCommand, se_DeleteBackupCommand } from "../protocols/Aws_json1_0"; +export { $Command }; +export class DeleteBackupCommand extends $Command + .classBuilder() + .ep({ + ...commonParams, + ResourceArn: { type: "contextParams", name: "BackupArn" }, +}) + .m(function (Command, cs, config, o) { + return [ + getSerdePlugin(config, this.serialize, this.deserialize), + getEndpointPlugin(config, Command.getEndpointParameterInstructions()), + ]; +}) + .s("DynamoDB_20120810", "DeleteBackup", {}) + .n("DynamoDBClient", "DeleteBackupCommand") + .f(void 0, void 0) + .ser(se_DeleteBackupCommand) + .de(de_DeleteBackupCommand) + .build() { +} diff --git a/amplify/functions/deleteDocument/node_modules/@aws-sdk/client-dynamodb/dist-es/commands/DeleteItemCommand.js 
b/amplify/functions/deleteDocument/node_modules/@aws-sdk/client-dynamodb/dist-es/commands/DeleteItemCommand.js new file mode 100644 index 0000000..0550355 --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@aws-sdk/client-dynamodb/dist-es/commands/DeleteItemCommand.js @@ -0,0 +1,25 @@ +import { getEndpointPlugin } from "@smithy/middleware-endpoint"; +import { getSerdePlugin } from "@smithy/middleware-serde"; +import { Command as $Command } from "@smithy/smithy-client"; +import { commonParams } from "../endpoint/EndpointParameters"; +import { de_DeleteItemCommand, se_DeleteItemCommand } from "../protocols/Aws_json1_0"; +export { $Command }; +export class DeleteItemCommand extends $Command + .classBuilder() + .ep({ + ...commonParams, + ResourceArn: { type: "contextParams", name: "TableName" }, +}) + .m(function (Command, cs, config, o) { + return [ + getSerdePlugin(config, this.serialize, this.deserialize), + getEndpointPlugin(config, Command.getEndpointParameterInstructions()), + ]; +}) + .s("DynamoDB_20120810", "DeleteItem", {}) + .n("DynamoDBClient", "DeleteItemCommand") + .f(void 0, void 0) + .ser(se_DeleteItemCommand) + .de(de_DeleteItemCommand) + .build() { +} diff --git a/amplify/functions/deleteDocument/node_modules/@aws-sdk/client-dynamodb/dist-es/commands/DeleteResourcePolicyCommand.js b/amplify/functions/deleteDocument/node_modules/@aws-sdk/client-dynamodb/dist-es/commands/DeleteResourcePolicyCommand.js new file mode 100644 index 0000000..045379c --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@aws-sdk/client-dynamodb/dist-es/commands/DeleteResourcePolicyCommand.js @@ -0,0 +1,25 @@ +import { getEndpointPlugin } from "@smithy/middleware-endpoint"; +import { getSerdePlugin } from "@smithy/middleware-serde"; +import { Command as $Command } from "@smithy/smithy-client"; +import { commonParams } from "../endpoint/EndpointParameters"; +import { de_DeleteResourcePolicyCommand, se_DeleteResourcePolicyCommand } from 
"../protocols/Aws_json1_0"; +export { $Command }; +export class DeleteResourcePolicyCommand extends $Command + .classBuilder() + .ep({ + ...commonParams, + ResourceArn: { type: "contextParams", name: "ResourceArn" }, +}) + .m(function (Command, cs, config, o) { + return [ + getSerdePlugin(config, this.serialize, this.deserialize), + getEndpointPlugin(config, Command.getEndpointParameterInstructions()), + ]; +}) + .s("DynamoDB_20120810", "DeleteResourcePolicy", {}) + .n("DynamoDBClient", "DeleteResourcePolicyCommand") + .f(void 0, void 0) + .ser(se_DeleteResourcePolicyCommand) + .de(de_DeleteResourcePolicyCommand) + .build() { +} diff --git a/amplify/functions/deleteDocument/node_modules/@aws-sdk/client-dynamodb/dist-es/commands/DeleteTableCommand.js b/amplify/functions/deleteDocument/node_modules/@aws-sdk/client-dynamodb/dist-es/commands/DeleteTableCommand.js new file mode 100644 index 0000000..b7792b3 --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@aws-sdk/client-dynamodb/dist-es/commands/DeleteTableCommand.js @@ -0,0 +1,25 @@ +import { getEndpointPlugin } from "@smithy/middleware-endpoint"; +import { getSerdePlugin } from "@smithy/middleware-serde"; +import { Command as $Command } from "@smithy/smithy-client"; +import { commonParams } from "../endpoint/EndpointParameters"; +import { de_DeleteTableCommand, se_DeleteTableCommand } from "../protocols/Aws_json1_0"; +export { $Command }; +export class DeleteTableCommand extends $Command + .classBuilder() + .ep({ + ...commonParams, + ResourceArn: { type: "contextParams", name: "TableName" }, +}) + .m(function (Command, cs, config, o) { + return [ + getSerdePlugin(config, this.serialize, this.deserialize), + getEndpointPlugin(config, Command.getEndpointParameterInstructions()), + ]; +}) + .s("DynamoDB_20120810", "DeleteTable", {}) + .n("DynamoDBClient", "DeleteTableCommand") + .f(void 0, void 0) + .ser(se_DeleteTableCommand) + .de(de_DeleteTableCommand) + .build() { +} diff --git 
a/amplify/functions/deleteDocument/node_modules/@aws-sdk/client-dynamodb/dist-es/commands/DescribeBackupCommand.js b/amplify/functions/deleteDocument/node_modules/@aws-sdk/client-dynamodb/dist-es/commands/DescribeBackupCommand.js new file mode 100644 index 0000000..70345e3 --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@aws-sdk/client-dynamodb/dist-es/commands/DescribeBackupCommand.js @@ -0,0 +1,25 @@ +import { getEndpointPlugin } from "@smithy/middleware-endpoint"; +import { getSerdePlugin } from "@smithy/middleware-serde"; +import { Command as $Command } from "@smithy/smithy-client"; +import { commonParams } from "../endpoint/EndpointParameters"; +import { de_DescribeBackupCommand, se_DescribeBackupCommand } from "../protocols/Aws_json1_0"; +export { $Command }; +export class DescribeBackupCommand extends $Command + .classBuilder() + .ep({ + ...commonParams, + ResourceArn: { type: "contextParams", name: "BackupArn" }, +}) + .m(function (Command, cs, config, o) { + return [ + getSerdePlugin(config, this.serialize, this.deserialize), + getEndpointPlugin(config, Command.getEndpointParameterInstructions()), + ]; +}) + .s("DynamoDB_20120810", "DescribeBackup", {}) + .n("DynamoDBClient", "DescribeBackupCommand") + .f(void 0, void 0) + .ser(se_DescribeBackupCommand) + .de(de_DescribeBackupCommand) + .build() { +} diff --git a/amplify/functions/deleteDocument/node_modules/@aws-sdk/client-dynamodb/dist-es/commands/DescribeContinuousBackupsCommand.js b/amplify/functions/deleteDocument/node_modules/@aws-sdk/client-dynamodb/dist-es/commands/DescribeContinuousBackupsCommand.js new file mode 100644 index 0000000..1104b60 --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@aws-sdk/client-dynamodb/dist-es/commands/DescribeContinuousBackupsCommand.js @@ -0,0 +1,25 @@ +import { getEndpointPlugin } from "@smithy/middleware-endpoint"; +import { getSerdePlugin } from "@smithy/middleware-serde"; +import { Command as $Command } from 
"@smithy/smithy-client"; +import { commonParams } from "../endpoint/EndpointParameters"; +import { de_DescribeContinuousBackupsCommand, se_DescribeContinuousBackupsCommand } from "../protocols/Aws_json1_0"; +export { $Command }; +export class DescribeContinuousBackupsCommand extends $Command + .classBuilder() + .ep({ + ...commonParams, + ResourceArn: { type: "contextParams", name: "TableName" }, +}) + .m(function (Command, cs, config, o) { + return [ + getSerdePlugin(config, this.serialize, this.deserialize), + getEndpointPlugin(config, Command.getEndpointParameterInstructions()), + ]; +}) + .s("DynamoDB_20120810", "DescribeContinuousBackups", {}) + .n("DynamoDBClient", "DescribeContinuousBackupsCommand") + .f(void 0, void 0) + .ser(se_DescribeContinuousBackupsCommand) + .de(de_DescribeContinuousBackupsCommand) + .build() { +} diff --git a/amplify/functions/deleteDocument/node_modules/@aws-sdk/client-dynamodb/dist-es/commands/DescribeContributorInsightsCommand.js b/amplify/functions/deleteDocument/node_modules/@aws-sdk/client-dynamodb/dist-es/commands/DescribeContributorInsightsCommand.js new file mode 100644 index 0000000..18a44c9 --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@aws-sdk/client-dynamodb/dist-es/commands/DescribeContributorInsightsCommand.js @@ -0,0 +1,25 @@ +import { getEndpointPlugin } from "@smithy/middleware-endpoint"; +import { getSerdePlugin } from "@smithy/middleware-serde"; +import { Command as $Command } from "@smithy/smithy-client"; +import { commonParams } from "../endpoint/EndpointParameters"; +import { de_DescribeContributorInsightsCommand, se_DescribeContributorInsightsCommand } from "../protocols/Aws_json1_0"; +export { $Command }; +export class DescribeContributorInsightsCommand extends $Command + .classBuilder() + .ep({ + ...commonParams, + ResourceArn: { type: "contextParams", name: "TableName" }, +}) + .m(function (Command, cs, config, o) { + return [ + getSerdePlugin(config, this.serialize, this.deserialize), + 
getEndpointPlugin(config, Command.getEndpointParameterInstructions()), + ]; +}) + .s("DynamoDB_20120810", "DescribeContributorInsights", {}) + .n("DynamoDBClient", "DescribeContributorInsightsCommand") + .f(void 0, void 0) + .ser(se_DescribeContributorInsightsCommand) + .de(de_DescribeContributorInsightsCommand) + .build() { +} diff --git a/amplify/functions/deleteDocument/node_modules/@aws-sdk/client-dynamodb/dist-es/commands/DescribeEndpointsCommand.js b/amplify/functions/deleteDocument/node_modules/@aws-sdk/client-dynamodb/dist-es/commands/DescribeEndpointsCommand.js new file mode 100644 index 0000000..7fa3d01 --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@aws-sdk/client-dynamodb/dist-es/commands/DescribeEndpointsCommand.js @@ -0,0 +1,22 @@ +import { getEndpointPlugin } from "@smithy/middleware-endpoint"; +import { getSerdePlugin } from "@smithy/middleware-serde"; +import { Command as $Command } from "@smithy/smithy-client"; +import { commonParams } from "../endpoint/EndpointParameters"; +import { de_DescribeEndpointsCommand, se_DescribeEndpointsCommand } from "../protocols/Aws_json1_0"; +export { $Command }; +export class DescribeEndpointsCommand extends $Command + .classBuilder() + .ep(commonParams) + .m(function (Command, cs, config, o) { + return [ + getSerdePlugin(config, this.serialize, this.deserialize), + getEndpointPlugin(config, Command.getEndpointParameterInstructions()), + ]; +}) + .s("DynamoDB_20120810", "DescribeEndpoints", {}) + .n("DynamoDBClient", "DescribeEndpointsCommand") + .f(void 0, void 0) + .ser(se_DescribeEndpointsCommand) + .de(de_DescribeEndpointsCommand) + .build() { +} diff --git a/amplify/functions/deleteDocument/node_modules/@aws-sdk/client-dynamodb/dist-es/commands/DescribeExportCommand.js b/amplify/functions/deleteDocument/node_modules/@aws-sdk/client-dynamodb/dist-es/commands/DescribeExportCommand.js new file mode 100644 index 0000000..18f9c9a --- /dev/null +++ 
b/amplify/functions/deleteDocument/node_modules/@aws-sdk/client-dynamodb/dist-es/commands/DescribeExportCommand.js @@ -0,0 +1,25 @@ +import { getEndpointPlugin } from "@smithy/middleware-endpoint"; +import { getSerdePlugin } from "@smithy/middleware-serde"; +import { Command as $Command } from "@smithy/smithy-client"; +import { commonParams } from "../endpoint/EndpointParameters"; +import { de_DescribeExportCommand, se_DescribeExportCommand } from "../protocols/Aws_json1_0"; +export { $Command }; +export class DescribeExportCommand extends $Command + .classBuilder() + .ep({ + ...commonParams, + ResourceArn: { type: "contextParams", name: "ExportArn" }, +}) + .m(function (Command, cs, config, o) { + return [ + getSerdePlugin(config, this.serialize, this.deserialize), + getEndpointPlugin(config, Command.getEndpointParameterInstructions()), + ]; +}) + .s("DynamoDB_20120810", "DescribeExport", {}) + .n("DynamoDBClient", "DescribeExportCommand") + .f(void 0, void 0) + .ser(se_DescribeExportCommand) + .de(de_DescribeExportCommand) + .build() { +} diff --git a/amplify/functions/deleteDocument/node_modules/@aws-sdk/client-dynamodb/dist-es/commands/DescribeGlobalTableCommand.js b/amplify/functions/deleteDocument/node_modules/@aws-sdk/client-dynamodb/dist-es/commands/DescribeGlobalTableCommand.js new file mode 100644 index 0000000..87acf97 --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@aws-sdk/client-dynamodb/dist-es/commands/DescribeGlobalTableCommand.js @@ -0,0 +1,25 @@ +import { getEndpointPlugin } from "@smithy/middleware-endpoint"; +import { getSerdePlugin } from "@smithy/middleware-serde"; +import { Command as $Command } from "@smithy/smithy-client"; +import { commonParams } from "../endpoint/EndpointParameters"; +import { de_DescribeGlobalTableCommand, se_DescribeGlobalTableCommand } from "../protocols/Aws_json1_0"; +export { $Command }; +export class DescribeGlobalTableCommand extends $Command + .classBuilder() + .ep({ + ...commonParams, + 
ResourceArn: { type: "contextParams", name: "GlobalTableName" }, +}) + .m(function (Command, cs, config, o) { + return [ + getSerdePlugin(config, this.serialize, this.deserialize), + getEndpointPlugin(config, Command.getEndpointParameterInstructions()), + ]; +}) + .s("DynamoDB_20120810", "DescribeGlobalTable", {}) + .n("DynamoDBClient", "DescribeGlobalTableCommand") + .f(void 0, void 0) + .ser(se_DescribeGlobalTableCommand) + .de(de_DescribeGlobalTableCommand) + .build() { +} diff --git a/amplify/functions/deleteDocument/node_modules/@aws-sdk/client-dynamodb/dist-es/commands/DescribeGlobalTableSettingsCommand.js b/amplify/functions/deleteDocument/node_modules/@aws-sdk/client-dynamodb/dist-es/commands/DescribeGlobalTableSettingsCommand.js new file mode 100644 index 0000000..e6497a5 --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@aws-sdk/client-dynamodb/dist-es/commands/DescribeGlobalTableSettingsCommand.js @@ -0,0 +1,25 @@ +import { getEndpointPlugin } from "@smithy/middleware-endpoint"; +import { getSerdePlugin } from "@smithy/middleware-serde"; +import { Command as $Command } from "@smithy/smithy-client"; +import { commonParams } from "../endpoint/EndpointParameters"; +import { de_DescribeGlobalTableSettingsCommand, se_DescribeGlobalTableSettingsCommand } from "../protocols/Aws_json1_0"; +export { $Command }; +export class DescribeGlobalTableSettingsCommand extends $Command + .classBuilder() + .ep({ + ...commonParams, + ResourceArn: { type: "contextParams", name: "GlobalTableName" }, +}) + .m(function (Command, cs, config, o) { + return [ + getSerdePlugin(config, this.serialize, this.deserialize), + getEndpointPlugin(config, Command.getEndpointParameterInstructions()), + ]; +}) + .s("DynamoDB_20120810", "DescribeGlobalTableSettings", {}) + .n("DynamoDBClient", "DescribeGlobalTableSettingsCommand") + .f(void 0, void 0) + .ser(se_DescribeGlobalTableSettingsCommand) + .de(de_DescribeGlobalTableSettingsCommand) + .build() { +} diff --git 
a/amplify/functions/deleteDocument/node_modules/@aws-sdk/client-dynamodb/dist-es/commands/DescribeImportCommand.js b/amplify/functions/deleteDocument/node_modules/@aws-sdk/client-dynamodb/dist-es/commands/DescribeImportCommand.js new file mode 100644 index 0000000..2c147ca --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@aws-sdk/client-dynamodb/dist-es/commands/DescribeImportCommand.js @@ -0,0 +1,25 @@ +import { getEndpointPlugin } from "@smithy/middleware-endpoint"; +import { getSerdePlugin } from "@smithy/middleware-serde"; +import { Command as $Command } from "@smithy/smithy-client"; +import { commonParams } from "../endpoint/EndpointParameters"; +import { de_DescribeImportCommand, se_DescribeImportCommand } from "../protocols/Aws_json1_0"; +export { $Command }; +export class DescribeImportCommand extends $Command + .classBuilder() + .ep({ + ...commonParams, + ResourceArn: { type: "contextParams", name: "ImportArn" }, +}) + .m(function (Command, cs, config, o) { + return [ + getSerdePlugin(config, this.serialize, this.deserialize), + getEndpointPlugin(config, Command.getEndpointParameterInstructions()), + ]; +}) + .s("DynamoDB_20120810", "DescribeImport", {}) + .n("DynamoDBClient", "DescribeImportCommand") + .f(void 0, void 0) + .ser(se_DescribeImportCommand) + .de(de_DescribeImportCommand) + .build() { +} diff --git a/amplify/functions/deleteDocument/node_modules/@aws-sdk/client-dynamodb/dist-es/commands/DescribeKinesisStreamingDestinationCommand.js b/amplify/functions/deleteDocument/node_modules/@aws-sdk/client-dynamodb/dist-es/commands/DescribeKinesisStreamingDestinationCommand.js new file mode 100644 index 0000000..b2011ae --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@aws-sdk/client-dynamodb/dist-es/commands/DescribeKinesisStreamingDestinationCommand.js @@ -0,0 +1,25 @@ +import { getEndpointPlugin } from "@smithy/middleware-endpoint"; +import { getSerdePlugin } from "@smithy/middleware-serde"; +import { Command as 
$Command } from "@smithy/smithy-client"; +import { commonParams } from "../endpoint/EndpointParameters"; +import { de_DescribeKinesisStreamingDestinationCommand, se_DescribeKinesisStreamingDestinationCommand, } from "../protocols/Aws_json1_0"; +export { $Command }; +export class DescribeKinesisStreamingDestinationCommand extends $Command + .classBuilder() + .ep({ + ...commonParams, + ResourceArn: { type: "contextParams", name: "TableName" }, +}) + .m(function (Command, cs, config, o) { + return [ + getSerdePlugin(config, this.serialize, this.deserialize), + getEndpointPlugin(config, Command.getEndpointParameterInstructions()), + ]; +}) + .s("DynamoDB_20120810", "DescribeKinesisStreamingDestination", {}) + .n("DynamoDBClient", "DescribeKinesisStreamingDestinationCommand") + .f(void 0, void 0) + .ser(se_DescribeKinesisStreamingDestinationCommand) + .de(de_DescribeKinesisStreamingDestinationCommand) + .build() { +} diff --git a/amplify/functions/deleteDocument/node_modules/@aws-sdk/client-dynamodb/dist-es/commands/DescribeLimitsCommand.js b/amplify/functions/deleteDocument/node_modules/@aws-sdk/client-dynamodb/dist-es/commands/DescribeLimitsCommand.js new file mode 100644 index 0000000..38a0fcc --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@aws-sdk/client-dynamodb/dist-es/commands/DescribeLimitsCommand.js @@ -0,0 +1,22 @@ +import { getEndpointPlugin } from "@smithy/middleware-endpoint"; +import { getSerdePlugin } from "@smithy/middleware-serde"; +import { Command as $Command } from "@smithy/smithy-client"; +import { commonParams } from "../endpoint/EndpointParameters"; +import { de_DescribeLimitsCommand, se_DescribeLimitsCommand } from "../protocols/Aws_json1_0"; +export { $Command }; +export class DescribeLimitsCommand extends $Command + .classBuilder() + .ep(commonParams) + .m(function (Command, cs, config, o) { + return [ + getSerdePlugin(config, this.serialize, this.deserialize), + getEndpointPlugin(config, 
Command.getEndpointParameterInstructions()), + ]; +}) + .s("DynamoDB_20120810", "DescribeLimits", {}) + .n("DynamoDBClient", "DescribeLimitsCommand") + .f(void 0, void 0) + .ser(se_DescribeLimitsCommand) + .de(de_DescribeLimitsCommand) + .build() { +} diff --git a/amplify/functions/deleteDocument/node_modules/@aws-sdk/client-dynamodb/dist-es/commands/DescribeTableCommand.js b/amplify/functions/deleteDocument/node_modules/@aws-sdk/client-dynamodb/dist-es/commands/DescribeTableCommand.js new file mode 100644 index 0000000..b2a3ddf --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@aws-sdk/client-dynamodb/dist-es/commands/DescribeTableCommand.js @@ -0,0 +1,25 @@ +import { getEndpointPlugin } from "@smithy/middleware-endpoint"; +import { getSerdePlugin } from "@smithy/middleware-serde"; +import { Command as $Command } from "@smithy/smithy-client"; +import { commonParams } from "../endpoint/EndpointParameters"; +import { de_DescribeTableCommand, se_DescribeTableCommand } from "../protocols/Aws_json1_0"; +export { $Command }; +export class DescribeTableCommand extends $Command + .classBuilder() + .ep({ + ...commonParams, + ResourceArn: { type: "contextParams", name: "TableName" }, +}) + .m(function (Command, cs, config, o) { + return [ + getSerdePlugin(config, this.serialize, this.deserialize), + getEndpointPlugin(config, Command.getEndpointParameterInstructions()), + ]; +}) + .s("DynamoDB_20120810", "DescribeTable", {}) + .n("DynamoDBClient", "DescribeTableCommand") + .f(void 0, void 0) + .ser(se_DescribeTableCommand) + .de(de_DescribeTableCommand) + .build() { +} diff --git a/amplify/functions/deleteDocument/node_modules/@aws-sdk/client-dynamodb/dist-es/commands/DescribeTableReplicaAutoScalingCommand.js b/amplify/functions/deleteDocument/node_modules/@aws-sdk/client-dynamodb/dist-es/commands/DescribeTableReplicaAutoScalingCommand.js new file mode 100644 index 0000000..127d173 --- /dev/null +++ 
b/amplify/functions/deleteDocument/node_modules/@aws-sdk/client-dynamodb/dist-es/commands/DescribeTableReplicaAutoScalingCommand.js @@ -0,0 +1,25 @@ +import { getEndpointPlugin } from "@smithy/middleware-endpoint"; +import { getSerdePlugin } from "@smithy/middleware-serde"; +import { Command as $Command } from "@smithy/smithy-client"; +import { commonParams } from "../endpoint/EndpointParameters"; +import { de_DescribeTableReplicaAutoScalingCommand, se_DescribeTableReplicaAutoScalingCommand, } from "../protocols/Aws_json1_0"; +export { $Command }; +export class DescribeTableReplicaAutoScalingCommand extends $Command + .classBuilder() + .ep({ + ...commonParams, + ResourceArn: { type: "contextParams", name: "TableName" }, +}) + .m(function (Command, cs, config, o) { + return [ + getSerdePlugin(config, this.serialize, this.deserialize), + getEndpointPlugin(config, Command.getEndpointParameterInstructions()), + ]; +}) + .s("DynamoDB_20120810", "DescribeTableReplicaAutoScaling", {}) + .n("DynamoDBClient", "DescribeTableReplicaAutoScalingCommand") + .f(void 0, void 0) + .ser(se_DescribeTableReplicaAutoScalingCommand) + .de(de_DescribeTableReplicaAutoScalingCommand) + .build() { +} diff --git a/amplify/functions/deleteDocument/node_modules/@aws-sdk/client-dynamodb/dist-es/commands/DescribeTimeToLiveCommand.js b/amplify/functions/deleteDocument/node_modules/@aws-sdk/client-dynamodb/dist-es/commands/DescribeTimeToLiveCommand.js new file mode 100644 index 0000000..0ae052c --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@aws-sdk/client-dynamodb/dist-es/commands/DescribeTimeToLiveCommand.js @@ -0,0 +1,25 @@ +import { getEndpointPlugin } from "@smithy/middleware-endpoint"; +import { getSerdePlugin } from "@smithy/middleware-serde"; +import { Command as $Command } from "@smithy/smithy-client"; +import { commonParams } from "../endpoint/EndpointParameters"; +import { de_DescribeTimeToLiveCommand, se_DescribeTimeToLiveCommand } from "../protocols/Aws_json1_0"; 
+export { $Command }; +export class DescribeTimeToLiveCommand extends $Command + .classBuilder() + .ep({ + ...commonParams, + ResourceArn: { type: "contextParams", name: "TableName" }, +}) + .m(function (Command, cs, config, o) { + return [ + getSerdePlugin(config, this.serialize, this.deserialize), + getEndpointPlugin(config, Command.getEndpointParameterInstructions()), + ]; +}) + .s("DynamoDB_20120810", "DescribeTimeToLive", {}) + .n("DynamoDBClient", "DescribeTimeToLiveCommand") + .f(void 0, void 0) + .ser(se_DescribeTimeToLiveCommand) + .de(de_DescribeTimeToLiveCommand) + .build() { +} diff --git a/amplify/functions/deleteDocument/node_modules/@aws-sdk/client-dynamodb/dist-es/commands/DisableKinesisStreamingDestinationCommand.js b/amplify/functions/deleteDocument/node_modules/@aws-sdk/client-dynamodb/dist-es/commands/DisableKinesisStreamingDestinationCommand.js new file mode 100644 index 0000000..59ac9ad --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@aws-sdk/client-dynamodb/dist-es/commands/DisableKinesisStreamingDestinationCommand.js @@ -0,0 +1,25 @@ +import { getEndpointPlugin } from "@smithy/middleware-endpoint"; +import { getSerdePlugin } from "@smithy/middleware-serde"; +import { Command as $Command } from "@smithy/smithy-client"; +import { commonParams } from "../endpoint/EndpointParameters"; +import { de_DisableKinesisStreamingDestinationCommand, se_DisableKinesisStreamingDestinationCommand, } from "../protocols/Aws_json1_0"; +export { $Command }; +export class DisableKinesisStreamingDestinationCommand extends $Command + .classBuilder() + .ep({ + ...commonParams, + ResourceArn: { type: "contextParams", name: "TableName" }, +}) + .m(function (Command, cs, config, o) { + return [ + getSerdePlugin(config, this.serialize, this.deserialize), + getEndpointPlugin(config, Command.getEndpointParameterInstructions()), + ]; +}) + .s("DynamoDB_20120810", "DisableKinesisStreamingDestination", {}) + .n("DynamoDBClient", 
"DisableKinesisStreamingDestinationCommand") + .f(void 0, void 0) + .ser(se_DisableKinesisStreamingDestinationCommand) + .de(de_DisableKinesisStreamingDestinationCommand) + .build() { +} diff --git a/amplify/functions/deleteDocument/node_modules/@aws-sdk/client-dynamodb/dist-es/commands/EnableKinesisStreamingDestinationCommand.js b/amplify/functions/deleteDocument/node_modules/@aws-sdk/client-dynamodb/dist-es/commands/EnableKinesisStreamingDestinationCommand.js new file mode 100644 index 0000000..e96a5f5 --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@aws-sdk/client-dynamodb/dist-es/commands/EnableKinesisStreamingDestinationCommand.js @@ -0,0 +1,25 @@ +import { getEndpointPlugin } from "@smithy/middleware-endpoint"; +import { getSerdePlugin } from "@smithy/middleware-serde"; +import { Command as $Command } from "@smithy/smithy-client"; +import { commonParams } from "../endpoint/EndpointParameters"; +import { de_EnableKinesisStreamingDestinationCommand, se_EnableKinesisStreamingDestinationCommand, } from "../protocols/Aws_json1_0"; +export { $Command }; +export class EnableKinesisStreamingDestinationCommand extends $Command + .classBuilder() + .ep({ + ...commonParams, + ResourceArn: { type: "contextParams", name: "TableName" }, +}) + .m(function (Command, cs, config, o) { + return [ + getSerdePlugin(config, this.serialize, this.deserialize), + getEndpointPlugin(config, Command.getEndpointParameterInstructions()), + ]; +}) + .s("DynamoDB_20120810", "EnableKinesisStreamingDestination", {}) + .n("DynamoDBClient", "EnableKinesisStreamingDestinationCommand") + .f(void 0, void 0) + .ser(se_EnableKinesisStreamingDestinationCommand) + .de(de_EnableKinesisStreamingDestinationCommand) + .build() { +} diff --git a/amplify/functions/deleteDocument/node_modules/@aws-sdk/client-dynamodb/dist-es/commands/ExecuteStatementCommand.js b/amplify/functions/deleteDocument/node_modules/@aws-sdk/client-dynamodb/dist-es/commands/ExecuteStatementCommand.js new file mode 
100644 index 0000000..8402c48 --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@aws-sdk/client-dynamodb/dist-es/commands/ExecuteStatementCommand.js @@ -0,0 +1,22 @@ +import { getEndpointPlugin } from "@smithy/middleware-endpoint"; +import { getSerdePlugin } from "@smithy/middleware-serde"; +import { Command as $Command } from "@smithy/smithy-client"; +import { commonParams } from "../endpoint/EndpointParameters"; +import { de_ExecuteStatementCommand, se_ExecuteStatementCommand } from "../protocols/Aws_json1_0"; +export { $Command }; +export class ExecuteStatementCommand extends $Command + .classBuilder() + .ep(commonParams) + .m(function (Command, cs, config, o) { + return [ + getSerdePlugin(config, this.serialize, this.deserialize), + getEndpointPlugin(config, Command.getEndpointParameterInstructions()), + ]; +}) + .s("DynamoDB_20120810", "ExecuteStatement", {}) + .n("DynamoDBClient", "ExecuteStatementCommand") + .f(void 0, void 0) + .ser(se_ExecuteStatementCommand) + .de(de_ExecuteStatementCommand) + .build() { +} diff --git a/amplify/functions/deleteDocument/node_modules/@aws-sdk/client-dynamodb/dist-es/commands/ExecuteTransactionCommand.js b/amplify/functions/deleteDocument/node_modules/@aws-sdk/client-dynamodb/dist-es/commands/ExecuteTransactionCommand.js new file mode 100644 index 0000000..2298f6f --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@aws-sdk/client-dynamodb/dist-es/commands/ExecuteTransactionCommand.js @@ -0,0 +1,22 @@ +import { getEndpointPlugin } from "@smithy/middleware-endpoint"; +import { getSerdePlugin } from "@smithy/middleware-serde"; +import { Command as $Command } from "@smithy/smithy-client"; +import { commonParams } from "../endpoint/EndpointParameters"; +import { de_ExecuteTransactionCommand, se_ExecuteTransactionCommand } from "../protocols/Aws_json1_0"; +export { $Command }; +export class ExecuteTransactionCommand extends $Command + .classBuilder() + .ep(commonParams) + .m(function (Command, cs, 
config, o) { + return [ + getSerdePlugin(config, this.serialize, this.deserialize), + getEndpointPlugin(config, Command.getEndpointParameterInstructions()), + ]; +}) + .s("DynamoDB_20120810", "ExecuteTransaction", {}) + .n("DynamoDBClient", "ExecuteTransactionCommand") + .f(void 0, void 0) + .ser(se_ExecuteTransactionCommand) + .de(de_ExecuteTransactionCommand) + .build() { +} diff --git a/amplify/functions/deleteDocument/node_modules/@aws-sdk/client-dynamodb/dist-es/commands/ExportTableToPointInTimeCommand.js b/amplify/functions/deleteDocument/node_modules/@aws-sdk/client-dynamodb/dist-es/commands/ExportTableToPointInTimeCommand.js new file mode 100644 index 0000000..7cd72fc --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@aws-sdk/client-dynamodb/dist-es/commands/ExportTableToPointInTimeCommand.js @@ -0,0 +1,25 @@ +import { getEndpointPlugin } from "@smithy/middleware-endpoint"; +import { getSerdePlugin } from "@smithy/middleware-serde"; +import { Command as $Command } from "@smithy/smithy-client"; +import { commonParams } from "../endpoint/EndpointParameters"; +import { de_ExportTableToPointInTimeCommand, se_ExportTableToPointInTimeCommand } from "../protocols/Aws_json1_0"; +export { $Command }; +export class ExportTableToPointInTimeCommand extends $Command + .classBuilder() + .ep({ + ...commonParams, + ResourceArn: { type: "contextParams", name: "TableArn" }, +}) + .m(function (Command, cs, config, o) { + return [ + getSerdePlugin(config, this.serialize, this.deserialize), + getEndpointPlugin(config, Command.getEndpointParameterInstructions()), + ]; +}) + .s("DynamoDB_20120810", "ExportTableToPointInTime", {}) + .n("DynamoDBClient", "ExportTableToPointInTimeCommand") + .f(void 0, void 0) + .ser(se_ExportTableToPointInTimeCommand) + .de(de_ExportTableToPointInTimeCommand) + .build() { +} diff --git a/amplify/functions/deleteDocument/node_modules/@aws-sdk/client-dynamodb/dist-es/commands/GetItemCommand.js 
b/amplify/functions/deleteDocument/node_modules/@aws-sdk/client-dynamodb/dist-es/commands/GetItemCommand.js new file mode 100644 index 0000000..9b8e996 --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@aws-sdk/client-dynamodb/dist-es/commands/GetItemCommand.js @@ -0,0 +1,25 @@ +import { getEndpointPlugin } from "@smithy/middleware-endpoint"; +import { getSerdePlugin } from "@smithy/middleware-serde"; +import { Command as $Command } from "@smithy/smithy-client"; +import { commonParams } from "../endpoint/EndpointParameters"; +import { de_GetItemCommand, se_GetItemCommand } from "../protocols/Aws_json1_0"; +export { $Command }; +export class GetItemCommand extends $Command + .classBuilder() + .ep({ + ...commonParams, + ResourceArn: { type: "contextParams", name: "TableName" }, +}) + .m(function (Command, cs, config, o) { + return [ + getSerdePlugin(config, this.serialize, this.deserialize), + getEndpointPlugin(config, Command.getEndpointParameterInstructions()), + ]; +}) + .s("DynamoDB_20120810", "GetItem", {}) + .n("DynamoDBClient", "GetItemCommand") + .f(void 0, void 0) + .ser(se_GetItemCommand) + .de(de_GetItemCommand) + .build() { +} diff --git a/amplify/functions/deleteDocument/node_modules/@aws-sdk/client-dynamodb/dist-es/commands/GetResourcePolicyCommand.js b/amplify/functions/deleteDocument/node_modules/@aws-sdk/client-dynamodb/dist-es/commands/GetResourcePolicyCommand.js new file mode 100644 index 0000000..9b67404 --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@aws-sdk/client-dynamodb/dist-es/commands/GetResourcePolicyCommand.js @@ -0,0 +1,25 @@ +import { getEndpointPlugin } from "@smithy/middleware-endpoint"; +import { getSerdePlugin } from "@smithy/middleware-serde"; +import { Command as $Command } from "@smithy/smithy-client"; +import { commonParams } from "../endpoint/EndpointParameters"; +import { de_GetResourcePolicyCommand, se_GetResourcePolicyCommand } from "../protocols/Aws_json1_0"; +export { $Command }; +export 
class GetResourcePolicyCommand extends $Command + .classBuilder() + .ep({ + ...commonParams, + ResourceArn: { type: "contextParams", name: "ResourceArn" }, +}) + .m(function (Command, cs, config, o) { + return [ + getSerdePlugin(config, this.serialize, this.deserialize), + getEndpointPlugin(config, Command.getEndpointParameterInstructions()), + ]; +}) + .s("DynamoDB_20120810", "GetResourcePolicy", {}) + .n("DynamoDBClient", "GetResourcePolicyCommand") + .f(void 0, void 0) + .ser(se_GetResourcePolicyCommand) + .de(de_GetResourcePolicyCommand) + .build() { +} diff --git a/amplify/functions/deleteDocument/node_modules/@aws-sdk/client-dynamodb/dist-es/commands/ImportTableCommand.js b/amplify/functions/deleteDocument/node_modules/@aws-sdk/client-dynamodb/dist-es/commands/ImportTableCommand.js new file mode 100644 index 0000000..2f7a05d --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@aws-sdk/client-dynamodb/dist-es/commands/ImportTableCommand.js @@ -0,0 +1,25 @@ +import { getEndpointPlugin } from "@smithy/middleware-endpoint"; +import { getSerdePlugin } from "@smithy/middleware-serde"; +import { Command as $Command } from "@smithy/smithy-client"; +import { commonParams } from "../endpoint/EndpointParameters"; +import { de_ImportTableCommand, se_ImportTableCommand } from "../protocols/Aws_json1_0"; +export { $Command }; +export class ImportTableCommand extends $Command + .classBuilder() + .ep({ + ...commonParams, + ResourceArn: { type: "operationContextParams", get: (input) => input?.TableCreationParameters?.TableName }, +}) + .m(function (Command, cs, config, o) { + return [ + getSerdePlugin(config, this.serialize, this.deserialize), + getEndpointPlugin(config, Command.getEndpointParameterInstructions()), + ]; +}) + .s("DynamoDB_20120810", "ImportTable", {}) + .n("DynamoDBClient", "ImportTableCommand") + .f(void 0, void 0) + .ser(se_ImportTableCommand) + .de(de_ImportTableCommand) + .build() { +} diff --git 
a/amplify/functions/deleteDocument/node_modules/@aws-sdk/client-dynamodb/dist-es/commands/ListBackupsCommand.js b/amplify/functions/deleteDocument/node_modules/@aws-sdk/client-dynamodb/dist-es/commands/ListBackupsCommand.js new file mode 100644 index 0000000..8e378a6 --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@aws-sdk/client-dynamodb/dist-es/commands/ListBackupsCommand.js @@ -0,0 +1,25 @@ +import { getEndpointPlugin } from "@smithy/middleware-endpoint"; +import { getSerdePlugin } from "@smithy/middleware-serde"; +import { Command as $Command } from "@smithy/smithy-client"; +import { commonParams } from "../endpoint/EndpointParameters"; +import { de_ListBackupsCommand, se_ListBackupsCommand } from "../protocols/Aws_json1_0"; +export { $Command }; +export class ListBackupsCommand extends $Command + .classBuilder() + .ep({ + ...commonParams, + ResourceArn: { type: "contextParams", name: "TableName" }, +}) + .m(function (Command, cs, config, o) { + return [ + getSerdePlugin(config, this.serialize, this.deserialize), + getEndpointPlugin(config, Command.getEndpointParameterInstructions()), + ]; +}) + .s("DynamoDB_20120810", "ListBackups", {}) + .n("DynamoDBClient", "ListBackupsCommand") + .f(void 0, void 0) + .ser(se_ListBackupsCommand) + .de(de_ListBackupsCommand) + .build() { +} diff --git a/amplify/functions/deleteDocument/node_modules/@aws-sdk/client-dynamodb/dist-es/commands/ListContributorInsightsCommand.js b/amplify/functions/deleteDocument/node_modules/@aws-sdk/client-dynamodb/dist-es/commands/ListContributorInsightsCommand.js new file mode 100644 index 0000000..09e5506 --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@aws-sdk/client-dynamodb/dist-es/commands/ListContributorInsightsCommand.js @@ -0,0 +1,25 @@ +import { getEndpointPlugin } from "@smithy/middleware-endpoint"; +import { getSerdePlugin } from "@smithy/middleware-serde"; +import { Command as $Command } from "@smithy/smithy-client"; +import { commonParams } from 
"../endpoint/EndpointParameters"; +import { de_ListContributorInsightsCommand, se_ListContributorInsightsCommand } from "../protocols/Aws_json1_0"; +export { $Command }; +export class ListContributorInsightsCommand extends $Command + .classBuilder() + .ep({ + ...commonParams, + ResourceArn: { type: "contextParams", name: "TableName" }, +}) + .m(function (Command, cs, config, o) { + return [ + getSerdePlugin(config, this.serialize, this.deserialize), + getEndpointPlugin(config, Command.getEndpointParameterInstructions()), + ]; +}) + .s("DynamoDB_20120810", "ListContributorInsights", {}) + .n("DynamoDBClient", "ListContributorInsightsCommand") + .f(void 0, void 0) + .ser(se_ListContributorInsightsCommand) + .de(de_ListContributorInsightsCommand) + .build() { +} diff --git a/amplify/functions/deleteDocument/node_modules/@aws-sdk/client-dynamodb/dist-es/commands/ListExportsCommand.js b/amplify/functions/deleteDocument/node_modules/@aws-sdk/client-dynamodb/dist-es/commands/ListExportsCommand.js new file mode 100644 index 0000000..52e6bf0 --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@aws-sdk/client-dynamodb/dist-es/commands/ListExportsCommand.js @@ -0,0 +1,25 @@ +import { getEndpointPlugin } from "@smithy/middleware-endpoint"; +import { getSerdePlugin } from "@smithy/middleware-serde"; +import { Command as $Command } from "@smithy/smithy-client"; +import { commonParams } from "../endpoint/EndpointParameters"; +import { de_ListExportsCommand, se_ListExportsCommand } from "../protocols/Aws_json1_0"; +export { $Command }; +export class ListExportsCommand extends $Command + .classBuilder() + .ep({ + ...commonParams, + ResourceArn: { type: "contextParams", name: "TableArn" }, +}) + .m(function (Command, cs, config, o) { + return [ + getSerdePlugin(config, this.serialize, this.deserialize), + getEndpointPlugin(config, Command.getEndpointParameterInstructions()), + ]; +}) + .s("DynamoDB_20120810", "ListExports", {}) + .n("DynamoDBClient", 
"ListExportsCommand") + .f(void 0, void 0) + .ser(se_ListExportsCommand) + .de(de_ListExportsCommand) + .build() { +} diff --git a/amplify/functions/deleteDocument/node_modules/@aws-sdk/client-dynamodb/dist-es/commands/ListGlobalTablesCommand.js b/amplify/functions/deleteDocument/node_modules/@aws-sdk/client-dynamodb/dist-es/commands/ListGlobalTablesCommand.js new file mode 100644 index 0000000..01def1c --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@aws-sdk/client-dynamodb/dist-es/commands/ListGlobalTablesCommand.js @@ -0,0 +1,22 @@ +import { getEndpointPlugin } from "@smithy/middleware-endpoint"; +import { getSerdePlugin } from "@smithy/middleware-serde"; +import { Command as $Command } from "@smithy/smithy-client"; +import { commonParams } from "../endpoint/EndpointParameters"; +import { de_ListGlobalTablesCommand, se_ListGlobalTablesCommand } from "../protocols/Aws_json1_0"; +export { $Command }; +export class ListGlobalTablesCommand extends $Command + .classBuilder() + .ep(commonParams) + .m(function (Command, cs, config, o) { + return [ + getSerdePlugin(config, this.serialize, this.deserialize), + getEndpointPlugin(config, Command.getEndpointParameterInstructions()), + ]; +}) + .s("DynamoDB_20120810", "ListGlobalTables", {}) + .n("DynamoDBClient", "ListGlobalTablesCommand") + .f(void 0, void 0) + .ser(se_ListGlobalTablesCommand) + .de(de_ListGlobalTablesCommand) + .build() { +} diff --git a/amplify/functions/deleteDocument/node_modules/@aws-sdk/client-dynamodb/dist-es/commands/ListImportsCommand.js b/amplify/functions/deleteDocument/node_modules/@aws-sdk/client-dynamodb/dist-es/commands/ListImportsCommand.js new file mode 100644 index 0000000..89b1f8b --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@aws-sdk/client-dynamodb/dist-es/commands/ListImportsCommand.js @@ -0,0 +1,25 @@ +import { getEndpointPlugin } from "@smithy/middleware-endpoint"; +import { getSerdePlugin } from "@smithy/middleware-serde"; +import { Command as 
$Command } from "@smithy/smithy-client"; +import { commonParams } from "../endpoint/EndpointParameters"; +import { de_ListImportsCommand, se_ListImportsCommand } from "../protocols/Aws_json1_0"; +export { $Command }; +export class ListImportsCommand extends $Command + .classBuilder() + .ep({ + ...commonParams, + ResourceArn: { type: "contextParams", name: "TableArn" }, +}) + .m(function (Command, cs, config, o) { + return [ + getSerdePlugin(config, this.serialize, this.deserialize), + getEndpointPlugin(config, Command.getEndpointParameterInstructions()), + ]; +}) + .s("DynamoDB_20120810", "ListImports", {}) + .n("DynamoDBClient", "ListImportsCommand") + .f(void 0, void 0) + .ser(se_ListImportsCommand) + .de(de_ListImportsCommand) + .build() { +} diff --git a/amplify/functions/deleteDocument/node_modules/@aws-sdk/client-dynamodb/dist-es/commands/ListTablesCommand.js b/amplify/functions/deleteDocument/node_modules/@aws-sdk/client-dynamodb/dist-es/commands/ListTablesCommand.js new file mode 100644 index 0000000..6b299d1 --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@aws-sdk/client-dynamodb/dist-es/commands/ListTablesCommand.js @@ -0,0 +1,22 @@ +import { getEndpointPlugin } from "@smithy/middleware-endpoint"; +import { getSerdePlugin } from "@smithy/middleware-serde"; +import { Command as $Command } from "@smithy/smithy-client"; +import { commonParams } from "../endpoint/EndpointParameters"; +import { de_ListTablesCommand, se_ListTablesCommand } from "../protocols/Aws_json1_0"; +export { $Command }; +export class ListTablesCommand extends $Command + .classBuilder() + .ep(commonParams) + .m(function (Command, cs, config, o) { + return [ + getSerdePlugin(config, this.serialize, this.deserialize), + getEndpointPlugin(config, Command.getEndpointParameterInstructions()), + ]; +}) + .s("DynamoDB_20120810", "ListTables", {}) + .n("DynamoDBClient", "ListTablesCommand") + .f(void 0, void 0) + .ser(se_ListTablesCommand) + .de(de_ListTablesCommand) + .build() 
{ +} diff --git a/amplify/functions/deleteDocument/node_modules/@aws-sdk/client-dynamodb/dist-es/commands/ListTagsOfResourceCommand.js b/amplify/functions/deleteDocument/node_modules/@aws-sdk/client-dynamodb/dist-es/commands/ListTagsOfResourceCommand.js new file mode 100644 index 0000000..1e0fb57 --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@aws-sdk/client-dynamodb/dist-es/commands/ListTagsOfResourceCommand.js @@ -0,0 +1,25 @@ +import { getEndpointPlugin } from "@smithy/middleware-endpoint"; +import { getSerdePlugin } from "@smithy/middleware-serde"; +import { Command as $Command } from "@smithy/smithy-client"; +import { commonParams } from "../endpoint/EndpointParameters"; +import { de_ListTagsOfResourceCommand, se_ListTagsOfResourceCommand } from "../protocols/Aws_json1_0"; +export { $Command }; +export class ListTagsOfResourceCommand extends $Command + .classBuilder() + .ep({ + ...commonParams, + ResourceArn: { type: "contextParams", name: "ResourceArn" }, +}) + .m(function (Command, cs, config, o) { + return [ + getSerdePlugin(config, this.serialize, this.deserialize), + getEndpointPlugin(config, Command.getEndpointParameterInstructions()), + ]; +}) + .s("DynamoDB_20120810", "ListTagsOfResource", {}) + .n("DynamoDBClient", "ListTagsOfResourceCommand") + .f(void 0, void 0) + .ser(se_ListTagsOfResourceCommand) + .de(de_ListTagsOfResourceCommand) + .build() { +} diff --git a/amplify/functions/deleteDocument/node_modules/@aws-sdk/client-dynamodb/dist-es/commands/PutItemCommand.js b/amplify/functions/deleteDocument/node_modules/@aws-sdk/client-dynamodb/dist-es/commands/PutItemCommand.js new file mode 100644 index 0000000..7d4bef4 --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@aws-sdk/client-dynamodb/dist-es/commands/PutItemCommand.js @@ -0,0 +1,25 @@ +import { getEndpointPlugin } from "@smithy/middleware-endpoint"; +import { getSerdePlugin } from "@smithy/middleware-serde"; +import { Command as $Command } from 
"@smithy/smithy-client"; +import { commonParams } from "../endpoint/EndpointParameters"; +import { de_PutItemCommand, se_PutItemCommand } from "../protocols/Aws_json1_0"; +export { $Command }; +export class PutItemCommand extends $Command + .classBuilder() + .ep({ + ...commonParams, + ResourceArn: { type: "contextParams", name: "TableName" }, +}) + .m(function (Command, cs, config, o) { + return [ + getSerdePlugin(config, this.serialize, this.deserialize), + getEndpointPlugin(config, Command.getEndpointParameterInstructions()), + ]; +}) + .s("DynamoDB_20120810", "PutItem", {}) + .n("DynamoDBClient", "PutItemCommand") + .f(void 0, void 0) + .ser(se_PutItemCommand) + .de(de_PutItemCommand) + .build() { +} diff --git a/amplify/functions/deleteDocument/node_modules/@aws-sdk/client-dynamodb/dist-es/commands/PutResourcePolicyCommand.js b/amplify/functions/deleteDocument/node_modules/@aws-sdk/client-dynamodb/dist-es/commands/PutResourcePolicyCommand.js new file mode 100644 index 0000000..d683507 --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@aws-sdk/client-dynamodb/dist-es/commands/PutResourcePolicyCommand.js @@ -0,0 +1,25 @@ +import { getEndpointPlugin } from "@smithy/middleware-endpoint"; +import { getSerdePlugin } from "@smithy/middleware-serde"; +import { Command as $Command } from "@smithy/smithy-client"; +import { commonParams } from "../endpoint/EndpointParameters"; +import { de_PutResourcePolicyCommand, se_PutResourcePolicyCommand } from "../protocols/Aws_json1_0"; +export { $Command }; +export class PutResourcePolicyCommand extends $Command + .classBuilder() + .ep({ + ...commonParams, + ResourceArn: { type: "contextParams", name: "ResourceArn" }, +}) + .m(function (Command, cs, config, o) { + return [ + getSerdePlugin(config, this.serialize, this.deserialize), + getEndpointPlugin(config, Command.getEndpointParameterInstructions()), + ]; +}) + .s("DynamoDB_20120810", "PutResourcePolicy", {}) + .n("DynamoDBClient", "PutResourcePolicyCommand") + 
.f(void 0, void 0) + .ser(se_PutResourcePolicyCommand) + .de(de_PutResourcePolicyCommand) + .build() { +} diff --git a/amplify/functions/deleteDocument/node_modules/@aws-sdk/client-dynamodb/dist-es/commands/QueryCommand.js b/amplify/functions/deleteDocument/node_modules/@aws-sdk/client-dynamodb/dist-es/commands/QueryCommand.js new file mode 100644 index 0000000..9ee6441 --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@aws-sdk/client-dynamodb/dist-es/commands/QueryCommand.js @@ -0,0 +1,25 @@ +import { getEndpointPlugin } from "@smithy/middleware-endpoint"; +import { getSerdePlugin } from "@smithy/middleware-serde"; +import { Command as $Command } from "@smithy/smithy-client"; +import { commonParams } from "../endpoint/EndpointParameters"; +import { de_QueryCommand, se_QueryCommand } from "../protocols/Aws_json1_0"; +export { $Command }; +export class QueryCommand extends $Command + .classBuilder() + .ep({ + ...commonParams, + ResourceArn: { type: "contextParams", name: "TableName" }, +}) + .m(function (Command, cs, config, o) { + return [ + getSerdePlugin(config, this.serialize, this.deserialize), + getEndpointPlugin(config, Command.getEndpointParameterInstructions()), + ]; +}) + .s("DynamoDB_20120810", "Query", {}) + .n("DynamoDBClient", "QueryCommand") + .f(void 0, void 0) + .ser(se_QueryCommand) + .de(de_QueryCommand) + .build() { +} diff --git a/amplify/functions/deleteDocument/node_modules/@aws-sdk/client-dynamodb/dist-es/commands/RestoreTableFromBackupCommand.js b/amplify/functions/deleteDocument/node_modules/@aws-sdk/client-dynamodb/dist-es/commands/RestoreTableFromBackupCommand.js new file mode 100644 index 0000000..7dd9ba0 --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@aws-sdk/client-dynamodb/dist-es/commands/RestoreTableFromBackupCommand.js @@ -0,0 +1,25 @@ +import { getEndpointPlugin } from "@smithy/middleware-endpoint"; +import { getSerdePlugin } from "@smithy/middleware-serde"; +import { Command as $Command } from 
"@smithy/smithy-client"; +import { commonParams } from "../endpoint/EndpointParameters"; +import { de_RestoreTableFromBackupCommand, se_RestoreTableFromBackupCommand } from "../protocols/Aws_json1_0"; +export { $Command }; +export class RestoreTableFromBackupCommand extends $Command + .classBuilder() + .ep({ + ...commonParams, + ResourceArn: { type: "contextParams", name: "TargetTableName" }, +}) + .m(function (Command, cs, config, o) { + return [ + getSerdePlugin(config, this.serialize, this.deserialize), + getEndpointPlugin(config, Command.getEndpointParameterInstructions()), + ]; +}) + .s("DynamoDB_20120810", "RestoreTableFromBackup", {}) + .n("DynamoDBClient", "RestoreTableFromBackupCommand") + .f(void 0, void 0) + .ser(se_RestoreTableFromBackupCommand) + .de(de_RestoreTableFromBackupCommand) + .build() { +} diff --git a/amplify/functions/deleteDocument/node_modules/@aws-sdk/client-dynamodb/dist-es/commands/RestoreTableToPointInTimeCommand.js b/amplify/functions/deleteDocument/node_modules/@aws-sdk/client-dynamodb/dist-es/commands/RestoreTableToPointInTimeCommand.js new file mode 100644 index 0000000..98bbb12 --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@aws-sdk/client-dynamodb/dist-es/commands/RestoreTableToPointInTimeCommand.js @@ -0,0 +1,25 @@ +import { getEndpointPlugin } from "@smithy/middleware-endpoint"; +import { getSerdePlugin } from "@smithy/middleware-serde"; +import { Command as $Command } from "@smithy/smithy-client"; +import { commonParams } from "../endpoint/EndpointParameters"; +import { de_RestoreTableToPointInTimeCommand, se_RestoreTableToPointInTimeCommand } from "../protocols/Aws_json1_0"; +export { $Command }; +export class RestoreTableToPointInTimeCommand extends $Command + .classBuilder() + .ep({ + ...commonParams, + ResourceArn: { type: "contextParams", name: "TargetTableName" }, +}) + .m(function (Command, cs, config, o) { + return [ + getSerdePlugin(config, this.serialize, this.deserialize), + 
getEndpointPlugin(config, Command.getEndpointParameterInstructions()), + ]; +}) + .s("DynamoDB_20120810", "RestoreTableToPointInTime", {}) + .n("DynamoDBClient", "RestoreTableToPointInTimeCommand") + .f(void 0, void 0) + .ser(se_RestoreTableToPointInTimeCommand) + .de(de_RestoreTableToPointInTimeCommand) + .build() { +} diff --git a/amplify/functions/deleteDocument/node_modules/@aws-sdk/client-dynamodb/dist-es/commands/ScanCommand.js b/amplify/functions/deleteDocument/node_modules/@aws-sdk/client-dynamodb/dist-es/commands/ScanCommand.js new file mode 100644 index 0000000..66285c2 --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@aws-sdk/client-dynamodb/dist-es/commands/ScanCommand.js @@ -0,0 +1,25 @@ +import { getEndpointPlugin } from "@smithy/middleware-endpoint"; +import { getSerdePlugin } from "@smithy/middleware-serde"; +import { Command as $Command } from "@smithy/smithy-client"; +import { commonParams } from "../endpoint/EndpointParameters"; +import { de_ScanCommand, se_ScanCommand } from "../protocols/Aws_json1_0"; +export { $Command }; +export class ScanCommand extends $Command + .classBuilder() + .ep({ + ...commonParams, + ResourceArn: { type: "contextParams", name: "TableName" }, +}) + .m(function (Command, cs, config, o) { + return [ + getSerdePlugin(config, this.serialize, this.deserialize), + getEndpointPlugin(config, Command.getEndpointParameterInstructions()), + ]; +}) + .s("DynamoDB_20120810", "Scan", {}) + .n("DynamoDBClient", "ScanCommand") + .f(void 0, void 0) + .ser(se_ScanCommand) + .de(de_ScanCommand) + .build() { +} diff --git a/amplify/functions/deleteDocument/node_modules/@aws-sdk/client-dynamodb/dist-es/commands/TagResourceCommand.js b/amplify/functions/deleteDocument/node_modules/@aws-sdk/client-dynamodb/dist-es/commands/TagResourceCommand.js new file mode 100644 index 0000000..244c7c2 --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@aws-sdk/client-dynamodb/dist-es/commands/TagResourceCommand.js @@ -0,0 
+1,25 @@ +import { getEndpointPlugin } from "@smithy/middleware-endpoint"; +import { getSerdePlugin } from "@smithy/middleware-serde"; +import { Command as $Command } from "@smithy/smithy-client"; +import { commonParams } from "../endpoint/EndpointParameters"; +import { de_TagResourceCommand, se_TagResourceCommand } from "../protocols/Aws_json1_0"; +export { $Command }; +export class TagResourceCommand extends $Command + .classBuilder() + .ep({ + ...commonParams, + ResourceArn: { type: "contextParams", name: "ResourceArn" }, +}) + .m(function (Command, cs, config, o) { + return [ + getSerdePlugin(config, this.serialize, this.deserialize), + getEndpointPlugin(config, Command.getEndpointParameterInstructions()), + ]; +}) + .s("DynamoDB_20120810", "TagResource", {}) + .n("DynamoDBClient", "TagResourceCommand") + .f(void 0, void 0) + .ser(se_TagResourceCommand) + .de(de_TagResourceCommand) + .build() { +} diff --git a/amplify/functions/deleteDocument/node_modules/@aws-sdk/client-dynamodb/dist-es/commands/TransactGetItemsCommand.js b/amplify/functions/deleteDocument/node_modules/@aws-sdk/client-dynamodb/dist-es/commands/TransactGetItemsCommand.js new file mode 100644 index 0000000..82b3738 --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@aws-sdk/client-dynamodb/dist-es/commands/TransactGetItemsCommand.js @@ -0,0 +1,28 @@ +import { getEndpointPlugin } from "@smithy/middleware-endpoint"; +import { getSerdePlugin } from "@smithy/middleware-serde"; +import { Command as $Command } from "@smithy/smithy-client"; +import { commonParams } from "../endpoint/EndpointParameters"; +import { de_TransactGetItemsCommand, se_TransactGetItemsCommand } from "../protocols/Aws_json1_0"; +export { $Command }; +export class TransactGetItemsCommand extends $Command + .classBuilder() + .ep({ + ...commonParams, + ResourceArnList: { + type: "operationContextParams", + get: (input) => input?.TransactItems?.map((obj) => obj?.Get?.TableName), + }, +}) + .m(function (Command, cs, 
config, o) { + return [ + getSerdePlugin(config, this.serialize, this.deserialize), + getEndpointPlugin(config, Command.getEndpointParameterInstructions()), + ]; +}) + .s("DynamoDB_20120810", "TransactGetItems", {}) + .n("DynamoDBClient", "TransactGetItemsCommand") + .f(void 0, void 0) + .ser(se_TransactGetItemsCommand) + .de(de_TransactGetItemsCommand) + .build() { +} diff --git a/amplify/functions/deleteDocument/node_modules/@aws-sdk/client-dynamodb/dist-es/commands/TransactWriteItemsCommand.js b/amplify/functions/deleteDocument/node_modules/@aws-sdk/client-dynamodb/dist-es/commands/TransactWriteItemsCommand.js new file mode 100644 index 0000000..86d749b --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@aws-sdk/client-dynamodb/dist-es/commands/TransactWriteItemsCommand.js @@ -0,0 +1,28 @@ +import { getEndpointPlugin } from "@smithy/middleware-endpoint"; +import { getSerdePlugin } from "@smithy/middleware-serde"; +import { Command as $Command } from "@smithy/smithy-client"; +import { commonParams } from "../endpoint/EndpointParameters"; +import { de_TransactWriteItemsCommand, se_TransactWriteItemsCommand } from "../protocols/Aws_json1_0"; +export { $Command }; +export class TransactWriteItemsCommand extends $Command + .classBuilder() + .ep({ + ...commonParams, + ResourceArnList: { + type: "operationContextParams", + get: (input) => input?.TransactItems?.map((obj) => [obj?.ConditionCheck?.TableName, obj?.Put?.TableName, obj?.Delete?.TableName, obj?.Update?.TableName].filter((i) => i)).flat(), + }, +}) + .m(function (Command, cs, config, o) { + return [ + getSerdePlugin(config, this.serialize, this.deserialize), + getEndpointPlugin(config, Command.getEndpointParameterInstructions()), + ]; +}) + .s("DynamoDB_20120810", "TransactWriteItems", {}) + .n("DynamoDBClient", "TransactWriteItemsCommand") + .f(void 0, void 0) + .ser(se_TransactWriteItemsCommand) + .de(de_TransactWriteItemsCommand) + .build() { +} diff --git 
a/amplify/functions/deleteDocument/node_modules/@aws-sdk/client-dynamodb/dist-es/commands/UntagResourceCommand.js b/amplify/functions/deleteDocument/node_modules/@aws-sdk/client-dynamodb/dist-es/commands/UntagResourceCommand.js new file mode 100644 index 0000000..849acf1 --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@aws-sdk/client-dynamodb/dist-es/commands/UntagResourceCommand.js @@ -0,0 +1,25 @@ +import { getEndpointPlugin } from "@smithy/middleware-endpoint"; +import { getSerdePlugin } from "@smithy/middleware-serde"; +import { Command as $Command } from "@smithy/smithy-client"; +import { commonParams } from "../endpoint/EndpointParameters"; +import { de_UntagResourceCommand, se_UntagResourceCommand } from "../protocols/Aws_json1_0"; +export { $Command }; +export class UntagResourceCommand extends $Command + .classBuilder() + .ep({ + ...commonParams, + ResourceArn: { type: "contextParams", name: "ResourceArn" }, +}) + .m(function (Command, cs, config, o) { + return [ + getSerdePlugin(config, this.serialize, this.deserialize), + getEndpointPlugin(config, Command.getEndpointParameterInstructions()), + ]; +}) + .s("DynamoDB_20120810", "UntagResource", {}) + .n("DynamoDBClient", "UntagResourceCommand") + .f(void 0, void 0) + .ser(se_UntagResourceCommand) + .de(de_UntagResourceCommand) + .build() { +} diff --git a/amplify/functions/deleteDocument/node_modules/@aws-sdk/client-dynamodb/dist-es/commands/UpdateContinuousBackupsCommand.js b/amplify/functions/deleteDocument/node_modules/@aws-sdk/client-dynamodb/dist-es/commands/UpdateContinuousBackupsCommand.js new file mode 100644 index 0000000..753b715 --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@aws-sdk/client-dynamodb/dist-es/commands/UpdateContinuousBackupsCommand.js @@ -0,0 +1,25 @@ +import { getEndpointPlugin } from "@smithy/middleware-endpoint"; +import { getSerdePlugin } from "@smithy/middleware-serde"; +import { Command as $Command } from "@smithy/smithy-client"; +import 
{ commonParams } from "../endpoint/EndpointParameters"; +import { de_UpdateContinuousBackupsCommand, se_UpdateContinuousBackupsCommand } from "../protocols/Aws_json1_0"; +export { $Command }; +export class UpdateContinuousBackupsCommand extends $Command + .classBuilder() + .ep({ + ...commonParams, + ResourceArn: { type: "contextParams", name: "TableName" }, +}) + .m(function (Command, cs, config, o) { + return [ + getSerdePlugin(config, this.serialize, this.deserialize), + getEndpointPlugin(config, Command.getEndpointParameterInstructions()), + ]; +}) + .s("DynamoDB_20120810", "UpdateContinuousBackups", {}) + .n("DynamoDBClient", "UpdateContinuousBackupsCommand") + .f(void 0, void 0) + .ser(se_UpdateContinuousBackupsCommand) + .de(de_UpdateContinuousBackupsCommand) + .build() { +} diff --git a/amplify/functions/deleteDocument/node_modules/@aws-sdk/client-dynamodb/dist-es/commands/UpdateContributorInsightsCommand.js b/amplify/functions/deleteDocument/node_modules/@aws-sdk/client-dynamodb/dist-es/commands/UpdateContributorInsightsCommand.js new file mode 100644 index 0000000..8d43ddc --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@aws-sdk/client-dynamodb/dist-es/commands/UpdateContributorInsightsCommand.js @@ -0,0 +1,25 @@ +import { getEndpointPlugin } from "@smithy/middleware-endpoint"; +import { getSerdePlugin } from "@smithy/middleware-serde"; +import { Command as $Command } from "@smithy/smithy-client"; +import { commonParams } from "../endpoint/EndpointParameters"; +import { de_UpdateContributorInsightsCommand, se_UpdateContributorInsightsCommand } from "../protocols/Aws_json1_0"; +export { $Command }; +export class UpdateContributorInsightsCommand extends $Command + .classBuilder() + .ep({ + ...commonParams, + ResourceArn: { type: "contextParams", name: "TableName" }, +}) + .m(function (Command, cs, config, o) { + return [ + getSerdePlugin(config, this.serialize, this.deserialize), + getEndpointPlugin(config, 
Command.getEndpointParameterInstructions()), + ]; +}) + .s("DynamoDB_20120810", "UpdateContributorInsights", {}) + .n("DynamoDBClient", "UpdateContributorInsightsCommand") + .f(void 0, void 0) + .ser(se_UpdateContributorInsightsCommand) + .de(de_UpdateContributorInsightsCommand) + .build() { +} diff --git a/amplify/functions/deleteDocument/node_modules/@aws-sdk/client-dynamodb/dist-es/commands/UpdateGlobalTableCommand.js b/amplify/functions/deleteDocument/node_modules/@aws-sdk/client-dynamodb/dist-es/commands/UpdateGlobalTableCommand.js new file mode 100644 index 0000000..c613742 --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@aws-sdk/client-dynamodb/dist-es/commands/UpdateGlobalTableCommand.js @@ -0,0 +1,25 @@ +import { getEndpointPlugin } from "@smithy/middleware-endpoint"; +import { getSerdePlugin } from "@smithy/middleware-serde"; +import { Command as $Command } from "@smithy/smithy-client"; +import { commonParams } from "../endpoint/EndpointParameters"; +import { de_UpdateGlobalTableCommand, se_UpdateGlobalTableCommand } from "../protocols/Aws_json1_0"; +export { $Command }; +export class UpdateGlobalTableCommand extends $Command + .classBuilder() + .ep({ + ...commonParams, + ResourceArn: { type: "contextParams", name: "GlobalTableName" }, +}) + .m(function (Command, cs, config, o) { + return [ + getSerdePlugin(config, this.serialize, this.deserialize), + getEndpointPlugin(config, Command.getEndpointParameterInstructions()), + ]; +}) + .s("DynamoDB_20120810", "UpdateGlobalTable", {}) + .n("DynamoDBClient", "UpdateGlobalTableCommand") + .f(void 0, void 0) + .ser(se_UpdateGlobalTableCommand) + .de(de_UpdateGlobalTableCommand) + .build() { +} diff --git a/amplify/functions/deleteDocument/node_modules/@aws-sdk/client-dynamodb/dist-es/commands/UpdateGlobalTableSettingsCommand.js b/amplify/functions/deleteDocument/node_modules/@aws-sdk/client-dynamodb/dist-es/commands/UpdateGlobalTableSettingsCommand.js new file mode 100644 index 0000000..bab8a85 
--- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@aws-sdk/client-dynamodb/dist-es/commands/UpdateGlobalTableSettingsCommand.js @@ -0,0 +1,25 @@ +import { getEndpointPlugin } from "@smithy/middleware-endpoint"; +import { getSerdePlugin } from "@smithy/middleware-serde"; +import { Command as $Command } from "@smithy/smithy-client"; +import { commonParams } from "../endpoint/EndpointParameters"; +import { de_UpdateGlobalTableSettingsCommand, se_UpdateGlobalTableSettingsCommand } from "../protocols/Aws_json1_0"; +export { $Command }; +export class UpdateGlobalTableSettingsCommand extends $Command + .classBuilder() + .ep({ + ...commonParams, + ResourceArn: { type: "contextParams", name: "GlobalTableName" }, +}) + .m(function (Command, cs, config, o) { + return [ + getSerdePlugin(config, this.serialize, this.deserialize), + getEndpointPlugin(config, Command.getEndpointParameterInstructions()), + ]; +}) + .s("DynamoDB_20120810", "UpdateGlobalTableSettings", {}) + .n("DynamoDBClient", "UpdateGlobalTableSettingsCommand") + .f(void 0, void 0) + .ser(se_UpdateGlobalTableSettingsCommand) + .de(de_UpdateGlobalTableSettingsCommand) + .build() { +} diff --git a/amplify/functions/deleteDocument/node_modules/@aws-sdk/client-dynamodb/dist-es/commands/UpdateItemCommand.js b/amplify/functions/deleteDocument/node_modules/@aws-sdk/client-dynamodb/dist-es/commands/UpdateItemCommand.js new file mode 100644 index 0000000..1182c0d --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@aws-sdk/client-dynamodb/dist-es/commands/UpdateItemCommand.js @@ -0,0 +1,25 @@ +import { getEndpointPlugin } from "@smithy/middleware-endpoint"; +import { getSerdePlugin } from "@smithy/middleware-serde"; +import { Command as $Command } from "@smithy/smithy-client"; +import { commonParams } from "../endpoint/EndpointParameters"; +import { de_UpdateItemCommand, se_UpdateItemCommand } from "../protocols/Aws_json1_0"; +export { $Command }; +export class UpdateItemCommand extends 
$Command + .classBuilder() + .ep({ + ...commonParams, + ResourceArn: { type: "contextParams", name: "TableName" }, +}) + .m(function (Command, cs, config, o) { + return [ + getSerdePlugin(config, this.serialize, this.deserialize), + getEndpointPlugin(config, Command.getEndpointParameterInstructions()), + ]; +}) + .s("DynamoDB_20120810", "UpdateItem", {}) + .n("DynamoDBClient", "UpdateItemCommand") + .f(void 0, void 0) + .ser(se_UpdateItemCommand) + .de(de_UpdateItemCommand) + .build() { +} diff --git a/amplify/functions/deleteDocument/node_modules/@aws-sdk/client-dynamodb/dist-es/commands/UpdateKinesisStreamingDestinationCommand.js b/amplify/functions/deleteDocument/node_modules/@aws-sdk/client-dynamodb/dist-es/commands/UpdateKinesisStreamingDestinationCommand.js new file mode 100644 index 0000000..5f44195 --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@aws-sdk/client-dynamodb/dist-es/commands/UpdateKinesisStreamingDestinationCommand.js @@ -0,0 +1,25 @@ +import { getEndpointPlugin } from "@smithy/middleware-endpoint"; +import { getSerdePlugin } from "@smithy/middleware-serde"; +import { Command as $Command } from "@smithy/smithy-client"; +import { commonParams } from "../endpoint/EndpointParameters"; +import { de_UpdateKinesisStreamingDestinationCommand, se_UpdateKinesisStreamingDestinationCommand, } from "../protocols/Aws_json1_0"; +export { $Command }; +export class UpdateKinesisStreamingDestinationCommand extends $Command + .classBuilder() + .ep({ + ...commonParams, + ResourceArn: { type: "contextParams", name: "TableName" }, +}) + .m(function (Command, cs, config, o) { + return [ + getSerdePlugin(config, this.serialize, this.deserialize), + getEndpointPlugin(config, Command.getEndpointParameterInstructions()), + ]; +}) + .s("DynamoDB_20120810", "UpdateKinesisStreamingDestination", {}) + .n("DynamoDBClient", "UpdateKinesisStreamingDestinationCommand") + .f(void 0, void 0) + .ser(se_UpdateKinesisStreamingDestinationCommand) + 
.de(de_UpdateKinesisStreamingDestinationCommand) + .build() { +} diff --git a/amplify/functions/deleteDocument/node_modules/@aws-sdk/client-dynamodb/dist-es/commands/UpdateTableCommand.js b/amplify/functions/deleteDocument/node_modules/@aws-sdk/client-dynamodb/dist-es/commands/UpdateTableCommand.js new file mode 100644 index 0000000..845f3e6 --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@aws-sdk/client-dynamodb/dist-es/commands/UpdateTableCommand.js @@ -0,0 +1,25 @@ +import { getEndpointPlugin } from "@smithy/middleware-endpoint"; +import { getSerdePlugin } from "@smithy/middleware-serde"; +import { Command as $Command } from "@smithy/smithy-client"; +import { commonParams } from "../endpoint/EndpointParameters"; +import { de_UpdateTableCommand, se_UpdateTableCommand } from "../protocols/Aws_json1_0"; +export { $Command }; +export class UpdateTableCommand extends $Command + .classBuilder() + .ep({ + ...commonParams, + ResourceArn: { type: "contextParams", name: "TableName" }, +}) + .m(function (Command, cs, config, o) { + return [ + getSerdePlugin(config, this.serialize, this.deserialize), + getEndpointPlugin(config, Command.getEndpointParameterInstructions()), + ]; +}) + .s("DynamoDB_20120810", "UpdateTable", {}) + .n("DynamoDBClient", "UpdateTableCommand") + .f(void 0, void 0) + .ser(se_UpdateTableCommand) + .de(de_UpdateTableCommand) + .build() { +} diff --git a/amplify/functions/deleteDocument/node_modules/@aws-sdk/client-dynamodb/dist-es/commands/UpdateTableReplicaAutoScalingCommand.js b/amplify/functions/deleteDocument/node_modules/@aws-sdk/client-dynamodb/dist-es/commands/UpdateTableReplicaAutoScalingCommand.js new file mode 100644 index 0000000..99fb7f8 --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@aws-sdk/client-dynamodb/dist-es/commands/UpdateTableReplicaAutoScalingCommand.js @@ -0,0 +1,25 @@ +import { getEndpointPlugin } from "@smithy/middleware-endpoint"; +import { getSerdePlugin } from 
"@smithy/middleware-serde"; +import { Command as $Command } from "@smithy/smithy-client"; +import { commonParams } from "../endpoint/EndpointParameters"; +import { de_UpdateTableReplicaAutoScalingCommand, se_UpdateTableReplicaAutoScalingCommand, } from "../protocols/Aws_json1_0"; +export { $Command }; +export class UpdateTableReplicaAutoScalingCommand extends $Command + .classBuilder() + .ep({ + ...commonParams, + ResourceArn: { type: "contextParams", name: "TableName" }, +}) + .m(function (Command, cs, config, o) { + return [ + getSerdePlugin(config, this.serialize, this.deserialize), + getEndpointPlugin(config, Command.getEndpointParameterInstructions()), + ]; +}) + .s("DynamoDB_20120810", "UpdateTableReplicaAutoScaling", {}) + .n("DynamoDBClient", "UpdateTableReplicaAutoScalingCommand") + .f(void 0, void 0) + .ser(se_UpdateTableReplicaAutoScalingCommand) + .de(de_UpdateTableReplicaAutoScalingCommand) + .build() { +} diff --git a/amplify/functions/deleteDocument/node_modules/@aws-sdk/client-dynamodb/dist-es/commands/UpdateTimeToLiveCommand.js b/amplify/functions/deleteDocument/node_modules/@aws-sdk/client-dynamodb/dist-es/commands/UpdateTimeToLiveCommand.js new file mode 100644 index 0000000..97200fb --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@aws-sdk/client-dynamodb/dist-es/commands/UpdateTimeToLiveCommand.js @@ -0,0 +1,25 @@ +import { getEndpointPlugin } from "@smithy/middleware-endpoint"; +import { getSerdePlugin } from "@smithy/middleware-serde"; +import { Command as $Command } from "@smithy/smithy-client"; +import { commonParams } from "../endpoint/EndpointParameters"; +import { de_UpdateTimeToLiveCommand, se_UpdateTimeToLiveCommand } from "../protocols/Aws_json1_0"; +export { $Command }; +export class UpdateTimeToLiveCommand extends $Command + .classBuilder() + .ep({ + ...commonParams, + ResourceArn: { type: "contextParams", name: "TableName" }, +}) + .m(function (Command, cs, config, o) { + return [ + getSerdePlugin(config, 
this.serialize, this.deserialize), + getEndpointPlugin(config, Command.getEndpointParameterInstructions()), + ]; +}) + .s("DynamoDB_20120810", "UpdateTimeToLive", {}) + .n("DynamoDBClient", "UpdateTimeToLiveCommand") + .f(void 0, void 0) + .ser(se_UpdateTimeToLiveCommand) + .de(de_UpdateTimeToLiveCommand) + .build() { +} diff --git a/amplify/functions/deleteDocument/node_modules/@aws-sdk/client-dynamodb/dist-es/commands/index.js b/amplify/functions/deleteDocument/node_modules/@aws-sdk/client-dynamodb/dist-es/commands/index.js new file mode 100644 index 0000000..a5053a4 --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@aws-sdk/client-dynamodb/dist-es/commands/index.js @@ -0,0 +1,57 @@ +export * from "./BatchExecuteStatementCommand"; +export * from "./BatchGetItemCommand"; +export * from "./BatchWriteItemCommand"; +export * from "./CreateBackupCommand"; +export * from "./CreateGlobalTableCommand"; +export * from "./CreateTableCommand"; +export * from "./DeleteBackupCommand"; +export * from "./DeleteItemCommand"; +export * from "./DeleteResourcePolicyCommand"; +export * from "./DeleteTableCommand"; +export * from "./DescribeBackupCommand"; +export * from "./DescribeContinuousBackupsCommand"; +export * from "./DescribeContributorInsightsCommand"; +export * from "./DescribeEndpointsCommand"; +export * from "./DescribeExportCommand"; +export * from "./DescribeGlobalTableCommand"; +export * from "./DescribeGlobalTableSettingsCommand"; +export * from "./DescribeImportCommand"; +export * from "./DescribeKinesisStreamingDestinationCommand"; +export * from "./DescribeLimitsCommand"; +export * from "./DescribeTableCommand"; +export * from "./DescribeTableReplicaAutoScalingCommand"; +export * from "./DescribeTimeToLiveCommand"; +export * from "./DisableKinesisStreamingDestinationCommand"; +export * from "./EnableKinesisStreamingDestinationCommand"; +export * from "./ExecuteStatementCommand"; +export * from "./ExecuteTransactionCommand"; +export * from 
"./ExportTableToPointInTimeCommand"; +export * from "./GetItemCommand"; +export * from "./GetResourcePolicyCommand"; +export * from "./ImportTableCommand"; +export * from "./ListBackupsCommand"; +export * from "./ListContributorInsightsCommand"; +export * from "./ListExportsCommand"; +export * from "./ListGlobalTablesCommand"; +export * from "./ListImportsCommand"; +export * from "./ListTablesCommand"; +export * from "./ListTagsOfResourceCommand"; +export * from "./PutItemCommand"; +export * from "./PutResourcePolicyCommand"; +export * from "./QueryCommand"; +export * from "./RestoreTableFromBackupCommand"; +export * from "./RestoreTableToPointInTimeCommand"; +export * from "./ScanCommand"; +export * from "./TagResourceCommand"; +export * from "./TransactGetItemsCommand"; +export * from "./TransactWriteItemsCommand"; +export * from "./UntagResourceCommand"; +export * from "./UpdateContinuousBackupsCommand"; +export * from "./UpdateContributorInsightsCommand"; +export * from "./UpdateGlobalTableCommand"; +export * from "./UpdateGlobalTableSettingsCommand"; +export * from "./UpdateItemCommand"; +export * from "./UpdateKinesisStreamingDestinationCommand"; +export * from "./UpdateTableCommand"; +export * from "./UpdateTableReplicaAutoScalingCommand"; +export * from "./UpdateTimeToLiveCommand"; diff --git a/amplify/functions/deleteDocument/node_modules/@aws-sdk/client-dynamodb/dist-es/endpoint/EndpointParameters.js b/amplify/functions/deleteDocument/node_modules/@aws-sdk/client-dynamodb/dist-es/endpoint/EndpointParameters.js new file mode 100644 index 0000000..e5cae7d --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@aws-sdk/client-dynamodb/dist-es/endpoint/EndpointParameters.js @@ -0,0 +1,15 @@ +export const resolveClientEndpointParameters = (options) => { + return Object.assign(options, { + useDualstackEndpoint: options.useDualstackEndpoint ?? false, + useFipsEndpoint: options.useFipsEndpoint ?? 
false, + defaultSigningName: "dynamodb", + }); +}; +export const commonParams = { + UseFIPS: { type: "builtInParams", name: "useFipsEndpoint" }, + AccountId: { type: "builtInParams", name: "accountId" }, + Endpoint: { type: "builtInParams", name: "endpoint" }, + Region: { type: "builtInParams", name: "region" }, + UseDualStack: { type: "builtInParams", name: "useDualstackEndpoint" }, + AccountIdEndpointMode: { type: "builtInParams", name: "accountIdEndpointMode" }, +}; diff --git a/amplify/functions/deleteDocument/node_modules/@aws-sdk/client-dynamodb/dist-es/endpoint/endpointResolver.js b/amplify/functions/deleteDocument/node_modules/@aws-sdk/client-dynamodb/dist-es/endpoint/endpointResolver.js new file mode 100644 index 0000000..7fbe485 --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@aws-sdk/client-dynamodb/dist-es/endpoint/endpointResolver.js @@ -0,0 +1,23 @@ +import { awsEndpointFunctions } from "@aws-sdk/util-endpoints"; +import { customEndpointFunctions, EndpointCache, resolveEndpoint } from "@smithy/util-endpoints"; +import { ruleSet } from "./ruleset"; +const cache = new EndpointCache({ + size: 50, + params: [ + "AccountId", + "AccountIdEndpointMode", + "Endpoint", + "Region", + "ResourceArn", + "ResourceArnList", + "UseDualStack", + "UseFIPS", + ], +}); +export const defaultEndpointResolver = (endpointParams, context = {}) => { + return cache.get(endpointParams, () => resolveEndpoint(ruleSet, { + endpointParams: endpointParams, + logger: context.logger, + })); +}; +customEndpointFunctions.aws = awsEndpointFunctions; diff --git a/amplify/functions/deleteDocument/node_modules/@aws-sdk/client-dynamodb/dist-es/endpoint/ruleset.js b/amplify/functions/deleteDocument/node_modules/@aws-sdk/client-dynamodb/dist-es/endpoint/ruleset.js new file mode 100644 index 0000000..86e26fd --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@aws-sdk/client-dynamodb/dist-es/endpoint/ruleset.js @@ -0,0 +1,4 @@ +const S = "required", T = "type", U 
= "fn", V = "argv", W = "ref", X = "properties", Y = "headers"; +const a = false, b = "isSet", c = "error", d = "endpoint", e = "tree", f = "PartitionResult", g = "stringEquals", h = "dynamodb", i = "getAttr", j = "aws.parseArn", k = "ParsedArn", l = "isValidHostLabel", m = "FirstArn", n = { [S]: false, [T]: "String" }, o = { [S]: true, "default": false, [T]: "Boolean" }, p = { [U]: "booleanEquals", [V]: [{ [W]: "UseFIPS" }, true] }, q = { [U]: "booleanEquals", [V]: [{ [W]: "UseDualStack" }, true] }, r = {}, s = { [W]: "Region" }, t = { [U]: "booleanEquals", [V]: [{ [U]: i, [V]: [{ [W]: f }, "supportsFIPS"] }, true] }, u = { [U]: "booleanEquals", [V]: [{ [U]: i, [V]: [{ [W]: f }, "supportsDualStack"] }, true] }, v = { "conditions": [{ [U]: b, [V]: [{ [W]: "AccountIdEndpointMode" }] }, { [U]: g, [V]: [{ [W]: "AccountIdEndpointMode" }, "required"] }], "rules": [{ [c]: "Invalid Configuration: AccountIdEndpointMode is required and FIPS is enabled, but FIPS account endpoints are not supported", [T]: c }], [T]: e }, w = { [U]: b, [V]: [{ [W]: "AccountIdEndpointMode" }] }, x = { [c]: "Invalid Configuration: AccountIdEndpointMode is required and FIPS is enabled, but FIPS account endpoints are not supported", [T]: c }, y = { [U]: i, [V]: [{ [W]: f }, "name"] }, z = { [d]: { "url": "https://dynamodb.{Region}.{PartitionResult#dnsSuffix}", [X]: {}, [Y]: {} }, [T]: d }, A = { [U]: "not", [V]: [p] }, B = { [c]: "Invalid Configuration: AccountIdEndpointMode is required and DualStack is enabled, but DualStack account endpoints are not supported", [T]: c }, C = { [U]: "not", [V]: [{ [U]: g, [V]: [{ [W]: "AccountIdEndpointMode" }, "disabled"] }] }, D = { [U]: g, [V]: [y, "aws"] }, E = { [U]: "not", [V]: [q] }, F = { [U]: g, [V]: [{ [U]: i, [V]: [{ [W]: k }, "service"] }, h] }, G = { [U]: l, [V]: [{ [U]: i, [V]: [{ [W]: k }, "region"] }, false] }, H = { [U]: g, [V]: [{ [U]: i, [V]: [{ [W]: k }, "region"] }, "{Region}"] }, I = { [U]: l, [V]: [{ [U]: i, [V]: [{ [W]: k }, "accountId"] 
}, false] }, J = { "url": "https://{ParsedArn#accountId}.ddb.{Region}.{PartitionResult#dnsSuffix}", [X]: {}, [Y]: {} }, K = { [W]: "ResourceArnList" }, L = { [W]: "AccountId" }, M = [p], N = [q], O = [s], P = [w, { [U]: g, [V]: [{ [W]: "AccountIdEndpointMode" }, "required"] }], Q = [A], R = [{ [W]: "ResourceArn" }]; +const _data = { version: "1.0", parameters: { Region: n, UseDualStack: o, UseFIPS: o, Endpoint: n, AccountId: n, AccountIdEndpointMode: n, ResourceArn: n, ResourceArnList: { [S]: a, [T]: "stringArray" } }, rules: [{ conditions: [{ [U]: b, [V]: [{ [W]: "Endpoint" }] }], rules: [{ conditions: M, error: "Invalid Configuration: FIPS and custom endpoint are not supported", [T]: c }, { conditions: N, error: "Invalid Configuration: Dualstack and custom endpoint are not supported", [T]: c }, { endpoint: { url: "{Endpoint}", [X]: r, [Y]: r }, [T]: d }], [T]: e }, { conditions: [{ [U]: b, [V]: O }], rules: [{ conditions: [{ [U]: "aws.partition", [V]: O, assign: f }], rules: [{ conditions: [{ [U]: g, [V]: [s, "local"] }], rules: [{ conditions: M, error: "Invalid Configuration: FIPS and local endpoint are not supported", [T]: c }, { conditions: N, error: "Invalid Configuration: Dualstack and local endpoint are not supported", [T]: c }, { endpoint: { url: "http://localhost:8000", [X]: { authSchemes: [{ signingRegion: "us-east-1", name: "sigv4", signingName: h }] }, [Y]: r }, [T]: d }], [T]: e }, { conditions: [p, q], rules: [{ conditions: [t, u], rules: [v, { endpoint: { url: "https://dynamodb-fips.{Region}.{PartitionResult#dualStackDnsSuffix}", [X]: r, [Y]: r }, [T]: d }], [T]: e }, { error: "FIPS and DualStack are enabled, but this partition does not support one or both", [T]: c }], [T]: e }, { conditions: M, rules: [{ conditions: [t], rules: [{ conditions: [{ [U]: g, [V]: [y, "aws-us-gov"] }], rules: [v, z], [T]: e }, v, { endpoint: { url: "https://dynamodb-fips.{Region}.{PartitionResult#dnsSuffix}", [X]: r, [Y]: r }, [T]: d }], [T]: e }, { error: "FIPS is 
enabled but this partition does not support FIPS", [T]: c }], [T]: e }, { conditions: N, rules: [{ conditions: [u], rules: [{ conditions: P, rules: [{ conditions: Q, rules: [B], [T]: e }, x], [T]: e }, { endpoint: { url: "https://dynamodb.{Region}.{PartitionResult#dualStackDnsSuffix}", [X]: r, [Y]: r }, [T]: d }], [T]: e }, { error: "DualStack is enabled but this partition does not support DualStack", [T]: c }], [T]: e }, { conditions: [w, C, D, A, E, { [U]: b, [V]: R }, { [U]: j, [V]: R, assign: k }, F, G, H, I], endpoint: J, [T]: d }, { conditions: [w, C, D, A, E, { [U]: b, [V]: [K] }, { [U]: i, [V]: [K, "[0]"], assign: m }, { [U]: j, [V]: [{ [W]: m }], assign: k }, F, G, H, I], endpoint: J, [T]: d }, { conditions: [w, C, D, A, E, { [U]: b, [V]: [L] }], rules: [{ conditions: [{ [U]: l, [V]: [L, a] }], rules: [{ endpoint: { url: "https://{AccountId}.ddb.{Region}.{PartitionResult#dnsSuffix}", [X]: r, [Y]: r }, [T]: d }], [T]: e }, { error: "Credentials-sourced account ID parameter is invalid", [T]: c }], [T]: e }, { conditions: P, rules: [{ conditions: Q, rules: [{ conditions: [E], rules: [{ conditions: [D], rules: [{ error: "AccountIdEndpointMode is required but no AccountID was provided or able to be loaded", [T]: c }], [T]: e }, { error: "Invalid Configuration: AccountIdEndpointMode is required but account endpoints are not supported in this partition", [T]: c }], [T]: e }, B], [T]: e }, x], [T]: e }, z], [T]: e }], [T]: e }, { error: "Invalid Configuration: Missing Region", [T]: c }] }; +export const ruleSet = _data; diff --git a/amplify/functions/deleteDocument/node_modules/@aws-sdk/client-dynamodb/dist-es/extensionConfiguration.js b/amplify/functions/deleteDocument/node_modules/@aws-sdk/client-dynamodb/dist-es/extensionConfiguration.js new file mode 100644 index 0000000..cb0ff5c --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@aws-sdk/client-dynamodb/dist-es/extensionConfiguration.js @@ -0,0 +1 @@ +export {}; diff --git 
a/amplify/functions/deleteDocument/node_modules/@aws-sdk/client-dynamodb/dist-es/index.js b/amplify/functions/deleteDocument/node_modules/@aws-sdk/client-dynamodb/dist-es/index.js new file mode 100644 index 0000000..8bd6d9e --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@aws-sdk/client-dynamodb/dist-es/index.js @@ -0,0 +1,7 @@ +export * from "./DynamoDBClient"; +export * from "./DynamoDB"; +export * from "./commands"; +export * from "./pagination"; +export * from "./waiters"; +export * from "./models"; +export { DynamoDBServiceException } from "./models/DynamoDBServiceException"; diff --git a/amplify/functions/deleteDocument/node_modules/@aws-sdk/client-dynamodb/dist-es/models/DynamoDBServiceException.js b/amplify/functions/deleteDocument/node_modules/@aws-sdk/client-dynamodb/dist-es/models/DynamoDBServiceException.js new file mode 100644 index 0000000..2712903 --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@aws-sdk/client-dynamodb/dist-es/models/DynamoDBServiceException.js @@ -0,0 +1,8 @@ +import { ServiceException as __ServiceException, } from "@smithy/smithy-client"; +export { __ServiceException }; +export class DynamoDBServiceException extends __ServiceException { + constructor(options) { + super(options); + Object.setPrototypeOf(this, DynamoDBServiceException.prototype); + } +} diff --git a/amplify/functions/deleteDocument/node_modules/@aws-sdk/client-dynamodb/dist-es/models/index.js b/amplify/functions/deleteDocument/node_modules/@aws-sdk/client-dynamodb/dist-es/models/index.js new file mode 100644 index 0000000..09c5d6e --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@aws-sdk/client-dynamodb/dist-es/models/index.js @@ -0,0 +1 @@ +export * from "./models_0"; diff --git a/amplify/functions/deleteDocument/node_modules/@aws-sdk/client-dynamodb/dist-es/models/models_0.js b/amplify/functions/deleteDocument/node_modules/@aws-sdk/client-dynamodb/dist-es/models/models_0.js new file mode 100644 index 
0000000..2b4b364 --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@aws-sdk/client-dynamodb/dist-es/models/models_0.js @@ -0,0 +1,674 @@ +import { DynamoDBServiceException as __BaseException } from "./DynamoDBServiceException"; +export const ApproximateCreationDateTimePrecision = { + MICROSECOND: "MICROSECOND", + MILLISECOND: "MILLISECOND", +}; +export const AttributeAction = { + ADD: "ADD", + DELETE: "DELETE", + PUT: "PUT", +}; +export const ScalarAttributeType = { + B: "B", + N: "N", + S: "S", +}; +export const BackupStatus = { + AVAILABLE: "AVAILABLE", + CREATING: "CREATING", + DELETED: "DELETED", +}; +export const BackupType = { + AWS_BACKUP: "AWS_BACKUP", + SYSTEM: "SYSTEM", + USER: "USER", +}; +export const BillingMode = { + PAY_PER_REQUEST: "PAY_PER_REQUEST", + PROVISIONED: "PROVISIONED", +}; +export const KeyType = { + HASH: "HASH", + RANGE: "RANGE", +}; +export const ProjectionType = { + ALL: "ALL", + INCLUDE: "INCLUDE", + KEYS_ONLY: "KEYS_ONLY", +}; +export const SSEType = { + AES256: "AES256", + KMS: "KMS", +}; +export const SSEStatus = { + DISABLED: "DISABLED", + DISABLING: "DISABLING", + ENABLED: "ENABLED", + ENABLING: "ENABLING", + UPDATING: "UPDATING", +}; +export const StreamViewType = { + KEYS_ONLY: "KEYS_ONLY", + NEW_AND_OLD_IMAGES: "NEW_AND_OLD_IMAGES", + NEW_IMAGE: "NEW_IMAGE", + OLD_IMAGE: "OLD_IMAGE", +}; +export const TimeToLiveStatus = { + DISABLED: "DISABLED", + DISABLING: "DISABLING", + ENABLED: "ENABLED", + ENABLING: "ENABLING", +}; +export class BackupInUseException extends __BaseException { + name = "BackupInUseException"; + $fault = "client"; + constructor(opts) { + super({ + name: "BackupInUseException", + $fault: "client", + ...opts, + }); + Object.setPrototypeOf(this, BackupInUseException.prototype); + } +} +export class BackupNotFoundException extends __BaseException { + name = "BackupNotFoundException"; + $fault = "client"; + constructor(opts) { + super({ + name: "BackupNotFoundException", + $fault: "client", + 
...opts, + }); + Object.setPrototypeOf(this, BackupNotFoundException.prototype); + } +} +export const BackupTypeFilter = { + ALL: "ALL", + AWS_BACKUP: "AWS_BACKUP", + SYSTEM: "SYSTEM", + USER: "USER", +}; +export const ReturnConsumedCapacity = { + INDEXES: "INDEXES", + NONE: "NONE", + TOTAL: "TOTAL", +}; +export const ReturnValuesOnConditionCheckFailure = { + ALL_OLD: "ALL_OLD", + NONE: "NONE", +}; +export const BatchStatementErrorCodeEnum = { + AccessDenied: "AccessDenied", + ConditionalCheckFailed: "ConditionalCheckFailed", + DuplicateItem: "DuplicateItem", + InternalServerError: "InternalServerError", + ItemCollectionSizeLimitExceeded: "ItemCollectionSizeLimitExceeded", + ProvisionedThroughputExceeded: "ProvisionedThroughputExceeded", + RequestLimitExceeded: "RequestLimitExceeded", + ResourceNotFound: "ResourceNotFound", + ThrottlingError: "ThrottlingError", + TransactionConflict: "TransactionConflict", + ValidationError: "ValidationError", +}; +export class InternalServerError extends __BaseException { + name = "InternalServerError"; + $fault = "server"; + constructor(opts) { + super({ + name: "InternalServerError", + $fault: "server", + ...opts, + }); + Object.setPrototypeOf(this, InternalServerError.prototype); + } +} +export class RequestLimitExceeded extends __BaseException { + name = "RequestLimitExceeded"; + $fault = "client"; + constructor(opts) { + super({ + name: "RequestLimitExceeded", + $fault: "client", + ...opts, + }); + Object.setPrototypeOf(this, RequestLimitExceeded.prototype); + } +} +export class InvalidEndpointException extends __BaseException { + name = "InvalidEndpointException"; + $fault = "client"; + Message; + constructor(opts) { + super({ + name: "InvalidEndpointException", + $fault: "client", + ...opts, + }); + Object.setPrototypeOf(this, InvalidEndpointException.prototype); + this.Message = opts.Message; + } +} +export class ProvisionedThroughputExceededException extends __BaseException { + name = 
"ProvisionedThroughputExceededException"; + $fault = "client"; + constructor(opts) { + super({ + name: "ProvisionedThroughputExceededException", + $fault: "client", + ...opts, + }); + Object.setPrototypeOf(this, ProvisionedThroughputExceededException.prototype); + } +} +export class ResourceNotFoundException extends __BaseException { + name = "ResourceNotFoundException"; + $fault = "client"; + constructor(opts) { + super({ + name: "ResourceNotFoundException", + $fault: "client", + ...opts, + }); + Object.setPrototypeOf(this, ResourceNotFoundException.prototype); + } +} +export const ReturnItemCollectionMetrics = { + NONE: "NONE", + SIZE: "SIZE", +}; +export class ItemCollectionSizeLimitExceededException extends __BaseException { + name = "ItemCollectionSizeLimitExceededException"; + $fault = "client"; + constructor(opts) { + super({ + name: "ItemCollectionSizeLimitExceededException", + $fault: "client", + ...opts, + }); + Object.setPrototypeOf(this, ItemCollectionSizeLimitExceededException.prototype); + } +} +export const ComparisonOperator = { + BEGINS_WITH: "BEGINS_WITH", + BETWEEN: "BETWEEN", + CONTAINS: "CONTAINS", + EQ: "EQ", + GE: "GE", + GT: "GT", + IN: "IN", + LE: "LE", + LT: "LT", + NE: "NE", + NOT_CONTAINS: "NOT_CONTAINS", + NOT_NULL: "NOT_NULL", + NULL: "NULL", +}; +export const ConditionalOperator = { + AND: "AND", + OR: "OR", +}; +export const ContinuousBackupsStatus = { + DISABLED: "DISABLED", + ENABLED: "ENABLED", +}; +export const PointInTimeRecoveryStatus = { + DISABLED: "DISABLED", + ENABLED: "ENABLED", +}; +export class ContinuousBackupsUnavailableException extends __BaseException { + name = "ContinuousBackupsUnavailableException"; + $fault = "client"; + constructor(opts) { + super({ + name: "ContinuousBackupsUnavailableException", + $fault: "client", + ...opts, + }); + Object.setPrototypeOf(this, ContinuousBackupsUnavailableException.prototype); + } +} +export const ContributorInsightsAction = { + DISABLE: "DISABLE", + ENABLE: "ENABLE", +}; 
+export const ContributorInsightsStatus = { + DISABLED: "DISABLED", + DISABLING: "DISABLING", + ENABLED: "ENABLED", + ENABLING: "ENABLING", + FAILED: "FAILED", +}; +export class LimitExceededException extends __BaseException { + name = "LimitExceededException"; + $fault = "client"; + constructor(opts) { + super({ + name: "LimitExceededException", + $fault: "client", + ...opts, + }); + Object.setPrototypeOf(this, LimitExceededException.prototype); + } +} +export class TableInUseException extends __BaseException { + name = "TableInUseException"; + $fault = "client"; + constructor(opts) { + super({ + name: "TableInUseException", + $fault: "client", + ...opts, + }); + Object.setPrototypeOf(this, TableInUseException.prototype); + } +} +export class TableNotFoundException extends __BaseException { + name = "TableNotFoundException"; + $fault = "client"; + constructor(opts) { + super({ + name: "TableNotFoundException", + $fault: "client", + ...opts, + }); + Object.setPrototypeOf(this, TableNotFoundException.prototype); + } +} +export const GlobalTableStatus = { + ACTIVE: "ACTIVE", + CREATING: "CREATING", + DELETING: "DELETING", + UPDATING: "UPDATING", +}; +export const IndexStatus = { + ACTIVE: "ACTIVE", + CREATING: "CREATING", + DELETING: "DELETING", + UPDATING: "UPDATING", +}; +export const ReplicaStatus = { + ACTIVE: "ACTIVE", + CREATING: "CREATING", + CREATION_FAILED: "CREATION_FAILED", + DELETING: "DELETING", + INACCESSIBLE_ENCRYPTION_CREDENTIALS: "INACCESSIBLE_ENCRYPTION_CREDENTIALS", + REGION_DISABLED: "REGION_DISABLED", + UPDATING: "UPDATING", +}; +export const TableClass = { + STANDARD: "STANDARD", + STANDARD_INFREQUENT_ACCESS: "STANDARD_INFREQUENT_ACCESS", +}; +export const TableStatus = { + ACTIVE: "ACTIVE", + ARCHIVED: "ARCHIVED", + ARCHIVING: "ARCHIVING", + CREATING: "CREATING", + DELETING: "DELETING", + INACCESSIBLE_ENCRYPTION_CREDENTIALS: "INACCESSIBLE_ENCRYPTION_CREDENTIALS", + UPDATING: "UPDATING", +}; +export class GlobalTableAlreadyExistsException 
extends __BaseException { + name = "GlobalTableAlreadyExistsException"; + $fault = "client"; + constructor(opts) { + super({ + name: "GlobalTableAlreadyExistsException", + $fault: "client", + ...opts, + }); + Object.setPrototypeOf(this, GlobalTableAlreadyExistsException.prototype); + } +} +export const MultiRegionConsistency = { + EVENTUAL: "EVENTUAL", + STRONG: "STRONG", +}; +export class ResourceInUseException extends __BaseException { + name = "ResourceInUseException"; + $fault = "client"; + constructor(opts) { + super({ + name: "ResourceInUseException", + $fault: "client", + ...opts, + }); + Object.setPrototypeOf(this, ResourceInUseException.prototype); + } +} +export const ReturnValue = { + ALL_NEW: "ALL_NEW", + ALL_OLD: "ALL_OLD", + NONE: "NONE", + UPDATED_NEW: "UPDATED_NEW", + UPDATED_OLD: "UPDATED_OLD", +}; +export class ReplicatedWriteConflictException extends __BaseException { + name = "ReplicatedWriteConflictException"; + $fault = "client"; + constructor(opts) { + super({ + name: "ReplicatedWriteConflictException", + $fault: "client", + ...opts, + }); + Object.setPrototypeOf(this, ReplicatedWriteConflictException.prototype); + } +} +export class TransactionConflictException extends __BaseException { + name = "TransactionConflictException"; + $fault = "client"; + constructor(opts) { + super({ + name: "TransactionConflictException", + $fault: "client", + ...opts, + }); + Object.setPrototypeOf(this, TransactionConflictException.prototype); + } +} +export class PolicyNotFoundException extends __BaseException { + name = "PolicyNotFoundException"; + $fault = "client"; + constructor(opts) { + super({ + name: "PolicyNotFoundException", + $fault: "client", + ...opts, + }); + Object.setPrototypeOf(this, PolicyNotFoundException.prototype); + } +} +export const ExportFormat = { + DYNAMODB_JSON: "DYNAMODB_JSON", + ION: "ION", +}; +export const ExportStatus = { + COMPLETED: "COMPLETED", + FAILED: "FAILED", + IN_PROGRESS: "IN_PROGRESS", +}; +export const ExportType = { 
+ FULL_EXPORT: "FULL_EXPORT", + INCREMENTAL_EXPORT: "INCREMENTAL_EXPORT", +}; +export const ExportViewType = { + NEW_AND_OLD_IMAGES: "NEW_AND_OLD_IMAGES", + NEW_IMAGE: "NEW_IMAGE", +}; +export const S3SseAlgorithm = { + AES256: "AES256", + KMS: "KMS", +}; +export class ExportNotFoundException extends __BaseException { + name = "ExportNotFoundException"; + $fault = "client"; + constructor(opts) { + super({ + name: "ExportNotFoundException", + $fault: "client", + ...opts, + }); + Object.setPrototypeOf(this, ExportNotFoundException.prototype); + } +} +export class GlobalTableNotFoundException extends __BaseException { + name = "GlobalTableNotFoundException"; + $fault = "client"; + constructor(opts) { + super({ + name: "GlobalTableNotFoundException", + $fault: "client", + ...opts, + }); + Object.setPrototypeOf(this, GlobalTableNotFoundException.prototype); + } +} +export const ImportStatus = { + CANCELLED: "CANCELLED", + CANCELLING: "CANCELLING", + COMPLETED: "COMPLETED", + FAILED: "FAILED", + IN_PROGRESS: "IN_PROGRESS", +}; +export const InputCompressionType = { + GZIP: "GZIP", + NONE: "NONE", + ZSTD: "ZSTD", +}; +export const InputFormat = { + CSV: "CSV", + DYNAMODB_JSON: "DYNAMODB_JSON", + ION: "ION", +}; +export class ImportNotFoundException extends __BaseException { + name = "ImportNotFoundException"; + $fault = "client"; + constructor(opts) { + super({ + name: "ImportNotFoundException", + $fault: "client", + ...opts, + }); + Object.setPrototypeOf(this, ImportNotFoundException.prototype); + } +} +export const DestinationStatus = { + ACTIVE: "ACTIVE", + DISABLED: "DISABLED", + DISABLING: "DISABLING", + ENABLE_FAILED: "ENABLE_FAILED", + ENABLING: "ENABLING", + UPDATING: "UPDATING", +}; +export class DuplicateItemException extends __BaseException { + name = "DuplicateItemException"; + $fault = "client"; + constructor(opts) { + super({ + name: "DuplicateItemException", + $fault: "client", + ...opts, + }); + Object.setPrototypeOf(this, 
DuplicateItemException.prototype); + } +} +export class IdempotentParameterMismatchException extends __BaseException { + name = "IdempotentParameterMismatchException"; + $fault = "client"; + Message; + constructor(opts) { + super({ + name: "IdempotentParameterMismatchException", + $fault: "client", + ...opts, + }); + Object.setPrototypeOf(this, IdempotentParameterMismatchException.prototype); + this.Message = opts.Message; + } +} +export class TransactionInProgressException extends __BaseException { + name = "TransactionInProgressException"; + $fault = "client"; + Message; + constructor(opts) { + super({ + name: "TransactionInProgressException", + $fault: "client", + ...opts, + }); + Object.setPrototypeOf(this, TransactionInProgressException.prototype); + this.Message = opts.Message; + } +} +export class ExportConflictException extends __BaseException { + name = "ExportConflictException"; + $fault = "client"; + constructor(opts) { + super({ + name: "ExportConflictException", + $fault: "client", + ...opts, + }); + Object.setPrototypeOf(this, ExportConflictException.prototype); + } +} +export class InvalidExportTimeException extends __BaseException { + name = "InvalidExportTimeException"; + $fault = "client"; + constructor(opts) { + super({ + name: "InvalidExportTimeException", + $fault: "client", + ...opts, + }); + Object.setPrototypeOf(this, InvalidExportTimeException.prototype); + } +} +export class PointInTimeRecoveryUnavailableException extends __BaseException { + name = "PointInTimeRecoveryUnavailableException"; + $fault = "client"; + constructor(opts) { + super({ + name: "PointInTimeRecoveryUnavailableException", + $fault: "client", + ...opts, + }); + Object.setPrototypeOf(this, PointInTimeRecoveryUnavailableException.prototype); + } +} +export class ImportConflictException extends __BaseException { + name = "ImportConflictException"; + $fault = "client"; + constructor(opts) { + super({ + name: "ImportConflictException", + $fault: "client", + ...opts, + }); + 
Object.setPrototypeOf(this, ImportConflictException.prototype); + } +} +export const Select = { + ALL_ATTRIBUTES: "ALL_ATTRIBUTES", + ALL_PROJECTED_ATTRIBUTES: "ALL_PROJECTED_ATTRIBUTES", + COUNT: "COUNT", + SPECIFIC_ATTRIBUTES: "SPECIFIC_ATTRIBUTES", +}; +export class TableAlreadyExistsException extends __BaseException { + name = "TableAlreadyExistsException"; + $fault = "client"; + constructor(opts) { + super({ + name: "TableAlreadyExistsException", + $fault: "client", + ...opts, + }); + Object.setPrototypeOf(this, TableAlreadyExistsException.prototype); + } +} +export class InvalidRestoreTimeException extends __BaseException { + name = "InvalidRestoreTimeException"; + $fault = "client"; + constructor(opts) { + super({ + name: "InvalidRestoreTimeException", + $fault: "client", + ...opts, + }); + Object.setPrototypeOf(this, InvalidRestoreTimeException.prototype); + } +} +export class ReplicaAlreadyExistsException extends __BaseException { + name = "ReplicaAlreadyExistsException"; + $fault = "client"; + constructor(opts) { + super({ + name: "ReplicaAlreadyExistsException", + $fault: "client", + ...opts, + }); + Object.setPrototypeOf(this, ReplicaAlreadyExistsException.prototype); + } +} +export class ReplicaNotFoundException extends __BaseException { + name = "ReplicaNotFoundException"; + $fault = "client"; + constructor(opts) { + super({ + name: "ReplicaNotFoundException", + $fault: "client", + ...opts, + }); + Object.setPrototypeOf(this, ReplicaNotFoundException.prototype); + } +} +export class IndexNotFoundException extends __BaseException { + name = "IndexNotFoundException"; + $fault = "client"; + constructor(opts) { + super({ + name: "IndexNotFoundException", + $fault: "client", + ...opts, + }); + Object.setPrototypeOf(this, IndexNotFoundException.prototype); + } +} +export var AttributeValue; +(function (AttributeValue) { + AttributeValue.visit = (value, visitor) => { + if (value.S !== undefined) + return visitor.S(value.S); + if (value.N !== undefined) + 
return visitor.N(value.N); + if (value.B !== undefined) + return visitor.B(value.B); + if (value.SS !== undefined) + return visitor.SS(value.SS); + if (value.NS !== undefined) + return visitor.NS(value.NS); + if (value.BS !== undefined) + return visitor.BS(value.BS); + if (value.M !== undefined) + return visitor.M(value.M); + if (value.L !== undefined) + return visitor.L(value.L); + if (value.NULL !== undefined) + return visitor.NULL(value.NULL); + if (value.BOOL !== undefined) + return visitor.BOOL(value.BOOL); + return visitor._(value.$unknown[0], value.$unknown[1]); + }; +})(AttributeValue || (AttributeValue = {})); +export class ConditionalCheckFailedException extends __BaseException { + name = "ConditionalCheckFailedException"; + $fault = "client"; + Item; + constructor(opts) { + super({ + name: "ConditionalCheckFailedException", + $fault: "client", + ...opts, + }); + Object.setPrototypeOf(this, ConditionalCheckFailedException.prototype); + this.Item = opts.Item; + } +} +export class TransactionCanceledException extends __BaseException { + name = "TransactionCanceledException"; + $fault = "client"; + Message; + CancellationReasons; + constructor(opts) { + super({ + name: "TransactionCanceledException", + $fault: "client", + ...opts, + }); + Object.setPrototypeOf(this, TransactionCanceledException.prototype); + this.Message = opts.Message; + this.CancellationReasons = opts.CancellationReasons; + } +} diff --git a/amplify/functions/deleteDocument/node_modules/@aws-sdk/client-dynamodb/dist-es/pagination/Interfaces.js b/amplify/functions/deleteDocument/node_modules/@aws-sdk/client-dynamodb/dist-es/pagination/Interfaces.js new file mode 100644 index 0000000..cb0ff5c --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@aws-sdk/client-dynamodb/dist-es/pagination/Interfaces.js @@ -0,0 +1 @@ +export {}; diff --git a/amplify/functions/deleteDocument/node_modules/@aws-sdk/client-dynamodb/dist-es/pagination/ListContributorInsightsPaginator.js 
b/amplify/functions/deleteDocument/node_modules/@aws-sdk/client-dynamodb/dist-es/pagination/ListContributorInsightsPaginator.js new file mode 100644 index 0000000..23bb95c --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@aws-sdk/client-dynamodb/dist-es/pagination/ListContributorInsightsPaginator.js @@ -0,0 +1,4 @@ +import { createPaginator } from "@smithy/core"; +import { ListContributorInsightsCommand, } from "../commands/ListContributorInsightsCommand"; +import { DynamoDBClient } from "../DynamoDBClient"; +export const paginateListContributorInsights = createPaginator(DynamoDBClient, ListContributorInsightsCommand, "NextToken", "NextToken", "MaxResults"); diff --git a/amplify/functions/deleteDocument/node_modules/@aws-sdk/client-dynamodb/dist-es/pagination/ListExportsPaginator.js b/amplify/functions/deleteDocument/node_modules/@aws-sdk/client-dynamodb/dist-es/pagination/ListExportsPaginator.js new file mode 100644 index 0000000..e252e7f --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@aws-sdk/client-dynamodb/dist-es/pagination/ListExportsPaginator.js @@ -0,0 +1,4 @@ +import { createPaginator } from "@smithy/core"; +import { ListExportsCommand } from "../commands/ListExportsCommand"; +import { DynamoDBClient } from "../DynamoDBClient"; +export const paginateListExports = createPaginator(DynamoDBClient, ListExportsCommand, "NextToken", "NextToken", "MaxResults"); diff --git a/amplify/functions/deleteDocument/node_modules/@aws-sdk/client-dynamodb/dist-es/pagination/ListImportsPaginator.js b/amplify/functions/deleteDocument/node_modules/@aws-sdk/client-dynamodb/dist-es/pagination/ListImportsPaginator.js new file mode 100644 index 0000000..c3fe323 --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@aws-sdk/client-dynamodb/dist-es/pagination/ListImportsPaginator.js @@ -0,0 +1,4 @@ +import { createPaginator } from "@smithy/core"; +import { ListImportsCommand } from "../commands/ListImportsCommand"; +import { 
DynamoDBClient } from "../DynamoDBClient"; +export const paginateListImports = createPaginator(DynamoDBClient, ListImportsCommand, "NextToken", "NextToken", "PageSize"); diff --git a/amplify/functions/deleteDocument/node_modules/@aws-sdk/client-dynamodb/dist-es/pagination/ListTablesPaginator.js b/amplify/functions/deleteDocument/node_modules/@aws-sdk/client-dynamodb/dist-es/pagination/ListTablesPaginator.js new file mode 100644 index 0000000..979f3f6 --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@aws-sdk/client-dynamodb/dist-es/pagination/ListTablesPaginator.js @@ -0,0 +1,4 @@ +import { createPaginator } from "@smithy/core"; +import { ListTablesCommand } from "../commands/ListTablesCommand"; +import { DynamoDBClient } from "../DynamoDBClient"; +export const paginateListTables = createPaginator(DynamoDBClient, ListTablesCommand, "ExclusiveStartTableName", "LastEvaluatedTableName", "Limit"); diff --git a/amplify/functions/deleteDocument/node_modules/@aws-sdk/client-dynamodb/dist-es/pagination/QueryPaginator.js b/amplify/functions/deleteDocument/node_modules/@aws-sdk/client-dynamodb/dist-es/pagination/QueryPaginator.js new file mode 100644 index 0000000..4fcc17d --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@aws-sdk/client-dynamodb/dist-es/pagination/QueryPaginator.js @@ -0,0 +1,4 @@ +import { createPaginator } from "@smithy/core"; +import { QueryCommand } from "../commands/QueryCommand"; +import { DynamoDBClient } from "../DynamoDBClient"; +export const paginateQuery = createPaginator(DynamoDBClient, QueryCommand, "ExclusiveStartKey", "LastEvaluatedKey", "Limit"); diff --git a/amplify/functions/deleteDocument/node_modules/@aws-sdk/client-dynamodb/dist-es/pagination/ScanPaginator.js b/amplify/functions/deleteDocument/node_modules/@aws-sdk/client-dynamodb/dist-es/pagination/ScanPaginator.js new file mode 100644 index 0000000..b95b746 --- /dev/null +++ 
b/amplify/functions/deleteDocument/node_modules/@aws-sdk/client-dynamodb/dist-es/pagination/ScanPaginator.js @@ -0,0 +1,4 @@ +import { createPaginator } from "@smithy/core"; +import { ScanCommand } from "../commands/ScanCommand"; +import { DynamoDBClient } from "../DynamoDBClient"; +export const paginateScan = createPaginator(DynamoDBClient, ScanCommand, "ExclusiveStartKey", "LastEvaluatedKey", "Limit"); diff --git a/amplify/functions/deleteDocument/node_modules/@aws-sdk/client-dynamodb/dist-es/pagination/index.js b/amplify/functions/deleteDocument/node_modules/@aws-sdk/client-dynamodb/dist-es/pagination/index.js new file mode 100644 index 0000000..a6dfcd0 --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@aws-sdk/client-dynamodb/dist-es/pagination/index.js @@ -0,0 +1,7 @@ +export * from "./Interfaces"; +export * from "./ListContributorInsightsPaginator"; +export * from "./ListExportsPaginator"; +export * from "./ListImportsPaginator"; +export * from "./ListTablesPaginator"; +export * from "./QueryPaginator"; +export * from "./ScanPaginator"; diff --git a/amplify/functions/deleteDocument/node_modules/@aws-sdk/client-dynamodb/dist-es/protocols/Aws_json1_0.js b/amplify/functions/deleteDocument/node_modules/@aws-sdk/client-dynamodb/dist-es/protocols/Aws_json1_0.js new file mode 100644 index 0000000..d6c7135 --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@aws-sdk/client-dynamodb/dist-es/protocols/Aws_json1_0.js @@ -0,0 +1,3094 @@ +import { awsExpectUnion as __expectUnion, loadRestJsonErrorCode, parseJsonBody as parseBody, parseJsonErrorBody as parseErrorBody, } from "@aws-sdk/core"; +import { HttpRequest as __HttpRequest } from "@smithy/protocol-http"; +import { _json, collectBody, decorateServiceException as __decorateServiceException, expectBoolean as __expectBoolean, expectInt32 as __expectInt32, expectLong as __expectLong, expectNonNull as __expectNonNull, expectNumber as __expectNumber, expectString as __expectString, 
limitedParseDouble as __limitedParseDouble, parseEpochTimestamp as __parseEpochTimestamp, serializeFloat as __serializeFloat, take, withBaseException, } from "@smithy/smithy-client"; +import { v4 as generateIdempotencyToken } from "uuid"; +import { DynamoDBServiceException as __BaseException } from "../models/DynamoDBServiceException"; +import { AttributeValue, BackupInUseException, BackupNotFoundException, ConditionalCheckFailedException, ContinuousBackupsUnavailableException, DuplicateItemException, ExportConflictException, ExportNotFoundException, GlobalTableAlreadyExistsException, GlobalTableNotFoundException, IdempotentParameterMismatchException, ImportConflictException, ImportNotFoundException, IndexNotFoundException, InternalServerError, InvalidEndpointException, InvalidExportTimeException, InvalidRestoreTimeException, ItemCollectionSizeLimitExceededException, LimitExceededException, PointInTimeRecoveryUnavailableException, PolicyNotFoundException, ProvisionedThroughputExceededException, ReplicaAlreadyExistsException, ReplicaNotFoundException, ReplicatedWriteConflictException, RequestLimitExceeded, ResourceInUseException, ResourceNotFoundException, TableAlreadyExistsException, TableInUseException, TableNotFoundException, TransactionCanceledException, TransactionConflictException, TransactionInProgressException, } from "../models/models_0"; +export const se_BatchExecuteStatementCommand = async (input, context) => { + const headers = sharedHeaders("BatchExecuteStatement"); + let body; + body = JSON.stringify(se_BatchExecuteStatementInput(input, context)); + return buildHttpRpcRequest(context, headers, "/", undefined, body); +}; +export const se_BatchGetItemCommand = async (input, context) => { + const headers = sharedHeaders("BatchGetItem"); + let body; + body = JSON.stringify(se_BatchGetItemInput(input, context)); + return buildHttpRpcRequest(context, headers, "/", undefined, body); +}; +export const se_BatchWriteItemCommand = async (input, context) => { + 
const headers = sharedHeaders("BatchWriteItem"); + let body; + body = JSON.stringify(se_BatchWriteItemInput(input, context)); + return buildHttpRpcRequest(context, headers, "/", undefined, body); +}; +export const se_CreateBackupCommand = async (input, context) => { + const headers = sharedHeaders("CreateBackup"); + let body; + body = JSON.stringify(_json(input)); + return buildHttpRpcRequest(context, headers, "/", undefined, body); +}; +export const se_CreateGlobalTableCommand = async (input, context) => { + const headers = sharedHeaders("CreateGlobalTable"); + let body; + body = JSON.stringify(_json(input)); + return buildHttpRpcRequest(context, headers, "/", undefined, body); +}; +export const se_CreateTableCommand = async (input, context) => { + const headers = sharedHeaders("CreateTable"); + let body; + body = JSON.stringify(_json(input)); + return buildHttpRpcRequest(context, headers, "/", undefined, body); +}; +export const se_DeleteBackupCommand = async (input, context) => { + const headers = sharedHeaders("DeleteBackup"); + let body; + body = JSON.stringify(_json(input)); + return buildHttpRpcRequest(context, headers, "/", undefined, body); +}; +export const se_DeleteItemCommand = async (input, context) => { + const headers = sharedHeaders("DeleteItem"); + let body; + body = JSON.stringify(se_DeleteItemInput(input, context)); + return buildHttpRpcRequest(context, headers, "/", undefined, body); +}; +export const se_DeleteResourcePolicyCommand = async (input, context) => { + const headers = sharedHeaders("DeleteResourcePolicy"); + let body; + body = JSON.stringify(_json(input)); + return buildHttpRpcRequest(context, headers, "/", undefined, body); +}; +export const se_DeleteTableCommand = async (input, context) => { + const headers = sharedHeaders("DeleteTable"); + let body; + body = JSON.stringify(_json(input)); + return buildHttpRpcRequest(context, headers, "/", undefined, body); +}; +export const se_DescribeBackupCommand = async (input, context) => { + 
const headers = sharedHeaders("DescribeBackup"); + let body; + body = JSON.stringify(_json(input)); + return buildHttpRpcRequest(context, headers, "/", undefined, body); +}; +export const se_DescribeContinuousBackupsCommand = async (input, context) => { + const headers = sharedHeaders("DescribeContinuousBackups"); + let body; + body = JSON.stringify(_json(input)); + return buildHttpRpcRequest(context, headers, "/", undefined, body); +}; +export const se_DescribeContributorInsightsCommand = async (input, context) => { + const headers = sharedHeaders("DescribeContributorInsights"); + let body; + body = JSON.stringify(_json(input)); + return buildHttpRpcRequest(context, headers, "/", undefined, body); +}; +export const se_DescribeEndpointsCommand = async (input, context) => { + const headers = sharedHeaders("DescribeEndpoints"); + let body; + body = JSON.stringify(_json(input)); + return buildHttpRpcRequest(context, headers, "/", undefined, body); +}; +export const se_DescribeExportCommand = async (input, context) => { + const headers = sharedHeaders("DescribeExport"); + let body; + body = JSON.stringify(_json(input)); + return buildHttpRpcRequest(context, headers, "/", undefined, body); +}; +export const se_DescribeGlobalTableCommand = async (input, context) => { + const headers = sharedHeaders("DescribeGlobalTable"); + let body; + body = JSON.stringify(_json(input)); + return buildHttpRpcRequest(context, headers, "/", undefined, body); +}; +export const se_DescribeGlobalTableSettingsCommand = async (input, context) => { + const headers = sharedHeaders("DescribeGlobalTableSettings"); + let body; + body = JSON.stringify(_json(input)); + return buildHttpRpcRequest(context, headers, "/", undefined, body); +}; +export const se_DescribeImportCommand = async (input, context) => { + const headers = sharedHeaders("DescribeImport"); + let body; + body = JSON.stringify(_json(input)); + return buildHttpRpcRequest(context, headers, "/", undefined, body); +}; +export const 
se_DescribeKinesisStreamingDestinationCommand = async (input, context) => { + const headers = sharedHeaders("DescribeKinesisStreamingDestination"); + let body; + body = JSON.stringify(_json(input)); + return buildHttpRpcRequest(context, headers, "/", undefined, body); +}; +export const se_DescribeLimitsCommand = async (input, context) => { + const headers = sharedHeaders("DescribeLimits"); + let body; + body = JSON.stringify(_json(input)); + return buildHttpRpcRequest(context, headers, "/", undefined, body); +}; +export const se_DescribeTableCommand = async (input, context) => { + const headers = sharedHeaders("DescribeTable"); + let body; + body = JSON.stringify(_json(input)); + return buildHttpRpcRequest(context, headers, "/", undefined, body); +}; +export const se_DescribeTableReplicaAutoScalingCommand = async (input, context) => { + const headers = sharedHeaders("DescribeTableReplicaAutoScaling"); + let body; + body = JSON.stringify(_json(input)); + return buildHttpRpcRequest(context, headers, "/", undefined, body); +}; +export const se_DescribeTimeToLiveCommand = async (input, context) => { + const headers = sharedHeaders("DescribeTimeToLive"); + let body; + body = JSON.stringify(_json(input)); + return buildHttpRpcRequest(context, headers, "/", undefined, body); +}; +export const se_DisableKinesisStreamingDestinationCommand = async (input, context) => { + const headers = sharedHeaders("DisableKinesisStreamingDestination"); + let body; + body = JSON.stringify(_json(input)); + return buildHttpRpcRequest(context, headers, "/", undefined, body); +}; +export const se_EnableKinesisStreamingDestinationCommand = async (input, context) => { + const headers = sharedHeaders("EnableKinesisStreamingDestination"); + let body; + body = JSON.stringify(_json(input)); + return buildHttpRpcRequest(context, headers, "/", undefined, body); +}; +export const se_ExecuteStatementCommand = async (input, context) => { + const headers = sharedHeaders("ExecuteStatement"); + let body; + 
body = JSON.stringify(se_ExecuteStatementInput(input, context)); + return buildHttpRpcRequest(context, headers, "/", undefined, body); +}; +export const se_ExecuteTransactionCommand = async (input, context) => { + const headers = sharedHeaders("ExecuteTransaction"); + let body; + body = JSON.stringify(se_ExecuteTransactionInput(input, context)); + return buildHttpRpcRequest(context, headers, "/", undefined, body); +}; +export const se_ExportTableToPointInTimeCommand = async (input, context) => { + const headers = sharedHeaders("ExportTableToPointInTime"); + let body; + body = JSON.stringify(se_ExportTableToPointInTimeInput(input, context)); + return buildHttpRpcRequest(context, headers, "/", undefined, body); +}; +export const se_GetItemCommand = async (input, context) => { + const headers = sharedHeaders("GetItem"); + let body; + body = JSON.stringify(se_GetItemInput(input, context)); + return buildHttpRpcRequest(context, headers, "/", undefined, body); +}; +export const se_GetResourcePolicyCommand = async (input, context) => { + const headers = sharedHeaders("GetResourcePolicy"); + let body; + body = JSON.stringify(_json(input)); + return buildHttpRpcRequest(context, headers, "/", undefined, body); +}; +export const se_ImportTableCommand = async (input, context) => { + const headers = sharedHeaders("ImportTable"); + let body; + body = JSON.stringify(se_ImportTableInput(input, context)); + return buildHttpRpcRequest(context, headers, "/", undefined, body); +}; +export const se_ListBackupsCommand = async (input, context) => { + const headers = sharedHeaders("ListBackups"); + let body; + body = JSON.stringify(se_ListBackupsInput(input, context)); + return buildHttpRpcRequest(context, headers, "/", undefined, body); +}; +export const se_ListContributorInsightsCommand = async (input, context) => { + const headers = sharedHeaders("ListContributorInsights"); + let body; + body = JSON.stringify(_json(input)); + return buildHttpRpcRequest(context, headers, "/", undefined, 
body); +}; +export const se_ListExportsCommand = async (input, context) => { + const headers = sharedHeaders("ListExports"); + let body; + body = JSON.stringify(_json(input)); + return buildHttpRpcRequest(context, headers, "/", undefined, body); +}; +export const se_ListGlobalTablesCommand = async (input, context) => { + const headers = sharedHeaders("ListGlobalTables"); + let body; + body = JSON.stringify(_json(input)); + return buildHttpRpcRequest(context, headers, "/", undefined, body); +}; +export const se_ListImportsCommand = async (input, context) => { + const headers = sharedHeaders("ListImports"); + let body; + body = JSON.stringify(_json(input)); + return buildHttpRpcRequest(context, headers, "/", undefined, body); +}; +export const se_ListTablesCommand = async (input, context) => { + const headers = sharedHeaders("ListTables"); + let body; + body = JSON.stringify(_json(input)); + return buildHttpRpcRequest(context, headers, "/", undefined, body); +}; +export const se_ListTagsOfResourceCommand = async (input, context) => { + const headers = sharedHeaders("ListTagsOfResource"); + let body; + body = JSON.stringify(_json(input)); + return buildHttpRpcRequest(context, headers, "/", undefined, body); +}; +export const se_PutItemCommand = async (input, context) => { + const headers = sharedHeaders("PutItem"); + let body; + body = JSON.stringify(se_PutItemInput(input, context)); + return buildHttpRpcRequest(context, headers, "/", undefined, body); +}; +export const se_PutResourcePolicyCommand = async (input, context) => { + const headers = sharedHeaders("PutResourcePolicy"); + let body; + body = JSON.stringify(_json(input)); + return buildHttpRpcRequest(context, headers, "/", undefined, body); +}; +export const se_QueryCommand = async (input, context) => { + const headers = sharedHeaders("Query"); + let body; + body = JSON.stringify(se_QueryInput(input, context)); + return buildHttpRpcRequest(context, headers, "/", undefined, body); +}; +export const 
se_RestoreTableFromBackupCommand = async (input, context) => { + const headers = sharedHeaders("RestoreTableFromBackup"); + let body; + body = JSON.stringify(_json(input)); + return buildHttpRpcRequest(context, headers, "/", undefined, body); +}; +export const se_RestoreTableToPointInTimeCommand = async (input, context) => { + const headers = sharedHeaders("RestoreTableToPointInTime"); + let body; + body = JSON.stringify(se_RestoreTableToPointInTimeInput(input, context)); + return buildHttpRpcRequest(context, headers, "/", undefined, body); +}; +export const se_ScanCommand = async (input, context) => { + const headers = sharedHeaders("Scan"); + let body; + body = JSON.stringify(se_ScanInput(input, context)); + return buildHttpRpcRequest(context, headers, "/", undefined, body); +}; +export const se_TagResourceCommand = async (input, context) => { + const headers = sharedHeaders("TagResource"); + let body; + body = JSON.stringify(_json(input)); + return buildHttpRpcRequest(context, headers, "/", undefined, body); +}; +export const se_TransactGetItemsCommand = async (input, context) => { + const headers = sharedHeaders("TransactGetItems"); + let body; + body = JSON.stringify(se_TransactGetItemsInput(input, context)); + return buildHttpRpcRequest(context, headers, "/", undefined, body); +}; +export const se_TransactWriteItemsCommand = async (input, context) => { + const headers = sharedHeaders("TransactWriteItems"); + let body; + body = JSON.stringify(se_TransactWriteItemsInput(input, context)); + return buildHttpRpcRequest(context, headers, "/", undefined, body); +}; +export const se_UntagResourceCommand = async (input, context) => { + const headers = sharedHeaders("UntagResource"); + let body; + body = JSON.stringify(_json(input)); + return buildHttpRpcRequest(context, headers, "/", undefined, body); +}; +export const se_UpdateContinuousBackupsCommand = async (input, context) => { + const headers = sharedHeaders("UpdateContinuousBackups"); + let body; + body = 
JSON.stringify(_json(input)); + return buildHttpRpcRequest(context, headers, "/", undefined, body); +}; +export const se_UpdateContributorInsightsCommand = async (input, context) => { + const headers = sharedHeaders("UpdateContributorInsights"); + let body; + body = JSON.stringify(_json(input)); + return buildHttpRpcRequest(context, headers, "/", undefined, body); +}; +export const se_UpdateGlobalTableCommand = async (input, context) => { + const headers = sharedHeaders("UpdateGlobalTable"); + let body; + body = JSON.stringify(_json(input)); + return buildHttpRpcRequest(context, headers, "/", undefined, body); +}; +export const se_UpdateGlobalTableSettingsCommand = async (input, context) => { + const headers = sharedHeaders("UpdateGlobalTableSettings"); + let body; + body = JSON.stringify(se_UpdateGlobalTableSettingsInput(input, context)); + return buildHttpRpcRequest(context, headers, "/", undefined, body); +}; +export const se_UpdateItemCommand = async (input, context) => { + const headers = sharedHeaders("UpdateItem"); + let body; + body = JSON.stringify(se_UpdateItemInput(input, context)); + return buildHttpRpcRequest(context, headers, "/", undefined, body); +}; +export const se_UpdateKinesisStreamingDestinationCommand = async (input, context) => { + const headers = sharedHeaders("UpdateKinesisStreamingDestination"); + let body; + body = JSON.stringify(_json(input)); + return buildHttpRpcRequest(context, headers, "/", undefined, body); +}; +export const se_UpdateTableCommand = async (input, context) => { + const headers = sharedHeaders("UpdateTable"); + let body; + body = JSON.stringify(_json(input)); + return buildHttpRpcRequest(context, headers, "/", undefined, body); +}; +export const se_UpdateTableReplicaAutoScalingCommand = async (input, context) => { + const headers = sharedHeaders("UpdateTableReplicaAutoScaling"); + let body; + body = JSON.stringify(se_UpdateTableReplicaAutoScalingInput(input, context)); + return buildHttpRpcRequest(context, headers, 
"/", undefined, body); +}; +export const se_UpdateTimeToLiveCommand = async (input, context) => { + const headers = sharedHeaders("UpdateTimeToLive"); + let body; + body = JSON.stringify(_json(input)); + return buildHttpRpcRequest(context, headers, "/", undefined, body); +}; +export const de_BatchExecuteStatementCommand = async (output, context) => { + if (output.statusCode >= 300) { + return de_CommandError(output, context); + } + const data = await parseBody(output.body, context); + let contents = {}; + contents = de_BatchExecuteStatementOutput(data, context); + const response = { + $metadata: deserializeMetadata(output), + ...contents, + }; + return response; +}; +export const de_BatchGetItemCommand = async (output, context) => { + if (output.statusCode >= 300) { + return de_CommandError(output, context); + } + const data = await parseBody(output.body, context); + let contents = {}; + contents = de_BatchGetItemOutput(data, context); + const response = { + $metadata: deserializeMetadata(output), + ...contents, + }; + return response; +}; +export const de_BatchWriteItemCommand = async (output, context) => { + if (output.statusCode >= 300) { + return de_CommandError(output, context); + } + const data = await parseBody(output.body, context); + let contents = {}; + contents = de_BatchWriteItemOutput(data, context); + const response = { + $metadata: deserializeMetadata(output), + ...contents, + }; + return response; +}; +export const de_CreateBackupCommand = async (output, context) => { + if (output.statusCode >= 300) { + return de_CommandError(output, context); + } + const data = await parseBody(output.body, context); + let contents = {}; + contents = de_CreateBackupOutput(data, context); + const response = { + $metadata: deserializeMetadata(output), + ...contents, + }; + return response; +}; +export const de_CreateGlobalTableCommand = async (output, context) => { + if (output.statusCode >= 300) { + return de_CommandError(output, context); + } + const data = await 
parseBody(output.body, context); + let contents = {}; + contents = de_CreateGlobalTableOutput(data, context); + const response = { + $metadata: deserializeMetadata(output), + ...contents, + }; + return response; +}; +export const de_CreateTableCommand = async (output, context) => { + if (output.statusCode >= 300) { + return de_CommandError(output, context); + } + const data = await parseBody(output.body, context); + let contents = {}; + contents = de_CreateTableOutput(data, context); + const response = { + $metadata: deserializeMetadata(output), + ...contents, + }; + return response; +}; +export const de_DeleteBackupCommand = async (output, context) => { + if (output.statusCode >= 300) { + return de_CommandError(output, context); + } + const data = await parseBody(output.body, context); + let contents = {}; + contents = de_DeleteBackupOutput(data, context); + const response = { + $metadata: deserializeMetadata(output), + ...contents, + }; + return response; +}; +export const de_DeleteItemCommand = async (output, context) => { + if (output.statusCode >= 300) { + return de_CommandError(output, context); + } + const data = await parseBody(output.body, context); + let contents = {}; + contents = de_DeleteItemOutput(data, context); + const response = { + $metadata: deserializeMetadata(output), + ...contents, + }; + return response; +}; +export const de_DeleteResourcePolicyCommand = async (output, context) => { + if (output.statusCode >= 300) { + return de_CommandError(output, context); + } + const data = await parseBody(output.body, context); + let contents = {}; + contents = _json(data); + const response = { + $metadata: deserializeMetadata(output), + ...contents, + }; + return response; +}; +export const de_DeleteTableCommand = async (output, context) => { + if (output.statusCode >= 300) { + return de_CommandError(output, context); + } + const data = await parseBody(output.body, context); + let contents = {}; + contents = de_DeleteTableOutput(data, context); + const 
response = { + $metadata: deserializeMetadata(output), + ...contents, + }; + return response; +}; +export const de_DescribeBackupCommand = async (output, context) => { + if (output.statusCode >= 300) { + return de_CommandError(output, context); + } + const data = await parseBody(output.body, context); + let contents = {}; + contents = de_DescribeBackupOutput(data, context); + const response = { + $metadata: deserializeMetadata(output), + ...contents, + }; + return response; +}; +export const de_DescribeContinuousBackupsCommand = async (output, context) => { + if (output.statusCode >= 300) { + return de_CommandError(output, context); + } + const data = await parseBody(output.body, context); + let contents = {}; + contents = de_DescribeContinuousBackupsOutput(data, context); + const response = { + $metadata: deserializeMetadata(output), + ...contents, + }; + return response; +}; +export const de_DescribeContributorInsightsCommand = async (output, context) => { + if (output.statusCode >= 300) { + return de_CommandError(output, context); + } + const data = await parseBody(output.body, context); + let contents = {}; + contents = de_DescribeContributorInsightsOutput(data, context); + const response = { + $metadata: deserializeMetadata(output), + ...contents, + }; + return response; +}; +export const de_DescribeEndpointsCommand = async (output, context) => { + if (output.statusCode >= 300) { + return de_CommandError(output, context); + } + const data = await parseBody(output.body, context); + let contents = {}; + contents = _json(data); + const response = { + $metadata: deserializeMetadata(output), + ...contents, + }; + return response; +}; +export const de_DescribeExportCommand = async (output, context) => { + if (output.statusCode >= 300) { + return de_CommandError(output, context); + } + const data = await parseBody(output.body, context); + let contents = {}; + contents = de_DescribeExportOutput(data, context); + const response = { + $metadata: 
deserializeMetadata(output), + ...contents, + }; + return response; +}; +export const de_DescribeGlobalTableCommand = async (output, context) => { + if (output.statusCode >= 300) { + return de_CommandError(output, context); + } + const data = await parseBody(output.body, context); + let contents = {}; + contents = de_DescribeGlobalTableOutput(data, context); + const response = { + $metadata: deserializeMetadata(output), + ...contents, + }; + return response; +}; +export const de_DescribeGlobalTableSettingsCommand = async (output, context) => { + if (output.statusCode >= 300) { + return de_CommandError(output, context); + } + const data = await parseBody(output.body, context); + let contents = {}; + contents = de_DescribeGlobalTableSettingsOutput(data, context); + const response = { + $metadata: deserializeMetadata(output), + ...contents, + }; + return response; +}; +export const de_DescribeImportCommand = async (output, context) => { + if (output.statusCode >= 300) { + return de_CommandError(output, context); + } + const data = await parseBody(output.body, context); + let contents = {}; + contents = de_DescribeImportOutput(data, context); + const response = { + $metadata: deserializeMetadata(output), + ...contents, + }; + return response; +}; +export const de_DescribeKinesisStreamingDestinationCommand = async (output, context) => { + if (output.statusCode >= 300) { + return de_CommandError(output, context); + } + const data = await parseBody(output.body, context); + let contents = {}; + contents = _json(data); + const response = { + $metadata: deserializeMetadata(output), + ...contents, + }; + return response; +}; +export const de_DescribeLimitsCommand = async (output, context) => { + if (output.statusCode >= 300) { + return de_CommandError(output, context); + } + const data = await parseBody(output.body, context); + let contents = {}; + contents = _json(data); + const response = { + $metadata: deserializeMetadata(output), + ...contents, + }; + return response; +}; 
+export const de_DescribeTableCommand = async (output, context) => { + if (output.statusCode >= 300) { + return de_CommandError(output, context); + } + const data = await parseBody(output.body, context); + let contents = {}; + contents = de_DescribeTableOutput(data, context); + const response = { + $metadata: deserializeMetadata(output), + ...contents, + }; + return response; +}; +export const de_DescribeTableReplicaAutoScalingCommand = async (output, context) => { + if (output.statusCode >= 300) { + return de_CommandError(output, context); + } + const data = await parseBody(output.body, context); + let contents = {}; + contents = de_DescribeTableReplicaAutoScalingOutput(data, context); + const response = { + $metadata: deserializeMetadata(output), + ...contents, + }; + return response; +}; +export const de_DescribeTimeToLiveCommand = async (output, context) => { + if (output.statusCode >= 300) { + return de_CommandError(output, context); + } + const data = await parseBody(output.body, context); + let contents = {}; + contents = _json(data); + const response = { + $metadata: deserializeMetadata(output), + ...contents, + }; + return response; +}; +export const de_DisableKinesisStreamingDestinationCommand = async (output, context) => { + if (output.statusCode >= 300) { + return de_CommandError(output, context); + } + const data = await parseBody(output.body, context); + let contents = {}; + contents = _json(data); + const response = { + $metadata: deserializeMetadata(output), + ...contents, + }; + return response; +}; +export const de_EnableKinesisStreamingDestinationCommand = async (output, context) => { + if (output.statusCode >= 300) { + return de_CommandError(output, context); + } + const data = await parseBody(output.body, context); + let contents = {}; + contents = _json(data); + const response = { + $metadata: deserializeMetadata(output), + ...contents, + }; + return response; +}; +export const de_ExecuteStatementCommand = async (output, context) => { + if 
(output.statusCode >= 300) { + return de_CommandError(output, context); + } + const data = await parseBody(output.body, context); + let contents = {}; + contents = de_ExecuteStatementOutput(data, context); + const response = { + $metadata: deserializeMetadata(output), + ...contents, + }; + return response; +}; +export const de_ExecuteTransactionCommand = async (output, context) => { + if (output.statusCode >= 300) { + return de_CommandError(output, context); + } + const data = await parseBody(output.body, context); + let contents = {}; + contents = de_ExecuteTransactionOutput(data, context); + const response = { + $metadata: deserializeMetadata(output), + ...contents, + }; + return response; +}; +export const de_ExportTableToPointInTimeCommand = async (output, context) => { + if (output.statusCode >= 300) { + return de_CommandError(output, context); + } + const data = await parseBody(output.body, context); + let contents = {}; + contents = de_ExportTableToPointInTimeOutput(data, context); + const response = { + $metadata: deserializeMetadata(output), + ...contents, + }; + return response; +}; +export const de_GetItemCommand = async (output, context) => { + if (output.statusCode >= 300) { + return de_CommandError(output, context); + } + const data = await parseBody(output.body, context); + let contents = {}; + contents = de_GetItemOutput(data, context); + const response = { + $metadata: deserializeMetadata(output), + ...contents, + }; + return response; +}; +export const de_GetResourcePolicyCommand = async (output, context) => { + if (output.statusCode >= 300) { + return de_CommandError(output, context); + } + const data = await parseBody(output.body, context); + let contents = {}; + contents = _json(data); + const response = { + $metadata: deserializeMetadata(output), + ...contents, + }; + return response; +}; +export const de_ImportTableCommand = async (output, context) => { + if (output.statusCode >= 300) { + return de_CommandError(output, context); + } + const 
data = await parseBody(output.body, context); + let contents = {}; + contents = de_ImportTableOutput(data, context); + const response = { + $metadata: deserializeMetadata(output), + ...contents, + }; + return response; +}; +export const de_ListBackupsCommand = async (output, context) => { + if (output.statusCode >= 300) { + return de_CommandError(output, context); + } + const data = await parseBody(output.body, context); + let contents = {}; + contents = de_ListBackupsOutput(data, context); + const response = { + $metadata: deserializeMetadata(output), + ...contents, + }; + return response; +}; +export const de_ListContributorInsightsCommand = async (output, context) => { + if (output.statusCode >= 300) { + return de_CommandError(output, context); + } + const data = await parseBody(output.body, context); + let contents = {}; + contents = _json(data); + const response = { + $metadata: deserializeMetadata(output), + ...contents, + }; + return response; +}; +export const de_ListExportsCommand = async (output, context) => { + if (output.statusCode >= 300) { + return de_CommandError(output, context); + } + const data = await parseBody(output.body, context); + let contents = {}; + contents = _json(data); + const response = { + $metadata: deserializeMetadata(output), + ...contents, + }; + return response; +}; +export const de_ListGlobalTablesCommand = async (output, context) => { + if (output.statusCode >= 300) { + return de_CommandError(output, context); + } + const data = await parseBody(output.body, context); + let contents = {}; + contents = _json(data); + const response = { + $metadata: deserializeMetadata(output), + ...contents, + }; + return response; +}; +export const de_ListImportsCommand = async (output, context) => { + if (output.statusCode >= 300) { + return de_CommandError(output, context); + } + const data = await parseBody(output.body, context); + let contents = {}; + contents = de_ListImportsOutput(data, context); + const response = { + $metadata: 
deserializeMetadata(output), + ...contents, + }; + return response; +}; +export const de_ListTablesCommand = async (output, context) => { + if (output.statusCode >= 300) { + return de_CommandError(output, context); + } + const data = await parseBody(output.body, context); + let contents = {}; + contents = _json(data); + const response = { + $metadata: deserializeMetadata(output), + ...contents, + }; + return response; +}; +export const de_ListTagsOfResourceCommand = async (output, context) => { + if (output.statusCode >= 300) { + return de_CommandError(output, context); + } + const data = await parseBody(output.body, context); + let contents = {}; + contents = _json(data); + const response = { + $metadata: deserializeMetadata(output), + ...contents, + }; + return response; +}; +export const de_PutItemCommand = async (output, context) => { + if (output.statusCode >= 300) { + return de_CommandError(output, context); + } + const data = await parseBody(output.body, context); + let contents = {}; + contents = de_PutItemOutput(data, context); + const response = { + $metadata: deserializeMetadata(output), + ...contents, + }; + return response; +}; +export const de_PutResourcePolicyCommand = async (output, context) => { + if (output.statusCode >= 300) { + return de_CommandError(output, context); + } + const data = await parseBody(output.body, context); + let contents = {}; + contents = _json(data); + const response = { + $metadata: deserializeMetadata(output), + ...contents, + }; + return response; +}; +export const de_QueryCommand = async (output, context) => { + if (output.statusCode >= 300) { + return de_CommandError(output, context); + } + const data = await parseBody(output.body, context); + let contents = {}; + contents = de_QueryOutput(data, context); + const response = { + $metadata: deserializeMetadata(output), + ...contents, + }; + return response; +}; +export const de_RestoreTableFromBackupCommand = async (output, context) => { + if (output.statusCode >= 300) { 
+ return de_CommandError(output, context); + } + const data = await parseBody(output.body, context); + let contents = {}; + contents = de_RestoreTableFromBackupOutput(data, context); + const response = { + $metadata: deserializeMetadata(output), + ...contents, + }; + return response; +}; +export const de_RestoreTableToPointInTimeCommand = async (output, context) => { + if (output.statusCode >= 300) { + return de_CommandError(output, context); + } + const data = await parseBody(output.body, context); + let contents = {}; + contents = de_RestoreTableToPointInTimeOutput(data, context); + const response = { + $metadata: deserializeMetadata(output), + ...contents, + }; + return response; +}; +export const de_ScanCommand = async (output, context) => { + if (output.statusCode >= 300) { + return de_CommandError(output, context); + } + const data = await parseBody(output.body, context); + let contents = {}; + contents = de_ScanOutput(data, context); + const response = { + $metadata: deserializeMetadata(output), + ...contents, + }; + return response; +}; +export const de_TagResourceCommand = async (output, context) => { + if (output.statusCode >= 300) { + return de_CommandError(output, context); + } + await collectBody(output.body, context); + const response = { + $metadata: deserializeMetadata(output), + }; + return response; +}; +export const de_TransactGetItemsCommand = async (output, context) => { + if (output.statusCode >= 300) { + return de_CommandError(output, context); + } + const data = await parseBody(output.body, context); + let contents = {}; + contents = de_TransactGetItemsOutput(data, context); + const response = { + $metadata: deserializeMetadata(output), + ...contents, + }; + return response; +}; +export const de_TransactWriteItemsCommand = async (output, context) => { + if (output.statusCode >= 300) { + return de_CommandError(output, context); + } + const data = await parseBody(output.body, context); + let contents = {}; + contents = 
de_TransactWriteItemsOutput(data, context); + const response = { + $metadata: deserializeMetadata(output), + ...contents, + }; + return response; +}; +export const de_UntagResourceCommand = async (output, context) => { + if (output.statusCode >= 300) { + return de_CommandError(output, context); + } + await collectBody(output.body, context); + const response = { + $metadata: deserializeMetadata(output), + }; + return response; +}; +export const de_UpdateContinuousBackupsCommand = async (output, context) => { + if (output.statusCode >= 300) { + return de_CommandError(output, context); + } + const data = await parseBody(output.body, context); + let contents = {}; + contents = de_UpdateContinuousBackupsOutput(data, context); + const response = { + $metadata: deserializeMetadata(output), + ...contents, + }; + return response; +}; +export const de_UpdateContributorInsightsCommand = async (output, context) => { + if (output.statusCode >= 300) { + return de_CommandError(output, context); + } + const data = await parseBody(output.body, context); + let contents = {}; + contents = _json(data); + const response = { + $metadata: deserializeMetadata(output), + ...contents, + }; + return response; +}; +export const de_UpdateGlobalTableCommand = async (output, context) => { + if (output.statusCode >= 300) { + return de_CommandError(output, context); + } + const data = await parseBody(output.body, context); + let contents = {}; + contents = de_UpdateGlobalTableOutput(data, context); + const response = { + $metadata: deserializeMetadata(output), + ...contents, + }; + return response; +}; +export const de_UpdateGlobalTableSettingsCommand = async (output, context) => { + if (output.statusCode >= 300) { + return de_CommandError(output, context); + } + const data = await parseBody(output.body, context); + let contents = {}; + contents = de_UpdateGlobalTableSettingsOutput(data, context); + const response = { + $metadata: deserializeMetadata(output), + ...contents, + }; + return response; 
+}; +export const de_UpdateItemCommand = async (output, context) => { + if (output.statusCode >= 300) { + return de_CommandError(output, context); + } + const data = await parseBody(output.body, context); + let contents = {}; + contents = de_UpdateItemOutput(data, context); + const response = { + $metadata: deserializeMetadata(output), + ...contents, + }; + return response; +}; +export const de_UpdateKinesisStreamingDestinationCommand = async (output, context) => { + if (output.statusCode >= 300) { + return de_CommandError(output, context); + } + const data = await parseBody(output.body, context); + let contents = {}; + contents = _json(data); + const response = { + $metadata: deserializeMetadata(output), + ...contents, + }; + return response; +}; +export const de_UpdateTableCommand = async (output, context) => { + if (output.statusCode >= 300) { + return de_CommandError(output, context); + } + const data = await parseBody(output.body, context); + let contents = {}; + contents = de_UpdateTableOutput(data, context); + const response = { + $metadata: deserializeMetadata(output), + ...contents, + }; + return response; +}; +export const de_UpdateTableReplicaAutoScalingCommand = async (output, context) => { + if (output.statusCode >= 300) { + return de_CommandError(output, context); + } + const data = await parseBody(output.body, context); + let contents = {}; + contents = de_UpdateTableReplicaAutoScalingOutput(data, context); + const response = { + $metadata: deserializeMetadata(output), + ...contents, + }; + return response; +}; +export const de_UpdateTimeToLiveCommand = async (output, context) => { + if (output.statusCode >= 300) { + return de_CommandError(output, context); + } + const data = await parseBody(output.body, context); + let contents = {}; + contents = _json(data); + const response = { + $metadata: deserializeMetadata(output), + ...contents, + }; + return response; +}; +const de_CommandError = async (output, context) => { + const parsedOutput = { + 
...output, + body: await parseErrorBody(output.body, context), + }; + const errorCode = loadRestJsonErrorCode(output, parsedOutput.body); + switch (errorCode) { + case "InternalServerError": + case "com.amazonaws.dynamodb#InternalServerError": + throw await de_InternalServerErrorRes(parsedOutput, context); + case "RequestLimitExceeded": + case "com.amazonaws.dynamodb#RequestLimitExceeded": + throw await de_RequestLimitExceededRes(parsedOutput, context); + case "InvalidEndpointException": + case "com.amazonaws.dynamodb#InvalidEndpointException": + throw await de_InvalidEndpointExceptionRes(parsedOutput, context); + case "ProvisionedThroughputExceededException": + case "com.amazonaws.dynamodb#ProvisionedThroughputExceededException": + throw await de_ProvisionedThroughputExceededExceptionRes(parsedOutput, context); + case "ResourceNotFoundException": + case "com.amazonaws.dynamodb#ResourceNotFoundException": + throw await de_ResourceNotFoundExceptionRes(parsedOutput, context); + case "ItemCollectionSizeLimitExceededException": + case "com.amazonaws.dynamodb#ItemCollectionSizeLimitExceededException": + throw await de_ItemCollectionSizeLimitExceededExceptionRes(parsedOutput, context); + case "BackupInUseException": + case "com.amazonaws.dynamodb#BackupInUseException": + throw await de_BackupInUseExceptionRes(parsedOutput, context); + case "ContinuousBackupsUnavailableException": + case "com.amazonaws.dynamodb#ContinuousBackupsUnavailableException": + throw await de_ContinuousBackupsUnavailableExceptionRes(parsedOutput, context); + case "LimitExceededException": + case "com.amazonaws.dynamodb#LimitExceededException": + throw await de_LimitExceededExceptionRes(parsedOutput, context); + case "TableInUseException": + case "com.amazonaws.dynamodb#TableInUseException": + throw await de_TableInUseExceptionRes(parsedOutput, context); + case "TableNotFoundException": + case "com.amazonaws.dynamodb#TableNotFoundException": + throw await de_TableNotFoundExceptionRes(parsedOutput, 
context); + case "GlobalTableAlreadyExistsException": + case "com.amazonaws.dynamodb#GlobalTableAlreadyExistsException": + throw await de_GlobalTableAlreadyExistsExceptionRes(parsedOutput, context); + case "ResourceInUseException": + case "com.amazonaws.dynamodb#ResourceInUseException": + throw await de_ResourceInUseExceptionRes(parsedOutput, context); + case "BackupNotFoundException": + case "com.amazonaws.dynamodb#BackupNotFoundException": + throw await de_BackupNotFoundExceptionRes(parsedOutput, context); + case "ConditionalCheckFailedException": + case "com.amazonaws.dynamodb#ConditionalCheckFailedException": + throw await de_ConditionalCheckFailedExceptionRes(parsedOutput, context); + case "ReplicatedWriteConflictException": + case "com.amazonaws.dynamodb#ReplicatedWriteConflictException": + throw await de_ReplicatedWriteConflictExceptionRes(parsedOutput, context); + case "TransactionConflictException": + case "com.amazonaws.dynamodb#TransactionConflictException": + throw await de_TransactionConflictExceptionRes(parsedOutput, context); + case "PolicyNotFoundException": + case "com.amazonaws.dynamodb#PolicyNotFoundException": + throw await de_PolicyNotFoundExceptionRes(parsedOutput, context); + case "ExportNotFoundException": + case "com.amazonaws.dynamodb#ExportNotFoundException": + throw await de_ExportNotFoundExceptionRes(parsedOutput, context); + case "GlobalTableNotFoundException": + case "com.amazonaws.dynamodb#GlobalTableNotFoundException": + throw await de_GlobalTableNotFoundExceptionRes(parsedOutput, context); + case "ImportNotFoundException": + case "com.amazonaws.dynamodb#ImportNotFoundException": + throw await de_ImportNotFoundExceptionRes(parsedOutput, context); + case "DuplicateItemException": + case "com.amazonaws.dynamodb#DuplicateItemException": + throw await de_DuplicateItemExceptionRes(parsedOutput, context); + case "IdempotentParameterMismatchException": + case "com.amazonaws.dynamodb#IdempotentParameterMismatchException": + throw await 
de_IdempotentParameterMismatchExceptionRes(parsedOutput, context); + case "TransactionCanceledException": + case "com.amazonaws.dynamodb#TransactionCanceledException": + throw await de_TransactionCanceledExceptionRes(parsedOutput, context); + case "TransactionInProgressException": + case "com.amazonaws.dynamodb#TransactionInProgressException": + throw await de_TransactionInProgressExceptionRes(parsedOutput, context); + case "ExportConflictException": + case "com.amazonaws.dynamodb#ExportConflictException": + throw await de_ExportConflictExceptionRes(parsedOutput, context); + case "InvalidExportTimeException": + case "com.amazonaws.dynamodb#InvalidExportTimeException": + throw await de_InvalidExportTimeExceptionRes(parsedOutput, context); + case "PointInTimeRecoveryUnavailableException": + case "com.amazonaws.dynamodb#PointInTimeRecoveryUnavailableException": + throw await de_PointInTimeRecoveryUnavailableExceptionRes(parsedOutput, context); + case "ImportConflictException": + case "com.amazonaws.dynamodb#ImportConflictException": + throw await de_ImportConflictExceptionRes(parsedOutput, context); + case "TableAlreadyExistsException": + case "com.amazonaws.dynamodb#TableAlreadyExistsException": + throw await de_TableAlreadyExistsExceptionRes(parsedOutput, context); + case "InvalidRestoreTimeException": + case "com.amazonaws.dynamodb#InvalidRestoreTimeException": + throw await de_InvalidRestoreTimeExceptionRes(parsedOutput, context); + case "ReplicaAlreadyExistsException": + case "com.amazonaws.dynamodb#ReplicaAlreadyExistsException": + throw await de_ReplicaAlreadyExistsExceptionRes(parsedOutput, context); + case "ReplicaNotFoundException": + case "com.amazonaws.dynamodb#ReplicaNotFoundException": + throw await de_ReplicaNotFoundExceptionRes(parsedOutput, context); + case "IndexNotFoundException": + case "com.amazonaws.dynamodb#IndexNotFoundException": + throw await de_IndexNotFoundExceptionRes(parsedOutput, context); + default: + const parsedBody = 
parsedOutput.body; + return throwDefaultError({ + output, + parsedBody, + errorCode, + }); + } +}; +const de_BackupInUseExceptionRes = async (parsedOutput, context) => { + const body = parsedOutput.body; + const deserialized = _json(body); + const exception = new BackupInUseException({ + $metadata: deserializeMetadata(parsedOutput), + ...deserialized, + }); + return __decorateServiceException(exception, body); +}; +const de_BackupNotFoundExceptionRes = async (parsedOutput, context) => { + const body = parsedOutput.body; + const deserialized = _json(body); + const exception = new BackupNotFoundException({ + $metadata: deserializeMetadata(parsedOutput), + ...deserialized, + }); + return __decorateServiceException(exception, body); +}; +const de_ConditionalCheckFailedExceptionRes = async (parsedOutput, context) => { + const body = parsedOutput.body; + const deserialized = de_ConditionalCheckFailedException(body, context); + const exception = new ConditionalCheckFailedException({ + $metadata: deserializeMetadata(parsedOutput), + ...deserialized, + }); + return __decorateServiceException(exception, body); +}; +const de_ContinuousBackupsUnavailableExceptionRes = async (parsedOutput, context) => { + const body = parsedOutput.body; + const deserialized = _json(body); + const exception = new ContinuousBackupsUnavailableException({ + $metadata: deserializeMetadata(parsedOutput), + ...deserialized, + }); + return __decorateServiceException(exception, body); +}; +const de_DuplicateItemExceptionRes = async (parsedOutput, context) => { + const body = parsedOutput.body; + const deserialized = _json(body); + const exception = new DuplicateItemException({ + $metadata: deserializeMetadata(parsedOutput), + ...deserialized, + }); + return __decorateServiceException(exception, body); +}; +const de_ExportConflictExceptionRes = async (parsedOutput, context) => { + const body = parsedOutput.body; + const deserialized = _json(body); + const exception = new ExportConflictException({ + 
$metadata: deserializeMetadata(parsedOutput), + ...deserialized, + }); + return __decorateServiceException(exception, body); +}; +const de_ExportNotFoundExceptionRes = async (parsedOutput, context) => { + const body = parsedOutput.body; + const deserialized = _json(body); + const exception = new ExportNotFoundException({ + $metadata: deserializeMetadata(parsedOutput), + ...deserialized, + }); + return __decorateServiceException(exception, body); +}; +const de_GlobalTableAlreadyExistsExceptionRes = async (parsedOutput, context) => { + const body = parsedOutput.body; + const deserialized = _json(body); + const exception = new GlobalTableAlreadyExistsException({ + $metadata: deserializeMetadata(parsedOutput), + ...deserialized, + }); + return __decorateServiceException(exception, body); +}; +const de_GlobalTableNotFoundExceptionRes = async (parsedOutput, context) => { + const body = parsedOutput.body; + const deserialized = _json(body); + const exception = new GlobalTableNotFoundException({ + $metadata: deserializeMetadata(parsedOutput), + ...deserialized, + }); + return __decorateServiceException(exception, body); +}; +const de_IdempotentParameterMismatchExceptionRes = async (parsedOutput, context) => { + const body = parsedOutput.body; + const deserialized = _json(body); + const exception = new IdempotentParameterMismatchException({ + $metadata: deserializeMetadata(parsedOutput), + ...deserialized, + }); + return __decorateServiceException(exception, body); +}; +const de_ImportConflictExceptionRes = async (parsedOutput, context) => { + const body = parsedOutput.body; + const deserialized = _json(body); + const exception = new ImportConflictException({ + $metadata: deserializeMetadata(parsedOutput), + ...deserialized, + }); + return __decorateServiceException(exception, body); +}; +const de_ImportNotFoundExceptionRes = async (parsedOutput, context) => { + const body = parsedOutput.body; + const deserialized = _json(body); + const exception = new 
ImportNotFoundException({ + $metadata: deserializeMetadata(parsedOutput), + ...deserialized, + }); + return __decorateServiceException(exception, body); +}; +const de_IndexNotFoundExceptionRes = async (parsedOutput, context) => { + const body = parsedOutput.body; + const deserialized = _json(body); + const exception = new IndexNotFoundException({ + $metadata: deserializeMetadata(parsedOutput), + ...deserialized, + }); + return __decorateServiceException(exception, body); +}; +const de_InternalServerErrorRes = async (parsedOutput, context) => { + const body = parsedOutput.body; + const deserialized = _json(body); + const exception = new InternalServerError({ + $metadata: deserializeMetadata(parsedOutput), + ...deserialized, + }); + return __decorateServiceException(exception, body); +}; +const de_InvalidEndpointExceptionRes = async (parsedOutput, context) => { + const body = parsedOutput.body; + const deserialized = _json(body); + const exception = new InvalidEndpointException({ + $metadata: deserializeMetadata(parsedOutput), + ...deserialized, + }); + return __decorateServiceException(exception, body); +}; +const de_InvalidExportTimeExceptionRes = async (parsedOutput, context) => { + const body = parsedOutput.body; + const deserialized = _json(body); + const exception = new InvalidExportTimeException({ + $metadata: deserializeMetadata(parsedOutput), + ...deserialized, + }); + return __decorateServiceException(exception, body); +}; +const de_InvalidRestoreTimeExceptionRes = async (parsedOutput, context) => { + const body = parsedOutput.body; + const deserialized = _json(body); + const exception = new InvalidRestoreTimeException({ + $metadata: deserializeMetadata(parsedOutput), + ...deserialized, + }); + return __decorateServiceException(exception, body); +}; +const de_ItemCollectionSizeLimitExceededExceptionRes = async (parsedOutput, context) => { + const body = parsedOutput.body; + const deserialized = _json(body); + const exception = new 
ItemCollectionSizeLimitExceededException({ + $metadata: deserializeMetadata(parsedOutput), + ...deserialized, + }); + return __decorateServiceException(exception, body); +}; +const de_LimitExceededExceptionRes = async (parsedOutput, context) => { + const body = parsedOutput.body; + const deserialized = _json(body); + const exception = new LimitExceededException({ + $metadata: deserializeMetadata(parsedOutput), + ...deserialized, + }); + return __decorateServiceException(exception, body); +}; +const de_PointInTimeRecoveryUnavailableExceptionRes = async (parsedOutput, context) => { + const body = parsedOutput.body; + const deserialized = _json(body); + const exception = new PointInTimeRecoveryUnavailableException({ + $metadata: deserializeMetadata(parsedOutput), + ...deserialized, + }); + return __decorateServiceException(exception, body); +}; +const de_PolicyNotFoundExceptionRes = async (parsedOutput, context) => { + const body = parsedOutput.body; + const deserialized = _json(body); + const exception = new PolicyNotFoundException({ + $metadata: deserializeMetadata(parsedOutput), + ...deserialized, + }); + return __decorateServiceException(exception, body); +}; +const de_ProvisionedThroughputExceededExceptionRes = async (parsedOutput, context) => { + const body = parsedOutput.body; + const deserialized = _json(body); + const exception = new ProvisionedThroughputExceededException({ + $metadata: deserializeMetadata(parsedOutput), + ...deserialized, + }); + return __decorateServiceException(exception, body); +}; +const de_ReplicaAlreadyExistsExceptionRes = async (parsedOutput, context) => { + const body = parsedOutput.body; + const deserialized = _json(body); + const exception = new ReplicaAlreadyExistsException({ + $metadata: deserializeMetadata(parsedOutput), + ...deserialized, + }); + return __decorateServiceException(exception, body); +}; +const de_ReplicaNotFoundExceptionRes = async (parsedOutput, context) => { + const body = parsedOutput.body; + const 
deserialized = _json(body); + const exception = new ReplicaNotFoundException({ + $metadata: deserializeMetadata(parsedOutput), + ...deserialized, + }); + return __decorateServiceException(exception, body); +}; +const de_ReplicatedWriteConflictExceptionRes = async (parsedOutput, context) => { + const body = parsedOutput.body; + const deserialized = _json(body); + const exception = new ReplicatedWriteConflictException({ + $metadata: deserializeMetadata(parsedOutput), + ...deserialized, + }); + return __decorateServiceException(exception, body); +}; +const de_RequestLimitExceededRes = async (parsedOutput, context) => { + const body = parsedOutput.body; + const deserialized = _json(body); + const exception = new RequestLimitExceeded({ + $metadata: deserializeMetadata(parsedOutput), + ...deserialized, + }); + return __decorateServiceException(exception, body); +}; +const de_ResourceInUseExceptionRes = async (parsedOutput, context) => { + const body = parsedOutput.body; + const deserialized = _json(body); + const exception = new ResourceInUseException({ + $metadata: deserializeMetadata(parsedOutput), + ...deserialized, + }); + return __decorateServiceException(exception, body); +}; +const de_ResourceNotFoundExceptionRes = async (parsedOutput, context) => { + const body = parsedOutput.body; + const deserialized = _json(body); + const exception = new ResourceNotFoundException({ + $metadata: deserializeMetadata(parsedOutput), + ...deserialized, + }); + return __decorateServiceException(exception, body); +}; +const de_TableAlreadyExistsExceptionRes = async (parsedOutput, context) => { + const body = parsedOutput.body; + const deserialized = _json(body); + const exception = new TableAlreadyExistsException({ + $metadata: deserializeMetadata(parsedOutput), + ...deserialized, + }); + return __decorateServiceException(exception, body); +}; +const de_TableInUseExceptionRes = async (parsedOutput, context) => { + const body = parsedOutput.body; + const deserialized = _json(body); + 
const exception = new TableInUseException({ + $metadata: deserializeMetadata(parsedOutput), + ...deserialized, + }); + return __decorateServiceException(exception, body); +}; +const de_TableNotFoundExceptionRes = async (parsedOutput, context) => { + const body = parsedOutput.body; + const deserialized = _json(body); + const exception = new TableNotFoundException({ + $metadata: deserializeMetadata(parsedOutput), + ...deserialized, + }); + return __decorateServiceException(exception, body); +}; +const de_TransactionCanceledExceptionRes = async (parsedOutput, context) => { + const body = parsedOutput.body; + const deserialized = de_TransactionCanceledException(body, context); + const exception = new TransactionCanceledException({ + $metadata: deserializeMetadata(parsedOutput), + ...deserialized, + }); + return __decorateServiceException(exception, body); +}; +const de_TransactionConflictExceptionRes = async (parsedOutput, context) => { + const body = parsedOutput.body; + const deserialized = _json(body); + const exception = new TransactionConflictException({ + $metadata: deserializeMetadata(parsedOutput), + ...deserialized, + }); + return __decorateServiceException(exception, body); +}; +const de_TransactionInProgressExceptionRes = async (parsedOutput, context) => { + const body = parsedOutput.body; + const deserialized = _json(body); + const exception = new TransactionInProgressException({ + $metadata: deserializeMetadata(parsedOutput), + ...deserialized, + }); + return __decorateServiceException(exception, body); +}; +const se_AttributeUpdates = (input, context) => { + return Object.entries(input).reduce((acc, [key, value]) => { + if (value === null) { + return acc; + } + acc[key] = se_AttributeValueUpdate(value, context); + return acc; + }, {}); +}; +const se_AttributeValue = (input, context) => { + return AttributeValue.visit(input, { + B: (value) => ({ B: context.base64Encoder(value) }), + BOOL: (value) => ({ BOOL: value }), + BS: (value) => ({ BS: 
se_BinarySetAttributeValue(value, context) }), + L: (value) => ({ L: se_ListAttributeValue(value, context) }), + M: (value) => ({ M: se_MapAttributeValue(value, context) }), + N: (value) => ({ N: value }), + NS: (value) => ({ NS: _json(value) }), + NULL: (value) => ({ NULL: value }), + S: (value) => ({ S: value }), + SS: (value) => ({ SS: _json(value) }), + _: (name, value) => ({ [name]: value }), + }); +}; +const se_AttributeValueList = (input, context) => { + return input + .filter((e) => e != null) + .map((entry) => { + return se_AttributeValue(entry, context); + }); +}; +const se_AttributeValueUpdate = (input, context) => { + return take(input, { + Action: [], + Value: (_) => se_AttributeValue(_, context), + }); +}; +const se_AutoScalingPolicyUpdate = (input, context) => { + return take(input, { + PolicyName: [], + TargetTrackingScalingPolicyConfiguration: (_) => se_AutoScalingTargetTrackingScalingPolicyConfigurationUpdate(_, context), + }); +}; +const se_AutoScalingSettingsUpdate = (input, context) => { + return take(input, { + AutoScalingDisabled: [], + AutoScalingRoleArn: [], + MaximumUnits: [], + MinimumUnits: [], + ScalingPolicyUpdate: (_) => se_AutoScalingPolicyUpdate(_, context), + }); +}; +const se_AutoScalingTargetTrackingScalingPolicyConfigurationUpdate = (input, context) => { + return take(input, { + DisableScaleIn: [], + ScaleInCooldown: [], + ScaleOutCooldown: [], + TargetValue: __serializeFloat, + }); +}; +const se_BatchExecuteStatementInput = (input, context) => { + return take(input, { + ReturnConsumedCapacity: [], + Statements: (_) => se_PartiQLBatchRequest(_, context), + }); +}; +const se_BatchGetItemInput = (input, context) => { + return take(input, { + RequestItems: (_) => se_BatchGetRequestMap(_, context), + ReturnConsumedCapacity: [], + }); +}; +const se_BatchGetRequestMap = (input, context) => { + return Object.entries(input).reduce((acc, [key, value]) => { + if (value === null) { + return acc; + } + acc[key] = se_KeysAndAttributes(value, 
context); + return acc; + }, {}); +}; +const se_BatchStatementRequest = (input, context) => { + return take(input, { + ConsistentRead: [], + Parameters: (_) => se_PreparedStatementParameters(_, context), + ReturnValuesOnConditionCheckFailure: [], + Statement: [], + }); +}; +const se_BatchWriteItemInput = (input, context) => { + return take(input, { + RequestItems: (_) => se_BatchWriteItemRequestMap(_, context), + ReturnConsumedCapacity: [], + ReturnItemCollectionMetrics: [], + }); +}; +const se_BatchWriteItemRequestMap = (input, context) => { + return Object.entries(input).reduce((acc, [key, value]) => { + if (value === null) { + return acc; + } + acc[key] = se_WriteRequests(value, context); + return acc; + }, {}); +}; +const se_BinarySetAttributeValue = (input, context) => { + return input + .filter((e) => e != null) + .map((entry) => { + return context.base64Encoder(entry); + }); +}; +const se_Condition = (input, context) => { + return take(input, { + AttributeValueList: (_) => se_AttributeValueList(_, context), + ComparisonOperator: [], + }); +}; +const se_ConditionCheck = (input, context) => { + return take(input, { + ConditionExpression: [], + ExpressionAttributeNames: _json, + ExpressionAttributeValues: (_) => se_ExpressionAttributeValueMap(_, context), + Key: (_) => se_Key(_, context), + ReturnValuesOnConditionCheckFailure: [], + TableName: [], + }); +}; +const se_Delete = (input, context) => { + return take(input, { + ConditionExpression: [], + ExpressionAttributeNames: _json, + ExpressionAttributeValues: (_) => se_ExpressionAttributeValueMap(_, context), + Key: (_) => se_Key(_, context), + ReturnValuesOnConditionCheckFailure: [], + TableName: [], + }); +}; +const se_DeleteItemInput = (input, context) => { + return take(input, { + ConditionExpression: [], + ConditionalOperator: [], + Expected: (_) => se_ExpectedAttributeMap(_, context), + ExpressionAttributeNames: _json, + ExpressionAttributeValues: (_) => se_ExpressionAttributeValueMap(_, context), + Key: 
(_) => se_Key(_, context), + ReturnConsumedCapacity: [], + ReturnItemCollectionMetrics: [], + ReturnValues: [], + ReturnValuesOnConditionCheckFailure: [], + TableName: [], + }); +}; +const se_DeleteRequest = (input, context) => { + return take(input, { + Key: (_) => se_Key(_, context), + }); +}; +const se_ExecuteStatementInput = (input, context) => { + return take(input, { + ConsistentRead: [], + Limit: [], + NextToken: [], + Parameters: (_) => se_PreparedStatementParameters(_, context), + ReturnConsumedCapacity: [], + ReturnValuesOnConditionCheckFailure: [], + Statement: [], + }); +}; +const se_ExecuteTransactionInput = (input, context) => { + return take(input, { + ClientRequestToken: [true, (_) => _ ?? generateIdempotencyToken()], + ReturnConsumedCapacity: [], + TransactStatements: (_) => se_ParameterizedStatements(_, context), + }); +}; +const se_ExpectedAttributeMap = (input, context) => { + return Object.entries(input).reduce((acc, [key, value]) => { + if (value === null) { + return acc; + } + acc[key] = se_ExpectedAttributeValue(value, context); + return acc; + }, {}); +}; +const se_ExpectedAttributeValue = (input, context) => { + return take(input, { + AttributeValueList: (_) => se_AttributeValueList(_, context), + ComparisonOperator: [], + Exists: [], + Value: (_) => se_AttributeValue(_, context), + }); +}; +const se_ExportTableToPointInTimeInput = (input, context) => { + return take(input, { + ClientToken: [true, (_) => _ ?? 
generateIdempotencyToken()], + ExportFormat: [], + ExportTime: (_) => _.getTime() / 1000, + ExportType: [], + IncrementalExportSpecification: (_) => se_IncrementalExportSpecification(_, context), + S3Bucket: [], + S3BucketOwner: [], + S3Prefix: [], + S3SseAlgorithm: [], + S3SseKmsKeyId: [], + TableArn: [], + }); +}; +const se_ExpressionAttributeValueMap = (input, context) => { + return Object.entries(input).reduce((acc, [key, value]) => { + if (value === null) { + return acc; + } + acc[key] = se_AttributeValue(value, context); + return acc; + }, {}); +}; +const se_FilterConditionMap = (input, context) => { + return Object.entries(input).reduce((acc, [key, value]) => { + if (value === null) { + return acc; + } + acc[key] = se_Condition(value, context); + return acc; + }, {}); +}; +const se_Get = (input, context) => { + return take(input, { + ExpressionAttributeNames: _json, + Key: (_) => se_Key(_, context), + ProjectionExpression: [], + TableName: [], + }); +}; +const se_GetItemInput = (input, context) => { + return take(input, { + AttributesToGet: _json, + ConsistentRead: [], + ExpressionAttributeNames: _json, + Key: (_) => se_Key(_, context), + ProjectionExpression: [], + ReturnConsumedCapacity: [], + TableName: [], + }); +}; +const se_GlobalSecondaryIndexAutoScalingUpdate = (input, context) => { + return take(input, { + IndexName: [], + ProvisionedWriteCapacityAutoScalingUpdate: (_) => se_AutoScalingSettingsUpdate(_, context), + }); +}; +const se_GlobalSecondaryIndexAutoScalingUpdateList = (input, context) => { + return input + .filter((e) => e != null) + .map((entry) => { + return se_GlobalSecondaryIndexAutoScalingUpdate(entry, context); + }); +}; +const se_GlobalTableGlobalSecondaryIndexSettingsUpdate = (input, context) => { + return take(input, { + IndexName: [], + ProvisionedWriteCapacityAutoScalingSettingsUpdate: (_) => se_AutoScalingSettingsUpdate(_, context), + ProvisionedWriteCapacityUnits: [], + }); +}; +const 
se_GlobalTableGlobalSecondaryIndexSettingsUpdateList = (input, context) => { + return input + .filter((e) => e != null) + .map((entry) => { + return se_GlobalTableGlobalSecondaryIndexSettingsUpdate(entry, context); + }); +}; +const se_ImportTableInput = (input, context) => { + return take(input, { + ClientToken: [true, (_) => _ ?? generateIdempotencyToken()], + InputCompressionType: [], + InputFormat: [], + InputFormatOptions: _json, + S3BucketSource: _json, + TableCreationParameters: _json, + }); +}; +const se_IncrementalExportSpecification = (input, context) => { + return take(input, { + ExportFromTime: (_) => _.getTime() / 1000, + ExportToTime: (_) => _.getTime() / 1000, + ExportViewType: [], + }); +}; +const se_Key = (input, context) => { + return Object.entries(input).reduce((acc, [key, value]) => { + if (value === null) { + return acc; + } + acc[key] = se_AttributeValue(value, context); + return acc; + }, {}); +}; +const se_KeyConditions = (input, context) => { + return Object.entries(input).reduce((acc, [key, value]) => { + if (value === null) { + return acc; + } + acc[key] = se_Condition(value, context); + return acc; + }, {}); +}; +const se_KeyList = (input, context) => { + return input + .filter((e) => e != null) + .map((entry) => { + return se_Key(entry, context); + }); +}; +const se_KeysAndAttributes = (input, context) => { + return take(input, { + AttributesToGet: _json, + ConsistentRead: [], + ExpressionAttributeNames: _json, + Keys: (_) => se_KeyList(_, context), + ProjectionExpression: [], + }); +}; +const se_ListAttributeValue = (input, context) => { + return input + .filter((e) => e != null) + .map((entry) => { + return se_AttributeValue(entry, context); + }); +}; +const se_ListBackupsInput = (input, context) => { + return take(input, { + BackupType: [], + ExclusiveStartBackupArn: [], + Limit: [], + TableName: [], + TimeRangeLowerBound: (_) => _.getTime() / 1000, + TimeRangeUpperBound: (_) => _.getTime() / 1000, + }); +}; +const 
se_MapAttributeValue = (input, context) => { + return Object.entries(input).reduce((acc, [key, value]) => { + if (value === null) { + return acc; + } + acc[key] = se_AttributeValue(value, context); + return acc; + }, {}); +}; +const se_ParameterizedStatement = (input, context) => { + return take(input, { + Parameters: (_) => se_PreparedStatementParameters(_, context), + ReturnValuesOnConditionCheckFailure: [], + Statement: [], + }); +}; +const se_ParameterizedStatements = (input, context) => { + return input + .filter((e) => e != null) + .map((entry) => { + return se_ParameterizedStatement(entry, context); + }); +}; +const se_PartiQLBatchRequest = (input, context) => { + return input + .filter((e) => e != null) + .map((entry) => { + return se_BatchStatementRequest(entry, context); + }); +}; +const se_PreparedStatementParameters = (input, context) => { + return input + .filter((e) => e != null) + .map((entry) => { + return se_AttributeValue(entry, context); + }); +}; +const se_Put = (input, context) => { + return take(input, { + ConditionExpression: [], + ExpressionAttributeNames: _json, + ExpressionAttributeValues: (_) => se_ExpressionAttributeValueMap(_, context), + Item: (_) => se_PutItemInputAttributeMap(_, context), + ReturnValuesOnConditionCheckFailure: [], + TableName: [], + }); +}; +const se_PutItemInput = (input, context) => { + return take(input, { + ConditionExpression: [], + ConditionalOperator: [], + Expected: (_) => se_ExpectedAttributeMap(_, context), + ExpressionAttributeNames: _json, + ExpressionAttributeValues: (_) => se_ExpressionAttributeValueMap(_, context), + Item: (_) => se_PutItemInputAttributeMap(_, context), + ReturnConsumedCapacity: [], + ReturnItemCollectionMetrics: [], + ReturnValues: [], + ReturnValuesOnConditionCheckFailure: [], + TableName: [], + }); +}; +const se_PutItemInputAttributeMap = (input, context) => { + return Object.entries(input).reduce((acc, [key, value]) => { + if (value === null) { + return acc; + } + acc[key] = 
se_AttributeValue(value, context); + return acc; + }, {}); +}; +const se_PutRequest = (input, context) => { + return take(input, { + Item: (_) => se_PutItemInputAttributeMap(_, context), + }); +}; +const se_QueryInput = (input, context) => { + return take(input, { + AttributesToGet: _json, + ConditionalOperator: [], + ConsistentRead: [], + ExclusiveStartKey: (_) => se_Key(_, context), + ExpressionAttributeNames: _json, + ExpressionAttributeValues: (_) => se_ExpressionAttributeValueMap(_, context), + FilterExpression: [], + IndexName: [], + KeyConditionExpression: [], + KeyConditions: (_) => se_KeyConditions(_, context), + Limit: [], + ProjectionExpression: [], + QueryFilter: (_) => se_FilterConditionMap(_, context), + ReturnConsumedCapacity: [], + ScanIndexForward: [], + Select: [], + TableName: [], + }); +}; +const se_ReplicaAutoScalingUpdate = (input, context) => { + return take(input, { + RegionName: [], + ReplicaGlobalSecondaryIndexUpdates: (_) => se_ReplicaGlobalSecondaryIndexAutoScalingUpdateList(_, context), + ReplicaProvisionedReadCapacityAutoScalingUpdate: (_) => se_AutoScalingSettingsUpdate(_, context), + }); +}; +const se_ReplicaAutoScalingUpdateList = (input, context) => { + return input + .filter((e) => e != null) + .map((entry) => { + return se_ReplicaAutoScalingUpdate(entry, context); + }); +}; +const se_ReplicaGlobalSecondaryIndexAutoScalingUpdate = (input, context) => { + return take(input, { + IndexName: [], + ProvisionedReadCapacityAutoScalingUpdate: (_) => se_AutoScalingSettingsUpdate(_, context), + }); +}; +const se_ReplicaGlobalSecondaryIndexAutoScalingUpdateList = (input, context) => { + return input + .filter((e) => e != null) + .map((entry) => { + return se_ReplicaGlobalSecondaryIndexAutoScalingUpdate(entry, context); + }); +}; +const se_ReplicaGlobalSecondaryIndexSettingsUpdate = (input, context) => { + return take(input, { + IndexName: [], + ProvisionedReadCapacityAutoScalingSettingsUpdate: (_) => se_AutoScalingSettingsUpdate(_, context), 
+ ProvisionedReadCapacityUnits: [], + }); +}; +const se_ReplicaGlobalSecondaryIndexSettingsUpdateList = (input, context) => { + return input + .filter((e) => e != null) + .map((entry) => { + return se_ReplicaGlobalSecondaryIndexSettingsUpdate(entry, context); + }); +}; +const se_ReplicaSettingsUpdate = (input, context) => { + return take(input, { + RegionName: [], + ReplicaGlobalSecondaryIndexSettingsUpdate: (_) => se_ReplicaGlobalSecondaryIndexSettingsUpdateList(_, context), + ReplicaProvisionedReadCapacityAutoScalingSettingsUpdate: (_) => se_AutoScalingSettingsUpdate(_, context), + ReplicaProvisionedReadCapacityUnits: [], + ReplicaTableClass: [], + }); +}; +const se_ReplicaSettingsUpdateList = (input, context) => { + return input + .filter((e) => e != null) + .map((entry) => { + return se_ReplicaSettingsUpdate(entry, context); + }); +}; +const se_RestoreTableToPointInTimeInput = (input, context) => { + return take(input, { + BillingModeOverride: [], + GlobalSecondaryIndexOverride: _json, + LocalSecondaryIndexOverride: _json, + OnDemandThroughputOverride: _json, + ProvisionedThroughputOverride: _json, + RestoreDateTime: (_) => _.getTime() / 1000, + SSESpecificationOverride: _json, + SourceTableArn: [], + SourceTableName: [], + TargetTableName: [], + UseLatestRestorableTime: [], + }); +}; +const se_ScanInput = (input, context) => { + return take(input, { + AttributesToGet: _json, + ConditionalOperator: [], + ConsistentRead: [], + ExclusiveStartKey: (_) => se_Key(_, context), + ExpressionAttributeNames: _json, + ExpressionAttributeValues: (_) => se_ExpressionAttributeValueMap(_, context), + FilterExpression: [], + IndexName: [], + Limit: [], + ProjectionExpression: [], + ReturnConsumedCapacity: [], + ScanFilter: (_) => se_FilterConditionMap(_, context), + Segment: [], + Select: [], + TableName: [], + TotalSegments: [], + }); +}; +const se_TransactGetItem = (input, context) => { + return take(input, { + Get: (_) => se_Get(_, context), + }); +}; +const 
se_TransactGetItemList = (input, context) => { + return input + .filter((e) => e != null) + .map((entry) => { + return se_TransactGetItem(entry, context); + }); +}; +const se_TransactGetItemsInput = (input, context) => { + return take(input, { + ReturnConsumedCapacity: [], + TransactItems: (_) => se_TransactGetItemList(_, context), + }); +}; +const se_TransactWriteItem = (input, context) => { + return take(input, { + ConditionCheck: (_) => se_ConditionCheck(_, context), + Delete: (_) => se_Delete(_, context), + Put: (_) => se_Put(_, context), + Update: (_) => se_Update(_, context), + }); +}; +const se_TransactWriteItemList = (input, context) => { + return input + .filter((e) => e != null) + .map((entry) => { + return se_TransactWriteItem(entry, context); + }); +}; +const se_TransactWriteItemsInput = (input, context) => { + return take(input, { + ClientRequestToken: [true, (_) => _ ?? generateIdempotencyToken()], + ReturnConsumedCapacity: [], + ReturnItemCollectionMetrics: [], + TransactItems: (_) => se_TransactWriteItemList(_, context), + }); +}; +const se_Update = (input, context) => { + return take(input, { + ConditionExpression: [], + ExpressionAttributeNames: _json, + ExpressionAttributeValues: (_) => se_ExpressionAttributeValueMap(_, context), + Key: (_) => se_Key(_, context), + ReturnValuesOnConditionCheckFailure: [], + TableName: [], + UpdateExpression: [], + }); +}; +const se_UpdateGlobalTableSettingsInput = (input, context) => { + return take(input, { + GlobalTableBillingMode: [], + GlobalTableGlobalSecondaryIndexSettingsUpdate: (_) => se_GlobalTableGlobalSecondaryIndexSettingsUpdateList(_, context), + GlobalTableName: [], + GlobalTableProvisionedWriteCapacityAutoScalingSettingsUpdate: (_) => se_AutoScalingSettingsUpdate(_, context), + GlobalTableProvisionedWriteCapacityUnits: [], + ReplicaSettingsUpdate: (_) => se_ReplicaSettingsUpdateList(_, context), + }); +}; +const se_UpdateItemInput = (input, context) => { + return take(input, { + AttributeUpdates: 
(_) => se_AttributeUpdates(_, context), + ConditionExpression: [], + ConditionalOperator: [], + Expected: (_) => se_ExpectedAttributeMap(_, context), + ExpressionAttributeNames: _json, + ExpressionAttributeValues: (_) => se_ExpressionAttributeValueMap(_, context), + Key: (_) => se_Key(_, context), + ReturnConsumedCapacity: [], + ReturnItemCollectionMetrics: [], + ReturnValues: [], + ReturnValuesOnConditionCheckFailure: [], + TableName: [], + UpdateExpression: [], + }); +}; +const se_UpdateTableReplicaAutoScalingInput = (input, context) => { + return take(input, { + GlobalSecondaryIndexUpdates: (_) => se_GlobalSecondaryIndexAutoScalingUpdateList(_, context), + ProvisionedWriteCapacityAutoScalingUpdate: (_) => se_AutoScalingSettingsUpdate(_, context), + ReplicaUpdates: (_) => se_ReplicaAutoScalingUpdateList(_, context), + TableName: [], + }); +}; +const se_WriteRequest = (input, context) => { + return take(input, { + DeleteRequest: (_) => se_DeleteRequest(_, context), + PutRequest: (_) => se_PutRequest(_, context), + }); +}; +const se_WriteRequests = (input, context) => { + return input + .filter((e) => e != null) + .map((entry) => { + return se_WriteRequest(entry, context); + }); +}; +const de_ArchivalSummary = (output, context) => { + return take(output, { + ArchivalBackupArn: __expectString, + ArchivalDateTime: (_) => __expectNonNull(__parseEpochTimestamp(__expectNumber(_))), + ArchivalReason: __expectString, + }); +}; +const de_AttributeMap = (output, context) => { + return Object.entries(output).reduce((acc, [key, value]) => { + if (value === null) { + return acc; + } + acc[key] = de_AttributeValue(__expectUnion(value), context); + return acc; + }, {}); +}; +const de_AttributeValue = (output, context) => { + if (output.B != null) { + return { + B: context.base64Decoder(output.B), + }; + } + if (__expectBoolean(output.BOOL) !== undefined) { + return { BOOL: __expectBoolean(output.BOOL) }; + } + if (output.BS != null) { + return { + BS: 
de_BinarySetAttributeValue(output.BS, context), + }; + } + if (output.L != null) { + return { + L: de_ListAttributeValue(output.L, context), + }; + } + if (output.M != null) { + return { + M: de_MapAttributeValue(output.M, context), + }; + } + if (__expectString(output.N) !== undefined) { + return { N: __expectString(output.N) }; + } + if (output.NS != null) { + return { + NS: _json(output.NS), + }; + } + if (__expectBoolean(output.NULL) !== undefined) { + return { NULL: __expectBoolean(output.NULL) }; + } + if (__expectString(output.S) !== undefined) { + return { S: __expectString(output.S) }; + } + if (output.SS != null) { + return { + SS: _json(output.SS), + }; + } + return { $unknown: Object.entries(output)[0] }; +}; +const de_AutoScalingPolicyDescription = (output, context) => { + return take(output, { + PolicyName: __expectString, + TargetTrackingScalingPolicyConfiguration: (_) => de_AutoScalingTargetTrackingScalingPolicyConfigurationDescription(_, context), + }); +}; +const de_AutoScalingPolicyDescriptionList = (output, context) => { + const retVal = (output || []) + .filter((e) => e != null) + .map((entry) => { + return de_AutoScalingPolicyDescription(entry, context); + }); + return retVal; +}; +const de_AutoScalingSettingsDescription = (output, context) => { + return take(output, { + AutoScalingDisabled: __expectBoolean, + AutoScalingRoleArn: __expectString, + MaximumUnits: __expectLong, + MinimumUnits: __expectLong, + ScalingPolicies: (_) => de_AutoScalingPolicyDescriptionList(_, context), + }); +}; +const de_AutoScalingTargetTrackingScalingPolicyConfigurationDescription = (output, context) => { + return take(output, { + DisableScaleIn: __expectBoolean, + ScaleInCooldown: __expectInt32, + ScaleOutCooldown: __expectInt32, + TargetValue: __limitedParseDouble, + }); +}; +const de_BackupDescription = (output, context) => { + return take(output, { + BackupDetails: (_) => de_BackupDetails(_, context), + SourceTableDetails: (_) => de_SourceTableDetails(_, 
context), + SourceTableFeatureDetails: (_) => de_SourceTableFeatureDetails(_, context), + }); +}; +const de_BackupDetails = (output, context) => { + return take(output, { + BackupArn: __expectString, + BackupCreationDateTime: (_) => __expectNonNull(__parseEpochTimestamp(__expectNumber(_))), + BackupExpiryDateTime: (_) => __expectNonNull(__parseEpochTimestamp(__expectNumber(_))), + BackupName: __expectString, + BackupSizeBytes: __expectLong, + BackupStatus: __expectString, + BackupType: __expectString, + }); +}; +const de_BackupSummaries = (output, context) => { + const retVal = (output || []) + .filter((e) => e != null) + .map((entry) => { + return de_BackupSummary(entry, context); + }); + return retVal; +}; +const de_BackupSummary = (output, context) => { + return take(output, { + BackupArn: __expectString, + BackupCreationDateTime: (_) => __expectNonNull(__parseEpochTimestamp(__expectNumber(_))), + BackupExpiryDateTime: (_) => __expectNonNull(__parseEpochTimestamp(__expectNumber(_))), + BackupName: __expectString, + BackupSizeBytes: __expectLong, + BackupStatus: __expectString, + BackupType: __expectString, + TableArn: __expectString, + TableId: __expectString, + TableName: __expectString, + }); +}; +const de_BatchExecuteStatementOutput = (output, context) => { + return take(output, { + ConsumedCapacity: (_) => de_ConsumedCapacityMultiple(_, context), + Responses: (_) => de_PartiQLBatchResponse(_, context), + }); +}; +const de_BatchGetItemOutput = (output, context) => { + return take(output, { + ConsumedCapacity: (_) => de_ConsumedCapacityMultiple(_, context), + Responses: (_) => de_BatchGetResponseMap(_, context), + UnprocessedKeys: (_) => de_BatchGetRequestMap(_, context), + }); +}; +const de_BatchGetRequestMap = (output, context) => { + return Object.entries(output).reduce((acc, [key, value]) => { + if (value === null) { + return acc; + } + acc[key] = de_KeysAndAttributes(value, context); + return acc; + }, {}); +}; +const de_BatchGetResponseMap = (output, 
context) => { + return Object.entries(output).reduce((acc, [key, value]) => { + if (value === null) { + return acc; + } + acc[key] = de_ItemList(value, context); + return acc; + }, {}); +}; +const de_BatchStatementError = (output, context) => { + return take(output, { + Code: __expectString, + Item: (_) => de_AttributeMap(_, context), + Message: __expectString, + }); +}; +const de_BatchStatementResponse = (output, context) => { + return take(output, { + Error: (_) => de_BatchStatementError(_, context), + Item: (_) => de_AttributeMap(_, context), + TableName: __expectString, + }); +}; +const de_BatchWriteItemOutput = (output, context) => { + return take(output, { + ConsumedCapacity: (_) => de_ConsumedCapacityMultiple(_, context), + ItemCollectionMetrics: (_) => de_ItemCollectionMetricsPerTable(_, context), + UnprocessedItems: (_) => de_BatchWriteItemRequestMap(_, context), + }); +}; +const de_BatchWriteItemRequestMap = (output, context) => { + return Object.entries(output).reduce((acc, [key, value]) => { + if (value === null) { + return acc; + } + acc[key] = de_WriteRequests(value, context); + return acc; + }, {}); +}; +const de_BillingModeSummary = (output, context) => { + return take(output, { + BillingMode: __expectString, + LastUpdateToPayPerRequestDateTime: (_) => __expectNonNull(__parseEpochTimestamp(__expectNumber(_))), + }); +}; +const de_BinarySetAttributeValue = (output, context) => { + const retVal = (output || []) + .filter((e) => e != null) + .map((entry) => { + return context.base64Decoder(entry); + }); + return retVal; +}; +const de_CancellationReason = (output, context) => { + return take(output, { + Code: __expectString, + Item: (_) => de_AttributeMap(_, context), + Message: __expectString, + }); +}; +const de_CancellationReasonList = (output, context) => { + const retVal = (output || []) + .filter((e) => e != null) + .map((entry) => { + return de_CancellationReason(entry, context); + }); + return retVal; +}; +const de_Capacity = (output, context) 
=> { + return take(output, { + CapacityUnits: __limitedParseDouble, + ReadCapacityUnits: __limitedParseDouble, + WriteCapacityUnits: __limitedParseDouble, + }); +}; +const de_ConditionalCheckFailedException = (output, context) => { + return take(output, { + Item: (_) => de_AttributeMap(_, context), + message: __expectString, + }); +}; +const de_ConsumedCapacity = (output, context) => { + return take(output, { + CapacityUnits: __limitedParseDouble, + GlobalSecondaryIndexes: (_) => de_SecondaryIndexesCapacityMap(_, context), + LocalSecondaryIndexes: (_) => de_SecondaryIndexesCapacityMap(_, context), + ReadCapacityUnits: __limitedParseDouble, + Table: (_) => de_Capacity(_, context), + TableName: __expectString, + WriteCapacityUnits: __limitedParseDouble, + }); +}; +const de_ConsumedCapacityMultiple = (output, context) => { + const retVal = (output || []) + .filter((e) => e != null) + .map((entry) => { + return de_ConsumedCapacity(entry, context); + }); + return retVal; +}; +const de_ContinuousBackupsDescription = (output, context) => { + return take(output, { + ContinuousBackupsStatus: __expectString, + PointInTimeRecoveryDescription: (_) => de_PointInTimeRecoveryDescription(_, context), + }); +}; +const de_CreateBackupOutput = (output, context) => { + return take(output, { + BackupDetails: (_) => de_BackupDetails(_, context), + }); +}; +const de_CreateGlobalTableOutput = (output, context) => { + return take(output, { + GlobalTableDescription: (_) => de_GlobalTableDescription(_, context), + }); +}; +const de_CreateTableOutput = (output, context) => { + return take(output, { + TableDescription: (_) => de_TableDescription(_, context), + }); +}; +const de_DeleteBackupOutput = (output, context) => { + return take(output, { + BackupDescription: (_) => de_BackupDescription(_, context), + }); +}; +const de_DeleteItemOutput = (output, context) => { + return take(output, { + Attributes: (_) => de_AttributeMap(_, context), + ConsumedCapacity: (_) => de_ConsumedCapacity(_, 
context), + ItemCollectionMetrics: (_) => de_ItemCollectionMetrics(_, context), + }); +}; +const de_DeleteRequest = (output, context) => { + return take(output, { + Key: (_) => de_Key(_, context), + }); +}; +const de_DeleteTableOutput = (output, context) => { + return take(output, { + TableDescription: (_) => de_TableDescription(_, context), + }); +}; +const de_DescribeBackupOutput = (output, context) => { + return take(output, { + BackupDescription: (_) => de_BackupDescription(_, context), + }); +}; +const de_DescribeContinuousBackupsOutput = (output, context) => { + return take(output, { + ContinuousBackupsDescription: (_) => de_ContinuousBackupsDescription(_, context), + }); +}; +const de_DescribeContributorInsightsOutput = (output, context) => { + return take(output, { + ContributorInsightsRuleList: _json, + ContributorInsightsStatus: __expectString, + FailureException: _json, + IndexName: __expectString, + LastUpdateDateTime: (_) => __expectNonNull(__parseEpochTimestamp(__expectNumber(_))), + TableName: __expectString, + }); +}; +const de_DescribeExportOutput = (output, context) => { + return take(output, { + ExportDescription: (_) => de_ExportDescription(_, context), + }); +}; +const de_DescribeGlobalTableOutput = (output, context) => { + return take(output, { + GlobalTableDescription: (_) => de_GlobalTableDescription(_, context), + }); +}; +const de_DescribeGlobalTableSettingsOutput = (output, context) => { + return take(output, { + GlobalTableName: __expectString, + ReplicaSettings: (_) => de_ReplicaSettingsDescriptionList(_, context), + }); +}; +const de_DescribeImportOutput = (output, context) => { + return take(output, { + ImportTableDescription: (_) => de_ImportTableDescription(_, context), + }); +}; +const de_DescribeTableOutput = (output, context) => { + return take(output, { + Table: (_) => de_TableDescription(_, context), + }); +}; +const de_DescribeTableReplicaAutoScalingOutput = (output, context) => { + return take(output, { + 
TableAutoScalingDescription: (_) => de_TableAutoScalingDescription(_, context), + }); +}; +const de_ExecuteStatementOutput = (output, context) => { + return take(output, { + ConsumedCapacity: (_) => de_ConsumedCapacity(_, context), + Items: (_) => de_ItemList(_, context), + LastEvaluatedKey: (_) => de_Key(_, context), + NextToken: __expectString, + }); +}; +const de_ExecuteTransactionOutput = (output, context) => { + return take(output, { + ConsumedCapacity: (_) => de_ConsumedCapacityMultiple(_, context), + Responses: (_) => de_ItemResponseList(_, context), + }); +}; +const de_ExportDescription = (output, context) => { + return take(output, { + BilledSizeBytes: __expectLong, + ClientToken: __expectString, + EndTime: (_) => __expectNonNull(__parseEpochTimestamp(__expectNumber(_))), + ExportArn: __expectString, + ExportFormat: __expectString, + ExportManifest: __expectString, + ExportStatus: __expectString, + ExportTime: (_) => __expectNonNull(__parseEpochTimestamp(__expectNumber(_))), + ExportType: __expectString, + FailureCode: __expectString, + FailureMessage: __expectString, + IncrementalExportSpecification: (_) => de_IncrementalExportSpecification(_, context), + ItemCount: __expectLong, + S3Bucket: __expectString, + S3BucketOwner: __expectString, + S3Prefix: __expectString, + S3SseAlgorithm: __expectString, + S3SseKmsKeyId: __expectString, + StartTime: (_) => __expectNonNull(__parseEpochTimestamp(__expectNumber(_))), + TableArn: __expectString, + TableId: __expectString, + }); +}; +const de_ExportTableToPointInTimeOutput = (output, context) => { + return take(output, { + ExportDescription: (_) => de_ExportDescription(_, context), + }); +}; +const de_GetItemOutput = (output, context) => { + return take(output, { + ConsumedCapacity: (_) => de_ConsumedCapacity(_, context), + Item: (_) => de_AttributeMap(_, context), + }); +}; +const de_GlobalSecondaryIndexDescription = (output, context) => { + return take(output, { + Backfilling: __expectBoolean, + IndexArn: 
__expectString, + IndexName: __expectString, + IndexSizeBytes: __expectLong, + IndexStatus: __expectString, + ItemCount: __expectLong, + KeySchema: _json, + OnDemandThroughput: _json, + Projection: _json, + ProvisionedThroughput: (_) => de_ProvisionedThroughputDescription(_, context), + WarmThroughput: _json, + }); +}; +const de_GlobalSecondaryIndexDescriptionList = (output, context) => { + const retVal = (output || []) + .filter((e) => e != null) + .map((entry) => { + return de_GlobalSecondaryIndexDescription(entry, context); + }); + return retVal; +}; +const de_GlobalTableDescription = (output, context) => { + return take(output, { + CreationDateTime: (_) => __expectNonNull(__parseEpochTimestamp(__expectNumber(_))), + GlobalTableArn: __expectString, + GlobalTableName: __expectString, + GlobalTableStatus: __expectString, + ReplicationGroup: (_) => de_ReplicaDescriptionList(_, context), + }); +}; +const de_ImportSummary = (output, context) => { + return take(output, { + CloudWatchLogGroupArn: __expectString, + EndTime: (_) => __expectNonNull(__parseEpochTimestamp(__expectNumber(_))), + ImportArn: __expectString, + ImportStatus: __expectString, + InputFormat: __expectString, + S3BucketSource: _json, + StartTime: (_) => __expectNonNull(__parseEpochTimestamp(__expectNumber(_))), + TableArn: __expectString, + }); +}; +const de_ImportSummaryList = (output, context) => { + const retVal = (output || []) + .filter((e) => e != null) + .map((entry) => { + return de_ImportSummary(entry, context); + }); + return retVal; +}; +const de_ImportTableDescription = (output, context) => { + return take(output, { + ClientToken: __expectString, + CloudWatchLogGroupArn: __expectString, + EndTime: (_) => __expectNonNull(__parseEpochTimestamp(__expectNumber(_))), + ErrorCount: __expectLong, + FailureCode: __expectString, + FailureMessage: __expectString, + ImportArn: __expectString, + ImportStatus: __expectString, + ImportedItemCount: __expectLong, + InputCompressionType: __expectString, + 
InputFormat: __expectString, + InputFormatOptions: _json, + ProcessedItemCount: __expectLong, + ProcessedSizeBytes: __expectLong, + S3BucketSource: _json, + StartTime: (_) => __expectNonNull(__parseEpochTimestamp(__expectNumber(_))), + TableArn: __expectString, + TableCreationParameters: _json, + TableId: __expectString, + }); +}; +const de_ImportTableOutput = (output, context) => { + return take(output, { + ImportTableDescription: (_) => de_ImportTableDescription(_, context), + }); +}; +const de_IncrementalExportSpecification = (output, context) => { + return take(output, { + ExportFromTime: (_) => __expectNonNull(__parseEpochTimestamp(__expectNumber(_))), + ExportToTime: (_) => __expectNonNull(__parseEpochTimestamp(__expectNumber(_))), + ExportViewType: __expectString, + }); +}; +const de_ItemCollectionKeyAttributeMap = (output, context) => { + return Object.entries(output).reduce((acc, [key, value]) => { + if (value === null) { + return acc; + } + acc[key] = de_AttributeValue(__expectUnion(value), context); + return acc; + }, {}); +}; +const de_ItemCollectionMetrics = (output, context) => { + return take(output, { + ItemCollectionKey: (_) => de_ItemCollectionKeyAttributeMap(_, context), + SizeEstimateRangeGB: (_) => de_ItemCollectionSizeEstimateRange(_, context), + }); +}; +const de_ItemCollectionMetricsMultiple = (output, context) => { + const retVal = (output || []) + .filter((e) => e != null) + .map((entry) => { + return de_ItemCollectionMetrics(entry, context); + }); + return retVal; +}; +const de_ItemCollectionMetricsPerTable = (output, context) => { + return Object.entries(output).reduce((acc, [key, value]) => { + if (value === null) { + return acc; + } + acc[key] = de_ItemCollectionMetricsMultiple(value, context); + return acc; + }, {}); +}; +const de_ItemCollectionSizeEstimateRange = (output, context) => { + const retVal = (output || []) + .filter((e) => e != null) + .map((entry) => { + return __limitedParseDouble(entry); + }); + return retVal; +}; 
+const de_ItemList = (output, context) => { + const retVal = (output || []) + .filter((e) => e != null) + .map((entry) => { + return de_AttributeMap(entry, context); + }); + return retVal; +}; +const de_ItemResponse = (output, context) => { + return take(output, { + Item: (_) => de_AttributeMap(_, context), + }); +}; +const de_ItemResponseList = (output, context) => { + const retVal = (output || []) + .filter((e) => e != null) + .map((entry) => { + return de_ItemResponse(entry, context); + }); + return retVal; +}; +const de_Key = (output, context) => { + return Object.entries(output).reduce((acc, [key, value]) => { + if (value === null) { + return acc; + } + acc[key] = de_AttributeValue(__expectUnion(value), context); + return acc; + }, {}); +}; +const de_KeyList = (output, context) => { + const retVal = (output || []) + .filter((e) => e != null) + .map((entry) => { + return de_Key(entry, context); + }); + return retVal; +}; +const de_KeysAndAttributes = (output, context) => { + return take(output, { + AttributesToGet: _json, + ConsistentRead: __expectBoolean, + ExpressionAttributeNames: _json, + Keys: (_) => de_KeyList(_, context), + ProjectionExpression: __expectString, + }); +}; +const de_ListAttributeValue = (output, context) => { + const retVal = (output || []) + .filter((e) => e != null) + .map((entry) => { + return de_AttributeValue(__expectUnion(entry), context); + }); + return retVal; +}; +const de_ListBackupsOutput = (output, context) => { + return take(output, { + BackupSummaries: (_) => de_BackupSummaries(_, context), + LastEvaluatedBackupArn: __expectString, + }); +}; +const de_ListImportsOutput = (output, context) => { + return take(output, { + ImportSummaryList: (_) => de_ImportSummaryList(_, context), + NextToken: __expectString, + }); +}; +const de_MapAttributeValue = (output, context) => { + return Object.entries(output).reduce((acc, [key, value]) => { + if (value === null) { + return acc; + } + acc[key] = de_AttributeValue(__expectUnion(value), 
context); + return acc; + }, {}); +}; +const de_PartiQLBatchResponse = (output, context) => { + const retVal = (output || []) + .filter((e) => e != null) + .map((entry) => { + return de_BatchStatementResponse(entry, context); + }); + return retVal; +}; +const de_PointInTimeRecoveryDescription = (output, context) => { + return take(output, { + EarliestRestorableDateTime: (_) => __expectNonNull(__parseEpochTimestamp(__expectNumber(_))), + LatestRestorableDateTime: (_) => __expectNonNull(__parseEpochTimestamp(__expectNumber(_))), + PointInTimeRecoveryStatus: __expectString, + RecoveryPeriodInDays: __expectInt32, + }); +}; +const de_ProvisionedThroughputDescription = (output, context) => { + return take(output, { + LastDecreaseDateTime: (_) => __expectNonNull(__parseEpochTimestamp(__expectNumber(_))), + LastIncreaseDateTime: (_) => __expectNonNull(__parseEpochTimestamp(__expectNumber(_))), + NumberOfDecreasesToday: __expectLong, + ReadCapacityUnits: __expectLong, + WriteCapacityUnits: __expectLong, + }); +}; +const de_PutItemInputAttributeMap = (output, context) => { + return Object.entries(output).reduce((acc, [key, value]) => { + if (value === null) { + return acc; + } + acc[key] = de_AttributeValue(__expectUnion(value), context); + return acc; + }, {}); +}; +const de_PutItemOutput = (output, context) => { + return take(output, { + Attributes: (_) => de_AttributeMap(_, context), + ConsumedCapacity: (_) => de_ConsumedCapacity(_, context), + ItemCollectionMetrics: (_) => de_ItemCollectionMetrics(_, context), + }); +}; +const de_PutRequest = (output, context) => { + return take(output, { + Item: (_) => de_PutItemInputAttributeMap(_, context), + }); +}; +const de_QueryOutput = (output, context) => { + return take(output, { + ConsumedCapacity: (_) => de_ConsumedCapacity(_, context), + Count: __expectInt32, + Items: (_) => de_ItemList(_, context), + LastEvaluatedKey: (_) => de_Key(_, context), + ScannedCount: __expectInt32, + }); +}; +const de_ReplicaAutoScalingDescription 
= (output, context) => { + return take(output, { + GlobalSecondaryIndexes: (_) => de_ReplicaGlobalSecondaryIndexAutoScalingDescriptionList(_, context), + RegionName: __expectString, + ReplicaProvisionedReadCapacityAutoScalingSettings: (_) => de_AutoScalingSettingsDescription(_, context), + ReplicaProvisionedWriteCapacityAutoScalingSettings: (_) => de_AutoScalingSettingsDescription(_, context), + ReplicaStatus: __expectString, + }); +}; +const de_ReplicaAutoScalingDescriptionList = (output, context) => { + const retVal = (output || []) + .filter((e) => e != null) + .map((entry) => { + return de_ReplicaAutoScalingDescription(entry, context); + }); + return retVal; +}; +const de_ReplicaDescription = (output, context) => { + return take(output, { + GlobalSecondaryIndexes: _json, + KMSMasterKeyId: __expectString, + OnDemandThroughputOverride: _json, + ProvisionedThroughputOverride: _json, + RegionName: __expectString, + ReplicaInaccessibleDateTime: (_) => __expectNonNull(__parseEpochTimestamp(__expectNumber(_))), + ReplicaStatus: __expectString, + ReplicaStatusDescription: __expectString, + ReplicaStatusPercentProgress: __expectString, + ReplicaTableClassSummary: (_) => de_TableClassSummary(_, context), + WarmThroughput: _json, + }); +}; +const de_ReplicaDescriptionList = (output, context) => { + const retVal = (output || []) + .filter((e) => e != null) + .map((entry) => { + return de_ReplicaDescription(entry, context); + }); + return retVal; +}; +const de_ReplicaGlobalSecondaryIndexAutoScalingDescription = (output, context) => { + return take(output, { + IndexName: __expectString, + IndexStatus: __expectString, + ProvisionedReadCapacityAutoScalingSettings: (_) => de_AutoScalingSettingsDescription(_, context), + ProvisionedWriteCapacityAutoScalingSettings: (_) => de_AutoScalingSettingsDescription(_, context), + }); +}; +const de_ReplicaGlobalSecondaryIndexAutoScalingDescriptionList = (output, context) => { + const retVal = (output || []) + .filter((e) => e != null) + 
.map((entry) => { + return de_ReplicaGlobalSecondaryIndexAutoScalingDescription(entry, context); + }); + return retVal; +}; +const de_ReplicaGlobalSecondaryIndexSettingsDescription = (output, context) => { + return take(output, { + IndexName: __expectString, + IndexStatus: __expectString, + ProvisionedReadCapacityAutoScalingSettings: (_) => de_AutoScalingSettingsDescription(_, context), + ProvisionedReadCapacityUnits: __expectLong, + ProvisionedWriteCapacityAutoScalingSettings: (_) => de_AutoScalingSettingsDescription(_, context), + ProvisionedWriteCapacityUnits: __expectLong, + }); +}; +const de_ReplicaGlobalSecondaryIndexSettingsDescriptionList = (output, context) => { + const retVal = (output || []) + .filter((e) => e != null) + .map((entry) => { + return de_ReplicaGlobalSecondaryIndexSettingsDescription(entry, context); + }); + return retVal; +}; +const de_ReplicaSettingsDescription = (output, context) => { + return take(output, { + RegionName: __expectString, + ReplicaBillingModeSummary: (_) => de_BillingModeSummary(_, context), + ReplicaGlobalSecondaryIndexSettings: (_) => de_ReplicaGlobalSecondaryIndexSettingsDescriptionList(_, context), + ReplicaProvisionedReadCapacityAutoScalingSettings: (_) => de_AutoScalingSettingsDescription(_, context), + ReplicaProvisionedReadCapacityUnits: __expectLong, + ReplicaProvisionedWriteCapacityAutoScalingSettings: (_) => de_AutoScalingSettingsDescription(_, context), + ReplicaProvisionedWriteCapacityUnits: __expectLong, + ReplicaStatus: __expectString, + ReplicaTableClassSummary: (_) => de_TableClassSummary(_, context), + }); +}; +const de_ReplicaSettingsDescriptionList = (output, context) => { + const retVal = (output || []) + .filter((e) => e != null) + .map((entry) => { + return de_ReplicaSettingsDescription(entry, context); + }); + return retVal; +}; +const de_RestoreSummary = (output, context) => { + return take(output, { + RestoreDateTime: (_) => __expectNonNull(__parseEpochTimestamp(__expectNumber(_))), + 
RestoreInProgress: __expectBoolean, + SourceBackupArn: __expectString, + SourceTableArn: __expectString, + }); +}; +const de_RestoreTableFromBackupOutput = (output, context) => { + return take(output, { + TableDescription: (_) => de_TableDescription(_, context), + }); +}; +const de_RestoreTableToPointInTimeOutput = (output, context) => { + return take(output, { + TableDescription: (_) => de_TableDescription(_, context), + }); +}; +const de_ScanOutput = (output, context) => { + return take(output, { + ConsumedCapacity: (_) => de_ConsumedCapacity(_, context), + Count: __expectInt32, + Items: (_) => de_ItemList(_, context), + LastEvaluatedKey: (_) => de_Key(_, context), + ScannedCount: __expectInt32, + }); +}; +const de_SecondaryIndexesCapacityMap = (output, context) => { + return Object.entries(output).reduce((acc, [key, value]) => { + if (value === null) { + return acc; + } + acc[key] = de_Capacity(value, context); + return acc; + }, {}); +}; +const de_SourceTableDetails = (output, context) => { + return take(output, { + BillingMode: __expectString, + ItemCount: __expectLong, + KeySchema: _json, + OnDemandThroughput: _json, + ProvisionedThroughput: _json, + TableArn: __expectString, + TableCreationDateTime: (_) => __expectNonNull(__parseEpochTimestamp(__expectNumber(_))), + TableId: __expectString, + TableName: __expectString, + TableSizeBytes: __expectLong, + }); +}; +const de_SourceTableFeatureDetails = (output, context) => { + return take(output, { + GlobalSecondaryIndexes: _json, + LocalSecondaryIndexes: _json, + SSEDescription: (_) => de_SSEDescription(_, context), + StreamDescription: _json, + TimeToLiveDescription: _json, + }); +}; +const de_SSEDescription = (output, context) => { + return take(output, { + InaccessibleEncryptionDateTime: (_) => __expectNonNull(__parseEpochTimestamp(__expectNumber(_))), + KMSMasterKeyArn: __expectString, + SSEType: __expectString, + Status: __expectString, + }); +}; +const de_TableAutoScalingDescription = (output, context) => 
{ + return take(output, { + Replicas: (_) => de_ReplicaAutoScalingDescriptionList(_, context), + TableName: __expectString, + TableStatus: __expectString, + }); +}; +const de_TableClassSummary = (output, context) => { + return take(output, { + LastUpdateDateTime: (_) => __expectNonNull(__parseEpochTimestamp(__expectNumber(_))), + TableClass: __expectString, + }); +}; +const de_TableDescription = (output, context) => { + return take(output, { + ArchivalSummary: (_) => de_ArchivalSummary(_, context), + AttributeDefinitions: _json, + BillingModeSummary: (_) => de_BillingModeSummary(_, context), + CreationDateTime: (_) => __expectNonNull(__parseEpochTimestamp(__expectNumber(_))), + DeletionProtectionEnabled: __expectBoolean, + GlobalSecondaryIndexes: (_) => de_GlobalSecondaryIndexDescriptionList(_, context), + GlobalTableVersion: __expectString, + ItemCount: __expectLong, + KeySchema: _json, + LatestStreamArn: __expectString, + LatestStreamLabel: __expectString, + LocalSecondaryIndexes: _json, + MultiRegionConsistency: __expectString, + OnDemandThroughput: _json, + ProvisionedThroughput: (_) => de_ProvisionedThroughputDescription(_, context), + Replicas: (_) => de_ReplicaDescriptionList(_, context), + RestoreSummary: (_) => de_RestoreSummary(_, context), + SSEDescription: (_) => de_SSEDescription(_, context), + StreamSpecification: _json, + TableArn: __expectString, + TableClassSummary: (_) => de_TableClassSummary(_, context), + TableId: __expectString, + TableName: __expectString, + TableSizeBytes: __expectLong, + TableStatus: __expectString, + WarmThroughput: _json, + }); +}; +const de_TransactGetItemsOutput = (output, context) => { + return take(output, { + ConsumedCapacity: (_) => de_ConsumedCapacityMultiple(_, context), + Responses: (_) => de_ItemResponseList(_, context), + }); +}; +const de_TransactionCanceledException = (output, context) => { + return take(output, { + CancellationReasons: (_) => de_CancellationReasonList(_, context), + Message: __expectString, + 
}); +}; +const de_TransactWriteItemsOutput = (output, context) => { + return take(output, { + ConsumedCapacity: (_) => de_ConsumedCapacityMultiple(_, context), + ItemCollectionMetrics: (_) => de_ItemCollectionMetricsPerTable(_, context), + }); +}; +const de_UpdateContinuousBackupsOutput = (output, context) => { + return take(output, { + ContinuousBackupsDescription: (_) => de_ContinuousBackupsDescription(_, context), + }); +}; +const de_UpdateGlobalTableOutput = (output, context) => { + return take(output, { + GlobalTableDescription: (_) => de_GlobalTableDescription(_, context), + }); +}; +const de_UpdateGlobalTableSettingsOutput = (output, context) => { + return take(output, { + GlobalTableName: __expectString, + ReplicaSettings: (_) => de_ReplicaSettingsDescriptionList(_, context), + }); +}; +const de_UpdateItemOutput = (output, context) => { + return take(output, { + Attributes: (_) => de_AttributeMap(_, context), + ConsumedCapacity: (_) => de_ConsumedCapacity(_, context), + ItemCollectionMetrics: (_) => de_ItemCollectionMetrics(_, context), + }); +}; +const de_UpdateTableOutput = (output, context) => { + return take(output, { + TableDescription: (_) => de_TableDescription(_, context), + }); +}; +const de_UpdateTableReplicaAutoScalingOutput = (output, context) => { + return take(output, { + TableAutoScalingDescription: (_) => de_TableAutoScalingDescription(_, context), + }); +}; +const de_WriteRequest = (output, context) => { + return take(output, { + DeleteRequest: (_) => de_DeleteRequest(_, context), + PutRequest: (_) => de_PutRequest(_, context), + }); +}; +const de_WriteRequests = (output, context) => { + const retVal = (output || []) + .filter((e) => e != null) + .map((entry) => { + return de_WriteRequest(entry, context); + }); + return retVal; +}; +const deserializeMetadata = (output) => ({ + httpStatusCode: output.statusCode, + requestId: output.headers["x-amzn-requestid"] ?? output.headers["x-amzn-request-id"] ?? 
output.headers["x-amz-request-id"], + extendedRequestId: output.headers["x-amz-id-2"], + cfId: output.headers["x-amz-cf-id"], +}); +const collectBodyString = (streamBody, context) => collectBody(streamBody, context).then((body) => context.utf8Encoder(body)); +const throwDefaultError = withBaseException(__BaseException); +const buildHttpRpcRequest = async (context, headers, path, resolvedHostname, body) => { + const { hostname, protocol = "https", port, path: basePath } = await context.endpoint(); + const contents = { + protocol, + hostname, + port, + method: "POST", + path: basePath.endsWith("/") ? basePath.slice(0, -1) + path : basePath + path, + headers, + }; + if (resolvedHostname !== undefined) { + contents.hostname = resolvedHostname; + } + if (body !== undefined) { + contents.body = body; + } + return new __HttpRequest(contents); +}; +function sharedHeaders(operation) { + return { + "content-type": "application/x-amz-json-1.0", + "x-amz-target": `DynamoDB_20120810.${operation}`, + }; +} diff --git a/amplify/functions/deleteDocument/node_modules/@aws-sdk/client-dynamodb/dist-es/runtimeConfig.browser.js b/amplify/functions/deleteDocument/node_modules/@aws-sdk/client-dynamodb/dist-es/runtimeConfig.browser.js new file mode 100644 index 0000000..a29a02d --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@aws-sdk/client-dynamodb/dist-es/runtimeConfig.browser.js @@ -0,0 +1,37 @@ +import packageInfo from "../package.json"; +import { Sha256 } from "@aws-crypto/sha256-browser"; +import { DEFAULT_ACCOUNT_ID_ENDPOINT_MODE } from "@aws-sdk/core/account-id-endpoint"; +import { createDefaultUserAgentProvider } from "@aws-sdk/util-user-agent-browser"; +import { DEFAULT_USE_DUALSTACK_ENDPOINT, DEFAULT_USE_FIPS_ENDPOINT } from "@smithy/config-resolver"; +import { FetchHttpHandler as RequestHandler, streamCollector } from "@smithy/fetch-http-handler"; +import { invalidProvider } from "@smithy/invalid-dependency"; +import { calculateBodyLength } from 
"@smithy/util-body-length-browser"; +import { DEFAULT_MAX_ATTEMPTS, DEFAULT_RETRY_MODE } from "@smithy/util-retry"; +import { getRuntimeConfig as getSharedRuntimeConfig } from "./runtimeConfig.shared"; +import { loadConfigsForDefaultMode } from "@smithy/smithy-client"; +import { resolveDefaultsModeConfig } from "@smithy/util-defaults-mode-browser"; +export const getRuntimeConfig = (config) => { + const defaultsMode = resolveDefaultsModeConfig(config); + const defaultConfigProvider = () => defaultsMode().then(loadConfigsForDefaultMode); + const clientSharedValues = getSharedRuntimeConfig(config); + return { + ...clientSharedValues, + ...config, + runtime: "browser", + defaultsMode, + accountIdEndpointMode: config?.accountIdEndpointMode ?? (() => Promise.resolve(DEFAULT_ACCOUNT_ID_ENDPOINT_MODE)), + bodyLengthChecker: config?.bodyLengthChecker ?? calculateBodyLength, + credentialDefaultProvider: config?.credentialDefaultProvider ?? ((_) => () => Promise.reject(new Error("Credential is missing"))), + defaultUserAgentProvider: config?.defaultUserAgentProvider ?? + createDefaultUserAgentProvider({ serviceId: clientSharedValues.serviceId, clientVersion: packageInfo.version }), + endpointDiscoveryEnabledProvider: config?.endpointDiscoveryEnabledProvider ?? (() => Promise.resolve(undefined)), + maxAttempts: config?.maxAttempts ?? DEFAULT_MAX_ATTEMPTS, + region: config?.region ?? invalidProvider("Region is missing"), + requestHandler: RequestHandler.create(config?.requestHandler ?? defaultConfigProvider), + retryMode: config?.retryMode ?? (async () => (await defaultConfigProvider()).retryMode || DEFAULT_RETRY_MODE), + sha256: config?.sha256 ?? Sha256, + streamCollector: config?.streamCollector ?? streamCollector, + useDualstackEndpoint: config?.useDualstackEndpoint ?? (() => Promise.resolve(DEFAULT_USE_DUALSTACK_ENDPOINT)), + useFipsEndpoint: config?.useFipsEndpoint ?? 
(() => Promise.resolve(DEFAULT_USE_FIPS_ENDPOINT)), + }; +}; diff --git a/amplify/functions/deleteDocument/node_modules/@aws-sdk/client-dynamodb/dist-es/runtimeConfig.js b/amplify/functions/deleteDocument/node_modules/@aws-sdk/client-dynamodb/dist-es/runtimeConfig.js new file mode 100644 index 0000000..69898d1 --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@aws-sdk/client-dynamodb/dist-es/runtimeConfig.js @@ -0,0 +1,52 @@ +import packageInfo from "../package.json"; +import { NODE_AUTH_SCHEME_PREFERENCE_OPTIONS, emitWarningIfUnsupportedVersion as awsCheckVersion } from "@aws-sdk/core"; +import { NODE_ACCOUNT_ID_ENDPOINT_MODE_CONFIG_OPTIONS } from "@aws-sdk/core/account-id-endpoint"; +import { defaultProvider as credentialDefaultProvider } from "@aws-sdk/credential-provider-node"; +import { NODE_ENDPOINT_DISCOVERY_CONFIG_OPTIONS } from "@aws-sdk/middleware-endpoint-discovery"; +import { NODE_APP_ID_CONFIG_OPTIONS, createDefaultUserAgentProvider } from "@aws-sdk/util-user-agent-node"; +import { NODE_REGION_CONFIG_FILE_OPTIONS, NODE_REGION_CONFIG_OPTIONS, NODE_USE_DUALSTACK_ENDPOINT_CONFIG_OPTIONS, NODE_USE_FIPS_ENDPOINT_CONFIG_OPTIONS, } from "@smithy/config-resolver"; +import { Hash } from "@smithy/hash-node"; +import { NODE_MAX_ATTEMPT_CONFIG_OPTIONS, NODE_RETRY_MODE_CONFIG_OPTIONS } from "@smithy/middleware-retry"; +import { loadConfig as loadNodeConfig } from "@smithy/node-config-provider"; +import { NodeHttpHandler as RequestHandler, streamCollector } from "@smithy/node-http-handler"; +import { calculateBodyLength } from "@smithy/util-body-length-node"; +import { DEFAULT_RETRY_MODE } from "@smithy/util-retry"; +import { getRuntimeConfig as getSharedRuntimeConfig } from "./runtimeConfig.shared"; +import { loadConfigsForDefaultMode } from "@smithy/smithy-client"; +import { resolveDefaultsModeConfig } from "@smithy/util-defaults-mode-node"; +import { emitWarningIfUnsupportedVersion } from "@smithy/smithy-client"; +export const getRuntimeConfig = 
(config) => { + emitWarningIfUnsupportedVersion(process.version); + const defaultsMode = resolveDefaultsModeConfig(config); + const defaultConfigProvider = () => defaultsMode().then(loadConfigsForDefaultMode); + const clientSharedValues = getSharedRuntimeConfig(config); + awsCheckVersion(process.version); + const profileConfig = { profile: config?.profile }; + return { + ...clientSharedValues, + ...config, + runtime: "node", + defaultsMode, + accountIdEndpointMode: config?.accountIdEndpointMode ?? loadNodeConfig(NODE_ACCOUNT_ID_ENDPOINT_MODE_CONFIG_OPTIONS, profileConfig), + authSchemePreference: config?.authSchemePreference ?? loadNodeConfig(NODE_AUTH_SCHEME_PREFERENCE_OPTIONS, profileConfig), + bodyLengthChecker: config?.bodyLengthChecker ?? calculateBodyLength, + credentialDefaultProvider: config?.credentialDefaultProvider ?? credentialDefaultProvider, + defaultUserAgentProvider: config?.defaultUserAgentProvider ?? + createDefaultUserAgentProvider({ serviceId: clientSharedValues.serviceId, clientVersion: packageInfo.version }), + endpointDiscoveryEnabledProvider: config?.endpointDiscoveryEnabledProvider ?? loadNodeConfig(NODE_ENDPOINT_DISCOVERY_CONFIG_OPTIONS, profileConfig), + maxAttempts: config?.maxAttempts ?? loadNodeConfig(NODE_MAX_ATTEMPT_CONFIG_OPTIONS, config), + region: config?.region ?? + loadNodeConfig(NODE_REGION_CONFIG_OPTIONS, { ...NODE_REGION_CONFIG_FILE_OPTIONS, ...profileConfig }), + requestHandler: RequestHandler.create(config?.requestHandler ?? defaultConfigProvider), + retryMode: config?.retryMode ?? + loadNodeConfig({ + ...NODE_RETRY_MODE_CONFIG_OPTIONS, + default: async () => (await defaultConfigProvider()).retryMode || DEFAULT_RETRY_MODE, + }, config), + sha256: config?.sha256 ?? Hash.bind(null, "sha256"), + streamCollector: config?.streamCollector ?? streamCollector, + useDualstackEndpoint: config?.useDualstackEndpoint ?? 
loadNodeConfig(NODE_USE_DUALSTACK_ENDPOINT_CONFIG_OPTIONS, profileConfig), + useFipsEndpoint: config?.useFipsEndpoint ?? loadNodeConfig(NODE_USE_FIPS_ENDPOINT_CONFIG_OPTIONS, profileConfig), + userAgentAppId: config?.userAgentAppId ?? loadNodeConfig(NODE_APP_ID_CONFIG_OPTIONS, profileConfig), + }; +}; diff --git a/amplify/functions/deleteDocument/node_modules/@aws-sdk/client-dynamodb/dist-es/runtimeConfig.native.js b/amplify/functions/deleteDocument/node_modules/@aws-sdk/client-dynamodb/dist-es/runtimeConfig.native.js new file mode 100644 index 0000000..0b54695 --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@aws-sdk/client-dynamodb/dist-es/runtimeConfig.native.js @@ -0,0 +1,11 @@ +import { Sha256 } from "@aws-crypto/sha256-js"; +import { getRuntimeConfig as getBrowserRuntimeConfig } from "./runtimeConfig.browser"; +export const getRuntimeConfig = (config) => { + const browserDefaults = getBrowserRuntimeConfig(config); + return { + ...browserDefaults, + ...config, + runtime: "react-native", + sha256: config?.sha256 ?? 
Sha256, + }; +}; diff --git a/amplify/functions/deleteDocument/node_modules/@aws-sdk/client-dynamodb/dist-es/runtimeConfig.shared.js b/amplify/functions/deleteDocument/node_modules/@aws-sdk/client-dynamodb/dist-es/runtimeConfig.shared.js new file mode 100644 index 0000000..ee4ffa7 --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@aws-sdk/client-dynamodb/dist-es/runtimeConfig.shared.js @@ -0,0 +1,30 @@ +import { AwsSdkSigV4Signer } from "@aws-sdk/core"; +import { NoOpLogger } from "@smithy/smithy-client"; +import { parseUrl } from "@smithy/url-parser"; +import { fromBase64, toBase64 } from "@smithy/util-base64"; +import { fromUtf8, toUtf8 } from "@smithy/util-utf8"; +import { defaultDynamoDBHttpAuthSchemeProvider } from "./auth/httpAuthSchemeProvider"; +import { defaultEndpointResolver } from "./endpoint/endpointResolver"; +export const getRuntimeConfig = (config) => { + return { + apiVersion: "2012-08-10", + base64Decoder: config?.base64Decoder ?? fromBase64, + base64Encoder: config?.base64Encoder ?? toBase64, + disableHostPrefix: config?.disableHostPrefix ?? false, + endpointProvider: config?.endpointProvider ?? defaultEndpointResolver, + extensions: config?.extensions ?? [], + httpAuthSchemeProvider: config?.httpAuthSchemeProvider ?? defaultDynamoDBHttpAuthSchemeProvider, + httpAuthSchemes: config?.httpAuthSchemes ?? [ + { + schemeId: "aws.auth#sigv4", + identityProvider: (ipc) => ipc.getIdentityProvider("aws.auth#sigv4"), + signer: new AwsSdkSigV4Signer(), + }, + ], + logger: config?.logger ?? new NoOpLogger(), + serviceId: config?.serviceId ?? "DynamoDB", + urlParser: config?.urlParser ?? parseUrl, + utf8Decoder: config?.utf8Decoder ?? fromUtf8, + utf8Encoder: config?.utf8Encoder ?? 
toUtf8, + }; +}; diff --git a/amplify/functions/deleteDocument/node_modules/@aws-sdk/client-dynamodb/dist-es/runtimeExtensions.js b/amplify/functions/deleteDocument/node_modules/@aws-sdk/client-dynamodb/dist-es/runtimeExtensions.js new file mode 100644 index 0000000..5b29695 --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@aws-sdk/client-dynamodb/dist-es/runtimeExtensions.js @@ -0,0 +1,9 @@ +import { getAwsRegionExtensionConfiguration, resolveAwsRegionExtensionConfiguration, } from "@aws-sdk/region-config-resolver"; +import { getHttpHandlerExtensionConfiguration, resolveHttpHandlerRuntimeConfig } from "@smithy/protocol-http"; +import { getDefaultExtensionConfiguration, resolveDefaultRuntimeConfig } from "@smithy/smithy-client"; +import { getHttpAuthExtensionConfiguration, resolveHttpAuthRuntimeConfig } from "./auth/httpAuthExtensionConfiguration"; +export const resolveRuntimeExtensions = (runtimeConfig, extensions) => { + const extensionConfiguration = Object.assign(getAwsRegionExtensionConfiguration(runtimeConfig), getDefaultExtensionConfiguration(runtimeConfig), getHttpHandlerExtensionConfiguration(runtimeConfig), getHttpAuthExtensionConfiguration(runtimeConfig)); + extensions.forEach((extension) => extension.configure(extensionConfiguration)); + return Object.assign(runtimeConfig, resolveAwsRegionExtensionConfiguration(extensionConfiguration), resolveDefaultRuntimeConfig(extensionConfiguration), resolveHttpHandlerRuntimeConfig(extensionConfiguration), resolveHttpAuthRuntimeConfig(extensionConfiguration)); +}; diff --git a/amplify/functions/deleteDocument/node_modules/@aws-sdk/client-dynamodb/dist-es/waiters/index.js b/amplify/functions/deleteDocument/node_modules/@aws-sdk/client-dynamodb/dist-es/waiters/index.js new file mode 100644 index 0000000..a70fabd --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@aws-sdk/client-dynamodb/dist-es/waiters/index.js @@ -0,0 +1,2 @@ +export * from "./waitForTableExists"; +export * from 
"./waitForTableNotExists"; diff --git a/amplify/functions/deleteDocument/node_modules/@aws-sdk/client-dynamodb/dist-es/waiters/waitForTableExists.js b/amplify/functions/deleteDocument/node_modules/@aws-sdk/client-dynamodb/dist-es/waiters/waitForTableExists.js new file mode 100644 index 0000000..c6faadd --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@aws-sdk/client-dynamodb/dist-es/waiters/waitForTableExists.js @@ -0,0 +1,34 @@ +import { checkExceptions, createWaiter, WaiterState } from "@smithy/util-waiter"; +import { DescribeTableCommand } from "../commands/DescribeTableCommand"; +const checkState = async (client, input) => { + let reason; + try { + const result = await client.send(new DescribeTableCommand(input)); + reason = result; + try { + const returnComparator = () => { + return result.Table.TableStatus; + }; + if (returnComparator() === "ACTIVE") { + return { state: WaiterState.SUCCESS, reason }; + } + } + catch (e) { } + } + catch (exception) { + reason = exception; + if (exception.name && exception.name == "ResourceNotFoundException") { + return { state: WaiterState.RETRY, reason }; + } + } + return { state: WaiterState.RETRY, reason }; +}; +export const waitForTableExists = async (params, input) => { + const serviceDefaults = { minDelay: 20, maxDelay: 120 }; + return createWaiter({ ...serviceDefaults, ...params }, input, checkState); +}; +export const waitUntilTableExists = async (params, input) => { + const serviceDefaults = { minDelay: 20, maxDelay: 120 }; + const result = await createWaiter({ ...serviceDefaults, ...params }, input, checkState); + return checkExceptions(result); +}; diff --git a/amplify/functions/deleteDocument/node_modules/@aws-sdk/client-dynamodb/dist-es/waiters/waitForTableNotExists.js b/amplify/functions/deleteDocument/node_modules/@aws-sdk/client-dynamodb/dist-es/waiters/waitForTableNotExists.js new file mode 100644 index 0000000..b691c03 --- /dev/null +++ 
b/amplify/functions/deleteDocument/node_modules/@aws-sdk/client-dynamodb/dist-es/waiters/waitForTableNotExists.js @@ -0,0 +1,25 @@ +import { checkExceptions, createWaiter, WaiterState } from "@smithy/util-waiter"; +import { DescribeTableCommand } from "../commands/DescribeTableCommand"; +const checkState = async (client, input) => { + let reason; + try { + const result = await client.send(new DescribeTableCommand(input)); + reason = result; + } + catch (exception) { + reason = exception; + if (exception.name && exception.name == "ResourceNotFoundException") { + return { state: WaiterState.SUCCESS, reason }; + } + } + return { state: WaiterState.RETRY, reason }; +}; +export const waitForTableNotExists = async (params, input) => { + const serviceDefaults = { minDelay: 20, maxDelay: 120 }; + return createWaiter({ ...serviceDefaults, ...params }, input, checkState); +}; +export const waitUntilTableNotExists = async (params, input) => { + const serviceDefaults = { minDelay: 20, maxDelay: 120 }; + const result = await createWaiter({ ...serviceDefaults, ...params }, input, checkState); + return checkExceptions(result); +}; diff --git a/amplify/functions/deleteDocument/node_modules/@aws-sdk/client-dynamodb/dist-types/DynamoDB.d.ts b/amplify/functions/deleteDocument/node_modules/@aws-sdk/client-dynamodb/dist-types/DynamoDB.d.ts new file mode 100644 index 0000000..7ad45f0 --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@aws-sdk/client-dynamodb/dist-types/DynamoDB.d.ts @@ -0,0 +1,433 @@ +import { HttpHandlerOptions as __HttpHandlerOptions } from "@smithy/types"; +import { BatchExecuteStatementCommandInput, BatchExecuteStatementCommandOutput } from "./commands/BatchExecuteStatementCommand"; +import { BatchGetItemCommandInput, BatchGetItemCommandOutput } from "./commands/BatchGetItemCommand"; +import { BatchWriteItemCommandInput, BatchWriteItemCommandOutput } from "./commands/BatchWriteItemCommand"; +import { CreateBackupCommandInput, CreateBackupCommandOutput 
} from "./commands/CreateBackupCommand"; +import { CreateGlobalTableCommandInput, CreateGlobalTableCommandOutput } from "./commands/CreateGlobalTableCommand"; +import { CreateTableCommandInput, CreateTableCommandOutput } from "./commands/CreateTableCommand"; +import { DeleteBackupCommandInput, DeleteBackupCommandOutput } from "./commands/DeleteBackupCommand"; +import { DeleteItemCommandInput, DeleteItemCommandOutput } from "./commands/DeleteItemCommand"; +import { DeleteResourcePolicyCommandInput, DeleteResourcePolicyCommandOutput } from "./commands/DeleteResourcePolicyCommand"; +import { DeleteTableCommandInput, DeleteTableCommandOutput } from "./commands/DeleteTableCommand"; +import { DescribeBackupCommandInput, DescribeBackupCommandOutput } from "./commands/DescribeBackupCommand"; +import { DescribeContinuousBackupsCommandInput, DescribeContinuousBackupsCommandOutput } from "./commands/DescribeContinuousBackupsCommand"; +import { DescribeContributorInsightsCommandInput, DescribeContributorInsightsCommandOutput } from "./commands/DescribeContributorInsightsCommand"; +import { DescribeEndpointsCommandInput, DescribeEndpointsCommandOutput } from "./commands/DescribeEndpointsCommand"; +import { DescribeExportCommandInput, DescribeExportCommandOutput } from "./commands/DescribeExportCommand"; +import { DescribeGlobalTableCommandInput, DescribeGlobalTableCommandOutput } from "./commands/DescribeGlobalTableCommand"; +import { DescribeGlobalTableSettingsCommandInput, DescribeGlobalTableSettingsCommandOutput } from "./commands/DescribeGlobalTableSettingsCommand"; +import { DescribeImportCommandInput, DescribeImportCommandOutput } from "./commands/DescribeImportCommand"; +import { DescribeKinesisStreamingDestinationCommandInput, DescribeKinesisStreamingDestinationCommandOutput } from "./commands/DescribeKinesisStreamingDestinationCommand"; +import { DescribeLimitsCommandInput, DescribeLimitsCommandOutput } from "./commands/DescribeLimitsCommand"; +import { 
DescribeTableCommandInput, DescribeTableCommandOutput } from "./commands/DescribeTableCommand"; +import { DescribeTableReplicaAutoScalingCommandInput, DescribeTableReplicaAutoScalingCommandOutput } from "./commands/DescribeTableReplicaAutoScalingCommand"; +import { DescribeTimeToLiveCommandInput, DescribeTimeToLiveCommandOutput } from "./commands/DescribeTimeToLiveCommand"; +import { DisableKinesisStreamingDestinationCommandInput, DisableKinesisStreamingDestinationCommandOutput } from "./commands/DisableKinesisStreamingDestinationCommand"; +import { EnableKinesisStreamingDestinationCommandInput, EnableKinesisStreamingDestinationCommandOutput } from "./commands/EnableKinesisStreamingDestinationCommand"; +import { ExecuteStatementCommandInput, ExecuteStatementCommandOutput } from "./commands/ExecuteStatementCommand"; +import { ExecuteTransactionCommandInput, ExecuteTransactionCommandOutput } from "./commands/ExecuteTransactionCommand"; +import { ExportTableToPointInTimeCommandInput, ExportTableToPointInTimeCommandOutput } from "./commands/ExportTableToPointInTimeCommand"; +import { GetItemCommandInput, GetItemCommandOutput } from "./commands/GetItemCommand"; +import { GetResourcePolicyCommandInput, GetResourcePolicyCommandOutput } from "./commands/GetResourcePolicyCommand"; +import { ImportTableCommandInput, ImportTableCommandOutput } from "./commands/ImportTableCommand"; +import { ListBackupsCommandInput, ListBackupsCommandOutput } from "./commands/ListBackupsCommand"; +import { ListContributorInsightsCommandInput, ListContributorInsightsCommandOutput } from "./commands/ListContributorInsightsCommand"; +import { ListExportsCommandInput, ListExportsCommandOutput } from "./commands/ListExportsCommand"; +import { ListGlobalTablesCommandInput, ListGlobalTablesCommandOutput } from "./commands/ListGlobalTablesCommand"; +import { ListImportsCommandInput, ListImportsCommandOutput } from "./commands/ListImportsCommand"; +import { ListTablesCommandInput, 
ListTablesCommandOutput } from "./commands/ListTablesCommand"; +import { ListTagsOfResourceCommandInput, ListTagsOfResourceCommandOutput } from "./commands/ListTagsOfResourceCommand"; +import { PutItemCommandInput, PutItemCommandOutput } from "./commands/PutItemCommand"; +import { PutResourcePolicyCommandInput, PutResourcePolicyCommandOutput } from "./commands/PutResourcePolicyCommand"; +import { QueryCommandInput, QueryCommandOutput } from "./commands/QueryCommand"; +import { RestoreTableFromBackupCommandInput, RestoreTableFromBackupCommandOutput } from "./commands/RestoreTableFromBackupCommand"; +import { RestoreTableToPointInTimeCommandInput, RestoreTableToPointInTimeCommandOutput } from "./commands/RestoreTableToPointInTimeCommand"; +import { ScanCommandInput, ScanCommandOutput } from "./commands/ScanCommand"; +import { TagResourceCommandInput, TagResourceCommandOutput } from "./commands/TagResourceCommand"; +import { TransactGetItemsCommandInput, TransactGetItemsCommandOutput } from "./commands/TransactGetItemsCommand"; +import { TransactWriteItemsCommandInput, TransactWriteItemsCommandOutput } from "./commands/TransactWriteItemsCommand"; +import { UntagResourceCommandInput, UntagResourceCommandOutput } from "./commands/UntagResourceCommand"; +import { UpdateContinuousBackupsCommandInput, UpdateContinuousBackupsCommandOutput } from "./commands/UpdateContinuousBackupsCommand"; +import { UpdateContributorInsightsCommandInput, UpdateContributorInsightsCommandOutput } from "./commands/UpdateContributorInsightsCommand"; +import { UpdateGlobalTableCommandInput, UpdateGlobalTableCommandOutput } from "./commands/UpdateGlobalTableCommand"; +import { UpdateGlobalTableSettingsCommandInput, UpdateGlobalTableSettingsCommandOutput } from "./commands/UpdateGlobalTableSettingsCommand"; +import { UpdateItemCommandInput, UpdateItemCommandOutput } from "./commands/UpdateItemCommand"; +import { UpdateKinesisStreamingDestinationCommandInput, 
UpdateKinesisStreamingDestinationCommandOutput } from "./commands/UpdateKinesisStreamingDestinationCommand"; +import { UpdateTableCommandInput, UpdateTableCommandOutput } from "./commands/UpdateTableCommand"; +import { UpdateTableReplicaAutoScalingCommandInput, UpdateTableReplicaAutoScalingCommandOutput } from "./commands/UpdateTableReplicaAutoScalingCommand"; +import { UpdateTimeToLiveCommandInput, UpdateTimeToLiveCommandOutput } from "./commands/UpdateTimeToLiveCommand"; +import { DynamoDBClient } from "./DynamoDBClient"; +export interface DynamoDB { + /** + * @see {@link BatchExecuteStatementCommand} + */ + batchExecuteStatement(args: BatchExecuteStatementCommandInput, options?: __HttpHandlerOptions): Promise; + batchExecuteStatement(args: BatchExecuteStatementCommandInput, cb: (err: any, data?: BatchExecuteStatementCommandOutput) => void): void; + batchExecuteStatement(args: BatchExecuteStatementCommandInput, options: __HttpHandlerOptions, cb: (err: any, data?: BatchExecuteStatementCommandOutput) => void): void; + /** + * @see {@link BatchGetItemCommand} + */ + batchGetItem(args: BatchGetItemCommandInput, options?: __HttpHandlerOptions): Promise; + batchGetItem(args: BatchGetItemCommandInput, cb: (err: any, data?: BatchGetItemCommandOutput) => void): void; + batchGetItem(args: BatchGetItemCommandInput, options: __HttpHandlerOptions, cb: (err: any, data?: BatchGetItemCommandOutput) => void): void; + /** + * @see {@link BatchWriteItemCommand} + */ + batchWriteItem(args: BatchWriteItemCommandInput, options?: __HttpHandlerOptions): Promise; + batchWriteItem(args: BatchWriteItemCommandInput, cb: (err: any, data?: BatchWriteItemCommandOutput) => void): void; + batchWriteItem(args: BatchWriteItemCommandInput, options: __HttpHandlerOptions, cb: (err: any, data?: BatchWriteItemCommandOutput) => void): void; + /** + * @see {@link CreateBackupCommand} + */ + createBackup(args: CreateBackupCommandInput, options?: __HttpHandlerOptions): Promise; + createBackup(args: 
CreateBackupCommandInput, cb: (err: any, data?: CreateBackupCommandOutput) => void): void; + createBackup(args: CreateBackupCommandInput, options: __HttpHandlerOptions, cb: (err: any, data?: CreateBackupCommandOutput) => void): void; + /** + * @see {@link CreateGlobalTableCommand} + */ + createGlobalTable(args: CreateGlobalTableCommandInput, options?: __HttpHandlerOptions): Promise; + createGlobalTable(args: CreateGlobalTableCommandInput, cb: (err: any, data?: CreateGlobalTableCommandOutput) => void): void; + createGlobalTable(args: CreateGlobalTableCommandInput, options: __HttpHandlerOptions, cb: (err: any, data?: CreateGlobalTableCommandOutput) => void): void; + /** + * @see {@link CreateTableCommand} + */ + createTable(args: CreateTableCommandInput, options?: __HttpHandlerOptions): Promise; + createTable(args: CreateTableCommandInput, cb: (err: any, data?: CreateTableCommandOutput) => void): void; + createTable(args: CreateTableCommandInput, options: __HttpHandlerOptions, cb: (err: any, data?: CreateTableCommandOutput) => void): void; + /** + * @see {@link DeleteBackupCommand} + */ + deleteBackup(args: DeleteBackupCommandInput, options?: __HttpHandlerOptions): Promise; + deleteBackup(args: DeleteBackupCommandInput, cb: (err: any, data?: DeleteBackupCommandOutput) => void): void; + deleteBackup(args: DeleteBackupCommandInput, options: __HttpHandlerOptions, cb: (err: any, data?: DeleteBackupCommandOutput) => void): void; + /** + * @see {@link DeleteItemCommand} + */ + deleteItem(args: DeleteItemCommandInput, options?: __HttpHandlerOptions): Promise; + deleteItem(args: DeleteItemCommandInput, cb: (err: any, data?: DeleteItemCommandOutput) => void): void; + deleteItem(args: DeleteItemCommandInput, options: __HttpHandlerOptions, cb: (err: any, data?: DeleteItemCommandOutput) => void): void; + /** + * @see {@link DeleteResourcePolicyCommand} + */ + deleteResourcePolicy(args: DeleteResourcePolicyCommandInput, options?: __HttpHandlerOptions): Promise; + 
deleteResourcePolicy(args: DeleteResourcePolicyCommandInput, cb: (err: any, data?: DeleteResourcePolicyCommandOutput) => void): void; + deleteResourcePolicy(args: DeleteResourcePolicyCommandInput, options: __HttpHandlerOptions, cb: (err: any, data?: DeleteResourcePolicyCommandOutput) => void): void; + /** + * @see {@link DeleteTableCommand} + */ + deleteTable(args: DeleteTableCommandInput, options?: __HttpHandlerOptions): Promise; + deleteTable(args: DeleteTableCommandInput, cb: (err: any, data?: DeleteTableCommandOutput) => void): void; + deleteTable(args: DeleteTableCommandInput, options: __HttpHandlerOptions, cb: (err: any, data?: DeleteTableCommandOutput) => void): void; + /** + * @see {@link DescribeBackupCommand} + */ + describeBackup(args: DescribeBackupCommandInput, options?: __HttpHandlerOptions): Promise; + describeBackup(args: DescribeBackupCommandInput, cb: (err: any, data?: DescribeBackupCommandOutput) => void): void; + describeBackup(args: DescribeBackupCommandInput, options: __HttpHandlerOptions, cb: (err: any, data?: DescribeBackupCommandOutput) => void): void; + /** + * @see {@link DescribeContinuousBackupsCommand} + */ + describeContinuousBackups(args: DescribeContinuousBackupsCommandInput, options?: __HttpHandlerOptions): Promise; + describeContinuousBackups(args: DescribeContinuousBackupsCommandInput, cb: (err: any, data?: DescribeContinuousBackupsCommandOutput) => void): void; + describeContinuousBackups(args: DescribeContinuousBackupsCommandInput, options: __HttpHandlerOptions, cb: (err: any, data?: DescribeContinuousBackupsCommandOutput) => void): void; + /** + * @see {@link DescribeContributorInsightsCommand} + */ + describeContributorInsights(args: DescribeContributorInsightsCommandInput, options?: __HttpHandlerOptions): Promise; + describeContributorInsights(args: DescribeContributorInsightsCommandInput, cb: (err: any, data?: DescribeContributorInsightsCommandOutput) => void): void; + describeContributorInsights(args: 
DescribeContributorInsightsCommandInput, options: __HttpHandlerOptions, cb: (err: any, data?: DescribeContributorInsightsCommandOutput) => void): void; + /** + * @see {@link DescribeEndpointsCommand} + */ + describeEndpoints(): Promise; + describeEndpoints(args: DescribeEndpointsCommandInput, options?: __HttpHandlerOptions): Promise; + describeEndpoints(args: DescribeEndpointsCommandInput, cb: (err: any, data?: DescribeEndpointsCommandOutput) => void): void; + describeEndpoints(args: DescribeEndpointsCommandInput, options: __HttpHandlerOptions, cb: (err: any, data?: DescribeEndpointsCommandOutput) => void): void; + /** + * @see {@link DescribeExportCommand} + */ + describeExport(args: DescribeExportCommandInput, options?: __HttpHandlerOptions): Promise; + describeExport(args: DescribeExportCommandInput, cb: (err: any, data?: DescribeExportCommandOutput) => void): void; + describeExport(args: DescribeExportCommandInput, options: __HttpHandlerOptions, cb: (err: any, data?: DescribeExportCommandOutput) => void): void; + /** + * @see {@link DescribeGlobalTableCommand} + */ + describeGlobalTable(args: DescribeGlobalTableCommandInput, options?: __HttpHandlerOptions): Promise; + describeGlobalTable(args: DescribeGlobalTableCommandInput, cb: (err: any, data?: DescribeGlobalTableCommandOutput) => void): void; + describeGlobalTable(args: DescribeGlobalTableCommandInput, options: __HttpHandlerOptions, cb: (err: any, data?: DescribeGlobalTableCommandOutput) => void): void; + /** + * @see {@link DescribeGlobalTableSettingsCommand} + */ + describeGlobalTableSettings(args: DescribeGlobalTableSettingsCommandInput, options?: __HttpHandlerOptions): Promise; + describeGlobalTableSettings(args: DescribeGlobalTableSettingsCommandInput, cb: (err: any, data?: DescribeGlobalTableSettingsCommandOutput) => void): void; + describeGlobalTableSettings(args: DescribeGlobalTableSettingsCommandInput, options: __HttpHandlerOptions, cb: (err: any, data?: DescribeGlobalTableSettingsCommandOutput) => 
void): void; + /** + * @see {@link DescribeImportCommand} + */ + describeImport(args: DescribeImportCommandInput, options?: __HttpHandlerOptions): Promise; + describeImport(args: DescribeImportCommandInput, cb: (err: any, data?: DescribeImportCommandOutput) => void): void; + describeImport(args: DescribeImportCommandInput, options: __HttpHandlerOptions, cb: (err: any, data?: DescribeImportCommandOutput) => void): void; + /** + * @see {@link DescribeKinesisStreamingDestinationCommand} + */ + describeKinesisStreamingDestination(args: DescribeKinesisStreamingDestinationCommandInput, options?: __HttpHandlerOptions): Promise; + describeKinesisStreamingDestination(args: DescribeKinesisStreamingDestinationCommandInput, cb: (err: any, data?: DescribeKinesisStreamingDestinationCommandOutput) => void): void; + describeKinesisStreamingDestination(args: DescribeKinesisStreamingDestinationCommandInput, options: __HttpHandlerOptions, cb: (err: any, data?: DescribeKinesisStreamingDestinationCommandOutput) => void): void; + /** + * @see {@link DescribeLimitsCommand} + */ + describeLimits(): Promise; + describeLimits(args: DescribeLimitsCommandInput, options?: __HttpHandlerOptions): Promise; + describeLimits(args: DescribeLimitsCommandInput, cb: (err: any, data?: DescribeLimitsCommandOutput) => void): void; + describeLimits(args: DescribeLimitsCommandInput, options: __HttpHandlerOptions, cb: (err: any, data?: DescribeLimitsCommandOutput) => void): void; + /** + * @see {@link DescribeTableCommand} + */ + describeTable(args: DescribeTableCommandInput, options?: __HttpHandlerOptions): Promise; + describeTable(args: DescribeTableCommandInput, cb: (err: any, data?: DescribeTableCommandOutput) => void): void; + describeTable(args: DescribeTableCommandInput, options: __HttpHandlerOptions, cb: (err: any, data?: DescribeTableCommandOutput) => void): void; + /** + * @see {@link DescribeTableReplicaAutoScalingCommand} + */ + describeTableReplicaAutoScaling(args: 
DescribeTableReplicaAutoScalingCommandInput, options?: __HttpHandlerOptions): Promise; + describeTableReplicaAutoScaling(args: DescribeTableReplicaAutoScalingCommandInput, cb: (err: any, data?: DescribeTableReplicaAutoScalingCommandOutput) => void): void; + describeTableReplicaAutoScaling(args: DescribeTableReplicaAutoScalingCommandInput, options: __HttpHandlerOptions, cb: (err: any, data?: DescribeTableReplicaAutoScalingCommandOutput) => void): void; + /** + * @see {@link DescribeTimeToLiveCommand} + */ + describeTimeToLive(args: DescribeTimeToLiveCommandInput, options?: __HttpHandlerOptions): Promise; + describeTimeToLive(args: DescribeTimeToLiveCommandInput, cb: (err: any, data?: DescribeTimeToLiveCommandOutput) => void): void; + describeTimeToLive(args: DescribeTimeToLiveCommandInput, options: __HttpHandlerOptions, cb: (err: any, data?: DescribeTimeToLiveCommandOutput) => void): void; + /** + * @see {@link DisableKinesisStreamingDestinationCommand} + */ + disableKinesisStreamingDestination(args: DisableKinesisStreamingDestinationCommandInput, options?: __HttpHandlerOptions): Promise; + disableKinesisStreamingDestination(args: DisableKinesisStreamingDestinationCommandInput, cb: (err: any, data?: DisableKinesisStreamingDestinationCommandOutput) => void): void; + disableKinesisStreamingDestination(args: DisableKinesisStreamingDestinationCommandInput, options: __HttpHandlerOptions, cb: (err: any, data?: DisableKinesisStreamingDestinationCommandOutput) => void): void; + /** + * @see {@link EnableKinesisStreamingDestinationCommand} + */ + enableKinesisStreamingDestination(args: EnableKinesisStreamingDestinationCommandInput, options?: __HttpHandlerOptions): Promise; + enableKinesisStreamingDestination(args: EnableKinesisStreamingDestinationCommandInput, cb: (err: any, data?: EnableKinesisStreamingDestinationCommandOutput) => void): void; + enableKinesisStreamingDestination(args: EnableKinesisStreamingDestinationCommandInput, options: __HttpHandlerOptions, cb: (err: 
any, data?: EnableKinesisStreamingDestinationCommandOutput) => void): void; + /** + * @see {@link ExecuteStatementCommand} + */ + executeStatement(args: ExecuteStatementCommandInput, options?: __HttpHandlerOptions): Promise; + executeStatement(args: ExecuteStatementCommandInput, cb: (err: any, data?: ExecuteStatementCommandOutput) => void): void; + executeStatement(args: ExecuteStatementCommandInput, options: __HttpHandlerOptions, cb: (err: any, data?: ExecuteStatementCommandOutput) => void): void; + /** + * @see {@link ExecuteTransactionCommand} + */ + executeTransaction(args: ExecuteTransactionCommandInput, options?: __HttpHandlerOptions): Promise; + executeTransaction(args: ExecuteTransactionCommandInput, cb: (err: any, data?: ExecuteTransactionCommandOutput) => void): void; + executeTransaction(args: ExecuteTransactionCommandInput, options: __HttpHandlerOptions, cb: (err: any, data?: ExecuteTransactionCommandOutput) => void): void; + /** + * @see {@link ExportTableToPointInTimeCommand} + */ + exportTableToPointInTime(args: ExportTableToPointInTimeCommandInput, options?: __HttpHandlerOptions): Promise; + exportTableToPointInTime(args: ExportTableToPointInTimeCommandInput, cb: (err: any, data?: ExportTableToPointInTimeCommandOutput) => void): void; + exportTableToPointInTime(args: ExportTableToPointInTimeCommandInput, options: __HttpHandlerOptions, cb: (err: any, data?: ExportTableToPointInTimeCommandOutput) => void): void; + /** + * @see {@link GetItemCommand} + */ + getItem(args: GetItemCommandInput, options?: __HttpHandlerOptions): Promise; + getItem(args: GetItemCommandInput, cb: (err: any, data?: GetItemCommandOutput) => void): void; + getItem(args: GetItemCommandInput, options: __HttpHandlerOptions, cb: (err: any, data?: GetItemCommandOutput) => void): void; + /** + * @see {@link GetResourcePolicyCommand} + */ + getResourcePolicy(args: GetResourcePolicyCommandInput, options?: __HttpHandlerOptions): Promise; + getResourcePolicy(args: 
GetResourcePolicyCommandInput, cb: (err: any, data?: GetResourcePolicyCommandOutput) => void): void; + getResourcePolicy(args: GetResourcePolicyCommandInput, options: __HttpHandlerOptions, cb: (err: any, data?: GetResourcePolicyCommandOutput) => void): void; + /** + * @see {@link ImportTableCommand} + */ + importTable(args: ImportTableCommandInput, options?: __HttpHandlerOptions): Promise; + importTable(args: ImportTableCommandInput, cb: (err: any, data?: ImportTableCommandOutput) => void): void; + importTable(args: ImportTableCommandInput, options: __HttpHandlerOptions, cb: (err: any, data?: ImportTableCommandOutput) => void): void; + /** + * @see {@link ListBackupsCommand} + */ + listBackups(): Promise; + listBackups(args: ListBackupsCommandInput, options?: __HttpHandlerOptions): Promise; + listBackups(args: ListBackupsCommandInput, cb: (err: any, data?: ListBackupsCommandOutput) => void): void; + listBackups(args: ListBackupsCommandInput, options: __HttpHandlerOptions, cb: (err: any, data?: ListBackupsCommandOutput) => void): void; + /** + * @see {@link ListContributorInsightsCommand} + */ + listContributorInsights(): Promise; + listContributorInsights(args: ListContributorInsightsCommandInput, options?: __HttpHandlerOptions): Promise; + listContributorInsights(args: ListContributorInsightsCommandInput, cb: (err: any, data?: ListContributorInsightsCommandOutput) => void): void; + listContributorInsights(args: ListContributorInsightsCommandInput, options: __HttpHandlerOptions, cb: (err: any, data?: ListContributorInsightsCommandOutput) => void): void; + /** + * @see {@link ListExportsCommand} + */ + listExports(): Promise; + listExports(args: ListExportsCommandInput, options?: __HttpHandlerOptions): Promise; + listExports(args: ListExportsCommandInput, cb: (err: any, data?: ListExportsCommandOutput) => void): void; + listExports(args: ListExportsCommandInput, options: __HttpHandlerOptions, cb: (err: any, data?: ListExportsCommandOutput) => void): void; + /** + * 
@see {@link ListGlobalTablesCommand} + */ + listGlobalTables(): Promise; + listGlobalTables(args: ListGlobalTablesCommandInput, options?: __HttpHandlerOptions): Promise; + listGlobalTables(args: ListGlobalTablesCommandInput, cb: (err: any, data?: ListGlobalTablesCommandOutput) => void): void; + listGlobalTables(args: ListGlobalTablesCommandInput, options: __HttpHandlerOptions, cb: (err: any, data?: ListGlobalTablesCommandOutput) => void): void; + /** + * @see {@link ListImportsCommand} + */ + listImports(): Promise; + listImports(args: ListImportsCommandInput, options?: __HttpHandlerOptions): Promise; + listImports(args: ListImportsCommandInput, cb: (err: any, data?: ListImportsCommandOutput) => void): void; + listImports(args: ListImportsCommandInput, options: __HttpHandlerOptions, cb: (err: any, data?: ListImportsCommandOutput) => void): void; + /** + * @see {@link ListTablesCommand} + */ + listTables(): Promise; + listTables(args: ListTablesCommandInput, options?: __HttpHandlerOptions): Promise; + listTables(args: ListTablesCommandInput, cb: (err: any, data?: ListTablesCommandOutput) => void): void; + listTables(args: ListTablesCommandInput, options: __HttpHandlerOptions, cb: (err: any, data?: ListTablesCommandOutput) => void): void; + /** + * @see {@link ListTagsOfResourceCommand} + */ + listTagsOfResource(args: ListTagsOfResourceCommandInput, options?: __HttpHandlerOptions): Promise; + listTagsOfResource(args: ListTagsOfResourceCommandInput, cb: (err: any, data?: ListTagsOfResourceCommandOutput) => void): void; + listTagsOfResource(args: ListTagsOfResourceCommandInput, options: __HttpHandlerOptions, cb: (err: any, data?: ListTagsOfResourceCommandOutput) => void): void; + /** + * @see {@link PutItemCommand} + */ + putItem(args: PutItemCommandInput, options?: __HttpHandlerOptions): Promise; + putItem(args: PutItemCommandInput, cb: (err: any, data?: PutItemCommandOutput) => void): void; + putItem(args: PutItemCommandInput, options: __HttpHandlerOptions, cb: (err: 
any, data?: PutItemCommandOutput) => void): void; + /** + * @see {@link PutResourcePolicyCommand} + */ + putResourcePolicy(args: PutResourcePolicyCommandInput, options?: __HttpHandlerOptions): Promise; + putResourcePolicy(args: PutResourcePolicyCommandInput, cb: (err: any, data?: PutResourcePolicyCommandOutput) => void): void; + putResourcePolicy(args: PutResourcePolicyCommandInput, options: __HttpHandlerOptions, cb: (err: any, data?: PutResourcePolicyCommandOutput) => void): void; + /** + * @see {@link QueryCommand} + */ + query(args: QueryCommandInput, options?: __HttpHandlerOptions): Promise; + query(args: QueryCommandInput, cb: (err: any, data?: QueryCommandOutput) => void): void; + query(args: QueryCommandInput, options: __HttpHandlerOptions, cb: (err: any, data?: QueryCommandOutput) => void): void; + /** + * @see {@link RestoreTableFromBackupCommand} + */ + restoreTableFromBackup(args: RestoreTableFromBackupCommandInput, options?: __HttpHandlerOptions): Promise; + restoreTableFromBackup(args: RestoreTableFromBackupCommandInput, cb: (err: any, data?: RestoreTableFromBackupCommandOutput) => void): void; + restoreTableFromBackup(args: RestoreTableFromBackupCommandInput, options: __HttpHandlerOptions, cb: (err: any, data?: RestoreTableFromBackupCommandOutput) => void): void; + /** + * @see {@link RestoreTableToPointInTimeCommand} + */ + restoreTableToPointInTime(args: RestoreTableToPointInTimeCommandInput, options?: __HttpHandlerOptions): Promise; + restoreTableToPointInTime(args: RestoreTableToPointInTimeCommandInput, cb: (err: any, data?: RestoreTableToPointInTimeCommandOutput) => void): void; + restoreTableToPointInTime(args: RestoreTableToPointInTimeCommandInput, options: __HttpHandlerOptions, cb: (err: any, data?: RestoreTableToPointInTimeCommandOutput) => void): void; + /** + * @see {@link ScanCommand} + */ + scan(args: ScanCommandInput, options?: __HttpHandlerOptions): Promise; + scan(args: ScanCommandInput, cb: (err: any, data?: ScanCommandOutput) => 
void): void; + scan(args: ScanCommandInput, options: __HttpHandlerOptions, cb: (err: any, data?: ScanCommandOutput) => void): void; + /** + * @see {@link TagResourceCommand} + */ + tagResource(args: TagResourceCommandInput, options?: __HttpHandlerOptions): Promise; + tagResource(args: TagResourceCommandInput, cb: (err: any, data?: TagResourceCommandOutput) => void): void; + tagResource(args: TagResourceCommandInput, options: __HttpHandlerOptions, cb: (err: any, data?: TagResourceCommandOutput) => void): void; + /** + * @see {@link TransactGetItemsCommand} + */ + transactGetItems(args: TransactGetItemsCommandInput, options?: __HttpHandlerOptions): Promise; + transactGetItems(args: TransactGetItemsCommandInput, cb: (err: any, data?: TransactGetItemsCommandOutput) => void): void; + transactGetItems(args: TransactGetItemsCommandInput, options: __HttpHandlerOptions, cb: (err: any, data?: TransactGetItemsCommandOutput) => void): void; + /** + * @see {@link TransactWriteItemsCommand} + */ + transactWriteItems(args: TransactWriteItemsCommandInput, options?: __HttpHandlerOptions): Promise; + transactWriteItems(args: TransactWriteItemsCommandInput, cb: (err: any, data?: TransactWriteItemsCommandOutput) => void): void; + transactWriteItems(args: TransactWriteItemsCommandInput, options: __HttpHandlerOptions, cb: (err: any, data?: TransactWriteItemsCommandOutput) => void): void; + /** + * @see {@link UntagResourceCommand} + */ + untagResource(args: UntagResourceCommandInput, options?: __HttpHandlerOptions): Promise; + untagResource(args: UntagResourceCommandInput, cb: (err: any, data?: UntagResourceCommandOutput) => void): void; + untagResource(args: UntagResourceCommandInput, options: __HttpHandlerOptions, cb: (err: any, data?: UntagResourceCommandOutput) => void): void; + /** + * @see {@link UpdateContinuousBackupsCommand} + */ + updateContinuousBackups(args: UpdateContinuousBackupsCommandInput, options?: __HttpHandlerOptions): Promise; + updateContinuousBackups(args: 
UpdateContinuousBackupsCommandInput, cb: (err: any, data?: UpdateContinuousBackupsCommandOutput) => void): void; + updateContinuousBackups(args: UpdateContinuousBackupsCommandInput, options: __HttpHandlerOptions, cb: (err: any, data?: UpdateContinuousBackupsCommandOutput) => void): void; + /** + * @see {@link UpdateContributorInsightsCommand} + */ + updateContributorInsights(args: UpdateContributorInsightsCommandInput, options?: __HttpHandlerOptions): Promise; + updateContributorInsights(args: UpdateContributorInsightsCommandInput, cb: (err: any, data?: UpdateContributorInsightsCommandOutput) => void): void; + updateContributorInsights(args: UpdateContributorInsightsCommandInput, options: __HttpHandlerOptions, cb: (err: any, data?: UpdateContributorInsightsCommandOutput) => void): void; + /** + * @see {@link UpdateGlobalTableCommand} + */ + updateGlobalTable(args: UpdateGlobalTableCommandInput, options?: __HttpHandlerOptions): Promise; + updateGlobalTable(args: UpdateGlobalTableCommandInput, cb: (err: any, data?: UpdateGlobalTableCommandOutput) => void): void; + updateGlobalTable(args: UpdateGlobalTableCommandInput, options: __HttpHandlerOptions, cb: (err: any, data?: UpdateGlobalTableCommandOutput) => void): void; + /** + * @see {@link UpdateGlobalTableSettingsCommand} + */ + updateGlobalTableSettings(args: UpdateGlobalTableSettingsCommandInput, options?: __HttpHandlerOptions): Promise; + updateGlobalTableSettings(args: UpdateGlobalTableSettingsCommandInput, cb: (err: any, data?: UpdateGlobalTableSettingsCommandOutput) => void): void; + updateGlobalTableSettings(args: UpdateGlobalTableSettingsCommandInput, options: __HttpHandlerOptions, cb: (err: any, data?: UpdateGlobalTableSettingsCommandOutput) => void): void; + /** + * @see {@link UpdateItemCommand} + */ + updateItem(args: UpdateItemCommandInput, options?: __HttpHandlerOptions): Promise; + updateItem(args: UpdateItemCommandInput, cb: (err: any, data?: UpdateItemCommandOutput) => void): void; + updateItem(args: 
UpdateItemCommandInput, options: __HttpHandlerOptions, cb: (err: any, data?: UpdateItemCommandOutput) => void): void; + /** + * @see {@link UpdateKinesisStreamingDestinationCommand} + */ + updateKinesisStreamingDestination(args: UpdateKinesisStreamingDestinationCommandInput, options?: __HttpHandlerOptions): Promise; + updateKinesisStreamingDestination(args: UpdateKinesisStreamingDestinationCommandInput, cb: (err: any, data?: UpdateKinesisStreamingDestinationCommandOutput) => void): void; + updateKinesisStreamingDestination(args: UpdateKinesisStreamingDestinationCommandInput, options: __HttpHandlerOptions, cb: (err: any, data?: UpdateKinesisStreamingDestinationCommandOutput) => void): void; + /** + * @see {@link UpdateTableCommand} + */ + updateTable(args: UpdateTableCommandInput, options?: __HttpHandlerOptions): Promise; + updateTable(args: UpdateTableCommandInput, cb: (err: any, data?: UpdateTableCommandOutput) => void): void; + updateTable(args: UpdateTableCommandInput, options: __HttpHandlerOptions, cb: (err: any, data?: UpdateTableCommandOutput) => void): void; + /** + * @see {@link UpdateTableReplicaAutoScalingCommand} + */ + updateTableReplicaAutoScaling(args: UpdateTableReplicaAutoScalingCommandInput, options?: __HttpHandlerOptions): Promise; + updateTableReplicaAutoScaling(args: UpdateTableReplicaAutoScalingCommandInput, cb: (err: any, data?: UpdateTableReplicaAutoScalingCommandOutput) => void): void; + updateTableReplicaAutoScaling(args: UpdateTableReplicaAutoScalingCommandInput, options: __HttpHandlerOptions, cb: (err: any, data?: UpdateTableReplicaAutoScalingCommandOutput) => void): void; + /** + * @see {@link UpdateTimeToLiveCommand} + */ + updateTimeToLive(args: UpdateTimeToLiveCommandInput, options?: __HttpHandlerOptions): Promise; + updateTimeToLive(args: UpdateTimeToLiveCommandInput, cb: (err: any, data?: UpdateTimeToLiveCommandOutput) => void): void; + updateTimeToLive(args: UpdateTimeToLiveCommandInput, options: __HttpHandlerOptions, cb: (err: 
any, data?: UpdateTimeToLiveCommandOutput) => void): void; +} +/** + * Amazon DynamoDB + *

Amazon DynamoDB is a fully managed NoSQL database service that provides fast + * and predictable performance with seamless scalability. DynamoDB lets you + * offload the administrative burdens of operating and scaling a distributed database, so + * that you don't have to worry about hardware provisioning, setup and configuration, + * replication, software patching, or cluster scaling.

+ *

With DynamoDB, you can create database tables that can store and retrieve + * any amount of data, and serve any level of request traffic. You can scale up or scale + * down your tables' throughput capacity without downtime or performance degradation, and + * use the Amazon Web Services Management Console to monitor resource utilization and performance + * metrics.

+ *

DynamoDB automatically spreads the data and traffic for your tables over + * a sufficient number of servers to handle your throughput and storage requirements, while + * maintaining consistent and fast performance. All of your data is stored on solid state + * disks (SSDs) and automatically replicated across multiple Availability Zones in an + * Amazon Web Services Region, providing built-in high availability and data + * durability.

+ * @public + */ +export declare class DynamoDB extends DynamoDBClient implements DynamoDB { +} diff --git a/amplify/functions/deleteDocument/node_modules/@aws-sdk/client-dynamodb/dist-types/DynamoDBClient.d.ts b/amplify/functions/deleteDocument/node_modules/@aws-sdk/client-dynamodb/dist-types/DynamoDBClient.d.ts new file mode 100644 index 0000000..c508d62 --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@aws-sdk/client-dynamodb/dist-types/DynamoDBClient.d.ts @@ -0,0 +1,272 @@ +import { AccountIdEndpointMode, AccountIdEndpointModeInputConfig, AccountIdEndpointModeResolvedConfig } from "@aws-sdk/core/account-id-endpoint"; +import { EndpointDiscoveryInputConfig, EndpointDiscoveryResolvedConfig } from "@aws-sdk/middleware-endpoint-discovery"; +import { HostHeaderInputConfig, HostHeaderResolvedConfig } from "@aws-sdk/middleware-host-header"; +import { UserAgentInputConfig, UserAgentResolvedConfig } from "@aws-sdk/middleware-user-agent"; +import { RegionInputConfig, RegionResolvedConfig } from "@smithy/config-resolver"; +import { EndpointInputConfig, EndpointResolvedConfig } from "@smithy/middleware-endpoint"; +import { RetryInputConfig, RetryResolvedConfig } from "@smithy/middleware-retry"; +import { HttpHandlerUserInput as __HttpHandlerUserInput } from "@smithy/protocol-http"; +import { Client as __Client, DefaultsMode as __DefaultsMode, SmithyConfiguration as __SmithyConfiguration, SmithyResolvedConfiguration as __SmithyResolvedConfiguration } from "@smithy/smithy-client"; +import { AwsCredentialIdentityProvider, BodyLengthCalculator as __BodyLengthCalculator, CheckOptionalClientConfig as __CheckOptionalClientConfig, ChecksumConstructor as __ChecksumConstructor, Decoder as __Decoder, Encoder as __Encoder, HashConstructor as __HashConstructor, HttpHandlerOptions as __HttpHandlerOptions, Logger as __Logger, Provider as __Provider, Provider, StreamCollector as __StreamCollector, UrlParser as __UrlParser, UserAgent as __UserAgent } from "@smithy/types"; 
+import { HttpAuthSchemeInputConfig, HttpAuthSchemeResolvedConfig } from "./auth/httpAuthSchemeProvider"; +import { BatchExecuteStatementCommandInput, BatchExecuteStatementCommandOutput } from "./commands/BatchExecuteStatementCommand"; +import { BatchGetItemCommandInput, BatchGetItemCommandOutput } from "./commands/BatchGetItemCommand"; +import { BatchWriteItemCommandInput, BatchWriteItemCommandOutput } from "./commands/BatchWriteItemCommand"; +import { CreateBackupCommandInput, CreateBackupCommandOutput } from "./commands/CreateBackupCommand"; +import { CreateGlobalTableCommandInput, CreateGlobalTableCommandOutput } from "./commands/CreateGlobalTableCommand"; +import { CreateTableCommandInput, CreateTableCommandOutput } from "./commands/CreateTableCommand"; +import { DeleteBackupCommandInput, DeleteBackupCommandOutput } from "./commands/DeleteBackupCommand"; +import { DeleteItemCommandInput, DeleteItemCommandOutput } from "./commands/DeleteItemCommand"; +import { DeleteResourcePolicyCommandInput, DeleteResourcePolicyCommandOutput } from "./commands/DeleteResourcePolicyCommand"; +import { DeleteTableCommandInput, DeleteTableCommandOutput } from "./commands/DeleteTableCommand"; +import { DescribeBackupCommandInput, DescribeBackupCommandOutput } from "./commands/DescribeBackupCommand"; +import { DescribeContinuousBackupsCommandInput, DescribeContinuousBackupsCommandOutput } from "./commands/DescribeContinuousBackupsCommand"; +import { DescribeContributorInsightsCommandInput, DescribeContributorInsightsCommandOutput } from "./commands/DescribeContributorInsightsCommand"; +import { DescribeEndpointsCommandInput, DescribeEndpointsCommandOutput } from "./commands/DescribeEndpointsCommand"; +import { DescribeExportCommandInput, DescribeExportCommandOutput } from "./commands/DescribeExportCommand"; +import { DescribeGlobalTableCommandInput, DescribeGlobalTableCommandOutput } from "./commands/DescribeGlobalTableCommand"; +import { DescribeGlobalTableSettingsCommandInput, 
DescribeGlobalTableSettingsCommandOutput } from "./commands/DescribeGlobalTableSettingsCommand"; +import { DescribeImportCommandInput, DescribeImportCommandOutput } from "./commands/DescribeImportCommand"; +import { DescribeKinesisStreamingDestinationCommandInput, DescribeKinesisStreamingDestinationCommandOutput } from "./commands/DescribeKinesisStreamingDestinationCommand"; +import { DescribeLimitsCommandInput, DescribeLimitsCommandOutput } from "./commands/DescribeLimitsCommand"; +import { DescribeTableCommandInput, DescribeTableCommandOutput } from "./commands/DescribeTableCommand"; +import { DescribeTableReplicaAutoScalingCommandInput, DescribeTableReplicaAutoScalingCommandOutput } from "./commands/DescribeTableReplicaAutoScalingCommand"; +import { DescribeTimeToLiveCommandInput, DescribeTimeToLiveCommandOutput } from "./commands/DescribeTimeToLiveCommand"; +import { DisableKinesisStreamingDestinationCommandInput, DisableKinesisStreamingDestinationCommandOutput } from "./commands/DisableKinesisStreamingDestinationCommand"; +import { EnableKinesisStreamingDestinationCommandInput, EnableKinesisStreamingDestinationCommandOutput } from "./commands/EnableKinesisStreamingDestinationCommand"; +import { ExecuteStatementCommandInput, ExecuteStatementCommandOutput } from "./commands/ExecuteStatementCommand"; +import { ExecuteTransactionCommandInput, ExecuteTransactionCommandOutput } from "./commands/ExecuteTransactionCommand"; +import { ExportTableToPointInTimeCommandInput, ExportTableToPointInTimeCommandOutput } from "./commands/ExportTableToPointInTimeCommand"; +import { GetItemCommandInput, GetItemCommandOutput } from "./commands/GetItemCommand"; +import { GetResourcePolicyCommandInput, GetResourcePolicyCommandOutput } from "./commands/GetResourcePolicyCommand"; +import { ImportTableCommandInput, ImportTableCommandOutput } from "./commands/ImportTableCommand"; +import { ListBackupsCommandInput, ListBackupsCommandOutput } from "./commands/ListBackupsCommand"; +import { 
ListContributorInsightsCommandInput, ListContributorInsightsCommandOutput } from "./commands/ListContributorInsightsCommand"; +import { ListExportsCommandInput, ListExportsCommandOutput } from "./commands/ListExportsCommand"; +import { ListGlobalTablesCommandInput, ListGlobalTablesCommandOutput } from "./commands/ListGlobalTablesCommand"; +import { ListImportsCommandInput, ListImportsCommandOutput } from "./commands/ListImportsCommand"; +import { ListTablesCommandInput, ListTablesCommandOutput } from "./commands/ListTablesCommand"; +import { ListTagsOfResourceCommandInput, ListTagsOfResourceCommandOutput } from "./commands/ListTagsOfResourceCommand"; +import { PutItemCommandInput, PutItemCommandOutput } from "./commands/PutItemCommand"; +import { PutResourcePolicyCommandInput, PutResourcePolicyCommandOutput } from "./commands/PutResourcePolicyCommand"; +import { QueryCommandInput, QueryCommandOutput } from "./commands/QueryCommand"; +import { RestoreTableFromBackupCommandInput, RestoreTableFromBackupCommandOutput } from "./commands/RestoreTableFromBackupCommand"; +import { RestoreTableToPointInTimeCommandInput, RestoreTableToPointInTimeCommandOutput } from "./commands/RestoreTableToPointInTimeCommand"; +import { ScanCommandInput, ScanCommandOutput } from "./commands/ScanCommand"; +import { TagResourceCommandInput, TagResourceCommandOutput } from "./commands/TagResourceCommand"; +import { TransactGetItemsCommandInput, TransactGetItemsCommandOutput } from "./commands/TransactGetItemsCommand"; +import { TransactWriteItemsCommandInput, TransactWriteItemsCommandOutput } from "./commands/TransactWriteItemsCommand"; +import { UntagResourceCommandInput, UntagResourceCommandOutput } from "./commands/UntagResourceCommand"; +import { UpdateContinuousBackupsCommandInput, UpdateContinuousBackupsCommandOutput } from "./commands/UpdateContinuousBackupsCommand"; +import { UpdateContributorInsightsCommandInput, UpdateContributorInsightsCommandOutput } from 
"./commands/UpdateContributorInsightsCommand"; +import { UpdateGlobalTableCommandInput, UpdateGlobalTableCommandOutput } from "./commands/UpdateGlobalTableCommand"; +import { UpdateGlobalTableSettingsCommandInput, UpdateGlobalTableSettingsCommandOutput } from "./commands/UpdateGlobalTableSettingsCommand"; +import { UpdateItemCommandInput, UpdateItemCommandOutput } from "./commands/UpdateItemCommand"; +import { UpdateKinesisStreamingDestinationCommandInput, UpdateKinesisStreamingDestinationCommandOutput } from "./commands/UpdateKinesisStreamingDestinationCommand"; +import { UpdateTableCommandInput, UpdateTableCommandOutput } from "./commands/UpdateTableCommand"; +import { UpdateTableReplicaAutoScalingCommandInput, UpdateTableReplicaAutoScalingCommandOutput } from "./commands/UpdateTableReplicaAutoScalingCommand"; +import { UpdateTimeToLiveCommandInput, UpdateTimeToLiveCommandOutput } from "./commands/UpdateTimeToLiveCommand"; +import { ClientInputEndpointParameters, ClientResolvedEndpointParameters, EndpointParameters } from "./endpoint/EndpointParameters"; +import { RuntimeExtension, RuntimeExtensionsConfig } from "./runtimeExtensions"; +export { __Client }; +/** + * @public + */ +export type ServiceInputTypes = BatchExecuteStatementCommandInput | BatchGetItemCommandInput | BatchWriteItemCommandInput | CreateBackupCommandInput | CreateGlobalTableCommandInput | CreateTableCommandInput | DeleteBackupCommandInput | DeleteItemCommandInput | DeleteResourcePolicyCommandInput | DeleteTableCommandInput | DescribeBackupCommandInput | DescribeContinuousBackupsCommandInput | DescribeContributorInsightsCommandInput | DescribeEndpointsCommandInput | DescribeExportCommandInput | DescribeGlobalTableCommandInput | DescribeGlobalTableSettingsCommandInput | DescribeImportCommandInput | DescribeKinesisStreamingDestinationCommandInput | DescribeLimitsCommandInput | DescribeTableCommandInput | DescribeTableReplicaAutoScalingCommandInput | DescribeTimeToLiveCommandInput | 
DisableKinesisStreamingDestinationCommandInput | EnableKinesisStreamingDestinationCommandInput | ExecuteStatementCommandInput | ExecuteTransactionCommandInput | ExportTableToPointInTimeCommandInput | GetItemCommandInput | GetResourcePolicyCommandInput | ImportTableCommandInput | ListBackupsCommandInput | ListContributorInsightsCommandInput | ListExportsCommandInput | ListGlobalTablesCommandInput | ListImportsCommandInput | ListTablesCommandInput | ListTagsOfResourceCommandInput | PutItemCommandInput | PutResourcePolicyCommandInput | QueryCommandInput | RestoreTableFromBackupCommandInput | RestoreTableToPointInTimeCommandInput | ScanCommandInput | TagResourceCommandInput | TransactGetItemsCommandInput | TransactWriteItemsCommandInput | UntagResourceCommandInput | UpdateContinuousBackupsCommandInput | UpdateContributorInsightsCommandInput | UpdateGlobalTableCommandInput | UpdateGlobalTableSettingsCommandInput | UpdateItemCommandInput | UpdateKinesisStreamingDestinationCommandInput | UpdateTableCommandInput | UpdateTableReplicaAutoScalingCommandInput | UpdateTimeToLiveCommandInput; +/** + * @public + */ +export type ServiceOutputTypes = BatchExecuteStatementCommandOutput | BatchGetItemCommandOutput | BatchWriteItemCommandOutput | CreateBackupCommandOutput | CreateGlobalTableCommandOutput | CreateTableCommandOutput | DeleteBackupCommandOutput | DeleteItemCommandOutput | DeleteResourcePolicyCommandOutput | DeleteTableCommandOutput | DescribeBackupCommandOutput | DescribeContinuousBackupsCommandOutput | DescribeContributorInsightsCommandOutput | DescribeEndpointsCommandOutput | DescribeExportCommandOutput | DescribeGlobalTableCommandOutput | DescribeGlobalTableSettingsCommandOutput | DescribeImportCommandOutput | DescribeKinesisStreamingDestinationCommandOutput | DescribeLimitsCommandOutput | DescribeTableCommandOutput | DescribeTableReplicaAutoScalingCommandOutput | DescribeTimeToLiveCommandOutput | DisableKinesisStreamingDestinationCommandOutput | 
EnableKinesisStreamingDestinationCommandOutput | ExecuteStatementCommandOutput | ExecuteTransactionCommandOutput | ExportTableToPointInTimeCommandOutput | GetItemCommandOutput | GetResourcePolicyCommandOutput | ImportTableCommandOutput | ListBackupsCommandOutput | ListContributorInsightsCommandOutput | ListExportsCommandOutput | ListGlobalTablesCommandOutput | ListImportsCommandOutput | ListTablesCommandOutput | ListTagsOfResourceCommandOutput | PutItemCommandOutput | PutResourcePolicyCommandOutput | QueryCommandOutput | RestoreTableFromBackupCommandOutput | RestoreTableToPointInTimeCommandOutput | ScanCommandOutput | TagResourceCommandOutput | TransactGetItemsCommandOutput | TransactWriteItemsCommandOutput | UntagResourceCommandOutput | UpdateContinuousBackupsCommandOutput | UpdateContributorInsightsCommandOutput | UpdateGlobalTableCommandOutput | UpdateGlobalTableSettingsCommandOutput | UpdateItemCommandOutput | UpdateKinesisStreamingDestinationCommandOutput | UpdateTableCommandOutput | UpdateTableReplicaAutoScalingCommandOutput | UpdateTimeToLiveCommandOutput; +/** + * @public + */ +export interface ClientDefaults extends Partial<__SmithyConfiguration<__HttpHandlerOptions>> { + /** + * The HTTP handler to use or its constructor options. Fetch in browser and Https in Nodejs. + */ + requestHandler?: __HttpHandlerUserInput; + /** + * A constructor for a class implementing the {@link @smithy/types#ChecksumConstructor} interface + * that computes the SHA-256 HMAC or checksum of a string or binary buffer. + * @internal + */ + sha256?: __ChecksumConstructor | __HashConstructor; + /** + * The function that will be used to convert strings into HTTP endpoints. + * @internal + */ + urlParser?: __UrlParser; + /** + * A function that can calculate the length of a request body. + * @internal + */ + bodyLengthChecker?: __BodyLengthCalculator; + /** + * A function that converts a stream into an array of bytes. 
+ * @internal + */ + streamCollector?: __StreamCollector; + /** + * The function that will be used to convert a base64-encoded string to a byte array. + * @internal + */ + base64Decoder?: __Decoder; + /** + * The function that will be used to convert binary data to a base64-encoded string. + * @internal + */ + base64Encoder?: __Encoder; + /** + * The function that will be used to convert a UTF8-encoded string to a byte array. + * @internal + */ + utf8Decoder?: __Decoder; + /** + * The function that will be used to convert binary data to a UTF-8 encoded string. + * @internal + */ + utf8Encoder?: __Encoder; + /** + * The runtime environment. + * @internal + */ + runtime?: string; + /** + * Disable dynamically changing the endpoint of the client based on the hostPrefix + * trait of an operation. + */ + disableHostPrefix?: boolean; + /** + * Unique service identifier. + * @internal + */ + serviceId?: string; + /** + * Enables IPv6/IPv4 dualstack endpoint. + */ + useDualstackEndpoint?: boolean | __Provider; + /** + * Enables FIPS compatible endpoints. + */ + useFipsEndpoint?: boolean | __Provider; + /** + * The AWS region to which this client will send requests + */ + region?: string | __Provider; + /** + * Setting a client profile is similar to setting a value for the + * AWS_PROFILE environment variable. Setting a profile on a client + * in code only affects the single client instance, unlike AWS_PROFILE. + * + * When set, and only for environments where an AWS configuration + * file exists, fields configurable by this file will be retrieved + * from the specified profile within that file. + * Conflicting code configuration and environment variables will + * still have higher priority. + * + * For client credential resolution that involves checking the AWS + * configuration file, the client's profile (this value) will be + * used unless a different profile is set in the credential + * provider options. 
+ * + */ + profile?: string; + /** + * Defines if the AWS AccountId will be used for endpoint routing. + */ + accountIdEndpointMode?: AccountIdEndpointMode | __Provider; + /** + * The provider populating default tracking information to be sent with `user-agent`, `x-amz-user-agent` header + * @internal + */ + defaultUserAgentProvider?: Provider<__UserAgent>; + /** + * Default credentials provider; Not available in browser runtime. + * @deprecated + * @internal + */ + credentialDefaultProvider?: (input: any) => AwsCredentialIdentityProvider; + /** + * Value for how many times a request will be made at most in case of retry. + */ + maxAttempts?: number | __Provider; + /** + * Specifies which retry algorithm to use. + * @see https://docs.aws.amazon.com/AWSJavaScriptSDK/v3/latest/Package/-smithy-util-retry/Enum/RETRY_MODES/ + * + */ + retryMode?: string | __Provider; + /** + * Optional logger for logging debug/info/warn/error. + */ + logger?: __Logger; + /** + * Optional extensions + */ + extensions?: RuntimeExtension[]; + /** + * The {@link @smithy/smithy-client#DefaultsMode} that will be used to determine how certain default configuration options are resolved in the SDK. + */ + defaultsMode?: __DefaultsMode | __Provider<__DefaultsMode>; + /** + * The provider which populates default for endpointDiscoveryEnabled configuration, if it's + * not passed during client creation. + * @internal + */ + endpointDiscoveryEnabledProvider?: __Provider; +} +/** + * @public + */ +export type DynamoDBClientConfigType = Partial<__SmithyConfiguration<__HttpHandlerOptions>> & ClientDefaults & AccountIdEndpointModeInputConfig & UserAgentInputConfig & RetryInputConfig & RegionInputConfig & HostHeaderInputConfig & EndpointInputConfig & HttpAuthSchemeInputConfig & EndpointDiscoveryInputConfig & ClientInputEndpointParameters; +/** + * @public + * + * The configuration interface of DynamoDBClient class constructor that set the region, credentials and other options. 
+ */ +export interface DynamoDBClientConfig extends DynamoDBClientConfigType { +} +/** + * @public + */ +export type DynamoDBClientResolvedConfigType = __SmithyResolvedConfiguration<__HttpHandlerOptions> & Required & RuntimeExtensionsConfig & AccountIdEndpointModeResolvedConfig & UserAgentResolvedConfig & RetryResolvedConfig & RegionResolvedConfig & HostHeaderResolvedConfig & EndpointResolvedConfig & HttpAuthSchemeResolvedConfig & EndpointDiscoveryResolvedConfig & ClientResolvedEndpointParameters; +/** + * @public + * + * The resolved configuration interface of DynamoDBClient class. This is resolved and normalized from the {@link DynamoDBClientConfig | constructor configuration interface}. + */ +export interface DynamoDBClientResolvedConfig extends DynamoDBClientResolvedConfigType { +} +/** + * Amazon DynamoDB + *

Amazon DynamoDB is a fully managed NoSQL database service that provides fast + * and predictable performance with seamless scalability. DynamoDB lets you + * offload the administrative burdens of operating and scaling a distributed database, so + * that you don't have to worry about hardware provisioning, setup and configuration, + * replication, software patching, or cluster scaling.

+ *

With DynamoDB, you can create database tables that can store and retrieve + * any amount of data, and serve any level of request traffic. You can scale up or scale + * down your tables' throughput capacity without downtime or performance degradation, and + * use the Amazon Web Services Management Console to monitor resource utilization and performance + * metrics.

+ *

DynamoDB automatically spreads the data and traffic for your tables over + * a sufficient number of servers to handle your throughput and storage requirements, while + * maintaining consistent and fast performance. All of your data is stored on solid state + * disks (SSDs) and automatically replicated across multiple Availability Zones in an + * Amazon Web Services Region, providing built-in high availability and data + * durability.

+ * @public + */ +export declare class DynamoDBClient extends __Client<__HttpHandlerOptions, ServiceInputTypes, ServiceOutputTypes, DynamoDBClientResolvedConfig> { + /** + * The resolved configuration of DynamoDBClient class. This is resolved and normalized from the {@link DynamoDBClientConfig | constructor configuration interface}. + */ + readonly config: DynamoDBClientResolvedConfig; + constructor(...[configuration]: __CheckOptionalClientConfig); + /** + * Destroy underlying resources, like sockets. It's usually not necessary to do this. + * However in Node.js, it's best to explicitly shut down the client's agent when it is no longer needed. + * Otherwise, sockets might stay open for quite a long time before the server terminates them. + */ + destroy(): void; +} diff --git a/amplify/functions/deleteDocument/node_modules/@aws-sdk/client-dynamodb/dist-types/auth/httpAuthExtensionConfiguration.d.ts b/amplify/functions/deleteDocument/node_modules/@aws-sdk/client-dynamodb/dist-types/auth/httpAuthExtensionConfiguration.d.ts new file mode 100644 index 0000000..b2b8f76 --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@aws-sdk/client-dynamodb/dist-types/auth/httpAuthExtensionConfiguration.d.ts @@ -0,0 +1,29 @@ +import { AwsCredentialIdentity, AwsCredentialIdentityProvider, HttpAuthScheme } from "@smithy/types"; +import { DynamoDBHttpAuthSchemeProvider } from "./httpAuthSchemeProvider"; +/** + * @internal + */ +export interface HttpAuthExtensionConfiguration { + setHttpAuthScheme(httpAuthScheme: HttpAuthScheme): void; + httpAuthSchemes(): HttpAuthScheme[]; + setHttpAuthSchemeProvider(httpAuthSchemeProvider: DynamoDBHttpAuthSchemeProvider): void; + httpAuthSchemeProvider(): DynamoDBHttpAuthSchemeProvider; + setCredentials(credentials: AwsCredentialIdentity | AwsCredentialIdentityProvider): void; + credentials(): AwsCredentialIdentity | AwsCredentialIdentityProvider | undefined; +} +/** + * @internal + */ +export type HttpAuthRuntimeConfig = Partial<{ + 
httpAuthSchemes: HttpAuthScheme[]; + httpAuthSchemeProvider: DynamoDBHttpAuthSchemeProvider; + credentials: AwsCredentialIdentity | AwsCredentialIdentityProvider; +}>; +/** + * @internal + */ +export declare const getHttpAuthExtensionConfiguration: (runtimeConfig: HttpAuthRuntimeConfig) => HttpAuthExtensionConfiguration; +/** + * @internal + */ +export declare const resolveHttpAuthRuntimeConfig: (config: HttpAuthExtensionConfiguration) => HttpAuthRuntimeConfig; diff --git a/amplify/functions/deleteDocument/node_modules/@aws-sdk/client-dynamodb/dist-types/auth/httpAuthSchemeProvider.d.ts b/amplify/functions/deleteDocument/node_modules/@aws-sdk/client-dynamodb/dist-types/auth/httpAuthSchemeProvider.d.ts new file mode 100644 index 0000000..3760ff0 --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@aws-sdk/client-dynamodb/dist-types/auth/httpAuthSchemeProvider.d.ts @@ -0,0 +1,75 @@ +import { AwsSdkSigV4AuthInputConfig, AwsSdkSigV4AuthResolvedConfig, AwsSdkSigV4PreviouslyResolved } from "@aws-sdk/core"; +import { HandlerExecutionContext, HttpAuthScheme, HttpAuthSchemeParameters, HttpAuthSchemeParametersProvider, HttpAuthSchemeProvider, Provider } from "@smithy/types"; +import { DynamoDBClientResolvedConfig } from "../DynamoDBClient"; +/** + * @internal + */ +export interface DynamoDBHttpAuthSchemeParameters extends HttpAuthSchemeParameters { + region?: string; +} +/** + * @internal + */ +export interface DynamoDBHttpAuthSchemeParametersProvider extends HttpAuthSchemeParametersProvider { +} +/** + * @internal + */ +export declare const defaultDynamoDBHttpAuthSchemeParametersProvider: (config: DynamoDBClientResolvedConfig, context: HandlerExecutionContext, input: object) => Promise; +/** + * @internal + */ +export interface DynamoDBHttpAuthSchemeProvider extends HttpAuthSchemeProvider { +} +/** + * @internal + */ +export declare const defaultDynamoDBHttpAuthSchemeProvider: DynamoDBHttpAuthSchemeProvider; +/** + * @internal + */ +export interface 
HttpAuthSchemeInputConfig extends AwsSdkSigV4AuthInputConfig { + /** + * A comma-separated list of case-sensitive auth scheme names. + * An auth scheme name is a fully qualified auth scheme ID with the namespace prefix trimmed. + * For example, the auth scheme with ID aws.auth#sigv4 is named sigv4. + * @public + */ + authSchemePreference?: string[] | Provider; + /** + * Configuration of HttpAuthSchemes for a client which provides default identity providers and signers per auth scheme. + * @internal + */ + httpAuthSchemes?: HttpAuthScheme[]; + /** + * Configuration of an HttpAuthSchemeProvider for a client which resolves which HttpAuthScheme to use. + * @internal + */ + httpAuthSchemeProvider?: DynamoDBHttpAuthSchemeProvider; +} +/** + * @internal + */ +export interface HttpAuthSchemeResolvedConfig extends AwsSdkSigV4AuthResolvedConfig { + /** + * A comma-separated list of case-sensitive auth scheme names. + * An auth scheme name is a fully qualified auth scheme ID with the namespace prefix trimmed. + * For example, the auth scheme with ID aws.auth#sigv4 is named sigv4. + * @public + */ + readonly authSchemePreference: Provider; + /** + * Configuration of HttpAuthSchemes for a client which provides default identity providers and signers per auth scheme. + * @internal + */ + readonly httpAuthSchemes: HttpAuthScheme[]; + /** + * Configuration of an HttpAuthSchemeProvider for a client which resolves which HttpAuthScheme to use. 
+ * @internal + */ + readonly httpAuthSchemeProvider: DynamoDBHttpAuthSchemeProvider; +} +/** + * @internal + */ +export declare const resolveHttpAuthSchemeConfig: (config: T & HttpAuthSchemeInputConfig & AwsSdkSigV4PreviouslyResolved) => T & HttpAuthSchemeResolvedConfig; diff --git a/amplify/functions/deleteDocument/node_modules/@aws-sdk/client-dynamodb/dist-types/commands/BatchExecuteStatementCommand.d.ts b/amplify/functions/deleteDocument/node_modules/@aws-sdk/client-dynamodb/dist-types/commands/BatchExecuteStatementCommand.d.ts new file mode 100644 index 0000000..0d8793c --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@aws-sdk/client-dynamodb/dist-types/commands/BatchExecuteStatementCommand.d.ts @@ -0,0 +1,227 @@ +import { Command as $Command } from "@smithy/smithy-client"; +import { MetadataBearer as __MetadataBearer } from "@smithy/types"; +import { DynamoDBClientResolvedConfig, ServiceInputTypes, ServiceOutputTypes } from "../DynamoDBClient"; +import { BatchExecuteStatementInput, BatchExecuteStatementOutput } from "../models/models_0"; +/** + * @public + */ +export type { __MetadataBearer }; +export { $Command }; +/** + * @public + * + * The input for {@link BatchExecuteStatementCommand}. + */ +export interface BatchExecuteStatementCommandInput extends BatchExecuteStatementInput { +} +/** + * @public + * + * The output of {@link BatchExecuteStatementCommand}. + */ +export interface BatchExecuteStatementCommandOutput extends BatchExecuteStatementOutput, __MetadataBearer { +} +declare const BatchExecuteStatementCommand_base: { + new (input: BatchExecuteStatementCommandInput): import("@smithy/smithy-client").CommandImpl; + new (__0_0: BatchExecuteStatementCommandInput): import("@smithy/smithy-client").CommandImpl; + getEndpointParameterInstructions(): import("@smithy/middleware-endpoint").EndpointParameterInstructions; +}; +/** + *

This operation allows you to perform batch reads or writes on data stored in DynamoDB, + * using PartiQL. Each read statement in a BatchExecuteStatement must specify + * an equality condition on all key attributes. This enforces that each SELECT + * statement in a batch returns at most a single item. For more information, see Running batch operations with PartiQL for DynamoDB .

+ * + *

The entire batch must consist of either read statements or write statements, you + * cannot mix both in one batch.

+ *
+ * + *

A HTTP 200 response does not mean that all statements in the BatchExecuteStatement + * succeeded. Error details for individual statements can be found under the Error field of the BatchStatementResponse for each + * statement.

+ *
+ * @example + * Use a bare-bones client and the command you need to make an API call. + * ```javascript + * import { DynamoDBClient, BatchExecuteStatementCommand } from "@aws-sdk/client-dynamodb"; // ES Modules import + * // const { DynamoDBClient, BatchExecuteStatementCommand } = require("@aws-sdk/client-dynamodb"); // CommonJS import + * const client = new DynamoDBClient(config); + * const input = { // BatchExecuteStatementInput + * Statements: [ // PartiQLBatchRequest // required + * { // BatchStatementRequest + * Statement: "STRING_VALUE", // required + * Parameters: [ // PreparedStatementParameters + * { // AttributeValue Union: only one key present + * S: "STRING_VALUE", + * N: "STRING_VALUE", + * B: new Uint8Array(), // e.g. Buffer.from("") or new TextEncoder().encode("") + * SS: [ // StringSetAttributeValue + * "STRING_VALUE", + * ], + * NS: [ // NumberSetAttributeValue + * "STRING_VALUE", + * ], + * BS: [ // BinarySetAttributeValue + * new Uint8Array(), // e.g. Buffer.from("") or new TextEncoder().encode("") + * ], + * M: { // MapAttributeValue + * "": {// Union: only one key present + * S: "STRING_VALUE", + * N: "STRING_VALUE", + * B: new Uint8Array(), // e.g. Buffer.from("") or new TextEncoder().encode("") + * SS: [ + * "STRING_VALUE", + * ], + * NS: [ + * "STRING_VALUE", + * ], + * BS: [ + * new Uint8Array(), // e.g. 
Buffer.from("") or new TextEncoder().encode("") + * ], + * M: { + * "": "", + * }, + * L: [ // ListAttributeValue + * "", + * ], + * NULL: true || false, + * BOOL: true || false, + * }, + * }, + * L: [ + * "", + * ], + * NULL: true || false, + * BOOL: true || false, + * }, + * ], + * ConsistentRead: true || false, + * ReturnValuesOnConditionCheckFailure: "ALL_OLD" || "NONE", + * }, + * ], + * ReturnConsumedCapacity: "INDEXES" || "TOTAL" || "NONE", + * }; + * const command = new BatchExecuteStatementCommand(input); + * const response = await client.send(command); + * // { // BatchExecuteStatementOutput + * // Responses: [ // PartiQLBatchResponse + * // { // BatchStatementResponse + * // Error: { // BatchStatementError + * // Code: "ConditionalCheckFailed" || "ItemCollectionSizeLimitExceeded" || "RequestLimitExceeded" || "ValidationError" || "ProvisionedThroughputExceeded" || "TransactionConflict" || "ThrottlingError" || "InternalServerError" || "ResourceNotFound" || "AccessDenied" || "DuplicateItem", + * // Message: "STRING_VALUE", + * // Item: { // AttributeMap + * // "": { // AttributeValue Union: only one key present + * // S: "STRING_VALUE", + * // N: "STRING_VALUE", + * // B: new Uint8Array(), + * // SS: [ // StringSetAttributeValue + * // "STRING_VALUE", + * // ], + * // NS: [ // NumberSetAttributeValue + * // "STRING_VALUE", + * // ], + * // BS: [ // BinarySetAttributeValue + * // new Uint8Array(), + * // ], + * // M: { // MapAttributeValue + * // "": {// Union: only one key present + * // S: "STRING_VALUE", + * // N: "STRING_VALUE", + * // B: new Uint8Array(), + * // SS: [ + * // "STRING_VALUE", + * // ], + * // NS: [ + * // "STRING_VALUE", + * // ], + * // BS: [ + * // new Uint8Array(), + * // ], + * // M: { + * // "": "", + * // }, + * // L: [ // ListAttributeValue + * // "", + * // ], + * // NULL: true || false, + * // BOOL: true || false, + * // }, + * // }, + * // L: [ + * // "", + * // ], + * // NULL: true || false, + * // BOOL: true || false, + * // 
}, + * // }, + * // }, + * // TableName: "STRING_VALUE", + * // Item: { + * // "": "", + * // }, + * // }, + * // ], + * // ConsumedCapacity: [ // ConsumedCapacityMultiple + * // { // ConsumedCapacity + * // TableName: "STRING_VALUE", + * // CapacityUnits: Number("double"), + * // ReadCapacityUnits: Number("double"), + * // WriteCapacityUnits: Number("double"), + * // Table: { // Capacity + * // ReadCapacityUnits: Number("double"), + * // WriteCapacityUnits: Number("double"), + * // CapacityUnits: Number("double"), + * // }, + * // LocalSecondaryIndexes: { // SecondaryIndexesCapacityMap + * // "": { + * // ReadCapacityUnits: Number("double"), + * // WriteCapacityUnits: Number("double"), + * // CapacityUnits: Number("double"), + * // }, + * // }, + * // GlobalSecondaryIndexes: { + * // "": { + * // ReadCapacityUnits: Number("double"), + * // WriteCapacityUnits: Number("double"), + * // CapacityUnits: Number("double"), + * // }, + * // }, + * // }, + * // ], + * // }; + * + * ``` + * + * @param BatchExecuteStatementCommandInput - {@link BatchExecuteStatementCommandInput} + * @returns {@link BatchExecuteStatementCommandOutput} + * @see {@link BatchExecuteStatementCommandInput} for command's `input` shape. + * @see {@link BatchExecuteStatementCommandOutput} for command's `response` shape. + * @see {@link DynamoDBClientResolvedConfig | config} for DynamoDBClient's `config` shape. + * + * @throws {@link InternalServerError} (server fault) + *

An error occurred on the server side.

+ * + * @throws {@link RequestLimitExceeded} (client fault) + *

Throughput exceeds the current throughput quota for your account. Please contact + * Amazon Web ServicesSupport to request a + * quota increase.

+ * + * @throws {@link DynamoDBServiceException} + *

Base exception class for all service exceptions from DynamoDB service.

+ * + * + * @public + */ +export declare class BatchExecuteStatementCommand extends BatchExecuteStatementCommand_base { + /** @internal type navigation helper, not in runtime. */ + protected static __types: { + api: { + input: BatchExecuteStatementInput; + output: BatchExecuteStatementOutput; + }; + sdk: { + input: BatchExecuteStatementCommandInput; + output: BatchExecuteStatementCommandOutput; + }; + }; +} diff --git a/amplify/functions/deleteDocument/node_modules/@aws-sdk/client-dynamodb/dist-types/commands/BatchGetItemCommand.d.ts b/amplify/functions/deleteDocument/node_modules/@aws-sdk/client-dynamodb/dist-types/commands/BatchGetItemCommand.d.ts new file mode 100644 index 0000000..aee4fd5 --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@aws-sdk/client-dynamodb/dist-types/commands/BatchGetItemCommand.d.ts @@ -0,0 +1,357 @@ +import { Command as $Command } from "@smithy/smithy-client"; +import { MetadataBearer as __MetadataBearer } from "@smithy/types"; +import { DynamoDBClientResolvedConfig, ServiceInputTypes, ServiceOutputTypes } from "../DynamoDBClient"; +import { BatchGetItemInput, BatchGetItemOutput } from "../models/models_0"; +/** + * @public + */ +export type { __MetadataBearer }; +export { $Command }; +/** + * @public + * + * The input for {@link BatchGetItemCommand}. + */ +export interface BatchGetItemCommandInput extends BatchGetItemInput { +} +/** + * @public + * + * The output of {@link BatchGetItemCommand}. + */ +export interface BatchGetItemCommandOutput extends BatchGetItemOutput, __MetadataBearer { +} +declare const BatchGetItemCommand_base: { + new (input: BatchGetItemCommandInput): import("@smithy/smithy-client").CommandImpl; + new (__0_0: BatchGetItemCommandInput): import("@smithy/smithy-client").CommandImpl; + getEndpointParameterInstructions(): import("@smithy/middleware-endpoint").EndpointParameterInstructions; +}; +/** + *

The BatchGetItem operation returns the attributes of one or more items + * from one or more tables. You identify requested items by primary key.

+ *

A single operation can retrieve up to 16 MB of data, which can contain as many as 100 + * items. BatchGetItem returns a partial result if the response size limit is + * exceeded, the table's provisioned throughput is exceeded, more than 1MB per partition is + * requested, or an internal processing failure occurs. If a partial result is returned, + * the operation returns a value for UnprocessedKeys. You can use this value + * to retry the operation starting with the next item to get.

+ * + *

If you request more than 100 items, BatchGetItem returns a + * ValidationException with the message "Too many items requested for + * the BatchGetItem call."

+ *
+ *

For example, if you ask to retrieve 100 items, but each individual item is 300 KB in + * size, the system returns 52 items (so as not to exceed the 16 MB limit). It also returns + * an appropriate UnprocessedKeys value so you can get the next page of + * results. If desired, your application can include its own logic to assemble the pages of + * results into one dataset.

+ *

If none of the items can be processed due to insufficient + * provisioned throughput on all of the tables in the request, then + * BatchGetItem returns a + * ProvisionedThroughputExceededException. If at least + * one of the items is successfully processed, then + * BatchGetItem completes successfully, while returning the keys of the + * unread items in UnprocessedKeys.

+ * + *

If DynamoDB returns any unprocessed items, you should retry the batch operation on + * those items. However, we strongly recommend that you use an exponential + * backoff algorithm. If you retry the batch operation immediately, the + * underlying read or write requests can still fail due to throttling on the individual + * tables. If you delay the batch operation using exponential backoff, the individual + * requests in the batch are much more likely to succeed.

+ *

For more information, see Batch Operations and Error Handling in the Amazon DynamoDB + * Developer Guide.

+ *
+ *

By default, BatchGetItem performs eventually consistent reads on every + * table in the request. If you want strongly consistent reads instead, you can set + * ConsistentRead to true for any or all tables.

+ *

In order to minimize response latency, BatchGetItem may retrieve items in + * parallel.

+ *

When designing your application, keep in mind that DynamoDB does not return items in + * any particular order. To help parse the response by item, include the primary key values + * for the items in your request in the ProjectionExpression parameter.

+ *

If a requested item does not exist, it is not returned in the result. Requests for + * nonexistent items consume the minimum read capacity units according to the type of read. + * For more information, see Working with Tables in the Amazon DynamoDB Developer + * Guide.

+ * + *

+ * BatchGetItem will result in a ValidationException if the + * same key is specified multiple times.

+ *
+ * @example + * Use a bare-bones client and the command you need to make an API call. + * ```javascript + * import { DynamoDBClient, BatchGetItemCommand } from "@aws-sdk/client-dynamodb"; // ES Modules import + * // const { DynamoDBClient, BatchGetItemCommand } = require("@aws-sdk/client-dynamodb"); // CommonJS import + * const client = new DynamoDBClient(config); + * const input = { // BatchGetItemInput + * RequestItems: { // BatchGetRequestMap // required + * "": { // KeysAndAttributes + * Keys: [ // KeyList // required + * { // Key + * "": { // AttributeValue Union: only one key present + * S: "STRING_VALUE", + * N: "STRING_VALUE", + * B: new Uint8Array(), // e.g. Buffer.from("") or new TextEncoder().encode("") + * SS: [ // StringSetAttributeValue + * "STRING_VALUE", + * ], + * NS: [ // NumberSetAttributeValue + * "STRING_VALUE", + * ], + * BS: [ // BinarySetAttributeValue + * new Uint8Array(), // e.g. Buffer.from("") or new TextEncoder().encode("") + * ], + * M: { // MapAttributeValue + * "": {// Union: only one key present + * S: "STRING_VALUE", + * N: "STRING_VALUE", + * B: new Uint8Array(), // e.g. Buffer.from("") or new TextEncoder().encode("") + * SS: [ + * "STRING_VALUE", + * ], + * NS: [ + * "STRING_VALUE", + * ], + * BS: [ + * new Uint8Array(), // e.g. 
Buffer.from("") or new TextEncoder().encode("") + * ], + * M: { + * "": "", + * }, + * L: [ // ListAttributeValue + * "", + * ], + * NULL: true || false, + * BOOL: true || false, + * }, + * }, + * L: [ + * "", + * ], + * NULL: true || false, + * BOOL: true || false, + * }, + * }, + * ], + * AttributesToGet: [ // AttributeNameList + * "STRING_VALUE", + * ], + * ConsistentRead: true || false, + * ProjectionExpression: "STRING_VALUE", + * ExpressionAttributeNames: { // ExpressionAttributeNameMap + * "": "STRING_VALUE", + * }, + * }, + * }, + * ReturnConsumedCapacity: "INDEXES" || "TOTAL" || "NONE", + * }; + * const command = new BatchGetItemCommand(input); + * const response = await client.send(command); + * // { // BatchGetItemOutput + * // Responses: { // BatchGetResponseMap + * // "": [ // ItemList + * // { // AttributeMap + * // "": { // AttributeValue Union: only one key present + * // S: "STRING_VALUE", + * // N: "STRING_VALUE", + * // B: new Uint8Array(), + * // SS: [ // StringSetAttributeValue + * // "STRING_VALUE", + * // ], + * // NS: [ // NumberSetAttributeValue + * // "STRING_VALUE", + * // ], + * // BS: [ // BinarySetAttributeValue + * // new Uint8Array(), + * // ], + * // M: { // MapAttributeValue + * // "": {// Union: only one key present + * // S: "STRING_VALUE", + * // N: "STRING_VALUE", + * // B: new Uint8Array(), + * // SS: [ + * // "STRING_VALUE", + * // ], + * // NS: [ + * // "STRING_VALUE", + * // ], + * // BS: [ + * // new Uint8Array(), + * // ], + * // M: { + * // "": "", + * // }, + * // L: [ // ListAttributeValue + * // "", + * // ], + * // NULL: true || false, + * // BOOL: true || false, + * // }, + * // }, + * // L: [ + * // "", + * // ], + * // NULL: true || false, + * // BOOL: true || false, + * // }, + * // }, + * // ], + * // }, + * // UnprocessedKeys: { // BatchGetRequestMap + * // "": { // KeysAndAttributes + * // Keys: [ // KeyList // required + * // { // Key + * // "": "", + * // }, + * // ], + * // AttributesToGet: [ // 
AttributeNameList + * // "STRING_VALUE", + * // ], + * // ConsistentRead: true || false, + * // ProjectionExpression: "STRING_VALUE", + * // ExpressionAttributeNames: { // ExpressionAttributeNameMap + * // "": "STRING_VALUE", + * // }, + * // }, + * // }, + * // ConsumedCapacity: [ // ConsumedCapacityMultiple + * // { // ConsumedCapacity + * // TableName: "STRING_VALUE", + * // CapacityUnits: Number("double"), + * // ReadCapacityUnits: Number("double"), + * // WriteCapacityUnits: Number("double"), + * // Table: { // Capacity + * // ReadCapacityUnits: Number("double"), + * // WriteCapacityUnits: Number("double"), + * // CapacityUnits: Number("double"), + * // }, + * // LocalSecondaryIndexes: { // SecondaryIndexesCapacityMap + * // "": { + * // ReadCapacityUnits: Number("double"), + * // WriteCapacityUnits: Number("double"), + * // CapacityUnits: Number("double"), + * // }, + * // }, + * // GlobalSecondaryIndexes: { + * // "": { + * // ReadCapacityUnits: Number("double"), + * // WriteCapacityUnits: Number("double"), + * // CapacityUnits: Number("double"), + * // }, + * // }, + * // }, + * // ], + * // }; + * + * ``` + * + * @param BatchGetItemCommandInput - {@link BatchGetItemCommandInput} + * @returns {@link BatchGetItemCommandOutput} + * @see {@link BatchGetItemCommandInput} for command's `input` shape. + * @see {@link BatchGetItemCommandOutput} for command's `response` shape. + * @see {@link DynamoDBClientResolvedConfig | config} for DynamoDBClient's `config` shape. + * + * @throws {@link InternalServerError} (server fault) + *

An error occurred on the server side.

+ * + * @throws {@link InvalidEndpointException} (client fault) + * + * @throws {@link ProvisionedThroughputExceededException} (client fault) + *

Your request rate is too high. The Amazon Web Services SDKs for DynamoDB + * automatically retry requests that receive this exception. Your request is eventually + * successful, unless your retry queue is too large to finish. Reduce the frequency of + * requests and use exponential backoff. For more information, go to Error Retries and Exponential Backoff in the Amazon DynamoDB Developer Guide.

+ * + * @throws {@link RequestLimitExceeded} (client fault) + *

Throughput exceeds the current throughput quota for your account. Please contact + * Amazon Web ServicesSupport to request a + * quota increase.

+ * + * @throws {@link ResourceNotFoundException} (client fault) + *

The operation tried to access a nonexistent table or index. The resource might not + * be specified correctly, or its status might not be ACTIVE.

+ * + * @throws {@link DynamoDBServiceException} + *

Base exception class for all service exceptions from DynamoDB service.

+ * + * + * @example To retrieve multiple items from a table + * ```javascript + * // This example reads multiple items from the Music table using a batch of three GetItem requests. Only the AlbumTitle attribute is returned. + * const input = { + * RequestItems: { + * Music: { + * Keys: [ + * { + * Artist: { + * S: "No One You Know" + * }, + * SongTitle: { + * S: "Call Me Today" + * } + * }, + * { + * Artist: { + * S: "Acme Band" + * }, + * SongTitle: { + * S: "Happy Day" + * } + * }, + * { + * Artist: { + * S: "No One You Know" + * }, + * SongTitle: { + * S: "Scared of My Shadow" + * } + * } + * ], + * ProjectionExpression: "AlbumTitle" + * } + * } + * }; + * const command = new BatchGetItemCommand(input); + * const response = await client.send(command); + * /* response is + * { + * Responses: { + * Music: [ + * { + * AlbumTitle: { + * S: "Somewhat Famous" + * } + * }, + * { + * AlbumTitle: { + * S: "Blue Sky Blues" + * } + * }, + * { + * AlbumTitle: { + * S: "Louder Than Ever" + * } + * } + * ] + * } + * } + * *\/ + * ``` + * + * @public + */ +export declare class BatchGetItemCommand extends BatchGetItemCommand_base { + /** @internal type navigation helper, not in runtime. 
*/ + protected static __types: { + api: { + input: BatchGetItemInput; + output: BatchGetItemOutput; + }; + sdk: { + input: BatchGetItemCommandInput; + output: BatchGetItemCommandOutput; + }; + }; +} diff --git a/amplify/functions/deleteDocument/node_modules/@aws-sdk/client-dynamodb/dist-types/commands/BatchWriteItemCommand.d.ts b/amplify/functions/deleteDocument/node_modules/@aws-sdk/client-dynamodb/dist-types/commands/BatchWriteItemCommand.d.ts new file mode 100644 index 0000000..13bed0c --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@aws-sdk/client-dynamodb/dist-types/commands/BatchWriteItemCommand.d.ts @@ -0,0 +1,401 @@ +import { Command as $Command } from "@smithy/smithy-client"; +import { MetadataBearer as __MetadataBearer } from "@smithy/types"; +import { DynamoDBClientResolvedConfig, ServiceInputTypes, ServiceOutputTypes } from "../DynamoDBClient"; +import { BatchWriteItemInput, BatchWriteItemOutput } from "../models/models_0"; +/** + * @public + */ +export type { __MetadataBearer }; +export { $Command }; +/** + * @public + * + * The input for {@link BatchWriteItemCommand}. + */ +export interface BatchWriteItemCommandInput extends BatchWriteItemInput { +} +/** + * @public + * + * The output of {@link BatchWriteItemCommand}. + */ +export interface BatchWriteItemCommandOutput extends BatchWriteItemOutput, __MetadataBearer { +} +declare const BatchWriteItemCommand_base: { + new (input: BatchWriteItemCommandInput): import("@smithy/smithy-client").CommandImpl; + new (__0_0: BatchWriteItemCommandInput): import("@smithy/smithy-client").CommandImpl; + getEndpointParameterInstructions(): import("@smithy/middleware-endpoint").EndpointParameterInstructions; +}; +/** + *

The BatchWriteItem operation puts or deletes multiple items in one or + * more tables. A single call to BatchWriteItem can transmit up to 16MB of + * data over the network, consisting of up to 25 item put or delete operations. While + * individual items can be up to 400 KB once stored, it's important to note that an item's + * representation might be greater than 400KB while being sent in DynamoDB's JSON format + * for the API call. For more details on this distinction, see Naming Rules and Data Types.

+ * + *

+ * BatchWriteItem cannot update items. If you perform a + * BatchWriteItem operation on an existing item, that item's values + * will be overwritten by the operation and it will appear like it was updated. To + * update items, we recommend you use the UpdateItem action.

+ *
+ *

The individual PutItem and DeleteItem operations specified + * in BatchWriteItem are atomic; however BatchWriteItem as a + * whole is not. If any requested operations fail because the table's provisioned + * throughput is exceeded or an internal processing failure occurs, the failed operations + * are returned in the UnprocessedItems response parameter. You can + * investigate and optionally resend the requests. Typically, you would call + * BatchWriteItem in a loop. Each iteration would check for unprocessed + * items and submit a new BatchWriteItem request with those unprocessed items + * until all items have been processed.

+ *

For tables and indexes with provisioned capacity, if none of the items can be + * processed due to insufficient provisioned throughput on all of the tables in the + * request, then BatchWriteItem returns a + * ProvisionedThroughputExceededException. For all tables and indexes, if + * none of the items can be processed due to other throttling scenarios (such as exceeding + * partition level limits), then BatchWriteItem returns a + * ThrottlingException.

+ * + *

If DynamoDB returns any unprocessed items, you should retry the batch operation on + * those items. However, we strongly recommend that you use an exponential + * backoff algorithm. If you retry the batch operation immediately, the + * underlying read or write requests can still fail due to throttling on the individual + * tables. If you delay the batch operation using exponential backoff, the individual + * requests in the batch are much more likely to succeed.

+ *

For more information, see Batch Operations and Error Handling in the Amazon DynamoDB + * Developer Guide.

+ *
+ *

With BatchWriteItem, you can efficiently write or delete large amounts of + * data, such as from Amazon EMR, or copy data from another database into DynamoDB. In + * order to improve performance with these large-scale operations, + * BatchWriteItem does not behave in the same way as individual + * PutItem and DeleteItem calls would. For example, you + * cannot specify conditions on individual put and delete requests, and + * BatchWriteItem does not return deleted items in the response.

+ *

If you use a programming language that supports concurrency, you can use threads to + * write items in parallel. Your application must include the necessary logic to manage the + * threads. With languages that don't support threading, you must update or delete the + * specified items one at a time. In both situations, BatchWriteItem performs + * the specified put and delete operations in parallel, giving you the power of the thread + * pool approach without having to introduce complexity into your application.

+ *

Parallel processing reduces latency, but each specified put and delete request + * consumes the same number of write capacity units whether it is processed in parallel or + * not. Delete operations on nonexistent items consume one write capacity unit.

+ *

If one or more of the following is true, DynamoDB rejects the entire batch write + * operation:

+ *
    + *
  • + *

    One or more tables specified in the BatchWriteItem request does + * not exist.

    + *
  • + *
  • + *

    Primary key attributes specified on an item in the request do not match those + * in the corresponding table's primary key schema.

    + *
  • + *
  • + *

    You try to perform multiple operations on the same item in the same + * BatchWriteItem request. For example, you cannot put and delete + * the same item in the same BatchWriteItem request.

    + *
  • + *
  • + *

    Your request contains at least two items with identical hash and range keys + * (which essentially is two put operations).

    + *
  • + *
  • + *

    There are more than 25 requests in the batch.

    + *
  • + *
  • + *

    Any individual item in a batch exceeds 400 KB.

    + *
  • + *
  • + *

    The total request size exceeds 16 MB.

    + *
  • + *
  • + *

    Any individual items with keys exceeding the key length limits. For a + * partition key, the limit is 2048 bytes and for a sort key, the limit is 1024 + * bytes.

    + *
  • + *
+ * @example + * Use a bare-bones client and the command you need to make an API call. + * ```javascript + * import { DynamoDBClient, BatchWriteItemCommand } from "@aws-sdk/client-dynamodb"; // ES Modules import + * // const { DynamoDBClient, BatchWriteItemCommand } = require("@aws-sdk/client-dynamodb"); // CommonJS import + * const client = new DynamoDBClient(config); + * const input = { // BatchWriteItemInput + * RequestItems: { // BatchWriteItemRequestMap // required + * "": [ // WriteRequests + * { // WriteRequest + * PutRequest: { // PutRequest + * Item: { // PutItemInputAttributeMap // required + * "": { // AttributeValue Union: only one key present + * S: "STRING_VALUE", + * N: "STRING_VALUE", + * B: new Uint8Array(), // e.g. Buffer.from("") or new TextEncoder().encode("") + * SS: [ // StringSetAttributeValue + * "STRING_VALUE", + * ], + * NS: [ // NumberSetAttributeValue + * "STRING_VALUE", + * ], + * BS: [ // BinarySetAttributeValue + * new Uint8Array(), // e.g. Buffer.from("") or new TextEncoder().encode("") + * ], + * M: { // MapAttributeValue + * "": {// Union: only one key present + * S: "STRING_VALUE", + * N: "STRING_VALUE", + * B: new Uint8Array(), // e.g. Buffer.from("") or new TextEncoder().encode("") + * SS: [ + * "STRING_VALUE", + * ], + * NS: [ + * "STRING_VALUE", + * ], + * BS: [ + * new Uint8Array(), // e.g. 
Buffer.from("") or new TextEncoder().encode("") + * ], + * M: { + * "": "", + * }, + * L: [ // ListAttributeValue + * "", + * ], + * NULL: true || false, + * BOOL: true || false, + * }, + * }, + * L: [ + * "", + * ], + * NULL: true || false, + * BOOL: true || false, + * }, + * }, + * }, + * DeleteRequest: { // DeleteRequest + * Key: { // Key // required + * "": "", + * }, + * }, + * }, + * ], + * }, + * ReturnConsumedCapacity: "INDEXES" || "TOTAL" || "NONE", + * ReturnItemCollectionMetrics: "SIZE" || "NONE", + * }; + * const command = new BatchWriteItemCommand(input); + * const response = await client.send(command); + * // { // BatchWriteItemOutput + * // UnprocessedItems: { // BatchWriteItemRequestMap + * // "": [ // WriteRequests + * // { // WriteRequest + * // PutRequest: { // PutRequest + * // Item: { // PutItemInputAttributeMap // required + * // "": { // AttributeValue Union: only one key present + * // S: "STRING_VALUE", + * // N: "STRING_VALUE", + * // B: new Uint8Array(), + * // SS: [ // StringSetAttributeValue + * // "STRING_VALUE", + * // ], + * // NS: [ // NumberSetAttributeValue + * // "STRING_VALUE", + * // ], + * // BS: [ // BinarySetAttributeValue + * // new Uint8Array(), + * // ], + * // M: { // MapAttributeValue + * // "": {// Union: only one key present + * // S: "STRING_VALUE", + * // N: "STRING_VALUE", + * // B: new Uint8Array(), + * // SS: [ + * // "STRING_VALUE", + * // ], + * // NS: [ + * // "STRING_VALUE", + * // ], + * // BS: [ + * // new Uint8Array(), + * // ], + * // M: { + * // "": "", + * // }, + * // L: [ // ListAttributeValue + * // "", + * // ], + * // NULL: true || false, + * // BOOL: true || false, + * // }, + * // }, + * // L: [ + * // "", + * // ], + * // NULL: true || false, + * // BOOL: true || false, + * // }, + * // }, + * // }, + * // DeleteRequest: { // DeleteRequest + * // Key: { // Key // required + * // "": "", + * // }, + * // }, + * // }, + * // ], + * // }, + * // ItemCollectionMetrics: { // 
ItemCollectionMetricsPerTable + * // "": [ // ItemCollectionMetricsMultiple + * // { // ItemCollectionMetrics + * // ItemCollectionKey: { // ItemCollectionKeyAttributeMap + * // "": "", + * // }, + * // SizeEstimateRangeGB: [ // ItemCollectionSizeEstimateRange + * // Number("double"), + * // ], + * // }, + * // ], + * // }, + * // ConsumedCapacity: [ // ConsumedCapacityMultiple + * // { // ConsumedCapacity + * // TableName: "STRING_VALUE", + * // CapacityUnits: Number("double"), + * // ReadCapacityUnits: Number("double"), + * // WriteCapacityUnits: Number("double"), + * // Table: { // Capacity + * // ReadCapacityUnits: Number("double"), + * // WriteCapacityUnits: Number("double"), + * // CapacityUnits: Number("double"), + * // }, + * // LocalSecondaryIndexes: { // SecondaryIndexesCapacityMap + * // "": { + * // ReadCapacityUnits: Number("double"), + * // WriteCapacityUnits: Number("double"), + * // CapacityUnits: Number("double"), + * // }, + * // }, + * // GlobalSecondaryIndexes: { + * // "": { + * // ReadCapacityUnits: Number("double"), + * // WriteCapacityUnits: Number("double"), + * // CapacityUnits: Number("double"), + * // }, + * // }, + * // }, + * // ], + * // }; + * + * ``` + * + * @param BatchWriteItemCommandInput - {@link BatchWriteItemCommandInput} + * @returns {@link BatchWriteItemCommandOutput} + * @see {@link BatchWriteItemCommandInput} for command's `input` shape. + * @see {@link BatchWriteItemCommandOutput} for command's `response` shape. + * @see {@link DynamoDBClientResolvedConfig | config} for DynamoDBClient's `config` shape. + * + * @throws {@link InternalServerError} (server fault) + *

An error occurred on the server side.

+ * + * @throws {@link InvalidEndpointException} (client fault) + * + * @throws {@link ItemCollectionSizeLimitExceededException} (client fault) + *

An item collection is too large. This exception is only returned for tables that + * have one or more local secondary indexes.

+ * + * @throws {@link ProvisionedThroughputExceededException} (client fault) + *

Your request rate is too high. The Amazon Web Services SDKs for DynamoDB + * automatically retry requests that receive this exception. Your request is eventually + * successful, unless your retry queue is too large to finish. Reduce the frequency of + * requests and use exponential backoff. For more information, go to Error Retries and Exponential Backoff in the Amazon DynamoDB Developer Guide.

+ * + * @throws {@link RequestLimitExceeded} (client fault) + *

Throughput exceeds the current throughput quota for your account. Please contact + * Amazon Web ServicesSupport to request a + * quota increase.

+ * + * @throws {@link ResourceNotFoundException} (client fault) + *

The operation tried to access a nonexistent table or index. The resource might not + * be specified correctly, or its status might not be ACTIVE.

+ * + * @throws {@link DynamoDBServiceException} + *

Base exception class for all service exceptions from DynamoDB service.

+ * + * + * @example To add multiple items to a table + * ```javascript + * // This example adds three new items to the Music table using a batch of three PutItem requests. + * const input = { + * RequestItems: { + * Music: [ + * { + * PutRequest: { + * Item: { + * AlbumTitle: { + * S: "Somewhat Famous" + * }, + * Artist: { + * S: "No One You Know" + * }, + * SongTitle: { + * S: "Call Me Today" + * } + * } + * } + * }, + * { + * PutRequest: { + * Item: { + * AlbumTitle: { + * S: "Songs About Life" + * }, + * Artist: { + * S: "Acme Band" + * }, + * SongTitle: { + * S: "Happy Day" + * } + * } + * } + * }, + * { + * PutRequest: { + * Item: { + * AlbumTitle: { + * S: "Blue Sky Blues" + * }, + * Artist: { + * S: "No One You Know" + * }, + * SongTitle: { + * S: "Scared of My Shadow" + * } + * } + * } + * } + * ] + * } + * }; + * const command = new BatchWriteItemCommand(input); + * const response = await client.send(command); + * /* response is + * { /* empty *\/ } + * *\/ + * ``` + * + * @public + */ +export declare class BatchWriteItemCommand extends BatchWriteItemCommand_base { + /** @internal type navigation helper, not in runtime. 
*/ + protected static __types: { + api: { + input: BatchWriteItemInput; + output: BatchWriteItemOutput; + }; + sdk: { + input: BatchWriteItemCommandInput; + output: BatchWriteItemCommandOutput; + }; + }; +} diff --git a/amplify/functions/deleteDocument/node_modules/@aws-sdk/client-dynamodb/dist-types/commands/CreateBackupCommand.d.ts b/amplify/functions/deleteDocument/node_modules/@aws-sdk/client-dynamodb/dist-types/commands/CreateBackupCommand.d.ts new file mode 100644 index 0000000..32ec75a --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@aws-sdk/client-dynamodb/dist-types/commands/CreateBackupCommand.d.ts @@ -0,0 +1,146 @@ +import { Command as $Command } from "@smithy/smithy-client"; +import { MetadataBearer as __MetadataBearer } from "@smithy/types"; +import { DynamoDBClientResolvedConfig, ServiceInputTypes, ServiceOutputTypes } from "../DynamoDBClient"; +import { CreateBackupInput, CreateBackupOutput } from "../models/models_0"; +/** + * @public + */ +export type { __MetadataBearer }; +export { $Command }; +/** + * @public + * + * The input for {@link CreateBackupCommand}. + */ +export interface CreateBackupCommandInput extends CreateBackupInput { +} +/** + * @public + * + * The output of {@link CreateBackupCommand}. + */ +export interface CreateBackupCommandOutput extends CreateBackupOutput, __MetadataBearer { +} +declare const CreateBackupCommand_base: { + new (input: CreateBackupCommandInput): import("@smithy/smithy-client").CommandImpl; + new (__0_0: CreateBackupCommandInput): import("@smithy/smithy-client").CommandImpl; + getEndpointParameterInstructions(): import("@smithy/middleware-endpoint").EndpointParameterInstructions; +}; +/** + *

Creates a backup for an existing table.

+ *

Each time you create an on-demand backup, the entire table data is backed up. There + * is no limit to the number of on-demand backups that can be taken.

+ *

When you create an on-demand backup, a time marker of the request is cataloged, and + * the backup is created asynchronously, by applying all changes until the time of the + * request to the last full table snapshot. Backup requests are processed instantaneously + * and become available for restore within minutes.

+ *

You can call CreateBackup at a maximum rate of 50 times per + * second.

+ *

All backups in DynamoDB work without consuming any provisioned throughput on the + * table.

+ *

If you submit a backup request on 2018-12-14 at 14:25:00, the backup is guaranteed to + * contain all data committed to the table up to 14:24:00, and data committed after + * 14:26:00 will not be. The backup might contain data modifications made between 14:24:00 + * and 14:26:00. On-demand backup does not support causal consistency.

+ *

Along with data, the following are also included on the backups:

+ *
    + *
  • + *

    Global secondary indexes (GSIs)

    + *
  • + *
  • + *

    Local secondary indexes (LSIs)

    + *
  • + *
  • + *

    Streams

    + *
  • + *
  • + *

    Provisioned read and write capacity

    + *
  • + *
+ * @example + * Use a bare-bones client and the command you need to make an API call. + * ```javascript + * import { DynamoDBClient, CreateBackupCommand } from "@aws-sdk/client-dynamodb"; // ES Modules import + * // const { DynamoDBClient, CreateBackupCommand } = require("@aws-sdk/client-dynamodb"); // CommonJS import + * const client = new DynamoDBClient(config); + * const input = { // CreateBackupInput + * TableName: "STRING_VALUE", // required + * BackupName: "STRING_VALUE", // required + * }; + * const command = new CreateBackupCommand(input); + * const response = await client.send(command); + * // { // CreateBackupOutput + * // BackupDetails: { // BackupDetails + * // BackupArn: "STRING_VALUE", // required + * // BackupName: "STRING_VALUE", // required + * // BackupSizeBytes: Number("long"), + * // BackupStatus: "CREATING" || "DELETED" || "AVAILABLE", // required + * // BackupType: "USER" || "SYSTEM" || "AWS_BACKUP", // required + * // BackupCreationDateTime: new Date("TIMESTAMP"), // required + * // BackupExpiryDateTime: new Date("TIMESTAMP"), + * // }, + * // }; + * + * ``` + * + * @param CreateBackupCommandInput - {@link CreateBackupCommandInput} + * @returns {@link CreateBackupCommandOutput} + * @see {@link CreateBackupCommandInput} for command's `input` shape. + * @see {@link CreateBackupCommandOutput} for command's `response` shape. + * @see {@link DynamoDBClientResolvedConfig | config} for DynamoDBClient's `config` shape. + * + * @throws {@link BackupInUseException} (client fault) + *

There is another ongoing conflicting backup control plane operation on the table. + * The backup is either being created, deleted or restored to a table.

+ * + * @throws {@link ContinuousBackupsUnavailableException} (client fault) + *

Backups have not yet been enabled for this table.

+ * + * @throws {@link InternalServerError} (server fault) + *

An error occurred on the server side.

+ * + * @throws {@link InvalidEndpointException} (client fault) + * + * @throws {@link LimitExceededException} (client fault) + *

There is no limit to the number of daily on-demand backups that can be taken.

+ *

For most purposes, up to 500 simultaneous table operations are allowed per account. These operations + * include CreateTable, UpdateTable, + * DeleteTable,UpdateTimeToLive, + * RestoreTableFromBackup, and RestoreTableToPointInTime.

+ *

When you are creating a table with one or more secondary + * indexes, you can have up to 250 such requests running at a time. However, if the table or + * index specifications are complex, then DynamoDB might temporarily reduce the number + * of concurrent operations.

+ *

When importing into DynamoDB, up to 50 simultaneous import table operations are allowed per account.

+ *

There is a soft account quota of 2,500 tables.

+ *

GetRecords was called with a value of more than 1000 for the limit request parameter.

+ *

More than 2 processes are reading from the same streams shard at the same time. Exceeding + * this limit may result in request throttling.

+ * + * @throws {@link TableInUseException} (client fault) + *

A target table with the specified name is either being created or deleted. + *

+ * + * @throws {@link TableNotFoundException} (client fault) + *

A source table with the name TableName does not currently exist within + * the subscriber's account or the subscriber is operating in the wrong Amazon Web Services Region.

+ * + * @throws {@link DynamoDBServiceException} + *

Base exception class for all service exceptions from DynamoDB service.

+ * + * + * @public + */ +export declare class CreateBackupCommand extends CreateBackupCommand_base { + /** @internal type navigation helper, not in runtime. */ + protected static __types: { + api: { + input: CreateBackupInput; + output: CreateBackupOutput; + }; + sdk: { + input: CreateBackupCommandInput; + output: CreateBackupCommandOutput; + }; + }; +} diff --git a/amplify/functions/deleteDocument/node_modules/@aws-sdk/client-dynamodb/dist-types/commands/CreateGlobalTableCommand.d.ts b/amplify/functions/deleteDocument/node_modules/@aws-sdk/client-dynamodb/dist-types/commands/CreateGlobalTableCommand.d.ts new file mode 100644 index 0000000..551fcd3 --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@aws-sdk/client-dynamodb/dist-types/commands/CreateGlobalTableCommand.d.ts @@ -0,0 +1,205 @@ +import { Command as $Command } from "@smithy/smithy-client"; +import { MetadataBearer as __MetadataBearer } from "@smithy/types"; +import { DynamoDBClientResolvedConfig, ServiceInputTypes, ServiceOutputTypes } from "../DynamoDBClient"; +import { CreateGlobalTableInput, CreateGlobalTableOutput } from "../models/models_0"; +/** + * @public + */ +export type { __MetadataBearer }; +export { $Command }; +/** + * @public + * + * The input for {@link CreateGlobalTableCommand}. + */ +export interface CreateGlobalTableCommandInput extends CreateGlobalTableInput { +} +/** + * @public + * + * The output of {@link CreateGlobalTableCommand}. + */ +export interface CreateGlobalTableCommandOutput extends CreateGlobalTableOutput, __MetadataBearer { +} +declare const CreateGlobalTableCommand_base: { + new (input: CreateGlobalTableCommandInput): import("@smithy/smithy-client").CommandImpl; + new (__0_0: CreateGlobalTableCommandInput): import("@smithy/smithy-client").CommandImpl; + getEndpointParameterInstructions(): import("@smithy/middleware-endpoint").EndpointParameterInstructions; +}; +/** + *

Creates a global table from an existing table. A global table creates a replication + * relationship between two or more DynamoDB tables with the same table name in the + * provided Regions.

+ * + *

This documentation is for version 2017.11.29 (Legacy) of global tables, which should be avoided for new global tables. Customers should use Global Tables version 2019.11.21 (Current) when possible, because it provides greater flexibility, higher efficiency, and consumes less write capacity than 2017.11.29 (Legacy).

+ *

To determine which version you're using, see Determining the global table version you are using. To update existing global tables from version 2017.11.29 (Legacy) to version 2019.11.21 (Current), see Upgrading global tables.

+ *
+ *

If you want to add a new replica table to a global table, each of the following + * conditions must be true:

+ *
    + *
  • + *

    The table must have the same primary key as all of the other replicas.

    + *
  • + *
  • + *

    The table must have the same name as all of the other replicas.

    + *
  • + *
  • + *

    The table must have DynamoDB Streams enabled, with the stream containing both + * the new and the old images of the item.

    + *
  • + *
  • + *

    None of the replica tables in the global table can contain any data.

    + *
  • + *
+ *

If global secondary indexes are specified, then the following conditions must also be + * met:

+ *
    + *
  • + *

    The global secondary indexes must have the same name.

    + *
  • + *
  • + *

    The global secondary indexes must have the same hash key and sort key (if + * present).

    + *
  • + *
+ *

If local secondary indexes are specified, then the following conditions must also be + * met:

+ *
    + *
  • + *

    The local secondary indexes must have the same name.

    + *
  • + *
  • + *

    The local secondary indexes must have the same hash key and sort key (if + * present).

    + *
  • + *
+ * + *

Write capacity settings should be set consistently across your replica tables and + * secondary indexes. DynamoDB strongly recommends enabling auto scaling to manage the + * write capacity settings for all of your global tables replicas and indexes.

+ *

If you prefer to manage write capacity settings manually, you should provision + * equal replicated write capacity units to your replica tables. You should also + * provision equal replicated write capacity units to matching secondary indexes across + * your global table.

+ *
+ * @example + * Use a bare-bones client and the command you need to make an API call. + * ```javascript + * import { DynamoDBClient, CreateGlobalTableCommand } from "@aws-sdk/client-dynamodb"; // ES Modules import + * // const { DynamoDBClient, CreateGlobalTableCommand } = require("@aws-sdk/client-dynamodb"); // CommonJS import + * const client = new DynamoDBClient(config); + * const input = { // CreateGlobalTableInput + * GlobalTableName: "STRING_VALUE", // required + * ReplicationGroup: [ // ReplicaList // required + * { // Replica + * RegionName: "STRING_VALUE", + * }, + * ], + * }; + * const command = new CreateGlobalTableCommand(input); + * const response = await client.send(command); + * // { // CreateGlobalTableOutput + * // GlobalTableDescription: { // GlobalTableDescription + * // ReplicationGroup: [ // ReplicaDescriptionList + * // { // ReplicaDescription + * // RegionName: "STRING_VALUE", + * // ReplicaStatus: "CREATING" || "CREATION_FAILED" || "UPDATING" || "DELETING" || "ACTIVE" || "REGION_DISABLED" || "INACCESSIBLE_ENCRYPTION_CREDENTIALS", + * // ReplicaStatusDescription: "STRING_VALUE", + * // ReplicaStatusPercentProgress: "STRING_VALUE", + * // KMSMasterKeyId: "STRING_VALUE", + * // ProvisionedThroughputOverride: { // ProvisionedThroughputOverride + * // ReadCapacityUnits: Number("long"), + * // }, + * // OnDemandThroughputOverride: { // OnDemandThroughputOverride + * // MaxReadRequestUnits: Number("long"), + * // }, + * // WarmThroughput: { // TableWarmThroughputDescription + * // ReadUnitsPerSecond: Number("long"), + * // WriteUnitsPerSecond: Number("long"), + * // Status: "CREATING" || "UPDATING" || "DELETING" || "ACTIVE" || "INACCESSIBLE_ENCRYPTION_CREDENTIALS" || "ARCHIVING" || "ARCHIVED", + * // }, + * // GlobalSecondaryIndexes: [ // ReplicaGlobalSecondaryIndexDescriptionList + * // { // ReplicaGlobalSecondaryIndexDescription + * // IndexName: "STRING_VALUE", + * // ProvisionedThroughputOverride: { + * // ReadCapacityUnits: Number("long"), + 
* // }, + * // OnDemandThroughputOverride: { + * // MaxReadRequestUnits: Number("long"), + * // }, + * // WarmThroughput: { // GlobalSecondaryIndexWarmThroughputDescription + * // ReadUnitsPerSecond: Number("long"), + * // WriteUnitsPerSecond: Number("long"), + * // Status: "CREATING" || "UPDATING" || "DELETING" || "ACTIVE", + * // }, + * // }, + * // ], + * // ReplicaInaccessibleDateTime: new Date("TIMESTAMP"), + * // ReplicaTableClassSummary: { // TableClassSummary + * // TableClass: "STANDARD" || "STANDARD_INFREQUENT_ACCESS", + * // LastUpdateDateTime: new Date("TIMESTAMP"), + * // }, + * // }, + * // ], + * // GlobalTableArn: "STRING_VALUE", + * // CreationDateTime: new Date("TIMESTAMP"), + * // GlobalTableStatus: "CREATING" || "ACTIVE" || "DELETING" || "UPDATING", + * // GlobalTableName: "STRING_VALUE", + * // }, + * // }; + * + * ``` + * + * @param CreateGlobalTableCommandInput - {@link CreateGlobalTableCommandInput} + * @returns {@link CreateGlobalTableCommandOutput} + * @see {@link CreateGlobalTableCommandInput} for command's `input` shape. + * @see {@link CreateGlobalTableCommandOutput} for command's `response` shape. + * @see {@link DynamoDBClientResolvedConfig | config} for DynamoDBClient's `config` shape. + * + * @throws {@link GlobalTableAlreadyExistsException} (client fault) + *

The specified global table already exists.

+ * + * @throws {@link InternalServerError} (server fault) + *

An error occurred on the server side.

+ * + * @throws {@link InvalidEndpointException} (client fault) + * + * @throws {@link LimitExceededException} (client fault) + *

There is no limit to the number of daily on-demand backups that can be taken.

+ *

For most purposes, up to 500 simultaneous table operations are allowed per account. These operations + * include CreateTable, UpdateTable, + * DeleteTable,UpdateTimeToLive, + * RestoreTableFromBackup, and RestoreTableToPointInTime.

+ *

When you are creating a table with one or more secondary + * indexes, you can have up to 250 such requests running at a time. However, if the table or + * index specifications are complex, then DynamoDB might temporarily reduce the number + * of concurrent operations.

+ *

When importing into DynamoDB, up to 50 simultaneous import table operations are allowed per account.

+ *

There is a soft account quota of 2,500 tables.

+ *

GetRecords was called with a value of more than 1000 for the limit request parameter.

+ *

More than 2 processes are reading from the same streams shard at the same time. Exceeding + * this limit may result in request throttling.

+ * + * @throws {@link TableNotFoundException} (client fault) + *

A source table with the name TableName does not currently exist within + * the subscriber's account or the subscriber is operating in the wrong Amazon Web Services Region.

+ * + * @throws {@link DynamoDBServiceException} + *

Base exception class for all service exceptions from DynamoDB service.

+ * + * + * @public + */ +export declare class CreateGlobalTableCommand extends CreateGlobalTableCommand_base { + /** @internal type navigation helper, not in runtime. */ + protected static __types: { + api: { + input: CreateGlobalTableInput; + output: CreateGlobalTableOutput; + }; + sdk: { + input: CreateGlobalTableCommandInput; + output: CreateGlobalTableCommandOutput; + }; + }; +} diff --git a/amplify/functions/deleteDocument/node_modules/@aws-sdk/client-dynamodb/dist-types/commands/CreateTableCommand.d.ts b/amplify/functions/deleteDocument/node_modules/@aws-sdk/client-dynamodb/dist-types/commands/CreateTableCommand.d.ts new file mode 100644 index 0000000..0c21eec --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@aws-sdk/client-dynamodb/dist-types/commands/CreateTableCommand.d.ts @@ -0,0 +1,378 @@ +import { Command as $Command } from "@smithy/smithy-client"; +import { MetadataBearer as __MetadataBearer } from "@smithy/types"; +import { DynamoDBClientResolvedConfig, ServiceInputTypes, ServiceOutputTypes } from "../DynamoDBClient"; +import { CreateTableInput, CreateTableOutput } from "../models/models_0"; +/** + * @public + */ +export type { __MetadataBearer }; +export { $Command }; +/** + * @public + * + * The input for {@link CreateTableCommand}. + */ +export interface CreateTableCommandInput extends CreateTableInput { +} +/** + * @public + * + * The output of {@link CreateTableCommand}. + */ +export interface CreateTableCommandOutput extends CreateTableOutput, __MetadataBearer { +} +declare const CreateTableCommand_base: { + new (input: CreateTableCommandInput): import("@smithy/smithy-client").CommandImpl; + new (__0_0: CreateTableCommandInput): import("@smithy/smithy-client").CommandImpl; + getEndpointParameterInstructions(): import("@smithy/middleware-endpoint").EndpointParameterInstructions; +}; +/** + *

The CreateTable operation adds a new table to your account. In an Amazon Web Services account, table names must be unique within each Region. That is, you can + * have two tables with same name if you create the tables in different Regions.

+ *

+ * CreateTable is an asynchronous operation. Upon receiving a + * CreateTable request, DynamoDB immediately returns a response with a + * TableStatus of CREATING. After the table is created, + * DynamoDB sets the TableStatus to ACTIVE. You can perform read + * and write operations only on an ACTIVE table.

+ *

You can optionally define secondary indexes on the new table, as part of the + * CreateTable operation. If you want to create multiple tables with + * secondary indexes on them, you must create the tables sequentially. Only one table with + * secondary indexes can be in the CREATING state at any given time.

+ *

You can use the DescribeTable action to check the table status.

+ * @example + * Use a bare-bones client and the command you need to make an API call. + * ```javascript + * import { DynamoDBClient, CreateTableCommand } from "@aws-sdk/client-dynamodb"; // ES Modules import + * // const { DynamoDBClient, CreateTableCommand } = require("@aws-sdk/client-dynamodb"); // CommonJS import + * const client = new DynamoDBClient(config); + * const input = { // CreateTableInput + * AttributeDefinitions: [ // AttributeDefinitions // required + * { // AttributeDefinition + * AttributeName: "STRING_VALUE", // required + * AttributeType: "S" || "N" || "B", // required + * }, + * ], + * TableName: "STRING_VALUE", // required + * KeySchema: [ // KeySchema // required + * { // KeySchemaElement + * AttributeName: "STRING_VALUE", // required + * KeyType: "HASH" || "RANGE", // required + * }, + * ], + * LocalSecondaryIndexes: [ // LocalSecondaryIndexList + * { // LocalSecondaryIndex + * IndexName: "STRING_VALUE", // required + * KeySchema: [ // required + * { + * AttributeName: "STRING_VALUE", // required + * KeyType: "HASH" || "RANGE", // required + * }, + * ], + * Projection: { // Projection + * ProjectionType: "ALL" || "KEYS_ONLY" || "INCLUDE", + * NonKeyAttributes: [ // NonKeyAttributeNameList + * "STRING_VALUE", + * ], + * }, + * }, + * ], + * GlobalSecondaryIndexes: [ // GlobalSecondaryIndexList + * { // GlobalSecondaryIndex + * IndexName: "STRING_VALUE", // required + * KeySchema: [ // required + * { + * AttributeName: "STRING_VALUE", // required + * KeyType: "HASH" || "RANGE", // required + * }, + * ], + * Projection: { + * ProjectionType: "ALL" || "KEYS_ONLY" || "INCLUDE", + * NonKeyAttributes: [ + * "STRING_VALUE", + * ], + * }, + * ProvisionedThroughput: { // ProvisionedThroughput + * ReadCapacityUnits: Number("long"), // required + * WriteCapacityUnits: Number("long"), // required + * }, + * OnDemandThroughput: { // OnDemandThroughput + * MaxReadRequestUnits: Number("long"), + * MaxWriteRequestUnits: Number("long"), + * }, + * 
WarmThroughput: { // WarmThroughput + * ReadUnitsPerSecond: Number("long"), + * WriteUnitsPerSecond: Number("long"), + * }, + * }, + * ], + * BillingMode: "PROVISIONED" || "PAY_PER_REQUEST", + * ProvisionedThroughput: { + * ReadCapacityUnits: Number("long"), // required + * WriteCapacityUnits: Number("long"), // required + * }, + * StreamSpecification: { // StreamSpecification + * StreamEnabled: true || false, // required + * StreamViewType: "NEW_IMAGE" || "OLD_IMAGE" || "NEW_AND_OLD_IMAGES" || "KEYS_ONLY", + * }, + * SSESpecification: { // SSESpecification + * Enabled: true || false, + * SSEType: "AES256" || "KMS", + * KMSMasterKeyId: "STRING_VALUE", + * }, + * Tags: [ // TagList + * { // Tag + * Key: "STRING_VALUE", // required + * Value: "STRING_VALUE", // required + * }, + * ], + * TableClass: "STANDARD" || "STANDARD_INFREQUENT_ACCESS", + * DeletionProtectionEnabled: true || false, + * WarmThroughput: { + * ReadUnitsPerSecond: Number("long"), + * WriteUnitsPerSecond: Number("long"), + * }, + * ResourcePolicy: "STRING_VALUE", + * OnDemandThroughput: { + * MaxReadRequestUnits: Number("long"), + * MaxWriteRequestUnits: Number("long"), + * }, + * }; + * const command = new CreateTableCommand(input); + * const response = await client.send(command); + * // { // CreateTableOutput + * // TableDescription: { // TableDescription + * // AttributeDefinitions: [ // AttributeDefinitions + * // { // AttributeDefinition + * // AttributeName: "STRING_VALUE", // required + * // AttributeType: "S" || "N" || "B", // required + * // }, + * // ], + * // TableName: "STRING_VALUE", + * // KeySchema: [ // KeySchema + * // { // KeySchemaElement + * // AttributeName: "STRING_VALUE", // required + * // KeyType: "HASH" || "RANGE", // required + * // }, + * // ], + * // TableStatus: "CREATING" || "UPDATING" || "DELETING" || "ACTIVE" || "INACCESSIBLE_ENCRYPTION_CREDENTIALS" || "ARCHIVING" || "ARCHIVED", + * // CreationDateTime: new Date("TIMESTAMP"), + * // ProvisionedThroughput: { // 
ProvisionedThroughputDescription + * // LastIncreaseDateTime: new Date("TIMESTAMP"), + * // LastDecreaseDateTime: new Date("TIMESTAMP"), + * // NumberOfDecreasesToday: Number("long"), + * // ReadCapacityUnits: Number("long"), + * // WriteCapacityUnits: Number("long"), + * // }, + * // TableSizeBytes: Number("long"), + * // ItemCount: Number("long"), + * // TableArn: "STRING_VALUE", + * // TableId: "STRING_VALUE", + * // BillingModeSummary: { // BillingModeSummary + * // BillingMode: "PROVISIONED" || "PAY_PER_REQUEST", + * // LastUpdateToPayPerRequestDateTime: new Date("TIMESTAMP"), + * // }, + * // LocalSecondaryIndexes: [ // LocalSecondaryIndexDescriptionList + * // { // LocalSecondaryIndexDescription + * // IndexName: "STRING_VALUE", + * // KeySchema: [ + * // { + * // AttributeName: "STRING_VALUE", // required + * // KeyType: "HASH" || "RANGE", // required + * // }, + * // ], + * // Projection: { // Projection + * // ProjectionType: "ALL" || "KEYS_ONLY" || "INCLUDE", + * // NonKeyAttributes: [ // NonKeyAttributeNameList + * // "STRING_VALUE", + * // ], + * // }, + * // IndexSizeBytes: Number("long"), + * // ItemCount: Number("long"), + * // IndexArn: "STRING_VALUE", + * // }, + * // ], + * // GlobalSecondaryIndexes: [ // GlobalSecondaryIndexDescriptionList + * // { // GlobalSecondaryIndexDescription + * // IndexName: "STRING_VALUE", + * // KeySchema: [ + * // { + * // AttributeName: "STRING_VALUE", // required + * // KeyType: "HASH" || "RANGE", // required + * // }, + * // ], + * // Projection: { + * // ProjectionType: "ALL" || "KEYS_ONLY" || "INCLUDE", + * // NonKeyAttributes: [ + * // "STRING_VALUE", + * // ], + * // }, + * // IndexStatus: "CREATING" || "UPDATING" || "DELETING" || "ACTIVE", + * // Backfilling: true || false, + * // ProvisionedThroughput: { + * // LastIncreaseDateTime: new Date("TIMESTAMP"), + * // LastDecreaseDateTime: new Date("TIMESTAMP"), + * // NumberOfDecreasesToday: Number("long"), + * // ReadCapacityUnits: Number("long"), + * // 
WriteCapacityUnits: Number("long"), + * // }, + * // IndexSizeBytes: Number("long"), + * // ItemCount: Number("long"), + * // IndexArn: "STRING_VALUE", + * // OnDemandThroughput: { // OnDemandThroughput + * // MaxReadRequestUnits: Number("long"), + * // MaxWriteRequestUnits: Number("long"), + * // }, + * // WarmThroughput: { // GlobalSecondaryIndexWarmThroughputDescription + * // ReadUnitsPerSecond: Number("long"), + * // WriteUnitsPerSecond: Number("long"), + * // Status: "CREATING" || "UPDATING" || "DELETING" || "ACTIVE", + * // }, + * // }, + * // ], + * // StreamSpecification: { // StreamSpecification + * // StreamEnabled: true || false, // required + * // StreamViewType: "NEW_IMAGE" || "OLD_IMAGE" || "NEW_AND_OLD_IMAGES" || "KEYS_ONLY", + * // }, + * // LatestStreamLabel: "STRING_VALUE", + * // LatestStreamArn: "STRING_VALUE", + * // GlobalTableVersion: "STRING_VALUE", + * // Replicas: [ // ReplicaDescriptionList + * // { // ReplicaDescription + * // RegionName: "STRING_VALUE", + * // ReplicaStatus: "CREATING" || "CREATION_FAILED" || "UPDATING" || "DELETING" || "ACTIVE" || "REGION_DISABLED" || "INACCESSIBLE_ENCRYPTION_CREDENTIALS", + * // ReplicaStatusDescription: "STRING_VALUE", + * // ReplicaStatusPercentProgress: "STRING_VALUE", + * // KMSMasterKeyId: "STRING_VALUE", + * // ProvisionedThroughputOverride: { // ProvisionedThroughputOverride + * // ReadCapacityUnits: Number("long"), + * // }, + * // OnDemandThroughputOverride: { // OnDemandThroughputOverride + * // MaxReadRequestUnits: Number("long"), + * // }, + * // WarmThroughput: { // TableWarmThroughputDescription + * // ReadUnitsPerSecond: Number("long"), + * // WriteUnitsPerSecond: Number("long"), + * // Status: "CREATING" || "UPDATING" || "DELETING" || "ACTIVE" || "INACCESSIBLE_ENCRYPTION_CREDENTIALS" || "ARCHIVING" || "ARCHIVED", + * // }, + * // GlobalSecondaryIndexes: [ // ReplicaGlobalSecondaryIndexDescriptionList + * // { // ReplicaGlobalSecondaryIndexDescription + * // IndexName: "STRING_VALUE", 
+ * // ProvisionedThroughputOverride: { + * // ReadCapacityUnits: Number("long"), + * // }, + * // OnDemandThroughputOverride: { + * // MaxReadRequestUnits: Number("long"), + * // }, + * // WarmThroughput: { + * // ReadUnitsPerSecond: Number("long"), + * // WriteUnitsPerSecond: Number("long"), + * // Status: "CREATING" || "UPDATING" || "DELETING" || "ACTIVE", + * // }, + * // }, + * // ], + * // ReplicaInaccessibleDateTime: new Date("TIMESTAMP"), + * // ReplicaTableClassSummary: { // TableClassSummary + * // TableClass: "STANDARD" || "STANDARD_INFREQUENT_ACCESS", + * // LastUpdateDateTime: new Date("TIMESTAMP"), + * // }, + * // }, + * // ], + * // RestoreSummary: { // RestoreSummary + * // SourceBackupArn: "STRING_VALUE", + * // SourceTableArn: "STRING_VALUE", + * // RestoreDateTime: new Date("TIMESTAMP"), // required + * // RestoreInProgress: true || false, // required + * // }, + * // SSEDescription: { // SSEDescription + * // Status: "ENABLING" || "ENABLED" || "DISABLING" || "DISABLED" || "UPDATING", + * // SSEType: "AES256" || "KMS", + * // KMSMasterKeyArn: "STRING_VALUE", + * // InaccessibleEncryptionDateTime: new Date("TIMESTAMP"), + * // }, + * // ArchivalSummary: { // ArchivalSummary + * // ArchivalDateTime: new Date("TIMESTAMP"), + * // ArchivalReason: "STRING_VALUE", + * // ArchivalBackupArn: "STRING_VALUE", + * // }, + * // TableClassSummary: { + * // TableClass: "STANDARD" || "STANDARD_INFREQUENT_ACCESS", + * // LastUpdateDateTime: new Date("TIMESTAMP"), + * // }, + * // DeletionProtectionEnabled: true || false, + * // OnDemandThroughput: { + * // MaxReadRequestUnits: Number("long"), + * // MaxWriteRequestUnits: Number("long"), + * // }, + * // WarmThroughput: { + * // ReadUnitsPerSecond: Number("long"), + * // WriteUnitsPerSecond: Number("long"), + * // Status: "CREATING" || "UPDATING" || "DELETING" || "ACTIVE" || "INACCESSIBLE_ENCRYPTION_CREDENTIALS" || "ARCHIVING" || "ARCHIVED", + * // }, + * // MultiRegionConsistency: "EVENTUAL" || "STRONG", + * // 
}, + * // }; + * + * ``` + * + * @param CreateTableCommandInput - {@link CreateTableCommandInput} + * @returns {@link CreateTableCommandOutput} + * @see {@link CreateTableCommandInput} for command's `input` shape. + * @see {@link CreateTableCommandOutput} for command's `response` shape. + * @see {@link DynamoDBClientResolvedConfig | config} for DynamoDBClient's `config` shape. + * + * @throws {@link InternalServerError} (server fault) + *

An error occurred on the server side.

+ * + * @throws {@link InvalidEndpointException} (client fault) + * + * @throws {@link LimitExceededException} (client fault) + *

There is no limit to the number of daily on-demand backups that can be taken.

+ *

For most purposes, up to 500 simultaneous table operations are allowed per account. These operations + * include CreateTable, UpdateTable, + * DeleteTable,UpdateTimeToLive, + * RestoreTableFromBackup, and RestoreTableToPointInTime.

+ *

When you are creating a table with one or more secondary + * indexes, you can have up to 250 such requests running at a time. However, if the table or + * index specifications are complex, then DynamoDB might temporarily reduce the number + * of concurrent operations.

+ *

When importing into DynamoDB, up to 50 simultaneous import table operations are allowed per account.

+ *

There is a soft account quota of 2,500 tables.

+ *

GetRecords was called with a value of more than 1000 for the limit request parameter.

+ *

More than 2 processes are reading from the same streams shard at the same time. Exceeding + * this limit may result in request throttling.

+ * + * @throws {@link ResourceInUseException} (client fault) + *

The operation conflicts with the resource's availability. For example:

+ *
    + *
  • + *

    You attempted to recreate an existing table.

    + *
  • + *
  • + *

    You tried to delete a table currently in the CREATING state.

    + *
  • + *
  • + *

    You tried to update a resource that was already being updated.

    + *
  • + *
+ *

When appropriate, wait for the ongoing update to complete and attempt the request again.

+ * + * @throws {@link DynamoDBServiceException} + *

Base exception class for all service exceptions from DynamoDB service.

+ * + * + * @public + */ +export declare class CreateTableCommand extends CreateTableCommand_base { + /** @internal type navigation helper, not in runtime. */ + protected static __types: { + api: { + input: CreateTableInput; + output: CreateTableOutput; + }; + sdk: { + input: CreateTableCommandInput; + output: CreateTableCommandOutput; + }; + }; +} diff --git a/amplify/functions/deleteDocument/node_modules/@aws-sdk/client-dynamodb/dist-types/commands/DeleteBackupCommand.d.ts b/amplify/functions/deleteDocument/node_modules/@aws-sdk/client-dynamodb/dist-types/commands/DeleteBackupCommand.d.ts new file mode 100644 index 0000000..d3da508 --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@aws-sdk/client-dynamodb/dist-types/commands/DeleteBackupCommand.d.ts @@ -0,0 +1,193 @@ +import { Command as $Command } from "@smithy/smithy-client"; +import { MetadataBearer as __MetadataBearer } from "@smithy/types"; +import { DynamoDBClientResolvedConfig, ServiceInputTypes, ServiceOutputTypes } from "../DynamoDBClient"; +import { DeleteBackupInput, DeleteBackupOutput } from "../models/models_0"; +/** + * @public + */ +export type { __MetadataBearer }; +export { $Command }; +/** + * @public + * + * The input for {@link DeleteBackupCommand}. + */ +export interface DeleteBackupCommandInput extends DeleteBackupInput { +} +/** + * @public + * + * The output of {@link DeleteBackupCommand}. + */ +export interface DeleteBackupCommandOutput extends DeleteBackupOutput, __MetadataBearer { +} +declare const DeleteBackupCommand_base: { + new (input: DeleteBackupCommandInput): import("@smithy/smithy-client").CommandImpl; + new (__0_0: DeleteBackupCommandInput): import("@smithy/smithy-client").CommandImpl; + getEndpointParameterInstructions(): import("@smithy/middleware-endpoint").EndpointParameterInstructions; +}; +/** + *

Deletes an existing backup of a table.

+ *

You can call DeleteBackup at a maximum rate of 10 times per + * second.

+ * @example + * Use a bare-bones client and the command you need to make an API call. + * ```javascript + * import { DynamoDBClient, DeleteBackupCommand } from "@aws-sdk/client-dynamodb"; // ES Modules import + * // const { DynamoDBClient, DeleteBackupCommand } = require("@aws-sdk/client-dynamodb"); // CommonJS import + * const client = new DynamoDBClient(config); + * const input = { // DeleteBackupInput + * BackupArn: "STRING_VALUE", // required + * }; + * const command = new DeleteBackupCommand(input); + * const response = await client.send(command); + * // { // DeleteBackupOutput + * // BackupDescription: { // BackupDescription + * // BackupDetails: { // BackupDetails + * // BackupArn: "STRING_VALUE", // required + * // BackupName: "STRING_VALUE", // required + * // BackupSizeBytes: Number("long"), + * // BackupStatus: "CREATING" || "DELETED" || "AVAILABLE", // required + * // BackupType: "USER" || "SYSTEM" || "AWS_BACKUP", // required + * // BackupCreationDateTime: new Date("TIMESTAMP"), // required + * // BackupExpiryDateTime: new Date("TIMESTAMP"), + * // }, + * // SourceTableDetails: { // SourceTableDetails + * // TableName: "STRING_VALUE", // required + * // TableId: "STRING_VALUE", // required + * // TableArn: "STRING_VALUE", + * // TableSizeBytes: Number("long"), + * // KeySchema: [ // KeySchema // required + * // { // KeySchemaElement + * // AttributeName: "STRING_VALUE", // required + * // KeyType: "HASH" || "RANGE", // required + * // }, + * // ], + * // TableCreationDateTime: new Date("TIMESTAMP"), // required + * // ProvisionedThroughput: { // ProvisionedThroughput + * // ReadCapacityUnits: Number("long"), // required + * // WriteCapacityUnits: Number("long"), // required + * // }, + * // OnDemandThroughput: { // OnDemandThroughput + * // MaxReadRequestUnits: Number("long"), + * // MaxWriteRequestUnits: Number("long"), + * // }, + * // ItemCount: Number("long"), + * // BillingMode: "PROVISIONED" || "PAY_PER_REQUEST", + * // }, + * // 
SourceTableFeatureDetails: { // SourceTableFeatureDetails + * // LocalSecondaryIndexes: [ // LocalSecondaryIndexes + * // { // LocalSecondaryIndexInfo + * // IndexName: "STRING_VALUE", + * // KeySchema: [ + * // { + * // AttributeName: "STRING_VALUE", // required + * // KeyType: "HASH" || "RANGE", // required + * // }, + * // ], + * // Projection: { // Projection + * // ProjectionType: "ALL" || "KEYS_ONLY" || "INCLUDE", + * // NonKeyAttributes: [ // NonKeyAttributeNameList + * // "STRING_VALUE", + * // ], + * // }, + * // }, + * // ], + * // GlobalSecondaryIndexes: [ // GlobalSecondaryIndexes + * // { // GlobalSecondaryIndexInfo + * // IndexName: "STRING_VALUE", + * // KeySchema: [ + * // { + * // AttributeName: "STRING_VALUE", // required + * // KeyType: "HASH" || "RANGE", // required + * // }, + * // ], + * // Projection: { + * // ProjectionType: "ALL" || "KEYS_ONLY" || "INCLUDE", + * // NonKeyAttributes: [ + * // "STRING_VALUE", + * // ], + * // }, + * // ProvisionedThroughput: { + * // ReadCapacityUnits: Number("long"), // required + * // WriteCapacityUnits: Number("long"), // required + * // }, + * // OnDemandThroughput: { + * // MaxReadRequestUnits: Number("long"), + * // MaxWriteRequestUnits: Number("long"), + * // }, + * // }, + * // ], + * // StreamDescription: { // StreamSpecification + * // StreamEnabled: true || false, // required + * // StreamViewType: "NEW_IMAGE" || "OLD_IMAGE" || "NEW_AND_OLD_IMAGES" || "KEYS_ONLY", + * // }, + * // TimeToLiveDescription: { // TimeToLiveDescription + * // TimeToLiveStatus: "ENABLING" || "DISABLING" || "ENABLED" || "DISABLED", + * // AttributeName: "STRING_VALUE", + * // }, + * // SSEDescription: { // SSEDescription + * // Status: "ENABLING" || "ENABLED" || "DISABLING" || "DISABLED" || "UPDATING", + * // SSEType: "AES256" || "KMS", + * // KMSMasterKeyArn: "STRING_VALUE", + * // InaccessibleEncryptionDateTime: new Date("TIMESTAMP"), + * // }, + * // }, + * // }, + * // }; + * + * ``` + * + * @param 
DeleteBackupCommandInput - {@link DeleteBackupCommandInput} + * @returns {@link DeleteBackupCommandOutput} + * @see {@link DeleteBackupCommandInput} for command's `input` shape. + * @see {@link DeleteBackupCommandOutput} for command's `response` shape. + * @see {@link DynamoDBClientResolvedConfig | config} for DynamoDBClient's `config` shape. + * + * @throws {@link BackupInUseException} (client fault) + *

There is another ongoing conflicting backup control plane operation on the table. + * The backup is either being created, deleted or restored to a table.

+ * + * @throws {@link BackupNotFoundException} (client fault) + *

Backup not found for the given BackupARN.

+ * + * @throws {@link InternalServerError} (server fault) + *

An error occurred on the server side.

+ * + * @throws {@link InvalidEndpointException} (client fault) + * + * @throws {@link LimitExceededException} (client fault) + *

There is no limit to the number of daily on-demand backups that can be taken.

+ *

For most purposes, up to 500 simultaneous table operations are allowed per account. These operations + * include CreateTable, UpdateTable, + * DeleteTable,UpdateTimeToLive, + * RestoreTableFromBackup, and RestoreTableToPointInTime.

+ *

When you are creating a table with one or more secondary + * indexes, you can have up to 250 such requests running at a time. However, if the table or + * index specifications are complex, then DynamoDB might temporarily reduce the number + * of concurrent operations.

+ *

When importing into DynamoDB, up to 50 simultaneous import table operations are allowed per account.

+ *

There is a soft account quota of 2,500 tables.

+ *

GetRecords was called with a value of more than 1000 for the limit request parameter.

+ *

More than 2 processes are reading from the same streams shard at the same time. Exceeding + * this limit may result in request throttling.

+ * + * @throws {@link DynamoDBServiceException} + *

Base exception class for all service exceptions from DynamoDB service.

+ * + * + * @public + */ +export declare class DeleteBackupCommand extends DeleteBackupCommand_base { + /** @internal type navigation helper, not in runtime. */ + protected static __types: { + api: { + input: DeleteBackupInput; + output: DeleteBackupOutput; + }; + sdk: { + input: DeleteBackupCommandInput; + output: DeleteBackupCommandOutput; + }; + }; +} diff --git a/amplify/functions/deleteDocument/node_modules/@aws-sdk/client-dynamodb/dist-types/commands/DeleteItemCommand.d.ts b/amplify/functions/deleteDocument/node_modules/@aws-sdk/client-dynamodb/dist-types/commands/DeleteItemCommand.d.ts new file mode 100644 index 0000000..619ecf2 --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@aws-sdk/client-dynamodb/dist-types/commands/DeleteItemCommand.d.ts @@ -0,0 +1,286 @@ +import { Command as $Command } from "@smithy/smithy-client"; +import { MetadataBearer as __MetadataBearer } from "@smithy/types"; +import { DynamoDBClientResolvedConfig, ServiceInputTypes, ServiceOutputTypes } from "../DynamoDBClient"; +import { DeleteItemInput, DeleteItemOutput } from "../models/models_0"; +/** + * @public + */ +export type { __MetadataBearer }; +export { $Command }; +/** + * @public + * + * The input for {@link DeleteItemCommand}. + */ +export interface DeleteItemCommandInput extends DeleteItemInput { +} +/** + * @public + * + * The output of {@link DeleteItemCommand}. + */ +export interface DeleteItemCommandOutput extends DeleteItemOutput, __MetadataBearer { +} +declare const DeleteItemCommand_base: { + new (input: DeleteItemCommandInput): import("@smithy/smithy-client").CommandImpl; + new (__0_0: DeleteItemCommandInput): import("@smithy/smithy-client").CommandImpl; + getEndpointParameterInstructions(): import("@smithy/middleware-endpoint").EndpointParameterInstructions; +}; +/** + *

Deletes a single item in a table by primary key. You can perform a conditional delete + * operation that deletes the item if it exists, or if it has an expected attribute + * value.

+ *

In addition to deleting an item, you can also return the item's attribute values in + * the same operation, using the ReturnValues parameter.

+ *

Unless you specify conditions, the DeleteItem is an idempotent operation; + * running it multiple times on the same item or attribute does not + * result in an error response.

+ *

Conditional deletes are useful for deleting items only if specific conditions are met. + * If those conditions are met, DynamoDB performs the delete. Otherwise, the item is not + * deleted.

+ * @example + * Use a bare-bones client and the command you need to make an API call. + * ```javascript + * import { DynamoDBClient, DeleteItemCommand } from "@aws-sdk/client-dynamodb"; // ES Modules import + * // const { DynamoDBClient, DeleteItemCommand } = require("@aws-sdk/client-dynamodb"); // CommonJS import + * const client = new DynamoDBClient(config); + * const input = { // DeleteItemInput + * TableName: "STRING_VALUE", // required + * Key: { // Key // required + * "": { // AttributeValue Union: only one key present + * S: "STRING_VALUE", + * N: "STRING_VALUE", + * B: new Uint8Array(), // e.g. Buffer.from("") or new TextEncoder().encode("") + * SS: [ // StringSetAttributeValue + * "STRING_VALUE", + * ], + * NS: [ // NumberSetAttributeValue + * "STRING_VALUE", + * ], + * BS: [ // BinarySetAttributeValue + * new Uint8Array(), // e.g. Buffer.from("") or new TextEncoder().encode("") + * ], + * M: { // MapAttributeValue + * "": {// Union: only one key present + * S: "STRING_VALUE", + * N: "STRING_VALUE", + * B: new Uint8Array(), // e.g. Buffer.from("") or new TextEncoder().encode("") + * SS: [ + * "STRING_VALUE", + * ], + * NS: [ + * "STRING_VALUE", + * ], + * BS: [ + * new Uint8Array(), // e.g. 
Buffer.from("") or new TextEncoder().encode("") + * ], + * M: { + * "": "", + * }, + * L: [ // ListAttributeValue + * "", + * ], + * NULL: true || false, + * BOOL: true || false, + * }, + * }, + * L: [ + * "", + * ], + * NULL: true || false, + * BOOL: true || false, + * }, + * }, + * Expected: { // ExpectedAttributeMap + * "": { // ExpectedAttributeValue + * Value: "", + * Exists: true || false, + * ComparisonOperator: "EQ" || "NE" || "IN" || "LE" || "LT" || "GE" || "GT" || "BETWEEN" || "NOT_NULL" || "NULL" || "CONTAINS" || "NOT_CONTAINS" || "BEGINS_WITH", + * AttributeValueList: [ // AttributeValueList + * "", + * ], + * }, + * }, + * ConditionalOperator: "AND" || "OR", + * ReturnValues: "NONE" || "ALL_OLD" || "UPDATED_OLD" || "ALL_NEW" || "UPDATED_NEW", + * ReturnConsumedCapacity: "INDEXES" || "TOTAL" || "NONE", + * ReturnItemCollectionMetrics: "SIZE" || "NONE", + * ConditionExpression: "STRING_VALUE", + * ExpressionAttributeNames: { // ExpressionAttributeNameMap + * "": "STRING_VALUE", + * }, + * ExpressionAttributeValues: { // ExpressionAttributeValueMap + * "": "", + * }, + * ReturnValuesOnConditionCheckFailure: "ALL_OLD" || "NONE", + * }; + * const command = new DeleteItemCommand(input); + * const response = await client.send(command); + * // { // DeleteItemOutput + * // Attributes: { // AttributeMap + * // "": { // AttributeValue Union: only one key present + * // S: "STRING_VALUE", + * // N: "STRING_VALUE", + * // B: new Uint8Array(), + * // SS: [ // StringSetAttributeValue + * // "STRING_VALUE", + * // ], + * // NS: [ // NumberSetAttributeValue + * // "STRING_VALUE", + * // ], + * // BS: [ // BinarySetAttributeValue + * // new Uint8Array(), + * // ], + * // M: { // MapAttributeValue + * // "": {// Union: only one key present + * // S: "STRING_VALUE", + * // N: "STRING_VALUE", + * // B: new Uint8Array(), + * // SS: [ + * // "STRING_VALUE", + * // ], + * // NS: [ + * // "STRING_VALUE", + * // ], + * // BS: [ + * // new Uint8Array(), + * // ], + * // M: { + * 
// "": "", + * // }, + * // L: [ // ListAttributeValue + * // "", + * // ], + * // NULL: true || false, + * // BOOL: true || false, + * // }, + * // }, + * // L: [ + * // "", + * // ], + * // NULL: true || false, + * // BOOL: true || false, + * // }, + * // }, + * // ConsumedCapacity: { // ConsumedCapacity + * // TableName: "STRING_VALUE", + * // CapacityUnits: Number("double"), + * // ReadCapacityUnits: Number("double"), + * // WriteCapacityUnits: Number("double"), + * // Table: { // Capacity + * // ReadCapacityUnits: Number("double"), + * // WriteCapacityUnits: Number("double"), + * // CapacityUnits: Number("double"), + * // }, + * // LocalSecondaryIndexes: { // SecondaryIndexesCapacityMap + * // "": { + * // ReadCapacityUnits: Number("double"), + * // WriteCapacityUnits: Number("double"), + * // CapacityUnits: Number("double"), + * // }, + * // }, + * // GlobalSecondaryIndexes: { + * // "": { + * // ReadCapacityUnits: Number("double"), + * // WriteCapacityUnits: Number("double"), + * // CapacityUnits: Number("double"), + * // }, + * // }, + * // }, + * // ItemCollectionMetrics: { // ItemCollectionMetrics + * // ItemCollectionKey: { // ItemCollectionKeyAttributeMap + * // "": "", + * // }, + * // SizeEstimateRangeGB: [ // ItemCollectionSizeEstimateRange + * // Number("double"), + * // ], + * // }, + * // }; + * + * ``` + * + * @param DeleteItemCommandInput - {@link DeleteItemCommandInput} + * @returns {@link DeleteItemCommandOutput} + * @see {@link DeleteItemCommandInput} for command's `input` shape. + * @see {@link DeleteItemCommandOutput} for command's `response` shape. + * @see {@link DynamoDBClientResolvedConfig | config} for DynamoDBClient's `config` shape. + * + * @throws {@link ConditionalCheckFailedException} (client fault) + *

A condition specified in the operation failed to be evaluated.

+ * + * @throws {@link InternalServerError} (server fault) + *

An error occurred on the server side.

+ * + * @throws {@link InvalidEndpointException} (client fault) + * + * @throws {@link ItemCollectionSizeLimitExceededException} (client fault) + *

An item collection is too large. This exception is only returned for tables that + * have one or more local secondary indexes.

+ * + * @throws {@link ProvisionedThroughputExceededException} (client fault) + *

Your request rate is too high. The Amazon Web Services SDKs for DynamoDB + * automatically retry requests that receive this exception. Your request is eventually + * successful, unless your retry queue is too large to finish. Reduce the frequency of + * requests and use exponential backoff. For more information, go to Error Retries and Exponential Backoff in the Amazon DynamoDB Developer Guide.

+ * + * @throws {@link ReplicatedWriteConflictException} (client fault) + *

The request was rejected because one or more items in the request are being modified by a request in another Region.

+ * + * @throws {@link RequestLimitExceeded} (client fault) + *

Throughput exceeds the current throughput quota for your account. Please contact + * Amazon Web ServicesSupport to request a + * quota increase.

+ * + * @throws {@link ResourceNotFoundException} (client fault) + *

The operation tried to access a nonexistent table or index. The resource might not + * be specified correctly, or its status might not be ACTIVE.

+ * + * @throws {@link TransactionConflictException} (client fault) + *

Operation was rejected because there is an ongoing transaction for the + * item.

+ * + * @throws {@link DynamoDBServiceException} + *

Base exception class for all service exceptions from DynamoDB service.

+ * + * + * @example To delete an item + * ```javascript + * // This example deletes an item from the Music table. + * const input = { + * Key: { + * Artist: { + * S: "No One You Know" + * }, + * SongTitle: { + * S: "Scared of My Shadow" + * } + * }, + * TableName: "Music" + * }; + * const command = new DeleteItemCommand(input); + * const response = await client.send(command); + * /* response is + * { + * ConsumedCapacity: { + * CapacityUnits: 1, + * TableName: "Music" + * } + * } + * *\/ + * ``` + * + * @public + */ +export declare class DeleteItemCommand extends DeleteItemCommand_base { + /** @internal type navigation helper, not in runtime. */ + protected static __types: { + api: { + input: DeleteItemInput; + output: DeleteItemOutput; + }; + sdk: { + input: DeleteItemCommandInput; + output: DeleteItemCommandOutput; + }; + }; +} diff --git a/amplify/functions/deleteDocument/node_modules/@aws-sdk/client-dynamodb/dist-types/commands/DeleteResourcePolicyCommand.d.ts b/amplify/functions/deleteDocument/node_modules/@aws-sdk/client-dynamodb/dist-types/commands/DeleteResourcePolicyCommand.d.ts new file mode 100644 index 0000000..da92cf9 --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@aws-sdk/client-dynamodb/dist-types/commands/DeleteResourcePolicyCommand.d.ts @@ -0,0 +1,138 @@ +import { Command as $Command } from "@smithy/smithy-client"; +import { MetadataBearer as __MetadataBearer } from "@smithy/types"; +import { DynamoDBClientResolvedConfig, ServiceInputTypes, ServiceOutputTypes } from "../DynamoDBClient"; +import { DeleteResourcePolicyInput, DeleteResourcePolicyOutput } from "../models/models_0"; +/** + * @public + */ +export type { __MetadataBearer }; +export { $Command }; +/** + * @public + * + * The input for {@link DeleteResourcePolicyCommand}. + */ +export interface DeleteResourcePolicyCommandInput extends DeleteResourcePolicyInput { +} +/** + * @public + * + * The output of {@link DeleteResourcePolicyCommand}. 
+ */ +export interface DeleteResourcePolicyCommandOutput extends DeleteResourcePolicyOutput, __MetadataBearer { +} +declare const DeleteResourcePolicyCommand_base: { + new (input: DeleteResourcePolicyCommandInput): import("@smithy/smithy-client").CommandImpl; + new (__0_0: DeleteResourcePolicyCommandInput): import("@smithy/smithy-client").CommandImpl; + getEndpointParameterInstructions(): import("@smithy/middleware-endpoint").EndpointParameterInstructions; +}; +/** + *

Deletes the resource-based policy attached to the resource, which can be a table or + * stream.

+ *

+ * DeleteResourcePolicy is an idempotent operation; running it multiple + * times on the same resource doesn't result in an error response, + * unless you specify an ExpectedRevisionId, which will then return a + * PolicyNotFoundException.

+ * + *

To make sure that you don't inadvertently lock yourself out of your own resources, + * the root principal in your Amazon Web Services account can perform + * DeleteResourcePolicy requests, even if your resource-based policy + * explicitly denies the root principal's access.

+ *
+ * + *

+ * DeleteResourcePolicy is an asynchronous operation. If you issue a + * GetResourcePolicy request immediately after running the + * DeleteResourcePolicy request, DynamoDB might still return + * the deleted policy. This is because the policy for your resource might not have been + * deleted yet. Wait for a few seconds, and then try the GetResourcePolicy + * request again.

+ *
+ * @example + * Use a bare-bones client and the command you need to make an API call. + * ```javascript + * import { DynamoDBClient, DeleteResourcePolicyCommand } from "@aws-sdk/client-dynamodb"; // ES Modules import + * // const { DynamoDBClient, DeleteResourcePolicyCommand } = require("@aws-sdk/client-dynamodb"); // CommonJS import + * const client = new DynamoDBClient(config); + * const input = { // DeleteResourcePolicyInput + * ResourceArn: "STRING_VALUE", // required + * ExpectedRevisionId: "STRING_VALUE", + * }; + * const command = new DeleteResourcePolicyCommand(input); + * const response = await client.send(command); + * // { // DeleteResourcePolicyOutput + * // RevisionId: "STRING_VALUE", + * // }; + * + * ``` + * + * @param DeleteResourcePolicyCommandInput - {@link DeleteResourcePolicyCommandInput} + * @returns {@link DeleteResourcePolicyCommandOutput} + * @see {@link DeleteResourcePolicyCommandInput} for command's `input` shape. + * @see {@link DeleteResourcePolicyCommandOutput} for command's `response` shape. + * @see {@link DynamoDBClientResolvedConfig | config} for DynamoDBClient's `config` shape. + * + * @throws {@link InternalServerError} (server fault) + *

An error occurred on the server side.

+ * + * @throws {@link InvalidEndpointException} (client fault) + * + * @throws {@link LimitExceededException} (client fault) + *

There is no limit to the number of daily on-demand backups that can be taken.

+ *

For most purposes, up to 500 simultaneous table operations are allowed per account. These operations + * include CreateTable, UpdateTable, + * DeleteTable,UpdateTimeToLive, + * RestoreTableFromBackup, and RestoreTableToPointInTime.

+ *

When you are creating a table with one or more secondary + * indexes, you can have up to 250 such requests running at a time. However, if the table or + * index specifications are complex, then DynamoDB might temporarily reduce the number + * of concurrent operations.

+ *

When importing into DynamoDB, up to 50 simultaneous import table operations are allowed per account.

+ *

There is a soft account quota of 2,500 tables.

+ *

GetRecords was called with a value of more than 1000 for the limit request parameter.

+ *

More than 2 processes are reading from the same streams shard at the same time. Exceeding + * this limit may result in request throttling.

+ * + * @throws {@link PolicyNotFoundException} (client fault) + *

The operation tried to access a nonexistent resource-based policy.

+ *

If you specified an ExpectedRevisionId, it's possible that a policy is present for the resource but its revision ID didn't match the expected value.

+ * + * @throws {@link ResourceInUseException} (client fault) + *

The operation conflicts with the resource's availability. For example:

+ *
    + *
  • + *

    You attempted to recreate an existing table.

    + *
  • + *
  • + *

    You tried to delete a table currently in the CREATING state.

    + *
  • + *
  • + *

    You tried to update a resource that was already being updated.

    + *
  • + *
+ *

When appropriate, wait for the ongoing update to complete and attempt the request again.

+ * + * @throws {@link ResourceNotFoundException} (client fault) + *

The operation tried to access a nonexistent table or index. The resource might not + * be specified correctly, or its status might not be ACTIVE.

+ * + * @throws {@link DynamoDBServiceException} + *

Base exception class for all service exceptions from DynamoDB service.

+ * + * + * @public + */ +export declare class DeleteResourcePolicyCommand extends DeleteResourcePolicyCommand_base { + /** @internal type navigation helper, not in runtime. */ + protected static __types: { + api: { + input: DeleteResourcePolicyInput; + output: DeleteResourcePolicyOutput; + }; + sdk: { + input: DeleteResourcePolicyCommandInput; + output: DeleteResourcePolicyCommandOutput; + }; + }; +} diff --git a/amplify/functions/deleteDocument/node_modules/@aws-sdk/client-dynamodb/dist-types/commands/DeleteTableCommand.d.ts b/amplify/functions/deleteDocument/node_modules/@aws-sdk/client-dynamodb/dist-types/commands/DeleteTableCommand.d.ts new file mode 100644 index 0000000..f756cd0 --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@aws-sdk/client-dynamodb/dist-types/commands/DeleteTableCommand.d.ts @@ -0,0 +1,328 @@ +import { Command as $Command } from "@smithy/smithy-client"; +import { MetadataBearer as __MetadataBearer } from "@smithy/types"; +import { DynamoDBClientResolvedConfig, ServiceInputTypes, ServiceOutputTypes } from "../DynamoDBClient"; +import { DeleteTableInput, DeleteTableOutput } from "../models/models_0"; +/** + * @public + */ +export type { __MetadataBearer }; +export { $Command }; +/** + * @public + * + * The input for {@link DeleteTableCommand}. + */ +export interface DeleteTableCommandInput extends DeleteTableInput { +} +/** + * @public + * + * The output of {@link DeleteTableCommand}. + */ +export interface DeleteTableCommandOutput extends DeleteTableOutput, __MetadataBearer { +} +declare const DeleteTableCommand_base: { + new (input: DeleteTableCommandInput): import("@smithy/smithy-client").CommandImpl; + new (__0_0: DeleteTableCommandInput): import("@smithy/smithy-client").CommandImpl; + getEndpointParameterInstructions(): import("@smithy/middleware-endpoint").EndpointParameterInstructions; +}; +/** + *

The DeleteTable operation deletes a table and all of its items. After a + * DeleteTable request, the specified table is in the + * DELETING state until DynamoDB completes the deletion. If the table is + * in the ACTIVE state, you can delete it. If a table is in + * CREATING or UPDATING states, then DynamoDB returns a + * ResourceInUseException. If the specified table does not exist, DynamoDB + * returns a ResourceNotFoundException. If table is already in the + * DELETING state, no error is returned.

+ * + *

For global tables, this operation only applies to + * global tables using Version 2019.11.21 (Current version).

+ *
+ * + *

DynamoDB might continue to accept data read and write operations, such as + * GetItem and PutItem, on a table in the + * DELETING state until the table deletion is complete. For the full + * list of table states, see TableStatus.

+ *
+ *

When you delete a table, any indexes on that table are also deleted.

+ *

If you have DynamoDB Streams enabled on the table, then the corresponding stream on + * that table goes into the DISABLED state, and the stream is automatically + * deleted after 24 hours.

+ *

Use the DescribeTable action to check the status of the table.

+ * @example + * Use a bare-bones client and the command you need to make an API call. + * ```javascript + * import { DynamoDBClient, DeleteTableCommand } from "@aws-sdk/client-dynamodb"; // ES Modules import + * // const { DynamoDBClient, DeleteTableCommand } = require("@aws-sdk/client-dynamodb"); // CommonJS import + * const client = new DynamoDBClient(config); + * const input = { // DeleteTableInput + * TableName: "STRING_VALUE", // required + * }; + * const command = new DeleteTableCommand(input); + * const response = await client.send(command); + * // { // DeleteTableOutput + * // TableDescription: { // TableDescription + * // AttributeDefinitions: [ // AttributeDefinitions + * // { // AttributeDefinition + * // AttributeName: "STRING_VALUE", // required + * // AttributeType: "S" || "N" || "B", // required + * // }, + * // ], + * // TableName: "STRING_VALUE", + * // KeySchema: [ // KeySchema + * // { // KeySchemaElement + * // AttributeName: "STRING_VALUE", // required + * // KeyType: "HASH" || "RANGE", // required + * // }, + * // ], + * // TableStatus: "CREATING" || "UPDATING" || "DELETING" || "ACTIVE" || "INACCESSIBLE_ENCRYPTION_CREDENTIALS" || "ARCHIVING" || "ARCHIVED", + * // CreationDateTime: new Date("TIMESTAMP"), + * // ProvisionedThroughput: { // ProvisionedThroughputDescription + * // LastIncreaseDateTime: new Date("TIMESTAMP"), + * // LastDecreaseDateTime: new Date("TIMESTAMP"), + * // NumberOfDecreasesToday: Number("long"), + * // ReadCapacityUnits: Number("long"), + * // WriteCapacityUnits: Number("long"), + * // }, + * // TableSizeBytes: Number("long"), + * // ItemCount: Number("long"), + * // TableArn: "STRING_VALUE", + * // TableId: "STRING_VALUE", + * // BillingModeSummary: { // BillingModeSummary + * // BillingMode: "PROVISIONED" || "PAY_PER_REQUEST", + * // LastUpdateToPayPerRequestDateTime: new Date("TIMESTAMP"), + * // }, + * // LocalSecondaryIndexes: [ // LocalSecondaryIndexDescriptionList + * // { // LocalSecondaryIndexDescription + * // 
IndexName: "STRING_VALUE", + * // KeySchema: [ + * // { + * // AttributeName: "STRING_VALUE", // required + * // KeyType: "HASH" || "RANGE", // required + * // }, + * // ], + * // Projection: { // Projection + * // ProjectionType: "ALL" || "KEYS_ONLY" || "INCLUDE", + * // NonKeyAttributes: [ // NonKeyAttributeNameList + * // "STRING_VALUE", + * // ], + * // }, + * // IndexSizeBytes: Number("long"), + * // ItemCount: Number("long"), + * // IndexArn: "STRING_VALUE", + * // }, + * // ], + * // GlobalSecondaryIndexes: [ // GlobalSecondaryIndexDescriptionList + * // { // GlobalSecondaryIndexDescription + * // IndexName: "STRING_VALUE", + * // KeySchema: [ + * // { + * // AttributeName: "STRING_VALUE", // required + * // KeyType: "HASH" || "RANGE", // required + * // }, + * // ], + * // Projection: { + * // ProjectionType: "ALL" || "KEYS_ONLY" || "INCLUDE", + * // NonKeyAttributes: [ + * // "STRING_VALUE", + * // ], + * // }, + * // IndexStatus: "CREATING" || "UPDATING" || "DELETING" || "ACTIVE", + * // Backfilling: true || false, + * // ProvisionedThroughput: { + * // LastIncreaseDateTime: new Date("TIMESTAMP"), + * // LastDecreaseDateTime: new Date("TIMESTAMP"), + * // NumberOfDecreasesToday: Number("long"), + * // ReadCapacityUnits: Number("long"), + * // WriteCapacityUnits: Number("long"), + * // }, + * // IndexSizeBytes: Number("long"), + * // ItemCount: Number("long"), + * // IndexArn: "STRING_VALUE", + * // OnDemandThroughput: { // OnDemandThroughput + * // MaxReadRequestUnits: Number("long"), + * // MaxWriteRequestUnits: Number("long"), + * // }, + * // WarmThroughput: { // GlobalSecondaryIndexWarmThroughputDescription + * // ReadUnitsPerSecond: Number("long"), + * // WriteUnitsPerSecond: Number("long"), + * // Status: "CREATING" || "UPDATING" || "DELETING" || "ACTIVE", + * // }, + * // }, + * // ], + * // StreamSpecification: { // StreamSpecification + * // StreamEnabled: true || false, // required + * // StreamViewType: "NEW_IMAGE" || "OLD_IMAGE" || 
"NEW_AND_OLD_IMAGES" || "KEYS_ONLY", + * // }, + * // LatestStreamLabel: "STRING_VALUE", + * // LatestStreamArn: "STRING_VALUE", + * // GlobalTableVersion: "STRING_VALUE", + * // Replicas: [ // ReplicaDescriptionList + * // { // ReplicaDescription + * // RegionName: "STRING_VALUE", + * // ReplicaStatus: "CREATING" || "CREATION_FAILED" || "UPDATING" || "DELETING" || "ACTIVE" || "REGION_DISABLED" || "INACCESSIBLE_ENCRYPTION_CREDENTIALS", + * // ReplicaStatusDescription: "STRING_VALUE", + * // ReplicaStatusPercentProgress: "STRING_VALUE", + * // KMSMasterKeyId: "STRING_VALUE", + * // ProvisionedThroughputOverride: { // ProvisionedThroughputOverride + * // ReadCapacityUnits: Number("long"), + * // }, + * // OnDemandThroughputOverride: { // OnDemandThroughputOverride + * // MaxReadRequestUnits: Number("long"), + * // }, + * // WarmThroughput: { // TableWarmThroughputDescription + * // ReadUnitsPerSecond: Number("long"), + * // WriteUnitsPerSecond: Number("long"), + * // Status: "CREATING" || "UPDATING" || "DELETING" || "ACTIVE" || "INACCESSIBLE_ENCRYPTION_CREDENTIALS" || "ARCHIVING" || "ARCHIVED", + * // }, + * // GlobalSecondaryIndexes: [ // ReplicaGlobalSecondaryIndexDescriptionList + * // { // ReplicaGlobalSecondaryIndexDescription + * // IndexName: "STRING_VALUE", + * // ProvisionedThroughputOverride: { + * // ReadCapacityUnits: Number("long"), + * // }, + * // OnDemandThroughputOverride: { + * // MaxReadRequestUnits: Number("long"), + * // }, + * // WarmThroughput: { + * // ReadUnitsPerSecond: Number("long"), + * // WriteUnitsPerSecond: Number("long"), + * // Status: "CREATING" || "UPDATING" || "DELETING" || "ACTIVE", + * // }, + * // }, + * // ], + * // ReplicaInaccessibleDateTime: new Date("TIMESTAMP"), + * // ReplicaTableClassSummary: { // TableClassSummary + * // TableClass: "STANDARD" || "STANDARD_INFREQUENT_ACCESS", + * // LastUpdateDateTime: new Date("TIMESTAMP"), + * // }, + * // }, + * // ], + * // RestoreSummary: { // RestoreSummary + * // 
SourceBackupArn: "STRING_VALUE", + * // SourceTableArn: "STRING_VALUE", + * // RestoreDateTime: new Date("TIMESTAMP"), // required + * // RestoreInProgress: true || false, // required + * // }, + * // SSEDescription: { // SSEDescription + * // Status: "ENABLING" || "ENABLED" || "DISABLING" || "DISABLED" || "UPDATING", + * // SSEType: "AES256" || "KMS", + * // KMSMasterKeyArn: "STRING_VALUE", + * // InaccessibleEncryptionDateTime: new Date("TIMESTAMP"), + * // }, + * // ArchivalSummary: { // ArchivalSummary + * // ArchivalDateTime: new Date("TIMESTAMP"), + * // ArchivalReason: "STRING_VALUE", + * // ArchivalBackupArn: "STRING_VALUE", + * // }, + * // TableClassSummary: { + * // TableClass: "STANDARD" || "STANDARD_INFREQUENT_ACCESS", + * // LastUpdateDateTime: new Date("TIMESTAMP"), + * // }, + * // DeletionProtectionEnabled: true || false, + * // OnDemandThroughput: { + * // MaxReadRequestUnits: Number("long"), + * // MaxWriteRequestUnits: Number("long"), + * // }, + * // WarmThroughput: { + * // ReadUnitsPerSecond: Number("long"), + * // WriteUnitsPerSecond: Number("long"), + * // Status: "CREATING" || "UPDATING" || "DELETING" || "ACTIVE" || "INACCESSIBLE_ENCRYPTION_CREDENTIALS" || "ARCHIVING" || "ARCHIVED", + * // }, + * // MultiRegionConsistency: "EVENTUAL" || "STRONG", + * // }, + * // }; + * + * ``` + * + * @param DeleteTableCommandInput - {@link DeleteTableCommandInput} + * @returns {@link DeleteTableCommandOutput} + * @see {@link DeleteTableCommandInput} for command's `input` shape. + * @see {@link DeleteTableCommandOutput} for command's `response` shape. + * @see {@link DynamoDBClientResolvedConfig | config} for DynamoDBClient's `config` shape. + * + * @throws {@link InternalServerError} (server fault) + *

An error occurred on the server side.

+ * + * @throws {@link InvalidEndpointException} (client fault) + * + * @throws {@link LimitExceededException} (client fault) + *

There is no limit to the number of daily on-demand backups that can be taken.

+ *

For most purposes, up to 500 simultaneous table operations are allowed per account. These operations + * include CreateTable, UpdateTable, + * DeleteTable,UpdateTimeToLive, + * RestoreTableFromBackup, and RestoreTableToPointInTime.

+ *

When you are creating a table with one or more secondary + * indexes, you can have up to 250 such requests running at a time. However, if the table or + * index specifications are complex, then DynamoDB might temporarily reduce the number + * of concurrent operations.

+ *

When importing into DynamoDB, up to 50 simultaneous import table operations are allowed per account.

+ *

There is a soft account quota of 2,500 tables.

+ *

GetRecords was called with a value of more than 1000 for the limit request parameter.

+ *

More than 2 processes are reading from the same streams shard at the same time. Exceeding + * this limit may result in request throttling.

+ * + * @throws {@link ResourceInUseException} (client fault) + *

The operation conflicts with the resource's availability. For example:

+ *
    + *
  • + *

    You attempted to recreate an existing table.

    + *
  • + *
  • + *

    You tried to delete a table currently in the CREATING state.

    + *
  • + *
  • + *

    You tried to update a resource that was already being updated.

    + *
  • + *
+ *

When appropriate, wait for the ongoing update to complete and attempt the request again.

+ * + * @throws {@link ResourceNotFoundException} (client fault) + *

The operation tried to access a nonexistent table or index. The resource might not + * be specified correctly, or its status might not be ACTIVE.

+ * + * @throws {@link DynamoDBServiceException} + *

Base exception class for all service exceptions from DynamoDB service.

+ * + * + * @example To delete a table + * ```javascript + * // This example deletes the Music table. + * const input = { + * TableName: "Music" + * }; + * const command = new DeleteTableCommand(input); + * const response = await client.send(command); + * /* response is + * { + * TableDescription: { + * ItemCount: 0, + * ProvisionedThroughput: { + * NumberOfDecreasesToday: 1, + * ReadCapacityUnits: 5, + * WriteCapacityUnits: 5 + * }, + * TableName: "Music", + * TableSizeBytes: 0, + * TableStatus: "DELETING" + * } + * } + * *\/ + * ``` + * + * @public + */ +export declare class DeleteTableCommand extends DeleteTableCommand_base { + /** @internal type navigation helper, not in runtime. */ + protected static __types: { + api: { + input: DeleteTableInput; + output: DeleteTableOutput; + }; + sdk: { + input: DeleteTableCommandInput; + output: DeleteTableCommandOutput; + }; + }; +} diff --git a/amplify/functions/deleteDocument/node_modules/@aws-sdk/client-dynamodb/dist-types/commands/DescribeBackupCommand.d.ts b/amplify/functions/deleteDocument/node_modules/@aws-sdk/client-dynamodb/dist-types/commands/DescribeBackupCommand.d.ts new file mode 100644 index 0000000..8568846 --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@aws-sdk/client-dynamodb/dist-types/commands/DescribeBackupCommand.d.ts @@ -0,0 +1,173 @@ +import { Command as $Command } from "@smithy/smithy-client"; +import { MetadataBearer as __MetadataBearer } from "@smithy/types"; +import { DynamoDBClientResolvedConfig, ServiceInputTypes, ServiceOutputTypes } from "../DynamoDBClient"; +import { DescribeBackupInput, DescribeBackupOutput } from "../models/models_0"; +/** + * @public + */ +export type { __MetadataBearer }; +export { $Command }; +/** + * @public + * + * The input for {@link DescribeBackupCommand}. + */ +export interface DescribeBackupCommandInput extends DescribeBackupInput { +} +/** + * @public + * + * The output of {@link DescribeBackupCommand}. 
+ */ +export interface DescribeBackupCommandOutput extends DescribeBackupOutput, __MetadataBearer { +} +declare const DescribeBackupCommand_base: { + new (input: DescribeBackupCommandInput): import("@smithy/smithy-client").CommandImpl; + new (__0_0: DescribeBackupCommandInput): import("@smithy/smithy-client").CommandImpl; + getEndpointParameterInstructions(): import("@smithy/middleware-endpoint").EndpointParameterInstructions; +}; +/** + *

Describes an existing backup of a table.

+ *

You can call DescribeBackup at a maximum rate of 10 times per + * second.

+ * @example + * Use a bare-bones client and the command you need to make an API call. + * ```javascript + * import { DynamoDBClient, DescribeBackupCommand } from "@aws-sdk/client-dynamodb"; // ES Modules import + * // const { DynamoDBClient, DescribeBackupCommand } = require("@aws-sdk/client-dynamodb"); // CommonJS import + * const client = new DynamoDBClient(config); + * const input = { // DescribeBackupInput + * BackupArn: "STRING_VALUE", // required + * }; + * const command = new DescribeBackupCommand(input); + * const response = await client.send(command); + * // { // DescribeBackupOutput + * // BackupDescription: { // BackupDescription + * // BackupDetails: { // BackupDetails + * // BackupArn: "STRING_VALUE", // required + * // BackupName: "STRING_VALUE", // required + * // BackupSizeBytes: Number("long"), + * // BackupStatus: "CREATING" || "DELETED" || "AVAILABLE", // required + * // BackupType: "USER" || "SYSTEM" || "AWS_BACKUP", // required + * // BackupCreationDateTime: new Date("TIMESTAMP"), // required + * // BackupExpiryDateTime: new Date("TIMESTAMP"), + * // }, + * // SourceTableDetails: { // SourceTableDetails + * // TableName: "STRING_VALUE", // required + * // TableId: "STRING_VALUE", // required + * // TableArn: "STRING_VALUE", + * // TableSizeBytes: Number("long"), + * // KeySchema: [ // KeySchema // required + * // { // KeySchemaElement + * // AttributeName: "STRING_VALUE", // required + * // KeyType: "HASH" || "RANGE", // required + * // }, + * // ], + * // TableCreationDateTime: new Date("TIMESTAMP"), // required + * // ProvisionedThroughput: { // ProvisionedThroughput + * // ReadCapacityUnits: Number("long"), // required + * // WriteCapacityUnits: Number("long"), // required + * // }, + * // OnDemandThroughput: { // OnDemandThroughput + * // MaxReadRequestUnits: Number("long"), + * // MaxWriteRequestUnits: Number("long"), + * // }, + * // ItemCount: Number("long"), + * // BillingMode: "PROVISIONED" || "PAY_PER_REQUEST", + * // }, + * // 
SourceTableFeatureDetails: { // SourceTableFeatureDetails + * // LocalSecondaryIndexes: [ // LocalSecondaryIndexes + * // { // LocalSecondaryIndexInfo + * // IndexName: "STRING_VALUE", + * // KeySchema: [ + * // { + * // AttributeName: "STRING_VALUE", // required + * // KeyType: "HASH" || "RANGE", // required + * // }, + * // ], + * // Projection: { // Projection + * // ProjectionType: "ALL" || "KEYS_ONLY" || "INCLUDE", + * // NonKeyAttributes: [ // NonKeyAttributeNameList + * // "STRING_VALUE", + * // ], + * // }, + * // }, + * // ], + * // GlobalSecondaryIndexes: [ // GlobalSecondaryIndexes + * // { // GlobalSecondaryIndexInfo + * // IndexName: "STRING_VALUE", + * // KeySchema: [ + * // { + * // AttributeName: "STRING_VALUE", // required + * // KeyType: "HASH" || "RANGE", // required + * // }, + * // ], + * // Projection: { + * // ProjectionType: "ALL" || "KEYS_ONLY" || "INCLUDE", + * // NonKeyAttributes: [ + * // "STRING_VALUE", + * // ], + * // }, + * // ProvisionedThroughput: { + * // ReadCapacityUnits: Number("long"), // required + * // WriteCapacityUnits: Number("long"), // required + * // }, + * // OnDemandThroughput: { + * // MaxReadRequestUnits: Number("long"), + * // MaxWriteRequestUnits: Number("long"), + * // }, + * // }, + * // ], + * // StreamDescription: { // StreamSpecification + * // StreamEnabled: true || false, // required + * // StreamViewType: "NEW_IMAGE" || "OLD_IMAGE" || "NEW_AND_OLD_IMAGES" || "KEYS_ONLY", + * // }, + * // TimeToLiveDescription: { // TimeToLiveDescription + * // TimeToLiveStatus: "ENABLING" || "DISABLING" || "ENABLED" || "DISABLED", + * // AttributeName: "STRING_VALUE", + * // }, + * // SSEDescription: { // SSEDescription + * // Status: "ENABLING" || "ENABLED" || "DISABLING" || "DISABLED" || "UPDATING", + * // SSEType: "AES256" || "KMS", + * // KMSMasterKeyArn: "STRING_VALUE", + * // InaccessibleEncryptionDateTime: new Date("TIMESTAMP"), + * // }, + * // }, + * // }, + * // }; + * + * ``` + * + * @param 
DescribeBackupCommandInput - {@link DescribeBackupCommandInput} + * @returns {@link DescribeBackupCommandOutput} + * @see {@link DescribeBackupCommandInput} for command's `input` shape. + * @see {@link DescribeBackupCommandOutput} for command's `response` shape. + * @see {@link DynamoDBClientResolvedConfig | config} for DynamoDBClient's `config` shape. + * + * @throws {@link BackupNotFoundException} (client fault) + *

Backup not found for the given BackupARN.

+ * + * @throws {@link InternalServerError} (server fault) + *

An error occurred on the server side.

+ * + * @throws {@link InvalidEndpointException} (client fault) + * + * @throws {@link DynamoDBServiceException} + *

Base exception class for all service exceptions from DynamoDB service.

+ * + * + * @public + */ +export declare class DescribeBackupCommand extends DescribeBackupCommand_base { + /** @internal type navigation helper, not in runtime. */ + protected static __types: { + api: { + input: DescribeBackupInput; + output: DescribeBackupOutput; + }; + sdk: { + input: DescribeBackupCommandInput; + output: DescribeBackupCommandOutput; + }; + }; +} diff --git a/amplify/functions/deleteDocument/node_modules/@aws-sdk/client-dynamodb/dist-types/commands/DescribeContinuousBackupsCommand.d.ts b/amplify/functions/deleteDocument/node_modules/@aws-sdk/client-dynamodb/dist-types/commands/DescribeContinuousBackupsCommand.d.ts new file mode 100644 index 0000000..301ba60 --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@aws-sdk/client-dynamodb/dist-types/commands/DescribeContinuousBackupsCommand.d.ts @@ -0,0 +1,101 @@ +import { Command as $Command } from "@smithy/smithy-client"; +import { MetadataBearer as __MetadataBearer } from "@smithy/types"; +import { DynamoDBClientResolvedConfig, ServiceInputTypes, ServiceOutputTypes } from "../DynamoDBClient"; +import { DescribeContinuousBackupsInput, DescribeContinuousBackupsOutput } from "../models/models_0"; +/** + * @public + */ +export type { __MetadataBearer }; +export { $Command }; +/** + * @public + * + * The input for {@link DescribeContinuousBackupsCommand}. + */ +export interface DescribeContinuousBackupsCommandInput extends DescribeContinuousBackupsInput { +} +/** + * @public + * + * The output of {@link DescribeContinuousBackupsCommand}. 
+ */ +export interface DescribeContinuousBackupsCommandOutput extends DescribeContinuousBackupsOutput, __MetadataBearer { +} +declare const DescribeContinuousBackupsCommand_base: { + new (input: DescribeContinuousBackupsCommandInput): import("@smithy/smithy-client").CommandImpl; + new (__0_0: DescribeContinuousBackupsCommandInput): import("@smithy/smithy-client").CommandImpl; + getEndpointParameterInstructions(): import("@smithy/middleware-endpoint").EndpointParameterInstructions; +}; +/** + *

Checks the status of continuous backups and point in time recovery on the specified + * table. Continuous backups are ENABLED on all tables at table creation. If + * point in time recovery is enabled, PointInTimeRecoveryStatus will be set to + * ENABLED.

+ *

After continuous backups and point in time recovery are enabled, you can restore to + * any point in time within EarliestRestorableDateTime and + * LatestRestorableDateTime.

+ *

+ * LatestRestorableDateTime is typically 5 minutes before the current time. + * You can restore your table to any point in time in the last 35 days. You can set the + * recovery period to any value between 1 and 35 days.

+ *

You can call DescribeContinuousBackups at a maximum rate of 10 times per + * second.

+ * @example + * Use a bare-bones client and the command you need to make an API call. + * ```javascript + * import { DynamoDBClient, DescribeContinuousBackupsCommand } from "@aws-sdk/client-dynamodb"; // ES Modules import + * // const { DynamoDBClient, DescribeContinuousBackupsCommand } = require("@aws-sdk/client-dynamodb"); // CommonJS import + * const client = new DynamoDBClient(config); + * const input = { // DescribeContinuousBackupsInput + * TableName: "STRING_VALUE", // required + * }; + * const command = new DescribeContinuousBackupsCommand(input); + * const response = await client.send(command); + * // { // DescribeContinuousBackupsOutput + * // ContinuousBackupsDescription: { // ContinuousBackupsDescription + * // ContinuousBackupsStatus: "ENABLED" || "DISABLED", // required + * // PointInTimeRecoveryDescription: { // PointInTimeRecoveryDescription + * // PointInTimeRecoveryStatus: "ENABLED" || "DISABLED", + * // RecoveryPeriodInDays: Number("int"), + * // EarliestRestorableDateTime: new Date("TIMESTAMP"), + * // LatestRestorableDateTime: new Date("TIMESTAMP"), + * // }, + * // }, + * // }; + * + * ``` + * + * @param DescribeContinuousBackupsCommandInput - {@link DescribeContinuousBackupsCommandInput} + * @returns {@link DescribeContinuousBackupsCommandOutput} + * @see {@link DescribeContinuousBackupsCommandInput} for command's `input` shape. + * @see {@link DescribeContinuousBackupsCommandOutput} for command's `response` shape. + * @see {@link DynamoDBClientResolvedConfig | config} for DynamoDBClient's `config` shape. + * + * @throws {@link InternalServerError} (server fault) + *

An error occurred on the server side.

+ * + * @throws {@link InvalidEndpointException} (client fault) + * + * @throws {@link TableNotFoundException} (client fault) + *

A source table with the name TableName does not currently exist within + * the subscriber's account or the subscriber is operating in the wrong Amazon Web Services Region.

+ * + * @throws {@link DynamoDBServiceException} + *

Base exception class for all service exceptions from DynamoDB service.

+ * + * + * @public + */ +export declare class DescribeContinuousBackupsCommand extends DescribeContinuousBackupsCommand_base { + /** @internal type navigation helper, not in runtime. */ + protected static __types: { + api: { + input: DescribeContinuousBackupsInput; + output: DescribeContinuousBackupsOutput; + }; + sdk: { + input: DescribeContinuousBackupsCommandInput; + output: DescribeContinuousBackupsCommandOutput; + }; + }; +} diff --git a/amplify/functions/deleteDocument/node_modules/@aws-sdk/client-dynamodb/dist-types/commands/DescribeContributorInsightsCommand.d.ts b/amplify/functions/deleteDocument/node_modules/@aws-sdk/client-dynamodb/dist-types/commands/DescribeContributorInsightsCommand.d.ts new file mode 100644 index 0000000..99eb5e0 --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@aws-sdk/client-dynamodb/dist-types/commands/DescribeContributorInsightsCommand.d.ts @@ -0,0 +1,91 @@ +import { Command as $Command } from "@smithy/smithy-client"; +import { MetadataBearer as __MetadataBearer } from "@smithy/types"; +import { DynamoDBClientResolvedConfig, ServiceInputTypes, ServiceOutputTypes } from "../DynamoDBClient"; +import { DescribeContributorInsightsInput, DescribeContributorInsightsOutput } from "../models/models_0"; +/** + * @public + */ +export type { __MetadataBearer }; +export { $Command }; +/** + * @public + * + * The input for {@link DescribeContributorInsightsCommand}. + */ +export interface DescribeContributorInsightsCommandInput extends DescribeContributorInsightsInput { +} +/** + * @public + * + * The output of {@link DescribeContributorInsightsCommand}. 
+ */ +export interface DescribeContributorInsightsCommandOutput extends DescribeContributorInsightsOutput, __MetadataBearer { +} +declare const DescribeContributorInsightsCommand_base: { + new (input: DescribeContributorInsightsCommandInput): import("@smithy/smithy-client").CommandImpl; + new (__0_0: DescribeContributorInsightsCommandInput): import("@smithy/smithy-client").CommandImpl; + getEndpointParameterInstructions(): import("@smithy/middleware-endpoint").EndpointParameterInstructions; +}; +/** + *

Returns information about contributor insights for a given table or global secondary + * index.

+ * @example + * Use a bare-bones client and the command you need to make an API call. + * ```javascript + * import { DynamoDBClient, DescribeContributorInsightsCommand } from "@aws-sdk/client-dynamodb"; // ES Modules import + * // const { DynamoDBClient, DescribeContributorInsightsCommand } = require("@aws-sdk/client-dynamodb"); // CommonJS import + * const client = new DynamoDBClient(config); + * const input = { // DescribeContributorInsightsInput + * TableName: "STRING_VALUE", // required + * IndexName: "STRING_VALUE", + * }; + * const command = new DescribeContributorInsightsCommand(input); + * const response = await client.send(command); + * // { // DescribeContributorInsightsOutput + * // TableName: "STRING_VALUE", + * // IndexName: "STRING_VALUE", + * // ContributorInsightsRuleList: [ // ContributorInsightsRuleList + * // "STRING_VALUE", + * // ], + * // ContributorInsightsStatus: "ENABLING" || "ENABLED" || "DISABLING" || "DISABLED" || "FAILED", + * // LastUpdateDateTime: new Date("TIMESTAMP"), + * // FailureException: { // FailureException + * // ExceptionName: "STRING_VALUE", + * // ExceptionDescription: "STRING_VALUE", + * // }, + * // }; + * + * ``` + * + * @param DescribeContributorInsightsCommandInput - {@link DescribeContributorInsightsCommandInput} + * @returns {@link DescribeContributorInsightsCommandOutput} + * @see {@link DescribeContributorInsightsCommandInput} for command's `input` shape. + * @see {@link DescribeContributorInsightsCommandOutput} for command's `response` shape. + * @see {@link DynamoDBClientResolvedConfig | config} for DynamoDBClient's `config` shape. + * + * @throws {@link InternalServerError} (server fault) + *

An error occurred on the server side.

+ * + * @throws {@link ResourceNotFoundException} (client fault) + *

The operation tried to access a nonexistent table or index. The resource might not + * be specified correctly, or its status might not be ACTIVE.

+ * + * @throws {@link DynamoDBServiceException} + *

Base exception class for all service exceptions from DynamoDB service.

+ * + * + * @public + */ +export declare class DescribeContributorInsightsCommand extends DescribeContributorInsightsCommand_base { + /** @internal type navigation helper, not in runtime. */ + protected static __types: { + api: { + input: DescribeContributorInsightsInput; + output: DescribeContributorInsightsOutput; + }; + sdk: { + input: DescribeContributorInsightsCommandInput; + output: DescribeContributorInsightsCommandOutput; + }; + }; +} diff --git a/amplify/functions/deleteDocument/node_modules/@aws-sdk/client-dynamodb/dist-types/commands/DescribeEndpointsCommand.d.ts b/amplify/functions/deleteDocument/node_modules/@aws-sdk/client-dynamodb/dist-types/commands/DescribeEndpointsCommand.d.ts new file mode 100644 index 0000000..57fe82d --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@aws-sdk/client-dynamodb/dist-types/commands/DescribeEndpointsCommand.d.ts @@ -0,0 +1,76 @@ +import { Command as $Command } from "@smithy/smithy-client"; +import { MetadataBearer as __MetadataBearer } from "@smithy/types"; +import { DynamoDBClientResolvedConfig, ServiceInputTypes, ServiceOutputTypes } from "../DynamoDBClient"; +import { DescribeEndpointsRequest, DescribeEndpointsResponse } from "../models/models_0"; +/** + * @public + */ +export type { __MetadataBearer }; +export { $Command }; +/** + * @public + * + * The input for {@link DescribeEndpointsCommand}. + */ +export interface DescribeEndpointsCommandInput extends DescribeEndpointsRequest { +} +/** + * @public + * + * The output of {@link DescribeEndpointsCommand}. 
+ */ +export interface DescribeEndpointsCommandOutput extends DescribeEndpointsResponse, __MetadataBearer { +} +declare const DescribeEndpointsCommand_base: { + new (input: DescribeEndpointsCommandInput): import("@smithy/smithy-client").CommandImpl; + new (...[input]: [] | [DescribeEndpointsCommandInput]): import("@smithy/smithy-client").CommandImpl; + getEndpointParameterInstructions(): import("@smithy/middleware-endpoint").EndpointParameterInstructions; +}; +/** + *

Returns the regional endpoint information. For more information on policy permissions, + * please see Internetwork traffic privacy.

+ * @example + * Use a bare-bones client and the command you need to make an API call. + * ```javascript + * import { DynamoDBClient, DescribeEndpointsCommand } from "@aws-sdk/client-dynamodb"; // ES Modules import + * // const { DynamoDBClient, DescribeEndpointsCommand } = require("@aws-sdk/client-dynamodb"); // CommonJS import + * const client = new DynamoDBClient(config); + * const input = {}; + * const command = new DescribeEndpointsCommand(input); + * const response = await client.send(command); + * // { // DescribeEndpointsResponse + * // Endpoints: [ // Endpoints // required + * // { // Endpoint + * // Address: "STRING_VALUE", // required + * // CachePeriodInMinutes: Number("long"), // required + * // }, + * // ], + * // }; + * + * ``` + * + * @param DescribeEndpointsCommandInput - {@link DescribeEndpointsCommandInput} + * @returns {@link DescribeEndpointsCommandOutput} + * @see {@link DescribeEndpointsCommandInput} for command's `input` shape. + * @see {@link DescribeEndpointsCommandOutput} for command's `response` shape. + * @see {@link DynamoDBClientResolvedConfig | config} for DynamoDBClient's `config` shape. + * + * @throws {@link DynamoDBServiceException} + *

Base exception class for all service exceptions from DynamoDB service.

+ * + * + * @public + */ +export declare class DescribeEndpointsCommand extends DescribeEndpointsCommand_base { + /** @internal type navigation helper, not in runtime. */ + protected static __types: { + api: { + input: {}; + output: DescribeEndpointsResponse; + }; + sdk: { + input: DescribeEndpointsCommandInput; + output: DescribeEndpointsCommandOutput; + }; + }; +} diff --git a/amplify/functions/deleteDocument/node_modules/@aws-sdk/client-dynamodb/dist-types/commands/DescribeExportCommand.d.ts b/amplify/functions/deleteDocument/node_modules/@aws-sdk/client-dynamodb/dist-types/commands/DescribeExportCommand.d.ts new file mode 100644 index 0000000..400d9ab --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@aws-sdk/client-dynamodb/dist-types/commands/DescribeExportCommand.d.ts @@ -0,0 +1,120 @@ +import { Command as $Command } from "@smithy/smithy-client"; +import { MetadataBearer as __MetadataBearer } from "@smithy/types"; +import { DynamoDBClientResolvedConfig, ServiceInputTypes, ServiceOutputTypes } from "../DynamoDBClient"; +import { DescribeExportInput, DescribeExportOutput } from "../models/models_0"; +/** + * @public + */ +export type { __MetadataBearer }; +export { $Command }; +/** + * @public + * + * The input for {@link DescribeExportCommand}. + */ +export interface DescribeExportCommandInput extends DescribeExportInput { +} +/** + * @public + * + * The output of {@link DescribeExportCommand}. + */ +export interface DescribeExportCommandOutput extends DescribeExportOutput, __MetadataBearer { +} +declare const DescribeExportCommand_base: { + new (input: DescribeExportCommandInput): import("@smithy/smithy-client").CommandImpl; + new (__0_0: DescribeExportCommandInput): import("@smithy/smithy-client").CommandImpl; + getEndpointParameterInstructions(): import("@smithy/middleware-endpoint").EndpointParameterInstructions; +}; +/** + *

Describes an existing table export.

+ * @example + * Use a bare-bones client and the command you need to make an API call. + * ```javascript + * import { DynamoDBClient, DescribeExportCommand } from "@aws-sdk/client-dynamodb"; // ES Modules import + * // const { DynamoDBClient, DescribeExportCommand } = require("@aws-sdk/client-dynamodb"); // CommonJS import + * const client = new DynamoDBClient(config); + * const input = { // DescribeExportInput + * ExportArn: "STRING_VALUE", // required + * }; + * const command = new DescribeExportCommand(input); + * const response = await client.send(command); + * // { // DescribeExportOutput + * // ExportDescription: { // ExportDescription + * // ExportArn: "STRING_VALUE", + * // ExportStatus: "IN_PROGRESS" || "COMPLETED" || "FAILED", + * // StartTime: new Date("TIMESTAMP"), + * // EndTime: new Date("TIMESTAMP"), + * // ExportManifest: "STRING_VALUE", + * // TableArn: "STRING_VALUE", + * // TableId: "STRING_VALUE", + * // ExportTime: new Date("TIMESTAMP"), + * // ClientToken: "STRING_VALUE", + * // S3Bucket: "STRING_VALUE", + * // S3BucketOwner: "STRING_VALUE", + * // S3Prefix: "STRING_VALUE", + * // S3SseAlgorithm: "AES256" || "KMS", + * // S3SseKmsKeyId: "STRING_VALUE", + * // FailureCode: "STRING_VALUE", + * // FailureMessage: "STRING_VALUE", + * // ExportFormat: "DYNAMODB_JSON" || "ION", + * // BilledSizeBytes: Number("long"), + * // ItemCount: Number("long"), + * // ExportType: "FULL_EXPORT" || "INCREMENTAL_EXPORT", + * // IncrementalExportSpecification: { // IncrementalExportSpecification + * // ExportFromTime: new Date("TIMESTAMP"), + * // ExportToTime: new Date("TIMESTAMP"), + * // ExportViewType: "NEW_IMAGE" || "NEW_AND_OLD_IMAGES", + * // }, + * // }, + * // }; + * + * ``` + * + * @param DescribeExportCommandInput - {@link DescribeExportCommandInput} + * @returns {@link DescribeExportCommandOutput} + * @see {@link DescribeExportCommandInput} for command's `input` shape. + * @see {@link DescribeExportCommandOutput} for command's `response` shape. 
+ * @see {@link DynamoDBClientResolvedConfig | config} for DynamoDBClient's `config` shape. + * + * @throws {@link ExportNotFoundException} (client fault) + *

The specified export was not found.

+ * + * @throws {@link InternalServerError} (server fault) + *

An error occurred on the server side.

+ * + * @throws {@link LimitExceededException} (client fault) + *

There is no limit to the number of daily on-demand backups that can be taken.

+ *

For most purposes, up to 500 simultaneous table operations are allowed per account. These operations + * include CreateTable, UpdateTable, + * DeleteTable,UpdateTimeToLive, + * RestoreTableFromBackup, and RestoreTableToPointInTime.

+ *

When you are creating a table with one or more secondary + * indexes, you can have up to 250 such requests running at a time. However, if the table or + * index specifications are complex, then DynamoDB might temporarily reduce the number + * of concurrent operations.

+ *

When importing into DynamoDB, up to 50 simultaneous import table operations are allowed per account.

+ *

There is a soft account quota of 2,500 tables.

+ *

GetRecords was called with a value of more than 1000 for the limit request parameter.

+ *

More than 2 processes are reading from the same streams shard at the same time. Exceeding + * this limit may result in request throttling.

+ * + * @throws {@link DynamoDBServiceException} + *

Base exception class for all service exceptions from DynamoDB service.

+ * + * + * @public + */ +export declare class DescribeExportCommand extends DescribeExportCommand_base { + /** @internal type navigation helper, not in runtime. */ + protected static __types: { + api: { + input: DescribeExportInput; + output: DescribeExportOutput; + }; + sdk: { + input: DescribeExportCommandInput; + output: DescribeExportCommandOutput; + }; + }; +} diff --git a/amplify/functions/deleteDocument/node_modules/@aws-sdk/client-dynamodb/dist-types/commands/DescribeGlobalTableCommand.d.ts b/amplify/functions/deleteDocument/node_modules/@aws-sdk/client-dynamodb/dist-types/commands/DescribeGlobalTableCommand.d.ts new file mode 100644 index 0000000..79c9f59 --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@aws-sdk/client-dynamodb/dist-types/commands/DescribeGlobalTableCommand.d.ts @@ -0,0 +1,130 @@ +import { Command as $Command } from "@smithy/smithy-client"; +import { MetadataBearer as __MetadataBearer } from "@smithy/types"; +import { DynamoDBClientResolvedConfig, ServiceInputTypes, ServiceOutputTypes } from "../DynamoDBClient"; +import { DescribeGlobalTableInput, DescribeGlobalTableOutput } from "../models/models_0"; +/** + * @public + */ +export type { __MetadataBearer }; +export { $Command }; +/** + * @public + * + * The input for {@link DescribeGlobalTableCommand}. + */ +export interface DescribeGlobalTableCommandInput extends DescribeGlobalTableInput { +} +/** + * @public + * + * The output of {@link DescribeGlobalTableCommand}. + */ +export interface DescribeGlobalTableCommandOutput extends DescribeGlobalTableOutput, __MetadataBearer { +} +declare const DescribeGlobalTableCommand_base: { + new (input: DescribeGlobalTableCommandInput): import("@smithy/smithy-client").CommandImpl; + new (__0_0: DescribeGlobalTableCommandInput): import("@smithy/smithy-client").CommandImpl; + getEndpointParameterInstructions(): import("@smithy/middleware-endpoint").EndpointParameterInstructions; +}; +/** + *

Returns information about the specified global table.

+ * + *

This documentation is for version 2017.11.29 (Legacy) of global tables, which should be avoided for new global tables. Customers should use Global Tables version 2019.11.21 (Current) when possible, because it provides greater flexibility, higher efficiency, and consumes less write capacity than 2017.11.29 (Legacy).

+ *

To determine which version you're using, see Determining the global table version you are using. To update existing global tables from version 2017.11.29 (Legacy) to version 2019.11.21 (Current), see Upgrading global tables.

+ *
+ * @example + * Use a bare-bones client and the command you need to make an API call. + * ```javascript + * import { DynamoDBClient, DescribeGlobalTableCommand } from "@aws-sdk/client-dynamodb"; // ES Modules import + * // const { DynamoDBClient, DescribeGlobalTableCommand } = require("@aws-sdk/client-dynamodb"); // CommonJS import + * const client = new DynamoDBClient(config); + * const input = { // DescribeGlobalTableInput + * GlobalTableName: "STRING_VALUE", // required + * }; + * const command = new DescribeGlobalTableCommand(input); + * const response = await client.send(command); + * // { // DescribeGlobalTableOutput + * // GlobalTableDescription: { // GlobalTableDescription + * // ReplicationGroup: [ // ReplicaDescriptionList + * // { // ReplicaDescription + * // RegionName: "STRING_VALUE", + * // ReplicaStatus: "CREATING" || "CREATION_FAILED" || "UPDATING" || "DELETING" || "ACTIVE" || "REGION_DISABLED" || "INACCESSIBLE_ENCRYPTION_CREDENTIALS", + * // ReplicaStatusDescription: "STRING_VALUE", + * // ReplicaStatusPercentProgress: "STRING_VALUE", + * // KMSMasterKeyId: "STRING_VALUE", + * // ProvisionedThroughputOverride: { // ProvisionedThroughputOverride + * // ReadCapacityUnits: Number("long"), + * // }, + * // OnDemandThroughputOverride: { // OnDemandThroughputOverride + * // MaxReadRequestUnits: Number("long"), + * // }, + * // WarmThroughput: { // TableWarmThroughputDescription + * // ReadUnitsPerSecond: Number("long"), + * // WriteUnitsPerSecond: Number("long"), + * // Status: "CREATING" || "UPDATING" || "DELETING" || "ACTIVE" || "INACCESSIBLE_ENCRYPTION_CREDENTIALS" || "ARCHIVING" || "ARCHIVED", + * // }, + * // GlobalSecondaryIndexes: [ // ReplicaGlobalSecondaryIndexDescriptionList + * // { // ReplicaGlobalSecondaryIndexDescription + * // IndexName: "STRING_VALUE", + * // ProvisionedThroughputOverride: { + * // ReadCapacityUnits: Number("long"), + * // }, + * // OnDemandThroughputOverride: { + * // MaxReadRequestUnits: Number("long"), + * // }, + * 
// WarmThroughput: { // GlobalSecondaryIndexWarmThroughputDescription + * // ReadUnitsPerSecond: Number("long"), + * // WriteUnitsPerSecond: Number("long"), + * // Status: "CREATING" || "UPDATING" || "DELETING" || "ACTIVE", + * // }, + * // }, + * // ], + * // ReplicaInaccessibleDateTime: new Date("TIMESTAMP"), + * // ReplicaTableClassSummary: { // TableClassSummary + * // TableClass: "STANDARD" || "STANDARD_INFREQUENT_ACCESS", + * // LastUpdateDateTime: new Date("TIMESTAMP"), + * // }, + * // }, + * // ], + * // GlobalTableArn: "STRING_VALUE", + * // CreationDateTime: new Date("TIMESTAMP"), + * // GlobalTableStatus: "CREATING" || "ACTIVE" || "DELETING" || "UPDATING", + * // GlobalTableName: "STRING_VALUE", + * // }, + * // }; + * + * ``` + * + * @param DescribeGlobalTableCommandInput - {@link DescribeGlobalTableCommandInput} + * @returns {@link DescribeGlobalTableCommandOutput} + * @see {@link DescribeGlobalTableCommandInput} for command's `input` shape. + * @see {@link DescribeGlobalTableCommandOutput} for command's `response` shape. + * @see {@link DynamoDBClientResolvedConfig | config} for DynamoDBClient's `config` shape. + * + * @throws {@link GlobalTableNotFoundException} (client fault) + *

The specified global table does not exist.

+ * + * @throws {@link InternalServerError} (server fault) + *

An error occurred on the server side.

+ * + * @throws {@link InvalidEndpointException} (client fault) + * + * @throws {@link DynamoDBServiceException} + *

Base exception class for all service exceptions from DynamoDB service.

+ * + * + * @public + */ +export declare class DescribeGlobalTableCommand extends DescribeGlobalTableCommand_base { + /** @internal type navigation helper, not in runtime. */ + protected static __types: { + api: { + input: DescribeGlobalTableInput; + output: DescribeGlobalTableOutput; + }; + sdk: { + input: DescribeGlobalTableCommandInput; + output: DescribeGlobalTableCommandOutput; + }; + }; +} diff --git a/amplify/functions/deleteDocument/node_modules/@aws-sdk/client-dynamodb/dist-types/commands/DescribeGlobalTableSettingsCommand.d.ts b/amplify/functions/deleteDocument/node_modules/@aws-sdk/client-dynamodb/dist-types/commands/DescribeGlobalTableSettingsCommand.d.ts new file mode 100644 index 0000000..aadabee --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@aws-sdk/client-dynamodb/dist-types/commands/DescribeGlobalTableSettingsCommand.d.ts @@ -0,0 +1,176 @@ +import { Command as $Command } from "@smithy/smithy-client"; +import { MetadataBearer as __MetadataBearer } from "@smithy/types"; +import { DynamoDBClientResolvedConfig, ServiceInputTypes, ServiceOutputTypes } from "../DynamoDBClient"; +import { DescribeGlobalTableSettingsInput, DescribeGlobalTableSettingsOutput } from "../models/models_0"; +/** + * @public + */ +export type { __MetadataBearer }; +export { $Command }; +/** + * @public + * + * The input for {@link DescribeGlobalTableSettingsCommand}. + */ +export interface DescribeGlobalTableSettingsCommandInput extends DescribeGlobalTableSettingsInput { +} +/** + * @public + * + * The output of {@link DescribeGlobalTableSettingsCommand}. 
+ */ +export interface DescribeGlobalTableSettingsCommandOutput extends DescribeGlobalTableSettingsOutput, __MetadataBearer { +} +declare const DescribeGlobalTableSettingsCommand_base: { + new (input: DescribeGlobalTableSettingsCommandInput): import("@smithy/smithy-client").CommandImpl; + new (__0_0: DescribeGlobalTableSettingsCommandInput): import("@smithy/smithy-client").CommandImpl; + getEndpointParameterInstructions(): import("@smithy/middleware-endpoint").EndpointParameterInstructions; +}; +/** + *

Describes Region-specific settings for a global table.

+ * + *

This documentation is for version 2017.11.29 (Legacy) of global tables, which should be avoided for new global tables. Customers should use Global Tables version 2019.11.21 (Current) when possible, because it provides greater flexibility, higher efficiency, and consumes less write capacity than 2017.11.29 (Legacy).

+ *

To determine which version you're using, see Determining the global table version you are using. To update existing global tables from version 2017.11.29 (Legacy) to version 2019.11.21 (Current), see Upgrading global tables.

+ *
+ * @example + * Use a bare-bones client and the command you need to make an API call. + * ```javascript + * import { DynamoDBClient, DescribeGlobalTableSettingsCommand } from "@aws-sdk/client-dynamodb"; // ES Modules import + * // const { DynamoDBClient, DescribeGlobalTableSettingsCommand } = require("@aws-sdk/client-dynamodb"); // CommonJS import + * const client = new DynamoDBClient(config); + * const input = { // DescribeGlobalTableSettingsInput + * GlobalTableName: "STRING_VALUE", // required + * }; + * const command = new DescribeGlobalTableSettingsCommand(input); + * const response = await client.send(command); + * // { // DescribeGlobalTableSettingsOutput + * // GlobalTableName: "STRING_VALUE", + * // ReplicaSettings: [ // ReplicaSettingsDescriptionList + * // { // ReplicaSettingsDescription + * // RegionName: "STRING_VALUE", // required + * // ReplicaStatus: "CREATING" || "CREATION_FAILED" || "UPDATING" || "DELETING" || "ACTIVE" || "REGION_DISABLED" || "INACCESSIBLE_ENCRYPTION_CREDENTIALS", + * // ReplicaBillingModeSummary: { // BillingModeSummary + * // BillingMode: "PROVISIONED" || "PAY_PER_REQUEST", + * // LastUpdateToPayPerRequestDateTime: new Date("TIMESTAMP"), + * // }, + * // ReplicaProvisionedReadCapacityUnits: Number("long"), + * // ReplicaProvisionedReadCapacityAutoScalingSettings: { // AutoScalingSettingsDescription + * // MinimumUnits: Number("long"), + * // MaximumUnits: Number("long"), + * // AutoScalingDisabled: true || false, + * // AutoScalingRoleArn: "STRING_VALUE", + * // ScalingPolicies: [ // AutoScalingPolicyDescriptionList + * // { // AutoScalingPolicyDescription + * // PolicyName: "STRING_VALUE", + * // TargetTrackingScalingPolicyConfiguration: { // AutoScalingTargetTrackingScalingPolicyConfigurationDescription + * // DisableScaleIn: true || false, + * // ScaleInCooldown: Number("int"), + * // ScaleOutCooldown: Number("int"), + * // TargetValue: Number("double"), // required + * // }, + * // }, + * // ], + * // }, + * // 
ReplicaProvisionedWriteCapacityUnits: Number("long"), + * // ReplicaProvisionedWriteCapacityAutoScalingSettings: { + * // MinimumUnits: Number("long"), + * // MaximumUnits: Number("long"), + * // AutoScalingDisabled: true || false, + * // AutoScalingRoleArn: "STRING_VALUE", + * // ScalingPolicies: [ + * // { + * // PolicyName: "STRING_VALUE", + * // TargetTrackingScalingPolicyConfiguration: { + * // DisableScaleIn: true || false, + * // ScaleInCooldown: Number("int"), + * // ScaleOutCooldown: Number("int"), + * // TargetValue: Number("double"), // required + * // }, + * // }, + * // ], + * // }, + * // ReplicaGlobalSecondaryIndexSettings: [ // ReplicaGlobalSecondaryIndexSettingsDescriptionList + * // { // ReplicaGlobalSecondaryIndexSettingsDescription + * // IndexName: "STRING_VALUE", // required + * // IndexStatus: "CREATING" || "UPDATING" || "DELETING" || "ACTIVE", + * // ProvisionedReadCapacityUnits: Number("long"), + * // ProvisionedReadCapacityAutoScalingSettings: { + * // MinimumUnits: Number("long"), + * // MaximumUnits: Number("long"), + * // AutoScalingDisabled: true || false, + * // AutoScalingRoleArn: "STRING_VALUE", + * // ScalingPolicies: [ + * // { + * // PolicyName: "STRING_VALUE", + * // TargetTrackingScalingPolicyConfiguration: { + * // DisableScaleIn: true || false, + * // ScaleInCooldown: Number("int"), + * // ScaleOutCooldown: Number("int"), + * // TargetValue: Number("double"), // required + * // }, + * // }, + * // ], + * // }, + * // ProvisionedWriteCapacityUnits: Number("long"), + * // ProvisionedWriteCapacityAutoScalingSettings: { + * // MinimumUnits: Number("long"), + * // MaximumUnits: Number("long"), + * // AutoScalingDisabled: true || false, + * // AutoScalingRoleArn: "STRING_VALUE", + * // ScalingPolicies: [ + * // { + * // PolicyName: "STRING_VALUE", + * // TargetTrackingScalingPolicyConfiguration: { + * // DisableScaleIn: true || false, + * // ScaleInCooldown: Number("int"), + * // ScaleOutCooldown: Number("int"), + * // TargetValue: 
Number("double"), // required + * // }, + * // }, + * // ], + * // }, + * // }, + * // ], + * // ReplicaTableClassSummary: { // TableClassSummary + * // TableClass: "STANDARD" || "STANDARD_INFREQUENT_ACCESS", + * // LastUpdateDateTime: new Date("TIMESTAMP"), + * // }, + * // }, + * // ], + * // }; + * + * ``` + * + * @param DescribeGlobalTableSettingsCommandInput - {@link DescribeGlobalTableSettingsCommandInput} + * @returns {@link DescribeGlobalTableSettingsCommandOutput} + * @see {@link DescribeGlobalTableSettingsCommandInput} for command's `input` shape. + * @see {@link DescribeGlobalTableSettingsCommandOutput} for command's `response` shape. + * @see {@link DynamoDBClientResolvedConfig | config} for DynamoDBClient's `config` shape. + * + * @throws {@link GlobalTableNotFoundException} (client fault) + *

The specified global table does not exist.

+ * + * @throws {@link InternalServerError} (server fault) + *

An error occurred on the server side.

+ * + * @throws {@link InvalidEndpointException} (client fault) + * + * @throws {@link DynamoDBServiceException} + *

Base exception class for all service exceptions from DynamoDB service.

+ * + * + * @public + */ +export declare class DescribeGlobalTableSettingsCommand extends DescribeGlobalTableSettingsCommand_base { + /** @internal type navigation helper, not in runtime. */ + protected static __types: { + api: { + input: DescribeGlobalTableSettingsInput; + output: DescribeGlobalTableSettingsOutput; + }; + sdk: { + input: DescribeGlobalTableSettingsCommandInput; + output: DescribeGlobalTableSettingsCommandOutput; + }; + }; +} diff --git a/amplify/functions/deleteDocument/node_modules/@aws-sdk/client-dynamodb/dist-types/commands/DescribeImportCommand.d.ts b/amplify/functions/deleteDocument/node_modules/@aws-sdk/client-dynamodb/dist-types/commands/DescribeImportCommand.d.ts new file mode 100644 index 0000000..f440258 --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@aws-sdk/client-dynamodb/dist-types/commands/DescribeImportCommand.d.ts @@ -0,0 +1,165 @@ +import { Command as $Command } from "@smithy/smithy-client"; +import { MetadataBearer as __MetadataBearer } from "@smithy/types"; +import { DynamoDBClientResolvedConfig, ServiceInputTypes, ServiceOutputTypes } from "../DynamoDBClient"; +import { DescribeImportInput, DescribeImportOutput } from "../models/models_0"; +/** + * @public + */ +export type { __MetadataBearer }; +export { $Command }; +/** + * @public + * + * The input for {@link DescribeImportCommand}. + */ +export interface DescribeImportCommandInput extends DescribeImportInput { +} +/** + * @public + * + * The output of {@link DescribeImportCommand}. + */ +export interface DescribeImportCommandOutput extends DescribeImportOutput, __MetadataBearer { +} +declare const DescribeImportCommand_base: { + new (input: DescribeImportCommandInput): import("@smithy/smithy-client").CommandImpl; + new (__0_0: DescribeImportCommandInput): import("@smithy/smithy-client").CommandImpl; + getEndpointParameterInstructions(): import("@smithy/middleware-endpoint").EndpointParameterInstructions; +}; +/** + *

Represents the properties of the import.

+ * @example + * Use a bare-bones client and the command you need to make an API call. + * ```javascript + * import { DynamoDBClient, DescribeImportCommand } from "@aws-sdk/client-dynamodb"; // ES Modules import + * // const { DynamoDBClient, DescribeImportCommand } = require("@aws-sdk/client-dynamodb"); // CommonJS import + * const client = new DynamoDBClient(config); + * const input = { // DescribeImportInput + * ImportArn: "STRING_VALUE", // required + * }; + * const command = new DescribeImportCommand(input); + * const response = await client.send(command); + * // { // DescribeImportOutput + * // ImportTableDescription: { // ImportTableDescription + * // ImportArn: "STRING_VALUE", + * // ImportStatus: "IN_PROGRESS" || "COMPLETED" || "CANCELLING" || "CANCELLED" || "FAILED", + * // TableArn: "STRING_VALUE", + * // TableId: "STRING_VALUE", + * // ClientToken: "STRING_VALUE", + * // S3BucketSource: { // S3BucketSource + * // S3BucketOwner: "STRING_VALUE", + * // S3Bucket: "STRING_VALUE", // required + * // S3KeyPrefix: "STRING_VALUE", + * // }, + * // ErrorCount: Number("long"), + * // CloudWatchLogGroupArn: "STRING_VALUE", + * // InputFormat: "DYNAMODB_JSON" || "ION" || "CSV", + * // InputFormatOptions: { // InputFormatOptions + * // Csv: { // CsvOptions + * // Delimiter: "STRING_VALUE", + * // HeaderList: [ // CsvHeaderList + * // "STRING_VALUE", + * // ], + * // }, + * // }, + * // InputCompressionType: "GZIP" || "ZSTD" || "NONE", + * // TableCreationParameters: { // TableCreationParameters + * // TableName: "STRING_VALUE", // required + * // AttributeDefinitions: [ // AttributeDefinitions // required + * // { // AttributeDefinition + * // AttributeName: "STRING_VALUE", // required + * // AttributeType: "S" || "N" || "B", // required + * // }, + * // ], + * // KeySchema: [ // KeySchema // required + * // { // KeySchemaElement + * // AttributeName: "STRING_VALUE", // required + * // KeyType: "HASH" || "RANGE", // required + * // }, + * // ], + * // BillingMode: 
"PROVISIONED" || "PAY_PER_REQUEST", + * // ProvisionedThroughput: { // ProvisionedThroughput + * // ReadCapacityUnits: Number("long"), // required + * // WriteCapacityUnits: Number("long"), // required + * // }, + * // OnDemandThroughput: { // OnDemandThroughput + * // MaxReadRequestUnits: Number("long"), + * // MaxWriteRequestUnits: Number("long"), + * // }, + * // SSESpecification: { // SSESpecification + * // Enabled: true || false, + * // SSEType: "AES256" || "KMS", + * // KMSMasterKeyId: "STRING_VALUE", + * // }, + * // GlobalSecondaryIndexes: [ // GlobalSecondaryIndexList + * // { // GlobalSecondaryIndex + * // IndexName: "STRING_VALUE", // required + * // KeySchema: [ // required + * // { + * // AttributeName: "STRING_VALUE", // required + * // KeyType: "HASH" || "RANGE", // required + * // }, + * // ], + * // Projection: { // Projection + * // ProjectionType: "ALL" || "KEYS_ONLY" || "INCLUDE", + * // NonKeyAttributes: [ // NonKeyAttributeNameList + * // "STRING_VALUE", + * // ], + * // }, + * // ProvisionedThroughput: { + * // ReadCapacityUnits: Number("long"), // required + * // WriteCapacityUnits: Number("long"), // required + * // }, + * // OnDemandThroughput: { + * // MaxReadRequestUnits: Number("long"), + * // MaxWriteRequestUnits: Number("long"), + * // }, + * // WarmThroughput: { // WarmThroughput + * // ReadUnitsPerSecond: Number("long"), + * // WriteUnitsPerSecond: Number("long"), + * // }, + * // }, + * // ], + * // }, + * // StartTime: new Date("TIMESTAMP"), + * // EndTime: new Date("TIMESTAMP"), + * // ProcessedSizeBytes: Number("long"), + * // ProcessedItemCount: Number("long"), + * // ImportedItemCount: Number("long"), + * // FailureCode: "STRING_VALUE", + * // FailureMessage: "STRING_VALUE", + * // }, + * // }; + * + * ``` + * + * @param DescribeImportCommandInput - {@link DescribeImportCommandInput} + * @returns {@link DescribeImportCommandOutput} + * @see {@link DescribeImportCommandInput} for command's `input` shape. 
+ * @see {@link DescribeImportCommandOutput} for command's `response` shape. + * @see {@link DynamoDBClientResolvedConfig | config} for DynamoDBClient's `config` shape. + * + * @throws {@link ImportNotFoundException} (client fault) + *

+ * The specified import was not found. + *

+ * + * @throws {@link DynamoDBServiceException} + *

Base exception class for all service exceptions from DynamoDB service.

+ * + * + * @public + */ +export declare class DescribeImportCommand extends DescribeImportCommand_base { + /** @internal type navigation helper, not in runtime. */ + protected static __types: { + api: { + input: DescribeImportInput; + output: DescribeImportOutput; + }; + sdk: { + input: DescribeImportCommandInput; + output: DescribeImportCommandOutput; + }; + }; +} diff --git a/amplify/functions/deleteDocument/node_modules/@aws-sdk/client-dynamodb/dist-types/commands/DescribeKinesisStreamingDestinationCommand.d.ts b/amplify/functions/deleteDocument/node_modules/@aws-sdk/client-dynamodb/dist-types/commands/DescribeKinesisStreamingDestinationCommand.d.ts new file mode 100644 index 0000000..5af13ee --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@aws-sdk/client-dynamodb/dist-types/commands/DescribeKinesisStreamingDestinationCommand.d.ts @@ -0,0 +1,89 @@ +import { Command as $Command } from "@smithy/smithy-client"; +import { MetadataBearer as __MetadataBearer } from "@smithy/types"; +import { DynamoDBClientResolvedConfig, ServiceInputTypes, ServiceOutputTypes } from "../DynamoDBClient"; +import { DescribeKinesisStreamingDestinationInput, DescribeKinesisStreamingDestinationOutput } from "../models/models_0"; +/** + * @public + */ +export type { __MetadataBearer }; +export { $Command }; +/** + * @public + * + * The input for {@link DescribeKinesisStreamingDestinationCommand}. + */ +export interface DescribeKinesisStreamingDestinationCommandInput extends DescribeKinesisStreamingDestinationInput { +} +/** + * @public + * + * The output of {@link DescribeKinesisStreamingDestinationCommand}. 
+ */ +export interface DescribeKinesisStreamingDestinationCommandOutput extends DescribeKinesisStreamingDestinationOutput, __MetadataBearer { +} +declare const DescribeKinesisStreamingDestinationCommand_base: { + new (input: DescribeKinesisStreamingDestinationCommandInput): import("@smithy/smithy-client").CommandImpl; + new (__0_0: DescribeKinesisStreamingDestinationCommandInput): import("@smithy/smithy-client").CommandImpl; + getEndpointParameterInstructions(): import("@smithy/middleware-endpoint").EndpointParameterInstructions; +}; +/** + *

Returns information about the status of Kinesis streaming.

+ * @example + * Use a bare-bones client and the command you need to make an API call. + * ```javascript + * import { DynamoDBClient, DescribeKinesisStreamingDestinationCommand } from "@aws-sdk/client-dynamodb"; // ES Modules import + * // const { DynamoDBClient, DescribeKinesisStreamingDestinationCommand } = require("@aws-sdk/client-dynamodb"); // CommonJS import + * const client = new DynamoDBClient(config); + * const input = { // DescribeKinesisStreamingDestinationInput + * TableName: "STRING_VALUE", // required + * }; + * const command = new DescribeKinesisStreamingDestinationCommand(input); + * const response = await client.send(command); + * // { // DescribeKinesisStreamingDestinationOutput + * // TableName: "STRING_VALUE", + * // KinesisDataStreamDestinations: [ // KinesisDataStreamDestinations + * // { // KinesisDataStreamDestination + * // StreamArn: "STRING_VALUE", + * // DestinationStatus: "ENABLING" || "ACTIVE" || "DISABLING" || "DISABLED" || "ENABLE_FAILED" || "UPDATING", + * // DestinationStatusDescription: "STRING_VALUE", + * // ApproximateCreationDateTimePrecision: "MILLISECOND" || "MICROSECOND", + * // }, + * // ], + * // }; + * + * ``` + * + * @param DescribeKinesisStreamingDestinationCommandInput - {@link DescribeKinesisStreamingDestinationCommandInput} + * @returns {@link DescribeKinesisStreamingDestinationCommandOutput} + * @see {@link DescribeKinesisStreamingDestinationCommandInput} for command's `input` shape. + * @see {@link DescribeKinesisStreamingDestinationCommandOutput} for command's `response` shape. + * @see {@link DynamoDBClientResolvedConfig | config} for DynamoDBClient's `config` shape. + * + * @throws {@link InternalServerError} (server fault) + *

An error occurred on the server side.

+ * + * @throws {@link InvalidEndpointException} (client fault) + * + * @throws {@link ResourceNotFoundException} (client fault) + *

The operation tried to access a nonexistent table or index. The resource might not + * be specified correctly, or its status might not be ACTIVE.

+ * + * @throws {@link DynamoDBServiceException} + *

Base exception class for all service exceptions from DynamoDB service.

+ * + * + * @public + */ +export declare class DescribeKinesisStreamingDestinationCommand extends DescribeKinesisStreamingDestinationCommand_base { + /** @internal type navigation helper, not in runtime. */ + protected static __types: { + api: { + input: DescribeKinesisStreamingDestinationInput; + output: DescribeKinesisStreamingDestinationOutput; + }; + sdk: { + input: DescribeKinesisStreamingDestinationCommandInput; + output: DescribeKinesisStreamingDestinationCommandOutput; + }; + }; +} diff --git a/amplify/functions/deleteDocument/node_modules/@aws-sdk/client-dynamodb/dist-types/commands/DescribeLimitsCommand.d.ts b/amplify/functions/deleteDocument/node_modules/@aws-sdk/client-dynamodb/dist-types/commands/DescribeLimitsCommand.d.ts new file mode 100644 index 0000000..bdf6316 --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@aws-sdk/client-dynamodb/dist-types/commands/DescribeLimitsCommand.d.ts @@ -0,0 +1,163 @@ +import { Command as $Command } from "@smithy/smithy-client"; +import { MetadataBearer as __MetadataBearer } from "@smithy/types"; +import { DynamoDBClientResolvedConfig, ServiceInputTypes, ServiceOutputTypes } from "../DynamoDBClient"; +import { DescribeLimitsInput, DescribeLimitsOutput } from "../models/models_0"; +/** + * @public + */ +export type { __MetadataBearer }; +export { $Command }; +/** + * @public + * + * The input for {@link DescribeLimitsCommand}. + */ +export interface DescribeLimitsCommandInput extends DescribeLimitsInput { +} +/** + * @public + * + * The output of {@link DescribeLimitsCommand}. 
+ */ +export interface DescribeLimitsCommandOutput extends DescribeLimitsOutput, __MetadataBearer { +} +declare const DescribeLimitsCommand_base: { + new (input: DescribeLimitsCommandInput): import("@smithy/smithy-client").CommandImpl; + new (...[input]: [] | [DescribeLimitsCommandInput]): import("@smithy/smithy-client").CommandImpl; + getEndpointParameterInstructions(): import("@smithy/middleware-endpoint").EndpointParameterInstructions; +}; +/** + *

Returns the current provisioned-capacity quotas for your Amazon Web Services account in + * a Region, both for the Region as a whole and for any one DynamoDB table that you create + * there.

+ *

When you establish an Amazon Web Services account, the account has initial quotas on + * the maximum read capacity units and write capacity units that you can provision across + * all of your DynamoDB tables in a given Region. Also, there are per-table + * quotas that apply when you create a table there. For more information, see Service, + * Account, and Table Quotas page in the Amazon DynamoDB + * Developer Guide.

+ *

Although you can increase these quotas by filing a case at Amazon Web Services Support Center, obtaining the + * increase is not instantaneous. The DescribeLimits action lets you write + * code to compare the capacity you are currently using to those quotas imposed by your + * account so that you have enough time to apply for an increase before you hit a + * quota.

+ *

For example, you could use one of the Amazon Web Services SDKs to do the + * following:

+ *
    + *
  1. + *

    Call DescribeLimits for a particular Region to obtain your + * current account quotas on provisioned capacity there.

    + *
  2. + *
  3. + *

    Create a variable to hold the aggregate read capacity units provisioned for + * all your tables in that Region, and one to hold the aggregate write capacity + * units. Zero them both.

    + *
  4. + *
  5. + *

    Call ListTables to obtain a list of all your DynamoDB + * tables.

    + *
  6. + *
  7. + *

    For each table name listed by ListTables, do the + * following:

    + *
      + *
    • + *

      Call DescribeTable with the table name.

      + *
    • + *
    • + *

      Use the data returned by DescribeTable to add the read + * capacity units and write capacity units provisioned for the table itself + * to your variables.

      + *
    • + *
    • + *

      If the table has one or more global secondary indexes (GSIs), loop + * over these GSIs and add their provisioned capacity values to your + * variables as well.

      + *
    • + *
    + *
  8. + *
  9. + *

    Report the account quotas for that Region returned by + * DescribeLimits, along with the total current provisioned + * capacity levels you have calculated.

    + *
  10. + *
+ *

This will let you see whether you are getting close to your account-level + * quotas.

+ *

The per-table quotas apply only when you are creating a new table. They restrict the + * sum of the provisioned capacity of the new table itself and all its global secondary + * indexes.

+ *

For existing tables and their GSIs, DynamoDB doesn't let you increase provisioned + * capacity extremely rapidly, but the only quota that applies is that the aggregate + * provisioned capacity over all your tables and GSIs cannot exceed either of the + * per-account quotas.

+ * + *

+ * DescribeLimits should only be called periodically. You can expect + * throttling errors if you call it more than once in a minute.

+ *
+ *

The DescribeLimits Request element has no content.

+ * @example + * Use a bare-bones client and the command you need to make an API call. + * ```javascript + * import { DynamoDBClient, DescribeLimitsCommand } from "@aws-sdk/client-dynamodb"; // ES Modules import + * // const { DynamoDBClient, DescribeLimitsCommand } = require("@aws-sdk/client-dynamodb"); // CommonJS import + * const client = new DynamoDBClient(config); + * const input = {}; + * const command = new DescribeLimitsCommand(input); + * const response = await client.send(command); + * // { // DescribeLimitsOutput + * // AccountMaxReadCapacityUnits: Number("long"), + * // AccountMaxWriteCapacityUnits: Number("long"), + * // TableMaxReadCapacityUnits: Number("long"), + * // TableMaxWriteCapacityUnits: Number("long"), + * // }; + * + * ``` + * + * @param DescribeLimitsCommandInput - {@link DescribeLimitsCommandInput} + * @returns {@link DescribeLimitsCommandOutput} + * @see {@link DescribeLimitsCommandInput} for command's `input` shape. + * @see {@link DescribeLimitsCommandOutput} for command's `response` shape. + * @see {@link DynamoDBClientResolvedConfig | config} for DynamoDBClient's `config` shape. + * + * @throws {@link InternalServerError} (server fault) + *

An error occurred on the server side.

+ * + * @throws {@link InvalidEndpointException} (client fault) + * + * @throws {@link DynamoDBServiceException} + *

Base exception class for all service exceptions from DynamoDB service.

+ * + * + * @example To determine capacity limits per table and account, in the current AWS region + * ```javascript + * // The following example returns the maximum read and write capacity units per table, and for the AWS account, in the current AWS region. + * const input = { /* empty *\/ }; + * const command = new DescribeLimitsCommand(input); + * const response = await client.send(command); + * /* response is + * { + * AccountMaxReadCapacityUnits: 20000, + * AccountMaxWriteCapacityUnits: 20000, + * TableMaxReadCapacityUnits: 10000, + * TableMaxWriteCapacityUnits: 10000 + * } + * *\/ + * ``` + * + * @public + */ +export declare class DescribeLimitsCommand extends DescribeLimitsCommand_base { + /** @internal type navigation helper, not in runtime. */ + protected static __types: { + api: { + input: {}; + output: DescribeLimitsOutput; + }; + sdk: { + input: DescribeLimitsCommandInput; + output: DescribeLimitsCommandOutput; + }; + }; +} diff --git a/amplify/functions/deleteDocument/node_modules/@aws-sdk/client-dynamodb/dist-types/commands/DescribeTableCommand.d.ts b/amplify/functions/deleteDocument/node_modules/@aws-sdk/client-dynamodb/dist-types/commands/DescribeTableCommand.d.ts new file mode 100644 index 0000000..b74ef35 --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@aws-sdk/client-dynamodb/dist-types/commands/DescribeTableCommand.d.ts @@ -0,0 +1,263 @@ +import { Command as $Command } from "@smithy/smithy-client"; +import { MetadataBearer as __MetadataBearer } from "@smithy/types"; +import { DynamoDBClientResolvedConfig, ServiceInputTypes, ServiceOutputTypes } from "../DynamoDBClient"; +import { DescribeTableInput, DescribeTableOutput } from "../models/models_0"; +/** + * @public + */ +export type { __MetadataBearer }; +export { $Command }; +/** + * @public + * + * The input for {@link DescribeTableCommand}. 
+ */ +export interface DescribeTableCommandInput extends DescribeTableInput { +} +/** + * @public + * + * The output of {@link DescribeTableCommand}. + */ +export interface DescribeTableCommandOutput extends DescribeTableOutput, __MetadataBearer { +} +declare const DescribeTableCommand_base: { + new (input: DescribeTableCommandInput): import("@smithy/smithy-client").CommandImpl; + new (__0_0: DescribeTableCommandInput): import("@smithy/smithy-client").CommandImpl; + getEndpointParameterInstructions(): import("@smithy/middleware-endpoint").EndpointParameterInstructions; +}; +/** + *

Returns information about the table, including the current status of the table, when + * it was created, the primary key schema, and any indexes on the table.

+ * + *

For global tables, this operation only applies to global tables using Version + * 2019.11.21 (Current version).

+ *
+ * + *

If you issue a DescribeTable request immediately after a + * CreateTable request, DynamoDB might return a + * ResourceNotFoundException. This is because + * DescribeTable uses an eventually consistent query, and the metadata + * for your table might not be available at that moment. Wait for a few seconds, and + * then try the DescribeTable request again.

+ *
+ * @example + * Use a bare-bones client and the command you need to make an API call. + * ```javascript + * import { DynamoDBClient, DescribeTableCommand } from "@aws-sdk/client-dynamodb"; // ES Modules import + * // const { DynamoDBClient, DescribeTableCommand } = require("@aws-sdk/client-dynamodb"); // CommonJS import + * const client = new DynamoDBClient(config); + * const input = { // DescribeTableInput + * TableName: "STRING_VALUE", // required + * }; + * const command = new DescribeTableCommand(input); + * const response = await client.send(command); + * // { // DescribeTableOutput + * // Table: { // TableDescription + * // AttributeDefinitions: [ // AttributeDefinitions + * // { // AttributeDefinition + * // AttributeName: "STRING_VALUE", // required + * // AttributeType: "S" || "N" || "B", // required + * // }, + * // ], + * // TableName: "STRING_VALUE", + * // KeySchema: [ // KeySchema + * // { // KeySchemaElement + * // AttributeName: "STRING_VALUE", // required + * // KeyType: "HASH" || "RANGE", // required + * // }, + * // ], + * // TableStatus: "CREATING" || "UPDATING" || "DELETING" || "ACTIVE" || "INACCESSIBLE_ENCRYPTION_CREDENTIALS" || "ARCHIVING" || "ARCHIVED", + * // CreationDateTime: new Date("TIMESTAMP"), + * // ProvisionedThroughput: { // ProvisionedThroughputDescription + * // LastIncreaseDateTime: new Date("TIMESTAMP"), + * // LastDecreaseDateTime: new Date("TIMESTAMP"), + * // NumberOfDecreasesToday: Number("long"), + * // ReadCapacityUnits: Number("long"), + * // WriteCapacityUnits: Number("long"), + * // }, + * // TableSizeBytes: Number("long"), + * // ItemCount: Number("long"), + * // TableArn: "STRING_VALUE", + * // TableId: "STRING_VALUE", + * // BillingModeSummary: { // BillingModeSummary + * // BillingMode: "PROVISIONED" || "PAY_PER_REQUEST", + * // LastUpdateToPayPerRequestDateTime: new Date("TIMESTAMP"), + * // }, + * // LocalSecondaryIndexes: [ // LocalSecondaryIndexDescriptionList + * // { // LocalSecondaryIndexDescription + * // 
IndexName: "STRING_VALUE", + * // KeySchema: [ + * // { + * // AttributeName: "STRING_VALUE", // required + * // KeyType: "HASH" || "RANGE", // required + * // }, + * // ], + * // Projection: { // Projection + * // ProjectionType: "ALL" || "KEYS_ONLY" || "INCLUDE", + * // NonKeyAttributes: [ // NonKeyAttributeNameList + * // "STRING_VALUE", + * // ], + * // }, + * // IndexSizeBytes: Number("long"), + * // ItemCount: Number("long"), + * // IndexArn: "STRING_VALUE", + * // }, + * // ], + * // GlobalSecondaryIndexes: [ // GlobalSecondaryIndexDescriptionList + * // { // GlobalSecondaryIndexDescription + * // IndexName: "STRING_VALUE", + * // KeySchema: [ + * // { + * // AttributeName: "STRING_VALUE", // required + * // KeyType: "HASH" || "RANGE", // required + * // }, + * // ], + * // Projection: { + * // ProjectionType: "ALL" || "KEYS_ONLY" || "INCLUDE", + * // NonKeyAttributes: [ + * // "STRING_VALUE", + * // ], + * // }, + * // IndexStatus: "CREATING" || "UPDATING" || "DELETING" || "ACTIVE", + * // Backfilling: true || false, + * // ProvisionedThroughput: { + * // LastIncreaseDateTime: new Date("TIMESTAMP"), + * // LastDecreaseDateTime: new Date("TIMESTAMP"), + * // NumberOfDecreasesToday: Number("long"), + * // ReadCapacityUnits: Number("long"), + * // WriteCapacityUnits: Number("long"), + * // }, + * // IndexSizeBytes: Number("long"), + * // ItemCount: Number("long"), + * // IndexArn: "STRING_VALUE", + * // OnDemandThroughput: { // OnDemandThroughput + * // MaxReadRequestUnits: Number("long"), + * // MaxWriteRequestUnits: Number("long"), + * // }, + * // WarmThroughput: { // GlobalSecondaryIndexWarmThroughputDescription + * // ReadUnitsPerSecond: Number("long"), + * // WriteUnitsPerSecond: Number("long"), + * // Status: "CREATING" || "UPDATING" || "DELETING" || "ACTIVE", + * // }, + * // }, + * // ], + * // StreamSpecification: { // StreamSpecification + * // StreamEnabled: true || false, // required + * // StreamViewType: "NEW_IMAGE" || "OLD_IMAGE" || 
"NEW_AND_OLD_IMAGES" || "KEYS_ONLY", + * // }, + * // LatestStreamLabel: "STRING_VALUE", + * // LatestStreamArn: "STRING_VALUE", + * // GlobalTableVersion: "STRING_VALUE", + * // Replicas: [ // ReplicaDescriptionList + * // { // ReplicaDescription + * // RegionName: "STRING_VALUE", + * // ReplicaStatus: "CREATING" || "CREATION_FAILED" || "UPDATING" || "DELETING" || "ACTIVE" || "REGION_DISABLED" || "INACCESSIBLE_ENCRYPTION_CREDENTIALS", + * // ReplicaStatusDescription: "STRING_VALUE", + * // ReplicaStatusPercentProgress: "STRING_VALUE", + * // KMSMasterKeyId: "STRING_VALUE", + * // ProvisionedThroughputOverride: { // ProvisionedThroughputOverride + * // ReadCapacityUnits: Number("long"), + * // }, + * // OnDemandThroughputOverride: { // OnDemandThroughputOverride + * // MaxReadRequestUnits: Number("long"), + * // }, + * // WarmThroughput: { // TableWarmThroughputDescription + * // ReadUnitsPerSecond: Number("long"), + * // WriteUnitsPerSecond: Number("long"), + * // Status: "CREATING" || "UPDATING" || "DELETING" || "ACTIVE" || "INACCESSIBLE_ENCRYPTION_CREDENTIALS" || "ARCHIVING" || "ARCHIVED", + * // }, + * // GlobalSecondaryIndexes: [ // ReplicaGlobalSecondaryIndexDescriptionList + * // { // ReplicaGlobalSecondaryIndexDescription + * // IndexName: "STRING_VALUE", + * // ProvisionedThroughputOverride: { + * // ReadCapacityUnits: Number("long"), + * // }, + * // OnDemandThroughputOverride: { + * // MaxReadRequestUnits: Number("long"), + * // }, + * // WarmThroughput: { + * // ReadUnitsPerSecond: Number("long"), + * // WriteUnitsPerSecond: Number("long"), + * // Status: "CREATING" || "UPDATING" || "DELETING" || "ACTIVE", + * // }, + * // }, + * // ], + * // ReplicaInaccessibleDateTime: new Date("TIMESTAMP"), + * // ReplicaTableClassSummary: { // TableClassSummary + * // TableClass: "STANDARD" || "STANDARD_INFREQUENT_ACCESS", + * // LastUpdateDateTime: new Date("TIMESTAMP"), + * // }, + * // }, + * // ], + * // RestoreSummary: { // RestoreSummary + * // 
SourceBackupArn: "STRING_VALUE", + * // SourceTableArn: "STRING_VALUE", + * // RestoreDateTime: new Date("TIMESTAMP"), // required + * // RestoreInProgress: true || false, // required + * // }, + * // SSEDescription: { // SSEDescription + * // Status: "ENABLING" || "ENABLED" || "DISABLING" || "DISABLED" || "UPDATING", + * // SSEType: "AES256" || "KMS", + * // KMSMasterKeyArn: "STRING_VALUE", + * // InaccessibleEncryptionDateTime: new Date("TIMESTAMP"), + * // }, + * // ArchivalSummary: { // ArchivalSummary + * // ArchivalDateTime: new Date("TIMESTAMP"), + * // ArchivalReason: "STRING_VALUE", + * // ArchivalBackupArn: "STRING_VALUE", + * // }, + * // TableClassSummary: { + * // TableClass: "STANDARD" || "STANDARD_INFREQUENT_ACCESS", + * // LastUpdateDateTime: new Date("TIMESTAMP"), + * // }, + * // DeletionProtectionEnabled: true || false, + * // OnDemandThroughput: { + * // MaxReadRequestUnits: Number("long"), + * // MaxWriteRequestUnits: Number("long"), + * // }, + * // WarmThroughput: { + * // ReadUnitsPerSecond: Number("long"), + * // WriteUnitsPerSecond: Number("long"), + * // Status: "CREATING" || "UPDATING" || "DELETING" || "ACTIVE" || "INACCESSIBLE_ENCRYPTION_CREDENTIALS" || "ARCHIVING" || "ARCHIVED", + * // }, + * // MultiRegionConsistency: "EVENTUAL" || "STRONG", + * // }, + * // }; + * + * ``` + * + * @param DescribeTableCommandInput - {@link DescribeTableCommandInput} + * @returns {@link DescribeTableCommandOutput} + * @see {@link DescribeTableCommandInput} for command's `input` shape. + * @see {@link DescribeTableCommandOutput} for command's `response` shape. + * @see {@link DynamoDBClientResolvedConfig | config} for DynamoDBClient's `config` shape. + * + * @throws {@link InternalServerError} (server fault) + *

An error occurred on the server side.

+ * + * @throws {@link InvalidEndpointException} (client fault) + * + * @throws {@link ResourceNotFoundException} (client fault) + *

The operation tried to access a nonexistent table or index. The resource might not + * be specified correctly, or its status might not be ACTIVE.

+ * + * @throws {@link DynamoDBServiceException} + *

Base exception class for all service exceptions from DynamoDB service.

+ * + * + * @public + */ +export declare class DescribeTableCommand extends DescribeTableCommand_base { + /** @internal type navigation helper, not in runtime. */ + protected static __types: { + api: { + input: DescribeTableInput; + output: DescribeTableOutput; + }; + sdk: { + input: DescribeTableCommandInput; + output: DescribeTableCommandOutput; + }; + }; +} diff --git a/amplify/functions/deleteDocument/node_modules/@aws-sdk/client-dynamodb/dist-types/commands/DescribeTableReplicaAutoScalingCommand.d.ts b/amplify/functions/deleteDocument/node_modules/@aws-sdk/client-dynamodb/dist-types/commands/DescribeTableReplicaAutoScalingCommand.d.ts new file mode 100644 index 0000000..6875231 --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@aws-sdk/client-dynamodb/dist-types/commands/DescribeTableReplicaAutoScalingCommand.d.ts @@ -0,0 +1,166 @@ +import { Command as $Command } from "@smithy/smithy-client"; +import { MetadataBearer as __MetadataBearer } from "@smithy/types"; +import { DynamoDBClientResolvedConfig, ServiceInputTypes, ServiceOutputTypes } from "../DynamoDBClient"; +import { DescribeTableReplicaAutoScalingInput, DescribeTableReplicaAutoScalingOutput } from "../models/models_0"; +/** + * @public + */ +export type { __MetadataBearer }; +export { $Command }; +/** + * @public + * + * The input for {@link DescribeTableReplicaAutoScalingCommand}. + */ +export interface DescribeTableReplicaAutoScalingCommandInput extends DescribeTableReplicaAutoScalingInput { +} +/** + * @public + * + * The output of {@link DescribeTableReplicaAutoScalingCommand}. 
+ */ +export interface DescribeTableReplicaAutoScalingCommandOutput extends DescribeTableReplicaAutoScalingOutput, __MetadataBearer { +} +declare const DescribeTableReplicaAutoScalingCommand_base: { + new (input: DescribeTableReplicaAutoScalingCommandInput): import("@smithy/smithy-client").CommandImpl; + new (__0_0: DescribeTableReplicaAutoScalingCommandInput): import("@smithy/smithy-client").CommandImpl; + getEndpointParameterInstructions(): import("@smithy/middleware-endpoint").EndpointParameterInstructions; +}; +/** + *

Describes auto scaling settings across replicas of the global table at once.

+ * + *

For global tables, this operation only applies to global tables using Version + * 2019.11.21 (Current version).

+ *
+ * @example + * Use a bare-bones client and the command you need to make an API call. + * ```javascript + * import { DynamoDBClient, DescribeTableReplicaAutoScalingCommand } from "@aws-sdk/client-dynamodb"; // ES Modules import + * // const { DynamoDBClient, DescribeTableReplicaAutoScalingCommand } = require("@aws-sdk/client-dynamodb"); // CommonJS import + * const client = new DynamoDBClient(config); + * const input = { // DescribeTableReplicaAutoScalingInput + * TableName: "STRING_VALUE", // required + * }; + * const command = new DescribeTableReplicaAutoScalingCommand(input); + * const response = await client.send(command); + * // { // DescribeTableReplicaAutoScalingOutput + * // TableAutoScalingDescription: { // TableAutoScalingDescription + * // TableName: "STRING_VALUE", + * // TableStatus: "CREATING" || "UPDATING" || "DELETING" || "ACTIVE" || "INACCESSIBLE_ENCRYPTION_CREDENTIALS" || "ARCHIVING" || "ARCHIVED", + * // Replicas: [ // ReplicaAutoScalingDescriptionList + * // { // ReplicaAutoScalingDescription + * // RegionName: "STRING_VALUE", + * // GlobalSecondaryIndexes: [ // ReplicaGlobalSecondaryIndexAutoScalingDescriptionList + * // { // ReplicaGlobalSecondaryIndexAutoScalingDescription + * // IndexName: "STRING_VALUE", + * // IndexStatus: "CREATING" || "UPDATING" || "DELETING" || "ACTIVE", + * // ProvisionedReadCapacityAutoScalingSettings: { // AutoScalingSettingsDescription + * // MinimumUnits: Number("long"), + * // MaximumUnits: Number("long"), + * // AutoScalingDisabled: true || false, + * // AutoScalingRoleArn: "STRING_VALUE", + * // ScalingPolicies: [ // AutoScalingPolicyDescriptionList + * // { // AutoScalingPolicyDescription + * // PolicyName: "STRING_VALUE", + * // TargetTrackingScalingPolicyConfiguration: { // AutoScalingTargetTrackingScalingPolicyConfigurationDescription + * // DisableScaleIn: true || false, + * // ScaleInCooldown: Number("int"), + * // ScaleOutCooldown: Number("int"), + * // TargetValue: Number("double"), // required + * // 
}, + * // }, + * // ], + * // }, + * // ProvisionedWriteCapacityAutoScalingSettings: { + * // MinimumUnits: Number("long"), + * // MaximumUnits: Number("long"), + * // AutoScalingDisabled: true || false, + * // AutoScalingRoleArn: "STRING_VALUE", + * // ScalingPolicies: [ + * // { + * // PolicyName: "STRING_VALUE", + * // TargetTrackingScalingPolicyConfiguration: { + * // DisableScaleIn: true || false, + * // ScaleInCooldown: Number("int"), + * // ScaleOutCooldown: Number("int"), + * // TargetValue: Number("double"), // required + * // }, + * // }, + * // ], + * // }, + * // }, + * // ], + * // ReplicaProvisionedReadCapacityAutoScalingSettings: { + * // MinimumUnits: Number("long"), + * // MaximumUnits: Number("long"), + * // AutoScalingDisabled: true || false, + * // AutoScalingRoleArn: "STRING_VALUE", + * // ScalingPolicies: [ + * // { + * // PolicyName: "STRING_VALUE", + * // TargetTrackingScalingPolicyConfiguration: { + * // DisableScaleIn: true || false, + * // ScaleInCooldown: Number("int"), + * // ScaleOutCooldown: Number("int"), + * // TargetValue: Number("double"), // required + * // }, + * // }, + * // ], + * // }, + * // ReplicaProvisionedWriteCapacityAutoScalingSettings: { + * // MinimumUnits: Number("long"), + * // MaximumUnits: Number("long"), + * // AutoScalingDisabled: true || false, + * // AutoScalingRoleArn: "STRING_VALUE", + * // ScalingPolicies: [ + * // { + * // PolicyName: "STRING_VALUE", + * // TargetTrackingScalingPolicyConfiguration: { + * // DisableScaleIn: true || false, + * // ScaleInCooldown: Number("int"), + * // ScaleOutCooldown: Number("int"), + * // TargetValue: Number("double"), // required + * // }, + * // }, + * // ], + * // }, + * // ReplicaStatus: "CREATING" || "CREATION_FAILED" || "UPDATING" || "DELETING" || "ACTIVE" || "REGION_DISABLED" || "INACCESSIBLE_ENCRYPTION_CREDENTIALS", + * // }, + * // ], + * // }, + * // }; + * + * ``` + * + * @param DescribeTableReplicaAutoScalingCommandInput - {@link 
DescribeTableReplicaAutoScalingCommandInput} + * @returns {@link DescribeTableReplicaAutoScalingCommandOutput} + * @see {@link DescribeTableReplicaAutoScalingCommandInput} for command's `input` shape. + * @see {@link DescribeTableReplicaAutoScalingCommandOutput} for command's `response` shape. + * @see {@link DynamoDBClientResolvedConfig | config} for DynamoDBClient's `config` shape. + * + * @throws {@link InternalServerError} (server fault) + *

An error occurred on the server side.

+ * + * @throws {@link ResourceNotFoundException} (client fault) + *

The operation tried to access a nonexistent table or index. The resource might not + * be specified correctly, or its status might not be ACTIVE.

+ * + * @throws {@link DynamoDBServiceException} + *

Base exception class for all service exceptions from DynamoDB service.

+ * + * + * @public + */ +export declare class DescribeTableReplicaAutoScalingCommand extends DescribeTableReplicaAutoScalingCommand_base { + /** @internal type navigation helper, not in runtime. */ + protected static __types: { + api: { + input: DescribeTableReplicaAutoScalingInput; + output: DescribeTableReplicaAutoScalingOutput; + }; + sdk: { + input: DescribeTableReplicaAutoScalingCommandInput; + output: DescribeTableReplicaAutoScalingCommandOutput; + }; + }; +} diff --git a/amplify/functions/deleteDocument/node_modules/@aws-sdk/client-dynamodb/dist-types/commands/DescribeTimeToLiveCommand.d.ts b/amplify/functions/deleteDocument/node_modules/@aws-sdk/client-dynamodb/dist-types/commands/DescribeTimeToLiveCommand.d.ts new file mode 100644 index 0000000..f5f3419 --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@aws-sdk/client-dynamodb/dist-types/commands/DescribeTimeToLiveCommand.d.ts @@ -0,0 +1,84 @@ +import { Command as $Command } from "@smithy/smithy-client"; +import { MetadataBearer as __MetadataBearer } from "@smithy/types"; +import { DynamoDBClientResolvedConfig, ServiceInputTypes, ServiceOutputTypes } from "../DynamoDBClient"; +import { DescribeTimeToLiveInput, DescribeTimeToLiveOutput } from "../models/models_0"; +/** + * @public + */ +export type { __MetadataBearer }; +export { $Command }; +/** + * @public + * + * The input for {@link DescribeTimeToLiveCommand}. + */ +export interface DescribeTimeToLiveCommandInput extends DescribeTimeToLiveInput { +} +/** + * @public + * + * The output of {@link DescribeTimeToLiveCommand}. 
+ */ +export interface DescribeTimeToLiveCommandOutput extends DescribeTimeToLiveOutput, __MetadataBearer { +} +declare const DescribeTimeToLiveCommand_base: { + new (input: DescribeTimeToLiveCommandInput): import("@smithy/smithy-client").CommandImpl; + new (__0_0: DescribeTimeToLiveCommandInput): import("@smithy/smithy-client").CommandImpl; + getEndpointParameterInstructions(): import("@smithy/middleware-endpoint").EndpointParameterInstructions; +}; +/** + *

Gives a description of the Time to Live (TTL) status on the specified table.

+ * @example + * Use a bare-bones client and the command you need to make an API call. + * ```javascript + * import { DynamoDBClient, DescribeTimeToLiveCommand } from "@aws-sdk/client-dynamodb"; // ES Modules import + * // const { DynamoDBClient, DescribeTimeToLiveCommand } = require("@aws-sdk/client-dynamodb"); // CommonJS import + * const client = new DynamoDBClient(config); + * const input = { // DescribeTimeToLiveInput + * TableName: "STRING_VALUE", // required + * }; + * const command = new DescribeTimeToLiveCommand(input); + * const response = await client.send(command); + * // { // DescribeTimeToLiveOutput + * // TimeToLiveDescription: { // TimeToLiveDescription + * // TimeToLiveStatus: "ENABLING" || "DISABLING" || "ENABLED" || "DISABLED", + * // AttributeName: "STRING_VALUE", + * // }, + * // }; + * + * ``` + * + * @param DescribeTimeToLiveCommandInput - {@link DescribeTimeToLiveCommandInput} + * @returns {@link DescribeTimeToLiveCommandOutput} + * @see {@link DescribeTimeToLiveCommandInput} for command's `input` shape. + * @see {@link DescribeTimeToLiveCommandOutput} for command's `response` shape. + * @see {@link DynamoDBClientResolvedConfig | config} for DynamoDBClient's `config` shape. + * + * @throws {@link InternalServerError} (server fault) + *

An error occurred on the server side.

+ * + * @throws {@link InvalidEndpointException} (client fault) + * + * @throws {@link ResourceNotFoundException} (client fault) + *

The operation tried to access a nonexistent table or index. The resource might not + * be specified correctly, or its status might not be ACTIVE.

+ * + * @throws {@link DynamoDBServiceException} + *

Base exception class for all service exceptions from DynamoDB service.

+ * + * + * @public + */ +export declare class DescribeTimeToLiveCommand extends DescribeTimeToLiveCommand_base { + /** @internal type navigation helper, not in runtime. */ + protected static __types: { + api: { + input: DescribeTimeToLiveInput; + output: DescribeTimeToLiveOutput; + }; + sdk: { + input: DescribeTimeToLiveCommandInput; + output: DescribeTimeToLiveCommandOutput; + }; + }; +} diff --git a/amplify/functions/deleteDocument/node_modules/@aws-sdk/client-dynamodb/dist-types/commands/DisableKinesisStreamingDestinationCommand.d.ts b/amplify/functions/deleteDocument/node_modules/@aws-sdk/client-dynamodb/dist-types/commands/DisableKinesisStreamingDestinationCommand.d.ts new file mode 100644 index 0000000..e52268c --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@aws-sdk/client-dynamodb/dist-types/commands/DisableKinesisStreamingDestinationCommand.d.ts @@ -0,0 +1,122 @@ +import { Command as $Command } from "@smithy/smithy-client"; +import { MetadataBearer as __MetadataBearer } from "@smithy/types"; +import { DynamoDBClientResolvedConfig, ServiceInputTypes, ServiceOutputTypes } from "../DynamoDBClient"; +import { KinesisStreamingDestinationInput, KinesisStreamingDestinationOutput } from "../models/models_0"; +/** + * @public + */ +export type { __MetadataBearer }; +export { $Command }; +/** + * @public + * + * The input for {@link DisableKinesisStreamingDestinationCommand}. + */ +export interface DisableKinesisStreamingDestinationCommandInput extends KinesisStreamingDestinationInput { +} +/** + * @public + * + * The output of {@link DisableKinesisStreamingDestinationCommand}. 
+ */ +export interface DisableKinesisStreamingDestinationCommandOutput extends KinesisStreamingDestinationOutput, __MetadataBearer { +} +declare const DisableKinesisStreamingDestinationCommand_base: { + new (input: DisableKinesisStreamingDestinationCommandInput): import("@smithy/smithy-client").CommandImpl; + new (__0_0: DisableKinesisStreamingDestinationCommandInput): import("@smithy/smithy-client").CommandImpl; + getEndpointParameterInstructions(): import("@smithy/middleware-endpoint").EndpointParameterInstructions; +}; +/** + *

Stops replication from the DynamoDB table to the Kinesis data stream. This + * is done without deleting either of the resources.

+ * @example + * Use a bare-bones client and the command you need to make an API call. + * ```javascript + * import { DynamoDBClient, DisableKinesisStreamingDestinationCommand } from "@aws-sdk/client-dynamodb"; // ES Modules import + * // const { DynamoDBClient, DisableKinesisStreamingDestinationCommand } = require("@aws-sdk/client-dynamodb"); // CommonJS import + * const client = new DynamoDBClient(config); + * const input = { // KinesisStreamingDestinationInput + * TableName: "STRING_VALUE", // required + * StreamArn: "STRING_VALUE", // required + * EnableKinesisStreamingConfiguration: { // EnableKinesisStreamingConfiguration + * ApproximateCreationDateTimePrecision: "MILLISECOND" || "MICROSECOND", + * }, + * }; + * const command = new DisableKinesisStreamingDestinationCommand(input); + * const response = await client.send(command); + * // { // KinesisStreamingDestinationOutput + * // TableName: "STRING_VALUE", + * // StreamArn: "STRING_VALUE", + * // DestinationStatus: "ENABLING" || "ACTIVE" || "DISABLING" || "DISABLED" || "ENABLE_FAILED" || "UPDATING", + * // EnableKinesisStreamingConfiguration: { // EnableKinesisStreamingConfiguration + * // ApproximateCreationDateTimePrecision: "MILLISECOND" || "MICROSECOND", + * // }, + * // }; + * + * ``` + * + * @param DisableKinesisStreamingDestinationCommandInput - {@link DisableKinesisStreamingDestinationCommandInput} + * @returns {@link DisableKinesisStreamingDestinationCommandOutput} + * @see {@link DisableKinesisStreamingDestinationCommandInput} for command's `input` shape. + * @see {@link DisableKinesisStreamingDestinationCommandOutput} for command's `response` shape. + * @see {@link DynamoDBClientResolvedConfig | config} for DynamoDBClient's `config` shape. + * + * @throws {@link InternalServerError} (server fault) + *

An error occurred on the server side.

+ * + * @throws {@link InvalidEndpointException} (client fault) + * + * @throws {@link LimitExceededException} (client fault) + *

There is no limit to the number of daily on-demand backups that can be taken.

+ *

For most purposes, up to 500 simultaneous table operations are allowed per account. These operations + * include CreateTable, UpdateTable, + * DeleteTable,UpdateTimeToLive, + * RestoreTableFromBackup, and RestoreTableToPointInTime.

+ *

When you are creating a table with one or more secondary + * indexes, you can have up to 250 such requests running at a time. However, if the table or + * index specifications are complex, then DynamoDB might temporarily reduce the number + * of concurrent operations.

+ *

When importing into DynamoDB, up to 50 simultaneous import table operations are allowed per account.

+ *

There is a soft account quota of 2,500 tables.

+ *

GetRecords was called with a value of more than 1000 for the limit request parameter.

+ *

More than 2 processes are reading from the same streams shard at the same time. Exceeding + * this limit may result in request throttling.

+ * + * @throws {@link ResourceInUseException} (client fault) + *

The operation conflicts with the resource's availability. For example:

+ *
    + *
  • + *

    You attempted to recreate an existing table.

    + *
  • + *
  • + *

    You tried to delete a table currently in the CREATING state.

    + *
  • + *
  • + *

    You tried to update a resource that was already being updated.

    + *
  • + *
+ *

When appropriate, wait for the ongoing update to complete and attempt the request again.

+ * + * @throws {@link ResourceNotFoundException} (client fault) + *

The operation tried to access a nonexistent table or index. The resource might not + * be specified correctly, or its status might not be ACTIVE.

+ * + * @throws {@link DynamoDBServiceException} + *

Base exception class for all service exceptions from DynamoDB service.

+ * + * + * @public + */ +export declare class DisableKinesisStreamingDestinationCommand extends DisableKinesisStreamingDestinationCommand_base { + /** @internal type navigation helper, not in runtime. */ + protected static __types: { + api: { + input: KinesisStreamingDestinationInput; + output: KinesisStreamingDestinationOutput; + }; + sdk: { + input: DisableKinesisStreamingDestinationCommandInput; + output: DisableKinesisStreamingDestinationCommandOutput; + }; + }; +} diff --git a/amplify/functions/deleteDocument/node_modules/@aws-sdk/client-dynamodb/dist-types/commands/EnableKinesisStreamingDestinationCommand.d.ts b/amplify/functions/deleteDocument/node_modules/@aws-sdk/client-dynamodb/dist-types/commands/EnableKinesisStreamingDestinationCommand.d.ts new file mode 100644 index 0000000..ff8985a --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@aws-sdk/client-dynamodb/dist-types/commands/EnableKinesisStreamingDestinationCommand.d.ts @@ -0,0 +1,124 @@ +import { Command as $Command } from "@smithy/smithy-client"; +import { MetadataBearer as __MetadataBearer } from "@smithy/types"; +import { DynamoDBClientResolvedConfig, ServiceInputTypes, ServiceOutputTypes } from "../DynamoDBClient"; +import { KinesisStreamingDestinationInput, KinesisStreamingDestinationOutput } from "../models/models_0"; +/** + * @public + */ +export type { __MetadataBearer }; +export { $Command }; +/** + * @public + * + * The input for {@link EnableKinesisStreamingDestinationCommand}. + */ +export interface EnableKinesisStreamingDestinationCommandInput extends KinesisStreamingDestinationInput { +} +/** + * @public + * + * The output of {@link EnableKinesisStreamingDestinationCommand}. 
+ */ +export interface EnableKinesisStreamingDestinationCommandOutput extends KinesisStreamingDestinationOutput, __MetadataBearer { +} +declare const EnableKinesisStreamingDestinationCommand_base: { + new (input: EnableKinesisStreamingDestinationCommandInput): import("@smithy/smithy-client").CommandImpl; + new (__0_0: EnableKinesisStreamingDestinationCommandInput): import("@smithy/smithy-client").CommandImpl; + getEndpointParameterInstructions(): import("@smithy/middleware-endpoint").EndpointParameterInstructions; +}; +/** + *

Starts table data replication to the specified Kinesis data stream at a timestamp + * chosen during the enable workflow. If this operation doesn't return results immediately, + * use DescribeKinesisStreamingDestination to check if streaming to the Kinesis data stream + * is ACTIVE.

+ * @example + * Use a bare-bones client and the command you need to make an API call. + * ```javascript + * import { DynamoDBClient, EnableKinesisStreamingDestinationCommand } from "@aws-sdk/client-dynamodb"; // ES Modules import + * // const { DynamoDBClient, EnableKinesisStreamingDestinationCommand } = require("@aws-sdk/client-dynamodb"); // CommonJS import + * const client = new DynamoDBClient(config); + * const input = { // KinesisStreamingDestinationInput + * TableName: "STRING_VALUE", // required + * StreamArn: "STRING_VALUE", // required + * EnableKinesisStreamingConfiguration: { // EnableKinesisStreamingConfiguration + * ApproximateCreationDateTimePrecision: "MILLISECOND" || "MICROSECOND", + * }, + * }; + * const command = new EnableKinesisStreamingDestinationCommand(input); + * const response = await client.send(command); + * // { // KinesisStreamingDestinationOutput + * // TableName: "STRING_VALUE", + * // StreamArn: "STRING_VALUE", + * // DestinationStatus: "ENABLING" || "ACTIVE" || "DISABLING" || "DISABLED" || "ENABLE_FAILED" || "UPDATING", + * // EnableKinesisStreamingConfiguration: { // EnableKinesisStreamingConfiguration + * // ApproximateCreationDateTimePrecision: "MILLISECOND" || "MICROSECOND", + * // }, + * // }; + * + * ``` + * + * @param EnableKinesisStreamingDestinationCommandInput - {@link EnableKinesisStreamingDestinationCommandInput} + * @returns {@link EnableKinesisStreamingDestinationCommandOutput} + * @see {@link EnableKinesisStreamingDestinationCommandInput} for command's `input` shape. + * @see {@link EnableKinesisStreamingDestinationCommandOutput} for command's `response` shape. + * @see {@link DynamoDBClientResolvedConfig | config} for DynamoDBClient's `config` shape. + * + * @throws {@link InternalServerError} (server fault) + *

An error occurred on the server side.

+ * + * @throws {@link InvalidEndpointException} (client fault) + * + * @throws {@link LimitExceededException} (client fault) + *

There is no limit to the number of daily on-demand backups that can be taken.

+ *

For most purposes, up to 500 simultaneous table operations are allowed per account. These operations + * include CreateTable, UpdateTable, + * DeleteTable,UpdateTimeToLive, + * RestoreTableFromBackup, and RestoreTableToPointInTime.

+ *

When you are creating a table with one or more secondary + * indexes, you can have up to 250 such requests running at a time. However, if the table or + * index specifications are complex, then DynamoDB might temporarily reduce the number + * of concurrent operations.

+ *

When importing into DynamoDB, up to 50 simultaneous import table operations are allowed per account.

+ *

There is a soft account quota of 2,500 tables.

+ *

GetRecords was called with a value of more than 1000 for the limit request parameter.

+ *

More than 2 processes are reading from the same streams shard at the same time. Exceeding + * this limit may result in request throttling.

+ * + * @throws {@link ResourceInUseException} (client fault) + *

The operation conflicts with the resource's availability. For example:

+ *
    + *
  • + *

    You attempted to recreate an existing table.

    + *
  • + *
  • + *

    You tried to delete a table currently in the CREATING state.

    + *
  • + *
  • + *

    You tried to update a resource that was already being updated.

    + *
  • + *
+ *

When appropriate, wait for the ongoing update to complete and attempt the request again.

+ * + * @throws {@link ResourceNotFoundException} (client fault) + *

The operation tried to access a nonexistent table or index. The resource might not + * be specified correctly, or its status might not be ACTIVE.

+ * + * @throws {@link DynamoDBServiceException} + *

Base exception class for all service exceptions from DynamoDB service.

+ * + * + * @public + */ +export declare class EnableKinesisStreamingDestinationCommand extends EnableKinesisStreamingDestinationCommand_base { + /** @internal type navigation helper, not in runtime. */ + protected static __types: { + api: { + input: KinesisStreamingDestinationInput; + output: KinesisStreamingDestinationOutput; + }; + sdk: { + input: EnableKinesisStreamingDestinationCommandInput; + output: EnableKinesisStreamingDestinationCommandOutput; + }; + }; +} diff --git a/amplify/functions/deleteDocument/node_modules/@aws-sdk/client-dynamodb/dist-types/commands/ExecuteStatementCommand.d.ts b/amplify/functions/deleteDocument/node_modules/@aws-sdk/client-dynamodb/dist-types/commands/ExecuteStatementCommand.d.ts new file mode 100644 index 0000000..999b557 --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@aws-sdk/client-dynamodb/dist-types/commands/ExecuteStatementCommand.d.ts @@ -0,0 +1,242 @@ +import { Command as $Command } from "@smithy/smithy-client"; +import { MetadataBearer as __MetadataBearer } from "@smithy/types"; +import { DynamoDBClientResolvedConfig, ServiceInputTypes, ServiceOutputTypes } from "../DynamoDBClient"; +import { ExecuteStatementInput, ExecuteStatementOutput } from "../models/models_0"; +/** + * @public + */ +export type { __MetadataBearer }; +export { $Command }; +/** + * @public + * + * The input for {@link ExecuteStatementCommand}. + */ +export interface ExecuteStatementCommandInput extends ExecuteStatementInput { +} +/** + * @public + * + * The output of {@link ExecuteStatementCommand}. 
+ */ +export interface ExecuteStatementCommandOutput extends ExecuteStatementOutput, __MetadataBearer { +} +declare const ExecuteStatementCommand_base: { + new (input: ExecuteStatementCommandInput): import("@smithy/smithy-client").CommandImpl; + new (__0_0: ExecuteStatementCommandInput): import("@smithy/smithy-client").CommandImpl; + getEndpointParameterInstructions(): import("@smithy/middleware-endpoint").EndpointParameterInstructions; +}; +/** + *

This operation allows you to perform reads and singleton writes on data stored in + * DynamoDB, using PartiQL.

+ *

For PartiQL reads (SELECT statement), if the total number of processed + * items exceeds the maximum dataset size limit of 1 MB, the read stops and results are + * returned to the user as a LastEvaluatedKey value to continue the read in a + * subsequent operation. If the filter criteria in WHERE clause does not match + * any data, the read will return an empty result set.

+ *

A single SELECT statement response can return up to the maximum number of + * items (if using the Limit parameter) or a maximum of 1 MB of data (and then apply any + * filtering to the results using WHERE clause). If + * LastEvaluatedKey is present in the response, you need to paginate the + * result set. If NextToken is present, you need to paginate the result set + * and include NextToken.

+ * @example + * Use a bare-bones client and the command you need to make an API call. + * ```javascript + * import { DynamoDBClient, ExecuteStatementCommand } from "@aws-sdk/client-dynamodb"; // ES Modules import + * // const { DynamoDBClient, ExecuteStatementCommand } = require("@aws-sdk/client-dynamodb"); // CommonJS import + * const client = new DynamoDBClient(config); + * const input = { // ExecuteStatementInput + * Statement: "STRING_VALUE", // required + * Parameters: [ // PreparedStatementParameters + * { // AttributeValue Union: only one key present + * S: "STRING_VALUE", + * N: "STRING_VALUE", + * B: new Uint8Array(), // e.g. Buffer.from("") or new TextEncoder().encode("") + * SS: [ // StringSetAttributeValue + * "STRING_VALUE", + * ], + * NS: [ // NumberSetAttributeValue + * "STRING_VALUE", + * ], + * BS: [ // BinarySetAttributeValue + * new Uint8Array(), // e.g. Buffer.from("") or new TextEncoder().encode("") + * ], + * M: { // MapAttributeValue + * "": {// Union: only one key present + * S: "STRING_VALUE", + * N: "STRING_VALUE", + * B: new Uint8Array(), // e.g. Buffer.from("") or new TextEncoder().encode("") + * SS: [ + * "STRING_VALUE", + * ], + * NS: [ + * "STRING_VALUE", + * ], + * BS: [ + * new Uint8Array(), // e.g. 
Buffer.from("") or new TextEncoder().encode("") + * ], + * M: { + * "": "", + * }, + * L: [ // ListAttributeValue + * "", + * ], + * NULL: true || false, + * BOOL: true || false, + * }, + * }, + * L: [ + * "", + * ], + * NULL: true || false, + * BOOL: true || false, + * }, + * ], + * ConsistentRead: true || false, + * NextToken: "STRING_VALUE", + * ReturnConsumedCapacity: "INDEXES" || "TOTAL" || "NONE", + * Limit: Number("int"), + * ReturnValuesOnConditionCheckFailure: "ALL_OLD" || "NONE", + * }; + * const command = new ExecuteStatementCommand(input); + * const response = await client.send(command); + * // { // ExecuteStatementOutput + * // Items: [ // ItemList + * // { // AttributeMap + * // "": { // AttributeValue Union: only one key present + * // S: "STRING_VALUE", + * // N: "STRING_VALUE", + * // B: new Uint8Array(), + * // SS: [ // StringSetAttributeValue + * // "STRING_VALUE", + * // ], + * // NS: [ // NumberSetAttributeValue + * // "STRING_VALUE", + * // ], + * // BS: [ // BinarySetAttributeValue + * // new Uint8Array(), + * // ], + * // M: { // MapAttributeValue + * // "": {// Union: only one key present + * // S: "STRING_VALUE", + * // N: "STRING_VALUE", + * // B: new Uint8Array(), + * // SS: [ + * // "STRING_VALUE", + * // ], + * // NS: [ + * // "STRING_VALUE", + * // ], + * // BS: [ + * // new Uint8Array(), + * // ], + * // M: { + * // "": "", + * // }, + * // L: [ // ListAttributeValue + * // "", + * // ], + * // NULL: true || false, + * // BOOL: true || false, + * // }, + * // }, + * // L: [ + * // "", + * // ], + * // NULL: true || false, + * // BOOL: true || false, + * // }, + * // }, + * // ], + * // NextToken: "STRING_VALUE", + * // ConsumedCapacity: { // ConsumedCapacity + * // TableName: "STRING_VALUE", + * // CapacityUnits: Number("double"), + * // ReadCapacityUnits: Number("double"), + * // WriteCapacityUnits: Number("double"), + * // Table: { // Capacity + * // ReadCapacityUnits: Number("double"), + * // WriteCapacityUnits: Number("double"), 
+ * // CapacityUnits: Number("double"), + * // }, + * // LocalSecondaryIndexes: { // SecondaryIndexesCapacityMap + * // "": { + * // ReadCapacityUnits: Number("double"), + * // WriteCapacityUnits: Number("double"), + * // CapacityUnits: Number("double"), + * // }, + * // }, + * // GlobalSecondaryIndexes: { + * // "": { + * // ReadCapacityUnits: Number("double"), + * // WriteCapacityUnits: Number("double"), + * // CapacityUnits: Number("double"), + * // }, + * // }, + * // }, + * // LastEvaluatedKey: { // Key + * // "": "", + * // }, + * // }; + * + * ``` + * + * @param ExecuteStatementCommandInput - {@link ExecuteStatementCommandInput} + * @returns {@link ExecuteStatementCommandOutput} + * @see {@link ExecuteStatementCommandInput} for command's `input` shape. + * @see {@link ExecuteStatementCommandOutput} for command's `response` shape. + * @see {@link DynamoDBClientResolvedConfig | config} for DynamoDBClient's `config` shape. + * + * @throws {@link ConditionalCheckFailedException} (client fault) + *

A condition specified in the operation failed to be evaluated.

+ * + * @throws {@link DuplicateItemException} (client fault) + *

There was an attempt to insert an item with the same primary key as an item that + * already exists in the DynamoDB table.

+ * + * @throws {@link InternalServerError} (server fault) + *

An error occurred on the server side.

+ * + * @throws {@link ItemCollectionSizeLimitExceededException} (client fault) + *

An item collection is too large. This exception is only returned for tables that + * have one or more local secondary indexes.

+ * + * @throws {@link ProvisionedThroughputExceededException} (client fault) + *

Your request rate is too high. The Amazon Web Services SDKs for DynamoDB + * automatically retry requests that receive this exception. Your request is eventually + * successful, unless your retry queue is too large to finish. Reduce the frequency of + * requests and use exponential backoff. For more information, go to Error Retries and Exponential Backoff in the Amazon DynamoDB Developer Guide.

+ * + * @throws {@link RequestLimitExceeded} (client fault) + *

Throughput exceeds the current throughput quota for your account. Please contact + * Amazon Web ServicesSupport to request a + * quota increase.

+ * + * @throws {@link ResourceNotFoundException} (client fault) + *

The operation tried to access a nonexistent table or index. The resource might not + * be specified correctly, or its status might not be ACTIVE.

+ * + * @throws {@link TransactionConflictException} (client fault) + *

Operation was rejected because there is an ongoing transaction for the + * item.

+ * + * @throws {@link DynamoDBServiceException} + *

Base exception class for all service exceptions from DynamoDB service.

+ * + * + * @public + */ +export declare class ExecuteStatementCommand extends ExecuteStatementCommand_base { + /** @internal type navigation helper, not in runtime. */ + protected static __types: { + api: { + input: ExecuteStatementInput; + output: ExecuteStatementOutput; + }; + sdk: { + input: ExecuteStatementCommandInput; + output: ExecuteStatementCommandOutput; + }; + }; +} diff --git a/amplify/functions/deleteDocument/node_modules/@aws-sdk/client-dynamodb/dist-types/commands/ExecuteTransactionCommand.d.ts b/amplify/functions/deleteDocument/node_modules/@aws-sdk/client-dynamodb/dist-types/commands/ExecuteTransactionCommand.d.ts new file mode 100644 index 0000000..6e1a94b --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@aws-sdk/client-dynamodb/dist-types/commands/ExecuteTransactionCommand.d.ts @@ -0,0 +1,533 @@ +import { Command as $Command } from "@smithy/smithy-client"; +import { MetadataBearer as __MetadataBearer } from "@smithy/types"; +import { DynamoDBClientResolvedConfig, ServiceInputTypes, ServiceOutputTypes } from "../DynamoDBClient"; +import { ExecuteTransactionInput, ExecuteTransactionOutput } from "../models/models_0"; +/** + * @public + */ +export type { __MetadataBearer }; +export { $Command }; +/** + * @public + * + * The input for {@link ExecuteTransactionCommand}. + */ +export interface ExecuteTransactionCommandInput extends ExecuteTransactionInput { +} +/** + * @public + * + * The output of {@link ExecuteTransactionCommand}. + */ +export interface ExecuteTransactionCommandOutput extends ExecuteTransactionOutput, __MetadataBearer { +} +declare const ExecuteTransactionCommand_base: { + new (input: ExecuteTransactionCommandInput): import("@smithy/smithy-client").CommandImpl; + new (__0_0: ExecuteTransactionCommandInput): import("@smithy/smithy-client").CommandImpl; + getEndpointParameterInstructions(): import("@smithy/middleware-endpoint").EndpointParameterInstructions; +}; +/** + *

This operation allows you to perform transactional reads or writes on data stored in + * DynamoDB, using PartiQL.

+ * + *

The entire transaction must consist of either read statements or write statements, + * you cannot mix both in one transaction. The EXISTS function is an exception and can + * be used to check the condition of specific attributes of the item in a similar + * manner to ConditionCheck in the TransactWriteItems API.

+ *
+ * @example + * Use a bare-bones client and the command you need to make an API call. + * ```javascript + * import { DynamoDBClient, ExecuteTransactionCommand } from "@aws-sdk/client-dynamodb"; // ES Modules import + * // const { DynamoDBClient, ExecuteTransactionCommand } = require("@aws-sdk/client-dynamodb"); // CommonJS import + * const client = new DynamoDBClient(config); + * const input = { // ExecuteTransactionInput + * TransactStatements: [ // ParameterizedStatements // required + * { // ParameterizedStatement + * Statement: "STRING_VALUE", // required + * Parameters: [ // PreparedStatementParameters + * { // AttributeValue Union: only one key present + * S: "STRING_VALUE", + * N: "STRING_VALUE", + * B: new Uint8Array(), // e.g. Buffer.from("") or new TextEncoder().encode("") + * SS: [ // StringSetAttributeValue + * "STRING_VALUE", + * ], + * NS: [ // NumberSetAttributeValue + * "STRING_VALUE", + * ], + * BS: [ // BinarySetAttributeValue + * new Uint8Array(), // e.g. Buffer.from("") or new TextEncoder().encode("") + * ], + * M: { // MapAttributeValue + * "": {// Union: only one key present + * S: "STRING_VALUE", + * N: "STRING_VALUE", + * B: new Uint8Array(), // e.g. Buffer.from("") or new TextEncoder().encode("") + * SS: [ + * "STRING_VALUE", + * ], + * NS: [ + * "STRING_VALUE", + * ], + * BS: [ + * new Uint8Array(), // e.g. 
Buffer.from("") or new TextEncoder().encode("") + * ], + * M: { + * "": "", + * }, + * L: [ // ListAttributeValue + * "", + * ], + * NULL: true || false, + * BOOL: true || false, + * }, + * }, + * L: [ + * "", + * ], + * NULL: true || false, + * BOOL: true || false, + * }, + * ], + * ReturnValuesOnConditionCheckFailure: "ALL_OLD" || "NONE", + * }, + * ], + * ClientRequestToken: "STRING_VALUE", + * ReturnConsumedCapacity: "INDEXES" || "TOTAL" || "NONE", + * }; + * const command = new ExecuteTransactionCommand(input); + * const response = await client.send(command); + * // { // ExecuteTransactionOutput + * // Responses: [ // ItemResponseList + * // { // ItemResponse + * // Item: { // AttributeMap + * // "": { // AttributeValue Union: only one key present + * // S: "STRING_VALUE", + * // N: "STRING_VALUE", + * // B: new Uint8Array(), + * // SS: [ // StringSetAttributeValue + * // "STRING_VALUE", + * // ], + * // NS: [ // NumberSetAttributeValue + * // "STRING_VALUE", + * // ], + * // BS: [ // BinarySetAttributeValue + * // new Uint8Array(), + * // ], + * // M: { // MapAttributeValue + * // "": {// Union: only one key present + * // S: "STRING_VALUE", + * // N: "STRING_VALUE", + * // B: new Uint8Array(), + * // SS: [ + * // "STRING_VALUE", + * // ], + * // NS: [ + * // "STRING_VALUE", + * // ], + * // BS: [ + * // new Uint8Array(), + * // ], + * // M: { + * // "": "", + * // }, + * // L: [ // ListAttributeValue + * // "", + * // ], + * // NULL: true || false, + * // BOOL: true || false, + * // }, + * // }, + * // L: [ + * // "", + * // ], + * // NULL: true || false, + * // BOOL: true || false, + * // }, + * // }, + * // }, + * // ], + * // ConsumedCapacity: [ // ConsumedCapacityMultiple + * // { // ConsumedCapacity + * // TableName: "STRING_VALUE", + * // CapacityUnits: Number("double"), + * // ReadCapacityUnits: Number("double"), + * // WriteCapacityUnits: Number("double"), + * // Table: { // Capacity + * // ReadCapacityUnits: Number("double"), + * // 
WriteCapacityUnits: Number("double"), + * // CapacityUnits: Number("double"), + * // }, + * // LocalSecondaryIndexes: { // SecondaryIndexesCapacityMap + * // "": { + * // ReadCapacityUnits: Number("double"), + * // WriteCapacityUnits: Number("double"), + * // CapacityUnits: Number("double"), + * // }, + * // }, + * // GlobalSecondaryIndexes: { + * // "": { + * // ReadCapacityUnits: Number("double"), + * // WriteCapacityUnits: Number("double"), + * // CapacityUnits: Number("double"), + * // }, + * // }, + * // }, + * // ], + * // }; + * + * ``` + * + * @param ExecuteTransactionCommandInput - {@link ExecuteTransactionCommandInput} + * @returns {@link ExecuteTransactionCommandOutput} + * @see {@link ExecuteTransactionCommandInput} for command's `input` shape. + * @see {@link ExecuteTransactionCommandOutput} for command's `response` shape. + * @see {@link DynamoDBClientResolvedConfig | config} for DynamoDBClient's `config` shape. + * + * @throws {@link IdempotentParameterMismatchException} (client fault) + *

DynamoDB rejected the request because you retried a request with a + * different payload but with an idempotent token that was already used.

+ * + * @throws {@link InternalServerError} (server fault) + *

An error occurred on the server side.

+ * + * @throws {@link ProvisionedThroughputExceededException} (client fault) + *

Your request rate is too high. The Amazon Web Services SDKs for DynamoDB + * automatically retry requests that receive this exception. Your request is eventually + * successful, unless your retry queue is too large to finish. Reduce the frequency of + * requests and use exponential backoff. For more information, go to Error Retries and Exponential Backoff in the Amazon DynamoDB Developer Guide.

+ * + * @throws {@link RequestLimitExceeded} (client fault) + *

Throughput exceeds the current throughput quota for your account. Please contact + * Amazon Web ServicesSupport to request a + * quota increase.

+ * + * @throws {@link ResourceNotFoundException} (client fault) + *

The operation tried to access a nonexistent table or index. The resource might not + * be specified correctly, or its status might not be ACTIVE.

+ * + * @throws {@link TransactionCanceledException} (client fault) + *

The entire transaction request was canceled.

+ *

DynamoDB cancels a TransactWriteItems request under the following + * circumstances:

+ *
    + *
  • + *

    A condition in one of the condition expressions is not met.

    + *
  • + *
  • + *

    A table in the TransactWriteItems request is in a different + * account or region.

    + *
  • + *
  • + *

    More than one action in the TransactWriteItems operation + * targets the same item.

    + *
  • + *
  • + *

    There is insufficient provisioned capacity for the transaction to be + * completed.

    + *
  • + *
  • + *

    An item size becomes too large (larger than 400 KB), or a local secondary + * index (LSI) becomes too large, or a similar validation error occurs because of + * changes made by the transaction.

    + *
  • + *
  • + *

    There is a user error, such as an invalid data format.

    + *
  • + *
  • + *

    + * There is an ongoing TransactWriteItems operation that conflicts with a concurrent + * TransactWriteItems request. In this case the TransactWriteItems operation + * fails with a TransactionCanceledException. + *

    + *
  • + *
+ *

DynamoDB cancels a TransactGetItems request under the + * following circumstances:

+ *
    + *
  • + *

    There is an ongoing TransactGetItems operation that conflicts + * with a concurrent PutItem, UpdateItem, + * DeleteItem or TransactWriteItems request. In this + * case the TransactGetItems operation fails with a + * TransactionCanceledException.

    + *
  • + *
  • + *

    A table in the TransactGetItems request is in a different + * account or region.

    + *
  • + *
  • + *

    There is insufficient provisioned capacity for the transaction to be + * completed.

    + *
  • + *
  • + *

    There is a user error, such as an invalid data format.

    + *
  • + *
+ * + *

If using Java, DynamoDB lists the cancellation reasons on the + * CancellationReasons property. This property is not set for other + * languages. Transaction cancellation reasons are ordered in the order of requested + * items, if an item has no error it will have None code and + * Null message.

+ *
+ *

Cancellation reason codes and possible error messages:

+ *
    + *
  • + *

    No Errors:

    + *
      + *
    • + *

      Code: None + *

      + *
    • + *
    • + *

      Message: null + *

      + *
    • + *
    + *
  • + *
  • + *

    Conditional Check Failed:

    + *
      + *
    • + *

      Code: ConditionalCheckFailed + *

      + *
    • + *
    • + *

      Message: The conditional request failed.

      + *
    • + *
    + *
  • + *
  • + *

    Item Collection Size Limit Exceeded:

    + *
      + *
    • + *

      Code: ItemCollectionSizeLimitExceeded + *

      + *
    • + *
    • + *

      Message: Collection size exceeded.

      + *
    • + *
    + *
  • + *
  • + *

    Transaction Conflict:

    + *
      + *
    • + *

      Code: TransactionConflict + *

      + *
    • + *
    • + *

      Message: Transaction is ongoing for the item.

      + *
    • + *
    + *
  • + *
  • + *

    Provisioned Throughput Exceeded:

    + *
      + *
    • + *

      Code: ProvisionedThroughputExceeded + *

      + *
    • + *
    • + *

      Messages:

      + *
        + *
      • + *

        The level of configured provisioned throughput for the + * table was exceeded. Consider increasing your provisioning level + * with the UpdateTable API.

        + * + *

        This Message is received when provisioned throughput is + * exceeded is on a provisioned DynamoDB + * table.

        + *
        + *
      • + *
      • + *

        The level of configured provisioned throughput for one or + * more global secondary indexes of the table was exceeded. + * Consider increasing your provisioning level for the + * under-provisioned global secondary indexes with the UpdateTable + * API.

        + * + *

        This message is returned when provisioned throughput is + * exceeded is on a provisioned GSI.

        + *
        + *
      • + *
      + *
    • + *
    + *
  • + *
  • + *

    Throttling Error:

    + *
      + *
    • + *

      Code: ThrottlingError + *

      + *
    • + *
    • + *

      Messages:

      + *
        + *
      • + *

        Throughput exceeds the current capacity of your table or + * index. DynamoDB is automatically scaling your table or + * index so please try again shortly. If exceptions persist, check + * if you have a hot key: + * https://docs.aws.amazon.com/amazondynamodb/latest/developerguide/bp-partition-key-design.html.

        + * + *

        This message is returned when writes get throttled on an + * On-Demand table as DynamoDB is automatically + * scaling the table.

        + *
        + *
      • + *
      • + *

        Throughput exceeds the current capacity for one or more + * global secondary indexes. DynamoDB is automatically + * scaling your index so please try again shortly.

        + * + *

        This message is returned when writes get throttled on + * an On-Demand GSI as DynamoDB is automatically + * scaling the GSI.

        + *
        + *
      • + *
      + *
    • + *
    + *
  • + *
  • + *

    Validation Error:

    + *
      + *
    • + *

      Code: ValidationError + *

      + *
    • + *
    • + *

      Messages:

      + *
        + *
      • + *

        One or more parameter values were invalid.

        + *
      • + *
      • + *

        The update expression attempted to update the secondary + * index key beyond allowed size limits.

        + *
      • + *
      • + *

        The update expression attempted to update the secondary + * index key to unsupported type.

        + *
      • + *
      • + *

        An operand in the update expression has an incorrect data + * type.

        + *
      • + *
      • + *

        Item size to update has exceeded the maximum allowed + * size.

        + *
      • + *
      • + *

        Number overflow. Attempting to store a number with + * magnitude larger than supported range.

        + *
      • + *
      • + *

        Type mismatch for attribute to update.

        + *
      • + *
      • + *

        Nesting Levels have exceeded supported limits.

        + *
      • + *
      • + *

        The document path provided in the update expression is + * invalid for update.

        + *
      • + *
      • + *

        The provided expression refers to an attribute that does + * not exist in the item.

        + *
      • + *
      + *
    • + *
    + *
  • + *
+ * + * @throws {@link TransactionInProgressException} (client fault) + *

The transaction with the given request token is already in progress.

+ *

+ * Recommended Settings + *

+ * + *

+ * This is a general recommendation for handling the TransactionInProgressException. These settings help + * ensure that the client retries will trigger completion of the ongoing TransactWriteItems request. + *

+ *
+ *
    + *
  • + *

    + * Set clientExecutionTimeout to a value that allows at least one retry to be processed after 5 + * seconds have elapsed since the first attempt for the TransactWriteItems operation. + *

    + *
  • + *
  • + *

    + * Set socketTimeout to a value a little lower than the requestTimeout setting. + *

    + *
  • + *
  • + *

    + * requestTimeout should be set based on the time taken for the individual retries of a single + * HTTP request for your use case, but setting it to 1 second or higher should work well to reduce chances of + * retries and TransactionInProgressException errors. + *

    + *
  • + *
  • + *

    + * Use exponential backoff when retrying and tune backoff if needed. + *

    + *
  • + *
+ *

+ * Assuming default retry policy, + * example timeout settings based on the guidelines above are as follows: + *

+ *

Example timeline:

+ *
    + *
  • + *

    0-1000 first attempt

    + *
  • + *
  • + *

    1000-1500 first sleep/delay (default retry policy uses 500 ms as base delay for 4xx errors)

    + *
  • + *
  • + *

    1500-2500 second attempt

    + *
  • + *
  • + *

    2500-3500 second sleep/delay (500 * 2, exponential backoff)

    + *
  • + *
  • + *

    3500-4500 third attempt

    + *
  • + *
  • + *

    4500-6500 third sleep/delay (500 * 2^2)

    + *
  • + *
  • + *

    6500-7500 fourth attempt (this can trigger inline recovery since 5 seconds have elapsed since the first attempt reached TC)

    + *
  • + *
+ * + * @throws {@link DynamoDBServiceException} + *

Base exception class for all service exceptions from DynamoDB service.

+ * + * + * @public + */ +export declare class ExecuteTransactionCommand extends ExecuteTransactionCommand_base { + /** @internal type navigation helper, not in runtime. */ + protected static __types: { + api: { + input: ExecuteTransactionInput; + output: ExecuteTransactionOutput; + }; + sdk: { + input: ExecuteTransactionCommandInput; + output: ExecuteTransactionCommandOutput; + }; + }; +} diff --git a/amplify/functions/deleteDocument/node_modules/@aws-sdk/client-dynamodb/dist-types/commands/ExportTableToPointInTimeCommand.d.ts b/amplify/functions/deleteDocument/node_modules/@aws-sdk/client-dynamodb/dist-types/commands/ExportTableToPointInTimeCommand.d.ts new file mode 100644 index 0000000..672cebb --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@aws-sdk/client-dynamodb/dist-types/commands/ExportTableToPointInTimeCommand.d.ts @@ -0,0 +1,147 @@ +import { Command as $Command } from "@smithy/smithy-client"; +import { MetadataBearer as __MetadataBearer } from "@smithy/types"; +import { DynamoDBClientResolvedConfig, ServiceInputTypes, ServiceOutputTypes } from "../DynamoDBClient"; +import { ExportTableToPointInTimeInput, ExportTableToPointInTimeOutput } from "../models/models_0"; +/** + * @public + */ +export type { __MetadataBearer }; +export { $Command }; +/** + * @public + * + * The input for {@link ExportTableToPointInTimeCommand}. + */ +export interface ExportTableToPointInTimeCommandInput extends ExportTableToPointInTimeInput { +} +/** + * @public + * + * The output of {@link ExportTableToPointInTimeCommand}. 
+ */ +export interface ExportTableToPointInTimeCommandOutput extends ExportTableToPointInTimeOutput, __MetadataBearer { +} +declare const ExportTableToPointInTimeCommand_base: { + new (input: ExportTableToPointInTimeCommandInput): import("@smithy/smithy-client").CommandImpl; + new (__0_0: ExportTableToPointInTimeCommandInput): import("@smithy/smithy-client").CommandImpl; + getEndpointParameterInstructions(): import("@smithy/middleware-endpoint").EndpointParameterInstructions; +}; +/** + *

Exports table data to an S3 bucket. The table must have point in time recovery + * enabled, and you can export data from any time within the point in time recovery + * window.

+ * @example + * Use a bare-bones client and the command you need to make an API call. + * ```javascript + * import { DynamoDBClient, ExportTableToPointInTimeCommand } from "@aws-sdk/client-dynamodb"; // ES Modules import + * // const { DynamoDBClient, ExportTableToPointInTimeCommand } = require("@aws-sdk/client-dynamodb"); // CommonJS import + * const client = new DynamoDBClient(config); + * const input = { // ExportTableToPointInTimeInput + * TableArn: "STRING_VALUE", // required + * ExportTime: new Date("TIMESTAMP"), + * ClientToken: "STRING_VALUE", + * S3Bucket: "STRING_VALUE", // required + * S3BucketOwner: "STRING_VALUE", + * S3Prefix: "STRING_VALUE", + * S3SseAlgorithm: "AES256" || "KMS", + * S3SseKmsKeyId: "STRING_VALUE", + * ExportFormat: "DYNAMODB_JSON" || "ION", + * ExportType: "FULL_EXPORT" || "INCREMENTAL_EXPORT", + * IncrementalExportSpecification: { // IncrementalExportSpecification + * ExportFromTime: new Date("TIMESTAMP"), + * ExportToTime: new Date("TIMESTAMP"), + * ExportViewType: "NEW_IMAGE" || "NEW_AND_OLD_IMAGES", + * }, + * }; + * const command = new ExportTableToPointInTimeCommand(input); + * const response = await client.send(command); + * // { // ExportTableToPointInTimeOutput + * // ExportDescription: { // ExportDescription + * // ExportArn: "STRING_VALUE", + * // ExportStatus: "IN_PROGRESS" || "COMPLETED" || "FAILED", + * // StartTime: new Date("TIMESTAMP"), + * // EndTime: new Date("TIMESTAMP"), + * // ExportManifest: "STRING_VALUE", + * // TableArn: "STRING_VALUE", + * // TableId: "STRING_VALUE", + * // ExportTime: new Date("TIMESTAMP"), + * // ClientToken: "STRING_VALUE", + * // S3Bucket: "STRING_VALUE", + * // S3BucketOwner: "STRING_VALUE", + * // S3Prefix: "STRING_VALUE", + * // S3SseAlgorithm: "AES256" || "KMS", + * // S3SseKmsKeyId: "STRING_VALUE", + * // FailureCode: "STRING_VALUE", + * // FailureMessage: "STRING_VALUE", + * // ExportFormat: "DYNAMODB_JSON" || "ION", + * // BilledSizeBytes: Number("long"), + * // ItemCount: 
Number("long"), + * // ExportType: "FULL_EXPORT" || "INCREMENTAL_EXPORT", + * // IncrementalExportSpecification: { // IncrementalExportSpecification + * // ExportFromTime: new Date("TIMESTAMP"), + * // ExportToTime: new Date("TIMESTAMP"), + * // ExportViewType: "NEW_IMAGE" || "NEW_AND_OLD_IMAGES", + * // }, + * // }, + * // }; + * + * ``` + * + * @param ExportTableToPointInTimeCommandInput - {@link ExportTableToPointInTimeCommandInput} + * @returns {@link ExportTableToPointInTimeCommandOutput} + * @see {@link ExportTableToPointInTimeCommandInput} for command's `input` shape. + * @see {@link ExportTableToPointInTimeCommandOutput} for command's `response` shape. + * @see {@link DynamoDBClientResolvedConfig | config} for DynamoDBClient's `config` shape. + * + * @throws {@link ExportConflictException} (client fault) + *

There was a conflict when writing to the specified S3 bucket.

+ * + * @throws {@link InternalServerError} (server fault) + *

An error occurred on the server side.

+ * + * @throws {@link InvalidExportTimeException} (client fault) + *

The specified ExportTime is outside of the point in time recovery + * window.

+ * + * @throws {@link LimitExceededException} (client fault) + *

There is no limit to the number of daily on-demand backups that can be taken.

+ *

For most purposes, up to 500 simultaneous table operations are allowed per account. These operations + * include CreateTable, UpdateTable, + * DeleteTable,UpdateTimeToLive, + * RestoreTableFromBackup, and RestoreTableToPointInTime.

+ *

When you are creating a table with one or more secondary + * indexes, you can have up to 250 such requests running at a time. However, if the table or + * index specifications are complex, then DynamoDB might temporarily reduce the number + * of concurrent operations.

+ *

When importing into DynamoDB, up to 50 simultaneous import table operations are allowed per account.

+ *

There is a soft account quota of 2,500 tables.

+ *

GetRecords was called with a value of more than 1000 for the limit request parameter.

+ *

More than 2 processes are reading from the same streams shard at the same time. Exceeding + * this limit may result in request throttling.

+ * + * @throws {@link PointInTimeRecoveryUnavailableException} (client fault) + *

Point in time recovery has not yet been enabled for this source table.

+ * + * @throws {@link TableNotFoundException} (client fault) + *

A source table with the name TableName does not currently exist within + * the subscriber's account or the subscriber is operating in the wrong Amazon Web Services Region.

+ * + * @throws {@link DynamoDBServiceException} + *

Base exception class for all service exceptions from DynamoDB service.

+ * + * + * @public + */ +export declare class ExportTableToPointInTimeCommand extends ExportTableToPointInTimeCommand_base { + /** @internal type navigation helper, not in runtime. */ + protected static __types: { + api: { + input: ExportTableToPointInTimeInput; + output: ExportTableToPointInTimeOutput; + }; + sdk: { + input: ExportTableToPointInTimeCommandInput; + output: ExportTableToPointInTimeCommandOutput; + }; + }; +} diff --git a/amplify/functions/deleteDocument/node_modules/@aws-sdk/client-dynamodb/dist-types/commands/GetItemCommand.d.ts b/amplify/functions/deleteDocument/node_modules/@aws-sdk/client-dynamodb/dist-types/commands/GetItemCommand.d.ts new file mode 100644 index 0000000..b5e2dfa --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@aws-sdk/client-dynamodb/dist-types/commands/GetItemCommand.d.ts @@ -0,0 +1,255 @@ +import { Command as $Command } from "@smithy/smithy-client"; +import { MetadataBearer as __MetadataBearer } from "@smithy/types"; +import { DynamoDBClientResolvedConfig, ServiceInputTypes, ServiceOutputTypes } from "../DynamoDBClient"; +import { GetItemInput, GetItemOutput } from "../models/models_0"; +/** + * @public + */ +export type { __MetadataBearer }; +export { $Command }; +/** + * @public + * + * The input for {@link GetItemCommand}. + */ +export interface GetItemCommandInput extends GetItemInput { +} +/** + * @public + * + * The output of {@link GetItemCommand}. + */ +export interface GetItemCommandOutput extends GetItemOutput, __MetadataBearer { +} +declare const GetItemCommand_base: { + new (input: GetItemCommandInput): import("@smithy/smithy-client").CommandImpl; + new (__0_0: GetItemCommandInput): import("@smithy/smithy-client").CommandImpl; + getEndpointParameterInstructions(): import("@smithy/middleware-endpoint").EndpointParameterInstructions; +}; +/** + *

The GetItem operation returns a set of attributes for the item with the + * given primary key. If there is no matching item, GetItem does not return + * any data and there will be no Item element in the response.

+ *

+ * GetItem provides an eventually consistent read by default. If your + * application requires a strongly consistent read, set ConsistentRead to + * true. Although a strongly consistent read might take more time than an + * eventually consistent read, it always returns the last updated value.

+ * @example + * Use a bare-bones client and the command you need to make an API call. + * ```javascript + * import { DynamoDBClient, GetItemCommand } from "@aws-sdk/client-dynamodb"; // ES Modules import + * // const { DynamoDBClient, GetItemCommand } = require("@aws-sdk/client-dynamodb"); // CommonJS import + * const client = new DynamoDBClient(config); + * const input = { // GetItemInput + * TableName: "STRING_VALUE", // required + * Key: { // Key // required + * "": { // AttributeValue Union: only one key present + * S: "STRING_VALUE", + * N: "STRING_VALUE", + * B: new Uint8Array(), // e.g. Buffer.from("") or new TextEncoder().encode("") + * SS: [ // StringSetAttributeValue + * "STRING_VALUE", + * ], + * NS: [ // NumberSetAttributeValue + * "STRING_VALUE", + * ], + * BS: [ // BinarySetAttributeValue + * new Uint8Array(), // e.g. Buffer.from("") or new TextEncoder().encode("") + * ], + * M: { // MapAttributeValue + * "": {// Union: only one key present + * S: "STRING_VALUE", + * N: "STRING_VALUE", + * B: new Uint8Array(), // e.g. Buffer.from("") or new TextEncoder().encode("") + * SS: [ + * "STRING_VALUE", + * ], + * NS: [ + * "STRING_VALUE", + * ], + * BS: [ + * new Uint8Array(), // e.g. 
Buffer.from("") or new TextEncoder().encode("") + * ], + * M: { + * "": "", + * }, + * L: [ // ListAttributeValue + * "", + * ], + * NULL: true || false, + * BOOL: true || false, + * }, + * }, + * L: [ + * "", + * ], + * NULL: true || false, + * BOOL: true || false, + * }, + * }, + * AttributesToGet: [ // AttributeNameList + * "STRING_VALUE", + * ], + * ConsistentRead: true || false, + * ReturnConsumedCapacity: "INDEXES" || "TOTAL" || "NONE", + * ProjectionExpression: "STRING_VALUE", + * ExpressionAttributeNames: { // ExpressionAttributeNameMap + * "": "STRING_VALUE", + * }, + * }; + * const command = new GetItemCommand(input); + * const response = await client.send(command); + * // { // GetItemOutput + * // Item: { // AttributeMap + * // "": { // AttributeValue Union: only one key present + * // S: "STRING_VALUE", + * // N: "STRING_VALUE", + * // B: new Uint8Array(), + * // SS: [ // StringSetAttributeValue + * // "STRING_VALUE", + * // ], + * // NS: [ // NumberSetAttributeValue + * // "STRING_VALUE", + * // ], + * // BS: [ // BinarySetAttributeValue + * // new Uint8Array(), + * // ], + * // M: { // MapAttributeValue + * // "": {// Union: only one key present + * // S: "STRING_VALUE", + * // N: "STRING_VALUE", + * // B: new Uint8Array(), + * // SS: [ + * // "STRING_VALUE", + * // ], + * // NS: [ + * // "STRING_VALUE", + * // ], + * // BS: [ + * // new Uint8Array(), + * // ], + * // M: { + * // "": "", + * // }, + * // L: [ // ListAttributeValue + * // "", + * // ], + * // NULL: true || false, + * // BOOL: true || false, + * // }, + * // }, + * // L: [ + * // "", + * // ], + * // NULL: true || false, + * // BOOL: true || false, + * // }, + * // }, + * // ConsumedCapacity: { // ConsumedCapacity + * // TableName: "STRING_VALUE", + * // CapacityUnits: Number("double"), + * // ReadCapacityUnits: Number("double"), + * // WriteCapacityUnits: Number("double"), + * // Table: { // Capacity + * // ReadCapacityUnits: Number("double"), + * // WriteCapacityUnits: 
Number("double"), + * // CapacityUnits: Number("double"), + * // }, + * // LocalSecondaryIndexes: { // SecondaryIndexesCapacityMap + * // "": { + * // ReadCapacityUnits: Number("double"), + * // WriteCapacityUnits: Number("double"), + * // CapacityUnits: Number("double"), + * // }, + * // }, + * // GlobalSecondaryIndexes: { + * // "": { + * // ReadCapacityUnits: Number("double"), + * // WriteCapacityUnits: Number("double"), + * // CapacityUnits: Number("double"), + * // }, + * // }, + * // }, + * // }; + * + * ``` + * + * @param GetItemCommandInput - {@link GetItemCommandInput} + * @returns {@link GetItemCommandOutput} + * @see {@link GetItemCommandInput} for command's `input` shape. + * @see {@link GetItemCommandOutput} for command's `response` shape. + * @see {@link DynamoDBClientResolvedConfig | config} for DynamoDBClient's `config` shape. + * + * @throws {@link InternalServerError} (server fault) + *

An error occurred on the server side.

+ * + * @throws {@link InvalidEndpointException} (client fault) + * + * @throws {@link ProvisionedThroughputExceededException} (client fault) + *

Your request rate is too high. The Amazon Web Services SDKs for DynamoDB + * automatically retry requests that receive this exception. Your request is eventually + * successful, unless your retry queue is too large to finish. Reduce the frequency of + * requests and use exponential backoff. For more information, go to Error Retries and Exponential Backoff in the Amazon DynamoDB Developer Guide.

+ * + * @throws {@link RequestLimitExceeded} (client fault) + *

Throughput exceeds the current throughput quota for your account. Please contact + * Amazon Web ServicesSupport to request a + * quota increase.

+ * + * @throws {@link ResourceNotFoundException} (client fault) + *

The operation tried to access a nonexistent table or index. The resource might not + * be specified correctly, or its status might not be ACTIVE.

+ * + * @throws {@link DynamoDBServiceException} + *

Base exception class for all service exceptions from DynamoDB service.

+ * + * + * @example To read an item from a table + * ```javascript + * // This example retrieves an item from the Music table. The table has a partition key and a sort key (Artist and SongTitle), so you must specify both of these attributes. + * const input = { + * Key: { + * Artist: { + * S: "Acme Band" + * }, + * SongTitle: { + * S: "Happy Day" + * } + * }, + * TableName: "Music" + * }; + * const command = new GetItemCommand(input); + * const response = await client.send(command); + * /* response is + * { + * Item: { + * AlbumTitle: { + * S: "Songs About Life" + * }, + * Artist: { + * S: "Acme Band" + * }, + * SongTitle: { + * S: "Happy Day" + * } + * } + * } + * *\/ + * ``` + * + * @public + */ +export declare class GetItemCommand extends GetItemCommand_base { + /** @internal type navigation helper, not in runtime. */ + protected static __types: { + api: { + input: GetItemInput; + output: GetItemOutput; + }; + sdk: { + input: GetItemCommandInput; + output: GetItemCommandOutput; + }; + }; +} diff --git a/amplify/functions/deleteDocument/node_modules/@aws-sdk/client-dynamodb/dist-types/commands/GetResourcePolicyCommand.d.ts b/amplify/functions/deleteDocument/node_modules/@aws-sdk/client-dynamodb/dist-types/commands/GetResourcePolicyCommand.d.ts new file mode 100644 index 0000000..9544c1a --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@aws-sdk/client-dynamodb/dist-types/commands/GetResourcePolicyCommand.d.ts @@ -0,0 +1,121 @@ +import { Command as $Command } from "@smithy/smithy-client"; +import { MetadataBearer as __MetadataBearer } from "@smithy/types"; +import { DynamoDBClientResolvedConfig, ServiceInputTypes, ServiceOutputTypes } from "../DynamoDBClient"; +import { GetResourcePolicyInput, GetResourcePolicyOutput } from "../models/models_0"; +/** + * @public + */ +export type { __MetadataBearer }; +export { $Command }; +/** + * @public + * + * The input for {@link GetResourcePolicyCommand}. 
+ */ +export interface GetResourcePolicyCommandInput extends GetResourcePolicyInput { +} +/** + * @public + * + * The output of {@link GetResourcePolicyCommand}. + */ +export interface GetResourcePolicyCommandOutput extends GetResourcePolicyOutput, __MetadataBearer { +} +declare const GetResourcePolicyCommand_base: { + new (input: GetResourcePolicyCommandInput): import("@smithy/smithy-client").CommandImpl; + new (__0_0: GetResourcePolicyCommandInput): import("@smithy/smithy-client").CommandImpl; + getEndpointParameterInstructions(): import("@smithy/middleware-endpoint").EndpointParameterInstructions; +}; +/** + *

Returns the resource-based policy document attached to the resource, which can be a + * table or stream, in JSON format.

+ *

+ * GetResourcePolicy follows an + * eventually consistent + * model. The following list + * describes the outcomes when you issue the GetResourcePolicy request + * immediately after issuing another request:

+ *
    + *
  • + *

    If you issue a GetResourcePolicy request immediately after a + * PutResourcePolicy request, DynamoDB might return a + * PolicyNotFoundException.

    + *
  • + *
  • + *

    If you issue a GetResourcePolicyrequest immediately after a + * DeleteResourcePolicy request, DynamoDB might return + * the policy that was present before the deletion request.

    + *
  • + *
  • + *

    If you issue a GetResourcePolicy request immediately after a + * CreateTable request, which includes a resource-based policy, + * DynamoDB might return a ResourceNotFoundException or + * a PolicyNotFoundException.

    + *
  • + *
+ *

Because GetResourcePolicy uses an eventually + * consistent query, the metadata for your policy or table might not be + * available at that moment. Wait for a few seconds, and then retry the + * GetResourcePolicy request.

+ *

After a GetResourcePolicy request returns a policy created using the + * PutResourcePolicy request, the policy will be applied in the + * authorization of requests to the resource. Because this process is eventually + * consistent, it will take some time to apply the policy to all requests to a resource. + * Policies that you attach while creating a table using the CreateTable + * request will always be applied to all requests for that table.

+ * @example + * Use a bare-bones client and the command you need to make an API call. + * ```javascript + * import { DynamoDBClient, GetResourcePolicyCommand } from "@aws-sdk/client-dynamodb"; // ES Modules import + * // const { DynamoDBClient, GetResourcePolicyCommand } = require("@aws-sdk/client-dynamodb"); // CommonJS import + * const client = new DynamoDBClient(config); + * const input = { // GetResourcePolicyInput + * ResourceArn: "STRING_VALUE", // required + * }; + * const command = new GetResourcePolicyCommand(input); + * const response = await client.send(command); + * // { // GetResourcePolicyOutput + * // Policy: "STRING_VALUE", + * // RevisionId: "STRING_VALUE", + * // }; + * + * ``` + * + * @param GetResourcePolicyCommandInput - {@link GetResourcePolicyCommandInput} + * @returns {@link GetResourcePolicyCommandOutput} + * @see {@link GetResourcePolicyCommandInput} for command's `input` shape. + * @see {@link GetResourcePolicyCommandOutput} for command's `response` shape. + * @see {@link DynamoDBClientResolvedConfig | config} for DynamoDBClient's `config` shape. + * + * @throws {@link InternalServerError} (server fault) + *

An error occurred on the server side.

+ * + * @throws {@link InvalidEndpointException} (client fault) + * + * @throws {@link PolicyNotFoundException} (client fault) + *

The operation tried to access a nonexistent resource-based policy.

+ *

If you specified an ExpectedRevisionId, it's possible that a policy is present for the resource but its revision ID didn't match the expected value.

+ * + * @throws {@link ResourceNotFoundException} (client fault) + *

The operation tried to access a nonexistent table or index. The resource might not + * be specified correctly, or its status might not be ACTIVE.

+ * + * @throws {@link DynamoDBServiceException} + *

Base exception class for all service exceptions from DynamoDB service.

+ * + * + * @public + */ +export declare class GetResourcePolicyCommand extends GetResourcePolicyCommand_base { + /** @internal type navigation helper, not in runtime. */ + protected static __types: { + api: { + input: GetResourcePolicyInput; + output: GetResourcePolicyOutput; + }; + sdk: { + input: GetResourcePolicyCommandInput; + output: GetResourcePolicyCommandOutput; + }; + }; +} diff --git a/amplify/functions/deleteDocument/node_modules/@aws-sdk/client-dynamodb/dist-types/commands/ImportTableCommand.d.ts b/amplify/functions/deleteDocument/node_modules/@aws-sdk/client-dynamodb/dist-types/commands/ImportTableCommand.d.ts new file mode 100644 index 0000000..48a37a1 --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@aws-sdk/client-dynamodb/dist-types/commands/ImportTableCommand.d.ts @@ -0,0 +1,271 @@ +import { Command as $Command } from "@smithy/smithy-client"; +import { MetadataBearer as __MetadataBearer } from "@smithy/types"; +import { DynamoDBClientResolvedConfig, ServiceInputTypes, ServiceOutputTypes } from "../DynamoDBClient"; +import { ImportTableInput, ImportTableOutput } from "../models/models_0"; +/** + * @public + */ +export type { __MetadataBearer }; +export { $Command }; +/** + * @public + * + * The input for {@link ImportTableCommand}. + */ +export interface ImportTableCommandInput extends ImportTableInput { +} +/** + * @public + * + * The output of {@link ImportTableCommand}. + */ +export interface ImportTableCommandOutput extends ImportTableOutput, __MetadataBearer { +} +declare const ImportTableCommand_base: { + new (input: ImportTableCommandInput): import("@smithy/smithy-client").CommandImpl; + new (__0_0: ImportTableCommandInput): import("@smithy/smithy-client").CommandImpl; + getEndpointParameterInstructions(): import("@smithy/middleware-endpoint").EndpointParameterInstructions; +}; +/** + *

Imports table data from an S3 bucket.

+ * @example + * Use a bare-bones client and the command you need to make an API call. + * ```javascript + * import { DynamoDBClient, ImportTableCommand } from "@aws-sdk/client-dynamodb"; // ES Modules import + * // const { DynamoDBClient, ImportTableCommand } = require("@aws-sdk/client-dynamodb"); // CommonJS import + * const client = new DynamoDBClient(config); + * const input = { // ImportTableInput + * ClientToken: "STRING_VALUE", + * S3BucketSource: { // S3BucketSource + * S3BucketOwner: "STRING_VALUE", + * S3Bucket: "STRING_VALUE", // required + * S3KeyPrefix: "STRING_VALUE", + * }, + * InputFormat: "DYNAMODB_JSON" || "ION" || "CSV", // required + * InputFormatOptions: { // InputFormatOptions + * Csv: { // CsvOptions + * Delimiter: "STRING_VALUE", + * HeaderList: [ // CsvHeaderList + * "STRING_VALUE", + * ], + * }, + * }, + * InputCompressionType: "GZIP" || "ZSTD" || "NONE", + * TableCreationParameters: { // TableCreationParameters + * TableName: "STRING_VALUE", // required + * AttributeDefinitions: [ // AttributeDefinitions // required + * { // AttributeDefinition + * AttributeName: "STRING_VALUE", // required + * AttributeType: "S" || "N" || "B", // required + * }, + * ], + * KeySchema: [ // KeySchema // required + * { // KeySchemaElement + * AttributeName: "STRING_VALUE", // required + * KeyType: "HASH" || "RANGE", // required + * }, + * ], + * BillingMode: "PROVISIONED" || "PAY_PER_REQUEST", + * ProvisionedThroughput: { // ProvisionedThroughput + * ReadCapacityUnits: Number("long"), // required + * WriteCapacityUnits: Number("long"), // required + * }, + * OnDemandThroughput: { // OnDemandThroughput + * MaxReadRequestUnits: Number("long"), + * MaxWriteRequestUnits: Number("long"), + * }, + * SSESpecification: { // SSESpecification + * Enabled: true || false, + * SSEType: "AES256" || "KMS", + * KMSMasterKeyId: "STRING_VALUE", + * }, + * GlobalSecondaryIndexes: [ // GlobalSecondaryIndexList + * { // GlobalSecondaryIndex + * IndexName: "STRING_VALUE", // 
required + * KeySchema: [ // required + * { + * AttributeName: "STRING_VALUE", // required + * KeyType: "HASH" || "RANGE", // required + * }, + * ], + * Projection: { // Projection + * ProjectionType: "ALL" || "KEYS_ONLY" || "INCLUDE", + * NonKeyAttributes: [ // NonKeyAttributeNameList + * "STRING_VALUE", + * ], + * }, + * ProvisionedThroughput: { + * ReadCapacityUnits: Number("long"), // required + * WriteCapacityUnits: Number("long"), // required + * }, + * OnDemandThroughput: { + * MaxReadRequestUnits: Number("long"), + * MaxWriteRequestUnits: Number("long"), + * }, + * WarmThroughput: { // WarmThroughput + * ReadUnitsPerSecond: Number("long"), + * WriteUnitsPerSecond: Number("long"), + * }, + * }, + * ], + * }, + * }; + * const command = new ImportTableCommand(input); + * const response = await client.send(command); + * // { // ImportTableOutput + * // ImportTableDescription: { // ImportTableDescription + * // ImportArn: "STRING_VALUE", + * // ImportStatus: "IN_PROGRESS" || "COMPLETED" || "CANCELLING" || "CANCELLED" || "FAILED", + * // TableArn: "STRING_VALUE", + * // TableId: "STRING_VALUE", + * // ClientToken: "STRING_VALUE", + * // S3BucketSource: { // S3BucketSource + * // S3BucketOwner: "STRING_VALUE", + * // S3Bucket: "STRING_VALUE", // required + * // S3KeyPrefix: "STRING_VALUE", + * // }, + * // ErrorCount: Number("long"), + * // CloudWatchLogGroupArn: "STRING_VALUE", + * // InputFormat: "DYNAMODB_JSON" || "ION" || "CSV", + * // InputFormatOptions: { // InputFormatOptions + * // Csv: { // CsvOptions + * // Delimiter: "STRING_VALUE", + * // HeaderList: [ // CsvHeaderList + * // "STRING_VALUE", + * // ], + * // }, + * // }, + * // InputCompressionType: "GZIP" || "ZSTD" || "NONE", + * // TableCreationParameters: { // TableCreationParameters + * // TableName: "STRING_VALUE", // required + * // AttributeDefinitions: [ // AttributeDefinitions // required + * // { // AttributeDefinition + * // AttributeName: "STRING_VALUE", // required + * // AttributeType: 
"S" || "N" || "B", // required + * // }, + * // ], + * // KeySchema: [ // KeySchema // required + * // { // KeySchemaElement + * // AttributeName: "STRING_VALUE", // required + * // KeyType: "HASH" || "RANGE", // required + * // }, + * // ], + * // BillingMode: "PROVISIONED" || "PAY_PER_REQUEST", + * // ProvisionedThroughput: { // ProvisionedThroughput + * // ReadCapacityUnits: Number("long"), // required + * // WriteCapacityUnits: Number("long"), // required + * // }, + * // OnDemandThroughput: { // OnDemandThroughput + * // MaxReadRequestUnits: Number("long"), + * // MaxWriteRequestUnits: Number("long"), + * // }, + * // SSESpecification: { // SSESpecification + * // Enabled: true || false, + * // SSEType: "AES256" || "KMS", + * // KMSMasterKeyId: "STRING_VALUE", + * // }, + * // GlobalSecondaryIndexes: [ // GlobalSecondaryIndexList + * // { // GlobalSecondaryIndex + * // IndexName: "STRING_VALUE", // required + * // KeySchema: [ // required + * // { + * // AttributeName: "STRING_VALUE", // required + * // KeyType: "HASH" || "RANGE", // required + * // }, + * // ], + * // Projection: { // Projection + * // ProjectionType: "ALL" || "KEYS_ONLY" || "INCLUDE", + * // NonKeyAttributes: [ // NonKeyAttributeNameList + * // "STRING_VALUE", + * // ], + * // }, + * // ProvisionedThroughput: { + * // ReadCapacityUnits: Number("long"), // required + * // WriteCapacityUnits: Number("long"), // required + * // }, + * // OnDemandThroughput: { + * // MaxReadRequestUnits: Number("long"), + * // MaxWriteRequestUnits: Number("long"), + * // }, + * // WarmThroughput: { // WarmThroughput + * // ReadUnitsPerSecond: Number("long"), + * // WriteUnitsPerSecond: Number("long"), + * // }, + * // }, + * // ], + * // }, + * // StartTime: new Date("TIMESTAMP"), + * // EndTime: new Date("TIMESTAMP"), + * // ProcessedSizeBytes: Number("long"), + * // ProcessedItemCount: Number("long"), + * // ImportedItemCount: Number("long"), + * // FailureCode: "STRING_VALUE", + * // FailureMessage: 
"STRING_VALUE", + * // }, + * // }; + * + * ``` + * + * @param ImportTableCommandInput - {@link ImportTableCommandInput} + * @returns {@link ImportTableCommandOutput} + * @see {@link ImportTableCommandInput} for command's `input` shape. + * @see {@link ImportTableCommandOutput} for command's `response` shape. + * @see {@link DynamoDBClientResolvedConfig | config} for DynamoDBClient's `config` shape. + * + * @throws {@link ImportConflictException} (client fault) + *

+ * There was a conflict when importing from the specified S3 source. + * This can occur when the current import conflicts with a previous import request + * that had the same client token. + *

+ * + * @throws {@link LimitExceededException} (client fault) + *

There is no limit to the number of daily on-demand backups that can be taken.

+ *

For most purposes, up to 500 simultaneous table operations are allowed per account. These operations + * include CreateTable, UpdateTable, + * DeleteTable,UpdateTimeToLive, + * RestoreTableFromBackup, and RestoreTableToPointInTime.

+ *

When you are creating a table with one or more secondary + * indexes, you can have up to 250 such requests running at a time. However, if the table or + * index specifications are complex, then DynamoDB might temporarily reduce the number + * of concurrent operations.

+ *

When importing into DynamoDB, up to 50 simultaneous import table operations are allowed per account.

+ *

There is a soft account quota of 2,500 tables.

+ *

GetRecords was called with a value of more than 1000 for the limit request parameter.

+ *

More than 2 processes are reading from the same streams shard at the same time. Exceeding + * this limit may result in request throttling.

+ * + * @throws {@link ResourceInUseException} (client fault) + *

The operation conflicts with the resource's availability. For example:

+ *
    + *
  • + *

    You attempted to recreate an existing table.

    + *
  • + *
  • + *

    You tried to delete a table currently in the CREATING state.

    + *
  • + *
  • + *

    You tried to update a resource that was already being updated.

    + *
  • + *
+ *

When appropriate, wait for the ongoing update to complete and attempt the request again.

+ * + * @throws {@link DynamoDBServiceException} + *

Base exception class for all service exceptions from DynamoDB service.

+ * + * + * @public + */ +export declare class ImportTableCommand extends ImportTableCommand_base { + /** @internal type navigation helper, not in runtime. */ + protected static __types: { + api: { + input: ImportTableInput; + output: ImportTableOutput; + }; + sdk: { + input: ImportTableCommandInput; + output: ImportTableCommandOutput; + }; + }; +} diff --git a/amplify/functions/deleteDocument/node_modules/@aws-sdk/client-dynamodb/dist-types/commands/ListBackupsCommand.d.ts b/amplify/functions/deleteDocument/node_modules/@aws-sdk/client-dynamodb/dist-types/commands/ListBackupsCommand.d.ts new file mode 100644 index 0000000..50c70da --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@aws-sdk/client-dynamodb/dist-types/commands/ListBackupsCommand.d.ts @@ -0,0 +1,107 @@ +import { Command as $Command } from "@smithy/smithy-client"; +import { MetadataBearer as __MetadataBearer } from "@smithy/types"; +import { DynamoDBClientResolvedConfig, ServiceInputTypes, ServiceOutputTypes } from "../DynamoDBClient"; +import { ListBackupsInput, ListBackupsOutput } from "../models/models_0"; +/** + * @public + */ +export type { __MetadataBearer }; +export { $Command }; +/** + * @public + * + * The input for {@link ListBackupsCommand}. + */ +export interface ListBackupsCommandInput extends ListBackupsInput { +} +/** + * @public + * + * The output of {@link ListBackupsCommand}. + */ +export interface ListBackupsCommandOutput extends ListBackupsOutput, __MetadataBearer { +} +declare const ListBackupsCommand_base: { + new (input: ListBackupsCommandInput): import("@smithy/smithy-client").CommandImpl; + new (...[input]: [] | [ListBackupsCommandInput]): import("@smithy/smithy-client").CommandImpl; + getEndpointParameterInstructions(): import("@smithy/middleware-endpoint").EndpointParameterInstructions; +}; +/** + *

List DynamoDB backups that are associated with an Amazon Web Services account and + * weren't made with Amazon Web Services Backup. To list these backups for a given table, + * specify TableName. ListBackups returns a paginated list of + * results with at most 1 MB worth of items in a page. You can also specify a maximum + * number of entries to be returned in a page.

+ *

In the request, start time is inclusive, but end time is exclusive. Note that these + * boundaries are for the time at which the original backup was requested.

+ *

You can call ListBackups a maximum of five times per second.

+ *

If you want to retrieve the complete list of backups made with Amazon Web Services + * Backup, use the Amazon Web Services Backup + * list API. + *

+ * @example + * Use a bare-bones client and the command you need to make an API call. + * ```javascript + * import { DynamoDBClient, ListBackupsCommand } from "@aws-sdk/client-dynamodb"; // ES Modules import + * // const { DynamoDBClient, ListBackupsCommand } = require("@aws-sdk/client-dynamodb"); // CommonJS import + * const client = new DynamoDBClient(config); + * const input = { // ListBackupsInput + * TableName: "STRING_VALUE", + * Limit: Number("int"), + * TimeRangeLowerBound: new Date("TIMESTAMP"), + * TimeRangeUpperBound: new Date("TIMESTAMP"), + * ExclusiveStartBackupArn: "STRING_VALUE", + * BackupType: "USER" || "SYSTEM" || "AWS_BACKUP" || "ALL", + * }; + * const command = new ListBackupsCommand(input); + * const response = await client.send(command); + * // { // ListBackupsOutput + * // BackupSummaries: [ // BackupSummaries + * // { // BackupSummary + * // TableName: "STRING_VALUE", + * // TableId: "STRING_VALUE", + * // TableArn: "STRING_VALUE", + * // BackupArn: "STRING_VALUE", + * // BackupName: "STRING_VALUE", + * // BackupCreationDateTime: new Date("TIMESTAMP"), + * // BackupExpiryDateTime: new Date("TIMESTAMP"), + * // BackupStatus: "CREATING" || "DELETED" || "AVAILABLE", + * // BackupType: "USER" || "SYSTEM" || "AWS_BACKUP", + * // BackupSizeBytes: Number("long"), + * // }, + * // ], + * // LastEvaluatedBackupArn: "STRING_VALUE", + * // }; + * + * ``` + * + * @param ListBackupsCommandInput - {@link ListBackupsCommandInput} + * @returns {@link ListBackupsCommandOutput} + * @see {@link ListBackupsCommandInput} for command's `input` shape. + * @see {@link ListBackupsCommandOutput} for command's `response` shape. + * @see {@link DynamoDBClientResolvedConfig | config} for DynamoDBClient's `config` shape. + * + * @throws {@link InternalServerError} (server fault) + *

An error occurred on the server side.

+ * + * @throws {@link InvalidEndpointException} (client fault) + * + * @throws {@link DynamoDBServiceException} + *

Base exception class for all service exceptions from DynamoDB service.

+ * + * + * @public + */ +export declare class ListBackupsCommand extends ListBackupsCommand_base { + /** @internal type navigation helper, not in runtime. */ + protected static __types: { + api: { + input: ListBackupsInput; + output: ListBackupsOutput; + }; + sdk: { + input: ListBackupsCommandInput; + output: ListBackupsCommandOutput; + }; + }; +} diff --git a/amplify/functions/deleteDocument/node_modules/@aws-sdk/client-dynamodb/dist-types/commands/ListContributorInsightsCommand.d.ts b/amplify/functions/deleteDocument/node_modules/@aws-sdk/client-dynamodb/dist-types/commands/ListContributorInsightsCommand.d.ts new file mode 100644 index 0000000..9d508c7 --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@aws-sdk/client-dynamodb/dist-types/commands/ListContributorInsightsCommand.d.ts @@ -0,0 +1,89 @@ +import { Command as $Command } from "@smithy/smithy-client"; +import { MetadataBearer as __MetadataBearer } from "@smithy/types"; +import { DynamoDBClientResolvedConfig, ServiceInputTypes, ServiceOutputTypes } from "../DynamoDBClient"; +import { ListContributorInsightsInput, ListContributorInsightsOutput } from "../models/models_0"; +/** + * @public + */ +export type { __MetadataBearer }; +export { $Command }; +/** + * @public + * + * The input for {@link ListContributorInsightsCommand}. + */ +export interface ListContributorInsightsCommandInput extends ListContributorInsightsInput { +} +/** + * @public + * + * The output of {@link ListContributorInsightsCommand}. 
+ */ +export interface ListContributorInsightsCommandOutput extends ListContributorInsightsOutput, __MetadataBearer { +} +declare const ListContributorInsightsCommand_base: { + new (input: ListContributorInsightsCommandInput): import("@smithy/smithy-client").CommandImpl; + new (...[input]: [] | [ListContributorInsightsCommandInput]): import("@smithy/smithy-client").CommandImpl; + getEndpointParameterInstructions(): import("@smithy/middleware-endpoint").EndpointParameterInstructions; +}; +/** + *

Returns a list of ContributorInsightsSummary for a table and all its global secondary + * indexes.

+ * @example + * Use a bare-bones client and the command you need to make an API call. + * ```javascript + * import { DynamoDBClient, ListContributorInsightsCommand } from "@aws-sdk/client-dynamodb"; // ES Modules import + * // const { DynamoDBClient, ListContributorInsightsCommand } = require("@aws-sdk/client-dynamodb"); // CommonJS import + * const client = new DynamoDBClient(config); + * const input = { // ListContributorInsightsInput + * TableName: "STRING_VALUE", + * NextToken: "STRING_VALUE", + * MaxResults: Number("int"), + * }; + * const command = new ListContributorInsightsCommand(input); + * const response = await client.send(command); + * // { // ListContributorInsightsOutput + * // ContributorInsightsSummaries: [ // ContributorInsightsSummaries + * // { // ContributorInsightsSummary + * // TableName: "STRING_VALUE", + * // IndexName: "STRING_VALUE", + * // ContributorInsightsStatus: "ENABLING" || "ENABLED" || "DISABLING" || "DISABLED" || "FAILED", + * // }, + * // ], + * // NextToken: "STRING_VALUE", + * // }; + * + * ``` + * + * @param ListContributorInsightsCommandInput - {@link ListContributorInsightsCommandInput} + * @returns {@link ListContributorInsightsCommandOutput} + * @see {@link ListContributorInsightsCommandInput} for command's `input` shape. + * @see {@link ListContributorInsightsCommandOutput} for command's `response` shape. + * @see {@link DynamoDBClientResolvedConfig | config} for DynamoDBClient's `config` shape. + * + * @throws {@link InternalServerError} (server fault) + *

An error occurred on the server side.

+ * + * @throws {@link ResourceNotFoundException} (client fault) + *

The operation tried to access a nonexistent table or index. The resource might not + * be specified correctly, or its status might not be ACTIVE.

+ * + * @throws {@link DynamoDBServiceException} + *

Base exception class for all service exceptions from DynamoDB service.

+ * + * + * @public + */ +export declare class ListContributorInsightsCommand extends ListContributorInsightsCommand_base { + /** @internal type navigation helper, not in runtime. */ + protected static __types: { + api: { + input: ListContributorInsightsInput; + output: ListContributorInsightsOutput; + }; + sdk: { + input: ListContributorInsightsCommandInput; + output: ListContributorInsightsCommandOutput; + }; + }; +} diff --git a/amplify/functions/deleteDocument/node_modules/@aws-sdk/client-dynamodb/dist-types/commands/ListExportsCommand.d.ts b/amplify/functions/deleteDocument/node_modules/@aws-sdk/client-dynamodb/dist-types/commands/ListExportsCommand.d.ts new file mode 100644 index 0000000..83f5476 --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@aws-sdk/client-dynamodb/dist-types/commands/ListExportsCommand.d.ts @@ -0,0 +1,100 @@ +import { Command as $Command } from "@smithy/smithy-client"; +import { MetadataBearer as __MetadataBearer } from "@smithy/types"; +import { DynamoDBClientResolvedConfig, ServiceInputTypes, ServiceOutputTypes } from "../DynamoDBClient"; +import { ListExportsInput, ListExportsOutput } from "../models/models_0"; +/** + * @public + */ +export type { __MetadataBearer }; +export { $Command }; +/** + * @public + * + * The input for {@link ListExportsCommand}. + */ +export interface ListExportsCommandInput extends ListExportsInput { +} +/** + * @public + * + * The output of {@link ListExportsCommand}. + */ +export interface ListExportsCommandOutput extends ListExportsOutput, __MetadataBearer { +} +declare const ListExportsCommand_base: { + new (input: ListExportsCommandInput): import("@smithy/smithy-client").CommandImpl; + new (...[input]: [] | [ListExportsCommandInput]): import("@smithy/smithy-client").CommandImpl; + getEndpointParameterInstructions(): import("@smithy/middleware-endpoint").EndpointParameterInstructions; +}; +/** + *

Lists completed exports within the past 90 days.

+ * @example + * Use a bare-bones client and the command you need to make an API call. + * ```javascript + * import { DynamoDBClient, ListExportsCommand } from "@aws-sdk/client-dynamodb"; // ES Modules import + * // const { DynamoDBClient, ListExportsCommand } = require("@aws-sdk/client-dynamodb"); // CommonJS import + * const client = new DynamoDBClient(config); + * const input = { // ListExportsInput + * TableArn: "STRING_VALUE", + * MaxResults: Number("int"), + * NextToken: "STRING_VALUE", + * }; + * const command = new ListExportsCommand(input); + * const response = await client.send(command); + * // { // ListExportsOutput + * // ExportSummaries: [ // ExportSummaries + * // { // ExportSummary + * // ExportArn: "STRING_VALUE", + * // ExportStatus: "IN_PROGRESS" || "COMPLETED" || "FAILED", + * // ExportType: "FULL_EXPORT" || "INCREMENTAL_EXPORT", + * // }, + * // ], + * // NextToken: "STRING_VALUE", + * // }; + * + * ``` + * + * @param ListExportsCommandInput - {@link ListExportsCommandInput} + * @returns {@link ListExportsCommandOutput} + * @see {@link ListExportsCommandInput} for command's `input` shape. + * @see {@link ListExportsCommandOutput} for command's `response` shape. + * @see {@link DynamoDBClientResolvedConfig | config} for DynamoDBClient's `config` shape. + * + * @throws {@link InternalServerError} (server fault) + *

An error occurred on the server side.

+ * + * @throws {@link LimitExceededException} (client fault) + *

There is no limit to the number of daily on-demand backups that can be taken.

+ *

For most purposes, up to 500 simultaneous table operations are allowed per account. These operations + * include CreateTable, UpdateTable, + * DeleteTable,UpdateTimeToLive, + * RestoreTableFromBackup, and RestoreTableToPointInTime.

+ *

When you are creating a table with one or more secondary + * indexes, you can have up to 250 such requests running at a time. However, if the table or + * index specifications are complex, then DynamoDB might temporarily reduce the number + * of concurrent operations.

+ *

When importing into DynamoDB, up to 50 simultaneous import table operations are allowed per account.

+ *

There is a soft account quota of 2,500 tables.

+ *

GetRecords was called with a value of more than 1000 for the limit request parameter.

+ *

More than 2 processes are reading from the same streams shard at the same time. Exceeding + * this limit may result in request throttling.

+ * + * @throws {@link DynamoDBServiceException} + *

Base exception class for all service exceptions from DynamoDB service.

+ * + * + * @public + */ +export declare class ListExportsCommand extends ListExportsCommand_base { + /** @internal type navigation helper, not in runtime. */ + protected static __types: { + api: { + input: ListExportsInput; + output: ListExportsOutput; + }; + sdk: { + input: ListExportsCommandInput; + output: ListExportsCommandOutput; + }; + }; +} diff --git a/amplify/functions/deleteDocument/node_modules/@aws-sdk/client-dynamodb/dist-types/commands/ListGlobalTablesCommand.d.ts b/amplify/functions/deleteDocument/node_modules/@aws-sdk/client-dynamodb/dist-types/commands/ListGlobalTablesCommand.d.ts new file mode 100644 index 0000000..530e8e4 --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@aws-sdk/client-dynamodb/dist-types/commands/ListGlobalTablesCommand.d.ts @@ -0,0 +1,93 @@ +import { Command as $Command } from "@smithy/smithy-client"; +import { MetadataBearer as __MetadataBearer } from "@smithy/types"; +import { DynamoDBClientResolvedConfig, ServiceInputTypes, ServiceOutputTypes } from "../DynamoDBClient"; +import { ListGlobalTablesInput, ListGlobalTablesOutput } from "../models/models_0"; +/** + * @public + */ +export type { __MetadataBearer }; +export { $Command }; +/** + * @public + * + * The input for {@link ListGlobalTablesCommand}. + */ +export interface ListGlobalTablesCommandInput extends ListGlobalTablesInput { +} +/** + * @public + * + * The output of {@link ListGlobalTablesCommand}. + */ +export interface ListGlobalTablesCommandOutput extends ListGlobalTablesOutput, __MetadataBearer { +} +declare const ListGlobalTablesCommand_base: { + new (input: ListGlobalTablesCommandInput): import("@smithy/smithy-client").CommandImpl; + new (...[input]: [] | [ListGlobalTablesCommandInput]): import("@smithy/smithy-client").CommandImpl; + getEndpointParameterInstructions(): import("@smithy/middleware-endpoint").EndpointParameterInstructions; +}; +/** + *

Lists all global tables that have a replica in the specified Region.

+ * + *

This documentation is for version 2017.11.29 (Legacy) of global tables, which should be avoided for new global tables. Customers should use Global Tables version 2019.11.21 (Current) when possible, because it provides greater flexibility, higher efficiency, and consumes less write capacity than 2017.11.29 (Legacy).

+ *

To determine which version you're using, see Determining the global table version you are using. To update existing global tables from version 2017.11.29 (Legacy) to version 2019.11.21 (Current), see Upgrading global tables.

+ *
+ * @example + * Use a bare-bones client and the command you need to make an API call. + * ```javascript + * import { DynamoDBClient, ListGlobalTablesCommand } from "@aws-sdk/client-dynamodb"; // ES Modules import + * // const { DynamoDBClient, ListGlobalTablesCommand } = require("@aws-sdk/client-dynamodb"); // CommonJS import + * const client = new DynamoDBClient(config); + * const input = { // ListGlobalTablesInput + * ExclusiveStartGlobalTableName: "STRING_VALUE", + * Limit: Number("int"), + * RegionName: "STRING_VALUE", + * }; + * const command = new ListGlobalTablesCommand(input); + * const response = await client.send(command); + * // { // ListGlobalTablesOutput + * // GlobalTables: [ // GlobalTableList + * // { // GlobalTable + * // GlobalTableName: "STRING_VALUE", + * // ReplicationGroup: [ // ReplicaList + * // { // Replica + * // RegionName: "STRING_VALUE", + * // }, + * // ], + * // }, + * // ], + * // LastEvaluatedGlobalTableName: "STRING_VALUE", + * // }; + * + * ``` + * + * @param ListGlobalTablesCommandInput - {@link ListGlobalTablesCommandInput} + * @returns {@link ListGlobalTablesCommandOutput} + * @see {@link ListGlobalTablesCommandInput} for command's `input` shape. + * @see {@link ListGlobalTablesCommandOutput} for command's `response` shape. + * @see {@link DynamoDBClientResolvedConfig | config} for DynamoDBClient's `config` shape. + * + * @throws {@link InternalServerError} (server fault) + *

An error occurred on the server side.

+ * + * @throws {@link InvalidEndpointException} (client fault) + * + * @throws {@link DynamoDBServiceException} + *

Base exception class for all service exceptions from DynamoDB service.

+ * + * + * @public + */ +export declare class ListGlobalTablesCommand extends ListGlobalTablesCommand_base { + /** @internal type navigation helper, not in runtime. */ + protected static __types: { + api: { + input: ListGlobalTablesInput; + output: ListGlobalTablesOutput; + }; + sdk: { + input: ListGlobalTablesCommandInput; + output: ListGlobalTablesCommandOutput; + }; + }; +} diff --git a/amplify/functions/deleteDocument/node_modules/@aws-sdk/client-dynamodb/dist-types/commands/ListImportsCommand.d.ts b/amplify/functions/deleteDocument/node_modules/@aws-sdk/client-dynamodb/dist-types/commands/ListImportsCommand.d.ts new file mode 100644 index 0000000..be76088 --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@aws-sdk/client-dynamodb/dist-types/commands/ListImportsCommand.d.ts @@ -0,0 +1,106 @@ +import { Command as $Command } from "@smithy/smithy-client"; +import { MetadataBearer as __MetadataBearer } from "@smithy/types"; +import { DynamoDBClientResolvedConfig, ServiceInputTypes, ServiceOutputTypes } from "../DynamoDBClient"; +import { ListImportsInput, ListImportsOutput } from "../models/models_0"; +/** + * @public + */ +export type { __MetadataBearer }; +export { $Command }; +/** + * @public + * + * The input for {@link ListImportsCommand}. + */ +export interface ListImportsCommandInput extends ListImportsInput { +} +/** + * @public + * + * The output of {@link ListImportsCommand}. + */ +export interface ListImportsCommandOutput extends ListImportsOutput, __MetadataBearer { +} +declare const ListImportsCommand_base: { + new (input: ListImportsCommandInput): import("@smithy/smithy-client").CommandImpl; + new (...[input]: [] | [ListImportsCommandInput]): import("@smithy/smithy-client").CommandImpl; + getEndpointParameterInstructions(): import("@smithy/middleware-endpoint").EndpointParameterInstructions; +}; +/** + *

Lists completed imports within the past 90 days.

+ * @example + * Use a bare-bones client and the command you need to make an API call. + * ```javascript + * import { DynamoDBClient, ListImportsCommand } from "@aws-sdk/client-dynamodb"; // ES Modules import + * // const { DynamoDBClient, ListImportsCommand } = require("@aws-sdk/client-dynamodb"); // CommonJS import + * const client = new DynamoDBClient(config); + * const input = { // ListImportsInput + * TableArn: "STRING_VALUE", + * PageSize: Number("int"), + * NextToken: "STRING_VALUE", + * }; + * const command = new ListImportsCommand(input); + * const response = await client.send(command); + * // { // ListImportsOutput + * // ImportSummaryList: [ // ImportSummaryList + * // { // ImportSummary + * // ImportArn: "STRING_VALUE", + * // ImportStatus: "IN_PROGRESS" || "COMPLETED" || "CANCELLING" || "CANCELLED" || "FAILED", + * // TableArn: "STRING_VALUE", + * // S3BucketSource: { // S3BucketSource + * // S3BucketOwner: "STRING_VALUE", + * // S3Bucket: "STRING_VALUE", // required + * // S3KeyPrefix: "STRING_VALUE", + * // }, + * // CloudWatchLogGroupArn: "STRING_VALUE", + * // InputFormat: "DYNAMODB_JSON" || "ION" || "CSV", + * // StartTime: new Date("TIMESTAMP"), + * // EndTime: new Date("TIMESTAMP"), + * // }, + * // ], + * // NextToken: "STRING_VALUE", + * // }; + * + * ``` + * + * @param ListImportsCommandInput - {@link ListImportsCommandInput} + * @returns {@link ListImportsCommandOutput} + * @see {@link ListImportsCommandInput} for command's `input` shape. + * @see {@link ListImportsCommandOutput} for command's `response` shape. + * @see {@link DynamoDBClientResolvedConfig | config} for DynamoDBClient's `config` shape. + * + * @throws {@link LimitExceededException} (client fault) + *

There is no limit to the number of daily on-demand backups that can be taken.

+ *

For most purposes, up to 500 simultaneous table operations are allowed per account. These operations + * include CreateTable, UpdateTable, + * DeleteTable,UpdateTimeToLive, + * RestoreTableFromBackup, and RestoreTableToPointInTime.

+ *

When you are creating a table with one or more secondary + * indexes, you can have up to 250 such requests running at a time. However, if the table or + * index specifications are complex, then DynamoDB might temporarily reduce the number + * of concurrent operations.

+ *

When importing into DynamoDB, up to 50 simultaneous import table operations are allowed per account.

+ *

There is a soft account quota of 2,500 tables.

+ *

GetRecords was called with a value of more than 1000 for the limit request parameter.

+ *

More than 2 processes are reading from the same streams shard at the same time. Exceeding + * this limit may result in request throttling.

+ * + * @throws {@link DynamoDBServiceException} + *

Base exception class for all service exceptions from DynamoDB service.

+ * + * + * @public + */ +export declare class ListImportsCommand extends ListImportsCommand_base { + /** @internal type navigation helper, not in runtime. */ + protected static __types: { + api: { + input: ListImportsInput; + output: ListImportsOutput; + }; + sdk: { + input: ListImportsCommandInput; + output: ListImportsCommandOutput; + }; + }; +} diff --git a/amplify/functions/deleteDocument/node_modules/@aws-sdk/client-dynamodb/dist-types/commands/ListTablesCommand.d.ts b/amplify/functions/deleteDocument/node_modules/@aws-sdk/client-dynamodb/dist-types/commands/ListTablesCommand.d.ts new file mode 100644 index 0000000..394c20f --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@aws-sdk/client-dynamodb/dist-types/commands/ListTablesCommand.d.ts @@ -0,0 +1,101 @@ +import { Command as $Command } from "@smithy/smithy-client"; +import { MetadataBearer as __MetadataBearer } from "@smithy/types"; +import { DynamoDBClientResolvedConfig, ServiceInputTypes, ServiceOutputTypes } from "../DynamoDBClient"; +import { ListTablesInput, ListTablesOutput } from "../models/models_0"; +/** + * @public + */ +export type { __MetadataBearer }; +export { $Command }; +/** + * @public + * + * The input for {@link ListTablesCommand}. + */ +export interface ListTablesCommandInput extends ListTablesInput { +} +/** + * @public + * + * The output of {@link ListTablesCommand}. + */ +export interface ListTablesCommandOutput extends ListTablesOutput, __MetadataBearer { +} +declare const ListTablesCommand_base: { + new (input: ListTablesCommandInput): import("@smithy/smithy-client").CommandImpl; + new (...[input]: [] | [ListTablesCommandInput]): import("@smithy/smithy-client").CommandImpl; + getEndpointParameterInstructions(): import("@smithy/middleware-endpoint").EndpointParameterInstructions; +}; +/** + *

Returns an array of table names associated with the current account and endpoint. The + * output from ListTables is paginated, with each page returning a maximum of + * 100 table names.

+ * @example + * Use a bare-bones client and the command you need to make an API call. + * ```javascript + * import { DynamoDBClient, ListTablesCommand } from "@aws-sdk/client-dynamodb"; // ES Modules import + * // const { DynamoDBClient, ListTablesCommand } = require("@aws-sdk/client-dynamodb"); // CommonJS import + * const client = new DynamoDBClient(config); + * const input = { // ListTablesInput + * ExclusiveStartTableName: "STRING_VALUE", + * Limit: Number("int"), + * }; + * const command = new ListTablesCommand(input); + * const response = await client.send(command); + * // { // ListTablesOutput + * // TableNames: [ // TableNameList + * // "STRING_VALUE", + * // ], + * // LastEvaluatedTableName: "STRING_VALUE", + * // }; + * + * ``` + * + * @param ListTablesCommandInput - {@link ListTablesCommandInput} + * @returns {@link ListTablesCommandOutput} + * @see {@link ListTablesCommandInput} for command's `input` shape. + * @see {@link ListTablesCommandOutput} for command's `response` shape. + * @see {@link DynamoDBClientResolvedConfig | config} for DynamoDBClient's `config` shape. + * + * @throws {@link InternalServerError} (server fault) + *

An error occurred on the server side.

+ * + * @throws {@link InvalidEndpointException} (client fault) + * + * @throws {@link DynamoDBServiceException} + *

Base exception class for all service exceptions from DynamoDB service.

+ * + * + * @example To list tables + * ```javascript + * // This example lists all of the tables associated with the current AWS account and endpoint. + * const input = { /* empty *\/ }; + * const command = new ListTablesCommand(input); + * const response = await client.send(command); + * /* response is + * { + * TableNames: [ + * "Forum", + * "ProductCatalog", + * "Reply", + * "Thread" + * ] + * } + * *\/ + * ``` + * + * @public + */ +export declare class ListTablesCommand extends ListTablesCommand_base { + /** @internal type navigation helper, not in runtime. */ + protected static __types: { + api: { + input: ListTablesInput; + output: ListTablesOutput; + }; + sdk: { + input: ListTablesCommandInput; + output: ListTablesCommandOutput; + }; + }; +} diff --git a/amplify/functions/deleteDocument/node_modules/@aws-sdk/client-dynamodb/dist-types/commands/ListTagsOfResourceCommand.d.ts b/amplify/functions/deleteDocument/node_modules/@aws-sdk/client-dynamodb/dist-types/commands/ListTagsOfResourceCommand.d.ts new file mode 100644 index 0000000..c2230f4 --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@aws-sdk/client-dynamodb/dist-types/commands/ListTagsOfResourceCommand.d.ts @@ -0,0 +1,91 @@ +import { Command as $Command } from "@smithy/smithy-client"; +import { MetadataBearer as __MetadataBearer } from "@smithy/types"; +import { DynamoDBClientResolvedConfig, ServiceInputTypes, ServiceOutputTypes } from "../DynamoDBClient"; +import { ListTagsOfResourceInput, ListTagsOfResourceOutput } from "../models/models_0"; +/** + * @public + */ +export type { __MetadataBearer }; +export { $Command }; +/** + * @public + * + * The input for {@link ListTagsOfResourceCommand}. + */ +export interface ListTagsOfResourceCommandInput extends ListTagsOfResourceInput { +} +/** + * @public + * + * The output of {@link ListTagsOfResourceCommand}. 
+ */ +export interface ListTagsOfResourceCommandOutput extends ListTagsOfResourceOutput, __MetadataBearer { +} +declare const ListTagsOfResourceCommand_base: { + new (input: ListTagsOfResourceCommandInput): import("@smithy/smithy-client").CommandImpl; + new (__0_0: ListTagsOfResourceCommandInput): import("@smithy/smithy-client").CommandImpl; + getEndpointParameterInstructions(): import("@smithy/middleware-endpoint").EndpointParameterInstructions; +}; +/** + *

List all tags on an Amazon DynamoDB resource. You can call ListTagsOfResource up to 10 + * times per second, per account.

+ *

For an overview on tagging DynamoDB resources, see Tagging for DynamoDB + * in the Amazon DynamoDB Developer Guide.

+ * @example + * Use a bare-bones client and the command you need to make an API call. + * ```javascript + * import { DynamoDBClient, ListTagsOfResourceCommand } from "@aws-sdk/client-dynamodb"; // ES Modules import + * // const { DynamoDBClient, ListTagsOfResourceCommand } = require("@aws-sdk/client-dynamodb"); // CommonJS import + * const client = new DynamoDBClient(config); + * const input = { // ListTagsOfResourceInput + * ResourceArn: "STRING_VALUE", // required + * NextToken: "STRING_VALUE", + * }; + * const command = new ListTagsOfResourceCommand(input); + * const response = await client.send(command); + * // { // ListTagsOfResourceOutput + * // Tags: [ // TagList + * // { // Tag + * // Key: "STRING_VALUE", // required + * // Value: "STRING_VALUE", // required + * // }, + * // ], + * // NextToken: "STRING_VALUE", + * // }; + * + * ``` + * + * @param ListTagsOfResourceCommandInput - {@link ListTagsOfResourceCommandInput} + * @returns {@link ListTagsOfResourceCommandOutput} + * @see {@link ListTagsOfResourceCommandInput} for command's `input` shape. + * @see {@link ListTagsOfResourceCommandOutput} for command's `response` shape. + * @see {@link DynamoDBClientResolvedConfig | config} for DynamoDBClient's `config` shape. + * + * @throws {@link InternalServerError} (server fault) + *

An error occurred on the server side.

+ * + * @throws {@link InvalidEndpointException} (client fault) + * + * @throws {@link ResourceNotFoundException} (client fault) + *

The operation tried to access a nonexistent table or index. The resource might not + * be specified correctly, or its status might not be ACTIVE.

+ * + * @throws {@link DynamoDBServiceException} + *

Base exception class for all service exceptions from DynamoDB service.

+ * + * + * @public + */ +export declare class ListTagsOfResourceCommand extends ListTagsOfResourceCommand_base { + /** @internal type navigation helper, not in runtime. */ + protected static __types: { + api: { + input: ListTagsOfResourceInput; + output: ListTagsOfResourceOutput; + }; + sdk: { + input: ListTagsOfResourceCommandInput; + output: ListTagsOfResourceCommandOutput; + }; + }; +} diff --git a/amplify/functions/deleteDocument/node_modules/@aws-sdk/client-dynamodb/dist-types/commands/PutItemCommand.d.ts b/amplify/functions/deleteDocument/node_modules/@aws-sdk/client-dynamodb/dist-types/commands/PutItemCommand.d.ts new file mode 100644 index 0000000..f9e32df --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@aws-sdk/client-dynamodb/dist-types/commands/PutItemCommand.d.ts @@ -0,0 +1,300 @@ +import { Command as $Command } from "@smithy/smithy-client"; +import { MetadataBearer as __MetadataBearer } from "@smithy/types"; +import { DynamoDBClientResolvedConfig, ServiceInputTypes, ServiceOutputTypes } from "../DynamoDBClient"; +import { PutItemInput, PutItemOutput } from "../models/models_0"; +/** + * @public + */ +export type { __MetadataBearer }; +export { $Command }; +/** + * @public + * + * The input for {@link PutItemCommand}. + */ +export interface PutItemCommandInput extends PutItemInput { +} +/** + * @public + * + * The output of {@link PutItemCommand}. + */ +export interface PutItemCommandOutput extends PutItemOutput, __MetadataBearer { +} +declare const PutItemCommand_base: { + new (input: PutItemCommandInput): import("@smithy/smithy-client").CommandImpl; + new (__0_0: PutItemCommandInput): import("@smithy/smithy-client").CommandImpl; + getEndpointParameterInstructions(): import("@smithy/middleware-endpoint").EndpointParameterInstructions; +}; +/** + *

Creates a new item, or replaces an old item with a new item. If an item that has the + * same primary key as the new item already exists in the specified table, the new item + * completely replaces the existing item. You can perform a conditional put operation (add + * a new item if one with the specified primary key doesn't exist), or replace an existing + * item if it has certain attribute values. You can return the item's attribute values in + * the same operation, using the ReturnValues parameter.

+ *

When you add an item, the primary key attributes are the only required attributes.

+ *

Empty String and Binary attribute values are allowed. Attribute values of type String + * and Binary must have a length greater than zero if the attribute is used as a key + * attribute for a table or index. Set type attributes cannot be empty.

+ *

Invalid Requests with empty values will be rejected with a + * ValidationException exception.

+ * + *

To prevent a new item from replacing an existing item, use a conditional + * expression that contains the attribute_not_exists function with the + * name of the attribute being used as the partition key for the table. Since every + * record must contain that attribute, the attribute_not_exists function + * will only succeed if no matching item exists.

+ *
+ *

For more information about PutItem, see Working with + * Items in the Amazon DynamoDB Developer Guide.

+ * @example + * Use a bare-bones client and the command you need to make an API call. + * ```javascript + * import { DynamoDBClient, PutItemCommand } from "@aws-sdk/client-dynamodb"; // ES Modules import + * // const { DynamoDBClient, PutItemCommand } = require("@aws-sdk/client-dynamodb"); // CommonJS import + * const client = new DynamoDBClient(config); + * const input = { // PutItemInput + * TableName: "STRING_VALUE", // required + * Item: { // PutItemInputAttributeMap // required + * "": { // AttributeValue Union: only one key present + * S: "STRING_VALUE", + * N: "STRING_VALUE", + * B: new Uint8Array(), // e.g. Buffer.from("") or new TextEncoder().encode("") + * SS: [ // StringSetAttributeValue + * "STRING_VALUE", + * ], + * NS: [ // NumberSetAttributeValue + * "STRING_VALUE", + * ], + * BS: [ // BinarySetAttributeValue + * new Uint8Array(), // e.g. Buffer.from("") or new TextEncoder().encode("") + * ], + * M: { // MapAttributeValue + * "": {// Union: only one key present + * S: "STRING_VALUE", + * N: "STRING_VALUE", + * B: new Uint8Array(), // e.g. Buffer.from("") or new TextEncoder().encode("") + * SS: [ + * "STRING_VALUE", + * ], + * NS: [ + * "STRING_VALUE", + * ], + * BS: [ + * new Uint8Array(), // e.g. 
Buffer.from("") or new TextEncoder().encode("") + * ], + * M: { + * "": "", + * }, + * L: [ // ListAttributeValue + * "", + * ], + * NULL: true || false, + * BOOL: true || false, + * }, + * }, + * L: [ + * "", + * ], + * NULL: true || false, + * BOOL: true || false, + * }, + * }, + * Expected: { // ExpectedAttributeMap + * "": { // ExpectedAttributeValue + * Value: "", + * Exists: true || false, + * ComparisonOperator: "EQ" || "NE" || "IN" || "LE" || "LT" || "GE" || "GT" || "BETWEEN" || "NOT_NULL" || "NULL" || "CONTAINS" || "NOT_CONTAINS" || "BEGINS_WITH", + * AttributeValueList: [ // AttributeValueList + * "", + * ], + * }, + * }, + * ReturnValues: "NONE" || "ALL_OLD" || "UPDATED_OLD" || "ALL_NEW" || "UPDATED_NEW", + * ReturnConsumedCapacity: "INDEXES" || "TOTAL" || "NONE", + * ReturnItemCollectionMetrics: "SIZE" || "NONE", + * ConditionalOperator: "AND" || "OR", + * ConditionExpression: "STRING_VALUE", + * ExpressionAttributeNames: { // ExpressionAttributeNameMap + * "": "STRING_VALUE", + * }, + * ExpressionAttributeValues: { // ExpressionAttributeValueMap + * "": "", + * }, + * ReturnValuesOnConditionCheckFailure: "ALL_OLD" || "NONE", + * }; + * const command = new PutItemCommand(input); + * const response = await client.send(command); + * // { // PutItemOutput + * // Attributes: { // AttributeMap + * // "": { // AttributeValue Union: only one key present + * // S: "STRING_VALUE", + * // N: "STRING_VALUE", + * // B: new Uint8Array(), + * // SS: [ // StringSetAttributeValue + * // "STRING_VALUE", + * // ], + * // NS: [ // NumberSetAttributeValue + * // "STRING_VALUE", + * // ], + * // BS: [ // BinarySetAttributeValue + * // new Uint8Array(), + * // ], + * // M: { // MapAttributeValue + * // "": {// Union: only one key present + * // S: "STRING_VALUE", + * // N: "STRING_VALUE", + * // B: new Uint8Array(), + * // SS: [ + * // "STRING_VALUE", + * // ], + * // NS: [ + * // "STRING_VALUE", + * // ], + * // BS: [ + * // new Uint8Array(), + * // ], + * // M: { + * // 
"": "", + * // }, + * // L: [ // ListAttributeValue + * // "", + * // ], + * // NULL: true || false, + * // BOOL: true || false, + * // }, + * // }, + * // L: [ + * // "", + * // ], + * // NULL: true || false, + * // BOOL: true || false, + * // }, + * // }, + * // ConsumedCapacity: { // ConsumedCapacity + * // TableName: "STRING_VALUE", + * // CapacityUnits: Number("double"), + * // ReadCapacityUnits: Number("double"), + * // WriteCapacityUnits: Number("double"), + * // Table: { // Capacity + * // ReadCapacityUnits: Number("double"), + * // WriteCapacityUnits: Number("double"), + * // CapacityUnits: Number("double"), + * // }, + * // LocalSecondaryIndexes: { // SecondaryIndexesCapacityMap + * // "": { + * // ReadCapacityUnits: Number("double"), + * // WriteCapacityUnits: Number("double"), + * // CapacityUnits: Number("double"), + * // }, + * // }, + * // GlobalSecondaryIndexes: { + * // "": { + * // ReadCapacityUnits: Number("double"), + * // WriteCapacityUnits: Number("double"), + * // CapacityUnits: Number("double"), + * // }, + * // }, + * // }, + * // ItemCollectionMetrics: { // ItemCollectionMetrics + * // ItemCollectionKey: { // ItemCollectionKeyAttributeMap + * // "": "", + * // }, + * // SizeEstimateRangeGB: [ // ItemCollectionSizeEstimateRange + * // Number("double"), + * // ], + * // }, + * // }; + * + * ``` + * + * @param PutItemCommandInput - {@link PutItemCommandInput} + * @returns {@link PutItemCommandOutput} + * @see {@link PutItemCommandInput} for command's `input` shape. + * @see {@link PutItemCommandOutput} for command's `response` shape. + * @see {@link DynamoDBClientResolvedConfig | config} for DynamoDBClient's `config` shape. + * + * @throws {@link ConditionalCheckFailedException} (client fault) + *

A condition specified in the operation failed to be evaluated.

+ * + * @throws {@link InternalServerError} (server fault) + *

An error occurred on the server side.

+ * + * @throws {@link InvalidEndpointException} (client fault) + * + * @throws {@link ItemCollectionSizeLimitExceededException} (client fault) + *

An item collection is too large. This exception is only returned for tables that + * have one or more local secondary indexes.

+ * + * @throws {@link ProvisionedThroughputExceededException} (client fault) + *

Your request rate is too high. The Amazon Web Services SDKs for DynamoDB + * automatically retry requests that receive this exception. Your request is eventually + * successful, unless your retry queue is too large to finish. Reduce the frequency of + * requests and use exponential backoff. For more information, go to Error Retries and Exponential Backoff in the Amazon DynamoDB Developer Guide.

+ * + * @throws {@link ReplicatedWriteConflictException} (client fault) + *

The request was rejected because one or more items in the request are being modified by a request in another Region.

+ * + * @throws {@link RequestLimitExceeded} (client fault) + *

Throughput exceeds the current throughput quota for your account. Please contact + * Amazon Web ServicesSupport to request a + * quota increase.

+ * + * @throws {@link ResourceNotFoundException} (client fault) + *

The operation tried to access a nonexistent table or index. The resource might not + * be specified correctly, or its status might not be ACTIVE.

+ * + * @throws {@link TransactionConflictException} (client fault) + *

Operation was rejected because there is an ongoing transaction for the + * item.

+ * + * @throws {@link DynamoDBServiceException} + *

Base exception class for all service exceptions from DynamoDB service.

+ * + * + * @example To add an item to a table + * ```javascript + * // This example adds a new item to the Music table. + * const input = { + * Item: { + * AlbumTitle: { + * S: "Somewhat Famous" + * }, + * Artist: { + * S: "No One You Know" + * }, + * SongTitle: { + * S: "Call Me Today" + * } + * }, + * ReturnConsumedCapacity: "TOTAL", + * TableName: "Music" + * }; + * const command = new PutItemCommand(input); + * const response = await client.send(command); + * /* response is + * { + * ConsumedCapacity: { + * CapacityUnits: 1, + * TableName: "Music" + * } + * } + * *\/ + * ``` + * + * @public + */ +export declare class PutItemCommand extends PutItemCommand_base { + /** @internal type navigation helper, not in runtime. */ + protected static __types: { + api: { + input: PutItemInput; + output: PutItemOutput; + }; + sdk: { + input: PutItemCommandInput; + output: PutItemCommandOutput; + }; + }; +} diff --git a/amplify/functions/deleteDocument/node_modules/@aws-sdk/client-dynamodb/dist-types/commands/PutResourcePolicyCommand.d.ts b/amplify/functions/deleteDocument/node_modules/@aws-sdk/client-dynamodb/dist-types/commands/PutResourcePolicyCommand.d.ts new file mode 100644 index 0000000..e222271 --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@aws-sdk/client-dynamodb/dist-types/commands/PutResourcePolicyCommand.d.ts @@ -0,0 +1,140 @@ +import { Command as $Command } from "@smithy/smithy-client"; +import { MetadataBearer as __MetadataBearer } from "@smithy/types"; +import { DynamoDBClientResolvedConfig, ServiceInputTypes, ServiceOutputTypes } from "../DynamoDBClient"; +import { PutResourcePolicyInput, PutResourcePolicyOutput } from "../models/models_0"; +/** + * @public + */ +export type { __MetadataBearer }; +export { $Command }; +/** + * @public + * + * The input for {@link PutResourcePolicyCommand}. 
+ */ +export interface PutResourcePolicyCommandInput extends PutResourcePolicyInput { +} +/** + * @public + * + * The output of {@link PutResourcePolicyCommand}. + */ +export interface PutResourcePolicyCommandOutput extends PutResourcePolicyOutput, __MetadataBearer { +} +declare const PutResourcePolicyCommand_base: { + new (input: PutResourcePolicyCommandInput): import("@smithy/smithy-client").CommandImpl; + new (__0_0: PutResourcePolicyCommandInput): import("@smithy/smithy-client").CommandImpl; + getEndpointParameterInstructions(): import("@smithy/middleware-endpoint").EndpointParameterInstructions; +}; +/** + *

Attaches a resource-based policy document to the resource, which can be a table or + * stream. When you attach a resource-based policy using this API, the policy application + * is + * eventually consistent + * .

+ *

+ * PutResourcePolicy is an idempotent operation; running it multiple times + * on the same resource using the same policy document will return the same revision ID. If + * you specify an ExpectedRevisionId that doesn't match the current policy's + * RevisionId, the PolicyNotFoundException will be + * returned.

+ * + *

+ * PutResourcePolicy is an asynchronous operation. If you issue a + * GetResourcePolicy request immediately after a + * PutResourcePolicy request, DynamoDB might return your + * previous policy, if there was one, or return the + * PolicyNotFoundException. This is because + * GetResourcePolicy uses an eventually consistent query, and the + * metadata for your policy or table might not be available at that moment. Wait for a + * few seconds, and then try the GetResourcePolicy request again.

+ *
+ * @example + * Use a bare-bones client and the command you need to make an API call. + * ```javascript + * import { DynamoDBClient, PutResourcePolicyCommand } from "@aws-sdk/client-dynamodb"; // ES Modules import + * // const { DynamoDBClient, PutResourcePolicyCommand } = require("@aws-sdk/client-dynamodb"); // CommonJS import + * const client = new DynamoDBClient(config); + * const input = { // PutResourcePolicyInput + * ResourceArn: "STRING_VALUE", // required + * Policy: "STRING_VALUE", // required + * ExpectedRevisionId: "STRING_VALUE", + * ConfirmRemoveSelfResourceAccess: true || false, + * }; + * const command = new PutResourcePolicyCommand(input); + * const response = await client.send(command); + * // { // PutResourcePolicyOutput + * // RevisionId: "STRING_VALUE", + * // }; + * + * ``` + * + * @param PutResourcePolicyCommandInput - {@link PutResourcePolicyCommandInput} + * @returns {@link PutResourcePolicyCommandOutput} + * @see {@link PutResourcePolicyCommandInput} for command's `input` shape. + * @see {@link PutResourcePolicyCommandOutput} for command's `response` shape. + * @see {@link DynamoDBClientResolvedConfig | config} for DynamoDBClient's `config` shape. + * + * @throws {@link InternalServerError} (server fault) + *

An error occurred on the server side.

+ * + * @throws {@link InvalidEndpointException} (client fault) + * + * @throws {@link LimitExceededException} (client fault) + *

There is no limit to the number of daily on-demand backups that can be taken.

+ *

For most purposes, up to 500 simultaneous table operations are allowed per account. These operations + * include CreateTable, UpdateTable, + * DeleteTable,UpdateTimeToLive, + * RestoreTableFromBackup, and RestoreTableToPointInTime.

+ *

When you are creating a table with one or more secondary + * indexes, you can have up to 250 such requests running at a time. However, if the table or + * index specifications are complex, then DynamoDB might temporarily reduce the number + * of concurrent operations.

+ *

When importing into DynamoDB, up to 50 simultaneous import table operations are allowed per account.

+ *

There is a soft account quota of 2,500 tables.

+ *

GetRecords was called with a value of more than 1000 for the limit request parameter.

+ *

More than 2 processes are reading from the same streams shard at the same time. Exceeding + * this limit may result in request throttling.

+ * + * @throws {@link PolicyNotFoundException} (client fault) + *

The operation tried to access a nonexistent resource-based policy.

+ *

If you specified an ExpectedRevisionId, it's possible that a policy is present for the resource but its revision ID didn't match the expected value.

+ * + * @throws {@link ResourceInUseException} (client fault) + *

The operation conflicts with the resource's availability. For example:

+ *
    + *
  • + *

    You attempted to recreate an existing table.

    + *
  • + *
  • + *

    You tried to delete a table currently in the CREATING state.

    + *
  • + *
  • + *

    You tried to update a resource that was already being updated.

    + *
  • + *
+ *

When appropriate, wait for the ongoing update to complete and attempt the request again.

+ * + * @throws {@link ResourceNotFoundException} (client fault) + *

The operation tried to access a nonexistent table or index. The resource might not + * be specified correctly, or its status might not be ACTIVE.

+ * + * @throws {@link DynamoDBServiceException} + *

Base exception class for all service exceptions from DynamoDB service.

+ * + * + * @public + */ +export declare class PutResourcePolicyCommand extends PutResourcePolicyCommand_base { + /** @internal type navigation helper, not in runtime. */ + protected static __types: { + api: { + input: PutResourcePolicyInput; + output: PutResourcePolicyOutput; + }; + sdk: { + input: PutResourcePolicyCommandInput; + output: PutResourcePolicyCommandOutput; + }; + }; +} diff --git a/amplify/functions/deleteDocument/node_modules/@aws-sdk/client-dynamodb/dist-types/commands/QueryCommand.d.ts b/amplify/functions/deleteDocument/node_modules/@aws-sdk/client-dynamodb/dist-types/commands/QueryCommand.d.ts new file mode 100644 index 0000000..bac7977 --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@aws-sdk/client-dynamodb/dist-types/commands/QueryCommand.d.ts @@ -0,0 +1,329 @@ +import { Command as $Command } from "@smithy/smithy-client"; +import { MetadataBearer as __MetadataBearer } from "@smithy/types"; +import { DynamoDBClientResolvedConfig, ServiceInputTypes, ServiceOutputTypes } from "../DynamoDBClient"; +import { QueryInput, QueryOutput } from "../models/models_0"; +/** + * @public + */ +export type { __MetadataBearer }; +export { $Command }; +/** + * @public + * + * The input for {@link QueryCommand}. + */ +export interface QueryCommandInput extends QueryInput { +} +/** + * @public + * + * The output of {@link QueryCommand}. + */ +export interface QueryCommandOutput extends QueryOutput, __MetadataBearer { +} +declare const QueryCommand_base: { + new (input: QueryCommandInput): import("@smithy/smithy-client").CommandImpl; + new (__0_0: QueryCommandInput): import("@smithy/smithy-client").CommandImpl; + getEndpointParameterInstructions(): import("@smithy/middleware-endpoint").EndpointParameterInstructions; +}; +/** + *

You must provide the name of the partition key attribute and a single value for that + * attribute. Query returns all items with that partition key value. + * Optionally, you can provide a sort key attribute and use a comparison operator to refine + * the search results.

+ *

Use the KeyConditionExpression parameter to provide a specific value for + * the partition key. The Query operation will return all of the items from + * the table or index with that partition key value. You can optionally narrow the scope of + * the Query operation by specifying a sort key value and a comparison + * operator in KeyConditionExpression. To further refine the + * Query results, you can optionally provide a + * FilterExpression. A FilterExpression determines which + * items within the results should be returned to you. All of the other results are + * discarded.

+ *

A Query operation always returns a result set. If no matching items are + * found, the result set will be empty. Queries that do not return results consume the + * minimum number of read capacity units for that type of read operation.

+ * + *

DynamoDB calculates the number of read capacity units consumed based on item + * size, not on the amount of data that is returned to an application. The number of + * capacity units consumed will be the same whether you request all of the attributes + * (the default behavior) or just some of them (using a projection expression). The + * number will also be the same whether or not you use a FilterExpression. + *

+ *
+ *

+ * Query results are always sorted by the sort key value. If the data type of + * the sort key is Number, the results are returned in numeric order; otherwise, the + * results are returned in order of UTF-8 bytes. By default, the sort order is ascending. + * To reverse the order, set the ScanIndexForward parameter to false.

+ *

A single Query operation will read up to the maximum number of items set + * (if using the Limit parameter) or a maximum of 1 MB of data and then apply + * any filtering to the results using FilterExpression. If + * LastEvaluatedKey is present in the response, you will need to paginate + * the result set. For more information, see Paginating + * the Results in the Amazon DynamoDB Developer Guide.

+ *

+ * FilterExpression is applied after a Query finishes, but before + * the results are returned. A FilterExpression cannot contain partition key + * or sort key attributes. You need to specify those attributes in the + * KeyConditionExpression.

+ * + *

A Query operation can return an empty result set and a + * LastEvaluatedKey if all the items read for the page of results are + * filtered out.

+ *
+ *

You can query a table, a local secondary index, or a global secondary index. For a + * query on a table or on a local secondary index, you can set the + * ConsistentRead parameter to true and obtain a strongly + * consistent result. Global secondary indexes support eventually consistent reads only, so + * do not specify ConsistentRead when querying a global secondary + * index.

+ * @example + * Use a bare-bones client and the command you need to make an API call. + * ```javascript + * import { DynamoDBClient, QueryCommand } from "@aws-sdk/client-dynamodb"; // ES Modules import + * // const { DynamoDBClient, QueryCommand } = require("@aws-sdk/client-dynamodb"); // CommonJS import + * const client = new DynamoDBClient(config); + * const input = { // QueryInput + * TableName: "STRING_VALUE", // required + * IndexName: "STRING_VALUE", + * Select: "ALL_ATTRIBUTES" || "ALL_PROJECTED_ATTRIBUTES" || "SPECIFIC_ATTRIBUTES" || "COUNT", + * AttributesToGet: [ // AttributeNameList + * "STRING_VALUE", + * ], + * Limit: Number("int"), + * ConsistentRead: true || false, + * KeyConditions: { // KeyConditions + * "": { // Condition + * AttributeValueList: [ // AttributeValueList + * { // AttributeValue Union: only one key present + * S: "STRING_VALUE", + * N: "STRING_VALUE", + * B: new Uint8Array(), // e.g. Buffer.from("") or new TextEncoder().encode("") + * SS: [ // StringSetAttributeValue + * "STRING_VALUE", + * ], + * NS: [ // NumberSetAttributeValue + * "STRING_VALUE", + * ], + * BS: [ // BinarySetAttributeValue + * new Uint8Array(), // e.g. Buffer.from("") or new TextEncoder().encode("") + * ], + * M: { // MapAttributeValue + * "": {// Union: only one key present + * S: "STRING_VALUE", + * N: "STRING_VALUE", + * B: new Uint8Array(), // e.g. Buffer.from("") or new TextEncoder().encode("") + * SS: [ + * "STRING_VALUE", + * ], + * NS: [ + * "STRING_VALUE", + * ], + * BS: [ + * new Uint8Array(), // e.g. 
Buffer.from("") or new TextEncoder().encode("") + * ], + * M: { + * "": "", + * }, + * L: [ // ListAttributeValue + * "", + * ], + * NULL: true || false, + * BOOL: true || false, + * }, + * }, + * L: [ + * "", + * ], + * NULL: true || false, + * BOOL: true || false, + * }, + * ], + * ComparisonOperator: "EQ" || "NE" || "IN" || "LE" || "LT" || "GE" || "GT" || "BETWEEN" || "NOT_NULL" || "NULL" || "CONTAINS" || "NOT_CONTAINS" || "BEGINS_WITH", // required + * }, + * }, + * QueryFilter: { // FilterConditionMap + * "": { + * AttributeValueList: [ + * "", + * ], + * ComparisonOperator: "EQ" || "NE" || "IN" || "LE" || "LT" || "GE" || "GT" || "BETWEEN" || "NOT_NULL" || "NULL" || "CONTAINS" || "NOT_CONTAINS" || "BEGINS_WITH", // required + * }, + * }, + * ConditionalOperator: "AND" || "OR", + * ScanIndexForward: true || false, + * ExclusiveStartKey: { // Key + * "": "", + * }, + * ReturnConsumedCapacity: "INDEXES" || "TOTAL" || "NONE", + * ProjectionExpression: "STRING_VALUE", + * FilterExpression: "STRING_VALUE", + * KeyConditionExpression: "STRING_VALUE", + * ExpressionAttributeNames: { // ExpressionAttributeNameMap + * "": "STRING_VALUE", + * }, + * ExpressionAttributeValues: { // ExpressionAttributeValueMap + * "": "", + * }, + * }; + * const command = new QueryCommand(input); + * const response = await client.send(command); + * // { // QueryOutput + * // Items: [ // ItemList + * // { // AttributeMap + * // "": { // AttributeValue Union: only one key present + * // S: "STRING_VALUE", + * // N: "STRING_VALUE", + * // B: new Uint8Array(), + * // SS: [ // StringSetAttributeValue + * // "STRING_VALUE", + * // ], + * // NS: [ // NumberSetAttributeValue + * // "STRING_VALUE", + * // ], + * // BS: [ // BinarySetAttributeValue + * // new Uint8Array(), + * // ], + * // M: { // MapAttributeValue + * // "": {// Union: only one key present + * // S: "STRING_VALUE", + * // N: "STRING_VALUE", + * // B: new Uint8Array(), + * // SS: [ + * // "STRING_VALUE", + * // ], + * // NS: [ + * 
// "STRING_VALUE", + * // ], + * // BS: [ + * // new Uint8Array(), + * // ], + * // M: { + * // "": "", + * // }, + * // L: [ // ListAttributeValue + * // "", + * // ], + * // NULL: true || false, + * // BOOL: true || false, + * // }, + * // }, + * // L: [ + * // "", + * // ], + * // NULL: true || false, + * // BOOL: true || false, + * // }, + * // }, + * // ], + * // Count: Number("int"), + * // ScannedCount: Number("int"), + * // LastEvaluatedKey: { // Key + * // "": "", + * // }, + * // ConsumedCapacity: { // ConsumedCapacity + * // TableName: "STRING_VALUE", + * // CapacityUnits: Number("double"), + * // ReadCapacityUnits: Number("double"), + * // WriteCapacityUnits: Number("double"), + * // Table: { // Capacity + * // ReadCapacityUnits: Number("double"), + * // WriteCapacityUnits: Number("double"), + * // CapacityUnits: Number("double"), + * // }, + * // LocalSecondaryIndexes: { // SecondaryIndexesCapacityMap + * // "": { + * // ReadCapacityUnits: Number("double"), + * // WriteCapacityUnits: Number("double"), + * // CapacityUnits: Number("double"), + * // }, + * // }, + * // GlobalSecondaryIndexes: { + * // "": { + * // ReadCapacityUnits: Number("double"), + * // WriteCapacityUnits: Number("double"), + * // CapacityUnits: Number("double"), + * // }, + * // }, + * // }, + * // }; + * + * ``` + * + * @param QueryCommandInput - {@link QueryCommandInput} + * @returns {@link QueryCommandOutput} + * @see {@link QueryCommandInput} for command's `input` shape. + * @see {@link QueryCommandOutput} for command's `response` shape. + * @see {@link DynamoDBClientResolvedConfig | config} for DynamoDBClient's `config` shape. + * + * @throws {@link InternalServerError} (server fault) + *

An error occurred on the server side.

+ * + * @throws {@link InvalidEndpointException} (client fault) + * + * @throws {@link ProvisionedThroughputExceededException} (client fault) + *

Your request rate is too high. The Amazon Web Services SDKs for DynamoDB + * automatically retry requests that receive this exception. Your request is eventually + * successful, unless your retry queue is too large to finish. Reduce the frequency of + * requests and use exponential backoff. For more information, go to Error Retries and Exponential Backoff in the Amazon DynamoDB Developer Guide.

+ * + * @throws {@link RequestLimitExceeded} (client fault) + *

Throughput exceeds the current throughput quota for your account. Please contact + * Amazon Web ServicesSupport to request a + * quota increase.

+ * + * @throws {@link ResourceNotFoundException} (client fault) + *

The operation tried to access a nonexistent table or index. The resource might not + * be specified correctly, or its status might not be ACTIVE.

+ * + * @throws {@link DynamoDBServiceException} + *

Base exception class for all service exceptions from DynamoDB service.

+ * + * + * @example To query an item + * ```javascript + * // This example queries items in the Music table. The table has a partition key and sort key (Artist and SongTitle), but this query only specifies the partition key value. It returns song titles by the artist named "No One You Know". + * const input = { + * ExpressionAttributeValues: { + * :v1: { + * S: "No One You Know" + * } + * }, + * KeyConditionExpression: "Artist = :v1", + * ProjectionExpression: "SongTitle", + * TableName: "Music" + * }; + * const command = new QueryCommand(input); + * const response = await client.send(command); + * /* response is + * { + * ConsumedCapacity: { /* empty *\/ }, + * Count: 2, + * Items: [ + * { + * SongTitle: { + * S: "Call Me Today" + * } + * } + * ], + * ScannedCount: 2 + * } + * *\/ + * ``` + * + * @public + */ +export declare class QueryCommand extends QueryCommand_base { + /** @internal type navigation helper, not in runtime. */ + protected static __types: { + api: { + input: QueryInput; + output: QueryOutput; + }; + sdk: { + input: QueryCommandInput; + output: QueryCommandOutput; + }; + }; +} diff --git a/amplify/functions/deleteDocument/node_modules/@aws-sdk/client-dynamodb/dist-types/commands/RestoreTableFromBackupCommand.d.ts b/amplify/functions/deleteDocument/node_modules/@aws-sdk/client-dynamodb/dist-types/commands/RestoreTableFromBackupCommand.d.ts new file mode 100644 index 0000000..495b072 --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@aws-sdk/client-dynamodb/dist-types/commands/RestoreTableFromBackupCommand.d.ts @@ -0,0 +1,361 @@ +import { Command as $Command } from "@smithy/smithy-client"; +import { MetadataBearer as __MetadataBearer } from "@smithy/types"; +import { DynamoDBClientResolvedConfig, ServiceInputTypes, ServiceOutputTypes } from "../DynamoDBClient"; +import { RestoreTableFromBackupInput, RestoreTableFromBackupOutput } from "../models/models_0"; +/** + * @public + */ +export type { __MetadataBearer }; +export { $Command 
}; +/** + * @public + * + * The input for {@link RestoreTableFromBackupCommand}. + */ +export interface RestoreTableFromBackupCommandInput extends RestoreTableFromBackupInput { +} +/** + * @public + * + * The output of {@link RestoreTableFromBackupCommand}. + */ +export interface RestoreTableFromBackupCommandOutput extends RestoreTableFromBackupOutput, __MetadataBearer { +} +declare const RestoreTableFromBackupCommand_base: { + new (input: RestoreTableFromBackupCommandInput): import("@smithy/smithy-client").CommandImpl; + new (__0_0: RestoreTableFromBackupCommandInput): import("@smithy/smithy-client").CommandImpl; + getEndpointParameterInstructions(): import("@smithy/middleware-endpoint").EndpointParameterInstructions; +}; +/** + *

Creates a new table from an existing backup. Any number of users can execute up to 50 + * concurrent restores (any type of restore) in a given account.

+ *

You can call RestoreTableFromBackup at a maximum rate of 10 times per + * second.

+ *

You must manually set up the following on the restored table:

+ *
    + *
  • + *

    Auto scaling policies

    + *
  • + *
  • + *

    IAM policies

    + *
  • + *
  • + *

    Amazon CloudWatch metrics and alarms

    + *
  • + *
  • + *

    Tags

    + *
  • + *
  • + *

    Stream settings

    + *
  • + *
  • + *

    Time to Live (TTL) settings

    + *
  • + *
+ * @example + * Use a bare-bones client and the command you need to make an API call. + * ```javascript + * import { DynamoDBClient, RestoreTableFromBackupCommand } from "@aws-sdk/client-dynamodb"; // ES Modules import + * // const { DynamoDBClient, RestoreTableFromBackupCommand } = require("@aws-sdk/client-dynamodb"); // CommonJS import + * const client = new DynamoDBClient(config); + * const input = { // RestoreTableFromBackupInput + * TargetTableName: "STRING_VALUE", // required + * BackupArn: "STRING_VALUE", // required + * BillingModeOverride: "PROVISIONED" || "PAY_PER_REQUEST", + * GlobalSecondaryIndexOverride: [ // GlobalSecondaryIndexList + * { // GlobalSecondaryIndex + * IndexName: "STRING_VALUE", // required + * KeySchema: [ // KeySchema // required + * { // KeySchemaElement + * AttributeName: "STRING_VALUE", // required + * KeyType: "HASH" || "RANGE", // required + * }, + * ], + * Projection: { // Projection + * ProjectionType: "ALL" || "KEYS_ONLY" || "INCLUDE", + * NonKeyAttributes: [ // NonKeyAttributeNameList + * "STRING_VALUE", + * ], + * }, + * ProvisionedThroughput: { // ProvisionedThroughput + * ReadCapacityUnits: Number("long"), // required + * WriteCapacityUnits: Number("long"), // required + * }, + * OnDemandThroughput: { // OnDemandThroughput + * MaxReadRequestUnits: Number("long"), + * MaxWriteRequestUnits: Number("long"), + * }, + * WarmThroughput: { // WarmThroughput + * ReadUnitsPerSecond: Number("long"), + * WriteUnitsPerSecond: Number("long"), + * }, + * }, + * ], + * LocalSecondaryIndexOverride: [ // LocalSecondaryIndexList + * { // LocalSecondaryIndex + * IndexName: "STRING_VALUE", // required + * KeySchema: [ // required + * { + * AttributeName: "STRING_VALUE", // required + * KeyType: "HASH" || "RANGE", // required + * }, + * ], + * Projection: { + * ProjectionType: "ALL" || "KEYS_ONLY" || "INCLUDE", + * NonKeyAttributes: [ + * "STRING_VALUE", + * ], + * }, + * }, + * ], + * ProvisionedThroughputOverride: { + * ReadCapacityUnits: 
Number("long"), // required + * WriteCapacityUnits: Number("long"), // required + * }, + * OnDemandThroughputOverride: { + * MaxReadRequestUnits: Number("long"), + * MaxWriteRequestUnits: Number("long"), + * }, + * SSESpecificationOverride: { // SSESpecification + * Enabled: true || false, + * SSEType: "AES256" || "KMS", + * KMSMasterKeyId: "STRING_VALUE", + * }, + * }; + * const command = new RestoreTableFromBackupCommand(input); + * const response = await client.send(command); + * // { // RestoreTableFromBackupOutput + * // TableDescription: { // TableDescription + * // AttributeDefinitions: [ // AttributeDefinitions + * // { // AttributeDefinition + * // AttributeName: "STRING_VALUE", // required + * // AttributeType: "S" || "N" || "B", // required + * // }, + * // ], + * // TableName: "STRING_VALUE", + * // KeySchema: [ // KeySchema + * // { // KeySchemaElement + * // AttributeName: "STRING_VALUE", // required + * // KeyType: "HASH" || "RANGE", // required + * // }, + * // ], + * // TableStatus: "CREATING" || "UPDATING" || "DELETING" || "ACTIVE" || "INACCESSIBLE_ENCRYPTION_CREDENTIALS" || "ARCHIVING" || "ARCHIVED", + * // CreationDateTime: new Date("TIMESTAMP"), + * // ProvisionedThroughput: { // ProvisionedThroughputDescription + * // LastIncreaseDateTime: new Date("TIMESTAMP"), + * // LastDecreaseDateTime: new Date("TIMESTAMP"), + * // NumberOfDecreasesToday: Number("long"), + * // ReadCapacityUnits: Number("long"), + * // WriteCapacityUnits: Number("long"), + * // }, + * // TableSizeBytes: Number("long"), + * // ItemCount: Number("long"), + * // TableArn: "STRING_VALUE", + * // TableId: "STRING_VALUE", + * // BillingModeSummary: { // BillingModeSummary + * // BillingMode: "PROVISIONED" || "PAY_PER_REQUEST", + * // LastUpdateToPayPerRequestDateTime: new Date("TIMESTAMP"), + * // }, + * // LocalSecondaryIndexes: [ // LocalSecondaryIndexDescriptionList + * // { // LocalSecondaryIndexDescription + * // IndexName: "STRING_VALUE", + * // KeySchema: [ + * // { + * 
// AttributeName: "STRING_VALUE", // required + * // KeyType: "HASH" || "RANGE", // required + * // }, + * // ], + * // Projection: { // Projection + * // ProjectionType: "ALL" || "KEYS_ONLY" || "INCLUDE", + * // NonKeyAttributes: [ // NonKeyAttributeNameList + * // "STRING_VALUE", + * // ], + * // }, + * // IndexSizeBytes: Number("long"), + * // ItemCount: Number("long"), + * // IndexArn: "STRING_VALUE", + * // }, + * // ], + * // GlobalSecondaryIndexes: [ // GlobalSecondaryIndexDescriptionList + * // { // GlobalSecondaryIndexDescription + * // IndexName: "STRING_VALUE", + * // KeySchema: [ + * // { + * // AttributeName: "STRING_VALUE", // required + * // KeyType: "HASH" || "RANGE", // required + * // }, + * // ], + * // Projection: { + * // ProjectionType: "ALL" || "KEYS_ONLY" || "INCLUDE", + * // NonKeyAttributes: [ + * // "STRING_VALUE", + * // ], + * // }, + * // IndexStatus: "CREATING" || "UPDATING" || "DELETING" || "ACTIVE", + * // Backfilling: true || false, + * // ProvisionedThroughput: { + * // LastIncreaseDateTime: new Date("TIMESTAMP"), + * // LastDecreaseDateTime: new Date("TIMESTAMP"), + * // NumberOfDecreasesToday: Number("long"), + * // ReadCapacityUnits: Number("long"), + * // WriteCapacityUnits: Number("long"), + * // }, + * // IndexSizeBytes: Number("long"), + * // ItemCount: Number("long"), + * // IndexArn: "STRING_VALUE", + * // OnDemandThroughput: { // OnDemandThroughput + * // MaxReadRequestUnits: Number("long"), + * // MaxWriteRequestUnits: Number("long"), + * // }, + * // WarmThroughput: { // GlobalSecondaryIndexWarmThroughputDescription + * // ReadUnitsPerSecond: Number("long"), + * // WriteUnitsPerSecond: Number("long"), + * // Status: "CREATING" || "UPDATING" || "DELETING" || "ACTIVE", + * // }, + * // }, + * // ], + * // StreamSpecification: { // StreamSpecification + * // StreamEnabled: true || false, // required + * // StreamViewType: "NEW_IMAGE" || "OLD_IMAGE" || "NEW_AND_OLD_IMAGES" || "KEYS_ONLY", + * // }, + * // 
LatestStreamLabel: "STRING_VALUE", + * // LatestStreamArn: "STRING_VALUE", + * // GlobalTableVersion: "STRING_VALUE", + * // Replicas: [ // ReplicaDescriptionList + * // { // ReplicaDescription + * // RegionName: "STRING_VALUE", + * // ReplicaStatus: "CREATING" || "CREATION_FAILED" || "UPDATING" || "DELETING" || "ACTIVE" || "REGION_DISABLED" || "INACCESSIBLE_ENCRYPTION_CREDENTIALS", + * // ReplicaStatusDescription: "STRING_VALUE", + * // ReplicaStatusPercentProgress: "STRING_VALUE", + * // KMSMasterKeyId: "STRING_VALUE", + * // ProvisionedThroughputOverride: { // ProvisionedThroughputOverride + * // ReadCapacityUnits: Number("long"), + * // }, + * // OnDemandThroughputOverride: { // OnDemandThroughputOverride + * // MaxReadRequestUnits: Number("long"), + * // }, + * // WarmThroughput: { // TableWarmThroughputDescription + * // ReadUnitsPerSecond: Number("long"), + * // WriteUnitsPerSecond: Number("long"), + * // Status: "CREATING" || "UPDATING" || "DELETING" || "ACTIVE" || "INACCESSIBLE_ENCRYPTION_CREDENTIALS" || "ARCHIVING" || "ARCHIVED", + * // }, + * // GlobalSecondaryIndexes: [ // ReplicaGlobalSecondaryIndexDescriptionList + * // { // ReplicaGlobalSecondaryIndexDescription + * // IndexName: "STRING_VALUE", + * // ProvisionedThroughputOverride: { + * // ReadCapacityUnits: Number("long"), + * // }, + * // OnDemandThroughputOverride: { + * // MaxReadRequestUnits: Number("long"), + * // }, + * // WarmThroughput: { + * // ReadUnitsPerSecond: Number("long"), + * // WriteUnitsPerSecond: Number("long"), + * // Status: "CREATING" || "UPDATING" || "DELETING" || "ACTIVE", + * // }, + * // }, + * // ], + * // ReplicaInaccessibleDateTime: new Date("TIMESTAMP"), + * // ReplicaTableClassSummary: { // TableClassSummary + * // TableClass: "STANDARD" || "STANDARD_INFREQUENT_ACCESS", + * // LastUpdateDateTime: new Date("TIMESTAMP"), + * // }, + * // }, + * // ], + * // RestoreSummary: { // RestoreSummary + * // SourceBackupArn: "STRING_VALUE", + * // SourceTableArn: 
"STRING_VALUE", + * // RestoreDateTime: new Date("TIMESTAMP"), // required + * // RestoreInProgress: true || false, // required + * // }, + * // SSEDescription: { // SSEDescription + * // Status: "ENABLING" || "ENABLED" || "DISABLING" || "DISABLED" || "UPDATING", + * // SSEType: "AES256" || "KMS", + * // KMSMasterKeyArn: "STRING_VALUE", + * // InaccessibleEncryptionDateTime: new Date("TIMESTAMP"), + * // }, + * // ArchivalSummary: { // ArchivalSummary + * // ArchivalDateTime: new Date("TIMESTAMP"), + * // ArchivalReason: "STRING_VALUE", + * // ArchivalBackupArn: "STRING_VALUE", + * // }, + * // TableClassSummary: { + * // TableClass: "STANDARD" || "STANDARD_INFREQUENT_ACCESS", + * // LastUpdateDateTime: new Date("TIMESTAMP"), + * // }, + * // DeletionProtectionEnabled: true || false, + * // OnDemandThroughput: { + * // MaxReadRequestUnits: Number("long"), + * // MaxWriteRequestUnits: Number("long"), + * // }, + * // WarmThroughput: { + * // ReadUnitsPerSecond: Number("long"), + * // WriteUnitsPerSecond: Number("long"), + * // Status: "CREATING" || "UPDATING" || "DELETING" || "ACTIVE" || "INACCESSIBLE_ENCRYPTION_CREDENTIALS" || "ARCHIVING" || "ARCHIVED", + * // }, + * // MultiRegionConsistency: "EVENTUAL" || "STRONG", + * // }, + * // }; + * + * ``` + * + * @param RestoreTableFromBackupCommandInput - {@link RestoreTableFromBackupCommandInput} + * @returns {@link RestoreTableFromBackupCommandOutput} + * @see {@link RestoreTableFromBackupCommandInput} for command's `input` shape. + * @see {@link RestoreTableFromBackupCommandOutput} for command's `response` shape. + * @see {@link DynamoDBClientResolvedConfig | config} for DynamoDBClient's `config` shape. + * + * @throws {@link BackupInUseException} (client fault) + *

There is another ongoing conflicting backup control plane operation on the table. + * The backup is either being created, deleted or restored to a table.

+ * + * @throws {@link BackupNotFoundException} (client fault) + *

Backup not found for the given BackupARN.

+ * + * @throws {@link InternalServerError} (server fault) + *

An error occurred on the server side.

+ * + * @throws {@link InvalidEndpointException} (client fault) + * + * @throws {@link LimitExceededException} (client fault) + *

There is no limit to the number of daily on-demand backups that can be taken.

+ *

For most purposes, up to 500 simultaneous table operations are allowed per account. These operations + * include CreateTable, UpdateTable, + * DeleteTable,UpdateTimeToLive, + * RestoreTableFromBackup, and RestoreTableToPointInTime.

+ *

When you are creating a table with one or more secondary + * indexes, you can have up to 250 such requests running at a time. However, if the table or + * index specifications are complex, then DynamoDB might temporarily reduce the number + * of concurrent operations.

+ *

When importing into DynamoDB, up to 50 simultaneous import table operations are allowed per account.

+ *

There is a soft account quota of 2,500 tables.

+ *

GetRecords was called with a value of more than 1000 for the limit request parameter.

+ *

More than 2 processes are reading from the same streams shard at the same time. Exceeding + * this limit may result in request throttling.

+ * + * @throws {@link TableAlreadyExistsException} (client fault) + *

A target table with the specified name already exists.

+ * + * @throws {@link TableInUseException} (client fault) + *

A target table with the specified name is either being created or deleted. + *

+ * + * @throws {@link DynamoDBServiceException} + *

Base exception class for all service exceptions from DynamoDB service.

+ * + * + * @public + */ +export declare class RestoreTableFromBackupCommand extends RestoreTableFromBackupCommand_base { + /** @internal type navigation helper, not in runtime. */ + protected static __types: { + api: { + input: RestoreTableFromBackupInput; + output: RestoreTableFromBackupOutput; + }; + sdk: { + input: RestoreTableFromBackupCommandInput; + output: RestoreTableFromBackupCommandOutput; + }; + }; +} diff --git a/amplify/functions/deleteDocument/node_modules/@aws-sdk/client-dynamodb/dist-types/commands/RestoreTableToPointInTimeCommand.d.ts b/amplify/functions/deleteDocument/node_modules/@aws-sdk/client-dynamodb/dist-types/commands/RestoreTableToPointInTimeCommand.d.ts new file mode 100644 index 0000000..156e857 --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@aws-sdk/client-dynamodb/dist-types/commands/RestoreTableToPointInTimeCommand.d.ts @@ -0,0 +1,394 @@ +import { Command as $Command } from "@smithy/smithy-client"; +import { MetadataBearer as __MetadataBearer } from "@smithy/types"; +import { DynamoDBClientResolvedConfig, ServiceInputTypes, ServiceOutputTypes } from "../DynamoDBClient"; +import { RestoreTableToPointInTimeInput, RestoreTableToPointInTimeOutput } from "../models/models_0"; +/** + * @public + */ +export type { __MetadataBearer }; +export { $Command }; +/** + * @public + * + * The input for {@link RestoreTableToPointInTimeCommand}. + */ +export interface RestoreTableToPointInTimeCommandInput extends RestoreTableToPointInTimeInput { +} +/** + * @public + * + * The output of {@link RestoreTableToPointInTimeCommand}. 
+ */ +export interface RestoreTableToPointInTimeCommandOutput extends RestoreTableToPointInTimeOutput, __MetadataBearer { +} +declare const RestoreTableToPointInTimeCommand_base: { + new (input: RestoreTableToPointInTimeCommandInput): import("@smithy/smithy-client").CommandImpl; + new (__0_0: RestoreTableToPointInTimeCommandInput): import("@smithy/smithy-client").CommandImpl; + getEndpointParameterInstructions(): import("@smithy/middleware-endpoint").EndpointParameterInstructions; +}; +/** + *

Restores the specified table to the specified point in time within + * EarliestRestorableDateTime and LatestRestorableDateTime. + * You can restore your table to any point in time in the last 35 days. You can set the + * recovery period to any value between 1 and 35 days. Any number of users can execute up + * to 50 concurrent restores (any type of restore) in a given account.

+ *

When you restore using point in time recovery, DynamoDB restores your table data to + * the state based on the selected date and time (day:hour:minute:second) to a new table.

+ *

Along with data, the following are also included on the new restored table using point + * in time recovery:

+ *
    + *
  • + *

    Global secondary indexes (GSIs)

    + *
  • + *
  • + *

    Local secondary indexes (LSIs)

    + *
  • + *
  • + *

    Provisioned read and write capacity

    + *
  • + *
  • + *

    Encryption settings

    + * + *

    All these settings come from the current settings of the source table at + * the time of restore.

    + *
    + *
  • + *
+ *

You must manually set up the following on the restored table:

+ *
    + *
  • + *

    Auto scaling policies

    + *
  • + *
  • + *

    IAM policies

    + *
  • + *
  • + *

    Amazon CloudWatch metrics and alarms

    + *
  • + *
  • + *

    Tags

    + *
  • + *
  • + *

    Stream settings

    + *
  • + *
  • + *

    Time to Live (TTL) settings

    + *
  • + *
  • + *

    Point in time recovery settings

    + *
  • + *
+ * @example + * Use a bare-bones client and the command you need to make an API call. + * ```javascript + * import { DynamoDBClient, RestoreTableToPointInTimeCommand } from "@aws-sdk/client-dynamodb"; // ES Modules import + * // const { DynamoDBClient, RestoreTableToPointInTimeCommand } = require("@aws-sdk/client-dynamodb"); // CommonJS import + * const client = new DynamoDBClient(config); + * const input = { // RestoreTableToPointInTimeInput + * SourceTableArn: "STRING_VALUE", + * SourceTableName: "STRING_VALUE", + * TargetTableName: "STRING_VALUE", // required + * UseLatestRestorableTime: true || false, + * RestoreDateTime: new Date("TIMESTAMP"), + * BillingModeOverride: "PROVISIONED" || "PAY_PER_REQUEST", + * GlobalSecondaryIndexOverride: [ // GlobalSecondaryIndexList + * { // GlobalSecondaryIndex + * IndexName: "STRING_VALUE", // required + * KeySchema: [ // KeySchema // required + * { // KeySchemaElement + * AttributeName: "STRING_VALUE", // required + * KeyType: "HASH" || "RANGE", // required + * }, + * ], + * Projection: { // Projection + * ProjectionType: "ALL" || "KEYS_ONLY" || "INCLUDE", + * NonKeyAttributes: [ // NonKeyAttributeNameList + * "STRING_VALUE", + * ], + * }, + * ProvisionedThroughput: { // ProvisionedThroughput + * ReadCapacityUnits: Number("long"), // required + * WriteCapacityUnits: Number("long"), // required + * }, + * OnDemandThroughput: { // OnDemandThroughput + * MaxReadRequestUnits: Number("long"), + * MaxWriteRequestUnits: Number("long"), + * }, + * WarmThroughput: { // WarmThroughput + * ReadUnitsPerSecond: Number("long"), + * WriteUnitsPerSecond: Number("long"), + * }, + * }, + * ], + * LocalSecondaryIndexOverride: [ // LocalSecondaryIndexList + * { // LocalSecondaryIndex + * IndexName: "STRING_VALUE", // required + * KeySchema: [ // required + * { + * AttributeName: "STRING_VALUE", // required + * KeyType: "HASH" || "RANGE", // required + * }, + * ], + * Projection: { + * ProjectionType: "ALL" || "KEYS_ONLY" || "INCLUDE", + * 
NonKeyAttributes: [ + * "STRING_VALUE", + * ], + * }, + * }, + * ], + * ProvisionedThroughputOverride: { + * ReadCapacityUnits: Number("long"), // required + * WriteCapacityUnits: Number("long"), // required + * }, + * OnDemandThroughputOverride: { + * MaxReadRequestUnits: Number("long"), + * MaxWriteRequestUnits: Number("long"), + * }, + * SSESpecificationOverride: { // SSESpecification + * Enabled: true || false, + * SSEType: "AES256" || "KMS", + * KMSMasterKeyId: "STRING_VALUE", + * }, + * }; + * const command = new RestoreTableToPointInTimeCommand(input); + * const response = await client.send(command); + * // { // RestoreTableToPointInTimeOutput + * // TableDescription: { // TableDescription + * // AttributeDefinitions: [ // AttributeDefinitions + * // { // AttributeDefinition + * // AttributeName: "STRING_VALUE", // required + * // AttributeType: "S" || "N" || "B", // required + * // }, + * // ], + * // TableName: "STRING_VALUE", + * // KeySchema: [ // KeySchema + * // { // KeySchemaElement + * // AttributeName: "STRING_VALUE", // required + * // KeyType: "HASH" || "RANGE", // required + * // }, + * // ], + * // TableStatus: "CREATING" || "UPDATING" || "DELETING" || "ACTIVE" || "INACCESSIBLE_ENCRYPTION_CREDENTIALS" || "ARCHIVING" || "ARCHIVED", + * // CreationDateTime: new Date("TIMESTAMP"), + * // ProvisionedThroughput: { // ProvisionedThroughputDescription + * // LastIncreaseDateTime: new Date("TIMESTAMP"), + * // LastDecreaseDateTime: new Date("TIMESTAMP"), + * // NumberOfDecreasesToday: Number("long"), + * // ReadCapacityUnits: Number("long"), + * // WriteCapacityUnits: Number("long"), + * // }, + * // TableSizeBytes: Number("long"), + * // ItemCount: Number("long"), + * // TableArn: "STRING_VALUE", + * // TableId: "STRING_VALUE", + * // BillingModeSummary: { // BillingModeSummary + * // BillingMode: "PROVISIONED" || "PAY_PER_REQUEST", + * // LastUpdateToPayPerRequestDateTime: new Date("TIMESTAMP"), + * // }, + * // LocalSecondaryIndexes: [ // 
LocalSecondaryIndexDescriptionList + * // { // LocalSecondaryIndexDescription + * // IndexName: "STRING_VALUE", + * // KeySchema: [ + * // { + * // AttributeName: "STRING_VALUE", // required + * // KeyType: "HASH" || "RANGE", // required + * // }, + * // ], + * // Projection: { // Projection + * // ProjectionType: "ALL" || "KEYS_ONLY" || "INCLUDE", + * // NonKeyAttributes: [ // NonKeyAttributeNameList + * // "STRING_VALUE", + * // ], + * // }, + * // IndexSizeBytes: Number("long"), + * // ItemCount: Number("long"), + * // IndexArn: "STRING_VALUE", + * // }, + * // ], + * // GlobalSecondaryIndexes: [ // GlobalSecondaryIndexDescriptionList + * // { // GlobalSecondaryIndexDescription + * // IndexName: "STRING_VALUE", + * // KeySchema: [ + * // { + * // AttributeName: "STRING_VALUE", // required + * // KeyType: "HASH" || "RANGE", // required + * // }, + * // ], + * // Projection: { + * // ProjectionType: "ALL" || "KEYS_ONLY" || "INCLUDE", + * // NonKeyAttributes: [ + * // "STRING_VALUE", + * // ], + * // }, + * // IndexStatus: "CREATING" || "UPDATING" || "DELETING" || "ACTIVE", + * // Backfilling: true || false, + * // ProvisionedThroughput: { + * // LastIncreaseDateTime: new Date("TIMESTAMP"), + * // LastDecreaseDateTime: new Date("TIMESTAMP"), + * // NumberOfDecreasesToday: Number("long"), + * // ReadCapacityUnits: Number("long"), + * // WriteCapacityUnits: Number("long"), + * // }, + * // IndexSizeBytes: Number("long"), + * // ItemCount: Number("long"), + * // IndexArn: "STRING_VALUE", + * // OnDemandThroughput: { // OnDemandThroughput + * // MaxReadRequestUnits: Number("long"), + * // MaxWriteRequestUnits: Number("long"), + * // }, + * // WarmThroughput: { // GlobalSecondaryIndexWarmThroughputDescription + * // ReadUnitsPerSecond: Number("long"), + * // WriteUnitsPerSecond: Number("long"), + * // Status: "CREATING" || "UPDATING" || "DELETING" || "ACTIVE", + * // }, + * // }, + * // ], + * // StreamSpecification: { // StreamSpecification + * // StreamEnabled: true 
|| false, // required + * // StreamViewType: "NEW_IMAGE" || "OLD_IMAGE" || "NEW_AND_OLD_IMAGES" || "KEYS_ONLY", + * // }, + * // LatestStreamLabel: "STRING_VALUE", + * // LatestStreamArn: "STRING_VALUE", + * // GlobalTableVersion: "STRING_VALUE", + * // Replicas: [ // ReplicaDescriptionList + * // { // ReplicaDescription + * // RegionName: "STRING_VALUE", + * // ReplicaStatus: "CREATING" || "CREATION_FAILED" || "UPDATING" || "DELETING" || "ACTIVE" || "REGION_DISABLED" || "INACCESSIBLE_ENCRYPTION_CREDENTIALS", + * // ReplicaStatusDescription: "STRING_VALUE", + * // ReplicaStatusPercentProgress: "STRING_VALUE", + * // KMSMasterKeyId: "STRING_VALUE", + * // ProvisionedThroughputOverride: { // ProvisionedThroughputOverride + * // ReadCapacityUnits: Number("long"), + * // }, + * // OnDemandThroughputOverride: { // OnDemandThroughputOverride + * // MaxReadRequestUnits: Number("long"), + * // }, + * // WarmThroughput: { // TableWarmThroughputDescription + * // ReadUnitsPerSecond: Number("long"), + * // WriteUnitsPerSecond: Number("long"), + * // Status: "CREATING" || "UPDATING" || "DELETING" || "ACTIVE" || "INACCESSIBLE_ENCRYPTION_CREDENTIALS" || "ARCHIVING" || "ARCHIVED", + * // }, + * // GlobalSecondaryIndexes: [ // ReplicaGlobalSecondaryIndexDescriptionList + * // { // ReplicaGlobalSecondaryIndexDescription + * // IndexName: "STRING_VALUE", + * // ProvisionedThroughputOverride: { + * // ReadCapacityUnits: Number("long"), + * // }, + * // OnDemandThroughputOverride: { + * // MaxReadRequestUnits: Number("long"), + * // }, + * // WarmThroughput: { + * // ReadUnitsPerSecond: Number("long"), + * // WriteUnitsPerSecond: Number("long"), + * // Status: "CREATING" || "UPDATING" || "DELETING" || "ACTIVE", + * // }, + * // }, + * // ], + * // ReplicaInaccessibleDateTime: new Date("TIMESTAMP"), + * // ReplicaTableClassSummary: { // TableClassSummary + * // TableClass: "STANDARD" || "STANDARD_INFREQUENT_ACCESS", + * // LastUpdateDateTime: new Date("TIMESTAMP"), + * // }, + * // }, 
+ * // ], + * // RestoreSummary: { // RestoreSummary + * // SourceBackupArn: "STRING_VALUE", + * // SourceTableArn: "STRING_VALUE", + * // RestoreDateTime: new Date("TIMESTAMP"), // required + * // RestoreInProgress: true || false, // required + * // }, + * // SSEDescription: { // SSEDescription + * // Status: "ENABLING" || "ENABLED" || "DISABLING" || "DISABLED" || "UPDATING", + * // SSEType: "AES256" || "KMS", + * // KMSMasterKeyArn: "STRING_VALUE", + * // InaccessibleEncryptionDateTime: new Date("TIMESTAMP"), + * // }, + * // ArchivalSummary: { // ArchivalSummary + * // ArchivalDateTime: new Date("TIMESTAMP"), + * // ArchivalReason: "STRING_VALUE", + * // ArchivalBackupArn: "STRING_VALUE", + * // }, + * // TableClassSummary: { + * // TableClass: "STANDARD" || "STANDARD_INFREQUENT_ACCESS", + * // LastUpdateDateTime: new Date("TIMESTAMP"), + * // }, + * // DeletionProtectionEnabled: true || false, + * // OnDemandThroughput: { + * // MaxReadRequestUnits: Number("long"), + * // MaxWriteRequestUnits: Number("long"), + * // }, + * // WarmThroughput: { + * // ReadUnitsPerSecond: Number("long"), + * // WriteUnitsPerSecond: Number("long"), + * // Status: "CREATING" || "UPDATING" || "DELETING" || "ACTIVE" || "INACCESSIBLE_ENCRYPTION_CREDENTIALS" || "ARCHIVING" || "ARCHIVED", + * // }, + * // MultiRegionConsistency: "EVENTUAL" || "STRONG", + * // }, + * // }; + * + * ``` + * + * @param RestoreTableToPointInTimeCommandInput - {@link RestoreTableToPointInTimeCommandInput} + * @returns {@link RestoreTableToPointInTimeCommandOutput} + * @see {@link RestoreTableToPointInTimeCommandInput} for command's `input` shape. + * @see {@link RestoreTableToPointInTimeCommandOutput} for command's `response` shape. + * @see {@link DynamoDBClientResolvedConfig | config} for DynamoDBClient's `config` shape. + * + * @throws {@link InternalServerError} (server fault) + *

An error occurred on the server side.

+ * + * @throws {@link InvalidEndpointException} (client fault) + * + * @throws {@link InvalidRestoreTimeException} (client fault) + *

An invalid restore time was specified. RestoreDateTime must be between + * EarliestRestorableDateTime and LatestRestorableDateTime.

+ * + * @throws {@link LimitExceededException} (client fault) + *

There is no limit to the number of daily on-demand backups that can be taken.

+ *

For most purposes, up to 500 simultaneous table operations are allowed per account. These operations + * include CreateTable, UpdateTable, + * DeleteTable,UpdateTimeToLive, + * RestoreTableFromBackup, and RestoreTableToPointInTime.

+ *

When you are creating a table with one or more secondary + * indexes, you can have up to 250 such requests running at a time. However, if the table or + * index specifications are complex, then DynamoDB might temporarily reduce the number + * of concurrent operations.

+ *

When importing into DynamoDB, up to 50 simultaneous import table operations are allowed per account.

+ *

There is a soft account quota of 2,500 tables.

+ *

GetRecords was called with a value of more than 1000 for the limit request parameter.

+ *

More than 2 processes are reading from the same streams shard at the same time. Exceeding + * this limit may result in request throttling.

+ * + * @throws {@link PointInTimeRecoveryUnavailableException} (client fault) + *

Point in time recovery has not yet been enabled for this source table.

+ * + * @throws {@link TableAlreadyExistsException} (client fault) + *

A target table with the specified name already exists.

+ * + * @throws {@link TableInUseException} (client fault) + *

A target table with the specified name is either being created or deleted. + *

+ * + * @throws {@link TableNotFoundException} (client fault) + *

A source table with the name TableName does not currently exist within + * the subscriber's account or the subscriber is operating in the wrong Amazon Web Services Region.

+ * + * @throws {@link DynamoDBServiceException} + *

Base exception class for all service exceptions from DynamoDB service.

+ * + * + * @public + */ +export declare class RestoreTableToPointInTimeCommand extends RestoreTableToPointInTimeCommand_base { + /** @internal type navigation helper, not in runtime. */ + protected static __types: { + api: { + input: RestoreTableToPointInTimeInput; + output: RestoreTableToPointInTimeOutput; + }; + sdk: { + input: RestoreTableToPointInTimeCommandInput; + output: RestoreTableToPointInTimeCommandOutput; + }; + }; +} diff --git a/amplify/functions/deleteDocument/node_modules/@aws-sdk/client-dynamodb/dist-types/commands/ScanCommand.d.ts b/amplify/functions/deleteDocument/node_modules/@aws-sdk/client-dynamodb/dist-types/commands/ScanCommand.d.ts new file mode 100644 index 0000000..64261d8 --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@aws-sdk/client-dynamodb/dist-types/commands/ScanCommand.d.ts @@ -0,0 +1,328 @@ +import { Command as $Command } from "@smithy/smithy-client"; +import { MetadataBearer as __MetadataBearer } from "@smithy/types"; +import { DynamoDBClientResolvedConfig, ServiceInputTypes, ServiceOutputTypes } from "../DynamoDBClient"; +import { ScanInput, ScanOutput } from "../models/models_0"; +/** + * @public + */ +export type { __MetadataBearer }; +export { $Command }; +/** + * @public + * + * The input for {@link ScanCommand}. + */ +export interface ScanCommandInput extends ScanInput { +} +/** + * @public + * + * The output of {@link ScanCommand}. + */ +export interface ScanCommandOutput extends ScanOutput, __MetadataBearer { +} +declare const ScanCommand_base: { + new (input: ScanCommandInput): import("@smithy/smithy-client").CommandImpl; + new (__0_0: ScanCommandInput): import("@smithy/smithy-client").CommandImpl; + getEndpointParameterInstructions(): import("@smithy/middleware-endpoint").EndpointParameterInstructions; +}; +/** + *

The Scan operation returns one or more items and item attributes by + * accessing every item in a table or a secondary index. To have DynamoDB return fewer + * items, you can provide a FilterExpression operation.

+ *

If the total size of scanned items exceeds the maximum dataset size limit of 1 MB, the + * scan completes and results are returned to the user. The LastEvaluatedKey + * value is also returned and the requestor can use the LastEvaluatedKey to + * continue the scan in a subsequent operation. Each scan response also includes number of + * items that were scanned (ScannedCount) as part of the request. If using a + * FilterExpression, a scan result can result in no items meeting the + * criteria and the Count will result in zero. If you did not use a + * FilterExpression in the scan request, then Count is the + * same as ScannedCount.

+ * + *

+ * Count and ScannedCount only return the count of items + * specific to a single scan request and, unless the table is less than 1MB, do not + * represent the total number of items in the table.

+ *
+ *

A single Scan operation first reads up to the maximum number of items set + * (if using the Limit parameter) or a maximum of 1 MB of data and then + * applies any filtering to the results if a FilterExpression is provided. If + * LastEvaluatedKey is present in the response, pagination is required to + * complete the full table scan. For more information, see Paginating the + * Results in the Amazon DynamoDB Developer Guide.

+ *

+ * Scan operations proceed sequentially; however, for faster performance on + * a large table or secondary index, applications can request a parallel Scan + * operation by providing the Segment and TotalSegments + * parameters. For more information, see Parallel + * Scan in the Amazon DynamoDB Developer Guide.

+ *

By default, a Scan uses eventually consistent reads when accessing the + * items in a table. Therefore, the results from an eventually consistent Scan + * may not include the latest item changes at the time the scan iterates through each item + * in the table. If you require a strongly consistent read of each item as the scan + * iterates through the items in the table, you can set the ConsistentRead + * parameter to true. Strong consistency only relates to the consistency of the read at the + * item level.

+ * + *

DynamoDB does not provide snapshot isolation for a scan operation when the + * ConsistentRead parameter is set to true. Thus, a DynamoDB scan + * operation does not guarantee that all reads in a scan see a consistent snapshot of + * the table when the scan operation was requested.

+ *
+ * @example + * Use a bare-bones client and the command you need to make an API call. + * ```javascript + * import { DynamoDBClient, ScanCommand } from "@aws-sdk/client-dynamodb"; // ES Modules import + * // const { DynamoDBClient, ScanCommand } = require("@aws-sdk/client-dynamodb"); // CommonJS import + * const client = new DynamoDBClient(config); + * const input = { // ScanInput + * TableName: "STRING_VALUE", // required + * IndexName: "STRING_VALUE", + * AttributesToGet: [ // AttributeNameList + * "STRING_VALUE", + * ], + * Limit: Number("int"), + * Select: "ALL_ATTRIBUTES" || "ALL_PROJECTED_ATTRIBUTES" || "SPECIFIC_ATTRIBUTES" || "COUNT", + * ScanFilter: { // FilterConditionMap + * "": { // Condition + * AttributeValueList: [ // AttributeValueList + * { // AttributeValue Union: only one key present + * S: "STRING_VALUE", + * N: "STRING_VALUE", + * B: new Uint8Array(), // e.g. Buffer.from("") or new TextEncoder().encode("") + * SS: [ // StringSetAttributeValue + * "STRING_VALUE", + * ], + * NS: [ // NumberSetAttributeValue + * "STRING_VALUE", + * ], + * BS: [ // BinarySetAttributeValue + * new Uint8Array(), // e.g. Buffer.from("") or new TextEncoder().encode("") + * ], + * M: { // MapAttributeValue + * "": {// Union: only one key present + * S: "STRING_VALUE", + * N: "STRING_VALUE", + * B: new Uint8Array(), // e.g. Buffer.from("") or new TextEncoder().encode("") + * SS: [ + * "STRING_VALUE", + * ], + * NS: [ + * "STRING_VALUE", + * ], + * BS: [ + * new Uint8Array(), // e.g. 
Buffer.from("") or new TextEncoder().encode("") + * ], + * M: { + * "": "", + * }, + * L: [ // ListAttributeValue + * "", + * ], + * NULL: true || false, + * BOOL: true || false, + * }, + * }, + * L: [ + * "", + * ], + * NULL: true || false, + * BOOL: true || false, + * }, + * ], + * ComparisonOperator: "EQ" || "NE" || "IN" || "LE" || "LT" || "GE" || "GT" || "BETWEEN" || "NOT_NULL" || "NULL" || "CONTAINS" || "NOT_CONTAINS" || "BEGINS_WITH", // required + * }, + * }, + * ConditionalOperator: "AND" || "OR", + * ExclusiveStartKey: { // Key + * "": "", + * }, + * ReturnConsumedCapacity: "INDEXES" || "TOTAL" || "NONE", + * TotalSegments: Number("int"), + * Segment: Number("int"), + * ProjectionExpression: "STRING_VALUE", + * FilterExpression: "STRING_VALUE", + * ExpressionAttributeNames: { // ExpressionAttributeNameMap + * "": "STRING_VALUE", + * }, + * ExpressionAttributeValues: { // ExpressionAttributeValueMap + * "": "", + * }, + * ConsistentRead: true || false, + * }; + * const command = new ScanCommand(input); + * const response = await client.send(command); + * // { // ScanOutput + * // Items: [ // ItemList + * // { // AttributeMap + * // "": { // AttributeValue Union: only one key present + * // S: "STRING_VALUE", + * // N: "STRING_VALUE", + * // B: new Uint8Array(), + * // SS: [ // StringSetAttributeValue + * // "STRING_VALUE", + * // ], + * // NS: [ // NumberSetAttributeValue + * // "STRING_VALUE", + * // ], + * // BS: [ // BinarySetAttributeValue + * // new Uint8Array(), + * // ], + * // M: { // MapAttributeValue + * // "": {// Union: only one key present + * // S: "STRING_VALUE", + * // N: "STRING_VALUE", + * // B: new Uint8Array(), + * // SS: [ + * // "STRING_VALUE", + * // ], + * // NS: [ + * // "STRING_VALUE", + * // ], + * // BS: [ + * // new Uint8Array(), + * // ], + * // M: { + * // "": "", + * // }, + * // L: [ // ListAttributeValue + * // "", + * // ], + * // NULL: true || false, + * // BOOL: true || false, + * // }, + * // }, + * // L: [ + * // "", + 
* // ], + * // NULL: true || false, + * // BOOL: true || false, + * // }, + * // }, + * // ], + * // Count: Number("int"), + * // ScannedCount: Number("int"), + * // LastEvaluatedKey: { // Key + * // "": "", + * // }, + * // ConsumedCapacity: { // ConsumedCapacity + * // TableName: "STRING_VALUE", + * // CapacityUnits: Number("double"), + * // ReadCapacityUnits: Number("double"), + * // WriteCapacityUnits: Number("double"), + * // Table: { // Capacity + * // ReadCapacityUnits: Number("double"), + * // WriteCapacityUnits: Number("double"), + * // CapacityUnits: Number("double"), + * // }, + * // LocalSecondaryIndexes: { // SecondaryIndexesCapacityMap + * // "": { + * // ReadCapacityUnits: Number("double"), + * // WriteCapacityUnits: Number("double"), + * // CapacityUnits: Number("double"), + * // }, + * // }, + * // GlobalSecondaryIndexes: { + * // "": { + * // ReadCapacityUnits: Number("double"), + * // WriteCapacityUnits: Number("double"), + * // CapacityUnits: Number("double"), + * // }, + * // }, + * // }, + * // }; + * + * ``` + * + * @param ScanCommandInput - {@link ScanCommandInput} + * @returns {@link ScanCommandOutput} + * @see {@link ScanCommandInput} for command's `input` shape. + * @see {@link ScanCommandOutput} for command's `response` shape. + * @see {@link DynamoDBClientResolvedConfig | config} for DynamoDBClient's `config` shape. + * + * @throws {@link InternalServerError} (server fault) + *

An error occurred on the server side.

+ * + * @throws {@link InvalidEndpointException} (client fault) + * + * @throws {@link ProvisionedThroughputExceededException} (client fault) + *

Your request rate is too high. The Amazon Web Services SDKs for DynamoDB + * automatically retry requests that receive this exception. Your request is eventually + * successful, unless your retry queue is too large to finish. Reduce the frequency of + * requests and use exponential backoff. For more information, go to Error Retries and Exponential Backoff in the Amazon DynamoDB Developer Guide.

+ * + * @throws {@link RequestLimitExceeded} (client fault) + *

Throughput exceeds the current throughput quota for your account. Please contact + * Amazon Web ServicesSupport to request a + * quota increase.

+ * + * @throws {@link ResourceNotFoundException} (client fault) + *

The operation tried to access a nonexistent table or index. The resource might not + * be specified correctly, or its status might not be ACTIVE.

+ * + * @throws {@link DynamoDBServiceException} + *

Base exception class for all service exceptions from DynamoDB service.

+ * + * + * @example To scan a table + * ```javascript + * // This example scans the entire Music table, and then narrows the results to songs by the artist "No One You Know". For each item, only the album title and song title are returned. + * const input = { + * ExpressionAttributeNames: { + * #AT: "AlbumTitle", + * #ST: "SongTitle" + * }, + * ExpressionAttributeValues: { + * :a: { + * S: "No One You Know" + * } + * }, + * FilterExpression: "Artist = :a", + * ProjectionExpression: "#ST, #AT", + * TableName: "Music" + * }; + * const command = new ScanCommand(input); + * const response = await client.send(command); + * /* response is + * { + * ConsumedCapacity: { /* empty *\/ }, + * Count: 2, + * Items: [ + * { + * AlbumTitle: { + * S: "Somewhat Famous" + * }, + * SongTitle: { + * S: "Call Me Today" + * } + * }, + * { + * AlbumTitle: { + * S: "Blue Sky Blues" + * }, + * SongTitle: { + * S: "Scared of My Shadow" + * } + * } + * ], + * ScannedCount: 3 + * } + * *\/ + * ``` + * + * @public + */ +export declare class ScanCommand extends ScanCommand_base { + /** @internal type navigation helper, not in runtime. 
*/ + protected static __types: { + api: { + input: ScanInput; + output: ScanOutput; + }; + sdk: { + input: ScanCommandInput; + output: ScanCommandOutput; + }; + }; +} diff --git a/amplify/functions/deleteDocument/node_modules/@aws-sdk/client-dynamodb/dist-types/commands/TagResourceCommand.d.ts b/amplify/functions/deleteDocument/node_modules/@aws-sdk/client-dynamodb/dist-types/commands/TagResourceCommand.d.ts new file mode 100644 index 0000000..8475f32 --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@aws-sdk/client-dynamodb/dist-types/commands/TagResourceCommand.d.ts @@ -0,0 +1,139 @@ +import { Command as $Command } from "@smithy/smithy-client"; +import { MetadataBearer as __MetadataBearer } from "@smithy/types"; +import { DynamoDBClientResolvedConfig, ServiceInputTypes, ServiceOutputTypes } from "../DynamoDBClient"; +import { TagResourceInput } from "../models/models_0"; +/** + * @public + */ +export type { __MetadataBearer }; +export { $Command }; +/** + * @public + * + * The input for {@link TagResourceCommand}. + */ +export interface TagResourceCommandInput extends TagResourceInput { +} +/** + * @public + * + * The output of {@link TagResourceCommand}. + */ +export interface TagResourceCommandOutput extends __MetadataBearer { +} +declare const TagResourceCommand_base: { + new (input: TagResourceCommandInput): import("@smithy/smithy-client").CommandImpl; + new (__0_0: TagResourceCommandInput): import("@smithy/smithy-client").CommandImpl; + getEndpointParameterInstructions(): import("@smithy/middleware-endpoint").EndpointParameterInstructions; +}; +/** + *

Associate a set of tags with an Amazon DynamoDB resource. You can then activate these + * user-defined tags so that they appear on the Billing and Cost Management console for + * cost allocation tracking. You can call TagResource up to five times per second, per + * account.

+ *
    + *
  • + *

    + * TagResource is an asynchronous operation. If you issue a ListTagsOfResource request immediately after a + * TagResource request, DynamoDB might return your + * previous tag set, if there was one, or an empty tag set. This is because + * ListTagsOfResource uses an eventually consistent query, and the + * metadata for your tags or table might not be available at that moment. Wait for + * a few seconds, and then try the ListTagsOfResource request + * again.

    + *
  • + *
  • + *

    The application or removal of tags using TagResource and + * UntagResource APIs is eventually consistent. + * ListTagsOfResource API will only reflect the changes after a + * few seconds.

    + *
  • + *
+ *

For an overview on tagging DynamoDB resources, see Tagging for DynamoDB + * in the Amazon DynamoDB Developer Guide.

+ * @example + * Use a bare-bones client and the command you need to make an API call. + * ```javascript + * import { DynamoDBClient, TagResourceCommand } from "@aws-sdk/client-dynamodb"; // ES Modules import + * // const { DynamoDBClient, TagResourceCommand } = require("@aws-sdk/client-dynamodb"); // CommonJS import + * const client = new DynamoDBClient(config); + * const input = { // TagResourceInput + * ResourceArn: "STRING_VALUE", // required + * Tags: [ // TagList // required + * { // Tag + * Key: "STRING_VALUE", // required + * Value: "STRING_VALUE", // required + * }, + * ], + * }; + * const command = new TagResourceCommand(input); + * const response = await client.send(command); + * // {}; + * + * ``` + * + * @param TagResourceCommandInput - {@link TagResourceCommandInput} + * @returns {@link TagResourceCommandOutput} + * @see {@link TagResourceCommandInput} for command's `input` shape. + * @see {@link TagResourceCommandOutput} for command's `response` shape. + * @see {@link DynamoDBClientResolvedConfig | config} for DynamoDBClient's `config` shape. + * + * @throws {@link InternalServerError} (server fault) + *

An error occurred on the server side.

+ * + * @throws {@link InvalidEndpointException} (client fault) + * + * @throws {@link LimitExceededException} (client fault) + *

There is no limit to the number of daily on-demand backups that can be taken.

+ *

For most purposes, up to 500 simultaneous table operations are allowed per account. These operations + * include CreateTable, UpdateTable, + * DeleteTable,UpdateTimeToLive, + * RestoreTableFromBackup, and RestoreTableToPointInTime.

+ *

When you are creating a table with one or more secondary + * indexes, you can have up to 250 such requests running at a time. However, if the table or + * index specifications are complex, then DynamoDB might temporarily reduce the number + * of concurrent operations.

+ *

When importing into DynamoDB, up to 50 simultaneous import table operations are allowed per account.

+ *

There is a soft account quota of 2,500 tables.

+ *

GetRecords was called with a value of more than 1000 for the limit request parameter.

+ *

More than 2 processes are reading from the same streams shard at the same time. Exceeding + * this limit may result in request throttling.

+ * + * @throws {@link ResourceInUseException} (client fault) + *

The operation conflicts with the resource's availability. For example:

+ *
    + *
  • + *

    You attempted to recreate an existing table.

    + *
  • + *
  • + *

    You tried to delete a table currently in the CREATING state.

    + *
  • + *
  • + *

    You tried to update a resource that was already being updated.

    + *
  • + *
+ *

When appropriate, wait for the ongoing update to complete and attempt the request again.

+ * + * @throws {@link ResourceNotFoundException} (client fault) + *

The operation tried to access a nonexistent table or index. The resource might not + * be specified correctly, or its status might not be ACTIVE.

+ * + * @throws {@link DynamoDBServiceException} + *

Base exception class for all service exceptions from DynamoDB service.

+ * + * + * @public + */ +export declare class TagResourceCommand extends TagResourceCommand_base { + /** @internal type navigation helper, not in runtime. */ + protected static __types: { + api: { + input: TagResourceInput; + output: {}; + }; + sdk: { + input: TagResourceCommandInput; + output: TagResourceCommandOutput; + }; + }; +} diff --git a/amplify/functions/deleteDocument/node_modules/@aws-sdk/client-dynamodb/dist-types/commands/TransactGetItemsCommand.d.ts b/amplify/functions/deleteDocument/node_modules/@aws-sdk/client-dynamodb/dist-types/commands/TransactGetItemsCommand.d.ts new file mode 100644 index 0000000..a9d9997 --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@aws-sdk/client-dynamodb/dist-types/commands/TransactGetItemsCommand.d.ts @@ -0,0 +1,489 @@ +import { Command as $Command } from "@smithy/smithy-client"; +import { MetadataBearer as __MetadataBearer } from "@smithy/types"; +import { DynamoDBClientResolvedConfig, ServiceInputTypes, ServiceOutputTypes } from "../DynamoDBClient"; +import { TransactGetItemsInput, TransactGetItemsOutput } from "../models/models_0"; +/** + * @public + */ +export type { __MetadataBearer }; +export { $Command }; +/** + * @public + * + * The input for {@link TransactGetItemsCommand}. + */ +export interface TransactGetItemsCommandInput extends TransactGetItemsInput { +} +/** + * @public + * + * The output of {@link TransactGetItemsCommand}. + */ +export interface TransactGetItemsCommandOutput extends TransactGetItemsOutput, __MetadataBearer { +} +declare const TransactGetItemsCommand_base: { + new (input: TransactGetItemsCommandInput): import("@smithy/smithy-client").CommandImpl; + new (__0_0: TransactGetItemsCommandInput): import("@smithy/smithy-client").CommandImpl; + getEndpointParameterInstructions(): import("@smithy/middleware-endpoint").EndpointParameterInstructions; +}; +/** + *

+ * TransactGetItems is a synchronous operation that atomically retrieves + * multiple items from one or more tables (but not from indexes) in a single account and + * Region. A TransactGetItems call can contain up to 100 + * TransactGetItem objects, each of which contains a Get + * structure that specifies an item to retrieve from a table in the account and Region. A + * call to TransactGetItems cannot retrieve items from tables in more than one + * Amazon Web Services account or Region. The aggregate size of the items in the + * transaction cannot exceed 4 MB.

+ *

DynamoDB rejects the entire TransactGetItems request if any of + * the following is true:

+ *
    + *
  • + *

    A conflicting operation is in the process of updating an item to be + * read.

    + *
  • + *
  • + *

    There is insufficient provisioned capacity for the transaction to be + * completed.

    + *
  • + *
  • + *

    There is a user error, such as an invalid data format.

    + *
  • + *
  • + *

    The aggregate size of the items in the transaction exceeded 4 MB.

    + *
  • + *
+ * @example + * Use a bare-bones client and the command you need to make an API call. + * ```javascript + * import { DynamoDBClient, TransactGetItemsCommand } from "@aws-sdk/client-dynamodb"; // ES Modules import + * // const { DynamoDBClient, TransactGetItemsCommand } = require("@aws-sdk/client-dynamodb"); // CommonJS import + * const client = new DynamoDBClient(config); + * const input = { // TransactGetItemsInput + * TransactItems: [ // TransactGetItemList // required + * { // TransactGetItem + * Get: { // Get + * Key: { // Key // required + * "": { // AttributeValue Union: only one key present + * S: "STRING_VALUE", + * N: "STRING_VALUE", + * B: new Uint8Array(), // e.g. Buffer.from("") or new TextEncoder().encode("") + * SS: [ // StringSetAttributeValue + * "STRING_VALUE", + * ], + * NS: [ // NumberSetAttributeValue + * "STRING_VALUE", + * ], + * BS: [ // BinarySetAttributeValue + * new Uint8Array(), // e.g. Buffer.from("") or new TextEncoder().encode("") + * ], + * M: { // MapAttributeValue + * "": {// Union: only one key present + * S: "STRING_VALUE", + * N: "STRING_VALUE", + * B: new Uint8Array(), // e.g. Buffer.from("") or new TextEncoder().encode("") + * SS: [ + * "STRING_VALUE", + * ], + * NS: [ + * "STRING_VALUE", + * ], + * BS: [ + * new Uint8Array(), // e.g. 
Buffer.from("") or new TextEncoder().encode("") + * ], + * M: { + * "": "", + * }, + * L: [ // ListAttributeValue + * "", + * ], + * NULL: true || false, + * BOOL: true || false, + * }, + * }, + * L: [ + * "", + * ], + * NULL: true || false, + * BOOL: true || false, + * }, + * }, + * TableName: "STRING_VALUE", // required + * ProjectionExpression: "STRING_VALUE", + * ExpressionAttributeNames: { // ExpressionAttributeNameMap + * "": "STRING_VALUE", + * }, + * }, + * }, + * ], + * ReturnConsumedCapacity: "INDEXES" || "TOTAL" || "NONE", + * }; + * const command = new TransactGetItemsCommand(input); + * const response = await client.send(command); + * // { // TransactGetItemsOutput + * // ConsumedCapacity: [ // ConsumedCapacityMultiple + * // { // ConsumedCapacity + * // TableName: "STRING_VALUE", + * // CapacityUnits: Number("double"), + * // ReadCapacityUnits: Number("double"), + * // WriteCapacityUnits: Number("double"), + * // Table: { // Capacity + * // ReadCapacityUnits: Number("double"), + * // WriteCapacityUnits: Number("double"), + * // CapacityUnits: Number("double"), + * // }, + * // LocalSecondaryIndexes: { // SecondaryIndexesCapacityMap + * // "": { + * // ReadCapacityUnits: Number("double"), + * // WriteCapacityUnits: Number("double"), + * // CapacityUnits: Number("double"), + * // }, + * // }, + * // GlobalSecondaryIndexes: { + * // "": { + * // ReadCapacityUnits: Number("double"), + * // WriteCapacityUnits: Number("double"), + * // CapacityUnits: Number("double"), + * // }, + * // }, + * // }, + * // ], + * // Responses: [ // ItemResponseList + * // { // ItemResponse + * // Item: { // AttributeMap + * // "": { // AttributeValue Union: only one key present + * // S: "STRING_VALUE", + * // N: "STRING_VALUE", + * // B: new Uint8Array(), + * // SS: [ // StringSetAttributeValue + * // "STRING_VALUE", + * // ], + * // NS: [ // NumberSetAttributeValue + * // "STRING_VALUE", + * // ], + * // BS: [ // BinarySetAttributeValue + * // new Uint8Array(), + * // ], + 
* // M: { // MapAttributeValue + * // "": {// Union: only one key present + * // S: "STRING_VALUE", + * // N: "STRING_VALUE", + * // B: new Uint8Array(), + * // SS: [ + * // "STRING_VALUE", + * // ], + * // NS: [ + * // "STRING_VALUE", + * // ], + * // BS: [ + * // new Uint8Array(), + * // ], + * // M: { + * // "": "", + * // }, + * // L: [ // ListAttributeValue + * // "", + * // ], + * // NULL: true || false, + * // BOOL: true || false, + * // }, + * // }, + * // L: [ + * // "", + * // ], + * // NULL: true || false, + * // BOOL: true || false, + * // }, + * // }, + * // }, + * // ], + * // }; + * + * ``` + * + * @param TransactGetItemsCommandInput - {@link TransactGetItemsCommandInput} + * @returns {@link TransactGetItemsCommandOutput} + * @see {@link TransactGetItemsCommandInput} for command's `input` shape. + * @see {@link TransactGetItemsCommandOutput} for command's `response` shape. + * @see {@link DynamoDBClientResolvedConfig | config} for DynamoDBClient's `config` shape. + * + * @throws {@link InternalServerError} (server fault) + *

An error occurred on the server side.

+ * + * @throws {@link InvalidEndpointException} (client fault) + * + * @throws {@link ProvisionedThroughputExceededException} (client fault) + *

Your request rate is too high. The Amazon Web Services SDKs for DynamoDB + * automatically retry requests that receive this exception. Your request is eventually + * successful, unless your retry queue is too large to finish. Reduce the frequency of + * requests and use exponential backoff. For more information, go to Error Retries and Exponential Backoff in the Amazon DynamoDB Developer Guide.

+ * + * @throws {@link RequestLimitExceeded} (client fault) + *

Throughput exceeds the current throughput quota for your account. Please contact + * Amazon Web ServicesSupport to request a + * quota increase.

+ * + * @throws {@link ResourceNotFoundException} (client fault) + *

The operation tried to access a nonexistent table or index. The resource might not + * be specified correctly, or its status might not be ACTIVE.

+ * + * @throws {@link TransactionCanceledException} (client fault) + *

The entire transaction request was canceled.

+ *

DynamoDB cancels a TransactWriteItems request under the following + * circumstances:

+ *
    + *
  • + *

    A condition in one of the condition expressions is not met.

    + *
  • + *
  • + *

    A table in the TransactWriteItems request is in a different + * account or region.

    + *
  • + *
  • + *

    More than one action in the TransactWriteItems operation + * targets the same item.

    + *
  • + *
  • + *

    There is insufficient provisioned capacity for the transaction to be + * completed.

    + *
  • + *
  • + *

    An item size becomes too large (larger than 400 KB), or a local secondary + * index (LSI) becomes too large, or a similar validation error occurs because of + * changes made by the transaction.

    + *
  • + *
  • + *

    There is a user error, such as an invalid data format.

    + *
  • + *
  • + *

    + * There is an ongoing TransactWriteItems operation that conflicts with a concurrent + * TransactWriteItems request. In this case the TransactWriteItems operation + * fails with a TransactionCanceledException. + *

    + *
  • + *
+ *

DynamoDB cancels a TransactGetItems request under the + * following circumstances:

+ *
    + *
  • + *

    There is an ongoing TransactGetItems operation that conflicts + * with a concurrent PutItem, UpdateItem, + * DeleteItem or TransactWriteItems request. In this + * case the TransactGetItems operation fails with a + * TransactionCanceledException.

    + *
  • + *
  • + *

    A table in the TransactGetItems request is in a different + * account or region.

    + *
  • + *
  • + *

    There is insufficient provisioned capacity for the transaction to be + * completed.

    + *
  • + *
  • + *

    There is a user error, such as an invalid data format.

    + *
  • + *
+ * + *

If using Java, DynamoDB lists the cancellation reasons on the + * CancellationReasons property. This property is not set for other + * languages. Transaction cancellation reasons are ordered in the order of requested + * items, if an item has no error it will have None code and + * Null message.

+ *
+ *

Cancellation reason codes and possible error messages:

+ *
    + *
  • + *

    No Errors:

    + *
      + *
    • + *

      Code: None + *

      + *
    • + *
    • + *

      Message: null + *

      + *
    • + *
    + *
  • + *
  • + *

    Conditional Check Failed:

    + *
      + *
    • + *

      Code: ConditionalCheckFailed + *

      + *
    • + *
    • + *

      Message: The conditional request failed.

      + *
    • + *
    + *
  • + *
  • + *

    Item Collection Size Limit Exceeded:

    + *
      + *
    • + *

      Code: ItemCollectionSizeLimitExceeded + *

      + *
    • + *
    • + *

      Message: Collection size exceeded.

      + *
    • + *
    + *
  • + *
  • + *

    Transaction Conflict:

    + *
      + *
    • + *

      Code: TransactionConflict + *

      + *
    • + *
    • + *

      Message: Transaction is ongoing for the item.

      + *
    • + *
    + *
  • + *
  • + *

    Provisioned Throughput Exceeded:

    + *
      + *
    • + *

      Code: ProvisionedThroughputExceeded + *

      + *
    • + *
    • + *

      Messages:

      + *
        + *
      • + *

        The level of configured provisioned throughput for the + * table was exceeded. Consider increasing your provisioning level + * with the UpdateTable API.

        + * + *

        This Message is received when provisioned throughput is + * exceeded is on a provisioned DynamoDB + * table.

        + *
        + *
      • + *
      • + *

        The level of configured provisioned throughput for one or + * more global secondary indexes of the table was exceeded. + * Consider increasing your provisioning level for the + * under-provisioned global secondary indexes with the UpdateTable + * API.

        + * + *

        This message is returned when provisioned throughput is + * exceeded is on a provisioned GSI.

        + *
        + *
      • + *
      + *
    • + *
    + *
  • + *
  • + *

    Throttling Error:

    + *
      + *
    • + *

      Code: ThrottlingError + *

      + *
    • + *
    • + *

      Messages:

      + *
        + *
      • + *

        Throughput exceeds the current capacity of your table or + * index. DynamoDB is automatically scaling your table or + * index so please try again shortly. If exceptions persist, check + * if you have a hot key: + * https://docs.aws.amazon.com/amazondynamodb/latest/developerguide/bp-partition-key-design.html.

        + * + *

        This message is returned when writes get throttled on an + * On-Demand table as DynamoDB is automatically + * scaling the table.

        + *
        + *
      • + *
      • + *

        Throughput exceeds the current capacity for one or more + * global secondary indexes. DynamoDB is automatically + * scaling your index so please try again shortly.

        + * + *

        This message is returned when writes get throttled on + * an On-Demand GSI as DynamoDB is automatically + * scaling the GSI.

        + *
        + *
      • + *
      + *
    • + *
    + *
  • + *
  • + *

    Validation Error:

    + *
      + *
    • + *

      Code: ValidationError + *

      + *
    • + *
    • + *

      Messages:

      + *
        + *
      • + *

        One or more parameter values were invalid.

        + *
      • + *
      • + *

        The update expression attempted to update the secondary + * index key beyond allowed size limits.

        + *
      • + *
      • + *

        The update expression attempted to update the secondary + * index key to unsupported type.

        + *
      • + *
      • + *

        An operand in the update expression has an incorrect data + * type.

        + *
      • + *
      • + *

        Item size to update has exceeded the maximum allowed + * size.

        + *
      • + *
      • + *

        Number overflow. Attempting to store a number with + * magnitude larger than supported range.

        + *
      • + *
      • + *

        Type mismatch for attribute to update.

        + *
      • + *
      • + *

        Nesting Levels have exceeded supported limits.

        + *
      • + *
      • + *

        The document path provided in the update expression is + * invalid for update.

        + *
      • + *
      • + *

        The provided expression refers to an attribute that does + * not exist in the item.

        + *
      • + *
      + *
    • + *
    + *
  • + *
+ * + * @throws {@link DynamoDBServiceException} + *

Base exception class for all service exceptions from DynamoDB service.

+ * + * + * @public + */ +export declare class TransactGetItemsCommand extends TransactGetItemsCommand_base { + /** @internal type navigation helper, not in runtime. */ + protected static __types: { + api: { + input: TransactGetItemsInput; + output: TransactGetItemsOutput; + }; + sdk: { + input: TransactGetItemsCommandInput; + output: TransactGetItemsCommandOutput; + }; + }; +} diff --git a/amplify/functions/deleteDocument/node_modules/@aws-sdk/client-dynamodb/dist-types/commands/TransactWriteItemsCommand.d.ts b/amplify/functions/deleteDocument/node_modules/@aws-sdk/client-dynamodb/dist-types/commands/TransactWriteItemsCommand.d.ts new file mode 100644 index 0000000..644f975 --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@aws-sdk/client-dynamodb/dist-types/commands/TransactWriteItemsCommand.d.ts @@ -0,0 +1,658 @@ +import { Command as $Command } from "@smithy/smithy-client"; +import { MetadataBearer as __MetadataBearer } from "@smithy/types"; +import { DynamoDBClientResolvedConfig, ServiceInputTypes, ServiceOutputTypes } from "../DynamoDBClient"; +import { TransactWriteItemsInput, TransactWriteItemsOutput } from "../models/models_0"; +/** + * @public + */ +export type { __MetadataBearer }; +export { $Command }; +/** + * @public + * + * The input for {@link TransactWriteItemsCommand}. + */ +export interface TransactWriteItemsCommandInput extends TransactWriteItemsInput { +} +/** + * @public + * + * The output of {@link TransactWriteItemsCommand}. + */ +export interface TransactWriteItemsCommandOutput extends TransactWriteItemsOutput, __MetadataBearer { +} +declare const TransactWriteItemsCommand_base: { + new (input: TransactWriteItemsCommandInput): import("@smithy/smithy-client").CommandImpl; + new (__0_0: TransactWriteItemsCommandInput): import("@smithy/smithy-client").CommandImpl; + getEndpointParameterInstructions(): import("@smithy/middleware-endpoint").EndpointParameterInstructions; +}; +/** + *

+ * TransactWriteItems is a synchronous write operation that groups up to 100 + * action requests. These actions can target items in different tables, but not in + * different Amazon Web Services accounts or Regions, and no two actions can target the same + * item. For example, you cannot both ConditionCheck and Update + * the same item. The aggregate size of the items in the transaction cannot exceed 4 + * MB.

+ *

The actions are completed atomically so that either all of them succeed, or all of + * them fail. They are defined by the following objects:

+ *
    + *
  • + *

    + * Put  —   Initiates a PutItem + * operation to write a new item. This structure specifies the primary key of the + * item to be written, the name of the table to write it in, an optional condition + * expression that must be satisfied for the write to succeed, a list of the item's + * attributes, and a field indicating whether to retrieve the item's attributes if + * the condition is not met.

    + *
  • + *
  • + *

    + * Update  —   Initiates an UpdateItem + * operation to update an existing item. This structure specifies the primary key + * of the item to be updated, the name of the table where it resides, an optional + * condition expression that must be satisfied for the update to succeed, an + * expression that defines one or more attributes to be updated, and a field + * indicating whether to retrieve the item's attributes if the condition is not + * met.

    + *
  • + *
  • + *

    + * Delete  —   Initiates a DeleteItem + * operation to delete an existing item. This structure specifies the primary key + * of the item to be deleted, the name of the table where it resides, an optional + * condition expression that must be satisfied for the deletion to succeed, and a + * field indicating whether to retrieve the item's attributes if the condition is + * not met.

    + *
  • + *
  • + *

    + * ConditionCheck  —   Applies a condition to an item + * that is not being modified by the transaction. This structure specifies the + * primary key of the item to be checked, the name of the table where it resides, a + * condition expression that must be satisfied for the transaction to succeed, and + * a field indicating whether to retrieve the item's attributes if the condition is + * not met.

    + *
  • + *
+ *

DynamoDB rejects the entire TransactWriteItems request if any of the + * following is true:

+ *
    + *
  • + *

    A condition in one of the condition expressions is not met.

    + *
  • + *
  • + *

    An ongoing operation is in the process of updating the same item.

    + *
  • + *
  • + *

    There is insufficient provisioned capacity for the transaction to be + * completed.

    + *
  • + *
  • + *

    An item size becomes too large (bigger than 400 KB), a local secondary index + * (LSI) becomes too large, or a similar validation error occurs because of changes + * made by the transaction.

    + *
  • + *
  • + *

    The aggregate size of the items in the transaction exceeds 4 MB.

    + *
  • + *
  • + *

    There is a user error, such as an invalid data format.

    + *
  • + *
+ * @example + * Use a bare-bones client and the command you need to make an API call. + * ```javascript + * import { DynamoDBClient, TransactWriteItemsCommand } from "@aws-sdk/client-dynamodb"; // ES Modules import + * // const { DynamoDBClient, TransactWriteItemsCommand } = require("@aws-sdk/client-dynamodb"); // CommonJS import + * const client = new DynamoDBClient(config); + * const input = { // TransactWriteItemsInput + * TransactItems: [ // TransactWriteItemList // required + * { // TransactWriteItem + * ConditionCheck: { // ConditionCheck + * Key: { // Key // required + * "": { // AttributeValue Union: only one key present + * S: "STRING_VALUE", + * N: "STRING_VALUE", + * B: new Uint8Array(), // e.g. Buffer.from("") or new TextEncoder().encode("") + * SS: [ // StringSetAttributeValue + * "STRING_VALUE", + * ], + * NS: [ // NumberSetAttributeValue + * "STRING_VALUE", + * ], + * BS: [ // BinarySetAttributeValue + * new Uint8Array(), // e.g. Buffer.from("") or new TextEncoder().encode("") + * ], + * M: { // MapAttributeValue + * "": {// Union: only one key present + * S: "STRING_VALUE", + * N: "STRING_VALUE", + * B: new Uint8Array(), // e.g. Buffer.from("") or new TextEncoder().encode("") + * SS: [ + * "STRING_VALUE", + * ], + * NS: [ + * "STRING_VALUE", + * ], + * BS: [ + * new Uint8Array(), // e.g. 
Buffer.from("") or new TextEncoder().encode("") + * ], + * M: { + * "": "", + * }, + * L: [ // ListAttributeValue + * "", + * ], + * NULL: true || false, + * BOOL: true || false, + * }, + * }, + * L: [ + * "", + * ], + * NULL: true || false, + * BOOL: true || false, + * }, + * }, + * TableName: "STRING_VALUE", // required + * ConditionExpression: "STRING_VALUE", // required + * ExpressionAttributeNames: { // ExpressionAttributeNameMap + * "": "STRING_VALUE", + * }, + * ExpressionAttributeValues: { // ExpressionAttributeValueMap + * "": "", + * }, + * ReturnValuesOnConditionCheckFailure: "ALL_OLD" || "NONE", + * }, + * Put: { // Put + * Item: { // PutItemInputAttributeMap // required + * "": "", + * }, + * TableName: "STRING_VALUE", // required + * ConditionExpression: "STRING_VALUE", + * ExpressionAttributeNames: { + * "": "STRING_VALUE", + * }, + * ExpressionAttributeValues: { + * "": "", + * }, + * ReturnValuesOnConditionCheckFailure: "ALL_OLD" || "NONE", + * }, + * Delete: { // Delete + * Key: { // required + * "": "", + * }, + * TableName: "STRING_VALUE", // required + * ConditionExpression: "STRING_VALUE", + * ExpressionAttributeNames: { + * "": "STRING_VALUE", + * }, + * ExpressionAttributeValues: { + * "": "", + * }, + * ReturnValuesOnConditionCheckFailure: "ALL_OLD" || "NONE", + * }, + * Update: { // Update + * Key: { // required + * "": "", + * }, + * UpdateExpression: "STRING_VALUE", // required + * TableName: "STRING_VALUE", // required + * ConditionExpression: "STRING_VALUE", + * ExpressionAttributeNames: { + * "": "STRING_VALUE", + * }, + * ExpressionAttributeValues: { + * "": "", + * }, + * ReturnValuesOnConditionCheckFailure: "ALL_OLD" || "NONE", + * }, + * }, + * ], + * ReturnConsumedCapacity: "INDEXES" || "TOTAL" || "NONE", + * ReturnItemCollectionMetrics: "SIZE" || "NONE", + * ClientRequestToken: "STRING_VALUE", + * }; + * const command = new TransactWriteItemsCommand(input); + * const response = await client.send(command); + * // { // 
TransactWriteItemsOutput + * // ConsumedCapacity: [ // ConsumedCapacityMultiple + * // { // ConsumedCapacity + * // TableName: "STRING_VALUE", + * // CapacityUnits: Number("double"), + * // ReadCapacityUnits: Number("double"), + * // WriteCapacityUnits: Number("double"), + * // Table: { // Capacity + * // ReadCapacityUnits: Number("double"), + * // WriteCapacityUnits: Number("double"), + * // CapacityUnits: Number("double"), + * // }, + * // LocalSecondaryIndexes: { // SecondaryIndexesCapacityMap + * // "": { + * // ReadCapacityUnits: Number("double"), + * // WriteCapacityUnits: Number("double"), + * // CapacityUnits: Number("double"), + * // }, + * // }, + * // GlobalSecondaryIndexes: { + * // "": { + * // ReadCapacityUnits: Number("double"), + * // WriteCapacityUnits: Number("double"), + * // CapacityUnits: Number("double"), + * // }, + * // }, + * // }, + * // ], + * // ItemCollectionMetrics: { // ItemCollectionMetricsPerTable + * // "": [ // ItemCollectionMetricsMultiple + * // { // ItemCollectionMetrics + * // ItemCollectionKey: { // ItemCollectionKeyAttributeMap + * // "": { // AttributeValue Union: only one key present + * // S: "STRING_VALUE", + * // N: "STRING_VALUE", + * // B: new Uint8Array(), + * // SS: [ // StringSetAttributeValue + * // "STRING_VALUE", + * // ], + * // NS: [ // NumberSetAttributeValue + * // "STRING_VALUE", + * // ], + * // BS: [ // BinarySetAttributeValue + * // new Uint8Array(), + * // ], + * // M: { // MapAttributeValue + * // "": {// Union: only one key present + * // S: "STRING_VALUE", + * // N: "STRING_VALUE", + * // B: new Uint8Array(), + * // SS: [ + * // "STRING_VALUE", + * // ], + * // NS: [ + * // "STRING_VALUE", + * // ], + * // BS: [ + * // new Uint8Array(), + * // ], + * // M: { + * // "": "", + * // }, + * // L: [ // ListAttributeValue + * // "", + * // ], + * // NULL: true || false, + * // BOOL: true || false, + * // }, + * // }, + * // L: [ + * // "", + * // ], + * // NULL: true || false, + * // BOOL: true || false, + 
* // }, + * // }, + * // SizeEstimateRangeGB: [ // ItemCollectionSizeEstimateRange + * // Number("double"), + * // ], + * // }, + * // ], + * // }, + * // }; + * + * ``` + * + * @param TransactWriteItemsCommandInput - {@link TransactWriteItemsCommandInput} + * @returns {@link TransactWriteItemsCommandOutput} + * @see {@link TransactWriteItemsCommandInput} for command's `input` shape. + * @see {@link TransactWriteItemsCommandOutput} for command's `response` shape. + * @see {@link DynamoDBClientResolvedConfig | config} for DynamoDBClient's `config` shape. + * + * @throws {@link IdempotentParameterMismatchException} (client fault) + *

DynamoDB rejected the request because you retried a request with a + * different payload but with an idempotent token that was already used.

+ * + * @throws {@link InternalServerError} (server fault) + *

An error occurred on the server side.

+ * + * @throws {@link InvalidEndpointException} (client fault) + * + * @throws {@link ProvisionedThroughputExceededException} (client fault) + *

Your request rate is too high. The Amazon Web Services SDKs for DynamoDB + * automatically retry requests that receive this exception. Your request is eventually + * successful, unless your retry queue is too large to finish. Reduce the frequency of + * requests and use exponential backoff. For more information, go to Error Retries and Exponential Backoff in the Amazon DynamoDB Developer Guide.

+ * + * @throws {@link RequestLimitExceeded} (client fault) + *

Throughput exceeds the current throughput quota for your account. Please contact + * Amazon Web ServicesSupport to request a + * quota increase.

+ * + * @throws {@link ResourceNotFoundException} (client fault) + *

The operation tried to access a nonexistent table or index. The resource might not + * be specified correctly, or its status might not be ACTIVE.

+ * + * @throws {@link TransactionCanceledException} (client fault) + *

The entire transaction request was canceled.

+ *

DynamoDB cancels a TransactWriteItems request under the following + * circumstances:

+ *
    + *
  • + *

    A condition in one of the condition expressions is not met.

    + *
  • + *
  • + *

    A table in the TransactWriteItems request is in a different + * account or region.

    + *
  • + *
  • + *

    More than one action in the TransactWriteItems operation + * targets the same item.

    + *
  • + *
  • + *

    There is insufficient provisioned capacity for the transaction to be + * completed.

    + *
  • + *
  • + *

    An item size becomes too large (larger than 400 KB), or a local secondary + * index (LSI) becomes too large, or a similar validation error occurs because of + * changes made by the transaction.

    + *
  • + *
  • + *

    There is a user error, such as an invalid data format.

    + *
  • + *
  • + *

    + * There is an ongoing TransactWriteItems operation that conflicts with a concurrent + * TransactWriteItems request. In this case the TransactWriteItems operation + * fails with a TransactionCanceledException. + *

    + *
  • + *
+ *

DynamoDB cancels a TransactGetItems request under the + * following circumstances:

+ *
    + *
  • + *

    There is an ongoing TransactGetItems operation that conflicts + * with a concurrent PutItem, UpdateItem, + * DeleteItem or TransactWriteItems request. In this + * case the TransactGetItems operation fails with a + * TransactionCanceledException.

    + *
  • + *
  • + *

    A table in the TransactGetItems request is in a different + * account or region.

    + *
  • + *
  • + *

    There is insufficient provisioned capacity for the transaction to be + * completed.

    + *
  • + *
  • + *

    There is a user error, such as an invalid data format.

    + *
  • + *
+ * + *

If using Java, DynamoDB lists the cancellation reasons on the + * CancellationReasons property. This property is not set for other + * languages. Transaction cancellation reasons are ordered in the order of requested + * items, if an item has no error it will have None code and + * Null message.

+ *
+ *

Cancellation reason codes and possible error messages:

+ *
    + *
  • + *

    No Errors:

    + *
      + *
    • + *

      Code: None + *

      + *
    • + *
    • + *

      Message: null + *

      + *
    • + *
    + *
  • + *
  • + *

    Conditional Check Failed:

    + *
      + *
    • + *

      Code: ConditionalCheckFailed + *

      + *
    • + *
    • + *

      Message: The conditional request failed.

      + *
    • + *
    + *
  • + *
  • + *

    Item Collection Size Limit Exceeded:

    + *
      + *
    • + *

      Code: ItemCollectionSizeLimitExceeded + *

      + *
    • + *
    • + *

      Message: Collection size exceeded.

      + *
    • + *
    + *
  • + *
  • + *

    Transaction Conflict:

    + *
      + *
    • + *

      Code: TransactionConflict + *

      + *
    • + *
    • + *

      Message: Transaction is ongoing for the item.

      + *
    • + *
    + *
  • + *
  • + *

    Provisioned Throughput Exceeded:

    + *
      + *
    • + *

      Code: ProvisionedThroughputExceeded + *

      + *
    • + *
    • + *

      Messages:

      + *
        + *
      • + *

        The level of configured provisioned throughput for the + * table was exceeded. Consider increasing your provisioning level + * with the UpdateTable API.

        + * + *

        This Message is received when provisioned throughput is + * exceeded is on a provisioned DynamoDB + * table.

        + *
        + *
      • + *
      • + *

        The level of configured provisioned throughput for one or + * more global secondary indexes of the table was exceeded. + * Consider increasing your provisioning level for the + * under-provisioned global secondary indexes with the UpdateTable + * API.

        + * + *

        This message is returned when provisioned throughput is + * exceeded is on a provisioned GSI.

        + *
        + *
      • + *
      + *
    • + *
    + *
  • + *
  • + *

    Throttling Error:

    + *
      + *
    • + *

      Code: ThrottlingError + *

      + *
    • + *
    • + *

      Messages:

      + *
        + *
      • + *

        Throughput exceeds the current capacity of your table or + * index. DynamoDB is automatically scaling your table or + * index so please try again shortly. If exceptions persist, check + * if you have a hot key: + * https://docs.aws.amazon.com/amazondynamodb/latest/developerguide/bp-partition-key-design.html.

        + * + *

        This message is returned when writes get throttled on an + * On-Demand table as DynamoDB is automatically + * scaling the table.

        + *
        + *
      • + *
      • + *

        Throughput exceeds the current capacity for one or more + * global secondary indexes. DynamoDB is automatically + * scaling your index so please try again shortly.

        + * + *

        This message is returned when writes get throttled on + * an On-Demand GSI as DynamoDB is automatically + * scaling the GSI.

        + *
        + *
      • + *
      + *
    • + *
    + *
  • + *
  • + *

    Validation Error:

    + *
      + *
    • + *

      Code: ValidationError + *

      + *
    • + *
    • + *

      Messages:

      + *
        + *
      • + *

        One or more parameter values were invalid.

        + *
      • + *
      • + *

        The update expression attempted to update the secondary + * index key beyond allowed size limits.

        + *
      • + *
      • + *

        The update expression attempted to update the secondary + * index key to unsupported type.

        + *
      • + *
      • + *

        An operand in the update expression has an incorrect data + * type.

        + *
      • + *
      • + *

        Item size to update has exceeded the maximum allowed + * size.

        + *
      • + *
      • + *

        Number overflow. Attempting to store a number with + * magnitude larger than supported range.

        + *
      • + *
      • + *

        Type mismatch for attribute to update.

        + *
      • + *
      • + *

        Nesting Levels have exceeded supported limits.

        + *
      • + *
      • + *

        The document path provided in the update expression is + * invalid for update.

        + *
      • + *
      • + *

        The provided expression refers to an attribute that does + * not exist in the item.

        + *
      • + *
      + *
    • + *
    + *
  • + *
+ * + * @throws {@link TransactionInProgressException} (client fault) + *

The transaction with the given request token is already in progress.

+ *

+ * Recommended Settings + *

+ * + *

+ * This is a general recommendation for handling the TransactionInProgressException. These settings help + * ensure that the client retries will trigger completion of the ongoing TransactWriteItems request. + *

+ *
+ *
    + *
  • + *

    + * Set clientExecutionTimeout to a value that allows at least one retry to be processed after 5 + * seconds have elapsed since the first attempt for the TransactWriteItems operation. + *

    + *
  • + *
  • + *

    + * Set socketTimeout to a value a little lower than the requestTimeout setting. + *

    + *
  • + *
  • + *

    + * requestTimeout should be set based on the time taken for the individual retries of a single + * HTTP request for your use case, but setting it to 1 second or higher should work well to reduce chances of + * retries and TransactionInProgressException errors. + *

    + *
  • + *
  • + *

    + * Use exponential backoff when retrying and tune backoff if needed. + *

    + *
  • + *
+ *

+ * Assuming default retry policy, + * example timeout settings based on the guidelines above are as follows: + *

+ *

Example timeline:

+ *
    + *
  • + *

    0-1000 first attempt

    + *
  • + *
  • + *

    1000-1500 first sleep/delay (default retry policy uses 500 ms as base delay for 4xx errors)

    + *
  • + *
  • + *

    1500-2500 second attempt

    + *
  • + *
  • + *

    2500-3500 second sleep/delay (500 * 2, exponential backoff)

    + *
  • + *
  • + *

    3500-4500 third attempt

    + *
  • + *
  • + *

    4500-6500 third sleep/delay (500 * 2^2)

    + *
  • + *
  • + *

    6500-7500 fourth attempt (this can trigger inline recovery since 5 seconds have elapsed since the first attempt reached TC)

    + *
  • + *
+ * + * @throws {@link DynamoDBServiceException} + *

Base exception class for all service exceptions from DynamoDB service.

+ * + * + * @public + */ +export declare class TransactWriteItemsCommand extends TransactWriteItemsCommand_base { + /** @internal type navigation helper, not in runtime. */ + protected static __types: { + api: { + input: TransactWriteItemsInput; + output: TransactWriteItemsOutput; + }; + sdk: { + input: TransactWriteItemsCommandInput; + output: TransactWriteItemsCommandOutput; + }; + }; +} diff --git a/amplify/functions/deleteDocument/node_modules/@aws-sdk/client-dynamodb/dist-types/commands/UntagResourceCommand.d.ts b/amplify/functions/deleteDocument/node_modules/@aws-sdk/client-dynamodb/dist-types/commands/UntagResourceCommand.d.ts new file mode 100644 index 0000000..70f702d --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@aws-sdk/client-dynamodb/dist-types/commands/UntagResourceCommand.d.ts @@ -0,0 +1,134 @@ +import { Command as $Command } from "@smithy/smithy-client"; +import { MetadataBearer as __MetadataBearer } from "@smithy/types"; +import { DynamoDBClientResolvedConfig, ServiceInputTypes, ServiceOutputTypes } from "../DynamoDBClient"; +import { UntagResourceInput } from "../models/models_0"; +/** + * @public + */ +export type { __MetadataBearer }; +export { $Command }; +/** + * @public + * + * The input for {@link UntagResourceCommand}. + */ +export interface UntagResourceCommandInput extends UntagResourceInput { +} +/** + * @public + * + * The output of {@link UntagResourceCommand}. + */ +export interface UntagResourceCommandOutput extends __MetadataBearer { +} +declare const UntagResourceCommand_base: { + new (input: UntagResourceCommandInput): import("@smithy/smithy-client").CommandImpl; + new (__0_0: UntagResourceCommandInput): import("@smithy/smithy-client").CommandImpl; + getEndpointParameterInstructions(): import("@smithy/middleware-endpoint").EndpointParameterInstructions; +}; +/** + *

Removes the association of tags from an Amazon DynamoDB resource. You can call + * UntagResource up to five times per second, per account.

+ *
    + *
  • + *

    + * UntagResource is an asynchronous operation. If you issue a ListTagsOfResource request immediately after an + * UntagResource request, DynamoDB might return your + * previous tag set, if there was one, or an empty tag set. This is because + * ListTagsOfResource uses an eventually consistent query, and the + * metadata for your tags or table might not be available at that moment. Wait for + * a few seconds, and then try the ListTagsOfResource request + * again.

    + *
  • + *
  • + *

    The application or removal of tags using TagResource and + * UntagResource APIs is eventually consistent. + * ListTagsOfResource API will only reflect the changes after a + * few seconds.

    + *
  • + *
+ *

For an overview on tagging DynamoDB resources, see Tagging for DynamoDB + * in the Amazon DynamoDB Developer Guide.

+ * @example + * Use a bare-bones client and the command you need to make an API call. + * ```javascript + * import { DynamoDBClient, UntagResourceCommand } from "@aws-sdk/client-dynamodb"; // ES Modules import + * // const { DynamoDBClient, UntagResourceCommand } = require("@aws-sdk/client-dynamodb"); // CommonJS import + * const client = new DynamoDBClient(config); + * const input = { // UntagResourceInput + * ResourceArn: "STRING_VALUE", // required + * TagKeys: [ // TagKeyList // required + * "STRING_VALUE", + * ], + * }; + * const command = new UntagResourceCommand(input); + * const response = await client.send(command); + * // {}; + * + * ``` + * + * @param UntagResourceCommandInput - {@link UntagResourceCommandInput} + * @returns {@link UntagResourceCommandOutput} + * @see {@link UntagResourceCommandInput} for command's `input` shape. + * @see {@link UntagResourceCommandOutput} for command's `response` shape. + * @see {@link DynamoDBClientResolvedConfig | config} for DynamoDBClient's `config` shape. + * + * @throws {@link InternalServerError} (server fault) + *

An error occurred on the server side.

+ * + * @throws {@link InvalidEndpointException} (client fault) + * + * @throws {@link LimitExceededException} (client fault) + *

There is no limit to the number of daily on-demand backups that can be taken.

+ *

For most purposes, up to 500 simultaneous table operations are allowed per account. These operations + * include CreateTable, UpdateTable, + * DeleteTable,UpdateTimeToLive, + * RestoreTableFromBackup, and RestoreTableToPointInTime.

+ *

When you are creating a table with one or more secondary + * indexes, you can have up to 250 such requests running at a time. However, if the table or + * index specifications are complex, then DynamoDB might temporarily reduce the number + * of concurrent operations.

+ *

When importing into DynamoDB, up to 50 simultaneous import table operations are allowed per account.

+ *

There is a soft account quota of 2,500 tables.

+ *

GetRecords was called with a value of more than 1000 for the limit request parameter.

+ *

More than 2 processes are reading from the same streams shard at the same time. Exceeding + * this limit may result in request throttling.

+ * + * @throws {@link ResourceInUseException} (client fault) + *

The operation conflicts with the resource's availability. For example:

+ *
    + *
  • + *

    You attempted to recreate an existing table.

    + *
  • + *
  • + *

    You tried to delete a table currently in the CREATING state.

    + *
  • + *
  • + *

    You tried to update a resource that was already being updated.

    + *
  • + *
+ *

When appropriate, wait for the ongoing update to complete and attempt the request again.

+ * + * @throws {@link ResourceNotFoundException} (client fault) + *

The operation tried to access a nonexistent table or index. The resource might not + * be specified correctly, or its status might not be ACTIVE.

+ * + * @throws {@link DynamoDBServiceException} + *

Base exception class for all service exceptions from DynamoDB service.

+ * + * + * @public + */ +export declare class UntagResourceCommand extends UntagResourceCommand_base { + /** @internal type navigation helper, not in runtime. */ + protected static __types: { + api: { + input: UntagResourceInput; + output: {}; + }; + sdk: { + input: UntagResourceCommandInput; + output: UntagResourceCommandOutput; + }; + }; +} diff --git a/amplify/functions/deleteDocument/node_modules/@aws-sdk/client-dynamodb/dist-types/commands/UpdateContinuousBackupsCommand.d.ts b/amplify/functions/deleteDocument/node_modules/@aws-sdk/client-dynamodb/dist-types/commands/UpdateContinuousBackupsCommand.d.ts new file mode 100644 index 0000000..c15d7f7 --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@aws-sdk/client-dynamodb/dist-types/commands/UpdateContinuousBackupsCommand.d.ts @@ -0,0 +1,108 @@ +import { Command as $Command } from "@smithy/smithy-client"; +import { MetadataBearer as __MetadataBearer } from "@smithy/types"; +import { DynamoDBClientResolvedConfig, ServiceInputTypes, ServiceOutputTypes } from "../DynamoDBClient"; +import { UpdateContinuousBackupsInput, UpdateContinuousBackupsOutput } from "../models/models_0"; +/** + * @public + */ +export type { __MetadataBearer }; +export { $Command }; +/** + * @public + * + * The input for {@link UpdateContinuousBackupsCommand}. + */ +export interface UpdateContinuousBackupsCommandInput extends UpdateContinuousBackupsInput { +} +/** + * @public + * + * The output of {@link UpdateContinuousBackupsCommand}. + */ +export interface UpdateContinuousBackupsCommandOutput extends UpdateContinuousBackupsOutput, __MetadataBearer { +} +declare const UpdateContinuousBackupsCommand_base: { + new (input: UpdateContinuousBackupsCommandInput): import("@smithy/smithy-client").CommandImpl; + new (__0_0: UpdateContinuousBackupsCommandInput): import("@smithy/smithy-client").CommandImpl; + getEndpointParameterInstructions(): import("@smithy/middleware-endpoint").EndpointParameterInstructions; +}; +/** + *

+ * UpdateContinuousBackups enables or disables point in time recovery for + * the specified table. A successful UpdateContinuousBackups call returns the + * current ContinuousBackupsDescription. Continuous backups are + * ENABLED on all tables at table creation. If point in time recovery is + * enabled, PointInTimeRecoveryStatus will be set to ENABLED.

+ *

Once continuous backups and point in time recovery are enabled, you can restore to + * any point in time within EarliestRestorableDateTime and + * LatestRestorableDateTime.

+ *

+ * LatestRestorableDateTime is typically 5 minutes before the current time. + * You can restore your table to any point in time in the last 35 days. You can set the + * RecoveryPeriodInDays to any value between 1 and 35 days.

+ * @example + * Use a bare-bones client and the command you need to make an API call. + * ```javascript + * import { DynamoDBClient, UpdateContinuousBackupsCommand } from "@aws-sdk/client-dynamodb"; // ES Modules import + * // const { DynamoDBClient, UpdateContinuousBackupsCommand } = require("@aws-sdk/client-dynamodb"); // CommonJS import + * const client = new DynamoDBClient(config); + * const input = { // UpdateContinuousBackupsInput + * TableName: "STRING_VALUE", // required + * PointInTimeRecoverySpecification: { // PointInTimeRecoverySpecification + * PointInTimeRecoveryEnabled: true || false, // required + * RecoveryPeriodInDays: Number("int"), + * }, + * }; + * const command = new UpdateContinuousBackupsCommand(input); + * const response = await client.send(command); + * // { // UpdateContinuousBackupsOutput + * // ContinuousBackupsDescription: { // ContinuousBackupsDescription + * // ContinuousBackupsStatus: "ENABLED" || "DISABLED", // required + * // PointInTimeRecoveryDescription: { // PointInTimeRecoveryDescription + * // PointInTimeRecoveryStatus: "ENABLED" || "DISABLED", + * // RecoveryPeriodInDays: Number("int"), + * // EarliestRestorableDateTime: new Date("TIMESTAMP"), + * // LatestRestorableDateTime: new Date("TIMESTAMP"), + * // }, + * // }, + * // }; + * + * ``` + * + * @param UpdateContinuousBackupsCommandInput - {@link UpdateContinuousBackupsCommandInput} + * @returns {@link UpdateContinuousBackupsCommandOutput} + * @see {@link UpdateContinuousBackupsCommandInput} for command's `input` shape. + * @see {@link UpdateContinuousBackupsCommandOutput} for command's `response` shape. + * @see {@link DynamoDBClientResolvedConfig | config} for DynamoDBClient's `config` shape. + * + * @throws {@link ContinuousBackupsUnavailableException} (client fault) + *

Backups have not yet been enabled for this table.

+ * + * @throws {@link InternalServerError} (server fault) + *

An error occurred on the server side.

+ * + * @throws {@link InvalidEndpointException} (client fault) + * + * @throws {@link TableNotFoundException} (client fault) + *

A source table with the name TableName does not currently exist within + * the subscriber's account or the subscriber is operating in the wrong Amazon Web Services Region.

+ * + * @throws {@link DynamoDBServiceException} + *

Base exception class for all service exceptions from DynamoDB service.

+ * + * + * @public + */ +export declare class UpdateContinuousBackupsCommand extends UpdateContinuousBackupsCommand_base { + /** @internal type navigation helper, not in runtime. */ + protected static __types: { + api: { + input: UpdateContinuousBackupsInput; + output: UpdateContinuousBackupsOutput; + }; + sdk: { + input: UpdateContinuousBackupsCommandInput; + output: UpdateContinuousBackupsCommandOutput; + }; + }; +} diff --git a/amplify/functions/deleteDocument/node_modules/@aws-sdk/client-dynamodb/dist-types/commands/UpdateContributorInsightsCommand.d.ts b/amplify/functions/deleteDocument/node_modules/@aws-sdk/client-dynamodb/dist-types/commands/UpdateContributorInsightsCommand.d.ts new file mode 100644 index 0000000..61c9981 --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@aws-sdk/client-dynamodb/dist-types/commands/UpdateContributorInsightsCommand.d.ts @@ -0,0 +1,89 @@ +import { Command as $Command } from "@smithy/smithy-client"; +import { MetadataBearer as __MetadataBearer } from "@smithy/types"; +import { DynamoDBClientResolvedConfig, ServiceInputTypes, ServiceOutputTypes } from "../DynamoDBClient"; +import { UpdateContributorInsightsInput, UpdateContributorInsightsOutput } from "../models/models_0"; +/** + * @public + */ +export type { __MetadataBearer }; +export { $Command }; +/** + * @public + * + * The input for {@link UpdateContributorInsightsCommand}. + */ +export interface UpdateContributorInsightsCommandInput extends UpdateContributorInsightsInput { +} +/** + * @public + * + * The output of {@link UpdateContributorInsightsCommand}. 
+ */ +export interface UpdateContributorInsightsCommandOutput extends UpdateContributorInsightsOutput, __MetadataBearer { +} +declare const UpdateContributorInsightsCommand_base: { + new (input: UpdateContributorInsightsCommandInput): import("@smithy/smithy-client").CommandImpl; + new (__0_0: UpdateContributorInsightsCommandInput): import("@smithy/smithy-client").CommandImpl; + getEndpointParameterInstructions(): import("@smithy/middleware-endpoint").EndpointParameterInstructions; +}; +/** + *

Updates the status for contributor insights for a specific table or index. CloudWatch + * Contributor Insights for DynamoDB graphs display the partition key and (if applicable) + * sort key of frequently accessed items and frequently throttled items in plaintext. If + * you require the use of Amazon Web Services Key Management Service (KMS) to encrypt this + * table’s partition key and sort key data with an Amazon Web Services managed key or + * customer managed key, you should not enable CloudWatch Contributor Insights for DynamoDB + * for this table.

+ * @example + * Use a bare-bones client and the command you need to make an API call. + * ```javascript + * import { DynamoDBClient, UpdateContributorInsightsCommand } from "@aws-sdk/client-dynamodb"; // ES Modules import + * // const { DynamoDBClient, UpdateContributorInsightsCommand } = require("@aws-sdk/client-dynamodb"); // CommonJS import + * const client = new DynamoDBClient(config); + * const input = { // UpdateContributorInsightsInput + * TableName: "STRING_VALUE", // required + * IndexName: "STRING_VALUE", + * ContributorInsightsAction: "ENABLE" || "DISABLE", // required + * }; + * const command = new UpdateContributorInsightsCommand(input); + * const response = await client.send(command); + * // { // UpdateContributorInsightsOutput + * // TableName: "STRING_VALUE", + * // IndexName: "STRING_VALUE", + * // ContributorInsightsStatus: "ENABLING" || "ENABLED" || "DISABLING" || "DISABLED" || "FAILED", + * // }; + * + * ``` + * + * @param UpdateContributorInsightsCommandInput - {@link UpdateContributorInsightsCommandInput} + * @returns {@link UpdateContributorInsightsCommandOutput} + * @see {@link UpdateContributorInsightsCommandInput} for command's `input` shape. + * @see {@link UpdateContributorInsightsCommandOutput} for command's `response` shape. + * @see {@link DynamoDBClientResolvedConfig | config} for DynamoDBClient's `config` shape. + * + * @throws {@link InternalServerError} (server fault) + *

An error occurred on the server side.

+ * + * @throws {@link ResourceNotFoundException} (client fault) + *

The operation tried to access a nonexistent table or index. The resource might not + * be specified correctly, or its status might not be ACTIVE.

+ * + * @throws {@link DynamoDBServiceException} + *

Base exception class for all service exceptions from DynamoDB service.

+ * + * + * @public + */ +export declare class UpdateContributorInsightsCommand extends UpdateContributorInsightsCommand_base { + /** @internal type navigation helper, not in runtime. */ + protected static __types: { + api: { + input: UpdateContributorInsightsInput; + output: UpdateContributorInsightsOutput; + }; + sdk: { + input: UpdateContributorInsightsCommandInput; + output: UpdateContributorInsightsCommandOutput; + }; + }; +} diff --git a/amplify/functions/deleteDocument/node_modules/@aws-sdk/client-dynamodb/dist-types/commands/UpdateGlobalTableCommand.d.ts b/amplify/functions/deleteDocument/node_modules/@aws-sdk/client-dynamodb/dist-types/commands/UpdateGlobalTableCommand.d.ts new file mode 100644 index 0000000..ff0b588 --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@aws-sdk/client-dynamodb/dist-types/commands/UpdateGlobalTableCommand.d.ts @@ -0,0 +1,176 @@ +import { Command as $Command } from "@smithy/smithy-client"; +import { MetadataBearer as __MetadataBearer } from "@smithy/types"; +import { DynamoDBClientResolvedConfig, ServiceInputTypes, ServiceOutputTypes } from "../DynamoDBClient"; +import { UpdateGlobalTableInput, UpdateGlobalTableOutput } from "../models/models_0"; +/** + * @public + */ +export type { __MetadataBearer }; +export { $Command }; +/** + * @public + * + * The input for {@link UpdateGlobalTableCommand}. + */ +export interface UpdateGlobalTableCommandInput extends UpdateGlobalTableInput { +} +/** + * @public + * + * The output of {@link UpdateGlobalTableCommand}. + */ +export interface UpdateGlobalTableCommandOutput extends UpdateGlobalTableOutput, __MetadataBearer { +} +declare const UpdateGlobalTableCommand_base: { + new (input: UpdateGlobalTableCommandInput): import("@smithy/smithy-client").CommandImpl; + new (__0_0: UpdateGlobalTableCommandInput): import("@smithy/smithy-client").CommandImpl; + getEndpointParameterInstructions(): import("@smithy/middleware-endpoint").EndpointParameterInstructions; +}; +/** + *

Adds or removes replicas in the specified global table. The global table must already + * exist to be able to use this operation. Any replica to be added must be empty, have the + * same name as the global table, have the same key schema, have DynamoDB Streams enabled, + * and have the same provisioned and maximum write capacity units.

+ * + *

This documentation is for version 2017.11.29 (Legacy) of global tables, which should be avoided for new global tables. Customers should use Global Tables version 2019.11.21 (Current) when possible, because it provides greater flexibility, higher efficiency, and consumes less write capacity than 2017.11.29 (Legacy).

+ *

To determine which version you're using, see Determining the global table version you are using. To update existing global tables from version 2017.11.29 (Legacy) to version 2019.11.21 (Current), see Upgrading global tables.

+ *
+ * + *

For global tables, this operation only applies to global tables using Version + * 2019.11.21 (Current version). If you are using global tables Version + * 2019.11.21 you can use UpdateTable instead.

+ *

Although you can use UpdateGlobalTable to add replicas and remove + * replicas in a single request, for simplicity we recommend that you issue separate + * requests for adding or removing replicas.

+ *
+ *

If global secondary indexes are specified, then the following conditions must also be + * met:

+ *
    + *
  • + *

    The global secondary indexes must have the same name.

    + *
  • + *
  • + *

    The global secondary indexes must have the same hash key and sort key (if + * present).

    + *
  • + *
  • + *

    The global secondary indexes must have the same provisioned and maximum write + * capacity units.

    + *
  • + *
+ * @example + * Use a bare-bones client and the command you need to make an API call. + * ```javascript + * import { DynamoDBClient, UpdateGlobalTableCommand } from "@aws-sdk/client-dynamodb"; // ES Modules import + * // const { DynamoDBClient, UpdateGlobalTableCommand } = require("@aws-sdk/client-dynamodb"); // CommonJS import + * const client = new DynamoDBClient(config); + * const input = { // UpdateGlobalTableInput + * GlobalTableName: "STRING_VALUE", // required + * ReplicaUpdates: [ // ReplicaUpdateList // required + * { // ReplicaUpdate + * Create: { // CreateReplicaAction + * RegionName: "STRING_VALUE", // required + * }, + * Delete: { // DeleteReplicaAction + * RegionName: "STRING_VALUE", // required + * }, + * }, + * ], + * }; + * const command = new UpdateGlobalTableCommand(input); + * const response = await client.send(command); + * // { // UpdateGlobalTableOutput + * // GlobalTableDescription: { // GlobalTableDescription + * // ReplicationGroup: [ // ReplicaDescriptionList + * // { // ReplicaDescription + * // RegionName: "STRING_VALUE", + * // ReplicaStatus: "CREATING" || "CREATION_FAILED" || "UPDATING" || "DELETING" || "ACTIVE" || "REGION_DISABLED" || "INACCESSIBLE_ENCRYPTION_CREDENTIALS", + * // ReplicaStatusDescription: "STRING_VALUE", + * // ReplicaStatusPercentProgress: "STRING_VALUE", + * // KMSMasterKeyId: "STRING_VALUE", + * // ProvisionedThroughputOverride: { // ProvisionedThroughputOverride + * // ReadCapacityUnits: Number("long"), + * // }, + * // OnDemandThroughputOverride: { // OnDemandThroughputOverride + * // MaxReadRequestUnits: Number("long"), + * // }, + * // WarmThroughput: { // TableWarmThroughputDescription + * // ReadUnitsPerSecond: Number("long"), + * // WriteUnitsPerSecond: Number("long"), + * // Status: "CREATING" || "UPDATING" || "DELETING" || "ACTIVE" || "INACCESSIBLE_ENCRYPTION_CREDENTIALS" || "ARCHIVING" || "ARCHIVED", + * // }, + * // GlobalSecondaryIndexes: [ // ReplicaGlobalSecondaryIndexDescriptionList + * // { // 
ReplicaGlobalSecondaryIndexDescription + * // IndexName: "STRING_VALUE", + * // ProvisionedThroughputOverride: { + * // ReadCapacityUnits: Number("long"), + * // }, + * // OnDemandThroughputOverride: { + * // MaxReadRequestUnits: Number("long"), + * // }, + * // WarmThroughput: { // GlobalSecondaryIndexWarmThroughputDescription + * // ReadUnitsPerSecond: Number("long"), + * // WriteUnitsPerSecond: Number("long"), + * // Status: "CREATING" || "UPDATING" || "DELETING" || "ACTIVE", + * // }, + * // }, + * // ], + * // ReplicaInaccessibleDateTime: new Date("TIMESTAMP"), + * // ReplicaTableClassSummary: { // TableClassSummary + * // TableClass: "STANDARD" || "STANDARD_INFREQUENT_ACCESS", + * // LastUpdateDateTime: new Date("TIMESTAMP"), + * // }, + * // }, + * // ], + * // GlobalTableArn: "STRING_VALUE", + * // CreationDateTime: new Date("TIMESTAMP"), + * // GlobalTableStatus: "CREATING" || "ACTIVE" || "DELETING" || "UPDATING", + * // GlobalTableName: "STRING_VALUE", + * // }, + * // }; + * + * ``` + * + * @param UpdateGlobalTableCommandInput - {@link UpdateGlobalTableCommandInput} + * @returns {@link UpdateGlobalTableCommandOutput} + * @see {@link UpdateGlobalTableCommandInput} for command's `input` shape. + * @see {@link UpdateGlobalTableCommandOutput} for command's `response` shape. + * @see {@link DynamoDBClientResolvedConfig | config} for DynamoDBClient's `config` shape. + * + * @throws {@link GlobalTableNotFoundException} (client fault) + *

The specified global table does not exist.

+ * + * @throws {@link InternalServerError} (server fault) + *

An error occurred on the server side.

+ * + * @throws {@link InvalidEndpointException} (client fault) + * + * @throws {@link ReplicaAlreadyExistsException} (client fault) + *

The specified replica is already part of the global table.

+ * + * @throws {@link ReplicaNotFoundException} (client fault) + *

The specified replica is no longer part of the global table.

+ * + * @throws {@link TableNotFoundException} (client fault) + *

A source table with the name TableName does not currently exist within + * the subscriber's account or the subscriber is operating in the wrong Amazon Web Services Region.

+ * + * @throws {@link DynamoDBServiceException} + *

Base exception class for all service exceptions from DynamoDB service.

+ * + * + * @public + */ +export declare class UpdateGlobalTableCommand extends UpdateGlobalTableCommand_base { + /** @internal type navigation helper, not in runtime. */ + protected static __types: { + api: { + input: UpdateGlobalTableInput; + output: UpdateGlobalTableOutput; + }; + sdk: { + input: UpdateGlobalTableCommandInput; + output: UpdateGlobalTableCommandOutput; + }; + }; +} diff --git a/amplify/functions/deleteDocument/node_modules/@aws-sdk/client-dynamodb/dist-types/commands/UpdateGlobalTableSettingsCommand.d.ts b/amplify/functions/deleteDocument/node_modules/@aws-sdk/client-dynamodb/dist-types/commands/UpdateGlobalTableSettingsCommand.d.ts new file mode 100644 index 0000000..bf002ac --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@aws-sdk/client-dynamodb/dist-types/commands/UpdateGlobalTableSettingsCommand.d.ts @@ -0,0 +1,280 @@ +import { Command as $Command } from "@smithy/smithy-client"; +import { MetadataBearer as __MetadataBearer } from "@smithy/types"; +import { DynamoDBClientResolvedConfig, ServiceInputTypes, ServiceOutputTypes } from "../DynamoDBClient"; +import { UpdateGlobalTableSettingsInput, UpdateGlobalTableSettingsOutput } from "../models/models_0"; +/** + * @public + */ +export type { __MetadataBearer }; +export { $Command }; +/** + * @public + * + * The input for {@link UpdateGlobalTableSettingsCommand}. + */ +export interface UpdateGlobalTableSettingsCommandInput extends UpdateGlobalTableSettingsInput { +} +/** + * @public + * + * The output of {@link UpdateGlobalTableSettingsCommand}. 
+ */ +export interface UpdateGlobalTableSettingsCommandOutput extends UpdateGlobalTableSettingsOutput, __MetadataBearer { +} +declare const UpdateGlobalTableSettingsCommand_base: { + new (input: UpdateGlobalTableSettingsCommandInput): import("@smithy/smithy-client").CommandImpl; + new (__0_0: UpdateGlobalTableSettingsCommandInput): import("@smithy/smithy-client").CommandImpl; + getEndpointParameterInstructions(): import("@smithy/middleware-endpoint").EndpointParameterInstructions; +}; +/** + *

Updates settings for a global table.

+ * + *

This documentation is for version 2017.11.29 (Legacy) of global tables, which should be avoided for new global tables. Customers should use Global Tables version 2019.11.21 (Current) when possible, because it provides greater flexibility, higher efficiency, and consumes less write capacity than 2017.11.29 (Legacy).

+ *

To determine which version you're using, see Determining the global table version you are using. To update existing global tables from version 2017.11.29 (Legacy) to version 2019.11.21 (Current), see Upgrading global tables.

+ *
+ * @example + * Use a bare-bones client and the command you need to make an API call. + * ```javascript + * import { DynamoDBClient, UpdateGlobalTableSettingsCommand } from "@aws-sdk/client-dynamodb"; // ES Modules import + * // const { DynamoDBClient, UpdateGlobalTableSettingsCommand } = require("@aws-sdk/client-dynamodb"); // CommonJS import + * const client = new DynamoDBClient(config); + * const input = { // UpdateGlobalTableSettingsInput + * GlobalTableName: "STRING_VALUE", // required + * GlobalTableBillingMode: "PROVISIONED" || "PAY_PER_REQUEST", + * GlobalTableProvisionedWriteCapacityUnits: Number("long"), + * GlobalTableProvisionedWriteCapacityAutoScalingSettingsUpdate: { // AutoScalingSettingsUpdate + * MinimumUnits: Number("long"), + * MaximumUnits: Number("long"), + * AutoScalingDisabled: true || false, + * AutoScalingRoleArn: "STRING_VALUE", + * ScalingPolicyUpdate: { // AutoScalingPolicyUpdate + * PolicyName: "STRING_VALUE", + * TargetTrackingScalingPolicyConfiguration: { // AutoScalingTargetTrackingScalingPolicyConfigurationUpdate + * DisableScaleIn: true || false, + * ScaleInCooldown: Number("int"), + * ScaleOutCooldown: Number("int"), + * TargetValue: Number("double"), // required + * }, + * }, + * }, + * GlobalTableGlobalSecondaryIndexSettingsUpdate: [ // GlobalTableGlobalSecondaryIndexSettingsUpdateList + * { // GlobalTableGlobalSecondaryIndexSettingsUpdate + * IndexName: "STRING_VALUE", // required + * ProvisionedWriteCapacityUnits: Number("long"), + * ProvisionedWriteCapacityAutoScalingSettingsUpdate: { + * MinimumUnits: Number("long"), + * MaximumUnits: Number("long"), + * AutoScalingDisabled: true || false, + * AutoScalingRoleArn: "STRING_VALUE", + * ScalingPolicyUpdate: { + * PolicyName: "STRING_VALUE", + * TargetTrackingScalingPolicyConfiguration: { + * DisableScaleIn: true || false, + * ScaleInCooldown: Number("int"), + * ScaleOutCooldown: Number("int"), + * TargetValue: Number("double"), // required + * }, + * }, + * }, + * }, + * ], + * 
ReplicaSettingsUpdate: [ // ReplicaSettingsUpdateList + * { // ReplicaSettingsUpdate + * RegionName: "STRING_VALUE", // required + * ReplicaProvisionedReadCapacityUnits: Number("long"), + * ReplicaProvisionedReadCapacityAutoScalingSettingsUpdate: { + * MinimumUnits: Number("long"), + * MaximumUnits: Number("long"), + * AutoScalingDisabled: true || false, + * AutoScalingRoleArn: "STRING_VALUE", + * ScalingPolicyUpdate: { + * PolicyName: "STRING_VALUE", + * TargetTrackingScalingPolicyConfiguration: { + * DisableScaleIn: true || false, + * ScaleInCooldown: Number("int"), + * ScaleOutCooldown: Number("int"), + * TargetValue: Number("double"), // required + * }, + * }, + * }, + * ReplicaGlobalSecondaryIndexSettingsUpdate: [ // ReplicaGlobalSecondaryIndexSettingsUpdateList + * { // ReplicaGlobalSecondaryIndexSettingsUpdate + * IndexName: "STRING_VALUE", // required + * ProvisionedReadCapacityUnits: Number("long"), + * ProvisionedReadCapacityAutoScalingSettingsUpdate: "", + * }, + * ], + * ReplicaTableClass: "STANDARD" || "STANDARD_INFREQUENT_ACCESS", + * }, + * ], + * }; + * const command = new UpdateGlobalTableSettingsCommand(input); + * const response = await client.send(command); + * // { // UpdateGlobalTableSettingsOutput + * // GlobalTableName: "STRING_VALUE", + * // ReplicaSettings: [ // ReplicaSettingsDescriptionList + * // { // ReplicaSettingsDescription + * // RegionName: "STRING_VALUE", // required + * // ReplicaStatus: "CREATING" || "CREATION_FAILED" || "UPDATING" || "DELETING" || "ACTIVE" || "REGION_DISABLED" || "INACCESSIBLE_ENCRYPTION_CREDENTIALS", + * // ReplicaBillingModeSummary: { // BillingModeSummary + * // BillingMode: "PROVISIONED" || "PAY_PER_REQUEST", + * // LastUpdateToPayPerRequestDateTime: new Date("TIMESTAMP"), + * // }, + * // ReplicaProvisionedReadCapacityUnits: Number("long"), + * // ReplicaProvisionedReadCapacityAutoScalingSettings: { // AutoScalingSettingsDescription + * // MinimumUnits: Number("long"), + * // MaximumUnits: Number("long"), 
+ * // AutoScalingDisabled: true || false, + * // AutoScalingRoleArn: "STRING_VALUE", + * // ScalingPolicies: [ // AutoScalingPolicyDescriptionList + * // { // AutoScalingPolicyDescription + * // PolicyName: "STRING_VALUE", + * // TargetTrackingScalingPolicyConfiguration: { // AutoScalingTargetTrackingScalingPolicyConfigurationDescription + * // DisableScaleIn: true || false, + * // ScaleInCooldown: Number("int"), + * // ScaleOutCooldown: Number("int"), + * // TargetValue: Number("double"), // required + * // }, + * // }, + * // ], + * // }, + * // ReplicaProvisionedWriteCapacityUnits: Number("long"), + * // ReplicaProvisionedWriteCapacityAutoScalingSettings: { + * // MinimumUnits: Number("long"), + * // MaximumUnits: Number("long"), + * // AutoScalingDisabled: true || false, + * // AutoScalingRoleArn: "STRING_VALUE", + * // ScalingPolicies: [ + * // { + * // PolicyName: "STRING_VALUE", + * // TargetTrackingScalingPolicyConfiguration: { + * // DisableScaleIn: true || false, + * // ScaleInCooldown: Number("int"), + * // ScaleOutCooldown: Number("int"), + * // TargetValue: Number("double"), // required + * // }, + * // }, + * // ], + * // }, + * // ReplicaGlobalSecondaryIndexSettings: [ // ReplicaGlobalSecondaryIndexSettingsDescriptionList + * // { // ReplicaGlobalSecondaryIndexSettingsDescription + * // IndexName: "STRING_VALUE", // required + * // IndexStatus: "CREATING" || "UPDATING" || "DELETING" || "ACTIVE", + * // ProvisionedReadCapacityUnits: Number("long"), + * // ProvisionedReadCapacityAutoScalingSettings: { + * // MinimumUnits: Number("long"), + * // MaximumUnits: Number("long"), + * // AutoScalingDisabled: true || false, + * // AutoScalingRoleArn: "STRING_VALUE", + * // ScalingPolicies: [ + * // { + * // PolicyName: "STRING_VALUE", + * // TargetTrackingScalingPolicyConfiguration: { + * // DisableScaleIn: true || false, + * // ScaleInCooldown: Number("int"), + * // ScaleOutCooldown: Number("int"), + * // TargetValue: Number("double"), // required + * // }, 
+ * // }, + * // ], + * // }, + * // ProvisionedWriteCapacityUnits: Number("long"), + * // ProvisionedWriteCapacityAutoScalingSettings: { + * // MinimumUnits: Number("long"), + * // MaximumUnits: Number("long"), + * // AutoScalingDisabled: true || false, + * // AutoScalingRoleArn: "STRING_VALUE", + * // ScalingPolicies: [ + * // { + * // PolicyName: "STRING_VALUE", + * // TargetTrackingScalingPolicyConfiguration: { + * // DisableScaleIn: true || false, + * // ScaleInCooldown: Number("int"), + * // ScaleOutCooldown: Number("int"), + * // TargetValue: Number("double"), // required + * // }, + * // }, + * // ], + * // }, + * // }, + * // ], + * // ReplicaTableClassSummary: { // TableClassSummary + * // TableClass: "STANDARD" || "STANDARD_INFREQUENT_ACCESS", + * // LastUpdateDateTime: new Date("TIMESTAMP"), + * // }, + * // }, + * // ], + * // }; + * + * ``` + * + * @param UpdateGlobalTableSettingsCommandInput - {@link UpdateGlobalTableSettingsCommandInput} + * @returns {@link UpdateGlobalTableSettingsCommandOutput} + * @see {@link UpdateGlobalTableSettingsCommandInput} for command's `input` shape. + * @see {@link UpdateGlobalTableSettingsCommandOutput} for command's `response` shape. + * @see {@link DynamoDBClientResolvedConfig | config} for DynamoDBClient's `config` shape. + * + * @throws {@link GlobalTableNotFoundException} (client fault) + *

The specified global table does not exist.

+ * + * @throws {@link IndexNotFoundException} (client fault) + *

The operation tried to access a nonexistent index.

+ * + * @throws {@link InternalServerError} (server fault) + *

An error occurred on the server side.

+ * + * @throws {@link InvalidEndpointException} (client fault) + * + * @throws {@link LimitExceededException} (client fault) + *

There is no limit to the number of daily on-demand backups that can be taken.

+ *

For most purposes, up to 500 simultaneous table operations are allowed per account. These operations + * include CreateTable, UpdateTable, + * DeleteTable,UpdateTimeToLive, + * RestoreTableFromBackup, and RestoreTableToPointInTime.

+ *

When you are creating a table with one or more secondary + * indexes, you can have up to 250 such requests running at a time. However, if the table or + * index specifications are complex, then DynamoDB might temporarily reduce the number + * of concurrent operations.

+ *

When importing into DynamoDB, up to 50 simultaneous import table operations are allowed per account.

+ *

There is a soft account quota of 2,500 tables.

+ *

GetRecords was called with a value of more than 1000 for the limit request parameter.

+ *

More than 2 processes are reading from the same streams shard at the same time. Exceeding + * this limit may result in request throttling.

+ * + * @throws {@link ReplicaNotFoundException} (client fault) + *

The specified replica is no longer part of the global table.

+ * + * @throws {@link ResourceInUseException} (client fault) + *

The operation conflicts with the resource's availability. For example:

+ *
    + *
  • + *

    You attempted to recreate an existing table.

    + *
  • + *
  • + *

    You tried to delete a table currently in the CREATING state.

    + *
  • + *
  • + *

    You tried to update a resource that was already being updated.

    + *
  • + *
+ *

When appropriate, wait for the ongoing update to complete and attempt the request again.

+ * + * @throws {@link DynamoDBServiceException} + *

Base exception class for all service exceptions from DynamoDB service.

+ * + * + * @public + */ +export declare class UpdateGlobalTableSettingsCommand extends UpdateGlobalTableSettingsCommand_base { + /** @internal type navigation helper, not in runtime. */ + protected static __types: { + api: { + input: UpdateGlobalTableSettingsInput; + output: UpdateGlobalTableSettingsOutput; + }; + sdk: { + input: UpdateGlobalTableSettingsCommandInput; + output: UpdateGlobalTableSettingsCommandOutput; + }; + }; +} diff --git a/amplify/functions/deleteDocument/node_modules/@aws-sdk/client-dynamodb/dist-types/commands/UpdateItemCommand.d.ts b/amplify/functions/deleteDocument/node_modules/@aws-sdk/client-dynamodb/dist-types/commands/UpdateItemCommand.d.ts new file mode 100644 index 0000000..0fc6013 --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@aws-sdk/client-dynamodb/dist-types/commands/UpdateItemCommand.d.ts @@ -0,0 +1,313 @@ +import { Command as $Command } from "@smithy/smithy-client"; +import { MetadataBearer as __MetadataBearer } from "@smithy/types"; +import { DynamoDBClientResolvedConfig, ServiceInputTypes, ServiceOutputTypes } from "../DynamoDBClient"; +import { UpdateItemInput, UpdateItemOutput } from "../models/models_0"; +/** + * @public + */ +export type { __MetadataBearer }; +export { $Command }; +/** + * @public + * + * The input for {@link UpdateItemCommand}. + */ +export interface UpdateItemCommandInput extends UpdateItemInput { +} +/** + * @public + * + * The output of {@link UpdateItemCommand}. + */ +export interface UpdateItemCommandOutput extends UpdateItemOutput, __MetadataBearer { +} +declare const UpdateItemCommand_base: { + new (input: UpdateItemCommandInput): import("@smithy/smithy-client").CommandImpl; + new (__0_0: UpdateItemCommandInput): import("@smithy/smithy-client").CommandImpl; + getEndpointParameterInstructions(): import("@smithy/middleware-endpoint").EndpointParameterInstructions; +}; +/** + *

Edits an existing item's attributes, or adds a new item to the table if it does not + * already exist. You can put, delete, or add attribute values. You can also perform a + * conditional update on an existing item (insert a new attribute name-value pair if it + * doesn't exist, or replace an existing name-value pair if it has certain expected + * attribute values).

+ *

You can also return the item's attribute values in the same UpdateItem + * operation using the ReturnValues parameter.

+ * @example + * Use a bare-bones client and the command you need to make an API call. + * ```javascript + * import { DynamoDBClient, UpdateItemCommand } from "@aws-sdk/client-dynamodb"; // ES Modules import + * // const { DynamoDBClient, UpdateItemCommand } = require("@aws-sdk/client-dynamodb"); // CommonJS import + * const client = new DynamoDBClient(config); + * const input = { // UpdateItemInput + * TableName: "STRING_VALUE", // required + * Key: { // Key // required + * "": { // AttributeValue Union: only one key present + * S: "STRING_VALUE", + * N: "STRING_VALUE", + * B: new Uint8Array(), // e.g. Buffer.from("") or new TextEncoder().encode("") + * SS: [ // StringSetAttributeValue + * "STRING_VALUE", + * ], + * NS: [ // NumberSetAttributeValue + * "STRING_VALUE", + * ], + * BS: [ // BinarySetAttributeValue + * new Uint8Array(), // e.g. Buffer.from("") or new TextEncoder().encode("") + * ], + * M: { // MapAttributeValue + * "": {// Union: only one key present + * S: "STRING_VALUE", + * N: "STRING_VALUE", + * B: new Uint8Array(), // e.g. Buffer.from("") or new TextEncoder().encode("") + * SS: [ + * "STRING_VALUE", + * ], + * NS: [ + * "STRING_VALUE", + * ], + * BS: [ + * new Uint8Array(), // e.g. 
Buffer.from("") or new TextEncoder().encode("") + * ], + * M: { + * "": "", + * }, + * L: [ // ListAttributeValue + * "", + * ], + * NULL: true || false, + * BOOL: true || false, + * }, + * }, + * L: [ + * "", + * ], + * NULL: true || false, + * BOOL: true || false, + * }, + * }, + * AttributeUpdates: { // AttributeUpdates + * "": { // AttributeValueUpdate + * Value: "", + * Action: "ADD" || "PUT" || "DELETE", + * }, + * }, + * Expected: { // ExpectedAttributeMap + * "": { // ExpectedAttributeValue + * Value: "", + * Exists: true || false, + * ComparisonOperator: "EQ" || "NE" || "IN" || "LE" || "LT" || "GE" || "GT" || "BETWEEN" || "NOT_NULL" || "NULL" || "CONTAINS" || "NOT_CONTAINS" || "BEGINS_WITH", + * AttributeValueList: [ // AttributeValueList + * "", + * ], + * }, + * }, + * ConditionalOperator: "AND" || "OR", + * ReturnValues: "NONE" || "ALL_OLD" || "UPDATED_OLD" || "ALL_NEW" || "UPDATED_NEW", + * ReturnConsumedCapacity: "INDEXES" || "TOTAL" || "NONE", + * ReturnItemCollectionMetrics: "SIZE" || "NONE", + * UpdateExpression: "STRING_VALUE", + * ConditionExpression: "STRING_VALUE", + * ExpressionAttributeNames: { // ExpressionAttributeNameMap + * "": "STRING_VALUE", + * }, + * ExpressionAttributeValues: { // ExpressionAttributeValueMap + * "": "", + * }, + * ReturnValuesOnConditionCheckFailure: "ALL_OLD" || "NONE", + * }; + * const command = new UpdateItemCommand(input); + * const response = await client.send(command); + * // { // UpdateItemOutput + * // Attributes: { // AttributeMap + * // "": { // AttributeValue Union: only one key present + * // S: "STRING_VALUE", + * // N: "STRING_VALUE", + * // B: new Uint8Array(), + * // SS: [ // StringSetAttributeValue + * // "STRING_VALUE", + * // ], + * // NS: [ // NumberSetAttributeValue + * // "STRING_VALUE", + * // ], + * // BS: [ // BinarySetAttributeValue + * // new Uint8Array(), + * // ], + * // M: { // MapAttributeValue + * // "": {// Union: only one key present + * // S: "STRING_VALUE", + * // N: 
"STRING_VALUE", + * // B: new Uint8Array(), + * // SS: [ + * // "STRING_VALUE", + * // ], + * // NS: [ + * // "STRING_VALUE", + * // ], + * // BS: [ + * // new Uint8Array(), + * // ], + * // M: { + * // "": "", + * // }, + * // L: [ // ListAttributeValue + * // "", + * // ], + * // NULL: true || false, + * // BOOL: true || false, + * // }, + * // }, + * // L: [ + * // "", + * // ], + * // NULL: true || false, + * // BOOL: true || false, + * // }, + * // }, + * // ConsumedCapacity: { // ConsumedCapacity + * // TableName: "STRING_VALUE", + * // CapacityUnits: Number("double"), + * // ReadCapacityUnits: Number("double"), + * // WriteCapacityUnits: Number("double"), + * // Table: { // Capacity + * // ReadCapacityUnits: Number("double"), + * // WriteCapacityUnits: Number("double"), + * // CapacityUnits: Number("double"), + * // }, + * // LocalSecondaryIndexes: { // SecondaryIndexesCapacityMap + * // "": { + * // ReadCapacityUnits: Number("double"), + * // WriteCapacityUnits: Number("double"), + * // CapacityUnits: Number("double"), + * // }, + * // }, + * // GlobalSecondaryIndexes: { + * // "": { + * // ReadCapacityUnits: Number("double"), + * // WriteCapacityUnits: Number("double"), + * // CapacityUnits: Number("double"), + * // }, + * // }, + * // }, + * // ItemCollectionMetrics: { // ItemCollectionMetrics + * // ItemCollectionKey: { // ItemCollectionKeyAttributeMap + * // "": "", + * // }, + * // SizeEstimateRangeGB: [ // ItemCollectionSizeEstimateRange + * // Number("double"), + * // ], + * // }, + * // }; + * + * ``` + * + * @param UpdateItemCommandInput - {@link UpdateItemCommandInput} + * @returns {@link UpdateItemCommandOutput} + * @see {@link UpdateItemCommandInput} for command's `input` shape. + * @see {@link UpdateItemCommandOutput} for command's `response` shape. + * @see {@link DynamoDBClientResolvedConfig | config} for DynamoDBClient's `config` shape. + * + * @throws {@link ConditionalCheckFailedException} (client fault) + *

A condition specified in the operation failed to be evaluated.

+ * + * @throws {@link InternalServerError} (server fault) + *

An error occurred on the server side.

+ * + * @throws {@link InvalidEndpointException} (client fault) + * + * @throws {@link ItemCollectionSizeLimitExceededException} (client fault) + *

An item collection is too large. This exception is only returned for tables that + * have one or more local secondary indexes.

+ * + * @throws {@link ProvisionedThroughputExceededException} (client fault) + *

Your request rate is too high. The Amazon Web Services SDKs for DynamoDB + * automatically retry requests that receive this exception. Your request is eventually + * successful, unless your retry queue is too large to finish. Reduce the frequency of + * requests and use exponential backoff. For more information, go to Error Retries and Exponential Backoff in the Amazon DynamoDB Developer Guide.

+ * + * @throws {@link ReplicatedWriteConflictException} (client fault) + *

The request was rejected because one or more items in the request are being modified by a request in another Region.

+ * + * @throws {@link RequestLimitExceeded} (client fault) + *

Throughput exceeds the current throughput quota for your account. Please contact + * Amazon Web ServicesSupport to request a + * quota increase.

+ * + * @throws {@link ResourceNotFoundException} (client fault) + *

The operation tried to access a nonexistent table or index. The resource might not + * be specified correctly, or its status might not be ACTIVE.

+ * + * @throws {@link TransactionConflictException} (client fault) + *

Operation was rejected because there is an ongoing transaction for the + * item.

+ * + * @throws {@link DynamoDBServiceException} + *

Base exception class for all service exceptions from DynamoDB service.

+ * + * + * @example To update an item in a table + * ```javascript + * // This example updates an item in the Music table. It adds a new attribute (Year) and modifies the AlbumTitle attribute. All of the attributes in the item, as they appear after the update, are returned in the response. + * const input = { + * ExpressionAttributeNames: { + * #AT: "AlbumTitle", + * #Y: "Year" + * }, + * ExpressionAttributeValues: { + * :t: { + * S: "Louder Than Ever" + * }, + * :y: { + * N: "2015" + * } + * }, + * Key: { + * Artist: { + * S: "Acme Band" + * }, + * SongTitle: { + * S: "Happy Day" + * } + * }, + * ReturnValues: "ALL_NEW", + * TableName: "Music", + * UpdateExpression: "SET #Y = :y, #AT = :t" + * }; + * const command = new UpdateItemCommand(input); + * const response = await client.send(command); + * /* response is + * { + * Attributes: { + * AlbumTitle: { + * S: "Louder Than Ever" + * }, + * Artist: { + * S: "Acme Band" + * }, + * SongTitle: { + * S: "Happy Day" + * }, + * Year: { + * N: "2015" + * } + * } + * } + * *\/ + * ``` + * + * @public + */ +export declare class UpdateItemCommand extends UpdateItemCommand_base { + /** @internal type navigation helper, not in runtime. 
*/ + protected static __types: { + api: { + input: UpdateItemInput; + output: UpdateItemOutput; + }; + sdk: { + input: UpdateItemCommandInput; + output: UpdateItemCommandOutput; + }; + }; +} diff --git a/amplify/functions/deleteDocument/node_modules/@aws-sdk/client-dynamodb/dist-types/commands/UpdateKinesisStreamingDestinationCommand.d.ts b/amplify/functions/deleteDocument/node_modules/@aws-sdk/client-dynamodb/dist-types/commands/UpdateKinesisStreamingDestinationCommand.d.ts new file mode 100644 index 0000000..ec8c635 --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@aws-sdk/client-dynamodb/dist-types/commands/UpdateKinesisStreamingDestinationCommand.d.ts @@ -0,0 +1,121 @@ +import { Command as $Command } from "@smithy/smithy-client"; +import { MetadataBearer as __MetadataBearer } from "@smithy/types"; +import { DynamoDBClientResolvedConfig, ServiceInputTypes, ServiceOutputTypes } from "../DynamoDBClient"; +import { UpdateKinesisStreamingDestinationInput, UpdateKinesisStreamingDestinationOutput } from "../models/models_0"; +/** + * @public + */ +export type { __MetadataBearer }; +export { $Command }; +/** + * @public + * + * The input for {@link UpdateKinesisStreamingDestinationCommand}. + */ +export interface UpdateKinesisStreamingDestinationCommandInput extends UpdateKinesisStreamingDestinationInput { +} +/** + * @public + * + * The output of {@link UpdateKinesisStreamingDestinationCommand}. + */ +export interface UpdateKinesisStreamingDestinationCommandOutput extends UpdateKinesisStreamingDestinationOutput, __MetadataBearer { +} +declare const UpdateKinesisStreamingDestinationCommand_base: { + new (input: UpdateKinesisStreamingDestinationCommandInput): import("@smithy/smithy-client").CommandImpl; + new (__0_0: UpdateKinesisStreamingDestinationCommandInput): import("@smithy/smithy-client").CommandImpl; + getEndpointParameterInstructions(): import("@smithy/middleware-endpoint").EndpointParameterInstructions; +}; +/** + *

The command to update the Kinesis stream destination.

+ * @example + * Use a bare-bones client and the command you need to make an API call. + * ```javascript + * import { DynamoDBClient, UpdateKinesisStreamingDestinationCommand } from "@aws-sdk/client-dynamodb"; // ES Modules import + * // const { DynamoDBClient, UpdateKinesisStreamingDestinationCommand } = require("@aws-sdk/client-dynamodb"); // CommonJS import + * const client = new DynamoDBClient(config); + * const input = { // UpdateKinesisStreamingDestinationInput + * TableName: "STRING_VALUE", // required + * StreamArn: "STRING_VALUE", // required + * UpdateKinesisStreamingConfiguration: { // UpdateKinesisStreamingConfiguration + * ApproximateCreationDateTimePrecision: "MILLISECOND" || "MICROSECOND", + * }, + * }; + * const command = new UpdateKinesisStreamingDestinationCommand(input); + * const response = await client.send(command); + * // { // UpdateKinesisStreamingDestinationOutput + * // TableName: "STRING_VALUE", + * // StreamArn: "STRING_VALUE", + * // DestinationStatus: "ENABLING" || "ACTIVE" || "DISABLING" || "DISABLED" || "ENABLE_FAILED" || "UPDATING", + * // UpdateKinesisStreamingConfiguration: { // UpdateKinesisStreamingConfiguration + * // ApproximateCreationDateTimePrecision: "MILLISECOND" || "MICROSECOND", + * // }, + * // }; + * + * ``` + * + * @param UpdateKinesisStreamingDestinationCommandInput - {@link UpdateKinesisStreamingDestinationCommandInput} + * @returns {@link UpdateKinesisStreamingDestinationCommandOutput} + * @see {@link UpdateKinesisStreamingDestinationCommandInput} for command's `input` shape. + * @see {@link UpdateKinesisStreamingDestinationCommandOutput} for command's `response` shape. + * @see {@link DynamoDBClientResolvedConfig | config} for DynamoDBClient's `config` shape. + * + * @throws {@link InternalServerError} (server fault) + *

An error occurred on the server side.

+ * + * @throws {@link InvalidEndpointException} (client fault) + * + * @throws {@link LimitExceededException} (client fault) + *

There is no limit to the number of daily on-demand backups that can be taken.

+ *

For most purposes, up to 500 simultaneous table operations are allowed per account. These operations + * include CreateTable, UpdateTable, + * DeleteTable,UpdateTimeToLive, + * RestoreTableFromBackup, and RestoreTableToPointInTime.

+ *

When you are creating a table with one or more secondary + * indexes, you can have up to 250 such requests running at a time. However, if the table or + * index specifications are complex, then DynamoDB might temporarily reduce the number + * of concurrent operations.

+ *

When importing into DynamoDB, up to 50 simultaneous import table operations are allowed per account.

+ *

There is a soft account quota of 2,500 tables.

+ *

GetRecords was called with a value of more than 1000 for the limit request parameter.

+ *

More than 2 processes are reading from the same streams shard at the same time. Exceeding + * this limit may result in request throttling.

+ * + * @throws {@link ResourceInUseException} (client fault) + *

The operation conflicts with the resource's availability. For example:

+ *
    + *
  • + *

    You attempted to recreate an existing table.

    + *
  • + *
  • + *

    You tried to delete a table currently in the CREATING state.

    + *
  • + *
  • + *

    You tried to update a resource that was already being updated.

    + *
  • + *
+ *

When appropriate, wait for the ongoing update to complete and attempt the request again.

+ * + * @throws {@link ResourceNotFoundException} (client fault) + *

The operation tried to access a nonexistent table or index. The resource might not + * be specified correctly, or its status might not be ACTIVE.

+ * + * @throws {@link DynamoDBServiceException} + *

Base exception class for all service exceptions from DynamoDB service.

+ * + * + * @public + */ +export declare class UpdateKinesisStreamingDestinationCommand extends UpdateKinesisStreamingDestinationCommand_base { + /** @internal type navigation helper, not in runtime. */ + protected static __types: { + api: { + input: UpdateKinesisStreamingDestinationInput; + output: UpdateKinesisStreamingDestinationOutput; + }; + sdk: { + input: UpdateKinesisStreamingDestinationCommandInput; + output: UpdateKinesisStreamingDestinationCommandOutput; + }; + }; +} diff --git a/amplify/functions/deleteDocument/node_modules/@aws-sdk/client-dynamodb/dist-types/commands/UpdateTableCommand.d.ts b/amplify/functions/deleteDocument/node_modules/@aws-sdk/client-dynamodb/dist-types/commands/UpdateTableCommand.d.ts new file mode 100644 index 0000000..cf64a60 --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@aws-sdk/client-dynamodb/dist-types/commands/UpdateTableCommand.d.ts @@ -0,0 +1,437 @@ +import { Command as $Command } from "@smithy/smithy-client"; +import { MetadataBearer as __MetadataBearer } from "@smithy/types"; +import { DynamoDBClientResolvedConfig, ServiceInputTypes, ServiceOutputTypes } from "../DynamoDBClient"; +import { UpdateTableInput, UpdateTableOutput } from "../models/models_0"; +/** + * @public + */ +export type { __MetadataBearer }; +export { $Command }; +/** + * @public + * + * The input for {@link UpdateTableCommand}. + */ +export interface UpdateTableCommandInput extends UpdateTableInput { +} +/** + * @public + * + * The output of {@link UpdateTableCommand}. + */ +export interface UpdateTableCommandOutput extends UpdateTableOutput, __MetadataBearer { +} +declare const UpdateTableCommand_base: { + new (input: UpdateTableCommandInput): import("@smithy/smithy-client").CommandImpl; + new (__0_0: UpdateTableCommandInput): import("@smithy/smithy-client").CommandImpl; + getEndpointParameterInstructions(): import("@smithy/middleware-endpoint").EndpointParameterInstructions; +}; +/** + *

Modifies the provisioned throughput settings, global secondary indexes, or DynamoDB + * Streams settings for a given table.

+ * + *

For global tables, this operation only applies to global tables using Version + * 2019.11.21 (Current version).

+ *
+ *

You can only perform one of the following operations at once:

+ *
    + *
  • + *

    Modify the provisioned throughput settings of the table.

    + *
  • + *
  • + *

    Remove a global secondary index from the table.

    + *
  • + *
  • + *

    Create a new global secondary index on the table. After the index begins + * backfilling, you can use UpdateTable to perform other + * operations.

    + *
  • + *
+ *

+ * UpdateTable is an asynchronous operation; while it's executing, the table + * status changes from ACTIVE to UPDATING. While it's + * UPDATING, you can't issue another UpdateTable request. + * When the table returns to the ACTIVE state, the UpdateTable + * operation is complete.

+ * @example + * Use a bare-bones client and the command you need to make an API call. + * ```javascript + * import { DynamoDBClient, UpdateTableCommand } from "@aws-sdk/client-dynamodb"; // ES Modules import + * // const { DynamoDBClient, UpdateTableCommand } = require("@aws-sdk/client-dynamodb"); // CommonJS import + * const client = new DynamoDBClient(config); + * const input = { // UpdateTableInput + * AttributeDefinitions: [ // AttributeDefinitions + * { // AttributeDefinition + * AttributeName: "STRING_VALUE", // required + * AttributeType: "S" || "N" || "B", // required + * }, + * ], + * TableName: "STRING_VALUE", // required + * BillingMode: "PROVISIONED" || "PAY_PER_REQUEST", + * ProvisionedThroughput: { // ProvisionedThroughput + * ReadCapacityUnits: Number("long"), // required + * WriteCapacityUnits: Number("long"), // required + * }, + * GlobalSecondaryIndexUpdates: [ // GlobalSecondaryIndexUpdateList + * { // GlobalSecondaryIndexUpdate + * Update: { // UpdateGlobalSecondaryIndexAction + * IndexName: "STRING_VALUE", // required + * ProvisionedThroughput: { + * ReadCapacityUnits: Number("long"), // required + * WriteCapacityUnits: Number("long"), // required + * }, + * OnDemandThroughput: { // OnDemandThroughput + * MaxReadRequestUnits: Number("long"), + * MaxWriteRequestUnits: Number("long"), + * }, + * WarmThroughput: { // WarmThroughput + * ReadUnitsPerSecond: Number("long"), + * WriteUnitsPerSecond: Number("long"), + * }, + * }, + * Create: { // CreateGlobalSecondaryIndexAction + * IndexName: "STRING_VALUE", // required + * KeySchema: [ // KeySchema // required + * { // KeySchemaElement + * AttributeName: "STRING_VALUE", // required + * KeyType: "HASH" || "RANGE", // required + * }, + * ], + * Projection: { // Projection + * ProjectionType: "ALL" || "KEYS_ONLY" || "INCLUDE", + * NonKeyAttributes: [ // NonKeyAttributeNameList + * "STRING_VALUE", + * ], + * }, + * ProvisionedThroughput: { + * ReadCapacityUnits: Number("long"), // required + * 
WriteCapacityUnits: Number("long"), // required + * }, + * OnDemandThroughput: { + * MaxReadRequestUnits: Number("long"), + * MaxWriteRequestUnits: Number("long"), + * }, + * WarmThroughput: { + * ReadUnitsPerSecond: Number("long"), + * WriteUnitsPerSecond: Number("long"), + * }, + * }, + * Delete: { // DeleteGlobalSecondaryIndexAction + * IndexName: "STRING_VALUE", // required + * }, + * }, + * ], + * StreamSpecification: { // StreamSpecification + * StreamEnabled: true || false, // required + * StreamViewType: "NEW_IMAGE" || "OLD_IMAGE" || "NEW_AND_OLD_IMAGES" || "KEYS_ONLY", + * }, + * SSESpecification: { // SSESpecification + * Enabled: true || false, + * SSEType: "AES256" || "KMS", + * KMSMasterKeyId: "STRING_VALUE", + * }, + * ReplicaUpdates: [ // ReplicationGroupUpdateList + * { // ReplicationGroupUpdate + * Create: { // CreateReplicationGroupMemberAction + * RegionName: "STRING_VALUE", // required + * KMSMasterKeyId: "STRING_VALUE", + * ProvisionedThroughputOverride: { // ProvisionedThroughputOverride + * ReadCapacityUnits: Number("long"), + * }, + * OnDemandThroughputOverride: { // OnDemandThroughputOverride + * MaxReadRequestUnits: Number("long"), + * }, + * GlobalSecondaryIndexes: [ // ReplicaGlobalSecondaryIndexList + * { // ReplicaGlobalSecondaryIndex + * IndexName: "STRING_VALUE", // required + * ProvisionedThroughputOverride: { + * ReadCapacityUnits: Number("long"), + * }, + * OnDemandThroughputOverride: { + * MaxReadRequestUnits: Number("long"), + * }, + * }, + * ], + * TableClassOverride: "STANDARD" || "STANDARD_INFREQUENT_ACCESS", + * }, + * Update: { // UpdateReplicationGroupMemberAction + * RegionName: "STRING_VALUE", // required + * KMSMasterKeyId: "STRING_VALUE", + * ProvisionedThroughputOverride: { + * ReadCapacityUnits: Number("long"), + * }, + * OnDemandThroughputOverride: { + * MaxReadRequestUnits: Number("long"), + * }, + * GlobalSecondaryIndexes: [ + * { + * IndexName: "STRING_VALUE", // required + * ProvisionedThroughputOverride: { + * 
ReadCapacityUnits: Number("long"), + * }, + * OnDemandThroughputOverride: { + * MaxReadRequestUnits: Number("long"), + * }, + * }, + * ], + * TableClassOverride: "STANDARD" || "STANDARD_INFREQUENT_ACCESS", + * }, + * Delete: { // DeleteReplicationGroupMemberAction + * RegionName: "STRING_VALUE", // required + * }, + * }, + * ], + * TableClass: "STANDARD" || "STANDARD_INFREQUENT_ACCESS", + * DeletionProtectionEnabled: true || false, + * MultiRegionConsistency: "EVENTUAL" || "STRONG", + * OnDemandThroughput: { + * MaxReadRequestUnits: Number("long"), + * MaxWriteRequestUnits: Number("long"), + * }, + * WarmThroughput: { + * ReadUnitsPerSecond: Number("long"), + * WriteUnitsPerSecond: Number("long"), + * }, + * }; + * const command = new UpdateTableCommand(input); + * const response = await client.send(command); + * // { // UpdateTableOutput + * // TableDescription: { // TableDescription + * // AttributeDefinitions: [ // AttributeDefinitions + * // { // AttributeDefinition + * // AttributeName: "STRING_VALUE", // required + * // AttributeType: "S" || "N" || "B", // required + * // }, + * // ], + * // TableName: "STRING_VALUE", + * // KeySchema: [ // KeySchema + * // { // KeySchemaElement + * // AttributeName: "STRING_VALUE", // required + * // KeyType: "HASH" || "RANGE", // required + * // }, + * // ], + * // TableStatus: "CREATING" || "UPDATING" || "DELETING" || "ACTIVE" || "INACCESSIBLE_ENCRYPTION_CREDENTIALS" || "ARCHIVING" || "ARCHIVED", + * // CreationDateTime: new Date("TIMESTAMP"), + * // ProvisionedThroughput: { // ProvisionedThroughputDescription + * // LastIncreaseDateTime: new Date("TIMESTAMP"), + * // LastDecreaseDateTime: new Date("TIMESTAMP"), + * // NumberOfDecreasesToday: Number("long"), + * // ReadCapacityUnits: Number("long"), + * // WriteCapacityUnits: Number("long"), + * // }, + * // TableSizeBytes: Number("long"), + * // ItemCount: Number("long"), + * // TableArn: "STRING_VALUE", + * // TableId: "STRING_VALUE", + * // BillingModeSummary: { // 
BillingModeSummary + * // BillingMode: "PROVISIONED" || "PAY_PER_REQUEST", + * // LastUpdateToPayPerRequestDateTime: new Date("TIMESTAMP"), + * // }, + * // LocalSecondaryIndexes: [ // LocalSecondaryIndexDescriptionList + * // { // LocalSecondaryIndexDescription + * // IndexName: "STRING_VALUE", + * // KeySchema: [ + * // { + * // AttributeName: "STRING_VALUE", // required + * // KeyType: "HASH" || "RANGE", // required + * // }, + * // ], + * // Projection: { // Projection + * // ProjectionType: "ALL" || "KEYS_ONLY" || "INCLUDE", + * // NonKeyAttributes: [ // NonKeyAttributeNameList + * // "STRING_VALUE", + * // ], + * // }, + * // IndexSizeBytes: Number("long"), + * // ItemCount: Number("long"), + * // IndexArn: "STRING_VALUE", + * // }, + * // ], + * // GlobalSecondaryIndexes: [ // GlobalSecondaryIndexDescriptionList + * // { // GlobalSecondaryIndexDescription + * // IndexName: "STRING_VALUE", + * // KeySchema: [ + * // { + * // AttributeName: "STRING_VALUE", // required + * // KeyType: "HASH" || "RANGE", // required + * // }, + * // ], + * // Projection: { + * // ProjectionType: "ALL" || "KEYS_ONLY" || "INCLUDE", + * // NonKeyAttributes: [ + * // "STRING_VALUE", + * // ], + * // }, + * // IndexStatus: "CREATING" || "UPDATING" || "DELETING" || "ACTIVE", + * // Backfilling: true || false, + * // ProvisionedThroughput: { + * // LastIncreaseDateTime: new Date("TIMESTAMP"), + * // LastDecreaseDateTime: new Date("TIMESTAMP"), + * // NumberOfDecreasesToday: Number("long"), + * // ReadCapacityUnits: Number("long"), + * // WriteCapacityUnits: Number("long"), + * // }, + * // IndexSizeBytes: Number("long"), + * // ItemCount: Number("long"), + * // IndexArn: "STRING_VALUE", + * // OnDemandThroughput: { // OnDemandThroughput + * // MaxReadRequestUnits: Number("long"), + * // MaxWriteRequestUnits: Number("long"), + * // }, + * // WarmThroughput: { // GlobalSecondaryIndexWarmThroughputDescription + * // ReadUnitsPerSecond: Number("long"), + * // WriteUnitsPerSecond: 
Number("long"), + * // Status: "CREATING" || "UPDATING" || "DELETING" || "ACTIVE", + * // }, + * // }, + * // ], + * // StreamSpecification: { // StreamSpecification + * // StreamEnabled: true || false, // required + * // StreamViewType: "NEW_IMAGE" || "OLD_IMAGE" || "NEW_AND_OLD_IMAGES" || "KEYS_ONLY", + * // }, + * // LatestStreamLabel: "STRING_VALUE", + * // LatestStreamArn: "STRING_VALUE", + * // GlobalTableVersion: "STRING_VALUE", + * // Replicas: [ // ReplicaDescriptionList + * // { // ReplicaDescription + * // RegionName: "STRING_VALUE", + * // ReplicaStatus: "CREATING" || "CREATION_FAILED" || "UPDATING" || "DELETING" || "ACTIVE" || "REGION_DISABLED" || "INACCESSIBLE_ENCRYPTION_CREDENTIALS", + * // ReplicaStatusDescription: "STRING_VALUE", + * // ReplicaStatusPercentProgress: "STRING_VALUE", + * // KMSMasterKeyId: "STRING_VALUE", + * // ProvisionedThroughputOverride: { // ProvisionedThroughputOverride + * // ReadCapacityUnits: Number("long"), + * // }, + * // OnDemandThroughputOverride: { // OnDemandThroughputOverride + * // MaxReadRequestUnits: Number("long"), + * // }, + * // WarmThroughput: { // TableWarmThroughputDescription + * // ReadUnitsPerSecond: Number("long"), + * // WriteUnitsPerSecond: Number("long"), + * // Status: "CREATING" || "UPDATING" || "DELETING" || "ACTIVE" || "INACCESSIBLE_ENCRYPTION_CREDENTIALS" || "ARCHIVING" || "ARCHIVED", + * // }, + * // GlobalSecondaryIndexes: [ // ReplicaGlobalSecondaryIndexDescriptionList + * // { // ReplicaGlobalSecondaryIndexDescription + * // IndexName: "STRING_VALUE", + * // ProvisionedThroughputOverride: { + * // ReadCapacityUnits: Number("long"), + * // }, + * // OnDemandThroughputOverride: { + * // MaxReadRequestUnits: Number("long"), + * // }, + * // WarmThroughput: { + * // ReadUnitsPerSecond: Number("long"), + * // WriteUnitsPerSecond: Number("long"), + * // Status: "CREATING" || "UPDATING" || "DELETING" || "ACTIVE", + * // }, + * // }, + * // ], + * // ReplicaInaccessibleDateTime: new 
Date("TIMESTAMP"), + * // ReplicaTableClassSummary: { // TableClassSummary + * // TableClass: "STANDARD" || "STANDARD_INFREQUENT_ACCESS", + * // LastUpdateDateTime: new Date("TIMESTAMP"), + * // }, + * // }, + * // ], + * // RestoreSummary: { // RestoreSummary + * // SourceBackupArn: "STRING_VALUE", + * // SourceTableArn: "STRING_VALUE", + * // RestoreDateTime: new Date("TIMESTAMP"), // required + * // RestoreInProgress: true || false, // required + * // }, + * // SSEDescription: { // SSEDescription + * // Status: "ENABLING" || "ENABLED" || "DISABLING" || "DISABLED" || "UPDATING", + * // SSEType: "AES256" || "KMS", + * // KMSMasterKeyArn: "STRING_VALUE", + * // InaccessibleEncryptionDateTime: new Date("TIMESTAMP"), + * // }, + * // ArchivalSummary: { // ArchivalSummary + * // ArchivalDateTime: new Date("TIMESTAMP"), + * // ArchivalReason: "STRING_VALUE", + * // ArchivalBackupArn: "STRING_VALUE", + * // }, + * // TableClassSummary: { + * // TableClass: "STANDARD" || "STANDARD_INFREQUENT_ACCESS", + * // LastUpdateDateTime: new Date("TIMESTAMP"), + * // }, + * // DeletionProtectionEnabled: true || false, + * // OnDemandThroughput: { + * // MaxReadRequestUnits: Number("long"), + * // MaxWriteRequestUnits: Number("long"), + * // }, + * // WarmThroughput: { + * // ReadUnitsPerSecond: Number("long"), + * // WriteUnitsPerSecond: Number("long"), + * // Status: "CREATING" || "UPDATING" || "DELETING" || "ACTIVE" || "INACCESSIBLE_ENCRYPTION_CREDENTIALS" || "ARCHIVING" || "ARCHIVED", + * // }, + * // MultiRegionConsistency: "EVENTUAL" || "STRONG", + * // }, + * // }; + * + * ``` + * + * @param UpdateTableCommandInput - {@link UpdateTableCommandInput} + * @returns {@link UpdateTableCommandOutput} + * @see {@link UpdateTableCommandInput} for command's `input` shape. + * @see {@link UpdateTableCommandOutput} for command's `response` shape. + * @see {@link DynamoDBClientResolvedConfig | config} for DynamoDBClient's `config` shape. 
+ * + * @throws {@link InternalServerError} (server fault) + *

An error occurred on the server side.

+ * + * @throws {@link InvalidEndpointException} (client fault) + * + * @throws {@link LimitExceededException} (client fault) + *

There is no limit to the number of daily on-demand backups that can be taken.

+ *

For most purposes, up to 500 simultaneous table operations are allowed per account. These operations + * include CreateTable, UpdateTable, + * DeleteTable,UpdateTimeToLive, + * RestoreTableFromBackup, and RestoreTableToPointInTime.

+ *

When you are creating a table with one or more secondary + * indexes, you can have up to 250 such requests running at a time. However, if the table or + * index specifications are complex, then DynamoDB might temporarily reduce the number + * of concurrent operations.

+ *

When importing into DynamoDB, up to 50 simultaneous import table operations are allowed per account.

+ *

There is a soft account quota of 2,500 tables.

+ *

GetRecords was called with a value of more than 1000 for the limit request parameter.

+ *

More than 2 processes are reading from the same streams shard at the same time. Exceeding + * this limit may result in request throttling.

+ * + * @throws {@link ResourceInUseException} (client fault) + *

The operation conflicts with the resource's availability. For example:

+ *
    + *
  • + *

    You attempted to recreate an existing table.

    + *
  • + *
  • + *

    You tried to delete a table currently in the CREATING state.

    + *
  • + *
  • + *

    You tried to update a resource that was already being updated.

    + *
  • + *
+ *

When appropriate, wait for the ongoing update to complete and attempt the request again.

+ * + * @throws {@link ResourceNotFoundException} (client fault) + *

The operation tried to access a nonexistent table or index. The resource might not + * be specified correctly, or its status might not be ACTIVE.

+ * + * @throws {@link DynamoDBServiceException} + *

Base exception class for all service exceptions from DynamoDB service.

+ * + * + * @public + */ +export declare class UpdateTableCommand extends UpdateTableCommand_base { + /** @internal type navigation helper, not in runtime. */ + protected static __types: { + api: { + input: UpdateTableInput; + output: UpdateTableOutput; + }; + sdk: { + input: UpdateTableCommandInput; + output: UpdateTableCommandOutput; + }; + }; +} diff --git a/amplify/functions/deleteDocument/node_modules/@aws-sdk/client-dynamodb/dist-types/commands/UpdateTableReplicaAutoScalingCommand.d.ts b/amplify/functions/deleteDocument/node_modules/@aws-sdk/client-dynamodb/dist-types/commands/UpdateTableReplicaAutoScalingCommand.d.ts new file mode 100644 index 0000000..e6f341c --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@aws-sdk/client-dynamodb/dist-types/commands/UpdateTableReplicaAutoScalingCommand.d.ts @@ -0,0 +1,244 @@ +import { Command as $Command } from "@smithy/smithy-client"; +import { MetadataBearer as __MetadataBearer } from "@smithy/types"; +import { DynamoDBClientResolvedConfig, ServiceInputTypes, ServiceOutputTypes } from "../DynamoDBClient"; +import { UpdateTableReplicaAutoScalingInput, UpdateTableReplicaAutoScalingOutput } from "../models/models_0"; +/** + * @public + */ +export type { __MetadataBearer }; +export { $Command }; +/** + * @public + * + * The input for {@link UpdateTableReplicaAutoScalingCommand}. + */ +export interface UpdateTableReplicaAutoScalingCommandInput extends UpdateTableReplicaAutoScalingInput { +} +/** + * @public + * + * The output of {@link UpdateTableReplicaAutoScalingCommand}. 
+ */ +export interface UpdateTableReplicaAutoScalingCommandOutput extends UpdateTableReplicaAutoScalingOutput, __MetadataBearer { +} +declare const UpdateTableReplicaAutoScalingCommand_base: { + new (input: UpdateTableReplicaAutoScalingCommandInput): import("@smithy/smithy-client").CommandImpl; + new (__0_0: UpdateTableReplicaAutoScalingCommandInput): import("@smithy/smithy-client").CommandImpl; + getEndpointParameterInstructions(): import("@smithy/middleware-endpoint").EndpointParameterInstructions; +}; +/** + *

Updates auto scaling settings on your global tables at once.

+ * + *

For global tables, this operation only applies to global tables using Version + * 2019.11.21 (Current version).

+ *
+ * @example + * Use a bare-bones client and the command you need to make an API call. + * ```javascript + * import { DynamoDBClient, UpdateTableReplicaAutoScalingCommand } from "@aws-sdk/client-dynamodb"; // ES Modules import + * // const { DynamoDBClient, UpdateTableReplicaAutoScalingCommand } = require("@aws-sdk/client-dynamodb"); // CommonJS import + * const client = new DynamoDBClient(config); + * const input = { // UpdateTableReplicaAutoScalingInput + * GlobalSecondaryIndexUpdates: [ // GlobalSecondaryIndexAutoScalingUpdateList + * { // GlobalSecondaryIndexAutoScalingUpdate + * IndexName: "STRING_VALUE", + * ProvisionedWriteCapacityAutoScalingUpdate: { // AutoScalingSettingsUpdate + * MinimumUnits: Number("long"), + * MaximumUnits: Number("long"), + * AutoScalingDisabled: true || false, + * AutoScalingRoleArn: "STRING_VALUE", + * ScalingPolicyUpdate: { // AutoScalingPolicyUpdate + * PolicyName: "STRING_VALUE", + * TargetTrackingScalingPolicyConfiguration: { // AutoScalingTargetTrackingScalingPolicyConfigurationUpdate + * DisableScaleIn: true || false, + * ScaleInCooldown: Number("int"), + * ScaleOutCooldown: Number("int"), + * TargetValue: Number("double"), // required + * }, + * }, + * }, + * }, + * ], + * TableName: "STRING_VALUE", // required + * ProvisionedWriteCapacityAutoScalingUpdate: { + * MinimumUnits: Number("long"), + * MaximumUnits: Number("long"), + * AutoScalingDisabled: true || false, + * AutoScalingRoleArn: "STRING_VALUE", + * ScalingPolicyUpdate: { + * PolicyName: "STRING_VALUE", + * TargetTrackingScalingPolicyConfiguration: { + * DisableScaleIn: true || false, + * ScaleInCooldown: Number("int"), + * ScaleOutCooldown: Number("int"), + * TargetValue: Number("double"), // required + * }, + * }, + * }, + * ReplicaUpdates: [ // ReplicaAutoScalingUpdateList + * { // ReplicaAutoScalingUpdate + * RegionName: "STRING_VALUE", // required + * ReplicaGlobalSecondaryIndexUpdates: [ // ReplicaGlobalSecondaryIndexAutoScalingUpdateList + * { // 
ReplicaGlobalSecondaryIndexAutoScalingUpdate + * IndexName: "STRING_VALUE", + * ProvisionedReadCapacityAutoScalingUpdate: "", + * }, + * ], + * ReplicaProvisionedReadCapacityAutoScalingUpdate: "", + * }, + * ], + * }; + * const command = new UpdateTableReplicaAutoScalingCommand(input); + * const response = await client.send(command); + * // { // UpdateTableReplicaAutoScalingOutput + * // TableAutoScalingDescription: { // TableAutoScalingDescription + * // TableName: "STRING_VALUE", + * // TableStatus: "CREATING" || "UPDATING" || "DELETING" || "ACTIVE" || "INACCESSIBLE_ENCRYPTION_CREDENTIALS" || "ARCHIVING" || "ARCHIVED", + * // Replicas: [ // ReplicaAutoScalingDescriptionList + * // { // ReplicaAutoScalingDescription + * // RegionName: "STRING_VALUE", + * // GlobalSecondaryIndexes: [ // ReplicaGlobalSecondaryIndexAutoScalingDescriptionList + * // { // ReplicaGlobalSecondaryIndexAutoScalingDescription + * // IndexName: "STRING_VALUE", + * // IndexStatus: "CREATING" || "UPDATING" || "DELETING" || "ACTIVE", + * // ProvisionedReadCapacityAutoScalingSettings: { // AutoScalingSettingsDescription + * // MinimumUnits: Number("long"), + * // MaximumUnits: Number("long"), + * // AutoScalingDisabled: true || false, + * // AutoScalingRoleArn: "STRING_VALUE", + * // ScalingPolicies: [ // AutoScalingPolicyDescriptionList + * // { // AutoScalingPolicyDescription + * // PolicyName: "STRING_VALUE", + * // TargetTrackingScalingPolicyConfiguration: { // AutoScalingTargetTrackingScalingPolicyConfigurationDescription + * // DisableScaleIn: true || false, + * // ScaleInCooldown: Number("int"), + * // ScaleOutCooldown: Number("int"), + * // TargetValue: Number("double"), // required + * // }, + * // }, + * // ], + * // }, + * // ProvisionedWriteCapacityAutoScalingSettings: { + * // MinimumUnits: Number("long"), + * // MaximumUnits: Number("long"), + * // AutoScalingDisabled: true || false, + * // AutoScalingRoleArn: "STRING_VALUE", + * // ScalingPolicies: [ + * // { + * // PolicyName: 
"STRING_VALUE", + * // TargetTrackingScalingPolicyConfiguration: { + * // DisableScaleIn: true || false, + * // ScaleInCooldown: Number("int"), + * // ScaleOutCooldown: Number("int"), + * // TargetValue: Number("double"), // required + * // }, + * // }, + * // ], + * // }, + * // }, + * // ], + * // ReplicaProvisionedReadCapacityAutoScalingSettings: { + * // MinimumUnits: Number("long"), + * // MaximumUnits: Number("long"), + * // AutoScalingDisabled: true || false, + * // AutoScalingRoleArn: "STRING_VALUE", + * // ScalingPolicies: [ + * // { + * // PolicyName: "STRING_VALUE", + * // TargetTrackingScalingPolicyConfiguration: { + * // DisableScaleIn: true || false, + * // ScaleInCooldown: Number("int"), + * // ScaleOutCooldown: Number("int"), + * // TargetValue: Number("double"), // required + * // }, + * // }, + * // ], + * // }, + * // ReplicaProvisionedWriteCapacityAutoScalingSettings: { + * // MinimumUnits: Number("long"), + * // MaximumUnits: Number("long"), + * // AutoScalingDisabled: true || false, + * // AutoScalingRoleArn: "STRING_VALUE", + * // ScalingPolicies: [ + * // { + * // PolicyName: "STRING_VALUE", + * // TargetTrackingScalingPolicyConfiguration: { + * // DisableScaleIn: true || false, + * // ScaleInCooldown: Number("int"), + * // ScaleOutCooldown: Number("int"), + * // TargetValue: Number("double"), // required + * // }, + * // }, + * // ], + * // }, + * // ReplicaStatus: "CREATING" || "CREATION_FAILED" || "UPDATING" || "DELETING" || "ACTIVE" || "REGION_DISABLED" || "INACCESSIBLE_ENCRYPTION_CREDENTIALS", + * // }, + * // ], + * // }, + * // }; + * + * ``` + * + * @param UpdateTableReplicaAutoScalingCommandInput - {@link UpdateTableReplicaAutoScalingCommandInput} + * @returns {@link UpdateTableReplicaAutoScalingCommandOutput} + * @see {@link UpdateTableReplicaAutoScalingCommandInput} for command's `input` shape. + * @see {@link UpdateTableReplicaAutoScalingCommandOutput} for command's `response` shape. 
+ * @see {@link DynamoDBClientResolvedConfig | config} for DynamoDBClient's `config` shape. + * + * @throws {@link InternalServerError} (server fault) + *

An error occurred on the server side.

+ * + * @throws {@link LimitExceededException} (client fault) + *

There is no limit to the number of daily on-demand backups that can be taken.

+ *

For most purposes, up to 500 simultaneous table operations are allowed per account. These operations + * include CreateTable, UpdateTable, + * DeleteTable,UpdateTimeToLive, + * RestoreTableFromBackup, and RestoreTableToPointInTime.

+ *

When you are creating a table with one or more secondary + * indexes, you can have up to 250 such requests running at a time. However, if the table or + * index specifications are complex, then DynamoDB might temporarily reduce the number + * of concurrent operations.

+ *

When importing into DynamoDB, up to 50 simultaneous import table operations are allowed per account.

+ *

There is a soft account quota of 2,500 tables.

+ *

GetRecords was called with a value of more than 1000 for the limit request parameter.

+ *

More than 2 processes are reading from the same streams shard at the same time. Exceeding + * this limit may result in request throttling.

+ * + * @throws {@link ResourceInUseException} (client fault) + *

The operation conflicts with the resource's availability. For example:

+ *
    + *
  • + *

    You attempted to recreate an existing table.

    + *
  • + *
  • + *

    You tried to delete a table currently in the CREATING state.

    + *
  • + *
  • + *

    You tried to update a resource that was already being updated.

    + *
  • + *
+ *

When appropriate, wait for the ongoing update to complete and attempt the request again.

+ * + * @throws {@link ResourceNotFoundException} (client fault) + *

The operation tried to access a nonexistent table or index. The resource might not + * be specified correctly, or its status might not be ACTIVE.

+ * + * @throws {@link DynamoDBServiceException} + *

Base exception class for all service exceptions from DynamoDB service.

+ * + * + * @public + */ +export declare class UpdateTableReplicaAutoScalingCommand extends UpdateTableReplicaAutoScalingCommand_base { + /** @internal type navigation helper, not in runtime. */ + protected static __types: { + api: { + input: UpdateTableReplicaAutoScalingInput; + output: UpdateTableReplicaAutoScalingOutput; + }; + sdk: { + input: UpdateTableReplicaAutoScalingCommandInput; + output: UpdateTableReplicaAutoScalingCommandOutput; + }; + }; +} diff --git a/amplify/functions/deleteDocument/node_modules/@aws-sdk/client-dynamodb/dist-types/commands/UpdateTimeToLiveCommand.d.ts b/amplify/functions/deleteDocument/node_modules/@aws-sdk/client-dynamodb/dist-types/commands/UpdateTimeToLiveCommand.d.ts new file mode 100644 index 0000000..b36e07e --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@aws-sdk/client-dynamodb/dist-types/commands/UpdateTimeToLiveCommand.d.ts @@ -0,0 +1,143 @@ +import { Command as $Command } from "@smithy/smithy-client"; +import { MetadataBearer as __MetadataBearer } from "@smithy/types"; +import { DynamoDBClientResolvedConfig, ServiceInputTypes, ServiceOutputTypes } from "../DynamoDBClient"; +import { UpdateTimeToLiveInput, UpdateTimeToLiveOutput } from "../models/models_0"; +/** + * @public + */ +export type { __MetadataBearer }; +export { $Command }; +/** + * @public + * + * The input for {@link UpdateTimeToLiveCommand}. + */ +export interface UpdateTimeToLiveCommandInput extends UpdateTimeToLiveInput { +} +/** + * @public + * + * The output of {@link UpdateTimeToLiveCommand}. 
+ */ +export interface UpdateTimeToLiveCommandOutput extends UpdateTimeToLiveOutput, __MetadataBearer { +} +declare const UpdateTimeToLiveCommand_base: { + new (input: UpdateTimeToLiveCommandInput): import("@smithy/smithy-client").CommandImpl; + new (__0_0: UpdateTimeToLiveCommandInput): import("@smithy/smithy-client").CommandImpl; + getEndpointParameterInstructions(): import("@smithy/middleware-endpoint").EndpointParameterInstructions; +}; +/** + *

The UpdateTimeToLive method enables or disables Time to Live (TTL) for + * the specified table. A successful UpdateTimeToLive call returns the current + * TimeToLiveSpecification. It can take up to one hour for the change to + * fully process. Any additional UpdateTimeToLive calls for the same table + * during this one hour duration result in a ValidationException.

+ *

TTL compares the current time in epoch time format to the time stored in the TTL + * attribute of an item. If the epoch time value stored in the attribute is less than the + * current time, the item is marked as expired and subsequently deleted.

+ * + *

The epoch time format is the number of seconds elapsed since 12:00:00 AM January + * 1, 1970 UTC.

+ *
+ *

DynamoDB deletes expired items on a best-effort basis to ensure availability of + * throughput for other data operations.

+ * + *

DynamoDB typically deletes expired items within two days of expiration. The exact + * duration within which an item gets deleted after expiration is specific to the + * nature of the workload. Items that have expired and not been deleted will still show + * up in reads, queries, and scans.

+ *
+ *

As items are deleted, they are removed from any local secondary index and global + * secondary index immediately in the same eventually consistent way as a standard delete + * operation.

+ *

For more information, see Time To Live in the + * Amazon DynamoDB Developer Guide.

+ * @example + * Use a bare-bones client and the command you need to make an API call. + * ```javascript + * import { DynamoDBClient, UpdateTimeToLiveCommand } from "@aws-sdk/client-dynamodb"; // ES Modules import + * // const { DynamoDBClient, UpdateTimeToLiveCommand } = require("@aws-sdk/client-dynamodb"); // CommonJS import + * const client = new DynamoDBClient(config); + * const input = { // UpdateTimeToLiveInput + * TableName: "STRING_VALUE", // required + * TimeToLiveSpecification: { // TimeToLiveSpecification + * Enabled: true || false, // required + * AttributeName: "STRING_VALUE", // required + * }, + * }; + * const command = new UpdateTimeToLiveCommand(input); + * const response = await client.send(command); + * // { // UpdateTimeToLiveOutput + * // TimeToLiveSpecification: { // TimeToLiveSpecification + * // Enabled: true || false, // required + * // AttributeName: "STRING_VALUE", // required + * // }, + * // }; + * + * ``` + * + * @param UpdateTimeToLiveCommandInput - {@link UpdateTimeToLiveCommandInput} + * @returns {@link UpdateTimeToLiveCommandOutput} + * @see {@link UpdateTimeToLiveCommandInput} for command's `input` shape. + * @see {@link UpdateTimeToLiveCommandOutput} for command's `response` shape. + * @see {@link DynamoDBClientResolvedConfig | config} for DynamoDBClient's `config` shape. + * + * @throws {@link InternalServerError} (server fault) + *

An error occurred on the server side.

+ * + * @throws {@link InvalidEndpointException} (client fault) + * + * @throws {@link LimitExceededException} (client fault) + *

There is no limit to the number of daily on-demand backups that can be taken.

+ *

For most purposes, up to 500 simultaneous table operations are allowed per account. These operations + * include CreateTable, UpdateTable, + * DeleteTable,UpdateTimeToLive, + * RestoreTableFromBackup, and RestoreTableToPointInTime.

+ *

When you are creating a table with one or more secondary + * indexes, you can have up to 250 such requests running at a time. However, if the table or + * index specifications are complex, then DynamoDB might temporarily reduce the number + * of concurrent operations.

+ *

When importing into DynamoDB, up to 50 simultaneous import table operations are allowed per account.

+ *

There is a soft account quota of 2,500 tables.

+ *

GetRecords was called with a value of more than 1000 for the limit request parameter.

+ *

More than 2 processes are reading from the same streams shard at the same time. Exceeding + * this limit may result in request throttling.

+ * + * @throws {@link ResourceInUseException} (client fault) + *

The operation conflicts with the resource's availability. For example:

+ *
    + *
  • + *

    You attempted to recreate an existing table.

    + *
  • + *
  • + *

    You tried to delete a table currently in the CREATING state.

    + *
  • + *
  • + *

    You tried to update a resource that was already being updated.

    + *
  • + *
+ *

When appropriate, wait for the ongoing update to complete and attempt the request again.

+ * + * @throws {@link ResourceNotFoundException} (client fault) + *

The operation tried to access a nonexistent table or index. The resource might not + * be specified correctly, or its status might not be ACTIVE.

+ * + * @throws {@link DynamoDBServiceException} + *

Base exception class for all service exceptions from DynamoDB service.

+ * + * + * @public + */ +export declare class UpdateTimeToLiveCommand extends UpdateTimeToLiveCommand_base { + /** @internal type navigation helper, not in runtime. */ + protected static __types: { + api: { + input: UpdateTimeToLiveInput; + output: UpdateTimeToLiveOutput; + }; + sdk: { + input: UpdateTimeToLiveCommandInput; + output: UpdateTimeToLiveCommandOutput; + }; + }; +} diff --git a/amplify/functions/deleteDocument/node_modules/@aws-sdk/client-dynamodb/dist-types/commands/index.d.ts b/amplify/functions/deleteDocument/node_modules/@aws-sdk/client-dynamodb/dist-types/commands/index.d.ts new file mode 100644 index 0000000..a5053a4 --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@aws-sdk/client-dynamodb/dist-types/commands/index.d.ts @@ -0,0 +1,57 @@ +export * from "./BatchExecuteStatementCommand"; +export * from "./BatchGetItemCommand"; +export * from "./BatchWriteItemCommand"; +export * from "./CreateBackupCommand"; +export * from "./CreateGlobalTableCommand"; +export * from "./CreateTableCommand"; +export * from "./DeleteBackupCommand"; +export * from "./DeleteItemCommand"; +export * from "./DeleteResourcePolicyCommand"; +export * from "./DeleteTableCommand"; +export * from "./DescribeBackupCommand"; +export * from "./DescribeContinuousBackupsCommand"; +export * from "./DescribeContributorInsightsCommand"; +export * from "./DescribeEndpointsCommand"; +export * from "./DescribeExportCommand"; +export * from "./DescribeGlobalTableCommand"; +export * from "./DescribeGlobalTableSettingsCommand"; +export * from "./DescribeImportCommand"; +export * from "./DescribeKinesisStreamingDestinationCommand"; +export * from "./DescribeLimitsCommand"; +export * from "./DescribeTableCommand"; +export * from "./DescribeTableReplicaAutoScalingCommand"; +export * from "./DescribeTimeToLiveCommand"; +export * from "./DisableKinesisStreamingDestinationCommand"; +export * from "./EnableKinesisStreamingDestinationCommand"; +export * from 
"./ExecuteStatementCommand"; +export * from "./ExecuteTransactionCommand"; +export * from "./ExportTableToPointInTimeCommand"; +export * from "./GetItemCommand"; +export * from "./GetResourcePolicyCommand"; +export * from "./ImportTableCommand"; +export * from "./ListBackupsCommand"; +export * from "./ListContributorInsightsCommand"; +export * from "./ListExportsCommand"; +export * from "./ListGlobalTablesCommand"; +export * from "./ListImportsCommand"; +export * from "./ListTablesCommand"; +export * from "./ListTagsOfResourceCommand"; +export * from "./PutItemCommand"; +export * from "./PutResourcePolicyCommand"; +export * from "./QueryCommand"; +export * from "./RestoreTableFromBackupCommand"; +export * from "./RestoreTableToPointInTimeCommand"; +export * from "./ScanCommand"; +export * from "./TagResourceCommand"; +export * from "./TransactGetItemsCommand"; +export * from "./TransactWriteItemsCommand"; +export * from "./UntagResourceCommand"; +export * from "./UpdateContinuousBackupsCommand"; +export * from "./UpdateContributorInsightsCommand"; +export * from "./UpdateGlobalTableCommand"; +export * from "./UpdateGlobalTableSettingsCommand"; +export * from "./UpdateItemCommand"; +export * from "./UpdateKinesisStreamingDestinationCommand"; +export * from "./UpdateTableCommand"; +export * from "./UpdateTableReplicaAutoScalingCommand"; +export * from "./UpdateTimeToLiveCommand"; diff --git a/amplify/functions/deleteDocument/node_modules/@aws-sdk/client-dynamodb/dist-types/endpoint/EndpointParameters.d.ts b/amplify/functions/deleteDocument/node_modules/@aws-sdk/client-dynamodb/dist-types/endpoint/EndpointParameters.d.ts new file mode 100644 index 0000000..057fd52 --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@aws-sdk/client-dynamodb/dist-types/endpoint/EndpointParameters.d.ts @@ -0,0 +1,54 @@ +import { Endpoint, EndpointParameters as __EndpointParameters, EndpointV2, Provider } from "@smithy/types"; +/** + * @public + */ +export interface 
ClientInputEndpointParameters { + region?: string | Provider; + useDualstackEndpoint?: boolean | Provider; + useFipsEndpoint?: boolean | Provider; + endpoint?: string | Provider | Endpoint | Provider | EndpointV2 | Provider; + accountId?: string | Provider; + accountIdEndpointMode?: string | Provider; +} +export type ClientResolvedEndpointParameters = ClientInputEndpointParameters & { + defaultSigningName: string; +}; +export declare const resolveClientEndpointParameters: (options: T & ClientInputEndpointParameters) => T & ClientInputEndpointParameters & { + defaultSigningName: string; +}; +export declare const commonParams: { + readonly UseFIPS: { + readonly type: "builtInParams"; + readonly name: "useFipsEndpoint"; + }; + readonly AccountId: { + readonly type: "builtInParams"; + readonly name: "accountId"; + }; + readonly Endpoint: { + readonly type: "builtInParams"; + readonly name: "endpoint"; + }; + readonly Region: { + readonly type: "builtInParams"; + readonly name: "region"; + }; + readonly UseDualStack: { + readonly type: "builtInParams"; + readonly name: "useDualstackEndpoint"; + }; + readonly AccountIdEndpointMode: { + readonly type: "builtInParams"; + readonly name: "accountIdEndpointMode"; + }; +}; +export interface EndpointParameters extends __EndpointParameters { + Region?: string; + UseDualStack?: boolean; + UseFIPS?: boolean; + Endpoint?: string; + AccountId?: string; + AccountIdEndpointMode?: string; + ResourceArn?: string; + ResourceArnList?: string[]; +} diff --git a/amplify/functions/deleteDocument/node_modules/@aws-sdk/client-dynamodb/dist-types/endpoint/endpointResolver.d.ts b/amplify/functions/deleteDocument/node_modules/@aws-sdk/client-dynamodb/dist-types/endpoint/endpointResolver.d.ts new file mode 100644 index 0000000..70a8eae --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@aws-sdk/client-dynamodb/dist-types/endpoint/endpointResolver.d.ts @@ -0,0 +1,5 @@ +import { EndpointV2, Logger } from "@smithy/types"; +import { 
EndpointParameters } from "./EndpointParameters"; +export declare const defaultEndpointResolver: (endpointParams: EndpointParameters, context?: { + logger?: Logger; +}) => EndpointV2; diff --git a/amplify/functions/deleteDocument/node_modules/@aws-sdk/client-dynamodb/dist-types/endpoint/ruleset.d.ts b/amplify/functions/deleteDocument/node_modules/@aws-sdk/client-dynamodb/dist-types/endpoint/ruleset.d.ts new file mode 100644 index 0000000..4b23899 --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@aws-sdk/client-dynamodb/dist-types/endpoint/ruleset.d.ts @@ -0,0 +1,2 @@ +import { RuleSetObject } from "@smithy/types"; +export declare const ruleSet: RuleSetObject; diff --git a/amplify/functions/deleteDocument/node_modules/@aws-sdk/client-dynamodb/dist-types/extensionConfiguration.d.ts b/amplify/functions/deleteDocument/node_modules/@aws-sdk/client-dynamodb/dist-types/extensionConfiguration.d.ts new file mode 100644 index 0000000..3aa1e50 --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@aws-sdk/client-dynamodb/dist-types/extensionConfiguration.d.ts @@ -0,0 +1,9 @@ +import { AwsRegionExtensionConfiguration } from "@aws-sdk/types"; +import { HttpHandlerExtensionConfiguration } from "@smithy/protocol-http"; +import { DefaultExtensionConfiguration } from "@smithy/types"; +import { HttpAuthExtensionConfiguration } from "./auth/httpAuthExtensionConfiguration"; +/** + * @internal + */ +export interface DynamoDBExtensionConfiguration extends HttpHandlerExtensionConfiguration, DefaultExtensionConfiguration, AwsRegionExtensionConfiguration, HttpAuthExtensionConfiguration { +} diff --git a/amplify/functions/deleteDocument/node_modules/@aws-sdk/client-dynamodb/dist-types/index.d.ts b/amplify/functions/deleteDocument/node_modules/@aws-sdk/client-dynamodb/dist-types/index.d.ts new file mode 100644 index 0000000..bb6be8f --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@aws-sdk/client-dynamodb/dist-types/index.d.ts @@ -0,0 +1,31 @@ 
+/** + * Amazon DynamoDB + *

Amazon DynamoDB is a fully managed NoSQL database service that provides fast + * and predictable performance with seamless scalability. DynamoDB lets you + * offload the administrative burdens of operating and scaling a distributed database, so + * that you don't have to worry about hardware provisioning, setup and configuration, + * replication, software patching, or cluster scaling.

+ *

With DynamoDB, you can create database tables that can store and retrieve + * any amount of data, and serve any level of request traffic. You can scale up or scale + * down your tables' throughput capacity without downtime or performance degradation, and + * use the Amazon Web Services Management Console to monitor resource utilization and performance + * metrics.

+ *

DynamoDB automatically spreads the data and traffic for your tables over + * a sufficient number of servers to handle your throughput and storage requirements, while + * maintaining consistent and fast performance. All of your data is stored on solid state + * disks (SSDs) and automatically replicated across multiple Availability Zones in an + * Amazon Web Services Region, providing built-in high availability and data + * durability.

+ * + * @packageDocumentation + */ +export * from "./DynamoDBClient"; +export * from "./DynamoDB"; +export { ClientInputEndpointParameters } from "./endpoint/EndpointParameters"; +export type { RuntimeExtension } from "./runtimeExtensions"; +export type { DynamoDBExtensionConfiguration } from "./extensionConfiguration"; +export * from "./commands"; +export * from "./pagination"; +export * from "./waiters"; +export * from "./models"; +export { DynamoDBServiceException } from "./models/DynamoDBServiceException"; diff --git a/amplify/functions/deleteDocument/node_modules/@aws-sdk/client-dynamodb/dist-types/models/DynamoDBServiceException.d.ts b/amplify/functions/deleteDocument/node_modules/@aws-sdk/client-dynamodb/dist-types/models/DynamoDBServiceException.d.ts new file mode 100644 index 0000000..f8ff019 --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@aws-sdk/client-dynamodb/dist-types/models/DynamoDBServiceException.d.ts @@ -0,0 +1,14 @@ +import { ServiceException as __ServiceException, ServiceExceptionOptions as __ServiceExceptionOptions } from "@smithy/smithy-client"; +export type { __ServiceExceptionOptions }; +export { __ServiceException }; +/** + * @public + * + * Base exception class for all service exceptions from DynamoDB service. 
+ */ +export declare class DynamoDBServiceException extends __ServiceException { + /** + * @internal + */ + constructor(options: __ServiceExceptionOptions); +} diff --git a/amplify/functions/deleteDocument/node_modules/@aws-sdk/client-dynamodb/dist-types/models/index.d.ts b/amplify/functions/deleteDocument/node_modules/@aws-sdk/client-dynamodb/dist-types/models/index.d.ts new file mode 100644 index 0000000..09c5d6e --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@aws-sdk/client-dynamodb/dist-types/models/index.d.ts @@ -0,0 +1 @@ +export * from "./models_0"; diff --git a/amplify/functions/deleteDocument/node_modules/@aws-sdk/client-dynamodb/dist-types/models/models_0.d.ts b/amplify/functions/deleteDocument/node_modules/@aws-sdk/client-dynamodb/dist-types/models/models_0.d.ts new file mode 100644 index 0000000..9821a22 --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@aws-sdk/client-dynamodb/dist-types/models/models_0.d.ts @@ -0,0 +1,11039 @@ +import { ExceptionOptionType as __ExceptionOptionType } from "@smithy/smithy-client"; +import { DynamoDBServiceException as __BaseException } from "./DynamoDBServiceException"; +/** + * @public + * @enum + */ +export declare const ApproximateCreationDateTimePrecision: { + readonly MICROSECOND: "MICROSECOND"; + readonly MILLISECOND: "MILLISECOND"; +}; +/** + * @public + */ +export type ApproximateCreationDateTimePrecision = (typeof ApproximateCreationDateTimePrecision)[keyof typeof ApproximateCreationDateTimePrecision]; +/** + *

Contains details of a table archival operation.

+ * @public + */ +export interface ArchivalSummary { + /** + *

The date and time when table archival was initiated by DynamoDB, in UNIX epoch time + * format.

+ * @public + */ + ArchivalDateTime?: Date | undefined; + /** + *

The reason DynamoDB archived the table. Currently, the only possible value is:

+ *
    + *
  • + *

    + * INACCESSIBLE_ENCRYPTION_CREDENTIALS - The table was archived due + * to the table's KMS key being inaccessible for more than seven + * days. An On-Demand backup was created at the archival time.

    + *
  • + *
+ * @public + */ + ArchivalReason?: string | undefined; + /** + *

The Amazon Resource Name (ARN) of the backup the table was archived to, when + * applicable in the archival reason. If you wish to restore this backup to the same table + * name, you will need to delete the original table.

+ * @public + */ + ArchivalBackupArn?: string | undefined; +} +/** + * @public + * @enum + */ +export declare const AttributeAction: { + readonly ADD: "ADD"; + readonly DELETE: "DELETE"; + readonly PUT: "PUT"; +}; +/** + * @public + */ +export type AttributeAction = (typeof AttributeAction)[keyof typeof AttributeAction]; +/** + * @public + * @enum + */ +export declare const ScalarAttributeType: { + readonly B: "B"; + readonly N: "N"; + readonly S: "S"; +}; +/** + * @public + */ +export type ScalarAttributeType = (typeof ScalarAttributeType)[keyof typeof ScalarAttributeType]; +/** + *

Represents an attribute for describing the schema for the table and indexes.

+ * @public + */ +export interface AttributeDefinition { + /** + *

A name for the attribute.

+ * @public + */ + AttributeName: string | undefined; + /** + *

The data type for the attribute, where:

+ *
    + *
  • + *

    + * S - the attribute is of type String

    + *
  • + *
  • + *

    + * N - the attribute is of type Number

    + *
  • + *
  • + *

    + * B - the attribute is of type Binary

    + *
  • + *
+ * @public + */ + AttributeType: ScalarAttributeType | undefined; +} +/** + *

Represents the properties of a target tracking scaling policy.

+ * @public + */ +export interface AutoScalingTargetTrackingScalingPolicyConfigurationDescription { + /** + *

Indicates whether scale in by the target tracking policy is disabled. If the value is + * true, scale in is disabled and the target tracking policy won't remove capacity from the + * scalable resource. Otherwise, scale in is enabled and the target tracking policy can + * remove capacity from the scalable resource. The default value is false.

+ * @public + */ + DisableScaleIn?: boolean | undefined; + /** + *

The amount of time, in seconds, after a scale in activity completes before another + * scale in activity can start. The cooldown period is used to block subsequent scale in + * requests until it has expired. You should scale in conservatively to protect your + * application's availability. However, if another alarm triggers a scale out policy during + * the cooldown period after a scale-in, application auto scaling scales out your scalable + * target immediately.

+ * @public + */ + ScaleInCooldown?: number | undefined; + /** + *

The amount of time, in seconds, after a scale out activity completes before another + * scale out activity can start. While the cooldown period is in effect, the capacity that + * has been added by the previous scale out event that initiated the cooldown is calculated + * as part of the desired capacity for the next scale out. You should continuously (but not + * excessively) scale out.

+ * @public + */ + ScaleOutCooldown?: number | undefined; + /** + *

The target value for the metric. The range is 8.515920e-109 to 1.174271e+108 (Base 10) + * or 2e-360 to 2e360 (Base 2).

+ * @public + */ + TargetValue: number | undefined; +} +/** + *

Represents the properties of the scaling policy.

+ * @public + */ +export interface AutoScalingPolicyDescription { + /** + *

The name of the scaling policy.

+ * @public + */ + PolicyName?: string | undefined; + /** + *

Represents a target tracking scaling policy configuration.

+ * @public + */ + TargetTrackingScalingPolicyConfiguration?: AutoScalingTargetTrackingScalingPolicyConfigurationDescription | undefined; +} +/** + *

Represents the settings of a target tracking scaling policy that will be + * modified.

+ * @public + */ +export interface AutoScalingTargetTrackingScalingPolicyConfigurationUpdate { + /** + *

Indicates whether scale in by the target tracking policy is disabled. If the value is + * true, scale in is disabled and the target tracking policy won't remove capacity from the + * scalable resource. Otherwise, scale in is enabled and the target tracking policy can + * remove capacity from the scalable resource. The default value is false.

+ * @public + */ + DisableScaleIn?: boolean | undefined; + /** + *

The amount of time, in seconds, after a scale in activity completes before another + * scale in activity can start. The cooldown period is used to block subsequent scale in + * requests until it has expired. You should scale in conservatively to protect your + * application's availability. However, if another alarm triggers a scale out policy during + * the cooldown period after a scale-in, application auto scaling scales out your scalable + * target immediately.

+ * @public + */ + ScaleInCooldown?: number | undefined; + /** + *

The amount of time, in seconds, after a scale out activity completes before another + * scale out activity can start. While the cooldown period is in effect, the capacity that + * has been added by the previous scale out event that initiated the cooldown is calculated + * as part of the desired capacity for the next scale out. You should continuously (but not + * excessively) scale out.

+ * @public + */ + ScaleOutCooldown?: number | undefined; + /** + *

The target value for the metric. The range is 8.515920e-109 to 1.174271e+108 (Base 10) + * or 2e-360 to 2e360 (Base 2).

+ * @public + */ + TargetValue: number | undefined; +} +/** + *

Represents the auto scaling policy to be modified.

+ * @public + */ +export interface AutoScalingPolicyUpdate { + /** + *

The name of the scaling policy.

+ * @public + */ + PolicyName?: string | undefined; + /** + *

Represents a target tracking scaling policy configuration.

+ * @public + */ + TargetTrackingScalingPolicyConfiguration: AutoScalingTargetTrackingScalingPolicyConfigurationUpdate | undefined; +} +/** + *

Represents the auto scaling settings for a global table or global secondary + * index.

+ * @public + */ +export interface AutoScalingSettingsDescription { + /** + *

The minimum capacity units that a global table or global secondary index should be + * scaled down to.

+ * @public + */ + MinimumUnits?: number | undefined; + /** + *

The maximum capacity units that a global table or global secondary index should be + * scaled up to.

+ * @public + */ + MaximumUnits?: number | undefined; + /** + *

Disabled auto scaling for this global table or global secondary index.

+ * @public + */ + AutoScalingDisabled?: boolean | undefined; + /** + *

Role ARN used for configuring the auto scaling policy.

+ * @public + */ + AutoScalingRoleArn?: string | undefined; + /** + *

Information about the scaling policies.

+ * @public + */ + ScalingPolicies?: AutoScalingPolicyDescription[] | undefined; +} +/** + *

Represents the auto scaling settings to be modified for a global table or global + * secondary index.

+ * @public + */ +export interface AutoScalingSettingsUpdate { + /** + *

The minimum capacity units that a global table or global secondary index should be + * scaled down to.

+ * @public + */ + MinimumUnits?: number | undefined; + /** + *

The maximum capacity units that a global table or global secondary index should be + * scaled up to.

+ * @public + */ + MaximumUnits?: number | undefined; + /** + *

Disabled auto scaling for this global table or global secondary index.

+ * @public + */ + AutoScalingDisabled?: boolean | undefined; + /** + *

Role ARN used for configuring auto scaling policy.

+ * @public + */ + AutoScalingRoleArn?: string | undefined; + /** + *

The scaling policy to apply for scaling target global table or global secondary index + * capacity units.

+ * @public + */ + ScalingPolicyUpdate?: AutoScalingPolicyUpdate | undefined; +} +/** + * @public + * @enum + */ +export declare const BackupStatus: { + readonly AVAILABLE: "AVAILABLE"; + readonly CREATING: "CREATING"; + readonly DELETED: "DELETED"; +}; +/** + * @public + */ +export type BackupStatus = (typeof BackupStatus)[keyof typeof BackupStatus]; +/** + * @public + * @enum + */ +export declare const BackupType: { + readonly AWS_BACKUP: "AWS_BACKUP"; + readonly SYSTEM: "SYSTEM"; + readonly USER: "USER"; +}; +/** + * @public + */ +export type BackupType = (typeof BackupType)[keyof typeof BackupType]; +/** + *

Contains the details of the backup created for the table.

+ * @public + */ +export interface BackupDetails { + /** + *

ARN associated with the backup.

+ * @public + */ + BackupArn: string | undefined; + /** + *

Name of the requested backup.

+ * @public + */ + BackupName: string | undefined; + /** + *

Size of the backup in bytes. DynamoDB updates this value approximately every six + * hours. Recent changes might not be reflected in this value.

+ * @public + */ + BackupSizeBytes?: number | undefined; + /** + *

Backup can be in one of the following states: CREATING, ACTIVE, DELETED.

+ * @public + */ + BackupStatus: BackupStatus | undefined; + /** + *

BackupType:

+ *
    + *
  • + *

    + * USER - You create and manage these using the on-demand backup + * feature.

    + *
  • + *
  • + *

    + * SYSTEM - If you delete a table with point-in-time recovery enabled, + * a SYSTEM backup is automatically created and is retained for 35 + * days (at no additional cost). System backups allow you to restore the deleted + * table to the state it was in just before the point of deletion.

    + *
  • + *
  • + *

    + * AWS_BACKUP - On-demand backup created by you from Backup service.

    + *
  • + *
+ * @public + */ + BackupType: BackupType | undefined; + /** + *

Time at which the backup was created. This is the request time of the backup.

+ * @public + */ + BackupCreationDateTime: Date | undefined; + /** + *

Time at which the automatic on-demand backup created by DynamoDB will + * expire. This SYSTEM on-demand backup expires automatically 35 days after + * its creation.

+ * @public + */ + BackupExpiryDateTime?: Date | undefined; +} +/** + * @public + * @enum + */ +export declare const BillingMode: { + readonly PAY_PER_REQUEST: "PAY_PER_REQUEST"; + readonly PROVISIONED: "PROVISIONED"; +}; +/** + * @public + */ +export type BillingMode = (typeof BillingMode)[keyof typeof BillingMode]; +/** + * @public + * @enum + */ +export declare const KeyType: { + readonly HASH: "HASH"; + readonly RANGE: "RANGE"; +}; +/** + * @public + */ +export type KeyType = (typeof KeyType)[keyof typeof KeyType]; +/** + *

Represents a single element of a key schema. A key schema + * specifies the attributes that make up the primary key of a table, or the key attributes + * of an index.

+ *

A KeySchemaElement represents exactly one attribute of the primary key. + * For example, a simple primary key would be represented by one + * KeySchemaElement (for the partition key). A composite primary key would + * require one KeySchemaElement for the partition key, and another + * KeySchemaElement for the sort key.

+ *

A KeySchemaElement must be a scalar, top-level attribute (not a nested + * attribute). The data type must be one of String, Number, or Binary. The attribute cannot + * be nested within a List or a Map.

+ * @public + */ +export interface KeySchemaElement { + /** + *

The name of a key attribute.

+ * @public + */ + AttributeName: string | undefined; + /** + *

The role that this key attribute will assume:

+ *
    + *
  • + *

    + * HASH - partition key

    + *
  • + *
  • + *

    + * RANGE - sort key

    + *
  • + *
+ * + *

The partition key of an item is also known as its hash + * attribute. The term "hash attribute" derives from DynamoDB's usage of an internal hash function to evenly distribute data items across + * partitions, based on their partition key values.

+ *

The sort key of an item is also known as its range attribute. + * The term "range attribute" derives from the way DynamoDB stores items with + * the same partition key physically close together, in sorted order by the sort key + * value.

+ *
+ * @public + */ + KeyType: KeyType | undefined; +} +/** + *

Sets the maximum number of read and write units for the specified on-demand table. If + * you use this parameter, you must specify MaxReadRequestUnits, + * MaxWriteRequestUnits, or both.

+ * @public + */ +export interface OnDemandThroughput { + /** + *

Maximum number of read request units for the specified table.

+ *

To specify a maximum OnDemandThroughput on your table, set the value of + * MaxReadRequestUnits as greater than or equal to 1. To remove the + * maximum OnDemandThroughput that is currently set on your table, set the + * value of MaxReadRequestUnits to -1.

+ * @public + */ + MaxReadRequestUnits?: number | undefined; + /** + *

Maximum number of write request units for the specified table.

+ *

To specify a maximum OnDemandThroughput on your table, set the value of + * MaxWriteRequestUnits as greater than or equal to 1. To remove the + * maximum OnDemandThroughput that is currently set on your table, set the + * value of MaxWriteRequestUnits to -1.

+ * @public + */ + MaxWriteRequestUnits?: number | undefined; +} +/** + *

Represents the provisioned throughput settings for the specified global secondary + * index. You must use ProvisionedThroughput or + * OnDemandThroughput based on your table’s capacity mode.

+ *

For current minimum and maximum provisioned throughput values, see Service, + * Account, and Table Quotas in the Amazon DynamoDB Developer + * Guide.

+ * @public + */ +export interface ProvisionedThroughput { + /** + *

The maximum number of strongly consistent reads consumed per second before DynamoDB + * returns a ThrottlingException. For more information, see Specifying + * Read and Write Requirements in the Amazon DynamoDB Developer + * Guide.

+ *

If read/write capacity mode is PAY_PER_REQUEST the value is set to + * 0.

+ * @public + */ + ReadCapacityUnits: number | undefined; + /** + *

The maximum number of writes consumed per second before DynamoDB returns a + * ThrottlingException. For more information, see Specifying + * Read and Write Requirements in the Amazon DynamoDB Developer + * Guide.

+ *

If read/write capacity mode is PAY_PER_REQUEST the value is set to + * 0.

+ * @public + */ + WriteCapacityUnits: number | undefined; +} +/** + *

Contains the details of the table when the backup was created.

+ * @public + */ +export interface SourceTableDetails { + /** + *

The name of the table for which the backup was created.

+ * @public + */ + TableName: string | undefined; + /** + *

Unique identifier for the table for which the backup was created.

+ * @public + */ + TableId: string | undefined; + /** + *

ARN of the table for which backup was created.

+ * @public + */ + TableArn?: string | undefined; + /** + *

Size of the table in bytes. Note that this is an approximate value.

+ * @public + */ + TableSizeBytes?: number | undefined; + /** + *

Schema of the table.

+ * @public + */ + KeySchema: KeySchemaElement[] | undefined; + /** + *

Time when the source table was created.

+ * @public + */ + TableCreationDateTime: Date | undefined; + /** + *

Read IOPs and Write IOPS on the table when the backup was created.

+ * @public + */ + ProvisionedThroughput: ProvisionedThroughput | undefined; + /** + *

Sets the maximum number of read and write units for the specified on-demand table. If + * you use this parameter, you must specify MaxReadRequestUnits, + * MaxWriteRequestUnits, or both.

+ * @public + */ + OnDemandThroughput?: OnDemandThroughput | undefined; + /** + *

Number of items in the table. Note that this is an approximate value.

+ * @public + */ + ItemCount?: number | undefined; + /** + *

Controls how you are charged for read and write throughput and how you manage + * capacity. This setting can be changed later.

+ *
    + *
  • + *

    + * PROVISIONED - Sets the read/write capacity mode to + * PROVISIONED. We recommend using PROVISIONED for + * predictable workloads.

    + *
  • + *
  • + *

    + * PAY_PER_REQUEST - Sets the read/write capacity mode to + * PAY_PER_REQUEST. We recommend using + * PAY_PER_REQUEST for unpredictable workloads.

    + *
  • + *
+ * @public + */ + BillingMode?: BillingMode | undefined; +} +/** + * @public + * @enum + */ +export declare const ProjectionType: { + readonly ALL: "ALL"; + readonly INCLUDE: "INCLUDE"; + readonly KEYS_ONLY: "KEYS_ONLY"; +}; +/** + * @public + */ +export type ProjectionType = (typeof ProjectionType)[keyof typeof ProjectionType]; +/** + *

Represents attributes that are copied (projected) from the table into an index. These + * are in addition to the primary key attributes and index key attributes, which are + * automatically projected.

+ * @public + */ +export interface Projection { + /** + *

The set of attributes that are projected into the index:

+ *
    + *
  • + *

    + * KEYS_ONLY - Only the index and primary keys are projected into the + * index.

    + *
  • + *
  • + *

    + * INCLUDE - In addition to the attributes described in + * KEYS_ONLY, the secondary index will include other non-key + * attributes that you specify.

    + *
  • + *
  • + *

    + * ALL - All of the table attributes are projected into the + * index.

    + *
  • + *
+ *

When using the DynamoDB console, ALL is selected by default.

+ * @public + */ + ProjectionType?: ProjectionType | undefined; + /** + *

Represents the non-key attribute names which will be projected into the index.

+ *

For global and local secondary indexes, the total count of NonKeyAttributes summed + * across all of the secondary indexes, must not exceed 100. If you project the same + * attribute into two different indexes, this counts as two distinct attributes when + * determining the total. This limit only applies when you specify the ProjectionType of + * INCLUDE. You still can specify the ProjectionType of ALL to + * project all attributes from the source table, even if the table has more than 100 + * attributes.

+ * @public + */ + NonKeyAttributes?: string[] | undefined; +} +/** + *

Represents the properties of a global secondary index for the table when the backup + * was created.

+ * @public + */ +export interface GlobalSecondaryIndexInfo { + /** + *

The name of the global secondary index.

+ * @public + */ + IndexName?: string | undefined; + /** + *

The complete key schema for a global secondary index, which consists of one or more + * pairs of attribute names and key types:

+ *
    + *
  • + *

    + * HASH - partition key

    + *
  • + *
  • + *

    + * RANGE - sort key

    + *
  • + *
+ * + *

The partition key of an item is also known as its hash + * attribute. The term "hash attribute" derives from DynamoDB's usage of an internal hash function to evenly distribute data items across + * partitions, based on their partition key values.

+ *

The sort key of an item is also known as its range attribute. + * The term "range attribute" derives from the way DynamoDB stores items with + * the same partition key physically close together, in sorted order by the sort key + * value.

+ *
+ * @public + */ + KeySchema?: KeySchemaElement[] | undefined; + /** + *

Represents attributes that are copied (projected) from the table into the global + * secondary index. These are in addition to the primary key attributes and index key + * attributes, which are automatically projected.

+ * @public + */ + Projection?: Projection | undefined; + /** + *

Represents the provisioned throughput settings for the specified global secondary + * index.

+ * @public + */ + ProvisionedThroughput?: ProvisionedThroughput | undefined; + /** + *

Sets the maximum number of read and write units for the specified on-demand table. If + * you use this parameter, you must specify MaxReadRequestUnits, + * MaxWriteRequestUnits, or both.

+ * @public + */ + OnDemandThroughput?: OnDemandThroughput | undefined; +} +/** + *

Represents the properties of a local secondary index for the table when the backup was + * created.

+ * @public + */ +export interface LocalSecondaryIndexInfo { + /** + *

Represents the name of the local secondary index.

+ * @public + */ + IndexName?: string | undefined; + /** + *

The complete key schema for a local secondary index, which consists of one or more + * pairs of attribute names and key types:

+ *
    + *
  • + *

    + * HASH - partition key

    + *
  • + *
  • + *

    + * RANGE - sort key

    + *
  • + *
+ * + *

The partition key of an item is also known as its hash + * attribute. The term "hash attribute" derives from DynamoDB's usage of + * an internal hash function to evenly distribute data items across partitions, based + * on their partition key values.

+ *

The sort key of an item is also known as its range attribute. + * The term "range attribute" derives from the way DynamoDB stores items with the same + * partition key physically close together, in sorted order by the sort key + * value.

+ *
+ * @public + */ + KeySchema?: KeySchemaElement[] | undefined; + /** + *

Represents attributes that are copied (projected) from the table into the global + * secondary index. These are in addition to the primary key attributes and index key + * attributes, which are automatically projected.

+ * @public + */ + Projection?: Projection | undefined; +} +/** + * @public + * @enum + */ +export declare const SSEType: { + readonly AES256: "AES256"; + readonly KMS: "KMS"; +}; +/** + * @public + */ +export type SSEType = (typeof SSEType)[keyof typeof SSEType]; +/** + * @public + * @enum + */ +export declare const SSEStatus: { + readonly DISABLED: "DISABLED"; + readonly DISABLING: "DISABLING"; + readonly ENABLED: "ENABLED"; + readonly ENABLING: "ENABLING"; + readonly UPDATING: "UPDATING"; +}; +/** + * @public + */ +export type SSEStatus = (typeof SSEStatus)[keyof typeof SSEStatus]; +/** + *

The description of the server-side encryption status on the specified table.

+ * @public + */ +export interface SSEDescription { + /** + *

Represents the current state of server-side encryption. The only supported values + * are:

+ *
    + *
  • + *

    + * ENABLED - Server-side encryption is enabled.

    + *
  • + *
  • + *

    + * UPDATING - Server-side encryption is being updated.

    + *
  • + *
+ * @public + */ + Status?: SSEStatus | undefined; + /** + *

Server-side encryption type. The only supported value is:

+ *
    + *
  • + *

    + * KMS - Server-side encryption that uses Key Management Service. The + * key is stored in your account and is managed by KMS (KMS charges apply).

    + *
  • + *
+ * @public + */ + SSEType?: SSEType | undefined; + /** + *

The KMS key ARN used for the KMS encryption.

+ * @public + */ + KMSMasterKeyArn?: string | undefined; + /** + *

Indicates the time, in UNIX epoch date format, when DynamoDB detected that + * the table's KMS key was inaccessible. This attribute will automatically + * be cleared when DynamoDB detects that the table's KMS key is accessible + * again. DynamoDB will initiate the table archival process when table's KMS key remains inaccessible for more than seven days from this date.

+ * @public + */ + InaccessibleEncryptionDateTime?: Date | undefined; +} +/** + * @public + * @enum + */ +export declare const StreamViewType: { + readonly KEYS_ONLY: "KEYS_ONLY"; + readonly NEW_AND_OLD_IMAGES: "NEW_AND_OLD_IMAGES"; + readonly NEW_IMAGE: "NEW_IMAGE"; + readonly OLD_IMAGE: "OLD_IMAGE"; +}; +/** + * @public + */ +export type StreamViewType = (typeof StreamViewType)[keyof typeof StreamViewType]; +/** + *

Represents the DynamoDB Streams configuration for a table in DynamoDB.

+ * @public + */ +export interface StreamSpecification { + /** + *

Indicates whether DynamoDB Streams is enabled (true) or disabled (false) on the + * table.

+ * @public + */ + StreamEnabled: boolean | undefined; + /** + *

When an item in the table is modified, StreamViewType determines what + * information is written to the stream for this table. Valid values for + * StreamViewType are:

+ *
    + *
  • + *

    + * KEYS_ONLY - Only the key attributes of the modified item are + * written to the stream.

    + *
  • + *
  • + *

    + * NEW_IMAGE - The entire item, as it appears after it was modified, + * is written to the stream.

    + *
  • + *
  • + *

    + * OLD_IMAGE - The entire item, as it appeared before it was modified, + * is written to the stream.

    + *
  • + *
  • + *

    + * NEW_AND_OLD_IMAGES - Both the new and the old item images of the + * item are written to the stream.

    + *
  • + *
+ * @public + */ + StreamViewType?: StreamViewType | undefined; +} +/** + * @public + * @enum + */ +export declare const TimeToLiveStatus: { + readonly DISABLED: "DISABLED"; + readonly DISABLING: "DISABLING"; + readonly ENABLED: "ENABLED"; + readonly ENABLING: "ENABLING"; +}; +/** + * @public + */ +export type TimeToLiveStatus = (typeof TimeToLiveStatus)[keyof typeof TimeToLiveStatus]; +/** + *

The description of the Time to Live (TTL) status on the specified table.

+ * @public + */ +export interface TimeToLiveDescription { + /** + *

The TTL status for the table.

+ * @public + */ + TimeToLiveStatus?: TimeToLiveStatus | undefined; + /** + *

The name of the TTL attribute for items in the table.

+ * @public + */ + AttributeName?: string | undefined; +} +/** + *

Contains the details of the features enabled on the table when the backup was created. + * For example, LSIs, GSIs, streams, TTL.

+ * @public + */ +export interface SourceTableFeatureDetails { + /** + *

Represents the LSI properties for the table when the backup was created. It includes + * the IndexName, KeySchema and Projection for the LSIs on the table at the time of backup. + *

+ * @public + */ + LocalSecondaryIndexes?: LocalSecondaryIndexInfo[] | undefined; + /** + *

Represents the GSI properties for the table when the backup was created. It includes + * the IndexName, KeySchema, Projection, and ProvisionedThroughput for the GSIs on the + * table at the time of backup.

+ * @public + */ + GlobalSecondaryIndexes?: GlobalSecondaryIndexInfo[] | undefined; + /** + *

Stream settings on the table when the backup was created.

+ * @public + */ + StreamDescription?: StreamSpecification | undefined; + /** + *

Time to Live settings on the table when the backup was created.

+ * @public + */ + TimeToLiveDescription?: TimeToLiveDescription | undefined; + /** + *

The description of the server-side encryption status on the table when the backup was + * created.

+ * @public + */ + SSEDescription?: SSEDescription | undefined; +} +/** + *

Contains the description of the backup created for the table.

+ * @public + */ +export interface BackupDescription { + /** + *

Contains the details of the backup created for the table.

+ * @public + */ + BackupDetails?: BackupDetails | undefined; + /** + *

Contains the details of the table when the backup was created.

+ * @public + */ + SourceTableDetails?: SourceTableDetails | undefined; + /** + *

Contains the details of the features enabled on the table when the backup was created. + * For example, LSIs, GSIs, streams, TTL.

+ * @public + */ + SourceTableFeatureDetails?: SourceTableFeatureDetails | undefined; +} +/** + *

There is another ongoing conflicting backup control plane operation on the table. + * The backup is either being created, deleted or restored to a table.

+ * @public + */ +export declare class BackupInUseException extends __BaseException { + readonly name: "BackupInUseException"; + readonly $fault: "client"; + /** + * @internal + */ + constructor(opts: __ExceptionOptionType); +} +/** + *

Backup not found for the given BackupARN.

+ * @public + */ +export declare class BackupNotFoundException extends __BaseException { + readonly name: "BackupNotFoundException"; + readonly $fault: "client"; + /** + * @internal + */ + constructor(opts: __ExceptionOptionType); +} +/** + *

Contains details for the backup.

+ * @public + */ +export interface BackupSummary { + /** + *

Name of the table.

+ * @public + */ + TableName?: string | undefined; + /** + *

Unique identifier for the table.

+ * @public + */ + TableId?: string | undefined; + /** + *

ARN associated with the table.

+ * @public + */ + TableArn?: string | undefined; + /** + *

ARN associated with the backup.

+ * @public + */ + BackupArn?: string | undefined; + /** + *

Name of the specified backup.

+ * @public + */ + BackupName?: string | undefined; + /** + *

Time at which the backup was created.

+ * @public + */ + BackupCreationDateTime?: Date | undefined; + /** + *

Time at which the automatic on-demand backup created by DynamoDB will + * expire. This SYSTEM on-demand backup expires automatically 35 days after + * its creation.

+ * @public + */ + BackupExpiryDateTime?: Date | undefined; + /** + *

Backup can be in one of the following states: CREATING, ACTIVE, DELETED.

+ * @public + */ + BackupStatus?: BackupStatus | undefined; + /** + *

BackupType:

+ *
    + *
  • + *

    + * USER - You create and manage these using the on-demand backup + * feature.

    + *
  • + *
  • + *

    + * SYSTEM - If you delete a table with point-in-time recovery enabled, + * a SYSTEM backup is automatically created and is retained for 35 + * days (at no additional cost). System backups allow you to restore the deleted + * table to the state it was in just before the point of deletion.

    + *
  • + *
  • + *

    + * AWS_BACKUP - On-demand backup created by you from Backup service.

    + *
  • + *
+ * @public + */ + BackupType?: BackupType | undefined; + /** + *

Size of the backup in bytes.

+ * @public + */ + BackupSizeBytes?: number | undefined; +} +/** + * @public + * @enum + */ +export declare const BackupTypeFilter: { + readonly ALL: "ALL"; + readonly AWS_BACKUP: "AWS_BACKUP"; + readonly SYSTEM: "SYSTEM"; + readonly USER: "USER"; +}; +/** + * @public + */ +export type BackupTypeFilter = (typeof BackupTypeFilter)[keyof typeof BackupTypeFilter]; +/** + * @public + * @enum + */ +export declare const ReturnConsumedCapacity: { + readonly INDEXES: "INDEXES"; + readonly NONE: "NONE"; + readonly TOTAL: "TOTAL"; +}; +/** + * @public + */ +export type ReturnConsumedCapacity = (typeof ReturnConsumedCapacity)[keyof typeof ReturnConsumedCapacity]; +/** + * @public + * @enum + */ +export declare const ReturnValuesOnConditionCheckFailure: { + readonly ALL_OLD: "ALL_OLD"; + readonly NONE: "NONE"; +}; +/** + * @public + */ +export type ReturnValuesOnConditionCheckFailure = (typeof ReturnValuesOnConditionCheckFailure)[keyof typeof ReturnValuesOnConditionCheckFailure]; +/** + *

Represents the amount of provisioned throughput capacity consumed on a table or an + * index.

+ * @public + */ +export interface Capacity { + /** + *

The total number of read capacity units consumed on a table or an index.

+ * @public + */ + ReadCapacityUnits?: number | undefined; + /** + *

The total number of write capacity units consumed on a table or an index.

+ * @public + */ + WriteCapacityUnits?: number | undefined; + /** + *

The total number of capacity units consumed on a table or an index.

+ * @public + */ + CapacityUnits?: number | undefined; +} +/** + *

The capacity units consumed by an operation. The data returned includes the total + * provisioned throughput consumed, along with statistics for the table and any indexes + * involved in the operation. ConsumedCapacity is only returned if the request + * asked for it. For more information, see Provisioned capacity mode in the Amazon DynamoDB Developer + * Guide.

+ * @public + */ +export interface ConsumedCapacity { + /** + *

The name of the table that was affected by the operation. If you had specified the + * Amazon Resource Name (ARN) of a table in the input, you'll see the table ARN in the response.

+ * @public + */ + TableName?: string | undefined; + /** + *

The total number of capacity units consumed by the operation.

+ * @public + */ + CapacityUnits?: number | undefined; + /** + *

The total number of read capacity units consumed by the operation.

+ * @public + */ + ReadCapacityUnits?: number | undefined; + /** + *

The total number of write capacity units consumed by the operation.

+ * @public + */ + WriteCapacityUnits?: number | undefined; + /** + *

The amount of throughput consumed on the table affected by the operation.

+ * @public + */ + Table?: Capacity | undefined; + /** + *

The amount of throughput consumed on each local index affected by the + * operation.

+ * @public + */ + LocalSecondaryIndexes?: Record | undefined; + /** + *

The amount of throughput consumed on each global index affected by the + * operation.

+ * @public + */ + GlobalSecondaryIndexes?: Record | undefined; +} +/** + * @public + * @enum + */ +export declare const BatchStatementErrorCodeEnum: { + readonly AccessDenied: "AccessDenied"; + readonly ConditionalCheckFailed: "ConditionalCheckFailed"; + readonly DuplicateItem: "DuplicateItem"; + readonly InternalServerError: "InternalServerError"; + readonly ItemCollectionSizeLimitExceeded: "ItemCollectionSizeLimitExceeded"; + readonly ProvisionedThroughputExceeded: "ProvisionedThroughputExceeded"; + readonly RequestLimitExceeded: "RequestLimitExceeded"; + readonly ResourceNotFound: "ResourceNotFound"; + readonly ThrottlingError: "ThrottlingError"; + readonly TransactionConflict: "TransactionConflict"; + readonly ValidationError: "ValidationError"; +}; +/** + * @public + */ +export type BatchStatementErrorCodeEnum = (typeof BatchStatementErrorCodeEnum)[keyof typeof BatchStatementErrorCodeEnum]; +/** + *

An error occurred on the server side.

+ * @public + */ +export declare class InternalServerError extends __BaseException { + readonly name: "InternalServerError"; + readonly $fault: "server"; + /** + * @internal + */ + constructor(opts: __ExceptionOptionType); +} +/** + *

Throughput exceeds the current throughput quota for your account. Please contact + * Amazon Web ServicesSupport to request a + * quota increase.

+ * @public + */ +export declare class RequestLimitExceeded extends __BaseException { + readonly name: "RequestLimitExceeded"; + readonly $fault: "client"; + /** + * @internal + */ + constructor(opts: __ExceptionOptionType); +} +/** + * @public + */ +export declare class InvalidEndpointException extends __BaseException { + readonly name: "InvalidEndpointException"; + readonly $fault: "client"; + Message?: string | undefined; + /** + * @internal + */ + constructor(opts: __ExceptionOptionType); +} +/** + *

Your request rate is too high. The Amazon Web Services SDKs for DynamoDB + * automatically retry requests that receive this exception. Your request is eventually + * successful, unless your retry queue is too large to finish. Reduce the frequency of + * requests and use exponential backoff. For more information, go to Error Retries and Exponential Backoff in the Amazon DynamoDB Developer Guide.

+ * @public + */ +export declare class ProvisionedThroughputExceededException extends __BaseException { + readonly name: "ProvisionedThroughputExceededException"; + readonly $fault: "client"; + /** + * @internal + */ + constructor(opts: __ExceptionOptionType); +} +/** + *

The operation tried to access a nonexistent table or index. The resource might not + * be specified correctly, or its status might not be ACTIVE.

+ * @public + */ +export declare class ResourceNotFoundException extends __BaseException { + readonly name: "ResourceNotFoundException"; + readonly $fault: "client"; + /** + * @internal + */ + constructor(opts: __ExceptionOptionType); +} +/** + * @public + * @enum + */ +export declare const ReturnItemCollectionMetrics: { + readonly NONE: "NONE"; + readonly SIZE: "SIZE"; +}; +/** + * @public + */ +export type ReturnItemCollectionMetrics = (typeof ReturnItemCollectionMetrics)[keyof typeof ReturnItemCollectionMetrics]; +/** + *

An item collection is too large. This exception is only returned for tables that + * have one or more local secondary indexes.

+ * @public + */ +export declare class ItemCollectionSizeLimitExceededException extends __BaseException { + readonly name: "ItemCollectionSizeLimitExceededException"; + readonly $fault: "client"; + /** + * @internal + */ + constructor(opts: __ExceptionOptionType); +} +/** + *

Contains the details for the read/write capacity mode. This page talks about + * PROVISIONED and PAY_PER_REQUEST billing modes. For more + * information about these modes, see Read/write capacity mode.

+ * + *

You may need to switch to on-demand mode at least once in order to return a + * BillingModeSummary response.

+ *
+ * @public + */ +export interface BillingModeSummary { + /** + *

Controls how you are charged for read and write throughput and how you manage + * capacity. This setting can be changed later.

+ *
    + *
  • + *

    + * PROVISIONED - Sets the read/write capacity mode to + * PROVISIONED. We recommend using PROVISIONED for + * predictable workloads.

    + *
  • + *
  • + *

    + * PAY_PER_REQUEST - Sets the read/write capacity mode to + * PAY_PER_REQUEST. We recommend using + * PAY_PER_REQUEST for unpredictable workloads.

    + *
  • + *
+ * @public + */ + BillingMode?: BillingMode | undefined; + /** + *

Represents the time when PAY_PER_REQUEST was last set as the read/write + * capacity mode.

+ * @public + */ + LastUpdateToPayPerRequestDateTime?: Date | undefined; +} +/** + * @public + * @enum + */ +export declare const ComparisonOperator: { + readonly BEGINS_WITH: "BEGINS_WITH"; + readonly BETWEEN: "BETWEEN"; + readonly CONTAINS: "CONTAINS"; + readonly EQ: "EQ"; + readonly GE: "GE"; + readonly GT: "GT"; + readonly IN: "IN"; + readonly LE: "LE"; + readonly LT: "LT"; + readonly NE: "NE"; + readonly NOT_CONTAINS: "NOT_CONTAINS"; + readonly NOT_NULL: "NOT_NULL"; + readonly NULL: "NULL"; +}; +/** + * @public + */ +export type ComparisonOperator = (typeof ComparisonOperator)[keyof typeof ComparisonOperator]; +/** + * @public + * @enum + */ +export declare const ConditionalOperator: { + readonly AND: "AND"; + readonly OR: "OR"; +}; +/** + * @public + */ +export type ConditionalOperator = (typeof ConditionalOperator)[keyof typeof ConditionalOperator]; +/** + * @public + * @enum + */ +export declare const ContinuousBackupsStatus: { + readonly DISABLED: "DISABLED"; + readonly ENABLED: "ENABLED"; +}; +/** + * @public + */ +export type ContinuousBackupsStatus = (typeof ContinuousBackupsStatus)[keyof typeof ContinuousBackupsStatus]; +/** + * @public + * @enum + */ +export declare const PointInTimeRecoveryStatus: { + readonly DISABLED: "DISABLED"; + readonly ENABLED: "ENABLED"; +}; +/** + * @public + */ +export type PointInTimeRecoveryStatus = (typeof PointInTimeRecoveryStatus)[keyof typeof PointInTimeRecoveryStatus]; +/** + *

The description of the point in time settings applied to the table.

+ * @public + */ +export interface PointInTimeRecoveryDescription { + /** + *

The current state of point in time recovery:

+ *
    + *
  • + *

    + * ENABLED - Point in time recovery is enabled.

    + *
  • + *
  • + *

    + * DISABLED - Point in time recovery is disabled.

    + *
  • + *
+ * @public + */ + PointInTimeRecoveryStatus?: PointInTimeRecoveryStatus | undefined; + /** + *

The number of preceding days for which continuous backups are taken and maintained. + * Your table data is only recoverable to any point-in-time from within the configured + * recovery period. This parameter is optional.

+ * @public + */ + RecoveryPeriodInDays?: number | undefined; + /** + *

Specifies the earliest point in time you can restore your table to. You can restore + * your table to any point in time during the last 35 days.

+ * @public + */ + EarliestRestorableDateTime?: Date | undefined; + /** + *

+ * LatestRestorableDateTime is typically 5 minutes before the current time. + *

+ * @public + */ + LatestRestorableDateTime?: Date | undefined; +} +/** + *

Represents the continuous backups and point in time recovery settings on the + * table.

+ * @public + */ +export interface ContinuousBackupsDescription { + /** + *

+ * ContinuousBackupsStatus can be one of the following states: ENABLED, + * DISABLED

+ * @public + */ + ContinuousBackupsStatus: ContinuousBackupsStatus | undefined; + /** + *

The description of the point in time recovery settings applied to the table.

+ * @public + */ + PointInTimeRecoveryDescription?: PointInTimeRecoveryDescription | undefined; +} +/** + *

Backups have not yet been enabled for this table.

+ * @public + */ +export declare class ContinuousBackupsUnavailableException extends __BaseException { + readonly name: "ContinuousBackupsUnavailableException"; + readonly $fault: "client"; + /** + * @internal + */ + constructor(opts: __ExceptionOptionType); +} +/** + * @public + * @enum + */ +export declare const ContributorInsightsAction: { + readonly DISABLE: "DISABLE"; + readonly ENABLE: "ENABLE"; +}; +/** + * @public + */ +export type ContributorInsightsAction = (typeof ContributorInsightsAction)[keyof typeof ContributorInsightsAction]; +/** + * @public + * @enum + */ +export declare const ContributorInsightsStatus: { + readonly DISABLED: "DISABLED"; + readonly DISABLING: "DISABLING"; + readonly ENABLED: "ENABLED"; + readonly ENABLING: "ENABLING"; + readonly FAILED: "FAILED"; +}; +/** + * @public + */ +export type ContributorInsightsStatus = (typeof ContributorInsightsStatus)[keyof typeof ContributorInsightsStatus]; +/** + *

Represents a Contributor Insights summary entry.

+ * @public + */ +export interface ContributorInsightsSummary { + /** + *

Name of the table associated with the summary.

+ * @public + */ + TableName?: string | undefined; + /** + *

Name of the index associated with the summary, if any.

+ * @public + */ + IndexName?: string | undefined; + /** + *

Describes the current status for contributor insights for the given table and index, + * if applicable.

+ * @public + */ + ContributorInsightsStatus?: ContributorInsightsStatus | undefined; +} +/** + * @public + */ +export interface CreateBackupInput { + /** + *

The name of the table. You can also provide the Amazon Resource Name (ARN) of the table in this + * parameter.

+ * @public + */ + TableName: string | undefined; + /** + *

Specified name for the backup.

+ * @public + */ + BackupName: string | undefined; +} +/** + * @public + */ +export interface CreateBackupOutput { + /** + *

Contains the details of the backup created for the table.

+ * @public + */ + BackupDetails?: BackupDetails | undefined; +} +/** + *

There is no limit to the number of daily on-demand backups that can be taken.

+ *

For most purposes, up to 500 simultaneous table operations are allowed per account. These operations + * include CreateTable, UpdateTable, + * DeleteTable,UpdateTimeToLive, + * RestoreTableFromBackup, and RestoreTableToPointInTime.

+ *

When you are creating a table with one or more secondary + * indexes, you can have up to 250 such requests running at a time. However, if the table or + * index specifications are complex, then DynamoDB might temporarily reduce the number + * of concurrent operations.

+ *

When importing into DynamoDB, up to 50 simultaneous import table operations are allowed per account.

+ *

There is a soft account quota of 2,500 tables.

+ *

GetRecords was called with a value of more than 1000 for the limit request parameter.

+ *

More than 2 processes are reading from the same streams shard at the same time. Exceeding + * this limit may result in request throttling.

+ * @public + */ +export declare class LimitExceededException extends __BaseException { + readonly name: "LimitExceededException"; + readonly $fault: "client"; + /** + * @internal + */ + constructor(opts: __ExceptionOptionType); +} +/** + *

A target table with the specified name is either being created or deleted. + *

+ * @public + */ +export declare class TableInUseException extends __BaseException { + readonly name: "TableInUseException"; + readonly $fault: "client"; + /** + * @internal + */ + constructor(opts: __ExceptionOptionType); +} +/** + *

A source table with the name TableName does not currently exist within + * the subscriber's account or the subscriber is operating in the wrong Amazon Web Services Region.

+ * @public + */ +export declare class TableNotFoundException extends __BaseException { + readonly name: "TableNotFoundException"; + readonly $fault: "client"; + /** + * @internal + */ + constructor(opts: __ExceptionOptionType); +} +/** + *

Provides visibility into the number of read and write operations your table or + * secondary index can instantaneously support. The settings can be modified using the + * UpdateTable operation to meet the throughput requirements of an + * upcoming peak event.

+ * @public + */ +export interface WarmThroughput { + /** + *

Represents the number of read operations your base table can instantaneously + * support.

+ * @public + */ + ReadUnitsPerSecond?: number | undefined; + /** + *

Represents the number of write operations your base table can instantaneously + * support.

+ * @public + */ + WriteUnitsPerSecond?: number | undefined; +} +/** + *

Represents a new global secondary index to be added to an existing table.

+ * @public + */ +export interface CreateGlobalSecondaryIndexAction { + /** + *

The name of the global secondary index to be created.

+ * @public + */ + IndexName: string | undefined; + /** + *

The key schema for the global secondary index.

+ * @public + */ + KeySchema: KeySchemaElement[] | undefined; + /** + *

Represents attributes that are copied (projected) from the table into an index. These + * are in addition to the primary key attributes and index key attributes, which are + * automatically projected.

+ * @public + */ + Projection: Projection | undefined; + /** + *

Represents the provisioned throughput settings for the specified global secondary + * index.

+ *

For current minimum and maximum provisioned throughput values, see Service, + * Account, and Table Quotas in the Amazon DynamoDB Developer + * Guide.

+ * @public + */ + ProvisionedThroughput?: ProvisionedThroughput | undefined; + /** + *

The maximum number of read and write units for the global secondary index being + * created. If you use this parameter, you must specify MaxReadRequestUnits, + * MaxWriteRequestUnits, or both. You must use either + * OnDemand Throughput or ProvisionedThroughput based on your table's + * capacity mode.

+ * @public + */ + OnDemandThroughput?: OnDemandThroughput | undefined; + /** + *

Represents the warm throughput value (in read units per second and write units per + * second) when creating a secondary index.

+ * @public + */ + WarmThroughput?: WarmThroughput | undefined; +} +/** + *

Represents the properties of a replica.

+ * @public + */ +export interface Replica { + /** + *

The Region where the replica needs to be created.

+ * @public + */ + RegionName?: string | undefined; +} +/** + * @public + */ +export interface CreateGlobalTableInput { + /** + *

The global table name.

+ * @public + */ + GlobalTableName: string | undefined; + /** + *

The Regions where the global table needs to be created.

+ * @public + */ + ReplicationGroup: Replica[] | undefined; +} +/** + * @public + * @enum + */ +export declare const GlobalTableStatus: { + readonly ACTIVE: "ACTIVE"; + readonly CREATING: "CREATING"; + readonly DELETING: "DELETING"; + readonly UPDATING: "UPDATING"; +}; +/** + * @public + */ +export type GlobalTableStatus = (typeof GlobalTableStatus)[keyof typeof GlobalTableStatus]; +/** + *

Overrides the on-demand throughput settings for this replica table. If you don't + * specify a value for this parameter, it uses the source table's on-demand throughput + * settings.

+ * @public + */ +export interface OnDemandThroughputOverride { + /** + *

Maximum number of read request units for the specified replica table.

+ * @public + */ + MaxReadRequestUnits?: number | undefined; +} +/** + *

Replica-specific provisioned throughput settings. If not specified, uses the source + * table's provisioned throughput settings.

+ * @public + */ +export interface ProvisionedThroughputOverride { + /** + *

Replica-specific read capacity units. If not specified, uses the source table's read + * capacity settings.

+ * @public + */ + ReadCapacityUnits?: number | undefined; +} +/** + * @public + * @enum + */ +export declare const IndexStatus: { + readonly ACTIVE: "ACTIVE"; + readonly CREATING: "CREATING"; + readonly DELETING: "DELETING"; + readonly UPDATING: "UPDATING"; +}; +/** + * @public + */ +export type IndexStatus = (typeof IndexStatus)[keyof typeof IndexStatus]; +/** + *

The description of the warm throughput value on a global secondary index.

+ * @public + */ +export interface GlobalSecondaryIndexWarmThroughputDescription { + /** + *

Represents warm throughput read units per second value for a global secondary + * index.

+ * @public + */ + ReadUnitsPerSecond?: number | undefined; + /** + *

Represents warm throughput write units per second value for a global secondary + * index.

+ * @public + */ + WriteUnitsPerSecond?: number | undefined; + /** + *

Represents the warm throughput status being created or updated on a global secondary + * index. The status can only be UPDATING or ACTIVE.

+ * @public + */ + Status?: IndexStatus | undefined; +} +/** + *

Represents the properties of a replica global secondary index.

+ * @public + */ +export interface ReplicaGlobalSecondaryIndexDescription { + /** + *

The name of the global secondary index.

+ * @public + */ + IndexName?: string | undefined; + /** + *

If not described, uses the source table GSI's read capacity settings.

+ * @public + */ + ProvisionedThroughputOverride?: ProvisionedThroughputOverride | undefined; + /** + *

Overrides the maximum on-demand throughput for the specified global secondary index in + * the specified replica table.

+ * @public + */ + OnDemandThroughputOverride?: OnDemandThroughputOverride | undefined; + /** + *

Represents the warm throughput of the global secondary index for this replica.

+ * @public + */ + WarmThroughput?: GlobalSecondaryIndexWarmThroughputDescription | undefined; +} +/** + * @public + * @enum + */ +export declare const ReplicaStatus: { + readonly ACTIVE: "ACTIVE"; + readonly CREATING: "CREATING"; + readonly CREATION_FAILED: "CREATION_FAILED"; + readonly DELETING: "DELETING"; + readonly INACCESSIBLE_ENCRYPTION_CREDENTIALS: "INACCESSIBLE_ENCRYPTION_CREDENTIALS"; + readonly REGION_DISABLED: "REGION_DISABLED"; + readonly UPDATING: "UPDATING"; +}; +/** + * @public + */ +export type ReplicaStatus = (typeof ReplicaStatus)[keyof typeof ReplicaStatus]; +/** + * @public + * @enum + */ +export declare const TableClass: { + readonly STANDARD: "STANDARD"; + readonly STANDARD_INFREQUENT_ACCESS: "STANDARD_INFREQUENT_ACCESS"; +}; +/** + * @public + */ +export type TableClass = (typeof TableClass)[keyof typeof TableClass]; +/** + *

Contains details of the table class.

+ * @public + */ +export interface TableClassSummary { + /** + *

The table class of the specified table. Valid values are STANDARD and + * STANDARD_INFREQUENT_ACCESS.

+ * @public + */ + TableClass?: TableClass | undefined; + /** + *

The date and time at which the table class was last updated.

+ * @public + */ + LastUpdateDateTime?: Date | undefined; +} +/** + * @public + * @enum + */ +export declare const TableStatus: { + readonly ACTIVE: "ACTIVE"; + readonly ARCHIVED: "ARCHIVED"; + readonly ARCHIVING: "ARCHIVING"; + readonly CREATING: "CREATING"; + readonly DELETING: "DELETING"; + readonly INACCESSIBLE_ENCRYPTION_CREDENTIALS: "INACCESSIBLE_ENCRYPTION_CREDENTIALS"; + readonly UPDATING: "UPDATING"; +}; +/** + * @public + */ +export type TableStatus = (typeof TableStatus)[keyof typeof TableStatus]; +/** + *

Represents the warm throughput value (in read units per second and write units per second) + * of the table. Warm throughput is applicable for DynamoDB Standard-IA tables and specifies + * the minimum provisioned capacity maintained for immediate data access.

+ * @public + */ +export interface TableWarmThroughputDescription { + /** + *

Represents the base table's warm throughput value in read units per second.

+ * @public + */ + ReadUnitsPerSecond?: number | undefined; + /** + *

Represents the base table's warm throughput value in write units per second.

+ * @public + */ + WriteUnitsPerSecond?: number | undefined; + /** + *

Represents warm throughput value of the base table.

+ * @public + */ + Status?: TableStatus | undefined; +} +/** + *

Contains the details of the replica.

+ * @public + */ +export interface ReplicaDescription { + /** + *

The name of the Region.

+ * @public + */ + RegionName?: string | undefined; + /** + *

The current state of the replica:

+ *
    + *
  • + *

    + * CREATING - The replica is being created.

    + *
  • + *
  • + *

    + * UPDATING - The replica is being updated.

    + *
  • + *
  • + *

    + * DELETING - The replica is being deleted.

    + *
  • + *
  • + *

    + * ACTIVE - The replica is ready for use.

    + *
  • + *
  • + *

    + * REGION_DISABLED - The replica is inaccessible because the Amazon Web Services Region has been disabled.

    + * + *

    If the Amazon Web Services Region remains inaccessible for more than 20 + * hours, DynamoDB will remove this replica from the replication + * group. The replica will not be deleted and replication will stop from and to + * this region.

    + *
    + *
  • + *
  • + *

    + * INACCESSIBLE_ENCRYPTION_CREDENTIALS - The KMS key + * used to encrypt the table is inaccessible.

    + * + *

    If the KMS key remains inaccessible for more than 20 hours, + * DynamoDB will remove this replica from the replication group. + * The replica will not be deleted and replication will stop from and to this + * region.

    + *
    + *
  • + *
+ * @public + */ + ReplicaStatus?: ReplicaStatus | undefined; + /** + *

Detailed information about the replica status.

+ * @public + */ + ReplicaStatusDescription?: string | undefined; + /** + *

Specifies the progress of a Create, Update, or Delete action on the replica as a + * percentage.

+ * @public + */ + ReplicaStatusPercentProgress?: string | undefined; + /** + *

The KMS key of the replica that will be used for KMS + * encryption.

+ * @public + */ + KMSMasterKeyId?: string | undefined; + /** + *

Replica-specific provisioned throughput. If not described, uses the source table's + * provisioned throughput settings.

+ * @public + */ + ProvisionedThroughputOverride?: ProvisionedThroughputOverride | undefined; + /** + *

Overrides the maximum on-demand throughput settings for the specified replica + * table.

+ * @public + */ + OnDemandThroughputOverride?: OnDemandThroughputOverride | undefined; + /** + *

Represents the warm throughput value for this replica.

+ * @public + */ + WarmThroughput?: TableWarmThroughputDescription | undefined; + /** + *

Replica-specific global secondary index settings.

+ * @public + */ + GlobalSecondaryIndexes?: ReplicaGlobalSecondaryIndexDescription[] | undefined; + /** + *

The time at which the replica was first detected as inaccessible. To determine cause + * of inaccessibility check the ReplicaStatus property.

+ * @public + */ + ReplicaInaccessibleDateTime?: Date | undefined; + /** + *

Contains details of the table class.

+ * @public + */ + ReplicaTableClassSummary?: TableClassSummary | undefined; +} +/** + *

Contains details about the global table.

+ * @public + */ +export interface GlobalTableDescription { + /** + *

The Regions where the global table has replicas.

+ * @public + */ + ReplicationGroup?: ReplicaDescription[] | undefined; + /** + *

The unique identifier of the global table.

+ * @public + */ + GlobalTableArn?: string | undefined; + /** + *

The creation time of the global table.

+ * @public + */ + CreationDateTime?: Date | undefined; + /** + *

The current state of the global table:

+ *
    + *
  • + *

    + * CREATING - The global table is being created.

    + *
  • + *
  • + *

    + * UPDATING - The global table is being updated.

    + *
  • + *
  • + *

    + * DELETING - The global table is being deleted.

    + *
  • + *
  • + *

    + * ACTIVE - The global table is ready for use.

    + *
  • + *
+ * @public + */ + GlobalTableStatus?: GlobalTableStatus | undefined; + /** + *

The global table name.

+ * @public + */ + GlobalTableName?: string | undefined; +} +/** + * @public + */ +export interface CreateGlobalTableOutput { + /** + *

Contains the details of the global table.

+ * @public + */ + GlobalTableDescription?: GlobalTableDescription | undefined; +} +/** + *

The specified global table already exists.

+ * @public + */ +export declare class GlobalTableAlreadyExistsException extends __BaseException { + readonly name: "GlobalTableAlreadyExistsException"; + readonly $fault: "client"; + /** + * @internal + */ + constructor(opts: __ExceptionOptionType); +} +/** + *

Represents a replica to be added.

+ * @public + */ +export interface CreateReplicaAction { + /** + *

The Region of the replica to be added.

+ * @public + */ + RegionName: string | undefined; +} +/** + *

Represents the properties of a replica global secondary index.

+ * @public + */ +export interface ReplicaGlobalSecondaryIndex { + /** + *

The name of the global secondary index.

+ * @public + */ + IndexName: string | undefined; + /** + *

Replica table GSI-specific provisioned throughput. If not specified, uses the source + * table GSI's read capacity settings.

+ * @public + */ + ProvisionedThroughputOverride?: ProvisionedThroughputOverride | undefined; + /** + *

Overrides the maximum on-demand throughput settings for the specified global secondary + * index in the specified replica table.

+ * @public + */ + OnDemandThroughputOverride?: OnDemandThroughputOverride | undefined; +} +/** + *

Represents a replica to be created.

+ * @public + */ +export interface CreateReplicationGroupMemberAction { + /** + *

The Region where the new replica will be created.

+ * @public + */ + RegionName: string | undefined; + /** + *

The KMS key that should be used for KMS encryption in + * the new replica. To specify a key, use its key ID, Amazon Resource Name (ARN), alias + * name, or alias ARN. Note that you should only provide this parameter if the key is + * different from the default DynamoDB KMS key + * alias/aws/dynamodb.

+ * @public + */ + KMSMasterKeyId?: string | undefined; + /** + *

Replica-specific provisioned throughput. If not specified, uses the source table's + * provisioned throughput settings.

+ * @public + */ + ProvisionedThroughputOverride?: ProvisionedThroughputOverride | undefined; + /** + *

The maximum on-demand throughput settings for the specified replica table being + * created. You can only modify MaxReadRequestUnits, because you can't modify + * MaxWriteRequestUnits for individual replica tables.

+ * @public + */ + OnDemandThroughputOverride?: OnDemandThroughputOverride | undefined; + /** + *

Replica-specific global secondary index settings.

+ * @public + */ + GlobalSecondaryIndexes?: ReplicaGlobalSecondaryIndex[] | undefined; + /** + *

Replica-specific table class. If not specified, uses the source table's table + * class.

+ * @public + */ + TableClassOverride?: TableClass | undefined; +} +/** + *

Represents the properties of a global secondary index.

+ * @public + */ +export interface GlobalSecondaryIndex { + /** + *

The name of the global secondary index. The name must be unique among all other + * indexes on this table.

+ * @public + */ + IndexName: string | undefined; + /** + *

The complete key schema for a global secondary index, which consists of one or more + * pairs of attribute names and key types:

+ *
    + *
  • + *

    + * HASH - partition key

    + *
  • + *
  • + *

    + * RANGE - sort key

    + *
  • + *
+ * + *

The partition key of an item is also known as its hash + * attribute. The term "hash attribute" derives from DynamoDB's usage of + * an internal hash function to evenly distribute data items across partitions, based + * on their partition key values.

+ *

The sort key of an item is also known as its range attribute. + * The term "range attribute" derives from the way DynamoDB stores items with the same + * partition key physically close together, in sorted order by the sort key + * value.

+ *
+ * @public + */ + KeySchema: KeySchemaElement[] | undefined; + /** + *

Represents attributes that are copied (projected) from the table into the global + * secondary index. These are in addition to the primary key attributes and index key + * attributes, which are automatically projected.

+ * @public + */ + Projection: Projection | undefined; + /** + *

Represents the provisioned throughput settings for the specified global secondary + * index. You must use either + * OnDemandThroughput or ProvisionedThroughput based + * on your table's capacity mode.

+ *

For current minimum and maximum provisioned throughput values, see Service, + * Account, and Table Quotas in the Amazon DynamoDB Developer + * Guide.

+ * @public + */ + ProvisionedThroughput?: ProvisionedThroughput | undefined; + /** + *

The maximum number of read and write units for the specified global secondary index. + * If you use this parameter, you must specify MaxReadRequestUnits, + * MaxWriteRequestUnits, or both. You must use either + * OnDemandThroughput or ProvisionedThroughput based + * on your table's capacity mode.

+ * @public + */ + OnDemandThroughput?: OnDemandThroughput | undefined; + /** + *

Represents the warm throughput value (in read units per second and write units per + * second) for the specified secondary index. If you use this parameter, you must specify + * ReadUnitsPerSecond, WriteUnitsPerSecond, or both.

+ * @public + */ + WarmThroughput?: WarmThroughput | undefined; +} +/** + *

Represents the properties of a local secondary index.

+ * @public + */ +export interface LocalSecondaryIndex { + /** + *

The name of the local secondary index. The name must be unique among all other indexes + * on this table.

+ * @public + */ + IndexName: string | undefined; + /** + *

The complete key schema for the local secondary index, consisting of one or more pairs + * of attribute names and key types:

+ *
    + *
  • + *

    + * HASH - partition key

    + *
  • + *
  • + *

    + * RANGE - sort key

    + *
  • + *
+ * + *

The partition key of an item is also known as its hash + * attribute. The term "hash attribute" derives from DynamoDB's usage of + * an internal hash function to evenly distribute data items across partitions, based + * on their partition key values.

+ *

The sort key of an item is also known as its range attribute. + * The term "range attribute" derives from the way DynamoDB stores items with the same + * partition key physically close together, in sorted order by the sort key + * value.

+ *
+ * @public + */ + KeySchema: KeySchemaElement[] | undefined; + /** + *

Represents attributes that are copied (projected) from the table into the local + * secondary index. These are in addition to the primary key attributes and index key + * attributes, which are automatically projected.

+ * @public + */ + Projection: Projection | undefined; +} +/** + *

Represents the settings used to enable server-side encryption.

+ * @public + */ +export interface SSESpecification { + /** + *

Indicates whether server-side encryption is done using an Amazon Web Services managed + * key or an Amazon Web Services owned key. If enabled (true), server-side encryption type + * is set to KMS and an Amazon Web Services managed key is used (KMS charges apply). If disabled (false) or not specified, server-side + * encryption is set to Amazon Web Services owned key.

+ * @public + */ + Enabled?: boolean | undefined; + /** + *

Server-side encryption type. The only supported value is:

+ *
    + *
  • + *

    + * KMS - Server-side encryption that uses Key Management Service. The + * key is stored in your account and is managed by KMS (KMS charges apply).

    + *
  • + *
+ * @public + */ + SSEType?: SSEType | undefined; + /** + *

The KMS key that should be used for the KMS encryption. + * To specify a key, use its key ID, Amazon Resource Name (ARN), alias name, or alias ARN. + * Note that you should only provide this parameter if the key is different from the + * default DynamoDB key alias/aws/dynamodb.

+ * @public + */ + KMSMasterKeyId?: string | undefined; +} +/** + *

Describes a tag. A tag is a key-value pair. You can add up to 50 tags to a single + * DynamoDB table.

+ *

Amazon Web Services-assigned tag names and values are automatically assigned the + * aws: prefix, which the user cannot assign. Amazon Web Services-assigned + * tag names do not count towards the tag limit of 50. User-assigned tag names have the + * prefix user: in the Cost Allocation Report. You cannot backdate the + * application of a tag.

+ *

For an overview on tagging DynamoDB resources, see Tagging + * for DynamoDB in the Amazon DynamoDB Developer + * Guide.

+ * @public + */ +export interface Tag { + /** + *

The key of the tag. Tag keys are case sensitive. Each DynamoDB table can + * only have up to one tag with the same key. If you try to add an existing tag (same key), + * the existing tag value will be updated to the new value.

+ * @public + */ + Key: string | undefined; + /** + *

The value of the tag. Tag values are case-sensitive and can be null.

+ * @public + */ + Value: string | undefined; +} +/** + *

Represents the input of a CreateTable operation.

+ * @public + */ +export interface CreateTableInput { + /** + *

An array of attributes that describe the key schema for the table and indexes.

+ * @public + */ + AttributeDefinitions: AttributeDefinition[] | undefined; + /** + *

The name of the table to create. You can also provide the Amazon Resource Name (ARN) of the table in + * this parameter.

+ * @public + */ + TableName: string | undefined; + /** + *

Specifies the attributes that make up the primary key for a table or an index. The + * attributes in KeySchema must also be defined in the + * AttributeDefinitions array. For more information, see Data + * Model in the Amazon DynamoDB Developer Guide.

+ *

Each KeySchemaElement in the array is composed of:

+ *
    + *
  • + *

    + * AttributeName - The name of this key attribute.

    + *
  • + *
  • + *

    + * KeyType - The role that the key attribute will assume:

    + *
      + *
    • + *

      + * HASH - partition key

      + *
    • + *
    • + *

      + * RANGE - sort key

      + *
    • + *
    + *
  • + *
+ * + *

The partition key of an item is also known as its hash + * attribute. The term "hash attribute" derives from the DynamoDB usage + * of an internal hash function to evenly distribute data items across partitions, + * based on their partition key values.

+ *

The sort key of an item is also known as its range attribute. + * The term "range attribute" derives from the way DynamoDB stores items with the same + * partition key physically close together, in sorted order by the sort key + * value.

+ *
+ *

For a simple primary key (partition key), you must provide exactly one element with a + * KeyType of HASH.

+ *

For a composite primary key (partition key and sort key), you must provide exactly two + * elements, in this order: The first element must have a KeyType of + * HASH, and the second element must have a KeyType of + * RANGE.

+ *

For more information, see Working with Tables in the Amazon DynamoDB Developer + * Guide.

+ * @public + */ + KeySchema: KeySchemaElement[] | undefined; + /** + *

One or more local secondary indexes (the maximum is 5) to be created on the table. + * Each index is scoped to a given partition key value. There is a 10 GB size limit per + * partition key value; otherwise, the size of a local secondary index is + * unconstrained.

+ *

Each local secondary index in the array includes the following:

+ *
    + *
  • + *

    + * IndexName - The name of the local secondary index. Must be unique + * only for this table.

    + *

    + *
  • + *
  • + *

    + * KeySchema - Specifies the key schema for the local secondary index. + * The key schema must begin with the same partition key as the table.

    + *
  • + *
  • + *

    + * Projection - Specifies attributes that are copied (projected) from + * the table into the index. These are in addition to the primary key attributes + * and index key attributes, which are automatically projected. Each attribute + * specification is composed of:

    + *
      + *
    • + *

      + * ProjectionType - One of the following:

      + *
        + *
      • + *

        + * KEYS_ONLY - Only the index and primary keys are + * projected into the index.

        + *
      • + *
      • + *

        + * INCLUDE - Only the specified table attributes are + * projected into the index. The list of projected attributes is in + * NonKeyAttributes.

        + *
      • + *
      • + *

        + * ALL - All of the table attributes are projected + * into the index.

        + *
      • + *
      + *
    • + *
    • + *

      + * NonKeyAttributes - A list of one or more non-key attribute + * names that are projected into the secondary index. The total count of + * attributes provided in NonKeyAttributes, summed across all + * of the secondary indexes, must not exceed 100. If you project the same + * attribute into two different indexes, this counts as two distinct + * attributes when determining the total. This limit only applies when you + * specify the ProjectionType of INCLUDE. You still can specify the + * ProjectionType of ALL to project all attributes from the + * source table, even if the table has more than 100 attributes.

      + *
    • + *
    + *
  • + *
+ * @public + */ + LocalSecondaryIndexes?: LocalSecondaryIndex[] | undefined; + /** + *

One or more global secondary indexes (the maximum is 20) to be created on the table. + * Each global secondary index in the array includes the following:

+ *
    + *
  • + *

    + * IndexName - The name of the global secondary index. Must be unique + * only for this table.

    + *

    + *
  • + *
  • + *

    + * KeySchema - Specifies the key schema for the global secondary + * index.

    + *
  • + *
  • + *

    + * Projection - Specifies attributes that are copied (projected) from + * the table into the index. These are in addition to the primary key attributes + * and index key attributes, which are automatically projected. Each attribute + * specification is composed of:

    + *
      + *
    • + *

      + * ProjectionType - One of the following:

      + *
        + *
      • + *

        + * KEYS_ONLY - Only the index and primary keys are + * projected into the index.

        + *
      • + *
      • + *

        + * INCLUDE - Only the specified table attributes are + * projected into the index. The list of projected attributes is in + * NonKeyAttributes.

        + *
      • + *
      • + *

        + * ALL - All of the table attributes are projected + * into the index.

        + *
      • + *
      + *
    • + *
    • + *

      + * NonKeyAttributes - A list of one or more non-key attribute + * names that are projected into the secondary index. The total count of + * attributes provided in NonKeyAttributes, summed across all + * of the secondary indexes, must not exceed 100. If you project the same + * attribute into two different indexes, this counts as two distinct + * attributes when determining the total. This limit only applies when you + * specify the ProjectionType of INCLUDE. You still can + * specify the ProjectionType of ALL to project all attributes + * from the source table, even if the table has more than 100 attributes.

      + *
    • + *
    + *
  • + *
  • + *

    + * ProvisionedThroughput - The provisioned throughput settings for the + * global secondary index, consisting of read and write capacity units.

    + *
  • + *
+ * @public + */ + GlobalSecondaryIndexes?: GlobalSecondaryIndex[] | undefined; + /** + *

Controls how you are charged for read and write throughput and how you manage + * capacity. This setting can be changed later.

+ *
    + *
  • + *

    + * PAY_PER_REQUEST - We recommend using PAY_PER_REQUEST + * for most DynamoDB workloads. PAY_PER_REQUEST sets the billing mode + * to On-demand capacity mode.

    + *
  • + *
  • + *

    + * PROVISIONED - We recommend using PROVISIONED for + * steady workloads with predictable growth where capacity requirements can be + * reliably forecasted. PROVISIONED sets the billing mode to Provisioned capacity mode.

    + *
  • + *
+ * @public + */ + BillingMode?: BillingMode | undefined; + /** + *

Represents the provisioned throughput settings for a specified table or index. The + * settings can be modified using the UpdateTable operation.

+ *

If you set BillingMode as PROVISIONED, you must specify this property. + * If you set BillingMode as PAY_PER_REQUEST, you cannot specify this + * property.

+ *

For current minimum and maximum provisioned throughput values, see Service, + * Account, and Table Quotas in the Amazon DynamoDB Developer + * Guide.

+ * @public + */ + ProvisionedThroughput?: ProvisionedThroughput | undefined; + /** + *

The settings for DynamoDB Streams on the table. These settings consist of:

+ *
    + *
  • + *

    + * StreamEnabled - Indicates whether DynamoDB Streams is to be enabled + * (true) or disabled (false).

    + *
  • + *
  • + *

    + * StreamViewType - When an item in the table is modified, + * StreamViewType determines what information is written to the + * table's stream. Valid values for StreamViewType are:

    + *
      + *
    • + *

      + * KEYS_ONLY - Only the key attributes of the modified item + * are written to the stream.

      + *
    • + *
    • + *

      + * NEW_IMAGE - The entire item, as it appears after it was + * modified, is written to the stream.

      + *
    • + *
    • + *

      + * OLD_IMAGE - The entire item, as it appeared before it was + * modified, is written to the stream.

      + *
    • + *
    • + *

      + * NEW_AND_OLD_IMAGES - Both the new and the old item images + * of the item are written to the stream.

      + *
    • + *
    + *
  • + *
+ * @public + */ + StreamSpecification?: StreamSpecification | undefined; + /** + *

Represents the settings used to enable server-side encryption.

+ * @public + */ + SSESpecification?: SSESpecification | undefined; + /** + *

A list of key-value pairs to label the table. For more information, see Tagging + * for DynamoDB.

+ * @public + */ + Tags?: Tag[] | undefined; + /** + *

The table class of the new table. Valid values are STANDARD and + * STANDARD_INFREQUENT_ACCESS.

+ * @public + */ + TableClass?: TableClass | undefined; + /** + *

Indicates whether deletion protection is to be enabled (true) or disabled (false) on + * the table.

+ * @public + */ + DeletionProtectionEnabled?: boolean | undefined; + /** + *

Represents the warm throughput (in read units per second and write units per second) + * for creating a table.

+ * @public + */ + WarmThroughput?: WarmThroughput | undefined; + /** + *

An Amazon Web Services resource-based policy document in JSON format that will be + * attached to the table.

+ *

When you attach a resource-based policy while creating a table, the policy application + * is strongly consistent.

+ *

The maximum size supported for a resource-based policy document is 20 KB. DynamoDB counts whitespaces when calculating the size of a policy against this + * limit. For a full list of all considerations that apply for resource-based policies, see + * Resource-based + * policy considerations.

+ * + *

You need to specify the CreateTable and + * PutResourcePolicy + * IAM actions for authorizing a user to create a table with a + * resource-based policy.

+ *
+ * @public + */ + ResourcePolicy?: string | undefined; + /** + *

Sets the maximum number of read and write units for the specified table in on-demand + * capacity mode. If you use this parameter, you must specify + * MaxReadRequestUnits, MaxWriteRequestUnits, or both.

+ * @public + */ + OnDemandThroughput?: OnDemandThroughput | undefined; +} +/** + *

Represents the provisioned throughput settings for the table, consisting of read and + * write capacity units, along with data about increases and decreases.

+ * @public + */ +export interface ProvisionedThroughputDescription { + /** + *

The date and time of the last provisioned throughput increase for this table.

+ * @public + */ + LastIncreaseDateTime?: Date | undefined; + /** + *

The date and time of the last provisioned throughput decrease for this table.

+ * @public + */ + LastDecreaseDateTime?: Date | undefined; + /** + *

The number of provisioned throughput decreases for this table during this UTC calendar + * day. For current maximums on provisioned throughput decreases, see Service, + * Account, and Table Quotas in the Amazon DynamoDB Developer + * Guide.

+ * @public + */ + NumberOfDecreasesToday?: number | undefined; + /** + *

The maximum number of strongly consistent reads consumed per second before DynamoDB + * returns a ThrottlingException. Eventually consistent reads require less + * effort than strongly consistent reads, so a setting of 50 ReadCapacityUnits + * per second provides 100 eventually consistent ReadCapacityUnits per + * second.

+ * @public + */ + ReadCapacityUnits?: number | undefined; + /** + *

The maximum number of writes consumed per second before DynamoDB returns a + * ThrottlingException.

+ * @public + */ + WriteCapacityUnits?: number | undefined; +} +/** + *

Represents the properties of a global secondary index.

+ * @public + */ +export interface GlobalSecondaryIndexDescription { + /** + *

The name of the global secondary index.

+ * @public + */ + IndexName?: string | undefined; + /** + *

The complete key schema for a global secondary index, which consists of one or more + * pairs of attribute names and key types:

+ *
    + *
  • + *

    + * HASH - partition key

    + *
  • + *
  • + *

    + * RANGE - sort key

    + *
  • + *
+ * + *

The partition key of an item is also known as its hash + * attribute. The term "hash attribute" derives from DynamoDB's usage of an internal hash function to evenly distribute data items across + * partitions, based on their partition key values.

+ *

The sort key of an item is also known as its range attribute. + * The term "range attribute" derives from the way DynamoDB stores items with + * the same partition key physically close together, in sorted order by the sort key + * value.

+ *
+ * @public + */ + KeySchema?: KeySchemaElement[] | undefined; + /** + *

Represents attributes that are copied (projected) from the table into the global + * secondary index. These are in addition to the primary key attributes and index key + * attributes, which are automatically projected.

+ * @public + */ + Projection?: Projection | undefined; + /** + *

The current state of the global secondary index:

+ *
    + *
  • + *

    + * CREATING - The index is being created.

    + *
  • + *
  • + *

    + * UPDATING - The index is being updated.

    + *
  • + *
  • + *

    + * DELETING - The index is being deleted.

    + *
  • + *
  • + *

    + * ACTIVE - The index is ready for use.

    + *
  • + *
+ * @public + */ + IndexStatus?: IndexStatus | undefined; + /** + *

Indicates whether the index is currently backfilling. Backfilling + * is the process of reading items from the table and determining whether they can be added + * to the index. (Not all items will qualify: For example, a partition key cannot have any + * duplicate values.) If an item can be added to the index, DynamoDB will do so. After all + * items have been processed, the backfilling operation is complete and + * Backfilling is false.

+ *

You can delete an index that is being created during the Backfilling + * phase when IndexStatus is set to CREATING and Backfilling is + * true. You can't delete the index that is being created when IndexStatus is + * set to CREATING and Backfilling is false.

+ * + *

For indexes that were created during a CreateTable operation, the + * Backfilling attribute does not appear in the + * DescribeTable output.

+ *
+ * @public + */ + Backfilling?: boolean | undefined; + /** + *

Represents the provisioned throughput settings for the specified global secondary + * index.

+ *

For current minimum and maximum provisioned throughput values, see Service, + * Account, and Table Quotas in the Amazon DynamoDB Developer + * Guide.

+ * @public + */ + ProvisionedThroughput?: ProvisionedThroughputDescription | undefined; + /** + *

The total size of the specified index, in bytes. DynamoDB updates this value + * approximately every six hours. Recent changes might not be reflected in this + * value.

+ * @public + */ + IndexSizeBytes?: number | undefined; + /** + *

The number of items in the specified index. DynamoDB updates this value approximately + * every six hours. Recent changes might not be reflected in this value.

+ * @public + */ + ItemCount?: number | undefined; + /** + *

The Amazon Resource Name (ARN) that uniquely identifies the index.

+ * @public + */ + IndexArn?: string | undefined; + /** + *

The maximum number of read and write units for the specified global secondary index. + * If you use this parameter, you must specify MaxReadRequestUnits, + * MaxWriteRequestUnits, or both.

+ * @public + */ + OnDemandThroughput?: OnDemandThroughput | undefined; + /** + *

Represents the warm throughput value (in read units per second and write units per + * second) for the specified secondary index.

+ * @public + */ + WarmThroughput?: GlobalSecondaryIndexWarmThroughputDescription | undefined; +} +/** + *

Represents the properties of a local secondary index.

+ * @public + */ +export interface LocalSecondaryIndexDescription { + /** + *

Represents the name of the local secondary index.

+ * @public + */ + IndexName?: string | undefined; + /** + *

The complete key schema for the local secondary index, consisting of one or more pairs + * of attribute names and key types:

+ *
    + *
  • + *

    + * HASH - partition key

    + *
  • + *
  • + *

    + * RANGE - sort key

    + *
  • + *
+ * + *

The partition key of an item is also known as its hash + * attribute. The term "hash attribute" derives from DynamoDB's usage of + * an internal hash function to evenly distribute data items across partitions, based + * on their partition key values.

+ *

The sort key of an item is also known as its range attribute. + * The term "range attribute" derives from the way DynamoDB stores items with the same + * partition key physically close together, in sorted order by the sort key + * value.

+ *
+ * @public + */ + KeySchema?: KeySchemaElement[] | undefined; + /** + *

Represents attributes that are copied (projected) from the table into the global + * secondary index. These are in addition to the primary key attributes and index key + * attributes, which are automatically projected.

+ * @public + */ + Projection?: Projection | undefined; + /** + *

The total size of the specified index, in bytes. DynamoDB updates this value + * approximately every six hours. Recent changes might not be reflected in this + * value.

+ * @public + */ + IndexSizeBytes?: number | undefined; + /** + *

The number of items in the specified index. DynamoDB updates this value + * approximately every six hours. Recent changes might not be reflected in this + * value.

+ * @public + */ + ItemCount?: number | undefined; + /** + *

The Amazon Resource Name (ARN) that uniquely identifies the index.

+ * @public + */ + IndexArn?: string | undefined; +} +/** + * @public + * @enum + */ +export declare const MultiRegionConsistency: { + readonly EVENTUAL: "EVENTUAL"; + readonly STRONG: "STRONG"; +}; +/** + * @public + */ +export type MultiRegionConsistency = (typeof MultiRegionConsistency)[keyof typeof MultiRegionConsistency]; +/** + *

Contains details for the restore.

+ * @public + */ +export interface RestoreSummary { + /** + *

The Amazon Resource Name (ARN) of the backup from which the table was restored.

+ * @public + */ + SourceBackupArn?: string | undefined; + /** + *

The ARN of the source table of the backup that is being restored.

+ * @public + */ + SourceTableArn?: string | undefined; + /** + *

Point in time or source backup time.

+ * @public + */ + RestoreDateTime: Date | undefined; + /** + *

Indicates if a restore is in progress or not.

+ * @public + */ + RestoreInProgress: boolean | undefined; +} +/** + *

Represents the properties of a table.

+ * @public + */ +export interface TableDescription { + /** + *

An array of AttributeDefinition objects. Each of these objects describes + * one attribute in the table and index key schema.

+ *

Each AttributeDefinition object in this array is composed of:

+ *
    + *
  • + *

    + * AttributeName - The name of the attribute.

    + *
  • + *
  • + *

    + * AttributeType - The data type for the attribute.

    + *
  • + *
+ * @public + */ + AttributeDefinitions?: AttributeDefinition[] | undefined; + /** + *

The name of the table.

+ * @public + */ + TableName?: string | undefined; + /** + *

The primary key structure for the table. Each KeySchemaElement consists + * of:

+ *
    + *
  • + *

    + * AttributeName - The name of the attribute.

    + *
  • + *
  • + *

    + * KeyType - The role of the attribute:

    + *
      + *
    • + *

      + * HASH - partition key

      + *
    • + *
    • + *

      + * RANGE - sort key

      + *
    • + *
    + * + *

    The partition key of an item is also known as its hash + * attribute. The term "hash attribute" derives from DynamoDB's + * usage of an internal hash function to evenly distribute data items across + * partitions, based on their partition key values.

    + *

    The sort key of an item is also known as its range + * attribute. The term "range attribute" derives from the way + * DynamoDB stores items with the same partition key physically close together, + * in sorted order by the sort key value.

    + *
    + *
  • + *
+ *

For more information about primary keys, see Primary Key in the Amazon DynamoDB Developer + * Guide.

+ * @public + */ + KeySchema?: KeySchemaElement[] | undefined; + /** + *

The current state of the table:

+ *
    + *
  • + *

    + * CREATING - The table is being created.

    + *
  • + *
  • + *

    + * UPDATING - The table/index configuration is being updated. The + * table/index remains available for data operations when + * UPDATING.

    + *
  • + *
  • + *

    + * DELETING - The table is being deleted.

    + *
  • + *
  • + *

    + * ACTIVE - The table is ready for use.

    + *
  • + *
  • + *

    + * INACCESSIBLE_ENCRYPTION_CREDENTIALS - The KMS key + * used to encrypt the table in inaccessible. Table operations may fail due to + * failure to use the KMS key. DynamoDB will initiate the + * table archival process when a table's KMS key remains + * inaccessible for more than seven days.

    + *
  • + *
  • + *

    + * ARCHIVING - The table is being archived. Operations are not allowed + * until archival is complete.

    + *
  • + *
  • + *

    + * ARCHIVED - The table has been archived. See the ArchivalReason for + * more information.

    + *
  • + *
+ * @public + */ + TableStatus?: TableStatus | undefined; + /** + *

The date and time when the table was created, in UNIX epoch time format.

+ * @public + */ + CreationDateTime?: Date | undefined; + /** + *

The provisioned throughput settings for the table, consisting of read and write + * capacity units, along with data about increases and decreases.

+ * @public + */ + ProvisionedThroughput?: ProvisionedThroughputDescription | undefined; + /** + *

The total size of the specified table, in bytes. DynamoDB updates this value + * approximately every six hours. Recent changes might not be reflected in this + * value.

+ * @public + */ + TableSizeBytes?: number | undefined; + /** + *

The number of items in the specified table. DynamoDB updates this value approximately + * every six hours. Recent changes might not be reflected in this value.

+ * @public + */ + ItemCount?: number | undefined; + /** + *

The Amazon Resource Name (ARN) that uniquely identifies the table.

+ * @public + */ + TableArn?: string | undefined; + /** + *

Unique identifier for the table for which the backup was created.

+ * @public + */ + TableId?: string | undefined; + /** + *

Contains the details for the read/write capacity mode.

+ * @public + */ + BillingModeSummary?: BillingModeSummary | undefined; + /** + *

Represents one or more local secondary indexes on the table. Each index is scoped to a + * given partition key value. Tables with one or more local secondary indexes are subject + * to an item collection size limit, where the amount of data within a given item + * collection cannot exceed 10 GB. Each element is composed of:

+ *
    + *
  • + *

    + * IndexName - The name of the local secondary index.

    + *
  • + *
  • + *

    + * KeySchema - Specifies the complete index key schema. The attribute + * names in the key schema must be between 1 and 255 characters (inclusive). The + * key schema must begin with the same partition key as the table.

    + *
  • + *
  • + *

    + * Projection - Specifies attributes that are copied (projected) from + * the table into the index. These are in addition to the primary key attributes + * and index key attributes, which are automatically projected. Each attribute + * specification is composed of:

    + *
      + *
    • + *

      + * ProjectionType - One of the following:

      + *
        + *
      • + *

        + * KEYS_ONLY - Only the index and primary keys are + * projected into the index.

        + *
      • + *
      • + *

        + * INCLUDE - Only the specified table attributes are + * projected into the index. The list of projected attributes is in + * NonKeyAttributes.

        + *
      • + *
      • + *

        + * ALL - All of the table attributes are projected + * into the index.

        + *
      • + *
      + *
    • + *
    • + *

      + * NonKeyAttributes - A list of one or more non-key attribute + * names that are projected into the secondary index. The total count of + * attributes provided in NonKeyAttributes, summed across all + * of the secondary indexes, must not exceed 100. If you project the same + * attribute into two different indexes, this counts as two distinct + * attributes when determining the total. This limit only applies when you + * specify the ProjectionType of INCLUDE. You still can + * specify the ProjectionType of ALL to project all attributes + * from the source table, even if the table has more than 100 attributes.

      + *
    • + *
    + *
  • + *
  • + *

    + * IndexSizeBytes - Represents the total size of the index, in bytes. + * DynamoDB updates this value approximately every six hours. Recent changes might + * not be reflected in this value.

    + *
  • + *
  • + *

    + * ItemCount - Represents the number of items in the index. DynamoDB + * updates this value approximately every six hours. Recent changes might not be + * reflected in this value.

    + *
  • + *
+ *

If the table is in the DELETING state, no information about indexes will + * be returned.

+ * @public + */ + LocalSecondaryIndexes?: LocalSecondaryIndexDescription[] | undefined; + /** + *

The global secondary indexes, if any, on the table. Each index is scoped to a given + * partition key value. Each element is composed of:

+ *
    + *
  • + *

    + * Backfilling - If true, then the index is currently in the + * backfilling phase. Backfilling occurs only when a new global secondary index is + * added to the table. It is the process by which DynamoDB populates the new index + * with data from the table. (This attribute does not appear for indexes that were + * created during a CreateTable operation.)

    + *

    You can delete an index that is being created during the + * Backfilling phase when IndexStatus is set to + * CREATING and Backfilling is true. You can't delete the index that + * is being created when IndexStatus is set to CREATING and + * Backfilling is false. (This attribute does not appear for + * indexes that were created during a CreateTable operation.)

    + *
  • + *
  • + *

    + * IndexName - The name of the global secondary index.

    + *
  • + *
  • + *

    + * IndexSizeBytes - The total size of the global secondary index, in + * bytes. DynamoDB updates this value approximately every six hours. Recent changes + * might not be reflected in this value.

    + *
  • + *
  • + *

    + * IndexStatus - The current status of the global secondary + * index:

    + *
      + *
    • + *

      + * CREATING - The index is being created.

      + *
    • + *
    • + *

      + * UPDATING - The index is being updated.

      + *
    • + *
    • + *

      + * DELETING - The index is being deleted.

      + *
    • + *
    • + *

      + * ACTIVE - The index is ready for use.

      + *
    • + *
    + *
  • + *
  • + *

    + * ItemCount - The number of items in the global secondary index. + * DynamoDB updates this value approximately every six hours. Recent changes might + * not be reflected in this value.

    + *
  • + *
  • + *

    + * KeySchema - Specifies the complete index key schema. The attribute + * names in the key schema must be between 1 and 255 characters (inclusive). The + * key schema must begin with the same partition key as the table.

    + *
  • + *
  • + *

    + * Projection - Specifies attributes that are copied (projected) from + * the table into the index. These are in addition to the primary key attributes + * and index key attributes, which are automatically projected. Each attribute + * specification is composed of:

    + *
      + *
    • + *

      + * ProjectionType - One of the following:

      + *
        + *
      • + *

        + * KEYS_ONLY - Only the index and primary keys are + * projected into the index.

        + *
      • + *
      • + *

        + * INCLUDE - In addition to the attributes described + * in KEYS_ONLY, the secondary index will include + * other non-key attributes that you specify.

        + *
      • + *
      • + *

        + * ALL - All of the table attributes are projected + * into the index.

        + *
      • + *
      + *
    • + *
    • + *

      + * NonKeyAttributes - A list of one or more non-key attribute + * names that are projected into the secondary index. The total count of + * attributes provided in NonKeyAttributes, summed across all + * of the secondary indexes, must not exceed 100. If you project the same + * attribute into two different indexes, this counts as two distinct + * attributes when determining the total. This limit only applies when you + * specify the ProjectionType of INCLUDE. You still can + * specify the ProjectionType of ALL to project all attributes + * from the source table, even if the table has more than 100 attributes.

      + *
    • + *
    + *
  • + *
  • + *

    + * ProvisionedThroughput - The provisioned throughput settings for the + * global secondary index, consisting of read and write capacity units, along with + * data about increases and decreases.

    + *
  • + *
+ *

If the table is in the DELETING state, no information about indexes will + * be returned.

+ * @public + */ + GlobalSecondaryIndexes?: GlobalSecondaryIndexDescription[] | undefined; + /** + *

The current DynamoDB Streams configuration for the table.

+ * @public + */ + StreamSpecification?: StreamSpecification | undefined; + /** + *

A timestamp, in ISO 8601 format, for this stream.

+ *

Note that LatestStreamLabel is not a unique identifier for the stream, + * because it is possible that a stream from another table might have the same timestamp. + * However, the combination of the following three elements is guaranteed to be + * unique:

+ *
    + *
  • + *

    Amazon Web Services customer ID

    + *
  • + *
  • + *

    Table name

    + *
  • + *
  • + *

    + * StreamLabel + *

    + *
  • + *
+ * @public + */ + LatestStreamLabel?: string | undefined; + /** + *

The Amazon Resource Name (ARN) that uniquely identifies the latest stream for this + * table.

+ * @public + */ + LatestStreamArn?: string | undefined; + /** + *

Represents the version of global tables + * in use, if the table is replicated across Amazon Web Services Regions.

+ * @public + */ + GlobalTableVersion?: string | undefined; + /** + *

Represents replicas of the table.

+ * @public + */ + Replicas?: ReplicaDescription[] | undefined; + /** + *

Contains details for the restore.

+ * @public + */ + RestoreSummary?: RestoreSummary | undefined; + /** + *

The description of the server-side encryption status on the specified table.

+ * @public + */ + SSEDescription?: SSEDescription | undefined; + /** + *

Contains information about the table archive.

+ * @public + */ + ArchivalSummary?: ArchivalSummary | undefined; + /** + *

Contains details of the table class.

+ * @public + */ + TableClassSummary?: TableClassSummary | undefined; + /** + *

Indicates whether deletion protection is enabled (true) or disabled (false) on the + * table.

+ * @public + */ + DeletionProtectionEnabled?: boolean | undefined; + /** + *

The maximum number of read and write units for the specified on-demand table. If you + * use this parameter, you must specify MaxReadRequestUnits, + * MaxWriteRequestUnits, or both.

+ * @public + */ + OnDemandThroughput?: OnDemandThroughput | undefined; + /** + *

Describes the warm throughput value of the base table.

+ * @public + */ + WarmThroughput?: TableWarmThroughputDescription | undefined; + /** + *

Indicates one of the following consistency modes for a global table:

+ *
    + *
  • + *

    + * EVENTUAL: Indicates that the global table is configured for multi-Region eventual consistency.

    + *
  • + *
  • + *

    + * STRONG: Indicates that the global table is configured for multi-Region strong consistency (preview).

    + * + *

    Multi-Region strong consistency (MRSC) is a new DynamoDB global tables capability currently available in preview mode. For more information, see Global tables multi-Region strong consistency.

    + *
    + *
  • + *
+ *

If you don't specify this field, the global table consistency mode defaults to EVENTUAL.

+ * @public + */ + MultiRegionConsistency?: MultiRegionConsistency | undefined; +} +/** + *

Represents the output of a CreateTable operation.

+ * @public + */ +export interface CreateTableOutput { + /** + *

Represents the properties of the table.

+ * @public + */ + TableDescription?: TableDescription | undefined; +} +/** + *

The operation conflicts with the resource's availability. For example:

+ *
    + *
  • + *

    You attempted to recreate an existing table.

    + *
  • + *
  • + *

    You tried to delete a table currently in the CREATING state.

    + *
  • + *
  • + *

    You tried to update a resource that was already being updated.

    + *
  • + *
+ *

When appropriate, wait for the ongoing update to complete and attempt the request again.

+ * @public + */ +export declare class ResourceInUseException extends __BaseException { + readonly name: "ResourceInUseException"; + readonly $fault: "client"; + /** + * @internal + */ + constructor(opts: __ExceptionOptionType); +} +/** + *

Processing options for the CSV file being imported.

+ * @public + */ +export interface CsvOptions { + /** + *

The delimiter used for separating items in the CSV file being imported.

+ * @public + */ + Delimiter?: string | undefined; + /** + *

List of the headers used to specify a common header for all source CSV files being + * imported. If this field is specified then the first line of each CSV file is treated as + * data instead of the header. If this field is not specified the the first line of each + * CSV file is treated as the header.

+ * @public + */ + HeaderList?: string[] | undefined; +} +/** + * @public + */ +export interface DeleteBackupInput { + /** + *

The ARN associated with the backup.

+ * @public + */ + BackupArn: string | undefined; +} +/** + * @public + */ +export interface DeleteBackupOutput { + /** + *

Contains the description of the backup created for the table.

+ * @public + */ + BackupDescription?: BackupDescription | undefined; +} +/** + *

Represents a global secondary index to be deleted from an existing table.

+ * @public + */ +export interface DeleteGlobalSecondaryIndexAction { + /** + *

The name of the global secondary index to be deleted.

+ * @public + */ + IndexName: string | undefined; +} +/** + * @public + * @enum + */ +export declare const ReturnValue: { + readonly ALL_NEW: "ALL_NEW"; + readonly ALL_OLD: "ALL_OLD"; + readonly NONE: "NONE"; + readonly UPDATED_NEW: "UPDATED_NEW"; + readonly UPDATED_OLD: "UPDATED_OLD"; +}; +/** + * @public + */ +export type ReturnValue = (typeof ReturnValue)[keyof typeof ReturnValue]; +/** + *

The request was rejected because one or more items in the request are being modified by a request in another Region.

+ * @public + */ +export declare class ReplicatedWriteConflictException extends __BaseException { + readonly name: "ReplicatedWriteConflictException"; + readonly $fault: "client"; + /** + * @internal + */ + constructor(opts: __ExceptionOptionType); +} +/** + *

Operation was rejected because there is an ongoing transaction for the + * item.

+ * @public + */ +export declare class TransactionConflictException extends __BaseException { + readonly name: "TransactionConflictException"; + readonly $fault: "client"; + /** + * @internal + */ + constructor(opts: __ExceptionOptionType); +} +/** + *

Represents a replica to be removed.

+ * @public + */ +export interface DeleteReplicaAction { + /** + *

The Region of the replica to be removed.

+ * @public + */ + RegionName: string | undefined; +} +/** + *

Represents a replica to be deleted.

+ * @public + */ +export interface DeleteReplicationGroupMemberAction { + /** + *

The Region where the replica exists.

+ * @public + */ + RegionName: string | undefined; +} +/** + * @public + */ +export interface DeleteResourcePolicyInput { + /** + *

The Amazon Resource Name (ARN) of the DynamoDB resource from which the policy will be + * removed. The resources you can specify include tables and streams. If you remove the + * policy of a table, it will also remove the permissions for the table's indexes defined + * in that policy document. This is because index permissions are defined in the table's + * policy.

+ * @public + */ + ResourceArn: string | undefined; + /** + *

A string value that you can use to conditionally delete your policy. When you provide + * an expected revision ID, if the revision ID of the existing policy on the resource + * doesn't match or if there's no policy attached to the resource, the request will fail + * and return a PolicyNotFoundException.

+ * @public + */ + ExpectedRevisionId?: string | undefined; +} +/** + * @public + */ +export interface DeleteResourcePolicyOutput { + /** + *

A unique string that represents the revision ID of the policy. If you're comparing revision IDs, make sure to always use string comparison logic.

+ *

This value will be empty if you make a request against a resource without a + * policy.

+ * @public + */ + RevisionId?: string | undefined; +} +/** + *

The operation tried to access a nonexistent resource-based policy.

+ *

If you specified an ExpectedRevisionId, it's possible that a policy is present for the resource but its revision ID didn't match the expected value.

+ * @public + */ +export declare class PolicyNotFoundException extends __BaseException { + readonly name: "PolicyNotFoundException"; + readonly $fault: "client"; + /** + * @internal + */ + constructor(opts: __ExceptionOptionType); +} +/** + *

Represents the input of a DeleteTable operation.

+ * @public + */ +export interface DeleteTableInput { + /** + *

The name of the table to delete. You can also provide the Amazon Resource Name (ARN) of the table in + * this parameter.

+ * @public + */ + TableName: string | undefined; +} +/** + *

Represents the output of a DeleteTable operation.

+ * @public + */ +export interface DeleteTableOutput { + /** + *

Represents the properties of a table.

+ * @public + */ + TableDescription?: TableDescription | undefined; +} +/** + * @public + */ +export interface DescribeBackupInput { + /** + *

The Amazon Resource Name (ARN) associated with the backup.

+ * @public + */ + BackupArn: string | undefined; +} +/** + * @public + */ +export interface DescribeBackupOutput { + /** + *

Contains the description of the backup created for the table.

+ * @public + */ + BackupDescription?: BackupDescription | undefined; +} +/** + * @public + */ +export interface DescribeContinuousBackupsInput { + /** + *

Name of the table for which the customer wants to check the continuous backups and + * point in time recovery settings.

+ *

You can also provide the Amazon Resource Name (ARN) of the table in this parameter.

+ * @public + */ + TableName: string | undefined; +} +/** + * @public + */ +export interface DescribeContinuousBackupsOutput { + /** + *

Represents the continuous backups and point in time recovery settings on the + * table.

+ * @public + */ + ContinuousBackupsDescription?: ContinuousBackupsDescription | undefined; +} +/** + * @public + */ +export interface DescribeContributorInsightsInput { + /** + *

The name of the table to describe. You can also provide the Amazon Resource Name (ARN) of the table in + * this parameter.

+ * @public + */ + TableName: string | undefined; + /** + *

The name of the global secondary index to describe, if applicable.

+ * @public + */ + IndexName?: string | undefined; +} +/** + *

Represents a failure a contributor insights operation.

+ * @public + */ +export interface FailureException { + /** + *

Exception name.

+ * @public + */ + ExceptionName?: string | undefined; + /** + *

Description of the failure.

+ * @public + */ + ExceptionDescription?: string | undefined; +} +/** + * @public + */ +export interface DescribeContributorInsightsOutput { + /** + *

The name of the table being described.

+ * @public + */ + TableName?: string | undefined; + /** + *

The name of the global secondary index being described.

+ * @public + */ + IndexName?: string | undefined; + /** + *

List of names of the associated contributor insights rules.

+ * @public + */ + ContributorInsightsRuleList?: string[] | undefined; + /** + *

Current status of contributor insights.

+ * @public + */ + ContributorInsightsStatus?: ContributorInsightsStatus | undefined; + /** + *

Timestamp of the last time the status was changed.

+ * @public + */ + LastUpdateDateTime?: Date | undefined; + /** + *

Returns information about the last failure that was encountered.

+ *

The most common exceptions for a FAILED status are:

+ *
    + *
  • + *

    LimitExceededException - Per-account Amazon CloudWatch Contributor Insights + * rule limit reached. Please disable Contributor Insights for other tables/indexes + * OR disable Contributor Insights rules before retrying.

    + *
  • + *
  • + *

    AccessDeniedException - Amazon CloudWatch Contributor Insights rules cannot be + * modified due to insufficient permissions.

    + *
  • + *
  • + *

    AccessDeniedException - Failed to create service-linked role for Contributor + * Insights due to insufficient permissions.

    + *
  • + *
  • + *

    InternalServerError - Failed to create Amazon CloudWatch Contributor Insights + * rules. Please retry request.

    + *
  • + *
+ * @public + */ + FailureException?: FailureException | undefined; +} +/** + * @public + */ +export interface DescribeEndpointsRequest { +} +/** + *

An endpoint information details.

+ * @public + */ +export interface Endpoint { + /** + *

IP address of the endpoint.

+ * @public + */ + Address: string | undefined; + /** + *

Endpoint cache time to live (TTL) value.

+ * @public + */ + CachePeriodInMinutes: number | undefined; +} +/** + * @public + */ +export interface DescribeEndpointsResponse { + /** + *

List of endpoints.

+ * @public + */ + Endpoints: Endpoint[] | undefined; +} +/** + * @public + */ +export interface DescribeExportInput { + /** + *

The Amazon Resource Name (ARN) associated with the export.

+ * @public + */ + ExportArn: string | undefined; +} +/** + * @public + * @enum + */ +export declare const ExportFormat: { + readonly DYNAMODB_JSON: "DYNAMODB_JSON"; + readonly ION: "ION"; +}; +/** + * @public + */ +export type ExportFormat = (typeof ExportFormat)[keyof typeof ExportFormat]; +/** + * @public + * @enum + */ +export declare const ExportStatus: { + readonly COMPLETED: "COMPLETED"; + readonly FAILED: "FAILED"; + readonly IN_PROGRESS: "IN_PROGRESS"; +}; +/** + * @public + */ +export type ExportStatus = (typeof ExportStatus)[keyof typeof ExportStatus]; +/** + * @public + * @enum + */ +export declare const ExportType: { + readonly FULL_EXPORT: "FULL_EXPORT"; + readonly INCREMENTAL_EXPORT: "INCREMENTAL_EXPORT"; +}; +/** + * @public + */ +export type ExportType = (typeof ExportType)[keyof typeof ExportType]; +/** + * @public + * @enum + */ +export declare const ExportViewType: { + readonly NEW_AND_OLD_IMAGES: "NEW_AND_OLD_IMAGES"; + readonly NEW_IMAGE: "NEW_IMAGE"; +}; +/** + * @public + */ +export type ExportViewType = (typeof ExportViewType)[keyof typeof ExportViewType]; +/** + *

Optional object containing the parameters specific to an incremental export.

+ * @public + */ +export interface IncrementalExportSpecification { + /** + *

Time in the past which provides the inclusive start range for the export table's data, + * counted in seconds from the start of the Unix epoch. The incremental export will reflect + * the table's state including and after this point in time.

+ * @public + */ + ExportFromTime?: Date | undefined; + /** + *

Time in the past which provides the exclusive end range for the export table's data, + * counted in seconds from the start of the Unix epoch. The incremental export will reflect + * the table's state just prior to this point in time. If this is not provided, the latest + * time with data available will be used.

+ * @public + */ + ExportToTime?: Date | undefined; + /** + *

The view type that was chosen for the export. Valid values are + * NEW_AND_OLD_IMAGES and NEW_IMAGES. The default value is + * NEW_AND_OLD_IMAGES.

+ * @public + */ + ExportViewType?: ExportViewType | undefined; +} +/** + * @public + * @enum + */ +export declare const S3SseAlgorithm: { + readonly AES256: "AES256"; + readonly KMS: "KMS"; +}; +/** + * @public + */ +export type S3SseAlgorithm = (typeof S3SseAlgorithm)[keyof typeof S3SseAlgorithm]; +/** + *

Represents the properties of the exported table.

+ * @public + */ +export interface ExportDescription { + /** + *

The Amazon Resource Name (ARN) of the table export.

+ * @public + */ + ExportArn?: string | undefined; + /** + *

Export can be in one of the following states: IN_PROGRESS, COMPLETED, or + * FAILED.

+ * @public + */ + ExportStatus?: ExportStatus | undefined; + /** + *

The time at which the export task began.

+ * @public + */ + StartTime?: Date | undefined; + /** + *

The time at which the export task completed.

+ * @public + */ + EndTime?: Date | undefined; + /** + *

The name of the manifest file for the export task.

+ * @public + */ + ExportManifest?: string | undefined; + /** + *

The Amazon Resource Name (ARN) of the table that was exported.

+ * @public + */ + TableArn?: string | undefined; + /** + *

Unique ID of the table that was exported.

+ * @public + */ + TableId?: string | undefined; + /** + *

Point in time from which table data was exported.

+ * @public + */ + ExportTime?: Date | undefined; + /** + *

The client token that was provided for the export task. A client token makes calls to + * ExportTableToPointInTimeInput idempotent, meaning that multiple + * identical calls have the same effect as one single call.

+ * @public + */ + ClientToken?: string | undefined; + /** + *

The name of the Amazon S3 bucket containing the export.

+ * @public + */ + S3Bucket?: string | undefined; + /** + *

The ID of the Amazon Web Services account that owns the bucket containing the + * export.

+ * @public + */ + S3BucketOwner?: string | undefined; + /** + *

The Amazon S3 bucket prefix used as the file name and path of the exported + * snapshot.

+ * @public + */ + S3Prefix?: string | undefined; + /** + *

Type of encryption used on the bucket where export data is stored. Valid values for + * S3SseAlgorithm are:

+ *
    + *
  • + *

    + * AES256 - server-side encryption with Amazon S3 managed + * keys

    + *
  • + *
  • + *

    + * KMS - server-side encryption with KMS managed + * keys

    + *
  • + *
+ * @public + */ + S3SseAlgorithm?: S3SseAlgorithm | undefined; + /** + *

The ID of the KMS managed key used to encrypt the S3 bucket where + * export data is stored (if applicable).

+ * @public + */ + S3SseKmsKeyId?: string | undefined; + /** + *

Status code for the result of the failed export.

+ * @public + */ + FailureCode?: string | undefined; + /** + *

Export failure reason description.

+ * @public + */ + FailureMessage?: string | undefined; + /** + *

The format of the exported data. Valid values for ExportFormat are + * DYNAMODB_JSON or ION.

+ * @public + */ + ExportFormat?: ExportFormat | undefined; + /** + *

The billable size of the table export.

+ * @public + */ + BilledSizeBytes?: number | undefined; + /** + *

The number of items exported.

+ * @public + */ + ItemCount?: number | undefined; + /** + *

The type of export that was performed. Valid values are FULL_EXPORT or + * INCREMENTAL_EXPORT.

+ * @public + */ + ExportType?: ExportType | undefined; + /** + *

Optional object containing the parameters specific to an incremental export.

+ * @public + */ + IncrementalExportSpecification?: IncrementalExportSpecification | undefined; +} +/** + * @public + */ +export interface DescribeExportOutput { + /** + *

Represents the properties of the export.

+ * @public + */ + ExportDescription?: ExportDescription | undefined; +} +/** + *

The specified export was not found.

+ * @public + */ +export declare class ExportNotFoundException extends __BaseException { + readonly name: "ExportNotFoundException"; + readonly $fault: "client"; + /** + * @internal + */ + constructor(opts: __ExceptionOptionType); +} +/** + * @public + */ +export interface DescribeGlobalTableInput { + /** + *

The name of the global table.

+ * @public + */ + GlobalTableName: string | undefined; +} +/** + * @public + */ +export interface DescribeGlobalTableOutput { + /** + *

Contains the details of the global table.

+ * @public + */ + GlobalTableDescription?: GlobalTableDescription | undefined; +} +/** + *

The specified global table does not exist.

+ * @public + */ +export declare class GlobalTableNotFoundException extends __BaseException { + readonly name: "GlobalTableNotFoundException"; + readonly $fault: "client"; + /** + * @internal + */ + constructor(opts: __ExceptionOptionType); +} +/** + * @public + */ +export interface DescribeGlobalTableSettingsInput { + /** + *

The name of the global table to describe.

+ * @public + */ + GlobalTableName: string | undefined; +} +/** + *

Represents the properties of a global secondary index.

+ * @public + */ +export interface ReplicaGlobalSecondaryIndexSettingsDescription { + /** + *

The name of the global secondary index. The name must be unique among all other + * indexes on this table.

+ * @public + */ + IndexName: string | undefined; + /** + *

The current status of the global secondary index:

+ *
    + *
  • + *

    + * CREATING - The global secondary index is being created.

    + *
  • + *
  • + *

    + * UPDATING - The global secondary index is being updated.

    + *
  • + *
  • + *

    + * DELETING - The global secondary index is being deleted.

    + *
  • + *
  • + *

    + * ACTIVE - The global secondary index is ready for use.

    + *
  • + *
+ * @public + */ + IndexStatus?: IndexStatus | undefined; + /** + *

The maximum number of strongly consistent reads consumed per second before DynamoDB + * returns a ThrottlingException.

+ * @public + */ + ProvisionedReadCapacityUnits?: number | undefined; + /** + *

Auto scaling settings for a global secondary index replica's read capacity + * units.

+ * @public + */ + ProvisionedReadCapacityAutoScalingSettings?: AutoScalingSettingsDescription | undefined; + /** + *

The maximum number of writes consumed per second before DynamoDB returns a + * ThrottlingException.

+ * @public + */ + ProvisionedWriteCapacityUnits?: number | undefined; + /** + *

Auto scaling settings for a global secondary index replica's write capacity + * units.

+ * @public + */ + ProvisionedWriteCapacityAutoScalingSettings?: AutoScalingSettingsDescription | undefined; +} +/** + *

Represents the properties of a replica.

+ * @public + */ +export interface ReplicaSettingsDescription { + /** + *

The Region name of the replica.

+ * @public + */ + RegionName: string | undefined; + /** + *

The current state of the Region:

+ *
    + *
  • + *

    + * CREATING - The Region is being created.

    + *
  • + *
  • + *

    + * UPDATING - The Region is being updated.

    + *
  • + *
  • + *

    + * DELETING - The Region is being deleted.

    + *
  • + *
  • + *

    + * ACTIVE - The Region is ready for use.

    + *
  • + *
+ * @public + */ + ReplicaStatus?: ReplicaStatus | undefined; + /** + *

The read/write capacity mode of the replica.

+ * @public + */ + ReplicaBillingModeSummary?: BillingModeSummary | undefined; + /** + *

The maximum number of strongly consistent reads consumed per second before DynamoDB + * returns a ThrottlingException. For more information, see Specifying Read and Write Requirements in the Amazon DynamoDB + * Developer Guide.

+ * @public + */ + ReplicaProvisionedReadCapacityUnits?: number | undefined; + /** + *

Auto scaling settings for a global table replica's read capacity units.

+ * @public + */ + ReplicaProvisionedReadCapacityAutoScalingSettings?: AutoScalingSettingsDescription | undefined; + /** + *

The maximum number of writes consumed per second before DynamoDB returns a + * ThrottlingException. For more information, see Specifying Read and Write Requirements in the Amazon DynamoDB + * Developer Guide.

+ * @public + */ + ReplicaProvisionedWriteCapacityUnits?: number | undefined; + /** + *

Auto scaling settings for a global table replica's write capacity units.

+ * @public + */ + ReplicaProvisionedWriteCapacityAutoScalingSettings?: AutoScalingSettingsDescription | undefined; + /** + *

Replica global secondary index settings for the global table.

+ * @public + */ + ReplicaGlobalSecondaryIndexSettings?: ReplicaGlobalSecondaryIndexSettingsDescription[] | undefined; + /** + *

Contains details of the table class.

+ * @public + */ + ReplicaTableClassSummary?: TableClassSummary | undefined; +} +/** + * @public + */ +export interface DescribeGlobalTableSettingsOutput { + /** + *

The name of the global table.

+ * @public + */ + GlobalTableName?: string | undefined; + /** + *

The Region-specific settings for the global table.

+ * @public + */ + ReplicaSettings?: ReplicaSettingsDescription[] | undefined; +} +/** + * @public + */ +export interface DescribeImportInput { + /** + *

The Amazon Resource Name (ARN) associated with the table you're importing to.

+ * @public + */ + ImportArn: string | undefined; +} +/** + * @public + * @enum + */ +export declare const ImportStatus: { + readonly CANCELLED: "CANCELLED"; + readonly CANCELLING: "CANCELLING"; + readonly COMPLETED: "COMPLETED"; + readonly FAILED: "FAILED"; + readonly IN_PROGRESS: "IN_PROGRESS"; +}; +/** + * @public + */ +export type ImportStatus = (typeof ImportStatus)[keyof typeof ImportStatus]; +/** + * @public + * @enum + */ +export declare const InputCompressionType: { + readonly GZIP: "GZIP"; + readonly NONE: "NONE"; + readonly ZSTD: "ZSTD"; +}; +/** + * @public + */ +export type InputCompressionType = (typeof InputCompressionType)[keyof typeof InputCompressionType]; +/** + * @public + * @enum + */ +export declare const InputFormat: { + readonly CSV: "CSV"; + readonly DYNAMODB_JSON: "DYNAMODB_JSON"; + readonly ION: "ION"; +}; +/** + * @public + */ +export type InputFormat = (typeof InputFormat)[keyof typeof InputFormat]; +/** + *

The format options for the data that was imported into the target table. There is one + * value, CsvOption.

+ * @public + */ +export interface InputFormatOptions { + /** + *

The options for imported source files in CSV format. The values are Delimiter and + * HeaderList.

+ * @public + */ + Csv?: CsvOptions | undefined; +} +/** + *

The S3 bucket that is being imported from.

+ * @public + */ +export interface S3BucketSource { + /** + *

The account number of the S3 bucket that is being imported from. If the bucket is + * owned by the requester this is optional.

+ * @public + */ + S3BucketOwner?: string | undefined; + /** + *

The S3 bucket that is being imported from.

+ * @public + */ + S3Bucket: string | undefined; + /** + *

The key prefix shared by all S3 Objects that are being imported.

+ * @public + */ + S3KeyPrefix?: string | undefined; +} +/** + *

The parameters for the table created as part of the import operation.

+ * @public + */ +export interface TableCreationParameters { + /** + *

The name of the table created as part of the import operation.

+ * @public + */ + TableName: string | undefined; + /** + *

The attributes of the table created as part of the import operation.

+ * @public + */ + AttributeDefinitions: AttributeDefinition[] | undefined; + /** + *

The primary key and option sort key of the table created as part of the import + * operation.

+ * @public + */ + KeySchema: KeySchemaElement[] | undefined; + /** + *

The billing mode for provisioning the table created as part of the import operation. + *

+ * @public + */ + BillingMode?: BillingMode | undefined; + /** + *

Represents the provisioned throughput settings for the specified global secondary + * index. You must use ProvisionedThroughput or + * OnDemandThroughput based on your table’s capacity mode.

+ *

For current minimum and maximum provisioned throughput values, see Service, + * Account, and Table Quotas in the Amazon DynamoDB Developer + * Guide.

+ * @public + */ + ProvisionedThroughput?: ProvisionedThroughput | undefined; + /** + *

Sets the maximum number of read and write units for the specified on-demand table. If + * you use this parameter, you must specify MaxReadRequestUnits, + * MaxWriteRequestUnits, or both.

+ * @public + */ + OnDemandThroughput?: OnDemandThroughput | undefined; + /** + *

Represents the settings used to enable server-side encryption.

+ * @public + */ + SSESpecification?: SSESpecification | undefined; + /** + *

The Global Secondary Indexes (GSI) of the table to be created as part of the import + * operation.

+ * @public + */ + GlobalSecondaryIndexes?: GlobalSecondaryIndex[] | undefined; +} +/** + *

Represents the properties of the table being imported into. + *

+ * @public + */ +export interface ImportTableDescription { + /** + *

The Amazon Resource Number (ARN) corresponding to the import request. + *

+ * @public + */ + ImportArn?: string | undefined; + /** + *

The status of the import.

+ * @public + */ + ImportStatus?: ImportStatus | undefined; + /** + *

The Amazon Resource Number (ARN) of the table being imported into. + *

+ * @public + */ + TableArn?: string | undefined; + /** + *

The table id corresponding to the table created by import table process. + *

+ * @public + */ + TableId?: string | undefined; + /** + *

The client token that was provided for the import task. Reusing the client token on + * retry makes a call to ImportTable idempotent.

+ * @public + */ + ClientToken?: string | undefined; + /** + *

Values for the S3 bucket the source file is imported from. Includes bucket name + * (required), key prefix (optional) and bucket account owner ID (optional).

+ * @public + */ + S3BucketSource?: S3BucketSource | undefined; + /** + *

The number of errors occurred on importing the source file into the target table. + *

+ * @public + */ + ErrorCount?: number | undefined; + /** + *

The Amazon Resource Number (ARN) of the Cloudwatch Log Group associated with the + * target table.

+ * @public + */ + CloudWatchLogGroupArn?: string | undefined; + /** + *

The format of the source data going into the target table. + *

+ * @public + */ + InputFormat?: InputFormat | undefined; + /** + *

The format options for the data that was imported into the target table. There is one + * value, CsvOption.

+ * @public + */ + InputFormatOptions?: InputFormatOptions | undefined; + /** + *

The compression options for the data that has been imported into the target table. + * The values are NONE, GZIP, or ZSTD.

+ * @public + */ + InputCompressionType?: InputCompressionType | undefined; + /** + *

The parameters for the new table that is being imported into.

+ * @public + */ + TableCreationParameters?: TableCreationParameters | undefined; + /** + *

The time when this import task started.

+ * @public + */ + StartTime?: Date | undefined; + /** + *

The time at which the creation of the table associated with this import task + * completed.

+ * @public + */ + EndTime?: Date | undefined; + /** + *

The total size of data processed from the source file, in Bytes.

+ * @public + */ + ProcessedSizeBytes?: number | undefined; + /** + *

The total number of items processed from the source file.

+ * @public + */ + ProcessedItemCount?: number | undefined; + /** + *

The number of items successfully imported into the new table.

+ * @public + */ + ImportedItemCount?: number | undefined; + /** + *

The error code corresponding to the failure that the import job ran into during + * execution.

+ * @public + */ + FailureCode?: string | undefined; + /** + *

The error message corresponding to the failure that the import job ran into during + * execution.

+ * @public + */ + FailureMessage?: string | undefined; +} +/** + * @public + */ +export interface DescribeImportOutput { + /** + *

Represents the properties of the table created for the import, and parameters of the + * import. The import parameters include import status, how many items were processed, and + * how many errors were encountered.

+ * @public + */ + ImportTableDescription: ImportTableDescription | undefined; +} +/** + *

+ * The specified import was not found. + *

+ * @public + */ +export declare class ImportNotFoundException extends __BaseException { + readonly name: "ImportNotFoundException"; + readonly $fault: "client"; + /** + * @internal + */ + constructor(opts: __ExceptionOptionType); +} +/** + * @public + */ +export interface DescribeKinesisStreamingDestinationInput { + /** + *

The name of the table being described. You can also provide the Amazon Resource Name (ARN) of the table + * in this parameter.

+ * @public + */ + TableName: string | undefined; +} +/** + * @public + * @enum + */ +export declare const DestinationStatus: { + readonly ACTIVE: "ACTIVE"; + readonly DISABLED: "DISABLED"; + readonly DISABLING: "DISABLING"; + readonly ENABLE_FAILED: "ENABLE_FAILED"; + readonly ENABLING: "ENABLING"; + readonly UPDATING: "UPDATING"; +}; +/** + * @public + */ +export type DestinationStatus = (typeof DestinationStatus)[keyof typeof DestinationStatus]; +/** + *

Describes a Kinesis data stream destination.

+ * @public + */ +export interface KinesisDataStreamDestination { + /** + *

The ARN for a specific Kinesis data stream.

+ * @public + */ + StreamArn?: string | undefined; + /** + *

The current status of replication.

+ * @public + */ + DestinationStatus?: DestinationStatus | undefined; + /** + *

The human-readable string that corresponds to the replica status.

+ * @public + */ + DestinationStatusDescription?: string | undefined; + /** + *

The precision of the Kinesis data stream timestamp. The values are either + * MILLISECOND or MICROSECOND.

+ * @public + */ + ApproximateCreationDateTimePrecision?: ApproximateCreationDateTimePrecision | undefined; +} +/** + * @public + */ +export interface DescribeKinesisStreamingDestinationOutput { + /** + *

The name of the table being described.

+ * @public + */ + TableName?: string | undefined; + /** + *

The list of replica structures for the table being described.

+ * @public + */ + KinesisDataStreamDestinations?: KinesisDataStreamDestination[] | undefined; +} +/** + *

Represents the input of a DescribeLimits operation. Has no + * content.

+ * @public + */ +export interface DescribeLimitsInput { +} +/** + *

Represents the output of a DescribeLimits operation.

+ * @public + */ +export interface DescribeLimitsOutput { + /** + *

The maximum total read capacity units that your account allows you to provision across + * all of your tables in this Region.

+ * @public + */ + AccountMaxReadCapacityUnits?: number | undefined; + /** + *

The maximum total write capacity units that your account allows you to provision + * across all of your tables in this Region.

+ * @public + */ + AccountMaxWriteCapacityUnits?: number | undefined; + /** + *

The maximum read capacity units that your account allows you to provision for a new + * table that you are creating in this Region, including the read capacity units + * provisioned for its global secondary indexes (GSIs).

+ * @public + */ + TableMaxReadCapacityUnits?: number | undefined; + /** + *

The maximum write capacity units that your account allows you to provision for a new + * table that you are creating in this Region, including the write capacity units + * provisioned for its global secondary indexes (GSIs).

+ * @public + */ + TableMaxWriteCapacityUnits?: number | undefined; +} +/** + *

Represents the input of a DescribeTable operation.

+ * @public + */ +export interface DescribeTableInput { + /** + *

The name of the table to describe. You can also provide the Amazon Resource Name (ARN) of the table in + * this parameter.

+ * @public + */ + TableName: string | undefined; +} +/** + *

Represents the output of a DescribeTable operation.

+ * @public + */ +export interface DescribeTableOutput { + /** + *

The properties of the table.

+ * @public + */ + Table?: TableDescription | undefined; +} +/** + * @public + */ +export interface DescribeTableReplicaAutoScalingInput { + /** + *

The name of the table. You can also provide the Amazon Resource Name (ARN) of the table in this + * parameter.

+ * @public + */ + TableName: string | undefined; +} +/** + *

Represents the auto scaling configuration for a replica global secondary index.

+ * @public + */ +export interface ReplicaGlobalSecondaryIndexAutoScalingDescription { + /** + *

The name of the global secondary index.

+ * @public + */ + IndexName?: string | undefined; + /** + *

The current state of the replica global secondary index:

+ *
    + *
  • + *

    + * CREATING - The index is being created.

    + *
  • + *
  • + *

    + * UPDATING - The table/index configuration is being updated. The + * table/index remains available for data operations when + * UPDATING + *

    + *
  • + *
  • + *

    + * DELETING - The index is being deleted.

    + *
  • + *
  • + *

    + * ACTIVE - The index is ready for use.

    + *
  • + *
+ * @public + */ + IndexStatus?: IndexStatus | undefined; + /** + *

Represents the auto scaling settings for a global table or global secondary + * index.

+ * @public + */ + ProvisionedReadCapacityAutoScalingSettings?: AutoScalingSettingsDescription | undefined; + /** + *

Represents the auto scaling settings for a global table or global secondary + * index.

+ * @public + */ + ProvisionedWriteCapacityAutoScalingSettings?: AutoScalingSettingsDescription | undefined; +} +/** + *

Represents the auto scaling settings of the replica.

+ * @public + */ +export interface ReplicaAutoScalingDescription { + /** + *

The Region where the replica exists.

+ * @public + */ + RegionName?: string | undefined; + /** + *

Replica-specific global secondary index auto scaling settings.

+ * @public + */ + GlobalSecondaryIndexes?: ReplicaGlobalSecondaryIndexAutoScalingDescription[] | undefined; + /** + *

Represents the auto scaling settings for a global table or global secondary + * index.

+ * @public + */ + ReplicaProvisionedReadCapacityAutoScalingSettings?: AutoScalingSettingsDescription | undefined; + /** + *

Represents the auto scaling settings for a global table or global secondary + * index.

+ * @public + */ + ReplicaProvisionedWriteCapacityAutoScalingSettings?: AutoScalingSettingsDescription | undefined; + /** + *

The current state of the replica:

+ *
    + *
  • + *

    + * CREATING - The replica is being created.

    + *
  • + *
  • + *

    + * UPDATING - The replica is being updated.

    + *
  • + *
  • + *

    + * DELETING - The replica is being deleted.

    + *
  • + *
  • + *

    + * ACTIVE - The replica is ready for use.

    + *
  • + *
+ * @public + */ + ReplicaStatus?: ReplicaStatus | undefined; +} +/** + *

Represents the auto scaling configuration for a global table.

+ * @public + */ +export interface TableAutoScalingDescription { + /** + *

The name of the table.

+ * @public + */ + TableName?: string | undefined; + /** + *

The current state of the table:

+ *
    + *
  • + *

    + * CREATING - The table is being created.

    + *
  • + *
  • + *

    + * UPDATING - The table is being updated.

    + *
  • + *
  • + *

    + * DELETING - The table is being deleted.

    + *
  • + *
  • + *

    + * ACTIVE - The table is ready for use.

    + *
  • + *
+ * @public + */ + TableStatus?: TableStatus | undefined; + /** + *

Represents replicas of the global table.

+ * @public + */ + Replicas?: ReplicaAutoScalingDescription[] | undefined; +} +/** + * @public + */ +export interface DescribeTableReplicaAutoScalingOutput { + /** + *

Represents the auto scaling properties of the table.

+ * @public + */ + TableAutoScalingDescription?: TableAutoScalingDescription | undefined; +} +/** + * @public + */ +export interface DescribeTimeToLiveInput { + /** + *

The name of the table to be described. You can also provide the Amazon Resource Name (ARN) of the table + * in this parameter.

+ * @public + */ + TableName: string | undefined; +} +/** + * @public + */ +export interface DescribeTimeToLiveOutput { + /** + *

+ * @public + */ + TimeToLiveDescription?: TimeToLiveDescription | undefined; +} +/** + *

Enables setting the configuration for Kinesis Streaming.

+ * @public + */ +export interface EnableKinesisStreamingConfiguration { + /** + *

Toggle for the precision of Kinesis data stream timestamp. The values are either + * MILLISECOND or MICROSECOND.

+ * @public + */ + ApproximateCreationDateTimePrecision?: ApproximateCreationDateTimePrecision | undefined; +} +/** + * @public + */ +export interface KinesisStreamingDestinationInput { + /** + *

The name of the DynamoDB table. You can also provide the Amazon Resource Name (ARN) of the + * table in this parameter.

+ * @public + */ + TableName: string | undefined; + /** + *

The ARN for a Kinesis data stream.

+ * @public + */ + StreamArn: string | undefined; + /** + *

The source for the Kinesis streaming information that is being enabled.

+ * @public + */ + EnableKinesisStreamingConfiguration?: EnableKinesisStreamingConfiguration | undefined; +} +/** + * @public + */ +export interface KinesisStreamingDestinationOutput { + /** + *

The name of the table being modified.

+ * @public + */ + TableName?: string | undefined; + /** + *

The ARN for the specific Kinesis data stream.

+ * @public + */ + StreamArn?: string | undefined; + /** + *

The current status of the replication.

+ * @public + */ + DestinationStatus?: DestinationStatus | undefined; + /** + *

The destination for the Kinesis streaming information that is being enabled.

+ * @public + */ + EnableKinesisStreamingConfiguration?: EnableKinesisStreamingConfiguration | undefined; +} +/** + *

There was an attempt to insert an item with the same primary key as an item that + * already exists in the DynamoDB table.

+ * @public + */ +export declare class DuplicateItemException extends __BaseException { + readonly name: "DuplicateItemException"; + readonly $fault: "client"; + /** + * @internal + */ + constructor(opts: __ExceptionOptionType); +} +/** + *

DynamoDB rejected the request because you retried a request with a + * different payload but with an idempotent token that was already used.

+ * @public + */ +export declare class IdempotentParameterMismatchException extends __BaseException { + readonly name: "IdempotentParameterMismatchException"; + readonly $fault: "client"; + Message?: string | undefined; + /** + * @internal + */ + constructor(opts: __ExceptionOptionType); +} +/** + *

The transaction with the given request token is already in progress.

+ *

+ * Recommended Settings + *

+ * + *

+ * This is a general recommendation for handling the TransactionInProgressException. These settings help + * ensure that the client retries will trigger completion of the ongoing TransactWriteItems request. + *

+ *
+ *
    + *
  • + *

    + * Set clientExecutionTimeout to a value that allows at least one retry to be processed after 5 + * seconds have elapsed since the first attempt for the TransactWriteItems operation. + *

    + *
  • + *
  • + *

    + * Set socketTimeout to a value a little lower than the requestTimeout setting. + *

    + *
  • + *
  • + *

    + * requestTimeout should be set based on the time taken for the individual retries of a single + * HTTP request for your use case, but setting it to 1 second or higher should work well to reduce chances of + * retries and TransactionInProgressException errors. + *

    + *
  • + *
  • + *

    + * Use exponential backoff when retrying and tune backoff if needed. + *

    + *
  • + *
+ *

+ * Assuming default retry policy, + * example timeout settings based on the guidelines above are as follows: + *

+ *

Example timeline:

+ *
    + *
  • + *

    0-1000 first attempt

    + *
  • + *
  • + *

    1000-1500 first sleep/delay (default retry policy uses 500 ms as base delay for 4xx errors)

    + *
  • + *
  • + *

    1500-2500 second attempt

    + *
  • + *
  • + *

    2500-3500 second sleep/delay (500 * 2, exponential backoff)

    + *
  • + *
  • + *

    3500-4500 third attempt

    + *
  • + *
  • + *

    4500-6500 third sleep/delay (500 * 2^2)

    + *
  • + *
  • + *

    6500-7500 fourth attempt (this can trigger inline recovery since 5 seconds have elapsed since the first attempt reached TC)

    + *
  • + *
+ * @public + */ +export declare class TransactionInProgressException extends __BaseException { + readonly name: "TransactionInProgressException"; + readonly $fault: "client"; + Message?: string | undefined; + /** + * @internal + */ + constructor(opts: __ExceptionOptionType); +} +/** + *

There was a conflict when writing to the specified S3 bucket.

+ * @public + */ +export declare class ExportConflictException extends __BaseException { + readonly name: "ExportConflictException"; + readonly $fault: "client"; + /** + * @internal + */ + constructor(opts: __ExceptionOptionType); +} +/** + * @public + */ +export interface ExportTableToPointInTimeInput { + /** + *

The Amazon Resource Name (ARN) associated with the table to export.

+ * @public + */ + TableArn: string | undefined; + /** + *

Time in the past from which to export table data, counted in seconds from the start of + * the Unix epoch. The table export will be a snapshot of the table's state at this point + * in time.

+ * @public + */ + ExportTime?: Date | undefined; + /** + *

Providing a ClientToken makes the call to + * ExportTableToPointInTimeInput idempotent, meaning that multiple + * identical calls have the same effect as one single call.

+ *

A client token is valid for 8 hours after the first request that uses it is completed. + * After 8 hours, any request with the same client token is treated as a new request. Do + * not resubmit the same request with the same client token for more than 8 hours, or the + * result might not be idempotent.

+ *

If you submit a request with the same client token but a change in other parameters + * within the 8-hour idempotency window, DynamoDB returns an + * ImportConflictException.

+ * @public + */ + ClientToken?: string | undefined; + /** + *

The name of the Amazon S3 bucket to export the snapshot to.

+ * @public + */ + S3Bucket: string | undefined; + /** + *

The ID of the Amazon Web Services account that owns the bucket the export will be + * stored in.

+ * + *

S3BucketOwner is a required parameter when exporting to a S3 bucket in another + * account.

+ *
+ * @public + */ + S3BucketOwner?: string | undefined; + /** + *

The Amazon S3 bucket prefix to use as the file name and path of the exported + * snapshot.

+ * @public + */ + S3Prefix?: string | undefined; + /** + *

Type of encryption used on the bucket where export data will be stored. Valid values + * for S3SseAlgorithm are:

+ *
    + *
  • + *

    + * AES256 - server-side encryption with Amazon S3 managed + * keys

    + *
  • + *
  • + *

    + * KMS - server-side encryption with KMS managed + * keys

    + *
  • + *
+ * @public + */ + S3SseAlgorithm?: S3SseAlgorithm | undefined; + /** + *

The ID of the KMS managed key used to encrypt the S3 bucket where + * export data will be stored (if applicable).

+ * @public + */ + S3SseKmsKeyId?: string | undefined; + /** + *

The format for the exported data. Valid values for ExportFormat are + * DYNAMODB_JSON or ION.

+ * @public + */ + ExportFormat?: ExportFormat | undefined; + /** + *

Choice of whether to execute as a full export or incremental export. Valid values are + * FULL_EXPORT or INCREMENTAL_EXPORT. The default value is FULL_EXPORT. If + * INCREMENTAL_EXPORT is provided, the IncrementalExportSpecification must also be + * used.

+ * @public + */ + ExportType?: ExportType | undefined; + /** + *

Optional object containing the parameters specific to an incremental export.

+ * @public + */ + IncrementalExportSpecification?: IncrementalExportSpecification | undefined; +} +/** + * @public + */ +export interface ExportTableToPointInTimeOutput { + /** + *

Contains a description of the table export.

+ * @public + */ + ExportDescription?: ExportDescription | undefined; +} +/** + *

The specified ExportTime is outside of the point in time recovery + * window.

+ * @public + */ +export declare class InvalidExportTimeException extends __BaseException { + readonly name: "InvalidExportTimeException"; + readonly $fault: "client"; + /** + * @internal + */ + constructor(opts: __ExceptionOptionType); +} +/** + *

Point in time recovery has not yet been enabled for this source table.

+ * @public + */ +export declare class PointInTimeRecoveryUnavailableException extends __BaseException { + readonly name: "PointInTimeRecoveryUnavailableException"; + readonly $fault: "client"; + /** + * @internal + */ + constructor(opts: __ExceptionOptionType); +} +/** + * @public + */ +export interface GetResourcePolicyInput { + /** + *

The Amazon Resource Name (ARN) of the DynamoDB resource to which the policy is attached. The + * resources you can specify include tables and streams.

+ * @public + */ + ResourceArn: string | undefined; +} +/** + * @public + */ +export interface GetResourcePolicyOutput { + /** + *

The resource-based policy document attached to the resource, which can be a table or + * stream, in JSON format.

+ * @public + */ + Policy?: string | undefined; + /** + *

A unique string that represents the revision ID of the policy. If you're comparing revision IDs, make sure to always use string comparison logic.

+ * @public + */ + RevisionId?: string | undefined; +} +/** + *

+ * There was a conflict when importing from the specified S3 source. + * This can occur when the current import conflicts with a previous import request + * that had the same client token. + *

+ * @public + */ +export declare class ImportConflictException extends __BaseException { + readonly name: "ImportConflictException"; + readonly $fault: "client"; + /** + * @internal + */ + constructor(opts: __ExceptionOptionType); +} +/** + * @public + */ +export interface ImportTableInput { + /** + *

Providing a ClientToken makes the call to ImportTableInput + * idempotent, meaning that multiple identical calls have the same effect as one single + * call.

+ *

A client token is valid for 8 hours after the first request that uses it is completed. + * After 8 hours, any request with the same client token is treated as a new request. Do + * not resubmit the same request with the same client token for more than 8 hours, or the + * result might not be idempotent.

+ *

If you submit a request with the same client token but a change in other parameters + * within the 8-hour idempotency window, DynamoDB returns an + * IdempotentParameterMismatch exception.

+ * @public + */ + ClientToken?: string | undefined; + /** + *

The S3 bucket that provides the source for the import.

+ * @public + */ + S3BucketSource: S3BucketSource | undefined; + /** + *

The format of the source data. Valid values for ImportFormat are + * CSV, DYNAMODB_JSON or ION.

+ * @public + */ + InputFormat: InputFormat | undefined; + /** + *

Additional properties that specify how the input is formatted,

+ * @public + */ + InputFormatOptions?: InputFormatOptions | undefined; + /** + *

Type of compression to be used on the input coming from the imported table.

+ * @public + */ + InputCompressionType?: InputCompressionType | undefined; + /** + *

Parameters for the table to import the data into.

+ * @public + */ + TableCreationParameters: TableCreationParameters | undefined; +} +/** + * @public + */ +export interface ImportTableOutput { + /** + *

Represents the properties of the table created for the import, and parameters of the + * import. The import parameters include import status, how many items were processed, and + * how many errors were encountered.

+ * @public + */ + ImportTableDescription: ImportTableDescription | undefined; +} +/** + * @public + */ +export interface ListBackupsInput { + /** + *

Lists the backups from the table specified in TableName. You can also + * provide the Amazon Resource Name (ARN) of the table in this parameter.

+ * @public + */ + TableName?: string | undefined; + /** + *

Maximum number of backups to return at once.

+ * @public + */ + Limit?: number | undefined; + /** + *

Only backups created after this time are listed. TimeRangeLowerBound is + * inclusive.

+ * @public + */ + TimeRangeLowerBound?: Date | undefined; + /** + *

Only backups created before this time are listed. TimeRangeUpperBound is + * exclusive.

+ * @public + */ + TimeRangeUpperBound?: Date | undefined; + /** + *

+ * LastEvaluatedBackupArn is the Amazon Resource Name (ARN) of the backup last + * evaluated when the current page of results was returned, inclusive of the current page + * of results. This value may be specified as the ExclusiveStartBackupArn of a + * new ListBackups operation in order to fetch the next page of results. + *

+ * @public + */ + ExclusiveStartBackupArn?: string | undefined; + /** + *

The backups from the table specified by BackupType are listed.

+ *

Where BackupType can be:

+ *
    + *
  • + *

    + * USER - On-demand backup created by you. (The default setting if no + * other backup types are specified.)

    + *
  • + *
  • + *

    + * SYSTEM - On-demand backup automatically created by DynamoDB.

    + *
  • + *
  • + *

    + * ALL - All types of on-demand backups (USER and SYSTEM).

    + *
  • + *
+ * @public + */ + BackupType?: BackupTypeFilter | undefined; +} +/** + * @public + */ +export interface ListBackupsOutput { + /** + *

List of BackupSummary objects.

+ * @public + */ + BackupSummaries?: BackupSummary[] | undefined; + /** + *

The ARN of the backup last evaluated when the current page of results was returned, + * inclusive of the current page of results. This value may be specified as the + * ExclusiveStartBackupArn of a new ListBackups operation in + * order to fetch the next page of results.

+ *

If LastEvaluatedBackupArn is empty, then the last page of results has + * been processed and there are no more results to be retrieved.

+ *

If LastEvaluatedBackupArn is not empty, this may or may not indicate + * that there is more data to be returned. All results are guaranteed to have been returned + * if and only if no value for LastEvaluatedBackupArn is returned.

+ * @public + */ + LastEvaluatedBackupArn?: string | undefined; +} +/** + * @public + */ +export interface ListContributorInsightsInput { + /** + *

The name of the table. You can also provide the Amazon Resource Name (ARN) of the table in this + * parameter.

+ * @public + */ + TableName?: string | undefined; + /** + *

A token to for the desired page, if there is one.

+ * @public + */ + NextToken?: string | undefined; + /** + *

Maximum number of results to return per page.

+ * @public + */ + MaxResults?: number | undefined; +} +/** + * @public + */ +export interface ListContributorInsightsOutput { + /** + *

A list of ContributorInsightsSummary.

+ * @public + */ + ContributorInsightsSummaries?: ContributorInsightsSummary[] | undefined; + /** + *

A token to go to the next page if there is one.

+ * @public + */ + NextToken?: string | undefined; +} +/** + * @public + */ +export interface ListExportsInput { + /** + *

The Amazon Resource Name (ARN) associated with the exported table.

+ * @public + */ + TableArn?: string | undefined; + /** + *

Maximum number of results to return per page.

+ * @public + */ + MaxResults?: number | undefined; + /** + *

An optional string that, if supplied, must be copied from the output of a previous + * call to ListExports. When provided in this manner, the API fetches the next + * page of results.

+ * @public + */ + NextToken?: string | undefined; +} +/** + *

Summary information about an export task.

+ * @public + */ +export interface ExportSummary { + /** + *

The Amazon Resource Name (ARN) of the export.

+ * @public + */ + ExportArn?: string | undefined; + /** + *

Export can be in one of the following states: IN_PROGRESS, COMPLETED, or + * FAILED.

+ * @public + */ + ExportStatus?: ExportStatus | undefined; + /** + *

The type of export that was performed. Valid values are FULL_EXPORT or + * INCREMENTAL_EXPORT.

+ * @public + */ + ExportType?: ExportType | undefined; +} +/** + * @public + */ +export interface ListExportsOutput { + /** + *

A list of ExportSummary objects.

+ * @public + */ + ExportSummaries?: ExportSummary[] | undefined; + /** + *

If this value is returned, there are additional results to be displayed. To retrieve + * them, call ListExports again, with NextToken set to this + * value.

+ * @public + */ + NextToken?: string | undefined; +} +/** + * @public + */ +export interface ListGlobalTablesInput { + /** + *

The first global table name that this operation will evaluate.

+ * @public + */ + ExclusiveStartGlobalTableName?: string | undefined; + /** + *

The maximum number of table names to return, if the parameter is not specified + * DynamoDB defaults to 100.

+ *

If the number of global tables DynamoDB finds reaches this limit, it stops the + * operation and returns the table names collected up to that point, with a table name in + * the LastEvaluatedGlobalTableName to apply in a subsequent operation to the + * ExclusiveStartGlobalTableName parameter.

+ * @public + */ + Limit?: number | undefined; + /** + *

Lists the global tables in a specific Region.

+ * @public + */ + RegionName?: string | undefined; +} +/** + *

Represents the properties of a global table.

+ * @public + */ +export interface GlobalTable { + /** + *

The global table name.

+ * @public + */ + GlobalTableName?: string | undefined; + /** + *

The Regions where the global table has replicas.

+ * @public + */ + ReplicationGroup?: Replica[] | undefined; +} +/** + * @public + */ +export interface ListGlobalTablesOutput { + /** + *

List of global table names.

+ * @public + */ + GlobalTables?: GlobalTable[] | undefined; + /** + *

Last evaluated global table name.

+ * @public + */ + LastEvaluatedGlobalTableName?: string | undefined; +} +/** + * @public + */ +export interface ListImportsInput { + /** + *

The Amazon Resource Name (ARN) associated with the table that was imported to. + *

+ * @public + */ + TableArn?: string | undefined; + /** + *

The number of ImportSummary objects returned in a single page.

+ * @public + */ + PageSize?: number | undefined; + /** + *

An optional string that, if supplied, must be copied from the output of a previous + * call to ListImports. When provided in this manner, the API fetches the next + * page of results.

+ * @public + */ + NextToken?: string | undefined; +} +/** + *

Summary information about the source file for the import. + *

+ * @public + */ +export interface ImportSummary { + /** + *

The Amazon Resource Number (ARN) corresponding to the import request.

+ * @public + */ + ImportArn?: string | undefined; + /** + *

The status of the import operation.

+ * @public + */ + ImportStatus?: ImportStatus | undefined; + /** + *

The Amazon Resource Number (ARN) of the table being imported into.

+ * @public + */ + TableArn?: string | undefined; + /** + *

The path and S3 bucket of the source file that is being imported. This includes the + * S3Bucket (required), S3KeyPrefix (optional) and S3BucketOwner (optional if the bucket is + * owned by the requester).

+ * @public + */ + S3BucketSource?: S3BucketSource | undefined; + /** + *

The Amazon Resource Number (ARN) of the Cloudwatch Log Group associated with this + * import task.

+ * @public + */ + CloudWatchLogGroupArn?: string | undefined; + /** + *

The format of the source data. Valid values are CSV, + * DYNAMODB_JSON or ION.

+ * @public + */ + InputFormat?: InputFormat | undefined; + /** + *

The time at which this import task began.

+ * @public + */ + StartTime?: Date | undefined; + /** + *

The time at which this import task ended. (Does this include the successful complete + * creation of the table it was imported to?)

+ * @public + */ + EndTime?: Date | undefined; +} +/** + * @public + */ +export interface ListImportsOutput { + /** + *

A list of ImportSummary objects.

+ * @public + */ + ImportSummaryList?: ImportSummary[] | undefined; + /** + *

If this value is returned, there are additional results to be displayed. To retrieve + * them, call ListImports again, with NextToken set to this + * value.

+ * @public + */ + NextToken?: string | undefined; +} +/** + *

Represents the input of a ListTables operation.

+ * @public + */ +export interface ListTablesInput { + /** + *

The first table name that this operation will evaluate. Use the value that was + * returned for LastEvaluatedTableName in a previous operation, so that you + * can obtain the next page of results.

+ * @public + */ + ExclusiveStartTableName?: string | undefined; + /** + *

A maximum number of table names to return. If this parameter is not specified, the + * limit is 100.

+ * @public + */ + Limit?: number | undefined; +} +/** + *

Represents the output of a ListTables operation.

+ * @public + */ +export interface ListTablesOutput { + /** + *

The names of the tables associated with the current account at the current endpoint. + * The maximum size of this array is 100.

+ *

If LastEvaluatedTableName also appears in the output, you can use this + * value as the ExclusiveStartTableName parameter in a subsequent + * ListTables request and obtain the next page of results.

+ * @public + */ + TableNames?: string[] | undefined; + /** + *

The name of the last table in the current page of results. Use this value as the + * ExclusiveStartTableName in a new request to obtain the next page of + * results, until all the table names are returned.

+ *

If you do not receive a LastEvaluatedTableName value in the response, + * this means that there are no more table names to be retrieved.

+ * @public + */ + LastEvaluatedTableName?: string | undefined; +} +/** + * @public + */ +export interface ListTagsOfResourceInput { + /** + *

The Amazon DynamoDB resource with tags to be listed. This value is an Amazon Resource + * Name (ARN).

+ * @public + */ + ResourceArn: string | undefined; + /** + *

An optional string that, if supplied, must be copied from the output of a previous + * call to ListTagOfResource. When provided in this manner, this API fetches the next page + * of results.

+ * @public + */ + NextToken?: string | undefined; +} +/** + * @public + */ +export interface ListTagsOfResourceOutput { + /** + *

The tags currently associated with the Amazon DynamoDB resource.

+ * @public + */ + Tags?: Tag[] | undefined; + /** + *

If this value is returned, there are additional results to be displayed. To retrieve + * them, call ListTagsOfResource again, with NextToken set to this value.

+ * @public + */ + NextToken?: string | undefined; +} +/** + * @public + */ +export interface PutResourcePolicyInput { + /** + *

The Amazon Resource Name (ARN) of the DynamoDB resource to which the policy will be attached. + * The resources you can specify include tables and streams.

+ *

You can control index permissions using the base table's policy. To specify the same permission level for your table and its indexes, you can provide both the table and index Amazon Resource Name (ARN)s in the Resource field of a given Statement in your policy document. Alternatively, to specify different permissions for your table, indexes, or both, you can define multiple Statement fields in your policy document.

+ * @public + */ + ResourceArn: string | undefined; + /** + *

An Amazon Web Services resource-based policy document in JSON format.

+ *
    + *
  • + *

    The maximum size supported for a resource-based policy document is 20 KB. + * DynamoDB counts whitespaces when calculating the size of a policy + * against this limit.

    + *
  • + *
  • + *

    Within a resource-based policy, if the action for a DynamoDB + * service-linked role (SLR) to replicate data for a global table is denied, adding + * or deleting a replica will fail with an error.

    + *
  • + *
+ *

For a full list of all considerations that apply while attaching a resource-based + * policy, see Resource-based + * policy considerations.

+ * @public + */ + Policy: string | undefined; + /** + *

A string value that you can use to conditionally update your policy. You can provide + * the revision ID of your existing policy to make mutating requests against that + * policy.

+ * + *

When you provide an expected revision ID, if the revision ID of the existing + * policy on the resource doesn't match or if there's no policy attached to the + * resource, your request will be rejected with a + * PolicyNotFoundException.

+ *
+ *

To conditionally attach a policy when no policy exists for the resource, specify + * NO_POLICY for the revision ID.

+ * @public + */ + ExpectedRevisionId?: string | undefined; + /** + *

Set this parameter to true to confirm that you want to remove your + * permissions to change the policy of this resource in the future.

+ * @public + */ + ConfirmRemoveSelfResourceAccess?: boolean | undefined; +} +/** + * @public + */ +export interface PutResourcePolicyOutput { + /** + *

A unique string that represents the revision ID of the policy. If you're comparing revision IDs, make sure to always use string comparison logic.

+ * @public + */ + RevisionId?: string | undefined; +} +/** + * @public + * @enum + */ +export declare const Select: { + readonly ALL_ATTRIBUTES: "ALL_ATTRIBUTES"; + readonly ALL_PROJECTED_ATTRIBUTES: "ALL_PROJECTED_ATTRIBUTES"; + readonly COUNT: "COUNT"; + readonly SPECIFIC_ATTRIBUTES: "SPECIFIC_ATTRIBUTES"; +}; +/** + * @public + */ +export type Select = (typeof Select)[keyof typeof Select]; +/** + * @public + */ +export interface RestoreTableFromBackupInput { + /** + *

The name of the new table to which the backup must be restored.

+ * @public + */ + TargetTableName: string | undefined; + /** + *

The Amazon Resource Name (ARN) associated with the backup.

+ * @public + */ + BackupArn: string | undefined; + /** + *

The billing mode of the restored table.

+ * @public + */ + BillingModeOverride?: BillingMode | undefined; + /** + *

List of global secondary indexes for the restored table. The indexes provided should + * match existing secondary indexes. You can choose to exclude some or all of the indexes + * at the time of restore.

+ * @public + */ + GlobalSecondaryIndexOverride?: GlobalSecondaryIndex[] | undefined; + /** + *

List of local secondary indexes for the restored table. The indexes provided should + * match existing secondary indexes. You can choose to exclude some or all of the indexes + * at the time of restore.

+ * @public + */ + LocalSecondaryIndexOverride?: LocalSecondaryIndex[] | undefined; + /** + *

Provisioned throughput settings for the restored table.

+ * @public + */ + ProvisionedThroughputOverride?: ProvisionedThroughput | undefined; + /** + *

Sets the maximum number of read and write units for the specified on-demand table. If + * you use this parameter, you must specify MaxReadRequestUnits, + * MaxWriteRequestUnits, or both.

+ * @public + */ + OnDemandThroughputOverride?: OnDemandThroughput | undefined; + /** + *

The new server-side encryption settings for the restored table.

+ * @public + */ + SSESpecificationOverride?: SSESpecification | undefined; +} +/** + * @public + */ +export interface RestoreTableFromBackupOutput { + /** + *

The description of the table created from an existing backup.

+ * @public + */ + TableDescription?: TableDescription | undefined; +} +/** + *

A target table with the specified name already exists.

+ * @public + */ +export declare class TableAlreadyExistsException extends __BaseException { + readonly name: "TableAlreadyExistsException"; + readonly $fault: "client"; + /** + * @internal + */ + constructor(opts: __ExceptionOptionType); +} +/** + *

An invalid restore time was specified. RestoreDateTime must be between + * EarliestRestorableDateTime and LatestRestorableDateTime.

+ * @public + */ +export declare class InvalidRestoreTimeException extends __BaseException { + readonly name: "InvalidRestoreTimeException"; + readonly $fault: "client"; + /** + * @internal + */ + constructor(opts: __ExceptionOptionType); +} +/** + * @public + */ +export interface RestoreTableToPointInTimeInput { + /** + *

The DynamoDB table that will be restored. This value is an Amazon Resource Name + * (ARN).

+ * @public + */ + SourceTableArn?: string | undefined; + /** + *

Name of the source table that is being restored.

+ * @public + */ + SourceTableName?: string | undefined; + /** + *

The name of the new table to which it must be restored to.

+ * @public + */ + TargetTableName: string | undefined; + /** + *

Restore the table to the latest possible time. LatestRestorableDateTime + * is typically 5 minutes before the current time.

+ * @public + */ + UseLatestRestorableTime?: boolean | undefined; + /** + *

Time in the past to restore the table to.

+ * @public + */ + RestoreDateTime?: Date | undefined; + /** + *

The billing mode of the restored table.

+ * @public + */ + BillingModeOverride?: BillingMode | undefined; + /** + *

List of global secondary indexes for the restored table. The indexes provided should + * match existing secondary indexes. You can choose to exclude some or all of the indexes + * at the time of restore.

+ * @public + */ + GlobalSecondaryIndexOverride?: GlobalSecondaryIndex[] | undefined; + /** + *

List of local secondary indexes for the restored table. The indexes provided should + * match existing secondary indexes. You can choose to exclude some or all of the indexes + * at the time of restore.

+ * @public + */ + LocalSecondaryIndexOverride?: LocalSecondaryIndex[] | undefined; + /** + *

Provisioned throughput settings for the restored table.

+ * @public + */ + ProvisionedThroughputOverride?: ProvisionedThroughput | undefined; + /** + *

Sets the maximum number of read and write units for the specified on-demand table. If + * you use this parameter, you must specify MaxReadRequestUnits, + * MaxWriteRequestUnits, or both.

+ * @public + */ + OnDemandThroughputOverride?: OnDemandThroughput | undefined; + /** + *

The new server-side encryption settings for the restored table.

+ * @public + */ + SSESpecificationOverride?: SSESpecification | undefined; +} +/** + * @public + */ +export interface RestoreTableToPointInTimeOutput { + /** + *

Represents the properties of a table.

+ * @public + */ + TableDescription?: TableDescription | undefined; +} +/** + * @public + */ +export interface TagResourceInput { + /** + *

Identifies the Amazon DynamoDB resource to which tags should be added. This value is + * an Amazon Resource Name (ARN).

+ * @public + */ + ResourceArn: string | undefined; + /** + *

The tags to be assigned to the Amazon DynamoDB resource.

+ * @public + */ + Tags: Tag[] | undefined; +} +/** + * @public + */ +export interface UntagResourceInput { + /** + *

The DynamoDB resource that the tags will be removed from. This value is an Amazon + * Resource Name (ARN).

+ * @public + */ + ResourceArn: string | undefined; + /** + *

A list of tag keys. Existing tags of the resource whose keys are members of this list + * will be removed from the DynamoDB resource.

+ * @public + */ + TagKeys: string[] | undefined; +} +/** + *

Represents the settings used to enable point in time recovery.

+ * @public + */ +export interface PointInTimeRecoverySpecification { + /** + *

Indicates whether point in time recovery is enabled (true) or disabled (false) on the + * table.

+ * @public + */ + PointInTimeRecoveryEnabled: boolean | undefined; + /** + *

The number of preceding days for which continuous backups are taken and maintained. + * Your table data is only recoverable to any point-in-time from within the configured + * recovery period. This parameter is optional. If no value is provided, the value will + * default to 35.

+ * @public + */ + RecoveryPeriodInDays?: number | undefined; +} +/** + * @public + */ +export interface UpdateContinuousBackupsInput { + /** + *

The name of the table. You can also provide the Amazon Resource Name (ARN) of the table in this + * parameter.

+ * @public + */ + TableName: string | undefined; + /** + *

Represents the settings used to enable point in time recovery.

+ * @public + */ + PointInTimeRecoverySpecification: PointInTimeRecoverySpecification | undefined; +} +/** + * @public + */ +export interface UpdateContinuousBackupsOutput { + /** + *

Represents the continuous backups and point in time recovery settings on the + * table.

+ * @public + */ + ContinuousBackupsDescription?: ContinuousBackupsDescription | undefined; +} +/** + * @public + */ +export interface UpdateContributorInsightsInput { + /** + *

The name of the table. You can also provide the Amazon Resource Name (ARN) of the table in this + * parameter.

+ * @public + */ + TableName: string | undefined; + /** + *

The global secondary index name, if applicable.

+ * @public + */ + IndexName?: string | undefined; + /** + *

Represents the contributor insights action.

+ * @public + */ + ContributorInsightsAction: ContributorInsightsAction | undefined; +} +/** + * @public + */ +export interface UpdateContributorInsightsOutput { + /** + *

The name of the table.

+ * @public + */ + TableName?: string | undefined; + /** + *

The name of the global secondary index, if applicable.

+ * @public + */ + IndexName?: string | undefined; + /** + *

The status of contributor insights

+ * @public + */ + ContributorInsightsStatus?: ContributorInsightsStatus | undefined; +} +/** + *

The specified replica is already part of the global table.

+ * @public + */ +export declare class ReplicaAlreadyExistsException extends __BaseException { + readonly name: "ReplicaAlreadyExistsException"; + readonly $fault: "client"; + /** + * @internal + */ + constructor(opts: __ExceptionOptionType); +} +/** + *

The specified replica is no longer part of the global table.

+ * @public + */ +export declare class ReplicaNotFoundException extends __BaseException { + readonly name: "ReplicaNotFoundException"; + readonly $fault: "client"; + /** + * @internal + */ + constructor(opts: __ExceptionOptionType); +} +/** + *

Represents one of the following:

+ *
    + *
  • + *

    A new replica to be added to an existing global table.

    + *
  • + *
  • + *

    New parameters for an existing replica.

    + *
  • + *
  • + *

    An existing replica to be removed from an existing global table.

    + *
  • + *
+ * @public + */ +export interface ReplicaUpdate { + /** + *

The parameters required for creating a replica on an existing global table.

+ * @public + */ + Create?: CreateReplicaAction | undefined; + /** + *

The name of the existing replica to be removed.

+ * @public + */ + Delete?: DeleteReplicaAction | undefined; +} +/** + * @public + */ +export interface UpdateGlobalTableInput { + /** + *

The global table name.

+ * @public + */ + GlobalTableName: string | undefined; + /** + *

A list of Regions that should be added or removed from the global table.

+ * @public + */ + ReplicaUpdates: ReplicaUpdate[] | undefined; +} +/** + * @public + */ +export interface UpdateGlobalTableOutput { + /** + *

Contains the details of the global table.

+ * @public + */ + GlobalTableDescription?: GlobalTableDescription | undefined; +} +/** + *

The operation tried to access a nonexistent index.

+ * @public + */ +export declare class IndexNotFoundException extends __BaseException { + readonly name: "IndexNotFoundException"; + readonly $fault: "client"; + /** + * @internal + */ + constructor(opts: __ExceptionOptionType); +} +/** + *

Represents the settings of a global secondary index for a global table that will be + * modified.

+ * @public + */ +export interface GlobalTableGlobalSecondaryIndexSettingsUpdate { + /** + *

The name of the global secondary index. The name must be unique among all other + * indexes on this table.

+ * @public + */ + IndexName: string | undefined; + /** + *

The maximum number of writes consumed per second before DynamoDB returns a + * ThrottlingException. + *

+ * @public + */ + ProvisionedWriteCapacityUnits?: number | undefined; + /** + *

Auto scaling settings for managing a global secondary index's write capacity + * units.

+ * @public + */ + ProvisionedWriteCapacityAutoScalingSettingsUpdate?: AutoScalingSettingsUpdate | undefined; +} +/** + *

Represents the settings of a global secondary index for a global table that will be + * modified.

+ * @public + */ +export interface ReplicaGlobalSecondaryIndexSettingsUpdate { + /** + *

The name of the global secondary index. The name must be unique among all other + * indexes on this table.

+ * @public + */ + IndexName: string | undefined; + /** + *

The maximum number of strongly consistent reads consumed per second before DynamoDB + * returns a ThrottlingException.

+ * @public + */ + ProvisionedReadCapacityUnits?: number | undefined; + /** + *

Auto scaling settings for managing a global secondary index replica's read capacity + * units.

+ * @public + */ + ProvisionedReadCapacityAutoScalingSettingsUpdate?: AutoScalingSettingsUpdate | undefined; +} +/** + *

Represents the settings for a global table in a Region that will be modified.

+ * @public + */ +export interface ReplicaSettingsUpdate { + /** + *

The Region of the replica to be added.

+ * @public + */ + RegionName: string | undefined; + /** + *

The maximum number of strongly consistent reads consumed per second before DynamoDB + * returns a ThrottlingException. For more information, see Specifying Read and Write Requirements in the Amazon DynamoDB + * Developer Guide.

+ * @public + */ + ReplicaProvisionedReadCapacityUnits?: number | undefined; + /** + *

Auto scaling settings for managing a global table replica's read capacity + * units.

+ * @public + */ + ReplicaProvisionedReadCapacityAutoScalingSettingsUpdate?: AutoScalingSettingsUpdate | undefined; + /** + *

Represents the settings of a global secondary index for a global table that will be + * modified.

+ * @public + */ + ReplicaGlobalSecondaryIndexSettingsUpdate?: ReplicaGlobalSecondaryIndexSettingsUpdate[] | undefined; + /** + *

Replica-specific table class. If not specified, uses the source table's table + * class.

+ * @public + */ + ReplicaTableClass?: TableClass | undefined; +} +/** + * @public + */ +export interface UpdateGlobalTableSettingsInput { + /** + *

The name of the global table

+ * @public + */ + GlobalTableName: string | undefined; + /** + *

The billing mode of the global table. If GlobalTableBillingMode is not + * specified, the global table defaults to PROVISIONED capacity billing + * mode.

+ *
    + *
  • + *

    + * PROVISIONED - We recommend using PROVISIONED for + * predictable workloads. PROVISIONED sets the billing mode to Provisioned capacity mode.

    + *
  • + *
  • + *

    + * PAY_PER_REQUEST - We recommend using PAY_PER_REQUEST + * for unpredictable workloads. PAY_PER_REQUEST sets the billing mode + * to On-demand capacity mode.

    + *
  • + *
+ * @public + */ + GlobalTableBillingMode?: BillingMode | undefined; + /** + *

The maximum number of writes consumed per second before DynamoDB returns a + * ThrottlingException. + *

+ * @public + */ + GlobalTableProvisionedWriteCapacityUnits?: number | undefined; + /** + *

Auto scaling settings for managing provisioned write capacity for the global + * table.

+ * @public + */ + GlobalTableProvisionedWriteCapacityAutoScalingSettingsUpdate?: AutoScalingSettingsUpdate | undefined; + /** + *

Represents the settings of a global secondary index for a global table that will be + * modified.

+ * @public + */ + GlobalTableGlobalSecondaryIndexSettingsUpdate?: GlobalTableGlobalSecondaryIndexSettingsUpdate[] | undefined; + /** + *

Represents the settings for a global table in a Region that will be modified.

+ * @public + */ + ReplicaSettingsUpdate?: ReplicaSettingsUpdate[] | undefined; +} +/** + * @public + */ +export interface UpdateGlobalTableSettingsOutput { + /** + *

The name of the global table.

+ * @public + */ + GlobalTableName?: string | undefined; + /** + *

The Region-specific settings for the global table.

+ * @public + */ + ReplicaSettings?: ReplicaSettingsDescription[] | undefined; +} +/** + *

Enables updating the configuration for Kinesis Streaming.

+ * @public + */ +export interface UpdateKinesisStreamingConfiguration { + /** + *

Enables updating the precision of Kinesis data stream timestamp.

+ * @public + */ + ApproximateCreationDateTimePrecision?: ApproximateCreationDateTimePrecision | undefined; +} +/** + * @public + */ +export interface UpdateKinesisStreamingDestinationInput { + /** + *

The table name for the Kinesis streaming destination input. You can also provide the + * ARN of the table in this parameter.

+ * @public + */ + TableName: string | undefined; + /** + *

The Amazon Resource Name (ARN) for the Kinesis stream input.

+ * @public + */ + StreamArn: string | undefined; + /** + *

The command to update the Kinesis stream configuration.

+ * @public + */ + UpdateKinesisStreamingConfiguration?: UpdateKinesisStreamingConfiguration | undefined; +} +/** + * @public + */ +export interface UpdateKinesisStreamingDestinationOutput { + /** + *

The table name for the Kinesis streaming destination output.

+ * @public + */ + TableName?: string | undefined; + /** + *

The ARN for the Kinesis stream input.

+ * @public + */ + StreamArn?: string | undefined; + /** + *

The status of the attempt to update the Kinesis streaming destination output.

+ * @public + */ + DestinationStatus?: DestinationStatus | undefined; + /** + *

The command to update the Kinesis streaming destination configuration.

+ * @public + */ + UpdateKinesisStreamingConfiguration?: UpdateKinesisStreamingConfiguration | undefined; +} +/** + *

Represents the new provisioned throughput settings to be applied to a global secondary + * index.

+ * @public + */ +export interface UpdateGlobalSecondaryIndexAction { + /** + *

The name of the global secondary index to be updated.

+ * @public + */ + IndexName: string | undefined; + /** + *

Represents the provisioned throughput settings for the specified global secondary + * index.

+ *

For current minimum and maximum provisioned throughput values, see Service, + * Account, and Table Quotas in the Amazon DynamoDB Developer + * Guide.

+ * @public + */ + ProvisionedThroughput?: ProvisionedThroughput | undefined; + /** + *

Updates the maximum number of read and write units for the specified global secondary + * index. If you use this parameter, you must specify MaxReadRequestUnits, + * MaxWriteRequestUnits, or both.

+ * @public + */ + OnDemandThroughput?: OnDemandThroughput | undefined; + /** + *

Represents the warm throughput value of the new provisioned throughput settings to be + * applied to a global secondary index.

+ * @public + */ + WarmThroughput?: WarmThroughput | undefined; +} +/** + *

Represents one of the following:

+ *
    + *
  • + *

    A new global secondary index to be added to an existing table.

    + *
  • + *
  • + *

    New provisioned throughput parameters for an existing global secondary + * index.

    + *
  • + *
  • + *

    An existing global secondary index to be removed from an existing + * table.

    + *
  • + *
+ * @public + */ +export interface GlobalSecondaryIndexUpdate { + /** + *

The name of an existing global secondary index, along with new provisioned throughput + * settings to be applied to that index.

+ * @public + */ + Update?: UpdateGlobalSecondaryIndexAction | undefined; + /** + *

The parameters required for creating a global secondary index on an existing + * table:

+ *
    + *
  • + *

    + * IndexName + *

    + *
  • + *
  • + *

    + * KeySchema + *

    + *
  • + *
  • + *

    + * AttributeDefinitions + *

    + *
  • + *
  • + *

    + * Projection + *

    + *
  • + *
  • + *

    + * ProvisionedThroughput + *

    + *
  • + *
+ * @public + */ + Create?: CreateGlobalSecondaryIndexAction | undefined; + /** + *

The name of an existing global secondary index to be removed.

+ * @public + */ + Delete?: DeleteGlobalSecondaryIndexAction | undefined; +} +/** + *

Represents a replica to be modified.

+ * @public + */ +export interface UpdateReplicationGroupMemberAction { + /** + *

The Region where the replica exists.

+ * @public + */ + RegionName: string | undefined; + /** + *

The KMS key of the replica that should be used for KMS + * encryption. To specify a key, use its key ID, Amazon Resource Name (ARN), alias name, or + * alias ARN. Note that you should only provide this parameter if the key is different from + * the default DynamoDB KMS key alias/aws/dynamodb.

+ * @public + */ + KMSMasterKeyId?: string | undefined; + /** + *

Replica-specific provisioned throughput. If not specified, uses the source table's + * provisioned throughput settings.

+ * @public + */ + ProvisionedThroughputOverride?: ProvisionedThroughputOverride | undefined; + /** + *

Overrides the maximum on-demand throughput for the replica table.

+ * @public + */ + OnDemandThroughputOverride?: OnDemandThroughputOverride | undefined; + /** + *

Replica-specific global secondary index settings.

+ * @public + */ + GlobalSecondaryIndexes?: ReplicaGlobalSecondaryIndex[] | undefined; + /** + *

Replica-specific table class. If not specified, uses the source table's table + * class.

+ * @public + */ + TableClassOverride?: TableClass | undefined; +} +/** + *

Represents one of the following:

+ *
    + *
  • + *

    A new replica to be added to an existing regional table or global table. This + * request invokes the CreateTableReplica action in the destination + * Region.

    + *
  • + *
  • + *

    New parameters for an existing replica. This request invokes the + * UpdateTable action in the destination Region.

    + *
  • + *
  • + *

    An existing replica to be deleted. The request invokes the + * DeleteTableReplica action in the destination Region, deleting + * the replica and all if its items in the destination Region.

    + *
  • + *
+ * + *

When you manually remove a table or global table replica, you do not automatically + * remove any associated scalable targets, scaling policies, or CloudWatch + * alarms.

+ *
+ * @public + */ +export interface ReplicationGroupUpdate { + /** + *

The parameters required for creating a replica for the table.

+ * @public + */ + Create?: CreateReplicationGroupMemberAction | undefined; + /** + *

The parameters required for updating a replica for the table.

+ * @public + */ + Update?: UpdateReplicationGroupMemberAction | undefined; + /** + *

The parameters required for deleting a replica for the table.

+ * @public + */ + Delete?: DeleteReplicationGroupMemberAction | undefined; +} +/** + *

Represents the input of an UpdateTable operation.

+ * @public + */ +export interface UpdateTableInput { + /** + *

An array of attributes that describe the key schema for the table and indexes. If you + * are adding a new global secondary index to the table, AttributeDefinitions + * must include the key element(s) of the new index.

+ * @public + */ + AttributeDefinitions?: AttributeDefinition[] | undefined; + /** + *

The name of the table to be updated. You can also provide the Amazon Resource Name (ARN) of the table + * in this parameter.

+ * @public + */ + TableName: string | undefined; + /** + *

Controls how you are charged for read and write throughput and how you manage + * capacity. When switching from pay-per-request to provisioned capacity, initial + * provisioned capacity values must be set. The initial provisioned capacity values are + * estimated based on the consumed read and write capacity of your table and global + * secondary indexes over the past 30 minutes.

+ *
    + *
  • + *

    + * PAY_PER_REQUEST - We recommend using PAY_PER_REQUEST + * for most DynamoDB workloads. PAY_PER_REQUEST sets the billing mode + * to On-demand capacity mode.

    + *
  • + *
  • + *

    + * PROVISIONED - We recommend using PROVISIONED for + * steady workloads with predictable growth where capacity requirements can be + * reliably forecasted. PROVISIONED sets the billing mode to Provisioned capacity mode.

    + *
  • + *
+ * @public + */ + BillingMode?: BillingMode | undefined; + /** + *

The new provisioned throughput settings for the specified table or index.

+ * @public + */ + ProvisionedThroughput?: ProvisionedThroughput | undefined; + /** + *

An array of one or more global secondary indexes for the table. For each index in the + * array, you can request one action:

+ *
    + *
  • + *

    + * Create - add a new global secondary index to the table.

    + *
  • + *
  • + *

    + * Update - modify the provisioned throughput settings of an existing + * global secondary index.

    + *
  • + *
  • + *

    + * Delete - remove a global secondary index from the table.

    + *
  • + *
+ *

You can create or delete only one global secondary index per UpdateTable + * operation.

+ *

For more information, see Managing Global + * Secondary Indexes in the Amazon DynamoDB Developer + * Guide.

+ * @public + */ + GlobalSecondaryIndexUpdates?: GlobalSecondaryIndexUpdate[] | undefined; + /** + *

Represents the DynamoDB Streams configuration for the table.

+ * + *

You receive a ValidationException if you try to enable a stream on a + * table that already has a stream, or if you try to disable a stream on a table that + * doesn't have a stream.

+ *
+ * @public + */ + StreamSpecification?: StreamSpecification | undefined; + /** + *

The new server-side encryption settings for the specified table.

+ * @public + */ + SSESpecification?: SSESpecification | undefined; + /** + *

A list of replica update actions (create, delete, or update) for the table.

+ * + *

For global tables, this property only applies to global tables using Version + * 2019.11.21 (Current version).

+ *
+ * @public + */ + ReplicaUpdates?: ReplicationGroupUpdate[] | undefined; + /** + *

The table class of the table to be updated. Valid values are STANDARD and + * STANDARD_INFREQUENT_ACCESS.

+ * @public + */ + TableClass?: TableClass | undefined; + /** + *

Indicates whether deletion protection is to be enabled (true) or disabled (false) on + * the table.

+ * @public + */ + DeletionProtectionEnabled?: boolean | undefined; + /** + *

Specifies the consistency mode for a new global table. This parameter is only valid + * when you create a global table by specifying one or more Create actions in the ReplicaUpdates action list.

+ *

You can specify one of the following consistency modes:

+ *
    + *
  • + *

    + * EVENTUAL: Configures a new global table for multi-Region eventual + * consistency. This is the default consistency mode for global tables.

    + *
  • + *
  • + *

    + * STRONG: Configures a new global table for multi-Region strong + * consistency (preview).

    + * + *

    Multi-Region strong consistency (MRSC) is a new DynamoDB global + * tables capability currently available in preview mode. For more information, + * see Global tables multi-Region strong consistency.

    + *
    + *
  • + *
+ *

If you don't specify this parameter, the global table consistency mode defaults to + * EVENTUAL.

+ * @public + */ + MultiRegionConsistency?: MultiRegionConsistency | undefined; + /** + *

Updates the maximum number of read and write units for the specified table in + * on-demand capacity mode. If you use this parameter, you must specify + * MaxReadRequestUnits, MaxWriteRequestUnits, or both.

+ * @public + */ + OnDemandThroughput?: OnDemandThroughput | undefined; + /** + *

Represents the warm throughput (in read units per second and write units per second) + * for updating a table.

+ * @public + */ + WarmThroughput?: WarmThroughput | undefined; +} +/** + *

Represents the output of an UpdateTable operation.

+ * @public + */ +export interface UpdateTableOutput { + /** + *

Represents the properties of the table.

+ * @public + */ + TableDescription?: TableDescription | undefined; +} +/** + *

Represents the auto scaling settings of a global secondary index for a global table + * that will be modified.

+ * @public + */ +export interface GlobalSecondaryIndexAutoScalingUpdate { + /** + *

The name of the global secondary index.

+ * @public + */ + IndexName?: string | undefined; + /** + *

Represents the auto scaling settings to be modified for a global table or global + * secondary index.

+ * @public + */ + ProvisionedWriteCapacityAutoScalingUpdate?: AutoScalingSettingsUpdate | undefined; +} +/** + *

Represents the auto scaling settings of a global secondary index for a replica that + * will be modified.

+ * @public + */ +export interface ReplicaGlobalSecondaryIndexAutoScalingUpdate { + /** + *

The name of the global secondary index.

+ * @public + */ + IndexName?: string | undefined; + /** + *

Represents the auto scaling settings to be modified for a global table or global + * secondary index.

+ * @public + */ + ProvisionedReadCapacityAutoScalingUpdate?: AutoScalingSettingsUpdate | undefined; +} +/** + *

Represents the auto scaling settings of a replica that will be modified.

+ * @public + */ +export interface ReplicaAutoScalingUpdate { + /** + *

The Region where the replica exists.

+ * @public + */ + RegionName: string | undefined; + /** + *

Represents the auto scaling settings of global secondary indexes that will be + * modified.

+ * @public + */ + ReplicaGlobalSecondaryIndexUpdates?: ReplicaGlobalSecondaryIndexAutoScalingUpdate[] | undefined; + /** + *

Represents the auto scaling settings to be modified for a global table or global + * secondary index.

+ * @public + */ + ReplicaProvisionedReadCapacityAutoScalingUpdate?: AutoScalingSettingsUpdate | undefined; +} +/** + * @public + */ +export interface UpdateTableReplicaAutoScalingInput { + /** + *

Represents the auto scaling settings of the global secondary indexes of the replica to + * be updated.

+ * @public + */ + GlobalSecondaryIndexUpdates?: GlobalSecondaryIndexAutoScalingUpdate[] | undefined; + /** + *

The name of the global table to be updated. You can also provide the Amazon Resource Name (ARN) of the + * table in this parameter.

+ * @public + */ + TableName: string | undefined; + /** + *

Represents the auto scaling settings to be modified for a global table or global + * secondary index.

+ * @public + */ + ProvisionedWriteCapacityAutoScalingUpdate?: AutoScalingSettingsUpdate | undefined; + /** + *

Represents the auto scaling settings of replicas of the table that will be + * modified.

+ * @public + */ + ReplicaUpdates?: ReplicaAutoScalingUpdate[] | undefined; +} +/** + * @public + */ +export interface UpdateTableReplicaAutoScalingOutput { + /** + *

Returns information about the auto scaling settings of a table with replicas.

+ * @public + */ + TableAutoScalingDescription?: TableAutoScalingDescription | undefined; +} +/** + *

Represents the settings used to enable or disable Time to Live (TTL) for the specified + * table.

+ * @public + */ +export interface TimeToLiveSpecification { + /** + *

Indicates whether TTL is to be enabled (true) or disabled (false) on the table.

+ * @public + */ + Enabled: boolean | undefined; + /** + *

The name of the TTL attribute used to store the expiration time for items in the + * table.

+ * @public + */ + AttributeName: string | undefined; +} +/** + *

Represents the input of an UpdateTimeToLive operation.

+ * @public + */ +export interface UpdateTimeToLiveInput { + /** + *

The name of the table to be configured. You can also provide the Amazon Resource Name (ARN) of the + * table in this parameter.

+ * @public + */ + TableName: string | undefined; + /** + *

Represents the settings used to enable or disable Time to Live for the specified + * table.

+ * @public + */ + TimeToLiveSpecification: TimeToLiveSpecification | undefined; +} +/** + * @public + */ +export interface UpdateTimeToLiveOutput { + /** + *

Represents the output of an UpdateTimeToLive operation.

+ * @public + */ + TimeToLiveSpecification?: TimeToLiveSpecification | undefined; +} +/** + *

Represents the data for an attribute.

+ *

Each attribute value is described as a name-value pair. The name is the data type, and + * the value is the data itself.

+ *

For more information, see Data Types in the Amazon DynamoDB Developer + * Guide.

+ * @public + */ +export type AttributeValue = AttributeValue.BMember | AttributeValue.BOOLMember | AttributeValue.BSMember | AttributeValue.LMember | AttributeValue.MMember | AttributeValue.NMember | AttributeValue.NSMember | AttributeValue.NULLMember | AttributeValue.SMember | AttributeValue.SSMember | AttributeValue.$UnknownMember; +/** + * @public + */ +export declare namespace AttributeValue { + /** + *

An attribute of type String. For example:

+ *

+ * "S": "Hello" + *

+ * @public + */ + interface SMember { + S: string; + N?: never; + B?: never; + SS?: never; + NS?: never; + BS?: never; + M?: never; + L?: never; + NULL?: never; + BOOL?: never; + $unknown?: never; + } + /** + *

An attribute of type Number. For example:

+ *

+ * "N": "123.45" + *

+ *

Numbers are sent across the network to DynamoDB as strings, to maximize compatibility + * across languages and libraries. However, DynamoDB treats them as number type attributes + * for mathematical operations.

+ * @public + */ + interface NMember { + S?: never; + N: string; + B?: never; + SS?: never; + NS?: never; + BS?: never; + M?: never; + L?: never; + NULL?: never; + BOOL?: never; + $unknown?: never; + } + /** + *

An attribute of type Binary. For example:

+ *

+ * "B": "dGhpcyB0ZXh0IGlzIGJhc2U2NC1lbmNvZGVk" + *

+ * @public + */ + interface BMember { + S?: never; + N?: never; + B: Uint8Array; + SS?: never; + NS?: never; + BS?: never; + M?: never; + L?: never; + NULL?: never; + BOOL?: never; + $unknown?: never; + } + /** + *

An attribute of type String Set. For example:

+ *

+ * "SS": ["Giraffe", "Hippo" ,"Zebra"] + *

+ * @public + */ + interface SSMember { + S?: never; + N?: never; + B?: never; + SS: string[]; + NS?: never; + BS?: never; + M?: never; + L?: never; + NULL?: never; + BOOL?: never; + $unknown?: never; + } + /** + *

An attribute of type Number Set. For example:

+ *

+ * "NS": ["42.2", "-19", "7.5", "3.14"] + *

+ *

Numbers are sent across the network to DynamoDB as strings, to maximize compatibility + * across languages and libraries. However, DynamoDB treats them as number type attributes + * for mathematical operations.

+ * @public + */ + interface NSMember { + S?: never; + N?: never; + B?: never; + SS?: never; + NS: string[]; + BS?: never; + M?: never; + L?: never; + NULL?: never; + BOOL?: never; + $unknown?: never; + } + /** + *

An attribute of type Binary Set. For example:

+ *

+ * "BS": ["U3Vubnk=", "UmFpbnk=", "U25vd3k="] + *

+ * @public + */ + interface BSMember { + S?: never; + N?: never; + B?: never; + SS?: never; + NS?: never; + BS: Uint8Array[]; + M?: never; + L?: never; + NULL?: never; + BOOL?: never; + $unknown?: never; + } + /** + *

An attribute of type Map. For example:

+ *

+ * "M": \{"Name": \{"S": "Joe"\}, "Age": \{"N": "35"\}\} + *

+ * @public + */ + interface MMember { + S?: never; + N?: never; + B?: never; + SS?: never; + NS?: never; + BS?: never; + M: Record; + L?: never; + NULL?: never; + BOOL?: never; + $unknown?: never; + } + /** + *

An attribute of type List. For example:

+ *

+ * "L": [ \{"S": "Cookies"\} , \{"S": "Coffee"\}, \{"N": "3.14159"\}] + *

+ * @public + */ + interface LMember { + S?: never; + N?: never; + B?: never; + SS?: never; + NS?: never; + BS?: never; + M?: never; + L: AttributeValue[]; + NULL?: never; + BOOL?: never; + $unknown?: never; + } + /** + *

An attribute of type Null. For example:

+ *

+ * "NULL": true + *

+ * @public + */ + interface NULLMember { + S?: never; + N?: never; + B?: never; + SS?: never; + NS?: never; + BS?: never; + M?: never; + L?: never; + NULL: boolean; + BOOL?: never; + $unknown?: never; + } + /** + *

An attribute of type Boolean. For example:

+ *

+ * "BOOL": true + *

+ * @public + */ + interface BOOLMember { + S?: never; + N?: never; + B?: never; + SS?: never; + NS?: never; + BS?: never; + M?: never; + L?: never; + NULL?: never; + BOOL: boolean; + $unknown?: never; + } + /** + * @public + */ + interface $UnknownMember { + S?: never; + N?: never; + B?: never; + SS?: never; + NS?: never; + BS?: never; + M?: never; + L?: never; + NULL?: never; + BOOL?: never; + $unknown: [string, any]; + } + interface Visitor { + S: (value: string) => T; + N: (value: string) => T; + B: (value: Uint8Array) => T; + SS: (value: string[]) => T; + NS: (value: string[]) => T; + BS: (value: Uint8Array[]) => T; + M: (value: Record) => T; + L: (value: AttributeValue[]) => T; + NULL: (value: boolean) => T; + BOOL: (value: boolean) => T; + _: (name: string, value: any) => T; + } + const visit: (value: AttributeValue, visitor: Visitor) => T; +} +/** + *

For the UpdateItem operation, represents the attributes to be modified, + * the action to perform on each, and the new value for each.

+ * + *

You cannot use UpdateItem to update any primary key attributes. + * Instead, you will need to delete the item, and then use PutItem to + * create a new item with new attributes.

+ *
+ *

Attribute values cannot be null; string and binary type attributes must have lengths + * greater than zero; and set type attributes must not be empty. Requests with empty values + * will be rejected with a ValidationException exception.

+ * @public + */ +export interface AttributeValueUpdate { + /** + *

Represents the data for an attribute.

+ *

Each attribute value is described as a name-value pair. The name is the data type, and + * the value is the data itself.

+ *

For more information, see Data Types in the Amazon DynamoDB Developer Guide. + *

+ * @public + */ + Value?: AttributeValue | undefined; + /** + *

Specifies how to perform the update. Valid values are PUT (default), + * DELETE, and ADD. The behavior depends on whether the + * specified primary key already exists in the table.

+ *

+ * If an item with the specified Key is found in + * the table: + *

+ *
    + *
  • + *

    + * PUT - Adds the specified attribute to the item. If the attribute + * already exists, it is replaced by the new value.

    + *
  • + *
  • + *

    + * DELETE - If no value is specified, the attribute and its value are + * removed from the item. The data type of the specified value must match the + * existing value's data type.

    + *

    If a set of values is specified, then those values are + * subtracted from the old set. For example, if the attribute value was the set + * [a,b,c] and the DELETE action specified + * [a,c], then the final attribute value would be + * [b]. Specifying an empty set is an error.

    + *
  • + *
  • + *

    + * ADD - If the attribute does not already exist, then the attribute + * and its values are added to the item. If the attribute does exist, then the + * behavior of ADD depends on the data type of the attribute:

    + *
      + *
    • + *

      If the existing attribute is a number, and if Value is + * also a number, then the Value is mathematically added to + * the existing attribute. If Value is a negative number, then + * it is subtracted from the existing attribute.

      + * + *

      If you use ADD to increment or decrement a number + * value for an item that doesn't exist before the update, DynamoDB + * uses 0 as the initial value.

      + *

      In addition, if you use ADD to update an existing + * item, and intend to increment or decrement an attribute value which + * does not yet exist, DynamoDB uses 0 as the initial + * value. For example, suppose that the item you want to update does + * not yet have an attribute named itemcount, but + * you decide to ADD the number 3 to this + * attribute anyway, even though it currently does not exist. DynamoDB + * will create the itemcount attribute, set its + * initial value to 0, and finally add 3 to + * it. The result will be a new itemcount + * attribute in the item, with a value of 3.

      + *
      + *
    • + *
    • + *

      If the existing data type is a set, and if the Value is + * also a set, then the Value is added to the existing set. + * (This is a set operation, not mathematical + * addition.) For example, if the attribute value was the set + * [1,2], and the ADD action specified + * [3], then the final attribute value would be + * [1,2,3]. An error occurs if an Add action is specified + * for a set attribute and the attribute type specified does not match the + * existing set type.

      + *

      Both sets must have the same primitive data type. For example, if the + * existing data type is a set of strings, the Value must also + * be a set of strings. The same holds true for number sets and binary + * sets.

      + *
    • + *
    + *

    This action is only valid for an existing attribute whose data type is number + * or is a set. Do not use ADD for any other data types.

    + *
  • + *
+ *

+ * If no item with the specified Key is + * found: + *

+ *
    + *
  • + *

    + * PUT - DynamoDB creates a new item with the specified primary key, + * and then adds the attribute.

    + *
  • + *
  • + *

    + * DELETE - Nothing happens; there is no attribute to delete.

    + *
  • + *
  • + *

    + * ADD - DynamoDB creates a new item with the supplied primary key and + * number (or set) for the attribute value. The only data types allowed are number, + * number set, string set or binary set.

    + *
  • + *
+ * @public + */ + Action?: AttributeAction | undefined; +} +/** + *

An error associated with a statement in a PartiQL batch that was run.

+ * @public + */ +export interface BatchStatementError { + /** + *

The error code associated with the failed PartiQL batch statement.

+ * @public + */ + Code?: BatchStatementErrorCodeEnum | undefined; + /** + *

The error message associated with the PartiQL batch response.

+ * @public + */ + Message?: string | undefined; + /** + *

The item which caused the condition check to fail. This will be set if + * ReturnValuesOnConditionCheckFailure is specified as ALL_OLD.

+ * @public + */ + Item?: Record | undefined; +} +/** + *

A PartiQL batch statement request.

+ * @public + */ +export interface BatchStatementRequest { + /** + *

A valid PartiQL statement.

+ * @public + */ + Statement: string | undefined; + /** + *

The parameters associated with a PartiQL statement in the batch request.

+ * @public + */ + Parameters?: AttributeValue[] | undefined; + /** + *

The read consistency of the PartiQL batch request.

+ * @public + */ + ConsistentRead?: boolean | undefined; + /** + *

An optional parameter that returns the item attributes for a PartiQL batch request + * operation that failed a condition check.

+ *

There is no additional cost associated with requesting a return value aside from the + * small network and processing overhead of receiving a larger response. No read capacity + * units are consumed.

+ * @public + */ + ReturnValuesOnConditionCheckFailure?: ReturnValuesOnConditionCheckFailure | undefined; +} +/** + *

An ordered list of errors for each item in the request which caused the transaction to + * get cancelled. The values of the list are ordered according to the ordering of the + * TransactWriteItems request parameter. If no error occurred for the + * associated item an error with a Null code and Null message will be present.

+ * @public + */ +export interface CancellationReason { + /** + *

Item in the request which caused the transaction to get cancelled.

+ * @public + */ + Item?: Record | undefined; + /** + *

Status code for the result of the cancelled transaction.

+ * @public + */ + Code?: string | undefined; + /** + *

Cancellation reason message description.

+ * @public + */ + Message?: string | undefined; +} +/** + *

Represents the selection criteria for a Query or Scan + * operation:

+ *
    + *
  • + *

    For a Query operation, Condition is used for + * specifying the KeyConditions to use when querying a table or an + * index. For KeyConditions, only the following comparison operators + * are supported:

    + *

    + * EQ | LE | LT | GE | GT | BEGINS_WITH | BETWEEN + *

    + *

    + * Condition is also used in a QueryFilter, which + * evaluates the query results and returns only the desired values.

    + *
  • + *
  • + *

    For a Scan operation, Condition is used in a + * ScanFilter, which evaluates the scan results and returns only + * the desired values.

    + *
  • + *
+ * @public + */ +export interface Condition { + /** + *

One or more values to evaluate against the supplied attribute. The number of values in + * the list depends on the ComparisonOperator being used.

+ *

For type Number, value comparisons are numeric.

+ *

String value comparisons for greater than, equals, or less than are based on ASCII + * character code values. For example, a is greater than A, and + * a is greater than B. For a list of code values, see http://en.wikipedia.org/wiki/ASCII#ASCII_printable_characters.

+ *

For Binary, DynamoDB treats each byte of the binary data as unsigned when it + * compares binary values.

+ * @public + */ + AttributeValueList?: AttributeValue[] | undefined; + /** + *

A comparator for evaluating attributes. For example, equals, greater than, less than, + * etc.

+ *

The following comparison operators are available:

+ *

+ * EQ | NE | LE | LT | GE | GT | NOT_NULL | NULL | CONTAINS | NOT_CONTAINS | + * BEGINS_WITH | IN | BETWEEN + *

+ *

The following are descriptions of each comparison operator.

+ *
    + *
  • + *

    + * EQ : Equal. EQ is supported for all data types, + * including lists and maps.

    + *

    + * AttributeValueList can contain only one AttributeValue + * element of type String, Number, Binary, String Set, Number Set, or Binary Set. + * If an item contains an AttributeValue element of a different type + * than the one provided in the request, the value does not match. For example, + * \{"S":"6"\} does not equal \{"N":"6"\}. Also, + * \{"N":"6"\} does not equal \{"NS":["6", "2", + * "1"]\}.

    + *

    + *
  • + *
  • + *

    + * NE : Not equal. NE is supported for all data types, + * including lists and maps.

    + *

    + * AttributeValueList can contain only one AttributeValue + * of type String, Number, Binary, String Set, Number Set, or Binary Set. If an + * item contains an AttributeValue of a different type than the one + * provided in the request, the value does not match. For example, + * \{"S":"6"\} does not equal \{"N":"6"\}. Also, + * \{"N":"6"\} does not equal \{"NS":["6", "2", + * "1"]\}.

    + *

    + *
  • + *
  • + *

    + * LE : Less than or equal.

    + *

    + * AttributeValueList can contain only one AttributeValue + * element of type String, Number, or Binary (not a set type). If an item contains + * an AttributeValue element of a different type than the one provided + * in the request, the value does not match. For example, \{"S":"6"\} + * does not equal \{"N":"6"\}. Also, \{"N":"6"\} does not + * compare to \{"NS":["6", "2", "1"]\}.

    + *

    + *
  • + *
  • + *

    + * LT : Less than.

    + *

    + * AttributeValueList can contain only one AttributeValue + * of type String, Number, or Binary (not a set type). If an item contains an + * AttributeValue element of a different type than the one + * provided in the request, the value does not match. For example, + * \{"S":"6"\} does not equal \{"N":"6"\}. Also, + * \{"N":"6"\} does not compare to \{"NS":["6", "2", + * "1"]\}.

    + *

    + *
  • + *
  • + *

    + * GE : Greater than or equal.

    + *

    + * AttributeValueList can contain only one AttributeValue + * element of type String, Number, or Binary (not a set type). If an item contains + * an AttributeValue element of a different type than the one provided + * in the request, the value does not match. For example, \{"S":"6"\} + * does not equal \{"N":"6"\}. Also, \{"N":"6"\} does not + * compare to \{"NS":["6", "2", "1"]\}.

    + *

    + *
  • + *
  • + *

    + * GT : Greater than.

    + *

    + * AttributeValueList can contain only one AttributeValue + * element of type String, Number, or Binary (not a set type). If an item contains + * an AttributeValue element of a different type than the one provided + * in the request, the value does not match. For example, \{"S":"6"\} + * does not equal \{"N":"6"\}. Also, \{"N":"6"\} does not + * compare to \{"NS":["6", "2", "1"]\}.

    + *

    + *
  • + *
  • + *

    + * NOT_NULL : The attribute exists. NOT_NULL is supported + * for all data types, including lists and maps.

    + * + *

    This operator tests for the existence of an attribute, not its data type. + * If the data type of attribute "a" is null, and you evaluate it + * using NOT_NULL, the result is a Boolean true. This + * result is because the attribute "a" exists; its data type is + * not relevant to the NOT_NULL comparison operator.

    + *
    + *
  • + *
  • + *

    + * NULL : The attribute does not exist. NULL is supported + * for all data types, including lists and maps.

    + * + *

    This operator tests for the nonexistence of an attribute, not its data + * type. If the data type of attribute "a" is null, and you + * evaluate it using NULL, the result is a Boolean + * false. This is because the attribute "a" + * exists; its data type is not relevant to the NULL comparison + * operator.

    + *
    + *
  • + *
  • + *

    + * CONTAINS : Checks for a subsequence, or value in a set.

    + *

    + * AttributeValueList can contain only one AttributeValue + * element of type String, Number, or Binary (not a set type). If the target + * attribute of the comparison is of type String, then the operator checks for a + * substring match. If the target attribute of the comparison is of type Binary, + * then the operator looks for a subsequence of the target that matches the input. + * If the target attribute of the comparison is a set ("SS", + * "NS", or "BS"), then the operator evaluates to + * true if it finds an exact match with any member of the set.

    + *

    CONTAINS is supported for lists: When evaluating "a CONTAINS b", + * "a" can be a list; however, "b" cannot be a set, a + * map, or a list.

    + *
  • + *
  • + *

    + * NOT_CONTAINS : Checks for absence of a subsequence, or absence of a + * value in a set.

    + *

    + * AttributeValueList can contain only one AttributeValue + * element of type String, Number, or Binary (not a set type). If the target + * attribute of the comparison is a String, then the operator checks for the + * absence of a substring match. If the target attribute of the comparison is + * Binary, then the operator checks for the absence of a subsequence of the target + * that matches the input. If the target attribute of the comparison is a set + * ("SS", "NS", or "BS"), then the + * operator evaluates to true if it does not find an exact + * match with any member of the set.

    + *

    NOT_CONTAINS is supported for lists: When evaluating "a NOT CONTAINS + * b", "a" can be a list; however, "b" cannot + * be a set, a map, or a list.

    + *
  • + *
  • + *

    + * BEGINS_WITH : Checks for a prefix.

    + *

    + * AttributeValueList can contain only one AttributeValue + * of type String or Binary (not a Number or a set type). The target attribute of + * the comparison must be of type String or Binary (not a Number or a set + * type).

    + *

    + *
  • + *
  • + *

    + * IN : Checks for matching elements in a list.

    + *

    + * AttributeValueList can contain one or more + * AttributeValue elements of type String, Number, or Binary. + * These attributes are compared against an existing attribute of an item. If any + * elements of the input are equal to the item attribute, the expression evaluates + * to true.

    + *
  • + *
  • + *

    + * BETWEEN : Greater than or equal to the first value, and less than + * or equal to the second value.

    + *

    + * AttributeValueList must contain two AttributeValue + * elements of the same type, either String, Number, or Binary (not a set type). A + * target attribute matches if the target value is greater than, or equal to, the + * first element and less than, or equal to, the second element. If an item + * contains an AttributeValue element of a different type than the one + * provided in the request, the value does not match. For example, + * \{"S":"6"\} does not compare to \{"N":"6"\}. Also, + * \{"N":"6"\} does not compare to \{"NS":["6", "2", + * "1"]\} + *

    + *
  • + *
+ *

For usage examples of AttributeValueList and + * ComparisonOperator, see Legacy + * Conditional Parameters in the Amazon DynamoDB Developer + * Guide.

+ * @public + */ + ComparisonOperator: ComparisonOperator | undefined; +} +/** + *

A condition specified in the operation failed to be evaluated.

+ * @public + */ +export declare class ConditionalCheckFailedException extends __BaseException { + readonly name: "ConditionalCheckFailedException"; + readonly $fault: "client"; + /** + *

Item which caused the ConditionalCheckFailedException.

+ * @public + */ + Item?: Record | undefined; + /** + * @internal + */ + constructor(opts: __ExceptionOptionType); +} +/** + *

Represents a request to perform a DeleteItem operation on an item.

+ * @public + */ +export interface DeleteRequest { + /** + *

A map of attribute name to attribute values, representing the primary key of the item + * to delete. All of the table's primary key attributes must be specified, and their data + * types must match those of the table's key schema.

+ * @public + */ + Key: Record | undefined; +} +/** + * @public + */ +export interface ExecuteStatementInput { + /** + *

The PartiQL statement representing the operation to run.

+ * @public + */ + Statement: string | undefined; + /** + *

The parameters for the PartiQL statement, if any.

+ * @public + */ + Parameters?: AttributeValue[] | undefined; + /** + *

The consistency of a read operation. If set to true, then a strongly + * consistent read is used; otherwise, an eventually consistent read is used.

+ * @public + */ + ConsistentRead?: boolean | undefined; + /** + *

Set this value to get remaining results, if NextToken was returned in the + * statement response.

+ * @public + */ + NextToken?: string | undefined; + /** + *

Determines the level of detail about either provisioned or on-demand throughput + * consumption that is returned in the response:

+ *
    + *
  • + *

    + * INDEXES - The response includes the aggregate + * ConsumedCapacity for the operation, together with + * ConsumedCapacity for each table and secondary index that was + * accessed.

    + *

    Note that some operations, such as GetItem and + * BatchGetItem, do not access any indexes at all. In these cases, + * specifying INDEXES will only return ConsumedCapacity + * information for table(s).

    + *
  • + *
  • + *

    + * TOTAL - The response includes only the aggregate + * ConsumedCapacity for the operation.

    + *
  • + *
  • + *

    + * NONE - No ConsumedCapacity details are included in the + * response.

    + *
  • + *
+ * @public + */ + ReturnConsumedCapacity?: ReturnConsumedCapacity | undefined; + /** + *

The maximum number of items to evaluate (not necessarily the number of matching + * items). If DynamoDB processes the number of items up to the limit while processing the + * results, it stops the operation and returns the matching values up to that point, along + * with a key in LastEvaluatedKey to apply in a subsequent operation so you + * can pick up where you left off. Also, if the processed dataset size exceeds 1 MB before + * DynamoDB reaches this limit, it stops the operation and returns the matching values up + * to the limit, and a key in LastEvaluatedKey to apply in a subsequent + * operation to continue the operation.

+ * @public + */ + Limit?: number | undefined; + /** + *

An optional parameter that returns the item attributes for an + * ExecuteStatement operation that failed a condition check.

+ *

There is no additional cost associated with requesting a return value aside from the + * small network and processing overhead of receiving a larger response. No read capacity + * units are consumed.

+ * @public + */ + ReturnValuesOnConditionCheckFailure?: ReturnValuesOnConditionCheckFailure | undefined; +} +/** + *

Specifies an item and related attribute values to retrieve in a + * TransactGetItem object.

+ * @public + */ +export interface Get { + /** + *

A map of attribute names to AttributeValue objects that specifies the + * primary key of the item to retrieve.

+ * @public + */ + Key: Record | undefined; + /** + *

The name of the table from which to retrieve the specified item. You can also provide + * the Amazon Resource Name (ARN) of the table in this parameter.

+ * @public + */ + TableName: string | undefined; + /** + *

A string that identifies one or more attributes of the specified item to retrieve from + * the table. The attributes in the expression must be separated by commas. If no attribute + * names are specified, then all attributes of the specified item are returned. If any of + * the requested attributes are not found, they do not appear in the result.

+ * @public + */ + ProjectionExpression?: string | undefined; + /** + *

One or more substitution tokens for attribute names in the ProjectionExpression + * parameter.

+ * @public + */ + ExpressionAttributeNames?: Record | undefined; +} +/** + *

Represents the input of a GetItem operation.

+ * @public + */ +export interface GetItemInput { + /** + *

The name of the table containing the requested item. You can also provide the + * Amazon Resource Name (ARN) of the table in this parameter.

+ * @public + */ + TableName: string | undefined; + /** + *

A map of attribute names to AttributeValue objects, representing the + * primary key of the item to retrieve.

+ *

For the primary key, you must provide all of the attributes. For example, with a + * simple primary key, you only need to provide a value for the partition key. For a + * composite primary key, you must provide values for both the partition key and the sort + * key.

+ * @public + */ + Key: Record | undefined; + /** + *

This is a legacy parameter. Use ProjectionExpression instead. For more + * information, see AttributesToGet in the Amazon DynamoDB Developer + * Guide.

+ * @public + */ + AttributesToGet?: string[] | undefined; + /** + *

Determines the read consistency model: If set to true, then the operation + * uses strongly consistent reads; otherwise, the operation uses eventually consistent + * reads.

+ * @public + */ + ConsistentRead?: boolean | undefined; + /** + *

Determines the level of detail about either provisioned or on-demand throughput + * consumption that is returned in the response:

+ *
    + *
  • + *

    + * INDEXES - The response includes the aggregate + * ConsumedCapacity for the operation, together with + * ConsumedCapacity for each table and secondary index that was + * accessed.

    + *

    Note that some operations, such as GetItem and + * BatchGetItem, do not access any indexes at all. In these cases, + * specifying INDEXES will only return ConsumedCapacity + * information for table(s).

    + *
  • + *
  • + *

    + * TOTAL - The response includes only the aggregate + * ConsumedCapacity for the operation.

    + *
  • + *
  • + *

    + * NONE - No ConsumedCapacity details are included in the + * response.

    + *
  • + *
+ * @public + */ + ReturnConsumedCapacity?: ReturnConsumedCapacity | undefined; + /** + *

A string that identifies one or more attributes to retrieve from the table. These + * attributes can include scalars, sets, or elements of a JSON document. The attributes in + * the expression must be separated by commas.

+ *

If no attribute names are specified, then all attributes are returned. If any of the + * requested attributes are not found, they do not appear in the result.

+ *

For more information, see Specifying Item Attributes in the Amazon DynamoDB Developer + * Guide.

+ * @public + */ + ProjectionExpression?: string | undefined; + /** + *

One or more substitution tokens for attribute names in an expression. The following + * are some use cases for using ExpressionAttributeNames:

+ *
    + *
  • + *

    To access an attribute whose name conflicts with a DynamoDB reserved + * word.

    + *
  • + *
  • + *

    To create a placeholder for repeating occurrences of an attribute name in an + * expression.

    + *
  • + *
  • + *

    To prevent special characters in an attribute name from being misinterpreted + * in an expression.

    + *
  • + *
+ *

Use the # character in an expression to dereference + * an attribute name. For example, consider the following attribute name:

+ *
    + *
  • + *

    + * Percentile + *

    + *
  • + *
+ *

The name of this attribute conflicts with a reserved word, so it cannot be used + * directly in an expression. (For the complete list of reserved words, see Reserved Words in the Amazon DynamoDB Developer + * Guide). To work around this, you could specify the following for + * ExpressionAttributeNames:

+ *
    + *
  • + *

    + * \{"#P":"Percentile"\} + *

    + *
  • + *
+ *

You could then use this substitution in an expression, as in this example:

+ *
    + *
  • + *

    + * #P = :val + *

    + *
  • + *
+ * + *

Tokens that begin with the : character are + * expression attribute values, which are placeholders for the + * actual value at runtime.

+ *
+ *

For more information on expression attribute names, see Specifying Item Attributes in the Amazon DynamoDB Developer + * Guide.

+ * @public + */ + ExpressionAttributeNames?: Record | undefined; +} +/** + *

Represents the output of a GetItem operation.

+ * @public + */ +export interface GetItemOutput { + /** + *

A map of attribute names to AttributeValue objects, as specified by + * ProjectionExpression.

+ * @public + */ + Item?: Record | undefined; + /** + *

The capacity units consumed by the GetItem operation. The data returned + * includes the total provisioned throughput consumed, along with statistics for the table + * and any indexes involved in the operation. ConsumedCapacity is only + * returned if the ReturnConsumedCapacity parameter was specified. For more + * information, see Capacity unit consumption for read operations in the Amazon + * DynamoDB Developer Guide.

+ * @public + */ + ConsumedCapacity?: ConsumedCapacity | undefined; +} +/** + *

Information about item collections, if any, that were affected by the operation. + * ItemCollectionMetrics is only returned if the request asked for it. If + * the table does not have any local secondary indexes, this information is not returned in + * the response.

+ * @public + */ +export interface ItemCollectionMetrics { + /** + *

The partition key value of the item collection. This value is the same as the + * partition key value of the item.

+ * @public + */ + ItemCollectionKey?: Record | undefined; + /** + *

An estimate of item collection size, in gigabytes. This value is a two-element array + * containing a lower bound and an upper bound for the estimate. The estimate includes the + * size of all the items in the table, plus the size of all attributes projected into all + * of the local secondary indexes on that table. Use this estimate to measure whether a + * local secondary index is approaching its size limit.

+ *

The estimate is subject to change over time; therefore, do not rely on the precision + * or accuracy of the estimate.

+ * @public + */ + SizeEstimateRangeGB?: number[] | undefined; +} +/** + *

Details for the requested item.

+ * @public + */ +export interface ItemResponse { + /** + *

Map of attribute data consisting of the data type and attribute value.

+ * @public + */ + Item?: Record | undefined; +} +/** + *

Represents a PartiQL statement that uses parameters.

+ * @public + */ +export interface ParameterizedStatement { + /** + *

A PartiQL statement that uses parameters.

+ * @public + */ + Statement: string | undefined; + /** + *

The parameter values.

+ * @public + */ + Parameters?: AttributeValue[] | undefined; + /** + *

An optional parameter that returns the item attributes for a PartiQL + * ParameterizedStatement operation that failed a condition check.

+ *

There is no additional cost associated with requesting a return value aside from the + * small network and processing overhead of receiving a larger response. No read capacity + * units are consumed.

+ * @public + */ + ReturnValuesOnConditionCheckFailure?: ReturnValuesOnConditionCheckFailure | undefined; +} +/** + *

Represents a request to perform a PutItem operation on an item.

+ * @public + */ +export interface PutRequest { + /** + *

A map of attribute name to attribute values, representing the primary key of an item + * to be processed by PutItem. All of the table's primary key attributes must + * be specified, and their data types must match those of the table's key schema. If any + * attributes are present in the item that are part of an index key schema for the table, + * their types must match the index key schema.

+ * @public + */ + Item: Record | undefined; +} +/** + *

Represents a set of primary keys and, for each key, the attributes to retrieve from + * the table.

+ *

For each primary key, you must provide all of the key attributes. + * For example, with a simple primary key, you only need to provide the partition key. For + * a composite primary key, you must provide both the partition key + * and the sort key.

+ * @public + */ +export interface KeysAndAttributes { + /** + *

The primary key attribute values that define the items and the attributes associated + * with the items.

+ * @public + */ + Keys: Record[] | undefined; + /** + *

This is a legacy parameter. Use ProjectionExpression instead. For more + * information, see Legacy + * Conditional Parameters in the Amazon DynamoDB Developer + * Guide.

+ * @public + */ + AttributesToGet?: string[] | undefined; + /** + *

The consistency of a read operation. If set to true, then a strongly + * consistent read is used; otherwise, an eventually consistent read is used.

+ * @public + */ + ConsistentRead?: boolean | undefined; + /** + *

A string that identifies one or more attributes to retrieve from the table. These + * attributes can include scalars, sets, or elements of a JSON document. The attributes in + * the ProjectionExpression must be separated by commas.

+ *

If no attribute names are specified, then all attributes will be returned. If any of + * the requested attributes are not found, they will not appear in the result.

+ *

For more information, see Accessing Item Attributes in the Amazon DynamoDB Developer + * Guide.

+ * @public + */ + ProjectionExpression?: string | undefined; + /** + *

One or more substitution tokens for attribute names in an expression. The following + * are some use cases for using ExpressionAttributeNames:

+ *
    + *
  • + *

    To access an attribute whose name conflicts with a DynamoDB reserved + * word.

    + *
  • + *
  • + *

    To create a placeholder for repeating occurrences of an attribute name in an + * expression.

    + *
  • + *
  • + *

    To prevent special characters in an attribute name from being misinterpreted + * in an expression.

    + *
  • + *
+ *

Use the # character in an expression to dereference + * an attribute name. For example, consider the following attribute name:

+ *
    + *
  • + *

    + * Percentile + *

    + *
  • + *
+ *

The name of this attribute conflicts with a reserved word, so it cannot be used + * directly in an expression. (For the complete list of reserved words, see Reserved Words in the Amazon DynamoDB Developer + * Guide). To work around this, you could specify the following for + * ExpressionAttributeNames:

+ *
    + *
  • + *

    + * \{"#P":"Percentile"\} + *

    + *
  • + *
+ *

You could then use this substitution in an expression, as in this example:

+ *
    + *
  • + *

    + * #P = :val + *

    + *
  • + *
+ * + *

Tokens that begin with the : character are + * expression attribute values, which are placeholders for the + * actual value at runtime.

+ *
+ *

For more information on expression attribute names, see Accessing Item Attributes in the Amazon DynamoDB Developer + * Guide.

+ * @public + */ + ExpressionAttributeNames?: Record | undefined; +} +/** + *

Specifies an item to be retrieved as part of the transaction.

+ * @public + */ +export interface TransactGetItem { + /** + *

Contains the primary key that identifies the item to get, together with the name of + * the table that contains the item, and optionally the specific attributes of the item to + * retrieve.

+ * @public + */ + Get: Get | undefined; +} +/** + * @public + */ +export interface BatchExecuteStatementInput { + /** + *

The list of PartiQL statements representing the batch to run.

+ * @public + */ + Statements: BatchStatementRequest[] | undefined; + /** + *

Determines the level of detail about either provisioned or on-demand throughput + * consumption that is returned in the response:

+ *
    + *
  • + *

    + * INDEXES - The response includes the aggregate + * ConsumedCapacity for the operation, together with + * ConsumedCapacity for each table and secondary index that was + * accessed.

    + *

    Note that some operations, such as GetItem and + * BatchGetItem, do not access any indexes at all. In these cases, + * specifying INDEXES will only return ConsumedCapacity + * information for table(s).

    + *
  • + *
  • + *

    + * TOTAL - The response includes only the aggregate + * ConsumedCapacity for the operation.

    + *
  • + *
  • + *

    + * NONE - No ConsumedCapacity details are included in the + * response.

    + *
  • + *
+ * @public + */ + ReturnConsumedCapacity?: ReturnConsumedCapacity | undefined; +} +/** + * @public + */ +export interface ExecuteTransactionInput { + /** + *

The list of PartiQL statements representing the transaction to run.

+ * @public + */ + TransactStatements: ParameterizedStatement[] | undefined; + /** + *

Set this value to get remaining results, if NextToken was returned in the + * statement response.

+ * @public + */ + ClientRequestToken?: string | undefined; + /** + *

Determines the level of detail about either provisioned or on-demand throughput + * consumption that is returned in the response. For more information, see TransactGetItems and TransactWriteItems.

+ * @public + */ + ReturnConsumedCapacity?: ReturnConsumedCapacity | undefined; +} +/** + * @public + */ +export interface ExecuteTransactionOutput { + /** + *

The response to a PartiQL transaction.

+ * @public + */ + Responses?: ItemResponse[] | undefined; + /** + *

The capacity units consumed by the entire operation. The values of the list are + * ordered according to the ordering of the statements.

+ * @public + */ + ConsumedCapacity?: ConsumedCapacity[] | undefined; +} +/** + * @public + */ +export interface TransactGetItemsOutput { + /** + *

If the ReturnConsumedCapacity value was TOTAL, this + * is an array of ConsumedCapacity objects, one for each table addressed by + * TransactGetItem objects in the TransactItems + * parameter. These ConsumedCapacity objects report the read-capacity units + * consumed by the TransactGetItems call in that table.

+ * @public + */ + ConsumedCapacity?: ConsumedCapacity[] | undefined; + /** + *

An ordered array of up to 100 ItemResponse objects, each of which + * corresponds to the TransactGetItem object in the same position in the + * TransactItems array. Each ItemResponse object + * contains a Map of the name-value pairs that are the projected attributes of the + * requested item.

+ *

If a requested item could not be retrieved, the corresponding + * ItemResponse object is Null, or if the requested item has no projected + * attributes, the corresponding ItemResponse object is an empty Map.

+ * @public + */ + Responses?: ItemResponse[] | undefined; +} +/** + *

The entire transaction request was canceled.

+ *

DynamoDB cancels a TransactWriteItems request under the following + * circumstances:

+ *
    + *
  • + *

    A condition in one of the condition expressions is not met.

    + *
  • + *
  • + *

    A table in the TransactWriteItems request is in a different + * account or region.

    + *
  • + *
  • + *

    More than one action in the TransactWriteItems operation + * targets the same item.

    + *
  • + *
  • + *

    There is insufficient provisioned capacity for the transaction to be + * completed.

    + *
  • + *
  • + *

    An item size becomes too large (larger than 400 KB), or a local secondary + * index (LSI) becomes too large, or a similar validation error occurs because of + * changes made by the transaction.

    + *
  • + *
  • + *

    There is a user error, such as an invalid data format.

    + *
  • + *
  • + *

    + * There is an ongoing TransactWriteItems operation that conflicts with a concurrent + * TransactWriteItems request. In this case the TransactWriteItems operation + * fails with a TransactionCanceledException. + *

    + *
  • + *
+ *

DynamoDB cancels a TransactGetItems request under the + * following circumstances:

+ *
    + *
  • + *

    There is an ongoing TransactGetItems operation that conflicts + * with a concurrent PutItem, UpdateItem, + * DeleteItem or TransactWriteItems request. In this + * case the TransactGetItems operation fails with a + * TransactionCanceledException.

    + *
  • + *
  • + *

    A table in the TransactGetItems request is in a different + * account or region.

    + *
  • + *
  • + *

    There is insufficient provisioned capacity for the transaction to be + * completed.

    + *
  • + *
  • + *

    There is a user error, such as an invalid data format.

    + *
  • + *
+ * + *

If using Java, DynamoDB lists the cancellation reasons on the + * CancellationReasons property. This property is not set for other + * languages. Transaction cancellation reasons are ordered in the order of requested + * items, if an item has no error it will have None code and + * Null message.

+ *
+ *

Cancellation reason codes and possible error messages:

+ *
    + *
  • + *

    No Errors:

    + *
      + *
    • + *

      Code: None + *

      + *
    • + *
    • + *

      Message: null + *

      + *
    • + *
    + *
  • + *
  • + *

    Conditional Check Failed:

    + *
      + *
    • + *

      Code: ConditionalCheckFailed + *

      + *
    • + *
    • + *

      Message: The conditional request failed.

      + *
    • + *
    + *
  • + *
  • + *

    Item Collection Size Limit Exceeded:

    + *
      + *
    • + *

      Code: ItemCollectionSizeLimitExceeded + *

      + *
    • + *
    • + *

      Message: Collection size exceeded.

      + *
    • + *
    + *
  • + *
  • + *

    Transaction Conflict:

    + *
      + *
    • + *

      Code: TransactionConflict + *

      + *
    • + *
    • + *

      Message: Transaction is ongoing for the item.

      + *
    • + *
    + *
  • + *
  • + *

    Provisioned Throughput Exceeded:

    + *
      + *
    • + *

      Code: ProvisionedThroughputExceeded + *

      + *
    • + *
    • + *

      Messages:

      + *
        + *
      • + *

        The level of configured provisioned throughput for the + * table was exceeded. Consider increasing your provisioning level + * with the UpdateTable API.

        + * + *

        This Message is received when provisioned throughput is + * exceeded is on a provisioned DynamoDB + * table.

        + *
        + *
      • + *
      • + *

        The level of configured provisioned throughput for one or + * more global secondary indexes of the table was exceeded. + * Consider increasing your provisioning level for the + * under-provisioned global secondary indexes with the UpdateTable + * API.

        + * + *

        This message is returned when provisioned throughput is + * exceeded is on a provisioned GSI.

        + *
        + *
      • + *
      + *
    • + *
    + *
  • + *
  • + *

    Throttling Error:

    + *
      + *
    • + *

      Code: ThrottlingError + *

      + *
    • + *
    • + *

      Messages:

      + *
        + *
      • + *

        Throughput exceeds the current capacity of your table or + * index. DynamoDB is automatically scaling your table or + * index so please try again shortly. If exceptions persist, check + * if you have a hot key: + * https://docs.aws.amazon.com/amazondynamodb/latest/developerguide/bp-partition-key-design.html.

        + * + *

        This message is returned when writes get throttled on an + * On-Demand table as DynamoDB is automatically + * scaling the table.

        + *
        + *
      • + *
      • + *

        Throughput exceeds the current capacity for one or more + * global secondary indexes. DynamoDB is automatically + * scaling your index so please try again shortly.

        + * + *

        This message is returned when writes get throttled on + * an On-Demand GSI as DynamoDB is automatically + * scaling the GSI.

        + *
        + *
      • + *
      + *
    • + *
    + *
  • + *
  • + *

    Validation Error:

    + *
      + *
    • + *

      Code: ValidationError + *

      + *
    • + *
    • + *

      Messages:

      + *
        + *
      • + *

        One or more parameter values were invalid.

        + *
      • + *
      • + *

        The update expression attempted to update the secondary + * index key beyond allowed size limits.

        + *
      • + *
      • + *

        The update expression attempted to update the secondary + * index key to unsupported type.

        + *
      • + *
      • + *

        An operand in the update expression has an incorrect data + * type.

        + *
      • + *
      • + *

        Item size to update has exceeded the maximum allowed + * size.

        + *
      • + *
      • + *

        Number overflow. Attempting to store a number with + * magnitude larger than supported range.

        + *
      • + *
      • + *

        Type mismatch for attribute to update.

        + *
      • + *
      • + *

        Nesting Levels have exceeded supported limits.

        + *
      • + *
      • + *

        The document path provided in the update expression is + * invalid for update.

        + *
      • + *
      • + *

        The provided expression refers to an attribute that does + * not exist in the item.

        + *
      • + *
      + *
    • + *
    + *
  • + *
+ * @public + */ +export declare class TransactionCanceledException extends __BaseException { + readonly name: "TransactionCanceledException"; + readonly $fault: "client"; + Message?: string | undefined; + /** + *

A list of cancellation reasons.

+ * @public + */ + CancellationReasons?: CancellationReason[] | undefined; + /** + * @internal + */ + constructor(opts: __ExceptionOptionType); +} +/** + *

Represents the input of a BatchGetItem operation.

+ * @public + */ +export interface BatchGetItemInput { + /** + *

A map of one or more table names or table ARNs and, for each table, a map that + * describes one or more items to retrieve from that table. Each table name or ARN can be + * used only once per BatchGetItem request.

+ *

Each element in the map of items to retrieve consists of the following:

+ *
    + *
  • + *

    + * ConsistentRead - If true, a strongly consistent read + * is used; if false (the default), an eventually consistent read is + * used.

    + *
  • + *
  • + *

    + * ExpressionAttributeNames - One or more substitution tokens for + * attribute names in the ProjectionExpression parameter. The + * following are some use cases for using + * ExpressionAttributeNames:

    + *
      + *
    • + *

      To access an attribute whose name conflicts with a DynamoDB reserved + * word.

      + *
    • + *
    • + *

      To create a placeholder for repeating occurrences of an attribute name + * in an expression.

      + *
    • + *
    • + *

      To prevent special characters in an attribute name from being + * misinterpreted in an expression.

      + *
    • + *
    + *

    Use the # character in an expression to + * dereference an attribute name. For example, consider the following attribute + * name:

    + *
      + *
    • + *

      + * Percentile + *

      + *
    • + *
    + *

    The name of this attribute conflicts with a reserved word, so it cannot be + * used directly in an expression. (For the complete list of reserved words, see + * Reserved + * Words in the Amazon DynamoDB Developer Guide). + * To work around this, you could specify the following for + * ExpressionAttributeNames:

    + *
      + *
    • + *

      + * \{"#P":"Percentile"\} + *

      + *
    • + *
    + *

    You could then use this substitution in an expression, as in this + * example:

    + *
      + *
    • + *

      + * #P = :val + *

      + *
    • + *
    + * + *

    Tokens that begin with the : character + * are expression attribute values, which are placeholders + * for the actual value at runtime.

    + *
    + *

    For more information about expression attribute names, see Accessing Item Attributes in the Amazon DynamoDB + * Developer Guide.

    + *
  • + *
  • + *

    + * Keys - An array of primary key attribute values that define + * specific items in the table. For each primary key, you must provide + * all of the key attributes. For example, with a simple + * primary key, you only need to provide the partition key value. For a composite + * key, you must provide both the partition key value and the + * sort key value.

    + *
  • + *
  • + *

    + * ProjectionExpression - A string that identifies one or more + * attributes to retrieve from the table. These attributes can include scalars, + * sets, or elements of a JSON document. The attributes in the expression must be + * separated by commas.

    + *

    If no attribute names are specified, then all attributes are returned. If any + * of the requested attributes are not found, they do not appear in the + * result.

    + *

    For more information, see Accessing Item Attributes in the Amazon DynamoDB + * Developer Guide.

    + *
  • + *
  • + *

    + * AttributesToGet - This is a legacy parameter. Use + * ProjectionExpression instead. For more information, see AttributesToGet in the Amazon DynamoDB Developer + * Guide.

    + *
  • + *
+ * @public + */ + RequestItems: Record | undefined; + /** + *

Determines the level of detail about either provisioned or on-demand throughput + * consumption that is returned in the response:

+ *
    + *
  • + *

    + * INDEXES - The response includes the aggregate + * ConsumedCapacity for the operation, together with + * ConsumedCapacity for each table and secondary index that was + * accessed.

    + *

    Note that some operations, such as GetItem and + * BatchGetItem, do not access any indexes at all. In these cases, + * specifying INDEXES will only return ConsumedCapacity + * information for table(s).

    + *
  • + *
  • + *

    + * TOTAL - The response includes only the aggregate + * ConsumedCapacity for the operation.

    + *
  • + *
  • + *

    + * NONE - No ConsumedCapacity details are included in the + * response.

    + *
  • + *
+ * @public + */ + ReturnConsumedCapacity?: ReturnConsumedCapacity | undefined; +} +/** + *

Represents a condition to be compared with an attribute value. This condition can be + * used with DeleteItem, PutItem, or UpdateItem + * operations; if the comparison evaluates to true, the operation succeeds; if not, the + * operation fails. You can use ExpectedAttributeValue in one of two different + * ways:

+ *
    + *
  • + *

    Use AttributeValueList to specify one or more values to compare + * against an attribute. Use ComparisonOperator to specify how you + * want to perform the comparison. If the comparison evaluates to true, then the + * conditional operation succeeds.

    + *
  • + *
  • + *

    Use Value to specify a value that DynamoDB will compare against + * an attribute. If the values match, then ExpectedAttributeValue + * evaluates to true and the conditional operation succeeds. Optionally, you can + * also set Exists to false, indicating that you do + * not expect to find the attribute value in the table. In this + * case, the conditional operation succeeds only if the comparison evaluates to + * false.

    + *
  • + *
+ *

+ * Value and Exists are incompatible with + * AttributeValueList and ComparisonOperator. Note that if + * you use both sets of parameters at once, DynamoDB will return a + * ValidationException exception.

+ * @public + */ +export interface ExpectedAttributeValue { + /** + *

Represents the data for the expected attribute.

+ *

Each attribute value is described as a name-value pair. The name is the data type, and + * the value is the data itself.

+ *

For more information, see Data Types in the Amazon DynamoDB Developer + * Guide.

+ * @public + */ + Value?: AttributeValue | undefined; + /** + *

Causes DynamoDB to evaluate the value before attempting a conditional + * operation:

+ *
    + *
  • + *

    If Exists is true, DynamoDB will check to + * see if that attribute value already exists in the table. If it is found, then + * the operation succeeds. If it is not found, the operation fails with a + * ConditionCheckFailedException.

    + *
  • + *
  • + *

    If Exists is false, DynamoDB assumes that + * the attribute value does not exist in the table. If in fact the value does not + * exist, then the assumption is valid and the operation succeeds. If the value is + * found, despite the assumption that it does not exist, the operation fails with a + * ConditionCheckFailedException.

    + *
  • + *
+ *

The default setting for Exists is true. If you supply a + * Value all by itself, DynamoDB assumes the attribute exists: + * You don't have to set Exists to true, because it is + * implied.

+ *

DynamoDB returns a ValidationException if:

+ *
    + *
  • + *

    + * Exists is true but there is no Value to + * check. (You expect a value to exist, but don't specify what that value + * is.)

    + *
  • + *
  • + *

    + * Exists is false but you also provide a + * Value. (You cannot expect an attribute to have a value, while + * also expecting it not to exist.)

    + *
  • + *
+ * @public + */ + Exists?: boolean | undefined; + /** + *

A comparator for evaluating attributes in the AttributeValueList. For + * example, equals, greater than, less than, etc.

+ *

The following comparison operators are available:

+ *

+ * EQ | NE | LE | LT | GE | GT | NOT_NULL | NULL | CONTAINS | NOT_CONTAINS | + * BEGINS_WITH | IN | BETWEEN + *

+ *

The following are descriptions of each comparison operator.

+ *
    + *
  • + *

    + * EQ : Equal. EQ is supported for all data types, + * including lists and maps.

    + *

    + * AttributeValueList can contain only one AttributeValue + * element of type String, Number, Binary, String Set, Number Set, or Binary Set. + * If an item contains an AttributeValue element of a different type + * than the one provided in the request, the value does not match. For example, + * \{"S":"6"\} does not equal \{"N":"6"\}. Also, + * \{"N":"6"\} does not equal \{"NS":["6", "2", + * "1"]\}.

    + *

    + *
  • + *
  • + *

    + * NE : Not equal. NE is supported for all data types, + * including lists and maps.

    + *

    + * AttributeValueList can contain only one AttributeValue + * of type String, Number, Binary, String Set, Number Set, or Binary Set. If an + * item contains an AttributeValue of a different type than the one + * provided in the request, the value does not match. For example, + * \{"S":"6"\} does not equal \{"N":"6"\}. Also, + * \{"N":"6"\} does not equal \{"NS":["6", "2", + * "1"]\}.

    + *

    + *
  • + *
  • + *

    + * LE : Less than or equal.

    + *

    + * AttributeValueList can contain only one AttributeValue + * element of type String, Number, or Binary (not a set type). If an item contains + * an AttributeValue element of a different type than the one provided + * in the request, the value does not match. For example, \{"S":"6"\} + * does not equal \{"N":"6"\}. Also, \{"N":"6"\} does not + * compare to \{"NS":["6", "2", "1"]\}.

    + *

    + *
  • + *
  • + *

    + * LT : Less than.

    + *

    + * AttributeValueList can contain only one AttributeValue + * of type String, Number, or Binary (not a set type). If an item contains an + * AttributeValue element of a different type than the one + * provided in the request, the value does not match. For example, + * \{"S":"6"\} does not equal \{"N":"6"\}. Also, + * \{"N":"6"\} does not compare to \{"NS":["6", "2", + * "1"]\}.

    + *

    + *
  • + *
  • + *

    + * GE : Greater than or equal.

    + *

    + * AttributeValueList can contain only one AttributeValue + * element of type String, Number, or Binary (not a set type). If an item contains + * an AttributeValue element of a different type than the one provided + * in the request, the value does not match. For example, \{"S":"6"\} + * does not equal \{"N":"6"\}. Also, \{"N":"6"\} does not + * compare to \{"NS":["6", "2", "1"]\}.

    + *

    + *
  • + *
  • + *

    + * GT : Greater than.

    + *

    + * AttributeValueList can contain only one AttributeValue + * element of type String, Number, or Binary (not a set type). If an item contains + * an AttributeValue element of a different type than the one provided + * in the request, the value does not match. For example, \{"S":"6"\} + * does not equal \{"N":"6"\}. Also, \{"N":"6"\} does not + * compare to \{"NS":["6", "2", "1"]\}.

    + *

    + *
  • + *
  • + *

    + * NOT_NULL : The attribute exists. NOT_NULL is supported + * for all data types, including lists and maps.

    + * + *

    This operator tests for the existence of an attribute, not its data type. + * If the data type of attribute "a" is null, and you evaluate it + * using NOT_NULL, the result is a Boolean true. This + * result is because the attribute "a" exists; its data type is + * not relevant to the NOT_NULL comparison operator.

    + *
    + *
  • + *
  • + *

    + * NULL : The attribute does not exist. NULL is supported + * for all data types, including lists and maps.

    + * + *

    This operator tests for the nonexistence of an attribute, not its data + * type. If the data type of attribute "a" is null, and you + * evaluate it using NULL, the result is a Boolean + * false. This is because the attribute "a" + * exists; its data type is not relevant to the NULL comparison + * operator.

    + *
    + *
  • + *
  • + *

    + * CONTAINS : Checks for a subsequence, or value in a set.

    + *

    + * AttributeValueList can contain only one AttributeValue + * element of type String, Number, or Binary (not a set type). If the target + * attribute of the comparison is of type String, then the operator checks for a + * substring match. If the target attribute of the comparison is of type Binary, + * then the operator looks for a subsequence of the target that matches the input. + * If the target attribute of the comparison is a set ("SS", + * "NS", or "BS"), then the operator evaluates to + * true if it finds an exact match with any member of the set.

    + *

    CONTAINS is supported for lists: When evaluating "a CONTAINS b", + * "a" can be a list; however, "b" cannot be a set, a + * map, or a list.

    + *
  • + *
  • + *

    + * NOT_CONTAINS : Checks for absence of a subsequence, or absence of a + * value in a set.

    + *

    + * AttributeValueList can contain only one AttributeValue + * element of type String, Number, or Binary (not a set type). If the target + * attribute of the comparison is a String, then the operator checks for the + * absence of a substring match. If the target attribute of the comparison is + * Binary, then the operator checks for the absence of a subsequence of the target + * that matches the input. If the target attribute of the comparison is a set + * ("SS", "NS", or "BS"), then the + * operator evaluates to true if it does not find an exact + * match with any member of the set.

    + *

    NOT_CONTAINS is supported for lists: When evaluating "a NOT CONTAINS + * b", "a" can be a list; however, "b" cannot + * be a set, a map, or a list.

    + *
  • + *
  • + *

    + * BEGINS_WITH : Checks for a prefix.

    + *

    + * AttributeValueList can contain only one AttributeValue + * of type String or Binary (not a Number or a set type). The target attribute of + * the comparison must be of type String or Binary (not a Number or a set + * type).

    + *

    + *
  • + *
  • + *

    + * IN : Checks for matching elements in a list.

    + *

    + * AttributeValueList can contain one or more + * AttributeValue elements of type String, Number, or Binary. + * These attributes are compared against an existing attribute of an item. If any + * elements of the input are equal to the item attribute, the expression evaluates + * to true.

    + *
  • + *
  • + *

    + * BETWEEN : Greater than or equal to the first value, and less than + * or equal to the second value.

    + *

    + * AttributeValueList must contain two AttributeValue + * elements of the same type, either String, Number, or Binary (not a set type). A + * target attribute matches if the target value is greater than, or equal to, the + * first element and less than, or equal to, the second element. If an item + * contains an AttributeValue element of a different type than the one + * provided in the request, the value does not match. For example, + * \{"S":"6"\} does not compare to \{"N":"6"\}. Also, + * \{"N":"6"\} does not compare to \{"NS":["6", "2", + * "1"]\} + *

    + *
  • + *
+ * @public + */ + ComparisonOperator?: ComparisonOperator | undefined; + /** + *

One or more values to evaluate against the supplied attribute. The number of values in + * the list depends on the ComparisonOperator being used.

+ *

For type Number, value comparisons are numeric.

+ *

String value comparisons for greater than, equals, or less than are based on ASCII + * character code values. For example, a is greater than A, and + * a is greater than B. For a list of code values, see http://en.wikipedia.org/wiki/ASCII#ASCII_printable_characters.

+ *

For Binary, DynamoDB treats each byte of the binary data as unsigned when it + * compares binary values.

+ *

For information on specifying data types in JSON, see JSON Data Format + * in the Amazon DynamoDB Developer Guide.

+ * @public + */ + AttributeValueList?: AttributeValue[] | undefined; +} +/** + * @public + */ +export interface TransactGetItemsInput { + /** + *

An ordered array of up to 100 TransactGetItem objects, each of which + * contains a Get structure.

+ * @public + */ + TransactItems: TransactGetItem[] | undefined; + /** + *

A value of TOTAL causes consumed capacity information to be returned, and + * a value of NONE prevents that information from being returned. No other + * value is valid.

+ * @public + */ + ReturnConsumedCapacity?: ReturnConsumedCapacity | undefined; +} +/** + * @public + */ +export interface TransactWriteItemsOutput { + /** + *

The capacity units consumed by the entire TransactWriteItems operation. + * The values of the list are ordered according to the ordering of the + * TransactItems request parameter.

+ * @public + */ + ConsumedCapacity?: ConsumedCapacity[] | undefined; + /** + *

A list of tables that were processed by TransactWriteItems and, for each + * table, information about any item collections that were affected by individual + * UpdateItem, PutItem, or DeleteItem + * operations.

+ * @public + */ + ItemCollectionMetrics?: Record | undefined; +} +/** + *

Represents a request to perform a check that an item exists or to check the condition + * of specific attributes of the item.

+ * @public + */ +export interface ConditionCheck { + /** + *

The primary key of the item to be checked. Each element consists of an attribute name + * and a value for that attribute.

+ * @public + */ + Key: Record | undefined; + /** + *

Name of the table for the check item request. You can also provide the Amazon Resource Name (ARN) of + * the table in this parameter.

+ * @public + */ + TableName: string | undefined; + /** + *

A condition that must be satisfied in order for a conditional update to succeed. For + * more information, see Condition expressions in the Amazon DynamoDB Developer + * Guide.

+ * @public + */ + ConditionExpression: string | undefined; + /** + *

One or more substitution tokens for attribute names in an expression. For more + * information, see Expression attribute names in the Amazon DynamoDB Developer + * Guide.

+ * @public + */ + ExpressionAttributeNames?: Record | undefined; + /** + *

One or more values that can be substituted in an expression. For more information, see + * Condition expressions in the Amazon DynamoDB Developer + * Guide.

+ * @public + */ + ExpressionAttributeValues?: Record | undefined; + /** + *

Use ReturnValuesOnConditionCheckFailure to get the item attributes if the + * ConditionCheck condition fails. For + * ReturnValuesOnConditionCheckFailure, the valid values are: NONE and + * ALL_OLD.

+ * @public + */ + ReturnValuesOnConditionCheckFailure?: ReturnValuesOnConditionCheckFailure | undefined; +} +/** + *

Represents a request to perform a DeleteItem operation.

+ * @public + */ +export interface Delete { + /** + *

The primary key of the item to be deleted. Each element consists of an attribute name + * and a value for that attribute.

+ * @public + */ + Key: Record | undefined; + /** + *

Name of the table in which the item to be deleted resides. You can also provide the + * Amazon Resource Name (ARN) of the table in this parameter.

+ * @public + */ + TableName: string | undefined; + /** + *

A condition that must be satisfied in order for a conditional delete to + * succeed.

+ * @public + */ + ConditionExpression?: string | undefined; + /** + *

One or more substitution tokens for attribute names in an expression.

+ * @public + */ + ExpressionAttributeNames?: Record | undefined; + /** + *

One or more values that can be substituted in an expression.

+ * @public + */ + ExpressionAttributeValues?: Record | undefined; + /** + *

Use ReturnValuesOnConditionCheckFailure to get the item attributes if the + * Delete condition fails. For + * ReturnValuesOnConditionCheckFailure, the valid values are: NONE and + * ALL_OLD.

+ * @public + */ + ReturnValuesOnConditionCheckFailure?: ReturnValuesOnConditionCheckFailure | undefined; +} +/** + *

Represents a request to perform a PutItem operation.

+ * @public + */ +export interface Put { + /** + *

A map of attribute name to attribute values, representing the primary key of the item + * to be written by PutItem. All of the table's primary key attributes must be + * specified, and their data types must match those of the table's key schema. If any + * attributes are present in the item that are part of an index key schema for the table, + * their types must match the index key schema.

+ * @public + */ + Item: Record | undefined; + /** + *

Name of the table in which to write the item. You can also provide the Amazon Resource Name (ARN) of + * the table in this parameter.

+ * @public + */ + TableName: string | undefined; + /** + *

A condition that must be satisfied in order for a conditional update to + * succeed.

+ * @public + */ + ConditionExpression?: string | undefined; + /** + *

One or more substitution tokens for attribute names in an expression.

+ * @public + */ + ExpressionAttributeNames?: Record | undefined; + /** + *

One or more values that can be substituted in an expression.

+ * @public + */ + ExpressionAttributeValues?: Record | undefined; + /** + *

Use ReturnValuesOnConditionCheckFailure to get the item attributes if the + * Put condition fails. For + * ReturnValuesOnConditionCheckFailure, the valid values are: NONE and + * ALL_OLD.

+ * @public + */ + ReturnValuesOnConditionCheckFailure?: ReturnValuesOnConditionCheckFailure | undefined; +} +/** + *

Represents a request to perform an UpdateItem operation.

+ * @public + */ +export interface Update { + /** + *

The primary key of the item to be updated. Each element consists of an attribute name + * and a value for that attribute.

+ * @public + */ + Key: Record | undefined; + /** + *

An expression that defines one or more attributes to be updated, the action to be + * performed on them, and new value(s) for them.

+ * @public + */ + UpdateExpression: string | undefined; + /** + *

Name of the table for the UpdateItem request. You can also provide the + * Amazon Resource Name (ARN) of the table in this parameter.

+ * @public + */ + TableName: string | undefined; + /** + *

A condition that must be satisfied in order for a conditional update to + * succeed.

+ * @public + */ + ConditionExpression?: string | undefined; + /** + *

One or more substitution tokens for attribute names in an expression.

+ * @public + */ + ExpressionAttributeNames?: Record | undefined; + /** + *

One or more values that can be substituted in an expression.

+ * @public + */ + ExpressionAttributeValues?: Record | undefined; + /** + *

Use ReturnValuesOnConditionCheckFailure to get the item attributes if the + * Update condition fails. For + * ReturnValuesOnConditionCheckFailure, the valid values are: NONE and + * ALL_OLD.

+ * @public + */ + ReturnValuesOnConditionCheckFailure?: ReturnValuesOnConditionCheckFailure | undefined; +} +/** + *

A PartiQL batch statement response..

+ * @public + */ +export interface BatchStatementResponse { + /** + *

The error associated with a failed PartiQL batch statement.

+ * @public + */ + Error?: BatchStatementError | undefined; + /** + *

The table name associated with a failed PartiQL batch statement.

+ * @public + */ + TableName?: string | undefined; + /** + *

A DynamoDB item associated with a BatchStatementResponse

+ * @public + */ + Item?: Record | undefined; +} +/** + *

Represents the output of a DeleteItem operation.

+ * @public + */ +export interface DeleteItemOutput { + /** + *

A map of attribute names to AttributeValue objects, representing the item + * as it appeared before the DeleteItem operation. This map appears in the + * response only if ReturnValues was specified as ALL_OLD in the + * request.

+ * @public + */ + Attributes?: Record | undefined; + /** + *

The capacity units consumed by the DeleteItem operation. The data + * returned includes the total provisioned throughput consumed, along with statistics for + * the table and any indexes involved in the operation. ConsumedCapacity is + * only returned if the ReturnConsumedCapacity parameter was specified. For + * more information, see Provisioned capacity mode in the Amazon DynamoDB Developer + * Guide.

+ * @public + */ + ConsumedCapacity?: ConsumedCapacity | undefined; + /** + *

Information about item collections, if any, that were affected by the + * DeleteItem operation. ItemCollectionMetrics is only + * returned if the ReturnItemCollectionMetrics parameter was specified. If the + * table does not have any local secondary indexes, this information is not returned in the + * response.

+ *

Each ItemCollectionMetrics element consists of:

+ *
    + *
  • + *

    + * ItemCollectionKey - The partition key value of the item collection. + * This is the same as the partition key value of the item itself.

    + *
  • + *
  • + *

    + * SizeEstimateRangeGB - An estimate of item collection size, in + * gigabytes. This value is a two-element array containing a lower bound and an + * upper bound for the estimate. The estimate includes the size of all the items in + * the table, plus the size of all attributes projected into all of the local + * secondary indexes on that table. Use this estimate to measure whether a local + * secondary index is approaching its size limit.

    + *

    The estimate is subject to change over time; therefore, do not rely on the + * precision or accuracy of the estimate.

    + *
  • + *
+ * @public + */ + ItemCollectionMetrics?: ItemCollectionMetrics | undefined; +} +/** + * @public + */ +export interface ExecuteStatementOutput { + /** + *

If a read operation was used, this property will contain the result of the read + * operation; a map of attribute names and their values. For the write operations this + * value will be empty.

+ * @public + */ + Items?: Record[] | undefined; + /** + *

If the response of a read request exceeds the response payload limit DynamoDB will set + * this value in the response. If set, you can use that this value in the subsequent + * request to get the remaining results.

+ * @public + */ + NextToken?: string | undefined; + /** + *

The capacity units consumed by an operation. The data returned includes the total + * provisioned throughput consumed, along with statistics for the table and any indexes + * involved in the operation. ConsumedCapacity is only returned if the request + * asked for it. For more information, see Provisioned capacity mode in the Amazon DynamoDB Developer + * Guide.

+ * @public + */ + ConsumedCapacity?: ConsumedCapacity | undefined; + /** + *

The primary key of the item where the operation stopped, inclusive of the previous + * result set. Use this value to start a new operation, excluding this value in the new + * request. If LastEvaluatedKey is empty, then the "last page" of results has + * been processed and there is no more data to be retrieved. If + * LastEvaluatedKey is not empty, it does not necessarily mean that there + * is more data in the result set. The only way to know when you have reached the end of + * the result set is when LastEvaluatedKey is empty.

+ * @public + */ + LastEvaluatedKey?: Record | undefined; +} +/** + *

Represents the output of a PutItem operation.

+ * @public + */ +export interface PutItemOutput { + /** + *

The attribute values as they appeared before the PutItem operation, but + * only if ReturnValues is specified as ALL_OLD in the request. + * Each element consists of an attribute name and an attribute value.

+ * @public + */ + Attributes?: Record | undefined; + /** + *

The capacity units consumed by the PutItem operation. The data returned + * includes the total provisioned throughput consumed, along with statistics for the table + * and any indexes involved in the operation. ConsumedCapacity is only + * returned if the ReturnConsumedCapacity parameter was specified. For more + * information, see Capacity unity consumption for write operations in the Amazon + * DynamoDB Developer Guide.

+ * @public + */ + ConsumedCapacity?: ConsumedCapacity | undefined; + /** + *

Information about item collections, if any, that were affected by the + * PutItem operation. ItemCollectionMetrics is only returned + * if the ReturnItemCollectionMetrics parameter was specified. If the table + * does not have any local secondary indexes, this information is not returned in the + * response.

+ *

Each ItemCollectionMetrics element consists of:

+ *
    + *
  • + *

    + * ItemCollectionKey - The partition key value of the item collection. + * This is the same as the partition key value of the item itself.

    + *
  • + *
  • + *

    + * SizeEstimateRangeGB - An estimate of item collection size, in + * gigabytes. This value is a two-element array containing a lower bound and an + * upper bound for the estimate. The estimate includes the size of all the items in + * the table, plus the size of all attributes projected into all of the local + * secondary indexes on that table. Use this estimate to measure whether a local + * secondary index is approaching its size limit.

    + *

    The estimate is subject to change over time; therefore, do not rely on the + * precision or accuracy of the estimate.

    + *
  • + *
+ * @public + */ + ItemCollectionMetrics?: ItemCollectionMetrics | undefined; +} +/** + *

Represents the output of a Query operation.

+ * @public + */ +export interface QueryOutput { + /** + *

An array of item attributes that match the query criteria. Each element in this array + * consists of an attribute name and the value for that attribute.

+ * @public + */ + Items?: Record[] | undefined; + /** + *

The number of items in the response.

+ *

If you used a QueryFilter in the request, then Count is the + * number of items returned after the filter was applied, and ScannedCount is + * the number of matching items before the filter was applied.

+ *

If you did not use a filter in the request, then Count and + * ScannedCount are the same.

+ * @public + */ + Count?: number | undefined; + /** + *

The number of items evaluated, before any QueryFilter is applied. A high + * ScannedCount value with few, or no, Count results + * indicates an inefficient Query operation. For more information, see Count and ScannedCount in the Amazon DynamoDB Developer + * Guide.

+ *

If you did not use a filter in the request, then ScannedCount is the same + * as Count.

+ * @public + */ + ScannedCount?: number | undefined; + /** + *

The primary key of the item where the operation stopped, inclusive of the previous + * result set. Use this value to start a new operation, excluding this value in the new + * request.

+ *

If LastEvaluatedKey is empty, then the "last page" of results has been + * processed and there is no more data to be retrieved.

+ *

If LastEvaluatedKey is not empty, it does not necessarily mean that there + * is more data in the result set. The only way to know when you have reached the end of + * the result set is when LastEvaluatedKey is empty.

+ * @public + */ + LastEvaluatedKey?: Record | undefined; + /** + *

The capacity units consumed by the Query operation. The data returned + * includes the total provisioned throughput consumed, along with statistics for the table + * and any indexes involved in the operation. ConsumedCapacity is only + * returned if the ReturnConsumedCapacity parameter was specified. For more + * information, see Capacity unit consumption for read operations in the Amazon + * DynamoDB Developer Guide.

+ * @public + */ + ConsumedCapacity?: ConsumedCapacity | undefined; +} +/** + *

Represents the output of a Scan operation.

+ * @public + */ +export interface ScanOutput { + /** + *

An array of item attributes that match the scan criteria. Each element in this array + * consists of an attribute name and the value for that attribute.

+ * @public + */ + Items?: Record[] | undefined; + /** + *

The number of items in the response.

+ *

If you set ScanFilter in the request, then Count is the + * number of items returned after the filter was applied, and ScannedCount is + * the number of matching items before the filter was applied.

+ *

If you did not use a filter in the request, then Count is the same as + * ScannedCount.

+ * @public + */ + Count?: number | undefined; + /** + *

The number of items evaluated, before any ScanFilter is applied. A high + * ScannedCount value with few, or no, Count results + * indicates an inefficient Scan operation. For more information, see Count and + * ScannedCount in the Amazon DynamoDB Developer + * Guide.

+ *

If you did not use a filter in the request, then ScannedCount is the same + * as Count.

+ * @public + */ + ScannedCount?: number | undefined; + /** + *

The primary key of the item where the operation stopped, inclusive of the previous + * result set. Use this value to start a new operation, excluding this value in the new + * request.

+ *

If LastEvaluatedKey is empty, then the "last page" of results has been + * processed and there is no more data to be retrieved.

+ *

If LastEvaluatedKey is not empty, it does not necessarily mean that there + * is more data in the result set. The only way to know when you have reached the end of + * the result set is when LastEvaluatedKey is empty.

+ * @public + */ + LastEvaluatedKey?: Record | undefined; + /** + *

The capacity units consumed by the Scan operation. The data returned + * includes the total provisioned throughput consumed, along with statistics for the table + * and any indexes involved in the operation. ConsumedCapacity is only + * returned if the ReturnConsumedCapacity parameter was specified. For more + * information, see Capacity unit consumption for read operations in the Amazon + * DynamoDB Developer Guide.

+ * @public + */ + ConsumedCapacity?: ConsumedCapacity | undefined; +} +/** + *

Represents the output of an UpdateItem operation.

+ * @public + */ +export interface UpdateItemOutput { + /** + *

A map of attribute values as they appear before or after the UpdateItem + * operation, as determined by the ReturnValues parameter.

+ *

The Attributes map is only present if the update was successful and + * ReturnValues was specified as something other than NONE in + * the request. Each element represents one attribute.

+ * @public + */ + Attributes?: Record | undefined; + /** + *

The capacity units consumed by the UpdateItem operation. The data + * returned includes the total provisioned throughput consumed, along with statistics for + * the table and any indexes involved in the operation. ConsumedCapacity is + * only returned if the ReturnConsumedCapacity parameter was specified. For + * more information, see Capacity unity consumption for write operations in the Amazon + * DynamoDB Developer Guide.

+ * @public + */ + ConsumedCapacity?: ConsumedCapacity | undefined; + /** + *

Information about item collections, if any, that were affected by the + * UpdateItem operation. ItemCollectionMetrics is only + * returned if the ReturnItemCollectionMetrics parameter was specified. If the + * table does not have any local secondary indexes, this information is not returned in the + * response.

+ *

Each ItemCollectionMetrics element consists of:

+ *
    + *
  • + *

    + * ItemCollectionKey - The partition key value of the item collection. + * This is the same as the partition key value of the item itself.

    + *
  • + *
  • + *

    + * SizeEstimateRangeGB - An estimate of item collection size, in + * gigabytes. This value is a two-element array containing a lower bound and an + * upper bound for the estimate. The estimate includes the size of all the items in + * the table, plus the size of all attributes projected into all of the local + * secondary indexes on that table. Use this estimate to measure whether a local + * secondary index is approaching its size limit.

    + *

    The estimate is subject to change over time; therefore, do not rely on the + * precision or accuracy of the estimate.

    + *
  • + *
+ * @public + */ + ItemCollectionMetrics?: ItemCollectionMetrics | undefined; +} +/** + *

Represents an operation to perform - either DeleteItem or + * PutItem. You can only request one of these operations, not both, in a + * single WriteRequest. If you do need to perform both of these operations, + * you need to provide two separate WriteRequest objects.

+ * @public + */ +export interface WriteRequest { + /** + *

A request to perform a PutItem operation.

+ * @public + */ + PutRequest?: PutRequest | undefined; + /** + *

A request to perform a DeleteItem operation.

+ * @public + */ + DeleteRequest?: DeleteRequest | undefined; +} +/** + * @public + */ +export interface BatchExecuteStatementOutput { + /** + *

The response to each PartiQL statement in the batch. The values of the list are + * ordered according to the ordering of the request statements.

+ * @public + */ + Responses?: BatchStatementResponse[] | undefined; + /** + *

The capacity units consumed by the entire operation. The values of the list are + * ordered according to the ordering of the statements.

+ * @public + */ + ConsumedCapacity?: ConsumedCapacity[] | undefined; +} +/** + *

Represents the output of a BatchGetItem operation.

+ * @public + */ +export interface BatchGetItemOutput { + /** + *

A map of table name or table ARN to a list of items. Each object in + * Responses consists of a table name or ARN, along with a map of + * attribute data consisting of the data type and attribute value.

+ * @public + */ + Responses?: Record[]> | undefined; + /** + *

A map of tables and their respective keys that were not processed with the current + * response. The UnprocessedKeys value is in the same form as + * RequestItems, so the value can be provided directly to a subsequent + * BatchGetItem operation. For more information, see + * RequestItems in the Request Parameters section.

+ *

Each element consists of:

+ *
    + *
  • + *

    + * Keys - An array of primary key attribute values that define + * specific items in the table.

    + *
  • + *
  • + *

    + * ProjectionExpression - One or more attributes to be retrieved from + * the table or index. By default, all attributes are returned. If a requested + * attribute is not found, it does not appear in the result.

    + *
  • + *
  • + *

    + * ConsistentRead - The consistency of a read operation. If set to + * true, then a strongly consistent read is used; otherwise, an + * eventually consistent read is used.

    + *
  • + *
+ *

If there are no unprocessed keys remaining, the response contains an empty + * UnprocessedKeys map.

+ * @public + */ + UnprocessedKeys?: Record | undefined; + /** + *

The read capacity units consumed by the entire BatchGetItem + * operation.

+ *

Each element consists of:

+ *
    + *
  • + *

    + * TableName - The table that consumed the provisioned + * throughput.

    + *
  • + *
  • + *

    + * CapacityUnits - The total number of capacity units consumed.

    + *
  • + *
+ * @public + */ + ConsumedCapacity?: ConsumedCapacity[] | undefined; +} +/** + *

Represents the input of a Scan operation.

+ * @public + */ +export interface ScanInput { + /** + *

The name of the table containing the requested items or if you provide + * IndexName, the name of the table to which that index belongs.

+ *

You can also provide the Amazon Resource Name (ARN) of the table in this parameter.

+ * @public + */ + TableName: string | undefined; + /** + *

The name of a secondary index to scan. This index can be any local secondary index or + * global secondary index. Note that if you use the IndexName parameter, you + * must also provide TableName.

+ * @public + */ + IndexName?: string | undefined; + /** + *

This is a legacy parameter. Use ProjectionExpression instead. For more + * information, see AttributesToGet in the Amazon DynamoDB Developer + * Guide.

+ * @public + */ + AttributesToGet?: string[] | undefined; + /** + *

The maximum number of items to evaluate (not necessarily the number of matching + * items). If DynamoDB processes the number of items up to the limit while processing the + * results, it stops the operation and returns the matching values up to that point, and a + * key in LastEvaluatedKey to apply in a subsequent operation, so that you can + * pick up where you left off. Also, if the processed dataset size exceeds 1 MB before + * DynamoDB reaches this limit, it stops the operation and returns the matching values up + * to the limit, and a key in LastEvaluatedKey to apply in a subsequent + * operation to continue the operation. For more information, see Working with Queries in the Amazon DynamoDB Developer + * Guide.

+ * @public + */ + Limit?: number | undefined; + /** + *

The attributes to be returned in the result. You can retrieve all item attributes, + * specific item attributes, the count of matching items, or in the case of an index, some + * or all of the attributes projected into the index.

+ *
    + *
  • + *

    + * ALL_ATTRIBUTES - Returns all of the item attributes from the + * specified table or index. If you query a local secondary index, then for each + * matching item in the index, DynamoDB fetches the entire item from the parent + * table. If the index is configured to project all item attributes, then all of + * the data can be obtained from the local secondary index, and no fetching is + * required.

    + *
  • + *
  • + *

    + * ALL_PROJECTED_ATTRIBUTES - Allowed only when querying an index. + * Retrieves all attributes that have been projected into the index. If the index + * is configured to project all attributes, this return value is equivalent to + * specifying ALL_ATTRIBUTES.

    + *
  • + *
  • + *

    + * COUNT - Returns the number of matching items, rather than the + * matching items themselves. Note that this uses the same quantity of read + * capacity units as getting the items, and is subject to the same item size + * calculations.

    + *
  • + *
  • + *

    + * SPECIFIC_ATTRIBUTES - Returns only the attributes listed in + * ProjectionExpression. This return value is equivalent to + * specifying ProjectionExpression without specifying any value for + * Select.

    + *

    If you query or scan a local secondary index and request only attributes that + * are projected into that index, the operation reads only the index and not the + * table. If any of the requested attributes are not projected into the local + * secondary index, DynamoDB fetches each of these attributes from the parent + * table. This extra fetching incurs additional throughput cost and latency.

    + *

    If you query or scan a global secondary index, you can only request attributes + * that are projected into the index. Global secondary index queries cannot fetch + * attributes from the parent table.

    + *
  • + *
+ *

If neither Select nor ProjectionExpression are specified, + * DynamoDB defaults to ALL_ATTRIBUTES when accessing a table, and + * ALL_PROJECTED_ATTRIBUTES when accessing an index. You cannot use both + * Select and ProjectionExpression together in a single + * request, unless the value for Select is SPECIFIC_ATTRIBUTES. + * (This usage is equivalent to specifying ProjectionExpression without any + * value for Select.)

+ * + *

If you use the ProjectionExpression parameter, then the value for + * Select can only be SPECIFIC_ATTRIBUTES. Any other + * value for Select will return an error.

+ *
+ * @public + */ + Select?: Select | undefined; + /** + *

This is a legacy parameter. Use FilterExpression instead. For more + * information, see ScanFilter in the Amazon DynamoDB Developer + * Guide.

+ * @public + */ + ScanFilter?: Record | undefined; + /** + *

This is a legacy parameter. Use FilterExpression instead. For more + * information, see ConditionalOperator in the Amazon DynamoDB Developer + * Guide.

+ * @public + */ + ConditionalOperator?: ConditionalOperator | undefined; + /** + *

The primary key of the first item that this operation will evaluate. Use the value + * that was returned for LastEvaluatedKey in the previous operation.

+ *

The data type for ExclusiveStartKey must be String, Number or Binary. No + * set data types are allowed.

+ *

In a parallel scan, a Scan request that includes + * ExclusiveStartKey must specify the same segment whose previous + * Scan returned the corresponding value of + * LastEvaluatedKey.

+ * @public + */ + ExclusiveStartKey?: Record | undefined; + /** + *

Determines the level of detail about either provisioned or on-demand throughput + * consumption that is returned in the response:

+ *
    + *
  • + *

    + * INDEXES - The response includes the aggregate + * ConsumedCapacity for the operation, together with + * ConsumedCapacity for each table and secondary index that was + * accessed.

    + *

    Note that some operations, such as GetItem and + * BatchGetItem, do not access any indexes at all. In these cases, + * specifying INDEXES will only return ConsumedCapacity + * information for table(s).

    + *
  • + *
  • + *

    + * TOTAL - The response includes only the aggregate + * ConsumedCapacity for the operation.

    + *
  • + *
  • + *

    + * NONE - No ConsumedCapacity details are included in the + * response.

    + *
  • + *
+ * @public + */ + ReturnConsumedCapacity?: ReturnConsumedCapacity | undefined; + /** + *

For a parallel Scan request, TotalSegments represents the + * total number of segments into which the Scan operation will be divided. The + * value of TotalSegments corresponds to the number of application workers + * that will perform the parallel scan. For example, if you want to use four application + * threads to scan a table or an index, specify a TotalSegments value of + * 4.

+ *

The value for TotalSegments must be greater than or equal to 1, and less + * than or equal to 1000000. If you specify a TotalSegments value of 1, the + * Scan operation will be sequential rather than parallel.

+ *

If you specify TotalSegments, you must also specify + * Segment.

+ * @public + */ + TotalSegments?: number | undefined; + /** + *

For a parallel Scan request, Segment identifies an + * individual segment to be scanned by an application worker.

+ *

Segment IDs are zero-based, so the first segment is always 0. For example, if you want + * to use four application threads to scan a table or an index, then the first thread + * specifies a Segment value of 0, the second thread specifies 1, and so + * on.

+ *

The value of LastEvaluatedKey returned from a parallel Scan + * request must be used as ExclusiveStartKey with the same segment ID in a + * subsequent Scan operation.

+ *

The value for Segment must be greater than or equal to 0, and less than + * the value provided for TotalSegments.

+ *

If you provide Segment, you must also provide + * TotalSegments.

+ * @public + */ + Segment?: number | undefined; + /** + *

A string that identifies one or more attributes to retrieve from the specified table + * or index. These attributes can include scalars, sets, or elements of a JSON document. + * The attributes in the expression must be separated by commas.

+ *

If no attribute names are specified, then all attributes will be returned. If any of + * the requested attributes are not found, they will not appear in the result.

+ *

For more information, see Specifying Item Attributes in the Amazon DynamoDB Developer + * Guide.

+ * @public + */ + ProjectionExpression?: string | undefined; + /** + *

A string that contains conditions that DynamoDB applies after the Scan + * operation, but before the data is returned to you. Items that do not satisfy the + * FilterExpression criteria are not returned.

+ * + *

A FilterExpression is applied after the items have already been read; + * the process of filtering does not consume any additional read capacity units.

+ *
+ *

For more information, see Filter + * Expressions in the Amazon DynamoDB Developer + * Guide.

+ * @public + */ + FilterExpression?: string | undefined; + /** + *

One or more substitution tokens for attribute names in an expression. The following + * are some use cases for using ExpressionAttributeNames:

+ *
    + *
  • + *

    To access an attribute whose name conflicts with a DynamoDB reserved + * word.

    + *
  • + *
  • + *

    To create a placeholder for repeating occurrences of an attribute name in an + * expression.

    + *
  • + *
  • + *

    To prevent special characters in an attribute name from being misinterpreted + * in an expression.

    + *
  • + *
+ *

Use the # character in an expression to dereference + * an attribute name. For example, consider the following attribute name:

+ *
    + *
  • + *

    + * Percentile + *

    + *
  • + *
+ *

The name of this attribute conflicts with a reserved word, so it cannot be used + * directly in an expression. (For the complete list of reserved words, see Reserved Words in the Amazon DynamoDB Developer + * Guide). To work around this, you could specify the following for + * ExpressionAttributeNames:

+ *
    + *
  • + *

    + * \{"#P":"Percentile"\} + *

    + *
  • + *
+ *

You could then use this substitution in an expression, as in this example:

+ *
    + *
  • + *

    + * #P = :val + *

    + *
  • + *
+ * + *

Tokens that begin with the : character are + * expression attribute values, which are placeholders for the + * actual value at runtime.

+ *
+ *

For more information on expression attribute names, see Specifying Item Attributes in the Amazon DynamoDB Developer + * Guide.

+ * @public + */ + ExpressionAttributeNames?: Record | undefined; + /** + *

One or more values that can be substituted in an expression.

+ *

Use the : (colon) character in an expression to + * dereference an attribute value. For example, suppose that you wanted to check whether + * the value of the ProductStatus attribute was one of the following:

+ *

+ * Available | Backordered | Discontinued + *

+ *

You would first need to specify ExpressionAttributeValues as + * follows:

+ *

+ * \{ ":avail":\{"S":"Available"\}, ":back":\{"S":"Backordered"\}, + * ":disc":\{"S":"Discontinued"\} \} + *

+ *

You could then use these values in an expression, such as this:

+ *

+ * ProductStatus IN (:avail, :back, :disc) + *

+ *

For more information on expression attribute values, see Condition Expressions in the Amazon DynamoDB Developer + * Guide.

+ * @public + */ + ExpressionAttributeValues?: Record | undefined; + /** + *

A Boolean value that determines the read consistency model during the scan:

+ *
    + *
  • + *

    If ConsistentRead is false, then the data returned + * from Scan might not contain the results from other recently + * completed write operations (PutItem, UpdateItem, or + * DeleteItem).

    + *
  • + *
  • + *

    If ConsistentRead is true, then all of the write + * operations that completed before the Scan began are guaranteed to + * be contained in the Scan response.

    + *
  • + *
+ *

The default setting for ConsistentRead is false.

+ *

The ConsistentRead parameter is not supported on global secondary + * indexes. If you scan a global secondary index with ConsistentRead set to + * true, you will receive a ValidationException.

+ * @public + */ + ConsistentRead?: boolean | undefined; +} +/** + *

Represents the input of a BatchWriteItem operation.

+ * @public + */ +export interface BatchWriteItemInput { + /** + *

A map of one or more table names or table ARNs and, for each table, a list of + * operations to be performed (DeleteRequest or PutRequest). Each + * element in the map consists of the following:

+ *
    + *
  • + *

    + * DeleteRequest - Perform a DeleteItem operation on the + * specified item. The item to be deleted is identified by a Key + * subelement:

    + *
      + *
    • + *

      + * Key - A map of primary key attribute values that uniquely + * identify the item. Each entry in this map consists of an attribute name + * and an attribute value. For each primary key, you must provide + * all of the key attributes. For example, with a + * simple primary key, you only need to provide a value for the partition + * key. For a composite primary key, you must provide values for + * both the partition key and the sort key.

      + *
    • + *
    + *
  • + *
  • + *

    + * PutRequest - Perform a PutItem operation on the + * specified item. The item to be put is identified by an Item + * subelement:

    + *
      + *
    • + *

      + * Item - A map of attributes and their values. Each entry in + * this map consists of an attribute name and an attribute value. Attribute + * values must not be null; string and binary type attributes must have + * lengths greater than zero; and set type attributes must not be empty. + * Requests that contain empty values are rejected with a + * ValidationException exception.

      + *

      If you specify any attributes that are part of an index key, then the + * data types for those attributes must match those of the schema in the + * table's attribute definition.

      + *
    • + *
    + *
  • + *
+ * @public + */ + RequestItems: Record | undefined; + /** + *

Determines the level of detail about either provisioned or on-demand throughput + * consumption that is returned in the response:

+ *
    + *
  • + *

    + * INDEXES - The response includes the aggregate + * ConsumedCapacity for the operation, together with + * ConsumedCapacity for each table and secondary index that was + * accessed.

    + *

    Note that some operations, such as GetItem and + * BatchGetItem, do not access any indexes at all. In these cases, + * specifying INDEXES will only return ConsumedCapacity + * information for table(s).

    + *
  • + *
  • + *

    + * TOTAL - The response includes only the aggregate + * ConsumedCapacity for the operation.

    + *
  • + *
  • + *

    + * NONE - No ConsumedCapacity details are included in the + * response.

    + *
  • + *
+ * @public + */ + ReturnConsumedCapacity?: ReturnConsumedCapacity | undefined; + /** + *

Determines whether item collection metrics are returned. If set to SIZE, + * the response includes statistics about item collections, if any, that were modified + * during the operation are returned in the response. If set to NONE (the + * default), no statistics are returned.

+ * @public + */ + ReturnItemCollectionMetrics?: ReturnItemCollectionMetrics | undefined; +} +/** + *

Represents the input of a DeleteItem operation.

+ * @public + */ +export interface DeleteItemInput { + /** + *

The name of the table from which to delete the item. You can also provide the + * Amazon Resource Name (ARN) of the table in this parameter.

+ * @public + */ + TableName: string | undefined; + /** + *

A map of attribute names to AttributeValue objects, representing the + * primary key of the item to delete.

+ *

For the primary key, you must provide all of the key attributes. For example, with a + * simple primary key, you only need to provide a value for the partition key. For a + * composite primary key, you must provide values for both the partition key and the sort + * key.

+ * @public + */ + Key: Record | undefined; + /** + *

This is a legacy parameter. Use ConditionExpression instead. For more + * information, see Expected in the Amazon DynamoDB Developer + * Guide.

+ * @public + */ + Expected?: Record | undefined; + /** + *

This is a legacy parameter. Use ConditionExpression instead. For more + * information, see ConditionalOperator in the Amazon DynamoDB Developer + * Guide.

+ * @public + */ + ConditionalOperator?: ConditionalOperator | undefined; + /** + *

Use ReturnValues if you want to get the item attributes as they appeared + * before they were deleted. For DeleteItem, the valid values are:

+ *
    + *
  • + *

    + * NONE - If ReturnValues is not specified, or if its + * value is NONE, then nothing is returned. (This setting is the + * default for ReturnValues.)

    + *
  • + *
  • + *

    + * ALL_OLD - The content of the old item is returned.

    + *
  • + *
+ *

There is no additional cost associated with requesting a return value aside from the + * small network and processing overhead of receiving a larger response. No read capacity + * units are consumed.

+ * + *

The ReturnValues parameter is used by several DynamoDB operations; + * however, DeleteItem does not recognize any values other than + * NONE or ALL_OLD.

+ *
+ * @public + */ + ReturnValues?: ReturnValue | undefined; + /** + *

Determines the level of detail about either provisioned or on-demand throughput + * consumption that is returned in the response:

+ *
    + *
  • + *

    + * INDEXES - The response includes the aggregate + * ConsumedCapacity for the operation, together with + * ConsumedCapacity for each table and secondary index that was + * accessed.

    + *

    Note that some operations, such as GetItem and + * BatchGetItem, do not access any indexes at all. In these cases, + * specifying INDEXES will only return ConsumedCapacity + * information for table(s).

    + *
  • + *
  • + *

    + * TOTAL - The response includes only the aggregate + * ConsumedCapacity for the operation.

    + *
  • + *
  • + *

    + * NONE - No ConsumedCapacity details are included in the + * response.

    + *
  • + *
+ * @public + */ + ReturnConsumedCapacity?: ReturnConsumedCapacity | undefined; + /** + *

Determines whether item collection metrics are returned. If set to SIZE, + * the response includes statistics about item collections, if any, that were modified + * during the operation are returned in the response. If set to NONE (the + * default), no statistics are returned.

+ * @public + */ + ReturnItemCollectionMetrics?: ReturnItemCollectionMetrics | undefined; + /** + *

A condition that must be satisfied in order for a conditional DeleteItem + * to succeed.

+ *

An expression can contain any of the following:

+ *
    + *
  • + *

    Functions: attribute_exists | attribute_not_exists | attribute_type | + * contains | begins_with | size + *

    + *

    These function names are case-sensitive.

    + *
  • + *
  • + *

    Comparison operators: = | <> | + * < | > | <= | >= | + * BETWEEN | IN + *

    + *
  • + *
  • + *

    Logical operators: AND | OR | NOT + *

    + *
  • + *
+ *

For more information about condition expressions, see Condition Expressions in the Amazon DynamoDB Developer + * Guide.

+ * @public + */ + ConditionExpression?: string | undefined; + /** + *

One or more substitution tokens for attribute names in an expression. The following + * are some use cases for using ExpressionAttributeNames:

+ *
    + *
  • + *

    To access an attribute whose name conflicts with a DynamoDB reserved + * word.

    + *
  • + *
  • + *

    To create a placeholder for repeating occurrences of an attribute name in an + * expression.

    + *
  • + *
  • + *

    To prevent special characters in an attribute name from being misinterpreted + * in an expression.

    + *
  • + *
+ *

Use the # character in an expression to dereference + * an attribute name. For example, consider the following attribute name:

+ *
    + *
  • + *

    + * Percentile + *

    + *
  • + *
+ *

The name of this attribute conflicts with a reserved word, so it cannot be used + * directly in an expression. (For the complete list of reserved words, see Reserved Words in the Amazon DynamoDB Developer + * Guide). To work around this, you could specify the following for + * ExpressionAttributeNames:

+ *
    + *
  • + *

    + * \{"#P":"Percentile"\} + *

    + *
  • + *
+ *

You could then use this substitution in an expression, as in this example:

+ *
    + *
  • + *

    + * #P = :val + *

    + *
  • + *
+ * + *

Tokens that begin with the : character are + * expression attribute values, which are placeholders for the + * actual value at runtime.

+ *
+ *

For more information on expression attribute names, see Specifying Item Attributes in the Amazon DynamoDB Developer + * Guide.

+ * @public + */ + ExpressionAttributeNames?: Record | undefined; + /** + *

One or more values that can be substituted in an expression.

+ *

Use the : (colon) character in an expression to + * dereference an attribute value. For example, suppose that you wanted to check whether + * the value of the ProductStatus attribute was one of the following:

+ *

+ * Available | Backordered | Discontinued + *

+ *

You would first need to specify ExpressionAttributeValues as + * follows:

+ *

+ * \{ ":avail":\{"S":"Available"\}, ":back":\{"S":"Backordered"\}, + * ":disc":\{"S":"Discontinued"\} \} + *

+ *

You could then use these values in an expression, such as this:

+ *

+ * ProductStatus IN (:avail, :back, :disc) + *

+ *

For more information on expression attribute values, see Condition Expressions in the Amazon DynamoDB Developer + * Guide.

+ * @public + */ + ExpressionAttributeValues?: Record | undefined; + /** + *

An optional parameter that returns the item attributes for a DeleteItem + * operation that failed a condition check.

+ *

There is no additional cost associated with requesting a return value aside from the + * small network and processing overhead of receiving a larger response. No read capacity + * units are consumed.

+ * @public + */ + ReturnValuesOnConditionCheckFailure?: ReturnValuesOnConditionCheckFailure | undefined; +} +/** + *

Represents the input of a PutItem operation.

+ * @public + */ +export interface PutItemInput { + /** + *

The name of the table to contain the item. You can also provide the Amazon Resource Name (ARN) of the + * table in this parameter.

+ * @public + */ + TableName: string | undefined; + /** + *

A map of attribute name/value pairs, one for each attribute. Only the primary key + * attributes are required; you can optionally provide other attribute name-value pairs for + * the item.

+ *

You must provide all of the attributes for the primary key. For example, with a simple + * primary key, you only need to provide a value for the partition key. For a composite + * primary key, you must provide both values for both the partition key and the sort + * key.

+ *

If you specify any attributes that are part of an index key, then the data types for + * those attributes must match those of the schema in the table's attribute + * definition.

+ *

Empty String and Binary attribute values are allowed. Attribute values of type String + * and Binary must have a length greater than zero if the attribute is used as a key + * attribute for a table or index.

+ *

For more information about primary keys, see Primary Key in the Amazon DynamoDB Developer + * Guide.

+ *

Each element in the Item map is an AttributeValue + * object.

+ * @public + */ + Item: Record | undefined; + /** + *

This is a legacy parameter. Use ConditionExpression instead. For more + * information, see Expected in the Amazon DynamoDB Developer + * Guide.

+ * @public + */ + Expected?: Record | undefined; + /** + *

Use ReturnValues if you want to get the item attributes as they appeared + * before they were updated with the PutItem request. For + * PutItem, the valid values are:

+ *
    + *
  • + *

    + * NONE - If ReturnValues is not specified, or if its + * value is NONE, then nothing is returned. (This setting is the + * default for ReturnValues.)

    + *
  • + *
  • + *

    + * ALL_OLD - If PutItem overwrote an attribute name-value + * pair, then the content of the old item is returned.

    + *
  • + *
+ *

The values returned are strongly consistent.

+ *

There is no additional cost associated with requesting a return value aside from the + * small network and processing overhead of receiving a larger response. No read capacity + * units are consumed.

+ * + *

The ReturnValues parameter is used by several DynamoDB operations; + * however, PutItem does not recognize any values other than + * NONE or ALL_OLD.

+ *
+ * @public + */ + ReturnValues?: ReturnValue | undefined; + /** + *

Determines the level of detail about either provisioned or on-demand throughput + * consumption that is returned in the response:

+ *
    + *
  • + *

    + * INDEXES - The response includes the aggregate + * ConsumedCapacity for the operation, together with + * ConsumedCapacity for each table and secondary index that was + * accessed.

    + *

    Note that some operations, such as GetItem and + * BatchGetItem, do not access any indexes at all. In these cases, + * specifying INDEXES will only return ConsumedCapacity + * information for table(s).

    + *
  • + *
  • + *

    + * TOTAL - The response includes only the aggregate + * ConsumedCapacity for the operation.

    + *
  • + *
  • + *

    + * NONE - No ConsumedCapacity details are included in the + * response.

    + *
  • + *
+ * @public + */ + ReturnConsumedCapacity?: ReturnConsumedCapacity | undefined; + /** + *

Determines whether item collection metrics are returned. If set to SIZE, + * the response includes statistics about item collections, if any, that were modified + * during the operation are returned in the response. If set to NONE (the + * default), no statistics are returned.

+ * @public + */ + ReturnItemCollectionMetrics?: ReturnItemCollectionMetrics | undefined; + /** + *

This is a legacy parameter. Use ConditionExpression instead. For more + * information, see ConditionalOperator in the Amazon DynamoDB Developer + * Guide.

+ * @public + */ + ConditionalOperator?: ConditionalOperator | undefined; + /** + *

A condition that must be satisfied in order for a conditional PutItem + * operation to succeed.

+ *

An expression can contain any of the following:

+ *
    + *
  • + *

    Functions: attribute_exists | attribute_not_exists | attribute_type | + * contains | begins_with | size + *

    + *

    These function names are case-sensitive.

    + *
  • + *
  • + *

    Comparison operators: = | <> | + * < | > | <= | >= | + * BETWEEN | IN + *

    + *
  • + *
  • + *

    Logical operators: AND | OR | NOT + *

    + *
  • + *
+ *

For more information on condition expressions, see Condition Expressions in the Amazon DynamoDB Developer + * Guide.

+ * @public + */ + ConditionExpression?: string | undefined; + /** + *

One or more substitution tokens for attribute names in an expression. The following + * are some use cases for using ExpressionAttributeNames:

+ *
    + *
  • + *

    To access an attribute whose name conflicts with a DynamoDB reserved + * word.

    + *
  • + *
  • + *

    To create a placeholder for repeating occurrences of an attribute name in an + * expression.

    + *
  • + *
  • + *

    To prevent special characters in an attribute name from being misinterpreted + * in an expression.

    + *
  • + *
+ *

Use the # character in an expression to dereference + * an attribute name. For example, consider the following attribute name:

+ *
    + *
  • + *

    + * Percentile + *

    + *
  • + *
+ *

The name of this attribute conflicts with a reserved word, so it cannot be used + * directly in an expression. (For the complete list of reserved words, see Reserved Words in the Amazon DynamoDB Developer + * Guide). To work around this, you could specify the following for + * ExpressionAttributeNames:

+ *
    + *
  • + *

    + * \{"#P":"Percentile"\} + *

    + *
  • + *
+ *

You could then use this substitution in an expression, as in this example:

+ *
    + *
  • + *

    + * #P = :val + *

    + *
  • + *
+ * + *

Tokens that begin with the : character are + * expression attribute values, which are placeholders for the + * actual value at runtime.

+ *
+ *

For more information on expression attribute names, see Specifying Item Attributes in the Amazon DynamoDB Developer + * Guide.

+ * @public + */ + ExpressionAttributeNames?: Record | undefined; + /** + *

One or more values that can be substituted in an expression.

+ *

Use the : (colon) character in an expression to + * dereference an attribute value. For example, suppose that you wanted to check whether + * the value of the ProductStatus attribute was one of the following:

+ *

+ * Available | Backordered | Discontinued + *

+ *

You would first need to specify ExpressionAttributeValues as + * follows:

+ *

+ * \{ ":avail":\{"S":"Available"\}, ":back":\{"S":"Backordered"\}, + * ":disc":\{"S":"Discontinued"\} \} + *

+ *

You could then use these values in an expression, such as this:

+ *

+ * ProductStatus IN (:avail, :back, :disc) + *

+ *

For more information on expression attribute values, see Condition Expressions in the Amazon DynamoDB Developer + * Guide.

+ * @public + */ + ExpressionAttributeValues?: Record | undefined; + /** + *

An optional parameter that returns the item attributes for a PutItem + * operation that failed a condition check.

+ *

There is no additional cost associated with requesting a return value aside from the + * small network and processing overhead of receiving a larger response. No read capacity + * units are consumed.

+ * @public + */ + ReturnValuesOnConditionCheckFailure?: ReturnValuesOnConditionCheckFailure | undefined; +} +/** + *

Represents the input of a Query operation.

+ * @public + */ +export interface QueryInput { + /** + *

The name of the table containing the requested items. You can also provide the + * Amazon Resource Name (ARN) of the table in this parameter.

+ * @public + */ + TableName: string | undefined; + /** + *

The name of an index to query. This index can be any local secondary index or global + * secondary index on the table. Note that if you use the IndexName parameter, + * you must also provide TableName. + *

+ * @public + */ + IndexName?: string | undefined; + /** + *

The attributes to be returned in the result. You can retrieve all item attributes, + * specific item attributes, the count of matching items, or in the case of an index, some + * or all of the attributes projected into the index.

+ *
    + *
  • + *

    + * ALL_ATTRIBUTES - Returns all of the item attributes from the + * specified table or index. If you query a local secondary index, then for each + * matching item in the index, DynamoDB fetches the entire item from the parent + * table. If the index is configured to project all item attributes, then all of + * the data can be obtained from the local secondary index, and no fetching is + * required.

    + *
  • + *
  • + *

    + * ALL_PROJECTED_ATTRIBUTES - Allowed only when querying an index. + * Retrieves all attributes that have been projected into the index. If the index + * is configured to project all attributes, this return value is equivalent to + * specifying ALL_ATTRIBUTES.

    + *
  • + *
  • + *

    + * COUNT - Returns the number of matching items, rather than the + * matching items themselves. Note that this uses the same quantity of read + * capacity units as getting the items, and is subject to the same item size + * calculations.

    + *
  • + *
  • + *

    + * SPECIFIC_ATTRIBUTES - Returns only the attributes listed in + * ProjectionExpression. This return value is equivalent to + * specifying ProjectionExpression without specifying any value for + * Select.

    + *

    If you query or scan a local secondary index and request only attributes that + * are projected into that index, the operation will read only the index and not + * the table. If any of the requested attributes are not projected into the local + * secondary index, DynamoDB fetches each of these attributes from the parent + * table. This extra fetching incurs additional throughput cost and latency.

    + *

    If you query or scan a global secondary index, you can only request attributes + * that are projected into the index. Global secondary index queries cannot fetch + * attributes from the parent table.

    + *
  • + *
+ *

If neither Select nor ProjectionExpression are specified, + * DynamoDB defaults to ALL_ATTRIBUTES when accessing a table, and + * ALL_PROJECTED_ATTRIBUTES when accessing an index. You cannot use both + * Select and ProjectionExpression together in a single + * request, unless the value for Select is SPECIFIC_ATTRIBUTES. + * (This usage is equivalent to specifying ProjectionExpression without any + * value for Select.)

+ * + *

If you use the ProjectionExpression parameter, then the value for + * Select can only be SPECIFIC_ATTRIBUTES. Any other + * value for Select will return an error.

+ *
+ * @public + */ + Select?: Select | undefined; + /** + *

This is a legacy parameter. Use ProjectionExpression instead. For more + * information, see AttributesToGet in the Amazon DynamoDB Developer + * Guide.

+ * @public + */ + AttributesToGet?: string[] | undefined; + /** + *

The maximum number of items to evaluate (not necessarily the number of matching + * items). If DynamoDB processes the number of items up to the limit while processing the + * results, it stops the operation and returns the matching values up to that point, and a + * key in LastEvaluatedKey to apply in a subsequent operation, so that you can + * pick up where you left off. Also, if the processed dataset size exceeds 1 MB before + * DynamoDB reaches this limit, it stops the operation and returns the matching values up + * to the limit, and a key in LastEvaluatedKey to apply in a subsequent + * operation to continue the operation. For more information, see Query and Scan in the Amazon DynamoDB Developer + * Guide.

+ * @public + */ + Limit?: number | undefined; + /** + *

Determines the read consistency model: If set to true, then the operation + * uses strongly consistent reads; otherwise, the operation uses eventually consistent + * reads.

+ *

Strongly consistent reads are not supported on global secondary indexes. If you query + * a global secondary index with ConsistentRead set to true, you + * will receive a ValidationException.

+ * @public + */ + ConsistentRead?: boolean | undefined; + /** + *

This is a legacy parameter. Use KeyConditionExpression instead. For more + * information, see KeyConditions in the Amazon DynamoDB Developer + * Guide.

+ * @public + */ + KeyConditions?: Record | undefined; + /** + *

This is a legacy parameter. Use FilterExpression instead. For more + * information, see QueryFilter in the Amazon DynamoDB Developer + * Guide.

+ * @public + */ + QueryFilter?: Record | undefined; + /** + *

This is a legacy parameter. Use FilterExpression instead. For more + * information, see ConditionalOperator in the Amazon DynamoDB Developer + * Guide.

+ * @public + */ + ConditionalOperator?: ConditionalOperator | undefined; + /** + *

Specifies the order for index traversal: If true (default), the traversal + * is performed in ascending order; if false, the traversal is performed in + * descending order.

+ *

Items with the same partition key value are stored in sorted order by sort key. If the + * sort key data type is Number, the results are stored in numeric order. For type String, + * the results are stored in order of UTF-8 bytes. For type Binary, DynamoDB treats each + * byte of the binary data as unsigned.

+ *

If ScanIndexForward is true, DynamoDB returns the results in + * the order in which they are stored (by sort key value). This is the default behavior. If + * ScanIndexForward is false, DynamoDB reads the results in + * reverse order by sort key value, and then returns the results to the client.

+ * @public + */ + ScanIndexForward?: boolean | undefined; + /** + *

The primary key of the first item that this operation will evaluate. Use the value + * that was returned for LastEvaluatedKey in the previous operation.

+ *

The data type for ExclusiveStartKey must be String, Number, or Binary. No + * set data types are allowed.

+ * @public + */ + ExclusiveStartKey?: Record | undefined; + /** + *

Determines the level of detail about either provisioned or on-demand throughput + * consumption that is returned in the response:

+ *
    + *
  • + *

    + * INDEXES - The response includes the aggregate + * ConsumedCapacity for the operation, together with + * ConsumedCapacity for each table and secondary index that was + * accessed.

    + *

    Note that some operations, such as GetItem and + * BatchGetItem, do not access any indexes at all. In these cases, + * specifying INDEXES will only return ConsumedCapacity + * information for table(s).

    + *
  • + *
  • + *

    + * TOTAL - The response includes only the aggregate + * ConsumedCapacity for the operation.

    + *
  • + *
  • + *

    + * NONE - No ConsumedCapacity details are included in the + * response.

    + *
  • + *
+ * @public + */ + ReturnConsumedCapacity?: ReturnConsumedCapacity | undefined; + /** + *

A string that identifies one or more attributes to retrieve from the table. These + * attributes can include scalars, sets, or elements of a JSON document. The attributes in + * the expression must be separated by commas.

+ *

If no attribute names are specified, then all attributes will be returned. If any of + * the requested attributes are not found, they will not appear in the result.

+ *

For more information, see Accessing Item Attributes in the Amazon DynamoDB Developer + * Guide.

+ * @public + */ + ProjectionExpression?: string | undefined; + /** + *

A string that contains conditions that DynamoDB applies after the Query + * operation, but before the data is returned to you. Items that do not satisfy the + * FilterExpression criteria are not returned.

+ *

A FilterExpression does not allow key attributes. You cannot define a + * filter expression based on a partition key or a sort key.

+ * + *

A FilterExpression is applied after the items have already been read; + * the process of filtering does not consume any additional read capacity units.

+ *
+ *

For more information, see Filter + * Expressions in the Amazon DynamoDB Developer + * Guide.

+ * @public + */ + FilterExpression?: string | undefined; + /** + *

The condition that specifies the key values for items to be retrieved by the + * Query action.

+ *

The condition must perform an equality test on a single partition key value.

+ *

The condition can optionally perform one of several comparison tests on a single sort + * key value. This allows Query to retrieve one item with a given partition + * key value and sort key value, or several items that have the same partition key value + * but different sort key values.

+ *

The partition key equality test is required, and must be specified in the following + * format:

+ *

+ * partitionKeyName + * = + * :partitionkeyval + *

+ *

If you also want to provide a condition for the sort key, it must be combined using + * AND with the condition for the sort key. Following is an example, using + * the = comparison operator for the sort key:

+ *

+ * partitionKeyName + * = + * :partitionkeyval + * AND + * sortKeyName + * = + * :sortkeyval + *

+ *

Valid comparisons for the sort key condition are as follows:

+ *
    + *
  • + *

    + * sortKeyName + * = + * :sortkeyval - true if the sort key value is equal to + * :sortkeyval.

    + *
  • + *
  • + *

    + * sortKeyName + * < + * :sortkeyval - true if the sort key value is less than + * :sortkeyval.

    + *
  • + *
  • + *

    + * sortKeyName + * <= + * :sortkeyval - true if the sort key value is less than or equal to + * :sortkeyval.

    + *
  • + *
  • + *

    + * sortKeyName + * > + * :sortkeyval - true if the sort key value is greater than + * :sortkeyval.

    + *
  • + *
  • + *

    + * sortKeyName + * >= + * :sortkeyval - true if the sort key value is greater than or equal + * to :sortkeyval.

    + *
  • + *
  • + *

    + * sortKeyName + * BETWEEN + * :sortkeyval1 + * AND + * :sortkeyval2 - true if the sort key value is greater than or equal + * to :sortkeyval1, and less than or equal to + * :sortkeyval2.

    + *
  • + *
  • + *

    + * begins_with ( + * sortKeyName, :sortkeyval + * ) - true if the sort key value begins with a particular operand. + * (You cannot use this function with a sort key that is of type Number.) Note that + * the function name begins_with is case-sensitive.

    + *
  • + *
+ *

Use the ExpressionAttributeValues parameter to replace tokens such as + * :partitionval and :sortval with actual values at + * runtime.

+ *

You can optionally use the ExpressionAttributeNames parameter to replace + * the names of the partition key and sort key with placeholder tokens. This option might + * be necessary if an attribute name conflicts with a DynamoDB reserved word. For example, + * the following KeyConditionExpression parameter causes an error because + * Size is a reserved word:

+ *
    + *
  • + *

    + * Size = :myval + *

    + *
  • + *
+ *

To work around this, define a placeholder (such a #S) to represent the + * attribute name Size. KeyConditionExpression then is as + * follows:

+ *
    + *
  • + *

    + * #S = :myval + *

    + *
  • + *
+ *

For a list of reserved words, see Reserved Words + * in the Amazon DynamoDB Developer Guide.

+ *

For more information on ExpressionAttributeNames and + * ExpressionAttributeValues, see Using + * Placeholders for Attribute Names and Values in the Amazon DynamoDB + * Developer Guide.

+ * @public + */ + KeyConditionExpression?: string | undefined; + /** + *

One or more substitution tokens for attribute names in an expression. The following + * are some use cases for using ExpressionAttributeNames:

+ *
    + *
  • + *

    To access an attribute whose name conflicts with a DynamoDB reserved + * word.

    + *
  • + *
  • + *

    To create a placeholder for repeating occurrences of an attribute name in an + * expression.

    + *
  • + *
  • + *

    To prevent special characters in an attribute name from being misinterpreted + * in an expression.

    + *
  • + *
+ *

Use the # character in an expression to dereference + * an attribute name. For example, consider the following attribute name:

+ *
    + *
  • + *

    + * Percentile + *

    + *
  • + *
+ *

The name of this attribute conflicts with a reserved word, so it cannot be used + * directly in an expression. (For the complete list of reserved words, see Reserved Words in the Amazon DynamoDB Developer + * Guide). To work around this, you could specify the following for + * ExpressionAttributeNames:

+ *
    + *
  • + *

    + * \{"#P":"Percentile"\} + *

    + *
  • + *
+ *

You could then use this substitution in an expression, as in this example:

+ *
    + *
  • + *

    + * #P = :val + *

    + *
  • + *
+ * + *

Tokens that begin with the : character are + * expression attribute values, which are placeholders for the + * actual value at runtime.

+ *
+ *

For more information on expression attribute names, see Specifying Item Attributes in the Amazon DynamoDB Developer + * Guide.

+ * @public + */ + ExpressionAttributeNames?: Record | undefined; + /** + *

One or more values that can be substituted in an expression.

+ *

Use the : (colon) character in an expression to + * dereference an attribute value. For example, suppose that you wanted to check whether + * the value of the ProductStatus attribute was one of the following:

+ *

+ * Available | Backordered | Discontinued + *

+ *

You would first need to specify ExpressionAttributeValues as + * follows:

+ *

+ * \{ ":avail":\{"S":"Available"\}, ":back":\{"S":"Backordered"\}, + * ":disc":\{"S":"Discontinued"\} \} + *

+ *

You could then use these values in an expression, such as this:

+ *

+ * ProductStatus IN (:avail, :back, :disc) + *

+ *

For more information on expression attribute values, see Specifying Conditions in the Amazon DynamoDB Developer + * Guide.

+ * @public + */ + ExpressionAttributeValues?: Record | undefined; +} +/** + *

Represents the output of a BatchWriteItem operation.

+ * @public + */ +export interface BatchWriteItemOutput { + /** + *

A map of tables and requests against those tables that were not processed. The + * UnprocessedItems value is in the same form as + * RequestItems, so you can provide this value directly to a subsequent + * BatchWriteItem operation. For more information, see + * RequestItems in the Request Parameters section.

+ *

Each UnprocessedItems entry consists of a table name or table ARN + * and, for that table, a list of operations to perform (DeleteRequest or + * PutRequest).

+ *
    + *
  • + *

    + * DeleteRequest - Perform a DeleteItem operation on the + * specified item. The item to be deleted is identified by a Key + * subelement:

    + *
      + *
    • + *

      + * Key - A map of primary key attribute values that uniquely + * identify the item. Each entry in this map consists of an attribute name + * and an attribute value.

      + *
    • + *
    + *
  • + *
  • + *

    + * PutRequest - Perform a PutItem operation on the + * specified item. The item to be put is identified by an Item + * subelement:

    + *
      + *
    • + *

      + * Item - A map of attributes and their values. Each entry in + * this map consists of an attribute name and an attribute value. Attribute + * values must not be null; string and binary type attributes must have + * lengths greater than zero; and set type attributes must not be empty. + * Requests that contain empty values will be rejected with a + * ValidationException exception.

      + *

      If you specify any attributes that are part of an index key, then the + * data types for those attributes must match those of the schema in the + * table's attribute definition.

      + *
    • + *
    + *
  • + *
+ *

If there are no unprocessed items remaining, the response contains an empty + * UnprocessedItems map.

+ * @public + */ + UnprocessedItems?: Record | undefined; + /** + *

A list of tables that were processed by BatchWriteItem and, for each + * table, information about any item collections that were affected by individual + * DeleteItem or PutItem operations.

+ *

Each entry consists of the following subelements:

+ *
    + *
  • + *

    + * ItemCollectionKey - The partition key value of the item collection. + * This is the same as the partition key value of the item.

    + *
  • + *
  • + *

    + * SizeEstimateRangeGB - An estimate of item collection size, + * expressed in GB. This is a two-element array containing a lower bound and an + * upper bound for the estimate. The estimate includes the size of all the items in + * the table, plus the size of all attributes projected into all of the local + * secondary indexes on the table. Use this estimate to measure whether a local + * secondary index is approaching its size limit.

    + *

    The estimate is subject to change over time; therefore, do not rely on the + * precision or accuracy of the estimate.

    + *
  • + *
+ * @public + */ + ItemCollectionMetrics?: Record | undefined; + /** + *

The capacity units consumed by the entire BatchWriteItem + * operation.

+ *

Each element consists of:

+ *
    + *
  • + *

    + * TableName - The table that consumed the provisioned + * throughput.

    + *
  • + *
  • + *

    + * CapacityUnits - The total number of capacity units consumed.

    + *
  • + *
+ * @public + */ + ConsumedCapacity?: ConsumedCapacity[] | undefined; +} +/** + *

Represents the input of an UpdateItem operation.

+ * @public + */ +export interface UpdateItemInput { + /** + *

The name of the table containing the item to update. You can also provide the + * Amazon Resource Name (ARN) of the table in this parameter.

+ * @public + */ + TableName: string | undefined; + /** + *

The primary key of the item to be updated. Each element consists of an attribute name + * and a value for that attribute.

+ *

For the primary key, you must provide all of the attributes. For example, with a + * simple primary key, you only need to provide a value for the partition key. For a + * composite primary key, you must provide values for both the partition key and the sort + * key.

+ * @public + */ + Key: Record | undefined; + /** + *

This is a legacy parameter. Use UpdateExpression instead. For more + * information, see AttributeUpdates in the Amazon DynamoDB Developer + * Guide.

+ * @public + */ + AttributeUpdates?: Record | undefined; + /** + *

This is a legacy parameter. Use ConditionExpression instead. For more + * information, see Expected in the Amazon DynamoDB Developer + * Guide.

+ * @public + */ + Expected?: Record | undefined; + /** + *

This is a legacy parameter. Use ConditionExpression instead. For more + * information, see ConditionalOperator in the Amazon DynamoDB Developer + * Guide.

+ * @public + */ + ConditionalOperator?: ConditionalOperator | undefined; + /** + *

Use ReturnValues if you want to get the item attributes as they appear + * before or after they are successfully updated. For UpdateItem, the valid + * values are:

+ *
    + *
  • + *

    + * NONE - If ReturnValues is not specified, or if its + * value is NONE, then nothing is returned. (This setting is the + * default for ReturnValues.)

    + *
  • + *
  • + *

    + * ALL_OLD - Returns all of the attributes of the item, as they + * appeared before the UpdateItem operation.

    + *
  • + *
  • + *

    + * UPDATED_OLD - Returns only the updated attributes, as they appeared + * before the UpdateItem operation.

    + *
  • + *
  • + *

    + * ALL_NEW - Returns all of the attributes of the item, as they appear + * after the UpdateItem operation.

    + *
  • + *
  • + *

    + * UPDATED_NEW - Returns only the updated attributes, as they appear + * after the UpdateItem operation.

    + *
  • + *
+ *

There is no additional cost associated with requesting a return value aside from the + * small network and processing overhead of receiving a larger response. No read capacity + * units are consumed.

+ *

The values returned are strongly consistent.

+ * @public + */ + ReturnValues?: ReturnValue | undefined; + /** + *

Determines the level of detail about either provisioned or on-demand throughput + * consumption that is returned in the response:

+ *
    + *
  • + *

    + * INDEXES - The response includes the aggregate + * ConsumedCapacity for the operation, together with + * ConsumedCapacity for each table and secondary index that was + * accessed.

    + *

    Note that some operations, such as GetItem and + * BatchGetItem, do not access any indexes at all. In these cases, + * specifying INDEXES will only return ConsumedCapacity + * information for table(s).

    + *
  • + *
  • + *

    + * TOTAL - The response includes only the aggregate + * ConsumedCapacity for the operation.

    + *
  • + *
  • + *

    + * NONE - No ConsumedCapacity details are included in the + * response.

    + *
  • + *
+ * @public + */ + ReturnConsumedCapacity?: ReturnConsumedCapacity | undefined; + /** + *

Determines whether item collection metrics are returned. If set to SIZE, + * the response includes statistics about item collections, if any, that were modified + * during the operation are returned in the response. If set to NONE (the + * default), no statistics are returned.

+ * @public + */ + ReturnItemCollectionMetrics?: ReturnItemCollectionMetrics | undefined; + /** + *

An expression that defines one or more attributes to be updated, the action to be + * performed on them, and new values for them.

+ *

The following action values are available for UpdateExpression.

+ *
    + *
  • + *

    + * SET - Adds one or more attributes and values to an item. If any of + * these attributes already exist, they are replaced by the new values. You can + * also use SET to add or subtract from an attribute that is of type + * Number. For example: SET myNum = myNum + :val + *

    + *

    + * SET supports the following functions:

    + *
      + *
    • + *

      + * if_not_exists (path, operand) - if the item does not + * contain an attribute at the specified path, then + * if_not_exists evaluates to operand; otherwise, it + * evaluates to path. You can use this function to avoid overwriting an + * attribute that may already be present in the item.

      + *
    • + *
    • + *

      + * list_append (operand, operand) - evaluates to a list with a + * new element added to it. You can append the new element to the start or + * the end of the list by reversing the order of the operands.

      + *
    • + *
    + *

    These function names are case-sensitive.

    + *
  • + *
  • + *

    + * REMOVE - Removes one or more attributes from an item.

    + *
  • + *
  • + *

    + * ADD - Adds the specified value to the item, if the attribute does + * not already exist. If the attribute does exist, then the behavior of + * ADD depends on the data type of the attribute:

    + *
      + *
    • + *

      If the existing attribute is a number, and if Value is + * also a number, then Value is mathematically added to the + * existing attribute. If Value is a negative number, then it + * is subtracted from the existing attribute.

      + * + *

      If you use ADD to increment or decrement a number + * value for an item that doesn't exist before the update, DynamoDB + * uses 0 as the initial value.

      + *

      Similarly, if you use ADD for an existing item to + * increment or decrement an attribute value that doesn't exist before + * the update, DynamoDB uses 0 as the initial value. For + * example, suppose that the item you want to update doesn't have an + * attribute named itemcount, but you decide to + * ADD the number 3 to this attribute + * anyway. DynamoDB will create the itemcount attribute, + * set its initial value to 0, and finally add + * 3 to it. The result will be a new + * itemcount attribute in the item, with a value of + * 3.

      + *
      + *
    • + *
    • + *

      If the existing data type is a set and if Value is also a + * set, then Value is added to the existing set. For example, + * if the attribute value is the set [1,2], and the + * ADD action specified [3], then the final + * attribute value is [1,2,3]. An error occurs if an + * ADD action is specified for a set attribute and the + * attribute type specified does not match the existing set type.

      + *

      Both sets must have the same primitive data type. For example, if the + * existing data type is a set of strings, the Value must also + * be a set of strings.

      + *
    • + *
    + * + *

    The ADD action only supports Number and set data types. In + * addition, ADD can only be used on top-level attributes, not + * nested attributes.

    + *
    + *
  • + *
  • + *

    + * DELETE - Deletes an element from a set.

    + *

    If a set of values is specified, then those values are subtracted from the old + * set. For example, if the attribute value was the set [a,b,c] and + * the DELETE action specifies [a,c], then the final + * attribute value is [b]. Specifying an empty set is an error.

    + * + *

    The DELETE action only supports set data types. In addition, + * DELETE can only be used on top-level attributes, not nested + * attributes.

    + *
    + *
  • + *
+ *

You can have many actions in a single expression, such as the following: SET + * a=:value1, b=:value2 DELETE :value3, :value4, :value5 + *

+ *

For more information on update expressions, see Modifying + * Items and Attributes in the Amazon DynamoDB Developer + * Guide.

+ * @public + */ + UpdateExpression?: string | undefined; + /** + *

A condition that must be satisfied in order for a conditional update to + * succeed.

+ *

An expression can contain any of the following:

+ *
    + *
  • + *

    Functions: attribute_exists | attribute_not_exists | attribute_type | + * contains | begins_with | size + *

    + *

    These function names are case-sensitive.

    + *
  • + *
  • + *

    Comparison operators: = | <> | + * < | > | <= | >= | + * BETWEEN | IN + *

    + *
  • + *
  • + *

    Logical operators: AND | OR | NOT + *

    + *
  • + *
+ *

For more information about condition expressions, see Specifying Conditions in the Amazon DynamoDB Developer + * Guide.

+ * @public + */ + ConditionExpression?: string | undefined; + /** + *

One or more substitution tokens for attribute names in an expression. The following + * are some use cases for using ExpressionAttributeNames:

+ *
    + *
  • + *

    To access an attribute whose name conflicts with a DynamoDB reserved + * word.

    + *
  • + *
  • + *

    To create a placeholder for repeating occurrences of an attribute name in an + * expression.

    + *
  • + *
  • + *

    To prevent special characters in an attribute name from being misinterpreted + * in an expression.

    + *
  • + *
+ *

Use the # character in an expression to dereference + * an attribute name. For example, consider the following attribute name:

+ *
    + *
  • + *

    + * Percentile + *

    + *
  • + *
+ *

The name of this attribute conflicts with a reserved word, so it cannot be used + * directly in an expression. (For the complete list of reserved words, see Reserved Words in the Amazon DynamoDB Developer + * Guide.) To work around this, you could specify the following for + * ExpressionAttributeNames:

+ *
    + *
  • + *

    + * \{"#P":"Percentile"\} + *

    + *
  • + *
+ *

You could then use this substitution in an expression, as in this example:

+ *
    + *
  • + *

    + * #P = :val + *

    + *
  • + *
+ * + *

Tokens that begin with the : character are + * expression attribute values, which are placeholders for the + * actual value at runtime.

+ *
+ *

For more information about expression attribute names, see Specifying Item Attributes in the Amazon DynamoDB Developer + * Guide.

+ * @public + */ + ExpressionAttributeNames?: Record | undefined; + /** + *

One or more values that can be substituted in an expression.

+ *

Use the : (colon) character in an expression to + * dereference an attribute value. For example, suppose that you wanted to check whether + * the value of the ProductStatus attribute was one of the following:

+ *

+ * Available | Backordered | Discontinued + *

+ *

You would first need to specify ExpressionAttributeValues as + * follows:

+ *

+ * \{ ":avail":\{"S":"Available"\}, ":back":\{"S":"Backordered"\}, + * ":disc":\{"S":"Discontinued"\} \} + *

+ *

You could then use these values in an expression, such as this:

+ *

+ * ProductStatus IN (:avail, :back, :disc) + *

+ *

For more information on expression attribute values, see Condition Expressions in the Amazon DynamoDB Developer + * Guide.

+ * @public + */ + ExpressionAttributeValues?: Record | undefined; + /** + *

An optional parameter that returns the item attributes for an UpdateItem + * operation that failed a condition check.

+ *

There is no additional cost associated with requesting a return value aside from the + * small network and processing overhead of receiving a larger response. No read capacity + * units are consumed.

+ * @public + */ + ReturnValuesOnConditionCheckFailure?: ReturnValuesOnConditionCheckFailure | undefined; +} +/** + *

A list of requests that can perform update, put, delete, or check operations on + * multiple items in one or more tables atomically.

+ * @public + */ +export interface TransactWriteItem { + /** + *

A request to perform a check item operation.

+ * @public + */ + ConditionCheck?: ConditionCheck | undefined; + /** + *

A request to perform a PutItem operation.

+ * @public + */ + Put?: Put | undefined; + /** + *

A request to perform a DeleteItem operation.

+ * @public + */ + Delete?: Delete | undefined; + /** + *

A request to perform an UpdateItem operation.

+ * @public + */ + Update?: Update | undefined; +} +/** + * @public + */ +export interface TransactWriteItemsInput { + /** + *

An ordered array of up to 100 TransactWriteItem objects, each of which + * contains a ConditionCheck, Put, Update, or + * Delete object. These can operate on items in different tables, but the + * tables must reside in the same Amazon Web Services account and Region, and no two of them + * can operate on the same item.

+ * @public + */ + TransactItems: TransactWriteItem[] | undefined; + /** + *

Determines the level of detail about either provisioned or on-demand throughput + * consumption that is returned in the response:

+ *
    + *
  • + *

    + * INDEXES - The response includes the aggregate + * ConsumedCapacity for the operation, together with + * ConsumedCapacity for each table and secondary index that was + * accessed.

    + *

    Note that some operations, such as GetItem and + * BatchGetItem, do not access any indexes at all. In these cases, + * specifying INDEXES will only return ConsumedCapacity + * information for table(s).

    + *
  • + *
  • + *

    + * TOTAL - The response includes only the aggregate + * ConsumedCapacity for the operation.

    + *
  • + *
  • + *

    + * NONE - No ConsumedCapacity details are included in the + * response.

    + *
  • + *
+ * @public + */ + ReturnConsumedCapacity?: ReturnConsumedCapacity | undefined; + /** + *

Determines whether item collection metrics are returned. If set to SIZE, + * the response includes statistics about item collections (if any), that were modified + * during the operation and are returned in the response. If set to NONE (the + * default), no statistics are returned.

+ * @public + */ + ReturnItemCollectionMetrics?: ReturnItemCollectionMetrics | undefined; + /** + *

Providing a ClientRequestToken makes the call to + * TransactWriteItems idempotent, meaning that multiple identical calls + * have the same effect as one single call.

+ *

Although multiple identical calls using the same client request token produce the same + * result on the server (no side effects), the responses to the calls might not be the + * same. If the ReturnConsumedCapacity parameter is set, then the initial + * TransactWriteItems call returns the amount of write capacity units + * consumed in making the changes. Subsequent TransactWriteItems calls with + * the same client token return the number of read capacity units consumed in reading the + * item.

+ *

A client request token is valid for 10 minutes after the first request that uses it is + * completed. After 10 minutes, any request with the same client token is treated as a new + * request. Do not resubmit the same request with the same client token for more than 10 + * minutes, or the result might not be idempotent.

+ *

If you submit a request with the same client token but a change in other parameters + * within the 10-minute idempotency window, DynamoDB returns an + * IdempotentParameterMismatch exception.

+ * @public + */ + ClientRequestToken?: string | undefined; +} diff --git a/amplify/functions/deleteDocument/node_modules/@aws-sdk/client-dynamodb/dist-types/pagination/Interfaces.d.ts b/amplify/functions/deleteDocument/node_modules/@aws-sdk/client-dynamodb/dist-types/pagination/Interfaces.d.ts new file mode 100644 index 0000000..b27919e --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@aws-sdk/client-dynamodb/dist-types/pagination/Interfaces.d.ts @@ -0,0 +1,8 @@ +import { PaginationConfiguration } from "@smithy/types"; +import { DynamoDBClient } from "../DynamoDBClient"; +/** + * @public + */ +export interface DynamoDBPaginationConfiguration extends PaginationConfiguration { + client: DynamoDBClient; +} diff --git a/amplify/functions/deleteDocument/node_modules/@aws-sdk/client-dynamodb/dist-types/pagination/ListContributorInsightsPaginator.d.ts b/amplify/functions/deleteDocument/node_modules/@aws-sdk/client-dynamodb/dist-types/pagination/ListContributorInsightsPaginator.d.ts new file mode 100644 index 0000000..2ca65b1 --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@aws-sdk/client-dynamodb/dist-types/pagination/ListContributorInsightsPaginator.d.ts @@ -0,0 +1,7 @@ +import { Paginator } from "@smithy/types"; +import { ListContributorInsightsCommandInput, ListContributorInsightsCommandOutput } from "../commands/ListContributorInsightsCommand"; +import { DynamoDBPaginationConfiguration } from "./Interfaces"; +/** + * @public + */ +export declare const paginateListContributorInsights: (config: DynamoDBPaginationConfiguration, input: ListContributorInsightsCommandInput, ...rest: any[]) => Paginator; diff --git a/amplify/functions/deleteDocument/node_modules/@aws-sdk/client-dynamodb/dist-types/pagination/ListExportsPaginator.d.ts b/amplify/functions/deleteDocument/node_modules/@aws-sdk/client-dynamodb/dist-types/pagination/ListExportsPaginator.d.ts new file mode 100644 index 0000000..304892a --- /dev/null +++ 
b/amplify/functions/deleteDocument/node_modules/@aws-sdk/client-dynamodb/dist-types/pagination/ListExportsPaginator.d.ts @@ -0,0 +1,7 @@ +import { Paginator } from "@smithy/types"; +import { ListExportsCommandInput, ListExportsCommandOutput } from "../commands/ListExportsCommand"; +import { DynamoDBPaginationConfiguration } from "./Interfaces"; +/** + * @public + */ +export declare const paginateListExports: (config: DynamoDBPaginationConfiguration, input: ListExportsCommandInput, ...rest: any[]) => Paginator; diff --git a/amplify/functions/deleteDocument/node_modules/@aws-sdk/client-dynamodb/dist-types/pagination/ListImportsPaginator.d.ts b/amplify/functions/deleteDocument/node_modules/@aws-sdk/client-dynamodb/dist-types/pagination/ListImportsPaginator.d.ts new file mode 100644 index 0000000..0a2639c --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@aws-sdk/client-dynamodb/dist-types/pagination/ListImportsPaginator.d.ts @@ -0,0 +1,7 @@ +import { Paginator } from "@smithy/types"; +import { ListImportsCommandInput, ListImportsCommandOutput } from "../commands/ListImportsCommand"; +import { DynamoDBPaginationConfiguration } from "./Interfaces"; +/** + * @public + */ +export declare const paginateListImports: (config: DynamoDBPaginationConfiguration, input: ListImportsCommandInput, ...rest: any[]) => Paginator; diff --git a/amplify/functions/deleteDocument/node_modules/@aws-sdk/client-dynamodb/dist-types/pagination/ListTablesPaginator.d.ts b/amplify/functions/deleteDocument/node_modules/@aws-sdk/client-dynamodb/dist-types/pagination/ListTablesPaginator.d.ts new file mode 100644 index 0000000..38cff29 --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@aws-sdk/client-dynamodb/dist-types/pagination/ListTablesPaginator.d.ts @@ -0,0 +1,7 @@ +import { Paginator } from "@smithy/types"; +import { ListTablesCommandInput, ListTablesCommandOutput } from "../commands/ListTablesCommand"; +import { DynamoDBPaginationConfiguration } from 
"./Interfaces"; +/** + * @public + */ +export declare const paginateListTables: (config: DynamoDBPaginationConfiguration, input: ListTablesCommandInput, ...rest: any[]) => Paginator; diff --git a/amplify/functions/deleteDocument/node_modules/@aws-sdk/client-dynamodb/dist-types/pagination/QueryPaginator.d.ts b/amplify/functions/deleteDocument/node_modules/@aws-sdk/client-dynamodb/dist-types/pagination/QueryPaginator.d.ts new file mode 100644 index 0000000..d6e9c31 --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@aws-sdk/client-dynamodb/dist-types/pagination/QueryPaginator.d.ts @@ -0,0 +1,7 @@ +import { Paginator } from "@smithy/types"; +import { QueryCommandInput, QueryCommandOutput } from "../commands/QueryCommand"; +import { DynamoDBPaginationConfiguration } from "./Interfaces"; +/** + * @public + */ +export declare const paginateQuery: (config: DynamoDBPaginationConfiguration, input: QueryCommandInput, ...rest: any[]) => Paginator; diff --git a/amplify/functions/deleteDocument/node_modules/@aws-sdk/client-dynamodb/dist-types/pagination/ScanPaginator.d.ts b/amplify/functions/deleteDocument/node_modules/@aws-sdk/client-dynamodb/dist-types/pagination/ScanPaginator.d.ts new file mode 100644 index 0000000..4902f31 --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@aws-sdk/client-dynamodb/dist-types/pagination/ScanPaginator.d.ts @@ -0,0 +1,7 @@ +import { Paginator } from "@smithy/types"; +import { ScanCommandInput, ScanCommandOutput } from "../commands/ScanCommand"; +import { DynamoDBPaginationConfiguration } from "./Interfaces"; +/** + * @public + */ +export declare const paginateScan: (config: DynamoDBPaginationConfiguration, input: ScanCommandInput, ...rest: any[]) => Paginator; diff --git a/amplify/functions/deleteDocument/node_modules/@aws-sdk/client-dynamodb/dist-types/pagination/index.d.ts b/amplify/functions/deleteDocument/node_modules/@aws-sdk/client-dynamodb/dist-types/pagination/index.d.ts new file mode 100644 index 
0000000..a6dfcd0 --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@aws-sdk/client-dynamodb/dist-types/pagination/index.d.ts @@ -0,0 +1,7 @@ +export * from "./Interfaces"; +export * from "./ListContributorInsightsPaginator"; +export * from "./ListExportsPaginator"; +export * from "./ListImportsPaginator"; +export * from "./ListTablesPaginator"; +export * from "./QueryPaginator"; +export * from "./ScanPaginator"; diff --git a/amplify/functions/deleteDocument/node_modules/@aws-sdk/client-dynamodb/dist-types/protocols/Aws_json1_0.d.ts b/amplify/functions/deleteDocument/node_modules/@aws-sdk/client-dynamodb/dist-types/protocols/Aws_json1_0.d.ts new file mode 100644 index 0000000..963dc98 --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@aws-sdk/client-dynamodb/dist-types/protocols/Aws_json1_0.d.ts @@ -0,0 +1,515 @@ +import { HttpRequest as __HttpRequest, HttpResponse as __HttpResponse } from "@smithy/protocol-http"; +import { SerdeContext as __SerdeContext } from "@smithy/types"; +import { BatchExecuteStatementCommandInput, BatchExecuteStatementCommandOutput } from "../commands/BatchExecuteStatementCommand"; +import { BatchGetItemCommandInput, BatchGetItemCommandOutput } from "../commands/BatchGetItemCommand"; +import { BatchWriteItemCommandInput, BatchWriteItemCommandOutput } from "../commands/BatchWriteItemCommand"; +import { CreateBackupCommandInput, CreateBackupCommandOutput } from "../commands/CreateBackupCommand"; +import { CreateGlobalTableCommandInput, CreateGlobalTableCommandOutput } from "../commands/CreateGlobalTableCommand"; +import { CreateTableCommandInput, CreateTableCommandOutput } from "../commands/CreateTableCommand"; +import { DeleteBackupCommandInput, DeleteBackupCommandOutput } from "../commands/DeleteBackupCommand"; +import { DeleteItemCommandInput, DeleteItemCommandOutput } from "../commands/DeleteItemCommand"; +import { DeleteResourcePolicyCommandInput, DeleteResourcePolicyCommandOutput } from 
"../commands/DeleteResourcePolicyCommand"; +import { DeleteTableCommandInput, DeleteTableCommandOutput } from "../commands/DeleteTableCommand"; +import { DescribeBackupCommandInput, DescribeBackupCommandOutput } from "../commands/DescribeBackupCommand"; +import { DescribeContinuousBackupsCommandInput, DescribeContinuousBackupsCommandOutput } from "../commands/DescribeContinuousBackupsCommand"; +import { DescribeContributorInsightsCommandInput, DescribeContributorInsightsCommandOutput } from "../commands/DescribeContributorInsightsCommand"; +import { DescribeEndpointsCommandInput, DescribeEndpointsCommandOutput } from "../commands/DescribeEndpointsCommand"; +import { DescribeExportCommandInput, DescribeExportCommandOutput } from "../commands/DescribeExportCommand"; +import { DescribeGlobalTableCommandInput, DescribeGlobalTableCommandOutput } from "../commands/DescribeGlobalTableCommand"; +import { DescribeGlobalTableSettingsCommandInput, DescribeGlobalTableSettingsCommandOutput } from "../commands/DescribeGlobalTableSettingsCommand"; +import { DescribeImportCommandInput, DescribeImportCommandOutput } from "../commands/DescribeImportCommand"; +import { DescribeKinesisStreamingDestinationCommandInput, DescribeKinesisStreamingDestinationCommandOutput } from "../commands/DescribeKinesisStreamingDestinationCommand"; +import { DescribeLimitsCommandInput, DescribeLimitsCommandOutput } from "../commands/DescribeLimitsCommand"; +import { DescribeTableCommandInput, DescribeTableCommandOutput } from "../commands/DescribeTableCommand"; +import { DescribeTableReplicaAutoScalingCommandInput, DescribeTableReplicaAutoScalingCommandOutput } from "../commands/DescribeTableReplicaAutoScalingCommand"; +import { DescribeTimeToLiveCommandInput, DescribeTimeToLiveCommandOutput } from "../commands/DescribeTimeToLiveCommand"; +import { DisableKinesisStreamingDestinationCommandInput, DisableKinesisStreamingDestinationCommandOutput } from 
"../commands/DisableKinesisStreamingDestinationCommand"; +import { EnableKinesisStreamingDestinationCommandInput, EnableKinesisStreamingDestinationCommandOutput } from "../commands/EnableKinesisStreamingDestinationCommand"; +import { ExecuteStatementCommandInput, ExecuteStatementCommandOutput } from "../commands/ExecuteStatementCommand"; +import { ExecuteTransactionCommandInput, ExecuteTransactionCommandOutput } from "../commands/ExecuteTransactionCommand"; +import { ExportTableToPointInTimeCommandInput, ExportTableToPointInTimeCommandOutput } from "../commands/ExportTableToPointInTimeCommand"; +import { GetItemCommandInput, GetItemCommandOutput } from "../commands/GetItemCommand"; +import { GetResourcePolicyCommandInput, GetResourcePolicyCommandOutput } from "../commands/GetResourcePolicyCommand"; +import { ImportTableCommandInput, ImportTableCommandOutput } from "../commands/ImportTableCommand"; +import { ListBackupsCommandInput, ListBackupsCommandOutput } from "../commands/ListBackupsCommand"; +import { ListContributorInsightsCommandInput, ListContributorInsightsCommandOutput } from "../commands/ListContributorInsightsCommand"; +import { ListExportsCommandInput, ListExportsCommandOutput } from "../commands/ListExportsCommand"; +import { ListGlobalTablesCommandInput, ListGlobalTablesCommandOutput } from "../commands/ListGlobalTablesCommand"; +import { ListImportsCommandInput, ListImportsCommandOutput } from "../commands/ListImportsCommand"; +import { ListTablesCommandInput, ListTablesCommandOutput } from "../commands/ListTablesCommand"; +import { ListTagsOfResourceCommandInput, ListTagsOfResourceCommandOutput } from "../commands/ListTagsOfResourceCommand"; +import { PutItemCommandInput, PutItemCommandOutput } from "../commands/PutItemCommand"; +import { PutResourcePolicyCommandInput, PutResourcePolicyCommandOutput } from "../commands/PutResourcePolicyCommand"; +import { QueryCommandInput, QueryCommandOutput } from "../commands/QueryCommand"; +import { 
RestoreTableFromBackupCommandInput, RestoreTableFromBackupCommandOutput } from "../commands/RestoreTableFromBackupCommand"; +import { RestoreTableToPointInTimeCommandInput, RestoreTableToPointInTimeCommandOutput } from "../commands/RestoreTableToPointInTimeCommand"; +import { ScanCommandInput, ScanCommandOutput } from "../commands/ScanCommand"; +import { TagResourceCommandInput, TagResourceCommandOutput } from "../commands/TagResourceCommand"; +import { TransactGetItemsCommandInput, TransactGetItemsCommandOutput } from "../commands/TransactGetItemsCommand"; +import { TransactWriteItemsCommandInput, TransactWriteItemsCommandOutput } from "../commands/TransactWriteItemsCommand"; +import { UntagResourceCommandInput, UntagResourceCommandOutput } from "../commands/UntagResourceCommand"; +import { UpdateContinuousBackupsCommandInput, UpdateContinuousBackupsCommandOutput } from "../commands/UpdateContinuousBackupsCommand"; +import { UpdateContributorInsightsCommandInput, UpdateContributorInsightsCommandOutput } from "../commands/UpdateContributorInsightsCommand"; +import { UpdateGlobalTableCommandInput, UpdateGlobalTableCommandOutput } from "../commands/UpdateGlobalTableCommand"; +import { UpdateGlobalTableSettingsCommandInput, UpdateGlobalTableSettingsCommandOutput } from "../commands/UpdateGlobalTableSettingsCommand"; +import { UpdateItemCommandInput, UpdateItemCommandOutput } from "../commands/UpdateItemCommand"; +import { UpdateKinesisStreamingDestinationCommandInput, UpdateKinesisStreamingDestinationCommandOutput } from "../commands/UpdateKinesisStreamingDestinationCommand"; +import { UpdateTableCommandInput, UpdateTableCommandOutput } from "../commands/UpdateTableCommand"; +import { UpdateTableReplicaAutoScalingCommandInput, UpdateTableReplicaAutoScalingCommandOutput } from "../commands/UpdateTableReplicaAutoScalingCommand"; +import { UpdateTimeToLiveCommandInput, UpdateTimeToLiveCommandOutput } from "../commands/UpdateTimeToLiveCommand"; +/** + * 
serializeAws_json1_0BatchExecuteStatementCommand + */ +export declare const se_BatchExecuteStatementCommand: (input: BatchExecuteStatementCommandInput, context: __SerdeContext) => Promise<__HttpRequest>; +/** + * serializeAws_json1_0BatchGetItemCommand + */ +export declare const se_BatchGetItemCommand: (input: BatchGetItemCommandInput, context: __SerdeContext) => Promise<__HttpRequest>; +/** + * serializeAws_json1_0BatchWriteItemCommand + */ +export declare const se_BatchWriteItemCommand: (input: BatchWriteItemCommandInput, context: __SerdeContext) => Promise<__HttpRequest>; +/** + * serializeAws_json1_0CreateBackupCommand + */ +export declare const se_CreateBackupCommand: (input: CreateBackupCommandInput, context: __SerdeContext) => Promise<__HttpRequest>; +/** + * serializeAws_json1_0CreateGlobalTableCommand + */ +export declare const se_CreateGlobalTableCommand: (input: CreateGlobalTableCommandInput, context: __SerdeContext) => Promise<__HttpRequest>; +/** + * serializeAws_json1_0CreateTableCommand + */ +export declare const se_CreateTableCommand: (input: CreateTableCommandInput, context: __SerdeContext) => Promise<__HttpRequest>; +/** + * serializeAws_json1_0DeleteBackupCommand + */ +export declare const se_DeleteBackupCommand: (input: DeleteBackupCommandInput, context: __SerdeContext) => Promise<__HttpRequest>; +/** + * serializeAws_json1_0DeleteItemCommand + */ +export declare const se_DeleteItemCommand: (input: DeleteItemCommandInput, context: __SerdeContext) => Promise<__HttpRequest>; +/** + * serializeAws_json1_0DeleteResourcePolicyCommand + */ +export declare const se_DeleteResourcePolicyCommand: (input: DeleteResourcePolicyCommandInput, context: __SerdeContext) => Promise<__HttpRequest>; +/** + * serializeAws_json1_0DeleteTableCommand + */ +export declare const se_DeleteTableCommand: (input: DeleteTableCommandInput, context: __SerdeContext) => Promise<__HttpRequest>; +/** + * serializeAws_json1_0DescribeBackupCommand + */ +export declare const 
se_DescribeBackupCommand: (input: DescribeBackupCommandInput, context: __SerdeContext) => Promise<__HttpRequest>; +/** + * serializeAws_json1_0DescribeContinuousBackupsCommand + */ +export declare const se_DescribeContinuousBackupsCommand: (input: DescribeContinuousBackupsCommandInput, context: __SerdeContext) => Promise<__HttpRequest>; +/** + * serializeAws_json1_0DescribeContributorInsightsCommand + */ +export declare const se_DescribeContributorInsightsCommand: (input: DescribeContributorInsightsCommandInput, context: __SerdeContext) => Promise<__HttpRequest>; +/** + * serializeAws_json1_0DescribeEndpointsCommand + */ +export declare const se_DescribeEndpointsCommand: (input: DescribeEndpointsCommandInput, context: __SerdeContext) => Promise<__HttpRequest>; +/** + * serializeAws_json1_0DescribeExportCommand + */ +export declare const se_DescribeExportCommand: (input: DescribeExportCommandInput, context: __SerdeContext) => Promise<__HttpRequest>; +/** + * serializeAws_json1_0DescribeGlobalTableCommand + */ +export declare const se_DescribeGlobalTableCommand: (input: DescribeGlobalTableCommandInput, context: __SerdeContext) => Promise<__HttpRequest>; +/** + * serializeAws_json1_0DescribeGlobalTableSettingsCommand + */ +export declare const se_DescribeGlobalTableSettingsCommand: (input: DescribeGlobalTableSettingsCommandInput, context: __SerdeContext) => Promise<__HttpRequest>; +/** + * serializeAws_json1_0DescribeImportCommand + */ +export declare const se_DescribeImportCommand: (input: DescribeImportCommandInput, context: __SerdeContext) => Promise<__HttpRequest>; +/** + * serializeAws_json1_0DescribeKinesisStreamingDestinationCommand + */ +export declare const se_DescribeKinesisStreamingDestinationCommand: (input: DescribeKinesisStreamingDestinationCommandInput, context: __SerdeContext) => Promise<__HttpRequest>; +/** + * serializeAws_json1_0DescribeLimitsCommand + */ +export declare const se_DescribeLimitsCommand: (input: DescribeLimitsCommandInput, context: 
__SerdeContext) => Promise<__HttpRequest>; +/** + * serializeAws_json1_0DescribeTableCommand + */ +export declare const se_DescribeTableCommand: (input: DescribeTableCommandInput, context: __SerdeContext) => Promise<__HttpRequest>; +/** + * serializeAws_json1_0DescribeTableReplicaAutoScalingCommand + */ +export declare const se_DescribeTableReplicaAutoScalingCommand: (input: DescribeTableReplicaAutoScalingCommandInput, context: __SerdeContext) => Promise<__HttpRequest>; +/** + * serializeAws_json1_0DescribeTimeToLiveCommand + */ +export declare const se_DescribeTimeToLiveCommand: (input: DescribeTimeToLiveCommandInput, context: __SerdeContext) => Promise<__HttpRequest>; +/** + * serializeAws_json1_0DisableKinesisStreamingDestinationCommand + */ +export declare const se_DisableKinesisStreamingDestinationCommand: (input: DisableKinesisStreamingDestinationCommandInput, context: __SerdeContext) => Promise<__HttpRequest>; +/** + * serializeAws_json1_0EnableKinesisStreamingDestinationCommand + */ +export declare const se_EnableKinesisStreamingDestinationCommand: (input: EnableKinesisStreamingDestinationCommandInput, context: __SerdeContext) => Promise<__HttpRequest>; +/** + * serializeAws_json1_0ExecuteStatementCommand + */ +export declare const se_ExecuteStatementCommand: (input: ExecuteStatementCommandInput, context: __SerdeContext) => Promise<__HttpRequest>; +/** + * serializeAws_json1_0ExecuteTransactionCommand + */ +export declare const se_ExecuteTransactionCommand: (input: ExecuteTransactionCommandInput, context: __SerdeContext) => Promise<__HttpRequest>; +/** + * serializeAws_json1_0ExportTableToPointInTimeCommand + */ +export declare const se_ExportTableToPointInTimeCommand: (input: ExportTableToPointInTimeCommandInput, context: __SerdeContext) => Promise<__HttpRequest>; +/** + * serializeAws_json1_0GetItemCommand + */ +export declare const se_GetItemCommand: (input: GetItemCommandInput, context: __SerdeContext) => Promise<__HttpRequest>; +/** + * 
serializeAws_json1_0GetResourcePolicyCommand + */ +export declare const se_GetResourcePolicyCommand: (input: GetResourcePolicyCommandInput, context: __SerdeContext) => Promise<__HttpRequest>; +/** + * serializeAws_json1_0ImportTableCommand + */ +export declare const se_ImportTableCommand: (input: ImportTableCommandInput, context: __SerdeContext) => Promise<__HttpRequest>; +/** + * serializeAws_json1_0ListBackupsCommand + */ +export declare const se_ListBackupsCommand: (input: ListBackupsCommandInput, context: __SerdeContext) => Promise<__HttpRequest>; +/** + * serializeAws_json1_0ListContributorInsightsCommand + */ +export declare const se_ListContributorInsightsCommand: (input: ListContributorInsightsCommandInput, context: __SerdeContext) => Promise<__HttpRequest>; +/** + * serializeAws_json1_0ListExportsCommand + */ +export declare const se_ListExportsCommand: (input: ListExportsCommandInput, context: __SerdeContext) => Promise<__HttpRequest>; +/** + * serializeAws_json1_0ListGlobalTablesCommand + */ +export declare const se_ListGlobalTablesCommand: (input: ListGlobalTablesCommandInput, context: __SerdeContext) => Promise<__HttpRequest>; +/** + * serializeAws_json1_0ListImportsCommand + */ +export declare const se_ListImportsCommand: (input: ListImportsCommandInput, context: __SerdeContext) => Promise<__HttpRequest>; +/** + * serializeAws_json1_0ListTablesCommand + */ +export declare const se_ListTablesCommand: (input: ListTablesCommandInput, context: __SerdeContext) => Promise<__HttpRequest>; +/** + * serializeAws_json1_0ListTagsOfResourceCommand + */ +export declare const se_ListTagsOfResourceCommand: (input: ListTagsOfResourceCommandInput, context: __SerdeContext) => Promise<__HttpRequest>; +/** + * serializeAws_json1_0PutItemCommand + */ +export declare const se_PutItemCommand: (input: PutItemCommandInput, context: __SerdeContext) => Promise<__HttpRequest>; +/** + * serializeAws_json1_0PutResourcePolicyCommand + */ +export declare const 
se_PutResourcePolicyCommand: (input: PutResourcePolicyCommandInput, context: __SerdeContext) => Promise<__HttpRequest>; +/** + * serializeAws_json1_0QueryCommand + */ +export declare const se_QueryCommand: (input: QueryCommandInput, context: __SerdeContext) => Promise<__HttpRequest>; +/** + * serializeAws_json1_0RestoreTableFromBackupCommand + */ +export declare const se_RestoreTableFromBackupCommand: (input: RestoreTableFromBackupCommandInput, context: __SerdeContext) => Promise<__HttpRequest>; +/** + * serializeAws_json1_0RestoreTableToPointInTimeCommand + */ +export declare const se_RestoreTableToPointInTimeCommand: (input: RestoreTableToPointInTimeCommandInput, context: __SerdeContext) => Promise<__HttpRequest>; +/** + * serializeAws_json1_0ScanCommand + */ +export declare const se_ScanCommand: (input: ScanCommandInput, context: __SerdeContext) => Promise<__HttpRequest>; +/** + * serializeAws_json1_0TagResourceCommand + */ +export declare const se_TagResourceCommand: (input: TagResourceCommandInput, context: __SerdeContext) => Promise<__HttpRequest>; +/** + * serializeAws_json1_0TransactGetItemsCommand + */ +export declare const se_TransactGetItemsCommand: (input: TransactGetItemsCommandInput, context: __SerdeContext) => Promise<__HttpRequest>; +/** + * serializeAws_json1_0TransactWriteItemsCommand + */ +export declare const se_TransactWriteItemsCommand: (input: TransactWriteItemsCommandInput, context: __SerdeContext) => Promise<__HttpRequest>; +/** + * serializeAws_json1_0UntagResourceCommand + */ +export declare const se_UntagResourceCommand: (input: UntagResourceCommandInput, context: __SerdeContext) => Promise<__HttpRequest>; +/** + * serializeAws_json1_0UpdateContinuousBackupsCommand + */ +export declare const se_UpdateContinuousBackupsCommand: (input: UpdateContinuousBackupsCommandInput, context: __SerdeContext) => Promise<__HttpRequest>; +/** + * serializeAws_json1_0UpdateContributorInsightsCommand + */ +export declare const 
se_UpdateContributorInsightsCommand: (input: UpdateContributorInsightsCommandInput, context: __SerdeContext) => Promise<__HttpRequest>; +/** + * serializeAws_json1_0UpdateGlobalTableCommand + */ +export declare const se_UpdateGlobalTableCommand: (input: UpdateGlobalTableCommandInput, context: __SerdeContext) => Promise<__HttpRequest>; +/** + * serializeAws_json1_0UpdateGlobalTableSettingsCommand + */ +export declare const se_UpdateGlobalTableSettingsCommand: (input: UpdateGlobalTableSettingsCommandInput, context: __SerdeContext) => Promise<__HttpRequest>; +/** + * serializeAws_json1_0UpdateItemCommand + */ +export declare const se_UpdateItemCommand: (input: UpdateItemCommandInput, context: __SerdeContext) => Promise<__HttpRequest>; +/** + * serializeAws_json1_0UpdateKinesisStreamingDestinationCommand + */ +export declare const se_UpdateKinesisStreamingDestinationCommand: (input: UpdateKinesisStreamingDestinationCommandInput, context: __SerdeContext) => Promise<__HttpRequest>; +/** + * serializeAws_json1_0UpdateTableCommand + */ +export declare const se_UpdateTableCommand: (input: UpdateTableCommandInput, context: __SerdeContext) => Promise<__HttpRequest>; +/** + * serializeAws_json1_0UpdateTableReplicaAutoScalingCommand + */ +export declare const se_UpdateTableReplicaAutoScalingCommand: (input: UpdateTableReplicaAutoScalingCommandInput, context: __SerdeContext) => Promise<__HttpRequest>; +/** + * serializeAws_json1_0UpdateTimeToLiveCommand + */ +export declare const se_UpdateTimeToLiveCommand: (input: UpdateTimeToLiveCommandInput, context: __SerdeContext) => Promise<__HttpRequest>; +/** + * deserializeAws_json1_0BatchExecuteStatementCommand + */ +export declare const de_BatchExecuteStatementCommand: (output: __HttpResponse, context: __SerdeContext) => Promise; +/** + * deserializeAws_json1_0BatchGetItemCommand + */ +export declare const de_BatchGetItemCommand: (output: __HttpResponse, context: __SerdeContext) => Promise; +/** + * 
deserializeAws_json1_0BatchWriteItemCommand + */ +export declare const de_BatchWriteItemCommand: (output: __HttpResponse, context: __SerdeContext) => Promise; +/** + * deserializeAws_json1_0CreateBackupCommand + */ +export declare const de_CreateBackupCommand: (output: __HttpResponse, context: __SerdeContext) => Promise; +/** + * deserializeAws_json1_0CreateGlobalTableCommand + */ +export declare const de_CreateGlobalTableCommand: (output: __HttpResponse, context: __SerdeContext) => Promise; +/** + * deserializeAws_json1_0CreateTableCommand + */ +export declare const de_CreateTableCommand: (output: __HttpResponse, context: __SerdeContext) => Promise; +/** + * deserializeAws_json1_0DeleteBackupCommand + */ +export declare const de_DeleteBackupCommand: (output: __HttpResponse, context: __SerdeContext) => Promise; +/** + * deserializeAws_json1_0DeleteItemCommand + */ +export declare const de_DeleteItemCommand: (output: __HttpResponse, context: __SerdeContext) => Promise; +/** + * deserializeAws_json1_0DeleteResourcePolicyCommand + */ +export declare const de_DeleteResourcePolicyCommand: (output: __HttpResponse, context: __SerdeContext) => Promise; +/** + * deserializeAws_json1_0DeleteTableCommand + */ +export declare const de_DeleteTableCommand: (output: __HttpResponse, context: __SerdeContext) => Promise; +/** + * deserializeAws_json1_0DescribeBackupCommand + */ +export declare const de_DescribeBackupCommand: (output: __HttpResponse, context: __SerdeContext) => Promise; +/** + * deserializeAws_json1_0DescribeContinuousBackupsCommand + */ +export declare const de_DescribeContinuousBackupsCommand: (output: __HttpResponse, context: __SerdeContext) => Promise; +/** + * deserializeAws_json1_0DescribeContributorInsightsCommand + */ +export declare const de_DescribeContributorInsightsCommand: (output: __HttpResponse, context: __SerdeContext) => Promise; +/** + * deserializeAws_json1_0DescribeEndpointsCommand + */ +export declare const de_DescribeEndpointsCommand: (output: 
__HttpResponse, context: __SerdeContext) => Promise; +/** + * deserializeAws_json1_0DescribeExportCommand + */ +export declare const de_DescribeExportCommand: (output: __HttpResponse, context: __SerdeContext) => Promise; +/** + * deserializeAws_json1_0DescribeGlobalTableCommand + */ +export declare const de_DescribeGlobalTableCommand: (output: __HttpResponse, context: __SerdeContext) => Promise; +/** + * deserializeAws_json1_0DescribeGlobalTableSettingsCommand + */ +export declare const de_DescribeGlobalTableSettingsCommand: (output: __HttpResponse, context: __SerdeContext) => Promise; +/** + * deserializeAws_json1_0DescribeImportCommand + */ +export declare const de_DescribeImportCommand: (output: __HttpResponse, context: __SerdeContext) => Promise; +/** + * deserializeAws_json1_0DescribeKinesisStreamingDestinationCommand + */ +export declare const de_DescribeKinesisStreamingDestinationCommand: (output: __HttpResponse, context: __SerdeContext) => Promise; +/** + * deserializeAws_json1_0DescribeLimitsCommand + */ +export declare const de_DescribeLimitsCommand: (output: __HttpResponse, context: __SerdeContext) => Promise; +/** + * deserializeAws_json1_0DescribeTableCommand + */ +export declare const de_DescribeTableCommand: (output: __HttpResponse, context: __SerdeContext) => Promise; +/** + * deserializeAws_json1_0DescribeTableReplicaAutoScalingCommand + */ +export declare const de_DescribeTableReplicaAutoScalingCommand: (output: __HttpResponse, context: __SerdeContext) => Promise; +/** + * deserializeAws_json1_0DescribeTimeToLiveCommand + */ +export declare const de_DescribeTimeToLiveCommand: (output: __HttpResponse, context: __SerdeContext) => Promise; +/** + * deserializeAws_json1_0DisableKinesisStreamingDestinationCommand + */ +export declare const de_DisableKinesisStreamingDestinationCommand: (output: __HttpResponse, context: __SerdeContext) => Promise; +/** + * deserializeAws_json1_0EnableKinesisStreamingDestinationCommand + */ +export declare const 
de_EnableKinesisStreamingDestinationCommand: (output: __HttpResponse, context: __SerdeContext) => Promise; +/** + * deserializeAws_json1_0ExecuteStatementCommand + */ +export declare const de_ExecuteStatementCommand: (output: __HttpResponse, context: __SerdeContext) => Promise; +/** + * deserializeAws_json1_0ExecuteTransactionCommand + */ +export declare const de_ExecuteTransactionCommand: (output: __HttpResponse, context: __SerdeContext) => Promise; +/** + * deserializeAws_json1_0ExportTableToPointInTimeCommand + */ +export declare const de_ExportTableToPointInTimeCommand: (output: __HttpResponse, context: __SerdeContext) => Promise; +/** + * deserializeAws_json1_0GetItemCommand + */ +export declare const de_GetItemCommand: (output: __HttpResponse, context: __SerdeContext) => Promise; +/** + * deserializeAws_json1_0GetResourcePolicyCommand + */ +export declare const de_GetResourcePolicyCommand: (output: __HttpResponse, context: __SerdeContext) => Promise; +/** + * deserializeAws_json1_0ImportTableCommand + */ +export declare const de_ImportTableCommand: (output: __HttpResponse, context: __SerdeContext) => Promise; +/** + * deserializeAws_json1_0ListBackupsCommand + */ +export declare const de_ListBackupsCommand: (output: __HttpResponse, context: __SerdeContext) => Promise; +/** + * deserializeAws_json1_0ListContributorInsightsCommand + */ +export declare const de_ListContributorInsightsCommand: (output: __HttpResponse, context: __SerdeContext) => Promise; +/** + * deserializeAws_json1_0ListExportsCommand + */ +export declare const de_ListExportsCommand: (output: __HttpResponse, context: __SerdeContext) => Promise; +/** + * deserializeAws_json1_0ListGlobalTablesCommand + */ +export declare const de_ListGlobalTablesCommand: (output: __HttpResponse, context: __SerdeContext) => Promise; +/** + * deserializeAws_json1_0ListImportsCommand + */ +export declare const de_ListImportsCommand: (output: __HttpResponse, context: __SerdeContext) => Promise; +/** + * 
deserializeAws_json1_0ListTablesCommand + */ +export declare const de_ListTablesCommand: (output: __HttpResponse, context: __SerdeContext) => Promise; +/** + * deserializeAws_json1_0ListTagsOfResourceCommand + */ +export declare const de_ListTagsOfResourceCommand: (output: __HttpResponse, context: __SerdeContext) => Promise; +/** + * deserializeAws_json1_0PutItemCommand + */ +export declare const de_PutItemCommand: (output: __HttpResponse, context: __SerdeContext) => Promise; +/** + * deserializeAws_json1_0PutResourcePolicyCommand + */ +export declare const de_PutResourcePolicyCommand: (output: __HttpResponse, context: __SerdeContext) => Promise; +/** + * deserializeAws_json1_0QueryCommand + */ +export declare const de_QueryCommand: (output: __HttpResponse, context: __SerdeContext) => Promise; +/** + * deserializeAws_json1_0RestoreTableFromBackupCommand + */ +export declare const de_RestoreTableFromBackupCommand: (output: __HttpResponse, context: __SerdeContext) => Promise; +/** + * deserializeAws_json1_0RestoreTableToPointInTimeCommand + */ +export declare const de_RestoreTableToPointInTimeCommand: (output: __HttpResponse, context: __SerdeContext) => Promise; +/** + * deserializeAws_json1_0ScanCommand + */ +export declare const de_ScanCommand: (output: __HttpResponse, context: __SerdeContext) => Promise; +/** + * deserializeAws_json1_0TagResourceCommand + */ +export declare const de_TagResourceCommand: (output: __HttpResponse, context: __SerdeContext) => Promise; +/** + * deserializeAws_json1_0TransactGetItemsCommand + */ +export declare const de_TransactGetItemsCommand: (output: __HttpResponse, context: __SerdeContext) => Promise; +/** + * deserializeAws_json1_0TransactWriteItemsCommand + */ +export declare const de_TransactWriteItemsCommand: (output: __HttpResponse, context: __SerdeContext) => Promise; +/** + * deserializeAws_json1_0UntagResourceCommand + */ +export declare const de_UntagResourceCommand: (output: __HttpResponse, context: __SerdeContext) => 
Promise; +/** + * deserializeAws_json1_0UpdateContinuousBackupsCommand + */ +export declare const de_UpdateContinuousBackupsCommand: (output: __HttpResponse, context: __SerdeContext) => Promise; +/** + * deserializeAws_json1_0UpdateContributorInsightsCommand + */ +export declare const de_UpdateContributorInsightsCommand: (output: __HttpResponse, context: __SerdeContext) => Promise; +/** + * deserializeAws_json1_0UpdateGlobalTableCommand + */ +export declare const de_UpdateGlobalTableCommand: (output: __HttpResponse, context: __SerdeContext) => Promise; +/** + * deserializeAws_json1_0UpdateGlobalTableSettingsCommand + */ +export declare const de_UpdateGlobalTableSettingsCommand: (output: __HttpResponse, context: __SerdeContext) => Promise; +/** + * deserializeAws_json1_0UpdateItemCommand + */ +export declare const de_UpdateItemCommand: (output: __HttpResponse, context: __SerdeContext) => Promise; +/** + * deserializeAws_json1_0UpdateKinesisStreamingDestinationCommand + */ +export declare const de_UpdateKinesisStreamingDestinationCommand: (output: __HttpResponse, context: __SerdeContext) => Promise; +/** + * deserializeAws_json1_0UpdateTableCommand + */ +export declare const de_UpdateTableCommand: (output: __HttpResponse, context: __SerdeContext) => Promise; +/** + * deserializeAws_json1_0UpdateTableReplicaAutoScalingCommand + */ +export declare const de_UpdateTableReplicaAutoScalingCommand: (output: __HttpResponse, context: __SerdeContext) => Promise; +/** + * deserializeAws_json1_0UpdateTimeToLiveCommand + */ +export declare const de_UpdateTimeToLiveCommand: (output: __HttpResponse, context: __SerdeContext) => Promise; diff --git a/amplify/functions/deleteDocument/node_modules/@aws-sdk/client-dynamodb/dist-types/runtimeConfig.browser.d.ts b/amplify/functions/deleteDocument/node_modules/@aws-sdk/client-dynamodb/dist-types/runtimeConfig.browser.d.ts new file mode 100644 index 0000000..e8b4a74 --- /dev/null +++ 
b/amplify/functions/deleteDocument/node_modules/@aws-sdk/client-dynamodb/dist-types/runtimeConfig.browser.d.ts @@ -0,0 +1,55 @@ +import { FetchHttpHandler as RequestHandler } from "@smithy/fetch-http-handler"; +import { DynamoDBClientConfig } from "./DynamoDBClient"; +/** + * @internal + */ +export declare const getRuntimeConfig: (config: DynamoDBClientConfig) => { + runtime: string; + defaultsMode: import("@smithy/types").Provider; + accountIdEndpointMode: "disabled" | "preferred" | "required" | (() => Promise); + bodyLengthChecker: import("@smithy/types").BodyLengthCalculator; + credentialDefaultProvider: ((input: any) => import("@smithy/types").AwsCredentialIdentityProvider) | ((_: unknown) => () => Promise); + defaultUserAgentProvider: (config?: import("@aws-sdk/util-user-agent-browser").PreviouslyResolved | undefined) => Promise; + endpointDiscoveryEnabledProvider: import("@smithy/types").Provider; + maxAttempts: number | import("@smithy/types").Provider; + region: string | import("@smithy/types").Provider; + requestHandler: import("@smithy/protocol-http").HttpHandler | RequestHandler; + retryMode: string | import("@smithy/types").Provider; + sha256: import("@smithy/types").HashConstructor; + streamCollector: import("@smithy/types").StreamCollector; + useDualstackEndpoint: boolean | import("@smithy/types").Provider; + useFipsEndpoint: boolean | import("@smithy/types").Provider; + apiVersion: string; + cacheMiddleware?: boolean | undefined; + urlParser: import("@smithy/types").UrlParser; + base64Decoder: import("@smithy/types").Decoder; + base64Encoder: (_input: string | Uint8Array) => string; + utf8Decoder: import("@smithy/types").Decoder; + utf8Encoder: (input: string | Uint8Array) => string; + disableHostPrefix: boolean; + serviceId: string; + profile?: string | undefined; + logger: import("@smithy/types").Logger; + extensions: import("./runtimeExtensions").RuntimeExtension[]; + customUserAgent?: string | import("@smithy/types").UserAgent | undefined; + 
userAgentAppId?: string | import("@smithy/types").Provider | undefined; + retryStrategy?: import("@smithy/types").RetryStrategy | import("@smithy/types").RetryStrategyV2 | undefined; + endpoint?: ((string | import("@smithy/types").Endpoint | import("@smithy/types").Provider | import("@smithy/types").EndpointV2 | import("@smithy/types").Provider) & (string | import("@smithy/types").Provider | import("@smithy/types").Endpoint | import("@smithy/types").Provider | import("@smithy/types").EndpointV2 | import("@smithy/types").Provider)) | undefined; + endpointProvider: (endpointParams: import("./endpoint/EndpointParameters").EndpointParameters, context?: { + logger?: import("@smithy/types").Logger | undefined; + }) => import("@smithy/types").EndpointV2; + tls?: boolean | undefined; + serviceConfiguredEndpoint?: undefined; + authSchemePreference?: string[] | import("@smithy/types").Provider | undefined; + httpAuthSchemes: import("@smithy/types").HttpAuthScheme[]; + httpAuthSchemeProvider: import("./auth/httpAuthSchemeProvider").DynamoDBHttpAuthSchemeProvider; + credentials?: import("@smithy/types").AwsCredentialIdentity | import("@smithy/types").AwsCredentialIdentityProvider | undefined; + signer?: import("@smithy/types").RequestSigner | ((authScheme?: import("@smithy/types").AuthScheme | undefined) => Promise) | undefined; + signingEscapePath?: boolean | undefined; + systemClockOffset?: number | undefined; + signingRegion?: string | undefined; + signerConstructor?: (new (options: import("@smithy/signature-v4").SignatureV4Init & import("@smithy/signature-v4").SignatureV4CryptoInit) => import("@smithy/types").RequestSigner) | undefined; + endpointCacheSize?: number | undefined; + endpointDiscoveryEnabled?: boolean | undefined; + accountId?: string | import("@smithy/types").Provider | undefined; +}; diff --git a/amplify/functions/deleteDocument/node_modules/@aws-sdk/client-dynamodb/dist-types/runtimeConfig.d.ts 
b/amplify/functions/deleteDocument/node_modules/@aws-sdk/client-dynamodb/dist-types/runtimeConfig.d.ts new file mode 100644 index 0000000..01479fa --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@aws-sdk/client-dynamodb/dist-types/runtimeConfig.d.ts @@ -0,0 +1,55 @@ +import { NodeHttpHandler as RequestHandler } from "@smithy/node-http-handler"; +import { DynamoDBClientConfig } from "./DynamoDBClient"; +/** + * @internal + */ +export declare const getRuntimeConfig: (config: DynamoDBClientConfig) => { + runtime: string; + defaultsMode: import("@smithy/types").Provider; + accountIdEndpointMode: "disabled" | "preferred" | "required" | import("@smithy/types").Provider; + authSchemePreference: string[] | import("@smithy/types").Provider; + bodyLengthChecker: import("@smithy/types").BodyLengthCalculator; + credentialDefaultProvider: ((input: any) => import("@smithy/types").AwsCredentialIdentityProvider) | ((init?: import("@aws-sdk/credential-provider-node").DefaultProviderInit | undefined) => import("@smithy/types").MemoizedProvider); + defaultUserAgentProvider: (config?: import("@aws-sdk/util-user-agent-node").PreviouslyResolved | undefined) => Promise; + endpointDiscoveryEnabledProvider: import("@smithy/types").Provider; + maxAttempts: number | import("@smithy/types").Provider; + region: string | import("@smithy/types").Provider; + requestHandler: RequestHandler | import("@smithy/protocol-http").HttpHandler; + retryMode: string | import("@smithy/types").Provider; + sha256: import("@smithy/types").HashConstructor; + streamCollector: import("@smithy/types").StreamCollector; + useDualstackEndpoint: boolean | import("@smithy/types").Provider; + useFipsEndpoint: boolean | import("@smithy/types").Provider; + userAgentAppId: string | import("@smithy/types").Provider; + apiVersion: string; + cacheMiddleware?: boolean | undefined; + urlParser: import("@smithy/types").UrlParser; + base64Decoder: import("@smithy/types").Decoder; + base64Encoder: (_input: string | 
Uint8Array) => string; + utf8Decoder: import("@smithy/types").Decoder; + utf8Encoder: (input: string | Uint8Array) => string; + disableHostPrefix: boolean; + serviceId: string; + profile?: string | undefined; + logger: import("@smithy/types").Logger; + extensions: import("./runtimeExtensions").RuntimeExtension[]; + customUserAgent?: string | import("@smithy/types").UserAgent | undefined; + retryStrategy?: import("@smithy/types").RetryStrategy | import("@smithy/types").RetryStrategyV2 | undefined; + endpoint?: ((string | import("@smithy/types").Endpoint | import("@smithy/types").Provider | import("@smithy/types").EndpointV2 | import("@smithy/types").Provider) & (string | import("@smithy/types").Provider | import("@smithy/types").Endpoint | import("@smithy/types").Provider | import("@smithy/types").EndpointV2 | import("@smithy/types").Provider)) | undefined; + endpointProvider: (endpointParams: import("./endpoint/EndpointParameters").EndpointParameters, context?: { + logger?: import("@smithy/types").Logger | undefined; + }) => import("@smithy/types").EndpointV2; + tls?: boolean | undefined; + serviceConfiguredEndpoint?: undefined; + httpAuthSchemes: import("@smithy/types").HttpAuthScheme[]; + httpAuthSchemeProvider: import("./auth/httpAuthSchemeProvider").DynamoDBHttpAuthSchemeProvider; + credentials?: import("@smithy/types").AwsCredentialIdentity | import("@smithy/types").AwsCredentialIdentityProvider | undefined; + signer?: import("@smithy/types").RequestSigner | ((authScheme?: import("@smithy/types").AuthScheme | undefined) => Promise) | undefined; + signingEscapePath?: boolean | undefined; + systemClockOffset?: number | undefined; + signingRegion?: string | undefined; + signerConstructor?: (new (options: import("@smithy/signature-v4").SignatureV4Init & import("@smithy/signature-v4").SignatureV4CryptoInit) => import("@smithy/types").RequestSigner) | undefined; + endpointCacheSize?: number | undefined; + endpointDiscoveryEnabled?: boolean | undefined; + accountId?: 
string | import("@smithy/types").Provider | undefined; +}; diff --git a/amplify/functions/deleteDocument/node_modules/@aws-sdk/client-dynamodb/dist-types/runtimeConfig.native.d.ts b/amplify/functions/deleteDocument/node_modules/@aws-sdk/client-dynamodb/dist-types/runtimeConfig.native.d.ts new file mode 100644 index 0000000..0288659 --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@aws-sdk/client-dynamodb/dist-types/runtimeConfig.native.d.ts @@ -0,0 +1,54 @@ +import { DynamoDBClientConfig } from "./DynamoDBClient"; +/** + * @internal + */ +export declare const getRuntimeConfig: (config: DynamoDBClientConfig) => { + runtime: string; + sha256: import("@smithy/types").HashConstructor; + requestHandler: import("@smithy/types").NodeHttpHandlerOptions | import("@smithy/types").FetchHttpHandlerOptions | Record | import("@smithy/protocol-http").HttpHandler | import("@smithy/fetch-http-handler").FetchHttpHandler; + apiVersion: string; + cacheMiddleware?: boolean | undefined; + urlParser: import("@smithy/types").UrlParser; + bodyLengthChecker: import("@smithy/types").BodyLengthCalculator; + streamCollector: import("@smithy/types").StreamCollector; + base64Decoder: import("@smithy/types").Decoder; + base64Encoder: (_input: string | Uint8Array) => string; + utf8Decoder: import("@smithy/types").Decoder; + utf8Encoder: (input: string | Uint8Array) => string; + disableHostPrefix: boolean; + serviceId: string; + useDualstackEndpoint: boolean | import("@smithy/types").Provider; + useFipsEndpoint: boolean | import("@smithy/types").Provider; + region: string | import("@smithy/types").Provider; + profile?: string | undefined; + accountIdEndpointMode: "disabled" | "preferred" | "required" | (() => Promise); + defaultUserAgentProvider: (config?: import("@aws-sdk/util-user-agent-browser").PreviouslyResolved | undefined) => Promise; + credentialDefaultProvider: ((input: any) => import("@smithy/types").AwsCredentialIdentityProvider) | ((_: unknown) => () => Promise); + 
maxAttempts: number | import("@smithy/types").Provider; + retryMode: string | import("@smithy/types").Provider; + logger: import("@smithy/types").Logger; + extensions: import("./runtimeExtensions").RuntimeExtension[]; + defaultsMode: import("@smithy/smithy-client").DefaultsMode | import("@smithy/types").Provider; + endpointDiscoveryEnabledProvider: import("@smithy/types").Provider; + customUserAgent?: string | import("@smithy/types").UserAgent | undefined; + userAgentAppId?: string | import("@smithy/types").Provider | undefined; + retryStrategy?: import("@smithy/types").RetryStrategy | import("@smithy/types").RetryStrategyV2 | undefined; + endpoint?: ((string | import("@smithy/types").Endpoint | import("@smithy/types").Provider | import("@smithy/types").EndpointV2 | import("@smithy/types").Provider) & (string | import("@smithy/types").Provider | import("@smithy/types").Endpoint | import("@smithy/types").Provider | import("@smithy/types").EndpointV2 | import("@smithy/types").Provider)) | undefined; + endpointProvider: (endpointParams: import("./endpoint/EndpointParameters").EndpointParameters, context?: { + logger?: import("@smithy/types").Logger | undefined; + }) => import("@smithy/types").EndpointV2; + tls?: boolean | undefined; + serviceConfiguredEndpoint?: undefined; + authSchemePreference?: string[] | import("@smithy/types").Provider | undefined; + httpAuthSchemes: import("@smithy/types").HttpAuthScheme[]; + httpAuthSchemeProvider: import("./auth/httpAuthSchemeProvider").DynamoDBHttpAuthSchemeProvider; + credentials?: import("@smithy/types").AwsCredentialIdentity | import("@smithy/types").AwsCredentialIdentityProvider | undefined; + signer?: import("@smithy/types").RequestSigner | ((authScheme?: import("@smithy/types").AuthScheme | undefined) => Promise) | undefined; + signingEscapePath?: boolean | undefined; + systemClockOffset?: number | undefined; + signingRegion?: string | undefined; + signerConstructor?: (new (options: 
import("@smithy/signature-v4").SignatureV4Init & import("@smithy/signature-v4").SignatureV4CryptoInit) => import("@smithy/types").RequestSigner) | undefined; + endpointCacheSize?: number | undefined; + endpointDiscoveryEnabled?: boolean | undefined; + accountId?: string | import("@smithy/types").Provider | undefined; +}; diff --git a/amplify/functions/deleteDocument/node_modules/@aws-sdk/client-dynamodb/dist-types/runtimeConfig.shared.d.ts b/amplify/functions/deleteDocument/node_modules/@aws-sdk/client-dynamodb/dist-types/runtimeConfig.shared.d.ts new file mode 100644 index 0000000..36f4e1e --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@aws-sdk/client-dynamodb/dist-types/runtimeConfig.shared.d.ts @@ -0,0 +1,21 @@ +import { DynamoDBClientConfig } from "./DynamoDBClient"; +/** + * @internal + */ +export declare const getRuntimeConfig: (config: DynamoDBClientConfig) => { + apiVersion: string; + base64Decoder: import("@smithy/types").Decoder; + base64Encoder: (_input: string | Uint8Array) => string; + disableHostPrefix: boolean; + endpointProvider: (endpointParams: import("./endpoint/EndpointParameters").EndpointParameters, context?: { + logger?: import("@smithy/types").Logger | undefined; + }) => import("@smithy/types").EndpointV2; + extensions: import("./runtimeExtensions").RuntimeExtension[]; + httpAuthSchemeProvider: import("./auth/httpAuthSchemeProvider").DynamoDBHttpAuthSchemeProvider; + httpAuthSchemes: import("@smithy/types").HttpAuthScheme[]; + logger: import("@smithy/types").Logger; + serviceId: string; + urlParser: import("@smithy/types").UrlParser; + utf8Decoder: import("@smithy/types").Decoder; + utf8Encoder: (input: string | Uint8Array) => string; +}; diff --git a/amplify/functions/deleteDocument/node_modules/@aws-sdk/client-dynamodb/dist-types/runtimeExtensions.d.ts b/amplify/functions/deleteDocument/node_modules/@aws-sdk/client-dynamodb/dist-types/runtimeExtensions.d.ts new file mode 100644 index 0000000..ac1a4bc --- /dev/null +++ 
b/amplify/functions/deleteDocument/node_modules/@aws-sdk/client-dynamodb/dist-types/runtimeExtensions.d.ts @@ -0,0 +1,17 @@ +import { DynamoDBExtensionConfiguration } from "./extensionConfiguration"; +/** + * @public + */ +export interface RuntimeExtension { + configure(extensionConfiguration: DynamoDBExtensionConfiguration): void; +} +/** + * @public + */ +export interface RuntimeExtensionsConfig { + extensions: RuntimeExtension[]; +} +/** + * @internal + */ +export declare const resolveRuntimeExtensions: (runtimeConfig: any, extensions: RuntimeExtension[]) => any; diff --git a/amplify/functions/deleteDocument/node_modules/@aws-sdk/client-dynamodb/dist-types/ts3.4/DynamoDB.d.ts b/amplify/functions/deleteDocument/node_modules/@aws-sdk/client-dynamodb/dist-types/ts3.4/DynamoDB.d.ts new file mode 100644 index 0000000..cf606cb --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@aws-sdk/client-dynamodb/dist-types/ts3.4/DynamoDB.d.ts @@ -0,0 +1,1000 @@ +import { HttpHandlerOptions as __HttpHandlerOptions } from "@smithy/types"; +import { + BatchExecuteStatementCommandInput, + BatchExecuteStatementCommandOutput, +} from "./commands/BatchExecuteStatementCommand"; +import { + BatchGetItemCommandInput, + BatchGetItemCommandOutput, +} from "./commands/BatchGetItemCommand"; +import { + BatchWriteItemCommandInput, + BatchWriteItemCommandOutput, +} from "./commands/BatchWriteItemCommand"; +import { + CreateBackupCommandInput, + CreateBackupCommandOutput, +} from "./commands/CreateBackupCommand"; +import { + CreateGlobalTableCommandInput, + CreateGlobalTableCommandOutput, +} from "./commands/CreateGlobalTableCommand"; +import { + CreateTableCommandInput, + CreateTableCommandOutput, +} from "./commands/CreateTableCommand"; +import { + DeleteBackupCommandInput, + DeleteBackupCommandOutput, +} from "./commands/DeleteBackupCommand"; +import { + DeleteItemCommandInput, + DeleteItemCommandOutput, +} from "./commands/DeleteItemCommand"; +import { + 
DeleteResourcePolicyCommandInput, + DeleteResourcePolicyCommandOutput, +} from "./commands/DeleteResourcePolicyCommand"; +import { + DeleteTableCommandInput, + DeleteTableCommandOutput, +} from "./commands/DeleteTableCommand"; +import { + DescribeBackupCommandInput, + DescribeBackupCommandOutput, +} from "./commands/DescribeBackupCommand"; +import { + DescribeContinuousBackupsCommandInput, + DescribeContinuousBackupsCommandOutput, +} from "./commands/DescribeContinuousBackupsCommand"; +import { + DescribeContributorInsightsCommandInput, + DescribeContributorInsightsCommandOutput, +} from "./commands/DescribeContributorInsightsCommand"; +import { + DescribeEndpointsCommandInput, + DescribeEndpointsCommandOutput, +} from "./commands/DescribeEndpointsCommand"; +import { + DescribeExportCommandInput, + DescribeExportCommandOutput, +} from "./commands/DescribeExportCommand"; +import { + DescribeGlobalTableCommandInput, + DescribeGlobalTableCommandOutput, +} from "./commands/DescribeGlobalTableCommand"; +import { + DescribeGlobalTableSettingsCommandInput, + DescribeGlobalTableSettingsCommandOutput, +} from "./commands/DescribeGlobalTableSettingsCommand"; +import { + DescribeImportCommandInput, + DescribeImportCommandOutput, +} from "./commands/DescribeImportCommand"; +import { + DescribeKinesisStreamingDestinationCommandInput, + DescribeKinesisStreamingDestinationCommandOutput, +} from "./commands/DescribeKinesisStreamingDestinationCommand"; +import { + DescribeLimitsCommandInput, + DescribeLimitsCommandOutput, +} from "./commands/DescribeLimitsCommand"; +import { + DescribeTableCommandInput, + DescribeTableCommandOutput, +} from "./commands/DescribeTableCommand"; +import { + DescribeTableReplicaAutoScalingCommandInput, + DescribeTableReplicaAutoScalingCommandOutput, +} from "./commands/DescribeTableReplicaAutoScalingCommand"; +import { + DescribeTimeToLiveCommandInput, + DescribeTimeToLiveCommandOutput, +} from "./commands/DescribeTimeToLiveCommand"; +import { + 
DisableKinesisStreamingDestinationCommandInput, + DisableKinesisStreamingDestinationCommandOutput, +} from "./commands/DisableKinesisStreamingDestinationCommand"; +import { + EnableKinesisStreamingDestinationCommandInput, + EnableKinesisStreamingDestinationCommandOutput, +} from "./commands/EnableKinesisStreamingDestinationCommand"; +import { + ExecuteStatementCommandInput, + ExecuteStatementCommandOutput, +} from "./commands/ExecuteStatementCommand"; +import { + ExecuteTransactionCommandInput, + ExecuteTransactionCommandOutput, +} from "./commands/ExecuteTransactionCommand"; +import { + ExportTableToPointInTimeCommandInput, + ExportTableToPointInTimeCommandOutput, +} from "./commands/ExportTableToPointInTimeCommand"; +import { + GetItemCommandInput, + GetItemCommandOutput, +} from "./commands/GetItemCommand"; +import { + GetResourcePolicyCommandInput, + GetResourcePolicyCommandOutput, +} from "./commands/GetResourcePolicyCommand"; +import { + ImportTableCommandInput, + ImportTableCommandOutput, +} from "./commands/ImportTableCommand"; +import { + ListBackupsCommandInput, + ListBackupsCommandOutput, +} from "./commands/ListBackupsCommand"; +import { + ListContributorInsightsCommandInput, + ListContributorInsightsCommandOutput, +} from "./commands/ListContributorInsightsCommand"; +import { + ListExportsCommandInput, + ListExportsCommandOutput, +} from "./commands/ListExportsCommand"; +import { + ListGlobalTablesCommandInput, + ListGlobalTablesCommandOutput, +} from "./commands/ListGlobalTablesCommand"; +import { + ListImportsCommandInput, + ListImportsCommandOutput, +} from "./commands/ListImportsCommand"; +import { + ListTablesCommandInput, + ListTablesCommandOutput, +} from "./commands/ListTablesCommand"; +import { + ListTagsOfResourceCommandInput, + ListTagsOfResourceCommandOutput, +} from "./commands/ListTagsOfResourceCommand"; +import { + PutItemCommandInput, + PutItemCommandOutput, +} from "./commands/PutItemCommand"; +import { + PutResourcePolicyCommandInput, 
+ PutResourcePolicyCommandOutput, +} from "./commands/PutResourcePolicyCommand"; +import { QueryCommandInput, QueryCommandOutput } from "./commands/QueryCommand"; +import { + RestoreTableFromBackupCommandInput, + RestoreTableFromBackupCommandOutput, +} from "./commands/RestoreTableFromBackupCommand"; +import { + RestoreTableToPointInTimeCommandInput, + RestoreTableToPointInTimeCommandOutput, +} from "./commands/RestoreTableToPointInTimeCommand"; +import { ScanCommandInput, ScanCommandOutput } from "./commands/ScanCommand"; +import { + TagResourceCommandInput, + TagResourceCommandOutput, +} from "./commands/TagResourceCommand"; +import { + TransactGetItemsCommandInput, + TransactGetItemsCommandOutput, +} from "./commands/TransactGetItemsCommand"; +import { + TransactWriteItemsCommandInput, + TransactWriteItemsCommandOutput, +} from "./commands/TransactWriteItemsCommand"; +import { + UntagResourceCommandInput, + UntagResourceCommandOutput, +} from "./commands/UntagResourceCommand"; +import { + UpdateContinuousBackupsCommandInput, + UpdateContinuousBackupsCommandOutput, +} from "./commands/UpdateContinuousBackupsCommand"; +import { + UpdateContributorInsightsCommandInput, + UpdateContributorInsightsCommandOutput, +} from "./commands/UpdateContributorInsightsCommand"; +import { + UpdateGlobalTableCommandInput, + UpdateGlobalTableCommandOutput, +} from "./commands/UpdateGlobalTableCommand"; +import { + UpdateGlobalTableSettingsCommandInput, + UpdateGlobalTableSettingsCommandOutput, +} from "./commands/UpdateGlobalTableSettingsCommand"; +import { + UpdateItemCommandInput, + UpdateItemCommandOutput, +} from "./commands/UpdateItemCommand"; +import { + UpdateKinesisStreamingDestinationCommandInput, + UpdateKinesisStreamingDestinationCommandOutput, +} from "./commands/UpdateKinesisStreamingDestinationCommand"; +import { + UpdateTableCommandInput, + UpdateTableCommandOutput, +} from "./commands/UpdateTableCommand"; +import { + UpdateTableReplicaAutoScalingCommandInput, + 
UpdateTableReplicaAutoScalingCommandOutput, +} from "./commands/UpdateTableReplicaAutoScalingCommand"; +import { + UpdateTimeToLiveCommandInput, + UpdateTimeToLiveCommandOutput, +} from "./commands/UpdateTimeToLiveCommand"; +import { DynamoDBClient } from "./DynamoDBClient"; +export interface DynamoDB { + batchExecuteStatement( + args: BatchExecuteStatementCommandInput, + options?: __HttpHandlerOptions + ): Promise; + batchExecuteStatement( + args: BatchExecuteStatementCommandInput, + cb: (err: any, data?: BatchExecuteStatementCommandOutput) => void + ): void; + batchExecuteStatement( + args: BatchExecuteStatementCommandInput, + options: __HttpHandlerOptions, + cb: (err: any, data?: BatchExecuteStatementCommandOutput) => void + ): void; + batchGetItem( + args: BatchGetItemCommandInput, + options?: __HttpHandlerOptions + ): Promise; + batchGetItem( + args: BatchGetItemCommandInput, + cb: (err: any, data?: BatchGetItemCommandOutput) => void + ): void; + batchGetItem( + args: BatchGetItemCommandInput, + options: __HttpHandlerOptions, + cb: (err: any, data?: BatchGetItemCommandOutput) => void + ): void; + batchWriteItem( + args: BatchWriteItemCommandInput, + options?: __HttpHandlerOptions + ): Promise; + batchWriteItem( + args: BatchWriteItemCommandInput, + cb: (err: any, data?: BatchWriteItemCommandOutput) => void + ): void; + batchWriteItem( + args: BatchWriteItemCommandInput, + options: __HttpHandlerOptions, + cb: (err: any, data?: BatchWriteItemCommandOutput) => void + ): void; + createBackup( + args: CreateBackupCommandInput, + options?: __HttpHandlerOptions + ): Promise; + createBackup( + args: CreateBackupCommandInput, + cb: (err: any, data?: CreateBackupCommandOutput) => void + ): void; + createBackup( + args: CreateBackupCommandInput, + options: __HttpHandlerOptions, + cb: (err: any, data?: CreateBackupCommandOutput) => void + ): void; + createGlobalTable( + args: CreateGlobalTableCommandInput, + options?: __HttpHandlerOptions + ): Promise; + 
createGlobalTable( + args: CreateGlobalTableCommandInput, + cb: (err: any, data?: CreateGlobalTableCommandOutput) => void + ): void; + createGlobalTable( + args: CreateGlobalTableCommandInput, + options: __HttpHandlerOptions, + cb: (err: any, data?: CreateGlobalTableCommandOutput) => void + ): void; + createTable( + args: CreateTableCommandInput, + options?: __HttpHandlerOptions + ): Promise; + createTable( + args: CreateTableCommandInput, + cb: (err: any, data?: CreateTableCommandOutput) => void + ): void; + createTable( + args: CreateTableCommandInput, + options: __HttpHandlerOptions, + cb: (err: any, data?: CreateTableCommandOutput) => void + ): void; + deleteBackup( + args: DeleteBackupCommandInput, + options?: __HttpHandlerOptions + ): Promise; + deleteBackup( + args: DeleteBackupCommandInput, + cb: (err: any, data?: DeleteBackupCommandOutput) => void + ): void; + deleteBackup( + args: DeleteBackupCommandInput, + options: __HttpHandlerOptions, + cb: (err: any, data?: DeleteBackupCommandOutput) => void + ): void; + deleteItem( + args: DeleteItemCommandInput, + options?: __HttpHandlerOptions + ): Promise; + deleteItem( + args: DeleteItemCommandInput, + cb: (err: any, data?: DeleteItemCommandOutput) => void + ): void; + deleteItem( + args: DeleteItemCommandInput, + options: __HttpHandlerOptions, + cb: (err: any, data?: DeleteItemCommandOutput) => void + ): void; + deleteResourcePolicy( + args: DeleteResourcePolicyCommandInput, + options?: __HttpHandlerOptions + ): Promise; + deleteResourcePolicy( + args: DeleteResourcePolicyCommandInput, + cb: (err: any, data?: DeleteResourcePolicyCommandOutput) => void + ): void; + deleteResourcePolicy( + args: DeleteResourcePolicyCommandInput, + options: __HttpHandlerOptions, + cb: (err: any, data?: DeleteResourcePolicyCommandOutput) => void + ): void; + deleteTable( + args: DeleteTableCommandInput, + options?: __HttpHandlerOptions + ): Promise; + deleteTable( + args: DeleteTableCommandInput, + cb: (err: any, data?: 
DeleteTableCommandOutput) => void + ): void; + deleteTable( + args: DeleteTableCommandInput, + options: __HttpHandlerOptions, + cb: (err: any, data?: DeleteTableCommandOutput) => void + ): void; + describeBackup( + args: DescribeBackupCommandInput, + options?: __HttpHandlerOptions + ): Promise; + describeBackup( + args: DescribeBackupCommandInput, + cb: (err: any, data?: DescribeBackupCommandOutput) => void + ): void; + describeBackup( + args: DescribeBackupCommandInput, + options: __HttpHandlerOptions, + cb: (err: any, data?: DescribeBackupCommandOutput) => void + ): void; + describeContinuousBackups( + args: DescribeContinuousBackupsCommandInput, + options?: __HttpHandlerOptions + ): Promise; + describeContinuousBackups( + args: DescribeContinuousBackupsCommandInput, + cb: (err: any, data?: DescribeContinuousBackupsCommandOutput) => void + ): void; + describeContinuousBackups( + args: DescribeContinuousBackupsCommandInput, + options: __HttpHandlerOptions, + cb: (err: any, data?: DescribeContinuousBackupsCommandOutput) => void + ): void; + describeContributorInsights( + args: DescribeContributorInsightsCommandInput, + options?: __HttpHandlerOptions + ): Promise; + describeContributorInsights( + args: DescribeContributorInsightsCommandInput, + cb: (err: any, data?: DescribeContributorInsightsCommandOutput) => void + ): void; + describeContributorInsights( + args: DescribeContributorInsightsCommandInput, + options: __HttpHandlerOptions, + cb: (err: any, data?: DescribeContributorInsightsCommandOutput) => void + ): void; + describeEndpoints(): Promise; + describeEndpoints( + args: DescribeEndpointsCommandInput, + options?: __HttpHandlerOptions + ): Promise; + describeEndpoints( + args: DescribeEndpointsCommandInput, + cb: (err: any, data?: DescribeEndpointsCommandOutput) => void + ): void; + describeEndpoints( + args: DescribeEndpointsCommandInput, + options: __HttpHandlerOptions, + cb: (err: any, data?: DescribeEndpointsCommandOutput) => void + ): void; + 
describeExport( + args: DescribeExportCommandInput, + options?: __HttpHandlerOptions + ): Promise; + describeExport( + args: DescribeExportCommandInput, + cb: (err: any, data?: DescribeExportCommandOutput) => void + ): void; + describeExport( + args: DescribeExportCommandInput, + options: __HttpHandlerOptions, + cb: (err: any, data?: DescribeExportCommandOutput) => void + ): void; + describeGlobalTable( + args: DescribeGlobalTableCommandInput, + options?: __HttpHandlerOptions + ): Promise; + describeGlobalTable( + args: DescribeGlobalTableCommandInput, + cb: (err: any, data?: DescribeGlobalTableCommandOutput) => void + ): void; + describeGlobalTable( + args: DescribeGlobalTableCommandInput, + options: __HttpHandlerOptions, + cb: (err: any, data?: DescribeGlobalTableCommandOutput) => void + ): void; + describeGlobalTableSettings( + args: DescribeGlobalTableSettingsCommandInput, + options?: __HttpHandlerOptions + ): Promise; + describeGlobalTableSettings( + args: DescribeGlobalTableSettingsCommandInput, + cb: (err: any, data?: DescribeGlobalTableSettingsCommandOutput) => void + ): void; + describeGlobalTableSettings( + args: DescribeGlobalTableSettingsCommandInput, + options: __HttpHandlerOptions, + cb: (err: any, data?: DescribeGlobalTableSettingsCommandOutput) => void + ): void; + describeImport( + args: DescribeImportCommandInput, + options?: __HttpHandlerOptions + ): Promise; + describeImport( + args: DescribeImportCommandInput, + cb: (err: any, data?: DescribeImportCommandOutput) => void + ): void; + describeImport( + args: DescribeImportCommandInput, + options: __HttpHandlerOptions, + cb: (err: any, data?: DescribeImportCommandOutput) => void + ): void; + describeKinesisStreamingDestination( + args: DescribeKinesisStreamingDestinationCommandInput, + options?: __HttpHandlerOptions + ): Promise; + describeKinesisStreamingDestination( + args: DescribeKinesisStreamingDestinationCommandInput, + cb: ( + err: any, + data?: 
DescribeKinesisStreamingDestinationCommandOutput + ) => void + ): void; + describeKinesisStreamingDestination( + args: DescribeKinesisStreamingDestinationCommandInput, + options: __HttpHandlerOptions, + cb: ( + err: any, + data?: DescribeKinesisStreamingDestinationCommandOutput + ) => void + ): void; + describeLimits(): Promise; + describeLimits( + args: DescribeLimitsCommandInput, + options?: __HttpHandlerOptions + ): Promise; + describeLimits( + args: DescribeLimitsCommandInput, + cb: (err: any, data?: DescribeLimitsCommandOutput) => void + ): void; + describeLimits( + args: DescribeLimitsCommandInput, + options: __HttpHandlerOptions, + cb: (err: any, data?: DescribeLimitsCommandOutput) => void + ): void; + describeTable( + args: DescribeTableCommandInput, + options?: __HttpHandlerOptions + ): Promise; + describeTable( + args: DescribeTableCommandInput, + cb: (err: any, data?: DescribeTableCommandOutput) => void + ): void; + describeTable( + args: DescribeTableCommandInput, + options: __HttpHandlerOptions, + cb: (err: any, data?: DescribeTableCommandOutput) => void + ): void; + describeTableReplicaAutoScaling( + args: DescribeTableReplicaAutoScalingCommandInput, + options?: __HttpHandlerOptions + ): Promise; + describeTableReplicaAutoScaling( + args: DescribeTableReplicaAutoScalingCommandInput, + cb: (err: any, data?: DescribeTableReplicaAutoScalingCommandOutput) => void + ): void; + describeTableReplicaAutoScaling( + args: DescribeTableReplicaAutoScalingCommandInput, + options: __HttpHandlerOptions, + cb: (err: any, data?: DescribeTableReplicaAutoScalingCommandOutput) => void + ): void; + describeTimeToLive( + args: DescribeTimeToLiveCommandInput, + options?: __HttpHandlerOptions + ): Promise; + describeTimeToLive( + args: DescribeTimeToLiveCommandInput, + cb: (err: any, data?: DescribeTimeToLiveCommandOutput) => void + ): void; + describeTimeToLive( + args: DescribeTimeToLiveCommandInput, + options: __HttpHandlerOptions, + cb: (err: any, data?: 
DescribeTimeToLiveCommandOutput) => void + ): void; + disableKinesisStreamingDestination( + args: DisableKinesisStreamingDestinationCommandInput, + options?: __HttpHandlerOptions + ): Promise; + disableKinesisStreamingDestination( + args: DisableKinesisStreamingDestinationCommandInput, + cb: ( + err: any, + data?: DisableKinesisStreamingDestinationCommandOutput + ) => void + ): void; + disableKinesisStreamingDestination( + args: DisableKinesisStreamingDestinationCommandInput, + options: __HttpHandlerOptions, + cb: ( + err: any, + data?: DisableKinesisStreamingDestinationCommandOutput + ) => void + ): void; + enableKinesisStreamingDestination( + args: EnableKinesisStreamingDestinationCommandInput, + options?: __HttpHandlerOptions + ): Promise; + enableKinesisStreamingDestination( + args: EnableKinesisStreamingDestinationCommandInput, + cb: ( + err: any, + data?: EnableKinesisStreamingDestinationCommandOutput + ) => void + ): void; + enableKinesisStreamingDestination( + args: EnableKinesisStreamingDestinationCommandInput, + options: __HttpHandlerOptions, + cb: ( + err: any, + data?: EnableKinesisStreamingDestinationCommandOutput + ) => void + ): void; + executeStatement( + args: ExecuteStatementCommandInput, + options?: __HttpHandlerOptions + ): Promise; + executeStatement( + args: ExecuteStatementCommandInput, + cb: (err: any, data?: ExecuteStatementCommandOutput) => void + ): void; + executeStatement( + args: ExecuteStatementCommandInput, + options: __HttpHandlerOptions, + cb: (err: any, data?: ExecuteStatementCommandOutput) => void + ): void; + executeTransaction( + args: ExecuteTransactionCommandInput, + options?: __HttpHandlerOptions + ): Promise; + executeTransaction( + args: ExecuteTransactionCommandInput, + cb: (err: any, data?: ExecuteTransactionCommandOutput) => void + ): void; + executeTransaction( + args: ExecuteTransactionCommandInput, + options: __HttpHandlerOptions, + cb: (err: any, data?: ExecuteTransactionCommandOutput) => void + ): void; + 
exportTableToPointInTime( + args: ExportTableToPointInTimeCommandInput, + options?: __HttpHandlerOptions + ): Promise; + exportTableToPointInTime( + args: ExportTableToPointInTimeCommandInput, + cb: (err: any, data?: ExportTableToPointInTimeCommandOutput) => void + ): void; + exportTableToPointInTime( + args: ExportTableToPointInTimeCommandInput, + options: __HttpHandlerOptions, + cb: (err: any, data?: ExportTableToPointInTimeCommandOutput) => void + ): void; + getItem( + args: GetItemCommandInput, + options?: __HttpHandlerOptions + ): Promise; + getItem( + args: GetItemCommandInput, + cb: (err: any, data?: GetItemCommandOutput) => void + ): void; + getItem( + args: GetItemCommandInput, + options: __HttpHandlerOptions, + cb: (err: any, data?: GetItemCommandOutput) => void + ): void; + getResourcePolicy( + args: GetResourcePolicyCommandInput, + options?: __HttpHandlerOptions + ): Promise; + getResourcePolicy( + args: GetResourcePolicyCommandInput, + cb: (err: any, data?: GetResourcePolicyCommandOutput) => void + ): void; + getResourcePolicy( + args: GetResourcePolicyCommandInput, + options: __HttpHandlerOptions, + cb: (err: any, data?: GetResourcePolicyCommandOutput) => void + ): void; + importTable( + args: ImportTableCommandInput, + options?: __HttpHandlerOptions + ): Promise; + importTable( + args: ImportTableCommandInput, + cb: (err: any, data?: ImportTableCommandOutput) => void + ): void; + importTable( + args: ImportTableCommandInput, + options: __HttpHandlerOptions, + cb: (err: any, data?: ImportTableCommandOutput) => void + ): void; + listBackups(): Promise; + listBackups( + args: ListBackupsCommandInput, + options?: __HttpHandlerOptions + ): Promise; + listBackups( + args: ListBackupsCommandInput, + cb: (err: any, data?: ListBackupsCommandOutput) => void + ): void; + listBackups( + args: ListBackupsCommandInput, + options: __HttpHandlerOptions, + cb: (err: any, data?: ListBackupsCommandOutput) => void + ): void; + listContributorInsights(): Promise; + 
listContributorInsights( + args: ListContributorInsightsCommandInput, + options?: __HttpHandlerOptions + ): Promise; + listContributorInsights( + args: ListContributorInsightsCommandInput, + cb: (err: any, data?: ListContributorInsightsCommandOutput) => void + ): void; + listContributorInsights( + args: ListContributorInsightsCommandInput, + options: __HttpHandlerOptions, + cb: (err: any, data?: ListContributorInsightsCommandOutput) => void + ): void; + listExports(): Promise; + listExports( + args: ListExportsCommandInput, + options?: __HttpHandlerOptions + ): Promise; + listExports( + args: ListExportsCommandInput, + cb: (err: any, data?: ListExportsCommandOutput) => void + ): void; + listExports( + args: ListExportsCommandInput, + options: __HttpHandlerOptions, + cb: (err: any, data?: ListExportsCommandOutput) => void + ): void; + listGlobalTables(): Promise; + listGlobalTables( + args: ListGlobalTablesCommandInput, + options?: __HttpHandlerOptions + ): Promise; + listGlobalTables( + args: ListGlobalTablesCommandInput, + cb: (err: any, data?: ListGlobalTablesCommandOutput) => void + ): void; + listGlobalTables( + args: ListGlobalTablesCommandInput, + options: __HttpHandlerOptions, + cb: (err: any, data?: ListGlobalTablesCommandOutput) => void + ): void; + listImports(): Promise; + listImports( + args: ListImportsCommandInput, + options?: __HttpHandlerOptions + ): Promise; + listImports( + args: ListImportsCommandInput, + cb: (err: any, data?: ListImportsCommandOutput) => void + ): void; + listImports( + args: ListImportsCommandInput, + options: __HttpHandlerOptions, + cb: (err: any, data?: ListImportsCommandOutput) => void + ): void; + listTables(): Promise; + listTables( + args: ListTablesCommandInput, + options?: __HttpHandlerOptions + ): Promise; + listTables( + args: ListTablesCommandInput, + cb: (err: any, data?: ListTablesCommandOutput) => void + ): void; + listTables( + args: ListTablesCommandInput, + options: __HttpHandlerOptions, + cb: (err: any, data?: 
ListTablesCommandOutput) => void + ): void; + listTagsOfResource( + args: ListTagsOfResourceCommandInput, + options?: __HttpHandlerOptions + ): Promise; + listTagsOfResource( + args: ListTagsOfResourceCommandInput, + cb: (err: any, data?: ListTagsOfResourceCommandOutput) => void + ): void; + listTagsOfResource( + args: ListTagsOfResourceCommandInput, + options: __HttpHandlerOptions, + cb: (err: any, data?: ListTagsOfResourceCommandOutput) => void + ): void; + putItem( + args: PutItemCommandInput, + options?: __HttpHandlerOptions + ): Promise; + putItem( + args: PutItemCommandInput, + cb: (err: any, data?: PutItemCommandOutput) => void + ): void; + putItem( + args: PutItemCommandInput, + options: __HttpHandlerOptions, + cb: (err: any, data?: PutItemCommandOutput) => void + ): void; + putResourcePolicy( + args: PutResourcePolicyCommandInput, + options?: __HttpHandlerOptions + ): Promise; + putResourcePolicy( + args: PutResourcePolicyCommandInput, + cb: (err: any, data?: PutResourcePolicyCommandOutput) => void + ): void; + putResourcePolicy( + args: PutResourcePolicyCommandInput, + options: __HttpHandlerOptions, + cb: (err: any, data?: PutResourcePolicyCommandOutput) => void + ): void; + query( + args: QueryCommandInput, + options?: __HttpHandlerOptions + ): Promise; + query( + args: QueryCommandInput, + cb: (err: any, data?: QueryCommandOutput) => void + ): void; + query( + args: QueryCommandInput, + options: __HttpHandlerOptions, + cb: (err: any, data?: QueryCommandOutput) => void + ): void; + restoreTableFromBackup( + args: RestoreTableFromBackupCommandInput, + options?: __HttpHandlerOptions + ): Promise; + restoreTableFromBackup( + args: RestoreTableFromBackupCommandInput, + cb: (err: any, data?: RestoreTableFromBackupCommandOutput) => void + ): void; + restoreTableFromBackup( + args: RestoreTableFromBackupCommandInput, + options: __HttpHandlerOptions, + cb: (err: any, data?: RestoreTableFromBackupCommandOutput) => void + ): void; + restoreTableToPointInTime( + 
args: RestoreTableToPointInTimeCommandInput, + options?: __HttpHandlerOptions + ): Promise; + restoreTableToPointInTime( + args: RestoreTableToPointInTimeCommandInput, + cb: (err: any, data?: RestoreTableToPointInTimeCommandOutput) => void + ): void; + restoreTableToPointInTime( + args: RestoreTableToPointInTimeCommandInput, + options: __HttpHandlerOptions, + cb: (err: any, data?: RestoreTableToPointInTimeCommandOutput) => void + ): void; + scan( + args: ScanCommandInput, + options?: __HttpHandlerOptions + ): Promise; + scan( + args: ScanCommandInput, + cb: (err: any, data?: ScanCommandOutput) => void + ): void; + scan( + args: ScanCommandInput, + options: __HttpHandlerOptions, + cb: (err: any, data?: ScanCommandOutput) => void + ): void; + tagResource( + args: TagResourceCommandInput, + options?: __HttpHandlerOptions + ): Promise; + tagResource( + args: TagResourceCommandInput, + cb: (err: any, data?: TagResourceCommandOutput) => void + ): void; + tagResource( + args: TagResourceCommandInput, + options: __HttpHandlerOptions, + cb: (err: any, data?: TagResourceCommandOutput) => void + ): void; + transactGetItems( + args: TransactGetItemsCommandInput, + options?: __HttpHandlerOptions + ): Promise; + transactGetItems( + args: TransactGetItemsCommandInput, + cb: (err: any, data?: TransactGetItemsCommandOutput) => void + ): void; + transactGetItems( + args: TransactGetItemsCommandInput, + options: __HttpHandlerOptions, + cb: (err: any, data?: TransactGetItemsCommandOutput) => void + ): void; + transactWriteItems( + args: TransactWriteItemsCommandInput, + options?: __HttpHandlerOptions + ): Promise; + transactWriteItems( + args: TransactWriteItemsCommandInput, + cb: (err: any, data?: TransactWriteItemsCommandOutput) => void + ): void; + transactWriteItems( + args: TransactWriteItemsCommandInput, + options: __HttpHandlerOptions, + cb: (err: any, data?: TransactWriteItemsCommandOutput) => void + ): void; + untagResource( + args: UntagResourceCommandInput, + options?: 
__HttpHandlerOptions + ): Promise; + untagResource( + args: UntagResourceCommandInput, + cb: (err: any, data?: UntagResourceCommandOutput) => void + ): void; + untagResource( + args: UntagResourceCommandInput, + options: __HttpHandlerOptions, + cb: (err: any, data?: UntagResourceCommandOutput) => void + ): void; + updateContinuousBackups( + args: UpdateContinuousBackupsCommandInput, + options?: __HttpHandlerOptions + ): Promise; + updateContinuousBackups( + args: UpdateContinuousBackupsCommandInput, + cb: (err: any, data?: UpdateContinuousBackupsCommandOutput) => void + ): void; + updateContinuousBackups( + args: UpdateContinuousBackupsCommandInput, + options: __HttpHandlerOptions, + cb: (err: any, data?: UpdateContinuousBackupsCommandOutput) => void + ): void; + updateContributorInsights( + args: UpdateContributorInsightsCommandInput, + options?: __HttpHandlerOptions + ): Promise; + updateContributorInsights( + args: UpdateContributorInsightsCommandInput, + cb: (err: any, data?: UpdateContributorInsightsCommandOutput) => void + ): void; + updateContributorInsights( + args: UpdateContributorInsightsCommandInput, + options: __HttpHandlerOptions, + cb: (err: any, data?: UpdateContributorInsightsCommandOutput) => void + ): void; + updateGlobalTable( + args: UpdateGlobalTableCommandInput, + options?: __HttpHandlerOptions + ): Promise; + updateGlobalTable( + args: UpdateGlobalTableCommandInput, + cb: (err: any, data?: UpdateGlobalTableCommandOutput) => void + ): void; + updateGlobalTable( + args: UpdateGlobalTableCommandInput, + options: __HttpHandlerOptions, + cb: (err: any, data?: UpdateGlobalTableCommandOutput) => void + ): void; + updateGlobalTableSettings( + args: UpdateGlobalTableSettingsCommandInput, + options?: __HttpHandlerOptions + ): Promise; + updateGlobalTableSettings( + args: UpdateGlobalTableSettingsCommandInput, + cb: (err: any, data?: UpdateGlobalTableSettingsCommandOutput) => void + ): void; + updateGlobalTableSettings( + args: 
UpdateGlobalTableSettingsCommandInput, + options: __HttpHandlerOptions, + cb: (err: any, data?: UpdateGlobalTableSettingsCommandOutput) => void + ): void; + updateItem( + args: UpdateItemCommandInput, + options?: __HttpHandlerOptions + ): Promise; + updateItem( + args: UpdateItemCommandInput, + cb: (err: any, data?: UpdateItemCommandOutput) => void + ): void; + updateItem( + args: UpdateItemCommandInput, + options: __HttpHandlerOptions, + cb: (err: any, data?: UpdateItemCommandOutput) => void + ): void; + updateKinesisStreamingDestination( + args: UpdateKinesisStreamingDestinationCommandInput, + options?: __HttpHandlerOptions + ): Promise; + updateKinesisStreamingDestination( + args: UpdateKinesisStreamingDestinationCommandInput, + cb: ( + err: any, + data?: UpdateKinesisStreamingDestinationCommandOutput + ) => void + ): void; + updateKinesisStreamingDestination( + args: UpdateKinesisStreamingDestinationCommandInput, + options: __HttpHandlerOptions, + cb: ( + err: any, + data?: UpdateKinesisStreamingDestinationCommandOutput + ) => void + ): void; + updateTable( + args: UpdateTableCommandInput, + options?: __HttpHandlerOptions + ): Promise; + updateTable( + args: UpdateTableCommandInput, + cb: (err: any, data?: UpdateTableCommandOutput) => void + ): void; + updateTable( + args: UpdateTableCommandInput, + options: __HttpHandlerOptions, + cb: (err: any, data?: UpdateTableCommandOutput) => void + ): void; + updateTableReplicaAutoScaling( + args: UpdateTableReplicaAutoScalingCommandInput, + options?: __HttpHandlerOptions + ): Promise; + updateTableReplicaAutoScaling( + args: UpdateTableReplicaAutoScalingCommandInput, + cb: (err: any, data?: UpdateTableReplicaAutoScalingCommandOutput) => void + ): void; + updateTableReplicaAutoScaling( + args: UpdateTableReplicaAutoScalingCommandInput, + options: __HttpHandlerOptions, + cb: (err: any, data?: UpdateTableReplicaAutoScalingCommandOutput) => void + ): void; + updateTimeToLive( + args: UpdateTimeToLiveCommandInput, + 
options?: __HttpHandlerOptions + ): Promise; + updateTimeToLive( + args: UpdateTimeToLiveCommandInput, + cb: (err: any, data?: UpdateTimeToLiveCommandOutput) => void + ): void; + updateTimeToLive( + args: UpdateTimeToLiveCommandInput, + options: __HttpHandlerOptions, + cb: (err: any, data?: UpdateTimeToLiveCommandOutput) => void + ): void; +} +export declare class DynamoDB extends DynamoDBClient implements DynamoDB {} diff --git a/amplify/functions/deleteDocument/node_modules/@aws-sdk/client-dynamodb/dist-types/ts3.4/DynamoDBClient.d.ts b/amplify/functions/deleteDocument/node_modules/@aws-sdk/client-dynamodb/dist-types/ts3.4/DynamoDBClient.d.ts new file mode 100644 index 0000000..87aca7c --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@aws-sdk/client-dynamodb/dist-types/ts3.4/DynamoDBClient.d.ts @@ -0,0 +1,472 @@ +import { + AccountIdEndpointMode, + AccountIdEndpointModeInputConfig, + AccountIdEndpointModeResolvedConfig, +} from "@aws-sdk/core/account-id-endpoint"; +import { + EndpointDiscoveryInputConfig, + EndpointDiscoveryResolvedConfig, +} from "@aws-sdk/middleware-endpoint-discovery"; +import { + HostHeaderInputConfig, + HostHeaderResolvedConfig, +} from "@aws-sdk/middleware-host-header"; +import { + UserAgentInputConfig, + UserAgentResolvedConfig, +} from "@aws-sdk/middleware-user-agent"; +import { + RegionInputConfig, + RegionResolvedConfig, +} from "@smithy/config-resolver"; +import { + EndpointInputConfig, + EndpointResolvedConfig, +} from "@smithy/middleware-endpoint"; +import { + RetryInputConfig, + RetryResolvedConfig, +} from "@smithy/middleware-retry"; +import { HttpHandlerUserInput as __HttpHandlerUserInput } from "@smithy/protocol-http"; +import { + Client as __Client, + DefaultsMode as __DefaultsMode, + SmithyConfiguration as __SmithyConfiguration, + SmithyResolvedConfiguration as __SmithyResolvedConfiguration, +} from "@smithy/smithy-client"; +import { + AwsCredentialIdentityProvider, + BodyLengthCalculator as 
__BodyLengthCalculator, + CheckOptionalClientConfig as __CheckOptionalClientConfig, + ChecksumConstructor as __ChecksumConstructor, + Decoder as __Decoder, + Encoder as __Encoder, + HashConstructor as __HashConstructor, + HttpHandlerOptions as __HttpHandlerOptions, + Logger as __Logger, + Provider as __Provider, + Provider, + StreamCollector as __StreamCollector, + UrlParser as __UrlParser, + UserAgent as __UserAgent, +} from "@smithy/types"; +import { + HttpAuthSchemeInputConfig, + HttpAuthSchemeResolvedConfig, +} from "./auth/httpAuthSchemeProvider"; +import { + BatchExecuteStatementCommandInput, + BatchExecuteStatementCommandOutput, +} from "./commands/BatchExecuteStatementCommand"; +import { + BatchGetItemCommandInput, + BatchGetItemCommandOutput, +} from "./commands/BatchGetItemCommand"; +import { + BatchWriteItemCommandInput, + BatchWriteItemCommandOutput, +} from "./commands/BatchWriteItemCommand"; +import { + CreateBackupCommandInput, + CreateBackupCommandOutput, +} from "./commands/CreateBackupCommand"; +import { + CreateGlobalTableCommandInput, + CreateGlobalTableCommandOutput, +} from "./commands/CreateGlobalTableCommand"; +import { + CreateTableCommandInput, + CreateTableCommandOutput, +} from "./commands/CreateTableCommand"; +import { + DeleteBackupCommandInput, + DeleteBackupCommandOutput, +} from "./commands/DeleteBackupCommand"; +import { + DeleteItemCommandInput, + DeleteItemCommandOutput, +} from "./commands/DeleteItemCommand"; +import { + DeleteResourcePolicyCommandInput, + DeleteResourcePolicyCommandOutput, +} from "./commands/DeleteResourcePolicyCommand"; +import { + DeleteTableCommandInput, + DeleteTableCommandOutput, +} from "./commands/DeleteTableCommand"; +import { + DescribeBackupCommandInput, + DescribeBackupCommandOutput, +} from "./commands/DescribeBackupCommand"; +import { + DescribeContinuousBackupsCommandInput, + DescribeContinuousBackupsCommandOutput, +} from "./commands/DescribeContinuousBackupsCommand"; +import { + 
DescribeContributorInsightsCommandInput, + DescribeContributorInsightsCommandOutput, +} from "./commands/DescribeContributorInsightsCommand"; +import { + DescribeEndpointsCommandInput, + DescribeEndpointsCommandOutput, +} from "./commands/DescribeEndpointsCommand"; +import { + DescribeExportCommandInput, + DescribeExportCommandOutput, +} from "./commands/DescribeExportCommand"; +import { + DescribeGlobalTableCommandInput, + DescribeGlobalTableCommandOutput, +} from "./commands/DescribeGlobalTableCommand"; +import { + DescribeGlobalTableSettingsCommandInput, + DescribeGlobalTableSettingsCommandOutput, +} from "./commands/DescribeGlobalTableSettingsCommand"; +import { + DescribeImportCommandInput, + DescribeImportCommandOutput, +} from "./commands/DescribeImportCommand"; +import { + DescribeKinesisStreamingDestinationCommandInput, + DescribeKinesisStreamingDestinationCommandOutput, +} from "./commands/DescribeKinesisStreamingDestinationCommand"; +import { + DescribeLimitsCommandInput, + DescribeLimitsCommandOutput, +} from "./commands/DescribeLimitsCommand"; +import { + DescribeTableCommandInput, + DescribeTableCommandOutput, +} from "./commands/DescribeTableCommand"; +import { + DescribeTableReplicaAutoScalingCommandInput, + DescribeTableReplicaAutoScalingCommandOutput, +} from "./commands/DescribeTableReplicaAutoScalingCommand"; +import { + DescribeTimeToLiveCommandInput, + DescribeTimeToLiveCommandOutput, +} from "./commands/DescribeTimeToLiveCommand"; +import { + DisableKinesisStreamingDestinationCommandInput, + DisableKinesisStreamingDestinationCommandOutput, +} from "./commands/DisableKinesisStreamingDestinationCommand"; +import { + EnableKinesisStreamingDestinationCommandInput, + EnableKinesisStreamingDestinationCommandOutput, +} from "./commands/EnableKinesisStreamingDestinationCommand"; +import { + ExecuteStatementCommandInput, + ExecuteStatementCommandOutput, +} from "./commands/ExecuteStatementCommand"; +import { + ExecuteTransactionCommandInput, + 
ExecuteTransactionCommandOutput, +} from "./commands/ExecuteTransactionCommand"; +import { + ExportTableToPointInTimeCommandInput, + ExportTableToPointInTimeCommandOutput, +} from "./commands/ExportTableToPointInTimeCommand"; +import { + GetItemCommandInput, + GetItemCommandOutput, +} from "./commands/GetItemCommand"; +import { + GetResourcePolicyCommandInput, + GetResourcePolicyCommandOutput, +} from "./commands/GetResourcePolicyCommand"; +import { + ImportTableCommandInput, + ImportTableCommandOutput, +} from "./commands/ImportTableCommand"; +import { + ListBackupsCommandInput, + ListBackupsCommandOutput, +} from "./commands/ListBackupsCommand"; +import { + ListContributorInsightsCommandInput, + ListContributorInsightsCommandOutput, +} from "./commands/ListContributorInsightsCommand"; +import { + ListExportsCommandInput, + ListExportsCommandOutput, +} from "./commands/ListExportsCommand"; +import { + ListGlobalTablesCommandInput, + ListGlobalTablesCommandOutput, +} from "./commands/ListGlobalTablesCommand"; +import { + ListImportsCommandInput, + ListImportsCommandOutput, +} from "./commands/ListImportsCommand"; +import { + ListTablesCommandInput, + ListTablesCommandOutput, +} from "./commands/ListTablesCommand"; +import { + ListTagsOfResourceCommandInput, + ListTagsOfResourceCommandOutput, +} from "./commands/ListTagsOfResourceCommand"; +import { + PutItemCommandInput, + PutItemCommandOutput, +} from "./commands/PutItemCommand"; +import { + PutResourcePolicyCommandInput, + PutResourcePolicyCommandOutput, +} from "./commands/PutResourcePolicyCommand"; +import { QueryCommandInput, QueryCommandOutput } from "./commands/QueryCommand"; +import { + RestoreTableFromBackupCommandInput, + RestoreTableFromBackupCommandOutput, +} from "./commands/RestoreTableFromBackupCommand"; +import { + RestoreTableToPointInTimeCommandInput, + RestoreTableToPointInTimeCommandOutput, +} from "./commands/RestoreTableToPointInTimeCommand"; +import { ScanCommandInput, ScanCommandOutput } 
from "./commands/ScanCommand"; +import { + TagResourceCommandInput, + TagResourceCommandOutput, +} from "./commands/TagResourceCommand"; +import { + TransactGetItemsCommandInput, + TransactGetItemsCommandOutput, +} from "./commands/TransactGetItemsCommand"; +import { + TransactWriteItemsCommandInput, + TransactWriteItemsCommandOutput, +} from "./commands/TransactWriteItemsCommand"; +import { + UntagResourceCommandInput, + UntagResourceCommandOutput, +} from "./commands/UntagResourceCommand"; +import { + UpdateContinuousBackupsCommandInput, + UpdateContinuousBackupsCommandOutput, +} from "./commands/UpdateContinuousBackupsCommand"; +import { + UpdateContributorInsightsCommandInput, + UpdateContributorInsightsCommandOutput, +} from "./commands/UpdateContributorInsightsCommand"; +import { + UpdateGlobalTableCommandInput, + UpdateGlobalTableCommandOutput, +} from "./commands/UpdateGlobalTableCommand"; +import { + UpdateGlobalTableSettingsCommandInput, + UpdateGlobalTableSettingsCommandOutput, +} from "./commands/UpdateGlobalTableSettingsCommand"; +import { + UpdateItemCommandInput, + UpdateItemCommandOutput, +} from "./commands/UpdateItemCommand"; +import { + UpdateKinesisStreamingDestinationCommandInput, + UpdateKinesisStreamingDestinationCommandOutput, +} from "./commands/UpdateKinesisStreamingDestinationCommand"; +import { + UpdateTableCommandInput, + UpdateTableCommandOutput, +} from "./commands/UpdateTableCommand"; +import { + UpdateTableReplicaAutoScalingCommandInput, + UpdateTableReplicaAutoScalingCommandOutput, +} from "./commands/UpdateTableReplicaAutoScalingCommand"; +import { + UpdateTimeToLiveCommandInput, + UpdateTimeToLiveCommandOutput, +} from "./commands/UpdateTimeToLiveCommand"; +import { + ClientInputEndpointParameters, + ClientResolvedEndpointParameters, + EndpointParameters, +} from "./endpoint/EndpointParameters"; +import { RuntimeExtension, RuntimeExtensionsConfig } from "./runtimeExtensions"; +export { __Client }; +export type ServiceInputTypes = 
+ | BatchExecuteStatementCommandInput + | BatchGetItemCommandInput + | BatchWriteItemCommandInput + | CreateBackupCommandInput + | CreateGlobalTableCommandInput + | CreateTableCommandInput + | DeleteBackupCommandInput + | DeleteItemCommandInput + | DeleteResourcePolicyCommandInput + | DeleteTableCommandInput + | DescribeBackupCommandInput + | DescribeContinuousBackupsCommandInput + | DescribeContributorInsightsCommandInput + | DescribeEndpointsCommandInput + | DescribeExportCommandInput + | DescribeGlobalTableCommandInput + | DescribeGlobalTableSettingsCommandInput + | DescribeImportCommandInput + | DescribeKinesisStreamingDestinationCommandInput + | DescribeLimitsCommandInput + | DescribeTableCommandInput + | DescribeTableReplicaAutoScalingCommandInput + | DescribeTimeToLiveCommandInput + | DisableKinesisStreamingDestinationCommandInput + | EnableKinesisStreamingDestinationCommandInput + | ExecuteStatementCommandInput + | ExecuteTransactionCommandInput + | ExportTableToPointInTimeCommandInput + | GetItemCommandInput + | GetResourcePolicyCommandInput + | ImportTableCommandInput + | ListBackupsCommandInput + | ListContributorInsightsCommandInput + | ListExportsCommandInput + | ListGlobalTablesCommandInput + | ListImportsCommandInput + | ListTablesCommandInput + | ListTagsOfResourceCommandInput + | PutItemCommandInput + | PutResourcePolicyCommandInput + | QueryCommandInput + | RestoreTableFromBackupCommandInput + | RestoreTableToPointInTimeCommandInput + | ScanCommandInput + | TagResourceCommandInput + | TransactGetItemsCommandInput + | TransactWriteItemsCommandInput + | UntagResourceCommandInput + | UpdateContinuousBackupsCommandInput + | UpdateContributorInsightsCommandInput + | UpdateGlobalTableCommandInput + | UpdateGlobalTableSettingsCommandInput + | UpdateItemCommandInput + | UpdateKinesisStreamingDestinationCommandInput + | UpdateTableCommandInput + | UpdateTableReplicaAutoScalingCommandInput + | UpdateTimeToLiveCommandInput; +export type ServiceOutputTypes = 
+ | BatchExecuteStatementCommandOutput + | BatchGetItemCommandOutput + | BatchWriteItemCommandOutput + | CreateBackupCommandOutput + | CreateGlobalTableCommandOutput + | CreateTableCommandOutput + | DeleteBackupCommandOutput + | DeleteItemCommandOutput + | DeleteResourcePolicyCommandOutput + | DeleteTableCommandOutput + | DescribeBackupCommandOutput + | DescribeContinuousBackupsCommandOutput + | DescribeContributorInsightsCommandOutput + | DescribeEndpointsCommandOutput + | DescribeExportCommandOutput + | DescribeGlobalTableCommandOutput + | DescribeGlobalTableSettingsCommandOutput + | DescribeImportCommandOutput + | DescribeKinesisStreamingDestinationCommandOutput + | DescribeLimitsCommandOutput + | DescribeTableCommandOutput + | DescribeTableReplicaAutoScalingCommandOutput + | DescribeTimeToLiveCommandOutput + | DisableKinesisStreamingDestinationCommandOutput + | EnableKinesisStreamingDestinationCommandOutput + | ExecuteStatementCommandOutput + | ExecuteTransactionCommandOutput + | ExportTableToPointInTimeCommandOutput + | GetItemCommandOutput + | GetResourcePolicyCommandOutput + | ImportTableCommandOutput + | ListBackupsCommandOutput + | ListContributorInsightsCommandOutput + | ListExportsCommandOutput + | ListGlobalTablesCommandOutput + | ListImportsCommandOutput + | ListTablesCommandOutput + | ListTagsOfResourceCommandOutput + | PutItemCommandOutput + | PutResourcePolicyCommandOutput + | QueryCommandOutput + | RestoreTableFromBackupCommandOutput + | RestoreTableToPointInTimeCommandOutput + | ScanCommandOutput + | TagResourceCommandOutput + | TransactGetItemsCommandOutput + | TransactWriteItemsCommandOutput + | UntagResourceCommandOutput + | UpdateContinuousBackupsCommandOutput + | UpdateContributorInsightsCommandOutput + | UpdateGlobalTableCommandOutput + | UpdateGlobalTableSettingsCommandOutput + | UpdateItemCommandOutput + | UpdateKinesisStreamingDestinationCommandOutput + | UpdateTableCommandOutput + | UpdateTableReplicaAutoScalingCommandOutput + | 
UpdateTimeToLiveCommandOutput; +export interface ClientDefaults + extends Partial<__SmithyConfiguration<__HttpHandlerOptions>> { + requestHandler?: __HttpHandlerUserInput; + sha256?: __ChecksumConstructor | __HashConstructor; + urlParser?: __UrlParser; + bodyLengthChecker?: __BodyLengthCalculator; + streamCollector?: __StreamCollector; + base64Decoder?: __Decoder; + base64Encoder?: __Encoder; + utf8Decoder?: __Decoder; + utf8Encoder?: __Encoder; + runtime?: string; + disableHostPrefix?: boolean; + serviceId?: string; + useDualstackEndpoint?: boolean | __Provider; + useFipsEndpoint?: boolean | __Provider; + region?: string | __Provider; + profile?: string; + accountIdEndpointMode?: + | AccountIdEndpointMode + | __Provider; + defaultUserAgentProvider?: Provider<__UserAgent>; + credentialDefaultProvider?: (input: any) => AwsCredentialIdentityProvider; + maxAttempts?: number | __Provider; + retryMode?: string | __Provider; + logger?: __Logger; + extensions?: RuntimeExtension[]; + defaultsMode?: __DefaultsMode | __Provider<__DefaultsMode>; + endpointDiscoveryEnabledProvider?: __Provider; +} +export type DynamoDBClientConfigType = Partial< + __SmithyConfiguration<__HttpHandlerOptions> +> & + ClientDefaults & + AccountIdEndpointModeInputConfig & + UserAgentInputConfig & + RetryInputConfig & + RegionInputConfig & + HostHeaderInputConfig & + EndpointInputConfig & + HttpAuthSchemeInputConfig & + EndpointDiscoveryInputConfig & + ClientInputEndpointParameters; +export interface DynamoDBClientConfig extends DynamoDBClientConfigType {} +export type DynamoDBClientResolvedConfigType = + __SmithyResolvedConfiguration<__HttpHandlerOptions> & + Required & + RuntimeExtensionsConfig & + AccountIdEndpointModeResolvedConfig & + UserAgentResolvedConfig & + RetryResolvedConfig & + RegionResolvedConfig & + HostHeaderResolvedConfig & + EndpointResolvedConfig & + HttpAuthSchemeResolvedConfig & + EndpointDiscoveryResolvedConfig & + ClientResolvedEndpointParameters; +export interface 
DynamoDBClientResolvedConfig + extends DynamoDBClientResolvedConfigType {} +export declare class DynamoDBClient extends __Client< + __HttpHandlerOptions, + ServiceInputTypes, + ServiceOutputTypes, + DynamoDBClientResolvedConfig +> { + readonly config: DynamoDBClientResolvedConfig; + constructor( + ...[configuration]: __CheckOptionalClientConfig + ); + destroy(): void; +} diff --git a/amplify/functions/deleteDocument/node_modules/@aws-sdk/client-dynamodb/dist-types/ts3.4/auth/httpAuthExtensionConfiguration.d.ts b/amplify/functions/deleteDocument/node_modules/@aws-sdk/client-dynamodb/dist-types/ts3.4/auth/httpAuthExtensionConfiguration.d.ts new file mode 100644 index 0000000..236dccc --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@aws-sdk/client-dynamodb/dist-types/ts3.4/auth/httpAuthExtensionConfiguration.d.ts @@ -0,0 +1,32 @@ +import { + AwsCredentialIdentity, + AwsCredentialIdentityProvider, + HttpAuthScheme, +} from "@smithy/types"; +import { DynamoDBHttpAuthSchemeProvider } from "./httpAuthSchemeProvider"; +export interface HttpAuthExtensionConfiguration { + setHttpAuthScheme(httpAuthScheme: HttpAuthScheme): void; + httpAuthSchemes(): HttpAuthScheme[]; + setHttpAuthSchemeProvider( + httpAuthSchemeProvider: DynamoDBHttpAuthSchemeProvider + ): void; + httpAuthSchemeProvider(): DynamoDBHttpAuthSchemeProvider; + setCredentials( + credentials: AwsCredentialIdentity | AwsCredentialIdentityProvider + ): void; + credentials(): + | AwsCredentialIdentity + | AwsCredentialIdentityProvider + | undefined; +} +export type HttpAuthRuntimeConfig = Partial<{ + httpAuthSchemes: HttpAuthScheme[]; + httpAuthSchemeProvider: DynamoDBHttpAuthSchemeProvider; + credentials: AwsCredentialIdentity | AwsCredentialIdentityProvider; +}>; +export declare const getHttpAuthExtensionConfiguration: ( + runtimeConfig: HttpAuthRuntimeConfig +) => HttpAuthExtensionConfiguration; +export declare const resolveHttpAuthRuntimeConfig: ( + config: HttpAuthExtensionConfiguration +) => 
HttpAuthRuntimeConfig; diff --git a/amplify/functions/deleteDocument/node_modules/@aws-sdk/client-dynamodb/dist-types/ts3.4/auth/httpAuthSchemeProvider.d.ts b/amplify/functions/deleteDocument/node_modules/@aws-sdk/client-dynamodb/dist-types/ts3.4/auth/httpAuthSchemeProvider.d.ts new file mode 100644 index 0000000..299733c --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@aws-sdk/client-dynamodb/dist-types/ts3.4/auth/httpAuthSchemeProvider.d.ts @@ -0,0 +1,47 @@ +import { + AwsSdkSigV4AuthInputConfig, + AwsSdkSigV4AuthResolvedConfig, + AwsSdkSigV4PreviouslyResolved, +} from "@aws-sdk/core"; +import { + HandlerExecutionContext, + HttpAuthScheme, + HttpAuthSchemeParameters, + HttpAuthSchemeParametersProvider, + HttpAuthSchemeProvider, + Provider, +} from "@smithy/types"; +import { DynamoDBClientResolvedConfig } from "../DynamoDBClient"; +export interface DynamoDBHttpAuthSchemeParameters + extends HttpAuthSchemeParameters { + region?: string; +} +export interface DynamoDBHttpAuthSchemeParametersProvider + extends HttpAuthSchemeParametersProvider< + DynamoDBClientResolvedConfig, + HandlerExecutionContext, + DynamoDBHttpAuthSchemeParameters, + object + > {} +export declare const defaultDynamoDBHttpAuthSchemeParametersProvider: ( + config: DynamoDBClientResolvedConfig, + context: HandlerExecutionContext, + input: object +) => Promise; +export interface DynamoDBHttpAuthSchemeProvider + extends HttpAuthSchemeProvider {} +export declare const defaultDynamoDBHttpAuthSchemeProvider: DynamoDBHttpAuthSchemeProvider; +export interface HttpAuthSchemeInputConfig extends AwsSdkSigV4AuthInputConfig { + authSchemePreference?: string[] | Provider; + httpAuthSchemes?: HttpAuthScheme[]; + httpAuthSchemeProvider?: DynamoDBHttpAuthSchemeProvider; +} +export interface HttpAuthSchemeResolvedConfig + extends AwsSdkSigV4AuthResolvedConfig { + readonly authSchemePreference: Provider; + readonly httpAuthSchemes: HttpAuthScheme[]; + readonly httpAuthSchemeProvider: 
DynamoDBHttpAuthSchemeProvider; +} +export declare const resolveHttpAuthSchemeConfig: ( + config: T & HttpAuthSchemeInputConfig & AwsSdkSigV4PreviouslyResolved +) => T & HttpAuthSchemeResolvedConfig; diff --git a/amplify/functions/deleteDocument/node_modules/@aws-sdk/client-dynamodb/dist-types/ts3.4/commands/BatchExecuteStatementCommand.d.ts b/amplify/functions/deleteDocument/node_modules/@aws-sdk/client-dynamodb/dist-types/ts3.4/commands/BatchExecuteStatementCommand.d.ts new file mode 100644 index 0000000..f7c8eb9 --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@aws-sdk/client-dynamodb/dist-types/ts3.4/commands/BatchExecuteStatementCommand.d.ts @@ -0,0 +1,51 @@ +import { Command as $Command } from "@smithy/smithy-client"; +import { MetadataBearer as __MetadataBearer } from "@smithy/types"; +import { + DynamoDBClientResolvedConfig, + ServiceInputTypes, + ServiceOutputTypes, +} from "../DynamoDBClient"; +import { + BatchExecuteStatementInput, + BatchExecuteStatementOutput, +} from "../models/models_0"; +export { __MetadataBearer }; +export { $Command }; +export interface BatchExecuteStatementCommandInput + extends BatchExecuteStatementInput {} +export interface BatchExecuteStatementCommandOutput + extends BatchExecuteStatementOutput, + __MetadataBearer {} +declare const BatchExecuteStatementCommand_base: { + new ( + input: BatchExecuteStatementCommandInput + ): import("@smithy/smithy-client").CommandImpl< + BatchExecuteStatementCommandInput, + BatchExecuteStatementCommandOutput, + DynamoDBClientResolvedConfig, + ServiceInputTypes, + ServiceOutputTypes + >; + new ( + __0_0: BatchExecuteStatementCommandInput + ): import("@smithy/smithy-client").CommandImpl< + BatchExecuteStatementCommandInput, + BatchExecuteStatementCommandOutput, + DynamoDBClientResolvedConfig, + ServiceInputTypes, + ServiceOutputTypes + >; + getEndpointParameterInstructions(): import("@smithy/middleware-endpoint").EndpointParameterInstructions; +}; +export declare class 
BatchExecuteStatementCommand extends BatchExecuteStatementCommand_base { + protected static __types: { + api: { + input: BatchExecuteStatementInput; + output: BatchExecuteStatementOutput; + }; + sdk: { + input: BatchExecuteStatementCommandInput; + output: BatchExecuteStatementCommandOutput; + }; + }; +} diff --git a/amplify/functions/deleteDocument/node_modules/@aws-sdk/client-dynamodb/dist-types/ts3.4/commands/BatchGetItemCommand.d.ts b/amplify/functions/deleteDocument/node_modules/@aws-sdk/client-dynamodb/dist-types/ts3.4/commands/BatchGetItemCommand.d.ts new file mode 100644 index 0000000..7e11a34 --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@aws-sdk/client-dynamodb/dist-types/ts3.4/commands/BatchGetItemCommand.d.ts @@ -0,0 +1,47 @@ +import { Command as $Command } from "@smithy/smithy-client"; +import { MetadataBearer as __MetadataBearer } from "@smithy/types"; +import { + DynamoDBClientResolvedConfig, + ServiceInputTypes, + ServiceOutputTypes, +} from "../DynamoDBClient"; +import { BatchGetItemInput, BatchGetItemOutput } from "../models/models_0"; +export { __MetadataBearer }; +export { $Command }; +export interface BatchGetItemCommandInput extends BatchGetItemInput {} +export interface BatchGetItemCommandOutput + extends BatchGetItemOutput, + __MetadataBearer {} +declare const BatchGetItemCommand_base: { + new ( + input: BatchGetItemCommandInput + ): import("@smithy/smithy-client").CommandImpl< + BatchGetItemCommandInput, + BatchGetItemCommandOutput, + DynamoDBClientResolvedConfig, + ServiceInputTypes, + ServiceOutputTypes + >; + new ( + __0_0: BatchGetItemCommandInput + ): import("@smithy/smithy-client").CommandImpl< + BatchGetItemCommandInput, + BatchGetItemCommandOutput, + DynamoDBClientResolvedConfig, + ServiceInputTypes, + ServiceOutputTypes + >; + getEndpointParameterInstructions(): import("@smithy/middleware-endpoint").EndpointParameterInstructions; +}; +export declare class BatchGetItemCommand extends BatchGetItemCommand_base { + 
protected static __types: { + api: { + input: BatchGetItemInput; + output: BatchGetItemOutput; + }; + sdk: { + input: BatchGetItemCommandInput; + output: BatchGetItemCommandOutput; + }; + }; +} diff --git a/amplify/functions/deleteDocument/node_modules/@aws-sdk/client-dynamodb/dist-types/ts3.4/commands/BatchWriteItemCommand.d.ts b/amplify/functions/deleteDocument/node_modules/@aws-sdk/client-dynamodb/dist-types/ts3.4/commands/BatchWriteItemCommand.d.ts new file mode 100644 index 0000000..0542d2f --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@aws-sdk/client-dynamodb/dist-types/ts3.4/commands/BatchWriteItemCommand.d.ts @@ -0,0 +1,47 @@ +import { Command as $Command } from "@smithy/smithy-client"; +import { MetadataBearer as __MetadataBearer } from "@smithy/types"; +import { + DynamoDBClientResolvedConfig, + ServiceInputTypes, + ServiceOutputTypes, +} from "../DynamoDBClient"; +import { BatchWriteItemInput, BatchWriteItemOutput } from "../models/models_0"; +export { __MetadataBearer }; +export { $Command }; +export interface BatchWriteItemCommandInput extends BatchWriteItemInput {} +export interface BatchWriteItemCommandOutput + extends BatchWriteItemOutput, + __MetadataBearer {} +declare const BatchWriteItemCommand_base: { + new ( + input: BatchWriteItemCommandInput + ): import("@smithy/smithy-client").CommandImpl< + BatchWriteItemCommandInput, + BatchWriteItemCommandOutput, + DynamoDBClientResolvedConfig, + ServiceInputTypes, + ServiceOutputTypes + >; + new ( + __0_0: BatchWriteItemCommandInput + ): import("@smithy/smithy-client").CommandImpl< + BatchWriteItemCommandInput, + BatchWriteItemCommandOutput, + DynamoDBClientResolvedConfig, + ServiceInputTypes, + ServiceOutputTypes + >; + getEndpointParameterInstructions(): import("@smithy/middleware-endpoint").EndpointParameterInstructions; +}; +export declare class BatchWriteItemCommand extends BatchWriteItemCommand_base { + protected static __types: { + api: { + input: BatchWriteItemInput; + output: 
BatchWriteItemOutput; + }; + sdk: { + input: BatchWriteItemCommandInput; + output: BatchWriteItemCommandOutput; + }; + }; +} diff --git a/amplify/functions/deleteDocument/node_modules/@aws-sdk/client-dynamodb/dist-types/ts3.4/commands/CreateBackupCommand.d.ts b/amplify/functions/deleteDocument/node_modules/@aws-sdk/client-dynamodb/dist-types/ts3.4/commands/CreateBackupCommand.d.ts new file mode 100644 index 0000000..6692e00 --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@aws-sdk/client-dynamodb/dist-types/ts3.4/commands/CreateBackupCommand.d.ts @@ -0,0 +1,47 @@ +import { Command as $Command } from "@smithy/smithy-client"; +import { MetadataBearer as __MetadataBearer } from "@smithy/types"; +import { + DynamoDBClientResolvedConfig, + ServiceInputTypes, + ServiceOutputTypes, +} from "../DynamoDBClient"; +import { CreateBackupInput, CreateBackupOutput } from "../models/models_0"; +export { __MetadataBearer }; +export { $Command }; +export interface CreateBackupCommandInput extends CreateBackupInput {} +export interface CreateBackupCommandOutput + extends CreateBackupOutput, + __MetadataBearer {} +declare const CreateBackupCommand_base: { + new ( + input: CreateBackupCommandInput + ): import("@smithy/smithy-client").CommandImpl< + CreateBackupCommandInput, + CreateBackupCommandOutput, + DynamoDBClientResolvedConfig, + ServiceInputTypes, + ServiceOutputTypes + >; + new ( + __0_0: CreateBackupCommandInput + ): import("@smithy/smithy-client").CommandImpl< + CreateBackupCommandInput, + CreateBackupCommandOutput, + DynamoDBClientResolvedConfig, + ServiceInputTypes, + ServiceOutputTypes + >; + getEndpointParameterInstructions(): import("@smithy/middleware-endpoint").EndpointParameterInstructions; +}; +export declare class CreateBackupCommand extends CreateBackupCommand_base { + protected static __types: { + api: { + input: CreateBackupInput; + output: CreateBackupOutput; + }; + sdk: { + input: CreateBackupCommandInput; + output: CreateBackupCommandOutput; 
+ }; + }; +} diff --git a/amplify/functions/deleteDocument/node_modules/@aws-sdk/client-dynamodb/dist-types/ts3.4/commands/CreateGlobalTableCommand.d.ts b/amplify/functions/deleteDocument/node_modules/@aws-sdk/client-dynamodb/dist-types/ts3.4/commands/CreateGlobalTableCommand.d.ts new file mode 100644 index 0000000..65564e3 --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@aws-sdk/client-dynamodb/dist-types/ts3.4/commands/CreateGlobalTableCommand.d.ts @@ -0,0 +1,50 @@ +import { Command as $Command } from "@smithy/smithy-client"; +import { MetadataBearer as __MetadataBearer } from "@smithy/types"; +import { + DynamoDBClientResolvedConfig, + ServiceInputTypes, + ServiceOutputTypes, +} from "../DynamoDBClient"; +import { + CreateGlobalTableInput, + CreateGlobalTableOutput, +} from "../models/models_0"; +export { __MetadataBearer }; +export { $Command }; +export interface CreateGlobalTableCommandInput extends CreateGlobalTableInput {} +export interface CreateGlobalTableCommandOutput + extends CreateGlobalTableOutput, + __MetadataBearer {} +declare const CreateGlobalTableCommand_base: { + new ( + input: CreateGlobalTableCommandInput + ): import("@smithy/smithy-client").CommandImpl< + CreateGlobalTableCommandInput, + CreateGlobalTableCommandOutput, + DynamoDBClientResolvedConfig, + ServiceInputTypes, + ServiceOutputTypes + >; + new ( + __0_0: CreateGlobalTableCommandInput + ): import("@smithy/smithy-client").CommandImpl< + CreateGlobalTableCommandInput, + CreateGlobalTableCommandOutput, + DynamoDBClientResolvedConfig, + ServiceInputTypes, + ServiceOutputTypes + >; + getEndpointParameterInstructions(): import("@smithy/middleware-endpoint").EndpointParameterInstructions; +}; +export declare class CreateGlobalTableCommand extends CreateGlobalTableCommand_base { + protected static __types: { + api: { + input: CreateGlobalTableInput; + output: CreateGlobalTableOutput; + }; + sdk: { + input: CreateGlobalTableCommandInput; + output: 
CreateGlobalTableCommandOutput; + }; + }; +} diff --git a/amplify/functions/deleteDocument/node_modules/@aws-sdk/client-dynamodb/dist-types/ts3.4/commands/CreateTableCommand.d.ts b/amplify/functions/deleteDocument/node_modules/@aws-sdk/client-dynamodb/dist-types/ts3.4/commands/CreateTableCommand.d.ts new file mode 100644 index 0000000..5761cdb --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@aws-sdk/client-dynamodb/dist-types/ts3.4/commands/CreateTableCommand.d.ts @@ -0,0 +1,47 @@ +import { Command as $Command } from "@smithy/smithy-client"; +import { MetadataBearer as __MetadataBearer } from "@smithy/types"; +import { + DynamoDBClientResolvedConfig, + ServiceInputTypes, + ServiceOutputTypes, +} from "../DynamoDBClient"; +import { CreateTableInput, CreateTableOutput } from "../models/models_0"; +export { __MetadataBearer }; +export { $Command }; +export interface CreateTableCommandInput extends CreateTableInput {} +export interface CreateTableCommandOutput + extends CreateTableOutput, + __MetadataBearer {} +declare const CreateTableCommand_base: { + new ( + input: CreateTableCommandInput + ): import("@smithy/smithy-client").CommandImpl< + CreateTableCommandInput, + CreateTableCommandOutput, + DynamoDBClientResolvedConfig, + ServiceInputTypes, + ServiceOutputTypes + >; + new ( + __0_0: CreateTableCommandInput + ): import("@smithy/smithy-client").CommandImpl< + CreateTableCommandInput, + CreateTableCommandOutput, + DynamoDBClientResolvedConfig, + ServiceInputTypes, + ServiceOutputTypes + >; + getEndpointParameterInstructions(): import("@smithy/middleware-endpoint").EndpointParameterInstructions; +}; +export declare class CreateTableCommand extends CreateTableCommand_base { + protected static __types: { + api: { + input: CreateTableInput; + output: CreateTableOutput; + }; + sdk: { + input: CreateTableCommandInput; + output: CreateTableCommandOutput; + }; + }; +} diff --git 
a/amplify/functions/deleteDocument/node_modules/@aws-sdk/client-dynamodb/dist-types/ts3.4/commands/DeleteBackupCommand.d.ts b/amplify/functions/deleteDocument/node_modules/@aws-sdk/client-dynamodb/dist-types/ts3.4/commands/DeleteBackupCommand.d.ts new file mode 100644 index 0000000..0b19c93 --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@aws-sdk/client-dynamodb/dist-types/ts3.4/commands/DeleteBackupCommand.d.ts @@ -0,0 +1,47 @@ +import { Command as $Command } from "@smithy/smithy-client"; +import { MetadataBearer as __MetadataBearer } from "@smithy/types"; +import { + DynamoDBClientResolvedConfig, + ServiceInputTypes, + ServiceOutputTypes, +} from "../DynamoDBClient"; +import { DeleteBackupInput, DeleteBackupOutput } from "../models/models_0"; +export { __MetadataBearer }; +export { $Command }; +export interface DeleteBackupCommandInput extends DeleteBackupInput {} +export interface DeleteBackupCommandOutput + extends DeleteBackupOutput, + __MetadataBearer {} +declare const DeleteBackupCommand_base: { + new ( + input: DeleteBackupCommandInput + ): import("@smithy/smithy-client").CommandImpl< + DeleteBackupCommandInput, + DeleteBackupCommandOutput, + DynamoDBClientResolvedConfig, + ServiceInputTypes, + ServiceOutputTypes + >; + new ( + __0_0: DeleteBackupCommandInput + ): import("@smithy/smithy-client").CommandImpl< + DeleteBackupCommandInput, + DeleteBackupCommandOutput, + DynamoDBClientResolvedConfig, + ServiceInputTypes, + ServiceOutputTypes + >; + getEndpointParameterInstructions(): import("@smithy/middleware-endpoint").EndpointParameterInstructions; +}; +export declare class DeleteBackupCommand extends DeleteBackupCommand_base { + protected static __types: { + api: { + input: DeleteBackupInput; + output: DeleteBackupOutput; + }; + sdk: { + input: DeleteBackupCommandInput; + output: DeleteBackupCommandOutput; + }; + }; +} diff --git 
a/amplify/functions/deleteDocument/node_modules/@aws-sdk/client-dynamodb/dist-types/ts3.4/commands/DeleteItemCommand.d.ts b/amplify/functions/deleteDocument/node_modules/@aws-sdk/client-dynamodb/dist-types/ts3.4/commands/DeleteItemCommand.d.ts new file mode 100644 index 0000000..de7976c --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@aws-sdk/client-dynamodb/dist-types/ts3.4/commands/DeleteItemCommand.d.ts @@ -0,0 +1,47 @@ +import { Command as $Command } from "@smithy/smithy-client"; +import { MetadataBearer as __MetadataBearer } from "@smithy/types"; +import { + DynamoDBClientResolvedConfig, + ServiceInputTypes, + ServiceOutputTypes, +} from "../DynamoDBClient"; +import { DeleteItemInput, DeleteItemOutput } from "../models/models_0"; +export { __MetadataBearer }; +export { $Command }; +export interface DeleteItemCommandInput extends DeleteItemInput {} +export interface DeleteItemCommandOutput + extends DeleteItemOutput, + __MetadataBearer {} +declare const DeleteItemCommand_base: { + new ( + input: DeleteItemCommandInput + ): import("@smithy/smithy-client").CommandImpl< + DeleteItemCommandInput, + DeleteItemCommandOutput, + DynamoDBClientResolvedConfig, + ServiceInputTypes, + ServiceOutputTypes + >; + new ( + __0_0: DeleteItemCommandInput + ): import("@smithy/smithy-client").CommandImpl< + DeleteItemCommandInput, + DeleteItemCommandOutput, + DynamoDBClientResolvedConfig, + ServiceInputTypes, + ServiceOutputTypes + >; + getEndpointParameterInstructions(): import("@smithy/middleware-endpoint").EndpointParameterInstructions; +}; +export declare class DeleteItemCommand extends DeleteItemCommand_base { + protected static __types: { + api: { + input: DeleteItemInput; + output: DeleteItemOutput; + }; + sdk: { + input: DeleteItemCommandInput; + output: DeleteItemCommandOutput; + }; + }; +} diff --git a/amplify/functions/deleteDocument/node_modules/@aws-sdk/client-dynamodb/dist-types/ts3.4/commands/DeleteResourcePolicyCommand.d.ts 
b/amplify/functions/deleteDocument/node_modules/@aws-sdk/client-dynamodb/dist-types/ts3.4/commands/DeleteResourcePolicyCommand.d.ts new file mode 100644 index 0000000..4aad4b7 --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@aws-sdk/client-dynamodb/dist-types/ts3.4/commands/DeleteResourcePolicyCommand.d.ts @@ -0,0 +1,51 @@ +import { Command as $Command } from "@smithy/smithy-client"; +import { MetadataBearer as __MetadataBearer } from "@smithy/types"; +import { + DynamoDBClientResolvedConfig, + ServiceInputTypes, + ServiceOutputTypes, +} from "../DynamoDBClient"; +import { + DeleteResourcePolicyInput, + DeleteResourcePolicyOutput, +} from "../models/models_0"; +export { __MetadataBearer }; +export { $Command }; +export interface DeleteResourcePolicyCommandInput + extends DeleteResourcePolicyInput {} +export interface DeleteResourcePolicyCommandOutput + extends DeleteResourcePolicyOutput, + __MetadataBearer {} +declare const DeleteResourcePolicyCommand_base: { + new ( + input: DeleteResourcePolicyCommandInput + ): import("@smithy/smithy-client").CommandImpl< + DeleteResourcePolicyCommandInput, + DeleteResourcePolicyCommandOutput, + DynamoDBClientResolvedConfig, + ServiceInputTypes, + ServiceOutputTypes + >; + new ( + __0_0: DeleteResourcePolicyCommandInput + ): import("@smithy/smithy-client").CommandImpl< + DeleteResourcePolicyCommandInput, + DeleteResourcePolicyCommandOutput, + DynamoDBClientResolvedConfig, + ServiceInputTypes, + ServiceOutputTypes + >; + getEndpointParameterInstructions(): import("@smithy/middleware-endpoint").EndpointParameterInstructions; +}; +export declare class DeleteResourcePolicyCommand extends DeleteResourcePolicyCommand_base { + protected static __types: { + api: { + input: DeleteResourcePolicyInput; + output: DeleteResourcePolicyOutput; + }; + sdk: { + input: DeleteResourcePolicyCommandInput; + output: DeleteResourcePolicyCommandOutput; + }; + }; +} diff --git 
a/amplify/functions/deleteDocument/node_modules/@aws-sdk/client-dynamodb/dist-types/ts3.4/commands/DeleteTableCommand.d.ts b/amplify/functions/deleteDocument/node_modules/@aws-sdk/client-dynamodb/dist-types/ts3.4/commands/DeleteTableCommand.d.ts new file mode 100644 index 0000000..5dc9c2b --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@aws-sdk/client-dynamodb/dist-types/ts3.4/commands/DeleteTableCommand.d.ts @@ -0,0 +1,47 @@ +import { Command as $Command } from "@smithy/smithy-client"; +import { MetadataBearer as __MetadataBearer } from "@smithy/types"; +import { + DynamoDBClientResolvedConfig, + ServiceInputTypes, + ServiceOutputTypes, +} from "../DynamoDBClient"; +import { DeleteTableInput, DeleteTableOutput } from "../models/models_0"; +export { __MetadataBearer }; +export { $Command }; +export interface DeleteTableCommandInput extends DeleteTableInput {} +export interface DeleteTableCommandOutput + extends DeleteTableOutput, + __MetadataBearer {} +declare const DeleteTableCommand_base: { + new ( + input: DeleteTableCommandInput + ): import("@smithy/smithy-client").CommandImpl< + DeleteTableCommandInput, + DeleteTableCommandOutput, + DynamoDBClientResolvedConfig, + ServiceInputTypes, + ServiceOutputTypes + >; + new ( + __0_0: DeleteTableCommandInput + ): import("@smithy/smithy-client").CommandImpl< + DeleteTableCommandInput, + DeleteTableCommandOutput, + DynamoDBClientResolvedConfig, + ServiceInputTypes, + ServiceOutputTypes + >; + getEndpointParameterInstructions(): import("@smithy/middleware-endpoint").EndpointParameterInstructions; +}; +export declare class DeleteTableCommand extends DeleteTableCommand_base { + protected static __types: { + api: { + input: DeleteTableInput; + output: DeleteTableOutput; + }; + sdk: { + input: DeleteTableCommandInput; + output: DeleteTableCommandOutput; + }; + }; +} diff --git a/amplify/functions/deleteDocument/node_modules/@aws-sdk/client-dynamodb/dist-types/ts3.4/commands/DescribeBackupCommand.d.ts 
b/amplify/functions/deleteDocument/node_modules/@aws-sdk/client-dynamodb/dist-types/ts3.4/commands/DescribeBackupCommand.d.ts new file mode 100644 index 0000000..e8a3f6f --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@aws-sdk/client-dynamodb/dist-types/ts3.4/commands/DescribeBackupCommand.d.ts @@ -0,0 +1,47 @@ +import { Command as $Command } from "@smithy/smithy-client"; +import { MetadataBearer as __MetadataBearer } from "@smithy/types"; +import { + DynamoDBClientResolvedConfig, + ServiceInputTypes, + ServiceOutputTypes, +} from "../DynamoDBClient"; +import { DescribeBackupInput, DescribeBackupOutput } from "../models/models_0"; +export { __MetadataBearer }; +export { $Command }; +export interface DescribeBackupCommandInput extends DescribeBackupInput {} +export interface DescribeBackupCommandOutput + extends DescribeBackupOutput, + __MetadataBearer {} +declare const DescribeBackupCommand_base: { + new ( + input: DescribeBackupCommandInput + ): import("@smithy/smithy-client").CommandImpl< + DescribeBackupCommandInput, + DescribeBackupCommandOutput, + DynamoDBClientResolvedConfig, + ServiceInputTypes, + ServiceOutputTypes + >; + new ( + __0_0: DescribeBackupCommandInput + ): import("@smithy/smithy-client").CommandImpl< + DescribeBackupCommandInput, + DescribeBackupCommandOutput, + DynamoDBClientResolvedConfig, + ServiceInputTypes, + ServiceOutputTypes + >; + getEndpointParameterInstructions(): import("@smithy/middleware-endpoint").EndpointParameterInstructions; +}; +export declare class DescribeBackupCommand extends DescribeBackupCommand_base { + protected static __types: { + api: { + input: DescribeBackupInput; + output: DescribeBackupOutput; + }; + sdk: { + input: DescribeBackupCommandInput; + output: DescribeBackupCommandOutput; + }; + }; +} diff --git a/amplify/functions/deleteDocument/node_modules/@aws-sdk/client-dynamodb/dist-types/ts3.4/commands/DescribeContinuousBackupsCommand.d.ts 
b/amplify/functions/deleteDocument/node_modules/@aws-sdk/client-dynamodb/dist-types/ts3.4/commands/DescribeContinuousBackupsCommand.d.ts new file mode 100644 index 0000000..4bcc737 --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@aws-sdk/client-dynamodb/dist-types/ts3.4/commands/DescribeContinuousBackupsCommand.d.ts @@ -0,0 +1,51 @@ +import { Command as $Command } from "@smithy/smithy-client"; +import { MetadataBearer as __MetadataBearer } from "@smithy/types"; +import { + DynamoDBClientResolvedConfig, + ServiceInputTypes, + ServiceOutputTypes, +} from "../DynamoDBClient"; +import { + DescribeContinuousBackupsInput, + DescribeContinuousBackupsOutput, +} from "../models/models_0"; +export { __MetadataBearer }; +export { $Command }; +export interface DescribeContinuousBackupsCommandInput + extends DescribeContinuousBackupsInput {} +export interface DescribeContinuousBackupsCommandOutput + extends DescribeContinuousBackupsOutput, + __MetadataBearer {} +declare const DescribeContinuousBackupsCommand_base: { + new ( + input: DescribeContinuousBackupsCommandInput + ): import("@smithy/smithy-client").CommandImpl< + DescribeContinuousBackupsCommandInput, + DescribeContinuousBackupsCommandOutput, + DynamoDBClientResolvedConfig, + ServiceInputTypes, + ServiceOutputTypes + >; + new ( + __0_0: DescribeContinuousBackupsCommandInput + ): import("@smithy/smithy-client").CommandImpl< + DescribeContinuousBackupsCommandInput, + DescribeContinuousBackupsCommandOutput, + DynamoDBClientResolvedConfig, + ServiceInputTypes, + ServiceOutputTypes + >; + getEndpointParameterInstructions(): import("@smithy/middleware-endpoint").EndpointParameterInstructions; +}; +export declare class DescribeContinuousBackupsCommand extends DescribeContinuousBackupsCommand_base { + protected static __types: { + api: { + input: DescribeContinuousBackupsInput; + output: DescribeContinuousBackupsOutput; + }; + sdk: { + input: DescribeContinuousBackupsCommandInput; + output: 
DescribeContinuousBackupsCommandOutput; + }; + }; +} diff --git a/amplify/functions/deleteDocument/node_modules/@aws-sdk/client-dynamodb/dist-types/ts3.4/commands/DescribeContributorInsightsCommand.d.ts b/amplify/functions/deleteDocument/node_modules/@aws-sdk/client-dynamodb/dist-types/ts3.4/commands/DescribeContributorInsightsCommand.d.ts new file mode 100644 index 0000000..09a11d9 --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@aws-sdk/client-dynamodb/dist-types/ts3.4/commands/DescribeContributorInsightsCommand.d.ts @@ -0,0 +1,51 @@ +import { Command as $Command } from "@smithy/smithy-client"; +import { MetadataBearer as __MetadataBearer } from "@smithy/types"; +import { + DynamoDBClientResolvedConfig, + ServiceInputTypes, + ServiceOutputTypes, +} from "../DynamoDBClient"; +import { + DescribeContributorInsightsInput, + DescribeContributorInsightsOutput, +} from "../models/models_0"; +export { __MetadataBearer }; +export { $Command }; +export interface DescribeContributorInsightsCommandInput + extends DescribeContributorInsightsInput {} +export interface DescribeContributorInsightsCommandOutput + extends DescribeContributorInsightsOutput, + __MetadataBearer {} +declare const DescribeContributorInsightsCommand_base: { + new ( + input: DescribeContributorInsightsCommandInput + ): import("@smithy/smithy-client").CommandImpl< + DescribeContributorInsightsCommandInput, + DescribeContributorInsightsCommandOutput, + DynamoDBClientResolvedConfig, + ServiceInputTypes, + ServiceOutputTypes + >; + new ( + __0_0: DescribeContributorInsightsCommandInput + ): import("@smithy/smithy-client").CommandImpl< + DescribeContributorInsightsCommandInput, + DescribeContributorInsightsCommandOutput, + DynamoDBClientResolvedConfig, + ServiceInputTypes, + ServiceOutputTypes + >; + getEndpointParameterInstructions(): import("@smithy/middleware-endpoint").EndpointParameterInstructions; +}; +export declare class DescribeContributorInsightsCommand extends 
DescribeContributorInsightsCommand_base { + protected static __types: { + api: { + input: DescribeContributorInsightsInput; + output: DescribeContributorInsightsOutput; + }; + sdk: { + input: DescribeContributorInsightsCommandInput; + output: DescribeContributorInsightsCommandOutput; + }; + }; +} diff --git a/amplify/functions/deleteDocument/node_modules/@aws-sdk/client-dynamodb/dist-types/ts3.4/commands/DescribeEndpointsCommand.d.ts b/amplify/functions/deleteDocument/node_modules/@aws-sdk/client-dynamodb/dist-types/ts3.4/commands/DescribeEndpointsCommand.d.ts new file mode 100644 index 0000000..2f88f7e --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@aws-sdk/client-dynamodb/dist-types/ts3.4/commands/DescribeEndpointsCommand.d.ts @@ -0,0 +1,51 @@ +import { Command as $Command } from "@smithy/smithy-client"; +import { MetadataBearer as __MetadataBearer } from "@smithy/types"; +import { + DynamoDBClientResolvedConfig, + ServiceInputTypes, + ServiceOutputTypes, +} from "../DynamoDBClient"; +import { + DescribeEndpointsRequest, + DescribeEndpointsResponse, +} from "../models/models_0"; +export { __MetadataBearer }; +export { $Command }; +export interface DescribeEndpointsCommandInput + extends DescribeEndpointsRequest {} +export interface DescribeEndpointsCommandOutput + extends DescribeEndpointsResponse, + __MetadataBearer {} +declare const DescribeEndpointsCommand_base: { + new ( + input: DescribeEndpointsCommandInput + ): import("@smithy/smithy-client").CommandImpl< + DescribeEndpointsCommandInput, + DescribeEndpointsCommandOutput, + DynamoDBClientResolvedConfig, + ServiceInputTypes, + ServiceOutputTypes + >; + new ( + ...[input]: [] | [DescribeEndpointsCommandInput] + ): import("@smithy/smithy-client").CommandImpl< + DescribeEndpointsCommandInput, + DescribeEndpointsCommandOutput, + DynamoDBClientResolvedConfig, + ServiceInputTypes, + ServiceOutputTypes + >; + getEndpointParameterInstructions(): 
import("@smithy/middleware-endpoint").EndpointParameterInstructions; +}; +export declare class DescribeEndpointsCommand extends DescribeEndpointsCommand_base { + protected static __types: { + api: { + input: {}; + output: DescribeEndpointsResponse; + }; + sdk: { + input: DescribeEndpointsCommandInput; + output: DescribeEndpointsCommandOutput; + }; + }; +} diff --git a/amplify/functions/deleteDocument/node_modules/@aws-sdk/client-dynamodb/dist-types/ts3.4/commands/DescribeExportCommand.d.ts b/amplify/functions/deleteDocument/node_modules/@aws-sdk/client-dynamodb/dist-types/ts3.4/commands/DescribeExportCommand.d.ts new file mode 100644 index 0000000..81e570f --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@aws-sdk/client-dynamodb/dist-types/ts3.4/commands/DescribeExportCommand.d.ts @@ -0,0 +1,47 @@ +import { Command as $Command } from "@smithy/smithy-client"; +import { MetadataBearer as __MetadataBearer } from "@smithy/types"; +import { + DynamoDBClientResolvedConfig, + ServiceInputTypes, + ServiceOutputTypes, +} from "../DynamoDBClient"; +import { DescribeExportInput, DescribeExportOutput } from "../models/models_0"; +export { __MetadataBearer }; +export { $Command }; +export interface DescribeExportCommandInput extends DescribeExportInput {} +export interface DescribeExportCommandOutput + extends DescribeExportOutput, + __MetadataBearer {} +declare const DescribeExportCommand_base: { + new ( + input: DescribeExportCommandInput + ): import("@smithy/smithy-client").CommandImpl< + DescribeExportCommandInput, + DescribeExportCommandOutput, + DynamoDBClientResolvedConfig, + ServiceInputTypes, + ServiceOutputTypes + >; + new ( + __0_0: DescribeExportCommandInput + ): import("@smithy/smithy-client").CommandImpl< + DescribeExportCommandInput, + DescribeExportCommandOutput, + DynamoDBClientResolvedConfig, + ServiceInputTypes, + ServiceOutputTypes + >; + getEndpointParameterInstructions(): import("@smithy/middleware-endpoint").EndpointParameterInstructions; 
+}; +export declare class DescribeExportCommand extends DescribeExportCommand_base { + protected static __types: { + api: { + input: DescribeExportInput; + output: DescribeExportOutput; + }; + sdk: { + input: DescribeExportCommandInput; + output: DescribeExportCommandOutput; + }; + }; +} diff --git a/amplify/functions/deleteDocument/node_modules/@aws-sdk/client-dynamodb/dist-types/ts3.4/commands/DescribeGlobalTableCommand.d.ts b/amplify/functions/deleteDocument/node_modules/@aws-sdk/client-dynamodb/dist-types/ts3.4/commands/DescribeGlobalTableCommand.d.ts new file mode 100644 index 0000000..55ef067 --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@aws-sdk/client-dynamodb/dist-types/ts3.4/commands/DescribeGlobalTableCommand.d.ts @@ -0,0 +1,51 @@ +import { Command as $Command } from "@smithy/smithy-client"; +import { MetadataBearer as __MetadataBearer } from "@smithy/types"; +import { + DynamoDBClientResolvedConfig, + ServiceInputTypes, + ServiceOutputTypes, +} from "../DynamoDBClient"; +import { + DescribeGlobalTableInput, + DescribeGlobalTableOutput, +} from "../models/models_0"; +export { __MetadataBearer }; +export { $Command }; +export interface DescribeGlobalTableCommandInput + extends DescribeGlobalTableInput {} +export interface DescribeGlobalTableCommandOutput + extends DescribeGlobalTableOutput, + __MetadataBearer {} +declare const DescribeGlobalTableCommand_base: { + new ( + input: DescribeGlobalTableCommandInput + ): import("@smithy/smithy-client").CommandImpl< + DescribeGlobalTableCommandInput, + DescribeGlobalTableCommandOutput, + DynamoDBClientResolvedConfig, + ServiceInputTypes, + ServiceOutputTypes + >; + new ( + __0_0: DescribeGlobalTableCommandInput + ): import("@smithy/smithy-client").CommandImpl< + DescribeGlobalTableCommandInput, + DescribeGlobalTableCommandOutput, + DynamoDBClientResolvedConfig, + ServiceInputTypes, + ServiceOutputTypes + >; + getEndpointParameterInstructions(): 
import("@smithy/middleware-endpoint").EndpointParameterInstructions; +}; +export declare class DescribeGlobalTableCommand extends DescribeGlobalTableCommand_base { + protected static __types: { + api: { + input: DescribeGlobalTableInput; + output: DescribeGlobalTableOutput; + }; + sdk: { + input: DescribeGlobalTableCommandInput; + output: DescribeGlobalTableCommandOutput; + }; + }; +} diff --git a/amplify/functions/deleteDocument/node_modules/@aws-sdk/client-dynamodb/dist-types/ts3.4/commands/DescribeGlobalTableSettingsCommand.d.ts b/amplify/functions/deleteDocument/node_modules/@aws-sdk/client-dynamodb/dist-types/ts3.4/commands/DescribeGlobalTableSettingsCommand.d.ts new file mode 100644 index 0000000..7cf5373 --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@aws-sdk/client-dynamodb/dist-types/ts3.4/commands/DescribeGlobalTableSettingsCommand.d.ts @@ -0,0 +1,51 @@ +import { Command as $Command } from "@smithy/smithy-client"; +import { MetadataBearer as __MetadataBearer } from "@smithy/types"; +import { + DynamoDBClientResolvedConfig, + ServiceInputTypes, + ServiceOutputTypes, +} from "../DynamoDBClient"; +import { + DescribeGlobalTableSettingsInput, + DescribeGlobalTableSettingsOutput, +} from "../models/models_0"; +export { __MetadataBearer }; +export { $Command }; +export interface DescribeGlobalTableSettingsCommandInput + extends DescribeGlobalTableSettingsInput {} +export interface DescribeGlobalTableSettingsCommandOutput + extends DescribeGlobalTableSettingsOutput, + __MetadataBearer {} +declare const DescribeGlobalTableSettingsCommand_base: { + new ( + input: DescribeGlobalTableSettingsCommandInput + ): import("@smithy/smithy-client").CommandImpl< + DescribeGlobalTableSettingsCommandInput, + DescribeGlobalTableSettingsCommandOutput, + DynamoDBClientResolvedConfig, + ServiceInputTypes, + ServiceOutputTypes + >; + new ( + __0_0: DescribeGlobalTableSettingsCommandInput + ): import("@smithy/smithy-client").CommandImpl< + 
DescribeGlobalTableSettingsCommandInput, + DescribeGlobalTableSettingsCommandOutput, + DynamoDBClientResolvedConfig, + ServiceInputTypes, + ServiceOutputTypes + >; + getEndpointParameterInstructions(): import("@smithy/middleware-endpoint").EndpointParameterInstructions; +}; +export declare class DescribeGlobalTableSettingsCommand extends DescribeGlobalTableSettingsCommand_base { + protected static __types: { + api: { + input: DescribeGlobalTableSettingsInput; + output: DescribeGlobalTableSettingsOutput; + }; + sdk: { + input: DescribeGlobalTableSettingsCommandInput; + output: DescribeGlobalTableSettingsCommandOutput; + }; + }; +} diff --git a/amplify/functions/deleteDocument/node_modules/@aws-sdk/client-dynamodb/dist-types/ts3.4/commands/DescribeImportCommand.d.ts b/amplify/functions/deleteDocument/node_modules/@aws-sdk/client-dynamodb/dist-types/ts3.4/commands/DescribeImportCommand.d.ts new file mode 100644 index 0000000..eeaa9b9 --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@aws-sdk/client-dynamodb/dist-types/ts3.4/commands/DescribeImportCommand.d.ts @@ -0,0 +1,47 @@ +import { Command as $Command } from "@smithy/smithy-client"; +import { MetadataBearer as __MetadataBearer } from "@smithy/types"; +import { + DynamoDBClientResolvedConfig, + ServiceInputTypes, + ServiceOutputTypes, +} from "../DynamoDBClient"; +import { DescribeImportInput, DescribeImportOutput } from "../models/models_0"; +export { __MetadataBearer }; +export { $Command }; +export interface DescribeImportCommandInput extends DescribeImportInput {} +export interface DescribeImportCommandOutput + extends DescribeImportOutput, + __MetadataBearer {} +declare const DescribeImportCommand_base: { + new ( + input: DescribeImportCommandInput + ): import("@smithy/smithy-client").CommandImpl< + DescribeImportCommandInput, + DescribeImportCommandOutput, + DynamoDBClientResolvedConfig, + ServiceInputTypes, + ServiceOutputTypes + >; + new ( + __0_0: DescribeImportCommandInput + ): 
import("@smithy/smithy-client").CommandImpl< + DescribeImportCommandInput, + DescribeImportCommandOutput, + DynamoDBClientResolvedConfig, + ServiceInputTypes, + ServiceOutputTypes + >; + getEndpointParameterInstructions(): import("@smithy/middleware-endpoint").EndpointParameterInstructions; +}; +export declare class DescribeImportCommand extends DescribeImportCommand_base { + protected static __types: { + api: { + input: DescribeImportInput; + output: DescribeImportOutput; + }; + sdk: { + input: DescribeImportCommandInput; + output: DescribeImportCommandOutput; + }; + }; +} diff --git a/amplify/functions/deleteDocument/node_modules/@aws-sdk/client-dynamodb/dist-types/ts3.4/commands/DescribeKinesisStreamingDestinationCommand.d.ts b/amplify/functions/deleteDocument/node_modules/@aws-sdk/client-dynamodb/dist-types/ts3.4/commands/DescribeKinesisStreamingDestinationCommand.d.ts new file mode 100644 index 0000000..9801ad1 --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@aws-sdk/client-dynamodb/dist-types/ts3.4/commands/DescribeKinesisStreamingDestinationCommand.d.ts @@ -0,0 +1,51 @@ +import { Command as $Command } from "@smithy/smithy-client"; +import { MetadataBearer as __MetadataBearer } from "@smithy/types"; +import { + DynamoDBClientResolvedConfig, + ServiceInputTypes, + ServiceOutputTypes, +} from "../DynamoDBClient"; +import { + DescribeKinesisStreamingDestinationInput, + DescribeKinesisStreamingDestinationOutput, +} from "../models/models_0"; +export { __MetadataBearer }; +export { $Command }; +export interface DescribeKinesisStreamingDestinationCommandInput + extends DescribeKinesisStreamingDestinationInput {} +export interface DescribeKinesisStreamingDestinationCommandOutput + extends DescribeKinesisStreamingDestinationOutput, + __MetadataBearer {} +declare const DescribeKinesisStreamingDestinationCommand_base: { + new ( + input: DescribeKinesisStreamingDestinationCommandInput + ): import("@smithy/smithy-client").CommandImpl< + 
DescribeKinesisStreamingDestinationCommandInput, + DescribeKinesisStreamingDestinationCommandOutput, + DynamoDBClientResolvedConfig, + ServiceInputTypes, + ServiceOutputTypes + >; + new ( + __0_0: DescribeKinesisStreamingDestinationCommandInput + ): import("@smithy/smithy-client").CommandImpl< + DescribeKinesisStreamingDestinationCommandInput, + DescribeKinesisStreamingDestinationCommandOutput, + DynamoDBClientResolvedConfig, + ServiceInputTypes, + ServiceOutputTypes + >; + getEndpointParameterInstructions(): import("@smithy/middleware-endpoint").EndpointParameterInstructions; +}; +export declare class DescribeKinesisStreamingDestinationCommand extends DescribeKinesisStreamingDestinationCommand_base { + protected static __types: { + api: { + input: DescribeKinesisStreamingDestinationInput; + output: DescribeKinesisStreamingDestinationOutput; + }; + sdk: { + input: DescribeKinesisStreamingDestinationCommandInput; + output: DescribeKinesisStreamingDestinationCommandOutput; + }; + }; +} diff --git a/amplify/functions/deleteDocument/node_modules/@aws-sdk/client-dynamodb/dist-types/ts3.4/commands/DescribeLimitsCommand.d.ts b/amplify/functions/deleteDocument/node_modules/@aws-sdk/client-dynamodb/dist-types/ts3.4/commands/DescribeLimitsCommand.d.ts new file mode 100644 index 0000000..52ce46b --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@aws-sdk/client-dynamodb/dist-types/ts3.4/commands/DescribeLimitsCommand.d.ts @@ -0,0 +1,47 @@ +import { Command as $Command } from "@smithy/smithy-client"; +import { MetadataBearer as __MetadataBearer } from "@smithy/types"; +import { + DynamoDBClientResolvedConfig, + ServiceInputTypes, + ServiceOutputTypes, +} from "../DynamoDBClient"; +import { DescribeLimitsInput, DescribeLimitsOutput } from "../models/models_0"; +export { __MetadataBearer }; +export { $Command }; +export interface DescribeLimitsCommandInput extends DescribeLimitsInput {} +export interface DescribeLimitsCommandOutput + extends DescribeLimitsOutput, 
+ __MetadataBearer {} +declare const DescribeLimitsCommand_base: { + new ( + input: DescribeLimitsCommandInput + ): import("@smithy/smithy-client").CommandImpl< + DescribeLimitsCommandInput, + DescribeLimitsCommandOutput, + DynamoDBClientResolvedConfig, + ServiceInputTypes, + ServiceOutputTypes + >; + new ( + ...[input]: [] | [DescribeLimitsCommandInput] + ): import("@smithy/smithy-client").CommandImpl< + DescribeLimitsCommandInput, + DescribeLimitsCommandOutput, + DynamoDBClientResolvedConfig, + ServiceInputTypes, + ServiceOutputTypes + >; + getEndpointParameterInstructions(): import("@smithy/middleware-endpoint").EndpointParameterInstructions; +}; +export declare class DescribeLimitsCommand extends DescribeLimitsCommand_base { + protected static __types: { + api: { + input: {}; + output: DescribeLimitsOutput; + }; + sdk: { + input: DescribeLimitsCommandInput; + output: DescribeLimitsCommandOutput; + }; + }; +} diff --git a/amplify/functions/deleteDocument/node_modules/@aws-sdk/client-dynamodb/dist-types/ts3.4/commands/DescribeTableCommand.d.ts b/amplify/functions/deleteDocument/node_modules/@aws-sdk/client-dynamodb/dist-types/ts3.4/commands/DescribeTableCommand.d.ts new file mode 100644 index 0000000..d38362c --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@aws-sdk/client-dynamodb/dist-types/ts3.4/commands/DescribeTableCommand.d.ts @@ -0,0 +1,47 @@ +import { Command as $Command } from "@smithy/smithy-client"; +import { MetadataBearer as __MetadataBearer } from "@smithy/types"; +import { + DynamoDBClientResolvedConfig, + ServiceInputTypes, + ServiceOutputTypes, +} from "../DynamoDBClient"; +import { DescribeTableInput, DescribeTableOutput } from "../models/models_0"; +export { __MetadataBearer }; +export { $Command }; +export interface DescribeTableCommandInput extends DescribeTableInput {} +export interface DescribeTableCommandOutput + extends DescribeTableOutput, + __MetadataBearer {} +declare const DescribeTableCommand_base: { + new ( + input: 
DescribeTableCommandInput + ): import("@smithy/smithy-client").CommandImpl< + DescribeTableCommandInput, + DescribeTableCommandOutput, + DynamoDBClientResolvedConfig, + ServiceInputTypes, + ServiceOutputTypes + >; + new ( + __0_0: DescribeTableCommandInput + ): import("@smithy/smithy-client").CommandImpl< + DescribeTableCommandInput, + DescribeTableCommandOutput, + DynamoDBClientResolvedConfig, + ServiceInputTypes, + ServiceOutputTypes + >; + getEndpointParameterInstructions(): import("@smithy/middleware-endpoint").EndpointParameterInstructions; +}; +export declare class DescribeTableCommand extends DescribeTableCommand_base { + protected static __types: { + api: { + input: DescribeTableInput; + output: DescribeTableOutput; + }; + sdk: { + input: DescribeTableCommandInput; + output: DescribeTableCommandOutput; + }; + }; +} diff --git a/amplify/functions/deleteDocument/node_modules/@aws-sdk/client-dynamodb/dist-types/ts3.4/commands/DescribeTableReplicaAutoScalingCommand.d.ts b/amplify/functions/deleteDocument/node_modules/@aws-sdk/client-dynamodb/dist-types/ts3.4/commands/DescribeTableReplicaAutoScalingCommand.d.ts new file mode 100644 index 0000000..07328ed --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@aws-sdk/client-dynamodb/dist-types/ts3.4/commands/DescribeTableReplicaAutoScalingCommand.d.ts @@ -0,0 +1,51 @@ +import { Command as $Command } from "@smithy/smithy-client"; +import { MetadataBearer as __MetadataBearer } from "@smithy/types"; +import { + DynamoDBClientResolvedConfig, + ServiceInputTypes, + ServiceOutputTypes, +} from "../DynamoDBClient"; +import { + DescribeTableReplicaAutoScalingInput, + DescribeTableReplicaAutoScalingOutput, +} from "../models/models_0"; +export { __MetadataBearer }; +export { $Command }; +export interface DescribeTableReplicaAutoScalingCommandInput + extends DescribeTableReplicaAutoScalingInput {} +export interface DescribeTableReplicaAutoScalingCommandOutput + extends DescribeTableReplicaAutoScalingOutput, + 
__MetadataBearer {} +declare const DescribeTableReplicaAutoScalingCommand_base: { + new ( + input: DescribeTableReplicaAutoScalingCommandInput + ): import("@smithy/smithy-client").CommandImpl< + DescribeTableReplicaAutoScalingCommandInput, + DescribeTableReplicaAutoScalingCommandOutput, + DynamoDBClientResolvedConfig, + ServiceInputTypes, + ServiceOutputTypes + >; + new ( + __0_0: DescribeTableReplicaAutoScalingCommandInput + ): import("@smithy/smithy-client").CommandImpl< + DescribeTableReplicaAutoScalingCommandInput, + DescribeTableReplicaAutoScalingCommandOutput, + DynamoDBClientResolvedConfig, + ServiceInputTypes, + ServiceOutputTypes + >; + getEndpointParameterInstructions(): import("@smithy/middleware-endpoint").EndpointParameterInstructions; +}; +export declare class DescribeTableReplicaAutoScalingCommand extends DescribeTableReplicaAutoScalingCommand_base { + protected static __types: { + api: { + input: DescribeTableReplicaAutoScalingInput; + output: DescribeTableReplicaAutoScalingOutput; + }; + sdk: { + input: DescribeTableReplicaAutoScalingCommandInput; + output: DescribeTableReplicaAutoScalingCommandOutput; + }; + }; +} diff --git a/amplify/functions/deleteDocument/node_modules/@aws-sdk/client-dynamodb/dist-types/ts3.4/commands/DescribeTimeToLiveCommand.d.ts b/amplify/functions/deleteDocument/node_modules/@aws-sdk/client-dynamodb/dist-types/ts3.4/commands/DescribeTimeToLiveCommand.d.ts new file mode 100644 index 0000000..7f8588f --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@aws-sdk/client-dynamodb/dist-types/ts3.4/commands/DescribeTimeToLiveCommand.d.ts @@ -0,0 +1,51 @@ +import { Command as $Command } from "@smithy/smithy-client"; +import { MetadataBearer as __MetadataBearer } from "@smithy/types"; +import { + DynamoDBClientResolvedConfig, + ServiceInputTypes, + ServiceOutputTypes, +} from "../DynamoDBClient"; +import { + DescribeTimeToLiveInput, + DescribeTimeToLiveOutput, +} from "../models/models_0"; +export { __MetadataBearer }; 
+export { $Command }; +export interface DescribeTimeToLiveCommandInput + extends DescribeTimeToLiveInput {} +export interface DescribeTimeToLiveCommandOutput + extends DescribeTimeToLiveOutput, + __MetadataBearer {} +declare const DescribeTimeToLiveCommand_base: { + new ( + input: DescribeTimeToLiveCommandInput + ): import("@smithy/smithy-client").CommandImpl< + DescribeTimeToLiveCommandInput, + DescribeTimeToLiveCommandOutput, + DynamoDBClientResolvedConfig, + ServiceInputTypes, + ServiceOutputTypes + >; + new ( + __0_0: DescribeTimeToLiveCommandInput + ): import("@smithy/smithy-client").CommandImpl< + DescribeTimeToLiveCommandInput, + DescribeTimeToLiveCommandOutput, + DynamoDBClientResolvedConfig, + ServiceInputTypes, + ServiceOutputTypes + >; + getEndpointParameterInstructions(): import("@smithy/middleware-endpoint").EndpointParameterInstructions; +}; +export declare class DescribeTimeToLiveCommand extends DescribeTimeToLiveCommand_base { + protected static __types: { + api: { + input: DescribeTimeToLiveInput; + output: DescribeTimeToLiveOutput; + }; + sdk: { + input: DescribeTimeToLiveCommandInput; + output: DescribeTimeToLiveCommandOutput; + }; + }; +} diff --git a/amplify/functions/deleteDocument/node_modules/@aws-sdk/client-dynamodb/dist-types/ts3.4/commands/DisableKinesisStreamingDestinationCommand.d.ts b/amplify/functions/deleteDocument/node_modules/@aws-sdk/client-dynamodb/dist-types/ts3.4/commands/DisableKinesisStreamingDestinationCommand.d.ts new file mode 100644 index 0000000..22257f0 --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@aws-sdk/client-dynamodb/dist-types/ts3.4/commands/DisableKinesisStreamingDestinationCommand.d.ts @@ -0,0 +1,51 @@ +import { Command as $Command } from "@smithy/smithy-client"; +import { MetadataBearer as __MetadataBearer } from "@smithy/types"; +import { + DynamoDBClientResolvedConfig, + ServiceInputTypes, + ServiceOutputTypes, +} from "../DynamoDBClient"; +import { + KinesisStreamingDestinationInput, + 
KinesisStreamingDestinationOutput, +} from "../models/models_0"; +export { __MetadataBearer }; +export { $Command }; +export interface DisableKinesisStreamingDestinationCommandInput + extends KinesisStreamingDestinationInput {} +export interface DisableKinesisStreamingDestinationCommandOutput + extends KinesisStreamingDestinationOutput, + __MetadataBearer {} +declare const DisableKinesisStreamingDestinationCommand_base: { + new ( + input: DisableKinesisStreamingDestinationCommandInput + ): import("@smithy/smithy-client").CommandImpl< + DisableKinesisStreamingDestinationCommandInput, + DisableKinesisStreamingDestinationCommandOutput, + DynamoDBClientResolvedConfig, + ServiceInputTypes, + ServiceOutputTypes + >; + new ( + __0_0: DisableKinesisStreamingDestinationCommandInput + ): import("@smithy/smithy-client").CommandImpl< + DisableKinesisStreamingDestinationCommandInput, + DisableKinesisStreamingDestinationCommandOutput, + DynamoDBClientResolvedConfig, + ServiceInputTypes, + ServiceOutputTypes + >; + getEndpointParameterInstructions(): import("@smithy/middleware-endpoint").EndpointParameterInstructions; +}; +export declare class DisableKinesisStreamingDestinationCommand extends DisableKinesisStreamingDestinationCommand_base { + protected static __types: { + api: { + input: KinesisStreamingDestinationInput; + output: KinesisStreamingDestinationOutput; + }; + sdk: { + input: DisableKinesisStreamingDestinationCommandInput; + output: DisableKinesisStreamingDestinationCommandOutput; + }; + }; +} diff --git a/amplify/functions/deleteDocument/node_modules/@aws-sdk/client-dynamodb/dist-types/ts3.4/commands/EnableKinesisStreamingDestinationCommand.d.ts b/amplify/functions/deleteDocument/node_modules/@aws-sdk/client-dynamodb/dist-types/ts3.4/commands/EnableKinesisStreamingDestinationCommand.d.ts new file mode 100644 index 0000000..5d40389 --- /dev/null +++ 
b/amplify/functions/deleteDocument/node_modules/@aws-sdk/client-dynamodb/dist-types/ts3.4/commands/EnableKinesisStreamingDestinationCommand.d.ts @@ -0,0 +1,51 @@ +import { Command as $Command } from "@smithy/smithy-client"; +import { MetadataBearer as __MetadataBearer } from "@smithy/types"; +import { + DynamoDBClientResolvedConfig, + ServiceInputTypes, + ServiceOutputTypes, +} from "../DynamoDBClient"; +import { + KinesisStreamingDestinationInput, + KinesisStreamingDestinationOutput, +} from "../models/models_0"; +export { __MetadataBearer }; +export { $Command }; +export interface EnableKinesisStreamingDestinationCommandInput + extends KinesisStreamingDestinationInput {} +export interface EnableKinesisStreamingDestinationCommandOutput + extends KinesisStreamingDestinationOutput, + __MetadataBearer {} +declare const EnableKinesisStreamingDestinationCommand_base: { + new ( + input: EnableKinesisStreamingDestinationCommandInput + ): import("@smithy/smithy-client").CommandImpl< + EnableKinesisStreamingDestinationCommandInput, + EnableKinesisStreamingDestinationCommandOutput, + DynamoDBClientResolvedConfig, + ServiceInputTypes, + ServiceOutputTypes + >; + new ( + __0_0: EnableKinesisStreamingDestinationCommandInput + ): import("@smithy/smithy-client").CommandImpl< + EnableKinesisStreamingDestinationCommandInput, + EnableKinesisStreamingDestinationCommandOutput, + DynamoDBClientResolvedConfig, + ServiceInputTypes, + ServiceOutputTypes + >; + getEndpointParameterInstructions(): import("@smithy/middleware-endpoint").EndpointParameterInstructions; +}; +export declare class EnableKinesisStreamingDestinationCommand extends EnableKinesisStreamingDestinationCommand_base { + protected static __types: { + api: { + input: KinesisStreamingDestinationInput; + output: KinesisStreamingDestinationOutput; + }; + sdk: { + input: EnableKinesisStreamingDestinationCommandInput; + output: EnableKinesisStreamingDestinationCommandOutput; + }; + }; +} diff --git 
a/amplify/functions/deleteDocument/node_modules/@aws-sdk/client-dynamodb/dist-types/ts3.4/commands/ExecuteStatementCommand.d.ts b/amplify/functions/deleteDocument/node_modules/@aws-sdk/client-dynamodb/dist-types/ts3.4/commands/ExecuteStatementCommand.d.ts new file mode 100644 index 0000000..5b73eee --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@aws-sdk/client-dynamodb/dist-types/ts3.4/commands/ExecuteStatementCommand.d.ts @@ -0,0 +1,50 @@ +import { Command as $Command } from "@smithy/smithy-client"; +import { MetadataBearer as __MetadataBearer } from "@smithy/types"; +import { + DynamoDBClientResolvedConfig, + ServiceInputTypes, + ServiceOutputTypes, +} from "../DynamoDBClient"; +import { + ExecuteStatementInput, + ExecuteStatementOutput, +} from "../models/models_0"; +export { __MetadataBearer }; +export { $Command }; +export interface ExecuteStatementCommandInput extends ExecuteStatementInput {} +export interface ExecuteStatementCommandOutput + extends ExecuteStatementOutput, + __MetadataBearer {} +declare const ExecuteStatementCommand_base: { + new ( + input: ExecuteStatementCommandInput + ): import("@smithy/smithy-client").CommandImpl< + ExecuteStatementCommandInput, + ExecuteStatementCommandOutput, + DynamoDBClientResolvedConfig, + ServiceInputTypes, + ServiceOutputTypes + >; + new ( + __0_0: ExecuteStatementCommandInput + ): import("@smithy/smithy-client").CommandImpl< + ExecuteStatementCommandInput, + ExecuteStatementCommandOutput, + DynamoDBClientResolvedConfig, + ServiceInputTypes, + ServiceOutputTypes + >; + getEndpointParameterInstructions(): import("@smithy/middleware-endpoint").EndpointParameterInstructions; +}; +export declare class ExecuteStatementCommand extends ExecuteStatementCommand_base { + protected static __types: { + api: { + input: ExecuteStatementInput; + output: ExecuteStatementOutput; + }; + sdk: { + input: ExecuteStatementCommandInput; + output: ExecuteStatementCommandOutput; + }; + }; +} diff --git 
a/amplify/functions/deleteDocument/node_modules/@aws-sdk/client-dynamodb/dist-types/ts3.4/commands/ExecuteTransactionCommand.d.ts b/amplify/functions/deleteDocument/node_modules/@aws-sdk/client-dynamodb/dist-types/ts3.4/commands/ExecuteTransactionCommand.d.ts new file mode 100644 index 0000000..2b94d7c --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@aws-sdk/client-dynamodb/dist-types/ts3.4/commands/ExecuteTransactionCommand.d.ts @@ -0,0 +1,51 @@ +import { Command as $Command } from "@smithy/smithy-client"; +import { MetadataBearer as __MetadataBearer } from "@smithy/types"; +import { + DynamoDBClientResolvedConfig, + ServiceInputTypes, + ServiceOutputTypes, +} from "../DynamoDBClient"; +import { + ExecuteTransactionInput, + ExecuteTransactionOutput, +} from "../models/models_0"; +export { __MetadataBearer }; +export { $Command }; +export interface ExecuteTransactionCommandInput + extends ExecuteTransactionInput {} +export interface ExecuteTransactionCommandOutput + extends ExecuteTransactionOutput, + __MetadataBearer {} +declare const ExecuteTransactionCommand_base: { + new ( + input: ExecuteTransactionCommandInput + ): import("@smithy/smithy-client").CommandImpl< + ExecuteTransactionCommandInput, + ExecuteTransactionCommandOutput, + DynamoDBClientResolvedConfig, + ServiceInputTypes, + ServiceOutputTypes + >; + new ( + __0_0: ExecuteTransactionCommandInput + ): import("@smithy/smithy-client").CommandImpl< + ExecuteTransactionCommandInput, + ExecuteTransactionCommandOutput, + DynamoDBClientResolvedConfig, + ServiceInputTypes, + ServiceOutputTypes + >; + getEndpointParameterInstructions(): import("@smithy/middleware-endpoint").EndpointParameterInstructions; +}; +export declare class ExecuteTransactionCommand extends ExecuteTransactionCommand_base { + protected static __types: { + api: { + input: ExecuteTransactionInput; + output: ExecuteTransactionOutput; + }; + sdk: { + input: ExecuteTransactionCommandInput; + output: 
ExecuteTransactionCommandOutput; + }; + }; +} diff --git a/amplify/functions/deleteDocument/node_modules/@aws-sdk/client-dynamodb/dist-types/ts3.4/commands/ExportTableToPointInTimeCommand.d.ts b/amplify/functions/deleteDocument/node_modules/@aws-sdk/client-dynamodb/dist-types/ts3.4/commands/ExportTableToPointInTimeCommand.d.ts new file mode 100644 index 0000000..f65fac9 --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@aws-sdk/client-dynamodb/dist-types/ts3.4/commands/ExportTableToPointInTimeCommand.d.ts @@ -0,0 +1,51 @@ +import { Command as $Command } from "@smithy/smithy-client"; +import { MetadataBearer as __MetadataBearer } from "@smithy/types"; +import { + DynamoDBClientResolvedConfig, + ServiceInputTypes, + ServiceOutputTypes, +} from "../DynamoDBClient"; +import { + ExportTableToPointInTimeInput, + ExportTableToPointInTimeOutput, +} from "../models/models_0"; +export { __MetadataBearer }; +export { $Command }; +export interface ExportTableToPointInTimeCommandInput + extends ExportTableToPointInTimeInput {} +export interface ExportTableToPointInTimeCommandOutput + extends ExportTableToPointInTimeOutput, + __MetadataBearer {} +declare const ExportTableToPointInTimeCommand_base: { + new ( + input: ExportTableToPointInTimeCommandInput + ): import("@smithy/smithy-client").CommandImpl< + ExportTableToPointInTimeCommandInput, + ExportTableToPointInTimeCommandOutput, + DynamoDBClientResolvedConfig, + ServiceInputTypes, + ServiceOutputTypes + >; + new ( + __0_0: ExportTableToPointInTimeCommandInput + ): import("@smithy/smithy-client").CommandImpl< + ExportTableToPointInTimeCommandInput, + ExportTableToPointInTimeCommandOutput, + DynamoDBClientResolvedConfig, + ServiceInputTypes, + ServiceOutputTypes + >; + getEndpointParameterInstructions(): import("@smithy/middleware-endpoint").EndpointParameterInstructions; +}; +export declare class ExportTableToPointInTimeCommand extends ExportTableToPointInTimeCommand_base { + protected static __types: { + api: { 
+ input: ExportTableToPointInTimeInput; + output: ExportTableToPointInTimeOutput; + }; + sdk: { + input: ExportTableToPointInTimeCommandInput; + output: ExportTableToPointInTimeCommandOutput; + }; + }; +} diff --git a/amplify/functions/deleteDocument/node_modules/@aws-sdk/client-dynamodb/dist-types/ts3.4/commands/GetItemCommand.d.ts b/amplify/functions/deleteDocument/node_modules/@aws-sdk/client-dynamodb/dist-types/ts3.4/commands/GetItemCommand.d.ts new file mode 100644 index 0000000..0e7ffb9 --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@aws-sdk/client-dynamodb/dist-types/ts3.4/commands/GetItemCommand.d.ts @@ -0,0 +1,41 @@ +import { Command as $Command } from "@smithy/smithy-client"; +import { MetadataBearer as __MetadataBearer } from "@smithy/types"; +import { + DynamoDBClientResolvedConfig, + ServiceInputTypes, + ServiceOutputTypes, +} from "../DynamoDBClient"; +import { GetItemInput, GetItemOutput } from "../models/models_0"; +export { __MetadataBearer }; +export { $Command }; +export interface GetItemCommandInput extends GetItemInput {} +export interface GetItemCommandOutput extends GetItemOutput, __MetadataBearer {} +declare const GetItemCommand_base: { + new (input: GetItemCommandInput): import("@smithy/smithy-client").CommandImpl< + GetItemCommandInput, + GetItemCommandOutput, + DynamoDBClientResolvedConfig, + ServiceInputTypes, + ServiceOutputTypes + >; + new (__0_0: GetItemCommandInput): import("@smithy/smithy-client").CommandImpl< + GetItemCommandInput, + GetItemCommandOutput, + DynamoDBClientResolvedConfig, + ServiceInputTypes, + ServiceOutputTypes + >; + getEndpointParameterInstructions(): import("@smithy/middleware-endpoint").EndpointParameterInstructions; +}; +export declare class GetItemCommand extends GetItemCommand_base { + protected static __types: { + api: { + input: GetItemInput; + output: GetItemOutput; + }; + sdk: { + input: GetItemCommandInput; + output: GetItemCommandOutput; + }; + }; +} diff --git 
a/amplify/functions/deleteDocument/node_modules/@aws-sdk/client-dynamodb/dist-types/ts3.4/commands/GetResourcePolicyCommand.d.ts b/amplify/functions/deleteDocument/node_modules/@aws-sdk/client-dynamodb/dist-types/ts3.4/commands/GetResourcePolicyCommand.d.ts new file mode 100644 index 0000000..3691e80 --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@aws-sdk/client-dynamodb/dist-types/ts3.4/commands/GetResourcePolicyCommand.d.ts @@ -0,0 +1,50 @@ +import { Command as $Command } from "@smithy/smithy-client"; +import { MetadataBearer as __MetadataBearer } from "@smithy/types"; +import { + DynamoDBClientResolvedConfig, + ServiceInputTypes, + ServiceOutputTypes, +} from "../DynamoDBClient"; +import { + GetResourcePolicyInput, + GetResourcePolicyOutput, +} from "../models/models_0"; +export { __MetadataBearer }; +export { $Command }; +export interface GetResourcePolicyCommandInput extends GetResourcePolicyInput {} +export interface GetResourcePolicyCommandOutput + extends GetResourcePolicyOutput, + __MetadataBearer {} +declare const GetResourcePolicyCommand_base: { + new ( + input: GetResourcePolicyCommandInput + ): import("@smithy/smithy-client").CommandImpl< + GetResourcePolicyCommandInput, + GetResourcePolicyCommandOutput, + DynamoDBClientResolvedConfig, + ServiceInputTypes, + ServiceOutputTypes + >; + new ( + __0_0: GetResourcePolicyCommandInput + ): import("@smithy/smithy-client").CommandImpl< + GetResourcePolicyCommandInput, + GetResourcePolicyCommandOutput, + DynamoDBClientResolvedConfig, + ServiceInputTypes, + ServiceOutputTypes + >; + getEndpointParameterInstructions(): import("@smithy/middleware-endpoint").EndpointParameterInstructions; +}; +export declare class GetResourcePolicyCommand extends GetResourcePolicyCommand_base { + protected static __types: { + api: { + input: GetResourcePolicyInput; + output: GetResourcePolicyOutput; + }; + sdk: { + input: GetResourcePolicyCommandInput; + output: GetResourcePolicyCommandOutput; + }; + }; +} diff 
--git a/amplify/functions/deleteDocument/node_modules/@aws-sdk/client-dynamodb/dist-types/ts3.4/commands/ImportTableCommand.d.ts b/amplify/functions/deleteDocument/node_modules/@aws-sdk/client-dynamodb/dist-types/ts3.4/commands/ImportTableCommand.d.ts new file mode 100644 index 0000000..57c8b04 --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@aws-sdk/client-dynamodb/dist-types/ts3.4/commands/ImportTableCommand.d.ts @@ -0,0 +1,47 @@ +import { Command as $Command } from "@smithy/smithy-client"; +import { MetadataBearer as __MetadataBearer } from "@smithy/types"; +import { + DynamoDBClientResolvedConfig, + ServiceInputTypes, + ServiceOutputTypes, +} from "../DynamoDBClient"; +import { ImportTableInput, ImportTableOutput } from "../models/models_0"; +export { __MetadataBearer }; +export { $Command }; +export interface ImportTableCommandInput extends ImportTableInput {} +export interface ImportTableCommandOutput + extends ImportTableOutput, + __MetadataBearer {} +declare const ImportTableCommand_base: { + new ( + input: ImportTableCommandInput + ): import("@smithy/smithy-client").CommandImpl< + ImportTableCommandInput, + ImportTableCommandOutput, + DynamoDBClientResolvedConfig, + ServiceInputTypes, + ServiceOutputTypes + >; + new ( + __0_0: ImportTableCommandInput + ): import("@smithy/smithy-client").CommandImpl< + ImportTableCommandInput, + ImportTableCommandOutput, + DynamoDBClientResolvedConfig, + ServiceInputTypes, + ServiceOutputTypes + >; + getEndpointParameterInstructions(): import("@smithy/middleware-endpoint").EndpointParameterInstructions; +}; +export declare class ImportTableCommand extends ImportTableCommand_base { + protected static __types: { + api: { + input: ImportTableInput; + output: ImportTableOutput; + }; + sdk: { + input: ImportTableCommandInput; + output: ImportTableCommandOutput; + }; + }; +} diff --git a/amplify/functions/deleteDocument/node_modules/@aws-sdk/client-dynamodb/dist-types/ts3.4/commands/ListBackupsCommand.d.ts 
b/amplify/functions/deleteDocument/node_modules/@aws-sdk/client-dynamodb/dist-types/ts3.4/commands/ListBackupsCommand.d.ts new file mode 100644 index 0000000..2b00a39 --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@aws-sdk/client-dynamodb/dist-types/ts3.4/commands/ListBackupsCommand.d.ts @@ -0,0 +1,47 @@ +import { Command as $Command } from "@smithy/smithy-client"; +import { MetadataBearer as __MetadataBearer } from "@smithy/types"; +import { + DynamoDBClientResolvedConfig, + ServiceInputTypes, + ServiceOutputTypes, +} from "../DynamoDBClient"; +import { ListBackupsInput, ListBackupsOutput } from "../models/models_0"; +export { __MetadataBearer }; +export { $Command }; +export interface ListBackupsCommandInput extends ListBackupsInput {} +export interface ListBackupsCommandOutput + extends ListBackupsOutput, + __MetadataBearer {} +declare const ListBackupsCommand_base: { + new ( + input: ListBackupsCommandInput + ): import("@smithy/smithy-client").CommandImpl< + ListBackupsCommandInput, + ListBackupsCommandOutput, + DynamoDBClientResolvedConfig, + ServiceInputTypes, + ServiceOutputTypes + >; + new ( + ...[input]: [] | [ListBackupsCommandInput] + ): import("@smithy/smithy-client").CommandImpl< + ListBackupsCommandInput, + ListBackupsCommandOutput, + DynamoDBClientResolvedConfig, + ServiceInputTypes, + ServiceOutputTypes + >; + getEndpointParameterInstructions(): import("@smithy/middleware-endpoint").EndpointParameterInstructions; +}; +export declare class ListBackupsCommand extends ListBackupsCommand_base { + protected static __types: { + api: { + input: ListBackupsInput; + output: ListBackupsOutput; + }; + sdk: { + input: ListBackupsCommandInput; + output: ListBackupsCommandOutput; + }; + }; +} diff --git a/amplify/functions/deleteDocument/node_modules/@aws-sdk/client-dynamodb/dist-types/ts3.4/commands/ListContributorInsightsCommand.d.ts 
b/amplify/functions/deleteDocument/node_modules/@aws-sdk/client-dynamodb/dist-types/ts3.4/commands/ListContributorInsightsCommand.d.ts new file mode 100644 index 0000000..455495e --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@aws-sdk/client-dynamodb/dist-types/ts3.4/commands/ListContributorInsightsCommand.d.ts @@ -0,0 +1,51 @@ +import { Command as $Command } from "@smithy/smithy-client"; +import { MetadataBearer as __MetadataBearer } from "@smithy/types"; +import { + DynamoDBClientResolvedConfig, + ServiceInputTypes, + ServiceOutputTypes, +} from "../DynamoDBClient"; +import { + ListContributorInsightsInput, + ListContributorInsightsOutput, +} from "../models/models_0"; +export { __MetadataBearer }; +export { $Command }; +export interface ListContributorInsightsCommandInput + extends ListContributorInsightsInput {} +export interface ListContributorInsightsCommandOutput + extends ListContributorInsightsOutput, + __MetadataBearer {} +declare const ListContributorInsightsCommand_base: { + new ( + input: ListContributorInsightsCommandInput + ): import("@smithy/smithy-client").CommandImpl< + ListContributorInsightsCommandInput, + ListContributorInsightsCommandOutput, + DynamoDBClientResolvedConfig, + ServiceInputTypes, + ServiceOutputTypes + >; + new ( + ...[input]: [] | [ListContributorInsightsCommandInput] + ): import("@smithy/smithy-client").CommandImpl< + ListContributorInsightsCommandInput, + ListContributorInsightsCommandOutput, + DynamoDBClientResolvedConfig, + ServiceInputTypes, + ServiceOutputTypes + >; + getEndpointParameterInstructions(): import("@smithy/middleware-endpoint").EndpointParameterInstructions; +}; +export declare class ListContributorInsightsCommand extends ListContributorInsightsCommand_base { + protected static __types: { + api: { + input: ListContributorInsightsInput; + output: ListContributorInsightsOutput; + }; + sdk: { + input: ListContributorInsightsCommandInput; + output: ListContributorInsightsCommandOutput; + }; + }; 
+} diff --git a/amplify/functions/deleteDocument/node_modules/@aws-sdk/client-dynamodb/dist-types/ts3.4/commands/ListExportsCommand.d.ts b/amplify/functions/deleteDocument/node_modules/@aws-sdk/client-dynamodb/dist-types/ts3.4/commands/ListExportsCommand.d.ts new file mode 100644 index 0000000..b968746 --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@aws-sdk/client-dynamodb/dist-types/ts3.4/commands/ListExportsCommand.d.ts @@ -0,0 +1,47 @@ +import { Command as $Command } from "@smithy/smithy-client"; +import { MetadataBearer as __MetadataBearer } from "@smithy/types"; +import { + DynamoDBClientResolvedConfig, + ServiceInputTypes, + ServiceOutputTypes, +} from "../DynamoDBClient"; +import { ListExportsInput, ListExportsOutput } from "../models/models_0"; +export { __MetadataBearer }; +export { $Command }; +export interface ListExportsCommandInput extends ListExportsInput {} +export interface ListExportsCommandOutput + extends ListExportsOutput, + __MetadataBearer {} +declare const ListExportsCommand_base: { + new ( + input: ListExportsCommandInput + ): import("@smithy/smithy-client").CommandImpl< + ListExportsCommandInput, + ListExportsCommandOutput, + DynamoDBClientResolvedConfig, + ServiceInputTypes, + ServiceOutputTypes + >; + new ( + ...[input]: [] | [ListExportsCommandInput] + ): import("@smithy/smithy-client").CommandImpl< + ListExportsCommandInput, + ListExportsCommandOutput, + DynamoDBClientResolvedConfig, + ServiceInputTypes, + ServiceOutputTypes + >; + getEndpointParameterInstructions(): import("@smithy/middleware-endpoint").EndpointParameterInstructions; +}; +export declare class ListExportsCommand extends ListExportsCommand_base { + protected static __types: { + api: { + input: ListExportsInput; + output: ListExportsOutput; + }; + sdk: { + input: ListExportsCommandInput; + output: ListExportsCommandOutput; + }; + }; +} diff --git 
a/amplify/functions/deleteDocument/node_modules/@aws-sdk/client-dynamodb/dist-types/ts3.4/commands/ListGlobalTablesCommand.d.ts b/amplify/functions/deleteDocument/node_modules/@aws-sdk/client-dynamodb/dist-types/ts3.4/commands/ListGlobalTablesCommand.d.ts new file mode 100644 index 0000000..3428e57 --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@aws-sdk/client-dynamodb/dist-types/ts3.4/commands/ListGlobalTablesCommand.d.ts @@ -0,0 +1,50 @@ +import { Command as $Command } from "@smithy/smithy-client"; +import { MetadataBearer as __MetadataBearer } from "@smithy/types"; +import { + DynamoDBClientResolvedConfig, + ServiceInputTypes, + ServiceOutputTypes, +} from "../DynamoDBClient"; +import { + ListGlobalTablesInput, + ListGlobalTablesOutput, +} from "../models/models_0"; +export { __MetadataBearer }; +export { $Command }; +export interface ListGlobalTablesCommandInput extends ListGlobalTablesInput {} +export interface ListGlobalTablesCommandOutput + extends ListGlobalTablesOutput, + __MetadataBearer {} +declare const ListGlobalTablesCommand_base: { + new ( + input: ListGlobalTablesCommandInput + ): import("@smithy/smithy-client").CommandImpl< + ListGlobalTablesCommandInput, + ListGlobalTablesCommandOutput, + DynamoDBClientResolvedConfig, + ServiceInputTypes, + ServiceOutputTypes + >; + new ( + ...[input]: [] | [ListGlobalTablesCommandInput] + ): import("@smithy/smithy-client").CommandImpl< + ListGlobalTablesCommandInput, + ListGlobalTablesCommandOutput, + DynamoDBClientResolvedConfig, + ServiceInputTypes, + ServiceOutputTypes + >; + getEndpointParameterInstructions(): import("@smithy/middleware-endpoint").EndpointParameterInstructions; +}; +export declare class ListGlobalTablesCommand extends ListGlobalTablesCommand_base { + protected static __types: { + api: { + input: ListGlobalTablesInput; + output: ListGlobalTablesOutput; + }; + sdk: { + input: ListGlobalTablesCommandInput; + output: ListGlobalTablesCommandOutput; + }; + }; +} diff --git 
a/amplify/functions/deleteDocument/node_modules/@aws-sdk/client-dynamodb/dist-types/ts3.4/commands/ListImportsCommand.d.ts b/amplify/functions/deleteDocument/node_modules/@aws-sdk/client-dynamodb/dist-types/ts3.4/commands/ListImportsCommand.d.ts new file mode 100644 index 0000000..07bfebf --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@aws-sdk/client-dynamodb/dist-types/ts3.4/commands/ListImportsCommand.d.ts @@ -0,0 +1,47 @@ +import { Command as $Command } from "@smithy/smithy-client"; +import { MetadataBearer as __MetadataBearer } from "@smithy/types"; +import { + DynamoDBClientResolvedConfig, + ServiceInputTypes, + ServiceOutputTypes, +} from "../DynamoDBClient"; +import { ListImportsInput, ListImportsOutput } from "../models/models_0"; +export { __MetadataBearer }; +export { $Command }; +export interface ListImportsCommandInput extends ListImportsInput {} +export interface ListImportsCommandOutput + extends ListImportsOutput, + __MetadataBearer {} +declare const ListImportsCommand_base: { + new ( + input: ListImportsCommandInput + ): import("@smithy/smithy-client").CommandImpl< + ListImportsCommandInput, + ListImportsCommandOutput, + DynamoDBClientResolvedConfig, + ServiceInputTypes, + ServiceOutputTypes + >; + new ( + ...[input]: [] | [ListImportsCommandInput] + ): import("@smithy/smithy-client").CommandImpl< + ListImportsCommandInput, + ListImportsCommandOutput, + DynamoDBClientResolvedConfig, + ServiceInputTypes, + ServiceOutputTypes + >; + getEndpointParameterInstructions(): import("@smithy/middleware-endpoint").EndpointParameterInstructions; +}; +export declare class ListImportsCommand extends ListImportsCommand_base { + protected static __types: { + api: { + input: ListImportsInput; + output: ListImportsOutput; + }; + sdk: { + input: ListImportsCommandInput; + output: ListImportsCommandOutput; + }; + }; +} diff --git a/amplify/functions/deleteDocument/node_modules/@aws-sdk/client-dynamodb/dist-types/ts3.4/commands/ListTablesCommand.d.ts 
b/amplify/functions/deleteDocument/node_modules/@aws-sdk/client-dynamodb/dist-types/ts3.4/commands/ListTablesCommand.d.ts new file mode 100644 index 0000000..497f02f --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@aws-sdk/client-dynamodb/dist-types/ts3.4/commands/ListTablesCommand.d.ts @@ -0,0 +1,47 @@ +import { Command as $Command } from "@smithy/smithy-client"; +import { MetadataBearer as __MetadataBearer } from "@smithy/types"; +import { + DynamoDBClientResolvedConfig, + ServiceInputTypes, + ServiceOutputTypes, +} from "../DynamoDBClient"; +import { ListTablesInput, ListTablesOutput } from "../models/models_0"; +export { __MetadataBearer }; +export { $Command }; +export interface ListTablesCommandInput extends ListTablesInput {} +export interface ListTablesCommandOutput + extends ListTablesOutput, + __MetadataBearer {} +declare const ListTablesCommand_base: { + new ( + input: ListTablesCommandInput + ): import("@smithy/smithy-client").CommandImpl< + ListTablesCommandInput, + ListTablesCommandOutput, + DynamoDBClientResolvedConfig, + ServiceInputTypes, + ServiceOutputTypes + >; + new ( + ...[input]: [] | [ListTablesCommandInput] + ): import("@smithy/smithy-client").CommandImpl< + ListTablesCommandInput, + ListTablesCommandOutput, + DynamoDBClientResolvedConfig, + ServiceInputTypes, + ServiceOutputTypes + >; + getEndpointParameterInstructions(): import("@smithy/middleware-endpoint").EndpointParameterInstructions; +}; +export declare class ListTablesCommand extends ListTablesCommand_base { + protected static __types: { + api: { + input: ListTablesInput; + output: ListTablesOutput; + }; + sdk: { + input: ListTablesCommandInput; + output: ListTablesCommandOutput; + }; + }; +} diff --git a/amplify/functions/deleteDocument/node_modules/@aws-sdk/client-dynamodb/dist-types/ts3.4/commands/ListTagsOfResourceCommand.d.ts b/amplify/functions/deleteDocument/node_modules/@aws-sdk/client-dynamodb/dist-types/ts3.4/commands/ListTagsOfResourceCommand.d.ts new 
file mode 100644 index 0000000..8bffe40 --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@aws-sdk/client-dynamodb/dist-types/ts3.4/commands/ListTagsOfResourceCommand.d.ts @@ -0,0 +1,51 @@ +import { Command as $Command } from "@smithy/smithy-client"; +import { MetadataBearer as __MetadataBearer } from "@smithy/types"; +import { + DynamoDBClientResolvedConfig, + ServiceInputTypes, + ServiceOutputTypes, +} from "../DynamoDBClient"; +import { + ListTagsOfResourceInput, + ListTagsOfResourceOutput, +} from "../models/models_0"; +export { __MetadataBearer }; +export { $Command }; +export interface ListTagsOfResourceCommandInput + extends ListTagsOfResourceInput {} +export interface ListTagsOfResourceCommandOutput + extends ListTagsOfResourceOutput, + __MetadataBearer {} +declare const ListTagsOfResourceCommand_base: { + new ( + input: ListTagsOfResourceCommandInput + ): import("@smithy/smithy-client").CommandImpl< + ListTagsOfResourceCommandInput, + ListTagsOfResourceCommandOutput, + DynamoDBClientResolvedConfig, + ServiceInputTypes, + ServiceOutputTypes + >; + new ( + __0_0: ListTagsOfResourceCommandInput + ): import("@smithy/smithy-client").CommandImpl< + ListTagsOfResourceCommandInput, + ListTagsOfResourceCommandOutput, + DynamoDBClientResolvedConfig, + ServiceInputTypes, + ServiceOutputTypes + >; + getEndpointParameterInstructions(): import("@smithy/middleware-endpoint").EndpointParameterInstructions; +}; +export declare class ListTagsOfResourceCommand extends ListTagsOfResourceCommand_base { + protected static __types: { + api: { + input: ListTagsOfResourceInput; + output: ListTagsOfResourceOutput; + }; + sdk: { + input: ListTagsOfResourceCommandInput; + output: ListTagsOfResourceCommandOutput; + }; + }; +} diff --git a/amplify/functions/deleteDocument/node_modules/@aws-sdk/client-dynamodb/dist-types/ts3.4/commands/PutItemCommand.d.ts 
b/amplify/functions/deleteDocument/node_modules/@aws-sdk/client-dynamodb/dist-types/ts3.4/commands/PutItemCommand.d.ts new file mode 100644 index 0000000..7ae0d3e --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@aws-sdk/client-dynamodb/dist-types/ts3.4/commands/PutItemCommand.d.ts @@ -0,0 +1,41 @@ +import { Command as $Command } from "@smithy/smithy-client"; +import { MetadataBearer as __MetadataBearer } from "@smithy/types"; +import { + DynamoDBClientResolvedConfig, + ServiceInputTypes, + ServiceOutputTypes, +} from "../DynamoDBClient"; +import { PutItemInput, PutItemOutput } from "../models/models_0"; +export { __MetadataBearer }; +export { $Command }; +export interface PutItemCommandInput extends PutItemInput {} +export interface PutItemCommandOutput extends PutItemOutput, __MetadataBearer {} +declare const PutItemCommand_base: { + new (input: PutItemCommandInput): import("@smithy/smithy-client").CommandImpl< + PutItemCommandInput, + PutItemCommandOutput, + DynamoDBClientResolvedConfig, + ServiceInputTypes, + ServiceOutputTypes + >; + new (__0_0: PutItemCommandInput): import("@smithy/smithy-client").CommandImpl< + PutItemCommandInput, + PutItemCommandOutput, + DynamoDBClientResolvedConfig, + ServiceInputTypes, + ServiceOutputTypes + >; + getEndpointParameterInstructions(): import("@smithy/middleware-endpoint").EndpointParameterInstructions; +}; +export declare class PutItemCommand extends PutItemCommand_base { + protected static __types: { + api: { + input: PutItemInput; + output: PutItemOutput; + }; + sdk: { + input: PutItemCommandInput; + output: PutItemCommandOutput; + }; + }; +} diff --git a/amplify/functions/deleteDocument/node_modules/@aws-sdk/client-dynamodb/dist-types/ts3.4/commands/PutResourcePolicyCommand.d.ts b/amplify/functions/deleteDocument/node_modules/@aws-sdk/client-dynamodb/dist-types/ts3.4/commands/PutResourcePolicyCommand.d.ts new file mode 100644 index 0000000..2c83af7 --- /dev/null +++ 
b/amplify/functions/deleteDocument/node_modules/@aws-sdk/client-dynamodb/dist-types/ts3.4/commands/PutResourcePolicyCommand.d.ts @@ -0,0 +1,50 @@ +import { Command as $Command } from "@smithy/smithy-client"; +import { MetadataBearer as __MetadataBearer } from "@smithy/types"; +import { + DynamoDBClientResolvedConfig, + ServiceInputTypes, + ServiceOutputTypes, +} from "../DynamoDBClient"; +import { + PutResourcePolicyInput, + PutResourcePolicyOutput, +} from "../models/models_0"; +export { __MetadataBearer }; +export { $Command }; +export interface PutResourcePolicyCommandInput extends PutResourcePolicyInput {} +export interface PutResourcePolicyCommandOutput + extends PutResourcePolicyOutput, + __MetadataBearer {} +declare const PutResourcePolicyCommand_base: { + new ( + input: PutResourcePolicyCommandInput + ): import("@smithy/smithy-client").CommandImpl< + PutResourcePolicyCommandInput, + PutResourcePolicyCommandOutput, + DynamoDBClientResolvedConfig, + ServiceInputTypes, + ServiceOutputTypes + >; + new ( + __0_0: PutResourcePolicyCommandInput + ): import("@smithy/smithy-client").CommandImpl< + PutResourcePolicyCommandInput, + PutResourcePolicyCommandOutput, + DynamoDBClientResolvedConfig, + ServiceInputTypes, + ServiceOutputTypes + >; + getEndpointParameterInstructions(): import("@smithy/middleware-endpoint").EndpointParameterInstructions; +}; +export declare class PutResourcePolicyCommand extends PutResourcePolicyCommand_base { + protected static __types: { + api: { + input: PutResourcePolicyInput; + output: PutResourcePolicyOutput; + }; + sdk: { + input: PutResourcePolicyCommandInput; + output: PutResourcePolicyCommandOutput; + }; + }; +} diff --git a/amplify/functions/deleteDocument/node_modules/@aws-sdk/client-dynamodb/dist-types/ts3.4/commands/QueryCommand.d.ts b/amplify/functions/deleteDocument/node_modules/@aws-sdk/client-dynamodb/dist-types/ts3.4/commands/QueryCommand.d.ts new file mode 100644 index 0000000..125753a --- /dev/null +++ 
b/amplify/functions/deleteDocument/node_modules/@aws-sdk/client-dynamodb/dist-types/ts3.4/commands/QueryCommand.d.ts @@ -0,0 +1,41 @@ +import { Command as $Command } from "@smithy/smithy-client"; +import { MetadataBearer as __MetadataBearer } from "@smithy/types"; +import { + DynamoDBClientResolvedConfig, + ServiceInputTypes, + ServiceOutputTypes, +} from "../DynamoDBClient"; +import { QueryInput, QueryOutput } from "../models/models_0"; +export { __MetadataBearer }; +export { $Command }; +export interface QueryCommandInput extends QueryInput {} +export interface QueryCommandOutput extends QueryOutput, __MetadataBearer {} +declare const QueryCommand_base: { + new (input: QueryCommandInput): import("@smithy/smithy-client").CommandImpl< + QueryCommandInput, + QueryCommandOutput, + DynamoDBClientResolvedConfig, + ServiceInputTypes, + ServiceOutputTypes + >; + new (__0_0: QueryCommandInput): import("@smithy/smithy-client").CommandImpl< + QueryCommandInput, + QueryCommandOutput, + DynamoDBClientResolvedConfig, + ServiceInputTypes, + ServiceOutputTypes + >; + getEndpointParameterInstructions(): import("@smithy/middleware-endpoint").EndpointParameterInstructions; +}; +export declare class QueryCommand extends QueryCommand_base { + protected static __types: { + api: { + input: QueryInput; + output: QueryOutput; + }; + sdk: { + input: QueryCommandInput; + output: QueryCommandOutput; + }; + }; +} diff --git a/amplify/functions/deleteDocument/node_modules/@aws-sdk/client-dynamodb/dist-types/ts3.4/commands/RestoreTableFromBackupCommand.d.ts b/amplify/functions/deleteDocument/node_modules/@aws-sdk/client-dynamodb/dist-types/ts3.4/commands/RestoreTableFromBackupCommand.d.ts new file mode 100644 index 0000000..954e61b --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@aws-sdk/client-dynamodb/dist-types/ts3.4/commands/RestoreTableFromBackupCommand.d.ts @@ -0,0 +1,51 @@ +import { Command as $Command } from "@smithy/smithy-client"; +import { MetadataBearer as 
__MetadataBearer } from "@smithy/types"; +import { + DynamoDBClientResolvedConfig, + ServiceInputTypes, + ServiceOutputTypes, +} from "../DynamoDBClient"; +import { + RestoreTableFromBackupInput, + RestoreTableFromBackupOutput, +} from "../models/models_0"; +export { __MetadataBearer }; +export { $Command }; +export interface RestoreTableFromBackupCommandInput + extends RestoreTableFromBackupInput {} +export interface RestoreTableFromBackupCommandOutput + extends RestoreTableFromBackupOutput, + __MetadataBearer {} +declare const RestoreTableFromBackupCommand_base: { + new ( + input: RestoreTableFromBackupCommandInput + ): import("@smithy/smithy-client").CommandImpl< + RestoreTableFromBackupCommandInput, + RestoreTableFromBackupCommandOutput, + DynamoDBClientResolvedConfig, + ServiceInputTypes, + ServiceOutputTypes + >; + new ( + __0_0: RestoreTableFromBackupCommandInput + ): import("@smithy/smithy-client").CommandImpl< + RestoreTableFromBackupCommandInput, + RestoreTableFromBackupCommandOutput, + DynamoDBClientResolvedConfig, + ServiceInputTypes, + ServiceOutputTypes + >; + getEndpointParameterInstructions(): import("@smithy/middleware-endpoint").EndpointParameterInstructions; +}; +export declare class RestoreTableFromBackupCommand extends RestoreTableFromBackupCommand_base { + protected static __types: { + api: { + input: RestoreTableFromBackupInput; + output: RestoreTableFromBackupOutput; + }; + sdk: { + input: RestoreTableFromBackupCommandInput; + output: RestoreTableFromBackupCommandOutput; + }; + }; +} diff --git a/amplify/functions/deleteDocument/node_modules/@aws-sdk/client-dynamodb/dist-types/ts3.4/commands/RestoreTableToPointInTimeCommand.d.ts b/amplify/functions/deleteDocument/node_modules/@aws-sdk/client-dynamodb/dist-types/ts3.4/commands/RestoreTableToPointInTimeCommand.d.ts new file mode 100644 index 0000000..8b243f5 --- /dev/null +++ 
b/amplify/functions/deleteDocument/node_modules/@aws-sdk/client-dynamodb/dist-types/ts3.4/commands/RestoreTableToPointInTimeCommand.d.ts @@ -0,0 +1,51 @@ +import { Command as $Command } from "@smithy/smithy-client"; +import { MetadataBearer as __MetadataBearer } from "@smithy/types"; +import { + DynamoDBClientResolvedConfig, + ServiceInputTypes, + ServiceOutputTypes, +} from "../DynamoDBClient"; +import { + RestoreTableToPointInTimeInput, + RestoreTableToPointInTimeOutput, +} from "../models/models_0"; +export { __MetadataBearer }; +export { $Command }; +export interface RestoreTableToPointInTimeCommandInput + extends RestoreTableToPointInTimeInput {} +export interface RestoreTableToPointInTimeCommandOutput + extends RestoreTableToPointInTimeOutput, + __MetadataBearer {} +declare const RestoreTableToPointInTimeCommand_base: { + new ( + input: RestoreTableToPointInTimeCommandInput + ): import("@smithy/smithy-client").CommandImpl< + RestoreTableToPointInTimeCommandInput, + RestoreTableToPointInTimeCommandOutput, + DynamoDBClientResolvedConfig, + ServiceInputTypes, + ServiceOutputTypes + >; + new ( + __0_0: RestoreTableToPointInTimeCommandInput + ): import("@smithy/smithy-client").CommandImpl< + RestoreTableToPointInTimeCommandInput, + RestoreTableToPointInTimeCommandOutput, + DynamoDBClientResolvedConfig, + ServiceInputTypes, + ServiceOutputTypes + >; + getEndpointParameterInstructions(): import("@smithy/middleware-endpoint").EndpointParameterInstructions; +}; +export declare class RestoreTableToPointInTimeCommand extends RestoreTableToPointInTimeCommand_base { + protected static __types: { + api: { + input: RestoreTableToPointInTimeInput; + output: RestoreTableToPointInTimeOutput; + }; + sdk: { + input: RestoreTableToPointInTimeCommandInput; + output: RestoreTableToPointInTimeCommandOutput; + }; + }; +} diff --git a/amplify/functions/deleteDocument/node_modules/@aws-sdk/client-dynamodb/dist-types/ts3.4/commands/ScanCommand.d.ts 
b/amplify/functions/deleteDocument/node_modules/@aws-sdk/client-dynamodb/dist-types/ts3.4/commands/ScanCommand.d.ts new file mode 100644 index 0000000..fe66b35 --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@aws-sdk/client-dynamodb/dist-types/ts3.4/commands/ScanCommand.d.ts @@ -0,0 +1,41 @@ +import { Command as $Command } from "@smithy/smithy-client"; +import { MetadataBearer as __MetadataBearer } from "@smithy/types"; +import { + DynamoDBClientResolvedConfig, + ServiceInputTypes, + ServiceOutputTypes, +} from "../DynamoDBClient"; +import { ScanInput, ScanOutput } from "../models/models_0"; +export { __MetadataBearer }; +export { $Command }; +export interface ScanCommandInput extends ScanInput {} +export interface ScanCommandOutput extends ScanOutput, __MetadataBearer {} +declare const ScanCommand_base: { + new (input: ScanCommandInput): import("@smithy/smithy-client").CommandImpl< + ScanCommandInput, + ScanCommandOutput, + DynamoDBClientResolvedConfig, + ServiceInputTypes, + ServiceOutputTypes + >; + new (__0_0: ScanCommandInput): import("@smithy/smithy-client").CommandImpl< + ScanCommandInput, + ScanCommandOutput, + DynamoDBClientResolvedConfig, + ServiceInputTypes, + ServiceOutputTypes + >; + getEndpointParameterInstructions(): import("@smithy/middleware-endpoint").EndpointParameterInstructions; +}; +export declare class ScanCommand extends ScanCommand_base { + protected static __types: { + api: { + input: ScanInput; + output: ScanOutput; + }; + sdk: { + input: ScanCommandInput; + output: ScanCommandOutput; + }; + }; +} diff --git a/amplify/functions/deleteDocument/node_modules/@aws-sdk/client-dynamodb/dist-types/ts3.4/commands/TagResourceCommand.d.ts b/amplify/functions/deleteDocument/node_modules/@aws-sdk/client-dynamodb/dist-types/ts3.4/commands/TagResourceCommand.d.ts new file mode 100644 index 0000000..2791246 --- /dev/null +++ 
b/amplify/functions/deleteDocument/node_modules/@aws-sdk/client-dynamodb/dist-types/ts3.4/commands/TagResourceCommand.d.ts @@ -0,0 +1,45 @@ +import { Command as $Command } from "@smithy/smithy-client"; +import { MetadataBearer as __MetadataBearer } from "@smithy/types"; +import { + DynamoDBClientResolvedConfig, + ServiceInputTypes, + ServiceOutputTypes, +} from "../DynamoDBClient"; +import { TagResourceInput } from "../models/models_0"; +export { __MetadataBearer }; +export { $Command }; +export interface TagResourceCommandInput extends TagResourceInput {} +export interface TagResourceCommandOutput extends __MetadataBearer {} +declare const TagResourceCommand_base: { + new ( + input: TagResourceCommandInput + ): import("@smithy/smithy-client").CommandImpl< + TagResourceCommandInput, + TagResourceCommandOutput, + DynamoDBClientResolvedConfig, + ServiceInputTypes, + ServiceOutputTypes + >; + new ( + __0_0: TagResourceCommandInput + ): import("@smithy/smithy-client").CommandImpl< + TagResourceCommandInput, + TagResourceCommandOutput, + DynamoDBClientResolvedConfig, + ServiceInputTypes, + ServiceOutputTypes + >; + getEndpointParameterInstructions(): import("@smithy/middleware-endpoint").EndpointParameterInstructions; +}; +export declare class TagResourceCommand extends TagResourceCommand_base { + protected static __types: { + api: { + input: TagResourceInput; + output: {}; + }; + sdk: { + input: TagResourceCommandInput; + output: TagResourceCommandOutput; + }; + }; +} diff --git a/amplify/functions/deleteDocument/node_modules/@aws-sdk/client-dynamodb/dist-types/ts3.4/commands/TransactGetItemsCommand.d.ts b/amplify/functions/deleteDocument/node_modules/@aws-sdk/client-dynamodb/dist-types/ts3.4/commands/TransactGetItemsCommand.d.ts new file mode 100644 index 0000000..0068549 --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@aws-sdk/client-dynamodb/dist-types/ts3.4/commands/TransactGetItemsCommand.d.ts @@ -0,0 +1,50 @@ +import { Command as $Command } from 
"@smithy/smithy-client"; +import { MetadataBearer as __MetadataBearer } from "@smithy/types"; +import { + DynamoDBClientResolvedConfig, + ServiceInputTypes, + ServiceOutputTypes, +} from "../DynamoDBClient"; +import { + TransactGetItemsInput, + TransactGetItemsOutput, +} from "../models/models_0"; +export { __MetadataBearer }; +export { $Command }; +export interface TransactGetItemsCommandInput extends TransactGetItemsInput {} +export interface TransactGetItemsCommandOutput + extends TransactGetItemsOutput, + __MetadataBearer {} +declare const TransactGetItemsCommand_base: { + new ( + input: TransactGetItemsCommandInput + ): import("@smithy/smithy-client").CommandImpl< + TransactGetItemsCommandInput, + TransactGetItemsCommandOutput, + DynamoDBClientResolvedConfig, + ServiceInputTypes, + ServiceOutputTypes + >; + new ( + __0_0: TransactGetItemsCommandInput + ): import("@smithy/smithy-client").CommandImpl< + TransactGetItemsCommandInput, + TransactGetItemsCommandOutput, + DynamoDBClientResolvedConfig, + ServiceInputTypes, + ServiceOutputTypes + >; + getEndpointParameterInstructions(): import("@smithy/middleware-endpoint").EndpointParameterInstructions; +}; +export declare class TransactGetItemsCommand extends TransactGetItemsCommand_base { + protected static __types: { + api: { + input: TransactGetItemsInput; + output: TransactGetItemsOutput; + }; + sdk: { + input: TransactGetItemsCommandInput; + output: TransactGetItemsCommandOutput; + }; + }; +} diff --git a/amplify/functions/deleteDocument/node_modules/@aws-sdk/client-dynamodb/dist-types/ts3.4/commands/TransactWriteItemsCommand.d.ts b/amplify/functions/deleteDocument/node_modules/@aws-sdk/client-dynamodb/dist-types/ts3.4/commands/TransactWriteItemsCommand.d.ts new file mode 100644 index 0000000..f945bc5 --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@aws-sdk/client-dynamodb/dist-types/ts3.4/commands/TransactWriteItemsCommand.d.ts @@ -0,0 +1,51 @@ +import { Command as $Command } from 
"@smithy/smithy-client"; +import { MetadataBearer as __MetadataBearer } from "@smithy/types"; +import { + DynamoDBClientResolvedConfig, + ServiceInputTypes, + ServiceOutputTypes, +} from "../DynamoDBClient"; +import { + TransactWriteItemsInput, + TransactWriteItemsOutput, +} from "../models/models_0"; +export { __MetadataBearer }; +export { $Command }; +export interface TransactWriteItemsCommandInput + extends TransactWriteItemsInput {} +export interface TransactWriteItemsCommandOutput + extends TransactWriteItemsOutput, + __MetadataBearer {} +declare const TransactWriteItemsCommand_base: { + new ( + input: TransactWriteItemsCommandInput + ): import("@smithy/smithy-client").CommandImpl< + TransactWriteItemsCommandInput, + TransactWriteItemsCommandOutput, + DynamoDBClientResolvedConfig, + ServiceInputTypes, + ServiceOutputTypes + >; + new ( + __0_0: TransactWriteItemsCommandInput + ): import("@smithy/smithy-client").CommandImpl< + TransactWriteItemsCommandInput, + TransactWriteItemsCommandOutput, + DynamoDBClientResolvedConfig, + ServiceInputTypes, + ServiceOutputTypes + >; + getEndpointParameterInstructions(): import("@smithy/middleware-endpoint").EndpointParameterInstructions; +}; +export declare class TransactWriteItemsCommand extends TransactWriteItemsCommand_base { + protected static __types: { + api: { + input: TransactWriteItemsInput; + output: TransactWriteItemsOutput; + }; + sdk: { + input: TransactWriteItemsCommandInput; + output: TransactWriteItemsCommandOutput; + }; + }; +} diff --git a/amplify/functions/deleteDocument/node_modules/@aws-sdk/client-dynamodb/dist-types/ts3.4/commands/UntagResourceCommand.d.ts b/amplify/functions/deleteDocument/node_modules/@aws-sdk/client-dynamodb/dist-types/ts3.4/commands/UntagResourceCommand.d.ts new file mode 100644 index 0000000..7744ef6 --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@aws-sdk/client-dynamodb/dist-types/ts3.4/commands/UntagResourceCommand.d.ts @@ -0,0 +1,45 @@ +import { Command as 
$Command } from "@smithy/smithy-client"; +import { MetadataBearer as __MetadataBearer } from "@smithy/types"; +import { + DynamoDBClientResolvedConfig, + ServiceInputTypes, + ServiceOutputTypes, +} from "../DynamoDBClient"; +import { UntagResourceInput } from "../models/models_0"; +export { __MetadataBearer }; +export { $Command }; +export interface UntagResourceCommandInput extends UntagResourceInput {} +export interface UntagResourceCommandOutput extends __MetadataBearer {} +declare const UntagResourceCommand_base: { + new ( + input: UntagResourceCommandInput + ): import("@smithy/smithy-client").CommandImpl< + UntagResourceCommandInput, + UntagResourceCommandOutput, + DynamoDBClientResolvedConfig, + ServiceInputTypes, + ServiceOutputTypes + >; + new ( + __0_0: UntagResourceCommandInput + ): import("@smithy/smithy-client").CommandImpl< + UntagResourceCommandInput, + UntagResourceCommandOutput, + DynamoDBClientResolvedConfig, + ServiceInputTypes, + ServiceOutputTypes + >; + getEndpointParameterInstructions(): import("@smithy/middleware-endpoint").EndpointParameterInstructions; +}; +export declare class UntagResourceCommand extends UntagResourceCommand_base { + protected static __types: { + api: { + input: UntagResourceInput; + output: {}; + }; + sdk: { + input: UntagResourceCommandInput; + output: UntagResourceCommandOutput; + }; + }; +} diff --git a/amplify/functions/deleteDocument/node_modules/@aws-sdk/client-dynamodb/dist-types/ts3.4/commands/UpdateContinuousBackupsCommand.d.ts b/amplify/functions/deleteDocument/node_modules/@aws-sdk/client-dynamodb/dist-types/ts3.4/commands/UpdateContinuousBackupsCommand.d.ts new file mode 100644 index 0000000..d771ccd --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@aws-sdk/client-dynamodb/dist-types/ts3.4/commands/UpdateContinuousBackupsCommand.d.ts @@ -0,0 +1,51 @@ +import { Command as $Command } from "@smithy/smithy-client"; +import { MetadataBearer as __MetadataBearer } from "@smithy/types"; +import { + 
DynamoDBClientResolvedConfig, + ServiceInputTypes, + ServiceOutputTypes, +} from "../DynamoDBClient"; +import { + UpdateContinuousBackupsInput, + UpdateContinuousBackupsOutput, +} from "../models/models_0"; +export { __MetadataBearer }; +export { $Command }; +export interface UpdateContinuousBackupsCommandInput + extends UpdateContinuousBackupsInput {} +export interface UpdateContinuousBackupsCommandOutput + extends UpdateContinuousBackupsOutput, + __MetadataBearer {} +declare const UpdateContinuousBackupsCommand_base: { + new ( + input: UpdateContinuousBackupsCommandInput + ): import("@smithy/smithy-client").CommandImpl< + UpdateContinuousBackupsCommandInput, + UpdateContinuousBackupsCommandOutput, + DynamoDBClientResolvedConfig, + ServiceInputTypes, + ServiceOutputTypes + >; + new ( + __0_0: UpdateContinuousBackupsCommandInput + ): import("@smithy/smithy-client").CommandImpl< + UpdateContinuousBackupsCommandInput, + UpdateContinuousBackupsCommandOutput, + DynamoDBClientResolvedConfig, + ServiceInputTypes, + ServiceOutputTypes + >; + getEndpointParameterInstructions(): import("@smithy/middleware-endpoint").EndpointParameterInstructions; +}; +export declare class UpdateContinuousBackupsCommand extends UpdateContinuousBackupsCommand_base { + protected static __types: { + api: { + input: UpdateContinuousBackupsInput; + output: UpdateContinuousBackupsOutput; + }; + sdk: { + input: UpdateContinuousBackupsCommandInput; + output: UpdateContinuousBackupsCommandOutput; + }; + }; +} diff --git a/amplify/functions/deleteDocument/node_modules/@aws-sdk/client-dynamodb/dist-types/ts3.4/commands/UpdateContributorInsightsCommand.d.ts b/amplify/functions/deleteDocument/node_modules/@aws-sdk/client-dynamodb/dist-types/ts3.4/commands/UpdateContributorInsightsCommand.d.ts new file mode 100644 index 0000000..07ce57b --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@aws-sdk/client-dynamodb/dist-types/ts3.4/commands/UpdateContributorInsightsCommand.d.ts @@ -0,0 +1,51 @@ 
+import { Command as $Command } from "@smithy/smithy-client"; +import { MetadataBearer as __MetadataBearer } from "@smithy/types"; +import { + DynamoDBClientResolvedConfig, + ServiceInputTypes, + ServiceOutputTypes, +} from "../DynamoDBClient"; +import { + UpdateContributorInsightsInput, + UpdateContributorInsightsOutput, +} from "../models/models_0"; +export { __MetadataBearer }; +export { $Command }; +export interface UpdateContributorInsightsCommandInput + extends UpdateContributorInsightsInput {} +export interface UpdateContributorInsightsCommandOutput + extends UpdateContributorInsightsOutput, + __MetadataBearer {} +declare const UpdateContributorInsightsCommand_base: { + new ( + input: UpdateContributorInsightsCommandInput + ): import("@smithy/smithy-client").CommandImpl< + UpdateContributorInsightsCommandInput, + UpdateContributorInsightsCommandOutput, + DynamoDBClientResolvedConfig, + ServiceInputTypes, + ServiceOutputTypes + >; + new ( + __0_0: UpdateContributorInsightsCommandInput + ): import("@smithy/smithy-client").CommandImpl< + UpdateContributorInsightsCommandInput, + UpdateContributorInsightsCommandOutput, + DynamoDBClientResolvedConfig, + ServiceInputTypes, + ServiceOutputTypes + >; + getEndpointParameterInstructions(): import("@smithy/middleware-endpoint").EndpointParameterInstructions; +}; +export declare class UpdateContributorInsightsCommand extends UpdateContributorInsightsCommand_base { + protected static __types: { + api: { + input: UpdateContributorInsightsInput; + output: UpdateContributorInsightsOutput; + }; + sdk: { + input: UpdateContributorInsightsCommandInput; + output: UpdateContributorInsightsCommandOutput; + }; + }; +} diff --git a/amplify/functions/deleteDocument/node_modules/@aws-sdk/client-dynamodb/dist-types/ts3.4/commands/UpdateGlobalTableCommand.d.ts b/amplify/functions/deleteDocument/node_modules/@aws-sdk/client-dynamodb/dist-types/ts3.4/commands/UpdateGlobalTableCommand.d.ts new file mode 100644 index 0000000..415ecd8 --- 
/dev/null +++ b/amplify/functions/deleteDocument/node_modules/@aws-sdk/client-dynamodb/dist-types/ts3.4/commands/UpdateGlobalTableCommand.d.ts @@ -0,0 +1,50 @@ +import { Command as $Command } from "@smithy/smithy-client"; +import { MetadataBearer as __MetadataBearer } from "@smithy/types"; +import { + DynamoDBClientResolvedConfig, + ServiceInputTypes, + ServiceOutputTypes, +} from "../DynamoDBClient"; +import { + UpdateGlobalTableInput, + UpdateGlobalTableOutput, +} from "../models/models_0"; +export { __MetadataBearer }; +export { $Command }; +export interface UpdateGlobalTableCommandInput extends UpdateGlobalTableInput {} +export interface UpdateGlobalTableCommandOutput + extends UpdateGlobalTableOutput, + __MetadataBearer {} +declare const UpdateGlobalTableCommand_base: { + new ( + input: UpdateGlobalTableCommandInput + ): import("@smithy/smithy-client").CommandImpl< + UpdateGlobalTableCommandInput, + UpdateGlobalTableCommandOutput, + DynamoDBClientResolvedConfig, + ServiceInputTypes, + ServiceOutputTypes + >; + new ( + __0_0: UpdateGlobalTableCommandInput + ): import("@smithy/smithy-client").CommandImpl< + UpdateGlobalTableCommandInput, + UpdateGlobalTableCommandOutput, + DynamoDBClientResolvedConfig, + ServiceInputTypes, + ServiceOutputTypes + >; + getEndpointParameterInstructions(): import("@smithy/middleware-endpoint").EndpointParameterInstructions; +}; +export declare class UpdateGlobalTableCommand extends UpdateGlobalTableCommand_base { + protected static __types: { + api: { + input: UpdateGlobalTableInput; + output: UpdateGlobalTableOutput; + }; + sdk: { + input: UpdateGlobalTableCommandInput; + output: UpdateGlobalTableCommandOutput; + }; + }; +} diff --git a/amplify/functions/deleteDocument/node_modules/@aws-sdk/client-dynamodb/dist-types/ts3.4/commands/UpdateGlobalTableSettingsCommand.d.ts b/amplify/functions/deleteDocument/node_modules/@aws-sdk/client-dynamodb/dist-types/ts3.4/commands/UpdateGlobalTableSettingsCommand.d.ts new file mode 100644 index 
0000000..653ae6c --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@aws-sdk/client-dynamodb/dist-types/ts3.4/commands/UpdateGlobalTableSettingsCommand.d.ts @@ -0,0 +1,51 @@ +import { Command as $Command } from "@smithy/smithy-client"; +import { MetadataBearer as __MetadataBearer } from "@smithy/types"; +import { + DynamoDBClientResolvedConfig, + ServiceInputTypes, + ServiceOutputTypes, +} from "../DynamoDBClient"; +import { + UpdateGlobalTableSettingsInput, + UpdateGlobalTableSettingsOutput, +} from "../models/models_0"; +export { __MetadataBearer }; +export { $Command }; +export interface UpdateGlobalTableSettingsCommandInput + extends UpdateGlobalTableSettingsInput {} +export interface UpdateGlobalTableSettingsCommandOutput + extends UpdateGlobalTableSettingsOutput, + __MetadataBearer {} +declare const UpdateGlobalTableSettingsCommand_base: { + new ( + input: UpdateGlobalTableSettingsCommandInput + ): import("@smithy/smithy-client").CommandImpl< + UpdateGlobalTableSettingsCommandInput, + UpdateGlobalTableSettingsCommandOutput, + DynamoDBClientResolvedConfig, + ServiceInputTypes, + ServiceOutputTypes + >; + new ( + __0_0: UpdateGlobalTableSettingsCommandInput + ): import("@smithy/smithy-client").CommandImpl< + UpdateGlobalTableSettingsCommandInput, + UpdateGlobalTableSettingsCommandOutput, + DynamoDBClientResolvedConfig, + ServiceInputTypes, + ServiceOutputTypes + >; + getEndpointParameterInstructions(): import("@smithy/middleware-endpoint").EndpointParameterInstructions; +}; +export declare class UpdateGlobalTableSettingsCommand extends UpdateGlobalTableSettingsCommand_base { + protected static __types: { + api: { + input: UpdateGlobalTableSettingsInput; + output: UpdateGlobalTableSettingsOutput; + }; + sdk: { + input: UpdateGlobalTableSettingsCommandInput; + output: UpdateGlobalTableSettingsCommandOutput; + }; + }; +} diff --git 
a/amplify/functions/deleteDocument/node_modules/@aws-sdk/client-dynamodb/dist-types/ts3.4/commands/UpdateItemCommand.d.ts b/amplify/functions/deleteDocument/node_modules/@aws-sdk/client-dynamodb/dist-types/ts3.4/commands/UpdateItemCommand.d.ts new file mode 100644 index 0000000..b302067 --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@aws-sdk/client-dynamodb/dist-types/ts3.4/commands/UpdateItemCommand.d.ts @@ -0,0 +1,47 @@ +import { Command as $Command } from "@smithy/smithy-client"; +import { MetadataBearer as __MetadataBearer } from "@smithy/types"; +import { + DynamoDBClientResolvedConfig, + ServiceInputTypes, + ServiceOutputTypes, +} from "../DynamoDBClient"; +import { UpdateItemInput, UpdateItemOutput } from "../models/models_0"; +export { __MetadataBearer }; +export { $Command }; +export interface UpdateItemCommandInput extends UpdateItemInput {} +export interface UpdateItemCommandOutput + extends UpdateItemOutput, + __MetadataBearer {} +declare const UpdateItemCommand_base: { + new ( + input: UpdateItemCommandInput + ): import("@smithy/smithy-client").CommandImpl< + UpdateItemCommandInput, + UpdateItemCommandOutput, + DynamoDBClientResolvedConfig, + ServiceInputTypes, + ServiceOutputTypes + >; + new ( + __0_0: UpdateItemCommandInput + ): import("@smithy/smithy-client").CommandImpl< + UpdateItemCommandInput, + UpdateItemCommandOutput, + DynamoDBClientResolvedConfig, + ServiceInputTypes, + ServiceOutputTypes + >; + getEndpointParameterInstructions(): import("@smithy/middleware-endpoint").EndpointParameterInstructions; +}; +export declare class UpdateItemCommand extends UpdateItemCommand_base { + protected static __types: { + api: { + input: UpdateItemInput; + output: UpdateItemOutput; + }; + sdk: { + input: UpdateItemCommandInput; + output: UpdateItemCommandOutput; + }; + }; +} diff --git a/amplify/functions/deleteDocument/node_modules/@aws-sdk/client-dynamodb/dist-types/ts3.4/commands/UpdateKinesisStreamingDestinationCommand.d.ts 
b/amplify/functions/deleteDocument/node_modules/@aws-sdk/client-dynamodb/dist-types/ts3.4/commands/UpdateKinesisStreamingDestinationCommand.d.ts new file mode 100644 index 0000000..a36aa7d --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@aws-sdk/client-dynamodb/dist-types/ts3.4/commands/UpdateKinesisStreamingDestinationCommand.d.ts @@ -0,0 +1,51 @@ +import { Command as $Command } from "@smithy/smithy-client"; +import { MetadataBearer as __MetadataBearer } from "@smithy/types"; +import { + DynamoDBClientResolvedConfig, + ServiceInputTypes, + ServiceOutputTypes, +} from "../DynamoDBClient"; +import { + UpdateKinesisStreamingDestinationInput, + UpdateKinesisStreamingDestinationOutput, +} from "../models/models_0"; +export { __MetadataBearer }; +export { $Command }; +export interface UpdateKinesisStreamingDestinationCommandInput + extends UpdateKinesisStreamingDestinationInput {} +export interface UpdateKinesisStreamingDestinationCommandOutput + extends UpdateKinesisStreamingDestinationOutput, + __MetadataBearer {} +declare const UpdateKinesisStreamingDestinationCommand_base: { + new ( + input: UpdateKinesisStreamingDestinationCommandInput + ): import("@smithy/smithy-client").CommandImpl< + UpdateKinesisStreamingDestinationCommandInput, + UpdateKinesisStreamingDestinationCommandOutput, + DynamoDBClientResolvedConfig, + ServiceInputTypes, + ServiceOutputTypes + >; + new ( + __0_0: UpdateKinesisStreamingDestinationCommandInput + ): import("@smithy/smithy-client").CommandImpl< + UpdateKinesisStreamingDestinationCommandInput, + UpdateKinesisStreamingDestinationCommandOutput, + DynamoDBClientResolvedConfig, + ServiceInputTypes, + ServiceOutputTypes + >; + getEndpointParameterInstructions(): import("@smithy/middleware-endpoint").EndpointParameterInstructions; +}; +export declare class UpdateKinesisStreamingDestinationCommand extends UpdateKinesisStreamingDestinationCommand_base { + protected static __types: { + api: { + input: 
UpdateKinesisStreamingDestinationInput; + output: UpdateKinesisStreamingDestinationOutput; + }; + sdk: { + input: UpdateKinesisStreamingDestinationCommandInput; + output: UpdateKinesisStreamingDestinationCommandOutput; + }; + }; +} diff --git a/amplify/functions/deleteDocument/node_modules/@aws-sdk/client-dynamodb/dist-types/ts3.4/commands/UpdateTableCommand.d.ts b/amplify/functions/deleteDocument/node_modules/@aws-sdk/client-dynamodb/dist-types/ts3.4/commands/UpdateTableCommand.d.ts new file mode 100644 index 0000000..59afb16 --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@aws-sdk/client-dynamodb/dist-types/ts3.4/commands/UpdateTableCommand.d.ts @@ -0,0 +1,47 @@ +import { Command as $Command } from "@smithy/smithy-client"; +import { MetadataBearer as __MetadataBearer } from "@smithy/types"; +import { + DynamoDBClientResolvedConfig, + ServiceInputTypes, + ServiceOutputTypes, +} from "../DynamoDBClient"; +import { UpdateTableInput, UpdateTableOutput } from "../models/models_0"; +export { __MetadataBearer }; +export { $Command }; +export interface UpdateTableCommandInput extends UpdateTableInput {} +export interface UpdateTableCommandOutput + extends UpdateTableOutput, + __MetadataBearer {} +declare const UpdateTableCommand_base: { + new ( + input: UpdateTableCommandInput + ): import("@smithy/smithy-client").CommandImpl< + UpdateTableCommandInput, + UpdateTableCommandOutput, + DynamoDBClientResolvedConfig, + ServiceInputTypes, + ServiceOutputTypes + >; + new ( + __0_0: UpdateTableCommandInput + ): import("@smithy/smithy-client").CommandImpl< + UpdateTableCommandInput, + UpdateTableCommandOutput, + DynamoDBClientResolvedConfig, + ServiceInputTypes, + ServiceOutputTypes + >; + getEndpointParameterInstructions(): import("@smithy/middleware-endpoint").EndpointParameterInstructions; +}; +export declare class UpdateTableCommand extends UpdateTableCommand_base { + protected static __types: { + api: { + input: UpdateTableInput; + output: UpdateTableOutput; 
+ }; + sdk: { + input: UpdateTableCommandInput; + output: UpdateTableCommandOutput; + }; + }; +} diff --git a/amplify/functions/deleteDocument/node_modules/@aws-sdk/client-dynamodb/dist-types/ts3.4/commands/UpdateTableReplicaAutoScalingCommand.d.ts b/amplify/functions/deleteDocument/node_modules/@aws-sdk/client-dynamodb/dist-types/ts3.4/commands/UpdateTableReplicaAutoScalingCommand.d.ts new file mode 100644 index 0000000..9f2925f --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@aws-sdk/client-dynamodb/dist-types/ts3.4/commands/UpdateTableReplicaAutoScalingCommand.d.ts @@ -0,0 +1,51 @@ +import { Command as $Command } from "@smithy/smithy-client"; +import { MetadataBearer as __MetadataBearer } from "@smithy/types"; +import { + DynamoDBClientResolvedConfig, + ServiceInputTypes, + ServiceOutputTypes, +} from "../DynamoDBClient"; +import { + UpdateTableReplicaAutoScalingInput, + UpdateTableReplicaAutoScalingOutput, +} from "../models/models_0"; +export { __MetadataBearer }; +export { $Command }; +export interface UpdateTableReplicaAutoScalingCommandInput + extends UpdateTableReplicaAutoScalingInput {} +export interface UpdateTableReplicaAutoScalingCommandOutput + extends UpdateTableReplicaAutoScalingOutput, + __MetadataBearer {} +declare const UpdateTableReplicaAutoScalingCommand_base: { + new ( + input: UpdateTableReplicaAutoScalingCommandInput + ): import("@smithy/smithy-client").CommandImpl< + UpdateTableReplicaAutoScalingCommandInput, + UpdateTableReplicaAutoScalingCommandOutput, + DynamoDBClientResolvedConfig, + ServiceInputTypes, + ServiceOutputTypes + >; + new ( + __0_0: UpdateTableReplicaAutoScalingCommandInput + ): import("@smithy/smithy-client").CommandImpl< + UpdateTableReplicaAutoScalingCommandInput, + UpdateTableReplicaAutoScalingCommandOutput, + DynamoDBClientResolvedConfig, + ServiceInputTypes, + ServiceOutputTypes + >; + getEndpointParameterInstructions(): import("@smithy/middleware-endpoint").EndpointParameterInstructions; +}; +export 
declare class UpdateTableReplicaAutoScalingCommand extends UpdateTableReplicaAutoScalingCommand_base { + protected static __types: { + api: { + input: UpdateTableReplicaAutoScalingInput; + output: UpdateTableReplicaAutoScalingOutput; + }; + sdk: { + input: UpdateTableReplicaAutoScalingCommandInput; + output: UpdateTableReplicaAutoScalingCommandOutput; + }; + }; +} diff --git a/amplify/functions/deleteDocument/node_modules/@aws-sdk/client-dynamodb/dist-types/ts3.4/commands/UpdateTimeToLiveCommand.d.ts b/amplify/functions/deleteDocument/node_modules/@aws-sdk/client-dynamodb/dist-types/ts3.4/commands/UpdateTimeToLiveCommand.d.ts new file mode 100644 index 0000000..a3f7b3b --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@aws-sdk/client-dynamodb/dist-types/ts3.4/commands/UpdateTimeToLiveCommand.d.ts @@ -0,0 +1,50 @@ +import { Command as $Command } from "@smithy/smithy-client"; +import { MetadataBearer as __MetadataBearer } from "@smithy/types"; +import { + DynamoDBClientResolvedConfig, + ServiceInputTypes, + ServiceOutputTypes, +} from "../DynamoDBClient"; +import { + UpdateTimeToLiveInput, + UpdateTimeToLiveOutput, +} from "../models/models_0"; +export { __MetadataBearer }; +export { $Command }; +export interface UpdateTimeToLiveCommandInput extends UpdateTimeToLiveInput {} +export interface UpdateTimeToLiveCommandOutput + extends UpdateTimeToLiveOutput, + __MetadataBearer {} +declare const UpdateTimeToLiveCommand_base: { + new ( + input: UpdateTimeToLiveCommandInput + ): import("@smithy/smithy-client").CommandImpl< + UpdateTimeToLiveCommandInput, + UpdateTimeToLiveCommandOutput, + DynamoDBClientResolvedConfig, + ServiceInputTypes, + ServiceOutputTypes + >; + new ( + __0_0: UpdateTimeToLiveCommandInput + ): import("@smithy/smithy-client").CommandImpl< + UpdateTimeToLiveCommandInput, + UpdateTimeToLiveCommandOutput, + DynamoDBClientResolvedConfig, + ServiceInputTypes, + ServiceOutputTypes + >; + getEndpointParameterInstructions(): 
import("@smithy/middleware-endpoint").EndpointParameterInstructions; +}; +export declare class UpdateTimeToLiveCommand extends UpdateTimeToLiveCommand_base { + protected static __types: { + api: { + input: UpdateTimeToLiveInput; + output: UpdateTimeToLiveOutput; + }; + sdk: { + input: UpdateTimeToLiveCommandInput; + output: UpdateTimeToLiveCommandOutput; + }; + }; +} diff --git a/amplify/functions/deleteDocument/node_modules/@aws-sdk/client-dynamodb/dist-types/ts3.4/commands/index.d.ts b/amplify/functions/deleteDocument/node_modules/@aws-sdk/client-dynamodb/dist-types/ts3.4/commands/index.d.ts new file mode 100644 index 0000000..a5053a4 --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@aws-sdk/client-dynamodb/dist-types/ts3.4/commands/index.d.ts @@ -0,0 +1,57 @@ +export * from "./BatchExecuteStatementCommand"; +export * from "./BatchGetItemCommand"; +export * from "./BatchWriteItemCommand"; +export * from "./CreateBackupCommand"; +export * from "./CreateGlobalTableCommand"; +export * from "./CreateTableCommand"; +export * from "./DeleteBackupCommand"; +export * from "./DeleteItemCommand"; +export * from "./DeleteResourcePolicyCommand"; +export * from "./DeleteTableCommand"; +export * from "./DescribeBackupCommand"; +export * from "./DescribeContinuousBackupsCommand"; +export * from "./DescribeContributorInsightsCommand"; +export * from "./DescribeEndpointsCommand"; +export * from "./DescribeExportCommand"; +export * from "./DescribeGlobalTableCommand"; +export * from "./DescribeGlobalTableSettingsCommand"; +export * from "./DescribeImportCommand"; +export * from "./DescribeKinesisStreamingDestinationCommand"; +export * from "./DescribeLimitsCommand"; +export * from "./DescribeTableCommand"; +export * from "./DescribeTableReplicaAutoScalingCommand"; +export * from "./DescribeTimeToLiveCommand"; +export * from "./DisableKinesisStreamingDestinationCommand"; +export * from "./EnableKinesisStreamingDestinationCommand"; +export * from 
"./ExecuteStatementCommand"; +export * from "./ExecuteTransactionCommand"; +export * from "./ExportTableToPointInTimeCommand"; +export * from "./GetItemCommand"; +export * from "./GetResourcePolicyCommand"; +export * from "./ImportTableCommand"; +export * from "./ListBackupsCommand"; +export * from "./ListContributorInsightsCommand"; +export * from "./ListExportsCommand"; +export * from "./ListGlobalTablesCommand"; +export * from "./ListImportsCommand"; +export * from "./ListTablesCommand"; +export * from "./ListTagsOfResourceCommand"; +export * from "./PutItemCommand"; +export * from "./PutResourcePolicyCommand"; +export * from "./QueryCommand"; +export * from "./RestoreTableFromBackupCommand"; +export * from "./RestoreTableToPointInTimeCommand"; +export * from "./ScanCommand"; +export * from "./TagResourceCommand"; +export * from "./TransactGetItemsCommand"; +export * from "./TransactWriteItemsCommand"; +export * from "./UntagResourceCommand"; +export * from "./UpdateContinuousBackupsCommand"; +export * from "./UpdateContributorInsightsCommand"; +export * from "./UpdateGlobalTableCommand"; +export * from "./UpdateGlobalTableSettingsCommand"; +export * from "./UpdateItemCommand"; +export * from "./UpdateKinesisStreamingDestinationCommand"; +export * from "./UpdateTableCommand"; +export * from "./UpdateTableReplicaAutoScalingCommand"; +export * from "./UpdateTimeToLiveCommand"; diff --git a/amplify/functions/deleteDocument/node_modules/@aws-sdk/client-dynamodb/dist-types/ts3.4/endpoint/EndpointParameters.d.ts b/amplify/functions/deleteDocument/node_modules/@aws-sdk/client-dynamodb/dist-types/ts3.4/endpoint/EndpointParameters.d.ts new file mode 100644 index 0000000..bef37c3 --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@aws-sdk/client-dynamodb/dist-types/ts3.4/endpoint/EndpointParameters.d.ts @@ -0,0 +1,65 @@ +import { + Endpoint, + EndpointParameters as __EndpointParameters, + EndpointV2, + Provider, +} from "@smithy/types"; +export interface 
ClientInputEndpointParameters { + region?: string | Provider; + useDualstackEndpoint?: boolean | Provider; + useFipsEndpoint?: boolean | Provider; + endpoint?: + | string + | Provider + | Endpoint + | Provider + | EndpointV2 + | Provider; + accountId?: string | Provider; + accountIdEndpointMode?: string | Provider; +} +export type ClientResolvedEndpointParameters = ClientInputEndpointParameters & { + defaultSigningName: string; +}; +export declare const resolveClientEndpointParameters: ( + options: T & ClientInputEndpointParameters +) => T & + ClientInputEndpointParameters & { + defaultSigningName: string; + }; +export declare const commonParams: { + readonly UseFIPS: { + readonly type: "builtInParams"; + readonly name: "useFipsEndpoint"; + }; + readonly AccountId: { + readonly type: "builtInParams"; + readonly name: "accountId"; + }; + readonly Endpoint: { + readonly type: "builtInParams"; + readonly name: "endpoint"; + }; + readonly Region: { + readonly type: "builtInParams"; + readonly name: "region"; + }; + readonly UseDualStack: { + readonly type: "builtInParams"; + readonly name: "useDualstackEndpoint"; + }; + readonly AccountIdEndpointMode: { + readonly type: "builtInParams"; + readonly name: "accountIdEndpointMode"; + }; +}; +export interface EndpointParameters extends __EndpointParameters { + Region?: string; + UseDualStack?: boolean; + UseFIPS?: boolean; + Endpoint?: string; + AccountId?: string; + AccountIdEndpointMode?: string; + ResourceArn?: string; + ResourceArnList?: string[]; +} diff --git a/amplify/functions/deleteDocument/node_modules/@aws-sdk/client-dynamodb/dist-types/ts3.4/endpoint/endpointResolver.d.ts b/amplify/functions/deleteDocument/node_modules/@aws-sdk/client-dynamodb/dist-types/ts3.4/endpoint/endpointResolver.d.ts new file mode 100644 index 0000000..5909925 --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@aws-sdk/client-dynamodb/dist-types/ts3.4/endpoint/endpointResolver.d.ts @@ -0,0 +1,8 @@ +import { EndpointV2, 
Logger } from "@smithy/types"; +import { EndpointParameters } from "./EndpointParameters"; +export declare const defaultEndpointResolver: ( + endpointParams: EndpointParameters, + context?: { + logger?: Logger; + } +) => EndpointV2; diff --git a/amplify/functions/deleteDocument/node_modules/@aws-sdk/client-dynamodb/dist-types/ts3.4/endpoint/ruleset.d.ts b/amplify/functions/deleteDocument/node_modules/@aws-sdk/client-dynamodb/dist-types/ts3.4/endpoint/ruleset.d.ts new file mode 100644 index 0000000..4b23899 --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@aws-sdk/client-dynamodb/dist-types/ts3.4/endpoint/ruleset.d.ts @@ -0,0 +1,2 @@ +import { RuleSetObject } from "@smithy/types"; +export declare const ruleSet: RuleSetObject; diff --git a/amplify/functions/deleteDocument/node_modules/@aws-sdk/client-dynamodb/dist-types/ts3.4/extensionConfiguration.d.ts b/amplify/functions/deleteDocument/node_modules/@aws-sdk/client-dynamodb/dist-types/ts3.4/extensionConfiguration.d.ts new file mode 100644 index 0000000..7db993d --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@aws-sdk/client-dynamodb/dist-types/ts3.4/extensionConfiguration.d.ts @@ -0,0 +1,9 @@ +import { AwsRegionExtensionConfiguration } from "@aws-sdk/types"; +import { HttpHandlerExtensionConfiguration } from "@smithy/protocol-http"; +import { DefaultExtensionConfiguration } from "@smithy/types"; +import { HttpAuthExtensionConfiguration } from "./auth/httpAuthExtensionConfiguration"; +export interface DynamoDBExtensionConfiguration + extends HttpHandlerExtensionConfiguration, + DefaultExtensionConfiguration, + AwsRegionExtensionConfiguration, + HttpAuthExtensionConfiguration {} diff --git a/amplify/functions/deleteDocument/node_modules/@aws-sdk/client-dynamodb/dist-types/ts3.4/index.d.ts b/amplify/functions/deleteDocument/node_modules/@aws-sdk/client-dynamodb/dist-types/ts3.4/index.d.ts new file mode 100644 index 0000000..58fb2ff --- /dev/null +++ 
b/amplify/functions/deleteDocument/node_modules/@aws-sdk/client-dynamodb/dist-types/ts3.4/index.d.ts @@ -0,0 +1,10 @@ +export * from "./DynamoDBClient"; +export * from "./DynamoDB"; +export { ClientInputEndpointParameters } from "./endpoint/EndpointParameters"; +export { RuntimeExtension } from "./runtimeExtensions"; +export { DynamoDBExtensionConfiguration } from "./extensionConfiguration"; +export * from "./commands"; +export * from "./pagination"; +export * from "./waiters"; +export * from "./models"; +export { DynamoDBServiceException } from "./models/DynamoDBServiceException"; diff --git a/amplify/functions/deleteDocument/node_modules/@aws-sdk/client-dynamodb/dist-types/ts3.4/models/DynamoDBServiceException.d.ts b/amplify/functions/deleteDocument/node_modules/@aws-sdk/client-dynamodb/dist-types/ts3.4/models/DynamoDBServiceException.d.ts new file mode 100644 index 0000000..e5bd2c6 --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@aws-sdk/client-dynamodb/dist-types/ts3.4/models/DynamoDBServiceException.d.ts @@ -0,0 +1,9 @@ +import { + ServiceException as __ServiceException, + ServiceExceptionOptions as __ServiceExceptionOptions, +} from "@smithy/smithy-client"; +export { __ServiceExceptionOptions }; +export { __ServiceException }; +export declare class DynamoDBServiceException extends __ServiceException { + constructor(options: __ServiceExceptionOptions); +} diff --git a/amplify/functions/deleteDocument/node_modules/@aws-sdk/client-dynamodb/dist-types/ts3.4/models/index.d.ts b/amplify/functions/deleteDocument/node_modules/@aws-sdk/client-dynamodb/dist-types/ts3.4/models/index.d.ts new file mode 100644 index 0000000..09c5d6e --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@aws-sdk/client-dynamodb/dist-types/ts3.4/models/index.d.ts @@ -0,0 +1 @@ +export * from "./models_0"; diff --git a/amplify/functions/deleteDocument/node_modules/@aws-sdk/client-dynamodb/dist-types/ts3.4/models/models_0.d.ts 
b/amplify/functions/deleteDocument/node_modules/@aws-sdk/client-dynamodb/dist-types/ts3.4/models/models_0.d.ts new file mode 100644 index 0000000..3acf86a --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@aws-sdk/client-dynamodb/dist-types/ts3.4/models/models_0.d.ts @@ -0,0 +1,2036 @@ +import { ExceptionOptionType as __ExceptionOptionType } from "@smithy/smithy-client"; +import { DynamoDBServiceException as __BaseException } from "./DynamoDBServiceException"; +export declare const ApproximateCreationDateTimePrecision: { + readonly MICROSECOND: "MICROSECOND"; + readonly MILLISECOND: "MILLISECOND"; +}; +export type ApproximateCreationDateTimePrecision = + (typeof ApproximateCreationDateTimePrecision)[keyof typeof ApproximateCreationDateTimePrecision]; +export interface ArchivalSummary { + ArchivalDateTime?: Date | undefined; + ArchivalReason?: string | undefined; + ArchivalBackupArn?: string | undefined; +} +export declare const AttributeAction: { + readonly ADD: "ADD"; + readonly DELETE: "DELETE"; + readonly PUT: "PUT"; +}; +export type AttributeAction = + (typeof AttributeAction)[keyof typeof AttributeAction]; +export declare const ScalarAttributeType: { + readonly B: "B"; + readonly N: "N"; + readonly S: "S"; +}; +export type ScalarAttributeType = + (typeof ScalarAttributeType)[keyof typeof ScalarAttributeType]; +export interface AttributeDefinition { + AttributeName: string | undefined; + AttributeType: ScalarAttributeType | undefined; +} +export interface AutoScalingTargetTrackingScalingPolicyConfigurationDescription { + DisableScaleIn?: boolean | undefined; + ScaleInCooldown?: number | undefined; + ScaleOutCooldown?: number | undefined; + TargetValue: number | undefined; +} +export interface AutoScalingPolicyDescription { + PolicyName?: string | undefined; + TargetTrackingScalingPolicyConfiguration?: + | AutoScalingTargetTrackingScalingPolicyConfigurationDescription + | undefined; +} +export interface 
AutoScalingTargetTrackingScalingPolicyConfigurationUpdate { + DisableScaleIn?: boolean | undefined; + ScaleInCooldown?: number | undefined; + ScaleOutCooldown?: number | undefined; + TargetValue: number | undefined; +} +export interface AutoScalingPolicyUpdate { + PolicyName?: string | undefined; + TargetTrackingScalingPolicyConfiguration: + | AutoScalingTargetTrackingScalingPolicyConfigurationUpdate + | undefined; +} +export interface AutoScalingSettingsDescription { + MinimumUnits?: number | undefined; + MaximumUnits?: number | undefined; + AutoScalingDisabled?: boolean | undefined; + AutoScalingRoleArn?: string | undefined; + ScalingPolicies?: AutoScalingPolicyDescription[] | undefined; +} +export interface AutoScalingSettingsUpdate { + MinimumUnits?: number | undefined; + MaximumUnits?: number | undefined; + AutoScalingDisabled?: boolean | undefined; + AutoScalingRoleArn?: string | undefined; + ScalingPolicyUpdate?: AutoScalingPolicyUpdate | undefined; +} +export declare const BackupStatus: { + readonly AVAILABLE: "AVAILABLE"; + readonly CREATING: "CREATING"; + readonly DELETED: "DELETED"; +}; +export type BackupStatus = (typeof BackupStatus)[keyof typeof BackupStatus]; +export declare const BackupType: { + readonly AWS_BACKUP: "AWS_BACKUP"; + readonly SYSTEM: "SYSTEM"; + readonly USER: "USER"; +}; +export type BackupType = (typeof BackupType)[keyof typeof BackupType]; +export interface BackupDetails { + BackupArn: string | undefined; + BackupName: string | undefined; + BackupSizeBytes?: number | undefined; + BackupStatus: BackupStatus | undefined; + BackupType: BackupType | undefined; + BackupCreationDateTime: Date | undefined; + BackupExpiryDateTime?: Date | undefined; +} +export declare const BillingMode: { + readonly PAY_PER_REQUEST: "PAY_PER_REQUEST"; + readonly PROVISIONED: "PROVISIONED"; +}; +export type BillingMode = (typeof BillingMode)[keyof typeof BillingMode]; +export declare const KeyType: { + readonly HASH: "HASH"; + readonly RANGE: "RANGE"; +}; 
+export type KeyType = (typeof KeyType)[keyof typeof KeyType]; +export interface KeySchemaElement { + AttributeName: string | undefined; + KeyType: KeyType | undefined; +} +export interface OnDemandThroughput { + MaxReadRequestUnits?: number | undefined; + MaxWriteRequestUnits?: number | undefined; +} +export interface ProvisionedThroughput { + ReadCapacityUnits: number | undefined; + WriteCapacityUnits: number | undefined; +} +export interface SourceTableDetails { + TableName: string | undefined; + TableId: string | undefined; + TableArn?: string | undefined; + TableSizeBytes?: number | undefined; + KeySchema: KeySchemaElement[] | undefined; + TableCreationDateTime: Date | undefined; + ProvisionedThroughput: ProvisionedThroughput | undefined; + OnDemandThroughput?: OnDemandThroughput | undefined; + ItemCount?: number | undefined; + BillingMode?: BillingMode | undefined; +} +export declare const ProjectionType: { + readonly ALL: "ALL"; + readonly INCLUDE: "INCLUDE"; + readonly KEYS_ONLY: "KEYS_ONLY"; +}; +export type ProjectionType = + (typeof ProjectionType)[keyof typeof ProjectionType]; +export interface Projection { + ProjectionType?: ProjectionType | undefined; + NonKeyAttributes?: string[] | undefined; +} +export interface GlobalSecondaryIndexInfo { + IndexName?: string | undefined; + KeySchema?: KeySchemaElement[] | undefined; + Projection?: Projection | undefined; + ProvisionedThroughput?: ProvisionedThroughput | undefined; + OnDemandThroughput?: OnDemandThroughput | undefined; +} +export interface LocalSecondaryIndexInfo { + IndexName?: string | undefined; + KeySchema?: KeySchemaElement[] | undefined; + Projection?: Projection | undefined; +} +export declare const SSEType: { + readonly AES256: "AES256"; + readonly KMS: "KMS"; +}; +export type SSEType = (typeof SSEType)[keyof typeof SSEType]; +export declare const SSEStatus: { + readonly DISABLED: "DISABLED"; + readonly DISABLING: "DISABLING"; + readonly ENABLED: "ENABLED"; + readonly ENABLING: "ENABLING"; + 
readonly UPDATING: "UPDATING"; +}; +export type SSEStatus = (typeof SSEStatus)[keyof typeof SSEStatus]; +export interface SSEDescription { + Status?: SSEStatus | undefined; + SSEType?: SSEType | undefined; + KMSMasterKeyArn?: string | undefined; + InaccessibleEncryptionDateTime?: Date | undefined; +} +export declare const StreamViewType: { + readonly KEYS_ONLY: "KEYS_ONLY"; + readonly NEW_AND_OLD_IMAGES: "NEW_AND_OLD_IMAGES"; + readonly NEW_IMAGE: "NEW_IMAGE"; + readonly OLD_IMAGE: "OLD_IMAGE"; +}; +export type StreamViewType = + (typeof StreamViewType)[keyof typeof StreamViewType]; +export interface StreamSpecification { + StreamEnabled: boolean | undefined; + StreamViewType?: StreamViewType | undefined; +} +export declare const TimeToLiveStatus: { + readonly DISABLED: "DISABLED"; + readonly DISABLING: "DISABLING"; + readonly ENABLED: "ENABLED"; + readonly ENABLING: "ENABLING"; +}; +export type TimeToLiveStatus = + (typeof TimeToLiveStatus)[keyof typeof TimeToLiveStatus]; +export interface TimeToLiveDescription { + TimeToLiveStatus?: TimeToLiveStatus | undefined; + AttributeName?: string | undefined; +} +export interface SourceTableFeatureDetails { + LocalSecondaryIndexes?: LocalSecondaryIndexInfo[] | undefined; + GlobalSecondaryIndexes?: GlobalSecondaryIndexInfo[] | undefined; + StreamDescription?: StreamSpecification | undefined; + TimeToLiveDescription?: TimeToLiveDescription | undefined; + SSEDescription?: SSEDescription | undefined; +} +export interface BackupDescription { + BackupDetails?: BackupDetails | undefined; + SourceTableDetails?: SourceTableDetails | undefined; + SourceTableFeatureDetails?: SourceTableFeatureDetails | undefined; +} +export declare class BackupInUseException extends __BaseException { + readonly name: "BackupInUseException"; + readonly $fault: "client"; + constructor( + opts: __ExceptionOptionType + ); +} +export declare class BackupNotFoundException extends __BaseException { + readonly name: "BackupNotFoundException"; + readonly 
$fault: "client"; + constructor( + opts: __ExceptionOptionType + ); +} +export interface BackupSummary { + TableName?: string | undefined; + TableId?: string | undefined; + TableArn?: string | undefined; + BackupArn?: string | undefined; + BackupName?: string | undefined; + BackupCreationDateTime?: Date | undefined; + BackupExpiryDateTime?: Date | undefined; + BackupStatus?: BackupStatus | undefined; + BackupType?: BackupType | undefined; + BackupSizeBytes?: number | undefined; +} +export declare const BackupTypeFilter: { + readonly ALL: "ALL"; + readonly AWS_BACKUP: "AWS_BACKUP"; + readonly SYSTEM: "SYSTEM"; + readonly USER: "USER"; +}; +export type BackupTypeFilter = + (typeof BackupTypeFilter)[keyof typeof BackupTypeFilter]; +export declare const ReturnConsumedCapacity: { + readonly INDEXES: "INDEXES"; + readonly NONE: "NONE"; + readonly TOTAL: "TOTAL"; +}; +export type ReturnConsumedCapacity = + (typeof ReturnConsumedCapacity)[keyof typeof ReturnConsumedCapacity]; +export declare const ReturnValuesOnConditionCheckFailure: { + readonly ALL_OLD: "ALL_OLD"; + readonly NONE: "NONE"; +}; +export type ReturnValuesOnConditionCheckFailure = + (typeof ReturnValuesOnConditionCheckFailure)[keyof typeof ReturnValuesOnConditionCheckFailure]; +export interface Capacity { + ReadCapacityUnits?: number | undefined; + WriteCapacityUnits?: number | undefined; + CapacityUnits?: number | undefined; +} +export interface ConsumedCapacity { + TableName?: string | undefined; + CapacityUnits?: number | undefined; + ReadCapacityUnits?: number | undefined; + WriteCapacityUnits?: number | undefined; + Table?: Capacity | undefined; + LocalSecondaryIndexes?: Record | undefined; + GlobalSecondaryIndexes?: Record | undefined; +} +export declare const BatchStatementErrorCodeEnum: { + readonly AccessDenied: "AccessDenied"; + readonly ConditionalCheckFailed: "ConditionalCheckFailed"; + readonly DuplicateItem: "DuplicateItem"; + readonly InternalServerError: "InternalServerError"; + readonly 
ItemCollectionSizeLimitExceeded: "ItemCollectionSizeLimitExceeded"; + readonly ProvisionedThroughputExceeded: "ProvisionedThroughputExceeded"; + readonly RequestLimitExceeded: "RequestLimitExceeded"; + readonly ResourceNotFound: "ResourceNotFound"; + readonly ThrottlingError: "ThrottlingError"; + readonly TransactionConflict: "TransactionConflict"; + readonly ValidationError: "ValidationError"; +}; +export type BatchStatementErrorCodeEnum = + (typeof BatchStatementErrorCodeEnum)[keyof typeof BatchStatementErrorCodeEnum]; +export declare class InternalServerError extends __BaseException { + readonly name: "InternalServerError"; + readonly $fault: "server"; + constructor( + opts: __ExceptionOptionType + ); +} +export declare class RequestLimitExceeded extends __BaseException { + readonly name: "RequestLimitExceeded"; + readonly $fault: "client"; + constructor( + opts: __ExceptionOptionType + ); +} +export declare class InvalidEndpointException extends __BaseException { + readonly name: "InvalidEndpointException"; + readonly $fault: "client"; + Message?: string | undefined; + constructor( + opts: __ExceptionOptionType + ); +} +export declare class ProvisionedThroughputExceededException extends __BaseException { + readonly name: "ProvisionedThroughputExceededException"; + readonly $fault: "client"; + constructor( + opts: __ExceptionOptionType< + ProvisionedThroughputExceededException, + __BaseException + > + ); +} +export declare class ResourceNotFoundException extends __BaseException { + readonly name: "ResourceNotFoundException"; + readonly $fault: "client"; + constructor( + opts: __ExceptionOptionType + ); +} +export declare const ReturnItemCollectionMetrics: { + readonly NONE: "NONE"; + readonly SIZE: "SIZE"; +}; +export type ReturnItemCollectionMetrics = + (typeof ReturnItemCollectionMetrics)[keyof typeof ReturnItemCollectionMetrics]; +export declare class ItemCollectionSizeLimitExceededException extends __BaseException { + readonly name: 
"ItemCollectionSizeLimitExceededException"; + readonly $fault: "client"; + constructor( + opts: __ExceptionOptionType< + ItemCollectionSizeLimitExceededException, + __BaseException + > + ); +} +export interface BillingModeSummary { + BillingMode?: BillingMode | undefined; + LastUpdateToPayPerRequestDateTime?: Date | undefined; +} +export declare const ComparisonOperator: { + readonly BEGINS_WITH: "BEGINS_WITH"; + readonly BETWEEN: "BETWEEN"; + readonly CONTAINS: "CONTAINS"; + readonly EQ: "EQ"; + readonly GE: "GE"; + readonly GT: "GT"; + readonly IN: "IN"; + readonly LE: "LE"; + readonly LT: "LT"; + readonly NE: "NE"; + readonly NOT_CONTAINS: "NOT_CONTAINS"; + readonly NOT_NULL: "NOT_NULL"; + readonly NULL: "NULL"; +}; +export type ComparisonOperator = + (typeof ComparisonOperator)[keyof typeof ComparisonOperator]; +export declare const ConditionalOperator: { + readonly AND: "AND"; + readonly OR: "OR"; +}; +export type ConditionalOperator = + (typeof ConditionalOperator)[keyof typeof ConditionalOperator]; +export declare const ContinuousBackupsStatus: { + readonly DISABLED: "DISABLED"; + readonly ENABLED: "ENABLED"; +}; +export type ContinuousBackupsStatus = + (typeof ContinuousBackupsStatus)[keyof typeof ContinuousBackupsStatus]; +export declare const PointInTimeRecoveryStatus: { + readonly DISABLED: "DISABLED"; + readonly ENABLED: "ENABLED"; +}; +export type PointInTimeRecoveryStatus = + (typeof PointInTimeRecoveryStatus)[keyof typeof PointInTimeRecoveryStatus]; +export interface PointInTimeRecoveryDescription { + PointInTimeRecoveryStatus?: PointInTimeRecoveryStatus | undefined; + RecoveryPeriodInDays?: number | undefined; + EarliestRestorableDateTime?: Date | undefined; + LatestRestorableDateTime?: Date | undefined; +} +export interface ContinuousBackupsDescription { + ContinuousBackupsStatus: ContinuousBackupsStatus | undefined; + PointInTimeRecoveryDescription?: PointInTimeRecoveryDescription | undefined; +} +export declare class 
ContinuousBackupsUnavailableException extends __BaseException { + readonly name: "ContinuousBackupsUnavailableException"; + readonly $fault: "client"; + constructor( + opts: __ExceptionOptionType< + ContinuousBackupsUnavailableException, + __BaseException + > + ); +} +export declare const ContributorInsightsAction: { + readonly DISABLE: "DISABLE"; + readonly ENABLE: "ENABLE"; +}; +export type ContributorInsightsAction = + (typeof ContributorInsightsAction)[keyof typeof ContributorInsightsAction]; +export declare const ContributorInsightsStatus: { + readonly DISABLED: "DISABLED"; + readonly DISABLING: "DISABLING"; + readonly ENABLED: "ENABLED"; + readonly ENABLING: "ENABLING"; + readonly FAILED: "FAILED"; +}; +export type ContributorInsightsStatus = + (typeof ContributorInsightsStatus)[keyof typeof ContributorInsightsStatus]; +export interface ContributorInsightsSummary { + TableName?: string | undefined; + IndexName?: string | undefined; + ContributorInsightsStatus?: ContributorInsightsStatus | undefined; +} +export interface CreateBackupInput { + TableName: string | undefined; + BackupName: string | undefined; +} +export interface CreateBackupOutput { + BackupDetails?: BackupDetails | undefined; +} +export declare class LimitExceededException extends __BaseException { + readonly name: "LimitExceededException"; + readonly $fault: "client"; + constructor( + opts: __ExceptionOptionType + ); +} +export declare class TableInUseException extends __BaseException { + readonly name: "TableInUseException"; + readonly $fault: "client"; + constructor( + opts: __ExceptionOptionType + ); +} +export declare class TableNotFoundException extends __BaseException { + readonly name: "TableNotFoundException"; + readonly $fault: "client"; + constructor( + opts: __ExceptionOptionType + ); +} +export interface WarmThroughput { + ReadUnitsPerSecond?: number | undefined; + WriteUnitsPerSecond?: number | undefined; +} +export interface CreateGlobalSecondaryIndexAction { + IndexName: string 
| undefined; + KeySchema: KeySchemaElement[] | undefined; + Projection: Projection | undefined; + ProvisionedThroughput?: ProvisionedThroughput | undefined; + OnDemandThroughput?: OnDemandThroughput | undefined; + WarmThroughput?: WarmThroughput | undefined; +} +export interface Replica { + RegionName?: string | undefined; +} +export interface CreateGlobalTableInput { + GlobalTableName: string | undefined; + ReplicationGroup: Replica[] | undefined; +} +export declare const GlobalTableStatus: { + readonly ACTIVE: "ACTIVE"; + readonly CREATING: "CREATING"; + readonly DELETING: "DELETING"; + readonly UPDATING: "UPDATING"; +}; +export type GlobalTableStatus = + (typeof GlobalTableStatus)[keyof typeof GlobalTableStatus]; +export interface OnDemandThroughputOverride { + MaxReadRequestUnits?: number | undefined; +} +export interface ProvisionedThroughputOverride { + ReadCapacityUnits?: number | undefined; +} +export declare const IndexStatus: { + readonly ACTIVE: "ACTIVE"; + readonly CREATING: "CREATING"; + readonly DELETING: "DELETING"; + readonly UPDATING: "UPDATING"; +}; +export type IndexStatus = (typeof IndexStatus)[keyof typeof IndexStatus]; +export interface GlobalSecondaryIndexWarmThroughputDescription { + ReadUnitsPerSecond?: number | undefined; + WriteUnitsPerSecond?: number | undefined; + Status?: IndexStatus | undefined; +} +export interface ReplicaGlobalSecondaryIndexDescription { + IndexName?: string | undefined; + ProvisionedThroughputOverride?: ProvisionedThroughputOverride | undefined; + OnDemandThroughputOverride?: OnDemandThroughputOverride | undefined; + WarmThroughput?: GlobalSecondaryIndexWarmThroughputDescription | undefined; +} +export declare const ReplicaStatus: { + readonly ACTIVE: "ACTIVE"; + readonly CREATING: "CREATING"; + readonly CREATION_FAILED: "CREATION_FAILED"; + readonly DELETING: "DELETING"; + readonly INACCESSIBLE_ENCRYPTION_CREDENTIALS: "INACCESSIBLE_ENCRYPTION_CREDENTIALS"; + readonly REGION_DISABLED: "REGION_DISABLED"; + readonly 
UPDATING: "UPDATING"; +}; +export type ReplicaStatus = (typeof ReplicaStatus)[keyof typeof ReplicaStatus]; +export declare const TableClass: { + readonly STANDARD: "STANDARD"; + readonly STANDARD_INFREQUENT_ACCESS: "STANDARD_INFREQUENT_ACCESS"; +}; +export type TableClass = (typeof TableClass)[keyof typeof TableClass]; +export interface TableClassSummary { + TableClass?: TableClass | undefined; + LastUpdateDateTime?: Date | undefined; +} +export declare const TableStatus: { + readonly ACTIVE: "ACTIVE"; + readonly ARCHIVED: "ARCHIVED"; + readonly ARCHIVING: "ARCHIVING"; + readonly CREATING: "CREATING"; + readonly DELETING: "DELETING"; + readonly INACCESSIBLE_ENCRYPTION_CREDENTIALS: "INACCESSIBLE_ENCRYPTION_CREDENTIALS"; + readonly UPDATING: "UPDATING"; +}; +export type TableStatus = (typeof TableStatus)[keyof typeof TableStatus]; +export interface TableWarmThroughputDescription { + ReadUnitsPerSecond?: number | undefined; + WriteUnitsPerSecond?: number | undefined; + Status?: TableStatus | undefined; +} +export interface ReplicaDescription { + RegionName?: string | undefined; + ReplicaStatus?: ReplicaStatus | undefined; + ReplicaStatusDescription?: string | undefined; + ReplicaStatusPercentProgress?: string | undefined; + KMSMasterKeyId?: string | undefined; + ProvisionedThroughputOverride?: ProvisionedThroughputOverride | undefined; + OnDemandThroughputOverride?: OnDemandThroughputOverride | undefined; + WarmThroughput?: TableWarmThroughputDescription | undefined; + GlobalSecondaryIndexes?: ReplicaGlobalSecondaryIndexDescription[] | undefined; + ReplicaInaccessibleDateTime?: Date | undefined; + ReplicaTableClassSummary?: TableClassSummary | undefined; +} +export interface GlobalTableDescription { + ReplicationGroup?: ReplicaDescription[] | undefined; + GlobalTableArn?: string | undefined; + CreationDateTime?: Date | undefined; + GlobalTableStatus?: GlobalTableStatus | undefined; + GlobalTableName?: string | undefined; +} +export interface CreateGlobalTableOutput { 
+ GlobalTableDescription?: GlobalTableDescription | undefined; +} +export declare class GlobalTableAlreadyExistsException extends __BaseException { + readonly name: "GlobalTableAlreadyExistsException"; + readonly $fault: "client"; + constructor( + opts: __ExceptionOptionType< + GlobalTableAlreadyExistsException, + __BaseException + > + ); +} +export interface CreateReplicaAction { + RegionName: string | undefined; +} +export interface ReplicaGlobalSecondaryIndex { + IndexName: string | undefined; + ProvisionedThroughputOverride?: ProvisionedThroughputOverride | undefined; + OnDemandThroughputOverride?: OnDemandThroughputOverride | undefined; +} +export interface CreateReplicationGroupMemberAction { + RegionName: string | undefined; + KMSMasterKeyId?: string | undefined; + ProvisionedThroughputOverride?: ProvisionedThroughputOverride | undefined; + OnDemandThroughputOverride?: OnDemandThroughputOverride | undefined; + GlobalSecondaryIndexes?: ReplicaGlobalSecondaryIndex[] | undefined; + TableClassOverride?: TableClass | undefined; +} +export interface GlobalSecondaryIndex { + IndexName: string | undefined; + KeySchema: KeySchemaElement[] | undefined; + Projection: Projection | undefined; + ProvisionedThroughput?: ProvisionedThroughput | undefined; + OnDemandThroughput?: OnDemandThroughput | undefined; + WarmThroughput?: WarmThroughput | undefined; +} +export interface LocalSecondaryIndex { + IndexName: string | undefined; + KeySchema: KeySchemaElement[] | undefined; + Projection: Projection | undefined; +} +export interface SSESpecification { + Enabled?: boolean | undefined; + SSEType?: SSEType | undefined; + KMSMasterKeyId?: string | undefined; +} +export interface Tag { + Key: string | undefined; + Value: string | undefined; +} +export interface CreateTableInput { + AttributeDefinitions: AttributeDefinition[] | undefined; + TableName: string | undefined; + KeySchema: KeySchemaElement[] | undefined; + LocalSecondaryIndexes?: LocalSecondaryIndex[] | undefined; + 
GlobalSecondaryIndexes?: GlobalSecondaryIndex[] | undefined; + BillingMode?: BillingMode | undefined; + ProvisionedThroughput?: ProvisionedThroughput | undefined; + StreamSpecification?: StreamSpecification | undefined; + SSESpecification?: SSESpecification | undefined; + Tags?: Tag[] | undefined; + TableClass?: TableClass | undefined; + DeletionProtectionEnabled?: boolean | undefined; + WarmThroughput?: WarmThroughput | undefined; + ResourcePolicy?: string | undefined; + OnDemandThroughput?: OnDemandThroughput | undefined; +} +export interface ProvisionedThroughputDescription { + LastIncreaseDateTime?: Date | undefined; + LastDecreaseDateTime?: Date | undefined; + NumberOfDecreasesToday?: number | undefined; + ReadCapacityUnits?: number | undefined; + WriteCapacityUnits?: number | undefined; +} +export interface GlobalSecondaryIndexDescription { + IndexName?: string | undefined; + KeySchema?: KeySchemaElement[] | undefined; + Projection?: Projection | undefined; + IndexStatus?: IndexStatus | undefined; + Backfilling?: boolean | undefined; + ProvisionedThroughput?: ProvisionedThroughputDescription | undefined; + IndexSizeBytes?: number | undefined; + ItemCount?: number | undefined; + IndexArn?: string | undefined; + OnDemandThroughput?: OnDemandThroughput | undefined; + WarmThroughput?: GlobalSecondaryIndexWarmThroughputDescription | undefined; +} +export interface LocalSecondaryIndexDescription { + IndexName?: string | undefined; + KeySchema?: KeySchemaElement[] | undefined; + Projection?: Projection | undefined; + IndexSizeBytes?: number | undefined; + ItemCount?: number | undefined; + IndexArn?: string | undefined; +} +export declare const MultiRegionConsistency: { + readonly EVENTUAL: "EVENTUAL"; + readonly STRONG: "STRONG"; +}; +export type MultiRegionConsistency = + (typeof MultiRegionConsistency)[keyof typeof MultiRegionConsistency]; +export interface RestoreSummary { + SourceBackupArn?: string | undefined; + SourceTableArn?: string | undefined; + 
RestoreDateTime: Date | undefined; + RestoreInProgress: boolean | undefined; +} +export interface TableDescription { + AttributeDefinitions?: AttributeDefinition[] | undefined; + TableName?: string | undefined; + KeySchema?: KeySchemaElement[] | undefined; + TableStatus?: TableStatus | undefined; + CreationDateTime?: Date | undefined; + ProvisionedThroughput?: ProvisionedThroughputDescription | undefined; + TableSizeBytes?: number | undefined; + ItemCount?: number | undefined; + TableArn?: string | undefined; + TableId?: string | undefined; + BillingModeSummary?: BillingModeSummary | undefined; + LocalSecondaryIndexes?: LocalSecondaryIndexDescription[] | undefined; + GlobalSecondaryIndexes?: GlobalSecondaryIndexDescription[] | undefined; + StreamSpecification?: StreamSpecification | undefined; + LatestStreamLabel?: string | undefined; + LatestStreamArn?: string | undefined; + GlobalTableVersion?: string | undefined; + Replicas?: ReplicaDescription[] | undefined; + RestoreSummary?: RestoreSummary | undefined; + SSEDescription?: SSEDescription | undefined; + ArchivalSummary?: ArchivalSummary | undefined; + TableClassSummary?: TableClassSummary | undefined; + DeletionProtectionEnabled?: boolean | undefined; + OnDemandThroughput?: OnDemandThroughput | undefined; + WarmThroughput?: TableWarmThroughputDescription | undefined; + MultiRegionConsistency?: MultiRegionConsistency | undefined; +} +export interface CreateTableOutput { + TableDescription?: TableDescription | undefined; +} +export declare class ResourceInUseException extends __BaseException { + readonly name: "ResourceInUseException"; + readonly $fault: "client"; + constructor( + opts: __ExceptionOptionType + ); +} +export interface CsvOptions { + Delimiter?: string | undefined; + HeaderList?: string[] | undefined; +} +export interface DeleteBackupInput { + BackupArn: string | undefined; +} +export interface DeleteBackupOutput { + BackupDescription?: BackupDescription | undefined; +} +export interface 
DeleteGlobalSecondaryIndexAction { + IndexName: string | undefined; +} +export declare const ReturnValue: { + readonly ALL_NEW: "ALL_NEW"; + readonly ALL_OLD: "ALL_OLD"; + readonly NONE: "NONE"; + readonly UPDATED_NEW: "UPDATED_NEW"; + readonly UPDATED_OLD: "UPDATED_OLD"; +}; +export type ReturnValue = (typeof ReturnValue)[keyof typeof ReturnValue]; +export declare class ReplicatedWriteConflictException extends __BaseException { + readonly name: "ReplicatedWriteConflictException"; + readonly $fault: "client"; + constructor( + opts: __ExceptionOptionType< + ReplicatedWriteConflictException, + __BaseException + > + ); +} +export declare class TransactionConflictException extends __BaseException { + readonly name: "TransactionConflictException"; + readonly $fault: "client"; + constructor( + opts: __ExceptionOptionType + ); +} +export interface DeleteReplicaAction { + RegionName: string | undefined; +} +export interface DeleteReplicationGroupMemberAction { + RegionName: string | undefined; +} +export interface DeleteResourcePolicyInput { + ResourceArn: string | undefined; + ExpectedRevisionId?: string | undefined; +} +export interface DeleteResourcePolicyOutput { + RevisionId?: string | undefined; +} +export declare class PolicyNotFoundException extends __BaseException { + readonly name: "PolicyNotFoundException"; + readonly $fault: "client"; + constructor( + opts: __ExceptionOptionType + ); +} +export interface DeleteTableInput { + TableName: string | undefined; +} +export interface DeleteTableOutput { + TableDescription?: TableDescription | undefined; +} +export interface DescribeBackupInput { + BackupArn: string | undefined; +} +export interface DescribeBackupOutput { + BackupDescription?: BackupDescription | undefined; +} +export interface DescribeContinuousBackupsInput { + TableName: string | undefined; +} +export interface DescribeContinuousBackupsOutput { + ContinuousBackupsDescription?: ContinuousBackupsDescription | undefined; +} +export interface 
DescribeContributorInsightsInput { + TableName: string | undefined; + IndexName?: string | undefined; +} +export interface FailureException { + ExceptionName?: string | undefined; + ExceptionDescription?: string | undefined; +} +export interface DescribeContributorInsightsOutput { + TableName?: string | undefined; + IndexName?: string | undefined; + ContributorInsightsRuleList?: string[] | undefined; + ContributorInsightsStatus?: ContributorInsightsStatus | undefined; + LastUpdateDateTime?: Date | undefined; + FailureException?: FailureException | undefined; +} +export interface DescribeEndpointsRequest {} +export interface Endpoint { + Address: string | undefined; + CachePeriodInMinutes: number | undefined; +} +export interface DescribeEndpointsResponse { + Endpoints: Endpoint[] | undefined; +} +export interface DescribeExportInput { + ExportArn: string | undefined; +} +export declare const ExportFormat: { + readonly DYNAMODB_JSON: "DYNAMODB_JSON"; + readonly ION: "ION"; +}; +export type ExportFormat = (typeof ExportFormat)[keyof typeof ExportFormat]; +export declare const ExportStatus: { + readonly COMPLETED: "COMPLETED"; + readonly FAILED: "FAILED"; + readonly IN_PROGRESS: "IN_PROGRESS"; +}; +export type ExportStatus = (typeof ExportStatus)[keyof typeof ExportStatus]; +export declare const ExportType: { + readonly FULL_EXPORT: "FULL_EXPORT"; + readonly INCREMENTAL_EXPORT: "INCREMENTAL_EXPORT"; +}; +export type ExportType = (typeof ExportType)[keyof typeof ExportType]; +export declare const ExportViewType: { + readonly NEW_AND_OLD_IMAGES: "NEW_AND_OLD_IMAGES"; + readonly NEW_IMAGE: "NEW_IMAGE"; +}; +export type ExportViewType = + (typeof ExportViewType)[keyof typeof ExportViewType]; +export interface IncrementalExportSpecification { + ExportFromTime?: Date | undefined; + ExportToTime?: Date | undefined; + ExportViewType?: ExportViewType | undefined; +} +export declare const S3SseAlgorithm: { + readonly AES256: "AES256"; + readonly KMS: "KMS"; +}; +export type 
S3SseAlgorithm = + (typeof S3SseAlgorithm)[keyof typeof S3SseAlgorithm]; +export interface ExportDescription { + ExportArn?: string | undefined; + ExportStatus?: ExportStatus | undefined; + StartTime?: Date | undefined; + EndTime?: Date | undefined; + ExportManifest?: string | undefined; + TableArn?: string | undefined; + TableId?: string | undefined; + ExportTime?: Date | undefined; + ClientToken?: string | undefined; + S3Bucket?: string | undefined; + S3BucketOwner?: string | undefined; + S3Prefix?: string | undefined; + S3SseAlgorithm?: S3SseAlgorithm | undefined; + S3SseKmsKeyId?: string | undefined; + FailureCode?: string | undefined; + FailureMessage?: string | undefined; + ExportFormat?: ExportFormat | undefined; + BilledSizeBytes?: number | undefined; + ItemCount?: number | undefined; + ExportType?: ExportType | undefined; + IncrementalExportSpecification?: IncrementalExportSpecification | undefined; +} +export interface DescribeExportOutput { + ExportDescription?: ExportDescription | undefined; +} +export declare class ExportNotFoundException extends __BaseException { + readonly name: "ExportNotFoundException"; + readonly $fault: "client"; + constructor( + opts: __ExceptionOptionType + ); +} +export interface DescribeGlobalTableInput { + GlobalTableName: string | undefined; +} +export interface DescribeGlobalTableOutput { + GlobalTableDescription?: GlobalTableDescription | undefined; +} +export declare class GlobalTableNotFoundException extends __BaseException { + readonly name: "GlobalTableNotFoundException"; + readonly $fault: "client"; + constructor( + opts: __ExceptionOptionType + ); +} +export interface DescribeGlobalTableSettingsInput { + GlobalTableName: string | undefined; +} +export interface ReplicaGlobalSecondaryIndexSettingsDescription { + IndexName: string | undefined; + IndexStatus?: IndexStatus | undefined; + ProvisionedReadCapacityUnits?: number | undefined; + ProvisionedReadCapacityAutoScalingSettings?: + | AutoScalingSettingsDescription + 
| undefined; + ProvisionedWriteCapacityUnits?: number | undefined; + ProvisionedWriteCapacityAutoScalingSettings?: + | AutoScalingSettingsDescription + | undefined; +} +export interface ReplicaSettingsDescription { + RegionName: string | undefined; + ReplicaStatus?: ReplicaStatus | undefined; + ReplicaBillingModeSummary?: BillingModeSummary | undefined; + ReplicaProvisionedReadCapacityUnits?: number | undefined; + ReplicaProvisionedReadCapacityAutoScalingSettings?: + | AutoScalingSettingsDescription + | undefined; + ReplicaProvisionedWriteCapacityUnits?: number | undefined; + ReplicaProvisionedWriteCapacityAutoScalingSettings?: + | AutoScalingSettingsDescription + | undefined; + ReplicaGlobalSecondaryIndexSettings?: + | ReplicaGlobalSecondaryIndexSettingsDescription[] + | undefined; + ReplicaTableClassSummary?: TableClassSummary | undefined; +} +export interface DescribeGlobalTableSettingsOutput { + GlobalTableName?: string | undefined; + ReplicaSettings?: ReplicaSettingsDescription[] | undefined; +} +export interface DescribeImportInput { + ImportArn: string | undefined; +} +export declare const ImportStatus: { + readonly CANCELLED: "CANCELLED"; + readonly CANCELLING: "CANCELLING"; + readonly COMPLETED: "COMPLETED"; + readonly FAILED: "FAILED"; + readonly IN_PROGRESS: "IN_PROGRESS"; +}; +export type ImportStatus = (typeof ImportStatus)[keyof typeof ImportStatus]; +export declare const InputCompressionType: { + readonly GZIP: "GZIP"; + readonly NONE: "NONE"; + readonly ZSTD: "ZSTD"; +}; +export type InputCompressionType = + (typeof InputCompressionType)[keyof typeof InputCompressionType]; +export declare const InputFormat: { + readonly CSV: "CSV"; + readonly DYNAMODB_JSON: "DYNAMODB_JSON"; + readonly ION: "ION"; +}; +export type InputFormat = (typeof InputFormat)[keyof typeof InputFormat]; +export interface InputFormatOptions { + Csv?: CsvOptions | undefined; +} +export interface S3BucketSource { + S3BucketOwner?: string | undefined; + S3Bucket: string | undefined; 
+ S3KeyPrefix?: string | undefined; +} +export interface TableCreationParameters { + TableName: string | undefined; + AttributeDefinitions: AttributeDefinition[] | undefined; + KeySchema: KeySchemaElement[] | undefined; + BillingMode?: BillingMode | undefined; + ProvisionedThroughput?: ProvisionedThroughput | undefined; + OnDemandThroughput?: OnDemandThroughput | undefined; + SSESpecification?: SSESpecification | undefined; + GlobalSecondaryIndexes?: GlobalSecondaryIndex[] | undefined; +} +export interface ImportTableDescription { + ImportArn?: string | undefined; + ImportStatus?: ImportStatus | undefined; + TableArn?: string | undefined; + TableId?: string | undefined; + ClientToken?: string | undefined; + S3BucketSource?: S3BucketSource | undefined; + ErrorCount?: number | undefined; + CloudWatchLogGroupArn?: string | undefined; + InputFormat?: InputFormat | undefined; + InputFormatOptions?: InputFormatOptions | undefined; + InputCompressionType?: InputCompressionType | undefined; + TableCreationParameters?: TableCreationParameters | undefined; + StartTime?: Date | undefined; + EndTime?: Date | undefined; + ProcessedSizeBytes?: number | undefined; + ProcessedItemCount?: number | undefined; + ImportedItemCount?: number | undefined; + FailureCode?: string | undefined; + FailureMessage?: string | undefined; +} +export interface DescribeImportOutput { + ImportTableDescription: ImportTableDescription | undefined; +} +export declare class ImportNotFoundException extends __BaseException { + readonly name: "ImportNotFoundException"; + readonly $fault: "client"; + constructor( + opts: __ExceptionOptionType + ); +} +export interface DescribeKinesisStreamingDestinationInput { + TableName: string | undefined; +} +export declare const DestinationStatus: { + readonly ACTIVE: "ACTIVE"; + readonly DISABLED: "DISABLED"; + readonly DISABLING: "DISABLING"; + readonly ENABLE_FAILED: "ENABLE_FAILED"; + readonly ENABLING: "ENABLING"; + readonly UPDATING: "UPDATING"; +}; +export type 
DestinationStatus = + (typeof DestinationStatus)[keyof typeof DestinationStatus]; +export interface KinesisDataStreamDestination { + StreamArn?: string | undefined; + DestinationStatus?: DestinationStatus | undefined; + DestinationStatusDescription?: string | undefined; + ApproximateCreationDateTimePrecision?: + | ApproximateCreationDateTimePrecision + | undefined; +} +export interface DescribeKinesisStreamingDestinationOutput { + TableName?: string | undefined; + KinesisDataStreamDestinations?: KinesisDataStreamDestination[] | undefined; +} +export interface DescribeLimitsInput {} +export interface DescribeLimitsOutput { + AccountMaxReadCapacityUnits?: number | undefined; + AccountMaxWriteCapacityUnits?: number | undefined; + TableMaxReadCapacityUnits?: number | undefined; + TableMaxWriteCapacityUnits?: number | undefined; +} +export interface DescribeTableInput { + TableName: string | undefined; +} +export interface DescribeTableOutput { + Table?: TableDescription | undefined; +} +export interface DescribeTableReplicaAutoScalingInput { + TableName: string | undefined; +} +export interface ReplicaGlobalSecondaryIndexAutoScalingDescription { + IndexName?: string | undefined; + IndexStatus?: IndexStatus | undefined; + ProvisionedReadCapacityAutoScalingSettings?: + | AutoScalingSettingsDescription + | undefined; + ProvisionedWriteCapacityAutoScalingSettings?: + | AutoScalingSettingsDescription + | undefined; +} +export interface ReplicaAutoScalingDescription { + RegionName?: string | undefined; + GlobalSecondaryIndexes?: + | ReplicaGlobalSecondaryIndexAutoScalingDescription[] + | undefined; + ReplicaProvisionedReadCapacityAutoScalingSettings?: + | AutoScalingSettingsDescription + | undefined; + ReplicaProvisionedWriteCapacityAutoScalingSettings?: + | AutoScalingSettingsDescription + | undefined; + ReplicaStatus?: ReplicaStatus | undefined; +} +export interface TableAutoScalingDescription { + TableName?: string | undefined; + TableStatus?: TableStatus | undefined; + 
Replicas?: ReplicaAutoScalingDescription[] | undefined; +} +export interface DescribeTableReplicaAutoScalingOutput { + TableAutoScalingDescription?: TableAutoScalingDescription | undefined; +} +export interface DescribeTimeToLiveInput { + TableName: string | undefined; +} +export interface DescribeTimeToLiveOutput { + TimeToLiveDescription?: TimeToLiveDescription | undefined; +} +export interface EnableKinesisStreamingConfiguration { + ApproximateCreationDateTimePrecision?: + | ApproximateCreationDateTimePrecision + | undefined; +} +export interface KinesisStreamingDestinationInput { + TableName: string | undefined; + StreamArn: string | undefined; + EnableKinesisStreamingConfiguration?: + | EnableKinesisStreamingConfiguration + | undefined; +} +export interface KinesisStreamingDestinationOutput { + TableName?: string | undefined; + StreamArn?: string | undefined; + DestinationStatus?: DestinationStatus | undefined; + EnableKinesisStreamingConfiguration?: + | EnableKinesisStreamingConfiguration + | undefined; +} +export declare class DuplicateItemException extends __BaseException { + readonly name: "DuplicateItemException"; + readonly $fault: "client"; + constructor( + opts: __ExceptionOptionType + ); +} +export declare class IdempotentParameterMismatchException extends __BaseException { + readonly name: "IdempotentParameterMismatchException"; + readonly $fault: "client"; + Message?: string | undefined; + constructor( + opts: __ExceptionOptionType< + IdempotentParameterMismatchException, + __BaseException + > + ); +} +export declare class TransactionInProgressException extends __BaseException { + readonly name: "TransactionInProgressException"; + readonly $fault: "client"; + Message?: string | undefined; + constructor( + opts: __ExceptionOptionType + ); +} +export declare class ExportConflictException extends __BaseException { + readonly name: "ExportConflictException"; + readonly $fault: "client"; + constructor( + opts: __ExceptionOptionType + ); +} +export 
interface ExportTableToPointInTimeInput { + TableArn: string | undefined; + ExportTime?: Date | undefined; + ClientToken?: string | undefined; + S3Bucket: string | undefined; + S3BucketOwner?: string | undefined; + S3Prefix?: string | undefined; + S3SseAlgorithm?: S3SseAlgorithm | undefined; + S3SseKmsKeyId?: string | undefined; + ExportFormat?: ExportFormat | undefined; + ExportType?: ExportType | undefined; + IncrementalExportSpecification?: IncrementalExportSpecification | undefined; +} +export interface ExportTableToPointInTimeOutput { + ExportDescription?: ExportDescription | undefined; +} +export declare class InvalidExportTimeException extends __BaseException { + readonly name: "InvalidExportTimeException"; + readonly $fault: "client"; + constructor( + opts: __ExceptionOptionType + ); +} +export declare class PointInTimeRecoveryUnavailableException extends __BaseException { + readonly name: "PointInTimeRecoveryUnavailableException"; + readonly $fault: "client"; + constructor( + opts: __ExceptionOptionType< + PointInTimeRecoveryUnavailableException, + __BaseException + > + ); +} +export interface GetResourcePolicyInput { + ResourceArn: string | undefined; +} +export interface GetResourcePolicyOutput { + Policy?: string | undefined; + RevisionId?: string | undefined; +} +export declare class ImportConflictException extends __BaseException { + readonly name: "ImportConflictException"; + readonly $fault: "client"; + constructor( + opts: __ExceptionOptionType + ); +} +export interface ImportTableInput { + ClientToken?: string | undefined; + S3BucketSource: S3BucketSource | undefined; + InputFormat: InputFormat | undefined; + InputFormatOptions?: InputFormatOptions | undefined; + InputCompressionType?: InputCompressionType | undefined; + TableCreationParameters: TableCreationParameters | undefined; +} +export interface ImportTableOutput { + ImportTableDescription: ImportTableDescription | undefined; +} +export interface ListBackupsInput { + TableName?: string | 
undefined; + Limit?: number | undefined; + TimeRangeLowerBound?: Date | undefined; + TimeRangeUpperBound?: Date | undefined; + ExclusiveStartBackupArn?: string | undefined; + BackupType?: BackupTypeFilter | undefined; +} +export interface ListBackupsOutput { + BackupSummaries?: BackupSummary[] | undefined; + LastEvaluatedBackupArn?: string | undefined; +} +export interface ListContributorInsightsInput { + TableName?: string | undefined; + NextToken?: string | undefined; + MaxResults?: number | undefined; +} +export interface ListContributorInsightsOutput { + ContributorInsightsSummaries?: ContributorInsightsSummary[] | undefined; + NextToken?: string | undefined; +} +export interface ListExportsInput { + TableArn?: string | undefined; + MaxResults?: number | undefined; + NextToken?: string | undefined; +} +export interface ExportSummary { + ExportArn?: string | undefined; + ExportStatus?: ExportStatus | undefined; + ExportType?: ExportType | undefined; +} +export interface ListExportsOutput { + ExportSummaries?: ExportSummary[] | undefined; + NextToken?: string | undefined; +} +export interface ListGlobalTablesInput { + ExclusiveStartGlobalTableName?: string | undefined; + Limit?: number | undefined; + RegionName?: string | undefined; +} +export interface GlobalTable { + GlobalTableName?: string | undefined; + ReplicationGroup?: Replica[] | undefined; +} +export interface ListGlobalTablesOutput { + GlobalTables?: GlobalTable[] | undefined; + LastEvaluatedGlobalTableName?: string | undefined; +} +export interface ListImportsInput { + TableArn?: string | undefined; + PageSize?: number | undefined; + NextToken?: string | undefined; +} +export interface ImportSummary { + ImportArn?: string | undefined; + ImportStatus?: ImportStatus | undefined; + TableArn?: string | undefined; + S3BucketSource?: S3BucketSource | undefined; + CloudWatchLogGroupArn?: string | undefined; + InputFormat?: InputFormat | undefined; + StartTime?: Date | undefined; + EndTime?: Date | undefined; 
+} +export interface ListImportsOutput { + ImportSummaryList?: ImportSummary[] | undefined; + NextToken?: string | undefined; +} +export interface ListTablesInput { + ExclusiveStartTableName?: string | undefined; + Limit?: number | undefined; +} +export interface ListTablesOutput { + TableNames?: string[] | undefined; + LastEvaluatedTableName?: string | undefined; +} +export interface ListTagsOfResourceInput { + ResourceArn: string | undefined; + NextToken?: string | undefined; +} +export interface ListTagsOfResourceOutput { + Tags?: Tag[] | undefined; + NextToken?: string | undefined; +} +export interface PutResourcePolicyInput { + ResourceArn: string | undefined; + Policy: string | undefined; + ExpectedRevisionId?: string | undefined; + ConfirmRemoveSelfResourceAccess?: boolean | undefined; +} +export interface PutResourcePolicyOutput { + RevisionId?: string | undefined; +} +export declare const Select: { + readonly ALL_ATTRIBUTES: "ALL_ATTRIBUTES"; + readonly ALL_PROJECTED_ATTRIBUTES: "ALL_PROJECTED_ATTRIBUTES"; + readonly COUNT: "COUNT"; + readonly SPECIFIC_ATTRIBUTES: "SPECIFIC_ATTRIBUTES"; +}; +export type Select = (typeof Select)[keyof typeof Select]; +export interface RestoreTableFromBackupInput { + TargetTableName: string | undefined; + BackupArn: string | undefined; + BillingModeOverride?: BillingMode | undefined; + GlobalSecondaryIndexOverride?: GlobalSecondaryIndex[] | undefined; + LocalSecondaryIndexOverride?: LocalSecondaryIndex[] | undefined; + ProvisionedThroughputOverride?: ProvisionedThroughput | undefined; + OnDemandThroughputOverride?: OnDemandThroughput | undefined; + SSESpecificationOverride?: SSESpecification | undefined; +} +export interface RestoreTableFromBackupOutput { + TableDescription?: TableDescription | undefined; +} +export declare class TableAlreadyExistsException extends __BaseException { + readonly name: "TableAlreadyExistsException"; + readonly $fault: "client"; + constructor( + opts: __ExceptionOptionType + ); +} +export 
declare class InvalidRestoreTimeException extends __BaseException { + readonly name: "InvalidRestoreTimeException"; + readonly $fault: "client"; + constructor( + opts: __ExceptionOptionType + ); +} +export interface RestoreTableToPointInTimeInput { + SourceTableArn?: string | undefined; + SourceTableName?: string | undefined; + TargetTableName: string | undefined; + UseLatestRestorableTime?: boolean | undefined; + RestoreDateTime?: Date | undefined; + BillingModeOverride?: BillingMode | undefined; + GlobalSecondaryIndexOverride?: GlobalSecondaryIndex[] | undefined; + LocalSecondaryIndexOverride?: LocalSecondaryIndex[] | undefined; + ProvisionedThroughputOverride?: ProvisionedThroughput | undefined; + OnDemandThroughputOverride?: OnDemandThroughput | undefined; + SSESpecificationOverride?: SSESpecification | undefined; +} +export interface RestoreTableToPointInTimeOutput { + TableDescription?: TableDescription | undefined; +} +export interface TagResourceInput { + ResourceArn: string | undefined; + Tags: Tag[] | undefined; +} +export interface UntagResourceInput { + ResourceArn: string | undefined; + TagKeys: string[] | undefined; +} +export interface PointInTimeRecoverySpecification { + PointInTimeRecoveryEnabled: boolean | undefined; + RecoveryPeriodInDays?: number | undefined; +} +export interface UpdateContinuousBackupsInput { + TableName: string | undefined; + PointInTimeRecoverySpecification: + | PointInTimeRecoverySpecification + | undefined; +} +export interface UpdateContinuousBackupsOutput { + ContinuousBackupsDescription?: ContinuousBackupsDescription | undefined; +} +export interface UpdateContributorInsightsInput { + TableName: string | undefined; + IndexName?: string | undefined; + ContributorInsightsAction: ContributorInsightsAction | undefined; +} +export interface UpdateContributorInsightsOutput { + TableName?: string | undefined; + IndexName?: string | undefined; + ContributorInsightsStatus?: ContributorInsightsStatus | undefined; +} +export 
declare class ReplicaAlreadyExistsException extends __BaseException { + readonly name: "ReplicaAlreadyExistsException"; + readonly $fault: "client"; + constructor( + opts: __ExceptionOptionType + ); +} +export declare class ReplicaNotFoundException extends __BaseException { + readonly name: "ReplicaNotFoundException"; + readonly $fault: "client"; + constructor( + opts: __ExceptionOptionType + ); +} +export interface ReplicaUpdate { + Create?: CreateReplicaAction | undefined; + Delete?: DeleteReplicaAction | undefined; +} +export interface UpdateGlobalTableInput { + GlobalTableName: string | undefined; + ReplicaUpdates: ReplicaUpdate[] | undefined; +} +export interface UpdateGlobalTableOutput { + GlobalTableDescription?: GlobalTableDescription | undefined; +} +export declare class IndexNotFoundException extends __BaseException { + readonly name: "IndexNotFoundException"; + readonly $fault: "client"; + constructor( + opts: __ExceptionOptionType + ); +} +export interface GlobalTableGlobalSecondaryIndexSettingsUpdate { + IndexName: string | undefined; + ProvisionedWriteCapacityUnits?: number | undefined; + ProvisionedWriteCapacityAutoScalingSettingsUpdate?: + | AutoScalingSettingsUpdate + | undefined; +} +export interface ReplicaGlobalSecondaryIndexSettingsUpdate { + IndexName: string | undefined; + ProvisionedReadCapacityUnits?: number | undefined; + ProvisionedReadCapacityAutoScalingSettingsUpdate?: + | AutoScalingSettingsUpdate + | undefined; +} +export interface ReplicaSettingsUpdate { + RegionName: string | undefined; + ReplicaProvisionedReadCapacityUnits?: number | undefined; + ReplicaProvisionedReadCapacityAutoScalingSettingsUpdate?: + | AutoScalingSettingsUpdate + | undefined; + ReplicaGlobalSecondaryIndexSettingsUpdate?: + | ReplicaGlobalSecondaryIndexSettingsUpdate[] + | undefined; + ReplicaTableClass?: TableClass | undefined; +} +export interface UpdateGlobalTableSettingsInput { + GlobalTableName: string | undefined; + GlobalTableBillingMode?: BillingMode | 
undefined; + GlobalTableProvisionedWriteCapacityUnits?: number | undefined; + GlobalTableProvisionedWriteCapacityAutoScalingSettingsUpdate?: + | AutoScalingSettingsUpdate + | undefined; + GlobalTableGlobalSecondaryIndexSettingsUpdate?: + | GlobalTableGlobalSecondaryIndexSettingsUpdate[] + | undefined; + ReplicaSettingsUpdate?: ReplicaSettingsUpdate[] | undefined; +} +export interface UpdateGlobalTableSettingsOutput { + GlobalTableName?: string | undefined; + ReplicaSettings?: ReplicaSettingsDescription[] | undefined; +} +export interface UpdateKinesisStreamingConfiguration { + ApproximateCreationDateTimePrecision?: + | ApproximateCreationDateTimePrecision + | undefined; +} +export interface UpdateKinesisStreamingDestinationInput { + TableName: string | undefined; + StreamArn: string | undefined; + UpdateKinesisStreamingConfiguration?: + | UpdateKinesisStreamingConfiguration + | undefined; +} +export interface UpdateKinesisStreamingDestinationOutput { + TableName?: string | undefined; + StreamArn?: string | undefined; + DestinationStatus?: DestinationStatus | undefined; + UpdateKinesisStreamingConfiguration?: + | UpdateKinesisStreamingConfiguration + | undefined; +} +export interface UpdateGlobalSecondaryIndexAction { + IndexName: string | undefined; + ProvisionedThroughput?: ProvisionedThroughput | undefined; + OnDemandThroughput?: OnDemandThroughput | undefined; + WarmThroughput?: WarmThroughput | undefined; +} +export interface GlobalSecondaryIndexUpdate { + Update?: UpdateGlobalSecondaryIndexAction | undefined; + Create?: CreateGlobalSecondaryIndexAction | undefined; + Delete?: DeleteGlobalSecondaryIndexAction | undefined; +} +export interface UpdateReplicationGroupMemberAction { + RegionName: string | undefined; + KMSMasterKeyId?: string | undefined; + ProvisionedThroughputOverride?: ProvisionedThroughputOverride | undefined; + OnDemandThroughputOverride?: OnDemandThroughputOverride | undefined; + GlobalSecondaryIndexes?: ReplicaGlobalSecondaryIndex[] | 
undefined; + TableClassOverride?: TableClass | undefined; +} +export interface ReplicationGroupUpdate { + Create?: CreateReplicationGroupMemberAction | undefined; + Update?: UpdateReplicationGroupMemberAction | undefined; + Delete?: DeleteReplicationGroupMemberAction | undefined; +} +export interface UpdateTableInput { + AttributeDefinitions?: AttributeDefinition[] | undefined; + TableName: string | undefined; + BillingMode?: BillingMode | undefined; + ProvisionedThroughput?: ProvisionedThroughput | undefined; + GlobalSecondaryIndexUpdates?: GlobalSecondaryIndexUpdate[] | undefined; + StreamSpecification?: StreamSpecification | undefined; + SSESpecification?: SSESpecification | undefined; + ReplicaUpdates?: ReplicationGroupUpdate[] | undefined; + TableClass?: TableClass | undefined; + DeletionProtectionEnabled?: boolean | undefined; + MultiRegionConsistency?: MultiRegionConsistency | undefined; + OnDemandThroughput?: OnDemandThroughput | undefined; + WarmThroughput?: WarmThroughput | undefined; +} +export interface UpdateTableOutput { + TableDescription?: TableDescription | undefined; +} +export interface GlobalSecondaryIndexAutoScalingUpdate { + IndexName?: string | undefined; + ProvisionedWriteCapacityAutoScalingUpdate?: + | AutoScalingSettingsUpdate + | undefined; +} +export interface ReplicaGlobalSecondaryIndexAutoScalingUpdate { + IndexName?: string | undefined; + ProvisionedReadCapacityAutoScalingUpdate?: + | AutoScalingSettingsUpdate + | undefined; +} +export interface ReplicaAutoScalingUpdate { + RegionName: string | undefined; + ReplicaGlobalSecondaryIndexUpdates?: + | ReplicaGlobalSecondaryIndexAutoScalingUpdate[] + | undefined; + ReplicaProvisionedReadCapacityAutoScalingUpdate?: + | AutoScalingSettingsUpdate + | undefined; +} +export interface UpdateTableReplicaAutoScalingInput { + GlobalSecondaryIndexUpdates?: + | GlobalSecondaryIndexAutoScalingUpdate[] + | undefined; + TableName: string | undefined; + ProvisionedWriteCapacityAutoScalingUpdate?: + | 
AutoScalingSettingsUpdate + | undefined; + ReplicaUpdates?: ReplicaAutoScalingUpdate[] | undefined; +} +export interface UpdateTableReplicaAutoScalingOutput { + TableAutoScalingDescription?: TableAutoScalingDescription | undefined; +} +export interface TimeToLiveSpecification { + Enabled: boolean | undefined; + AttributeName: string | undefined; +} +export interface UpdateTimeToLiveInput { + TableName: string | undefined; + TimeToLiveSpecification: TimeToLiveSpecification | undefined; +} +export interface UpdateTimeToLiveOutput { + TimeToLiveSpecification?: TimeToLiveSpecification | undefined; +} +export type AttributeValue = + | AttributeValue.BMember + | AttributeValue.BOOLMember + | AttributeValue.BSMember + | AttributeValue.LMember + | AttributeValue.MMember + | AttributeValue.NMember + | AttributeValue.NSMember + | AttributeValue.NULLMember + | AttributeValue.SMember + | AttributeValue.SSMember + | AttributeValue.$UnknownMember; +export declare namespace AttributeValue { + interface SMember { + S: string; + N?: never; + B?: never; + SS?: never; + NS?: never; + BS?: never; + M?: never; + L?: never; + NULL?: never; + BOOL?: never; + $unknown?: never; + } + interface NMember { + S?: never; + N: string; + B?: never; + SS?: never; + NS?: never; + BS?: never; + M?: never; + L?: never; + NULL?: never; + BOOL?: never; + $unknown?: never; + } + interface BMember { + S?: never; + N?: never; + B: Uint8Array; + SS?: never; + NS?: never; + BS?: never; + M?: never; + L?: never; + NULL?: never; + BOOL?: never; + $unknown?: never; + } + interface SSMember { + S?: never; + N?: never; + B?: never; + SS: string[]; + NS?: never; + BS?: never; + M?: never; + L?: never; + NULL?: never; + BOOL?: never; + $unknown?: never; + } + interface NSMember { + S?: never; + N?: never; + B?: never; + SS?: never; + NS: string[]; + BS?: never; + M?: never; + L?: never; + NULL?: never; + BOOL?: never; + $unknown?: never; + } + interface BSMember { + S?: never; + N?: never; + B?: never; + SS?: 
never; + NS?: never; + BS: Uint8Array[]; + M?: never; + L?: never; + NULL?: never; + BOOL?: never; + $unknown?: never; + } + interface MMember { + S?: never; + N?: never; + B?: never; + SS?: never; + NS?: never; + BS?: never; + M: Record; + L?: never; + NULL?: never; + BOOL?: never; + $unknown?: never; + } + interface LMember { + S?: never; + N?: never; + B?: never; + SS?: never; + NS?: never; + BS?: never; + M?: never; + L: AttributeValue[]; + NULL?: never; + BOOL?: never; + $unknown?: never; + } + interface NULLMember { + S?: never; + N?: never; + B?: never; + SS?: never; + NS?: never; + BS?: never; + M?: never; + L?: never; + NULL: boolean; + BOOL?: never; + $unknown?: never; + } + interface BOOLMember { + S?: never; + N?: never; + B?: never; + SS?: never; + NS?: never; + BS?: never; + M?: never; + L?: never; + NULL?: never; + BOOL: boolean; + $unknown?: never; + } + interface $UnknownMember { + S?: never; + N?: never; + B?: never; + SS?: never; + NS?: never; + BS?: never; + M?: never; + L?: never; + NULL?: never; + BOOL?: never; + $unknown: [string, any]; + } + interface Visitor { + S: (value: string) => T; + N: (value: string) => T; + B: (value: Uint8Array) => T; + SS: (value: string[]) => T; + NS: (value: string[]) => T; + BS: (value: Uint8Array[]) => T; + M: (value: Record) => T; + L: (value: AttributeValue[]) => T; + NULL: (value: boolean) => T; + BOOL: (value: boolean) => T; + _: (name: string, value: any) => T; + } + const visit: (value: AttributeValue, visitor: Visitor) => T; +} +export interface AttributeValueUpdate { + Value?: AttributeValue | undefined; + Action?: AttributeAction | undefined; +} +export interface BatchStatementError { + Code?: BatchStatementErrorCodeEnum | undefined; + Message?: string | undefined; + Item?: Record | undefined; +} +export interface BatchStatementRequest { + Statement: string | undefined; + Parameters?: AttributeValue[] | undefined; + ConsistentRead?: boolean | undefined; + ReturnValuesOnConditionCheckFailure?: + | 
ReturnValuesOnConditionCheckFailure + | undefined; +} +export interface CancellationReason { + Item?: Record | undefined; + Code?: string | undefined; + Message?: string | undefined; +} +export interface Condition { + AttributeValueList?: AttributeValue[] | undefined; + ComparisonOperator: ComparisonOperator | undefined; +} +export declare class ConditionalCheckFailedException extends __BaseException { + readonly name: "ConditionalCheckFailedException"; + readonly $fault: "client"; + Item?: Record | undefined; + constructor( + opts: __ExceptionOptionType< + ConditionalCheckFailedException, + __BaseException + > + ); +} +export interface DeleteRequest { + Key: Record | undefined; +} +export interface ExecuteStatementInput { + Statement: string | undefined; + Parameters?: AttributeValue[] | undefined; + ConsistentRead?: boolean | undefined; + NextToken?: string | undefined; + ReturnConsumedCapacity?: ReturnConsumedCapacity | undefined; + Limit?: number | undefined; + ReturnValuesOnConditionCheckFailure?: + | ReturnValuesOnConditionCheckFailure + | undefined; +} +export interface Get { + Key: Record | undefined; + TableName: string | undefined; + ProjectionExpression?: string | undefined; + ExpressionAttributeNames?: Record | undefined; +} +export interface GetItemInput { + TableName: string | undefined; + Key: Record | undefined; + AttributesToGet?: string[] | undefined; + ConsistentRead?: boolean | undefined; + ReturnConsumedCapacity?: ReturnConsumedCapacity | undefined; + ProjectionExpression?: string | undefined; + ExpressionAttributeNames?: Record | undefined; +} +export interface GetItemOutput { + Item?: Record | undefined; + ConsumedCapacity?: ConsumedCapacity | undefined; +} +export interface ItemCollectionMetrics { + ItemCollectionKey?: Record | undefined; + SizeEstimateRangeGB?: number[] | undefined; +} +export interface ItemResponse { + Item?: Record | undefined; +} +export interface ParameterizedStatement { + Statement: string | undefined; + Parameters?: 
AttributeValue[] | undefined; + ReturnValuesOnConditionCheckFailure?: + | ReturnValuesOnConditionCheckFailure + | undefined; +} +export interface PutRequest { + Item: Record | undefined; +} +export interface KeysAndAttributes { + Keys: Record[] | undefined; + AttributesToGet?: string[] | undefined; + ConsistentRead?: boolean | undefined; + ProjectionExpression?: string | undefined; + ExpressionAttributeNames?: Record | undefined; +} +export interface TransactGetItem { + Get: Get | undefined; +} +export interface BatchExecuteStatementInput { + Statements: BatchStatementRequest[] | undefined; + ReturnConsumedCapacity?: ReturnConsumedCapacity | undefined; +} +export interface ExecuteTransactionInput { + TransactStatements: ParameterizedStatement[] | undefined; + ClientRequestToken?: string | undefined; + ReturnConsumedCapacity?: ReturnConsumedCapacity | undefined; +} +export interface ExecuteTransactionOutput { + Responses?: ItemResponse[] | undefined; + ConsumedCapacity?: ConsumedCapacity[] | undefined; +} +export interface TransactGetItemsOutput { + ConsumedCapacity?: ConsumedCapacity[] | undefined; + Responses?: ItemResponse[] | undefined; +} +export declare class TransactionCanceledException extends __BaseException { + readonly name: "TransactionCanceledException"; + readonly $fault: "client"; + Message?: string | undefined; + CancellationReasons?: CancellationReason[] | undefined; + constructor( + opts: __ExceptionOptionType + ); +} +export interface BatchGetItemInput { + RequestItems: Record | undefined; + ReturnConsumedCapacity?: ReturnConsumedCapacity | undefined; +} +export interface ExpectedAttributeValue { + Value?: AttributeValue | undefined; + Exists?: boolean | undefined; + ComparisonOperator?: ComparisonOperator | undefined; + AttributeValueList?: AttributeValue[] | undefined; +} +export interface TransactGetItemsInput { + TransactItems: TransactGetItem[] | undefined; + ReturnConsumedCapacity?: ReturnConsumedCapacity | undefined; +} +export interface 
TransactWriteItemsOutput { + ConsumedCapacity?: ConsumedCapacity[] | undefined; + ItemCollectionMetrics?: Record | undefined; +} +export interface ConditionCheck { + Key: Record | undefined; + TableName: string | undefined; + ConditionExpression: string | undefined; + ExpressionAttributeNames?: Record | undefined; + ExpressionAttributeValues?: Record | undefined; + ReturnValuesOnConditionCheckFailure?: + | ReturnValuesOnConditionCheckFailure + | undefined; +} +export interface Delete { + Key: Record | undefined; + TableName: string | undefined; + ConditionExpression?: string | undefined; + ExpressionAttributeNames?: Record | undefined; + ExpressionAttributeValues?: Record | undefined; + ReturnValuesOnConditionCheckFailure?: + | ReturnValuesOnConditionCheckFailure + | undefined; +} +export interface Put { + Item: Record | undefined; + TableName: string | undefined; + ConditionExpression?: string | undefined; + ExpressionAttributeNames?: Record | undefined; + ExpressionAttributeValues?: Record | undefined; + ReturnValuesOnConditionCheckFailure?: + | ReturnValuesOnConditionCheckFailure + | undefined; +} +export interface Update { + Key: Record | undefined; + UpdateExpression: string | undefined; + TableName: string | undefined; + ConditionExpression?: string | undefined; + ExpressionAttributeNames?: Record | undefined; + ExpressionAttributeValues?: Record | undefined; + ReturnValuesOnConditionCheckFailure?: + | ReturnValuesOnConditionCheckFailure + | undefined; +} +export interface BatchStatementResponse { + Error?: BatchStatementError | undefined; + TableName?: string | undefined; + Item?: Record | undefined; +} +export interface DeleteItemOutput { + Attributes?: Record | undefined; + ConsumedCapacity?: ConsumedCapacity | undefined; + ItemCollectionMetrics?: ItemCollectionMetrics | undefined; +} +export interface ExecuteStatementOutput { + Items?: Record[] | undefined; + NextToken?: string | undefined; + ConsumedCapacity?: ConsumedCapacity | undefined; + 
LastEvaluatedKey?: Record | undefined; +} +export interface PutItemOutput { + Attributes?: Record | undefined; + ConsumedCapacity?: ConsumedCapacity | undefined; + ItemCollectionMetrics?: ItemCollectionMetrics | undefined; +} +export interface QueryOutput { + Items?: Record[] | undefined; + Count?: number | undefined; + ScannedCount?: number | undefined; + LastEvaluatedKey?: Record | undefined; + ConsumedCapacity?: ConsumedCapacity | undefined; +} +export interface ScanOutput { + Items?: Record[] | undefined; + Count?: number | undefined; + ScannedCount?: number | undefined; + LastEvaluatedKey?: Record | undefined; + ConsumedCapacity?: ConsumedCapacity | undefined; +} +export interface UpdateItemOutput { + Attributes?: Record | undefined; + ConsumedCapacity?: ConsumedCapacity | undefined; + ItemCollectionMetrics?: ItemCollectionMetrics | undefined; +} +export interface WriteRequest { + PutRequest?: PutRequest | undefined; + DeleteRequest?: DeleteRequest | undefined; +} +export interface BatchExecuteStatementOutput { + Responses?: BatchStatementResponse[] | undefined; + ConsumedCapacity?: ConsumedCapacity[] | undefined; +} +export interface BatchGetItemOutput { + Responses?: Record[]> | undefined; + UnprocessedKeys?: Record | undefined; + ConsumedCapacity?: ConsumedCapacity[] | undefined; +} +export interface ScanInput { + TableName: string | undefined; + IndexName?: string | undefined; + AttributesToGet?: string[] | undefined; + Limit?: number | undefined; + Select?: Select | undefined; + ScanFilter?: Record | undefined; + ConditionalOperator?: ConditionalOperator | undefined; + ExclusiveStartKey?: Record | undefined; + ReturnConsumedCapacity?: ReturnConsumedCapacity | undefined; + TotalSegments?: number | undefined; + Segment?: number | undefined; + ProjectionExpression?: string | undefined; + FilterExpression?: string | undefined; + ExpressionAttributeNames?: Record | undefined; + ExpressionAttributeValues?: Record | undefined; + ConsistentRead?: boolean | 
undefined; +} +export interface BatchWriteItemInput { + RequestItems: Record | undefined; + ReturnConsumedCapacity?: ReturnConsumedCapacity | undefined; + ReturnItemCollectionMetrics?: ReturnItemCollectionMetrics | undefined; +} +export interface DeleteItemInput { + TableName: string | undefined; + Key: Record | undefined; + Expected?: Record | undefined; + ConditionalOperator?: ConditionalOperator | undefined; + ReturnValues?: ReturnValue | undefined; + ReturnConsumedCapacity?: ReturnConsumedCapacity | undefined; + ReturnItemCollectionMetrics?: ReturnItemCollectionMetrics | undefined; + ConditionExpression?: string | undefined; + ExpressionAttributeNames?: Record | undefined; + ExpressionAttributeValues?: Record | undefined; + ReturnValuesOnConditionCheckFailure?: + | ReturnValuesOnConditionCheckFailure + | undefined; +} +export interface PutItemInput { + TableName: string | undefined; + Item: Record | undefined; + Expected?: Record | undefined; + ReturnValues?: ReturnValue | undefined; + ReturnConsumedCapacity?: ReturnConsumedCapacity | undefined; + ReturnItemCollectionMetrics?: ReturnItemCollectionMetrics | undefined; + ConditionalOperator?: ConditionalOperator | undefined; + ConditionExpression?: string | undefined; + ExpressionAttributeNames?: Record | undefined; + ExpressionAttributeValues?: Record | undefined; + ReturnValuesOnConditionCheckFailure?: + | ReturnValuesOnConditionCheckFailure + | undefined; +} +export interface QueryInput { + TableName: string | undefined; + IndexName?: string | undefined; + Select?: Select | undefined; + AttributesToGet?: string[] | undefined; + Limit?: number | undefined; + ConsistentRead?: boolean | undefined; + KeyConditions?: Record | undefined; + QueryFilter?: Record | undefined; + ConditionalOperator?: ConditionalOperator | undefined; + ScanIndexForward?: boolean | undefined; + ExclusiveStartKey?: Record | undefined; + ReturnConsumedCapacity?: ReturnConsumedCapacity | undefined; + ProjectionExpression?: string | 
undefined; + FilterExpression?: string | undefined; + KeyConditionExpression?: string | undefined; + ExpressionAttributeNames?: Record | undefined; + ExpressionAttributeValues?: Record | undefined; +} +export interface BatchWriteItemOutput { + UnprocessedItems?: Record | undefined; + ItemCollectionMetrics?: Record | undefined; + ConsumedCapacity?: ConsumedCapacity[] | undefined; +} +export interface UpdateItemInput { + TableName: string | undefined; + Key: Record | undefined; + AttributeUpdates?: Record | undefined; + Expected?: Record | undefined; + ConditionalOperator?: ConditionalOperator | undefined; + ReturnValues?: ReturnValue | undefined; + ReturnConsumedCapacity?: ReturnConsumedCapacity | undefined; + ReturnItemCollectionMetrics?: ReturnItemCollectionMetrics | undefined; + UpdateExpression?: string | undefined; + ConditionExpression?: string | undefined; + ExpressionAttributeNames?: Record | undefined; + ExpressionAttributeValues?: Record | undefined; + ReturnValuesOnConditionCheckFailure?: + | ReturnValuesOnConditionCheckFailure + | undefined; +} +export interface TransactWriteItem { + ConditionCheck?: ConditionCheck | undefined; + Put?: Put | undefined; + Delete?: Delete | undefined; + Update?: Update | undefined; +} +export interface TransactWriteItemsInput { + TransactItems: TransactWriteItem[] | undefined; + ReturnConsumedCapacity?: ReturnConsumedCapacity | undefined; + ReturnItemCollectionMetrics?: ReturnItemCollectionMetrics | undefined; + ClientRequestToken?: string | undefined; +} diff --git a/amplify/functions/deleteDocument/node_modules/@aws-sdk/client-dynamodb/dist-types/ts3.4/pagination/Interfaces.d.ts b/amplify/functions/deleteDocument/node_modules/@aws-sdk/client-dynamodb/dist-types/ts3.4/pagination/Interfaces.d.ts new file mode 100644 index 0000000..109e7f1 --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@aws-sdk/client-dynamodb/dist-types/ts3.4/pagination/Interfaces.d.ts @@ -0,0 +1,6 @@ +import { PaginationConfiguration } 
from "@smithy/types"; +import { DynamoDBClient } from "../DynamoDBClient"; +export interface DynamoDBPaginationConfiguration + extends PaginationConfiguration { + client: DynamoDBClient; +} diff --git a/amplify/functions/deleteDocument/node_modules/@aws-sdk/client-dynamodb/dist-types/ts3.4/pagination/ListContributorInsightsPaginator.d.ts b/amplify/functions/deleteDocument/node_modules/@aws-sdk/client-dynamodb/dist-types/ts3.4/pagination/ListContributorInsightsPaginator.d.ts new file mode 100644 index 0000000..f4961a0 --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@aws-sdk/client-dynamodb/dist-types/ts3.4/pagination/ListContributorInsightsPaginator.d.ts @@ -0,0 +1,11 @@ +import { Paginator } from "@smithy/types"; +import { + ListContributorInsightsCommandInput, + ListContributorInsightsCommandOutput, +} from "../commands/ListContributorInsightsCommand"; +import { DynamoDBPaginationConfiguration } from "./Interfaces"; +export declare const paginateListContributorInsights: ( + config: DynamoDBPaginationConfiguration, + input: ListContributorInsightsCommandInput, + ...rest: any[] +) => Paginator; diff --git a/amplify/functions/deleteDocument/node_modules/@aws-sdk/client-dynamodb/dist-types/ts3.4/pagination/ListExportsPaginator.d.ts b/amplify/functions/deleteDocument/node_modules/@aws-sdk/client-dynamodb/dist-types/ts3.4/pagination/ListExportsPaginator.d.ts new file mode 100644 index 0000000..29a8603 --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@aws-sdk/client-dynamodb/dist-types/ts3.4/pagination/ListExportsPaginator.d.ts @@ -0,0 +1,11 @@ +import { Paginator } from "@smithy/types"; +import { + ListExportsCommandInput, + ListExportsCommandOutput, +} from "../commands/ListExportsCommand"; +import { DynamoDBPaginationConfiguration } from "./Interfaces"; +export declare const paginateListExports: ( + config: DynamoDBPaginationConfiguration, + input: ListExportsCommandInput, + ...rest: any[] +) => Paginator; diff --git 
a/amplify/functions/deleteDocument/node_modules/@aws-sdk/client-dynamodb/dist-types/ts3.4/pagination/ListImportsPaginator.d.ts b/amplify/functions/deleteDocument/node_modules/@aws-sdk/client-dynamodb/dist-types/ts3.4/pagination/ListImportsPaginator.d.ts new file mode 100644 index 0000000..f8903e8 --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@aws-sdk/client-dynamodb/dist-types/ts3.4/pagination/ListImportsPaginator.d.ts @@ -0,0 +1,11 @@ +import { Paginator } from "@smithy/types"; +import { + ListImportsCommandInput, + ListImportsCommandOutput, +} from "../commands/ListImportsCommand"; +import { DynamoDBPaginationConfiguration } from "./Interfaces"; +export declare const paginateListImports: ( + config: DynamoDBPaginationConfiguration, + input: ListImportsCommandInput, + ...rest: any[] +) => Paginator; diff --git a/amplify/functions/deleteDocument/node_modules/@aws-sdk/client-dynamodb/dist-types/ts3.4/pagination/ListTablesPaginator.d.ts b/amplify/functions/deleteDocument/node_modules/@aws-sdk/client-dynamodb/dist-types/ts3.4/pagination/ListTablesPaginator.d.ts new file mode 100644 index 0000000..6246d6e --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@aws-sdk/client-dynamodb/dist-types/ts3.4/pagination/ListTablesPaginator.d.ts @@ -0,0 +1,11 @@ +import { Paginator } from "@smithy/types"; +import { + ListTablesCommandInput, + ListTablesCommandOutput, +} from "../commands/ListTablesCommand"; +import { DynamoDBPaginationConfiguration } from "./Interfaces"; +export declare const paginateListTables: ( + config: DynamoDBPaginationConfiguration, + input: ListTablesCommandInput, + ...rest: any[] +) => Paginator; diff --git a/amplify/functions/deleteDocument/node_modules/@aws-sdk/client-dynamodb/dist-types/ts3.4/pagination/QueryPaginator.d.ts b/amplify/functions/deleteDocument/node_modules/@aws-sdk/client-dynamodb/dist-types/ts3.4/pagination/QueryPaginator.d.ts new file mode 100644 index 0000000..2ff4976 --- /dev/null +++ 
b/amplify/functions/deleteDocument/node_modules/@aws-sdk/client-dynamodb/dist-types/ts3.4/pagination/QueryPaginator.d.ts @@ -0,0 +1,11 @@ +import { Paginator } from "@smithy/types"; +import { + QueryCommandInput, + QueryCommandOutput, +} from "../commands/QueryCommand"; +import { DynamoDBPaginationConfiguration } from "./Interfaces"; +export declare const paginateQuery: ( + config: DynamoDBPaginationConfiguration, + input: QueryCommandInput, + ...rest: any[] +) => Paginator; diff --git a/amplify/functions/deleteDocument/node_modules/@aws-sdk/client-dynamodb/dist-types/ts3.4/pagination/ScanPaginator.d.ts b/amplify/functions/deleteDocument/node_modules/@aws-sdk/client-dynamodb/dist-types/ts3.4/pagination/ScanPaginator.d.ts new file mode 100644 index 0000000..a84dfd1 --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@aws-sdk/client-dynamodb/dist-types/ts3.4/pagination/ScanPaginator.d.ts @@ -0,0 +1,8 @@ +import { Paginator } from "@smithy/types"; +import { ScanCommandInput, ScanCommandOutput } from "../commands/ScanCommand"; +import { DynamoDBPaginationConfiguration } from "./Interfaces"; +export declare const paginateScan: ( + config: DynamoDBPaginationConfiguration, + input: ScanCommandInput, + ...rest: any[] +) => Paginator; diff --git a/amplify/functions/deleteDocument/node_modules/@aws-sdk/client-dynamodb/dist-types/ts3.4/pagination/index.d.ts b/amplify/functions/deleteDocument/node_modules/@aws-sdk/client-dynamodb/dist-types/ts3.4/pagination/index.d.ts new file mode 100644 index 0000000..a6dfcd0 --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@aws-sdk/client-dynamodb/dist-types/ts3.4/pagination/index.d.ts @@ -0,0 +1,7 @@ +export * from "./Interfaces"; +export * from "./ListContributorInsightsPaginator"; +export * from "./ListExportsPaginator"; +export * from "./ListImportsPaginator"; +export * from "./ListTablesPaginator"; +export * from "./QueryPaginator"; +export * from "./ScanPaginator"; diff --git 
a/amplify/functions/deleteDocument/node_modules/@aws-sdk/client-dynamodb/dist-types/ts3.4/protocols/Aws_json1_0.d.ts b/amplify/functions/deleteDocument/node_modules/@aws-sdk/client-dynamodb/dist-types/ts3.4/protocols/Aws_json1_0.d.ts new file mode 100644 index 0000000..83e200c --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@aws-sdk/client-dynamodb/dist-types/ts3.4/protocols/Aws_json1_0.d.ts @@ -0,0 +1,686 @@ +import { + HttpRequest as __HttpRequest, + HttpResponse as __HttpResponse, +} from "@smithy/protocol-http"; +import { SerdeContext as __SerdeContext } from "@smithy/types"; +import { + BatchExecuteStatementCommandInput, + BatchExecuteStatementCommandOutput, +} from "../commands/BatchExecuteStatementCommand"; +import { + BatchGetItemCommandInput, + BatchGetItemCommandOutput, +} from "../commands/BatchGetItemCommand"; +import { + BatchWriteItemCommandInput, + BatchWriteItemCommandOutput, +} from "../commands/BatchWriteItemCommand"; +import { + CreateBackupCommandInput, + CreateBackupCommandOutput, +} from "../commands/CreateBackupCommand"; +import { + CreateGlobalTableCommandInput, + CreateGlobalTableCommandOutput, +} from "../commands/CreateGlobalTableCommand"; +import { + CreateTableCommandInput, + CreateTableCommandOutput, +} from "../commands/CreateTableCommand"; +import { + DeleteBackupCommandInput, + DeleteBackupCommandOutput, +} from "../commands/DeleteBackupCommand"; +import { + DeleteItemCommandInput, + DeleteItemCommandOutput, +} from "../commands/DeleteItemCommand"; +import { + DeleteResourcePolicyCommandInput, + DeleteResourcePolicyCommandOutput, +} from "../commands/DeleteResourcePolicyCommand"; +import { + DeleteTableCommandInput, + DeleteTableCommandOutput, +} from "../commands/DeleteTableCommand"; +import { + DescribeBackupCommandInput, + DescribeBackupCommandOutput, +} from "../commands/DescribeBackupCommand"; +import { + DescribeContinuousBackupsCommandInput, + DescribeContinuousBackupsCommandOutput, +} from 
"../commands/DescribeContinuousBackupsCommand"; +import { + DescribeContributorInsightsCommandInput, + DescribeContributorInsightsCommandOutput, +} from "../commands/DescribeContributorInsightsCommand"; +import { + DescribeEndpointsCommandInput, + DescribeEndpointsCommandOutput, +} from "../commands/DescribeEndpointsCommand"; +import { + DescribeExportCommandInput, + DescribeExportCommandOutput, +} from "../commands/DescribeExportCommand"; +import { + DescribeGlobalTableCommandInput, + DescribeGlobalTableCommandOutput, +} from "../commands/DescribeGlobalTableCommand"; +import { + DescribeGlobalTableSettingsCommandInput, + DescribeGlobalTableSettingsCommandOutput, +} from "../commands/DescribeGlobalTableSettingsCommand"; +import { + DescribeImportCommandInput, + DescribeImportCommandOutput, +} from "../commands/DescribeImportCommand"; +import { + DescribeKinesisStreamingDestinationCommandInput, + DescribeKinesisStreamingDestinationCommandOutput, +} from "../commands/DescribeKinesisStreamingDestinationCommand"; +import { + DescribeLimitsCommandInput, + DescribeLimitsCommandOutput, +} from "../commands/DescribeLimitsCommand"; +import { + DescribeTableCommandInput, + DescribeTableCommandOutput, +} from "../commands/DescribeTableCommand"; +import { + DescribeTableReplicaAutoScalingCommandInput, + DescribeTableReplicaAutoScalingCommandOutput, +} from "../commands/DescribeTableReplicaAutoScalingCommand"; +import { + DescribeTimeToLiveCommandInput, + DescribeTimeToLiveCommandOutput, +} from "../commands/DescribeTimeToLiveCommand"; +import { + DisableKinesisStreamingDestinationCommandInput, + DisableKinesisStreamingDestinationCommandOutput, +} from "../commands/DisableKinesisStreamingDestinationCommand"; +import { + EnableKinesisStreamingDestinationCommandInput, + EnableKinesisStreamingDestinationCommandOutput, +} from "../commands/EnableKinesisStreamingDestinationCommand"; +import { + ExecuteStatementCommandInput, + ExecuteStatementCommandOutput, +} from 
"../commands/ExecuteStatementCommand"; +import { + ExecuteTransactionCommandInput, + ExecuteTransactionCommandOutput, +} from "../commands/ExecuteTransactionCommand"; +import { + ExportTableToPointInTimeCommandInput, + ExportTableToPointInTimeCommandOutput, +} from "../commands/ExportTableToPointInTimeCommand"; +import { + GetItemCommandInput, + GetItemCommandOutput, +} from "../commands/GetItemCommand"; +import { + GetResourcePolicyCommandInput, + GetResourcePolicyCommandOutput, +} from "../commands/GetResourcePolicyCommand"; +import { + ImportTableCommandInput, + ImportTableCommandOutput, +} from "../commands/ImportTableCommand"; +import { + ListBackupsCommandInput, + ListBackupsCommandOutput, +} from "../commands/ListBackupsCommand"; +import { + ListContributorInsightsCommandInput, + ListContributorInsightsCommandOutput, +} from "../commands/ListContributorInsightsCommand"; +import { + ListExportsCommandInput, + ListExportsCommandOutput, +} from "../commands/ListExportsCommand"; +import { + ListGlobalTablesCommandInput, + ListGlobalTablesCommandOutput, +} from "../commands/ListGlobalTablesCommand"; +import { + ListImportsCommandInput, + ListImportsCommandOutput, +} from "../commands/ListImportsCommand"; +import { + ListTablesCommandInput, + ListTablesCommandOutput, +} from "../commands/ListTablesCommand"; +import { + ListTagsOfResourceCommandInput, + ListTagsOfResourceCommandOutput, +} from "../commands/ListTagsOfResourceCommand"; +import { + PutItemCommandInput, + PutItemCommandOutput, +} from "../commands/PutItemCommand"; +import { + PutResourcePolicyCommandInput, + PutResourcePolicyCommandOutput, +} from "../commands/PutResourcePolicyCommand"; +import { + QueryCommandInput, + QueryCommandOutput, +} from "../commands/QueryCommand"; +import { + RestoreTableFromBackupCommandInput, + RestoreTableFromBackupCommandOutput, +} from "../commands/RestoreTableFromBackupCommand"; +import { + RestoreTableToPointInTimeCommandInput, + RestoreTableToPointInTimeCommandOutput, 
+} from "../commands/RestoreTableToPointInTimeCommand"; +import { ScanCommandInput, ScanCommandOutput } from "../commands/ScanCommand"; +import { + TagResourceCommandInput, + TagResourceCommandOutput, +} from "../commands/TagResourceCommand"; +import { + TransactGetItemsCommandInput, + TransactGetItemsCommandOutput, +} from "../commands/TransactGetItemsCommand"; +import { + TransactWriteItemsCommandInput, + TransactWriteItemsCommandOutput, +} from "../commands/TransactWriteItemsCommand"; +import { + UntagResourceCommandInput, + UntagResourceCommandOutput, +} from "../commands/UntagResourceCommand"; +import { + UpdateContinuousBackupsCommandInput, + UpdateContinuousBackupsCommandOutput, +} from "../commands/UpdateContinuousBackupsCommand"; +import { + UpdateContributorInsightsCommandInput, + UpdateContributorInsightsCommandOutput, +} from "../commands/UpdateContributorInsightsCommand"; +import { + UpdateGlobalTableCommandInput, + UpdateGlobalTableCommandOutput, +} from "../commands/UpdateGlobalTableCommand"; +import { + UpdateGlobalTableSettingsCommandInput, + UpdateGlobalTableSettingsCommandOutput, +} from "../commands/UpdateGlobalTableSettingsCommand"; +import { + UpdateItemCommandInput, + UpdateItemCommandOutput, +} from "../commands/UpdateItemCommand"; +import { + UpdateKinesisStreamingDestinationCommandInput, + UpdateKinesisStreamingDestinationCommandOutput, +} from "../commands/UpdateKinesisStreamingDestinationCommand"; +import { + UpdateTableCommandInput, + UpdateTableCommandOutput, +} from "../commands/UpdateTableCommand"; +import { + UpdateTableReplicaAutoScalingCommandInput, + UpdateTableReplicaAutoScalingCommandOutput, +} from "../commands/UpdateTableReplicaAutoScalingCommand"; +import { + UpdateTimeToLiveCommandInput, + UpdateTimeToLiveCommandOutput, +} from "../commands/UpdateTimeToLiveCommand"; +export declare const se_BatchExecuteStatementCommand: ( + input: BatchExecuteStatementCommandInput, + context: __SerdeContext +) => Promise<__HttpRequest>; 
+export declare const se_BatchGetItemCommand: ( + input: BatchGetItemCommandInput, + context: __SerdeContext +) => Promise<__HttpRequest>; +export declare const se_BatchWriteItemCommand: ( + input: BatchWriteItemCommandInput, + context: __SerdeContext +) => Promise<__HttpRequest>; +export declare const se_CreateBackupCommand: ( + input: CreateBackupCommandInput, + context: __SerdeContext +) => Promise<__HttpRequest>; +export declare const se_CreateGlobalTableCommand: ( + input: CreateGlobalTableCommandInput, + context: __SerdeContext +) => Promise<__HttpRequest>; +export declare const se_CreateTableCommand: ( + input: CreateTableCommandInput, + context: __SerdeContext +) => Promise<__HttpRequest>; +export declare const se_DeleteBackupCommand: ( + input: DeleteBackupCommandInput, + context: __SerdeContext +) => Promise<__HttpRequest>; +export declare const se_DeleteItemCommand: ( + input: DeleteItemCommandInput, + context: __SerdeContext +) => Promise<__HttpRequest>; +export declare const se_DeleteResourcePolicyCommand: ( + input: DeleteResourcePolicyCommandInput, + context: __SerdeContext +) => Promise<__HttpRequest>; +export declare const se_DeleteTableCommand: ( + input: DeleteTableCommandInput, + context: __SerdeContext +) => Promise<__HttpRequest>; +export declare const se_DescribeBackupCommand: ( + input: DescribeBackupCommandInput, + context: __SerdeContext +) => Promise<__HttpRequest>; +export declare const se_DescribeContinuousBackupsCommand: ( + input: DescribeContinuousBackupsCommandInput, + context: __SerdeContext +) => Promise<__HttpRequest>; +export declare const se_DescribeContributorInsightsCommand: ( + input: DescribeContributorInsightsCommandInput, + context: __SerdeContext +) => Promise<__HttpRequest>; +export declare const se_DescribeEndpointsCommand: ( + input: DescribeEndpointsCommandInput, + context: __SerdeContext +) => Promise<__HttpRequest>; +export declare const se_DescribeExportCommand: ( + input: DescribeExportCommandInput, + context: 
__SerdeContext +) => Promise<__HttpRequest>; +export declare const se_DescribeGlobalTableCommand: ( + input: DescribeGlobalTableCommandInput, + context: __SerdeContext +) => Promise<__HttpRequest>; +export declare const se_DescribeGlobalTableSettingsCommand: ( + input: DescribeGlobalTableSettingsCommandInput, + context: __SerdeContext +) => Promise<__HttpRequest>; +export declare const se_DescribeImportCommand: ( + input: DescribeImportCommandInput, + context: __SerdeContext +) => Promise<__HttpRequest>; +export declare const se_DescribeKinesisStreamingDestinationCommand: ( + input: DescribeKinesisStreamingDestinationCommandInput, + context: __SerdeContext +) => Promise<__HttpRequest>; +export declare const se_DescribeLimitsCommand: ( + input: DescribeLimitsCommandInput, + context: __SerdeContext +) => Promise<__HttpRequest>; +export declare const se_DescribeTableCommand: ( + input: DescribeTableCommandInput, + context: __SerdeContext +) => Promise<__HttpRequest>; +export declare const se_DescribeTableReplicaAutoScalingCommand: ( + input: DescribeTableReplicaAutoScalingCommandInput, + context: __SerdeContext +) => Promise<__HttpRequest>; +export declare const se_DescribeTimeToLiveCommand: ( + input: DescribeTimeToLiveCommandInput, + context: __SerdeContext +) => Promise<__HttpRequest>; +export declare const se_DisableKinesisStreamingDestinationCommand: ( + input: DisableKinesisStreamingDestinationCommandInput, + context: __SerdeContext +) => Promise<__HttpRequest>; +export declare const se_EnableKinesisStreamingDestinationCommand: ( + input: EnableKinesisStreamingDestinationCommandInput, + context: __SerdeContext +) => Promise<__HttpRequest>; +export declare const se_ExecuteStatementCommand: ( + input: ExecuteStatementCommandInput, + context: __SerdeContext +) => Promise<__HttpRequest>; +export declare const se_ExecuteTransactionCommand: ( + input: ExecuteTransactionCommandInput, + context: __SerdeContext +) => Promise<__HttpRequest>; +export declare const 
se_ExportTableToPointInTimeCommand: ( + input: ExportTableToPointInTimeCommandInput, + context: __SerdeContext +) => Promise<__HttpRequest>; +export declare const se_GetItemCommand: ( + input: GetItemCommandInput, + context: __SerdeContext +) => Promise<__HttpRequest>; +export declare const se_GetResourcePolicyCommand: ( + input: GetResourcePolicyCommandInput, + context: __SerdeContext +) => Promise<__HttpRequest>; +export declare const se_ImportTableCommand: ( + input: ImportTableCommandInput, + context: __SerdeContext +) => Promise<__HttpRequest>; +export declare const se_ListBackupsCommand: ( + input: ListBackupsCommandInput, + context: __SerdeContext +) => Promise<__HttpRequest>; +export declare const se_ListContributorInsightsCommand: ( + input: ListContributorInsightsCommandInput, + context: __SerdeContext +) => Promise<__HttpRequest>; +export declare const se_ListExportsCommand: ( + input: ListExportsCommandInput, + context: __SerdeContext +) => Promise<__HttpRequest>; +export declare const se_ListGlobalTablesCommand: ( + input: ListGlobalTablesCommandInput, + context: __SerdeContext +) => Promise<__HttpRequest>; +export declare const se_ListImportsCommand: ( + input: ListImportsCommandInput, + context: __SerdeContext +) => Promise<__HttpRequest>; +export declare const se_ListTablesCommand: ( + input: ListTablesCommandInput, + context: __SerdeContext +) => Promise<__HttpRequest>; +export declare const se_ListTagsOfResourceCommand: ( + input: ListTagsOfResourceCommandInput, + context: __SerdeContext +) => Promise<__HttpRequest>; +export declare const se_PutItemCommand: ( + input: PutItemCommandInput, + context: __SerdeContext +) => Promise<__HttpRequest>; +export declare const se_PutResourcePolicyCommand: ( + input: PutResourcePolicyCommandInput, + context: __SerdeContext +) => Promise<__HttpRequest>; +export declare const se_QueryCommand: ( + input: QueryCommandInput, + context: __SerdeContext +) => Promise<__HttpRequest>; +export declare const 
se_RestoreTableFromBackupCommand: ( + input: RestoreTableFromBackupCommandInput, + context: __SerdeContext +) => Promise<__HttpRequest>; +export declare const se_RestoreTableToPointInTimeCommand: ( + input: RestoreTableToPointInTimeCommandInput, + context: __SerdeContext +) => Promise<__HttpRequest>; +export declare const se_ScanCommand: ( + input: ScanCommandInput, + context: __SerdeContext +) => Promise<__HttpRequest>; +export declare const se_TagResourceCommand: ( + input: TagResourceCommandInput, + context: __SerdeContext +) => Promise<__HttpRequest>; +export declare const se_TransactGetItemsCommand: ( + input: TransactGetItemsCommandInput, + context: __SerdeContext +) => Promise<__HttpRequest>; +export declare const se_TransactWriteItemsCommand: ( + input: TransactWriteItemsCommandInput, + context: __SerdeContext +) => Promise<__HttpRequest>; +export declare const se_UntagResourceCommand: ( + input: UntagResourceCommandInput, + context: __SerdeContext +) => Promise<__HttpRequest>; +export declare const se_UpdateContinuousBackupsCommand: ( + input: UpdateContinuousBackupsCommandInput, + context: __SerdeContext +) => Promise<__HttpRequest>; +export declare const se_UpdateContributorInsightsCommand: ( + input: UpdateContributorInsightsCommandInput, + context: __SerdeContext +) => Promise<__HttpRequest>; +export declare const se_UpdateGlobalTableCommand: ( + input: UpdateGlobalTableCommandInput, + context: __SerdeContext +) => Promise<__HttpRequest>; +export declare const se_UpdateGlobalTableSettingsCommand: ( + input: UpdateGlobalTableSettingsCommandInput, + context: __SerdeContext +) => Promise<__HttpRequest>; +export declare const se_UpdateItemCommand: ( + input: UpdateItemCommandInput, + context: __SerdeContext +) => Promise<__HttpRequest>; +export declare const se_UpdateKinesisStreamingDestinationCommand: ( + input: UpdateKinesisStreamingDestinationCommandInput, + context: __SerdeContext +) => Promise<__HttpRequest>; +export declare const 
se_UpdateTableCommand: ( + input: UpdateTableCommandInput, + context: __SerdeContext +) => Promise<__HttpRequest>; +export declare const se_UpdateTableReplicaAutoScalingCommand: ( + input: UpdateTableReplicaAutoScalingCommandInput, + context: __SerdeContext +) => Promise<__HttpRequest>; +export declare const se_UpdateTimeToLiveCommand: ( + input: UpdateTimeToLiveCommandInput, + context: __SerdeContext +) => Promise<__HttpRequest>; +export declare const de_BatchExecuteStatementCommand: ( + output: __HttpResponse, + context: __SerdeContext +) => Promise; +export declare const de_BatchGetItemCommand: ( + output: __HttpResponse, + context: __SerdeContext +) => Promise; +export declare const de_BatchWriteItemCommand: ( + output: __HttpResponse, + context: __SerdeContext +) => Promise; +export declare const de_CreateBackupCommand: ( + output: __HttpResponse, + context: __SerdeContext +) => Promise; +export declare const de_CreateGlobalTableCommand: ( + output: __HttpResponse, + context: __SerdeContext +) => Promise; +export declare const de_CreateTableCommand: ( + output: __HttpResponse, + context: __SerdeContext +) => Promise; +export declare const de_DeleteBackupCommand: ( + output: __HttpResponse, + context: __SerdeContext +) => Promise; +export declare const de_DeleteItemCommand: ( + output: __HttpResponse, + context: __SerdeContext +) => Promise; +export declare const de_DeleteResourcePolicyCommand: ( + output: __HttpResponse, + context: __SerdeContext +) => Promise; +export declare const de_DeleteTableCommand: ( + output: __HttpResponse, + context: __SerdeContext +) => Promise; +export declare const de_DescribeBackupCommand: ( + output: __HttpResponse, + context: __SerdeContext +) => Promise; +export declare const de_DescribeContinuousBackupsCommand: ( + output: __HttpResponse, + context: __SerdeContext +) => Promise; +export declare const de_DescribeContributorInsightsCommand: ( + output: __HttpResponse, + context: __SerdeContext +) => Promise; +export declare 
const de_DescribeEndpointsCommand: ( + output: __HttpResponse, + context: __SerdeContext +) => Promise; +export declare const de_DescribeExportCommand: ( + output: __HttpResponse, + context: __SerdeContext +) => Promise; +export declare const de_DescribeGlobalTableCommand: ( + output: __HttpResponse, + context: __SerdeContext +) => Promise; +export declare const de_DescribeGlobalTableSettingsCommand: ( + output: __HttpResponse, + context: __SerdeContext +) => Promise; +export declare const de_DescribeImportCommand: ( + output: __HttpResponse, + context: __SerdeContext +) => Promise; +export declare const de_DescribeKinesisStreamingDestinationCommand: ( + output: __HttpResponse, + context: __SerdeContext +) => Promise; +export declare const de_DescribeLimitsCommand: ( + output: __HttpResponse, + context: __SerdeContext +) => Promise; +export declare const de_DescribeTableCommand: ( + output: __HttpResponse, + context: __SerdeContext +) => Promise; +export declare const de_DescribeTableReplicaAutoScalingCommand: ( + output: __HttpResponse, + context: __SerdeContext +) => Promise; +export declare const de_DescribeTimeToLiveCommand: ( + output: __HttpResponse, + context: __SerdeContext +) => Promise; +export declare const de_DisableKinesisStreamingDestinationCommand: ( + output: __HttpResponse, + context: __SerdeContext +) => Promise; +export declare const de_EnableKinesisStreamingDestinationCommand: ( + output: __HttpResponse, + context: __SerdeContext +) => Promise; +export declare const de_ExecuteStatementCommand: ( + output: __HttpResponse, + context: __SerdeContext +) => Promise; +export declare const de_ExecuteTransactionCommand: ( + output: __HttpResponse, + context: __SerdeContext +) => Promise; +export declare const de_ExportTableToPointInTimeCommand: ( + output: __HttpResponse, + context: __SerdeContext +) => Promise; +export declare const de_GetItemCommand: ( + output: __HttpResponse, + context: __SerdeContext +) => Promise; +export declare const 
de_GetResourcePolicyCommand: ( + output: __HttpResponse, + context: __SerdeContext +) => Promise; +export declare const de_ImportTableCommand: ( + output: __HttpResponse, + context: __SerdeContext +) => Promise; +export declare const de_ListBackupsCommand: ( + output: __HttpResponse, + context: __SerdeContext +) => Promise; +export declare const de_ListContributorInsightsCommand: ( + output: __HttpResponse, + context: __SerdeContext +) => Promise; +export declare const de_ListExportsCommand: ( + output: __HttpResponse, + context: __SerdeContext +) => Promise; +export declare const de_ListGlobalTablesCommand: ( + output: __HttpResponse, + context: __SerdeContext +) => Promise; +export declare const de_ListImportsCommand: ( + output: __HttpResponse, + context: __SerdeContext +) => Promise; +export declare const de_ListTablesCommand: ( + output: __HttpResponse, + context: __SerdeContext +) => Promise; +export declare const de_ListTagsOfResourceCommand: ( + output: __HttpResponse, + context: __SerdeContext +) => Promise; +export declare const de_PutItemCommand: ( + output: __HttpResponse, + context: __SerdeContext +) => Promise; +export declare const de_PutResourcePolicyCommand: ( + output: __HttpResponse, + context: __SerdeContext +) => Promise; +export declare const de_QueryCommand: ( + output: __HttpResponse, + context: __SerdeContext +) => Promise; +export declare const de_RestoreTableFromBackupCommand: ( + output: __HttpResponse, + context: __SerdeContext +) => Promise; +export declare const de_RestoreTableToPointInTimeCommand: ( + output: __HttpResponse, + context: __SerdeContext +) => Promise; +export declare const de_ScanCommand: ( + output: __HttpResponse, + context: __SerdeContext +) => Promise; +export declare const de_TagResourceCommand: ( + output: __HttpResponse, + context: __SerdeContext +) => Promise; +export declare const de_TransactGetItemsCommand: ( + output: __HttpResponse, + context: __SerdeContext +) => Promise; +export declare const 
de_TransactWriteItemsCommand: ( + output: __HttpResponse, + context: __SerdeContext +) => Promise; +export declare const de_UntagResourceCommand: ( + output: __HttpResponse, + context: __SerdeContext +) => Promise; +export declare const de_UpdateContinuousBackupsCommand: ( + output: __HttpResponse, + context: __SerdeContext +) => Promise; +export declare const de_UpdateContributorInsightsCommand: ( + output: __HttpResponse, + context: __SerdeContext +) => Promise; +export declare const de_UpdateGlobalTableCommand: ( + output: __HttpResponse, + context: __SerdeContext +) => Promise; +export declare const de_UpdateGlobalTableSettingsCommand: ( + output: __HttpResponse, + context: __SerdeContext +) => Promise; +export declare const de_UpdateItemCommand: ( + output: __HttpResponse, + context: __SerdeContext +) => Promise; +export declare const de_UpdateKinesisStreamingDestinationCommand: ( + output: __HttpResponse, + context: __SerdeContext +) => Promise; +export declare const de_UpdateTableCommand: ( + output: __HttpResponse, + context: __SerdeContext +) => Promise; +export declare const de_UpdateTableReplicaAutoScalingCommand: ( + output: __HttpResponse, + context: __SerdeContext +) => Promise; +export declare const de_UpdateTimeToLiveCommand: ( + output: __HttpResponse, + context: __SerdeContext +) => Promise; diff --git a/amplify/functions/deleteDocument/node_modules/@aws-sdk/client-dynamodb/dist-types/ts3.4/runtimeConfig.browser.d.ts b/amplify/functions/deleteDocument/node_modules/@aws-sdk/client-dynamodb/dist-types/ts3.4/runtimeConfig.browser.d.ts new file mode 100644 index 0000000..80f326c --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@aws-sdk/client-dynamodb/dist-types/ts3.4/runtimeConfig.browser.d.ts @@ -0,0 +1,111 @@ +import { FetchHttpHandler as RequestHandler } from "@smithy/fetch-http-handler"; +import { DynamoDBClientConfig } from "./DynamoDBClient"; +export declare const getRuntimeConfig: (config: DynamoDBClientConfig) => { + runtime: 
string; + defaultsMode: import("@smithy/types").Provider< + import("@smithy/smithy-client").ResolvedDefaultsMode + >; + accountIdEndpointMode: + | "disabled" + | "preferred" + | "required" + | (() => Promise); + bodyLengthChecker: import("@smithy/types").BodyLengthCalculator; + credentialDefaultProvider: + | ((input: any) => import("@smithy/types").AwsCredentialIdentityProvider) + | (( + _: unknown + ) => () => Promise); + defaultUserAgentProvider: ( + config?: + | import("@aws-sdk/util-user-agent-browser").PreviouslyResolved + | undefined + ) => Promise; + endpointDiscoveryEnabledProvider: import("@smithy/types").Provider< + boolean | undefined + >; + maxAttempts: number | import("@smithy/types").Provider; + region: string | import("@smithy/types").Provider; + requestHandler: + | import("@smithy/protocol-http").HttpHandler + | RequestHandler; + retryMode: string | import("@smithy/types").Provider; + sha256: import("@smithy/types").HashConstructor; + streamCollector: import("@smithy/types").StreamCollector; + useDualstackEndpoint: boolean | import("@smithy/types").Provider; + useFipsEndpoint: boolean | import("@smithy/types").Provider; + apiVersion: string; + cacheMiddleware?: boolean | undefined; + urlParser: import("@smithy/types").UrlParser; + base64Decoder: import("@smithy/types").Decoder; + base64Encoder: (_input: string | Uint8Array) => string; + utf8Decoder: import("@smithy/types").Decoder; + utf8Encoder: (input: string | Uint8Array) => string; + disableHostPrefix: boolean; + serviceId: string; + profile?: string | undefined; + logger: import("@smithy/types").Logger; + extensions: import("./runtimeExtensions").RuntimeExtension[]; + customUserAgent?: string | import("@smithy/types").UserAgent | undefined; + userAgentAppId?: + | string + | import("@smithy/types").Provider + | undefined; + retryStrategy?: + | import("@smithy/types").RetryStrategy + | import("@smithy/types").RetryStrategyV2 + | undefined; + endpoint?: + | (( + | string + | 
import("@smithy/types").Endpoint + | import("@smithy/types").Provider + | import("@smithy/types").EndpointV2 + | import("@smithy/types").Provider + ) & + ( + | string + | import("@smithy/types").Provider + | import("@smithy/types").Endpoint + | import("@smithy/types").Provider + | import("@smithy/types").EndpointV2 + | import("@smithy/types").Provider + )) + | undefined; + endpointProvider: ( + endpointParams: import("./endpoint/EndpointParameters").EndpointParameters, + context?: { + logger?: import("@smithy/types").Logger | undefined; + } + ) => import("@smithy/types").EndpointV2; + tls?: boolean | undefined; + serviceConfiguredEndpoint?: undefined; + authSchemePreference?: + | string[] + | import("@smithy/types").Provider + | undefined; + httpAuthSchemes: import("@smithy/types").HttpAuthScheme[]; + httpAuthSchemeProvider: import("./auth/httpAuthSchemeProvider").DynamoDBHttpAuthSchemeProvider; + credentials?: + | import("@smithy/types").AwsCredentialIdentity + | import("@smithy/types").AwsCredentialIdentityProvider + | undefined; + signer?: + | import("@smithy/types").RequestSigner + | (( + authScheme?: import("@smithy/types").AuthScheme | undefined + ) => Promise) + | undefined; + signingEscapePath?: boolean | undefined; + systemClockOffset?: number | undefined; + signingRegion?: string | undefined; + signerConstructor?: + | (new ( + options: import("@smithy/signature-v4").SignatureV4Init & + import("@smithy/signature-v4").SignatureV4CryptoInit + ) => import("@smithy/types").RequestSigner) + | undefined; + endpointCacheSize?: number | undefined; + endpointDiscoveryEnabled?: boolean | undefined; + accountId?: string | import("@smithy/types").Provider | undefined; +}; diff --git a/amplify/functions/deleteDocument/node_modules/@aws-sdk/client-dynamodb/dist-types/ts3.4/runtimeConfig.d.ts b/amplify/functions/deleteDocument/node_modules/@aws-sdk/client-dynamodb/dist-types/ts3.4/runtimeConfig.d.ts new file mode 100644 index 0000000..32d5489 --- /dev/null +++ 
b/amplify/functions/deleteDocument/node_modules/@aws-sdk/client-dynamodb/dist-types/ts3.4/runtimeConfig.d.ts @@ -0,0 +1,111 @@ +import { NodeHttpHandler as RequestHandler } from "@smithy/node-http-handler"; +import { DynamoDBClientConfig } from "./DynamoDBClient"; +export declare const getRuntimeConfig: (config: DynamoDBClientConfig) => { + runtime: string; + defaultsMode: import("@smithy/types").Provider< + import("@smithy/smithy-client").ResolvedDefaultsMode + >; + accountIdEndpointMode: + | "disabled" + | "preferred" + | "required" + | import("@smithy/types").Provider< + import("@aws-sdk/core/account-id-endpoint").AccountIdEndpointMode + >; + authSchemePreference: string[] | import("@smithy/types").Provider; + bodyLengthChecker: import("@smithy/types").BodyLengthCalculator; + credentialDefaultProvider: + | ((input: any) => import("@smithy/types").AwsCredentialIdentityProvider) + | (( + init?: + | import("@aws-sdk/credential-provider-node").DefaultProviderInit + | undefined + ) => import("@smithy/types").MemoizedProvider< + import("@smithy/types").AwsCredentialIdentity + >); + defaultUserAgentProvider: ( + config?: + | import("@aws-sdk/util-user-agent-node").PreviouslyResolved + | undefined + ) => Promise; + endpointDiscoveryEnabledProvider: import("@smithy/types").Provider< + boolean | undefined + >; + maxAttempts: number | import("@smithy/types").Provider; + region: string | import("@smithy/types").Provider; + requestHandler: + | RequestHandler + | import("@smithy/protocol-http").HttpHandler; + retryMode: string | import("@smithy/types").Provider; + sha256: import("@smithy/types").HashConstructor; + streamCollector: import("@smithy/types").StreamCollector; + useDualstackEndpoint: boolean | import("@smithy/types").Provider; + useFipsEndpoint: boolean | import("@smithy/types").Provider; + userAgentAppId: string | import("@smithy/types").Provider; + apiVersion: string; + cacheMiddleware?: boolean | undefined; + urlParser: import("@smithy/types").UrlParser; + 
base64Decoder: import("@smithy/types").Decoder; + base64Encoder: (_input: string | Uint8Array) => string; + utf8Decoder: import("@smithy/types").Decoder; + utf8Encoder: (input: string | Uint8Array) => string; + disableHostPrefix: boolean; + serviceId: string; + profile?: string | undefined; + logger: import("@smithy/types").Logger; + extensions: import("./runtimeExtensions").RuntimeExtension[]; + customUserAgent?: string | import("@smithy/types").UserAgent | undefined; + retryStrategy?: + | import("@smithy/types").RetryStrategy + | import("@smithy/types").RetryStrategyV2 + | undefined; + endpoint?: + | (( + | string + | import("@smithy/types").Endpoint + | import("@smithy/types").Provider + | import("@smithy/types").EndpointV2 + | import("@smithy/types").Provider + ) & + ( + | string + | import("@smithy/types").Provider + | import("@smithy/types").Endpoint + | import("@smithy/types").Provider + | import("@smithy/types").EndpointV2 + | import("@smithy/types").Provider + )) + | undefined; + endpointProvider: ( + endpointParams: import("./endpoint/EndpointParameters").EndpointParameters, + context?: { + logger?: import("@smithy/types").Logger | undefined; + } + ) => import("@smithy/types").EndpointV2; + tls?: boolean | undefined; + serviceConfiguredEndpoint?: undefined; + httpAuthSchemes: import("@smithy/types").HttpAuthScheme[]; + httpAuthSchemeProvider: import("./auth/httpAuthSchemeProvider").DynamoDBHttpAuthSchemeProvider; + credentials?: + | import("@smithy/types").AwsCredentialIdentity + | import("@smithy/types").AwsCredentialIdentityProvider + | undefined; + signer?: + | import("@smithy/types").RequestSigner + | (( + authScheme?: import("@smithy/types").AuthScheme | undefined + ) => Promise) + | undefined; + signingEscapePath?: boolean | undefined; + systemClockOffset?: number | undefined; + signingRegion?: string | undefined; + signerConstructor?: + | (new ( + options: import("@smithy/signature-v4").SignatureV4Init & + 
import("@smithy/signature-v4").SignatureV4CryptoInit + ) => import("@smithy/types").RequestSigner) + | undefined; + endpointCacheSize?: number | undefined; + endpointDiscoveryEnabled?: boolean | undefined; + accountId?: string | import("@smithy/types").Provider | undefined; +}; diff --git a/amplify/functions/deleteDocument/node_modules/@aws-sdk/client-dynamodb/dist-types/ts3.4/runtimeConfig.native.d.ts b/amplify/functions/deleteDocument/node_modules/@aws-sdk/client-dynamodb/dist-types/ts3.4/runtimeConfig.native.d.ts new file mode 100644 index 0000000..f3583eb --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@aws-sdk/client-dynamodb/dist-types/ts3.4/runtimeConfig.native.d.ts @@ -0,0 +1,115 @@ +import { DynamoDBClientConfig } from "./DynamoDBClient"; +export declare const getRuntimeConfig: (config: DynamoDBClientConfig) => { + runtime: string; + sha256: import("@smithy/types").HashConstructor; + requestHandler: + | import("@smithy/types").NodeHttpHandlerOptions + | import("@smithy/types").FetchHttpHandlerOptions + | Record + | import("@smithy/protocol-http").HttpHandler + | import("@smithy/fetch-http-handler").FetchHttpHandler; + apiVersion: string; + cacheMiddleware?: boolean | undefined; + urlParser: import("@smithy/types").UrlParser; + bodyLengthChecker: import("@smithy/types").BodyLengthCalculator; + streamCollector: import("@smithy/types").StreamCollector; + base64Decoder: import("@smithy/types").Decoder; + base64Encoder: (_input: string | Uint8Array) => string; + utf8Decoder: import("@smithy/types").Decoder; + utf8Encoder: (input: string | Uint8Array) => string; + disableHostPrefix: boolean; + serviceId: string; + useDualstackEndpoint: boolean | import("@smithy/types").Provider; + useFipsEndpoint: boolean | import("@smithy/types").Provider; + region: string | import("@smithy/types").Provider; + profile?: string | undefined; + accountIdEndpointMode: + | "disabled" + | "preferred" + | "required" + | (() => Promise); + defaultUserAgentProvider: ( 
+ config?: + | import("@aws-sdk/util-user-agent-browser").PreviouslyResolved + | undefined + ) => Promise; + credentialDefaultProvider: + | ((input: any) => import("@smithy/types").AwsCredentialIdentityProvider) + | (( + _: unknown + ) => () => Promise); + maxAttempts: number | import("@smithy/types").Provider; + retryMode: string | import("@smithy/types").Provider; + logger: import("@smithy/types").Logger; + extensions: import("./runtimeExtensions").RuntimeExtension[]; + defaultsMode: + | import("@smithy/smithy-client").DefaultsMode + | import("@smithy/types").Provider< + import("@smithy/smithy-client").DefaultsMode + >; + endpointDiscoveryEnabledProvider: import("@smithy/types").Provider< + boolean | undefined + >; + customUserAgent?: string | import("@smithy/types").UserAgent | undefined; + userAgentAppId?: + | string + | import("@smithy/types").Provider + | undefined; + retryStrategy?: + | import("@smithy/types").RetryStrategy + | import("@smithy/types").RetryStrategyV2 + | undefined; + endpoint?: + | (( + | string + | import("@smithy/types").Endpoint + | import("@smithy/types").Provider + | import("@smithy/types").EndpointV2 + | import("@smithy/types").Provider + ) & + ( + | string + | import("@smithy/types").Provider + | import("@smithy/types").Endpoint + | import("@smithy/types").Provider + | import("@smithy/types").EndpointV2 + | import("@smithy/types").Provider + )) + | undefined; + endpointProvider: ( + endpointParams: import("./endpoint/EndpointParameters").EndpointParameters, + context?: { + logger?: import("@smithy/types").Logger | undefined; + } + ) => import("@smithy/types").EndpointV2; + tls?: boolean | undefined; + serviceConfiguredEndpoint?: undefined; + authSchemePreference?: + | string[] + | import("@smithy/types").Provider + | undefined; + httpAuthSchemes: import("@smithy/types").HttpAuthScheme[]; + httpAuthSchemeProvider: import("./auth/httpAuthSchemeProvider").DynamoDBHttpAuthSchemeProvider; + credentials?: + | 
import("@smithy/types").AwsCredentialIdentity + | import("@smithy/types").AwsCredentialIdentityProvider + | undefined; + signer?: + | import("@smithy/types").RequestSigner + | (( + authScheme?: import("@smithy/types").AuthScheme | undefined + ) => Promise) + | undefined; + signingEscapePath?: boolean | undefined; + systemClockOffset?: number | undefined; + signingRegion?: string | undefined; + signerConstructor?: + | (new ( + options: import("@smithy/signature-v4").SignatureV4Init & + import("@smithy/signature-v4").SignatureV4CryptoInit + ) => import("@smithy/types").RequestSigner) + | undefined; + endpointCacheSize?: number | undefined; + endpointDiscoveryEnabled?: boolean | undefined; + accountId?: string | import("@smithy/types").Provider | undefined; +}; diff --git a/amplify/functions/deleteDocument/node_modules/@aws-sdk/client-dynamodb/dist-types/ts3.4/runtimeConfig.shared.d.ts b/amplify/functions/deleteDocument/node_modules/@aws-sdk/client-dynamodb/dist-types/ts3.4/runtimeConfig.shared.d.ts new file mode 100644 index 0000000..5d480e8 --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@aws-sdk/client-dynamodb/dist-types/ts3.4/runtimeConfig.shared.d.ts @@ -0,0 +1,21 @@ +import { DynamoDBClientConfig } from "./DynamoDBClient"; +export declare const getRuntimeConfig: (config: DynamoDBClientConfig) => { + apiVersion: string; + base64Decoder: import("@smithy/types").Decoder; + base64Encoder: (_input: string | Uint8Array) => string; + disableHostPrefix: boolean; + endpointProvider: ( + endpointParams: import("./endpoint/EndpointParameters").EndpointParameters, + context?: { + logger?: import("@smithy/types").Logger | undefined; + } + ) => import("@smithy/types").EndpointV2; + extensions: import("./runtimeExtensions").RuntimeExtension[]; + httpAuthSchemeProvider: import("./auth/httpAuthSchemeProvider").DynamoDBHttpAuthSchemeProvider; + httpAuthSchemes: import("@smithy/types").HttpAuthScheme[]; + logger: import("@smithy/types").Logger; + serviceId: 
string; + urlParser: import("@smithy/types").UrlParser; + utf8Decoder: import("@smithy/types").Decoder; + utf8Encoder: (input: string | Uint8Array) => string; +}; diff --git a/amplify/functions/deleteDocument/node_modules/@aws-sdk/client-dynamodb/dist-types/ts3.4/runtimeExtensions.d.ts b/amplify/functions/deleteDocument/node_modules/@aws-sdk/client-dynamodb/dist-types/ts3.4/runtimeExtensions.d.ts new file mode 100644 index 0000000..632b45b --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@aws-sdk/client-dynamodb/dist-types/ts3.4/runtimeExtensions.d.ts @@ -0,0 +1,11 @@ +import { DynamoDBExtensionConfiguration } from "./extensionConfiguration"; +export interface RuntimeExtension { + configure(extensionConfiguration: DynamoDBExtensionConfiguration): void; +} +export interface RuntimeExtensionsConfig { + extensions: RuntimeExtension[]; +} +export declare const resolveRuntimeExtensions: ( + runtimeConfig: any, + extensions: RuntimeExtension[] +) => any; diff --git a/amplify/functions/deleteDocument/node_modules/@aws-sdk/client-dynamodb/dist-types/ts3.4/waiters/index.d.ts b/amplify/functions/deleteDocument/node_modules/@aws-sdk/client-dynamodb/dist-types/ts3.4/waiters/index.d.ts new file mode 100644 index 0000000..a70fabd --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@aws-sdk/client-dynamodb/dist-types/ts3.4/waiters/index.d.ts @@ -0,0 +1,2 @@ +export * from "./waitForTableExists"; +export * from "./waitForTableNotExists"; diff --git a/amplify/functions/deleteDocument/node_modules/@aws-sdk/client-dynamodb/dist-types/ts3.4/waiters/waitForTableExists.d.ts b/amplify/functions/deleteDocument/node_modules/@aws-sdk/client-dynamodb/dist-types/ts3.4/waiters/waitForTableExists.d.ts new file mode 100644 index 0000000..5dba224 --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@aws-sdk/client-dynamodb/dist-types/ts3.4/waiters/waitForTableExists.d.ts @@ -0,0 +1,11 @@ +import { WaiterConfiguration, WaiterResult } from 
"@smithy/util-waiter"; +import { DescribeTableCommandInput } from "../commands/DescribeTableCommand"; +import { DynamoDBClient } from "../DynamoDBClient"; +export declare const waitForTableExists: ( + params: WaiterConfiguration, + input: DescribeTableCommandInput +) => Promise; +export declare const waitUntilTableExists: ( + params: WaiterConfiguration, + input: DescribeTableCommandInput +) => Promise; diff --git a/amplify/functions/deleteDocument/node_modules/@aws-sdk/client-dynamodb/dist-types/ts3.4/waiters/waitForTableNotExists.d.ts b/amplify/functions/deleteDocument/node_modules/@aws-sdk/client-dynamodb/dist-types/ts3.4/waiters/waitForTableNotExists.d.ts new file mode 100644 index 0000000..1df1b34 --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@aws-sdk/client-dynamodb/dist-types/ts3.4/waiters/waitForTableNotExists.d.ts @@ -0,0 +1,11 @@ +import { WaiterConfiguration, WaiterResult } from "@smithy/util-waiter"; +import { DescribeTableCommandInput } from "../commands/DescribeTableCommand"; +import { DynamoDBClient } from "../DynamoDBClient"; +export declare const waitForTableNotExists: ( + params: WaiterConfiguration, + input: DescribeTableCommandInput +) => Promise; +export declare const waitUntilTableNotExists: ( + params: WaiterConfiguration, + input: DescribeTableCommandInput +) => Promise; diff --git a/amplify/functions/deleteDocument/node_modules/@aws-sdk/client-dynamodb/dist-types/waiters/index.d.ts b/amplify/functions/deleteDocument/node_modules/@aws-sdk/client-dynamodb/dist-types/waiters/index.d.ts new file mode 100644 index 0000000..a70fabd --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@aws-sdk/client-dynamodb/dist-types/waiters/index.d.ts @@ -0,0 +1,2 @@ +export * from "./waitForTableExists"; +export * from "./waitForTableNotExists"; diff --git a/amplify/functions/deleteDocument/node_modules/@aws-sdk/client-dynamodb/dist-types/waiters/waitForTableExists.d.ts 
b/amplify/functions/deleteDocument/node_modules/@aws-sdk/client-dynamodb/dist-types/waiters/waitForTableExists.d.ts new file mode 100644 index 0000000..a8ae496 --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@aws-sdk/client-dynamodb/dist-types/waiters/waitForTableExists.d.ts @@ -0,0 +1,14 @@ +import { WaiterConfiguration, WaiterResult } from "@smithy/util-waiter"; +import { DescribeTableCommandInput } from "../commands/DescribeTableCommand"; +import { DynamoDBClient } from "../DynamoDBClient"; +/** + * + * @deprecated Use waitUntilTableExists instead. waitForTableExists does not throw error in non-success cases. + */ +export declare const waitForTableExists: (params: WaiterConfiguration, input: DescribeTableCommandInput) => Promise; +/** + * + * @param params - Waiter configuration options. + * @param input - The input to DescribeTableCommand for polling. + */ +export declare const waitUntilTableExists: (params: WaiterConfiguration, input: DescribeTableCommandInput) => Promise; diff --git a/amplify/functions/deleteDocument/node_modules/@aws-sdk/client-dynamodb/dist-types/waiters/waitForTableNotExists.d.ts b/amplify/functions/deleteDocument/node_modules/@aws-sdk/client-dynamodb/dist-types/waiters/waitForTableNotExists.d.ts new file mode 100644 index 0000000..df87d8a --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@aws-sdk/client-dynamodb/dist-types/waiters/waitForTableNotExists.d.ts @@ -0,0 +1,14 @@ +import { WaiterConfiguration, WaiterResult } from "@smithy/util-waiter"; +import { DescribeTableCommandInput } from "../commands/DescribeTableCommand"; +import { DynamoDBClient } from "../DynamoDBClient"; +/** + * + * @deprecated Use waitUntilTableNotExists instead. waitForTableNotExists does not throw error in non-success cases. + */ +export declare const waitForTableNotExists: (params: WaiterConfiguration, input: DescribeTableCommandInput) => Promise; +/** + * + * @param params - Waiter configuration options. 
+ * @param input - The input to DescribeTableCommand for polling. + */ +export declare const waitUntilTableNotExists: (params: WaiterConfiguration, input: DescribeTableCommandInput) => Promise; diff --git a/amplify/functions/deleteDocument/node_modules/@aws-sdk/client-dynamodb/package.json b/amplify/functions/deleteDocument/node_modules/@aws-sdk/client-dynamodb/package.json new file mode 100644 index 0000000..9377573 --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@aws-sdk/client-dynamodb/package.json @@ -0,0 +1,103 @@ +{ + "name": "@aws-sdk/client-dynamodb", + "description": "AWS SDK for JavaScript Dynamodb Client for Node.js, Browser and React Native", + "version": "3.803.0", + "scripts": { + "build": "concurrently 'yarn:build:cjs' 'yarn:build:es' 'yarn:build:types'", + "build:cjs": "node ../../scripts/compilation/inline client-dynamodb", + "build:es": "tsc -p tsconfig.es.json", + "build:include:deps": "lerna run --scope $npm_package_name --include-dependencies build", + "build:types": "tsc -p tsconfig.types.json", + "build:types:downlevel": "downlevel-dts dist-types dist-types/ts3.4", + "clean": "rimraf ./dist-* && rimraf *.tsbuildinfo", + "extract:docs": "api-extractor run --local", + "generate:client": "node ../../scripts/generate-clients/single-service --solo dynamodb" + }, + "main": "./dist-cjs/index.js", + "types": "./dist-types/index.d.ts", + "module": "./dist-es/index.js", + "sideEffects": false, + "dependencies": { + "@aws-crypto/sha256-browser": "5.2.0", + "@aws-crypto/sha256-js": "5.2.0", + "@aws-sdk/core": "3.799.0", + "@aws-sdk/credential-provider-node": "3.803.0", + "@aws-sdk/middleware-endpoint-discovery": "3.775.0", + "@aws-sdk/middleware-host-header": "3.775.0", + "@aws-sdk/middleware-logger": "3.775.0", + "@aws-sdk/middleware-recursion-detection": "3.775.0", + "@aws-sdk/middleware-user-agent": "3.799.0", + "@aws-sdk/region-config-resolver": "3.775.0", + "@aws-sdk/types": "3.775.0", + "@aws-sdk/util-endpoints": "3.787.0", + 
"@aws-sdk/util-user-agent-browser": "3.775.0", + "@aws-sdk/util-user-agent-node": "3.799.0", + "@smithy/config-resolver": "^4.1.0", + "@smithy/core": "^3.3.0", + "@smithy/fetch-http-handler": "^5.0.2", + "@smithy/hash-node": "^4.0.2", + "@smithy/invalid-dependency": "^4.0.2", + "@smithy/middleware-content-length": "^4.0.2", + "@smithy/middleware-endpoint": "^4.1.1", + "@smithy/middleware-retry": "^4.1.2", + "@smithy/middleware-serde": "^4.0.3", + "@smithy/middleware-stack": "^4.0.2", + "@smithy/node-config-provider": "^4.0.2", + "@smithy/node-http-handler": "^4.0.4", + "@smithy/protocol-http": "^5.1.0", + "@smithy/smithy-client": "^4.2.1", + "@smithy/types": "^4.2.0", + "@smithy/url-parser": "^4.0.2", + "@smithy/util-base64": "^4.0.0", + "@smithy/util-body-length-browser": "^4.0.0", + "@smithy/util-body-length-node": "^4.0.0", + "@smithy/util-defaults-mode-browser": "^4.0.9", + "@smithy/util-defaults-mode-node": "^4.0.9", + "@smithy/util-endpoints": "^3.0.2", + "@smithy/util-middleware": "^4.0.2", + "@smithy/util-retry": "^4.0.3", + "@smithy/util-utf8": "^4.0.0", + "@smithy/util-waiter": "^4.0.3", + "@types/uuid": "^9.0.1", + "tslib": "^2.6.2", + "uuid": "^9.0.1" + }, + "devDependencies": { + "@tsconfig/node18": "18.2.4", + "@types/node": "^18.19.69", + "concurrently": "7.0.0", + "downlevel-dts": "0.10.1", + "rimraf": "3.0.2", + "typescript": "~5.2.2" + }, + "engines": { + "node": ">=18.0.0" + }, + "typesVersions": { + "<4.0": { + "dist-types/*": [ + "dist-types/ts3.4/*" + ] + } + }, + "files": [ + "dist-*/**" + ], + "author": { + "name": "AWS SDK for JavaScript Team", + "url": "https://aws.amazon.com/javascript/" + }, + "license": "Apache-2.0", + "browser": { + "./dist-es/runtimeConfig": "./dist-es/runtimeConfig.browser" + }, + "react-native": { + "./dist-es/runtimeConfig": "./dist-es/runtimeConfig.native" + }, + "homepage": "https://github.com/aws/aws-sdk-js-v3/tree/main/clients/client-dynamodb", + "repository": { + "type": "git", + "url": 
"https://github.com/aws/aws-sdk-js-v3.git", + "directory": "clients/client-dynamodb" + } +} diff --git a/amplify/functions/deleteDocument/node_modules/@aws-sdk/client-sso/LICENSE b/amplify/functions/deleteDocument/node_modules/@aws-sdk/client-sso/LICENSE new file mode 100644 index 0000000..dd65ae0 --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@aws-sdk/client-sso/LICENSE @@ -0,0 +1,201 @@ + Apache License + Version 2.0, January 2004 + http://www.apache.org/licenses/ + + TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION + + 1. Definitions. + + "License" shall mean the terms and conditions for use, reproduction, + and distribution as defined by Sections 1 through 9 of this document. + + "Licensor" shall mean the copyright owner or entity authorized by + the copyright owner that is granting the License. + + "Legal Entity" shall mean the union of the acting entity and all + other entities that control, are controlled by, or are under common + control with that entity. For the purposes of this definition, + "control" means (i) the power, direct or indirect, to cause the + direction or management of such entity, whether by contract or + otherwise, or (ii) ownership of fifty percent (50%) or more of the + outstanding shares, or (iii) beneficial ownership of such entity. + + "You" (or "Your") shall mean an individual or Legal Entity + exercising permissions granted by this License. + + "Source" form shall mean the preferred form for making modifications, + including but not limited to software source code, documentation + source, and configuration files. + + "Object" form shall mean any form resulting from mechanical + transformation or translation of a Source form, including but + not limited to compiled object code, generated documentation, + and conversions to other media types. 
+ + "Work" shall mean the work of authorship, whether in Source or + Object form, made available under the License, as indicated by a + copyright notice that is included in or attached to the work + (an example is provided in the Appendix below). + + "Derivative Works" shall mean any work, whether in Source or Object + form, that is based on (or derived from) the Work and for which the + editorial revisions, annotations, elaborations, or other modifications + represent, as a whole, an original work of authorship. For the purposes + of this License, Derivative Works shall not include works that remain + separable from, or merely link (or bind by name) to the interfaces of, + the Work and Derivative Works thereof. + + "Contribution" shall mean any work of authorship, including + the original version of the Work and any modifications or additions + to that Work or Derivative Works thereof, that is intentionally + submitted to Licensor for inclusion in the Work by the copyright owner + or by an individual or Legal Entity authorized to submit on behalf of + the copyright owner. For the purposes of this definition, "submitted" + means any form of electronic, verbal, or written communication sent + to the Licensor or its representatives, including but not limited to + communication on electronic mailing lists, source code control systems, + and issue tracking systems that are managed by, or on behalf of, the + Licensor for the purpose of discussing and improving the Work, but + excluding communication that is conspicuously marked or otherwise + designated in writing by the copyright owner as "Not a Contribution." + + "Contributor" shall mean Licensor and any individual or Legal Entity + on behalf of whom a Contribution has been received by Licensor and + subsequently incorporated within the Work. + + 2. Grant of Copyright License. 
Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + copyright license to reproduce, prepare Derivative Works of, + publicly display, publicly perform, sublicense, and distribute the + Work and such Derivative Works in Source or Object form. + + 3. Grant of Patent License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + (except as stated in this section) patent license to make, have made, + use, offer to sell, sell, import, and otherwise transfer the Work, + where such license applies only to those patent claims licensable + by such Contributor that are necessarily infringed by their + Contribution(s) alone or by combination of their Contribution(s) + with the Work to which such Contribution(s) was submitted. If You + institute patent litigation against any entity (including a + cross-claim or counterclaim in a lawsuit) alleging that the Work + or a Contribution incorporated within the Work constitutes direct + or contributory patent infringement, then any patent licenses + granted to You under this License for that Work shall terminate + as of the date such litigation is filed. + + 4. Redistribution. 
You may reproduce and distribute copies of the + Work or Derivative Works thereof in any medium, with or without + modifications, and in Source or Object form, provided that You + meet the following conditions: + + (a) You must give any other recipients of the Work or + Derivative Works a copy of this License; and + + (b) You must cause any modified files to carry prominent notices + stating that You changed the files; and + + (c) You must retain, in the Source form of any Derivative Works + that You distribute, all copyright, patent, trademark, and + attribution notices from the Source form of the Work, + excluding those notices that do not pertain to any part of + the Derivative Works; and + + (d) If the Work includes a "NOTICE" text file as part of its + distribution, then any Derivative Works that You distribute must + include a readable copy of the attribution notices contained + within such NOTICE file, excluding those notices that do not + pertain to any part of the Derivative Works, in at least one + of the following places: within a NOTICE text file distributed + as part of the Derivative Works; within the Source form or + documentation, if provided along with the Derivative Works; or, + within a display generated by the Derivative Works, if and + wherever such third-party notices normally appear. The contents + of the NOTICE file are for informational purposes only and + do not modify the License. You may add Your own attribution + notices within Derivative Works that You distribute, alongside + or as an addendum to the NOTICE text from the Work, provided + that such additional attribution notices cannot be construed + as modifying the License. 
+ + You may add Your own copyright statement to Your modifications and + may provide additional or different license terms and conditions + for use, reproduction, or distribution of Your modifications, or + for any such Derivative Works as a whole, provided Your use, + reproduction, and distribution of the Work otherwise complies with + the conditions stated in this License. + + 5. Submission of Contributions. Unless You explicitly state otherwise, + any Contribution intentionally submitted for inclusion in the Work + by You to the Licensor shall be under the terms and conditions of + this License, without any additional terms or conditions. + Notwithstanding the above, nothing herein shall supersede or modify + the terms of any separate license agreement you may have executed + with Licensor regarding such Contributions. + + 6. Trademarks. This License does not grant permission to use the trade + names, trademarks, service marks, or product names of the Licensor, + except as required for reasonable and customary use in describing the + origin of the Work and reproducing the content of the NOTICE file. + + 7. Disclaimer of Warranty. Unless required by applicable law or + agreed to in writing, Licensor provides the Work (and each + Contributor provides its Contributions) on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or + implied, including, without limitation, any warranties or conditions + of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A + PARTICULAR PURPOSE. You are solely responsible for determining the + appropriateness of using or redistributing the Work and assume any + risks associated with Your exercise of permissions under this License. + + 8. Limitation of Liability. 
In no event and under no legal theory, + whether in tort (including negligence), contract, or otherwise, + unless required by applicable law (such as deliberate and grossly + negligent acts) or agreed to in writing, shall any Contributor be + liable to You for damages, including any direct, indirect, special, + incidental, or consequential damages of any character arising as a + result of this License or out of the use or inability to use the + Work (including but not limited to damages for loss of goodwill, + work stoppage, computer failure or malfunction, or any and all + other commercial damages or losses), even if such Contributor + has been advised of the possibility of such damages. + + 9. Accepting Warranty or Additional Liability. While redistributing + the Work or Derivative Works thereof, You may choose to offer, + and charge a fee for, acceptance of support, warranty, indemnity, + or other liability obligations and/or rights consistent with this + License. However, in accepting such obligations, You may act only + on Your own behalf and on Your sole responsibility, not on behalf + of any other Contributor, and only if You agree to indemnify, + defend, and hold each Contributor harmless for any liability + incurred by, or claims asserted against, such Contributor by reason + of your accepting any such warranty or additional liability. + + END OF TERMS AND CONDITIONS + + APPENDIX: How to apply the Apache License to your work. + + To apply the Apache License to your work, attach the following + boilerplate notice, with the fields enclosed by brackets "{}" + replaced with your own identifying information. (Don't include + the brackets!) The text should be enclosed in the appropriate + comment syntax for the file format. We also recommend that a + file or class name and description of purpose be included on the + same "printed page" as the copyright notice for easier + identification within third-party archives. + + Copyright 2018-2020 Amazon.com, Inc. 
or its affiliates. All Rights Reserved. + + Licensed under the Apache License, Version 2.0 (the "License"); + you may not use this file except in compliance with the License. + You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + + Unless required by applicable law or agreed to in writing, software + distributed under the License is distributed on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + See the License for the specific language governing permissions and + limitations under the License. diff --git a/amplify/functions/deleteDocument/node_modules/@aws-sdk/client-sso/README.md b/amplify/functions/deleteDocument/node_modules/@aws-sdk/client-sso/README.md new file mode 100644 index 0000000..09d5fe3 --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@aws-sdk/client-sso/README.md @@ -0,0 +1,252 @@ + + +# @aws-sdk/client-sso + +## Description + +AWS SDK for JavaScript SSO Client for Node.js, Browser and React Native. + +

AWS IAM Identity Center (successor to AWS Single Sign-On) Portal is a web service that makes it easy for you to assign user access to +IAM Identity Center resources such as the AWS access portal. Users can get AWS account applications and roles +assigned to them and get federated into the application.

+ +

Although AWS Single Sign-On was renamed, the sso and +identitystore API namespaces will continue to retain their original name for +backward compatibility purposes. For more information, see IAM Identity Center rename.

+
+

This reference guide describes the IAM Identity Center Portal operations that you can call +programatically and includes detailed information on data types and errors.

+ +

AWS provides SDKs that consist of libraries and sample code for various programming +languages and platforms, such as Java, Ruby, .Net, iOS, or Android. The SDKs provide a +convenient way to create programmatic access to IAM Identity Center and other AWS services. For more +information about the AWS SDKs, including how to download and install them, see Tools for Amazon Web Services.

+
+ +## Installing + +To install this package, simply type add or install @aws-sdk/client-sso +using your favorite package manager: + +- `npm install @aws-sdk/client-sso` +- `yarn add @aws-sdk/client-sso` +- `pnpm add @aws-sdk/client-sso` + +## Getting Started + +### Import + +The AWS SDK is modulized by clients and commands. +To send a request, you only need to import the `SSOClient` and +the commands you need, for example `ListAccountsCommand`: + +```js +// ES5 example +const { SSOClient, ListAccountsCommand } = require("@aws-sdk/client-sso"); +``` + +```ts +// ES6+ example +import { SSOClient, ListAccountsCommand } from "@aws-sdk/client-sso"; +``` + +### Usage + +To send a request, you: + +- Initiate client with configuration (e.g. credentials, region). +- Initiate command with input parameters. +- Call `send` operation on client with command object as input. +- If you are using a custom http handler, you may call `destroy()` to close open connections. + +```js +// a client can be shared by different commands. +const client = new SSOClient({ region: "REGION" }); + +const params = { + /** input parameters */ +}; +const command = new ListAccountsCommand(params); +``` + +#### Async/await + +We recommend using [await](https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Operators/await) +operator to wait for the promise returned by send operation as follows: + +```js +// async/await. +try { + const data = await client.send(command); + // process data. +} catch (error) { + // error handling. +} finally { + // finally. +} +``` + +Async-await is clean, concise, intuitive, easy to debug and has better error handling +as compared to using Promise chains or callbacks. + +#### Promises + +You can also use [Promise chaining](https://developer.mozilla.org/en-US/docs/Web/JavaScript/Guide/Using_promises#chaining) +to execute send operation. + +```js +client.send(command).then( + (data) => { + // process data. + }, + (error) => { + // error handling. 
+ } +); +``` + +Promises can also be called using `.catch()` and `.finally()` as follows: + +```js +client + .send(command) + .then((data) => { + // process data. + }) + .catch((error) => { + // error handling. + }) + .finally(() => { + // finally. + }); +``` + +#### Callbacks + +We do not recommend using callbacks because of [callback hell](http://callbackhell.com/), +but they are supported by the send operation. + +```js +// callbacks. +client.send(command, (err, data) => { + // process err and data. +}); +``` + +#### v2 compatible style + +The client can also send requests using v2 compatible style. +However, it results in a bigger bundle size and may be dropped in next major version. More details in the blog post +on [modular packages in AWS SDK for JavaScript](https://aws.amazon.com/blogs/developer/modular-packages-in-aws-sdk-for-javascript/) + +```ts +import * as AWS from "@aws-sdk/client-sso"; +const client = new AWS.SSO({ region: "REGION" }); + +// async/await. +try { + const data = await client.listAccounts(params); + // process data. +} catch (error) { + // error handling. +} + +// Promises. +client + .listAccounts(params) + .then((data) => { + // process data. + }) + .catch((error) => { + // error handling. + }); + +// callbacks. +client.listAccounts(params, (err, data) => { + // process err and data. +}); +``` + +### Troubleshooting + +When the service returns an exception, the error will include the exception information, +as well as response metadata (e.g. request id). + +```js +try { + const data = await client.send(command); + // process data. +} catch (error) { + const { requestId, cfId, extendedRequestId } = error.$metadata; + console.log({ requestId, cfId, extendedRequestId }); + /** + * The keys within exceptions are also parsed. 
+ * You can access them by specifying exception names: + * if (error.name === 'SomeServiceException') { + * const value = error.specialKeyInException; + * } + */ +} +``` + +## Getting Help + +Please use these community resources for getting help. +We use the GitHub issues for tracking bugs and feature requests, but have limited bandwidth to address them. + +- Visit [Developer Guide](https://docs.aws.amazon.com/sdk-for-javascript/v3/developer-guide/welcome.html) + or [API Reference](https://docs.aws.amazon.com/AWSJavaScriptSDK/v3/latest/index.html). +- Check out the blog posts tagged with [`aws-sdk-js`](https://aws.amazon.com/blogs/developer/tag/aws-sdk-js/) + on AWS Developer Blog. +- Ask a question on [StackOverflow](https://stackoverflow.com/questions/tagged/aws-sdk-js) and tag it with `aws-sdk-js`. +- Join the AWS JavaScript community on [gitter](https://gitter.im/aws/aws-sdk-js-v3). +- If it turns out that you may have found a bug, please [open an issue](https://github.com/aws/aws-sdk-js-v3/issues/new/choose). + +To test your universal JavaScript code in Node.js, browser and react-native environments, +visit our [code samples repo](https://github.com/aws-samples/aws-sdk-js-tests). + +## Contributing + +This client code is generated automatically. Any modifications will be overwritten the next time the `@aws-sdk/client-sso` package is updated. +To contribute to client you can check our [generate clients scripts](https://github.com/aws/aws-sdk-js-v3/tree/main/scripts/generate-clients). + +## License + +This SDK is distributed under the +[Apache License, Version 2.0](http://www.apache.org/licenses/LICENSE-2.0), +see LICENSE for more information. + +## Client Commands (Operations List) + +
+ +GetRoleCredentials + + +[Command API Reference](https://docs.aws.amazon.com/AWSJavaScriptSDK/v3/latest/client/sso/command/GetRoleCredentialsCommand/) / [Input](https://docs.aws.amazon.com/AWSJavaScriptSDK/v3/latest/Package/-aws-sdk-client-sso/Interface/GetRoleCredentialsCommandInput/) / [Output](https://docs.aws.amazon.com/AWSJavaScriptSDK/v3/latest/Package/-aws-sdk-client-sso/Interface/GetRoleCredentialsCommandOutput/) + +
+
+ +ListAccountRoles + + +[Command API Reference](https://docs.aws.amazon.com/AWSJavaScriptSDK/v3/latest/client/sso/command/ListAccountRolesCommand/) / [Input](https://docs.aws.amazon.com/AWSJavaScriptSDK/v3/latest/Package/-aws-sdk-client-sso/Interface/ListAccountRolesCommandInput/) / [Output](https://docs.aws.amazon.com/AWSJavaScriptSDK/v3/latest/Package/-aws-sdk-client-sso/Interface/ListAccountRolesCommandOutput/) + +
+
+ +ListAccounts + + +[Command API Reference](https://docs.aws.amazon.com/AWSJavaScriptSDK/v3/latest/client/sso/command/ListAccountsCommand/) / [Input](https://docs.aws.amazon.com/AWSJavaScriptSDK/v3/latest/Package/-aws-sdk-client-sso/Interface/ListAccountsCommandInput/) / [Output](https://docs.aws.amazon.com/AWSJavaScriptSDK/v3/latest/Package/-aws-sdk-client-sso/Interface/ListAccountsCommandOutput/) + +
+
+ +Logout + + +[Command API Reference](https://docs.aws.amazon.com/AWSJavaScriptSDK/v3/latest/client/sso/command/LogoutCommand/) / [Input](https://docs.aws.amazon.com/AWSJavaScriptSDK/v3/latest/Package/-aws-sdk-client-sso/Interface/LogoutCommandInput/) / [Output](https://docs.aws.amazon.com/AWSJavaScriptSDK/v3/latest/Package/-aws-sdk-client-sso/Interface/LogoutCommandOutput/) + +
diff --git a/amplify/functions/deleteDocument/node_modules/@aws-sdk/client-sso/dist-cjs/auth/httpAuthSchemeProvider.js b/amplify/functions/deleteDocument/node_modules/@aws-sdk/client-sso/dist-cjs/auth/httpAuthSchemeProvider.js new file mode 100644 index 0000000..2c256ee --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@aws-sdk/client-sso/dist-cjs/auth/httpAuthSchemeProvider.js @@ -0,0 +1,68 @@ +"use strict"; +Object.defineProperty(exports, "__esModule", { value: true }); +exports.resolveHttpAuthSchemeConfig = exports.defaultSSOHttpAuthSchemeProvider = exports.defaultSSOHttpAuthSchemeParametersProvider = void 0; +const core_1 = require("@aws-sdk/core"); +const util_middleware_1 = require("@smithy/util-middleware"); +const defaultSSOHttpAuthSchemeParametersProvider = async (config, context, input) => { + return { + operation: (0, util_middleware_1.getSmithyContext)(context).operation, + region: (await (0, util_middleware_1.normalizeProvider)(config.region)()) || + (() => { + throw new Error("expected `region` to be configured for `aws.auth#sigv4`"); + })(), + }; +}; +exports.defaultSSOHttpAuthSchemeParametersProvider = defaultSSOHttpAuthSchemeParametersProvider; +function createAwsAuthSigv4HttpAuthOption(authParameters) { + return { + schemeId: "aws.auth#sigv4", + signingProperties: { + name: "awsssoportal", + region: authParameters.region, + }, + propertiesExtractor: (config, context) => ({ + signingProperties: { + config, + context, + }, + }), + }; +} +function createSmithyApiNoAuthHttpAuthOption(authParameters) { + return { + schemeId: "smithy.api#noAuth", + }; +} +const defaultSSOHttpAuthSchemeProvider = (authParameters) => { + const options = []; + switch (authParameters.operation) { + case "GetRoleCredentials": { + options.push(createSmithyApiNoAuthHttpAuthOption(authParameters)); + break; + } + case "ListAccountRoles": { + options.push(createSmithyApiNoAuthHttpAuthOption(authParameters)); + break; + } + case "ListAccounts": { + 
options.push(createSmithyApiNoAuthHttpAuthOption(authParameters)); + break; + } + case "Logout": { + options.push(createSmithyApiNoAuthHttpAuthOption(authParameters)); + break; + } + default: { + options.push(createAwsAuthSigv4HttpAuthOption(authParameters)); + } + } + return options; +}; +exports.defaultSSOHttpAuthSchemeProvider = defaultSSOHttpAuthSchemeProvider; +const resolveHttpAuthSchemeConfig = (config) => { + const config_0 = (0, core_1.resolveAwsSdkSigV4Config)(config); + return Object.assign(config_0, { + authSchemePreference: (0, util_middleware_1.normalizeProvider)(config.authSchemePreference ?? []), + }); +}; +exports.resolveHttpAuthSchemeConfig = resolveHttpAuthSchemeConfig; diff --git a/amplify/functions/deleteDocument/node_modules/@aws-sdk/client-sso/dist-cjs/endpoint/endpointResolver.js b/amplify/functions/deleteDocument/node_modules/@aws-sdk/client-sso/dist-cjs/endpoint/endpointResolver.js new file mode 100644 index 0000000..7258a35 --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@aws-sdk/client-sso/dist-cjs/endpoint/endpointResolver.js @@ -0,0 +1,18 @@ +"use strict"; +Object.defineProperty(exports, "__esModule", { value: true }); +exports.defaultEndpointResolver = void 0; +const util_endpoints_1 = require("@aws-sdk/util-endpoints"); +const util_endpoints_2 = require("@smithy/util-endpoints"); +const ruleset_1 = require("./ruleset"); +const cache = new util_endpoints_2.EndpointCache({ + size: 50, + params: ["Endpoint", "Region", "UseDualStack", "UseFIPS"], +}); +const defaultEndpointResolver = (endpointParams, context = {}) => { + return cache.get(endpointParams, () => (0, util_endpoints_2.resolveEndpoint)(ruleset_1.ruleSet, { + endpointParams: endpointParams, + logger: context.logger, + })); +}; +exports.defaultEndpointResolver = defaultEndpointResolver; +util_endpoints_2.customEndpointFunctions.aws = util_endpoints_1.awsEndpointFunctions; diff --git 
a/amplify/functions/deleteDocument/node_modules/@aws-sdk/client-sso/dist-cjs/endpoint/ruleset.js b/amplify/functions/deleteDocument/node_modules/@aws-sdk/client-sso/dist-cjs/endpoint/ruleset.js new file mode 100644 index 0000000..4321ed9 --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@aws-sdk/client-sso/dist-cjs/endpoint/ruleset.js @@ -0,0 +1,7 @@ +"use strict"; +Object.defineProperty(exports, "__esModule", { value: true }); +exports.ruleSet = void 0; +const u = "required", v = "fn", w = "argv", x = "ref"; +const a = true, b = "isSet", c = "booleanEquals", d = "error", e = "endpoint", f = "tree", g = "PartitionResult", h = "getAttr", i = { [u]: false, "type": "String" }, j = { [u]: true, "default": false, "type": "Boolean" }, k = { [x]: "Endpoint" }, l = { [v]: c, [w]: [{ [x]: "UseFIPS" }, true] }, m = { [v]: c, [w]: [{ [x]: "UseDualStack" }, true] }, n = {}, o = { [v]: h, [w]: [{ [x]: g }, "supportsFIPS"] }, p = { [x]: g }, q = { [v]: c, [w]: [true, { [v]: h, [w]: [p, "supportsDualStack"] }] }, r = [l], s = [m], t = [{ [x]: "Region" }]; +const _data = { version: "1.0", parameters: { Region: i, UseDualStack: j, UseFIPS: j, Endpoint: i }, rules: [{ conditions: [{ [v]: b, [w]: [k] }], rules: [{ conditions: r, error: "Invalid Configuration: FIPS and custom endpoint are not supported", type: d }, { conditions: s, error: "Invalid Configuration: Dualstack and custom endpoint are not supported", type: d }, { endpoint: { url: k, properties: n, headers: n }, type: e }], type: f }, { conditions: [{ [v]: b, [w]: t }], rules: [{ conditions: [{ [v]: "aws.partition", [w]: t, assign: g }], rules: [{ conditions: [l, m], rules: [{ conditions: [{ [v]: c, [w]: [a, o] }, q], rules: [{ endpoint: { url: "https://portal.sso-fips.{Region}.{PartitionResult#dualStackDnsSuffix}", properties: n, headers: n }, type: e }], type: f }, { error: "FIPS and DualStack are enabled, but this partition does not support one or both", type: d }], type: f }, { conditions: r, rules: [{ 
conditions: [{ [v]: c, [w]: [o, a] }], rules: [{ conditions: [{ [v]: "stringEquals", [w]: [{ [v]: h, [w]: [p, "name"] }, "aws-us-gov"] }], endpoint: { url: "https://portal.sso.{Region}.amazonaws.com", properties: n, headers: n }, type: e }, { endpoint: { url: "https://portal.sso-fips.{Region}.{PartitionResult#dnsSuffix}", properties: n, headers: n }, type: e }], type: f }, { error: "FIPS is enabled but this partition does not support FIPS", type: d }], type: f }, { conditions: s, rules: [{ conditions: [q], rules: [{ endpoint: { url: "https://portal.sso.{Region}.{PartitionResult#dualStackDnsSuffix}", properties: n, headers: n }, type: e }], type: f }, { error: "DualStack is enabled but this partition does not support DualStack", type: d }], type: f }, { endpoint: { url: "https://portal.sso.{Region}.{PartitionResult#dnsSuffix}", properties: n, headers: n }, type: e }], type: f }], type: f }, { error: "Invalid Configuration: Missing Region", type: d }] }; +exports.ruleSet = _data; diff --git a/amplify/functions/deleteDocument/node_modules/@aws-sdk/client-sso/dist-cjs/index.js b/amplify/functions/deleteDocument/node_modules/@aws-sdk/client-sso/dist-cjs/index.js new file mode 100644 index 0000000..8383b07 --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@aws-sdk/client-sso/dist-cjs/index.js @@ -0,0 +1,625 @@ +"use strict"; +var __defProp = Object.defineProperty; +var __getOwnPropDesc = Object.getOwnPropertyDescriptor; +var __getOwnPropNames = Object.getOwnPropertyNames; +var __hasOwnProp = Object.prototype.hasOwnProperty; +var __name = (target, value) => __defProp(target, "name", { value, configurable: true }); +var __export = (target, all) => { + for (var name in all) + __defProp(target, name, { get: all[name], enumerable: true }); +}; +var __copyProps = (to, from, except, desc) => { + if (from && typeof from === "object" || typeof from === "function") { + for (let key of __getOwnPropNames(from)) + if (!__hasOwnProp.call(to, key) && key !== except) + 
__defProp(to, key, { get: () => from[key], enumerable: !(desc = __getOwnPropDesc(from, key)) || desc.enumerable }); + } + return to; +}; +var __toCommonJS = (mod) => __copyProps(__defProp({}, "__esModule", { value: true }), mod); + +// src/index.ts +var index_exports = {}; +__export(index_exports, { + GetRoleCredentialsCommand: () => GetRoleCredentialsCommand, + GetRoleCredentialsRequestFilterSensitiveLog: () => GetRoleCredentialsRequestFilterSensitiveLog, + GetRoleCredentialsResponseFilterSensitiveLog: () => GetRoleCredentialsResponseFilterSensitiveLog, + InvalidRequestException: () => InvalidRequestException, + ListAccountRolesCommand: () => ListAccountRolesCommand, + ListAccountRolesRequestFilterSensitiveLog: () => ListAccountRolesRequestFilterSensitiveLog, + ListAccountsCommand: () => ListAccountsCommand, + ListAccountsRequestFilterSensitiveLog: () => ListAccountsRequestFilterSensitiveLog, + LogoutCommand: () => LogoutCommand, + LogoutRequestFilterSensitiveLog: () => LogoutRequestFilterSensitiveLog, + ResourceNotFoundException: () => ResourceNotFoundException, + RoleCredentialsFilterSensitiveLog: () => RoleCredentialsFilterSensitiveLog, + SSO: () => SSO, + SSOClient: () => SSOClient, + SSOServiceException: () => SSOServiceException, + TooManyRequestsException: () => TooManyRequestsException, + UnauthorizedException: () => UnauthorizedException, + __Client: () => import_smithy_client.Client, + paginateListAccountRoles: () => paginateListAccountRoles, + paginateListAccounts: () => paginateListAccounts +}); +module.exports = __toCommonJS(index_exports); + +// src/SSOClient.ts +var import_middleware_host_header = require("@aws-sdk/middleware-host-header"); +var import_middleware_logger = require("@aws-sdk/middleware-logger"); +var import_middleware_recursion_detection = require("@aws-sdk/middleware-recursion-detection"); +var import_middleware_user_agent = require("@aws-sdk/middleware-user-agent"); +var import_config_resolver = require("@smithy/config-resolver"); 
+var import_core = require("@smithy/core"); +var import_middleware_content_length = require("@smithy/middleware-content-length"); +var import_middleware_endpoint = require("@smithy/middleware-endpoint"); +var import_middleware_retry = require("@smithy/middleware-retry"); + +var import_httpAuthSchemeProvider = require("./auth/httpAuthSchemeProvider"); + +// src/endpoint/EndpointParameters.ts +var resolveClientEndpointParameters = /* @__PURE__ */ __name((options) => { + return Object.assign(options, { + useDualstackEndpoint: options.useDualstackEndpoint ?? false, + useFipsEndpoint: options.useFipsEndpoint ?? false, + defaultSigningName: "awsssoportal" + }); +}, "resolveClientEndpointParameters"); +var commonParams = { + UseFIPS: { type: "builtInParams", name: "useFipsEndpoint" }, + Endpoint: { type: "builtInParams", name: "endpoint" }, + Region: { type: "builtInParams", name: "region" }, + UseDualStack: { type: "builtInParams", name: "useDualstackEndpoint" } +}; + +// src/SSOClient.ts +var import_runtimeConfig = require("././runtimeConfig"); + +// src/runtimeExtensions.ts +var import_region_config_resolver = require("@aws-sdk/region-config-resolver"); +var import_protocol_http = require("@smithy/protocol-http"); +var import_smithy_client = require("@smithy/smithy-client"); + +// src/auth/httpAuthExtensionConfiguration.ts +var getHttpAuthExtensionConfiguration = /* @__PURE__ */ __name((runtimeConfig) => { + const _httpAuthSchemes = runtimeConfig.httpAuthSchemes; + let _httpAuthSchemeProvider = runtimeConfig.httpAuthSchemeProvider; + let _credentials = runtimeConfig.credentials; + return { + setHttpAuthScheme(httpAuthScheme) { + const index = _httpAuthSchemes.findIndex((scheme) => scheme.schemeId === httpAuthScheme.schemeId); + if (index === -1) { + _httpAuthSchemes.push(httpAuthScheme); + } else { + _httpAuthSchemes.splice(index, 1, httpAuthScheme); + } + }, + httpAuthSchemes() { + return _httpAuthSchemes; + }, + setHttpAuthSchemeProvider(httpAuthSchemeProvider) { + 
_httpAuthSchemeProvider = httpAuthSchemeProvider; + }, + httpAuthSchemeProvider() { + return _httpAuthSchemeProvider; + }, + setCredentials(credentials) { + _credentials = credentials; + }, + credentials() { + return _credentials; + } + }; +}, "getHttpAuthExtensionConfiguration"); +var resolveHttpAuthRuntimeConfig = /* @__PURE__ */ __name((config) => { + return { + httpAuthSchemes: config.httpAuthSchemes(), + httpAuthSchemeProvider: config.httpAuthSchemeProvider(), + credentials: config.credentials() + }; +}, "resolveHttpAuthRuntimeConfig"); + +// src/runtimeExtensions.ts +var resolveRuntimeExtensions = /* @__PURE__ */ __name((runtimeConfig, extensions) => { + const extensionConfiguration = Object.assign( + (0, import_region_config_resolver.getAwsRegionExtensionConfiguration)(runtimeConfig), + (0, import_smithy_client.getDefaultExtensionConfiguration)(runtimeConfig), + (0, import_protocol_http.getHttpHandlerExtensionConfiguration)(runtimeConfig), + getHttpAuthExtensionConfiguration(runtimeConfig) + ); + extensions.forEach((extension) => extension.configure(extensionConfiguration)); + return Object.assign( + runtimeConfig, + (0, import_region_config_resolver.resolveAwsRegionExtensionConfiguration)(extensionConfiguration), + (0, import_smithy_client.resolveDefaultRuntimeConfig)(extensionConfiguration), + (0, import_protocol_http.resolveHttpHandlerRuntimeConfig)(extensionConfiguration), + resolveHttpAuthRuntimeConfig(extensionConfiguration) + ); +}, "resolveRuntimeExtensions"); + +// src/SSOClient.ts +var SSOClient = class extends import_smithy_client.Client { + static { + __name(this, "SSOClient"); + } + /** + * The resolved configuration of SSOClient class. This is resolved and normalized from the {@link SSOClientConfig | constructor configuration interface}. 
+ */ + config; + constructor(...[configuration]) { + const _config_0 = (0, import_runtimeConfig.getRuntimeConfig)(configuration || {}); + super(_config_0); + this.initConfig = _config_0; + const _config_1 = resolveClientEndpointParameters(_config_0); + const _config_2 = (0, import_middleware_user_agent.resolveUserAgentConfig)(_config_1); + const _config_3 = (0, import_middleware_retry.resolveRetryConfig)(_config_2); + const _config_4 = (0, import_config_resolver.resolveRegionConfig)(_config_3); + const _config_5 = (0, import_middleware_host_header.resolveHostHeaderConfig)(_config_4); + const _config_6 = (0, import_middleware_endpoint.resolveEndpointConfig)(_config_5); + const _config_7 = (0, import_httpAuthSchemeProvider.resolveHttpAuthSchemeConfig)(_config_6); + const _config_8 = resolveRuntimeExtensions(_config_7, configuration?.extensions || []); + this.config = _config_8; + this.middlewareStack.use((0, import_middleware_user_agent.getUserAgentPlugin)(this.config)); + this.middlewareStack.use((0, import_middleware_retry.getRetryPlugin)(this.config)); + this.middlewareStack.use((0, import_middleware_content_length.getContentLengthPlugin)(this.config)); + this.middlewareStack.use((0, import_middleware_host_header.getHostHeaderPlugin)(this.config)); + this.middlewareStack.use((0, import_middleware_logger.getLoggerPlugin)(this.config)); + this.middlewareStack.use((0, import_middleware_recursion_detection.getRecursionDetectionPlugin)(this.config)); + this.middlewareStack.use( + (0, import_core.getHttpAuthSchemeEndpointRuleSetPlugin)(this.config, { + httpAuthSchemeParametersProvider: import_httpAuthSchemeProvider.defaultSSOHttpAuthSchemeParametersProvider, + identityProviderConfigProvider: /* @__PURE__ */ __name(async (config) => new import_core.DefaultIdentityProviderConfig({ + "aws.auth#sigv4": config.credentials + }), "identityProviderConfigProvider") + }) + ); + this.middlewareStack.use((0, import_core.getHttpSigningPlugin)(this.config)); + } + /** + * Destroy 
underlying resources, like sockets. It's usually not necessary to do this. + * However in Node.js, it's best to explicitly shut down the client's agent when it is no longer needed. + * Otherwise, sockets might stay open for quite a long time before the server terminates them. + */ + destroy() { + super.destroy(); + } +}; + +// src/SSO.ts + + +// src/commands/GetRoleCredentialsCommand.ts + +var import_middleware_serde = require("@smithy/middleware-serde"); + + +// src/models/models_0.ts + + +// src/models/SSOServiceException.ts + +var SSOServiceException = class _SSOServiceException extends import_smithy_client.ServiceException { + static { + __name(this, "SSOServiceException"); + } + /** + * @internal + */ + constructor(options) { + super(options); + Object.setPrototypeOf(this, _SSOServiceException.prototype); + } +}; + +// src/models/models_0.ts +var InvalidRequestException = class _InvalidRequestException extends SSOServiceException { + static { + __name(this, "InvalidRequestException"); + } + name = "InvalidRequestException"; + $fault = "client"; + /** + * @internal + */ + constructor(opts) { + super({ + name: "InvalidRequestException", + $fault: "client", + ...opts + }); + Object.setPrototypeOf(this, _InvalidRequestException.prototype); + } +}; +var ResourceNotFoundException = class _ResourceNotFoundException extends SSOServiceException { + static { + __name(this, "ResourceNotFoundException"); + } + name = "ResourceNotFoundException"; + $fault = "client"; + /** + * @internal + */ + constructor(opts) { + super({ + name: "ResourceNotFoundException", + $fault: "client", + ...opts + }); + Object.setPrototypeOf(this, _ResourceNotFoundException.prototype); + } +}; +var TooManyRequestsException = class _TooManyRequestsException extends SSOServiceException { + static { + __name(this, "TooManyRequestsException"); + } + name = "TooManyRequestsException"; + $fault = "client"; + /** + * @internal + */ + constructor(opts) { + super({ + name: "TooManyRequestsException", + 
$fault: "client", + ...opts + }); + Object.setPrototypeOf(this, _TooManyRequestsException.prototype); + } +}; +var UnauthorizedException = class _UnauthorizedException extends SSOServiceException { + static { + __name(this, "UnauthorizedException"); + } + name = "UnauthorizedException"; + $fault = "client"; + /** + * @internal + */ + constructor(opts) { + super({ + name: "UnauthorizedException", + $fault: "client", + ...opts + }); + Object.setPrototypeOf(this, _UnauthorizedException.prototype); + } +}; +var GetRoleCredentialsRequestFilterSensitiveLog = /* @__PURE__ */ __name((obj) => ({ + ...obj, + ...obj.accessToken && { accessToken: import_smithy_client.SENSITIVE_STRING } +}), "GetRoleCredentialsRequestFilterSensitiveLog"); +var RoleCredentialsFilterSensitiveLog = /* @__PURE__ */ __name((obj) => ({ + ...obj, + ...obj.secretAccessKey && { secretAccessKey: import_smithy_client.SENSITIVE_STRING }, + ...obj.sessionToken && { sessionToken: import_smithy_client.SENSITIVE_STRING } +}), "RoleCredentialsFilterSensitiveLog"); +var GetRoleCredentialsResponseFilterSensitiveLog = /* @__PURE__ */ __name((obj) => ({ + ...obj, + ...obj.roleCredentials && { roleCredentials: RoleCredentialsFilterSensitiveLog(obj.roleCredentials) } +}), "GetRoleCredentialsResponseFilterSensitiveLog"); +var ListAccountRolesRequestFilterSensitiveLog = /* @__PURE__ */ __name((obj) => ({ + ...obj, + ...obj.accessToken && { accessToken: import_smithy_client.SENSITIVE_STRING } +}), "ListAccountRolesRequestFilterSensitiveLog"); +var ListAccountsRequestFilterSensitiveLog = /* @__PURE__ */ __name((obj) => ({ + ...obj, + ...obj.accessToken && { accessToken: import_smithy_client.SENSITIVE_STRING } +}), "ListAccountsRequestFilterSensitiveLog"); +var LogoutRequestFilterSensitiveLog = /* @__PURE__ */ __name((obj) => ({ + ...obj, + ...obj.accessToken && { accessToken: import_smithy_client.SENSITIVE_STRING } +}), "LogoutRequestFilterSensitiveLog"); + +// src/protocols/Aws_restJson1.ts +var import_core2 = 
require("@aws-sdk/core"); + + +var se_GetRoleCredentialsCommand = /* @__PURE__ */ __name(async (input, context) => { + const b = (0, import_core.requestBuilder)(input, context); + const headers = (0, import_smithy_client.map)({}, import_smithy_client.isSerializableHeaderValue, { + [_xasbt]: input[_aT] + }); + b.bp("/federation/credentials"); + const query = (0, import_smithy_client.map)({ + [_rn]: [, (0, import_smithy_client.expectNonNull)(input[_rN], `roleName`)], + [_ai]: [, (0, import_smithy_client.expectNonNull)(input[_aI], `accountId`)] + }); + let body; + b.m("GET").h(headers).q(query).b(body); + return b.build(); +}, "se_GetRoleCredentialsCommand"); +var se_ListAccountRolesCommand = /* @__PURE__ */ __name(async (input, context) => { + const b = (0, import_core.requestBuilder)(input, context); + const headers = (0, import_smithy_client.map)({}, import_smithy_client.isSerializableHeaderValue, { + [_xasbt]: input[_aT] + }); + b.bp("/assignment/roles"); + const query = (0, import_smithy_client.map)({ + [_nt]: [, input[_nT]], + [_mr]: [() => input.maxResults !== void 0, () => input[_mR].toString()], + [_ai]: [, (0, import_smithy_client.expectNonNull)(input[_aI], `accountId`)] + }); + let body; + b.m("GET").h(headers).q(query).b(body); + return b.build(); +}, "se_ListAccountRolesCommand"); +var se_ListAccountsCommand = /* @__PURE__ */ __name(async (input, context) => { + const b = (0, import_core.requestBuilder)(input, context); + const headers = (0, import_smithy_client.map)({}, import_smithy_client.isSerializableHeaderValue, { + [_xasbt]: input[_aT] + }); + b.bp("/assignment/accounts"); + const query = (0, import_smithy_client.map)({ + [_nt]: [, input[_nT]], + [_mr]: [() => input.maxResults !== void 0, () => input[_mR].toString()] + }); + let body; + b.m("GET").h(headers).q(query).b(body); + return b.build(); +}, "se_ListAccountsCommand"); +var se_LogoutCommand = /* @__PURE__ */ __name(async (input, context) => { + const b = (0, 
import_core.requestBuilder)(input, context); + const headers = (0, import_smithy_client.map)({}, import_smithy_client.isSerializableHeaderValue, { + [_xasbt]: input[_aT] + }); + b.bp("/logout"); + let body; + b.m("POST").h(headers).b(body); + return b.build(); +}, "se_LogoutCommand"); +var de_GetRoleCredentialsCommand = /* @__PURE__ */ __name(async (output, context) => { + if (output.statusCode !== 200 && output.statusCode >= 300) { + return de_CommandError(output, context); + } + const contents = (0, import_smithy_client.map)({ + $metadata: deserializeMetadata(output) + }); + const data = (0, import_smithy_client.expectNonNull)((0, import_smithy_client.expectObject)(await (0, import_core2.parseJsonBody)(output.body, context)), "body"); + const doc = (0, import_smithy_client.take)(data, { + roleCredentials: import_smithy_client._json + }); + Object.assign(contents, doc); + return contents; +}, "de_GetRoleCredentialsCommand"); +var de_ListAccountRolesCommand = /* @__PURE__ */ __name(async (output, context) => { + if (output.statusCode !== 200 && output.statusCode >= 300) { + return de_CommandError(output, context); + } + const contents = (0, import_smithy_client.map)({ + $metadata: deserializeMetadata(output) + }); + const data = (0, import_smithy_client.expectNonNull)((0, import_smithy_client.expectObject)(await (0, import_core2.parseJsonBody)(output.body, context)), "body"); + const doc = (0, import_smithy_client.take)(data, { + nextToken: import_smithy_client.expectString, + roleList: import_smithy_client._json + }); + Object.assign(contents, doc); + return contents; +}, "de_ListAccountRolesCommand"); +var de_ListAccountsCommand = /* @__PURE__ */ __name(async (output, context) => { + if (output.statusCode !== 200 && output.statusCode >= 300) { + return de_CommandError(output, context); + } + const contents = (0, import_smithy_client.map)({ + $metadata: deserializeMetadata(output) + }); + const data = (0, import_smithy_client.expectNonNull)((0, 
import_smithy_client.expectObject)(await (0, import_core2.parseJsonBody)(output.body, context)), "body"); + const doc = (0, import_smithy_client.take)(data, { + accountList: import_smithy_client._json, + nextToken: import_smithy_client.expectString + }); + Object.assign(contents, doc); + return contents; +}, "de_ListAccountsCommand"); +var de_LogoutCommand = /* @__PURE__ */ __name(async (output, context) => { + if (output.statusCode !== 200 && output.statusCode >= 300) { + return de_CommandError(output, context); + } + const contents = (0, import_smithy_client.map)({ + $metadata: deserializeMetadata(output) + }); + await (0, import_smithy_client.collectBody)(output.body, context); + return contents; +}, "de_LogoutCommand"); +var de_CommandError = /* @__PURE__ */ __name(async (output, context) => { + const parsedOutput = { + ...output, + body: await (0, import_core2.parseJsonErrorBody)(output.body, context) + }; + const errorCode = (0, import_core2.loadRestJsonErrorCode)(output, parsedOutput.body); + switch (errorCode) { + case "InvalidRequestException": + case "com.amazonaws.sso#InvalidRequestException": + throw await de_InvalidRequestExceptionRes(parsedOutput, context); + case "ResourceNotFoundException": + case "com.amazonaws.sso#ResourceNotFoundException": + throw await de_ResourceNotFoundExceptionRes(parsedOutput, context); + case "TooManyRequestsException": + case "com.amazonaws.sso#TooManyRequestsException": + throw await de_TooManyRequestsExceptionRes(parsedOutput, context); + case "UnauthorizedException": + case "com.amazonaws.sso#UnauthorizedException": + throw await de_UnauthorizedExceptionRes(parsedOutput, context); + default: + const parsedBody = parsedOutput.body; + return throwDefaultError({ + output, + parsedBody, + errorCode + }); + } +}, "de_CommandError"); +var throwDefaultError = (0, import_smithy_client.withBaseException)(SSOServiceException); +var de_InvalidRequestExceptionRes = /* @__PURE__ */ __name(async (parsedOutput, context) => { + const 
contents = (0, import_smithy_client.map)({}); + const data = parsedOutput.body; + const doc = (0, import_smithy_client.take)(data, { + message: import_smithy_client.expectString + }); + Object.assign(contents, doc); + const exception = new InvalidRequestException({ + $metadata: deserializeMetadata(parsedOutput), + ...contents + }); + return (0, import_smithy_client.decorateServiceException)(exception, parsedOutput.body); +}, "de_InvalidRequestExceptionRes"); +var de_ResourceNotFoundExceptionRes = /* @__PURE__ */ __name(async (parsedOutput, context) => { + const contents = (0, import_smithy_client.map)({}); + const data = parsedOutput.body; + const doc = (0, import_smithy_client.take)(data, { + message: import_smithy_client.expectString + }); + Object.assign(contents, doc); + const exception = new ResourceNotFoundException({ + $metadata: deserializeMetadata(parsedOutput), + ...contents + }); + return (0, import_smithy_client.decorateServiceException)(exception, parsedOutput.body); +}, "de_ResourceNotFoundExceptionRes"); +var de_TooManyRequestsExceptionRes = /* @__PURE__ */ __name(async (parsedOutput, context) => { + const contents = (0, import_smithy_client.map)({}); + const data = parsedOutput.body; + const doc = (0, import_smithy_client.take)(data, { + message: import_smithy_client.expectString + }); + Object.assign(contents, doc); + const exception = new TooManyRequestsException({ + $metadata: deserializeMetadata(parsedOutput), + ...contents + }); + return (0, import_smithy_client.decorateServiceException)(exception, parsedOutput.body); +}, "de_TooManyRequestsExceptionRes"); +var de_UnauthorizedExceptionRes = /* @__PURE__ */ __name(async (parsedOutput, context) => { + const contents = (0, import_smithy_client.map)({}); + const data = parsedOutput.body; + const doc = (0, import_smithy_client.take)(data, { + message: import_smithy_client.expectString + }); + Object.assign(contents, doc); + const exception = new UnauthorizedException({ + $metadata: 
deserializeMetadata(parsedOutput), + ...contents + }); + return (0, import_smithy_client.decorateServiceException)(exception, parsedOutput.body); +}, "de_UnauthorizedExceptionRes"); +var deserializeMetadata = /* @__PURE__ */ __name((output) => ({ + httpStatusCode: output.statusCode, + requestId: output.headers["x-amzn-requestid"] ?? output.headers["x-amzn-request-id"] ?? output.headers["x-amz-request-id"], + extendedRequestId: output.headers["x-amz-id-2"], + cfId: output.headers["x-amz-cf-id"] +}), "deserializeMetadata"); +var _aI = "accountId"; +var _aT = "accessToken"; +var _ai = "account_id"; +var _mR = "maxResults"; +var _mr = "max_result"; +var _nT = "nextToken"; +var _nt = "next_token"; +var _rN = "roleName"; +var _rn = "role_name"; +var _xasbt = "x-amz-sso_bearer_token"; + +// src/commands/GetRoleCredentialsCommand.ts +var GetRoleCredentialsCommand = class extends import_smithy_client.Command.classBuilder().ep(commonParams).m(function(Command, cs, config, o) { + return [ + (0, import_middleware_serde.getSerdePlugin)(config, this.serialize, this.deserialize), + (0, import_middleware_endpoint.getEndpointPlugin)(config, Command.getEndpointParameterInstructions()) + ]; +}).s("SWBPortalService", "GetRoleCredentials", {}).n("SSOClient", "GetRoleCredentialsCommand").f(GetRoleCredentialsRequestFilterSensitiveLog, GetRoleCredentialsResponseFilterSensitiveLog).ser(se_GetRoleCredentialsCommand).de(de_GetRoleCredentialsCommand).build() { + static { + __name(this, "GetRoleCredentialsCommand"); + } +}; + +// src/commands/ListAccountRolesCommand.ts + + + +var ListAccountRolesCommand = class extends import_smithy_client.Command.classBuilder().ep(commonParams).m(function(Command, cs, config, o) { + return [ + (0, import_middleware_serde.getSerdePlugin)(config, this.serialize, this.deserialize), + (0, import_middleware_endpoint.getEndpointPlugin)(config, Command.getEndpointParameterInstructions()) + ]; +}).s("SWBPortalService", "ListAccountRoles", {}).n("SSOClient", 
"ListAccountRolesCommand").f(ListAccountRolesRequestFilterSensitiveLog, void 0).ser(se_ListAccountRolesCommand).de(de_ListAccountRolesCommand).build() { + static { + __name(this, "ListAccountRolesCommand"); + } +}; + +// src/commands/ListAccountsCommand.ts + + + +var ListAccountsCommand = class extends import_smithy_client.Command.classBuilder().ep(commonParams).m(function(Command, cs, config, o) { + return [ + (0, import_middleware_serde.getSerdePlugin)(config, this.serialize, this.deserialize), + (0, import_middleware_endpoint.getEndpointPlugin)(config, Command.getEndpointParameterInstructions()) + ]; +}).s("SWBPortalService", "ListAccounts", {}).n("SSOClient", "ListAccountsCommand").f(ListAccountsRequestFilterSensitiveLog, void 0).ser(se_ListAccountsCommand).de(de_ListAccountsCommand).build() { + static { + __name(this, "ListAccountsCommand"); + } +}; + +// src/commands/LogoutCommand.ts + + + +var LogoutCommand = class extends import_smithy_client.Command.classBuilder().ep(commonParams).m(function(Command, cs, config, o) { + return [ + (0, import_middleware_serde.getSerdePlugin)(config, this.serialize, this.deserialize), + (0, import_middleware_endpoint.getEndpointPlugin)(config, Command.getEndpointParameterInstructions()) + ]; +}).s("SWBPortalService", "Logout", {}).n("SSOClient", "LogoutCommand").f(LogoutRequestFilterSensitiveLog, void 0).ser(se_LogoutCommand).de(de_LogoutCommand).build() { + static { + __name(this, "LogoutCommand"); + } +}; + +// src/SSO.ts +var commands = { + GetRoleCredentialsCommand, + ListAccountRolesCommand, + ListAccountsCommand, + LogoutCommand +}; +var SSO = class extends SSOClient { + static { + __name(this, "SSO"); + } +}; +(0, import_smithy_client.createAggregatedClient)(commands, SSO); + +// src/pagination/ListAccountRolesPaginator.ts + +var paginateListAccountRoles = (0, import_core.createPaginator)(SSOClient, ListAccountRolesCommand, "nextToken", "nextToken", "maxResults"); + +// src/pagination/ListAccountsPaginator.ts + +var 
paginateListAccounts = (0, import_core.createPaginator)(SSOClient, ListAccountsCommand, "nextToken", "nextToken", "maxResults"); +// Annotate the CommonJS export names for ESM import in node: + +0 && (module.exports = { + SSOServiceException, + __Client, + SSOClient, + SSO, + $Command, + GetRoleCredentialsCommand, + ListAccountRolesCommand, + ListAccountsCommand, + LogoutCommand, + paginateListAccountRoles, + paginateListAccounts, + InvalidRequestException, + ResourceNotFoundException, + TooManyRequestsException, + UnauthorizedException, + GetRoleCredentialsRequestFilterSensitiveLog, + RoleCredentialsFilterSensitiveLog, + GetRoleCredentialsResponseFilterSensitiveLog, + ListAccountRolesRequestFilterSensitiveLog, + ListAccountsRequestFilterSensitiveLog, + LogoutRequestFilterSensitiveLog +}); + diff --git a/amplify/functions/deleteDocument/node_modules/@aws-sdk/client-sso/dist-cjs/runtimeConfig.browser.js b/amplify/functions/deleteDocument/node_modules/@aws-sdk/client-sso/dist-cjs/runtimeConfig.browser.js new file mode 100644 index 0000000..3b40936 --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@aws-sdk/client-sso/dist-cjs/runtimeConfig.browser.js @@ -0,0 +1,38 @@ +"use strict"; +Object.defineProperty(exports, "__esModule", { value: true }); +exports.getRuntimeConfig = void 0; +const tslib_1 = require("tslib"); +const package_json_1 = tslib_1.__importDefault(require("../package.json")); +const sha256_browser_1 = require("@aws-crypto/sha256-browser"); +const util_user_agent_browser_1 = require("@aws-sdk/util-user-agent-browser"); +const config_resolver_1 = require("@smithy/config-resolver"); +const fetch_http_handler_1 = require("@smithy/fetch-http-handler"); +const invalid_dependency_1 = require("@smithy/invalid-dependency"); +const util_body_length_browser_1 = require("@smithy/util-body-length-browser"); +const util_retry_1 = require("@smithy/util-retry"); +const runtimeConfig_shared_1 = require("./runtimeConfig.shared"); +const smithy_client_1 = 
require("@smithy/smithy-client"); +const util_defaults_mode_browser_1 = require("@smithy/util-defaults-mode-browser"); +const getRuntimeConfig = (config) => { + const defaultsMode = (0, util_defaults_mode_browser_1.resolveDefaultsModeConfig)(config); + const defaultConfigProvider = () => defaultsMode().then(smithy_client_1.loadConfigsForDefaultMode); + const clientSharedValues = (0, runtimeConfig_shared_1.getRuntimeConfig)(config); + return { + ...clientSharedValues, + ...config, + runtime: "browser", + defaultsMode, + bodyLengthChecker: config?.bodyLengthChecker ?? util_body_length_browser_1.calculateBodyLength, + defaultUserAgentProvider: config?.defaultUserAgentProvider ?? + (0, util_user_agent_browser_1.createDefaultUserAgentProvider)({ serviceId: clientSharedValues.serviceId, clientVersion: package_json_1.default.version }), + maxAttempts: config?.maxAttempts ?? util_retry_1.DEFAULT_MAX_ATTEMPTS, + region: config?.region ?? (0, invalid_dependency_1.invalidProvider)("Region is missing"), + requestHandler: fetch_http_handler_1.FetchHttpHandler.create(config?.requestHandler ?? defaultConfigProvider), + retryMode: config?.retryMode ?? (async () => (await defaultConfigProvider()).retryMode || util_retry_1.DEFAULT_RETRY_MODE), + sha256: config?.sha256 ?? sha256_browser_1.Sha256, + streamCollector: config?.streamCollector ?? fetch_http_handler_1.streamCollector, + useDualstackEndpoint: config?.useDualstackEndpoint ?? (() => Promise.resolve(config_resolver_1.DEFAULT_USE_DUALSTACK_ENDPOINT)), + useFipsEndpoint: config?.useFipsEndpoint ?? 
(() => Promise.resolve(config_resolver_1.DEFAULT_USE_FIPS_ENDPOINT)), + }; +}; +exports.getRuntimeConfig = getRuntimeConfig; diff --git a/amplify/functions/deleteDocument/node_modules/@aws-sdk/client-sso/dist-cjs/runtimeConfig.js b/amplify/functions/deleteDocument/node_modules/@aws-sdk/client-sso/dist-cjs/runtimeConfig.js new file mode 100644 index 0000000..befc739 --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@aws-sdk/client-sso/dist-cjs/runtimeConfig.js @@ -0,0 +1,51 @@ +"use strict"; +Object.defineProperty(exports, "__esModule", { value: true }); +exports.getRuntimeConfig = void 0; +const tslib_1 = require("tslib"); +const package_json_1 = tslib_1.__importDefault(require("../package.json")); +const core_1 = require("@aws-sdk/core"); +const util_user_agent_node_1 = require("@aws-sdk/util-user-agent-node"); +const config_resolver_1 = require("@smithy/config-resolver"); +const hash_node_1 = require("@smithy/hash-node"); +const middleware_retry_1 = require("@smithy/middleware-retry"); +const node_config_provider_1 = require("@smithy/node-config-provider"); +const node_http_handler_1 = require("@smithy/node-http-handler"); +const util_body_length_node_1 = require("@smithy/util-body-length-node"); +const util_retry_1 = require("@smithy/util-retry"); +const runtimeConfig_shared_1 = require("./runtimeConfig.shared"); +const smithy_client_1 = require("@smithy/smithy-client"); +const util_defaults_mode_node_1 = require("@smithy/util-defaults-mode-node"); +const smithy_client_2 = require("@smithy/smithy-client"); +const getRuntimeConfig = (config) => { + (0, smithy_client_2.emitWarningIfUnsupportedVersion)(process.version); + const defaultsMode = (0, util_defaults_mode_node_1.resolveDefaultsModeConfig)(config); + const defaultConfigProvider = () => defaultsMode().then(smithy_client_1.loadConfigsForDefaultMode); + const clientSharedValues = (0, runtimeConfig_shared_1.getRuntimeConfig)(config); + (0, 
core_1.emitWarningIfUnsupportedVersion)(process.version); + const profileConfig = { profile: config?.profile }; + return { + ...clientSharedValues, + ...config, + runtime: "node", + defaultsMode, + authSchemePreference: config?.authSchemePreference ?? (0, node_config_provider_1.loadConfig)(core_1.NODE_AUTH_SCHEME_PREFERENCE_OPTIONS, profileConfig), + bodyLengthChecker: config?.bodyLengthChecker ?? util_body_length_node_1.calculateBodyLength, + defaultUserAgentProvider: config?.defaultUserAgentProvider ?? + (0, util_user_agent_node_1.createDefaultUserAgentProvider)({ serviceId: clientSharedValues.serviceId, clientVersion: package_json_1.default.version }), + maxAttempts: config?.maxAttempts ?? (0, node_config_provider_1.loadConfig)(middleware_retry_1.NODE_MAX_ATTEMPT_CONFIG_OPTIONS, config), + region: config?.region ?? + (0, node_config_provider_1.loadConfig)(config_resolver_1.NODE_REGION_CONFIG_OPTIONS, { ...config_resolver_1.NODE_REGION_CONFIG_FILE_OPTIONS, ...profileConfig }), + requestHandler: node_http_handler_1.NodeHttpHandler.create(config?.requestHandler ?? defaultConfigProvider), + retryMode: config?.retryMode ?? + (0, node_config_provider_1.loadConfig)({ + ...middleware_retry_1.NODE_RETRY_MODE_CONFIG_OPTIONS, + default: async () => (await defaultConfigProvider()).retryMode || util_retry_1.DEFAULT_RETRY_MODE, + }, config), + sha256: config?.sha256 ?? hash_node_1.Hash.bind(null, "sha256"), + streamCollector: config?.streamCollector ?? node_http_handler_1.streamCollector, + useDualstackEndpoint: config?.useDualstackEndpoint ?? (0, node_config_provider_1.loadConfig)(config_resolver_1.NODE_USE_DUALSTACK_ENDPOINT_CONFIG_OPTIONS, profileConfig), + useFipsEndpoint: config?.useFipsEndpoint ?? (0, node_config_provider_1.loadConfig)(config_resolver_1.NODE_USE_FIPS_ENDPOINT_CONFIG_OPTIONS, profileConfig), + userAgentAppId: config?.userAgentAppId ?? 
(0, node_config_provider_1.loadConfig)(util_user_agent_node_1.NODE_APP_ID_CONFIG_OPTIONS, profileConfig), + }; +}; +exports.getRuntimeConfig = getRuntimeConfig; diff --git a/amplify/functions/deleteDocument/node_modules/@aws-sdk/client-sso/dist-cjs/runtimeConfig.native.js b/amplify/functions/deleteDocument/node_modules/@aws-sdk/client-sso/dist-cjs/runtimeConfig.native.js new file mode 100644 index 0000000..34c5f8e --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@aws-sdk/client-sso/dist-cjs/runtimeConfig.native.js @@ -0,0 +1,15 @@ +"use strict"; +Object.defineProperty(exports, "__esModule", { value: true }); +exports.getRuntimeConfig = void 0; +const sha256_js_1 = require("@aws-crypto/sha256-js"); +const runtimeConfig_browser_1 = require("./runtimeConfig.browser"); +const getRuntimeConfig = (config) => { + const browserDefaults = (0, runtimeConfig_browser_1.getRuntimeConfig)(config); + return { + ...browserDefaults, + ...config, + runtime: "react-native", + sha256: config?.sha256 ?? 
sha256_js_1.Sha256, + }; +}; +exports.getRuntimeConfig = getRuntimeConfig; diff --git a/amplify/functions/deleteDocument/node_modules/@aws-sdk/client-sso/dist-cjs/runtimeConfig.shared.js b/amplify/functions/deleteDocument/node_modules/@aws-sdk/client-sso/dist-cjs/runtimeConfig.shared.js new file mode 100644 index 0000000..24a378c --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@aws-sdk/client-sso/dist-cjs/runtimeConfig.shared.js @@ -0,0 +1,40 @@ +"use strict"; +Object.defineProperty(exports, "__esModule", { value: true }); +exports.getRuntimeConfig = void 0; +const core_1 = require("@aws-sdk/core"); +const core_2 = require("@smithy/core"); +const smithy_client_1 = require("@smithy/smithy-client"); +const url_parser_1 = require("@smithy/url-parser"); +const util_base64_1 = require("@smithy/util-base64"); +const util_utf8_1 = require("@smithy/util-utf8"); +const httpAuthSchemeProvider_1 = require("./auth/httpAuthSchemeProvider"); +const endpointResolver_1 = require("./endpoint/endpointResolver"); +const getRuntimeConfig = (config) => { + return { + apiVersion: "2019-06-10", + base64Decoder: config?.base64Decoder ?? util_base64_1.fromBase64, + base64Encoder: config?.base64Encoder ?? util_base64_1.toBase64, + disableHostPrefix: config?.disableHostPrefix ?? false, + endpointProvider: config?.endpointProvider ?? endpointResolver_1.defaultEndpointResolver, + extensions: config?.extensions ?? [], + httpAuthSchemeProvider: config?.httpAuthSchemeProvider ?? httpAuthSchemeProvider_1.defaultSSOHttpAuthSchemeProvider, + httpAuthSchemes: config?.httpAuthSchemes ?? [ + { + schemeId: "aws.auth#sigv4", + identityProvider: (ipc) => ipc.getIdentityProvider("aws.auth#sigv4"), + signer: new core_1.AwsSdkSigV4Signer(), + }, + { + schemeId: "smithy.api#noAuth", + identityProvider: (ipc) => ipc.getIdentityProvider("smithy.api#noAuth") || (async () => ({})), + signer: new core_2.NoAuthSigner(), + }, + ], + logger: config?.logger ?? 
new smithy_client_1.NoOpLogger(), + serviceId: config?.serviceId ?? "SSO", + urlParser: config?.urlParser ?? url_parser_1.parseUrl, + utf8Decoder: config?.utf8Decoder ?? util_utf8_1.fromUtf8, + utf8Encoder: config?.utf8Encoder ?? util_utf8_1.toUtf8, + }; +}; +exports.getRuntimeConfig = getRuntimeConfig; diff --git a/amplify/functions/deleteDocument/node_modules/@aws-sdk/client-sso/dist-es/SSO.js b/amplify/functions/deleteDocument/node_modules/@aws-sdk/client-sso/dist-es/SSO.js new file mode 100644 index 0000000..04d3169 --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@aws-sdk/client-sso/dist-es/SSO.js @@ -0,0 +1,15 @@ +import { createAggregatedClient } from "@smithy/smithy-client"; +import { GetRoleCredentialsCommand, } from "./commands/GetRoleCredentialsCommand"; +import { ListAccountRolesCommand, } from "./commands/ListAccountRolesCommand"; +import { ListAccountsCommand, } from "./commands/ListAccountsCommand"; +import { LogoutCommand } from "./commands/LogoutCommand"; +import { SSOClient } from "./SSOClient"; +const commands = { + GetRoleCredentialsCommand, + ListAccountRolesCommand, + ListAccountsCommand, + LogoutCommand, +}; +export class SSO extends SSOClient { +} +createAggregatedClient(commands, SSO); diff --git a/amplify/functions/deleteDocument/node_modules/@aws-sdk/client-sso/dist-es/SSOClient.js b/amplify/functions/deleteDocument/node_modules/@aws-sdk/client-sso/dist-es/SSOClient.js new file mode 100644 index 0000000..890a848 --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@aws-sdk/client-sso/dist-es/SSOClient.js @@ -0,0 +1,48 @@ +import { getHostHeaderPlugin, resolveHostHeaderConfig, } from "@aws-sdk/middleware-host-header"; +import { getLoggerPlugin } from "@aws-sdk/middleware-logger"; +import { getRecursionDetectionPlugin } from "@aws-sdk/middleware-recursion-detection"; +import { getUserAgentPlugin, resolveUserAgentConfig, } from "@aws-sdk/middleware-user-agent"; +import { resolveRegionConfig } from 
"@smithy/config-resolver"; +import { DefaultIdentityProviderConfig, getHttpAuthSchemeEndpointRuleSetPlugin, getHttpSigningPlugin, } from "@smithy/core"; +import { getContentLengthPlugin } from "@smithy/middleware-content-length"; +import { resolveEndpointConfig } from "@smithy/middleware-endpoint"; +import { getRetryPlugin, resolveRetryConfig } from "@smithy/middleware-retry"; +import { Client as __Client, } from "@smithy/smithy-client"; +import { defaultSSOHttpAuthSchemeParametersProvider, resolveHttpAuthSchemeConfig, } from "./auth/httpAuthSchemeProvider"; +import { resolveClientEndpointParameters, } from "./endpoint/EndpointParameters"; +import { getRuntimeConfig as __getRuntimeConfig } from "./runtimeConfig"; +import { resolveRuntimeExtensions } from "./runtimeExtensions"; +export { __Client }; +export class SSOClient extends __Client { + config; + constructor(...[configuration]) { + const _config_0 = __getRuntimeConfig(configuration || {}); + super(_config_0); + this.initConfig = _config_0; + const _config_1 = resolveClientEndpointParameters(_config_0); + const _config_2 = resolveUserAgentConfig(_config_1); + const _config_3 = resolveRetryConfig(_config_2); + const _config_4 = resolveRegionConfig(_config_3); + const _config_5 = resolveHostHeaderConfig(_config_4); + const _config_6 = resolveEndpointConfig(_config_5); + const _config_7 = resolveHttpAuthSchemeConfig(_config_6); + const _config_8 = resolveRuntimeExtensions(_config_7, configuration?.extensions || []); + this.config = _config_8; + this.middlewareStack.use(getUserAgentPlugin(this.config)); + this.middlewareStack.use(getRetryPlugin(this.config)); + this.middlewareStack.use(getContentLengthPlugin(this.config)); + this.middlewareStack.use(getHostHeaderPlugin(this.config)); + this.middlewareStack.use(getLoggerPlugin(this.config)); + this.middlewareStack.use(getRecursionDetectionPlugin(this.config)); + this.middlewareStack.use(getHttpAuthSchemeEndpointRuleSetPlugin(this.config, { + 
httpAuthSchemeParametersProvider: defaultSSOHttpAuthSchemeParametersProvider, + identityProviderConfigProvider: async (config) => new DefaultIdentityProviderConfig({ + "aws.auth#sigv4": config.credentials, + }), + })); + this.middlewareStack.use(getHttpSigningPlugin(this.config)); + } + destroy() { + super.destroy(); + } +} diff --git a/amplify/functions/deleteDocument/node_modules/@aws-sdk/client-sso/dist-es/auth/httpAuthExtensionConfiguration.js b/amplify/functions/deleteDocument/node_modules/@aws-sdk/client-sso/dist-es/auth/httpAuthExtensionConfiguration.js new file mode 100644 index 0000000..2ba1d48 --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@aws-sdk/client-sso/dist-es/auth/httpAuthExtensionConfiguration.js @@ -0,0 +1,38 @@ +export const getHttpAuthExtensionConfiguration = (runtimeConfig) => { + const _httpAuthSchemes = runtimeConfig.httpAuthSchemes; + let _httpAuthSchemeProvider = runtimeConfig.httpAuthSchemeProvider; + let _credentials = runtimeConfig.credentials; + return { + setHttpAuthScheme(httpAuthScheme) { + const index = _httpAuthSchemes.findIndex((scheme) => scheme.schemeId === httpAuthScheme.schemeId); + if (index === -1) { + _httpAuthSchemes.push(httpAuthScheme); + } + else { + _httpAuthSchemes.splice(index, 1, httpAuthScheme); + } + }, + httpAuthSchemes() { + return _httpAuthSchemes; + }, + setHttpAuthSchemeProvider(httpAuthSchemeProvider) { + _httpAuthSchemeProvider = httpAuthSchemeProvider; + }, + httpAuthSchemeProvider() { + return _httpAuthSchemeProvider; + }, + setCredentials(credentials) { + _credentials = credentials; + }, + credentials() { + return _credentials; + }, + }; +}; +export const resolveHttpAuthRuntimeConfig = (config) => { + return { + httpAuthSchemes: config.httpAuthSchemes(), + httpAuthSchemeProvider: config.httpAuthSchemeProvider(), + credentials: config.credentials(), + }; +}; diff --git a/amplify/functions/deleteDocument/node_modules/@aws-sdk/client-sso/dist-es/auth/httpAuthSchemeProvider.js 
b/amplify/functions/deleteDocument/node_modules/@aws-sdk/client-sso/dist-es/auth/httpAuthSchemeProvider.js new file mode 100644 index 0000000..f7ff90f --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@aws-sdk/client-sso/dist-es/auth/httpAuthSchemeProvider.js @@ -0,0 +1,62 @@ +import { resolveAwsSdkSigV4Config, } from "@aws-sdk/core"; +import { getSmithyContext, normalizeProvider } from "@smithy/util-middleware"; +export const defaultSSOHttpAuthSchemeParametersProvider = async (config, context, input) => { + return { + operation: getSmithyContext(context).operation, + region: (await normalizeProvider(config.region)()) || + (() => { + throw new Error("expected `region` to be configured for `aws.auth#sigv4`"); + })(), + }; +}; +function createAwsAuthSigv4HttpAuthOption(authParameters) { + return { + schemeId: "aws.auth#sigv4", + signingProperties: { + name: "awsssoportal", + region: authParameters.region, + }, + propertiesExtractor: (config, context) => ({ + signingProperties: { + config, + context, + }, + }), + }; +} +function createSmithyApiNoAuthHttpAuthOption(authParameters) { + return { + schemeId: "smithy.api#noAuth", + }; +} +export const defaultSSOHttpAuthSchemeProvider = (authParameters) => { + const options = []; + switch (authParameters.operation) { + case "GetRoleCredentials": { + options.push(createSmithyApiNoAuthHttpAuthOption(authParameters)); + break; + } + case "ListAccountRoles": { + options.push(createSmithyApiNoAuthHttpAuthOption(authParameters)); + break; + } + case "ListAccounts": { + options.push(createSmithyApiNoAuthHttpAuthOption(authParameters)); + break; + } + case "Logout": { + options.push(createSmithyApiNoAuthHttpAuthOption(authParameters)); + break; + } + default: { + options.push(createAwsAuthSigv4HttpAuthOption(authParameters)); + } + } + return options; +}; +export const resolveHttpAuthSchemeConfig = (config) => { + const config_0 = resolveAwsSdkSigV4Config(config); + return Object.assign(config_0, { + 
authSchemePreference: normalizeProvider(config.authSchemePreference ?? []), + }); +}; diff --git a/amplify/functions/deleteDocument/node_modules/@aws-sdk/client-sso/dist-es/commands/GetRoleCredentialsCommand.js b/amplify/functions/deleteDocument/node_modules/@aws-sdk/client-sso/dist-es/commands/GetRoleCredentialsCommand.js new file mode 100644 index 0000000..aa4c2e3 --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@aws-sdk/client-sso/dist-es/commands/GetRoleCredentialsCommand.js @@ -0,0 +1,23 @@ +import { getEndpointPlugin } from "@smithy/middleware-endpoint"; +import { getSerdePlugin } from "@smithy/middleware-serde"; +import { Command as $Command } from "@smithy/smithy-client"; +import { commonParams } from "../endpoint/EndpointParameters"; +import { GetRoleCredentialsRequestFilterSensitiveLog, GetRoleCredentialsResponseFilterSensitiveLog, } from "../models/models_0"; +import { de_GetRoleCredentialsCommand, se_GetRoleCredentialsCommand } from "../protocols/Aws_restJson1"; +export { $Command }; +export class GetRoleCredentialsCommand extends $Command + .classBuilder() + .ep(commonParams) + .m(function (Command, cs, config, o) { + return [ + getSerdePlugin(config, this.serialize, this.deserialize), + getEndpointPlugin(config, Command.getEndpointParameterInstructions()), + ]; +}) + .s("SWBPortalService", "GetRoleCredentials", {}) + .n("SSOClient", "GetRoleCredentialsCommand") + .f(GetRoleCredentialsRequestFilterSensitiveLog, GetRoleCredentialsResponseFilterSensitiveLog) + .ser(se_GetRoleCredentialsCommand) + .de(de_GetRoleCredentialsCommand) + .build() { +} diff --git a/amplify/functions/deleteDocument/node_modules/@aws-sdk/client-sso/dist-es/commands/ListAccountRolesCommand.js b/amplify/functions/deleteDocument/node_modules/@aws-sdk/client-sso/dist-es/commands/ListAccountRolesCommand.js new file mode 100644 index 0000000..d5bcc14 --- /dev/null +++ 
b/amplify/functions/deleteDocument/node_modules/@aws-sdk/client-sso/dist-es/commands/ListAccountRolesCommand.js @@ -0,0 +1,23 @@ +import { getEndpointPlugin } from "@smithy/middleware-endpoint"; +import { getSerdePlugin } from "@smithy/middleware-serde"; +import { Command as $Command } from "@smithy/smithy-client"; +import { commonParams } from "../endpoint/EndpointParameters"; +import { ListAccountRolesRequestFilterSensitiveLog, } from "../models/models_0"; +import { de_ListAccountRolesCommand, se_ListAccountRolesCommand } from "../protocols/Aws_restJson1"; +export { $Command }; +export class ListAccountRolesCommand extends $Command + .classBuilder() + .ep(commonParams) + .m(function (Command, cs, config, o) { + return [ + getSerdePlugin(config, this.serialize, this.deserialize), + getEndpointPlugin(config, Command.getEndpointParameterInstructions()), + ]; +}) + .s("SWBPortalService", "ListAccountRoles", {}) + .n("SSOClient", "ListAccountRolesCommand") + .f(ListAccountRolesRequestFilterSensitiveLog, void 0) + .ser(se_ListAccountRolesCommand) + .de(de_ListAccountRolesCommand) + .build() { +} diff --git a/amplify/functions/deleteDocument/node_modules/@aws-sdk/client-sso/dist-es/commands/ListAccountsCommand.js b/amplify/functions/deleteDocument/node_modules/@aws-sdk/client-sso/dist-es/commands/ListAccountsCommand.js new file mode 100644 index 0000000..d4ab8ba --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@aws-sdk/client-sso/dist-es/commands/ListAccountsCommand.js @@ -0,0 +1,23 @@ +import { getEndpointPlugin } from "@smithy/middleware-endpoint"; +import { getSerdePlugin } from "@smithy/middleware-serde"; +import { Command as $Command } from "@smithy/smithy-client"; +import { commonParams } from "../endpoint/EndpointParameters"; +import { ListAccountsRequestFilterSensitiveLog } from "../models/models_0"; +import { de_ListAccountsCommand, se_ListAccountsCommand } from "../protocols/Aws_restJson1"; +export { $Command }; +export class 
ListAccountsCommand extends $Command + .classBuilder() + .ep(commonParams) + .m(function (Command, cs, config, o) { + return [ + getSerdePlugin(config, this.serialize, this.deserialize), + getEndpointPlugin(config, Command.getEndpointParameterInstructions()), + ]; +}) + .s("SWBPortalService", "ListAccounts", {}) + .n("SSOClient", "ListAccountsCommand") + .f(ListAccountsRequestFilterSensitiveLog, void 0) + .ser(se_ListAccountsCommand) + .de(de_ListAccountsCommand) + .build() { +} diff --git a/amplify/functions/deleteDocument/node_modules/@aws-sdk/client-sso/dist-es/commands/LogoutCommand.js b/amplify/functions/deleteDocument/node_modules/@aws-sdk/client-sso/dist-es/commands/LogoutCommand.js new file mode 100644 index 0000000..29a37ed --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@aws-sdk/client-sso/dist-es/commands/LogoutCommand.js @@ -0,0 +1,23 @@ +import { getEndpointPlugin } from "@smithy/middleware-endpoint"; +import { getSerdePlugin } from "@smithy/middleware-serde"; +import { Command as $Command } from "@smithy/smithy-client"; +import { commonParams } from "../endpoint/EndpointParameters"; +import { LogoutRequestFilterSensitiveLog } from "../models/models_0"; +import { de_LogoutCommand, se_LogoutCommand } from "../protocols/Aws_restJson1"; +export { $Command }; +export class LogoutCommand extends $Command + .classBuilder() + .ep(commonParams) + .m(function (Command, cs, config, o) { + return [ + getSerdePlugin(config, this.serialize, this.deserialize), + getEndpointPlugin(config, Command.getEndpointParameterInstructions()), + ]; +}) + .s("SWBPortalService", "Logout", {}) + .n("SSOClient", "LogoutCommand") + .f(LogoutRequestFilterSensitiveLog, void 0) + .ser(se_LogoutCommand) + .de(de_LogoutCommand) + .build() { +} diff --git a/amplify/functions/deleteDocument/node_modules/@aws-sdk/client-sso/dist-es/commands/index.js b/amplify/functions/deleteDocument/node_modules/@aws-sdk/client-sso/dist-es/commands/index.js new file mode 100644 index 
0000000..0ab890d --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@aws-sdk/client-sso/dist-es/commands/index.js @@ -0,0 +1,4 @@ +export * from "./GetRoleCredentialsCommand"; +export * from "./ListAccountRolesCommand"; +export * from "./ListAccountsCommand"; +export * from "./LogoutCommand"; diff --git a/amplify/functions/deleteDocument/node_modules/@aws-sdk/client-sso/dist-es/endpoint/EndpointParameters.js b/amplify/functions/deleteDocument/node_modules/@aws-sdk/client-sso/dist-es/endpoint/EndpointParameters.js new file mode 100644 index 0000000..77e34f8 --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@aws-sdk/client-sso/dist-es/endpoint/EndpointParameters.js @@ -0,0 +1,13 @@ +export const resolveClientEndpointParameters = (options) => { + return Object.assign(options, { + useDualstackEndpoint: options.useDualstackEndpoint ?? false, + useFipsEndpoint: options.useFipsEndpoint ?? false, + defaultSigningName: "awsssoportal", + }); +}; +export const commonParams = { + UseFIPS: { type: "builtInParams", name: "useFipsEndpoint" }, + Endpoint: { type: "builtInParams", name: "endpoint" }, + Region: { type: "builtInParams", name: "region" }, + UseDualStack: { type: "builtInParams", name: "useDualstackEndpoint" }, +}; diff --git a/amplify/functions/deleteDocument/node_modules/@aws-sdk/client-sso/dist-es/endpoint/endpointResolver.js b/amplify/functions/deleteDocument/node_modules/@aws-sdk/client-sso/dist-es/endpoint/endpointResolver.js new file mode 100644 index 0000000..0ac15bc --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@aws-sdk/client-sso/dist-es/endpoint/endpointResolver.js @@ -0,0 +1,14 @@ +import { awsEndpointFunctions } from "@aws-sdk/util-endpoints"; +import { customEndpointFunctions, EndpointCache, resolveEndpoint } from "@smithy/util-endpoints"; +import { ruleSet } from "./ruleset"; +const cache = new EndpointCache({ + size: 50, + params: ["Endpoint", "Region", "UseDualStack", "UseFIPS"], +}); +export const 
defaultEndpointResolver = (endpointParams, context = {}) => { + return cache.get(endpointParams, () => resolveEndpoint(ruleSet, { + endpointParams: endpointParams, + logger: context.logger, + })); +}; +customEndpointFunctions.aws = awsEndpointFunctions; diff --git a/amplify/functions/deleteDocument/node_modules/@aws-sdk/client-sso/dist-es/endpoint/ruleset.js b/amplify/functions/deleteDocument/node_modules/@aws-sdk/client-sso/dist-es/endpoint/ruleset.js new file mode 100644 index 0000000..c48673d --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@aws-sdk/client-sso/dist-es/endpoint/ruleset.js @@ -0,0 +1,4 @@ +const u = "required", v = "fn", w = "argv", x = "ref"; +const a = true, b = "isSet", c = "booleanEquals", d = "error", e = "endpoint", f = "tree", g = "PartitionResult", h = "getAttr", i = { [u]: false, "type": "String" }, j = { [u]: true, "default": false, "type": "Boolean" }, k = { [x]: "Endpoint" }, l = { [v]: c, [w]: [{ [x]: "UseFIPS" }, true] }, m = { [v]: c, [w]: [{ [x]: "UseDualStack" }, true] }, n = {}, o = { [v]: h, [w]: [{ [x]: g }, "supportsFIPS"] }, p = { [x]: g }, q = { [v]: c, [w]: [true, { [v]: h, [w]: [p, "supportsDualStack"] }] }, r = [l], s = [m], t = [{ [x]: "Region" }]; +const _data = { version: "1.0", parameters: { Region: i, UseDualStack: j, UseFIPS: j, Endpoint: i }, rules: [{ conditions: [{ [v]: b, [w]: [k] }], rules: [{ conditions: r, error: "Invalid Configuration: FIPS and custom endpoint are not supported", type: d }, { conditions: s, error: "Invalid Configuration: Dualstack and custom endpoint are not supported", type: d }, { endpoint: { url: k, properties: n, headers: n }, type: e }], type: f }, { conditions: [{ [v]: b, [w]: t }], rules: [{ conditions: [{ [v]: "aws.partition", [w]: t, assign: g }], rules: [{ conditions: [l, m], rules: [{ conditions: [{ [v]: c, [w]: [a, o] }, q], rules: [{ endpoint: { url: "https://portal.sso-fips.{Region}.{PartitionResult#dualStackDnsSuffix}", properties: n, headers: n }, type: e }], 
type: f }, { error: "FIPS and DualStack are enabled, but this partition does not support one or both", type: d }], type: f }, { conditions: r, rules: [{ conditions: [{ [v]: c, [w]: [o, a] }], rules: [{ conditions: [{ [v]: "stringEquals", [w]: [{ [v]: h, [w]: [p, "name"] }, "aws-us-gov"] }], endpoint: { url: "https://portal.sso.{Region}.amazonaws.com", properties: n, headers: n }, type: e }, { endpoint: { url: "https://portal.sso-fips.{Region}.{PartitionResult#dnsSuffix}", properties: n, headers: n }, type: e }], type: f }, { error: "FIPS is enabled but this partition does not support FIPS", type: d }], type: f }, { conditions: s, rules: [{ conditions: [q], rules: [{ endpoint: { url: "https://portal.sso.{Region}.{PartitionResult#dualStackDnsSuffix}", properties: n, headers: n }, type: e }], type: f }, { error: "DualStack is enabled but this partition does not support DualStack", type: d }], type: f }, { endpoint: { url: "https://portal.sso.{Region}.{PartitionResult#dnsSuffix}", properties: n, headers: n }, type: e }], type: f }], type: f }, { error: "Invalid Configuration: Missing Region", type: d }] }; +export const ruleSet = _data; diff --git a/amplify/functions/deleteDocument/node_modules/@aws-sdk/client-sso/dist-es/extensionConfiguration.js b/amplify/functions/deleteDocument/node_modules/@aws-sdk/client-sso/dist-es/extensionConfiguration.js new file mode 100644 index 0000000..cb0ff5c --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@aws-sdk/client-sso/dist-es/extensionConfiguration.js @@ -0,0 +1 @@ +export {}; diff --git a/amplify/functions/deleteDocument/node_modules/@aws-sdk/client-sso/dist-es/index.js b/amplify/functions/deleteDocument/node_modules/@aws-sdk/client-sso/dist-es/index.js new file mode 100644 index 0000000..b297556 --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@aws-sdk/client-sso/dist-es/index.js @@ -0,0 +1,6 @@ +export * from "./SSOClient"; +export * from "./SSO"; +export * from "./commands"; +export * from 
"./pagination"; +export * from "./models"; +export { SSOServiceException } from "./models/SSOServiceException"; diff --git a/amplify/functions/deleteDocument/node_modules/@aws-sdk/client-sso/dist-es/models/SSOServiceException.js b/amplify/functions/deleteDocument/node_modules/@aws-sdk/client-sso/dist-es/models/SSOServiceException.js new file mode 100644 index 0000000..fa5d8fb --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@aws-sdk/client-sso/dist-es/models/SSOServiceException.js @@ -0,0 +1,8 @@ +import { ServiceException as __ServiceException, } from "@smithy/smithy-client"; +export { __ServiceException }; +export class SSOServiceException extends __ServiceException { + constructor(options) { + super(options); + Object.setPrototypeOf(this, SSOServiceException.prototype); + } +} diff --git a/amplify/functions/deleteDocument/node_modules/@aws-sdk/client-sso/dist-es/models/index.js b/amplify/functions/deleteDocument/node_modules/@aws-sdk/client-sso/dist-es/models/index.js new file mode 100644 index 0000000..09c5d6e --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@aws-sdk/client-sso/dist-es/models/index.js @@ -0,0 +1 @@ +export * from "./models_0"; diff --git a/amplify/functions/deleteDocument/node_modules/@aws-sdk/client-sso/dist-es/models/models_0.js b/amplify/functions/deleteDocument/node_modules/@aws-sdk/client-sso/dist-es/models/models_0.js new file mode 100644 index 0000000..56ec16d --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@aws-sdk/client-sso/dist-es/models/models_0.js @@ -0,0 +1,75 @@ +import { SENSITIVE_STRING } from "@smithy/smithy-client"; +import { SSOServiceException as __BaseException } from "./SSOServiceException"; +export class InvalidRequestException extends __BaseException { + name = "InvalidRequestException"; + $fault = "client"; + constructor(opts) { + super({ + name: "InvalidRequestException", + $fault: "client", + ...opts, + }); + Object.setPrototypeOf(this, 
InvalidRequestException.prototype); + } +} +export class ResourceNotFoundException extends __BaseException { + name = "ResourceNotFoundException"; + $fault = "client"; + constructor(opts) { + super({ + name: "ResourceNotFoundException", + $fault: "client", + ...opts, + }); + Object.setPrototypeOf(this, ResourceNotFoundException.prototype); + } +} +export class TooManyRequestsException extends __BaseException { + name = "TooManyRequestsException"; + $fault = "client"; + constructor(opts) { + super({ + name: "TooManyRequestsException", + $fault: "client", + ...opts, + }); + Object.setPrototypeOf(this, TooManyRequestsException.prototype); + } +} +export class UnauthorizedException extends __BaseException { + name = "UnauthorizedException"; + $fault = "client"; + constructor(opts) { + super({ + name: "UnauthorizedException", + $fault: "client", + ...opts, + }); + Object.setPrototypeOf(this, UnauthorizedException.prototype); + } +} +export const GetRoleCredentialsRequestFilterSensitiveLog = (obj) => ({ + ...obj, + ...(obj.accessToken && { accessToken: SENSITIVE_STRING }), +}); +export const RoleCredentialsFilterSensitiveLog = (obj) => ({ + ...obj, + ...(obj.secretAccessKey && { secretAccessKey: SENSITIVE_STRING }), + ...(obj.sessionToken && { sessionToken: SENSITIVE_STRING }), +}); +export const GetRoleCredentialsResponseFilterSensitiveLog = (obj) => ({ + ...obj, + ...(obj.roleCredentials && { roleCredentials: RoleCredentialsFilterSensitiveLog(obj.roleCredentials) }), +}); +export const ListAccountRolesRequestFilterSensitiveLog = (obj) => ({ + ...obj, + ...(obj.accessToken && { accessToken: SENSITIVE_STRING }), +}); +export const ListAccountsRequestFilterSensitiveLog = (obj) => ({ + ...obj, + ...(obj.accessToken && { accessToken: SENSITIVE_STRING }), +}); +export const LogoutRequestFilterSensitiveLog = (obj) => ({ + ...obj, + ...(obj.accessToken && { accessToken: SENSITIVE_STRING }), +}); diff --git 
a/amplify/functions/deleteDocument/node_modules/@aws-sdk/client-sso/dist-es/pagination/Interfaces.js b/amplify/functions/deleteDocument/node_modules/@aws-sdk/client-sso/dist-es/pagination/Interfaces.js new file mode 100644 index 0000000..cb0ff5c --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@aws-sdk/client-sso/dist-es/pagination/Interfaces.js @@ -0,0 +1 @@ +export {}; diff --git a/amplify/functions/deleteDocument/node_modules/@aws-sdk/client-sso/dist-es/pagination/ListAccountRolesPaginator.js b/amplify/functions/deleteDocument/node_modules/@aws-sdk/client-sso/dist-es/pagination/ListAccountRolesPaginator.js new file mode 100644 index 0000000..b18c3a8 --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@aws-sdk/client-sso/dist-es/pagination/ListAccountRolesPaginator.js @@ -0,0 +1,4 @@ +import { createPaginator } from "@smithy/core"; +import { ListAccountRolesCommand, } from "../commands/ListAccountRolesCommand"; +import { SSOClient } from "../SSOClient"; +export const paginateListAccountRoles = createPaginator(SSOClient, ListAccountRolesCommand, "nextToken", "nextToken", "maxResults"); diff --git a/amplify/functions/deleteDocument/node_modules/@aws-sdk/client-sso/dist-es/pagination/ListAccountsPaginator.js b/amplify/functions/deleteDocument/node_modules/@aws-sdk/client-sso/dist-es/pagination/ListAccountsPaginator.js new file mode 100644 index 0000000..342c663 --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@aws-sdk/client-sso/dist-es/pagination/ListAccountsPaginator.js @@ -0,0 +1,4 @@ +import { createPaginator } from "@smithy/core"; +import { ListAccountsCommand, } from "../commands/ListAccountsCommand"; +import { SSOClient } from "../SSOClient"; +export const paginateListAccounts = createPaginator(SSOClient, ListAccountsCommand, "nextToken", "nextToken", "maxResults"); diff --git a/amplify/functions/deleteDocument/node_modules/@aws-sdk/client-sso/dist-es/pagination/index.js 
b/amplify/functions/deleteDocument/node_modules/@aws-sdk/client-sso/dist-es/pagination/index.js new file mode 100644 index 0000000..1e7866f --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@aws-sdk/client-sso/dist-es/pagination/index.js @@ -0,0 +1,3 @@ +export * from "./Interfaces"; +export * from "./ListAccountRolesPaginator"; +export * from "./ListAccountsPaginator"; diff --git a/amplify/functions/deleteDocument/node_modules/@aws-sdk/client-sso/dist-es/protocols/Aws_restJson1.js b/amplify/functions/deleteDocument/node_modules/@aws-sdk/client-sso/dist-es/protocols/Aws_restJson1.js new file mode 100644 index 0000000..11b1892 --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@aws-sdk/client-sso/dist-es/protocols/Aws_restJson1.js @@ -0,0 +1,210 @@ +import { loadRestJsonErrorCode, parseJsonBody as parseBody, parseJsonErrorBody as parseErrorBody } from "@aws-sdk/core"; +import { requestBuilder as rb } from "@smithy/core"; +import { _json, collectBody, decorateServiceException as __decorateServiceException, expectNonNull as __expectNonNull, expectObject as __expectObject, expectString as __expectString, isSerializableHeaderValue, map, take, withBaseException, } from "@smithy/smithy-client"; +import { InvalidRequestException, ResourceNotFoundException, TooManyRequestsException, UnauthorizedException, } from "../models/models_0"; +import { SSOServiceException as __BaseException } from "../models/SSOServiceException"; +export const se_GetRoleCredentialsCommand = async (input, context) => { + const b = rb(input, context); + const headers = map({}, isSerializableHeaderValue, { + [_xasbt]: input[_aT], + }); + b.bp("/federation/credentials"); + const query = map({ + [_rn]: [, __expectNonNull(input[_rN], `roleName`)], + [_ai]: [, __expectNonNull(input[_aI], `accountId`)], + }); + let body; + b.m("GET").h(headers).q(query).b(body); + return b.build(); +}; +export const se_ListAccountRolesCommand = async (input, context) => { + const b = rb(input, 
context); + const headers = map({}, isSerializableHeaderValue, { + [_xasbt]: input[_aT], + }); + b.bp("/assignment/roles"); + const query = map({ + [_nt]: [, input[_nT]], + [_mr]: [() => input.maxResults !== void 0, () => input[_mR].toString()], + [_ai]: [, __expectNonNull(input[_aI], `accountId`)], + }); + let body; + b.m("GET").h(headers).q(query).b(body); + return b.build(); +}; +export const se_ListAccountsCommand = async (input, context) => { + const b = rb(input, context); + const headers = map({}, isSerializableHeaderValue, { + [_xasbt]: input[_aT], + }); + b.bp("/assignment/accounts"); + const query = map({ + [_nt]: [, input[_nT]], + [_mr]: [() => input.maxResults !== void 0, () => input[_mR].toString()], + }); + let body; + b.m("GET").h(headers).q(query).b(body); + return b.build(); +}; +export const se_LogoutCommand = async (input, context) => { + const b = rb(input, context); + const headers = map({}, isSerializableHeaderValue, { + [_xasbt]: input[_aT], + }); + b.bp("/logout"); + let body; + b.m("POST").h(headers).b(body); + return b.build(); +}; +export const de_GetRoleCredentialsCommand = async (output, context) => { + if (output.statusCode !== 200 && output.statusCode >= 300) { + return de_CommandError(output, context); + } + const contents = map({ + $metadata: deserializeMetadata(output), + }); + const data = __expectNonNull(__expectObject(await parseBody(output.body, context)), "body"); + const doc = take(data, { + roleCredentials: _json, + }); + Object.assign(contents, doc); + return contents; +}; +export const de_ListAccountRolesCommand = async (output, context) => { + if (output.statusCode !== 200 && output.statusCode >= 300) { + return de_CommandError(output, context); + } + const contents = map({ + $metadata: deserializeMetadata(output), + }); + const data = __expectNonNull(__expectObject(await parseBody(output.body, context)), "body"); + const doc = take(data, { + nextToken: __expectString, + roleList: _json, + }); + Object.assign(contents, 
doc); + return contents; +}; +export const de_ListAccountsCommand = async (output, context) => { + if (output.statusCode !== 200 && output.statusCode >= 300) { + return de_CommandError(output, context); + } + const contents = map({ + $metadata: deserializeMetadata(output), + }); + const data = __expectNonNull(__expectObject(await parseBody(output.body, context)), "body"); + const doc = take(data, { + accountList: _json, + nextToken: __expectString, + }); + Object.assign(contents, doc); + return contents; +}; +export const de_LogoutCommand = async (output, context) => { + if (output.statusCode !== 200 && output.statusCode >= 300) { + return de_CommandError(output, context); + } + const contents = map({ + $metadata: deserializeMetadata(output), + }); + await collectBody(output.body, context); + return contents; +}; +const de_CommandError = async (output, context) => { + const parsedOutput = { + ...output, + body: await parseErrorBody(output.body, context), + }; + const errorCode = loadRestJsonErrorCode(output, parsedOutput.body); + switch (errorCode) { + case "InvalidRequestException": + case "com.amazonaws.sso#InvalidRequestException": + throw await de_InvalidRequestExceptionRes(parsedOutput, context); + case "ResourceNotFoundException": + case "com.amazonaws.sso#ResourceNotFoundException": + throw await de_ResourceNotFoundExceptionRes(parsedOutput, context); + case "TooManyRequestsException": + case "com.amazonaws.sso#TooManyRequestsException": + throw await de_TooManyRequestsExceptionRes(parsedOutput, context); + case "UnauthorizedException": + case "com.amazonaws.sso#UnauthorizedException": + throw await de_UnauthorizedExceptionRes(parsedOutput, context); + default: + const parsedBody = parsedOutput.body; + return throwDefaultError({ + output, + parsedBody, + errorCode, + }); + } +}; +const throwDefaultError = withBaseException(__BaseException); +const de_InvalidRequestExceptionRes = async (parsedOutput, context) => { + const contents = map({}); + const data = 
parsedOutput.body; + const doc = take(data, { + message: __expectString, + }); + Object.assign(contents, doc); + const exception = new InvalidRequestException({ + $metadata: deserializeMetadata(parsedOutput), + ...contents, + }); + return __decorateServiceException(exception, parsedOutput.body); +}; +const de_ResourceNotFoundExceptionRes = async (parsedOutput, context) => { + const contents = map({}); + const data = parsedOutput.body; + const doc = take(data, { + message: __expectString, + }); + Object.assign(contents, doc); + const exception = new ResourceNotFoundException({ + $metadata: deserializeMetadata(parsedOutput), + ...contents, + }); + return __decorateServiceException(exception, parsedOutput.body); +}; +const de_TooManyRequestsExceptionRes = async (parsedOutput, context) => { + const contents = map({}); + const data = parsedOutput.body; + const doc = take(data, { + message: __expectString, + }); + Object.assign(contents, doc); + const exception = new TooManyRequestsException({ + $metadata: deserializeMetadata(parsedOutput), + ...contents, + }); + return __decorateServiceException(exception, parsedOutput.body); +}; +const de_UnauthorizedExceptionRes = async (parsedOutput, context) => { + const contents = map({}); + const data = parsedOutput.body; + const doc = take(data, { + message: __expectString, + }); + Object.assign(contents, doc); + const exception = new UnauthorizedException({ + $metadata: deserializeMetadata(parsedOutput), + ...contents, + }); + return __decorateServiceException(exception, parsedOutput.body); +}; +const deserializeMetadata = (output) => ({ + httpStatusCode: output.statusCode, + requestId: output.headers["x-amzn-requestid"] ?? output.headers["x-amzn-request-id"] ?? 
output.headers["x-amz-request-id"], + extendedRequestId: output.headers["x-amz-id-2"], + cfId: output.headers["x-amz-cf-id"], +}); +const collectBodyString = (streamBody, context) => collectBody(streamBody, context).then((body) => context.utf8Encoder(body)); +const _aI = "accountId"; +const _aT = "accessToken"; +const _ai = "account_id"; +const _mR = "maxResults"; +const _mr = "max_result"; +const _nT = "nextToken"; +const _nt = "next_token"; +const _rN = "roleName"; +const _rn = "role_name"; +const _xasbt = "x-amz-sso_bearer_token"; diff --git a/amplify/functions/deleteDocument/node_modules/@aws-sdk/client-sso/dist-es/runtimeConfig.browser.js b/amplify/functions/deleteDocument/node_modules/@aws-sdk/client-sso/dist-es/runtimeConfig.browser.js new file mode 100644 index 0000000..7c8fe85 --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@aws-sdk/client-sso/dist-es/runtimeConfig.browser.js @@ -0,0 +1,33 @@ +import packageInfo from "../package.json"; +import { Sha256 } from "@aws-crypto/sha256-browser"; +import { createDefaultUserAgentProvider } from "@aws-sdk/util-user-agent-browser"; +import { DEFAULT_USE_DUALSTACK_ENDPOINT, DEFAULT_USE_FIPS_ENDPOINT } from "@smithy/config-resolver"; +import { FetchHttpHandler as RequestHandler, streamCollector } from "@smithy/fetch-http-handler"; +import { invalidProvider } from "@smithy/invalid-dependency"; +import { calculateBodyLength } from "@smithy/util-body-length-browser"; +import { DEFAULT_MAX_ATTEMPTS, DEFAULT_RETRY_MODE } from "@smithy/util-retry"; +import { getRuntimeConfig as getSharedRuntimeConfig } from "./runtimeConfig.shared"; +import { loadConfigsForDefaultMode } from "@smithy/smithy-client"; +import { resolveDefaultsModeConfig } from "@smithy/util-defaults-mode-browser"; +export const getRuntimeConfig = (config) => { + const defaultsMode = resolveDefaultsModeConfig(config); + const defaultConfigProvider = () => defaultsMode().then(loadConfigsForDefaultMode); + const clientSharedValues = 
getSharedRuntimeConfig(config); + return { + ...clientSharedValues, + ...config, + runtime: "browser", + defaultsMode, + bodyLengthChecker: config?.bodyLengthChecker ?? calculateBodyLength, + defaultUserAgentProvider: config?.defaultUserAgentProvider ?? + createDefaultUserAgentProvider({ serviceId: clientSharedValues.serviceId, clientVersion: packageInfo.version }), + maxAttempts: config?.maxAttempts ?? DEFAULT_MAX_ATTEMPTS, + region: config?.region ?? invalidProvider("Region is missing"), + requestHandler: RequestHandler.create(config?.requestHandler ?? defaultConfigProvider), + retryMode: config?.retryMode ?? (async () => (await defaultConfigProvider()).retryMode || DEFAULT_RETRY_MODE), + sha256: config?.sha256 ?? Sha256, + streamCollector: config?.streamCollector ?? streamCollector, + useDualstackEndpoint: config?.useDualstackEndpoint ?? (() => Promise.resolve(DEFAULT_USE_DUALSTACK_ENDPOINT)), + useFipsEndpoint: config?.useFipsEndpoint ?? (() => Promise.resolve(DEFAULT_USE_FIPS_ENDPOINT)), + }; +}; diff --git a/amplify/functions/deleteDocument/node_modules/@aws-sdk/client-sso/dist-es/runtimeConfig.js b/amplify/functions/deleteDocument/node_modules/@aws-sdk/client-sso/dist-es/runtimeConfig.js new file mode 100644 index 0000000..d8440b7 --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@aws-sdk/client-sso/dist-es/runtimeConfig.js @@ -0,0 +1,46 @@ +import packageInfo from "../package.json"; +import { NODE_AUTH_SCHEME_PREFERENCE_OPTIONS, emitWarningIfUnsupportedVersion as awsCheckVersion } from "@aws-sdk/core"; +import { NODE_APP_ID_CONFIG_OPTIONS, createDefaultUserAgentProvider } from "@aws-sdk/util-user-agent-node"; +import { NODE_REGION_CONFIG_FILE_OPTIONS, NODE_REGION_CONFIG_OPTIONS, NODE_USE_DUALSTACK_ENDPOINT_CONFIG_OPTIONS, NODE_USE_FIPS_ENDPOINT_CONFIG_OPTIONS, } from "@smithy/config-resolver"; +import { Hash } from "@smithy/hash-node"; +import { NODE_MAX_ATTEMPT_CONFIG_OPTIONS, NODE_RETRY_MODE_CONFIG_OPTIONS } from 
"@smithy/middleware-retry"; +import { loadConfig as loadNodeConfig } from "@smithy/node-config-provider"; +import { NodeHttpHandler as RequestHandler, streamCollector } from "@smithy/node-http-handler"; +import { calculateBodyLength } from "@smithy/util-body-length-node"; +import { DEFAULT_RETRY_MODE } from "@smithy/util-retry"; +import { getRuntimeConfig as getSharedRuntimeConfig } from "./runtimeConfig.shared"; +import { loadConfigsForDefaultMode } from "@smithy/smithy-client"; +import { resolveDefaultsModeConfig } from "@smithy/util-defaults-mode-node"; +import { emitWarningIfUnsupportedVersion } from "@smithy/smithy-client"; +export const getRuntimeConfig = (config) => { + emitWarningIfUnsupportedVersion(process.version); + const defaultsMode = resolveDefaultsModeConfig(config); + const defaultConfigProvider = () => defaultsMode().then(loadConfigsForDefaultMode); + const clientSharedValues = getSharedRuntimeConfig(config); + awsCheckVersion(process.version); + const profileConfig = { profile: config?.profile }; + return { + ...clientSharedValues, + ...config, + runtime: "node", + defaultsMode, + authSchemePreference: config?.authSchemePreference ?? loadNodeConfig(NODE_AUTH_SCHEME_PREFERENCE_OPTIONS, profileConfig), + bodyLengthChecker: config?.bodyLengthChecker ?? calculateBodyLength, + defaultUserAgentProvider: config?.defaultUserAgentProvider ?? + createDefaultUserAgentProvider({ serviceId: clientSharedValues.serviceId, clientVersion: packageInfo.version }), + maxAttempts: config?.maxAttempts ?? loadNodeConfig(NODE_MAX_ATTEMPT_CONFIG_OPTIONS, config), + region: config?.region ?? + loadNodeConfig(NODE_REGION_CONFIG_OPTIONS, { ...NODE_REGION_CONFIG_FILE_OPTIONS, ...profileConfig }), + requestHandler: RequestHandler.create(config?.requestHandler ?? defaultConfigProvider), + retryMode: config?.retryMode ?? 
+ loadNodeConfig({ + ...NODE_RETRY_MODE_CONFIG_OPTIONS, + default: async () => (await defaultConfigProvider()).retryMode || DEFAULT_RETRY_MODE, + }, config), + sha256: config?.sha256 ?? Hash.bind(null, "sha256"), + streamCollector: config?.streamCollector ?? streamCollector, + useDualstackEndpoint: config?.useDualstackEndpoint ?? loadNodeConfig(NODE_USE_DUALSTACK_ENDPOINT_CONFIG_OPTIONS, profileConfig), + useFipsEndpoint: config?.useFipsEndpoint ?? loadNodeConfig(NODE_USE_FIPS_ENDPOINT_CONFIG_OPTIONS, profileConfig), + userAgentAppId: config?.userAgentAppId ?? loadNodeConfig(NODE_APP_ID_CONFIG_OPTIONS, profileConfig), + }; +}; diff --git a/amplify/functions/deleteDocument/node_modules/@aws-sdk/client-sso/dist-es/runtimeConfig.native.js b/amplify/functions/deleteDocument/node_modules/@aws-sdk/client-sso/dist-es/runtimeConfig.native.js new file mode 100644 index 0000000..0b54695 --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@aws-sdk/client-sso/dist-es/runtimeConfig.native.js @@ -0,0 +1,11 @@ +import { Sha256 } from "@aws-crypto/sha256-js"; +import { getRuntimeConfig as getBrowserRuntimeConfig } from "./runtimeConfig.browser"; +export const getRuntimeConfig = (config) => { + const browserDefaults = getBrowserRuntimeConfig(config); + return { + ...browserDefaults, + ...config, + runtime: "react-native", + sha256: config?.sha256 ?? 
Sha256, + }; +}; diff --git a/amplify/functions/deleteDocument/node_modules/@aws-sdk/client-sso/dist-es/runtimeConfig.shared.js b/amplify/functions/deleteDocument/node_modules/@aws-sdk/client-sso/dist-es/runtimeConfig.shared.js new file mode 100644 index 0000000..3dfac58 --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@aws-sdk/client-sso/dist-es/runtimeConfig.shared.js @@ -0,0 +1,36 @@ +import { AwsSdkSigV4Signer } from "@aws-sdk/core"; +import { NoAuthSigner } from "@smithy/core"; +import { NoOpLogger } from "@smithy/smithy-client"; +import { parseUrl } from "@smithy/url-parser"; +import { fromBase64, toBase64 } from "@smithy/util-base64"; +import { fromUtf8, toUtf8 } from "@smithy/util-utf8"; +import { defaultSSOHttpAuthSchemeProvider } from "./auth/httpAuthSchemeProvider"; +import { defaultEndpointResolver } from "./endpoint/endpointResolver"; +export const getRuntimeConfig = (config) => { + return { + apiVersion: "2019-06-10", + base64Decoder: config?.base64Decoder ?? fromBase64, + base64Encoder: config?.base64Encoder ?? toBase64, + disableHostPrefix: config?.disableHostPrefix ?? false, + endpointProvider: config?.endpointProvider ?? defaultEndpointResolver, + extensions: config?.extensions ?? [], + httpAuthSchemeProvider: config?.httpAuthSchemeProvider ?? defaultSSOHttpAuthSchemeProvider, + httpAuthSchemes: config?.httpAuthSchemes ?? [ + { + schemeId: "aws.auth#sigv4", + identityProvider: (ipc) => ipc.getIdentityProvider("aws.auth#sigv4"), + signer: new AwsSdkSigV4Signer(), + }, + { + schemeId: "smithy.api#noAuth", + identityProvider: (ipc) => ipc.getIdentityProvider("smithy.api#noAuth") || (async () => ({})), + signer: new NoAuthSigner(), + }, + ], + logger: config?.logger ?? new NoOpLogger(), + serviceId: config?.serviceId ?? "SSO", + urlParser: config?.urlParser ?? parseUrl, + utf8Decoder: config?.utf8Decoder ?? fromUtf8, + utf8Encoder: config?.utf8Encoder ?? 
toUtf8, + }; +}; diff --git a/amplify/functions/deleteDocument/node_modules/@aws-sdk/client-sso/dist-es/runtimeExtensions.js b/amplify/functions/deleteDocument/node_modules/@aws-sdk/client-sso/dist-es/runtimeExtensions.js new file mode 100644 index 0000000..5b29695 --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@aws-sdk/client-sso/dist-es/runtimeExtensions.js @@ -0,0 +1,9 @@ +import { getAwsRegionExtensionConfiguration, resolveAwsRegionExtensionConfiguration, } from "@aws-sdk/region-config-resolver"; +import { getHttpHandlerExtensionConfiguration, resolveHttpHandlerRuntimeConfig } from "@smithy/protocol-http"; +import { getDefaultExtensionConfiguration, resolveDefaultRuntimeConfig } from "@smithy/smithy-client"; +import { getHttpAuthExtensionConfiguration, resolveHttpAuthRuntimeConfig } from "./auth/httpAuthExtensionConfiguration"; +export const resolveRuntimeExtensions = (runtimeConfig, extensions) => { + const extensionConfiguration = Object.assign(getAwsRegionExtensionConfiguration(runtimeConfig), getDefaultExtensionConfiguration(runtimeConfig), getHttpHandlerExtensionConfiguration(runtimeConfig), getHttpAuthExtensionConfiguration(runtimeConfig)); + extensions.forEach((extension) => extension.configure(extensionConfiguration)); + return Object.assign(runtimeConfig, resolveAwsRegionExtensionConfiguration(extensionConfiguration), resolveDefaultRuntimeConfig(extensionConfiguration), resolveHttpHandlerRuntimeConfig(extensionConfiguration), resolveHttpAuthRuntimeConfig(extensionConfiguration)); +}; diff --git a/amplify/functions/deleteDocument/node_modules/@aws-sdk/client-sso/dist-types/SSO.d.ts b/amplify/functions/deleteDocument/node_modules/@aws-sdk/client-sso/dist-types/SSO.d.ts new file mode 100644 index 0000000..8500e0c --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@aws-sdk/client-sso/dist-types/SSO.d.ts @@ -0,0 +1,53 @@ +import { HttpHandlerOptions as __HttpHandlerOptions } from "@smithy/types"; +import { 
GetRoleCredentialsCommandInput, GetRoleCredentialsCommandOutput } from "./commands/GetRoleCredentialsCommand"; +import { ListAccountRolesCommandInput, ListAccountRolesCommandOutput } from "./commands/ListAccountRolesCommand"; +import { ListAccountsCommandInput, ListAccountsCommandOutput } from "./commands/ListAccountsCommand"; +import { LogoutCommandInput, LogoutCommandOutput } from "./commands/LogoutCommand"; +import { SSOClient } from "./SSOClient"; +export interface SSO { + /** + * @see {@link GetRoleCredentialsCommand} + */ + getRoleCredentials(args: GetRoleCredentialsCommandInput, options?: __HttpHandlerOptions): Promise; + getRoleCredentials(args: GetRoleCredentialsCommandInput, cb: (err: any, data?: GetRoleCredentialsCommandOutput) => void): void; + getRoleCredentials(args: GetRoleCredentialsCommandInput, options: __HttpHandlerOptions, cb: (err: any, data?: GetRoleCredentialsCommandOutput) => void): void; + /** + * @see {@link ListAccountRolesCommand} + */ + listAccountRoles(args: ListAccountRolesCommandInput, options?: __HttpHandlerOptions): Promise; + listAccountRoles(args: ListAccountRolesCommandInput, cb: (err: any, data?: ListAccountRolesCommandOutput) => void): void; + listAccountRoles(args: ListAccountRolesCommandInput, options: __HttpHandlerOptions, cb: (err: any, data?: ListAccountRolesCommandOutput) => void): void; + /** + * @see {@link ListAccountsCommand} + */ + listAccounts(args: ListAccountsCommandInput, options?: __HttpHandlerOptions): Promise; + listAccounts(args: ListAccountsCommandInput, cb: (err: any, data?: ListAccountsCommandOutput) => void): void; + listAccounts(args: ListAccountsCommandInput, options: __HttpHandlerOptions, cb: (err: any, data?: ListAccountsCommandOutput) => void): void; + /** + * @see {@link LogoutCommand} + */ + logout(args: LogoutCommandInput, options?: __HttpHandlerOptions): Promise; + logout(args: LogoutCommandInput, cb: (err: any, data?: LogoutCommandOutput) => void): void; + logout(args: LogoutCommandInput, 
options: __HttpHandlerOptions, cb: (err: any, data?: LogoutCommandOutput) => void): void; +} +/** + *

AWS IAM Identity Center (successor to AWS Single Sign-On) Portal is a web service that makes it easy for you to assign user access to + * IAM Identity Center resources such as the AWS access portal. Users can get AWS account applications and roles + * assigned to them and get federated into the application.

+ * + *

Although AWS Single Sign-On was renamed, the sso and + * identitystore API namespaces will continue to retain their original name for + * backward compatibility purposes. For more information, see IAM Identity Center rename.

+ *
+ *

This reference guide describes the IAM Identity Center Portal operations that you can call + * programatically and includes detailed information on data types and errors.

+ * + *

AWS provides SDKs that consist of libraries and sample code for various programming + * languages and platforms, such as Java, Ruby, .Net, iOS, or Android. The SDKs provide a + * convenient way to create programmatic access to IAM Identity Center and other AWS services. For more + * information about the AWS SDKs, including how to download and install them, see Tools for Amazon Web Services.

+ *
+ * @public + */ +export declare class SSO extends SSOClient implements SSO { +} diff --git a/amplify/functions/deleteDocument/node_modules/@aws-sdk/client-sso/dist-types/SSOClient.d.ts b/amplify/functions/deleteDocument/node_modules/@aws-sdk/client-sso/dist-types/SSOClient.d.ts new file mode 100644 index 0000000..acfb2fd --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@aws-sdk/client-sso/dist-types/SSOClient.d.ts @@ -0,0 +1,200 @@ +import { HostHeaderInputConfig, HostHeaderResolvedConfig } from "@aws-sdk/middleware-host-header"; +import { UserAgentInputConfig, UserAgentResolvedConfig } from "@aws-sdk/middleware-user-agent"; +import { RegionInputConfig, RegionResolvedConfig } from "@smithy/config-resolver"; +import { EndpointInputConfig, EndpointResolvedConfig } from "@smithy/middleware-endpoint"; +import { RetryInputConfig, RetryResolvedConfig } from "@smithy/middleware-retry"; +import { HttpHandlerUserInput as __HttpHandlerUserInput } from "@smithy/protocol-http"; +import { Client as __Client, DefaultsMode as __DefaultsMode, SmithyConfiguration as __SmithyConfiguration, SmithyResolvedConfiguration as __SmithyResolvedConfiguration } from "@smithy/smithy-client"; +import { BodyLengthCalculator as __BodyLengthCalculator, CheckOptionalClientConfig as __CheckOptionalClientConfig, ChecksumConstructor as __ChecksumConstructor, Decoder as __Decoder, Encoder as __Encoder, HashConstructor as __HashConstructor, HttpHandlerOptions as __HttpHandlerOptions, Logger as __Logger, Provider as __Provider, Provider, StreamCollector as __StreamCollector, UrlParser as __UrlParser, UserAgent as __UserAgent } from "@smithy/types"; +import { HttpAuthSchemeInputConfig, HttpAuthSchemeResolvedConfig } from "./auth/httpAuthSchemeProvider"; +import { GetRoleCredentialsCommandInput, GetRoleCredentialsCommandOutput } from "./commands/GetRoleCredentialsCommand"; +import { ListAccountRolesCommandInput, ListAccountRolesCommandOutput } from "./commands/ListAccountRolesCommand"; 
+import { ListAccountsCommandInput, ListAccountsCommandOutput } from "./commands/ListAccountsCommand"; +import { LogoutCommandInput, LogoutCommandOutput } from "./commands/LogoutCommand"; +import { ClientInputEndpointParameters, ClientResolvedEndpointParameters, EndpointParameters } from "./endpoint/EndpointParameters"; +import { RuntimeExtension, RuntimeExtensionsConfig } from "./runtimeExtensions"; +export { __Client }; +/** + * @public + */ +export type ServiceInputTypes = GetRoleCredentialsCommandInput | ListAccountRolesCommandInput | ListAccountsCommandInput | LogoutCommandInput; +/** + * @public + */ +export type ServiceOutputTypes = GetRoleCredentialsCommandOutput | ListAccountRolesCommandOutput | ListAccountsCommandOutput | LogoutCommandOutput; +/** + * @public + */ +export interface ClientDefaults extends Partial<__SmithyConfiguration<__HttpHandlerOptions>> { + /** + * The HTTP handler to use or its constructor options. Fetch in browser and Https in Nodejs. + */ + requestHandler?: __HttpHandlerUserInput; + /** + * A constructor for a class implementing the {@link @smithy/types#ChecksumConstructor} interface + * that computes the SHA-256 HMAC or checksum of a string or binary buffer. + * @internal + */ + sha256?: __ChecksumConstructor | __HashConstructor; + /** + * The function that will be used to convert strings into HTTP endpoints. + * @internal + */ + urlParser?: __UrlParser; + /** + * A function that can calculate the length of a request body. + * @internal + */ + bodyLengthChecker?: __BodyLengthCalculator; + /** + * A function that converts a stream into an array of bytes. + * @internal + */ + streamCollector?: __StreamCollector; + /** + * The function that will be used to convert a base64-encoded string to a byte array. + * @internal + */ + base64Decoder?: __Decoder; + /** + * The function that will be used to convert binary data to a base64-encoded string. 
+ * @internal + */ + base64Encoder?: __Encoder; + /** + * The function that will be used to convert a UTF8-encoded string to a byte array. + * @internal + */ + utf8Decoder?: __Decoder; + /** + * The function that will be used to convert binary data to a UTF-8 encoded string. + * @internal + */ + utf8Encoder?: __Encoder; + /** + * The runtime environment. + * @internal + */ + runtime?: string; + /** + * Disable dynamically changing the endpoint of the client based on the hostPrefix + * trait of an operation. + */ + disableHostPrefix?: boolean; + /** + * Unique service identifier. + * @internal + */ + serviceId?: string; + /** + * Enables IPv6/IPv4 dualstack endpoint. + */ + useDualstackEndpoint?: boolean | __Provider; + /** + * Enables FIPS compatible endpoints. + */ + useFipsEndpoint?: boolean | __Provider; + /** + * The AWS region to which this client will send requests + */ + region?: string | __Provider; + /** + * Setting a client profile is similar to setting a value for the + * AWS_PROFILE environment variable. Setting a profile on a client + * in code only affects the single client instance, unlike AWS_PROFILE. + * + * When set, and only for environments where an AWS configuration + * file exists, fields configurable by this file will be retrieved + * from the specified profile within that file. + * Conflicting code configuration and environment variables will + * still have higher priority. + * + * For client credential resolution that involves checking the AWS + * configuration file, the client's profile (this value) will be + * used unless a different profile is set in the credential + * provider options. + * + */ + profile?: string; + /** + * The provider populating default tracking information to be sent with `user-agent`, `x-amz-user-agent` header + * @internal + */ + defaultUserAgentProvider?: Provider<__UserAgent>; + /** + * Value for how many times a request will be made at most in case of retry. 
+ */ + maxAttempts?: number | __Provider; + /** + * Specifies which retry algorithm to use. + * @see https://docs.aws.amazon.com/AWSJavaScriptSDK/v3/latest/Package/-smithy-util-retry/Enum/RETRY_MODES/ + * + */ + retryMode?: string | __Provider; + /** + * Optional logger for logging debug/info/warn/error. + */ + logger?: __Logger; + /** + * Optional extensions + */ + extensions?: RuntimeExtension[]; + /** + * The {@link @smithy/smithy-client#DefaultsMode} that will be used to determine how certain default configuration options are resolved in the SDK. + */ + defaultsMode?: __DefaultsMode | __Provider<__DefaultsMode>; +} +/** + * @public + */ +export type SSOClientConfigType = Partial<__SmithyConfiguration<__HttpHandlerOptions>> & ClientDefaults & UserAgentInputConfig & RetryInputConfig & RegionInputConfig & HostHeaderInputConfig & EndpointInputConfig & HttpAuthSchemeInputConfig & ClientInputEndpointParameters; +/** + * @public + * + * The configuration interface of SSOClient class constructor that set the region, credentials and other options. + */ +export interface SSOClientConfig extends SSOClientConfigType { +} +/** + * @public + */ +export type SSOClientResolvedConfigType = __SmithyResolvedConfiguration<__HttpHandlerOptions> & Required & RuntimeExtensionsConfig & UserAgentResolvedConfig & RetryResolvedConfig & RegionResolvedConfig & HostHeaderResolvedConfig & EndpointResolvedConfig & HttpAuthSchemeResolvedConfig & ClientResolvedEndpointParameters; +/** + * @public + * + * The resolved configuration interface of SSOClient class. This is resolved and normalized from the {@link SSOClientConfig | constructor configuration interface}. + */ +export interface SSOClientResolvedConfig extends SSOClientResolvedConfigType { +} +/** + *

AWS IAM Identity Center (successor to AWS Single Sign-On) Portal is a web service that makes it easy for you to assign user access to + * IAM Identity Center resources such as the AWS access portal. Users can get AWS account applications and roles + * assigned to them and get federated into the application.

+ * + *

Although AWS Single Sign-On was renamed, the sso and + * identitystore API namespaces will continue to retain their original name for + * backward compatibility purposes. For more information, see IAM Identity Center rename.

+ *
+ *

This reference guide describes the IAM Identity Center Portal operations that you can call + * programatically and includes detailed information on data types and errors.

+ * + *

AWS provides SDKs that consist of libraries and sample code for various programming + * languages and platforms, such as Java, Ruby, .Net, iOS, or Android. The SDKs provide a + * convenient way to create programmatic access to IAM Identity Center and other AWS services. For more + * information about the AWS SDKs, including how to download and install them, see Tools for Amazon Web Services.

+ *
+ * @public + */ +export declare class SSOClient extends __Client<__HttpHandlerOptions, ServiceInputTypes, ServiceOutputTypes, SSOClientResolvedConfig> { + /** + * The resolved configuration of SSOClient class. This is resolved and normalized from the {@link SSOClientConfig | constructor configuration interface}. + */ + readonly config: SSOClientResolvedConfig; + constructor(...[configuration]: __CheckOptionalClientConfig); + /** + * Destroy underlying resources, like sockets. It's usually not necessary to do this. + * However in Node.js, it's best to explicitly shut down the client's agent when it is no longer needed. + * Otherwise, sockets might stay open for quite a long time before the server terminates them. + */ + destroy(): void; +} diff --git a/amplify/functions/deleteDocument/node_modules/@aws-sdk/client-sso/dist-types/auth/httpAuthExtensionConfiguration.d.ts b/amplify/functions/deleteDocument/node_modules/@aws-sdk/client-sso/dist-types/auth/httpAuthExtensionConfiguration.d.ts new file mode 100644 index 0000000..7e7ff4c --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@aws-sdk/client-sso/dist-types/auth/httpAuthExtensionConfiguration.d.ts @@ -0,0 +1,29 @@ +import { AwsCredentialIdentity, AwsCredentialIdentityProvider, HttpAuthScheme } from "@smithy/types"; +import { SSOHttpAuthSchemeProvider } from "./httpAuthSchemeProvider"; +/** + * @internal + */ +export interface HttpAuthExtensionConfiguration { + setHttpAuthScheme(httpAuthScheme: HttpAuthScheme): void; + httpAuthSchemes(): HttpAuthScheme[]; + setHttpAuthSchemeProvider(httpAuthSchemeProvider: SSOHttpAuthSchemeProvider): void; + httpAuthSchemeProvider(): SSOHttpAuthSchemeProvider; + setCredentials(credentials: AwsCredentialIdentity | AwsCredentialIdentityProvider): void; + credentials(): AwsCredentialIdentity | AwsCredentialIdentityProvider | undefined; +} +/** + * @internal + */ +export type HttpAuthRuntimeConfig = Partial<{ + httpAuthSchemes: HttpAuthScheme[]; + 
httpAuthSchemeProvider: SSOHttpAuthSchemeProvider; + credentials: AwsCredentialIdentity | AwsCredentialIdentityProvider; +}>; +/** + * @internal + */ +export declare const getHttpAuthExtensionConfiguration: (runtimeConfig: HttpAuthRuntimeConfig) => HttpAuthExtensionConfiguration; +/** + * @internal + */ +export declare const resolveHttpAuthRuntimeConfig: (config: HttpAuthExtensionConfiguration) => HttpAuthRuntimeConfig; diff --git a/amplify/functions/deleteDocument/node_modules/@aws-sdk/client-sso/dist-types/auth/httpAuthSchemeProvider.d.ts b/amplify/functions/deleteDocument/node_modules/@aws-sdk/client-sso/dist-types/auth/httpAuthSchemeProvider.d.ts new file mode 100644 index 0000000..bf3aad6 --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@aws-sdk/client-sso/dist-types/auth/httpAuthSchemeProvider.d.ts @@ -0,0 +1,75 @@ +import { AwsSdkSigV4AuthInputConfig, AwsSdkSigV4AuthResolvedConfig, AwsSdkSigV4PreviouslyResolved } from "@aws-sdk/core"; +import { HandlerExecutionContext, HttpAuthScheme, HttpAuthSchemeParameters, HttpAuthSchemeParametersProvider, HttpAuthSchemeProvider, Provider } from "@smithy/types"; +import { SSOClientResolvedConfig } from "../SSOClient"; +/** + * @internal + */ +export interface SSOHttpAuthSchemeParameters extends HttpAuthSchemeParameters { + region?: string; +} +/** + * @internal + */ +export interface SSOHttpAuthSchemeParametersProvider extends HttpAuthSchemeParametersProvider { +} +/** + * @internal + */ +export declare const defaultSSOHttpAuthSchemeParametersProvider: (config: SSOClientResolvedConfig, context: HandlerExecutionContext, input: object) => Promise; +/** + * @internal + */ +export interface SSOHttpAuthSchemeProvider extends HttpAuthSchemeProvider { +} +/** + * @internal + */ +export declare const defaultSSOHttpAuthSchemeProvider: SSOHttpAuthSchemeProvider; +/** + * @internal + */ +export interface HttpAuthSchemeInputConfig extends AwsSdkSigV4AuthInputConfig { + /** + * A comma-separated list of 
case-sensitive auth scheme names. + * An auth scheme name is a fully qualified auth scheme ID with the namespace prefix trimmed. + * For example, the auth scheme with ID aws.auth#sigv4 is named sigv4. + * @public + */ + authSchemePreference?: string[] | Provider; + /** + * Configuration of HttpAuthSchemes for a client which provides default identity providers and signers per auth scheme. + * @internal + */ + httpAuthSchemes?: HttpAuthScheme[]; + /** + * Configuration of an HttpAuthSchemeProvider for a client which resolves which HttpAuthScheme to use. + * @internal + */ + httpAuthSchemeProvider?: SSOHttpAuthSchemeProvider; +} +/** + * @internal + */ +export interface HttpAuthSchemeResolvedConfig extends AwsSdkSigV4AuthResolvedConfig { + /** + * A comma-separated list of case-sensitive auth scheme names. + * An auth scheme name is a fully qualified auth scheme ID with the namespace prefix trimmed. + * For example, the auth scheme with ID aws.auth#sigv4 is named sigv4. + * @public + */ + readonly authSchemePreference: Provider; + /** + * Configuration of HttpAuthSchemes for a client which provides default identity providers and signers per auth scheme. + * @internal + */ + readonly httpAuthSchemes: HttpAuthScheme[]; + /** + * Configuration of an HttpAuthSchemeProvider for a client which resolves which HttpAuthScheme to use. 
+ * @internal + */ + readonly httpAuthSchemeProvider: SSOHttpAuthSchemeProvider; +} +/** + * @internal + */ +export declare const resolveHttpAuthSchemeConfig: (config: T & HttpAuthSchemeInputConfig & AwsSdkSigV4PreviouslyResolved) => T & HttpAuthSchemeResolvedConfig; diff --git a/amplify/functions/deleteDocument/node_modules/@aws-sdk/client-sso/dist-types/commands/GetRoleCredentialsCommand.d.ts b/amplify/functions/deleteDocument/node_modules/@aws-sdk/client-sso/dist-types/commands/GetRoleCredentialsCommand.d.ts new file mode 100644 index 0000000..f306bd5 --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@aws-sdk/client-sso/dist-types/commands/GetRoleCredentialsCommand.d.ts @@ -0,0 +1,95 @@ +import { Command as $Command } from "@smithy/smithy-client"; +import { MetadataBearer as __MetadataBearer } from "@smithy/types"; +import { GetRoleCredentialsRequest, GetRoleCredentialsResponse } from "../models/models_0"; +import { ServiceInputTypes, ServiceOutputTypes, SSOClientResolvedConfig } from "../SSOClient"; +/** + * @public + */ +export type { __MetadataBearer }; +export { $Command }; +/** + * @public + * + * The input for {@link GetRoleCredentialsCommand}. + */ +export interface GetRoleCredentialsCommandInput extends GetRoleCredentialsRequest { +} +/** + * @public + * + * The output of {@link GetRoleCredentialsCommand}. + */ +export interface GetRoleCredentialsCommandOutput extends GetRoleCredentialsResponse, __MetadataBearer { +} +declare const GetRoleCredentialsCommand_base: { + new (input: GetRoleCredentialsCommandInput): import("@smithy/smithy-client").CommandImpl; + new (__0_0: GetRoleCredentialsCommandInput): import("@smithy/smithy-client").CommandImpl; + getEndpointParameterInstructions(): import("@smithy/middleware-endpoint").EndpointParameterInstructions; +}; +/** + *

Returns the STS short-term credentials for a given role name that is assigned to the + * user.

+ * @example + * Use a bare-bones client and the command you need to make an API call. + * ```javascript + * import { SSOClient, GetRoleCredentialsCommand } from "@aws-sdk/client-sso"; // ES Modules import + * // const { SSOClient, GetRoleCredentialsCommand } = require("@aws-sdk/client-sso"); // CommonJS import + * const client = new SSOClient(config); + * const input = { // GetRoleCredentialsRequest + * roleName: "STRING_VALUE", // required + * accountId: "STRING_VALUE", // required + * accessToken: "STRING_VALUE", // required + * }; + * const command = new GetRoleCredentialsCommand(input); + * const response = await client.send(command); + * // { // GetRoleCredentialsResponse + * // roleCredentials: { // RoleCredentials + * // accessKeyId: "STRING_VALUE", + * // secretAccessKey: "STRING_VALUE", + * // sessionToken: "STRING_VALUE", + * // expiration: Number("long"), + * // }, + * // }; + * + * ``` + * + * @param GetRoleCredentialsCommandInput - {@link GetRoleCredentialsCommandInput} + * @returns {@link GetRoleCredentialsCommandOutput} + * @see {@link GetRoleCredentialsCommandInput} for command's `input` shape. + * @see {@link GetRoleCredentialsCommandOutput} for command's `response` shape. + * @see {@link SSOClientResolvedConfig | config} for SSOClient's `config` shape. + * + * @throws {@link InvalidRequestException} (client fault) + *

Indicates that a problem occurred with the input to the request. For example, a required + * parameter might be missing or out of range.

+ * + * @throws {@link ResourceNotFoundException} (client fault) + *

The specified resource doesn't exist.

+ * + * @throws {@link TooManyRequestsException} (client fault) + *

Indicates that the request is being made too frequently and is more than what the server + * can handle.

+ * + * @throws {@link UnauthorizedException} (client fault) + *

Indicates that the request is not authorized. This can happen due to an invalid access + * token in the request.

+ * + * @throws {@link SSOServiceException} + *

Base exception class for all service exceptions from SSO service.

+ * + * + * @public + */ +export declare class GetRoleCredentialsCommand extends GetRoleCredentialsCommand_base { + /** @internal type navigation helper, not in runtime. */ + protected static __types: { + api: { + input: GetRoleCredentialsRequest; + output: GetRoleCredentialsResponse; + }; + sdk: { + input: GetRoleCredentialsCommandInput; + output: GetRoleCredentialsCommandOutput; + }; + }; +} diff --git a/amplify/functions/deleteDocument/node_modules/@aws-sdk/client-sso/dist-types/commands/ListAccountRolesCommand.d.ts b/amplify/functions/deleteDocument/node_modules/@aws-sdk/client-sso/dist-types/commands/ListAccountRolesCommand.d.ts new file mode 100644 index 0000000..8ce6a04 --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@aws-sdk/client-sso/dist-types/commands/ListAccountRolesCommand.d.ts @@ -0,0 +1,96 @@ +import { Command as $Command } from "@smithy/smithy-client"; +import { MetadataBearer as __MetadataBearer } from "@smithy/types"; +import { ListAccountRolesRequest, ListAccountRolesResponse } from "../models/models_0"; +import { ServiceInputTypes, ServiceOutputTypes, SSOClientResolvedConfig } from "../SSOClient"; +/** + * @public + */ +export type { __MetadataBearer }; +export { $Command }; +/** + * @public + * + * The input for {@link ListAccountRolesCommand}. + */ +export interface ListAccountRolesCommandInput extends ListAccountRolesRequest { +} +/** + * @public + * + * The output of {@link ListAccountRolesCommand}. + */ +export interface ListAccountRolesCommandOutput extends ListAccountRolesResponse, __MetadataBearer { +} +declare const ListAccountRolesCommand_base: { + new (input: ListAccountRolesCommandInput): import("@smithy/smithy-client").CommandImpl; + new (__0_0: ListAccountRolesCommandInput): import("@smithy/smithy-client").CommandImpl; + getEndpointParameterInstructions(): import("@smithy/middleware-endpoint").EndpointParameterInstructions; +}; +/** + *

Lists all roles that are assigned to the user for a given AWS account.

+ * @example + * Use a bare-bones client and the command you need to make an API call. + * ```javascript + * import { SSOClient, ListAccountRolesCommand } from "@aws-sdk/client-sso"; // ES Modules import + * // const { SSOClient, ListAccountRolesCommand } = require("@aws-sdk/client-sso"); // CommonJS import + * const client = new SSOClient(config); + * const input = { // ListAccountRolesRequest + * nextToken: "STRING_VALUE", + * maxResults: Number("int"), + * accessToken: "STRING_VALUE", // required + * accountId: "STRING_VALUE", // required + * }; + * const command = new ListAccountRolesCommand(input); + * const response = await client.send(command); + * // { // ListAccountRolesResponse + * // nextToken: "STRING_VALUE", + * // roleList: [ // RoleListType + * // { // RoleInfo + * // roleName: "STRING_VALUE", + * // accountId: "STRING_VALUE", + * // }, + * // ], + * // }; + * + * ``` + * + * @param ListAccountRolesCommandInput - {@link ListAccountRolesCommandInput} + * @returns {@link ListAccountRolesCommandOutput} + * @see {@link ListAccountRolesCommandInput} for command's `input` shape. + * @see {@link ListAccountRolesCommandOutput} for command's `response` shape. + * @see {@link SSOClientResolvedConfig | config} for SSOClient's `config` shape. + * + * @throws {@link InvalidRequestException} (client fault) + *

Indicates that a problem occurred with the input to the request. For example, a required + * parameter might be missing or out of range.

+ * + * @throws {@link ResourceNotFoundException} (client fault) + *

The specified resource doesn't exist.

+ * + * @throws {@link TooManyRequestsException} (client fault) + *

Indicates that the request is being made too frequently and is more than what the server + * can handle.

+ * + * @throws {@link UnauthorizedException} (client fault) + *

Indicates that the request is not authorized. This can happen due to an invalid access + * token in the request.

+ * + * @throws {@link SSOServiceException} + *

Base exception class for all service exceptions from SSO service.

+ * + * + * @public + */ +export declare class ListAccountRolesCommand extends ListAccountRolesCommand_base { + /** @internal type navigation helper, not in runtime. */ + protected static __types: { + api: { + input: ListAccountRolesRequest; + output: ListAccountRolesResponse; + }; + sdk: { + input: ListAccountRolesCommandInput; + output: ListAccountRolesCommandOutput; + }; + }; +} diff --git a/amplify/functions/deleteDocument/node_modules/@aws-sdk/client-sso/dist-types/commands/ListAccountsCommand.d.ts b/amplify/functions/deleteDocument/node_modules/@aws-sdk/client-sso/dist-types/commands/ListAccountsCommand.d.ts new file mode 100644 index 0000000..cffc47e --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@aws-sdk/client-sso/dist-types/commands/ListAccountsCommand.d.ts @@ -0,0 +1,98 @@ +import { Command as $Command } from "@smithy/smithy-client"; +import { MetadataBearer as __MetadataBearer } from "@smithy/types"; +import { ListAccountsRequest, ListAccountsResponse } from "../models/models_0"; +import { ServiceInputTypes, ServiceOutputTypes, SSOClientResolvedConfig } from "../SSOClient"; +/** + * @public + */ +export type { __MetadataBearer }; +export { $Command }; +/** + * @public + * + * The input for {@link ListAccountsCommand}. + */ +export interface ListAccountsCommandInput extends ListAccountsRequest { +} +/** + * @public + * + * The output of {@link ListAccountsCommand}. + */ +export interface ListAccountsCommandOutput extends ListAccountsResponse, __MetadataBearer { +} +declare const ListAccountsCommand_base: { + new (input: ListAccountsCommandInput): import("@smithy/smithy-client").CommandImpl; + new (__0_0: ListAccountsCommandInput): import("@smithy/smithy-client").CommandImpl; + getEndpointParameterInstructions(): import("@smithy/middleware-endpoint").EndpointParameterInstructions; +}; +/** + *

Lists all AWS accounts assigned to the user. These AWS accounts are assigned by the + * administrator of the account. For more information, see Assign User Access in the IAM Identity Center User Guide. This operation + * returns a paginated response.

+ * @example + * Use a bare-bones client and the command you need to make an API call. + * ```javascript + * import { SSOClient, ListAccountsCommand } from "@aws-sdk/client-sso"; // ES Modules import + * // const { SSOClient, ListAccountsCommand } = require("@aws-sdk/client-sso"); // CommonJS import + * const client = new SSOClient(config); + * const input = { // ListAccountsRequest + * nextToken: "STRING_VALUE", + * maxResults: Number("int"), + * accessToken: "STRING_VALUE", // required + * }; + * const command = new ListAccountsCommand(input); + * const response = await client.send(command); + * // { // ListAccountsResponse + * // nextToken: "STRING_VALUE", + * // accountList: [ // AccountListType + * // { // AccountInfo + * // accountId: "STRING_VALUE", + * // accountName: "STRING_VALUE", + * // emailAddress: "STRING_VALUE", + * // }, + * // ], + * // }; + * + * ``` + * + * @param ListAccountsCommandInput - {@link ListAccountsCommandInput} + * @returns {@link ListAccountsCommandOutput} + * @see {@link ListAccountsCommandInput} for command's `input` shape. + * @see {@link ListAccountsCommandOutput} for command's `response` shape. + * @see {@link SSOClientResolvedConfig | config} for SSOClient's `config` shape. + * + * @throws {@link InvalidRequestException} (client fault) + *

Indicates that a problem occurred with the input to the request. For example, a required + * parameter might be missing or out of range.

+ * + * @throws {@link ResourceNotFoundException} (client fault) + *

The specified resource doesn't exist.

+ * + * @throws {@link TooManyRequestsException} (client fault) + *

Indicates that the request is being made too frequently and is more than what the server + * can handle.

+ * + * @throws {@link UnauthorizedException} (client fault) + *

Indicates that the request is not authorized. This can happen due to an invalid access + * token in the request.

+ * + * @throws {@link SSOServiceException} + *

Base exception class for all service exceptions from SSO service.

+ * + * + * @public + */ +export declare class ListAccountsCommand extends ListAccountsCommand_base { + /** @internal type navigation helper, not in runtime. */ + protected static __types: { + api: { + input: ListAccountsRequest; + output: ListAccountsResponse; + }; + sdk: { + input: ListAccountsCommandInput; + output: ListAccountsCommandOutput; + }; + }; +} diff --git a/amplify/functions/deleteDocument/node_modules/@aws-sdk/client-sso/dist-types/commands/LogoutCommand.d.ts b/amplify/functions/deleteDocument/node_modules/@aws-sdk/client-sso/dist-types/commands/LogoutCommand.d.ts new file mode 100644 index 0000000..e85fe33 --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@aws-sdk/client-sso/dist-types/commands/LogoutCommand.d.ts @@ -0,0 +1,95 @@ +import { Command as $Command } from "@smithy/smithy-client"; +import { MetadataBearer as __MetadataBearer } from "@smithy/types"; +import { LogoutRequest } from "../models/models_0"; +import { ServiceInputTypes, ServiceOutputTypes, SSOClientResolvedConfig } from "../SSOClient"; +/** + * @public + */ +export type { __MetadataBearer }; +export { $Command }; +/** + * @public + * + * The input for {@link LogoutCommand}. + */ +export interface LogoutCommandInput extends LogoutRequest { +} +/** + * @public + * + * The output of {@link LogoutCommand}. + */ +export interface LogoutCommandOutput extends __MetadataBearer { +} +declare const LogoutCommand_base: { + new (input: LogoutCommandInput): import("@smithy/smithy-client").CommandImpl; + new (__0_0: LogoutCommandInput): import("@smithy/smithy-client").CommandImpl; + getEndpointParameterInstructions(): import("@smithy/middleware-endpoint").EndpointParameterInstructions; +}; +/** + *

Removes the locally stored SSO tokens from the client-side cache and sends an API call to + * the IAM Identity Center service to invalidate the corresponding server-side IAM Identity Center sign in + * session.

+ * + *

If a user uses IAM Identity Center to access the AWS CLI, the user’s IAM Identity Center sign in session is + * used to obtain an IAM session, as specified in the corresponding IAM Identity Center permission set. + * More specifically, IAM Identity Center assumes an IAM role in the target account on behalf of the user, + * and the corresponding temporary AWS credentials are returned to the client.

+ *

After user logout, any existing IAM role sessions that were created by using IAM Identity Center + * permission sets continue based on the duration configured in the permission set. + * For more information, see User + * authentications in the IAM Identity Center User + * Guide.

+ *
+ * @example + * Use a bare-bones client and the command you need to make an API call. + * ```javascript + * import { SSOClient, LogoutCommand } from "@aws-sdk/client-sso"; // ES Modules import + * // const { SSOClient, LogoutCommand } = require("@aws-sdk/client-sso"); // CommonJS import + * const client = new SSOClient(config); + * const input = { // LogoutRequest + * accessToken: "STRING_VALUE", // required + * }; + * const command = new LogoutCommand(input); + * const response = await client.send(command); + * // {}; + * + * ``` + * + * @param LogoutCommandInput - {@link LogoutCommandInput} + * @returns {@link LogoutCommandOutput} + * @see {@link LogoutCommandInput} for command's `input` shape. + * @see {@link LogoutCommandOutput} for command's `response` shape. + * @see {@link SSOClientResolvedConfig | config} for SSOClient's `config` shape. + * + * @throws {@link InvalidRequestException} (client fault) + *

Indicates that a problem occurred with the input to the request. For example, a required + * parameter might be missing or out of range.

+ * + * @throws {@link TooManyRequestsException} (client fault) + *

Indicates that the request is being made too frequently and is more than what the server + * can handle.

+ * + * @throws {@link UnauthorizedException} (client fault) + *

Indicates that the request is not authorized. This can happen due to an invalid access + * token in the request.

+ * + * @throws {@link SSOServiceException} + *

Base exception class for all service exceptions from SSO service.

+ * + * + * @public + */ +export declare class LogoutCommand extends LogoutCommand_base { + /** @internal type navigation helper, not in runtime. */ + protected static __types: { + api: { + input: LogoutRequest; + output: {}; + }; + sdk: { + input: LogoutCommandInput; + output: LogoutCommandOutput; + }; + }; +} diff --git a/amplify/functions/deleteDocument/node_modules/@aws-sdk/client-sso/dist-types/commands/index.d.ts b/amplify/functions/deleteDocument/node_modules/@aws-sdk/client-sso/dist-types/commands/index.d.ts new file mode 100644 index 0000000..0ab890d --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@aws-sdk/client-sso/dist-types/commands/index.d.ts @@ -0,0 +1,4 @@ +export * from "./GetRoleCredentialsCommand"; +export * from "./ListAccountRolesCommand"; +export * from "./ListAccountsCommand"; +export * from "./LogoutCommand"; diff --git a/amplify/functions/deleteDocument/node_modules/@aws-sdk/client-sso/dist-types/endpoint/EndpointParameters.d.ts b/amplify/functions/deleteDocument/node_modules/@aws-sdk/client-sso/dist-types/endpoint/EndpointParameters.d.ts new file mode 100644 index 0000000..23f42e3 --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@aws-sdk/client-sso/dist-types/endpoint/EndpointParameters.d.ts @@ -0,0 +1,40 @@ +import { Endpoint, EndpointParameters as __EndpointParameters, EndpointV2, Provider } from "@smithy/types"; +/** + * @public + */ +export interface ClientInputEndpointParameters { + region?: string | Provider; + useDualstackEndpoint?: boolean | Provider; + useFipsEndpoint?: boolean | Provider; + endpoint?: string | Provider | Endpoint | Provider | EndpointV2 | Provider; +} +export type ClientResolvedEndpointParameters = ClientInputEndpointParameters & { + defaultSigningName: string; +}; +export declare const resolveClientEndpointParameters: (options: T & ClientInputEndpointParameters) => T & ClientInputEndpointParameters & { + defaultSigningName: string; +}; +export declare const commonParams: { + 
readonly UseFIPS: { + readonly type: "builtInParams"; + readonly name: "useFipsEndpoint"; + }; + readonly Endpoint: { + readonly type: "builtInParams"; + readonly name: "endpoint"; + }; + readonly Region: { + readonly type: "builtInParams"; + readonly name: "region"; + }; + readonly UseDualStack: { + readonly type: "builtInParams"; + readonly name: "useDualstackEndpoint"; + }; +}; +export interface EndpointParameters extends __EndpointParameters { + Region?: string; + UseDualStack?: boolean; + UseFIPS?: boolean; + Endpoint?: string; +} diff --git a/amplify/functions/deleteDocument/node_modules/@aws-sdk/client-sso/dist-types/endpoint/endpointResolver.d.ts b/amplify/functions/deleteDocument/node_modules/@aws-sdk/client-sso/dist-types/endpoint/endpointResolver.d.ts new file mode 100644 index 0000000..70a8eae --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@aws-sdk/client-sso/dist-types/endpoint/endpointResolver.d.ts @@ -0,0 +1,5 @@ +import { EndpointV2, Logger } from "@smithy/types"; +import { EndpointParameters } from "./EndpointParameters"; +export declare const defaultEndpointResolver: (endpointParams: EndpointParameters, context?: { + logger?: Logger; +}) => EndpointV2; diff --git a/amplify/functions/deleteDocument/node_modules/@aws-sdk/client-sso/dist-types/endpoint/ruleset.d.ts b/amplify/functions/deleteDocument/node_modules/@aws-sdk/client-sso/dist-types/endpoint/ruleset.d.ts new file mode 100644 index 0000000..4b23899 --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@aws-sdk/client-sso/dist-types/endpoint/ruleset.d.ts @@ -0,0 +1,2 @@ +import { RuleSetObject } from "@smithy/types"; +export declare const ruleSet: RuleSetObject; diff --git a/amplify/functions/deleteDocument/node_modules/@aws-sdk/client-sso/dist-types/extensionConfiguration.d.ts b/amplify/functions/deleteDocument/node_modules/@aws-sdk/client-sso/dist-types/extensionConfiguration.d.ts new file mode 100644 index 0000000..0f76dd3 --- /dev/null +++ 
b/amplify/functions/deleteDocument/node_modules/@aws-sdk/client-sso/dist-types/extensionConfiguration.d.ts @@ -0,0 +1,9 @@ +import { AwsRegionExtensionConfiguration } from "@aws-sdk/types"; +import { HttpHandlerExtensionConfiguration } from "@smithy/protocol-http"; +import { DefaultExtensionConfiguration } from "@smithy/types"; +import { HttpAuthExtensionConfiguration } from "./auth/httpAuthExtensionConfiguration"; +/** + * @internal + */ +export interface SSOExtensionConfiguration extends HttpHandlerExtensionConfiguration, DefaultExtensionConfiguration, AwsRegionExtensionConfiguration, HttpAuthExtensionConfiguration { +} diff --git a/amplify/functions/deleteDocument/node_modules/@aws-sdk/client-sso/dist-types/index.d.ts b/amplify/functions/deleteDocument/node_modules/@aws-sdk/client-sso/dist-types/index.d.ts new file mode 100644 index 0000000..3b3bcea --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@aws-sdk/client-sso/dist-types/index.d.ts @@ -0,0 +1,29 @@ +/** + *

AWS IAM Identity Center (successor to AWS Single Sign-On) Portal is a web service that makes it easy for you to assign user access to + * IAM Identity Center resources such as the AWS access portal. Users can get AWS account applications and roles + * assigned to them and get federated into the application.

+ * + *

Although AWS Single Sign-On was renamed, the sso and + * identitystore API namespaces will continue to retain their original name for + * backward compatibility purposes. For more information, see IAM Identity Center rename.

+ *
+ *

This reference guide describes the IAM Identity Center Portal operations that you can call + * programatically and includes detailed information on data types and errors.

+ * + *

AWS provides SDKs that consist of libraries and sample code for various programming + * languages and platforms, such as Java, Ruby, .Net, iOS, or Android. The SDKs provide a + * convenient way to create programmatic access to IAM Identity Center and other AWS services. For more + * information about the AWS SDKs, including how to download and install them, see Tools for Amazon Web Services.

+ *
+ * + * @packageDocumentation + */ +export * from "./SSOClient"; +export * from "./SSO"; +export { ClientInputEndpointParameters } from "./endpoint/EndpointParameters"; +export type { RuntimeExtension } from "./runtimeExtensions"; +export type { SSOExtensionConfiguration } from "./extensionConfiguration"; +export * from "./commands"; +export * from "./pagination"; +export * from "./models"; +export { SSOServiceException } from "./models/SSOServiceException"; diff --git a/amplify/functions/deleteDocument/node_modules/@aws-sdk/client-sso/dist-types/models/SSOServiceException.d.ts b/amplify/functions/deleteDocument/node_modules/@aws-sdk/client-sso/dist-types/models/SSOServiceException.d.ts new file mode 100644 index 0000000..9172f1a --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@aws-sdk/client-sso/dist-types/models/SSOServiceException.d.ts @@ -0,0 +1,14 @@ +import { ServiceException as __ServiceException, ServiceExceptionOptions as __ServiceExceptionOptions } from "@smithy/smithy-client"; +export type { __ServiceExceptionOptions }; +export { __ServiceException }; +/** + * @public + * + * Base exception class for all service exceptions from SSO service. 
+ */ +export declare class SSOServiceException extends __ServiceException { + /** + * @internal + */ + constructor(options: __ServiceExceptionOptions); +} diff --git a/amplify/functions/deleteDocument/node_modules/@aws-sdk/client-sso/dist-types/models/index.d.ts b/amplify/functions/deleteDocument/node_modules/@aws-sdk/client-sso/dist-types/models/index.d.ts new file mode 100644 index 0000000..09c5d6e --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@aws-sdk/client-sso/dist-types/models/index.d.ts @@ -0,0 +1 @@ +export * from "./models_0"; diff --git a/amplify/functions/deleteDocument/node_modules/@aws-sdk/client-sso/dist-types/models/models_0.d.ts b/amplify/functions/deleteDocument/node_modules/@aws-sdk/client-sso/dist-types/models/models_0.d.ts new file mode 100644 index 0000000..0d40fa7 --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@aws-sdk/client-sso/dist-types/models/models_0.d.ts @@ -0,0 +1,266 @@ +import { ExceptionOptionType as __ExceptionOptionType } from "@smithy/smithy-client"; +import { SSOServiceException as __BaseException } from "./SSOServiceException"; +/** + *

Provides information about your AWS account.

+ * @public + */ +export interface AccountInfo { + /** + *

The identifier of the AWS account that is assigned to the user.

+ * @public + */ + accountId?: string | undefined; + /** + *

The display name of the AWS account that is assigned to the user.

+ * @public + */ + accountName?: string | undefined; + /** + *

The email address of the AWS account that is assigned to the user.

+ * @public + */ + emailAddress?: string | undefined; +} +/** + * @public + */ +export interface GetRoleCredentialsRequest { + /** + *

The friendly name of the role that is assigned to the user.

+ * @public + */ + roleName: string | undefined; + /** + *

The identifier for the AWS account that is assigned to the user.

+ * @public + */ + accountId: string | undefined; + /** + *

The token issued by the CreateToken API call. For more information, see + * CreateToken in the IAM Identity Center OIDC API Reference Guide.

+ * @public + */ + accessToken: string | undefined; +} +/** + *

Provides information about the role credentials that are assigned to the user.

+ * @public + */ +export interface RoleCredentials { + /** + *

The identifier used for the temporary security credentials. For more information, see + * Using Temporary Security Credentials to Request Access to AWS Resources in the + * AWS IAM User Guide.

+ * @public + */ + accessKeyId?: string | undefined; + /** + *

The key that is used to sign the request. For more information, see Using Temporary Security Credentials to Request Access to AWS Resources in the + * AWS IAM User Guide.

+ * @public + */ + secretAccessKey?: string | undefined; + /** + *

The token used for temporary credentials. For more information, see Using Temporary Security Credentials to Request Access to AWS Resources in the + * AWS IAM User Guide.

+ * @public + */ + sessionToken?: string | undefined; + /** + *

The date on which temporary security credentials expire.

+ * @public + */ + expiration?: number | undefined; +} +/** + * @public + */ +export interface GetRoleCredentialsResponse { + /** + *

The credentials for the role that is assigned to the user.

+ * @public + */ + roleCredentials?: RoleCredentials | undefined; +} +/** + *

Indicates that a problem occurred with the input to the request. For example, a required + * parameter might be missing or out of range.

+ * @public + */ +export declare class InvalidRequestException extends __BaseException { + readonly name: "InvalidRequestException"; + readonly $fault: "client"; + /** + * @internal + */ + constructor(opts: __ExceptionOptionType); +} +/** + *

The specified resource doesn't exist.

+ * @public + */ +export declare class ResourceNotFoundException extends __BaseException { + readonly name: "ResourceNotFoundException"; + readonly $fault: "client"; + /** + * @internal + */ + constructor(opts: __ExceptionOptionType); +} +/** + *

Indicates that the request is being made too frequently and is more than what the server + * can handle.

+ * @public + */ +export declare class TooManyRequestsException extends __BaseException { + readonly name: "TooManyRequestsException"; + readonly $fault: "client"; + /** + * @internal + */ + constructor(opts: __ExceptionOptionType); +} +/** + *

Indicates that the request is not authorized. This can happen due to an invalid access + * token in the request.

+ * @public + */ +export declare class UnauthorizedException extends __BaseException { + readonly name: "UnauthorizedException"; + readonly $fault: "client"; + /** + * @internal + */ + constructor(opts: __ExceptionOptionType); +} +/** + * @public + */ +export interface ListAccountRolesRequest { + /** + *

The page token from the previous response output when you request subsequent pages.

+ * @public + */ + nextToken?: string | undefined; + /** + *

The number of items that clients can request per page.

+ * @public + */ + maxResults?: number | undefined; + /** + *

The token issued by the CreateToken API call. For more information, see + * CreateToken in the IAM Identity Center OIDC API Reference Guide.

+ * @public + */ + accessToken: string | undefined; + /** + *

The identifier for the AWS account that is assigned to the user.

+ * @public + */ + accountId: string | undefined; +} +/** + *

Provides information about the role that is assigned to the user.

+ * @public + */ +export interface RoleInfo { + /** + *

The friendly name of the role that is assigned to the user.

+ * @public + */ + roleName?: string | undefined; + /** + *

The identifier of the AWS account assigned to the user.

+ * @public + */ + accountId?: string | undefined; +} +/** + * @public + */ +export interface ListAccountRolesResponse { + /** + *

The page token client that is used to retrieve the list of accounts.

+ * @public + */ + nextToken?: string | undefined; + /** + *

A paginated response with the list of roles and the next token if more results are + * available.

+ * @public + */ + roleList?: RoleInfo[] | undefined; +} +/** + * @public + */ +export interface ListAccountsRequest { + /** + *

(Optional) When requesting subsequent pages, this is the page token from the previous + * response output.

+ * @public + */ + nextToken?: string | undefined; + /** + *

This is the number of items clients can request per page.

+ * @public + */ + maxResults?: number | undefined; + /** + *

The token issued by the CreateToken API call. For more information, see + * CreateToken in the IAM Identity Center OIDC API Reference Guide.

+ * @public + */ + accessToken: string | undefined; +} +/** + * @public + */ +export interface ListAccountsResponse { + /** + *

The page token client that is used to retrieve the list of accounts.

+ * @public + */ + nextToken?: string | undefined; + /** + *

A paginated response with the list of account information and the next token if more + * results are available.

+ * @public + */ + accountList?: AccountInfo[] | undefined; +} +/** + * @public + */ +export interface LogoutRequest { + /** + *

The token issued by the CreateToken API call. For more information, see + * CreateToken in the IAM Identity Center OIDC API Reference Guide.

+ * @public + */ + accessToken: string | undefined; +} +/** + * @internal + */ +export declare const GetRoleCredentialsRequestFilterSensitiveLog: (obj: GetRoleCredentialsRequest) => any; +/** + * @internal + */ +export declare const RoleCredentialsFilterSensitiveLog: (obj: RoleCredentials) => any; +/** + * @internal + */ +export declare const GetRoleCredentialsResponseFilterSensitiveLog: (obj: GetRoleCredentialsResponse) => any; +/** + * @internal + */ +export declare const ListAccountRolesRequestFilterSensitiveLog: (obj: ListAccountRolesRequest) => any; +/** + * @internal + */ +export declare const ListAccountsRequestFilterSensitiveLog: (obj: ListAccountsRequest) => any; +/** + * @internal + */ +export declare const LogoutRequestFilterSensitiveLog: (obj: LogoutRequest) => any; diff --git a/amplify/functions/deleteDocument/node_modules/@aws-sdk/client-sso/dist-types/pagination/Interfaces.d.ts b/amplify/functions/deleteDocument/node_modules/@aws-sdk/client-sso/dist-types/pagination/Interfaces.d.ts new file mode 100644 index 0000000..81addca --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@aws-sdk/client-sso/dist-types/pagination/Interfaces.d.ts @@ -0,0 +1,8 @@ +import { PaginationConfiguration } from "@smithy/types"; +import { SSOClient } from "../SSOClient"; +/** + * @public + */ +export interface SSOPaginationConfiguration extends PaginationConfiguration { + client: SSOClient; +} diff --git a/amplify/functions/deleteDocument/node_modules/@aws-sdk/client-sso/dist-types/pagination/ListAccountRolesPaginator.d.ts b/amplify/functions/deleteDocument/node_modules/@aws-sdk/client-sso/dist-types/pagination/ListAccountRolesPaginator.d.ts new file mode 100644 index 0000000..fa309d4 --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@aws-sdk/client-sso/dist-types/pagination/ListAccountRolesPaginator.d.ts @@ -0,0 +1,7 @@ +import { Paginator } from "@smithy/types"; +import { ListAccountRolesCommandInput, ListAccountRolesCommandOutput } from 
"../commands/ListAccountRolesCommand"; +import { SSOPaginationConfiguration } from "./Interfaces"; +/** + * @public + */ +export declare const paginateListAccountRoles: (config: SSOPaginationConfiguration, input: ListAccountRolesCommandInput, ...rest: any[]) => Paginator; diff --git a/amplify/functions/deleteDocument/node_modules/@aws-sdk/client-sso/dist-types/pagination/ListAccountsPaginator.d.ts b/amplify/functions/deleteDocument/node_modules/@aws-sdk/client-sso/dist-types/pagination/ListAccountsPaginator.d.ts new file mode 100644 index 0000000..21c2559 --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@aws-sdk/client-sso/dist-types/pagination/ListAccountsPaginator.d.ts @@ -0,0 +1,7 @@ +import { Paginator } from "@smithy/types"; +import { ListAccountsCommandInput, ListAccountsCommandOutput } from "../commands/ListAccountsCommand"; +import { SSOPaginationConfiguration } from "./Interfaces"; +/** + * @public + */ +export declare const paginateListAccounts: (config: SSOPaginationConfiguration, input: ListAccountsCommandInput, ...rest: any[]) => Paginator; diff --git a/amplify/functions/deleteDocument/node_modules/@aws-sdk/client-sso/dist-types/pagination/index.d.ts b/amplify/functions/deleteDocument/node_modules/@aws-sdk/client-sso/dist-types/pagination/index.d.ts new file mode 100644 index 0000000..1e7866f --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@aws-sdk/client-sso/dist-types/pagination/index.d.ts @@ -0,0 +1,3 @@ +export * from "./Interfaces"; +export * from "./ListAccountRolesPaginator"; +export * from "./ListAccountsPaginator"; diff --git a/amplify/functions/deleteDocument/node_modules/@aws-sdk/client-sso/dist-types/protocols/Aws_restJson1.d.ts b/amplify/functions/deleteDocument/node_modules/@aws-sdk/client-sso/dist-types/protocols/Aws_restJson1.d.ts new file mode 100644 index 0000000..02d97aa --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@aws-sdk/client-sso/dist-types/protocols/Aws_restJson1.d.ts @@ 
-0,0 +1,38 @@ +import { HttpRequest as __HttpRequest, HttpResponse as __HttpResponse } from "@smithy/protocol-http"; +import { SerdeContext as __SerdeContext } from "@smithy/types"; +import { GetRoleCredentialsCommandInput, GetRoleCredentialsCommandOutput } from "../commands/GetRoleCredentialsCommand"; +import { ListAccountRolesCommandInput, ListAccountRolesCommandOutput } from "../commands/ListAccountRolesCommand"; +import { ListAccountsCommandInput, ListAccountsCommandOutput } from "../commands/ListAccountsCommand"; +import { LogoutCommandInput, LogoutCommandOutput } from "../commands/LogoutCommand"; +/** + * serializeAws_restJson1GetRoleCredentialsCommand + */ +export declare const se_GetRoleCredentialsCommand: (input: GetRoleCredentialsCommandInput, context: __SerdeContext) => Promise<__HttpRequest>; +/** + * serializeAws_restJson1ListAccountRolesCommand + */ +export declare const se_ListAccountRolesCommand: (input: ListAccountRolesCommandInput, context: __SerdeContext) => Promise<__HttpRequest>; +/** + * serializeAws_restJson1ListAccountsCommand + */ +export declare const se_ListAccountsCommand: (input: ListAccountsCommandInput, context: __SerdeContext) => Promise<__HttpRequest>; +/** + * serializeAws_restJson1LogoutCommand + */ +export declare const se_LogoutCommand: (input: LogoutCommandInput, context: __SerdeContext) => Promise<__HttpRequest>; +/** + * deserializeAws_restJson1GetRoleCredentialsCommand + */ +export declare const de_GetRoleCredentialsCommand: (output: __HttpResponse, context: __SerdeContext) => Promise; +/** + * deserializeAws_restJson1ListAccountRolesCommand + */ +export declare const de_ListAccountRolesCommand: (output: __HttpResponse, context: __SerdeContext) => Promise; +/** + * deserializeAws_restJson1ListAccountsCommand + */ +export declare const de_ListAccountsCommand: (output: __HttpResponse, context: __SerdeContext) => Promise; +/** + * deserializeAws_restJson1LogoutCommand + */ +export declare const de_LogoutCommand: (output: 
__HttpResponse, context: __SerdeContext) => Promise; diff --git a/amplify/functions/deleteDocument/node_modules/@aws-sdk/client-sso/dist-types/runtimeConfig.browser.d.ts b/amplify/functions/deleteDocument/node_modules/@aws-sdk/client-sso/dist-types/runtimeConfig.browser.d.ts new file mode 100644 index 0000000..c593515 --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@aws-sdk/client-sso/dist-types/runtimeConfig.browser.d.ts @@ -0,0 +1,57 @@ +import { FetchHttpHandler as RequestHandler } from "@smithy/fetch-http-handler"; +import { SSOClientConfig } from "./SSOClient"; +/** + * @internal + */ +export declare const getRuntimeConfig: (config: SSOClientConfig) => { + runtime: string; + defaultsMode: import("@smithy/types").Provider; + bodyLengthChecker: import("@smithy/types").BodyLengthCalculator; + defaultUserAgentProvider: (config?: import("@aws-sdk/util-user-agent-browser").PreviouslyResolved | undefined) => Promise; + maxAttempts: number | import("@smithy/types").Provider; + region: string | import("@smithy/types").Provider; + requestHandler: import("@smithy/protocol-http").HttpHandler | RequestHandler; + retryMode: string | import("@smithy/types").Provider; + sha256: import("@smithy/types").HashConstructor; + streamCollector: import("@smithy/types").StreamCollector; + useDualstackEndpoint: boolean | import("@smithy/types").Provider; + useFipsEndpoint: boolean | import("@smithy/types").Provider; + apiVersion: string; + cacheMiddleware?: boolean | undefined; + urlParser: import("@smithy/types").UrlParser; + base64Decoder: import("@smithy/types").Decoder; + base64Encoder: (_input: string | Uint8Array) => string; + utf8Decoder: import("@smithy/types").Decoder; + utf8Encoder: (input: string | Uint8Array) => string; + disableHostPrefix: boolean; + serviceId: string; + profile?: string | undefined; + logger: import("@smithy/types").Logger; + extensions: import("./runtimeExtensions").RuntimeExtension[]; + customUserAgent?: string | 
import("@smithy/types").UserAgent | undefined; + userAgentAppId?: string | import("@smithy/types").Provider | undefined; + retryStrategy?: import("@smithy/types").RetryStrategy | import("@smithy/types").RetryStrategyV2 | undefined; + endpoint?: ((string | import("@smithy/types").Endpoint | import("@smithy/types").Provider | import("@smithy/types").EndpointV2 | import("@smithy/types").Provider) & (string | import("@smithy/types").Provider | import("@smithy/types").Endpoint | import("@smithy/types").Provider | import("@smithy/types").EndpointV2 | import("@smithy/types").Provider)) | undefined; + endpointProvider: (endpointParams: import("./endpoint/EndpointParameters").EndpointParameters, context?: { + logger?: import("@smithy/types").Logger | undefined; + }) => import("@smithy/types").EndpointV2; + tls?: boolean | undefined; + serviceConfiguredEndpoint?: undefined; + authSchemePreference?: string[] | import("@smithy/types").Provider | undefined; + httpAuthSchemes: import("@smithy/types").HttpAuthScheme[] | ({ + schemeId: string; + identityProvider: (ipc: import("@smithy/types").IdentityProviderConfig) => import("@smithy/types").IdentityProvider | undefined; + signer: import("@aws-sdk/core").AwsSdkSigV4Signer; + } | { + schemeId: string; + identityProvider: (ipc: import("@smithy/types").IdentityProviderConfig) => import("@smithy/types").IdentityProvider | (() => Promise<{}>); + signer: import("@smithy/core").NoAuthSigner; + })[]; + httpAuthSchemeProvider: import("./auth/httpAuthSchemeProvider").SSOHttpAuthSchemeProvider; + credentials?: import("@smithy/types").AwsCredentialIdentity | import("@smithy/types").AwsCredentialIdentityProvider | undefined; + signer?: import("@smithy/types").RequestSigner | ((authScheme?: import("@smithy/types").AuthScheme | undefined) => Promise) | undefined; + signingEscapePath?: boolean | undefined; + systemClockOffset?: number | undefined; + signingRegion?: string | undefined; + signerConstructor?: (new (options: 
import("@smithy/signature-v4").SignatureV4Init & import("@smithy/signature-v4").SignatureV4CryptoInit) => import("@smithy/types").RequestSigner) | undefined; +}; diff --git a/amplify/functions/deleteDocument/node_modules/@aws-sdk/client-sso/dist-types/runtimeConfig.d.ts b/amplify/functions/deleteDocument/node_modules/@aws-sdk/client-sso/dist-types/runtimeConfig.d.ts new file mode 100644 index 0000000..4194fd5 --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@aws-sdk/client-sso/dist-types/runtimeConfig.d.ts @@ -0,0 +1,57 @@ +import { NodeHttpHandler as RequestHandler } from "@smithy/node-http-handler"; +import { SSOClientConfig } from "./SSOClient"; +/** + * @internal + */ +export declare const getRuntimeConfig: (config: SSOClientConfig) => { + runtime: string; + defaultsMode: import("@smithy/types").Provider; + authSchemePreference: string[] | import("@smithy/types").Provider; + bodyLengthChecker: import("@smithy/types").BodyLengthCalculator; + defaultUserAgentProvider: (config?: import("@aws-sdk/util-user-agent-node").PreviouslyResolved | undefined) => Promise; + maxAttempts: number | import("@smithy/types").Provider; + region: string | import("@smithy/types").Provider; + requestHandler: RequestHandler | import("@smithy/protocol-http").HttpHandler; + retryMode: string | import("@smithy/types").Provider; + sha256: import("@smithy/types").HashConstructor; + streamCollector: import("@smithy/types").StreamCollector; + useDualstackEndpoint: boolean | import("@smithy/types").Provider; + useFipsEndpoint: boolean | import("@smithy/types").Provider; + userAgentAppId: string | import("@smithy/types").Provider; + apiVersion: string; + cacheMiddleware?: boolean | undefined; + urlParser: import("@smithy/types").UrlParser; + base64Decoder: import("@smithy/types").Decoder; + base64Encoder: (_input: string | Uint8Array) => string; + utf8Decoder: import("@smithy/types").Decoder; + utf8Encoder: (input: string | Uint8Array) => string; + disableHostPrefix: boolean; + 
serviceId: string; + profile?: string | undefined; + logger: import("@smithy/types").Logger; + extensions: import("./runtimeExtensions").RuntimeExtension[]; + customUserAgent?: string | import("@smithy/types").UserAgent | undefined; + retryStrategy?: import("@smithy/types").RetryStrategy | import("@smithy/types").RetryStrategyV2 | undefined; + endpoint?: ((string | import("@smithy/types").Endpoint | import("@smithy/types").Provider | import("@smithy/types").EndpointV2 | import("@smithy/types").Provider) & (string | import("@smithy/types").Provider | import("@smithy/types").Endpoint | import("@smithy/types").Provider | import("@smithy/types").EndpointV2 | import("@smithy/types").Provider)) | undefined; + endpointProvider: (endpointParams: import("./endpoint/EndpointParameters").EndpointParameters, context?: { + logger?: import("@smithy/types").Logger | undefined; + }) => import("@smithy/types").EndpointV2; + tls?: boolean | undefined; + serviceConfiguredEndpoint?: undefined; + httpAuthSchemes: import("@smithy/types").HttpAuthScheme[] | ({ + schemeId: string; + identityProvider: (ipc: import("@smithy/types").IdentityProviderConfig) => import("@smithy/types").IdentityProvider | undefined; + signer: import("@aws-sdk/core").AwsSdkSigV4Signer; + } | { + schemeId: string; + identityProvider: (ipc: import("@smithy/types").IdentityProviderConfig) => import("@smithy/types").IdentityProvider | (() => Promise<{}>); + signer: import("@smithy/core").NoAuthSigner; + })[]; + httpAuthSchemeProvider: import("./auth/httpAuthSchemeProvider").SSOHttpAuthSchemeProvider; + credentials?: import("@smithy/types").AwsCredentialIdentity | import("@smithy/types").AwsCredentialIdentityProvider | undefined; + signer?: import("@smithy/types").RequestSigner | ((authScheme?: import("@smithy/types").AuthScheme | undefined) => Promise) | undefined; + signingEscapePath?: boolean | undefined; + systemClockOffset?: number | undefined; + signingRegion?: string | undefined; + signerConstructor?: (new 
(options: import("@smithy/signature-v4").SignatureV4Init & import("@smithy/signature-v4").SignatureV4CryptoInit) => import("@smithy/types").RequestSigner) | undefined; +}; diff --git a/amplify/functions/deleteDocument/node_modules/@aws-sdk/client-sso/dist-types/runtimeConfig.native.d.ts b/amplify/functions/deleteDocument/node_modules/@aws-sdk/client-sso/dist-types/runtimeConfig.native.d.ts new file mode 100644 index 0000000..38c1d33 --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@aws-sdk/client-sso/dist-types/runtimeConfig.native.d.ts @@ -0,0 +1,56 @@ +import { SSOClientConfig } from "./SSOClient"; +/** + * @internal + */ +export declare const getRuntimeConfig: (config: SSOClientConfig) => { + runtime: string; + sha256: import("@smithy/types").HashConstructor; + requestHandler: import("@smithy/types").NodeHttpHandlerOptions | import("@smithy/types").FetchHttpHandlerOptions | Record | import("@smithy/protocol-http").HttpHandler | import("@smithy/fetch-http-handler").FetchHttpHandler; + apiVersion: string; + cacheMiddleware?: boolean | undefined; + urlParser: import("@smithy/types").UrlParser; + bodyLengthChecker: import("@smithy/types").BodyLengthCalculator; + streamCollector: import("@smithy/types").StreamCollector; + base64Decoder: import("@smithy/types").Decoder; + base64Encoder: (_input: string | Uint8Array) => string; + utf8Decoder: import("@smithy/types").Decoder; + utf8Encoder: (input: string | Uint8Array) => string; + disableHostPrefix: boolean; + serviceId: string; + useDualstackEndpoint: boolean | import("@smithy/types").Provider; + useFipsEndpoint: boolean | import("@smithy/types").Provider; + region: string | import("@smithy/types").Provider; + profile?: string | undefined; + defaultUserAgentProvider: (config?: import("@aws-sdk/util-user-agent-browser").PreviouslyResolved | undefined) => Promise; + maxAttempts: number | import("@smithy/types").Provider; + retryMode: string | import("@smithy/types").Provider; + logger: 
import("@smithy/types").Logger; + extensions: import("./runtimeExtensions").RuntimeExtension[]; + defaultsMode: import("@smithy/smithy-client").DefaultsMode | import("@smithy/types").Provider; + customUserAgent?: string | import("@smithy/types").UserAgent | undefined; + userAgentAppId?: string | import("@smithy/types").Provider | undefined; + retryStrategy?: import("@smithy/types").RetryStrategy | import("@smithy/types").RetryStrategyV2 | undefined; + endpoint?: ((string | import("@smithy/types").Endpoint | import("@smithy/types").Provider | import("@smithy/types").EndpointV2 | import("@smithy/types").Provider) & (string | import("@smithy/types").Provider | import("@smithy/types").Endpoint | import("@smithy/types").Provider | import("@smithy/types").EndpointV2 | import("@smithy/types").Provider)) | undefined; + endpointProvider: (endpointParams: import("./endpoint/EndpointParameters").EndpointParameters, context?: { + logger?: import("@smithy/types").Logger | undefined; + }) => import("@smithy/types").EndpointV2; + tls?: boolean | undefined; + serviceConfiguredEndpoint?: undefined; + authSchemePreference?: string[] | import("@smithy/types").Provider | undefined; + httpAuthSchemes: import("@smithy/types").HttpAuthScheme[] | ({ + schemeId: string; + identityProvider: (ipc: import("@smithy/types").IdentityProviderConfig) => import("@smithy/types").IdentityProvider | undefined; + signer: import("@aws-sdk/core").AwsSdkSigV4Signer; + } | { + schemeId: string; + identityProvider: (ipc: import("@smithy/types").IdentityProviderConfig) => import("@smithy/types").IdentityProvider | (() => Promise<{}>); + signer: import("@smithy/core").NoAuthSigner; + })[]; + httpAuthSchemeProvider: import("./auth/httpAuthSchemeProvider").SSOHttpAuthSchemeProvider; + credentials?: import("@smithy/types").AwsCredentialIdentity | import("@smithy/types").AwsCredentialIdentityProvider | undefined; + signer?: import("@smithy/types").RequestSigner | ((authScheme?: import("@smithy/types").AuthScheme 
| undefined) => Promise) | undefined; + signingEscapePath?: boolean | undefined; + systemClockOffset?: number | undefined; + signingRegion?: string | undefined; + signerConstructor?: (new (options: import("@smithy/signature-v4").SignatureV4Init & import("@smithy/signature-v4").SignatureV4CryptoInit) => import("@smithy/types").RequestSigner) | undefined; +}; diff --git a/amplify/functions/deleteDocument/node_modules/@aws-sdk/client-sso/dist-types/runtimeConfig.shared.d.ts b/amplify/functions/deleteDocument/node_modules/@aws-sdk/client-sso/dist-types/runtimeConfig.shared.d.ts new file mode 100644 index 0000000..20ab682 --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@aws-sdk/client-sso/dist-types/runtimeConfig.shared.d.ts @@ -0,0 +1,32 @@ +import { AwsSdkSigV4Signer } from "@aws-sdk/core"; +import { NoAuthSigner } from "@smithy/core"; +import { IdentityProviderConfig } from "@smithy/types"; +import { SSOClientConfig } from "./SSOClient"; +/** + * @internal + */ +export declare const getRuntimeConfig: (config: SSOClientConfig) => { + apiVersion: string; + base64Decoder: import("@smithy/types").Decoder; + base64Encoder: (_input: string | Uint8Array) => string; + disableHostPrefix: boolean; + endpointProvider: (endpointParams: import("./endpoint/EndpointParameters").EndpointParameters, context?: { + logger?: import("@smithy/types").Logger | undefined; + }) => import("@smithy/types").EndpointV2; + extensions: import("./runtimeExtensions").RuntimeExtension[]; + httpAuthSchemeProvider: import("./auth/httpAuthSchemeProvider").SSOHttpAuthSchemeProvider; + httpAuthSchemes: import("@smithy/types").HttpAuthScheme[] | ({ + schemeId: string; + identityProvider: (ipc: IdentityProviderConfig) => import("@smithy/types").IdentityProvider | undefined; + signer: AwsSdkSigV4Signer; + } | { + schemeId: string; + identityProvider: (ipc: IdentityProviderConfig) => import("@smithy/types").IdentityProvider | (() => Promise<{}>); + signer: NoAuthSigner; + })[]; + logger: 
import("@smithy/types").Logger; + serviceId: string; + urlParser: import("@smithy/types").UrlParser; + utf8Decoder: import("@smithy/types").Decoder; + utf8Encoder: (input: string | Uint8Array) => string; +}; diff --git a/amplify/functions/deleteDocument/node_modules/@aws-sdk/client-sso/dist-types/runtimeExtensions.d.ts b/amplify/functions/deleteDocument/node_modules/@aws-sdk/client-sso/dist-types/runtimeExtensions.d.ts new file mode 100644 index 0000000..a0f078c --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@aws-sdk/client-sso/dist-types/runtimeExtensions.d.ts @@ -0,0 +1,17 @@ +import { SSOExtensionConfiguration } from "./extensionConfiguration"; +/** + * @public + */ +export interface RuntimeExtension { + configure(extensionConfiguration: SSOExtensionConfiguration): void; +} +/** + * @public + */ +export interface RuntimeExtensionsConfig { + extensions: RuntimeExtension[]; +} +/** + * @internal + */ +export declare const resolveRuntimeExtensions: (runtimeConfig: any, extensions: RuntimeExtension[]) => any; diff --git a/amplify/functions/deleteDocument/node_modules/@aws-sdk/client-sso/dist-types/ts3.4/SSO.d.ts b/amplify/functions/deleteDocument/node_modules/@aws-sdk/client-sso/dist-types/ts3.4/SSO.d.ts new file mode 100644 index 0000000..9a242fc --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@aws-sdk/client-sso/dist-types/ts3.4/SSO.d.ts @@ -0,0 +1,73 @@ +import { HttpHandlerOptions as __HttpHandlerOptions } from "@smithy/types"; +import { + GetRoleCredentialsCommandInput, + GetRoleCredentialsCommandOutput, +} from "./commands/GetRoleCredentialsCommand"; +import { + ListAccountRolesCommandInput, + ListAccountRolesCommandOutput, +} from "./commands/ListAccountRolesCommand"; +import { + ListAccountsCommandInput, + ListAccountsCommandOutput, +} from "./commands/ListAccountsCommand"; +import { + LogoutCommandInput, + LogoutCommandOutput, +} from "./commands/LogoutCommand"; +import { SSOClient } from "./SSOClient"; +export interface 
SSO { + getRoleCredentials( + args: GetRoleCredentialsCommandInput, + options?: __HttpHandlerOptions + ): Promise; + getRoleCredentials( + args: GetRoleCredentialsCommandInput, + cb: (err: any, data?: GetRoleCredentialsCommandOutput) => void + ): void; + getRoleCredentials( + args: GetRoleCredentialsCommandInput, + options: __HttpHandlerOptions, + cb: (err: any, data?: GetRoleCredentialsCommandOutput) => void + ): void; + listAccountRoles( + args: ListAccountRolesCommandInput, + options?: __HttpHandlerOptions + ): Promise; + listAccountRoles( + args: ListAccountRolesCommandInput, + cb: (err: any, data?: ListAccountRolesCommandOutput) => void + ): void; + listAccountRoles( + args: ListAccountRolesCommandInput, + options: __HttpHandlerOptions, + cb: (err: any, data?: ListAccountRolesCommandOutput) => void + ): void; + listAccounts( + args: ListAccountsCommandInput, + options?: __HttpHandlerOptions + ): Promise; + listAccounts( + args: ListAccountsCommandInput, + cb: (err: any, data?: ListAccountsCommandOutput) => void + ): void; + listAccounts( + args: ListAccountsCommandInput, + options: __HttpHandlerOptions, + cb: (err: any, data?: ListAccountsCommandOutput) => void + ): void; + logout( + args: LogoutCommandInput, + options?: __HttpHandlerOptions + ): Promise; + logout( + args: LogoutCommandInput, + cb: (err: any, data?: LogoutCommandOutput) => void + ): void; + logout( + args: LogoutCommandInput, + options: __HttpHandlerOptions, + cb: (err: any, data?: LogoutCommandOutput) => void + ): void; +} +export declare class SSO extends SSOClient implements SSO {} diff --git a/amplify/functions/deleteDocument/node_modules/@aws-sdk/client-sso/dist-types/ts3.4/SSOClient.d.ts b/amplify/functions/deleteDocument/node_modules/@aws-sdk/client-sso/dist-types/ts3.4/SSOClient.d.ts new file mode 100644 index 0000000..efd5a5f --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@aws-sdk/client-sso/dist-types/ts3.4/SSOClient.d.ts @@ -0,0 +1,138 @@ +import { + 
HostHeaderInputConfig, + HostHeaderResolvedConfig, +} from "@aws-sdk/middleware-host-header"; +import { + UserAgentInputConfig, + UserAgentResolvedConfig, +} from "@aws-sdk/middleware-user-agent"; +import { + RegionInputConfig, + RegionResolvedConfig, +} from "@smithy/config-resolver"; +import { + EndpointInputConfig, + EndpointResolvedConfig, +} from "@smithy/middleware-endpoint"; +import { + RetryInputConfig, + RetryResolvedConfig, +} from "@smithy/middleware-retry"; +import { HttpHandlerUserInput as __HttpHandlerUserInput } from "@smithy/protocol-http"; +import { + Client as __Client, + DefaultsMode as __DefaultsMode, + SmithyConfiguration as __SmithyConfiguration, + SmithyResolvedConfiguration as __SmithyResolvedConfiguration, +} from "@smithy/smithy-client"; +import { + BodyLengthCalculator as __BodyLengthCalculator, + CheckOptionalClientConfig as __CheckOptionalClientConfig, + ChecksumConstructor as __ChecksumConstructor, + Decoder as __Decoder, + Encoder as __Encoder, + HashConstructor as __HashConstructor, + HttpHandlerOptions as __HttpHandlerOptions, + Logger as __Logger, + Provider as __Provider, + Provider, + StreamCollector as __StreamCollector, + UrlParser as __UrlParser, + UserAgent as __UserAgent, +} from "@smithy/types"; +import { + HttpAuthSchemeInputConfig, + HttpAuthSchemeResolvedConfig, +} from "./auth/httpAuthSchemeProvider"; +import { + GetRoleCredentialsCommandInput, + GetRoleCredentialsCommandOutput, +} from "./commands/GetRoleCredentialsCommand"; +import { + ListAccountRolesCommandInput, + ListAccountRolesCommandOutput, +} from "./commands/ListAccountRolesCommand"; +import { + ListAccountsCommandInput, + ListAccountsCommandOutput, +} from "./commands/ListAccountsCommand"; +import { + LogoutCommandInput, + LogoutCommandOutput, +} from "./commands/LogoutCommand"; +import { + ClientInputEndpointParameters, + ClientResolvedEndpointParameters, + EndpointParameters, +} from "./endpoint/EndpointParameters"; +import { RuntimeExtension, 
RuntimeExtensionsConfig } from "./runtimeExtensions"; +export { __Client }; +export type ServiceInputTypes = + | GetRoleCredentialsCommandInput + | ListAccountRolesCommandInput + | ListAccountsCommandInput + | LogoutCommandInput; +export type ServiceOutputTypes = + | GetRoleCredentialsCommandOutput + | ListAccountRolesCommandOutput + | ListAccountsCommandOutput + | LogoutCommandOutput; +export interface ClientDefaults + extends Partial<__SmithyConfiguration<__HttpHandlerOptions>> { + requestHandler?: __HttpHandlerUserInput; + sha256?: __ChecksumConstructor | __HashConstructor; + urlParser?: __UrlParser; + bodyLengthChecker?: __BodyLengthCalculator; + streamCollector?: __StreamCollector; + base64Decoder?: __Decoder; + base64Encoder?: __Encoder; + utf8Decoder?: __Decoder; + utf8Encoder?: __Encoder; + runtime?: string; + disableHostPrefix?: boolean; + serviceId?: string; + useDualstackEndpoint?: boolean | __Provider; + useFipsEndpoint?: boolean | __Provider; + region?: string | __Provider; + profile?: string; + defaultUserAgentProvider?: Provider<__UserAgent>; + maxAttempts?: number | __Provider; + retryMode?: string | __Provider; + logger?: __Logger; + extensions?: RuntimeExtension[]; + defaultsMode?: __DefaultsMode | __Provider<__DefaultsMode>; +} +export type SSOClientConfigType = Partial< + __SmithyConfiguration<__HttpHandlerOptions> +> & + ClientDefaults & + UserAgentInputConfig & + RetryInputConfig & + RegionInputConfig & + HostHeaderInputConfig & + EndpointInputConfig & + HttpAuthSchemeInputConfig & + ClientInputEndpointParameters; +export interface SSOClientConfig extends SSOClientConfigType {} +export type SSOClientResolvedConfigType = + __SmithyResolvedConfiguration<__HttpHandlerOptions> & + Required & + RuntimeExtensionsConfig & + UserAgentResolvedConfig & + RetryResolvedConfig & + RegionResolvedConfig & + HostHeaderResolvedConfig & + EndpointResolvedConfig & + HttpAuthSchemeResolvedConfig & + ClientResolvedEndpointParameters; +export interface 
SSOClientResolvedConfig extends SSOClientResolvedConfigType {} +export declare class SSOClient extends __Client< + __HttpHandlerOptions, + ServiceInputTypes, + ServiceOutputTypes, + SSOClientResolvedConfig +> { + readonly config: SSOClientResolvedConfig; + constructor(...[configuration]: __CheckOptionalClientConfig); + destroy(): void; +} diff --git a/amplify/functions/deleteDocument/node_modules/@aws-sdk/client-sso/dist-types/ts3.4/auth/httpAuthExtensionConfiguration.d.ts b/amplify/functions/deleteDocument/node_modules/@aws-sdk/client-sso/dist-types/ts3.4/auth/httpAuthExtensionConfiguration.d.ts new file mode 100644 index 0000000..29f38b3 --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@aws-sdk/client-sso/dist-types/ts3.4/auth/httpAuthExtensionConfiguration.d.ts @@ -0,0 +1,32 @@ +import { + AwsCredentialIdentity, + AwsCredentialIdentityProvider, + HttpAuthScheme, +} from "@smithy/types"; +import { SSOHttpAuthSchemeProvider } from "./httpAuthSchemeProvider"; +export interface HttpAuthExtensionConfiguration { + setHttpAuthScheme(httpAuthScheme: HttpAuthScheme): void; + httpAuthSchemes(): HttpAuthScheme[]; + setHttpAuthSchemeProvider( + httpAuthSchemeProvider: SSOHttpAuthSchemeProvider + ): void; + httpAuthSchemeProvider(): SSOHttpAuthSchemeProvider; + setCredentials( + credentials: AwsCredentialIdentity | AwsCredentialIdentityProvider + ): void; + credentials(): + | AwsCredentialIdentity + | AwsCredentialIdentityProvider + | undefined; +} +export type HttpAuthRuntimeConfig = Partial<{ + httpAuthSchemes: HttpAuthScheme[]; + httpAuthSchemeProvider: SSOHttpAuthSchemeProvider; + credentials: AwsCredentialIdentity | AwsCredentialIdentityProvider; +}>; +export declare const getHttpAuthExtensionConfiguration: ( + runtimeConfig: HttpAuthRuntimeConfig +) => HttpAuthExtensionConfiguration; +export declare const resolveHttpAuthRuntimeConfig: ( + config: HttpAuthExtensionConfiguration +) => HttpAuthRuntimeConfig; diff --git 
a/amplify/functions/deleteDocument/node_modules/@aws-sdk/client-sso/dist-types/ts3.4/auth/httpAuthSchemeProvider.d.ts b/amplify/functions/deleteDocument/node_modules/@aws-sdk/client-sso/dist-types/ts3.4/auth/httpAuthSchemeProvider.d.ts new file mode 100644 index 0000000..864f755 --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@aws-sdk/client-sso/dist-types/ts3.4/auth/httpAuthSchemeProvider.d.ts @@ -0,0 +1,46 @@ +import { + AwsSdkSigV4AuthInputConfig, + AwsSdkSigV4AuthResolvedConfig, + AwsSdkSigV4PreviouslyResolved, +} from "@aws-sdk/core"; +import { + HandlerExecutionContext, + HttpAuthScheme, + HttpAuthSchemeParameters, + HttpAuthSchemeParametersProvider, + HttpAuthSchemeProvider, + Provider, +} from "@smithy/types"; +import { SSOClientResolvedConfig } from "../SSOClient"; +export interface SSOHttpAuthSchemeParameters extends HttpAuthSchemeParameters { + region?: string; +} +export interface SSOHttpAuthSchemeParametersProvider + extends HttpAuthSchemeParametersProvider< + SSOClientResolvedConfig, + HandlerExecutionContext, + SSOHttpAuthSchemeParameters, + object + > {} +export declare const defaultSSOHttpAuthSchemeParametersProvider: ( + config: SSOClientResolvedConfig, + context: HandlerExecutionContext, + input: object +) => Promise; +export interface SSOHttpAuthSchemeProvider + extends HttpAuthSchemeProvider {} +export declare const defaultSSOHttpAuthSchemeProvider: SSOHttpAuthSchemeProvider; +export interface HttpAuthSchemeInputConfig extends AwsSdkSigV4AuthInputConfig { + authSchemePreference?: string[] | Provider; + httpAuthSchemes?: HttpAuthScheme[]; + httpAuthSchemeProvider?: SSOHttpAuthSchemeProvider; +} +export interface HttpAuthSchemeResolvedConfig + extends AwsSdkSigV4AuthResolvedConfig { + readonly authSchemePreference: Provider; + readonly httpAuthSchemes: HttpAuthScheme[]; + readonly httpAuthSchemeProvider: SSOHttpAuthSchemeProvider; +} +export declare const resolveHttpAuthSchemeConfig: ( + config: T & HttpAuthSchemeInputConfig & 
AwsSdkSigV4PreviouslyResolved +) => T & HttpAuthSchemeResolvedConfig; diff --git a/amplify/functions/deleteDocument/node_modules/@aws-sdk/client-sso/dist-types/ts3.4/commands/GetRoleCredentialsCommand.d.ts b/amplify/functions/deleteDocument/node_modules/@aws-sdk/client-sso/dist-types/ts3.4/commands/GetRoleCredentialsCommand.d.ts new file mode 100644 index 0000000..7c1b358 --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@aws-sdk/client-sso/dist-types/ts3.4/commands/GetRoleCredentialsCommand.d.ts @@ -0,0 +1,51 @@ +import { Command as $Command } from "@smithy/smithy-client"; +import { MetadataBearer as __MetadataBearer } from "@smithy/types"; +import { + GetRoleCredentialsRequest, + GetRoleCredentialsResponse, +} from "../models/models_0"; +import { + ServiceInputTypes, + ServiceOutputTypes, + SSOClientResolvedConfig, +} from "../SSOClient"; +export { __MetadataBearer }; +export { $Command }; +export interface GetRoleCredentialsCommandInput + extends GetRoleCredentialsRequest {} +export interface GetRoleCredentialsCommandOutput + extends GetRoleCredentialsResponse, + __MetadataBearer {} +declare const GetRoleCredentialsCommand_base: { + new ( + input: GetRoleCredentialsCommandInput + ): import("@smithy/smithy-client").CommandImpl< + GetRoleCredentialsCommandInput, + GetRoleCredentialsCommandOutput, + SSOClientResolvedConfig, + ServiceInputTypes, + ServiceOutputTypes + >; + new ( + __0_0: GetRoleCredentialsCommandInput + ): import("@smithy/smithy-client").CommandImpl< + GetRoleCredentialsCommandInput, + GetRoleCredentialsCommandOutput, + SSOClientResolvedConfig, + ServiceInputTypes, + ServiceOutputTypes + >; + getEndpointParameterInstructions(): import("@smithy/middleware-endpoint").EndpointParameterInstructions; +}; +export declare class GetRoleCredentialsCommand extends GetRoleCredentialsCommand_base { + protected static __types: { + api: { + input: GetRoleCredentialsRequest; + output: GetRoleCredentialsResponse; + }; + sdk: { + input: 
GetRoleCredentialsCommandInput; + output: GetRoleCredentialsCommandOutput; + }; + }; +} diff --git a/amplify/functions/deleteDocument/node_modules/@aws-sdk/client-sso/dist-types/ts3.4/commands/ListAccountRolesCommand.d.ts b/amplify/functions/deleteDocument/node_modules/@aws-sdk/client-sso/dist-types/ts3.4/commands/ListAccountRolesCommand.d.ts new file mode 100644 index 0000000..3b898a9 --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@aws-sdk/client-sso/dist-types/ts3.4/commands/ListAccountRolesCommand.d.ts @@ -0,0 +1,50 @@ +import { Command as $Command } from "@smithy/smithy-client"; +import { MetadataBearer as __MetadataBearer } from "@smithy/types"; +import { + ListAccountRolesRequest, + ListAccountRolesResponse, +} from "../models/models_0"; +import { + ServiceInputTypes, + ServiceOutputTypes, + SSOClientResolvedConfig, +} from "../SSOClient"; +export { __MetadataBearer }; +export { $Command }; +export interface ListAccountRolesCommandInput extends ListAccountRolesRequest {} +export interface ListAccountRolesCommandOutput + extends ListAccountRolesResponse, + __MetadataBearer {} +declare const ListAccountRolesCommand_base: { + new ( + input: ListAccountRolesCommandInput + ): import("@smithy/smithy-client").CommandImpl< + ListAccountRolesCommandInput, + ListAccountRolesCommandOutput, + SSOClientResolvedConfig, + ServiceInputTypes, + ServiceOutputTypes + >; + new ( + __0_0: ListAccountRolesCommandInput + ): import("@smithy/smithy-client").CommandImpl< + ListAccountRolesCommandInput, + ListAccountRolesCommandOutput, + SSOClientResolvedConfig, + ServiceInputTypes, + ServiceOutputTypes + >; + getEndpointParameterInstructions(): import("@smithy/middleware-endpoint").EndpointParameterInstructions; +}; +export declare class ListAccountRolesCommand extends ListAccountRolesCommand_base { + protected static __types: { + api: { + input: ListAccountRolesRequest; + output: ListAccountRolesResponse; + }; + sdk: { + input: ListAccountRolesCommandInput; + 
output: ListAccountRolesCommandOutput; + }; + }; +} diff --git a/amplify/functions/deleteDocument/node_modules/@aws-sdk/client-sso/dist-types/ts3.4/commands/ListAccountsCommand.d.ts b/amplify/functions/deleteDocument/node_modules/@aws-sdk/client-sso/dist-types/ts3.4/commands/ListAccountsCommand.d.ts new file mode 100644 index 0000000..3a00cc1 --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@aws-sdk/client-sso/dist-types/ts3.4/commands/ListAccountsCommand.d.ts @@ -0,0 +1,47 @@ +import { Command as $Command } from "@smithy/smithy-client"; +import { MetadataBearer as __MetadataBearer } from "@smithy/types"; +import { ListAccountsRequest, ListAccountsResponse } from "../models/models_0"; +import { + ServiceInputTypes, + ServiceOutputTypes, + SSOClientResolvedConfig, +} from "../SSOClient"; +export { __MetadataBearer }; +export { $Command }; +export interface ListAccountsCommandInput extends ListAccountsRequest {} +export interface ListAccountsCommandOutput + extends ListAccountsResponse, + __MetadataBearer {} +declare const ListAccountsCommand_base: { + new ( + input: ListAccountsCommandInput + ): import("@smithy/smithy-client").CommandImpl< + ListAccountsCommandInput, + ListAccountsCommandOutput, + SSOClientResolvedConfig, + ServiceInputTypes, + ServiceOutputTypes + >; + new ( + __0_0: ListAccountsCommandInput + ): import("@smithy/smithy-client").CommandImpl< + ListAccountsCommandInput, + ListAccountsCommandOutput, + SSOClientResolvedConfig, + ServiceInputTypes, + ServiceOutputTypes + >; + getEndpointParameterInstructions(): import("@smithy/middleware-endpoint").EndpointParameterInstructions; +}; +export declare class ListAccountsCommand extends ListAccountsCommand_base { + protected static __types: { + api: { + input: ListAccountsRequest; + output: ListAccountsResponse; + }; + sdk: { + input: ListAccountsCommandInput; + output: ListAccountsCommandOutput; + }; + }; +} diff --git 
a/amplify/functions/deleteDocument/node_modules/@aws-sdk/client-sso/dist-types/ts3.4/commands/LogoutCommand.d.ts b/amplify/functions/deleteDocument/node_modules/@aws-sdk/client-sso/dist-types/ts3.4/commands/LogoutCommand.d.ts new file mode 100644 index 0000000..2599250 --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@aws-sdk/client-sso/dist-types/ts3.4/commands/LogoutCommand.d.ts @@ -0,0 +1,41 @@ +import { Command as $Command } from "@smithy/smithy-client"; +import { MetadataBearer as __MetadataBearer } from "@smithy/types"; +import { LogoutRequest } from "../models/models_0"; +import { + ServiceInputTypes, + ServiceOutputTypes, + SSOClientResolvedConfig, +} from "../SSOClient"; +export { __MetadataBearer }; +export { $Command }; +export interface LogoutCommandInput extends LogoutRequest {} +export interface LogoutCommandOutput extends __MetadataBearer {} +declare const LogoutCommand_base: { + new (input: LogoutCommandInput): import("@smithy/smithy-client").CommandImpl< + LogoutCommandInput, + LogoutCommandOutput, + SSOClientResolvedConfig, + ServiceInputTypes, + ServiceOutputTypes + >; + new (__0_0: LogoutCommandInput): import("@smithy/smithy-client").CommandImpl< + LogoutCommandInput, + LogoutCommandOutput, + SSOClientResolvedConfig, + ServiceInputTypes, + ServiceOutputTypes + >; + getEndpointParameterInstructions(): import("@smithy/middleware-endpoint").EndpointParameterInstructions; +}; +export declare class LogoutCommand extends LogoutCommand_base { + protected static __types: { + api: { + input: LogoutRequest; + output: {}; + }; + sdk: { + input: LogoutCommandInput; + output: LogoutCommandOutput; + }; + }; +} diff --git a/amplify/functions/deleteDocument/node_modules/@aws-sdk/client-sso/dist-types/ts3.4/commands/index.d.ts b/amplify/functions/deleteDocument/node_modules/@aws-sdk/client-sso/dist-types/ts3.4/commands/index.d.ts new file mode 100644 index 0000000..0ab890d --- /dev/null +++ 
b/amplify/functions/deleteDocument/node_modules/@aws-sdk/client-sso/dist-types/ts3.4/commands/index.d.ts @@ -0,0 +1,4 @@ +export * from "./GetRoleCredentialsCommand"; +export * from "./ListAccountRolesCommand"; +export * from "./ListAccountsCommand"; +export * from "./LogoutCommand"; diff --git a/amplify/functions/deleteDocument/node_modules/@aws-sdk/client-sso/dist-types/ts3.4/endpoint/EndpointParameters.d.ts b/amplify/functions/deleteDocument/node_modules/@aws-sdk/client-sso/dist-types/ts3.4/endpoint/EndpointParameters.d.ts new file mode 100644 index 0000000..7f24540 --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@aws-sdk/client-sso/dist-types/ts3.4/endpoint/EndpointParameters.d.ts @@ -0,0 +1,51 @@ +import { + Endpoint, + EndpointParameters as __EndpointParameters, + EndpointV2, + Provider, +} from "@smithy/types"; +export interface ClientInputEndpointParameters { + region?: string | Provider; + useDualstackEndpoint?: boolean | Provider; + useFipsEndpoint?: boolean | Provider; + endpoint?: + | string + | Provider + | Endpoint + | Provider + | EndpointV2 + | Provider; +} +export type ClientResolvedEndpointParameters = ClientInputEndpointParameters & { + defaultSigningName: string; +}; +export declare const resolveClientEndpointParameters: ( + options: T & ClientInputEndpointParameters +) => T & + ClientInputEndpointParameters & { + defaultSigningName: string; + }; +export declare const commonParams: { + readonly UseFIPS: { + readonly type: "builtInParams"; + readonly name: "useFipsEndpoint"; + }; + readonly Endpoint: { + readonly type: "builtInParams"; + readonly name: "endpoint"; + }; + readonly Region: { + readonly type: "builtInParams"; + readonly name: "region"; + }; + readonly UseDualStack: { + readonly type: "builtInParams"; + readonly name: "useDualstackEndpoint"; + }; +}; +export interface EndpointParameters extends __EndpointParameters { + Region?: string; + UseDualStack?: boolean; + UseFIPS?: boolean; + Endpoint?: string; +} diff --git 
a/amplify/functions/deleteDocument/node_modules/@aws-sdk/client-sso/dist-types/ts3.4/endpoint/endpointResolver.d.ts b/amplify/functions/deleteDocument/node_modules/@aws-sdk/client-sso/dist-types/ts3.4/endpoint/endpointResolver.d.ts new file mode 100644 index 0000000..5909925 --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@aws-sdk/client-sso/dist-types/ts3.4/endpoint/endpointResolver.d.ts @@ -0,0 +1,8 @@ +import { EndpointV2, Logger } from "@smithy/types"; +import { EndpointParameters } from "./EndpointParameters"; +export declare const defaultEndpointResolver: ( + endpointParams: EndpointParameters, + context?: { + logger?: Logger; + } +) => EndpointV2; diff --git a/amplify/functions/deleteDocument/node_modules/@aws-sdk/client-sso/dist-types/ts3.4/endpoint/ruleset.d.ts b/amplify/functions/deleteDocument/node_modules/@aws-sdk/client-sso/dist-types/ts3.4/endpoint/ruleset.d.ts new file mode 100644 index 0000000..4b23899 --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@aws-sdk/client-sso/dist-types/ts3.4/endpoint/ruleset.d.ts @@ -0,0 +1,2 @@ +import { RuleSetObject } from "@smithy/types"; +export declare const ruleSet: RuleSetObject; diff --git a/amplify/functions/deleteDocument/node_modules/@aws-sdk/client-sso/dist-types/ts3.4/extensionConfiguration.d.ts b/amplify/functions/deleteDocument/node_modules/@aws-sdk/client-sso/dist-types/ts3.4/extensionConfiguration.d.ts new file mode 100644 index 0000000..c1b43ff --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@aws-sdk/client-sso/dist-types/ts3.4/extensionConfiguration.d.ts @@ -0,0 +1,9 @@ +import { AwsRegionExtensionConfiguration } from "@aws-sdk/types"; +import { HttpHandlerExtensionConfiguration } from "@smithy/protocol-http"; +import { DefaultExtensionConfiguration } from "@smithy/types"; +import { HttpAuthExtensionConfiguration } from "./auth/httpAuthExtensionConfiguration"; +export interface SSOExtensionConfiguration + extends HttpHandlerExtensionConfiguration, + 
DefaultExtensionConfiguration, + AwsRegionExtensionConfiguration, + HttpAuthExtensionConfiguration {} diff --git a/amplify/functions/deleteDocument/node_modules/@aws-sdk/client-sso/dist-types/ts3.4/index.d.ts b/amplify/functions/deleteDocument/node_modules/@aws-sdk/client-sso/dist-types/ts3.4/index.d.ts new file mode 100644 index 0000000..891aed3 --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@aws-sdk/client-sso/dist-types/ts3.4/index.d.ts @@ -0,0 +1,9 @@ +export * from "./SSOClient"; +export * from "./SSO"; +export { ClientInputEndpointParameters } from "./endpoint/EndpointParameters"; +export { RuntimeExtension } from "./runtimeExtensions"; +export { SSOExtensionConfiguration } from "./extensionConfiguration"; +export * from "./commands"; +export * from "./pagination"; +export * from "./models"; +export { SSOServiceException } from "./models/SSOServiceException"; diff --git a/amplify/functions/deleteDocument/node_modules/@aws-sdk/client-sso/dist-types/ts3.4/models/SSOServiceException.d.ts b/amplify/functions/deleteDocument/node_modules/@aws-sdk/client-sso/dist-types/ts3.4/models/SSOServiceException.d.ts new file mode 100644 index 0000000..1ad045d --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@aws-sdk/client-sso/dist-types/ts3.4/models/SSOServiceException.d.ts @@ -0,0 +1,9 @@ +import { + ServiceException as __ServiceException, + ServiceExceptionOptions as __ServiceExceptionOptions, +} from "@smithy/smithy-client"; +export { __ServiceExceptionOptions }; +export { __ServiceException }; +export declare class SSOServiceException extends __ServiceException { + constructor(options: __ServiceExceptionOptions); +} diff --git a/amplify/functions/deleteDocument/node_modules/@aws-sdk/client-sso/dist-types/ts3.4/models/index.d.ts b/amplify/functions/deleteDocument/node_modules/@aws-sdk/client-sso/dist-types/ts3.4/models/index.d.ts new file mode 100644 index 0000000..09c5d6e --- /dev/null +++ 
b/amplify/functions/deleteDocument/node_modules/@aws-sdk/client-sso/dist-types/ts3.4/models/index.d.ts @@ -0,0 +1 @@ +export * from "./models_0"; diff --git a/amplify/functions/deleteDocument/node_modules/@aws-sdk/client-sso/dist-types/ts3.4/models/models_0.d.ts b/amplify/functions/deleteDocument/node_modules/@aws-sdk/client-sso/dist-types/ts3.4/models/models_0.d.ts new file mode 100644 index 0000000..4bbe08c --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@aws-sdk/client-sso/dist-types/ts3.4/models/models_0.d.ts @@ -0,0 +1,93 @@ +import { ExceptionOptionType as __ExceptionOptionType } from "@smithy/smithy-client"; +import { SSOServiceException as __BaseException } from "./SSOServiceException"; +export interface AccountInfo { + accountId?: string | undefined; + accountName?: string | undefined; + emailAddress?: string | undefined; +} +export interface GetRoleCredentialsRequest { + roleName: string | undefined; + accountId: string | undefined; + accessToken: string | undefined; +} +export interface RoleCredentials { + accessKeyId?: string | undefined; + secretAccessKey?: string | undefined; + sessionToken?: string | undefined; + expiration?: number | undefined; +} +export interface GetRoleCredentialsResponse { + roleCredentials?: RoleCredentials | undefined; +} +export declare class InvalidRequestException extends __BaseException { + readonly name: "InvalidRequestException"; + readonly $fault: "client"; + constructor( + opts: __ExceptionOptionType + ); +} +export declare class ResourceNotFoundException extends __BaseException { + readonly name: "ResourceNotFoundException"; + readonly $fault: "client"; + constructor( + opts: __ExceptionOptionType + ); +} +export declare class TooManyRequestsException extends __BaseException { + readonly name: "TooManyRequestsException"; + readonly $fault: "client"; + constructor( + opts: __ExceptionOptionType + ); +} +export declare class UnauthorizedException extends __BaseException { + readonly name: 
"UnauthorizedException"; + readonly $fault: "client"; + constructor( + opts: __ExceptionOptionType + ); +} +export interface ListAccountRolesRequest { + nextToken?: string | undefined; + maxResults?: number | undefined; + accessToken: string | undefined; + accountId: string | undefined; +} +export interface RoleInfo { + roleName?: string | undefined; + accountId?: string | undefined; +} +export interface ListAccountRolesResponse { + nextToken?: string | undefined; + roleList?: RoleInfo[] | undefined; +} +export interface ListAccountsRequest { + nextToken?: string | undefined; + maxResults?: number | undefined; + accessToken: string | undefined; +} +export interface ListAccountsResponse { + nextToken?: string | undefined; + accountList?: AccountInfo[] | undefined; +} +export interface LogoutRequest { + accessToken: string | undefined; +} +export declare const GetRoleCredentialsRequestFilterSensitiveLog: ( + obj: GetRoleCredentialsRequest +) => any; +export declare const RoleCredentialsFilterSensitiveLog: ( + obj: RoleCredentials +) => any; +export declare const GetRoleCredentialsResponseFilterSensitiveLog: ( + obj: GetRoleCredentialsResponse +) => any; +export declare const ListAccountRolesRequestFilterSensitiveLog: ( + obj: ListAccountRolesRequest +) => any; +export declare const ListAccountsRequestFilterSensitiveLog: ( + obj: ListAccountsRequest +) => any; +export declare const LogoutRequestFilterSensitiveLog: ( + obj: LogoutRequest +) => any; diff --git a/amplify/functions/deleteDocument/node_modules/@aws-sdk/client-sso/dist-types/ts3.4/pagination/Interfaces.d.ts b/amplify/functions/deleteDocument/node_modules/@aws-sdk/client-sso/dist-types/ts3.4/pagination/Interfaces.d.ts new file mode 100644 index 0000000..2970898 --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@aws-sdk/client-sso/dist-types/ts3.4/pagination/Interfaces.d.ts @@ -0,0 +1,5 @@ +import { PaginationConfiguration } from "@smithy/types"; +import { SSOClient } from "../SSOClient"; 
+export interface SSOPaginationConfiguration extends PaginationConfiguration { + client: SSOClient; +} diff --git a/amplify/functions/deleteDocument/node_modules/@aws-sdk/client-sso/dist-types/ts3.4/pagination/ListAccountRolesPaginator.d.ts b/amplify/functions/deleteDocument/node_modules/@aws-sdk/client-sso/dist-types/ts3.4/pagination/ListAccountRolesPaginator.d.ts new file mode 100644 index 0000000..174f32b --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@aws-sdk/client-sso/dist-types/ts3.4/pagination/ListAccountRolesPaginator.d.ts @@ -0,0 +1,11 @@ +import { Paginator } from "@smithy/types"; +import { + ListAccountRolesCommandInput, + ListAccountRolesCommandOutput, +} from "../commands/ListAccountRolesCommand"; +import { SSOPaginationConfiguration } from "./Interfaces"; +export declare const paginateListAccountRoles: ( + config: SSOPaginationConfiguration, + input: ListAccountRolesCommandInput, + ...rest: any[] +) => Paginator; diff --git a/amplify/functions/deleteDocument/node_modules/@aws-sdk/client-sso/dist-types/ts3.4/pagination/ListAccountsPaginator.d.ts b/amplify/functions/deleteDocument/node_modules/@aws-sdk/client-sso/dist-types/ts3.4/pagination/ListAccountsPaginator.d.ts new file mode 100644 index 0000000..bb5e66d --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@aws-sdk/client-sso/dist-types/ts3.4/pagination/ListAccountsPaginator.d.ts @@ -0,0 +1,11 @@ +import { Paginator } from "@smithy/types"; +import { + ListAccountsCommandInput, + ListAccountsCommandOutput, +} from "../commands/ListAccountsCommand"; +import { SSOPaginationConfiguration } from "./Interfaces"; +export declare const paginateListAccounts: ( + config: SSOPaginationConfiguration, + input: ListAccountsCommandInput, + ...rest: any[] +) => Paginator; diff --git a/amplify/functions/deleteDocument/node_modules/@aws-sdk/client-sso/dist-types/ts3.4/pagination/index.d.ts 
b/amplify/functions/deleteDocument/node_modules/@aws-sdk/client-sso/dist-types/ts3.4/pagination/index.d.ts new file mode 100644 index 0000000..1e7866f --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@aws-sdk/client-sso/dist-types/ts3.4/pagination/index.d.ts @@ -0,0 +1,3 @@ +export * from "./Interfaces"; +export * from "./ListAccountRolesPaginator"; +export * from "./ListAccountsPaginator"; diff --git a/amplify/functions/deleteDocument/node_modules/@aws-sdk/client-sso/dist-types/ts3.4/protocols/Aws_restJson1.d.ts b/amplify/functions/deleteDocument/node_modules/@aws-sdk/client-sso/dist-types/ts3.4/protocols/Aws_restJson1.d.ts new file mode 100644 index 0000000..74eebdc --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@aws-sdk/client-sso/dist-types/ts3.4/protocols/Aws_restJson1.d.ts @@ -0,0 +1,53 @@ +import { + HttpRequest as __HttpRequest, + HttpResponse as __HttpResponse, +} from "@smithy/protocol-http"; +import { SerdeContext as __SerdeContext } from "@smithy/types"; +import { + GetRoleCredentialsCommandInput, + GetRoleCredentialsCommandOutput, +} from "../commands/GetRoleCredentialsCommand"; +import { + ListAccountRolesCommandInput, + ListAccountRolesCommandOutput, +} from "../commands/ListAccountRolesCommand"; +import { + ListAccountsCommandInput, + ListAccountsCommandOutput, +} from "../commands/ListAccountsCommand"; +import { + LogoutCommandInput, + LogoutCommandOutput, +} from "../commands/LogoutCommand"; +export declare const se_GetRoleCredentialsCommand: ( + input: GetRoleCredentialsCommandInput, + context: __SerdeContext +) => Promise<__HttpRequest>; +export declare const se_ListAccountRolesCommand: ( + input: ListAccountRolesCommandInput, + context: __SerdeContext +) => Promise<__HttpRequest>; +export declare const se_ListAccountsCommand: ( + input: ListAccountsCommandInput, + context: __SerdeContext +) => Promise<__HttpRequest>; +export declare const se_LogoutCommand: ( + input: LogoutCommandInput, + context: 
__SerdeContext +) => Promise<__HttpRequest>; +export declare const de_GetRoleCredentialsCommand: ( + output: __HttpResponse, + context: __SerdeContext +) => Promise; +export declare const de_ListAccountRolesCommand: ( + output: __HttpResponse, + context: __SerdeContext +) => Promise; +export declare const de_ListAccountsCommand: ( + output: __HttpResponse, + context: __SerdeContext +) => Promise; +export declare const de_LogoutCommand: ( + output: __HttpResponse, + context: __SerdeContext +) => Promise; diff --git a/amplify/functions/deleteDocument/node_modules/@aws-sdk/client-sso/dist-types/ts3.4/runtimeConfig.browser.d.ts b/amplify/functions/deleteDocument/node_modules/@aws-sdk/client-sso/dist-types/ts3.4/runtimeConfig.browser.d.ts new file mode 100644 index 0000000..4042bcf --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@aws-sdk/client-sso/dist-types/ts3.4/runtimeConfig.browser.d.ts @@ -0,0 +1,120 @@ +import { FetchHttpHandler as RequestHandler } from "@smithy/fetch-http-handler"; +import { SSOClientConfig } from "./SSOClient"; +export declare const getRuntimeConfig: (config: SSOClientConfig) => { + runtime: string; + defaultsMode: import("@smithy/types").Provider< + import("@smithy/smithy-client").ResolvedDefaultsMode + >; + bodyLengthChecker: import("@smithy/types").BodyLengthCalculator; + defaultUserAgentProvider: ( + config?: + | import("@aws-sdk/util-user-agent-browser").PreviouslyResolved + | undefined + ) => Promise; + maxAttempts: number | import("@smithy/types").Provider; + region: string | import("@smithy/types").Provider; + requestHandler: + | import("@smithy/protocol-http").HttpHandler + | RequestHandler; + retryMode: string | import("@smithy/types").Provider; + sha256: import("@smithy/types").HashConstructor; + streamCollector: import("@smithy/types").StreamCollector; + useDualstackEndpoint: boolean | import("@smithy/types").Provider; + useFipsEndpoint: boolean | import("@smithy/types").Provider; + apiVersion: string; + 
cacheMiddleware?: boolean | undefined; + urlParser: import("@smithy/types").UrlParser; + base64Decoder: import("@smithy/types").Decoder; + base64Encoder: (_input: string | Uint8Array) => string; + utf8Decoder: import("@smithy/types").Decoder; + utf8Encoder: (input: string | Uint8Array) => string; + disableHostPrefix: boolean; + serviceId: string; + profile?: string | undefined; + logger: import("@smithy/types").Logger; + extensions: import("./runtimeExtensions").RuntimeExtension[]; + customUserAgent?: string | import("@smithy/types").UserAgent | undefined; + userAgentAppId?: + | string + | import("@smithy/types").Provider + | undefined; + retryStrategy?: + | import("@smithy/types").RetryStrategy + | import("@smithy/types").RetryStrategyV2 + | undefined; + endpoint?: + | (( + | string + | import("@smithy/types").Endpoint + | import("@smithy/types").Provider + | import("@smithy/types").EndpointV2 + | import("@smithy/types").Provider + ) & + ( + | string + | import("@smithy/types").Provider + | import("@smithy/types").Endpoint + | import("@smithy/types").Provider + | import("@smithy/types").EndpointV2 + | import("@smithy/types").Provider + )) + | undefined; + endpointProvider: ( + endpointParams: import("./endpoint/EndpointParameters").EndpointParameters, + context?: { + logger?: import("@smithy/types").Logger | undefined; + } + ) => import("@smithy/types").EndpointV2; + tls?: boolean | undefined; + serviceConfiguredEndpoint?: undefined; + authSchemePreference?: + | string[] + | import("@smithy/types").Provider + | undefined; + httpAuthSchemes: + | import("@smithy/types").HttpAuthScheme[] + | ( + | { + schemeId: string; + identityProvider: ( + ipc: import("@smithy/types").IdentityProviderConfig + ) => + | import("@smithy/types").IdentityProvider< + import("@smithy/types").Identity + > + | undefined; + signer: import("@aws-sdk/core").AwsSdkSigV4Signer; + } + | { + schemeId: string; + identityProvider: ( + ipc: import("@smithy/types").IdentityProviderConfig + ) => + | 
import("@smithy/types").IdentityProvider< + import("@smithy/types").Identity + > + | (() => Promise<{}>); + signer: import("@smithy/core").NoAuthSigner; + } + )[]; + httpAuthSchemeProvider: import("./auth/httpAuthSchemeProvider").SSOHttpAuthSchemeProvider; + credentials?: + | import("@smithy/types").AwsCredentialIdentity + | import("@smithy/types").AwsCredentialIdentityProvider + | undefined; + signer?: + | import("@smithy/types").RequestSigner + | (( + authScheme?: import("@smithy/types").AuthScheme | undefined + ) => Promise) + | undefined; + signingEscapePath?: boolean | undefined; + systemClockOffset?: number | undefined; + signingRegion?: string | undefined; + signerConstructor?: + | (new ( + options: import("@smithy/signature-v4").SignatureV4Init & + import("@smithy/signature-v4").SignatureV4CryptoInit + ) => import("@smithy/types").RequestSigner) + | undefined; +}; diff --git a/amplify/functions/deleteDocument/node_modules/@aws-sdk/client-sso/dist-types/ts3.4/runtimeConfig.d.ts b/amplify/functions/deleteDocument/node_modules/@aws-sdk/client-sso/dist-types/ts3.4/runtimeConfig.d.ts new file mode 100644 index 0000000..7152445 --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@aws-sdk/client-sso/dist-types/ts3.4/runtimeConfig.d.ts @@ -0,0 +1,114 @@ +import { NodeHttpHandler as RequestHandler } from "@smithy/node-http-handler"; +import { SSOClientConfig } from "./SSOClient"; +export declare const getRuntimeConfig: (config: SSOClientConfig) => { + runtime: string; + defaultsMode: import("@smithy/types").Provider< + import("@smithy/smithy-client").ResolvedDefaultsMode + >; + authSchemePreference: string[] | import("@smithy/types").Provider; + bodyLengthChecker: import("@smithy/types").BodyLengthCalculator; + defaultUserAgentProvider: ( + config?: + | import("@aws-sdk/util-user-agent-node").PreviouslyResolved + | undefined + ) => Promise; + maxAttempts: number | import("@smithy/types").Provider; + region: string | import("@smithy/types").Provider; + 
requestHandler: + | RequestHandler + | import("@smithy/protocol-http").HttpHandler; + retryMode: string | import("@smithy/types").Provider; + sha256: import("@smithy/types").HashConstructor; + streamCollector: import("@smithy/types").StreamCollector; + useDualstackEndpoint: boolean | import("@smithy/types").Provider; + useFipsEndpoint: boolean | import("@smithy/types").Provider; + userAgentAppId: string | import("@smithy/types").Provider; + apiVersion: string; + cacheMiddleware?: boolean | undefined; + urlParser: import("@smithy/types").UrlParser; + base64Decoder: import("@smithy/types").Decoder; + base64Encoder: (_input: string | Uint8Array) => string; + utf8Decoder: import("@smithy/types").Decoder; + utf8Encoder: (input: string | Uint8Array) => string; + disableHostPrefix: boolean; + serviceId: string; + profile?: string | undefined; + logger: import("@smithy/types").Logger; + extensions: import("./runtimeExtensions").RuntimeExtension[]; + customUserAgent?: string | import("@smithy/types").UserAgent | undefined; + retryStrategy?: + | import("@smithy/types").RetryStrategy + | import("@smithy/types").RetryStrategyV2 + | undefined; + endpoint?: + | (( + | string + | import("@smithy/types").Endpoint + | import("@smithy/types").Provider + | import("@smithy/types").EndpointV2 + | import("@smithy/types").Provider + ) & + ( + | string + | import("@smithy/types").Provider + | import("@smithy/types").Endpoint + | import("@smithy/types").Provider + | import("@smithy/types").EndpointV2 + | import("@smithy/types").Provider + )) + | undefined; + endpointProvider: ( + endpointParams: import("./endpoint/EndpointParameters").EndpointParameters, + context?: { + logger?: import("@smithy/types").Logger | undefined; + } + ) => import("@smithy/types").EndpointV2; + tls?: boolean | undefined; + serviceConfiguredEndpoint?: undefined; + httpAuthSchemes: + | import("@smithy/types").HttpAuthScheme[] + | ( + | { + schemeId: string; + identityProvider: ( + ipc: 
import("@smithy/types").IdentityProviderConfig + ) => + | import("@smithy/types").IdentityProvider< + import("@smithy/types").Identity + > + | undefined; + signer: import("@aws-sdk/core").AwsSdkSigV4Signer; + } + | { + schemeId: string; + identityProvider: ( + ipc: import("@smithy/types").IdentityProviderConfig + ) => + | import("@smithy/types").IdentityProvider< + import("@smithy/types").Identity + > + | (() => Promise<{}>); + signer: import("@smithy/core").NoAuthSigner; + } + )[]; + httpAuthSchemeProvider: import("./auth/httpAuthSchemeProvider").SSOHttpAuthSchemeProvider; + credentials?: + | import("@smithy/types").AwsCredentialIdentity + | import("@smithy/types").AwsCredentialIdentityProvider + | undefined; + signer?: + | import("@smithy/types").RequestSigner + | (( + authScheme?: import("@smithy/types").AuthScheme | undefined + ) => Promise) + | undefined; + signingEscapePath?: boolean | undefined; + systemClockOffset?: number | undefined; + signingRegion?: string | undefined; + signerConstructor?: + | (new ( + options: import("@smithy/signature-v4").SignatureV4Init & + import("@smithy/signature-v4").SignatureV4CryptoInit + ) => import("@smithy/types").RequestSigner) + | undefined; +}; diff --git a/amplify/functions/deleteDocument/node_modules/@aws-sdk/client-sso/dist-types/ts3.4/runtimeConfig.native.d.ts b/amplify/functions/deleteDocument/node_modules/@aws-sdk/client-sso/dist-types/ts3.4/runtimeConfig.native.d.ts new file mode 100644 index 0000000..3dc6c95 --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@aws-sdk/client-sso/dist-types/ts3.4/runtimeConfig.native.d.ts @@ -0,0 +1,124 @@ +import { SSOClientConfig } from "./SSOClient"; +export declare const getRuntimeConfig: (config: SSOClientConfig) => { + runtime: string; + sha256: import("@smithy/types").HashConstructor; + requestHandler: + | import("@smithy/types").NodeHttpHandlerOptions + | import("@smithy/types").FetchHttpHandlerOptions + | Record + | 
import("@smithy/protocol-http").HttpHandler + | import("@smithy/fetch-http-handler").FetchHttpHandler; + apiVersion: string; + cacheMiddleware?: boolean | undefined; + urlParser: import("@smithy/types").UrlParser; + bodyLengthChecker: import("@smithy/types").BodyLengthCalculator; + streamCollector: import("@smithy/types").StreamCollector; + base64Decoder: import("@smithy/types").Decoder; + base64Encoder: (_input: string | Uint8Array) => string; + utf8Decoder: import("@smithy/types").Decoder; + utf8Encoder: (input: string | Uint8Array) => string; + disableHostPrefix: boolean; + serviceId: string; + useDualstackEndpoint: boolean | import("@smithy/types").Provider; + useFipsEndpoint: boolean | import("@smithy/types").Provider; + region: string | import("@smithy/types").Provider; + profile?: string | undefined; + defaultUserAgentProvider: ( + config?: + | import("@aws-sdk/util-user-agent-browser").PreviouslyResolved + | undefined + ) => Promise; + maxAttempts: number | import("@smithy/types").Provider; + retryMode: string | import("@smithy/types").Provider; + logger: import("@smithy/types").Logger; + extensions: import("./runtimeExtensions").RuntimeExtension[]; + defaultsMode: + | import("@smithy/smithy-client").DefaultsMode + | import("@smithy/types").Provider< + import("@smithy/smithy-client").DefaultsMode + >; + customUserAgent?: string | import("@smithy/types").UserAgent | undefined; + userAgentAppId?: + | string + | import("@smithy/types").Provider + | undefined; + retryStrategy?: + | import("@smithy/types").RetryStrategy + | import("@smithy/types").RetryStrategyV2 + | undefined; + endpoint?: + | (( + | string + | import("@smithy/types").Endpoint + | import("@smithy/types").Provider + | import("@smithy/types").EndpointV2 + | import("@smithy/types").Provider + ) & + ( + | string + | import("@smithy/types").Provider + | import("@smithy/types").Endpoint + | import("@smithy/types").Provider + | import("@smithy/types").EndpointV2 + | import("@smithy/types").Provider + 
)) + | undefined; + endpointProvider: ( + endpointParams: import("./endpoint/EndpointParameters").EndpointParameters, + context?: { + logger?: import("@smithy/types").Logger | undefined; + } + ) => import("@smithy/types").EndpointV2; + tls?: boolean | undefined; + serviceConfiguredEndpoint?: undefined; + authSchemePreference?: + | string[] + | import("@smithy/types").Provider + | undefined; + httpAuthSchemes: + | import("@smithy/types").HttpAuthScheme[] + | ( + | { + schemeId: string; + identityProvider: ( + ipc: import("@smithy/types").IdentityProviderConfig + ) => + | import("@smithy/types").IdentityProvider< + import("@smithy/types").Identity + > + | undefined; + signer: import("@aws-sdk/core").AwsSdkSigV4Signer; + } + | { + schemeId: string; + identityProvider: ( + ipc: import("@smithy/types").IdentityProviderConfig + ) => + | import("@smithy/types").IdentityProvider< + import("@smithy/types").Identity + > + | (() => Promise<{}>); + signer: import("@smithy/core").NoAuthSigner; + } + )[]; + httpAuthSchemeProvider: import("./auth/httpAuthSchemeProvider").SSOHttpAuthSchemeProvider; + credentials?: + | import("@smithy/types").AwsCredentialIdentity + | import("@smithy/types").AwsCredentialIdentityProvider + | undefined; + signer?: + | import("@smithy/types").RequestSigner + | (( + authScheme?: import("@smithy/types").AuthScheme | undefined + ) => Promise) + | undefined; + signingEscapePath?: boolean | undefined; + systemClockOffset?: number | undefined; + signingRegion?: string | undefined; + signerConstructor?: + | (new ( + options: import("@smithy/signature-v4").SignatureV4Init & + import("@smithy/signature-v4").SignatureV4CryptoInit + ) => import("@smithy/types").RequestSigner) + | undefined; +}; diff --git a/amplify/functions/deleteDocument/node_modules/@aws-sdk/client-sso/dist-types/ts3.4/runtimeConfig.shared.d.ts b/amplify/functions/deleteDocument/node_modules/@aws-sdk/client-sso/dist-types/ts3.4/runtimeConfig.shared.d.ts new file mode 100644 index 
0000000..00b2942 --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@aws-sdk/client-sso/dist-types/ts3.4/runtimeConfig.shared.d.ts @@ -0,0 +1,49 @@ +import { AwsSdkSigV4Signer } from "@aws-sdk/core"; +import { NoAuthSigner } from "@smithy/core"; +import { IdentityProviderConfig } from "@smithy/types"; +import { SSOClientConfig } from "./SSOClient"; +export declare const getRuntimeConfig: (config: SSOClientConfig) => { + apiVersion: string; + base64Decoder: import("@smithy/types").Decoder; + base64Encoder: (_input: string | Uint8Array) => string; + disableHostPrefix: boolean; + endpointProvider: ( + endpointParams: import("./endpoint/EndpointParameters").EndpointParameters, + context?: { + logger?: import("@smithy/types").Logger | undefined; + } + ) => import("@smithy/types").EndpointV2; + extensions: import("./runtimeExtensions").RuntimeExtension[]; + httpAuthSchemeProvider: import("./auth/httpAuthSchemeProvider").SSOHttpAuthSchemeProvider; + httpAuthSchemes: + | import("@smithy/types").HttpAuthScheme[] + | ( + | { + schemeId: string; + identityProvider: ( + ipc: IdentityProviderConfig + ) => + | import("@smithy/types").IdentityProvider< + import("@smithy/types").Identity + > + | undefined; + signer: AwsSdkSigV4Signer; + } + | { + schemeId: string; + identityProvider: ( + ipc: IdentityProviderConfig + ) => + | import("@smithy/types").IdentityProvider< + import("@smithy/types").Identity + > + | (() => Promise<{}>); + signer: NoAuthSigner; + } + )[]; + logger: import("@smithy/types").Logger; + serviceId: string; + urlParser: import("@smithy/types").UrlParser; + utf8Decoder: import("@smithy/types").Decoder; + utf8Encoder: (input: string | Uint8Array) => string; +}; diff --git a/amplify/functions/deleteDocument/node_modules/@aws-sdk/client-sso/dist-types/ts3.4/runtimeExtensions.d.ts b/amplify/functions/deleteDocument/node_modules/@aws-sdk/client-sso/dist-types/ts3.4/runtimeExtensions.d.ts new file mode 100644 index 0000000..fbec1e5 --- /dev/null +++ 
b/amplify/functions/deleteDocument/node_modules/@aws-sdk/client-sso/dist-types/ts3.4/runtimeExtensions.d.ts @@ -0,0 +1,11 @@ +import { SSOExtensionConfiguration } from "./extensionConfiguration"; +export interface RuntimeExtension { + configure(extensionConfiguration: SSOExtensionConfiguration): void; +} +export interface RuntimeExtensionsConfig { + extensions: RuntimeExtension[]; +} +export declare const resolveRuntimeExtensions: ( + runtimeConfig: any, + extensions: RuntimeExtension[] +) => any; diff --git a/amplify/functions/deleteDocument/node_modules/@aws-sdk/client-sso/package.json b/amplify/functions/deleteDocument/node_modules/@aws-sdk/client-sso/package.json new file mode 100644 index 0000000..971fcd1 --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@aws-sdk/client-sso/package.json @@ -0,0 +1,98 @@ +{ + "name": "@aws-sdk/client-sso", + "description": "AWS SDK for JavaScript Sso Client for Node.js, Browser and React Native", + "version": "3.803.0", + "scripts": { + "build": "concurrently 'yarn:build:cjs' 'yarn:build:es' 'yarn:build:types'", + "build:cjs": "node ../../scripts/compilation/inline client-sso", + "build:es": "tsc -p tsconfig.es.json", + "build:include:deps": "lerna run --scope $npm_package_name --include-dependencies build", + "build:types": "tsc -p tsconfig.types.json", + "build:types:downlevel": "downlevel-dts dist-types dist-types/ts3.4", + "clean": "rimraf ./dist-* && rimraf *.tsbuildinfo", + "extract:docs": "api-extractor run --local", + "generate:client": "node ../../scripts/generate-clients/single-service --solo sso" + }, + "main": "./dist-cjs/index.js", + "types": "./dist-types/index.d.ts", + "module": "./dist-es/index.js", + "sideEffects": false, + "dependencies": { + "@aws-crypto/sha256-browser": "5.2.0", + "@aws-crypto/sha256-js": "5.2.0", + "@aws-sdk/core": "3.799.0", + "@aws-sdk/middleware-host-header": "3.775.0", + "@aws-sdk/middleware-logger": "3.775.0", + "@aws-sdk/middleware-recursion-detection": "3.775.0", + 
"@aws-sdk/middleware-user-agent": "3.799.0", + "@aws-sdk/region-config-resolver": "3.775.0", + "@aws-sdk/types": "3.775.0", + "@aws-sdk/util-endpoints": "3.787.0", + "@aws-sdk/util-user-agent-browser": "3.775.0", + "@aws-sdk/util-user-agent-node": "3.799.0", + "@smithy/config-resolver": "^4.1.0", + "@smithy/core": "^3.3.0", + "@smithy/fetch-http-handler": "^5.0.2", + "@smithy/hash-node": "^4.0.2", + "@smithy/invalid-dependency": "^4.0.2", + "@smithy/middleware-content-length": "^4.0.2", + "@smithy/middleware-endpoint": "^4.1.1", + "@smithy/middleware-retry": "^4.1.2", + "@smithy/middleware-serde": "^4.0.3", + "@smithy/middleware-stack": "^4.0.2", + "@smithy/node-config-provider": "^4.0.2", + "@smithy/node-http-handler": "^4.0.4", + "@smithy/protocol-http": "^5.1.0", + "@smithy/smithy-client": "^4.2.1", + "@smithy/types": "^4.2.0", + "@smithy/url-parser": "^4.0.2", + "@smithy/util-base64": "^4.0.0", + "@smithy/util-body-length-browser": "^4.0.0", + "@smithy/util-body-length-node": "^4.0.0", + "@smithy/util-defaults-mode-browser": "^4.0.9", + "@smithy/util-defaults-mode-node": "^4.0.9", + "@smithy/util-endpoints": "^3.0.2", + "@smithy/util-middleware": "^4.0.2", + "@smithy/util-retry": "^4.0.3", + "@smithy/util-utf8": "^4.0.0", + "tslib": "^2.6.2" + }, + "devDependencies": { + "@tsconfig/node18": "18.2.4", + "@types/node": "^18.19.69", + "concurrently": "7.0.0", + "downlevel-dts": "0.10.1", + "rimraf": "3.0.2", + "typescript": "~5.2.2" + }, + "engines": { + "node": ">=18.0.0" + }, + "typesVersions": { + "<4.0": { + "dist-types/*": [ + "dist-types/ts3.4/*" + ] + } + }, + "files": [ + "dist-*/**" + ], + "author": { + "name": "AWS SDK for JavaScript Team", + "url": "https://aws.amazon.com/javascript/" + }, + "license": "Apache-2.0", + "browser": { + "./dist-es/runtimeConfig": "./dist-es/runtimeConfig.browser" + }, + "react-native": { + "./dist-es/runtimeConfig": "./dist-es/runtimeConfig.native" + }, + "homepage": 
"https://github.com/aws/aws-sdk-js-v3/tree/main/clients/client-sso", + "repository": { + "type": "git", + "url": "https://github.com/aws/aws-sdk-js-v3.git", + "directory": "clients/client-sso" + } +} diff --git a/amplify/functions/deleteDocument/node_modules/@aws-sdk/core/README.md b/amplify/functions/deleteDocument/node_modules/@aws-sdk/core/README.md new file mode 100644 index 0000000..6056468 --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@aws-sdk/core/README.md @@ -0,0 +1,39 @@ +# `@aws-sdk/core` + +This package provides common or core functionality to the AWS SDK for JavaScript (v3). + +You do not need to explicitly install this package, since it will be transitively installed by AWS SDK clients. + +## `@aws-sdk/core` submodules + +Core submodules are organized for distribution via the `package.json` `exports` field. + +`exports` is supported by default by the latest Node.js, webpack, and esbuild. For react-native, it can be +enabled via instructions found at [reactnative.dev/blog](https://reactnative.dev/blog/2023/06/21/package-exports-support). + +Think of `@aws-sdk/core` as a mono-package within the monorepo. +It preserves the benefits of modularization, for example to optimize Node.js initialization speed, +while making it easier to have a consistent version of core dependencies, reducing package sprawl when +installing an SDK client. + +### Guide for submodules + +- Each `index.ts` file corresponding to the pattern `./src/submodules//index.ts` will be + published as a separate `dist-cjs` bundled submodule index using the `Inliner.js` build script. +- create a folder as `./src/submodules/` including an `index.ts` file and a `README.md` file. + - The linter will throw an error on missing submodule metadata in `package.json` and the various `tsconfig.json` files, but it will automatically fix them if possible. +- a submodule is equivalent to a standalone `@aws-sdk/` package in that importing it in Node.js will resolve a separate bundle. 
+- submodules may not relatively import files from other submodules. Instead, directly use the `@scope/pkg/submodule` name as the import. + - The linter will check for this and throw an error. +- To the extent possible, correctly declaring submodule metadata is validated by the linter in `@aws-sdk/core`. + The linter runs during `yarn build` and also as `yarn lint`. + +### When should I create an `@aws-sdk/core/submodule` vs. `@aws-sdk/new-package`? + +Keep in mind that the core package is installed by all AWS SDK clients. + +If the component functionality is upstream of multiple clients, it is +a good candidate for a core submodule. For example, XML serialization. + +If the component's functionality is downstream of a client, for example S3 pre-signing, +it should be a standalone package with potentially a peer or runtime dependency on an AWS SDK client. diff --git a/amplify/functions/deleteDocument/node_modules/@aws-sdk/core/account-id-endpoint.d.ts b/amplify/functions/deleteDocument/node_modules/@aws-sdk/core/account-id-endpoint.d.ts new file mode 100644 index 0000000..60f14d1 --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@aws-sdk/core/account-id-endpoint.d.ts @@ -0,0 +1,7 @@ +/** + * Do not edit: + * This is a compatibility redirect for contexts that do not understand package.json exports field. + */ +declare module "@aws-sdk/core/account-id-endpoint" { + export * from "@aws-sdk/core/dist-types/submodules/account-id-endpoint/index.d"; +} diff --git a/amplify/functions/deleteDocument/node_modules/@aws-sdk/core/account-id-endpoint.js b/amplify/functions/deleteDocument/node_modules/@aws-sdk/core/account-id-endpoint.js new file mode 100644 index 0000000..b2550f7 --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@aws-sdk/core/account-id-endpoint.js @@ -0,0 +1,6 @@ + +/** + * Do not edit: + * This is a compatibility redirect for contexts that do not understand package.json exports field. 
+ */ +module.exports = require("./dist-cjs/submodules/account-id-endpoint/index.js"); diff --git a/amplify/functions/deleteDocument/node_modules/@aws-sdk/core/client.d.ts b/amplify/functions/deleteDocument/node_modules/@aws-sdk/core/client.d.ts new file mode 100644 index 0000000..ce995ae --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@aws-sdk/core/client.d.ts @@ -0,0 +1,7 @@ +/** + * Do not edit: + * This is a compatibility redirect for contexts that do not understand package.json exports field. + */ +declare module "@aws-sdk/core/client" { + export * from "@aws-sdk/core/dist-types/submodules/client/index.d"; +} diff --git a/amplify/functions/deleteDocument/node_modules/@aws-sdk/core/client.js b/amplify/functions/deleteDocument/node_modules/@aws-sdk/core/client.js new file mode 100644 index 0000000..e3a644b --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@aws-sdk/core/client.js @@ -0,0 +1,5 @@ +/** + * Do not edit: + * This is a compatibility redirect for contexts that do not understand package.json exports field. 
+ */ +module.exports = require("./dist-cjs/submodules/client/index.js"); diff --git a/amplify/functions/deleteDocument/node_modules/@aws-sdk/core/dist-cjs/index.js b/amplify/functions/deleteDocument/node_modules/@aws-sdk/core/dist-cjs/index.js new file mode 100644 index 0000000..cddde6a --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@aws-sdk/core/dist-cjs/index.js @@ -0,0 +1,6 @@ +"use strict"; +Object.defineProperty(exports, "__esModule", { value: true }); +const tslib_1 = require("tslib"); +tslib_1.__exportStar(require("./submodules/client/index"), exports); +tslib_1.__exportStar(require("./submodules/httpAuthSchemes/index"), exports); +tslib_1.__exportStar(require("./submodules/protocols/index"), exports); diff --git a/amplify/functions/deleteDocument/node_modules/@aws-sdk/core/dist-cjs/submodules/account-id-endpoint/index.js b/amplify/functions/deleteDocument/node_modules/@aws-sdk/core/dist-cjs/submodules/account-id-endpoint/index.js new file mode 100644 index 0000000..c277b79 --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@aws-sdk/core/dist-cjs/submodules/account-id-endpoint/index.js @@ -0,0 +1,95 @@ +"use strict"; +var __defProp = Object.defineProperty; +var __getOwnPropDesc = Object.getOwnPropertyDescriptor; +var __getOwnPropNames = Object.getOwnPropertyNames; +var __hasOwnProp = Object.prototype.hasOwnProperty; +var __name = (target, value) => __defProp(target, "name", { value, configurable: true }); +var __export = (target, all) => { + for (var name in all) + __defProp(target, name, { get: all[name], enumerable: true }); +}; +var __copyProps = (to, from, except, desc) => { + if (from && typeof from === "object" || typeof from === "function") { + for (let key of __getOwnPropNames(from)) + if (!__hasOwnProp.call(to, key) && key !== except) + __defProp(to, key, { get: () => from[key], enumerable: !(desc = __getOwnPropDesc(from, key)) || desc.enumerable }); + } + return to; +}; +var __toCommonJS = (mod) => 
__copyProps(__defProp({}, "__esModule", { value: true }), mod); + +// src/submodules/account-id-endpoint/index.ts +var index_exports = {}; +__export(index_exports, { + ACCOUNT_ID_ENDPOINT_MODE_VALUES: () => ACCOUNT_ID_ENDPOINT_MODE_VALUES, + CONFIG_ACCOUNT_ID_ENDPOINT_MODE: () => CONFIG_ACCOUNT_ID_ENDPOINT_MODE, + DEFAULT_ACCOUNT_ID_ENDPOINT_MODE: () => DEFAULT_ACCOUNT_ID_ENDPOINT_MODE, + ENV_ACCOUNT_ID_ENDPOINT_MODE: () => ENV_ACCOUNT_ID_ENDPOINT_MODE, + NODE_ACCOUNT_ID_ENDPOINT_MODE_CONFIG_OPTIONS: () => NODE_ACCOUNT_ID_ENDPOINT_MODE_CONFIG_OPTIONS, + resolveAccountIdEndpointModeConfig: () => resolveAccountIdEndpointModeConfig, + validateAccountIdEndpointMode: () => validateAccountIdEndpointMode +}); +module.exports = __toCommonJS(index_exports); + +// src/submodules/account-id-endpoint/AccountIdEndpointModeConfigResolver.ts +var import_util_middleware = require("@smithy/util-middleware"); + +// src/submodules/account-id-endpoint/AccountIdEndpointModeConstants.ts +var DEFAULT_ACCOUNT_ID_ENDPOINT_MODE = "preferred"; +var ACCOUNT_ID_ENDPOINT_MODE_VALUES = ["disabled", "preferred", "required"]; +function validateAccountIdEndpointMode(value) { + return ACCOUNT_ID_ENDPOINT_MODE_VALUES.includes(value); +} +__name(validateAccountIdEndpointMode, "validateAccountIdEndpointMode"); + +// src/submodules/account-id-endpoint/AccountIdEndpointModeConfigResolver.ts +var resolveAccountIdEndpointModeConfig = /* @__PURE__ */ __name((input) => { + const { accountIdEndpointMode } = input; + const accountIdEndpointModeProvider = (0, import_util_middleware.normalizeProvider)(accountIdEndpointMode ?? DEFAULT_ACCOUNT_ID_ENDPOINT_MODE); + return Object.assign(input, { + accountIdEndpointMode: /* @__PURE__ */ __name(async () => { + const accIdMode = await accountIdEndpointModeProvider(); + if (!validateAccountIdEndpointMode(accIdMode)) { + throw new Error( + `Invalid value for accountIdEndpointMode: ${accIdMode}. 
Valid values are: "required", "preferred", "disabled".` + ); + } + return accIdMode; + }, "accountIdEndpointMode") + }); +}, "resolveAccountIdEndpointModeConfig"); + +// src/submodules/account-id-endpoint/NodeAccountIdEndpointModeConfigOptions.ts +var err = "Invalid AccountIdEndpointMode value"; +var _throw = /* @__PURE__ */ __name((message) => { + throw new Error(message); +}, "_throw"); +var ENV_ACCOUNT_ID_ENDPOINT_MODE = "AWS_ACCOUNT_ID_ENDPOINT_MODE"; +var CONFIG_ACCOUNT_ID_ENDPOINT_MODE = "account_id_endpoint_mode"; +var NODE_ACCOUNT_ID_ENDPOINT_MODE_CONFIG_OPTIONS = { + environmentVariableSelector: /* @__PURE__ */ __name((env) => { + const value = env[ENV_ACCOUNT_ID_ENDPOINT_MODE]; + if (value && !validateAccountIdEndpointMode(value)) { + _throw(err); + } + return value; + }, "environmentVariableSelector"), + configFileSelector: /* @__PURE__ */ __name((profile) => { + const value = profile[CONFIG_ACCOUNT_ID_ENDPOINT_MODE]; + if (value && !validateAccountIdEndpointMode(value)) { + _throw(err); + } + return value; + }, "configFileSelector"), + default: DEFAULT_ACCOUNT_ID_ENDPOINT_MODE +}; +// Annotate the CommonJS export names for ESM import in node: +0 && (module.exports = { + ACCOUNT_ID_ENDPOINT_MODE_VALUES, + CONFIG_ACCOUNT_ID_ENDPOINT_MODE, + DEFAULT_ACCOUNT_ID_ENDPOINT_MODE, + ENV_ACCOUNT_ID_ENDPOINT_MODE, + NODE_ACCOUNT_ID_ENDPOINT_MODE_CONFIG_OPTIONS, + resolveAccountIdEndpointModeConfig, + validateAccountIdEndpointMode +}); diff --git a/amplify/functions/deleteDocument/node_modules/@aws-sdk/core/dist-cjs/submodules/client/index.js b/amplify/functions/deleteDocument/node_modules/@aws-sdk/core/dist-cjs/submodules/client/index.js new file mode 100644 index 0000000..ebd6c61 --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@aws-sdk/core/dist-cjs/submodules/client/index.js @@ -0,0 +1,78 @@ +"use strict"; +var __defProp = Object.defineProperty; +var __getOwnPropDesc = Object.getOwnPropertyDescriptor; +var __getOwnPropNames = 
Object.getOwnPropertyNames; +var __hasOwnProp = Object.prototype.hasOwnProperty; +var __name = (target, value) => __defProp(target, "name", { value, configurable: true }); +var __export = (target, all) => { + for (var name in all) + __defProp(target, name, { get: all[name], enumerable: true }); +}; +var __copyProps = (to, from, except, desc) => { + if (from && typeof from === "object" || typeof from === "function") { + for (let key of __getOwnPropNames(from)) + if (!__hasOwnProp.call(to, key) && key !== except) + __defProp(to, key, { get: () => from[key], enumerable: !(desc = __getOwnPropDesc(from, key)) || desc.enumerable }); + } + return to; +}; +var __toCommonJS = (mod) => __copyProps(__defProp({}, "__esModule", { value: true }), mod); + +// src/submodules/client/index.ts +var index_exports = {}; +__export(index_exports, { + emitWarningIfUnsupportedVersion: () => emitWarningIfUnsupportedVersion, + setCredentialFeature: () => setCredentialFeature, + setFeature: () => setFeature, + state: () => state +}); +module.exports = __toCommonJS(index_exports); + +// src/submodules/client/emitWarningIfUnsupportedVersion.ts +var state = { + warningEmitted: false +}; +var emitWarningIfUnsupportedVersion = /* @__PURE__ */ __name((version) => { + if (version && !state.warningEmitted && parseInt(version.substring(1, version.indexOf("."))) < 18) { + state.warningEmitted = true; + process.emitWarning( + `NodeDeprecationWarning: The AWS SDK for JavaScript (v3) will +no longer support Node.js 16.x on January 6, 2025. + +To continue receiving updates to AWS services, bug fixes, and security +updates please upgrade to a supported Node.js LTS version. 
+ +More information can be found at: https://a.co/74kJMmI` + ); + } +}, "emitWarningIfUnsupportedVersion"); + +// src/submodules/client/setCredentialFeature.ts +function setCredentialFeature(credentials, feature, value) { + if (!credentials.$source) { + credentials.$source = {}; + } + credentials.$source[feature] = value; + return credentials; +} +__name(setCredentialFeature, "setCredentialFeature"); + +// src/submodules/client/setFeature.ts +function setFeature(context, feature, value) { + if (!context.__aws_sdk_context) { + context.__aws_sdk_context = { + features: {} + }; + } else if (!context.__aws_sdk_context.features) { + context.__aws_sdk_context.features = {}; + } + context.__aws_sdk_context.features[feature] = value; +} +__name(setFeature, "setFeature"); +// Annotate the CommonJS export names for ESM import in node: +0 && (module.exports = { + emitWarningIfUnsupportedVersion, + setCredentialFeature, + setFeature, + state +}); diff --git a/amplify/functions/deleteDocument/node_modules/@aws-sdk/core/dist-cjs/submodules/httpAuthSchemes/index.js b/amplify/functions/deleteDocument/node_modules/@aws-sdk/core/dist-cjs/submodules/httpAuthSchemes/index.js new file mode 100644 index 0000000..82db91e --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@aws-sdk/core/dist-cjs/submodules/httpAuthSchemes/index.js @@ -0,0 +1,382 @@ +"use strict"; +var __defProp = Object.defineProperty; +var __getOwnPropDesc = Object.getOwnPropertyDescriptor; +var __getOwnPropNames = Object.getOwnPropertyNames; +var __hasOwnProp = Object.prototype.hasOwnProperty; +var __name = (target, value) => __defProp(target, "name", { value, configurable: true }); +var __export = (target, all) => { + for (var name in all) + __defProp(target, name, { get: all[name], enumerable: true }); +}; +var __copyProps = (to, from, except, desc) => { + if (from && typeof from === "object" || typeof from === "function") { + for (let key of __getOwnPropNames(from)) + if (!__hasOwnProp.call(to, key) && 
key !== except) + __defProp(to, key, { get: () => from[key], enumerable: !(desc = __getOwnPropDesc(from, key)) || desc.enumerable }); + } + return to; +}; +var __toCommonJS = (mod) => __copyProps(__defProp({}, "__esModule", { value: true }), mod); + +// src/submodules/httpAuthSchemes/index.ts +var index_exports = {}; +__export(index_exports, { + AWSSDKSigV4Signer: () => AWSSDKSigV4Signer, + AwsSdkSigV4ASigner: () => AwsSdkSigV4ASigner, + AwsSdkSigV4Signer: () => AwsSdkSigV4Signer, + NODE_AUTH_SCHEME_PREFERENCE_OPTIONS: () => NODE_AUTH_SCHEME_PREFERENCE_OPTIONS, + NODE_SIGV4A_CONFIG_OPTIONS: () => NODE_SIGV4A_CONFIG_OPTIONS, + resolveAWSSDKSigV4Config: () => resolveAWSSDKSigV4Config, + resolveAwsSdkSigV4AConfig: () => resolveAwsSdkSigV4AConfig, + resolveAwsSdkSigV4Config: () => resolveAwsSdkSigV4Config, + validateSigningProperties: () => validateSigningProperties +}); +module.exports = __toCommonJS(index_exports); + +// src/submodules/httpAuthSchemes/aws_sdk/AwsSdkSigV4Signer.ts +var import_protocol_http2 = require("@smithy/protocol-http"); + +// src/submodules/httpAuthSchemes/utils/getDateHeader.ts +var import_protocol_http = require("@smithy/protocol-http"); +var getDateHeader = /* @__PURE__ */ __name((response) => import_protocol_http.HttpResponse.isInstance(response) ? response.headers?.date ?? 
response.headers?.Date : void 0, "getDateHeader"); + +// src/submodules/httpAuthSchemes/utils/getSkewCorrectedDate.ts +var getSkewCorrectedDate = /* @__PURE__ */ __name((systemClockOffset) => new Date(Date.now() + systemClockOffset), "getSkewCorrectedDate"); + +// src/submodules/httpAuthSchemes/utils/isClockSkewed.ts +var isClockSkewed = /* @__PURE__ */ __name((clockTime, systemClockOffset) => Math.abs(getSkewCorrectedDate(systemClockOffset).getTime() - clockTime) >= 3e5, "isClockSkewed"); + +// src/submodules/httpAuthSchemes/utils/getUpdatedSystemClockOffset.ts +var getUpdatedSystemClockOffset = /* @__PURE__ */ __name((clockTime, currentSystemClockOffset) => { + const clockTimeInMs = Date.parse(clockTime); + if (isClockSkewed(clockTimeInMs, currentSystemClockOffset)) { + return clockTimeInMs - Date.now(); + } + return currentSystemClockOffset; +}, "getUpdatedSystemClockOffset"); + +// src/submodules/httpAuthSchemes/aws_sdk/AwsSdkSigV4Signer.ts +var throwSigningPropertyError = /* @__PURE__ */ __name((name, property) => { + if (!property) { + throw new Error(`Property \`${name}\` is not resolved for AWS SDK SigV4Auth`); + } + return property; +}, "throwSigningPropertyError"); +var validateSigningProperties = /* @__PURE__ */ __name(async (signingProperties) => { + const context = throwSigningPropertyError( + "context", + signingProperties.context + ); + const config = throwSigningPropertyError("config", signingProperties.config); + const authScheme = context.endpointV2?.properties?.authSchemes?.[0]; + const signerFunction = throwSigningPropertyError( + "signer", + config.signer + ); + const signer = await signerFunction(authScheme); + const signingRegion = signingProperties?.signingRegion; + const signingRegionSet = signingProperties?.signingRegionSet; + const signingName = signingProperties?.signingName; + return { + config, + signer, + signingRegion, + signingRegionSet, + signingName + }; +}, "validateSigningProperties"); +var AwsSdkSigV4Signer = class { + static { 
+ __name(this, "AwsSdkSigV4Signer"); + } + async sign(httpRequest, identity, signingProperties) { + if (!import_protocol_http2.HttpRequest.isInstance(httpRequest)) { + throw new Error("The request is not an instance of `HttpRequest` and cannot be signed"); + } + const validatedProps = await validateSigningProperties(signingProperties); + const { config, signer } = validatedProps; + let { signingRegion, signingName } = validatedProps; + const handlerExecutionContext = signingProperties.context; + if (handlerExecutionContext?.authSchemes?.length ?? 0 > 1) { + const [first, second] = handlerExecutionContext.authSchemes; + if (first?.name === "sigv4a" && second?.name === "sigv4") { + signingRegion = second?.signingRegion ?? signingRegion; + signingName = second?.signingName ?? signingName; + } + } + const signedRequest = await signer.sign(httpRequest, { + signingDate: getSkewCorrectedDate(config.systemClockOffset), + signingRegion, + signingService: signingName + }); + return signedRequest; + } + errorHandler(signingProperties) { + return (error) => { + const serverTime = error.ServerTime ?? 
getDateHeader(error.$response); + if (serverTime) { + const config = throwSigningPropertyError("config", signingProperties.config); + const initialSystemClockOffset = config.systemClockOffset; + config.systemClockOffset = getUpdatedSystemClockOffset(serverTime, config.systemClockOffset); + const clockSkewCorrected = config.systemClockOffset !== initialSystemClockOffset; + if (clockSkewCorrected && error.$metadata) { + error.$metadata.clockSkewCorrected = true; + } + } + throw error; + }; + } + successHandler(httpResponse, signingProperties) { + const dateHeader = getDateHeader(httpResponse); + if (dateHeader) { + const config = throwSigningPropertyError("config", signingProperties.config); + config.systemClockOffset = getUpdatedSystemClockOffset(dateHeader, config.systemClockOffset); + } + } +}; +var AWSSDKSigV4Signer = AwsSdkSigV4Signer; + +// src/submodules/httpAuthSchemes/aws_sdk/AwsSdkSigV4ASigner.ts +var import_protocol_http3 = require("@smithy/protocol-http"); +var AwsSdkSigV4ASigner = class extends AwsSdkSigV4Signer { + static { + __name(this, "AwsSdkSigV4ASigner"); + } + async sign(httpRequest, identity, signingProperties) { + if (!import_protocol_http3.HttpRequest.isInstance(httpRequest)) { + throw new Error("The request is not an instance of `HttpRequest` and cannot be signed"); + } + const { config, signer, signingRegion, signingRegionSet, signingName } = await validateSigningProperties( + signingProperties + ); + const configResolvedSigningRegionSet = await config.sigv4aSigningRegionSet?.(); + const multiRegionOverride = (configResolvedSigningRegionSet ?? signingRegionSet ?? 
[signingRegion]).join(","); + const signedRequest = await signer.sign(httpRequest, { + signingDate: getSkewCorrectedDate(config.systemClockOffset), + signingRegion: multiRegionOverride, + signingService: signingName + }); + return signedRequest; + } +}; + +// src/submodules/httpAuthSchemes/utils/getArrayForCommaSeparatedString.ts +var getArrayForCommaSeparatedString = /* @__PURE__ */ __name((str) => typeof str === "string" && str.length > 0 ? str.split(",").map((item) => item.trim()) : [], "getArrayForCommaSeparatedString"); + +// src/submodules/httpAuthSchemes/aws_sdk/NODE_AUTH_SCHEME_PREFERENCE_OPTIONS.ts +var NODE_AUTH_SCHEME_PREFERENCE_ENV_KEY = "AWS_AUTH_SCHEME_PREFERENCE"; +var NODE_AUTH_SCHEME_PREFERENCE_CONFIG_KEY = "auth_scheme_preference"; +var NODE_AUTH_SCHEME_PREFERENCE_OPTIONS = { + /** + * Retrieves auth scheme preference from environment variables + * @param env - Node process environment object + * @returns Array of auth scheme strings if preference is set, undefined otherwise + */ + environmentVariableSelector: /* @__PURE__ */ __name((env) => { + if (!(NODE_AUTH_SCHEME_PREFERENCE_ENV_KEY in env)) return void 0; + return getArrayForCommaSeparatedString(env[NODE_AUTH_SCHEME_PREFERENCE_ENV_KEY]); + }, "environmentVariableSelector"), + /** + * Retrieves auth scheme preference from config file + * @param profile - Config profile object + * @returns Array of auth scheme strings if preference is set, undefined otherwise + */ + configFileSelector: /* @__PURE__ */ __name((profile) => { + if (!(NODE_AUTH_SCHEME_PREFERENCE_CONFIG_KEY in profile)) return void 0; + return getArrayForCommaSeparatedString(profile[NODE_AUTH_SCHEME_PREFERENCE_CONFIG_KEY]); + }, "configFileSelector"), + /** + * Default auth scheme preference if not specified in environment or config + */ + default: [] +}; + +// src/submodules/httpAuthSchemes/aws_sdk/resolveAwsSdkSigV4AConfig.ts +var import_core = require("@smithy/core"); +var import_property_provider = 
require("@smithy/property-provider"); +var resolveAwsSdkSigV4AConfig = /* @__PURE__ */ __name((config) => { + config.sigv4aSigningRegionSet = (0, import_core.normalizeProvider)(config.sigv4aSigningRegionSet); + return config; +}, "resolveAwsSdkSigV4AConfig"); +var NODE_SIGV4A_CONFIG_OPTIONS = { + environmentVariableSelector(env) { + if (env.AWS_SIGV4A_SIGNING_REGION_SET) { + return env.AWS_SIGV4A_SIGNING_REGION_SET.split(",").map((_) => _.trim()); + } + throw new import_property_provider.ProviderError("AWS_SIGV4A_SIGNING_REGION_SET not set in env.", { + tryNextLink: true + }); + }, + configFileSelector(profile) { + if (profile.sigv4a_signing_region_set) { + return (profile.sigv4a_signing_region_set ?? "").split(",").map((_) => _.trim()); + } + throw new import_property_provider.ProviderError("sigv4a_signing_region_set not set in profile.", { + tryNextLink: true + }); + }, + default: void 0 +}; + +// src/submodules/httpAuthSchemes/aws_sdk/resolveAwsSdkSigV4Config.ts +var import_client = require("@aws-sdk/core/client"); +var import_core2 = require("@smithy/core"); +var import_signature_v4 = require("@smithy/signature-v4"); +var resolveAwsSdkSigV4Config = /* @__PURE__ */ __name((config) => { + let inputCredentials = config.credentials; + let isUserSupplied = !!config.credentials; + let resolvedCredentials = void 0; + Object.defineProperty(config, "credentials", { + set(credentials) { + if (credentials && credentials !== inputCredentials && credentials !== resolvedCredentials) { + isUserSupplied = true; + } + inputCredentials = credentials; + const memoizedProvider = normalizeCredentialProvider(config, { + credentials: inputCredentials, + credentialDefaultProvider: config.credentialDefaultProvider + }); + const boundProvider = bindCallerConfig(config, memoizedProvider); + if (isUserSupplied && !boundProvider.attributed) { + resolvedCredentials = /* @__PURE__ */ __name(async (options) => boundProvider(options).then( + (creds) => (0, 
import_client.setCredentialFeature)(creds, "CREDENTIALS_CODE", "e") + ), "resolvedCredentials"); + resolvedCredentials.memoized = boundProvider.memoized; + resolvedCredentials.configBound = boundProvider.configBound; + resolvedCredentials.attributed = true; + } else { + resolvedCredentials = boundProvider; + } + }, + get() { + return resolvedCredentials; + }, + enumerable: true, + configurable: true + }); + config.credentials = inputCredentials; + const { + // Default for signingEscapePath + signingEscapePath = true, + // Default for systemClockOffset + systemClockOffset = config.systemClockOffset || 0, + // No default for sha256 since it is platform dependent + sha256 + } = config; + let signer; + if (config.signer) { + signer = (0, import_core2.normalizeProvider)(config.signer); + } else if (config.regionInfoProvider) { + signer = /* @__PURE__ */ __name(() => (0, import_core2.normalizeProvider)(config.region)().then( + async (region) => [ + await config.regionInfoProvider(region, { + useFipsEndpoint: await config.useFipsEndpoint(), + useDualstackEndpoint: await config.useDualstackEndpoint() + }) || {}, + region + ] + ).then(([regionInfo, region]) => { + const { signingRegion, signingService } = regionInfo; + config.signingRegion = config.signingRegion || signingRegion || region; + config.signingName = config.signingName || signingService || config.serviceId; + const params = { + ...config, + credentials: config.credentials, + region: config.signingRegion, + service: config.signingName, + sha256, + uriEscapePath: signingEscapePath + }; + const SignerCtor = config.signerConstructor || import_signature_v4.SignatureV4; + return new SignerCtor(params); + }), "signer"); + } else { + signer = /* @__PURE__ */ __name(async (authScheme) => { + authScheme = Object.assign( + {}, + { + name: "sigv4", + signingName: config.signingName || config.defaultSigningName, + signingRegion: await (0, import_core2.normalizeProvider)(config.region)(), + properties: {} + }, + authScheme + 
); + const signingRegion = authScheme.signingRegion; + const signingService = authScheme.signingName; + config.signingRegion = config.signingRegion || signingRegion; + config.signingName = config.signingName || signingService || config.serviceId; + const params = { + ...config, + credentials: config.credentials, + region: config.signingRegion, + service: config.signingName, + sha256, + uriEscapePath: signingEscapePath + }; + const SignerCtor = config.signerConstructor || import_signature_v4.SignatureV4; + return new SignerCtor(params); + }, "signer"); + } + const resolvedConfig = Object.assign(config, { + systemClockOffset, + signingEscapePath, + signer + }); + return resolvedConfig; +}, "resolveAwsSdkSigV4Config"); +var resolveAWSSDKSigV4Config = resolveAwsSdkSigV4Config; +function normalizeCredentialProvider(config, { + credentials, + credentialDefaultProvider +}) { + let credentialsProvider; + if (credentials) { + if (!credentials?.memoized) { + credentialsProvider = (0, import_core2.memoizeIdentityProvider)(credentials, import_core2.isIdentityExpired, import_core2.doesIdentityRequireRefresh); + } else { + credentialsProvider = credentials; + } + } else { + if (credentialDefaultProvider) { + credentialsProvider = (0, import_core2.normalizeProvider)( + credentialDefaultProvider( + Object.assign({}, config, { + parentClientConfig: config + }) + ) + ); + } else { + credentialsProvider = /* @__PURE__ */ __name(async () => { + throw new Error( + "@aws-sdk/core::resolveAwsSdkSigV4Config - `credentials` not provided and no credentialDefaultProvider was configured." 
+ ); + }, "credentialsProvider"); + } + } + credentialsProvider.memoized = true; + return credentialsProvider; +} +__name(normalizeCredentialProvider, "normalizeCredentialProvider"); +function bindCallerConfig(config, credentialsProvider) { + if (credentialsProvider.configBound) { + return credentialsProvider; + } + const fn = /* @__PURE__ */ __name(async (options) => credentialsProvider({ ...options, callerClientConfig: config }), "fn"); + fn.memoized = credentialsProvider.memoized; + fn.configBound = true; + return fn; +} +__name(bindCallerConfig, "bindCallerConfig"); +// Annotate the CommonJS export names for ESM import in node: +0 && (module.exports = { + AWSSDKSigV4Signer, + AwsSdkSigV4ASigner, + AwsSdkSigV4Signer, + NODE_AUTH_SCHEME_PREFERENCE_OPTIONS, + NODE_SIGV4A_CONFIG_OPTIONS, + resolveAWSSDKSigV4Config, + resolveAwsSdkSigV4AConfig, + resolveAwsSdkSigV4Config, + validateSigningProperties +}); diff --git a/amplify/functions/deleteDocument/node_modules/@aws-sdk/core/dist-cjs/submodules/protocols/index.js b/amplify/functions/deleteDocument/node_modules/@aws-sdk/core/dist-cjs/submodules/protocols/index.js new file mode 100644 index 0000000..d84c65b --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@aws-sdk/core/dist-cjs/submodules/protocols/index.js @@ -0,0 +1,227 @@ +"use strict"; +var __defProp = Object.defineProperty; +var __getOwnPropDesc = Object.getOwnPropertyDescriptor; +var __getOwnPropNames = Object.getOwnPropertyNames; +var __hasOwnProp = Object.prototype.hasOwnProperty; +var __name = (target, value) => __defProp(target, "name", { value, configurable: true }); +var __export = (target, all) => { + for (var name in all) + __defProp(target, name, { get: all[name], enumerable: true }); +}; +var __copyProps = (to, from, except, desc) => { + if (from && typeof from === "object" || typeof from === "function") { + for (let key of __getOwnPropNames(from)) + if (!__hasOwnProp.call(to, key) && key !== except) + __defProp(to, key, { get: () => 
from[key], enumerable: !(desc = __getOwnPropDesc(from, key)) || desc.enumerable }); + } + return to; +}; +var __toCommonJS = (mod) => __copyProps(__defProp({}, "__esModule", { value: true }), mod); + +// src/submodules/protocols/index.ts +var index_exports = {}; +__export(index_exports, { + _toBool: () => _toBool, + _toNum: () => _toNum, + _toStr: () => _toStr, + awsExpectUnion: () => awsExpectUnion, + loadRestJsonErrorCode: () => loadRestJsonErrorCode, + loadRestXmlErrorCode: () => loadRestXmlErrorCode, + parseJsonBody: () => parseJsonBody, + parseJsonErrorBody: () => parseJsonErrorBody, + parseXmlBody: () => parseXmlBody, + parseXmlErrorBody: () => parseXmlErrorBody +}); +module.exports = __toCommonJS(index_exports); + +// src/submodules/protocols/coercing-serializers.ts +var _toStr = /* @__PURE__ */ __name((val) => { + if (val == null) { + return val; + } + if (typeof val === "number" || typeof val === "bigint") { + const warning = new Error(`Received number ${val} where a string was expected.`); + warning.name = "Warning"; + console.warn(warning); + return String(val); + } + if (typeof val === "boolean") { + const warning = new Error(`Received boolean ${val} where a string was expected.`); + warning.name = "Warning"; + console.warn(warning); + return String(val); + } + return val; +}, "_toStr"); +var _toBool = /* @__PURE__ */ __name((val) => { + if (val == null) { + return val; + } + if (typeof val === "number") { + } + if (typeof val === "string") { + const lowercase = val.toLowerCase(); + if (val !== "" && lowercase !== "false" && lowercase !== "true") { + const warning = new Error(`Received string "${val}" where a boolean was expected.`); + warning.name = "Warning"; + console.warn(warning); + } + return val !== "" && lowercase !== "false"; + } + return val; +}, "_toBool"); +var _toNum = /* @__PURE__ */ __name((val) => { + if (val == null) { + return val; + } + if (typeof val === "boolean") { + } + if (typeof val === "string") { + const num = Number(val); + 
if (num.toString() !== val) { + const warning = new Error(`Received string "${val}" where a number was expected.`); + warning.name = "Warning"; + console.warn(warning); + return val; + } + return num; + } + return val; +}, "_toNum"); + +// src/submodules/protocols/json/awsExpectUnion.ts +var import_smithy_client = require("@smithy/smithy-client"); +var awsExpectUnion = /* @__PURE__ */ __name((value) => { + if (value == null) { + return void 0; + } + if (typeof value === "object" && "__type" in value) { + delete value.__type; + } + return (0, import_smithy_client.expectUnion)(value); +}, "awsExpectUnion"); + +// src/submodules/protocols/common.ts +var import_smithy_client2 = require("@smithy/smithy-client"); +var collectBodyString = /* @__PURE__ */ __name((streamBody, context) => (0, import_smithy_client2.collectBody)(streamBody, context).then((body) => context.utf8Encoder(body)), "collectBodyString"); + +// src/submodules/protocols/json/parseJsonBody.ts +var parseJsonBody = /* @__PURE__ */ __name((streamBody, context) => collectBodyString(streamBody, context).then((encoded) => { + if (encoded.length) { + try { + return JSON.parse(encoded); + } catch (e) { + if (e?.name === "SyntaxError") { + Object.defineProperty(e, "$responseBodyText", { + value: encoded + }); + } + throw e; + } + } + return {}; +}), "parseJsonBody"); +var parseJsonErrorBody = /* @__PURE__ */ __name(async (errorBody, context) => { + const value = await parseJsonBody(errorBody, context); + value.message = value.message ?? 
value.Message; + return value; +}, "parseJsonErrorBody"); +var loadRestJsonErrorCode = /* @__PURE__ */ __name((output, data) => { + const findKey = /* @__PURE__ */ __name((object, key) => Object.keys(object).find((k) => k.toLowerCase() === key.toLowerCase()), "findKey"); + const sanitizeErrorCode = /* @__PURE__ */ __name((rawValue) => { + let cleanValue = rawValue; + if (typeof cleanValue === "number") { + cleanValue = cleanValue.toString(); + } + if (cleanValue.indexOf(",") >= 0) { + cleanValue = cleanValue.split(",")[0]; + } + if (cleanValue.indexOf(":") >= 0) { + cleanValue = cleanValue.split(":")[0]; + } + if (cleanValue.indexOf("#") >= 0) { + cleanValue = cleanValue.split("#")[1]; + } + return cleanValue; + }, "sanitizeErrorCode"); + const headerKey = findKey(output.headers, "x-amzn-errortype"); + if (headerKey !== void 0) { + return sanitizeErrorCode(output.headers[headerKey]); + } + if (data.code !== void 0) { + return sanitizeErrorCode(data.code); + } + if (data["__type"] !== void 0) { + return sanitizeErrorCode(data["__type"]); + } +}, "loadRestJsonErrorCode"); + +// src/submodules/protocols/xml/parseXmlBody.ts +var import_smithy_client3 = require("@smithy/smithy-client"); +var import_fast_xml_parser = require("fast-xml-parser"); +var parseXmlBody = /* @__PURE__ */ __name((streamBody, context) => collectBodyString(streamBody, context).then((encoded) => { + if (encoded.length) { + const parser = new import_fast_xml_parser.XMLParser({ + attributeNamePrefix: "", + htmlEntities: true, + ignoreAttributes: false, + ignoreDeclaration: true, + parseTagValue: false, + trimValues: false, + tagValueProcessor: /* @__PURE__ */ __name((_, val) => val.trim() === "" && val.includes("\n") ? 
"" : void 0, "tagValueProcessor") + }); + parser.addEntity("#xD", "\r"); + parser.addEntity("#10", "\n"); + let parsedObj; + try { + parsedObj = parser.parse(encoded, true); + } catch (e) { + if (e && typeof e === "object") { + Object.defineProperty(e, "$responseBodyText", { + value: encoded + }); + } + throw e; + } + const textNodeName = "#text"; + const key = Object.keys(parsedObj)[0]; + const parsedObjToReturn = parsedObj[key]; + if (parsedObjToReturn[textNodeName]) { + parsedObjToReturn[key] = parsedObjToReturn[textNodeName]; + delete parsedObjToReturn[textNodeName]; + } + return (0, import_smithy_client3.getValueFromTextNode)(parsedObjToReturn); + } + return {}; +}), "parseXmlBody"); +var parseXmlErrorBody = /* @__PURE__ */ __name(async (errorBody, context) => { + const value = await parseXmlBody(errorBody, context); + if (value.Error) { + value.Error.message = value.Error.message ?? value.Error.Message; + } + return value; +}, "parseXmlErrorBody"); +var loadRestXmlErrorCode = /* @__PURE__ */ __name((output, data) => { + if (data?.Error?.Code !== void 0) { + return data.Error.Code; + } + if (data?.Code !== void 0) { + return data.Code; + } + if (output.statusCode == 404) { + return "NotFound"; + } +}, "loadRestXmlErrorCode"); +// Annotate the CommonJS export names for ESM import in node: +0 && (module.exports = { + _toBool, + _toNum, + _toStr, + awsExpectUnion, + loadRestJsonErrorCode, + loadRestXmlErrorCode, + parseJsonBody, + parseJsonErrorBody, + parseXmlBody, + parseXmlErrorBody +}); diff --git a/amplify/functions/deleteDocument/node_modules/@aws-sdk/core/dist-es/index.js b/amplify/functions/deleteDocument/node_modules/@aws-sdk/core/dist-es/index.js new file mode 100644 index 0000000..239de7a --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@aws-sdk/core/dist-es/index.js @@ -0,0 +1,3 @@ +export * from "./submodules/client/index"; +export * from "./submodules/httpAuthSchemes/index"; +export * from "./submodules/protocols/index"; diff --git 
a/amplify/functions/deleteDocument/node_modules/@aws-sdk/core/dist-es/submodules/account-id-endpoint/AccountIdEndpointModeConfigResolver.js b/amplify/functions/deleteDocument/node_modules/@aws-sdk/core/dist-es/submodules/account-id-endpoint/AccountIdEndpointModeConfigResolver.js new file mode 100644 index 0000000..cc0c55a --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@aws-sdk/core/dist-es/submodules/account-id-endpoint/AccountIdEndpointModeConfigResolver.js @@ -0,0 +1,15 @@ +import { normalizeProvider } from "@smithy/util-middleware"; +import { DEFAULT_ACCOUNT_ID_ENDPOINT_MODE, validateAccountIdEndpointMode, } from "./AccountIdEndpointModeConstants"; +export const resolveAccountIdEndpointModeConfig = (input) => { + const { accountIdEndpointMode } = input; + const accountIdEndpointModeProvider = normalizeProvider(accountIdEndpointMode ?? DEFAULT_ACCOUNT_ID_ENDPOINT_MODE); + return Object.assign(input, { + accountIdEndpointMode: async () => { + const accIdMode = await accountIdEndpointModeProvider(); + if (!validateAccountIdEndpointMode(accIdMode)) { + throw new Error(`Invalid value for accountIdEndpointMode: ${accIdMode}. 
Valid values are: "required", "preferred", "disabled".`); + } + return accIdMode; + }, + }); +}; diff --git a/amplify/functions/deleteDocument/node_modules/@aws-sdk/core/dist-es/submodules/account-id-endpoint/AccountIdEndpointModeConstants.js b/amplify/functions/deleteDocument/node_modules/@aws-sdk/core/dist-es/submodules/account-id-endpoint/AccountIdEndpointModeConstants.js new file mode 100644 index 0000000..e7a2ca0 --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@aws-sdk/core/dist-es/submodules/account-id-endpoint/AccountIdEndpointModeConstants.js @@ -0,0 +1,5 @@ +export const DEFAULT_ACCOUNT_ID_ENDPOINT_MODE = "preferred"; +export const ACCOUNT_ID_ENDPOINT_MODE_VALUES = ["disabled", "preferred", "required"]; +export function validateAccountIdEndpointMode(value) { + return ACCOUNT_ID_ENDPOINT_MODE_VALUES.includes(value); +} diff --git a/amplify/functions/deleteDocument/node_modules/@aws-sdk/core/dist-es/submodules/account-id-endpoint/NodeAccountIdEndpointModeConfigOptions.js b/amplify/functions/deleteDocument/node_modules/@aws-sdk/core/dist-es/submodules/account-id-endpoint/NodeAccountIdEndpointModeConfigOptions.js new file mode 100644 index 0000000..54832d5 --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@aws-sdk/core/dist-es/submodules/account-id-endpoint/NodeAccountIdEndpointModeConfigOptions.js @@ -0,0 +1,24 @@ +import { DEFAULT_ACCOUNT_ID_ENDPOINT_MODE, validateAccountIdEndpointMode, } from "./AccountIdEndpointModeConstants"; +const err = "Invalid AccountIdEndpointMode value"; +const _throw = (message) => { + throw new Error(message); +}; +export const ENV_ACCOUNT_ID_ENDPOINT_MODE = "AWS_ACCOUNT_ID_ENDPOINT_MODE"; +export const CONFIG_ACCOUNT_ID_ENDPOINT_MODE = "account_id_endpoint_mode"; +export const NODE_ACCOUNT_ID_ENDPOINT_MODE_CONFIG_OPTIONS = { + environmentVariableSelector: (env) => { + const value = env[ENV_ACCOUNT_ID_ENDPOINT_MODE]; + if (value && !validateAccountIdEndpointMode(value)) { + _throw(err); + } + 
return value; + }, + configFileSelector: (profile) => { + const value = profile[CONFIG_ACCOUNT_ID_ENDPOINT_MODE]; + if (value && !validateAccountIdEndpointMode(value)) { + _throw(err); + } + return value; + }, + default: DEFAULT_ACCOUNT_ID_ENDPOINT_MODE, +}; diff --git a/amplify/functions/deleteDocument/node_modules/@aws-sdk/core/dist-es/submodules/account-id-endpoint/index.js b/amplify/functions/deleteDocument/node_modules/@aws-sdk/core/dist-es/submodules/account-id-endpoint/index.js new file mode 100644 index 0000000..52af11d --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@aws-sdk/core/dist-es/submodules/account-id-endpoint/index.js @@ -0,0 +1,3 @@ +export * from "./AccountIdEndpointModeConfigResolver"; +export * from "./AccountIdEndpointModeConstants"; +export * from "./NodeAccountIdEndpointModeConfigOptions"; diff --git a/amplify/functions/deleteDocument/node_modules/@aws-sdk/core/dist-es/submodules/client/emitWarningIfUnsupportedVersion.js b/amplify/functions/deleteDocument/node_modules/@aws-sdk/core/dist-es/submodules/client/emitWarningIfUnsupportedVersion.js new file mode 100644 index 0000000..d1dab1d --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@aws-sdk/core/dist-es/submodules/client/emitWarningIfUnsupportedVersion.js @@ -0,0 +1,15 @@ +export const state = { + warningEmitted: false, +}; +export const emitWarningIfUnsupportedVersion = (version) => { + if (version && !state.warningEmitted && parseInt(version.substring(1, version.indexOf("."))) < 18) { + state.warningEmitted = true; + process.emitWarning(`NodeDeprecationWarning: The AWS SDK for JavaScript (v3) will +no longer support Node.js 16.x on January 6, 2025. + +To continue receiving updates to AWS services, bug fixes, and security +updates please upgrade to a supported Node.js LTS version. 
+ +More information can be found at: https://a.co/74kJMmI`); + } +}; diff --git a/amplify/functions/deleteDocument/node_modules/@aws-sdk/core/dist-es/submodules/client/index.js b/amplify/functions/deleteDocument/node_modules/@aws-sdk/core/dist-es/submodules/client/index.js new file mode 100644 index 0000000..1a2cc9d --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@aws-sdk/core/dist-es/submodules/client/index.js @@ -0,0 +1,3 @@ +export * from "./emitWarningIfUnsupportedVersion"; +export * from "./setCredentialFeature"; +export * from "./setFeature"; diff --git a/amplify/functions/deleteDocument/node_modules/@aws-sdk/core/dist-es/submodules/client/setCredentialFeature.js b/amplify/functions/deleteDocument/node_modules/@aws-sdk/core/dist-es/submodules/client/setCredentialFeature.js new file mode 100644 index 0000000..a489c40 --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@aws-sdk/core/dist-es/submodules/client/setCredentialFeature.js @@ -0,0 +1,7 @@ +export function setCredentialFeature(credentials, feature, value) { + if (!credentials.$source) { + credentials.$source = {}; + } + credentials.$source[feature] = value; + return credentials; +} diff --git a/amplify/functions/deleteDocument/node_modules/@aws-sdk/core/dist-es/submodules/client/setFeature.js b/amplify/functions/deleteDocument/node_modules/@aws-sdk/core/dist-es/submodules/client/setFeature.js new file mode 100644 index 0000000..2d8804b --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@aws-sdk/core/dist-es/submodules/client/setFeature.js @@ -0,0 +1,11 @@ +export function setFeature(context, feature, value) { + if (!context.__aws_sdk_context) { + context.__aws_sdk_context = { + features: {}, + }; + } + else if (!context.__aws_sdk_context.features) { + context.__aws_sdk_context.features = {}; + } + context.__aws_sdk_context.features[feature] = value; +} diff --git 
a/amplify/functions/deleteDocument/node_modules/@aws-sdk/core/dist-es/submodules/httpAuthSchemes/aws_sdk/AwsSdkSigV4ASigner.js b/amplify/functions/deleteDocument/node_modules/@aws-sdk/core/dist-es/submodules/httpAuthSchemes/aws_sdk/AwsSdkSigV4ASigner.js new file mode 100644 index 0000000..548fefb --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@aws-sdk/core/dist-es/submodules/httpAuthSchemes/aws_sdk/AwsSdkSigV4ASigner.js @@ -0,0 +1,20 @@ +import { HttpRequest } from "@smithy/protocol-http"; +import { getSkewCorrectedDate } from "../utils"; +import { AwsSdkSigV4Signer, validateSigningProperties } from "./AwsSdkSigV4Signer"; +export class AwsSdkSigV4ASigner extends AwsSdkSigV4Signer { + async sign(httpRequest, identity, signingProperties) { + if (!HttpRequest.isInstance(httpRequest)) { + throw new Error("The request is not an instance of `HttpRequest` and cannot be signed"); + } + const { config, signer, signingRegion, signingRegionSet, signingName } = await validateSigningProperties(signingProperties); + const configResolvedSigningRegionSet = await config.sigv4aSigningRegionSet?.(); + const multiRegionOverride = (configResolvedSigningRegionSet ?? + signingRegionSet ?? 
[signingRegion]).join(","); + const signedRequest = await signer.sign(httpRequest, { + signingDate: getSkewCorrectedDate(config.systemClockOffset), + signingRegion: multiRegionOverride, + signingService: signingName, + }); + return signedRequest; + } +} diff --git a/amplify/functions/deleteDocument/node_modules/@aws-sdk/core/dist-es/submodules/httpAuthSchemes/aws_sdk/AwsSdkSigV4Signer.js b/amplify/functions/deleteDocument/node_modules/@aws-sdk/core/dist-es/submodules/httpAuthSchemes/aws_sdk/AwsSdkSigV4Signer.js new file mode 100644 index 0000000..ee236cd --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@aws-sdk/core/dist-es/submodules/httpAuthSchemes/aws_sdk/AwsSdkSigV4Signer.js @@ -0,0 +1,72 @@ +import { HttpRequest } from "@smithy/protocol-http"; +import { getDateHeader, getSkewCorrectedDate, getUpdatedSystemClockOffset } from "../utils"; +const throwSigningPropertyError = (name, property) => { + if (!property) { + throw new Error(`Property \`${name}\` is not resolved for AWS SDK SigV4Auth`); + } + return property; +}; +export const validateSigningProperties = async (signingProperties) => { + const context = throwSigningPropertyError("context", signingProperties.context); + const config = throwSigningPropertyError("config", signingProperties.config); + const authScheme = context.endpointV2?.properties?.authSchemes?.[0]; + const signerFunction = throwSigningPropertyError("signer", config.signer); + const signer = await signerFunction(authScheme); + const signingRegion = signingProperties?.signingRegion; + const signingRegionSet = signingProperties?.signingRegionSet; + const signingName = signingProperties?.signingName; + return { + config, + signer, + signingRegion, + signingRegionSet, + signingName, + }; +}; +export class AwsSdkSigV4Signer { + async sign(httpRequest, identity, signingProperties) { + if (!HttpRequest.isInstance(httpRequest)) { + throw new Error("The request is not an instance of `HttpRequest` and cannot be signed"); + } + const 
validatedProps = await validateSigningProperties(signingProperties); + const { config, signer } = validatedProps; + let { signingRegion, signingName } = validatedProps; + const handlerExecutionContext = signingProperties.context; + if (handlerExecutionContext?.authSchemes?.length ?? 0 > 1) { + const [first, second] = handlerExecutionContext.authSchemes; + if (first?.name === "sigv4a" && second?.name === "sigv4") { + signingRegion = second?.signingRegion ?? signingRegion; + signingName = second?.signingName ?? signingName; + } + } + const signedRequest = await signer.sign(httpRequest, { + signingDate: getSkewCorrectedDate(config.systemClockOffset), + signingRegion: signingRegion, + signingService: signingName, + }); + return signedRequest; + } + errorHandler(signingProperties) { + return (error) => { + const serverTime = error.ServerTime ?? getDateHeader(error.$response); + if (serverTime) { + const config = throwSigningPropertyError("config", signingProperties.config); + const initialSystemClockOffset = config.systemClockOffset; + config.systemClockOffset = getUpdatedSystemClockOffset(serverTime, config.systemClockOffset); + const clockSkewCorrected = config.systemClockOffset !== initialSystemClockOffset; + if (clockSkewCorrected && error.$metadata) { + error.$metadata.clockSkewCorrected = true; + } + } + throw error; + }; + } + successHandler(httpResponse, signingProperties) { + const dateHeader = getDateHeader(httpResponse); + if (dateHeader) { + const config = throwSigningPropertyError("config", signingProperties.config); + config.systemClockOffset = getUpdatedSystemClockOffset(dateHeader, config.systemClockOffset); + } + } +} +export const AWSSDKSigV4Signer = AwsSdkSigV4Signer; diff --git a/amplify/functions/deleteDocument/node_modules/@aws-sdk/core/dist-es/submodules/httpAuthSchemes/aws_sdk/NODE_AUTH_SCHEME_PREFERENCE_OPTIONS.js 
b/amplify/functions/deleteDocument/node_modules/@aws-sdk/core/dist-es/submodules/httpAuthSchemes/aws_sdk/NODE_AUTH_SCHEME_PREFERENCE_OPTIONS.js new file mode 100644 index 0000000..17e3d2e --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@aws-sdk/core/dist-es/submodules/httpAuthSchemes/aws_sdk/NODE_AUTH_SCHEME_PREFERENCE_OPTIONS.js @@ -0,0 +1,16 @@ +import { getArrayForCommaSeparatedString } from "../utils/getArrayForCommaSeparatedString"; +const NODE_AUTH_SCHEME_PREFERENCE_ENV_KEY = "AWS_AUTH_SCHEME_PREFERENCE"; +const NODE_AUTH_SCHEME_PREFERENCE_CONFIG_KEY = "auth_scheme_preference"; +export const NODE_AUTH_SCHEME_PREFERENCE_OPTIONS = { + environmentVariableSelector: (env) => { + if (!(NODE_AUTH_SCHEME_PREFERENCE_ENV_KEY in env)) + return undefined; + return getArrayForCommaSeparatedString(env[NODE_AUTH_SCHEME_PREFERENCE_ENV_KEY]); + }, + configFileSelector: (profile) => { + if (!(NODE_AUTH_SCHEME_PREFERENCE_CONFIG_KEY in profile)) + return undefined; + return getArrayForCommaSeparatedString(profile[NODE_AUTH_SCHEME_PREFERENCE_CONFIG_KEY]); + }, + default: [], +}; diff --git a/amplify/functions/deleteDocument/node_modules/@aws-sdk/core/dist-es/submodules/httpAuthSchemes/aws_sdk/index.js b/amplify/functions/deleteDocument/node_modules/@aws-sdk/core/dist-es/submodules/httpAuthSchemes/aws_sdk/index.js new file mode 100644 index 0000000..4071225 --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@aws-sdk/core/dist-es/submodules/httpAuthSchemes/aws_sdk/index.js @@ -0,0 +1,5 @@ +export { AwsSdkSigV4Signer, AWSSDKSigV4Signer, validateSigningProperties } from "./AwsSdkSigV4Signer"; +export { AwsSdkSigV4ASigner } from "./AwsSdkSigV4ASigner"; +export * from "./NODE_AUTH_SCHEME_PREFERENCE_OPTIONS"; +export * from "./resolveAwsSdkSigV4AConfig"; +export * from "./resolveAwsSdkSigV4Config"; diff --git a/amplify/functions/deleteDocument/node_modules/@aws-sdk/core/dist-es/submodules/httpAuthSchemes/aws_sdk/resolveAwsSdkSigV4AConfig.js 
b/amplify/functions/deleteDocument/node_modules/@aws-sdk/core/dist-es/submodules/httpAuthSchemes/aws_sdk/resolveAwsSdkSigV4AConfig.js new file mode 100644 index 0000000..0e62ef0 --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@aws-sdk/core/dist-es/submodules/httpAuthSchemes/aws_sdk/resolveAwsSdkSigV4AConfig.js @@ -0,0 +1,25 @@ +import { normalizeProvider } from "@smithy/core"; +import { ProviderError } from "@smithy/property-provider"; +export const resolveAwsSdkSigV4AConfig = (config) => { + config.sigv4aSigningRegionSet = normalizeProvider(config.sigv4aSigningRegionSet); + return config; +}; +export const NODE_SIGV4A_CONFIG_OPTIONS = { + environmentVariableSelector(env) { + if (env.AWS_SIGV4A_SIGNING_REGION_SET) { + return env.AWS_SIGV4A_SIGNING_REGION_SET.split(",").map((_) => _.trim()); + } + throw new ProviderError("AWS_SIGV4A_SIGNING_REGION_SET not set in env.", { + tryNextLink: true, + }); + }, + configFileSelector(profile) { + if (profile.sigv4a_signing_region_set) { + return (profile.sigv4a_signing_region_set ?? 
"").split(",").map((_) => _.trim()); + } + throw new ProviderError("sigv4a_signing_region_set not set in profile.", { + tryNextLink: true, + }); + }, + default: undefined, +}; diff --git a/amplify/functions/deleteDocument/node_modules/@aws-sdk/core/dist-es/submodules/httpAuthSchemes/aws_sdk/resolveAwsSdkSigV4Config.js b/amplify/functions/deleteDocument/node_modules/@aws-sdk/core/dist-es/submodules/httpAuthSchemes/aws_sdk/resolveAwsSdkSigV4Config.js new file mode 100644 index 0000000..6da968d --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@aws-sdk/core/dist-es/submodules/httpAuthSchemes/aws_sdk/resolveAwsSdkSigV4Config.js @@ -0,0 +1,131 @@ +import { setCredentialFeature } from "@aws-sdk/core/client"; +import { doesIdentityRequireRefresh, isIdentityExpired, memoizeIdentityProvider, normalizeProvider, } from "@smithy/core"; +import { SignatureV4 } from "@smithy/signature-v4"; +export const resolveAwsSdkSigV4Config = (config) => { + let inputCredentials = config.credentials; + let isUserSupplied = !!config.credentials; + let resolvedCredentials = undefined; + Object.defineProperty(config, "credentials", { + set(credentials) { + if (credentials && credentials !== inputCredentials && credentials !== resolvedCredentials) { + isUserSupplied = true; + } + inputCredentials = credentials; + const memoizedProvider = normalizeCredentialProvider(config, { + credentials: inputCredentials, + credentialDefaultProvider: config.credentialDefaultProvider, + }); + const boundProvider = bindCallerConfig(config, memoizedProvider); + if (isUserSupplied && !boundProvider.attributed) { + resolvedCredentials = async (options) => boundProvider(options).then((creds) => setCredentialFeature(creds, "CREDENTIALS_CODE", "e")); + resolvedCredentials.memoized = boundProvider.memoized; + resolvedCredentials.configBound = boundProvider.configBound; + resolvedCredentials.attributed = true; + } + else { + resolvedCredentials = boundProvider; + } + }, + get() { + return 
resolvedCredentials; + }, + enumerable: true, + configurable: true, + }); + config.credentials = inputCredentials; + const { signingEscapePath = true, systemClockOffset = config.systemClockOffset || 0, sha256, } = config; + let signer; + if (config.signer) { + signer = normalizeProvider(config.signer); + } + else if (config.regionInfoProvider) { + signer = () => normalizeProvider(config.region)() + .then(async (region) => [ + (await config.regionInfoProvider(region, { + useFipsEndpoint: await config.useFipsEndpoint(), + useDualstackEndpoint: await config.useDualstackEndpoint(), + })) || {}, + region, + ]) + .then(([regionInfo, region]) => { + const { signingRegion, signingService } = regionInfo; + config.signingRegion = config.signingRegion || signingRegion || region; + config.signingName = config.signingName || signingService || config.serviceId; + const params = { + ...config, + credentials: config.credentials, + region: config.signingRegion, + service: config.signingName, + sha256, + uriEscapePath: signingEscapePath, + }; + const SignerCtor = config.signerConstructor || SignatureV4; + return new SignerCtor(params); + }); + } + else { + signer = async (authScheme) => { + authScheme = Object.assign({}, { + name: "sigv4", + signingName: config.signingName || config.defaultSigningName, + signingRegion: await normalizeProvider(config.region)(), + properties: {}, + }, authScheme); + const signingRegion = authScheme.signingRegion; + const signingService = authScheme.signingName; + config.signingRegion = config.signingRegion || signingRegion; + config.signingName = config.signingName || signingService || config.serviceId; + const params = { + ...config, + credentials: config.credentials, + region: config.signingRegion, + service: config.signingName, + sha256, + uriEscapePath: signingEscapePath, + }; + const SignerCtor = config.signerConstructor || SignatureV4; + return new SignerCtor(params); + }; + } + const resolvedConfig = Object.assign(config, { + systemClockOffset, 
+ signingEscapePath, + signer, + }); + return resolvedConfig; +}; +export const resolveAWSSDKSigV4Config = resolveAwsSdkSigV4Config; +function normalizeCredentialProvider(config, { credentials, credentialDefaultProvider, }) { + let credentialsProvider; + if (credentials) { + if (!credentials?.memoized) { + credentialsProvider = memoizeIdentityProvider(credentials, isIdentityExpired, doesIdentityRequireRefresh); + } + else { + credentialsProvider = credentials; + } + } + else { + if (credentialDefaultProvider) { + credentialsProvider = normalizeProvider(credentialDefaultProvider(Object.assign({}, config, { + parentClientConfig: config, + }))); + } + else { + credentialsProvider = async () => { + throw new Error("@aws-sdk/core::resolveAwsSdkSigV4Config - `credentials` not provided and no credentialDefaultProvider was configured."); + }; + } + } + credentialsProvider.memoized = true; + return credentialsProvider; +} +function bindCallerConfig(config, credentialsProvider) { + if (credentialsProvider.configBound) { + return credentialsProvider; + } + const fn = async (options) => credentialsProvider({ ...options, callerClientConfig: config }); + fn.memoized = credentialsProvider.memoized; + fn.configBound = true; + return fn; +} diff --git a/amplify/functions/deleteDocument/node_modules/@aws-sdk/core/dist-es/submodules/httpAuthSchemes/index.js b/amplify/functions/deleteDocument/node_modules/@aws-sdk/core/dist-es/submodules/httpAuthSchemes/index.js new file mode 100644 index 0000000..29d0c3b --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@aws-sdk/core/dist-es/submodules/httpAuthSchemes/index.js @@ -0,0 +1 @@ +export * from "./aws_sdk"; diff --git a/amplify/functions/deleteDocument/node_modules/@aws-sdk/core/dist-es/submodules/httpAuthSchemes/utils/getArrayForCommaSeparatedString.js b/amplify/functions/deleteDocument/node_modules/@aws-sdk/core/dist-es/submodules/httpAuthSchemes/utils/getArrayForCommaSeparatedString.js new file mode 100644 index 
0000000..aa60799 --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@aws-sdk/core/dist-es/submodules/httpAuthSchemes/utils/getArrayForCommaSeparatedString.js @@ -0,0 +1 @@ +export const getArrayForCommaSeparatedString = (str) => typeof str === "string" && str.length > 0 ? str.split(",").map((item) => item.trim()) : []; diff --git a/amplify/functions/deleteDocument/node_modules/@aws-sdk/core/dist-es/submodules/httpAuthSchemes/utils/getDateHeader.js b/amplify/functions/deleteDocument/node_modules/@aws-sdk/core/dist-es/submodules/httpAuthSchemes/utils/getDateHeader.js new file mode 100644 index 0000000..449c182 --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@aws-sdk/core/dist-es/submodules/httpAuthSchemes/utils/getDateHeader.js @@ -0,0 +1,2 @@ +import { HttpResponse } from "@smithy/protocol-http"; +export const getDateHeader = (response) => HttpResponse.isInstance(response) ? response.headers?.date ?? response.headers?.Date : undefined; diff --git a/amplify/functions/deleteDocument/node_modules/@aws-sdk/core/dist-es/submodules/httpAuthSchemes/utils/getSkewCorrectedDate.js b/amplify/functions/deleteDocument/node_modules/@aws-sdk/core/dist-es/submodules/httpAuthSchemes/utils/getSkewCorrectedDate.js new file mode 100644 index 0000000..6ee8036 --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@aws-sdk/core/dist-es/submodules/httpAuthSchemes/utils/getSkewCorrectedDate.js @@ -0,0 +1 @@ +export const getSkewCorrectedDate = (systemClockOffset) => new Date(Date.now() + systemClockOffset); diff --git a/amplify/functions/deleteDocument/node_modules/@aws-sdk/core/dist-es/submodules/httpAuthSchemes/utils/getUpdatedSystemClockOffset.js b/amplify/functions/deleteDocument/node_modules/@aws-sdk/core/dist-es/submodules/httpAuthSchemes/utils/getUpdatedSystemClockOffset.js new file mode 100644 index 0000000..859c41a --- /dev/null +++ 
b/amplify/functions/deleteDocument/node_modules/@aws-sdk/core/dist-es/submodules/httpAuthSchemes/utils/getUpdatedSystemClockOffset.js @@ -0,0 +1,8 @@ +import { isClockSkewed } from "./isClockSkewed"; +export const getUpdatedSystemClockOffset = (clockTime, currentSystemClockOffset) => { + const clockTimeInMs = Date.parse(clockTime); + if (isClockSkewed(clockTimeInMs, currentSystemClockOffset)) { + return clockTimeInMs - Date.now(); + } + return currentSystemClockOffset; +}; diff --git a/amplify/functions/deleteDocument/node_modules/@aws-sdk/core/dist-es/submodules/httpAuthSchemes/utils/index.js b/amplify/functions/deleteDocument/node_modules/@aws-sdk/core/dist-es/submodules/httpAuthSchemes/utils/index.js new file mode 100644 index 0000000..07c2195 --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@aws-sdk/core/dist-es/submodules/httpAuthSchemes/utils/index.js @@ -0,0 +1,3 @@ +export * from "./getDateHeader"; +export * from "./getSkewCorrectedDate"; +export * from "./getUpdatedSystemClockOffset"; diff --git a/amplify/functions/deleteDocument/node_modules/@aws-sdk/core/dist-es/submodules/httpAuthSchemes/utils/isClockSkewed.js b/amplify/functions/deleteDocument/node_modules/@aws-sdk/core/dist-es/submodules/httpAuthSchemes/utils/isClockSkewed.js new file mode 100644 index 0000000..086d7a8 --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@aws-sdk/core/dist-es/submodules/httpAuthSchemes/utils/isClockSkewed.js @@ -0,0 +1,2 @@ +import { getSkewCorrectedDate } from "./getSkewCorrectedDate"; +export const isClockSkewed = (clockTime, systemClockOffset) => Math.abs(getSkewCorrectedDate(systemClockOffset).getTime() - clockTime) >= 300000; diff --git a/amplify/functions/deleteDocument/node_modules/@aws-sdk/core/dist-es/submodules/protocols/coercing-serializers.js b/amplify/functions/deleteDocument/node_modules/@aws-sdk/core/dist-es/submodules/protocols/coercing-serializers.js new file mode 100644 index 0000000..fce893b --- /dev/null +++ 
b/amplify/functions/deleteDocument/node_modules/@aws-sdk/core/dist-es/submodules/protocols/coercing-serializers.js @@ -0,0 +1,53 @@ +export const _toStr = (val) => { + if (val == null) { + return val; + } + if (typeof val === "number" || typeof val === "bigint") { + const warning = new Error(`Received number ${val} where a string was expected.`); + warning.name = "Warning"; + console.warn(warning); + return String(val); + } + if (typeof val === "boolean") { + const warning = new Error(`Received boolean ${val} where a string was expected.`); + warning.name = "Warning"; + console.warn(warning); + return String(val); + } + return val; +}; +export const _toBool = (val) => { + if (val == null) { + return val; + } + if (typeof val === "number") { + } + if (typeof val === "string") { + const lowercase = val.toLowerCase(); + if (val !== "" && lowercase !== "false" && lowercase !== "true") { + const warning = new Error(`Received string "${val}" where a boolean was expected.`); + warning.name = "Warning"; + console.warn(warning); + } + return val !== "" && lowercase !== "false"; + } + return val; +}; +export const _toNum = (val) => { + if (val == null) { + return val; + } + if (typeof val === "boolean") { + } + if (typeof val === "string") { + const num = Number(val); + if (num.toString() !== val) { + const warning = new Error(`Received string "${val}" where a number was expected.`); + warning.name = "Warning"; + console.warn(warning); + return val; + } + return num; + } + return val; +}; diff --git a/amplify/functions/deleteDocument/node_modules/@aws-sdk/core/dist-es/submodules/protocols/common.js b/amplify/functions/deleteDocument/node_modules/@aws-sdk/core/dist-es/submodules/protocols/common.js new file mode 100644 index 0000000..4348b08 --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@aws-sdk/core/dist-es/submodules/protocols/common.js @@ -0,0 +1,2 @@ +import { collectBody } from "@smithy/smithy-client"; +export const collectBodyString = (streamBody, 
context) => collectBody(streamBody, context).then((body) => context.utf8Encoder(body)); diff --git a/amplify/functions/deleteDocument/node_modules/@aws-sdk/core/dist-es/submodules/protocols/index.js b/amplify/functions/deleteDocument/node_modules/@aws-sdk/core/dist-es/submodules/protocols/index.js new file mode 100644 index 0000000..09a6ac2 --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@aws-sdk/core/dist-es/submodules/protocols/index.js @@ -0,0 +1,4 @@ +export * from "./coercing-serializers"; +export * from "./json/awsExpectUnion"; +export * from "./json/parseJsonBody"; +export * from "./xml/parseXmlBody"; diff --git a/amplify/functions/deleteDocument/node_modules/@aws-sdk/core/dist-es/submodules/protocols/json/awsExpectUnion.js b/amplify/functions/deleteDocument/node_modules/@aws-sdk/core/dist-es/submodules/protocols/json/awsExpectUnion.js new file mode 100644 index 0000000..1c6cc32 --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@aws-sdk/core/dist-es/submodules/protocols/json/awsExpectUnion.js @@ -0,0 +1,10 @@ +import { expectUnion } from "@smithy/smithy-client"; +export const awsExpectUnion = (value) => { + if (value == null) { + return undefined; + } + if (typeof value === "object" && "__type" in value) { + delete value.__type; + } + return expectUnion(value); +}; diff --git a/amplify/functions/deleteDocument/node_modules/@aws-sdk/core/dist-es/submodules/protocols/json/parseJsonBody.js b/amplify/functions/deleteDocument/node_modules/@aws-sdk/core/dist-es/submodules/protocols/json/parseJsonBody.js new file mode 100644 index 0000000..d9c1564 --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@aws-sdk/core/dist-es/submodules/protocols/json/parseJsonBody.js @@ -0,0 +1,51 @@ +import { collectBodyString } from "../common"; +export const parseJsonBody = (streamBody, context) => collectBodyString(streamBody, context).then((encoded) => { + if (encoded.length) { + try { + return JSON.parse(encoded); + } + catch (e) { + 
if (e?.name === "SyntaxError") { + Object.defineProperty(e, "$responseBodyText", { + value: encoded, + }); + } + throw e; + } + } + return {}; +}); +export const parseJsonErrorBody = async (errorBody, context) => { + const value = await parseJsonBody(errorBody, context); + value.message = value.message ?? value.Message; + return value; +}; +export const loadRestJsonErrorCode = (output, data) => { + const findKey = (object, key) => Object.keys(object).find((k) => k.toLowerCase() === key.toLowerCase()); + const sanitizeErrorCode = (rawValue) => { + let cleanValue = rawValue; + if (typeof cleanValue === "number") { + cleanValue = cleanValue.toString(); + } + if (cleanValue.indexOf(",") >= 0) { + cleanValue = cleanValue.split(",")[0]; + } + if (cleanValue.indexOf(":") >= 0) { + cleanValue = cleanValue.split(":")[0]; + } + if (cleanValue.indexOf("#") >= 0) { + cleanValue = cleanValue.split("#")[1]; + } + return cleanValue; + }; + const headerKey = findKey(output.headers, "x-amzn-errortype"); + if (headerKey !== undefined) { + return sanitizeErrorCode(output.headers[headerKey]); + } + if (data.code !== undefined) { + return sanitizeErrorCode(data.code); + } + if (data["__type"] !== undefined) { + return sanitizeErrorCode(data["__type"]); + } +}; diff --git a/amplify/functions/deleteDocument/node_modules/@aws-sdk/core/dist-es/submodules/protocols/xml/parseXmlBody.js b/amplify/functions/deleteDocument/node_modules/@aws-sdk/core/dist-es/submodules/protocols/xml/parseXmlBody.js new file mode 100644 index 0000000..556a967 --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@aws-sdk/core/dist-es/submodules/protocols/xml/parseXmlBody.js @@ -0,0 +1,57 @@ +import { getValueFromTextNode } from "@smithy/smithy-client"; +import { XMLParser } from "fast-xml-parser"; +import { collectBodyString } from "../common"; +export const parseXmlBody = (streamBody, context) => collectBodyString(streamBody, context).then((encoded) => { + if (encoded.length) { + const parser = new 
XMLParser({ + attributeNamePrefix: "", + htmlEntities: true, + ignoreAttributes: false, + ignoreDeclaration: true, + parseTagValue: false, + trimValues: false, + tagValueProcessor: (_, val) => (val.trim() === "" && val.includes("\n") ? "" : undefined), + }); + parser.addEntity("#xD", "\r"); + parser.addEntity("#10", "\n"); + let parsedObj; + try { + parsedObj = parser.parse(encoded, true); + } + catch (e) { + if (e && typeof e === "object") { + Object.defineProperty(e, "$responseBodyText", { + value: encoded, + }); + } + throw e; + } + const textNodeName = "#text"; + const key = Object.keys(parsedObj)[0]; + const parsedObjToReturn = parsedObj[key]; + if (parsedObjToReturn[textNodeName]) { + parsedObjToReturn[key] = parsedObjToReturn[textNodeName]; + delete parsedObjToReturn[textNodeName]; + } + return getValueFromTextNode(parsedObjToReturn); + } + return {}; +}); +export const parseXmlErrorBody = async (errorBody, context) => { + const value = await parseXmlBody(errorBody, context); + if (value.Error) { + value.Error.message = value.Error.message ?? 
value.Error.Message; + } + return value; +}; +export const loadRestXmlErrorCode = (output, data) => { + if (data?.Error?.Code !== undefined) { + return data.Error.Code; + } + if (data?.Code !== undefined) { + return data.Code; + } + if (output.statusCode == 404) { + return "NotFound"; + } +}; diff --git a/amplify/functions/deleteDocument/node_modules/@aws-sdk/core/dist-types/api-extractor-type-index.d.ts b/amplify/functions/deleteDocument/node_modules/@aws-sdk/core/dist-types/api-extractor-type-index.d.ts new file mode 100644 index 0000000..e83f927 --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@aws-sdk/core/dist-types/api-extractor-type-index.d.ts @@ -0,0 +1,5 @@ +export * from "./index"; +export * from "./submodules/account-id-endpoint/index"; +export * from "./submodules/client/index"; +export * from "./submodules/httpAuthSchemes/index"; +export * from "./submodules/protocols/index"; diff --git a/amplify/functions/deleteDocument/node_modules/@aws-sdk/core/dist-types/index.d.ts b/amplify/functions/deleteDocument/node_modules/@aws-sdk/core/dist-types/index.d.ts new file mode 100644 index 0000000..5d51cdb --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@aws-sdk/core/dist-types/index.d.ts @@ -0,0 +1,22 @@ +/** + * Submodules annotated with "Legacy" are from prior to the submodule system. + * They are exported from the package's root index to preserve backwards compatibility. + * + * New development should go in a proper submodule and not be exported from the root index. + */ +/** + * Legacy submodule. + */ +export * from "./submodules/client/index"; +/** + * Legacy submodule. + */ +export * from "./submodules/httpAuthSchemes/index"; +/** + * Legacy submodule. + */ +export * from "./submodules/protocols/index"; +/** + * Warning: do not export any additional submodules from the root of this package. See readme.md for + * guide on developing submodules. 
+ */ diff --git a/amplify/functions/deleteDocument/node_modules/@aws-sdk/core/dist-types/submodules/account-id-endpoint/AccountIdEndpointModeConfigResolver.d.ts b/amplify/functions/deleteDocument/node_modules/@aws-sdk/core/dist-types/submodules/account-id-endpoint/AccountIdEndpointModeConfigResolver.d.ts new file mode 100644 index 0000000..bf612a2 --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@aws-sdk/core/dist-types/submodules/account-id-endpoint/AccountIdEndpointModeConfigResolver.d.ts @@ -0,0 +1,27 @@ +import { Provider } from "@smithy/types"; +import { AccountIdEndpointMode } from "./AccountIdEndpointModeConstants"; +/** + * @public + */ +export interface AccountIdEndpointModeInputConfig { + /** + * The account ID endpoint mode to use. + */ + accountIdEndpointMode?: AccountIdEndpointMode | Provider; +} +/** + * @internal + */ +interface PreviouslyResolved { +} +/** + * @internal + */ +export interface AccountIdEndpointModeResolvedConfig { + accountIdEndpointMode: Provider; +} +/** + * @internal + */ +export declare const resolveAccountIdEndpointModeConfig: (input: T & AccountIdEndpointModeInputConfig & PreviouslyResolved) => T & AccountIdEndpointModeResolvedConfig; +export {}; diff --git a/amplify/functions/deleteDocument/node_modules/@aws-sdk/core/dist-types/submodules/account-id-endpoint/AccountIdEndpointModeConstants.d.ts b/amplify/functions/deleteDocument/node_modules/@aws-sdk/core/dist-types/submodules/account-id-endpoint/AccountIdEndpointModeConstants.d.ts new file mode 100644 index 0000000..640a747 --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@aws-sdk/core/dist-types/submodules/account-id-endpoint/AccountIdEndpointModeConstants.d.ts @@ -0,0 +1,16 @@ +/** + * @public + */ +export type AccountIdEndpointMode = "disabled" | "preferred" | "required"; +/** + * @internal + */ +export declare const DEFAULT_ACCOUNT_ID_ENDPOINT_MODE = "preferred"; +/** + * @internal + */ +export declare const 
ACCOUNT_ID_ENDPOINT_MODE_VALUES: AccountIdEndpointMode[]; +/** + * @internal + */ +export declare function validateAccountIdEndpointMode(value: any): value is AccountIdEndpointMode; diff --git a/amplify/functions/deleteDocument/node_modules/@aws-sdk/core/dist-types/submodules/account-id-endpoint/NodeAccountIdEndpointModeConfigOptions.d.ts b/amplify/functions/deleteDocument/node_modules/@aws-sdk/core/dist-types/submodules/account-id-endpoint/NodeAccountIdEndpointModeConfigOptions.d.ts new file mode 100644 index 0000000..96b8059 --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@aws-sdk/core/dist-types/submodules/account-id-endpoint/NodeAccountIdEndpointModeConfigOptions.d.ts @@ -0,0 +1,14 @@ +import { LoadedConfigSelectors } from "@smithy/node-config-provider"; +import { AccountIdEndpointMode } from "./AccountIdEndpointModeConstants"; +/** + * @internal + */ +export declare const ENV_ACCOUNT_ID_ENDPOINT_MODE = "AWS_ACCOUNT_ID_ENDPOINT_MODE"; +/** + * @internal + */ +export declare const CONFIG_ACCOUNT_ID_ENDPOINT_MODE = "account_id_endpoint_mode"; +/** + * @internal + */ +export declare const NODE_ACCOUNT_ID_ENDPOINT_MODE_CONFIG_OPTIONS: LoadedConfigSelectors; diff --git a/amplify/functions/deleteDocument/node_modules/@aws-sdk/core/dist-types/submodules/account-id-endpoint/index.d.ts b/amplify/functions/deleteDocument/node_modules/@aws-sdk/core/dist-types/submodules/account-id-endpoint/index.d.ts new file mode 100644 index 0000000..52af11d --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@aws-sdk/core/dist-types/submodules/account-id-endpoint/index.d.ts @@ -0,0 +1,3 @@ +export * from "./AccountIdEndpointModeConfigResolver"; +export * from "./AccountIdEndpointModeConstants"; +export * from "./NodeAccountIdEndpointModeConfigOptions"; diff --git a/amplify/functions/deleteDocument/node_modules/@aws-sdk/core/dist-types/submodules/client/emitWarningIfUnsupportedVersion.d.ts 
b/amplify/functions/deleteDocument/node_modules/@aws-sdk/core/dist-types/submodules/client/emitWarningIfUnsupportedVersion.d.ts new file mode 100644 index 0000000..d97bc8c --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@aws-sdk/core/dist-types/submodules/client/emitWarningIfUnsupportedVersion.d.ts @@ -0,0 +1,12 @@ +export declare const state: { + warningEmitted: boolean; +}; +/** + * @internal + * + * Emits warning if the provided Node.js version string is + * pending deprecation by AWS SDK JSv3. + * + * @param version - The Node.js version string. + */ +export declare const emitWarningIfUnsupportedVersion: (version: string) => void; diff --git a/amplify/functions/deleteDocument/node_modules/@aws-sdk/core/dist-types/submodules/client/index.d.ts b/amplify/functions/deleteDocument/node_modules/@aws-sdk/core/dist-types/submodules/client/index.d.ts new file mode 100644 index 0000000..1a2cc9d --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@aws-sdk/core/dist-types/submodules/client/index.d.ts @@ -0,0 +1,3 @@ +export * from "./emitWarningIfUnsupportedVersion"; +export * from "./setCredentialFeature"; +export * from "./setFeature"; diff --git a/amplify/functions/deleteDocument/node_modules/@aws-sdk/core/dist-types/submodules/client/setCredentialFeature.d.ts b/amplify/functions/deleteDocument/node_modules/@aws-sdk/core/dist-types/submodules/client/setCredentialFeature.d.ts new file mode 100644 index 0000000..b3b4a68 --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@aws-sdk/core/dist-types/submodules/client/setCredentialFeature.d.ts @@ -0,0 +1,7 @@ +import type { AttributedAwsCredentialIdentity, AwsSdkCredentialsFeatures } from "@aws-sdk/types"; +/** + * @internal + * + * @returns the credentials with source feature attribution. 
+ */ +export declare function setCredentialFeature(credentials: AttributedAwsCredentialIdentity, feature: F, value: AwsSdkCredentialsFeatures[F]): AttributedAwsCredentialIdentity; diff --git a/amplify/functions/deleteDocument/node_modules/@aws-sdk/core/dist-types/submodules/client/setFeature.d.ts b/amplify/functions/deleteDocument/node_modules/@aws-sdk/core/dist-types/submodules/client/setFeature.d.ts new file mode 100644 index 0000000..93458bf --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@aws-sdk/core/dist-types/submodules/client/setFeature.d.ts @@ -0,0 +1,12 @@ +import type { AwsHandlerExecutionContext, AwsSdkFeatures } from "@aws-sdk/types"; +/** + * @internal + * Indicates to the request context that a given feature is active. + * + * @param context - handler execution context. + * @param feature - readable name of feature. + * @param value - encoding value of feature. This is required because the + * specification asks the SDK not to include a runtime lookup of all + * the feature identifiers. + */ +export declare function setFeature(context: AwsHandlerExecutionContext, feature: F, value: AwsSdkFeatures[F]): void; diff --git a/amplify/functions/deleteDocument/node_modules/@aws-sdk/core/dist-types/submodules/httpAuthSchemes/aws_sdk/AwsSdkSigV4ASigner.d.ts b/amplify/functions/deleteDocument/node_modules/@aws-sdk/core/dist-types/submodules/httpAuthSchemes/aws_sdk/AwsSdkSigV4ASigner.d.ts new file mode 100644 index 0000000..051b17c --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@aws-sdk/core/dist-types/submodules/httpAuthSchemes/aws_sdk/AwsSdkSigV4ASigner.d.ts @@ -0,0 +1,10 @@ +import { AwsCredentialIdentity, HttpRequest as IHttpRequest } from "@smithy/types"; +import { AwsSdkSigV4Signer } from "./AwsSdkSigV4Signer"; +/** + * @internal + * Note: this is not a signing algorithm implementation. The sign method + * accepts the real signer as an input parameter. 
+ */ +export declare class AwsSdkSigV4ASigner extends AwsSdkSigV4Signer { + sign(httpRequest: IHttpRequest, identity: AwsCredentialIdentity, signingProperties: Record): Promise; +} diff --git a/amplify/functions/deleteDocument/node_modules/@aws-sdk/core/dist-types/submodules/httpAuthSchemes/aws_sdk/AwsSdkSigV4Signer.d.ts b/amplify/functions/deleteDocument/node_modules/@aws-sdk/core/dist-types/submodules/httpAuthSchemes/aws_sdk/AwsSdkSigV4Signer.d.ts new file mode 100644 index 0000000..7c1b550 --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@aws-sdk/core/dist-types/submodules/httpAuthSchemes/aws_sdk/AwsSdkSigV4Signer.d.ts @@ -0,0 +1,43 @@ +import { AuthScheme, AwsCredentialIdentity, HttpRequest as IHttpRequest, HttpResponse, HttpSigner, RequestSigner } from "@smithy/types"; +import { AwsSdkSigV4AAuthResolvedConfig } from "./resolveAwsSdkSigV4AConfig"; +/** + * @internal + */ +interface AwsSdkSigV4Config extends AwsSdkSigV4AAuthResolvedConfig { + systemClockOffset: number; + signer: (authScheme?: AuthScheme) => Promise; +} +/** + * @internal + */ +interface AwsSdkSigV4AuthSigningProperties { + config: AwsSdkSigV4Config; + signer: RequestSigner; + signingRegion?: string; + signingRegionSet?: string[]; + signingName?: string; +} +/** + * @internal + */ +export declare const validateSigningProperties: (signingProperties: Record) => Promise; +/** + * Note: this is not a signing algorithm implementation. The sign method + * accepts the real signer as an input parameter. 
+ * @internal + */ +export declare class AwsSdkSigV4Signer implements HttpSigner { + sign(httpRequest: IHttpRequest, + /** + * `identity` is bound in {@link resolveAWSSDKSigV4Config} + */ + identity: AwsCredentialIdentity, signingProperties: Record): Promise; + errorHandler(signingProperties: Record): (error: Error) => never; + successHandler(httpResponse: HttpResponse | unknown, signingProperties: Record): void; +} +/** + * @internal + * @deprecated renamed to {@link AwsSdkSigV4Signer} + */ +export declare const AWSSDKSigV4Signer: typeof AwsSdkSigV4Signer; +export {}; diff --git a/amplify/functions/deleteDocument/node_modules/@aws-sdk/core/dist-types/submodules/httpAuthSchemes/aws_sdk/NODE_AUTH_SCHEME_PREFERENCE_OPTIONS.d.ts b/amplify/functions/deleteDocument/node_modules/@aws-sdk/core/dist-types/submodules/httpAuthSchemes/aws_sdk/NODE_AUTH_SCHEME_PREFERENCE_OPTIONS.d.ts new file mode 100644 index 0000000..edf3162 --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@aws-sdk/core/dist-types/submodules/httpAuthSchemes/aws_sdk/NODE_AUTH_SCHEME_PREFERENCE_OPTIONS.d.ts @@ -0,0 +1,5 @@ +import { LoadedConfigSelectors } from "@smithy/node-config-provider"; +/** + * @public + */ +export declare const NODE_AUTH_SCHEME_PREFERENCE_OPTIONS: LoadedConfigSelectors; diff --git a/amplify/functions/deleteDocument/node_modules/@aws-sdk/core/dist-types/submodules/httpAuthSchemes/aws_sdk/index.d.ts b/amplify/functions/deleteDocument/node_modules/@aws-sdk/core/dist-types/submodules/httpAuthSchemes/aws_sdk/index.d.ts new file mode 100644 index 0000000..4071225 --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@aws-sdk/core/dist-types/submodules/httpAuthSchemes/aws_sdk/index.d.ts @@ -0,0 +1,5 @@ +export { AwsSdkSigV4Signer, AWSSDKSigV4Signer, validateSigningProperties } from "./AwsSdkSigV4Signer"; +export { AwsSdkSigV4ASigner } from "./AwsSdkSigV4ASigner"; +export * from "./NODE_AUTH_SCHEME_PREFERENCE_OPTIONS"; +export * from "./resolveAwsSdkSigV4AConfig"; 
+export * from "./resolveAwsSdkSigV4Config"; diff --git a/amplify/functions/deleteDocument/node_modules/@aws-sdk/core/dist-types/submodules/httpAuthSchemes/aws_sdk/resolveAwsSdkSigV4AConfig.d.ts b/amplify/functions/deleteDocument/node_modules/@aws-sdk/core/dist-types/submodules/httpAuthSchemes/aws_sdk/resolveAwsSdkSigV4AConfig.d.ts new file mode 100644 index 0000000..f741625 --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@aws-sdk/core/dist-types/submodules/httpAuthSchemes/aws_sdk/resolveAwsSdkSigV4AConfig.d.ts @@ -0,0 +1,38 @@ +import { LoadedConfigSelectors } from "@smithy/node-config-provider"; +import { Provider } from "@smithy/types"; +/** + * @public + */ +export interface AwsSdkSigV4AAuthInputConfig { + /** + * This option will override the AWS sigv4a + * signing regionSet from any other source. + * + * The lookup order is: + * 1. this value + * 2. configuration file value of sigv4a_signing_region_set. + * 3. environment value of AWS_SIGV4A_SIGNING_REGION_SET. + * 4. signingRegionSet given by endpoint resolution. + * 5. the singular region of the SDK client. 
+ */ + sigv4aSigningRegionSet?: string[] | undefined | Provider; +} +/** + * @internal + */ +export interface AwsSdkSigV4APreviouslyResolved { +} +/** + * @internal + */ +export interface AwsSdkSigV4AAuthResolvedConfig { + sigv4aSigningRegionSet: Provider; +} +/** + * @internal + */ +export declare const resolveAwsSdkSigV4AConfig: (config: T & AwsSdkSigV4AAuthInputConfig & AwsSdkSigV4APreviouslyResolved) => T & AwsSdkSigV4AAuthResolvedConfig; +/** + * @internal + */ +export declare const NODE_SIGV4A_CONFIG_OPTIONS: LoadedConfigSelectors; diff --git a/amplify/functions/deleteDocument/node_modules/@aws-sdk/core/dist-types/submodules/httpAuthSchemes/aws_sdk/resolveAwsSdkSigV4Config.d.ts b/amplify/functions/deleteDocument/node_modules/@aws-sdk/core/dist-types/submodules/httpAuthSchemes/aws_sdk/resolveAwsSdkSigV4Config.d.ts new file mode 100644 index 0000000..cf42eec --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@aws-sdk/core/dist-types/submodules/httpAuthSchemes/aws_sdk/resolveAwsSdkSigV4Config.d.ts @@ -0,0 +1,117 @@ +import type { MergeFunctions } from "@aws-sdk/types"; +import { SignatureV4CryptoInit, SignatureV4Init } from "@smithy/signature-v4"; +import { AuthScheme, AwsCredentialIdentity, AwsCredentialIdentityProvider, ChecksumConstructor, HashConstructor, MemoizedProvider, Provider, RegionInfoProvider, RequestSigner } from "@smithy/types"; +/** + * @public + */ +export interface AwsSdkSigV4AuthInputConfig { + /** + * The credentials used to sign requests. + */ + credentials?: AwsCredentialIdentity | AwsCredentialIdentityProvider; + /** + * The signer to use when signing requests. + */ + signer?: RequestSigner | ((authScheme?: AuthScheme) => Promise); + /** + * Whether to escape request path when signing the request. + */ + signingEscapePath?: boolean; + /** + * An offset value in milliseconds to apply to all signing times. + */ + systemClockOffset?: number; + /** + * The region where you want to sign your request against. 
This + * can be different to the region in the endpoint. + */ + signingRegion?: string; + /** + * The injectable SigV4-compatible signer class constructor. If not supplied, + * regular SignatureV4 constructor will be used. + * + * @internal + */ + signerConstructor?: new (options: SignatureV4Init & SignatureV4CryptoInit) => RequestSigner; +} +/** + * Used to indicate whether a credential provider function was memoized by this resolver. + * @public + */ +export type AwsSdkSigV4Memoized = { + /** + * The credential provider has been memoized by the AWS SDK SigV4 config resolver. + */ + memoized?: boolean; + /** + * The credential provider has the caller client config object bound to its arguments. + */ + configBound?: boolean; + /** + * Function is wrapped with attribution transform. + */ + attributed?: boolean; +}; +/** + * @internal + */ +export interface AwsSdkSigV4PreviouslyResolved { + credentialDefaultProvider?: (input: any) => MemoizedProvider; + region: string | Provider; + sha256: ChecksumConstructor | HashConstructor; + signingName?: string; + regionInfoProvider?: RegionInfoProvider; + defaultSigningName?: string; + serviceId: string; + useFipsEndpoint: Provider; + useDualstackEndpoint: Provider; +} +/** + * @internal + */ +export interface AwsSdkSigV4AuthResolvedConfig { + /** + * Resolved value for input config {@link AwsSdkSigV4AuthInputConfig.credentials} + * This provider MAY memoize the loaded credentials for certain period. 
+ */ + credentials: MergeFunctions> & AwsSdkSigV4Memoized; + /** + * Resolved value for input config {@link AwsSdkSigV4AuthInputConfig.signer} + */ + signer: (authScheme?: AuthScheme) => Promise; + /** + * Resolved value for input config {@link AwsSdkSigV4AuthInputConfig.signingEscapePath} + */ + signingEscapePath: boolean; + /** + * Resolved value for input config {@link AwsSdkSigV4AuthInputConfig.systemClockOffset} + */ + systemClockOffset: number; +} +/** + * @internal + */ +export declare const resolveAwsSdkSigV4Config: (config: T & AwsSdkSigV4AuthInputConfig & AwsSdkSigV4PreviouslyResolved) => T & AwsSdkSigV4AuthResolvedConfig; +/** + * @internal + * @deprecated renamed to {@link AwsSdkSigV4AuthInputConfig} + */ +export interface AWSSDKSigV4AuthInputConfig extends AwsSdkSigV4AuthInputConfig { +} +/** + * @internal + * @deprecated renamed to {@link AwsSdkSigV4PreviouslyResolved} + */ +export interface AWSSDKSigV4PreviouslyResolved extends AwsSdkSigV4PreviouslyResolved { +} +/** + * @internal + * @deprecated renamed to {@link AwsSdkSigV4AuthResolvedConfig} + */ +export interface AWSSDKSigV4AuthResolvedConfig extends AwsSdkSigV4AuthResolvedConfig { +} +/** + * @internal + * @deprecated renamed to {@link resolveAwsSdkSigV4Config} + */ +export declare const resolveAWSSDKSigV4Config: (config: T & AwsSdkSigV4AuthInputConfig & AwsSdkSigV4PreviouslyResolved) => T & AwsSdkSigV4AuthResolvedConfig; diff --git a/amplify/functions/deleteDocument/node_modules/@aws-sdk/core/dist-types/submodules/httpAuthSchemes/index.d.ts b/amplify/functions/deleteDocument/node_modules/@aws-sdk/core/dist-types/submodules/httpAuthSchemes/index.d.ts new file mode 100644 index 0000000..29d0c3b --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@aws-sdk/core/dist-types/submodules/httpAuthSchemes/index.d.ts @@ -0,0 +1 @@ +export * from "./aws_sdk"; diff --git 
a/amplify/functions/deleteDocument/node_modules/@aws-sdk/core/dist-types/submodules/httpAuthSchemes/utils/getArrayForCommaSeparatedString.d.ts b/amplify/functions/deleteDocument/node_modules/@aws-sdk/core/dist-types/submodules/httpAuthSchemes/utils/getArrayForCommaSeparatedString.d.ts new file mode 100644 index 0000000..823921b --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@aws-sdk/core/dist-types/submodules/httpAuthSchemes/utils/getArrayForCommaSeparatedString.d.ts @@ -0,0 +1,8 @@ +/** + * Converts a comma-separated string into an array of trimmed strings + * @param str The comma-separated input string to split + * @returns Array of trimmed strings split from the input + * + * @internal + */ +export declare const getArrayForCommaSeparatedString: (str: string) => string[]; diff --git a/amplify/functions/deleteDocument/node_modules/@aws-sdk/core/dist-types/submodules/httpAuthSchemes/utils/getDateHeader.d.ts b/amplify/functions/deleteDocument/node_modules/@aws-sdk/core/dist-types/submodules/httpAuthSchemes/utils/getDateHeader.d.ts new file mode 100644 index 0000000..2c9157b --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@aws-sdk/core/dist-types/submodules/httpAuthSchemes/utils/getDateHeader.d.ts @@ -0,0 +1,4 @@ +/** + * @internal + */ +export declare const getDateHeader: (response: unknown) => string | undefined; diff --git a/amplify/functions/deleteDocument/node_modules/@aws-sdk/core/dist-types/submodules/httpAuthSchemes/utils/getSkewCorrectedDate.d.ts b/amplify/functions/deleteDocument/node_modules/@aws-sdk/core/dist-types/submodules/httpAuthSchemes/utils/getSkewCorrectedDate.d.ts new file mode 100644 index 0000000..4b72690 --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@aws-sdk/core/dist-types/submodules/httpAuthSchemes/utils/getSkewCorrectedDate.d.ts @@ -0,0 +1,8 @@ +/** + * @internal + * + * Returns a date that is corrected for clock skew. 
+ * + * @param systemClockOffset The offset of the system clock in milliseconds. + */ +export declare const getSkewCorrectedDate: (systemClockOffset: number) => Date; diff --git a/amplify/functions/deleteDocument/node_modules/@aws-sdk/core/dist-types/submodules/httpAuthSchemes/utils/getUpdatedSystemClockOffset.d.ts b/amplify/functions/deleteDocument/node_modules/@aws-sdk/core/dist-types/submodules/httpAuthSchemes/utils/getUpdatedSystemClockOffset.d.ts new file mode 100644 index 0000000..2d554b8 --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@aws-sdk/core/dist-types/submodules/httpAuthSchemes/utils/getUpdatedSystemClockOffset.d.ts @@ -0,0 +1,10 @@ +/** + * @internal + * + * If clock is skewed, it returns the difference between serverTime and current time. + * If clock is not skewed, it returns currentSystemClockOffset. + * + * @param clockTime The string value of the server time. + * @param currentSystemClockOffset The current system clock offset. + */ +export declare const getUpdatedSystemClockOffset: (clockTime: string, currentSystemClockOffset: number) => number; diff --git a/amplify/functions/deleteDocument/node_modules/@aws-sdk/core/dist-types/submodules/httpAuthSchemes/utils/index.d.ts b/amplify/functions/deleteDocument/node_modules/@aws-sdk/core/dist-types/submodules/httpAuthSchemes/utils/index.d.ts new file mode 100644 index 0000000..07c2195 --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@aws-sdk/core/dist-types/submodules/httpAuthSchemes/utils/index.d.ts @@ -0,0 +1,3 @@ +export * from "./getDateHeader"; +export * from "./getSkewCorrectedDate"; +export * from "./getUpdatedSystemClockOffset"; diff --git a/amplify/functions/deleteDocument/node_modules/@aws-sdk/core/dist-types/submodules/httpAuthSchemes/utils/isClockSkewed.d.ts b/amplify/functions/deleteDocument/node_modules/@aws-sdk/core/dist-types/submodules/httpAuthSchemes/utils/isClockSkewed.d.ts new file mode 100644 index 0000000..970fa15 --- /dev/null +++ 
b/amplify/functions/deleteDocument/node_modules/@aws-sdk/core/dist-types/submodules/httpAuthSchemes/utils/isClockSkewed.d.ts @@ -0,0 +1,9 @@ +/** + * @internal + * + * Checks if the provided date is within the skew window of 300000ms. + * + * @param clockTime - The time to check for skew in milliseconds. + * @param systemClockOffset - The offset of the system clock in milliseconds. + */ +export declare const isClockSkewed: (clockTime: number, systemClockOffset: number) => boolean; diff --git a/amplify/functions/deleteDocument/node_modules/@aws-sdk/core/dist-types/submodules/protocols/coercing-serializers.d.ts b/amplify/functions/deleteDocument/node_modules/@aws-sdk/core/dist-types/submodules/protocols/coercing-serializers.d.ts new file mode 100644 index 0000000..10d9d39 --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@aws-sdk/core/dist-types/submodules/protocols/coercing-serializers.d.ts @@ -0,0 +1,18 @@ +/** + * @internal + * + * Used for awsQueryCompatibility trait. + */ +export declare const _toStr: (val: unknown) => string | undefined; +/** + * @internal + * + * Used for awsQueryCompatibility trait. + */ +export declare const _toBool: (val: unknown) => boolean | undefined; +/** + * @internal + * + * Used for awsQueryCompatibility trait. 
+ */ +export declare const _toNum: (val: unknown) => number | undefined; diff --git a/amplify/functions/deleteDocument/node_modules/@aws-sdk/core/dist-types/submodules/protocols/common.d.ts b/amplify/functions/deleteDocument/node_modules/@aws-sdk/core/dist-types/submodules/protocols/common.d.ts new file mode 100644 index 0000000..ec78fb2 --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@aws-sdk/core/dist-types/submodules/protocols/common.d.ts @@ -0,0 +1,2 @@ +import type { SerdeContext } from "@smithy/types"; +export declare const collectBodyString: (streamBody: any, context: SerdeContext) => Promise; diff --git a/amplify/functions/deleteDocument/node_modules/@aws-sdk/core/dist-types/submodules/protocols/index.d.ts b/amplify/functions/deleteDocument/node_modules/@aws-sdk/core/dist-types/submodules/protocols/index.d.ts new file mode 100644 index 0000000..09a6ac2 --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@aws-sdk/core/dist-types/submodules/protocols/index.d.ts @@ -0,0 +1,4 @@ +export * from "./coercing-serializers"; +export * from "./json/awsExpectUnion"; +export * from "./json/parseJsonBody"; +export * from "./xml/parseXmlBody"; diff --git a/amplify/functions/deleteDocument/node_modules/@aws-sdk/core/dist-types/submodules/protocols/json/awsExpectUnion.d.ts b/amplify/functions/deleteDocument/node_modules/@aws-sdk/core/dist-types/submodules/protocols/json/awsExpectUnion.d.ts new file mode 100644 index 0000000..98607ea --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@aws-sdk/core/dist-types/submodules/protocols/json/awsExpectUnion.d.ts @@ -0,0 +1,7 @@ +/** + * @internal + * + * Forwards to Smithy's expectUnion function, but also ignores + * the `__type` field if it is present. 
+ */ +export declare const awsExpectUnion: (value: unknown) => Record | undefined; diff --git a/amplify/functions/deleteDocument/node_modules/@aws-sdk/core/dist-types/submodules/protocols/json/parseJsonBody.d.ts b/amplify/functions/deleteDocument/node_modules/@aws-sdk/core/dist-types/submodules/protocols/json/parseJsonBody.d.ts new file mode 100644 index 0000000..827ffe9 --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@aws-sdk/core/dist-types/submodules/protocols/json/parseJsonBody.d.ts @@ -0,0 +1,13 @@ +import type { HttpResponse, SerdeContext } from "@smithy/types"; +/** + * @internal + */ +export declare const parseJsonBody: (streamBody: any, context: SerdeContext) => any; +/** + * @internal + */ +export declare const parseJsonErrorBody: (errorBody: any, context: SerdeContext) => Promise; +/** + * @internal + */ +export declare const loadRestJsonErrorCode: (output: HttpResponse, data: any) => string | undefined; diff --git a/amplify/functions/deleteDocument/node_modules/@aws-sdk/core/dist-types/submodules/protocols/xml/parseXmlBody.d.ts b/amplify/functions/deleteDocument/node_modules/@aws-sdk/core/dist-types/submodules/protocols/xml/parseXmlBody.d.ts new file mode 100644 index 0000000..30cfc30 --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@aws-sdk/core/dist-types/submodules/protocols/xml/parseXmlBody.d.ts @@ -0,0 +1,13 @@ +import type { HttpResponse, SerdeContext } from "@smithy/types"; +/** + * @internal + */ +export declare const parseXmlBody: (streamBody: any, context: SerdeContext) => any; +/** + * @internal + */ +export declare const parseXmlErrorBody: (errorBody: any, context: SerdeContext) => Promise; +/** + * @internal + */ +export declare const loadRestXmlErrorCode: (output: HttpResponse, data: any) => string | undefined; diff --git a/amplify/functions/deleteDocument/node_modules/@aws-sdk/core/dist-types/ts3.4/api-extractor-type-index.d.ts 
b/amplify/functions/deleteDocument/node_modules/@aws-sdk/core/dist-types/ts3.4/api-extractor-type-index.d.ts new file mode 100644 index 0000000..e83f927 --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@aws-sdk/core/dist-types/ts3.4/api-extractor-type-index.d.ts @@ -0,0 +1,5 @@ +export * from "./index"; +export * from "./submodules/account-id-endpoint/index"; +export * from "./submodules/client/index"; +export * from "./submodules/httpAuthSchemes/index"; +export * from "./submodules/protocols/index"; diff --git a/amplify/functions/deleteDocument/node_modules/@aws-sdk/core/dist-types/ts3.4/index.d.ts b/amplify/functions/deleteDocument/node_modules/@aws-sdk/core/dist-types/ts3.4/index.d.ts new file mode 100644 index 0000000..239de7a --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@aws-sdk/core/dist-types/ts3.4/index.d.ts @@ -0,0 +1,3 @@ +export * from "./submodules/client/index"; +export * from "./submodules/httpAuthSchemes/index"; +export * from "./submodules/protocols/index"; diff --git a/amplify/functions/deleteDocument/node_modules/@aws-sdk/core/dist-types/ts3.4/submodules/account-id-endpoint/AccountIdEndpointModeConfigResolver.d.ts b/amplify/functions/deleteDocument/node_modules/@aws-sdk/core/dist-types/ts3.4/submodules/account-id-endpoint/AccountIdEndpointModeConfigResolver.d.ts new file mode 100644 index 0000000..10d5c21 --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@aws-sdk/core/dist-types/ts3.4/submodules/account-id-endpoint/AccountIdEndpointModeConfigResolver.d.ts @@ -0,0 +1,15 @@ +import { Provider } from "@smithy/types"; +import { AccountIdEndpointMode } from "./AccountIdEndpointModeConstants"; +export interface AccountIdEndpointModeInputConfig { + accountIdEndpointMode?: + | AccountIdEndpointMode + | Provider; +} +interface PreviouslyResolved {} +export interface AccountIdEndpointModeResolvedConfig { + accountIdEndpointMode: Provider; +} +export declare const resolveAccountIdEndpointModeConfig: ( + 
input: T & AccountIdEndpointModeInputConfig & PreviouslyResolved +) => T & AccountIdEndpointModeResolvedConfig; +export {}; diff --git a/amplify/functions/deleteDocument/node_modules/@aws-sdk/core/dist-types/ts3.4/submodules/account-id-endpoint/AccountIdEndpointModeConstants.d.ts b/amplify/functions/deleteDocument/node_modules/@aws-sdk/core/dist-types/ts3.4/submodules/account-id-endpoint/AccountIdEndpointModeConstants.d.ts new file mode 100644 index 0000000..27bdce9 --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@aws-sdk/core/dist-types/ts3.4/submodules/account-id-endpoint/AccountIdEndpointModeConstants.d.ts @@ -0,0 +1,6 @@ +export type AccountIdEndpointMode = "disabled" | "preferred" | "required"; +export declare const DEFAULT_ACCOUNT_ID_ENDPOINT_MODE = "preferred"; +export declare const ACCOUNT_ID_ENDPOINT_MODE_VALUES: AccountIdEndpointMode[]; +export declare function validateAccountIdEndpointMode( + value: any +): value is AccountIdEndpointMode; diff --git a/amplify/functions/deleteDocument/node_modules/@aws-sdk/core/dist-types/ts3.4/submodules/account-id-endpoint/NodeAccountIdEndpointModeConfigOptions.d.ts b/amplify/functions/deleteDocument/node_modules/@aws-sdk/core/dist-types/ts3.4/submodules/account-id-endpoint/NodeAccountIdEndpointModeConfigOptions.d.ts new file mode 100644 index 0000000..9b04566 --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@aws-sdk/core/dist-types/ts3.4/submodules/account-id-endpoint/NodeAccountIdEndpointModeConfigOptions.d.ts @@ -0,0 +1,7 @@ +import { LoadedConfigSelectors } from "@smithy/node-config-provider"; +import { AccountIdEndpointMode } from "./AccountIdEndpointModeConstants"; +export declare const ENV_ACCOUNT_ID_ENDPOINT_MODE = + "AWS_ACCOUNT_ID_ENDPOINT_MODE"; +export declare const CONFIG_ACCOUNT_ID_ENDPOINT_MODE = + "account_id_endpoint_mode"; +export declare const NODE_ACCOUNT_ID_ENDPOINT_MODE_CONFIG_OPTIONS: LoadedConfigSelectors; diff --git 
a/amplify/functions/deleteDocument/node_modules/@aws-sdk/core/dist-types/ts3.4/submodules/account-id-endpoint/index.d.ts b/amplify/functions/deleteDocument/node_modules/@aws-sdk/core/dist-types/ts3.4/submodules/account-id-endpoint/index.d.ts new file mode 100644 index 0000000..52af11d --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@aws-sdk/core/dist-types/ts3.4/submodules/account-id-endpoint/index.d.ts @@ -0,0 +1,3 @@ +export * from "./AccountIdEndpointModeConfigResolver"; +export * from "./AccountIdEndpointModeConstants"; +export * from "./NodeAccountIdEndpointModeConfigOptions"; diff --git a/amplify/functions/deleteDocument/node_modules/@aws-sdk/core/dist-types/ts3.4/submodules/client/emitWarningIfUnsupportedVersion.d.ts b/amplify/functions/deleteDocument/node_modules/@aws-sdk/core/dist-types/ts3.4/submodules/client/emitWarningIfUnsupportedVersion.d.ts new file mode 100644 index 0000000..84af567 --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@aws-sdk/core/dist-types/ts3.4/submodules/client/emitWarningIfUnsupportedVersion.d.ts @@ -0,0 +1,4 @@ +export declare const state: { + warningEmitted: boolean; +}; +export declare const emitWarningIfUnsupportedVersion: (version: string) => void; diff --git a/amplify/functions/deleteDocument/node_modules/@aws-sdk/core/dist-types/ts3.4/submodules/client/index.d.ts b/amplify/functions/deleteDocument/node_modules/@aws-sdk/core/dist-types/ts3.4/submodules/client/index.d.ts new file mode 100644 index 0000000..1a2cc9d --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@aws-sdk/core/dist-types/ts3.4/submodules/client/index.d.ts @@ -0,0 +1,3 @@ +export * from "./emitWarningIfUnsupportedVersion"; +export * from "./setCredentialFeature"; +export * from "./setFeature"; diff --git a/amplify/functions/deleteDocument/node_modules/@aws-sdk/core/dist-types/ts3.4/submodules/client/setCredentialFeature.d.ts 
b/amplify/functions/deleteDocument/node_modules/@aws-sdk/core/dist-types/ts3.4/submodules/client/setCredentialFeature.d.ts new file mode 100644 index 0000000..1336619 --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@aws-sdk/core/dist-types/ts3.4/submodules/client/setCredentialFeature.d.ts @@ -0,0 +1,11 @@ +import { + AttributedAwsCredentialIdentity, + AwsSdkCredentialsFeatures, +} from "@aws-sdk/types"; +export declare function setCredentialFeature< + F extends keyof AwsSdkCredentialsFeatures +>( + credentials: AttributedAwsCredentialIdentity, + feature: F, + value: AwsSdkCredentialsFeatures[F] +): AttributedAwsCredentialIdentity; diff --git a/amplify/functions/deleteDocument/node_modules/@aws-sdk/core/dist-types/ts3.4/submodules/client/setFeature.d.ts b/amplify/functions/deleteDocument/node_modules/@aws-sdk/core/dist-types/ts3.4/submodules/client/setFeature.d.ts new file mode 100644 index 0000000..84482ee --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@aws-sdk/core/dist-types/ts3.4/submodules/client/setFeature.d.ts @@ -0,0 +1,6 @@ +import { AwsHandlerExecutionContext, AwsSdkFeatures } from "@aws-sdk/types"; +export declare function setFeature( + context: AwsHandlerExecutionContext, + feature: F, + value: AwsSdkFeatures[F] +): void; diff --git a/amplify/functions/deleteDocument/node_modules/@aws-sdk/core/dist-types/ts3.4/submodules/httpAuthSchemes/aws_sdk/AwsSdkSigV4ASigner.d.ts b/amplify/functions/deleteDocument/node_modules/@aws-sdk/core/dist-types/ts3.4/submodules/httpAuthSchemes/aws_sdk/AwsSdkSigV4ASigner.d.ts new file mode 100644 index 0000000..b8c2b74 --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@aws-sdk/core/dist-types/ts3.4/submodules/httpAuthSchemes/aws_sdk/AwsSdkSigV4ASigner.d.ts @@ -0,0 +1,12 @@ +import { + AwsCredentialIdentity, + HttpRequest as IHttpRequest, +} from "@smithy/types"; +import { AwsSdkSigV4Signer } from "./AwsSdkSigV4Signer"; +export declare class AwsSdkSigV4ASigner extends 
AwsSdkSigV4Signer { + sign( + httpRequest: IHttpRequest, + identity: AwsCredentialIdentity, + signingProperties: Record + ): Promise; +} diff --git a/amplify/functions/deleteDocument/node_modules/@aws-sdk/core/dist-types/ts3.4/submodules/httpAuthSchemes/aws_sdk/AwsSdkSigV4Signer.d.ts b/amplify/functions/deleteDocument/node_modules/@aws-sdk/core/dist-types/ts3.4/submodules/httpAuthSchemes/aws_sdk/AwsSdkSigV4Signer.d.ts new file mode 100644 index 0000000..0be6b41 --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@aws-sdk/core/dist-types/ts3.4/submodules/httpAuthSchemes/aws_sdk/AwsSdkSigV4Signer.d.ts @@ -0,0 +1,39 @@ +import { + AuthScheme, + AwsCredentialIdentity, + HttpRequest as IHttpRequest, + HttpResponse, + HttpSigner, + RequestSigner, +} from "@smithy/types"; +import { AwsSdkSigV4AAuthResolvedConfig } from "./resolveAwsSdkSigV4AConfig"; +interface AwsSdkSigV4Config extends AwsSdkSigV4AAuthResolvedConfig { + systemClockOffset: number; + signer: (authScheme?: AuthScheme) => Promise; +} +interface AwsSdkSigV4AuthSigningProperties { + config: AwsSdkSigV4Config; + signer: RequestSigner; + signingRegion?: string; + signingRegionSet?: string[]; + signingName?: string; +} +export declare const validateSigningProperties: ( + signingProperties: Record +) => Promise; +export declare class AwsSdkSigV4Signer implements HttpSigner { + sign( + httpRequest: IHttpRequest, + identity: AwsCredentialIdentity, + signingProperties: Record + ): Promise; + errorHandler( + signingProperties: Record + ): (error: Error) => never; + successHandler( + httpResponse: HttpResponse | unknown, + signingProperties: Record + ): void; +} +export declare const AWSSDKSigV4Signer: typeof AwsSdkSigV4Signer; +export {}; diff --git a/amplify/functions/deleteDocument/node_modules/@aws-sdk/core/dist-types/ts3.4/submodules/httpAuthSchemes/aws_sdk/NODE_AUTH_SCHEME_PREFERENCE_OPTIONS.d.ts 
b/amplify/functions/deleteDocument/node_modules/@aws-sdk/core/dist-types/ts3.4/submodules/httpAuthSchemes/aws_sdk/NODE_AUTH_SCHEME_PREFERENCE_OPTIONS.d.ts new file mode 100644 index 0000000..effc1e0 --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@aws-sdk/core/dist-types/ts3.4/submodules/httpAuthSchemes/aws_sdk/NODE_AUTH_SCHEME_PREFERENCE_OPTIONS.d.ts @@ -0,0 +1,4 @@ +import { LoadedConfigSelectors } from "@smithy/node-config-provider"; +export declare const NODE_AUTH_SCHEME_PREFERENCE_OPTIONS: LoadedConfigSelectors< + string[] +>; diff --git a/amplify/functions/deleteDocument/node_modules/@aws-sdk/core/dist-types/ts3.4/submodules/httpAuthSchemes/aws_sdk/index.d.ts b/amplify/functions/deleteDocument/node_modules/@aws-sdk/core/dist-types/ts3.4/submodules/httpAuthSchemes/aws_sdk/index.d.ts new file mode 100644 index 0000000..6047921 --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@aws-sdk/core/dist-types/ts3.4/submodules/httpAuthSchemes/aws_sdk/index.d.ts @@ -0,0 +1,9 @@ +export { + AwsSdkSigV4Signer, + AWSSDKSigV4Signer, + validateSigningProperties, +} from "./AwsSdkSigV4Signer"; +export { AwsSdkSigV4ASigner } from "./AwsSdkSigV4ASigner"; +export * from "./NODE_AUTH_SCHEME_PREFERENCE_OPTIONS"; +export * from "./resolveAwsSdkSigV4AConfig"; +export * from "./resolveAwsSdkSigV4Config"; diff --git a/amplify/functions/deleteDocument/node_modules/@aws-sdk/core/dist-types/ts3.4/submodules/httpAuthSchemes/aws_sdk/resolveAwsSdkSigV4AConfig.d.ts b/amplify/functions/deleteDocument/node_modules/@aws-sdk/core/dist-types/ts3.4/submodules/httpAuthSchemes/aws_sdk/resolveAwsSdkSigV4AConfig.d.ts new file mode 100644 index 0000000..9f949b0 --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@aws-sdk/core/dist-types/ts3.4/submodules/httpAuthSchemes/aws_sdk/resolveAwsSdkSigV4AConfig.d.ts @@ -0,0 +1,18 @@ +import { LoadedConfigSelectors } from "@smithy/node-config-provider"; +import { Provider } from "@smithy/types"; +export interface 
AwsSdkSigV4AAuthInputConfig { + sigv4aSigningRegionSet?: + | string[] + | undefined + | Provider; +} +export interface AwsSdkSigV4APreviouslyResolved {} +export interface AwsSdkSigV4AAuthResolvedConfig { + sigv4aSigningRegionSet: Provider; +} +export declare const resolveAwsSdkSigV4AConfig: ( + config: T & AwsSdkSigV4AAuthInputConfig & AwsSdkSigV4APreviouslyResolved +) => T & AwsSdkSigV4AAuthResolvedConfig; +export declare const NODE_SIGV4A_CONFIG_OPTIONS: LoadedConfigSelectors< + string[] | undefined +>; diff --git a/amplify/functions/deleteDocument/node_modules/@aws-sdk/core/dist-types/ts3.4/submodules/httpAuthSchemes/aws_sdk/resolveAwsSdkSigV4Config.d.ts b/amplify/functions/deleteDocument/node_modules/@aws-sdk/core/dist-types/ts3.4/submodules/httpAuthSchemes/aws_sdk/resolveAwsSdkSigV4Config.d.ts new file mode 100644 index 0000000..fc562d9 --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@aws-sdk/core/dist-types/ts3.4/submodules/httpAuthSchemes/aws_sdk/resolveAwsSdkSigV4Config.d.ts @@ -0,0 +1,65 @@ +import { MergeFunctions } from "@aws-sdk/types"; +import { SignatureV4CryptoInit, SignatureV4Init } from "@smithy/signature-v4"; +import { + AuthScheme, + AwsCredentialIdentity, + AwsCredentialIdentityProvider, + ChecksumConstructor, + HashConstructor, + MemoizedProvider, + Provider, + RegionInfoProvider, + RequestSigner, +} from "@smithy/types"; +export interface AwsSdkSigV4AuthInputConfig { + credentials?: AwsCredentialIdentity | AwsCredentialIdentityProvider; + signer?: + | RequestSigner + | ((authScheme?: AuthScheme) => Promise); + signingEscapePath?: boolean; + systemClockOffset?: number; + signingRegion?: string; + signerConstructor?: new ( + options: SignatureV4Init & SignatureV4CryptoInit + ) => RequestSigner; +} +export type AwsSdkSigV4Memoized = { + memoized?: boolean; + configBound?: boolean; + attributed?: boolean; +}; +export interface AwsSdkSigV4PreviouslyResolved { + credentialDefaultProvider?: ( + input: any + ) => MemoizedProvider; + 
region: string | Provider; + sha256: ChecksumConstructor | HashConstructor; + signingName?: string; + regionInfoProvider?: RegionInfoProvider; + defaultSigningName?: string; + serviceId: string; + useFipsEndpoint: Provider; + useDualstackEndpoint: Provider; +} +export interface AwsSdkSigV4AuthResolvedConfig { + credentials: MergeFunctions< + AwsCredentialIdentityProvider, + MemoizedProvider + > & + AwsSdkSigV4Memoized; + signer: (authScheme?: AuthScheme) => Promise; + signingEscapePath: boolean; + systemClockOffset: number; +} +export declare const resolveAwsSdkSigV4Config: ( + config: T & AwsSdkSigV4AuthInputConfig & AwsSdkSigV4PreviouslyResolved +) => T & AwsSdkSigV4AuthResolvedConfig; +export interface AWSSDKSigV4AuthInputConfig + extends AwsSdkSigV4AuthInputConfig {} +export interface AWSSDKSigV4PreviouslyResolved + extends AwsSdkSigV4PreviouslyResolved {} +export interface AWSSDKSigV4AuthResolvedConfig + extends AwsSdkSigV4AuthResolvedConfig {} +export declare const resolveAWSSDKSigV4Config: ( + config: T & AwsSdkSigV4AuthInputConfig & AwsSdkSigV4PreviouslyResolved +) => T & AwsSdkSigV4AuthResolvedConfig; diff --git a/amplify/functions/deleteDocument/node_modules/@aws-sdk/core/dist-types/ts3.4/submodules/httpAuthSchemes/index.d.ts b/amplify/functions/deleteDocument/node_modules/@aws-sdk/core/dist-types/ts3.4/submodules/httpAuthSchemes/index.d.ts new file mode 100644 index 0000000..29d0c3b --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@aws-sdk/core/dist-types/ts3.4/submodules/httpAuthSchemes/index.d.ts @@ -0,0 +1 @@ +export * from "./aws_sdk"; diff --git a/amplify/functions/deleteDocument/node_modules/@aws-sdk/core/dist-types/ts3.4/submodules/httpAuthSchemes/utils/getArrayForCommaSeparatedString.d.ts b/amplify/functions/deleteDocument/node_modules/@aws-sdk/core/dist-types/ts3.4/submodules/httpAuthSchemes/utils/getArrayForCommaSeparatedString.d.ts new file mode 100644 index 0000000..aee2328 --- /dev/null +++ 
b/amplify/functions/deleteDocument/node_modules/@aws-sdk/core/dist-types/ts3.4/submodules/httpAuthSchemes/utils/getArrayForCommaSeparatedString.d.ts @@ -0,0 +1 @@ +export declare const getArrayForCommaSeparatedString: (str: string) => string[]; diff --git a/amplify/functions/deleteDocument/node_modules/@aws-sdk/core/dist-types/ts3.4/submodules/httpAuthSchemes/utils/getDateHeader.d.ts b/amplify/functions/deleteDocument/node_modules/@aws-sdk/core/dist-types/ts3.4/submodules/httpAuthSchemes/utils/getDateHeader.d.ts new file mode 100644 index 0000000..73fc529 --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@aws-sdk/core/dist-types/ts3.4/submodules/httpAuthSchemes/utils/getDateHeader.d.ts @@ -0,0 +1 @@ +export declare const getDateHeader: (response: unknown) => string | undefined; diff --git a/amplify/functions/deleteDocument/node_modules/@aws-sdk/core/dist-types/ts3.4/submodules/httpAuthSchemes/utils/getSkewCorrectedDate.d.ts b/amplify/functions/deleteDocument/node_modules/@aws-sdk/core/dist-types/ts3.4/submodules/httpAuthSchemes/utils/getSkewCorrectedDate.d.ts new file mode 100644 index 0000000..741c5ea --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@aws-sdk/core/dist-types/ts3.4/submodules/httpAuthSchemes/utils/getSkewCorrectedDate.d.ts @@ -0,0 +1 @@ +export declare const getSkewCorrectedDate: (systemClockOffset: number) => Date; diff --git a/amplify/functions/deleteDocument/node_modules/@aws-sdk/core/dist-types/ts3.4/submodules/httpAuthSchemes/utils/getUpdatedSystemClockOffset.d.ts b/amplify/functions/deleteDocument/node_modules/@aws-sdk/core/dist-types/ts3.4/submodules/httpAuthSchemes/utils/getUpdatedSystemClockOffset.d.ts new file mode 100644 index 0000000..eae3311 --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@aws-sdk/core/dist-types/ts3.4/submodules/httpAuthSchemes/utils/getUpdatedSystemClockOffset.d.ts @@ -0,0 +1,4 @@ +export declare const getUpdatedSystemClockOffset: ( + clockTime: string, + 
currentSystemClockOffset: number +) => number; diff --git a/amplify/functions/deleteDocument/node_modules/@aws-sdk/core/dist-types/ts3.4/submodules/httpAuthSchemes/utils/index.d.ts b/amplify/functions/deleteDocument/node_modules/@aws-sdk/core/dist-types/ts3.4/submodules/httpAuthSchemes/utils/index.d.ts new file mode 100644 index 0000000..07c2195 --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@aws-sdk/core/dist-types/ts3.4/submodules/httpAuthSchemes/utils/index.d.ts @@ -0,0 +1,3 @@ +export * from "./getDateHeader"; +export * from "./getSkewCorrectedDate"; +export * from "./getUpdatedSystemClockOffset"; diff --git a/amplify/functions/deleteDocument/node_modules/@aws-sdk/core/dist-types/ts3.4/submodules/httpAuthSchemes/utils/isClockSkewed.d.ts b/amplify/functions/deleteDocument/node_modules/@aws-sdk/core/dist-types/ts3.4/submodules/httpAuthSchemes/utils/isClockSkewed.d.ts new file mode 100644 index 0000000..9f994f8 --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@aws-sdk/core/dist-types/ts3.4/submodules/httpAuthSchemes/utils/isClockSkewed.d.ts @@ -0,0 +1,4 @@ +export declare const isClockSkewed: ( + clockTime: number, + systemClockOffset: number +) => boolean; diff --git a/amplify/functions/deleteDocument/node_modules/@aws-sdk/core/dist-types/ts3.4/submodules/protocols/coercing-serializers.d.ts b/amplify/functions/deleteDocument/node_modules/@aws-sdk/core/dist-types/ts3.4/submodules/protocols/coercing-serializers.d.ts new file mode 100644 index 0000000..7657ceb --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@aws-sdk/core/dist-types/ts3.4/submodules/protocols/coercing-serializers.d.ts @@ -0,0 +1,3 @@ +export declare const _toStr: (val: unknown) => string | undefined; +export declare const _toBool: (val: unknown) => boolean | undefined; +export declare const _toNum: (val: unknown) => number | undefined; diff --git 
a/amplify/functions/deleteDocument/node_modules/@aws-sdk/core/dist-types/ts3.4/submodules/protocols/common.d.ts b/amplify/functions/deleteDocument/node_modules/@aws-sdk/core/dist-types/ts3.4/submodules/protocols/common.d.ts new file mode 100644 index 0000000..73486db --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@aws-sdk/core/dist-types/ts3.4/submodules/protocols/common.d.ts @@ -0,0 +1,5 @@ +import { SerdeContext } from "@smithy/types"; +export declare const collectBodyString: ( + streamBody: any, + context: SerdeContext +) => Promise; diff --git a/amplify/functions/deleteDocument/node_modules/@aws-sdk/core/dist-types/ts3.4/submodules/protocols/index.d.ts b/amplify/functions/deleteDocument/node_modules/@aws-sdk/core/dist-types/ts3.4/submodules/protocols/index.d.ts new file mode 100644 index 0000000..09a6ac2 --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@aws-sdk/core/dist-types/ts3.4/submodules/protocols/index.d.ts @@ -0,0 +1,4 @@ +export * from "./coercing-serializers"; +export * from "./json/awsExpectUnion"; +export * from "./json/parseJsonBody"; +export * from "./xml/parseXmlBody"; diff --git a/amplify/functions/deleteDocument/node_modules/@aws-sdk/core/dist-types/ts3.4/submodules/protocols/json/awsExpectUnion.d.ts b/amplify/functions/deleteDocument/node_modules/@aws-sdk/core/dist-types/ts3.4/submodules/protocols/json/awsExpectUnion.d.ts new file mode 100644 index 0000000..fdc331e --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@aws-sdk/core/dist-types/ts3.4/submodules/protocols/json/awsExpectUnion.d.ts @@ -0,0 +1,3 @@ +export declare const awsExpectUnion: ( + value: unknown +) => Record | undefined; diff --git a/amplify/functions/deleteDocument/node_modules/@aws-sdk/core/dist-types/ts3.4/submodules/protocols/json/parseJsonBody.d.ts b/amplify/functions/deleteDocument/node_modules/@aws-sdk/core/dist-types/ts3.4/submodules/protocols/json/parseJsonBody.d.ts new file mode 100644 index 0000000..b400419 --- 
/dev/null +++ b/amplify/functions/deleteDocument/node_modules/@aws-sdk/core/dist-types/ts3.4/submodules/protocols/json/parseJsonBody.d.ts @@ -0,0 +1,13 @@ +import { HttpResponse, SerdeContext } from "@smithy/types"; +export declare const parseJsonBody: ( + streamBody: any, + context: SerdeContext +) => any; +export declare const parseJsonErrorBody: ( + errorBody: any, + context: SerdeContext +) => Promise; +export declare const loadRestJsonErrorCode: ( + output: HttpResponse, + data: any +) => string | undefined; diff --git a/amplify/functions/deleteDocument/node_modules/@aws-sdk/core/dist-types/ts3.4/submodules/protocols/xml/parseXmlBody.d.ts b/amplify/functions/deleteDocument/node_modules/@aws-sdk/core/dist-types/ts3.4/submodules/protocols/xml/parseXmlBody.d.ts new file mode 100644 index 0000000..f151834 --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@aws-sdk/core/dist-types/ts3.4/submodules/protocols/xml/parseXmlBody.d.ts @@ -0,0 +1,13 @@ +import { HttpResponse, SerdeContext } from "@smithy/types"; +export declare const parseXmlBody: ( + streamBody: any, + context: SerdeContext +) => any; +export declare const parseXmlErrorBody: ( + errorBody: any, + context: SerdeContext +) => Promise; +export declare const loadRestXmlErrorCode: ( + output: HttpResponse, + data: any +) => string | undefined; diff --git a/amplify/functions/deleteDocument/node_modules/@aws-sdk/core/httpAuthSchemes.d.ts b/amplify/functions/deleteDocument/node_modules/@aws-sdk/core/httpAuthSchemes.d.ts new file mode 100644 index 0000000..3783b5e --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@aws-sdk/core/httpAuthSchemes.d.ts @@ -0,0 +1,7 @@ +/** + * Do not edit: + * This is a compatibility redirect for contexts that do not understand package.json exports field. 
+ */ +declare module "@aws-sdk/core/httpAuthSchemes" { + export * from "@aws-sdk/core/dist-types/submodules/httpAuthSchemes/index.d"; +} diff --git a/amplify/functions/deleteDocument/node_modules/@aws-sdk/core/httpAuthSchemes.js b/amplify/functions/deleteDocument/node_modules/@aws-sdk/core/httpAuthSchemes.js new file mode 100644 index 0000000..17685b0 --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@aws-sdk/core/httpAuthSchemes.js @@ -0,0 +1,5 @@ +/** + * Do not edit: + * This is a compatibility redirect for contexts that do not understand package.json exports field. + */ +module.exports = require("./dist-cjs/submodules/httpAuthSchemes/index.js"); diff --git a/amplify/functions/deleteDocument/node_modules/@aws-sdk/core/package.json b/amplify/functions/deleteDocument/node_modules/@aws-sdk/core/package.json new file mode 100644 index 0000000..a41d77a --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@aws-sdk/core/package.json @@ -0,0 +1,119 @@ +{ + "name": "@aws-sdk/core", + "version": "3.799.0", + "description": "Core functions & classes shared by multiple AWS SDK clients.", + "scripts": { + "build": "yarn lint && concurrently 'yarn:build:cjs' 'yarn:build:es' 'yarn:build:types'", + "build:cjs": "node ../../scripts/compilation/inline core && rimraf ./dist-cjs/api-extractor-type-index.js", + "build:es": "tsc -p tsconfig.es.json && rimraf ./dist-es/api-extractor-type-index.js", + "build:include:deps": "lerna run --scope $npm_package_name --include-dependencies build", + "build:types": "tsc -p tsconfig.types.json", + "build:types:downlevel": "downlevel-dts dist-types dist-types/ts3.4", + "lint": "node ../../scripts/validation/submodules-linter.js --pkg core", + "clean": "rimraf ./dist-* && rimraf *.tsbuildinfo", + "extract:docs": "api-extractor run --local", + "test": "yarn g:vitest run", + "test:integration": "yarn g:vitest run -c vitest.config.integ.ts", + "test:watch": "yarn g:vitest watch", + "test:integration:watch": "yarn g:vitest 
watch -c vitest.config.integ.ts" + }, + "main": "./dist-cjs/index.js", + "module": "./dist-es/index.js", + "types": "./dist-types/index.d.ts", + "exports": { + ".": { + "types": "./dist-types/index.d.ts", + "module": "./dist-es/index.js", + "node": "./dist-cjs/index.js", + "import": "./dist-es/index.js", + "require": "./dist-cjs/index.js" + }, + "./package.json": { + "module": "./package.json", + "node": "./package.json", + "import": "./package.json", + "require": "./package.json" + }, + "./client": { + "types": "./dist-types/submodules/client/index.d.ts", + "module": "./dist-es/submodules/client/index.js", + "node": "./dist-cjs/submodules/client/index.js", + "import": "./dist-es/submodules/client/index.js", + "require": "./dist-cjs/submodules/client/index.js" + }, + "./httpAuthSchemes": { + "types": "./dist-types/submodules/httpAuthSchemes/index.d.ts", + "module": "./dist-es/submodules/httpAuthSchemes/index.js", + "node": "./dist-cjs/submodules/httpAuthSchemes/index.js", + "import": "./dist-es/submodules/httpAuthSchemes/index.js", + "require": "./dist-cjs/submodules/httpAuthSchemes/index.js" + }, + "./account-id-endpoint": { + "types": "./dist-types/submodules/account-id-endpoint/index.d.ts", + "module": "./dist-es/submodules/account-id-endpoint/index.js", + "node": "./dist-cjs/submodules/account-id-endpoint/index.js", + "import": "./dist-es/submodules/account-id-endpoint/index.js", + "require": "./dist-cjs/submodules/account-id-endpoint/index.js" + }, + "./protocols": { + "types": "./dist-types/submodules/protocols/index.d.ts", + "module": "./dist-es/submodules/protocols/index.js", + "node": "./dist-cjs/submodules/protocols/index.js", + "import": "./dist-es/submodules/protocols/index.js", + "require": "./dist-cjs/submodules/protocols/index.js" + } + }, + "files": [ + "./account-id-endpoint.d.ts", + "./account-id-endpoint.js", + "./client.d.ts", + "./client.js", + "./httpAuthSchemes.d.ts", + "./httpAuthSchemes.js", + "./protocols.d.ts", + "./protocols.js", + 
"dist-*/**" + ], + "sideEffects": false, + "author": { + "name": "AWS SDK for JavaScript Team", + "url": "https://aws.amazon.com/javascript/" + }, + "license": "Apache-2.0", + "dependencies": { + "@aws-sdk/types": "3.775.0", + "@smithy/core": "^3.3.0", + "@smithy/node-config-provider": "^4.0.2", + "@smithy/property-provider": "^4.0.2", + "@smithy/protocol-http": "^5.1.0", + "@smithy/signature-v4": "^5.1.0", + "@smithy/smithy-client": "^4.2.1", + "@smithy/types": "^4.2.0", + "@smithy/util-middleware": "^4.0.2", + "fast-xml-parser": "4.4.1", + "tslib": "^2.6.2" + }, + "devDependencies": { + "@tsconfig/recommended": "1.0.1", + "concurrently": "7.0.0", + "downlevel-dts": "0.10.1", + "rimraf": "3.0.2", + "typescript": "~5.2.2" + }, + "engines": { + "node": ">=18.0.0" + }, + "typesVersions": { + "<4.0": { + "dist-types/*": [ + "dist-types/ts3.4/*" + ] + } + }, + "homepage": "https://github.com/aws/aws-sdk-js-v3/tree/main/packages/core", + "repository": { + "type": "git", + "url": "https://github.com/aws/aws-sdk-js-v3.git", + "directory": "packages/core" + } +} diff --git a/amplify/functions/deleteDocument/node_modules/@aws-sdk/core/protocols.d.ts b/amplify/functions/deleteDocument/node_modules/@aws-sdk/core/protocols.d.ts new file mode 100644 index 0000000..7a36334 --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@aws-sdk/core/protocols.d.ts @@ -0,0 +1,7 @@ +/** + * Do not edit: + * This is a compatibility redirect for contexts that do not understand package.json exports field. 
+ */ +declare module "@aws-sdk/core/protocols" { + export * from "@aws-sdk/core/dist-types/submodules/protocols/index.d"; +} diff --git a/amplify/functions/deleteDocument/node_modules/@aws-sdk/core/protocols.js b/amplify/functions/deleteDocument/node_modules/@aws-sdk/core/protocols.js new file mode 100644 index 0000000..e2916e8 --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@aws-sdk/core/protocols.js @@ -0,0 +1,5 @@ +/** + * Do not edit: + * This is a compatibility redirect for contexts that do not understand package.json exports field. + */ +module.exports = require("./dist-cjs/submodules/protocols/index.js"); diff --git a/amplify/functions/deleteDocument/node_modules/@aws-sdk/credential-provider-env/LICENSE b/amplify/functions/deleteDocument/node_modules/@aws-sdk/credential-provider-env/LICENSE new file mode 100644 index 0000000..7b6491b --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@aws-sdk/credential-provider-env/LICENSE @@ -0,0 +1,201 @@ +Apache License + Version 2.0, January 2004 + http://www.apache.org/licenses/ + + TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION + + 1. Definitions. + + "License" shall mean the terms and conditions for use, reproduction, + and distribution as defined by Sections 1 through 9 of this document. + + "Licensor" shall mean the copyright owner or entity authorized by + the copyright owner that is granting the License. + + "Legal Entity" shall mean the union of the acting entity and all + other entities that control, are controlled by, or are under common + control with that entity. For the purposes of this definition, + "control" means (i) the power, direct or indirect, to cause the + direction or management of such entity, whether by contract or + otherwise, or (ii) ownership of fifty percent (50%) or more of the + outstanding shares, or (iii) beneficial ownership of such entity. 
+ + "You" (or "Your") shall mean an individual or Legal Entity + exercising permissions granted by this License. + + "Source" form shall mean the preferred form for making modifications, + including but not limited to software source code, documentation + source, and configuration files. + + "Object" form shall mean any form resulting from mechanical + transformation or translation of a Source form, including but + not limited to compiled object code, generated documentation, + and conversions to other media types. + + "Work" shall mean the work of authorship, whether in Source or + Object form, made available under the License, as indicated by a + copyright notice that is included in or attached to the work + (an example is provided in the Appendix below). + + "Derivative Works" shall mean any work, whether in Source or Object + form, that is based on (or derived from) the Work and for which the + editorial revisions, annotations, elaborations, or other modifications + represent, as a whole, an original work of authorship. For the purposes + of this License, Derivative Works shall not include works that remain + separable from, or merely link (or bind by name) to the interfaces of, + the Work and Derivative Works thereof. + + "Contribution" shall mean any work of authorship, including + the original version of the Work and any modifications or additions + to that Work or Derivative Works thereof, that is intentionally + submitted to Licensor for inclusion in the Work by the copyright owner + or by an individual or Legal Entity authorized to submit on behalf of + the copyright owner. 
For the purposes of this definition, "submitted" + means any form of electronic, verbal, or written communication sent + to the Licensor or its representatives, including but not limited to + communication on electronic mailing lists, source code control systems, + and issue tracking systems that are managed by, or on behalf of, the + Licensor for the purpose of discussing and improving the Work, but + excluding communication that is conspicuously marked or otherwise + designated in writing by the copyright owner as "Not a Contribution." + + "Contributor" shall mean Licensor and any individual or Legal Entity + on behalf of whom a Contribution has been received by Licensor and + subsequently incorporated within the Work. + + 2. Grant of Copyright License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + copyright license to reproduce, prepare Derivative Works of, + publicly display, publicly perform, sublicense, and distribute the + Work and such Derivative Works in Source or Object form. + + 3. Grant of Patent License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + (except as stated in this section) patent license to make, have made, + use, offer to sell, sell, import, and otherwise transfer the Work, + where such license applies only to those patent claims licensable + by such Contributor that are necessarily infringed by their + Contribution(s) alone or by combination of their Contribution(s) + with the Work to which such Contribution(s) was submitted. 
If You + institute patent litigation against any entity (including a + cross-claim or counterclaim in a lawsuit) alleging that the Work + or a Contribution incorporated within the Work constitutes direct + or contributory patent infringement, then any patent licenses + granted to You under this License for that Work shall terminate + as of the date such litigation is filed. + + 4. Redistribution. You may reproduce and distribute copies of the + Work or Derivative Works thereof in any medium, with or without + modifications, and in Source or Object form, provided that You + meet the following conditions: + + (a) You must give any other recipients of the Work or + Derivative Works a copy of this License; and + + (b) You must cause any modified files to carry prominent notices + stating that You changed the files; and + + (c) You must retain, in the Source form of any Derivative Works + that You distribute, all copyright, patent, trademark, and + attribution notices from the Source form of the Work, + excluding those notices that do not pertain to any part of + the Derivative Works; and + + (d) If the Work includes a "NOTICE" text file as part of its + distribution, then any Derivative Works that You distribute must + include a readable copy of the attribution notices contained + within such NOTICE file, excluding those notices that do not + pertain to any part of the Derivative Works, in at least one + of the following places: within a NOTICE text file distributed + as part of the Derivative Works; within the Source form or + documentation, if provided along with the Derivative Works; or, + within a display generated by the Derivative Works, if and + wherever such third-party notices normally appear. The contents + of the NOTICE file are for informational purposes only and + do not modify the License. 
You may add Your own attribution + notices within Derivative Works that You distribute, alongside + or as an addendum to the NOTICE text from the Work, provided + that such additional attribution notices cannot be construed + as modifying the License. + + You may add Your own copyright statement to Your modifications and + may provide additional or different license terms and conditions + for use, reproduction, or distribution of Your modifications, or + for any such Derivative Works as a whole, provided Your use, + reproduction, and distribution of the Work otherwise complies with + the conditions stated in this License. + + 5. Submission of Contributions. Unless You explicitly state otherwise, + any Contribution intentionally submitted for inclusion in the Work + by You to the Licensor shall be under the terms and conditions of + this License, without any additional terms or conditions. + Notwithstanding the above, nothing herein shall supersede or modify + the terms of any separate license agreement you may have executed + with Licensor regarding such Contributions. + + 6. Trademarks. This License does not grant permission to use the trade + names, trademarks, service marks, or product names of the Licensor, + except as required for reasonable and customary use in describing the + origin of the Work and reproducing the content of the NOTICE file. + + 7. Disclaimer of Warranty. Unless required by applicable law or + agreed to in writing, Licensor provides the Work (and each + Contributor provides its Contributions) on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or + implied, including, without limitation, any warranties or conditions + of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A + PARTICULAR PURPOSE. You are solely responsible for determining the + appropriateness of using or redistributing the Work and assume any + risks associated with Your exercise of permissions under this License. + + 8. 
Limitation of Liability. In no event and under no legal theory, + whether in tort (including negligence), contract, or otherwise, + unless required by applicable law (such as deliberate and grossly + negligent acts) or agreed to in writing, shall any Contributor be + liable to You for damages, including any direct, indirect, special, + incidental, or consequential damages of any character arising as a + result of this License or out of the use or inability to use the + Work (including but not limited to damages for loss of goodwill, + work stoppage, computer failure or malfunction, or any and all + other commercial damages or losses), even if such Contributor + has been advised of the possibility of such damages. + + 9. Accepting Warranty or Additional Liability. While redistributing + the Work or Derivative Works thereof, You may choose to offer, + and charge a fee for, acceptance of support, warranty, indemnity, + or other liability obligations and/or rights consistent with this + License. However, in accepting such obligations, You may act only + on Your own behalf and on Your sole responsibility, not on behalf + of any other Contributor, and only if You agree to indemnify, + defend, and hold each Contributor harmless for any liability + incurred by, or claims asserted against, such Contributor by reason + of your accepting any such warranty or additional liability. + + END OF TERMS AND CONDITIONS + + APPENDIX: How to apply the Apache License to your work. + + To apply the Apache License to your work, attach the following + boilerplate notice, with the fields enclosed by brackets "{}" + replaced with your own identifying information. (Don't include + the brackets!) The text should be enclosed in the appropriate + comment syntax for the file format. We also recommend that a + file or class name and description of purpose be included on the + same "printed page" as the copyright notice for easier + identification within third-party archives. 
+ + Copyright 2018-2020 Amazon.com, Inc. or its affiliates. All Rights Reserved. + + Licensed under the Apache License, Version 2.0 (the "License"); + you may not use this file except in compliance with the License. + You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + + Unless required by applicable law or agreed to in writing, software + distributed under the License is distributed on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + See the License for the specific language governing permissions and + limitations under the License. \ No newline at end of file diff --git a/amplify/functions/deleteDocument/node_modules/@aws-sdk/credential-provider-env/README.md b/amplify/functions/deleteDocument/node_modules/@aws-sdk/credential-provider-env/README.md new file mode 100644 index 0000000..61a6436 --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@aws-sdk/credential-provider-env/README.md @@ -0,0 +1,11 @@ +# @aws-sdk/credential-provider-env + +[![NPM version](https://img.shields.io/npm/v/@aws-sdk/credential-provider-env/latest.svg)](https://www.npmjs.com/package/@aws-sdk/credential-provider-env) +[![NPM downloads](https://img.shields.io/npm/dm/@aws-sdk/credential-provider-env.svg)](https://www.npmjs.com/package/@aws-sdk/credential-provider-env) + +> An internal package + +## Usage + +You probably shouldn't, at least directly. Please use [@aws-sdk/credential-providers](https://www.npmjs.com/package/@aws-sdk/credential-providers) +instead. 
diff --git a/amplify/functions/deleteDocument/node_modules/@aws-sdk/credential-provider-env/dist-cjs/index.js b/amplify/functions/deleteDocument/node_modules/@aws-sdk/credential-provider-env/dist-cjs/index.js new file mode 100644 index 0000000..c906a6b --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@aws-sdk/credential-provider-env/dist-cjs/index.js @@ -0,0 +1,76 @@ +"use strict"; +var __defProp = Object.defineProperty; +var __getOwnPropDesc = Object.getOwnPropertyDescriptor; +var __getOwnPropNames = Object.getOwnPropertyNames; +var __hasOwnProp = Object.prototype.hasOwnProperty; +var __name = (target, value) => __defProp(target, "name", { value, configurable: true }); +var __export = (target, all) => { + for (var name in all) + __defProp(target, name, { get: all[name], enumerable: true }); +}; +var __copyProps = (to, from, except, desc) => { + if (from && typeof from === "object" || typeof from === "function") { + for (let key of __getOwnPropNames(from)) + if (!__hasOwnProp.call(to, key) && key !== except) + __defProp(to, key, { get: () => from[key], enumerable: !(desc = __getOwnPropDesc(from, key)) || desc.enumerable }); + } + return to; +}; +var __toCommonJS = (mod) => __copyProps(__defProp({}, "__esModule", { value: true }), mod); + +// src/index.ts +var index_exports = {}; +__export(index_exports, { + ENV_ACCOUNT_ID: () => ENV_ACCOUNT_ID, + ENV_CREDENTIAL_SCOPE: () => ENV_CREDENTIAL_SCOPE, + ENV_EXPIRATION: () => ENV_EXPIRATION, + ENV_KEY: () => ENV_KEY, + ENV_SECRET: () => ENV_SECRET, + ENV_SESSION: () => ENV_SESSION, + fromEnv: () => fromEnv +}); +module.exports = __toCommonJS(index_exports); + +// src/fromEnv.ts +var import_client = require("@aws-sdk/core/client"); +var import_property_provider = require("@smithy/property-provider"); +var ENV_KEY = "AWS_ACCESS_KEY_ID"; +var ENV_SECRET = "AWS_SECRET_ACCESS_KEY"; +var ENV_SESSION = "AWS_SESSION_TOKEN"; +var ENV_EXPIRATION = "AWS_CREDENTIAL_EXPIRATION"; +var ENV_CREDENTIAL_SCOPE = 
"AWS_CREDENTIAL_SCOPE"; +var ENV_ACCOUNT_ID = "AWS_ACCOUNT_ID"; +var fromEnv = /* @__PURE__ */ __name((init) => async () => { + init?.logger?.debug("@aws-sdk/credential-provider-env - fromEnv"); + const accessKeyId = process.env[ENV_KEY]; + const secretAccessKey = process.env[ENV_SECRET]; + const sessionToken = process.env[ENV_SESSION]; + const expiry = process.env[ENV_EXPIRATION]; + const credentialScope = process.env[ENV_CREDENTIAL_SCOPE]; + const accountId = process.env[ENV_ACCOUNT_ID]; + if (accessKeyId && secretAccessKey) { + const credentials = { + accessKeyId, + secretAccessKey, + ...sessionToken && { sessionToken }, + ...expiry && { expiration: new Date(expiry) }, + ...credentialScope && { credentialScope }, + ...accountId && { accountId } + }; + (0, import_client.setCredentialFeature)(credentials, "CREDENTIALS_ENV_VARS", "g"); + return credentials; + } + throw new import_property_provider.CredentialsProviderError("Unable to find environment variable credentials.", { logger: init?.logger }); +}, "fromEnv"); +// Annotate the CommonJS export names for ESM import in node: + +0 && (module.exports = { + ENV_KEY, + ENV_SECRET, + ENV_SESSION, + ENV_EXPIRATION, + ENV_CREDENTIAL_SCOPE, + ENV_ACCOUNT_ID, + fromEnv +}); + diff --git a/amplify/functions/deleteDocument/node_modules/@aws-sdk/credential-provider-env/dist-es/fromEnv.js b/amplify/functions/deleteDocument/node_modules/@aws-sdk/credential-provider-env/dist-es/fromEnv.js new file mode 100644 index 0000000..a6a2928 --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@aws-sdk/credential-provider-env/dist-es/fromEnv.js @@ -0,0 +1,30 @@ +import { setCredentialFeature } from "@aws-sdk/core/client"; +import { CredentialsProviderError } from "@smithy/property-provider"; +export const ENV_KEY = "AWS_ACCESS_KEY_ID"; +export const ENV_SECRET = "AWS_SECRET_ACCESS_KEY"; +export const ENV_SESSION = "AWS_SESSION_TOKEN"; +export const ENV_EXPIRATION = "AWS_CREDENTIAL_EXPIRATION"; +export const 
ENV_CREDENTIAL_SCOPE = "AWS_CREDENTIAL_SCOPE"; +export const ENV_ACCOUNT_ID = "AWS_ACCOUNT_ID"; +export const fromEnv = (init) => async () => { + init?.logger?.debug("@aws-sdk/credential-provider-env - fromEnv"); + const accessKeyId = process.env[ENV_KEY]; + const secretAccessKey = process.env[ENV_SECRET]; + const sessionToken = process.env[ENV_SESSION]; + const expiry = process.env[ENV_EXPIRATION]; + const credentialScope = process.env[ENV_CREDENTIAL_SCOPE]; + const accountId = process.env[ENV_ACCOUNT_ID]; + if (accessKeyId && secretAccessKey) { + const credentials = { + accessKeyId, + secretAccessKey, + ...(sessionToken && { sessionToken }), + ...(expiry && { expiration: new Date(expiry) }), + ...(credentialScope && { credentialScope }), + ...(accountId && { accountId }), + }; + setCredentialFeature(credentials, "CREDENTIALS_ENV_VARS", "g"); + return credentials; + } + throw new CredentialsProviderError("Unable to find environment variable credentials.", { logger: init?.logger }); +}; diff --git a/amplify/functions/deleteDocument/node_modules/@aws-sdk/credential-provider-env/dist-es/index.js b/amplify/functions/deleteDocument/node_modules/@aws-sdk/credential-provider-env/dist-es/index.js new file mode 100644 index 0000000..17bf6da --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@aws-sdk/credential-provider-env/dist-es/index.js @@ -0,0 +1 @@ +export * from "./fromEnv"; diff --git a/amplify/functions/deleteDocument/node_modules/@aws-sdk/credential-provider-env/dist-types/fromEnv.d.ts b/amplify/functions/deleteDocument/node_modules/@aws-sdk/credential-provider-env/dist-types/fromEnv.d.ts new file mode 100644 index 0000000..541aa69 --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@aws-sdk/credential-provider-env/dist-types/fromEnv.d.ts @@ -0,0 +1,36 @@ +import type { CredentialProviderOptions } from "@aws-sdk/types"; +import { AwsCredentialIdentityProvider } from "@smithy/types"; +export interface FromEnvInit extends 
CredentialProviderOptions { +} +/** + * @internal + */ +export declare const ENV_KEY = "AWS_ACCESS_KEY_ID"; +/** + * @internal + */ +export declare const ENV_SECRET = "AWS_SECRET_ACCESS_KEY"; +/** + * @internal + */ +export declare const ENV_SESSION = "AWS_SESSION_TOKEN"; +/** + * @internal + */ +export declare const ENV_EXPIRATION = "AWS_CREDENTIAL_EXPIRATION"; +/** + * @internal + */ +export declare const ENV_CREDENTIAL_SCOPE = "AWS_CREDENTIAL_SCOPE"; +/** + * @internal + */ +export declare const ENV_ACCOUNT_ID = "AWS_ACCOUNT_ID"; +/** + * @internal + * + * Source AWS credentials from known environment variables. If either the + * `AWS_ACCESS_KEY_ID` or `AWS_SECRET_ACCESS_KEY` environment variable is not + * set in this process, the provider will return a rejected promise. + */ +export declare const fromEnv: (init?: FromEnvInit) => AwsCredentialIdentityProvider; diff --git a/amplify/functions/deleteDocument/node_modules/@aws-sdk/credential-provider-env/dist-types/index.d.ts b/amplify/functions/deleteDocument/node_modules/@aws-sdk/credential-provider-env/dist-types/index.d.ts new file mode 100644 index 0000000..fe76e31 --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@aws-sdk/credential-provider-env/dist-types/index.d.ts @@ -0,0 +1,4 @@ +/** + * @internal + */ +export * from "./fromEnv"; diff --git a/amplify/functions/deleteDocument/node_modules/@aws-sdk/credential-provider-env/dist-types/ts3.4/fromEnv.d.ts b/amplify/functions/deleteDocument/node_modules/@aws-sdk/credential-provider-env/dist-types/ts3.4/fromEnv.d.ts new file mode 100644 index 0000000..55c454e --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@aws-sdk/credential-provider-env/dist-types/ts3.4/fromEnv.d.ts @@ -0,0 +1,12 @@ +import { CredentialProviderOptions } from "@aws-sdk/types"; +import { AwsCredentialIdentityProvider } from "@smithy/types"; +export interface FromEnvInit extends CredentialProviderOptions {} +export declare const ENV_KEY = "AWS_ACCESS_KEY_ID"; 
+export declare const ENV_SECRET = "AWS_SECRET_ACCESS_KEY"; +export declare const ENV_SESSION = "AWS_SESSION_TOKEN"; +export declare const ENV_EXPIRATION = "AWS_CREDENTIAL_EXPIRATION"; +export declare const ENV_CREDENTIAL_SCOPE = "AWS_CREDENTIAL_SCOPE"; +export declare const ENV_ACCOUNT_ID = "AWS_ACCOUNT_ID"; +export declare const fromEnv: ( + init?: FromEnvInit +) => AwsCredentialIdentityProvider; diff --git a/amplify/functions/deleteDocument/node_modules/@aws-sdk/credential-provider-env/dist-types/ts3.4/index.d.ts b/amplify/functions/deleteDocument/node_modules/@aws-sdk/credential-provider-env/dist-types/ts3.4/index.d.ts new file mode 100644 index 0000000..17bf6da --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@aws-sdk/credential-provider-env/dist-types/ts3.4/index.d.ts @@ -0,0 +1 @@ +export * from "./fromEnv"; diff --git a/amplify/functions/deleteDocument/node_modules/@aws-sdk/credential-provider-env/package.json b/amplify/functions/deleteDocument/node_modules/@aws-sdk/credential-provider-env/package.json new file mode 100644 index 0000000..a66a0de --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@aws-sdk/credential-provider-env/package.json @@ -0,0 +1,62 @@ +{ + "name": "@aws-sdk/credential-provider-env", + "version": "3.799.0", + "description": "AWS credential provider that sources credentials from known environment variables", + "main": "./dist-cjs/index.js", + "module": "./dist-es/index.js", + "scripts": { + "build": "concurrently 'yarn:build:cjs' 'yarn:build:es' 'yarn:build:types'", + "build:cjs": "node ../../scripts/compilation/inline credential-provider-env", + "build:es": "tsc -p tsconfig.es.json", + "build:include:deps": "lerna run --scope $npm_package_name --include-dependencies build", + "build:types": "tsc -p tsconfig.types.json", + "build:types:downlevel": "downlevel-dts dist-types dist-types/ts3.4", + "clean": "rimraf ./dist-* && rimraf *.tsbuildinfo", + "test": "yarn g:vitest run", + "test:watch": "yarn g:vitest 
watch" + }, + "keywords": [ + "aws", + "credentials" + ], + "author": { + "name": "AWS SDK for JavaScript Team", + "url": "https://aws.amazon.com/javascript/" + }, + "license": "Apache-2.0", + "dependencies": { + "@aws-sdk/core": "3.799.0", + "@aws-sdk/types": "3.775.0", + "@smithy/property-provider": "^4.0.2", + "@smithy/types": "^4.2.0", + "tslib": "^2.6.2" + }, + "devDependencies": { + "@tsconfig/recommended": "1.0.1", + "@types/node": "^18.19.69", + "concurrently": "7.0.0", + "downlevel-dts": "0.10.1", + "rimraf": "3.0.2", + "typescript": "~5.2.2" + }, + "types": "./dist-types/index.d.ts", + "engines": { + "node": ">=18.0.0" + }, + "typesVersions": { + "<4.0": { + "dist-types/*": [ + "dist-types/ts3.4/*" + ] + } + }, + "files": [ + "dist-*/**" + ], + "homepage": "https://github.com/aws/aws-sdk-js-v3/tree/main/packages/credential-provider-env", + "repository": { + "type": "git", + "url": "https://github.com/aws/aws-sdk-js-v3.git", + "directory": "packages/credential-provider-env" + } +} diff --git a/amplify/functions/deleteDocument/node_modules/@aws-sdk/credential-provider-http/README.md b/amplify/functions/deleteDocument/node_modules/@aws-sdk/credential-provider-http/README.md new file mode 100644 index 0000000..e8f19f8 --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@aws-sdk/credential-provider-http/README.md @@ -0,0 +1,10 @@ +# @aws-sdk/credential-provider-http + +[![NPM version](https://img.shields.io/npm/v/@aws-sdk/credential-provider-http/latest.svg)](https://www.npmjs.com/package/@aws-sdk/credential-provider-http) +[![NPM downloads](https://img.shields.io/npm/dm/@aws-sdk/credential-provider-http.svg)](https://www.npmjs.com/package/@aws-sdk/credential-provider-http) + +> An internal transitively required package. 
+ +## Usage + +See https://www.npmjs.com/package/@aws-sdk/credential-providers diff --git a/amplify/functions/deleteDocument/node_modules/@aws-sdk/credential-provider-http/dist-cjs/fromHttp/checkUrl.js b/amplify/functions/deleteDocument/node_modules/@aws-sdk/credential-provider-http/dist-cjs/fromHttp/checkUrl.js new file mode 100644 index 0000000..c4adb5f --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@aws-sdk/credential-provider-http/dist-cjs/fromHttp/checkUrl.js @@ -0,0 +1,46 @@ +"use strict"; +Object.defineProperty(exports, "__esModule", { value: true }); +exports.checkUrl = void 0; +const property_provider_1 = require("@smithy/property-provider"); +const LOOPBACK_CIDR_IPv4 = "127.0.0.0/8"; +const LOOPBACK_CIDR_IPv6 = "::1/128"; +const ECS_CONTAINER_HOST = "169.254.170.2"; +const EKS_CONTAINER_HOST_IPv4 = "169.254.170.23"; +const EKS_CONTAINER_HOST_IPv6 = "[fd00:ec2::23]"; +const checkUrl = (url, logger) => { + if (url.protocol === "https:") { + return; + } + if (url.hostname === ECS_CONTAINER_HOST || + url.hostname === EKS_CONTAINER_HOST_IPv4 || + url.hostname === EKS_CONTAINER_HOST_IPv6) { + return; + } + if (url.hostname.includes("[")) { + if (url.hostname === "[::1]" || url.hostname === "[0000:0000:0000:0000:0000:0000:0000:0001]") { + return; + } + } + else { + if (url.hostname === "localhost") { + return; + } + const ipComponents = url.hostname.split("."); + const inRange = (component) => { + const num = parseInt(component, 10); + return 0 <= num && num <= 255; + }; + if (ipComponents[0] === "127" && + inRange(ipComponents[1]) && + inRange(ipComponents[2]) && + inRange(ipComponents[3]) && + ipComponents.length === 4) { + return; + } + } + throw new property_provider_1.CredentialsProviderError(`URL not accepted. 
It must either be HTTPS or match one of the following: + - loopback CIDR 127.0.0.0/8 or [::1/128] + - ECS container host 169.254.170.2 + - EKS container host 169.254.170.23 or [fd00:ec2::23]`, { logger }); +}; +exports.checkUrl = checkUrl; diff --git a/amplify/functions/deleteDocument/node_modules/@aws-sdk/credential-provider-http/dist-cjs/fromHttp/fromHttp.browser.js b/amplify/functions/deleteDocument/node_modules/@aws-sdk/credential-provider-http/dist-cjs/fromHttp/fromHttp.browser.js new file mode 100644 index 0000000..d7c0efa --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@aws-sdk/credential-provider-http/dist-cjs/fromHttp/fromHttp.browser.js @@ -0,0 +1,31 @@ +"use strict"; +Object.defineProperty(exports, "__esModule", { value: true }); +exports.fromHttp = void 0; +const fetch_http_handler_1 = require("@smithy/fetch-http-handler"); +const property_provider_1 = require("@smithy/property-provider"); +const checkUrl_1 = require("./checkUrl"); +const requestHelpers_1 = require("./requestHelpers"); +const retry_wrapper_1 = require("./retry-wrapper"); +const fromHttp = (options = {}) => { + options.logger?.debug("@aws-sdk/credential-provider-http - fromHttp"); + let host; + const full = options.credentialsFullUri; + if (full) { + host = full; + } + else { + throw new property_provider_1.CredentialsProviderError("No HTTP credential provider host provided.", { logger: options.logger }); + } + const url = new URL(host); + (0, checkUrl_1.checkUrl)(url, options.logger); + const requestHandler = new fetch_http_handler_1.FetchHttpHandler(); + return (0, retry_wrapper_1.retryWrapper)(async () => { + const request = (0, requestHelpers_1.createGetRequest)(url); + if (options.authorizationToken) { + request.headers.Authorization = options.authorizationToken; + } + const result = await requestHandler.handle(request); + return (0, requestHelpers_1.getCredentials)(result.response); + }, options.maxRetries ?? 3, options.timeout ?? 
1000); +}; +exports.fromHttp = fromHttp; diff --git a/amplify/functions/deleteDocument/node_modules/@aws-sdk/credential-provider-http/dist-cjs/fromHttp/fromHttp.js b/amplify/functions/deleteDocument/node_modules/@aws-sdk/credential-provider-http/dist-cjs/fromHttp/fromHttp.js new file mode 100644 index 0000000..6e0269a --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@aws-sdk/credential-provider-http/dist-cjs/fromHttp/fromHttp.js @@ -0,0 +1,68 @@ +"use strict"; +Object.defineProperty(exports, "__esModule", { value: true }); +exports.fromHttp = void 0; +const tslib_1 = require("tslib"); +const client_1 = require("@aws-sdk/core/client"); +const node_http_handler_1 = require("@smithy/node-http-handler"); +const property_provider_1 = require("@smithy/property-provider"); +const promises_1 = tslib_1.__importDefault(require("fs/promises")); +const checkUrl_1 = require("./checkUrl"); +const requestHelpers_1 = require("./requestHelpers"); +const retry_wrapper_1 = require("./retry-wrapper"); +const AWS_CONTAINER_CREDENTIALS_RELATIVE_URI = "AWS_CONTAINER_CREDENTIALS_RELATIVE_URI"; +const DEFAULT_LINK_LOCAL_HOST = "http://169.254.170.2"; +const AWS_CONTAINER_CREDENTIALS_FULL_URI = "AWS_CONTAINER_CREDENTIALS_FULL_URI"; +const AWS_CONTAINER_AUTHORIZATION_TOKEN_FILE = "AWS_CONTAINER_AUTHORIZATION_TOKEN_FILE"; +const AWS_CONTAINER_AUTHORIZATION_TOKEN = "AWS_CONTAINER_AUTHORIZATION_TOKEN"; +const fromHttp = (options = {}) => { + options.logger?.debug("@aws-sdk/credential-provider-http - fromHttp"); + let host; + const relative = options.awsContainerCredentialsRelativeUri ?? process.env[AWS_CONTAINER_CREDENTIALS_RELATIVE_URI]; + const full = options.awsContainerCredentialsFullUri ?? process.env[AWS_CONTAINER_CREDENTIALS_FULL_URI]; + const token = options.awsContainerAuthorizationToken ?? process.env[AWS_CONTAINER_AUTHORIZATION_TOKEN]; + const tokenFile = options.awsContainerAuthorizationTokenFile ?? 
process.env[AWS_CONTAINER_AUTHORIZATION_TOKEN_FILE]; + const warn = options.logger?.constructor?.name === "NoOpLogger" || !options.logger ? console.warn : options.logger.warn; + if (relative && full) { + warn("@aws-sdk/credential-provider-http: " + + "you have set both awsContainerCredentialsRelativeUri and awsContainerCredentialsFullUri."); + warn("awsContainerCredentialsFullUri will take precedence."); + } + if (token && tokenFile) { + warn("@aws-sdk/credential-provider-http: " + + "you have set both awsContainerAuthorizationToken and awsContainerAuthorizationTokenFile."); + warn("awsContainerAuthorizationToken will take precedence."); + } + if (full) { + host = full; + } + else if (relative) { + host = `${DEFAULT_LINK_LOCAL_HOST}${relative}`; + } + else { + throw new property_provider_1.CredentialsProviderError(`No HTTP credential provider host provided. +Set AWS_CONTAINER_CREDENTIALS_FULL_URI or AWS_CONTAINER_CREDENTIALS_RELATIVE_URI.`, { logger: options.logger }); + } + const url = new URL(host); + (0, checkUrl_1.checkUrl)(url, options.logger); + const requestHandler = new node_http_handler_1.NodeHttpHandler({ + requestTimeout: options.timeout ?? 1000, + connectionTimeout: options.timeout ?? 1000, + }); + return (0, retry_wrapper_1.retryWrapper)(async () => { + const request = (0, requestHelpers_1.createGetRequest)(url); + if (token) { + request.headers.Authorization = token; + } + else if (tokenFile) { + request.headers.Authorization = (await promises_1.default.readFile(tokenFile)).toString(); + } + try { + const result = await requestHandler.handle(request); + return (0, requestHelpers_1.getCredentials)(result.response).then((creds) => (0, client_1.setCredentialFeature)(creds, "CREDENTIALS_HTTP", "z")); + } + catch (e) { + throw new property_provider_1.CredentialsProviderError(String(e), { logger: options.logger }); + } + }, options.maxRetries ?? 3, options.timeout ?? 
1000); +}; +exports.fromHttp = fromHttp; diff --git a/amplify/functions/deleteDocument/node_modules/@aws-sdk/credential-provider-http/dist-cjs/fromHttp/fromHttpTypes.js b/amplify/functions/deleteDocument/node_modules/@aws-sdk/credential-provider-http/dist-cjs/fromHttp/fromHttpTypes.js new file mode 100644 index 0000000..c8ad2e5 --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@aws-sdk/credential-provider-http/dist-cjs/fromHttp/fromHttpTypes.js @@ -0,0 +1,2 @@ +"use strict"; +Object.defineProperty(exports, "__esModule", { value: true }); diff --git a/amplify/functions/deleteDocument/node_modules/@aws-sdk/credential-provider-http/dist-cjs/fromHttp/requestHelpers.js b/amplify/functions/deleteDocument/node_modules/@aws-sdk/credential-provider-http/dist-cjs/fromHttp/requestHelpers.js new file mode 100644 index 0000000..5229d79 --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@aws-sdk/credential-provider-http/dist-cjs/fromHttp/requestHelpers.js @@ -0,0 +1,54 @@ +"use strict"; +Object.defineProperty(exports, "__esModule", { value: true }); +exports.getCredentials = exports.createGetRequest = void 0; +const property_provider_1 = require("@smithy/property-provider"); +const protocol_http_1 = require("@smithy/protocol-http"); +const smithy_client_1 = require("@smithy/smithy-client"); +const util_stream_1 = require("@smithy/util-stream"); +function createGetRequest(url) { + return new protocol_http_1.HttpRequest({ + protocol: url.protocol, + hostname: url.hostname, + port: Number(url.port), + path: url.pathname, + query: Array.from(url.searchParams.entries()).reduce((acc, [k, v]) => { + acc[k] = v; + return acc; + }, {}), + fragment: url.hash, + }); +} +exports.createGetRequest = createGetRequest; +async function getCredentials(response, logger) { + const stream = (0, util_stream_1.sdkStreamMixin)(response.body); + const str = await stream.transformToString(); + if (response.statusCode === 200) { + const parsed = JSON.parse(str); + if (typeof 
parsed.AccessKeyId !== "string" || + typeof parsed.SecretAccessKey !== "string" || + typeof parsed.Token !== "string" || + typeof parsed.Expiration !== "string") { + throw new property_provider_1.CredentialsProviderError("HTTP credential provider response not of the required format, an object matching: " + + "{ AccessKeyId: string, SecretAccessKey: string, Token: string, Expiration: string(rfc3339) }", { logger }); + } + return { + accessKeyId: parsed.AccessKeyId, + secretAccessKey: parsed.SecretAccessKey, + sessionToken: parsed.Token, + expiration: (0, smithy_client_1.parseRfc3339DateTime)(parsed.Expiration), + }; + } + if (response.statusCode >= 400 && response.statusCode < 500) { + let parsedBody = {}; + try { + parsedBody = JSON.parse(str); + } + catch (e) { } + throw Object.assign(new property_provider_1.CredentialsProviderError(`Server responded with status: ${response.statusCode}`, { logger }), { + Code: parsedBody.Code, + Message: parsedBody.Message, + }); + } + throw new property_provider_1.CredentialsProviderError(`Server responded with status: ${response.statusCode}`, { logger }); +} +exports.getCredentials = getCredentials; diff --git a/amplify/functions/deleteDocument/node_modules/@aws-sdk/credential-provider-http/dist-cjs/fromHttp/retry-wrapper.js b/amplify/functions/deleteDocument/node_modules/@aws-sdk/credential-provider-http/dist-cjs/fromHttp/retry-wrapper.js new file mode 100644 index 0000000..b99b2ef --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@aws-sdk/credential-provider-http/dist-cjs/fromHttp/retry-wrapper.js @@ -0,0 +1,17 @@ +"use strict"; +Object.defineProperty(exports, "__esModule", { value: true }); +exports.retryWrapper = void 0; +const retryWrapper = (toRetry, maxRetries, delayMs) => { + return async () => { + for (let i = 0; i < maxRetries; ++i) { + try { + return await toRetry(); + } + catch (e) { + await new Promise((resolve) => setTimeout(resolve, delayMs)); + } + } + return await toRetry(); + }; +}; 
+exports.retryWrapper = retryWrapper; diff --git a/amplify/functions/deleteDocument/node_modules/@aws-sdk/credential-provider-http/dist-cjs/index.browser.js b/amplify/functions/deleteDocument/node_modules/@aws-sdk/credential-provider-http/dist-cjs/index.browser.js new file mode 100644 index 0000000..9300747 --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@aws-sdk/credential-provider-http/dist-cjs/index.browser.js @@ -0,0 +1,5 @@ +"use strict"; +Object.defineProperty(exports, "__esModule", { value: true }); +exports.fromHttp = void 0; +var fromHttp_browser_1 = require("./fromHttp/fromHttp.browser"); +Object.defineProperty(exports, "fromHttp", { enumerable: true, get: function () { return fromHttp_browser_1.fromHttp; } }); diff --git a/amplify/functions/deleteDocument/node_modules/@aws-sdk/credential-provider-http/dist-cjs/index.js b/amplify/functions/deleteDocument/node_modules/@aws-sdk/credential-provider-http/dist-cjs/index.js new file mode 100644 index 0000000..0286ea0 --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@aws-sdk/credential-provider-http/dist-cjs/index.js @@ -0,0 +1,5 @@ +"use strict"; +Object.defineProperty(exports, "__esModule", { value: true }); +exports.fromHttp = void 0; +var fromHttp_1 = require("./fromHttp/fromHttp"); +Object.defineProperty(exports, "fromHttp", { enumerable: true, get: function () { return fromHttp_1.fromHttp; } }); diff --git a/amplify/functions/deleteDocument/node_modules/@aws-sdk/credential-provider-http/dist-es/fromHttp/checkUrl.js b/amplify/functions/deleteDocument/node_modules/@aws-sdk/credential-provider-http/dist-es/fromHttp/checkUrl.js new file mode 100644 index 0000000..2a42ed7 --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@aws-sdk/credential-provider-http/dist-es/fromHttp/checkUrl.js @@ -0,0 +1,42 @@ +import { CredentialsProviderError } from "@smithy/property-provider"; +const LOOPBACK_CIDR_IPv4 = "127.0.0.0/8"; +const LOOPBACK_CIDR_IPv6 = "::1/128"; +const 
ECS_CONTAINER_HOST = "169.254.170.2"; +const EKS_CONTAINER_HOST_IPv4 = "169.254.170.23"; +const EKS_CONTAINER_HOST_IPv6 = "[fd00:ec2::23]"; +export const checkUrl = (url, logger) => { + if (url.protocol === "https:") { + return; + } + if (url.hostname === ECS_CONTAINER_HOST || + url.hostname === EKS_CONTAINER_HOST_IPv4 || + url.hostname === EKS_CONTAINER_HOST_IPv6) { + return; + } + if (url.hostname.includes("[")) { + if (url.hostname === "[::1]" || url.hostname === "[0000:0000:0000:0000:0000:0000:0000:0001]") { + return; + } + } + else { + if (url.hostname === "localhost") { + return; + } + const ipComponents = url.hostname.split("."); + const inRange = (component) => { + const num = parseInt(component, 10); + return 0 <= num && num <= 255; + }; + if (ipComponents[0] === "127" && + inRange(ipComponents[1]) && + inRange(ipComponents[2]) && + inRange(ipComponents[3]) && + ipComponents.length === 4) { + return; + } + } + throw new CredentialsProviderError(`URL not accepted. It must either be HTTPS or match one of the following: + - loopback CIDR 127.0.0.0/8 or [::1/128] + - ECS container host 169.254.170.2 + - EKS container host 169.254.170.23 or [fd00:ec2::23]`, { logger }); +}; diff --git a/amplify/functions/deleteDocument/node_modules/@aws-sdk/credential-provider-http/dist-es/fromHttp/fromHttp.browser.js b/amplify/functions/deleteDocument/node_modules/@aws-sdk/credential-provider-http/dist-es/fromHttp/fromHttp.browser.js new file mode 100644 index 0000000..7189b92 --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@aws-sdk/credential-provider-http/dist-es/fromHttp/fromHttp.browser.js @@ -0,0 +1,27 @@ +import { FetchHttpHandler } from "@smithy/fetch-http-handler"; +import { CredentialsProviderError } from "@smithy/property-provider"; +import { checkUrl } from "./checkUrl"; +import { createGetRequest, getCredentials } from "./requestHelpers"; +import { retryWrapper } from "./retry-wrapper"; +export const fromHttp = (options = {}) => { + 
options.logger?.debug("@aws-sdk/credential-provider-http - fromHttp"); + let host; + const full = options.credentialsFullUri; + if (full) { + host = full; + } + else { + throw new CredentialsProviderError("No HTTP credential provider host provided.", { logger: options.logger }); + } + const url = new URL(host); + checkUrl(url, options.logger); + const requestHandler = new FetchHttpHandler(); + return retryWrapper(async () => { + const request = createGetRequest(url); + if (options.authorizationToken) { + request.headers.Authorization = options.authorizationToken; + } + const result = await requestHandler.handle(request); + return getCredentials(result.response); + }, options.maxRetries ?? 3, options.timeout ?? 1000); +}; diff --git a/amplify/functions/deleteDocument/node_modules/@aws-sdk/credential-provider-http/dist-es/fromHttp/fromHttp.js b/amplify/functions/deleteDocument/node_modules/@aws-sdk/credential-provider-http/dist-es/fromHttp/fromHttp.js new file mode 100644 index 0000000..36dd8a3 --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@aws-sdk/credential-provider-http/dist-es/fromHttp/fromHttp.js @@ -0,0 +1,63 @@ +import { setCredentialFeature } from "@aws-sdk/core/client"; +import { NodeHttpHandler } from "@smithy/node-http-handler"; +import { CredentialsProviderError } from "@smithy/property-provider"; +import fs from "fs/promises"; +import { checkUrl } from "./checkUrl"; +import { createGetRequest, getCredentials } from "./requestHelpers"; +import { retryWrapper } from "./retry-wrapper"; +const AWS_CONTAINER_CREDENTIALS_RELATIVE_URI = "AWS_CONTAINER_CREDENTIALS_RELATIVE_URI"; +const DEFAULT_LINK_LOCAL_HOST = "http://169.254.170.2"; +const AWS_CONTAINER_CREDENTIALS_FULL_URI = "AWS_CONTAINER_CREDENTIALS_FULL_URI"; +const AWS_CONTAINER_AUTHORIZATION_TOKEN_FILE = "AWS_CONTAINER_AUTHORIZATION_TOKEN_FILE"; +const AWS_CONTAINER_AUTHORIZATION_TOKEN = "AWS_CONTAINER_AUTHORIZATION_TOKEN"; +export const fromHttp = (options = {}) => { + 
options.logger?.debug("@aws-sdk/credential-provider-http - fromHttp"); + let host; + const relative = options.awsContainerCredentialsRelativeUri ?? process.env[AWS_CONTAINER_CREDENTIALS_RELATIVE_URI]; + const full = options.awsContainerCredentialsFullUri ?? process.env[AWS_CONTAINER_CREDENTIALS_FULL_URI]; + const token = options.awsContainerAuthorizationToken ?? process.env[AWS_CONTAINER_AUTHORIZATION_TOKEN]; + const tokenFile = options.awsContainerAuthorizationTokenFile ?? process.env[AWS_CONTAINER_AUTHORIZATION_TOKEN_FILE]; + const warn = options.logger?.constructor?.name === "NoOpLogger" || !options.logger ? console.warn : options.logger.warn; + if (relative && full) { + warn("@aws-sdk/credential-provider-http: " + + "you have set both awsContainerCredentialsRelativeUri and awsContainerCredentialsFullUri."); + warn("awsContainerCredentialsFullUri will take precedence."); + } + if (token && tokenFile) { + warn("@aws-sdk/credential-provider-http: " + + "you have set both awsContainerAuthorizationToken and awsContainerAuthorizationTokenFile."); + warn("awsContainerAuthorizationToken will take precedence."); + } + if (full) { + host = full; + } + else if (relative) { + host = `${DEFAULT_LINK_LOCAL_HOST}${relative}`; + } + else { + throw new CredentialsProviderError(`No HTTP credential provider host provided. +Set AWS_CONTAINER_CREDENTIALS_FULL_URI or AWS_CONTAINER_CREDENTIALS_RELATIVE_URI.`, { logger: options.logger }); + } + const url = new URL(host); + checkUrl(url, options.logger); + const requestHandler = new NodeHttpHandler({ + requestTimeout: options.timeout ?? 1000, + connectionTimeout: options.timeout ?? 
1000, + }); + return retryWrapper(async () => { + const request = createGetRequest(url); + if (token) { + request.headers.Authorization = token; + } + else if (tokenFile) { + request.headers.Authorization = (await fs.readFile(tokenFile)).toString(); + } + try { + const result = await requestHandler.handle(request); + return getCredentials(result.response).then((creds) => setCredentialFeature(creds, "CREDENTIALS_HTTP", "z")); + } + catch (e) { + throw new CredentialsProviderError(String(e), { logger: options.logger }); + } + }, options.maxRetries ?? 3, options.timeout ?? 1000); +}; diff --git a/amplify/functions/deleteDocument/node_modules/@aws-sdk/credential-provider-http/dist-es/fromHttp/fromHttpTypes.js b/amplify/functions/deleteDocument/node_modules/@aws-sdk/credential-provider-http/dist-es/fromHttp/fromHttpTypes.js new file mode 100644 index 0000000..cb0ff5c --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@aws-sdk/credential-provider-http/dist-es/fromHttp/fromHttpTypes.js @@ -0,0 +1 @@ +export {}; diff --git a/amplify/functions/deleteDocument/node_modules/@aws-sdk/credential-provider-http/dist-es/fromHttp/requestHelpers.js b/amplify/functions/deleteDocument/node_modules/@aws-sdk/credential-provider-http/dist-es/fromHttp/requestHelpers.js new file mode 100644 index 0000000..9e271ce --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@aws-sdk/credential-provider-http/dist-es/fromHttp/requestHelpers.js @@ -0,0 +1,49 @@ +import { CredentialsProviderError } from "@smithy/property-provider"; +import { HttpRequest } from "@smithy/protocol-http"; +import { parseRfc3339DateTime } from "@smithy/smithy-client"; +import { sdkStreamMixin } from "@smithy/util-stream"; +export function createGetRequest(url) { + return new HttpRequest({ + protocol: url.protocol, + hostname: url.hostname, + port: Number(url.port), + path: url.pathname, + query: Array.from(url.searchParams.entries()).reduce((acc, [k, v]) => { + acc[k] = v; + return acc; + }, {}), 
+ fragment: url.hash, + }); +} +export async function getCredentials(response, logger) { + const stream = sdkStreamMixin(response.body); + const str = await stream.transformToString(); + if (response.statusCode === 200) { + const parsed = JSON.parse(str); + if (typeof parsed.AccessKeyId !== "string" || + typeof parsed.SecretAccessKey !== "string" || + typeof parsed.Token !== "string" || + typeof parsed.Expiration !== "string") { + throw new CredentialsProviderError("HTTP credential provider response not of the required format, an object matching: " + + "{ AccessKeyId: string, SecretAccessKey: string, Token: string, Expiration: string(rfc3339) }", { logger }); + } + return { + accessKeyId: parsed.AccessKeyId, + secretAccessKey: parsed.SecretAccessKey, + sessionToken: parsed.Token, + expiration: parseRfc3339DateTime(parsed.Expiration), + }; + } + if (response.statusCode >= 400 && response.statusCode < 500) { + let parsedBody = {}; + try { + parsedBody = JSON.parse(str); + } + catch (e) { } + throw Object.assign(new CredentialsProviderError(`Server responded with status: ${response.statusCode}`, { logger }), { + Code: parsedBody.Code, + Message: parsedBody.Message, + }); + } + throw new CredentialsProviderError(`Server responded with status: ${response.statusCode}`, { logger }); +} diff --git a/amplify/functions/deleteDocument/node_modules/@aws-sdk/credential-provider-http/dist-es/fromHttp/retry-wrapper.js b/amplify/functions/deleteDocument/node_modules/@aws-sdk/credential-provider-http/dist-es/fromHttp/retry-wrapper.js new file mode 100644 index 0000000..7006f3c --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@aws-sdk/credential-provider-http/dist-es/fromHttp/retry-wrapper.js @@ -0,0 +1,13 @@ +export const retryWrapper = (toRetry, maxRetries, delayMs) => { + return async () => { + for (let i = 0; i < maxRetries; ++i) { + try { + return await toRetry(); + } + catch (e) { + await new Promise((resolve) => setTimeout(resolve, delayMs)); + } + } + return 
await toRetry(); + }; +}; diff --git a/amplify/functions/deleteDocument/node_modules/@aws-sdk/credential-provider-http/dist-es/index.browser.js b/amplify/functions/deleteDocument/node_modules/@aws-sdk/credential-provider-http/dist-es/index.browser.js new file mode 100644 index 0000000..98204c5 --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@aws-sdk/credential-provider-http/dist-es/index.browser.js @@ -0,0 +1 @@ +export { fromHttp } from "./fromHttp/fromHttp.browser"; diff --git a/amplify/functions/deleteDocument/node_modules/@aws-sdk/credential-provider-http/dist-es/index.js b/amplify/functions/deleteDocument/node_modules/@aws-sdk/credential-provider-http/dist-es/index.js new file mode 100644 index 0000000..2911386 --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@aws-sdk/credential-provider-http/dist-es/index.js @@ -0,0 +1 @@ +export { fromHttp } from "./fromHttp/fromHttp"; diff --git a/amplify/functions/deleteDocument/node_modules/@aws-sdk/credential-provider-http/dist-types/fromHttp/checkUrl.d.ts b/amplify/functions/deleteDocument/node_modules/@aws-sdk/credential-provider-http/dist-types/fromHttp/checkUrl.d.ts new file mode 100644 index 0000000..933b12c --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@aws-sdk/credential-provider-http/dist-types/fromHttp/checkUrl.d.ts @@ -0,0 +1,9 @@ +import { Logger } from "@smithy/types"; +/** + * @internal + * + * @param url - to be validated. + * @param logger - passed to CredentialsProviderError. + * @throws if not acceptable to this provider. 
+ */ +export declare const checkUrl: (url: URL, logger?: Logger) => void; diff --git a/amplify/functions/deleteDocument/node_modules/@aws-sdk/credential-provider-http/dist-types/fromHttp/fromHttp.browser.d.ts b/amplify/functions/deleteDocument/node_modules/@aws-sdk/credential-provider-http/dist-types/fromHttp/fromHttp.browser.d.ts new file mode 100644 index 0000000..cb3a03b --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@aws-sdk/credential-provider-http/dist-types/fromHttp/fromHttp.browser.d.ts @@ -0,0 +1,6 @@ +import { AwsCredentialIdentityProvider } from "@smithy/types"; +import type { FromHttpOptions } from "./fromHttpTypes"; +/** + * Creates a provider that gets credentials via HTTP request. + */ +export declare const fromHttp: (options?: FromHttpOptions) => AwsCredentialIdentityProvider; diff --git a/amplify/functions/deleteDocument/node_modules/@aws-sdk/credential-provider-http/dist-types/fromHttp/fromHttp.d.ts b/amplify/functions/deleteDocument/node_modules/@aws-sdk/credential-provider-http/dist-types/fromHttp/fromHttp.d.ts new file mode 100644 index 0000000..cb3a03b --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@aws-sdk/credential-provider-http/dist-types/fromHttp/fromHttp.d.ts @@ -0,0 +1,6 @@ +import { AwsCredentialIdentityProvider } from "@smithy/types"; +import type { FromHttpOptions } from "./fromHttpTypes"; +/** + * Creates a provider that gets credentials via HTTP request. 
+ */ +export declare const fromHttp: (options?: FromHttpOptions) => AwsCredentialIdentityProvider; diff --git a/amplify/functions/deleteDocument/node_modules/@aws-sdk/credential-provider-http/dist-types/fromHttp/fromHttpTypes.d.ts b/amplify/functions/deleteDocument/node_modules/@aws-sdk/credential-provider-http/dist-types/fromHttp/fromHttpTypes.d.ts new file mode 100644 index 0000000..b751ded --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@aws-sdk/credential-provider-http/dist-types/fromHttp/fromHttpTypes.d.ts @@ -0,0 +1,69 @@ +import type { CredentialProviderOptions } from "@aws-sdk/types"; +/** + * @public + * + * Input for the fromHttp function in the HTTP Credentials Provider for Node.js. + */ +export interface FromHttpOptions extends CredentialProviderOptions { + /** + * If this value is provided, it will be used as-is. + * + * For browser environments, use instead {@link credentialsFullUri}. + */ + awsContainerCredentialsFullUri?: string; + /** + * If this value is provided instead of the full URI, it + * will be appended to the default link local host of 169.254.170.2. + * + * Not supported in browsers. + */ + awsContainerCredentialsRelativeUri?: string; + /** + * Will be read on each credentials request to + * add an Authorization request header value. + * + * Not supported in browsers. + */ + awsContainerAuthorizationTokenFile?: string; + /** + * An alternative to awsContainerAuthorizationTokenFile, + * this is the token value itself. + * + * For browser environments, use instead {@link authorizationToken}. + */ + awsContainerAuthorizationToken?: string; + /** + * BROWSER ONLY. + * + * In browsers, a relative URI is not allowed, and a full URI must be provided. + * HTTPS is required. + * + * This value is required for the browser environment. + */ + credentialsFullUri?: string; + /** + * BROWSER ONLY. + * + * Providing this value will set an "Authorization" request + * header value on the GET request. 
+ */ + authorizationToken?: string; + /** + * Default is 3 retry attempts or 4 total attempts. + */ + maxRetries?: number; + /** + * Default is 1000ms. Time in milliseconds to spend waiting between retry attempts. + */ + timeout?: number; +} +/** + * @public + */ +export type HttpProviderCredentials = { + AccessKeyId: string; + SecretAccessKey: string; + Token: string; + AccountId?: string; + Expiration: string; +}; diff --git a/amplify/functions/deleteDocument/node_modules/@aws-sdk/credential-provider-http/dist-types/fromHttp/requestHelpers.d.ts b/amplify/functions/deleteDocument/node_modules/@aws-sdk/credential-provider-http/dist-types/fromHttp/requestHelpers.d.ts new file mode 100644 index 0000000..6d1c16e --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@aws-sdk/credential-provider-http/dist-types/fromHttp/requestHelpers.d.ts @@ -0,0 +1,11 @@ +import { AwsCredentialIdentity } from "@aws-sdk/types"; +import { HttpRequest } from "@smithy/protocol-http"; +import { HttpResponse, Logger } from "@smithy/types"; +/** + * @internal + */ +export declare function createGetRequest(url: URL): HttpRequest; +/** + * @internal + */ +export declare function getCredentials(response: HttpResponse, logger?: Logger): Promise; diff --git a/amplify/functions/deleteDocument/node_modules/@aws-sdk/credential-provider-http/dist-types/fromHttp/retry-wrapper.d.ts b/amplify/functions/deleteDocument/node_modules/@aws-sdk/credential-provider-http/dist-types/fromHttp/retry-wrapper.d.ts new file mode 100644 index 0000000..bf63add --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@aws-sdk/credential-provider-http/dist-types/fromHttp/retry-wrapper.d.ts @@ -0,0 +1,10 @@ +/** + * @internal + */ +export interface RetryableProvider { + (): Promise; +} +/** + * @internal + */ +export declare const retryWrapper: (toRetry: RetryableProvider, maxRetries: number, delayMs: number) => RetryableProvider; diff --git 
a/amplify/functions/deleteDocument/node_modules/@aws-sdk/credential-provider-http/dist-types/index.browser.d.ts b/amplify/functions/deleteDocument/node_modules/@aws-sdk/credential-provider-http/dist-types/index.browser.d.ts new file mode 100644 index 0000000..2a9e4ec --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@aws-sdk/credential-provider-http/dist-types/index.browser.d.ts @@ -0,0 +1,2 @@ +export { fromHttp } from "./fromHttp/fromHttp.browser"; +export type { FromHttpOptions, HttpProviderCredentials } from "./fromHttp/fromHttpTypes"; diff --git a/amplify/functions/deleteDocument/node_modules/@aws-sdk/credential-provider-http/dist-types/index.d.ts b/amplify/functions/deleteDocument/node_modules/@aws-sdk/credential-provider-http/dist-types/index.d.ts new file mode 100644 index 0000000..b1e9985 --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@aws-sdk/credential-provider-http/dist-types/index.d.ts @@ -0,0 +1,2 @@ +export { fromHttp } from "./fromHttp/fromHttp"; +export type { FromHttpOptions, HttpProviderCredentials } from "./fromHttp/fromHttpTypes"; diff --git a/amplify/functions/deleteDocument/node_modules/@aws-sdk/credential-provider-http/dist-types/ts3.4/fromHttp/checkUrl.d.ts b/amplify/functions/deleteDocument/node_modules/@aws-sdk/credential-provider-http/dist-types/ts3.4/fromHttp/checkUrl.d.ts new file mode 100644 index 0000000..9f518b0 --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@aws-sdk/credential-provider-http/dist-types/ts3.4/fromHttp/checkUrl.d.ts @@ -0,0 +1,2 @@ +import { Logger } from "@smithy/types"; +export declare const checkUrl: (url: URL, logger?: Logger) => void; diff --git a/amplify/functions/deleteDocument/node_modules/@aws-sdk/credential-provider-http/dist-types/ts3.4/fromHttp/fromHttp.browser.d.ts b/amplify/functions/deleteDocument/node_modules/@aws-sdk/credential-provider-http/dist-types/ts3.4/fromHttp/fromHttp.browser.d.ts new file mode 100644 index 0000000..00f1506 --- /dev/null 
+++ b/amplify/functions/deleteDocument/node_modules/@aws-sdk/credential-provider-http/dist-types/ts3.4/fromHttp/fromHttp.browser.d.ts @@ -0,0 +1,5 @@ +import { AwsCredentialIdentityProvider } from "@smithy/types"; +import { FromHttpOptions } from "./fromHttpTypes"; +export declare const fromHttp: ( + options?: FromHttpOptions +) => AwsCredentialIdentityProvider; diff --git a/amplify/functions/deleteDocument/node_modules/@aws-sdk/credential-provider-http/dist-types/ts3.4/fromHttp/fromHttp.d.ts b/amplify/functions/deleteDocument/node_modules/@aws-sdk/credential-provider-http/dist-types/ts3.4/fromHttp/fromHttp.d.ts new file mode 100644 index 0000000..00f1506 --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@aws-sdk/credential-provider-http/dist-types/ts3.4/fromHttp/fromHttp.d.ts @@ -0,0 +1,5 @@ +import { AwsCredentialIdentityProvider } from "@smithy/types"; +import { FromHttpOptions } from "./fromHttpTypes"; +export declare const fromHttp: ( + options?: FromHttpOptions +) => AwsCredentialIdentityProvider; diff --git a/amplify/functions/deleteDocument/node_modules/@aws-sdk/credential-provider-http/dist-types/ts3.4/fromHttp/fromHttpTypes.d.ts b/amplify/functions/deleteDocument/node_modules/@aws-sdk/credential-provider-http/dist-types/ts3.4/fromHttp/fromHttpTypes.d.ts new file mode 100644 index 0000000..767b6b0 --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@aws-sdk/credential-provider-http/dist-types/ts3.4/fromHttp/fromHttpTypes.d.ts @@ -0,0 +1,18 @@ +import { CredentialProviderOptions } from "@aws-sdk/types"; +export interface FromHttpOptions extends CredentialProviderOptions { + awsContainerCredentialsFullUri?: string; + awsContainerCredentialsRelativeUri?: string; + awsContainerAuthorizationTokenFile?: string; + awsContainerAuthorizationToken?: string; + credentialsFullUri?: string; + authorizationToken?: string; + maxRetries?: number; + timeout?: number; +} +export type HttpProviderCredentials = { + AccessKeyId: string; + 
SecretAccessKey: string; + Token: string; + AccountId?: string; + Expiration: string; +}; diff --git a/amplify/functions/deleteDocument/node_modules/@aws-sdk/credential-provider-http/dist-types/ts3.4/fromHttp/requestHelpers.d.ts b/amplify/functions/deleteDocument/node_modules/@aws-sdk/credential-provider-http/dist-types/ts3.4/fromHttp/requestHelpers.d.ts new file mode 100644 index 0000000..68a3285 --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@aws-sdk/credential-provider-http/dist-types/ts3.4/fromHttp/requestHelpers.d.ts @@ -0,0 +1,8 @@ +import { AwsCredentialIdentity } from "@aws-sdk/types"; +import { HttpRequest } from "@smithy/protocol-http"; +import { HttpResponse, Logger } from "@smithy/types"; +export declare function createGetRequest(url: URL): HttpRequest; +export declare function getCredentials( + response: HttpResponse, + logger?: Logger +): Promise; diff --git a/amplify/functions/deleteDocument/node_modules/@aws-sdk/credential-provider-http/dist-types/ts3.4/fromHttp/retry-wrapper.d.ts b/amplify/functions/deleteDocument/node_modules/@aws-sdk/credential-provider-http/dist-types/ts3.4/fromHttp/retry-wrapper.d.ts new file mode 100644 index 0000000..f992038 --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@aws-sdk/credential-provider-http/dist-types/ts3.4/fromHttp/retry-wrapper.d.ts @@ -0,0 +1,8 @@ +export interface RetryableProvider { + (): Promise; +} +export declare const retryWrapper: ( + toRetry: RetryableProvider, + maxRetries: number, + delayMs: number +) => RetryableProvider; diff --git a/amplify/functions/deleteDocument/node_modules/@aws-sdk/credential-provider-http/dist-types/ts3.4/index.browser.d.ts b/amplify/functions/deleteDocument/node_modules/@aws-sdk/credential-provider-http/dist-types/ts3.4/index.browser.d.ts new file mode 100644 index 0000000..40696b9 --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@aws-sdk/credential-provider-http/dist-types/ts3.4/index.browser.d.ts @@ -0,0 +1,5 @@ 
+export { fromHttp } from "./fromHttp/fromHttp.browser"; +export { + FromHttpOptions, + HttpProviderCredentials, +} from "./fromHttp/fromHttpTypes"; diff --git a/amplify/functions/deleteDocument/node_modules/@aws-sdk/credential-provider-http/dist-types/ts3.4/index.d.ts b/amplify/functions/deleteDocument/node_modules/@aws-sdk/credential-provider-http/dist-types/ts3.4/index.d.ts new file mode 100644 index 0000000..560256f --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@aws-sdk/credential-provider-http/dist-types/ts3.4/index.d.ts @@ -0,0 +1,5 @@ +export { fromHttp } from "./fromHttp/fromHttp"; +export { + FromHttpOptions, + HttpProviderCredentials, +} from "./fromHttp/fromHttpTypes"; diff --git a/amplify/functions/deleteDocument/node_modules/@aws-sdk/credential-provider-http/package.json b/amplify/functions/deleteDocument/node_modules/@aws-sdk/credential-provider-http/package.json new file mode 100644 index 0000000..2ad154b --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@aws-sdk/credential-provider-http/package.json @@ -0,0 +1,69 @@ +{ + "name": "@aws-sdk/credential-provider-http", + "version": "3.799.0", + "description": "AWS credential provider for containers and HTTP sources", + "main": "./dist-cjs/index.js", + "module": "./dist-es/index.js", + "browser": "./dist-es/index.browser.js", + "react-native": "./dist-es/index.browser.js", + "scripts": { + "build": "concurrently 'yarn:build:cjs' 'yarn:build:es' 'yarn:build:types'", + "build:cjs": "node ../../scripts/compilation/inline credential-provider-http", + "build:es": "tsc -p tsconfig.es.json", + "build:include:deps": "lerna run --scope $npm_package_name --include-dependencies build", + "build:types": "tsc -p tsconfig.types.json", + "build:types:downlevel": "downlevel-dts dist-types dist-types/ts3.4", + "clean": "rimraf ./dist-* && rimraf *.tsbuildinfo", + "test": "yarn g:vitest run", + "test:watch": "yarn g:vitest watch" + }, + "keywords": [ + "aws", + "credentials" + ], + 
"author": { + "name": "AWS SDK for JavaScript Team", + "url": "https://aws.amazon.com/javascript/" + }, + "license": "Apache-2.0", + "dependencies": { + "@aws-sdk/core": "3.799.0", + "@aws-sdk/types": "3.775.0", + "@smithy/fetch-http-handler": "^5.0.2", + "@smithy/node-http-handler": "^4.0.4", + "@smithy/property-provider": "^4.0.2", + "@smithy/protocol-http": "^5.1.0", + "@smithy/smithy-client": "^4.2.1", + "@smithy/types": "^4.2.0", + "@smithy/util-stream": "^4.2.0", + "tslib": "^2.6.2" + }, + "devDependencies": { + "@tsconfig/recommended": "1.0.1", + "@types/node": "^18.19.69", + "concurrently": "7.0.0", + "downlevel-dts": "0.10.1", + "rimraf": "3.0.2", + "typescript": "~5.2.2" + }, + "types": "./dist-types/index.d.ts", + "engines": { + "node": ">=18.0.0" + }, + "typesVersions": { + "<4.0": { + "dist-types/*": [ + "dist-types/ts3.4/*" + ] + } + }, + "files": [ + "dist-*/**" + ], + "homepage": "https://github.com/aws/aws-sdk-js-v3/tree/main/packages/credential-provider-http", + "repository": { + "type": "git", + "url": "https://github.com/aws/aws-sdk-js-v3.git", + "directory": "packages/credential-provider-http" + } +} diff --git a/amplify/functions/deleteDocument/node_modules/@aws-sdk/credential-provider-ini/LICENSE b/amplify/functions/deleteDocument/node_modules/@aws-sdk/credential-provider-ini/LICENSE new file mode 100644 index 0000000..7b6491b --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@aws-sdk/credential-provider-ini/LICENSE @@ -0,0 +1,201 @@ +Apache License + Version 2.0, January 2004 + http://www.apache.org/licenses/ + + TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION + + 1. Definitions. + + "License" shall mean the terms and conditions for use, reproduction, + and distribution as defined by Sections 1 through 9 of this document. + + "Licensor" shall mean the copyright owner or entity authorized by + the copyright owner that is granting the License. 
+ + "Legal Entity" shall mean the union of the acting entity and all + other entities that control, are controlled by, or are under common + control with that entity. For the purposes of this definition, + "control" means (i) the power, direct or indirect, to cause the + direction or management of such entity, whether by contract or + otherwise, or (ii) ownership of fifty percent (50%) or more of the + outstanding shares, or (iii) beneficial ownership of such entity. + + "You" (or "Your") shall mean an individual or Legal Entity + exercising permissions granted by this License. + + "Source" form shall mean the preferred form for making modifications, + including but not limited to software source code, documentation + source, and configuration files. + + "Object" form shall mean any form resulting from mechanical + transformation or translation of a Source form, including but + not limited to compiled object code, generated documentation, + and conversions to other media types. + + "Work" shall mean the work of authorship, whether in Source or + Object form, made available under the License, as indicated by a + copyright notice that is included in or attached to the work + (an example is provided in the Appendix below). + + "Derivative Works" shall mean any work, whether in Source or Object + form, that is based on (or derived from) the Work and for which the + editorial revisions, annotations, elaborations, or other modifications + represent, as a whole, an original work of authorship. For the purposes + of this License, Derivative Works shall not include works that remain + separable from, or merely link (or bind by name) to the interfaces of, + the Work and Derivative Works thereof. 
+ + "Contribution" shall mean any work of authorship, including + the original version of the Work and any modifications or additions + to that Work or Derivative Works thereof, that is intentionally + submitted to Licensor for inclusion in the Work by the copyright owner + or by an individual or Legal Entity authorized to submit on behalf of + the copyright owner. For the purposes of this definition, "submitted" + means any form of electronic, verbal, or written communication sent + to the Licensor or its representatives, including but not limited to + communication on electronic mailing lists, source code control systems, + and issue tracking systems that are managed by, or on behalf of, the + Licensor for the purpose of discussing and improving the Work, but + excluding communication that is conspicuously marked or otherwise + designated in writing by the copyright owner as "Not a Contribution." + + "Contributor" shall mean Licensor and any individual or Legal Entity + on behalf of whom a Contribution has been received by Licensor and + subsequently incorporated within the Work. + + 2. Grant of Copyright License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + copyright license to reproduce, prepare Derivative Works of, + publicly display, publicly perform, sublicense, and distribute the + Work and such Derivative Works in Source or Object form. + + 3. Grant of Patent License. 
Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + (except as stated in this section) patent license to make, have made, + use, offer to sell, sell, import, and otherwise transfer the Work, + where such license applies only to those patent claims licensable + by such Contributor that are necessarily infringed by their + Contribution(s) alone or by combination of their Contribution(s) + with the Work to which such Contribution(s) was submitted. If You + institute patent litigation against any entity (including a + cross-claim or counterclaim in a lawsuit) alleging that the Work + or a Contribution incorporated within the Work constitutes direct + or contributory patent infringement, then any patent licenses + granted to You under this License for that Work shall terminate + as of the date such litigation is filed. + + 4. Redistribution. You may reproduce and distribute copies of the + Work or Derivative Works thereof in any medium, with or without + modifications, and in Source or Object form, provided that You + meet the following conditions: + + (a) You must give any other recipients of the Work or + Derivative Works a copy of this License; and + + (b) You must cause any modified files to carry prominent notices + stating that You changed the files; and + + (c) You must retain, in the Source form of any Derivative Works + that You distribute, all copyright, patent, trademark, and + attribution notices from the Source form of the Work, + excluding those notices that do not pertain to any part of + the Derivative Works; and + + (d) If the Work includes a "NOTICE" text file as part of its + distribution, then any Derivative Works that You distribute must + include a readable copy of the attribution notices contained + within such NOTICE file, excluding those notices that do not + pertain to any part of the Derivative Works, in at least one + of 
the following places: within a NOTICE text file distributed + as part of the Derivative Works; within the Source form or + documentation, if provided along with the Derivative Works; or, + within a display generated by the Derivative Works, if and + wherever such third-party notices normally appear. The contents + of the NOTICE file are for informational purposes only and + do not modify the License. You may add Your own attribution + notices within Derivative Works that You distribute, alongside + or as an addendum to the NOTICE text from the Work, provided + that such additional attribution notices cannot be construed + as modifying the License. + + You may add Your own copyright statement to Your modifications and + may provide additional or different license terms and conditions + for use, reproduction, or distribution of Your modifications, or + for any such Derivative Works as a whole, provided Your use, + reproduction, and distribution of the Work otherwise complies with + the conditions stated in this License. + + 5. Submission of Contributions. Unless You explicitly state otherwise, + any Contribution intentionally submitted for inclusion in the Work + by You to the Licensor shall be under the terms and conditions of + this License, without any additional terms or conditions. + Notwithstanding the above, nothing herein shall supersede or modify + the terms of any separate license agreement you may have executed + with Licensor regarding such Contributions. + + 6. Trademarks. This License does not grant permission to use the trade + names, trademarks, service marks, or product names of the Licensor, + except as required for reasonable and customary use in describing the + origin of the Work and reproducing the content of the NOTICE file. + + 7. Disclaimer of Warranty. 
Unless required by applicable law or + agreed to in writing, Licensor provides the Work (and each + Contributor provides its Contributions) on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or + implied, including, without limitation, any warranties or conditions + of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A + PARTICULAR PURPOSE. You are solely responsible for determining the + appropriateness of using or redistributing the Work and assume any + risks associated with Your exercise of permissions under this License. + + 8. Limitation of Liability. In no event and under no legal theory, + whether in tort (including negligence), contract, or otherwise, + unless required by applicable law (such as deliberate and grossly + negligent acts) or agreed to in writing, shall any Contributor be + liable to You for damages, including any direct, indirect, special, + incidental, or consequential damages of any character arising as a + result of this License or out of the use or inability to use the + Work (including but not limited to damages for loss of goodwill, + work stoppage, computer failure or malfunction, or any and all + other commercial damages or losses), even if such Contributor + has been advised of the possibility of such damages. + + 9. Accepting Warranty or Additional Liability. While redistributing + the Work or Derivative Works thereof, You may choose to offer, + and charge a fee for, acceptance of support, warranty, indemnity, + or other liability obligations and/or rights consistent with this + License. However, in accepting such obligations, You may act only + on Your own behalf and on Your sole responsibility, not on behalf + of any other Contributor, and only if You agree to indemnify, + defend, and hold each Contributor harmless for any liability + incurred by, or claims asserted against, such Contributor by reason + of your accepting any such warranty or additional liability. 
+ + END OF TERMS AND CONDITIONS + + APPENDIX: How to apply the Apache License to your work. + + To apply the Apache License to your work, attach the following + boilerplate notice, with the fields enclosed by brackets "{}" + replaced with your own identifying information. (Don't include + the brackets!) The text should be enclosed in the appropriate + comment syntax for the file format. We also recommend that a + file or class name and description of purpose be included on the + same "printed page" as the copyright notice for easier + identification within third-party archives. + + Copyright 2018-2020 Amazon.com, Inc. or its affiliates. All Rights Reserved. + + Licensed under the Apache License, Version 2.0 (the "License"); + you may not use this file except in compliance with the License. + You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + + Unless required by applicable law or agreed to in writing, software + distributed under the License is distributed on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + See the License for the specific language governing permissions and + limitations under the License. 
\ No newline at end of file diff --git a/amplify/functions/deleteDocument/node_modules/@aws-sdk/credential-provider-ini/README.md b/amplify/functions/deleteDocument/node_modules/@aws-sdk/credential-provider-ini/README.md new file mode 100644 index 0000000..b4f3af1 --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@aws-sdk/credential-provider-ini/README.md @@ -0,0 +1,11 @@ +# @aws-sdk/credential-provider-ini + +[![NPM version](https://img.shields.io/npm/v/@aws-sdk/credential-provider-ini/latest.svg)](https://www.npmjs.com/package/@aws-sdk/credential-provider-ini) +[![NPM downloads](https://img.shields.io/npm/dm/@aws-sdk/credential-provider-ini.svg)](https://www.npmjs.com/package/@aws-sdk/credential-provider-ini) + +> An internal package + +## Usage + +You probably shouldn't, at least directly. Please use [@aws-sdk/credential-providers](https://www.npmjs.com/package/@aws-sdk/credential-providers) +instead. diff --git a/amplify/functions/deleteDocument/node_modules/@aws-sdk/credential-provider-ini/dist-cjs/index.js b/amplify/functions/deleteDocument/node_modules/@aws-sdk/credential-provider-ini/dist-cjs/index.js new file mode 100644 index 0000000..e9b6049 --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@aws-sdk/credential-provider-ini/dist-cjs/index.js @@ -0,0 +1,276 @@ +"use strict"; +var __create = Object.create; +var __defProp = Object.defineProperty; +var __getOwnPropDesc = Object.getOwnPropertyDescriptor; +var __getOwnPropNames = Object.getOwnPropertyNames; +var __getProtoOf = Object.getPrototypeOf; +var __hasOwnProp = Object.prototype.hasOwnProperty; +var __name = (target, value) => __defProp(target, "name", { value, configurable: true }); +var __export = (target, all) => { + for (var name in all) + __defProp(target, name, { get: all[name], enumerable: true }); +}; +var __copyProps = (to, from, except, desc) => { + if (from && typeof from === "object" || typeof from === "function") { + for (let key of __getOwnPropNames(from)) + 
if (!__hasOwnProp.call(to, key) && key !== except) + __defProp(to, key, { get: () => from[key], enumerable: !(desc = __getOwnPropDesc(from, key)) || desc.enumerable }); + } + return to; +}; +var __toESM = (mod, isNodeMode, target) => (target = mod != null ? __create(__getProtoOf(mod)) : {}, __copyProps( + // If the importer is in node compatibility mode or this is not an ESM + // file that has been converted to a CommonJS file using a Babel- + // compatible transform (i.e. "__esModule" has not been set), then set + // "default" to the CommonJS "module.exports" for node compatibility. + isNodeMode || !mod || !mod.__esModule ? __defProp(target, "default", { value: mod, enumerable: true }) : target, + mod +)); +var __toCommonJS = (mod) => __copyProps(__defProp({}, "__esModule", { value: true }), mod); + +// src/index.ts +var index_exports = {}; +__export(index_exports, { + fromIni: () => fromIni +}); +module.exports = __toCommonJS(index_exports); + +// src/fromIni.ts + + +// src/resolveProfileData.ts + + +// src/resolveAssumeRoleCredentials.ts + + +var import_shared_ini_file_loader = require("@smithy/shared-ini-file-loader"); + +// src/resolveCredentialSource.ts +var import_client = require("@aws-sdk/core/client"); +var import_property_provider = require("@smithy/property-provider"); +var resolveCredentialSource = /* @__PURE__ */ __name((credentialSource, profileName, logger) => { + const sourceProvidersMap = { + EcsContainer: /* @__PURE__ */ __name(async (options) => { + const { fromHttp } = await Promise.resolve().then(() => __toESM(require("@aws-sdk/credential-provider-http"))); + const { fromContainerMetadata } = await Promise.resolve().then(() => __toESM(require("@smithy/credential-provider-imds"))); + logger?.debug("@aws-sdk/credential-provider-ini - credential_source is EcsContainer"); + return async () => (0, import_property_provider.chain)(fromHttp(options ?? 
{}), fromContainerMetadata(options))().then(setNamedProvider); + }, "EcsContainer"), + Ec2InstanceMetadata: /* @__PURE__ */ __name(async (options) => { + logger?.debug("@aws-sdk/credential-provider-ini - credential_source is Ec2InstanceMetadata"); + const { fromInstanceMetadata } = await Promise.resolve().then(() => __toESM(require("@smithy/credential-provider-imds"))); + return async () => fromInstanceMetadata(options)().then(setNamedProvider); + }, "Ec2InstanceMetadata"), + Environment: /* @__PURE__ */ __name(async (options) => { + logger?.debug("@aws-sdk/credential-provider-ini - credential_source is Environment"); + const { fromEnv } = await Promise.resolve().then(() => __toESM(require("@aws-sdk/credential-provider-env"))); + return async () => fromEnv(options)().then(setNamedProvider); + }, "Environment") + }; + if (credentialSource in sourceProvidersMap) { + return sourceProvidersMap[credentialSource]; + } else { + throw new import_property_provider.CredentialsProviderError( + `Unsupported credential source in profile ${profileName}. 
Got ${credentialSource}, expected EcsContainer or Ec2InstanceMetadata or Environment.`, + { logger } + ); + } +}, "resolveCredentialSource"); +var setNamedProvider = /* @__PURE__ */ __name((creds) => (0, import_client.setCredentialFeature)(creds, "CREDENTIALS_PROFILE_NAMED_PROVIDER", "p"), "setNamedProvider"); + +// src/resolveAssumeRoleCredentials.ts +var isAssumeRoleProfile = /* @__PURE__ */ __name((arg, { profile = "default", logger } = {}) => { + return Boolean(arg) && typeof arg === "object" && typeof arg.role_arn === "string" && ["undefined", "string"].indexOf(typeof arg.role_session_name) > -1 && ["undefined", "string"].indexOf(typeof arg.external_id) > -1 && ["undefined", "string"].indexOf(typeof arg.mfa_serial) > -1 && (isAssumeRoleWithSourceProfile(arg, { profile, logger }) || isCredentialSourceProfile(arg, { profile, logger })); +}, "isAssumeRoleProfile"); +var isAssumeRoleWithSourceProfile = /* @__PURE__ */ __name((arg, { profile, logger }) => { + const withSourceProfile = typeof arg.source_profile === "string" && typeof arg.credential_source === "undefined"; + if (withSourceProfile) { + logger?.debug?.(` ${profile} isAssumeRoleWithSourceProfile source_profile=${arg.source_profile}`); + } + return withSourceProfile; +}, "isAssumeRoleWithSourceProfile"); +var isCredentialSourceProfile = /* @__PURE__ */ __name((arg, { profile, logger }) => { + const withProviderProfile = typeof arg.credential_source === "string" && typeof arg.source_profile === "undefined"; + if (withProviderProfile) { + logger?.debug?.(` ${profile} isCredentialSourceProfile credential_source=${arg.credential_source}`); + } + return withProviderProfile; +}, "isCredentialSourceProfile"); +var resolveAssumeRoleCredentials = /* @__PURE__ */ __name(async (profileName, profiles, options, visitedProfiles = {}) => { + options.logger?.debug("@aws-sdk/credential-provider-ini - resolveAssumeRoleCredentials (STS)"); + const profileData = profiles[profileName]; + const { source_profile, region } = 
profileData; + if (!options.roleAssumer) { + const { getDefaultRoleAssumer } = await Promise.resolve().then(() => __toESM(require("@aws-sdk/nested-clients/sts"))); + options.roleAssumer = getDefaultRoleAssumer( + { + ...options.clientConfig, + credentialProviderLogger: options.logger, + parentClientConfig: { + ...options?.parentClientConfig, + region: region ?? options?.parentClientConfig?.region + } + }, + options.clientPlugins + ); + } + if (source_profile && source_profile in visitedProfiles) { + throw new import_property_provider.CredentialsProviderError( + `Detected a cycle attempting to resolve credentials for profile ${(0, import_shared_ini_file_loader.getProfileName)(options)}. Profiles visited: ` + Object.keys(visitedProfiles).join(", "), + { logger: options.logger } + ); + } + options.logger?.debug( + `@aws-sdk/credential-provider-ini - finding credential resolver using ${source_profile ? `source_profile=[${source_profile}]` : `profile=[${profileName}]`}` + ); + const sourceCredsProvider = source_profile ? resolveProfileData( + source_profile, + profiles, + options, + { + ...visitedProfiles, + [source_profile]: true + }, + isCredentialSourceWithoutRoleArn(profiles[source_profile] ?? 
{}) + ) : (await resolveCredentialSource(profileData.credential_source, profileName, options.logger)(options))(); + if (isCredentialSourceWithoutRoleArn(profileData)) { + return sourceCredsProvider.then((creds) => (0, import_client.setCredentialFeature)(creds, "CREDENTIALS_PROFILE_SOURCE_PROFILE", "o")); + } else { + const params = { + RoleArn: profileData.role_arn, + RoleSessionName: profileData.role_session_name || `aws-sdk-js-${Date.now()}`, + ExternalId: profileData.external_id, + DurationSeconds: parseInt(profileData.duration_seconds || "3600", 10) + }; + const { mfa_serial } = profileData; + if (mfa_serial) { + if (!options.mfaCodeProvider) { + throw new import_property_provider.CredentialsProviderError( + `Profile ${profileName} requires multi-factor authentication, but no MFA code callback was provided.`, + { logger: options.logger, tryNextLink: false } + ); + } + params.SerialNumber = mfa_serial; + params.TokenCode = await options.mfaCodeProvider(mfa_serial); + } + const sourceCreds = await sourceCredsProvider; + return options.roleAssumer(sourceCreds, params).then( + (creds) => (0, import_client.setCredentialFeature)(creds, "CREDENTIALS_PROFILE_SOURCE_PROFILE", "o") + ); + } +}, "resolveAssumeRoleCredentials"); +var isCredentialSourceWithoutRoleArn = /* @__PURE__ */ __name((section) => { + return !section.role_arn && !!section.credential_source; +}, "isCredentialSourceWithoutRoleArn"); + +// src/resolveProcessCredentials.ts + +var isProcessProfile = /* @__PURE__ */ __name((arg) => Boolean(arg) && typeof arg === "object" && typeof arg.credential_process === "string", "isProcessProfile"); +var resolveProcessCredentials = /* @__PURE__ */ __name(async (options, profile) => Promise.resolve().then(() => __toESM(require("@aws-sdk/credential-provider-process"))).then( + ({ fromProcess }) => fromProcess({ + ...options, + profile + })().then((creds) => (0, import_client.setCredentialFeature)(creds, "CREDENTIALS_PROFILE_PROCESS", "v")) +), 
"resolveProcessCredentials"); + +// src/resolveSsoCredentials.ts + +var resolveSsoCredentials = /* @__PURE__ */ __name(async (profile, profileData, options = {}) => { + const { fromSSO } = await Promise.resolve().then(() => __toESM(require("@aws-sdk/credential-provider-sso"))); + return fromSSO({ + profile, + logger: options.logger, + parentClientConfig: options.parentClientConfig, + clientConfig: options.clientConfig + })().then((creds) => { + if (profileData.sso_session) { + return (0, import_client.setCredentialFeature)(creds, "CREDENTIALS_PROFILE_SSO", "r"); + } else { + return (0, import_client.setCredentialFeature)(creds, "CREDENTIALS_PROFILE_SSO_LEGACY", "t"); + } + }); +}, "resolveSsoCredentials"); +var isSsoProfile = /* @__PURE__ */ __name((arg) => arg && (typeof arg.sso_start_url === "string" || typeof arg.sso_account_id === "string" || typeof arg.sso_session === "string" || typeof arg.sso_region === "string" || typeof arg.sso_role_name === "string"), "isSsoProfile"); + +// src/resolveStaticCredentials.ts + +var isStaticCredsProfile = /* @__PURE__ */ __name((arg) => Boolean(arg) && typeof arg === "object" && typeof arg.aws_access_key_id === "string" && typeof arg.aws_secret_access_key === "string" && ["undefined", "string"].indexOf(typeof arg.aws_session_token) > -1 && ["undefined", "string"].indexOf(typeof arg.aws_account_id) > -1, "isStaticCredsProfile"); +var resolveStaticCredentials = /* @__PURE__ */ __name(async (profile, options) => { + options?.logger?.debug("@aws-sdk/credential-provider-ini - resolveStaticCredentials"); + const credentials = { + accessKeyId: profile.aws_access_key_id, + secretAccessKey: profile.aws_secret_access_key, + sessionToken: profile.aws_session_token, + ...profile.aws_credential_scope && { credentialScope: profile.aws_credential_scope }, + ...profile.aws_account_id && { accountId: profile.aws_account_id } + }; + return (0, import_client.setCredentialFeature)(credentials, "CREDENTIALS_PROFILE", "n"); +}, 
"resolveStaticCredentials"); + +// src/resolveWebIdentityCredentials.ts + +var isWebIdentityProfile = /* @__PURE__ */ __name((arg) => Boolean(arg) && typeof arg === "object" && typeof arg.web_identity_token_file === "string" && typeof arg.role_arn === "string" && ["undefined", "string"].indexOf(typeof arg.role_session_name) > -1, "isWebIdentityProfile"); +var resolveWebIdentityCredentials = /* @__PURE__ */ __name(async (profile, options) => Promise.resolve().then(() => __toESM(require("@aws-sdk/credential-provider-web-identity"))).then( + ({ fromTokenFile }) => fromTokenFile({ + webIdentityTokenFile: profile.web_identity_token_file, + roleArn: profile.role_arn, + roleSessionName: profile.role_session_name, + roleAssumerWithWebIdentity: options.roleAssumerWithWebIdentity, + logger: options.logger, + parentClientConfig: options.parentClientConfig + })().then((creds) => (0, import_client.setCredentialFeature)(creds, "CREDENTIALS_PROFILE_STS_WEB_ID_TOKEN", "q")) +), "resolveWebIdentityCredentials"); + +// src/resolveProfileData.ts +var resolveProfileData = /* @__PURE__ */ __name(async (profileName, profiles, options, visitedProfiles = {}, isAssumeRoleRecursiveCall = false) => { + const data = profiles[profileName]; + if (Object.keys(visitedProfiles).length > 0 && isStaticCredsProfile(data)) { + return resolveStaticCredentials(data, options); + } + if (isAssumeRoleRecursiveCall || isAssumeRoleProfile(data, { profile: profileName, logger: options.logger })) { + return resolveAssumeRoleCredentials(profileName, profiles, options, visitedProfiles); + } + if (isStaticCredsProfile(data)) { + return resolveStaticCredentials(data, options); + } + if (isWebIdentityProfile(data)) { + return resolveWebIdentityCredentials(data, options); + } + if (isProcessProfile(data)) { + return resolveProcessCredentials(options, profileName); + } + if (isSsoProfile(data)) { + return await resolveSsoCredentials(profileName, data, options); + } + throw new 
import_property_provider.CredentialsProviderError( + `Could not resolve credentials using profile: [${profileName}] in configuration/credentials file(s).`, + { logger: options.logger } + ); +}, "resolveProfileData"); + +// src/fromIni.ts +var fromIni = /* @__PURE__ */ __name((_init = {}) => async ({ callerClientConfig } = {}) => { + const init = { + ..._init, + parentClientConfig: { + ...callerClientConfig, + ..._init.parentClientConfig + } + }; + init.logger?.debug("@aws-sdk/credential-provider-ini - fromIni"); + const profiles = await (0, import_shared_ini_file_loader.parseKnownFiles)(init); + return resolveProfileData( + (0, import_shared_ini_file_loader.getProfileName)({ + profile: _init.profile ?? callerClientConfig?.profile + }), + profiles, + init + ); +}, "fromIni"); +// Annotate the CommonJS export names for ESM import in node: + +0 && (module.exports = { + fromIni +}); + diff --git a/amplify/functions/deleteDocument/node_modules/@aws-sdk/credential-provider-ini/dist-es/fromIni.js b/amplify/functions/deleteDocument/node_modules/@aws-sdk/credential-provider-ini/dist-es/fromIni.js new file mode 100644 index 0000000..ccf0397 --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@aws-sdk/credential-provider-ini/dist-es/fromIni.js @@ -0,0 +1,16 @@ +import { getProfileName, parseKnownFiles } from "@smithy/shared-ini-file-loader"; +import { resolveProfileData } from "./resolveProfileData"; +export const fromIni = (_init = {}) => async ({ callerClientConfig } = {}) => { + const init = { + ..._init, + parentClientConfig: { + ...callerClientConfig, + ..._init.parentClientConfig, + }, + }; + init.logger?.debug("@aws-sdk/credential-provider-ini - fromIni"); + const profiles = await parseKnownFiles(init); + return resolveProfileData(getProfileName({ + profile: _init.profile ?? 
callerClientConfig?.profile, + }), profiles, init); +}; diff --git a/amplify/functions/deleteDocument/node_modules/@aws-sdk/credential-provider-ini/dist-es/index.js b/amplify/functions/deleteDocument/node_modules/@aws-sdk/credential-provider-ini/dist-es/index.js new file mode 100644 index 0000000..b019131 --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@aws-sdk/credential-provider-ini/dist-es/index.js @@ -0,0 +1 @@ +export * from "./fromIni"; diff --git a/amplify/functions/deleteDocument/node_modules/@aws-sdk/credential-provider-ini/dist-es/resolveAssumeRoleCredentials.js b/amplify/functions/deleteDocument/node_modules/@aws-sdk/credential-provider-ini/dist-es/resolveAssumeRoleCredentials.js new file mode 100644 index 0000000..1411318 --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@aws-sdk/credential-provider-ini/dist-es/resolveAssumeRoleCredentials.js @@ -0,0 +1,80 @@ +import { setCredentialFeature } from "@aws-sdk/core/client"; +import { CredentialsProviderError } from "@smithy/property-provider"; +import { getProfileName } from "@smithy/shared-ini-file-loader"; +import { resolveCredentialSource } from "./resolveCredentialSource"; +import { resolveProfileData } from "./resolveProfileData"; +export const isAssumeRoleProfile = (arg, { profile = "default", logger } = {}) => { + return (Boolean(arg) && + typeof arg === "object" && + typeof arg.role_arn === "string" && + ["undefined", "string"].indexOf(typeof arg.role_session_name) > -1 && + ["undefined", "string"].indexOf(typeof arg.external_id) > -1 && + ["undefined", "string"].indexOf(typeof arg.mfa_serial) > -1 && + (isAssumeRoleWithSourceProfile(arg, { profile, logger }) || isCredentialSourceProfile(arg, { profile, logger }))); +}; +const isAssumeRoleWithSourceProfile = (arg, { profile, logger }) => { + const withSourceProfile = typeof arg.source_profile === "string" && typeof arg.credential_source === "undefined"; + if (withSourceProfile) { + logger?.debug?.(` ${profile} 
isAssumeRoleWithSourceProfile source_profile=${arg.source_profile}`); + } + return withSourceProfile; +}; +const isCredentialSourceProfile = (arg, { profile, logger }) => { + const withProviderProfile = typeof arg.credential_source === "string" && typeof arg.source_profile === "undefined"; + if (withProviderProfile) { + logger?.debug?.(` ${profile} isCredentialSourceProfile credential_source=${arg.credential_source}`); + } + return withProviderProfile; +}; +export const resolveAssumeRoleCredentials = async (profileName, profiles, options, visitedProfiles = {}) => { + options.logger?.debug("@aws-sdk/credential-provider-ini - resolveAssumeRoleCredentials (STS)"); + const profileData = profiles[profileName]; + const { source_profile, region } = profileData; + if (!options.roleAssumer) { + const { getDefaultRoleAssumer } = await import("@aws-sdk/nested-clients/sts"); + options.roleAssumer = getDefaultRoleAssumer({ + ...options.clientConfig, + credentialProviderLogger: options.logger, + parentClientConfig: { + ...options?.parentClientConfig, + region: region ?? options?.parentClientConfig?.region, + }, + }, options.clientPlugins); + } + if (source_profile && source_profile in visitedProfiles) { + throw new CredentialsProviderError(`Detected a cycle attempting to resolve credentials for profile` + + ` ${getProfileName(options)}. Profiles visited: ` + + Object.keys(visitedProfiles).join(", "), { logger: options.logger }); + } + options.logger?.debug(`@aws-sdk/credential-provider-ini - finding credential resolver using ${source_profile ? `source_profile=[${source_profile}]` : `profile=[${profileName}]`}`); + const sourceCredsProvider = source_profile + ? resolveProfileData(source_profile, profiles, options, { + ...visitedProfiles, + [source_profile]: true, + }, isCredentialSourceWithoutRoleArn(profiles[source_profile] ?? 
{})) + : (await resolveCredentialSource(profileData.credential_source, profileName, options.logger)(options))(); + if (isCredentialSourceWithoutRoleArn(profileData)) { + return sourceCredsProvider.then((creds) => setCredentialFeature(creds, "CREDENTIALS_PROFILE_SOURCE_PROFILE", "o")); + } + else { + const params = { + RoleArn: profileData.role_arn, + RoleSessionName: profileData.role_session_name || `aws-sdk-js-${Date.now()}`, + ExternalId: profileData.external_id, + DurationSeconds: parseInt(profileData.duration_seconds || "3600", 10), + }; + const { mfa_serial } = profileData; + if (mfa_serial) { + if (!options.mfaCodeProvider) { + throw new CredentialsProviderError(`Profile ${profileName} requires multi-factor authentication, but no MFA code callback was provided.`, { logger: options.logger, tryNextLink: false }); + } + params.SerialNumber = mfa_serial; + params.TokenCode = await options.mfaCodeProvider(mfa_serial); + } + const sourceCreds = await sourceCredsProvider; + return options.roleAssumer(sourceCreds, params).then((creds) => setCredentialFeature(creds, "CREDENTIALS_PROFILE_SOURCE_PROFILE", "o")); + } +}; +const isCredentialSourceWithoutRoleArn = (section) => { + return !section.role_arn && !!section.credential_source; +}; diff --git a/amplify/functions/deleteDocument/node_modules/@aws-sdk/credential-provider-ini/dist-es/resolveCredentialSource.js b/amplify/functions/deleteDocument/node_modules/@aws-sdk/credential-provider-ini/dist-es/resolveCredentialSource.js new file mode 100644 index 0000000..b004933 --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@aws-sdk/credential-provider-ini/dist-es/resolveCredentialSource.js @@ -0,0 +1,30 @@ +import { setCredentialFeature } from "@aws-sdk/core/client"; +import { chain, CredentialsProviderError } from "@smithy/property-provider"; +export const resolveCredentialSource = (credentialSource, profileName, logger) => { + const sourceProvidersMap = { + EcsContainer: async (options) => { + const { 
fromHttp } = await import("@aws-sdk/credential-provider-http"); + const { fromContainerMetadata } = await import("@smithy/credential-provider-imds"); + logger?.debug("@aws-sdk/credential-provider-ini - credential_source is EcsContainer"); + return async () => chain(fromHttp(options ?? {}), fromContainerMetadata(options))().then(setNamedProvider); + }, + Ec2InstanceMetadata: async (options) => { + logger?.debug("@aws-sdk/credential-provider-ini - credential_source is Ec2InstanceMetadata"); + const { fromInstanceMetadata } = await import("@smithy/credential-provider-imds"); + return async () => fromInstanceMetadata(options)().then(setNamedProvider); + }, + Environment: async (options) => { + logger?.debug("@aws-sdk/credential-provider-ini - credential_source is Environment"); + const { fromEnv } = await import("@aws-sdk/credential-provider-env"); + return async () => fromEnv(options)().then(setNamedProvider); + }, + }; + if (credentialSource in sourceProvidersMap) { + return sourceProvidersMap[credentialSource]; + } + else { + throw new CredentialsProviderError(`Unsupported credential source in profile ${profileName}. 
Got ${credentialSource}, ` + + `expected EcsContainer or Ec2InstanceMetadata or Environment.`, { logger }); + } +}; +const setNamedProvider = (creds) => setCredentialFeature(creds, "CREDENTIALS_PROFILE_NAMED_PROVIDER", "p"); diff --git a/amplify/functions/deleteDocument/node_modules/@aws-sdk/credential-provider-ini/dist-es/resolveProcessCredentials.js b/amplify/functions/deleteDocument/node_modules/@aws-sdk/credential-provider-ini/dist-es/resolveProcessCredentials.js new file mode 100644 index 0000000..5a9f975 --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@aws-sdk/credential-provider-ini/dist-es/resolveProcessCredentials.js @@ -0,0 +1,6 @@ +import { setCredentialFeature } from "@aws-sdk/core/client"; +export const isProcessProfile = (arg) => Boolean(arg) && typeof arg === "object" && typeof arg.credential_process === "string"; +export const resolveProcessCredentials = async (options, profile) => import("@aws-sdk/credential-provider-process").then(({ fromProcess }) => fromProcess({ + ...options, + profile, +})().then((creds) => setCredentialFeature(creds, "CREDENTIALS_PROFILE_PROCESS", "v"))); diff --git a/amplify/functions/deleteDocument/node_modules/@aws-sdk/credential-provider-ini/dist-es/resolveProfileData.js b/amplify/functions/deleteDocument/node_modules/@aws-sdk/credential-provider-ini/dist-es/resolveProfileData.js new file mode 100644 index 0000000..3e64e9e --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@aws-sdk/credential-provider-ini/dist-es/resolveProfileData.js @@ -0,0 +1,28 @@ +import { CredentialsProviderError } from "@smithy/property-provider"; +import { isAssumeRoleProfile, resolveAssumeRoleCredentials } from "./resolveAssumeRoleCredentials"; +import { isProcessProfile, resolveProcessCredentials } from "./resolveProcessCredentials"; +import { isSsoProfile, resolveSsoCredentials } from "./resolveSsoCredentials"; +import { isStaticCredsProfile, resolveStaticCredentials } from "./resolveStaticCredentials"; +import 
{ isWebIdentityProfile, resolveWebIdentityCredentials } from "./resolveWebIdentityCredentials"; +export const resolveProfileData = async (profileName, profiles, options, visitedProfiles = {}, isAssumeRoleRecursiveCall = false) => { + const data = profiles[profileName]; + if (Object.keys(visitedProfiles).length > 0 && isStaticCredsProfile(data)) { + return resolveStaticCredentials(data, options); + } + if (isAssumeRoleRecursiveCall || isAssumeRoleProfile(data, { profile: profileName, logger: options.logger })) { + return resolveAssumeRoleCredentials(profileName, profiles, options, visitedProfiles); + } + if (isStaticCredsProfile(data)) { + return resolveStaticCredentials(data, options); + } + if (isWebIdentityProfile(data)) { + return resolveWebIdentityCredentials(data, options); + } + if (isProcessProfile(data)) { + return resolveProcessCredentials(options, profileName); + } + if (isSsoProfile(data)) { + return await resolveSsoCredentials(profileName, data, options); + } + throw new CredentialsProviderError(`Could not resolve credentials using profile: [${profileName}] in configuration/credentials file(s).`, { logger: options.logger }); +}; diff --git a/amplify/functions/deleteDocument/node_modules/@aws-sdk/credential-provider-ini/dist-es/resolveSsoCredentials.js b/amplify/functions/deleteDocument/node_modules/@aws-sdk/credential-provider-ini/dist-es/resolveSsoCredentials.js new file mode 100644 index 0000000..5da74da --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@aws-sdk/credential-provider-ini/dist-es/resolveSsoCredentials.js @@ -0,0 +1,23 @@ +import { setCredentialFeature } from "@aws-sdk/core/client"; +export const resolveSsoCredentials = async (profile, profileData, options = {}) => { + const { fromSSO } = await import("@aws-sdk/credential-provider-sso"); + return fromSSO({ + profile, + logger: options.logger, + parentClientConfig: options.parentClientConfig, + clientConfig: options.clientConfig, + })().then((creds) => { + if 
(profileData.sso_session) { + return setCredentialFeature(creds, "CREDENTIALS_PROFILE_SSO", "r"); + } + else { + return setCredentialFeature(creds, "CREDENTIALS_PROFILE_SSO_LEGACY", "t"); + } + }); +}; +export const isSsoProfile = (arg) => arg && + (typeof arg.sso_start_url === "string" || + typeof arg.sso_account_id === "string" || + typeof arg.sso_session === "string" || + typeof arg.sso_region === "string" || + typeof arg.sso_role_name === "string"); diff --git a/amplify/functions/deleteDocument/node_modules/@aws-sdk/credential-provider-ini/dist-es/resolveStaticCredentials.js b/amplify/functions/deleteDocument/node_modules/@aws-sdk/credential-provider-ini/dist-es/resolveStaticCredentials.js new file mode 100644 index 0000000..c04435f --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@aws-sdk/credential-provider-ini/dist-es/resolveStaticCredentials.js @@ -0,0 +1,18 @@ +import { setCredentialFeature } from "@aws-sdk/core/client"; +export const isStaticCredsProfile = (arg) => Boolean(arg) && + typeof arg === "object" && + typeof arg.aws_access_key_id === "string" && + typeof arg.aws_secret_access_key === "string" && + ["undefined", "string"].indexOf(typeof arg.aws_session_token) > -1 && + ["undefined", "string"].indexOf(typeof arg.aws_account_id) > -1; +export const resolveStaticCredentials = async (profile, options) => { + options?.logger?.debug("@aws-sdk/credential-provider-ini - resolveStaticCredentials"); + const credentials = { + accessKeyId: profile.aws_access_key_id, + secretAccessKey: profile.aws_secret_access_key, + sessionToken: profile.aws_session_token, + ...(profile.aws_credential_scope && { credentialScope: profile.aws_credential_scope }), + ...(profile.aws_account_id && { accountId: profile.aws_account_id }), + }; + return setCredentialFeature(credentials, "CREDENTIALS_PROFILE", "n"); +}; diff --git a/amplify/functions/deleteDocument/node_modules/@aws-sdk/credential-provider-ini/dist-es/resolveWebIdentityCredentials.js 
b/amplify/functions/deleteDocument/node_modules/@aws-sdk/credential-provider-ini/dist-es/resolveWebIdentityCredentials.js new file mode 100644 index 0000000..10adfe7 --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@aws-sdk/credential-provider-ini/dist-es/resolveWebIdentityCredentials.js @@ -0,0 +1,14 @@ +import { setCredentialFeature } from "@aws-sdk/core/client"; +export const isWebIdentityProfile = (arg) => Boolean(arg) && + typeof arg === "object" && + typeof arg.web_identity_token_file === "string" && + typeof arg.role_arn === "string" && + ["undefined", "string"].indexOf(typeof arg.role_session_name) > -1; +export const resolveWebIdentityCredentials = async (profile, options) => import("@aws-sdk/credential-provider-web-identity").then(({ fromTokenFile }) => fromTokenFile({ + webIdentityTokenFile: profile.web_identity_token_file, + roleArn: profile.role_arn, + roleSessionName: profile.role_session_name, + roleAssumerWithWebIdentity: options.roleAssumerWithWebIdentity, + logger: options.logger, + parentClientConfig: options.parentClientConfig, +})().then((creds) => setCredentialFeature(creds, "CREDENTIALS_PROFILE_STS_WEB_ID_TOKEN", "q"))); diff --git a/amplify/functions/deleteDocument/node_modules/@aws-sdk/credential-provider-ini/dist-types/fromIni.d.ts b/amplify/functions/deleteDocument/node_modules/@aws-sdk/credential-provider-ini/dist-types/fromIni.d.ts new file mode 100644 index 0000000..5554125 --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@aws-sdk/credential-provider-ini/dist-types/fromIni.d.ts @@ -0,0 +1,55 @@ +import type { AssumeRoleWithWebIdentityParams } from "@aws-sdk/credential-provider-web-identity"; +import type { CredentialProviderOptions } from "@aws-sdk/types"; +import type { RuntimeConfigAwsCredentialIdentityProvider } from "@aws-sdk/types"; +import { SourceProfileInit } from "@smithy/shared-ini-file-loader"; +import type { AwsCredentialIdentity, Pluggable } from "@smithy/types"; +import { AssumeRoleParams 
} from "./resolveAssumeRoleCredentials"; +/** + * @public + */ +export interface FromIniInit extends SourceProfileInit, CredentialProviderOptions { + /** + * A function that returns a promise fulfilled with an MFA token code for + * the provided MFA Serial code. If a profile requires an MFA code and + * `mfaCodeProvider` is not a valid function, the credential provider + * promise will be rejected. + * + * @param mfaSerial The serial code of the MFA device specified. + */ + mfaCodeProvider?: (mfaSerial: string) => Promise; + /** + * A function that assumes a role and returns a promise fulfilled with + * credentials for the assumed role. + * + * @param sourceCreds The credentials with which to assume a role. + * @param params + */ + roleAssumer?: (sourceCreds: AwsCredentialIdentity, params: AssumeRoleParams) => Promise; + /** + * A function that assumes a role with web identity and returns a promise fulfilled with + * credentials for the assumed role. + * + * @param sourceCreds The credentials with which to assume a role. + * @param params + */ + roleAssumerWithWebIdentity?: (params: AssumeRoleWithWebIdentityParams) => Promise; + /** + * STSClientConfig or SSOClientConfig to be used for creating inner client + * for auth operations. + * @internal + */ + clientConfig?: any; + clientPlugins?: Pluggable[]; + /** + * When true, always reload credentials from the file system instead of using cached values. + * This is useful when you need to detect changes to the credentials file. + */ + ignoreCache?: boolean; +} +/** + * @internal + * + * Creates a credential provider that will read from ini files and supports + * role assumption and multi-factor authentication. 
+ */ +export declare const fromIni: (_init?: FromIniInit) => RuntimeConfigAwsCredentialIdentityProvider; diff --git a/amplify/functions/deleteDocument/node_modules/@aws-sdk/credential-provider-ini/dist-types/index.d.ts b/amplify/functions/deleteDocument/node_modules/@aws-sdk/credential-provider-ini/dist-types/index.d.ts new file mode 100644 index 0000000..75680c0 --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@aws-sdk/credential-provider-ini/dist-types/index.d.ts @@ -0,0 +1,4 @@ +/** + * @internal + */ +export * from "./fromIni"; diff --git a/amplify/functions/deleteDocument/node_modules/@aws-sdk/credential-provider-ini/dist-types/resolveAssumeRoleCredentials.d.ts b/amplify/functions/deleteDocument/node_modules/@aws-sdk/credential-provider-ini/dist-types/resolveAssumeRoleCredentials.d.ts new file mode 100644 index 0000000..dd9a896 --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@aws-sdk/credential-provider-ini/dist-types/resolveAssumeRoleCredentials.d.ts @@ -0,0 +1,47 @@ +import { Logger, ParsedIniData } from "@smithy/types"; +import { FromIniInit } from "./fromIni"; +/** + * @internal + * + * @see http://docs.aws.amazon.com/AWSJavaScriptSDK/latest/AWS/STS.html#assumeRole-property + * TODO update the above to link to V3 docs + */ +export interface AssumeRoleParams { + /** + * The identifier of the role to be assumed. + */ + RoleArn: string; + /** + * A name for the assumed role session. + */ + RoleSessionName: string; + /** + * A unique identifier that is used by third parties when assuming roles in + * their customers' accounts. + */ + ExternalId?: string; + /** + * The identification number of the MFA device that is associated with the + * user who is making the `AssumeRole` call. + */ + SerialNumber?: string; + /** + * The value provided by the MFA device. + */ + TokenCode?: string; + /** + * The duration, in seconds, of the role session. 
+ */ + DurationSeconds?: number; +} +/** + * @internal + */ +export declare const isAssumeRoleProfile: (arg: any, { profile, logger }?: { + profile?: string | undefined; + logger?: Logger | undefined; +}) => boolean; +/** + * @internal + */ +export declare const resolveAssumeRoleCredentials: (profileName: string, profiles: ParsedIniData, options: FromIniInit, visitedProfiles?: Record) => Promise; diff --git a/amplify/functions/deleteDocument/node_modules/@aws-sdk/credential-provider-ini/dist-types/resolveCredentialSource.d.ts b/amplify/functions/deleteDocument/node_modules/@aws-sdk/credential-provider-ini/dist-types/resolveCredentialSource.d.ts new file mode 100644 index 0000000..6f1c9b7 --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@aws-sdk/credential-provider-ini/dist-types/resolveCredentialSource.d.ts @@ -0,0 +1,12 @@ +import type { CredentialProviderOptions } from "@aws-sdk/types"; +import { AwsCredentialIdentityProvider, Logger } from "@smithy/types"; +/** + * @internal + * + * Resolve the `credential_source` entry from the profile, and return the + * credential providers respectively. No memoization is needed for the + * credential source providers because memoization should be added outside the + * fromIni() provider. The source credential needs to be refreshed every time + * fromIni() is called. 
+ */ +export declare const resolveCredentialSource: (credentialSource: string, profileName: string, logger?: Logger) => (options?: CredentialProviderOptions) => Promise; diff --git a/amplify/functions/deleteDocument/node_modules/@aws-sdk/credential-provider-ini/dist-types/resolveProcessCredentials.d.ts b/amplify/functions/deleteDocument/node_modules/@aws-sdk/credential-provider-ini/dist-types/resolveProcessCredentials.d.ts new file mode 100644 index 0000000..7194518 --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@aws-sdk/credential-provider-ini/dist-types/resolveProcessCredentials.d.ts @@ -0,0 +1,16 @@ +import { Credentials, Profile } from "@aws-sdk/types"; +import { FromIniInit } from "./fromIni"; +/** + * @internal + */ +export interface ProcessProfile extends Profile { + credential_process: string; +} +/** + * @internal + */ +export declare const isProcessProfile: (arg: any) => arg is ProcessProfile; +/** + * @internal + */ +export declare const resolveProcessCredentials: (options: FromIniInit, profile: string) => Promise; diff --git a/amplify/functions/deleteDocument/node_modules/@aws-sdk/credential-provider-ini/dist-types/resolveProfileData.d.ts b/amplify/functions/deleteDocument/node_modules/@aws-sdk/credential-provider-ini/dist-types/resolveProfileData.d.ts new file mode 100644 index 0000000..e59ca93 --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@aws-sdk/credential-provider-ini/dist-types/resolveProfileData.d.ts @@ -0,0 +1,6 @@ +import type { AwsCredentialIdentity, ParsedIniData } from "@smithy/types"; +import { FromIniInit } from "./fromIni"; +/** + * @internal + */ +export declare const resolveProfileData: (profileName: string, profiles: ParsedIniData, options: FromIniInit, visitedProfiles?: Record, isAssumeRoleRecursiveCall?: boolean) => Promise; diff --git a/amplify/functions/deleteDocument/node_modules/@aws-sdk/credential-provider-ini/dist-types/resolveSsoCredentials.d.ts 
b/amplify/functions/deleteDocument/node_modules/@aws-sdk/credential-provider-ini/dist-types/resolveSsoCredentials.d.ts new file mode 100644 index 0000000..1909a51 --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@aws-sdk/credential-provider-ini/dist-types/resolveSsoCredentials.d.ts @@ -0,0 +1,12 @@ +import type { SsoProfile } from "@aws-sdk/credential-provider-sso"; +import type { IniSection, Profile } from "@smithy/types"; +import type { FromIniInit } from "./fromIni"; +/** + * @internal + */ +export declare const resolveSsoCredentials: (profile: string, profileData: IniSection, options?: FromIniInit) => Promise; +/** + * @internal + * duplicated from \@aws-sdk/credential-provider-sso to defer import. + */ +export declare const isSsoProfile: (arg: Profile) => arg is Partial; diff --git a/amplify/functions/deleteDocument/node_modules/@aws-sdk/credential-provider-ini/dist-types/resolveStaticCredentials.d.ts b/amplify/functions/deleteDocument/node_modules/@aws-sdk/credential-provider-ini/dist-types/resolveStaticCredentials.d.ts new file mode 100644 index 0000000..e04cf26 --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@aws-sdk/credential-provider-ini/dist-types/resolveStaticCredentials.d.ts @@ -0,0 +1,20 @@ +import { AwsCredentialIdentity, Profile } from "@smithy/types"; +import { FromIniInit } from "./fromIni"; +/** + * @internal + */ +export interface StaticCredsProfile extends Profile { + aws_access_key_id: string; + aws_secret_access_key: string; + aws_session_token?: string; + aws_credential_scope?: string; + aws_account_id?: string; +} +/** + * @internal + */ +export declare const isStaticCredsProfile: (arg: any) => arg is StaticCredsProfile; +/** + * @internal + */ +export declare const resolveStaticCredentials: (profile: StaticCredsProfile, options?: FromIniInit) => Promise; diff --git a/amplify/functions/deleteDocument/node_modules/@aws-sdk/credential-provider-ini/dist-types/resolveWebIdentityCredentials.d.ts 
b/amplify/functions/deleteDocument/node_modules/@aws-sdk/credential-provider-ini/dist-types/resolveWebIdentityCredentials.d.ts new file mode 100644 index 0000000..acb1d45 --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@aws-sdk/credential-provider-ini/dist-types/resolveWebIdentityCredentials.d.ts @@ -0,0 +1,18 @@ +import { AwsCredentialIdentity, Profile } from "@smithy/types"; +import { FromIniInit } from "./fromIni"; +/** + * @internal + */ +export interface WebIdentityProfile extends Profile { + web_identity_token_file: string; + role_arn: string; + role_session_name?: string; +} +/** + * @internal + */ +export declare const isWebIdentityProfile: (arg: any) => arg is WebIdentityProfile; +/** + * @internal + */ +export declare const resolveWebIdentityCredentials: (profile: WebIdentityProfile, options: FromIniInit) => Promise; diff --git a/amplify/functions/deleteDocument/node_modules/@aws-sdk/credential-provider-ini/dist-types/ts3.4/fromIni.d.ts b/amplify/functions/deleteDocument/node_modules/@aws-sdk/credential-provider-ini/dist-types/ts3.4/fromIni.d.ts new file mode 100644 index 0000000..9d640a0 --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@aws-sdk/credential-provider-ini/dist-types/ts3.4/fromIni.d.ts @@ -0,0 +1,24 @@ +import { AssumeRoleWithWebIdentityParams } from "@aws-sdk/credential-provider-web-identity"; +import { CredentialProviderOptions } from "@aws-sdk/types"; +import { RuntimeConfigAwsCredentialIdentityProvider } from "@aws-sdk/types"; +import { SourceProfileInit } from "@smithy/shared-ini-file-loader"; +import { AwsCredentialIdentity, Pluggable } from "@smithy/types"; +import { AssumeRoleParams } from "./resolveAssumeRoleCredentials"; +export interface FromIniInit + extends SourceProfileInit, + CredentialProviderOptions { + mfaCodeProvider?: (mfaSerial: string) => Promise; + roleAssumer?: ( + sourceCreds: AwsCredentialIdentity, + params: AssumeRoleParams + ) => Promise; + roleAssumerWithWebIdentity?: ( + params: 
AssumeRoleWithWebIdentityParams + ) => Promise; + clientConfig?: any; + clientPlugins?: Pluggable[]; + ignoreCache?: boolean; +} +export declare const fromIni: ( + _init?: FromIniInit +) => RuntimeConfigAwsCredentialIdentityProvider; diff --git a/amplify/functions/deleteDocument/node_modules/@aws-sdk/credential-provider-ini/dist-types/ts3.4/index.d.ts b/amplify/functions/deleteDocument/node_modules/@aws-sdk/credential-provider-ini/dist-types/ts3.4/index.d.ts new file mode 100644 index 0000000..b019131 --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@aws-sdk/credential-provider-ini/dist-types/ts3.4/index.d.ts @@ -0,0 +1 @@ +export * from "./fromIni"; diff --git a/amplify/functions/deleteDocument/node_modules/@aws-sdk/credential-provider-ini/dist-types/ts3.4/resolveAssumeRoleCredentials.d.ts b/amplify/functions/deleteDocument/node_modules/@aws-sdk/credential-provider-ini/dist-types/ts3.4/resolveAssumeRoleCredentials.d.ts new file mode 100644 index 0000000..eb782f3 --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@aws-sdk/credential-provider-ini/dist-types/ts3.4/resolveAssumeRoleCredentials.d.ts @@ -0,0 +1,26 @@ +import { Logger, ParsedIniData } from "@smithy/types"; +import { FromIniInit } from "./fromIni"; +export interface AssumeRoleParams { + RoleArn: string; + RoleSessionName: string; + ExternalId?: string; + SerialNumber?: string; + TokenCode?: string; + DurationSeconds?: number; +} +export declare const isAssumeRoleProfile: ( + arg: any, + { + profile, + logger, + }?: { + profile?: string | undefined; + logger?: Logger | undefined; + } +) => boolean; +export declare const resolveAssumeRoleCredentials: ( + profileName: string, + profiles: ParsedIniData, + options: FromIniInit, + visitedProfiles?: Record +) => Promise; diff --git a/amplify/functions/deleteDocument/node_modules/@aws-sdk/credential-provider-ini/dist-types/ts3.4/resolveCredentialSource.d.ts 
b/amplify/functions/deleteDocument/node_modules/@aws-sdk/credential-provider-ini/dist-types/ts3.4/resolveCredentialSource.d.ts new file mode 100644 index 0000000..21a7f9f --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@aws-sdk/credential-provider-ini/dist-types/ts3.4/resolveCredentialSource.d.ts @@ -0,0 +1,9 @@ +import { CredentialProviderOptions } from "@aws-sdk/types"; +import { AwsCredentialIdentityProvider, Logger } from "@smithy/types"; +export declare const resolveCredentialSource: ( + credentialSource: string, + profileName: string, + logger?: Logger +) => ( + options?: CredentialProviderOptions +) => Promise; diff --git a/amplify/functions/deleteDocument/node_modules/@aws-sdk/credential-provider-ini/dist-types/ts3.4/resolveProcessCredentials.d.ts b/amplify/functions/deleteDocument/node_modules/@aws-sdk/credential-provider-ini/dist-types/ts3.4/resolveProcessCredentials.d.ts new file mode 100644 index 0000000..dbd5583 --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@aws-sdk/credential-provider-ini/dist-types/ts3.4/resolveProcessCredentials.d.ts @@ -0,0 +1,10 @@ +import { Credentials, Profile } from "@aws-sdk/types"; +import { FromIniInit } from "./fromIni"; +export interface ProcessProfile extends Profile { + credential_process: string; +} +export declare const isProcessProfile: (arg: any) => arg is ProcessProfile; +export declare const resolveProcessCredentials: ( + options: FromIniInit, + profile: string +) => Promise; diff --git a/amplify/functions/deleteDocument/node_modules/@aws-sdk/credential-provider-ini/dist-types/ts3.4/resolveProfileData.d.ts b/amplify/functions/deleteDocument/node_modules/@aws-sdk/credential-provider-ini/dist-types/ts3.4/resolveProfileData.d.ts new file mode 100644 index 0000000..d821bb4 --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@aws-sdk/credential-provider-ini/dist-types/ts3.4/resolveProfileData.d.ts @@ -0,0 +1,9 @@ +import { AwsCredentialIdentity, ParsedIniData } from 
"@smithy/types"; +import { FromIniInit } from "./fromIni"; +export declare const resolveProfileData: ( + profileName: string, + profiles: ParsedIniData, + options: FromIniInit, + visitedProfiles?: Record, + isAssumeRoleRecursiveCall?: boolean +) => Promise; diff --git a/amplify/functions/deleteDocument/node_modules/@aws-sdk/credential-provider-ini/dist-types/ts3.4/resolveSsoCredentials.d.ts b/amplify/functions/deleteDocument/node_modules/@aws-sdk/credential-provider-ini/dist-types/ts3.4/resolveSsoCredentials.d.ts new file mode 100644 index 0000000..88bec34 --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@aws-sdk/credential-provider-ini/dist-types/ts3.4/resolveSsoCredentials.d.ts @@ -0,0 +1,9 @@ +import { SsoProfile } from "@aws-sdk/credential-provider-sso"; +import { IniSection, Profile } from "@smithy/types"; +import { FromIniInit } from "./fromIni"; +export declare const resolveSsoCredentials: ( + profile: string, + profileData: IniSection, + options?: FromIniInit +) => Promise; +export declare const isSsoProfile: (arg: Profile) => arg is Partial; diff --git a/amplify/functions/deleteDocument/node_modules/@aws-sdk/credential-provider-ini/dist-types/ts3.4/resolveStaticCredentials.d.ts b/amplify/functions/deleteDocument/node_modules/@aws-sdk/credential-provider-ini/dist-types/ts3.4/resolveStaticCredentials.d.ts new file mode 100644 index 0000000..5f5daa9 --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@aws-sdk/credential-provider-ini/dist-types/ts3.4/resolveStaticCredentials.d.ts @@ -0,0 +1,16 @@ +import { AwsCredentialIdentity, Profile } from "@smithy/types"; +import { FromIniInit } from "./fromIni"; +export interface StaticCredsProfile extends Profile { + aws_access_key_id: string; + aws_secret_access_key: string; + aws_session_token?: string; + aws_credential_scope?: string; + aws_account_id?: string; +} +export declare const isStaticCredsProfile: ( + arg: any +) => arg is StaticCredsProfile; +export declare const 
resolveStaticCredentials: ( + profile: StaticCredsProfile, + options?: FromIniInit +) => Promise; diff --git a/amplify/functions/deleteDocument/node_modules/@aws-sdk/credential-provider-ini/dist-types/ts3.4/resolveWebIdentityCredentials.d.ts b/amplify/functions/deleteDocument/node_modules/@aws-sdk/credential-provider-ini/dist-types/ts3.4/resolveWebIdentityCredentials.d.ts new file mode 100644 index 0000000..4179f94 --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@aws-sdk/credential-provider-ini/dist-types/ts3.4/resolveWebIdentityCredentials.d.ts @@ -0,0 +1,14 @@ +import { AwsCredentialIdentity, Profile } from "@smithy/types"; +import { FromIniInit } from "./fromIni"; +export interface WebIdentityProfile extends Profile { + web_identity_token_file: string; + role_arn: string; + role_session_name?: string; +} +export declare const isWebIdentityProfile: ( + arg: any +) => arg is WebIdentityProfile; +export declare const resolveWebIdentityCredentials: ( + profile: WebIdentityProfile, + options: FromIniInit +) => Promise; diff --git a/amplify/functions/deleteDocument/node_modules/@aws-sdk/credential-provider-ini/package.json b/amplify/functions/deleteDocument/node_modules/@aws-sdk/credential-provider-ini/package.json new file mode 100644 index 0000000..313a386 --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@aws-sdk/credential-provider-ini/package.json @@ -0,0 +1,72 @@ +{ + "name": "@aws-sdk/credential-provider-ini", + "version": "3.803.0", + "description": "AWS credential provider that sources credentials from ~/.aws/credentials and ~/.aws/config", + "main": "./dist-cjs/index.js", + "module": "./dist-es/index.js", + "scripts": { + "build": "concurrently 'yarn:build:cjs' 'yarn:build:es' 'yarn:build:types'", + "build:cjs": "node ../../scripts/compilation/inline credential-provider-ini", + "build:es": "tsc -p tsconfig.es.json", + "build:include:deps": "lerna run --scope $npm_package_name --include-dependencies build", + "build:types": 
"tsc -p tsconfig.types.json", + "build:types:downlevel": "downlevel-dts dist-types dist-types/ts3.4", + "clean": "rimraf ./dist-* && rimraf *.tsbuildinfo", + "test": "yarn g:vitest run", + "test:watch": "yarn g:vitest watch", + "test:integration": "yarn g:vitest run -c vitest.config.integ.ts", + "test:integration:watch": "yarn g:vitest watch -c vitest.config.integ.ts" + }, + "keywords": [ + "aws", + "credentials" + ], + "author": { + "name": "AWS SDK for JavaScript Team", + "url": "https://aws.amazon.com/javascript/" + }, + "license": "Apache-2.0", + "dependencies": { + "@aws-sdk/core": "3.799.0", + "@aws-sdk/credential-provider-env": "3.799.0", + "@aws-sdk/credential-provider-http": "3.799.0", + "@aws-sdk/credential-provider-process": "3.799.0", + "@aws-sdk/credential-provider-sso": "3.803.0", + "@aws-sdk/credential-provider-web-identity": "3.803.0", + "@aws-sdk/nested-clients": "3.803.0", + "@aws-sdk/types": "3.775.0", + "@smithy/credential-provider-imds": "^4.0.2", + "@smithy/property-provider": "^4.0.2", + "@smithy/shared-ini-file-loader": "^4.0.2", + "@smithy/types": "^4.2.0", + "tslib": "^2.6.2" + }, + "devDependencies": { + "@tsconfig/recommended": "1.0.1", + "@types/node": "^18.19.69", + "concurrently": "7.0.0", + "downlevel-dts": "0.10.1", + "rimraf": "3.0.2", + "typescript": "~5.2.2" + }, + "types": "./dist-types/index.d.ts", + "engines": { + "node": ">=18.0.0" + }, + "typesVersions": { + "<4.0": { + "dist-types/*": [ + "dist-types/ts3.4/*" + ] + } + }, + "files": [ + "dist-*/**" + ], + "homepage": "https://github.com/aws/aws-sdk-js-v3/tree/main/packages/credential-provider-ini", + "repository": { + "type": "git", + "url": "https://github.com/aws/aws-sdk-js-v3.git", + "directory": "packages/credential-provider-ini" + } +} diff --git a/amplify/functions/deleteDocument/node_modules/@aws-sdk/credential-provider-node/LICENSE b/amplify/functions/deleteDocument/node_modules/@aws-sdk/credential-provider-node/LICENSE new file mode 100644 index 0000000..7b6491b 
--- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@aws-sdk/credential-provider-node/LICENSE @@ -0,0 +1,201 @@ +Apache License + Version 2.0, January 2004 + http://www.apache.org/licenses/ + + TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION + + 1. Definitions. + + "License" shall mean the terms and conditions for use, reproduction, + and distribution as defined by Sections 1 through 9 of this document. + + "Licensor" shall mean the copyright owner or entity authorized by + the copyright owner that is granting the License. + + "Legal Entity" shall mean the union of the acting entity and all + other entities that control, are controlled by, or are under common + control with that entity. For the purposes of this definition, + "control" means (i) the power, direct or indirect, to cause the + direction or management of such entity, whether by contract or + otherwise, or (ii) ownership of fifty percent (50%) or more of the + outstanding shares, or (iii) beneficial ownership of such entity. + + "You" (or "Your") shall mean an individual or Legal Entity + exercising permissions granted by this License. + + "Source" form shall mean the preferred form for making modifications, + including but not limited to software source code, documentation + source, and configuration files. + + "Object" form shall mean any form resulting from mechanical + transformation or translation of a Source form, including but + not limited to compiled object code, generated documentation, + and conversions to other media types. + + "Work" shall mean the work of authorship, whether in Source or + Object form, made available under the License, as indicated by a + copyright notice that is included in or attached to the work + (an example is provided in the Appendix below). 
+ + "Derivative Works" shall mean any work, whether in Source or Object + form, that is based on (or derived from) the Work and for which the + editorial revisions, annotations, elaborations, or other modifications + represent, as a whole, an original work of authorship. For the purposes + of this License, Derivative Works shall not include works that remain + separable from, or merely link (or bind by name) to the interfaces of, + the Work and Derivative Works thereof. + + "Contribution" shall mean any work of authorship, including + the original version of the Work and any modifications or additions + to that Work or Derivative Works thereof, that is intentionally + submitted to Licensor for inclusion in the Work by the copyright owner + or by an individual or Legal Entity authorized to submit on behalf of + the copyright owner. For the purposes of this definition, "submitted" + means any form of electronic, verbal, or written communication sent + to the Licensor or its representatives, including but not limited to + communication on electronic mailing lists, source code control systems, + and issue tracking systems that are managed by, or on behalf of, the + Licensor for the purpose of discussing and improving the Work, but + excluding communication that is conspicuously marked or otherwise + designated in writing by the copyright owner as "Not a Contribution." + + "Contributor" shall mean Licensor and any individual or Legal Entity + on behalf of whom a Contribution has been received by Licensor and + subsequently incorporated within the Work. + + 2. Grant of Copyright License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + copyright license to reproduce, prepare Derivative Works of, + publicly display, publicly perform, sublicense, and distribute the + Work and such Derivative Works in Source or Object form. + + 3. 
Grant of Patent License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + (except as stated in this section) patent license to make, have made, + use, offer to sell, sell, import, and otherwise transfer the Work, + where such license applies only to those patent claims licensable + by such Contributor that are necessarily infringed by their + Contribution(s) alone or by combination of their Contribution(s) + with the Work to which such Contribution(s) was submitted. If You + institute patent litigation against any entity (including a + cross-claim or counterclaim in a lawsuit) alleging that the Work + or a Contribution incorporated within the Work constitutes direct + or contributory patent infringement, then any patent licenses + granted to You under this License for that Work shall terminate + as of the date such litigation is filed. + + 4. Redistribution. You may reproduce and distribute copies of the + Work or Derivative Works thereof in any medium, with or without + modifications, and in Source or Object form, provided that You + meet the following conditions: + + (a) You must give any other recipients of the Work or + Derivative Works a copy of this License; and + + (b) You must cause any modified files to carry prominent notices + stating that You changed the files; and + + (c) You must retain, in the Source form of any Derivative Works + that You distribute, all copyright, patent, trademark, and + attribution notices from the Source form of the Work, + excluding those notices that do not pertain to any part of + the Derivative Works; and + + (d) If the Work includes a "NOTICE" text file as part of its + distribution, then any Derivative Works that You distribute must + include a readable copy of the attribution notices contained + within such NOTICE file, excluding those notices that do not + pertain to any part of the Derivative 
Works, in at least one + of the following places: within a NOTICE text file distributed + as part of the Derivative Works; within the Source form or + documentation, if provided along with the Derivative Works; or, + within a display generated by the Derivative Works, if and + wherever such third-party notices normally appear. The contents + of the NOTICE file are for informational purposes only and + do not modify the License. You may add Your own attribution + notices within Derivative Works that You distribute, alongside + or as an addendum to the NOTICE text from the Work, provided + that such additional attribution notices cannot be construed + as modifying the License. + + You may add Your own copyright statement to Your modifications and + may provide additional or different license terms and conditions + for use, reproduction, or distribution of Your modifications, or + for any such Derivative Works as a whole, provided Your use, + reproduction, and distribution of the Work otherwise complies with + the conditions stated in this License. + + 5. Submission of Contributions. Unless You explicitly state otherwise, + any Contribution intentionally submitted for inclusion in the Work + by You to the Licensor shall be under the terms and conditions of + this License, without any additional terms or conditions. + Notwithstanding the above, nothing herein shall supersede or modify + the terms of any separate license agreement you may have executed + with Licensor regarding such Contributions. + + 6. Trademarks. This License does not grant permission to use the trade + names, trademarks, service marks, or product names of the Licensor, + except as required for reasonable and customary use in describing the + origin of the Work and reproducing the content of the NOTICE file. + + 7. Disclaimer of Warranty. 
Unless required by applicable law or + agreed to in writing, Licensor provides the Work (and each + Contributor provides its Contributions) on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or + implied, including, without limitation, any warranties or conditions + of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A + PARTICULAR PURPOSE. You are solely responsible for determining the + appropriateness of using or redistributing the Work and assume any + risks associated with Your exercise of permissions under this License. + + 8. Limitation of Liability. In no event and under no legal theory, + whether in tort (including negligence), contract, or otherwise, + unless required by applicable law (such as deliberate and grossly + negligent acts) or agreed to in writing, shall any Contributor be + liable to You for damages, including any direct, indirect, special, + incidental, or consequential damages of any character arising as a + result of this License or out of the use or inability to use the + Work (including but not limited to damages for loss of goodwill, + work stoppage, computer failure or malfunction, or any and all + other commercial damages or losses), even if such Contributor + has been advised of the possibility of such damages. + + 9. Accepting Warranty or Additional Liability. While redistributing + the Work or Derivative Works thereof, You may choose to offer, + and charge a fee for, acceptance of support, warranty, indemnity, + or other liability obligations and/or rights consistent with this + License. However, in accepting such obligations, You may act only + on Your own behalf and on Your sole responsibility, not on behalf + of any other Contributor, and only if You agree to indemnify, + defend, and hold each Contributor harmless for any liability + incurred by, or claims asserted against, such Contributor by reason + of your accepting any such warranty or additional liability. 
+ + END OF TERMS AND CONDITIONS + + APPENDIX: How to apply the Apache License to your work. + + To apply the Apache License to your work, attach the following + boilerplate notice, with the fields enclosed by brackets "{}" + replaced with your own identifying information. (Don't include + the brackets!) The text should be enclosed in the appropriate + comment syntax for the file format. We also recommend that a + file or class name and description of purpose be included on the + same "printed page" as the copyright notice for easier + identification within third-party archives. + + Copyright 2018-2020 Amazon.com, Inc. or its affiliates. All Rights Reserved. + + Licensed under the Apache License, Version 2.0 (the "License"); + you may not use this file except in compliance with the License. + You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + + Unless required by applicable law or agreed to in writing, software + distributed under the License is distributed on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + See the License for the specific language governing permissions and + limitations under the License. 
\ No newline at end of file diff --git a/amplify/functions/deleteDocument/node_modules/@aws-sdk/credential-provider-node/README.md b/amplify/functions/deleteDocument/node_modules/@aws-sdk/credential-provider-node/README.md new file mode 100644 index 0000000..7957cc0 --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@aws-sdk/credential-provider-node/README.md @@ -0,0 +1,104 @@ +# @aws-sdk/credential-provider-node + +[![NPM version](https://img.shields.io/npm/v/@aws-sdk/credential-provider-node/latest.svg)](https://www.npmjs.com/package/@aws-sdk/credential-provider-node) +[![NPM downloads](https://img.shields.io/npm/dm/@aws-sdk/credential-provider-node.svg)](https://www.npmjs.com/package/@aws-sdk/credential-provider-node) + +## AWS Credential Provider for Node.JS + +This module provides a factory function, `defaultProvider`, that will attempt to +source AWS credentials from a Node.JS environment. It will attempt to find +credentials from the following sources (listed in order of precedence): + +- Environment variables exposed via `process.env` +- SSO credentials from token cache +- Web identity token credentials +- Shared credentials and config ini files +- The EC2/ECS Instance Metadata Service + +The default credential provider will invoke one provider at a time and only +continue to the next if no credentials have been located. For example, if the +process finds values defined via the `AWS_ACCESS_KEY_ID` and +`AWS_SECRET_ACCESS_KEY` environment variables, the files at `~/.aws/credentials` +and `~/.aws/config` will not be read, nor will any messages be sent to the +Instance Metadata Service. + +If invalid configuration is encountered (such as a profile in +`~/.aws/credentials` specifying as its `source_profile` the name of a profile +that does not exist), then the chained provider will be rejected with an error +and will not invoke the next provider in the list. 
+ +_IMPORTANT_: if you intend to acquire credentials using EKS +[IAM Roles for Service Accounts](https://docs.aws.amazon.com/eks/latest/userguide/iam-roles-for-service-accounts.html), +then you must explicitly specify a value for `roleAssumerWithWebIdentity`. There is a +default function available in `@aws-sdk/client-sts` package. An example of using +this: + +```js +const { getDefaultRoleAssumerWithWebIdentity } = require("@aws-sdk/client-sts"); +const { defaultProvider } = require("@aws-sdk/credential-provider-node"); +const { S3Client, GetObjectCommand } = require("@aws-sdk/client-s3"); + +const provider = defaultProvider({ + roleAssumerWithWebIdentity: getDefaultRoleAssumerWithWebIdentity({ + // You must explicitly pass a region if you are not using us-east-1 + region: "eu-west-1" + }), +}); + +const client = new S3Client({ credentialDefaultProvider: provider }); +``` + +_IMPORTANT_: We provide a wrapper of this provider in `@aws-sdk/credential-providers` +package to save you from importing `getDefaultRoleAssumerWithWebIdentity()` or +`getDefaultRoleAssume()` from STS package. Similarly, you can do: + +```js +const { fromNodeProviderChain } = require("@aws-sdk/credential-providers"); + +const credentials = fromNodeProviderChain(); + +const client = new S3Client({ credentials }); +``` + +## Supported configuration + +You may customize how credentials are resolved by providing an options hash to +the `defaultProvider` factory function. The following options are +supported: + +- `profile` - The configuration profile to use. If not specified, the provider + will use the value in the `AWS_PROFILE` environment variable or a default of + `default`. +- `filepath` - The path to the shared credentials file. If not specified, the + provider will use the value in the `AWS_SHARED_CREDENTIALS_FILE` environment + variable or a default of `~/.aws/credentials`. +- `configFilepath` - The path to the shared config file. 
If not specified, the + provider will use the value in the `AWS_CONFIG_FILE` environment variable or a + default of `~/.aws/config`. +- `mfaCodeProvider` - A function that returns a a promise fulfilled with an + MFA token code for the provided MFA Serial code. If a profile requires an MFA + code and `mfaCodeProvider` is not a valid function, the credential provider + promise will be rejected. +- `roleAssumer` - A function that assumes a role and returns a promise + fulfilled with credentials for the assumed role. If not specified, no role + will be assumed, and an error will be thrown. +- `roleArn` - ARN to assume. If not specified, the provider will use the value + in the `AWS_ROLE_ARN` environment variable. +- `webIdentityTokenFile` - File location of where the `OIDC` token is stored. + If not specified, the provider will use the value in the `AWS_WEB_IDENTITY_TOKEN_FILE` + environment variable. +- `roleAssumerWithWebIdentity` - A function that assumes a role with web identity and + returns a promise fulfilled with credentials for the assumed role. +- `timeout` - The connection timeout (in milliseconds) to apply to any remote + requests. If not specified, a default value of `1000` (one second) is used. +- `maxRetries` - The maximum number of times any HTTP connections should be + retried. If not specified, a default value of `0` will be used. 
+ +## Related packages: + +- [AWS Credential Provider for Node.JS - Environment Variables](../credential-provider-env) +- [AWS Credential Provider for Node.JS - SSO](../credential-provider-sso) +- [AWS Credential Provider for Node.JS - Web Identity](../credential-provider-web-identity) +- [AWS Credential Provider for Node.JS - Shared Configuration Files](../credential-provider-ini) +- [AWS Credential Provider for Node.JS - Instance and Container Metadata](../credential-provider-imds) +- [AWS Shared Configuration File Loader](../shared-ini-file-loader) diff --git a/amplify/functions/deleteDocument/node_modules/@aws-sdk/credential-provider-node/dist-cjs/index.js b/amplify/functions/deleteDocument/node_modules/@aws-sdk/credential-provider-node/dist-cjs/index.js new file mode 100644 index 0000000..be4d2b6 --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@aws-sdk/credential-provider-node/dist-cjs/index.js @@ -0,0 +1,147 @@ +"use strict"; +var __create = Object.create; +var __defProp = Object.defineProperty; +var __getOwnPropDesc = Object.getOwnPropertyDescriptor; +var __getOwnPropNames = Object.getOwnPropertyNames; +var __getProtoOf = Object.getPrototypeOf; +var __hasOwnProp = Object.prototype.hasOwnProperty; +var __name = (target, value) => __defProp(target, "name", { value, configurable: true }); +var __export = (target, all) => { + for (var name in all) + __defProp(target, name, { get: all[name], enumerable: true }); +}; +var __copyProps = (to, from, except, desc) => { + if (from && typeof from === "object" || typeof from === "function") { + for (let key of __getOwnPropNames(from)) + if (!__hasOwnProp.call(to, key) && key !== except) + __defProp(to, key, { get: () => from[key], enumerable: !(desc = __getOwnPropDesc(from, key)) || desc.enumerable }); + } + return to; +}; +var __toESM = (mod, isNodeMode, target) => (target = mod != null ? 
__create(__getProtoOf(mod)) : {}, __copyProps( + // If the importer is in node compatibility mode or this is not an ESM + // file that has been converted to a CommonJS file using a Babel- + // compatible transform (i.e. "__esModule" has not been set), then set + // "default" to the CommonJS "module.exports" for node compatibility. + isNodeMode || !mod || !mod.__esModule ? __defProp(target, "default", { value: mod, enumerable: true }) : target, + mod +)); +var __toCommonJS = (mod) => __copyProps(__defProp({}, "__esModule", { value: true }), mod); + +// src/index.ts +var index_exports = {}; +__export(index_exports, { + credentialsTreatedAsExpired: () => credentialsTreatedAsExpired, + credentialsWillNeedRefresh: () => credentialsWillNeedRefresh, + defaultProvider: () => defaultProvider +}); +module.exports = __toCommonJS(index_exports); + +// src/defaultProvider.ts +var import_credential_provider_env = require("@aws-sdk/credential-provider-env"); + +var import_shared_ini_file_loader = require("@smithy/shared-ini-file-loader"); + +// src/remoteProvider.ts +var import_property_provider = require("@smithy/property-provider"); +var ENV_IMDS_DISABLED = "AWS_EC2_METADATA_DISABLED"; +var remoteProvider = /* @__PURE__ */ __name(async (init) => { + const { ENV_CMDS_FULL_URI, ENV_CMDS_RELATIVE_URI, fromContainerMetadata, fromInstanceMetadata } = await Promise.resolve().then(() => __toESM(require("@smithy/credential-provider-imds"))); + if (process.env[ENV_CMDS_RELATIVE_URI] || process.env[ENV_CMDS_FULL_URI]) { + init.logger?.debug("@aws-sdk/credential-provider-node - remoteProvider::fromHttp/fromContainerMetadata"); + const { fromHttp } = await Promise.resolve().then(() => __toESM(require("@aws-sdk/credential-provider-http"))); + return (0, import_property_provider.chain)(fromHttp(init), fromContainerMetadata(init)); + } + if (process.env[ENV_IMDS_DISABLED] && process.env[ENV_IMDS_DISABLED] !== "false") { + return async () => { + throw new 
import_property_provider.CredentialsProviderError("EC2 Instance Metadata Service access disabled", { logger: init.logger }); + }; + } + init.logger?.debug("@aws-sdk/credential-provider-node - remoteProvider::fromInstanceMetadata"); + return fromInstanceMetadata(init); +}, "remoteProvider"); + +// src/defaultProvider.ts +var multipleCredentialSourceWarningEmitted = false; +var defaultProvider = /* @__PURE__ */ __name((init = {}) => (0, import_property_provider.memoize)( + (0, import_property_provider.chain)( + async () => { + const profile = init.profile ?? process.env[import_shared_ini_file_loader.ENV_PROFILE]; + if (profile) { + const envStaticCredentialsAreSet = process.env[import_credential_provider_env.ENV_KEY] && process.env[import_credential_provider_env.ENV_SECRET]; + if (envStaticCredentialsAreSet) { + if (!multipleCredentialSourceWarningEmitted) { + const warnFn = init.logger?.warn && init.logger?.constructor?.name !== "NoOpLogger" ? init.logger.warn : console.warn; + warnFn( + `@aws-sdk/credential-provider-node - defaultProvider::fromEnv WARNING: + Multiple credential sources detected: + Both AWS_PROFILE and the pair AWS_ACCESS_KEY_ID/AWS_SECRET_ACCESS_KEY static credentials are set. + This SDK will proceed with the AWS_PROFILE value. + + However, a future version may change this behavior to prefer the ENV static credentials. + Please ensure that your environment only sets either the AWS_PROFILE or the + AWS_ACCESS_KEY_ID/AWS_SECRET_ACCESS_KEY pair. 
+` + ); + multipleCredentialSourceWarningEmitted = true; + } + } + throw new import_property_provider.CredentialsProviderError("AWS_PROFILE is set, skipping fromEnv provider.", { + logger: init.logger, + tryNextLink: true + }); + } + init.logger?.debug("@aws-sdk/credential-provider-node - defaultProvider::fromEnv"); + return (0, import_credential_provider_env.fromEnv)(init)(); + }, + async () => { + init.logger?.debug("@aws-sdk/credential-provider-node - defaultProvider::fromSSO"); + const { ssoStartUrl, ssoAccountId, ssoRegion, ssoRoleName, ssoSession } = init; + if (!ssoStartUrl && !ssoAccountId && !ssoRegion && !ssoRoleName && !ssoSession) { + throw new import_property_provider.CredentialsProviderError( + "Skipping SSO provider in default chain (inputs do not include SSO fields).", + { logger: init.logger } + ); + } + const { fromSSO } = await Promise.resolve().then(() => __toESM(require("@aws-sdk/credential-provider-sso"))); + return fromSSO(init)(); + }, + async () => { + init.logger?.debug("@aws-sdk/credential-provider-node - defaultProvider::fromIni"); + const { fromIni } = await Promise.resolve().then(() => __toESM(require("@aws-sdk/credential-provider-ini"))); + return fromIni(init)(); + }, + async () => { + init.logger?.debug("@aws-sdk/credential-provider-node - defaultProvider::fromProcess"); + const { fromProcess } = await Promise.resolve().then(() => __toESM(require("@aws-sdk/credential-provider-process"))); + return fromProcess(init)(); + }, + async () => { + init.logger?.debug("@aws-sdk/credential-provider-node - defaultProvider::fromTokenFile"); + const { fromTokenFile } = await Promise.resolve().then(() => __toESM(require("@aws-sdk/credential-provider-web-identity"))); + return fromTokenFile(init)(); + }, + async () => { + init.logger?.debug("@aws-sdk/credential-provider-node - defaultProvider::remoteProvider"); + return (await remoteProvider(init))(); + }, + async () => { + throw new import_property_provider.CredentialsProviderError("Could not 
load credentials from any providers", { + tryNextLink: false, + logger: init.logger + }); + } + ), + credentialsTreatedAsExpired, + credentialsWillNeedRefresh +), "defaultProvider"); +var credentialsWillNeedRefresh = /* @__PURE__ */ __name((credentials) => credentials?.expiration !== void 0, "credentialsWillNeedRefresh"); +var credentialsTreatedAsExpired = /* @__PURE__ */ __name((credentials) => credentials?.expiration !== void 0 && credentials.expiration.getTime() - Date.now() < 3e5, "credentialsTreatedAsExpired"); +// Annotate the CommonJS export names for ESM import in node: + +0 && (module.exports = { + defaultProvider, + credentialsWillNeedRefresh, + credentialsTreatedAsExpired +}); + diff --git a/amplify/functions/deleteDocument/node_modules/@aws-sdk/credential-provider-node/dist-es/defaultProvider.js b/amplify/functions/deleteDocument/node_modules/@aws-sdk/credential-provider-node/dist-es/defaultProvider.js new file mode 100644 index 0000000..d582cf8 --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@aws-sdk/credential-provider-node/dist-es/defaultProvider.js @@ -0,0 +1,62 @@ +import { ENV_KEY, ENV_SECRET, fromEnv } from "@aws-sdk/credential-provider-env"; +import { chain, CredentialsProviderError, memoize } from "@smithy/property-provider"; +import { ENV_PROFILE } from "@smithy/shared-ini-file-loader"; +import { remoteProvider } from "./remoteProvider"; +let multipleCredentialSourceWarningEmitted = false; +export const defaultProvider = (init = {}) => memoize(chain(async () => { + const profile = init.profile ?? process.env[ENV_PROFILE]; + if (profile) { + const envStaticCredentialsAreSet = process.env[ENV_KEY] && process.env[ENV_SECRET]; + if (envStaticCredentialsAreSet) { + if (!multipleCredentialSourceWarningEmitted) { + const warnFn = init.logger?.warn && init.logger?.constructor?.name !== "NoOpLogger" ? 
init.logger.warn : console.warn; + warnFn(`@aws-sdk/credential-provider-node - defaultProvider::fromEnv WARNING: + Multiple credential sources detected: + Both AWS_PROFILE and the pair AWS_ACCESS_KEY_ID/AWS_SECRET_ACCESS_KEY static credentials are set. + This SDK will proceed with the AWS_PROFILE value. + + However, a future version may change this behavior to prefer the ENV static credentials. + Please ensure that your environment only sets either the AWS_PROFILE or the + AWS_ACCESS_KEY_ID/AWS_SECRET_ACCESS_KEY pair. +`); + multipleCredentialSourceWarningEmitted = true; + } + } + throw new CredentialsProviderError("AWS_PROFILE is set, skipping fromEnv provider.", { + logger: init.logger, + tryNextLink: true, + }); + } + init.logger?.debug("@aws-sdk/credential-provider-node - defaultProvider::fromEnv"); + return fromEnv(init)(); +}, async () => { + init.logger?.debug("@aws-sdk/credential-provider-node - defaultProvider::fromSSO"); + const { ssoStartUrl, ssoAccountId, ssoRegion, ssoRoleName, ssoSession } = init; + if (!ssoStartUrl && !ssoAccountId && !ssoRegion && !ssoRoleName && !ssoSession) { + throw new CredentialsProviderError("Skipping SSO provider in default chain (inputs do not include SSO fields).", { logger: init.logger }); + } + const { fromSSO } = await import("@aws-sdk/credential-provider-sso"); + return fromSSO(init)(); +}, async () => { + init.logger?.debug("@aws-sdk/credential-provider-node - defaultProvider::fromIni"); + const { fromIni } = await import("@aws-sdk/credential-provider-ini"); + return fromIni(init)(); +}, async () => { + init.logger?.debug("@aws-sdk/credential-provider-node - defaultProvider::fromProcess"); + const { fromProcess } = await import("@aws-sdk/credential-provider-process"); + return fromProcess(init)(); +}, async () => { + init.logger?.debug("@aws-sdk/credential-provider-node - defaultProvider::fromTokenFile"); + const { fromTokenFile } = await import("@aws-sdk/credential-provider-web-identity"); + return 
fromTokenFile(init)(); +}, async () => { + init.logger?.debug("@aws-sdk/credential-provider-node - defaultProvider::remoteProvider"); + return (await remoteProvider(init))(); +}, async () => { + throw new CredentialsProviderError("Could not load credentials from any providers", { + tryNextLink: false, + logger: init.logger, + }); +}), credentialsTreatedAsExpired, credentialsWillNeedRefresh); +export const credentialsWillNeedRefresh = (credentials) => credentials?.expiration !== undefined; +export const credentialsTreatedAsExpired = (credentials) => credentials?.expiration !== undefined && credentials.expiration.getTime() - Date.now() < 300000; diff --git a/amplify/functions/deleteDocument/node_modules/@aws-sdk/credential-provider-node/dist-es/index.js b/amplify/functions/deleteDocument/node_modules/@aws-sdk/credential-provider-node/dist-es/index.js new file mode 100644 index 0000000..c82818e --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@aws-sdk/credential-provider-node/dist-es/index.js @@ -0,0 +1 @@ +export * from "./defaultProvider"; diff --git a/amplify/functions/deleteDocument/node_modules/@aws-sdk/credential-provider-node/dist-es/remoteProvider.js b/amplify/functions/deleteDocument/node_modules/@aws-sdk/credential-provider-node/dist-es/remoteProvider.js new file mode 100644 index 0000000..c455bc1 --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@aws-sdk/credential-provider-node/dist-es/remoteProvider.js @@ -0,0 +1,17 @@ +import { chain, CredentialsProviderError } from "@smithy/property-provider"; +export const ENV_IMDS_DISABLED = "AWS_EC2_METADATA_DISABLED"; +export const remoteProvider = async (init) => { + const { ENV_CMDS_FULL_URI, ENV_CMDS_RELATIVE_URI, fromContainerMetadata, fromInstanceMetadata } = await import("@smithy/credential-provider-imds"); + if (process.env[ENV_CMDS_RELATIVE_URI] || process.env[ENV_CMDS_FULL_URI]) { + init.logger?.debug("@aws-sdk/credential-provider-node - 
remoteProvider::fromHttp/fromContainerMetadata"); + const { fromHttp } = await import("@aws-sdk/credential-provider-http"); + return chain(fromHttp(init), fromContainerMetadata(init)); + } + if (process.env[ENV_IMDS_DISABLED] && process.env[ENV_IMDS_DISABLED] !== "false") { + return async () => { + throw new CredentialsProviderError("EC2 Instance Metadata Service access disabled", { logger: init.logger }); + }; + } + init.logger?.debug("@aws-sdk/credential-provider-node - remoteProvider::fromInstanceMetadata"); + return fromInstanceMetadata(init); +}; diff --git a/amplify/functions/deleteDocument/node_modules/@aws-sdk/credential-provider-node/dist-types/defaultProvider.d.ts b/amplify/functions/deleteDocument/node_modules/@aws-sdk/credential-provider-node/dist-types/defaultProvider.d.ts new file mode 100644 index 0000000..fd40150 --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@aws-sdk/credential-provider-node/dist-types/defaultProvider.d.ts @@ -0,0 +1,58 @@ +import type { FromHttpOptions } from "@aws-sdk/credential-provider-http"; +import type { FromIniInit } from "@aws-sdk/credential-provider-ini"; +import type { FromProcessInit } from "@aws-sdk/credential-provider-process"; +import type { FromSSOInit, SsoCredentialsParameters } from "@aws-sdk/credential-provider-sso"; +import type { FromTokenFileInit } from "@aws-sdk/credential-provider-web-identity"; +import type { RemoteProviderInit } from "@smithy/credential-provider-imds"; +import { AwsCredentialIdentity, MemoizedProvider } from "@smithy/types"; +/** + * @public + */ +export type DefaultProviderInit = FromIniInit & FromHttpOptions & RemoteProviderInit & FromProcessInit & (FromSSOInit & Partial) & FromTokenFileInit; +/** + * Creates a credential provider that will attempt to find credentials from the + * following sources (listed in order of precedence): + * * Environment variables exposed via `process.env` + * * SSO credentials from token cache + * * Web identity token credentials + * * 
Shared credentials and config ini files + * * The EC2/ECS Instance Metadata Service + * + * The default credential provider will invoke one provider at a time and only + * continue to the next if no credentials have been located. For example, if + * the process finds values defined via the `AWS_ACCESS_KEY_ID` and + * `AWS_SECRET_ACCESS_KEY` environment variables, the files at + * `~/.aws/credentials` and `~/.aws/config` will not be read, nor will any + * messages be sent to the Instance Metadata Service. + * + * @param init Configuration that is passed to each individual + * provider + * + * @see {@link fromEnv} The function used to source credentials from + * environment variables. + * @see {@link fromSSO} The function used to source credentials from + * resolved SSO token cache. + * @see {@link fromTokenFile} The function used to source credentials from + * token file. + * @see {@link fromIni} The function used to source credentials from INI + * files. + * @see {@link fromProcess} The function used to sources credentials from + * credential_process in INI files. + * @see {@link fromInstanceMetadata} The function used to source credentials from the + * EC2 Instance Metadata Service. + * @see {@link fromContainerMetadata} The function used to source credentials from the + * ECS Container Metadata Service. + */ +export declare const defaultProvider: (init?: DefaultProviderInit) => MemoizedProvider; +/** + * @internal + * + * @returns credentials have expiration. + */ +export declare const credentialsWillNeedRefresh: (credentials: AwsCredentialIdentity) => boolean; +/** + * @internal + * + * @returns credentials with less than 5 minutes left. 
+ */ +export declare const credentialsTreatedAsExpired: (credentials: AwsCredentialIdentity) => boolean; diff --git a/amplify/functions/deleteDocument/node_modules/@aws-sdk/credential-provider-node/dist-types/index.d.ts b/amplify/functions/deleteDocument/node_modules/@aws-sdk/credential-provider-node/dist-types/index.d.ts new file mode 100644 index 0000000..c82818e --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@aws-sdk/credential-provider-node/dist-types/index.d.ts @@ -0,0 +1 @@ +export * from "./defaultProvider"; diff --git a/amplify/functions/deleteDocument/node_modules/@aws-sdk/credential-provider-node/dist-types/remoteProvider.d.ts b/amplify/functions/deleteDocument/node_modules/@aws-sdk/credential-provider-node/dist-types/remoteProvider.d.ts new file mode 100644 index 0000000..4022a4e --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@aws-sdk/credential-provider-node/dist-types/remoteProvider.d.ts @@ -0,0 +1,11 @@ +import type { FromHttpOptions } from "@aws-sdk/credential-provider-http"; +import type { RemoteProviderInit } from "@smithy/credential-provider-imds"; +import type { AwsCredentialIdentityProvider } from "@smithy/types"; +/** + * @internal + */ +export declare const ENV_IMDS_DISABLED = "AWS_EC2_METADATA_DISABLED"; +/** + * @internal + */ +export declare const remoteProvider: (init: RemoteProviderInit | FromHttpOptions) => Promise; diff --git a/amplify/functions/deleteDocument/node_modules/@aws-sdk/credential-provider-node/dist-types/ts3.4/defaultProvider.d.ts b/amplify/functions/deleteDocument/node_modules/@aws-sdk/credential-provider-node/dist-types/ts3.4/defaultProvider.d.ts new file mode 100644 index 0000000..e1f1a8d --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@aws-sdk/credential-provider-node/dist-types/ts3.4/defaultProvider.d.ts @@ -0,0 +1,25 @@ +import { FromHttpOptions } from "@aws-sdk/credential-provider-http"; +import { FromIniInit } from "@aws-sdk/credential-provider-ini"; +import { 
FromProcessInit } from "@aws-sdk/credential-provider-process"; +import { + FromSSOInit, + SsoCredentialsParameters, +} from "@aws-sdk/credential-provider-sso"; +import { FromTokenFileInit } from "@aws-sdk/credential-provider-web-identity"; +import { RemoteProviderInit } from "@smithy/credential-provider-imds"; +import { AwsCredentialIdentity, MemoizedProvider } from "@smithy/types"; +export type DefaultProviderInit = FromIniInit & + FromHttpOptions & + RemoteProviderInit & + FromProcessInit & + (FromSSOInit & Partial) & + FromTokenFileInit; +export declare const defaultProvider: ( + init?: DefaultProviderInit +) => MemoizedProvider; +export declare const credentialsWillNeedRefresh: ( + credentials: AwsCredentialIdentity +) => boolean; +export declare const credentialsTreatedAsExpired: ( + credentials: AwsCredentialIdentity +) => boolean; diff --git a/amplify/functions/deleteDocument/node_modules/@aws-sdk/credential-provider-node/dist-types/ts3.4/index.d.ts b/amplify/functions/deleteDocument/node_modules/@aws-sdk/credential-provider-node/dist-types/ts3.4/index.d.ts new file mode 100644 index 0000000..c82818e --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@aws-sdk/credential-provider-node/dist-types/ts3.4/index.d.ts @@ -0,0 +1 @@ +export * from "./defaultProvider"; diff --git a/amplify/functions/deleteDocument/node_modules/@aws-sdk/credential-provider-node/dist-types/ts3.4/remoteProvider.d.ts b/amplify/functions/deleteDocument/node_modules/@aws-sdk/credential-provider-node/dist-types/ts3.4/remoteProvider.d.ts new file mode 100644 index 0000000..90948cc --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@aws-sdk/credential-provider-node/dist-types/ts3.4/remoteProvider.d.ts @@ -0,0 +1,7 @@ +import { FromHttpOptions } from "@aws-sdk/credential-provider-http"; +import { RemoteProviderInit } from "@smithy/credential-provider-imds"; +import { AwsCredentialIdentityProvider } from "@smithy/types"; +export declare const ENV_IMDS_DISABLED = 
"AWS_EC2_METADATA_DISABLED"; +export declare const remoteProvider: ( + init: RemoteProviderInit | FromHttpOptions +) => Promise; diff --git a/amplify/functions/deleteDocument/node_modules/@aws-sdk/credential-provider-node/package.json b/amplify/functions/deleteDocument/node_modules/@aws-sdk/credential-provider-node/package.json new file mode 100644 index 0000000..40b3009 --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@aws-sdk/credential-provider-node/package.json @@ -0,0 +1,70 @@ +{ + "name": "@aws-sdk/credential-provider-node", + "version": "3.803.0", + "description": "AWS credential provider that sources credentials from a Node.JS environment. ", + "engines": { + "node": ">=18.0.0" + }, + "main": "./dist-cjs/index.js", + "module": "./dist-es/index.js", + "scripts": { + "build": "concurrently 'yarn:build:cjs' 'yarn:build:es' 'yarn:build:types'", + "build:cjs": "node ../../scripts/compilation/inline credential-provider-node", + "build:es": "tsc -p tsconfig.es.json", + "build:include:deps": "lerna run --scope $npm_package_name --include-dependencies build", + "build:types": "tsc -p tsconfig.types.json", + "build:types:downlevel": "downlevel-dts dist-types dist-types/ts3.4", + "clean": "rimraf ./dist-* && rimraf *.tsbuildinfo", + "test": "yarn g:vitest run", + "test:integration": "yarn g:jest -c jest.config.integ.js", + "test:watch": "yarn g:vitest watch" + }, + "keywords": [ + "aws", + "credentials" + ], + "author": { + "name": "AWS SDK for JavaScript Team", + "url": "https://aws.amazon.com/javascript/" + }, + "license": "Apache-2.0", + "dependencies": { + "@aws-sdk/credential-provider-env": "3.799.0", + "@aws-sdk/credential-provider-http": "3.799.0", + "@aws-sdk/credential-provider-ini": "3.803.0", + "@aws-sdk/credential-provider-process": "3.799.0", + "@aws-sdk/credential-provider-sso": "3.803.0", + "@aws-sdk/credential-provider-web-identity": "3.803.0", + "@aws-sdk/types": "3.775.0", + "@smithy/credential-provider-imds": "^4.0.2", + 
"@smithy/property-provider": "^4.0.2", + "@smithy/shared-ini-file-loader": "^4.0.2", + "@smithy/types": "^4.2.0", + "tslib": "^2.6.2" + }, + "devDependencies": { + "@tsconfig/recommended": "1.0.1", + "@types/node": "^18.19.69", + "concurrently": "7.0.0", + "downlevel-dts": "0.10.1", + "rimraf": "3.0.2", + "typescript": "~5.2.2" + }, + "types": "./dist-types/index.d.ts", + "typesVersions": { + "<4.0": { + "dist-types/*": [ + "dist-types/ts3.4/*" + ] + } + }, + "files": [ + "dist-*/**" + ], + "homepage": "https://github.com/aws/aws-sdk-js-v3/tree/main/packages/credential-provider-node", + "repository": { + "type": "git", + "url": "https://github.com/aws/aws-sdk-js-v3.git", + "directory": "packages/credential-provider-node" + } +} diff --git a/amplify/functions/deleteDocument/node_modules/@aws-sdk/credential-provider-process/LICENSE b/amplify/functions/deleteDocument/node_modules/@aws-sdk/credential-provider-process/LICENSE new file mode 100644 index 0000000..f9a6673 --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@aws-sdk/credential-provider-process/LICENSE @@ -0,0 +1,201 @@ +Apache License + Version 2.0, January 2004 + http://www.apache.org/licenses/ + + TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION + + 1. Definitions. + + "License" shall mean the terms and conditions for use, reproduction, + and distribution as defined by Sections 1 through 9 of this document. + + "Licensor" shall mean the copyright owner or entity authorized by + the copyright owner that is granting the License. + + "Legal Entity" shall mean the union of the acting entity and all + other entities that control, are controlled by, or are under common + control with that entity. 
For the purposes of this definition, + "control" means (i) the power, direct or indirect, to cause the + direction or management of such entity, whether by contract or + otherwise, or (ii) ownership of fifty percent (50%) or more of the + outstanding shares, or (iii) beneficial ownership of such entity. + + "You" (or "Your") shall mean an individual or Legal Entity + exercising permissions granted by this License. + + "Source" form shall mean the preferred form for making modifications, + including but not limited to software source code, documentation + source, and configuration files. + + "Object" form shall mean any form resulting from mechanical + transformation or translation of a Source form, including but + not limited to compiled object code, generated documentation, + and conversions to other media types. + + "Work" shall mean the work of authorship, whether in Source or + Object form, made available under the License, as indicated by a + copyright notice that is included in or attached to the work + (an example is provided in the Appendix below). + + "Derivative Works" shall mean any work, whether in Source or Object + form, that is based on (or derived from) the Work and for which the + editorial revisions, annotations, elaborations, or other modifications + represent, as a whole, an original work of authorship. For the purposes + of this License, Derivative Works shall not include works that remain + separable from, or merely link (or bind by name) to the interfaces of, + the Work and Derivative Works thereof. + + "Contribution" shall mean any work of authorship, including + the original version of the Work and any modifications or additions + to that Work or Derivative Works thereof, that is intentionally + submitted to Licensor for inclusion in the Work by the copyright owner + or by an individual or Legal Entity authorized to submit on behalf of + the copyright owner. 
For the purposes of this definition, "submitted" + means any form of electronic, verbal, or written communication sent + to the Licensor or its representatives, including but not limited to + communication on electronic mailing lists, source code control systems, + and issue tracking systems that are managed by, or on behalf of, the + Licensor for the purpose of discussing and improving the Work, but + excluding communication that is conspicuously marked or otherwise + designated in writing by the copyright owner as "Not a Contribution." + + "Contributor" shall mean Licensor and any individual or Legal Entity + on behalf of whom a Contribution has been received by Licensor and + subsequently incorporated within the Work. + + 2. Grant of Copyright License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + copyright license to reproduce, prepare Derivative Works of, + publicly display, publicly perform, sublicense, and distribute the + Work and such Derivative Works in Source or Object form. + + 3. Grant of Patent License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + (except as stated in this section) patent license to make, have made, + use, offer to sell, sell, import, and otherwise transfer the Work, + where such license applies only to those patent claims licensable + by such Contributor that are necessarily infringed by their + Contribution(s) alone or by combination of their Contribution(s) + with the Work to which such Contribution(s) was submitted. 
If You + institute patent litigation against any entity (including a + cross-claim or counterclaim in a lawsuit) alleging that the Work + or a Contribution incorporated within the Work constitutes direct + or contributory patent infringement, then any patent licenses + granted to You under this License for that Work shall terminate + as of the date such litigation is filed. + + 4. Redistribution. You may reproduce and distribute copies of the + Work or Derivative Works thereof in any medium, with or without + modifications, and in Source or Object form, provided that You + meet the following conditions: + + (a) You must give any other recipients of the Work or + Derivative Works a copy of this License; and + + (b) You must cause any modified files to carry prominent notices + stating that You changed the files; and + + (c) You must retain, in the Source form of any Derivative Works + that You distribute, all copyright, patent, trademark, and + attribution notices from the Source form of the Work, + excluding those notices that do not pertain to any part of + the Derivative Works; and + + (d) If the Work includes a "NOTICE" text file as part of its + distribution, then any Derivative Works that You distribute must + include a readable copy of the attribution notices contained + within such NOTICE file, excluding those notices that do not + pertain to any part of the Derivative Works, in at least one + of the following places: within a NOTICE text file distributed + as part of the Derivative Works; within the Source form or + documentation, if provided along with the Derivative Works; or, + within a display generated by the Derivative Works, if and + wherever such third-party notices normally appear. The contents + of the NOTICE file are for informational purposes only and + do not modify the License. 
You may add Your own attribution + notices within Derivative Works that You distribute, alongside + or as an addendum to the NOTICE text from the Work, provided + that such additional attribution notices cannot be construed + as modifying the License. + + You may add Your own copyright statement to Your modifications and + may provide additional or different license terms and conditions + for use, reproduction, or distribution of Your modifications, or + for any such Derivative Works as a whole, provided Your use, + reproduction, and distribution of the Work otherwise complies with + the conditions stated in this License. + + 5. Submission of Contributions. Unless You explicitly state otherwise, + any Contribution intentionally submitted for inclusion in the Work + by You to the Licensor shall be under the terms and conditions of + this License, without any additional terms or conditions. + Notwithstanding the above, nothing herein shall supersede or modify + the terms of any separate license agreement you may have executed + with Licensor regarding such Contributions. + + 6. Trademarks. This License does not grant permission to use the trade + names, trademarks, service marks, or product names of the Licensor, + except as required for reasonable and customary use in describing the + origin of the Work and reproducing the content of the NOTICE file. + + 7. Disclaimer of Warranty. Unless required by applicable law or + agreed to in writing, Licensor provides the Work (and each + Contributor provides its Contributions) on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or + implied, including, without limitation, any warranties or conditions + of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A + PARTICULAR PURPOSE. You are solely responsible for determining the + appropriateness of using or redistributing the Work and assume any + risks associated with Your exercise of permissions under this License. + + 8. 
Limitation of Liability. In no event and under no legal theory, + whether in tort (including negligence), contract, or otherwise, + unless required by applicable law (such as deliberate and grossly + negligent acts) or agreed to in writing, shall any Contributor be + liable to You for damages, including any direct, indirect, special, + incidental, or consequential damages of any character arising as a + result of this License or out of the use or inability to use the + Work (including but not limited to damages for loss of goodwill, + work stoppage, computer failure or malfunction, or any and all + other commercial damages or losses), even if such Contributor + has been advised of the possibility of such damages. + + 9. Accepting Warranty or Additional Liability. While redistributing + the Work or Derivative Works thereof, You may choose to offer, + and charge a fee for, acceptance of support, warranty, indemnity, + or other liability obligations and/or rights consistent with this + License. However, in accepting such obligations, You may act only + on Your own behalf and on Your sole responsibility, not on behalf + of any other Contributor, and only if You agree to indemnify, + defend, and hold each Contributor harmless for any liability + incurred by, or claims asserted against, such Contributor by reason + of your accepting any such warranty or additional liability. + + END OF TERMS AND CONDITIONS + + APPENDIX: How to apply the Apache License to your work. + + To apply the Apache License to your work, attach the following + boilerplate notice, with the fields enclosed by brackets "{}" + replaced with your own identifying information. (Don't include + the brackets!) The text should be enclosed in the appropriate + comment syntax for the file format. We also recommend that a + file or class name and description of purpose be included on the + same "printed page" as the copyright notice for easier + identification within third-party archives. 
+ + Copyright 2019 Amazon.com, Inc. or its affiliates. All Rights Reserved. + + Licensed under the Apache License, Version 2.0 (the "License"); + you may not use this file except in compliance with the License. + You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + + Unless required by applicable law or agreed to in writing, software + distributed under the License is distributed on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + See the License for the specific language governing permissions and + limitations under the License. \ No newline at end of file diff --git a/amplify/functions/deleteDocument/node_modules/@aws-sdk/credential-provider-process/README.md b/amplify/functions/deleteDocument/node_modules/@aws-sdk/credential-provider-process/README.md new file mode 100644 index 0000000..4e9d9bd --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@aws-sdk/credential-provider-process/README.md @@ -0,0 +1,11 @@ +# @aws-sdk/credential-provider-process + +[![NPM version](https://img.shields.io/npm/v/@aws-sdk/credential-provider-process/latest.svg)](https://www.npmjs.com/package/@aws-sdk/credential-provider-process) +[![NPM downloads](https://img.shields.io/npm/dm/@aws-sdk/credential-provider-process.svg)](https://www.npmjs.com/package/@aws-sdk/credential-provider-process) + +> An internal package + +## Usage + +You probably shouldn't, at least directly. Please use [@aws-sdk/credential-providers](https://www.npmjs.com/package/@aws-sdk/credential-providers) +instead. 
diff --git a/amplify/functions/deleteDocument/node_modules/@aws-sdk/credential-provider-process/dist-cjs/index.js b/amplify/functions/deleteDocument/node_modules/@aws-sdk/credential-provider-process/dist-cjs/index.js new file mode 100644 index 0000000..57146de --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@aws-sdk/credential-provider-process/dist-cjs/index.js @@ -0,0 +1,114 @@ +"use strict"; +var __defProp = Object.defineProperty; +var __getOwnPropDesc = Object.getOwnPropertyDescriptor; +var __getOwnPropNames = Object.getOwnPropertyNames; +var __hasOwnProp = Object.prototype.hasOwnProperty; +var __name = (target, value) => __defProp(target, "name", { value, configurable: true }); +var __export = (target, all) => { + for (var name in all) + __defProp(target, name, { get: all[name], enumerable: true }); +}; +var __copyProps = (to, from, except, desc) => { + if (from && typeof from === "object" || typeof from === "function") { + for (let key of __getOwnPropNames(from)) + if (!__hasOwnProp.call(to, key) && key !== except) + __defProp(to, key, { get: () => from[key], enumerable: !(desc = __getOwnPropDesc(from, key)) || desc.enumerable }); + } + return to; +}; +var __toCommonJS = (mod) => __copyProps(__defProp({}, "__esModule", { value: true }), mod); + +// src/index.ts +var index_exports = {}; +__export(index_exports, { + fromProcess: () => fromProcess +}); +module.exports = __toCommonJS(index_exports); + +// src/fromProcess.ts +var import_shared_ini_file_loader = require("@smithy/shared-ini-file-loader"); + +// src/resolveProcessCredentials.ts +var import_property_provider = require("@smithy/property-provider"); +var import_child_process = require("child_process"); +var import_util = require("util"); + +// src/getValidatedProcessCredentials.ts +var import_client = require("@aws-sdk/core/client"); +var getValidatedProcessCredentials = /* @__PURE__ */ __name((profileName, data, profiles) => { + if (data.Version !== 1) { + throw Error(`Profile 
${profileName} credential_process did not return Version 1.`); + } + if (data.AccessKeyId === void 0 || data.SecretAccessKey === void 0) { + throw Error(`Profile ${profileName} credential_process returned invalid credentials.`); + } + if (data.Expiration) { + const currentTime = /* @__PURE__ */ new Date(); + const expireTime = new Date(data.Expiration); + if (expireTime < currentTime) { + throw Error(`Profile ${profileName} credential_process returned expired credentials.`); + } + } + let accountId = data.AccountId; + if (!accountId && profiles?.[profileName]?.aws_account_id) { + accountId = profiles[profileName].aws_account_id; + } + const credentials = { + accessKeyId: data.AccessKeyId, + secretAccessKey: data.SecretAccessKey, + ...data.SessionToken && { sessionToken: data.SessionToken }, + ...data.Expiration && { expiration: new Date(data.Expiration) }, + ...data.CredentialScope && { credentialScope: data.CredentialScope }, + ...accountId && { accountId } + }; + (0, import_client.setCredentialFeature)(credentials, "CREDENTIALS_PROCESS", "w"); + return credentials; +}, "getValidatedProcessCredentials"); + +// src/resolveProcessCredentials.ts +var resolveProcessCredentials = /* @__PURE__ */ __name(async (profileName, profiles, logger) => { + const profile = profiles[profileName]; + if (profiles[profileName]) { + const credentialProcess = profile["credential_process"]; + if (credentialProcess !== void 0) { + const execPromise = (0, import_util.promisify)(import_child_process.exec); + try { + const { stdout } = await execPromise(credentialProcess); + let data; + try { + data = JSON.parse(stdout.trim()); + } catch { + throw Error(`Profile ${profileName} credential_process returned invalid JSON.`); + } + return getValidatedProcessCredentials(profileName, data, profiles); + } catch (error) { + throw new import_property_provider.CredentialsProviderError(error.message, { logger }); + } + } else { + throw new import_property_provider.CredentialsProviderError(`Profile 
${profileName} did not contain credential_process.`, { logger }); + } + } else { + throw new import_property_provider.CredentialsProviderError(`Profile ${profileName} could not be found in shared credentials file.`, { + logger + }); + } +}, "resolveProcessCredentials"); + +// src/fromProcess.ts +var fromProcess = /* @__PURE__ */ __name((init = {}) => async ({ callerClientConfig } = {}) => { + init.logger?.debug("@aws-sdk/credential-provider-process - fromProcess"); + const profiles = await (0, import_shared_ini_file_loader.parseKnownFiles)(init); + return resolveProcessCredentials( + (0, import_shared_ini_file_loader.getProfileName)({ + profile: init.profile ?? callerClientConfig?.profile + }), + profiles, + init.logger + ); +}, "fromProcess"); +// Annotate the CommonJS export names for ESM import in node: + +0 && (module.exports = { + fromProcess +}); + diff --git a/amplify/functions/deleteDocument/node_modules/@aws-sdk/credential-provider-process/dist-es/ProcessCredentials.js b/amplify/functions/deleteDocument/node_modules/@aws-sdk/credential-provider-process/dist-es/ProcessCredentials.js new file mode 100644 index 0000000..cb0ff5c --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@aws-sdk/credential-provider-process/dist-es/ProcessCredentials.js @@ -0,0 +1 @@ +export {}; diff --git a/amplify/functions/deleteDocument/node_modules/@aws-sdk/credential-provider-process/dist-es/fromProcess.js b/amplify/functions/deleteDocument/node_modules/@aws-sdk/credential-provider-process/dist-es/fromProcess.js new file mode 100644 index 0000000..9e1e800 --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@aws-sdk/credential-provider-process/dist-es/fromProcess.js @@ -0,0 +1,9 @@ +import { getProfileName, parseKnownFiles } from "@smithy/shared-ini-file-loader"; +import { resolveProcessCredentials } from "./resolveProcessCredentials"; +export const fromProcess = (init = {}) => async ({ callerClientConfig } = {}) => { + 
init.logger?.debug("@aws-sdk/credential-provider-process - fromProcess"); + const profiles = await parseKnownFiles(init); + return resolveProcessCredentials(getProfileName({ + profile: init.profile ?? callerClientConfig?.profile, + }), profiles, init.logger); +}; diff --git a/amplify/functions/deleteDocument/node_modules/@aws-sdk/credential-provider-process/dist-es/getValidatedProcessCredentials.js b/amplify/functions/deleteDocument/node_modules/@aws-sdk/credential-provider-process/dist-es/getValidatedProcessCredentials.js new file mode 100644 index 0000000..caa0dd1 --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@aws-sdk/credential-provider-process/dist-es/getValidatedProcessCredentials.js @@ -0,0 +1,30 @@ +import { setCredentialFeature } from "@aws-sdk/core/client"; +export const getValidatedProcessCredentials = (profileName, data, profiles) => { + if (data.Version !== 1) { + throw Error(`Profile ${profileName} credential_process did not return Version 1.`); + } + if (data.AccessKeyId === undefined || data.SecretAccessKey === undefined) { + throw Error(`Profile ${profileName} credential_process returned invalid credentials.`); + } + if (data.Expiration) { + const currentTime = new Date(); + const expireTime = new Date(data.Expiration); + if (expireTime < currentTime) { + throw Error(`Profile ${profileName} credential_process returned expired credentials.`); + } + } + let accountId = data.AccountId; + if (!accountId && profiles?.[profileName]?.aws_account_id) { + accountId = profiles[profileName].aws_account_id; + } + const credentials = { + accessKeyId: data.AccessKeyId, + secretAccessKey: data.SecretAccessKey, + ...(data.SessionToken && { sessionToken: data.SessionToken }), + ...(data.Expiration && { expiration: new Date(data.Expiration) }), + ...(data.CredentialScope && { credentialScope: data.CredentialScope }), + ...(accountId && { accountId }), + }; + setCredentialFeature(credentials, "CREDENTIALS_PROCESS", "w"); + return credentials; +}; 
diff --git a/amplify/functions/deleteDocument/node_modules/@aws-sdk/credential-provider-process/dist-es/index.js b/amplify/functions/deleteDocument/node_modules/@aws-sdk/credential-provider-process/dist-es/index.js new file mode 100644 index 0000000..b921d35 --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@aws-sdk/credential-provider-process/dist-es/index.js @@ -0,0 +1 @@ +export * from "./fromProcess"; diff --git a/amplify/functions/deleteDocument/node_modules/@aws-sdk/credential-provider-process/dist-es/resolveProcessCredentials.js b/amplify/functions/deleteDocument/node_modules/@aws-sdk/credential-provider-process/dist-es/resolveProcessCredentials.js new file mode 100644 index 0000000..334e0af --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@aws-sdk/credential-provider-process/dist-es/resolveProcessCredentials.js @@ -0,0 +1,35 @@ +import { CredentialsProviderError } from "@smithy/property-provider"; +import { exec } from "child_process"; +import { promisify } from "util"; +import { getValidatedProcessCredentials } from "./getValidatedProcessCredentials"; +export const resolveProcessCredentials = async (profileName, profiles, logger) => { + const profile = profiles[profileName]; + if (profiles[profileName]) { + const credentialProcess = profile["credential_process"]; + if (credentialProcess !== undefined) { + const execPromise = promisify(exec); + try { + const { stdout } = await execPromise(credentialProcess); + let data; + try { + data = JSON.parse(stdout.trim()); + } + catch { + throw Error(`Profile ${profileName} credential_process returned invalid JSON.`); + } + return getValidatedProcessCredentials(profileName, data, profiles); + } + catch (error) { + throw new CredentialsProviderError(error.message, { logger }); + } + } + else { + throw new CredentialsProviderError(`Profile ${profileName} did not contain credential_process.`, { logger }); + } + } + else { + throw new CredentialsProviderError(`Profile ${profileName} could 
not be found in shared credentials file.`, { + logger, + }); + } +}; diff --git a/amplify/functions/deleteDocument/node_modules/@aws-sdk/credential-provider-process/dist-types/ProcessCredentials.d.ts b/amplify/functions/deleteDocument/node_modules/@aws-sdk/credential-provider-process/dist-types/ProcessCredentials.d.ts new file mode 100644 index 0000000..a4e6b46 --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@aws-sdk/credential-provider-process/dist-types/ProcessCredentials.d.ts @@ -0,0 +1,12 @@ +/** + * @internal + */ +export type ProcessCredentials = { + Version: number; + AccessKeyId: string; + SecretAccessKey: string; + SessionToken?: string; + Expiration?: number; + CredentialScope?: string; + AccountId?: string; +}; diff --git a/amplify/functions/deleteDocument/node_modules/@aws-sdk/credential-provider-process/dist-types/fromProcess.d.ts b/amplify/functions/deleteDocument/node_modules/@aws-sdk/credential-provider-process/dist-types/fromProcess.d.ts new file mode 100644 index 0000000..2177630 --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@aws-sdk/credential-provider-process/dist-types/fromProcess.d.ts @@ -0,0 +1,14 @@ +import type { CredentialProviderOptions, RuntimeConfigAwsCredentialIdentityProvider } from "@aws-sdk/types"; +import { SourceProfileInit } from "@smithy/shared-ini-file-loader"; +/** + * @internal + */ +export interface FromProcessInit extends SourceProfileInit, CredentialProviderOptions { +} +/** + * @internal + * + * Creates a credential provider that will read from a credential_process specified + * in ini files. 
+ */ +export declare const fromProcess: (init?: FromProcessInit) => RuntimeConfigAwsCredentialIdentityProvider; diff --git a/amplify/functions/deleteDocument/node_modules/@aws-sdk/credential-provider-process/dist-types/getValidatedProcessCredentials.d.ts b/amplify/functions/deleteDocument/node_modules/@aws-sdk/credential-provider-process/dist-types/getValidatedProcessCredentials.d.ts new file mode 100644 index 0000000..1e86d6b --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@aws-sdk/credential-provider-process/dist-types/getValidatedProcessCredentials.d.ts @@ -0,0 +1,6 @@ +import { AwsCredentialIdentity, ParsedIniData } from "@smithy/types"; +import { ProcessCredentials } from "./ProcessCredentials"; +/** + * @internal + */ +export declare const getValidatedProcessCredentials: (profileName: string, data: ProcessCredentials, profiles: ParsedIniData) => AwsCredentialIdentity; diff --git a/amplify/functions/deleteDocument/node_modules/@aws-sdk/credential-provider-process/dist-types/index.d.ts b/amplify/functions/deleteDocument/node_modules/@aws-sdk/credential-provider-process/dist-types/index.d.ts new file mode 100644 index 0000000..adad939 --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@aws-sdk/credential-provider-process/dist-types/index.d.ts @@ -0,0 +1,4 @@ +/** + * @internal + */ +export * from "./fromProcess"; diff --git a/amplify/functions/deleteDocument/node_modules/@aws-sdk/credential-provider-process/dist-types/resolveProcessCredentials.d.ts b/amplify/functions/deleteDocument/node_modules/@aws-sdk/credential-provider-process/dist-types/resolveProcessCredentials.d.ts new file mode 100644 index 0000000..4f69ca7 --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@aws-sdk/credential-provider-process/dist-types/resolveProcessCredentials.d.ts @@ -0,0 +1,5 @@ +import { AwsCredentialIdentity, Logger, ParsedIniData } from "@smithy/types"; +/** + * @internal + */ +export declare const resolveProcessCredentials: 
(profileName: string, profiles: ParsedIniData, logger?: Logger) => Promise; diff --git a/amplify/functions/deleteDocument/node_modules/@aws-sdk/credential-provider-process/dist-types/ts3.4/ProcessCredentials.d.ts b/amplify/functions/deleteDocument/node_modules/@aws-sdk/credential-provider-process/dist-types/ts3.4/ProcessCredentials.d.ts new file mode 100644 index 0000000..45acf5e --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@aws-sdk/credential-provider-process/dist-types/ts3.4/ProcessCredentials.d.ts @@ -0,0 +1,9 @@ +export type ProcessCredentials = { + Version: number; + AccessKeyId: string; + SecretAccessKey: string; + SessionToken?: string; + Expiration?: number; + CredentialScope?: string; + AccountId?: string; +}; diff --git a/amplify/functions/deleteDocument/node_modules/@aws-sdk/credential-provider-process/dist-types/ts3.4/fromProcess.d.ts b/amplify/functions/deleteDocument/node_modules/@aws-sdk/credential-provider-process/dist-types/ts3.4/fromProcess.d.ts new file mode 100644 index 0000000..8e39656 --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@aws-sdk/credential-provider-process/dist-types/ts3.4/fromProcess.d.ts @@ -0,0 +1,11 @@ +import { + CredentialProviderOptions, + RuntimeConfigAwsCredentialIdentityProvider, +} from "@aws-sdk/types"; +import { SourceProfileInit } from "@smithy/shared-ini-file-loader"; +export interface FromProcessInit + extends SourceProfileInit, + CredentialProviderOptions {} +export declare const fromProcess: ( + init?: FromProcessInit +) => RuntimeConfigAwsCredentialIdentityProvider; diff --git a/amplify/functions/deleteDocument/node_modules/@aws-sdk/credential-provider-process/dist-types/ts3.4/getValidatedProcessCredentials.d.ts b/amplify/functions/deleteDocument/node_modules/@aws-sdk/credential-provider-process/dist-types/ts3.4/getValidatedProcessCredentials.d.ts new file mode 100644 index 0000000..f44c81c --- /dev/null +++ 
b/amplify/functions/deleteDocument/node_modules/@aws-sdk/credential-provider-process/dist-types/ts3.4/getValidatedProcessCredentials.d.ts @@ -0,0 +1,7 @@ +import { AwsCredentialIdentity, ParsedIniData } from "@smithy/types"; +import { ProcessCredentials } from "./ProcessCredentials"; +export declare const getValidatedProcessCredentials: ( + profileName: string, + data: ProcessCredentials, + profiles: ParsedIniData +) => AwsCredentialIdentity; diff --git a/amplify/functions/deleteDocument/node_modules/@aws-sdk/credential-provider-process/dist-types/ts3.4/index.d.ts b/amplify/functions/deleteDocument/node_modules/@aws-sdk/credential-provider-process/dist-types/ts3.4/index.d.ts new file mode 100644 index 0000000..b921d35 --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@aws-sdk/credential-provider-process/dist-types/ts3.4/index.d.ts @@ -0,0 +1 @@ +export * from "./fromProcess"; diff --git a/amplify/functions/deleteDocument/node_modules/@aws-sdk/credential-provider-process/dist-types/ts3.4/resolveProcessCredentials.d.ts b/amplify/functions/deleteDocument/node_modules/@aws-sdk/credential-provider-process/dist-types/ts3.4/resolveProcessCredentials.d.ts new file mode 100644 index 0000000..a204db4 --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@aws-sdk/credential-provider-process/dist-types/ts3.4/resolveProcessCredentials.d.ts @@ -0,0 +1,6 @@ +import { AwsCredentialIdentity, Logger, ParsedIniData } from "@smithy/types"; +export declare const resolveProcessCredentials: ( + profileName: string, + profiles: ParsedIniData, + logger?: Logger +) => Promise; diff --git a/amplify/functions/deleteDocument/node_modules/@aws-sdk/credential-provider-process/package.json b/amplify/functions/deleteDocument/node_modules/@aws-sdk/credential-provider-process/package.json new file mode 100644 index 0000000..fb1f383 --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@aws-sdk/credential-provider-process/package.json @@ -0,0 +1,63 @@ +{ + 
"name": "@aws-sdk/credential-provider-process", + "version": "3.799.0", + "description": "AWS credential provider that sources credential_process from ~/.aws/credentials and ~/.aws/config", + "main": "./dist-cjs/index.js", + "module": "./dist-es/index.js", + "scripts": { + "build": "concurrently 'yarn:build:cjs' 'yarn:build:es' 'yarn:build:types'", + "build:cjs": "node ../../scripts/compilation/inline credential-provider-process", + "build:es": "tsc -p tsconfig.es.json", + "build:include:deps": "lerna run --scope $npm_package_name --include-dependencies build", + "build:types": "tsc -p tsconfig.types.json", + "build:types:downlevel": "downlevel-dts dist-types dist-types/ts3.4", + "clean": "rimraf ./dist-* && rimraf *.tsbuildinfo", + "test": "yarn g:vitest run", + "test:watch": "yarn g:vitest watch" + }, + "keywords": [ + "aws", + "credentials" + ], + "author": { + "name": "AWS SDK for JavaScript Team", + "url": "https://aws.amazon.com/javascript/" + }, + "license": "Apache-2.0", + "dependencies": { + "@aws-sdk/core": "3.799.0", + "@aws-sdk/types": "3.775.0", + "@smithy/property-provider": "^4.0.2", + "@smithy/shared-ini-file-loader": "^4.0.2", + "@smithy/types": "^4.2.0", + "tslib": "^2.6.2" + }, + "devDependencies": { + "@tsconfig/recommended": "1.0.1", + "@types/node": "^18.19.69", + "concurrently": "7.0.0", + "downlevel-dts": "0.10.1", + "rimraf": "3.0.2", + "typescript": "~5.2.2" + }, + "types": "./dist-types/index.d.ts", + "engines": { + "node": ">=18.0.0" + }, + "typesVersions": { + "<4.0": { + "dist-types/*": [ + "dist-types/ts3.4/*" + ] + } + }, + "files": [ + "dist-*/**" + ], + "homepage": "https://github.com/aws/aws-sdk-js-v3/tree/main/packages/credential-provider-process", + "repository": { + "type": "git", + "url": "https://github.com/aws/aws-sdk-js-v3.git", + "directory": "packages/credential-provider-process" + } +} diff --git a/amplify/functions/deleteDocument/node_modules/@aws-sdk/credential-provider-sso/LICENSE 
b/amplify/functions/deleteDocument/node_modules/@aws-sdk/credential-provider-sso/LICENSE new file mode 100644 index 0000000..f9a6673 --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@aws-sdk/credential-provider-sso/LICENSE @@ -0,0 +1,201 @@ +Apache License + Version 2.0, January 2004 + http://www.apache.org/licenses/ + + TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION + + 1. Definitions. + + "License" shall mean the terms and conditions for use, reproduction, + and distribution as defined by Sections 1 through 9 of this document. + + "Licensor" shall mean the copyright owner or entity authorized by + the copyright owner that is granting the License. + + "Legal Entity" shall mean the union of the acting entity and all + other entities that control, are controlled by, or are under common + control with that entity. For the purposes of this definition, + "control" means (i) the power, direct or indirect, to cause the + direction or management of such entity, whether by contract or + otherwise, or (ii) ownership of fifty percent (50%) or more of the + outstanding shares, or (iii) beneficial ownership of such entity. + + "You" (or "Your") shall mean an individual or Legal Entity + exercising permissions granted by this License. + + "Source" form shall mean the preferred form for making modifications, + including but not limited to software source code, documentation + source, and configuration files. + + "Object" form shall mean any form resulting from mechanical + transformation or translation of a Source form, including but + not limited to compiled object code, generated documentation, + and conversions to other media types. + + "Work" shall mean the work of authorship, whether in Source or + Object form, made available under the License, as indicated by a + copyright notice that is included in or attached to the work + (an example is provided in the Appendix below). 
+ + "Derivative Works" shall mean any work, whether in Source or Object + form, that is based on (or derived from) the Work and for which the + editorial revisions, annotations, elaborations, or other modifications + represent, as a whole, an original work of authorship. For the purposes + of this License, Derivative Works shall not include works that remain + separable from, or merely link (or bind by name) to the interfaces of, + the Work and Derivative Works thereof. + + "Contribution" shall mean any work of authorship, including + the original version of the Work and any modifications or additions + to that Work or Derivative Works thereof, that is intentionally + submitted to Licensor for inclusion in the Work by the copyright owner + or by an individual or Legal Entity authorized to submit on behalf of + the copyright owner. For the purposes of this definition, "submitted" + means any form of electronic, verbal, or written communication sent + to the Licensor or its representatives, including but not limited to + communication on electronic mailing lists, source code control systems, + and issue tracking systems that are managed by, or on behalf of, the + Licensor for the purpose of discussing and improving the Work, but + excluding communication that is conspicuously marked or otherwise + designated in writing by the copyright owner as "Not a Contribution." + + "Contributor" shall mean Licensor and any individual or Legal Entity + on behalf of whom a Contribution has been received by Licensor and + subsequently incorporated within the Work. + + 2. Grant of Copyright License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + copyright license to reproduce, prepare Derivative Works of, + publicly display, publicly perform, sublicense, and distribute the + Work and such Derivative Works in Source or Object form. + + 3. 
Grant of Patent License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + (except as stated in this section) patent license to make, have made, + use, offer to sell, sell, import, and otherwise transfer the Work, + where such license applies only to those patent claims licensable + by such Contributor that are necessarily infringed by their + Contribution(s) alone or by combination of their Contribution(s) + with the Work to which such Contribution(s) was submitted. If You + institute patent litigation against any entity (including a + cross-claim or counterclaim in a lawsuit) alleging that the Work + or a Contribution incorporated within the Work constitutes direct + or contributory patent infringement, then any patent licenses + granted to You under this License for that Work shall terminate + as of the date such litigation is filed. + + 4. Redistribution. You may reproduce and distribute copies of the + Work or Derivative Works thereof in any medium, with or without + modifications, and in Source or Object form, provided that You + meet the following conditions: + + (a) You must give any other recipients of the Work or + Derivative Works a copy of this License; and + + (b) You must cause any modified files to carry prominent notices + stating that You changed the files; and + + (c) You must retain, in the Source form of any Derivative Works + that You distribute, all copyright, patent, trademark, and + attribution notices from the Source form of the Work, + excluding those notices that do not pertain to any part of + the Derivative Works; and + + (d) If the Work includes a "NOTICE" text file as part of its + distribution, then any Derivative Works that You distribute must + include a readable copy of the attribution notices contained + within such NOTICE file, excluding those notices that do not + pertain to any part of the Derivative 
Works, in at least one + of the following places: within a NOTICE text file distributed + as part of the Derivative Works; within the Source form or + documentation, if provided along with the Derivative Works; or, + within a display generated by the Derivative Works, if and + wherever such third-party notices normally appear. The contents + of the NOTICE file are for informational purposes only and + do not modify the License. You may add Your own attribution + notices within Derivative Works that You distribute, alongside + or as an addendum to the NOTICE text from the Work, provided + that such additional attribution notices cannot be construed + as modifying the License. + + You may add Your own copyright statement to Your modifications and + may provide additional or different license terms and conditions + for use, reproduction, or distribution of Your modifications, or + for any such Derivative Works as a whole, provided Your use, + reproduction, and distribution of the Work otherwise complies with + the conditions stated in this License. + + 5. Submission of Contributions. Unless You explicitly state otherwise, + any Contribution intentionally submitted for inclusion in the Work + by You to the Licensor shall be under the terms and conditions of + this License, without any additional terms or conditions. + Notwithstanding the above, nothing herein shall supersede or modify + the terms of any separate license agreement you may have executed + with Licensor regarding such Contributions. + + 6. Trademarks. This License does not grant permission to use the trade + names, trademarks, service marks, or product names of the Licensor, + except as required for reasonable and customary use in describing the + origin of the Work and reproducing the content of the NOTICE file. + + 7. Disclaimer of Warranty. 
Unless required by applicable law or + agreed to in writing, Licensor provides the Work (and each + Contributor provides its Contributions) on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or + implied, including, without limitation, any warranties or conditions + of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A + PARTICULAR PURPOSE. You are solely responsible for determining the + appropriateness of using or redistributing the Work and assume any + risks associated with Your exercise of permissions under this License. + + 8. Limitation of Liability. In no event and under no legal theory, + whether in tort (including negligence), contract, or otherwise, + unless required by applicable law (such as deliberate and grossly + negligent acts) or agreed to in writing, shall any Contributor be + liable to You for damages, including any direct, indirect, special, + incidental, or consequential damages of any character arising as a + result of this License or out of the use or inability to use the + Work (including but not limited to damages for loss of goodwill, + work stoppage, computer failure or malfunction, or any and all + other commercial damages or losses), even if such Contributor + has been advised of the possibility of such damages. + + 9. Accepting Warranty or Additional Liability. While redistributing + the Work or Derivative Works thereof, You may choose to offer, + and charge a fee for, acceptance of support, warranty, indemnity, + or other liability obligations and/or rights consistent with this + License. However, in accepting such obligations, You may act only + on Your own behalf and on Your sole responsibility, not on behalf + of any other Contributor, and only if You agree to indemnify, + defend, and hold each Contributor harmless for any liability + incurred by, or claims asserted against, such Contributor by reason + of your accepting any such warranty or additional liability. 
+ + END OF TERMS AND CONDITIONS + + APPENDIX: How to apply the Apache License to your work. + + To apply the Apache License to your work, attach the following + boilerplate notice, with the fields enclosed by brackets "{}" + replaced with your own identifying information. (Don't include + the brackets!) The text should be enclosed in the appropriate + comment syntax for the file format. We also recommend that a + file or class name and description of purpose be included on the + same "printed page" as the copyright notice for easier + identification within third-party archives. + + Copyright 2019 Amazon.com, Inc. or its affiliates. All Rights Reserved. + + Licensed under the Apache License, Version 2.0 (the "License"); + you may not use this file except in compliance with the License. + You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + + Unless required by applicable law or agreed to in writing, software + distributed under the License is distributed on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + See the License for the specific language governing permissions and + limitations under the License. 
\ No newline at end of file diff --git a/amplify/functions/deleteDocument/node_modules/@aws-sdk/credential-provider-sso/README.md b/amplify/functions/deleteDocument/node_modules/@aws-sdk/credential-provider-sso/README.md new file mode 100644 index 0000000..aba3fa8 --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@aws-sdk/credential-provider-sso/README.md @@ -0,0 +1,11 @@ +# @aws-sdk/credential-provider-sso + +[![NPM version](https://img.shields.io/npm/v/@aws-sdk/credential-provider-sso/latest.svg)](https://www.npmjs.com/package/@aws-sdk/credential-provider-sso) +[![NPM downloads](https://img.shields.io/npm/dm/@aws-sdk/credential-provider-sso.svg)](https://www.npmjs.com/package/@aws-sdk/credential-provider-sso) + +> An internal package + +## Usage + +You probably shouldn't, at least directly. Please use [@aws-sdk/credential-providers](https://www.npmjs.com/package/@aws-sdk/credential-providers) +instead. diff --git a/amplify/functions/deleteDocument/node_modules/@aws-sdk/credential-provider-sso/dist-cjs/index.js b/amplify/functions/deleteDocument/node_modules/@aws-sdk/credential-provider-sso/dist-cjs/index.js new file mode 100644 index 0000000..ab7549a --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@aws-sdk/credential-provider-sso/dist-cjs/index.js @@ -0,0 +1,246 @@ +"use strict"; +var __defProp = Object.defineProperty; +var __getOwnPropDesc = Object.getOwnPropertyDescriptor; +var __getOwnPropNames = Object.getOwnPropertyNames; +var __hasOwnProp = Object.prototype.hasOwnProperty; +var __name = (target, value) => __defProp(target, "name", { value, configurable: true }); +var __esm = (fn, res) => function __init() { + return fn && (res = (0, fn[__getOwnPropNames(fn)[0]])(fn = 0)), res; +}; +var __export = (target, all) => { + for (var name in all) + __defProp(target, name, { get: all[name], enumerable: true }); +}; +var __copyProps = (to, from, except, desc) => { + if (from && typeof from === "object" || typeof from === "function") 
{ + for (let key of __getOwnPropNames(from)) + if (!__hasOwnProp.call(to, key) && key !== except) + __defProp(to, key, { get: () => from[key], enumerable: !(desc = __getOwnPropDesc(from, key)) || desc.enumerable }); + } + return to; +}; +var __toCommonJS = (mod) => __copyProps(__defProp({}, "__esModule", { value: true }), mod); + +// src/loadSso.ts +var loadSso_exports = {}; +__export(loadSso_exports, { + GetRoleCredentialsCommand: () => import_client_sso.GetRoleCredentialsCommand, + SSOClient: () => import_client_sso.SSOClient +}); +var import_client_sso; +var init_loadSso = __esm({ + "src/loadSso.ts"() { + "use strict"; + import_client_sso = require("@aws-sdk/client-sso"); + } +}); + +// src/index.ts +var index_exports = {}; +__export(index_exports, { + fromSSO: () => fromSSO, + isSsoProfile: () => isSsoProfile, + validateSsoProfile: () => validateSsoProfile +}); +module.exports = __toCommonJS(index_exports); + +// src/fromSSO.ts + + + +// src/isSsoProfile.ts +var isSsoProfile = /* @__PURE__ */ __name((arg) => arg && (typeof arg.sso_start_url === "string" || typeof arg.sso_account_id === "string" || typeof arg.sso_session === "string" || typeof arg.sso_region === "string" || typeof arg.sso_role_name === "string"), "isSsoProfile"); + +// src/resolveSSOCredentials.ts +var import_client = require("@aws-sdk/core/client"); +var import_token_providers = require("@aws-sdk/token-providers"); +var import_property_provider = require("@smithy/property-provider"); +var import_shared_ini_file_loader = require("@smithy/shared-ini-file-loader"); +var SHOULD_FAIL_CREDENTIAL_CHAIN = false; +var resolveSSOCredentials = /* @__PURE__ */ __name(async ({ + ssoStartUrl, + ssoSession, + ssoAccountId, + ssoRegion, + ssoRoleName, + ssoClient, + clientConfig, + parentClientConfig, + profile, + logger +}) => { + let token; + const refreshMessage = `To refresh this SSO session run aws sso login with the corresponding profile.`; + if (ssoSession) { + try { + const _token = await (0, 
import_token_providers.fromSso)({ profile })(); + token = { + accessToken: _token.token, + expiresAt: new Date(_token.expiration).toISOString() + }; + } catch (e) { + throw new import_property_provider.CredentialsProviderError(e.message, { + tryNextLink: SHOULD_FAIL_CREDENTIAL_CHAIN, + logger + }); + } + } else { + try { + token = await (0, import_shared_ini_file_loader.getSSOTokenFromFile)(ssoStartUrl); + } catch (e) { + throw new import_property_provider.CredentialsProviderError(`The SSO session associated with this profile is invalid. ${refreshMessage}`, { + tryNextLink: SHOULD_FAIL_CREDENTIAL_CHAIN, + logger + }); + } + } + if (new Date(token.expiresAt).getTime() - Date.now() <= 0) { + throw new import_property_provider.CredentialsProviderError(`The SSO session associated with this profile has expired. ${refreshMessage}`, { + tryNextLink: SHOULD_FAIL_CREDENTIAL_CHAIN, + logger + }); + } + const { accessToken } = token; + const { SSOClient: SSOClient2, GetRoleCredentialsCommand: GetRoleCredentialsCommand2 } = await Promise.resolve().then(() => (init_loadSso(), loadSso_exports)); + const sso = ssoClient || new SSOClient2( + Object.assign({}, clientConfig ?? {}, { + logger: clientConfig?.logger ?? parentClientConfig?.logger, + region: clientConfig?.region ?? 
ssoRegion + }) + ); + let ssoResp; + try { + ssoResp = await sso.send( + new GetRoleCredentialsCommand2({ + accountId: ssoAccountId, + roleName: ssoRoleName, + accessToken + }) + ); + } catch (e) { + throw new import_property_provider.CredentialsProviderError(e, { + tryNextLink: SHOULD_FAIL_CREDENTIAL_CHAIN, + logger + }); + } + const { + roleCredentials: { accessKeyId, secretAccessKey, sessionToken, expiration, credentialScope, accountId } = {} + } = ssoResp; + if (!accessKeyId || !secretAccessKey || !sessionToken || !expiration) { + throw new import_property_provider.CredentialsProviderError("SSO returns an invalid temporary credential.", { + tryNextLink: SHOULD_FAIL_CREDENTIAL_CHAIN, + logger + }); + } + const credentials = { + accessKeyId, + secretAccessKey, + sessionToken, + expiration: new Date(expiration), + ...credentialScope && { credentialScope }, + ...accountId && { accountId } + }; + if (ssoSession) { + (0, import_client.setCredentialFeature)(credentials, "CREDENTIALS_SSO", "s"); + } else { + (0, import_client.setCredentialFeature)(credentials, "CREDENTIALS_SSO_LEGACY", "u"); + } + return credentials; +}, "resolveSSOCredentials"); + +// src/validateSsoProfile.ts + +var validateSsoProfile = /* @__PURE__ */ __name((profile, logger) => { + const { sso_start_url, sso_account_id, sso_region, sso_role_name } = profile; + if (!sso_start_url || !sso_account_id || !sso_region || !sso_role_name) { + throw new import_property_provider.CredentialsProviderError( + `Profile is configured with invalid SSO credentials. Required parameters "sso_account_id", "sso_region", "sso_role_name", "sso_start_url". 
Got ${Object.keys(profile).join( + ", " + )} +Reference: https://docs.aws.amazon.com/cli/latest/userguide/cli-configure-sso.html`, + { tryNextLink: false, logger } + ); + } + return profile; +}, "validateSsoProfile"); + +// src/fromSSO.ts +var fromSSO = /* @__PURE__ */ __name((init = {}) => async ({ callerClientConfig } = {}) => { + init.logger?.debug("@aws-sdk/credential-provider-sso - fromSSO"); + const { ssoStartUrl, ssoAccountId, ssoRegion, ssoRoleName, ssoSession } = init; + const { ssoClient } = init; + const profileName = (0, import_shared_ini_file_loader.getProfileName)({ + profile: init.profile ?? callerClientConfig?.profile + }); + if (!ssoStartUrl && !ssoAccountId && !ssoRegion && !ssoRoleName && !ssoSession) { + const profiles = await (0, import_shared_ini_file_loader.parseKnownFiles)(init); + const profile = profiles[profileName]; + if (!profile) { + throw new import_property_provider.CredentialsProviderError(`Profile ${profileName} was not found.`, { logger: init.logger }); + } + if (!isSsoProfile(profile)) { + throw new import_property_provider.CredentialsProviderError(`Profile ${profileName} is not configured with SSO credentials.`, { + logger: init.logger + }); + } + if (profile?.sso_session) { + const ssoSessions = await (0, import_shared_ini_file_loader.loadSsoSessionData)(init); + const session = ssoSessions[profile.sso_session]; + const conflictMsg = ` configurations in profile ${profileName} and sso-session ${profile.sso_session}`; + if (ssoRegion && ssoRegion !== session.sso_region) { + throw new import_property_provider.CredentialsProviderError(`Conflicting SSO region` + conflictMsg, { + tryNextLink: false, + logger: init.logger + }); + } + if (ssoStartUrl && ssoStartUrl !== session.sso_start_url) { + throw new import_property_provider.CredentialsProviderError(`Conflicting SSO start_url` + conflictMsg, { + tryNextLink: false, + logger: init.logger + }); + } + profile.sso_region = session.sso_region; + profile.sso_start_url = 
session.sso_start_url; + } + const { sso_start_url, sso_account_id, sso_region, sso_role_name, sso_session } = validateSsoProfile( + profile, + init.logger + ); + return resolveSSOCredentials({ + ssoStartUrl: sso_start_url, + ssoSession: sso_session, + ssoAccountId: sso_account_id, + ssoRegion: sso_region, + ssoRoleName: sso_role_name, + ssoClient, + clientConfig: init.clientConfig, + parentClientConfig: init.parentClientConfig, + profile: profileName + }); + } else if (!ssoStartUrl || !ssoAccountId || !ssoRegion || !ssoRoleName) { + throw new import_property_provider.CredentialsProviderError( + 'Incomplete configuration. The fromSSO() argument hash must include "ssoStartUrl", "ssoAccountId", "ssoRegion", "ssoRoleName"', + { tryNextLink: false, logger: init.logger } + ); + } else { + return resolveSSOCredentials({ + ssoStartUrl, + ssoSession, + ssoAccountId, + ssoRegion, + ssoRoleName, + ssoClient, + clientConfig: init.clientConfig, + parentClientConfig: init.parentClientConfig, + profile: profileName + }); + } +}, "fromSSO"); +// Annotate the CommonJS export names for ESM import in node: + +0 && (module.exports = { + fromSSO, + isSsoProfile, + validateSsoProfile +}); + diff --git a/amplify/functions/deleteDocument/node_modules/@aws-sdk/credential-provider-sso/dist-es/fromSSO.js b/amplify/functions/deleteDocument/node_modules/@aws-sdk/credential-provider-sso/dist-es/fromSSO.js new file mode 100644 index 0000000..75f1860 --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@aws-sdk/credential-provider-sso/dist-es/fromSSO.js @@ -0,0 +1,73 @@ +import { CredentialsProviderError } from "@smithy/property-provider"; +import { getProfileName, loadSsoSessionData, parseKnownFiles } from "@smithy/shared-ini-file-loader"; +import { isSsoProfile } from "./isSsoProfile"; +import { resolveSSOCredentials } from "./resolveSSOCredentials"; +import { validateSsoProfile } from "./validateSsoProfile"; +export const fromSSO = (init = {}) => async ({ callerClientConfig } = 
{}) => { + init.logger?.debug("@aws-sdk/credential-provider-sso - fromSSO"); + const { ssoStartUrl, ssoAccountId, ssoRegion, ssoRoleName, ssoSession } = init; + const { ssoClient } = init; + const profileName = getProfileName({ + profile: init.profile ?? callerClientConfig?.profile, + }); + if (!ssoStartUrl && !ssoAccountId && !ssoRegion && !ssoRoleName && !ssoSession) { + const profiles = await parseKnownFiles(init); + const profile = profiles[profileName]; + if (!profile) { + throw new CredentialsProviderError(`Profile ${profileName} was not found.`, { logger: init.logger }); + } + if (!isSsoProfile(profile)) { + throw new CredentialsProviderError(`Profile ${profileName} is not configured with SSO credentials.`, { + logger: init.logger, + }); + } + if (profile?.sso_session) { + const ssoSessions = await loadSsoSessionData(init); + const session = ssoSessions[profile.sso_session]; + const conflictMsg = ` configurations in profile ${profileName} and sso-session ${profile.sso_session}`; + if (ssoRegion && ssoRegion !== session.sso_region) { + throw new CredentialsProviderError(`Conflicting SSO region` + conflictMsg, { + tryNextLink: false, + logger: init.logger, + }); + } + if (ssoStartUrl && ssoStartUrl !== session.sso_start_url) { + throw new CredentialsProviderError(`Conflicting SSO start_url` + conflictMsg, { + tryNextLink: false, + logger: init.logger, + }); + } + profile.sso_region = session.sso_region; + profile.sso_start_url = session.sso_start_url; + } + const { sso_start_url, sso_account_id, sso_region, sso_role_name, sso_session } = validateSsoProfile(profile, init.logger); + return resolveSSOCredentials({ + ssoStartUrl: sso_start_url, + ssoSession: sso_session, + ssoAccountId: sso_account_id, + ssoRegion: sso_region, + ssoRoleName: sso_role_name, + ssoClient: ssoClient, + clientConfig: init.clientConfig, + parentClientConfig: init.parentClientConfig, + profile: profileName, + }); + } + else if (!ssoStartUrl || !ssoAccountId || !ssoRegion || !ssoRoleName) 
{ + throw new CredentialsProviderError("Incomplete configuration. The fromSSO() argument hash must include " + + '"ssoStartUrl", "ssoAccountId", "ssoRegion", "ssoRoleName"', { tryNextLink: false, logger: init.logger }); + } + else { + return resolveSSOCredentials({ + ssoStartUrl, + ssoSession, + ssoAccountId, + ssoRegion, + ssoRoleName, + ssoClient, + clientConfig: init.clientConfig, + parentClientConfig: init.parentClientConfig, + profile: profileName, + }); + } +}; diff --git a/amplify/functions/deleteDocument/node_modules/@aws-sdk/credential-provider-sso/dist-es/index.js b/amplify/functions/deleteDocument/node_modules/@aws-sdk/credential-provider-sso/dist-es/index.js new file mode 100644 index 0000000..7215fb6 --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@aws-sdk/credential-provider-sso/dist-es/index.js @@ -0,0 +1,4 @@ +export * from "./fromSSO"; +export * from "./isSsoProfile"; +export * from "./types"; +export * from "./validateSsoProfile"; diff --git a/amplify/functions/deleteDocument/node_modules/@aws-sdk/credential-provider-sso/dist-es/isSsoProfile.js b/amplify/functions/deleteDocument/node_modules/@aws-sdk/credential-provider-sso/dist-es/isSsoProfile.js new file mode 100644 index 0000000..e655438 --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@aws-sdk/credential-provider-sso/dist-es/isSsoProfile.js @@ -0,0 +1,6 @@ +export const isSsoProfile = (arg) => arg && + (typeof arg.sso_start_url === "string" || + typeof arg.sso_account_id === "string" || + typeof arg.sso_session === "string" || + typeof arg.sso_region === "string" || + typeof arg.sso_role_name === "string"); diff --git a/amplify/functions/deleteDocument/node_modules/@aws-sdk/credential-provider-sso/dist-es/loadSso.js b/amplify/functions/deleteDocument/node_modules/@aws-sdk/credential-provider-sso/dist-es/loadSso.js new file mode 100644 index 0000000..6a4414f --- /dev/null +++ 
b/amplify/functions/deleteDocument/node_modules/@aws-sdk/credential-provider-sso/dist-es/loadSso.js @@ -0,0 +1,2 @@ +import { GetRoleCredentialsCommand, SSOClient } from "@aws-sdk/client-sso"; +export { GetRoleCredentialsCommand, SSOClient }; diff --git a/amplify/functions/deleteDocument/node_modules/@aws-sdk/credential-provider-sso/dist-es/resolveSSOCredentials.js b/amplify/functions/deleteDocument/node_modules/@aws-sdk/credential-provider-sso/dist-es/resolveSSOCredentials.js new file mode 100644 index 0000000..979d9b2 --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@aws-sdk/credential-provider-sso/dist-es/resolveSSOCredentials.js @@ -0,0 +1,84 @@ +import { setCredentialFeature } from "@aws-sdk/core/client"; +import { fromSso as getSsoTokenProvider } from "@aws-sdk/token-providers"; +import { CredentialsProviderError } from "@smithy/property-provider"; +import { getSSOTokenFromFile } from "@smithy/shared-ini-file-loader"; +const SHOULD_FAIL_CREDENTIAL_CHAIN = false; +export const resolveSSOCredentials = async ({ ssoStartUrl, ssoSession, ssoAccountId, ssoRegion, ssoRoleName, ssoClient, clientConfig, parentClientConfig, profile, logger, }) => { + let token; + const refreshMessage = `To refresh this SSO session run aws sso login with the corresponding profile.`; + if (ssoSession) { + try { + const _token = await getSsoTokenProvider({ profile })(); + token = { + accessToken: _token.token, + expiresAt: new Date(_token.expiration).toISOString(), + }; + } + catch (e) { + throw new CredentialsProviderError(e.message, { + tryNextLink: SHOULD_FAIL_CREDENTIAL_CHAIN, + logger, + }); + } + } + else { + try { + token = await getSSOTokenFromFile(ssoStartUrl); + } + catch (e) { + throw new CredentialsProviderError(`The SSO session associated with this profile is invalid. 
${refreshMessage}`, { + tryNextLink: SHOULD_FAIL_CREDENTIAL_CHAIN, + logger, + }); + } + } + if (new Date(token.expiresAt).getTime() - Date.now() <= 0) { + throw new CredentialsProviderError(`The SSO session associated with this profile has expired. ${refreshMessage}`, { + tryNextLink: SHOULD_FAIL_CREDENTIAL_CHAIN, + logger, + }); + } + const { accessToken } = token; + const { SSOClient, GetRoleCredentialsCommand } = await import("./loadSso"); + const sso = ssoClient || + new SSOClient(Object.assign({}, clientConfig ?? {}, { + logger: clientConfig?.logger ?? parentClientConfig?.logger, + region: clientConfig?.region ?? ssoRegion, + })); + let ssoResp; + try { + ssoResp = await sso.send(new GetRoleCredentialsCommand({ + accountId: ssoAccountId, + roleName: ssoRoleName, + accessToken, + })); + } + catch (e) { + throw new CredentialsProviderError(e, { + tryNextLink: SHOULD_FAIL_CREDENTIAL_CHAIN, + logger, + }); + } + const { roleCredentials: { accessKeyId, secretAccessKey, sessionToken, expiration, credentialScope, accountId } = {}, } = ssoResp; + if (!accessKeyId || !secretAccessKey || !sessionToken || !expiration) { + throw new CredentialsProviderError("SSO returns an invalid temporary credential.", { + tryNextLink: SHOULD_FAIL_CREDENTIAL_CHAIN, + logger, + }); + } + const credentials = { + accessKeyId, + secretAccessKey, + sessionToken, + expiration: new Date(expiration), + ...(credentialScope && { credentialScope }), + ...(accountId && { accountId }), + }; + if (ssoSession) { + setCredentialFeature(credentials, "CREDENTIALS_SSO", "s"); + } + else { + setCredentialFeature(credentials, "CREDENTIALS_SSO_LEGACY", "u"); + } + return credentials; +}; diff --git a/amplify/functions/deleteDocument/node_modules/@aws-sdk/credential-provider-sso/dist-es/types.js b/amplify/functions/deleteDocument/node_modules/@aws-sdk/credential-provider-sso/dist-es/types.js new file mode 100644 index 0000000..cb0ff5c --- /dev/null +++ 
b/amplify/functions/deleteDocument/node_modules/@aws-sdk/credential-provider-sso/dist-es/types.js @@ -0,0 +1 @@ +export {}; diff --git a/amplify/functions/deleteDocument/node_modules/@aws-sdk/credential-provider-sso/dist-es/validateSsoProfile.js b/amplify/functions/deleteDocument/node_modules/@aws-sdk/credential-provider-sso/dist-es/validateSsoProfile.js new file mode 100644 index 0000000..94174b6 --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@aws-sdk/credential-provider-sso/dist-es/validateSsoProfile.js @@ -0,0 +1,9 @@ +import { CredentialsProviderError } from "@smithy/property-provider"; +export const validateSsoProfile = (profile, logger) => { + const { sso_start_url, sso_account_id, sso_region, sso_role_name } = profile; + if (!sso_start_url || !sso_account_id || !sso_region || !sso_role_name) { + throw new CredentialsProviderError(`Profile is configured with invalid SSO credentials. Required parameters "sso_account_id", ` + + `"sso_region", "sso_role_name", "sso_start_url". Got ${Object.keys(profile).join(", ")}\nReference: https://docs.aws.amazon.com/cli/latest/userguide/cli-configure-sso.html`, { tryNextLink: false, logger }); + } + return profile; +}; diff --git a/amplify/functions/deleteDocument/node_modules/@aws-sdk/credential-provider-sso/dist-types/fromSSO.d.ts b/amplify/functions/deleteDocument/node_modules/@aws-sdk/credential-provider-sso/dist-types/fromSSO.d.ts new file mode 100644 index 0000000..47521a6 --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@aws-sdk/credential-provider-sso/dist-types/fromSSO.d.ts @@ -0,0 +1,68 @@ +import type { CredentialProviderOptions, RuntimeConfigAwsCredentialIdentityProvider } from "@aws-sdk/types"; +import { SourceProfileInit } from "@smithy/shared-ini-file-loader"; +import type { SSOClient, SSOClientConfig } from "./loadSso"; +/** + * @internal + */ +export interface SsoCredentialsParameters { + /** + * The URL to the AWS SSO service. 
+ */ + ssoStartUrl: string; + /** + * SSO session identifier. + * Presence implies usage of the SSOTokenProvider. + */ + ssoSession?: string; + /** + * The ID of the AWS account to use for temporary credentials. + */ + ssoAccountId: string; + /** + * The AWS region to use for temporary credentials. + */ + ssoRegion: string; + /** + * The name of the AWS role to assume. + */ + ssoRoleName: string; +} +/** + * @internal + */ +export interface FromSSOInit extends SourceProfileInit, CredentialProviderOptions { + ssoClient?: SSOClient; + clientConfig?: SSOClientConfig; +} +/** + * @internal + * + * Creates a credential provider that will read from a credential_process specified + * in ini files. + * + * The SSO credential provider must support both + * + * 1. the legacy profile format, + * @example + * ``` + * [profile sample-profile] + * sso_account_id = 012345678901 + * sso_region = us-east-1 + * sso_role_name = SampleRole + * sso_start_url = https://www.....com/start + * ``` + * + * 2. and the profile format for SSO Token Providers. 
+ * @example + * ``` + * [profile sso-profile] + * sso_session = dev + * sso_account_id = 012345678901 + * sso_role_name = SampleRole + * + * [sso-session dev] + * sso_region = us-east-1 + * sso_start_url = https://www.....com/start + * ``` + */ +export declare const fromSSO: (init?: FromSSOInit & Partial) => RuntimeConfigAwsCredentialIdentityProvider; diff --git a/amplify/functions/deleteDocument/node_modules/@aws-sdk/credential-provider-sso/dist-types/index.d.ts b/amplify/functions/deleteDocument/node_modules/@aws-sdk/credential-provider-sso/dist-types/index.d.ts new file mode 100644 index 0000000..d851c15 --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@aws-sdk/credential-provider-sso/dist-types/index.d.ts @@ -0,0 +1,16 @@ +/** + * @internal + */ +export * from "./fromSSO"; +/** + * @internal + */ +export * from "./isSsoProfile"; +/** + * @internal + */ +export * from "./types"; +/** + * @internal + */ +export * from "./validateSsoProfile"; diff --git a/amplify/functions/deleteDocument/node_modules/@aws-sdk/credential-provider-sso/dist-types/isSsoProfile.d.ts b/amplify/functions/deleteDocument/node_modules/@aws-sdk/credential-provider-sso/dist-types/isSsoProfile.d.ts new file mode 100644 index 0000000..77c1fb2 --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@aws-sdk/credential-provider-sso/dist-types/isSsoProfile.d.ts @@ -0,0 +1,6 @@ +import { Profile } from "@smithy/types"; +import { SsoProfile } from "./types"; +/** + * @internal + */ +export declare const isSsoProfile: (arg: Profile) => arg is Partial; diff --git a/amplify/functions/deleteDocument/node_modules/@aws-sdk/credential-provider-sso/dist-types/loadSso.d.ts b/amplify/functions/deleteDocument/node_modules/@aws-sdk/credential-provider-sso/dist-types/loadSso.d.ts new file mode 100644 index 0000000..f44232f --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@aws-sdk/credential-provider-sso/dist-types/loadSso.d.ts @@ -0,0 +1,3 @@ +import { 
GetRoleCredentialsCommand, SSOClient } from "@aws-sdk/client-sso"; +export { GetRoleCredentialsCommand, SSOClient }; +export type { SSOClientConfig, GetRoleCredentialsCommandOutput } from "@aws-sdk/client-sso"; diff --git a/amplify/functions/deleteDocument/node_modules/@aws-sdk/credential-provider-sso/dist-types/resolveSSOCredentials.d.ts b/amplify/functions/deleteDocument/node_modules/@aws-sdk/credential-provider-sso/dist-types/resolveSSOCredentials.d.ts new file mode 100644 index 0000000..e4e3fcc --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@aws-sdk/credential-provider-sso/dist-types/resolveSSOCredentials.d.ts @@ -0,0 +1,6 @@ +import { AwsCredentialIdentity } from "@smithy/types"; +import { FromSSOInit, SsoCredentialsParameters } from "./fromSSO"; +/** + * @internal + */ +export declare const resolveSSOCredentials: ({ ssoStartUrl, ssoSession, ssoAccountId, ssoRegion, ssoRoleName, ssoClient, clientConfig, parentClientConfig, profile, logger, }: FromSSOInit & SsoCredentialsParameters) => Promise; diff --git a/amplify/functions/deleteDocument/node_modules/@aws-sdk/credential-provider-sso/dist-types/ts3.4/fromSSO.d.ts b/amplify/functions/deleteDocument/node_modules/@aws-sdk/credential-provider-sso/dist-types/ts3.4/fromSSO.d.ts new file mode 100644 index 0000000..bf50b63 --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@aws-sdk/credential-provider-sso/dist-types/ts3.4/fromSSO.d.ts @@ -0,0 +1,22 @@ +import { + CredentialProviderOptions, + RuntimeConfigAwsCredentialIdentityProvider, +} from "@aws-sdk/types"; +import { SourceProfileInit } from "@smithy/shared-ini-file-loader"; +import { SSOClient, SSOClientConfig } from "./loadSso"; +export interface SsoCredentialsParameters { + ssoStartUrl: string; + ssoSession?: string; + ssoAccountId: string; + ssoRegion: string; + ssoRoleName: string; +} +export interface FromSSOInit + extends SourceProfileInit, + CredentialProviderOptions { + ssoClient?: SSOClient; + clientConfig?: 
SSOClientConfig; +} +export declare const fromSSO: ( + init?: FromSSOInit & Partial +) => RuntimeConfigAwsCredentialIdentityProvider; diff --git a/amplify/functions/deleteDocument/node_modules/@aws-sdk/credential-provider-sso/dist-types/ts3.4/index.d.ts b/amplify/functions/deleteDocument/node_modules/@aws-sdk/credential-provider-sso/dist-types/ts3.4/index.d.ts new file mode 100644 index 0000000..7215fb6 --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@aws-sdk/credential-provider-sso/dist-types/ts3.4/index.d.ts @@ -0,0 +1,4 @@ +export * from "./fromSSO"; +export * from "./isSsoProfile"; +export * from "./types"; +export * from "./validateSsoProfile"; diff --git a/amplify/functions/deleteDocument/node_modules/@aws-sdk/credential-provider-sso/dist-types/ts3.4/isSsoProfile.d.ts b/amplify/functions/deleteDocument/node_modules/@aws-sdk/credential-provider-sso/dist-types/ts3.4/isSsoProfile.d.ts new file mode 100644 index 0000000..b4e8bdd --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@aws-sdk/credential-provider-sso/dist-types/ts3.4/isSsoProfile.d.ts @@ -0,0 +1,3 @@ +import { Profile } from "@smithy/types"; +import { SsoProfile } from "./types"; +export declare const isSsoProfile: (arg: Profile) => arg is Partial; diff --git a/amplify/functions/deleteDocument/node_modules/@aws-sdk/credential-provider-sso/dist-types/ts3.4/loadSso.d.ts b/amplify/functions/deleteDocument/node_modules/@aws-sdk/credential-provider-sso/dist-types/ts3.4/loadSso.d.ts new file mode 100644 index 0000000..2d3249f --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@aws-sdk/credential-provider-sso/dist-types/ts3.4/loadSso.d.ts @@ -0,0 +1,6 @@ +import { GetRoleCredentialsCommand, SSOClient } from "@aws-sdk/client-sso"; +export { GetRoleCredentialsCommand, SSOClient }; +export { + SSOClientConfig, + GetRoleCredentialsCommandOutput, +} from "@aws-sdk/client-sso"; diff --git 
a/amplify/functions/deleteDocument/node_modules/@aws-sdk/credential-provider-sso/dist-types/ts3.4/resolveSSOCredentials.d.ts b/amplify/functions/deleteDocument/node_modules/@aws-sdk/credential-provider-sso/dist-types/ts3.4/resolveSSOCredentials.d.ts new file mode 100644 index 0000000..cc16554 --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@aws-sdk/credential-provider-sso/dist-types/ts3.4/resolveSSOCredentials.d.ts @@ -0,0 +1,14 @@ +import { AwsCredentialIdentity } from "@smithy/types"; +import { FromSSOInit, SsoCredentialsParameters } from "./fromSSO"; +export declare const resolveSSOCredentials: ({ + ssoStartUrl, + ssoSession, + ssoAccountId, + ssoRegion, + ssoRoleName, + ssoClient, + clientConfig, + parentClientConfig, + profile, + logger, +}: FromSSOInit & SsoCredentialsParameters) => Promise; diff --git a/amplify/functions/deleteDocument/node_modules/@aws-sdk/credential-provider-sso/dist-types/ts3.4/types.d.ts b/amplify/functions/deleteDocument/node_modules/@aws-sdk/credential-provider-sso/dist-types/ts3.4/types.d.ts new file mode 100644 index 0000000..4a3986b --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@aws-sdk/credential-provider-sso/dist-types/ts3.4/types.d.ts @@ -0,0 +1,14 @@ +import { Profile } from "@smithy/types"; +export interface SSOToken { + accessToken: string; + expiresAt: string; + region?: string; + startUrl?: string; +} +export interface SsoProfile extends Profile { + sso_start_url: string; + sso_session?: string; + sso_account_id: string; + sso_region: string; + sso_role_name: string; +} diff --git a/amplify/functions/deleteDocument/node_modules/@aws-sdk/credential-provider-sso/dist-types/ts3.4/validateSsoProfile.d.ts b/amplify/functions/deleteDocument/node_modules/@aws-sdk/credential-provider-sso/dist-types/ts3.4/validateSsoProfile.d.ts new file mode 100644 index 0000000..6572fc4 --- /dev/null +++ 
b/amplify/functions/deleteDocument/node_modules/@aws-sdk/credential-provider-sso/dist-types/ts3.4/validateSsoProfile.d.ts @@ -0,0 +1,6 @@ +import { Logger } from "@smithy/types"; +import { SsoProfile } from "./types"; +export declare const validateSsoProfile: ( + profile: Partial, + logger?: Logger +) => SsoProfile; diff --git a/amplify/functions/deleteDocument/node_modules/@aws-sdk/credential-provider-sso/dist-types/types.d.ts b/amplify/functions/deleteDocument/node_modules/@aws-sdk/credential-provider-sso/dist-types/types.d.ts new file mode 100644 index 0000000..551d678 --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@aws-sdk/credential-provider-sso/dist-types/types.d.ts @@ -0,0 +1,22 @@ +import { Profile } from "@smithy/types"; +/** + * @internal + * + * Cached SSO token retrieved from SSO login flow. + */ +export interface SSOToken { + accessToken: string; + expiresAt: string; + region?: string; + startUrl?: string; +} +/** + * @internal + */ +export interface SsoProfile extends Profile { + sso_start_url: string; + sso_session?: string; + sso_account_id: string; + sso_region: string; + sso_role_name: string; +} diff --git a/amplify/functions/deleteDocument/node_modules/@aws-sdk/credential-provider-sso/dist-types/validateSsoProfile.d.ts b/amplify/functions/deleteDocument/node_modules/@aws-sdk/credential-provider-sso/dist-types/validateSsoProfile.d.ts new file mode 100644 index 0000000..8b0ab31 --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@aws-sdk/credential-provider-sso/dist-types/validateSsoProfile.d.ts @@ -0,0 +1,6 @@ +import { Logger } from "@smithy/types"; +import { SsoProfile } from "./types"; +/** + * @internal + */ +export declare const validateSsoProfile: (profile: Partial, logger?: Logger) => SsoProfile; diff --git a/amplify/functions/deleteDocument/node_modules/@aws-sdk/credential-provider-sso/package.json b/amplify/functions/deleteDocument/node_modules/@aws-sdk/credential-provider-sso/package.json new file mode 
100644 index 0000000..0d5cfb5 --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@aws-sdk/credential-provider-sso/package.json @@ -0,0 +1,65 @@ +{ + "name": "@aws-sdk/credential-provider-sso", + "version": "3.803.0", + "description": "AWS credential provider that exchanges a resolved SSO login token file for temporary AWS credentials", + "main": "./dist-cjs/index.js", + "module": "./dist-es/index.js", + "scripts": { + "build": "concurrently 'yarn:build:cjs' 'yarn:build:es' 'yarn:build:types'", + "build:cjs": "node ../../scripts/compilation/inline credential-provider-sso", + "build:es": "tsc -p tsconfig.es.json", + "build:include:deps": "lerna run --scope $npm_package_name --include-dependencies build", + "build:types": "tsc -p tsconfig.types.json", + "build:types:downlevel": "downlevel-dts dist-types dist-types/ts3.4", + "clean": "rimraf ./dist-* && rimraf *.tsbuildinfo", + "test": "yarn g:vitest run", + "test:watch": "yarn g:vitest watch" + }, + "keywords": [ + "aws", + "credentials" + ], + "author": { + "name": "AWS SDK for JavaScript Team", + "url": "https://aws.amazon.com/javascript/" + }, + "license": "Apache-2.0", + "dependencies": { + "@aws-sdk/client-sso": "3.803.0", + "@aws-sdk/core": "3.799.0", + "@aws-sdk/token-providers": "3.803.0", + "@aws-sdk/types": "3.775.0", + "@smithy/property-provider": "^4.0.2", + "@smithy/shared-ini-file-loader": "^4.0.2", + "@smithy/types": "^4.2.0", + "tslib": "^2.6.2" + }, + "devDependencies": { + "@tsconfig/recommended": "1.0.1", + "@types/node": "^18.19.69", + "concurrently": "7.0.0", + "downlevel-dts": "0.10.1", + "rimraf": "3.0.2", + "typescript": "~5.2.2" + }, + "types": "./dist-types/index.d.ts", + "engines": { + "node": ">=18.0.0" + }, + "typesVersions": { + "<4.0": { + "dist-types/*": [ + "dist-types/ts3.4/*" + ] + } + }, + "files": [ + "dist-*/**" + ], + "homepage": "https://github.com/aws/aws-sdk-js-v3/tree/main/packages/credential-provider-sso", + "repository": { + "type": "git", + "url": 
"https://github.com/aws/aws-sdk-js-v3.git", + "directory": "packages/credential-provider-sso" + } +} diff --git a/amplify/functions/deleteDocument/node_modules/@aws-sdk/credential-provider-web-identity/LICENSE b/amplify/functions/deleteDocument/node_modules/@aws-sdk/credential-provider-web-identity/LICENSE new file mode 100644 index 0000000..f9a6673 --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@aws-sdk/credential-provider-web-identity/LICENSE @@ -0,0 +1,201 @@ +Apache License + Version 2.0, January 2004 + http://www.apache.org/licenses/ + + TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION + + 1. Definitions. + + "License" shall mean the terms and conditions for use, reproduction, + and distribution as defined by Sections 1 through 9 of this document. + + "Licensor" shall mean the copyright owner or entity authorized by + the copyright owner that is granting the License. + + "Legal Entity" shall mean the union of the acting entity and all + other entities that control, are controlled by, or are under common + control with that entity. For the purposes of this definition, + "control" means (i) the power, direct or indirect, to cause the + direction or management of such entity, whether by contract or + otherwise, or (ii) ownership of fifty percent (50%) or more of the + outstanding shares, or (iii) beneficial ownership of such entity. + + "You" (or "Your") shall mean an individual or Legal Entity + exercising permissions granted by this License. + + "Source" form shall mean the preferred form for making modifications, + including but not limited to software source code, documentation + source, and configuration files. + + "Object" form shall mean any form resulting from mechanical + transformation or translation of a Source form, including but + not limited to compiled object code, generated documentation, + and conversions to other media types. 
+ + "Work" shall mean the work of authorship, whether in Source or + Object form, made available under the License, as indicated by a + copyright notice that is included in or attached to the work + (an example is provided in the Appendix below). + + "Derivative Works" shall mean any work, whether in Source or Object + form, that is based on (or derived from) the Work and for which the + editorial revisions, annotations, elaborations, or other modifications + represent, as a whole, an original work of authorship. For the purposes + of this License, Derivative Works shall not include works that remain + separable from, or merely link (or bind by name) to the interfaces of, + the Work and Derivative Works thereof. + + "Contribution" shall mean any work of authorship, including + the original version of the Work and any modifications or additions + to that Work or Derivative Works thereof, that is intentionally + submitted to Licensor for inclusion in the Work by the copyright owner + or by an individual or Legal Entity authorized to submit on behalf of + the copyright owner. For the purposes of this definition, "submitted" + means any form of electronic, verbal, or written communication sent + to the Licensor or its representatives, including but not limited to + communication on electronic mailing lists, source code control systems, + and issue tracking systems that are managed by, or on behalf of, the + Licensor for the purpose of discussing and improving the Work, but + excluding communication that is conspicuously marked or otherwise + designated in writing by the copyright owner as "Not a Contribution." + + "Contributor" shall mean Licensor and any individual or Legal Entity + on behalf of whom a Contribution has been received by Licensor and + subsequently incorporated within the Work. + + 2. Grant of Copyright License. 
Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + copyright license to reproduce, prepare Derivative Works of, + publicly display, publicly perform, sublicense, and distribute the + Work and such Derivative Works in Source or Object form. + + 3. Grant of Patent License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + (except as stated in this section) patent license to make, have made, + use, offer to sell, sell, import, and otherwise transfer the Work, + where such license applies only to those patent claims licensable + by such Contributor that are necessarily infringed by their + Contribution(s) alone or by combination of their Contribution(s) + with the Work to which such Contribution(s) was submitted. If You + institute patent litigation against any entity (including a + cross-claim or counterclaim in a lawsuit) alleging that the Work + or a Contribution incorporated within the Work constitutes direct + or contributory patent infringement, then any patent licenses + granted to You under this License for that Work shall terminate + as of the date such litigation is filed. + + 4. Redistribution. 
You may reproduce and distribute copies of the + Work or Derivative Works thereof in any medium, with or without + modifications, and in Source or Object form, provided that You + meet the following conditions: + + (a) You must give any other recipients of the Work or + Derivative Works a copy of this License; and + + (b) You must cause any modified files to carry prominent notices + stating that You changed the files; and + + (c) You must retain, in the Source form of any Derivative Works + that You distribute, all copyright, patent, trademark, and + attribution notices from the Source form of the Work, + excluding those notices that do not pertain to any part of + the Derivative Works; and + + (d) If the Work includes a "NOTICE" text file as part of its + distribution, then any Derivative Works that You distribute must + include a readable copy of the attribution notices contained + within such NOTICE file, excluding those notices that do not + pertain to any part of the Derivative Works, in at least one + of the following places: within a NOTICE text file distributed + as part of the Derivative Works; within the Source form or + documentation, if provided along with the Derivative Works; or, + within a display generated by the Derivative Works, if and + wherever such third-party notices normally appear. The contents + of the NOTICE file are for informational purposes only and + do not modify the License. You may add Your own attribution + notices within Derivative Works that You distribute, alongside + or as an addendum to the NOTICE text from the Work, provided + that such additional attribution notices cannot be construed + as modifying the License. 
+ + You may add Your own copyright statement to Your modifications and + may provide additional or different license terms and conditions + for use, reproduction, or distribution of Your modifications, or + for any such Derivative Works as a whole, provided Your use, + reproduction, and distribution of the Work otherwise complies with + the conditions stated in this License. + + 5. Submission of Contributions. Unless You explicitly state otherwise, + any Contribution intentionally submitted for inclusion in the Work + by You to the Licensor shall be under the terms and conditions of + this License, without any additional terms or conditions. + Notwithstanding the above, nothing herein shall supersede or modify + the terms of any separate license agreement you may have executed + with Licensor regarding such Contributions. + + 6. Trademarks. This License does not grant permission to use the trade + names, trademarks, service marks, or product names of the Licensor, + except as required for reasonable and customary use in describing the + origin of the Work and reproducing the content of the NOTICE file. + + 7. Disclaimer of Warranty. Unless required by applicable law or + agreed to in writing, Licensor provides the Work (and each + Contributor provides its Contributions) on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or + implied, including, without limitation, any warranties or conditions + of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A + PARTICULAR PURPOSE. You are solely responsible for determining the + appropriateness of using or redistributing the Work and assume any + risks associated with Your exercise of permissions under this License. + + 8. Limitation of Liability. 
In no event and under no legal theory, + whether in tort (including negligence), contract, or otherwise, + unless required by applicable law (such as deliberate and grossly + negligent acts) or agreed to in writing, shall any Contributor be + liable to You for damages, including any direct, indirect, special, + incidental, or consequential damages of any character arising as a + result of this License or out of the use or inability to use the + Work (including but not limited to damages for loss of goodwill, + work stoppage, computer failure or malfunction, or any and all + other commercial damages or losses), even if such Contributor + has been advised of the possibility of such damages. + + 9. Accepting Warranty or Additional Liability. While redistributing + the Work or Derivative Works thereof, You may choose to offer, + and charge a fee for, acceptance of support, warranty, indemnity, + or other liability obligations and/or rights consistent with this + License. However, in accepting such obligations, You may act only + on Your own behalf and on Your sole responsibility, not on behalf + of any other Contributor, and only if You agree to indemnify, + defend, and hold each Contributor harmless for any liability + incurred by, or claims asserted against, such Contributor by reason + of your accepting any such warranty or additional liability. + + END OF TERMS AND CONDITIONS + + APPENDIX: How to apply the Apache License to your work. + + To apply the Apache License to your work, attach the following + boilerplate notice, with the fields enclosed by brackets "{}" + replaced with your own identifying information. (Don't include + the brackets!) The text should be enclosed in the appropriate + comment syntax for the file format. We also recommend that a + file or class name and description of purpose be included on the + same "printed page" as the copyright notice for easier + identification within third-party archives. + + Copyright 2019 Amazon.com, Inc. 
or its affiliates. All Rights Reserved. + + Licensed under the Apache License, Version 2.0 (the "License"); + you may not use this file except in compliance with the License. + You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + + Unless required by applicable law or agreed to in writing, software + distributed under the License is distributed on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + See the License for the specific language governing permissions and + limitations under the License. \ No newline at end of file diff --git a/amplify/functions/deleteDocument/node_modules/@aws-sdk/credential-provider-web-identity/README.md b/amplify/functions/deleteDocument/node_modules/@aws-sdk/credential-provider-web-identity/README.md new file mode 100644 index 0000000..e4858a4 --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@aws-sdk/credential-provider-web-identity/README.md @@ -0,0 +1,11 @@ +# @aws-sdk/credential-provider-web-identity + +[![NPM version](https://img.shields.io/npm/v/@aws-sdk/credential-provider-web-identity/latest.svg)](https://www.npmjs.com/package/@aws-sdk/credential-provider-web-identity) +[![NPM downloads](https://img.shields.io/npm/dm/@aws-sdk/credential-provider-web-identity.svg)](https://www.npmjs.com/package/@aws-sdk/credential-provider-web-identity) + +> An internal package + +## Usage + +You probably shouldn't, at least directly. Please use [@aws-sdk/credential-providers](https://www.npmjs.com/package/@aws-sdk/credential-providers) +instead. 
diff --git a/amplify/functions/deleteDocument/node_modules/@aws-sdk/credential-provider-web-identity/dist-cjs/fromTokenFile.js b/amplify/functions/deleteDocument/node_modules/@aws-sdk/credential-provider-web-identity/dist-cjs/fromTokenFile.js new file mode 100644 index 0000000..2be7727 --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@aws-sdk/credential-provider-web-identity/dist-cjs/fromTokenFile.js @@ -0,0 +1,32 @@ +"use strict"; +Object.defineProperty(exports, "__esModule", { value: true }); +exports.fromTokenFile = void 0; +const client_1 = require("@aws-sdk/core/client"); +const property_provider_1 = require("@smithy/property-provider"); +const fs_1 = require("fs"); +const fromWebToken_1 = require("./fromWebToken"); +const ENV_TOKEN_FILE = "AWS_WEB_IDENTITY_TOKEN_FILE"; +const ENV_ROLE_ARN = "AWS_ROLE_ARN"; +const ENV_ROLE_SESSION_NAME = "AWS_ROLE_SESSION_NAME"; +const fromTokenFile = (init = {}) => async () => { + init.logger?.debug("@aws-sdk/credential-provider-web-identity - fromTokenFile"); + const webIdentityTokenFile = init?.webIdentityTokenFile ?? process.env[ENV_TOKEN_FILE]; + const roleArn = init?.roleArn ?? process.env[ENV_ROLE_ARN]; + const roleSessionName = init?.roleSessionName ?? 
process.env[ENV_ROLE_SESSION_NAME]; + if (!webIdentityTokenFile || !roleArn) { + throw new property_provider_1.CredentialsProviderError("Web identity configuration not specified", { + logger: init.logger, + }); + } + const credentials = await (0, fromWebToken_1.fromWebToken)({ + ...init, + webIdentityToken: (0, fs_1.readFileSync)(webIdentityTokenFile, { encoding: "ascii" }), + roleArn, + roleSessionName, + })(); + if (webIdentityTokenFile === process.env[ENV_TOKEN_FILE]) { + (0, client_1.setCredentialFeature)(credentials, "CREDENTIALS_ENV_VARS_STS_WEB_ID_TOKEN", "h"); + } + return credentials; +}; +exports.fromTokenFile = fromTokenFile; diff --git a/amplify/functions/deleteDocument/node_modules/@aws-sdk/credential-provider-web-identity/dist-cjs/fromWebToken.js b/amplify/functions/deleteDocument/node_modules/@aws-sdk/credential-provider-web-identity/dist-cjs/fromWebToken.js new file mode 100644 index 0000000..f8eafde --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@aws-sdk/credential-provider-web-identity/dist-cjs/fromWebToken.js @@ -0,0 +1,52 @@ +"use strict"; +var __createBinding = (this && this.__createBinding) || (Object.create ? (function(o, m, k, k2) { + if (k2 === undefined) k2 = k; + var desc = Object.getOwnPropertyDescriptor(m, k); + if (!desc || ("get" in desc ? !m.__esModule : desc.writable || desc.configurable)) { + desc = { enumerable: true, get: function() { return m[k]; } }; + } + Object.defineProperty(o, k2, desc); +}) : (function(o, m, k, k2) { + if (k2 === undefined) k2 = k; + o[k2] = m[k]; +})); +var __setModuleDefault = (this && this.__setModuleDefault) || (Object.create ? 
(function(o, v) { + Object.defineProperty(o, "default", { enumerable: true, value: v }); +}) : function(o, v) { + o["default"] = v; +}); +var __importStar = (this && this.__importStar) || function (mod) { + if (mod && mod.__esModule) return mod; + var result = {}; + if (mod != null) for (var k in mod) if (k !== "default" && Object.prototype.hasOwnProperty.call(mod, k)) __createBinding(result, mod, k); + __setModuleDefault(result, mod); + return result; +}; +Object.defineProperty(exports, "__esModule", { value: true }); +exports.fromWebToken = void 0; +const fromWebToken = (init) => async (awsIdentityProperties) => { + init.logger?.debug("@aws-sdk/credential-provider-web-identity - fromWebToken"); + const { roleArn, roleSessionName, webIdentityToken, providerId, policyArns, policy, durationSeconds } = init; + let { roleAssumerWithWebIdentity } = init; + if (!roleAssumerWithWebIdentity) { + const { getDefaultRoleAssumerWithWebIdentity } = await Promise.resolve().then(() => __importStar(require("@aws-sdk/nested-clients/sts"))); + roleAssumerWithWebIdentity = getDefaultRoleAssumerWithWebIdentity({ + ...init.clientConfig, + credentialProviderLogger: init.logger, + parentClientConfig: { + ...awsIdentityProperties?.callerClientConfig, + ...init.parentClientConfig, + }, + }, init.clientPlugins); + } + return roleAssumerWithWebIdentity({ + RoleArn: roleArn, + RoleSessionName: roleSessionName ?? 
`aws-sdk-js-session-${Date.now()}`, + WebIdentityToken: webIdentityToken, + ProviderId: providerId, + PolicyArns: policyArns, + Policy: policy, + DurationSeconds: durationSeconds, + }); +}; +exports.fromWebToken = fromWebToken; diff --git a/amplify/functions/deleteDocument/node_modules/@aws-sdk/credential-provider-web-identity/dist-cjs/index.js b/amplify/functions/deleteDocument/node_modules/@aws-sdk/credential-provider-web-identity/dist-cjs/index.js new file mode 100644 index 0000000..5dc29db --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@aws-sdk/credential-provider-web-identity/dist-cjs/index.js @@ -0,0 +1,28 @@ +"use strict"; +var __defProp = Object.defineProperty; +var __getOwnPropDesc = Object.getOwnPropertyDescriptor; +var __getOwnPropNames = Object.getOwnPropertyNames; +var __hasOwnProp = Object.prototype.hasOwnProperty; +var __copyProps = (to, from, except, desc) => { + if (from && typeof from === "object" || typeof from === "function") { + for (let key of __getOwnPropNames(from)) + if (!__hasOwnProp.call(to, key) && key !== except) + __defProp(to, key, { get: () => from[key], enumerable: !(desc = __getOwnPropDesc(from, key)) || desc.enumerable }); + } + return to; +}; +var __reExport = (target, mod, secondTarget) => (__copyProps(target, mod, "default"), secondTarget && __copyProps(secondTarget, mod, "default")); +var __toCommonJS = (mod) => __copyProps(__defProp({}, "__esModule", { value: true }), mod); + +// src/index.ts +var index_exports = {}; +module.exports = __toCommonJS(index_exports); +__reExport(index_exports, require("././fromTokenFile"), module.exports); +__reExport(index_exports, require("././fromWebToken"), module.exports); +// Annotate the CommonJS export names for ESM import in node: + +0 && (module.exports = { + fromTokenFile, + fromWebToken +}); + diff --git a/amplify/functions/deleteDocument/node_modules/@aws-sdk/credential-provider-web-identity/dist-es/fromTokenFile.js 
b/amplify/functions/deleteDocument/node_modules/@aws-sdk/credential-provider-web-identity/dist-es/fromTokenFile.js new file mode 100644 index 0000000..64a5032 --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@aws-sdk/credential-provider-web-identity/dist-es/fromTokenFile.js @@ -0,0 +1,28 @@ +import { setCredentialFeature } from "@aws-sdk/core/client"; +import { CredentialsProviderError } from "@smithy/property-provider"; +import { readFileSync } from "fs"; +import { fromWebToken } from "./fromWebToken"; +const ENV_TOKEN_FILE = "AWS_WEB_IDENTITY_TOKEN_FILE"; +const ENV_ROLE_ARN = "AWS_ROLE_ARN"; +const ENV_ROLE_SESSION_NAME = "AWS_ROLE_SESSION_NAME"; +export const fromTokenFile = (init = {}) => async () => { + init.logger?.debug("@aws-sdk/credential-provider-web-identity - fromTokenFile"); + const webIdentityTokenFile = init?.webIdentityTokenFile ?? process.env[ENV_TOKEN_FILE]; + const roleArn = init?.roleArn ?? process.env[ENV_ROLE_ARN]; + const roleSessionName = init?.roleSessionName ?? 
process.env[ENV_ROLE_SESSION_NAME]; + if (!webIdentityTokenFile || !roleArn) { + throw new CredentialsProviderError("Web identity configuration not specified", { + logger: init.logger, + }); + } + const credentials = await fromWebToken({ + ...init, + webIdentityToken: readFileSync(webIdentityTokenFile, { encoding: "ascii" }), + roleArn, + roleSessionName, + })(); + if (webIdentityTokenFile === process.env[ENV_TOKEN_FILE]) { + setCredentialFeature(credentials, "CREDENTIALS_ENV_VARS_STS_WEB_ID_TOKEN", "h"); + } + return credentials; +}; diff --git a/amplify/functions/deleteDocument/node_modules/@aws-sdk/credential-provider-web-identity/dist-es/fromWebToken.js b/amplify/functions/deleteDocument/node_modules/@aws-sdk/credential-provider-web-identity/dist-es/fromWebToken.js new file mode 100644 index 0000000..268e0aa --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@aws-sdk/credential-provider-web-identity/dist-es/fromWebToken.js @@ -0,0 +1,25 @@ +export const fromWebToken = (init) => async (awsIdentityProperties) => { + init.logger?.debug("@aws-sdk/credential-provider-web-identity - fromWebToken"); + const { roleArn, roleSessionName, webIdentityToken, providerId, policyArns, policy, durationSeconds } = init; + let { roleAssumerWithWebIdentity } = init; + if (!roleAssumerWithWebIdentity) { + const { getDefaultRoleAssumerWithWebIdentity } = await import("@aws-sdk/nested-clients/sts"); + roleAssumerWithWebIdentity = getDefaultRoleAssumerWithWebIdentity({ + ...init.clientConfig, + credentialProviderLogger: init.logger, + parentClientConfig: { + ...awsIdentityProperties?.callerClientConfig, + ...init.parentClientConfig, + }, + }, init.clientPlugins); + } + return roleAssumerWithWebIdentity({ + RoleArn: roleArn, + RoleSessionName: roleSessionName ?? 
`aws-sdk-js-session-${Date.now()}`, + WebIdentityToken: webIdentityToken, + ProviderId: providerId, + PolicyArns: policyArns, + Policy: policy, + DurationSeconds: durationSeconds, + }); +}; diff --git a/amplify/functions/deleteDocument/node_modules/@aws-sdk/credential-provider-web-identity/dist-es/index.js b/amplify/functions/deleteDocument/node_modules/@aws-sdk/credential-provider-web-identity/dist-es/index.js new file mode 100644 index 0000000..0e900c0 --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@aws-sdk/credential-provider-web-identity/dist-es/index.js @@ -0,0 +1,2 @@ +export * from "./fromTokenFile"; +export * from "./fromWebToken"; diff --git a/amplify/functions/deleteDocument/node_modules/@aws-sdk/credential-provider-web-identity/dist-types/fromTokenFile.d.ts b/amplify/functions/deleteDocument/node_modules/@aws-sdk/credential-provider-web-identity/dist-types/fromTokenFile.d.ts new file mode 100644 index 0000000..58f885f --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@aws-sdk/credential-provider-web-identity/dist-types/fromTokenFile.d.ts @@ -0,0 +1,18 @@ +import { CredentialProviderOptions } from "@aws-sdk/types"; +import type { AwsCredentialIdentityProvider } from "@smithy/types"; +import { FromWebTokenInit } from "./fromWebToken"; +/** + * @public + */ +export interface FromTokenFileInit extends Partial>, CredentialProviderOptions { + /** + * File location of where the `OIDC` token is stored. + */ + webIdentityTokenFile?: string; +} +/** + * @internal + * + * Represents OIDC credentials from a file on disk. 
+ */ +export declare const fromTokenFile: (init?: FromTokenFileInit) => AwsCredentialIdentityProvider; diff --git a/amplify/functions/deleteDocument/node_modules/@aws-sdk/credential-provider-web-identity/dist-types/fromWebToken.d.ts b/amplify/functions/deleteDocument/node_modules/@aws-sdk/credential-provider-web-identity/dist-types/fromWebToken.d.ts new file mode 100644 index 0000000..6b5e066 --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@aws-sdk/credential-provider-web-identity/dist-types/fromWebToken.d.ts @@ -0,0 +1,145 @@ +import type { CredentialProviderOptions, RuntimeConfigAwsCredentialIdentityProvider } from "@aws-sdk/types"; +import type { AwsCredentialIdentity, Pluggable } from "@smithy/types"; +/** + * @public + */ +export interface AssumeRoleWithWebIdentityParams { + /** + *

The Amazon Resource Name (ARN) of the role that the caller is assuming.

+ */ + RoleArn: string; + /** + *

An identifier for the assumed role session. Typically, you pass the name or identifier + * that is associated with the user who is using your application. That way, the temporary + * security credentials that your application will use are associated with that user. This + * session name is included as part of the ARN and assumed role ID in the + * AssumedRoleUser response element.

+ *

The regex used to validate this parameter is a string of characters + * consisting of upper- and lower-case alphanumeric characters with no spaces. You can + * also include underscores or any of the following characters: =,.@-

+ */ + RoleSessionName: string; + /** + *

The OAuth 2.0 access token or OpenID Connect ID token that is provided by the identity + * provider. Your application must get this token by authenticating the user who is using your + * application with a web identity provider before the application makes an + * AssumeRoleWithWebIdentity call.

+ */ + WebIdentityToken: string; + /** + *

The fully qualified host component of the domain name of the identity provider.

+ *

Specify this value only for OAuth 2.0 access tokens. Currently + * www.amazon.com and graph.facebook.com are the only supported + * identity providers for OAuth 2.0 access tokens. Do not include URL schemes and port + * numbers.

+ *

Do not specify this value for OpenID Connect ID tokens.

+ */ + ProviderId?: string; + /** + *

The Amazon Resource Names (ARNs) of the IAM managed policies that you want to use as + * managed session policies. The policies must exist in the same account as the role.

+ *

This parameter is optional. You can provide up to 10 managed policy ARNs. However, the + * plain text that you use for both inline and managed session policies can't exceed 2,048 + * characters. For more information about ARNs, see Amazon Resource Names (ARNs) and AWS + * Service Namespaces in the AWS General Reference.

+ * + *

An AWS conversion compresses the passed session policies and session tags into a + * packed binary format that has a separate limit. Your request can fail for this limit + * even if your plain text meets the other requirements. The PackedPolicySize + * response element indicates by percentage how close the policies and tags for your + * request are to the upper size limit. + *

+ *
+ * + *

Passing policies to this operation returns new + * temporary credentials. The resulting session's permissions are the intersection of the + * role's identity-based policy and the session policies. You can use the role's temporary + * credentials in subsequent AWS API calls to access resources in the account that owns + * the role. You cannot use session policies to grant more permissions than those allowed + * by the identity-based policy of the role that is being assumed. For more information, see + * Session + * Policies in the IAM User Guide.

+ */ + PolicyArns?: { + arn?: string; + }[]; + /** + *

An IAM policy in JSON format that you want to use as an inline session policy.

+ *

This parameter is optional. Passing policies to this operation returns new + * temporary credentials. The resulting session's permissions are the intersection of the + * role's identity-based policy and the session policies. You can use the role's temporary + * credentials in subsequent AWS API calls to access resources in the account that owns + * the role. You cannot use session policies to grant more permissions than those allowed + * by the identity-based policy of the role that is being assumed. For more information, see + * Session + * Policies in the IAM User Guide.

+ *

The plain text that you use for both inline and managed session policies can't exceed + * 2,048 characters. The JSON policy characters can be any ASCII character from the space + * character to the end of the valid character list (\u0020 through \u00FF). It can also + * include the tab (\u0009), linefeed (\u000A), and carriage return (\u000D) + * characters.

+ * + *

An AWS conversion compresses the passed session policies and session tags into a + * packed binary format that has a separate limit. Your request can fail for this limit + * even if your plain text meets the other requirements. The PackedPolicySize + * response element indicates by percentage how close the policies and tags for your + * request are to the upper size limit. + *

+ *
+ */ + Policy?: string; + /** + *

The duration, in seconds, of the role session. The value can range from 900 seconds (15 + * minutes) up to the maximum session duration setting for the role. This setting can have a + * value from 1 hour to 12 hours. If you specify a value higher than this setting, the + * operation fails. For example, if you specify a session duration of 12 hours, but your + * administrator set the maximum session duration to 6 hours, your operation fails. To learn + * how to view the maximum value for your role, see View the + * Maximum Session Duration Setting for a Role in the + * IAM User Guide.

+ *

By default, the value is set to 3600 seconds.

+ * + *

The DurationSeconds parameter is separate from the duration of a console + * session that you might request using the returned credentials. The request to the + * federation endpoint for a console sign-in token takes a SessionDuration + * parameter that specifies the maximum length of the console session. For more + * information, see Creating a URL + * that Enables Federated Users to Access the AWS Management Console in the + * IAM User Guide.

+ *
+ */ + DurationSeconds?: number; +} +type LowerCaseKey = { + [K in keyof T as `${Uncapitalize}`]: T[K]; +}; +/** + * @public + */ +export interface FromWebTokenInit extends Omit, "roleSessionName">, CredentialProviderOptions { + /** + * The IAM session name used to distinguish sessions. + */ + roleSessionName?: string; + /** + * A function that assumes a role with web identity and returns a promise fulfilled with + * credentials for the assumed role. + * + * @param params input parameter of sts:AssumeRoleWithWebIdentity API. + */ + roleAssumerWithWebIdentity?: (params: AssumeRoleWithWebIdentityParams) => Promise; + /** + * STSClientConfig to be used for creating STS Client for assuming role. + * @internal + */ + clientConfig?: any; + /** + * @internal + */ + clientPlugins?: Pluggable[]; +} +/** + * @internal + */ +export declare const fromWebToken: (init: FromWebTokenInit) => RuntimeConfigAwsCredentialIdentityProvider; +export {}; diff --git a/amplify/functions/deleteDocument/node_modules/@aws-sdk/credential-provider-web-identity/dist-types/index.d.ts b/amplify/functions/deleteDocument/node_modules/@aws-sdk/credential-provider-web-identity/dist-types/index.d.ts new file mode 100644 index 0000000..36c15dc --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@aws-sdk/credential-provider-web-identity/dist-types/index.d.ts @@ -0,0 +1,8 @@ +/** + * @internal + */ +export * from "./fromTokenFile"; +/** + * @internal + */ +export * from "./fromWebToken"; diff --git a/amplify/functions/deleteDocument/node_modules/@aws-sdk/credential-provider-web-identity/dist-types/ts3.4/fromTokenFile.d.ts b/amplify/functions/deleteDocument/node_modules/@aws-sdk/credential-provider-web-identity/dist-types/ts3.4/fromTokenFile.d.ts new file mode 100644 index 0000000..4f67356 --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@aws-sdk/credential-provider-web-identity/dist-types/ts3.4/fromTokenFile.d.ts @@ -0,0 +1,16 @@ +import { CredentialProviderOptions } from 
"@aws-sdk/types"; +import { AwsCredentialIdentityProvider } from "@smithy/types"; +import { FromWebTokenInit } from "./fromWebToken"; +export interface FromTokenFileInit + extends Partial< + Pick< + FromWebTokenInit, + Exclude + > + >, + CredentialProviderOptions { + webIdentityTokenFile?: string; +} +export declare const fromTokenFile: ( + init?: FromTokenFileInit +) => AwsCredentialIdentityProvider; diff --git a/amplify/functions/deleteDocument/node_modules/@aws-sdk/credential-provider-web-identity/dist-types/ts3.4/fromWebToken.d.ts b/amplify/functions/deleteDocument/node_modules/@aws-sdk/credential-provider-web-identity/dist-types/ts3.4/fromWebToken.d.ts new file mode 100644 index 0000000..73529a1 --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@aws-sdk/credential-provider-web-identity/dist-types/ts3.4/fromWebToken.d.ts @@ -0,0 +1,39 @@ +import { + CredentialProviderOptions, + RuntimeConfigAwsCredentialIdentityProvider, +} from "@aws-sdk/types"; +import { AwsCredentialIdentity, Pluggable } from "@smithy/types"; +export interface AssumeRoleWithWebIdentityParams { + RoleArn: string; + RoleSessionName: string; + WebIdentityToken: string; + ProviderId?: string; + PolicyArns?: { + arn?: string; + }[]; + Policy?: string; + DurationSeconds?: number; +} +type LowerCaseKey = { + [K in keyof T as `${Uncapitalize}`]: T[K]; +}; +export interface FromWebTokenInit + extends Pick< + LowerCaseKey, + Exclude< + keyof LowerCaseKey, + "roleSessionName" + > + >, + CredentialProviderOptions { + roleSessionName?: string; + roleAssumerWithWebIdentity?: ( + params: AssumeRoleWithWebIdentityParams + ) => Promise; + clientConfig?: any; + clientPlugins?: Pluggable[]; +} +export declare const fromWebToken: ( + init: FromWebTokenInit +) => RuntimeConfigAwsCredentialIdentityProvider; +export {}; diff --git a/amplify/functions/deleteDocument/node_modules/@aws-sdk/credential-provider-web-identity/dist-types/ts3.4/index.d.ts 
b/amplify/functions/deleteDocument/node_modules/@aws-sdk/credential-provider-web-identity/dist-types/ts3.4/index.d.ts new file mode 100644 index 0000000..0e900c0 --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@aws-sdk/credential-provider-web-identity/dist-types/ts3.4/index.d.ts @@ -0,0 +1,2 @@ +export * from "./fromTokenFile"; +export * from "./fromWebToken"; diff --git a/amplify/functions/deleteDocument/node_modules/@aws-sdk/credential-provider-web-identity/package.json b/amplify/functions/deleteDocument/node_modules/@aws-sdk/credential-provider-web-identity/package.json new file mode 100644 index 0000000..500eb21 --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@aws-sdk/credential-provider-web-identity/package.json @@ -0,0 +1,71 @@ +{ + "name": "@aws-sdk/credential-provider-web-identity", + "version": "3.803.0", + "description": "AWS credential provider that calls STS assumeRole for temporary AWS credentials", + "main": "./dist-cjs/index.js", + "module": "./dist-es/index.js", + "scripts": { + "build": "concurrently 'yarn:build:cjs' 'yarn:build:es' 'yarn:build:types'", + "build:cjs": "node ../../scripts/compilation/inline credential-provider-web-identity", + "build:es": "tsc -p tsconfig.es.json", + "build:include:deps": "lerna run --scope $npm_package_name --include-dependencies build", + "build:types": "tsc -p tsconfig.types.json", + "build:types:downlevel": "downlevel-dts dist-types dist-types/ts3.4", + "clean": "rimraf ./dist-* && rimraf *.tsbuildinfo", + "test": "yarn g:vitest run", + "test:watch": "yarn g:vitest watch" + }, + "browser": { + "./dist-cjs/fromTokenFile": false, + "./dist-es/fromTokenFile": false + }, + "react-native": { + "./dist-es/fromTokenFile": false, + "./dist-cjs/fromTokenFile": false + }, + "keywords": [ + "aws", + "credentials" + ], + "author": { + "name": "AWS SDK for JavaScript Team", + "url": "https://aws.amazon.com/javascript/" + }, + "license": "Apache-2.0", + "dependencies": { + "@aws-sdk/core": 
"3.799.0", + "@aws-sdk/nested-clients": "3.803.0", + "@aws-sdk/types": "3.775.0", + "@smithy/property-provider": "^4.0.2", + "@smithy/types": "^4.2.0", + "tslib": "^2.6.2" + }, + "devDependencies": { + "@tsconfig/recommended": "1.0.1", + "@types/node": "^18.19.69", + "concurrently": "7.0.0", + "downlevel-dts": "0.10.1", + "rimraf": "3.0.2", + "typescript": "~5.2.2" + }, + "types": "./dist-types/index.d.ts", + "engines": { + "node": ">=18.0.0" + }, + "typesVersions": { + "<4.0": { + "dist-types/*": [ + "dist-types/ts3.4/*" + ] + } + }, + "files": [ + "dist-*/**" + ], + "homepage": "https://github.com/aws/aws-sdk-js-v3/tree/main/packages/credential-provider-web-identity", + "repository": { + "type": "git", + "url": "https://github.com/aws/aws-sdk-js-v3.git", + "directory": "packages/credential-provider-web-identity" + } +} diff --git a/amplify/functions/deleteDocument/node_modules/@aws-sdk/endpoint-cache/LICENSE b/amplify/functions/deleteDocument/node_modules/@aws-sdk/endpoint-cache/LICENSE new file mode 100644 index 0000000..74d4e5c --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@aws-sdk/endpoint-cache/LICENSE @@ -0,0 +1,201 @@ +Apache License + Version 2.0, January 2004 + http://www.apache.org/licenses/ + + TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION + + 1. Definitions. + + "License" shall mean the terms and conditions for use, reproduction, + and distribution as defined by Sections 1 through 9 of this document. + + "Licensor" shall mean the copyright owner or entity authorized by + the copyright owner that is granting the License. + + "Legal Entity" shall mean the union of the acting entity and all + other entities that control, are controlled by, or are under common + control with that entity. 
For the purposes of this definition, + "control" means (i) the power, direct or indirect, to cause the + direction or management of such entity, whether by contract or + otherwise, or (ii) ownership of fifty percent (50%) or more of the + outstanding shares, or (iii) beneficial ownership of such entity. + + "You" (or "Your") shall mean an individual or Legal Entity + exercising permissions granted by this License. + + "Source" form shall mean the preferred form for making modifications, + including but not limited to software source code, documentation + source, and configuration files. + + "Object" form shall mean any form resulting from mechanical + transformation or translation of a Source form, including but + not limited to compiled object code, generated documentation, + and conversions to other media types. + + "Work" shall mean the work of authorship, whether in Source or + Object form, made available under the License, as indicated by a + copyright notice that is included in or attached to the work + (an example is provided in the Appendix below). + + "Derivative Works" shall mean any work, whether in Source or Object + form, that is based on (or derived from) the Work and for which the + editorial revisions, annotations, elaborations, or other modifications + represent, as a whole, an original work of authorship. For the purposes + of this License, Derivative Works shall not include works that remain + separable from, or merely link (or bind by name) to the interfaces of, + the Work and Derivative Works thereof. + + "Contribution" shall mean any work of authorship, including + the original version of the Work and any modifications or additions + to that Work or Derivative Works thereof, that is intentionally + submitted to Licensor for inclusion in the Work by the copyright owner + or by an individual or Legal Entity authorized to submit on behalf of + the copyright owner. 
For the purposes of this definition, "submitted" + means any form of electronic, verbal, or written communication sent + to the Licensor or its representatives, including but not limited to + communication on electronic mailing lists, source code control systems, + and issue tracking systems that are managed by, or on behalf of, the + Licensor for the purpose of discussing and improving the Work, but + excluding communication that is conspicuously marked or otherwise + designated in writing by the copyright owner as "Not a Contribution." + + "Contributor" shall mean Licensor and any individual or Legal Entity + on behalf of whom a Contribution has been received by Licensor and + subsequently incorporated within the Work. + + 2. Grant of Copyright License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + copyright license to reproduce, prepare Derivative Works of, + publicly display, publicly perform, sublicense, and distribute the + Work and such Derivative Works in Source or Object form. + + 3. Grant of Patent License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + (except as stated in this section) patent license to make, have made, + use, offer to sell, sell, import, and otherwise transfer the Work, + where such license applies only to those patent claims licensable + by such Contributor that are necessarily infringed by their + Contribution(s) alone or by combination of their Contribution(s) + with the Work to which such Contribution(s) was submitted. 
If You + institute patent litigation against any entity (including a + cross-claim or counterclaim in a lawsuit) alleging that the Work + or a Contribution incorporated within the Work constitutes direct + or contributory patent infringement, then any patent licenses + granted to You under this License for that Work shall terminate + as of the date such litigation is filed. + + 4. Redistribution. You may reproduce and distribute copies of the + Work or Derivative Works thereof in any medium, with or without + modifications, and in Source or Object form, provided that You + meet the following conditions: + + (a) You must give any other recipients of the Work or + Derivative Works a copy of this License; and + + (b) You must cause any modified files to carry prominent notices + stating that You changed the files; and + + (c) You must retain, in the Source form of any Derivative Works + that You distribute, all copyright, patent, trademark, and + attribution notices from the Source form of the Work, + excluding those notices that do not pertain to any part of + the Derivative Works; and + + (d) If the Work includes a "NOTICE" text file as part of its + distribution, then any Derivative Works that You distribute must + include a readable copy of the attribution notices contained + within such NOTICE file, excluding those notices that do not + pertain to any part of the Derivative Works, in at least one + of the following places: within a NOTICE text file distributed + as part of the Derivative Works; within the Source form or + documentation, if provided along with the Derivative Works; or, + within a display generated by the Derivative Works, if and + wherever such third-party notices normally appear. The contents + of the NOTICE file are for informational purposes only and + do not modify the License. 
You may add Your own attribution + notices within Derivative Works that You distribute, alongside + or as an addendum to the NOTICE text from the Work, provided + that such additional attribution notices cannot be construed + as modifying the License. + + You may add Your own copyright statement to Your modifications and + may provide additional or different license terms and conditions + for use, reproduction, or distribution of Your modifications, or + for any such Derivative Works as a whole, provided Your use, + reproduction, and distribution of the Work otherwise complies with + the conditions stated in this License. + + 5. Submission of Contributions. Unless You explicitly state otherwise, + any Contribution intentionally submitted for inclusion in the Work + by You to the Licensor shall be under the terms and conditions of + this License, without any additional terms or conditions. + Notwithstanding the above, nothing herein shall supersede or modify + the terms of any separate license agreement you may have executed + with Licensor regarding such Contributions. + + 6. Trademarks. This License does not grant permission to use the trade + names, trademarks, service marks, or product names of the Licensor, + except as required for reasonable and customary use in describing the + origin of the Work and reproducing the content of the NOTICE file. + + 7. Disclaimer of Warranty. Unless required by applicable law or + agreed to in writing, Licensor provides the Work (and each + Contributor provides its Contributions) on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or + implied, including, without limitation, any warranties or conditions + of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A + PARTICULAR PURPOSE. You are solely responsible for determining the + appropriateness of using or redistributing the Work and assume any + risks associated with Your exercise of permissions under this License. + + 8. 
Limitation of Liability. In no event and under no legal theory, + whether in tort (including negligence), contract, or otherwise, + unless required by applicable law (such as deliberate and grossly + negligent acts) or agreed to in writing, shall any Contributor be + liable to You for damages, including any direct, indirect, special, + incidental, or consequential damages of any character arising as a + result of this License or out of the use or inability to use the + Work (including but not limited to damages for loss of goodwill, + work stoppage, computer failure or malfunction, or any and all + other commercial damages or losses), even if such Contributor + has been advised of the possibility of such damages. + + 9. Accepting Warranty or Additional Liability. While redistributing + the Work or Derivative Works thereof, You may choose to offer, + and charge a fee for, acceptance of support, warranty, indemnity, + or other liability obligations and/or rights consistent with this + License. However, in accepting such obligations, You may act only + on Your own behalf and on Your sole responsibility, not on behalf + of any other Contributor, and only if You agree to indemnify, + defend, and hold each Contributor harmless for any liability + incurred by, or claims asserted against, such Contributor by reason + of your accepting any such warranty or additional liability. + + END OF TERMS AND CONDITIONS + + APPENDIX: How to apply the Apache License to your work. + + To apply the Apache License to your work, attach the following + boilerplate notice, with the fields enclosed by brackets "{}" + replaced with your own identifying information. (Don't include + the brackets!) The text should be enclosed in the appropriate + comment syntax for the file format. We also recommend that a + file or class name and description of purpose be included on the + same "printed page" as the copyright notice for easier + identification within third-party archives. 
+ + Copyright 2020 Amazon.com, Inc. or its affiliates. All Rights Reserved. + + Licensed under the Apache License, Version 2.0 (the "License"); + you may not use this file except in compliance with the License. + You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + + Unless required by applicable law or agreed to in writing, software + distributed under the License is distributed on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + See the License for the specific language governing permissions and + limitations under the License. \ No newline at end of file diff --git a/amplify/functions/deleteDocument/node_modules/@aws-sdk/endpoint-cache/README.md b/amplify/functions/deleteDocument/node_modules/@aws-sdk/endpoint-cache/README.md new file mode 100644 index 0000000..5d72b8c --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@aws-sdk/endpoint-cache/README.md @@ -0,0 +1,17 @@ +# @aws-sdk/endpoint-cache + +[![NPM version](https://img.shields.io/npm/v/@aws-sdk/endpoint-cache/latest.svg)](https://www.npmjs.com/package/@aws-sdk/endpoint-cache) +[![NPM downloads](https://img.shields.io/npm/dm/@aws-sdk/endpoint-cache.svg)](https://www.npmjs.com/package/@aws-sdk/endpoint-cache) + +> An internal package + +## Usage + +You probably shouldn't, at least directly. + +## EndpointCache + +- uses `mnemonist/lru-cache` for storing the cache. +- the `set` operation stores milliseconds elapsed since the UNIX epoch in Expires param based on CachePeriodInMinutes provided in Endpoint. +- the `get` operation returns all un-expired endpoints with their Expires values. +- the `getEndpoint` operation returns a randomly selected un-expired endpoint. 
diff --git a/amplify/functions/deleteDocument/node_modules/@aws-sdk/endpoint-cache/dist-cjs/index.js b/amplify/functions/deleteDocument/node_modules/@aws-sdk/endpoint-cache/dist-cjs/index.js new file mode 100644 index 0000000..6975621 --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@aws-sdk/endpoint-cache/dist-cjs/index.js @@ -0,0 +1,140 @@ +"use strict"; +var __create = Object.create; +var __defProp = Object.defineProperty; +var __getOwnPropDesc = Object.getOwnPropertyDescriptor; +var __getOwnPropNames = Object.getOwnPropertyNames; +var __getProtoOf = Object.getPrototypeOf; +var __hasOwnProp = Object.prototype.hasOwnProperty; +var __name = (target, value) => __defProp(target, "name", { value, configurable: true }); +var __export = (target, all) => { + for (var name in all) + __defProp(target, name, { get: all[name], enumerable: true }); +}; +var __copyProps = (to, from, except, desc) => { + if (from && typeof from === "object" || typeof from === "function") { + for (let key of __getOwnPropNames(from)) + if (!__hasOwnProp.call(to, key) && key !== except) + __defProp(to, key, { get: () => from[key], enumerable: !(desc = __getOwnPropDesc(from, key)) || desc.enumerable }); + } + return to; +}; +var __toESM = (mod, isNodeMode, target) => (target = mod != null ? __create(__getProtoOf(mod)) : {}, __copyProps( + // If the importer is in node compatibility mode or this is not an ESM + // file that has been converted to a CommonJS file using a Babel- + // compatible transform (i.e. "__esModule" has not been set), then set + // "default" to the CommonJS "module.exports" for node compatibility. + isNodeMode || !mod || !mod.__esModule ? 
__defProp(target, "default", { value: mod, enumerable: true }) : target, + mod +)); +var __toCommonJS = (mod) => __copyProps(__defProp({}, "__esModule", { value: true }), mod); + +// src/index.ts +var src_exports = {}; +__export(src_exports, { + EndpointCache: () => EndpointCache +}); +module.exports = __toCommonJS(src_exports); + +// src/EndpointCache.ts +var import_lru_cache = __toESM(require("mnemonist/lru-cache")); +var EndpointCache = class { + static { + __name(this, "EndpointCache"); + } + cache; + constructor(capacity) { + this.cache = new import_lru_cache.default(capacity); + } + /** + * Returns an un-expired endpoint for the given key. + * + * @param endpointsWithExpiry + * @returns + */ + getEndpoint(key) { + const endpointsWithExpiry = this.get(key); + if (!endpointsWithExpiry || endpointsWithExpiry.length === 0) { + return void 0; + } + const endpoints = endpointsWithExpiry.map((endpoint) => endpoint.Address); + return endpoints[Math.floor(Math.random() * endpoints.length)]; + } + /** + * Returns un-expired endpoints for the given key. + * + * @param key + * @returns + */ + get(key) { + if (!this.has(key)) { + return; + } + const value = this.cache.get(key); + if (!value) { + return; + } + const now = Date.now(); + const endpointsWithExpiry = value.filter((endpoint) => now < endpoint.Expires); + if (endpointsWithExpiry.length === 0) { + this.delete(key); + return void 0; + } + return endpointsWithExpiry; + } + /** + * Stores the endpoints passed for the key in cache. + * If not defined, uses empty string for the Address in endpoint. + * If not defined, uses one minute for CachePeriodInMinutes in endpoint. + * Stores milliseconds elapsed since the UNIX epoch in Expires param based + * on value provided in CachePeriodInMinutes. 
+ * + * @param key + * @param endpoints + */ + set(key, endpoints) { + const now = Date.now(); + this.cache.set( + key, + endpoints.map(({ Address, CachePeriodInMinutes }) => ({ + Address, + Expires: now + CachePeriodInMinutes * 60 * 1e3 + })) + ); + } + /** + * Deletes the value for the given key in the cache. + * + * @param {string} key + */ + delete(key) { + this.cache.set(key, []); + } + /** + * Checks whether the key exists in cache. + * + * @param {string} key + * @returns {boolean} + */ + has(key) { + if (!this.cache.has(key)) { + return false; + } + const endpoints = this.cache.peek(key); + if (!endpoints) { + return false; + } + return endpoints.length > 0; + } + /** + * Clears the cache. + */ + clear() { + this.cache.clear(); + } +}; +// Annotate the CommonJS export names for ESM import in node: + +0 && (module.exports = { + EndpointCache +}); + diff --git a/amplify/functions/deleteDocument/node_modules/@aws-sdk/endpoint-cache/dist-es/Endpoint.js b/amplify/functions/deleteDocument/node_modules/@aws-sdk/endpoint-cache/dist-es/Endpoint.js new file mode 100644 index 0000000..cb0ff5c --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@aws-sdk/endpoint-cache/dist-es/Endpoint.js @@ -0,0 +1 @@ +export {}; diff --git a/amplify/functions/deleteDocument/node_modules/@aws-sdk/endpoint-cache/dist-es/EndpointCache.js b/amplify/functions/deleteDocument/node_modules/@aws-sdk/endpoint-cache/dist-es/EndpointCache.js new file mode 100644 index 0000000..decd3f2 --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@aws-sdk/endpoint-cache/dist-es/EndpointCache.js @@ -0,0 +1,54 @@ +import LRUCache from "mnemonist/lru-cache"; +export class EndpointCache { + cache; + constructor(capacity) { + this.cache = new LRUCache(capacity); + } + getEndpoint(key) { + const endpointsWithExpiry = this.get(key); + if (!endpointsWithExpiry || endpointsWithExpiry.length === 0) { + return undefined; + } + const endpoints = endpointsWithExpiry.map((endpoint) => 
endpoint.Address); + return endpoints[Math.floor(Math.random() * endpoints.length)]; + } + get(key) { + if (!this.has(key)) { + return; + } + const value = this.cache.get(key); + if (!value) { + return; + } + const now = Date.now(); + const endpointsWithExpiry = value.filter((endpoint) => now < endpoint.Expires); + if (endpointsWithExpiry.length === 0) { + this.delete(key); + return undefined; + } + return endpointsWithExpiry; + } + set(key, endpoints) { + const now = Date.now(); + this.cache.set(key, endpoints.map(({ Address, CachePeriodInMinutes }) => ({ + Address, + Expires: now + CachePeriodInMinutes * 60 * 1000, + }))); + } + delete(key) { + this.cache.set(key, []); + } + has(key) { + if (!this.cache.has(key)) { + return false; + } + const endpoints = this.cache.peek(key); + if (!endpoints) { + return false; + } + return endpoints.length > 0; + } + clear() { + this.cache.clear(); + } +} diff --git a/amplify/functions/deleteDocument/node_modules/@aws-sdk/endpoint-cache/dist-es/index.js b/amplify/functions/deleteDocument/node_modules/@aws-sdk/endpoint-cache/dist-es/index.js new file mode 100644 index 0000000..41fce6d --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@aws-sdk/endpoint-cache/dist-es/index.js @@ -0,0 +1,2 @@ +export * from "./Endpoint"; +export * from "./EndpointCache"; diff --git a/amplify/functions/deleteDocument/node_modules/@aws-sdk/endpoint-cache/dist-types/Endpoint.d.ts b/amplify/functions/deleteDocument/node_modules/@aws-sdk/endpoint-cache/dist-types/Endpoint.d.ts new file mode 100644 index 0000000..17b37cf --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@aws-sdk/endpoint-cache/dist-types/Endpoint.d.ts @@ -0,0 +1,13 @@ +/** + * @internal + */ +export interface Endpoint { + /** + *

An endpoint address.

+ */ + Address: string; + /** + *

The TTL for the endpoint, in minutes.

+ */ + CachePeriodInMinutes: number; +} diff --git a/amplify/functions/deleteDocument/node_modules/@aws-sdk/endpoint-cache/dist-types/EndpointCache.d.ts b/amplify/functions/deleteDocument/node_modules/@aws-sdk/endpoint-cache/dist-types/EndpointCache.d.ts new file mode 100644 index 0000000..5128e14 --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@aws-sdk/endpoint-cache/dist-types/EndpointCache.d.ts @@ -0,0 +1,56 @@ +import { Endpoint } from "./Endpoint"; +/** + * @internal + */ +export interface EndpointWithExpiry extends Pick { + Expires: number; +} +/** + * @internal + */ +export declare class EndpointCache { + private readonly cache; + constructor(capacity: number); + /** + * Returns an un-expired endpoint for the given key. + * + * @param endpointsWithExpiry + * @returns + */ + getEndpoint(key: string): string | undefined; + /** + * Returns un-expired endpoints for the given key. + * + * @param key + * @returns + */ + get(key: string): EndpointWithExpiry[] | undefined; + /** + * Stores the endpoints passed for the key in cache. + * If not defined, uses empty string for the Address in endpoint. + * If not defined, uses one minute for CachePeriodInMinutes in endpoint. + * Stores milliseconds elapsed since the UNIX epoch in Expires param based + * on value provided in CachePeriodInMinutes. + * + * @param key + * @param endpoints + */ + set(key: string, endpoints: Endpoint[]): void; + /** + * Deletes the value for the given key in the cache. + * + * @param {string} key + */ + delete(key: string): void; + /** + * Checks whether the key exists in cache. + * + * @param {string} key + * @returns {boolean} + */ + has(key: string): boolean; + /** + * Clears the cache. 
+ */ + clear(): void; +} diff --git a/amplify/functions/deleteDocument/node_modules/@aws-sdk/endpoint-cache/dist-types/index.d.ts b/amplify/functions/deleteDocument/node_modules/@aws-sdk/endpoint-cache/dist-types/index.d.ts new file mode 100644 index 0000000..f2f149f --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@aws-sdk/endpoint-cache/dist-types/index.d.ts @@ -0,0 +1,8 @@ +/** + * @internal + */ +export * from "./Endpoint"; +/** + * @internal + */ +export * from "./EndpointCache"; diff --git a/amplify/functions/deleteDocument/node_modules/@aws-sdk/endpoint-cache/dist-types/ts3.4/Endpoint.d.ts b/amplify/functions/deleteDocument/node_modules/@aws-sdk/endpoint-cache/dist-types/ts3.4/Endpoint.d.ts new file mode 100644 index 0000000..c1caacb --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@aws-sdk/endpoint-cache/dist-types/ts3.4/Endpoint.d.ts @@ -0,0 +1,4 @@ +export interface Endpoint { + Address: string; + CachePeriodInMinutes: number; +} diff --git a/amplify/functions/deleteDocument/node_modules/@aws-sdk/endpoint-cache/dist-types/ts3.4/EndpointCache.d.ts b/amplify/functions/deleteDocument/node_modules/@aws-sdk/endpoint-cache/dist-types/ts3.4/EndpointCache.d.ts new file mode 100644 index 0000000..c01e2b3 --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@aws-sdk/endpoint-cache/dist-types/ts3.4/EndpointCache.d.ts @@ -0,0 +1,14 @@ +import { Endpoint } from "./Endpoint"; +export interface EndpointWithExpiry extends Pick { + Expires: number; +} +export declare class EndpointCache { + private readonly cache; + constructor(capacity: number); + getEndpoint(key: string): string | undefined; + get(key: string): EndpointWithExpiry[] | undefined; + set(key: string, endpoints: Endpoint[]): void; + delete(key: string): void; + has(key: string): boolean; + clear(): void; +} diff --git a/amplify/functions/deleteDocument/node_modules/@aws-sdk/endpoint-cache/dist-types/ts3.4/index.d.ts 
b/amplify/functions/deleteDocument/node_modules/@aws-sdk/endpoint-cache/dist-types/ts3.4/index.d.ts new file mode 100644 index 0000000..41fce6d --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@aws-sdk/endpoint-cache/dist-types/ts3.4/index.d.ts @@ -0,0 +1,2 @@ +export * from "./Endpoint"; +export * from "./EndpointCache"; diff --git a/amplify/functions/deleteDocument/node_modules/@aws-sdk/endpoint-cache/package.json b/amplify/functions/deleteDocument/node_modules/@aws-sdk/endpoint-cache/package.json new file mode 100644 index 0000000..13edb6f --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@aws-sdk/endpoint-cache/package.json @@ -0,0 +1,54 @@ +{ + "name": "@aws-sdk/endpoint-cache", + "version": "3.723.0", + "scripts": { + "build": "concurrently 'yarn:build:cjs' 'yarn:build:es' 'yarn:build:types'", + "build:cjs": "node ../../scripts/compilation/inline endpoint-cache", + "build:es": "tsc -p tsconfig.es.json", + "build:include:deps": "lerna run --scope $npm_package_name --include-dependencies build", + "build:types": "tsc -p tsconfig.types.json", + "build:types:downlevel": "downlevel-dts dist-types dist-types/ts3.4", + "clean": "rimraf ./dist-* && rimraf *.tsbuildinfo", + "test": "yarn g:vitest run", + "test:watch": "yarn g:vitest watch" + }, + "author": { + "name": "AWS SDK for JavaScript Team", + "url": "https://aws.amazon.com/javascript/" + }, + "license": "Apache-2.0", + "main": "./dist-cjs/index.js", + "module": "./dist-es/index.js", + "types": "./dist-types/index.d.ts", + "dependencies": { + "mnemonist": "0.38.3", + "tslib": "^2.6.2" + }, + "devDependencies": { + "@tsconfig/recommended": "1.0.1", + "@types/node": "^18.19.69", + "concurrently": "7.0.0", + "downlevel-dts": "0.10.1", + "rimraf": "3.0.2", + "typescript": "~5.2.2" + }, + "engines": { + "node": ">=18.0.0" + }, + "typesVersions": { + "<4.0": { + "dist-types/*": [ + "dist-types/ts3.4/*" + ] + } + }, + "files": [ + "dist-*/**" + ], + "homepage": 
"https://github.com/aws/aws-sdk-js-v3/tree/main/packages/endpoint-cache", + "repository": { + "type": "git", + "url": "https://github.com/aws/aws-sdk-js-v3.git", + "directory": "packages/endpoint-cache" + } +} diff --git a/amplify/functions/deleteDocument/node_modules/@aws-sdk/lib-dynamodb/LICENSE b/amplify/functions/deleteDocument/node_modules/@aws-sdk/lib-dynamodb/LICENSE new file mode 100644 index 0000000..7b6491b --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@aws-sdk/lib-dynamodb/LICENSE @@ -0,0 +1,201 @@ +Apache License + Version 2.0, January 2004 + http://www.apache.org/licenses/ + + TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION + + 1. Definitions. + + "License" shall mean the terms and conditions for use, reproduction, + and distribution as defined by Sections 1 through 9 of this document. + + "Licensor" shall mean the copyright owner or entity authorized by + the copyright owner that is granting the License. + + "Legal Entity" shall mean the union of the acting entity and all + other entities that control, are controlled by, or are under common + control with that entity. For the purposes of this definition, + "control" means (i) the power, direct or indirect, to cause the + direction or management of such entity, whether by contract or + otherwise, or (ii) ownership of fifty percent (50%) or more of the + outstanding shares, or (iii) beneficial ownership of such entity. + + "You" (or "Your") shall mean an individual or Legal Entity + exercising permissions granted by this License. + + "Source" form shall mean the preferred form for making modifications, + including but not limited to software source code, documentation + source, and configuration files. + + "Object" form shall mean any form resulting from mechanical + transformation or translation of a Source form, including but + not limited to compiled object code, generated documentation, + and conversions to other media types. 
+ + "Work" shall mean the work of authorship, whether in Source or + Object form, made available under the License, as indicated by a + copyright notice that is included in or attached to the work + (an example is provided in the Appendix below). + + "Derivative Works" shall mean any work, whether in Source or Object + form, that is based on (or derived from) the Work and for which the + editorial revisions, annotations, elaborations, or other modifications + represent, as a whole, an original work of authorship. For the purposes + of this License, Derivative Works shall not include works that remain + separable from, or merely link (or bind by name) to the interfaces of, + the Work and Derivative Works thereof. + + "Contribution" shall mean any work of authorship, including + the original version of the Work and any modifications or additions + to that Work or Derivative Works thereof, that is intentionally + submitted to Licensor for inclusion in the Work by the copyright owner + or by an individual or Legal Entity authorized to submit on behalf of + the copyright owner. For the purposes of this definition, "submitted" + means any form of electronic, verbal, or written communication sent + to the Licensor or its representatives, including but not limited to + communication on electronic mailing lists, source code control systems, + and issue tracking systems that are managed by, or on behalf of, the + Licensor for the purpose of discussing and improving the Work, but + excluding communication that is conspicuously marked or otherwise + designated in writing by the copyright owner as "Not a Contribution." + + "Contributor" shall mean Licensor and any individual or Legal Entity + on behalf of whom a Contribution has been received by Licensor and + subsequently incorporated within the Work. + + 2. Grant of Copyright License. 
Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + copyright license to reproduce, prepare Derivative Works of, + publicly display, publicly perform, sublicense, and distribute the + Work and such Derivative Works in Source or Object form. + + 3. Grant of Patent License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + (except as stated in this section) patent license to make, have made, + use, offer to sell, sell, import, and otherwise transfer the Work, + where such license applies only to those patent claims licensable + by such Contributor that are necessarily infringed by their + Contribution(s) alone or by combination of their Contribution(s) + with the Work to which such Contribution(s) was submitted. If You + institute patent litigation against any entity (including a + cross-claim or counterclaim in a lawsuit) alleging that the Work + or a Contribution incorporated within the Work constitutes direct + or contributory patent infringement, then any patent licenses + granted to You under this License for that Work shall terminate + as of the date such litigation is filed. + + 4. Redistribution. 
You may reproduce and distribute copies of the + Work or Derivative Works thereof in any medium, with or without + modifications, and in Source or Object form, provided that You + meet the following conditions: + + (a) You must give any other recipients of the Work or + Derivative Works a copy of this License; and + + (b) You must cause any modified files to carry prominent notices + stating that You changed the files; and + + (c) You must retain, in the Source form of any Derivative Works + that You distribute, all copyright, patent, trademark, and + attribution notices from the Source form of the Work, + excluding those notices that do not pertain to any part of + the Derivative Works; and + + (d) If the Work includes a "NOTICE" text file as part of its + distribution, then any Derivative Works that You distribute must + include a readable copy of the attribution notices contained + within such NOTICE file, excluding those notices that do not + pertain to any part of the Derivative Works, in at least one + of the following places: within a NOTICE text file distributed + as part of the Derivative Works; within the Source form or + documentation, if provided along with the Derivative Works; or, + within a display generated by the Derivative Works, if and + wherever such third-party notices normally appear. The contents + of the NOTICE file are for informational purposes only and + do not modify the License. You may add Your own attribution + notices within Derivative Works that You distribute, alongside + or as an addendum to the NOTICE text from the Work, provided + that such additional attribution notices cannot be construed + as modifying the License. 
+ + You may add Your own copyright statement to Your modifications and + may provide additional or different license terms and conditions + for use, reproduction, or distribution of Your modifications, or + for any such Derivative Works as a whole, provided Your use, + reproduction, and distribution of the Work otherwise complies with + the conditions stated in this License. + + 5. Submission of Contributions. Unless You explicitly state otherwise, + any Contribution intentionally submitted for inclusion in the Work + by You to the Licensor shall be under the terms and conditions of + this License, without any additional terms or conditions. + Notwithstanding the above, nothing herein shall supersede or modify + the terms of any separate license agreement you may have executed + with Licensor regarding such Contributions. + + 6. Trademarks. This License does not grant permission to use the trade + names, trademarks, service marks, or product names of the Licensor, + except as required for reasonable and customary use in describing the + origin of the Work and reproducing the content of the NOTICE file. + + 7. Disclaimer of Warranty. Unless required by applicable law or + agreed to in writing, Licensor provides the Work (and each + Contributor provides its Contributions) on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or + implied, including, without limitation, any warranties or conditions + of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A + PARTICULAR PURPOSE. You are solely responsible for determining the + appropriateness of using or redistributing the Work and assume any + risks associated with Your exercise of permissions under this License. + + 8. Limitation of Liability. 
In no event and under no legal theory, + whether in tort (including negligence), contract, or otherwise, + unless required by applicable law (such as deliberate and grossly + negligent acts) or agreed to in writing, shall any Contributor be + liable to You for damages, including any direct, indirect, special, + incidental, or consequential damages of any character arising as a + result of this License or out of the use or inability to use the + Work (including but not limited to damages for loss of goodwill, + work stoppage, computer failure or malfunction, or any and all + other commercial damages or losses), even if such Contributor + has been advised of the possibility of such damages. + + 9. Accepting Warranty or Additional Liability. While redistributing + the Work or Derivative Works thereof, You may choose to offer, + and charge a fee for, acceptance of support, warranty, indemnity, + or other liability obligations and/or rights consistent with this + License. However, in accepting such obligations, You may act only + on Your own behalf and on Your sole responsibility, not on behalf + of any other Contributor, and only if You agree to indemnify, + defend, and hold each Contributor harmless for any liability + incurred by, or claims asserted against, such Contributor by reason + of your accepting any such warranty or additional liability. + + END OF TERMS AND CONDITIONS + + APPENDIX: How to apply the Apache License to your work. + + To apply the Apache License to your work, attach the following + boilerplate notice, with the fields enclosed by brackets "{}" + replaced with your own identifying information. (Don't include + the brackets!) The text should be enclosed in the appropriate + comment syntax for the file format. We also recommend that a + file or class name and description of purpose be included on the + same "printed page" as the copyright notice for easier + identification within third-party archives. + + Copyright 2018-2020 Amazon.com, Inc. 
or its affiliates. All Rights Reserved. + + Licensed under the Apache License, Version 2.0 (the "License"); + you may not use this file except in compliance with the License. + You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + + Unless required by applicable law or agreed to in writing, software + distributed under the License is distributed on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + See the License for the specific language governing permissions and + limitations under the License. \ No newline at end of file diff --git a/amplify/functions/deleteDocument/node_modules/@aws-sdk/lib-dynamodb/README.md b/amplify/functions/deleteDocument/node_modules/@aws-sdk/lib-dynamodb/README.md new file mode 100644 index 0000000..fc88a48 --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@aws-sdk/lib-dynamodb/README.md @@ -0,0 +1,393 @@ +# @aws-sdk/lib-dynamodb + +[![NPM version](https://img.shields.io/npm/v/@aws-sdk/lib-dynamodb/latest.svg)](https://www.npmjs.com/package/@aws-sdk/lib-dynamodb) +[![NPM downloads](https://img.shields.io/npm/dm/@aws-sdk/lib-dynamodb.svg)](https://www.npmjs.com/package/@aws-sdk/lib-dynamodb) + +## Overview + +The document client simplifies working with items in Amazon DynamoDB by +abstracting away the notion of attribute values. This abstraction annotates native +JavaScript types supplied as input parameters, as well as converts annotated +response data to native JavaScript types. + +## Marshalling Input and Unmarshalling Response Data + +The document client affords developers the use of native JavaScript types +instead of `AttributeValue`s to simplify the JavaScript development +experience with Amazon DynamoDB. JavaScript objects passed in as parameters +are marshalled into `AttributeValue` shapes required by Amazon DynamoDB. +Responses from DynamoDB are unmarshalled into plain JavaScript objects +by the `DocumentClient`. 
The `DocumentClient` does not accept +`AttributeValue`s in favor of native JavaScript types. + +| JavaScript Type | DynamoDB AttributeValue | +| :--------------------------------: | ----------------------- | +| String | S | +| Number / BigInt / NumberValue | N | +| Boolean | BOOL | +| null | NULL | +| Array | L | +| Object | M | +| Set\<Uint8Array> | BS | +| Set\<number> | NS | +| Set\<string> | SS | +| Uint8Array, Buffer, File, Blob... | B | + +### Example + +Here is an example list which is sent to DynamoDB client in an operation: + +```json +{ "L": [{ "NULL": true }, { "BOOL": false }, { "N": 1 }, { "S": "two" }] } +``` + +The DynamoDB document client abstracts the attribute values as follows in +both input and output: + +```json +[null, false, 1, "two"] +``` + +## Usage + +To create document client, you need to create DynamoDB client first as follows: + +```js +import { DynamoDBClient } from "@aws-sdk/client-dynamodb"; // ES6 import +// const { DynamoDBClient } = require("@aws-sdk/client-dynamodb"); // CommonJS import + +// Bare-bones DynamoDB Client +const client = new DynamoDBClient({}); +``` + +```js +import { DynamoDB } from "@aws-sdk/client-dynamodb"; // ES6 import +// const { DynamoDB } = require("@aws-sdk/client-dynamodb"); // CommonJS import + +// Full DynamoDB Client +const client = new DynamoDB({}); +``` + +The bare-bones clients are more modular. They reduce bundle size and improve +loading performance over full clients as explained in blog post on +[modular packages in AWS SDK for JavaScript](https://aws.amazon.com/blogs/developer/modular-packages-in-aws-sdk-for-javascript/). 
+ +### Constructor + +Once DynamoDB client is created, you can either create the bare-bones +document client or full document client as follows: + +```js +import { DynamoDBDocumentClient } from "@aws-sdk/lib-dynamodb"; // ES6 import +// const { DynamoDBDocumentClient } = require("@aws-sdk/lib-dynamodb"); // CommonJS import + +// Bare-bones document client +const ddbDocClient = DynamoDBDocumentClient.from(client); // client is DynamoDB client +``` + +```js +import { DynamoDBDocument } from "@aws-sdk/lib-dynamodb"; // ES6 import +// const { DynamoDBDocument } = require("@aws-sdk/lib-dynamodb"); // CommonJS import + +// Full document client +const ddbDocClient = DynamoDBDocument.from(client); // client is DynamoDB client +``` + +### Configuration + +The configuration for marshalling and unmarshalling can be sent as an optional +second parameter during creation of document client as follows: + +```ts +export interface marshallOptions { + /** + * Whether to automatically convert empty strings, blobs, and sets to `null` + */ + convertEmptyValues?: boolean; + /** + * Whether to remove undefined values from JS arrays/Sets/objects + * when marshalling to DynamoDB lists/sets/maps respectively. + * + * A DynamoDB item is not itself considered a map. Only + * attributes of an item are examined. + */ + removeUndefinedValues?: boolean; + /** + * Whether to convert typeof object to map attribute. + */ + convertClassInstanceToMap?: boolean; + /** + * Whether to convert the top level container + * if it is a map or list. + * + * Default is true when using the DynamoDBDocumentClient, + * but false if directly using the marshall function (backwards compatibility). + */ + convertTopLevelContainer?: boolean; + /** + * Whether to allow numbers beyond Number.MAX_SAFE_INTEGER during marshalling. + * When set to true, allows numbers that may lose precision when converted to JavaScript numbers. 
+ * When false (default), throws an error if a number exceeds Number.MAX_SAFE_INTEGER to prevent + * unintended loss of precision. Consider using the NumberValue type from @aws-sdk/lib-dynamodb + * for precise handling of large numbers. + */ + allowImpreciseNumbers?: boolean; +} + +export interface unmarshallOptions { + /** + * Whether to modify how numbers are unmarshalled from DynamoDB. + * When set to true, returns numbers as NumberValue instances instead of native JavaScript numbers. + * This allows for the safe round-trip transport of numbers of arbitrary size. + * + * If a function is provided, it will be called with the string representation of numbers to handle + * custom conversions (e.g., using BigInt or decimal libraries). + */ + wrapNumbers?: boolean | ((value: string) => number | bigint | NumberValue | any); + /** + * When true, skip wrapping the data in `{ M: data }` before converting. + * + * Default is true when using the DynamoDBDocumentClient, + * but false if directly using the unmarshall function (backwards compatibility). + */ + convertWithoutMapWrapper?: boolean; +} + +const marshallOptions: marshallOptions = {}; +const unmarshallOptions: unmarshallOptions = {}; + +const translateConfig = { marshallOptions, unmarshallOptions }; + +const client = new DynamoDBClient({}); +const ddbDocClient = DynamoDBDocument.from(client, translateConfig); +``` + +### Calling operations + +You can call the document client operations using command objects on bare-bones +client as follows: + +```js +import { DynamoDBDocumentClient, PutCommand } from "@aws-sdk/lib-dynamodb"; + +// ... DynamoDB client creation + +const ddbDocClient = DynamoDBDocumentClient.from(client); +// Call using bare-bones client and Command object. 
+await ddbDocClient.send( + new PutCommand({ + TableName, + Item: { + id: "1", + content: "content from DynamoDBDocumentClient", + }, + }) +); +``` + +You can also call operations on full client as follows: + +```js +import { DynamoDBDocument } from "@aws-sdk/lib-dynamodb"; + +// ... DynamoDB client creation + +const ddbDocClient = DynamoDBDocument.from(client); +// Call using full client. +await ddbDocClient.put({ + TableName, + Item: { + id: "2", + content: "content from DynamoDBDocument", + }, +}); +``` + +### Large Numbers and `NumberValue`. + +On the input or marshalling side, the class `NumberValue` can be used +anywhere to represent a DynamoDB number value, even small numbers. + +```ts +import { DynamoDB } from "@aws-sdk/client-dynamodb"; +import { NumberValue, DynamoDBDocument } from "@aws-sdk/lib-dynamodb"; + +// Note, the client will not validate the acceptability of the number +// in terms of size or format. +// It is only here to preserve your precise representation. +const client = DynamoDBDocument.from(new DynamoDB({})); + +await client.put({ + Item: { + id: 1, + smallNumber: NumberValue.from("123"), + bigNumber: NumberValue.from("1000000000000000000000.000000000001"), + nSet: new Set([123, NumberValue.from("456"), 789]), + }, +}); +``` + +On the output or unmarshalling side, the class `NumberValue` is used +depending on your setting for the `unmarshallOptions` flag `wrapNumbers`, +shown above. + +```ts +import { DynamoDB } from "@aws-sdk/client-dynamodb"; +import { NumberValue, DynamoDBDocument } from "@aws-sdk/lib-dynamodb"; + +const client = DynamoDBDocument.from(new DynamoDB({})); + +const response = await client.get({ + Key: { + id: 1, + }, +}); + +/** + * Numbers in the response may be a number, a BigInt, or a NumberValue depending + * on how you set `wrapNumbers`. 
+ */ +const value = response.Item.bigNumber; +``` + +You can also provide a custom function to handle number conversion during unmarshalling: + +```typescript +const client = DynamoDBDocument.from(new DynamoDB({}), { + unmarshallOptions: { + // Use BigInt for all numbers + wrapNumbers: (str) => BigInt(str), + }, +}); + +const response = await client.get({ + Key: { id: 1 }, +}); + +// Numbers in response will be BigInt instead of NumberValue or regular numbers +``` + +`NumberValue` does not provide a way to do mathematical operations on itself. +To do mathematical operations, take the string value of `NumberValue` by calling +`.toString()` and supply it to your chosen big number implementation. + +The client protects against precision loss by throwing an error on large numbers, but you can either +allow imprecise values with `allowImpreciseNumbers` or maintain exact precision using `NumberValue`. + +```typescript +const preciseValue = "34567890123456789012345678901234567890"; + +// 1. Default behavior - will throw error +await client.send( + new PutCommand({ + TableName: "Table", + Item: { + id: "1", + number: Number(preciseValue), // Throws error: Number is greater than Number.MAX_SAFE_INTEGER + }, + }) +); + +// 2. Using allowImpreciseNumbers - will store but loses precision (mimics the v2 implicit behavior) +const impreciseClient = DynamoDBDocumentClient.from(new DynamoDBClient({}), { + marshallOptions: { allowImpreciseNumbers: true }, +}); +await impreciseClient.send( + new PutCommand({ + TableName: "Table", + Item: { + id: "2", + number: Number(preciseValue), // Loses precision 34567890123456790000000000000000000000n + }, + }) +); +``` + +### Client and Command middleware stacks + +As with other AWS SDK for JavaScript v3 clients, you can apply middleware functions +both on the client itself and individual `Command`s. + +For individual `Command`s, here are examples of how to add middleware before and after +both marshalling and unmarshalling. 
We will use `QueryCommand` as an example. +Others follow the same pattern. + +```js +import { DynamoDBDocumentClient, QueryCommand } from "@aws-sdk/lib-dynamodb"; + +const client = new DynamoDBClient({ + /*...*/ +}); +const doc = DynamoDBDocumentClient.from(client); +const command = new QueryCommand({ + /*...*/ +}); +``` + +Before and after marshalling: + +```js +command.middlewareStack.addRelativeTo( + (next) => async (args) => { + console.log("pre-marshall", args.input); + return next(args); + }, + { + relation: "before", + toMiddleware: "DocumentMarshall", + } +); +command.middlewareStack.addRelativeTo( + (next) => async (args) => { + console.log("post-marshall", args.input); + return next(args); + }, + { + relation: "after", + toMiddleware: "DocumentMarshall", + } +); +``` + +Before and after unmarshalling: + +```js +command.middlewareStack.addRelativeTo( + (next) => async (args) => { + const result = await next(args); + console.log("pre-unmarshall", result.output.Items); + return result; + }, + { + relation: "after", // <- after for pre-unmarshall + toMiddleware: "DocumentUnmarshall", + } +); +command.middlewareStack.addRelativeTo( + (next) => async (args) => { + const result = await next(args); + console.log("post-unmarshall", result.output.Items); + return result; + }, + { + relation: "before", // <- before for post-unmarshall + toMiddleware: "DocumentUnmarshall", + } +); +``` + +### Destroying document client + +The `destroy()` call on document client is a no-op as document client does not +create a new DynamoDB client. You need to call `destroy()` on DynamoDB client to +clean resources used by it as shown below. + +```js +const client = new DynamoDBClient({}); +const ddbDocClient = DynamoDBDocumentClient.from(client); + +// Perform operations on document client. 
+ +ddbDocClient.destroy(); // no-op +client.destroy(); // destroys DynamoDBClient +``` diff --git a/amplify/functions/deleteDocument/node_modules/@aws-sdk/lib-dynamodb/dist-cjs/index.js b/amplify/functions/deleteDocument/node_modules/@aws-sdk/lib-dynamodb/dist-cjs/index.js new file mode 100644 index 0000000..b8cf754 --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@aws-sdk/lib-dynamodb/dist-cjs/index.js @@ -0,0 +1,1053 @@ +"use strict"; +var __defProp = Object.defineProperty; +var __getOwnPropDesc = Object.getOwnPropertyDescriptor; +var __getOwnPropNames = Object.getOwnPropertyNames; +var __hasOwnProp = Object.prototype.hasOwnProperty; +var __name = (target, value) => __defProp(target, "name", { value, configurable: true }); +var __export = (target, all) => { + for (var name in all) + __defProp(target, name, { get: all[name], enumerable: true }); +}; +var __copyProps = (to, from, except, desc) => { + if (from && typeof from === "object" || typeof from === "function") { + for (let key of __getOwnPropNames(from)) + if (!__hasOwnProp.call(to, key) && key !== except) + __defProp(to, key, { get: () => from[key], enumerable: !(desc = __getOwnPropDesc(from, key)) || desc.enumerable }); + } + return to; +}; +var __toCommonJS = (mod) => __copyProps(__defProp({}, "__esModule", { value: true }), mod); + +// src/index.ts +var index_exports = {}; +__export(index_exports, { + BatchExecuteStatementCommand: () => BatchExecuteStatementCommand, + BatchGetCommand: () => BatchGetCommand, + BatchWriteCommand: () => BatchWriteCommand, + DeleteCommand: () => DeleteCommand, + DynamoDBDocument: () => DynamoDBDocument, + DynamoDBDocumentClient: () => DynamoDBDocumentClient, + DynamoDBDocumentClientCommand: () => DynamoDBDocumentClientCommand, + ExecuteStatementCommand: () => ExecuteStatementCommand, + ExecuteTransactionCommand: () => ExecuteTransactionCommand, + GetCommand: () => GetCommand, + NativeAttributeBinary: () => import_util_dynamodb.NativeAttributeBinary, + 
NativeAttributeValue: () => import_util_dynamodb.NativeAttributeValue, + NativeScalarAttributeValue: () => import_util_dynamodb.NativeScalarAttributeValue, + NumberValue: () => import_util_dynamodb.NumberValueImpl, + PaginationConfiguration: () => import_types.PaginationConfiguration, + PutCommand: () => PutCommand, + QueryCommand: () => QueryCommand, + ScanCommand: () => ScanCommand, + TransactGetCommand: () => TransactGetCommand, + TransactWriteCommand: () => TransactWriteCommand, + UpdateCommand: () => UpdateCommand, + __Client: () => import_smithy_client.Client, + marshallOptions: () => import_util_dynamodb.marshallOptions, + paginateQuery: () => paginateQuery, + paginateScan: () => paginateScan, + unmarshallOptions: () => import_util_dynamodb.unmarshallOptions +}); +module.exports = __toCommonJS(index_exports); + +// src/commands/BatchExecuteStatementCommand.ts +var import_client_dynamodb = require("@aws-sdk/client-dynamodb"); + + +// src/baseCommand/DynamoDBDocumentClientCommand.ts +var import_core = require("@aws-sdk/core"); +var import_smithy_client = require("@smithy/smithy-client"); + +// src/commands/utils.ts +var import_util_dynamodb = require("@aws-sdk/util-dynamodb"); +var SELF = null; +var ALL_VALUES = {}; +var ALL_MEMBERS = []; +var NEXT_LEVEL = "*"; +var processObj = /* @__PURE__ */ __name((obj, processFunc, keyNodes) => { + if (obj !== void 0) { + if (keyNodes == null) { + return processFunc(obj); + } else { + const keys = Object.keys(keyNodes); + const goToNextLevel = keys.length === 1 && keys[0] === NEXT_LEVEL; + const someChildren = keys.length >= 1 && !goToNextLevel; + const allChildren = keys.length === 0; + if (someChildren) { + return processKeysInObj(obj, processFunc, keyNodes); + } else if (allChildren) { + return processAllKeysInObj(obj, processFunc, SELF); + } else if (goToNextLevel) { + return Object.entries(obj ?? 
{}).reduce((acc, [k, v]) => { + if (typeof v !== "function") { + acc[k] = processObj(v, processFunc, keyNodes[NEXT_LEVEL]); + } + return acc; + }, Array.isArray(obj) ? [] : {}); + } + } + } + return void 0; +}, "processObj"); +var processKeysInObj = /* @__PURE__ */ __name((obj, processFunc, keyNodes) => { + let accumulator; + if (Array.isArray(obj)) { + accumulator = obj.filter((item) => typeof item !== "function"); + } else { + accumulator = {}; + for (const [k, v] of Object.entries(obj)) { + if (typeof v !== "function") { + accumulator[k] = v; + } + } + } + for (const [nodeKey, nodes] of Object.entries(keyNodes)) { + if (typeof obj[nodeKey] === "function") { + continue; + } + const processedValue = processObj(obj[nodeKey], processFunc, nodes); + if (processedValue !== void 0 && typeof processedValue !== "function") { + accumulator[nodeKey] = processedValue; + } + } + return accumulator; +}, "processKeysInObj"); +var processAllKeysInObj = /* @__PURE__ */ __name((obj, processFunc, keyNodes) => { + if (Array.isArray(obj)) { + return obj.filter((item) => typeof item !== "function").map((item) => processObj(item, processFunc, keyNodes)); + } + return Object.entries(obj).reduce((acc, [key, value]) => { + if (typeof value === "function") { + return acc; + } + const processedValue = processObj(value, processFunc, keyNodes); + if (processedValue !== void 0 && typeof processedValue !== "function") { + acc[key] = processedValue; + } + return acc; + }, {}); +}, "processAllKeysInObj"); +var marshallInput = /* @__PURE__ */ __name((obj, keyNodes, options) => { + const marshallFunc = /* @__PURE__ */ __name((toMarshall) => (0, import_util_dynamodb.marshall)(toMarshall, options), "marshallFunc"); + return processKeysInObj(obj, marshallFunc, keyNodes); +}, "marshallInput"); +var unmarshallOutput = /* @__PURE__ */ __name((obj, keyNodes, options) => { + const unmarshallFunc = /* @__PURE__ */ __name((toMarshall) => (0, import_util_dynamodb.unmarshall)(toMarshall, options), 
"unmarshallFunc"); + return processKeysInObj(obj, unmarshallFunc, keyNodes); +}, "unmarshallOutput"); + +// src/baseCommand/DynamoDBDocumentClientCommand.ts +var DynamoDBDocumentClientCommand = class extends import_smithy_client.Command { + static { + __name(this, "DynamoDBDocumentClientCommand"); + } + addMarshallingMiddleware(configuration) { + const { marshallOptions: marshallOptions3 = {}, unmarshallOptions: unmarshallOptions3 = {} } = configuration.translateConfig || {}; + marshallOptions3.convertTopLevelContainer = marshallOptions3.convertTopLevelContainer ?? true; + unmarshallOptions3.convertWithoutMapWrapper = unmarshallOptions3.convertWithoutMapWrapper ?? true; + this.clientCommand.middlewareStack.addRelativeTo( + (next, context) => async (args) => { + (0, import_core.setFeature)(context, "DDB_MAPPER", "d"); + args.input = marshallInput(args.input, this.inputKeyNodes, marshallOptions3); + return next(args); + }, + { + name: "DocumentMarshall", + relation: "before", + toMiddleware: "serializerMiddleware", + override: true + } + ); + this.clientCommand.middlewareStack.addRelativeTo( + (next, context) => async (args) => { + const deserialized = await next(args); + deserialized.output = unmarshallOutput(deserialized.output, this.outputKeyNodes, unmarshallOptions3); + return deserialized; + }, + { + name: "DocumentUnmarshall", + relation: "before", + toMiddleware: "deserializerMiddleware", + override: true + } + ); + } +}; + +// src/commands/BatchExecuteStatementCommand.ts +var BatchExecuteStatementCommand = class extends DynamoDBDocumentClientCommand { + constructor(input) { + super(); + this.input = input; + this.clientCommand = new import_client_dynamodb.BatchExecuteStatementCommand(this.input); + this.middlewareStack = this.clientCommand.middlewareStack; + } + static { + __name(this, "BatchExecuteStatementCommand"); + } + inputKeyNodes = { + Statements: { + "*": { + Parameters: ALL_MEMBERS + // set/list of AttributeValue + } + } + }; + outputKeyNodes = { + 
Responses: { + "*": { + Error: { + Item: ALL_VALUES + // map with AttributeValue + }, + Item: ALL_VALUES + // map with AttributeValue + } + } + }; + clientCommand; + middlewareStack; + /** + * @internal + */ + resolveMiddleware(clientStack, configuration, options) { + this.addMarshallingMiddleware(configuration); + const stack = clientStack.concat(this.middlewareStack); + const handler = this.clientCommand.resolveMiddleware(stack, configuration, options); + return async () => handler(this.clientCommand); + } +}; + +// src/commands/BatchGetCommand.ts + + +var BatchGetCommand = class extends DynamoDBDocumentClientCommand { + constructor(input) { + super(); + this.input = input; + this.clientCommand = new import_client_dynamodb.BatchGetItemCommand(this.input); + this.middlewareStack = this.clientCommand.middlewareStack; + } + static { + __name(this, "BatchGetCommand"); + } + inputKeyNodes = { + RequestItems: { + "*": { + Keys: { + "*": ALL_VALUES + // map with AttributeValue + } + } + } + }; + outputKeyNodes = { + Responses: { + "*": { + "*": ALL_VALUES + // map with AttributeValue + } + }, + UnprocessedKeys: { + "*": { + Keys: { + "*": ALL_VALUES + // map with AttributeValue + } + } + } + }; + clientCommand; + middlewareStack; + /** + * @internal + */ + resolveMiddleware(clientStack, configuration, options) { + this.addMarshallingMiddleware(configuration); + const stack = clientStack.concat(this.middlewareStack); + const handler = this.clientCommand.resolveMiddleware(stack, configuration, options); + return async () => handler(this.clientCommand); + } +}; + +// src/commands/BatchWriteCommand.ts + + +var BatchWriteCommand = class extends DynamoDBDocumentClientCommand { + constructor(input) { + super(); + this.input = input; + this.clientCommand = new import_client_dynamodb.BatchWriteItemCommand(this.input); + this.middlewareStack = this.clientCommand.middlewareStack; + } + static { + __name(this, "BatchWriteCommand"); + } + inputKeyNodes = { + RequestItems: { + "*": { 
+ "*": { + PutRequest: { + Item: ALL_VALUES + // map with AttributeValue + }, + DeleteRequest: { + Key: ALL_VALUES + // map with AttributeValue + } + } + } + } + }; + outputKeyNodes = { + UnprocessedItems: { + "*": { + "*": { + PutRequest: { + Item: ALL_VALUES + // map with AttributeValue + }, + DeleteRequest: { + Key: ALL_VALUES + // map with AttributeValue + } + } + } + }, + ItemCollectionMetrics: { + "*": { + "*": { + ItemCollectionKey: ALL_VALUES + // map with AttributeValue + } + } + } + }; + clientCommand; + middlewareStack; + /** + * @internal + */ + resolveMiddleware(clientStack, configuration, options) { + this.addMarshallingMiddleware(configuration); + const stack = clientStack.concat(this.middlewareStack); + const handler = this.clientCommand.resolveMiddleware(stack, configuration, options); + return async () => handler(this.clientCommand); + } +}; + +// src/commands/DeleteCommand.ts + + +var DeleteCommand = class extends DynamoDBDocumentClientCommand { + constructor(input) { + super(); + this.input = input; + this.clientCommand = new import_client_dynamodb.DeleteItemCommand(this.input); + this.middlewareStack = this.clientCommand.middlewareStack; + } + static { + __name(this, "DeleteCommand"); + } + inputKeyNodes = { + Key: ALL_VALUES, + // map with AttributeValue + Expected: { + "*": { + Value: SELF, + AttributeValueList: ALL_MEMBERS + // set/list of AttributeValue + } + }, + ExpressionAttributeValues: ALL_VALUES + // map with AttributeValue + }; + outputKeyNodes = { + Attributes: ALL_VALUES, + // map with AttributeValue + ItemCollectionMetrics: { + ItemCollectionKey: ALL_VALUES + // map with AttributeValue + } + }; + clientCommand; + middlewareStack; + /** + * @internal + */ + resolveMiddleware(clientStack, configuration, options) { + this.addMarshallingMiddleware(configuration); + const stack = clientStack.concat(this.middlewareStack); + const handler = this.clientCommand.resolveMiddleware(stack, configuration, options); + return async () => 
handler(this.clientCommand); + } +}; + +// src/commands/ExecuteStatementCommand.ts + + +var ExecuteStatementCommand = class extends DynamoDBDocumentClientCommand { + constructor(input) { + super(); + this.input = input; + this.clientCommand = new import_client_dynamodb.ExecuteStatementCommand(this.input); + this.middlewareStack = this.clientCommand.middlewareStack; + } + static { + __name(this, "ExecuteStatementCommand"); + } + inputKeyNodes = { + Parameters: ALL_MEMBERS + // set/list of AttributeValue + }; + outputKeyNodes = { + Items: { + "*": ALL_VALUES + // map with AttributeValue + }, + LastEvaluatedKey: ALL_VALUES + // map with AttributeValue + }; + clientCommand; + middlewareStack; + /** + * @internal + */ + resolveMiddleware(clientStack, configuration, options) { + this.addMarshallingMiddleware(configuration); + const stack = clientStack.concat(this.middlewareStack); + const handler = this.clientCommand.resolveMiddleware(stack, configuration, options); + return async () => handler(this.clientCommand); + } +}; + +// src/commands/ExecuteTransactionCommand.ts + + +var ExecuteTransactionCommand = class extends DynamoDBDocumentClientCommand { + constructor(input) { + super(); + this.input = input; + this.clientCommand = new import_client_dynamodb.ExecuteTransactionCommand(this.input); + this.middlewareStack = this.clientCommand.middlewareStack; + } + static { + __name(this, "ExecuteTransactionCommand"); + } + inputKeyNodes = { + TransactStatements: { + "*": { + Parameters: ALL_MEMBERS + // set/list of AttributeValue + } + } + }; + outputKeyNodes = { + Responses: { + "*": { + Item: ALL_VALUES + // map with AttributeValue + } + } + }; + clientCommand; + middlewareStack; + /** + * @internal + */ + resolveMiddleware(clientStack, configuration, options) { + this.addMarshallingMiddleware(configuration); + const stack = clientStack.concat(this.middlewareStack); + const handler = this.clientCommand.resolveMiddleware(stack, configuration, options); + return async () => 
handler(this.clientCommand); + } +}; + +// src/commands/GetCommand.ts + + +var GetCommand = class extends DynamoDBDocumentClientCommand { + constructor(input) { + super(); + this.input = input; + this.clientCommand = new import_client_dynamodb.GetItemCommand(this.input); + this.middlewareStack = this.clientCommand.middlewareStack; + } + static { + __name(this, "GetCommand"); + } + inputKeyNodes = { + Key: ALL_VALUES + // map with AttributeValue + }; + outputKeyNodes = { + Item: ALL_VALUES + // map with AttributeValue + }; + clientCommand; + middlewareStack; + /** + * @internal + */ + resolveMiddleware(clientStack, configuration, options) { + this.addMarshallingMiddleware(configuration); + const stack = clientStack.concat(this.middlewareStack); + const handler = this.clientCommand.resolveMiddleware(stack, configuration, options); + return async () => handler(this.clientCommand); + } +}; + +// src/commands/PutCommand.ts + + +var PutCommand = class extends DynamoDBDocumentClientCommand { + constructor(input) { + super(); + this.input = input; + this.clientCommand = new import_client_dynamodb.PutItemCommand(this.input); + this.middlewareStack = this.clientCommand.middlewareStack; + } + static { + __name(this, "PutCommand"); + } + inputKeyNodes = { + Item: ALL_VALUES, + // map with AttributeValue + Expected: { + "*": { + Value: SELF, + AttributeValueList: ALL_MEMBERS + // set/list of AttributeValue + } + }, + ExpressionAttributeValues: ALL_VALUES + // map with AttributeValue + }; + outputKeyNodes = { + Attributes: ALL_VALUES, + // map with AttributeValue + ItemCollectionMetrics: { + ItemCollectionKey: ALL_VALUES + // map with AttributeValue + } + }; + clientCommand; + middlewareStack; + /** + * @internal + */ + resolveMiddleware(clientStack, configuration, options) { + this.addMarshallingMiddleware(configuration); + const stack = clientStack.concat(this.middlewareStack); + const handler = this.clientCommand.resolveMiddleware(stack, configuration, options); + return 
async () => handler(this.clientCommand); + } +}; + +// src/commands/QueryCommand.ts + + +var QueryCommand = class extends DynamoDBDocumentClientCommand { + constructor(input) { + super(); + this.input = input; + this.clientCommand = new import_client_dynamodb.QueryCommand(this.input); + this.middlewareStack = this.clientCommand.middlewareStack; + } + static { + __name(this, "QueryCommand"); + } + inputKeyNodes = { + KeyConditions: { + "*": { + AttributeValueList: ALL_MEMBERS + // set/list of AttributeValue + } + }, + QueryFilter: { + "*": { + AttributeValueList: ALL_MEMBERS + // set/list of AttributeValue + } + }, + ExclusiveStartKey: ALL_VALUES, + // map with AttributeValue + ExpressionAttributeValues: ALL_VALUES + // map with AttributeValue + }; + outputKeyNodes = { + Items: { + "*": ALL_VALUES + // map with AttributeValue + }, + LastEvaluatedKey: ALL_VALUES + // map with AttributeValue + }; + clientCommand; + middlewareStack; + /** + * @internal + */ + resolveMiddleware(clientStack, configuration, options) { + this.addMarshallingMiddleware(configuration); + const stack = clientStack.concat(this.middlewareStack); + const handler = this.clientCommand.resolveMiddleware(stack, configuration, options); + return async () => handler(this.clientCommand); + } +}; + +// src/commands/ScanCommand.ts + + +var ScanCommand = class extends DynamoDBDocumentClientCommand { + constructor(input) { + super(); + this.input = input; + this.clientCommand = new import_client_dynamodb.ScanCommand(this.input); + this.middlewareStack = this.clientCommand.middlewareStack; + } + static { + __name(this, "ScanCommand"); + } + inputKeyNodes = { + ScanFilter: { + "*": { + AttributeValueList: ALL_MEMBERS + // set/list of AttributeValue + } + }, + ExclusiveStartKey: ALL_VALUES, + // map with AttributeValue + ExpressionAttributeValues: ALL_VALUES + // map with AttributeValue + }; + outputKeyNodes = { + Items: { + "*": ALL_VALUES + // map with AttributeValue + }, + LastEvaluatedKey: ALL_VALUES + // 
map with AttributeValue + }; + clientCommand; + middlewareStack; + /** + * @internal + */ + resolveMiddleware(clientStack, configuration, options) { + this.addMarshallingMiddleware(configuration); + const stack = clientStack.concat(this.middlewareStack); + const handler = this.clientCommand.resolveMiddleware(stack, configuration, options); + return async () => handler(this.clientCommand); + } +}; + +// src/commands/TransactGetCommand.ts + + +var TransactGetCommand = class extends DynamoDBDocumentClientCommand { + constructor(input) { + super(); + this.input = input; + this.clientCommand = new import_client_dynamodb.TransactGetItemsCommand(this.input); + this.middlewareStack = this.clientCommand.middlewareStack; + } + static { + __name(this, "TransactGetCommand"); + } + inputKeyNodes = { + TransactItems: { + "*": { + Get: { + Key: ALL_VALUES + // map with AttributeValue + } + } + } + }; + outputKeyNodes = { + Responses: { + "*": { + Item: ALL_VALUES + // map with AttributeValue + } + } + }; + clientCommand; + middlewareStack; + /** + * @internal + */ + resolveMiddleware(clientStack, configuration, options) { + this.addMarshallingMiddleware(configuration); + const stack = clientStack.concat(this.middlewareStack); + const handler = this.clientCommand.resolveMiddleware(stack, configuration, options); + return async () => handler(this.clientCommand); + } +}; + +// src/commands/TransactWriteCommand.ts + + +var TransactWriteCommand = class extends DynamoDBDocumentClientCommand { + constructor(input) { + super(); + this.input = input; + this.clientCommand = new import_client_dynamodb.TransactWriteItemsCommand(this.input); + this.middlewareStack = this.clientCommand.middlewareStack; + } + static { + __name(this, "TransactWriteCommand"); + } + inputKeyNodes = { + TransactItems: { + "*": { + ConditionCheck: { + Key: ALL_VALUES, + // map with AttributeValue + ExpressionAttributeValues: ALL_VALUES + // map with AttributeValue + }, + Put: { + Item: ALL_VALUES, + // map with 
AttributeValue + ExpressionAttributeValues: ALL_VALUES + // map with AttributeValue + }, + Delete: { + Key: ALL_VALUES, + // map with AttributeValue + ExpressionAttributeValues: ALL_VALUES + // map with AttributeValue + }, + Update: { + Key: ALL_VALUES, + // map with AttributeValue + ExpressionAttributeValues: ALL_VALUES + // map with AttributeValue + } + } + } + }; + outputKeyNodes = { + ItemCollectionMetrics: { + "*": { + "*": { + ItemCollectionKey: ALL_VALUES + // map with AttributeValue + } + } + } + }; + clientCommand; + middlewareStack; + /** + * @internal + */ + resolveMiddleware(clientStack, configuration, options) { + this.addMarshallingMiddleware(configuration); + const stack = clientStack.concat(this.middlewareStack); + const handler = this.clientCommand.resolveMiddleware(stack, configuration, options); + return async () => handler(this.clientCommand); + } +}; + +// src/commands/UpdateCommand.ts + + +var UpdateCommand = class extends DynamoDBDocumentClientCommand { + constructor(input) { + super(); + this.input = input; + this.clientCommand = new import_client_dynamodb.UpdateItemCommand(this.input); + this.middlewareStack = this.clientCommand.middlewareStack; + } + static { + __name(this, "UpdateCommand"); + } + inputKeyNodes = { + Key: ALL_VALUES, + // map with AttributeValue + AttributeUpdates: { + "*": { + Value: SELF + } + }, + Expected: { + "*": { + Value: SELF, + AttributeValueList: ALL_MEMBERS + // set/list of AttributeValue + } + }, + ExpressionAttributeValues: ALL_VALUES + // map with AttributeValue + }; + outputKeyNodes = { + Attributes: ALL_VALUES, + // map with AttributeValue + ItemCollectionMetrics: { + ItemCollectionKey: ALL_VALUES + // map with AttributeValue + } + }; + clientCommand; + middlewareStack; + /** + * @internal + */ + resolveMiddleware(clientStack, configuration, options) { + this.addMarshallingMiddleware(configuration); + const stack = clientStack.concat(this.middlewareStack); + const handler = 
this.clientCommand.resolveMiddleware(stack, configuration, options); + return async () => handler(this.clientCommand); + } +}; + +// src/DynamoDBDocumentClient.ts + +var DynamoDBDocumentClient = class _DynamoDBDocumentClient extends import_smithy_client.Client { + static { + __name(this, "DynamoDBDocumentClient"); + } + config; + constructor(client, translateConfig) { + super(client.config); + this.config = client.config; + this.config.translateConfig = translateConfig; + this.middlewareStack = client.middlewareStack; + if (this.config?.cacheMiddleware) { + throw new Error( + "@aws-sdk/lib-dynamodb - cacheMiddleware=true is not compatible with the DynamoDBDocumentClient. This option must be set to false." + ); + } + } + static from(client, translateConfig) { + return new _DynamoDBDocumentClient(client, translateConfig); + } + destroy() { + } +}; + +// src/DynamoDBDocument.ts +var DynamoDBDocument = class _DynamoDBDocument extends DynamoDBDocumentClient { + static { + __name(this, "DynamoDBDocument"); + } + static from(client, translateConfig) { + return new _DynamoDBDocument(client, translateConfig); + } + batchExecuteStatement(args, optionsOrCb, cb) { + const command = new BatchExecuteStatementCommand(args); + if (typeof optionsOrCb === "function") { + this.send(command, optionsOrCb); + } else if (typeof cb === "function") { + if (typeof optionsOrCb !== "object") { + throw new Error(`Expect http options but get ${typeof optionsOrCb}`); + } + this.send(command, optionsOrCb || {}, cb); + } else { + return this.send(command, optionsOrCb); + } + } + batchGet(args, optionsOrCb, cb) { + const command = new BatchGetCommand(args); + if (typeof optionsOrCb === "function") { + this.send(command, optionsOrCb); + } else if (typeof cb === "function") { + if (typeof optionsOrCb !== "object") { + throw new Error(`Expect http options but get ${typeof optionsOrCb}`); + } + this.send(command, optionsOrCb || {}, cb); + } else { + return this.send(command, optionsOrCb); + } + } + 
batchWrite(args, optionsOrCb, cb) { + const command = new BatchWriteCommand(args); + if (typeof optionsOrCb === "function") { + this.send(command, optionsOrCb); + } else if (typeof cb === "function") { + if (typeof optionsOrCb !== "object") { + throw new Error(`Expect http options but get ${typeof optionsOrCb}`); + } + this.send(command, optionsOrCb || {}, cb); + } else { + return this.send(command, optionsOrCb); + } + } + delete(args, optionsOrCb, cb) { + const command = new DeleteCommand(args); + if (typeof optionsOrCb === "function") { + this.send(command, optionsOrCb); + } else if (typeof cb === "function") { + if (typeof optionsOrCb !== "object") { + throw new Error(`Expect http options but get ${typeof optionsOrCb}`); + } + this.send(command, optionsOrCb || {}, cb); + } else { + return this.send(command, optionsOrCb); + } + } + executeStatement(args, optionsOrCb, cb) { + const command = new ExecuteStatementCommand(args); + if (typeof optionsOrCb === "function") { + this.send(command, optionsOrCb); + } else if (typeof cb === "function") { + if (typeof optionsOrCb !== "object") { + throw new Error(`Expect http options but get ${typeof optionsOrCb}`); + } + this.send(command, optionsOrCb || {}, cb); + } else { + return this.send(command, optionsOrCb); + } + } + executeTransaction(args, optionsOrCb, cb) { + const command = new ExecuteTransactionCommand(args); + if (typeof optionsOrCb === "function") { + this.send(command, optionsOrCb); + } else if (typeof cb === "function") { + if (typeof optionsOrCb !== "object") { + throw new Error(`Expect http options but get ${typeof optionsOrCb}`); + } + this.send(command, optionsOrCb || {}, cb); + } else { + return this.send(command, optionsOrCb); + } + } + get(args, optionsOrCb, cb) { + const command = new GetCommand(args); + if (typeof optionsOrCb === "function") { + this.send(command, optionsOrCb); + } else if (typeof cb === "function") { + if (typeof optionsOrCb !== "object") { + throw new Error(`Expect http options but 
get ${typeof optionsOrCb}`); + } + this.send(command, optionsOrCb || {}, cb); + } else { + return this.send(command, optionsOrCb); + } + } + put(args, optionsOrCb, cb) { + const command = new PutCommand(args); + if (typeof optionsOrCb === "function") { + this.send(command, optionsOrCb); + } else if (typeof cb === "function") { + if (typeof optionsOrCb !== "object") { + throw new Error(`Expect http options but get ${typeof optionsOrCb}`); + } + this.send(command, optionsOrCb || {}, cb); + } else { + return this.send(command, optionsOrCb); + } + } + query(args, optionsOrCb, cb) { + const command = new QueryCommand(args); + if (typeof optionsOrCb === "function") { + this.send(command, optionsOrCb); + } else if (typeof cb === "function") { + if (typeof optionsOrCb !== "object") { + throw new Error(`Expect http options but get ${typeof optionsOrCb}`); + } + this.send(command, optionsOrCb || {}, cb); + } else { + return this.send(command, optionsOrCb); + } + } + scan(args, optionsOrCb, cb) { + const command = new ScanCommand(args); + if (typeof optionsOrCb === "function") { + this.send(command, optionsOrCb); + } else if (typeof cb === "function") { + if (typeof optionsOrCb !== "object") { + throw new Error(`Expect http options but get ${typeof optionsOrCb}`); + } + this.send(command, optionsOrCb || {}, cb); + } else { + return this.send(command, optionsOrCb); + } + } + transactGet(args, optionsOrCb, cb) { + const command = new TransactGetCommand(args); + if (typeof optionsOrCb === "function") { + this.send(command, optionsOrCb); + } else if (typeof cb === "function") { + if (typeof optionsOrCb !== "object") { + throw new Error(`Expect http options but get ${typeof optionsOrCb}`); + } + this.send(command, optionsOrCb || {}, cb); + } else { + return this.send(command, optionsOrCb); + } + } + transactWrite(args, optionsOrCb, cb) { + const command = new TransactWriteCommand(args); + if (typeof optionsOrCb === "function") { + this.send(command, optionsOrCb); + } else if 
(typeof cb === "function") { + if (typeof optionsOrCb !== "object") { + throw new Error(`Expect http options but get ${typeof optionsOrCb}`); + } + this.send(command, optionsOrCb || {}, cb); + } else { + return this.send(command, optionsOrCb); + } + } + update(args, optionsOrCb, cb) { + const command = new UpdateCommand(args); + if (typeof optionsOrCb === "function") { + this.send(command, optionsOrCb); + } else if (typeof cb === "function") { + if (typeof optionsOrCb !== "object") { + throw new Error(`Expect http options but get ${typeof optionsOrCb}`); + } + this.send(command, optionsOrCb || {}, cb); + } else { + return this.send(command, optionsOrCb); + } + } +}; + +// src/pagination/Interfaces.ts +var import_types = require("@smithy/types"); + +// src/pagination/QueryPaginator.ts +var import_core2 = require("@smithy/core"); + +var paginateQuery = (0, import_core2.createPaginator)(DynamoDBDocumentClient, QueryCommand, "ExclusiveStartKey", "LastEvaluatedKey", "Limit"); + +// src/pagination/ScanPaginator.ts +var import_core3 = require("@smithy/core"); + +var paginateScan = (0, import_core3.createPaginator)(DynamoDBDocumentClient, ScanCommand, "ExclusiveStartKey", "LastEvaluatedKey", "Limit"); + +// src/index.ts + + + +// Annotate the CommonJS export names for ESM import in node: + +0 && (module.exports = { + NumberValue, + DynamoDBDocument, + __Client, + DynamoDBDocumentClient, + DynamoDBDocumentClientCommand, + $Command, + BatchExecuteStatementCommand, + BatchGetCommand, + BatchWriteCommand, + DeleteCommand, + ExecuteStatementCommand, + ExecuteTransactionCommand, + GetCommand, + PutCommand, + QueryCommand, + ScanCommand, + TransactGetCommand, + TransactWriteCommand, + UpdateCommand, + paginateQuery, + paginateScan +}); + diff --git a/amplify/functions/deleteDocument/node_modules/@aws-sdk/lib-dynamodb/dist-es/DynamoDBDocument.js b/amplify/functions/deleteDocument/node_modules/@aws-sdk/lib-dynamodb/dist-es/DynamoDBDocument.js new file mode 100644 index 
0000000..206f25a --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@aws-sdk/lib-dynamodb/dist-es/DynamoDBDocument.js @@ -0,0 +1,214 @@ +import { BatchExecuteStatementCommand, } from "./commands/BatchExecuteStatementCommand"; +import { BatchGetCommand } from "./commands/BatchGetCommand"; +import { BatchWriteCommand } from "./commands/BatchWriteCommand"; +import { DeleteCommand } from "./commands/DeleteCommand"; +import { ExecuteStatementCommand, } from "./commands/ExecuteStatementCommand"; +import { ExecuteTransactionCommand, } from "./commands/ExecuteTransactionCommand"; +import { GetCommand } from "./commands/GetCommand"; +import { PutCommand } from "./commands/PutCommand"; +import { QueryCommand } from "./commands/QueryCommand"; +import { ScanCommand } from "./commands/ScanCommand"; +import { TransactGetCommand } from "./commands/TransactGetCommand"; +import { TransactWriteCommand, } from "./commands/TransactWriteCommand"; +import { UpdateCommand } from "./commands/UpdateCommand"; +import { DynamoDBDocumentClient } from "./DynamoDBDocumentClient"; +export class DynamoDBDocument extends DynamoDBDocumentClient { + static from(client, translateConfig) { + return new DynamoDBDocument(client, translateConfig); + } + batchExecuteStatement(args, optionsOrCb, cb) { + const command = new BatchExecuteStatementCommand(args); + if (typeof optionsOrCb === "function") { + this.send(command, optionsOrCb); + } + else if (typeof cb === "function") { + if (typeof optionsOrCb !== "object") { + throw new Error(`Expect http options but get ${typeof optionsOrCb}`); + } + this.send(command, optionsOrCb || {}, cb); + } + else { + return this.send(command, optionsOrCb); + } + } + batchGet(args, optionsOrCb, cb) { + const command = new BatchGetCommand(args); + if (typeof optionsOrCb === "function") { + this.send(command, optionsOrCb); + } + else if (typeof cb === "function") { + if (typeof optionsOrCb !== "object") { + throw new Error(`Expect http options but get ${typeof 
optionsOrCb}`); + } + this.send(command, optionsOrCb || {}, cb); + } + else { + return this.send(command, optionsOrCb); + } + } + batchWrite(args, optionsOrCb, cb) { + const command = new BatchWriteCommand(args); + if (typeof optionsOrCb === "function") { + this.send(command, optionsOrCb); + } + else if (typeof cb === "function") { + if (typeof optionsOrCb !== "object") { + throw new Error(`Expect http options but get ${typeof optionsOrCb}`); + } + this.send(command, optionsOrCb || {}, cb); + } + else { + return this.send(command, optionsOrCb); + } + } + delete(args, optionsOrCb, cb) { + const command = new DeleteCommand(args); + if (typeof optionsOrCb === "function") { + this.send(command, optionsOrCb); + } + else if (typeof cb === "function") { + if (typeof optionsOrCb !== "object") { + throw new Error(`Expect http options but get ${typeof optionsOrCb}`); + } + this.send(command, optionsOrCb || {}, cb); + } + else { + return this.send(command, optionsOrCb); + } + } + executeStatement(args, optionsOrCb, cb) { + const command = new ExecuteStatementCommand(args); + if (typeof optionsOrCb === "function") { + this.send(command, optionsOrCb); + } + else if (typeof cb === "function") { + if (typeof optionsOrCb !== "object") { + throw new Error(`Expect http options but get ${typeof optionsOrCb}`); + } + this.send(command, optionsOrCb || {}, cb); + } + else { + return this.send(command, optionsOrCb); + } + } + executeTransaction(args, optionsOrCb, cb) { + const command = new ExecuteTransactionCommand(args); + if (typeof optionsOrCb === "function") { + this.send(command, optionsOrCb); + } + else if (typeof cb === "function") { + if (typeof optionsOrCb !== "object") { + throw new Error(`Expect http options but get ${typeof optionsOrCb}`); + } + this.send(command, optionsOrCb || {}, cb); + } + else { + return this.send(command, optionsOrCb); + } + } + get(args, optionsOrCb, cb) { + const command = new GetCommand(args); + if (typeof optionsOrCb === "function") { + 
this.send(command, optionsOrCb); + } + else if (typeof cb === "function") { + if (typeof optionsOrCb !== "object") { + throw new Error(`Expect http options but get ${typeof optionsOrCb}`); + } + this.send(command, optionsOrCb || {}, cb); + } + else { + return this.send(command, optionsOrCb); + } + } + put(args, optionsOrCb, cb) { + const command = new PutCommand(args); + if (typeof optionsOrCb === "function") { + this.send(command, optionsOrCb); + } + else if (typeof cb === "function") { + if (typeof optionsOrCb !== "object") { + throw new Error(`Expect http options but get ${typeof optionsOrCb}`); + } + this.send(command, optionsOrCb || {}, cb); + } + else { + return this.send(command, optionsOrCb); + } + } + query(args, optionsOrCb, cb) { + const command = new QueryCommand(args); + if (typeof optionsOrCb === "function") { + this.send(command, optionsOrCb); + } + else if (typeof cb === "function") { + if (typeof optionsOrCb !== "object") { + throw new Error(`Expect http options but get ${typeof optionsOrCb}`); + } + this.send(command, optionsOrCb || {}, cb); + } + else { + return this.send(command, optionsOrCb); + } + } + scan(args, optionsOrCb, cb) { + const command = new ScanCommand(args); + if (typeof optionsOrCb === "function") { + this.send(command, optionsOrCb); + } + else if (typeof cb === "function") { + if (typeof optionsOrCb !== "object") { + throw new Error(`Expect http options but get ${typeof optionsOrCb}`); + } + this.send(command, optionsOrCb || {}, cb); + } + else { + return this.send(command, optionsOrCb); + } + } + transactGet(args, optionsOrCb, cb) { + const command = new TransactGetCommand(args); + if (typeof optionsOrCb === "function") { + this.send(command, optionsOrCb); + } + else if (typeof cb === "function") { + if (typeof optionsOrCb !== "object") { + throw new Error(`Expect http options but get ${typeof optionsOrCb}`); + } + this.send(command, optionsOrCb || {}, cb); + } + else { + return this.send(command, optionsOrCb); + } + } + 
transactWrite(args, optionsOrCb, cb) { + const command = new TransactWriteCommand(args); + if (typeof optionsOrCb === "function") { + this.send(command, optionsOrCb); + } + else if (typeof cb === "function") { + if (typeof optionsOrCb !== "object") { + throw new Error(`Expect http options but get ${typeof optionsOrCb}`); + } + this.send(command, optionsOrCb || {}, cb); + } + else { + return this.send(command, optionsOrCb); + } + } + update(args, optionsOrCb, cb) { + const command = new UpdateCommand(args); + if (typeof optionsOrCb === "function") { + this.send(command, optionsOrCb); + } + else if (typeof cb === "function") { + if (typeof optionsOrCb !== "object") { + throw new Error(`Expect http options but get ${typeof optionsOrCb}`); + } + this.send(command, optionsOrCb || {}, cb); + } + else { + return this.send(command, optionsOrCb); + } + } +} diff --git a/amplify/functions/deleteDocument/node_modules/@aws-sdk/lib-dynamodb/dist-es/DynamoDBDocumentClient.js b/amplify/functions/deleteDocument/node_modules/@aws-sdk/lib-dynamodb/dist-es/DynamoDBDocumentClient.js new file mode 100644 index 0000000..f8dc9bb --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@aws-sdk/lib-dynamodb/dist-es/DynamoDBDocumentClient.js @@ -0,0 +1,20 @@ +import { Client as __Client } from "@smithy/smithy-client"; +export { __Client }; +export class DynamoDBDocumentClient extends __Client { + config; + constructor(client, translateConfig) { + super(client.config); + this.config = client.config; + this.config.translateConfig = translateConfig; + this.middlewareStack = client.middlewareStack; + if (this.config?.cacheMiddleware) { + throw new Error("@aws-sdk/lib-dynamodb - cacheMiddleware=true is not compatible with the" + + " DynamoDBDocumentClient. 
This option must be set to false."); + } + } + static from(client, translateConfig) { + return new DynamoDBDocumentClient(client, translateConfig); + } + destroy() { + } +} diff --git a/amplify/functions/deleteDocument/node_modules/@aws-sdk/lib-dynamodb/dist-es/baseCommand/DynamoDBDocumentClientCommand.js b/amplify/functions/deleteDocument/node_modules/@aws-sdk/lib-dynamodb/dist-es/baseCommand/DynamoDBDocumentClientCommand.js new file mode 100644 index 0000000..5f751c1 --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@aws-sdk/lib-dynamodb/dist-es/baseCommand/DynamoDBDocumentClientCommand.js @@ -0,0 +1,30 @@ +import { setFeature } from "@aws-sdk/core"; +import { Command as $Command } from "@smithy/smithy-client"; +import { marshallInput, unmarshallOutput } from "../commands/utils"; +export class DynamoDBDocumentClientCommand extends $Command { + addMarshallingMiddleware(configuration) { + const { marshallOptions = {}, unmarshallOptions = {} } = configuration.translateConfig || {}; + marshallOptions.convertTopLevelContainer = marshallOptions.convertTopLevelContainer ?? true; + unmarshallOptions.convertWithoutMapWrapper = unmarshallOptions.convertWithoutMapWrapper ?? 
true; + this.clientCommand.middlewareStack.addRelativeTo((next, context) => async (args) => { + setFeature(context, "DDB_MAPPER", "d"); + args.input = marshallInput(args.input, this.inputKeyNodes, marshallOptions); + return next(args); + }, { + name: "DocumentMarshall", + relation: "before", + toMiddleware: "serializerMiddleware", + override: true, + }); + this.clientCommand.middlewareStack.addRelativeTo((next, context) => async (args) => { + const deserialized = await next(args); + deserialized.output = unmarshallOutput(deserialized.output, this.outputKeyNodes, unmarshallOptions); + return deserialized; + }, { + name: "DocumentUnmarshall", + relation: "before", + toMiddleware: "deserializerMiddleware", + override: true, + }); + } +} diff --git a/amplify/functions/deleteDocument/node_modules/@aws-sdk/lib-dynamodb/dist-es/commands/BatchExecuteStatementCommand.js b/amplify/functions/deleteDocument/node_modules/@aws-sdk/lib-dynamodb/dist-es/commands/BatchExecuteStatementCommand.js new file mode 100644 index 0000000..ee5acdc --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@aws-sdk/lib-dynamodb/dist-es/commands/BatchExecuteStatementCommand.js @@ -0,0 +1,39 @@ +import { BatchExecuteStatementCommand as __BatchExecuteStatementCommand } from "@aws-sdk/client-dynamodb"; +import { Command as $Command } from "@smithy/smithy-client"; +import { DynamoDBDocumentClientCommand } from "../baseCommand/DynamoDBDocumentClientCommand"; +import { ALL_MEMBERS, ALL_VALUES } from "../commands/utils"; +export { DynamoDBDocumentClientCommand, $Command }; +export class BatchExecuteStatementCommand extends DynamoDBDocumentClientCommand { + input; + inputKeyNodes = { + Statements: { + "*": { + Parameters: ALL_MEMBERS, + }, + }, + }; + outputKeyNodes = { + Responses: { + "*": { + Error: { + Item: ALL_VALUES, + }, + Item: ALL_VALUES, + }, + }, + }; + clientCommand; + middlewareStack; + constructor(input) { + super(); + this.input = input; + this.clientCommand = new 
__BatchExecuteStatementCommand(this.input); + this.middlewareStack = this.clientCommand.middlewareStack; + } + resolveMiddleware(clientStack, configuration, options) { + this.addMarshallingMiddleware(configuration); + const stack = clientStack.concat(this.middlewareStack); + const handler = this.clientCommand.resolveMiddleware(stack, configuration, options); + return async () => handler(this.clientCommand); + } +} diff --git a/amplify/functions/deleteDocument/node_modules/@aws-sdk/lib-dynamodb/dist-es/commands/BatchGetCommand.js b/amplify/functions/deleteDocument/node_modules/@aws-sdk/lib-dynamodb/dist-es/commands/BatchGetCommand.js new file mode 100644 index 0000000..bab370e --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@aws-sdk/lib-dynamodb/dist-es/commands/BatchGetCommand.js @@ -0,0 +1,45 @@ +import { BatchGetItemCommand as __BatchGetItemCommand } from "@aws-sdk/client-dynamodb"; +import { Command as $Command } from "@smithy/smithy-client"; +import { DynamoDBDocumentClientCommand } from "../baseCommand/DynamoDBDocumentClientCommand"; +import { ALL_VALUES } from "../commands/utils"; +export { DynamoDBDocumentClientCommand, $Command }; +export class BatchGetCommand extends DynamoDBDocumentClientCommand { + input; + inputKeyNodes = { + RequestItems: { + "*": { + Keys: { + "*": ALL_VALUES, + }, + }, + }, + }; + outputKeyNodes = { + Responses: { + "*": { + "*": ALL_VALUES, + }, + }, + UnprocessedKeys: { + "*": { + Keys: { + "*": ALL_VALUES, + }, + }, + }, + }; + clientCommand; + middlewareStack; + constructor(input) { + super(); + this.input = input; + this.clientCommand = new __BatchGetItemCommand(this.input); + this.middlewareStack = this.clientCommand.middlewareStack; + } + resolveMiddleware(clientStack, configuration, options) { + this.addMarshallingMiddleware(configuration); + const stack = clientStack.concat(this.middlewareStack); + const handler = this.clientCommand.resolveMiddleware(stack, configuration, options); + return async () => 
handler(this.clientCommand); + } +} diff --git a/amplify/functions/deleteDocument/node_modules/@aws-sdk/lib-dynamodb/dist-es/commands/BatchWriteCommand.js b/amplify/functions/deleteDocument/node_modules/@aws-sdk/lib-dynamodb/dist-es/commands/BatchWriteCommand.js new file mode 100644 index 0000000..c2a480a --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@aws-sdk/lib-dynamodb/dist-es/commands/BatchWriteCommand.js @@ -0,0 +1,57 @@ +import { BatchWriteItemCommand as __BatchWriteItemCommand } from "@aws-sdk/client-dynamodb"; +import { Command as $Command } from "@smithy/smithy-client"; +import { DynamoDBDocumentClientCommand } from "../baseCommand/DynamoDBDocumentClientCommand"; +import { ALL_VALUES } from "../commands/utils"; +export { DynamoDBDocumentClientCommand, $Command }; +export class BatchWriteCommand extends DynamoDBDocumentClientCommand { + input; + inputKeyNodes = { + RequestItems: { + "*": { + "*": { + PutRequest: { + Item: ALL_VALUES, + }, + DeleteRequest: { + Key: ALL_VALUES, + }, + }, + }, + }, + }; + outputKeyNodes = { + UnprocessedItems: { + "*": { + "*": { + PutRequest: { + Item: ALL_VALUES, + }, + DeleteRequest: { + Key: ALL_VALUES, + }, + }, + }, + }, + ItemCollectionMetrics: { + "*": { + "*": { + ItemCollectionKey: ALL_VALUES, + }, + }, + }, + }; + clientCommand; + middlewareStack; + constructor(input) { + super(); + this.input = input; + this.clientCommand = new __BatchWriteItemCommand(this.input); + this.middlewareStack = this.clientCommand.middlewareStack; + } + resolveMiddleware(clientStack, configuration, options) { + this.addMarshallingMiddleware(configuration); + const stack = clientStack.concat(this.middlewareStack); + const handler = this.clientCommand.resolveMiddleware(stack, configuration, options); + return async () => handler(this.clientCommand); + } +} diff --git a/amplify/functions/deleteDocument/node_modules/@aws-sdk/lib-dynamodb/dist-es/commands/DeleteCommand.js 
b/amplify/functions/deleteDocument/node_modules/@aws-sdk/lib-dynamodb/dist-es/commands/DeleteCommand.js new file mode 100644 index 0000000..5622ae6 --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@aws-sdk/lib-dynamodb/dist-es/commands/DeleteCommand.js @@ -0,0 +1,38 @@ +import { DeleteItemCommand as __DeleteItemCommand } from "@aws-sdk/client-dynamodb"; +import { Command as $Command } from "@smithy/smithy-client"; +import { DynamoDBDocumentClientCommand } from "../baseCommand/DynamoDBDocumentClientCommand"; +import { ALL_MEMBERS, ALL_VALUES, SELF } from "../commands/utils"; +export { DynamoDBDocumentClientCommand, $Command }; +export class DeleteCommand extends DynamoDBDocumentClientCommand { + input; + inputKeyNodes = { + Key: ALL_VALUES, + Expected: { + "*": { + Value: SELF, + AttributeValueList: ALL_MEMBERS, + }, + }, + ExpressionAttributeValues: ALL_VALUES, + }; + outputKeyNodes = { + Attributes: ALL_VALUES, + ItemCollectionMetrics: { + ItemCollectionKey: ALL_VALUES, + }, + }; + clientCommand; + middlewareStack; + constructor(input) { + super(); + this.input = input; + this.clientCommand = new __DeleteItemCommand(this.input); + this.middlewareStack = this.clientCommand.middlewareStack; + } + resolveMiddleware(clientStack, configuration, options) { + this.addMarshallingMiddleware(configuration); + const stack = clientStack.concat(this.middlewareStack); + const handler = this.clientCommand.resolveMiddleware(stack, configuration, options); + return async () => handler(this.clientCommand); + } +} diff --git a/amplify/functions/deleteDocument/node_modules/@aws-sdk/lib-dynamodb/dist-es/commands/ExecuteStatementCommand.js b/amplify/functions/deleteDocument/node_modules/@aws-sdk/lib-dynamodb/dist-es/commands/ExecuteStatementCommand.js new file mode 100644 index 0000000..e3f3cfa --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@aws-sdk/lib-dynamodb/dist-es/commands/ExecuteStatementCommand.js @@ -0,0 +1,31 @@ +import { 
ExecuteStatementCommand as __ExecuteStatementCommand } from "@aws-sdk/client-dynamodb"; +import { Command as $Command } from "@smithy/smithy-client"; +import { DynamoDBDocumentClientCommand } from "../baseCommand/DynamoDBDocumentClientCommand"; +import { ALL_MEMBERS, ALL_VALUES } from "../commands/utils"; +export { DynamoDBDocumentClientCommand, $Command }; +export class ExecuteStatementCommand extends DynamoDBDocumentClientCommand { + input; + inputKeyNodes = { + Parameters: ALL_MEMBERS, + }; + outputKeyNodes = { + Items: { + "*": ALL_VALUES, + }, + LastEvaluatedKey: ALL_VALUES, + }; + clientCommand; + middlewareStack; + constructor(input) { + super(); + this.input = input; + this.clientCommand = new __ExecuteStatementCommand(this.input); + this.middlewareStack = this.clientCommand.middlewareStack; + } + resolveMiddleware(clientStack, configuration, options) { + this.addMarshallingMiddleware(configuration); + const stack = clientStack.concat(this.middlewareStack); + const handler = this.clientCommand.resolveMiddleware(stack, configuration, options); + return async () => handler(this.clientCommand); + } +} diff --git a/amplify/functions/deleteDocument/node_modules/@aws-sdk/lib-dynamodb/dist-es/commands/ExecuteTransactionCommand.js b/amplify/functions/deleteDocument/node_modules/@aws-sdk/lib-dynamodb/dist-es/commands/ExecuteTransactionCommand.js new file mode 100644 index 0000000..3c575aa --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@aws-sdk/lib-dynamodb/dist-es/commands/ExecuteTransactionCommand.js @@ -0,0 +1,36 @@ +import { ExecuteTransactionCommand as __ExecuteTransactionCommand } from "@aws-sdk/client-dynamodb"; +import { Command as $Command } from "@smithy/smithy-client"; +import { DynamoDBDocumentClientCommand } from "../baseCommand/DynamoDBDocumentClientCommand"; +import { ALL_MEMBERS, ALL_VALUES } from "../commands/utils"; +export { DynamoDBDocumentClientCommand, $Command }; +export class ExecuteTransactionCommand extends 
DynamoDBDocumentClientCommand { + input; + inputKeyNodes = { + TransactStatements: { + "*": { + Parameters: ALL_MEMBERS, + }, + }, + }; + outputKeyNodes = { + Responses: { + "*": { + Item: ALL_VALUES, + }, + }, + }; + clientCommand; + middlewareStack; + constructor(input) { + super(); + this.input = input; + this.clientCommand = new __ExecuteTransactionCommand(this.input); + this.middlewareStack = this.clientCommand.middlewareStack; + } + resolveMiddleware(clientStack, configuration, options) { + this.addMarshallingMiddleware(configuration); + const stack = clientStack.concat(this.middlewareStack); + const handler = this.clientCommand.resolveMiddleware(stack, configuration, options); + return async () => handler(this.clientCommand); + } +} diff --git a/amplify/functions/deleteDocument/node_modules/@aws-sdk/lib-dynamodb/dist-es/commands/GetCommand.js b/amplify/functions/deleteDocument/node_modules/@aws-sdk/lib-dynamodb/dist-es/commands/GetCommand.js new file mode 100644 index 0000000..14ae2f3 --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@aws-sdk/lib-dynamodb/dist-es/commands/GetCommand.js @@ -0,0 +1,28 @@ +import { GetItemCommand as __GetItemCommand } from "@aws-sdk/client-dynamodb"; +import { Command as $Command } from "@smithy/smithy-client"; +import { DynamoDBDocumentClientCommand } from "../baseCommand/DynamoDBDocumentClientCommand"; +import { ALL_VALUES } from "../commands/utils"; +export { DynamoDBDocumentClientCommand, $Command }; +export class GetCommand extends DynamoDBDocumentClientCommand { + input; + inputKeyNodes = { + Key: ALL_VALUES, + }; + outputKeyNodes = { + Item: ALL_VALUES, + }; + clientCommand; + middlewareStack; + constructor(input) { + super(); + this.input = input; + this.clientCommand = new __GetItemCommand(this.input); + this.middlewareStack = this.clientCommand.middlewareStack; + } + resolveMiddleware(clientStack, configuration, options) { + this.addMarshallingMiddleware(configuration); + const stack = 
clientStack.concat(this.middlewareStack); + const handler = this.clientCommand.resolveMiddleware(stack, configuration, options); + return async () => handler(this.clientCommand); + } +} diff --git a/amplify/functions/deleteDocument/node_modules/@aws-sdk/lib-dynamodb/dist-es/commands/PutCommand.js b/amplify/functions/deleteDocument/node_modules/@aws-sdk/lib-dynamodb/dist-es/commands/PutCommand.js new file mode 100644 index 0000000..51ae545 --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@aws-sdk/lib-dynamodb/dist-es/commands/PutCommand.js @@ -0,0 +1,38 @@ +import { PutItemCommand as __PutItemCommand } from "@aws-sdk/client-dynamodb"; +import { Command as $Command } from "@smithy/smithy-client"; +import { DynamoDBDocumentClientCommand } from "../baseCommand/DynamoDBDocumentClientCommand"; +import { ALL_MEMBERS, ALL_VALUES, SELF } from "../commands/utils"; +export { DynamoDBDocumentClientCommand, $Command }; +export class PutCommand extends DynamoDBDocumentClientCommand { + input; + inputKeyNodes = { + Item: ALL_VALUES, + Expected: { + "*": { + Value: SELF, + AttributeValueList: ALL_MEMBERS, + }, + }, + ExpressionAttributeValues: ALL_VALUES, + }; + outputKeyNodes = { + Attributes: ALL_VALUES, + ItemCollectionMetrics: { + ItemCollectionKey: ALL_VALUES, + }, + }; + clientCommand; + middlewareStack; + constructor(input) { + super(); + this.input = input; + this.clientCommand = new __PutItemCommand(this.input); + this.middlewareStack = this.clientCommand.middlewareStack; + } + resolveMiddleware(clientStack, configuration, options) { + this.addMarshallingMiddleware(configuration); + const stack = clientStack.concat(this.middlewareStack); + const handler = this.clientCommand.resolveMiddleware(stack, configuration, options); + return async () => handler(this.clientCommand); + } +} diff --git a/amplify/functions/deleteDocument/node_modules/@aws-sdk/lib-dynamodb/dist-es/commands/QueryCommand.js 
b/amplify/functions/deleteDocument/node_modules/@aws-sdk/lib-dynamodb/dist-es/commands/QueryCommand.js new file mode 100644 index 0000000..36a5418 --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@aws-sdk/lib-dynamodb/dist-es/commands/QueryCommand.js @@ -0,0 +1,42 @@ +import { QueryCommand as __QueryCommand } from "@aws-sdk/client-dynamodb"; +import { Command as $Command } from "@smithy/smithy-client"; +import { DynamoDBDocumentClientCommand } from "../baseCommand/DynamoDBDocumentClientCommand"; +import { ALL_MEMBERS, ALL_VALUES } from "../commands/utils"; +export { DynamoDBDocumentClientCommand, $Command }; +export class QueryCommand extends DynamoDBDocumentClientCommand { + input; + inputKeyNodes = { + KeyConditions: { + "*": { + AttributeValueList: ALL_MEMBERS, + }, + }, + QueryFilter: { + "*": { + AttributeValueList: ALL_MEMBERS, + }, + }, + ExclusiveStartKey: ALL_VALUES, + ExpressionAttributeValues: ALL_VALUES, + }; + outputKeyNodes = { + Items: { + "*": ALL_VALUES, + }, + LastEvaluatedKey: ALL_VALUES, + }; + clientCommand; + middlewareStack; + constructor(input) { + super(); + this.input = input; + this.clientCommand = new __QueryCommand(this.input); + this.middlewareStack = this.clientCommand.middlewareStack; + } + resolveMiddleware(clientStack, configuration, options) { + this.addMarshallingMiddleware(configuration); + const stack = clientStack.concat(this.middlewareStack); + const handler = this.clientCommand.resolveMiddleware(stack, configuration, options); + return async () => handler(this.clientCommand); + } +} diff --git a/amplify/functions/deleteDocument/node_modules/@aws-sdk/lib-dynamodb/dist-es/commands/ScanCommand.js b/amplify/functions/deleteDocument/node_modules/@aws-sdk/lib-dynamodb/dist-es/commands/ScanCommand.js new file mode 100644 index 0000000..a5ba2c9 --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@aws-sdk/lib-dynamodb/dist-es/commands/ScanCommand.js @@ -0,0 +1,37 @@ +import { ScanCommand as 
__ScanCommand } from "@aws-sdk/client-dynamodb"; +import { Command as $Command } from "@smithy/smithy-client"; +import { DynamoDBDocumentClientCommand } from "../baseCommand/DynamoDBDocumentClientCommand"; +import { ALL_MEMBERS, ALL_VALUES } from "../commands/utils"; +export { DynamoDBDocumentClientCommand, $Command }; +export class ScanCommand extends DynamoDBDocumentClientCommand { + input; + inputKeyNodes = { + ScanFilter: { + "*": { + AttributeValueList: ALL_MEMBERS, + }, + }, + ExclusiveStartKey: ALL_VALUES, + ExpressionAttributeValues: ALL_VALUES, + }; + outputKeyNodes = { + Items: { + "*": ALL_VALUES, + }, + LastEvaluatedKey: ALL_VALUES, + }; + clientCommand; + middlewareStack; + constructor(input) { + super(); + this.input = input; + this.clientCommand = new __ScanCommand(this.input); + this.middlewareStack = this.clientCommand.middlewareStack; + } + resolveMiddleware(clientStack, configuration, options) { + this.addMarshallingMiddleware(configuration); + const stack = clientStack.concat(this.middlewareStack); + const handler = this.clientCommand.resolveMiddleware(stack, configuration, options); + return async () => handler(this.clientCommand); + } +} diff --git a/amplify/functions/deleteDocument/node_modules/@aws-sdk/lib-dynamodb/dist-es/commands/TransactGetCommand.js b/amplify/functions/deleteDocument/node_modules/@aws-sdk/lib-dynamodb/dist-es/commands/TransactGetCommand.js new file mode 100644 index 0000000..3de3ec5 --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@aws-sdk/lib-dynamodb/dist-es/commands/TransactGetCommand.js @@ -0,0 +1,38 @@ +import { TransactGetItemsCommand as __TransactGetItemsCommand } from "@aws-sdk/client-dynamodb"; +import { Command as $Command } from "@smithy/smithy-client"; +import { DynamoDBDocumentClientCommand } from "../baseCommand/DynamoDBDocumentClientCommand"; +import { ALL_VALUES } from "../commands/utils"; +export { DynamoDBDocumentClientCommand, $Command }; +export class TransactGetCommand extends 
DynamoDBDocumentClientCommand { + input; + inputKeyNodes = { + TransactItems: { + "*": { + Get: { + Key: ALL_VALUES, + }, + }, + }, + }; + outputKeyNodes = { + Responses: { + "*": { + Item: ALL_VALUES, + }, + }, + }; + clientCommand; + middlewareStack; + constructor(input) { + super(); + this.input = input; + this.clientCommand = new __TransactGetItemsCommand(this.input); + this.middlewareStack = this.clientCommand.middlewareStack; + } + resolveMiddleware(clientStack, configuration, options) { + this.addMarshallingMiddleware(configuration); + const stack = clientStack.concat(this.middlewareStack); + const handler = this.clientCommand.resolveMiddleware(stack, configuration, options); + return async () => handler(this.clientCommand); + } +} diff --git a/amplify/functions/deleteDocument/node_modules/@aws-sdk/lib-dynamodb/dist-es/commands/TransactWriteCommand.js b/amplify/functions/deleteDocument/node_modules/@aws-sdk/lib-dynamodb/dist-es/commands/TransactWriteCommand.js new file mode 100644 index 0000000..5f1fea0 --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@aws-sdk/lib-dynamodb/dist-es/commands/TransactWriteCommand.js @@ -0,0 +1,53 @@ +import { TransactWriteItemsCommand as __TransactWriteItemsCommand } from "@aws-sdk/client-dynamodb"; +import { Command as $Command } from "@smithy/smithy-client"; +import { DynamoDBDocumentClientCommand } from "../baseCommand/DynamoDBDocumentClientCommand"; +import { ALL_VALUES } from "../commands/utils"; +export { DynamoDBDocumentClientCommand, $Command }; +export class TransactWriteCommand extends DynamoDBDocumentClientCommand { + input; + inputKeyNodes = { + TransactItems: { + "*": { + ConditionCheck: { + Key: ALL_VALUES, + ExpressionAttributeValues: ALL_VALUES, + }, + Put: { + Item: ALL_VALUES, + ExpressionAttributeValues: ALL_VALUES, + }, + Delete: { + Key: ALL_VALUES, + ExpressionAttributeValues: ALL_VALUES, + }, + Update: { + Key: ALL_VALUES, + ExpressionAttributeValues: ALL_VALUES, + }, + }, + }, + }; + 
outputKeyNodes = { + ItemCollectionMetrics: { + "*": { + "*": { + ItemCollectionKey: ALL_VALUES, + }, + }, + }, + }; + clientCommand; + middlewareStack; + constructor(input) { + super(); + this.input = input; + this.clientCommand = new __TransactWriteItemsCommand(this.input); + this.middlewareStack = this.clientCommand.middlewareStack; + } + resolveMiddleware(clientStack, configuration, options) { + this.addMarshallingMiddleware(configuration); + const stack = clientStack.concat(this.middlewareStack); + const handler = this.clientCommand.resolveMiddleware(stack, configuration, options); + return async () => handler(this.clientCommand); + } +} diff --git a/amplify/functions/deleteDocument/node_modules/@aws-sdk/lib-dynamodb/dist-es/commands/UpdateCommand.js b/amplify/functions/deleteDocument/node_modules/@aws-sdk/lib-dynamodb/dist-es/commands/UpdateCommand.js new file mode 100644 index 0000000..a5a2da1 --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@aws-sdk/lib-dynamodb/dist-es/commands/UpdateCommand.js @@ -0,0 +1,43 @@ +import { UpdateItemCommand as __UpdateItemCommand } from "@aws-sdk/client-dynamodb"; +import { Command as $Command } from "@smithy/smithy-client"; +import { DynamoDBDocumentClientCommand } from "../baseCommand/DynamoDBDocumentClientCommand"; +import { ALL_MEMBERS, ALL_VALUES, SELF } from "../commands/utils"; +export { DynamoDBDocumentClientCommand, $Command }; +export class UpdateCommand extends DynamoDBDocumentClientCommand { + input; + inputKeyNodes = { + Key: ALL_VALUES, + AttributeUpdates: { + "*": { + Value: SELF, + }, + }, + Expected: { + "*": { + Value: SELF, + AttributeValueList: ALL_MEMBERS, + }, + }, + ExpressionAttributeValues: ALL_VALUES, + }; + outputKeyNodes = { + Attributes: ALL_VALUES, + ItemCollectionMetrics: { + ItemCollectionKey: ALL_VALUES, + }, + }; + clientCommand; + middlewareStack; + constructor(input) { + super(); + this.input = input; + this.clientCommand = new __UpdateItemCommand(this.input); + 
this.middlewareStack = this.clientCommand.middlewareStack; + } + resolveMiddleware(clientStack, configuration, options) { + this.addMarshallingMiddleware(configuration); + const stack = clientStack.concat(this.middlewareStack); + const handler = this.clientCommand.resolveMiddleware(stack, configuration, options); + return async () => handler(this.clientCommand); + } +} diff --git a/amplify/functions/deleteDocument/node_modules/@aws-sdk/lib-dynamodb/dist-es/commands/index.js b/amplify/functions/deleteDocument/node_modules/@aws-sdk/lib-dynamodb/dist-es/commands/index.js new file mode 100644 index 0000000..49e8a4e --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@aws-sdk/lib-dynamodb/dist-es/commands/index.js @@ -0,0 +1,13 @@ +export * from "./BatchExecuteStatementCommand"; +export * from "./BatchGetCommand"; +export * from "./BatchWriteCommand"; +export * from "./DeleteCommand"; +export * from "./ExecuteStatementCommand"; +export * from "./ExecuteTransactionCommand"; +export * from "./GetCommand"; +export * from "./PutCommand"; +export * from "./QueryCommand"; +export * from "./ScanCommand"; +export * from "./TransactGetCommand"; +export * from "./TransactWriteCommand"; +export * from "./UpdateCommand"; diff --git a/amplify/functions/deleteDocument/node_modules/@aws-sdk/lib-dynamodb/dist-es/commands/utils.js b/amplify/functions/deleteDocument/node_modules/@aws-sdk/lib-dynamodb/dist-es/commands/utils.js new file mode 100644 index 0000000..19c4e99 --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@aws-sdk/lib-dynamodb/dist-es/commands/utils.js @@ -0,0 +1,80 @@ +import { marshall, unmarshall } from "@aws-sdk/util-dynamodb"; +export const SELF = null; +export const ALL_VALUES = {}; +export const ALL_MEMBERS = []; +const NEXT_LEVEL = "*"; +const processObj = (obj, processFunc, keyNodes) => { + if (obj !== undefined) { + if (keyNodes == null) { + return processFunc(obj); + } + else { + const keys = Object.keys(keyNodes); + const 
goToNextLevel = keys.length === 1 && keys[0] === NEXT_LEVEL; + const someChildren = keys.length >= 1 && !goToNextLevel; + const allChildren = keys.length === 0; + if (someChildren) { + return processKeysInObj(obj, processFunc, keyNodes); + } + else if (allChildren) { + return processAllKeysInObj(obj, processFunc, SELF); + } + else if (goToNextLevel) { + return Object.entries(obj ?? {}).reduce((acc, [k, v]) => { + if (typeof v !== "function") { + acc[k] = processObj(v, processFunc, keyNodes[NEXT_LEVEL]); + } + return acc; + }, (Array.isArray(obj) ? [] : {})); + } + } + } + return undefined; +}; +const processKeysInObj = (obj, processFunc, keyNodes) => { + let accumulator; + if (Array.isArray(obj)) { + accumulator = obj.filter((item) => typeof item !== "function"); + } + else { + accumulator = {}; + for (const [k, v] of Object.entries(obj)) { + if (typeof v !== "function") { + accumulator[k] = v; + } + } + } + for (const [nodeKey, nodes] of Object.entries(keyNodes)) { + if (typeof obj[nodeKey] === "function") { + continue; + } + const processedValue = processObj(obj[nodeKey], processFunc, nodes); + if (processedValue !== undefined && typeof processedValue !== "function") { + accumulator[nodeKey] = processedValue; + } + } + return accumulator; +}; +const processAllKeysInObj = (obj, processFunc, keyNodes) => { + if (Array.isArray(obj)) { + return obj.filter((item) => typeof item !== "function").map((item) => processObj(item, processFunc, keyNodes)); + } + return Object.entries(obj).reduce((acc, [key, value]) => { + if (typeof value === "function") { + return acc; + } + const processedValue = processObj(value, processFunc, keyNodes); + if (processedValue !== undefined && typeof processedValue !== "function") { + acc[key] = processedValue; + } + return acc; + }, {}); +}; +export const marshallInput = (obj, keyNodes, options) => { + const marshallFunc = (toMarshall) => marshall(toMarshall, options); + return processKeysInObj(obj, marshallFunc, keyNodes); +}; +export const 
unmarshallOutput = (obj, keyNodes, options) => { + const unmarshallFunc = (toMarshall) => unmarshall(toMarshall, options); + return processKeysInObj(obj, unmarshallFunc, keyNodes); +}; diff --git a/amplify/functions/deleteDocument/node_modules/@aws-sdk/lib-dynamodb/dist-es/index.js b/amplify/functions/deleteDocument/node_modules/@aws-sdk/lib-dynamodb/dist-es/index.js new file mode 100644 index 0000000..77c5a74 --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@aws-sdk/lib-dynamodb/dist-es/index.js @@ -0,0 +1,5 @@ +export * from "./DynamoDBDocument"; +export * from "./DynamoDBDocumentClient"; +export * from "./commands"; +export * from "./pagination"; +export { NumberValueImpl as NumberValue } from "@aws-sdk/util-dynamodb"; diff --git a/amplify/functions/deleteDocument/node_modules/@aws-sdk/lib-dynamodb/dist-es/pagination/Interfaces.js b/amplify/functions/deleteDocument/node_modules/@aws-sdk/lib-dynamodb/dist-es/pagination/Interfaces.js new file mode 100644 index 0000000..cb0ff5c --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@aws-sdk/lib-dynamodb/dist-es/pagination/Interfaces.js @@ -0,0 +1 @@ +export {}; diff --git a/amplify/functions/deleteDocument/node_modules/@aws-sdk/lib-dynamodb/dist-es/pagination/QueryPaginator.js b/amplify/functions/deleteDocument/node_modules/@aws-sdk/lib-dynamodb/dist-es/pagination/QueryPaginator.js new file mode 100644 index 0000000..da9e058 --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@aws-sdk/lib-dynamodb/dist-es/pagination/QueryPaginator.js @@ -0,0 +1,4 @@ +import { createPaginator } from "@smithy/core"; +import { QueryCommand } from "../commands/QueryCommand"; +import { DynamoDBDocumentClient } from "../DynamoDBDocumentClient"; +export const paginateQuery = createPaginator(DynamoDBDocumentClient, QueryCommand, "ExclusiveStartKey", "LastEvaluatedKey", "Limit"); diff --git a/amplify/functions/deleteDocument/node_modules/@aws-sdk/lib-dynamodb/dist-es/pagination/ScanPaginator.js 
b/amplify/functions/deleteDocument/node_modules/@aws-sdk/lib-dynamodb/dist-es/pagination/ScanPaginator.js new file mode 100644 index 0000000..ae01799 --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@aws-sdk/lib-dynamodb/dist-es/pagination/ScanPaginator.js @@ -0,0 +1,4 @@ +import { createPaginator } from "@smithy/core"; +import { ScanCommand } from "../commands/ScanCommand"; +import { DynamoDBDocumentClient } from "../DynamoDBDocumentClient"; +export const paginateScan = createPaginator(DynamoDBDocumentClient, ScanCommand, "ExclusiveStartKey", "LastEvaluatedKey", "Limit"); diff --git a/amplify/functions/deleteDocument/node_modules/@aws-sdk/lib-dynamodb/dist-es/pagination/index.js b/amplify/functions/deleteDocument/node_modules/@aws-sdk/lib-dynamodb/dist-es/pagination/index.js new file mode 100644 index 0000000..0d9540e --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@aws-sdk/lib-dynamodb/dist-es/pagination/index.js @@ -0,0 +1,3 @@ +export * from "./Interfaces"; +export * from "./QueryPaginator"; +export * from "./ScanPaginator"; diff --git a/amplify/functions/deleteDocument/node_modules/@aws-sdk/lib-dynamodb/dist-types/DynamoDBDocument.d.ts b/amplify/functions/deleteDocument/node_modules/@aws-sdk/lib-dynamodb/dist-types/DynamoDBDocument.d.ts new file mode 100644 index 0000000..38d4a20 --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@aws-sdk/lib-dynamodb/dist-types/DynamoDBDocument.d.ts @@ -0,0 +1,195 @@ +import { DynamoDBClient } from "@aws-sdk/client-dynamodb"; +import { HttpHandlerOptions as __HttpHandlerOptions } from "@smithy/types"; +import { BatchExecuteStatementCommandInput, BatchExecuteStatementCommandOutput } from "./commands/BatchExecuteStatementCommand"; +import { BatchGetCommandInput, BatchGetCommandOutput } from "./commands/BatchGetCommand"; +import { BatchWriteCommandInput, BatchWriteCommandOutput } from "./commands/BatchWriteCommand"; +import { DeleteCommandInput, DeleteCommandOutput } from 
"./commands/DeleteCommand"; +import { ExecuteStatementCommandInput, ExecuteStatementCommandOutput } from "./commands/ExecuteStatementCommand"; +import { ExecuteTransactionCommandInput, ExecuteTransactionCommandOutput } from "./commands/ExecuteTransactionCommand"; +import { GetCommandInput, GetCommandOutput } from "./commands/GetCommand"; +import { PutCommandInput, PutCommandOutput } from "./commands/PutCommand"; +import { QueryCommandInput, QueryCommandOutput } from "./commands/QueryCommand"; +import { ScanCommandInput, ScanCommandOutput } from "./commands/ScanCommand"; +import { TransactGetCommandInput, TransactGetCommandOutput } from "./commands/TransactGetCommand"; +import { TransactWriteCommandInput, TransactWriteCommandOutput } from "./commands/TransactWriteCommand"; +import { UpdateCommandInput, UpdateCommandOutput } from "./commands/UpdateCommand"; +import { DynamoDBDocumentClient, TranslateConfig } from "./DynamoDBDocumentClient"; +/** + * The document client simplifies working with items in Amazon DynamoDB by + * abstracting away the notion of attribute values. This abstraction annotates native + * JavaScript types supplied as input parameters, as well as converts annotated + * response data to native JavaScript types. + * + * ## Marshalling Input and Unmarshalling Response Data + * + * The document client affords developers the use of native JavaScript types + * instead of `AttributeValue`s to simplify the JavaScript development + * experience with Amazon DynamoDB. JavaScript objects passed in as parameters + * are marshalled into `AttributeValue` shapes required by Amazon DynamoDB. + * Responses from DynamoDB are unmarshalled into plain JavaScript objects + * by the `DocumentClient`. The `DocumentClient` does not accept + * `AttributeValue`s in favor of native JavaScript types. 
+ * + * | JavaScript Type | DynamoDB AttributeValue | + * | :-------------------------------: | ----------------------- | + * | String | S | + * | Number / BigInt | N | + * | Boolean | BOOL | + * | null | NULL | + * | Array | L | + * | Object | M | + * | Set\ | BS | + * | Set\ | NS | + * | Set\ | SS | + * | Uint8Array, Buffer, File, Blob... | B | + * + * ### Example + * + * Here is an example list which is sent to DynamoDB client in an operation: + * + * ```json + * { "L": [{ "NULL": true }, { "BOOL": false }, { "N": 1 }, { "S": "two" }] } + * ``` + * + * The DynamoDB document client abstracts the attribute values as follows in + * both input and output: + * + * ```json + * [null, false, 1, "two"] + * ``` + * + * @see {@link https://www.npmjs.com/package/@aws-sdk/client-dynamodb | @aws-sdk/client-dynamodb} + */ +export declare class DynamoDBDocument extends DynamoDBDocumentClient { + static from(client: DynamoDBClient, translateConfig?: TranslateConfig): DynamoDBDocument; + /** + * Accepts native JavaScript types instead of `AttributeValue`s, and calls + * BatchExecuteStatementCommand operation from {@link @aws-sdk/client-dynamodb#BatchExecuteStatementCommand}. + * + * JavaScript objects passed in as parameters are marshalled into `AttributeValue` shapes + * required by Amazon DynamoDB. Responses from DynamoDB are unmarshalled into plain JavaScript objects. + */ + batchExecuteStatement(args: BatchExecuteStatementCommandInput, options?: __HttpHandlerOptions): Promise; + batchExecuteStatement(args: BatchExecuteStatementCommandInput, cb: (err: any, data?: BatchExecuteStatementCommandOutput) => void): void; + batchExecuteStatement(args: BatchExecuteStatementCommandInput, options: __HttpHandlerOptions, cb: (err: any, data?: BatchExecuteStatementCommandOutput) => void): void; + /** + * Accepts native JavaScript types instead of `AttributeValue`s, and calls + * BatchGetItemCommand operation from {@link @aws-sdk/client-dynamodb#BatchGetItemCommand}. 
+ * + * JavaScript objects passed in as parameters are marshalled into `AttributeValue` shapes + * required by Amazon DynamoDB. Responses from DynamoDB are unmarshalled into plain JavaScript objects. + */ + batchGet(args: BatchGetCommandInput, options?: __HttpHandlerOptions): Promise; + batchGet(args: BatchGetCommandInput, cb: (err: any, data?: BatchGetCommandOutput) => void): void; + batchGet(args: BatchGetCommandInput, options: __HttpHandlerOptions, cb: (err: any, data?: BatchGetCommandOutput) => void): void; + /** + * Accepts native JavaScript types instead of `AttributeValue`s, and calls + * BatchWriteItemCommand operation from {@link @aws-sdk/client-dynamodb#BatchWriteItemCommand}. + * + * JavaScript objects passed in as parameters are marshalled into `AttributeValue` shapes + * required by Amazon DynamoDB. Responses from DynamoDB are unmarshalled into plain JavaScript objects. + */ + batchWrite(args: BatchWriteCommandInput, options?: __HttpHandlerOptions): Promise; + batchWrite(args: BatchWriteCommandInput, cb: (err: any, data?: BatchWriteCommandOutput) => void): void; + batchWrite(args: BatchWriteCommandInput, options: __HttpHandlerOptions, cb: (err: any, data?: BatchWriteCommandOutput) => void): void; + /** + * Accepts native JavaScript types instead of `AttributeValue`s, and calls + * DeleteItemCommand operation from {@link @aws-sdk/client-dynamodb#DeleteItemCommand}. + * + * JavaScript objects passed in as parameters are marshalled into `AttributeValue` shapes + * required by Amazon DynamoDB. Responses from DynamoDB are unmarshalled into plain JavaScript objects. 
+ */ + delete(args: DeleteCommandInput, options?: __HttpHandlerOptions): Promise; + delete(args: DeleteCommandInput, cb: (err: any, data?: DeleteCommandOutput) => void): void; + delete(args: DeleteCommandInput, options: __HttpHandlerOptions, cb: (err: any, data?: DeleteCommandOutput) => void): void; + /** + * Accepts native JavaScript types instead of `AttributeValue`s, and calls + * ExecuteStatementCommand operation from {@link @aws-sdk/client-dynamodb#ExecuteStatementCommand}. + * + * JavaScript objects passed in as parameters are marshalled into `AttributeValue` shapes + * required by Amazon DynamoDB. Responses from DynamoDB are unmarshalled into plain JavaScript objects. + */ + executeStatement(args: ExecuteStatementCommandInput, options?: __HttpHandlerOptions): Promise; + executeStatement(args: ExecuteStatementCommandInput, cb: (err: any, data?: ExecuteStatementCommandOutput) => void): void; + executeStatement(args: ExecuteStatementCommandInput, options: __HttpHandlerOptions, cb: (err: any, data?: ExecuteStatementCommandOutput) => void): void; + /** + * Accepts native JavaScript types instead of `AttributeValue`s, and calls + * ExecuteTransactionCommand operation from {@link @aws-sdk/client-dynamodb#ExecuteTransactionCommand}. + * + * JavaScript objects passed in as parameters are marshalled into `AttributeValue` shapes + * required by Amazon DynamoDB. Responses from DynamoDB are unmarshalled into plain JavaScript objects. 
+ */ + executeTransaction(args: ExecuteTransactionCommandInput, options?: __HttpHandlerOptions): Promise; + executeTransaction(args: ExecuteTransactionCommandInput, cb: (err: any, data?: ExecuteTransactionCommandOutput) => void): void; + executeTransaction(args: ExecuteTransactionCommandInput, options: __HttpHandlerOptions, cb: (err: any, data?: ExecuteTransactionCommandOutput) => void): void; + /** + * Accepts native JavaScript types instead of `AttributeValue`s, and calls + * GetItemCommand operation from {@link @aws-sdk/client-dynamodb#GetItemCommand}. + * + * JavaScript objects passed in as parameters are marshalled into `AttributeValue` shapes + * required by Amazon DynamoDB. Responses from DynamoDB are unmarshalled into plain JavaScript objects. + */ + get(args: GetCommandInput, options?: __HttpHandlerOptions): Promise; + get(args: GetCommandInput, cb: (err: any, data?: GetCommandOutput) => void): void; + get(args: GetCommandInput, options: __HttpHandlerOptions, cb: (err: any, data?: GetCommandOutput) => void): void; + /** + * Accepts native JavaScript types instead of `AttributeValue`s, and calls + * PutItemCommand operation from {@link @aws-sdk/client-dynamodb#PutItemCommand}. + * + * JavaScript objects passed in as parameters are marshalled into `AttributeValue` shapes + * required by Amazon DynamoDB. Responses from DynamoDB are unmarshalled into plain JavaScript objects. + */ + put(args: PutCommandInput, options?: __HttpHandlerOptions): Promise; + put(args: PutCommandInput, cb: (err: any, data?: PutCommandOutput) => void): void; + put(args: PutCommandInput, options: __HttpHandlerOptions, cb: (err: any, data?: PutCommandOutput) => void): void; + /** + * Accepts native JavaScript types instead of `AttributeValue`s, and calls + * QueryCommand operation from {@link @aws-sdk/client-dynamodb#QueryCommand}. + * + * JavaScript objects passed in as parameters are marshalled into `AttributeValue` shapes + * required by Amazon DynamoDB. 
Responses from DynamoDB are unmarshalled into plain JavaScript objects. + */ + query(args: QueryCommandInput, options?: __HttpHandlerOptions): Promise; + query(args: QueryCommandInput, cb: (err: any, data?: QueryCommandOutput) => void): void; + query(args: QueryCommandInput, options: __HttpHandlerOptions, cb: (err: any, data?: QueryCommandOutput) => void): void; + /** + * Accepts native JavaScript types instead of `AttributeValue`s, and calls + * ScanCommand operation from {@link @aws-sdk/client-dynamodb#ScanCommand}. + * + * JavaScript objects passed in as parameters are marshalled into `AttributeValue` shapes + * required by Amazon DynamoDB. Responses from DynamoDB are unmarshalled into plain JavaScript objects. + */ + scan(args: ScanCommandInput, options?: __HttpHandlerOptions): Promise; + scan(args: ScanCommandInput, cb: (err: any, data?: ScanCommandOutput) => void): void; + scan(args: ScanCommandInput, options: __HttpHandlerOptions, cb: (err: any, data?: ScanCommandOutput) => void): void; + /** + * Accepts native JavaScript types instead of `AttributeValue`s, and calls + * TransactGetItemsCommand operation from {@link @aws-sdk/client-dynamodb#TransactGetItemsCommand}. + * + * JavaScript objects passed in as parameters are marshalled into `AttributeValue` shapes + * required by Amazon DynamoDB. Responses from DynamoDB are unmarshalled into plain JavaScript objects. + */ + transactGet(args: TransactGetCommandInput, options?: __HttpHandlerOptions): Promise; + transactGet(args: TransactGetCommandInput, cb: (err: any, data?: TransactGetCommandOutput) => void): void; + transactGet(args: TransactGetCommandInput, options: __HttpHandlerOptions, cb: (err: any, data?: TransactGetCommandOutput) => void): void; + /** + * Accepts native JavaScript types instead of `AttributeValue`s, and calls + * TransactWriteItemsCommand operation from {@link @aws-sdk/client-dynamodb#TransactWriteItemsCommand}. 
+ * + * JavaScript objects passed in as parameters are marshalled into `AttributeValue` shapes + * required by Amazon DynamoDB. Responses from DynamoDB are unmarshalled into plain JavaScript objects. + */ + transactWrite(args: TransactWriteCommandInput, options?: __HttpHandlerOptions): Promise; + transactWrite(args: TransactWriteCommandInput, cb: (err: any, data?: TransactWriteCommandOutput) => void): void; + transactWrite(args: TransactWriteCommandInput, options: __HttpHandlerOptions, cb: (err: any, data?: TransactWriteCommandOutput) => void): void; + /** + * Accepts native JavaScript types instead of `AttributeValue`s, and calls + * UpdateItemCommand operation from {@link @aws-sdk/client-dynamodb#UpdateItemCommand}. + * + * JavaScript objects passed in as parameters are marshalled into `AttributeValue` shapes + * required by Amazon DynamoDB. Responses from DynamoDB are unmarshalled into plain JavaScript objects. + */ + update(args: UpdateCommandInput, options?: __HttpHandlerOptions): Promise; + update(args: UpdateCommandInput, cb: (err: any, data?: UpdateCommandOutput) => void): void; + update(args: UpdateCommandInput, options: __HttpHandlerOptions, cb: (err: any, data?: UpdateCommandOutput) => void): void; +} diff --git a/amplify/functions/deleteDocument/node_modules/@aws-sdk/lib-dynamodb/dist-types/DynamoDBDocumentClient.d.ts b/amplify/functions/deleteDocument/node_modules/@aws-sdk/lib-dynamodb/dist-types/DynamoDBDocumentClient.d.ts new file mode 100644 index 0000000..260830f --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@aws-sdk/lib-dynamodb/dist-types/DynamoDBDocumentClient.d.ts @@ -0,0 +1,96 @@ +import { DynamoDBClient, DynamoDBClientResolvedConfig, ServiceInputTypes as __ServiceInputTypes, ServiceOutputTypes as __ServiceOutputTypes } from "@aws-sdk/client-dynamodb"; +import { marshallOptions, unmarshallOptions } from "@aws-sdk/util-dynamodb"; +import { Client as __Client } from "@smithy/smithy-client"; +import { HttpHandlerOptions as 
__HttpHandlerOptions } from "@smithy/types"; +import { BatchExecuteStatementCommandInput, BatchExecuteStatementCommandOutput } from "./commands/BatchExecuteStatementCommand"; +import { BatchGetCommandInput, BatchGetCommandOutput } from "./commands/BatchGetCommand"; +import { BatchWriteCommandInput, BatchWriteCommandOutput } from "./commands/BatchWriteCommand"; +import { DeleteCommandInput, DeleteCommandOutput } from "./commands/DeleteCommand"; +import { ExecuteStatementCommandInput, ExecuteStatementCommandOutput } from "./commands/ExecuteStatementCommand"; +import { ExecuteTransactionCommandInput, ExecuteTransactionCommandOutput } from "./commands/ExecuteTransactionCommand"; +import { GetCommandInput, GetCommandOutput } from "./commands/GetCommand"; +import { PutCommandInput, PutCommandOutput } from "./commands/PutCommand"; +import { QueryCommandInput, QueryCommandOutput } from "./commands/QueryCommand"; +import { ScanCommandInput, ScanCommandOutput } from "./commands/ScanCommand"; +import { TransactGetCommandInput, TransactGetCommandOutput } from "./commands/TransactGetCommand"; +import { TransactWriteCommandInput, TransactWriteCommandOutput } from "./commands/TransactWriteCommand"; +import { UpdateCommandInput, UpdateCommandOutput } from "./commands/UpdateCommand"; +/** + * @public + */ +export { __Client }; +/** + * @public + */ +export type ServiceInputTypes = __ServiceInputTypes | BatchExecuteStatementCommandInput | BatchGetCommandInput | BatchWriteCommandInput | DeleteCommandInput | ExecuteStatementCommandInput | ExecuteTransactionCommandInput | GetCommandInput | PutCommandInput | QueryCommandInput | ScanCommandInput | TransactGetCommandInput | TransactWriteCommandInput | UpdateCommandInput; +/** + * @public + */ +export type ServiceOutputTypes = __ServiceOutputTypes | BatchExecuteStatementCommandOutput | BatchGetCommandOutput | BatchWriteCommandOutput | DeleteCommandOutput | ExecuteStatementCommandOutput | ExecuteTransactionCommandOutput | GetCommandOutput | 
PutCommandOutput | QueryCommandOutput | ScanCommandOutput | TransactGetCommandOutput | TransactWriteCommandOutput | UpdateCommandOutput; +/** + * @public + */ +export type TranslateConfig = { + marshallOptions?: marshallOptions; + unmarshallOptions?: unmarshallOptions; +}; +/** + * @public + */ +export type DynamoDBDocumentClientResolvedConfig = DynamoDBClientResolvedConfig & { + translateConfig?: TranslateConfig; +}; +/** + * The document client simplifies working with items in Amazon DynamoDB by + * abstracting away the notion of attribute values. This abstraction annotates native + * JavaScript types supplied as input parameters, as well as converts annotated + * response data to native JavaScript types. + * + * ## Marshalling Input and Unmarshalling Response Data + * + * The document client affords developers the use of native JavaScript types + * instead of `AttributeValue`s to simplify the JavaScript development + * experience with Amazon DynamoDB. JavaScript objects passed in as parameters + * are marshalled into `AttributeValue` shapes required by Amazon DynamoDB. + * Responses from DynamoDB are unmarshalled into plain JavaScript objects + * by the `DocumentClient`. The `DocumentClient` does not accept + * `AttributeValue`s in favor of native JavaScript types. + * + * | JavaScript Type | DynamoDB AttributeValue | + * | :-------------------------------: | ----------------------- | + * | String | S | + * | Number / BigInt | N | + * | Boolean | BOOL | + * | null | NULL | + * | Array | L | + * | Object | M | + * | Set\ | BS | + * | Set\ | NS | + * | Set\ | SS | + * | Uint8Array, Buffer, File, Blob... 
| B | + * + * ### Example + * + * Here is an example list which is sent to DynamoDB client in an operation: + * + * ```json + * { "L": [{ "NULL": true }, { "BOOL": false }, { "N": 1 }, { "S": "two" }] } + * ``` + * + * The DynamoDB document client abstracts the attribute values as follows in + * both input and output: + * + * ```json + * [null, false, 1, "two"] + * ``` + * + * @see {@link https://www.npmjs.com/package/@aws-sdk/client-dynamodb | @aws-sdk/client-dynamodb} + * + * @public + */ +export declare class DynamoDBDocumentClient extends __Client<__HttpHandlerOptions, ServiceInputTypes, ServiceOutputTypes, DynamoDBDocumentClientResolvedConfig> { + readonly config: DynamoDBDocumentClientResolvedConfig; + protected constructor(client: DynamoDBClient, translateConfig?: TranslateConfig); + static from(client: DynamoDBClient, translateConfig?: TranslateConfig): DynamoDBDocumentClient; + destroy(): void; +} diff --git a/amplify/functions/deleteDocument/node_modules/@aws-sdk/lib-dynamodb/dist-types/baseCommand/DynamoDBDocumentClientCommand.d.ts b/amplify/functions/deleteDocument/node_modules/@aws-sdk/lib-dynamodb/dist-types/baseCommand/DynamoDBDocumentClientCommand.d.ts new file mode 100644 index 0000000..e78690a --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@aws-sdk/lib-dynamodb/dist-types/baseCommand/DynamoDBDocumentClientCommand.d.ts @@ -0,0 +1,17 @@ +import { Command as $Command } from "@smithy/smithy-client"; +import { MiddlewareStack } from "@smithy/types"; +import { KeyNodeChildren } from "../commands/utils"; +import { DynamoDBDocumentClientResolvedConfig } from "../DynamoDBDocumentClient"; +/** + * Base class for Commands in lib-dynamodb used to pass middleware to + * the underlying DynamoDBClient Commands. 
+ * + * @public + */ +export declare abstract class DynamoDBDocumentClientCommand extends $Command { + protected abstract readonly inputKeyNodes: KeyNodeChildren; + protected abstract readonly outputKeyNodes: KeyNodeChildren; + protected abstract clientCommand: $Command; + abstract middlewareStack: MiddlewareStack; + protected addMarshallingMiddleware(configuration: DynamoDBDocumentClientResolvedConfig): void; +} diff --git a/amplify/functions/deleteDocument/node_modules/@aws-sdk/lib-dynamodb/dist-types/commands/BatchExecuteStatementCommand.d.ts b/amplify/functions/deleteDocument/node_modules/@aws-sdk/lib-dynamodb/dist-types/commands/BatchExecuteStatementCommand.d.ts new file mode 100644 index 0000000..8f55283 --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@aws-sdk/lib-dynamodb/dist-types/commands/BatchExecuteStatementCommand.d.ts @@ -0,0 +1,66 @@ +import { BatchExecuteStatementCommand as __BatchExecuteStatementCommand } from "@aws-sdk/client-dynamodb"; +import { Command as $Command } from "@smithy/smithy-client"; +import { Handler, HttpHandlerOptions as __HttpHandlerOptions, MiddlewareStack } from "@smithy/types"; +import { DynamoDBDocumentClientCommand } from "../baseCommand/DynamoDBDocumentClientCommand"; +import { DynamoDBDocumentClientResolvedConfig, ServiceInputTypes, ServiceOutputTypes } from "../DynamoDBDocumentClient"; +/** + * @public + */ +export { DynamoDBDocumentClientCommand, $Command }; +/** + * @public + */ +export type BatchExecuteStatementCommandInput = Omit<__BatchExecuteStatementCommandInput, "Statements"> & { + Statements: (Omit & { + Parameters?: NativeAttributeValue[] | undefined; + })[] | undefined; +}; +/** + * @public + */ +export type BatchExecuteStatementCommandOutput = Omit<__BatchExecuteStatementCommandOutput, "Responses"> & { + Responses?: (Omit & { + Error?: (Omit & { + Item?: Record | undefined; + }) | undefined; + Item?: Record | undefined; + })[] | undefined; +}; +/** + * Accepts native JavaScript types instead 
of `AttributeValue`s, and calls + * BatchExecuteStatementCommand operation from {@link @aws-sdk/client-dynamodb#BatchExecuteStatementCommand}. + * + * JavaScript objects passed in as parameters are marshalled into `AttributeValue` shapes + * required by Amazon DynamoDB. Responses from DynamoDB are unmarshalled into plain JavaScript objects. + * + * @public + */ +export declare class BatchExecuteStatementCommand extends DynamoDBDocumentClientCommand { + readonly input: BatchExecuteStatementCommandInput; + protected readonly inputKeyNodes: { + Statements: { + "*": { + Parameters: import("../commands/utils").KeyNodeChildren; + }; + }; + }; + protected readonly outputKeyNodes: { + Responses: { + "*": { + Error: { + Item: import("../commands/utils").KeyNodeChildren; + }; + Item: import("../commands/utils").KeyNodeChildren; + }; + }; + }; + protected readonly clientCommand: __BatchExecuteStatementCommand; + readonly middlewareStack: MiddlewareStack; + constructor(input: BatchExecuteStatementCommandInput); + /** + * @internal + */ + resolveMiddleware(clientStack: MiddlewareStack, configuration: DynamoDBDocumentClientResolvedConfig, options?: __HttpHandlerOptions): Handler; +} +import type { BatchExecuteStatementCommandInput as __BatchExecuteStatementCommandInput, BatchExecuteStatementCommandOutput as __BatchExecuteStatementCommandOutput, BatchStatementError, BatchStatementRequest, BatchStatementResponse } from "@aws-sdk/client-dynamodb"; +import type { NativeAttributeValue } from "@aws-sdk/util-dynamodb"; diff --git a/amplify/functions/deleteDocument/node_modules/@aws-sdk/lib-dynamodb/dist-types/commands/BatchGetCommand.d.ts b/amplify/functions/deleteDocument/node_modules/@aws-sdk/lib-dynamodb/dist-types/commands/BatchGetCommand.d.ts new file mode 100644 index 0000000..60cd689 --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@aws-sdk/lib-dynamodb/dist-types/commands/BatchGetCommand.d.ts @@ -0,0 +1,70 @@ +import { BatchGetItemCommand as 
__BatchGetItemCommand } from "@aws-sdk/client-dynamodb"; +import { Command as $Command } from "@smithy/smithy-client"; +import { Handler, HttpHandlerOptions as __HttpHandlerOptions, MiddlewareStack } from "@smithy/types"; +import { DynamoDBDocumentClientCommand } from "../baseCommand/DynamoDBDocumentClientCommand"; +import { DynamoDBDocumentClientResolvedConfig, ServiceInputTypes, ServiceOutputTypes } from "../DynamoDBDocumentClient"; +/** + * @public + */ +export { DynamoDBDocumentClientCommand, $Command }; +/** + * @public + */ +export type BatchGetCommandInput = Omit<__BatchGetItemCommandInput, "RequestItems"> & { + RequestItems: Record & { + Keys: Record[] | undefined; + }> | undefined; +}; +/** + * @public + */ +export type BatchGetCommandOutput = Omit<__BatchGetItemCommandOutput, "Responses" | "UnprocessedKeys"> & { + Responses?: Record[]> | undefined; + UnprocessedKeys?: Record & { + Keys: Record[] | undefined; + }> | undefined; +}; +/** + * Accepts native JavaScript types instead of `AttributeValue`s, and calls + * BatchGetItemCommand operation from {@link @aws-sdk/client-dynamodb#BatchGetItemCommand}. + * + * JavaScript objects passed in as parameters are marshalled into `AttributeValue` shapes + * required by Amazon DynamoDB. Responses from DynamoDB are unmarshalled into plain JavaScript objects. 
+ * + * @public + */ +export declare class BatchGetCommand extends DynamoDBDocumentClientCommand { + readonly input: BatchGetCommandInput; + protected readonly inputKeyNodes: { + RequestItems: { + "*": { + Keys: { + "*": import("../commands/utils").KeyNodeChildren; + }; + }; + }; + }; + protected readonly outputKeyNodes: { + Responses: { + "*": { + "*": import("../commands/utils").KeyNodeChildren; + }; + }; + UnprocessedKeys: { + "*": { + Keys: { + "*": import("../commands/utils").KeyNodeChildren; + }; + }; + }; + }; + protected readonly clientCommand: __BatchGetItemCommand; + readonly middlewareStack: MiddlewareStack; + constructor(input: BatchGetCommandInput); + /** + * @internal + */ + resolveMiddleware(clientStack: MiddlewareStack, configuration: DynamoDBDocumentClientResolvedConfig, options?: __HttpHandlerOptions): Handler; +} +import type { BatchGetItemCommandInput as __BatchGetItemCommandInput, BatchGetItemCommandOutput as __BatchGetItemCommandOutput, KeysAndAttributes } from "@aws-sdk/client-dynamodb"; +import type { NativeAttributeValue } from "@aws-sdk/util-dynamodb"; diff --git a/amplify/functions/deleteDocument/node_modules/@aws-sdk/lib-dynamodb/dist-types/commands/BatchWriteCommand.d.ts b/amplify/functions/deleteDocument/node_modules/@aws-sdk/lib-dynamodb/dist-types/commands/BatchWriteCommand.d.ts new file mode 100644 index 0000000..930d3f5 --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@aws-sdk/lib-dynamodb/dist-types/commands/BatchWriteCommand.d.ts @@ -0,0 +1,94 @@ +import { BatchWriteItemCommand as __BatchWriteItemCommand } from "@aws-sdk/client-dynamodb"; +import { Command as $Command } from "@smithy/smithy-client"; +import { Handler, HttpHandlerOptions as __HttpHandlerOptions, MiddlewareStack } from "@smithy/types"; +import { DynamoDBDocumentClientCommand } from "../baseCommand/DynamoDBDocumentClientCommand"; +import { DynamoDBDocumentClientResolvedConfig, ServiceInputTypes, ServiceOutputTypes } from "../DynamoDBDocumentClient"; 
+/** + * @public + */ +export { DynamoDBDocumentClientCommand, $Command }; +/** + * @public + */ +export type BatchWriteCommandInput = Omit<__BatchWriteItemCommandInput, "RequestItems"> & { + RequestItems: Record & { + PutRequest?: (Omit & { + Item: Record | undefined; + }) | undefined; + DeleteRequest?: (Omit & { + Key: Record | undefined; + }) | undefined; + })[]> | undefined; +}; +/** + * @public + */ +export type BatchWriteCommandOutput = Omit<__BatchWriteItemCommandOutput, "UnprocessedItems" | "ItemCollectionMetrics"> & { + UnprocessedItems?: Record & { + PutRequest?: (Omit & { + Item: Record | undefined; + }) | undefined; + DeleteRequest?: (Omit & { + Key: Record | undefined; + }) | undefined; + })[]> | undefined; + ItemCollectionMetrics?: Record & { + ItemCollectionKey?: Record | undefined; + })[]> | undefined; +}; +/** + * Accepts native JavaScript types instead of `AttributeValue`s, and calls + * BatchWriteItemCommand operation from {@link @aws-sdk/client-dynamodb#BatchWriteItemCommand}. + * + * JavaScript objects passed in as parameters are marshalled into `AttributeValue` shapes + * required by Amazon DynamoDB. Responses from DynamoDB are unmarshalled into plain JavaScript objects. 
+ * + * @public + */ +export declare class BatchWriteCommand extends DynamoDBDocumentClientCommand { + readonly input: BatchWriteCommandInput; + protected readonly inputKeyNodes: { + RequestItems: { + "*": { + "*": { + PutRequest: { + Item: import("../commands/utils").KeyNodeChildren; + }; + DeleteRequest: { + Key: import("../commands/utils").KeyNodeChildren; + }; + }; + }; + }; + }; + protected readonly outputKeyNodes: { + UnprocessedItems: { + "*": { + "*": { + PutRequest: { + Item: import("../commands/utils").KeyNodeChildren; + }; + DeleteRequest: { + Key: import("../commands/utils").KeyNodeChildren; + }; + }; + }; + }; + ItemCollectionMetrics: { + "*": { + "*": { + ItemCollectionKey: import("../commands/utils").KeyNodeChildren; + }; + }; + }; + }; + protected readonly clientCommand: __BatchWriteItemCommand; + readonly middlewareStack: MiddlewareStack; + constructor(input: BatchWriteCommandInput); + /** + * @internal + */ + resolveMiddleware(clientStack: MiddlewareStack, configuration: DynamoDBDocumentClientResolvedConfig, options?: __HttpHandlerOptions): Handler; +} +import type { BatchWriteItemCommandInput as __BatchWriteItemCommandInput, BatchWriteItemCommandOutput as __BatchWriteItemCommandOutput, DeleteRequest, ItemCollectionMetrics, PutRequest, WriteRequest } from "@aws-sdk/client-dynamodb"; +import type { NativeAttributeValue } from "@aws-sdk/util-dynamodb"; diff --git a/amplify/functions/deleteDocument/node_modules/@aws-sdk/lib-dynamodb/dist-types/commands/DeleteCommand.d.ts b/amplify/functions/deleteDocument/node_modules/@aws-sdk/lib-dynamodb/dist-types/commands/DeleteCommand.d.ts new file mode 100644 index 0000000..c091175 --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@aws-sdk/lib-dynamodb/dist-types/commands/DeleteCommand.d.ts @@ -0,0 +1,66 @@ +import { DeleteItemCommand as __DeleteItemCommand } from "@aws-sdk/client-dynamodb"; +import { Command as $Command } from "@smithy/smithy-client"; +import { Handler, HttpHandlerOptions as 
__HttpHandlerOptions, MiddlewareStack } from "@smithy/types"; +import { DynamoDBDocumentClientCommand } from "../baseCommand/DynamoDBDocumentClientCommand"; +import { DynamoDBDocumentClientResolvedConfig, ServiceInputTypes, ServiceOutputTypes } from "../DynamoDBDocumentClient"; +/** + * @public + */ +export { DynamoDBDocumentClientCommand, $Command }; +/** + * @public + */ +export type DeleteCommandInput = Omit<__DeleteItemCommandInput, "Key" | "Expected" | "ExpressionAttributeValues"> & { + Key: Record | undefined; + Expected?: Record & { + Value?: NativeAttributeValue | undefined; + AttributeValueList?: NativeAttributeValue[] | undefined; + }> | undefined; + ExpressionAttributeValues?: Record | undefined; +}; +/** + * @public + */ +export type DeleteCommandOutput = Omit<__DeleteItemCommandOutput, "Attributes" | "ItemCollectionMetrics"> & { + Attributes?: Record | undefined; + ItemCollectionMetrics?: (Omit & { + ItemCollectionKey?: Record | undefined; + }) | undefined; +}; +/** + * Accepts native JavaScript types instead of `AttributeValue`s, and calls + * DeleteItemCommand operation from {@link @aws-sdk/client-dynamodb#DeleteItemCommand}. + * + * JavaScript objects passed in as parameters are marshalled into `AttributeValue` shapes + * required by Amazon DynamoDB. Responses from DynamoDB are unmarshalled into plain JavaScript objects. 
+ * + * @public + */ +export declare class DeleteCommand extends DynamoDBDocumentClientCommand { + readonly input: DeleteCommandInput; + protected readonly inputKeyNodes: { + Key: import("../commands/utils").KeyNodeChildren; + Expected: { + "*": { + Value: null; + AttributeValueList: import("../commands/utils").KeyNodeChildren; + }; + }; + ExpressionAttributeValues: import("../commands/utils").KeyNodeChildren; + }; + protected readonly outputKeyNodes: { + Attributes: import("../commands/utils").KeyNodeChildren; + ItemCollectionMetrics: { + ItemCollectionKey: import("../commands/utils").KeyNodeChildren; + }; + }; + protected readonly clientCommand: __DeleteItemCommand; + readonly middlewareStack: MiddlewareStack; + constructor(input: DeleteCommandInput); + /** + * @internal + */ + resolveMiddleware(clientStack: MiddlewareStack, configuration: DynamoDBDocumentClientResolvedConfig, options?: __HttpHandlerOptions): Handler; +} +import type { DeleteItemCommandInput as __DeleteItemCommandInput, DeleteItemCommandOutput as __DeleteItemCommandOutput, ExpectedAttributeValue, ItemCollectionMetrics } from "@aws-sdk/client-dynamodb"; +import type { NativeAttributeValue } from "@aws-sdk/util-dynamodb"; diff --git a/amplify/functions/deleteDocument/node_modules/@aws-sdk/lib-dynamodb/dist-types/commands/ExecuteStatementCommand.d.ts b/amplify/functions/deleteDocument/node_modules/@aws-sdk/lib-dynamodb/dist-types/commands/ExecuteStatementCommand.d.ts new file mode 100644 index 0000000..4498320 --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@aws-sdk/lib-dynamodb/dist-types/commands/ExecuteStatementCommand.d.ts @@ -0,0 +1,52 @@ +import { ExecuteStatementCommand as __ExecuteStatementCommand } from "@aws-sdk/client-dynamodb"; +import { Command as $Command } from "@smithy/smithy-client"; +import { Handler, HttpHandlerOptions as __HttpHandlerOptions, MiddlewareStack } from "@smithy/types"; +import { DynamoDBDocumentClientCommand } from 
"../baseCommand/DynamoDBDocumentClientCommand"; +import { DynamoDBDocumentClientResolvedConfig, ServiceInputTypes, ServiceOutputTypes } from "../DynamoDBDocumentClient"; +/** + * @public + */ +export { DynamoDBDocumentClientCommand, $Command }; +/** + * @public + */ +export type ExecuteStatementCommandInput = Omit<__ExecuteStatementCommandInput, "Parameters"> & { + Parameters?: NativeAttributeValue[] | undefined; +}; +/** + * @public + */ +export type ExecuteStatementCommandOutput = Omit<__ExecuteStatementCommandOutput, "Items" | "LastEvaluatedKey"> & { + Items?: Record[] | undefined; + LastEvaluatedKey?: Record | undefined; +}; +/** + * Accepts native JavaScript types instead of `AttributeValue`s, and calls + * ExecuteStatementCommand operation from {@link @aws-sdk/client-dynamodb#ExecuteStatementCommand}. + * + * JavaScript objects passed in as parameters are marshalled into `AttributeValue` shapes + * required by Amazon DynamoDB. Responses from DynamoDB are unmarshalled into plain JavaScript objects. 
+ * + * @public + */ +export declare class ExecuteStatementCommand extends DynamoDBDocumentClientCommand { + readonly input: ExecuteStatementCommandInput; + protected readonly inputKeyNodes: { + Parameters: import("../commands/utils").KeyNodeChildren; + }; + protected readonly outputKeyNodes: { + Items: { + "*": import("../commands/utils").KeyNodeChildren; + }; + LastEvaluatedKey: import("../commands/utils").KeyNodeChildren; + }; + protected readonly clientCommand: __ExecuteStatementCommand; + readonly middlewareStack: MiddlewareStack; + constructor(input: ExecuteStatementCommandInput); + /** + * @internal + */ + resolveMiddleware(clientStack: MiddlewareStack, configuration: DynamoDBDocumentClientResolvedConfig, options?: __HttpHandlerOptions): Handler; +} +import type { ExecuteStatementCommandInput as __ExecuteStatementCommandInput, ExecuteStatementCommandOutput as __ExecuteStatementCommandOutput } from "@aws-sdk/client-dynamodb"; +import type { NativeAttributeValue } from "@aws-sdk/util-dynamodb"; diff --git a/amplify/functions/deleteDocument/node_modules/@aws-sdk/lib-dynamodb/dist-types/commands/ExecuteTransactionCommand.d.ts b/amplify/functions/deleteDocument/node_modules/@aws-sdk/lib-dynamodb/dist-types/commands/ExecuteTransactionCommand.d.ts new file mode 100644 index 0000000..04873aa --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@aws-sdk/lib-dynamodb/dist-types/commands/ExecuteTransactionCommand.d.ts @@ -0,0 +1,60 @@ +import { ExecuteTransactionCommand as __ExecuteTransactionCommand } from "@aws-sdk/client-dynamodb"; +import { Command as $Command } from "@smithy/smithy-client"; +import { Handler, HttpHandlerOptions as __HttpHandlerOptions, MiddlewareStack } from "@smithy/types"; +import { DynamoDBDocumentClientCommand } from "../baseCommand/DynamoDBDocumentClientCommand"; +import { DynamoDBDocumentClientResolvedConfig, ServiceInputTypes, ServiceOutputTypes } from "../DynamoDBDocumentClient"; +/** + * @public + */ +export { 
DynamoDBDocumentClientCommand, $Command }; +/** + * @public + */ +export type ExecuteTransactionCommandInput = Omit<__ExecuteTransactionCommandInput, "TransactStatements"> & { + TransactStatements: (Omit & { + Parameters?: NativeAttributeValue[] | undefined; + })[] | undefined; +}; +/** + * @public + */ +export type ExecuteTransactionCommandOutput = Omit<__ExecuteTransactionCommandOutput, "Responses"> & { + Responses?: (Omit & { + Item?: Record | undefined; + })[] | undefined; +}; +/** + * Accepts native JavaScript types instead of `AttributeValue`s, and calls + * ExecuteTransactionCommand operation from {@link @aws-sdk/client-dynamodb#ExecuteTransactionCommand}. + * + * JavaScript objects passed in as parameters are marshalled into `AttributeValue` shapes + * required by Amazon DynamoDB. Responses from DynamoDB are unmarshalled into plain JavaScript objects. + * + * @public + */ +export declare class ExecuteTransactionCommand extends DynamoDBDocumentClientCommand { + readonly input: ExecuteTransactionCommandInput; + protected readonly inputKeyNodes: { + TransactStatements: { + "*": { + Parameters: import("../commands/utils").KeyNodeChildren; + }; + }; + }; + protected readonly outputKeyNodes: { + Responses: { + "*": { + Item: import("../commands/utils").KeyNodeChildren; + }; + }; + }; + protected readonly clientCommand: __ExecuteTransactionCommand; + readonly middlewareStack: MiddlewareStack; + constructor(input: ExecuteTransactionCommandInput); + /** + * @internal + */ + resolveMiddleware(clientStack: MiddlewareStack, configuration: DynamoDBDocumentClientResolvedConfig, options?: __HttpHandlerOptions): Handler; +} +import type { ExecuteTransactionCommandInput as __ExecuteTransactionCommandInput, ExecuteTransactionCommandOutput as __ExecuteTransactionCommandOutput, ItemResponse, ParameterizedStatement } from "@aws-sdk/client-dynamodb"; +import type { NativeAttributeValue } from "@aws-sdk/util-dynamodb"; diff --git 
a/amplify/functions/deleteDocument/node_modules/@aws-sdk/lib-dynamodb/dist-types/commands/GetCommand.d.ts b/amplify/functions/deleteDocument/node_modules/@aws-sdk/lib-dynamodb/dist-types/commands/GetCommand.d.ts new file mode 100644 index 0000000..d425131 --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@aws-sdk/lib-dynamodb/dist-types/commands/GetCommand.d.ts @@ -0,0 +1,48 @@ +import { GetItemCommand as __GetItemCommand } from "@aws-sdk/client-dynamodb"; +import { Command as $Command } from "@smithy/smithy-client"; +import { Handler, HttpHandlerOptions as __HttpHandlerOptions, MiddlewareStack } from "@smithy/types"; +import { DynamoDBDocumentClientCommand } from "../baseCommand/DynamoDBDocumentClientCommand"; +import { DynamoDBDocumentClientResolvedConfig, ServiceInputTypes, ServiceOutputTypes } from "../DynamoDBDocumentClient"; +/** + * @public + */ +export { DynamoDBDocumentClientCommand, $Command }; +/** + * @public + */ +export type GetCommandInput = Omit<__GetItemCommandInput, "Key"> & { + Key: Record | undefined; +}; +/** + * @public + */ +export type GetCommandOutput = Omit<__GetItemCommandOutput, "Item"> & { + Item?: Record | undefined; +}; +/** + * Accepts native JavaScript types instead of `AttributeValue`s, and calls + * GetItemCommand operation from {@link @aws-sdk/client-dynamodb#GetItemCommand}. + * + * JavaScript objects passed in as parameters are marshalled into `AttributeValue` shapes + * required by Amazon DynamoDB. Responses from DynamoDB are unmarshalled into plain JavaScript objects. 
+ * + * @public + */ +export declare class GetCommand extends DynamoDBDocumentClientCommand { + readonly input: GetCommandInput; + protected readonly inputKeyNodes: { + Key: import("../commands/utils").KeyNodeChildren; + }; + protected readonly outputKeyNodes: { + Item: import("../commands/utils").KeyNodeChildren; + }; + protected readonly clientCommand: __GetItemCommand; + readonly middlewareStack: MiddlewareStack; + constructor(input: GetCommandInput); + /** + * @internal + */ + resolveMiddleware(clientStack: MiddlewareStack, configuration: DynamoDBDocumentClientResolvedConfig, options?: __HttpHandlerOptions): Handler; +} +import type { GetItemCommandInput as __GetItemCommandInput, GetItemCommandOutput as __GetItemCommandOutput } from "@aws-sdk/client-dynamodb"; +import type { NativeAttributeValue } from "@aws-sdk/util-dynamodb"; diff --git a/amplify/functions/deleteDocument/node_modules/@aws-sdk/lib-dynamodb/dist-types/commands/PutCommand.d.ts b/amplify/functions/deleteDocument/node_modules/@aws-sdk/lib-dynamodb/dist-types/commands/PutCommand.d.ts new file mode 100644 index 0000000..babe831 --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@aws-sdk/lib-dynamodb/dist-types/commands/PutCommand.d.ts @@ -0,0 +1,66 @@ +import { PutItemCommand as __PutItemCommand } from "@aws-sdk/client-dynamodb"; +import { Command as $Command } from "@smithy/smithy-client"; +import { Handler, HttpHandlerOptions as __HttpHandlerOptions, MiddlewareStack } from "@smithy/types"; +import { DynamoDBDocumentClientCommand } from "../baseCommand/DynamoDBDocumentClientCommand"; +import { DynamoDBDocumentClientResolvedConfig, ServiceInputTypes, ServiceOutputTypes } from "../DynamoDBDocumentClient"; +/** + * @public + */ +export { DynamoDBDocumentClientCommand, $Command }; +/** + * @public + */ +export type PutCommandInput = Omit<__PutItemCommandInput, "Item" | "Expected" | "ExpressionAttributeValues"> & { + Item: Record | undefined; + Expected?: Record & { + Value?: 
NativeAttributeValue | undefined; + AttributeValueList?: NativeAttributeValue[] | undefined; + }> | undefined; + ExpressionAttributeValues?: Record | undefined; +}; +/** + * @public + */ +export type PutCommandOutput = Omit<__PutItemCommandOutput, "Attributes" | "ItemCollectionMetrics"> & { + Attributes?: Record | undefined; + ItemCollectionMetrics?: (Omit & { + ItemCollectionKey?: Record | undefined; + }) | undefined; +}; +/** + * Accepts native JavaScript types instead of `AttributeValue`s, and calls + * PutItemCommand operation from {@link @aws-sdk/client-dynamodb#PutItemCommand}. + * + * JavaScript objects passed in as parameters are marshalled into `AttributeValue` shapes + * required by Amazon DynamoDB. Responses from DynamoDB are unmarshalled into plain JavaScript objects. + * + * @public + */ +export declare class PutCommand extends DynamoDBDocumentClientCommand { + readonly input: PutCommandInput; + protected readonly inputKeyNodes: { + Item: import("../commands/utils").KeyNodeChildren; + Expected: { + "*": { + Value: null; + AttributeValueList: import("../commands/utils").KeyNodeChildren; + }; + }; + ExpressionAttributeValues: import("../commands/utils").KeyNodeChildren; + }; + protected readonly outputKeyNodes: { + Attributes: import("../commands/utils").KeyNodeChildren; + ItemCollectionMetrics: { + ItemCollectionKey: import("../commands/utils").KeyNodeChildren; + }; + }; + protected readonly clientCommand: __PutItemCommand; + readonly middlewareStack: MiddlewareStack; + constructor(input: PutCommandInput); + /** + * @internal + */ + resolveMiddleware(clientStack: MiddlewareStack, configuration: DynamoDBDocumentClientResolvedConfig, options?: __HttpHandlerOptions): Handler; +} +import type { ExpectedAttributeValue, ItemCollectionMetrics, PutItemCommandInput as __PutItemCommandInput, PutItemCommandOutput as __PutItemCommandOutput } from "@aws-sdk/client-dynamodb"; +import type { NativeAttributeValue } from "@aws-sdk/util-dynamodb"; diff --git 
a/amplify/functions/deleteDocument/node_modules/@aws-sdk/lib-dynamodb/dist-types/commands/QueryCommand.d.ts b/amplify/functions/deleteDocument/node_modules/@aws-sdk/lib-dynamodb/dist-types/commands/QueryCommand.d.ts new file mode 100644 index 0000000..4948a70 --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@aws-sdk/lib-dynamodb/dist-types/commands/QueryCommand.d.ts @@ -0,0 +1,70 @@ +import { QueryCommand as __QueryCommand } from "@aws-sdk/client-dynamodb"; +import { Command as $Command } from "@smithy/smithy-client"; +import { Handler, HttpHandlerOptions as __HttpHandlerOptions, MiddlewareStack } from "@smithy/types"; +import { DynamoDBDocumentClientCommand } from "../baseCommand/DynamoDBDocumentClientCommand"; +import { DynamoDBDocumentClientResolvedConfig, ServiceInputTypes, ServiceOutputTypes } from "../DynamoDBDocumentClient"; +/** + * @public + */ +export { DynamoDBDocumentClientCommand, $Command }; +/** + * @public + */ +export type QueryCommandInput = Omit<__QueryCommandInput, "KeyConditions" | "QueryFilter" | "ExclusiveStartKey" | "ExpressionAttributeValues"> & { + KeyConditions?: Record & { + AttributeValueList?: NativeAttributeValue[] | undefined; + }> | undefined; + QueryFilter?: Record & { + AttributeValueList?: NativeAttributeValue[] | undefined; + }> | undefined; + ExclusiveStartKey?: Record | undefined; + ExpressionAttributeValues?: Record | undefined; +}; +/** + * @public + */ +export type QueryCommandOutput = Omit<__QueryCommandOutput, "Items" | "LastEvaluatedKey"> & { + Items?: Record[] | undefined; + LastEvaluatedKey?: Record | undefined; +}; +/** + * Accepts native JavaScript types instead of `AttributeValue`s, and calls + * QueryCommand operation from {@link @aws-sdk/client-dynamodb#QueryCommand}. + * + * JavaScript objects passed in as parameters are marshalled into `AttributeValue` shapes + * required by Amazon DynamoDB. Responses from DynamoDB are unmarshalled into plain JavaScript objects. 
+ * + * @public + */ +export declare class QueryCommand extends DynamoDBDocumentClientCommand { + readonly input: QueryCommandInput; + protected readonly inputKeyNodes: { + KeyConditions: { + "*": { + AttributeValueList: import("../commands/utils").KeyNodeChildren; + }; + }; + QueryFilter: { + "*": { + AttributeValueList: import("../commands/utils").KeyNodeChildren; + }; + }; + ExclusiveStartKey: import("../commands/utils").KeyNodeChildren; + ExpressionAttributeValues: import("../commands/utils").KeyNodeChildren; + }; + protected readonly outputKeyNodes: { + Items: { + "*": import("../commands/utils").KeyNodeChildren; + }; + LastEvaluatedKey: import("../commands/utils").KeyNodeChildren; + }; + protected readonly clientCommand: __QueryCommand; + readonly middlewareStack: MiddlewareStack; + constructor(input: QueryCommandInput); + /** + * @internal + */ + resolveMiddleware(clientStack: MiddlewareStack, configuration: DynamoDBDocumentClientResolvedConfig, options?: __HttpHandlerOptions): Handler; +} +import type { Condition, QueryCommandInput as __QueryCommandInput, QueryCommandOutput as __QueryCommandOutput } from "@aws-sdk/client-dynamodb"; +import type { NativeAttributeValue } from "@aws-sdk/util-dynamodb"; diff --git a/amplify/functions/deleteDocument/node_modules/@aws-sdk/lib-dynamodb/dist-types/commands/ScanCommand.d.ts b/amplify/functions/deleteDocument/node_modules/@aws-sdk/lib-dynamodb/dist-types/commands/ScanCommand.d.ts new file mode 100644 index 0000000..86b0089 --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@aws-sdk/lib-dynamodb/dist-types/commands/ScanCommand.d.ts @@ -0,0 +1,62 @@ +import { ScanCommand as __ScanCommand } from "@aws-sdk/client-dynamodb"; +import { Command as $Command } from "@smithy/smithy-client"; +import { Handler, HttpHandlerOptions as __HttpHandlerOptions, MiddlewareStack } from "@smithy/types"; +import { DynamoDBDocumentClientCommand } from "../baseCommand/DynamoDBDocumentClientCommand"; +import { 
DynamoDBDocumentClientResolvedConfig, ServiceInputTypes, ServiceOutputTypes } from "../DynamoDBDocumentClient"; +/** + * @public + */ +export { DynamoDBDocumentClientCommand, $Command }; +/** + * @public + */ +export type ScanCommandInput = Omit<__ScanCommandInput, "ScanFilter" | "ExclusiveStartKey" | "ExpressionAttributeValues"> & { + ScanFilter?: Record & { + AttributeValueList?: NativeAttributeValue[] | undefined; + }> | undefined; + ExclusiveStartKey?: Record | undefined; + ExpressionAttributeValues?: Record | undefined; +}; +/** + * @public + */ +export type ScanCommandOutput = Omit<__ScanCommandOutput, "Items" | "LastEvaluatedKey"> & { + Items?: Record[] | undefined; + LastEvaluatedKey?: Record | undefined; +}; +/** + * Accepts native JavaScript types instead of `AttributeValue`s, and calls + * ScanCommand operation from {@link @aws-sdk/client-dynamodb#ScanCommand}. + * + * JavaScript objects passed in as parameters are marshalled into `AttributeValue` shapes + * required by Amazon DynamoDB. Responses from DynamoDB are unmarshalled into plain JavaScript objects. 
+ * + * @public + */ +export declare class ScanCommand extends DynamoDBDocumentClientCommand { + readonly input: ScanCommandInput; + protected readonly inputKeyNodes: { + ScanFilter: { + "*": { + AttributeValueList: import("../commands/utils").KeyNodeChildren; + }; + }; + ExclusiveStartKey: import("../commands/utils").KeyNodeChildren; + ExpressionAttributeValues: import("../commands/utils").KeyNodeChildren; + }; + protected readonly outputKeyNodes: { + Items: { + "*": import("../commands/utils").KeyNodeChildren; + }; + LastEvaluatedKey: import("../commands/utils").KeyNodeChildren; + }; + protected readonly clientCommand: __ScanCommand; + readonly middlewareStack: MiddlewareStack; + constructor(input: ScanCommandInput); + /** + * @internal + */ + resolveMiddleware(clientStack: MiddlewareStack, configuration: DynamoDBDocumentClientResolvedConfig, options?: __HttpHandlerOptions): Handler; +} +import type { Condition, ScanCommandInput as __ScanCommandInput, ScanCommandOutput as __ScanCommandOutput } from "@aws-sdk/client-dynamodb"; +import type { NativeAttributeValue } from "@aws-sdk/util-dynamodb"; diff --git a/amplify/functions/deleteDocument/node_modules/@aws-sdk/lib-dynamodb/dist-types/commands/TransactGetCommand.d.ts b/amplify/functions/deleteDocument/node_modules/@aws-sdk/lib-dynamodb/dist-types/commands/TransactGetCommand.d.ts new file mode 100644 index 0000000..3355455 --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@aws-sdk/lib-dynamodb/dist-types/commands/TransactGetCommand.d.ts @@ -0,0 +1,64 @@ +import { TransactGetItemsCommand as __TransactGetItemsCommand } from "@aws-sdk/client-dynamodb"; +import { Command as $Command } from "@smithy/smithy-client"; +import { Handler, HttpHandlerOptions as __HttpHandlerOptions, MiddlewareStack } from "@smithy/types"; +import { DynamoDBDocumentClientCommand } from "../baseCommand/DynamoDBDocumentClientCommand"; +import { DynamoDBDocumentClientResolvedConfig, ServiceInputTypes, ServiceOutputTypes } from 
"../DynamoDBDocumentClient"; +/** + * @public + */ +export { DynamoDBDocumentClientCommand, $Command }; +/** + * @public + */ +export type TransactGetCommandInput = Omit<__TransactGetItemsCommandInput, "TransactItems"> & { + TransactItems: (Omit & { + Get: (Omit & { + Key: Record | undefined; + }) | undefined; + })[] | undefined; +}; +/** + * @public + */ +export type TransactGetCommandOutput = Omit<__TransactGetItemsCommandOutput, "Responses"> & { + Responses?: (Omit & { + Item?: Record | undefined; + })[] | undefined; +}; +/** + * Accepts native JavaScript types instead of `AttributeValue`s, and calls + * TransactGetItemsCommand operation from {@link @aws-sdk/client-dynamodb#TransactGetItemsCommand}. + * + * JavaScript objects passed in as parameters are marshalled into `AttributeValue` shapes + * required by Amazon DynamoDB. Responses from DynamoDB are unmarshalled into plain JavaScript objects. + * + * @public + */ +export declare class TransactGetCommand extends DynamoDBDocumentClientCommand { + readonly input: TransactGetCommandInput; + protected readonly inputKeyNodes: { + TransactItems: { + "*": { + Get: { + Key: import("../commands/utils").KeyNodeChildren; + }; + }; + }; + }; + protected readonly outputKeyNodes: { + Responses: { + "*": { + Item: import("../commands/utils").KeyNodeChildren; + }; + }; + }; + protected readonly clientCommand: __TransactGetItemsCommand; + readonly middlewareStack: MiddlewareStack; + constructor(input: TransactGetCommandInput); + /** + * @internal + */ + resolveMiddleware(clientStack: MiddlewareStack, configuration: DynamoDBDocumentClientResolvedConfig, options?: __HttpHandlerOptions): Handler; +} +import type { Get, ItemResponse, TransactGetItem, TransactGetItemsCommandInput as __TransactGetItemsCommandInput, TransactGetItemsCommandOutput as __TransactGetItemsCommandOutput } from "@aws-sdk/client-dynamodb"; +import type { NativeAttributeValue } from "@aws-sdk/util-dynamodb"; diff --git 
a/amplify/functions/deleteDocument/node_modules/@aws-sdk/lib-dynamodb/dist-types/commands/TransactWriteCommand.d.ts b/amplify/functions/deleteDocument/node_modules/@aws-sdk/lib-dynamodb/dist-types/commands/TransactWriteCommand.d.ts new file mode 100644 index 0000000..eef56d9 --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@aws-sdk/lib-dynamodb/dist-types/commands/TransactWriteCommand.d.ts @@ -0,0 +1,92 @@ +import { TransactWriteItemsCommand as __TransactWriteItemsCommand } from "@aws-sdk/client-dynamodb"; +import { Command as $Command } from "@smithy/smithy-client"; +import { Handler, HttpHandlerOptions as __HttpHandlerOptions, MiddlewareStack } from "@smithy/types"; +import { DynamoDBDocumentClientCommand } from "../baseCommand/DynamoDBDocumentClientCommand"; +import { DynamoDBDocumentClientResolvedConfig, ServiceInputTypes, ServiceOutputTypes } from "../DynamoDBDocumentClient"; +/** + * @public + */ +export { DynamoDBDocumentClientCommand, $Command }; +/** + * @public + */ +export type TransactWriteCommandInput = Omit<__TransactWriteItemsCommandInput, "TransactItems"> & { + TransactItems: (Omit & { + ConditionCheck?: (Omit & { + Key: Record | undefined; + ExpressionAttributeValues?: Record | undefined; + }) | undefined; + Put?: (Omit & { + Item: Record | undefined; + ExpressionAttributeValues?: Record | undefined; + }) | undefined; + Delete?: (Omit & { + Key: Record | undefined; + ExpressionAttributeValues?: Record | undefined; + }) | undefined; + Update?: (Omit & { + Key: Record | undefined; + ExpressionAttributeValues?: Record | undefined; + }) | undefined; + })[] | undefined; +}; +/** + * @public + */ +export type TransactWriteCommandOutput = Omit<__TransactWriteItemsCommandOutput, "ItemCollectionMetrics"> & { + ItemCollectionMetrics?: Record & { + ItemCollectionKey?: Record | undefined; + })[]> | undefined; +}; +/** + * Accepts native JavaScript types instead of `AttributeValue`s, and calls + * TransactWriteItemsCommand operation from {@link 
@aws-sdk/client-dynamodb#TransactWriteItemsCommand}. + * + * JavaScript objects passed in as parameters are marshalled into `AttributeValue` shapes + * required by Amazon DynamoDB. Responses from DynamoDB are unmarshalled into plain JavaScript objects. + * + * @public + */ +export declare class TransactWriteCommand extends DynamoDBDocumentClientCommand { + readonly input: TransactWriteCommandInput; + protected readonly inputKeyNodes: { + TransactItems: { + "*": { + ConditionCheck: { + Key: import("../commands/utils").KeyNodeChildren; + ExpressionAttributeValues: import("../commands/utils").KeyNodeChildren; + }; + Put: { + Item: import("../commands/utils").KeyNodeChildren; + ExpressionAttributeValues: import("../commands/utils").KeyNodeChildren; + }; + Delete: { + Key: import("../commands/utils").KeyNodeChildren; + ExpressionAttributeValues: import("../commands/utils").KeyNodeChildren; + }; + Update: { + Key: import("../commands/utils").KeyNodeChildren; + ExpressionAttributeValues: import("../commands/utils").KeyNodeChildren; + }; + }; + }; + }; + protected readonly outputKeyNodes: { + ItemCollectionMetrics: { + "*": { + "*": { + ItemCollectionKey: import("../commands/utils").KeyNodeChildren; + }; + }; + }; + }; + protected readonly clientCommand: __TransactWriteItemsCommand; + readonly middlewareStack: MiddlewareStack; + constructor(input: TransactWriteCommandInput); + /** + * @internal + */ + resolveMiddleware(clientStack: MiddlewareStack, configuration: DynamoDBDocumentClientResolvedConfig, options?: __HttpHandlerOptions): Handler; +} +import type { ConditionCheck, Delete, ItemCollectionMetrics, Put, TransactWriteItem, TransactWriteItemsCommandInput as __TransactWriteItemsCommandInput, TransactWriteItemsCommandOutput as __TransactWriteItemsCommandOutput, Update } from "@aws-sdk/client-dynamodb"; +import type { NativeAttributeValue } from "@aws-sdk/util-dynamodb"; diff --git 
a/amplify/functions/deleteDocument/node_modules/@aws-sdk/lib-dynamodb/dist-types/commands/UpdateCommand.d.ts b/amplify/functions/deleteDocument/node_modules/@aws-sdk/lib-dynamodb/dist-types/commands/UpdateCommand.d.ts new file mode 100644 index 0000000..905e751 --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@aws-sdk/lib-dynamodb/dist-types/commands/UpdateCommand.d.ts @@ -0,0 +1,74 @@ +import { UpdateItemCommand as __UpdateItemCommand } from "@aws-sdk/client-dynamodb"; +import { Command as $Command } from "@smithy/smithy-client"; +import { Handler, HttpHandlerOptions as __HttpHandlerOptions, MiddlewareStack } from "@smithy/types"; +import { DynamoDBDocumentClientCommand } from "../baseCommand/DynamoDBDocumentClientCommand"; +import { DynamoDBDocumentClientResolvedConfig, ServiceInputTypes, ServiceOutputTypes } from "../DynamoDBDocumentClient"; +/** + * @public + */ +export { DynamoDBDocumentClientCommand, $Command }; +/** + * @public + */ +export type UpdateCommandInput = Omit<__UpdateItemCommandInput, "Key" | "AttributeUpdates" | "Expected" | "ExpressionAttributeValues"> & { + Key: Record | undefined; + AttributeUpdates?: Record & { + Value?: NativeAttributeValue | undefined; + }> | undefined; + Expected?: Record & { + Value?: NativeAttributeValue | undefined; + AttributeValueList?: NativeAttributeValue[] | undefined; + }> | undefined; + ExpressionAttributeValues?: Record | undefined; +}; +/** + * @public + */ +export type UpdateCommandOutput = Omit<__UpdateItemCommandOutput, "Attributes" | "ItemCollectionMetrics"> & { + Attributes?: Record | undefined; + ItemCollectionMetrics?: (Omit & { + ItemCollectionKey?: Record | undefined; + }) | undefined; +}; +/** + * Accepts native JavaScript types instead of `AttributeValue`s, and calls + * UpdateItemCommand operation from {@link @aws-sdk/client-dynamodb#UpdateItemCommand}. + * + * JavaScript objects passed in as parameters are marshalled into `AttributeValue` shapes + * required by Amazon DynamoDB. 
Responses from DynamoDB are unmarshalled into plain JavaScript objects. + * + * @public + */ +export declare class UpdateCommand extends DynamoDBDocumentClientCommand { + readonly input: UpdateCommandInput; + protected readonly inputKeyNodes: { + Key: import("../commands/utils").KeyNodeChildren; + AttributeUpdates: { + "*": { + Value: null; + }; + }; + Expected: { + "*": { + Value: null; + AttributeValueList: import("../commands/utils").KeyNodeChildren; + }; + }; + ExpressionAttributeValues: import("../commands/utils").KeyNodeChildren; + }; + protected readonly outputKeyNodes: { + Attributes: import("../commands/utils").KeyNodeChildren; + ItemCollectionMetrics: { + ItemCollectionKey: import("../commands/utils").KeyNodeChildren; + }; + }; + protected readonly clientCommand: __UpdateItemCommand; + readonly middlewareStack: MiddlewareStack; + constructor(input: UpdateCommandInput); + /** + * @internal + */ + resolveMiddleware(clientStack: MiddlewareStack, configuration: DynamoDBDocumentClientResolvedConfig, options?: __HttpHandlerOptions): Handler; +} +import type { AttributeValueUpdate, ExpectedAttributeValue, ItemCollectionMetrics, UpdateItemCommandInput as __UpdateItemCommandInput, UpdateItemCommandOutput as __UpdateItemCommandOutput } from "@aws-sdk/client-dynamodb"; +import type { NativeAttributeValue } from "@aws-sdk/util-dynamodb"; diff --git a/amplify/functions/deleteDocument/node_modules/@aws-sdk/lib-dynamodb/dist-types/commands/index.d.ts b/amplify/functions/deleteDocument/node_modules/@aws-sdk/lib-dynamodb/dist-types/commands/index.d.ts new file mode 100644 index 0000000..49e8a4e --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@aws-sdk/lib-dynamodb/dist-types/commands/index.d.ts @@ -0,0 +1,13 @@ +export * from "./BatchExecuteStatementCommand"; +export * from "./BatchGetCommand"; +export * from "./BatchWriteCommand"; +export * from "./DeleteCommand"; +export * from "./ExecuteStatementCommand"; +export * from "./ExecuteTransactionCommand"; 
+export * from "./GetCommand"; +export * from "./PutCommand"; +export * from "./QueryCommand"; +export * from "./ScanCommand"; +export * from "./TransactGetCommand"; +export * from "./TransactWriteCommand"; +export * from "./UpdateCommand"; diff --git a/amplify/functions/deleteDocument/node_modules/@aws-sdk/lib-dynamodb/dist-types/commands/utils.d.ts b/amplify/functions/deleteDocument/node_modules/@aws-sdk/lib-dynamodb/dist-types/commands/utils.d.ts new file mode 100644 index 0000000..d24d22c --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@aws-sdk/lib-dynamodb/dist-types/commands/utils.d.ts @@ -0,0 +1,33 @@ +import { marshallOptions, unmarshallOptions } from "@aws-sdk/util-dynamodb"; +/** + * @internal + */ +export type KeyNodeSelf = null; +/** + * @internal + */ +export declare const SELF: KeyNodeSelf; +/** + * @internal + */ +export type KeyNodeChildren = Record; +/** + * @internal + */ +export declare const ALL_VALUES: KeyNodeChildren; +/** + * @internal + */ +export declare const ALL_MEMBERS: KeyNodeChildren; +/** + * @internal + */ +export type KeyNodes = KeyNodeSelf | KeyNodeChildren; +/** + * @internal + */ +export declare const marshallInput: (obj: any, keyNodes: KeyNodeChildren, options?: marshallOptions) => any; +/** + * @internal + */ +export declare const unmarshallOutput: (obj: any, keyNodes: KeyNodeChildren, options?: unmarshallOptions) => any; diff --git a/amplify/functions/deleteDocument/node_modules/@aws-sdk/lib-dynamodb/dist-types/index.d.ts b/amplify/functions/deleteDocument/node_modules/@aws-sdk/lib-dynamodb/dist-types/index.d.ts new file mode 100644 index 0000000..957530d --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@aws-sdk/lib-dynamodb/dist-types/index.d.ts @@ -0,0 +1,7 @@ +export * from "./DynamoDBDocument"; +export * from "./DynamoDBDocumentClient"; +export * from "./commands"; +export * from "./pagination"; +export { NumberValueImpl as NumberValue } from "@aws-sdk/util-dynamodb"; +export { 
marshallOptions, unmarshallOptions } from "@aws-sdk/util-dynamodb"; +export { NativeAttributeValue, NativeAttributeBinary, NativeScalarAttributeValue } from "@aws-sdk/util-dynamodb"; diff --git a/amplify/functions/deleteDocument/node_modules/@aws-sdk/lib-dynamodb/dist-types/pagination/Interfaces.d.ts b/amplify/functions/deleteDocument/node_modules/@aws-sdk/lib-dynamodb/dist-types/pagination/Interfaces.d.ts new file mode 100644 index 0000000..f98a7b4 --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@aws-sdk/lib-dynamodb/dist-types/pagination/Interfaces.d.ts @@ -0,0 +1,13 @@ +import { PaginationConfiguration } from "@smithy/types"; +import { DynamoDBDocument } from "../DynamoDBDocument"; +import { DynamoDBDocumentClient } from "../DynamoDBDocumentClient"; +/** + * @public + */ +export { PaginationConfiguration }; +/** + * @public + */ +export interface DynamoDBDocumentPaginationConfiguration extends PaginationConfiguration { + client: DynamoDBDocument | DynamoDBDocumentClient; +} diff --git a/amplify/functions/deleteDocument/node_modules/@aws-sdk/lib-dynamodb/dist-types/pagination/QueryPaginator.d.ts b/amplify/functions/deleteDocument/node_modules/@aws-sdk/lib-dynamodb/dist-types/pagination/QueryPaginator.d.ts new file mode 100644 index 0000000..692d9f9 --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@aws-sdk/lib-dynamodb/dist-types/pagination/QueryPaginator.d.ts @@ -0,0 +1,11 @@ +import { Paginator } from "@smithy/types"; +import { QueryCommandInput, QueryCommandOutput } from "../commands/QueryCommand"; +import { DynamoDBDocumentPaginationConfiguration } from "./Interfaces"; +/** + * @public + */ +export { Paginator }; +/** + * @public + */ +export declare const paginateQuery: (config: DynamoDBDocumentPaginationConfiguration, input: QueryCommandInput, ...additionalArguments: any) => Paginator; diff --git a/amplify/functions/deleteDocument/node_modules/@aws-sdk/lib-dynamodb/dist-types/pagination/ScanPaginator.d.ts 
b/amplify/functions/deleteDocument/node_modules/@aws-sdk/lib-dynamodb/dist-types/pagination/ScanPaginator.d.ts new file mode 100644 index 0000000..b47d0a7 --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@aws-sdk/lib-dynamodb/dist-types/pagination/ScanPaginator.d.ts @@ -0,0 +1,11 @@ +import { Paginator } from "@smithy/types"; +import { ScanCommandInput, ScanCommandOutput } from "../commands/ScanCommand"; +import { DynamoDBDocumentPaginationConfiguration } from "./Interfaces"; +/** + * @public + */ +export { Paginator }; +/** + * @public + */ +export declare const paginateScan: (config: DynamoDBDocumentPaginationConfiguration, input: ScanCommandInput, ...additionalArguments: any) => Paginator; diff --git a/amplify/functions/deleteDocument/node_modules/@aws-sdk/lib-dynamodb/dist-types/pagination/index.d.ts b/amplify/functions/deleteDocument/node_modules/@aws-sdk/lib-dynamodb/dist-types/pagination/index.d.ts new file mode 100644 index 0000000..0d9540e --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@aws-sdk/lib-dynamodb/dist-types/pagination/index.d.ts @@ -0,0 +1,3 @@ +export * from "./Interfaces"; +export * from "./QueryPaginator"; +export * from "./ScanPaginator"; diff --git a/amplify/functions/deleteDocument/node_modules/@aws-sdk/lib-dynamodb/dist-types/ts3.4/DynamoDBDocument.d.ts b/amplify/functions/deleteDocument/node_modules/@aws-sdk/lib-dynamodb/dist-types/ts3.4/DynamoDBDocument.d.ts new file mode 100644 index 0000000..fbcd8b7 --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@aws-sdk/lib-dynamodb/dist-types/ts3.4/DynamoDBDocument.d.ts @@ -0,0 +1,221 @@ +import { DynamoDBClient } from "@aws-sdk/client-dynamodb"; +import { HttpHandlerOptions as __HttpHandlerOptions } from "@smithy/types"; +import { + BatchExecuteStatementCommandInput, + BatchExecuteStatementCommandOutput, +} from "./commands/BatchExecuteStatementCommand"; +import { + BatchGetCommandInput, + BatchGetCommandOutput, +} from 
"./commands/BatchGetCommand"; +import { + BatchWriteCommandInput, + BatchWriteCommandOutput, +} from "./commands/BatchWriteCommand"; +import { + DeleteCommandInput, + DeleteCommandOutput, +} from "./commands/DeleteCommand"; +import { + ExecuteStatementCommandInput, + ExecuteStatementCommandOutput, +} from "./commands/ExecuteStatementCommand"; +import { + ExecuteTransactionCommandInput, + ExecuteTransactionCommandOutput, +} from "./commands/ExecuteTransactionCommand"; +import { GetCommandInput, GetCommandOutput } from "./commands/GetCommand"; +import { PutCommandInput, PutCommandOutput } from "./commands/PutCommand"; +import { QueryCommandInput, QueryCommandOutput } from "./commands/QueryCommand"; +import { ScanCommandInput, ScanCommandOutput } from "./commands/ScanCommand"; +import { + TransactGetCommandInput, + TransactGetCommandOutput, +} from "./commands/TransactGetCommand"; +import { + TransactWriteCommandInput, + TransactWriteCommandOutput, +} from "./commands/TransactWriteCommand"; +import { + UpdateCommandInput, + UpdateCommandOutput, +} from "./commands/UpdateCommand"; +import { + DynamoDBDocumentClient, + TranslateConfig, +} from "./DynamoDBDocumentClient"; +export declare class DynamoDBDocument extends DynamoDBDocumentClient { + static from( + client: DynamoDBClient, + translateConfig?: TranslateConfig + ): DynamoDBDocument; + batchExecuteStatement( + args: BatchExecuteStatementCommandInput, + options?: __HttpHandlerOptions + ): Promise; + batchExecuteStatement( + args: BatchExecuteStatementCommandInput, + cb: (err: any, data?: BatchExecuteStatementCommandOutput) => void + ): void; + batchExecuteStatement( + args: BatchExecuteStatementCommandInput, + options: __HttpHandlerOptions, + cb: (err: any, data?: BatchExecuteStatementCommandOutput) => void + ): void; + batchGet( + args: BatchGetCommandInput, + options?: __HttpHandlerOptions + ): Promise; + batchGet( + args: BatchGetCommandInput, + cb: (err: any, data?: BatchGetCommandOutput) => void + ): void; + 
batchGet( + args: BatchGetCommandInput, + options: __HttpHandlerOptions, + cb: (err: any, data?: BatchGetCommandOutput) => void + ): void; + batchWrite( + args: BatchWriteCommandInput, + options?: __HttpHandlerOptions + ): Promise; + batchWrite( + args: BatchWriteCommandInput, + cb: (err: any, data?: BatchWriteCommandOutput) => void + ): void; + batchWrite( + args: BatchWriteCommandInput, + options: __HttpHandlerOptions, + cb: (err: any, data?: BatchWriteCommandOutput) => void + ): void; + delete( + args: DeleteCommandInput, + options?: __HttpHandlerOptions + ): Promise; + delete( + args: DeleteCommandInput, + cb: (err: any, data?: DeleteCommandOutput) => void + ): void; + delete( + args: DeleteCommandInput, + options: __HttpHandlerOptions, + cb: (err: any, data?: DeleteCommandOutput) => void + ): void; + executeStatement( + args: ExecuteStatementCommandInput, + options?: __HttpHandlerOptions + ): Promise; + executeStatement( + args: ExecuteStatementCommandInput, + cb: (err: any, data?: ExecuteStatementCommandOutput) => void + ): void; + executeStatement( + args: ExecuteStatementCommandInput, + options: __HttpHandlerOptions, + cb: (err: any, data?: ExecuteStatementCommandOutput) => void + ): void; + executeTransaction( + args: ExecuteTransactionCommandInput, + options?: __HttpHandlerOptions + ): Promise; + executeTransaction( + args: ExecuteTransactionCommandInput, + cb: (err: any, data?: ExecuteTransactionCommandOutput) => void + ): void; + executeTransaction( + args: ExecuteTransactionCommandInput, + options: __HttpHandlerOptions, + cb: (err: any, data?: ExecuteTransactionCommandOutput) => void + ): void; + get( + args: GetCommandInput, + options?: __HttpHandlerOptions + ): Promise; + get( + args: GetCommandInput, + cb: (err: any, data?: GetCommandOutput) => void + ): void; + get( + args: GetCommandInput, + options: __HttpHandlerOptions, + cb: (err: any, data?: GetCommandOutput) => void + ): void; + put( + args: PutCommandInput, + options?: __HttpHandlerOptions + 
): Promise; + put( + args: PutCommandInput, + cb: (err: any, data?: PutCommandOutput) => void + ): void; + put( + args: PutCommandInput, + options: __HttpHandlerOptions, + cb: (err: any, data?: PutCommandOutput) => void + ): void; + query( + args: QueryCommandInput, + options?: __HttpHandlerOptions + ): Promise; + query( + args: QueryCommandInput, + cb: (err: any, data?: QueryCommandOutput) => void + ): void; + query( + args: QueryCommandInput, + options: __HttpHandlerOptions, + cb: (err: any, data?: QueryCommandOutput) => void + ): void; + scan( + args: ScanCommandInput, + options?: __HttpHandlerOptions + ): Promise; + scan( + args: ScanCommandInput, + cb: (err: any, data?: ScanCommandOutput) => void + ): void; + scan( + args: ScanCommandInput, + options: __HttpHandlerOptions, + cb: (err: any, data?: ScanCommandOutput) => void + ): void; + transactGet( + args: TransactGetCommandInput, + options?: __HttpHandlerOptions + ): Promise; + transactGet( + args: TransactGetCommandInput, + cb: (err: any, data?: TransactGetCommandOutput) => void + ): void; + transactGet( + args: TransactGetCommandInput, + options: __HttpHandlerOptions, + cb: (err: any, data?: TransactGetCommandOutput) => void + ): void; + transactWrite( + args: TransactWriteCommandInput, + options?: __HttpHandlerOptions + ): Promise; + transactWrite( + args: TransactWriteCommandInput, + cb: (err: any, data?: TransactWriteCommandOutput) => void + ): void; + transactWrite( + args: TransactWriteCommandInput, + options: __HttpHandlerOptions, + cb: (err: any, data?: TransactWriteCommandOutput) => void + ): void; + update( + args: UpdateCommandInput, + options?: __HttpHandlerOptions + ): Promise; + update( + args: UpdateCommandInput, + cb: (err: any, data?: UpdateCommandOutput) => void + ): void; + update( + args: UpdateCommandInput, + options: __HttpHandlerOptions, + cb: (err: any, data?: UpdateCommandOutput) => void + ): void; +} diff --git 
a/amplify/functions/deleteDocument/node_modules/@aws-sdk/lib-dynamodb/dist-types/ts3.4/DynamoDBDocumentClient.d.ts b/amplify/functions/deleteDocument/node_modules/@aws-sdk/lib-dynamodb/dist-types/ts3.4/DynamoDBDocumentClient.d.ts new file mode 100644 index 0000000..67494ec --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@aws-sdk/lib-dynamodb/dist-types/ts3.4/DynamoDBDocumentClient.d.ts @@ -0,0 +1,105 @@ +import { + DynamoDBClient, + DynamoDBClientResolvedConfig, + ServiceInputTypes as __ServiceInputTypes, + ServiceOutputTypes as __ServiceOutputTypes, +} from "@aws-sdk/client-dynamodb"; +import { marshallOptions, unmarshallOptions } from "@aws-sdk/util-dynamodb"; +import { Client as __Client } from "@smithy/smithy-client"; +import { HttpHandlerOptions as __HttpHandlerOptions } from "@smithy/types"; +import { + BatchExecuteStatementCommandInput, + BatchExecuteStatementCommandOutput, +} from "./commands/BatchExecuteStatementCommand"; +import { + BatchGetCommandInput, + BatchGetCommandOutput, +} from "./commands/BatchGetCommand"; +import { + BatchWriteCommandInput, + BatchWriteCommandOutput, +} from "./commands/BatchWriteCommand"; +import { + DeleteCommandInput, + DeleteCommandOutput, +} from "./commands/DeleteCommand"; +import { + ExecuteStatementCommandInput, + ExecuteStatementCommandOutput, +} from "./commands/ExecuteStatementCommand"; +import { + ExecuteTransactionCommandInput, + ExecuteTransactionCommandOutput, +} from "./commands/ExecuteTransactionCommand"; +import { GetCommandInput, GetCommandOutput } from "./commands/GetCommand"; +import { PutCommandInput, PutCommandOutput } from "./commands/PutCommand"; +import { QueryCommandInput, QueryCommandOutput } from "./commands/QueryCommand"; +import { ScanCommandInput, ScanCommandOutput } from "./commands/ScanCommand"; +import { + TransactGetCommandInput, + TransactGetCommandOutput, +} from "./commands/TransactGetCommand"; +import { + TransactWriteCommandInput, + TransactWriteCommandOutput, +} from 
"./commands/TransactWriteCommand"; +import { + UpdateCommandInput, + UpdateCommandOutput, +} from "./commands/UpdateCommand"; +export { __Client }; +export type ServiceInputTypes = + | __ServiceInputTypes + | BatchExecuteStatementCommandInput + | BatchGetCommandInput + | BatchWriteCommandInput + | DeleteCommandInput + | ExecuteStatementCommandInput + | ExecuteTransactionCommandInput + | GetCommandInput + | PutCommandInput + | QueryCommandInput + | ScanCommandInput + | TransactGetCommandInput + | TransactWriteCommandInput + | UpdateCommandInput; +export type ServiceOutputTypes = + | __ServiceOutputTypes + | BatchExecuteStatementCommandOutput + | BatchGetCommandOutput + | BatchWriteCommandOutput + | DeleteCommandOutput + | ExecuteStatementCommandOutput + | ExecuteTransactionCommandOutput + | GetCommandOutput + | PutCommandOutput + | QueryCommandOutput + | ScanCommandOutput + | TransactGetCommandOutput + | TransactWriteCommandOutput + | UpdateCommandOutput; +export type TranslateConfig = { + marshallOptions?: marshallOptions; + unmarshallOptions?: unmarshallOptions; +}; +export type DynamoDBDocumentClientResolvedConfig = + DynamoDBClientResolvedConfig & { + translateConfig?: TranslateConfig; + }; +export declare class DynamoDBDocumentClient extends __Client< + __HttpHandlerOptions, + ServiceInputTypes, + ServiceOutputTypes, + DynamoDBDocumentClientResolvedConfig +> { + readonly config: DynamoDBDocumentClientResolvedConfig; + protected constructor( + client: DynamoDBClient, + translateConfig?: TranslateConfig + ); + static from( + client: DynamoDBClient, + translateConfig?: TranslateConfig + ): DynamoDBDocumentClient; + destroy(): void; +} diff --git a/amplify/functions/deleteDocument/node_modules/@aws-sdk/lib-dynamodb/dist-types/ts3.4/baseCommand/DynamoDBDocumentClientCommand.d.ts b/amplify/functions/deleteDocument/node_modules/@aws-sdk/lib-dynamodb/dist-types/ts3.4/baseCommand/DynamoDBDocumentClientCommand.d.ts new file mode 100644 index 0000000..17c787f --- 
/dev/null +++ b/amplify/functions/deleteDocument/node_modules/@aws-sdk/lib-dynamodb/dist-types/ts3.4/baseCommand/DynamoDBDocumentClientCommand.d.ts @@ -0,0 +1,30 @@ +import { Command as $Command } from "@smithy/smithy-client"; +import { MiddlewareStack } from "@smithy/types"; +import { KeyNodeChildren } from "../commands/utils"; +import { DynamoDBDocumentClientResolvedConfig } from "../DynamoDBDocumentClient"; +export declare abstract class DynamoDBDocumentClientCommand< + Input extends object, + Output extends object, + BaseInput extends object, + BaseOutput extends object, + ResolvedClientConfiguration +> extends $Command< + Input | BaseInput, + Output | BaseOutput, + ResolvedClientConfiguration +> { + protected abstract readonly inputKeyNodes: KeyNodeChildren; + protected abstract readonly outputKeyNodes: KeyNodeChildren; + protected abstract clientCommand: $Command< + Input | BaseInput, + Output | BaseOutput, + ResolvedClientConfiguration + >; + abstract middlewareStack: MiddlewareStack< + Input | BaseInput, + Output | BaseOutput + >; + protected addMarshallingMiddleware( + configuration: DynamoDBDocumentClientResolvedConfig + ): void; +} diff --git a/amplify/functions/deleteDocument/node_modules/@aws-sdk/lib-dynamodb/dist-types/ts3.4/commands/BatchExecuteStatementCommand.d.ts b/amplify/functions/deleteDocument/node_modules/@aws-sdk/lib-dynamodb/dist-types/ts3.4/commands/BatchExecuteStatementCommand.d.ts new file mode 100644 index 0000000..d8f3dfe --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@aws-sdk/lib-dynamodb/dist-types/ts3.4/commands/BatchExecuteStatementCommand.d.ts @@ -0,0 +1,96 @@ +import { BatchExecuteStatementCommand as __BatchExecuteStatementCommand } from "@aws-sdk/client-dynamodb"; +import { Command as $Command } from "@smithy/smithy-client"; +import { + Handler, + HttpHandlerOptions as __HttpHandlerOptions, + MiddlewareStack, +} from "@smithy/types"; +import { DynamoDBDocumentClientCommand } from 
"../baseCommand/DynamoDBDocumentClientCommand"; +import { + DynamoDBDocumentClientResolvedConfig, + ServiceInputTypes, + ServiceOutputTypes, +} from "../DynamoDBDocumentClient"; +export { DynamoDBDocumentClientCommand, $Command }; +export type BatchExecuteStatementCommandInput = Pick< + __BatchExecuteStatementCommandInput, + Exclude +> & { + Statements: + | (Pick< + BatchStatementRequest, + Exclude + > & { + Parameters?: NativeAttributeValue[] | undefined; + })[] + | undefined; +}; +export type BatchExecuteStatementCommandOutput = Pick< + __BatchExecuteStatementCommandOutput, + Exclude +> & { + Responses?: + | (Pick< + BatchStatementResponse, + Exclude + > & { + Error?: + | (Pick< + BatchStatementError, + Exclude + > & { + Item?: Record | undefined; + }) + | undefined; + Item?: Record | undefined; + })[] + | undefined; +}; +export declare class BatchExecuteStatementCommand extends DynamoDBDocumentClientCommand< + BatchExecuteStatementCommandInput, + BatchExecuteStatementCommandOutput, + __BatchExecuteStatementCommandInput, + __BatchExecuteStatementCommandOutput, + DynamoDBDocumentClientResolvedConfig +> { + readonly input: BatchExecuteStatementCommandInput; + protected readonly inputKeyNodes: { + Statements: { + "*": { + Parameters: import("../commands/utils").KeyNodeChildren; + }; + }; + }; + protected readonly outputKeyNodes: { + Responses: { + "*": { + Error: { + Item: import("../commands/utils").KeyNodeChildren; + }; + Item: import("../commands/utils").KeyNodeChildren; + }; + }; + }; + protected readonly clientCommand: __BatchExecuteStatementCommand; + readonly middlewareStack: MiddlewareStack< + BatchExecuteStatementCommandInput | __BatchExecuteStatementCommandInput, + BatchExecuteStatementCommandOutput | __BatchExecuteStatementCommandOutput + >; + constructor(input: BatchExecuteStatementCommandInput); + resolveMiddleware( + clientStack: MiddlewareStack, + configuration: DynamoDBDocumentClientResolvedConfig, + options?: __HttpHandlerOptions + ): Handler< + 
BatchExecuteStatementCommandInput, + BatchExecuteStatementCommandOutput + >; +} +import { + BatchExecuteStatementCommandInput as __BatchExecuteStatementCommandInput, + BatchExecuteStatementCommandOutput as __BatchExecuteStatementCommandOutput, + BatchStatementError, + BatchStatementRequest, + BatchStatementResponse, +} from "@aws-sdk/client-dynamodb"; +import { NativeAttributeValue } from "@aws-sdk/util-dynamodb"; diff --git a/amplify/functions/deleteDocument/node_modules/@aws-sdk/lib-dynamodb/dist-types/ts3.4/commands/BatchGetCommand.d.ts b/amplify/functions/deleteDocument/node_modules/@aws-sdk/lib-dynamodb/dist-types/ts3.4/commands/BatchGetCommand.d.ts new file mode 100644 index 0000000..6203cf6 --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@aws-sdk/lib-dynamodb/dist-types/ts3.4/commands/BatchGetCommand.d.ts @@ -0,0 +1,92 @@ +import { BatchGetItemCommand as __BatchGetItemCommand } from "@aws-sdk/client-dynamodb"; +import { Command as $Command } from "@smithy/smithy-client"; +import { + Handler, + HttpHandlerOptions as __HttpHandlerOptions, + MiddlewareStack, +} from "@smithy/types"; +import { DynamoDBDocumentClientCommand } from "../baseCommand/DynamoDBDocumentClientCommand"; +import { + DynamoDBDocumentClientResolvedConfig, + ServiceInputTypes, + ServiceOutputTypes, +} from "../DynamoDBDocumentClient"; +export { DynamoDBDocumentClientCommand, $Command }; +export type BatchGetCommandInput = Pick< + __BatchGetItemCommandInput, + Exclude +> & { + RequestItems: + | Record< + string, + Pick> & { + Keys: Record[] | undefined; + } + > + | undefined; +}; +export type BatchGetCommandOutput = Pick< + __BatchGetItemCommandOutput, + Exclude +> & { + Responses?: + | Record[]> + | undefined; + UnprocessedKeys?: + | Record< + string, + Pick> & { + Keys: Record[] | undefined; + } + > + | undefined; +}; +export declare class BatchGetCommand extends DynamoDBDocumentClientCommand< + BatchGetCommandInput, + BatchGetCommandOutput, + __BatchGetItemCommandInput, + 
__BatchGetItemCommandOutput, + DynamoDBDocumentClientResolvedConfig +> { + readonly input: BatchGetCommandInput; + protected readonly inputKeyNodes: { + RequestItems: { + "*": { + Keys: { + "*": import("../commands/utils").KeyNodeChildren; + }; + }; + }; + }; + protected readonly outputKeyNodes: { + Responses: { + "*": { + "*": import("../commands/utils").KeyNodeChildren; + }; + }; + UnprocessedKeys: { + "*": { + Keys: { + "*": import("../commands/utils").KeyNodeChildren; + }; + }; + }; + }; + protected readonly clientCommand: __BatchGetItemCommand; + readonly middlewareStack: MiddlewareStack< + BatchGetCommandInput | __BatchGetItemCommandInput, + BatchGetCommandOutput | __BatchGetItemCommandOutput + >; + constructor(input: BatchGetCommandInput); + resolveMiddleware( + clientStack: MiddlewareStack, + configuration: DynamoDBDocumentClientResolvedConfig, + options?: __HttpHandlerOptions + ): Handler; +} +import { + BatchGetItemCommandInput as __BatchGetItemCommandInput, + BatchGetItemCommandOutput as __BatchGetItemCommandOutput, + KeysAndAttributes, +} from "@aws-sdk/client-dynamodb"; +import { NativeAttributeValue } from "@aws-sdk/util-dynamodb"; diff --git a/amplify/functions/deleteDocument/node_modules/@aws-sdk/lib-dynamodb/dist-types/ts3.4/commands/BatchWriteCommand.d.ts b/amplify/functions/deleteDocument/node_modules/@aws-sdk/lib-dynamodb/dist-types/ts3.4/commands/BatchWriteCommand.d.ts new file mode 100644 index 0000000..a02d177 --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@aws-sdk/lib-dynamodb/dist-types/ts3.4/commands/BatchWriteCommand.d.ts @@ -0,0 +1,142 @@ +import { BatchWriteItemCommand as __BatchWriteItemCommand } from "@aws-sdk/client-dynamodb"; +import { Command as $Command } from "@smithy/smithy-client"; +import { + Handler, + HttpHandlerOptions as __HttpHandlerOptions, + MiddlewareStack, +} from "@smithy/types"; +import { DynamoDBDocumentClientCommand } from "../baseCommand/DynamoDBDocumentClientCommand"; +import { + 
DynamoDBDocumentClientResolvedConfig, + ServiceInputTypes, + ServiceOutputTypes, +} from "../DynamoDBDocumentClient"; +export { DynamoDBDocumentClientCommand, $Command }; +export type BatchWriteCommandInput = Pick< + __BatchWriteItemCommandInput, + Exclude +> & { + RequestItems: + | Record< + string, + (Pick< + WriteRequest, + Exclude + > & { + PutRequest?: + | (Pick> & { + Item: Record | undefined; + }) + | undefined; + DeleteRequest?: + | (Pick> & { + Key: Record | undefined; + }) + | undefined; + })[] + > + | undefined; +}; +export type BatchWriteCommandOutput = Pick< + __BatchWriteItemCommandOutput, + Exclude< + keyof __BatchWriteItemCommandOutput, + "UnprocessedItems" | "ItemCollectionMetrics" + > +> & { + UnprocessedItems?: + | Record< + string, + (Pick< + WriteRequest, + Exclude + > & { + PutRequest?: + | (Pick> & { + Item: Record | undefined; + }) + | undefined; + DeleteRequest?: + | (Pick> & { + Key: Record | undefined; + }) + | undefined; + })[] + > + | undefined; + ItemCollectionMetrics?: + | Record< + string, + (Pick< + ItemCollectionMetrics, + Exclude + > & { + ItemCollectionKey?: Record | undefined; + })[] + > + | undefined; +}; +export declare class BatchWriteCommand extends DynamoDBDocumentClientCommand< + BatchWriteCommandInput, + BatchWriteCommandOutput, + __BatchWriteItemCommandInput, + __BatchWriteItemCommandOutput, + DynamoDBDocumentClientResolvedConfig +> { + readonly input: BatchWriteCommandInput; + protected readonly inputKeyNodes: { + RequestItems: { + "*": { + "*": { + PutRequest: { + Item: import("../commands/utils").KeyNodeChildren; + }; + DeleteRequest: { + Key: import("../commands/utils").KeyNodeChildren; + }; + }; + }; + }; + }; + protected readonly outputKeyNodes: { + UnprocessedItems: { + "*": { + "*": { + PutRequest: { + Item: import("../commands/utils").KeyNodeChildren; + }; + DeleteRequest: { + Key: import("../commands/utils").KeyNodeChildren; + }; + }; + }; + }; + ItemCollectionMetrics: { + "*": { + "*": { + ItemCollectionKey: 
import("../commands/utils").KeyNodeChildren; + }; + }; + }; + }; + protected readonly clientCommand: __BatchWriteItemCommand; + readonly middlewareStack: MiddlewareStack< + BatchWriteCommandInput | __BatchWriteItemCommandInput, + BatchWriteCommandOutput | __BatchWriteItemCommandOutput + >; + constructor(input: BatchWriteCommandInput); + resolveMiddleware( + clientStack: MiddlewareStack, + configuration: DynamoDBDocumentClientResolvedConfig, + options?: __HttpHandlerOptions + ): Handler; +} +import { + BatchWriteItemCommandInput as __BatchWriteItemCommandInput, + BatchWriteItemCommandOutput as __BatchWriteItemCommandOutput, + DeleteRequest, + ItemCollectionMetrics, + PutRequest, + WriteRequest, +} from "@aws-sdk/client-dynamodb"; +import { NativeAttributeValue } from "@aws-sdk/util-dynamodb"; diff --git a/amplify/functions/deleteDocument/node_modules/@aws-sdk/lib-dynamodb/dist-types/ts3.4/commands/DeleteCommand.d.ts b/amplify/functions/deleteDocument/node_modules/@aws-sdk/lib-dynamodb/dist-types/ts3.4/commands/DeleteCommand.d.ts new file mode 100644 index 0000000..9906c10 --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@aws-sdk/lib-dynamodb/dist-types/ts3.4/commands/DeleteCommand.d.ts @@ -0,0 +1,96 @@ +import { DeleteItemCommand as __DeleteItemCommand } from "@aws-sdk/client-dynamodb"; +import { Command as $Command } from "@smithy/smithy-client"; +import { + Handler, + HttpHandlerOptions as __HttpHandlerOptions, + MiddlewareStack, +} from "@smithy/types"; +import { DynamoDBDocumentClientCommand } from "../baseCommand/DynamoDBDocumentClientCommand"; +import { + DynamoDBDocumentClientResolvedConfig, + ServiceInputTypes, + ServiceOutputTypes, +} from "../DynamoDBDocumentClient"; +export { DynamoDBDocumentClientCommand, $Command }; +export type DeleteCommandInput = Pick< + __DeleteItemCommandInput, + Exclude< + keyof __DeleteItemCommandInput, + "Key" | "Expected" | "ExpressionAttributeValues" + > +> & { + Key: Record | undefined; + Expected?: + | 
Record< + string, + Pick< + ExpectedAttributeValue, + Exclude + > & { + Value?: NativeAttributeValue | undefined; + AttributeValueList?: NativeAttributeValue[] | undefined; + } + > + | undefined; + ExpressionAttributeValues?: Record | undefined; +}; +export type DeleteCommandOutput = Pick< + __DeleteItemCommandOutput, + Exclude< + keyof __DeleteItemCommandOutput, + "Attributes" | "ItemCollectionMetrics" + > +> & { + Attributes?: Record | undefined; + ItemCollectionMetrics?: + | (Pick< + ItemCollectionMetrics, + Exclude + > & { + ItemCollectionKey?: Record | undefined; + }) + | undefined; +}; +export declare class DeleteCommand extends DynamoDBDocumentClientCommand< + DeleteCommandInput, + DeleteCommandOutput, + __DeleteItemCommandInput, + __DeleteItemCommandOutput, + DynamoDBDocumentClientResolvedConfig +> { + readonly input: DeleteCommandInput; + protected readonly inputKeyNodes: { + Key: import("../commands/utils").KeyNodeChildren; + Expected: { + "*": { + Value: null; + AttributeValueList: import("../commands/utils").KeyNodeChildren; + }; + }; + ExpressionAttributeValues: import("../commands/utils").KeyNodeChildren; + }; + protected readonly outputKeyNodes: { + Attributes: import("../commands/utils").KeyNodeChildren; + ItemCollectionMetrics: { + ItemCollectionKey: import("../commands/utils").KeyNodeChildren; + }; + }; + protected readonly clientCommand: __DeleteItemCommand; + readonly middlewareStack: MiddlewareStack< + DeleteCommandInput | __DeleteItemCommandInput, + DeleteCommandOutput | __DeleteItemCommandOutput + >; + constructor(input: DeleteCommandInput); + resolveMiddleware( + clientStack: MiddlewareStack, + configuration: DynamoDBDocumentClientResolvedConfig, + options?: __HttpHandlerOptions + ): Handler; +} +import { + DeleteItemCommandInput as __DeleteItemCommandInput, + DeleteItemCommandOutput as __DeleteItemCommandOutput, + ExpectedAttributeValue, + ItemCollectionMetrics, +} from "@aws-sdk/client-dynamodb"; +import { NativeAttributeValue } from 
"@aws-sdk/util-dynamodb"; diff --git a/amplify/functions/deleteDocument/node_modules/@aws-sdk/lib-dynamodb/dist-types/ts3.4/commands/ExecuteStatementCommand.d.ts b/amplify/functions/deleteDocument/node_modules/@aws-sdk/lib-dynamodb/dist-types/ts3.4/commands/ExecuteStatementCommand.d.ts new file mode 100644 index 0000000..938727c --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@aws-sdk/lib-dynamodb/dist-types/ts3.4/commands/ExecuteStatementCommand.d.ts @@ -0,0 +1,61 @@ +import { ExecuteStatementCommand as __ExecuteStatementCommand } from "@aws-sdk/client-dynamodb"; +import { Command as $Command } from "@smithy/smithy-client"; +import { + Handler, + HttpHandlerOptions as __HttpHandlerOptions, + MiddlewareStack, +} from "@smithy/types"; +import { DynamoDBDocumentClientCommand } from "../baseCommand/DynamoDBDocumentClientCommand"; +import { + DynamoDBDocumentClientResolvedConfig, + ServiceInputTypes, + ServiceOutputTypes, +} from "../DynamoDBDocumentClient"; +export { DynamoDBDocumentClientCommand, $Command }; +export type ExecuteStatementCommandInput = Pick< + __ExecuteStatementCommandInput, + Exclude +> & { + Parameters?: NativeAttributeValue[] | undefined; +}; +export type ExecuteStatementCommandOutput = Pick< + __ExecuteStatementCommandOutput, + Exclude +> & { + Items?: Record[] | undefined; + LastEvaluatedKey?: Record | undefined; +}; +export declare class ExecuteStatementCommand extends DynamoDBDocumentClientCommand< + ExecuteStatementCommandInput, + ExecuteStatementCommandOutput, + __ExecuteStatementCommandInput, + __ExecuteStatementCommandOutput, + DynamoDBDocumentClientResolvedConfig +> { + readonly input: ExecuteStatementCommandInput; + protected readonly inputKeyNodes: { + Parameters: import("../commands/utils").KeyNodeChildren; + }; + protected readonly outputKeyNodes: { + Items: { + "*": import("../commands/utils").KeyNodeChildren; + }; + LastEvaluatedKey: import("../commands/utils").KeyNodeChildren; + }; + protected readonly 
clientCommand: __ExecuteStatementCommand; + readonly middlewareStack: MiddlewareStack< + ExecuteStatementCommandInput | __ExecuteStatementCommandInput, + ExecuteStatementCommandOutput | __ExecuteStatementCommandOutput + >; + constructor(input: ExecuteStatementCommandInput); + resolveMiddleware( + clientStack: MiddlewareStack, + configuration: DynamoDBDocumentClientResolvedConfig, + options?: __HttpHandlerOptions + ): Handler; +} +import { + ExecuteStatementCommandInput as __ExecuteStatementCommandInput, + ExecuteStatementCommandOutput as __ExecuteStatementCommandOutput, +} from "@aws-sdk/client-dynamodb"; +import { NativeAttributeValue } from "@aws-sdk/util-dynamodb"; diff --git a/amplify/functions/deleteDocument/node_modules/@aws-sdk/lib-dynamodb/dist-types/ts3.4/commands/ExecuteTransactionCommand.d.ts b/amplify/functions/deleteDocument/node_modules/@aws-sdk/lib-dynamodb/dist-types/ts3.4/commands/ExecuteTransactionCommand.d.ts new file mode 100644 index 0000000..10205be --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@aws-sdk/lib-dynamodb/dist-types/ts3.4/commands/ExecuteTransactionCommand.d.ts @@ -0,0 +1,78 @@ +import { ExecuteTransactionCommand as __ExecuteTransactionCommand } from "@aws-sdk/client-dynamodb"; +import { Command as $Command } from "@smithy/smithy-client"; +import { + Handler, + HttpHandlerOptions as __HttpHandlerOptions, + MiddlewareStack, +} from "@smithy/types"; +import { DynamoDBDocumentClientCommand } from "../baseCommand/DynamoDBDocumentClientCommand"; +import { + DynamoDBDocumentClientResolvedConfig, + ServiceInputTypes, + ServiceOutputTypes, +} from "../DynamoDBDocumentClient"; +export { DynamoDBDocumentClientCommand, $Command }; +export type ExecuteTransactionCommandInput = Pick< + __ExecuteTransactionCommandInput, + Exclude +> & { + TransactStatements: + | (Pick< + ParameterizedStatement, + Exclude + > & { + Parameters?: NativeAttributeValue[] | undefined; + })[] + | undefined; +}; +export type 
ExecuteTransactionCommandOutput = Pick< + __ExecuteTransactionCommandOutput, + Exclude +> & { + Responses?: + | (Pick> & { + Item?: Record | undefined; + })[] + | undefined; +}; +export declare class ExecuteTransactionCommand extends DynamoDBDocumentClientCommand< + ExecuteTransactionCommandInput, + ExecuteTransactionCommandOutput, + __ExecuteTransactionCommandInput, + __ExecuteTransactionCommandOutput, + DynamoDBDocumentClientResolvedConfig +> { + readonly input: ExecuteTransactionCommandInput; + protected readonly inputKeyNodes: { + TransactStatements: { + "*": { + Parameters: import("../commands/utils").KeyNodeChildren; + }; + }; + }; + protected readonly outputKeyNodes: { + Responses: { + "*": { + Item: import("../commands/utils").KeyNodeChildren; + }; + }; + }; + protected readonly clientCommand: __ExecuteTransactionCommand; + readonly middlewareStack: MiddlewareStack< + ExecuteTransactionCommandInput | __ExecuteTransactionCommandInput, + ExecuteTransactionCommandOutput | __ExecuteTransactionCommandOutput + >; + constructor(input: ExecuteTransactionCommandInput); + resolveMiddleware( + clientStack: MiddlewareStack, + configuration: DynamoDBDocumentClientResolvedConfig, + options?: __HttpHandlerOptions + ): Handler; +} +import { + ExecuteTransactionCommandInput as __ExecuteTransactionCommandInput, + ExecuteTransactionCommandOutput as __ExecuteTransactionCommandOutput, + ItemResponse, + ParameterizedStatement, +} from "@aws-sdk/client-dynamodb"; +import { NativeAttributeValue } from "@aws-sdk/util-dynamodb"; diff --git a/amplify/functions/deleteDocument/node_modules/@aws-sdk/lib-dynamodb/dist-types/ts3.4/commands/GetCommand.d.ts b/amplify/functions/deleteDocument/node_modules/@aws-sdk/lib-dynamodb/dist-types/ts3.4/commands/GetCommand.d.ts new file mode 100644 index 0000000..dba5fd4 --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@aws-sdk/lib-dynamodb/dist-types/ts3.4/commands/GetCommand.d.ts @@ -0,0 +1,57 @@ +import { GetItemCommand as 
__GetItemCommand } from "@aws-sdk/client-dynamodb"; +import { Command as $Command } from "@smithy/smithy-client"; +import { + Handler, + HttpHandlerOptions as __HttpHandlerOptions, + MiddlewareStack, +} from "@smithy/types"; +import { DynamoDBDocumentClientCommand } from "../baseCommand/DynamoDBDocumentClientCommand"; +import { + DynamoDBDocumentClientResolvedConfig, + ServiceInputTypes, + ServiceOutputTypes, +} from "../DynamoDBDocumentClient"; +export { DynamoDBDocumentClientCommand, $Command }; +export type GetCommandInput = Pick< + __GetItemCommandInput, + Exclude +> & { + Key: Record | undefined; +}; +export type GetCommandOutput = Pick< + __GetItemCommandOutput, + Exclude +> & { + Item?: Record | undefined; +}; +export declare class GetCommand extends DynamoDBDocumentClientCommand< + GetCommandInput, + GetCommandOutput, + __GetItemCommandInput, + __GetItemCommandOutput, + DynamoDBDocumentClientResolvedConfig +> { + readonly input: GetCommandInput; + protected readonly inputKeyNodes: { + Key: import("../commands/utils").KeyNodeChildren; + }; + protected readonly outputKeyNodes: { + Item: import("../commands/utils").KeyNodeChildren; + }; + protected readonly clientCommand: __GetItemCommand; + readonly middlewareStack: MiddlewareStack< + GetCommandInput | __GetItemCommandInput, + GetCommandOutput | __GetItemCommandOutput + >; + constructor(input: GetCommandInput); + resolveMiddleware( + clientStack: MiddlewareStack, + configuration: DynamoDBDocumentClientResolvedConfig, + options?: __HttpHandlerOptions + ): Handler; +} +import { + GetItemCommandInput as __GetItemCommandInput, + GetItemCommandOutput as __GetItemCommandOutput, +} from "@aws-sdk/client-dynamodb"; +import { NativeAttributeValue } from "@aws-sdk/util-dynamodb"; diff --git a/amplify/functions/deleteDocument/node_modules/@aws-sdk/lib-dynamodb/dist-types/ts3.4/commands/PutCommand.d.ts b/amplify/functions/deleteDocument/node_modules/@aws-sdk/lib-dynamodb/dist-types/ts3.4/commands/PutCommand.d.ts new file 
mode 100644 index 0000000..af2ca2c --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@aws-sdk/lib-dynamodb/dist-types/ts3.4/commands/PutCommand.d.ts @@ -0,0 +1,93 @@ +import { PutItemCommand as __PutItemCommand } from "@aws-sdk/client-dynamodb"; +import { Command as $Command } from "@smithy/smithy-client"; +import { + Handler, + HttpHandlerOptions as __HttpHandlerOptions, + MiddlewareStack, +} from "@smithy/types"; +import { DynamoDBDocumentClientCommand } from "../baseCommand/DynamoDBDocumentClientCommand"; +import { + DynamoDBDocumentClientResolvedConfig, + ServiceInputTypes, + ServiceOutputTypes, +} from "../DynamoDBDocumentClient"; +export { DynamoDBDocumentClientCommand, $Command }; +export type PutCommandInput = Pick< + __PutItemCommandInput, + Exclude< + keyof __PutItemCommandInput, + "Item" | "Expected" | "ExpressionAttributeValues" + > +> & { + Item: Record | undefined; + Expected?: + | Record< + string, + Pick< + ExpectedAttributeValue, + Exclude + > & { + Value?: NativeAttributeValue | undefined; + AttributeValueList?: NativeAttributeValue[] | undefined; + } + > + | undefined; + ExpressionAttributeValues?: Record | undefined; +}; +export type PutCommandOutput = Pick< + __PutItemCommandOutput, + Exclude +> & { + Attributes?: Record | undefined; + ItemCollectionMetrics?: + | (Pick< + ItemCollectionMetrics, + Exclude + > & { + ItemCollectionKey?: Record | undefined; + }) + | undefined; +}; +export declare class PutCommand extends DynamoDBDocumentClientCommand< + PutCommandInput, + PutCommandOutput, + __PutItemCommandInput, + __PutItemCommandOutput, + DynamoDBDocumentClientResolvedConfig +> { + readonly input: PutCommandInput; + protected readonly inputKeyNodes: { + Item: import("../commands/utils").KeyNodeChildren; + Expected: { + "*": { + Value: null; + AttributeValueList: import("../commands/utils").KeyNodeChildren; + }; + }; + ExpressionAttributeValues: import("../commands/utils").KeyNodeChildren; + }; + protected readonly outputKeyNodes: 
{ + Attributes: import("../commands/utils").KeyNodeChildren; + ItemCollectionMetrics: { + ItemCollectionKey: import("../commands/utils").KeyNodeChildren; + }; + }; + protected readonly clientCommand: __PutItemCommand; + readonly middlewareStack: MiddlewareStack< + PutCommandInput | __PutItemCommandInput, + PutCommandOutput | __PutItemCommandOutput + >; + constructor(input: PutCommandInput); + resolveMiddleware( + clientStack: MiddlewareStack, + configuration: DynamoDBDocumentClientResolvedConfig, + options?: __HttpHandlerOptions + ): Handler; +} +import { + ExpectedAttributeValue, + ItemCollectionMetrics, + PutItemCommandInput as __PutItemCommandInput, + PutItemCommandOutput as __PutItemCommandOutput, +} from "@aws-sdk/client-dynamodb"; +import { NativeAttributeValue } from "@aws-sdk/util-dynamodb"; diff --git a/amplify/functions/deleteDocument/node_modules/@aws-sdk/lib-dynamodb/dist-types/ts3.4/commands/QueryCommand.d.ts b/amplify/functions/deleteDocument/node_modules/@aws-sdk/lib-dynamodb/dist-types/ts3.4/commands/QueryCommand.d.ts new file mode 100644 index 0000000..80c57e9 --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@aws-sdk/lib-dynamodb/dist-types/ts3.4/commands/QueryCommand.d.ts @@ -0,0 +1,96 @@ +import { QueryCommand as __QueryCommand } from "@aws-sdk/client-dynamodb"; +import { Command as $Command } from "@smithy/smithy-client"; +import { + Handler, + HttpHandlerOptions as __HttpHandlerOptions, + MiddlewareStack, +} from "@smithy/types"; +import { DynamoDBDocumentClientCommand } from "../baseCommand/DynamoDBDocumentClientCommand"; +import { + DynamoDBDocumentClientResolvedConfig, + ServiceInputTypes, + ServiceOutputTypes, +} from "../DynamoDBDocumentClient"; +export { DynamoDBDocumentClientCommand, $Command }; +export type QueryCommandInput = Pick< + __QueryCommandInput, + Exclude< + keyof __QueryCommandInput, + | "KeyConditions" + | "QueryFilter" + | "ExclusiveStartKey" + | "ExpressionAttributeValues" + > +> & { + KeyConditions?: + | 
Record< + string, + Pick> & { + AttributeValueList?: NativeAttributeValue[] | undefined; + } + > + | undefined; + QueryFilter?: + | Record< + string, + Pick> & { + AttributeValueList?: NativeAttributeValue[] | undefined; + } + > + | undefined; + ExclusiveStartKey?: Record | undefined; + ExpressionAttributeValues?: Record | undefined; +}; +export type QueryCommandOutput = Pick< + __QueryCommandOutput, + Exclude +> & { + Items?: Record[] | undefined; + LastEvaluatedKey?: Record | undefined; +}; +export declare class QueryCommand extends DynamoDBDocumentClientCommand< + QueryCommandInput, + QueryCommandOutput, + __QueryCommandInput, + __QueryCommandOutput, + DynamoDBDocumentClientResolvedConfig +> { + readonly input: QueryCommandInput; + protected readonly inputKeyNodes: { + KeyConditions: { + "*": { + AttributeValueList: import("../commands/utils").KeyNodeChildren; + }; + }; + QueryFilter: { + "*": { + AttributeValueList: import("../commands/utils").KeyNodeChildren; + }; + }; + ExclusiveStartKey: import("../commands/utils").KeyNodeChildren; + ExpressionAttributeValues: import("../commands/utils").KeyNodeChildren; + }; + protected readonly outputKeyNodes: { + Items: { + "*": import("../commands/utils").KeyNodeChildren; + }; + LastEvaluatedKey: import("../commands/utils").KeyNodeChildren; + }; + protected readonly clientCommand: __QueryCommand; + readonly middlewareStack: MiddlewareStack< + QueryCommandInput | __QueryCommandInput, + QueryCommandOutput | __QueryCommandOutput + >; + constructor(input: QueryCommandInput); + resolveMiddleware( + clientStack: MiddlewareStack, + configuration: DynamoDBDocumentClientResolvedConfig, + options?: __HttpHandlerOptions + ): Handler; +} +import { + Condition, + QueryCommandInput as __QueryCommandInput, + QueryCommandOutput as __QueryCommandOutput, +} from "@aws-sdk/client-dynamodb"; +import { NativeAttributeValue } from "@aws-sdk/util-dynamodb"; diff --git 
a/amplify/functions/deleteDocument/node_modules/@aws-sdk/lib-dynamodb/dist-types/ts3.4/commands/ScanCommand.d.ts b/amplify/functions/deleteDocument/node_modules/@aws-sdk/lib-dynamodb/dist-types/ts3.4/commands/ScanCommand.d.ts new file mode 100644 index 0000000..c2dc93b --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@aws-sdk/lib-dynamodb/dist-types/ts3.4/commands/ScanCommand.d.ts @@ -0,0 +1,80 @@ +import { ScanCommand as __ScanCommand } from "@aws-sdk/client-dynamodb"; +import { Command as $Command } from "@smithy/smithy-client"; +import { + Handler, + HttpHandlerOptions as __HttpHandlerOptions, + MiddlewareStack, +} from "@smithy/types"; +import { DynamoDBDocumentClientCommand } from "../baseCommand/DynamoDBDocumentClientCommand"; +import { + DynamoDBDocumentClientResolvedConfig, + ServiceInputTypes, + ServiceOutputTypes, +} from "../DynamoDBDocumentClient"; +export { DynamoDBDocumentClientCommand, $Command }; +export type ScanCommandInput = Pick< + __ScanCommandInput, + Exclude< + keyof __ScanCommandInput, + "ScanFilter" | "ExclusiveStartKey" | "ExpressionAttributeValues" + > +> & { + ScanFilter?: + | Record< + string, + Pick> & { + AttributeValueList?: NativeAttributeValue[] | undefined; + } + > + | undefined; + ExclusiveStartKey?: Record | undefined; + ExpressionAttributeValues?: Record | undefined; +}; +export type ScanCommandOutput = Pick< + __ScanCommandOutput, + Exclude +> & { + Items?: Record[] | undefined; + LastEvaluatedKey?: Record | undefined; +}; +export declare class ScanCommand extends DynamoDBDocumentClientCommand< + ScanCommandInput, + ScanCommandOutput, + __ScanCommandInput, + __ScanCommandOutput, + DynamoDBDocumentClientResolvedConfig +> { + readonly input: ScanCommandInput; + protected readonly inputKeyNodes: { + ScanFilter: { + "*": { + AttributeValueList: import("../commands/utils").KeyNodeChildren; + }; + }; + ExclusiveStartKey: import("../commands/utils").KeyNodeChildren; + ExpressionAttributeValues: 
import("../commands/utils").KeyNodeChildren; + }; + protected readonly outputKeyNodes: { + Items: { + "*": import("../commands/utils").KeyNodeChildren; + }; + LastEvaluatedKey: import("../commands/utils").KeyNodeChildren; + }; + protected readonly clientCommand: __ScanCommand; + readonly middlewareStack: MiddlewareStack< + ScanCommandInput | __ScanCommandInput, + ScanCommandOutput | __ScanCommandOutput + >; + constructor(input: ScanCommandInput); + resolveMiddleware( + clientStack: MiddlewareStack, + configuration: DynamoDBDocumentClientResolvedConfig, + options?: __HttpHandlerOptions + ): Handler; +} +import { + Condition, + ScanCommandInput as __ScanCommandInput, + ScanCommandOutput as __ScanCommandOutput, +} from "@aws-sdk/client-dynamodb"; +import { NativeAttributeValue } from "@aws-sdk/util-dynamodb"; diff --git a/amplify/functions/deleteDocument/node_modules/@aws-sdk/lib-dynamodb/dist-types/ts3.4/commands/TransactGetCommand.d.ts b/amplify/functions/deleteDocument/node_modules/@aws-sdk/lib-dynamodb/dist-types/ts3.4/commands/TransactGetCommand.d.ts new file mode 100644 index 0000000..6568c81 --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@aws-sdk/lib-dynamodb/dist-types/ts3.4/commands/TransactGetCommand.d.ts @@ -0,0 +1,82 @@ +import { TransactGetItemsCommand as __TransactGetItemsCommand } from "@aws-sdk/client-dynamodb"; +import { Command as $Command } from "@smithy/smithy-client"; +import { + Handler, + HttpHandlerOptions as __HttpHandlerOptions, + MiddlewareStack, +} from "@smithy/types"; +import { DynamoDBDocumentClientCommand } from "../baseCommand/DynamoDBDocumentClientCommand"; +import { + DynamoDBDocumentClientResolvedConfig, + ServiceInputTypes, + ServiceOutputTypes, +} from "../DynamoDBDocumentClient"; +export { DynamoDBDocumentClientCommand, $Command }; +export type TransactGetCommandInput = Pick< + __TransactGetItemsCommandInput, + Exclude +> & { + TransactItems: + | (Pick> & { + Get: + | (Pick> & { + Key: Record | undefined; + }) 
+ | undefined; + })[] + | undefined; +}; +export type TransactGetCommandOutput = Pick< + __TransactGetItemsCommandOutput, + Exclude +> & { + Responses?: + | (Pick> & { + Item?: Record | undefined; + })[] + | undefined; +}; +export declare class TransactGetCommand extends DynamoDBDocumentClientCommand< + TransactGetCommandInput, + TransactGetCommandOutput, + __TransactGetItemsCommandInput, + __TransactGetItemsCommandOutput, + DynamoDBDocumentClientResolvedConfig +> { + readonly input: TransactGetCommandInput; + protected readonly inputKeyNodes: { + TransactItems: { + "*": { + Get: { + Key: import("../commands/utils").KeyNodeChildren; + }; + }; + }; + }; + protected readonly outputKeyNodes: { + Responses: { + "*": { + Item: import("../commands/utils").KeyNodeChildren; + }; + }; + }; + protected readonly clientCommand: __TransactGetItemsCommand; + readonly middlewareStack: MiddlewareStack< + TransactGetCommandInput | __TransactGetItemsCommandInput, + TransactGetCommandOutput | __TransactGetItemsCommandOutput + >; + constructor(input: TransactGetCommandInput); + resolveMiddleware( + clientStack: MiddlewareStack, + configuration: DynamoDBDocumentClientResolvedConfig, + options?: __HttpHandlerOptions + ): Handler; +} +import { + Get, + ItemResponse, + TransactGetItem, + TransactGetItemsCommandInput as __TransactGetItemsCommandInput, + TransactGetItemsCommandOutput as __TransactGetItemsCommandOutput, +} from "@aws-sdk/client-dynamodb"; +import { NativeAttributeValue } from "@aws-sdk/util-dynamodb"; diff --git a/amplify/functions/deleteDocument/node_modules/@aws-sdk/lib-dynamodb/dist-types/ts3.4/commands/TransactWriteCommand.d.ts b/amplify/functions/deleteDocument/node_modules/@aws-sdk/lib-dynamodb/dist-types/ts3.4/commands/TransactWriteCommand.d.ts new file mode 100644 index 0000000..91ba34e --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@aws-sdk/lib-dynamodb/dist-types/ts3.4/commands/TransactWriteCommand.d.ts @@ -0,0 +1,151 @@ +import { 
TransactWriteItemsCommand as __TransactWriteItemsCommand } from "@aws-sdk/client-dynamodb"; +import { Command as $Command } from "@smithy/smithy-client"; +import { + Handler, + HttpHandlerOptions as __HttpHandlerOptions, + MiddlewareStack, +} from "@smithy/types"; +import { DynamoDBDocumentClientCommand } from "../baseCommand/DynamoDBDocumentClientCommand"; +import { + DynamoDBDocumentClientResolvedConfig, + ServiceInputTypes, + ServiceOutputTypes, +} from "../DynamoDBDocumentClient"; +export { DynamoDBDocumentClientCommand, $Command }; +export type TransactWriteCommandInput = Pick< + __TransactWriteItemsCommandInput, + Exclude +> & { + TransactItems: + | (Pick< + TransactWriteItem, + Exclude< + keyof TransactWriteItem, + "ConditionCheck" | "Put" | "Delete" | "Update" + > + > & { + ConditionCheck?: + | (Pick< + ConditionCheck, + Exclude + > & { + Key: Record | undefined; + ExpressionAttributeValues?: + | Record + | undefined; + }) + | undefined; + Put?: + | (Pick< + Put, + Exclude + > & { + Item: Record | undefined; + ExpressionAttributeValues?: + | Record + | undefined; + }) + | undefined; + Delete?: + | (Pick< + Delete, + Exclude + > & { + Key: Record | undefined; + ExpressionAttributeValues?: + | Record + | undefined; + }) + | undefined; + Update?: + | (Pick< + Update, + Exclude + > & { + Key: Record | undefined; + ExpressionAttributeValues?: + | Record + | undefined; + }) + | undefined; + })[] + | undefined; +}; +export type TransactWriteCommandOutput = Pick< + __TransactWriteItemsCommandOutput, + Exclude +> & { + ItemCollectionMetrics?: + | Record< + string, + (Pick< + ItemCollectionMetrics, + Exclude + > & { + ItemCollectionKey?: Record | undefined; + })[] + > + | undefined; +}; +export declare class TransactWriteCommand extends DynamoDBDocumentClientCommand< + TransactWriteCommandInput, + TransactWriteCommandOutput, + __TransactWriteItemsCommandInput, + __TransactWriteItemsCommandOutput, + DynamoDBDocumentClientResolvedConfig +> { + readonly input: 
TransactWriteCommandInput; + protected readonly inputKeyNodes: { + TransactItems: { + "*": { + ConditionCheck: { + Key: import("../commands/utils").KeyNodeChildren; + ExpressionAttributeValues: import("../commands/utils").KeyNodeChildren; + }; + Put: { + Item: import("../commands/utils").KeyNodeChildren; + ExpressionAttributeValues: import("../commands/utils").KeyNodeChildren; + }; + Delete: { + Key: import("../commands/utils").KeyNodeChildren; + ExpressionAttributeValues: import("../commands/utils").KeyNodeChildren; + }; + Update: { + Key: import("../commands/utils").KeyNodeChildren; + ExpressionAttributeValues: import("../commands/utils").KeyNodeChildren; + }; + }; + }; + }; + protected readonly outputKeyNodes: { + ItemCollectionMetrics: { + "*": { + "*": { + ItemCollectionKey: import("../commands/utils").KeyNodeChildren; + }; + }; + }; + }; + protected readonly clientCommand: __TransactWriteItemsCommand; + readonly middlewareStack: MiddlewareStack< + TransactWriteCommandInput | __TransactWriteItemsCommandInput, + TransactWriteCommandOutput | __TransactWriteItemsCommandOutput + >; + constructor(input: TransactWriteCommandInput); + resolveMiddleware( + clientStack: MiddlewareStack, + configuration: DynamoDBDocumentClientResolvedConfig, + options?: __HttpHandlerOptions + ): Handler; +} +import { + ConditionCheck, + Delete, + ItemCollectionMetrics, + Put, + TransactWriteItem, + TransactWriteItemsCommandInput as __TransactWriteItemsCommandInput, + TransactWriteItemsCommandOutput as __TransactWriteItemsCommandOutput, + Update, +} from "@aws-sdk/client-dynamodb"; +import { NativeAttributeValue } from "@aws-sdk/util-dynamodb"; diff --git a/amplify/functions/deleteDocument/node_modules/@aws-sdk/lib-dynamodb/dist-types/ts3.4/commands/UpdateCommand.d.ts b/amplify/functions/deleteDocument/node_modules/@aws-sdk/lib-dynamodb/dist-types/ts3.4/commands/UpdateCommand.d.ts new file mode 100644 index 0000000..7c97b97 --- /dev/null +++ 
b/amplify/functions/deleteDocument/node_modules/@aws-sdk/lib-dynamodb/dist-types/ts3.4/commands/UpdateCommand.d.ts @@ -0,0 +1,113 @@ +import { UpdateItemCommand as __UpdateItemCommand } from "@aws-sdk/client-dynamodb"; +import { Command as $Command } from "@smithy/smithy-client"; +import { + Handler, + HttpHandlerOptions as __HttpHandlerOptions, + MiddlewareStack, +} from "@smithy/types"; +import { DynamoDBDocumentClientCommand } from "../baseCommand/DynamoDBDocumentClientCommand"; +import { + DynamoDBDocumentClientResolvedConfig, + ServiceInputTypes, + ServiceOutputTypes, +} from "../DynamoDBDocumentClient"; +export { DynamoDBDocumentClientCommand, $Command }; +export type UpdateCommandInput = Pick< + __UpdateItemCommandInput, + Exclude< + keyof __UpdateItemCommandInput, + "Key" | "AttributeUpdates" | "Expected" | "ExpressionAttributeValues" + > +> & { + Key: Record | undefined; + AttributeUpdates?: + | Record< + string, + Pick< + AttributeValueUpdate, + Exclude + > & { + Value?: NativeAttributeValue | undefined; + } + > + | undefined; + Expected?: + | Record< + string, + Pick< + ExpectedAttributeValue, + Exclude + > & { + Value?: NativeAttributeValue | undefined; + AttributeValueList?: NativeAttributeValue[] | undefined; + } + > + | undefined; + ExpressionAttributeValues?: Record | undefined; +}; +export type UpdateCommandOutput = Pick< + __UpdateItemCommandOutput, + Exclude< + keyof __UpdateItemCommandOutput, + "Attributes" | "ItemCollectionMetrics" + > +> & { + Attributes?: Record | undefined; + ItemCollectionMetrics?: + | (Pick< + ItemCollectionMetrics, + Exclude + > & { + ItemCollectionKey?: Record | undefined; + }) + | undefined; +}; +export declare class UpdateCommand extends DynamoDBDocumentClientCommand< + UpdateCommandInput, + UpdateCommandOutput, + __UpdateItemCommandInput, + __UpdateItemCommandOutput, + DynamoDBDocumentClientResolvedConfig +> { + readonly input: UpdateCommandInput; + protected readonly inputKeyNodes: { + Key: 
import("../commands/utils").KeyNodeChildren; + AttributeUpdates: { + "*": { + Value: null; + }; + }; + Expected: { + "*": { + Value: null; + AttributeValueList: import("../commands/utils").KeyNodeChildren; + }; + }; + ExpressionAttributeValues: import("../commands/utils").KeyNodeChildren; + }; + protected readonly outputKeyNodes: { + Attributes: import("../commands/utils").KeyNodeChildren; + ItemCollectionMetrics: { + ItemCollectionKey: import("../commands/utils").KeyNodeChildren; + }; + }; + protected readonly clientCommand: __UpdateItemCommand; + readonly middlewareStack: MiddlewareStack< + UpdateCommandInput | __UpdateItemCommandInput, + UpdateCommandOutput | __UpdateItemCommandOutput + >; + constructor(input: UpdateCommandInput); + resolveMiddleware( + clientStack: MiddlewareStack, + configuration: DynamoDBDocumentClientResolvedConfig, + options?: __HttpHandlerOptions + ): Handler; +} +import { + AttributeValueUpdate, + ExpectedAttributeValue, + ItemCollectionMetrics, + UpdateItemCommandInput as __UpdateItemCommandInput, + UpdateItemCommandOutput as __UpdateItemCommandOutput, +} from "@aws-sdk/client-dynamodb"; +import { NativeAttributeValue } from "@aws-sdk/util-dynamodb"; diff --git a/amplify/functions/deleteDocument/node_modules/@aws-sdk/lib-dynamodb/dist-types/ts3.4/commands/index.d.ts b/amplify/functions/deleteDocument/node_modules/@aws-sdk/lib-dynamodb/dist-types/ts3.4/commands/index.d.ts new file mode 100644 index 0000000..49e8a4e --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@aws-sdk/lib-dynamodb/dist-types/ts3.4/commands/index.d.ts @@ -0,0 +1,13 @@ +export * from "./BatchExecuteStatementCommand"; +export * from "./BatchGetCommand"; +export * from "./BatchWriteCommand"; +export * from "./DeleteCommand"; +export * from "./ExecuteStatementCommand"; +export * from "./ExecuteTransactionCommand"; +export * from "./GetCommand"; +export * from "./PutCommand"; +export * from "./QueryCommand"; +export * from "./ScanCommand"; +export * from 
"./TransactGetCommand"; +export * from "./TransactWriteCommand"; +export * from "./UpdateCommand"; diff --git a/amplify/functions/deleteDocument/node_modules/@aws-sdk/lib-dynamodb/dist-types/ts3.4/commands/utils.d.ts b/amplify/functions/deleteDocument/node_modules/@aws-sdk/lib-dynamodb/dist-types/ts3.4/commands/utils.d.ts new file mode 100644 index 0000000..c0473c3 --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@aws-sdk/lib-dynamodb/dist-types/ts3.4/commands/utils.d.ts @@ -0,0 +1,17 @@ +import { marshallOptions, unmarshallOptions } from "@aws-sdk/util-dynamodb"; +export type KeyNodeSelf = null; +export declare const SELF: KeyNodeSelf; +export type KeyNodeChildren = Record; +export declare const ALL_VALUES: KeyNodeChildren; +export declare const ALL_MEMBERS: KeyNodeChildren; +export type KeyNodes = KeyNodeSelf | KeyNodeChildren; +export declare const marshallInput: ( + obj: any, + keyNodes: KeyNodeChildren, + options?: marshallOptions +) => any; +export declare const unmarshallOutput: ( + obj: any, + keyNodes: KeyNodeChildren, + options?: unmarshallOptions +) => any; diff --git a/amplify/functions/deleteDocument/node_modules/@aws-sdk/lib-dynamodb/dist-types/ts3.4/index.d.ts b/amplify/functions/deleteDocument/node_modules/@aws-sdk/lib-dynamodb/dist-types/ts3.4/index.d.ts new file mode 100644 index 0000000..ab7a55d --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@aws-sdk/lib-dynamodb/dist-types/ts3.4/index.d.ts @@ -0,0 +1,11 @@ +export * from "./DynamoDBDocument"; +export * from "./DynamoDBDocumentClient"; +export * from "./commands"; +export * from "./pagination"; +export { NumberValueImpl as NumberValue } from "@aws-sdk/util-dynamodb"; +export { marshallOptions, unmarshallOptions } from "@aws-sdk/util-dynamodb"; +export { + NativeAttributeValue, + NativeAttributeBinary, + NativeScalarAttributeValue, +} from "@aws-sdk/util-dynamodb"; diff --git 
a/amplify/functions/deleteDocument/node_modules/@aws-sdk/lib-dynamodb/dist-types/ts3.4/pagination/Interfaces.d.ts b/amplify/functions/deleteDocument/node_modules/@aws-sdk/lib-dynamodb/dist-types/ts3.4/pagination/Interfaces.d.ts new file mode 100644 index 0000000..5bd45d2 --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@aws-sdk/lib-dynamodb/dist-types/ts3.4/pagination/Interfaces.d.ts @@ -0,0 +1,8 @@ +import { PaginationConfiguration } from "@smithy/types"; +import { DynamoDBDocument } from "../DynamoDBDocument"; +import { DynamoDBDocumentClient } from "../DynamoDBDocumentClient"; +export { PaginationConfiguration }; +export interface DynamoDBDocumentPaginationConfiguration + extends PaginationConfiguration { + client: DynamoDBDocument | DynamoDBDocumentClient; +} diff --git a/amplify/functions/deleteDocument/node_modules/@aws-sdk/lib-dynamodb/dist-types/ts3.4/pagination/QueryPaginator.d.ts b/amplify/functions/deleteDocument/node_modules/@aws-sdk/lib-dynamodb/dist-types/ts3.4/pagination/QueryPaginator.d.ts new file mode 100644 index 0000000..93d4aff --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@aws-sdk/lib-dynamodb/dist-types/ts3.4/pagination/QueryPaginator.d.ts @@ -0,0 +1,12 @@ +import { Paginator } from "@smithy/types"; +import { + QueryCommandInput, + QueryCommandOutput, +} from "../commands/QueryCommand"; +import { DynamoDBDocumentPaginationConfiguration } from "./Interfaces"; +export { Paginator }; +export declare const paginateQuery: ( + config: DynamoDBDocumentPaginationConfiguration, + input: QueryCommandInput, + ...additionalArguments: any +) => Paginator; diff --git a/amplify/functions/deleteDocument/node_modules/@aws-sdk/lib-dynamodb/dist-types/ts3.4/pagination/ScanPaginator.d.ts b/amplify/functions/deleteDocument/node_modules/@aws-sdk/lib-dynamodb/dist-types/ts3.4/pagination/ScanPaginator.d.ts new file mode 100644 index 0000000..0a2c6d7 --- /dev/null +++ 
b/amplify/functions/deleteDocument/node_modules/@aws-sdk/lib-dynamodb/dist-types/ts3.4/pagination/ScanPaginator.d.ts @@ -0,0 +1,9 @@ +import { Paginator } from "@smithy/types"; +import { ScanCommandInput, ScanCommandOutput } from "../commands/ScanCommand"; +import { DynamoDBDocumentPaginationConfiguration } from "./Interfaces"; +export { Paginator }; +export declare const paginateScan: ( + config: DynamoDBDocumentPaginationConfiguration, + input: ScanCommandInput, + ...additionalArguments: any +) => Paginator; diff --git a/amplify/functions/deleteDocument/node_modules/@aws-sdk/lib-dynamodb/dist-types/ts3.4/pagination/index.d.ts b/amplify/functions/deleteDocument/node_modules/@aws-sdk/lib-dynamodb/dist-types/ts3.4/pagination/index.d.ts new file mode 100644 index 0000000..0d9540e --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@aws-sdk/lib-dynamodb/dist-types/ts3.4/pagination/index.d.ts @@ -0,0 +1,3 @@ +export * from "./Interfaces"; +export * from "./QueryPaginator"; +export * from "./ScanPaginator"; diff --git a/amplify/functions/deleteDocument/node_modules/@aws-sdk/lib-dynamodb/package.json b/amplify/functions/deleteDocument/node_modules/@aws-sdk/lib-dynamodb/package.json new file mode 100644 index 0000000..0b72a76 --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@aws-sdk/lib-dynamodb/package.json @@ -0,0 +1,66 @@ +{ + "name": "@aws-sdk/lib-dynamodb", + "version": "3.803.0", + "description": "The document client simplifies working with items in Amazon DynamoDB by abstracting away the notion of attribute values.", + "main": "./dist-cjs/index.js", + "module": "./dist-es/index.js", + "types": "./dist-types/index.d.ts", + "scripts": { + "build": "concurrently 'yarn:build:cjs' 'yarn:build:es' 'yarn:build:types'", + "build:cjs": "node ../../scripts/compilation/inline lib-dynamodb", + "build:es": "tsc -p tsconfig.es.json", + "build:include:deps": "lerna run --scope $npm_package_name --include-dependencies build", + "build:types": "tsc -p 
tsconfig.types.json", + "build:types:downlevel": "downlevel-dts dist-types dist-types/ts3.4", + "clean": "rimraf ./dist-* && rimraf *.tsbuildinfo", + "extract:docs": "api-extractor run --local", + "test": "yarn g:vitest run", + "test:e2e": "yarn g:vitest run -c vitest.config.e2e.ts --mode development", + "test:watch": "yarn g:vitest watch", + "test:e2e:watch": "yarn g:vitest watch -c vitest.config.e2e.ts" + }, + "engines": { + "node": ">=18.0.0" + }, + "author": { + "name": "AWS SDK for JavaScript Team", + "url": "https://aws.amazon.com/javascript/" + }, + "license": "Apache-2.0", + "dependencies": { + "@aws-sdk/core": "3.799.0", + "@aws-sdk/util-dynamodb": "3.803.0", + "@smithy/core": "^3.3.0", + "@smithy/smithy-client": "^4.2.1", + "@smithy/types": "^4.2.0", + "tslib": "^2.6.2" + }, + "peerDependencies": { + "@aws-sdk/client-dynamodb": "^3.803.0" + }, + "devDependencies": { + "@aws-sdk/client-dynamodb": "3.803.0", + "@tsconfig/recommended": "1.0.1", + "@types/node": "^18.19.69", + "concurrently": "7.0.0", + "downlevel-dts": "0.10.1", + "rimraf": "3.0.2", + "typescript": "~5.2.2" + }, + "typesVersions": { + "<4.0": { + "dist-types/*": [ + "dist-types/ts3.4/*" + ] + } + }, + "files": [ + "dist-*/**" + ], + "homepage": "https://github.com/aws/aws-sdk-js-v3/tree/main/lib/lib-dynamodb", + "repository": { + "type": "git", + "url": "https://github.com/aws/aws-sdk-js-v3.git", + "directory": "lib/lib-dynamodb" + } +} diff --git a/amplify/functions/deleteDocument/node_modules/@aws-sdk/middleware-endpoint-discovery/LICENSE b/amplify/functions/deleteDocument/node_modules/@aws-sdk/middleware-endpoint-discovery/LICENSE new file mode 100644 index 0000000..dd65ae0 --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@aws-sdk/middleware-endpoint-discovery/LICENSE @@ -0,0 +1,201 @@ + Apache License + Version 2.0, January 2004 + http://www.apache.org/licenses/ + + TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION + + 1. Definitions. 
+ + "License" shall mean the terms and conditions for use, reproduction, + and distribution as defined by Sections 1 through 9 of this document. + + "Licensor" shall mean the copyright owner or entity authorized by + the copyright owner that is granting the License. + + "Legal Entity" shall mean the union of the acting entity and all + other entities that control, are controlled by, or are under common + control with that entity. For the purposes of this definition, + "control" means (i) the power, direct or indirect, to cause the + direction or management of such entity, whether by contract or + otherwise, or (ii) ownership of fifty percent (50%) or more of the + outstanding shares, or (iii) beneficial ownership of such entity. + + "You" (or "Your") shall mean an individual or Legal Entity + exercising permissions granted by this License. + + "Source" form shall mean the preferred form for making modifications, + including but not limited to software source code, documentation + source, and configuration files. + + "Object" form shall mean any form resulting from mechanical + transformation or translation of a Source form, including but + not limited to compiled object code, generated documentation, + and conversions to other media types. + + "Work" shall mean the work of authorship, whether in Source or + Object form, made available under the License, as indicated by a + copyright notice that is included in or attached to the work + (an example is provided in the Appendix below). + + "Derivative Works" shall mean any work, whether in Source or Object + form, that is based on (or derived from) the Work and for which the + editorial revisions, annotations, elaborations, or other modifications + represent, as a whole, an original work of authorship. For the purposes + of this License, Derivative Works shall not include works that remain + separable from, or merely link (or bind by name) to the interfaces of, + the Work and Derivative Works thereof. 
+ + "Contribution" shall mean any work of authorship, including + the original version of the Work and any modifications or additions + to that Work or Derivative Works thereof, that is intentionally + submitted to Licensor for inclusion in the Work by the copyright owner + or by an individual or Legal Entity authorized to submit on behalf of + the copyright owner. For the purposes of this definition, "submitted" + means any form of electronic, verbal, or written communication sent + to the Licensor or its representatives, including but not limited to + communication on electronic mailing lists, source code control systems, + and issue tracking systems that are managed by, or on behalf of, the + Licensor for the purpose of discussing and improving the Work, but + excluding communication that is conspicuously marked or otherwise + designated in writing by the copyright owner as "Not a Contribution." + + "Contributor" shall mean Licensor and any individual or Legal Entity + on behalf of whom a Contribution has been received by Licensor and + subsequently incorporated within the Work. + + 2. Grant of Copyright License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + copyright license to reproduce, prepare Derivative Works of, + publicly display, publicly perform, sublicense, and distribute the + Work and such Derivative Works in Source or Object form. + + 3. Grant of Patent License. 
Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + (except as stated in this section) patent license to make, have made, + use, offer to sell, sell, import, and otherwise transfer the Work, + where such license applies only to those patent claims licensable + by such Contributor that are necessarily infringed by their + Contribution(s) alone or by combination of their Contribution(s) + with the Work to which such Contribution(s) was submitted. If You + institute patent litigation against any entity (including a + cross-claim or counterclaim in a lawsuit) alleging that the Work + or a Contribution incorporated within the Work constitutes direct + or contributory patent infringement, then any patent licenses + granted to You under this License for that Work shall terminate + as of the date such litigation is filed. + + 4. Redistribution. You may reproduce and distribute copies of the + Work or Derivative Works thereof in any medium, with or without + modifications, and in Source or Object form, provided that You + meet the following conditions: + + (a) You must give any other recipients of the Work or + Derivative Works a copy of this License; and + + (b) You must cause any modified files to carry prominent notices + stating that You changed the files; and + + (c) You must retain, in the Source form of any Derivative Works + that You distribute, all copyright, patent, trademark, and + attribution notices from the Source form of the Work, + excluding those notices that do not pertain to any part of + the Derivative Works; and + + (d) If the Work includes a "NOTICE" text file as part of its + distribution, then any Derivative Works that You distribute must + include a readable copy of the attribution notices contained + within such NOTICE file, excluding those notices that do not + pertain to any part of the Derivative Works, in at least one + of 
the following places: within a NOTICE text file distributed + as part of the Derivative Works; within the Source form or + documentation, if provided along with the Derivative Works; or, + within a display generated by the Derivative Works, if and + wherever such third-party notices normally appear. The contents + of the NOTICE file are for informational purposes only and + do not modify the License. You may add Your own attribution + notices within Derivative Works that You distribute, alongside + or as an addendum to the NOTICE text from the Work, provided + that such additional attribution notices cannot be construed + as modifying the License. + + You may add Your own copyright statement to Your modifications and + may provide additional or different license terms and conditions + for use, reproduction, or distribution of Your modifications, or + for any such Derivative Works as a whole, provided Your use, + reproduction, and distribution of the Work otherwise complies with + the conditions stated in this License. + + 5. Submission of Contributions. Unless You explicitly state otherwise, + any Contribution intentionally submitted for inclusion in the Work + by You to the Licensor shall be under the terms and conditions of + this License, without any additional terms or conditions. + Notwithstanding the above, nothing herein shall supersede or modify + the terms of any separate license agreement you may have executed + with Licensor regarding such Contributions. + + 6. Trademarks. This License does not grant permission to use the trade + names, trademarks, service marks, or product names of the Licensor, + except as required for reasonable and customary use in describing the + origin of the Work and reproducing the content of the NOTICE file. + + 7. Disclaimer of Warranty. 
Unless required by applicable law or + agreed to in writing, Licensor provides the Work (and each + Contributor provides its Contributions) on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or + implied, including, without limitation, any warranties or conditions + of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A + PARTICULAR PURPOSE. You are solely responsible for determining the + appropriateness of using or redistributing the Work and assume any + risks associated with Your exercise of permissions under this License. + + 8. Limitation of Liability. In no event and under no legal theory, + whether in tort (including negligence), contract, or otherwise, + unless required by applicable law (such as deliberate and grossly + negligent acts) or agreed to in writing, shall any Contributor be + liable to You for damages, including any direct, indirect, special, + incidental, or consequential damages of any character arising as a + result of this License or out of the use or inability to use the + Work (including but not limited to damages for loss of goodwill, + work stoppage, computer failure or malfunction, or any and all + other commercial damages or losses), even if such Contributor + has been advised of the possibility of such damages. + + 9. Accepting Warranty or Additional Liability. While redistributing + the Work or Derivative Works thereof, You may choose to offer, + and charge a fee for, acceptance of support, warranty, indemnity, + or other liability obligations and/or rights consistent with this + License. However, in accepting such obligations, You may act only + on Your own behalf and on Your sole responsibility, not on behalf + of any other Contributor, and only if You agree to indemnify, + defend, and hold each Contributor harmless for any liability + incurred by, or claims asserted against, such Contributor by reason + of your accepting any such warranty or additional liability. 
+ + END OF TERMS AND CONDITIONS + + APPENDIX: How to apply the Apache License to your work. + + To apply the Apache License to your work, attach the following + boilerplate notice, with the fields enclosed by brackets "{}" + replaced with your own identifying information. (Don't include + the brackets!) The text should be enclosed in the appropriate + comment syntax for the file format. We also recommend that a + file or class name and description of purpose be included on the + same "printed page" as the copyright notice for easier + identification within third-party archives. + + Copyright 2018-2020 Amazon.com, Inc. or its affiliates. All Rights Reserved. + + Licensed under the Apache License, Version 2.0 (the "License"); + you may not use this file except in compliance with the License. + You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + + Unless required by applicable law or agreed to in writing, software + distributed under the License is distributed on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + See the License for the specific language governing permissions and + limitations under the License. 
diff --git a/amplify/functions/deleteDocument/node_modules/@aws-sdk/middleware-endpoint-discovery/README.md b/amplify/functions/deleteDocument/node_modules/@aws-sdk/middleware-endpoint-discovery/README.md new file mode 100644 index 0000000..4a50903 --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@aws-sdk/middleware-endpoint-discovery/README.md @@ -0,0 +1,4 @@ +# @aws-sdk/middleware-endpoint-discovery + +[![NPM version](https://img.shields.io/npm/v/@aws-sdk/middleware-endpoint-discovery/latest.svg)](https://www.npmjs.com/package/@aws-sdk/middleware-endpoint-discovery) +[![NPM downloads](https://img.shields.io/npm/dm/@aws-sdk/middleware-endpoint-discovery.svg)](https://www.npmjs.com/package/@aws-sdk/middleware-endpoint-discovery) diff --git a/amplify/functions/deleteDocument/node_modules/@aws-sdk/middleware-endpoint-discovery/dist-cjs/index.js b/amplify/functions/deleteDocument/node_modules/@aws-sdk/middleware-endpoint-discovery/dist-cjs/index.js new file mode 100644 index 0000000..f534fd5 --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@aws-sdk/middleware-endpoint-discovery/dist-cjs/index.js @@ -0,0 +1,229 @@ +"use strict"; +var __defProp = Object.defineProperty; +var __getOwnPropDesc = Object.getOwnPropertyDescriptor; +var __getOwnPropNames = Object.getOwnPropertyNames; +var __hasOwnProp = Object.prototype.hasOwnProperty; +var __name = (target, value) => __defProp(target, "name", { value, configurable: true }); +var __export = (target, all) => { + for (var name in all) + __defProp(target, name, { get: all[name], enumerable: true }); +}; +var __copyProps = (to, from, except, desc) => { + if (from && typeof from === "object" || typeof from === "function") { + for (let key of __getOwnPropNames(from)) + if (!__hasOwnProp.call(to, key) && key !== except) + __defProp(to, key, { get: () => from[key], enumerable: !(desc = __getOwnPropDesc(from, key)) || desc.enumerable }); + } + return to; +}; +var __toCommonJS = (mod) => 
__copyProps(__defProp({}, "__esModule", { value: true }), mod); + +// src/index.ts +var index_exports = {}; +__export(index_exports, { + NODE_ENDPOINT_DISCOVERY_CONFIG_OPTIONS: () => NODE_ENDPOINT_DISCOVERY_CONFIG_OPTIONS, + endpointDiscoveryMiddlewareOptions: () => endpointDiscoveryMiddlewareOptions, + getEndpointDiscoveryOptionalPlugin: () => getEndpointDiscoveryOptionalPlugin, + getEndpointDiscoveryPlugin: () => getEndpointDiscoveryPlugin, + getEndpointDiscoveryRequiredPlugin: () => getEndpointDiscoveryRequiredPlugin, + resolveEndpointDiscoveryConfig: () => resolveEndpointDiscoveryConfig +}); +module.exports = __toCommonJS(index_exports); + +// src/configurations.ts +var ENV_ENDPOINT_DISCOVERY = ["AWS_ENABLE_ENDPOINT_DISCOVERY", "AWS_ENDPOINT_DISCOVERY_ENABLED"]; +var CONFIG_ENDPOINT_DISCOVERY = "endpoint_discovery_enabled"; +var isFalsy = /* @__PURE__ */ __name((value) => ["false", "0"].indexOf(value) >= 0, "isFalsy"); +var NODE_ENDPOINT_DISCOVERY_CONFIG_OPTIONS = { + environmentVariableSelector: /* @__PURE__ */ __name((env) => { + for (let i = 0; i < ENV_ENDPOINT_DISCOVERY.length; i++) { + const envKey = ENV_ENDPOINT_DISCOVERY[i]; + if (envKey in env) { + const value = env[envKey]; + if (value === "") { + throw Error(`Environment variable ${envKey} can't be empty of undefined, got "${value}"`); + } + return !isFalsy(value); + } + } + }, "environmentVariableSelector"), + configFileSelector: /* @__PURE__ */ __name((profile) => { + if (CONFIG_ENDPOINT_DISCOVERY in profile) { + const value = profile[CONFIG_ENDPOINT_DISCOVERY]; + if (value === void 0) { + throw Error(`Shared config entry ${CONFIG_ENDPOINT_DISCOVERY} can't be undefined, got "${value}"`); + } + return !isFalsy(value); + } + }, "configFileSelector"), + default: void 0 +}; + +// src/endpointDiscoveryMiddleware.ts +var import_protocol_http = require("@smithy/protocol-http"); + +// src/getCacheKey.ts +var getCacheKey = /* @__PURE__ */ __name(async (commandName, config, options) => { + const { accessKeyId 
} = await config.credentials(); + const { identifiers } = options; + return JSON.stringify({ + ...accessKeyId && { accessKeyId }, + ...identifiers && { + commandName, + identifiers: Object.entries(identifiers).sort().reduce((acc, [key, value]) => ({ ...acc, [key]: value }), {}) + } + }); +}, "getCacheKey"); + +// src/updateDiscoveredEndpointInCache.ts +var requestQueue = {}; +var updateDiscoveredEndpointInCache = /* @__PURE__ */ __name(async (config, options) => new Promise((resolve, reject) => { + const { endpointCache } = config; + const { cacheKey, commandName, identifiers } = options; + const endpoints = endpointCache.get(cacheKey); + if (endpoints && endpoints.length === 1 && endpoints[0].Address === "") { + if (options.isDiscoveredEndpointRequired) { + if (!requestQueue[cacheKey]) requestQueue[cacheKey] = []; + requestQueue[cacheKey].push({ resolve, reject }); + } else { + resolve(); + } + } else if (endpoints && endpoints.length > 0) { + resolve(); + } else { + const placeholderEndpoints = [{ Address: "", CachePeriodInMinutes: 1 }]; + endpointCache.set(cacheKey, placeholderEndpoints); + const command = new options.endpointDiscoveryCommandCtor({ + Operation: commandName.slice(0, -7), + // strip "Command" + Identifiers: identifiers + }); + const handler = command.resolveMiddleware(options.clientStack, config, options.options); + handler(command).then((result) => { + endpointCache.set(cacheKey, result.output.Endpoints); + if (requestQueue[cacheKey]) { + requestQueue[cacheKey].forEach(({ resolve: resolve2 }) => { + resolve2(); + }); + delete requestQueue[cacheKey]; + } + resolve(); + }).catch((error) => { + endpointCache.delete(cacheKey); + const errorToThrow = Object.assign( + new Error( + `The operation to discover endpoint failed. 
Please retry, or provide a custom endpoint and disable endpoint discovery to proceed.` + ), + { reason: error } + ); + if (requestQueue[cacheKey]) { + requestQueue[cacheKey].forEach(({ reject: reject2 }) => { + reject2(errorToThrow); + }); + delete requestQueue[cacheKey]; + } + if (options.isDiscoveredEndpointRequired) { + reject(errorToThrow); + } else { + endpointCache.set(cacheKey, placeholderEndpoints); + resolve(); + } + }); + } +}), "updateDiscoveredEndpointInCache"); + +// src/endpointDiscoveryMiddleware.ts +var endpointDiscoveryMiddleware = /* @__PURE__ */ __name((config, middlewareConfig) => (next, context) => async (args) => { + if (config.isCustomEndpoint) { + if (config.isClientEndpointDiscoveryEnabled) { + throw new Error(`Custom endpoint is supplied; endpointDiscoveryEnabled must not be true.`); + } + return next(args); + } + const { endpointDiscoveryCommandCtor } = config; + const { isDiscoveredEndpointRequired, identifiers } = middlewareConfig; + const clientName = context.clientName; + const commandName = context.commandName; + const isEndpointDiscoveryEnabled = await config.endpointDiscoveryEnabled(); + const cacheKey = await getCacheKey(commandName, config, { identifiers }); + if (isDiscoveredEndpointRequired) { + if (isEndpointDiscoveryEnabled === false) { + throw new Error( + `Endpoint Discovery is disabled but ${commandName} on ${clientName} requires it. 
Please check your configurations.` + ); + } + await updateDiscoveredEndpointInCache(config, { + ...middlewareConfig, + commandName, + cacheKey, + endpointDiscoveryCommandCtor + }); + } else if (isEndpointDiscoveryEnabled) { + updateDiscoveredEndpointInCache(config, { + ...middlewareConfig, + commandName, + cacheKey, + endpointDiscoveryCommandCtor + }); + } + const { request } = args; + if (cacheKey && import_protocol_http.HttpRequest.isInstance(request)) { + const endpoint = config.endpointCache.getEndpoint(cacheKey); + if (endpoint) { + request.hostname = endpoint; + } + } + return next(args); +}, "endpointDiscoveryMiddleware"); + +// src/getEndpointDiscoveryPlugin.ts +var endpointDiscoveryMiddlewareOptions = { + name: "endpointDiscoveryMiddleware", + step: "build", + tags: ["ENDPOINT_DISCOVERY"], + override: true +}; +var getEndpointDiscoveryPlugin = /* @__PURE__ */ __name((pluginConfig, middlewareConfig) => ({ + applyToStack: /* @__PURE__ */ __name((commandStack) => { + commandStack.add(endpointDiscoveryMiddleware(pluginConfig, middlewareConfig), endpointDiscoveryMiddlewareOptions); + }, "applyToStack") +}), "getEndpointDiscoveryPlugin"); +var getEndpointDiscoveryRequiredPlugin = /* @__PURE__ */ __name((pluginConfig, middlewareConfig) => ({ + applyToStack: /* @__PURE__ */ __name((commandStack) => { + commandStack.add( + endpointDiscoveryMiddleware(pluginConfig, { ...middlewareConfig, isDiscoveredEndpointRequired: true }), + endpointDiscoveryMiddlewareOptions + ); + }, "applyToStack") +}), "getEndpointDiscoveryRequiredPlugin"); +var getEndpointDiscoveryOptionalPlugin = /* @__PURE__ */ __name((pluginConfig, middlewareConfig) => ({ + applyToStack: /* @__PURE__ */ __name((commandStack) => { + commandStack.add( + endpointDiscoveryMiddleware(pluginConfig, { ...middlewareConfig, isDiscoveredEndpointRequired: false }), + endpointDiscoveryMiddlewareOptions + ); + }, "applyToStack") +}), "getEndpointDiscoveryOptionalPlugin"); + +// src/resolveEndpointDiscoveryConfig.ts 
+var import_endpoint_cache = require("@aws-sdk/endpoint-cache"); +var resolveEndpointDiscoveryConfig = /* @__PURE__ */ __name((input, { endpointDiscoveryCommandCtor }) => { + const { endpointCacheSize, endpointDiscoveryEnabled, endpointDiscoveryEnabledProvider } = input; + return Object.assign(input, { + endpointDiscoveryCommandCtor, + endpointCache: new import_endpoint_cache.EndpointCache(endpointCacheSize ?? 1e3), + endpointDiscoveryEnabled: endpointDiscoveryEnabled !== void 0 ? () => Promise.resolve(endpointDiscoveryEnabled) : endpointDiscoveryEnabledProvider, + isClientEndpointDiscoveryEnabled: endpointDiscoveryEnabled !== void 0 + }); +}, "resolveEndpointDiscoveryConfig"); +// Annotate the CommonJS export names for ESM import in node: + +0 && (module.exports = { + NODE_ENDPOINT_DISCOVERY_CONFIG_OPTIONS, + endpointDiscoveryMiddlewareOptions, + getEndpointDiscoveryPlugin, + getEndpointDiscoveryRequiredPlugin, + getEndpointDiscoveryOptionalPlugin, + resolveEndpointDiscoveryConfig +}); + diff --git a/amplify/functions/deleteDocument/node_modules/@aws-sdk/middleware-endpoint-discovery/dist-es/configurations.js b/amplify/functions/deleteDocument/node_modules/@aws-sdk/middleware-endpoint-discovery/dist-es/configurations.js new file mode 100644 index 0000000..8a5fdd6 --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@aws-sdk/middleware-endpoint-discovery/dist-es/configurations.js @@ -0,0 +1,27 @@ +const ENV_ENDPOINT_DISCOVERY = ["AWS_ENABLE_ENDPOINT_DISCOVERY", "AWS_ENDPOINT_DISCOVERY_ENABLED"]; +const CONFIG_ENDPOINT_DISCOVERY = "endpoint_discovery_enabled"; +const isFalsy = (value) => ["false", "0"].indexOf(value) >= 0; +export const NODE_ENDPOINT_DISCOVERY_CONFIG_OPTIONS = { + environmentVariableSelector: (env) => { + for (let i = 0; i < ENV_ENDPOINT_DISCOVERY.length; i++) { + const envKey = ENV_ENDPOINT_DISCOVERY[i]; + if (envKey in env) { + const value = env[envKey]; + if (value === "") { + throw Error(`Environment variable ${envKey} can't be 
empty of undefined, got "${value}"`); + } + return !isFalsy(value); + } + } + }, + configFileSelector: (profile) => { + if (CONFIG_ENDPOINT_DISCOVERY in profile) { + const value = profile[CONFIG_ENDPOINT_DISCOVERY]; + if (value === undefined) { + throw Error(`Shared config entry ${CONFIG_ENDPOINT_DISCOVERY} can't be undefined, got "${value}"`); + } + return !isFalsy(value); + } + }, + default: undefined, +}; diff --git a/amplify/functions/deleteDocument/node_modules/@aws-sdk/middleware-endpoint-discovery/dist-es/endpointDiscoveryMiddleware.js b/amplify/functions/deleteDocument/node_modules/@aws-sdk/middleware-endpoint-discovery/dist-es/endpointDiscoveryMiddleware.js new file mode 100644 index 0000000..80672eb --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@aws-sdk/middleware-endpoint-discovery/dist-es/endpointDiscoveryMiddleware.js @@ -0,0 +1,45 @@ +import { HttpRequest } from "@smithy/protocol-http"; +import { getCacheKey } from "./getCacheKey"; +import { updateDiscoveredEndpointInCache } from "./updateDiscoveredEndpointInCache"; +export const endpointDiscoveryMiddleware = (config, middlewareConfig) => (next, context) => async (args) => { + if (config.isCustomEndpoint) { + if (config.isClientEndpointDiscoveryEnabled) { + throw new Error(`Custom endpoint is supplied; endpointDiscoveryEnabled must not be true.`); + } + return next(args); + } + const { endpointDiscoveryCommandCtor } = config; + const { isDiscoveredEndpointRequired, identifiers } = middlewareConfig; + const clientName = context.clientName; + const commandName = context.commandName; + const isEndpointDiscoveryEnabled = await config.endpointDiscoveryEnabled(); + const cacheKey = await getCacheKey(commandName, config, { identifiers }); + if (isDiscoveredEndpointRequired) { + if (isEndpointDiscoveryEnabled === false) { + throw new Error(`Endpoint Discovery is disabled but ${commandName} on ${clientName} requires it.` + + ` Please check your configurations.`); + } + await 
updateDiscoveredEndpointInCache(config, { + ...middlewareConfig, + commandName, + cacheKey, + endpointDiscoveryCommandCtor, + }); + } + else if (isEndpointDiscoveryEnabled) { + updateDiscoveredEndpointInCache(config, { + ...middlewareConfig, + commandName, + cacheKey, + endpointDiscoveryCommandCtor, + }); + } + const { request } = args; + if (cacheKey && HttpRequest.isInstance(request)) { + const endpoint = config.endpointCache.getEndpoint(cacheKey); + if (endpoint) { + request.hostname = endpoint; + } + } + return next(args); +}; diff --git a/amplify/functions/deleteDocument/node_modules/@aws-sdk/middleware-endpoint-discovery/dist-es/getCacheKey.js b/amplify/functions/deleteDocument/node_modules/@aws-sdk/middleware-endpoint-discovery/dist-es/getCacheKey.js new file mode 100644 index 0000000..ca72e41 --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@aws-sdk/middleware-endpoint-discovery/dist-es/getCacheKey.js @@ -0,0 +1,13 @@ +export const getCacheKey = async (commandName, config, options) => { + const { accessKeyId } = await config.credentials(); + const { identifiers } = options; + return JSON.stringify({ + ...(accessKeyId && { accessKeyId }), + ...(identifiers && { + commandName, + identifiers: Object.entries(identifiers) + .sort() + .reduce((acc, [key, value]) => ({ ...acc, [key]: value }), {}), + }), + }); +}; diff --git a/amplify/functions/deleteDocument/node_modules/@aws-sdk/middleware-endpoint-discovery/dist-es/getEndpointDiscoveryPlugin.js b/amplify/functions/deleteDocument/node_modules/@aws-sdk/middleware-endpoint-discovery/dist-es/getEndpointDiscoveryPlugin.js new file mode 100644 index 0000000..656e7fe --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@aws-sdk/middleware-endpoint-discovery/dist-es/getEndpointDiscoveryPlugin.js @@ -0,0 +1,22 @@ +import { endpointDiscoveryMiddleware } from "./endpointDiscoveryMiddleware"; +export const endpointDiscoveryMiddlewareOptions = { + name: "endpointDiscoveryMiddleware", + step: 
"build", + tags: ["ENDPOINT_DISCOVERY"], + override: true, +}; +export const getEndpointDiscoveryPlugin = (pluginConfig, middlewareConfig) => ({ + applyToStack: (commandStack) => { + commandStack.add(endpointDiscoveryMiddleware(pluginConfig, middlewareConfig), endpointDiscoveryMiddlewareOptions); + }, +}); +export const getEndpointDiscoveryRequiredPlugin = (pluginConfig, middlewareConfig) => ({ + applyToStack: (commandStack) => { + commandStack.add(endpointDiscoveryMiddleware(pluginConfig, { ...middlewareConfig, isDiscoveredEndpointRequired: true }), endpointDiscoveryMiddlewareOptions); + }, +}); +export const getEndpointDiscoveryOptionalPlugin = (pluginConfig, middlewareConfig) => ({ + applyToStack: (commandStack) => { + commandStack.add(endpointDiscoveryMiddleware(pluginConfig, { ...middlewareConfig, isDiscoveredEndpointRequired: false }), endpointDiscoveryMiddlewareOptions); + }, +}); diff --git a/amplify/functions/deleteDocument/node_modules/@aws-sdk/middleware-endpoint-discovery/dist-es/index.js b/amplify/functions/deleteDocument/node_modules/@aws-sdk/middleware-endpoint-discovery/dist-es/index.js new file mode 100644 index 0000000..1c74159 --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@aws-sdk/middleware-endpoint-discovery/dist-es/index.js @@ -0,0 +1,3 @@ +export * from "./configurations"; +export * from "./getEndpointDiscoveryPlugin"; +export * from "./resolveEndpointDiscoveryConfig"; diff --git a/amplify/functions/deleteDocument/node_modules/@aws-sdk/middleware-endpoint-discovery/dist-es/resolveEndpointDiscoveryConfig.js b/amplify/functions/deleteDocument/node_modules/@aws-sdk/middleware-endpoint-discovery/dist-es/resolveEndpointDiscoveryConfig.js new file mode 100644 index 0000000..cc1cc9c --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@aws-sdk/middleware-endpoint-discovery/dist-es/resolveEndpointDiscoveryConfig.js @@ -0,0 +1,12 @@ +import { EndpointCache } from "@aws-sdk/endpoint-cache"; +export const 
resolveEndpointDiscoveryConfig = (input, { endpointDiscoveryCommandCtor }) => { + const { endpointCacheSize, endpointDiscoveryEnabled, endpointDiscoveryEnabledProvider } = input; + return Object.assign(input, { + endpointDiscoveryCommandCtor, + endpointCache: new EndpointCache(endpointCacheSize ?? 1000), + endpointDiscoveryEnabled: endpointDiscoveryEnabled !== undefined + ? () => Promise.resolve(endpointDiscoveryEnabled) + : endpointDiscoveryEnabledProvider, + isClientEndpointDiscoveryEnabled: endpointDiscoveryEnabled !== undefined, + }); +}; diff --git a/amplify/functions/deleteDocument/node_modules/@aws-sdk/middleware-endpoint-discovery/dist-es/updateDiscoveredEndpointInCache.js b/amplify/functions/deleteDocument/node_modules/@aws-sdk/middleware-endpoint-discovery/dist-es/updateDiscoveredEndpointInCache.js new file mode 100644 index 0000000..c0a9831 --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@aws-sdk/middleware-endpoint-discovery/dist-es/updateDiscoveredEndpointInCache.js @@ -0,0 +1,57 @@ +const requestQueue = {}; +export const updateDiscoveredEndpointInCache = async (config, options) => new Promise((resolve, reject) => { + const { endpointCache } = config; + const { cacheKey, commandName, identifiers } = options; + const endpoints = endpointCache.get(cacheKey); + if (endpoints && endpoints.length === 1 && endpoints[0].Address === "") { + if (options.isDiscoveredEndpointRequired) { + if (!requestQueue[cacheKey]) + requestQueue[cacheKey] = []; + requestQueue[cacheKey].push({ resolve, reject }); + } + else { + resolve(); + } + } + else if (endpoints && endpoints.length > 0) { + resolve(); + } + else { + const placeholderEndpoints = [{ Address: "", CachePeriodInMinutes: 1 }]; + endpointCache.set(cacheKey, placeholderEndpoints); + const command = new options.endpointDiscoveryCommandCtor({ + Operation: commandName.slice(0, -7), + Identifiers: identifiers, + }); + const handler = command.resolveMiddleware(options.clientStack, config, 
options.options); + handler(command) + .then((result) => { + endpointCache.set(cacheKey, result.output.Endpoints); + if (requestQueue[cacheKey]) { + requestQueue[cacheKey].forEach(({ resolve }) => { + resolve(); + }); + delete requestQueue[cacheKey]; + } + resolve(); + }) + .catch((error) => { + endpointCache.delete(cacheKey); + const errorToThrow = Object.assign(new Error(`The operation to discover endpoint failed.` + + ` Please retry, or provide a custom endpoint and disable endpoint discovery to proceed.`), { reason: error }); + if (requestQueue[cacheKey]) { + requestQueue[cacheKey].forEach(({ reject }) => { + reject(errorToThrow); + }); + delete requestQueue[cacheKey]; + } + if (options.isDiscoveredEndpointRequired) { + reject(errorToThrow); + } + else { + endpointCache.set(cacheKey, placeholderEndpoints); + resolve(); + } + }); + } +}); diff --git a/amplify/functions/deleteDocument/node_modules/@aws-sdk/middleware-endpoint-discovery/dist-types/configurations.d.ts b/amplify/functions/deleteDocument/node_modules/@aws-sdk/middleware-endpoint-discovery/dist-types/configurations.d.ts new file mode 100644 index 0000000..428209a --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@aws-sdk/middleware-endpoint-discovery/dist-types/configurations.d.ts @@ -0,0 +1,5 @@ +import { LoadedConfigSelectors } from "@smithy/node-config-provider"; +/** + * @internal + */ +export declare const NODE_ENDPOINT_DISCOVERY_CONFIG_OPTIONS: LoadedConfigSelectors; diff --git a/amplify/functions/deleteDocument/node_modules/@aws-sdk/middleware-endpoint-discovery/dist-types/endpointDiscoveryMiddleware.d.ts b/amplify/functions/deleteDocument/node_modules/@aws-sdk/middleware-endpoint-discovery/dist-types/endpointDiscoveryMiddleware.d.ts new file mode 100644 index 0000000..0116bfc --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@aws-sdk/middleware-endpoint-discovery/dist-types/endpointDiscoveryMiddleware.d.ts @@ -0,0 +1,4 @@ +import { BuildHandler, 
HandlerExecutionContext, MetadataBearer } from "@smithy/types"; +import { EndpointDiscoveryMiddlewareConfig } from "./getEndpointDiscoveryPlugin"; +import { EndpointDiscoveryResolvedConfig, PreviouslyResolved } from "./resolveEndpointDiscoveryConfig"; +export declare const endpointDiscoveryMiddleware: (config: EndpointDiscoveryResolvedConfig & PreviouslyResolved, middlewareConfig: EndpointDiscoveryMiddlewareConfig) => (next: BuildHandler, context: HandlerExecutionContext) => BuildHandler; diff --git a/amplify/functions/deleteDocument/node_modules/@aws-sdk/middleware-endpoint-discovery/dist-types/getCacheKey.d.ts b/amplify/functions/deleteDocument/node_modules/@aws-sdk/middleware-endpoint-discovery/dist-types/getCacheKey.d.ts new file mode 100644 index 0000000..153a5b9 --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@aws-sdk/middleware-endpoint-discovery/dist-types/getCacheKey.d.ts @@ -0,0 +1,9 @@ +import { AwsCredentialIdentity, Provider } from "@smithy/types"; +/** + * Generate key to index the endpoints in the cache + */ +export declare const getCacheKey: (commandName: string, config: { + credentials: Provider; +}, options: { + identifiers?: Record; +}) => Promise; diff --git a/amplify/functions/deleteDocument/node_modules/@aws-sdk/middleware-endpoint-discovery/dist-types/getEndpointDiscoveryPlugin.d.ts b/amplify/functions/deleteDocument/node_modules/@aws-sdk/middleware-endpoint-discovery/dist-types/getEndpointDiscoveryPlugin.d.ts new file mode 100644 index 0000000..06565e3 --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@aws-sdk/middleware-endpoint-discovery/dist-types/getEndpointDiscoveryPlugin.d.ts @@ -0,0 +1,29 @@ +import { BuildHandlerOptions, HttpHandlerOptions, MiddlewareStack, Pluggable } from "@smithy/types"; +import { EndpointDiscoveryResolvedConfig, PreviouslyResolved } from "./resolveEndpointDiscoveryConfig"; +/** + * @internal + */ +export declare const endpointDiscoveryMiddlewareOptions: BuildHandlerOptions; +/** 
+ * @public + */ +export interface EndpointDiscoveryMiddlewareConfig { + isDiscoveredEndpointRequired: boolean; + clientStack: MiddlewareStack; + options?: HttpHandlerOptions; + identifiers?: Record; +} +/** + * @internal + */ +export declare const getEndpointDiscoveryPlugin: (pluginConfig: EndpointDiscoveryResolvedConfig & PreviouslyResolved, middlewareConfig: EndpointDiscoveryMiddlewareConfig) => Pluggable; +/** + * @internal + * @deprecated Use getEndpointDiscoveryPlugin + */ +export declare const getEndpointDiscoveryRequiredPlugin: (pluginConfig: EndpointDiscoveryResolvedConfig & PreviouslyResolved, middlewareConfig: Omit) => Pluggable; +/** + * @internal + * @deprecated Use getEndpointDiscoveryPlugin + */ +export declare const getEndpointDiscoveryOptionalPlugin: (pluginConfig: EndpointDiscoveryResolvedConfig & PreviouslyResolved, middlewareConfig: Omit) => Pluggable; diff --git a/amplify/functions/deleteDocument/node_modules/@aws-sdk/middleware-endpoint-discovery/dist-types/index.d.ts b/amplify/functions/deleteDocument/node_modules/@aws-sdk/middleware-endpoint-discovery/dist-types/index.d.ts new file mode 100644 index 0000000..1c74159 --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@aws-sdk/middleware-endpoint-discovery/dist-types/index.d.ts @@ -0,0 +1,3 @@ +export * from "./configurations"; +export * from "./getEndpointDiscoveryPlugin"; +export * from "./resolveEndpointDiscoveryConfig"; diff --git a/amplify/functions/deleteDocument/node_modules/@aws-sdk/middleware-endpoint-discovery/dist-types/resolveEndpointDiscoveryConfig.d.ts b/amplify/functions/deleteDocument/node_modules/@aws-sdk/middleware-endpoint-discovery/dist-types/resolveEndpointDiscoveryConfig.d.ts new file mode 100644 index 0000000..dd132a4 --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@aws-sdk/middleware-endpoint-discovery/dist-types/resolveEndpointDiscoveryConfig.d.ts @@ -0,0 +1,60 @@ +import { EndpointCache } from "@aws-sdk/endpoint-cache"; +import { 
AwsCredentialIdentity, MemoizedProvider, Provider } from "@smithy/types"; +/** + * @internal + */ +export interface PreviouslyResolved { + isCustomEndpoint?: boolean; + credentials: MemoizedProvider; + endpointDiscoveryEnabledProvider: Provider; +} +/** + * @public + */ +export interface EndpointDiscoveryInputConfig { + /** + * The size of the client cache storing endpoints from endpoint discovery operations. + * Defaults to 1000. + */ + endpointCacheSize?: number; + /** + * Whether to call operations with endpoints given by service dynamically. + * Setting this config to `true` will enable endpoint discovery for all applicable operations. + * Setting it to `false` will explicitly disable endpoint discovery even though operations that + * require endpoint discovery will presumably fail. Leaving it to undefined means SDK only do + * endpoint discovery when it's required. Defaults to `undefined`. + */ + endpointDiscoveryEnabled?: boolean | undefined; +} +export interface EndpointDiscoveryResolvedConfig { + /** + * LRU Cache which stores endpoints from endpoint discovery operations. + * The size is either provided by {@link EndpointDiscoveryInputConfig.endpointCacheSize}. + */ + endpointCache: EndpointCache; + /** + * The constructor of the Command used for discovering endpoints. + * @internal + */ + endpointDiscoveryCommandCtor: new (comandConfig: any) => any; + /** + * Resolved value for input config {@link EndpointDiscoveryInputConfig.endpointDiscoveryEnabled}. + */ + endpointDiscoveryEnabled: Provider; + /** + * Stores whether endpoint discovery configuration is set locally by passing + * {@link EndpointDiscoveryInputConfig.endpointDiscoveryEnabled} during client creation. + * @internal + */ + isClientEndpointDiscoveryEnabled: boolean; +} +export interface EndpointDiscoveryConfigOptions { + /** + * The constructor of the Command used for discovering endpoints. 
+ */ + endpointDiscoveryCommandCtor: new (comandConfig: any) => any; +} +/** + * @internal + */ +export declare const resolveEndpointDiscoveryConfig: (input: T & PreviouslyResolved & EndpointDiscoveryInputConfig, { endpointDiscoveryCommandCtor }: EndpointDiscoveryConfigOptions) => T & EndpointDiscoveryResolvedConfig; diff --git a/amplify/functions/deleteDocument/node_modules/@aws-sdk/middleware-endpoint-discovery/dist-types/ts3.4/configurations.d.ts b/amplify/functions/deleteDocument/node_modules/@aws-sdk/middleware-endpoint-discovery/dist-types/ts3.4/configurations.d.ts new file mode 100644 index 0000000..366f145 --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@aws-sdk/middleware-endpoint-discovery/dist-types/ts3.4/configurations.d.ts @@ -0,0 +1,4 @@ +import { LoadedConfigSelectors } from "@smithy/node-config-provider"; +export declare const NODE_ENDPOINT_DISCOVERY_CONFIG_OPTIONS: LoadedConfigSelectors< + boolean | undefined +>; diff --git a/amplify/functions/deleteDocument/node_modules/@aws-sdk/middleware-endpoint-discovery/dist-types/ts3.4/endpointDiscoveryMiddleware.d.ts b/amplify/functions/deleteDocument/node_modules/@aws-sdk/middleware-endpoint-discovery/dist-types/ts3.4/endpointDiscoveryMiddleware.d.ts new file mode 100644 index 0000000..ceff474 --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@aws-sdk/middleware-endpoint-discovery/dist-types/ts3.4/endpointDiscoveryMiddleware.d.ts @@ -0,0 +1,17 @@ +import { + BuildHandler, + HandlerExecutionContext, + MetadataBearer, +} from "@smithy/types"; +import { EndpointDiscoveryMiddlewareConfig } from "./getEndpointDiscoveryPlugin"; +import { + EndpointDiscoveryResolvedConfig, + PreviouslyResolved, +} from "./resolveEndpointDiscoveryConfig"; +export declare const endpointDiscoveryMiddleware: ( + config: EndpointDiscoveryResolvedConfig & PreviouslyResolved, + middlewareConfig: EndpointDiscoveryMiddlewareConfig +) => ( + next: BuildHandler, + context: HandlerExecutionContext +) => 
BuildHandler; diff --git a/amplify/functions/deleteDocument/node_modules/@aws-sdk/middleware-endpoint-discovery/dist-types/ts3.4/getCacheKey.d.ts b/amplify/functions/deleteDocument/node_modules/@aws-sdk/middleware-endpoint-discovery/dist-types/ts3.4/getCacheKey.d.ts new file mode 100644 index 0000000..d9be17e --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@aws-sdk/middleware-endpoint-discovery/dist-types/ts3.4/getCacheKey.d.ts @@ -0,0 +1,10 @@ +import { AwsCredentialIdentity, Provider } from "@smithy/types"; +export declare const getCacheKey: ( + commandName: string, + config: { + credentials: Provider; + }, + options: { + identifiers?: Record; + } +) => Promise; diff --git a/amplify/functions/deleteDocument/node_modules/@aws-sdk/middleware-endpoint-discovery/dist-types/ts3.4/getEndpointDiscoveryPlugin.d.ts b/amplify/functions/deleteDocument/node_modules/@aws-sdk/middleware-endpoint-discovery/dist-types/ts3.4/getEndpointDiscoveryPlugin.d.ts new file mode 100644 index 0000000..8c60174 --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@aws-sdk/middleware-endpoint-discovery/dist-types/ts3.4/getEndpointDiscoveryPlugin.d.ts @@ -0,0 +1,41 @@ +import { + BuildHandlerOptions, + HttpHandlerOptions, + MiddlewareStack, + Pluggable, +} from "@smithy/types"; +import { + EndpointDiscoveryResolvedConfig, + PreviouslyResolved, +} from "./resolveEndpointDiscoveryConfig"; +export declare const endpointDiscoveryMiddlewareOptions: BuildHandlerOptions; +export interface EndpointDiscoveryMiddlewareConfig { + isDiscoveredEndpointRequired: boolean; + clientStack: MiddlewareStack; + options?: HttpHandlerOptions; + identifiers?: Record; +} +export declare const getEndpointDiscoveryPlugin: ( + pluginConfig: EndpointDiscoveryResolvedConfig & PreviouslyResolved, + middlewareConfig: EndpointDiscoveryMiddlewareConfig +) => Pluggable; +export declare const getEndpointDiscoveryRequiredPlugin: ( + pluginConfig: EndpointDiscoveryResolvedConfig & PreviouslyResolved, 
+ middlewareConfig: Pick< + EndpointDiscoveryMiddlewareConfig, + Exclude< + keyof EndpointDiscoveryMiddlewareConfig, + "isDiscoveredEndpointRequired" + > + > +) => Pluggable; +export declare const getEndpointDiscoveryOptionalPlugin: ( + pluginConfig: EndpointDiscoveryResolvedConfig & PreviouslyResolved, + middlewareConfig: Pick< + EndpointDiscoveryMiddlewareConfig, + Exclude< + keyof EndpointDiscoveryMiddlewareConfig, + "isDiscoveredEndpointRequired" + > + > +) => Pluggable; diff --git a/amplify/functions/deleteDocument/node_modules/@aws-sdk/middleware-endpoint-discovery/dist-types/ts3.4/index.d.ts b/amplify/functions/deleteDocument/node_modules/@aws-sdk/middleware-endpoint-discovery/dist-types/ts3.4/index.d.ts new file mode 100644 index 0000000..1c74159 --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@aws-sdk/middleware-endpoint-discovery/dist-types/ts3.4/index.d.ts @@ -0,0 +1,3 @@ +export * from "./configurations"; +export * from "./getEndpointDiscoveryPlugin"; +export * from "./resolveEndpointDiscoveryConfig"; diff --git a/amplify/functions/deleteDocument/node_modules/@aws-sdk/middleware-endpoint-discovery/dist-types/ts3.4/resolveEndpointDiscoveryConfig.d.ts b/amplify/functions/deleteDocument/node_modules/@aws-sdk/middleware-endpoint-discovery/dist-types/ts3.4/resolveEndpointDiscoveryConfig.d.ts new file mode 100644 index 0000000..eaa95f7 --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@aws-sdk/middleware-endpoint-discovery/dist-types/ts3.4/resolveEndpointDiscoveryConfig.d.ts @@ -0,0 +1,28 @@ +import { EndpointCache } from "@aws-sdk/endpoint-cache"; +import { + AwsCredentialIdentity, + MemoizedProvider, + Provider, +} from "@smithy/types"; +export interface PreviouslyResolved { + isCustomEndpoint?: boolean; + credentials: MemoizedProvider; + endpointDiscoveryEnabledProvider: Provider; +} +export interface EndpointDiscoveryInputConfig { + endpointCacheSize?: number; + endpointDiscoveryEnabled?: boolean | undefined; +} +export 
interface EndpointDiscoveryResolvedConfig { + endpointCache: EndpointCache; + endpointDiscoveryCommandCtor: new (comandConfig: any) => any; + endpointDiscoveryEnabled: Provider; + isClientEndpointDiscoveryEnabled: boolean; +} +export interface EndpointDiscoveryConfigOptions { + endpointDiscoveryCommandCtor: new (comandConfig: any) => any; +} +export declare const resolveEndpointDiscoveryConfig: ( + input: T & PreviouslyResolved & EndpointDiscoveryInputConfig, + { endpointDiscoveryCommandCtor }: EndpointDiscoveryConfigOptions +) => T & EndpointDiscoveryResolvedConfig; diff --git a/amplify/functions/deleteDocument/node_modules/@aws-sdk/middleware-endpoint-discovery/dist-types/ts3.4/updateDiscoveredEndpointInCache.d.ts b/amplify/functions/deleteDocument/node_modules/@aws-sdk/middleware-endpoint-discovery/dist-types/ts3.4/updateDiscoveredEndpointInCache.d.ts new file mode 100644 index 0000000..0887cb5 --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@aws-sdk/middleware-endpoint-discovery/dist-types/ts3.4/updateDiscoveredEndpointInCache.d.ts @@ -0,0 +1,15 @@ +import { EndpointDiscoveryMiddlewareConfig } from "./getEndpointDiscoveryPlugin"; +import { + EndpointDiscoveryResolvedConfig, + PreviouslyResolved, +} from "./resolveEndpointDiscoveryConfig"; +export interface UpdateDiscoveredEndpointInCacheOptions + extends EndpointDiscoveryMiddlewareConfig { + cacheKey: string; + commandName: string; + endpointDiscoveryCommandCtor: new (comandConfig: any) => any; +} +export declare const updateDiscoveredEndpointInCache: ( + config: EndpointDiscoveryResolvedConfig & PreviouslyResolved, + options: UpdateDiscoveredEndpointInCacheOptions +) => Promise; diff --git a/amplify/functions/deleteDocument/node_modules/@aws-sdk/middleware-endpoint-discovery/dist-types/updateDiscoveredEndpointInCache.d.ts b/amplify/functions/deleteDocument/node_modules/@aws-sdk/middleware-endpoint-discovery/dist-types/updateDiscoveredEndpointInCache.d.ts new file mode 100644 index 
0000000..993753d --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@aws-sdk/middleware-endpoint-discovery/dist-types/updateDiscoveredEndpointInCache.d.ts @@ -0,0 +1,8 @@ +import { EndpointDiscoveryMiddlewareConfig } from "./getEndpointDiscoveryPlugin"; +import { EndpointDiscoveryResolvedConfig, PreviouslyResolved } from "./resolveEndpointDiscoveryConfig"; +export interface UpdateDiscoveredEndpointInCacheOptions extends EndpointDiscoveryMiddlewareConfig { + cacheKey: string; + commandName: string; + endpointDiscoveryCommandCtor: new (comandConfig: any) => any; +} +export declare const updateDiscoveredEndpointInCache: (config: EndpointDiscoveryResolvedConfig & PreviouslyResolved, options: UpdateDiscoveredEndpointInCacheOptions) => Promise; diff --git a/amplify/functions/deleteDocument/node_modules/@aws-sdk/middleware-endpoint-discovery/package.json b/amplify/functions/deleteDocument/node_modules/@aws-sdk/middleware-endpoint-discovery/package.json new file mode 100644 index 0000000..44fec2e --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@aws-sdk/middleware-endpoint-discovery/package.json @@ -0,0 +1,60 @@ +{ + "name": "@aws-sdk/middleware-endpoint-discovery", + "version": "3.775.0", + "scripts": { + "build": "concurrently 'yarn:build:cjs' 'yarn:build:es' 'yarn:build:types'", + "build:cjs": "node ../../scripts/compilation/inline middleware-endpoint-discovery", + "build:es": "tsc -p tsconfig.es.json", + "build:include:deps": "lerna run --scope $npm_package_name --include-dependencies build", + "build:types": "tsc -p tsconfig.types.json", + "build:types:downlevel": "downlevel-dts dist-types dist-types/ts3.4", + "clean": "rimraf ./dist-* && rimraf *.tsbuildinfo", + "extract:docs": "api-extractor run --local", + "test": "yarn g:vitest run", + "test:integration": "yarn g:vitest run -c vitest.config.integ.ts", + "test:watch": "yarn g:vitest watch", + "test:integration:watch": "yarn g:vitest watch -c vitest.config.integ.ts" + }, + "main": 
"./dist-cjs/index.js", + "module": "./dist-es/index.js", + "types": "./dist-types/index.d.ts", + "author": { + "name": "AWS SDK for JavaScript Team", + "url": "https://aws.amazon.com/javascript/" + }, + "license": "Apache-2.0", + "devDependencies": { + "@tsconfig/recommended": "1.0.1", + "concurrently": "7.0.0", + "downlevel-dts": "0.10.1", + "rimraf": "3.0.2", + "typescript": "~5.2.2" + }, + "dependencies": { + "@aws-sdk/endpoint-cache": "3.723.0", + "@aws-sdk/types": "3.775.0", + "@smithy/node-config-provider": "^4.0.2", + "@smithy/protocol-http": "^5.1.0", + "@smithy/types": "^4.2.0", + "tslib": "^2.6.2" + }, + "engines": { + "node": ">=18.0.0" + }, + "typesVersions": { + "<4.0": { + "dist-types/*": [ + "dist-types/ts3.4/*" + ] + } + }, + "files": [ + "dist-*/**" + ], + "homepage": "https://github.com/aws/aws-sdk-js-v3/tree/main/packages/middleware-endpoint-discovery", + "repository": { + "type": "git", + "url": "https://github.com/aws/aws-sdk-js-v3.git", + "directory": "packages/middleware-endpoint-discovery" + } +} diff --git a/amplify/functions/deleteDocument/node_modules/@aws-sdk/middleware-host-header/LICENSE b/amplify/functions/deleteDocument/node_modules/@aws-sdk/middleware-host-header/LICENSE new file mode 100644 index 0000000..e907b58 --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@aws-sdk/middleware-host-header/LICENSE @@ -0,0 +1,201 @@ + Apache License + Version 2.0, January 2004 + http://www.apache.org/licenses/ + + TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION + + 1. Definitions. + + "License" shall mean the terms and conditions for use, reproduction, + and distribution as defined by Sections 1 through 9 of this document. + + "Licensor" shall mean the copyright owner or entity authorized by + the copyright owner that is granting the License. + + "Legal Entity" shall mean the union of the acting entity and all + other entities that control, are controlled by, or are under common + control with that entity. 
For the purposes of this definition, + "control" means (i) the power, direct or indirect, to cause the + direction or management of such entity, whether by contract or + otherwise, or (ii) ownership of fifty percent (50%) or more of the + outstanding shares, or (iii) beneficial ownership of such entity. + + "You" (or "Your") shall mean an individual or Legal Entity + exercising permissions granted by this License. + + "Source" form shall mean the preferred form for making modifications, + including but not limited to software source code, documentation + source, and configuration files. + + "Object" form shall mean any form resulting from mechanical + transformation or translation of a Source form, including but + not limited to compiled object code, generated documentation, + and conversions to other media types. + + "Work" shall mean the work of authorship, whether in Source or + Object form, made available under the License, as indicated by a + copyright notice that is included in or attached to the work + (an example is provided in the Appendix below). + + "Derivative Works" shall mean any work, whether in Source or Object + form, that is based on (or derived from) the Work and for which the + editorial revisions, annotations, elaborations, or other modifications + represent, as a whole, an original work of authorship. For the purposes + of this License, Derivative Works shall not include works that remain + separable from, or merely link (or bind by name) to the interfaces of, + the Work and Derivative Works thereof. + + "Contribution" shall mean any work of authorship, including + the original version of the Work and any modifications or additions + to that Work or Derivative Works thereof, that is intentionally + submitted to Licensor for inclusion in the Work by the copyright owner + or by an individual or Legal Entity authorized to submit on behalf of + the copyright owner. 
For the purposes of this definition, "submitted" + means any form of electronic, verbal, or written communication sent + to the Licensor or its representatives, including but not limited to + communication on electronic mailing lists, source code control systems, + and issue tracking systems that are managed by, or on behalf of, the + Licensor for the purpose of discussing and improving the Work, but + excluding communication that is conspicuously marked or otherwise + designated in writing by the copyright owner as "Not a Contribution." + + "Contributor" shall mean Licensor and any individual or Legal Entity + on behalf of whom a Contribution has been received by Licensor and + subsequently incorporated within the Work. + + 2. Grant of Copyright License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + copyright license to reproduce, prepare Derivative Works of, + publicly display, publicly perform, sublicense, and distribute the + Work and such Derivative Works in Source or Object form. + + 3. Grant of Patent License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + (except as stated in this section) patent license to make, have made, + use, offer to sell, sell, import, and otherwise transfer the Work, + where such license applies only to those patent claims licensable + by such Contributor that are necessarily infringed by their + Contribution(s) alone or by combination of their Contribution(s) + with the Work to which such Contribution(s) was submitted. 
If You + institute patent litigation against any entity (including a + cross-claim or counterclaim in a lawsuit) alleging that the Work + or a Contribution incorporated within the Work constitutes direct + or contributory patent infringement, then any patent licenses + granted to You under this License for that Work shall terminate + as of the date such litigation is filed. + + 4. Redistribution. You may reproduce and distribute copies of the + Work or Derivative Works thereof in any medium, with or without + modifications, and in Source or Object form, provided that You + meet the following conditions: + + (a) You must give any other recipients of the Work or + Derivative Works a copy of this License; and + + (b) You must cause any modified files to carry prominent notices + stating that You changed the files; and + + (c) You must retain, in the Source form of any Derivative Works + that You distribute, all copyright, patent, trademark, and + attribution notices from the Source form of the Work, + excluding those notices that do not pertain to any part of + the Derivative Works; and + + (d) If the Work includes a "NOTICE" text file as part of its + distribution, then any Derivative Works that You distribute must + include a readable copy of the attribution notices contained + within such NOTICE file, excluding those notices that do not + pertain to any part of the Derivative Works, in at least one + of the following places: within a NOTICE text file distributed + as part of the Derivative Works; within the Source form or + documentation, if provided along with the Derivative Works; or, + within a display generated by the Derivative Works, if and + wherever such third-party notices normally appear. The contents + of the NOTICE file are for informational purposes only and + do not modify the License. 
You may add Your own attribution + notices within Derivative Works that You distribute, alongside + or as an addendum to the NOTICE text from the Work, provided + that such additional attribution notices cannot be construed + as modifying the License. + + You may add Your own copyright statement to Your modifications and + may provide additional or different license terms and conditions + for use, reproduction, or distribution of Your modifications, or + for any such Derivative Works as a whole, provided Your use, + reproduction, and distribution of the Work otherwise complies with + the conditions stated in this License. + + 5. Submission of Contributions. Unless You explicitly state otherwise, + any Contribution intentionally submitted for inclusion in the Work + by You to the Licensor shall be under the terms and conditions of + this License, without any additional terms or conditions. + Notwithstanding the above, nothing herein shall supersede or modify + the terms of any separate license agreement you may have executed + with Licensor regarding such Contributions. + + 6. Trademarks. This License does not grant permission to use the trade + names, trademarks, service marks, or product names of the Licensor, + except as required for reasonable and customary use in describing the + origin of the Work and reproducing the content of the NOTICE file. + + 7. Disclaimer of Warranty. Unless required by applicable law or + agreed to in writing, Licensor provides the Work (and each + Contributor provides its Contributions) on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or + implied, including, without limitation, any warranties or conditions + of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A + PARTICULAR PURPOSE. You are solely responsible for determining the + appropriateness of using or redistributing the Work and assume any + risks associated with Your exercise of permissions under this License. + + 8. 
Limitation of Liability. In no event and under no legal theory, + whether in tort (including negligence), contract, or otherwise, + unless required by applicable law (such as deliberate and grossly + negligent acts) or agreed to in writing, shall any Contributor be + liable to You for damages, including any direct, indirect, special, + incidental, or consequential damages of any character arising as a + result of this License or out of the use or inability to use the + Work (including but not limited to damages for loss of goodwill, + work stoppage, computer failure or malfunction, or any and all + other commercial damages or losses), even if such Contributor + has been advised of the possibility of such damages. + + 9. Accepting Warranty or Additional Liability. While redistributing + the Work or Derivative Works thereof, You may choose to offer, + and charge a fee for, acceptance of support, warranty, indemnity, + or other liability obligations and/or rights consistent with this + License. However, in accepting such obligations, You may act only + on Your own behalf and on Your sole responsibility, not on behalf + of any other Contributor, and only if You agree to indemnify, + defend, and hold each Contributor harmless for any liability + incurred by, or claims asserted against, such Contributor by reason + of your accepting any such warranty or additional liability. + + END OF TERMS AND CONDITIONS + + APPENDIX: How to apply the Apache License to your work. + + To apply the Apache License to your work, attach the following + boilerplate notice, with the fields enclosed by brackets "{}" + replaced with your own identifying information. (Don't include + the brackets!) The text should be enclosed in the appropriate + comment syntax for the file format. We also recommend that a + file or class name and description of purpose be included on the + same "printed page" as the copyright notice for easier + identification within third-party archives. 
+ + Copyright 2019 Amazon.com, Inc. or its affiliates. All Rights Reserved. + + Licensed under the Apache License, Version 2.0 (the "License"); + you may not use this file except in compliance with the License. + You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + + Unless required by applicable law or agreed to in writing, software + distributed under the License is distributed on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + See the License for the specific language governing permissions and + limitations under the License. diff --git a/amplify/functions/deleteDocument/node_modules/@aws-sdk/middleware-host-header/README.md b/amplify/functions/deleteDocument/node_modules/@aws-sdk/middleware-host-header/README.md new file mode 100644 index 0000000..123940e --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@aws-sdk/middleware-host-header/README.md @@ -0,0 +1,4 @@ +# @aws-sdk/middleware-host-header + +[![NPM version](https://img.shields.io/npm/v/@aws-sdk/middleware-host-header/latest.svg)](https://www.npmjs.com/package/@aws-sdk/middleware-host-header) +[![NPM downloads](https://img.shields.io/npm/dm/@aws-sdk/middleware-host-header.svg)](https://www.npmjs.com/package/@aws-sdk/middleware-host-header) diff --git a/amplify/functions/deleteDocument/node_modules/@aws-sdk/middleware-host-header/dist-cjs/index.js b/amplify/functions/deleteDocument/node_modules/@aws-sdk/middleware-host-header/dist-cjs/index.js new file mode 100644 index 0000000..bdfe2a5 --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@aws-sdk/middleware-host-header/dist-cjs/index.js @@ -0,0 +1,69 @@ +"use strict"; +var __defProp = Object.defineProperty; +var __getOwnPropDesc = Object.getOwnPropertyDescriptor; +var __getOwnPropNames = Object.getOwnPropertyNames; +var __hasOwnProp = Object.prototype.hasOwnProperty; +var __name = (target, value) => __defProp(target, "name", { value, 
configurable: true }); +var __export = (target, all) => { + for (var name in all) + __defProp(target, name, { get: all[name], enumerable: true }); +}; +var __copyProps = (to, from, except, desc) => { + if (from && typeof from === "object" || typeof from === "function") { + for (let key of __getOwnPropNames(from)) + if (!__hasOwnProp.call(to, key) && key !== except) + __defProp(to, key, { get: () => from[key], enumerable: !(desc = __getOwnPropDesc(from, key)) || desc.enumerable }); + } + return to; +}; +var __toCommonJS = (mod) => __copyProps(__defProp({}, "__esModule", { value: true }), mod); + +// src/index.ts +var index_exports = {}; +__export(index_exports, { + getHostHeaderPlugin: () => getHostHeaderPlugin, + hostHeaderMiddleware: () => hostHeaderMiddleware, + hostHeaderMiddlewareOptions: () => hostHeaderMiddlewareOptions, + resolveHostHeaderConfig: () => resolveHostHeaderConfig +}); +module.exports = __toCommonJS(index_exports); +var import_protocol_http = require("@smithy/protocol-http"); +function resolveHostHeaderConfig(input) { + return input; +} +__name(resolveHostHeaderConfig, "resolveHostHeaderConfig"); +var hostHeaderMiddleware = /* @__PURE__ */ __name((options) => (next) => async (args) => { + if (!import_protocol_http.HttpRequest.isInstance(args.request)) return next(args); + const { request } = args; + const { handlerProtocol = "" } = options.requestHandler.metadata || {}; + if (handlerProtocol.indexOf("h2") >= 0 && !request.headers[":authority"]) { + delete request.headers["host"]; + request.headers[":authority"] = request.hostname + (request.port ? 
":" + request.port : ""); + } else if (!request.headers["host"]) { + let host = request.hostname; + if (request.port != null) host += `:${request.port}`; + request.headers["host"] = host; + } + return next(args); +}, "hostHeaderMiddleware"); +var hostHeaderMiddlewareOptions = { + name: "hostHeaderMiddleware", + step: "build", + priority: "low", + tags: ["HOST"], + override: true +}; +var getHostHeaderPlugin = /* @__PURE__ */ __name((options) => ({ + applyToStack: /* @__PURE__ */ __name((clientStack) => { + clientStack.add(hostHeaderMiddleware(options), hostHeaderMiddlewareOptions); + }, "applyToStack") +}), "getHostHeaderPlugin"); +// Annotate the CommonJS export names for ESM import in node: + +0 && (module.exports = { + resolveHostHeaderConfig, + hostHeaderMiddleware, + hostHeaderMiddlewareOptions, + getHostHeaderPlugin +}); + diff --git a/amplify/functions/deleteDocument/node_modules/@aws-sdk/middleware-host-header/dist-es/index.js b/amplify/functions/deleteDocument/node_modules/@aws-sdk/middleware-host-header/dist-es/index.js new file mode 100644 index 0000000..2e2fb62 --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@aws-sdk/middleware-host-header/dist-es/index.js @@ -0,0 +1,33 @@ +import { HttpRequest } from "@smithy/protocol-http"; +export function resolveHostHeaderConfig(input) { + return input; +} +export const hostHeaderMiddleware = (options) => (next) => async (args) => { + if (!HttpRequest.isInstance(args.request)) + return next(args); + const { request } = args; + const { handlerProtocol = "" } = options.requestHandler.metadata || {}; + if (handlerProtocol.indexOf("h2") >= 0 && !request.headers[":authority"]) { + delete request.headers["host"]; + request.headers[":authority"] = request.hostname + (request.port ? 
":" + request.port : ""); + } + else if (!request.headers["host"]) { + let host = request.hostname; + if (request.port != null) + host += `:${request.port}`; + request.headers["host"] = host; + } + return next(args); +}; +export const hostHeaderMiddlewareOptions = { + name: "hostHeaderMiddleware", + step: "build", + priority: "low", + tags: ["HOST"], + override: true, +}; +export const getHostHeaderPlugin = (options) => ({ + applyToStack: (clientStack) => { + clientStack.add(hostHeaderMiddleware(options), hostHeaderMiddlewareOptions); + }, +}); diff --git a/amplify/functions/deleteDocument/node_modules/@aws-sdk/middleware-host-header/dist-types/index.d.ts b/amplify/functions/deleteDocument/node_modules/@aws-sdk/middleware-host-header/dist-types/index.d.ts new file mode 100644 index 0000000..752bb00 --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@aws-sdk/middleware-host-header/dist-types/index.d.ts @@ -0,0 +1,35 @@ +import { AbsoluteLocation, BuildHandlerOptions, BuildMiddleware, Pluggable, RequestHandler } from "@smithy/types"; +/** + * @public + */ +export interface HostHeaderInputConfig { +} +interface PreviouslyResolved { + requestHandler: RequestHandler; +} +/** + * @internal + */ +export interface HostHeaderResolvedConfig { + /** + * The HTTP handler to use. Fetch in browser and Https in Nodejs. 
+ */ + requestHandler: RequestHandler; +} +/** + * @internal + */ +export declare function resolveHostHeaderConfig(input: T & PreviouslyResolved & HostHeaderInputConfig): T & HostHeaderResolvedConfig; +/** + * @internal + */ +export declare const hostHeaderMiddleware: (options: HostHeaderResolvedConfig) => BuildMiddleware; +/** + * @internal + */ +export declare const hostHeaderMiddlewareOptions: BuildHandlerOptions & AbsoluteLocation; +/** + * @internal + */ +export declare const getHostHeaderPlugin: (options: HostHeaderResolvedConfig) => Pluggable; +export {}; diff --git a/amplify/functions/deleteDocument/node_modules/@aws-sdk/middleware-host-header/dist-types/ts3.4/index.d.ts b/amplify/functions/deleteDocument/node_modules/@aws-sdk/middleware-host-header/dist-types/ts3.4/index.d.ts new file mode 100644 index 0000000..3ca5561 --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@aws-sdk/middleware-host-header/dist-types/ts3.4/index.d.ts @@ -0,0 +1,29 @@ +import { + AbsoluteLocation, + BuildHandlerOptions, + BuildMiddleware, + Pluggable, + RequestHandler, +} from "@smithy/types"; +export interface HostHeaderInputConfig {} +interface PreviouslyResolved { + requestHandler: RequestHandler; +} +export interface HostHeaderResolvedConfig { + requestHandler: RequestHandler; +} +export declare function resolveHostHeaderConfig( + input: T & PreviouslyResolved & HostHeaderInputConfig +): T & HostHeaderResolvedConfig; +export declare const hostHeaderMiddleware: < + Input extends object, + Output extends object +>( + options: HostHeaderResolvedConfig +) => BuildMiddleware; +export declare const hostHeaderMiddlewareOptions: BuildHandlerOptions & + AbsoluteLocation; +export declare const getHostHeaderPlugin: ( + options: HostHeaderResolvedConfig +) => Pluggable; +export {}; diff --git a/amplify/functions/deleteDocument/node_modules/@aws-sdk/middleware-host-header/package.json 
b/amplify/functions/deleteDocument/node_modules/@aws-sdk/middleware-host-header/package.json new file mode 100644 index 0000000..523f8a1 --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@aws-sdk/middleware-host-header/package.json @@ -0,0 +1,58 @@ +{ + "name": "@aws-sdk/middleware-host-header", + "version": "3.775.0", + "scripts": { + "build": "concurrently 'yarn:build:cjs' 'yarn:build:es' 'yarn:build:types'", + "build:cjs": "node ../../scripts/compilation/inline middleware-host-header", + "build:es": "tsc -p tsconfig.es.json", + "build:include:deps": "lerna run --scope $npm_package_name --include-dependencies build", + "build:types": "tsc -p tsconfig.types.json", + "build:types:downlevel": "downlevel-dts dist-types dist-types/ts3.4", + "clean": "rimraf ./dist-* && rimraf *.tsbuildinfo", + "extract:docs": "api-extractor run --local", + "test": "yarn g:vitest run", + "test:integration": "yarn g:vitest run -c vitest.config.integ.ts", + "test:watch": "yarn g:vitest watch", + "test:integration:watch": "yarn g:vitest watch -c vitest.config.integ.ts" + }, + "main": "./dist-cjs/index.js", + "module": "./dist-es/index.js", + "types": "./dist-types/index.d.ts", + "author": { + "name": "AWS SDK for JavaScript Team", + "url": "https://aws.amazon.com/javascript/" + }, + "license": "Apache-2.0", + "dependencies": { + "@aws-sdk/types": "3.775.0", + "@smithy/protocol-http": "^5.1.0", + "@smithy/types": "^4.2.0", + "tslib": "^2.6.2" + }, + "engines": { + "node": ">=18.0.0" + }, + "typesVersions": { + "<4.0": { + "dist-types/*": [ + "dist-types/ts3.4/*" + ] + } + }, + "files": [ + "dist-*/**" + ], + "homepage": "https://github.com/aws/aws-sdk-js-v3/tree/main/packages/middleware-host-header", + "repository": { + "type": "git", + "url": "https://github.com/aws/aws-sdk-js-v3.git", + "directory": "packages/middleware-host-header" + }, + "devDependencies": { + "@tsconfig/recommended": "1.0.1", + "concurrently": "7.0.0", + "downlevel-dts": "0.10.1", + "rimraf": "3.0.2", 
+ "typescript": "~5.2.2" + } +} diff --git a/amplify/functions/deleteDocument/node_modules/@aws-sdk/middleware-logger/LICENSE b/amplify/functions/deleteDocument/node_modules/@aws-sdk/middleware-logger/LICENSE new file mode 100644 index 0000000..74d4e5c --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@aws-sdk/middleware-logger/LICENSE @@ -0,0 +1,201 @@ +Apache License + Version 2.0, January 2004 + http://www.apache.org/licenses/ + + TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION + + 1. Definitions. + + "License" shall mean the terms and conditions for use, reproduction, + and distribution as defined by Sections 1 through 9 of this document. + + "Licensor" shall mean the copyright owner or entity authorized by + the copyright owner that is granting the License. + + "Legal Entity" shall mean the union of the acting entity and all + other entities that control, are controlled by, or are under common + control with that entity. For the purposes of this definition, + "control" means (i) the power, direct or indirect, to cause the + direction or management of such entity, whether by contract or + otherwise, or (ii) ownership of fifty percent (50%) or more of the + outstanding shares, or (iii) beneficial ownership of such entity. + + "You" (or "Your") shall mean an individual or Legal Entity + exercising permissions granted by this License. + + "Source" form shall mean the preferred form for making modifications, + including but not limited to software source code, documentation + source, and configuration files. + + "Object" form shall mean any form resulting from mechanical + transformation or translation of a Source form, including but + not limited to compiled object code, generated documentation, + and conversions to other media types. 
+ + "Work" shall mean the work of authorship, whether in Source or + Object form, made available under the License, as indicated by a + copyright notice that is included in or attached to the work + (an example is provided in the Appendix below). + + "Derivative Works" shall mean any work, whether in Source or Object + form, that is based on (or derived from) the Work and for which the + editorial revisions, annotations, elaborations, or other modifications + represent, as a whole, an original work of authorship. For the purposes + of this License, Derivative Works shall not include works that remain + separable from, or merely link (or bind by name) to the interfaces of, + the Work and Derivative Works thereof. + + "Contribution" shall mean any work of authorship, including + the original version of the Work and any modifications or additions + to that Work or Derivative Works thereof, that is intentionally + submitted to Licensor for inclusion in the Work by the copyright owner + or by an individual or Legal Entity authorized to submit on behalf of + the copyright owner. For the purposes of this definition, "submitted" + means any form of electronic, verbal, or written communication sent + to the Licensor or its representatives, including but not limited to + communication on electronic mailing lists, source code control systems, + and issue tracking systems that are managed by, or on behalf of, the + Licensor for the purpose of discussing and improving the Work, but + excluding communication that is conspicuously marked or otherwise + designated in writing by the copyright owner as "Not a Contribution." + + "Contributor" shall mean Licensor and any individual or Legal Entity + on behalf of whom a Contribution has been received by Licensor and + subsequently incorporated within the Work. + + 2. Grant of Copyright License. 
Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + copyright license to reproduce, prepare Derivative Works of, + publicly display, publicly perform, sublicense, and distribute the + Work and such Derivative Works in Source or Object form. + + 3. Grant of Patent License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + (except as stated in this section) patent license to make, have made, + use, offer to sell, sell, import, and otherwise transfer the Work, + where such license applies only to those patent claims licensable + by such Contributor that are necessarily infringed by their + Contribution(s) alone or by combination of their Contribution(s) + with the Work to which such Contribution(s) was submitted. If You + institute patent litigation against any entity (including a + cross-claim or counterclaim in a lawsuit) alleging that the Work + or a Contribution incorporated within the Work constitutes direct + or contributory patent infringement, then any patent licenses + granted to You under this License for that Work shall terminate + as of the date such litigation is filed. + + 4. Redistribution. 
You may reproduce and distribute copies of the + Work or Derivative Works thereof in any medium, with or without + modifications, and in Source or Object form, provided that You + meet the following conditions: + + (a) You must give any other recipients of the Work or + Derivative Works a copy of this License; and + + (b) You must cause any modified files to carry prominent notices + stating that You changed the files; and + + (c) You must retain, in the Source form of any Derivative Works + that You distribute, all copyright, patent, trademark, and + attribution notices from the Source form of the Work, + excluding those notices that do not pertain to any part of + the Derivative Works; and + + (d) If the Work includes a "NOTICE" text file as part of its + distribution, then any Derivative Works that You distribute must + include a readable copy of the attribution notices contained + within such NOTICE file, excluding those notices that do not + pertain to any part of the Derivative Works, in at least one + of the following places: within a NOTICE text file distributed + as part of the Derivative Works; within the Source form or + documentation, if provided along with the Derivative Works; or, + within a display generated by the Derivative Works, if and + wherever such third-party notices normally appear. The contents + of the NOTICE file are for informational purposes only and + do not modify the License. You may add Your own attribution + notices within Derivative Works that You distribute, alongside + or as an addendum to the NOTICE text from the Work, provided + that such additional attribution notices cannot be construed + as modifying the License. 
+ + You may add Your own copyright statement to Your modifications and + may provide additional or different license terms and conditions + for use, reproduction, or distribution of Your modifications, or + for any such Derivative Works as a whole, provided Your use, + reproduction, and distribution of the Work otherwise complies with + the conditions stated in this License. + + 5. Submission of Contributions. Unless You explicitly state otherwise, + any Contribution intentionally submitted for inclusion in the Work + by You to the Licensor shall be under the terms and conditions of + this License, without any additional terms or conditions. + Notwithstanding the above, nothing herein shall supersede or modify + the terms of any separate license agreement you may have executed + with Licensor regarding such Contributions. + + 6. Trademarks. This License does not grant permission to use the trade + names, trademarks, service marks, or product names of the Licensor, + except as required for reasonable and customary use in describing the + origin of the Work and reproducing the content of the NOTICE file. + + 7. Disclaimer of Warranty. Unless required by applicable law or + agreed to in writing, Licensor provides the Work (and each + Contributor provides its Contributions) on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or + implied, including, without limitation, any warranties or conditions + of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A + PARTICULAR PURPOSE. You are solely responsible for determining the + appropriateness of using or redistributing the Work and assume any + risks associated with Your exercise of permissions under this License. + + 8. Limitation of Liability. 
In no event and under no legal theory, + whether in tort (including negligence), contract, or otherwise, + unless required by applicable law (such as deliberate and grossly + negligent acts) or agreed to in writing, shall any Contributor be + liable to You for damages, including any direct, indirect, special, + incidental, or consequential damages of any character arising as a + result of this License or out of the use or inability to use the + Work (including but not limited to damages for loss of goodwill, + work stoppage, computer failure or malfunction, or any and all + other commercial damages or losses), even if such Contributor + has been advised of the possibility of such damages. + + 9. Accepting Warranty or Additional Liability. While redistributing + the Work or Derivative Works thereof, You may choose to offer, + and charge a fee for, acceptance of support, warranty, indemnity, + or other liability obligations and/or rights consistent with this + License. However, in accepting such obligations, You may act only + on Your own behalf and on Your sole responsibility, not on behalf + of any other Contributor, and only if You agree to indemnify, + defend, and hold each Contributor harmless for any liability + incurred by, or claims asserted against, such Contributor by reason + of your accepting any such warranty or additional liability. + + END OF TERMS AND CONDITIONS + + APPENDIX: How to apply the Apache License to your work. + + To apply the Apache License to your work, attach the following + boilerplate notice, with the fields enclosed by brackets "{}" + replaced with your own identifying information. (Don't include + the brackets!) The text should be enclosed in the appropriate + comment syntax for the file format. We also recommend that a + file or class name and description of purpose be included on the + same "printed page" as the copyright notice for easier + identification within third-party archives. + + Copyright 2020 Amazon.com, Inc. 
or its affiliates. All Rights Reserved. + + Licensed under the Apache License, Version 2.0 (the "License"); + you may not use this file except in compliance with the License. + You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + + Unless required by applicable law or agreed to in writing, software + distributed under the License is distributed on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + See the License for the specific language governing permissions and + limitations under the License. \ No newline at end of file diff --git a/amplify/functions/deleteDocument/node_modules/@aws-sdk/middleware-logger/README.md b/amplify/functions/deleteDocument/node_modules/@aws-sdk/middleware-logger/README.md new file mode 100644 index 0000000..861fa43 --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@aws-sdk/middleware-logger/README.md @@ -0,0 +1,4 @@ +# @aws-sdk/middleware-logger + +[![NPM version](https://img.shields.io/npm/v/@aws-sdk/middleware-logger/latest.svg)](https://www.npmjs.com/package/@aws-sdk/middleware-logger) +[![NPM downloads](https://img.shields.io/npm/dm/@aws-sdk/middleware-logger.svg)](https://www.npmjs.com/package/@aws-sdk/middleware-logger) diff --git a/amplify/functions/deleteDocument/node_modules/@aws-sdk/middleware-logger/dist-cjs/index.js b/amplify/functions/deleteDocument/node_modules/@aws-sdk/middleware-logger/dist-cjs/index.js new file mode 100644 index 0000000..b1db308 --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@aws-sdk/middleware-logger/dist-cjs/index.js @@ -0,0 +1,79 @@ +"use strict"; +var __defProp = Object.defineProperty; +var __getOwnPropDesc = Object.getOwnPropertyDescriptor; +var __getOwnPropNames = Object.getOwnPropertyNames; +var __hasOwnProp = Object.prototype.hasOwnProperty; +var __name = (target, value) => __defProp(target, "name", { value, configurable: true }); +var __export = (target, all) => { + for (var name 
in all) + __defProp(target, name, { get: all[name], enumerable: true }); +}; +var __copyProps = (to, from, except, desc) => { + if (from && typeof from === "object" || typeof from === "function") { + for (let key of __getOwnPropNames(from)) + if (!__hasOwnProp.call(to, key) && key !== except) + __defProp(to, key, { get: () => from[key], enumerable: !(desc = __getOwnPropDesc(from, key)) || desc.enumerable }); + } + return to; +}; +var __toCommonJS = (mod) => __copyProps(__defProp({}, "__esModule", { value: true }), mod); + +// src/index.ts +var index_exports = {}; +__export(index_exports, { + getLoggerPlugin: () => getLoggerPlugin, + loggerMiddleware: () => loggerMiddleware, + loggerMiddlewareOptions: () => loggerMiddlewareOptions +}); +module.exports = __toCommonJS(index_exports); + +// src/loggerMiddleware.ts +var loggerMiddleware = /* @__PURE__ */ __name(() => (next, context) => async (args) => { + try { + const response = await next(args); + const { clientName, commandName, logger, dynamoDbDocumentClientOptions = {} } = context; + const { overrideInputFilterSensitiveLog, overrideOutputFilterSensitiveLog } = dynamoDbDocumentClientOptions; + const inputFilterSensitiveLog = overrideInputFilterSensitiveLog ?? context.inputFilterSensitiveLog; + const outputFilterSensitiveLog = overrideOutputFilterSensitiveLog ?? context.outputFilterSensitiveLog; + const { $metadata, ...outputWithoutMetadata } = response.output; + logger?.info?.({ + clientName, + commandName, + input: inputFilterSensitiveLog(args.input), + output: outputFilterSensitiveLog(outputWithoutMetadata), + metadata: $metadata + }); + return response; + } catch (error) { + const { clientName, commandName, logger, dynamoDbDocumentClientOptions = {} } = context; + const { overrideInputFilterSensitiveLog } = dynamoDbDocumentClientOptions; + const inputFilterSensitiveLog = overrideInputFilterSensitiveLog ?? 
context.inputFilterSensitiveLog; + logger?.error?.({ + clientName, + commandName, + input: inputFilterSensitiveLog(args.input), + error, + metadata: error.$metadata + }); + throw error; + } +}, "loggerMiddleware"); +var loggerMiddlewareOptions = { + name: "loggerMiddleware", + tags: ["LOGGER"], + step: "initialize", + override: true +}; +var getLoggerPlugin = /* @__PURE__ */ __name((options) => ({ + applyToStack: /* @__PURE__ */ __name((clientStack) => { + clientStack.add(loggerMiddleware(), loggerMiddlewareOptions); + }, "applyToStack") +}), "getLoggerPlugin"); +// Annotate the CommonJS export names for ESM import in node: + +0 && (module.exports = { + loggerMiddleware, + loggerMiddlewareOptions, + getLoggerPlugin +}); + diff --git a/amplify/functions/deleteDocument/node_modules/@aws-sdk/middleware-logger/dist-es/index.js b/amplify/functions/deleteDocument/node_modules/@aws-sdk/middleware-logger/dist-es/index.js new file mode 100644 index 0000000..171e3bc --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@aws-sdk/middleware-logger/dist-es/index.js @@ -0,0 +1 @@ +export * from "./loggerMiddleware"; diff --git a/amplify/functions/deleteDocument/node_modules/@aws-sdk/middleware-logger/dist-es/loggerMiddleware.js b/amplify/functions/deleteDocument/node_modules/@aws-sdk/middleware-logger/dist-es/loggerMiddleware.js new file mode 100644 index 0000000..50da4cc --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@aws-sdk/middleware-logger/dist-es/loggerMiddleware.js @@ -0,0 +1,42 @@ +export const loggerMiddleware = () => (next, context) => async (args) => { + try { + const response = await next(args); + const { clientName, commandName, logger, dynamoDbDocumentClientOptions = {} } = context; + const { overrideInputFilterSensitiveLog, overrideOutputFilterSensitiveLog } = dynamoDbDocumentClientOptions; + const inputFilterSensitiveLog = overrideInputFilterSensitiveLog ?? 
context.inputFilterSensitiveLog; + const outputFilterSensitiveLog = overrideOutputFilterSensitiveLog ?? context.outputFilterSensitiveLog; + const { $metadata, ...outputWithoutMetadata } = response.output; + logger?.info?.({ + clientName, + commandName, + input: inputFilterSensitiveLog(args.input), + output: outputFilterSensitiveLog(outputWithoutMetadata), + metadata: $metadata, + }); + return response; + } + catch (error) { + const { clientName, commandName, logger, dynamoDbDocumentClientOptions = {} } = context; + const { overrideInputFilterSensitiveLog } = dynamoDbDocumentClientOptions; + const inputFilterSensitiveLog = overrideInputFilterSensitiveLog ?? context.inputFilterSensitiveLog; + logger?.error?.({ + clientName, + commandName, + input: inputFilterSensitiveLog(args.input), + error, + metadata: error.$metadata, + }); + throw error; + } +}; +export const loggerMiddlewareOptions = { + name: "loggerMiddleware", + tags: ["LOGGER"], + step: "initialize", + override: true, +}; +export const getLoggerPlugin = (options) => ({ + applyToStack: (clientStack) => { + clientStack.add(loggerMiddleware(), loggerMiddlewareOptions); + }, +}); diff --git a/amplify/functions/deleteDocument/node_modules/@aws-sdk/middleware-logger/dist-types/index.d.ts b/amplify/functions/deleteDocument/node_modules/@aws-sdk/middleware-logger/dist-types/index.d.ts new file mode 100644 index 0000000..171e3bc --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@aws-sdk/middleware-logger/dist-types/index.d.ts @@ -0,0 +1 @@ +export * from "./loggerMiddleware"; diff --git a/amplify/functions/deleteDocument/node_modules/@aws-sdk/middleware-logger/dist-types/loggerMiddleware.d.ts b/amplify/functions/deleteDocument/node_modules/@aws-sdk/middleware-logger/dist-types/loggerMiddleware.d.ts new file mode 100644 index 0000000..5712017 --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@aws-sdk/middleware-logger/dist-types/loggerMiddleware.d.ts @@ -0,0 +1,4 @@ +import { 
AbsoluteLocation, HandlerExecutionContext, InitializeHandler, InitializeHandlerOptions, MetadataBearer, Pluggable } from "@smithy/types"; +export declare const loggerMiddleware: () => (next: InitializeHandler, context: HandlerExecutionContext) => InitializeHandler; +export declare const loggerMiddlewareOptions: InitializeHandlerOptions & AbsoluteLocation; +export declare const getLoggerPlugin: (options: any) => Pluggable; diff --git a/amplify/functions/deleteDocument/node_modules/@aws-sdk/middleware-logger/dist-types/ts3.4/index.d.ts b/amplify/functions/deleteDocument/node_modules/@aws-sdk/middleware-logger/dist-types/ts3.4/index.d.ts new file mode 100644 index 0000000..171e3bc --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@aws-sdk/middleware-logger/dist-types/ts3.4/index.d.ts @@ -0,0 +1 @@ +export * from "./loggerMiddleware"; diff --git a/amplify/functions/deleteDocument/node_modules/@aws-sdk/middleware-logger/dist-types/ts3.4/loggerMiddleware.d.ts b/amplify/functions/deleteDocument/node_modules/@aws-sdk/middleware-logger/dist-types/ts3.4/loggerMiddleware.d.ts new file mode 100644 index 0000000..10ded9e --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@aws-sdk/middleware-logger/dist-types/ts3.4/loggerMiddleware.d.ts @@ -0,0 +1,17 @@ +import { + AbsoluteLocation, + HandlerExecutionContext, + InitializeHandler, + InitializeHandlerOptions, + MetadataBearer, + Pluggable, +} from "@smithy/types"; +export declare const loggerMiddleware: () => < + Output extends MetadataBearer = MetadataBearer +>( + next: InitializeHandler, + context: HandlerExecutionContext +) => InitializeHandler; +export declare const loggerMiddlewareOptions: InitializeHandlerOptions & + AbsoluteLocation; +export declare const getLoggerPlugin: (options: any) => Pluggable; diff --git a/amplify/functions/deleteDocument/node_modules/@aws-sdk/middleware-logger/package.json b/amplify/functions/deleteDocument/node_modules/@aws-sdk/middleware-logger/package.json new file 
mode 100644 index 0000000..7187da9 --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@aws-sdk/middleware-logger/package.json @@ -0,0 +1,58 @@ +{ + "name": "@aws-sdk/middleware-logger", + "version": "3.775.0", + "scripts": { + "build": "concurrently 'yarn:build:cjs' 'yarn:build:es' 'yarn:build:types'", + "build:cjs": "node ../../scripts/compilation/inline middleware-logger", + "build:es": "tsc -p tsconfig.es.json", + "build:include:deps": "lerna run --scope $npm_package_name --include-dependencies build", + "build:types": "tsc -p tsconfig.types.json", + "build:types:downlevel": "downlevel-dts dist-types dist-types/ts3.4", + "clean": "rimraf ./dist-* && rimraf *.tsbuildinfo", + "test": "yarn g:vitest run", + "test:integration": "yarn g:vitest run -c vitest.config.integ.ts", + "test:watch": "yarn g:vitest watch", + "test:integration:watch": "yarn g:vitest watch -c vitest.config.integ.ts" + }, + "author": { + "name": "AWS SDK for JavaScript Team", + "email": "", + "url": "https://aws.amazon.com/javascript/" + }, + "license": "Apache-2.0", + "main": "./dist-cjs/index.js", + "module": "./dist-es/index.js", + "types": "./dist-types/index.d.ts", + "dependencies": { + "@aws-sdk/types": "3.775.0", + "@smithy/types": "^4.2.0", + "tslib": "^2.6.2" + }, + "devDependencies": { + "@tsconfig/recommended": "1.0.1", + "@types/node": "^18.19.69", + "concurrently": "7.0.0", + "downlevel-dts": "0.10.1", + "rimraf": "3.0.2", + "typescript": "~5.2.2" + }, + "engines": { + "node": ">=18.0.0" + }, + "typesVersions": { + "<4.0": { + "dist-types/*": [ + "dist-types/ts3.4/*" + ] + } + }, + "files": [ + "dist-*/**" + ], + "homepage": "https://github.com/aws/aws-sdk-js-v3/tree/main/packages/middleware-logger", + "repository": { + "type": "git", + "url": "https://github.com/aws/aws-sdk-js-v3.git", + "directory": "packages/middleware-logger" + } +} diff --git a/amplify/functions/deleteDocument/node_modules/@aws-sdk/middleware-recursion-detection/LICENSE 
b/amplify/functions/deleteDocument/node_modules/@aws-sdk/middleware-recursion-detection/LICENSE new file mode 100644 index 0000000..e907b58 --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@aws-sdk/middleware-recursion-detection/LICENSE @@ -0,0 +1,201 @@ + Apache License + Version 2.0, January 2004 + http://www.apache.org/licenses/ + + TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION + + 1. Definitions. + + "License" shall mean the terms and conditions for use, reproduction, + and distribution as defined by Sections 1 through 9 of this document. + + "Licensor" shall mean the copyright owner or entity authorized by + the copyright owner that is granting the License. + + "Legal Entity" shall mean the union of the acting entity and all + other entities that control, are controlled by, or are under common + control with that entity. For the purposes of this definition, + "control" means (i) the power, direct or indirect, to cause the + direction or management of such entity, whether by contract or + otherwise, or (ii) ownership of fifty percent (50%) or more of the + outstanding shares, or (iii) beneficial ownership of such entity. + + "You" (or "Your") shall mean an individual or Legal Entity + exercising permissions granted by this License. + + "Source" form shall mean the preferred form for making modifications, + including but not limited to software source code, documentation + source, and configuration files. + + "Object" form shall mean any form resulting from mechanical + transformation or translation of a Source form, including but + not limited to compiled object code, generated documentation, + and conversions to other media types. + + "Work" shall mean the work of authorship, whether in Source or + Object form, made available under the License, as indicated by a + copyright notice that is included in or attached to the work + (an example is provided in the Appendix below). 
+ + "Derivative Works" shall mean any work, whether in Source or Object + form, that is based on (or derived from) the Work and for which the + editorial revisions, annotations, elaborations, or other modifications + represent, as a whole, an original work of authorship. For the purposes + of this License, Derivative Works shall not include works that remain + separable from, or merely link (or bind by name) to the interfaces of, + the Work and Derivative Works thereof. + + "Contribution" shall mean any work of authorship, including + the original version of the Work and any modifications or additions + to that Work or Derivative Works thereof, that is intentionally + submitted to Licensor for inclusion in the Work by the copyright owner + or by an individual or Legal Entity authorized to submit on behalf of + the copyright owner. For the purposes of this definition, "submitted" + means any form of electronic, verbal, or written communication sent + to the Licensor or its representatives, including but not limited to + communication on electronic mailing lists, source code control systems, + and issue tracking systems that are managed by, or on behalf of, the + Licensor for the purpose of discussing and improving the Work, but + excluding communication that is conspicuously marked or otherwise + designated in writing by the copyright owner as "Not a Contribution." + + "Contributor" shall mean Licensor and any individual or Legal Entity + on behalf of whom a Contribution has been received by Licensor and + subsequently incorporated within the Work. + + 2. Grant of Copyright License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + copyright license to reproduce, prepare Derivative Works of, + publicly display, publicly perform, sublicense, and distribute the + Work and such Derivative Works in Source or Object form. + + 3. 
Grant of Patent License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + (except as stated in this section) patent license to make, have made, + use, offer to sell, sell, import, and otherwise transfer the Work, + where such license applies only to those patent claims licensable + by such Contributor that are necessarily infringed by their + Contribution(s) alone or by combination of their Contribution(s) + with the Work to which such Contribution(s) was submitted. If You + institute patent litigation against any entity (including a + cross-claim or counterclaim in a lawsuit) alleging that the Work + or a Contribution incorporated within the Work constitutes direct + or contributory patent infringement, then any patent licenses + granted to You under this License for that Work shall terminate + as of the date such litigation is filed. + + 4. Redistribution. You may reproduce and distribute copies of the + Work or Derivative Works thereof in any medium, with or without + modifications, and in Source or Object form, provided that You + meet the following conditions: + + (a) You must give any other recipients of the Work or + Derivative Works a copy of this License; and + + (b) You must cause any modified files to carry prominent notices + stating that You changed the files; and + + (c) You must retain, in the Source form of any Derivative Works + that You distribute, all copyright, patent, trademark, and + attribution notices from the Source form of the Work, + excluding those notices that do not pertain to any part of + the Derivative Works; and + + (d) If the Work includes a "NOTICE" text file as part of its + distribution, then any Derivative Works that You distribute must + include a readable copy of the attribution notices contained + within such NOTICE file, excluding those notices that do not + pertain to any part of the Derivative 
Works, in at least one + of the following places: within a NOTICE text file distributed + as part of the Derivative Works; within the Source form or + documentation, if provided along with the Derivative Works; or, + within a display generated by the Derivative Works, if and + wherever such third-party notices normally appear. The contents + of the NOTICE file are for informational purposes only and + do not modify the License. You may add Your own attribution + notices within Derivative Works that You distribute, alongside + or as an addendum to the NOTICE text from the Work, provided + that such additional attribution notices cannot be construed + as modifying the License. + + You may add Your own copyright statement to Your modifications and + may provide additional or different license terms and conditions + for use, reproduction, or distribution of Your modifications, or + for any such Derivative Works as a whole, provided Your use, + reproduction, and distribution of the Work otherwise complies with + the conditions stated in this License. + + 5. Submission of Contributions. Unless You explicitly state otherwise, + any Contribution intentionally submitted for inclusion in the Work + by You to the Licensor shall be under the terms and conditions of + this License, without any additional terms or conditions. + Notwithstanding the above, nothing herein shall supersede or modify + the terms of any separate license agreement you may have executed + with Licensor regarding such Contributions. + + 6. Trademarks. This License does not grant permission to use the trade + names, trademarks, service marks, or product names of the Licensor, + except as required for reasonable and customary use in describing the + origin of the Work and reproducing the content of the NOTICE file. + + 7. Disclaimer of Warranty. 
Unless required by applicable law or + agreed to in writing, Licensor provides the Work (and each + Contributor provides its Contributions) on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or + implied, including, without limitation, any warranties or conditions + of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A + PARTICULAR PURPOSE. You are solely responsible for determining the + appropriateness of using or redistributing the Work and assume any + risks associated with Your exercise of permissions under this License. + + 8. Limitation of Liability. In no event and under no legal theory, + whether in tort (including negligence), contract, or otherwise, + unless required by applicable law (such as deliberate and grossly + negligent acts) or agreed to in writing, shall any Contributor be + liable to You for damages, including any direct, indirect, special, + incidental, or consequential damages of any character arising as a + result of this License or out of the use or inability to use the + Work (including but not limited to damages for loss of goodwill, + work stoppage, computer failure or malfunction, or any and all + other commercial damages or losses), even if such Contributor + has been advised of the possibility of such damages. + + 9. Accepting Warranty or Additional Liability. While redistributing + the Work or Derivative Works thereof, You may choose to offer, + and charge a fee for, acceptance of support, warranty, indemnity, + or other liability obligations and/or rights consistent with this + License. However, in accepting such obligations, You may act only + on Your own behalf and on Your sole responsibility, not on behalf + of any other Contributor, and only if You agree to indemnify, + defend, and hold each Contributor harmless for any liability + incurred by, or claims asserted against, such Contributor by reason + of your accepting any such warranty or additional liability. 
+ + END OF TERMS AND CONDITIONS + + APPENDIX: How to apply the Apache License to your work. + + To apply the Apache License to your work, attach the following + boilerplate notice, with the fields enclosed by brackets "{}" + replaced with your own identifying information. (Don't include + the brackets!) The text should be enclosed in the appropriate + comment syntax for the file format. We also recommend that a + file or class name and description of purpose be included on the + same "printed page" as the copyright notice for easier + identification within third-party archives. + + Copyright 2019 Amazon.com, Inc. or its affiliates. All Rights Reserved. + + Licensed under the Apache License, Version 2.0 (the "License"); + you may not use this file except in compliance with the License. + You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + + Unless required by applicable law or agreed to in writing, software + distributed under the License is distributed on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + See the License for the specific language governing permissions and + limitations under the License. 
diff --git a/amplify/functions/deleteDocument/node_modules/@aws-sdk/middleware-recursion-detection/README.md b/amplify/functions/deleteDocument/node_modules/@aws-sdk/middleware-recursion-detection/README.md new file mode 100644 index 0000000..2d5437e --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@aws-sdk/middleware-recursion-detection/README.md @@ -0,0 +1,10 @@ +# @aws-sdk/middleware-recursion-detection + +[![NPM version](https://img.shields.io/npm/v/@aws-sdk/middleware-recursion-detection/latest.svg)](https://www.npmjs.com/package/@aws-sdk/middleware-recursion-detection) +[![NPM downloads](https://img.shields.io/npm/dm/@aws-sdk/middleware-recursion-detection.svg)](https://www.npmjs.com/package/@aws-sdk/middleware-recursion-detection) + +> An internal package + +## Usage + +You probably shouldn't, at least directly. diff --git a/amplify/functions/deleteDocument/node_modules/@aws-sdk/middleware-recursion-detection/dist-cjs/index.js b/amplify/functions/deleteDocument/node_modules/@aws-sdk/middleware-recursion-detection/dist-cjs/index.js new file mode 100644 index 0000000..a387687 --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@aws-sdk/middleware-recursion-detection/dist-cjs/index.js @@ -0,0 +1,72 @@ +"use strict"; +var __defProp = Object.defineProperty; +var __getOwnPropDesc = Object.getOwnPropertyDescriptor; +var __getOwnPropNames = Object.getOwnPropertyNames; +var __hasOwnProp = Object.prototype.hasOwnProperty; +var __name = (target, value) => __defProp(target, "name", { value, configurable: true }); +var __export = (target, all) => { + for (var name in all) + __defProp(target, name, { get: all[name], enumerable: true }); +}; +var __copyProps = (to, from, except, desc) => { + if (from && typeof from === "object" || typeof from === "function") { + for (let key of __getOwnPropNames(from)) + if (!__hasOwnProp.call(to, key) && key !== except) + __defProp(to, key, { get: () => from[key], enumerable: !(desc = __getOwnPropDesc(from, 
key)) || desc.enumerable }); + } + return to; +}; +var __toCommonJS = (mod) => __copyProps(__defProp({}, "__esModule", { value: true }), mod); + +// src/index.ts +var index_exports = {}; +__export(index_exports, { + addRecursionDetectionMiddlewareOptions: () => addRecursionDetectionMiddlewareOptions, + getRecursionDetectionPlugin: () => getRecursionDetectionPlugin, + recursionDetectionMiddleware: () => recursionDetectionMiddleware +}); +module.exports = __toCommonJS(index_exports); +var import_protocol_http = require("@smithy/protocol-http"); +var TRACE_ID_HEADER_NAME = "X-Amzn-Trace-Id"; +var ENV_LAMBDA_FUNCTION_NAME = "AWS_LAMBDA_FUNCTION_NAME"; +var ENV_TRACE_ID = "_X_AMZN_TRACE_ID"; +var recursionDetectionMiddleware = /* @__PURE__ */ __name((options) => (next) => async (args) => { + const { request } = args; + if (!import_protocol_http.HttpRequest.isInstance(request) || options.runtime !== "node") { + return next(args); + } + const traceIdHeader = Object.keys(request.headers ?? {}).find((h) => h.toLowerCase() === TRACE_ID_HEADER_NAME.toLowerCase()) ?? 
TRACE_ID_HEADER_NAME; + if (request.headers.hasOwnProperty(traceIdHeader)) { + return next(args); + } + const functionName = process.env[ENV_LAMBDA_FUNCTION_NAME]; + const traceId = process.env[ENV_TRACE_ID]; + const nonEmptyString = /* @__PURE__ */ __name((str) => typeof str === "string" && str.length > 0, "nonEmptyString"); + if (nonEmptyString(functionName) && nonEmptyString(traceId)) { + request.headers[TRACE_ID_HEADER_NAME] = traceId; + } + return next({ + ...args, + request + }); +}, "recursionDetectionMiddleware"); +var addRecursionDetectionMiddlewareOptions = { + step: "build", + tags: ["RECURSION_DETECTION"], + name: "recursionDetectionMiddleware", + override: true, + priority: "low" +}; +var getRecursionDetectionPlugin = /* @__PURE__ */ __name((options) => ({ + applyToStack: /* @__PURE__ */ __name((clientStack) => { + clientStack.add(recursionDetectionMiddleware(options), addRecursionDetectionMiddlewareOptions); + }, "applyToStack") +}), "getRecursionDetectionPlugin"); +// Annotate the CommonJS export names for ESM import in node: + +0 && (module.exports = { + recursionDetectionMiddleware, + addRecursionDetectionMiddlewareOptions, + getRecursionDetectionPlugin +}); + diff --git a/amplify/functions/deleteDocument/node_modules/@aws-sdk/middleware-recursion-detection/dist-es/index.js b/amplify/functions/deleteDocument/node_modules/@aws-sdk/middleware-recursion-detection/dist-es/index.js new file mode 100644 index 0000000..8ac4748 --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@aws-sdk/middleware-recursion-detection/dist-es/index.js @@ -0,0 +1,37 @@ +import { HttpRequest } from "@smithy/protocol-http"; +const TRACE_ID_HEADER_NAME = "X-Amzn-Trace-Id"; +const ENV_LAMBDA_FUNCTION_NAME = "AWS_LAMBDA_FUNCTION_NAME"; +const ENV_TRACE_ID = "_X_AMZN_TRACE_ID"; +export const recursionDetectionMiddleware = (options) => (next) => async (args) => { + const { request } = args; + if (!HttpRequest.isInstance(request) || options.runtime !== "node") { + 
return next(args); + } + const traceIdHeader = Object.keys(request.headers ?? {}).find((h) => h.toLowerCase() === TRACE_ID_HEADER_NAME.toLowerCase()) ?? + TRACE_ID_HEADER_NAME; + if (request.headers.hasOwnProperty(traceIdHeader)) { + return next(args); + } + const functionName = process.env[ENV_LAMBDA_FUNCTION_NAME]; + const traceId = process.env[ENV_TRACE_ID]; + const nonEmptyString = (str) => typeof str === "string" && str.length > 0; + if (nonEmptyString(functionName) && nonEmptyString(traceId)) { + request.headers[TRACE_ID_HEADER_NAME] = traceId; + } + return next({ + ...args, + request, + }); +}; +export const addRecursionDetectionMiddlewareOptions = { + step: "build", + tags: ["RECURSION_DETECTION"], + name: "recursionDetectionMiddleware", + override: true, + priority: "low", +}; +export const getRecursionDetectionPlugin = (options) => ({ + applyToStack: (clientStack) => { + clientStack.add(recursionDetectionMiddleware(options), addRecursionDetectionMiddlewareOptions); + }, +}); diff --git a/amplify/functions/deleteDocument/node_modules/@aws-sdk/middleware-recursion-detection/dist-types/index.d.ts b/amplify/functions/deleteDocument/node_modules/@aws-sdk/middleware-recursion-detection/dist-types/index.d.ts new file mode 100644 index 0000000..9f92984 --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@aws-sdk/middleware-recursion-detection/dist-types/index.d.ts @@ -0,0 +1,18 @@ +import { AbsoluteLocation, BuildHandlerOptions, BuildMiddleware, Pluggable } from "@smithy/types"; +interface PreviouslyResolved { + runtime: string; +} +/** + * Inject to trace ID to request header to detect recursion invocation in Lambda. 
+ * @internal + */ +export declare const recursionDetectionMiddleware: (options: PreviouslyResolved) => BuildMiddleware; +/** + * @internal + */ +export declare const addRecursionDetectionMiddlewareOptions: BuildHandlerOptions & AbsoluteLocation; +/** + * @internal + */ +export declare const getRecursionDetectionPlugin: (options: PreviouslyResolved) => Pluggable; +export {}; diff --git a/amplify/functions/deleteDocument/node_modules/@aws-sdk/middleware-recursion-detection/dist-types/ts3.4/index.d.ts b/amplify/functions/deleteDocument/node_modules/@aws-sdk/middleware-recursion-detection/dist-types/ts3.4/index.d.ts new file mode 100644 index 0000000..8d1658b --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@aws-sdk/middleware-recursion-detection/dist-types/ts3.4/index.d.ts @@ -0,0 +1,18 @@ +import { + AbsoluteLocation, + BuildHandlerOptions, + BuildMiddleware, + Pluggable, +} from "@smithy/types"; +interface PreviouslyResolved { + runtime: string; +} +export declare const recursionDetectionMiddleware: ( + options: PreviouslyResolved +) => BuildMiddleware; +export declare const addRecursionDetectionMiddlewareOptions: BuildHandlerOptions & + AbsoluteLocation; +export declare const getRecursionDetectionPlugin: ( + options: PreviouslyResolved +) => Pluggable; +export {}; diff --git a/amplify/functions/deleteDocument/node_modules/@aws-sdk/middleware-recursion-detection/package.json b/amplify/functions/deleteDocument/node_modules/@aws-sdk/middleware-recursion-detection/package.json new file mode 100644 index 0000000..7c831f9 --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@aws-sdk/middleware-recursion-detection/package.json @@ -0,0 +1,57 @@ +{ + "name": "@aws-sdk/middleware-recursion-detection", + "version": "3.775.0", + "scripts": { + "build": "concurrently 'yarn:build:cjs' 'yarn:build:es' 'yarn:build:types'", + "build:cjs": "node ../../scripts/compilation/inline middleware-recursion-detection", + "build:es": "tsc -p tsconfig.es.json", + 
"build:include:deps": "lerna run --scope $npm_package_name --include-dependencies build", + "build:types": "tsc -p tsconfig.types.json", + "build:types:downlevel": "downlevel-dts dist-types dist-types/ts3.4", + "clean": "rimraf ./dist-* && rimraf *.tsbuildinfo", + "test": "yarn g:vitest run", + "test:integration": "yarn g:vitest run -c vitest.config.integ.ts", + "test:watch": "yarn g:vitest watch", + "test:integration:watch": "yarn g:vitest watch -c vitest.config.integ.ts" + }, + "main": "./dist-cjs/index.js", + "module": "./dist-es/index.js", + "types": "./dist-types/index.d.ts", + "author": { + "name": "AWS SDK for JavaScript Team", + "url": "https://aws.amazon.com/javascript/" + }, + "license": "Apache-2.0", + "dependencies": { + "@aws-sdk/types": "3.775.0", + "@smithy/protocol-http": "^5.1.0", + "@smithy/types": "^4.2.0", + "tslib": "^2.6.2" + }, + "engines": { + "node": ">=18.0.0" + }, + "typesVersions": { + "<4.0": { + "dist-types/*": [ + "dist-types/ts3.4/*" + ] + } + }, + "files": [ + "dist-*/**" + ], + "homepage": "https://github.com/aws/aws-sdk-js-v3/tree/main/packages/middleware-recursion-detection", + "repository": { + "type": "git", + "url": "https://github.com/aws/aws-sdk-js-v3.git", + "directory": "packages/middleware-recursion-detection" + }, + "devDependencies": { + "@tsconfig/recommended": "1.0.1", + "concurrently": "7.0.0", + "downlevel-dts": "0.10.1", + "rimraf": "3.0.2", + "typescript": "~5.2.2" + } +} diff --git a/amplify/functions/deleteDocument/node_modules/@aws-sdk/middleware-user-agent/LICENSE b/amplify/functions/deleteDocument/node_modules/@aws-sdk/middleware-user-agent/LICENSE new file mode 100644 index 0000000..e907b58 --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@aws-sdk/middleware-user-agent/LICENSE @@ -0,0 +1,201 @@ + Apache License + Version 2.0, January 2004 + http://www.apache.org/licenses/ + + TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION + + 1. Definitions. 
+ + "License" shall mean the terms and conditions for use, reproduction, + and distribution as defined by Sections 1 through 9 of this document. + + "Licensor" shall mean the copyright owner or entity authorized by + the copyright owner that is granting the License. + + "Legal Entity" shall mean the union of the acting entity and all + other entities that control, are controlled by, or are under common + control with that entity. For the purposes of this definition, + "control" means (i) the power, direct or indirect, to cause the + direction or management of such entity, whether by contract or + otherwise, or (ii) ownership of fifty percent (50%) or more of the + outstanding shares, or (iii) beneficial ownership of such entity. + + "You" (or "Your") shall mean an individual or Legal Entity + exercising permissions granted by this License. + + "Source" form shall mean the preferred form for making modifications, + including but not limited to software source code, documentation + source, and configuration files. + + "Object" form shall mean any form resulting from mechanical + transformation or translation of a Source form, including but + not limited to compiled object code, generated documentation, + and conversions to other media types. + + "Work" shall mean the work of authorship, whether in Source or + Object form, made available under the License, as indicated by a + copyright notice that is included in or attached to the work + (an example is provided in the Appendix below). + + "Derivative Works" shall mean any work, whether in Source or Object + form, that is based on (or derived from) the Work and for which the + editorial revisions, annotations, elaborations, or other modifications + represent, as a whole, an original work of authorship. For the purposes + of this License, Derivative Works shall not include works that remain + separable from, or merely link (or bind by name) to the interfaces of, + the Work and Derivative Works thereof. 
+ + "Contribution" shall mean any work of authorship, including + the original version of the Work and any modifications or additions + to that Work or Derivative Works thereof, that is intentionally + submitted to Licensor for inclusion in the Work by the copyright owner + or by an individual or Legal Entity authorized to submit on behalf of + the copyright owner. For the purposes of this definition, "submitted" + means any form of electronic, verbal, or written communication sent + to the Licensor or its representatives, including but not limited to + communication on electronic mailing lists, source code control systems, + and issue tracking systems that are managed by, or on behalf of, the + Licensor for the purpose of discussing and improving the Work, but + excluding communication that is conspicuously marked or otherwise + designated in writing by the copyright owner as "Not a Contribution." + + "Contributor" shall mean Licensor and any individual or Legal Entity + on behalf of whom a Contribution has been received by Licensor and + subsequently incorporated within the Work. + + 2. Grant of Copyright License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + copyright license to reproduce, prepare Derivative Works of, + publicly display, publicly perform, sublicense, and distribute the + Work and such Derivative Works in Source or Object form. + + 3. Grant of Patent License. 
Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + (except as stated in this section) patent license to make, have made, + use, offer to sell, sell, import, and otherwise transfer the Work, + where such license applies only to those patent claims licensable + by such Contributor that are necessarily infringed by their + Contribution(s) alone or by combination of their Contribution(s) + with the Work to which such Contribution(s) was submitted. If You + institute patent litigation against any entity (including a + cross-claim or counterclaim in a lawsuit) alleging that the Work + or a Contribution incorporated within the Work constitutes direct + or contributory patent infringement, then any patent licenses + granted to You under this License for that Work shall terminate + as of the date such litigation is filed. + + 4. Redistribution. You may reproduce and distribute copies of the + Work or Derivative Works thereof in any medium, with or without + modifications, and in Source or Object form, provided that You + meet the following conditions: + + (a) You must give any other recipients of the Work or + Derivative Works a copy of this License; and + + (b) You must cause any modified files to carry prominent notices + stating that You changed the files; and + + (c) You must retain, in the Source form of any Derivative Works + that You distribute, all copyright, patent, trademark, and + attribution notices from the Source form of the Work, + excluding those notices that do not pertain to any part of + the Derivative Works; and + + (d) If the Work includes a "NOTICE" text file as part of its + distribution, then any Derivative Works that You distribute must + include a readable copy of the attribution notices contained + within such NOTICE file, excluding those notices that do not + pertain to any part of the Derivative Works, in at least one + of 
the following places: within a NOTICE text file distributed + as part of the Derivative Works; within the Source form or + documentation, if provided along with the Derivative Works; or, + within a display generated by the Derivative Works, if and + wherever such third-party notices normally appear. The contents + of the NOTICE file are for informational purposes only and + do not modify the License. You may add Your own attribution + notices within Derivative Works that You distribute, alongside + or as an addendum to the NOTICE text from the Work, provided + that such additional attribution notices cannot be construed + as modifying the License. + + You may add Your own copyright statement to Your modifications and + may provide additional or different license terms and conditions + for use, reproduction, or distribution of Your modifications, or + for any such Derivative Works as a whole, provided Your use, + reproduction, and distribution of the Work otherwise complies with + the conditions stated in this License. + + 5. Submission of Contributions. Unless You explicitly state otherwise, + any Contribution intentionally submitted for inclusion in the Work + by You to the Licensor shall be under the terms and conditions of + this License, without any additional terms or conditions. + Notwithstanding the above, nothing herein shall supersede or modify + the terms of any separate license agreement you may have executed + with Licensor regarding such Contributions. + + 6. Trademarks. This License does not grant permission to use the trade + names, trademarks, service marks, or product names of the Licensor, + except as required for reasonable and customary use in describing the + origin of the Work and reproducing the content of the NOTICE file. + + 7. Disclaimer of Warranty. 
Unless required by applicable law or + agreed to in writing, Licensor provides the Work (and each + Contributor provides its Contributions) on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or + implied, including, without limitation, any warranties or conditions + of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A + PARTICULAR PURPOSE. You are solely responsible for determining the + appropriateness of using or redistributing the Work and assume any + risks associated with Your exercise of permissions under this License. + + 8. Limitation of Liability. In no event and under no legal theory, + whether in tort (including negligence), contract, or otherwise, + unless required by applicable law (such as deliberate and grossly + negligent acts) or agreed to in writing, shall any Contributor be + liable to You for damages, including any direct, indirect, special, + incidental, or consequential damages of any character arising as a + result of this License or out of the use or inability to use the + Work (including but not limited to damages for loss of goodwill, + work stoppage, computer failure or malfunction, or any and all + other commercial damages or losses), even if such Contributor + has been advised of the possibility of such damages. + + 9. Accepting Warranty or Additional Liability. While redistributing + the Work or Derivative Works thereof, You may choose to offer, + and charge a fee for, acceptance of support, warranty, indemnity, + or other liability obligations and/or rights consistent with this + License. However, in accepting such obligations, You may act only + on Your own behalf and on Your sole responsibility, not on behalf + of any other Contributor, and only if You agree to indemnify, + defend, and hold each Contributor harmless for any liability + incurred by, or claims asserted against, such Contributor by reason + of your accepting any such warranty or additional liability. 
+ + END OF TERMS AND CONDITIONS + + APPENDIX: How to apply the Apache License to your work. + + To apply the Apache License to your work, attach the following + boilerplate notice, with the fields enclosed by brackets "{}" + replaced with your own identifying information. (Don't include + the brackets!) The text should be enclosed in the appropriate + comment syntax for the file format. We also recommend that a + file or class name and description of purpose be included on the + same "printed page" as the copyright notice for easier + identification within third-party archives. + + Copyright 2019 Amazon.com, Inc. or its affiliates. All Rights Reserved. + + Licensed under the Apache License, Version 2.0 (the "License"); + you may not use this file except in compliance with the License. + You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + + Unless required by applicable law or agreed to in writing, software + distributed under the License is distributed on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + See the License for the specific language governing permissions and + limitations under the License. 
diff --git a/amplify/functions/deleteDocument/node_modules/@aws-sdk/middleware-user-agent/README.md b/amplify/functions/deleteDocument/node_modules/@aws-sdk/middleware-user-agent/README.md new file mode 100644 index 0000000..a0bf1a9 --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@aws-sdk/middleware-user-agent/README.md @@ -0,0 +1,4 @@ +# @aws-sdk/middleware-user-agent + +[![NPM version](https://img.shields.io/npm/v/@aws-sdk/middleware-user-agent/latest.svg)](https://www.npmjs.com/package/@aws-sdk/middleware-user-agent) +[![NPM downloads](https://img.shields.io/npm/dm/@aws-sdk/middleware-user-agent.svg)](https://www.npmjs.com/package/@aws-sdk/middleware-user-agent) diff --git a/amplify/functions/deleteDocument/node_modules/@aws-sdk/middleware-user-agent/dist-cjs/index.js b/amplify/functions/deleteDocument/node_modules/@aws-sdk/middleware-user-agent/dist-cjs/index.js new file mode 100644 index 0000000..aaf267c --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@aws-sdk/middleware-user-agent/dist-cjs/index.js @@ -0,0 +1,227 @@ +"use strict"; +var __defProp = Object.defineProperty; +var __getOwnPropDesc = Object.getOwnPropertyDescriptor; +var __getOwnPropNames = Object.getOwnPropertyNames; +var __hasOwnProp = Object.prototype.hasOwnProperty; +var __name = (target, value) => __defProp(target, "name", { value, configurable: true }); +var __export = (target, all) => { + for (var name in all) + __defProp(target, name, { get: all[name], enumerable: true }); +}; +var __copyProps = (to, from, except, desc) => { + if (from && typeof from === "object" || typeof from === "function") { + for (let key of __getOwnPropNames(from)) + if (!__hasOwnProp.call(to, key) && key !== except) + __defProp(to, key, { get: () => from[key], enumerable: !(desc = __getOwnPropDesc(from, key)) || desc.enumerable }); + } + return to; +}; +var __toCommonJS = (mod) => __copyProps(__defProp({}, "__esModule", { value: true }), mod); + +// src/index.ts +var index_exports = 
{}; +__export(index_exports, { + DEFAULT_UA_APP_ID: () => DEFAULT_UA_APP_ID, + getUserAgentMiddlewareOptions: () => getUserAgentMiddlewareOptions, + getUserAgentPlugin: () => getUserAgentPlugin, + resolveUserAgentConfig: () => resolveUserAgentConfig, + userAgentMiddleware: () => userAgentMiddleware +}); +module.exports = __toCommonJS(index_exports); + +// src/configurations.ts +var import_core = require("@smithy/core"); +var DEFAULT_UA_APP_ID = void 0; +function isValidUserAgentAppId(appId) { + if (appId === void 0) { + return true; + } + return typeof appId === "string" && appId.length <= 50; +} +__name(isValidUserAgentAppId, "isValidUserAgentAppId"); +function resolveUserAgentConfig(input) { + const normalizedAppIdProvider = (0, import_core.normalizeProvider)(input.userAgentAppId ?? DEFAULT_UA_APP_ID); + const { customUserAgent } = input; + return Object.assign(input, { + customUserAgent: typeof customUserAgent === "string" ? [[customUserAgent]] : customUserAgent, + userAgentAppId: /* @__PURE__ */ __name(async () => { + const appId = await normalizedAppIdProvider(); + if (!isValidUserAgentAppId(appId)) { + const logger = input.logger?.constructor?.name === "NoOpLogger" || !input.logger ? 
console : input.logger; + if (typeof appId !== "string") { + logger?.warn("userAgentAppId must be a string or undefined."); + } else if (appId.length > 50) { + logger?.warn("The provided userAgentAppId exceeds the maximum length of 50 characters."); + } + } + return appId; + }, "userAgentAppId") + }); +} +__name(resolveUserAgentConfig, "resolveUserAgentConfig"); + +// src/user-agent-middleware.ts +var import_util_endpoints = require("@aws-sdk/util-endpoints"); +var import_protocol_http = require("@smithy/protocol-http"); + +// src/check-features.ts +var import_core2 = require("@aws-sdk/core"); +var ACCOUNT_ID_ENDPOINT_REGEX = /\d{12}\.ddb/; +async function checkFeatures(context, config, args) { + const request = args.request; + if (request?.headers?.["smithy-protocol"] === "rpc-v2-cbor") { + (0, import_core2.setFeature)(context, "PROTOCOL_RPC_V2_CBOR", "M"); + } + if (typeof config.retryStrategy === "function") { + const retryStrategy = await config.retryStrategy(); + if (typeof retryStrategy.acquireInitialRetryToken === "function") { + if (retryStrategy.constructor?.name?.includes("Adaptive")) { + (0, import_core2.setFeature)(context, "RETRY_MODE_ADAPTIVE", "F"); + } else { + (0, import_core2.setFeature)(context, "RETRY_MODE_STANDARD", "E"); + } + } else { + (0, import_core2.setFeature)(context, "RETRY_MODE_LEGACY", "D"); + } + } + if (typeof config.accountIdEndpointMode === "function") { + const endpointV2 = context.endpointV2; + if (String(endpointV2?.url?.hostname).match(ACCOUNT_ID_ENDPOINT_REGEX)) { + (0, import_core2.setFeature)(context, "ACCOUNT_ID_ENDPOINT", "O"); + } + switch (await config.accountIdEndpointMode?.()) { + case "disabled": + (0, import_core2.setFeature)(context, "ACCOUNT_ID_MODE_DISABLED", "Q"); + break; + case "preferred": + (0, import_core2.setFeature)(context, "ACCOUNT_ID_MODE_PREFERRED", "P"); + break; + case "required": + (0, import_core2.setFeature)(context, "ACCOUNT_ID_MODE_REQUIRED", "R"); + break; + } + } + const identity = 
context.__smithy_context?.selectedHttpAuthScheme?.identity; + if (identity?.$source) { + const credentials = identity; + if (credentials.accountId) { + (0, import_core2.setFeature)(context, "RESOLVED_ACCOUNT_ID", "T"); + } + for (const [key, value] of Object.entries(credentials.$source ?? {})) { + (0, import_core2.setFeature)(context, key, value); + } + } +} +__name(checkFeatures, "checkFeatures"); + +// src/constants.ts +var USER_AGENT = "user-agent"; +var X_AMZ_USER_AGENT = "x-amz-user-agent"; +var SPACE = " "; +var UA_NAME_SEPARATOR = "/"; +var UA_NAME_ESCAPE_REGEX = /[^\!\$\%\&\'\*\+\-\.\^\_\`\|\~\d\w]/g; +var UA_VALUE_ESCAPE_REGEX = /[^\!\$\%\&\'\*\+\-\.\^\_\`\|\~\d\w\#]/g; +var UA_ESCAPE_CHAR = "-"; + +// src/encode-features.ts +var BYTE_LIMIT = 1024; +function encodeFeatures(features) { + let buffer = ""; + for (const key in features) { + const val = features[key]; + if (buffer.length + val.length + 1 <= BYTE_LIMIT) { + if (buffer.length) { + buffer += "," + val; + } else { + buffer += val; + } + continue; + } + break; + } + return buffer; +} +__name(encodeFeatures, "encodeFeatures"); + +// src/user-agent-middleware.ts +var userAgentMiddleware = /* @__PURE__ */ __name((options) => (next, context) => async (args) => { + const { request } = args; + if (!import_protocol_http.HttpRequest.isInstance(request)) { + return next(args); + } + const { headers } = request; + const userAgent = context?.userAgent?.map(escapeUserAgent) || []; + const defaultUserAgent = (await options.defaultUserAgentProvider()).map(escapeUserAgent); + await checkFeatures(context, options, args); + const awsContext = context; + defaultUserAgent.push( + `m/${encodeFeatures( + Object.assign({}, context.__smithy_context?.features, awsContext.__aws_sdk_context?.features) + )}` + ); + const customUserAgent = options?.customUserAgent?.map(escapeUserAgent) || []; + const appId = await options.userAgentAppId(); + if (appId) { + defaultUserAgent.push(escapeUserAgent([`app/${appId}`])); + } + const 
prefix = (0, import_util_endpoints.getUserAgentPrefix)(); + const sdkUserAgentValue = (prefix ? [prefix] : []).concat([...defaultUserAgent, ...userAgent, ...customUserAgent]).join(SPACE); + const normalUAValue = [ + ...defaultUserAgent.filter((section) => section.startsWith("aws-sdk-")), + ...customUserAgent + ].join(SPACE); + if (options.runtime !== "browser") { + if (normalUAValue) { + headers[X_AMZ_USER_AGENT] = headers[X_AMZ_USER_AGENT] ? `${headers[USER_AGENT]} ${normalUAValue}` : normalUAValue; + } + headers[USER_AGENT] = sdkUserAgentValue; + } else { + headers[X_AMZ_USER_AGENT] = sdkUserAgentValue; + } + return next({ + ...args, + request + }); +}, "userAgentMiddleware"); +var escapeUserAgent = /* @__PURE__ */ __name((userAgentPair) => { + const name = userAgentPair[0].split(UA_NAME_SEPARATOR).map((part) => part.replace(UA_NAME_ESCAPE_REGEX, UA_ESCAPE_CHAR)).join(UA_NAME_SEPARATOR); + const version = userAgentPair[1]?.replace(UA_VALUE_ESCAPE_REGEX, UA_ESCAPE_CHAR); + const prefixSeparatorIndex = name.indexOf(UA_NAME_SEPARATOR); + const prefix = name.substring(0, prefixSeparatorIndex); + let uaName = name.substring(prefixSeparatorIndex + 1); + if (prefix === "api") { + uaName = uaName.toLowerCase(); + } + return [prefix, uaName, version].filter((item) => item && item.length > 0).reduce((acc, item, index) => { + switch (index) { + case 0: + return item; + case 1: + return `${acc}/${item}`; + default: + return `${acc}#${item}`; + } + }, ""); +}, "escapeUserAgent"); +var getUserAgentMiddlewareOptions = { + name: "getUserAgentMiddleware", + step: "build", + priority: "low", + tags: ["SET_USER_AGENT", "USER_AGENT"], + override: true +}; +var getUserAgentPlugin = /* @__PURE__ */ __name((config) => ({ + applyToStack: /* @__PURE__ */ __name((clientStack) => { + clientStack.add(userAgentMiddleware(config), getUserAgentMiddlewareOptions); + }, "applyToStack") +}), "getUserAgentPlugin"); +// Annotate the CommonJS export names for ESM import in node: + +0 && 
(module.exports = { + DEFAULT_UA_APP_ID, + resolveUserAgentConfig, + userAgentMiddleware, + getUserAgentMiddlewareOptions, + getUserAgentPlugin +}); + diff --git a/amplify/functions/deleteDocument/node_modules/@aws-sdk/middleware-user-agent/dist-es/check-features.js b/amplify/functions/deleteDocument/node_modules/@aws-sdk/middleware-user-agent/dist-es/check-features.js new file mode 100644 index 0000000..1f115a7 --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@aws-sdk/middleware-user-agent/dist-es/check-features.js @@ -0,0 +1,49 @@ +import { setFeature } from "@aws-sdk/core"; +const ACCOUNT_ID_ENDPOINT_REGEX = /\d{12}\.ddb/; +export async function checkFeatures(context, config, args) { + const request = args.request; + if (request?.headers?.["smithy-protocol"] === "rpc-v2-cbor") { + setFeature(context, "PROTOCOL_RPC_V2_CBOR", "M"); + } + if (typeof config.retryStrategy === "function") { + const retryStrategy = await config.retryStrategy(); + if (typeof retryStrategy.acquireInitialRetryToken === "function") { + if (retryStrategy.constructor?.name?.includes("Adaptive")) { + setFeature(context, "RETRY_MODE_ADAPTIVE", "F"); + } + else { + setFeature(context, "RETRY_MODE_STANDARD", "E"); + } + } + else { + setFeature(context, "RETRY_MODE_LEGACY", "D"); + } + } + if (typeof config.accountIdEndpointMode === "function") { + const endpointV2 = context.endpointV2; + if (String(endpointV2?.url?.hostname).match(ACCOUNT_ID_ENDPOINT_REGEX)) { + setFeature(context, "ACCOUNT_ID_ENDPOINT", "O"); + } + switch (await config.accountIdEndpointMode?.()) { + case "disabled": + setFeature(context, "ACCOUNT_ID_MODE_DISABLED", "Q"); + break; + case "preferred": + setFeature(context, "ACCOUNT_ID_MODE_PREFERRED", "P"); + break; + case "required": + setFeature(context, "ACCOUNT_ID_MODE_REQUIRED", "R"); + break; + } + } + const identity = context.__smithy_context?.selectedHttpAuthScheme?.identity; + if (identity?.$source) { + const credentials = identity; + if 
(credentials.accountId) { + setFeature(context, "RESOLVED_ACCOUNT_ID", "T"); + } + for (const [key, value] of Object.entries(credentials.$source ?? {})) { + setFeature(context, key, value); + } + } +} diff --git a/amplify/functions/deleteDocument/node_modules/@aws-sdk/middleware-user-agent/dist-es/configurations.js b/amplify/functions/deleteDocument/node_modules/@aws-sdk/middleware-user-agent/dist-es/configurations.js new file mode 100644 index 0000000..7fff087 --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@aws-sdk/middleware-user-agent/dist-es/configurations.js @@ -0,0 +1,28 @@ +import { normalizeProvider } from "@smithy/core"; +export const DEFAULT_UA_APP_ID = undefined; +function isValidUserAgentAppId(appId) { + if (appId === undefined) { + return true; + } + return typeof appId === "string" && appId.length <= 50; +} +export function resolveUserAgentConfig(input) { + const normalizedAppIdProvider = normalizeProvider(input.userAgentAppId ?? DEFAULT_UA_APP_ID); + const { customUserAgent } = input; + return Object.assign(input, { + customUserAgent: typeof customUserAgent === "string" ? [[customUserAgent]] : customUserAgent, + userAgentAppId: async () => { + const appId = await normalizedAppIdProvider(); + if (!isValidUserAgentAppId(appId)) { + const logger = input.logger?.constructor?.name === "NoOpLogger" || !input.logger ? 
console : input.logger; + if (typeof appId !== "string") { + logger?.warn("userAgentAppId must be a string or undefined."); + } + else if (appId.length > 50) { + logger?.warn("The provided userAgentAppId exceeds the maximum length of 50 characters."); + } + } + return appId; + }, + }); +} diff --git a/amplify/functions/deleteDocument/node_modules/@aws-sdk/middleware-user-agent/dist-es/constants.js b/amplify/functions/deleteDocument/node_modules/@aws-sdk/middleware-user-agent/dist-es/constants.js new file mode 100644 index 0000000..33e3391 --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@aws-sdk/middleware-user-agent/dist-es/constants.js @@ -0,0 +1,7 @@ +export const USER_AGENT = "user-agent"; +export const X_AMZ_USER_AGENT = "x-amz-user-agent"; +export const SPACE = " "; +export const UA_NAME_SEPARATOR = "/"; +export const UA_NAME_ESCAPE_REGEX = /[^\!\$\%\&\'\*\+\-\.\^\_\`\|\~\d\w]/g; +export const UA_VALUE_ESCAPE_REGEX = /[^\!\$\%\&\'\*\+\-\.\^\_\`\|\~\d\w\#]/g; +export const UA_ESCAPE_CHAR = "-"; diff --git a/amplify/functions/deleteDocument/node_modules/@aws-sdk/middleware-user-agent/dist-es/encode-features.js b/amplify/functions/deleteDocument/node_modules/@aws-sdk/middleware-user-agent/dist-es/encode-features.js new file mode 100644 index 0000000..23002b6 --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@aws-sdk/middleware-user-agent/dist-es/encode-features.js @@ -0,0 +1,18 @@ +const BYTE_LIMIT = 1024; +export function encodeFeatures(features) { + let buffer = ""; + for (const key in features) { + const val = features[key]; + if (buffer.length + val.length + 1 <= BYTE_LIMIT) { + if (buffer.length) { + buffer += "," + val; + } + else { + buffer += val; + } + continue; + } + break; + } + return buffer; +} diff --git a/amplify/functions/deleteDocument/node_modules/@aws-sdk/middleware-user-agent/dist-es/index.js b/amplify/functions/deleteDocument/node_modules/@aws-sdk/middleware-user-agent/dist-es/index.js new file mode 100644 
index 0000000..0456ec7 --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@aws-sdk/middleware-user-agent/dist-es/index.js @@ -0,0 +1,2 @@ +export * from "./configurations"; +export * from "./user-agent-middleware"; diff --git a/amplify/functions/deleteDocument/node_modules/@aws-sdk/middleware-user-agent/dist-es/user-agent-middleware.js b/amplify/functions/deleteDocument/node_modules/@aws-sdk/middleware-user-agent/dist-es/user-agent-middleware.js new file mode 100644 index 0000000..188bda0 --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@aws-sdk/middleware-user-agent/dist-es/user-agent-middleware.js @@ -0,0 +1,82 @@ +import { getUserAgentPrefix } from "@aws-sdk/util-endpoints"; +import { HttpRequest } from "@smithy/protocol-http"; +import { checkFeatures } from "./check-features"; +import { SPACE, UA_ESCAPE_CHAR, UA_NAME_ESCAPE_REGEX, UA_NAME_SEPARATOR, UA_VALUE_ESCAPE_REGEX, USER_AGENT, X_AMZ_USER_AGENT, } from "./constants"; +import { encodeFeatures } from "./encode-features"; +export const userAgentMiddleware = (options) => (next, context) => async (args) => { + const { request } = args; + if (!HttpRequest.isInstance(request)) { + return next(args); + } + const { headers } = request; + const userAgent = context?.userAgent?.map(escapeUserAgent) || []; + const defaultUserAgent = (await options.defaultUserAgentProvider()).map(escapeUserAgent); + await checkFeatures(context, options, args); + const awsContext = context; + defaultUserAgent.push(`m/${encodeFeatures(Object.assign({}, context.__smithy_context?.features, awsContext.__aws_sdk_context?.features))}`); + const customUserAgent = options?.customUserAgent?.map(escapeUserAgent) || []; + const appId = await options.userAgentAppId(); + if (appId) { + defaultUserAgent.push(escapeUserAgent([`app/${appId}`])); + } + const prefix = getUserAgentPrefix(); + const sdkUserAgentValue = (prefix ? 
[prefix] : []) + .concat([...defaultUserAgent, ...userAgent, ...customUserAgent]) + .join(SPACE); + const normalUAValue = [ + ...defaultUserAgent.filter((section) => section.startsWith("aws-sdk-")), + ...customUserAgent, + ].join(SPACE); + if (options.runtime !== "browser") { + if (normalUAValue) { + headers[X_AMZ_USER_AGENT] = headers[X_AMZ_USER_AGENT] + ? `${headers[USER_AGENT]} ${normalUAValue}` + : normalUAValue; + } + headers[USER_AGENT] = sdkUserAgentValue; + } + else { + headers[X_AMZ_USER_AGENT] = sdkUserAgentValue; + } + return next({ + ...args, + request, + }); +}; +const escapeUserAgent = (userAgentPair) => { + const name = userAgentPair[0] + .split(UA_NAME_SEPARATOR) + .map((part) => part.replace(UA_NAME_ESCAPE_REGEX, UA_ESCAPE_CHAR)) + .join(UA_NAME_SEPARATOR); + const version = userAgentPair[1]?.replace(UA_VALUE_ESCAPE_REGEX, UA_ESCAPE_CHAR); + const prefixSeparatorIndex = name.indexOf(UA_NAME_SEPARATOR); + const prefix = name.substring(0, prefixSeparatorIndex); + let uaName = name.substring(prefixSeparatorIndex + 1); + if (prefix === "api") { + uaName = uaName.toLowerCase(); + } + return [prefix, uaName, version] + .filter((item) => item && item.length > 0) + .reduce((acc, item, index) => { + switch (index) { + case 0: + return item; + case 1: + return `${acc}/${item}`; + default: + return `${acc}#${item}`; + } + }, ""); +}; +export const getUserAgentMiddlewareOptions = { + name: "getUserAgentMiddleware", + step: "build", + priority: "low", + tags: ["SET_USER_AGENT", "USER_AGENT"], + override: true, +}; +export const getUserAgentPlugin = (config) => ({ + applyToStack: (clientStack) => { + clientStack.add(userAgentMiddleware(config), getUserAgentMiddlewareOptions); + }, +}); diff --git a/amplify/functions/deleteDocument/node_modules/@aws-sdk/middleware-user-agent/dist-types/check-features.d.ts b/amplify/functions/deleteDocument/node_modules/@aws-sdk/middleware-user-agent/dist-types/check-features.d.ts new file mode 100644 index 0000000..a75d08b --- 
/dev/null +++ b/amplify/functions/deleteDocument/node_modules/@aws-sdk/middleware-user-agent/dist-types/check-features.d.ts @@ -0,0 +1,18 @@ +import type { AccountIdEndpointMode } from "@aws-sdk/core/account-id-endpoint"; +import type { AwsHandlerExecutionContext } from "@aws-sdk/types"; +import type { AwsCredentialIdentityProvider, BuildHandlerArguments, Provider, RetryStrategy, RetryStrategyV2 } from "@smithy/types"; +/** + * @internal + */ +type PreviouslyResolved = Partial<{ + credentials?: AwsCredentialIdentityProvider; + accountIdEndpointMode?: Provider; + retryStrategy?: Provider; +}>; +/** + * @internal + * Check for features that don't have a middleware activation site but + * may be detected on the context, client config, or request. + */ +export declare function checkFeatures(context: AwsHandlerExecutionContext, config: PreviouslyResolved, args: BuildHandlerArguments): Promise; +export {}; diff --git a/amplify/functions/deleteDocument/node_modules/@aws-sdk/middleware-user-agent/dist-types/configurations.d.ts b/amplify/functions/deleteDocument/node_modules/@aws-sdk/middleware-user-agent/dist-types/configurations.d.ts new file mode 100644 index 0000000..f8183f8 --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@aws-sdk/middleware-user-agent/dist-types/configurations.d.ts @@ -0,0 +1,44 @@ +import { Logger, Provider, UserAgent } from "@smithy/types"; +/** + * @internal + */ +export declare const DEFAULT_UA_APP_ID: undefined; +/** + * @public + */ +export interface UserAgentInputConfig { + /** + * The custom user agent header that would be appended to default one + */ + customUserAgent?: string | UserAgent; + /** + * The application ID used to identify the application. 
+ */ + userAgentAppId?: string | undefined | Provider; +} +interface PreviouslyResolved { + defaultUserAgentProvider: Provider; + runtime: string; + logger?: Logger; +} +export interface UserAgentResolvedConfig { + /** + * The provider populating default tracking information to be sent with `user-agent`, `x-amz-user-agent` header. + * @internal + */ + defaultUserAgentProvider: Provider; + /** + * The custom user agent header that would be appended to default one + */ + customUserAgent?: UserAgent; + /** + * The runtime environment + */ + runtime: string; + /** + * Resolved value for input config {config.userAgentAppId} + */ + userAgentAppId: Provider; +} +export declare function resolveUserAgentConfig(input: T & PreviouslyResolved & UserAgentInputConfig): T & UserAgentResolvedConfig; +export {}; diff --git a/amplify/functions/deleteDocument/node_modules/@aws-sdk/middleware-user-agent/dist-types/constants.d.ts b/amplify/functions/deleteDocument/node_modules/@aws-sdk/middleware-user-agent/dist-types/constants.d.ts new file mode 100644 index 0000000..8c0dfc9 --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@aws-sdk/middleware-user-agent/dist-types/constants.d.ts @@ -0,0 +1,7 @@ +export declare const USER_AGENT = "user-agent"; +export declare const X_AMZ_USER_AGENT = "x-amz-user-agent"; +export declare const SPACE = " "; +export declare const UA_NAME_SEPARATOR = "/"; +export declare const UA_NAME_ESCAPE_REGEX: RegExp; +export declare const UA_VALUE_ESCAPE_REGEX: RegExp; +export declare const UA_ESCAPE_CHAR = "-"; diff --git a/amplify/functions/deleteDocument/node_modules/@aws-sdk/middleware-user-agent/dist-types/encode-features.d.ts b/amplify/functions/deleteDocument/node_modules/@aws-sdk/middleware-user-agent/dist-types/encode-features.d.ts new file mode 100644 index 0000000..d6079ae --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@aws-sdk/middleware-user-agent/dist-types/encode-features.d.ts @@ -0,0 +1,5 @@ +import type { 
AwsSdkFeatures } from "@aws-sdk/types"; +/** + * @internal + */ +export declare function encodeFeatures(features: AwsSdkFeatures): string; diff --git a/amplify/functions/deleteDocument/node_modules/@aws-sdk/middleware-user-agent/dist-types/index.d.ts b/amplify/functions/deleteDocument/node_modules/@aws-sdk/middleware-user-agent/dist-types/index.d.ts new file mode 100644 index 0000000..0456ec7 --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@aws-sdk/middleware-user-agent/dist-types/index.d.ts @@ -0,0 +1,2 @@ +export * from "./configurations"; +export * from "./user-agent-middleware"; diff --git a/amplify/functions/deleteDocument/node_modules/@aws-sdk/middleware-user-agent/dist-types/ts3.4/check-features.d.ts b/amplify/functions/deleteDocument/node_modules/@aws-sdk/middleware-user-agent/dist-types/ts3.4/check-features.d.ts new file mode 100644 index 0000000..d8fc201 --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@aws-sdk/middleware-user-agent/dist-types/ts3.4/check-features.d.ts @@ -0,0 +1,20 @@ +import { AccountIdEndpointMode } from "@aws-sdk/core/account-id-endpoint"; +import { AwsHandlerExecutionContext } from "@aws-sdk/types"; +import { + AwsCredentialIdentityProvider, + BuildHandlerArguments, + Provider, + RetryStrategy, + RetryStrategyV2, +} from "@smithy/types"; +type PreviouslyResolved = Partial<{ + credentials?: AwsCredentialIdentityProvider; + accountIdEndpointMode?: Provider; + retryStrategy?: Provider; +}>; +export declare function checkFeatures( + context: AwsHandlerExecutionContext, + config: PreviouslyResolved, + args: BuildHandlerArguments +): Promise; +export {}; diff --git a/amplify/functions/deleteDocument/node_modules/@aws-sdk/middleware-user-agent/dist-types/ts3.4/configurations.d.ts b/amplify/functions/deleteDocument/node_modules/@aws-sdk/middleware-user-agent/dist-types/ts3.4/configurations.d.ts new file mode 100644 index 0000000..a4a1b10 --- /dev/null +++ 
b/amplify/functions/deleteDocument/node_modules/@aws-sdk/middleware-user-agent/dist-types/ts3.4/configurations.d.ts @@ -0,0 +1,21 @@ +import { Logger, Provider, UserAgent } from "@smithy/types"; +export declare const DEFAULT_UA_APP_ID: undefined; +export interface UserAgentInputConfig { + customUserAgent?: string | UserAgent; + userAgentAppId?: string | undefined | Provider; +} +interface PreviouslyResolved { + defaultUserAgentProvider: Provider; + runtime: string; + logger?: Logger; +} +export interface UserAgentResolvedConfig { + defaultUserAgentProvider: Provider; + customUserAgent?: UserAgent; + runtime: string; + userAgentAppId: Provider; +} +export declare function resolveUserAgentConfig( + input: T & PreviouslyResolved & UserAgentInputConfig +): T & UserAgentResolvedConfig; +export {}; diff --git a/amplify/functions/deleteDocument/node_modules/@aws-sdk/middleware-user-agent/dist-types/ts3.4/constants.d.ts b/amplify/functions/deleteDocument/node_modules/@aws-sdk/middleware-user-agent/dist-types/ts3.4/constants.d.ts new file mode 100644 index 0000000..8c0dfc9 --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@aws-sdk/middleware-user-agent/dist-types/ts3.4/constants.d.ts @@ -0,0 +1,7 @@ +export declare const USER_AGENT = "user-agent"; +export declare const X_AMZ_USER_AGENT = "x-amz-user-agent"; +export declare const SPACE = " "; +export declare const UA_NAME_SEPARATOR = "/"; +export declare const UA_NAME_ESCAPE_REGEX: RegExp; +export declare const UA_VALUE_ESCAPE_REGEX: RegExp; +export declare const UA_ESCAPE_CHAR = "-"; diff --git a/amplify/functions/deleteDocument/node_modules/@aws-sdk/middleware-user-agent/dist-types/ts3.4/encode-features.d.ts b/amplify/functions/deleteDocument/node_modules/@aws-sdk/middleware-user-agent/dist-types/ts3.4/encode-features.d.ts new file mode 100644 index 0000000..a7be5b7 --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@aws-sdk/middleware-user-agent/dist-types/ts3.4/encode-features.d.ts @@ -0,0 
+1,2 @@ +import { AwsSdkFeatures } from "@aws-sdk/types"; +export declare function encodeFeatures(features: AwsSdkFeatures): string; diff --git a/amplify/functions/deleteDocument/node_modules/@aws-sdk/middleware-user-agent/dist-types/ts3.4/index.d.ts b/amplify/functions/deleteDocument/node_modules/@aws-sdk/middleware-user-agent/dist-types/ts3.4/index.d.ts new file mode 100644 index 0000000..0456ec7 --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@aws-sdk/middleware-user-agent/dist-types/ts3.4/index.d.ts @@ -0,0 +1,2 @@ +export * from "./configurations"; +export * from "./user-agent-middleware"; diff --git a/amplify/functions/deleteDocument/node_modules/@aws-sdk/middleware-user-agent/dist-types/ts3.4/user-agent-middleware.d.ts b/amplify/functions/deleteDocument/node_modules/@aws-sdk/middleware-user-agent/dist-types/ts3.4/user-agent-middleware.d.ts new file mode 100644 index 0000000..a4da01e --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@aws-sdk/middleware-user-agent/dist-types/ts3.4/user-agent-middleware.d.ts @@ -0,0 +1,21 @@ +import { AwsHandlerExecutionContext } from "@aws-sdk/types"; +import { + AbsoluteLocation, + BuildHandler, + BuildHandlerOptions, + HandlerExecutionContext, + MetadataBearer, + Pluggable, +} from "@smithy/types"; +import { UserAgentResolvedConfig } from "./configurations"; +export declare const userAgentMiddleware: ( + options: UserAgentResolvedConfig +) => ( + next: BuildHandler, + context: HandlerExecutionContext | AwsHandlerExecutionContext +) => BuildHandler; +export declare const getUserAgentMiddlewareOptions: BuildHandlerOptions & + AbsoluteLocation; +export declare const getUserAgentPlugin: ( + config: UserAgentResolvedConfig +) => Pluggable; diff --git a/amplify/functions/deleteDocument/node_modules/@aws-sdk/middleware-user-agent/dist-types/user-agent-middleware.d.ts b/amplify/functions/deleteDocument/node_modules/@aws-sdk/middleware-user-agent/dist-types/user-agent-middleware.d.ts new file mode 
100644 index 0000000..d36dee5 --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@aws-sdk/middleware-user-agent/dist-types/user-agent-middleware.d.ts @@ -0,0 +1,18 @@ +import type { AwsHandlerExecutionContext } from "@aws-sdk/types"; +import { AbsoluteLocation, BuildHandler, BuildHandlerOptions, HandlerExecutionContext, MetadataBearer, Pluggable } from "@smithy/types"; +import { UserAgentResolvedConfig } from "./configurations"; +/** + * Build user agent header sections from: + * 1. runtime-specific default user agent provider; + * 2. custom user agent from `customUserAgent` client config; + * 3. handler execution context set by internal SDK components; + * The built user agent will be set to `x-amz-user-agent` header for ALL the + * runtimes. + * Please note that any override to the `user-agent` or `x-amz-user-agent` header + * in the HTTP request is discouraged. Please use `customUserAgent` client + * config or middleware setting the `userAgent` context to generate desired user + * agent. 
+ */ +export declare const userAgentMiddleware: (options: UserAgentResolvedConfig) => (next: BuildHandler, context: HandlerExecutionContext | AwsHandlerExecutionContext) => BuildHandler; +export declare const getUserAgentMiddlewareOptions: BuildHandlerOptions & AbsoluteLocation; +export declare const getUserAgentPlugin: (config: UserAgentResolvedConfig) => Pluggable; diff --git a/amplify/functions/deleteDocument/node_modules/@aws-sdk/middleware-user-agent/package.json b/amplify/functions/deleteDocument/node_modules/@aws-sdk/middleware-user-agent/package.json new file mode 100644 index 0000000..34cff38 --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@aws-sdk/middleware-user-agent/package.json @@ -0,0 +1,61 @@ +{ + "name": "@aws-sdk/middleware-user-agent", + "version": "3.799.0", + "scripts": { + "build": "concurrently 'yarn:build:cjs' 'yarn:build:es' 'yarn:build:types'", + "build:cjs": "node ../../scripts/compilation/inline middleware-user-agent", + "build:es": "tsc -p tsconfig.es.json", + "build:include:deps": "lerna run --scope $npm_package_name --include-dependencies build", + "build:types": "tsc -p tsconfig.types.json", + "build:types:downlevel": "downlevel-dts dist-types dist-types/ts3.4", + "clean": "rimraf ./dist-* && rimraf *.tsbuildinfo", + "test": "yarn g:vitest run", + "test:integration": "yarn g:vitest run -c vitest.config.integ.ts", + "extract:docs": "api-extractor run --local", + "test:watch": "yarn g:vitest watch", + "test:integration:watch": "yarn g:vitest watch -c vitest.config.integ.ts" + }, + "main": "./dist-cjs/index.js", + "module": "./dist-es/index.js", + "types": "./dist-types/index.d.ts", + "author": { + "name": "AWS SDK for JavaScript Team", + "url": "https://aws.amazon.com/javascript/" + }, + "license": "Apache-2.0", + "dependencies": { + "@aws-sdk/core": "3.799.0", + "@aws-sdk/types": "3.775.0", + "@aws-sdk/util-endpoints": "3.787.0", + "@smithy/core": "^3.3.0", + "@smithy/protocol-http": "^5.1.0", + "@smithy/types": 
"^4.2.0", + "tslib": "^2.6.2" + }, + "devDependencies": { + "@tsconfig/recommended": "1.0.1", + "concurrently": "7.0.0", + "downlevel-dts": "0.10.1", + "rimraf": "3.0.2", + "typescript": "~5.2.2" + }, + "engines": { + "node": ">=18.0.0" + }, + "typesVersions": { + "<4.0": { + "dist-types/*": [ + "dist-types/ts3.4/*" + ] + } + }, + "files": [ + "dist-*/**" + ], + "homepage": "https://github.com/aws/aws-sdk-js-v3/tree/main/packages/middleware-user-agent", + "repository": { + "type": "git", + "url": "https://github.com/aws/aws-sdk-js-v3.git", + "directory": "packages/middleware-user-agent" + } +} diff --git a/amplify/functions/deleteDocument/node_modules/@aws-sdk/nested-clients/README.md b/amplify/functions/deleteDocument/node_modules/@aws-sdk/nested-clients/README.md new file mode 100644 index 0000000..1182bbd --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@aws-sdk/nested-clients/README.md @@ -0,0 +1,13 @@ +# @aws-sdk/nested-clients + +## Description + +This is an internal package. Do not install this as a direct dependency. + +This package contains separate internal implementations of the STS and SSO-OIDC AWS SDK clients +to be used by the AWS SDK credential providers to break a cyclic dependency. + +### Bundlers + +This package may be marked as external if you do not use STS nor SSO-OIDC +in your credential resolution process. 
diff --git a/amplify/functions/deleteDocument/node_modules/@aws-sdk/nested-clients/dist-cjs/index.js b/amplify/functions/deleteDocument/node_modules/@aws-sdk/nested-clients/dist-cjs/index.js new file mode 100644 index 0000000..c8ad2e5 --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@aws-sdk/nested-clients/dist-cjs/index.js @@ -0,0 +1,2 @@ +"use strict"; +Object.defineProperty(exports, "__esModule", { value: true }); diff --git a/amplify/functions/deleteDocument/node_modules/@aws-sdk/nested-clients/dist-cjs/submodules/sso-oidc/auth/httpAuthSchemeProvider.js b/amplify/functions/deleteDocument/node_modules/@aws-sdk/nested-clients/dist-cjs/submodules/sso-oidc/auth/httpAuthSchemeProvider.js new file mode 100644 index 0000000..7a9f28a --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@aws-sdk/nested-clients/dist-cjs/submodules/sso-oidc/auth/httpAuthSchemeProvider.js @@ -0,0 +1,56 @@ +"use strict"; +Object.defineProperty(exports, "__esModule", { value: true }); +exports.resolveHttpAuthSchemeConfig = exports.defaultSSOOIDCHttpAuthSchemeProvider = exports.defaultSSOOIDCHttpAuthSchemeParametersProvider = void 0; +const core_1 = require("@aws-sdk/core"); +const util_middleware_1 = require("@smithy/util-middleware"); +const defaultSSOOIDCHttpAuthSchemeParametersProvider = async (config, context, input) => { + return { + operation: (0, util_middleware_1.getSmithyContext)(context).operation, + region: (await (0, util_middleware_1.normalizeProvider)(config.region)()) || + (() => { + throw new Error("expected `region` to be configured for `aws.auth#sigv4`"); + })(), + }; +}; +exports.defaultSSOOIDCHttpAuthSchemeParametersProvider = defaultSSOOIDCHttpAuthSchemeParametersProvider; +function createAwsAuthSigv4HttpAuthOption(authParameters) { + return { + schemeId: "aws.auth#sigv4", + signingProperties: { + name: "sso-oauth", + region: authParameters.region, + }, + propertiesExtractor: (config, context) => ({ + signingProperties: { + config, + 
context, + }, + }), + }; +} +function createSmithyApiNoAuthHttpAuthOption(authParameters) { + return { + schemeId: "smithy.api#noAuth", + }; +} +const defaultSSOOIDCHttpAuthSchemeProvider = (authParameters) => { + const options = []; + switch (authParameters.operation) { + case "CreateToken": { + options.push(createSmithyApiNoAuthHttpAuthOption(authParameters)); + break; + } + default: { + options.push(createAwsAuthSigv4HttpAuthOption(authParameters)); + } + } + return options; +}; +exports.defaultSSOOIDCHttpAuthSchemeProvider = defaultSSOOIDCHttpAuthSchemeProvider; +const resolveHttpAuthSchemeConfig = (config) => { + const config_0 = (0, core_1.resolveAwsSdkSigV4Config)(config); + return Object.assign(config_0, { + authSchemePreference: (0, util_middleware_1.normalizeProvider)(config.authSchemePreference ?? []), + }); +}; +exports.resolveHttpAuthSchemeConfig = resolveHttpAuthSchemeConfig; diff --git a/amplify/functions/deleteDocument/node_modules/@aws-sdk/nested-clients/dist-cjs/submodules/sso-oidc/endpoint/endpointResolver.js b/amplify/functions/deleteDocument/node_modules/@aws-sdk/nested-clients/dist-cjs/submodules/sso-oidc/endpoint/endpointResolver.js new file mode 100644 index 0000000..7258a35 --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@aws-sdk/nested-clients/dist-cjs/submodules/sso-oidc/endpoint/endpointResolver.js @@ -0,0 +1,18 @@ +"use strict"; +Object.defineProperty(exports, "__esModule", { value: true }); +exports.defaultEndpointResolver = void 0; +const util_endpoints_1 = require("@aws-sdk/util-endpoints"); +const util_endpoints_2 = require("@smithy/util-endpoints"); +const ruleset_1 = require("./ruleset"); +const cache = new util_endpoints_2.EndpointCache({ + size: 50, + params: ["Endpoint", "Region", "UseDualStack", "UseFIPS"], +}); +const defaultEndpointResolver = (endpointParams, context = {}) => { + return cache.get(endpointParams, () => (0, util_endpoints_2.resolveEndpoint)(ruleset_1.ruleSet, { + endpointParams: 
endpointParams, + logger: context.logger, + })); +}; +exports.defaultEndpointResolver = defaultEndpointResolver; +util_endpoints_2.customEndpointFunctions.aws = util_endpoints_1.awsEndpointFunctions; diff --git a/amplify/functions/deleteDocument/node_modules/@aws-sdk/nested-clients/dist-cjs/submodules/sso-oidc/endpoint/ruleset.js b/amplify/functions/deleteDocument/node_modules/@aws-sdk/nested-clients/dist-cjs/submodules/sso-oidc/endpoint/ruleset.js new file mode 100644 index 0000000..72e0adc --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@aws-sdk/nested-clients/dist-cjs/submodules/sso-oidc/endpoint/ruleset.js @@ -0,0 +1,7 @@ +"use strict"; +Object.defineProperty(exports, "__esModule", { value: true }); +exports.ruleSet = void 0; +const u = "required", v = "fn", w = "argv", x = "ref"; +const a = true, b = "isSet", c = "booleanEquals", d = "error", e = "endpoint", f = "tree", g = "PartitionResult", h = "getAttr", i = { [u]: false, "type": "String" }, j = { [u]: true, "default": false, "type": "Boolean" }, k = { [x]: "Endpoint" }, l = { [v]: c, [w]: [{ [x]: "UseFIPS" }, true] }, m = { [v]: c, [w]: [{ [x]: "UseDualStack" }, true] }, n = {}, o = { [v]: h, [w]: [{ [x]: g }, "supportsFIPS"] }, p = { [x]: g }, q = { [v]: c, [w]: [true, { [v]: h, [w]: [p, "supportsDualStack"] }] }, r = [l], s = [m], t = [{ [x]: "Region" }]; +const _data = { version: "1.0", parameters: { Region: i, UseDualStack: j, UseFIPS: j, Endpoint: i }, rules: [{ conditions: [{ [v]: b, [w]: [k] }], rules: [{ conditions: r, error: "Invalid Configuration: FIPS and custom endpoint are not supported", type: d }, { conditions: s, error: "Invalid Configuration: Dualstack and custom endpoint are not supported", type: d }, { endpoint: { url: k, properties: n, headers: n }, type: e }], type: f }, { conditions: [{ [v]: b, [w]: t }], rules: [{ conditions: [{ [v]: "aws.partition", [w]: t, assign: g }], rules: [{ conditions: [l, m], rules: [{ conditions: [{ [v]: c, [w]: [a, o] }, q], rules: [{ 
endpoint: { url: "https://oidc-fips.{Region}.{PartitionResult#dualStackDnsSuffix}", properties: n, headers: n }, type: e }], type: f }, { error: "FIPS and DualStack are enabled, but this partition does not support one or both", type: d }], type: f }, { conditions: r, rules: [{ conditions: [{ [v]: c, [w]: [o, a] }], rules: [{ conditions: [{ [v]: "stringEquals", [w]: [{ [v]: h, [w]: [p, "name"] }, "aws-us-gov"] }], endpoint: { url: "https://oidc.{Region}.amazonaws.com", properties: n, headers: n }, type: e }, { endpoint: { url: "https://oidc-fips.{Region}.{PartitionResult#dnsSuffix}", properties: n, headers: n }, type: e }], type: f }, { error: "FIPS is enabled but this partition does not support FIPS", type: d }], type: f }, { conditions: s, rules: [{ conditions: [q], rules: [{ endpoint: { url: "https://oidc.{Region}.{PartitionResult#dualStackDnsSuffix}", properties: n, headers: n }, type: e }], type: f }, { error: "DualStack is enabled but this partition does not support DualStack", type: d }], type: f }, { endpoint: { url: "https://oidc.{Region}.{PartitionResult#dnsSuffix}", properties: n, headers: n }, type: e }], type: f }], type: f }, { error: "Invalid Configuration: Missing Region", type: d }] }; +exports.ruleSet = _data; diff --git a/amplify/functions/deleteDocument/node_modules/@aws-sdk/nested-clients/dist-cjs/submodules/sso-oidc/index.js b/amplify/functions/deleteDocument/node_modules/@aws-sdk/nested-clients/dist-cjs/submodules/sso-oidc/index.js new file mode 100644 index 0000000..55f595a --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@aws-sdk/nested-clients/dist-cjs/submodules/sso-oidc/index.js @@ -0,0 +1,872 @@ +"use strict"; +var __defProp = Object.defineProperty; +var __getOwnPropDesc = Object.getOwnPropertyDescriptor; +var __getOwnPropNames = Object.getOwnPropertyNames; +var __hasOwnProp = Object.prototype.hasOwnProperty; +var __name = (target, value) => __defProp(target, "name", { value, configurable: true }); +var __export = 
(target, all) => { + for (var name in all) + __defProp(target, name, { get: all[name], enumerable: true }); +}; +var __copyProps = (to, from, except, desc) => { + if (from && typeof from === "object" || typeof from === "function") { + for (let key of __getOwnPropNames(from)) + if (!__hasOwnProp.call(to, key) && key !== except) + __defProp(to, key, { get: () => from[key], enumerable: !(desc = __getOwnPropDesc(from, key)) || desc.enumerable }); + } + return to; +}; +var __toCommonJS = (mod) => __copyProps(__defProp({}, "__esModule", { value: true }), mod); + +// src/submodules/sso-oidc/index.ts +var index_exports = {}; +__export(index_exports, { + $Command: () => import_smithy_client6.Command, + AccessDeniedException: () => AccessDeniedException, + AuthorizationPendingException: () => AuthorizationPendingException, + CreateTokenCommand: () => CreateTokenCommand, + CreateTokenRequestFilterSensitiveLog: () => CreateTokenRequestFilterSensitiveLog, + CreateTokenResponseFilterSensitiveLog: () => CreateTokenResponseFilterSensitiveLog, + ExpiredTokenException: () => ExpiredTokenException, + InternalServerException: () => InternalServerException, + InvalidClientException: () => InvalidClientException, + InvalidGrantException: () => InvalidGrantException, + InvalidRequestException: () => InvalidRequestException, + InvalidScopeException: () => InvalidScopeException, + SSOOIDC: () => SSOOIDC, + SSOOIDCClient: () => SSOOIDCClient, + SSOOIDCServiceException: () => SSOOIDCServiceException, + SlowDownException: () => SlowDownException, + UnauthorizedClientException: () => UnauthorizedClientException, + UnsupportedGrantTypeException: () => UnsupportedGrantTypeException, + __Client: () => import_smithy_client2.Client +}); +module.exports = __toCommonJS(index_exports); + +// src/submodules/sso-oidc/SSOOIDCClient.ts +var import_middleware_host_header = require("@aws-sdk/middleware-host-header"); +var import_middleware_logger = require("@aws-sdk/middleware-logger"); +var 
import_middleware_recursion_detection = require("@aws-sdk/middleware-recursion-detection"); +var import_middleware_user_agent = require("@aws-sdk/middleware-user-agent"); +var import_config_resolver = require("@smithy/config-resolver"); +var import_core = require("@smithy/core"); +var import_middleware_content_length = require("@smithy/middleware-content-length"); +var import_middleware_endpoint = require("@smithy/middleware-endpoint"); +var import_middleware_retry = require("@smithy/middleware-retry"); +var import_smithy_client2 = require("@smithy/smithy-client"); +var import_httpAuthSchemeProvider = require("./auth/httpAuthSchemeProvider"); + +// src/submodules/sso-oidc/endpoint/EndpointParameters.ts +var resolveClientEndpointParameters = /* @__PURE__ */ __name((options) => { + return Object.assign(options, { + useDualstackEndpoint: options.useDualstackEndpoint ?? false, + useFipsEndpoint: options.useFipsEndpoint ?? false, + defaultSigningName: "sso-oauth" + }); +}, "resolveClientEndpointParameters"); +var commonParams = { + UseFIPS: { type: "builtInParams", name: "useFipsEndpoint" }, + Endpoint: { type: "builtInParams", name: "endpoint" }, + Region: { type: "builtInParams", name: "region" }, + UseDualStack: { type: "builtInParams", name: "useDualstackEndpoint" } +}; + +// src/submodules/sso-oidc/SSOOIDCClient.ts +var import_runtimeConfig = require("./runtimeConfig"); + +// src/submodules/sso-oidc/runtimeExtensions.ts +var import_region_config_resolver = require("@aws-sdk/region-config-resolver"); +var import_protocol_http = require("@smithy/protocol-http"); +var import_smithy_client = require("@smithy/smithy-client"); + +// src/submodules/sso-oidc/auth/httpAuthExtensionConfiguration.ts +var getHttpAuthExtensionConfiguration = /* @__PURE__ */ __name((runtimeConfig) => { + const _httpAuthSchemes = runtimeConfig.httpAuthSchemes; + let _httpAuthSchemeProvider = runtimeConfig.httpAuthSchemeProvider; + let _credentials = runtimeConfig.credentials; + return { + 
setHttpAuthScheme(httpAuthScheme) { + const index = _httpAuthSchemes.findIndex((scheme) => scheme.schemeId === httpAuthScheme.schemeId); + if (index === -1) { + _httpAuthSchemes.push(httpAuthScheme); + } else { + _httpAuthSchemes.splice(index, 1, httpAuthScheme); + } + }, + httpAuthSchemes() { + return _httpAuthSchemes; + }, + setHttpAuthSchemeProvider(httpAuthSchemeProvider) { + _httpAuthSchemeProvider = httpAuthSchemeProvider; + }, + httpAuthSchemeProvider() { + return _httpAuthSchemeProvider; + }, + setCredentials(credentials) { + _credentials = credentials; + }, + credentials() { + return _credentials; + } + }; +}, "getHttpAuthExtensionConfiguration"); +var resolveHttpAuthRuntimeConfig = /* @__PURE__ */ __name((config) => { + return { + httpAuthSchemes: config.httpAuthSchemes(), + httpAuthSchemeProvider: config.httpAuthSchemeProvider(), + credentials: config.credentials() + }; +}, "resolveHttpAuthRuntimeConfig"); + +// src/submodules/sso-oidc/runtimeExtensions.ts +var resolveRuntimeExtensions = /* @__PURE__ */ __name((runtimeConfig, extensions) => { + const extensionConfiguration = Object.assign( + (0, import_region_config_resolver.getAwsRegionExtensionConfiguration)(runtimeConfig), + (0, import_smithy_client.getDefaultExtensionConfiguration)(runtimeConfig), + (0, import_protocol_http.getHttpHandlerExtensionConfiguration)(runtimeConfig), + getHttpAuthExtensionConfiguration(runtimeConfig) + ); + extensions.forEach((extension) => extension.configure(extensionConfiguration)); + return Object.assign( + runtimeConfig, + (0, import_region_config_resolver.resolveAwsRegionExtensionConfiguration)(extensionConfiguration), + (0, import_smithy_client.resolveDefaultRuntimeConfig)(extensionConfiguration), + (0, import_protocol_http.resolveHttpHandlerRuntimeConfig)(extensionConfiguration), + resolveHttpAuthRuntimeConfig(extensionConfiguration) + ); +}, "resolveRuntimeExtensions"); + +// src/submodules/sso-oidc/SSOOIDCClient.ts +var SSOOIDCClient = class extends 
import_smithy_client2.Client { + static { + __name(this, "SSOOIDCClient"); + } + /** + * The resolved configuration of SSOOIDCClient class. This is resolved and normalized from the {@link SSOOIDCClientConfig | constructor configuration interface}. + */ + config; + constructor(...[configuration]) { + const _config_0 = (0, import_runtimeConfig.getRuntimeConfig)(configuration || {}); + super(_config_0); + this.initConfig = _config_0; + const _config_1 = resolveClientEndpointParameters(_config_0); + const _config_2 = (0, import_middleware_user_agent.resolveUserAgentConfig)(_config_1); + const _config_3 = (0, import_middleware_retry.resolveRetryConfig)(_config_2); + const _config_4 = (0, import_config_resolver.resolveRegionConfig)(_config_3); + const _config_5 = (0, import_middleware_host_header.resolveHostHeaderConfig)(_config_4); + const _config_6 = (0, import_middleware_endpoint.resolveEndpointConfig)(_config_5); + const _config_7 = (0, import_httpAuthSchemeProvider.resolveHttpAuthSchemeConfig)(_config_6); + const _config_8 = resolveRuntimeExtensions(_config_7, configuration?.extensions || []); + this.config = _config_8; + this.middlewareStack.use((0, import_middleware_user_agent.getUserAgentPlugin)(this.config)); + this.middlewareStack.use((0, import_middleware_retry.getRetryPlugin)(this.config)); + this.middlewareStack.use((0, import_middleware_content_length.getContentLengthPlugin)(this.config)); + this.middlewareStack.use((0, import_middleware_host_header.getHostHeaderPlugin)(this.config)); + this.middlewareStack.use((0, import_middleware_logger.getLoggerPlugin)(this.config)); + this.middlewareStack.use((0, import_middleware_recursion_detection.getRecursionDetectionPlugin)(this.config)); + this.middlewareStack.use( + (0, import_core.getHttpAuthSchemeEndpointRuleSetPlugin)(this.config, { + httpAuthSchemeParametersProvider: import_httpAuthSchemeProvider.defaultSSOOIDCHttpAuthSchemeParametersProvider, + identityProviderConfigProvider: /* @__PURE__ */ __name(async 
(config) => new import_core.DefaultIdentityProviderConfig({ + "aws.auth#sigv4": config.credentials + }), "identityProviderConfigProvider") + }) + ); + this.middlewareStack.use((0, import_core.getHttpSigningPlugin)(this.config)); + } + /** + * Destroy underlying resources, like sockets. It's usually not necessary to do this. + * However in Node.js, it's best to explicitly shut down the client's agent when it is no longer needed. + * Otherwise, sockets might stay open for quite a long time before the server terminates them. + */ + destroy() { + super.destroy(); + } +}; + +// src/submodules/sso-oidc/SSOOIDC.ts +var import_smithy_client7 = require("@smithy/smithy-client"); + +// src/submodules/sso-oidc/commands/CreateTokenCommand.ts +var import_middleware_endpoint2 = require("@smithy/middleware-endpoint"); +var import_middleware_serde = require("@smithy/middleware-serde"); +var import_smithy_client6 = require("@smithy/smithy-client"); + +// src/submodules/sso-oidc/models/models_0.ts +var import_smithy_client4 = require("@smithy/smithy-client"); + +// src/submodules/sso-oidc/models/SSOOIDCServiceException.ts +var import_smithy_client3 = require("@smithy/smithy-client"); +var SSOOIDCServiceException = class _SSOOIDCServiceException extends import_smithy_client3.ServiceException { + static { + __name(this, "SSOOIDCServiceException"); + } + /** + * @internal + */ + constructor(options) { + super(options); + Object.setPrototypeOf(this, _SSOOIDCServiceException.prototype); + } +}; + +// src/submodules/sso-oidc/models/models_0.ts +var AccessDeniedException = class _AccessDeniedException extends SSOOIDCServiceException { + static { + __name(this, "AccessDeniedException"); + } + name = "AccessDeniedException"; + $fault = "client"; + /** + *

Single error code. For this exception the value will be access_denied.

+ * @public + */ + error; + /** + *

Human-readable text providing additional information, used to assist the client developer + * in understanding the error that occurred.

+ * @public + */ + error_description; + /** + * @internal + */ + constructor(opts) { + super({ + name: "AccessDeniedException", + $fault: "client", + ...opts + }); + Object.setPrototypeOf(this, _AccessDeniedException.prototype); + this.error = opts.error; + this.error_description = opts.error_description; + } +}; +var AuthorizationPendingException = class _AuthorizationPendingException extends SSOOIDCServiceException { + static { + __name(this, "AuthorizationPendingException"); + } + name = "AuthorizationPendingException"; + $fault = "client"; + /** + *

Single error code. For this exception the value will be + * authorization_pending.

+ * @public + */ + error; + /** + *

Human-readable text providing additional information, used to assist the client developer + * in understanding the error that occurred.

+ * @public + */ + error_description; + /** + * @internal + */ + constructor(opts) { + super({ + name: "AuthorizationPendingException", + $fault: "client", + ...opts + }); + Object.setPrototypeOf(this, _AuthorizationPendingException.prototype); + this.error = opts.error; + this.error_description = opts.error_description; + } +}; +var CreateTokenRequestFilterSensitiveLog = /* @__PURE__ */ __name((obj) => ({ + ...obj, + ...obj.clientSecret && { clientSecret: import_smithy_client4.SENSITIVE_STRING }, + ...obj.refreshToken && { refreshToken: import_smithy_client4.SENSITIVE_STRING }, + ...obj.codeVerifier && { codeVerifier: import_smithy_client4.SENSITIVE_STRING } +}), "CreateTokenRequestFilterSensitiveLog"); +var CreateTokenResponseFilterSensitiveLog = /* @__PURE__ */ __name((obj) => ({ + ...obj, + ...obj.accessToken && { accessToken: import_smithy_client4.SENSITIVE_STRING }, + ...obj.refreshToken && { refreshToken: import_smithy_client4.SENSITIVE_STRING }, + ...obj.idToken && { idToken: import_smithy_client4.SENSITIVE_STRING } +}), "CreateTokenResponseFilterSensitiveLog"); +var ExpiredTokenException = class _ExpiredTokenException extends SSOOIDCServiceException { + static { + __name(this, "ExpiredTokenException"); + } + name = "ExpiredTokenException"; + $fault = "client"; + /** + *

Single error code. For this exception the value will be expired_token.

+ * @public + */ + error; + /** + *

Human-readable text providing additional information, used to assist the client developer + * in understanding the error that occurred.

+ * @public + */ + error_description; + /** + * @internal + */ + constructor(opts) { + super({ + name: "ExpiredTokenException", + $fault: "client", + ...opts + }); + Object.setPrototypeOf(this, _ExpiredTokenException.prototype); + this.error = opts.error; + this.error_description = opts.error_description; + } +}; +var InternalServerException = class _InternalServerException extends SSOOIDCServiceException { + static { + __name(this, "InternalServerException"); + } + name = "InternalServerException"; + $fault = "server"; + /** + *

Single error code. For this exception the value will be server_error.

+ * @public + */ + error; + /** + *

Human-readable text providing additional information, used to assist the client developer + * in understanding the error that occurred.

+ * @public + */ + error_description; + /** + * @internal + */ + constructor(opts) { + super({ + name: "InternalServerException", + $fault: "server", + ...opts + }); + Object.setPrototypeOf(this, _InternalServerException.prototype); + this.error = opts.error; + this.error_description = opts.error_description; + } +}; +var InvalidClientException = class _InvalidClientException extends SSOOIDCServiceException { + static { + __name(this, "InvalidClientException"); + } + name = "InvalidClientException"; + $fault = "client"; + /** + *

Single error code. For this exception the value will be + * invalid_client.

+ * @public + */ + error; + /** + *

Human-readable text providing additional information, used to assist the client developer + * in understanding the error that occurred.

+ * @public + */ + error_description; + /** + * @internal + */ + constructor(opts) { + super({ + name: "InvalidClientException", + $fault: "client", + ...opts + }); + Object.setPrototypeOf(this, _InvalidClientException.prototype); + this.error = opts.error; + this.error_description = opts.error_description; + } +}; +var InvalidGrantException = class _InvalidGrantException extends SSOOIDCServiceException { + static { + __name(this, "InvalidGrantException"); + } + name = "InvalidGrantException"; + $fault = "client"; + /** + *

Single error code. For this exception the value will be invalid_grant.

+ * @public + */ + error; + /** + *

Human-readable text providing additional information, used to assist the client developer + * in understanding the error that occurred.

+ * @public + */ + error_description; + /** + * @internal + */ + constructor(opts) { + super({ + name: "InvalidGrantException", + $fault: "client", + ...opts + }); + Object.setPrototypeOf(this, _InvalidGrantException.prototype); + this.error = opts.error; + this.error_description = opts.error_description; + } +}; +var InvalidRequestException = class _InvalidRequestException extends SSOOIDCServiceException { + static { + __name(this, "InvalidRequestException"); + } + name = "InvalidRequestException"; + $fault = "client"; + /** + *

Single error code. For this exception the value will be + * invalid_request.

+ * @public + */ + error; + /** + *

Human-readable text providing additional information, used to assist the client developer + * in understanding the error that occurred.

+ * @public + */ + error_description; + /** + * @internal + */ + constructor(opts) { + super({ + name: "InvalidRequestException", + $fault: "client", + ...opts + }); + Object.setPrototypeOf(this, _InvalidRequestException.prototype); + this.error = opts.error; + this.error_description = opts.error_description; + } +}; +var InvalidScopeException = class _InvalidScopeException extends SSOOIDCServiceException { + static { + __name(this, "InvalidScopeException"); + } + name = "InvalidScopeException"; + $fault = "client"; + /** + *

Single error code. For this exception the value will be invalid_scope.

+ * @public + */ + error; + /** + *

Human-readable text providing additional information, used to assist the client developer + * in understanding the error that occurred.

+ * @public + */ + error_description; + /** + * @internal + */ + constructor(opts) { + super({ + name: "InvalidScopeException", + $fault: "client", + ...opts + }); + Object.setPrototypeOf(this, _InvalidScopeException.prototype); + this.error = opts.error; + this.error_description = opts.error_description; + } +}; +var SlowDownException = class _SlowDownException extends SSOOIDCServiceException { + static { + __name(this, "SlowDownException"); + } + name = "SlowDownException"; + $fault = "client"; + /** + *

Single error code. For this exception the value will be slow_down.

+ * @public + */ + error; + /** + *

Human-readable text providing additional information, used to assist the client developer + * in understanding the error that occurred.

+ * @public + */ + error_description; + /** + * @internal + */ + constructor(opts) { + super({ + name: "SlowDownException", + $fault: "client", + ...opts + }); + Object.setPrototypeOf(this, _SlowDownException.prototype); + this.error = opts.error; + this.error_description = opts.error_description; + } +}; +var UnauthorizedClientException = class _UnauthorizedClientException extends SSOOIDCServiceException { + static { + __name(this, "UnauthorizedClientException"); + } + name = "UnauthorizedClientException"; + $fault = "client"; + /** + *

Single error code. For this exception the value will be + * unauthorized_client.

+ * @public + */ + error; + /** + *

Human-readable text providing additional information, used to assist the client developer + * in understanding the error that occurred.

+ * @public + */ + error_description; + /** + * @internal + */ + constructor(opts) { + super({ + name: "UnauthorizedClientException", + $fault: "client", + ...opts + }); + Object.setPrototypeOf(this, _UnauthorizedClientException.prototype); + this.error = opts.error; + this.error_description = opts.error_description; + } +}; +var UnsupportedGrantTypeException = class _UnsupportedGrantTypeException extends SSOOIDCServiceException { + static { + __name(this, "UnsupportedGrantTypeException"); + } + name = "UnsupportedGrantTypeException"; + $fault = "client"; + /** + *

Single error code. For this exception the value will be + * unsupported_grant_type.

+ * @public + */ + error; + /** + *

Human-readable text providing additional information, used to assist the client developer + * in understanding the error that occurred.

+ * @public + */ + error_description; + /** + * @internal + */ + constructor(opts) { + super({ + name: "UnsupportedGrantTypeException", + $fault: "client", + ...opts + }); + Object.setPrototypeOf(this, _UnsupportedGrantTypeException.prototype); + this.error = opts.error; + this.error_description = opts.error_description; + } +}; + +// src/submodules/sso-oidc/protocols/Aws_restJson1.ts +var import_core2 = require("@aws-sdk/core"); +var import_core3 = require("@smithy/core"); +var import_smithy_client5 = require("@smithy/smithy-client"); +var se_CreateTokenCommand = /* @__PURE__ */ __name(async (input, context) => { + const b = (0, import_core3.requestBuilder)(input, context); + const headers = { + "content-type": "application/json" + }; + b.bp("/token"); + let body; + body = JSON.stringify( + (0, import_smithy_client5.take)(input, { + clientId: [], + clientSecret: [], + code: [], + codeVerifier: [], + deviceCode: [], + grantType: [], + redirectUri: [], + refreshToken: [], + scope: /* @__PURE__ */ __name((_) => (0, import_smithy_client5._json)(_), "scope") + }) + ); + b.m("POST").h(headers).b(body); + return b.build(); +}, "se_CreateTokenCommand"); +var de_CreateTokenCommand = /* @__PURE__ */ __name(async (output, context) => { + if (output.statusCode !== 200 && output.statusCode >= 300) { + return de_CommandError(output, context); + } + const contents = (0, import_smithy_client5.map)({ + $metadata: deserializeMetadata(output) + }); + const data = (0, import_smithy_client5.expectNonNull)((0, import_smithy_client5.expectObject)(await (0, import_core2.parseJsonBody)(output.body, context)), "body"); + const doc = (0, import_smithy_client5.take)(data, { + accessToken: import_smithy_client5.expectString, + expiresIn: import_smithy_client5.expectInt32, + idToken: import_smithy_client5.expectString, + refreshToken: import_smithy_client5.expectString, + tokenType: import_smithy_client5.expectString + }); + Object.assign(contents, doc); + return contents; +}, 
"de_CreateTokenCommand"); +var de_CommandError = /* @__PURE__ */ __name(async (output, context) => { + const parsedOutput = { + ...output, + body: await (0, import_core2.parseJsonErrorBody)(output.body, context) + }; + const errorCode = (0, import_core2.loadRestJsonErrorCode)(output, parsedOutput.body); + switch (errorCode) { + case "AccessDeniedException": + case "com.amazonaws.ssooidc#AccessDeniedException": + throw await de_AccessDeniedExceptionRes(parsedOutput, context); + case "AuthorizationPendingException": + case "com.amazonaws.ssooidc#AuthorizationPendingException": + throw await de_AuthorizationPendingExceptionRes(parsedOutput, context); + case "ExpiredTokenException": + case "com.amazonaws.ssooidc#ExpiredTokenException": + throw await de_ExpiredTokenExceptionRes(parsedOutput, context); + case "InternalServerException": + case "com.amazonaws.ssooidc#InternalServerException": + throw await de_InternalServerExceptionRes(parsedOutput, context); + case "InvalidClientException": + case "com.amazonaws.ssooidc#InvalidClientException": + throw await de_InvalidClientExceptionRes(parsedOutput, context); + case "InvalidGrantException": + case "com.amazonaws.ssooidc#InvalidGrantException": + throw await de_InvalidGrantExceptionRes(parsedOutput, context); + case "InvalidRequestException": + case "com.amazonaws.ssooidc#InvalidRequestException": + throw await de_InvalidRequestExceptionRes(parsedOutput, context); + case "InvalidScopeException": + case "com.amazonaws.ssooidc#InvalidScopeException": + throw await de_InvalidScopeExceptionRes(parsedOutput, context); + case "SlowDownException": + case "com.amazonaws.ssooidc#SlowDownException": + throw await de_SlowDownExceptionRes(parsedOutput, context); + case "UnauthorizedClientException": + case "com.amazonaws.ssooidc#UnauthorizedClientException": + throw await de_UnauthorizedClientExceptionRes(parsedOutput, context); + case "UnsupportedGrantTypeException": + case "com.amazonaws.ssooidc#UnsupportedGrantTypeException": + 
throw await de_UnsupportedGrantTypeExceptionRes(parsedOutput, context); + default: + const parsedBody = parsedOutput.body; + return throwDefaultError({ + output, + parsedBody, + errorCode + }); + } +}, "de_CommandError"); +var throwDefaultError = (0, import_smithy_client5.withBaseException)(SSOOIDCServiceException); +var de_AccessDeniedExceptionRes = /* @__PURE__ */ __name(async (parsedOutput, context) => { + const contents = (0, import_smithy_client5.map)({}); + const data = parsedOutput.body; + const doc = (0, import_smithy_client5.take)(data, { + error: import_smithy_client5.expectString, + error_description: import_smithy_client5.expectString + }); + Object.assign(contents, doc); + const exception = new AccessDeniedException({ + $metadata: deserializeMetadata(parsedOutput), + ...contents + }); + return (0, import_smithy_client5.decorateServiceException)(exception, parsedOutput.body); +}, "de_AccessDeniedExceptionRes"); +var de_AuthorizationPendingExceptionRes = /* @__PURE__ */ __name(async (parsedOutput, context) => { + const contents = (0, import_smithy_client5.map)({}); + const data = parsedOutput.body; + const doc = (0, import_smithy_client5.take)(data, { + error: import_smithy_client5.expectString, + error_description: import_smithy_client5.expectString + }); + Object.assign(contents, doc); + const exception = new AuthorizationPendingException({ + $metadata: deserializeMetadata(parsedOutput), + ...contents + }); + return (0, import_smithy_client5.decorateServiceException)(exception, parsedOutput.body); +}, "de_AuthorizationPendingExceptionRes"); +var de_ExpiredTokenExceptionRes = /* @__PURE__ */ __name(async (parsedOutput, context) => { + const contents = (0, import_smithy_client5.map)({}); + const data = parsedOutput.body; + const doc = (0, import_smithy_client5.take)(data, { + error: import_smithy_client5.expectString, + error_description: import_smithy_client5.expectString + }); + Object.assign(contents, doc); + const exception = new 
ExpiredTokenException({ + $metadata: deserializeMetadata(parsedOutput), + ...contents + }); + return (0, import_smithy_client5.decorateServiceException)(exception, parsedOutput.body); +}, "de_ExpiredTokenExceptionRes"); +var de_InternalServerExceptionRes = /* @__PURE__ */ __name(async (parsedOutput, context) => { + const contents = (0, import_smithy_client5.map)({}); + const data = parsedOutput.body; + const doc = (0, import_smithy_client5.take)(data, { + error: import_smithy_client5.expectString, + error_description: import_smithy_client5.expectString + }); + Object.assign(contents, doc); + const exception = new InternalServerException({ + $metadata: deserializeMetadata(parsedOutput), + ...contents + }); + return (0, import_smithy_client5.decorateServiceException)(exception, parsedOutput.body); +}, "de_InternalServerExceptionRes"); +var de_InvalidClientExceptionRes = /* @__PURE__ */ __name(async (parsedOutput, context) => { + const contents = (0, import_smithy_client5.map)({}); + const data = parsedOutput.body; + const doc = (0, import_smithy_client5.take)(data, { + error: import_smithy_client5.expectString, + error_description: import_smithy_client5.expectString + }); + Object.assign(contents, doc); + const exception = new InvalidClientException({ + $metadata: deserializeMetadata(parsedOutput), + ...contents + }); + return (0, import_smithy_client5.decorateServiceException)(exception, parsedOutput.body); +}, "de_InvalidClientExceptionRes"); +var de_InvalidGrantExceptionRes = /* @__PURE__ */ __name(async (parsedOutput, context) => { + const contents = (0, import_smithy_client5.map)({}); + const data = parsedOutput.body; + const doc = (0, import_smithy_client5.take)(data, { + error: import_smithy_client5.expectString, + error_description: import_smithy_client5.expectString + }); + Object.assign(contents, doc); + const exception = new InvalidGrantException({ + $metadata: deserializeMetadata(parsedOutput), + ...contents + }); + return (0, 
import_smithy_client5.decorateServiceException)(exception, parsedOutput.body); +}, "de_InvalidGrantExceptionRes"); +var de_InvalidRequestExceptionRes = /* @__PURE__ */ __name(async (parsedOutput, context) => { + const contents = (0, import_smithy_client5.map)({}); + const data = parsedOutput.body; + const doc = (0, import_smithy_client5.take)(data, { + error: import_smithy_client5.expectString, + error_description: import_smithy_client5.expectString + }); + Object.assign(contents, doc); + const exception = new InvalidRequestException({ + $metadata: deserializeMetadata(parsedOutput), + ...contents + }); + return (0, import_smithy_client5.decorateServiceException)(exception, parsedOutput.body); +}, "de_InvalidRequestExceptionRes"); +var de_InvalidScopeExceptionRes = /* @__PURE__ */ __name(async (parsedOutput, context) => { + const contents = (0, import_smithy_client5.map)({}); + const data = parsedOutput.body; + const doc = (0, import_smithy_client5.take)(data, { + error: import_smithy_client5.expectString, + error_description: import_smithy_client5.expectString + }); + Object.assign(contents, doc); + const exception = new InvalidScopeException({ + $metadata: deserializeMetadata(parsedOutput), + ...contents + }); + return (0, import_smithy_client5.decorateServiceException)(exception, parsedOutput.body); +}, "de_InvalidScopeExceptionRes"); +var de_SlowDownExceptionRes = /* @__PURE__ */ __name(async (parsedOutput, context) => { + const contents = (0, import_smithy_client5.map)({}); + const data = parsedOutput.body; + const doc = (0, import_smithy_client5.take)(data, { + error: import_smithy_client5.expectString, + error_description: import_smithy_client5.expectString + }); + Object.assign(contents, doc); + const exception = new SlowDownException({ + $metadata: deserializeMetadata(parsedOutput), + ...contents + }); + return (0, import_smithy_client5.decorateServiceException)(exception, parsedOutput.body); +}, "de_SlowDownExceptionRes"); +var 
de_UnauthorizedClientExceptionRes = /* @__PURE__ */ __name(async (parsedOutput, context) => { + const contents = (0, import_smithy_client5.map)({}); + const data = parsedOutput.body; + const doc = (0, import_smithy_client5.take)(data, { + error: import_smithy_client5.expectString, + error_description: import_smithy_client5.expectString + }); + Object.assign(contents, doc); + const exception = new UnauthorizedClientException({ + $metadata: deserializeMetadata(parsedOutput), + ...contents + }); + return (0, import_smithy_client5.decorateServiceException)(exception, parsedOutput.body); +}, "de_UnauthorizedClientExceptionRes"); +var de_UnsupportedGrantTypeExceptionRes = /* @__PURE__ */ __name(async (parsedOutput, context) => { + const contents = (0, import_smithy_client5.map)({}); + const data = parsedOutput.body; + const doc = (0, import_smithy_client5.take)(data, { + error: import_smithy_client5.expectString, + error_description: import_smithy_client5.expectString + }); + Object.assign(contents, doc); + const exception = new UnsupportedGrantTypeException({ + $metadata: deserializeMetadata(parsedOutput), + ...contents + }); + return (0, import_smithy_client5.decorateServiceException)(exception, parsedOutput.body); +}, "de_UnsupportedGrantTypeExceptionRes"); +var deserializeMetadata = /* @__PURE__ */ __name((output) => ({ + httpStatusCode: output.statusCode, + requestId: output.headers["x-amzn-requestid"] ?? output.headers["x-amzn-request-id"] ?? 
output.headers["x-amz-request-id"], + extendedRequestId: output.headers["x-amz-id-2"], + cfId: output.headers["x-amz-cf-id"] +}), "deserializeMetadata"); + +// src/submodules/sso-oidc/commands/CreateTokenCommand.ts +var CreateTokenCommand = class extends import_smithy_client6.Command.classBuilder().ep(commonParams).m(function(Command, cs, config, o) { + return [ + (0, import_middleware_serde.getSerdePlugin)(config, this.serialize, this.deserialize), + (0, import_middleware_endpoint2.getEndpointPlugin)(config, Command.getEndpointParameterInstructions()) + ]; +}).s("AWSSSOOIDCService", "CreateToken", {}).n("SSOOIDCClient", "CreateTokenCommand").f(CreateTokenRequestFilterSensitiveLog, CreateTokenResponseFilterSensitiveLog).ser(se_CreateTokenCommand).de(de_CreateTokenCommand).build() { + static { + __name(this, "CreateTokenCommand"); + } +}; + +// src/submodules/sso-oidc/SSOOIDC.ts +var commands = { + CreateTokenCommand +}; +var SSOOIDC = class extends SSOOIDCClient { + static { + __name(this, "SSOOIDC"); + } +}; +(0, import_smithy_client7.createAggregatedClient)(commands, SSOOIDC); +// Annotate the CommonJS export names for ESM import in node: +0 && (module.exports = { + $Command, + AccessDeniedException, + AuthorizationPendingException, + CreateTokenCommand, + CreateTokenRequestFilterSensitiveLog, + CreateTokenResponseFilterSensitiveLog, + ExpiredTokenException, + InternalServerException, + InvalidClientException, + InvalidGrantException, + InvalidRequestException, + InvalidScopeException, + SSOOIDC, + SSOOIDCClient, + SSOOIDCServiceException, + SlowDownException, + UnauthorizedClientException, + UnsupportedGrantTypeException, + __Client +}); diff --git a/amplify/functions/deleteDocument/node_modules/@aws-sdk/nested-clients/dist-cjs/submodules/sso-oidc/runtimeConfig.browser.js b/amplify/functions/deleteDocument/node_modules/@aws-sdk/nested-clients/dist-cjs/submodules/sso-oidc/runtimeConfig.browser.js new file mode 100644 index 0000000..6654024 --- /dev/null +++ 
b/amplify/functions/deleteDocument/node_modules/@aws-sdk/nested-clients/dist-cjs/submodules/sso-oidc/runtimeConfig.browser.js @@ -0,0 +1,38 @@ +"use strict"; +Object.defineProperty(exports, "__esModule", { value: true }); +exports.getRuntimeConfig = void 0; +const tslib_1 = require("tslib"); +const package_json_1 = tslib_1.__importDefault(require("../../../package.json")); +const sha256_browser_1 = require("@aws-crypto/sha256-browser"); +const util_user_agent_browser_1 = require("@aws-sdk/util-user-agent-browser"); +const config_resolver_1 = require("@smithy/config-resolver"); +const fetch_http_handler_1 = require("@smithy/fetch-http-handler"); +const invalid_dependency_1 = require("@smithy/invalid-dependency"); +const util_body_length_browser_1 = require("@smithy/util-body-length-browser"); +const util_retry_1 = require("@smithy/util-retry"); +const runtimeConfig_shared_1 = require("./runtimeConfig.shared"); +const smithy_client_1 = require("@smithy/smithy-client"); +const util_defaults_mode_browser_1 = require("@smithy/util-defaults-mode-browser"); +const getRuntimeConfig = (config) => { + const defaultsMode = (0, util_defaults_mode_browser_1.resolveDefaultsModeConfig)(config); + const defaultConfigProvider = () => defaultsMode().then(smithy_client_1.loadConfigsForDefaultMode); + const clientSharedValues = (0, runtimeConfig_shared_1.getRuntimeConfig)(config); + return { + ...clientSharedValues, + ...config, + runtime: "browser", + defaultsMode, + bodyLengthChecker: config?.bodyLengthChecker ?? util_body_length_browser_1.calculateBodyLength, + defaultUserAgentProvider: config?.defaultUserAgentProvider ?? + (0, util_user_agent_browser_1.createDefaultUserAgentProvider)({ serviceId: clientSharedValues.serviceId, clientVersion: package_json_1.default.version }), + maxAttempts: config?.maxAttempts ?? util_retry_1.DEFAULT_MAX_ATTEMPTS, + region: config?.region ?? 
(0, invalid_dependency_1.invalidProvider)("Region is missing"), + requestHandler: fetch_http_handler_1.FetchHttpHandler.create(config?.requestHandler ?? defaultConfigProvider), + retryMode: config?.retryMode ?? (async () => (await defaultConfigProvider()).retryMode || util_retry_1.DEFAULT_RETRY_MODE), + sha256: config?.sha256 ?? sha256_browser_1.Sha256, + streamCollector: config?.streamCollector ?? fetch_http_handler_1.streamCollector, + useDualstackEndpoint: config?.useDualstackEndpoint ?? (() => Promise.resolve(config_resolver_1.DEFAULT_USE_DUALSTACK_ENDPOINT)), + useFipsEndpoint: config?.useFipsEndpoint ?? (() => Promise.resolve(config_resolver_1.DEFAULT_USE_FIPS_ENDPOINT)), + }; +}; +exports.getRuntimeConfig = getRuntimeConfig; diff --git a/amplify/functions/deleteDocument/node_modules/@aws-sdk/nested-clients/dist-cjs/submodules/sso-oidc/runtimeConfig.js b/amplify/functions/deleteDocument/node_modules/@aws-sdk/nested-clients/dist-cjs/submodules/sso-oidc/runtimeConfig.js new file mode 100644 index 0000000..9cc237f --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@aws-sdk/nested-clients/dist-cjs/submodules/sso-oidc/runtimeConfig.js @@ -0,0 +1,51 @@ +"use strict"; +Object.defineProperty(exports, "__esModule", { value: true }); +exports.getRuntimeConfig = void 0; +const tslib_1 = require("tslib"); +const package_json_1 = tslib_1.__importDefault(require("../../../package.json")); +const core_1 = require("@aws-sdk/core"); +const util_user_agent_node_1 = require("@aws-sdk/util-user-agent-node"); +const config_resolver_1 = require("@smithy/config-resolver"); +const hash_node_1 = require("@smithy/hash-node"); +const middleware_retry_1 = require("@smithy/middleware-retry"); +const node_config_provider_1 = require("@smithy/node-config-provider"); +const node_http_handler_1 = require("@smithy/node-http-handler"); +const util_body_length_node_1 = require("@smithy/util-body-length-node"); +const util_retry_1 = require("@smithy/util-retry"); +const 
runtimeConfig_shared_1 = require("./runtimeConfig.shared"); +const smithy_client_1 = require("@smithy/smithy-client"); +const util_defaults_mode_node_1 = require("@smithy/util-defaults-mode-node"); +const smithy_client_2 = require("@smithy/smithy-client"); +const getRuntimeConfig = (config) => { + (0, smithy_client_2.emitWarningIfUnsupportedVersion)(process.version); + const defaultsMode = (0, util_defaults_mode_node_1.resolveDefaultsModeConfig)(config); + const defaultConfigProvider = () => defaultsMode().then(smithy_client_1.loadConfigsForDefaultMode); + const clientSharedValues = (0, runtimeConfig_shared_1.getRuntimeConfig)(config); + (0, core_1.emitWarningIfUnsupportedVersion)(process.version); + const profileConfig = { profile: config?.profile }; + return { + ...clientSharedValues, + ...config, + runtime: "node", + defaultsMode, + authSchemePreference: config?.authSchemePreference ?? (0, node_config_provider_1.loadConfig)(core_1.NODE_AUTH_SCHEME_PREFERENCE_OPTIONS, profileConfig), + bodyLengthChecker: config?.bodyLengthChecker ?? util_body_length_node_1.calculateBodyLength, + defaultUserAgentProvider: config?.defaultUserAgentProvider ?? + (0, util_user_agent_node_1.createDefaultUserAgentProvider)({ serviceId: clientSharedValues.serviceId, clientVersion: package_json_1.default.version }), + maxAttempts: config?.maxAttempts ?? (0, node_config_provider_1.loadConfig)(middleware_retry_1.NODE_MAX_ATTEMPT_CONFIG_OPTIONS, config), + region: config?.region ?? + (0, node_config_provider_1.loadConfig)(config_resolver_1.NODE_REGION_CONFIG_OPTIONS, { ...config_resolver_1.NODE_REGION_CONFIG_FILE_OPTIONS, ...profileConfig }), + requestHandler: node_http_handler_1.NodeHttpHandler.create(config?.requestHandler ?? defaultConfigProvider), + retryMode: config?.retryMode ?? 
+ (0, node_config_provider_1.loadConfig)({ + ...middleware_retry_1.NODE_RETRY_MODE_CONFIG_OPTIONS, + default: async () => (await defaultConfigProvider()).retryMode || util_retry_1.DEFAULT_RETRY_MODE, + }, config), + sha256: config?.sha256 ?? hash_node_1.Hash.bind(null, "sha256"), + streamCollector: config?.streamCollector ?? node_http_handler_1.streamCollector, + useDualstackEndpoint: config?.useDualstackEndpoint ?? (0, node_config_provider_1.loadConfig)(config_resolver_1.NODE_USE_DUALSTACK_ENDPOINT_CONFIG_OPTIONS, profileConfig), + useFipsEndpoint: config?.useFipsEndpoint ?? (0, node_config_provider_1.loadConfig)(config_resolver_1.NODE_USE_FIPS_ENDPOINT_CONFIG_OPTIONS, profileConfig), + userAgentAppId: config?.userAgentAppId ?? (0, node_config_provider_1.loadConfig)(util_user_agent_node_1.NODE_APP_ID_CONFIG_OPTIONS, profileConfig), + }; +}; +exports.getRuntimeConfig = getRuntimeConfig; diff --git a/amplify/functions/deleteDocument/node_modules/@aws-sdk/nested-clients/dist-cjs/submodules/sso-oidc/runtimeConfig.native.js b/amplify/functions/deleteDocument/node_modules/@aws-sdk/nested-clients/dist-cjs/submodules/sso-oidc/runtimeConfig.native.js new file mode 100644 index 0000000..34c5f8e --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@aws-sdk/nested-clients/dist-cjs/submodules/sso-oidc/runtimeConfig.native.js @@ -0,0 +1,15 @@ +"use strict"; +Object.defineProperty(exports, "__esModule", { value: true }); +exports.getRuntimeConfig = void 0; +const sha256_js_1 = require("@aws-crypto/sha256-js"); +const runtimeConfig_browser_1 = require("./runtimeConfig.browser"); +const getRuntimeConfig = (config) => { + const browserDefaults = (0, runtimeConfig_browser_1.getRuntimeConfig)(config); + return { + ...browserDefaults, + ...config, + runtime: "react-native", + sha256: config?.sha256 ?? 
sha256_js_1.Sha256, + }; +}; +exports.getRuntimeConfig = getRuntimeConfig; diff --git a/amplify/functions/deleteDocument/node_modules/@aws-sdk/nested-clients/dist-cjs/submodules/sso-oidc/runtimeConfig.shared.js b/amplify/functions/deleteDocument/node_modules/@aws-sdk/nested-clients/dist-cjs/submodules/sso-oidc/runtimeConfig.shared.js new file mode 100644 index 0000000..a305a1b --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@aws-sdk/nested-clients/dist-cjs/submodules/sso-oidc/runtimeConfig.shared.js @@ -0,0 +1,40 @@ +"use strict"; +Object.defineProperty(exports, "__esModule", { value: true }); +exports.getRuntimeConfig = void 0; +const core_1 = require("@aws-sdk/core"); +const core_2 = require("@smithy/core"); +const smithy_client_1 = require("@smithy/smithy-client"); +const url_parser_1 = require("@smithy/url-parser"); +const util_base64_1 = require("@smithy/util-base64"); +const util_utf8_1 = require("@smithy/util-utf8"); +const httpAuthSchemeProvider_1 = require("./auth/httpAuthSchemeProvider"); +const endpointResolver_1 = require("./endpoint/endpointResolver"); +const getRuntimeConfig = (config) => { + return { + apiVersion: "2019-06-10", + base64Decoder: config?.base64Decoder ?? util_base64_1.fromBase64, + base64Encoder: config?.base64Encoder ?? util_base64_1.toBase64, + disableHostPrefix: config?.disableHostPrefix ?? false, + endpointProvider: config?.endpointProvider ?? endpointResolver_1.defaultEndpointResolver, + extensions: config?.extensions ?? [], + httpAuthSchemeProvider: config?.httpAuthSchemeProvider ?? httpAuthSchemeProvider_1.defaultSSOOIDCHttpAuthSchemeProvider, + httpAuthSchemes: config?.httpAuthSchemes ?? 
[ + { + schemeId: "aws.auth#sigv4", + identityProvider: (ipc) => ipc.getIdentityProvider("aws.auth#sigv4"), + signer: new core_1.AwsSdkSigV4Signer(), + }, + { + schemeId: "smithy.api#noAuth", + identityProvider: (ipc) => ipc.getIdentityProvider("smithy.api#noAuth") || (async () => ({})), + signer: new core_2.NoAuthSigner(), + }, + ], + logger: config?.logger ?? new smithy_client_1.NoOpLogger(), + serviceId: config?.serviceId ?? "SSO OIDC", + urlParser: config?.urlParser ?? url_parser_1.parseUrl, + utf8Decoder: config?.utf8Decoder ?? util_utf8_1.fromUtf8, + utf8Encoder: config?.utf8Encoder ?? util_utf8_1.toUtf8, + }; +}; +exports.getRuntimeConfig = getRuntimeConfig; diff --git a/amplify/functions/deleteDocument/node_modules/@aws-sdk/nested-clients/dist-cjs/submodules/sts/STSClient.js b/amplify/functions/deleteDocument/node_modules/@aws-sdk/nested-clients/dist-cjs/submodules/sts/STSClient.js new file mode 100644 index 0000000..13c3c74 --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@aws-sdk/nested-clients/dist-cjs/submodules/sts/STSClient.js @@ -0,0 +1,52 @@ +"use strict"; +Object.defineProperty(exports, "__esModule", { value: true }); +exports.STSClient = exports.__Client = void 0; +const middleware_host_header_1 = require("@aws-sdk/middleware-host-header"); +const middleware_logger_1 = require("@aws-sdk/middleware-logger"); +const middleware_recursion_detection_1 = require("@aws-sdk/middleware-recursion-detection"); +const middleware_user_agent_1 = require("@aws-sdk/middleware-user-agent"); +const config_resolver_1 = require("@smithy/config-resolver"); +const core_1 = require("@smithy/core"); +const middleware_content_length_1 = require("@smithy/middleware-content-length"); +const middleware_endpoint_1 = require("@smithy/middleware-endpoint"); +const middleware_retry_1 = require("@smithy/middleware-retry"); +const smithy_client_1 = require("@smithy/smithy-client"); +Object.defineProperty(exports, "__Client", { enumerable: true, get: function () { 
return smithy_client_1.Client; } }); +const httpAuthSchemeProvider_1 = require("./auth/httpAuthSchemeProvider"); +const EndpointParameters_1 = require("./endpoint/EndpointParameters"); +const runtimeConfig_1 = require("./runtimeConfig"); +const runtimeExtensions_1 = require("./runtimeExtensions"); +class STSClient extends smithy_client_1.Client { + config; + constructor(...[configuration]) { + const _config_0 = (0, runtimeConfig_1.getRuntimeConfig)(configuration || {}); + super(_config_0); + this.initConfig = _config_0; + const _config_1 = (0, EndpointParameters_1.resolveClientEndpointParameters)(_config_0); + const _config_2 = (0, middleware_user_agent_1.resolveUserAgentConfig)(_config_1); + const _config_3 = (0, middleware_retry_1.resolveRetryConfig)(_config_2); + const _config_4 = (0, config_resolver_1.resolveRegionConfig)(_config_3); + const _config_5 = (0, middleware_host_header_1.resolveHostHeaderConfig)(_config_4); + const _config_6 = (0, middleware_endpoint_1.resolveEndpointConfig)(_config_5); + const _config_7 = (0, httpAuthSchemeProvider_1.resolveHttpAuthSchemeConfig)(_config_6); + const _config_8 = (0, runtimeExtensions_1.resolveRuntimeExtensions)(_config_7, configuration?.extensions || []); + this.config = _config_8; + this.middlewareStack.use((0, middleware_user_agent_1.getUserAgentPlugin)(this.config)); + this.middlewareStack.use((0, middleware_retry_1.getRetryPlugin)(this.config)); + this.middlewareStack.use((0, middleware_content_length_1.getContentLengthPlugin)(this.config)); + this.middlewareStack.use((0, middleware_host_header_1.getHostHeaderPlugin)(this.config)); + this.middlewareStack.use((0, middleware_logger_1.getLoggerPlugin)(this.config)); + this.middlewareStack.use((0, middleware_recursion_detection_1.getRecursionDetectionPlugin)(this.config)); + this.middlewareStack.use((0, core_1.getHttpAuthSchemeEndpointRuleSetPlugin)(this.config, { + httpAuthSchemeParametersProvider: httpAuthSchemeProvider_1.defaultSTSHttpAuthSchemeParametersProvider, 
+ identityProviderConfigProvider: async (config) => new core_1.DefaultIdentityProviderConfig({ + "aws.auth#sigv4": config.credentials, + }), + })); + this.middlewareStack.use((0, core_1.getHttpSigningPlugin)(this.config)); + } + destroy() { + super.destroy(); + } +} +exports.STSClient = STSClient; diff --git a/amplify/functions/deleteDocument/node_modules/@aws-sdk/nested-clients/dist-cjs/submodules/sts/auth/httpAuthExtensionConfiguration.js b/amplify/functions/deleteDocument/node_modules/@aws-sdk/nested-clients/dist-cjs/submodules/sts/auth/httpAuthExtensionConfiguration.js new file mode 100644 index 0000000..239095e --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@aws-sdk/nested-clients/dist-cjs/submodules/sts/auth/httpAuthExtensionConfiguration.js @@ -0,0 +1,43 @@ +"use strict"; +Object.defineProperty(exports, "__esModule", { value: true }); +exports.resolveHttpAuthRuntimeConfig = exports.getHttpAuthExtensionConfiguration = void 0; +const getHttpAuthExtensionConfiguration = (runtimeConfig) => { + const _httpAuthSchemes = runtimeConfig.httpAuthSchemes; + let _httpAuthSchemeProvider = runtimeConfig.httpAuthSchemeProvider; + let _credentials = runtimeConfig.credentials; + return { + setHttpAuthScheme(httpAuthScheme) { + const index = _httpAuthSchemes.findIndex((scheme) => scheme.schemeId === httpAuthScheme.schemeId); + if (index === -1) { + _httpAuthSchemes.push(httpAuthScheme); + } + else { + _httpAuthSchemes.splice(index, 1, httpAuthScheme); + } + }, + httpAuthSchemes() { + return _httpAuthSchemes; + }, + setHttpAuthSchemeProvider(httpAuthSchemeProvider) { + _httpAuthSchemeProvider = httpAuthSchemeProvider; + }, + httpAuthSchemeProvider() { + return _httpAuthSchemeProvider; + }, + setCredentials(credentials) { + _credentials = credentials; + }, + credentials() { + return _credentials; + }, + }; +}; +exports.getHttpAuthExtensionConfiguration = getHttpAuthExtensionConfiguration; +const resolveHttpAuthRuntimeConfig = (config) => { + return { + 
httpAuthSchemes: config.httpAuthSchemes(), + httpAuthSchemeProvider: config.httpAuthSchemeProvider(), + credentials: config.credentials(), + }; +}; +exports.resolveHttpAuthRuntimeConfig = resolveHttpAuthRuntimeConfig; diff --git a/amplify/functions/deleteDocument/node_modules/@aws-sdk/nested-clients/dist-cjs/submodules/sts/auth/httpAuthSchemeProvider.js b/amplify/functions/deleteDocument/node_modules/@aws-sdk/nested-clients/dist-cjs/submodules/sts/auth/httpAuthSchemeProvider.js new file mode 100644 index 0000000..842241a --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@aws-sdk/nested-clients/dist-cjs/submodules/sts/auth/httpAuthSchemeProvider.js @@ -0,0 +1,62 @@ +"use strict"; +Object.defineProperty(exports, "__esModule", { value: true }); +exports.resolveHttpAuthSchemeConfig = exports.resolveStsAuthConfig = exports.defaultSTSHttpAuthSchemeProvider = exports.defaultSTSHttpAuthSchemeParametersProvider = void 0; +const core_1 = require("@aws-sdk/core"); +const util_middleware_1 = require("@smithy/util-middleware"); +const STSClient_1 = require("../STSClient"); +const defaultSTSHttpAuthSchemeParametersProvider = async (config, context, input) => { + return { + operation: (0, util_middleware_1.getSmithyContext)(context).operation, + region: (await (0, util_middleware_1.normalizeProvider)(config.region)()) || + (() => { + throw new Error("expected `region` to be configured for `aws.auth#sigv4`"); + })(), + }; +}; +exports.defaultSTSHttpAuthSchemeParametersProvider = defaultSTSHttpAuthSchemeParametersProvider; +function createAwsAuthSigv4HttpAuthOption(authParameters) { + return { + schemeId: "aws.auth#sigv4", + signingProperties: { + name: "sts", + region: authParameters.region, + }, + propertiesExtractor: (config, context) => ({ + signingProperties: { + config, + context, + }, + }), + }; +} +function createSmithyApiNoAuthHttpAuthOption(authParameters) { + return { + schemeId: "smithy.api#noAuth", + }; +} +const defaultSTSHttpAuthSchemeProvider = 
(authParameters) => { + const options = []; + switch (authParameters.operation) { + case "AssumeRoleWithWebIdentity": { + options.push(createSmithyApiNoAuthHttpAuthOption(authParameters)); + break; + } + default: { + options.push(createAwsAuthSigv4HttpAuthOption(authParameters)); + } + } + return options; +}; +exports.defaultSTSHttpAuthSchemeProvider = defaultSTSHttpAuthSchemeProvider; +const resolveStsAuthConfig = (input) => Object.assign(input, { + stsClientCtor: STSClient_1.STSClient, +}); +exports.resolveStsAuthConfig = resolveStsAuthConfig; +const resolveHttpAuthSchemeConfig = (config) => { + const config_0 = (0, exports.resolveStsAuthConfig)(config); + const config_1 = (0, core_1.resolveAwsSdkSigV4Config)(config_0); + return Object.assign(config_1, { + authSchemePreference: (0, util_middleware_1.normalizeProvider)(config.authSchemePreference ?? []), + }); +}; +exports.resolveHttpAuthSchemeConfig = resolveHttpAuthSchemeConfig; diff --git a/amplify/functions/deleteDocument/node_modules/@aws-sdk/nested-clients/dist-cjs/submodules/sts/endpoint/EndpointParameters.js b/amplify/functions/deleteDocument/node_modules/@aws-sdk/nested-clients/dist-cjs/submodules/sts/endpoint/EndpointParameters.js new file mode 100644 index 0000000..3aec6a5 --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@aws-sdk/nested-clients/dist-cjs/submodules/sts/endpoint/EndpointParameters.js @@ -0,0 +1,19 @@ +"use strict"; +Object.defineProperty(exports, "__esModule", { value: true }); +exports.commonParams = exports.resolveClientEndpointParameters = void 0; +const resolveClientEndpointParameters = (options) => { + return Object.assign(options, { + useDualstackEndpoint: options.useDualstackEndpoint ?? false, + useFipsEndpoint: options.useFipsEndpoint ?? false, + useGlobalEndpoint: options.useGlobalEndpoint ?? 
false, + defaultSigningName: "sts", + }); +}; +exports.resolveClientEndpointParameters = resolveClientEndpointParameters; +exports.commonParams = { + UseGlobalEndpoint: { type: "builtInParams", name: "useGlobalEndpoint" }, + UseFIPS: { type: "builtInParams", name: "useFipsEndpoint" }, + Endpoint: { type: "builtInParams", name: "endpoint" }, + Region: { type: "builtInParams", name: "region" }, + UseDualStack: { type: "builtInParams", name: "useDualstackEndpoint" }, +}; diff --git a/amplify/functions/deleteDocument/node_modules/@aws-sdk/nested-clients/dist-cjs/submodules/sts/endpoint/endpointResolver.js b/amplify/functions/deleteDocument/node_modules/@aws-sdk/nested-clients/dist-cjs/submodules/sts/endpoint/endpointResolver.js new file mode 100644 index 0000000..6bfb6e9 --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@aws-sdk/nested-clients/dist-cjs/submodules/sts/endpoint/endpointResolver.js @@ -0,0 +1,18 @@ +"use strict"; +Object.defineProperty(exports, "__esModule", { value: true }); +exports.defaultEndpointResolver = void 0; +const util_endpoints_1 = require("@aws-sdk/util-endpoints"); +const util_endpoints_2 = require("@smithy/util-endpoints"); +const ruleset_1 = require("./ruleset"); +const cache = new util_endpoints_2.EndpointCache({ + size: 50, + params: ["Endpoint", "Region", "UseDualStack", "UseFIPS", "UseGlobalEndpoint"], +}); +const defaultEndpointResolver = (endpointParams, context = {}) => { + return cache.get(endpointParams, () => (0, util_endpoints_2.resolveEndpoint)(ruleset_1.ruleSet, { + endpointParams: endpointParams, + logger: context.logger, + })); +}; +exports.defaultEndpointResolver = defaultEndpointResolver; +util_endpoints_2.customEndpointFunctions.aws = util_endpoints_1.awsEndpointFunctions; diff --git a/amplify/functions/deleteDocument/node_modules/@aws-sdk/nested-clients/dist-cjs/submodules/sts/endpoint/ruleset.js 
b/amplify/functions/deleteDocument/node_modules/@aws-sdk/nested-clients/dist-cjs/submodules/sts/endpoint/ruleset.js new file mode 100644 index 0000000..7428259 --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@aws-sdk/nested-clients/dist-cjs/submodules/sts/endpoint/ruleset.js @@ -0,0 +1,7 @@ +"use strict"; +Object.defineProperty(exports, "__esModule", { value: true }); +exports.ruleSet = void 0; +const F = "required", G = "type", H = "fn", I = "argv", J = "ref"; +const a = false, b = true, c = "booleanEquals", d = "stringEquals", e = "sigv4", f = "sts", g = "us-east-1", h = "endpoint", i = "https://sts.{Region}.{PartitionResult#dnsSuffix}", j = "tree", k = "error", l = "getAttr", m = { [F]: false, [G]: "String" }, n = { [F]: true, "default": false, [G]: "Boolean" }, o = { [J]: "Endpoint" }, p = { [H]: "isSet", [I]: [{ [J]: "Region" }] }, q = { [J]: "Region" }, r = { [H]: "aws.partition", [I]: [q], "assign": "PartitionResult" }, s = { [J]: "UseFIPS" }, t = { [J]: "UseDualStack" }, u = { "url": "https://sts.amazonaws.com", "properties": { "authSchemes": [{ "name": e, "signingName": f, "signingRegion": g }] }, "headers": {} }, v = {}, w = { "conditions": [{ [H]: d, [I]: [q, "aws-global"] }], [h]: u, [G]: h }, x = { [H]: c, [I]: [s, true] }, y = { [H]: c, [I]: [t, true] }, z = { [H]: l, [I]: [{ [J]: "PartitionResult" }, "supportsFIPS"] }, A = { [J]: "PartitionResult" }, B = { [H]: c, [I]: [true, { [H]: l, [I]: [A, "supportsDualStack"] }] }, C = [{ [H]: "isSet", [I]: [o] }], D = [x], E = [y]; +const _data = { version: "1.0", parameters: { Region: m, UseDualStack: n, UseFIPS: n, Endpoint: m, UseGlobalEndpoint: n }, rules: [{ conditions: [{ [H]: c, [I]: [{ [J]: "UseGlobalEndpoint" }, b] }, { [H]: "not", [I]: C }, p, r, { [H]: c, [I]: [s, a] }, { [H]: c, [I]: [t, a] }], rules: [{ conditions: [{ [H]: d, [I]: [q, "ap-northeast-1"] }], endpoint: u, [G]: h }, { conditions: [{ [H]: d, [I]: [q, "ap-south-1"] }], endpoint: u, [G]: h }, { conditions: [{ [H]: d, 
[I]: [q, "ap-southeast-1"] }], endpoint: u, [G]: h }, { conditions: [{ [H]: d, [I]: [q, "ap-southeast-2"] }], endpoint: u, [G]: h }, w, { conditions: [{ [H]: d, [I]: [q, "ca-central-1"] }], endpoint: u, [G]: h }, { conditions: [{ [H]: d, [I]: [q, "eu-central-1"] }], endpoint: u, [G]: h }, { conditions: [{ [H]: d, [I]: [q, "eu-north-1"] }], endpoint: u, [G]: h }, { conditions: [{ [H]: d, [I]: [q, "eu-west-1"] }], endpoint: u, [G]: h }, { conditions: [{ [H]: d, [I]: [q, "eu-west-2"] }], endpoint: u, [G]: h }, { conditions: [{ [H]: d, [I]: [q, "eu-west-3"] }], endpoint: u, [G]: h }, { conditions: [{ [H]: d, [I]: [q, "sa-east-1"] }], endpoint: u, [G]: h }, { conditions: [{ [H]: d, [I]: [q, g] }], endpoint: u, [G]: h }, { conditions: [{ [H]: d, [I]: [q, "us-east-2"] }], endpoint: u, [G]: h }, { conditions: [{ [H]: d, [I]: [q, "us-west-1"] }], endpoint: u, [G]: h }, { conditions: [{ [H]: d, [I]: [q, "us-west-2"] }], endpoint: u, [G]: h }, { endpoint: { url: i, properties: { authSchemes: [{ name: e, signingName: f, signingRegion: "{Region}" }] }, headers: v }, [G]: h }], [G]: j }, { conditions: C, rules: [{ conditions: D, error: "Invalid Configuration: FIPS and custom endpoint are not supported", [G]: k }, { conditions: E, error: "Invalid Configuration: Dualstack and custom endpoint are not supported", [G]: k }, { endpoint: { url: o, properties: v, headers: v }, [G]: h }], [G]: j }, { conditions: [p], rules: [{ conditions: [r], rules: [{ conditions: [x, y], rules: [{ conditions: [{ [H]: c, [I]: [b, z] }, B], rules: [{ endpoint: { url: "https://sts-fips.{Region}.{PartitionResult#dualStackDnsSuffix}", properties: v, headers: v }, [G]: h }], [G]: j }, { error: "FIPS and DualStack are enabled, but this partition does not support one or both", [G]: k }], [G]: j }, { conditions: D, rules: [{ conditions: [{ [H]: c, [I]: [z, b] }], rules: [{ conditions: [{ [H]: d, [I]: [{ [H]: l, [I]: [A, "name"] }, "aws-us-gov"] }], endpoint: { url: "https://sts.{Region}.amazonaws.com", 
properties: v, headers: v }, [G]: h }, { endpoint: { url: "https://sts-fips.{Region}.{PartitionResult#dnsSuffix}", properties: v, headers: v }, [G]: h }], [G]: j }, { error: "FIPS is enabled but this partition does not support FIPS", [G]: k }], [G]: j }, { conditions: E, rules: [{ conditions: [B], rules: [{ endpoint: { url: "https://sts.{Region}.{PartitionResult#dualStackDnsSuffix}", properties: v, headers: v }, [G]: h }], [G]: j }, { error: "DualStack is enabled but this partition does not support DualStack", [G]: k }], [G]: j }, w, { endpoint: { url: i, properties: v, headers: v }, [G]: h }], [G]: j }], [G]: j }, { error: "Invalid Configuration: Missing Region", [G]: k }] }; +exports.ruleSet = _data; diff --git a/amplify/functions/deleteDocument/node_modules/@aws-sdk/nested-clients/dist-cjs/submodules/sts/index.js b/amplify/functions/deleteDocument/node_modules/@aws-sdk/nested-clients/dist-cjs/submodules/sts/index.js new file mode 100644 index 0000000..bb0c42a --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@aws-sdk/nested-clients/dist-cjs/submodules/sts/index.js @@ -0,0 +1,951 @@ +"use strict"; +var __defProp = Object.defineProperty; +var __getOwnPropDesc = Object.getOwnPropertyDescriptor; +var __getOwnPropNames = Object.getOwnPropertyNames; +var __hasOwnProp = Object.prototype.hasOwnProperty; +var __name = (target, value) => __defProp(target, "name", { value, configurable: true }); +var __export = (target, all) => { + for (var name in all) + __defProp(target, name, { get: all[name], enumerable: true }); +}; +var __copyProps = (to, from, except, desc) => { + if (from && typeof from === "object" || typeof from === "function") { + for (let key of __getOwnPropNames(from)) + if (!__hasOwnProp.call(to, key) && key !== except) + __defProp(to, key, { get: () => from[key], enumerable: !(desc = __getOwnPropDesc(from, key)) || desc.enumerable }); + } + return to; +}; +var __reExport = (target, mod, secondTarget) => (__copyProps(target, mod, "default"), 
secondTarget && __copyProps(secondTarget, mod, "default")); +var __toCommonJS = (mod) => __copyProps(__defProp({}, "__esModule", { value: true }), mod); + +// src/submodules/sts/index.ts +var index_exports = {}; +__export(index_exports, { + AssumeRoleCommand: () => AssumeRoleCommand, + AssumeRoleResponseFilterSensitiveLog: () => AssumeRoleResponseFilterSensitiveLog, + AssumeRoleWithWebIdentityCommand: () => AssumeRoleWithWebIdentityCommand, + AssumeRoleWithWebIdentityRequestFilterSensitiveLog: () => AssumeRoleWithWebIdentityRequestFilterSensitiveLog, + AssumeRoleWithWebIdentityResponseFilterSensitiveLog: () => AssumeRoleWithWebIdentityResponseFilterSensitiveLog, + ClientInputEndpointParameters: () => import_EndpointParameters3.ClientInputEndpointParameters, + CredentialsFilterSensitiveLog: () => CredentialsFilterSensitiveLog, + ExpiredTokenException: () => ExpiredTokenException, + IDPCommunicationErrorException: () => IDPCommunicationErrorException, + IDPRejectedClaimException: () => IDPRejectedClaimException, + InvalidIdentityTokenException: () => InvalidIdentityTokenException, + MalformedPolicyDocumentException: () => MalformedPolicyDocumentException, + PackedPolicyTooLargeException: () => PackedPolicyTooLargeException, + RegionDisabledException: () => RegionDisabledException, + STS: () => STS, + STSServiceException: () => STSServiceException, + decorateDefaultCredentialProvider: () => decorateDefaultCredentialProvider, + getDefaultRoleAssumer: () => getDefaultRoleAssumer2, + getDefaultRoleAssumerWithWebIdentity: () => getDefaultRoleAssumerWithWebIdentity2 +}); +module.exports = __toCommonJS(index_exports); +__reExport(index_exports, require("./STSClient"), module.exports); + +// src/submodules/sts/STS.ts +var import_smithy_client6 = require("@smithy/smithy-client"); + +// src/submodules/sts/commands/AssumeRoleCommand.ts +var import_middleware_endpoint = require("@smithy/middleware-endpoint"); +var import_middleware_serde = require("@smithy/middleware-serde"); 
+var import_smithy_client4 = require("@smithy/smithy-client"); +var import_EndpointParameters = require("./endpoint/EndpointParameters"); + +// src/submodules/sts/models/models_0.ts +var import_smithy_client2 = require("@smithy/smithy-client"); + +// src/submodules/sts/models/STSServiceException.ts +var import_smithy_client = require("@smithy/smithy-client"); +var STSServiceException = class _STSServiceException extends import_smithy_client.ServiceException { + static { + __name(this, "STSServiceException"); + } + /** + * @internal + */ + constructor(options) { + super(options); + Object.setPrototypeOf(this, _STSServiceException.prototype); + } +}; + +// src/submodules/sts/models/models_0.ts +var CredentialsFilterSensitiveLog = /* @__PURE__ */ __name((obj) => ({ + ...obj, + ...obj.SecretAccessKey && { SecretAccessKey: import_smithy_client2.SENSITIVE_STRING } +}), "CredentialsFilterSensitiveLog"); +var AssumeRoleResponseFilterSensitiveLog = /* @__PURE__ */ __name((obj) => ({ + ...obj, + ...obj.Credentials && { Credentials: CredentialsFilterSensitiveLog(obj.Credentials) } +}), "AssumeRoleResponseFilterSensitiveLog"); +var ExpiredTokenException = class _ExpiredTokenException extends STSServiceException { + static { + __name(this, "ExpiredTokenException"); + } + name = "ExpiredTokenException"; + $fault = "client"; + /** + * @internal + */ + constructor(opts) { + super({ + name: "ExpiredTokenException", + $fault: "client", + ...opts + }); + Object.setPrototypeOf(this, _ExpiredTokenException.prototype); + } +}; +var MalformedPolicyDocumentException = class _MalformedPolicyDocumentException extends STSServiceException { + static { + __name(this, "MalformedPolicyDocumentException"); + } + name = "MalformedPolicyDocumentException"; + $fault = "client"; + /** + * @internal + */ + constructor(opts) { + super({ + name: "MalformedPolicyDocumentException", + $fault: "client", + ...opts + }); + Object.setPrototypeOf(this, _MalformedPolicyDocumentException.prototype); + } +}; +var 
PackedPolicyTooLargeException = class _PackedPolicyTooLargeException extends STSServiceException { + static { + __name(this, "PackedPolicyTooLargeException"); + } + name = "PackedPolicyTooLargeException"; + $fault = "client"; + /** + * @internal + */ + constructor(opts) { + super({ + name: "PackedPolicyTooLargeException", + $fault: "client", + ...opts + }); + Object.setPrototypeOf(this, _PackedPolicyTooLargeException.prototype); + } +}; +var RegionDisabledException = class _RegionDisabledException extends STSServiceException { + static { + __name(this, "RegionDisabledException"); + } + name = "RegionDisabledException"; + $fault = "client"; + /** + * @internal + */ + constructor(opts) { + super({ + name: "RegionDisabledException", + $fault: "client", + ...opts + }); + Object.setPrototypeOf(this, _RegionDisabledException.prototype); + } +}; +var IDPRejectedClaimException = class _IDPRejectedClaimException extends STSServiceException { + static { + __name(this, "IDPRejectedClaimException"); + } + name = "IDPRejectedClaimException"; + $fault = "client"; + /** + * @internal + */ + constructor(opts) { + super({ + name: "IDPRejectedClaimException", + $fault: "client", + ...opts + }); + Object.setPrototypeOf(this, _IDPRejectedClaimException.prototype); + } +}; +var InvalidIdentityTokenException = class _InvalidIdentityTokenException extends STSServiceException { + static { + __name(this, "InvalidIdentityTokenException"); + } + name = "InvalidIdentityTokenException"; + $fault = "client"; + /** + * @internal + */ + constructor(opts) { + super({ + name: "InvalidIdentityTokenException", + $fault: "client", + ...opts + }); + Object.setPrototypeOf(this, _InvalidIdentityTokenException.prototype); + } +}; +var AssumeRoleWithWebIdentityRequestFilterSensitiveLog = /* @__PURE__ */ __name((obj) => ({ + ...obj, + ...obj.WebIdentityToken && { WebIdentityToken: import_smithy_client2.SENSITIVE_STRING } +}), "AssumeRoleWithWebIdentityRequestFilterSensitiveLog"); +var 
AssumeRoleWithWebIdentityResponseFilterSensitiveLog = /* @__PURE__ */ __name((obj) => ({ + ...obj, + ...obj.Credentials && { Credentials: CredentialsFilterSensitiveLog(obj.Credentials) } +}), "AssumeRoleWithWebIdentityResponseFilterSensitiveLog"); +var IDPCommunicationErrorException = class _IDPCommunicationErrorException extends STSServiceException { + static { + __name(this, "IDPCommunicationErrorException"); + } + name = "IDPCommunicationErrorException"; + $fault = "client"; + /** + * @internal + */ + constructor(opts) { + super({ + name: "IDPCommunicationErrorException", + $fault: "client", + ...opts + }); + Object.setPrototypeOf(this, _IDPCommunicationErrorException.prototype); + } +}; + +// src/submodules/sts/protocols/Aws_query.ts +var import_core = require("@aws-sdk/core"); +var import_protocol_http = require("@smithy/protocol-http"); +var import_smithy_client3 = require("@smithy/smithy-client"); +var se_AssumeRoleCommand = /* @__PURE__ */ __name(async (input, context) => { + const headers = SHARED_HEADERS; + let body; + body = buildFormUrlencodedString({ + ...se_AssumeRoleRequest(input, context), + [_A]: _AR, + [_V]: _ + }); + return buildHttpRpcRequest(context, headers, "/", void 0, body); +}, "se_AssumeRoleCommand"); +var se_AssumeRoleWithWebIdentityCommand = /* @__PURE__ */ __name(async (input, context) => { + const headers = SHARED_HEADERS; + let body; + body = buildFormUrlencodedString({ + ...se_AssumeRoleWithWebIdentityRequest(input, context), + [_A]: _ARWWI, + [_V]: _ + }); + return buildHttpRpcRequest(context, headers, "/", void 0, body); +}, "se_AssumeRoleWithWebIdentityCommand"); +var de_AssumeRoleCommand = /* @__PURE__ */ __name(async (output, context) => { + if (output.statusCode >= 300) { + return de_CommandError(output, context); + } + const data = await (0, import_core.parseXmlBody)(output.body, context); + let contents = {}; + contents = de_AssumeRoleResponse(data.AssumeRoleResult, context); + const response = { + $metadata: 
deserializeMetadata(output), + ...contents + }; + return response; +}, "de_AssumeRoleCommand"); +var de_AssumeRoleWithWebIdentityCommand = /* @__PURE__ */ __name(async (output, context) => { + if (output.statusCode >= 300) { + return de_CommandError(output, context); + } + const data = await (0, import_core.parseXmlBody)(output.body, context); + let contents = {}; + contents = de_AssumeRoleWithWebIdentityResponse(data.AssumeRoleWithWebIdentityResult, context); + const response = { + $metadata: deserializeMetadata(output), + ...contents + }; + return response; +}, "de_AssumeRoleWithWebIdentityCommand"); +var de_CommandError = /* @__PURE__ */ __name(async (output, context) => { + const parsedOutput = { + ...output, + body: await (0, import_core.parseXmlErrorBody)(output.body, context) + }; + const errorCode = loadQueryErrorCode(output, parsedOutput.body); + switch (errorCode) { + case "ExpiredTokenException": + case "com.amazonaws.sts#ExpiredTokenException": + throw await de_ExpiredTokenExceptionRes(parsedOutput, context); + case "MalformedPolicyDocument": + case "com.amazonaws.sts#MalformedPolicyDocumentException": + throw await de_MalformedPolicyDocumentExceptionRes(parsedOutput, context); + case "PackedPolicyTooLarge": + case "com.amazonaws.sts#PackedPolicyTooLargeException": + throw await de_PackedPolicyTooLargeExceptionRes(parsedOutput, context); + case "RegionDisabledException": + case "com.amazonaws.sts#RegionDisabledException": + throw await de_RegionDisabledExceptionRes(parsedOutput, context); + case "IDPCommunicationError": + case "com.amazonaws.sts#IDPCommunicationErrorException": + throw await de_IDPCommunicationErrorExceptionRes(parsedOutput, context); + case "IDPRejectedClaim": + case "com.amazonaws.sts#IDPRejectedClaimException": + throw await de_IDPRejectedClaimExceptionRes(parsedOutput, context); + case "InvalidIdentityToken": + case "com.amazonaws.sts#InvalidIdentityTokenException": + throw await de_InvalidIdentityTokenExceptionRes(parsedOutput, 
context); + default: + const parsedBody = parsedOutput.body; + return throwDefaultError({ + output, + parsedBody: parsedBody.Error, + errorCode + }); + } +}, "de_CommandError"); +var de_ExpiredTokenExceptionRes = /* @__PURE__ */ __name(async (parsedOutput, context) => { + const body = parsedOutput.body; + const deserialized = de_ExpiredTokenException(body.Error, context); + const exception = new ExpiredTokenException({ + $metadata: deserializeMetadata(parsedOutput), + ...deserialized + }); + return (0, import_smithy_client3.decorateServiceException)(exception, body); +}, "de_ExpiredTokenExceptionRes"); +var de_IDPCommunicationErrorExceptionRes = /* @__PURE__ */ __name(async (parsedOutput, context) => { + const body = parsedOutput.body; + const deserialized = de_IDPCommunicationErrorException(body.Error, context); + const exception = new IDPCommunicationErrorException({ + $metadata: deserializeMetadata(parsedOutput), + ...deserialized + }); + return (0, import_smithy_client3.decorateServiceException)(exception, body); +}, "de_IDPCommunicationErrorExceptionRes"); +var de_IDPRejectedClaimExceptionRes = /* @__PURE__ */ __name(async (parsedOutput, context) => { + const body = parsedOutput.body; + const deserialized = de_IDPRejectedClaimException(body.Error, context); + const exception = new IDPRejectedClaimException({ + $metadata: deserializeMetadata(parsedOutput), + ...deserialized + }); + return (0, import_smithy_client3.decorateServiceException)(exception, body); +}, "de_IDPRejectedClaimExceptionRes"); +var de_InvalidIdentityTokenExceptionRes = /* @__PURE__ */ __name(async (parsedOutput, context) => { + const body = parsedOutput.body; + const deserialized = de_InvalidIdentityTokenException(body.Error, context); + const exception = new InvalidIdentityTokenException({ + $metadata: deserializeMetadata(parsedOutput), + ...deserialized + }); + return (0, import_smithy_client3.decorateServiceException)(exception, body); +}, "de_InvalidIdentityTokenExceptionRes"); +var 
de_MalformedPolicyDocumentExceptionRes = /* @__PURE__ */ __name(async (parsedOutput, context) => { + const body = parsedOutput.body; + const deserialized = de_MalformedPolicyDocumentException(body.Error, context); + const exception = new MalformedPolicyDocumentException({ + $metadata: deserializeMetadata(parsedOutput), + ...deserialized + }); + return (0, import_smithy_client3.decorateServiceException)(exception, body); +}, "de_MalformedPolicyDocumentExceptionRes"); +var de_PackedPolicyTooLargeExceptionRes = /* @__PURE__ */ __name(async (parsedOutput, context) => { + const body = parsedOutput.body; + const deserialized = de_PackedPolicyTooLargeException(body.Error, context); + const exception = new PackedPolicyTooLargeException({ + $metadata: deserializeMetadata(parsedOutput), + ...deserialized + }); + return (0, import_smithy_client3.decorateServiceException)(exception, body); +}, "de_PackedPolicyTooLargeExceptionRes"); +var de_RegionDisabledExceptionRes = /* @__PURE__ */ __name(async (parsedOutput, context) => { + const body = parsedOutput.body; + const deserialized = de_RegionDisabledException(body.Error, context); + const exception = new RegionDisabledException({ + $metadata: deserializeMetadata(parsedOutput), + ...deserialized + }); + return (0, import_smithy_client3.decorateServiceException)(exception, body); +}, "de_RegionDisabledExceptionRes"); +var se_AssumeRoleRequest = /* @__PURE__ */ __name((input, context) => { + const entries = {}; + if (input[_RA] != null) { + entries[_RA] = input[_RA]; + } + if (input[_RSN] != null) { + entries[_RSN] = input[_RSN]; + } + if (input[_PA] != null) { + const memberEntries = se_policyDescriptorListType(input[_PA], context); + if (input[_PA]?.length === 0) { + entries.PolicyArns = []; + } + Object.entries(memberEntries).forEach(([key, value]) => { + const loc = `PolicyArns.${key}`; + entries[loc] = value; + }); + } + if (input[_P] != null) { + entries[_P] = input[_P]; + } + if (input[_DS] != null) { + entries[_DS] = 
input[_DS]; + } + if (input[_T] != null) { + const memberEntries = se_tagListType(input[_T], context); + if (input[_T]?.length === 0) { + entries.Tags = []; + } + Object.entries(memberEntries).forEach(([key, value]) => { + const loc = `Tags.${key}`; + entries[loc] = value; + }); + } + if (input[_TTK] != null) { + const memberEntries = se_tagKeyListType(input[_TTK], context); + if (input[_TTK]?.length === 0) { + entries.TransitiveTagKeys = []; + } + Object.entries(memberEntries).forEach(([key, value]) => { + const loc = `TransitiveTagKeys.${key}`; + entries[loc] = value; + }); + } + if (input[_EI] != null) { + entries[_EI] = input[_EI]; + } + if (input[_SN] != null) { + entries[_SN] = input[_SN]; + } + if (input[_TC] != null) { + entries[_TC] = input[_TC]; + } + if (input[_SI] != null) { + entries[_SI] = input[_SI]; + } + if (input[_PC] != null) { + const memberEntries = se_ProvidedContextsListType(input[_PC], context); + if (input[_PC]?.length === 0) { + entries.ProvidedContexts = []; + } + Object.entries(memberEntries).forEach(([key, value]) => { + const loc = `ProvidedContexts.${key}`; + entries[loc] = value; + }); + } + return entries; +}, "se_AssumeRoleRequest"); +var se_AssumeRoleWithWebIdentityRequest = /* @__PURE__ */ __name((input, context) => { + const entries = {}; + if (input[_RA] != null) { + entries[_RA] = input[_RA]; + } + if (input[_RSN] != null) { + entries[_RSN] = input[_RSN]; + } + if (input[_WIT] != null) { + entries[_WIT] = input[_WIT]; + } + if (input[_PI] != null) { + entries[_PI] = input[_PI]; + } + if (input[_PA] != null) { + const memberEntries = se_policyDescriptorListType(input[_PA], context); + if (input[_PA]?.length === 0) { + entries.PolicyArns = []; + } + Object.entries(memberEntries).forEach(([key, value]) => { + const loc = `PolicyArns.${key}`; + entries[loc] = value; + }); + } + if (input[_P] != null) { + entries[_P] = input[_P]; + } + if (input[_DS] != null) { + entries[_DS] = input[_DS]; + } + return entries; +}, 
"se_AssumeRoleWithWebIdentityRequest"); +var se_policyDescriptorListType = /* @__PURE__ */ __name((input, context) => { + const entries = {}; + let counter = 1; + for (const entry of input) { + if (entry === null) { + continue; + } + const memberEntries = se_PolicyDescriptorType(entry, context); + Object.entries(memberEntries).forEach(([key, value]) => { + entries[`member.${counter}.${key}`] = value; + }); + counter++; + } + return entries; +}, "se_policyDescriptorListType"); +var se_PolicyDescriptorType = /* @__PURE__ */ __name((input, context) => { + const entries = {}; + if (input[_a] != null) { + entries[_a] = input[_a]; + } + return entries; +}, "se_PolicyDescriptorType"); +var se_ProvidedContext = /* @__PURE__ */ __name((input, context) => { + const entries = {}; + if (input[_PAr] != null) { + entries[_PAr] = input[_PAr]; + } + if (input[_CA] != null) { + entries[_CA] = input[_CA]; + } + return entries; +}, "se_ProvidedContext"); +var se_ProvidedContextsListType = /* @__PURE__ */ __name((input, context) => { + const entries = {}; + let counter = 1; + for (const entry of input) { + if (entry === null) { + continue; + } + const memberEntries = se_ProvidedContext(entry, context); + Object.entries(memberEntries).forEach(([key, value]) => { + entries[`member.${counter}.${key}`] = value; + }); + counter++; + } + return entries; +}, "se_ProvidedContextsListType"); +var se_Tag = /* @__PURE__ */ __name((input, context) => { + const entries = {}; + if (input[_K] != null) { + entries[_K] = input[_K]; + } + if (input[_Va] != null) { + entries[_Va] = input[_Va]; + } + return entries; +}, "se_Tag"); +var se_tagKeyListType = /* @__PURE__ */ __name((input, context) => { + const entries = {}; + let counter = 1; + for (const entry of input) { + if (entry === null) { + continue; + } + entries[`member.${counter}`] = entry; + counter++; + } + return entries; +}, "se_tagKeyListType"); +var se_tagListType = /* @__PURE__ */ __name((input, context) => { + const entries = {}; + let 
counter = 1; + for (const entry of input) { + if (entry === null) { + continue; + } + const memberEntries = se_Tag(entry, context); + Object.entries(memberEntries).forEach(([key, value]) => { + entries[`member.${counter}.${key}`] = value; + }); + counter++; + } + return entries; +}, "se_tagListType"); +var de_AssumedRoleUser = /* @__PURE__ */ __name((output, context) => { + const contents = {}; + if (output[_ARI] != null) { + contents[_ARI] = (0, import_smithy_client3.expectString)(output[_ARI]); + } + if (output[_Ar] != null) { + contents[_Ar] = (0, import_smithy_client3.expectString)(output[_Ar]); + } + return contents; +}, "de_AssumedRoleUser"); +var de_AssumeRoleResponse = /* @__PURE__ */ __name((output, context) => { + const contents = {}; + if (output[_C] != null) { + contents[_C] = de_Credentials(output[_C], context); + } + if (output[_ARU] != null) { + contents[_ARU] = de_AssumedRoleUser(output[_ARU], context); + } + if (output[_PPS] != null) { + contents[_PPS] = (0, import_smithy_client3.strictParseInt32)(output[_PPS]); + } + if (output[_SI] != null) { + contents[_SI] = (0, import_smithy_client3.expectString)(output[_SI]); + } + return contents; +}, "de_AssumeRoleResponse"); +var de_AssumeRoleWithWebIdentityResponse = /* @__PURE__ */ __name((output, context) => { + const contents = {}; + if (output[_C] != null) { + contents[_C] = de_Credentials(output[_C], context); + } + if (output[_SFWIT] != null) { + contents[_SFWIT] = (0, import_smithy_client3.expectString)(output[_SFWIT]); + } + if (output[_ARU] != null) { + contents[_ARU] = de_AssumedRoleUser(output[_ARU], context); + } + if (output[_PPS] != null) { + contents[_PPS] = (0, import_smithy_client3.strictParseInt32)(output[_PPS]); + } + if (output[_Pr] != null) { + contents[_Pr] = (0, import_smithy_client3.expectString)(output[_Pr]); + } + if (output[_Au] != null) { + contents[_Au] = (0, import_smithy_client3.expectString)(output[_Au]); + } + if (output[_SI] != null) { + contents[_SI] = (0, 
import_smithy_client3.expectString)(output[_SI]); + } + return contents; +}, "de_AssumeRoleWithWebIdentityResponse"); +var de_Credentials = /* @__PURE__ */ __name((output, context) => { + const contents = {}; + if (output[_AKI] != null) { + contents[_AKI] = (0, import_smithy_client3.expectString)(output[_AKI]); + } + if (output[_SAK] != null) { + contents[_SAK] = (0, import_smithy_client3.expectString)(output[_SAK]); + } + if (output[_ST] != null) { + contents[_ST] = (0, import_smithy_client3.expectString)(output[_ST]); + } + if (output[_E] != null) { + contents[_E] = (0, import_smithy_client3.expectNonNull)((0, import_smithy_client3.parseRfc3339DateTimeWithOffset)(output[_E])); + } + return contents; +}, "de_Credentials"); +var de_ExpiredTokenException = /* @__PURE__ */ __name((output, context) => { + const contents = {}; + if (output[_m] != null) { + contents[_m] = (0, import_smithy_client3.expectString)(output[_m]); + } + return contents; +}, "de_ExpiredTokenException"); +var de_IDPCommunicationErrorException = /* @__PURE__ */ __name((output, context) => { + const contents = {}; + if (output[_m] != null) { + contents[_m] = (0, import_smithy_client3.expectString)(output[_m]); + } + return contents; +}, "de_IDPCommunicationErrorException"); +var de_IDPRejectedClaimException = /* @__PURE__ */ __name((output, context) => { + const contents = {}; + if (output[_m] != null) { + contents[_m] = (0, import_smithy_client3.expectString)(output[_m]); + } + return contents; +}, "de_IDPRejectedClaimException"); +var de_InvalidIdentityTokenException = /* @__PURE__ */ __name((output, context) => { + const contents = {}; + if (output[_m] != null) { + contents[_m] = (0, import_smithy_client3.expectString)(output[_m]); + } + return contents; +}, "de_InvalidIdentityTokenException"); +var de_MalformedPolicyDocumentException = /* @__PURE__ */ __name((output, context) => { + const contents = {}; + if (output[_m] != null) { + contents[_m] = (0, 
import_smithy_client3.expectString)(output[_m]); + } + return contents; +}, "de_MalformedPolicyDocumentException"); +var de_PackedPolicyTooLargeException = /* @__PURE__ */ __name((output, context) => { + const contents = {}; + if (output[_m] != null) { + contents[_m] = (0, import_smithy_client3.expectString)(output[_m]); + } + return contents; +}, "de_PackedPolicyTooLargeException"); +var de_RegionDisabledException = /* @__PURE__ */ __name((output, context) => { + const contents = {}; + if (output[_m] != null) { + contents[_m] = (0, import_smithy_client3.expectString)(output[_m]); + } + return contents; +}, "de_RegionDisabledException"); +var deserializeMetadata = /* @__PURE__ */ __name((output) => ({ + httpStatusCode: output.statusCode, + requestId: output.headers["x-amzn-requestid"] ?? output.headers["x-amzn-request-id"] ?? output.headers["x-amz-request-id"], + extendedRequestId: output.headers["x-amz-id-2"], + cfId: output.headers["x-amz-cf-id"] +}), "deserializeMetadata"); +var throwDefaultError = (0, import_smithy_client3.withBaseException)(STSServiceException); +var buildHttpRpcRequest = /* @__PURE__ */ __name(async (context, headers, path, resolvedHostname, body) => { + const { hostname, protocol = "https", port, path: basePath } = await context.endpoint(); + const contents = { + protocol, + hostname, + port, + method: "POST", + path: basePath.endsWith("/") ? 
basePath.slice(0, -1) + path : basePath + path, + headers + }; + if (resolvedHostname !== void 0) { + contents.hostname = resolvedHostname; + } + if (body !== void 0) { + contents.body = body; + } + return new import_protocol_http.HttpRequest(contents); +}, "buildHttpRpcRequest"); +var SHARED_HEADERS = { + "content-type": "application/x-www-form-urlencoded" +}; +var _ = "2011-06-15"; +var _A = "Action"; +var _AKI = "AccessKeyId"; +var _AR = "AssumeRole"; +var _ARI = "AssumedRoleId"; +var _ARU = "AssumedRoleUser"; +var _ARWWI = "AssumeRoleWithWebIdentity"; +var _Ar = "Arn"; +var _Au = "Audience"; +var _C = "Credentials"; +var _CA = "ContextAssertion"; +var _DS = "DurationSeconds"; +var _E = "Expiration"; +var _EI = "ExternalId"; +var _K = "Key"; +var _P = "Policy"; +var _PA = "PolicyArns"; +var _PAr = "ProviderArn"; +var _PC = "ProvidedContexts"; +var _PI = "ProviderId"; +var _PPS = "PackedPolicySize"; +var _Pr = "Provider"; +var _RA = "RoleArn"; +var _RSN = "RoleSessionName"; +var _SAK = "SecretAccessKey"; +var _SFWIT = "SubjectFromWebIdentityToken"; +var _SI = "SourceIdentity"; +var _SN = "SerialNumber"; +var _ST = "SessionToken"; +var _T = "Tags"; +var _TC = "TokenCode"; +var _TTK = "TransitiveTagKeys"; +var _V = "Version"; +var _Va = "Value"; +var _WIT = "WebIdentityToken"; +var _a = "arn"; +var _m = "message"; +var buildFormUrlencodedString = /* @__PURE__ */ __name((formEntries) => Object.entries(formEntries).map(([key, value]) => (0, import_smithy_client3.extendedEncodeURIComponent)(key) + "=" + (0, import_smithy_client3.extendedEncodeURIComponent)(value)).join("&"), "buildFormUrlencodedString"); +var loadQueryErrorCode = /* @__PURE__ */ __name((output, data) => { + if (data.Error?.Code !== void 0) { + return data.Error.Code; + } + if (output.statusCode == 404) { + return "NotFound"; + } +}, "loadQueryErrorCode"); + +// src/submodules/sts/commands/AssumeRoleCommand.ts +var AssumeRoleCommand = class extends 
import_smithy_client4.Command.classBuilder().ep(import_EndpointParameters.commonParams).m(function(Command, cs, config, o) { + return [ + (0, import_middleware_serde.getSerdePlugin)(config, this.serialize, this.deserialize), + (0, import_middleware_endpoint.getEndpointPlugin)(config, Command.getEndpointParameterInstructions()) + ]; +}).s("AWSSecurityTokenServiceV20110615", "AssumeRole", {}).n("STSClient", "AssumeRoleCommand").f(void 0, AssumeRoleResponseFilterSensitiveLog).ser(se_AssumeRoleCommand).de(de_AssumeRoleCommand).build() { + static { + __name(this, "AssumeRoleCommand"); + } +}; + +// src/submodules/sts/commands/AssumeRoleWithWebIdentityCommand.ts +var import_middleware_endpoint2 = require("@smithy/middleware-endpoint"); +var import_middleware_serde2 = require("@smithy/middleware-serde"); +var import_smithy_client5 = require("@smithy/smithy-client"); +var import_EndpointParameters2 = require("./endpoint/EndpointParameters"); +var AssumeRoleWithWebIdentityCommand = class extends import_smithy_client5.Command.classBuilder().ep(import_EndpointParameters2.commonParams).m(function(Command, cs, config, o) { + return [ + (0, import_middleware_serde2.getSerdePlugin)(config, this.serialize, this.deserialize), + (0, import_middleware_endpoint2.getEndpointPlugin)(config, Command.getEndpointParameterInstructions()) + ]; +}).s("AWSSecurityTokenServiceV20110615", "AssumeRoleWithWebIdentity", {}).n("STSClient", "AssumeRoleWithWebIdentityCommand").f(AssumeRoleWithWebIdentityRequestFilterSensitiveLog, AssumeRoleWithWebIdentityResponseFilterSensitiveLog).ser(se_AssumeRoleWithWebIdentityCommand).de(de_AssumeRoleWithWebIdentityCommand).build() { + static { + __name(this, "AssumeRoleWithWebIdentityCommand"); + } +}; + +// src/submodules/sts/STS.ts +var import_STSClient = require("./STSClient"); +var commands = { + AssumeRoleCommand, + AssumeRoleWithWebIdentityCommand +}; +var STS = class extends import_STSClient.STSClient { + static { + __name(this, "STS"); + } +}; +(0, 
import_smithy_client6.createAggregatedClient)(commands, STS); + +// src/submodules/sts/index.ts +var import_EndpointParameters3 = require("./endpoint/EndpointParameters"); + +// src/submodules/sts/defaultStsRoleAssumers.ts +var import_client = require("@aws-sdk/core/client"); +var ASSUME_ROLE_DEFAULT_REGION = "us-east-1"; +var getAccountIdFromAssumedRoleUser = /* @__PURE__ */ __name((assumedRoleUser) => { + if (typeof assumedRoleUser?.Arn === "string") { + const arnComponents = assumedRoleUser.Arn.split(":"); + if (arnComponents.length > 4 && arnComponents[4] !== "") { + return arnComponents[4]; + } + } + return void 0; +}, "getAccountIdFromAssumedRoleUser"); +var resolveRegion = /* @__PURE__ */ __name(async (_region, _parentRegion, credentialProviderLogger) => { + const region = typeof _region === "function" ? await _region() : _region; + const parentRegion = typeof _parentRegion === "function" ? await _parentRegion() : _parentRegion; + credentialProviderLogger?.debug?.( + "@aws-sdk/client-sts::resolveRegion", + "accepting first of:", + `${region} (provider)`, + `${parentRegion} (parent client)`, + `${ASSUME_ROLE_DEFAULT_REGION} (STS default)` + ); + return region ?? parentRegion ?? 
ASSUME_ROLE_DEFAULT_REGION; +}, "resolveRegion"); +var getDefaultRoleAssumer = /* @__PURE__ */ __name((stsOptions, STSClient3) => { + let stsClient; + let closureSourceCreds; + return async (sourceCreds, params) => { + closureSourceCreds = sourceCreds; + if (!stsClient) { + const { + logger = stsOptions?.parentClientConfig?.logger, + region, + requestHandler = stsOptions?.parentClientConfig?.requestHandler, + credentialProviderLogger + } = stsOptions; + const resolvedRegion = await resolveRegion( + region, + stsOptions?.parentClientConfig?.region, + credentialProviderLogger + ); + const isCompatibleRequestHandler = !isH2(requestHandler); + stsClient = new STSClient3({ + profile: stsOptions?.parentClientConfig?.profile, + // A hack to make sts client uses the credential in current closure. + credentialDefaultProvider: /* @__PURE__ */ __name(() => async () => closureSourceCreds, "credentialDefaultProvider"), + region: resolvedRegion, + requestHandler: isCompatibleRequestHandler ? requestHandler : void 0, + logger + }); + } + const { Credentials: Credentials2, AssumedRoleUser: AssumedRoleUser2 } = await stsClient.send(new AssumeRoleCommand(params)); + if (!Credentials2 || !Credentials2.AccessKeyId || !Credentials2.SecretAccessKey) { + throw new Error(`Invalid response from STS.assumeRole call with role ${params.RoleArn}`); + } + const accountId = getAccountIdFromAssumedRoleUser(AssumedRoleUser2); + const credentials = { + accessKeyId: Credentials2.AccessKeyId, + secretAccessKey: Credentials2.SecretAccessKey, + sessionToken: Credentials2.SessionToken, + expiration: Credentials2.Expiration, + // TODO(credentialScope): access normally when shape is updated. 
+ ...Credentials2.CredentialScope && { credentialScope: Credentials2.CredentialScope }, + ...accountId && { accountId } + }; + (0, import_client.setCredentialFeature)(credentials, "CREDENTIALS_STS_ASSUME_ROLE", "i"); + return credentials; + }; +}, "getDefaultRoleAssumer"); +var getDefaultRoleAssumerWithWebIdentity = /* @__PURE__ */ __name((stsOptions, STSClient3) => { + let stsClient; + return async (params) => { + if (!stsClient) { + const { + logger = stsOptions?.parentClientConfig?.logger, + region, + requestHandler = stsOptions?.parentClientConfig?.requestHandler, + credentialProviderLogger + } = stsOptions; + const resolvedRegion = await resolveRegion( + region, + stsOptions?.parentClientConfig?.region, + credentialProviderLogger + ); + const isCompatibleRequestHandler = !isH2(requestHandler); + stsClient = new STSClient3({ + profile: stsOptions?.parentClientConfig?.profile, + region: resolvedRegion, + requestHandler: isCompatibleRequestHandler ? requestHandler : void 0, + logger + }); + } + const { Credentials: Credentials2, AssumedRoleUser: AssumedRoleUser2 } = await stsClient.send(new AssumeRoleWithWebIdentityCommand(params)); + if (!Credentials2 || !Credentials2.AccessKeyId || !Credentials2.SecretAccessKey) { + throw new Error(`Invalid response from STS.assumeRoleWithWebIdentity call with role ${params.RoleArn}`); + } + const accountId = getAccountIdFromAssumedRoleUser(AssumedRoleUser2); + const credentials = { + accessKeyId: Credentials2.AccessKeyId, + secretAccessKey: Credentials2.SecretAccessKey, + sessionToken: Credentials2.SessionToken, + expiration: Credentials2.Expiration, + // TODO(credentialScope): access normally when shape is updated. 
+ ...Credentials2.CredentialScope && { credentialScope: Credentials2.CredentialScope }, + ...accountId && { accountId } + }; + if (accountId) { + (0, import_client.setCredentialFeature)(credentials, "RESOLVED_ACCOUNT_ID", "T"); + } + (0, import_client.setCredentialFeature)(credentials, "CREDENTIALS_STS_ASSUME_ROLE_WEB_ID", "k"); + return credentials; + }; +}, "getDefaultRoleAssumerWithWebIdentity"); +var isH2 = /* @__PURE__ */ __name((requestHandler) => { + return requestHandler?.metadata?.handlerProtocol === "h2"; +}, "isH2"); + +// src/submodules/sts/defaultRoleAssumers.ts +var import_STSClient2 = require("./STSClient"); +var getCustomizableStsClientCtor = /* @__PURE__ */ __name((baseCtor, customizations) => { + if (!customizations) return baseCtor; + else + return class CustomizableSTSClient extends baseCtor { + static { + __name(this, "CustomizableSTSClient"); + } + constructor(config) { + super(config); + for (const customization of customizations) { + this.middlewareStack.use(customization); + } + } + }; +}, "getCustomizableStsClientCtor"); +var getDefaultRoleAssumer2 = /* @__PURE__ */ __name((stsOptions = {}, stsPlugins) => getDefaultRoleAssumer(stsOptions, getCustomizableStsClientCtor(import_STSClient2.STSClient, stsPlugins)), "getDefaultRoleAssumer"); +var getDefaultRoleAssumerWithWebIdentity2 = /* @__PURE__ */ __name((stsOptions = {}, stsPlugins) => getDefaultRoleAssumerWithWebIdentity(stsOptions, getCustomizableStsClientCtor(import_STSClient2.STSClient, stsPlugins)), "getDefaultRoleAssumerWithWebIdentity"); +var decorateDefaultCredentialProvider = /* @__PURE__ */ __name((provider) => (input) => provider({ + roleAssumer: getDefaultRoleAssumer2(input), + roleAssumerWithWebIdentity: getDefaultRoleAssumerWithWebIdentity2(input), + ...input +}), "decorateDefaultCredentialProvider"); +// Annotate the CommonJS export names for ESM import in node: +0 && (module.exports = { + AssumeRoleCommand, + AssumeRoleResponseFilterSensitiveLog, + 
AssumeRoleWithWebIdentityCommand, + AssumeRoleWithWebIdentityRequestFilterSensitiveLog, + AssumeRoleWithWebIdentityResponseFilterSensitiveLog, + ClientInputEndpointParameters, + CredentialsFilterSensitiveLog, + ExpiredTokenException, + IDPCommunicationErrorException, + IDPRejectedClaimException, + InvalidIdentityTokenException, + MalformedPolicyDocumentException, + PackedPolicyTooLargeException, + RegionDisabledException, + STS, + STSServiceException, + decorateDefaultCredentialProvider, + getDefaultRoleAssumer, + getDefaultRoleAssumerWithWebIdentity, + ...require("./STSClient") +}); diff --git a/amplify/functions/deleteDocument/node_modules/@aws-sdk/nested-clients/dist-cjs/submodules/sts/runtimeConfig.browser.js b/amplify/functions/deleteDocument/node_modules/@aws-sdk/nested-clients/dist-cjs/submodules/sts/runtimeConfig.browser.js new file mode 100644 index 0000000..63cedb1 --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@aws-sdk/nested-clients/dist-cjs/submodules/sts/runtimeConfig.browser.js @@ -0,0 +1,39 @@ +"use strict"; +Object.defineProperty(exports, "__esModule", { value: true }); +exports.getRuntimeConfig = void 0; +const tslib_1 = require("tslib"); +const package_json_1 = tslib_1.__importDefault(require("../../../package.json")); +const sha256_browser_1 = require("@aws-crypto/sha256-browser"); +const util_user_agent_browser_1 = require("@aws-sdk/util-user-agent-browser"); +const config_resolver_1 = require("@smithy/config-resolver"); +const fetch_http_handler_1 = require("@smithy/fetch-http-handler"); +const invalid_dependency_1 = require("@smithy/invalid-dependency"); +const util_body_length_browser_1 = require("@smithy/util-body-length-browser"); +const util_retry_1 = require("@smithy/util-retry"); +const runtimeConfig_shared_1 = require("./runtimeConfig.shared"); +const smithy_client_1 = require("@smithy/smithy-client"); +const util_defaults_mode_browser_1 = require("@smithy/util-defaults-mode-browser"); +const getRuntimeConfig = 
(config) => { + const defaultsMode = (0, util_defaults_mode_browser_1.resolveDefaultsModeConfig)(config); + const defaultConfigProvider = () => defaultsMode().then(smithy_client_1.loadConfigsForDefaultMode); + const clientSharedValues = (0, runtimeConfig_shared_1.getRuntimeConfig)(config); + return { + ...clientSharedValues, + ...config, + runtime: "browser", + defaultsMode, + bodyLengthChecker: config?.bodyLengthChecker ?? util_body_length_browser_1.calculateBodyLength, + credentialDefaultProvider: config?.credentialDefaultProvider ?? ((_) => () => Promise.reject(new Error("Credential is missing"))), + defaultUserAgentProvider: config?.defaultUserAgentProvider ?? + (0, util_user_agent_browser_1.createDefaultUserAgentProvider)({ serviceId: clientSharedValues.serviceId, clientVersion: package_json_1.default.version }), + maxAttempts: config?.maxAttempts ?? util_retry_1.DEFAULT_MAX_ATTEMPTS, + region: config?.region ?? (0, invalid_dependency_1.invalidProvider)("Region is missing"), + requestHandler: fetch_http_handler_1.FetchHttpHandler.create(config?.requestHandler ?? defaultConfigProvider), + retryMode: config?.retryMode ?? (async () => (await defaultConfigProvider()).retryMode || util_retry_1.DEFAULT_RETRY_MODE), + sha256: config?.sha256 ?? sha256_browser_1.Sha256, + streamCollector: config?.streamCollector ?? fetch_http_handler_1.streamCollector, + useDualstackEndpoint: config?.useDualstackEndpoint ?? (() => Promise.resolve(config_resolver_1.DEFAULT_USE_DUALSTACK_ENDPOINT)), + useFipsEndpoint: config?.useFipsEndpoint ?? 
(() => Promise.resolve(config_resolver_1.DEFAULT_USE_FIPS_ENDPOINT)), + }; +}; +exports.getRuntimeConfig = getRuntimeConfig; diff --git a/amplify/functions/deleteDocument/node_modules/@aws-sdk/nested-clients/dist-cjs/submodules/sts/runtimeConfig.js b/amplify/functions/deleteDocument/node_modules/@aws-sdk/nested-clients/dist-cjs/submodules/sts/runtimeConfig.js new file mode 100644 index 0000000..de3b0e7 --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@aws-sdk/nested-clients/dist-cjs/submodules/sts/runtimeConfig.js @@ -0,0 +1,65 @@ +"use strict"; +Object.defineProperty(exports, "__esModule", { value: true }); +exports.getRuntimeConfig = void 0; +const tslib_1 = require("tslib"); +const package_json_1 = tslib_1.__importDefault(require("../../../package.json")); +const core_1 = require("@aws-sdk/core"); +const util_user_agent_node_1 = require("@aws-sdk/util-user-agent-node"); +const config_resolver_1 = require("@smithy/config-resolver"); +const core_2 = require("@smithy/core"); +const hash_node_1 = require("@smithy/hash-node"); +const middleware_retry_1 = require("@smithy/middleware-retry"); +const node_config_provider_1 = require("@smithy/node-config-provider"); +const node_http_handler_1 = require("@smithy/node-http-handler"); +const util_body_length_node_1 = require("@smithy/util-body-length-node"); +const util_retry_1 = require("@smithy/util-retry"); +const runtimeConfig_shared_1 = require("./runtimeConfig.shared"); +const smithy_client_1 = require("@smithy/smithy-client"); +const util_defaults_mode_node_1 = require("@smithy/util-defaults-mode-node"); +const smithy_client_2 = require("@smithy/smithy-client"); +const getRuntimeConfig = (config) => { + (0, smithy_client_2.emitWarningIfUnsupportedVersion)(process.version); + const defaultsMode = (0, util_defaults_mode_node_1.resolveDefaultsModeConfig)(config); + const defaultConfigProvider = () => defaultsMode().then(smithy_client_1.loadConfigsForDefaultMode); + const clientSharedValues = (0, 
runtimeConfig_shared_1.getRuntimeConfig)(config); + (0, core_1.emitWarningIfUnsupportedVersion)(process.version); + const profileConfig = { profile: config?.profile }; + return { + ...clientSharedValues, + ...config, + runtime: "node", + defaultsMode, + authSchemePreference: config?.authSchemePreference ?? (0, node_config_provider_1.loadConfig)(core_1.NODE_AUTH_SCHEME_PREFERENCE_OPTIONS, profileConfig), + bodyLengthChecker: config?.bodyLengthChecker ?? util_body_length_node_1.calculateBodyLength, + defaultUserAgentProvider: config?.defaultUserAgentProvider ?? + (0, util_user_agent_node_1.createDefaultUserAgentProvider)({ serviceId: clientSharedValues.serviceId, clientVersion: package_json_1.default.version }), + httpAuthSchemes: config?.httpAuthSchemes ?? [ + { + schemeId: "aws.auth#sigv4", + identityProvider: (ipc) => ipc.getIdentityProvider("aws.auth#sigv4") || + (async (idProps) => await config.credentialDefaultProvider(idProps?.__config || {})()), + signer: new core_1.AwsSdkSigV4Signer(), + }, + { + schemeId: "smithy.api#noAuth", + identityProvider: (ipc) => ipc.getIdentityProvider("smithy.api#noAuth") || (async () => ({})), + signer: new core_2.NoAuthSigner(), + }, + ], + maxAttempts: config?.maxAttempts ?? (0, node_config_provider_1.loadConfig)(middleware_retry_1.NODE_MAX_ATTEMPT_CONFIG_OPTIONS, config), + region: config?.region ?? + (0, node_config_provider_1.loadConfig)(config_resolver_1.NODE_REGION_CONFIG_OPTIONS, { ...config_resolver_1.NODE_REGION_CONFIG_FILE_OPTIONS, ...profileConfig }), + requestHandler: node_http_handler_1.NodeHttpHandler.create(config?.requestHandler ?? defaultConfigProvider), + retryMode: config?.retryMode ?? + (0, node_config_provider_1.loadConfig)({ + ...middleware_retry_1.NODE_RETRY_MODE_CONFIG_OPTIONS, + default: async () => (await defaultConfigProvider()).retryMode || util_retry_1.DEFAULT_RETRY_MODE, + }, config), + sha256: config?.sha256 ?? hash_node_1.Hash.bind(null, "sha256"), + streamCollector: config?.streamCollector ?? 
node_http_handler_1.streamCollector, + useDualstackEndpoint: config?.useDualstackEndpoint ?? (0, node_config_provider_1.loadConfig)(config_resolver_1.NODE_USE_DUALSTACK_ENDPOINT_CONFIG_OPTIONS, profileConfig), + useFipsEndpoint: config?.useFipsEndpoint ?? (0, node_config_provider_1.loadConfig)(config_resolver_1.NODE_USE_FIPS_ENDPOINT_CONFIG_OPTIONS, profileConfig), + userAgentAppId: config?.userAgentAppId ?? (0, node_config_provider_1.loadConfig)(util_user_agent_node_1.NODE_APP_ID_CONFIG_OPTIONS, profileConfig), + }; +}; +exports.getRuntimeConfig = getRuntimeConfig; diff --git a/amplify/functions/deleteDocument/node_modules/@aws-sdk/nested-clients/dist-cjs/submodules/sts/runtimeConfig.native.js b/amplify/functions/deleteDocument/node_modules/@aws-sdk/nested-clients/dist-cjs/submodules/sts/runtimeConfig.native.js new file mode 100644 index 0000000..34c5f8e --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@aws-sdk/nested-clients/dist-cjs/submodules/sts/runtimeConfig.native.js @@ -0,0 +1,15 @@ +"use strict"; +Object.defineProperty(exports, "__esModule", { value: true }); +exports.getRuntimeConfig = void 0; +const sha256_js_1 = require("@aws-crypto/sha256-js"); +const runtimeConfig_browser_1 = require("./runtimeConfig.browser"); +const getRuntimeConfig = (config) => { + const browserDefaults = (0, runtimeConfig_browser_1.getRuntimeConfig)(config); + return { + ...browserDefaults, + ...config, + runtime: "react-native", + sha256: config?.sha256 ?? 
sha256_js_1.Sha256, + }; +}; +exports.getRuntimeConfig = getRuntimeConfig; diff --git a/amplify/functions/deleteDocument/node_modules/@aws-sdk/nested-clients/dist-cjs/submodules/sts/runtimeConfig.shared.js b/amplify/functions/deleteDocument/node_modules/@aws-sdk/nested-clients/dist-cjs/submodules/sts/runtimeConfig.shared.js new file mode 100644 index 0000000..1e03d8b --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@aws-sdk/nested-clients/dist-cjs/submodules/sts/runtimeConfig.shared.js @@ -0,0 +1,40 @@ +"use strict"; +Object.defineProperty(exports, "__esModule", { value: true }); +exports.getRuntimeConfig = void 0; +const core_1 = require("@aws-sdk/core"); +const core_2 = require("@smithy/core"); +const smithy_client_1 = require("@smithy/smithy-client"); +const url_parser_1 = require("@smithy/url-parser"); +const util_base64_1 = require("@smithy/util-base64"); +const util_utf8_1 = require("@smithy/util-utf8"); +const httpAuthSchemeProvider_1 = require("./auth/httpAuthSchemeProvider"); +const endpointResolver_1 = require("./endpoint/endpointResolver"); +const getRuntimeConfig = (config) => { + return { + apiVersion: "2011-06-15", + base64Decoder: config?.base64Decoder ?? util_base64_1.fromBase64, + base64Encoder: config?.base64Encoder ?? util_base64_1.toBase64, + disableHostPrefix: config?.disableHostPrefix ?? false, + endpointProvider: config?.endpointProvider ?? endpointResolver_1.defaultEndpointResolver, + extensions: config?.extensions ?? [], + httpAuthSchemeProvider: config?.httpAuthSchemeProvider ?? httpAuthSchemeProvider_1.defaultSTSHttpAuthSchemeProvider, + httpAuthSchemes: config?.httpAuthSchemes ?? 
[ + { + schemeId: "aws.auth#sigv4", + identityProvider: (ipc) => ipc.getIdentityProvider("aws.auth#sigv4"), + signer: new core_1.AwsSdkSigV4Signer(), + }, + { + schemeId: "smithy.api#noAuth", + identityProvider: (ipc) => ipc.getIdentityProvider("smithy.api#noAuth") || (async () => ({})), + signer: new core_2.NoAuthSigner(), + }, + ], + logger: config?.logger ?? new smithy_client_1.NoOpLogger(), + serviceId: config?.serviceId ?? "STS", + urlParser: config?.urlParser ?? url_parser_1.parseUrl, + utf8Decoder: config?.utf8Decoder ?? util_utf8_1.fromUtf8, + utf8Encoder: config?.utf8Encoder ?? util_utf8_1.toUtf8, + }; +}; +exports.getRuntimeConfig = getRuntimeConfig; diff --git a/amplify/functions/deleteDocument/node_modules/@aws-sdk/nested-clients/dist-cjs/submodules/sts/runtimeExtensions.js b/amplify/functions/deleteDocument/node_modules/@aws-sdk/nested-clients/dist-cjs/submodules/sts/runtimeExtensions.js new file mode 100644 index 0000000..a50ebec --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@aws-sdk/nested-clients/dist-cjs/submodules/sts/runtimeExtensions.js @@ -0,0 +1,13 @@ +"use strict"; +Object.defineProperty(exports, "__esModule", { value: true }); +exports.resolveRuntimeExtensions = void 0; +const region_config_resolver_1 = require("@aws-sdk/region-config-resolver"); +const protocol_http_1 = require("@smithy/protocol-http"); +const smithy_client_1 = require("@smithy/smithy-client"); +const httpAuthExtensionConfiguration_1 = require("./auth/httpAuthExtensionConfiguration"); +const resolveRuntimeExtensions = (runtimeConfig, extensions) => { + const extensionConfiguration = Object.assign((0, region_config_resolver_1.getAwsRegionExtensionConfiguration)(runtimeConfig), (0, smithy_client_1.getDefaultExtensionConfiguration)(runtimeConfig), (0, protocol_http_1.getHttpHandlerExtensionConfiguration)(runtimeConfig), (0, httpAuthExtensionConfiguration_1.getHttpAuthExtensionConfiguration)(runtimeConfig)); + extensions.forEach((extension) => 
extension.configure(extensionConfiguration)); + return Object.assign(runtimeConfig, (0, region_config_resolver_1.resolveAwsRegionExtensionConfiguration)(extensionConfiguration), (0, smithy_client_1.resolveDefaultRuntimeConfig)(extensionConfiguration), (0, protocol_http_1.resolveHttpHandlerRuntimeConfig)(extensionConfiguration), (0, httpAuthExtensionConfiguration_1.resolveHttpAuthRuntimeConfig)(extensionConfiguration)); +}; +exports.resolveRuntimeExtensions = resolveRuntimeExtensions; diff --git a/amplify/functions/deleteDocument/node_modules/@aws-sdk/nested-clients/dist-es/index.js b/amplify/functions/deleteDocument/node_modules/@aws-sdk/nested-clients/dist-es/index.js new file mode 100644 index 0000000..cb0ff5c --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@aws-sdk/nested-clients/dist-es/index.js @@ -0,0 +1 @@ +export {}; diff --git a/amplify/functions/deleteDocument/node_modules/@aws-sdk/nested-clients/dist-es/submodules/sso-oidc/SSOOIDC.js b/amplify/functions/deleteDocument/node_modules/@aws-sdk/nested-clients/dist-es/submodules/sso-oidc/SSOOIDC.js new file mode 100644 index 0000000..bcb161f --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@aws-sdk/nested-clients/dist-es/submodules/sso-oidc/SSOOIDC.js @@ -0,0 +1,9 @@ +import { createAggregatedClient } from "@smithy/smithy-client"; +import { CreateTokenCommand } from "./commands/CreateTokenCommand"; +import { SSOOIDCClient } from "./SSOOIDCClient"; +const commands = { + CreateTokenCommand, +}; +export class SSOOIDC extends SSOOIDCClient { +} +createAggregatedClient(commands, SSOOIDC); diff --git a/amplify/functions/deleteDocument/node_modules/@aws-sdk/nested-clients/dist-es/submodules/sso-oidc/SSOOIDCClient.js b/amplify/functions/deleteDocument/node_modules/@aws-sdk/nested-clients/dist-es/submodules/sso-oidc/SSOOIDCClient.js new file mode 100644 index 0000000..003cad7 --- /dev/null +++ 
b/amplify/functions/deleteDocument/node_modules/@aws-sdk/nested-clients/dist-es/submodules/sso-oidc/SSOOIDCClient.js @@ -0,0 +1,48 @@ +import { getHostHeaderPlugin, resolveHostHeaderConfig, } from "@aws-sdk/middleware-host-header"; +import { getLoggerPlugin } from "@aws-sdk/middleware-logger"; +import { getRecursionDetectionPlugin } from "@aws-sdk/middleware-recursion-detection"; +import { getUserAgentPlugin, resolveUserAgentConfig, } from "@aws-sdk/middleware-user-agent"; +import { resolveRegionConfig } from "@smithy/config-resolver"; +import { DefaultIdentityProviderConfig, getHttpAuthSchemeEndpointRuleSetPlugin, getHttpSigningPlugin, } from "@smithy/core"; +import { getContentLengthPlugin } from "@smithy/middleware-content-length"; +import { resolveEndpointConfig } from "@smithy/middleware-endpoint"; +import { getRetryPlugin, resolveRetryConfig } from "@smithy/middleware-retry"; +import { Client as __Client, } from "@smithy/smithy-client"; +import { defaultSSOOIDCHttpAuthSchemeParametersProvider, resolveHttpAuthSchemeConfig, } from "./auth/httpAuthSchemeProvider"; +import { resolveClientEndpointParameters, } from "./endpoint/EndpointParameters"; +import { getRuntimeConfig as __getRuntimeConfig } from "./runtimeConfig"; +import { resolveRuntimeExtensions } from "./runtimeExtensions"; +export { __Client }; +export class SSOOIDCClient extends __Client { + config; + constructor(...[configuration]) { + const _config_0 = __getRuntimeConfig(configuration || {}); + super(_config_0); + this.initConfig = _config_0; + const _config_1 = resolveClientEndpointParameters(_config_0); + const _config_2 = resolveUserAgentConfig(_config_1); + const _config_3 = resolveRetryConfig(_config_2); + const _config_4 = resolveRegionConfig(_config_3); + const _config_5 = resolveHostHeaderConfig(_config_4); + const _config_6 = resolveEndpointConfig(_config_5); + const _config_7 = resolveHttpAuthSchemeConfig(_config_6); + const _config_8 = resolveRuntimeExtensions(_config_7, 
configuration?.extensions || []); + this.config = _config_8; + this.middlewareStack.use(getUserAgentPlugin(this.config)); + this.middlewareStack.use(getRetryPlugin(this.config)); + this.middlewareStack.use(getContentLengthPlugin(this.config)); + this.middlewareStack.use(getHostHeaderPlugin(this.config)); + this.middlewareStack.use(getLoggerPlugin(this.config)); + this.middlewareStack.use(getRecursionDetectionPlugin(this.config)); + this.middlewareStack.use(getHttpAuthSchemeEndpointRuleSetPlugin(this.config, { + httpAuthSchemeParametersProvider: defaultSSOOIDCHttpAuthSchemeParametersProvider, + identityProviderConfigProvider: async (config) => new DefaultIdentityProviderConfig({ + "aws.auth#sigv4": config.credentials, + }), + })); + this.middlewareStack.use(getHttpSigningPlugin(this.config)); + } + destroy() { + super.destroy(); + } +} diff --git a/amplify/functions/deleteDocument/node_modules/@aws-sdk/nested-clients/dist-es/submodules/sso-oidc/auth/httpAuthExtensionConfiguration.js b/amplify/functions/deleteDocument/node_modules/@aws-sdk/nested-clients/dist-es/submodules/sso-oidc/auth/httpAuthExtensionConfiguration.js new file mode 100644 index 0000000..2ba1d48 --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@aws-sdk/nested-clients/dist-es/submodules/sso-oidc/auth/httpAuthExtensionConfiguration.js @@ -0,0 +1,38 @@ +export const getHttpAuthExtensionConfiguration = (runtimeConfig) => { + const _httpAuthSchemes = runtimeConfig.httpAuthSchemes; + let _httpAuthSchemeProvider = runtimeConfig.httpAuthSchemeProvider; + let _credentials = runtimeConfig.credentials; + return { + setHttpAuthScheme(httpAuthScheme) { + const index = _httpAuthSchemes.findIndex((scheme) => scheme.schemeId === httpAuthScheme.schemeId); + if (index === -1) { + _httpAuthSchemes.push(httpAuthScheme); + } + else { + _httpAuthSchemes.splice(index, 1, httpAuthScheme); + } + }, + httpAuthSchemes() { + return _httpAuthSchemes; + }, + setHttpAuthSchemeProvider(httpAuthSchemeProvider) { + 
_httpAuthSchemeProvider = httpAuthSchemeProvider; + }, + httpAuthSchemeProvider() { + return _httpAuthSchemeProvider; + }, + setCredentials(credentials) { + _credentials = credentials; + }, + credentials() { + return _credentials; + }, + }; +}; +export const resolveHttpAuthRuntimeConfig = (config) => { + return { + httpAuthSchemes: config.httpAuthSchemes(), + httpAuthSchemeProvider: config.httpAuthSchemeProvider(), + credentials: config.credentials(), + }; +}; diff --git a/amplify/functions/deleteDocument/node_modules/@aws-sdk/nested-clients/dist-es/submodules/sso-oidc/auth/httpAuthSchemeProvider.js b/amplify/functions/deleteDocument/node_modules/@aws-sdk/nested-clients/dist-es/submodules/sso-oidc/auth/httpAuthSchemeProvider.js new file mode 100644 index 0000000..a5e9eab --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@aws-sdk/nested-clients/dist-es/submodules/sso-oidc/auth/httpAuthSchemeProvider.js @@ -0,0 +1,50 @@ +import { resolveAwsSdkSigV4Config, } from "@aws-sdk/core"; +import { getSmithyContext, normalizeProvider } from "@smithy/util-middleware"; +export const defaultSSOOIDCHttpAuthSchemeParametersProvider = async (config, context, input) => { + return { + operation: getSmithyContext(context).operation, + region: (await normalizeProvider(config.region)()) || + (() => { + throw new Error("expected `region` to be configured for `aws.auth#sigv4`"); + })(), + }; +}; +function createAwsAuthSigv4HttpAuthOption(authParameters) { + return { + schemeId: "aws.auth#sigv4", + signingProperties: { + name: "sso-oauth", + region: authParameters.region, + }, + propertiesExtractor: (config, context) => ({ + signingProperties: { + config, + context, + }, + }), + }; +} +function createSmithyApiNoAuthHttpAuthOption(authParameters) { + return { + schemeId: "smithy.api#noAuth", + }; +} +export const defaultSSOOIDCHttpAuthSchemeProvider = (authParameters) => { + const options = []; + switch (authParameters.operation) { + case "CreateToken": { + 
options.push(createSmithyApiNoAuthHttpAuthOption(authParameters)); + break; + } + default: { + options.push(createAwsAuthSigv4HttpAuthOption(authParameters)); + } + } + return options; +}; +export const resolveHttpAuthSchemeConfig = (config) => { + const config_0 = resolveAwsSdkSigV4Config(config); + return Object.assign(config_0, { + authSchemePreference: normalizeProvider(config.authSchemePreference ?? []), + }); +}; diff --git a/amplify/functions/deleteDocument/node_modules/@aws-sdk/nested-clients/dist-es/submodules/sso-oidc/commands/CreateTokenCommand.js b/amplify/functions/deleteDocument/node_modules/@aws-sdk/nested-clients/dist-es/submodules/sso-oidc/commands/CreateTokenCommand.js new file mode 100644 index 0000000..7863247 --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@aws-sdk/nested-clients/dist-es/submodules/sso-oidc/commands/CreateTokenCommand.js @@ -0,0 +1,23 @@ +import { getEndpointPlugin } from "@smithy/middleware-endpoint"; +import { getSerdePlugin } from "@smithy/middleware-serde"; +import { Command as $Command } from "@smithy/smithy-client"; +import { commonParams } from "../endpoint/EndpointParameters"; +import { CreateTokenRequestFilterSensitiveLog, CreateTokenResponseFilterSensitiveLog, } from "../models/models_0"; +import { de_CreateTokenCommand, se_CreateTokenCommand } from "../protocols/Aws_restJson1"; +export { $Command }; +export class CreateTokenCommand extends $Command + .classBuilder() + .ep(commonParams) + .m(function (Command, cs, config, o) { + return [ + getSerdePlugin(config, this.serialize, this.deserialize), + getEndpointPlugin(config, Command.getEndpointParameterInstructions()), + ]; +}) + .s("AWSSSOOIDCService", "CreateToken", {}) + .n("SSOOIDCClient", "CreateTokenCommand") + .f(CreateTokenRequestFilterSensitiveLog, CreateTokenResponseFilterSensitiveLog) + .ser(se_CreateTokenCommand) + .de(de_CreateTokenCommand) + .build() { +} diff --git 
a/amplify/functions/deleteDocument/node_modules/@aws-sdk/nested-clients/dist-es/submodules/sso-oidc/commands/index.js b/amplify/functions/deleteDocument/node_modules/@aws-sdk/nested-clients/dist-es/submodules/sso-oidc/commands/index.js new file mode 100644 index 0000000..09214ca --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@aws-sdk/nested-clients/dist-es/submodules/sso-oidc/commands/index.js @@ -0,0 +1 @@ +export * from "./CreateTokenCommand"; diff --git a/amplify/functions/deleteDocument/node_modules/@aws-sdk/nested-clients/dist-es/submodules/sso-oidc/endpoint/EndpointParameters.js b/amplify/functions/deleteDocument/node_modules/@aws-sdk/nested-clients/dist-es/submodules/sso-oidc/endpoint/EndpointParameters.js new file mode 100644 index 0000000..2b26c44 --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@aws-sdk/nested-clients/dist-es/submodules/sso-oidc/endpoint/EndpointParameters.js @@ -0,0 +1,13 @@ +export const resolveClientEndpointParameters = (options) => { + return Object.assign(options, { + useDualstackEndpoint: options.useDualstackEndpoint ?? false, + useFipsEndpoint: options.useFipsEndpoint ?? 
false, + defaultSigningName: "sso-oauth", + }); +}; +export const commonParams = { + UseFIPS: { type: "builtInParams", name: "useFipsEndpoint" }, + Endpoint: { type: "builtInParams", name: "endpoint" }, + Region: { type: "builtInParams", name: "region" }, + UseDualStack: { type: "builtInParams", name: "useDualstackEndpoint" }, +}; diff --git a/amplify/functions/deleteDocument/node_modules/@aws-sdk/nested-clients/dist-es/submodules/sso-oidc/endpoint/endpointResolver.js b/amplify/functions/deleteDocument/node_modules/@aws-sdk/nested-clients/dist-es/submodules/sso-oidc/endpoint/endpointResolver.js new file mode 100644 index 0000000..0ac15bc --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@aws-sdk/nested-clients/dist-es/submodules/sso-oidc/endpoint/endpointResolver.js @@ -0,0 +1,14 @@ +import { awsEndpointFunctions } from "@aws-sdk/util-endpoints"; +import { customEndpointFunctions, EndpointCache, resolveEndpoint } from "@smithy/util-endpoints"; +import { ruleSet } from "./ruleset"; +const cache = new EndpointCache({ + size: 50, + params: ["Endpoint", "Region", "UseDualStack", "UseFIPS"], +}); +export const defaultEndpointResolver = (endpointParams, context = {}) => { + return cache.get(endpointParams, () => resolveEndpoint(ruleSet, { + endpointParams: endpointParams, + logger: context.logger, + })); +}; +customEndpointFunctions.aws = awsEndpointFunctions; diff --git a/amplify/functions/deleteDocument/node_modules/@aws-sdk/nested-clients/dist-es/submodules/sso-oidc/endpoint/ruleset.js b/amplify/functions/deleteDocument/node_modules/@aws-sdk/nested-clients/dist-es/submodules/sso-oidc/endpoint/ruleset.js new file mode 100644 index 0000000..040ea39 --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@aws-sdk/nested-clients/dist-es/submodules/sso-oidc/endpoint/ruleset.js @@ -0,0 +1,4 @@ +const u = "required", v = "fn", w = "argv", x = "ref"; +const a = true, b = "isSet", c = "booleanEquals", d = "error", e = "endpoint", f = "tree", g = 
"PartitionResult", h = "getAttr", i = { [u]: false, "type": "String" }, j = { [u]: true, "default": false, "type": "Boolean" }, k = { [x]: "Endpoint" }, l = { [v]: c, [w]: [{ [x]: "UseFIPS" }, true] }, m = { [v]: c, [w]: [{ [x]: "UseDualStack" }, true] }, n = {}, o = { [v]: h, [w]: [{ [x]: g }, "supportsFIPS"] }, p = { [x]: g }, q = { [v]: c, [w]: [true, { [v]: h, [w]: [p, "supportsDualStack"] }] }, r = [l], s = [m], t = [{ [x]: "Region" }]; +const _data = { version: "1.0", parameters: { Region: i, UseDualStack: j, UseFIPS: j, Endpoint: i }, rules: [{ conditions: [{ [v]: b, [w]: [k] }], rules: [{ conditions: r, error: "Invalid Configuration: FIPS and custom endpoint are not supported", type: d }, { conditions: s, error: "Invalid Configuration: Dualstack and custom endpoint are not supported", type: d }, { endpoint: { url: k, properties: n, headers: n }, type: e }], type: f }, { conditions: [{ [v]: b, [w]: t }], rules: [{ conditions: [{ [v]: "aws.partition", [w]: t, assign: g }], rules: [{ conditions: [l, m], rules: [{ conditions: [{ [v]: c, [w]: [a, o] }, q], rules: [{ endpoint: { url: "https://oidc-fips.{Region}.{PartitionResult#dualStackDnsSuffix}", properties: n, headers: n }, type: e }], type: f }, { error: "FIPS and DualStack are enabled, but this partition does not support one or both", type: d }], type: f }, { conditions: r, rules: [{ conditions: [{ [v]: c, [w]: [o, a] }], rules: [{ conditions: [{ [v]: "stringEquals", [w]: [{ [v]: h, [w]: [p, "name"] }, "aws-us-gov"] }], endpoint: { url: "https://oidc.{Region}.amazonaws.com", properties: n, headers: n }, type: e }, { endpoint: { url: "https://oidc-fips.{Region}.{PartitionResult#dnsSuffix}", properties: n, headers: n }, type: e }], type: f }, { error: "FIPS is enabled but this partition does not support FIPS", type: d }], type: f }, { conditions: s, rules: [{ conditions: [q], rules: [{ endpoint: { url: "https://oidc.{Region}.{PartitionResult#dualStackDnsSuffix}", properties: n, headers: n }, type: e }], type: 
f }, { error: "DualStack is enabled but this partition does not support DualStack", type: d }], type: f }, { endpoint: { url: "https://oidc.{Region}.{PartitionResult#dnsSuffix}", properties: n, headers: n }, type: e }], type: f }], type: f }, { error: "Invalid Configuration: Missing Region", type: d }] }; +export const ruleSet = _data; diff --git a/amplify/functions/deleteDocument/node_modules/@aws-sdk/nested-clients/dist-es/submodules/sso-oidc/extensionConfiguration.js b/amplify/functions/deleteDocument/node_modules/@aws-sdk/nested-clients/dist-es/submodules/sso-oidc/extensionConfiguration.js new file mode 100644 index 0000000..cb0ff5c --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@aws-sdk/nested-clients/dist-es/submodules/sso-oidc/extensionConfiguration.js @@ -0,0 +1 @@ +export {}; diff --git a/amplify/functions/deleteDocument/node_modules/@aws-sdk/nested-clients/dist-es/submodules/sso-oidc/index.js b/amplify/functions/deleteDocument/node_modules/@aws-sdk/nested-clients/dist-es/submodules/sso-oidc/index.js new file mode 100644 index 0000000..c2894a3 --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@aws-sdk/nested-clients/dist-es/submodules/sso-oidc/index.js @@ -0,0 +1,5 @@ +export * from "./SSOOIDCClient"; +export * from "./SSOOIDC"; +export * from "./commands"; +export * from "./models"; +export { SSOOIDCServiceException } from "./models/SSOOIDCServiceException"; diff --git a/amplify/functions/deleteDocument/node_modules/@aws-sdk/nested-clients/dist-es/submodules/sso-oidc/models/SSOOIDCServiceException.js b/amplify/functions/deleteDocument/node_modules/@aws-sdk/nested-clients/dist-es/submodules/sso-oidc/models/SSOOIDCServiceException.js new file mode 100644 index 0000000..176cec3 --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@aws-sdk/nested-clients/dist-es/submodules/sso-oidc/models/SSOOIDCServiceException.js @@ -0,0 +1,8 @@ +import { ServiceException as __ServiceException, } from "@smithy/smithy-client"; 
+export { __ServiceException }; +export class SSOOIDCServiceException extends __ServiceException { + constructor(options) { + super(options); + Object.setPrototypeOf(this, SSOOIDCServiceException.prototype); + } +} diff --git a/amplify/functions/deleteDocument/node_modules/@aws-sdk/nested-clients/dist-es/submodules/sso-oidc/models/index.js b/amplify/functions/deleteDocument/node_modules/@aws-sdk/nested-clients/dist-es/submodules/sso-oidc/models/index.js new file mode 100644 index 0000000..09c5d6e --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@aws-sdk/nested-clients/dist-es/submodules/sso-oidc/models/index.js @@ -0,0 +1 @@ +export * from "./models_0"; diff --git a/amplify/functions/deleteDocument/node_modules/@aws-sdk/nested-clients/dist-es/submodules/sso-oidc/models/models_0.js b/amplify/functions/deleteDocument/node_modules/@aws-sdk/nested-clients/dist-es/submodules/sso-oidc/models/models_0.js new file mode 100644 index 0000000..b350ef1 --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@aws-sdk/nested-clients/dist-es/submodules/sso-oidc/models/models_0.js @@ -0,0 +1,190 @@ +import { SENSITIVE_STRING } from "@smithy/smithy-client"; +import { SSOOIDCServiceException as __BaseException } from "./SSOOIDCServiceException"; +export class AccessDeniedException extends __BaseException { + name = "AccessDeniedException"; + $fault = "client"; + error; + error_description; + constructor(opts) { + super({ + name: "AccessDeniedException", + $fault: "client", + ...opts, + }); + Object.setPrototypeOf(this, AccessDeniedException.prototype); + this.error = opts.error; + this.error_description = opts.error_description; + } +} +export class AuthorizationPendingException extends __BaseException { + name = "AuthorizationPendingException"; + $fault = "client"; + error; + error_description; + constructor(opts) { + super({ + name: "AuthorizationPendingException", + $fault: "client", + ...opts, + }); + Object.setPrototypeOf(this, 
AuthorizationPendingException.prototype); + this.error = opts.error; + this.error_description = opts.error_description; + } +} +export const CreateTokenRequestFilterSensitiveLog = (obj) => ({ + ...obj, + ...(obj.clientSecret && { clientSecret: SENSITIVE_STRING }), + ...(obj.refreshToken && { refreshToken: SENSITIVE_STRING }), + ...(obj.codeVerifier && { codeVerifier: SENSITIVE_STRING }), +}); +export const CreateTokenResponseFilterSensitiveLog = (obj) => ({ + ...obj, + ...(obj.accessToken && { accessToken: SENSITIVE_STRING }), + ...(obj.refreshToken && { refreshToken: SENSITIVE_STRING }), + ...(obj.idToken && { idToken: SENSITIVE_STRING }), +}); +export class ExpiredTokenException extends __BaseException { + name = "ExpiredTokenException"; + $fault = "client"; + error; + error_description; + constructor(opts) { + super({ + name: "ExpiredTokenException", + $fault: "client", + ...opts, + }); + Object.setPrototypeOf(this, ExpiredTokenException.prototype); + this.error = opts.error; + this.error_description = opts.error_description; + } +} +export class InternalServerException extends __BaseException { + name = "InternalServerException"; + $fault = "server"; + error; + error_description; + constructor(opts) { + super({ + name: "InternalServerException", + $fault: "server", + ...opts, + }); + Object.setPrototypeOf(this, InternalServerException.prototype); + this.error = opts.error; + this.error_description = opts.error_description; + } +} +export class InvalidClientException extends __BaseException { + name = "InvalidClientException"; + $fault = "client"; + error; + error_description; + constructor(opts) { + super({ + name: "InvalidClientException", + $fault: "client", + ...opts, + }); + Object.setPrototypeOf(this, InvalidClientException.prototype); + this.error = opts.error; + this.error_description = opts.error_description; + } +} +export class InvalidGrantException extends __BaseException { + name = "InvalidGrantException"; + $fault = "client"; + error; + 
error_description; + constructor(opts) { + super({ + name: "InvalidGrantException", + $fault: "client", + ...opts, + }); + Object.setPrototypeOf(this, InvalidGrantException.prototype); + this.error = opts.error; + this.error_description = opts.error_description; + } +} +export class InvalidRequestException extends __BaseException { + name = "InvalidRequestException"; + $fault = "client"; + error; + error_description; + constructor(opts) { + super({ + name: "InvalidRequestException", + $fault: "client", + ...opts, + }); + Object.setPrototypeOf(this, InvalidRequestException.prototype); + this.error = opts.error; + this.error_description = opts.error_description; + } +} +export class InvalidScopeException extends __BaseException { + name = "InvalidScopeException"; + $fault = "client"; + error; + error_description; + constructor(opts) { + super({ + name: "InvalidScopeException", + $fault: "client", + ...opts, + }); + Object.setPrototypeOf(this, InvalidScopeException.prototype); + this.error = opts.error; + this.error_description = opts.error_description; + } +} +export class SlowDownException extends __BaseException { + name = "SlowDownException"; + $fault = "client"; + error; + error_description; + constructor(opts) { + super({ + name: "SlowDownException", + $fault: "client", + ...opts, + }); + Object.setPrototypeOf(this, SlowDownException.prototype); + this.error = opts.error; + this.error_description = opts.error_description; + } +} +export class UnauthorizedClientException extends __BaseException { + name = "UnauthorizedClientException"; + $fault = "client"; + error; + error_description; + constructor(opts) { + super({ + name: "UnauthorizedClientException", + $fault: "client", + ...opts, + }); + Object.setPrototypeOf(this, UnauthorizedClientException.prototype); + this.error = opts.error; + this.error_description = opts.error_description; + } +} +export class UnsupportedGrantTypeException extends __BaseException { + name = "UnsupportedGrantTypeException"; + $fault 
= "client"; + error; + error_description; + constructor(opts) { + super({ + name: "UnsupportedGrantTypeException", + $fault: "client", + ...opts, + }); + Object.setPrototypeOf(this, UnsupportedGrantTypeException.prototype); + this.error = opts.error; + this.error_description = opts.error_description; + } +} diff --git a/amplify/functions/deleteDocument/node_modules/@aws-sdk/nested-clients/dist-es/submodules/sso-oidc/protocols/Aws_restJson1.js b/amplify/functions/deleteDocument/node_modules/@aws-sdk/nested-clients/dist-es/submodules/sso-oidc/protocols/Aws_restJson1.js new file mode 100644 index 0000000..b58850b --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@aws-sdk/nested-clients/dist-es/submodules/sso-oidc/protocols/Aws_restJson1.js @@ -0,0 +1,255 @@ +import { loadRestJsonErrorCode, parseJsonBody as parseBody, parseJsonErrorBody as parseErrorBody } from "@aws-sdk/core"; +import { requestBuilder as rb } from "@smithy/core"; +import { _json, collectBody, decorateServiceException as __decorateServiceException, expectInt32 as __expectInt32, expectNonNull as __expectNonNull, expectObject as __expectObject, expectString as __expectString, map, take, withBaseException, } from "@smithy/smithy-client"; +import { AccessDeniedException, AuthorizationPendingException, ExpiredTokenException, InternalServerException, InvalidClientException, InvalidGrantException, InvalidRequestException, InvalidScopeException, SlowDownException, UnauthorizedClientException, UnsupportedGrantTypeException, } from "../models/models_0"; +import { SSOOIDCServiceException as __BaseException } from "../models/SSOOIDCServiceException"; +export const se_CreateTokenCommand = async (input, context) => { + const b = rb(input, context); + const headers = { + "content-type": "application/json", + }; + b.bp("/token"); + let body; + body = JSON.stringify(take(input, { + clientId: [], + clientSecret: [], + code: [], + codeVerifier: [], + deviceCode: [], + grantType: [], + redirectUri: [], + 
refreshToken: [], + scope: (_) => _json(_), + })); + b.m("POST").h(headers).b(body); + return b.build(); +}; +export const de_CreateTokenCommand = async (output, context) => { + if (output.statusCode !== 200 && output.statusCode >= 300) { + return de_CommandError(output, context); + } + const contents = map({ + $metadata: deserializeMetadata(output), + }); + const data = __expectNonNull(__expectObject(await parseBody(output.body, context)), "body"); + const doc = take(data, { + accessToken: __expectString, + expiresIn: __expectInt32, + idToken: __expectString, + refreshToken: __expectString, + tokenType: __expectString, + }); + Object.assign(contents, doc); + return contents; +}; +const de_CommandError = async (output, context) => { + const parsedOutput = { + ...output, + body: await parseErrorBody(output.body, context), + }; + const errorCode = loadRestJsonErrorCode(output, parsedOutput.body); + switch (errorCode) { + case "AccessDeniedException": + case "com.amazonaws.ssooidc#AccessDeniedException": + throw await de_AccessDeniedExceptionRes(parsedOutput, context); + case "AuthorizationPendingException": + case "com.amazonaws.ssooidc#AuthorizationPendingException": + throw await de_AuthorizationPendingExceptionRes(parsedOutput, context); + case "ExpiredTokenException": + case "com.amazonaws.ssooidc#ExpiredTokenException": + throw await de_ExpiredTokenExceptionRes(parsedOutput, context); + case "InternalServerException": + case "com.amazonaws.ssooidc#InternalServerException": + throw await de_InternalServerExceptionRes(parsedOutput, context); + case "InvalidClientException": + case "com.amazonaws.ssooidc#InvalidClientException": + throw await de_InvalidClientExceptionRes(parsedOutput, context); + case "InvalidGrantException": + case "com.amazonaws.ssooidc#InvalidGrantException": + throw await de_InvalidGrantExceptionRes(parsedOutput, context); + case "InvalidRequestException": + case "com.amazonaws.ssooidc#InvalidRequestException": + throw await 
de_InvalidRequestExceptionRes(parsedOutput, context); + case "InvalidScopeException": + case "com.amazonaws.ssooidc#InvalidScopeException": + throw await de_InvalidScopeExceptionRes(parsedOutput, context); + case "SlowDownException": + case "com.amazonaws.ssooidc#SlowDownException": + throw await de_SlowDownExceptionRes(parsedOutput, context); + case "UnauthorizedClientException": + case "com.amazonaws.ssooidc#UnauthorizedClientException": + throw await de_UnauthorizedClientExceptionRes(parsedOutput, context); + case "UnsupportedGrantTypeException": + case "com.amazonaws.ssooidc#UnsupportedGrantTypeException": + throw await de_UnsupportedGrantTypeExceptionRes(parsedOutput, context); + default: + const parsedBody = parsedOutput.body; + return throwDefaultError({ + output, + parsedBody, + errorCode, + }); + } +}; +const throwDefaultError = withBaseException(__BaseException); +const de_AccessDeniedExceptionRes = async (parsedOutput, context) => { + const contents = map({}); + const data = parsedOutput.body; + const doc = take(data, { + error: __expectString, + error_description: __expectString, + }); + Object.assign(contents, doc); + const exception = new AccessDeniedException({ + $metadata: deserializeMetadata(parsedOutput), + ...contents, + }); + return __decorateServiceException(exception, parsedOutput.body); +}; +const de_AuthorizationPendingExceptionRes = async (parsedOutput, context) => { + const contents = map({}); + const data = parsedOutput.body; + const doc = take(data, { + error: __expectString, + error_description: __expectString, + }); + Object.assign(contents, doc); + const exception = new AuthorizationPendingException({ + $metadata: deserializeMetadata(parsedOutput), + ...contents, + }); + return __decorateServiceException(exception, parsedOutput.body); +}; +const de_ExpiredTokenExceptionRes = async (parsedOutput, context) => { + const contents = map({}); + const data = parsedOutput.body; + const doc = take(data, { + error: __expectString, + 
error_description: __expectString, + }); + Object.assign(contents, doc); + const exception = new ExpiredTokenException({ + $metadata: deserializeMetadata(parsedOutput), + ...contents, + }); + return __decorateServiceException(exception, parsedOutput.body); +}; +const de_InternalServerExceptionRes = async (parsedOutput, context) => { + const contents = map({}); + const data = parsedOutput.body; + const doc = take(data, { + error: __expectString, + error_description: __expectString, + }); + Object.assign(contents, doc); + const exception = new InternalServerException({ + $metadata: deserializeMetadata(parsedOutput), + ...contents, + }); + return __decorateServiceException(exception, parsedOutput.body); +}; +const de_InvalidClientExceptionRes = async (parsedOutput, context) => { + const contents = map({}); + const data = parsedOutput.body; + const doc = take(data, { + error: __expectString, + error_description: __expectString, + }); + Object.assign(contents, doc); + const exception = new InvalidClientException({ + $metadata: deserializeMetadata(parsedOutput), + ...contents, + }); + return __decorateServiceException(exception, parsedOutput.body); +}; +const de_InvalidGrantExceptionRes = async (parsedOutput, context) => { + const contents = map({}); + const data = parsedOutput.body; + const doc = take(data, { + error: __expectString, + error_description: __expectString, + }); + Object.assign(contents, doc); + const exception = new InvalidGrantException({ + $metadata: deserializeMetadata(parsedOutput), + ...contents, + }); + return __decorateServiceException(exception, parsedOutput.body); +}; +const de_InvalidRequestExceptionRes = async (parsedOutput, context) => { + const contents = map({}); + const data = parsedOutput.body; + const doc = take(data, { + error: __expectString, + error_description: __expectString, + }); + Object.assign(contents, doc); + const exception = new InvalidRequestException({ + $metadata: deserializeMetadata(parsedOutput), + ...contents, + }); + 
return __decorateServiceException(exception, parsedOutput.body); +}; +const de_InvalidScopeExceptionRes = async (parsedOutput, context) => { + const contents = map({}); + const data = parsedOutput.body; + const doc = take(data, { + error: __expectString, + error_description: __expectString, + }); + Object.assign(contents, doc); + const exception = new InvalidScopeException({ + $metadata: deserializeMetadata(parsedOutput), + ...contents, + }); + return __decorateServiceException(exception, parsedOutput.body); +}; +const de_SlowDownExceptionRes = async (parsedOutput, context) => { + const contents = map({}); + const data = parsedOutput.body; + const doc = take(data, { + error: __expectString, + error_description: __expectString, + }); + Object.assign(contents, doc); + const exception = new SlowDownException({ + $metadata: deserializeMetadata(parsedOutput), + ...contents, + }); + return __decorateServiceException(exception, parsedOutput.body); +}; +const de_UnauthorizedClientExceptionRes = async (parsedOutput, context) => { + const contents = map({}); + const data = parsedOutput.body; + const doc = take(data, { + error: __expectString, + error_description: __expectString, + }); + Object.assign(contents, doc); + const exception = new UnauthorizedClientException({ + $metadata: deserializeMetadata(parsedOutput), + ...contents, + }); + return __decorateServiceException(exception, parsedOutput.body); +}; +const de_UnsupportedGrantTypeExceptionRes = async (parsedOutput, context) => { + const contents = map({}); + const data = parsedOutput.body; + const doc = take(data, { + error: __expectString, + error_description: __expectString, + }); + Object.assign(contents, doc); + const exception = new UnsupportedGrantTypeException({ + $metadata: deserializeMetadata(parsedOutput), + ...contents, + }); + return __decorateServiceException(exception, parsedOutput.body); +}; +const deserializeMetadata = (output) => ({ + httpStatusCode: output.statusCode, + requestId: 
output.headers["x-amzn-requestid"] ?? output.headers["x-amzn-request-id"] ?? output.headers["x-amz-request-id"], + extendedRequestId: output.headers["x-amz-id-2"], + cfId: output.headers["x-amz-cf-id"], +}); +const collectBodyString = (streamBody, context) => collectBody(streamBody, context).then((body) => context.utf8Encoder(body)); diff --git a/amplify/functions/deleteDocument/node_modules/@aws-sdk/nested-clients/dist-es/submodules/sso-oidc/runtimeConfig.browser.js b/amplify/functions/deleteDocument/node_modules/@aws-sdk/nested-clients/dist-es/submodules/sso-oidc/runtimeConfig.browser.js new file mode 100644 index 0000000..94d7b87 --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@aws-sdk/nested-clients/dist-es/submodules/sso-oidc/runtimeConfig.browser.js @@ -0,0 +1,33 @@ +import packageInfo from "../../../package.json"; +import { Sha256 } from "@aws-crypto/sha256-browser"; +import { createDefaultUserAgentProvider } from "@aws-sdk/util-user-agent-browser"; +import { DEFAULT_USE_DUALSTACK_ENDPOINT, DEFAULT_USE_FIPS_ENDPOINT } from "@smithy/config-resolver"; +import { FetchHttpHandler as RequestHandler, streamCollector } from "@smithy/fetch-http-handler"; +import { invalidProvider } from "@smithy/invalid-dependency"; +import { calculateBodyLength } from "@smithy/util-body-length-browser"; +import { DEFAULT_MAX_ATTEMPTS, DEFAULT_RETRY_MODE } from "@smithy/util-retry"; +import { getRuntimeConfig as getSharedRuntimeConfig } from "./runtimeConfig.shared"; +import { loadConfigsForDefaultMode } from "@smithy/smithy-client"; +import { resolveDefaultsModeConfig } from "@smithy/util-defaults-mode-browser"; +export const getRuntimeConfig = (config) => { + const defaultsMode = resolveDefaultsModeConfig(config); + const defaultConfigProvider = () => defaultsMode().then(loadConfigsForDefaultMode); + const clientSharedValues = getSharedRuntimeConfig(config); + return { + ...clientSharedValues, + ...config, + runtime: "browser", + defaultsMode, + 
bodyLengthChecker: config?.bodyLengthChecker ?? calculateBodyLength, + defaultUserAgentProvider: config?.defaultUserAgentProvider ?? + createDefaultUserAgentProvider({ serviceId: clientSharedValues.serviceId, clientVersion: packageInfo.version }), + maxAttempts: config?.maxAttempts ?? DEFAULT_MAX_ATTEMPTS, + region: config?.region ?? invalidProvider("Region is missing"), + requestHandler: RequestHandler.create(config?.requestHandler ?? defaultConfigProvider), + retryMode: config?.retryMode ?? (async () => (await defaultConfigProvider()).retryMode || DEFAULT_RETRY_MODE), + sha256: config?.sha256 ?? Sha256, + streamCollector: config?.streamCollector ?? streamCollector, + useDualstackEndpoint: config?.useDualstackEndpoint ?? (() => Promise.resolve(DEFAULT_USE_DUALSTACK_ENDPOINT)), + useFipsEndpoint: config?.useFipsEndpoint ?? (() => Promise.resolve(DEFAULT_USE_FIPS_ENDPOINT)), + }; +}; diff --git a/amplify/functions/deleteDocument/node_modules/@aws-sdk/nested-clients/dist-es/submodules/sso-oidc/runtimeConfig.js b/amplify/functions/deleteDocument/node_modules/@aws-sdk/nested-clients/dist-es/submodules/sso-oidc/runtimeConfig.js new file mode 100644 index 0000000..32d413c --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@aws-sdk/nested-clients/dist-es/submodules/sso-oidc/runtimeConfig.js @@ -0,0 +1,46 @@ +import packageInfo from "../../../package.json"; +import { NODE_AUTH_SCHEME_PREFERENCE_OPTIONS, emitWarningIfUnsupportedVersion as awsCheckVersion } from "@aws-sdk/core"; +import { NODE_APP_ID_CONFIG_OPTIONS, createDefaultUserAgentProvider } from "@aws-sdk/util-user-agent-node"; +import { NODE_REGION_CONFIG_FILE_OPTIONS, NODE_REGION_CONFIG_OPTIONS, NODE_USE_DUALSTACK_ENDPOINT_CONFIG_OPTIONS, NODE_USE_FIPS_ENDPOINT_CONFIG_OPTIONS, } from "@smithy/config-resolver"; +import { Hash } from "@smithy/hash-node"; +import { NODE_MAX_ATTEMPT_CONFIG_OPTIONS, NODE_RETRY_MODE_CONFIG_OPTIONS } from "@smithy/middleware-retry"; +import { loadConfig as loadNodeConfig } 
from "@smithy/node-config-provider"; +import { NodeHttpHandler as RequestHandler, streamCollector } from "@smithy/node-http-handler"; +import { calculateBodyLength } from "@smithy/util-body-length-node"; +import { DEFAULT_RETRY_MODE } from "@smithy/util-retry"; +import { getRuntimeConfig as getSharedRuntimeConfig } from "./runtimeConfig.shared"; +import { loadConfigsForDefaultMode } from "@smithy/smithy-client"; +import { resolveDefaultsModeConfig } from "@smithy/util-defaults-mode-node"; +import { emitWarningIfUnsupportedVersion } from "@smithy/smithy-client"; +export const getRuntimeConfig = (config) => { + emitWarningIfUnsupportedVersion(process.version); + const defaultsMode = resolveDefaultsModeConfig(config); + const defaultConfigProvider = () => defaultsMode().then(loadConfigsForDefaultMode); + const clientSharedValues = getSharedRuntimeConfig(config); + awsCheckVersion(process.version); + const profileConfig = { profile: config?.profile }; + return { + ...clientSharedValues, + ...config, + runtime: "node", + defaultsMode, + authSchemePreference: config?.authSchemePreference ?? loadNodeConfig(NODE_AUTH_SCHEME_PREFERENCE_OPTIONS, profileConfig), + bodyLengthChecker: config?.bodyLengthChecker ?? calculateBodyLength, + defaultUserAgentProvider: config?.defaultUserAgentProvider ?? + createDefaultUserAgentProvider({ serviceId: clientSharedValues.serviceId, clientVersion: packageInfo.version }), + maxAttempts: config?.maxAttempts ?? loadNodeConfig(NODE_MAX_ATTEMPT_CONFIG_OPTIONS, config), + region: config?.region ?? + loadNodeConfig(NODE_REGION_CONFIG_OPTIONS, { ...NODE_REGION_CONFIG_FILE_OPTIONS, ...profileConfig }), + requestHandler: RequestHandler.create(config?.requestHandler ?? defaultConfigProvider), + retryMode: config?.retryMode ?? + loadNodeConfig({ + ...NODE_RETRY_MODE_CONFIG_OPTIONS, + default: async () => (await defaultConfigProvider()).retryMode || DEFAULT_RETRY_MODE, + }, config), + sha256: config?.sha256 ?? 
Hash.bind(null, "sha256"), + streamCollector: config?.streamCollector ?? streamCollector, + useDualstackEndpoint: config?.useDualstackEndpoint ?? loadNodeConfig(NODE_USE_DUALSTACK_ENDPOINT_CONFIG_OPTIONS, profileConfig), + useFipsEndpoint: config?.useFipsEndpoint ?? loadNodeConfig(NODE_USE_FIPS_ENDPOINT_CONFIG_OPTIONS, profileConfig), + userAgentAppId: config?.userAgentAppId ?? loadNodeConfig(NODE_APP_ID_CONFIG_OPTIONS, profileConfig), + }; +}; diff --git a/amplify/functions/deleteDocument/node_modules/@aws-sdk/nested-clients/dist-es/submodules/sso-oidc/runtimeConfig.native.js b/amplify/functions/deleteDocument/node_modules/@aws-sdk/nested-clients/dist-es/submodules/sso-oidc/runtimeConfig.native.js new file mode 100644 index 0000000..0b54695 --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@aws-sdk/nested-clients/dist-es/submodules/sso-oidc/runtimeConfig.native.js @@ -0,0 +1,11 @@ +import { Sha256 } from "@aws-crypto/sha256-js"; +import { getRuntimeConfig as getBrowserRuntimeConfig } from "./runtimeConfig.browser"; +export const getRuntimeConfig = (config) => { + const browserDefaults = getBrowserRuntimeConfig(config); + return { + ...browserDefaults, + ...config, + runtime: "react-native", + sha256: config?.sha256 ?? 
Sha256, + }; +}; diff --git a/amplify/functions/deleteDocument/node_modules/@aws-sdk/nested-clients/dist-es/submodules/sso-oidc/runtimeConfig.shared.js b/amplify/functions/deleteDocument/node_modules/@aws-sdk/nested-clients/dist-es/submodules/sso-oidc/runtimeConfig.shared.js new file mode 100644 index 0000000..49a0235 --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@aws-sdk/nested-clients/dist-es/submodules/sso-oidc/runtimeConfig.shared.js @@ -0,0 +1,36 @@ +import { AwsSdkSigV4Signer } from "@aws-sdk/core"; +import { NoAuthSigner } from "@smithy/core"; +import { NoOpLogger } from "@smithy/smithy-client"; +import { parseUrl } from "@smithy/url-parser"; +import { fromBase64, toBase64 } from "@smithy/util-base64"; +import { fromUtf8, toUtf8 } from "@smithy/util-utf8"; +import { defaultSSOOIDCHttpAuthSchemeProvider } from "./auth/httpAuthSchemeProvider"; +import { defaultEndpointResolver } from "./endpoint/endpointResolver"; +export const getRuntimeConfig = (config) => { + return { + apiVersion: "2019-06-10", + base64Decoder: config?.base64Decoder ?? fromBase64, + base64Encoder: config?.base64Encoder ?? toBase64, + disableHostPrefix: config?.disableHostPrefix ?? false, + endpointProvider: config?.endpointProvider ?? defaultEndpointResolver, + extensions: config?.extensions ?? [], + httpAuthSchemeProvider: config?.httpAuthSchemeProvider ?? defaultSSOOIDCHttpAuthSchemeProvider, + httpAuthSchemes: config?.httpAuthSchemes ?? [ + { + schemeId: "aws.auth#sigv4", + identityProvider: (ipc) => ipc.getIdentityProvider("aws.auth#sigv4"), + signer: new AwsSdkSigV4Signer(), + }, + { + schemeId: "smithy.api#noAuth", + identityProvider: (ipc) => ipc.getIdentityProvider("smithy.api#noAuth") || (async () => ({})), + signer: new NoAuthSigner(), + }, + ], + logger: config?.logger ?? new NoOpLogger(), + serviceId: config?.serviceId ?? "SSO OIDC", + urlParser: config?.urlParser ?? parseUrl, + utf8Decoder: config?.utf8Decoder ?? 
fromUtf8, + utf8Encoder: config?.utf8Encoder ?? toUtf8, + }; +}; diff --git a/amplify/functions/deleteDocument/node_modules/@aws-sdk/nested-clients/dist-es/submodules/sso-oidc/runtimeExtensions.js b/amplify/functions/deleteDocument/node_modules/@aws-sdk/nested-clients/dist-es/submodules/sso-oidc/runtimeExtensions.js new file mode 100644 index 0000000..5b29695 --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@aws-sdk/nested-clients/dist-es/submodules/sso-oidc/runtimeExtensions.js @@ -0,0 +1,9 @@ +import { getAwsRegionExtensionConfiguration, resolveAwsRegionExtensionConfiguration, } from "@aws-sdk/region-config-resolver"; +import { getHttpHandlerExtensionConfiguration, resolveHttpHandlerRuntimeConfig } from "@smithy/protocol-http"; +import { getDefaultExtensionConfiguration, resolveDefaultRuntimeConfig } from "@smithy/smithy-client"; +import { getHttpAuthExtensionConfiguration, resolveHttpAuthRuntimeConfig } from "./auth/httpAuthExtensionConfiguration"; +export const resolveRuntimeExtensions = (runtimeConfig, extensions) => { + const extensionConfiguration = Object.assign(getAwsRegionExtensionConfiguration(runtimeConfig), getDefaultExtensionConfiguration(runtimeConfig), getHttpHandlerExtensionConfiguration(runtimeConfig), getHttpAuthExtensionConfiguration(runtimeConfig)); + extensions.forEach((extension) => extension.configure(extensionConfiguration)); + return Object.assign(runtimeConfig, resolveAwsRegionExtensionConfiguration(extensionConfiguration), resolveDefaultRuntimeConfig(extensionConfiguration), resolveHttpHandlerRuntimeConfig(extensionConfiguration), resolveHttpAuthRuntimeConfig(extensionConfiguration)); +}; diff --git a/amplify/functions/deleteDocument/node_modules/@aws-sdk/nested-clients/dist-es/submodules/sts/STS.js b/amplify/functions/deleteDocument/node_modules/@aws-sdk/nested-clients/dist-es/submodules/sts/STS.js new file mode 100644 index 0000000..71edef7 --- /dev/null +++ 
b/amplify/functions/deleteDocument/node_modules/@aws-sdk/nested-clients/dist-es/submodules/sts/STS.js @@ -0,0 +1,11 @@ +import { createAggregatedClient } from "@smithy/smithy-client"; +import { AssumeRoleCommand } from "./commands/AssumeRoleCommand"; +import { AssumeRoleWithWebIdentityCommand, } from "./commands/AssumeRoleWithWebIdentityCommand"; +import { STSClient } from "./STSClient"; +const commands = { + AssumeRoleCommand, + AssumeRoleWithWebIdentityCommand, +}; +export class STS extends STSClient { +} +createAggregatedClient(commands, STS); diff --git a/amplify/functions/deleteDocument/node_modules/@aws-sdk/nested-clients/dist-es/submodules/sts/STSClient.js b/amplify/functions/deleteDocument/node_modules/@aws-sdk/nested-clients/dist-es/submodules/sts/STSClient.js new file mode 100644 index 0000000..81b1040 --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@aws-sdk/nested-clients/dist-es/submodules/sts/STSClient.js @@ -0,0 +1,48 @@ +import { getHostHeaderPlugin, resolveHostHeaderConfig, } from "@aws-sdk/middleware-host-header"; +import { getLoggerPlugin } from "@aws-sdk/middleware-logger"; +import { getRecursionDetectionPlugin } from "@aws-sdk/middleware-recursion-detection"; +import { getUserAgentPlugin, resolveUserAgentConfig, } from "@aws-sdk/middleware-user-agent"; +import { resolveRegionConfig } from "@smithy/config-resolver"; +import { DefaultIdentityProviderConfig, getHttpAuthSchemeEndpointRuleSetPlugin, getHttpSigningPlugin, } from "@smithy/core"; +import { getContentLengthPlugin } from "@smithy/middleware-content-length"; +import { resolveEndpointConfig } from "@smithy/middleware-endpoint"; +import { getRetryPlugin, resolveRetryConfig } from "@smithy/middleware-retry"; +import { Client as __Client, } from "@smithy/smithy-client"; +import { defaultSTSHttpAuthSchemeParametersProvider, resolveHttpAuthSchemeConfig, } from "./auth/httpAuthSchemeProvider"; +import { resolveClientEndpointParameters, } from "./endpoint/EndpointParameters"; 
+import { getRuntimeConfig as __getRuntimeConfig } from "./runtimeConfig"; +import { resolveRuntimeExtensions } from "./runtimeExtensions"; +export { __Client }; +export class STSClient extends __Client { + config; + constructor(...[configuration]) { + const _config_0 = __getRuntimeConfig(configuration || {}); + super(_config_0); + this.initConfig = _config_0; + const _config_1 = resolveClientEndpointParameters(_config_0); + const _config_2 = resolveUserAgentConfig(_config_1); + const _config_3 = resolveRetryConfig(_config_2); + const _config_4 = resolveRegionConfig(_config_3); + const _config_5 = resolveHostHeaderConfig(_config_4); + const _config_6 = resolveEndpointConfig(_config_5); + const _config_7 = resolveHttpAuthSchemeConfig(_config_6); + const _config_8 = resolveRuntimeExtensions(_config_7, configuration?.extensions || []); + this.config = _config_8; + this.middlewareStack.use(getUserAgentPlugin(this.config)); + this.middlewareStack.use(getRetryPlugin(this.config)); + this.middlewareStack.use(getContentLengthPlugin(this.config)); + this.middlewareStack.use(getHostHeaderPlugin(this.config)); + this.middlewareStack.use(getLoggerPlugin(this.config)); + this.middlewareStack.use(getRecursionDetectionPlugin(this.config)); + this.middlewareStack.use(getHttpAuthSchemeEndpointRuleSetPlugin(this.config, { + httpAuthSchemeParametersProvider: defaultSTSHttpAuthSchemeParametersProvider, + identityProviderConfigProvider: async (config) => new DefaultIdentityProviderConfig({ + "aws.auth#sigv4": config.credentials, + }), + })); + this.middlewareStack.use(getHttpSigningPlugin(this.config)); + } + destroy() { + super.destroy(); + } +} diff --git a/amplify/functions/deleteDocument/node_modules/@aws-sdk/nested-clients/dist-es/submodules/sts/auth/httpAuthExtensionConfiguration.js b/amplify/functions/deleteDocument/node_modules/@aws-sdk/nested-clients/dist-es/submodules/sts/auth/httpAuthExtensionConfiguration.js new file mode 100644 index 0000000..2ba1d48 --- /dev/null +++ 
b/amplify/functions/deleteDocument/node_modules/@aws-sdk/nested-clients/dist-es/submodules/sts/auth/httpAuthExtensionConfiguration.js @@ -0,0 +1,38 @@ +export const getHttpAuthExtensionConfiguration = (runtimeConfig) => { + const _httpAuthSchemes = runtimeConfig.httpAuthSchemes; + let _httpAuthSchemeProvider = runtimeConfig.httpAuthSchemeProvider; + let _credentials = runtimeConfig.credentials; + return { + setHttpAuthScheme(httpAuthScheme) { + const index = _httpAuthSchemes.findIndex((scheme) => scheme.schemeId === httpAuthScheme.schemeId); + if (index === -1) { + _httpAuthSchemes.push(httpAuthScheme); + } + else { + _httpAuthSchemes.splice(index, 1, httpAuthScheme); + } + }, + httpAuthSchemes() { + return _httpAuthSchemes; + }, + setHttpAuthSchemeProvider(httpAuthSchemeProvider) { + _httpAuthSchemeProvider = httpAuthSchemeProvider; + }, + httpAuthSchemeProvider() { + return _httpAuthSchemeProvider; + }, + setCredentials(credentials) { + _credentials = credentials; + }, + credentials() { + return _credentials; + }, + }; +}; +export const resolveHttpAuthRuntimeConfig = (config) => { + return { + httpAuthSchemes: config.httpAuthSchemes(), + httpAuthSchemeProvider: config.httpAuthSchemeProvider(), + credentials: config.credentials(), + }; +}; diff --git a/amplify/functions/deleteDocument/node_modules/@aws-sdk/nested-clients/dist-es/submodules/sts/auth/httpAuthSchemeProvider.js b/amplify/functions/deleteDocument/node_modules/@aws-sdk/nested-clients/dist-es/submodules/sts/auth/httpAuthSchemeProvider.js new file mode 100644 index 0000000..3ea1e49 --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@aws-sdk/nested-clients/dist-es/submodules/sts/auth/httpAuthSchemeProvider.js @@ -0,0 +1,55 @@ +import { resolveAwsSdkSigV4Config, } from "@aws-sdk/core"; +import { getSmithyContext, normalizeProvider } from "@smithy/util-middleware"; +import { STSClient } from "../STSClient"; +export const defaultSTSHttpAuthSchemeParametersProvider = async (config, context, 
input) => { + return { + operation: getSmithyContext(context).operation, + region: (await normalizeProvider(config.region)()) || + (() => { + throw new Error("expected `region` to be configured for `aws.auth#sigv4`"); + })(), + }; +}; +function createAwsAuthSigv4HttpAuthOption(authParameters) { + return { + schemeId: "aws.auth#sigv4", + signingProperties: { + name: "sts", + region: authParameters.region, + }, + propertiesExtractor: (config, context) => ({ + signingProperties: { + config, + context, + }, + }), + }; +} +function createSmithyApiNoAuthHttpAuthOption(authParameters) { + return { + schemeId: "smithy.api#noAuth", + }; +} +export const defaultSTSHttpAuthSchemeProvider = (authParameters) => { + const options = []; + switch (authParameters.operation) { + case "AssumeRoleWithWebIdentity": { + options.push(createSmithyApiNoAuthHttpAuthOption(authParameters)); + break; + } + default: { + options.push(createAwsAuthSigv4HttpAuthOption(authParameters)); + } + } + return options; +}; +export const resolveStsAuthConfig = (input) => Object.assign(input, { + stsClientCtor: STSClient, +}); +export const resolveHttpAuthSchemeConfig = (config) => { + const config_0 = resolveStsAuthConfig(config); + const config_1 = resolveAwsSdkSigV4Config(config_0); + return Object.assign(config_1, { + authSchemePreference: normalizeProvider(config.authSchemePreference ?? 
[]), + }); +}; diff --git a/amplify/functions/deleteDocument/node_modules/@aws-sdk/nested-clients/dist-es/submodules/sts/commands/AssumeRoleCommand.js b/amplify/functions/deleteDocument/node_modules/@aws-sdk/nested-clients/dist-es/submodules/sts/commands/AssumeRoleCommand.js new file mode 100644 index 0000000..bcb8589 --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@aws-sdk/nested-clients/dist-es/submodules/sts/commands/AssumeRoleCommand.js @@ -0,0 +1,23 @@ +import { getEndpointPlugin } from "@smithy/middleware-endpoint"; +import { getSerdePlugin } from "@smithy/middleware-serde"; +import { Command as $Command } from "@smithy/smithy-client"; +import { commonParams } from "../endpoint/EndpointParameters"; +import { AssumeRoleResponseFilterSensitiveLog } from "../models/models_0"; +import { de_AssumeRoleCommand, se_AssumeRoleCommand } from "../protocols/Aws_query"; +export { $Command }; +export class AssumeRoleCommand extends $Command + .classBuilder() + .ep(commonParams) + .m(function (Command, cs, config, o) { + return [ + getSerdePlugin(config, this.serialize, this.deserialize), + getEndpointPlugin(config, Command.getEndpointParameterInstructions()), + ]; +}) + .s("AWSSecurityTokenServiceV20110615", "AssumeRole", {}) + .n("STSClient", "AssumeRoleCommand") + .f(void 0, AssumeRoleResponseFilterSensitiveLog) + .ser(se_AssumeRoleCommand) + .de(de_AssumeRoleCommand) + .build() { +} diff --git a/amplify/functions/deleteDocument/node_modules/@aws-sdk/nested-clients/dist-es/submodules/sts/commands/AssumeRoleWithWebIdentityCommand.js b/amplify/functions/deleteDocument/node_modules/@aws-sdk/nested-clients/dist-es/submodules/sts/commands/AssumeRoleWithWebIdentityCommand.js new file mode 100644 index 0000000..e4ecc2e --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@aws-sdk/nested-clients/dist-es/submodules/sts/commands/AssumeRoleWithWebIdentityCommand.js @@ -0,0 +1,23 @@ +import { getEndpointPlugin } from "@smithy/middleware-endpoint"; 
+import { getSerdePlugin } from "@smithy/middleware-serde"; +import { Command as $Command } from "@smithy/smithy-client"; +import { commonParams } from "../endpoint/EndpointParameters"; +import { AssumeRoleWithWebIdentityRequestFilterSensitiveLog, AssumeRoleWithWebIdentityResponseFilterSensitiveLog, } from "../models/models_0"; +import { de_AssumeRoleWithWebIdentityCommand, se_AssumeRoleWithWebIdentityCommand } from "../protocols/Aws_query"; +export { $Command }; +export class AssumeRoleWithWebIdentityCommand extends $Command + .classBuilder() + .ep(commonParams) + .m(function (Command, cs, config, o) { + return [ + getSerdePlugin(config, this.serialize, this.deserialize), + getEndpointPlugin(config, Command.getEndpointParameterInstructions()), + ]; +}) + .s("AWSSecurityTokenServiceV20110615", "AssumeRoleWithWebIdentity", {}) + .n("STSClient", "AssumeRoleWithWebIdentityCommand") + .f(AssumeRoleWithWebIdentityRequestFilterSensitiveLog, AssumeRoleWithWebIdentityResponseFilterSensitiveLog) + .ser(se_AssumeRoleWithWebIdentityCommand) + .de(de_AssumeRoleWithWebIdentityCommand) + .build() { +} diff --git a/amplify/functions/deleteDocument/node_modules/@aws-sdk/nested-clients/dist-es/submodules/sts/commands/index.js b/amplify/functions/deleteDocument/node_modules/@aws-sdk/nested-clients/dist-es/submodules/sts/commands/index.js new file mode 100644 index 0000000..0f200f5 --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@aws-sdk/nested-clients/dist-es/submodules/sts/commands/index.js @@ -0,0 +1,2 @@ +export * from "./AssumeRoleCommand"; +export * from "./AssumeRoleWithWebIdentityCommand"; diff --git a/amplify/functions/deleteDocument/node_modules/@aws-sdk/nested-clients/dist-es/submodules/sts/defaultRoleAssumers.js b/amplify/functions/deleteDocument/node_modules/@aws-sdk/nested-clients/dist-es/submodules/sts/defaultRoleAssumers.js new file mode 100644 index 0000000..aafb8c4 --- /dev/null +++ 
b/amplify/functions/deleteDocument/node_modules/@aws-sdk/nested-clients/dist-es/submodules/sts/defaultRoleAssumers.js @@ -0,0 +1,22 @@ +import { getDefaultRoleAssumer as StsGetDefaultRoleAssumer, getDefaultRoleAssumerWithWebIdentity as StsGetDefaultRoleAssumerWithWebIdentity, } from "./defaultStsRoleAssumers"; +import { STSClient } from "./STSClient"; +const getCustomizableStsClientCtor = (baseCtor, customizations) => { + if (!customizations) + return baseCtor; + else + return class CustomizableSTSClient extends baseCtor { + constructor(config) { + super(config); + for (const customization of customizations) { + this.middlewareStack.use(customization); + } + } + }; +}; +export const getDefaultRoleAssumer = (stsOptions = {}, stsPlugins) => StsGetDefaultRoleAssumer(stsOptions, getCustomizableStsClientCtor(STSClient, stsPlugins)); +export const getDefaultRoleAssumerWithWebIdentity = (stsOptions = {}, stsPlugins) => StsGetDefaultRoleAssumerWithWebIdentity(stsOptions, getCustomizableStsClientCtor(STSClient, stsPlugins)); +export const decorateDefaultCredentialProvider = (provider) => (input) => provider({ + roleAssumer: getDefaultRoleAssumer(input), + roleAssumerWithWebIdentity: getDefaultRoleAssumerWithWebIdentity(input), + ...input, +}); diff --git a/amplify/functions/deleteDocument/node_modules/@aws-sdk/nested-clients/dist-es/submodules/sts/defaultStsRoleAssumers.js b/amplify/functions/deleteDocument/node_modules/@aws-sdk/nested-clients/dist-es/submodules/sts/defaultStsRoleAssumers.js new file mode 100644 index 0000000..e7c7a90 --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@aws-sdk/nested-clients/dist-es/submodules/sts/defaultStsRoleAssumers.js @@ -0,0 +1,95 @@ +import { setCredentialFeature } from "@aws-sdk/core/client"; +import { AssumeRoleCommand } from "./commands/AssumeRoleCommand"; +import { AssumeRoleWithWebIdentityCommand, } from "./commands/AssumeRoleWithWebIdentityCommand"; +const ASSUME_ROLE_DEFAULT_REGION = "us-east-1"; +const 
getAccountIdFromAssumedRoleUser = (assumedRoleUser) => { + if (typeof assumedRoleUser?.Arn === "string") { + const arnComponents = assumedRoleUser.Arn.split(":"); + if (arnComponents.length > 4 && arnComponents[4] !== "") { + return arnComponents[4]; + } + } + return undefined; +}; +const resolveRegion = async (_region, _parentRegion, credentialProviderLogger) => { + const region = typeof _region === "function" ? await _region() : _region; + const parentRegion = typeof _parentRegion === "function" ? await _parentRegion() : _parentRegion; + credentialProviderLogger?.debug?.("@aws-sdk/client-sts::resolveRegion", "accepting first of:", `${region} (provider)`, `${parentRegion} (parent client)`, `${ASSUME_ROLE_DEFAULT_REGION} (STS default)`); + return region ?? parentRegion ?? ASSUME_ROLE_DEFAULT_REGION; +}; +export const getDefaultRoleAssumer = (stsOptions, STSClient) => { + let stsClient; + let closureSourceCreds; + return async (sourceCreds, params) => { + closureSourceCreds = sourceCreds; + if (!stsClient) { + const { logger = stsOptions?.parentClientConfig?.logger, region, requestHandler = stsOptions?.parentClientConfig?.requestHandler, credentialProviderLogger, } = stsOptions; + const resolvedRegion = await resolveRegion(region, stsOptions?.parentClientConfig?.region, credentialProviderLogger); + const isCompatibleRequestHandler = !isH2(requestHandler); + stsClient = new STSClient({ + profile: stsOptions?.parentClientConfig?.profile, + credentialDefaultProvider: () => async () => closureSourceCreds, + region: resolvedRegion, + requestHandler: isCompatibleRequestHandler ? 
requestHandler : undefined, + logger: logger, + }); + } + const { Credentials, AssumedRoleUser } = await stsClient.send(new AssumeRoleCommand(params)); + if (!Credentials || !Credentials.AccessKeyId || !Credentials.SecretAccessKey) { + throw new Error(`Invalid response from STS.assumeRole call with role ${params.RoleArn}`); + } + const accountId = getAccountIdFromAssumedRoleUser(AssumedRoleUser); + const credentials = { + accessKeyId: Credentials.AccessKeyId, + secretAccessKey: Credentials.SecretAccessKey, + sessionToken: Credentials.SessionToken, + expiration: Credentials.Expiration, + ...(Credentials.CredentialScope && { credentialScope: Credentials.CredentialScope }), + ...(accountId && { accountId }), + }; + setCredentialFeature(credentials, "CREDENTIALS_STS_ASSUME_ROLE", "i"); + return credentials; + }; +}; +export const getDefaultRoleAssumerWithWebIdentity = (stsOptions, STSClient) => { + let stsClient; + return async (params) => { + if (!stsClient) { + const { logger = stsOptions?.parentClientConfig?.logger, region, requestHandler = stsOptions?.parentClientConfig?.requestHandler, credentialProviderLogger, } = stsOptions; + const resolvedRegion = await resolveRegion(region, stsOptions?.parentClientConfig?.region, credentialProviderLogger); + const isCompatibleRequestHandler = !isH2(requestHandler); + stsClient = new STSClient({ + profile: stsOptions?.parentClientConfig?.profile, + region: resolvedRegion, + requestHandler: isCompatibleRequestHandler ? 
requestHandler : undefined, + logger: logger, + }); + } + const { Credentials, AssumedRoleUser } = await stsClient.send(new AssumeRoleWithWebIdentityCommand(params)); + if (!Credentials || !Credentials.AccessKeyId || !Credentials.SecretAccessKey) { + throw new Error(`Invalid response from STS.assumeRoleWithWebIdentity call with role ${params.RoleArn}`); + } + const accountId = getAccountIdFromAssumedRoleUser(AssumedRoleUser); + const credentials = { + accessKeyId: Credentials.AccessKeyId, + secretAccessKey: Credentials.SecretAccessKey, + sessionToken: Credentials.SessionToken, + expiration: Credentials.Expiration, + ...(Credentials.CredentialScope && { credentialScope: Credentials.CredentialScope }), + ...(accountId && { accountId }), + }; + if (accountId) { + setCredentialFeature(credentials, "RESOLVED_ACCOUNT_ID", "T"); + } + setCredentialFeature(credentials, "CREDENTIALS_STS_ASSUME_ROLE_WEB_ID", "k"); + return credentials; + }; +}; +export const decorateDefaultCredentialProvider = (provider) => (input) => provider({ + roleAssumer: getDefaultRoleAssumer(input, input.stsClientCtor), + roleAssumerWithWebIdentity: getDefaultRoleAssumerWithWebIdentity(input, input.stsClientCtor), + ...input, +}); +const isH2 = (requestHandler) => { + return requestHandler?.metadata?.handlerProtocol === "h2"; +}; diff --git a/amplify/functions/deleteDocument/node_modules/@aws-sdk/nested-clients/dist-es/submodules/sts/endpoint/EndpointParameters.js b/amplify/functions/deleteDocument/node_modules/@aws-sdk/nested-clients/dist-es/submodules/sts/endpoint/EndpointParameters.js new file mode 100644 index 0000000..1c74b01 --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@aws-sdk/nested-clients/dist-es/submodules/sts/endpoint/EndpointParameters.js @@ -0,0 +1,15 @@ +export const resolveClientEndpointParameters = (options) => { + return Object.assign(options, { + useDualstackEndpoint: options.useDualstackEndpoint ?? false, + useFipsEndpoint: options.useFipsEndpoint ?? 
false, + useGlobalEndpoint: options.useGlobalEndpoint ?? false, + defaultSigningName: "sts", + }); +}; +export const commonParams = { + UseGlobalEndpoint: { type: "builtInParams", name: "useGlobalEndpoint" }, + UseFIPS: { type: "builtInParams", name: "useFipsEndpoint" }, + Endpoint: { type: "builtInParams", name: "endpoint" }, + Region: { type: "builtInParams", name: "region" }, + UseDualStack: { type: "builtInParams", name: "useDualstackEndpoint" }, +}; diff --git a/amplify/functions/deleteDocument/node_modules/@aws-sdk/nested-clients/dist-es/submodules/sts/endpoint/endpointResolver.js b/amplify/functions/deleteDocument/node_modules/@aws-sdk/nested-clients/dist-es/submodules/sts/endpoint/endpointResolver.js new file mode 100644 index 0000000..f54d279 --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@aws-sdk/nested-clients/dist-es/submodules/sts/endpoint/endpointResolver.js @@ -0,0 +1,14 @@ +import { awsEndpointFunctions } from "@aws-sdk/util-endpoints"; +import { customEndpointFunctions, EndpointCache, resolveEndpoint } from "@smithy/util-endpoints"; +import { ruleSet } from "./ruleset"; +const cache = new EndpointCache({ + size: 50, + params: ["Endpoint", "Region", "UseDualStack", "UseFIPS", "UseGlobalEndpoint"], +}); +export const defaultEndpointResolver = (endpointParams, context = {}) => { + return cache.get(endpointParams, () => resolveEndpoint(ruleSet, { + endpointParams: endpointParams, + logger: context.logger, + })); +}; +customEndpointFunctions.aws = awsEndpointFunctions; diff --git a/amplify/functions/deleteDocument/node_modules/@aws-sdk/nested-clients/dist-es/submodules/sts/endpoint/ruleset.js b/amplify/functions/deleteDocument/node_modules/@aws-sdk/nested-clients/dist-es/submodules/sts/endpoint/ruleset.js new file mode 100644 index 0000000..99a438a --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@aws-sdk/nested-clients/dist-es/submodules/sts/endpoint/ruleset.js @@ -0,0 +1,4 @@ +const F = "required", G = "type", H = 
"fn", I = "argv", J = "ref"; +const a = false, b = true, c = "booleanEquals", d = "stringEquals", e = "sigv4", f = "sts", g = "us-east-1", h = "endpoint", i = "https://sts.{Region}.{PartitionResult#dnsSuffix}", j = "tree", k = "error", l = "getAttr", m = { [F]: false, [G]: "String" }, n = { [F]: true, "default": false, [G]: "Boolean" }, o = { [J]: "Endpoint" }, p = { [H]: "isSet", [I]: [{ [J]: "Region" }] }, q = { [J]: "Region" }, r = { [H]: "aws.partition", [I]: [q], "assign": "PartitionResult" }, s = { [J]: "UseFIPS" }, t = { [J]: "UseDualStack" }, u = { "url": "https://sts.amazonaws.com", "properties": { "authSchemes": [{ "name": e, "signingName": f, "signingRegion": g }] }, "headers": {} }, v = {}, w = { "conditions": [{ [H]: d, [I]: [q, "aws-global"] }], [h]: u, [G]: h }, x = { [H]: c, [I]: [s, true] }, y = { [H]: c, [I]: [t, true] }, z = { [H]: l, [I]: [{ [J]: "PartitionResult" }, "supportsFIPS"] }, A = { [J]: "PartitionResult" }, B = { [H]: c, [I]: [true, { [H]: l, [I]: [A, "supportsDualStack"] }] }, C = [{ [H]: "isSet", [I]: [o] }], D = [x], E = [y]; +const _data = { version: "1.0", parameters: { Region: m, UseDualStack: n, UseFIPS: n, Endpoint: m, UseGlobalEndpoint: n }, rules: [{ conditions: [{ [H]: c, [I]: [{ [J]: "UseGlobalEndpoint" }, b] }, { [H]: "not", [I]: C }, p, r, { [H]: c, [I]: [s, a] }, { [H]: c, [I]: [t, a] }], rules: [{ conditions: [{ [H]: d, [I]: [q, "ap-northeast-1"] }], endpoint: u, [G]: h }, { conditions: [{ [H]: d, [I]: [q, "ap-south-1"] }], endpoint: u, [G]: h }, { conditions: [{ [H]: d, [I]: [q, "ap-southeast-1"] }], endpoint: u, [G]: h }, { conditions: [{ [H]: d, [I]: [q, "ap-southeast-2"] }], endpoint: u, [G]: h }, w, { conditions: [{ [H]: d, [I]: [q, "ca-central-1"] }], endpoint: u, [G]: h }, { conditions: [{ [H]: d, [I]: [q, "eu-central-1"] }], endpoint: u, [G]: h }, { conditions: [{ [H]: d, [I]: [q, "eu-north-1"] }], endpoint: u, [G]: h }, { conditions: [{ [H]: d, [I]: [q, "eu-west-1"] }], endpoint: u, [G]: h }, { conditions: [{ 
[H]: d, [I]: [q, "eu-west-2"] }], endpoint: u, [G]: h }, { conditions: [{ [H]: d, [I]: [q, "eu-west-3"] }], endpoint: u, [G]: h }, { conditions: [{ [H]: d, [I]: [q, "sa-east-1"] }], endpoint: u, [G]: h }, { conditions: [{ [H]: d, [I]: [q, g] }], endpoint: u, [G]: h }, { conditions: [{ [H]: d, [I]: [q, "us-east-2"] }], endpoint: u, [G]: h }, { conditions: [{ [H]: d, [I]: [q, "us-west-1"] }], endpoint: u, [G]: h }, { conditions: [{ [H]: d, [I]: [q, "us-west-2"] }], endpoint: u, [G]: h }, { endpoint: { url: i, properties: { authSchemes: [{ name: e, signingName: f, signingRegion: "{Region}" }] }, headers: v }, [G]: h }], [G]: j }, { conditions: C, rules: [{ conditions: D, error: "Invalid Configuration: FIPS and custom endpoint are not supported", [G]: k }, { conditions: E, error: "Invalid Configuration: Dualstack and custom endpoint are not supported", [G]: k }, { endpoint: { url: o, properties: v, headers: v }, [G]: h }], [G]: j }, { conditions: [p], rules: [{ conditions: [r], rules: [{ conditions: [x, y], rules: [{ conditions: [{ [H]: c, [I]: [b, z] }, B], rules: [{ endpoint: { url: "https://sts-fips.{Region}.{PartitionResult#dualStackDnsSuffix}", properties: v, headers: v }, [G]: h }], [G]: j }, { error: "FIPS and DualStack are enabled, but this partition does not support one or both", [G]: k }], [G]: j }, { conditions: D, rules: [{ conditions: [{ [H]: c, [I]: [z, b] }], rules: [{ conditions: [{ [H]: d, [I]: [{ [H]: l, [I]: [A, "name"] }, "aws-us-gov"] }], endpoint: { url: "https://sts.{Region}.amazonaws.com", properties: v, headers: v }, [G]: h }, { endpoint: { url: "https://sts-fips.{Region}.{PartitionResult#dnsSuffix}", properties: v, headers: v }, [G]: h }], [G]: j }, { error: "FIPS is enabled but this partition does not support FIPS", [G]: k }], [G]: j }, { conditions: E, rules: [{ conditions: [B], rules: [{ endpoint: { url: "https://sts.{Region}.{PartitionResult#dualStackDnsSuffix}", properties: v, headers: v }, [G]: h }], [G]: j }, { error: "DualStack is 
enabled but this partition does not support DualStack", [G]: k }], [G]: j }, w, { endpoint: { url: i, properties: v, headers: v }, [G]: h }], [G]: j }], [G]: j }, { error: "Invalid Configuration: Missing Region", [G]: k }] }; +export const ruleSet = _data; diff --git a/amplify/functions/deleteDocument/node_modules/@aws-sdk/nested-clients/dist-es/submodules/sts/extensionConfiguration.js b/amplify/functions/deleteDocument/node_modules/@aws-sdk/nested-clients/dist-es/submodules/sts/extensionConfiguration.js new file mode 100644 index 0000000..cb0ff5c --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@aws-sdk/nested-clients/dist-es/submodules/sts/extensionConfiguration.js @@ -0,0 +1 @@ +export {}; diff --git a/amplify/functions/deleteDocument/node_modules/@aws-sdk/nested-clients/dist-es/submodules/sts/index.js b/amplify/functions/deleteDocument/node_modules/@aws-sdk/nested-clients/dist-es/submodules/sts/index.js new file mode 100644 index 0000000..fa366be --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@aws-sdk/nested-clients/dist-es/submodules/sts/index.js @@ -0,0 +1,6 @@ +export * from "./STSClient"; +export * from "./STS"; +export * from "./commands"; +export * from "./models"; +export * from "./defaultRoleAssumers"; +export { STSServiceException } from "./models/STSServiceException"; diff --git a/amplify/functions/deleteDocument/node_modules/@aws-sdk/nested-clients/dist-es/submodules/sts/models/STSServiceException.js b/amplify/functions/deleteDocument/node_modules/@aws-sdk/nested-clients/dist-es/submodules/sts/models/STSServiceException.js new file mode 100644 index 0000000..6d2963c --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@aws-sdk/nested-clients/dist-es/submodules/sts/models/STSServiceException.js @@ -0,0 +1,8 @@ +import { ServiceException as __ServiceException, } from "@smithy/smithy-client"; +export { __ServiceException }; +export class STSServiceException extends __ServiceException { + 
constructor(options) { + super(options); + Object.setPrototypeOf(this, STSServiceException.prototype); + } +} diff --git a/amplify/functions/deleteDocument/node_modules/@aws-sdk/nested-clients/dist-es/submodules/sts/models/index.js b/amplify/functions/deleteDocument/node_modules/@aws-sdk/nested-clients/dist-es/submodules/sts/models/index.js new file mode 100644 index 0000000..09c5d6e --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@aws-sdk/nested-clients/dist-es/submodules/sts/models/index.js @@ -0,0 +1 @@ +export * from "./models_0"; diff --git a/amplify/functions/deleteDocument/node_modules/@aws-sdk/nested-clients/dist-es/submodules/sts/models/models_0.js b/amplify/functions/deleteDocument/node_modules/@aws-sdk/nested-clients/dist-es/submodules/sts/models/models_0.js new file mode 100644 index 0000000..63e9c52 --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@aws-sdk/nested-clients/dist-es/submodules/sts/models/models_0.js @@ -0,0 +1,102 @@ +import { SENSITIVE_STRING } from "@smithy/smithy-client"; +import { STSServiceException as __BaseException } from "./STSServiceException"; +export const CredentialsFilterSensitiveLog = (obj) => ({ + ...obj, + ...(obj.SecretAccessKey && { SecretAccessKey: SENSITIVE_STRING }), +}); +export const AssumeRoleResponseFilterSensitiveLog = (obj) => ({ + ...obj, + ...(obj.Credentials && { Credentials: CredentialsFilterSensitiveLog(obj.Credentials) }), +}); +export class ExpiredTokenException extends __BaseException { + name = "ExpiredTokenException"; + $fault = "client"; + constructor(opts) { + super({ + name: "ExpiredTokenException", + $fault: "client", + ...opts, + }); + Object.setPrototypeOf(this, ExpiredTokenException.prototype); + } +} +export class MalformedPolicyDocumentException extends __BaseException { + name = "MalformedPolicyDocumentException"; + $fault = "client"; + constructor(opts) { + super({ + name: "MalformedPolicyDocumentException", + $fault: "client", + ...opts, + }); + 
Object.setPrototypeOf(this, MalformedPolicyDocumentException.prototype); + } +} +export class PackedPolicyTooLargeException extends __BaseException { + name = "PackedPolicyTooLargeException"; + $fault = "client"; + constructor(opts) { + super({ + name: "PackedPolicyTooLargeException", + $fault: "client", + ...opts, + }); + Object.setPrototypeOf(this, PackedPolicyTooLargeException.prototype); + } +} +export class RegionDisabledException extends __BaseException { + name = "RegionDisabledException"; + $fault = "client"; + constructor(opts) { + super({ + name: "RegionDisabledException", + $fault: "client", + ...opts, + }); + Object.setPrototypeOf(this, RegionDisabledException.prototype); + } +} +export class IDPRejectedClaimException extends __BaseException { + name = "IDPRejectedClaimException"; + $fault = "client"; + constructor(opts) { + super({ + name: "IDPRejectedClaimException", + $fault: "client", + ...opts, + }); + Object.setPrototypeOf(this, IDPRejectedClaimException.prototype); + } +} +export class InvalidIdentityTokenException extends __BaseException { + name = "InvalidIdentityTokenException"; + $fault = "client"; + constructor(opts) { + super({ + name: "InvalidIdentityTokenException", + $fault: "client", + ...opts, + }); + Object.setPrototypeOf(this, InvalidIdentityTokenException.prototype); + } +} +export const AssumeRoleWithWebIdentityRequestFilterSensitiveLog = (obj) => ({ + ...obj, + ...(obj.WebIdentityToken && { WebIdentityToken: SENSITIVE_STRING }), +}); +export const AssumeRoleWithWebIdentityResponseFilterSensitiveLog = (obj) => ({ + ...obj, + ...(obj.Credentials && { Credentials: CredentialsFilterSensitiveLog(obj.Credentials) }), +}); +export class IDPCommunicationErrorException extends __BaseException { + name = "IDPCommunicationErrorException"; + $fault = "client"; + constructor(opts) { + super({ + name: "IDPCommunicationErrorException", + $fault: "client", + ...opts, + }); + Object.setPrototypeOf(this, IDPCommunicationErrorException.prototype); + 
} +} diff --git a/amplify/functions/deleteDocument/node_modules/@aws-sdk/nested-clients/dist-es/submodules/sts/protocols/Aws_query.js b/amplify/functions/deleteDocument/node_modules/@aws-sdk/nested-clients/dist-es/submodules/sts/protocols/Aws_query.js new file mode 100644 index 0000000..a98e41a --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@aws-sdk/nested-clients/dist-es/submodules/sts/protocols/Aws_query.js @@ -0,0 +1,528 @@ +import { parseXmlBody as parseBody, parseXmlErrorBody as parseErrorBody } from "@aws-sdk/core"; +import { HttpRequest as __HttpRequest } from "@smithy/protocol-http"; +import { collectBody, decorateServiceException as __decorateServiceException, expectNonNull as __expectNonNull, expectString as __expectString, extendedEncodeURIComponent as __extendedEncodeURIComponent, parseRfc3339DateTimeWithOffset as __parseRfc3339DateTimeWithOffset, strictParseInt32 as __strictParseInt32, withBaseException, } from "@smithy/smithy-client"; +import { ExpiredTokenException, IDPCommunicationErrorException, IDPRejectedClaimException, InvalidIdentityTokenException, MalformedPolicyDocumentException, PackedPolicyTooLargeException, RegionDisabledException, } from "../models/models_0"; +import { STSServiceException as __BaseException } from "../models/STSServiceException"; +export const se_AssumeRoleCommand = async (input, context) => { + const headers = SHARED_HEADERS; + let body; + body = buildFormUrlencodedString({ + ...se_AssumeRoleRequest(input, context), + [_A]: _AR, + [_V]: _, + }); + return buildHttpRpcRequest(context, headers, "/", undefined, body); +}; +export const se_AssumeRoleWithWebIdentityCommand = async (input, context) => { + const headers = SHARED_HEADERS; + let body; + body = buildFormUrlencodedString({ + ...se_AssumeRoleWithWebIdentityRequest(input, context), + [_A]: _ARWWI, + [_V]: _, + }); + return buildHttpRpcRequest(context, headers, "/", undefined, body); +}; +export const de_AssumeRoleCommand = async (output, context) => 
{ + if (output.statusCode >= 300) { + return de_CommandError(output, context); + } + const data = await parseBody(output.body, context); + let contents = {}; + contents = de_AssumeRoleResponse(data.AssumeRoleResult, context); + const response = { + $metadata: deserializeMetadata(output), + ...contents, + }; + return response; +}; +export const de_AssumeRoleWithWebIdentityCommand = async (output, context) => { + if (output.statusCode >= 300) { + return de_CommandError(output, context); + } + const data = await parseBody(output.body, context); + let contents = {}; + contents = de_AssumeRoleWithWebIdentityResponse(data.AssumeRoleWithWebIdentityResult, context); + const response = { + $metadata: deserializeMetadata(output), + ...contents, + }; + return response; +}; +const de_CommandError = async (output, context) => { + const parsedOutput = { + ...output, + body: await parseErrorBody(output.body, context), + }; + const errorCode = loadQueryErrorCode(output, parsedOutput.body); + switch (errorCode) { + case "ExpiredTokenException": + case "com.amazonaws.sts#ExpiredTokenException": + throw await de_ExpiredTokenExceptionRes(parsedOutput, context); + case "MalformedPolicyDocument": + case "com.amazonaws.sts#MalformedPolicyDocumentException": + throw await de_MalformedPolicyDocumentExceptionRes(parsedOutput, context); + case "PackedPolicyTooLarge": + case "com.amazonaws.sts#PackedPolicyTooLargeException": + throw await de_PackedPolicyTooLargeExceptionRes(parsedOutput, context); + case "RegionDisabledException": + case "com.amazonaws.sts#RegionDisabledException": + throw await de_RegionDisabledExceptionRes(parsedOutput, context); + case "IDPCommunicationError": + case "com.amazonaws.sts#IDPCommunicationErrorException": + throw await de_IDPCommunicationErrorExceptionRes(parsedOutput, context); + case "IDPRejectedClaim": + case "com.amazonaws.sts#IDPRejectedClaimException": + throw await de_IDPRejectedClaimExceptionRes(parsedOutput, context); + case "InvalidIdentityToken": + 
case "com.amazonaws.sts#InvalidIdentityTokenException": + throw await de_InvalidIdentityTokenExceptionRes(parsedOutput, context); + default: + const parsedBody = parsedOutput.body; + return throwDefaultError({ + output, + parsedBody: parsedBody.Error, + errorCode, + }); + } +}; +const de_ExpiredTokenExceptionRes = async (parsedOutput, context) => { + const body = parsedOutput.body; + const deserialized = de_ExpiredTokenException(body.Error, context); + const exception = new ExpiredTokenException({ + $metadata: deserializeMetadata(parsedOutput), + ...deserialized, + }); + return __decorateServiceException(exception, body); +}; +const de_IDPCommunicationErrorExceptionRes = async (parsedOutput, context) => { + const body = parsedOutput.body; + const deserialized = de_IDPCommunicationErrorException(body.Error, context); + const exception = new IDPCommunicationErrorException({ + $metadata: deserializeMetadata(parsedOutput), + ...deserialized, + }); + return __decorateServiceException(exception, body); +}; +const de_IDPRejectedClaimExceptionRes = async (parsedOutput, context) => { + const body = parsedOutput.body; + const deserialized = de_IDPRejectedClaimException(body.Error, context); + const exception = new IDPRejectedClaimException({ + $metadata: deserializeMetadata(parsedOutput), + ...deserialized, + }); + return __decorateServiceException(exception, body); +}; +const de_InvalidIdentityTokenExceptionRes = async (parsedOutput, context) => { + const body = parsedOutput.body; + const deserialized = de_InvalidIdentityTokenException(body.Error, context); + const exception = new InvalidIdentityTokenException({ + $metadata: deserializeMetadata(parsedOutput), + ...deserialized, + }); + return __decorateServiceException(exception, body); +}; +const de_MalformedPolicyDocumentExceptionRes = async (parsedOutput, context) => { + const body = parsedOutput.body; + const deserialized = de_MalformedPolicyDocumentException(body.Error, context); + const exception = new 
MalformedPolicyDocumentException({ + $metadata: deserializeMetadata(parsedOutput), + ...deserialized, + }); + return __decorateServiceException(exception, body); +}; +const de_PackedPolicyTooLargeExceptionRes = async (parsedOutput, context) => { + const body = parsedOutput.body; + const deserialized = de_PackedPolicyTooLargeException(body.Error, context); + const exception = new PackedPolicyTooLargeException({ + $metadata: deserializeMetadata(parsedOutput), + ...deserialized, + }); + return __decorateServiceException(exception, body); +}; +const de_RegionDisabledExceptionRes = async (parsedOutput, context) => { + const body = parsedOutput.body; + const deserialized = de_RegionDisabledException(body.Error, context); + const exception = new RegionDisabledException({ + $metadata: deserializeMetadata(parsedOutput), + ...deserialized, + }); + return __decorateServiceException(exception, body); +}; +const se_AssumeRoleRequest = (input, context) => { + const entries = {}; + if (input[_RA] != null) { + entries[_RA] = input[_RA]; + } + if (input[_RSN] != null) { + entries[_RSN] = input[_RSN]; + } + if (input[_PA] != null) { + const memberEntries = se_policyDescriptorListType(input[_PA], context); + if (input[_PA]?.length === 0) { + entries.PolicyArns = []; + } + Object.entries(memberEntries).forEach(([key, value]) => { + const loc = `PolicyArns.${key}`; + entries[loc] = value; + }); + } + if (input[_P] != null) { + entries[_P] = input[_P]; + } + if (input[_DS] != null) { + entries[_DS] = input[_DS]; + } + if (input[_T] != null) { + const memberEntries = se_tagListType(input[_T], context); + if (input[_T]?.length === 0) { + entries.Tags = []; + } + Object.entries(memberEntries).forEach(([key, value]) => { + const loc = `Tags.${key}`; + entries[loc] = value; + }); + } + if (input[_TTK] != null) { + const memberEntries = se_tagKeyListType(input[_TTK], context); + if (input[_TTK]?.length === 0) { + entries.TransitiveTagKeys = []; + } + 
Object.entries(memberEntries).forEach(([key, value]) => { + const loc = `TransitiveTagKeys.${key}`; + entries[loc] = value; + }); + } + if (input[_EI] != null) { + entries[_EI] = input[_EI]; + } + if (input[_SN] != null) { + entries[_SN] = input[_SN]; + } + if (input[_TC] != null) { + entries[_TC] = input[_TC]; + } + if (input[_SI] != null) { + entries[_SI] = input[_SI]; + } + if (input[_PC] != null) { + const memberEntries = se_ProvidedContextsListType(input[_PC], context); + if (input[_PC]?.length === 0) { + entries.ProvidedContexts = []; + } + Object.entries(memberEntries).forEach(([key, value]) => { + const loc = `ProvidedContexts.${key}`; + entries[loc] = value; + }); + } + return entries; +}; +const se_AssumeRoleWithWebIdentityRequest = (input, context) => { + const entries = {}; + if (input[_RA] != null) { + entries[_RA] = input[_RA]; + } + if (input[_RSN] != null) { + entries[_RSN] = input[_RSN]; + } + if (input[_WIT] != null) { + entries[_WIT] = input[_WIT]; + } + if (input[_PI] != null) { + entries[_PI] = input[_PI]; + } + if (input[_PA] != null) { + const memberEntries = se_policyDescriptorListType(input[_PA], context); + if (input[_PA]?.length === 0) { + entries.PolicyArns = []; + } + Object.entries(memberEntries).forEach(([key, value]) => { + const loc = `PolicyArns.${key}`; + entries[loc] = value; + }); + } + if (input[_P] != null) { + entries[_P] = input[_P]; + } + if (input[_DS] != null) { + entries[_DS] = input[_DS]; + } + return entries; +}; +const se_policyDescriptorListType = (input, context) => { + const entries = {}; + let counter = 1; + for (const entry of input) { + if (entry === null) { + continue; + } + const memberEntries = se_PolicyDescriptorType(entry, context); + Object.entries(memberEntries).forEach(([key, value]) => { + entries[`member.${counter}.${key}`] = value; + }); + counter++; + } + return entries; +}; +const se_PolicyDescriptorType = (input, context) => { + const entries = {}; + if (input[_a] != null) { + entries[_a] = 
input[_a]; + } + return entries; +}; +const se_ProvidedContext = (input, context) => { + const entries = {}; + if (input[_PAr] != null) { + entries[_PAr] = input[_PAr]; + } + if (input[_CA] != null) { + entries[_CA] = input[_CA]; + } + return entries; +}; +const se_ProvidedContextsListType = (input, context) => { + const entries = {}; + let counter = 1; + for (const entry of input) { + if (entry === null) { + continue; + } + const memberEntries = se_ProvidedContext(entry, context); + Object.entries(memberEntries).forEach(([key, value]) => { + entries[`member.${counter}.${key}`] = value; + }); + counter++; + } + return entries; +}; +const se_Tag = (input, context) => { + const entries = {}; + if (input[_K] != null) { + entries[_K] = input[_K]; + } + if (input[_Va] != null) { + entries[_Va] = input[_Va]; + } + return entries; +}; +const se_tagKeyListType = (input, context) => { + const entries = {}; + let counter = 1; + for (const entry of input) { + if (entry === null) { + continue; + } + entries[`member.${counter}`] = entry; + counter++; + } + return entries; +}; +const se_tagListType = (input, context) => { + const entries = {}; + let counter = 1; + for (const entry of input) { + if (entry === null) { + continue; + } + const memberEntries = se_Tag(entry, context); + Object.entries(memberEntries).forEach(([key, value]) => { + entries[`member.${counter}.${key}`] = value; + }); + counter++; + } + return entries; +}; +const de_AssumedRoleUser = (output, context) => { + const contents = {}; + if (output[_ARI] != null) { + contents[_ARI] = __expectString(output[_ARI]); + } + if (output[_Ar] != null) { + contents[_Ar] = __expectString(output[_Ar]); + } + return contents; +}; +const de_AssumeRoleResponse = (output, context) => { + const contents = {}; + if (output[_C] != null) { + contents[_C] = de_Credentials(output[_C], context); + } + if (output[_ARU] != null) { + contents[_ARU] = de_AssumedRoleUser(output[_ARU], context); + } + if (output[_PPS] != null) { + 
contents[_PPS] = __strictParseInt32(output[_PPS]); + } + if (output[_SI] != null) { + contents[_SI] = __expectString(output[_SI]); + } + return contents; +}; +const de_AssumeRoleWithWebIdentityResponse = (output, context) => { + const contents = {}; + if (output[_C] != null) { + contents[_C] = de_Credentials(output[_C], context); + } + if (output[_SFWIT] != null) { + contents[_SFWIT] = __expectString(output[_SFWIT]); + } + if (output[_ARU] != null) { + contents[_ARU] = de_AssumedRoleUser(output[_ARU], context); + } + if (output[_PPS] != null) { + contents[_PPS] = __strictParseInt32(output[_PPS]); + } + if (output[_Pr] != null) { + contents[_Pr] = __expectString(output[_Pr]); + } + if (output[_Au] != null) { + contents[_Au] = __expectString(output[_Au]); + } + if (output[_SI] != null) { + contents[_SI] = __expectString(output[_SI]); + } + return contents; +}; +const de_Credentials = (output, context) => { + const contents = {}; + if (output[_AKI] != null) { + contents[_AKI] = __expectString(output[_AKI]); + } + if (output[_SAK] != null) { + contents[_SAK] = __expectString(output[_SAK]); + } + if (output[_ST] != null) { + contents[_ST] = __expectString(output[_ST]); + } + if (output[_E] != null) { + contents[_E] = __expectNonNull(__parseRfc3339DateTimeWithOffset(output[_E])); + } + return contents; +}; +const de_ExpiredTokenException = (output, context) => { + const contents = {}; + if (output[_m] != null) { + contents[_m] = __expectString(output[_m]); + } + return contents; +}; +const de_IDPCommunicationErrorException = (output, context) => { + const contents = {}; + if (output[_m] != null) { + contents[_m] = __expectString(output[_m]); + } + return contents; +}; +const de_IDPRejectedClaimException = (output, context) => { + const contents = {}; + if (output[_m] != null) { + contents[_m] = __expectString(output[_m]); + } + return contents; +}; +const de_InvalidIdentityTokenException = (output, context) => { + const contents = {}; + if (output[_m] != null) { + 
contents[_m] = __expectString(output[_m]); + } + return contents; +}; +const de_MalformedPolicyDocumentException = (output, context) => { + const contents = {}; + if (output[_m] != null) { + contents[_m] = __expectString(output[_m]); + } + return contents; +}; +const de_PackedPolicyTooLargeException = (output, context) => { + const contents = {}; + if (output[_m] != null) { + contents[_m] = __expectString(output[_m]); + } + return contents; +}; +const de_RegionDisabledException = (output, context) => { + const contents = {}; + if (output[_m] != null) { + contents[_m] = __expectString(output[_m]); + } + return contents; +}; +const deserializeMetadata = (output) => ({ + httpStatusCode: output.statusCode, + requestId: output.headers["x-amzn-requestid"] ?? output.headers["x-amzn-request-id"] ?? output.headers["x-amz-request-id"], + extendedRequestId: output.headers["x-amz-id-2"], + cfId: output.headers["x-amz-cf-id"], +}); +const collectBodyString = (streamBody, context) => collectBody(streamBody, context).then((body) => context.utf8Encoder(body)); +const throwDefaultError = withBaseException(__BaseException); +const buildHttpRpcRequest = async (context, headers, path, resolvedHostname, body) => { + const { hostname, protocol = "https", port, path: basePath } = await context.endpoint(); + const contents = { + protocol, + hostname, + port, + method: "POST", + path: basePath.endsWith("/") ? 
basePath.slice(0, -1) + path : basePath + path, + headers, + }; + if (resolvedHostname !== undefined) { + contents.hostname = resolvedHostname; + } + if (body !== undefined) { + contents.body = body; + } + return new __HttpRequest(contents); +}; +const SHARED_HEADERS = { + "content-type": "application/x-www-form-urlencoded", +}; +const _ = "2011-06-15"; +const _A = "Action"; +const _AKI = "AccessKeyId"; +const _AR = "AssumeRole"; +const _ARI = "AssumedRoleId"; +const _ARU = "AssumedRoleUser"; +const _ARWWI = "AssumeRoleWithWebIdentity"; +const _Ar = "Arn"; +const _Au = "Audience"; +const _C = "Credentials"; +const _CA = "ContextAssertion"; +const _DS = "DurationSeconds"; +const _E = "Expiration"; +const _EI = "ExternalId"; +const _K = "Key"; +const _P = "Policy"; +const _PA = "PolicyArns"; +const _PAr = "ProviderArn"; +const _PC = "ProvidedContexts"; +const _PI = "ProviderId"; +const _PPS = "PackedPolicySize"; +const _Pr = "Provider"; +const _RA = "RoleArn"; +const _RSN = "RoleSessionName"; +const _SAK = "SecretAccessKey"; +const _SFWIT = "SubjectFromWebIdentityToken"; +const _SI = "SourceIdentity"; +const _SN = "SerialNumber"; +const _ST = "SessionToken"; +const _T = "Tags"; +const _TC = "TokenCode"; +const _TTK = "TransitiveTagKeys"; +const _V = "Version"; +const _Va = "Value"; +const _WIT = "WebIdentityToken"; +const _a = "arn"; +const _m = "message"; +const buildFormUrlencodedString = (formEntries) => Object.entries(formEntries) + .map(([key, value]) => __extendedEncodeURIComponent(key) + "=" + __extendedEncodeURIComponent(value)) + .join("&"); +const loadQueryErrorCode = (output, data) => { + if (data.Error?.Code !== undefined) { + return data.Error.Code; + } + if (output.statusCode == 404) { + return "NotFound"; + } +}; diff --git a/amplify/functions/deleteDocument/node_modules/@aws-sdk/nested-clients/dist-es/submodules/sts/runtimeConfig.browser.js 
b/amplify/functions/deleteDocument/node_modules/@aws-sdk/nested-clients/dist-es/submodules/sts/runtimeConfig.browser.js new file mode 100644 index 0000000..f45dbd3 --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@aws-sdk/nested-clients/dist-es/submodules/sts/runtimeConfig.browser.js @@ -0,0 +1,34 @@ +import packageInfo from "../../../package.json"; +import { Sha256 } from "@aws-crypto/sha256-browser"; +import { createDefaultUserAgentProvider } from "@aws-sdk/util-user-agent-browser"; +import { DEFAULT_USE_DUALSTACK_ENDPOINT, DEFAULT_USE_FIPS_ENDPOINT } from "@smithy/config-resolver"; +import { FetchHttpHandler as RequestHandler, streamCollector } from "@smithy/fetch-http-handler"; +import { invalidProvider } from "@smithy/invalid-dependency"; +import { calculateBodyLength } from "@smithy/util-body-length-browser"; +import { DEFAULT_MAX_ATTEMPTS, DEFAULT_RETRY_MODE } from "@smithy/util-retry"; +import { getRuntimeConfig as getSharedRuntimeConfig } from "./runtimeConfig.shared"; +import { loadConfigsForDefaultMode } from "@smithy/smithy-client"; +import { resolveDefaultsModeConfig } from "@smithy/util-defaults-mode-browser"; +export const getRuntimeConfig = (config) => { + const defaultsMode = resolveDefaultsModeConfig(config); + const defaultConfigProvider = () => defaultsMode().then(loadConfigsForDefaultMode); + const clientSharedValues = getSharedRuntimeConfig(config); + return { + ...clientSharedValues, + ...config, + runtime: "browser", + defaultsMode, + bodyLengthChecker: config?.bodyLengthChecker ?? calculateBodyLength, + credentialDefaultProvider: config?.credentialDefaultProvider ?? ((_) => () => Promise.reject(new Error("Credential is missing"))), + defaultUserAgentProvider: config?.defaultUserAgentProvider ?? + createDefaultUserAgentProvider({ serviceId: clientSharedValues.serviceId, clientVersion: packageInfo.version }), + maxAttempts: config?.maxAttempts ?? DEFAULT_MAX_ATTEMPTS, + region: config?.region ?? 
invalidProvider("Region is missing"), + requestHandler: RequestHandler.create(config?.requestHandler ?? defaultConfigProvider), + retryMode: config?.retryMode ?? (async () => (await defaultConfigProvider()).retryMode || DEFAULT_RETRY_MODE), + sha256: config?.sha256 ?? Sha256, + streamCollector: config?.streamCollector ?? streamCollector, + useDualstackEndpoint: config?.useDualstackEndpoint ?? (() => Promise.resolve(DEFAULT_USE_DUALSTACK_ENDPOINT)), + useFipsEndpoint: config?.useFipsEndpoint ?? (() => Promise.resolve(DEFAULT_USE_FIPS_ENDPOINT)), + }; +}; diff --git a/amplify/functions/deleteDocument/node_modules/@aws-sdk/nested-clients/dist-es/submodules/sts/runtimeConfig.js b/amplify/functions/deleteDocument/node_modules/@aws-sdk/nested-clients/dist-es/submodules/sts/runtimeConfig.js new file mode 100644 index 0000000..6ac2412 --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@aws-sdk/nested-clients/dist-es/submodules/sts/runtimeConfig.js @@ -0,0 +1,60 @@ +import packageInfo from "../../../package.json"; +import { AwsSdkSigV4Signer, NODE_AUTH_SCHEME_PREFERENCE_OPTIONS, emitWarningIfUnsupportedVersion as awsCheckVersion, } from "@aws-sdk/core"; +import { NODE_APP_ID_CONFIG_OPTIONS, createDefaultUserAgentProvider } from "@aws-sdk/util-user-agent-node"; +import { NODE_REGION_CONFIG_FILE_OPTIONS, NODE_REGION_CONFIG_OPTIONS, NODE_USE_DUALSTACK_ENDPOINT_CONFIG_OPTIONS, NODE_USE_FIPS_ENDPOINT_CONFIG_OPTIONS, } from "@smithy/config-resolver"; +import { NoAuthSigner } from "@smithy/core"; +import { Hash } from "@smithy/hash-node"; +import { NODE_MAX_ATTEMPT_CONFIG_OPTIONS, NODE_RETRY_MODE_CONFIG_OPTIONS } from "@smithy/middleware-retry"; +import { loadConfig as loadNodeConfig } from "@smithy/node-config-provider"; +import { NodeHttpHandler as RequestHandler, streamCollector } from "@smithy/node-http-handler"; +import { calculateBodyLength } from "@smithy/util-body-length-node"; +import { DEFAULT_RETRY_MODE } from "@smithy/util-retry"; +import { 
getRuntimeConfig as getSharedRuntimeConfig } from "./runtimeConfig.shared"; +import { loadConfigsForDefaultMode } from "@smithy/smithy-client"; +import { resolveDefaultsModeConfig } from "@smithy/util-defaults-mode-node"; +import { emitWarningIfUnsupportedVersion } from "@smithy/smithy-client"; +export const getRuntimeConfig = (config) => { + emitWarningIfUnsupportedVersion(process.version); + const defaultsMode = resolveDefaultsModeConfig(config); + const defaultConfigProvider = () => defaultsMode().then(loadConfigsForDefaultMode); + const clientSharedValues = getSharedRuntimeConfig(config); + awsCheckVersion(process.version); + const profileConfig = { profile: config?.profile }; + return { + ...clientSharedValues, + ...config, + runtime: "node", + defaultsMode, + authSchemePreference: config?.authSchemePreference ?? loadNodeConfig(NODE_AUTH_SCHEME_PREFERENCE_OPTIONS, profileConfig), + bodyLengthChecker: config?.bodyLengthChecker ?? calculateBodyLength, + defaultUserAgentProvider: config?.defaultUserAgentProvider ?? + createDefaultUserAgentProvider({ serviceId: clientSharedValues.serviceId, clientVersion: packageInfo.version }), + httpAuthSchemes: config?.httpAuthSchemes ?? [ + { + schemeId: "aws.auth#sigv4", + identityProvider: (ipc) => ipc.getIdentityProvider("aws.auth#sigv4") || + (async (idProps) => await config.credentialDefaultProvider(idProps?.__config || {})()), + signer: new AwsSdkSigV4Signer(), + }, + { + schemeId: "smithy.api#noAuth", + identityProvider: (ipc) => ipc.getIdentityProvider("smithy.api#noAuth") || (async () => ({})), + signer: new NoAuthSigner(), + }, + ], + maxAttempts: config?.maxAttempts ?? loadNodeConfig(NODE_MAX_ATTEMPT_CONFIG_OPTIONS, config), + region: config?.region ?? + loadNodeConfig(NODE_REGION_CONFIG_OPTIONS, { ...NODE_REGION_CONFIG_FILE_OPTIONS, ...profileConfig }), + requestHandler: RequestHandler.create(config?.requestHandler ?? defaultConfigProvider), + retryMode: config?.retryMode ?? 
+ loadNodeConfig({ + ...NODE_RETRY_MODE_CONFIG_OPTIONS, + default: async () => (await defaultConfigProvider()).retryMode || DEFAULT_RETRY_MODE, + }, config), + sha256: config?.sha256 ?? Hash.bind(null, "sha256"), + streamCollector: config?.streamCollector ?? streamCollector, + useDualstackEndpoint: config?.useDualstackEndpoint ?? loadNodeConfig(NODE_USE_DUALSTACK_ENDPOINT_CONFIG_OPTIONS, profileConfig), + useFipsEndpoint: config?.useFipsEndpoint ?? loadNodeConfig(NODE_USE_FIPS_ENDPOINT_CONFIG_OPTIONS, profileConfig), + userAgentAppId: config?.userAgentAppId ?? loadNodeConfig(NODE_APP_ID_CONFIG_OPTIONS, profileConfig), + }; +}; diff --git a/amplify/functions/deleteDocument/node_modules/@aws-sdk/nested-clients/dist-es/submodules/sts/runtimeConfig.native.js b/amplify/functions/deleteDocument/node_modules/@aws-sdk/nested-clients/dist-es/submodules/sts/runtimeConfig.native.js new file mode 100644 index 0000000..0b54695 --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@aws-sdk/nested-clients/dist-es/submodules/sts/runtimeConfig.native.js @@ -0,0 +1,11 @@ +import { Sha256 } from "@aws-crypto/sha256-js"; +import { getRuntimeConfig as getBrowserRuntimeConfig } from "./runtimeConfig.browser"; +export const getRuntimeConfig = (config) => { + const browserDefaults = getBrowserRuntimeConfig(config); + return { + ...browserDefaults, + ...config, + runtime: "react-native", + sha256: config?.sha256 ?? 
Sha256, + }; +}; diff --git a/amplify/functions/deleteDocument/node_modules/@aws-sdk/nested-clients/dist-es/submodules/sts/runtimeConfig.shared.js b/amplify/functions/deleteDocument/node_modules/@aws-sdk/nested-clients/dist-es/submodules/sts/runtimeConfig.shared.js new file mode 100644 index 0000000..5c6df20 --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@aws-sdk/nested-clients/dist-es/submodules/sts/runtimeConfig.shared.js @@ -0,0 +1,36 @@ +import { AwsSdkSigV4Signer } from "@aws-sdk/core"; +import { NoAuthSigner } from "@smithy/core"; +import { NoOpLogger } from "@smithy/smithy-client"; +import { parseUrl } from "@smithy/url-parser"; +import { fromBase64, toBase64 } from "@smithy/util-base64"; +import { fromUtf8, toUtf8 } from "@smithy/util-utf8"; +import { defaultSTSHttpAuthSchemeProvider } from "./auth/httpAuthSchemeProvider"; +import { defaultEndpointResolver } from "./endpoint/endpointResolver"; +export const getRuntimeConfig = (config) => { + return { + apiVersion: "2011-06-15", + base64Decoder: config?.base64Decoder ?? fromBase64, + base64Encoder: config?.base64Encoder ?? toBase64, + disableHostPrefix: config?.disableHostPrefix ?? false, + endpointProvider: config?.endpointProvider ?? defaultEndpointResolver, + extensions: config?.extensions ?? [], + httpAuthSchemeProvider: config?.httpAuthSchemeProvider ?? defaultSTSHttpAuthSchemeProvider, + httpAuthSchemes: config?.httpAuthSchemes ?? [ + { + schemeId: "aws.auth#sigv4", + identityProvider: (ipc) => ipc.getIdentityProvider("aws.auth#sigv4"), + signer: new AwsSdkSigV4Signer(), + }, + { + schemeId: "smithy.api#noAuth", + identityProvider: (ipc) => ipc.getIdentityProvider("smithy.api#noAuth") || (async () => ({})), + signer: new NoAuthSigner(), + }, + ], + logger: config?.logger ?? new NoOpLogger(), + serviceId: config?.serviceId ?? "STS", + urlParser: config?.urlParser ?? parseUrl, + utf8Decoder: config?.utf8Decoder ?? fromUtf8, + utf8Encoder: config?.utf8Encoder ?? 
toUtf8, + }; +}; diff --git a/amplify/functions/deleteDocument/node_modules/@aws-sdk/nested-clients/dist-es/submodules/sts/runtimeExtensions.js b/amplify/functions/deleteDocument/node_modules/@aws-sdk/nested-clients/dist-es/submodules/sts/runtimeExtensions.js new file mode 100644 index 0000000..5b29695 --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@aws-sdk/nested-clients/dist-es/submodules/sts/runtimeExtensions.js @@ -0,0 +1,9 @@ +import { getAwsRegionExtensionConfiguration, resolveAwsRegionExtensionConfiguration, } from "@aws-sdk/region-config-resolver"; +import { getHttpHandlerExtensionConfiguration, resolveHttpHandlerRuntimeConfig } from "@smithy/protocol-http"; +import { getDefaultExtensionConfiguration, resolveDefaultRuntimeConfig } from "@smithy/smithy-client"; +import { getHttpAuthExtensionConfiguration, resolveHttpAuthRuntimeConfig } from "./auth/httpAuthExtensionConfiguration"; +export const resolveRuntimeExtensions = (runtimeConfig, extensions) => { + const extensionConfiguration = Object.assign(getAwsRegionExtensionConfiguration(runtimeConfig), getDefaultExtensionConfiguration(runtimeConfig), getHttpHandlerExtensionConfiguration(runtimeConfig), getHttpAuthExtensionConfiguration(runtimeConfig)); + extensions.forEach((extension) => extension.configure(extensionConfiguration)); + return Object.assign(runtimeConfig, resolveAwsRegionExtensionConfiguration(extensionConfiguration), resolveDefaultRuntimeConfig(extensionConfiguration), resolveHttpHandlerRuntimeConfig(extensionConfiguration), resolveHttpAuthRuntimeConfig(extensionConfiguration)); +}; diff --git a/amplify/functions/deleteDocument/node_modules/@aws-sdk/nested-clients/dist-types/index.d.ts b/amplify/functions/deleteDocument/node_modules/@aws-sdk/nested-clients/dist-types/index.d.ts new file mode 100644 index 0000000..9d99a73 --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@aws-sdk/nested-clients/dist-types/index.d.ts @@ -0,0 +1,7 @@ +/** + * This package exports 
nothing at the root. + * Use submodules e.g. \@aws-sdk/nested-clients/client-sts. + * + * @internal + */ +export {}; diff --git a/amplify/functions/deleteDocument/node_modules/@aws-sdk/nested-clients/dist-types/submodules/sso-oidc/SSOOIDC.d.ts b/amplify/functions/deleteDocument/node_modules/@aws-sdk/nested-clients/dist-types/submodules/sso-oidc/SSOOIDC.d.ts new file mode 100644 index 0000000..ebec5e6 --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@aws-sdk/nested-clients/dist-types/submodules/sso-oidc/SSOOIDC.d.ts @@ -0,0 +1,55 @@ +import { HttpHandlerOptions as __HttpHandlerOptions } from "@smithy/types"; +import { CreateTokenCommandInput, CreateTokenCommandOutput } from "./commands/CreateTokenCommand"; +import { SSOOIDCClient } from "./SSOOIDCClient"; +export interface SSOOIDC { + /** + * @see {@link CreateTokenCommand} + */ + createToken(args: CreateTokenCommandInput, options?: __HttpHandlerOptions): Promise; + createToken(args: CreateTokenCommandInput, cb: (err: any, data?: CreateTokenCommandOutput) => void): void; + createToken(args: CreateTokenCommandInput, options: __HttpHandlerOptions, cb: (err: any, data?: CreateTokenCommandOutput) => void): void; +} +/** + *

IAM Identity Center OpenID Connect (OIDC) is a web service that enables a client (such as CLI or a + * native application) to register with IAM Identity Center. The service also enables the client to fetch the + * user’s access token upon successful authentication and authorization with IAM Identity Center.

+ *

+ * API namespaces + *

+ *

IAM Identity Center uses the sso and identitystore API namespaces. IAM Identity Center + * OpenID Connect uses the sso-oidc namespace.

+ *

+ * Considerations for using this guide + *

+ *

Before you begin using this guide, we recommend that you first review the following + * important information about how the IAM Identity Center OIDC service works.

+ *
    + *
  • + *

    The IAM Identity Center OIDC service currently implements only the portions of the OAuth 2.0 Device + * Authorization Grant standard (https://tools.ietf.org/html/rfc8628) that are necessary to enable single + * sign-on authentication with the CLI.

    + *
  • + *
  • + *

    With older versions of the CLI, the service only emits OIDC access tokens, so to + * obtain a new token, users must explicitly re-authenticate. To access the OIDC flow that + * supports token refresh and doesn’t require re-authentication, update to the latest CLI + * version (1.27.10 for CLI V1 and 2.9.0 for CLI V2) with support for OIDC token refresh + * and configurable IAM Identity Center session durations. For more information, see Configure Amazon Web Services access portal session duration .

    + *
  • + *
  • + *

    The access tokens provided by this service grant access to all Amazon Web Services account + * entitlements assigned to an IAM Identity Center user, not just a particular application.

    + *
  • + *
  • + *

    The documentation in this guide does not describe the mechanism to convert the access + * token into Amazon Web Services Auth (“sigv4”) credentials for use with IAM-protected Amazon Web Services service + * endpoints. For more information, see GetRoleCredentials in the IAM Identity Center Portal API Reference + * Guide.

    + *
  • + *
+ *

For general information about IAM Identity Center, see What is + * IAM Identity Center? in the IAM Identity Center User Guide.

+ * @public + */ +export declare class SSOOIDC extends SSOOIDCClient implements SSOOIDC { +} diff --git a/amplify/functions/deleteDocument/node_modules/@aws-sdk/nested-clients/dist-types/submodules/sso-oidc/SSOOIDCClient.d.ts b/amplify/functions/deleteDocument/node_modules/@aws-sdk/nested-clients/dist-types/submodules/sso-oidc/SSOOIDCClient.d.ts new file mode 100644 index 0000000..5490889 --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@aws-sdk/nested-clients/dist-types/submodules/sso-oidc/SSOOIDCClient.d.ts @@ -0,0 +1,220 @@ +import { HostHeaderInputConfig, HostHeaderResolvedConfig } from "@aws-sdk/middleware-host-header"; +import { UserAgentInputConfig, UserAgentResolvedConfig } from "@aws-sdk/middleware-user-agent"; +import { RegionInputConfig, RegionResolvedConfig } from "@smithy/config-resolver"; +import { EndpointInputConfig, EndpointResolvedConfig } from "@smithy/middleware-endpoint"; +import { RetryInputConfig, RetryResolvedConfig } from "@smithy/middleware-retry"; +import { HttpHandlerUserInput as __HttpHandlerUserInput } from "@smithy/protocol-http"; +import { Client as __Client, DefaultsMode as __DefaultsMode, SmithyConfiguration as __SmithyConfiguration, SmithyResolvedConfiguration as __SmithyResolvedConfiguration } from "@smithy/smithy-client"; +import { BodyLengthCalculator as __BodyLengthCalculator, CheckOptionalClientConfig as __CheckOptionalClientConfig, ChecksumConstructor as __ChecksumConstructor, Decoder as __Decoder, Encoder as __Encoder, HashConstructor as __HashConstructor, HttpHandlerOptions as __HttpHandlerOptions, Logger as __Logger, Provider as __Provider, Provider, StreamCollector as __StreamCollector, UrlParser as __UrlParser, UserAgent as __UserAgent } from "@smithy/types"; +import { HttpAuthSchemeInputConfig, HttpAuthSchemeResolvedConfig } from "./auth/httpAuthSchemeProvider"; +import { CreateTokenCommandInput, CreateTokenCommandOutput } from "./commands/CreateTokenCommand"; +import { ClientInputEndpointParameters, 
ClientResolvedEndpointParameters, EndpointParameters } from "./endpoint/EndpointParameters"; +import { RuntimeExtension, RuntimeExtensionsConfig } from "./runtimeExtensions"; +export { __Client }; +/** + * @public + */ +export type ServiceInputTypes = CreateTokenCommandInput; +/** + * @public + */ +export type ServiceOutputTypes = CreateTokenCommandOutput; +/** + * @public + */ +export interface ClientDefaults extends Partial<__SmithyConfiguration<__HttpHandlerOptions>> { + /** + * The HTTP handler to use or its constructor options. Fetch in browser and Https in Nodejs. + */ + requestHandler?: __HttpHandlerUserInput; + /** + * A constructor for a class implementing the {@link @smithy/types#ChecksumConstructor} interface + * that computes the SHA-256 HMAC or checksum of a string or binary buffer. + * @internal + */ + sha256?: __ChecksumConstructor | __HashConstructor; + /** + * The function that will be used to convert strings into HTTP endpoints. + * @internal + */ + urlParser?: __UrlParser; + /** + * A function that can calculate the length of a request body. + * @internal + */ + bodyLengthChecker?: __BodyLengthCalculator; + /** + * A function that converts a stream into an array of bytes. + * @internal + */ + streamCollector?: __StreamCollector; + /** + * The function that will be used to convert a base64-encoded string to a byte array. + * @internal + */ + base64Decoder?: __Decoder; + /** + * The function that will be used to convert binary data to a base64-encoded string. + * @internal + */ + base64Encoder?: __Encoder; + /** + * The function that will be used to convert a UTF8-encoded string to a byte array. + * @internal + */ + utf8Decoder?: __Decoder; + /** + * The function that will be used to convert binary data to a UTF-8 encoded string. + * @internal + */ + utf8Encoder?: __Encoder; + /** + * The runtime environment. 
+ * @internal + */ + runtime?: string; + /** + * Disable dynamically changing the endpoint of the client based on the hostPrefix + * trait of an operation. + */ + disableHostPrefix?: boolean; + /** + * Unique service identifier. + * @internal + */ + serviceId?: string; + /** + * Enables IPv6/IPv4 dualstack endpoint. + */ + useDualstackEndpoint?: boolean | __Provider; + /** + * Enables FIPS compatible endpoints. + */ + useFipsEndpoint?: boolean | __Provider; + /** + * The AWS region to which this client will send requests + */ + region?: string | __Provider; + /** + * Setting a client profile is similar to setting a value for the + * AWS_PROFILE environment variable. Setting a profile on a client + * in code only affects the single client instance, unlike AWS_PROFILE. + * + * When set, and only for environments where an AWS configuration + * file exists, fields configurable by this file will be retrieved + * from the specified profile within that file. + * Conflicting code configuration and environment variables will + * still have higher priority. + * + * For client credential resolution that involves checking the AWS + * configuration file, the client's profile (this value) will be + * used unless a different profile is set in the credential + * provider options. + * + */ + profile?: string; + /** + * The provider populating default tracking information to be sent with `user-agent`, `x-amz-user-agent` header + * @internal + */ + defaultUserAgentProvider?: Provider<__UserAgent>; + /** + * Value for how many times a request will be made at most in case of retry. + */ + maxAttempts?: number | __Provider; + /** + * Specifies which retry algorithm to use. + * @see https://docs.aws.amazon.com/AWSJavaScriptSDK/v3/latest/Package/-smithy-util-retry/Enum/RETRY_MODES/ + * + */ + retryMode?: string | __Provider; + /** + * Optional logger for logging debug/info/warn/error. 
+ */ + logger?: __Logger; + /** + * Optional extensions + */ + extensions?: RuntimeExtension[]; + /** + * The {@link @smithy/smithy-client#DefaultsMode} that will be used to determine how certain default configuration options are resolved in the SDK. + */ + defaultsMode?: __DefaultsMode | __Provider<__DefaultsMode>; +} +/** + * @public + */ +export type SSOOIDCClientConfigType = Partial<__SmithyConfiguration<__HttpHandlerOptions>> & ClientDefaults & UserAgentInputConfig & RetryInputConfig & RegionInputConfig & HostHeaderInputConfig & EndpointInputConfig & HttpAuthSchemeInputConfig & ClientInputEndpointParameters; +/** + * @public + * + * The configuration interface of SSOOIDCClient class constructor that set the region, credentials and other options. + */ +export interface SSOOIDCClientConfig extends SSOOIDCClientConfigType { +} +/** + * @public + */ +export type SSOOIDCClientResolvedConfigType = __SmithyResolvedConfiguration<__HttpHandlerOptions> & Required & RuntimeExtensionsConfig & UserAgentResolvedConfig & RetryResolvedConfig & RegionResolvedConfig & HostHeaderResolvedConfig & EndpointResolvedConfig & HttpAuthSchemeResolvedConfig & ClientResolvedEndpointParameters; +/** + * @public + * + * The resolved configuration interface of SSOOIDCClient class. This is resolved and normalized from the {@link SSOOIDCClientConfig | constructor configuration interface}. + */ +export interface SSOOIDCClientResolvedConfig extends SSOOIDCClientResolvedConfigType { +} +/** + *

IAM Identity Center OpenID Connect (OIDC) is a web service that enables a client (such as CLI or a + * native application) to register with IAM Identity Center. The service also enables the client to fetch the + * user’s access token upon successful authentication and authorization with IAM Identity Center.

+ *

+ * API namespaces + *

+ *

IAM Identity Center uses the sso and identitystore API namespaces. IAM Identity Center + * OpenID Connect uses the sso-oidc namespace.

+ *

+ * Considerations for using this guide + *

+ *

Before you begin using this guide, we recommend that you first review the following + * important information about how the IAM Identity Center OIDC service works.

+ *
    + *
  • + *

    The IAM Identity Center OIDC service currently implements only the portions of the OAuth 2.0 Device + * Authorization Grant standard (https://tools.ietf.org/html/rfc8628) that are necessary to enable single + * sign-on authentication with the CLI.

    + *
  • + *
  • + *

    With older versions of the CLI, the service only emits OIDC access tokens, so to + * obtain a new token, users must explicitly re-authenticate. To access the OIDC flow that + * supports token refresh and doesn’t require re-authentication, update to the latest CLI + * version (1.27.10 for CLI V1 and 2.9.0 for CLI V2) with support for OIDC token refresh + * and configurable IAM Identity Center session durations. For more information, see Configure Amazon Web Services access portal session duration .

    + *
  • + *
  • + *

    The access tokens provided by this service grant access to all Amazon Web Services account + * entitlements assigned to an IAM Identity Center user, not just a particular application.

    + *
  • + *
  • + *

    The documentation in this guide does not describe the mechanism to convert the access + * token into Amazon Web Services Auth (“sigv4”) credentials for use with IAM-protected Amazon Web Services service + * endpoints. For more information, see GetRoleCredentials in the IAM Identity Center Portal API Reference + * Guide.

    + *
  • + *
+ *

For general information about IAM Identity Center, see What is + * IAM Identity Center? in the IAM Identity Center User Guide.

+ * @public + */ +export declare class SSOOIDCClient extends __Client<__HttpHandlerOptions, ServiceInputTypes, ServiceOutputTypes, SSOOIDCClientResolvedConfig> { + /** + * The resolved configuration of SSOOIDCClient class. This is resolved and normalized from the {@link SSOOIDCClientConfig | constructor configuration interface}. + */ + readonly config: SSOOIDCClientResolvedConfig; + constructor(...[configuration]: __CheckOptionalClientConfig); + /** + * Destroy underlying resources, like sockets. It's usually not necessary to do this. + * However in Node.js, it's best to explicitly shut down the client's agent when it is no longer needed. + * Otherwise, sockets might stay open for quite a long time before the server terminates them. + */ + destroy(): void; +} diff --git a/amplify/functions/deleteDocument/node_modules/@aws-sdk/nested-clients/dist-types/submodules/sso-oidc/auth/httpAuthExtensionConfiguration.d.ts b/amplify/functions/deleteDocument/node_modules/@aws-sdk/nested-clients/dist-types/submodules/sso-oidc/auth/httpAuthExtensionConfiguration.d.ts new file mode 100644 index 0000000..a56a608 --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@aws-sdk/nested-clients/dist-types/submodules/sso-oidc/auth/httpAuthExtensionConfiguration.d.ts @@ -0,0 +1,29 @@ +import { AwsCredentialIdentity, AwsCredentialIdentityProvider, HttpAuthScheme } from "@smithy/types"; +import { SSOOIDCHttpAuthSchemeProvider } from "./httpAuthSchemeProvider"; +/** + * @internal + */ +export interface HttpAuthExtensionConfiguration { + setHttpAuthScheme(httpAuthScheme: HttpAuthScheme): void; + httpAuthSchemes(): HttpAuthScheme[]; + setHttpAuthSchemeProvider(httpAuthSchemeProvider: SSOOIDCHttpAuthSchemeProvider): void; + httpAuthSchemeProvider(): SSOOIDCHttpAuthSchemeProvider; + setCredentials(credentials: AwsCredentialIdentity | AwsCredentialIdentityProvider): void; + credentials(): AwsCredentialIdentity | AwsCredentialIdentityProvider | undefined; +} +/** + * @internal + */ 
+export type HttpAuthRuntimeConfig = Partial<{ + httpAuthSchemes: HttpAuthScheme[]; + httpAuthSchemeProvider: SSOOIDCHttpAuthSchemeProvider; + credentials: AwsCredentialIdentity | AwsCredentialIdentityProvider; +}>; +/** + * @internal + */ +export declare const getHttpAuthExtensionConfiguration: (runtimeConfig: HttpAuthRuntimeConfig) => HttpAuthExtensionConfiguration; +/** + * @internal + */ +export declare const resolveHttpAuthRuntimeConfig: (config: HttpAuthExtensionConfiguration) => HttpAuthRuntimeConfig; diff --git a/amplify/functions/deleteDocument/node_modules/@aws-sdk/nested-clients/dist-types/submodules/sso-oidc/auth/httpAuthSchemeProvider.d.ts b/amplify/functions/deleteDocument/node_modules/@aws-sdk/nested-clients/dist-types/submodules/sso-oidc/auth/httpAuthSchemeProvider.d.ts new file mode 100644 index 0000000..8fc989a --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@aws-sdk/nested-clients/dist-types/submodules/sso-oidc/auth/httpAuthSchemeProvider.d.ts @@ -0,0 +1,75 @@ +import { AwsSdkSigV4AuthInputConfig, AwsSdkSigV4AuthResolvedConfig, AwsSdkSigV4PreviouslyResolved } from "@aws-sdk/core"; +import { HandlerExecutionContext, HttpAuthScheme, HttpAuthSchemeParameters, HttpAuthSchemeParametersProvider, HttpAuthSchemeProvider, Provider } from "@smithy/types"; +import { SSOOIDCClientResolvedConfig } from "../SSOOIDCClient"; +/** + * @internal + */ +export interface SSOOIDCHttpAuthSchemeParameters extends HttpAuthSchemeParameters { + region?: string; +} +/** + * @internal + */ +export interface SSOOIDCHttpAuthSchemeParametersProvider extends HttpAuthSchemeParametersProvider { +} +/** + * @internal + */ +export declare const defaultSSOOIDCHttpAuthSchemeParametersProvider: (config: SSOOIDCClientResolvedConfig, context: HandlerExecutionContext, input: object) => Promise; +/** + * @internal + */ +export interface SSOOIDCHttpAuthSchemeProvider extends HttpAuthSchemeProvider { +} +/** + * @internal + */ +export declare const 
defaultSSOOIDCHttpAuthSchemeProvider: SSOOIDCHttpAuthSchemeProvider; +/** + * @internal + */ +export interface HttpAuthSchemeInputConfig extends AwsSdkSigV4AuthInputConfig { + /** + * A comma-separated list of case-sensitive auth scheme names. + * An auth scheme name is a fully qualified auth scheme ID with the namespace prefix trimmed. + * For example, the auth scheme with ID aws.auth#sigv4 is named sigv4. + * @public + */ + authSchemePreference?: string[] | Provider; + /** + * Configuration of HttpAuthSchemes for a client which provides default identity providers and signers per auth scheme. + * @internal + */ + httpAuthSchemes?: HttpAuthScheme[]; + /** + * Configuration of an HttpAuthSchemeProvider for a client which resolves which HttpAuthScheme to use. + * @internal + */ + httpAuthSchemeProvider?: SSOOIDCHttpAuthSchemeProvider; +} +/** + * @internal + */ +export interface HttpAuthSchemeResolvedConfig extends AwsSdkSigV4AuthResolvedConfig { + /** + * A comma-separated list of case-sensitive auth scheme names. + * An auth scheme name is a fully qualified auth scheme ID with the namespace prefix trimmed. + * For example, the auth scheme with ID aws.auth#sigv4 is named sigv4. + * @public + */ + readonly authSchemePreference: Provider; + /** + * Configuration of HttpAuthSchemes for a client which provides default identity providers and signers per auth scheme. + * @internal + */ + readonly httpAuthSchemes: HttpAuthScheme[]; + /** + * Configuration of an HttpAuthSchemeProvider for a client which resolves which HttpAuthScheme to use. 
+ * @internal + */ + readonly httpAuthSchemeProvider: SSOOIDCHttpAuthSchemeProvider; +} +/** + * @internal + */ +export declare const resolveHttpAuthSchemeConfig: (config: T & HttpAuthSchemeInputConfig & AwsSdkSigV4PreviouslyResolved) => T & HttpAuthSchemeResolvedConfig; diff --git a/amplify/functions/deleteDocument/node_modules/@aws-sdk/nested-clients/dist-types/submodules/sso-oidc/commands/CreateTokenCommand.d.ts b/amplify/functions/deleteDocument/node_modules/@aws-sdk/nested-clients/dist-types/submodules/sso-oidc/commands/CreateTokenCommand.d.ts new file mode 100644 index 0000000..042fb52 --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@aws-sdk/nested-clients/dist-types/submodules/sso-oidc/commands/CreateTokenCommand.d.ts @@ -0,0 +1,174 @@ +import { Command as $Command } from "@smithy/smithy-client"; +import { MetadataBearer as __MetadataBearer } from "@smithy/types"; +import { CreateTokenRequest, CreateTokenResponse } from "../models/models_0"; +import { SSOOIDCClientResolvedConfig } from "../SSOOIDCClient"; +/** + * @public + */ +export type { __MetadataBearer }; +export { $Command }; +/** + * @public + * + * The input for {@link CreateTokenCommand}. + */ +export interface CreateTokenCommandInput extends CreateTokenRequest { +} +/** + * @public + * + * The output of {@link CreateTokenCommand}. + */ +export interface CreateTokenCommandOutput extends CreateTokenResponse, __MetadataBearer { +} +declare const CreateTokenCommand_base: { + new (input: CreateTokenCommandInput): import("@smithy/smithy-client").CommandImpl; + new (__0_0: CreateTokenCommandInput): import("@smithy/smithy-client").CommandImpl; + getEndpointParameterInstructions(): import("@smithy/middleware-endpoint").EndpointParameterInstructions; +}; +/** + *

Creates and returns access and refresh tokens for clients that are authenticated using + * client secrets. The access token can be used to fetch short-lived credentials for the assigned + * AWS accounts or to access application APIs using bearer authentication.

+ * @example + * Use a bare-bones client and the command you need to make an API call. + * ```javascript + * import { SSOOIDCClient, CreateTokenCommand } from "@aws-sdk/client-sso-oidc"; // ES Modules import + * // const { SSOOIDCClient, CreateTokenCommand } = require("@aws-sdk/client-sso-oidc"); // CommonJS import + * const client = new SSOOIDCClient(config); + * const input = { // CreateTokenRequest + * clientId: "STRING_VALUE", // required + * clientSecret: "STRING_VALUE", // required + * grantType: "STRING_VALUE", // required + * deviceCode: "STRING_VALUE", + * code: "STRING_VALUE", + * refreshToken: "STRING_VALUE", + * scope: [ // Scopes + * "STRING_VALUE", + * ], + * redirectUri: "STRING_VALUE", + * codeVerifier: "STRING_VALUE", + * }; + * const command = new CreateTokenCommand(input); + * const response = await client.send(command); + * // { // CreateTokenResponse + * // accessToken: "STRING_VALUE", + * // tokenType: "STRING_VALUE", + * // expiresIn: Number("int"), + * // refreshToken: "STRING_VALUE", + * // idToken: "STRING_VALUE", + * // }; + * + * ``` + * + * @param CreateTokenCommandInput - {@link CreateTokenCommandInput} + * @returns {@link CreateTokenCommandOutput} + * @see {@link CreateTokenCommandInput} for command's `input` shape. + * @see {@link CreateTokenCommandOutput} for command's `response` shape. + * @see {@link SSOOIDCClientResolvedConfig | config} for SSOOIDCClient's `config` shape. + * + * @throws {@link AccessDeniedException} (client fault) + *

You do not have sufficient access to perform this action.

+ * + * @throws {@link AuthorizationPendingException} (client fault) + *

Indicates that a request to authorize a client with an access user session token is + * pending.

+ * + * @throws {@link ExpiredTokenException} (client fault) + *

Indicates that the token issued by the service is expired and is no longer valid.

+ * + * @throws {@link InternalServerException} (server fault) + *

Indicates that an error from the service occurred while trying to process a + * request.

+ * + * @throws {@link InvalidClientException} (client fault) + *

Indicates that the clientId or clientSecret in the request is + * invalid. For example, this can occur when a client sends an incorrect clientId or + * an expired clientSecret.

+ * + * @throws {@link InvalidGrantException} (client fault) + *

Indicates that a request contains an invalid grant. This can occur if a client makes a + * CreateToken request with an invalid grant type.

+ * + * @throws {@link InvalidRequestException} (client fault) + *

Indicates that something is wrong with the input to the request. For example, a required + * parameter might be missing or out of range.

+ * + * @throws {@link InvalidScopeException} (client fault) + *

Indicates that the scope provided in the request is invalid.

+ * + * @throws {@link SlowDownException} (client fault) + *

Indicates that the client is making the request too frequently and is more than the + * service can handle.

+ * + * @throws {@link UnauthorizedClientException} (client fault) + *

Indicates that the client is not currently authorized to make the request. This can happen + * when a clientId is not issued for a public client.

+ * + * @throws {@link UnsupportedGrantTypeException} (client fault) + *

Indicates that the grant type in the request is not supported by the service.

+ * + * @throws {@link SSOOIDCServiceException} + *

Base exception class for all service exceptions from SSOOIDC service.

+ * + * + * @example Call OAuth/OIDC /token endpoint for Device Code grant with Secret authentication + * ```javascript + * // + * const input = { + * clientId: "_yzkThXVzLWVhc3QtMQEXAMPLECLIENTID", + * clientSecret: "VERYLONGSECRETeyJraWQiOiJrZXktMTU2NDAyODA5OSIsImFsZyI6IkhTMzg0In0", + * deviceCode: "yJraWQiOiJrZXktMTU2Njk2ODA4OCIsImFsZyI6IkhTMzIn0EXAMPLEDEVICECODE", + * grantType: "urn:ietf:params:oauth:grant-type:device-code" + * }; + * const command = new CreateTokenCommand(input); + * const response = await client.send(command); + * /* response is + * { + * accessToken: "aoal-YigITUDiNX1xZwOMXM5MxOWDL0E0jg9P6_C_jKQPxS_SKCP6f0kh1Up4g7TtvQqkMnD-GJiU_S1gvug6SrggAkc0:MGYCMQD3IatVjV7jAJU91kK3PkS/SfA2wtgWzOgZWDOR7sDGN9t0phCZz5It/aes/3C1Zj0CMQCKWOgRaiz6AIhza3DSXQNMLjRKXC8F8ceCsHlgYLMZ7hZidEXAMPLEACCESSTOKEN", + * expiresIn: 1579729529, + * refreshToken: "aorvJYubGpU6i91YnH7Mfo-AT2fIVa1zCfA_Rvq9yjVKIP3onFmmykuQ7E93y2I-9Nyj-A_sVvMufaLNL0bqnDRtgAkc0:MGUCMFrRsktMRVlWaOR70XGMFGLL0SlcCw4DiYveIiOVx1uK9BbD0gvAddsW3UTLozXKMgIxAJ3qxUvjpnlLIOaaKOoa/FuNgqJVvr9GMwDtnAtlh9iZzAkEXAMPLEREFRESHTOKEN", + * tokenType: "Bearer" + * } + * *\/ + * ``` + * + * @example Call OAuth/OIDC /token endpoint for Refresh Token grant with Secret authentication + * ```javascript + * // + * const input = { + * clientId: "_yzkThXVzLWVhc3QtMQEXAMPLECLIENTID", + * clientSecret: "VERYLONGSECRETeyJraWQiOiJrZXktMTU2NDAyODA5OSIsImFsZyI6IkhTMzg0In0", + * grantType: "refresh_token", + * refreshToken: "aorvJYubGpU6i91YnH7Mfo-AT2fIVa1zCfA_Rvq9yjVKIP3onFmmykuQ7E93y2I-9Nyj-A_sVvMufaLNL0bqnDRtgAkc0:MGUCMFrRsktMRVlWaOR70XGMFGLL0SlcCw4DiYveIiOVx1uK9BbD0gvAddsW3UTLozXKMgIxAJ3qxUvjpnlLIOaaKOoa/FuNgqJVvr9GMwDtnAtlh9iZzAkEXAMPLEREFRESHTOKEN", + * scope: [ + * "codewhisperer:completions" + * ] + * }; + * const command = new CreateTokenCommand(input); + * const response = await client.send(command); + * /* response is + * { + * accessToken: 
"aoal-YigITUDiNX1xZwOMXM5MxOWDL0E0jg9P6_C_jKQPxS_SKCP6f0kh1Up4g7TtvQqkMnD-GJiU_S1gvug6SrggAkc0:MGYCMQD3IatVjV7jAJU91kK3PkS/SfA2wtgWzOgZWDOR7sDGN9t0phCZz5It/aes/3C1Zj0CMQCKWOgRaiz6AIhza3DSXQNMLjRKXC8F8ceCsHlgYLMZ7hZidEXAMPLEACCESSTOKEN", + * expiresIn: 1579729529, + * refreshToken: "aorvJYubGpU6i91YnH7Mfo-AT2fIVa1zCfA_Rvq9yjVKIP3onFmmykuQ7E93y2I-9Nyj-A_sVvMufaLNL0bqnDRtgAkc0:MGUCMFrRsktMRVlWaOR70XGMFGLL0SlcCw4DiYveIiOVx1uK9BbD0gvAddsW3UTLozXKMgIxAJ3qxUvjpnlLIOaaKOoa/FuNgqJVvr9GMwDtnAtlh9iZzAkEXAMPLEREFRESHTOKEN", + * tokenType: "Bearer" + * } + * *\/ + * ``` + * + * @public + */ +export declare class CreateTokenCommand extends CreateTokenCommand_base { + /** @internal type navigation helper, not in runtime. */ + protected static __types: { + api: { + input: CreateTokenRequest; + output: CreateTokenResponse; + }; + sdk: { + input: CreateTokenCommandInput; + output: CreateTokenCommandOutput; + }; + }; +} diff --git a/amplify/functions/deleteDocument/node_modules/@aws-sdk/nested-clients/dist-types/submodules/sso-oidc/commands/index.d.ts b/amplify/functions/deleteDocument/node_modules/@aws-sdk/nested-clients/dist-types/submodules/sso-oidc/commands/index.d.ts new file mode 100644 index 0000000..09214ca --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@aws-sdk/nested-clients/dist-types/submodules/sso-oidc/commands/index.d.ts @@ -0,0 +1 @@ +export * from "./CreateTokenCommand"; diff --git a/amplify/functions/deleteDocument/node_modules/@aws-sdk/nested-clients/dist-types/submodules/sso-oidc/endpoint/EndpointParameters.d.ts b/amplify/functions/deleteDocument/node_modules/@aws-sdk/nested-clients/dist-types/submodules/sso-oidc/endpoint/EndpointParameters.d.ts new file mode 100644 index 0000000..23f42e3 --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@aws-sdk/nested-clients/dist-types/submodules/sso-oidc/endpoint/EndpointParameters.d.ts @@ -0,0 +1,40 @@ +import { Endpoint, EndpointParameters as __EndpointParameters, EndpointV2, Provider } from 
"@smithy/types"; +/** + * @public + */ +export interface ClientInputEndpointParameters { + region?: string | Provider; + useDualstackEndpoint?: boolean | Provider; + useFipsEndpoint?: boolean | Provider; + endpoint?: string | Provider | Endpoint | Provider | EndpointV2 | Provider; +} +export type ClientResolvedEndpointParameters = ClientInputEndpointParameters & { + defaultSigningName: string; +}; +export declare const resolveClientEndpointParameters: (options: T & ClientInputEndpointParameters) => T & ClientInputEndpointParameters & { + defaultSigningName: string; +}; +export declare const commonParams: { + readonly UseFIPS: { + readonly type: "builtInParams"; + readonly name: "useFipsEndpoint"; + }; + readonly Endpoint: { + readonly type: "builtInParams"; + readonly name: "endpoint"; + }; + readonly Region: { + readonly type: "builtInParams"; + readonly name: "region"; + }; + readonly UseDualStack: { + readonly type: "builtInParams"; + readonly name: "useDualstackEndpoint"; + }; +}; +export interface EndpointParameters extends __EndpointParameters { + Region?: string; + UseDualStack?: boolean; + UseFIPS?: boolean; + Endpoint?: string; +} diff --git a/amplify/functions/deleteDocument/node_modules/@aws-sdk/nested-clients/dist-types/submodules/sso-oidc/endpoint/endpointResolver.d.ts b/amplify/functions/deleteDocument/node_modules/@aws-sdk/nested-clients/dist-types/submodules/sso-oidc/endpoint/endpointResolver.d.ts new file mode 100644 index 0000000..70a8eae --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@aws-sdk/nested-clients/dist-types/submodules/sso-oidc/endpoint/endpointResolver.d.ts @@ -0,0 +1,5 @@ +import { EndpointV2, Logger } from "@smithy/types"; +import { EndpointParameters } from "./EndpointParameters"; +export declare const defaultEndpointResolver: (endpointParams: EndpointParameters, context?: { + logger?: Logger; +}) => EndpointV2; diff --git 
a/amplify/functions/deleteDocument/node_modules/@aws-sdk/nested-clients/dist-types/submodules/sso-oidc/endpoint/ruleset.d.ts b/amplify/functions/deleteDocument/node_modules/@aws-sdk/nested-clients/dist-types/submodules/sso-oidc/endpoint/ruleset.d.ts new file mode 100644 index 0000000..4b23899 --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@aws-sdk/nested-clients/dist-types/submodules/sso-oidc/endpoint/ruleset.d.ts @@ -0,0 +1,2 @@ +import { RuleSetObject } from "@smithy/types"; +export declare const ruleSet: RuleSetObject; diff --git a/amplify/functions/deleteDocument/node_modules/@aws-sdk/nested-clients/dist-types/submodules/sso-oidc/extensionConfiguration.d.ts b/amplify/functions/deleteDocument/node_modules/@aws-sdk/nested-clients/dist-types/submodules/sso-oidc/extensionConfiguration.d.ts new file mode 100644 index 0000000..c78de85 --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@aws-sdk/nested-clients/dist-types/submodules/sso-oidc/extensionConfiguration.d.ts @@ -0,0 +1,9 @@ +import { AwsRegionExtensionConfiguration } from "@aws-sdk/types"; +import { HttpHandlerExtensionConfiguration } from "@smithy/protocol-http"; +import { DefaultExtensionConfiguration } from "@smithy/types"; +import { HttpAuthExtensionConfiguration } from "./auth/httpAuthExtensionConfiguration"; +/** + * @internal + */ +export interface SSOOIDCExtensionConfiguration extends HttpHandlerExtensionConfiguration, DefaultExtensionConfiguration, AwsRegionExtensionConfiguration, HttpAuthExtensionConfiguration { +} diff --git a/amplify/functions/deleteDocument/node_modules/@aws-sdk/nested-clients/dist-types/submodules/sso-oidc/index.d.ts b/amplify/functions/deleteDocument/node_modules/@aws-sdk/nested-clients/dist-types/submodules/sso-oidc/index.d.ts new file mode 100644 index 0000000..54c46dd --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@aws-sdk/nested-clients/dist-types/submodules/sso-oidc/index.d.ts @@ -0,0 +1,51 @@ +/** + *

IAM Identity Center OpenID Connect (OIDC) is a web service that enables a client (such as CLI or a + * native application) to register with IAM Identity Center. The service also enables the client to fetch the + * user’s access token upon successful authentication and authorization with IAM Identity Center.

+ *

+ * API namespaces + *

+ *

IAM Identity Center uses the sso and identitystore API namespaces. IAM Identity Center + * OpenID Connect uses the sso-oidc namespace.

+ *

+ * Considerations for using this guide + *

+ *

Before you begin using this guide, we recommend that you first review the following + * important information about how the IAM Identity Center OIDC service works.

+ *
    + *
  • + *

    The IAM Identity Center OIDC service currently implements only the portions of the OAuth 2.0 Device + * Authorization Grant standard (https://tools.ietf.org/html/rfc8628) that are necessary to enable single + * sign-on authentication with the CLI.

    + *
  • + *
  • + *

    With older versions of the CLI, the service only emits OIDC access tokens, so to + * obtain a new token, users must explicitly re-authenticate. To access the OIDC flow that + * supports token refresh and doesn’t require re-authentication, update to the latest CLI + * version (1.27.10 for CLI V1 and 2.9.0 for CLI V2) with support for OIDC token refresh + * and configurable IAM Identity Center session durations. For more information, see Configure Amazon Web Services access portal session duration .

    + *
  • + *
  • + *

    The access tokens provided by this service grant access to all Amazon Web Services account + * entitlements assigned to an IAM Identity Center user, not just a particular application.

    + *
  • + *
  • + *

    The documentation in this guide does not describe the mechanism to convert the access + * token into Amazon Web Services Auth (“sigv4”) credentials for use with IAM-protected Amazon Web Services service + * endpoints. For more information, see GetRoleCredentials in the IAM Identity Center Portal API Reference + * Guide.

    + *
  • + *
+ *

For general information about IAM Identity Center, see What is + * IAM Identity Center? in the IAM Identity Center User Guide.

+ * + * @packageDocumentation + */ +export * from "./SSOOIDCClient"; +export * from "./SSOOIDC"; +export { ClientInputEndpointParameters } from "./endpoint/EndpointParameters"; +export type { RuntimeExtension } from "./runtimeExtensions"; +export type { SSOOIDCExtensionConfiguration } from "./extensionConfiguration"; +export * from "./commands"; +export * from "./models"; +export { SSOOIDCServiceException } from "./models/SSOOIDCServiceException"; diff --git a/amplify/functions/deleteDocument/node_modules/@aws-sdk/nested-clients/dist-types/submodules/sso-oidc/models/SSOOIDCServiceException.d.ts b/amplify/functions/deleteDocument/node_modules/@aws-sdk/nested-clients/dist-types/submodules/sso-oidc/models/SSOOIDCServiceException.d.ts new file mode 100644 index 0000000..d45f71a --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@aws-sdk/nested-clients/dist-types/submodules/sso-oidc/models/SSOOIDCServiceException.d.ts @@ -0,0 +1,14 @@ +import { ServiceException as __ServiceException, ServiceExceptionOptions as __ServiceExceptionOptions } from "@smithy/smithy-client"; +export type { __ServiceExceptionOptions }; +export { __ServiceException }; +/** + * @public + * + * Base exception class for all service exceptions from SSOOIDC service. 
+ */ +export declare class SSOOIDCServiceException extends __ServiceException { + /** + * @internal + */ + constructor(options: __ServiceExceptionOptions); +} diff --git a/amplify/functions/deleteDocument/node_modules/@aws-sdk/nested-clients/dist-types/submodules/sso-oidc/models/index.d.ts b/amplify/functions/deleteDocument/node_modules/@aws-sdk/nested-clients/dist-types/submodules/sso-oidc/models/index.d.ts new file mode 100644 index 0000000..09c5d6e --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@aws-sdk/nested-clients/dist-types/submodules/sso-oidc/models/index.d.ts @@ -0,0 +1 @@ +export * from "./models_0"; diff --git a/amplify/functions/deleteDocument/node_modules/@aws-sdk/nested-clients/dist-types/submodules/sso-oidc/models/models_0.d.ts b/amplify/functions/deleteDocument/node_modules/@aws-sdk/nested-clients/dist-types/submodules/sso-oidc/models/models_0.d.ts new file mode 100644 index 0000000..2d3c3f1 --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@aws-sdk/nested-clients/dist-types/submodules/sso-oidc/models/models_0.d.ts @@ -0,0 +1,387 @@ +import { ExceptionOptionType as __ExceptionOptionType } from "@smithy/smithy-client"; +import { SSOOIDCServiceException as __BaseException } from "./SSOOIDCServiceException"; +/** + *

You do not have sufficient access to perform this action.

+ * @public + */ +export declare class AccessDeniedException extends __BaseException { + readonly name: "AccessDeniedException"; + readonly $fault: "client"; + /** + *

Single error code. For this exception the value will be access_denied.

+ * @public + */ + error?: string | undefined; + /** + *

Human-readable text providing additional information, used to assist the client developer + * in understanding the error that occurred.

+ * @public + */ + error_description?: string | undefined; + /** + * @internal + */ + constructor(opts: __ExceptionOptionType); +} +/** + *

Indicates that a request to authorize a client with an access user session token is + * pending.

+ * @public + */ +export declare class AuthorizationPendingException extends __BaseException { + readonly name: "AuthorizationPendingException"; + readonly $fault: "client"; + /** + *

Single error code. For this exception the value will be + * authorization_pending.

+ * @public + */ + error?: string | undefined; + /** + *

Human-readable text providing additional information, used to assist the client developer + * in understanding the error that occurred.

+ * @public + */ + error_description?: string | undefined; + /** + * @internal + */ + constructor(opts: __ExceptionOptionType); +} +/** + * @public + */ +export interface CreateTokenRequest { + /** + *

The unique identifier string for the client or application. This value comes from the + * result of the RegisterClient API.

+ * @public + */ + clientId: string | undefined; + /** + *

A secret string generated for the client. This value should come from the persisted result + * of the RegisterClient API.

+ * @public + */ + clientSecret: string | undefined; + /** + *

Supports the following OAuth grant types: Authorization Code, Device Code, and Refresh + * Token. Specify one of the following values, depending on the grant type that you want:

+ *

* Authorization Code - authorization_code + *

+ *

* Device Code - urn:ietf:params:oauth:grant-type:device_code + *

+ *

* Refresh Token - refresh_token + *

+ * @public + */ + grantType: string | undefined; + /** + *

Used only when calling this API for the Device Code grant type. This short-lived code is + * used to identify this authorization request. This comes from the result of the StartDeviceAuthorization API.

+ * @public + */ + deviceCode?: string | undefined; + /** + *

Used only when calling this API for the Authorization Code grant type. The short-lived + * code is used to identify this authorization request.

+ * @public + */ + code?: string | undefined; + /** + *

Used only when calling this API for the Refresh Token grant type. This token is used to + * refresh short-lived tokens, such as the access token, that might expire.

+ *

For more information about the features and limitations of the current IAM Identity Center OIDC + * implementation, see Considerations for Using this Guide in the IAM Identity Center + * OIDC API Reference.

+ * @public + */ + refreshToken?: string | undefined; + /** + *

The list of scopes for which authorization is requested. The access token that is issued + * is limited to the scopes that are granted. If this value is not specified, IAM Identity Center authorizes + * all scopes that are configured for the client during the call to RegisterClient.

+ * @public + */ + scope?: string[] | undefined; + /** + *

Used only when calling this API for the Authorization Code grant type. This value + * specifies the location of the client or application that has registered to receive the + * authorization code.

+ * @public + */ + redirectUri?: string | undefined; + /** + *

Used only when calling this API for the Authorization Code grant type. This value is + * generated by the client and presented to validate the original code challenge value the client + * passed at authorization time.

+ * @public + */ + codeVerifier?: string | undefined; +} +/** + * @internal + */ +export declare const CreateTokenRequestFilterSensitiveLog: (obj: CreateTokenRequest) => any; +/** + * @public + */ +export interface CreateTokenResponse { + /** + *

A bearer token to access Amazon Web Services accounts and applications assigned to a user.

+ * @public + */ + accessToken?: string | undefined; + /** + *

Used to notify the client that the returned token is an access token. The supported token + * type is Bearer.

+ * @public + */ + tokenType?: string | undefined; + /** + *

Indicates the time in seconds when an access token will expire.

+ * @public + */ + expiresIn?: number | undefined; + /** + *

A token that, if present, can be used to refresh a previously issued access token that + * might have expired.

+ *

For more information about the features and limitations of the current IAM Identity Center OIDC + * implementation, see Considerations for Using this Guide in the IAM Identity Center + * OIDC API Reference.

+ * @public + */ + refreshToken?: string | undefined; + /** + *

The idToken is not implemented or supported. For more information about the + * features and limitations of the current IAM Identity Center OIDC implementation, see + * Considerations for Using this Guide in the IAM Identity Center + * OIDC API Reference.

+ *

A JSON Web Token (JWT) that identifies who is associated with the issued access token. + *

+ * @public + */ + idToken?: string | undefined; +} +/** + * @internal + */ +export declare const CreateTokenResponseFilterSensitiveLog: (obj: CreateTokenResponse) => any; +/** + *

Indicates that the token issued by the service is expired and is no longer valid.

+ * @public + */ +export declare class ExpiredTokenException extends __BaseException { + readonly name: "ExpiredTokenException"; + readonly $fault: "client"; + /** + *

Single error code. For this exception the value will be expired_token.

+ * @public + */ + error?: string | undefined; + /** + *

Human-readable text providing additional information, used to assist the client developer + * in understanding the error that occurred.

+ * @public + */ + error_description?: string | undefined; + /** + * @internal + */ + constructor(opts: __ExceptionOptionType); +} +/** + *

Indicates that an error from the service occurred while trying to process a + * request.

+ * @public + */ +export declare class InternalServerException extends __BaseException { + readonly name: "InternalServerException"; + readonly $fault: "server"; + /** + *

Single error code. For this exception the value will be server_error.

+ * @public + */ + error?: string | undefined; + /** + *

Human-readable text providing additional information, used to assist the client developer + * in understanding the error that occurred.

+ * @public + */ + error_description?: string | undefined; + /** + * @internal + */ + constructor(opts: __ExceptionOptionType); +} +/** + *

Indicates that the clientId or clientSecret in the request is + * invalid. For example, this can occur when a client sends an incorrect clientId or + * an expired clientSecret.

+ * @public + */ +export declare class InvalidClientException extends __BaseException { + readonly name: "InvalidClientException"; + readonly $fault: "client"; + /** + *

Single error code. For this exception the value will be + * invalid_client.

+ * @public + */ + error?: string | undefined; + /** + *

Human-readable text providing additional information, used to assist the client developer + * in understanding the error that occurred.

+ * @public + */ + error_description?: string | undefined; + /** + * @internal + */ + constructor(opts: __ExceptionOptionType); +} +/** + *

Indicates that a request contains an invalid grant. This can occur if a client makes a + * CreateToken request with an invalid grant type.

+ * @public + */ +export declare class InvalidGrantException extends __BaseException { + readonly name: "InvalidGrantException"; + readonly $fault: "client"; + /** + *

Single error code. For this exception the value will be invalid_grant.

+ * @public + */ + error?: string | undefined; + /** + *

Human-readable text providing additional information, used to assist the client developer + * in understanding the error that occurred.

+ * @public + */ + error_description?: string | undefined; + /** + * @internal + */ + constructor(opts: __ExceptionOptionType); +} +/** + *

Indicates that something is wrong with the input to the request. For example, a required + * parameter might be missing or out of range.

+ * @public + */ +export declare class InvalidRequestException extends __BaseException { + readonly name: "InvalidRequestException"; + readonly $fault: "client"; + /** + *

Single error code. For this exception the value will be + * invalid_request.

+ * @public + */ + error?: string | undefined; + /** + *

Human-readable text providing additional information, used to assist the client developer + * in understanding the error that occurred.

+ * @public + */ + error_description?: string | undefined; + /** + * @internal + */ + constructor(opts: __ExceptionOptionType); +} +/** + *

Indicates that the scope provided in the request is invalid.

+ * @public + */ +export declare class InvalidScopeException extends __BaseException { + readonly name: "InvalidScopeException"; + readonly $fault: "client"; + /** + *

Single error code. For this exception the value will be invalid_scope.

+ * @public + */ + error?: string | undefined; + /** + *

Human-readable text providing additional information, used to assist the client developer + * in understanding the error that occurred.

+ * @public + */ + error_description?: string | undefined; + /** + * @internal + */ + constructor(opts: __ExceptionOptionType); +} +/** + *

Indicates that the client is making the request too frequently and is more than the + * service can handle.

+ * @public + */ +export declare class SlowDownException extends __BaseException { + readonly name: "SlowDownException"; + readonly $fault: "client"; + /** + *

Single error code. For this exception the value will be slow_down.

+ * @public + */ + error?: string | undefined; + /** + *

Human-readable text providing additional information, used to assist the client developer + * in understanding the error that occurred.

+ * @public + */ + error_description?: string | undefined; + /** + * @internal + */ + constructor(opts: __ExceptionOptionType); +} +/** + *

Indicates that the client is not currently authorized to make the request. This can happen + * when a clientId is not issued for a public client.

+ * @public + */ +export declare class UnauthorizedClientException extends __BaseException { + readonly name: "UnauthorizedClientException"; + readonly $fault: "client"; + /** + *

Single error code. For this exception the value will be + * unauthorized_client.

+ * @public + */ + error?: string | undefined; + /** + *

Human-readable text providing additional information, used to assist the client developer + * in understanding the error that occurred.

+ * @public + */ + error_description?: string | undefined; + /** + * @internal + */ + constructor(opts: __ExceptionOptionType); +} +/** + *

Indicates that the grant type in the request is not supported by the service.

+ * @public + */ +export declare class UnsupportedGrantTypeException extends __BaseException { + readonly name: "UnsupportedGrantTypeException"; + readonly $fault: "client"; + /** + *

Single error code. For this exception the value will be + * unsupported_grant_type.

+ * @public + */ + error?: string | undefined; + /** + *

Human-readable text providing additional information, used to assist the client developer + * in understanding the error that occurred.

+ * @public + */ + error_description?: string | undefined; + /** + * @internal + */ + constructor(opts: __ExceptionOptionType); +} diff --git a/amplify/functions/deleteDocument/node_modules/@aws-sdk/nested-clients/dist-types/submodules/sso-oidc/protocols/Aws_restJson1.d.ts b/amplify/functions/deleteDocument/node_modules/@aws-sdk/nested-clients/dist-types/submodules/sso-oidc/protocols/Aws_restJson1.d.ts new file mode 100644 index 0000000..d4e38b4 --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@aws-sdk/nested-clients/dist-types/submodules/sso-oidc/protocols/Aws_restJson1.d.ts @@ -0,0 +1,11 @@ +import { HttpRequest as __HttpRequest, HttpResponse as __HttpResponse } from "@smithy/protocol-http"; +import { SerdeContext as __SerdeContext } from "@smithy/types"; +import { CreateTokenCommandInput, CreateTokenCommandOutput } from "../commands/CreateTokenCommand"; +/** + * serializeAws_restJson1CreateTokenCommand + */ +export declare const se_CreateTokenCommand: (input: CreateTokenCommandInput, context: __SerdeContext) => Promise<__HttpRequest>; +/** + * deserializeAws_restJson1CreateTokenCommand + */ +export declare const de_CreateTokenCommand: (output: __HttpResponse, context: __SerdeContext) => Promise; diff --git a/amplify/functions/deleteDocument/node_modules/@aws-sdk/nested-clients/dist-types/submodules/sso-oidc/runtimeConfig.browser.d.ts b/amplify/functions/deleteDocument/node_modules/@aws-sdk/nested-clients/dist-types/submodules/sso-oidc/runtimeConfig.browser.d.ts new file mode 100644 index 0000000..26c727f --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@aws-sdk/nested-clients/dist-types/submodules/sso-oidc/runtimeConfig.browser.d.ts @@ -0,0 +1,57 @@ +import { FetchHttpHandler as RequestHandler } from "@smithy/fetch-http-handler"; +import { SSOOIDCClientConfig } from "./SSOOIDCClient"; +/** + * @internal + */ +export declare const getRuntimeConfig: (config: SSOOIDCClientConfig) => { + runtime: string; + defaultsMode: 
import("@smithy/types").Provider; + bodyLengthChecker: import("@smithy/types").BodyLengthCalculator; + defaultUserAgentProvider: (config?: import("@aws-sdk/util-user-agent-browser").PreviouslyResolved | undefined) => Promise; + maxAttempts: number | import("@smithy/types").Provider; + region: string | import("@smithy/types").Provider; + requestHandler: import("@smithy/protocol-http").HttpHandler | RequestHandler; + retryMode: string | import("@smithy/types").Provider; + sha256: import("@smithy/types").HashConstructor; + streamCollector: import("@smithy/types").StreamCollector; + useDualstackEndpoint: boolean | import("@smithy/types").Provider; + useFipsEndpoint: boolean | import("@smithy/types").Provider; + apiVersion: string; + cacheMiddleware?: boolean | undefined; + urlParser: import("@smithy/types").UrlParser; + base64Decoder: import("@smithy/types").Decoder; + base64Encoder: (_input: string | Uint8Array) => string; + utf8Decoder: import("@smithy/types").Decoder; + utf8Encoder: (input: string | Uint8Array) => string; + disableHostPrefix: boolean; + serviceId: string; + profile?: string | undefined; + logger: import("@smithy/types").Logger; + extensions: import("./runtimeExtensions").RuntimeExtension[]; + customUserAgent?: string | import("@smithy/types").UserAgent | undefined; + userAgentAppId?: string | import("@smithy/types").Provider | undefined; + retryStrategy?: import("@smithy/types").RetryStrategy | import("@smithy/types").RetryStrategyV2 | undefined; + endpoint?: ((string | import("@smithy/types").Endpoint | import("@smithy/types").Provider | import("@smithy/types").EndpointV2 | import("@smithy/types").Provider) & (string | import("@smithy/types").Provider | import("@smithy/types").Endpoint | import("@smithy/types").Provider | import("@smithy/types").EndpointV2 | import("@smithy/types").Provider)) | undefined; + endpointProvider: (endpointParams: import("./endpoint/EndpointParameters").EndpointParameters, context?: { + logger?: 
import("@smithy/types").Logger | undefined; + }) => import("@smithy/types").EndpointV2; + tls?: boolean | undefined; + serviceConfiguredEndpoint?: undefined; + authSchemePreference?: string[] | import("@smithy/types").Provider | undefined; + httpAuthSchemes: import("@smithy/types").HttpAuthScheme[] | ({ + schemeId: string; + identityProvider: (ipc: import("@smithy/types").IdentityProviderConfig) => import("@smithy/types").IdentityProvider | undefined; + signer: import("@aws-sdk/core").AwsSdkSigV4Signer; + } | { + schemeId: string; + identityProvider: (ipc: import("@smithy/types").IdentityProviderConfig) => import("@smithy/types").IdentityProvider | (() => Promise<{}>); + signer: import("@smithy/core").NoAuthSigner; + })[]; + httpAuthSchemeProvider: import("./auth/httpAuthSchemeProvider").SSOOIDCHttpAuthSchemeProvider; + credentials?: import("@smithy/types").AwsCredentialIdentity | import("@smithy/types").AwsCredentialIdentityProvider | undefined; + signer?: import("@smithy/types").RequestSigner | ((authScheme?: import("@smithy/types").AuthScheme | undefined) => Promise) | undefined; + signingEscapePath?: boolean | undefined; + systemClockOffset?: number | undefined; + signingRegion?: string | undefined; + signerConstructor?: (new (options: import("@smithy/signature-v4").SignatureV4Init & import("@smithy/signature-v4").SignatureV4CryptoInit) => import("@smithy/types").RequestSigner) | undefined; +}; diff --git a/amplify/functions/deleteDocument/node_modules/@aws-sdk/nested-clients/dist-types/submodules/sso-oidc/runtimeConfig.d.ts b/amplify/functions/deleteDocument/node_modules/@aws-sdk/nested-clients/dist-types/submodules/sso-oidc/runtimeConfig.d.ts new file mode 100644 index 0000000..1819a97 --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@aws-sdk/nested-clients/dist-types/submodules/sso-oidc/runtimeConfig.d.ts @@ -0,0 +1,57 @@ +import { NodeHttpHandler as RequestHandler } from "@smithy/node-http-handler"; +import { SSOOIDCClientConfig } from 
"./SSOOIDCClient"; +/** + * @internal + */ +export declare const getRuntimeConfig: (config: SSOOIDCClientConfig) => { + runtime: string; + defaultsMode: import("@smithy/types").Provider; + authSchemePreference: string[] | import("@smithy/types").Provider; + bodyLengthChecker: import("@smithy/types").BodyLengthCalculator; + defaultUserAgentProvider: (config?: import("@aws-sdk/util-user-agent-node").PreviouslyResolved | undefined) => Promise; + maxAttempts: number | import("@smithy/types").Provider; + region: string | import("@smithy/types").Provider; + requestHandler: RequestHandler | import("@smithy/protocol-http").HttpHandler; + retryMode: string | import("@smithy/types").Provider; + sha256: import("@smithy/types").HashConstructor; + streamCollector: import("@smithy/types").StreamCollector; + useDualstackEndpoint: boolean | import("@smithy/types").Provider; + useFipsEndpoint: boolean | import("@smithy/types").Provider; + userAgentAppId: string | import("@smithy/types").Provider; + apiVersion: string; + cacheMiddleware?: boolean | undefined; + urlParser: import("@smithy/types").UrlParser; + base64Decoder: import("@smithy/types").Decoder; + base64Encoder: (_input: string | Uint8Array) => string; + utf8Decoder: import("@smithy/types").Decoder; + utf8Encoder: (input: string | Uint8Array) => string; + disableHostPrefix: boolean; + serviceId: string; + profile?: string | undefined; + logger: import("@smithy/types").Logger; + extensions: import("./runtimeExtensions").RuntimeExtension[]; + customUserAgent?: string | import("@smithy/types").UserAgent | undefined; + retryStrategy?: import("@smithy/types").RetryStrategy | import("@smithy/types").RetryStrategyV2 | undefined; + endpoint?: ((string | import("@smithy/types").Endpoint | import("@smithy/types").Provider | import("@smithy/types").EndpointV2 | import("@smithy/types").Provider) & (string | import("@smithy/types").Provider | import("@smithy/types").Endpoint | import("@smithy/types").Provider | 
import("@smithy/types").EndpointV2 | import("@smithy/types").Provider)) | undefined; + endpointProvider: (endpointParams: import("./endpoint/EndpointParameters").EndpointParameters, context?: { + logger?: import("@smithy/types").Logger | undefined; + }) => import("@smithy/types").EndpointV2; + tls?: boolean | undefined; + serviceConfiguredEndpoint?: undefined; + httpAuthSchemes: import("@smithy/types").HttpAuthScheme[] | ({ + schemeId: string; + identityProvider: (ipc: import("@smithy/types").IdentityProviderConfig) => import("@smithy/types").IdentityProvider | undefined; + signer: import("@aws-sdk/core").AwsSdkSigV4Signer; + } | { + schemeId: string; + identityProvider: (ipc: import("@smithy/types").IdentityProviderConfig) => import("@smithy/types").IdentityProvider | (() => Promise<{}>); + signer: import("@smithy/core").NoAuthSigner; + })[]; + httpAuthSchemeProvider: import("./auth/httpAuthSchemeProvider").SSOOIDCHttpAuthSchemeProvider; + credentials?: import("@smithy/types").AwsCredentialIdentity | import("@smithy/types").AwsCredentialIdentityProvider | undefined; + signer?: import("@smithy/types").RequestSigner | ((authScheme?: import("@smithy/types").AuthScheme | undefined) => Promise) | undefined; + signingEscapePath?: boolean | undefined; + systemClockOffset?: number | undefined; + signingRegion?: string | undefined; + signerConstructor?: (new (options: import("@smithy/signature-v4").SignatureV4Init & import("@smithy/signature-v4").SignatureV4CryptoInit) => import("@smithy/types").RequestSigner) | undefined; +}; diff --git a/amplify/functions/deleteDocument/node_modules/@aws-sdk/nested-clients/dist-types/submodules/sso-oidc/runtimeConfig.native.d.ts b/amplify/functions/deleteDocument/node_modules/@aws-sdk/nested-clients/dist-types/submodules/sso-oidc/runtimeConfig.native.d.ts new file mode 100644 index 0000000..86acac7 --- /dev/null +++ 
b/amplify/functions/deleteDocument/node_modules/@aws-sdk/nested-clients/dist-types/submodules/sso-oidc/runtimeConfig.native.d.ts @@ -0,0 +1,56 @@ +import { SSOOIDCClientConfig } from "./SSOOIDCClient"; +/** + * @internal + */ +export declare const getRuntimeConfig: (config: SSOOIDCClientConfig) => { + runtime: string; + sha256: import("@smithy/types").HashConstructor; + requestHandler: import("@smithy/types").NodeHttpHandlerOptions | import("@smithy/types").FetchHttpHandlerOptions | Record | import("@smithy/protocol-http").HttpHandler | import("@smithy/fetch-http-handler").FetchHttpHandler; + apiVersion: string; + cacheMiddleware?: boolean | undefined; + urlParser: import("@smithy/types").UrlParser; + bodyLengthChecker: import("@smithy/types").BodyLengthCalculator; + streamCollector: import("@smithy/types").StreamCollector; + base64Decoder: import("@smithy/types").Decoder; + base64Encoder: (_input: string | Uint8Array) => string; + utf8Decoder: import("@smithy/types").Decoder; + utf8Encoder: (input: string | Uint8Array) => string; + disableHostPrefix: boolean; + serviceId: string; + useDualstackEndpoint: boolean | import("@smithy/types").Provider; + useFipsEndpoint: boolean | import("@smithy/types").Provider; + region: string | import("@smithy/types").Provider; + profile?: string | undefined; + defaultUserAgentProvider: (config?: import("@aws-sdk/util-user-agent-browser").PreviouslyResolved | undefined) => Promise; + maxAttempts: number | import("@smithy/types").Provider; + retryMode: string | import("@smithy/types").Provider; + logger: import("@smithy/types").Logger; + extensions: import("./runtimeExtensions").RuntimeExtension[]; + defaultsMode: import("@smithy/smithy-client").DefaultsMode | import("@smithy/types").Provider; + customUserAgent?: string | import("@smithy/types").UserAgent | undefined; + userAgentAppId?: string | import("@smithy/types").Provider | undefined; + retryStrategy?: import("@smithy/types").RetryStrategy | 
import("@smithy/types").RetryStrategyV2 | undefined; + endpoint?: ((string | import("@smithy/types").Endpoint | import("@smithy/types").Provider | import("@smithy/types").EndpointV2 | import("@smithy/types").Provider) & (string | import("@smithy/types").Provider | import("@smithy/types").Endpoint | import("@smithy/types").Provider | import("@smithy/types").EndpointV2 | import("@smithy/types").Provider)) | undefined; + endpointProvider: (endpointParams: import("./endpoint/EndpointParameters").EndpointParameters, context?: { + logger?: import("@smithy/types").Logger | undefined; + }) => import("@smithy/types").EndpointV2; + tls?: boolean | undefined; + serviceConfiguredEndpoint?: undefined; + authSchemePreference?: string[] | import("@smithy/types").Provider | undefined; + httpAuthSchemes: import("@smithy/types").HttpAuthScheme[] | ({ + schemeId: string; + identityProvider: (ipc: import("@smithy/types").IdentityProviderConfig) => import("@smithy/types").IdentityProvider | undefined; + signer: import("@aws-sdk/core").AwsSdkSigV4Signer; + } | { + schemeId: string; + identityProvider: (ipc: import("@smithy/types").IdentityProviderConfig) => import("@smithy/types").IdentityProvider | (() => Promise<{}>); + signer: import("@smithy/core").NoAuthSigner; + })[]; + httpAuthSchemeProvider: import("./auth/httpAuthSchemeProvider").SSOOIDCHttpAuthSchemeProvider; + credentials?: import("@smithy/types").AwsCredentialIdentity | import("@smithy/types").AwsCredentialIdentityProvider | undefined; + signer?: import("@smithy/types").RequestSigner | ((authScheme?: import("@smithy/types").AuthScheme | undefined) => Promise) | undefined; + signingEscapePath?: boolean | undefined; + systemClockOffset?: number | undefined; + signingRegion?: string | undefined; + signerConstructor?: (new (options: import("@smithy/signature-v4").SignatureV4Init & import("@smithy/signature-v4").SignatureV4CryptoInit) => import("@smithy/types").RequestSigner) | undefined; +}; diff --git 
a/amplify/functions/deleteDocument/node_modules/@aws-sdk/nested-clients/dist-types/submodules/sso-oidc/runtimeConfig.shared.d.ts b/amplify/functions/deleteDocument/node_modules/@aws-sdk/nested-clients/dist-types/submodules/sso-oidc/runtimeConfig.shared.d.ts new file mode 100644 index 0000000..e110017 --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@aws-sdk/nested-clients/dist-types/submodules/sso-oidc/runtimeConfig.shared.d.ts @@ -0,0 +1,32 @@ +import { AwsSdkSigV4Signer } from "@aws-sdk/core"; +import { NoAuthSigner } from "@smithy/core"; +import { IdentityProviderConfig } from "@smithy/types"; +import { SSOOIDCClientConfig } from "./SSOOIDCClient"; +/** + * @internal + */ +export declare const getRuntimeConfig: (config: SSOOIDCClientConfig) => { + apiVersion: string; + base64Decoder: import("@smithy/types").Decoder; + base64Encoder: (_input: string | Uint8Array) => string; + disableHostPrefix: boolean; + endpointProvider: (endpointParams: import("./endpoint/EndpointParameters").EndpointParameters, context?: { + logger?: import("@smithy/types").Logger | undefined; + }) => import("@smithy/types").EndpointV2; + extensions: import("./runtimeExtensions").RuntimeExtension[]; + httpAuthSchemeProvider: import("./auth/httpAuthSchemeProvider").SSOOIDCHttpAuthSchemeProvider; + httpAuthSchemes: import("@smithy/types").HttpAuthScheme[] | ({ + schemeId: string; + identityProvider: (ipc: IdentityProviderConfig) => import("@smithy/types").IdentityProvider | undefined; + signer: AwsSdkSigV4Signer; + } | { + schemeId: string; + identityProvider: (ipc: IdentityProviderConfig) => import("@smithy/types").IdentityProvider | (() => Promise<{}>); + signer: NoAuthSigner; + })[]; + logger: import("@smithy/types").Logger; + serviceId: string; + urlParser: import("@smithy/types").UrlParser; + utf8Decoder: import("@smithy/types").Decoder; + utf8Encoder: (input: string | Uint8Array) => string; +}; diff --git 
a/amplify/functions/deleteDocument/node_modules/@aws-sdk/nested-clients/dist-types/submodules/sso-oidc/runtimeExtensions.d.ts b/amplify/functions/deleteDocument/node_modules/@aws-sdk/nested-clients/dist-types/submodules/sso-oidc/runtimeExtensions.d.ts new file mode 100644 index 0000000..1bdf704 --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@aws-sdk/nested-clients/dist-types/submodules/sso-oidc/runtimeExtensions.d.ts @@ -0,0 +1,17 @@ +import { SSOOIDCExtensionConfiguration } from "./extensionConfiguration"; +/** + * @public + */ +export interface RuntimeExtension { + configure(extensionConfiguration: SSOOIDCExtensionConfiguration): void; +} +/** + * @public + */ +export interface RuntimeExtensionsConfig { + extensions: RuntimeExtension[]; +} +/** + * @internal + */ +export declare const resolveRuntimeExtensions: (runtimeConfig: any, extensions: RuntimeExtension[]) => any; diff --git a/amplify/functions/deleteDocument/node_modules/@aws-sdk/nested-clients/dist-types/submodules/sts/STS.d.ts b/amplify/functions/deleteDocument/node_modules/@aws-sdk/nested-clients/dist-types/submodules/sts/STS.d.ts new file mode 100644 index 0000000..bee83a5 --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@aws-sdk/nested-clients/dist-types/submodules/sts/STS.d.ts @@ -0,0 +1,27 @@ +import { HttpHandlerOptions as __HttpHandlerOptions } from "@smithy/types"; +import { AssumeRoleCommandInput, AssumeRoleCommandOutput } from "./commands/AssumeRoleCommand"; +import { AssumeRoleWithWebIdentityCommandInput, AssumeRoleWithWebIdentityCommandOutput } from "./commands/AssumeRoleWithWebIdentityCommand"; +import { STSClient } from "./STSClient"; +export interface STS { + /** + * @see {@link AssumeRoleCommand} + */ + assumeRole(args: AssumeRoleCommandInput, options?: __HttpHandlerOptions): Promise; + assumeRole(args: AssumeRoleCommandInput, cb: (err: any, data?: AssumeRoleCommandOutput) => void): void; + assumeRole(args: AssumeRoleCommandInput, options: 
__HttpHandlerOptions, cb: (err: any, data?: AssumeRoleCommandOutput) => void): void; + /** + * @see {@link AssumeRoleWithWebIdentityCommand} + */ + assumeRoleWithWebIdentity(args: AssumeRoleWithWebIdentityCommandInput, options?: __HttpHandlerOptions): Promise; + assumeRoleWithWebIdentity(args: AssumeRoleWithWebIdentityCommandInput, cb: (err: any, data?: AssumeRoleWithWebIdentityCommandOutput) => void): void; + assumeRoleWithWebIdentity(args: AssumeRoleWithWebIdentityCommandInput, options: __HttpHandlerOptions, cb: (err: any, data?: AssumeRoleWithWebIdentityCommandOutput) => void): void; +} +/** + * Security Token Service + *

Security Token Service (STS) enables you to request temporary, limited-privilege + * credentials for users. This guide provides descriptions of the STS API. For + * more information about using this service, see Temporary Security Credentials.

+ * @public + */ +export declare class STS extends STSClient implements STS { +} diff --git a/amplify/functions/deleteDocument/node_modules/@aws-sdk/nested-clients/dist-types/submodules/sts/STSClient.d.ts b/amplify/functions/deleteDocument/node_modules/@aws-sdk/nested-clients/dist-types/submodules/sts/STSClient.d.ts new file mode 100644 index 0000000..bd21c4b --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@aws-sdk/nested-clients/dist-types/submodules/sts/STSClient.d.ts @@ -0,0 +1,192 @@ +import { HostHeaderInputConfig, HostHeaderResolvedConfig } from "@aws-sdk/middleware-host-header"; +import { UserAgentInputConfig, UserAgentResolvedConfig } from "@aws-sdk/middleware-user-agent"; +import { RegionInputConfig, RegionResolvedConfig } from "@smithy/config-resolver"; +import { EndpointInputConfig, EndpointResolvedConfig } from "@smithy/middleware-endpoint"; +import { RetryInputConfig, RetryResolvedConfig } from "@smithy/middleware-retry"; +import { HttpHandlerUserInput as __HttpHandlerUserInput } from "@smithy/protocol-http"; +import { Client as __Client, DefaultsMode as __DefaultsMode, SmithyConfiguration as __SmithyConfiguration, SmithyResolvedConfiguration as __SmithyResolvedConfiguration } from "@smithy/smithy-client"; +import { AwsCredentialIdentityProvider, BodyLengthCalculator as __BodyLengthCalculator, CheckOptionalClientConfig as __CheckOptionalClientConfig, ChecksumConstructor as __ChecksumConstructor, Decoder as __Decoder, Encoder as __Encoder, HashConstructor as __HashConstructor, HttpHandlerOptions as __HttpHandlerOptions, Logger as __Logger, Provider as __Provider, Provider, StreamCollector as __StreamCollector, UrlParser as __UrlParser, UserAgent as __UserAgent } from "@smithy/types"; +import { HttpAuthSchemeInputConfig, HttpAuthSchemeResolvedConfig } from "./auth/httpAuthSchemeProvider"; +import { AssumeRoleCommandInput, AssumeRoleCommandOutput } from "./commands/AssumeRoleCommand"; +import { AssumeRoleWithWebIdentityCommandInput, 
AssumeRoleWithWebIdentityCommandOutput } from "./commands/AssumeRoleWithWebIdentityCommand"; +import { ClientInputEndpointParameters, ClientResolvedEndpointParameters, EndpointParameters } from "./endpoint/EndpointParameters"; +import { RuntimeExtension, RuntimeExtensionsConfig } from "./runtimeExtensions"; +export { __Client }; +/** + * @public + */ +export type ServiceInputTypes = AssumeRoleCommandInput | AssumeRoleWithWebIdentityCommandInput; +/** + * @public + */ +export type ServiceOutputTypes = AssumeRoleCommandOutput | AssumeRoleWithWebIdentityCommandOutput; +/** + * @public + */ +export interface ClientDefaults extends Partial<__SmithyConfiguration<__HttpHandlerOptions>> { + /** + * The HTTP handler to use or its constructor options. Fetch in browser and Https in Nodejs. + */ + requestHandler?: __HttpHandlerUserInput; + /** + * A constructor for a class implementing the {@link @smithy/types#ChecksumConstructor} interface + * that computes the SHA-256 HMAC or checksum of a string or binary buffer. + * @internal + */ + sha256?: __ChecksumConstructor | __HashConstructor; + /** + * The function that will be used to convert strings into HTTP endpoints. + * @internal + */ + urlParser?: __UrlParser; + /** + * A function that can calculate the length of a request body. + * @internal + */ + bodyLengthChecker?: __BodyLengthCalculator; + /** + * A function that converts a stream into an array of bytes. + * @internal + */ + streamCollector?: __StreamCollector; + /** + * The function that will be used to convert a base64-encoded string to a byte array. + * @internal + */ + base64Decoder?: __Decoder; + /** + * The function that will be used to convert binary data to a base64-encoded string. + * @internal + */ + base64Encoder?: __Encoder; + /** + * The function that will be used to convert a UTF8-encoded string to a byte array. + * @internal + */ + utf8Decoder?: __Decoder; + /** + * The function that will be used to convert binary data to a UTF-8 encoded string. 
+ * @internal + */ + utf8Encoder?: __Encoder; + /** + * The runtime environment. + * @internal + */ + runtime?: string; + /** + * Disable dynamically changing the endpoint of the client based on the hostPrefix + * trait of an operation. + */ + disableHostPrefix?: boolean; + /** + * Unique service identifier. + * @internal + */ + serviceId?: string; + /** + * Enables IPv6/IPv4 dualstack endpoint. + */ + useDualstackEndpoint?: boolean | __Provider; + /** + * Enables FIPS compatible endpoints. + */ + useFipsEndpoint?: boolean | __Provider; + /** + * The AWS region to which this client will send requests + */ + region?: string | __Provider; + /** + * Setting a client profile is similar to setting a value for the + * AWS_PROFILE environment variable. Setting a profile on a client + * in code only affects the single client instance, unlike AWS_PROFILE. + * + * When set, and only for environments where an AWS configuration + * file exists, fields configurable by this file will be retrieved + * from the specified profile within that file. + * Conflicting code configuration and environment variables will + * still have higher priority. + * + * For client credential resolution that involves checking the AWS + * configuration file, the client's profile (this value) will be + * used unless a different profile is set in the credential + * provider options. + * + */ + profile?: string; + /** + * The provider populating default tracking information to be sent with `user-agent`, `x-amz-user-agent` header + * @internal + */ + defaultUserAgentProvider?: Provider<__UserAgent>; + /** + * Default credentials provider; Not available in browser runtime. + * @deprecated + * @internal + */ + credentialDefaultProvider?: (input: any) => AwsCredentialIdentityProvider; + /** + * Value for how many times a request will be made at most in case of retry. + */ + maxAttempts?: number | __Provider; + /** + * Specifies which retry algorithm to use. 
+ * @see https://docs.aws.amazon.com/AWSJavaScriptSDK/v3/latest/Package/-smithy-util-retry/Enum/RETRY_MODES/ + * + */ + retryMode?: string | __Provider; + /** + * Optional logger for logging debug/info/warn/error. + */ + logger?: __Logger; + /** + * Optional extensions + */ + extensions?: RuntimeExtension[]; + /** + * The {@link @smithy/smithy-client#DefaultsMode} that will be used to determine how certain default configuration options are resolved in the SDK. + */ + defaultsMode?: __DefaultsMode | __Provider<__DefaultsMode>; +} +/** + * @public + */ +export type STSClientConfigType = Partial<__SmithyConfiguration<__HttpHandlerOptions>> & ClientDefaults & UserAgentInputConfig & RetryInputConfig & RegionInputConfig & HostHeaderInputConfig & EndpointInputConfig & HttpAuthSchemeInputConfig & ClientInputEndpointParameters; +/** + * @public + * + * The configuration interface of STSClient class constructor that set the region, credentials and other options. + */ +export interface STSClientConfig extends STSClientConfigType { +} +/** + * @public + */ +export type STSClientResolvedConfigType = __SmithyResolvedConfiguration<__HttpHandlerOptions> & Required & RuntimeExtensionsConfig & UserAgentResolvedConfig & RetryResolvedConfig & RegionResolvedConfig & HostHeaderResolvedConfig & EndpointResolvedConfig & HttpAuthSchemeResolvedConfig & ClientResolvedEndpointParameters; +/** + * @public + * + * The resolved configuration interface of STSClient class. This is resolved and normalized from the {@link STSClientConfig | constructor configuration interface}. + */ +export interface STSClientResolvedConfig extends STSClientResolvedConfigType { +} +/** + * Security Token Service + *

Security Token Service (STS) enables you to request temporary, limited-privilege + * credentials for users. This guide provides descriptions of the STS API. For + * more information about using this service, see Temporary Security Credentials.

+ * @public + */ +export declare class STSClient extends __Client<__HttpHandlerOptions, ServiceInputTypes, ServiceOutputTypes, STSClientResolvedConfig> { + /** + * The resolved configuration of STSClient class. This is resolved and normalized from the {@link STSClientConfig | constructor configuration interface}. + */ + readonly config: STSClientResolvedConfig; + constructor(...[configuration]: __CheckOptionalClientConfig); + /** + * Destroy underlying resources, like sockets. It's usually not necessary to do this. + * However in Node.js, it's best to explicitly shut down the client's agent when it is no longer needed. + * Otherwise, sockets might stay open for quite a long time before the server terminates them. + */ + destroy(): void; +} diff --git a/amplify/functions/deleteDocument/node_modules/@aws-sdk/nested-clients/dist-types/submodules/sts/auth/httpAuthExtensionConfiguration.d.ts b/amplify/functions/deleteDocument/node_modules/@aws-sdk/nested-clients/dist-types/submodules/sts/auth/httpAuthExtensionConfiguration.d.ts new file mode 100644 index 0000000..1066c88 --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@aws-sdk/nested-clients/dist-types/submodules/sts/auth/httpAuthExtensionConfiguration.d.ts @@ -0,0 +1,29 @@ +import { AwsCredentialIdentity, AwsCredentialIdentityProvider, HttpAuthScheme } from "@smithy/types"; +import { STSHttpAuthSchemeProvider } from "./httpAuthSchemeProvider"; +/** + * @internal + */ +export interface HttpAuthExtensionConfiguration { + setHttpAuthScheme(httpAuthScheme: HttpAuthScheme): void; + httpAuthSchemes(): HttpAuthScheme[]; + setHttpAuthSchemeProvider(httpAuthSchemeProvider: STSHttpAuthSchemeProvider): void; + httpAuthSchemeProvider(): STSHttpAuthSchemeProvider; + setCredentials(credentials: AwsCredentialIdentity | AwsCredentialIdentityProvider): void; + credentials(): AwsCredentialIdentity | AwsCredentialIdentityProvider | undefined; +} +/** + * @internal + */ +export type HttpAuthRuntimeConfig = Partial<{ + 
httpAuthSchemes: HttpAuthScheme[]; + httpAuthSchemeProvider: STSHttpAuthSchemeProvider; + credentials: AwsCredentialIdentity | AwsCredentialIdentityProvider; +}>; +/** + * @internal + */ +export declare const getHttpAuthExtensionConfiguration: (runtimeConfig: HttpAuthRuntimeConfig) => HttpAuthExtensionConfiguration; +/** + * @internal + */ +export declare const resolveHttpAuthRuntimeConfig: (config: HttpAuthExtensionConfiguration) => HttpAuthRuntimeConfig; diff --git a/amplify/functions/deleteDocument/node_modules/@aws-sdk/nested-clients/dist-types/submodules/sts/auth/httpAuthSchemeProvider.d.ts b/amplify/functions/deleteDocument/node_modules/@aws-sdk/nested-clients/dist-types/submodules/sts/auth/httpAuthSchemeProvider.d.ts new file mode 100644 index 0000000..8e39cbe --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@aws-sdk/nested-clients/dist-types/submodules/sts/auth/httpAuthSchemeProvider.d.ts @@ -0,0 +1,85 @@ +import { AwsSdkSigV4AuthInputConfig, AwsSdkSigV4AuthResolvedConfig, AwsSdkSigV4PreviouslyResolved } from "@aws-sdk/core"; +import { Client, HandlerExecutionContext, HttpAuthScheme, HttpAuthSchemeParameters, HttpAuthSchemeParametersProvider, HttpAuthSchemeProvider, Provider } from "@smithy/types"; +import { STSClientResolvedConfig } from "../STSClient"; +/** + * @internal + */ +export interface STSHttpAuthSchemeParameters extends HttpAuthSchemeParameters { + region?: string; +} +/** + * @internal + */ +export interface STSHttpAuthSchemeParametersProvider extends HttpAuthSchemeParametersProvider { +} +/** + * @internal + */ +export declare const defaultSTSHttpAuthSchemeParametersProvider: (config: STSClientResolvedConfig, context: HandlerExecutionContext, input: object) => Promise; +/** + * @internal + */ +export interface STSHttpAuthSchemeProvider extends HttpAuthSchemeProvider { +} +/** + * @internal + */ +export declare const defaultSTSHttpAuthSchemeProvider: STSHttpAuthSchemeProvider; +export interface StsAuthInputConfig { +} +export 
interface StsAuthResolvedConfig { + /** + * Reference to STSClient class constructor. + * @internal + */ + stsClientCtor: new (clientConfig: any) => Client; +} +export declare const resolveStsAuthConfig: (input: T & StsAuthInputConfig) => T & StsAuthResolvedConfig; +/** + * @internal + */ +export interface HttpAuthSchemeInputConfig extends StsAuthInputConfig, AwsSdkSigV4AuthInputConfig { + /** + * A comma-separated list of case-sensitive auth scheme names. + * An auth scheme name is a fully qualified auth scheme ID with the namespace prefix trimmed. + * For example, the auth scheme with ID aws.auth#sigv4 is named sigv4. + * @public + */ + authSchemePreference?: string[] | Provider; + /** + * Configuration of HttpAuthSchemes for a client which provides default identity providers and signers per auth scheme. + * @internal + */ + httpAuthSchemes?: HttpAuthScheme[]; + /** + * Configuration of an HttpAuthSchemeProvider for a client which resolves which HttpAuthScheme to use. + * @internal + */ + httpAuthSchemeProvider?: STSHttpAuthSchemeProvider; +} +/** + * @internal + */ +export interface HttpAuthSchemeResolvedConfig extends StsAuthResolvedConfig, AwsSdkSigV4AuthResolvedConfig { + /** + * A comma-separated list of case-sensitive auth scheme names. + * An auth scheme name is a fully qualified auth scheme ID with the namespace prefix trimmed. + * For example, the auth scheme with ID aws.auth#sigv4 is named sigv4. + * @public + */ + readonly authSchemePreference: Provider; + /** + * Configuration of HttpAuthSchemes for a client which provides default identity providers and signers per auth scheme. + * @internal + */ + readonly httpAuthSchemes: HttpAuthScheme[]; + /** + * Configuration of an HttpAuthSchemeProvider for a client which resolves which HttpAuthScheme to use. 
+ * @internal + */ + readonly httpAuthSchemeProvider: STSHttpAuthSchemeProvider; +} +/** + * @internal + */ +export declare const resolveHttpAuthSchemeConfig: (config: T & HttpAuthSchemeInputConfig & AwsSdkSigV4PreviouslyResolved) => T & HttpAuthSchemeResolvedConfig; diff --git a/amplify/functions/deleteDocument/node_modules/@aws-sdk/nested-clients/dist-types/submodules/sts/commands/AssumeRoleCommand.d.ts b/amplify/functions/deleteDocument/node_modules/@aws-sdk/nested-clients/dist-types/submodules/sts/commands/AssumeRoleCommand.d.ts new file mode 100644 index 0000000..f9e6ccd --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@aws-sdk/nested-clients/dist-types/submodules/sts/commands/AssumeRoleCommand.d.ts @@ -0,0 +1,269 @@ +import { Command as $Command } from "@smithy/smithy-client"; +import { MetadataBearer as __MetadataBearer } from "@smithy/types"; +import { AssumeRoleRequest, AssumeRoleResponse } from "../models/models_0"; +import { ServiceInputTypes, ServiceOutputTypes, STSClientResolvedConfig } from "../STSClient"; +/** + * @public + */ +export type { __MetadataBearer }; +export { $Command }; +/** + * @public + * + * The input for {@link AssumeRoleCommand}. + */ +export interface AssumeRoleCommandInput extends AssumeRoleRequest { +} +/** + * @public + * + * The output of {@link AssumeRoleCommand}. + */ +export interface AssumeRoleCommandOutput extends AssumeRoleResponse, __MetadataBearer { +} +declare const AssumeRoleCommand_base: { + new (input: AssumeRoleCommandInput): import("@smithy/smithy-client").CommandImpl; + new (__0_0: AssumeRoleCommandInput): import("@smithy/smithy-client").CommandImpl; + getEndpointParameterInstructions(): import("@smithy/middleware-endpoint").EndpointParameterInstructions; +}; +/** + *

Returns a set of temporary security credentials that you can use to access Amazon Web Services + * resources. These temporary credentials consist of an access key ID, a secret access key, + * and a security token. Typically, you use AssumeRole within your account or for + * cross-account access. For a comparison of AssumeRole with other API operations + * that produce temporary credentials, see Requesting Temporary Security + * Credentials and Compare STS + * credentials in the IAM User Guide.

+ *

+ * Permissions + *

+ *

The temporary security credentials created by AssumeRole can be used to + * make API calls to any Amazon Web Services service with the following exception: You cannot call the + * Amazon Web Services STS GetFederationToken or GetSessionToken API + * operations.

+ *

(Optional) You can pass inline or managed session policies to this operation. You can + * pass a single JSON policy document to use as an inline session policy. You can also specify + * up to 10 managed policy Amazon Resource Names (ARNs) to use as managed session policies. + * The plaintext that you use for both inline and managed session policies can't exceed 2,048 + * characters. Passing policies to this operation returns new + * temporary credentials. The resulting session's permissions are the intersection of the + * role's identity-based policy and the session policies. You can use the role's temporary + * credentials in subsequent Amazon Web Services API calls to access resources in the account that owns + * the role. You cannot use session policies to grant more permissions than those allowed + * by the identity-based policy of the role that is being assumed. For more information, see + * Session + * Policies in the IAM User Guide.

+ *

When you create a role, you create two policies: a role trust policy that specifies + * who can assume the role, and a permissions policy that specifies + * what can be done with the role. You specify the trusted principal + * that is allowed to assume the role in the role trust policy.

+ *

To assume a role from a different account, your Amazon Web Services account must be trusted by the + * role. The trust relationship is defined in the role's trust policy when the role is + * created. That trust policy states which accounts are allowed to delegate that access to + * users in the account.

+ *

A user who wants to access a role in a different account must also have permissions that + * are delegated from the account administrator. The administrator must attach a policy that + * allows the user to call AssumeRole for the ARN of the role in the other + * account.

+ *

To allow a user to assume a role in the same account, you can do either of the + * following:

+ *
    + *
  • + *

    Attach a policy to the user that allows the user to call AssumeRole + * (as long as the role's trust policy trusts the account).

    + *
  • + *
  • + *

    Add the user as a principal directly in the role's trust policy.

    + *
  • + *
+ *

You can do either because the role’s trust policy acts as an IAM resource-based + * policy. When a resource-based policy grants access to a principal in the same account, no + * additional identity-based policy is required. For more information about trust policies and + * resource-based policies, see IAM Policies in the + * IAM User Guide.

+ *

+ * Tags + *

+ *

(Optional) You can pass tag key-value pairs to your session. These tags are called + * session tags. For more information about session tags, see Passing Session Tags in STS in the + * IAM User Guide.

+ *

An administrator must grant you the permissions necessary to pass session tags. The + * administrator can also create granular permissions to allow you to pass only specific + * session tags. For more information, see Tutorial: Using Tags + * for Attribute-Based Access Control in the + * IAM User Guide.

+ *

You can set the session tags as transitive. Transitive tags persist during role + * chaining. For more information, see Chaining Roles + * with Session Tags in the IAM User Guide.

+ *

+ * Using MFA with AssumeRole + *

+ *

(Optional) You can include multi-factor authentication (MFA) information when you call + * AssumeRole. This is useful for cross-account scenarios to ensure that the + * user that assumes the role has been authenticated with an Amazon Web Services MFA device. In that + * scenario, the trust policy of the role being assumed includes a condition that tests for + * MFA authentication. If the caller does not include valid MFA information, the request to + * assume the role is denied. The condition in a trust policy that tests for MFA + * authentication might look like the following example.

+ *

+ * "Condition": \{"Bool": \{"aws:MultiFactorAuthPresent": true\}\} + *

+ *

For more information, see Configuring MFA-Protected API Access + * in the IAM User Guide guide.

+ *

To use MFA with AssumeRole, you pass values for the + * SerialNumber and TokenCode parameters. The + * SerialNumber value identifies the user's hardware or virtual MFA device. + * The TokenCode is the time-based one-time password (TOTP) that the MFA device + * produces.

+ * @example + * Use a bare-bones client and the command you need to make an API call. + * ```javascript + * import { STSClient, AssumeRoleCommand } from "@aws-sdk/client-sts"; // ES Modules import + * // const { STSClient, AssumeRoleCommand } = require("@aws-sdk/client-sts"); // CommonJS import + * const client = new STSClient(config); + * const input = { // AssumeRoleRequest + * RoleArn: "STRING_VALUE", // required + * RoleSessionName: "STRING_VALUE", // required + * PolicyArns: [ // policyDescriptorListType + * { // PolicyDescriptorType + * arn: "STRING_VALUE", + * }, + * ], + * Policy: "STRING_VALUE", + * DurationSeconds: Number("int"), + * Tags: [ // tagListType + * { // Tag + * Key: "STRING_VALUE", // required + * Value: "STRING_VALUE", // required + * }, + * ], + * TransitiveTagKeys: [ // tagKeyListType + * "STRING_VALUE", + * ], + * ExternalId: "STRING_VALUE", + * SerialNumber: "STRING_VALUE", + * TokenCode: "STRING_VALUE", + * SourceIdentity: "STRING_VALUE", + * ProvidedContexts: [ // ProvidedContextsListType + * { // ProvidedContext + * ProviderArn: "STRING_VALUE", + * ContextAssertion: "STRING_VALUE", + * }, + * ], + * }; + * const command = new AssumeRoleCommand(input); + * const response = await client.send(command); + * // { // AssumeRoleResponse + * // Credentials: { // Credentials + * // AccessKeyId: "STRING_VALUE", // required + * // SecretAccessKey: "STRING_VALUE", // required + * // SessionToken: "STRING_VALUE", // required + * // Expiration: new Date("TIMESTAMP"), // required + * // }, + * // AssumedRoleUser: { // AssumedRoleUser + * // AssumedRoleId: "STRING_VALUE", // required + * // Arn: "STRING_VALUE", // required + * // }, + * // PackedPolicySize: Number("int"), + * // SourceIdentity: "STRING_VALUE", + * // }; + * + * ``` + * + * @param AssumeRoleCommandInput - {@link AssumeRoleCommandInput} + * @returns {@link AssumeRoleCommandOutput} + * @see {@link AssumeRoleCommandInput} for command's `input` shape. 
+ * @see {@link AssumeRoleCommandOutput} for command's `response` shape. + * @see {@link STSClientResolvedConfig | config} for STSClient's `config` shape. + * + * @throws {@link ExpiredTokenException} (client fault) + *

The web identity token that was passed is expired or is not valid. Get a new identity + * token from the identity provider and then retry the request.

+ * + * @throws {@link MalformedPolicyDocumentException} (client fault) + *

The request was rejected because the policy document was malformed. The error message + * describes the specific error.

+ * + * @throws {@link PackedPolicyTooLargeException} (client fault) + *

The request was rejected because the total packed size of the session policies and + * session tags combined was too large. An Amazon Web Services conversion compresses the session policy + * document, session policy ARNs, and session tags into a packed binary format that has a + * separate limit. The error message indicates by percentage how close the policies and + * tags are to the upper size limit. For more information, see Passing Session Tags in STS in + * the IAM User Guide.

+ *

You could receive this error even though you meet other defined session policy and + * session tag limits. For more information, see IAM and STS Entity Character Limits in the IAM User + * Guide.

+ * + * @throws {@link RegionDisabledException} (client fault) + *

STS is not activated in the requested region for the account that is being asked to + * generate credentials. The account administrator must use the IAM console to activate + * STS in that region. For more information, see Activating and + * Deactivating STS in an Amazon Web Services Region in the IAM User + * Guide.

+ * + * @throws {@link STSServiceException} + *

Base exception class for all service exceptions from STS service.

+ * + * + * @example To assume a role + * ```javascript + * // + * const input = { + * ExternalId: "123ABC", + * Policy: `{"Version":"2012-10-17","Statement":[{"Sid":"Stmt1","Effect":"Allow","Action":"s3:ListAllMyBuckets","Resource":"*"}]}`, + * RoleArn: "arn:aws:iam::123456789012:role/demo", + * RoleSessionName: "testAssumeRoleSession", + * Tags: [ + * { + * Key: "Project", + * Value: "Unicorn" + * }, + * { + * Key: "Team", + * Value: "Automation" + * }, + * { + * Key: "Cost-Center", + * Value: "12345" + * } + * ], + * TransitiveTagKeys: [ + * "Project", + * "Cost-Center" + * ] + * }; + * const command = new AssumeRoleCommand(input); + * const response = await client.send(command); + * /* response is + * { + * AssumedRoleUser: { + * Arn: "arn:aws:sts::123456789012:assumed-role/demo/Bob", + * AssumedRoleId: "ARO123EXAMPLE123:Bob" + * }, + * Credentials: { + * AccessKeyId: "AKIAIOSFODNN7EXAMPLE", + * Expiration: "2011-07-15T23:28:33.359Z", + * SecretAccessKey: "wJalrXUtnFEMI/K7MDENG/bPxRfiCYzEXAMPLEKEY", + * SessionToken: "AQoDYXdzEPT//////////wEXAMPLEtc764bNrC9SAPBSM22wDOk4x4HIZ8j4FZTwdQWLWsKWHGBuFqwAeMicRXmxfpSPfIeoIYRqTflfKD8YUuwthAx7mSEI/qkPpKPi/kMcGdQrmGdeehM4IC1NtBmUpp2wUE8phUZampKsburEDy0KPkyQDYwT7WZ0wq5VSXDvp75YU9HFvlRd8Tx6q6fE8YQcHNVXAkiY9q6d+xo0rKwT38xVqr7ZD0u0iPPkUL64lIZbqBAz+scqKmlzm8FDrypNC9Yjc8fPOLn9FX9KSYvKTr4rvx3iSIlTJabIQwj2ICCR/oLxBA==" + * }, + * PackedPolicySize: 8 + * } + * *\/ + * ``` + * + * @public + */ +export declare class AssumeRoleCommand extends AssumeRoleCommand_base { + /** @internal type navigation helper, not in runtime. 
*/ + protected static __types: { + api: { + input: AssumeRoleRequest; + output: AssumeRoleResponse; + }; + sdk: { + input: AssumeRoleCommandInput; + output: AssumeRoleCommandOutput; + }; + }; +} diff --git a/amplify/functions/deleteDocument/node_modules/@aws-sdk/nested-clients/dist-types/submodules/sts/commands/AssumeRoleWithWebIdentityCommand.d.ts b/amplify/functions/deleteDocument/node_modules/@aws-sdk/nested-clients/dist-types/submodules/sts/commands/AssumeRoleWithWebIdentityCommand.d.ts new file mode 100644 index 0000000..58d7df8 --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@aws-sdk/nested-clients/dist-types/submodules/sts/commands/AssumeRoleWithWebIdentityCommand.d.ts @@ -0,0 +1,288 @@ +import { Command as $Command } from "@smithy/smithy-client"; +import { MetadataBearer as __MetadataBearer } from "@smithy/types"; +import { AssumeRoleWithWebIdentityRequest, AssumeRoleWithWebIdentityResponse } from "../models/models_0"; +import { ServiceInputTypes, ServiceOutputTypes, STSClientResolvedConfig } from "../STSClient"; +/** + * @public + */ +export type { __MetadataBearer }; +export { $Command }; +/** + * @public + * + * The input for {@link AssumeRoleWithWebIdentityCommand}. + */ +export interface AssumeRoleWithWebIdentityCommandInput extends AssumeRoleWithWebIdentityRequest { +} +/** + * @public + * + * The output of {@link AssumeRoleWithWebIdentityCommand}. + */ +export interface AssumeRoleWithWebIdentityCommandOutput extends AssumeRoleWithWebIdentityResponse, __MetadataBearer { +} +declare const AssumeRoleWithWebIdentityCommand_base: { + new (input: AssumeRoleWithWebIdentityCommandInput): import("@smithy/smithy-client").CommandImpl; + new (__0_0: AssumeRoleWithWebIdentityCommandInput): import("@smithy/smithy-client").CommandImpl; + getEndpointParameterInstructions(): import("@smithy/middleware-endpoint").EndpointParameterInstructions; +}; +/** + *

Returns a set of temporary security credentials for users who have been authenticated in + * a mobile or web application with a web identity provider. Example providers include the + * OAuth 2.0 providers Login with Amazon and Facebook, or any OpenID Connect-compatible + * identity provider such as Google or Amazon Cognito federated identities.

+ * + *

For mobile applications, we recommend that you use Amazon Cognito. You can use Amazon Cognito with the + * Amazon Web Services SDK for iOS Developer Guide and the Amazon Web Services SDK for Android Developer Guide to uniquely + * identify a user. You can also supply the user with a consistent identity throughout the + * lifetime of an application.

+ *

To learn more about Amazon Cognito, see Amazon Cognito identity + * pools in Amazon Cognito Developer Guide.

+ *
+ *

Calling AssumeRoleWithWebIdentity does not require the use of Amazon Web Services + * security credentials. Therefore, you can distribute an application (for example, on mobile + * devices) that requests temporary security credentials without including long-term Amazon Web Services + * credentials in the application. You also don't need to deploy server-based proxy services + * that use long-term Amazon Web Services credentials. Instead, the identity of the caller is validated by + * using a token from the web identity provider. For a comparison of + * AssumeRoleWithWebIdentity with the other API operations that produce + * temporary credentials, see Requesting Temporary Security + * Credentials and Compare STS + * credentials in the IAM User Guide.

+ *

The temporary security credentials returned by this API consist of an access key ID, a + * secret access key, and a security token. Applications can use these temporary security + * credentials to sign calls to Amazon Web Services service API operations.

+ *

+ * Session Duration + *

+ *

By default, the temporary security credentials created by + * AssumeRoleWithWebIdentity last for one hour. However, you can use the + * optional DurationSeconds parameter to specify the duration of your session. + * You can provide a value from 900 seconds (15 minutes) up to the maximum session duration + * setting for the role. This setting can have a value from 1 hour to 12 hours. To learn how + * to view the maximum value for your role, see Update the maximum session duration for a role in the + * IAM User Guide. The maximum session duration limit applies when + * you use the AssumeRole* API operations or the assume-role* CLI + * commands. However the limit does not apply when you use those operations to create a + * console URL. For more information, see Using IAM Roles in the + * IAM User Guide.

+ *

+ * Permissions + *

+ *

The temporary security credentials created by AssumeRoleWithWebIdentity can + * be used to make API calls to any Amazon Web Services service with the following exception: you cannot + * call the STS GetFederationToken or GetSessionToken API + * operations.

+ *

(Optional) You can pass inline or managed session policies to + * this operation. You can pass a single JSON policy document to use as an inline session + * policy. You can also specify up to 10 managed policy Amazon Resource Names (ARNs) to use as + * managed session policies. The plaintext that you use for both inline and managed session + * policies can't exceed 2,048 characters. Passing policies to this operation returns new + * temporary credentials. The resulting session's permissions are the intersection of the + * role's identity-based policy and the session policies. You can use the role's temporary + * credentials in subsequent Amazon Web Services API calls to access resources in the account that owns + * the role. You cannot use session policies to grant more permissions than those allowed + * by the identity-based policy of the role that is being assumed. For more information, see + * Session + * Policies in the IAM User Guide.

+ *

+ * Tags + *

+ *

(Optional) You can configure your IdP to pass attributes into your web identity token as + * session tags. Each session tag consists of a key name and an associated value. For more + * information about session tags, see Passing Session Tags in STS in the + * IAM User Guide.

+ *

You can pass up to 50 session tags. The plaintext session tag keys can’t exceed 128 + * characters and the values can’t exceed 256 characters. For these and additional limits, see + * IAM + * and STS Character Limits in the IAM User Guide.

+ * + *

An Amazon Web Services conversion compresses the passed inline session policy, managed policy ARNs, + * and session tags into a packed binary format that has a separate limit. Your request can + * fail for this limit even if your plaintext meets the other requirements. The + * PackedPolicySize response element indicates by percentage how close the + * policies and tags for your request are to the upper size limit.

+ *
+ *

You can pass a session tag with the same key as a tag that is attached to the role. When + * you do, the session tag overrides the role tag with the same key.

+ *

An administrator must grant you the permissions necessary to pass session tags. The + * administrator can also create granular permissions to allow you to pass only specific + * session tags. For more information, see Tutorial: Using Tags + * for Attribute-Based Access Control in the + * IAM User Guide.

+ *

You can set the session tags as transitive. Transitive tags persist during role + * chaining. For more information, see Chaining Roles + * with Session Tags in the IAM User Guide.

+ *

+ * Identities + *

+ *

Before your application can call AssumeRoleWithWebIdentity, you must have + * an identity token from a supported identity provider and create a role that the application + * can assume. The role that your application assumes must trust the identity provider that is + * associated with the identity token. In other words, the identity provider must be specified + * in the role's trust policy.

+ * + *

Calling AssumeRoleWithWebIdentity can result in an entry in your + * CloudTrail logs. The entry includes the Subject of + * the provided web identity token. We recommend that you avoid using any personally + * identifiable information (PII) in this field. For example, you could instead use a GUID + * or a pairwise identifier, as suggested + * in the OIDC specification.

+ *
+ *

For more information about how to use OIDC federation and the + * AssumeRoleWithWebIdentity API, see the following resources:

+ * + * @example + * Use a bare-bones client and the command you need to make an API call. + * ```javascript + * import { STSClient, AssumeRoleWithWebIdentityCommand } from "@aws-sdk/client-sts"; // ES Modules import + * // const { STSClient, AssumeRoleWithWebIdentityCommand } = require("@aws-sdk/client-sts"); // CommonJS import + * const client = new STSClient(config); + * const input = { // AssumeRoleWithWebIdentityRequest + * RoleArn: "STRING_VALUE", // required + * RoleSessionName: "STRING_VALUE", // required + * WebIdentityToken: "STRING_VALUE", // required + * ProviderId: "STRING_VALUE", + * PolicyArns: [ // policyDescriptorListType + * { // PolicyDescriptorType + * arn: "STRING_VALUE", + * }, + * ], + * Policy: "STRING_VALUE", + * DurationSeconds: Number("int"), + * }; + * const command = new AssumeRoleWithWebIdentityCommand(input); + * const response = await client.send(command); + * // { // AssumeRoleWithWebIdentityResponse + * // Credentials: { // Credentials + * // AccessKeyId: "STRING_VALUE", // required + * // SecretAccessKey: "STRING_VALUE", // required + * // SessionToken: "STRING_VALUE", // required + * // Expiration: new Date("TIMESTAMP"), // required + * // }, + * // SubjectFromWebIdentityToken: "STRING_VALUE", + * // AssumedRoleUser: { // AssumedRoleUser + * // AssumedRoleId: "STRING_VALUE", // required + * // Arn: "STRING_VALUE", // required + * // }, + * // PackedPolicySize: Number("int"), + * // Provider: "STRING_VALUE", + * // Audience: "STRING_VALUE", + * // SourceIdentity: "STRING_VALUE", + * // }; + * + * ``` + * + * @param AssumeRoleWithWebIdentityCommandInput - {@link AssumeRoleWithWebIdentityCommandInput} + * @returns {@link AssumeRoleWithWebIdentityCommandOutput} + * @see {@link AssumeRoleWithWebIdentityCommandInput} for command's `input` shape. + * @see {@link AssumeRoleWithWebIdentityCommandOutput} for command's `response` shape. + * @see {@link STSClientResolvedConfig | config} for STSClient's `config` shape. 
+ * + * @throws {@link ExpiredTokenException} (client fault) + *

The web identity token that was passed is expired or is not valid. Get a new identity + * token from the identity provider and then retry the request.

+ * + * @throws {@link IDPCommunicationErrorException} (client fault) + *

The request could not be fulfilled because the identity provider (IDP) that was asked + * to verify the incoming identity token could not be reached. This is often a transient + * error caused by network conditions. Retry the request a limited number of times so that + * you don't exceed the request rate. If the error persists, the identity provider might be + * down or not responding.

+ * + * @throws {@link IDPRejectedClaimException} (client fault) + *

The identity provider (IdP) reported that authentication failed. This might be because + * the claim is invalid.

+ *

If this error is returned for the AssumeRoleWithWebIdentity operation, it + * can also mean that the claim has expired or has been explicitly revoked.

+ * + * @throws {@link InvalidIdentityTokenException} (client fault) + *

The web identity token that was passed could not be validated by Amazon Web Services. Get a new + * identity token from the identity provider and then retry the request.

+ * + * @throws {@link MalformedPolicyDocumentException} (client fault) + *

The request was rejected because the policy document was malformed. The error message + * describes the specific error.

+ * + * @throws {@link PackedPolicyTooLargeException} (client fault) + *

The request was rejected because the total packed size of the session policies and + * session tags combined was too large. An Amazon Web Services conversion compresses the session policy + * document, session policy ARNs, and session tags into a packed binary format that has a + * separate limit. The error message indicates by percentage how close the policies and + * tags are to the upper size limit. For more information, see Passing Session Tags in STS in + * the IAM User Guide.

+ *

You could receive this error even though you meet other defined session policy and + * session tag limits. For more information, see IAM and STS Entity Character Limits in the IAM User + * Guide.

+ * + * @throws {@link RegionDisabledException} (client fault) + *

STS is not activated in the requested region for the account that is being asked to + * generate credentials. The account administrator must use the IAM console to activate + * STS in that region. For more information, see Activating and + * Deactivating STS in an Amazon Web Services Region in the IAM User + * Guide.

+ * + * @throws {@link STSServiceException} + *

Base exception class for all service exceptions from STS service.

+ * + * + * @example To assume a role as an OpenID Connect-federated user + * ```javascript + * // + * const input = { + * DurationSeconds: 3600, + * Policy: `{"Version":"2012-10-17","Statement":[{"Sid":"Stmt1","Effect":"Allow","Action":"s3:ListAllMyBuckets","Resource":"*"}]}`, + * ProviderId: "www.amazon.com", + * RoleArn: "arn:aws:iam::123456789012:role/FederatedWebIdentityRole", + * RoleSessionName: "app1", + * WebIdentityToken: "Atza%7CIQEBLjAsAhRFiXuWpUXuRvQ9PZL3GMFcYevydwIUFAHZwXZXXXXXXXXJnrulxKDHwy87oGKPznh0D6bEQZTSCzyoCtL_8S07pLpr0zMbn6w1lfVZKNTBdDansFBmtGnIsIapjI6xKR02Yc_2bQ8LZbUXSGm6Ry6_BG7PrtLZtj_dfCTj92xNGed-CrKqjG7nPBjNIL016GGvuS5gSvPRUxWES3VYfm1wl7WTI7jn-Pcb6M-buCgHhFOzTQxod27L9CqnOLio7N3gZAGpsp6n1-AJBOCJckcyXe2c6uD0srOJeZlKUm2eTDVMf8IehDVI0r1QOnTV6KzzAI3OY87Vd_cVMQ" + * }; + * const command = new AssumeRoleWithWebIdentityCommand(input); + * const response = await client.send(command); + * /* response is + * { + * AssumedRoleUser: { + * Arn: "arn:aws:sts::123456789012:assumed-role/FederatedWebIdentityRole/app1", + * AssumedRoleId: "AROACLKWSDQRAOEXAMPLE:app1" + * }, + * Audience: "client.5498841531868486423.1548@apps.example.com", + * Credentials: { + * AccessKeyId: "AKIAIOSFODNN7EXAMPLE", + * Expiration: "2014-10-24T23:00:23Z", + * SecretAccessKey: "wJalrXUtnFEMI/K7MDENG/bPxRfiCYzEXAMPLEKEY", + * SessionToken: "AQoDYXdzEE0a8ANXXXXXXXXNO1ewxE5TijQyp+IEXAMPLE" + * }, + * PackedPolicySize: 123, + * Provider: "www.amazon.com", + * SubjectFromWebIdentityToken: "amzn1.account.AF6RHO7KZU5XRVQJGXK6HEXAMPLE" + * } + * *\/ + * ``` + * + * @public + */ +export declare class AssumeRoleWithWebIdentityCommand extends AssumeRoleWithWebIdentityCommand_base { + /** @internal type navigation helper, not in runtime. 
*/ + protected static __types: { + api: { + input: AssumeRoleWithWebIdentityRequest; + output: AssumeRoleWithWebIdentityResponse; + }; + sdk: { + input: AssumeRoleWithWebIdentityCommandInput; + output: AssumeRoleWithWebIdentityCommandOutput; + }; + }; +} diff --git a/amplify/functions/deleteDocument/node_modules/@aws-sdk/nested-clients/dist-types/submodules/sts/commands/index.d.ts b/amplify/functions/deleteDocument/node_modules/@aws-sdk/nested-clients/dist-types/submodules/sts/commands/index.d.ts new file mode 100644 index 0000000..0f200f5 --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@aws-sdk/nested-clients/dist-types/submodules/sts/commands/index.d.ts @@ -0,0 +1,2 @@ +export * from "./AssumeRoleCommand"; +export * from "./AssumeRoleWithWebIdentityCommand"; diff --git a/amplify/functions/deleteDocument/node_modules/@aws-sdk/nested-clients/dist-types/submodules/sts/defaultRoleAssumers.d.ts b/amplify/functions/deleteDocument/node_modules/@aws-sdk/nested-clients/dist-types/submodules/sts/defaultRoleAssumers.d.ts new file mode 100644 index 0000000..0e25207 --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@aws-sdk/nested-clients/dist-types/submodules/sts/defaultRoleAssumers.d.ts @@ -0,0 +1,23 @@ +import { Pluggable } from "@smithy/types"; +import { DefaultCredentialProvider, RoleAssumer, RoleAssumerWithWebIdentity, STSRoleAssumerOptions } from "./defaultStsRoleAssumers"; +import { ServiceInputTypes, ServiceOutputTypes } from "./STSClient"; +/** + * The default role assumer that used by credential providers when sts:AssumeRole API is needed. + */ +export declare const getDefaultRoleAssumer: (stsOptions?: STSRoleAssumerOptions, stsPlugins?: Pluggable[]) => RoleAssumer; +/** + * The default role assumer that used by credential providers when sts:AssumeRoleWithWebIdentity API is needed. 
+ */ +export declare const getDefaultRoleAssumerWithWebIdentity: (stsOptions?: STSRoleAssumerOptions, stsPlugins?: Pluggable[]) => RoleAssumerWithWebIdentity; +/** + * The default credential providers depend STS client to assume role with desired API: sts:assumeRole, + * sts:assumeRoleWithWebIdentity, etc. This function decorates the default credential provider with role assumers which + * encapsulates the process of calling STS commands. This can only be imported by AWS client packages to avoid circular + * dependencies. + * + * @internal + * + * @deprecated this is no longer needed. Use the defaultProvider directly, + * which will load STS if needed. + */ +export declare const decorateDefaultCredentialProvider: (provider: DefaultCredentialProvider) => DefaultCredentialProvider; diff --git a/amplify/functions/deleteDocument/node_modules/@aws-sdk/nested-clients/dist-types/submodules/sts/defaultStsRoleAssumers.d.ts b/amplify/functions/deleteDocument/node_modules/@aws-sdk/nested-clients/dist-types/submodules/sts/defaultStsRoleAssumers.d.ts new file mode 100644 index 0000000..c4ba0c4 --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@aws-sdk/nested-clients/dist-types/submodules/sts/defaultStsRoleAssumers.d.ts @@ -0,0 +1,43 @@ +import type { CredentialProviderOptions } from "@aws-sdk/types"; +import { AwsCredentialIdentity, Logger, Provider } from "@smithy/types"; +import { AssumeRoleCommandInput } from "./commands/AssumeRoleCommand"; +import { AssumeRoleWithWebIdentityCommandInput } from "./commands/AssumeRoleWithWebIdentityCommand"; +import type { STSClient, STSClientConfig } from "./STSClient"; +/** + * @public + */ +export type STSRoleAssumerOptions = Pick & { + credentialProviderLogger?: Logger; + parentClientConfig?: CredentialProviderOptions["parentClientConfig"]; +}; +/** + * @internal + */ +export type RoleAssumer = (sourceCreds: AwsCredentialIdentity, params: AssumeRoleCommandInput) => Promise; +/** + * The default role assumer that used by 
credential providers when sts:AssumeRole API is needed. + * @internal + */ +export declare const getDefaultRoleAssumer: (stsOptions: STSRoleAssumerOptions, STSClient: new (options: STSClientConfig) => STSClient) => RoleAssumer; +/** + * @internal + */ +export type RoleAssumerWithWebIdentity = (params: AssumeRoleWithWebIdentityCommandInput) => Promise; +/** + * The default role assumer that used by credential providers when sts:AssumeRoleWithWebIdentity API is needed. + * @internal + */ +export declare const getDefaultRoleAssumerWithWebIdentity: (stsOptions: STSRoleAssumerOptions, STSClient: new (options: STSClientConfig) => STSClient) => RoleAssumerWithWebIdentity; +/** + * @internal + */ +export type DefaultCredentialProvider = (input: any) => Provider; +/** + * The default credential providers depend STS client to assume role with desired API: sts:assumeRole, + * sts:assumeRoleWithWebIdentity, etc. This function decorates the default credential provider with role assumers which + * encapsulates the process of calling STS commands. This can only be imported by AWS client packages to avoid circular + * dependencies. 
+ * + * @internal + */ +export declare const decorateDefaultCredentialProvider: (provider: DefaultCredentialProvider) => DefaultCredentialProvider; diff --git a/amplify/functions/deleteDocument/node_modules/@aws-sdk/nested-clients/dist-types/submodules/sts/endpoint/EndpointParameters.d.ts b/amplify/functions/deleteDocument/node_modules/@aws-sdk/nested-clients/dist-types/submodules/sts/endpoint/EndpointParameters.d.ts new file mode 100644 index 0000000..39f6c7e --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@aws-sdk/nested-clients/dist-types/submodules/sts/endpoint/EndpointParameters.d.ts @@ -0,0 +1,46 @@ +import { Endpoint, EndpointParameters as __EndpointParameters, EndpointV2, Provider } from "@smithy/types"; +/** + * @public + */ +export interface ClientInputEndpointParameters { + region?: string | Provider; + useDualstackEndpoint?: boolean | Provider; + useFipsEndpoint?: boolean | Provider; + endpoint?: string | Provider | Endpoint | Provider | EndpointV2 | Provider; + useGlobalEndpoint?: boolean | Provider; +} +export type ClientResolvedEndpointParameters = ClientInputEndpointParameters & { + defaultSigningName: string; +}; +export declare const resolveClientEndpointParameters: (options: T & ClientInputEndpointParameters) => T & ClientInputEndpointParameters & { + defaultSigningName: string; +}; +export declare const commonParams: { + readonly UseGlobalEndpoint: { + readonly type: "builtInParams"; + readonly name: "useGlobalEndpoint"; + }; + readonly UseFIPS: { + readonly type: "builtInParams"; + readonly name: "useFipsEndpoint"; + }; + readonly Endpoint: { + readonly type: "builtInParams"; + readonly name: "endpoint"; + }; + readonly Region: { + readonly type: "builtInParams"; + readonly name: "region"; + }; + readonly UseDualStack: { + readonly type: "builtInParams"; + readonly name: "useDualstackEndpoint"; + }; +}; +export interface EndpointParameters extends __EndpointParameters { + Region?: string; + UseDualStack?: boolean; + UseFIPS?: 
boolean; + Endpoint?: string; + UseGlobalEndpoint?: boolean; +} diff --git a/amplify/functions/deleteDocument/node_modules/@aws-sdk/nested-clients/dist-types/submodules/sts/endpoint/endpointResolver.d.ts b/amplify/functions/deleteDocument/node_modules/@aws-sdk/nested-clients/dist-types/submodules/sts/endpoint/endpointResolver.d.ts new file mode 100644 index 0000000..70a8eae --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@aws-sdk/nested-clients/dist-types/submodules/sts/endpoint/endpointResolver.d.ts @@ -0,0 +1,5 @@ +import { EndpointV2, Logger } from "@smithy/types"; +import { EndpointParameters } from "./EndpointParameters"; +export declare const defaultEndpointResolver: (endpointParams: EndpointParameters, context?: { + logger?: Logger; +}) => EndpointV2; diff --git a/amplify/functions/deleteDocument/node_modules/@aws-sdk/nested-clients/dist-types/submodules/sts/endpoint/ruleset.d.ts b/amplify/functions/deleteDocument/node_modules/@aws-sdk/nested-clients/dist-types/submodules/sts/endpoint/ruleset.d.ts new file mode 100644 index 0000000..4b23899 --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@aws-sdk/nested-clients/dist-types/submodules/sts/endpoint/ruleset.d.ts @@ -0,0 +1,2 @@ +import { RuleSetObject } from "@smithy/types"; +export declare const ruleSet: RuleSetObject; diff --git a/amplify/functions/deleteDocument/node_modules/@aws-sdk/nested-clients/dist-types/submodules/sts/extensionConfiguration.d.ts b/amplify/functions/deleteDocument/node_modules/@aws-sdk/nested-clients/dist-types/submodules/sts/extensionConfiguration.d.ts new file mode 100644 index 0000000..970e12b --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@aws-sdk/nested-clients/dist-types/submodules/sts/extensionConfiguration.d.ts @@ -0,0 +1,9 @@ +import { AwsRegionExtensionConfiguration } from "@aws-sdk/types"; +import { HttpHandlerExtensionConfiguration } from "@smithy/protocol-http"; +import { DefaultExtensionConfiguration } from 
"@smithy/types"; +import { HttpAuthExtensionConfiguration } from "./auth/httpAuthExtensionConfiguration"; +/** + * @internal + */ +export interface STSExtensionConfiguration extends HttpHandlerExtensionConfiguration, DefaultExtensionConfiguration, AwsRegionExtensionConfiguration, HttpAuthExtensionConfiguration { +} diff --git a/amplify/functions/deleteDocument/node_modules/@aws-sdk/nested-clients/dist-types/submodules/sts/index.d.ts b/amplify/functions/deleteDocument/node_modules/@aws-sdk/nested-clients/dist-types/submodules/sts/index.d.ts new file mode 100644 index 0000000..98b87b4 --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@aws-sdk/nested-clients/dist-types/submodules/sts/index.d.ts @@ -0,0 +1,17 @@ +/** + * Security Token Service + *

Security Token Service (STS) enables you to request temporary, limited-privilege + * credentials for users. This guide provides descriptions of the STS API. For + * more information about using this service, see Temporary Security Credentials.

+ * + * @packageDocumentation + */ +export * from "./STSClient"; +export * from "./STS"; +export { ClientInputEndpointParameters } from "./endpoint/EndpointParameters"; +export type { RuntimeExtension } from "./runtimeExtensions"; +export type { STSExtensionConfiguration } from "./extensionConfiguration"; +export * from "./commands"; +export * from "./models"; +export * from "./defaultRoleAssumers"; +export { STSServiceException } from "./models/STSServiceException"; diff --git a/amplify/functions/deleteDocument/node_modules/@aws-sdk/nested-clients/dist-types/submodules/sts/models/STSServiceException.d.ts b/amplify/functions/deleteDocument/node_modules/@aws-sdk/nested-clients/dist-types/submodules/sts/models/STSServiceException.d.ts new file mode 100644 index 0000000..fd1a9a2 --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@aws-sdk/nested-clients/dist-types/submodules/sts/models/STSServiceException.d.ts @@ -0,0 +1,14 @@ +import { ServiceException as __ServiceException, ServiceExceptionOptions as __ServiceExceptionOptions } from "@smithy/smithy-client"; +export type { __ServiceExceptionOptions }; +export { __ServiceException }; +/** + * @public + * + * Base exception class for all service exceptions from STS service. 
+ */ +export declare class STSServiceException extends __ServiceException { + /** + * @internal + */ + constructor(options: __ServiceExceptionOptions); +} diff --git a/amplify/functions/deleteDocument/node_modules/@aws-sdk/nested-clients/dist-types/submodules/sts/models/index.d.ts b/amplify/functions/deleteDocument/node_modules/@aws-sdk/nested-clients/dist-types/submodules/sts/models/index.d.ts new file mode 100644 index 0000000..09c5d6e --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@aws-sdk/nested-clients/dist-types/submodules/sts/models/index.d.ts @@ -0,0 +1 @@ +export * from "./models_0"; diff --git a/amplify/functions/deleteDocument/node_modules/@aws-sdk/nested-clients/dist-types/submodules/sts/models/models_0.d.ts b/amplify/functions/deleteDocument/node_modules/@aws-sdk/nested-clients/dist-types/submodules/sts/models/models_0.d.ts new file mode 100644 index 0000000..5b58b93 --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@aws-sdk/nested-clients/dist-types/submodules/sts/models/models_0.d.ts @@ -0,0 +1,712 @@ +import { ExceptionOptionType as __ExceptionOptionType } from "@smithy/smithy-client"; +import { STSServiceException as __BaseException } from "./STSServiceException"; +/** + *

The identifiers for the temporary security credentials that the operation + * returns.

+ * @public + */ +export interface AssumedRoleUser { + /** + *

A unique identifier that contains the role ID and the role session name of the role that + * is being assumed. The role ID is generated by Amazon Web Services when the role is created.

+ * @public + */ + AssumedRoleId: string | undefined; + /** + *

The ARN of the temporary security credentials that are returned from the AssumeRole action. For more information about ARNs and how to use them in + * policies, see IAM Identifiers in the + * IAM User Guide.

+ * @public + */ + Arn: string | undefined; +} +/** + *

A reference to the IAM managed policy that is passed as a session policy for a role + * session or a federated user session.

+ * @public + */ +export interface PolicyDescriptorType { + /** + *

The Amazon Resource Name (ARN) of the IAM managed policy to use as a session policy + * for the role. For more information about ARNs, see Amazon Resource Names (ARNs) and Amazon Web Services + * Service Namespaces in the Amazon Web Services General Reference.

+ * @public + */ + arn?: string | undefined; +} +/** + *

Contains information about the provided context. This includes the signed and encrypted + * trusted context assertion and the context provider ARN from which the trusted context + * assertion was generated.

+ * @public + */ +export interface ProvidedContext { + /** + *

The context provider ARN from which the trusted context assertion was generated.

+ * @public + */ + ProviderArn?: string | undefined; + /** + *

The signed and encrypted trusted context assertion generated by the context provider. + * The trusted context assertion is signed and encrypted by Amazon Web Services STS.

+ * @public + */ + ContextAssertion?: string | undefined; +} +/** + *

You can pass custom key-value pair attributes when you assume a role or federate a user. + * These are called session tags. You can then use the session tags to control access to + * resources. For more information, see Tagging Amazon Web Services STS Sessions in the + * IAM User Guide.

+ * @public + */ +export interface Tag { + /** + *

The key for a session tag.

+ *

You can pass up to 50 session tags. The plain text session tag keys can’t exceed 128 + * characters. For these and additional limits, see IAM + * and STS Character Limits in the IAM User Guide.

+ * @public + */ + Key: string | undefined; + /** + *

The value for a session tag.

+ *

You can pass up to 50 session tags. The plain text session tag values can’t exceed 256 + * characters. For these and additional limits, see IAM + * and STS Character Limits in the IAM User Guide.

+ * @public + */ + Value: string | undefined; +} +/** + * @public + */ +export interface AssumeRoleRequest { + /** + *

The Amazon Resource Name (ARN) of the role to assume.

+ * @public + */ + RoleArn: string | undefined; + /** + *

An identifier for the assumed role session.

+ *

Use the role session name to uniquely identify a session when the same role is assumed + * by different principals or for different reasons. In cross-account scenarios, the role + * session name is visible to, and can be logged by the account that owns the role. The role + * session name is also used in the ARN of the assumed role principal. This means that + * subsequent cross-account API requests that use the temporary security credentials will + * expose the role session name to the external account in their CloudTrail logs.

+ *

For security purposes, administrators can view this field in CloudTrail logs to help identify who performed an action in Amazon Web Services. Your + * administrator might require that you specify your user name as the session name when you + * assume the role. For more information, see + * sts:RoleSessionName + * .

+ *

The regex used to validate this parameter is a string of characters + * consisting of upper- and lower-case alphanumeric characters with no spaces. You can + * also include underscores or any of the following characters: =,.@-

+ * @public + */ + RoleSessionName: string | undefined; + /** + *

The Amazon Resource Names (ARNs) of the IAM managed policies that you want to use as + * managed session policies. The policies must exist in the same account as the role.

+ *

This parameter is optional. You can provide up to 10 managed policy ARNs. However, the + * plaintext that you use for both inline and managed session policies can't exceed 2,048 + * characters. For more information about ARNs, see Amazon Resource Names (ARNs) and Amazon Web Services + * Service Namespaces in the Amazon Web Services General Reference.

+ * + *

An Amazon Web Services conversion compresses the passed inline session policy, managed policy ARNs, + * and session tags into a packed binary format that has a separate limit. Your request can + * fail for this limit even if your plaintext meets the other requirements. The + * PackedPolicySize response element indicates by percentage how close the + * policies and tags for your request are to the upper size limit.

+ *
+ *

Passing policies to this operation returns new + * temporary credentials. The resulting session's permissions are the intersection of the + * role's identity-based policy and the session policies. You can use the role's temporary + * credentials in subsequent Amazon Web Services API calls to access resources in the account that owns + * the role. You cannot use session policies to grant more permissions than those allowed + * by the identity-based policy of the role that is being assumed. For more information, see + * Session + * Policies in the IAM User Guide.

+ * @public + */ + PolicyArns?: PolicyDescriptorType[] | undefined; + /** + *

An IAM policy in JSON format that you want to use as an inline session policy.

+ *

This parameter is optional. Passing policies to this operation returns new + * temporary credentials. The resulting session's permissions are the intersection of the + * role's identity-based policy and the session policies. You can use the role's temporary + * credentials in subsequent Amazon Web Services API calls to access resources in the account that owns + * the role. You cannot use session policies to grant more permissions than those allowed + * by the identity-based policy of the role that is being assumed. For more information, see + * Session + * Policies in the IAM User Guide.

+ *

The plaintext that you use for both inline and managed session policies can't exceed + * 2,048 characters. The JSON policy characters can be any ASCII character from the space + * character to the end of the valid character list (\u0020 through \u00FF). It can also + * include the tab (\u0009), linefeed (\u000A), and carriage return (\u000D) + * characters.

+ * + *

An Amazon Web Services conversion compresses the passed inline session policy, managed policy ARNs, + * and session tags into a packed binary format that has a separate limit. Your request can + * fail for this limit even if your plaintext meets the other requirements. The + * PackedPolicySize response element indicates by percentage how close the + * policies and tags for your request are to the upper size limit.

+ *
+ *

For more information about role session permissions, see Session + * policies.

+ * @public + */ + Policy?: string | undefined; + /** + *

The duration, in seconds, of the role session. The value specified can range from 900 + * seconds (15 minutes) up to the maximum session duration set for the role. The maximum + * session duration setting can have a value from 1 hour to 12 hours. If you specify a value + * higher than this setting or the administrator setting (whichever is lower), the operation + * fails. For example, if you specify a session duration of 12 hours, but your administrator + * set the maximum session duration to 6 hours, your operation fails.

+ *

Role chaining limits your Amazon Web Services CLI or Amazon Web Services API role session to a maximum of one hour. + * When you use the AssumeRole API operation to assume a role, you can specify + * the duration of your role session with the DurationSeconds parameter. You can + * specify a parameter value of up to 43200 seconds (12 hours), depending on the maximum + * session duration setting for your role. However, if you assume a role using role chaining + * and provide a DurationSeconds parameter value greater than one hour, the + * operation fails. To learn how to view the maximum value for your role, see Update the maximum session duration for a role.

+ *

By default, the value is set to 3600 seconds.

+ * + *

The DurationSeconds parameter is separate from the duration of a console + * session that you might request using the returned credentials. The request to the + * federation endpoint for a console sign-in token takes a SessionDuration + * parameter that specifies the maximum length of the console session. For more + * information, see Creating a URL + * that Enables Federated Users to Access the Amazon Web Services Management Console in the + * IAM User Guide.

+ *
+ * @public + */ + DurationSeconds?: number | undefined; + /** + *

A list of session tags that you want to pass. Each session tag consists of a key name + * and an associated value. For more information about session tags, see Tagging Amazon Web Services STS + * Sessions in the IAM User Guide.

+ *

This parameter is optional. You can pass up to 50 session tags. The plaintext session + * tag keys can’t exceed 128 characters, and the values can’t exceed 256 characters. For these + * and additional limits, see IAM + * and STS Character Limits in the IAM User Guide.

+ * + *

An Amazon Web Services conversion compresses the passed inline session policy, managed policy ARNs, + * and session tags into a packed binary format that has a separate limit. Your request can + * fail for this limit even if your plaintext meets the other requirements. The + * PackedPolicySize response element indicates by percentage how close the + * policies and tags for your request are to the upper size limit.

+ *
+ *

You can pass a session tag with the same key as a tag that is already attached to the + * role. When you do, session tags override a role tag with the same key.

+ *

Tag key–value pairs are not case sensitive, but case is preserved. This means that you + * cannot have separate Department and department tag keys. Assume + * that the role has the Department=Marketing tag and you pass the + * department=engineering session tag. Department + * and department are not saved as separate tags, and the session tag passed in + * the request takes precedence over the role tag.

+ *

Additionally, if you used temporary credentials to perform this operation, the new + * session inherits any transitive session tags from the calling session. If you pass a + * session tag with the same key as an inherited tag, the operation fails. To view the + * inherited tags for a session, see the CloudTrail logs. For more information, see Viewing Session Tags in CloudTrail in the + * IAM User Guide.

+ * @public + */ + Tags?: Tag[] | undefined; + /** + *

A list of keys for session tags that you want to set as transitive. If you set a tag key + * as transitive, the corresponding key and value passes to subsequent sessions in a role + * chain. For more information, see Chaining Roles + * with Session Tags in the IAM User Guide.

+ *

This parameter is optional. The transitive status of a session tag does not impact its + * packed binary size.

+ *

If you choose not to specify a transitive tag key, then no tags are passed from this + * session to any subsequent sessions.

+ * @public + */ + TransitiveTagKeys?: string[] | undefined; + /** + *

A unique identifier that might be required when you assume a role in another account. If + * the administrator of the account to which the role belongs provided you with an external + * ID, then provide that value in the ExternalId parameter. This value can be any + * string, such as a passphrase or account number. A cross-account role is usually set up to + * trust everyone in an account. Therefore, the administrator of the trusting account might + * send an external ID to the administrator of the trusted account. That way, only someone + * with the ID can assume the role, rather than everyone in the account. For more information + * about the external ID, see How to Use an External ID + * When Granting Access to Your Amazon Web Services Resources to a Third Party in the + * IAM User Guide.

+ *

The regex used to validate this parameter is a string of + * characters consisting of upper- and lower-case alphanumeric characters with no spaces. + * You can also include underscores or any of the following characters: =,.@:/-

+ * @public + */ + ExternalId?: string | undefined; + /** + *

The identification number of the MFA device that is associated with the user who is + * making the AssumeRole call. Specify this value if the trust policy of the role + * being assumed includes a condition that requires MFA authentication. The value is either + * the serial number for a hardware device (such as GAHT12345678) or an Amazon + * Resource Name (ARN) for a virtual device (such as + * arn:aws:iam::123456789012:mfa/user).

+ *

The regex used to validate this parameter is a string of characters + * consisting of upper- and lower-case alphanumeric characters with no spaces. You can + * also include underscores or any of the following characters: =,.@-

+ * @public + */ + SerialNumber?: string | undefined; + /** + *

The value provided by the MFA device, if the trust policy of the role being assumed + * requires MFA. (In other words, if the policy includes a condition that tests for MFA). If + * the role being assumed requires MFA and if the TokenCode value is missing or + * expired, the AssumeRole call returns an "access denied" error.

+ *

The format for this parameter, as described by its regex pattern, is a sequence of six + * numeric digits.

+ * @public + */ + TokenCode?: string | undefined; + /** + *

The source identity specified by the principal that is calling the + * AssumeRole operation. The source identity value persists across chained role sessions.

+ *

You can require users to specify a source identity when they assume a role. You do this + * by using the + * sts:SourceIdentity + * condition key in a role trust policy. You + * can use source identity information in CloudTrail logs to determine who took actions with a + * role. You can use the aws:SourceIdentity condition key to further control + * access to Amazon Web Services resources based on the value of source identity. For more information about + * using source identity, see Monitor and control + * actions taken with assumed roles in the + * IAM User Guide.

+ *

The regex used to validate this parameter is a string of characters consisting of upper- + * and lower-case alphanumeric characters with no spaces. You can also include underscores or + * any of the following characters: +=,.@-. You cannot use a value that begins with the text + * aws:. This prefix is reserved for Amazon Web Services internal use.

+ * @public + */ + SourceIdentity?: string | undefined; + /** + *

A list of previously acquired trusted context assertions in the format of a JSON array. + * The trusted context assertion is signed and encrypted by Amazon Web Services STS.

+ *

The following is an example of a ProvidedContext value that includes a + * single trusted context assertion and the ARN of the context provider from which the trusted + * context assertion was generated.

+ *

+ * [\{"ProviderArn":"arn:aws:iam::aws:contextProvider/IdentityCenter","ContextAssertion":"trusted-context-assertion"\}] + *

+ * @public + */ + ProvidedContexts?: ProvidedContext[] | undefined; +} +/** + *

Amazon Web Services credentials for API authentication.

+ * @public + */ +export interface Credentials { + /** + *

The access key ID that identifies the temporary security credentials.

+ * @public + */ + AccessKeyId: string | undefined; + /** + *

The secret access key that can be used to sign requests.

+ * @public + */ + SecretAccessKey: string | undefined; + /** + *

The token that users must pass to the service API to use the temporary + * credentials.

+ * @public + */ + SessionToken: string | undefined; + /** + *

The date on which the current credentials expire.

+ * @public + */ + Expiration: Date | undefined; +} +/** + * @internal + */ +export declare const CredentialsFilterSensitiveLog: (obj: Credentials) => any; +/** + *

Contains the response to a successful AssumeRole request, including + * temporary Amazon Web Services credentials that can be used to make Amazon Web Services requests.

+ * @public + */ +export interface AssumeRoleResponse { + /** + *

The temporary security credentials, which include an access key ID, a secret access key, + * and a security (or session) token.

+ * + *

The size of the security token that STS API operations return is not fixed. We + * strongly recommend that you make no assumptions about the maximum size.

+ *
+ * @public + */ + Credentials?: Credentials | undefined; + /** + *

The Amazon Resource Name (ARN) and the assumed role ID, which are identifiers that you + * can use to refer to the resulting temporary security credentials. For example, you can + * reference these credentials as a principal in a resource-based policy by using the ARN or + * assumed role ID. The ARN and ID include the RoleSessionName that you specified + * when you called AssumeRole.

+ * @public + */ + AssumedRoleUser?: AssumedRoleUser | undefined; + /** + *

A percentage value that indicates the packed size of the session policies and session + * tags combined passed in the request. The request fails if the packed size is greater than 100 percent, + * which means the policies and tags exceeded the allowed space.

+ * @public + */ + PackedPolicySize?: number | undefined; + /** + *

The source identity specified by the principal that is calling the + * AssumeRole operation.

+ *

You can require users to specify a source identity when they assume a role. You do this + * by using the sts:SourceIdentity condition key in a role trust policy. You can + * use source identity information in CloudTrail logs to determine who took actions with a role. + * You can use the aws:SourceIdentity condition key to further control access to + * Amazon Web Services resources based on the value of source identity. For more information about using + * source identity, see Monitor and control + * actions taken with assumed roles in the + * IAM User Guide.

+ *

The regex used to validate this parameter is a string of characters consisting of upper- + * and lower-case alphanumeric characters with no spaces. You can also include underscores or + * any of the following characters: =,.@-

+ * @public + */ + SourceIdentity?: string | undefined; +} +/** + * @internal + */ +export declare const AssumeRoleResponseFilterSensitiveLog: (obj: AssumeRoleResponse) => any; +/** + *

The web identity token that was passed is expired or is not valid. Get a new identity + * token from the identity provider and then retry the request.

+ * @public + */ +export declare class ExpiredTokenException extends __BaseException { + readonly name: "ExpiredTokenException"; + readonly $fault: "client"; + /** + * @internal + */ + constructor(opts: __ExceptionOptionType); +} +/** + *

The request was rejected because the policy document was malformed. The error message + * describes the specific error.

+ * @public + */ +export declare class MalformedPolicyDocumentException extends __BaseException { + readonly name: "MalformedPolicyDocumentException"; + readonly $fault: "client"; + /** + * @internal + */ + constructor(opts: __ExceptionOptionType); +} +/** + *

The request was rejected because the total packed size of the session policies and + * session tags combined was too large. An Amazon Web Services conversion compresses the session policy + * document, session policy ARNs, and session tags into a packed binary format that has a + * separate limit. The error message indicates by percentage how close the policies and + * tags are to the upper size limit. For more information, see Passing Session Tags in STS in + * the IAM User Guide.

+ *

You could receive this error even though you meet other defined session policy and + * session tag limits. For more information, see IAM and STS Entity Character Limits in the IAM User + * Guide.

+ * @public + */ +export declare class PackedPolicyTooLargeException extends __BaseException { + readonly name: "PackedPolicyTooLargeException"; + readonly $fault: "client"; + /** + * @internal + */ + constructor(opts: __ExceptionOptionType); +} +/** + *

STS is not activated in the requested region for the account that is being asked to + * generate credentials. The account administrator must use the IAM console to activate + * STS in that region. For more information, see Activating and + * Deactivating STS in an Amazon Web Services Region in the IAM User + * Guide.

+ * @public + */ +export declare class RegionDisabledException extends __BaseException { + readonly name: "RegionDisabledException"; + readonly $fault: "client"; + /** + * @internal + */ + constructor(opts: __ExceptionOptionType); +} +/** + *

The identity provider (IdP) reported that authentication failed. This might be because + * the claim is invalid.

+ *

If this error is returned for the AssumeRoleWithWebIdentity operation, it + * can also mean that the claim has expired or has been explicitly revoked.

+ * @public + */ +export declare class IDPRejectedClaimException extends __BaseException { + readonly name: "IDPRejectedClaimException"; + readonly $fault: "client"; + /** + * @internal + */ + constructor(opts: __ExceptionOptionType); +} +/** + *

The web identity token that was passed could not be validated by Amazon Web Services. Get a new + * identity token from the identity provider and then retry the request.

+ * @public + */ +export declare class InvalidIdentityTokenException extends __BaseException { + readonly name: "InvalidIdentityTokenException"; + readonly $fault: "client"; + /** + * @internal + */ + constructor(opts: __ExceptionOptionType); +} +/** + * @public + */ +export interface AssumeRoleWithWebIdentityRequest { + /** + *

The Amazon Resource Name (ARN) of the role that the caller is assuming.

+ * + *

Additional considerations apply to Amazon Cognito identity pools that assume cross-account IAM roles. The trust policies of these roles must accept the + * cognito-identity.amazonaws.com service principal and must contain the + * cognito-identity.amazonaws.com:aud condition key to restrict role + * assumption to users from your intended identity pools. A policy that trusts Amazon Cognito + * identity pools without this condition creates a risk that a user from an unintended + * identity pool can assume the role. For more information, see Trust policies for + * IAM roles in Basic (Classic) authentication in the Amazon Cognito + * Developer Guide.

+ *
+ * @public + */ + RoleArn: string | undefined; + /** + *

An identifier for the assumed role session. Typically, you pass the name or identifier + * that is associated with the user who is using your application. That way, the temporary + * security credentials that your application will use are associated with that user. This + * session name is included as part of the ARN and assumed role ID in the + * AssumedRoleUser response element.

+ *

For security purposes, administrators can view this field in CloudTrail logs to help identify who performed an action in Amazon Web Services. Your + * administrator might require that you specify your user name as the session name when you + * assume the role. For more information, see + * sts:RoleSessionName + * .

+ *

The regex used to validate this parameter is a string of characters + * consisting of upper- and lower-case alphanumeric characters with no spaces. You can + * also include underscores or any of the following characters: =,.@-

+ * @public + */ + RoleSessionName: string | undefined; + /** + *

The OAuth 2.0 access token or OpenID Connect ID token that is provided by the identity + * provider. Your application must get this token by authenticating the user who is using your + * application with a web identity provider before the application makes an + * AssumeRoleWithWebIdentity call. Timestamps in the token must be formatted + * as either an integer or a long integer. Tokens must be signed using either RSA keys (RS256, + * RS384, or RS512) or ECDSA keys (ES256, ES384, or ES512).

+ * @public + */ + WebIdentityToken: string | undefined; + /** + *

The fully qualified host component of the domain name of the OAuth 2.0 identity + * provider. Do not specify this value for an OpenID Connect identity provider.

+ *

Currently www.amazon.com and graph.facebook.com are the only + * supported identity providers for OAuth 2.0 access tokens. Do not include URL schemes and + * port numbers.

+ *

Do not specify this value for OpenID Connect ID tokens.

+ * @public + */ + ProviderId?: string | undefined; + /** + *

The Amazon Resource Names (ARNs) of the IAM managed policies that you want to use as + * managed session policies. The policies must exist in the same account as the role.

+ *

This parameter is optional. You can provide up to 10 managed policy ARNs. However, the + * plaintext that you use for both inline and managed session policies can't exceed 2,048 + * characters. For more information about ARNs, see Amazon Resource Names (ARNs) and Amazon Web Services + * Service Namespaces in the Amazon Web Services General Reference.

+ * + *

An Amazon Web Services conversion compresses the passed inline session policy, managed policy ARNs, + * and session tags into a packed binary format that has a separate limit. Your request can + * fail for this limit even if your plaintext meets the other requirements. The + * PackedPolicySize response element indicates by percentage how close the + * policies and tags for your request are to the upper size limit.

+ *
+ *

Passing policies to this operation returns new + * temporary credentials. The resulting session's permissions are the intersection of the + * role's identity-based policy and the session policies. You can use the role's temporary + * credentials in subsequent Amazon Web Services API calls to access resources in the account that owns + * the role. You cannot use session policies to grant more permissions than those allowed + * by the identity-based policy of the role that is being assumed. For more information, see + * Session + * Policies in the IAM User Guide.

+ * @public + */ + PolicyArns?: PolicyDescriptorType[] | undefined; + /** + *

An IAM policy in JSON format that you want to use as an inline session policy.

+ *

This parameter is optional. Passing policies to this operation returns new + * temporary credentials. The resulting session's permissions are the intersection of the + * role's identity-based policy and the session policies. You can use the role's temporary + * credentials in subsequent Amazon Web Services API calls to access resources in the account that owns + * the role. You cannot use session policies to grant more permissions than those allowed + * by the identity-based policy of the role that is being assumed. For more information, see + * Session + * Policies in the IAM User Guide.

+ *

The plaintext that you use for both inline and managed session policies can't exceed + * 2,048 characters. The JSON policy characters can be any ASCII character from the space + * character to the end of the valid character list (\u0020 through \u00FF). It can also + * include the tab (\u0009), linefeed (\u000A), and carriage return (\u000D) + * characters.

+ *

For more information about role session permissions, see Session + * policies.

+ * + *

An Amazon Web Services conversion compresses the passed inline session policy, managed policy ARNs, + * and session tags into a packed binary format that has a separate limit. Your request can + * fail for this limit even if your plaintext meets the other requirements. The + * PackedPolicySize response element indicates by percentage how close the + * policies and tags for your request are to the upper size limit.

+ *
+ * @public + */ + Policy?: string | undefined; + /** + *

The duration, in seconds, of the role session. The value can range from 900 seconds (15 + * minutes) up to the maximum session duration setting for the role. This setting can have a + * value from 1 hour to 12 hours. If you specify a value higher than this setting, the + * operation fails. For example, if you specify a session duration of 12 hours, but your + * administrator set the maximum session duration to 6 hours, your operation fails. To learn + * how to view the maximum value for your role, see View the + * Maximum Session Duration Setting for a Role in the + * IAM User Guide.

+ *

By default, the value is set to 3600 seconds.

+ * + *

The DurationSeconds parameter is separate from the duration of a console + * session that you might request using the returned credentials. The request to the + * federation endpoint for a console sign-in token takes a SessionDuration + * parameter that specifies the maximum length of the console session. For more + * information, see Creating a URL + * that Enables Federated Users to Access the Amazon Web Services Management Console in the + * IAM User Guide.

+ *
+ * @public + */ + DurationSeconds?: number | undefined; +} +/** + * @internal + */ +export declare const AssumeRoleWithWebIdentityRequestFilterSensitiveLog: (obj: AssumeRoleWithWebIdentityRequest) => any; +/** + *

Contains the response to a successful AssumeRoleWithWebIdentity + * request, including temporary Amazon Web Services credentials that can be used to make Amazon Web Services requests.

+ * @public + */ +export interface AssumeRoleWithWebIdentityResponse { + /** + *

The temporary security credentials, which include an access key ID, a secret access key, + * and a security token.

+ * + *

The size of the security token that STS API operations return is not fixed. We + * strongly recommend that you make no assumptions about the maximum size.

+ *
+ * @public + */ + Credentials?: Credentials | undefined; + /** + *

The unique user identifier that is returned by the identity provider. This identifier is + * associated with the WebIdentityToken that was submitted with the + * AssumeRoleWithWebIdentity call. The identifier is typically unique to the + * user and the application that acquired the WebIdentityToken (pairwise + * identifier). For OpenID Connect ID tokens, this field contains the value returned by the + * identity provider as the token's sub (Subject) claim.

+ * @public + */ + SubjectFromWebIdentityToken?: string | undefined; + /** + *

The Amazon Resource Name (ARN) and the assumed role ID, which are identifiers that you + * can use to refer to the resulting temporary security credentials. For example, you can + * reference these credentials as a principal in a resource-based policy by using the ARN or + * assumed role ID. The ARN and ID include the RoleSessionName that you specified + * when you called AssumeRole.

+ * @public + */ + AssumedRoleUser?: AssumedRoleUser | undefined; + /** + *

A percentage value that indicates the packed size of the session policies and session + * tags combined passed in the request. The request fails if the packed size is greater than 100 percent, + * which means the policies and tags exceeded the allowed space.

+ * @public + */ + PackedPolicySize?: number | undefined; + /** + *

The issuing authority of the web identity token presented. For OpenID Connect ID + * tokens, this contains the value of the iss field. For OAuth 2.0 access tokens, + * this contains the value of the ProviderId parameter that was passed in the + * AssumeRoleWithWebIdentity request.

+ * @public + */ + Provider?: string | undefined; + /** + *

The intended audience (also known as client ID) of the web identity token. This is + * traditionally the client identifier issued to the application that requested the web + * identity token.

+ * @public + */ + Audience?: string | undefined; + /** + *

The value of the source identity that is returned in the JSON web token (JWT) from the + * identity provider.

+ *

You can require users to set a source identity value when they assume a role. You do + * this by using the sts:SourceIdentity condition key in a role trust policy. + * That way, actions that are taken with the role are associated with that user. After the + * source identity is set, the value cannot be changed. It is present in the request for all + * actions that are taken by the role and persists across chained role + * sessions. You can configure your identity provider to use an attribute associated with your + * users, like user name or email, as the source identity when calling + * AssumeRoleWithWebIdentity. You do this by adding a claim to the JSON web + * token. To learn more about OIDC tokens and claims, see Using Tokens with User Pools in the Amazon Cognito Developer Guide. + * For more information about using source identity, see Monitor and control + * actions taken with assumed roles in the + * IAM User Guide.

+ *

The regex used to validate this parameter is a string of characters + * consisting of upper- and lower-case alphanumeric characters with no spaces. You can + * also include underscores or any of the following characters: =,.@-

+ * @public + */ + SourceIdentity?: string | undefined; +} +/** + * @internal + */ +export declare const AssumeRoleWithWebIdentityResponseFilterSensitiveLog: (obj: AssumeRoleWithWebIdentityResponse) => any; +/** + *

The request could not be fulfilled because the identity provider (IDP) that was asked + * to verify the incoming identity token could not be reached. This is often a transient + * error caused by network conditions. Retry the request a limited number of times so that + * you don't exceed the request rate. If the error persists, the identity provider might be + * down or not responding.

+ * @public + */ +export declare class IDPCommunicationErrorException extends __BaseException { + readonly name: "IDPCommunicationErrorException"; + readonly $fault: "client"; + /** + * @internal + */ + constructor(opts: __ExceptionOptionType); +} diff --git a/amplify/functions/deleteDocument/node_modules/@aws-sdk/nested-clients/dist-types/submodules/sts/protocols/Aws_query.d.ts b/amplify/functions/deleteDocument/node_modules/@aws-sdk/nested-clients/dist-types/submodules/sts/protocols/Aws_query.d.ts new file mode 100644 index 0000000..db11c3a --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@aws-sdk/nested-clients/dist-types/submodules/sts/protocols/Aws_query.d.ts @@ -0,0 +1,20 @@ +import { HttpRequest as __HttpRequest, HttpResponse as __HttpResponse } from "@smithy/protocol-http"; +import { SerdeContext as __SerdeContext } from "@smithy/types"; +import { AssumeRoleCommandInput, AssumeRoleCommandOutput } from "../commands/AssumeRoleCommand"; +import { AssumeRoleWithWebIdentityCommandInput, AssumeRoleWithWebIdentityCommandOutput } from "../commands/AssumeRoleWithWebIdentityCommand"; +/** + * serializeAws_queryAssumeRoleCommand + */ +export declare const se_AssumeRoleCommand: (input: AssumeRoleCommandInput, context: __SerdeContext) => Promise<__HttpRequest>; +/** + * serializeAws_queryAssumeRoleWithWebIdentityCommand + */ +export declare const se_AssumeRoleWithWebIdentityCommand: (input: AssumeRoleWithWebIdentityCommandInput, context: __SerdeContext) => Promise<__HttpRequest>; +/** + * deserializeAws_queryAssumeRoleCommand + */ +export declare const de_AssumeRoleCommand: (output: __HttpResponse, context: __SerdeContext) => Promise; +/** + * deserializeAws_queryAssumeRoleWithWebIdentityCommand + */ +export declare const de_AssumeRoleWithWebIdentityCommand: (output: __HttpResponse, context: __SerdeContext) => Promise; diff --git a/amplify/functions/deleteDocument/node_modules/@aws-sdk/nested-clients/dist-types/submodules/sts/runtimeConfig.browser.d.ts 
b/amplify/functions/deleteDocument/node_modules/@aws-sdk/nested-clients/dist-types/submodules/sts/runtimeConfig.browser.d.ts new file mode 100644 index 0000000..5513a9b --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@aws-sdk/nested-clients/dist-types/submodules/sts/runtimeConfig.browser.d.ts @@ -0,0 +1,59 @@ +import { FetchHttpHandler as RequestHandler } from "@smithy/fetch-http-handler"; +import { STSClientConfig } from "./STSClient"; +/** + * @internal + */ +export declare const getRuntimeConfig: (config: STSClientConfig) => { + runtime: string; + defaultsMode: import("@smithy/types").Provider; + bodyLengthChecker: import("@smithy/types").BodyLengthCalculator; + credentialDefaultProvider: ((input: any) => import("@smithy/types").AwsCredentialIdentityProvider) | ((_: unknown) => () => Promise); + defaultUserAgentProvider: (config?: import("@aws-sdk/util-user-agent-browser").PreviouslyResolved | undefined) => Promise; + maxAttempts: number | import("@smithy/types").Provider; + region: string | import("@smithy/types").Provider; + requestHandler: import("@smithy/protocol-http").HttpHandler | RequestHandler; + retryMode: string | import("@smithy/types").Provider; + sha256: import("@smithy/types").HashConstructor; + streamCollector: import("@smithy/types").StreamCollector; + useDualstackEndpoint: boolean | import("@smithy/types").Provider; + useFipsEndpoint: boolean | import("@smithy/types").Provider; + apiVersion: string; + cacheMiddleware?: boolean | undefined; + urlParser: import("@smithy/types").UrlParser; + base64Decoder: import("@smithy/types").Decoder; + base64Encoder: (_input: string | Uint8Array) => string; + utf8Decoder: import("@smithy/types").Decoder; + utf8Encoder: (input: string | Uint8Array) => string; + disableHostPrefix: boolean; + serviceId: string; + profile?: string | undefined; + logger: import("@smithy/types").Logger; + extensions: import("./runtimeExtensions").RuntimeExtension[]; + customUserAgent?: string | 
import("@smithy/types").UserAgent | undefined; + userAgentAppId?: string | import("@smithy/types").Provider | undefined; + retryStrategy?: import("@smithy/types").RetryStrategy | import("@smithy/types").RetryStrategyV2 | undefined; + endpoint?: ((string | import("@smithy/types").Endpoint | import("@smithy/types").Provider | import("@smithy/types").EndpointV2 | import("@smithy/types").Provider) & (string | import("@smithy/types").Provider | import("@smithy/types").Endpoint | import("@smithy/types").Provider | import("@smithy/types").EndpointV2 | import("@smithy/types").Provider)) | undefined; + endpointProvider: (params: import("./endpoint/EndpointParameters").EndpointParameters, context?: { + logger?: import("@smithy/types").Logger | undefined; + } | undefined) => import("@smithy/types").EndpointV2; + tls?: boolean | undefined; + serviceConfiguredEndpoint?: undefined; + authSchemePreference?: string[] | import("@smithy/types").Provider | undefined; + httpAuthSchemes: import("@smithy/types").HttpAuthScheme[] | ({ + schemeId: string; + identityProvider: (ipc: import("@smithy/types").IdentityProviderConfig) => import("@smithy/types").IdentityProvider | undefined; + signer: import("@aws-sdk/core").AwsSdkSigV4Signer; + } | { + schemeId: string; + identityProvider: (ipc: import("@smithy/types").IdentityProviderConfig) => import("@smithy/types").IdentityProvider | (() => Promise<{}>); + signer: import("@smithy/core").NoAuthSigner; + })[]; + httpAuthSchemeProvider: import("./auth/httpAuthSchemeProvider").STSHttpAuthSchemeProvider; + credentials?: import("@smithy/types").AwsCredentialIdentity | import("@smithy/types").AwsCredentialIdentityProvider | undefined; + signer?: import("@smithy/types").RequestSigner | ((authScheme?: import("@smithy/types").AuthScheme | undefined) => Promise) | undefined; + signingEscapePath?: boolean | undefined; + systemClockOffset?: number | undefined; + signingRegion?: string | undefined; + signerConstructor?: (new (options: 
import("@smithy/signature-v4").SignatureV4Init & import("@smithy/signature-v4").SignatureV4CryptoInit) => import("@smithy/types").RequestSigner) | undefined; + useGlobalEndpoint?: boolean | import("@smithy/types").Provider | undefined; +}; diff --git a/amplify/functions/deleteDocument/node_modules/@aws-sdk/nested-clients/dist-types/submodules/sts/runtimeConfig.d.ts b/amplify/functions/deleteDocument/node_modules/@aws-sdk/nested-clients/dist-types/submodules/sts/runtimeConfig.d.ts new file mode 100644 index 0000000..c9924b4 --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@aws-sdk/nested-clients/dist-types/submodules/sts/runtimeConfig.d.ts @@ -0,0 +1,57 @@ +import { NoAuthSigner } from "@smithy/core"; +import { NodeHttpHandler as RequestHandler } from "@smithy/node-http-handler"; +import { IdentityProviderConfig } from "@smithy/types"; +import { STSClientConfig } from "./STSClient"; +/** + * @internal + */ +export declare const getRuntimeConfig: (config: STSClientConfig) => { + runtime: string; + defaultsMode: import("@smithy/types").Provider; + authSchemePreference: string[] | import("@smithy/types").Provider; + bodyLengthChecker: import("@smithy/types").BodyLengthCalculator; + defaultUserAgentProvider: (config?: import("@aws-sdk/util-user-agent-node").PreviouslyResolved | undefined) => Promise; + httpAuthSchemes: import("@smithy/types").HttpAuthScheme[] | { + schemeId: string; + identityProvider: (ipc: IdentityProviderConfig) => import("@smithy/types").IdentityProvider | (() => Promise<{}>); + signer: NoAuthSigner; + }[]; + maxAttempts: number | import("@smithy/types").Provider; + region: string | import("@smithy/types").Provider; + requestHandler: RequestHandler | import("@smithy/protocol-http").HttpHandler; + retryMode: string | import("@smithy/types").Provider; + sha256: import("@smithy/types").HashConstructor; + streamCollector: import("@smithy/types").StreamCollector; + useDualstackEndpoint: boolean | import("@smithy/types").Provider; + 
useFipsEndpoint: boolean | import("@smithy/types").Provider; + userAgentAppId: string | import("@smithy/types").Provider; + apiVersion: string; + cacheMiddleware?: boolean | undefined; + urlParser: import("@smithy/types").UrlParser; + base64Decoder: import("@smithy/types").Decoder; + base64Encoder: (_input: string | Uint8Array) => string; + utf8Decoder: import("@smithy/types").Decoder; + utf8Encoder: (input: string | Uint8Array) => string; + disableHostPrefix: boolean; + serviceId: string; + profile?: string | undefined; + credentialDefaultProvider?: ((input: any) => import("@smithy/types").AwsCredentialIdentityProvider) | undefined; + logger: import("@smithy/types").Logger; + extensions: import("./runtimeExtensions").RuntimeExtension[]; + customUserAgent?: string | import("@smithy/types").UserAgent | undefined; + retryStrategy?: import("@smithy/types").RetryStrategy | import("@smithy/types").RetryStrategyV2 | undefined; + endpoint?: ((string | import("@smithy/types").Endpoint | import("@smithy/types").Provider | import("@smithy/types").EndpointV2 | import("@smithy/types").Provider) & (string | import("@smithy/types").Provider | import("@smithy/types").Endpoint | import("@smithy/types").Provider | import("@smithy/types").EndpointV2 | import("@smithy/types").Provider)) | undefined; + endpointProvider: (params: import("./endpoint/EndpointParameters").EndpointParameters, context?: { + logger?: import("@smithy/types").Logger | undefined; + } | undefined) => import("@smithy/types").EndpointV2; + tls?: boolean | undefined; + serviceConfiguredEndpoint?: undefined; + httpAuthSchemeProvider: import("./auth/httpAuthSchemeProvider").STSHttpAuthSchemeProvider; + credentials?: import("@smithy/types").AwsCredentialIdentity | import("@smithy/types").AwsCredentialIdentityProvider | undefined; + signer?: import("@smithy/types").RequestSigner | ((authScheme?: import("@smithy/types").AuthScheme | undefined) => Promise) | undefined; + signingEscapePath?: boolean | undefined; + 
systemClockOffset?: number | undefined; + signingRegion?: string | undefined; + signerConstructor?: (new (options: import("@smithy/signature-v4").SignatureV4Init & import("@smithy/signature-v4").SignatureV4CryptoInit) => import("@smithy/types").RequestSigner) | undefined; + useGlobalEndpoint?: boolean | import("@smithy/types").Provider | undefined; +}; diff --git a/amplify/functions/deleteDocument/node_modules/@aws-sdk/nested-clients/dist-types/submodules/sts/runtimeConfig.native.d.ts b/amplify/functions/deleteDocument/node_modules/@aws-sdk/nested-clients/dist-types/submodules/sts/runtimeConfig.native.d.ts new file mode 100644 index 0000000..5bf519f --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@aws-sdk/nested-clients/dist-types/submodules/sts/runtimeConfig.native.d.ts @@ -0,0 +1,58 @@ +import { STSClientConfig } from "./STSClient"; +/** + * @internal + */ +export declare const getRuntimeConfig: (config: STSClientConfig) => { + runtime: string; + sha256: import("@smithy/types").HashConstructor; + requestHandler: import("@smithy/types").NodeHttpHandlerOptions | import("@smithy/types").FetchHttpHandlerOptions | Record | import("@smithy/protocol-http").HttpHandler | import("@smithy/fetch-http-handler").FetchHttpHandler; + apiVersion: string; + cacheMiddleware?: boolean | undefined; + urlParser: import("@smithy/types").UrlParser; + bodyLengthChecker: import("@smithy/types").BodyLengthCalculator; + streamCollector: import("@smithy/types").StreamCollector; + base64Decoder: import("@smithy/types").Decoder; + base64Encoder: (_input: string | Uint8Array) => string; + utf8Decoder: import("@smithy/types").Decoder; + utf8Encoder: (input: string | Uint8Array) => string; + disableHostPrefix: boolean; + serviceId: string; + useDualstackEndpoint: boolean | import("@smithy/types").Provider; + useFipsEndpoint: boolean | import("@smithy/types").Provider; + region: string | import("@smithy/types").Provider; + profile?: string | undefined; + 
defaultUserAgentProvider: (config?: import("@aws-sdk/util-user-agent-browser").PreviouslyResolved | undefined) => Promise; + credentialDefaultProvider: ((input: any) => import("@smithy/types").AwsCredentialIdentityProvider) | ((_: unknown) => () => Promise); + maxAttempts: number | import("@smithy/types").Provider; + retryMode: string | import("@smithy/types").Provider; + logger: import("@smithy/types").Logger; + extensions: import("./runtimeExtensions").RuntimeExtension[]; + defaultsMode: import("@smithy/smithy-client").DefaultsMode | import("@smithy/types").Provider; + customUserAgent?: string | import("@smithy/types").UserAgent | undefined; + userAgentAppId?: string | import("@smithy/types").Provider | undefined; + retryStrategy?: import("@smithy/types").RetryStrategy | import("@smithy/types").RetryStrategyV2 | undefined; + endpoint?: ((string | import("@smithy/types").Endpoint | import("@smithy/types").Provider | import("@smithy/types").EndpointV2 | import("@smithy/types").Provider) & (string | import("@smithy/types").Provider | import("@smithy/types").Endpoint | import("@smithy/types").Provider | import("@smithy/types").EndpointV2 | import("@smithy/types").Provider)) | undefined; + endpointProvider: (params: import("./endpoint/EndpointParameters").EndpointParameters, context?: { + logger?: import("@smithy/types").Logger | undefined; + } | undefined) => import("@smithy/types").EndpointV2; + tls?: boolean | undefined; + serviceConfiguredEndpoint?: undefined; + authSchemePreference?: string[] | import("@smithy/types").Provider | undefined; + httpAuthSchemes: import("@smithy/types").HttpAuthScheme[] | ({ + schemeId: string; + identityProvider: (ipc: import("@smithy/types").IdentityProviderConfig) => import("@smithy/types").IdentityProvider | undefined; + signer: import("@aws-sdk/core").AwsSdkSigV4Signer; + } | { + schemeId: string; + identityProvider: (ipc: import("@smithy/types").IdentityProviderConfig) => import("@smithy/types").IdentityProvider | (() => 
Promise<{}>); + signer: import("@smithy/core").NoAuthSigner; + })[]; + httpAuthSchemeProvider: import("./auth/httpAuthSchemeProvider").STSHttpAuthSchemeProvider; + credentials?: import("@smithy/types").AwsCredentialIdentity | import("@smithy/types").AwsCredentialIdentityProvider | undefined; + signer?: import("@smithy/types").RequestSigner | ((authScheme?: import("@smithy/types").AuthScheme | undefined) => Promise) | undefined; + signingEscapePath?: boolean | undefined; + systemClockOffset?: number | undefined; + signingRegion?: string | undefined; + signerConstructor?: (new (options: import("@smithy/signature-v4").SignatureV4Init & import("@smithy/signature-v4").SignatureV4CryptoInit) => import("@smithy/types").RequestSigner) | undefined; + useGlobalEndpoint?: boolean | import("@smithy/types").Provider | undefined; +}; diff --git a/amplify/functions/deleteDocument/node_modules/@aws-sdk/nested-clients/dist-types/submodules/sts/runtimeConfig.shared.d.ts b/amplify/functions/deleteDocument/node_modules/@aws-sdk/nested-clients/dist-types/submodules/sts/runtimeConfig.shared.d.ts new file mode 100644 index 0000000..5b99276 --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@aws-sdk/nested-clients/dist-types/submodules/sts/runtimeConfig.shared.d.ts @@ -0,0 +1,32 @@ +import { AwsSdkSigV4Signer } from "@aws-sdk/core"; +import { NoAuthSigner } from "@smithy/core"; +import { IdentityProviderConfig } from "@smithy/types"; +import { STSClientConfig } from "./STSClient"; +/** + * @internal + */ +export declare const getRuntimeConfig: (config: STSClientConfig) => { + apiVersion: string; + base64Decoder: import("@smithy/types").Decoder; + base64Encoder: (_input: string | Uint8Array) => string; + disableHostPrefix: boolean; + endpointProvider: (params: import("./endpoint/EndpointParameters").EndpointParameters, context?: { + logger?: import("@smithy/types").Logger | undefined; + } | undefined) => import("@smithy/types").EndpointV2; + extensions: 
import("./runtimeExtensions").RuntimeExtension[]; + httpAuthSchemeProvider: import("./auth/httpAuthSchemeProvider").STSHttpAuthSchemeProvider; + httpAuthSchemes: import("@smithy/types").HttpAuthScheme[] | ({ + schemeId: string; + identityProvider: (ipc: IdentityProviderConfig) => import("@smithy/types").IdentityProvider | undefined; + signer: AwsSdkSigV4Signer; + } | { + schemeId: string; + identityProvider: (ipc: IdentityProviderConfig) => import("@smithy/types").IdentityProvider | (() => Promise<{}>); + signer: NoAuthSigner; + })[]; + logger: import("@smithy/types").Logger; + serviceId: string; + urlParser: import("@smithy/types").UrlParser; + utf8Decoder: import("@smithy/types").Decoder; + utf8Encoder: (input: string | Uint8Array) => string; +}; diff --git a/amplify/functions/deleteDocument/node_modules/@aws-sdk/nested-clients/dist-types/submodules/sts/runtimeExtensions.d.ts b/amplify/functions/deleteDocument/node_modules/@aws-sdk/nested-clients/dist-types/submodules/sts/runtimeExtensions.d.ts new file mode 100644 index 0000000..ebd8567 --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@aws-sdk/nested-clients/dist-types/submodules/sts/runtimeExtensions.d.ts @@ -0,0 +1,17 @@ +import { STSExtensionConfiguration } from "./extensionConfiguration"; +/** + * @public + */ +export interface RuntimeExtension { + configure(extensionConfiguration: STSExtensionConfiguration): void; +} +/** + * @public + */ +export interface RuntimeExtensionsConfig { + extensions: RuntimeExtension[]; +} +/** + * @internal + */ +export declare const resolveRuntimeExtensions: (runtimeConfig: any, extensions: RuntimeExtension[]) => any; diff --git a/amplify/functions/deleteDocument/node_modules/@aws-sdk/nested-clients/dist-types/ts3.4/index.d.ts b/amplify/functions/deleteDocument/node_modules/@aws-sdk/nested-clients/dist-types/ts3.4/index.d.ts new file mode 100644 index 0000000..cb0ff5c --- /dev/null +++ 
b/amplify/functions/deleteDocument/node_modules/@aws-sdk/nested-clients/dist-types/ts3.4/index.d.ts @@ -0,0 +1 @@ +export {}; diff --git a/amplify/functions/deleteDocument/node_modules/@aws-sdk/nested-clients/dist-types/ts3.4/submodules/sso-oidc/SSOOIDC.d.ts b/amplify/functions/deleteDocument/node_modules/@aws-sdk/nested-clients/dist-types/ts3.4/submodules/sso-oidc/SSOOIDC.d.ts new file mode 100644 index 0000000..10ee849 --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@aws-sdk/nested-clients/dist-types/ts3.4/submodules/sso-oidc/SSOOIDC.d.ts @@ -0,0 +1,22 @@ +import { HttpHandlerOptions as __HttpHandlerOptions } from "@smithy/types"; +import { + CreateTokenCommandInput, + CreateTokenCommandOutput, +} from "./commands/CreateTokenCommand"; +import { SSOOIDCClient } from "./SSOOIDCClient"; +export interface SSOOIDC { + createToken( + args: CreateTokenCommandInput, + options?: __HttpHandlerOptions + ): Promise; + createToken( + args: CreateTokenCommandInput, + cb: (err: any, data?: CreateTokenCommandOutput) => void + ): void; + createToken( + args: CreateTokenCommandInput, + options: __HttpHandlerOptions, + cb: (err: any, data?: CreateTokenCommandOutput) => void + ): void; +} +export declare class SSOOIDC extends SSOOIDCClient implements SSOOIDC {} diff --git a/amplify/functions/deleteDocument/node_modules/@aws-sdk/nested-clients/dist-types/ts3.4/submodules/sso-oidc/SSOOIDCClient.d.ts b/amplify/functions/deleteDocument/node_modules/@aws-sdk/nested-clients/dist-types/ts3.4/submodules/sso-oidc/SSOOIDCClient.d.ts new file mode 100644 index 0000000..d44b7af --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@aws-sdk/nested-clients/dist-types/ts3.4/submodules/sso-oidc/SSOOIDCClient.d.ts @@ -0,0 +1,121 @@ +import { + HostHeaderInputConfig, + HostHeaderResolvedConfig, +} from "@aws-sdk/middleware-host-header"; +import { + UserAgentInputConfig, + UserAgentResolvedConfig, +} from "@aws-sdk/middleware-user-agent"; +import { + RegionInputConfig, + 
RegionResolvedConfig, +} from "@smithy/config-resolver"; +import { + EndpointInputConfig, + EndpointResolvedConfig, +} from "@smithy/middleware-endpoint"; +import { + RetryInputConfig, + RetryResolvedConfig, +} from "@smithy/middleware-retry"; +import { HttpHandlerUserInput as __HttpHandlerUserInput } from "@smithy/protocol-http"; +import { + Client as __Client, + DefaultsMode as __DefaultsMode, + SmithyConfiguration as __SmithyConfiguration, + SmithyResolvedConfiguration as __SmithyResolvedConfiguration, +} from "@smithy/smithy-client"; +import { + BodyLengthCalculator as __BodyLengthCalculator, + CheckOptionalClientConfig as __CheckOptionalClientConfig, + ChecksumConstructor as __ChecksumConstructor, + Decoder as __Decoder, + Encoder as __Encoder, + HashConstructor as __HashConstructor, + HttpHandlerOptions as __HttpHandlerOptions, + Logger as __Logger, + Provider as __Provider, + Provider, + StreamCollector as __StreamCollector, + UrlParser as __UrlParser, + UserAgent as __UserAgent, +} from "@smithy/types"; +import { + HttpAuthSchemeInputConfig, + HttpAuthSchemeResolvedConfig, +} from "./auth/httpAuthSchemeProvider"; +import { + CreateTokenCommandInput, + CreateTokenCommandOutput, +} from "./commands/CreateTokenCommand"; +import { + ClientInputEndpointParameters, + ClientResolvedEndpointParameters, + EndpointParameters, +} from "./endpoint/EndpointParameters"; +import { RuntimeExtension, RuntimeExtensionsConfig } from "./runtimeExtensions"; +export { __Client }; +export type ServiceInputTypes = CreateTokenCommandInput; +export type ServiceOutputTypes = CreateTokenCommandOutput; +export interface ClientDefaults + extends Partial<__SmithyConfiguration<__HttpHandlerOptions>> { + requestHandler?: __HttpHandlerUserInput; + sha256?: __ChecksumConstructor | __HashConstructor; + urlParser?: __UrlParser; + bodyLengthChecker?: __BodyLengthCalculator; + streamCollector?: __StreamCollector; + base64Decoder?: __Decoder; + base64Encoder?: __Encoder; + utf8Decoder?: 
__Decoder; + utf8Encoder?: __Encoder; + runtime?: string; + disableHostPrefix?: boolean; + serviceId?: string; + useDualstackEndpoint?: boolean | __Provider; + useFipsEndpoint?: boolean | __Provider; + region?: string | __Provider; + profile?: string; + defaultUserAgentProvider?: Provider<__UserAgent>; + maxAttempts?: number | __Provider; + retryMode?: string | __Provider; + logger?: __Logger; + extensions?: RuntimeExtension[]; + defaultsMode?: __DefaultsMode | __Provider<__DefaultsMode>; +} +export type SSOOIDCClientConfigType = Partial< + __SmithyConfiguration<__HttpHandlerOptions> +> & + ClientDefaults & + UserAgentInputConfig & + RetryInputConfig & + RegionInputConfig & + HostHeaderInputConfig & + EndpointInputConfig & + HttpAuthSchemeInputConfig & + ClientInputEndpointParameters; +export interface SSOOIDCClientConfig extends SSOOIDCClientConfigType {} +export type SSOOIDCClientResolvedConfigType = + __SmithyResolvedConfiguration<__HttpHandlerOptions> & + Required & + RuntimeExtensionsConfig & + UserAgentResolvedConfig & + RetryResolvedConfig & + RegionResolvedConfig & + HostHeaderResolvedConfig & + EndpointResolvedConfig & + HttpAuthSchemeResolvedConfig & + ClientResolvedEndpointParameters; +export interface SSOOIDCClientResolvedConfig + extends SSOOIDCClientResolvedConfigType {} +export declare class SSOOIDCClient extends __Client< + __HttpHandlerOptions, + ServiceInputTypes, + ServiceOutputTypes, + SSOOIDCClientResolvedConfig +> { + readonly config: SSOOIDCClientResolvedConfig; + constructor( + ...[configuration]: __CheckOptionalClientConfig + ); + destroy(): void; +} diff --git a/amplify/functions/deleteDocument/node_modules/@aws-sdk/nested-clients/dist-types/ts3.4/submodules/sso-oidc/auth/httpAuthExtensionConfiguration.d.ts b/amplify/functions/deleteDocument/node_modules/@aws-sdk/nested-clients/dist-types/ts3.4/submodules/sso-oidc/auth/httpAuthExtensionConfiguration.d.ts new file mode 100644 index 0000000..c39ba91 --- /dev/null +++ 
b/amplify/functions/deleteDocument/node_modules/@aws-sdk/nested-clients/dist-types/ts3.4/submodules/sso-oidc/auth/httpAuthExtensionConfiguration.d.ts @@ -0,0 +1,32 @@ +import { + AwsCredentialIdentity, + AwsCredentialIdentityProvider, + HttpAuthScheme, +} from "@smithy/types"; +import { SSOOIDCHttpAuthSchemeProvider } from "./httpAuthSchemeProvider"; +export interface HttpAuthExtensionConfiguration { + setHttpAuthScheme(httpAuthScheme: HttpAuthScheme): void; + httpAuthSchemes(): HttpAuthScheme[]; + setHttpAuthSchemeProvider( + httpAuthSchemeProvider: SSOOIDCHttpAuthSchemeProvider + ): void; + httpAuthSchemeProvider(): SSOOIDCHttpAuthSchemeProvider; + setCredentials( + credentials: AwsCredentialIdentity | AwsCredentialIdentityProvider + ): void; + credentials(): + | AwsCredentialIdentity + | AwsCredentialIdentityProvider + | undefined; +} +export type HttpAuthRuntimeConfig = Partial<{ + httpAuthSchemes: HttpAuthScheme[]; + httpAuthSchemeProvider: SSOOIDCHttpAuthSchemeProvider; + credentials: AwsCredentialIdentity | AwsCredentialIdentityProvider; +}>; +export declare const getHttpAuthExtensionConfiguration: ( + runtimeConfig: HttpAuthRuntimeConfig +) => HttpAuthExtensionConfiguration; +export declare const resolveHttpAuthRuntimeConfig: ( + config: HttpAuthExtensionConfiguration +) => HttpAuthRuntimeConfig; diff --git a/amplify/functions/deleteDocument/node_modules/@aws-sdk/nested-clients/dist-types/ts3.4/submodules/sso-oidc/auth/httpAuthSchemeProvider.d.ts b/amplify/functions/deleteDocument/node_modules/@aws-sdk/nested-clients/dist-types/ts3.4/submodules/sso-oidc/auth/httpAuthSchemeProvider.d.ts new file mode 100644 index 0000000..936b101 --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@aws-sdk/nested-clients/dist-types/ts3.4/submodules/sso-oidc/auth/httpAuthSchemeProvider.d.ts @@ -0,0 +1,47 @@ +import { + AwsSdkSigV4AuthInputConfig, + AwsSdkSigV4AuthResolvedConfig, + AwsSdkSigV4PreviouslyResolved, +} from "@aws-sdk/core"; +import { + 
HandlerExecutionContext, + HttpAuthScheme, + HttpAuthSchemeParameters, + HttpAuthSchemeParametersProvider, + HttpAuthSchemeProvider, + Provider, +} from "@smithy/types"; +import { SSOOIDCClientResolvedConfig } from "../SSOOIDCClient"; +export interface SSOOIDCHttpAuthSchemeParameters + extends HttpAuthSchemeParameters { + region?: string; +} +export interface SSOOIDCHttpAuthSchemeParametersProvider + extends HttpAuthSchemeParametersProvider< + SSOOIDCClientResolvedConfig, + HandlerExecutionContext, + SSOOIDCHttpAuthSchemeParameters, + object + > {} +export declare const defaultSSOOIDCHttpAuthSchemeParametersProvider: ( + config: SSOOIDCClientResolvedConfig, + context: HandlerExecutionContext, + input: object +) => Promise; +export interface SSOOIDCHttpAuthSchemeProvider + extends HttpAuthSchemeProvider {} +export declare const defaultSSOOIDCHttpAuthSchemeProvider: SSOOIDCHttpAuthSchemeProvider; +export interface HttpAuthSchemeInputConfig extends AwsSdkSigV4AuthInputConfig { + authSchemePreference?: string[] | Provider; + httpAuthSchemes?: HttpAuthScheme[]; + httpAuthSchemeProvider?: SSOOIDCHttpAuthSchemeProvider; +} +export interface HttpAuthSchemeResolvedConfig + extends AwsSdkSigV4AuthResolvedConfig { + readonly authSchemePreference: Provider; + readonly httpAuthSchemes: HttpAuthScheme[]; + readonly httpAuthSchemeProvider: SSOOIDCHttpAuthSchemeProvider; +} +export declare const resolveHttpAuthSchemeConfig: ( + config: T & HttpAuthSchemeInputConfig & AwsSdkSigV4PreviouslyResolved +) => T & HttpAuthSchemeResolvedConfig; diff --git a/amplify/functions/deleteDocument/node_modules/@aws-sdk/nested-clients/dist-types/ts3.4/submodules/sso-oidc/commands/CreateTokenCommand.d.ts b/amplify/functions/deleteDocument/node_modules/@aws-sdk/nested-clients/dist-types/ts3.4/submodules/sso-oidc/commands/CreateTokenCommand.d.ts new file mode 100644 index 0000000..cb1de8b --- /dev/null +++ 
b/amplify/functions/deleteDocument/node_modules/@aws-sdk/nested-clients/dist-types/ts3.4/submodules/sso-oidc/commands/CreateTokenCommand.d.ts @@ -0,0 +1,43 @@ +import { Command as $Command } from "@smithy/smithy-client"; +import { MetadataBearer as __MetadataBearer } from "@smithy/types"; +import { CreateTokenRequest, CreateTokenResponse } from "../models/models_0"; +import { SSOOIDCClientResolvedConfig } from "../SSOOIDCClient"; +export { __MetadataBearer }; +export { $Command }; +export interface CreateTokenCommandInput extends CreateTokenRequest {} +export interface CreateTokenCommandOutput + extends CreateTokenResponse, + __MetadataBearer {} +declare const CreateTokenCommand_base: { + new ( + input: CreateTokenCommandInput + ): import("@smithy/smithy-client").CommandImpl< + CreateTokenCommandInput, + CreateTokenCommandOutput, + SSOOIDCClientResolvedConfig, + CreateTokenCommandInput, + CreateTokenCommandOutput + >; + new ( + __0_0: CreateTokenCommandInput + ): import("@smithy/smithy-client").CommandImpl< + CreateTokenCommandInput, + CreateTokenCommandOutput, + SSOOIDCClientResolvedConfig, + CreateTokenCommandInput, + CreateTokenCommandOutput + >; + getEndpointParameterInstructions(): import("@smithy/middleware-endpoint").EndpointParameterInstructions; +}; +export declare class CreateTokenCommand extends CreateTokenCommand_base { + protected static __types: { + api: { + input: CreateTokenRequest; + output: CreateTokenResponse; + }; + sdk: { + input: CreateTokenCommandInput; + output: CreateTokenCommandOutput; + }; + }; +} diff --git a/amplify/functions/deleteDocument/node_modules/@aws-sdk/nested-clients/dist-types/ts3.4/submodules/sso-oidc/commands/index.d.ts b/amplify/functions/deleteDocument/node_modules/@aws-sdk/nested-clients/dist-types/ts3.4/submodules/sso-oidc/commands/index.d.ts new file mode 100644 index 0000000..09214ca --- /dev/null +++ 
b/amplify/functions/deleteDocument/node_modules/@aws-sdk/nested-clients/dist-types/ts3.4/submodules/sso-oidc/commands/index.d.ts @@ -0,0 +1 @@ +export * from "./CreateTokenCommand"; diff --git a/amplify/functions/deleteDocument/node_modules/@aws-sdk/nested-clients/dist-types/ts3.4/submodules/sso-oidc/endpoint/EndpointParameters.d.ts b/amplify/functions/deleteDocument/node_modules/@aws-sdk/nested-clients/dist-types/ts3.4/submodules/sso-oidc/endpoint/EndpointParameters.d.ts new file mode 100644 index 0000000..7f24540 --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@aws-sdk/nested-clients/dist-types/ts3.4/submodules/sso-oidc/endpoint/EndpointParameters.d.ts @@ -0,0 +1,51 @@ +import { + Endpoint, + EndpointParameters as __EndpointParameters, + EndpointV2, + Provider, +} from "@smithy/types"; +export interface ClientInputEndpointParameters { + region?: string | Provider; + useDualstackEndpoint?: boolean | Provider; + useFipsEndpoint?: boolean | Provider; + endpoint?: + | string + | Provider + | Endpoint + | Provider + | EndpointV2 + | Provider; +} +export type ClientResolvedEndpointParameters = ClientInputEndpointParameters & { + defaultSigningName: string; +}; +export declare const resolveClientEndpointParameters: ( + options: T & ClientInputEndpointParameters +) => T & + ClientInputEndpointParameters & { + defaultSigningName: string; + }; +export declare const commonParams: { + readonly UseFIPS: { + readonly type: "builtInParams"; + readonly name: "useFipsEndpoint"; + }; + readonly Endpoint: { + readonly type: "builtInParams"; + readonly name: "endpoint"; + }; + readonly Region: { + readonly type: "builtInParams"; + readonly name: "region"; + }; + readonly UseDualStack: { + readonly type: "builtInParams"; + readonly name: "useDualstackEndpoint"; + }; +}; +export interface EndpointParameters extends __EndpointParameters { + Region?: string; + UseDualStack?: boolean; + UseFIPS?: boolean; + Endpoint?: string; +} diff --git 
a/amplify/functions/deleteDocument/node_modules/@aws-sdk/nested-clients/dist-types/ts3.4/submodules/sso-oidc/endpoint/endpointResolver.d.ts b/amplify/functions/deleteDocument/node_modules/@aws-sdk/nested-clients/dist-types/ts3.4/submodules/sso-oidc/endpoint/endpointResolver.d.ts new file mode 100644 index 0000000..5909925 --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@aws-sdk/nested-clients/dist-types/ts3.4/submodules/sso-oidc/endpoint/endpointResolver.d.ts @@ -0,0 +1,8 @@ +import { EndpointV2, Logger } from "@smithy/types"; +import { EndpointParameters } from "./EndpointParameters"; +export declare const defaultEndpointResolver: ( + endpointParams: EndpointParameters, + context?: { + logger?: Logger; + } +) => EndpointV2; diff --git a/amplify/functions/deleteDocument/node_modules/@aws-sdk/nested-clients/dist-types/ts3.4/submodules/sso-oidc/endpoint/ruleset.d.ts b/amplify/functions/deleteDocument/node_modules/@aws-sdk/nested-clients/dist-types/ts3.4/submodules/sso-oidc/endpoint/ruleset.d.ts new file mode 100644 index 0000000..4b23899 --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@aws-sdk/nested-clients/dist-types/ts3.4/submodules/sso-oidc/endpoint/ruleset.d.ts @@ -0,0 +1,2 @@ +import { RuleSetObject } from "@smithy/types"; +export declare const ruleSet: RuleSetObject; diff --git a/amplify/functions/deleteDocument/node_modules/@aws-sdk/nested-clients/dist-types/ts3.4/submodules/sso-oidc/extensionConfiguration.d.ts b/amplify/functions/deleteDocument/node_modules/@aws-sdk/nested-clients/dist-types/ts3.4/submodules/sso-oidc/extensionConfiguration.d.ts new file mode 100644 index 0000000..c208e33 --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@aws-sdk/nested-clients/dist-types/ts3.4/submodules/sso-oidc/extensionConfiguration.d.ts @@ -0,0 +1,9 @@ +import { AwsRegionExtensionConfiguration } from "@aws-sdk/types"; +import { HttpHandlerExtensionConfiguration } from "@smithy/protocol-http"; +import { 
DefaultExtensionConfiguration } from "@smithy/types"; +import { HttpAuthExtensionConfiguration } from "./auth/httpAuthExtensionConfiguration"; +export interface SSOOIDCExtensionConfiguration + extends HttpHandlerExtensionConfiguration, + DefaultExtensionConfiguration, + AwsRegionExtensionConfiguration, + HttpAuthExtensionConfiguration {} diff --git a/amplify/functions/deleteDocument/node_modules/@aws-sdk/nested-clients/dist-types/ts3.4/submodules/sso-oidc/index.d.ts b/amplify/functions/deleteDocument/node_modules/@aws-sdk/nested-clients/dist-types/ts3.4/submodules/sso-oidc/index.d.ts new file mode 100644 index 0000000..1e9247f --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@aws-sdk/nested-clients/dist-types/ts3.4/submodules/sso-oidc/index.d.ts @@ -0,0 +1,8 @@ +export * from "./SSOOIDCClient"; +export * from "./SSOOIDC"; +export { ClientInputEndpointParameters } from "./endpoint/EndpointParameters"; +export { RuntimeExtension } from "./runtimeExtensions"; +export { SSOOIDCExtensionConfiguration } from "./extensionConfiguration"; +export * from "./commands"; +export * from "./models"; +export { SSOOIDCServiceException } from "./models/SSOOIDCServiceException"; diff --git a/amplify/functions/deleteDocument/node_modules/@aws-sdk/nested-clients/dist-types/ts3.4/submodules/sso-oidc/models/SSOOIDCServiceException.d.ts b/amplify/functions/deleteDocument/node_modules/@aws-sdk/nested-clients/dist-types/ts3.4/submodules/sso-oidc/models/SSOOIDCServiceException.d.ts new file mode 100644 index 0000000..dae636f --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@aws-sdk/nested-clients/dist-types/ts3.4/submodules/sso-oidc/models/SSOOIDCServiceException.d.ts @@ -0,0 +1,9 @@ +import { + ServiceException as __ServiceException, + ServiceExceptionOptions as __ServiceExceptionOptions, +} from "@smithy/smithy-client"; +export { __ServiceExceptionOptions }; +export { __ServiceException }; +export declare class SSOOIDCServiceException extends 
__ServiceException { + constructor(options: __ServiceExceptionOptions); +} diff --git a/amplify/functions/deleteDocument/node_modules/@aws-sdk/nested-clients/dist-types/ts3.4/submodules/sso-oidc/models/index.d.ts b/amplify/functions/deleteDocument/node_modules/@aws-sdk/nested-clients/dist-types/ts3.4/submodules/sso-oidc/models/index.d.ts new file mode 100644 index 0000000..09c5d6e --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@aws-sdk/nested-clients/dist-types/ts3.4/submodules/sso-oidc/models/index.d.ts @@ -0,0 +1 @@ +export * from "./models_0"; diff --git a/amplify/functions/deleteDocument/node_modules/@aws-sdk/nested-clients/dist-types/ts3.4/submodules/sso-oidc/models/models_0.d.ts b/amplify/functions/deleteDocument/node_modules/@aws-sdk/nested-clients/dist-types/ts3.4/submodules/sso-oidc/models/models_0.d.ts new file mode 100644 index 0000000..68de714 --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@aws-sdk/nested-clients/dist-types/ts3.4/submodules/sso-oidc/models/models_0.d.ts @@ -0,0 +1,123 @@ +import { ExceptionOptionType as __ExceptionOptionType } from "@smithy/smithy-client"; +import { SSOOIDCServiceException as __BaseException } from "./SSOOIDCServiceException"; +export declare class AccessDeniedException extends __BaseException { + readonly name: "AccessDeniedException"; + readonly $fault: "client"; + error?: string | undefined; + error_description?: string | undefined; + constructor( + opts: __ExceptionOptionType + ); +} +export declare class AuthorizationPendingException extends __BaseException { + readonly name: "AuthorizationPendingException"; + readonly $fault: "client"; + error?: string | undefined; + error_description?: string | undefined; + constructor( + opts: __ExceptionOptionType + ); +} +export interface CreateTokenRequest { + clientId: string | undefined; + clientSecret: string | undefined; + grantType: string | undefined; + deviceCode?: string | undefined; + code?: string | undefined; + refreshToken?: 
string | undefined; + scope?: string[] | undefined; + redirectUri?: string | undefined; + codeVerifier?: string | undefined; +} +export declare const CreateTokenRequestFilterSensitiveLog: ( + obj: CreateTokenRequest +) => any; +export interface CreateTokenResponse { + accessToken?: string | undefined; + tokenType?: string | undefined; + expiresIn?: number | undefined; + refreshToken?: string | undefined; + idToken?: string | undefined; +} +export declare const CreateTokenResponseFilterSensitiveLog: ( + obj: CreateTokenResponse +) => any; +export declare class ExpiredTokenException extends __BaseException { + readonly name: "ExpiredTokenException"; + readonly $fault: "client"; + error?: string | undefined; + error_description?: string | undefined; + constructor( + opts: __ExceptionOptionType + ); +} +export declare class InternalServerException extends __BaseException { + readonly name: "InternalServerException"; + readonly $fault: "server"; + error?: string | undefined; + error_description?: string | undefined; + constructor( + opts: __ExceptionOptionType + ); +} +export declare class InvalidClientException extends __BaseException { + readonly name: "InvalidClientException"; + readonly $fault: "client"; + error?: string | undefined; + error_description?: string | undefined; + constructor( + opts: __ExceptionOptionType + ); +} +export declare class InvalidGrantException extends __BaseException { + readonly name: "InvalidGrantException"; + readonly $fault: "client"; + error?: string | undefined; + error_description?: string | undefined; + constructor( + opts: __ExceptionOptionType + ); +} +export declare class InvalidRequestException extends __BaseException { + readonly name: "InvalidRequestException"; + readonly $fault: "client"; + error?: string | undefined; + error_description?: string | undefined; + constructor( + opts: __ExceptionOptionType + ); +} +export declare class InvalidScopeException extends __BaseException { + readonly name: "InvalidScopeException"; + 
readonly $fault: "client"; + error?: string | undefined; + error_description?: string | undefined; + constructor( + opts: __ExceptionOptionType + ); +} +export declare class SlowDownException extends __BaseException { + readonly name: "SlowDownException"; + readonly $fault: "client"; + error?: string | undefined; + error_description?: string | undefined; + constructor(opts: __ExceptionOptionType); +} +export declare class UnauthorizedClientException extends __BaseException { + readonly name: "UnauthorizedClientException"; + readonly $fault: "client"; + error?: string | undefined; + error_description?: string | undefined; + constructor( + opts: __ExceptionOptionType + ); +} +export declare class UnsupportedGrantTypeException extends __BaseException { + readonly name: "UnsupportedGrantTypeException"; + readonly $fault: "client"; + error?: string | undefined; + error_description?: string | undefined; + constructor( + opts: __ExceptionOptionType + ); +} diff --git a/amplify/functions/deleteDocument/node_modules/@aws-sdk/nested-clients/dist-types/ts3.4/submodules/sso-oidc/protocols/Aws_restJson1.d.ts b/amplify/functions/deleteDocument/node_modules/@aws-sdk/nested-clients/dist-types/ts3.4/submodules/sso-oidc/protocols/Aws_restJson1.d.ts new file mode 100644 index 0000000..d0657b8 --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@aws-sdk/nested-clients/dist-types/ts3.4/submodules/sso-oidc/protocols/Aws_restJson1.d.ts @@ -0,0 +1,17 @@ +import { + HttpRequest as __HttpRequest, + HttpResponse as __HttpResponse, +} from "@smithy/protocol-http"; +import { SerdeContext as __SerdeContext } from "@smithy/types"; +import { + CreateTokenCommandInput, + CreateTokenCommandOutput, +} from "../commands/CreateTokenCommand"; +export declare const se_CreateTokenCommand: ( + input: CreateTokenCommandInput, + context: __SerdeContext +) => Promise<__HttpRequest>; +export declare const de_CreateTokenCommand: ( + output: __HttpResponse, + context: __SerdeContext +) => Promise; 
diff --git a/amplify/functions/deleteDocument/node_modules/@aws-sdk/nested-clients/dist-types/ts3.4/submodules/sso-oidc/runtimeConfig.browser.d.ts b/amplify/functions/deleteDocument/node_modules/@aws-sdk/nested-clients/dist-types/ts3.4/submodules/sso-oidc/runtimeConfig.browser.d.ts new file mode 100644 index 0000000..c469a24 --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@aws-sdk/nested-clients/dist-types/ts3.4/submodules/sso-oidc/runtimeConfig.browser.d.ts @@ -0,0 +1,120 @@ +import { FetchHttpHandler as RequestHandler } from "@smithy/fetch-http-handler"; +import { SSOOIDCClientConfig } from "./SSOOIDCClient"; +export declare const getRuntimeConfig: (config: SSOOIDCClientConfig) => { + runtime: string; + defaultsMode: import("@smithy/types").Provider< + import("@smithy/smithy-client").ResolvedDefaultsMode + >; + bodyLengthChecker: import("@smithy/types").BodyLengthCalculator; + defaultUserAgentProvider: ( + config?: + | import("@aws-sdk/util-user-agent-browser").PreviouslyResolved + | undefined + ) => Promise; + maxAttempts: number | import("@smithy/types").Provider; + region: string | import("@smithy/types").Provider; + requestHandler: + | import("@smithy/protocol-http").HttpHandler + | RequestHandler; + retryMode: string | import("@smithy/types").Provider; + sha256: import("@smithy/types").HashConstructor; + streamCollector: import("@smithy/types").StreamCollector; + useDualstackEndpoint: boolean | import("@smithy/types").Provider; + useFipsEndpoint: boolean | import("@smithy/types").Provider; + apiVersion: string; + cacheMiddleware?: boolean | undefined; + urlParser: import("@smithy/types").UrlParser; + base64Decoder: import("@smithy/types").Decoder; + base64Encoder: (_input: string | Uint8Array) => string; + utf8Decoder: import("@smithy/types").Decoder; + utf8Encoder: (input: string | Uint8Array) => string; + disableHostPrefix: boolean; + serviceId: string; + profile?: string | undefined; + logger: import("@smithy/types").Logger; + 
extensions: import("./runtimeExtensions").RuntimeExtension[]; + customUserAgent?: string | import("@smithy/types").UserAgent | undefined; + userAgentAppId?: + | string + | import("@smithy/types").Provider + | undefined; + retryStrategy?: + | import("@smithy/types").RetryStrategy + | import("@smithy/types").RetryStrategyV2 + | undefined; + endpoint?: + | (( + | string + | import("@smithy/types").Endpoint + | import("@smithy/types").Provider + | import("@smithy/types").EndpointV2 + | import("@smithy/types").Provider + ) & + ( + | string + | import("@smithy/types").Provider + | import("@smithy/types").Endpoint + | import("@smithy/types").Provider + | import("@smithy/types").EndpointV2 + | import("@smithy/types").Provider + )) + | undefined; + endpointProvider: ( + endpointParams: import("./endpoint/EndpointParameters").EndpointParameters, + context?: { + logger?: import("@smithy/types").Logger | undefined; + } + ) => import("@smithy/types").EndpointV2; + tls?: boolean | undefined; + serviceConfiguredEndpoint?: undefined; + authSchemePreference?: + | string[] + | import("@smithy/types").Provider + | undefined; + httpAuthSchemes: + | import("@smithy/types").HttpAuthScheme[] + | ( + | { + schemeId: string; + identityProvider: ( + ipc: import("@smithy/types").IdentityProviderConfig + ) => + | import("@smithy/types").IdentityProvider< + import("@smithy/types").Identity + > + | undefined; + signer: import("@aws-sdk/core").AwsSdkSigV4Signer; + } + | { + schemeId: string; + identityProvider: ( + ipc: import("@smithy/types").IdentityProviderConfig + ) => + | import("@smithy/types").IdentityProvider< + import("@smithy/types").Identity + > + | (() => Promise<{}>); + signer: import("@smithy/core").NoAuthSigner; + } + )[]; + httpAuthSchemeProvider: import("./auth/httpAuthSchemeProvider").SSOOIDCHttpAuthSchemeProvider; + credentials?: + | import("@smithy/types").AwsCredentialIdentity + | import("@smithy/types").AwsCredentialIdentityProvider + | undefined; + signer?: + | 
import("@smithy/types").RequestSigner + | (( + authScheme?: import("@smithy/types").AuthScheme | undefined + ) => Promise) + | undefined; + signingEscapePath?: boolean | undefined; + systemClockOffset?: number | undefined; + signingRegion?: string | undefined; + signerConstructor?: + | (new ( + options: import("@smithy/signature-v4").SignatureV4Init & + import("@smithy/signature-v4").SignatureV4CryptoInit + ) => import("@smithy/types").RequestSigner) + | undefined; +}; diff --git a/amplify/functions/deleteDocument/node_modules/@aws-sdk/nested-clients/dist-types/ts3.4/submodules/sso-oidc/runtimeConfig.d.ts b/amplify/functions/deleteDocument/node_modules/@aws-sdk/nested-clients/dist-types/ts3.4/submodules/sso-oidc/runtimeConfig.d.ts new file mode 100644 index 0000000..a24c900 --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@aws-sdk/nested-clients/dist-types/ts3.4/submodules/sso-oidc/runtimeConfig.d.ts @@ -0,0 +1,114 @@ +import { NodeHttpHandler as RequestHandler } from "@smithy/node-http-handler"; +import { SSOOIDCClientConfig } from "./SSOOIDCClient"; +export declare const getRuntimeConfig: (config: SSOOIDCClientConfig) => { + runtime: string; + defaultsMode: import("@smithy/types").Provider< + import("@smithy/smithy-client").ResolvedDefaultsMode + >; + authSchemePreference: string[] | import("@smithy/types").Provider; + bodyLengthChecker: import("@smithy/types").BodyLengthCalculator; + defaultUserAgentProvider: ( + config?: + | import("@aws-sdk/util-user-agent-node").PreviouslyResolved + | undefined + ) => Promise; + maxAttempts: number | import("@smithy/types").Provider; + region: string | import("@smithy/types").Provider; + requestHandler: + | RequestHandler + | import("@smithy/protocol-http").HttpHandler; + retryMode: string | import("@smithy/types").Provider; + sha256: import("@smithy/types").HashConstructor; + streamCollector: import("@smithy/types").StreamCollector; + useDualstackEndpoint: boolean | import("@smithy/types").Provider; + 
useFipsEndpoint: boolean | import("@smithy/types").Provider; + userAgentAppId: string | import("@smithy/types").Provider; + apiVersion: string; + cacheMiddleware?: boolean | undefined; + urlParser: import("@smithy/types").UrlParser; + base64Decoder: import("@smithy/types").Decoder; + base64Encoder: (_input: string | Uint8Array) => string; + utf8Decoder: import("@smithy/types").Decoder; + utf8Encoder: (input: string | Uint8Array) => string; + disableHostPrefix: boolean; + serviceId: string; + profile?: string | undefined; + logger: import("@smithy/types").Logger; + extensions: import("./runtimeExtensions").RuntimeExtension[]; + customUserAgent?: string | import("@smithy/types").UserAgent | undefined; + retryStrategy?: + | import("@smithy/types").RetryStrategy + | import("@smithy/types").RetryStrategyV2 + | undefined; + endpoint?: + | (( + | string + | import("@smithy/types").Endpoint + | import("@smithy/types").Provider + | import("@smithy/types").EndpointV2 + | import("@smithy/types").Provider + ) & + ( + | string + | import("@smithy/types").Provider + | import("@smithy/types").Endpoint + | import("@smithy/types").Provider + | import("@smithy/types").EndpointV2 + | import("@smithy/types").Provider + )) + | undefined; + endpointProvider: ( + endpointParams: import("./endpoint/EndpointParameters").EndpointParameters, + context?: { + logger?: import("@smithy/types").Logger | undefined; + } + ) => import("@smithy/types").EndpointV2; + tls?: boolean | undefined; + serviceConfiguredEndpoint?: undefined; + httpAuthSchemes: + | import("@smithy/types").HttpAuthScheme[] + | ( + | { + schemeId: string; + identityProvider: ( + ipc: import("@smithy/types").IdentityProviderConfig + ) => + | import("@smithy/types").IdentityProvider< + import("@smithy/types").Identity + > + | undefined; + signer: import("@aws-sdk/core").AwsSdkSigV4Signer; + } + | { + schemeId: string; + identityProvider: ( + ipc: import("@smithy/types").IdentityProviderConfig + ) => + | 
import("@smithy/types").IdentityProvider< + import("@smithy/types").Identity + > + | (() => Promise<{}>); + signer: import("@smithy/core").NoAuthSigner; + } + )[]; + httpAuthSchemeProvider: import("./auth/httpAuthSchemeProvider").SSOOIDCHttpAuthSchemeProvider; + credentials?: + | import("@smithy/types").AwsCredentialIdentity + | import("@smithy/types").AwsCredentialIdentityProvider + | undefined; + signer?: + | import("@smithy/types").RequestSigner + | (( + authScheme?: import("@smithy/types").AuthScheme | undefined + ) => Promise) + | undefined; + signingEscapePath?: boolean | undefined; + systemClockOffset?: number | undefined; + signingRegion?: string | undefined; + signerConstructor?: + | (new ( + options: import("@smithy/signature-v4").SignatureV4Init & + import("@smithy/signature-v4").SignatureV4CryptoInit + ) => import("@smithy/types").RequestSigner) + | undefined; +}; diff --git a/amplify/functions/deleteDocument/node_modules/@aws-sdk/nested-clients/dist-types/ts3.4/submodules/sso-oidc/runtimeConfig.native.d.ts b/amplify/functions/deleteDocument/node_modules/@aws-sdk/nested-clients/dist-types/ts3.4/submodules/sso-oidc/runtimeConfig.native.d.ts new file mode 100644 index 0000000..c3610fd --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@aws-sdk/nested-clients/dist-types/ts3.4/submodules/sso-oidc/runtimeConfig.native.d.ts @@ -0,0 +1,124 @@ +import { SSOOIDCClientConfig } from "./SSOOIDCClient"; +export declare const getRuntimeConfig: (config: SSOOIDCClientConfig) => { + runtime: string; + sha256: import("@smithy/types").HashConstructor; + requestHandler: + | import("@smithy/types").NodeHttpHandlerOptions + | import("@smithy/types").FetchHttpHandlerOptions + | Record + | import("@smithy/protocol-http").HttpHandler + | import("@smithy/fetch-http-handler").FetchHttpHandler; + apiVersion: string; + cacheMiddleware?: boolean | undefined; + urlParser: import("@smithy/types").UrlParser; + bodyLengthChecker: 
import("@smithy/types").BodyLengthCalculator; + streamCollector: import("@smithy/types").StreamCollector; + base64Decoder: import("@smithy/types").Decoder; + base64Encoder: (_input: string | Uint8Array) => string; + utf8Decoder: import("@smithy/types").Decoder; + utf8Encoder: (input: string | Uint8Array) => string; + disableHostPrefix: boolean; + serviceId: string; + useDualstackEndpoint: boolean | import("@smithy/types").Provider; + useFipsEndpoint: boolean | import("@smithy/types").Provider; + region: string | import("@smithy/types").Provider; + profile?: string | undefined; + defaultUserAgentProvider: ( + config?: + | import("@aws-sdk/util-user-agent-browser").PreviouslyResolved + | undefined + ) => Promise; + maxAttempts: number | import("@smithy/types").Provider; + retryMode: string | import("@smithy/types").Provider; + logger: import("@smithy/types").Logger; + extensions: import("./runtimeExtensions").RuntimeExtension[]; + defaultsMode: + | import("@smithy/smithy-client").DefaultsMode + | import("@smithy/types").Provider< + import("@smithy/smithy-client").DefaultsMode + >; + customUserAgent?: string | import("@smithy/types").UserAgent | undefined; + userAgentAppId?: + | string + | import("@smithy/types").Provider + | undefined; + retryStrategy?: + | import("@smithy/types").RetryStrategy + | import("@smithy/types").RetryStrategyV2 + | undefined; + endpoint?: + | (( + | string + | import("@smithy/types").Endpoint + | import("@smithy/types").Provider + | import("@smithy/types").EndpointV2 + | import("@smithy/types").Provider + ) & + ( + | string + | import("@smithy/types").Provider + | import("@smithy/types").Endpoint + | import("@smithy/types").Provider + | import("@smithy/types").EndpointV2 + | import("@smithy/types").Provider + )) + | undefined; + endpointProvider: ( + endpointParams: import("./endpoint/EndpointParameters").EndpointParameters, + context?: { + logger?: import("@smithy/types").Logger | undefined; + } + ) => import("@smithy/types").EndpointV2; + 
tls?: boolean | undefined; + serviceConfiguredEndpoint?: undefined; + authSchemePreference?: + | string[] + | import("@smithy/types").Provider + | undefined; + httpAuthSchemes: + | import("@smithy/types").HttpAuthScheme[] + | ( + | { + schemeId: string; + identityProvider: ( + ipc: import("@smithy/types").IdentityProviderConfig + ) => + | import("@smithy/types").IdentityProvider< + import("@smithy/types").Identity + > + | undefined; + signer: import("@aws-sdk/core").AwsSdkSigV4Signer; + } + | { + schemeId: string; + identityProvider: ( + ipc: import("@smithy/types").IdentityProviderConfig + ) => + | import("@smithy/types").IdentityProvider< + import("@smithy/types").Identity + > + | (() => Promise<{}>); + signer: import("@smithy/core").NoAuthSigner; + } + )[]; + httpAuthSchemeProvider: import("./auth/httpAuthSchemeProvider").SSOOIDCHttpAuthSchemeProvider; + credentials?: + | import("@smithy/types").AwsCredentialIdentity + | import("@smithy/types").AwsCredentialIdentityProvider + | undefined; + signer?: + | import("@smithy/types").RequestSigner + | (( + authScheme?: import("@smithy/types").AuthScheme | undefined + ) => Promise) + | undefined; + signingEscapePath?: boolean | undefined; + systemClockOffset?: number | undefined; + signingRegion?: string | undefined; + signerConstructor?: + | (new ( + options: import("@smithy/signature-v4").SignatureV4Init & + import("@smithy/signature-v4").SignatureV4CryptoInit + ) => import("@smithy/types").RequestSigner) + | undefined; +}; diff --git a/amplify/functions/deleteDocument/node_modules/@aws-sdk/nested-clients/dist-types/ts3.4/submodules/sso-oidc/runtimeConfig.shared.d.ts b/amplify/functions/deleteDocument/node_modules/@aws-sdk/nested-clients/dist-types/ts3.4/submodules/sso-oidc/runtimeConfig.shared.d.ts new file mode 100644 index 0000000..130a1e3 --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@aws-sdk/nested-clients/dist-types/ts3.4/submodules/sso-oidc/runtimeConfig.shared.d.ts @@ -0,0 +1,49 @@ +import 
{ AwsSdkSigV4Signer } from "@aws-sdk/core"; +import { NoAuthSigner } from "@smithy/core"; +import { IdentityProviderConfig } from "@smithy/types"; +import { SSOOIDCClientConfig } from "./SSOOIDCClient"; +export declare const getRuntimeConfig: (config: SSOOIDCClientConfig) => { + apiVersion: string; + base64Decoder: import("@smithy/types").Decoder; + base64Encoder: (_input: string | Uint8Array) => string; + disableHostPrefix: boolean; + endpointProvider: ( + endpointParams: import("./endpoint/EndpointParameters").EndpointParameters, + context?: { + logger?: import("@smithy/types").Logger | undefined; + } + ) => import("@smithy/types").EndpointV2; + extensions: import("./runtimeExtensions").RuntimeExtension[]; + httpAuthSchemeProvider: import("./auth/httpAuthSchemeProvider").SSOOIDCHttpAuthSchemeProvider; + httpAuthSchemes: + | import("@smithy/types").HttpAuthScheme[] + | ( + | { + schemeId: string; + identityProvider: ( + ipc: IdentityProviderConfig + ) => + | import("@smithy/types").IdentityProvider< + import("@smithy/types").Identity + > + | undefined; + signer: AwsSdkSigV4Signer; + } + | { + schemeId: string; + identityProvider: ( + ipc: IdentityProviderConfig + ) => + | import("@smithy/types").IdentityProvider< + import("@smithy/types").Identity + > + | (() => Promise<{}>); + signer: NoAuthSigner; + } + )[]; + logger: import("@smithy/types").Logger; + serviceId: string; + urlParser: import("@smithy/types").UrlParser; + utf8Decoder: import("@smithy/types").Decoder; + utf8Encoder: (input: string | Uint8Array) => string; +}; diff --git a/amplify/functions/deleteDocument/node_modules/@aws-sdk/nested-clients/dist-types/ts3.4/submodules/sso-oidc/runtimeExtensions.d.ts b/amplify/functions/deleteDocument/node_modules/@aws-sdk/nested-clients/dist-types/ts3.4/submodules/sso-oidc/runtimeExtensions.d.ts new file mode 100644 index 0000000..d226882 --- /dev/null +++ 
b/amplify/functions/deleteDocument/node_modules/@aws-sdk/nested-clients/dist-types/ts3.4/submodules/sso-oidc/runtimeExtensions.d.ts @@ -0,0 +1,11 @@ +import { SSOOIDCExtensionConfiguration } from "./extensionConfiguration"; +export interface RuntimeExtension { + configure(extensionConfiguration: SSOOIDCExtensionConfiguration): void; +} +export interface RuntimeExtensionsConfig { + extensions: RuntimeExtension[]; +} +export declare const resolveRuntimeExtensions: ( + runtimeConfig: any, + extensions: RuntimeExtension[] +) => any; diff --git a/amplify/functions/deleteDocument/node_modules/@aws-sdk/nested-clients/dist-types/ts3.4/submodules/sts/STS.d.ts b/amplify/functions/deleteDocument/node_modules/@aws-sdk/nested-clients/dist-types/ts3.4/submodules/sts/STS.d.ts new file mode 100644 index 0000000..cca9cbb --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@aws-sdk/nested-clients/dist-types/ts3.4/submodules/sts/STS.d.ts @@ -0,0 +1,39 @@ +import { HttpHandlerOptions as __HttpHandlerOptions } from "@smithy/types"; +import { + AssumeRoleCommandInput, + AssumeRoleCommandOutput, +} from "./commands/AssumeRoleCommand"; +import { + AssumeRoleWithWebIdentityCommandInput, + AssumeRoleWithWebIdentityCommandOutput, +} from "./commands/AssumeRoleWithWebIdentityCommand"; +import { STSClient } from "./STSClient"; +export interface STS { + assumeRole( + args: AssumeRoleCommandInput, + options?: __HttpHandlerOptions + ): Promise; + assumeRole( + args: AssumeRoleCommandInput, + cb: (err: any, data?: AssumeRoleCommandOutput) => void + ): void; + assumeRole( + args: AssumeRoleCommandInput, + options: __HttpHandlerOptions, + cb: (err: any, data?: AssumeRoleCommandOutput) => void + ): void; + assumeRoleWithWebIdentity( + args: AssumeRoleWithWebIdentityCommandInput, + options?: __HttpHandlerOptions + ): Promise; + assumeRoleWithWebIdentity( + args: AssumeRoleWithWebIdentityCommandInput, + cb: (err: any, data?: AssumeRoleWithWebIdentityCommandOutput) => void + ): void; + 
assumeRoleWithWebIdentity( + args: AssumeRoleWithWebIdentityCommandInput, + options: __HttpHandlerOptions, + cb: (err: any, data?: AssumeRoleWithWebIdentityCommandOutput) => void + ): void; +} +export declare class STS extends STSClient implements STS {} diff --git a/amplify/functions/deleteDocument/node_modules/@aws-sdk/nested-clients/dist-types/ts3.4/submodules/sts/STSClient.d.ts b/amplify/functions/deleteDocument/node_modules/@aws-sdk/nested-clients/dist-types/ts3.4/submodules/sts/STSClient.d.ts new file mode 100644 index 0000000..8bffddf --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@aws-sdk/nested-clients/dist-types/ts3.4/submodules/sts/STSClient.d.ts @@ -0,0 +1,128 @@ +import { + HostHeaderInputConfig, + HostHeaderResolvedConfig, +} from "@aws-sdk/middleware-host-header"; +import { + UserAgentInputConfig, + UserAgentResolvedConfig, +} from "@aws-sdk/middleware-user-agent"; +import { + RegionInputConfig, + RegionResolvedConfig, +} from "@smithy/config-resolver"; +import { + EndpointInputConfig, + EndpointResolvedConfig, +} from "@smithy/middleware-endpoint"; +import { + RetryInputConfig, + RetryResolvedConfig, +} from "@smithy/middleware-retry"; +import { HttpHandlerUserInput as __HttpHandlerUserInput } from "@smithy/protocol-http"; +import { + Client as __Client, + DefaultsMode as __DefaultsMode, + SmithyConfiguration as __SmithyConfiguration, + SmithyResolvedConfiguration as __SmithyResolvedConfiguration, +} from "@smithy/smithy-client"; +import { + AwsCredentialIdentityProvider, + BodyLengthCalculator as __BodyLengthCalculator, + CheckOptionalClientConfig as __CheckOptionalClientConfig, + ChecksumConstructor as __ChecksumConstructor, + Decoder as __Decoder, + Encoder as __Encoder, + HashConstructor as __HashConstructor, + HttpHandlerOptions as __HttpHandlerOptions, + Logger as __Logger, + Provider as __Provider, + Provider, + StreamCollector as __StreamCollector, + UrlParser as __UrlParser, + UserAgent as __UserAgent, +} from 
"@smithy/types"; +import { + HttpAuthSchemeInputConfig, + HttpAuthSchemeResolvedConfig, +} from "./auth/httpAuthSchemeProvider"; +import { + AssumeRoleCommandInput, + AssumeRoleCommandOutput, +} from "./commands/AssumeRoleCommand"; +import { + AssumeRoleWithWebIdentityCommandInput, + AssumeRoleWithWebIdentityCommandOutput, +} from "./commands/AssumeRoleWithWebIdentityCommand"; +import { + ClientInputEndpointParameters, + ClientResolvedEndpointParameters, + EndpointParameters, +} from "./endpoint/EndpointParameters"; +import { RuntimeExtension, RuntimeExtensionsConfig } from "./runtimeExtensions"; +export { __Client }; +export type ServiceInputTypes = + | AssumeRoleCommandInput + | AssumeRoleWithWebIdentityCommandInput; +export type ServiceOutputTypes = + | AssumeRoleCommandOutput + | AssumeRoleWithWebIdentityCommandOutput; +export interface ClientDefaults + extends Partial<__SmithyConfiguration<__HttpHandlerOptions>> { + requestHandler?: __HttpHandlerUserInput; + sha256?: __ChecksumConstructor | __HashConstructor; + urlParser?: __UrlParser; + bodyLengthChecker?: __BodyLengthCalculator; + streamCollector?: __StreamCollector; + base64Decoder?: __Decoder; + base64Encoder?: __Encoder; + utf8Decoder?: __Decoder; + utf8Encoder?: __Encoder; + runtime?: string; + disableHostPrefix?: boolean; + serviceId?: string; + useDualstackEndpoint?: boolean | __Provider; + useFipsEndpoint?: boolean | __Provider; + region?: string | __Provider; + profile?: string; + defaultUserAgentProvider?: Provider<__UserAgent>; + credentialDefaultProvider?: (input: any) => AwsCredentialIdentityProvider; + maxAttempts?: number | __Provider; + retryMode?: string | __Provider; + logger?: __Logger; + extensions?: RuntimeExtension[]; + defaultsMode?: __DefaultsMode | __Provider<__DefaultsMode>; +} +export type STSClientConfigType = Partial< + __SmithyConfiguration<__HttpHandlerOptions> +> & + ClientDefaults & + UserAgentInputConfig & + RetryInputConfig & + RegionInputConfig & + HostHeaderInputConfig & + 
EndpointInputConfig & + HttpAuthSchemeInputConfig & + ClientInputEndpointParameters; +export interface STSClientConfig extends STSClientConfigType {} +export type STSClientResolvedConfigType = + __SmithyResolvedConfiguration<__HttpHandlerOptions> & + Required & + RuntimeExtensionsConfig & + UserAgentResolvedConfig & + RetryResolvedConfig & + RegionResolvedConfig & + HostHeaderResolvedConfig & + EndpointResolvedConfig & + HttpAuthSchemeResolvedConfig & + ClientResolvedEndpointParameters; +export interface STSClientResolvedConfig extends STSClientResolvedConfigType {} +export declare class STSClient extends __Client< + __HttpHandlerOptions, + ServiceInputTypes, + ServiceOutputTypes, + STSClientResolvedConfig +> { + readonly config: STSClientResolvedConfig; + constructor(...[configuration]: __CheckOptionalClientConfig); + destroy(): void; +} diff --git a/amplify/functions/deleteDocument/node_modules/@aws-sdk/nested-clients/dist-types/ts3.4/submodules/sts/auth/httpAuthExtensionConfiguration.d.ts b/amplify/functions/deleteDocument/node_modules/@aws-sdk/nested-clients/dist-types/ts3.4/submodules/sts/auth/httpAuthExtensionConfiguration.d.ts new file mode 100644 index 0000000..ef83018 --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@aws-sdk/nested-clients/dist-types/ts3.4/submodules/sts/auth/httpAuthExtensionConfiguration.d.ts @@ -0,0 +1,32 @@ +import { + AwsCredentialIdentity, + AwsCredentialIdentityProvider, + HttpAuthScheme, +} from "@smithy/types"; +import { STSHttpAuthSchemeProvider } from "./httpAuthSchemeProvider"; +export interface HttpAuthExtensionConfiguration { + setHttpAuthScheme(httpAuthScheme: HttpAuthScheme): void; + httpAuthSchemes(): HttpAuthScheme[]; + setHttpAuthSchemeProvider( + httpAuthSchemeProvider: STSHttpAuthSchemeProvider + ): void; + httpAuthSchemeProvider(): STSHttpAuthSchemeProvider; + setCredentials( + credentials: AwsCredentialIdentity | AwsCredentialIdentityProvider + ): void; + credentials(): + | AwsCredentialIdentity + | 
AwsCredentialIdentityProvider + | undefined; +} +export type HttpAuthRuntimeConfig = Partial<{ + httpAuthSchemes: HttpAuthScheme[]; + httpAuthSchemeProvider: STSHttpAuthSchemeProvider; + credentials: AwsCredentialIdentity | AwsCredentialIdentityProvider; +}>; +export declare const getHttpAuthExtensionConfiguration: ( + runtimeConfig: HttpAuthRuntimeConfig +) => HttpAuthExtensionConfiguration; +export declare const resolveHttpAuthRuntimeConfig: ( + config: HttpAuthExtensionConfiguration +) => HttpAuthRuntimeConfig; diff --git a/amplify/functions/deleteDocument/node_modules/@aws-sdk/nested-clients/dist-types/ts3.4/submodules/sts/auth/httpAuthSchemeProvider.d.ts b/amplify/functions/deleteDocument/node_modules/@aws-sdk/nested-clients/dist-types/ts3.4/submodules/sts/auth/httpAuthSchemeProvider.d.ts new file mode 100644 index 0000000..0e17e2f --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@aws-sdk/nested-clients/dist-types/ts3.4/submodules/sts/auth/httpAuthSchemeProvider.d.ts @@ -0,0 +1,57 @@ +import { + AwsSdkSigV4AuthInputConfig, + AwsSdkSigV4AuthResolvedConfig, + AwsSdkSigV4PreviouslyResolved, +} from "@aws-sdk/core"; +import { + Client, + HandlerExecutionContext, + HttpAuthScheme, + HttpAuthSchemeParameters, + HttpAuthSchemeParametersProvider, + HttpAuthSchemeProvider, + Provider, +} from "@smithy/types"; +import { STSClientResolvedConfig } from "../STSClient"; +export interface STSHttpAuthSchemeParameters extends HttpAuthSchemeParameters { + region?: string; +} +export interface STSHttpAuthSchemeParametersProvider + extends HttpAuthSchemeParametersProvider< + STSClientResolvedConfig, + HandlerExecutionContext, + STSHttpAuthSchemeParameters, + object + > {} +export declare const defaultSTSHttpAuthSchemeParametersProvider: ( + config: STSClientResolvedConfig, + context: HandlerExecutionContext, + input: object +) => Promise; +export interface STSHttpAuthSchemeProvider + extends HttpAuthSchemeProvider {} +export declare const 
defaultSTSHttpAuthSchemeProvider: STSHttpAuthSchemeProvider; +export interface StsAuthInputConfig {} +export interface StsAuthResolvedConfig { + stsClientCtor: new (clientConfig: any) => Client; +} +export declare const resolveStsAuthConfig: ( + input: T & StsAuthInputConfig +) => T & StsAuthResolvedConfig; +export interface HttpAuthSchemeInputConfig + extends StsAuthInputConfig, + AwsSdkSigV4AuthInputConfig { + authSchemePreference?: string[] | Provider; + httpAuthSchemes?: HttpAuthScheme[]; + httpAuthSchemeProvider?: STSHttpAuthSchemeProvider; +} +export interface HttpAuthSchemeResolvedConfig + extends StsAuthResolvedConfig, + AwsSdkSigV4AuthResolvedConfig { + readonly authSchemePreference: Provider; + readonly httpAuthSchemes: HttpAuthScheme[]; + readonly httpAuthSchemeProvider: STSHttpAuthSchemeProvider; +} +export declare const resolveHttpAuthSchemeConfig: ( + config: T & HttpAuthSchemeInputConfig & AwsSdkSigV4PreviouslyResolved +) => T & HttpAuthSchemeResolvedConfig; diff --git a/amplify/functions/deleteDocument/node_modules/@aws-sdk/nested-clients/dist-types/ts3.4/submodules/sts/commands/AssumeRoleCommand.d.ts b/amplify/functions/deleteDocument/node_modules/@aws-sdk/nested-clients/dist-types/ts3.4/submodules/sts/commands/AssumeRoleCommand.d.ts new file mode 100644 index 0000000..9333fbb --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@aws-sdk/nested-clients/dist-types/ts3.4/submodules/sts/commands/AssumeRoleCommand.d.ts @@ -0,0 +1,47 @@ +import { Command as $Command } from "@smithy/smithy-client"; +import { MetadataBearer as __MetadataBearer } from "@smithy/types"; +import { AssumeRoleRequest, AssumeRoleResponse } from "../models/models_0"; +import { + ServiceInputTypes, + ServiceOutputTypes, + STSClientResolvedConfig, +} from "../STSClient"; +export { __MetadataBearer }; +export { $Command }; +export interface AssumeRoleCommandInput extends AssumeRoleRequest {} +export interface AssumeRoleCommandOutput + extends AssumeRoleResponse, + 
__MetadataBearer {} +declare const AssumeRoleCommand_base: { + new ( + input: AssumeRoleCommandInput + ): import("@smithy/smithy-client").CommandImpl< + AssumeRoleCommandInput, + AssumeRoleCommandOutput, + STSClientResolvedConfig, + ServiceInputTypes, + ServiceOutputTypes + >; + new ( + __0_0: AssumeRoleCommandInput + ): import("@smithy/smithy-client").CommandImpl< + AssumeRoleCommandInput, + AssumeRoleCommandOutput, + STSClientResolvedConfig, + ServiceInputTypes, + ServiceOutputTypes + >; + getEndpointParameterInstructions(): import("@smithy/middleware-endpoint").EndpointParameterInstructions; +}; +export declare class AssumeRoleCommand extends AssumeRoleCommand_base { + protected static __types: { + api: { + input: AssumeRoleRequest; + output: AssumeRoleResponse; + }; + sdk: { + input: AssumeRoleCommandInput; + output: AssumeRoleCommandOutput; + }; + }; +} diff --git a/amplify/functions/deleteDocument/node_modules/@aws-sdk/nested-clients/dist-types/ts3.4/submodules/sts/commands/AssumeRoleWithWebIdentityCommand.d.ts b/amplify/functions/deleteDocument/node_modules/@aws-sdk/nested-clients/dist-types/ts3.4/submodules/sts/commands/AssumeRoleWithWebIdentityCommand.d.ts new file mode 100644 index 0000000..222e034 --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@aws-sdk/nested-clients/dist-types/ts3.4/submodules/sts/commands/AssumeRoleWithWebIdentityCommand.d.ts @@ -0,0 +1,51 @@ +import { Command as $Command } from "@smithy/smithy-client"; +import { MetadataBearer as __MetadataBearer } from "@smithy/types"; +import { + AssumeRoleWithWebIdentityRequest, + AssumeRoleWithWebIdentityResponse, +} from "../models/models_0"; +import { + ServiceInputTypes, + ServiceOutputTypes, + STSClientResolvedConfig, +} from "../STSClient"; +export { __MetadataBearer }; +export { $Command }; +export interface AssumeRoleWithWebIdentityCommandInput + extends AssumeRoleWithWebIdentityRequest {} +export interface AssumeRoleWithWebIdentityCommandOutput + extends 
AssumeRoleWithWebIdentityResponse, + __MetadataBearer {} +declare const AssumeRoleWithWebIdentityCommand_base: { + new ( + input: AssumeRoleWithWebIdentityCommandInput + ): import("@smithy/smithy-client").CommandImpl< + AssumeRoleWithWebIdentityCommandInput, + AssumeRoleWithWebIdentityCommandOutput, + STSClientResolvedConfig, + ServiceInputTypes, + ServiceOutputTypes + >; + new ( + __0_0: AssumeRoleWithWebIdentityCommandInput + ): import("@smithy/smithy-client").CommandImpl< + AssumeRoleWithWebIdentityCommandInput, + AssumeRoleWithWebIdentityCommandOutput, + STSClientResolvedConfig, + ServiceInputTypes, + ServiceOutputTypes + >; + getEndpointParameterInstructions(): import("@smithy/middleware-endpoint").EndpointParameterInstructions; +}; +export declare class AssumeRoleWithWebIdentityCommand extends AssumeRoleWithWebIdentityCommand_base { + protected static __types: { + api: { + input: AssumeRoleWithWebIdentityRequest; + output: AssumeRoleWithWebIdentityResponse; + }; + sdk: { + input: AssumeRoleWithWebIdentityCommandInput; + output: AssumeRoleWithWebIdentityCommandOutput; + }; + }; +} diff --git a/amplify/functions/deleteDocument/node_modules/@aws-sdk/nested-clients/dist-types/ts3.4/submodules/sts/commands/index.d.ts b/amplify/functions/deleteDocument/node_modules/@aws-sdk/nested-clients/dist-types/ts3.4/submodules/sts/commands/index.d.ts new file mode 100644 index 0000000..0f200f5 --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@aws-sdk/nested-clients/dist-types/ts3.4/submodules/sts/commands/index.d.ts @@ -0,0 +1,2 @@ +export * from "./AssumeRoleCommand"; +export * from "./AssumeRoleWithWebIdentityCommand"; diff --git a/amplify/functions/deleteDocument/node_modules/@aws-sdk/nested-clients/dist-types/ts3.4/submodules/sts/defaultRoleAssumers.d.ts b/amplify/functions/deleteDocument/node_modules/@aws-sdk/nested-clients/dist-types/ts3.4/submodules/sts/defaultRoleAssumers.d.ts new file mode 100644 index 0000000..b6f22cc --- /dev/null +++ 
b/amplify/functions/deleteDocument/node_modules/@aws-sdk/nested-clients/dist-types/ts3.4/submodules/sts/defaultRoleAssumers.d.ts @@ -0,0 +1,19 @@ +import { Pluggable } from "@smithy/types"; +import { + DefaultCredentialProvider, + RoleAssumer, + RoleAssumerWithWebIdentity, + STSRoleAssumerOptions, +} from "./defaultStsRoleAssumers"; +import { ServiceInputTypes, ServiceOutputTypes } from "./STSClient"; +export declare const getDefaultRoleAssumer: ( + stsOptions?: STSRoleAssumerOptions, + stsPlugins?: Pluggable[] +) => RoleAssumer; +export declare const getDefaultRoleAssumerWithWebIdentity: ( + stsOptions?: STSRoleAssumerOptions, + stsPlugins?: Pluggable[] +) => RoleAssumerWithWebIdentity; +export declare const decorateDefaultCredentialProvider: ( + provider: DefaultCredentialProvider +) => DefaultCredentialProvider; diff --git a/amplify/functions/deleteDocument/node_modules/@aws-sdk/nested-clients/dist-types/ts3.4/submodules/sts/defaultStsRoleAssumers.d.ts b/amplify/functions/deleteDocument/node_modules/@aws-sdk/nested-clients/dist-types/ts3.4/submodules/sts/defaultStsRoleAssumers.d.ts new file mode 100644 index 0000000..3831379 --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@aws-sdk/nested-clients/dist-types/ts3.4/submodules/sts/defaultStsRoleAssumers.d.ts @@ -0,0 +1,33 @@ +import { CredentialProviderOptions } from "@aws-sdk/types"; +import { AwsCredentialIdentity, Logger, Provider } from "@smithy/types"; +import { AssumeRoleCommandInput } from "./commands/AssumeRoleCommand"; +import { AssumeRoleWithWebIdentityCommandInput } from "./commands/AssumeRoleWithWebIdentityCommand"; +import { STSClient, STSClientConfig } from "./STSClient"; +export type STSRoleAssumerOptions = Pick< + STSClientConfig, + "logger" | "region" | "requestHandler" +> & { + credentialProviderLogger?: Logger; + parentClientConfig?: CredentialProviderOptions["parentClientConfig"]; +}; +export type RoleAssumer = ( + sourceCreds: AwsCredentialIdentity, + params: 
AssumeRoleCommandInput +) => Promise; +export declare const getDefaultRoleAssumer: ( + stsOptions: STSRoleAssumerOptions, + STSClient: new (options: STSClientConfig) => STSClient +) => RoleAssumer; +export type RoleAssumerWithWebIdentity = ( + params: AssumeRoleWithWebIdentityCommandInput +) => Promise; +export declare const getDefaultRoleAssumerWithWebIdentity: ( + stsOptions: STSRoleAssumerOptions, + STSClient: new (options: STSClientConfig) => STSClient +) => RoleAssumerWithWebIdentity; +export type DefaultCredentialProvider = ( + input: any +) => Provider; +export declare const decorateDefaultCredentialProvider: ( + provider: DefaultCredentialProvider +) => DefaultCredentialProvider; diff --git a/amplify/functions/deleteDocument/node_modules/@aws-sdk/nested-clients/dist-types/ts3.4/submodules/sts/endpoint/EndpointParameters.d.ts b/amplify/functions/deleteDocument/node_modules/@aws-sdk/nested-clients/dist-types/ts3.4/submodules/sts/endpoint/EndpointParameters.d.ts new file mode 100644 index 0000000..33567fd --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@aws-sdk/nested-clients/dist-types/ts3.4/submodules/sts/endpoint/EndpointParameters.d.ts @@ -0,0 +1,57 @@ +import { + Endpoint, + EndpointParameters as __EndpointParameters, + EndpointV2, + Provider, +} from "@smithy/types"; +export interface ClientInputEndpointParameters { + region?: string | Provider; + useDualstackEndpoint?: boolean | Provider; + useFipsEndpoint?: boolean | Provider; + endpoint?: + | string + | Provider + | Endpoint + | Provider + | EndpointV2 + | Provider; + useGlobalEndpoint?: boolean | Provider; +} +export type ClientResolvedEndpointParameters = ClientInputEndpointParameters & { + defaultSigningName: string; +}; +export declare const resolveClientEndpointParameters: ( + options: T & ClientInputEndpointParameters +) => T & + ClientInputEndpointParameters & { + defaultSigningName: string; + }; +export declare const commonParams: { + readonly UseGlobalEndpoint: { + readonly 
type: "builtInParams"; + readonly name: "useGlobalEndpoint"; + }; + readonly UseFIPS: { + readonly type: "builtInParams"; + readonly name: "useFipsEndpoint"; + }; + readonly Endpoint: { + readonly type: "builtInParams"; + readonly name: "endpoint"; + }; + readonly Region: { + readonly type: "builtInParams"; + readonly name: "region"; + }; + readonly UseDualStack: { + readonly type: "builtInParams"; + readonly name: "useDualstackEndpoint"; + }; +}; +export interface EndpointParameters extends __EndpointParameters { + Region?: string; + UseDualStack?: boolean; + UseFIPS?: boolean; + Endpoint?: string; + UseGlobalEndpoint?: boolean; +} diff --git a/amplify/functions/deleteDocument/node_modules/@aws-sdk/nested-clients/dist-types/ts3.4/submodules/sts/endpoint/endpointResolver.d.ts b/amplify/functions/deleteDocument/node_modules/@aws-sdk/nested-clients/dist-types/ts3.4/submodules/sts/endpoint/endpointResolver.d.ts new file mode 100644 index 0000000..5909925 --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@aws-sdk/nested-clients/dist-types/ts3.4/submodules/sts/endpoint/endpointResolver.d.ts @@ -0,0 +1,8 @@ +import { EndpointV2, Logger } from "@smithy/types"; +import { EndpointParameters } from "./EndpointParameters"; +export declare const defaultEndpointResolver: ( + endpointParams: EndpointParameters, + context?: { + logger?: Logger; + } +) => EndpointV2; diff --git a/amplify/functions/deleteDocument/node_modules/@aws-sdk/nested-clients/dist-types/ts3.4/submodules/sts/endpoint/ruleset.d.ts b/amplify/functions/deleteDocument/node_modules/@aws-sdk/nested-clients/dist-types/ts3.4/submodules/sts/endpoint/ruleset.d.ts new file mode 100644 index 0000000..4b23899 --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@aws-sdk/nested-clients/dist-types/ts3.4/submodules/sts/endpoint/ruleset.d.ts @@ -0,0 +1,2 @@ +import { RuleSetObject } from "@smithy/types"; +export declare const ruleSet: RuleSetObject; diff --git 
a/amplify/functions/deleteDocument/node_modules/@aws-sdk/nested-clients/dist-types/ts3.4/submodules/sts/extensionConfiguration.d.ts b/amplify/functions/deleteDocument/node_modules/@aws-sdk/nested-clients/dist-types/ts3.4/submodules/sts/extensionConfiguration.d.ts new file mode 100644 index 0000000..14b124b --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@aws-sdk/nested-clients/dist-types/ts3.4/submodules/sts/extensionConfiguration.d.ts @@ -0,0 +1,9 @@ +import { AwsRegionExtensionConfiguration } from "@aws-sdk/types"; +import { HttpHandlerExtensionConfiguration } from "@smithy/protocol-http"; +import { DefaultExtensionConfiguration } from "@smithy/types"; +import { HttpAuthExtensionConfiguration } from "./auth/httpAuthExtensionConfiguration"; +export interface STSExtensionConfiguration + extends HttpHandlerExtensionConfiguration, + DefaultExtensionConfiguration, + AwsRegionExtensionConfiguration, + HttpAuthExtensionConfiguration {} diff --git a/amplify/functions/deleteDocument/node_modules/@aws-sdk/nested-clients/dist-types/ts3.4/submodules/sts/index.d.ts b/amplify/functions/deleteDocument/node_modules/@aws-sdk/nested-clients/dist-types/ts3.4/submodules/sts/index.d.ts new file mode 100644 index 0000000..157a306 --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@aws-sdk/nested-clients/dist-types/ts3.4/submodules/sts/index.d.ts @@ -0,0 +1,9 @@ +export * from "./STSClient"; +export * from "./STS"; +export { ClientInputEndpointParameters } from "./endpoint/EndpointParameters"; +export { RuntimeExtension } from "./runtimeExtensions"; +export { STSExtensionConfiguration } from "./extensionConfiguration"; +export * from "./commands"; +export * from "./models"; +export * from "./defaultRoleAssumers"; +export { STSServiceException } from "./models/STSServiceException"; diff --git a/amplify/functions/deleteDocument/node_modules/@aws-sdk/nested-clients/dist-types/ts3.4/submodules/sts/models/STSServiceException.d.ts 
b/amplify/functions/deleteDocument/node_modules/@aws-sdk/nested-clients/dist-types/ts3.4/submodules/sts/models/STSServiceException.d.ts new file mode 100644 index 0000000..95fc485 --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@aws-sdk/nested-clients/dist-types/ts3.4/submodules/sts/models/STSServiceException.d.ts @@ -0,0 +1,9 @@ +import { + ServiceException as __ServiceException, + ServiceExceptionOptions as __ServiceExceptionOptions, +} from "@smithy/smithy-client"; +export { __ServiceExceptionOptions }; +export { __ServiceException }; +export declare class STSServiceException extends __ServiceException { + constructor(options: __ServiceExceptionOptions); +} diff --git a/amplify/functions/deleteDocument/node_modules/@aws-sdk/nested-clients/dist-types/ts3.4/submodules/sts/models/index.d.ts b/amplify/functions/deleteDocument/node_modules/@aws-sdk/nested-clients/dist-types/ts3.4/submodules/sts/models/index.d.ts new file mode 100644 index 0000000..09c5d6e --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@aws-sdk/nested-clients/dist-types/ts3.4/submodules/sts/models/index.d.ts @@ -0,0 +1 @@ +export * from "./models_0"; diff --git a/amplify/functions/deleteDocument/node_modules/@aws-sdk/nested-clients/dist-types/ts3.4/submodules/sts/models/models_0.d.ts b/amplify/functions/deleteDocument/node_modules/@aws-sdk/nested-clients/dist-types/ts3.4/submodules/sts/models/models_0.d.ts new file mode 100644 index 0000000..1cba371 --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@aws-sdk/nested-clients/dist-types/ts3.4/submodules/sts/models/models_0.d.ts @@ -0,0 +1,123 @@ +import { ExceptionOptionType as __ExceptionOptionType } from "@smithy/smithy-client"; +import { STSServiceException as __BaseException } from "./STSServiceException"; +export interface AssumedRoleUser { + AssumedRoleId: string | undefined; + Arn: string | undefined; +} +export interface PolicyDescriptorType { + arn?: string | undefined; +} +export interface 
ProvidedContext { + ProviderArn?: string | undefined; + ContextAssertion?: string | undefined; +} +export interface Tag { + Key: string | undefined; + Value: string | undefined; +} +export interface AssumeRoleRequest { + RoleArn: string | undefined; + RoleSessionName: string | undefined; + PolicyArns?: PolicyDescriptorType[] | undefined; + Policy?: string | undefined; + DurationSeconds?: number | undefined; + Tags?: Tag[] | undefined; + TransitiveTagKeys?: string[] | undefined; + ExternalId?: string | undefined; + SerialNumber?: string | undefined; + TokenCode?: string | undefined; + SourceIdentity?: string | undefined; + ProvidedContexts?: ProvidedContext[] | undefined; +} +export interface Credentials { + AccessKeyId: string | undefined; + SecretAccessKey: string | undefined; + SessionToken: string | undefined; + Expiration: Date | undefined; +} +export declare const CredentialsFilterSensitiveLog: (obj: Credentials) => any; +export interface AssumeRoleResponse { + Credentials?: Credentials | undefined; + AssumedRoleUser?: AssumedRoleUser | undefined; + PackedPolicySize?: number | undefined; + SourceIdentity?: string | undefined; +} +export declare const AssumeRoleResponseFilterSensitiveLog: ( + obj: AssumeRoleResponse +) => any; +export declare class ExpiredTokenException extends __BaseException { + readonly name: "ExpiredTokenException"; + readonly $fault: "client"; + constructor( + opts: __ExceptionOptionType + ); +} +export declare class MalformedPolicyDocumentException extends __BaseException { + readonly name: "MalformedPolicyDocumentException"; + readonly $fault: "client"; + constructor( + opts: __ExceptionOptionType< + MalformedPolicyDocumentException, + __BaseException + > + ); +} +export declare class PackedPolicyTooLargeException extends __BaseException { + readonly name: "PackedPolicyTooLargeException"; + readonly $fault: "client"; + constructor( + opts: __ExceptionOptionType + ); +} +export declare class RegionDisabledException extends __BaseException 
{ + readonly name: "RegionDisabledException"; + readonly $fault: "client"; + constructor( + opts: __ExceptionOptionType + ); +} +export declare class IDPRejectedClaimException extends __BaseException { + readonly name: "IDPRejectedClaimException"; + readonly $fault: "client"; + constructor( + opts: __ExceptionOptionType + ); +} +export declare class InvalidIdentityTokenException extends __BaseException { + readonly name: "InvalidIdentityTokenException"; + readonly $fault: "client"; + constructor( + opts: __ExceptionOptionType + ); +} +export interface AssumeRoleWithWebIdentityRequest { + RoleArn: string | undefined; + RoleSessionName: string | undefined; + WebIdentityToken: string | undefined; + ProviderId?: string | undefined; + PolicyArns?: PolicyDescriptorType[] | undefined; + Policy?: string | undefined; + DurationSeconds?: number | undefined; +} +export declare const AssumeRoleWithWebIdentityRequestFilterSensitiveLog: ( + obj: AssumeRoleWithWebIdentityRequest +) => any; +export interface AssumeRoleWithWebIdentityResponse { + Credentials?: Credentials | undefined; + SubjectFromWebIdentityToken?: string | undefined; + AssumedRoleUser?: AssumedRoleUser | undefined; + PackedPolicySize?: number | undefined; + Provider?: string | undefined; + Audience?: string | undefined; + SourceIdentity?: string | undefined; +} +export declare const AssumeRoleWithWebIdentityResponseFilterSensitiveLog: ( + obj: AssumeRoleWithWebIdentityResponse +) => any; +export declare class IDPCommunicationErrorException extends __BaseException { + readonly name: "IDPCommunicationErrorException"; + readonly $fault: "client"; + constructor( + opts: __ExceptionOptionType + ); +} diff --git a/amplify/functions/deleteDocument/node_modules/@aws-sdk/nested-clients/dist-types/ts3.4/submodules/sts/protocols/Aws_query.d.ts b/amplify/functions/deleteDocument/node_modules/@aws-sdk/nested-clients/dist-types/ts3.4/submodules/sts/protocols/Aws_query.d.ts new file mode 100644 index 0000000..1d03deb --- 
/dev/null +++ b/amplify/functions/deleteDocument/node_modules/@aws-sdk/nested-clients/dist-types/ts3.4/submodules/sts/protocols/Aws_query.d.ts @@ -0,0 +1,29 @@ +import { + HttpRequest as __HttpRequest, + HttpResponse as __HttpResponse, +} from "@smithy/protocol-http"; +import { SerdeContext as __SerdeContext } from "@smithy/types"; +import { + AssumeRoleCommandInput, + AssumeRoleCommandOutput, +} from "../commands/AssumeRoleCommand"; +import { + AssumeRoleWithWebIdentityCommandInput, + AssumeRoleWithWebIdentityCommandOutput, +} from "../commands/AssumeRoleWithWebIdentityCommand"; +export declare const se_AssumeRoleCommand: ( + input: AssumeRoleCommandInput, + context: __SerdeContext +) => Promise<__HttpRequest>; +export declare const se_AssumeRoleWithWebIdentityCommand: ( + input: AssumeRoleWithWebIdentityCommandInput, + context: __SerdeContext +) => Promise<__HttpRequest>; +export declare const de_AssumeRoleCommand: ( + output: __HttpResponse, + context: __SerdeContext +) => Promise; +export declare const de_AssumeRoleWithWebIdentityCommand: ( + output: __HttpResponse, + context: __SerdeContext +) => Promise; diff --git a/amplify/functions/deleteDocument/node_modules/@aws-sdk/nested-clients/dist-types/ts3.4/submodules/sts/runtimeConfig.browser.d.ts b/amplify/functions/deleteDocument/node_modules/@aws-sdk/nested-clients/dist-types/ts3.4/submodules/sts/runtimeConfig.browser.d.ts new file mode 100644 index 0000000..54a4e79 --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@aws-sdk/nested-clients/dist-types/ts3.4/submodules/sts/runtimeConfig.browser.d.ts @@ -0,0 +1,131 @@ +import { FetchHttpHandler as RequestHandler } from "@smithy/fetch-http-handler"; +import { STSClientConfig } from "./STSClient"; +export declare const getRuntimeConfig: (config: STSClientConfig) => { + runtime: string; + defaultsMode: import("@smithy/types").Provider< + import("@smithy/smithy-client").ResolvedDefaultsMode + >; + bodyLengthChecker: 
import("@smithy/types").BodyLengthCalculator; + credentialDefaultProvider: + | ((input: any) => import("@smithy/types").AwsCredentialIdentityProvider) + | (( + _: unknown + ) => () => Promise); + defaultUserAgentProvider: ( + config?: + | import("@aws-sdk/util-user-agent-browser").PreviouslyResolved + | undefined + ) => Promise; + maxAttempts: number | import("@smithy/types").Provider; + region: string | import("@smithy/types").Provider; + requestHandler: + | import("@smithy/protocol-http").HttpHandler + | RequestHandler; + retryMode: string | import("@smithy/types").Provider; + sha256: import("@smithy/types").HashConstructor; + streamCollector: import("@smithy/types").StreamCollector; + useDualstackEndpoint: boolean | import("@smithy/types").Provider; + useFipsEndpoint: boolean | import("@smithy/types").Provider; + apiVersion: string; + cacheMiddleware?: boolean | undefined; + urlParser: import("@smithy/types").UrlParser; + base64Decoder: import("@smithy/types").Decoder; + base64Encoder: (_input: string | Uint8Array) => string; + utf8Decoder: import("@smithy/types").Decoder; + utf8Encoder: (input: string | Uint8Array) => string; + disableHostPrefix: boolean; + serviceId: string; + profile?: string | undefined; + logger: import("@smithy/types").Logger; + extensions: import("./runtimeExtensions").RuntimeExtension[]; + customUserAgent?: string | import("@smithy/types").UserAgent | undefined; + userAgentAppId?: + | string + | import("@smithy/types").Provider + | undefined; + retryStrategy?: + | import("@smithy/types").RetryStrategy + | import("@smithy/types").RetryStrategyV2 + | undefined; + endpoint?: + | (( + | string + | import("@smithy/types").Endpoint + | import("@smithy/types").Provider + | import("@smithy/types").EndpointV2 + | import("@smithy/types").Provider + ) & + ( + | string + | import("@smithy/types").Provider + | import("@smithy/types").Endpoint + | import("@smithy/types").Provider + | import("@smithy/types").EndpointV2 + | 
import("@smithy/types").Provider + )) + | undefined; + endpointProvider: ( + params: import("./endpoint/EndpointParameters").EndpointParameters, + context?: + | { + logger?: import("@smithy/types").Logger | undefined; + } + | undefined + ) => import("@smithy/types").EndpointV2; + tls?: boolean | undefined; + serviceConfiguredEndpoint?: undefined; + authSchemePreference?: + | string[] + | import("@smithy/types").Provider + | undefined; + httpAuthSchemes: + | import("@smithy/types").HttpAuthScheme[] + | ( + | { + schemeId: string; + identityProvider: ( + ipc: import("@smithy/types").IdentityProviderConfig + ) => + | import("@smithy/types").IdentityProvider< + import("@smithy/types").Identity + > + | undefined; + signer: import("@aws-sdk/core").AwsSdkSigV4Signer; + } + | { + schemeId: string; + identityProvider: ( + ipc: import("@smithy/types").IdentityProviderConfig + ) => + | import("@smithy/types").IdentityProvider< + import("@smithy/types").Identity + > + | (() => Promise<{}>); + signer: import("@smithy/core").NoAuthSigner; + } + )[]; + httpAuthSchemeProvider: import("./auth/httpAuthSchemeProvider").STSHttpAuthSchemeProvider; + credentials?: + | import("@smithy/types").AwsCredentialIdentity + | import("@smithy/types").AwsCredentialIdentityProvider + | undefined; + signer?: + | import("@smithy/types").RequestSigner + | (( + authScheme?: import("@smithy/types").AuthScheme | undefined + ) => Promise) + | undefined; + signingEscapePath?: boolean | undefined; + systemClockOffset?: number | undefined; + signingRegion?: string | undefined; + signerConstructor?: + | (new ( + options: import("@smithy/signature-v4").SignatureV4Init & + import("@smithy/signature-v4").SignatureV4CryptoInit + ) => import("@smithy/types").RequestSigner) + | undefined; + useGlobalEndpoint?: + | boolean + | import("@smithy/types").Provider + | undefined; +}; diff --git a/amplify/functions/deleteDocument/node_modules/@aws-sdk/nested-clients/dist-types/ts3.4/submodules/sts/runtimeConfig.d.ts 
b/amplify/functions/deleteDocument/node_modules/@aws-sdk/nested-clients/dist-types/ts3.4/submodules/sts/runtimeConfig.d.ts new file mode 100644 index 0000000..50cd2c7 --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@aws-sdk/nested-clients/dist-types/ts3.4/submodules/sts/runtimeConfig.d.ts @@ -0,0 +1,112 @@ +import { NoAuthSigner } from "@smithy/core"; +import { NodeHttpHandler as RequestHandler } from "@smithy/node-http-handler"; +import { IdentityProviderConfig } from "@smithy/types"; +import { STSClientConfig } from "./STSClient"; +export declare const getRuntimeConfig: (config: STSClientConfig) => { + runtime: string; + defaultsMode: import("@smithy/types").Provider< + import("@smithy/smithy-client").ResolvedDefaultsMode + >; + authSchemePreference: string[] | import("@smithy/types").Provider; + bodyLengthChecker: import("@smithy/types").BodyLengthCalculator; + defaultUserAgentProvider: ( + config?: + | import("@aws-sdk/util-user-agent-node").PreviouslyResolved + | undefined + ) => Promise; + httpAuthSchemes: + | import("@smithy/types").HttpAuthScheme[] + | { + schemeId: string; + identityProvider: ( + ipc: IdentityProviderConfig + ) => + | import("@smithy/types").IdentityProvider< + import("@smithy/types").Identity + > + | (() => Promise<{}>); + signer: NoAuthSigner; + }[]; + maxAttempts: number | import("@smithy/types").Provider; + region: string | import("@smithy/types").Provider; + requestHandler: + | RequestHandler + | import("@smithy/protocol-http").HttpHandler; + retryMode: string | import("@smithy/types").Provider; + sha256: import("@smithy/types").HashConstructor; + streamCollector: import("@smithy/types").StreamCollector; + useDualstackEndpoint: boolean | import("@smithy/types").Provider; + useFipsEndpoint: boolean | import("@smithy/types").Provider; + userAgentAppId: string | import("@smithy/types").Provider; + apiVersion: string; + cacheMiddleware?: boolean | undefined; + urlParser: import("@smithy/types").UrlParser; + 
base64Decoder: import("@smithy/types").Decoder; + base64Encoder: (_input: string | Uint8Array) => string; + utf8Decoder: import("@smithy/types").Decoder; + utf8Encoder: (input: string | Uint8Array) => string; + disableHostPrefix: boolean; + serviceId: string; + profile?: string | undefined; + credentialDefaultProvider?: + | ((input: any) => import("@smithy/types").AwsCredentialIdentityProvider) + | undefined; + logger: import("@smithy/types").Logger; + extensions: import("./runtimeExtensions").RuntimeExtension[]; + customUserAgent?: string | import("@smithy/types").UserAgent | undefined; + retryStrategy?: + | import("@smithy/types").RetryStrategy + | import("@smithy/types").RetryStrategyV2 + | undefined; + endpoint?: + | (( + | string + | import("@smithy/types").Endpoint + | import("@smithy/types").Provider + | import("@smithy/types").EndpointV2 + | import("@smithy/types").Provider + ) & + ( + | string + | import("@smithy/types").Provider + | import("@smithy/types").Endpoint + | import("@smithy/types").Provider + | import("@smithy/types").EndpointV2 + | import("@smithy/types").Provider + )) + | undefined; + endpointProvider: ( + params: import("./endpoint/EndpointParameters").EndpointParameters, + context?: + | { + logger?: import("@smithy/types").Logger | undefined; + } + | undefined + ) => import("@smithy/types").EndpointV2; + tls?: boolean | undefined; + serviceConfiguredEndpoint?: undefined; + httpAuthSchemeProvider: import("./auth/httpAuthSchemeProvider").STSHttpAuthSchemeProvider; + credentials?: + | import("@smithy/types").AwsCredentialIdentity + | import("@smithy/types").AwsCredentialIdentityProvider + | undefined; + signer?: + | import("@smithy/types").RequestSigner + | (( + authScheme?: import("@smithy/types").AuthScheme | undefined + ) => Promise) + | undefined; + signingEscapePath?: boolean | undefined; + systemClockOffset?: number | undefined; + signingRegion?: string | undefined; + signerConstructor?: + | (new ( + options: 
import("@smithy/signature-v4").SignatureV4Init & + import("@smithy/signature-v4").SignatureV4CryptoInit + ) => import("@smithy/types").RequestSigner) + | undefined; + useGlobalEndpoint?: + | boolean + | import("@smithy/types").Provider + | undefined; +}; diff --git a/amplify/functions/deleteDocument/node_modules/@aws-sdk/nested-clients/dist-types/ts3.4/submodules/sts/runtimeConfig.native.d.ts b/amplify/functions/deleteDocument/node_modules/@aws-sdk/nested-clients/dist-types/ts3.4/submodules/sts/runtimeConfig.native.d.ts new file mode 100644 index 0000000..5eda45e --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@aws-sdk/nested-clients/dist-types/ts3.4/submodules/sts/runtimeConfig.native.d.ts @@ -0,0 +1,135 @@ +import { STSClientConfig } from "./STSClient"; +export declare const getRuntimeConfig: (config: STSClientConfig) => { + runtime: string; + sha256: import("@smithy/types").HashConstructor; + requestHandler: + | import("@smithy/types").NodeHttpHandlerOptions + | import("@smithy/types").FetchHttpHandlerOptions + | Record + | import("@smithy/protocol-http").HttpHandler + | import("@smithy/fetch-http-handler").FetchHttpHandler; + apiVersion: string; + cacheMiddleware?: boolean | undefined; + urlParser: import("@smithy/types").UrlParser; + bodyLengthChecker: import("@smithy/types").BodyLengthCalculator; + streamCollector: import("@smithy/types").StreamCollector; + base64Decoder: import("@smithy/types").Decoder; + base64Encoder: (_input: string | Uint8Array) => string; + utf8Decoder: import("@smithy/types").Decoder; + utf8Encoder: (input: string | Uint8Array) => string; + disableHostPrefix: boolean; + serviceId: string; + useDualstackEndpoint: boolean | import("@smithy/types").Provider; + useFipsEndpoint: boolean | import("@smithy/types").Provider; + region: string | import("@smithy/types").Provider; + profile?: string | undefined; + defaultUserAgentProvider: ( + config?: + | import("@aws-sdk/util-user-agent-browser").PreviouslyResolved + | 
undefined + ) => Promise; + credentialDefaultProvider: + | ((input: any) => import("@smithy/types").AwsCredentialIdentityProvider) + | (( + _: unknown + ) => () => Promise); + maxAttempts: number | import("@smithy/types").Provider; + retryMode: string | import("@smithy/types").Provider; + logger: import("@smithy/types").Logger; + extensions: import("./runtimeExtensions").RuntimeExtension[]; + defaultsMode: + | import("@smithy/smithy-client").DefaultsMode + | import("@smithy/types").Provider< + import("@smithy/smithy-client").DefaultsMode + >; + customUserAgent?: string | import("@smithy/types").UserAgent | undefined; + userAgentAppId?: + | string + | import("@smithy/types").Provider + | undefined; + retryStrategy?: + | import("@smithy/types").RetryStrategy + | import("@smithy/types").RetryStrategyV2 + | undefined; + endpoint?: + | (( + | string + | import("@smithy/types").Endpoint + | import("@smithy/types").Provider + | import("@smithy/types").EndpointV2 + | import("@smithy/types").Provider + ) & + ( + | string + | import("@smithy/types").Provider + | import("@smithy/types").Endpoint + | import("@smithy/types").Provider + | import("@smithy/types").EndpointV2 + | import("@smithy/types").Provider + )) + | undefined; + endpointProvider: ( + params: import("./endpoint/EndpointParameters").EndpointParameters, + context?: + | { + logger?: import("@smithy/types").Logger | undefined; + } + | undefined + ) => import("@smithy/types").EndpointV2; + tls?: boolean | undefined; + serviceConfiguredEndpoint?: undefined; + authSchemePreference?: + | string[] + | import("@smithy/types").Provider + | undefined; + httpAuthSchemes: + | import("@smithy/types").HttpAuthScheme[] + | ( + | { + schemeId: string; + identityProvider: ( + ipc: import("@smithy/types").IdentityProviderConfig + ) => + | import("@smithy/types").IdentityProvider< + import("@smithy/types").Identity + > + | undefined; + signer: import("@aws-sdk/core").AwsSdkSigV4Signer; + } + | { + schemeId: string; + 
identityProvider: ( + ipc: import("@smithy/types").IdentityProviderConfig + ) => + | import("@smithy/types").IdentityProvider< + import("@smithy/types").Identity + > + | (() => Promise<{}>); + signer: import("@smithy/core").NoAuthSigner; + } + )[]; + httpAuthSchemeProvider: import("./auth/httpAuthSchemeProvider").STSHttpAuthSchemeProvider; + credentials?: + | import("@smithy/types").AwsCredentialIdentity + | import("@smithy/types").AwsCredentialIdentityProvider + | undefined; + signer?: + | import("@smithy/types").RequestSigner + | (( + authScheme?: import("@smithy/types").AuthScheme | undefined + ) => Promise) + | undefined; + signingEscapePath?: boolean | undefined; + systemClockOffset?: number | undefined; + signingRegion?: string | undefined; + signerConstructor?: + | (new ( + options: import("@smithy/signature-v4").SignatureV4Init & + import("@smithy/signature-v4").SignatureV4CryptoInit + ) => import("@smithy/types").RequestSigner) + | undefined; + useGlobalEndpoint?: + | boolean + | import("@smithy/types").Provider + | undefined; +}; diff --git a/amplify/functions/deleteDocument/node_modules/@aws-sdk/nested-clients/dist-types/ts3.4/submodules/sts/runtimeConfig.shared.d.ts b/amplify/functions/deleteDocument/node_modules/@aws-sdk/nested-clients/dist-types/ts3.4/submodules/sts/runtimeConfig.shared.d.ts new file mode 100644 index 0000000..860b0c8 --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@aws-sdk/nested-clients/dist-types/ts3.4/submodules/sts/runtimeConfig.shared.d.ts @@ -0,0 +1,51 @@ +import { AwsSdkSigV4Signer } from "@aws-sdk/core"; +import { NoAuthSigner } from "@smithy/core"; +import { IdentityProviderConfig } from "@smithy/types"; +import { STSClientConfig } from "./STSClient"; +export declare const getRuntimeConfig: (config: STSClientConfig) => { + apiVersion: string; + base64Decoder: import("@smithy/types").Decoder; + base64Encoder: (_input: string | Uint8Array) => string; + disableHostPrefix: boolean; + endpointProvider: ( + 
params: import("./endpoint/EndpointParameters").EndpointParameters, + context?: + | { + logger?: import("@smithy/types").Logger | undefined; + } + | undefined + ) => import("@smithy/types").EndpointV2; + extensions: import("./runtimeExtensions").RuntimeExtension[]; + httpAuthSchemeProvider: import("./auth/httpAuthSchemeProvider").STSHttpAuthSchemeProvider; + httpAuthSchemes: + | import("@smithy/types").HttpAuthScheme[] + | ( + | { + schemeId: string; + identityProvider: ( + ipc: IdentityProviderConfig + ) => + | import("@smithy/types").IdentityProvider< + import("@smithy/types").Identity + > + | undefined; + signer: AwsSdkSigV4Signer; + } + | { + schemeId: string; + identityProvider: ( + ipc: IdentityProviderConfig + ) => + | import("@smithy/types").IdentityProvider< + import("@smithy/types").Identity + > + | (() => Promise<{}>); + signer: NoAuthSigner; + } + )[]; + logger: import("@smithy/types").Logger; + serviceId: string; + urlParser: import("@smithy/types").UrlParser; + utf8Decoder: import("@smithy/types").Decoder; + utf8Encoder: (input: string | Uint8Array) => string; +}; diff --git a/amplify/functions/deleteDocument/node_modules/@aws-sdk/nested-clients/dist-types/ts3.4/submodules/sts/runtimeExtensions.d.ts b/amplify/functions/deleteDocument/node_modules/@aws-sdk/nested-clients/dist-types/ts3.4/submodules/sts/runtimeExtensions.d.ts new file mode 100644 index 0000000..d3cd411 --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@aws-sdk/nested-clients/dist-types/ts3.4/submodules/sts/runtimeExtensions.d.ts @@ -0,0 +1,11 @@ +import { STSExtensionConfiguration } from "./extensionConfiguration"; +export interface RuntimeExtension { + configure(extensionConfiguration: STSExtensionConfiguration): void; +} +export interface RuntimeExtensionsConfig { + extensions: RuntimeExtension[]; +} +export declare const resolveRuntimeExtensions: ( + runtimeConfig: any, + extensions: RuntimeExtension[] +) => any; diff --git 
a/amplify/functions/deleteDocument/node_modules/@aws-sdk/nested-clients/package.json b/amplify/functions/deleteDocument/node_modules/@aws-sdk/nested-clients/package.json new file mode 100644 index 0000000..26191ad --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@aws-sdk/nested-clients/package.json @@ -0,0 +1,115 @@ +{ + "name": "@aws-sdk/nested-clients", + "version": "3.803.0", + "description": "Nested clients for AWS SDK packages.", + "main": "./dist-cjs/index.js", + "module": "./dist-es/index.js", + "types": "./dist-types/index.d.ts", + "scripts": { + "build": "yarn lint && concurrently 'yarn:build:cjs' 'yarn:build:es' 'yarn:build:types'", + "build:cjs": "node ../../scripts/compilation/inline nested-clients", + "build:es": "tsc -p tsconfig.es.json", + "build:include:deps": "lerna run --scope $npm_package_name --include-dependencies build", + "build:types": "tsc -p tsconfig.types.json", + "build:types:downlevel": "downlevel-dts dist-types dist-types/ts3.4", + "clean": "rimraf ./dist-* && rimraf *.tsbuildinfo", + "lint": "node ../../scripts/validation/submodules-linter.js --pkg nested-clients", + "test": "yarn g:vitest run", + "test:watch": "yarn g:vitest watch" + }, + "engines": { + "node": ">=18.0.0" + }, + "author": { + "name": "AWS SDK for JavaScript Team", + "url": "https://aws.amazon.com/javascript/" + }, + "license": "Apache-2.0", + "dependencies": { + "@aws-crypto/sha256-browser": "5.2.0", + "@aws-crypto/sha256-js": "5.2.0", + "@aws-sdk/core": "3.799.0", + "@aws-sdk/middleware-host-header": "3.775.0", + "@aws-sdk/middleware-logger": "3.775.0", + "@aws-sdk/middleware-recursion-detection": "3.775.0", + "@aws-sdk/middleware-user-agent": "3.799.0", + "@aws-sdk/region-config-resolver": "3.775.0", + "@aws-sdk/types": "3.775.0", + "@aws-sdk/util-endpoints": "3.787.0", + "@aws-sdk/util-user-agent-browser": "3.775.0", + "@aws-sdk/util-user-agent-node": "3.799.0", + "@smithy/config-resolver": "^4.1.0", + "@smithy/core": "^3.3.0", + 
"@smithy/fetch-http-handler": "^5.0.2", + "@smithy/hash-node": "^4.0.2", + "@smithy/invalid-dependency": "^4.0.2", + "@smithy/middleware-content-length": "^4.0.2", + "@smithy/middleware-endpoint": "^4.1.1", + "@smithy/middleware-retry": "^4.1.2", + "@smithy/middleware-serde": "^4.0.3", + "@smithy/middleware-stack": "^4.0.2", + "@smithy/node-config-provider": "^4.0.2", + "@smithy/node-http-handler": "^4.0.4", + "@smithy/protocol-http": "^5.1.0", + "@smithy/smithy-client": "^4.2.1", + "@smithy/types": "^4.2.0", + "@smithy/url-parser": "^4.0.2", + "@smithy/util-base64": "^4.0.0", + "@smithy/util-body-length-browser": "^4.0.0", + "@smithy/util-body-length-node": "^4.0.0", + "@smithy/util-defaults-mode-browser": "^4.0.9", + "@smithy/util-defaults-mode-node": "^4.0.9", + "@smithy/util-endpoints": "^3.0.2", + "@smithy/util-middleware": "^4.0.2", + "@smithy/util-retry": "^4.0.3", + "@smithy/util-utf8": "^4.0.0", + "tslib": "^2.6.2" + }, + "devDependencies": { + "concurrently": "7.0.0", + "downlevel-dts": "0.10.1", + "rimraf": "3.0.2", + "typescript": "~5.2.2" + }, + "typesVersions": { + "<4.0": { + "dist-types/*": [ + "dist-types/ts3.4/*" + ] + } + }, + "files": [ + "./sso-oidc.d.ts", + "./sso-oidc.js", + "./sts.d.ts", + "./sts.js", + "dist-*/**" + ], + "browser": { + "./dist-es/submodules/sso-oidc/runtimeConfig": "./dist-es/submodules/sso-oidc/runtimeConfig.browser", + "./dist-es/submodules/sts/runtimeConfig": "./dist-es/submodules/sts/runtimeConfig.browser" + }, + "react-native": {}, + "homepage": "https://github.com/aws/aws-sdk-js-v3/tree/main/packages/nested-clients", + "repository": { + "type": "git", + "url": "https://github.com/aws/aws-sdk-js-v3.git", + "directory": "packages/nested-clients" + }, + "exports": { + "./sso-oidc": { + "types": "./dist-types/submodules/sso-oidc/index.d.ts", + "module": "./dist-es/submodules/sso-oidc/index.js", + "node": "./dist-cjs/submodules/sso-oidc/index.js", + "import": "./dist-es/submodules/sso-oidc/index.js", + "require": 
"./dist-cjs/submodules/sso-oidc/index.js" + }, + "./sts": { + "types": "./dist-types/submodules/sts/index.d.ts", + "module": "./dist-es/submodules/sts/index.js", + "node": "./dist-cjs/submodules/sts/index.js", + "import": "./dist-es/submodules/sts/index.js", + "require": "./dist-cjs/submodules/sts/index.js" + } + } +} diff --git a/amplify/functions/deleteDocument/node_modules/@aws-sdk/nested-clients/sso-oidc.d.ts b/amplify/functions/deleteDocument/node_modules/@aws-sdk/nested-clients/sso-oidc.d.ts new file mode 100644 index 0000000..ab47282 --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@aws-sdk/nested-clients/sso-oidc.d.ts @@ -0,0 +1,7 @@ +/** + * Do not edit: + * This is a compatibility redirect for contexts that do not understand package.json exports field. + */ +declare module "@aws-sdk/nested-clients/sso-oidc" { + export * from "@aws-sdk/nested-clients/dist-types/submodules/sso-oidc/index.d"; +} diff --git a/amplify/functions/deleteDocument/node_modules/@aws-sdk/nested-clients/sso-oidc.js b/amplify/functions/deleteDocument/node_modules/@aws-sdk/nested-clients/sso-oidc.js new file mode 100644 index 0000000..896865c --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@aws-sdk/nested-clients/sso-oidc.js @@ -0,0 +1,5 @@ +/** + * Do not edit: + * This is a compatibility redirect for contexts that do not understand package.json exports field. + */ +module.exports = require("./dist-cjs/submodules/sso-oidc/index.js"); diff --git a/amplify/functions/deleteDocument/node_modules/@aws-sdk/nested-clients/sts.d.ts b/amplify/functions/deleteDocument/node_modules/@aws-sdk/nested-clients/sts.d.ts new file mode 100644 index 0000000..03b8e68 --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@aws-sdk/nested-clients/sts.d.ts @@ -0,0 +1,7 @@ +/** + * Do not edit: + * This is a compatibility redirect for contexts that do not understand package.json exports field. 
+ */ +declare module "@aws-sdk/nested-clients/sts" { + export * from "@aws-sdk/nested-clients/dist-types/submodules/sts/index.d"; +} diff --git a/amplify/functions/deleteDocument/node_modules/@aws-sdk/nested-clients/sts.js b/amplify/functions/deleteDocument/node_modules/@aws-sdk/nested-clients/sts.js new file mode 100644 index 0000000..8976f12 --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@aws-sdk/nested-clients/sts.js @@ -0,0 +1,5 @@ +/** + * Do not edit: + * This is a compatibility redirect for contexts that do not understand package.json exports field. + */ +module.exports = require("./dist-cjs/submodules/sts/index.js"); diff --git a/amplify/functions/deleteDocument/node_modules/@aws-sdk/region-config-resolver/LICENSE b/amplify/functions/deleteDocument/node_modules/@aws-sdk/region-config-resolver/LICENSE new file mode 100644 index 0000000..7b6491b --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@aws-sdk/region-config-resolver/LICENSE @@ -0,0 +1,201 @@ +Apache License + Version 2.0, January 2004 + http://www.apache.org/licenses/ + + TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION + + 1. Definitions. + + "License" shall mean the terms and conditions for use, reproduction, + and distribution as defined by Sections 1 through 9 of this document. + + "Licensor" shall mean the copyright owner or entity authorized by + the copyright owner that is granting the License. + + "Legal Entity" shall mean the union of the acting entity and all + other entities that control, are controlled by, or are under common + control with that entity. For the purposes of this definition, + "control" means (i) the power, direct or indirect, to cause the + direction or management of such entity, whether by contract or + otherwise, or (ii) ownership of fifty percent (50%) or more of the + outstanding shares, or (iii) beneficial ownership of such entity. 
+ + "You" (or "Your") shall mean an individual or Legal Entity + exercising permissions granted by this License. + + "Source" form shall mean the preferred form for making modifications, + including but not limited to software source code, documentation + source, and configuration files. + + "Object" form shall mean any form resulting from mechanical + transformation or translation of a Source form, including but + not limited to compiled object code, generated documentation, + and conversions to other media types. + + "Work" shall mean the work of authorship, whether in Source or + Object form, made available under the License, as indicated by a + copyright notice that is included in or attached to the work + (an example is provided in the Appendix below). + + "Derivative Works" shall mean any work, whether in Source or Object + form, that is based on (or derived from) the Work and for which the + editorial revisions, annotations, elaborations, or other modifications + represent, as a whole, an original work of authorship. For the purposes + of this License, Derivative Works shall not include works that remain + separable from, or merely link (or bind by name) to the interfaces of, + the Work and Derivative Works thereof. + + "Contribution" shall mean any work of authorship, including + the original version of the Work and any modifications or additions + to that Work or Derivative Works thereof, that is intentionally + submitted to Licensor for inclusion in the Work by the copyright owner + or by an individual or Legal Entity authorized to submit on behalf of + the copyright owner. 
For the purposes of this definition, "submitted" + means any form of electronic, verbal, or written communication sent + to the Licensor or its representatives, including but not limited to + communication on electronic mailing lists, source code control systems, + and issue tracking systems that are managed by, or on behalf of, the + Licensor for the purpose of discussing and improving the Work, but + excluding communication that is conspicuously marked or otherwise + designated in writing by the copyright owner as "Not a Contribution." + + "Contributor" shall mean Licensor and any individual or Legal Entity + on behalf of whom a Contribution has been received by Licensor and + subsequently incorporated within the Work. + + 2. Grant of Copyright License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + copyright license to reproduce, prepare Derivative Works of, + publicly display, publicly perform, sublicense, and distribute the + Work and such Derivative Works in Source or Object form. + + 3. Grant of Patent License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + (except as stated in this section) patent license to make, have made, + use, offer to sell, sell, import, and otherwise transfer the Work, + where such license applies only to those patent claims licensable + by such Contributor that are necessarily infringed by their + Contribution(s) alone or by combination of their Contribution(s) + with the Work to which such Contribution(s) was submitted. 
If You + institute patent litigation against any entity (including a + cross-claim or counterclaim in a lawsuit) alleging that the Work + or a Contribution incorporated within the Work constitutes direct + or contributory patent infringement, then any patent licenses + granted to You under this License for that Work shall terminate + as of the date such litigation is filed. + + 4. Redistribution. You may reproduce and distribute copies of the + Work or Derivative Works thereof in any medium, with or without + modifications, and in Source or Object form, provided that You + meet the following conditions: + + (a) You must give any other recipients of the Work or + Derivative Works a copy of this License; and + + (b) You must cause any modified files to carry prominent notices + stating that You changed the files; and + + (c) You must retain, in the Source form of any Derivative Works + that You distribute, all copyright, patent, trademark, and + attribution notices from the Source form of the Work, + excluding those notices that do not pertain to any part of + the Derivative Works; and + + (d) If the Work includes a "NOTICE" text file as part of its + distribution, then any Derivative Works that You distribute must + include a readable copy of the attribution notices contained + within such NOTICE file, excluding those notices that do not + pertain to any part of the Derivative Works, in at least one + of the following places: within a NOTICE text file distributed + as part of the Derivative Works; within the Source form or + documentation, if provided along with the Derivative Works; or, + within a display generated by the Derivative Works, if and + wherever such third-party notices normally appear. The contents + of the NOTICE file are for informational purposes only and + do not modify the License. 
You may add Your own attribution + notices within Derivative Works that You distribute, alongside + or as an addendum to the NOTICE text from the Work, provided + that such additional attribution notices cannot be construed + as modifying the License. + + You may add Your own copyright statement to Your modifications and + may provide additional or different license terms and conditions + for use, reproduction, or distribution of Your modifications, or + for any such Derivative Works as a whole, provided Your use, + reproduction, and distribution of the Work otherwise complies with + the conditions stated in this License. + + 5. Submission of Contributions. Unless You explicitly state otherwise, + any Contribution intentionally submitted for inclusion in the Work + by You to the Licensor shall be under the terms and conditions of + this License, without any additional terms or conditions. + Notwithstanding the above, nothing herein shall supersede or modify + the terms of any separate license agreement you may have executed + with Licensor regarding such Contributions. + + 6. Trademarks. This License does not grant permission to use the trade + names, trademarks, service marks, or product names of the Licensor, + except as required for reasonable and customary use in describing the + origin of the Work and reproducing the content of the NOTICE file. + + 7. Disclaimer of Warranty. Unless required by applicable law or + agreed to in writing, Licensor provides the Work (and each + Contributor provides its Contributions) on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or + implied, including, without limitation, any warranties or conditions + of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A + PARTICULAR PURPOSE. You are solely responsible for determining the + appropriateness of using or redistributing the Work and assume any + risks associated with Your exercise of permissions under this License. + + 8. 
Limitation of Liability. In no event and under no legal theory, + whether in tort (including negligence), contract, or otherwise, + unless required by applicable law (such as deliberate and grossly + negligent acts) or agreed to in writing, shall any Contributor be + liable to You for damages, including any direct, indirect, special, + incidental, or consequential damages of any character arising as a + result of this License or out of the use or inability to use the + Work (including but not limited to damages for loss of goodwill, + work stoppage, computer failure or malfunction, or any and all + other commercial damages or losses), even if such Contributor + has been advised of the possibility of such damages. + + 9. Accepting Warranty or Additional Liability. While redistributing + the Work or Derivative Works thereof, You may choose to offer, + and charge a fee for, acceptance of support, warranty, indemnity, + or other liability obligations and/or rights consistent with this + License. However, in accepting such obligations, You may act only + on Your own behalf and on Your sole responsibility, not on behalf + of any other Contributor, and only if You agree to indemnify, + defend, and hold each Contributor harmless for any liability + incurred by, or claims asserted against, such Contributor by reason + of your accepting any such warranty or additional liability. + + END OF TERMS AND CONDITIONS + + APPENDIX: How to apply the Apache License to your work. + + To apply the Apache License to your work, attach the following + boilerplate notice, with the fields enclosed by brackets "{}" + replaced with your own identifying information. (Don't include + the brackets!) The text should be enclosed in the appropriate + comment syntax for the file format. We also recommend that a + file or class name and description of purpose be included on the + same "printed page" as the copyright notice for easier + identification within third-party archives. 
+ + Copyright 2018-2020 Amazon.com, Inc. or its affiliates. All Rights Reserved. + + Licensed under the Apache License, Version 2.0 (the "License"); + you may not use this file except in compliance with the License. + You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + + Unless required by applicable law or agreed to in writing, software + distributed under the License is distributed on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + See the License for the specific language governing permissions and + limitations under the License. \ No newline at end of file diff --git a/amplify/functions/deleteDocument/node_modules/@aws-sdk/region-config-resolver/README.md b/amplify/functions/deleteDocument/node_modules/@aws-sdk/region-config-resolver/README.md new file mode 100644 index 0000000..389b765 --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@aws-sdk/region-config-resolver/README.md @@ -0,0 +1,12 @@ +# @aws-sdk/region-config-resolver + +[![NPM version](https://img.shields.io/npm/v/@aws-sdk/region-config-resolver/latest.svg)](https://www.npmjs.com/package/@aws-sdk/region-config-resolver) +[![NPM downloads](https://img.shields.io/npm/dm/@aws-sdk/region-config-resolver.svg)](https://www.npmjs.com/package/@aws-sdk/region-config-resolver) + +> An internal package + +This package provides utilities for AWS region config resolvers. + +## Usage + +You probably shouldn't, at least directly. 
diff --git a/amplify/functions/deleteDocument/node_modules/@aws-sdk/region-config-resolver/dist-cjs/index.js b/amplify/functions/deleteDocument/node_modules/@aws-sdk/region-config-resolver/dist-cjs/index.js new file mode 100644 index 0000000..ddc184f --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@aws-sdk/region-config-resolver/dist-cjs/index.js @@ -0,0 +1,105 @@ +"use strict"; +var __defProp = Object.defineProperty; +var __getOwnPropDesc = Object.getOwnPropertyDescriptor; +var __getOwnPropNames = Object.getOwnPropertyNames; +var __hasOwnProp = Object.prototype.hasOwnProperty; +var __name = (target, value) => __defProp(target, "name", { value, configurable: true }); +var __export = (target, all) => { + for (var name in all) + __defProp(target, name, { get: all[name], enumerable: true }); +}; +var __copyProps = (to, from, except, desc) => { + if (from && typeof from === "object" || typeof from === "function") { + for (let key of __getOwnPropNames(from)) + if (!__hasOwnProp.call(to, key) && key !== except) + __defProp(to, key, { get: () => from[key], enumerable: !(desc = __getOwnPropDesc(from, key)) || desc.enumerable }); + } + return to; +}; +var __toCommonJS = (mod) => __copyProps(__defProp({}, "__esModule", { value: true }), mod); + +// src/index.ts +var index_exports = {}; +__export(index_exports, { + NODE_REGION_CONFIG_FILE_OPTIONS: () => NODE_REGION_CONFIG_FILE_OPTIONS, + NODE_REGION_CONFIG_OPTIONS: () => NODE_REGION_CONFIG_OPTIONS, + REGION_ENV_NAME: () => REGION_ENV_NAME, + REGION_INI_NAME: () => REGION_INI_NAME, + getAwsRegionExtensionConfiguration: () => getAwsRegionExtensionConfiguration, + resolveAwsRegionExtensionConfiguration: () => resolveAwsRegionExtensionConfiguration, + resolveRegionConfig: () => resolveRegionConfig +}); +module.exports = __toCommonJS(index_exports); + +// src/extensions/index.ts +var getAwsRegionExtensionConfiguration = /* @__PURE__ */ __name((runtimeConfig) => { + return { + setRegion(region) { + 
runtimeConfig.region = region; + }, + region() { + return runtimeConfig.region; + } + }; +}, "getAwsRegionExtensionConfiguration"); +var resolveAwsRegionExtensionConfiguration = /* @__PURE__ */ __name((awsRegionExtensionConfiguration) => { + return { + region: awsRegionExtensionConfiguration.region() + }; +}, "resolveAwsRegionExtensionConfiguration"); + +// src/regionConfig/config.ts +var REGION_ENV_NAME = "AWS_REGION"; +var REGION_INI_NAME = "region"; +var NODE_REGION_CONFIG_OPTIONS = { + environmentVariableSelector: /* @__PURE__ */ __name((env) => env[REGION_ENV_NAME], "environmentVariableSelector"), + configFileSelector: /* @__PURE__ */ __name((profile) => profile[REGION_INI_NAME], "configFileSelector"), + default: /* @__PURE__ */ __name(() => { + throw new Error("Region is missing"); + }, "default") +}; +var NODE_REGION_CONFIG_FILE_OPTIONS = { + preferredFile: "credentials" +}; + +// src/regionConfig/isFipsRegion.ts +var isFipsRegion = /* @__PURE__ */ __name((region) => typeof region === "string" && (region.startsWith("fips-") || region.endsWith("-fips")), "isFipsRegion"); + +// src/regionConfig/getRealRegion.ts +var getRealRegion = /* @__PURE__ */ __name((region) => isFipsRegion(region) ? ["fips-aws-global", "aws-fips"].includes(region) ? "us-east-1" : region.replace(/fips-(dkr-|prod-)?|-fips/, "") : region, "getRealRegion"); + +// src/regionConfig/resolveRegionConfig.ts +var resolveRegionConfig = /* @__PURE__ */ __name((input) => { + const { region, useFipsEndpoint } = input; + if (!region) { + throw new Error("Region is missing"); + } + return Object.assign(input, { + region: /* @__PURE__ */ __name(async () => { + if (typeof region === "string") { + return getRealRegion(region); + } + const providedRegion = await region(); + return getRealRegion(providedRegion); + }, "region"), + useFipsEndpoint: /* @__PURE__ */ __name(async () => { + const providedRegion = typeof region === "string" ? 
region : await region(); + if (isFipsRegion(providedRegion)) { + return true; + } + return typeof useFipsEndpoint !== "function" ? Promise.resolve(!!useFipsEndpoint) : useFipsEndpoint(); + }, "useFipsEndpoint") + }); +}, "resolveRegionConfig"); +// Annotate the CommonJS export names for ESM import in node: + +0 && (module.exports = { + getAwsRegionExtensionConfiguration, + resolveAwsRegionExtensionConfiguration, + REGION_ENV_NAME, + REGION_INI_NAME, + NODE_REGION_CONFIG_OPTIONS, + NODE_REGION_CONFIG_FILE_OPTIONS, + resolveRegionConfig +}); + diff --git a/amplify/functions/deleteDocument/node_modules/@aws-sdk/region-config-resolver/dist-es/extensions/index.js b/amplify/functions/deleteDocument/node_modules/@aws-sdk/region-config-resolver/dist-es/extensions/index.js new file mode 100644 index 0000000..eb03314 --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@aws-sdk/region-config-resolver/dist-es/extensions/index.js @@ -0,0 +1,15 @@ +export const getAwsRegionExtensionConfiguration = (runtimeConfig) => { + return { + setRegion(region) { + runtimeConfig.region = region; + }, + region() { + return runtimeConfig.region; + }, + }; +}; +export const resolveAwsRegionExtensionConfiguration = (awsRegionExtensionConfiguration) => { + return { + region: awsRegionExtensionConfiguration.region(), + }; +}; diff --git a/amplify/functions/deleteDocument/node_modules/@aws-sdk/region-config-resolver/dist-es/index.js b/amplify/functions/deleteDocument/node_modules/@aws-sdk/region-config-resolver/dist-es/index.js new file mode 100644 index 0000000..6f4e482 --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@aws-sdk/region-config-resolver/dist-es/index.js @@ -0,0 +1,2 @@ +export * from "./extensions"; +export * from "./regionConfig"; diff --git a/amplify/functions/deleteDocument/node_modules/@aws-sdk/region-config-resolver/dist-es/regionConfig/config.js 
b/amplify/functions/deleteDocument/node_modules/@aws-sdk/region-config-resolver/dist-es/regionConfig/config.js new file mode 100644 index 0000000..7db9896 --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@aws-sdk/region-config-resolver/dist-es/regionConfig/config.js @@ -0,0 +1,12 @@ +export const REGION_ENV_NAME = "AWS_REGION"; +export const REGION_INI_NAME = "region"; +export const NODE_REGION_CONFIG_OPTIONS = { + environmentVariableSelector: (env) => env[REGION_ENV_NAME], + configFileSelector: (profile) => profile[REGION_INI_NAME], + default: () => { + throw new Error("Region is missing"); + }, +}; +export const NODE_REGION_CONFIG_FILE_OPTIONS = { + preferredFile: "credentials", +}; diff --git a/amplify/functions/deleteDocument/node_modules/@aws-sdk/region-config-resolver/dist-es/regionConfig/getRealRegion.js b/amplify/functions/deleteDocument/node_modules/@aws-sdk/region-config-resolver/dist-es/regionConfig/getRealRegion.js new file mode 100644 index 0000000..8d1246b --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@aws-sdk/region-config-resolver/dist-es/regionConfig/getRealRegion.js @@ -0,0 +1,6 @@ +import { isFipsRegion } from "./isFipsRegion"; +export const getRealRegion = (region) => isFipsRegion(region) + ? ["fips-aws-global", "aws-fips"].includes(region) + ? 
"us-east-1" + : region.replace(/fips-(dkr-|prod-)?|-fips/, "") + : region; diff --git a/amplify/functions/deleteDocument/node_modules/@aws-sdk/region-config-resolver/dist-es/regionConfig/index.js b/amplify/functions/deleteDocument/node_modules/@aws-sdk/region-config-resolver/dist-es/regionConfig/index.js new file mode 100644 index 0000000..83675f7 --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@aws-sdk/region-config-resolver/dist-es/regionConfig/index.js @@ -0,0 +1,2 @@ +export * from "./config"; +export * from "./resolveRegionConfig"; diff --git a/amplify/functions/deleteDocument/node_modules/@aws-sdk/region-config-resolver/dist-es/regionConfig/isFipsRegion.js b/amplify/functions/deleteDocument/node_modules/@aws-sdk/region-config-resolver/dist-es/regionConfig/isFipsRegion.js new file mode 100644 index 0000000..d758967 --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@aws-sdk/region-config-resolver/dist-es/regionConfig/isFipsRegion.js @@ -0,0 +1 @@ +export const isFipsRegion = (region) => typeof region === "string" && (region.startsWith("fips-") || region.endsWith("-fips")); diff --git a/amplify/functions/deleteDocument/node_modules/@aws-sdk/region-config-resolver/dist-es/regionConfig/resolveRegionConfig.js b/amplify/functions/deleteDocument/node_modules/@aws-sdk/region-config-resolver/dist-es/regionConfig/resolveRegionConfig.js new file mode 100644 index 0000000..f88e00f --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@aws-sdk/region-config-resolver/dist-es/regionConfig/resolveRegionConfig.js @@ -0,0 +1,24 @@ +import { getRealRegion } from "./getRealRegion"; +import { isFipsRegion } from "./isFipsRegion"; +export const resolveRegionConfig = (input) => { + const { region, useFipsEndpoint } = input; + if (!region) { + throw new Error("Region is missing"); + } + return Object.assign(input, { + region: async () => { + if (typeof region === "string") { + return getRealRegion(region); + } + const providedRegion = 
await region(); + return getRealRegion(providedRegion); + }, + useFipsEndpoint: async () => { + const providedRegion = typeof region === "string" ? region : await region(); + if (isFipsRegion(providedRegion)) { + return true; + } + return typeof useFipsEndpoint !== "function" ? Promise.resolve(!!useFipsEndpoint) : useFipsEndpoint(); + }, + }); +}; diff --git a/amplify/functions/deleteDocument/node_modules/@aws-sdk/region-config-resolver/dist-types/extensions/index.d.ts b/amplify/functions/deleteDocument/node_modules/@aws-sdk/region-config-resolver/dist-types/extensions/index.d.ts new file mode 100644 index 0000000..7756bad --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@aws-sdk/region-config-resolver/dist-types/extensions/index.d.ts @@ -0,0 +1,16 @@ +import { AwsRegionExtensionConfiguration } from "@aws-sdk/types"; +import { Provider } from "@smithy/types"; +export type RegionExtensionRuntimeConfigType = Partial<{ + region: string | Provider; +}>; +/** + * @internal + */ +export declare const getAwsRegionExtensionConfiguration: (runtimeConfig: RegionExtensionRuntimeConfigType) => { + setRegion(region: Provider): void; + region(): Provider; +}; +/** + * @internal + */ +export declare const resolveAwsRegionExtensionConfiguration: (awsRegionExtensionConfiguration: AwsRegionExtensionConfiguration) => RegionExtensionRuntimeConfigType; diff --git a/amplify/functions/deleteDocument/node_modules/@aws-sdk/region-config-resolver/dist-types/index.d.ts b/amplify/functions/deleteDocument/node_modules/@aws-sdk/region-config-resolver/dist-types/index.d.ts new file mode 100644 index 0000000..6f4e482 --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@aws-sdk/region-config-resolver/dist-types/index.d.ts @@ -0,0 +1,2 @@ +export * from "./extensions"; +export * from "./regionConfig"; diff --git a/amplify/functions/deleteDocument/node_modules/@aws-sdk/region-config-resolver/dist-types/regionConfig/config.d.ts 
b/amplify/functions/deleteDocument/node_modules/@aws-sdk/region-config-resolver/dist-types/regionConfig/config.d.ts new file mode 100644 index 0000000..d203bb0 --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@aws-sdk/region-config-resolver/dist-types/regionConfig/config.d.ts @@ -0,0 +1,17 @@ +import { LoadedConfigSelectors, LocalConfigOptions } from "@smithy/node-config-provider"; +/** + * @internal + */ +export declare const REGION_ENV_NAME = "AWS_REGION"; +/** + * @internal + */ +export declare const REGION_INI_NAME = "region"; +/** + * @internal + */ +export declare const NODE_REGION_CONFIG_OPTIONS: LoadedConfigSelectors; +/** + * @internal + */ +export declare const NODE_REGION_CONFIG_FILE_OPTIONS: LocalConfigOptions; diff --git a/amplify/functions/deleteDocument/node_modules/@aws-sdk/region-config-resolver/dist-types/regionConfig/getRealRegion.d.ts b/amplify/functions/deleteDocument/node_modules/@aws-sdk/region-config-resolver/dist-types/regionConfig/getRealRegion.d.ts new file mode 100644 index 0000000..c70fb5b --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@aws-sdk/region-config-resolver/dist-types/regionConfig/getRealRegion.d.ts @@ -0,0 +1,4 @@ +/** + * @internal + */ +export declare const getRealRegion: (region: string) => string; diff --git a/amplify/functions/deleteDocument/node_modules/@aws-sdk/region-config-resolver/dist-types/regionConfig/index.d.ts b/amplify/functions/deleteDocument/node_modules/@aws-sdk/region-config-resolver/dist-types/regionConfig/index.d.ts new file mode 100644 index 0000000..6dcf5e5 --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@aws-sdk/region-config-resolver/dist-types/regionConfig/index.d.ts @@ -0,0 +1,8 @@ +/** + * @internal + */ +export * from "./config"; +/** + * @internal + */ +export * from "./resolveRegionConfig"; diff --git a/amplify/functions/deleteDocument/node_modules/@aws-sdk/region-config-resolver/dist-types/regionConfig/isFipsRegion.d.ts 
b/amplify/functions/deleteDocument/node_modules/@aws-sdk/region-config-resolver/dist-types/regionConfig/isFipsRegion.d.ts new file mode 100644 index 0000000..b42cee7 --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@aws-sdk/region-config-resolver/dist-types/regionConfig/isFipsRegion.d.ts @@ -0,0 +1,4 @@ +/** + * @internal + */ +export declare const isFipsRegion: (region: string) => boolean; diff --git a/amplify/functions/deleteDocument/node_modules/@aws-sdk/region-config-resolver/dist-types/regionConfig/resolveRegionConfig.d.ts b/amplify/functions/deleteDocument/node_modules/@aws-sdk/region-config-resolver/dist-types/regionConfig/resolveRegionConfig.d.ts new file mode 100644 index 0000000..84ed4d0 --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@aws-sdk/region-config-resolver/dist-types/regionConfig/resolveRegionConfig.d.ts @@ -0,0 +1,37 @@ +import { Provider } from "@smithy/types"; +/** + * @public + */ +export interface RegionInputConfig { + /** + * The AWS region to which this client will send requests + */ + region?: string | Provider; + /** + * Enables FIPS compatible endpoints. 
+ */ + useFipsEndpoint?: boolean | Provider; +} +/** + * @internal + */ +interface PreviouslyResolved { +} +/** + * @internal + */ +export interface RegionResolvedConfig { + /** + * Resolved value for input config {@link RegionInputConfig.region} + */ + region: Provider; + /** + * Resolved value for input {@link RegionInputConfig.useFipsEndpoint} + */ + useFipsEndpoint: Provider; +} +/** + * @internal + */ +export declare const resolveRegionConfig: (input: T & RegionInputConfig & PreviouslyResolved) => T & RegionResolvedConfig; +export {}; diff --git a/amplify/functions/deleteDocument/node_modules/@aws-sdk/region-config-resolver/dist-types/ts3.4/extensions/index.d.ts b/amplify/functions/deleteDocument/node_modules/@aws-sdk/region-config-resolver/dist-types/ts3.4/extensions/index.d.ts new file mode 100644 index 0000000..c1328e3 --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@aws-sdk/region-config-resolver/dist-types/ts3.4/extensions/index.d.ts @@ -0,0 +1,14 @@ +import { AwsRegionExtensionConfiguration } from "@aws-sdk/types"; +import { Provider } from "@smithy/types"; +export type RegionExtensionRuntimeConfigType = Partial<{ + region: string | Provider; +}>; +export declare const getAwsRegionExtensionConfiguration: ( + runtimeConfig: RegionExtensionRuntimeConfigType +) => { + setRegion(region: Provider): void; + region(): Provider; +}; +export declare const resolveAwsRegionExtensionConfiguration: ( + awsRegionExtensionConfiguration: AwsRegionExtensionConfiguration +) => RegionExtensionRuntimeConfigType; diff --git a/amplify/functions/deleteDocument/node_modules/@aws-sdk/region-config-resolver/dist-types/ts3.4/index.d.ts b/amplify/functions/deleteDocument/node_modules/@aws-sdk/region-config-resolver/dist-types/ts3.4/index.d.ts new file mode 100644 index 0000000..6f4e482 --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@aws-sdk/region-config-resolver/dist-types/ts3.4/index.d.ts @@ -0,0 +1,2 @@ +export * from "./extensions"; +export 
* from "./regionConfig"; diff --git a/amplify/functions/deleteDocument/node_modules/@aws-sdk/region-config-resolver/dist-types/ts3.4/regionConfig/config.d.ts b/amplify/functions/deleteDocument/node_modules/@aws-sdk/region-config-resolver/dist-types/ts3.4/regionConfig/config.d.ts new file mode 100644 index 0000000..ceb3e02 --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@aws-sdk/region-config-resolver/dist-types/ts3.4/regionConfig/config.d.ts @@ -0,0 +1,8 @@ +import { + LoadedConfigSelectors, + LocalConfigOptions, +} from "@smithy/node-config-provider"; +export declare const REGION_ENV_NAME = "AWS_REGION"; +export declare const REGION_INI_NAME = "region"; +export declare const NODE_REGION_CONFIG_OPTIONS: LoadedConfigSelectors; +export declare const NODE_REGION_CONFIG_FILE_OPTIONS: LocalConfigOptions; diff --git a/amplify/functions/deleteDocument/node_modules/@aws-sdk/region-config-resolver/dist-types/ts3.4/regionConfig/getRealRegion.d.ts b/amplify/functions/deleteDocument/node_modules/@aws-sdk/region-config-resolver/dist-types/ts3.4/regionConfig/getRealRegion.d.ts new file mode 100644 index 0000000..f06119b --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@aws-sdk/region-config-resolver/dist-types/ts3.4/regionConfig/getRealRegion.d.ts @@ -0,0 +1 @@ +export declare const getRealRegion: (region: string) => string; diff --git a/amplify/functions/deleteDocument/node_modules/@aws-sdk/region-config-resolver/dist-types/ts3.4/regionConfig/index.d.ts b/amplify/functions/deleteDocument/node_modules/@aws-sdk/region-config-resolver/dist-types/ts3.4/regionConfig/index.d.ts new file mode 100644 index 0000000..83675f7 --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@aws-sdk/region-config-resolver/dist-types/ts3.4/regionConfig/index.d.ts @@ -0,0 +1,2 @@ +export * from "./config"; +export * from "./resolveRegionConfig"; diff --git 
a/amplify/functions/deleteDocument/node_modules/@aws-sdk/region-config-resolver/dist-types/ts3.4/regionConfig/isFipsRegion.d.ts b/amplify/functions/deleteDocument/node_modules/@aws-sdk/region-config-resolver/dist-types/ts3.4/regionConfig/isFipsRegion.d.ts new file mode 100644 index 0000000..13d34f2 --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@aws-sdk/region-config-resolver/dist-types/ts3.4/regionConfig/isFipsRegion.d.ts @@ -0,0 +1 @@ +export declare const isFipsRegion: (region: string) => boolean; diff --git a/amplify/functions/deleteDocument/node_modules/@aws-sdk/region-config-resolver/dist-types/ts3.4/regionConfig/resolveRegionConfig.d.ts b/amplify/functions/deleteDocument/node_modules/@aws-sdk/region-config-resolver/dist-types/ts3.4/regionConfig/resolveRegionConfig.d.ts new file mode 100644 index 0000000..86b8364 --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@aws-sdk/region-config-resolver/dist-types/ts3.4/regionConfig/resolveRegionConfig.d.ts @@ -0,0 +1,14 @@ +import { Provider } from "@smithy/types"; +export interface RegionInputConfig { + region?: string | Provider; + useFipsEndpoint?: boolean | Provider; +} +interface PreviouslyResolved {} +export interface RegionResolvedConfig { + region: Provider; + useFipsEndpoint: Provider; +} +export declare const resolveRegionConfig: ( + input: T & RegionInputConfig & PreviouslyResolved +) => T & RegionResolvedConfig; +export {}; diff --git a/amplify/functions/deleteDocument/node_modules/@aws-sdk/region-config-resolver/package.json b/amplify/functions/deleteDocument/node_modules/@aws-sdk/region-config-resolver/package.json new file mode 100644 index 0000000..605f530 --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@aws-sdk/region-config-resolver/package.json @@ -0,0 +1,59 @@ +{ + "name": "@aws-sdk/region-config-resolver", + "version": "3.775.0", + "scripts": { + "build": "concurrently 'yarn:build:cjs' 'yarn:build:es' 'yarn:build:types'", + "build:cjs": "node 
../../scripts/compilation/inline region-config-resolver", + "build:es": "tsc -p tsconfig.es.json", + "build:include:deps": "lerna run --scope $npm_package_name --include-dependencies build", + "build:types": "tsc -p tsconfig.types.json", + "build:types:downlevel": "downlevel-dts dist-types dist-types/ts3.4", + "clean": "rimraf ./dist-* && rimraf *.tsbuildinfo", + "extract:docs": "api-extractor run --local", + "test": "yarn g:vitest run", + "test:watch": "yarn g:vitest watch" + }, + "main": "./dist-cjs/index.js", + "module": "./dist-es/index.js", + "types": "./dist-types/index.d.ts", + "author": { + "name": "AWS SDK for JavaScript Team", + "url": "https://aws.amazon.com/javascript/" + }, + "license": "Apache-2.0", + "dependencies": { + "@aws-sdk/types": "3.775.0", + "@smithy/node-config-provider": "^4.0.2", + "@smithy/types": "^4.2.0", + "@smithy/util-config-provider": "^4.0.0", + "@smithy/util-middleware": "^4.0.2", + "tslib": "^2.6.2" + }, + "devDependencies": { + "@tsconfig/recommended": "1.0.1", + "concurrently": "7.0.0", + "downlevel-dts": "0.10.1", + "jest": "28.1.1", + "rimraf": "3.0.2", + "typescript": "~5.2.2" + }, + "engines": { + "node": ">=18.0.0" + }, + "typesVersions": { + "<4.0": { + "dist-types/*": [ + "dist-types/ts3.4/*" + ] + } + }, + "files": [ + "dist-*/**" + ], + "homepage": "https://github.com/awslabs/smithy-typescript/tree/main/packages/region-config-resolver", + "repository": { + "type": "git", + "url": "https://github.com/aws/aws-sdk-js-v3.git", + "directory": "packages/region-config-resolver" + } +} diff --git a/amplify/functions/deleteDocument/node_modules/@aws-sdk/token-providers/LICENSE b/amplify/functions/deleteDocument/node_modules/@aws-sdk/token-providers/LICENSE new file mode 100644 index 0000000..7b6491b --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@aws-sdk/token-providers/LICENSE @@ -0,0 +1,201 @@ +Apache License + Version 2.0, January 2004 + http://www.apache.org/licenses/ + + TERMS AND CONDITIONS FOR USE, 
REPRODUCTION, AND DISTRIBUTION + + 1. Definitions. + + "License" shall mean the terms and conditions for use, reproduction, + and distribution as defined by Sections 1 through 9 of this document. + + "Licensor" shall mean the copyright owner or entity authorized by + the copyright owner that is granting the License. + + "Legal Entity" shall mean the union of the acting entity and all + other entities that control, are controlled by, or are under common + control with that entity. For the purposes of this definition, + "control" means (i) the power, direct or indirect, to cause the + direction or management of such entity, whether by contract or + otherwise, or (ii) ownership of fifty percent (50%) or more of the + outstanding shares, or (iii) beneficial ownership of such entity. + + "You" (or "Your") shall mean an individual or Legal Entity + exercising permissions granted by this License. + + "Source" form shall mean the preferred form for making modifications, + including but not limited to software source code, documentation + source, and configuration files. + + "Object" form shall mean any form resulting from mechanical + transformation or translation of a Source form, including but + not limited to compiled object code, generated documentation, + and conversions to other media types. + + "Work" shall mean the work of authorship, whether in Source or + Object form, made available under the License, as indicated by a + copyright notice that is included in or attached to the work + (an example is provided in the Appendix below). + + "Derivative Works" shall mean any work, whether in Source or Object + form, that is based on (or derived from) the Work and for which the + editorial revisions, annotations, elaborations, or other modifications + represent, as a whole, an original work of authorship. 
For the purposes + of this License, Derivative Works shall not include works that remain + separable from, or merely link (or bind by name) to the interfaces of, + the Work and Derivative Works thereof. + + "Contribution" shall mean any work of authorship, including + the original version of the Work and any modifications or additions + to that Work or Derivative Works thereof, that is intentionally + submitted to Licensor for inclusion in the Work by the copyright owner + or by an individual or Legal Entity authorized to submit on behalf of + the copyright owner. For the purposes of this definition, "submitted" + means any form of electronic, verbal, or written communication sent + to the Licensor or its representatives, including but not limited to + communication on electronic mailing lists, source code control systems, + and issue tracking systems that are managed by, or on behalf of, the + Licensor for the purpose of discussing and improving the Work, but + excluding communication that is conspicuously marked or otherwise + designated in writing by the copyright owner as "Not a Contribution." + + "Contributor" shall mean Licensor and any individual or Legal Entity + on behalf of whom a Contribution has been received by Licensor and + subsequently incorporated within the Work. + + 2. Grant of Copyright License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + copyright license to reproduce, prepare Derivative Works of, + publicly display, publicly perform, sublicense, and distribute the + Work and such Derivative Works in Source or Object form. + + 3. Grant of Patent License. 
Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + (except as stated in this section) patent license to make, have made, + use, offer to sell, sell, import, and otherwise transfer the Work, + where such license applies only to those patent claims licensable + by such Contributor that are necessarily infringed by their + Contribution(s) alone or by combination of their Contribution(s) + with the Work to which such Contribution(s) was submitted. If You + institute patent litigation against any entity (including a + cross-claim or counterclaim in a lawsuit) alleging that the Work + or a Contribution incorporated within the Work constitutes direct + or contributory patent infringement, then any patent licenses + granted to You under this License for that Work shall terminate + as of the date such litigation is filed. + + 4. Redistribution. You may reproduce and distribute copies of the + Work or Derivative Works thereof in any medium, with or without + modifications, and in Source or Object form, provided that You + meet the following conditions: + + (a) You must give any other recipients of the Work or + Derivative Works a copy of this License; and + + (b) You must cause any modified files to carry prominent notices + stating that You changed the files; and + + (c) You must retain, in the Source form of any Derivative Works + that You distribute, all copyright, patent, trademark, and + attribution notices from the Source form of the Work, + excluding those notices that do not pertain to any part of + the Derivative Works; and + + (d) If the Work includes a "NOTICE" text file as part of its + distribution, then any Derivative Works that You distribute must + include a readable copy of the attribution notices contained + within such NOTICE file, excluding those notices that do not + pertain to any part of the Derivative Works, in at least one + of 
the following places: within a NOTICE text file distributed + as part of the Derivative Works; within the Source form or + documentation, if provided along with the Derivative Works; or, + within a display generated by the Derivative Works, if and + wherever such third-party notices normally appear. The contents + of the NOTICE file are for informational purposes only and + do not modify the License. You may add Your own attribution + notices within Derivative Works that You distribute, alongside + or as an addendum to the NOTICE text from the Work, provided + that such additional attribution notices cannot be construed + as modifying the License. + + You may add Your own copyright statement to Your modifications and + may provide additional or different license terms and conditions + for use, reproduction, or distribution of Your modifications, or + for any such Derivative Works as a whole, provided Your use, + reproduction, and distribution of the Work otherwise complies with + the conditions stated in this License. + + 5. Submission of Contributions. Unless You explicitly state otherwise, + any Contribution intentionally submitted for inclusion in the Work + by You to the Licensor shall be under the terms and conditions of + this License, without any additional terms or conditions. + Notwithstanding the above, nothing herein shall supersede or modify + the terms of any separate license agreement you may have executed + with Licensor regarding such Contributions. + + 6. Trademarks. This License does not grant permission to use the trade + names, trademarks, service marks, or product names of the Licensor, + except as required for reasonable and customary use in describing the + origin of the Work and reproducing the content of the NOTICE file. + + 7. Disclaimer of Warranty. 
Unless required by applicable law or + agreed to in writing, Licensor provides the Work (and each + Contributor provides its Contributions) on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or + implied, including, without limitation, any warranties or conditions + of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A + PARTICULAR PURPOSE. You are solely responsible for determining the + appropriateness of using or redistributing the Work and assume any + risks associated with Your exercise of permissions under this License. + + 8. Limitation of Liability. In no event and under no legal theory, + whether in tort (including negligence), contract, or otherwise, + unless required by applicable law (such as deliberate and grossly + negligent acts) or agreed to in writing, shall any Contributor be + liable to You for damages, including any direct, indirect, special, + incidental, or consequential damages of any character arising as a + result of this License or out of the use or inability to use the + Work (including but not limited to damages for loss of goodwill, + work stoppage, computer failure or malfunction, or any and all + other commercial damages or losses), even if such Contributor + has been advised of the possibility of such damages. + + 9. Accepting Warranty or Additional Liability. While redistributing + the Work or Derivative Works thereof, You may choose to offer, + and charge a fee for, acceptance of support, warranty, indemnity, + or other liability obligations and/or rights consistent with this + License. However, in accepting such obligations, You may act only + on Your own behalf and on Your sole responsibility, not on behalf + of any other Contributor, and only if You agree to indemnify, + defend, and hold each Contributor harmless for any liability + incurred by, or claims asserted against, such Contributor by reason + of your accepting any such warranty or additional liability. 
+ + END OF TERMS AND CONDITIONS + + APPENDIX: How to apply the Apache License to your work. + + To apply the Apache License to your work, attach the following + boilerplate notice, with the fields enclosed by brackets "{}" + replaced with your own identifying information. (Don't include + the brackets!) The text should be enclosed in the appropriate + comment syntax for the file format. We also recommend that a + file or class name and description of purpose be included on the + same "printed page" as the copyright notice for easier + identification within third-party archives. + + Copyright 2018-2020 Amazon.com, Inc. or its affiliates. All Rights Reserved. + + Licensed under the Apache License, Version 2.0 (the "License"); + you may not use this file except in compliance with the License. + You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + + Unless required by applicable law or agreed to in writing, software + distributed under the License is distributed on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + See the License for the specific language governing permissions and + limitations under the License. \ No newline at end of file diff --git a/amplify/functions/deleteDocument/node_modules/@aws-sdk/token-providers/README.md b/amplify/functions/deleteDocument/node_modules/@aws-sdk/token-providers/README.md new file mode 100644 index 0000000..9078019 --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@aws-sdk/token-providers/README.md @@ -0,0 +1,53 @@ +# @aws-sdk/token-providers + +[![NPM version](https://img.shields.io/npm/v/@aws-sdk/token-providers/latest.svg)](https://www.npmjs.com/package/@aws-sdk/token-providers) +[![NPM downloads](https://img.shields.io/npm/dm/@aws-sdk/token-providers.svg)](https://www.npmjs.com/package/@aws-sdk/token-providers) + +A collection of all token providers. 
The token providers should be used when the authorization +type is going to be token based. For example, the `bearer` authorization type set using +[httpBearerAuth trait][http-bearer-auth-trait] in Smithy. + +## Static Token Provider + +```ts +import { fromStatic } from "@aws-sdk/token-providers"; + +const token = { token: "TOKEN" }; +const staticTokenProvider = fromStatic(token); + +const staticToken = await staticTokenProvider(); // returns { token: "TOKEN" } +``` + +## SSO Token Provider + +```ts +import { fromSso } from "@aws-sdk/token-providers"; + +// returns token from SSO token cache or ssoOidc.createToken() call. +const ssoToken = await fromSso(); +``` + +## Token Provider Chain + +```ts +import { nodeProvider } from "@aws-sdk/token-providers"; + +// returns token from default providers. +const token = await nodeProvider(); +``` + +[http-bearer-auth-trait]: https://smithy.io/2.0/spec/authentication-traits.html#smithy-api-httpbearerauth-trait + +--- + +### Development + +This package contains a minimal copy of the SSO OIDC client, instead of relying on the full client, which +would cause a circular dependency. + +When regenerating the bundled version of the SSO OIDC client, run the esbuild.js script and then make the following changes: + +- Remove any dependency of the generated client on the credential chain such that it would create + a circular dependency back to this package. Because we only need the `CreateTokenCommand`, the client, and this command's + associated `Exception`s, it is possible to remove auth dependencies. +- Ensure all required packages are declared in the `package.json` of token-providers. 
diff --git a/amplify/functions/deleteDocument/node_modules/@aws-sdk/token-providers/dist-cjs/index.js b/amplify/functions/deleteDocument/node_modules/@aws-sdk/token-providers/dist-cjs/index.js new file mode 100644 index 0000000..51a38df --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@aws-sdk/token-providers/dist-cjs/index.js @@ -0,0 +1,217 @@ +"use strict"; +var __create = Object.create; +var __defProp = Object.defineProperty; +var __getOwnPropDesc = Object.getOwnPropertyDescriptor; +var __getOwnPropNames = Object.getOwnPropertyNames; +var __getProtoOf = Object.getPrototypeOf; +var __hasOwnProp = Object.prototype.hasOwnProperty; +var __name = (target, value) => __defProp(target, "name", { value, configurable: true }); +var __export = (target, all) => { + for (var name in all) + __defProp(target, name, { get: all[name], enumerable: true }); +}; +var __copyProps = (to, from, except, desc) => { + if (from && typeof from === "object" || typeof from === "function") { + for (let key of __getOwnPropNames(from)) + if (!__hasOwnProp.call(to, key) && key !== except) + __defProp(to, key, { get: () => from[key], enumerable: !(desc = __getOwnPropDesc(from, key)) || desc.enumerable }); + } + return to; +}; +var __toESM = (mod, isNodeMode, target) => (target = mod != null ? __create(__getProtoOf(mod)) : {}, __copyProps( + // If the importer is in node compatibility mode or this is not an ESM + // file that has been converted to a CommonJS file using a Babel- + // compatible transform (i.e. "__esModule" has not been set), then set + // "default" to the CommonJS "module.exports" for node compatibility. + isNodeMode || !mod || !mod.__esModule ? 
__defProp(target, "default", { value: mod, enumerable: true }) : target, + mod +)); +var __toCommonJS = (mod) => __copyProps(__defProp({}, "__esModule", { value: true }), mod); + +// src/index.ts +var index_exports = {}; +__export(index_exports, { + fromSso: () => fromSso, + fromStatic: () => fromStatic, + nodeProvider: () => nodeProvider +}); +module.exports = __toCommonJS(index_exports); + +// src/fromSso.ts + + + +// src/constants.ts +var EXPIRE_WINDOW_MS = 5 * 60 * 1e3; +var REFRESH_MESSAGE = `To refresh this SSO session run 'aws sso login' with the corresponding profile.`; + +// src/getSsoOidcClient.ts +var getSsoOidcClient = /* @__PURE__ */ __name(async (ssoRegion, init = {}) => { + const { SSOOIDCClient } = await Promise.resolve().then(() => __toESM(require("@aws-sdk/nested-clients/sso-oidc"))); + const ssoOidcClient = new SSOOIDCClient( + Object.assign({}, init.clientConfig ?? {}, { + region: ssoRegion ?? init.clientConfig?.region, + logger: init.clientConfig?.logger ?? init.parentClientConfig?.logger + }) + ); + return ssoOidcClient; +}, "getSsoOidcClient"); + +// src/getNewSsoOidcToken.ts +var getNewSsoOidcToken = /* @__PURE__ */ __name(async (ssoToken, ssoRegion, init = {}) => { + const { CreateTokenCommand } = await Promise.resolve().then(() => __toESM(require("@aws-sdk/nested-clients/sso-oidc"))); + const ssoOidcClient = await getSsoOidcClient(ssoRegion, init); + return ssoOidcClient.send( + new CreateTokenCommand({ + clientId: ssoToken.clientId, + clientSecret: ssoToken.clientSecret, + refreshToken: ssoToken.refreshToken, + grantType: "refresh_token" + }) + ); +}, "getNewSsoOidcToken"); + +// src/validateTokenExpiry.ts +var import_property_provider = require("@smithy/property-provider"); +var validateTokenExpiry = /* @__PURE__ */ __name((token) => { + if (token.expiration && token.expiration.getTime() < Date.now()) { + throw new import_property_provider.TokenProviderError(`Token is expired. 
${REFRESH_MESSAGE}`, false); + } +}, "validateTokenExpiry"); + +// src/validateTokenKey.ts + +var validateTokenKey = /* @__PURE__ */ __name((key, value, forRefresh = false) => { + if (typeof value === "undefined") { + throw new import_property_provider.TokenProviderError( + `Value not present for '${key}' in SSO Token${forRefresh ? ". Cannot refresh" : ""}. ${REFRESH_MESSAGE}`, + false + ); + } +}, "validateTokenKey"); + +// src/writeSSOTokenToFile.ts +var import_shared_ini_file_loader = require("@smithy/shared-ini-file-loader"); +var import_fs = require("fs"); +var { writeFile } = import_fs.promises; +var writeSSOTokenToFile = /* @__PURE__ */ __name((id, ssoToken) => { + const tokenFilepath = (0, import_shared_ini_file_loader.getSSOTokenFilepath)(id); + const tokenString = JSON.stringify(ssoToken, null, 2); + return writeFile(tokenFilepath, tokenString); +}, "writeSSOTokenToFile"); + +// src/fromSso.ts +var lastRefreshAttemptTime = /* @__PURE__ */ new Date(0); +var fromSso = /* @__PURE__ */ __name((_init = {}) => async ({ callerClientConfig } = {}) => { + const init = { + ..._init, + parentClientConfig: { + ...callerClientConfig, + ..._init.parentClientConfig + } + }; + init.logger?.debug("@aws-sdk/token-providers - fromSso"); + const profiles = await (0, import_shared_ini_file_loader.parseKnownFiles)(init); + const profileName = (0, import_shared_ini_file_loader.getProfileName)({ + profile: init.profile ?? 
callerClientConfig?.profile + }); + const profile = profiles[profileName]; + if (!profile) { + throw new import_property_provider.TokenProviderError(`Profile '${profileName}' could not be found in shared credentials file.`, false); + } else if (!profile["sso_session"]) { + throw new import_property_provider.TokenProviderError(`Profile '${profileName}' is missing required property 'sso_session'.`); + } + const ssoSessionName = profile["sso_session"]; + const ssoSessions = await (0, import_shared_ini_file_loader.loadSsoSessionData)(init); + const ssoSession = ssoSessions[ssoSessionName]; + if (!ssoSession) { + throw new import_property_provider.TokenProviderError( + `Sso session '${ssoSessionName}' could not be found in shared credentials file.`, + false + ); + } + for (const ssoSessionRequiredKey of ["sso_start_url", "sso_region"]) { + if (!ssoSession[ssoSessionRequiredKey]) { + throw new import_property_provider.TokenProviderError( + `Sso session '${ssoSessionName}' is missing required property '${ssoSessionRequiredKey}'.`, + false + ); + } + } + const ssoStartUrl = ssoSession["sso_start_url"]; + const ssoRegion = ssoSession["sso_region"]; + let ssoToken; + try { + ssoToken = await (0, import_shared_ini_file_loader.getSSOTokenFromFile)(ssoSessionName); + } catch (e) { + throw new import_property_provider.TokenProviderError( + `The SSO session token associated with profile=${profileName} was not found or is invalid. 
${REFRESH_MESSAGE}`, + false + ); + } + validateTokenKey("accessToken", ssoToken.accessToken); + validateTokenKey("expiresAt", ssoToken.expiresAt); + const { accessToken, expiresAt } = ssoToken; + const existingToken = { token: accessToken, expiration: new Date(expiresAt) }; + if (existingToken.expiration.getTime() - Date.now() > EXPIRE_WINDOW_MS) { + return existingToken; + } + if (Date.now() - lastRefreshAttemptTime.getTime() < 30 * 1e3) { + validateTokenExpiry(existingToken); + return existingToken; + } + validateTokenKey("clientId", ssoToken.clientId, true); + validateTokenKey("clientSecret", ssoToken.clientSecret, true); + validateTokenKey("refreshToken", ssoToken.refreshToken, true); + try { + lastRefreshAttemptTime.setTime(Date.now()); + const newSsoOidcToken = await getNewSsoOidcToken(ssoToken, ssoRegion, init); + validateTokenKey("accessToken", newSsoOidcToken.accessToken); + validateTokenKey("expiresIn", newSsoOidcToken.expiresIn); + const newTokenExpiration = new Date(Date.now() + newSsoOidcToken.expiresIn * 1e3); + try { + await writeSSOTokenToFile(ssoSessionName, { + ...ssoToken, + accessToken: newSsoOidcToken.accessToken, + expiresAt: newTokenExpiration.toISOString(), + refreshToken: newSsoOidcToken.refreshToken + }); + } catch (error) { + } + return { + token: newSsoOidcToken.accessToken, + expiration: newTokenExpiration + }; + } catch (error) { + validateTokenExpiry(existingToken); + return existingToken; + } +}, "fromSso"); + +// src/fromStatic.ts + +var fromStatic = /* @__PURE__ */ __name(({ token, logger }) => async () => { + logger?.debug("@aws-sdk/token-providers - fromStatic"); + if (!token || !token.token) { + throw new import_property_provider.TokenProviderError(`Please pass a valid token to fromStatic`, false); + } + return token; +}, "fromStatic"); + +// src/nodeProvider.ts + +var nodeProvider = /* @__PURE__ */ __name((init = {}) => (0, import_property_provider.memoize)( + (0, import_property_provider.chain)(fromSso(init), async () => { + 
throw new import_property_provider.TokenProviderError("Could not load token from any providers", false); + }), + (token) => token.expiration !== void 0 && token.expiration.getTime() - Date.now() < 3e5, + (token) => token.expiration !== void 0 +), "nodeProvider"); +// Annotate the CommonJS export names for ESM import in node: + +0 && (module.exports = { + fromSso, + fromStatic, + nodeProvider +}); + diff --git a/amplify/functions/deleteDocument/node_modules/@aws-sdk/token-providers/dist-es/constants.js b/amplify/functions/deleteDocument/node_modules/@aws-sdk/token-providers/dist-es/constants.js new file mode 100644 index 0000000..b84a126 --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@aws-sdk/token-providers/dist-es/constants.js @@ -0,0 +1,2 @@ +export const EXPIRE_WINDOW_MS = 5 * 60 * 1000; +export const REFRESH_MESSAGE = `To refresh this SSO session run 'aws sso login' with the corresponding profile.`; diff --git a/amplify/functions/deleteDocument/node_modules/@aws-sdk/token-providers/dist-es/fromSso.js b/amplify/functions/deleteDocument/node_modules/@aws-sdk/token-providers/dist-es/fromSso.js new file mode 100644 index 0000000..61d2075 --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@aws-sdk/token-providers/dist-es/fromSso.js @@ -0,0 +1,88 @@ +import { TokenProviderError } from "@smithy/property-provider"; +import { getProfileName, getSSOTokenFromFile, loadSsoSessionData, parseKnownFiles, } from "@smithy/shared-ini-file-loader"; +import { EXPIRE_WINDOW_MS, REFRESH_MESSAGE } from "./constants"; +import { getNewSsoOidcToken } from "./getNewSsoOidcToken"; +import { validateTokenExpiry } from "./validateTokenExpiry"; +import { validateTokenKey } from "./validateTokenKey"; +import { writeSSOTokenToFile } from "./writeSSOTokenToFile"; +const lastRefreshAttemptTime = new Date(0); +export const fromSso = (_init = {}) => async ({ callerClientConfig } = {}) => { + const init = { + ..._init, + parentClientConfig: { + 
...callerClientConfig, + ..._init.parentClientConfig, + }, + }; + init.logger?.debug("@aws-sdk/token-providers - fromSso"); + const profiles = await parseKnownFiles(init); + const profileName = getProfileName({ + profile: init.profile ?? callerClientConfig?.profile, + }); + const profile = profiles[profileName]; + if (!profile) { + throw new TokenProviderError(`Profile '${profileName}' could not be found in shared credentials file.`, false); + } + else if (!profile["sso_session"]) { + throw new TokenProviderError(`Profile '${profileName}' is missing required property 'sso_session'.`); + } + const ssoSessionName = profile["sso_session"]; + const ssoSessions = await loadSsoSessionData(init); + const ssoSession = ssoSessions[ssoSessionName]; + if (!ssoSession) { + throw new TokenProviderError(`Sso session '${ssoSessionName}' could not be found in shared credentials file.`, false); + } + for (const ssoSessionRequiredKey of ["sso_start_url", "sso_region"]) { + if (!ssoSession[ssoSessionRequiredKey]) { + throw new TokenProviderError(`Sso session '${ssoSessionName}' is missing required property '${ssoSessionRequiredKey}'.`, false); + } + } + const ssoStartUrl = ssoSession["sso_start_url"]; + const ssoRegion = ssoSession["sso_region"]; + let ssoToken; + try { + ssoToken = await getSSOTokenFromFile(ssoSessionName); + } + catch (e) { + throw new TokenProviderError(`The SSO session token associated with profile=${profileName} was not found or is invalid. 
${REFRESH_MESSAGE}`, false); + } + validateTokenKey("accessToken", ssoToken.accessToken); + validateTokenKey("expiresAt", ssoToken.expiresAt); + const { accessToken, expiresAt } = ssoToken; + const existingToken = { token: accessToken, expiration: new Date(expiresAt) }; + if (existingToken.expiration.getTime() - Date.now() > EXPIRE_WINDOW_MS) { + return existingToken; + } + if (Date.now() - lastRefreshAttemptTime.getTime() < 30 * 1000) { + validateTokenExpiry(existingToken); + return existingToken; + } + validateTokenKey("clientId", ssoToken.clientId, true); + validateTokenKey("clientSecret", ssoToken.clientSecret, true); + validateTokenKey("refreshToken", ssoToken.refreshToken, true); + try { + lastRefreshAttemptTime.setTime(Date.now()); + const newSsoOidcToken = await getNewSsoOidcToken(ssoToken, ssoRegion, init); + validateTokenKey("accessToken", newSsoOidcToken.accessToken); + validateTokenKey("expiresIn", newSsoOidcToken.expiresIn); + const newTokenExpiration = new Date(Date.now() + newSsoOidcToken.expiresIn * 1000); + try { + await writeSSOTokenToFile(ssoSessionName, { + ...ssoToken, + accessToken: newSsoOidcToken.accessToken, + expiresAt: newTokenExpiration.toISOString(), + refreshToken: newSsoOidcToken.refreshToken, + }); + } + catch (error) { + } + return { + token: newSsoOidcToken.accessToken, + expiration: newTokenExpiration, + }; + } + catch (error) { + validateTokenExpiry(existingToken); + return existingToken; + } +}; diff --git a/amplify/functions/deleteDocument/node_modules/@aws-sdk/token-providers/dist-es/fromStatic.js b/amplify/functions/deleteDocument/node_modules/@aws-sdk/token-providers/dist-es/fromStatic.js new file mode 100644 index 0000000..0704ae0 --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@aws-sdk/token-providers/dist-es/fromStatic.js @@ -0,0 +1,8 @@ +import { TokenProviderError } from "@smithy/property-provider"; +export const fromStatic = ({ token, logger }) => async () => { + 
logger?.debug("@aws-sdk/token-providers - fromStatic"); + if (!token || !token.token) { + throw new TokenProviderError(`Please pass a valid token to fromStatic`, false); + } + return token; +}; diff --git a/amplify/functions/deleteDocument/node_modules/@aws-sdk/token-providers/dist-es/getNewSsoOidcToken.js b/amplify/functions/deleteDocument/node_modules/@aws-sdk/token-providers/dist-es/getNewSsoOidcToken.js new file mode 100644 index 0000000..00f7b2c --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@aws-sdk/token-providers/dist-es/getNewSsoOidcToken.js @@ -0,0 +1,11 @@ +import { getSsoOidcClient } from "./getSsoOidcClient"; +export const getNewSsoOidcToken = async (ssoToken, ssoRegion, init = {}) => { + const { CreateTokenCommand } = await import("@aws-sdk/nested-clients/sso-oidc"); + const ssoOidcClient = await getSsoOidcClient(ssoRegion, init); + return ssoOidcClient.send(new CreateTokenCommand({ + clientId: ssoToken.clientId, + clientSecret: ssoToken.clientSecret, + refreshToken: ssoToken.refreshToken, + grantType: "refresh_token", + })); +}; diff --git a/amplify/functions/deleteDocument/node_modules/@aws-sdk/token-providers/dist-es/getSsoOidcClient.js b/amplify/functions/deleteDocument/node_modules/@aws-sdk/token-providers/dist-es/getSsoOidcClient.js new file mode 100644 index 0000000..689be72 --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@aws-sdk/token-providers/dist-es/getSsoOidcClient.js @@ -0,0 +1,8 @@ +export const getSsoOidcClient = async (ssoRegion, init = {}) => { + const { SSOOIDCClient } = await import("@aws-sdk/nested-clients/sso-oidc"); + const ssoOidcClient = new SSOOIDCClient(Object.assign({}, init.clientConfig ?? {}, { + region: ssoRegion ?? init.clientConfig?.region, + logger: init.clientConfig?.logger ?? 
init.parentClientConfig?.logger, + })); + return ssoOidcClient; +}; diff --git a/amplify/functions/deleteDocument/node_modules/@aws-sdk/token-providers/dist-es/index.js b/amplify/functions/deleteDocument/node_modules/@aws-sdk/token-providers/dist-es/index.js new file mode 100644 index 0000000..a0b176b --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@aws-sdk/token-providers/dist-es/index.js @@ -0,0 +1,3 @@ +export * from "./fromSso"; +export * from "./fromStatic"; +export * from "./nodeProvider"; diff --git a/amplify/functions/deleteDocument/node_modules/@aws-sdk/token-providers/dist-es/nodeProvider.js b/amplify/functions/deleteDocument/node_modules/@aws-sdk/token-providers/dist-es/nodeProvider.js new file mode 100644 index 0000000..a0c7b52 --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@aws-sdk/token-providers/dist-es/nodeProvider.js @@ -0,0 +1,5 @@ +import { chain, memoize, TokenProviderError } from "@smithy/property-provider"; +import { fromSso } from "./fromSso"; +export const nodeProvider = (init = {}) => memoize(chain(fromSso(init), async () => { + throw new TokenProviderError("Could not load token from any providers", false); +}), (token) => token.expiration !== undefined && token.expiration.getTime() - Date.now() < 300000, (token) => token.expiration !== undefined); diff --git a/amplify/functions/deleteDocument/node_modules/@aws-sdk/token-providers/dist-es/validateTokenExpiry.js b/amplify/functions/deleteDocument/node_modules/@aws-sdk/token-providers/dist-es/validateTokenExpiry.js new file mode 100644 index 0000000..8118d7c --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@aws-sdk/token-providers/dist-es/validateTokenExpiry.js @@ -0,0 +1,7 @@ +import { TokenProviderError } from "@smithy/property-provider"; +import { REFRESH_MESSAGE } from "./constants"; +export const validateTokenExpiry = (token) => { + if (token.expiration && token.expiration.getTime() < Date.now()) { + throw new 
TokenProviderError(`Token is expired. ${REFRESH_MESSAGE}`, false); + } +}; diff --git a/amplify/functions/deleteDocument/node_modules/@aws-sdk/token-providers/dist-es/validateTokenKey.js b/amplify/functions/deleteDocument/node_modules/@aws-sdk/token-providers/dist-es/validateTokenKey.js new file mode 100644 index 0000000..4979638 --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@aws-sdk/token-providers/dist-es/validateTokenKey.js @@ -0,0 +1,7 @@ +import { TokenProviderError } from "@smithy/property-provider"; +import { REFRESH_MESSAGE } from "./constants"; +export const validateTokenKey = (key, value, forRefresh = false) => { + if (typeof value === "undefined") { + throw new TokenProviderError(`Value not present for '${key}' in SSO Token${forRefresh ? ". Cannot refresh" : ""}. ${REFRESH_MESSAGE}`, false); + } +}; diff --git a/amplify/functions/deleteDocument/node_modules/@aws-sdk/token-providers/dist-es/writeSSOTokenToFile.js b/amplify/functions/deleteDocument/node_modules/@aws-sdk/token-providers/dist-es/writeSSOTokenToFile.js new file mode 100644 index 0000000..6da2c9b --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@aws-sdk/token-providers/dist-es/writeSSOTokenToFile.js @@ -0,0 +1,8 @@ +import { getSSOTokenFilepath } from "@smithy/shared-ini-file-loader"; +import { promises as fsPromises } from "fs"; +const { writeFile } = fsPromises; +export const writeSSOTokenToFile = (id, ssoToken) => { + const tokenFilepath = getSSOTokenFilepath(id); + const tokenString = JSON.stringify(ssoToken, null, 2); + return writeFile(tokenFilepath, tokenString); +}; diff --git a/amplify/functions/deleteDocument/node_modules/@aws-sdk/token-providers/dist-types/constants.d.ts b/amplify/functions/deleteDocument/node_modules/@aws-sdk/token-providers/dist-types/constants.d.ts new file mode 100644 index 0000000..de28cde --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@aws-sdk/token-providers/dist-types/constants.d.ts @@ -0,0 +1,8 @@ +/** 
+ * The time window (5 mins) that SDK will treat the SSO token expires in before the defined expiration date in token. + * This is needed because server side may have invalidated the token before the defined expiration date. + * + * @internal + */ +export declare const EXPIRE_WINDOW_MS: number; +export declare const REFRESH_MESSAGE = "To refresh this SSO session run 'aws sso login' with the corresponding profile."; diff --git a/amplify/functions/deleteDocument/node_modules/@aws-sdk/token-providers/dist-types/fromSso.d.ts b/amplify/functions/deleteDocument/node_modules/@aws-sdk/token-providers/dist-types/fromSso.d.ts new file mode 100644 index 0000000..03f5359 --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@aws-sdk/token-providers/dist-types/fromSso.d.ts @@ -0,0 +1,12 @@ +import { CredentialProviderOptions, RuntimeConfigIdentityProvider, TokenIdentity } from "@aws-sdk/types"; +import { SourceProfileInit } from "@smithy/shared-ini-file-loader"; +export interface FromSsoInit extends SourceProfileInit, CredentialProviderOptions { + /** + * @see SSOOIDCClientConfig in \@aws-sdk/client-sso-oidc. + */ + clientConfig?: any; +} +/** + * Creates a token provider that will read from SSO token cache or ssoOidc.createToken() call. 
+ */ +export declare const fromSso: (_init?: FromSsoInit) => RuntimeConfigIdentityProvider; diff --git a/amplify/functions/deleteDocument/node_modules/@aws-sdk/token-providers/dist-types/fromStatic.d.ts b/amplify/functions/deleteDocument/node_modules/@aws-sdk/token-providers/dist-types/fromStatic.d.ts new file mode 100644 index 0000000..d496172 --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@aws-sdk/token-providers/dist-types/fromStatic.d.ts @@ -0,0 +1,9 @@ +import { CredentialProviderOptions, TokenIdentity, TokenIdentityProvider } from "@aws-sdk/types"; +export interface FromStaticInit extends CredentialProviderOptions { + token?: TokenIdentity; +} +/** + * Creates a token provider that will read from static token. + * @public + */ +export declare const fromStatic: ({ token, logger }: FromStaticInit) => TokenIdentityProvider; diff --git a/amplify/functions/deleteDocument/node_modules/@aws-sdk/token-providers/dist-types/getNewSsoOidcToken.d.ts b/amplify/functions/deleteDocument/node_modules/@aws-sdk/token-providers/dist-types/getNewSsoOidcToken.d.ts new file mode 100644 index 0000000..75c6322 --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@aws-sdk/token-providers/dist-types/getNewSsoOidcToken.d.ts @@ -0,0 +1,8 @@ +/// +import { SSOToken } from "@smithy/shared-ini-file-loader"; +import { FromSsoInit } from "./fromSso"; +/** + * Returns a new SSO OIDC token from ssoOids.createToken() API call. 
+ * @internal + */ +export declare const getNewSsoOidcToken: (ssoToken: SSOToken, ssoRegion: string, init?: FromSsoInit) => Promise; diff --git a/amplify/functions/deleteDocument/node_modules/@aws-sdk/token-providers/dist-types/getSsoOidcClient.d.ts b/amplify/functions/deleteDocument/node_modules/@aws-sdk/token-providers/dist-types/getSsoOidcClient.d.ts new file mode 100644 index 0000000..5c9dcb4 --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@aws-sdk/token-providers/dist-types/getSsoOidcClient.d.ts @@ -0,0 +1,7 @@ +/// +import { FromSsoInit } from "./fromSso"; +/** + * Returns a SSOOIDC client for the given region. + * @internal + */ +export declare const getSsoOidcClient: (ssoRegion: string, init?: FromSsoInit) => Promise; diff --git a/amplify/functions/deleteDocument/node_modules/@aws-sdk/token-providers/dist-types/index.d.ts b/amplify/functions/deleteDocument/node_modules/@aws-sdk/token-providers/dist-types/index.d.ts new file mode 100644 index 0000000..a0b176b --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@aws-sdk/token-providers/dist-types/index.d.ts @@ -0,0 +1,3 @@ +export * from "./fromSso"; +export * from "./fromStatic"; +export * from "./nodeProvider"; diff --git a/amplify/functions/deleteDocument/node_modules/@aws-sdk/token-providers/dist-types/nodeProvider.d.ts b/amplify/functions/deleteDocument/node_modules/@aws-sdk/token-providers/dist-types/nodeProvider.d.ts new file mode 100644 index 0000000..e4846ec --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@aws-sdk/token-providers/dist-types/nodeProvider.d.ts @@ -0,0 +1,18 @@ +import { TokenIdentityProvider } from "@aws-sdk/types"; +import { FromSsoInit } from "./fromSso"; +/** + * Creates a token provider that will attempt to find token from the + * following sources (listed in order of precedence): + * * SSO token from SSO cache or ssoOidc.createToken() call + * + * The default token provider is designed to invoke one provider at a time and only + * 
continue to the next if no token has been located. It currently has only SSO + * Token Provider in the chain. + * + * @param init Configuration that is passed to each individual + * provider + * + * @see fromSso The function used to source credentials from + * SSO cache or ssoOidc.createToken() call + */ +export declare const nodeProvider: (init?: FromSsoInit) => TokenIdentityProvider; diff --git a/amplify/functions/deleteDocument/node_modules/@aws-sdk/token-providers/dist-types/ts3.4/constants.d.ts b/amplify/functions/deleteDocument/node_modules/@aws-sdk/token-providers/dist-types/ts3.4/constants.d.ts new file mode 100644 index 0000000..d7e7577 --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@aws-sdk/token-providers/dist-types/ts3.4/constants.d.ts @@ -0,0 +1,3 @@ +export declare const EXPIRE_WINDOW_MS: number; +export declare const REFRESH_MESSAGE = + "To refresh this SSO session run 'aws sso login' with the corresponding profile."; diff --git a/amplify/functions/deleteDocument/node_modules/@aws-sdk/token-providers/dist-types/ts3.4/fromSso.d.ts b/amplify/functions/deleteDocument/node_modules/@aws-sdk/token-providers/dist-types/ts3.4/fromSso.d.ts new file mode 100644 index 0000000..3b5bb60 --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@aws-sdk/token-providers/dist-types/ts3.4/fromSso.d.ts @@ -0,0 +1,14 @@ +import { + CredentialProviderOptions, + RuntimeConfigIdentityProvider, + TokenIdentity, +} from "@aws-sdk/types"; +import { SourceProfileInit } from "@smithy/shared-ini-file-loader"; +export interface FromSsoInit + extends SourceProfileInit, + CredentialProviderOptions { + clientConfig?: any; +} +export declare const fromSso: ( + _init?: FromSsoInit +) => RuntimeConfigIdentityProvider; diff --git a/amplify/functions/deleteDocument/node_modules/@aws-sdk/token-providers/dist-types/ts3.4/fromStatic.d.ts b/amplify/functions/deleteDocument/node_modules/@aws-sdk/token-providers/dist-types/ts3.4/fromStatic.d.ts new file mode 100644 
index 0000000..e680012 --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@aws-sdk/token-providers/dist-types/ts3.4/fromStatic.d.ts @@ -0,0 +1,12 @@ +import { + CredentialProviderOptions, + TokenIdentity, + TokenIdentityProvider, +} from "@aws-sdk/types"; +export interface FromStaticInit extends CredentialProviderOptions { + token?: TokenIdentity; +} +export declare const fromStatic: ({ + token, + logger, +}: FromStaticInit) => TokenIdentityProvider; diff --git a/amplify/functions/deleteDocument/node_modules/@aws-sdk/token-providers/dist-types/ts3.4/getNewSsoOidcToken.d.ts b/amplify/functions/deleteDocument/node_modules/@aws-sdk/token-providers/dist-types/ts3.4/getNewSsoOidcToken.d.ts new file mode 100644 index 0000000..6bcd71d --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@aws-sdk/token-providers/dist-types/ts3.4/getNewSsoOidcToken.d.ts @@ -0,0 +1,9 @@ +import { SSOToken } from "@smithy/shared-ini-file-loader"; +import { FromSsoInit } from "./fromSso"; +export declare const getNewSsoOidcToken: ( + ssoToken: SSOToken, + ssoRegion: string, + init?: FromSsoInit +) => Promise< + import("@aws-sdk/nested-clients/sso-oidc").CreateTokenCommandOutput +>; diff --git a/amplify/functions/deleteDocument/node_modules/@aws-sdk/token-providers/dist-types/ts3.4/getSsoOidcClient.d.ts b/amplify/functions/deleteDocument/node_modules/@aws-sdk/token-providers/dist-types/ts3.4/getSsoOidcClient.d.ts new file mode 100644 index 0000000..c07dc69 --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@aws-sdk/token-providers/dist-types/ts3.4/getSsoOidcClient.d.ts @@ -0,0 +1,5 @@ +import { FromSsoInit } from "./fromSso"; +export declare const getSsoOidcClient: ( + ssoRegion: string, + init?: FromSsoInit +) => Promise; diff --git a/amplify/functions/deleteDocument/node_modules/@aws-sdk/token-providers/dist-types/ts3.4/index.d.ts b/amplify/functions/deleteDocument/node_modules/@aws-sdk/token-providers/dist-types/ts3.4/index.d.ts new file mode 100644 
index 0000000..a0b176b --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@aws-sdk/token-providers/dist-types/ts3.4/index.d.ts @@ -0,0 +1,3 @@ +export * from "./fromSso"; +export * from "./fromStatic"; +export * from "./nodeProvider"; diff --git a/amplify/functions/deleteDocument/node_modules/@aws-sdk/token-providers/dist-types/ts3.4/nodeProvider.d.ts b/amplify/functions/deleteDocument/node_modules/@aws-sdk/token-providers/dist-types/ts3.4/nodeProvider.d.ts new file mode 100644 index 0000000..11a9bd4 --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@aws-sdk/token-providers/dist-types/ts3.4/nodeProvider.d.ts @@ -0,0 +1,5 @@ +import { TokenIdentityProvider } from "@aws-sdk/types"; +import { FromSsoInit } from "./fromSso"; +export declare const nodeProvider: ( + init?: FromSsoInit +) => TokenIdentityProvider; diff --git a/amplify/functions/deleteDocument/node_modules/@aws-sdk/token-providers/dist-types/ts3.4/validateTokenExpiry.d.ts b/amplify/functions/deleteDocument/node_modules/@aws-sdk/token-providers/dist-types/ts3.4/validateTokenExpiry.d.ts new file mode 100644 index 0000000..9003605 --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@aws-sdk/token-providers/dist-types/ts3.4/validateTokenExpiry.d.ts @@ -0,0 +1,2 @@ +import { TokenIdentity } from "@aws-sdk/types"; +export declare const validateTokenExpiry: (token: TokenIdentity) => void; diff --git a/amplify/functions/deleteDocument/node_modules/@aws-sdk/token-providers/dist-types/ts3.4/validateTokenKey.d.ts b/amplify/functions/deleteDocument/node_modules/@aws-sdk/token-providers/dist-types/ts3.4/validateTokenKey.d.ts new file mode 100644 index 0000000..105b2b4 --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@aws-sdk/token-providers/dist-types/ts3.4/validateTokenKey.d.ts @@ -0,0 +1,5 @@ +export declare const validateTokenKey: ( + key: string, + value: unknown, + forRefresh?: boolean +) => void; diff --git 
a/amplify/functions/deleteDocument/node_modules/@aws-sdk/token-providers/dist-types/ts3.4/writeSSOTokenToFile.d.ts b/amplify/functions/deleteDocument/node_modules/@aws-sdk/token-providers/dist-types/ts3.4/writeSSOTokenToFile.d.ts new file mode 100644 index 0000000..a6d025f --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@aws-sdk/token-providers/dist-types/ts3.4/writeSSOTokenToFile.d.ts @@ -0,0 +1,5 @@ +import { SSOToken } from "@smithy/shared-ini-file-loader"; +export declare const writeSSOTokenToFile: ( + id: string, + ssoToken: SSOToken +) => Promise; diff --git a/amplify/functions/deleteDocument/node_modules/@aws-sdk/token-providers/dist-types/validateTokenExpiry.d.ts b/amplify/functions/deleteDocument/node_modules/@aws-sdk/token-providers/dist-types/validateTokenExpiry.d.ts new file mode 100644 index 0000000..1253784 --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@aws-sdk/token-providers/dist-types/validateTokenExpiry.d.ts @@ -0,0 +1,5 @@ +import { TokenIdentity } from "@aws-sdk/types"; +/** + * Throws TokenProviderError is token is expired. + */ +export declare const validateTokenExpiry: (token: TokenIdentity) => void; diff --git a/amplify/functions/deleteDocument/node_modules/@aws-sdk/token-providers/dist-types/validateTokenKey.d.ts b/amplify/functions/deleteDocument/node_modules/@aws-sdk/token-providers/dist-types/validateTokenKey.d.ts new file mode 100644 index 0000000..a9618fd --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@aws-sdk/token-providers/dist-types/validateTokenKey.d.ts @@ -0,0 +1,4 @@ +/** + * Throws TokenProviderError if value is undefined for key. 
+ */ +export declare const validateTokenKey: (key: string, value: unknown, forRefresh?: boolean) => void; diff --git a/amplify/functions/deleteDocument/node_modules/@aws-sdk/token-providers/dist-types/writeSSOTokenToFile.d.ts b/amplify/functions/deleteDocument/node_modules/@aws-sdk/token-providers/dist-types/writeSSOTokenToFile.d.ts new file mode 100644 index 0000000..a1e17e8 --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@aws-sdk/token-providers/dist-types/writeSSOTokenToFile.d.ts @@ -0,0 +1,5 @@ +import { SSOToken } from "@smithy/shared-ini-file-loader"; +/** + * Writes SSO token to file based on filepath computed from ssoStartUrl or session name. + */ +export declare const writeSSOTokenToFile: (id: string, ssoToken: SSOToken) => Promise; diff --git a/amplify/functions/deleteDocument/node_modules/@aws-sdk/token-providers/package.json b/amplify/functions/deleteDocument/node_modules/@aws-sdk/token-providers/package.json new file mode 100644 index 0000000..d035426 --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@aws-sdk/token-providers/package.json @@ -0,0 +1,67 @@ +{ + "name": "@aws-sdk/token-providers", + "version": "3.803.0", + "description": "A collection of token providers", + "main": "./dist-cjs/index.js", + "module": "./dist-es/index.js", + "sideEffects": false, + "scripts": { + "build": "concurrently 'yarn:build:cjs' 'yarn:build:es' 'yarn:build:types'", + "build:cjs": "node ../../scripts/compilation/inline token-providers", + "build:es": "tsc -p tsconfig.es.json", + "build:include:deps": "lerna run --scope $npm_package_name --include-dependencies build", + "build:types": "tsc -p tsconfig.types.json", + "build:types:downlevel": "downlevel-dts dist-types dist-types/ts3.4", + "clean": "rimraf ./dist-* && rimraf *.tsbuildinfo", + "extract:docs": "api-extractor run --local", + "test": "yarn g:vitest run", + "test:watch": "yarn g:vitest watch" + }, + "keywords": [ + "aws", + "token" + ], + "author": { + "name": "AWS SDK for 
JavaScript Team", + "url": "https://aws.amazon.com/javascript/" + }, + "license": "Apache-2.0", + "dependencies": { + "@aws-sdk/nested-clients": "3.803.0", + "@aws-sdk/types": "3.775.0", + "@smithy/property-provider": "^4.0.2", + "@smithy/shared-ini-file-loader": "^4.0.2", + "@smithy/types": "^4.2.0", + "tslib": "^2.6.2" + }, + "devDependencies": { + "@tsconfig/recommended": "1.0.1", + "@types/node": "^18.19.69", + "concurrently": "7.0.0", + "downlevel-dts": "0.10.1", + "rimraf": "3.0.2", + "typescript": "~5.2.2" + }, + "types": "./dist-types/index.d.ts", + "engines": { + "node": ">=18.0.0" + }, + "typesVersions": { + "<4.0": { + "dist-types/*": [ + "dist-types/ts3.4/*" + ] + } + }, + "files": [ + "dist-*/**" + ], + "browser": {}, + "react-native": {}, + "homepage": "https://github.com/aws/aws-sdk-js-v3/tree/main/packages/token-providers", + "repository": { + "type": "git", + "url": "https://github.com/aws/aws-sdk-js-v3.git", + "directory": "packages/token-providers" + } +} diff --git a/amplify/functions/deleteDocument/node_modules/@aws-sdk/types/LICENSE b/amplify/functions/deleteDocument/node_modules/@aws-sdk/types/LICENSE new file mode 100644 index 0000000..7b6491b --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@aws-sdk/types/LICENSE @@ -0,0 +1,201 @@ +Apache License + Version 2.0, January 2004 + http://www.apache.org/licenses/ + + TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION + + 1. Definitions. + + "License" shall mean the terms and conditions for use, reproduction, + and distribution as defined by Sections 1 through 9 of this document. + + "Licensor" shall mean the copyright owner or entity authorized by + the copyright owner that is granting the License. + + "Legal Entity" shall mean the union of the acting entity and all + other entities that control, are controlled by, or are under common + control with that entity. 
For the purposes of this definition, + "control" means (i) the power, direct or indirect, to cause the + direction or management of such entity, whether by contract or + otherwise, or (ii) ownership of fifty percent (50%) or more of the + outstanding shares, or (iii) beneficial ownership of such entity. + + "You" (or "Your") shall mean an individual or Legal Entity + exercising permissions granted by this License. + + "Source" form shall mean the preferred form for making modifications, + including but not limited to software source code, documentation + source, and configuration files. + + "Object" form shall mean any form resulting from mechanical + transformation or translation of a Source form, including but + not limited to compiled object code, generated documentation, + and conversions to other media types. + + "Work" shall mean the work of authorship, whether in Source or + Object form, made available under the License, as indicated by a + copyright notice that is included in or attached to the work + (an example is provided in the Appendix below). + + "Derivative Works" shall mean any work, whether in Source or Object + form, that is based on (or derived from) the Work and for which the + editorial revisions, annotations, elaborations, or other modifications + represent, as a whole, an original work of authorship. For the purposes + of this License, Derivative Works shall not include works that remain + separable from, or merely link (or bind by name) to the interfaces of, + the Work and Derivative Works thereof. + + "Contribution" shall mean any work of authorship, including + the original version of the Work and any modifications or additions + to that Work or Derivative Works thereof, that is intentionally + submitted to Licensor for inclusion in the Work by the copyright owner + or by an individual or Legal Entity authorized to submit on behalf of + the copyright owner. 
For the purposes of this definition, "submitted" + means any form of electronic, verbal, or written communication sent + to the Licensor or its representatives, including but not limited to + communication on electronic mailing lists, source code control systems, + and issue tracking systems that are managed by, or on behalf of, the + Licensor for the purpose of discussing and improving the Work, but + excluding communication that is conspicuously marked or otherwise + designated in writing by the copyright owner as "Not a Contribution." + + "Contributor" shall mean Licensor and any individual or Legal Entity + on behalf of whom a Contribution has been received by Licensor and + subsequently incorporated within the Work. + + 2. Grant of Copyright License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + copyright license to reproduce, prepare Derivative Works of, + publicly display, publicly perform, sublicense, and distribute the + Work and such Derivative Works in Source or Object form. + + 3. Grant of Patent License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + (except as stated in this section) patent license to make, have made, + use, offer to sell, sell, import, and otherwise transfer the Work, + where such license applies only to those patent claims licensable + by such Contributor that are necessarily infringed by their + Contribution(s) alone or by combination of their Contribution(s) + with the Work to which such Contribution(s) was submitted. 
If You + institute patent litigation against any entity (including a + cross-claim or counterclaim in a lawsuit) alleging that the Work + or a Contribution incorporated within the Work constitutes direct + or contributory patent infringement, then any patent licenses + granted to You under this License for that Work shall terminate + as of the date such litigation is filed. + + 4. Redistribution. You may reproduce and distribute copies of the + Work or Derivative Works thereof in any medium, with or without + modifications, and in Source or Object form, provided that You + meet the following conditions: + + (a) You must give any other recipients of the Work or + Derivative Works a copy of this License; and + + (b) You must cause any modified files to carry prominent notices + stating that You changed the files; and + + (c) You must retain, in the Source form of any Derivative Works + that You distribute, all copyright, patent, trademark, and + attribution notices from the Source form of the Work, + excluding those notices that do not pertain to any part of + the Derivative Works; and + + (d) If the Work includes a "NOTICE" text file as part of its + distribution, then any Derivative Works that You distribute must + include a readable copy of the attribution notices contained + within such NOTICE file, excluding those notices that do not + pertain to any part of the Derivative Works, in at least one + of the following places: within a NOTICE text file distributed + as part of the Derivative Works; within the Source form or + documentation, if provided along with the Derivative Works; or, + within a display generated by the Derivative Works, if and + wherever such third-party notices normally appear. The contents + of the NOTICE file are for informational purposes only and + do not modify the License. 
You may add Your own attribution + notices within Derivative Works that You distribute, alongside + or as an addendum to the NOTICE text from the Work, provided + that such additional attribution notices cannot be construed + as modifying the License. + + You may add Your own copyright statement to Your modifications and + may provide additional or different license terms and conditions + for use, reproduction, or distribution of Your modifications, or + for any such Derivative Works as a whole, provided Your use, + reproduction, and distribution of the Work otherwise complies with + the conditions stated in this License. + + 5. Submission of Contributions. Unless You explicitly state otherwise, + any Contribution intentionally submitted for inclusion in the Work + by You to the Licensor shall be under the terms and conditions of + this License, without any additional terms or conditions. + Notwithstanding the above, nothing herein shall supersede or modify + the terms of any separate license agreement you may have executed + with Licensor regarding such Contributions. + + 6. Trademarks. This License does not grant permission to use the trade + names, trademarks, service marks, or product names of the Licensor, + except as required for reasonable and customary use in describing the + origin of the Work and reproducing the content of the NOTICE file. + + 7. Disclaimer of Warranty. Unless required by applicable law or + agreed to in writing, Licensor provides the Work (and each + Contributor provides its Contributions) on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or + implied, including, without limitation, any warranties or conditions + of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A + PARTICULAR PURPOSE. You are solely responsible for determining the + appropriateness of using or redistributing the Work and assume any + risks associated with Your exercise of permissions under this License. + + 8. 
Limitation of Liability. In no event and under no legal theory, + whether in tort (including negligence), contract, or otherwise, + unless required by applicable law (such as deliberate and grossly + negligent acts) or agreed to in writing, shall any Contributor be + liable to You for damages, including any direct, indirect, special, + incidental, or consequential damages of any character arising as a + result of this License or out of the use or inability to use the + Work (including but not limited to damages for loss of goodwill, + work stoppage, computer failure or malfunction, or any and all + other commercial damages or losses), even if such Contributor + has been advised of the possibility of such damages. + + 9. Accepting Warranty or Additional Liability. While redistributing + the Work or Derivative Works thereof, You may choose to offer, + and charge a fee for, acceptance of support, warranty, indemnity, + or other liability obligations and/or rights consistent with this + License. However, in accepting such obligations, You may act only + on Your own behalf and on Your sole responsibility, not on behalf + of any other Contributor, and only if You agree to indemnify, + defend, and hold each Contributor harmless for any liability + incurred by, or claims asserted against, such Contributor by reason + of your accepting any such warranty or additional liability. + + END OF TERMS AND CONDITIONS + + APPENDIX: How to apply the Apache License to your work. + + To apply the Apache License to your work, attach the following + boilerplate notice, with the fields enclosed by brackets "{}" + replaced with your own identifying information. (Don't include + the brackets!) The text should be enclosed in the appropriate + comment syntax for the file format. We also recommend that a + file or class name and description of purpose be included on the + same "printed page" as the copyright notice for easier + identification within third-party archives. 
+ + Copyright 2018-2020 Amazon.com, Inc. or its affiliates. All Rights Reserved. + + Licensed under the Apache License, Version 2.0 (the "License"); + you may not use this file except in compliance with the License. + You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + + Unless required by applicable law or agreed to in writing, software + distributed under the License is distributed on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + See the License for the specific language governing permissions and + limitations under the License. \ No newline at end of file diff --git a/amplify/functions/deleteDocument/node_modules/@aws-sdk/types/README.md b/amplify/functions/deleteDocument/node_modules/@aws-sdk/types/README.md new file mode 100644 index 0000000..a5658db --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@aws-sdk/types/README.md @@ -0,0 +1,4 @@ +# @aws-sdk/types + +[![NPM version](https://img.shields.io/npm/v/@aws-sdk/types/latest.svg)](https://www.npmjs.com/package/@aws-sdk/types) +[![NPM downloads](https://img.shields.io/npm/dm/@aws-sdk/types.svg)](https://www.npmjs.com/package/@aws-sdk/types) diff --git a/amplify/functions/deleteDocument/node_modules/@aws-sdk/types/dist-cjs/index.js b/amplify/functions/deleteDocument/node_modules/@aws-sdk/types/dist-cjs/index.js new file mode 100644 index 0000000..8114db0 --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@aws-sdk/types/dist-cjs/index.js @@ -0,0 +1,294 @@ +"use strict"; +var __defProp = Object.defineProperty; +var __getOwnPropDesc = Object.getOwnPropertyDescriptor; +var __getOwnPropNames = Object.getOwnPropertyNames; +var __hasOwnProp = Object.prototype.hasOwnProperty; +var __export = (target, all) => { + for (var name in all) + __defProp(target, name, { get: all[name], enumerable: true }); +}; +var __copyProps = (to, from, except, desc) => { + if (from && typeof from === "object" || typeof from 
=== "function") { + for (let key of __getOwnPropNames(from)) + if (!__hasOwnProp.call(to, key) && key !== except) + __defProp(to, key, { get: () => from[key], enumerable: !(desc = __getOwnPropDesc(from, key)) || desc.enumerable }); + } + return to; +}; +var __toCommonJS = (mod) => __copyProps(__defProp({}, "__esModule", { value: true }), mod); + +// src/index.ts +var index_exports = {}; +__export(index_exports, { + AbortController: () => import_types.AbortController, + AbortHandler: () => import_types.AbortHandler, + AbortSignal: () => import_types.AbortSignal, + AbsoluteLocation: () => import_types.AbsoluteLocation, + AuthScheme: () => import_types.AuthScheme, + AvailableMessage: () => import_types.AvailableMessage, + AvailableMessages: () => import_types.AvailableMessages, + AwsCredentialIdentity: () => import_types.AwsCredentialIdentity, + AwsCredentialIdentityProvider: () => import_types.AwsCredentialIdentityProvider, + BinaryHeaderValue: () => import_types.BinaryHeaderValue, + BlobTypes: () => import_types.BlobTypes, + BodyLengthCalculator: () => import_types.BodyLengthCalculator, + BooleanHeaderValue: () => import_types.BooleanHeaderValue, + BuildHandler: () => import_types.BuildHandler, + BuildHandlerArguments: () => import_types.BuildHandlerArguments, + BuildHandlerOptions: () => import_types.BuildHandlerOptions, + BuildHandlerOutput: () => import_types.BuildHandlerOutput, + BuildMiddleware: () => import_types.BuildMiddleware, + ByteHeaderValue: () => import_types.ByteHeaderValue, + Checksum: () => import_types.Checksum, + ChecksumConstructor: () => import_types.ChecksumConstructor, + Client: () => import_types.Client, + Command: () => import_types.Command, + ConnectConfiguration: () => import_types.ConnectConfiguration, + ConnectionManager: () => import_types.ConnectionManager, + ConnectionManagerConfiguration: () => import_types.ConnectionManagerConfiguration, + ConnectionPool: () => import_types.ConnectionPool, + DateInput: () => import_types.DateInput, 
+ Decoder: () => import_types.Decoder, + DeserializeHandler: () => import_types.DeserializeHandler, + DeserializeHandlerArguments: () => import_types.DeserializeHandlerArguments, + DeserializeHandlerOptions: () => import_types.DeserializeHandlerOptions, + DeserializeHandlerOutput: () => import_types.DeserializeHandlerOutput, + DeserializeMiddleware: () => import_types.DeserializeMiddleware, + DocumentType: () => import_types.DocumentType, + Encoder: () => import_types.Encoder, + Endpoint: () => import_types.Endpoint, + EndpointARN: () => import_types.EndpointARN, + EndpointBearer: () => import_types.EndpointBearer, + EndpointObjectProperty: () => import_types.EndpointObjectProperty, + EndpointParameters: () => import_types.EndpointParameters, + EndpointPartition: () => import_types.EndpointPartition, + EndpointURL: () => import_types.EndpointURL, + EndpointURLScheme: () => import_types.EndpointURLScheme, + EndpointV2: () => import_types.EndpointV2, + EventSigner: () => import_types.EventSigner, + EventSigningArguments: () => import_types.EventSigningArguments, + EventStreamMarshaller: () => import_types.EventStreamMarshaller, + EventStreamMarshallerDeserFn: () => import_types.EventStreamMarshallerDeserFn, + EventStreamMarshallerSerFn: () => import_types.EventStreamMarshallerSerFn, + EventStreamPayloadHandler: () => import_types.EventStreamPayloadHandler, + EventStreamPayloadHandlerProvider: () => import_types.EventStreamPayloadHandlerProvider, + EventStreamRequestSigner: () => import_types.EventStreamRequestSigner, + EventStreamSerdeContext: () => import_types.EventStreamSerdeContext, + EventStreamSerdeProvider: () => import_types.EventStreamSerdeProvider, + EventStreamSignerProvider: () => import_types.EventStreamSignerProvider, + ExponentialBackoffJitterType: () => import_types.ExponentialBackoffJitterType, + ExponentialBackoffStrategyOptions: () => import_types.ExponentialBackoffStrategyOptions, + FinalizeHandler: () => import_types.FinalizeHandler, + 
FinalizeHandlerArguments: () => import_types.FinalizeHandlerArguments, + FinalizeHandlerOutput: () => import_types.FinalizeHandlerOutput, + FinalizeRequestHandlerOptions: () => import_types.FinalizeRequestHandlerOptions, + FinalizeRequestMiddleware: () => import_types.FinalizeRequestMiddleware, + FormattedEvent: () => import_types.FormattedEvent, + GetAwsChunkedEncodingStream: () => import_types.GetAwsChunkedEncodingStream, + GetAwsChunkedEncodingStreamOptions: () => import_types.GetAwsChunkedEncodingStreamOptions, + Handler: () => import_types.Handler, + HandlerExecutionContext: () => import_types.HandlerExecutionContext, + HandlerOptions: () => import_types.HandlerOptions, + Hash: () => import_types.Hash, + HashConstructor: () => import_types.HashConstructor, + HeaderBag: () => import_types.HeaderBag, + HostAddressType: () => HostAddressType, + HttpAuthDefinition: () => import_types.HttpAuthDefinition, + HttpAuthLocation: () => import_types.HttpAuthLocation, + HttpHandlerOptions: () => import_types.HttpHandlerOptions, + HttpMessage: () => import_types.HttpMessage, + HttpRequest: () => import_types.HttpRequest, + HttpResponse: () => import_types.HttpResponse, + Identity: () => import_types.Identity, + IniSection: () => import_types.IniSection, + InitializeHandler: () => import_types.InitializeHandler, + InitializeHandlerArguments: () => import_types.InitializeHandlerArguments, + InitializeHandlerOptions: () => import_types.InitializeHandlerOptions, + InitializeHandlerOutput: () => import_types.InitializeHandlerOutput, + InitializeMiddleware: () => import_types.InitializeMiddleware, + Int64: () => import_types.Int64, + IntegerHeaderValue: () => import_types.IntegerHeaderValue, + LongHeaderValue: () => import_types.LongHeaderValue, + MemoizedProvider: () => import_types.MemoizedProvider, + Message: () => import_types.Message, + MessageDecoder: () => import_types.MessageDecoder, + MessageEncoder: () => import_types.MessageEncoder, + MessageHeaderValue: () => 
import_types.MessageHeaderValue, + MessageHeaders: () => import_types.MessageHeaders, + MessageSigner: () => import_types.MessageSigner, + MetadataBearer: () => import_types.MetadataBearer, + MiddlewareStack: () => import_types.MiddlewareStack, + MiddlewareType: () => import_types.MiddlewareType, + PaginationConfiguration: () => import_types.PaginationConfiguration, + Paginator: () => import_types.Paginator, + ParsedIniData: () => import_types.ParsedIniData, + Pluggable: () => import_types.Pluggable, + Priority: () => import_types.Priority, + Profile: () => import_types.Profile, + Provider: () => import_types.Provider, + QueryParameterBag: () => import_types.QueryParameterBag, + RegionInfo: () => import_types.RegionInfo, + RegionInfoProvider: () => import_types.RegionInfoProvider, + RegionInfoProviderOptions: () => import_types.RegionInfoProviderOptions, + Relation: () => import_types.Relation, + RelativeLocation: () => import_types.RelativeLocation, + RelativeMiddlewareOptions: () => import_types.RelativeMiddlewareOptions, + RequestContext: () => import_types.RequestContext, + RequestHandler: () => import_types.RequestHandler, + RequestHandlerMetadata: () => import_types.RequestHandlerMetadata, + RequestHandlerOutput: () => import_types.RequestHandlerOutput, + RequestHandlerProtocol: () => import_types.RequestHandlerProtocol, + RequestPresigner: () => import_types.RequestPresigner, + RequestPresigningArguments: () => import_types.RequestPresigningArguments, + RequestSerializer: () => import_types.RequestSerializer, + RequestSigner: () => import_types.RequestSigner, + RequestSigningArguments: () => import_types.RequestSigningArguments, + ResponseDeserializer: () => import_types.ResponseDeserializer, + ResponseMetadata: () => import_types.ResponseMetadata, + RetryBackoffStrategy: () => import_types.RetryBackoffStrategy, + RetryErrorInfo: () => import_types.RetryErrorInfo, + RetryErrorType: () => import_types.RetryErrorType, + RetryStrategy: () => 
import_types.RetryStrategy, + RetryStrategyOptions: () => import_types.RetryStrategyOptions, + RetryStrategyV2: () => import_types.RetryStrategyV2, + RetryToken: () => import_types.RetryToken, + RetryableTrait: () => import_types.RetryableTrait, + SdkError: () => import_types.SdkError, + SdkStream: () => import_types.SdkStream, + SdkStreamMixin: () => import_types.SdkStreamMixin, + SdkStreamMixinInjector: () => import_types.SdkStreamMixinInjector, + SdkStreamSerdeContext: () => import_types.SdkStreamSerdeContext, + SerdeContext: () => import_types.SerdeContext, + SerializeHandler: () => import_types.SerializeHandler, + SerializeHandlerArguments: () => import_types.SerializeHandlerArguments, + SerializeHandlerOptions: () => import_types.SerializeHandlerOptions, + SerializeHandlerOutput: () => import_types.SerializeHandlerOutput, + SerializeMiddleware: () => import_types.SerializeMiddleware, + SharedConfigFiles: () => import_types.SharedConfigFiles, + ShortHeaderValue: () => import_types.ShortHeaderValue, + SignableMessage: () => import_types.SignableMessage, + SignedMessage: () => import_types.SignedMessage, + SigningArguments: () => import_types.SigningArguments, + SmithyException: () => import_types.SmithyException, + SourceData: () => import_types.SourceData, + StandardRetryBackoffStrategy: () => import_types.StandardRetryBackoffStrategy, + StandardRetryToken: () => import_types.StandardRetryToken, + Step: () => import_types.Step, + StreamCollector: () => import_types.StreamCollector, + StreamHasher: () => import_types.StreamHasher, + StringHeaderValue: () => import_types.StringHeaderValue, + StringSigner: () => import_types.StringSigner, + Terminalware: () => import_types.Terminalware, + TimestampHeaderValue: () => import_types.TimestampHeaderValue, + TokenIdentity: () => import_types.TokenIdentity, + TokenIdentityProvider: () => import_types.TokenIdentityProvider, + URI: () => import_types.URI, + UrlParser: () => import_types.UrlParser, + UserAgent: () => 
import_types.UserAgent, + UserAgentPair: () => import_types.UserAgentPair, + UuidHeaderValue: () => import_types.UuidHeaderValue, + WaiterConfiguration: () => import_types.WaiterConfiguration, + WithSdkStreamMixin: () => import_types.WithSdkStreamMixin, + randomValues: () => import_types.randomValues +}); +module.exports = __toCommonJS(index_exports); + +// src/abort.ts +var import_types = require("@smithy/types"); + +// src/auth.ts + + +// src/blob/blob-types.ts + + +// src/checksum.ts + + +// src/client.ts + + +// src/command.ts + + +// src/connection.ts + + +// src/crypto.ts + + +// src/dns.ts +var HostAddressType = /* @__PURE__ */ ((HostAddressType2) => { + HostAddressType2["AAAA"] = "AAAA"; + HostAddressType2["A"] = "A"; + return HostAddressType2; +})(HostAddressType || {}); + +// src/encode.ts + + +// src/endpoint.ts + + +// src/eventStream.ts + + +// src/http.ts + + +// src/identity/AwsCredentialIdentity.ts + + +// src/identity/Identity.ts + + +// src/identity/TokenIdentity.ts + + +// src/middleware.ts + + +// src/pagination.ts + + +// src/profile.ts + + +// src/response.ts + + +// src/retry.ts + + +// src/serde.ts + + +// src/shapes.ts + + +// src/signature.ts + + +// src/stream.ts + + +// src/transfer.ts + + +// src/uri.ts + + +// src/util.ts + + +// src/waiter.ts + +// Annotate the CommonJS export names for ESM import in node: + +0 && (module.exports = { + HttpAuthLocation, + HostAddressType, + EndpointURLScheme, + RequestHandlerProtocol +}); + diff --git a/amplify/functions/deleteDocument/node_modules/@aws-sdk/types/dist-es/abort.js b/amplify/functions/deleteDocument/node_modules/@aws-sdk/types/dist-es/abort.js new file mode 100644 index 0000000..cb0ff5c --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@aws-sdk/types/dist-es/abort.js @@ -0,0 +1 @@ +export {}; diff --git a/amplify/functions/deleteDocument/node_modules/@aws-sdk/types/dist-es/auth.js b/amplify/functions/deleteDocument/node_modules/@aws-sdk/types/dist-es/auth.js new file 
mode 100644 index 0000000..81f903b --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@aws-sdk/types/dist-es/auth.js @@ -0,0 +1 @@ +export { HttpAuthLocation } from "@smithy/types"; diff --git a/amplify/functions/deleteDocument/node_modules/@aws-sdk/types/dist-es/blob/blob-types.js b/amplify/functions/deleteDocument/node_modules/@aws-sdk/types/dist-es/blob/blob-types.js new file mode 100644 index 0000000..cb0ff5c --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@aws-sdk/types/dist-es/blob/blob-types.js @@ -0,0 +1 @@ +export {}; diff --git a/amplify/functions/deleteDocument/node_modules/@aws-sdk/types/dist-es/checksum.js b/amplify/functions/deleteDocument/node_modules/@aws-sdk/types/dist-es/checksum.js new file mode 100644 index 0000000..cb0ff5c --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@aws-sdk/types/dist-es/checksum.js @@ -0,0 +1 @@ +export {}; diff --git a/amplify/functions/deleteDocument/node_modules/@aws-sdk/types/dist-es/client.js b/amplify/functions/deleteDocument/node_modules/@aws-sdk/types/dist-es/client.js new file mode 100644 index 0000000..cb0ff5c --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@aws-sdk/types/dist-es/client.js @@ -0,0 +1 @@ +export {}; diff --git a/amplify/functions/deleteDocument/node_modules/@aws-sdk/types/dist-es/command.js b/amplify/functions/deleteDocument/node_modules/@aws-sdk/types/dist-es/command.js new file mode 100644 index 0000000..cb0ff5c --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@aws-sdk/types/dist-es/command.js @@ -0,0 +1 @@ +export {}; diff --git a/amplify/functions/deleteDocument/node_modules/@aws-sdk/types/dist-es/connection.js b/amplify/functions/deleteDocument/node_modules/@aws-sdk/types/dist-es/connection.js new file mode 100644 index 0000000..cb0ff5c --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@aws-sdk/types/dist-es/connection.js @@ -0,0 +1 @@ +export {}; diff --git 
a/amplify/functions/deleteDocument/node_modules/@aws-sdk/types/dist-es/credentials.js b/amplify/functions/deleteDocument/node_modules/@aws-sdk/types/dist-es/credentials.js new file mode 100644 index 0000000..cb0ff5c --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@aws-sdk/types/dist-es/credentials.js @@ -0,0 +1 @@ +export {}; diff --git a/amplify/functions/deleteDocument/node_modules/@aws-sdk/types/dist-es/crypto.js b/amplify/functions/deleteDocument/node_modules/@aws-sdk/types/dist-es/crypto.js new file mode 100644 index 0000000..cb0ff5c --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@aws-sdk/types/dist-es/crypto.js @@ -0,0 +1 @@ +export {}; diff --git a/amplify/functions/deleteDocument/node_modules/@aws-sdk/types/dist-es/dns.js b/amplify/functions/deleteDocument/node_modules/@aws-sdk/types/dist-es/dns.js new file mode 100644 index 0000000..c6a2cd9 --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@aws-sdk/types/dist-es/dns.js @@ -0,0 +1,5 @@ +export var HostAddressType; +(function (HostAddressType) { + HostAddressType["AAAA"] = "AAAA"; + HostAddressType["A"] = "A"; +})(HostAddressType || (HostAddressType = {})); diff --git a/amplify/functions/deleteDocument/node_modules/@aws-sdk/types/dist-es/encode.js b/amplify/functions/deleteDocument/node_modules/@aws-sdk/types/dist-es/encode.js new file mode 100644 index 0000000..cb0ff5c --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@aws-sdk/types/dist-es/encode.js @@ -0,0 +1 @@ +export {}; diff --git a/amplify/functions/deleteDocument/node_modules/@aws-sdk/types/dist-es/endpoint.js b/amplify/functions/deleteDocument/node_modules/@aws-sdk/types/dist-es/endpoint.js new file mode 100644 index 0000000..ec53acc --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@aws-sdk/types/dist-es/endpoint.js @@ -0,0 +1 @@ +export { EndpointURLScheme, } from "@smithy/types"; diff --git 
a/amplify/functions/deleteDocument/node_modules/@aws-sdk/types/dist-es/eventStream.js b/amplify/functions/deleteDocument/node_modules/@aws-sdk/types/dist-es/eventStream.js new file mode 100644 index 0000000..cb0ff5c --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@aws-sdk/types/dist-es/eventStream.js @@ -0,0 +1 @@ +export {}; diff --git a/amplify/functions/deleteDocument/node_modules/@aws-sdk/types/dist-es/extensions/index.js b/amplify/functions/deleteDocument/node_modules/@aws-sdk/types/dist-es/extensions/index.js new file mode 100644 index 0000000..cb0ff5c --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@aws-sdk/types/dist-es/extensions/index.js @@ -0,0 +1 @@ +export {}; diff --git a/amplify/functions/deleteDocument/node_modules/@aws-sdk/types/dist-es/feature-ids.js b/amplify/functions/deleteDocument/node_modules/@aws-sdk/types/dist-es/feature-ids.js new file mode 100644 index 0000000..cb0ff5c --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@aws-sdk/types/dist-es/feature-ids.js @@ -0,0 +1 @@ +export {}; diff --git a/amplify/functions/deleteDocument/node_modules/@aws-sdk/types/dist-es/function.js b/amplify/functions/deleteDocument/node_modules/@aws-sdk/types/dist-es/function.js new file mode 100644 index 0000000..cb0ff5c --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@aws-sdk/types/dist-es/function.js @@ -0,0 +1 @@ +export {}; diff --git a/amplify/functions/deleteDocument/node_modules/@aws-sdk/types/dist-es/http.js b/amplify/functions/deleteDocument/node_modules/@aws-sdk/types/dist-es/http.js new file mode 100644 index 0000000..cb0ff5c --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@aws-sdk/types/dist-es/http.js @@ -0,0 +1 @@ +export {}; diff --git a/amplify/functions/deleteDocument/node_modules/@aws-sdk/types/dist-es/identity/AnonymousIdentity.js b/amplify/functions/deleteDocument/node_modules/@aws-sdk/types/dist-es/identity/AnonymousIdentity.js new file mode 100644 index 
0000000..cb0ff5c --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@aws-sdk/types/dist-es/identity/AnonymousIdentity.js @@ -0,0 +1 @@ +export {}; diff --git a/amplify/functions/deleteDocument/node_modules/@aws-sdk/types/dist-es/identity/AwsCredentialIdentity.js b/amplify/functions/deleteDocument/node_modules/@aws-sdk/types/dist-es/identity/AwsCredentialIdentity.js new file mode 100644 index 0000000..cb0ff5c --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@aws-sdk/types/dist-es/identity/AwsCredentialIdentity.js @@ -0,0 +1 @@ +export {}; diff --git a/amplify/functions/deleteDocument/node_modules/@aws-sdk/types/dist-es/identity/Identity.js b/amplify/functions/deleteDocument/node_modules/@aws-sdk/types/dist-es/identity/Identity.js new file mode 100644 index 0000000..cb0ff5c --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@aws-sdk/types/dist-es/identity/Identity.js @@ -0,0 +1 @@ +export {}; diff --git a/amplify/functions/deleteDocument/node_modules/@aws-sdk/types/dist-es/identity/LoginIdentity.js b/amplify/functions/deleteDocument/node_modules/@aws-sdk/types/dist-es/identity/LoginIdentity.js new file mode 100644 index 0000000..cb0ff5c --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@aws-sdk/types/dist-es/identity/LoginIdentity.js @@ -0,0 +1 @@ +export {}; diff --git a/amplify/functions/deleteDocument/node_modules/@aws-sdk/types/dist-es/identity/TokenIdentity.js b/amplify/functions/deleteDocument/node_modules/@aws-sdk/types/dist-es/identity/TokenIdentity.js new file mode 100644 index 0000000..cb0ff5c --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@aws-sdk/types/dist-es/identity/TokenIdentity.js @@ -0,0 +1 @@ +export {}; diff --git a/amplify/functions/deleteDocument/node_modules/@aws-sdk/types/dist-es/identity/index.js b/amplify/functions/deleteDocument/node_modules/@aws-sdk/types/dist-es/identity/index.js new file mode 100644 index 0000000..863e78e --- /dev/null +++ 
b/amplify/functions/deleteDocument/node_modules/@aws-sdk/types/dist-es/identity/index.js @@ -0,0 +1,5 @@ +export * from "./AnonymousIdentity"; +export * from "./AwsCredentialIdentity"; +export * from "./Identity"; +export * from "./LoginIdentity"; +export * from "./TokenIdentity"; diff --git a/amplify/functions/deleteDocument/node_modules/@aws-sdk/types/dist-es/index.js b/amplify/functions/deleteDocument/node_modules/@aws-sdk/types/dist-es/index.js new file mode 100644 index 0000000..a7f99d9 --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@aws-sdk/types/dist-es/index.js @@ -0,0 +1,34 @@ +export * from "./abort"; +export * from "./auth"; +export * from "./blob/blob-types"; +export * from "./checksum"; +export * from "./client"; +export * from "./command"; +export * from "./connection"; +export * from "./credentials"; +export * from "./crypto"; +export * from "./dns"; +export * from "./encode"; +export * from "./endpoint"; +export * from "./eventStream"; +export * from "./extensions"; +export * from "./feature-ids"; +export * from "./function"; +export * from "./http"; +export * from "./identity"; +export * from "./logger"; +export * from "./middleware"; +export * from "./pagination"; +export * from "./profile"; +export * from "./request"; +export * from "./response"; +export * from "./retry"; +export * from "./serde"; +export * from "./shapes"; +export * from "./signature"; +export * from "./stream"; +export * from "./token"; +export * from "./transfer"; +export * from "./uri"; +export * from "./util"; +export * from "./waiter"; diff --git a/amplify/functions/deleteDocument/node_modules/@aws-sdk/types/dist-es/logger.js b/amplify/functions/deleteDocument/node_modules/@aws-sdk/types/dist-es/logger.js new file mode 100644 index 0000000..cb0ff5c --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@aws-sdk/types/dist-es/logger.js @@ -0,0 +1 @@ +export {}; diff --git 
a/amplify/functions/deleteDocument/node_modules/@aws-sdk/types/dist-es/middleware.js b/amplify/functions/deleteDocument/node_modules/@aws-sdk/types/dist-es/middleware.js new file mode 100644 index 0000000..cb0ff5c --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@aws-sdk/types/dist-es/middleware.js @@ -0,0 +1 @@ +export {}; diff --git a/amplify/functions/deleteDocument/node_modules/@aws-sdk/types/dist-es/pagination.js b/amplify/functions/deleteDocument/node_modules/@aws-sdk/types/dist-es/pagination.js new file mode 100644 index 0000000..cb0ff5c --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@aws-sdk/types/dist-es/pagination.js @@ -0,0 +1 @@ +export {}; diff --git a/amplify/functions/deleteDocument/node_modules/@aws-sdk/types/dist-es/profile.js b/amplify/functions/deleteDocument/node_modules/@aws-sdk/types/dist-es/profile.js new file mode 100644 index 0000000..cb0ff5c --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@aws-sdk/types/dist-es/profile.js @@ -0,0 +1 @@ +export {}; diff --git a/amplify/functions/deleteDocument/node_modules/@aws-sdk/types/dist-es/request.js b/amplify/functions/deleteDocument/node_modules/@aws-sdk/types/dist-es/request.js new file mode 100644 index 0000000..cb0ff5c --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@aws-sdk/types/dist-es/request.js @@ -0,0 +1 @@ +export {}; diff --git a/amplify/functions/deleteDocument/node_modules/@aws-sdk/types/dist-es/response.js b/amplify/functions/deleteDocument/node_modules/@aws-sdk/types/dist-es/response.js new file mode 100644 index 0000000..cb0ff5c --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@aws-sdk/types/dist-es/response.js @@ -0,0 +1 @@ +export {}; diff --git a/amplify/functions/deleteDocument/node_modules/@aws-sdk/types/dist-es/retry.js b/amplify/functions/deleteDocument/node_modules/@aws-sdk/types/dist-es/retry.js new file mode 100644 index 0000000..cb0ff5c --- /dev/null +++ 
b/amplify/functions/deleteDocument/node_modules/@aws-sdk/types/dist-es/retry.js @@ -0,0 +1 @@ +export {}; diff --git a/amplify/functions/deleteDocument/node_modules/@aws-sdk/types/dist-es/serde.js b/amplify/functions/deleteDocument/node_modules/@aws-sdk/types/dist-es/serde.js new file mode 100644 index 0000000..cb0ff5c --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@aws-sdk/types/dist-es/serde.js @@ -0,0 +1 @@ +export {}; diff --git a/amplify/functions/deleteDocument/node_modules/@aws-sdk/types/dist-es/shapes.js b/amplify/functions/deleteDocument/node_modules/@aws-sdk/types/dist-es/shapes.js new file mode 100644 index 0000000..cb0ff5c --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@aws-sdk/types/dist-es/shapes.js @@ -0,0 +1 @@ +export {}; diff --git a/amplify/functions/deleteDocument/node_modules/@aws-sdk/types/dist-es/signature.js b/amplify/functions/deleteDocument/node_modules/@aws-sdk/types/dist-es/signature.js new file mode 100644 index 0000000..cb0ff5c --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@aws-sdk/types/dist-es/signature.js @@ -0,0 +1 @@ +export {}; diff --git a/amplify/functions/deleteDocument/node_modules/@aws-sdk/types/dist-es/stream.js b/amplify/functions/deleteDocument/node_modules/@aws-sdk/types/dist-es/stream.js new file mode 100644 index 0000000..cb0ff5c --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@aws-sdk/types/dist-es/stream.js @@ -0,0 +1 @@ +export {}; diff --git a/amplify/functions/deleteDocument/node_modules/@aws-sdk/types/dist-es/token.js b/amplify/functions/deleteDocument/node_modules/@aws-sdk/types/dist-es/token.js new file mode 100644 index 0000000..cb0ff5c --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@aws-sdk/types/dist-es/token.js @@ -0,0 +1 @@ +export {}; diff --git a/amplify/functions/deleteDocument/node_modules/@aws-sdk/types/dist-es/transfer.js b/amplify/functions/deleteDocument/node_modules/@aws-sdk/types/dist-es/transfer.js new 
file mode 100644 index 0000000..ba57589 --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@aws-sdk/types/dist-es/transfer.js @@ -0,0 +1 @@ +export { RequestHandlerProtocol, } from "@smithy/types"; diff --git a/amplify/functions/deleteDocument/node_modules/@aws-sdk/types/dist-es/uri.js b/amplify/functions/deleteDocument/node_modules/@aws-sdk/types/dist-es/uri.js new file mode 100644 index 0000000..cb0ff5c --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@aws-sdk/types/dist-es/uri.js @@ -0,0 +1 @@ +export {}; diff --git a/amplify/functions/deleteDocument/node_modules/@aws-sdk/types/dist-es/util.js b/amplify/functions/deleteDocument/node_modules/@aws-sdk/types/dist-es/util.js new file mode 100644 index 0000000..cb0ff5c --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@aws-sdk/types/dist-es/util.js @@ -0,0 +1 @@ +export {}; diff --git a/amplify/functions/deleteDocument/node_modules/@aws-sdk/types/dist-es/waiter.js b/amplify/functions/deleteDocument/node_modules/@aws-sdk/types/dist-es/waiter.js new file mode 100644 index 0000000..cb0ff5c --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@aws-sdk/types/dist-es/waiter.js @@ -0,0 +1 @@ +export {}; diff --git a/amplify/functions/deleteDocument/node_modules/@aws-sdk/types/dist-types/abort.d.ts b/amplify/functions/deleteDocument/node_modules/@aws-sdk/types/dist-types/abort.d.ts new file mode 100644 index 0000000..dad6079 --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@aws-sdk/types/dist-types/abort.d.ts @@ -0,0 +1 @@ +export { AbortController, AbortHandler, AbortSignal } from "@smithy/types"; diff --git a/amplify/functions/deleteDocument/node_modules/@aws-sdk/types/dist-types/auth.d.ts b/amplify/functions/deleteDocument/node_modules/@aws-sdk/types/dist-types/auth.d.ts new file mode 100644 index 0000000..6626c16 --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@aws-sdk/types/dist-types/auth.d.ts @@ -0,0 +1 @@ +export { 
AuthScheme, HttpAuthDefinition, HttpAuthLocation } from "@smithy/types"; diff --git a/amplify/functions/deleteDocument/node_modules/@aws-sdk/types/dist-types/blob/blob-types.d.ts b/amplify/functions/deleteDocument/node_modules/@aws-sdk/types/dist-types/blob/blob-types.d.ts new file mode 100644 index 0000000..fedb3d5 --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@aws-sdk/types/dist-types/blob/blob-types.d.ts @@ -0,0 +1,2 @@ +import { BlobTypes } from '@smithy/types'; +export { BlobTypes }; diff --git a/amplify/functions/deleteDocument/node_modules/@aws-sdk/types/dist-types/checksum.d.ts b/amplify/functions/deleteDocument/node_modules/@aws-sdk/types/dist-types/checksum.d.ts new file mode 100644 index 0000000..f805d72 --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@aws-sdk/types/dist-types/checksum.d.ts @@ -0,0 +1 @@ +export { Checksum, ChecksumConstructor } from "@smithy/types"; diff --git a/amplify/functions/deleteDocument/node_modules/@aws-sdk/types/dist-types/client.d.ts b/amplify/functions/deleteDocument/node_modules/@aws-sdk/types/dist-types/client.d.ts new file mode 100644 index 0000000..d6b3dcf --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@aws-sdk/types/dist-types/client.d.ts @@ -0,0 +1 @@ +export { Client } from "@smithy/types"; diff --git a/amplify/functions/deleteDocument/node_modules/@aws-sdk/types/dist-types/command.d.ts b/amplify/functions/deleteDocument/node_modules/@aws-sdk/types/dist-types/command.d.ts new file mode 100644 index 0000000..3887267 --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@aws-sdk/types/dist-types/command.d.ts @@ -0,0 +1 @@ +export { Command } from "@smithy/types"; diff --git a/amplify/functions/deleteDocument/node_modules/@aws-sdk/types/dist-types/connection.d.ts b/amplify/functions/deleteDocument/node_modules/@aws-sdk/types/dist-types/connection.d.ts new file mode 100644 index 0000000..efcb4d7 --- /dev/null +++ 
b/amplify/functions/deleteDocument/node_modules/@aws-sdk/types/dist-types/connection.d.ts @@ -0,0 +1 @@ +export { ConnectConfiguration, ConnectionManager, ConnectionManagerConfiguration, ConnectionPool } from "@smithy/types"; diff --git a/amplify/functions/deleteDocument/node_modules/@aws-sdk/types/dist-types/credentials.d.ts b/amplify/functions/deleteDocument/node_modules/@aws-sdk/types/dist-types/credentials.d.ts new file mode 100644 index 0000000..181bf8b --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@aws-sdk/types/dist-types/credentials.d.ts @@ -0,0 +1,50 @@ +import { Logger } from "@smithy/types"; +import { AwsCredentialIdentity } from "./identity"; +import { Provider } from "./util"; +/** + * @public + * + * An object representing temporary or permanent AWS credentials. + * + * @deprecated Use {@link AwsCredentialIdentity} + */ +export interface Credentials extends AwsCredentialIdentity { +} +/** + * @public + * + * @deprecated Use {@link AwsCredentialIdentityProvider} + */ +export type CredentialProvider = Provider; +/** + * @public + * + * Common options for credential providers. + */ +export type CredentialProviderOptions = { + /** + * This logger is only used to provide information + * on what credential providers were used during resolution. + * + * It does not log credentials. + */ + logger?: Logger; + /** + * Present if the credential provider was created by calling + * the defaultCredentialProvider in a client's middleware, having + * access to the client's config. + * + * The region of that parent or outer client is important because + * an inner client used by the credential provider may need + * to match its default partition or region with that of + * the outer client. + * + * @internal + * @deprecated - not truly deprecated, marked as a warning to not use this. 
+ */ + parentClientConfig?: { + region?: string | Provider; + profile?: string; + [key: string]: unknown; + }; +}; diff --git a/amplify/functions/deleteDocument/node_modules/@aws-sdk/types/dist-types/crypto.d.ts b/amplify/functions/deleteDocument/node_modules/@aws-sdk/types/dist-types/crypto.d.ts new file mode 100644 index 0000000..aeeea50 --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@aws-sdk/types/dist-types/crypto.d.ts @@ -0,0 +1 @@ +export { Hash, HashConstructor, StreamHasher, randomValues, SourceData } from "@smithy/types"; diff --git a/amplify/functions/deleteDocument/node_modules/@aws-sdk/types/dist-types/dns.d.ts b/amplify/functions/deleteDocument/node_modules/@aws-sdk/types/dist-types/dns.d.ts new file mode 100644 index 0000000..8348cc4 --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@aws-sdk/types/dist-types/dns.d.ts @@ -0,0 +1,85 @@ +/** + * @public + * + * DNS record types + */ +export declare enum HostAddressType { + /** + * IPv6 + */ + AAAA = "AAAA", + /** + * IPv4 + */ + A = "A" +} +/** + * @public + */ +export interface HostAddress { + /** + * The {@link HostAddressType} of the host address. + */ + addressType: HostAddressType; + /** + * The resolved numerical address represented as a + * string. + */ + address: string; + /** + * The host name the {@link address} was resolved from. + */ + hostName: string; + /** + * The service record of {@link hostName}. + */ + service?: string; +} +/** + * @public + */ +export interface HostResolverArguments { + /** + * The host name to resolve. + */ + hostName: string; + /** + * The service record of {@link hostName}. + */ + service?: string; +} +/** + * @public + * + * Host Resolver interface for DNS queries + */ +export interface HostResolver { + /** + * Resolves the address(es) for {@link HostResolverArguments} and returns a + * list of addresses with (most likely) two addresses, one {@link HostAddressType.AAAA} + * and one {@link HostAddressType.A}. 
Calls to this function will likely alter + * the cache (if implemented) so that if there's multiple addresses, a different + * set will be returned on the next call. + * In the case of multi-answer, still only a maximum of two records should be + * returned. The resolver implementation is responsible for caching and rotation + * of the multiple addresses that get returned. + * Implementations don't have to explictly call getaddrinfo(), they can use + * high level abstractions provided in their language runtimes/libraries. + * @param args - arguments with host name query addresses for + * @returns promise with a list of {@link HostAddress} + */ + resolveAddress(args: HostResolverArguments): Promise; + /** + * Reports a failure on a {@link HostAddress} so that the cache (if implemented) + * can accomodate the failure and likely not return the address until it recovers. + * @param addr - host address to report a failure on + */ + reportFailureOnAddress(addr: HostAddress): void; + /** + * Empties the cache (if implemented) for a {@link HostResolverArguments.hostName}. + * If {@link HostResolverArguments.hostName} is not provided, the cache (if + * implemented) is emptied for all host names. 
+ * @param args - optional arguments to empty the cache for + */ + purgeCache(args?: HostResolverArguments): void; +} diff --git a/amplify/functions/deleteDocument/node_modules/@aws-sdk/types/dist-types/encode.d.ts b/amplify/functions/deleteDocument/node_modules/@aws-sdk/types/dist-types/encode.d.ts new file mode 100644 index 0000000..128ee57 --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@aws-sdk/types/dist-types/encode.d.ts @@ -0,0 +1 @@ +export { MessageDecoder, MessageEncoder, AvailableMessage, AvailableMessages } from "@smithy/types"; diff --git a/amplify/functions/deleteDocument/node_modules/@aws-sdk/types/dist-types/endpoint.d.ts b/amplify/functions/deleteDocument/node_modules/@aws-sdk/types/dist-types/endpoint.d.ts new file mode 100644 index 0000000..f2ffaf5 --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@aws-sdk/types/dist-types/endpoint.d.ts @@ -0,0 +1 @@ +export { EndpointARN, EndpointPartition, EndpointURLScheme, EndpointURL, EndpointObjectProperty, EndpointV2, EndpointParameters, } from "@smithy/types"; diff --git a/amplify/functions/deleteDocument/node_modules/@aws-sdk/types/dist-types/eventStream.d.ts b/amplify/functions/deleteDocument/node_modules/@aws-sdk/types/dist-types/eventStream.d.ts new file mode 100644 index 0000000..cee02f7 --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@aws-sdk/types/dist-types/eventStream.d.ts @@ -0,0 +1 @@ +export { Message, MessageHeaders, BooleanHeaderValue, ByteHeaderValue, ShortHeaderValue, IntegerHeaderValue, LongHeaderValue, BinaryHeaderValue, StringHeaderValue, TimestampHeaderValue, UuidHeaderValue, MessageHeaderValue, Int64, EventStreamSerdeContext, EventStreamMarshaller, EventStreamMarshallerDeserFn, EventStreamMarshallerSerFn, EventStreamPayloadHandler, EventStreamPayloadHandlerProvider, EventStreamRequestSigner, EventStreamSerdeProvider, EventStreamSignerProvider, } from "@smithy/types"; diff --git 
a/amplify/functions/deleteDocument/node_modules/@aws-sdk/types/dist-types/extensions/index.d.ts b/amplify/functions/deleteDocument/node_modules/@aws-sdk/types/dist-types/extensions/index.d.ts new file mode 100644 index 0000000..5a45bcb --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@aws-sdk/types/dist-types/extensions/index.d.ts @@ -0,0 +1,8 @@ +import { Provider } from "@smithy/types"; +/** + * @internal + */ +export interface AwsRegionExtensionConfiguration { + setRegion(region: Provider): void; + region(): Provider; +} diff --git a/amplify/functions/deleteDocument/node_modules/@aws-sdk/types/dist-types/feature-ids.d.ts b/amplify/functions/deleteDocument/node_modules/@aws-sdk/types/dist-types/feature-ids.d.ts new file mode 100644 index 0000000..f1679fa --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@aws-sdk/types/dist-types/feature-ids.d.ts @@ -0,0 +1,59 @@ +/** + * @internal + */ +export type AwsSdkFeatures = Partial<{ + RESOURCE_MODEL: "A"; + WAITER: "B"; + PAGINATOR: "C"; + RETRY_MODE_LEGACY: "D"; + RETRY_MODE_STANDARD: "E"; + RETRY_MODE_ADAPTIVE: "F"; + S3_EXPRESS_BUCKET: "J"; + S3_ACCESS_GRANTS: "K"; + GZIP_REQUEST_COMPRESSION: "L"; + PROTOCOL_RPC_V2_CBOR: "M"; + ENDPOINT_OVERRIDE: "N"; + ACCOUNT_ID_ENDPOINT: "O"; + ACCOUNT_ID_MODE_PREFERRED: "P"; + ACCOUNT_ID_MODE_DISABLED: "Q"; + ACCOUNT_ID_MODE_REQUIRED: "R"; + SIGV4A_SIGNING: "S"; + FLEXIBLE_CHECKSUMS_REQ_CRC32: "U"; + FLEXIBLE_CHECKSUMS_REQ_CRC32C: "V"; + FLEXIBLE_CHECKSUMS_REQ_CRC64: "W"; + FLEXIBLE_CHECKSUMS_REQ_SHA1: "X"; + FLEXIBLE_CHECKSUMS_REQ_SHA256: "Y"; + FLEXIBLE_CHECKSUMS_REQ_WHEN_SUPPORTED: "Z"; + FLEXIBLE_CHECKSUMS_REQ_WHEN_REQUIRED: "a"; + FLEXIBLE_CHECKSUMS_RES_WHEN_SUPPORTED: "b"; + FLEXIBLE_CHECKSUMS_RES_WHEN_REQUIRED: "c"; + DDB_MAPPER: "d"; +}> & AwsSdkCredentialsFeatures; +/** + * @internal + */ +export type AwsSdkCredentialsFeatures = Partial<{ + RESOLVED_ACCOUNT_ID: "T"; + CREDENTIALS_CODE: "e"; + CREDENTIALS_ENV_VARS: "g"; + 
CREDENTIALS_ENV_VARS_STS_WEB_ID_TOKEN: "h"; + CREDENTIALS_STS_ASSUME_ROLE: "i"; + CREDENTIALS_STS_ASSUME_ROLE_SAML: "j"; + CREDENTIALS_STS_ASSUME_ROLE_WEB_ID: "k"; + CREDENTIALS_STS_FEDERATION_TOKEN: "l"; + CREDENTIALS_STS_SESSION_TOKEN: "m"; + CREDENTIALS_PROFILE: "n"; + CREDENTIALS_PROFILE_SOURCE_PROFILE: "o"; + CREDENTIALS_PROFILE_NAMED_PROVIDER: "p"; + CREDENTIALS_PROFILE_STS_WEB_ID_TOKEN: "q"; + CREDENTIALS_PROFILE_SSO: "r"; + CREDENTIALS_SSO: "s"; + CREDENTIALS_PROFILE_SSO_LEGACY: "t"; + CREDENTIALS_SSO_LEGACY: "u"; + CREDENTIALS_PROFILE_PROCESS: "v"; + CREDENTIALS_PROCESS: "w"; + CREDENTIALS_BOTO2_CONFIG_FILE: "x"; + CREDENTIALS_AWS_SDK_STORE: "y"; + CREDENTIALS_HTTP: "z"; + CREDENTIALS_IMDS: "0"; +}>; diff --git a/amplify/functions/deleteDocument/node_modules/@aws-sdk/types/dist-types/function.d.ts b/amplify/functions/deleteDocument/node_modules/@aws-sdk/types/dist-types/function.d.ts new file mode 100644 index 0000000..3c777fa --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@aws-sdk/types/dist-types/function.d.ts @@ -0,0 +1,7 @@ +/** + * Resolves a function that accepts both the object argument fields of F1 and F2. + * The function returns an intersection of what F1 and F2 return. + * + * @public + */ +export type MergeFunctions = F1 extends (arg: infer A1) => infer R1 ? F2 extends (arg: infer A2) => infer R2 ? R1 extends Promise ? 
(arg?: A1 & A2) => Promise & Awaited> : (arg?: A1 & A2) => R1 & R2 : never : never; diff --git a/amplify/functions/deleteDocument/node_modules/@aws-sdk/types/dist-types/http.d.ts b/amplify/functions/deleteDocument/node_modules/@aws-sdk/types/dist-types/http.d.ts new file mode 100644 index 0000000..7594b5a --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@aws-sdk/types/dist-types/http.d.ts @@ -0,0 +1,33 @@ +import { HttpResponse } from "@smithy/types"; +export { Endpoint, HeaderBag, HttpHandlerOptions, HttpMessage, HttpRequest, HttpResponse, QueryParameterBag, } from "@smithy/types"; +/** + * @public + * + * A collection of key/value pairs with case-insensitive keys. + */ +export interface Headers extends Map { + /** + * Returns a new instance of Headers with the specified header set to the + * provided value. Does not modify the original Headers instance. + * + * @param headerName - The name of the header to add or overwrite + * @param headerValue - The value to which the header should be set + */ + withHeader(headerName: string, headerValue: string): Headers; + /** + * Returns a new instance of Headers without the specified header. Does not + * modify the original Headers instance. + * + * @param headerName - The name of the header to remove + */ + withoutHeader(headerName: string): Headers; +} +/** + * @public + * + * Represents HTTP message whose body has been resolved to a string. This is + * used in parsing http message. 
+ */ +export interface ResolvedHttpResponse extends HttpResponse { + body: string; +} diff --git a/amplify/functions/deleteDocument/node_modules/@aws-sdk/types/dist-types/identity/AnonymousIdentity.d.ts b/amplify/functions/deleteDocument/node_modules/@aws-sdk/types/dist-types/identity/AnonymousIdentity.d.ts new file mode 100644 index 0000000..c7006e3 --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@aws-sdk/types/dist-types/identity/AnonymousIdentity.d.ts @@ -0,0 +1,6 @@ +import { Identity } from "./Identity"; +/** + * @public + */ +export interface AnonymousIdentity extends Identity { +} diff --git a/amplify/functions/deleteDocument/node_modules/@aws-sdk/types/dist-types/identity/AwsCredentialIdentity.d.ts b/amplify/functions/deleteDocument/node_modules/@aws-sdk/types/dist-types/identity/AwsCredentialIdentity.d.ts new file mode 100644 index 0000000..c94b6c4 --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@aws-sdk/types/dist-types/identity/AwsCredentialIdentity.d.ts @@ -0,0 +1,60 @@ +import type { AwsCredentialIdentity, AwsCredentialIdentityProvider, Logger, RequestHandler } from "@smithy/types"; +import type { AwsSdkCredentialsFeatures } from "../feature-ids"; +export { AwsCredentialIdentity, AwsCredentialIdentityProvider, IdentityProvider } from "@smithy/types"; +/** + * @public + */ +export interface AwsIdentityProperties { + /** + * These are resolved client config values, and may be async providers. + */ + callerClientConfig?: { + /** + * It is likely a programming error if you use + * the caller client config credentials in a credential provider, since + * it will recurse. + * + * @deprecated do not use. + */ + credentials?: AwsCredentialIdentity | AwsCredentialIdentityProvider; + /** + * @internal + * @deprecated minimize use. 
+ */ + credentialDefaultProvider?: (input?: any) => AwsCredentialIdentityProvider; + logger?: Logger; + profile?: string; + region(): Promise; + requestHandler?: RequestHandler; + }; +} +/** + * @public + * + * Variation of {@link IdentityProvider} which accepts a contextual + * client configuration that includes an AWS region and potentially other + * configurable fields. + * + * Used to link a credential provider to a client if it is being called + * in the context of a client. + */ +export type RuntimeConfigIdentityProvider = (awsIdentityProperties?: AwsIdentityProperties) => Promise; +/** + * @public + * + * Variation of {@link AwsCredentialIdentityProvider} which accepts a contextual + * client configuration that includes an AWS region and potentially other + * configurable fields. + * + * Used to link a credential provider to a client if it is being called + * in the context of a client. + */ +export type RuntimeConfigAwsCredentialIdentityProvider = RuntimeConfigIdentityProvider; +/** + * @public + * + * AwsCredentialIdentity with source attribution metadata. 
+ */ +export type AttributedAwsCredentialIdentity = AwsCredentialIdentity & { + $source?: AwsSdkCredentialsFeatures; +}; diff --git a/amplify/functions/deleteDocument/node_modules/@aws-sdk/types/dist-types/identity/Identity.d.ts b/amplify/functions/deleteDocument/node_modules/@aws-sdk/types/dist-types/identity/Identity.d.ts new file mode 100644 index 0000000..4175fd3 --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@aws-sdk/types/dist-types/identity/Identity.d.ts @@ -0,0 +1 @@ +export { Identity, IdentityProvider } from "@smithy/types"; diff --git a/amplify/functions/deleteDocument/node_modules/@aws-sdk/types/dist-types/identity/LoginIdentity.d.ts b/amplify/functions/deleteDocument/node_modules/@aws-sdk/types/dist-types/identity/LoginIdentity.d.ts new file mode 100644 index 0000000..13793f9 --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@aws-sdk/types/dist-types/identity/LoginIdentity.d.ts @@ -0,0 +1,18 @@ +import { Identity, IdentityProvider } from "./Identity"; +/** + * @public + */ +export interface LoginIdentity extends Identity { + /** + * Identity username + */ + readonly username: string; + /** + * Identity password + */ + readonly password: string; +} +/** + * @public + */ +export type LoginIdentityProvider = IdentityProvider; diff --git a/amplify/functions/deleteDocument/node_modules/@aws-sdk/types/dist-types/identity/TokenIdentity.d.ts b/amplify/functions/deleteDocument/node_modules/@aws-sdk/types/dist-types/identity/TokenIdentity.d.ts new file mode 100644 index 0000000..66301bc --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@aws-sdk/types/dist-types/identity/TokenIdentity.d.ts @@ -0,0 +1 @@ +export { TokenIdentity, TokenIdentityProvider } from "@smithy/types"; diff --git a/amplify/functions/deleteDocument/node_modules/@aws-sdk/types/dist-types/identity/index.d.ts b/amplify/functions/deleteDocument/node_modules/@aws-sdk/types/dist-types/identity/index.d.ts new file mode 100644 index 0000000..863e78e 
--- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@aws-sdk/types/dist-types/identity/index.d.ts @@ -0,0 +1,5 @@ +export * from "./AnonymousIdentity"; +export * from "./AwsCredentialIdentity"; +export * from "./Identity"; +export * from "./LoginIdentity"; +export * from "./TokenIdentity"; diff --git a/amplify/functions/deleteDocument/node_modules/@aws-sdk/types/dist-types/index.d.ts b/amplify/functions/deleteDocument/node_modules/@aws-sdk/types/dist-types/index.d.ts new file mode 100644 index 0000000..a7f99d9 --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@aws-sdk/types/dist-types/index.d.ts @@ -0,0 +1,34 @@ +export * from "./abort"; +export * from "./auth"; +export * from "./blob/blob-types"; +export * from "./checksum"; +export * from "./client"; +export * from "./command"; +export * from "./connection"; +export * from "./credentials"; +export * from "./crypto"; +export * from "./dns"; +export * from "./encode"; +export * from "./endpoint"; +export * from "./eventStream"; +export * from "./extensions"; +export * from "./feature-ids"; +export * from "./function"; +export * from "./http"; +export * from "./identity"; +export * from "./logger"; +export * from "./middleware"; +export * from "./pagination"; +export * from "./profile"; +export * from "./request"; +export * from "./response"; +export * from "./retry"; +export * from "./serde"; +export * from "./shapes"; +export * from "./signature"; +export * from "./stream"; +export * from "./token"; +export * from "./transfer"; +export * from "./uri"; +export * from "./util"; +export * from "./waiter"; diff --git a/amplify/functions/deleteDocument/node_modules/@aws-sdk/types/dist-types/logger.d.ts b/amplify/functions/deleteDocument/node_modules/@aws-sdk/types/dist-types/logger.d.ts new file mode 100644 index 0000000..11a33c6 --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@aws-sdk/types/dist-types/logger.d.ts @@ -0,0 +1,22 @@ +import type { Logger } from 
"@smithy/types"; +export type { Logger } from "@smithy/types"; +/** + * @public + * + * A list of logger's log level. These levels are sorted in + * order of increasing severity. Each log level includes itself and all + * the levels behind itself. + * + * @example `new Logger({logLevel: 'warn'})` will print all the warn and error + * message. + */ +export type LogLevel = "all" | "trace" | "debug" | "log" | "info" | "warn" | "error" | "off"; +/** + * @public + * + * An object consumed by Logger constructor to initiate a logger object. + */ +export interface LoggerOptions { + logger?: Logger; + logLevel?: LogLevel; +} diff --git a/amplify/functions/deleteDocument/node_modules/@aws-sdk/types/dist-types/middleware.d.ts b/amplify/functions/deleteDocument/node_modules/@aws-sdk/types/dist-types/middleware.d.ts new file mode 100644 index 0000000..06ba3e2 --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@aws-sdk/types/dist-types/middleware.d.ts @@ -0,0 +1,13 @@ +import { HandlerExecutionContext } from "@smithy/types"; +import { AwsSdkFeatures } from "./feature-ids"; +export { AbsoluteLocation, BuildHandler, BuildHandlerArguments, BuildHandlerOptions, BuildHandlerOutput, BuildMiddleware, DeserializeHandler, DeserializeHandlerArguments, DeserializeHandlerOptions, DeserializeHandlerOutput, DeserializeMiddleware, FinalizeHandler, FinalizeHandlerArguments, FinalizeHandlerOutput, FinalizeRequestHandlerOptions, FinalizeRequestMiddleware, Handler, HandlerExecutionContext, HandlerOptions, InitializeHandler, InitializeHandlerArguments, InitializeHandlerOptions, InitializeHandlerOutput, InitializeMiddleware, MiddlewareStack, MiddlewareType, Pluggable, Priority, Relation, RelativeLocation, RelativeMiddlewareOptions, SerializeHandler, SerializeHandlerArguments, SerializeHandlerOptions, SerializeHandlerOutput, SerializeMiddleware, Step, Terminalware, } from "@smithy/types"; +/** + * @internal + * Contains reserved keys for AWS SDK internal usage of the + * handler 
execution context object. + */ +export interface AwsHandlerExecutionContext extends HandlerExecutionContext { + __aws_sdk_context?: { + features?: AwsSdkFeatures; + }; +} diff --git a/amplify/functions/deleteDocument/node_modules/@aws-sdk/types/dist-types/pagination.d.ts b/amplify/functions/deleteDocument/node_modules/@aws-sdk/types/dist-types/pagination.d.ts new file mode 100644 index 0000000..af791b0 --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@aws-sdk/types/dist-types/pagination.d.ts @@ -0,0 +1 @@ +export { PaginationConfiguration, Paginator } from "@smithy/types"; diff --git a/amplify/functions/deleteDocument/node_modules/@aws-sdk/types/dist-types/profile.d.ts b/amplify/functions/deleteDocument/node_modules/@aws-sdk/types/dist-types/profile.d.ts new file mode 100644 index 0000000..9916f3b --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@aws-sdk/types/dist-types/profile.d.ts @@ -0,0 +1 @@ +export { IniSection, Profile, ParsedIniData, SharedConfigFiles } from "@smithy/types"; diff --git a/amplify/functions/deleteDocument/node_modules/@aws-sdk/types/dist-types/request.d.ts b/amplify/functions/deleteDocument/node_modules/@aws-sdk/types/dist-types/request.d.ts new file mode 100644 index 0000000..95405d1 --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@aws-sdk/types/dist-types/request.d.ts @@ -0,0 +1,7 @@ +/** + * @internal + */ +export interface Request { + destination: URL; + body?: any; +} diff --git a/amplify/functions/deleteDocument/node_modules/@aws-sdk/types/dist-types/response.d.ts b/amplify/functions/deleteDocument/node_modules/@aws-sdk/types/dist-types/response.d.ts new file mode 100644 index 0000000..8d99350 --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@aws-sdk/types/dist-types/response.d.ts @@ -0,0 +1,7 @@ +export { MetadataBearer, ResponseMetadata } from "@smithy/types"; +/** + * @internal + */ +export interface Response { + body: any; +} diff --git 
a/amplify/functions/deleteDocument/node_modules/@aws-sdk/types/dist-types/retry.d.ts b/amplify/functions/deleteDocument/node_modules/@aws-sdk/types/dist-types/retry.d.ts new file mode 100644 index 0000000..4b7eb98 --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@aws-sdk/types/dist-types/retry.d.ts @@ -0,0 +1 @@ +export { ExponentialBackoffJitterType, ExponentialBackoffStrategyOptions, RetryBackoffStrategy, RetryErrorInfo, RetryErrorType, RetryStrategyOptions, RetryStrategyV2, RetryToken, StandardRetryBackoffStrategy, StandardRetryToken, } from "@smithy/types"; diff --git a/amplify/functions/deleteDocument/node_modules/@aws-sdk/types/dist-types/serde.d.ts b/amplify/functions/deleteDocument/node_modules/@aws-sdk/types/dist-types/serde.d.ts new file mode 100644 index 0000000..c4cab79 --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@aws-sdk/types/dist-types/serde.d.ts @@ -0,0 +1,24 @@ +export { EndpointBearer, StreamCollector, SerdeContext, ResponseDeserializer, RequestSerializer, SdkStreamMixin, SdkStream, WithSdkStreamMixin, SdkStreamMixinInjector, SdkStreamSerdeContext, } from "@smithy/types"; +/** + * @public + * + * Declare DOM interfaces in case dom.d.ts is not added to the tsconfig lib, causing + * interfaces to not be defined. For developers with dom.d.ts added, the interfaces will + * be merged correctly. + * + * This is also required for any clients with streaming interfaces where the corresponding + * types are also referred. The type is only declared here once since this `@aws-sdk/types` + * is depended by all `@aws-sdk` packages. 
+ */ +declare global { + /** + * @public + */ + export interface ReadableStream { + } + /** + * @public + */ + export interface Blob { + } +} diff --git a/amplify/functions/deleteDocument/node_modules/@aws-sdk/types/dist-types/shapes.d.ts b/amplify/functions/deleteDocument/node_modules/@aws-sdk/types/dist-types/shapes.d.ts new file mode 100644 index 0000000..bc19cc7 --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@aws-sdk/types/dist-types/shapes.d.ts @@ -0,0 +1 @@ +export { DocumentType, RetryableTrait, SmithyException, SdkError } from "@smithy/types"; diff --git a/amplify/functions/deleteDocument/node_modules/@aws-sdk/types/dist-types/signature.d.ts b/amplify/functions/deleteDocument/node_modules/@aws-sdk/types/dist-types/signature.d.ts new file mode 100644 index 0000000..23cbe97 --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@aws-sdk/types/dist-types/signature.d.ts @@ -0,0 +1 @@ +export { DateInput, EventSigner, EventSigningArguments, FormattedEvent, MessageSigner, RequestSigningArguments, RequestPresigner, RequestPresigningArguments, RequestSigner, SignableMessage, SignedMessage, SigningArguments, StringSigner, } from "@smithy/types"; diff --git a/amplify/functions/deleteDocument/node_modules/@aws-sdk/types/dist-types/stream.d.ts b/amplify/functions/deleteDocument/node_modules/@aws-sdk/types/dist-types/stream.d.ts new file mode 100644 index 0000000..9092844 --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@aws-sdk/types/dist-types/stream.d.ts @@ -0,0 +1 @@ +export { GetAwsChunkedEncodingStream, GetAwsChunkedEncodingStreamOptions } from "@smithy/types"; diff --git a/amplify/functions/deleteDocument/node_modules/@aws-sdk/types/dist-types/token.d.ts b/amplify/functions/deleteDocument/node_modules/@aws-sdk/types/dist-types/token.d.ts new file mode 100644 index 0000000..a68d58f --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@aws-sdk/types/dist-types/token.d.ts @@ -0,0 +1,17 @@ +import { 
TokenIdentity } from "./identity"; +import { Provider } from "./util"; +/** + * @public + * + * An object representing temporary or permanent AWS token. + * + * @deprecated Use {@link TokenIdentity} + */ +export interface Token extends TokenIdentity { +} +/** + * @public + * + * @deprecated Use {@link TokenIdentityProvider} + */ +export type TokenProvider = Provider; diff --git a/amplify/functions/deleteDocument/node_modules/@aws-sdk/types/dist-types/transfer.d.ts b/amplify/functions/deleteDocument/node_modules/@aws-sdk/types/dist-types/transfer.d.ts new file mode 100644 index 0000000..ba78190 --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@aws-sdk/types/dist-types/transfer.d.ts @@ -0,0 +1 @@ +export { RequestContext, RequestHandler, RequestHandlerMetadata, RequestHandlerOutput, RequestHandlerProtocol, } from "@smithy/types"; diff --git a/amplify/functions/deleteDocument/node_modules/@aws-sdk/types/dist-types/ts3.4/abort.d.ts b/amplify/functions/deleteDocument/node_modules/@aws-sdk/types/dist-types/ts3.4/abort.d.ts new file mode 100644 index 0000000..dad6079 --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@aws-sdk/types/dist-types/ts3.4/abort.d.ts @@ -0,0 +1 @@ +export { AbortController, AbortHandler, AbortSignal } from "@smithy/types"; diff --git a/amplify/functions/deleteDocument/node_modules/@aws-sdk/types/dist-types/ts3.4/auth.d.ts b/amplify/functions/deleteDocument/node_modules/@aws-sdk/types/dist-types/ts3.4/auth.d.ts new file mode 100644 index 0000000..8a02dbc --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@aws-sdk/types/dist-types/ts3.4/auth.d.ts @@ -0,0 +1,5 @@ +export { + AuthScheme, + HttpAuthDefinition, + HttpAuthLocation, +} from "@smithy/types"; diff --git a/amplify/functions/deleteDocument/node_modules/@aws-sdk/types/dist-types/ts3.4/blob/blob-types.d.ts b/amplify/functions/deleteDocument/node_modules/@aws-sdk/types/dist-types/ts3.4/blob/blob-types.d.ts new file mode 100644 index 
0000000..df39efe --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@aws-sdk/types/dist-types/ts3.4/blob/blob-types.d.ts @@ -0,0 +1,2 @@ +import { BlobTypes } from "@smithy/types"; +export { BlobTypes }; diff --git a/amplify/functions/deleteDocument/node_modules/@aws-sdk/types/dist-types/ts3.4/checksum.d.ts b/amplify/functions/deleteDocument/node_modules/@aws-sdk/types/dist-types/ts3.4/checksum.d.ts new file mode 100644 index 0000000..f805d72 --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@aws-sdk/types/dist-types/ts3.4/checksum.d.ts @@ -0,0 +1 @@ +export { Checksum, ChecksumConstructor } from "@smithy/types"; diff --git a/amplify/functions/deleteDocument/node_modules/@aws-sdk/types/dist-types/ts3.4/client.d.ts b/amplify/functions/deleteDocument/node_modules/@aws-sdk/types/dist-types/ts3.4/client.d.ts new file mode 100644 index 0000000..d6b3dcf --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@aws-sdk/types/dist-types/ts3.4/client.d.ts @@ -0,0 +1 @@ +export { Client } from "@smithy/types"; diff --git a/amplify/functions/deleteDocument/node_modules/@aws-sdk/types/dist-types/ts3.4/command.d.ts b/amplify/functions/deleteDocument/node_modules/@aws-sdk/types/dist-types/ts3.4/command.d.ts new file mode 100644 index 0000000..3887267 --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@aws-sdk/types/dist-types/ts3.4/command.d.ts @@ -0,0 +1 @@ +export { Command } from "@smithy/types"; diff --git a/amplify/functions/deleteDocument/node_modules/@aws-sdk/types/dist-types/ts3.4/connection.d.ts b/amplify/functions/deleteDocument/node_modules/@aws-sdk/types/dist-types/ts3.4/connection.d.ts new file mode 100644 index 0000000..36ebd00 --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@aws-sdk/types/dist-types/ts3.4/connection.d.ts @@ -0,0 +1,6 @@ +export { + ConnectConfiguration, + ConnectionManager, + ConnectionManagerConfiguration, + ConnectionPool, +} from "@smithy/types"; diff --git 
a/amplify/functions/deleteDocument/node_modules/@aws-sdk/types/dist-types/ts3.4/credentials.d.ts b/amplify/functions/deleteDocument/node_modules/@aws-sdk/types/dist-types/ts3.4/credentials.d.ts new file mode 100644 index 0000000..6c91a35 --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@aws-sdk/types/dist-types/ts3.4/credentials.d.ts @@ -0,0 +1,13 @@ +import { Logger } from "@smithy/types"; +import { AwsCredentialIdentity } from "./identity"; +import { Provider } from "./util"; +export interface Credentials extends AwsCredentialIdentity {} +export type CredentialProvider = Provider; +export type CredentialProviderOptions = { + logger?: Logger; + parentClientConfig?: { + region?: string | Provider; + profile?: string; + [key: string]: unknown; + }; +}; diff --git a/amplify/functions/deleteDocument/node_modules/@aws-sdk/types/dist-types/ts3.4/crypto.d.ts b/amplify/functions/deleteDocument/node_modules/@aws-sdk/types/dist-types/ts3.4/crypto.d.ts new file mode 100644 index 0000000..dfe61bf --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@aws-sdk/types/dist-types/ts3.4/crypto.d.ts @@ -0,0 +1,7 @@ +export { + Hash, + HashConstructor, + StreamHasher, + randomValues, + SourceData, +} from "@smithy/types"; diff --git a/amplify/functions/deleteDocument/node_modules/@aws-sdk/types/dist-types/ts3.4/dns.d.ts b/amplify/functions/deleteDocument/node_modules/@aws-sdk/types/dist-types/ts3.4/dns.d.ts new file mode 100644 index 0000000..d899949 --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@aws-sdk/types/dist-types/ts3.4/dns.d.ts @@ -0,0 +1,19 @@ +export declare enum HostAddressType { + AAAA = "AAAA", + A = "A", +} +export interface HostAddress { + addressType: HostAddressType; + address: string; + hostName: string; + service?: string; +} +export interface HostResolverArguments { + hostName: string; + service?: string; +} +export interface HostResolver { + resolveAddress(args: HostResolverArguments): Promise; + 
reportFailureOnAddress(addr: HostAddress): void; + purgeCache(args?: HostResolverArguments): void; +} diff --git a/amplify/functions/deleteDocument/node_modules/@aws-sdk/types/dist-types/ts3.4/encode.d.ts b/amplify/functions/deleteDocument/node_modules/@aws-sdk/types/dist-types/ts3.4/encode.d.ts new file mode 100644 index 0000000..76966f9 --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@aws-sdk/types/dist-types/ts3.4/encode.d.ts @@ -0,0 +1,6 @@ +export { + MessageDecoder, + MessageEncoder, + AvailableMessage, + AvailableMessages, +} from "@smithy/types"; diff --git a/amplify/functions/deleteDocument/node_modules/@aws-sdk/types/dist-types/ts3.4/endpoint.d.ts b/amplify/functions/deleteDocument/node_modules/@aws-sdk/types/dist-types/ts3.4/endpoint.d.ts new file mode 100644 index 0000000..ff3c7de --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@aws-sdk/types/dist-types/ts3.4/endpoint.d.ts @@ -0,0 +1,9 @@ +export { + EndpointARN, + EndpointPartition, + EndpointURLScheme, + EndpointURL, + EndpointObjectProperty, + EndpointV2, + EndpointParameters, +} from "@smithy/types"; diff --git a/amplify/functions/deleteDocument/node_modules/@aws-sdk/types/dist-types/ts3.4/eventStream.d.ts b/amplify/functions/deleteDocument/node_modules/@aws-sdk/types/dist-types/ts3.4/eventStream.d.ts new file mode 100644 index 0000000..e4c04a9 --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@aws-sdk/types/dist-types/ts3.4/eventStream.d.ts @@ -0,0 +1,24 @@ +export { + Message, + MessageHeaders, + BooleanHeaderValue, + ByteHeaderValue, + ShortHeaderValue, + IntegerHeaderValue, + LongHeaderValue, + BinaryHeaderValue, + StringHeaderValue, + TimestampHeaderValue, + UuidHeaderValue, + MessageHeaderValue, + Int64, + EventStreamSerdeContext, + EventStreamMarshaller, + EventStreamMarshallerDeserFn, + EventStreamMarshallerSerFn, + EventStreamPayloadHandler, + EventStreamPayloadHandlerProvider, + EventStreamRequestSigner, + EventStreamSerdeProvider, + 
EventStreamSignerProvider, +} from "@smithy/types"; diff --git a/amplify/functions/deleteDocument/node_modules/@aws-sdk/types/dist-types/ts3.4/extensions/index.d.ts b/amplify/functions/deleteDocument/node_modules/@aws-sdk/types/dist-types/ts3.4/extensions/index.d.ts new file mode 100644 index 0000000..accf5ec --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@aws-sdk/types/dist-types/ts3.4/extensions/index.d.ts @@ -0,0 +1,5 @@ +import { Provider } from "@smithy/types"; +export interface AwsRegionExtensionConfiguration { + setRegion(region: Provider): void; + region(): Provider; +} diff --git a/amplify/functions/deleteDocument/node_modules/@aws-sdk/types/dist-types/ts3.4/feature-ids.d.ts b/amplify/functions/deleteDocument/node_modules/@aws-sdk/types/dist-types/ts3.4/feature-ids.d.ts new file mode 100644 index 0000000..6d57509 --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@aws-sdk/types/dist-types/ts3.4/feature-ids.d.ts @@ -0,0 +1,54 @@ +export type AwsSdkFeatures = Partial<{ + RESOURCE_MODEL: "A"; + WAITER: "B"; + PAGINATOR: "C"; + RETRY_MODE_LEGACY: "D"; + RETRY_MODE_STANDARD: "E"; + RETRY_MODE_ADAPTIVE: "F"; + S3_EXPRESS_BUCKET: "J"; + S3_ACCESS_GRANTS: "K"; + GZIP_REQUEST_COMPRESSION: "L"; + PROTOCOL_RPC_V2_CBOR: "M"; + ENDPOINT_OVERRIDE: "N"; + ACCOUNT_ID_ENDPOINT: "O"; + ACCOUNT_ID_MODE_PREFERRED: "P"; + ACCOUNT_ID_MODE_DISABLED: "Q"; + ACCOUNT_ID_MODE_REQUIRED: "R"; + SIGV4A_SIGNING: "S"; + FLEXIBLE_CHECKSUMS_REQ_CRC32: "U"; + FLEXIBLE_CHECKSUMS_REQ_CRC32C: "V"; + FLEXIBLE_CHECKSUMS_REQ_CRC64: "W"; + FLEXIBLE_CHECKSUMS_REQ_SHA1: "X"; + FLEXIBLE_CHECKSUMS_REQ_SHA256: "Y"; + FLEXIBLE_CHECKSUMS_REQ_WHEN_SUPPORTED: "Z"; + FLEXIBLE_CHECKSUMS_REQ_WHEN_REQUIRED: "a"; + FLEXIBLE_CHECKSUMS_RES_WHEN_SUPPORTED: "b"; + FLEXIBLE_CHECKSUMS_RES_WHEN_REQUIRED: "c"; + DDB_MAPPER: "d"; +}> & + AwsSdkCredentialsFeatures; +export type AwsSdkCredentialsFeatures = Partial<{ + RESOLVED_ACCOUNT_ID: "T"; + CREDENTIALS_CODE: "e"; + 
CREDENTIALS_ENV_VARS: "g"; + CREDENTIALS_ENV_VARS_STS_WEB_ID_TOKEN: "h"; + CREDENTIALS_STS_ASSUME_ROLE: "i"; + CREDENTIALS_STS_ASSUME_ROLE_SAML: "j"; + CREDENTIALS_STS_ASSUME_ROLE_WEB_ID: "k"; + CREDENTIALS_STS_FEDERATION_TOKEN: "l"; + CREDENTIALS_STS_SESSION_TOKEN: "m"; + CREDENTIALS_PROFILE: "n"; + CREDENTIALS_PROFILE_SOURCE_PROFILE: "o"; + CREDENTIALS_PROFILE_NAMED_PROVIDER: "p"; + CREDENTIALS_PROFILE_STS_WEB_ID_TOKEN: "q"; + CREDENTIALS_PROFILE_SSO: "r"; + CREDENTIALS_SSO: "s"; + CREDENTIALS_PROFILE_SSO_LEGACY: "t"; + CREDENTIALS_SSO_LEGACY: "u"; + CREDENTIALS_PROFILE_PROCESS: "v"; + CREDENTIALS_PROCESS: "w"; + CREDENTIALS_BOTO2_CONFIG_FILE: "x"; + CREDENTIALS_AWS_SDK_STORE: "y"; + CREDENTIALS_HTTP: "z"; + CREDENTIALS_IMDS: "0"; +}>; diff --git a/amplify/functions/deleteDocument/node_modules/@aws-sdk/types/dist-types/ts3.4/function.d.ts b/amplify/functions/deleteDocument/node_modules/@aws-sdk/types/dist-types/ts3.4/function.d.ts new file mode 100644 index 0000000..d6efac5 --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@aws-sdk/types/dist-types/ts3.4/function.d.ts @@ -0,0 +1,7 @@ +export type MergeFunctions = F1 extends (arg: infer A1) => infer R1 + ? F2 extends (arg: infer A2) => infer R2 + ? R1 extends Promise + ? 
(arg?: A1 & A2) => Promise & Awaited> + : (arg?: A1 & A2) => R1 & R2 + : never + : never; diff --git a/amplify/functions/deleteDocument/node_modules/@aws-sdk/types/dist-types/ts3.4/http.d.ts b/amplify/functions/deleteDocument/node_modules/@aws-sdk/types/dist-types/ts3.4/http.d.ts new file mode 100644 index 0000000..d8e0eab --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@aws-sdk/types/dist-types/ts3.4/http.d.ts @@ -0,0 +1,17 @@ +import { HttpResponse } from "@smithy/types"; +export { + Endpoint, + HeaderBag, + HttpHandlerOptions, + HttpMessage, + HttpRequest, + HttpResponse, + QueryParameterBag, +} from "@smithy/types"; +export interface Headers extends Map { + withHeader(headerName: string, headerValue: string): Headers; + withoutHeader(headerName: string): Headers; +} +export interface ResolvedHttpResponse extends HttpResponse { + body: string; +} diff --git a/amplify/functions/deleteDocument/node_modules/@aws-sdk/types/dist-types/ts3.4/identity/AnonymousIdentity.d.ts b/amplify/functions/deleteDocument/node_modules/@aws-sdk/types/dist-types/ts3.4/identity/AnonymousIdentity.d.ts new file mode 100644 index 0000000..5b175f6 --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@aws-sdk/types/dist-types/ts3.4/identity/AnonymousIdentity.d.ts @@ -0,0 +1,2 @@ +import { Identity } from "./Identity"; +export interface AnonymousIdentity extends Identity {} diff --git a/amplify/functions/deleteDocument/node_modules/@aws-sdk/types/dist-types/ts3.4/identity/AwsCredentialIdentity.d.ts b/amplify/functions/deleteDocument/node_modules/@aws-sdk/types/dist-types/ts3.4/identity/AwsCredentialIdentity.d.ts new file mode 100644 index 0000000..aaec358 --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@aws-sdk/types/dist-types/ts3.4/identity/AwsCredentialIdentity.d.ts @@ -0,0 +1,30 @@ +import { + AwsCredentialIdentity, + AwsCredentialIdentityProvider, + Logger, + RequestHandler, +} from "@smithy/types"; +import { AwsSdkCredentialsFeatures } 
from "../feature-ids"; +export { + AwsCredentialIdentity, + AwsCredentialIdentityProvider, + IdentityProvider, +} from "@smithy/types"; +export interface AwsIdentityProperties { + callerClientConfig?: { + credentials?: AwsCredentialIdentity | AwsCredentialIdentityProvider; + credentialDefaultProvider?: (input?: any) => AwsCredentialIdentityProvider; + logger?: Logger; + profile?: string; + region(): Promise; + requestHandler?: RequestHandler; + }; +} +export type RuntimeConfigIdentityProvider = ( + awsIdentityProperties?: AwsIdentityProperties +) => Promise; +export type RuntimeConfigAwsCredentialIdentityProvider = + RuntimeConfigIdentityProvider; +export type AttributedAwsCredentialIdentity = AwsCredentialIdentity & { + $source?: AwsSdkCredentialsFeatures; +}; diff --git a/amplify/functions/deleteDocument/node_modules/@aws-sdk/types/dist-types/ts3.4/identity/Identity.d.ts b/amplify/functions/deleteDocument/node_modules/@aws-sdk/types/dist-types/ts3.4/identity/Identity.d.ts new file mode 100644 index 0000000..4175fd3 --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@aws-sdk/types/dist-types/ts3.4/identity/Identity.d.ts @@ -0,0 +1 @@ +export { Identity, IdentityProvider } from "@smithy/types"; diff --git a/amplify/functions/deleteDocument/node_modules/@aws-sdk/types/dist-types/ts3.4/identity/LoginIdentity.d.ts b/amplify/functions/deleteDocument/node_modules/@aws-sdk/types/dist-types/ts3.4/identity/LoginIdentity.d.ts new file mode 100644 index 0000000..3258bbb --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@aws-sdk/types/dist-types/ts3.4/identity/LoginIdentity.d.ts @@ -0,0 +1,6 @@ +import { Identity, IdentityProvider } from "./Identity"; +export interface LoginIdentity extends Identity { + readonly username: string; + readonly password: string; +} +export type LoginIdentityProvider = IdentityProvider; diff --git a/amplify/functions/deleteDocument/node_modules/@aws-sdk/types/dist-types/ts3.4/identity/TokenIdentity.d.ts 
b/amplify/functions/deleteDocument/node_modules/@aws-sdk/types/dist-types/ts3.4/identity/TokenIdentity.d.ts new file mode 100644 index 0000000..66301bc --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@aws-sdk/types/dist-types/ts3.4/identity/TokenIdentity.d.ts @@ -0,0 +1 @@ +export { TokenIdentity, TokenIdentityProvider } from "@smithy/types"; diff --git a/amplify/functions/deleteDocument/node_modules/@aws-sdk/types/dist-types/ts3.4/identity/index.d.ts b/amplify/functions/deleteDocument/node_modules/@aws-sdk/types/dist-types/ts3.4/identity/index.d.ts new file mode 100644 index 0000000..863e78e --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@aws-sdk/types/dist-types/ts3.4/identity/index.d.ts @@ -0,0 +1,5 @@ +export * from "./AnonymousIdentity"; +export * from "./AwsCredentialIdentity"; +export * from "./Identity"; +export * from "./LoginIdentity"; +export * from "./TokenIdentity"; diff --git a/amplify/functions/deleteDocument/node_modules/@aws-sdk/types/dist-types/ts3.4/index.d.ts b/amplify/functions/deleteDocument/node_modules/@aws-sdk/types/dist-types/ts3.4/index.d.ts new file mode 100644 index 0000000..a7f99d9 --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@aws-sdk/types/dist-types/ts3.4/index.d.ts @@ -0,0 +1,34 @@ +export * from "./abort"; +export * from "./auth"; +export * from "./blob/blob-types"; +export * from "./checksum"; +export * from "./client"; +export * from "./command"; +export * from "./connection"; +export * from "./credentials"; +export * from "./crypto"; +export * from "./dns"; +export * from "./encode"; +export * from "./endpoint"; +export * from "./eventStream"; +export * from "./extensions"; +export * from "./feature-ids"; +export * from "./function"; +export * from "./http"; +export * from "./identity"; +export * from "./logger"; +export * from "./middleware"; +export * from "./pagination"; +export * from "./profile"; +export * from "./request"; +export * from "./response"; +export * from 
"./retry"; +export * from "./serde"; +export * from "./shapes"; +export * from "./signature"; +export * from "./stream"; +export * from "./token"; +export * from "./transfer"; +export * from "./uri"; +export * from "./util"; +export * from "./waiter"; diff --git a/amplify/functions/deleteDocument/node_modules/@aws-sdk/types/dist-types/ts3.4/logger.d.ts b/amplify/functions/deleteDocument/node_modules/@aws-sdk/types/dist-types/ts3.4/logger.d.ts new file mode 100644 index 0000000..c714915 --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@aws-sdk/types/dist-types/ts3.4/logger.d.ts @@ -0,0 +1,15 @@ +import { Logger } from "@smithy/types"; +export { Logger } from "@smithy/types"; +export type LogLevel = + | "all" + | "trace" + | "debug" + | "log" + | "info" + | "warn" + | "error" + | "off"; +export interface LoggerOptions { + logger?: Logger; + logLevel?: LogLevel; +} diff --git a/amplify/functions/deleteDocument/node_modules/@aws-sdk/types/dist-types/ts3.4/middleware.d.ts b/amplify/functions/deleteDocument/node_modules/@aws-sdk/types/dist-types/ts3.4/middleware.d.ts new file mode 100644 index 0000000..e101e9b --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@aws-sdk/types/dist-types/ts3.4/middleware.d.ts @@ -0,0 +1,47 @@ +import { HandlerExecutionContext } from "@smithy/types"; +import { AwsSdkFeatures } from "./feature-ids"; +export { + AbsoluteLocation, + BuildHandler, + BuildHandlerArguments, + BuildHandlerOptions, + BuildHandlerOutput, + BuildMiddleware, + DeserializeHandler, + DeserializeHandlerArguments, + DeserializeHandlerOptions, + DeserializeHandlerOutput, + DeserializeMiddleware, + FinalizeHandler, + FinalizeHandlerArguments, + FinalizeHandlerOutput, + FinalizeRequestHandlerOptions, + FinalizeRequestMiddleware, + Handler, + HandlerExecutionContext, + HandlerOptions, + InitializeHandler, + InitializeHandlerArguments, + InitializeHandlerOptions, + InitializeHandlerOutput, + InitializeMiddleware, + MiddlewareStack, + 
MiddlewareType, + Pluggable, + Priority, + Relation, + RelativeLocation, + RelativeMiddlewareOptions, + SerializeHandler, + SerializeHandlerArguments, + SerializeHandlerOptions, + SerializeHandlerOutput, + SerializeMiddleware, + Step, + Terminalware, +} from "@smithy/types"; +export interface AwsHandlerExecutionContext extends HandlerExecutionContext { + __aws_sdk_context?: { + features?: AwsSdkFeatures; + }; +} diff --git a/amplify/functions/deleteDocument/node_modules/@aws-sdk/types/dist-types/ts3.4/pagination.d.ts b/amplify/functions/deleteDocument/node_modules/@aws-sdk/types/dist-types/ts3.4/pagination.d.ts new file mode 100644 index 0000000..af791b0 --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@aws-sdk/types/dist-types/ts3.4/pagination.d.ts @@ -0,0 +1 @@ +export { PaginationConfiguration, Paginator } from "@smithy/types"; diff --git a/amplify/functions/deleteDocument/node_modules/@aws-sdk/types/dist-types/ts3.4/profile.d.ts b/amplify/functions/deleteDocument/node_modules/@aws-sdk/types/dist-types/ts3.4/profile.d.ts new file mode 100644 index 0000000..b3813d8 --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@aws-sdk/types/dist-types/ts3.4/profile.d.ts @@ -0,0 +1,6 @@ +export { + IniSection, + Profile, + ParsedIniData, + SharedConfigFiles, +} from "@smithy/types"; diff --git a/amplify/functions/deleteDocument/node_modules/@aws-sdk/types/dist-types/ts3.4/request.d.ts b/amplify/functions/deleteDocument/node_modules/@aws-sdk/types/dist-types/ts3.4/request.d.ts new file mode 100644 index 0000000..5c6e793 --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@aws-sdk/types/dist-types/ts3.4/request.d.ts @@ -0,0 +1,4 @@ +export interface Request { + destination: URL; + body?: any; +} diff --git a/amplify/functions/deleteDocument/node_modules/@aws-sdk/types/dist-types/ts3.4/response.d.ts b/amplify/functions/deleteDocument/node_modules/@aws-sdk/types/dist-types/ts3.4/response.d.ts new file mode 100644 index 
0000000..4e5fcd0 --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@aws-sdk/types/dist-types/ts3.4/response.d.ts @@ -0,0 +1,4 @@ +export { MetadataBearer, ResponseMetadata } from "@smithy/types"; +export interface Response { + body: any; +} diff --git a/amplify/functions/deleteDocument/node_modules/@aws-sdk/types/dist-types/ts3.4/retry.d.ts b/amplify/functions/deleteDocument/node_modules/@aws-sdk/types/dist-types/ts3.4/retry.d.ts new file mode 100644 index 0000000..8fc946a --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@aws-sdk/types/dist-types/ts3.4/retry.d.ts @@ -0,0 +1,12 @@ +export { + ExponentialBackoffJitterType, + ExponentialBackoffStrategyOptions, + RetryBackoffStrategy, + RetryErrorInfo, + RetryErrorType, + RetryStrategyOptions, + RetryStrategyV2, + RetryToken, + StandardRetryBackoffStrategy, + StandardRetryToken, +} from "@smithy/types"; diff --git a/amplify/functions/deleteDocument/node_modules/@aws-sdk/types/dist-types/ts3.4/serde.d.ts b/amplify/functions/deleteDocument/node_modules/@aws-sdk/types/dist-types/ts3.4/serde.d.ts new file mode 100644 index 0000000..a7ed76f --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@aws-sdk/types/dist-types/ts3.4/serde.d.ts @@ -0,0 +1,16 @@ +export { + EndpointBearer, + StreamCollector, + SerdeContext, + ResponseDeserializer, + RequestSerializer, + SdkStreamMixin, + SdkStream, + WithSdkStreamMixin, + SdkStreamMixinInjector, + SdkStreamSerdeContext, +} from "@smithy/types"; +declare global { + export interface ReadableStream {} + export interface Blob {} +} diff --git a/amplify/functions/deleteDocument/node_modules/@aws-sdk/types/dist-types/ts3.4/shapes.d.ts b/amplify/functions/deleteDocument/node_modules/@aws-sdk/types/dist-types/ts3.4/shapes.d.ts new file mode 100644 index 0000000..d1efa9a --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@aws-sdk/types/dist-types/ts3.4/shapes.d.ts @@ -0,0 +1,6 @@ +export { + DocumentType, + RetryableTrait, + 
SmithyException, + SdkError, +} from "@smithy/types"; diff --git a/amplify/functions/deleteDocument/node_modules/@aws-sdk/types/dist-types/ts3.4/signature.d.ts b/amplify/functions/deleteDocument/node_modules/@aws-sdk/types/dist-types/ts3.4/signature.d.ts new file mode 100644 index 0000000..cbabd75 --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@aws-sdk/types/dist-types/ts3.4/signature.d.ts @@ -0,0 +1,15 @@ +export { + DateInput, + EventSigner, + EventSigningArguments, + FormattedEvent, + MessageSigner, + RequestSigningArguments, + RequestPresigner, + RequestPresigningArguments, + RequestSigner, + SignableMessage, + SignedMessage, + SigningArguments, + StringSigner, +} from "@smithy/types"; diff --git a/amplify/functions/deleteDocument/node_modules/@aws-sdk/types/dist-types/ts3.4/stream.d.ts b/amplify/functions/deleteDocument/node_modules/@aws-sdk/types/dist-types/ts3.4/stream.d.ts new file mode 100644 index 0000000..1b79413 --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@aws-sdk/types/dist-types/ts3.4/stream.d.ts @@ -0,0 +1,4 @@ +export { + GetAwsChunkedEncodingStream, + GetAwsChunkedEncodingStreamOptions, +} from "@smithy/types"; diff --git a/amplify/functions/deleteDocument/node_modules/@aws-sdk/types/dist-types/ts3.4/token.d.ts b/amplify/functions/deleteDocument/node_modules/@aws-sdk/types/dist-types/ts3.4/token.d.ts new file mode 100644 index 0000000..c33e506 --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@aws-sdk/types/dist-types/ts3.4/token.d.ts @@ -0,0 +1,4 @@ +import { TokenIdentity } from "./identity"; +import { Provider } from "./util"; +export interface Token extends TokenIdentity {} +export type TokenProvider = Provider; diff --git a/amplify/functions/deleteDocument/node_modules/@aws-sdk/types/dist-types/ts3.4/transfer.d.ts b/amplify/functions/deleteDocument/node_modules/@aws-sdk/types/dist-types/ts3.4/transfer.d.ts new file mode 100644 index 0000000..04a7f87 --- /dev/null +++ 
b/amplify/functions/deleteDocument/node_modules/@aws-sdk/types/dist-types/ts3.4/transfer.d.ts @@ -0,0 +1,7 @@ +export { + RequestContext, + RequestHandler, + RequestHandlerMetadata, + RequestHandlerOutput, + RequestHandlerProtocol, +} from "@smithy/types"; diff --git a/amplify/functions/deleteDocument/node_modules/@aws-sdk/types/dist-types/ts3.4/uri.d.ts b/amplify/functions/deleteDocument/node_modules/@aws-sdk/types/dist-types/ts3.4/uri.d.ts new file mode 100644 index 0000000..297dfe4 --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@aws-sdk/types/dist-types/ts3.4/uri.d.ts @@ -0,0 +1 @@ +export { URI } from "@smithy/types"; diff --git a/amplify/functions/deleteDocument/node_modules/@aws-sdk/types/dist-types/ts3.4/util.d.ts b/amplify/functions/deleteDocument/node_modules/@aws-sdk/types/dist-types/ts3.4/util.d.ts new file mode 100644 index 0000000..e7e43e6 --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@aws-sdk/types/dist-types/ts3.4/util.d.ts @@ -0,0 +1,14 @@ +export { + Encoder, + Decoder, + Provider, + UserAgentPair, + UserAgent, + UrlParser, + MemoizedProvider, + BodyLengthCalculator, + RegionInfo, + RegionInfoProviderOptions, + RegionInfoProvider, + RetryStrategy, +} from "@smithy/types"; diff --git a/amplify/functions/deleteDocument/node_modules/@aws-sdk/types/dist-types/ts3.4/waiter.d.ts b/amplify/functions/deleteDocument/node_modules/@aws-sdk/types/dist-types/ts3.4/waiter.d.ts new file mode 100644 index 0000000..bb98020 --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@aws-sdk/types/dist-types/ts3.4/waiter.d.ts @@ -0,0 +1 @@ +export { WaiterConfiguration } from "@smithy/types"; diff --git a/amplify/functions/deleteDocument/node_modules/@aws-sdk/types/dist-types/uri.d.ts b/amplify/functions/deleteDocument/node_modules/@aws-sdk/types/dist-types/uri.d.ts new file mode 100644 index 0000000..297dfe4 --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@aws-sdk/types/dist-types/uri.d.ts @@ -0,0 +1 @@ 
+export { URI } from "@smithy/types"; diff --git a/amplify/functions/deleteDocument/node_modules/@aws-sdk/types/dist-types/util.d.ts b/amplify/functions/deleteDocument/node_modules/@aws-sdk/types/dist-types/util.d.ts new file mode 100644 index 0000000..fd059b6 --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@aws-sdk/types/dist-types/util.d.ts @@ -0,0 +1 @@ +export { Encoder, Decoder, Provider, UserAgentPair, UserAgent, UrlParser, MemoizedProvider, BodyLengthCalculator, RegionInfo, RegionInfoProviderOptions, RegionInfoProvider, RetryStrategy, } from "@smithy/types"; diff --git a/amplify/functions/deleteDocument/node_modules/@aws-sdk/types/dist-types/waiter.d.ts b/amplify/functions/deleteDocument/node_modules/@aws-sdk/types/dist-types/waiter.d.ts new file mode 100644 index 0000000..bb98020 --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@aws-sdk/types/dist-types/waiter.d.ts @@ -0,0 +1 @@ +export { WaiterConfiguration } from "@smithy/types"; diff --git a/amplify/functions/deleteDocument/node_modules/@aws-sdk/types/package.json b/amplify/functions/deleteDocument/node_modules/@aws-sdk/types/package.json new file mode 100755 index 0000000..eaf5c44 --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@aws-sdk/types/package.json @@ -0,0 +1,56 @@ +{ + "name": "@aws-sdk/types", + "version": "3.775.0", + "main": "./dist-cjs/index.js", + "module": "./dist-es/index.js", + "types": "./dist-types/index.d.ts", + "description": "Types for the AWS SDK", + "scripts": { + "build": "concurrently 'yarn:build:cjs' 'yarn:build:es' 'yarn:build:types'", + "build:cjs": "node ../../scripts/compilation/inline types", + "build:es": "tsc -p tsconfig.es.json", + "build:include:deps": "lerna run --scope $npm_package_name --include-dependencies build", + "build:types": "tsc -p tsconfig.types.json", + "build:types:downlevel": "downlevel-dts dist-types dist-types/ts3.4", + "clean": "rimraf ./dist-* && rimraf *.tsbuildinfo", + "extract:docs": 
"api-extractor run --local", + "test": "tsc -p tsconfig.test.json" + }, + "author": { + "name": "AWS SDK for JavaScript Team", + "url": "https://aws.amazon.com/javascript/" + }, + "license": "Apache-2.0", + "engines": { + "node": ">=18.0.0" + }, + "typesVersions": { + "<4.0": { + "dist-types/*": [ + "dist-types/ts3.4/*" + ] + } + }, + "files": [ + "dist-*/**" + ], + "homepage": "https://github.com/aws/aws-sdk-js-v3/tree/main/packages/types", + "repository": { + "type": "git", + "url": "https://github.com/aws/aws-sdk-js-v3.git", + "directory": "packages/types" + }, + "dependencies": { + "@smithy/types": "^4.2.0", + "tslib": "^2.6.2" + }, + "devDependencies": { + "@tsconfig/recommended": "1.0.1", + "concurrently": "7.0.0", + "downlevel-dts": "0.10.1", + "rimraf": "3.0.2", + "typescript": "~5.2.2" + }, + "browser": {}, + "react-native": {} +} diff --git a/amplify/functions/deleteDocument/node_modules/@aws-sdk/util-dynamodb/LICENSE b/amplify/functions/deleteDocument/node_modules/@aws-sdk/util-dynamodb/LICENSE new file mode 100644 index 0000000..7b6491b --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@aws-sdk/util-dynamodb/LICENSE @@ -0,0 +1,201 @@ +Apache License + Version 2.0, January 2004 + http://www.apache.org/licenses/ + + TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION + + 1. Definitions. + + "License" shall mean the terms and conditions for use, reproduction, + and distribution as defined by Sections 1 through 9 of this document. + + "Licensor" shall mean the copyright owner or entity authorized by + the copyright owner that is granting the License. + + "Legal Entity" shall mean the union of the acting entity and all + other entities that control, are controlled by, or are under common + control with that entity. 
For the purposes of this definition, + "control" means (i) the power, direct or indirect, to cause the + direction or management of such entity, whether by contract or + otherwise, or (ii) ownership of fifty percent (50%) or more of the + outstanding shares, or (iii) beneficial ownership of such entity. + + "You" (or "Your") shall mean an individual or Legal Entity + exercising permissions granted by this License. + + "Source" form shall mean the preferred form for making modifications, + including but not limited to software source code, documentation + source, and configuration files. + + "Object" form shall mean any form resulting from mechanical + transformation or translation of a Source form, including but + not limited to compiled object code, generated documentation, + and conversions to other media types. + + "Work" shall mean the work of authorship, whether in Source or + Object form, made available under the License, as indicated by a + copyright notice that is included in or attached to the work + (an example is provided in the Appendix below). + + "Derivative Works" shall mean any work, whether in Source or Object + form, that is based on (or derived from) the Work and for which the + editorial revisions, annotations, elaborations, or other modifications + represent, as a whole, an original work of authorship. For the purposes + of this License, Derivative Works shall not include works that remain + separable from, or merely link (or bind by name) to the interfaces of, + the Work and Derivative Works thereof. + + "Contribution" shall mean any work of authorship, including + the original version of the Work and any modifications or additions + to that Work or Derivative Works thereof, that is intentionally + submitted to Licensor for inclusion in the Work by the copyright owner + or by an individual or Legal Entity authorized to submit on behalf of + the copyright owner. 
For the purposes of this definition, "submitted" + means any form of electronic, verbal, or written communication sent + to the Licensor or its representatives, including but not limited to + communication on electronic mailing lists, source code control systems, + and issue tracking systems that are managed by, or on behalf of, the + Licensor for the purpose of discussing and improving the Work, but + excluding communication that is conspicuously marked or otherwise + designated in writing by the copyright owner as "Not a Contribution." + + "Contributor" shall mean Licensor and any individual or Legal Entity + on behalf of whom a Contribution has been received by Licensor and + subsequently incorporated within the Work. + + 2. Grant of Copyright License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + copyright license to reproduce, prepare Derivative Works of, + publicly display, publicly perform, sublicense, and distribute the + Work and such Derivative Works in Source or Object form. + + 3. Grant of Patent License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + (except as stated in this section) patent license to make, have made, + use, offer to sell, sell, import, and otherwise transfer the Work, + where such license applies only to those patent claims licensable + by such Contributor that are necessarily infringed by their + Contribution(s) alone or by combination of their Contribution(s) + with the Work to which such Contribution(s) was submitted. 
If You + institute patent litigation against any entity (including a + cross-claim or counterclaim in a lawsuit) alleging that the Work + or a Contribution incorporated within the Work constitutes direct + or contributory patent infringement, then any patent licenses + granted to You under this License for that Work shall terminate + as of the date such litigation is filed. + + 4. Redistribution. You may reproduce and distribute copies of the + Work or Derivative Works thereof in any medium, with or without + modifications, and in Source or Object form, provided that You + meet the following conditions: + + (a) You must give any other recipients of the Work or + Derivative Works a copy of this License; and + + (b) You must cause any modified files to carry prominent notices + stating that You changed the files; and + + (c) You must retain, in the Source form of any Derivative Works + that You distribute, all copyright, patent, trademark, and + attribution notices from the Source form of the Work, + excluding those notices that do not pertain to any part of + the Derivative Works; and + + (d) If the Work includes a "NOTICE" text file as part of its + distribution, then any Derivative Works that You distribute must + include a readable copy of the attribution notices contained + within such NOTICE file, excluding those notices that do not + pertain to any part of the Derivative Works, in at least one + of the following places: within a NOTICE text file distributed + as part of the Derivative Works; within the Source form or + documentation, if provided along with the Derivative Works; or, + within a display generated by the Derivative Works, if and + wherever such third-party notices normally appear. The contents + of the NOTICE file are for informational purposes only and + do not modify the License. 
You may add Your own attribution + notices within Derivative Works that You distribute, alongside + or as an addendum to the NOTICE text from the Work, provided + that such additional attribution notices cannot be construed + as modifying the License. + + You may add Your own copyright statement to Your modifications and + may provide additional or different license terms and conditions + for use, reproduction, or distribution of Your modifications, or + for any such Derivative Works as a whole, provided Your use, + reproduction, and distribution of the Work otherwise complies with + the conditions stated in this License. + + 5. Submission of Contributions. Unless You explicitly state otherwise, + any Contribution intentionally submitted for inclusion in the Work + by You to the Licensor shall be under the terms and conditions of + this License, without any additional terms or conditions. + Notwithstanding the above, nothing herein shall supersede or modify + the terms of any separate license agreement you may have executed + with Licensor regarding such Contributions. + + 6. Trademarks. This License does not grant permission to use the trade + names, trademarks, service marks, or product names of the Licensor, + except as required for reasonable and customary use in describing the + origin of the Work and reproducing the content of the NOTICE file. + + 7. Disclaimer of Warranty. Unless required by applicable law or + agreed to in writing, Licensor provides the Work (and each + Contributor provides its Contributions) on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or + implied, including, without limitation, any warranties or conditions + of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A + PARTICULAR PURPOSE. You are solely responsible for determining the + appropriateness of using or redistributing the Work and assume any + risks associated with Your exercise of permissions under this License. + + 8. 
Limitation of Liability. In no event and under no legal theory, + whether in tort (including negligence), contract, or otherwise, + unless required by applicable law (such as deliberate and grossly + negligent acts) or agreed to in writing, shall any Contributor be + liable to You for damages, including any direct, indirect, special, + incidental, or consequential damages of any character arising as a + result of this License or out of the use or inability to use the + Work (including but not limited to damages for loss of goodwill, + work stoppage, computer failure or malfunction, or any and all + other commercial damages or losses), even if such Contributor + has been advised of the possibility of such damages. + + 9. Accepting Warranty or Additional Liability. While redistributing + the Work or Derivative Works thereof, You may choose to offer, + and charge a fee for, acceptance of support, warranty, indemnity, + or other liability obligations and/or rights consistent with this + License. However, in accepting such obligations, You may act only + on Your own behalf and on Your sole responsibility, not on behalf + of any other Contributor, and only if You agree to indemnify, + defend, and hold each Contributor harmless for any liability + incurred by, or claims asserted against, such Contributor by reason + of your accepting any such warranty or additional liability. + + END OF TERMS AND CONDITIONS + + APPENDIX: How to apply the Apache License to your work. + + To apply the Apache License to your work, attach the following + boilerplate notice, with the fields enclosed by brackets "{}" + replaced with your own identifying information. (Don't include + the brackets!) The text should be enclosed in the appropriate + comment syntax for the file format. We also recommend that a + file or class name and description of purpose be included on the + same "printed page" as the copyright notice for easier + identification within third-party archives. 
+ + Copyright 2018-2020 Amazon.com, Inc. or its affiliates. All Rights Reserved. + + Licensed under the Apache License, Version 2.0 (the "License"); + you may not use this file except in compliance with the License. + You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + + Unless required by applicable law or agreed to in writing, software + distributed under the License is distributed on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + See the License for the specific language governing permissions and + limitations under the License. \ No newline at end of file diff --git a/amplify/functions/deleteDocument/node_modules/@aws-sdk/util-dynamodb/README.md b/amplify/functions/deleteDocument/node_modules/@aws-sdk/util-dynamodb/README.md new file mode 100644 index 0000000..1d6d61a --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@aws-sdk/util-dynamodb/README.md @@ -0,0 +1,50 @@ +# @aws-sdk/util-dynamodb + +[![NPM version](https://img.shields.io/npm/v/@aws-sdk/util-dynamodb/latest.svg)](https://www.npmjs.com/package/@aws-sdk/util-dynamodb) +[![NPM downloads](https://img.shields.io/npm/dm/@aws-sdk/util-dynamodb.svg)](https://www.npmjs.com/package/@aws-sdk/util-dynamodb) + +This package provides utilities to be used with `@aws-sdk/client-dynamodb` + +If you are looking for DynamoDB Document client, please check +[@aws-sdk/lib-dynamodb](https://www.npmjs.com/package/@aws-sdk/lib-dynamodb) +which automatically performs the necessary marshalling and unmarshalling. 
+ +## Convert JavaScript object into DynamoDB Record + +```js +const { DynamoDB } = require("@aws-sdk/client-dynamodb"); +const { marshall } = require("@aws-sdk/util-dynamodb"); + +const client = new DynamoDB(clientParams); +const params = { + TableName: "Table", + Item: marshall({ + HashKey: "hashKey", + NumAttribute: 1, + BoolAttribute: true, + ListAttribute: [1, "two", false], + MapAttribute: { foo: "bar" }, + NullAttribute: null, + }), +}; + +await client.putItem(params); +``` + +## Convert DynamoDB Record into JavaScript object + +```js +const { DynamoDB } = require("@aws-sdk/client-dynamodb"); +const { marshall, unmarshall } = require("@aws-sdk/util-dynamodb"); + +const client = new DynamoDB(clientParams); +const params = { + TableName: "Table", + Key: marshall({ + HashKey: "hashKey", + }), +}; + +const { Item } = await client.getItem(params); +unmarshall(Item); +``` diff --git a/amplify/functions/deleteDocument/node_modules/@aws-sdk/util-dynamodb/dist-cjs/index.js b/amplify/functions/deleteDocument/node_modules/@aws-sdk/util-dynamodb/dist-cjs/index.js new file mode 100644 index 0000000..955685c --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@aws-sdk/util-dynamodb/dist-cjs/index.js @@ -0,0 +1,350 @@ +"use strict"; +var __defProp = Object.defineProperty; +var __getOwnPropDesc = Object.getOwnPropertyDescriptor; +var __getOwnPropNames = Object.getOwnPropertyNames; +var __hasOwnProp = Object.prototype.hasOwnProperty; +var __name = (target, value) => __defProp(target, "name", { value, configurable: true }); +var __export = (target, all) => { + for (var name in all) + __defProp(target, name, { get: all[name], enumerable: true }); +}; +var __copyProps = (to, from, except, desc) => { + if (from && typeof from === "object" || typeof from === "function") { + for (let key of __getOwnPropNames(from)) + if (!__hasOwnProp.call(to, key) && key !== except) + __defProp(to, key, { get: () => from[key], enumerable: !(desc = __getOwnPropDesc(from, key)) || 
desc.enumerable }); + } + return to; +}; +var __toCommonJS = (mod) => __copyProps(__defProp({}, "__esModule", { value: true }), mod); + +// src/index.ts +var index_exports = {}; +__export(index_exports, { + NumberValueImpl: () => NumberValue, + convertToAttr: () => convertToAttr, + convertToNative: () => convertToNative, + marshall: () => marshall, + unmarshall: () => unmarshall +}); +module.exports = __toCommonJS(index_exports); + +// src/NumberValue.ts +var NumberValue = class _NumberValue { + static { + __name(this, "NumberValue"); + } + value; + /** + * This class does not validate that your string input is a valid number. + * + * @param value - a precise number, or any BigInt or string, or AttributeValue. + */ + constructor(value) { + if (typeof value === "object" && "N" in value) { + this.value = String(value.N); + } else { + this.value = String(value); + } + const valueOf = typeof value.valueOf() === "number" ? value.valueOf() : 0; + const imprecise = valueOf > Number.MAX_SAFE_INTEGER || valueOf < Number.MIN_SAFE_INTEGER || Math.abs(valueOf) === Infinity || Number.isNaN(valueOf); + if (imprecise) { + throw new Error( + `NumberValue should not be initialized with an imprecise number=${valueOf}. Use a string instead.` + ); + } + } + /** + * This class does not validate that your string input is a valid number. + * + * @param value - a precise number, or any BigInt or string, or AttributeValue. + */ + static from(value) { + return new _NumberValue(value); + } + /** + * @returns the AttributeValue form for DynamoDB. + */ + toAttributeValue() { + return { + N: this.toString() + }; + } + /** + * @returns BigInt representation. + * + * @throws SyntaxError if the string representation is not convertable to a BigInt. + */ + toBigInt() { + const stringValue = this.toString(); + return BigInt(stringValue); + } + /** + * @override + * + * @returns string representation. This is the canonical format in DynamoDB. 
+ */ + toString() { + return String(this.value); + } + /** + * @override + */ + valueOf() { + return this.toString(); + } +}; + +// src/convertToAttr.ts +var convertToAttr = /* @__PURE__ */ __name((data, options) => { + if (data === void 0) { + throw new Error(`Pass options.removeUndefinedValues=true to remove undefined values from map/array/set.`); + } else if (data === null && typeof data === "object") { + return convertToNullAttr(); + } else if (Array.isArray(data)) { + return convertToListAttr(data, options); + } else if (data?.constructor?.name === "Set") { + return convertToSetAttr(data, options); + } else if (data?.constructor?.name === "Map") { + return convertToMapAttrFromIterable(data, options); + } else if (data?.constructor?.name === "Object" || // for object which is result of Object.create(null), which doesn't have constructor defined + !data.constructor && typeof data === "object") { + return convertToMapAttrFromEnumerableProps(data, options); + } else if (isBinary(data)) { + if (data.length === 0 && options?.convertEmptyValues) { + return convertToNullAttr(); + } + return convertToBinaryAttr(data); + } else if (typeof data === "boolean" || data?.constructor?.name === "Boolean") { + return { BOOL: data.valueOf() }; + } else if (typeof data === "number" || data?.constructor?.name === "Number") { + return convertToNumberAttr(data, options); + } else if (data instanceof NumberValue) { + return data.toAttributeValue(); + } else if (typeof data === "bigint") { + return convertToBigIntAttr(data); + } else if (typeof data === "string" || data?.constructor?.name === "String") { + if (data.length === 0 && options?.convertEmptyValues) { + return convertToNullAttr(); + } + return convertToStringAttr(data); + } else if (options?.convertClassInstanceToMap && typeof data === "object") { + return convertToMapAttrFromEnumerableProps(data, options); + } + throw new Error( + `Unsupported type passed: ${data}. 
Pass options.convertClassInstanceToMap=true to marshall typeof object as map attribute.` + ); +}, "convertToAttr"); +var convertToListAttr = /* @__PURE__ */ __name((data, options) => ({ + L: data.filter( + (item) => typeof item !== "function" && (!options?.removeUndefinedValues || options?.removeUndefinedValues && item !== void 0) + ).map((item) => convertToAttr(item, options)) +}), "convertToListAttr"); +var convertToSetAttr = /* @__PURE__ */ __name((set, options) => { + const setToOperate = options?.removeUndefinedValues ? new Set([...set].filter((value) => value !== void 0)) : set; + if (!options?.removeUndefinedValues && setToOperate.has(void 0)) { + throw new Error(`Pass options.removeUndefinedValues=true to remove undefined values from map/array/set.`); + } + if (setToOperate.size === 0) { + if (options?.convertEmptyValues) { + return convertToNullAttr(); + } + throw new Error(`Pass a non-empty set, or options.convertEmptyValues=true.`); + } + const item = setToOperate.values().next().value; + if (item instanceof NumberValue) { + return { + NS: Array.from(setToOperate).map((_) => _.toString()) + }; + } else if (typeof item === "number") { + return { + NS: Array.from(setToOperate).map((num) => convertToNumberAttr(num, options)).map((item2) => item2.N) + }; + } else if (typeof item === "bigint") { + return { + NS: Array.from(setToOperate).map(convertToBigIntAttr).map((item2) => item2.N) + }; + } else if (typeof item === "string") { + return { + SS: Array.from(setToOperate).map(convertToStringAttr).map((item2) => item2.S) + }; + } else if (isBinary(item)) { + return { + // Do not alter binary data passed https://github.com/aws/aws-sdk-js-v3/issues/1530 + // @ts-expect-error Type 'ArrayBuffer' is not assignable to type 'Uint8Array' + BS: Array.from(setToOperate).map(convertToBinaryAttr).map((item2) => item2.B) + }; + } else { + throw new Error(`Only Number Set (NS), Binary Set (BS) or String Set (SS) are allowed.`); + } +}, "convertToSetAttr"); +var 
convertToMapAttrFromIterable = /* @__PURE__ */ __name((data, options) => ({ + M: ((data2) => { + const map = {}; + for (const [key, value] of data2) { + if (typeof value !== "function" && (value !== void 0 || !options?.removeUndefinedValues)) { + map[key] = convertToAttr(value, options); + } + } + return map; + })(data) +}), "convertToMapAttrFromIterable"); +var convertToMapAttrFromEnumerableProps = /* @__PURE__ */ __name((data, options) => ({ + M: ((data2) => { + const map = {}; + for (const key in data2) { + const value = data2[key]; + if (typeof value !== "function" && (value !== void 0 || !options?.removeUndefinedValues)) { + map[key] = convertToAttr(value, options); + } + } + return map; + })(data) +}), "convertToMapAttrFromEnumerableProps"); +var convertToNullAttr = /* @__PURE__ */ __name(() => ({ NULL: true }), "convertToNullAttr"); +var convertToBinaryAttr = /* @__PURE__ */ __name((data) => ({ B: data }), "convertToBinaryAttr"); +var convertToStringAttr = /* @__PURE__ */ __name((data) => ({ S: data.toString() }), "convertToStringAttr"); +var convertToBigIntAttr = /* @__PURE__ */ __name((data) => ({ N: data.toString() }), "convertToBigIntAttr"); +var validateBigIntAndThrow = /* @__PURE__ */ __name((errorPrefix) => { + throw new Error(`${errorPrefix} Use NumberValue from @aws-sdk/lib-dynamodb.`); +}, "validateBigIntAndThrow"); +var convertToNumberAttr = /* @__PURE__ */ __name((num, options) => { + if ([Number.NaN, Number.POSITIVE_INFINITY, Number.NEGATIVE_INFINITY].map((val) => val.toString()).includes(num.toString())) { + throw new Error(`Special numeric value ${num.toString()} is not allowed`); + } else if (!options?.allowImpreciseNumbers) { + if (Number(num) > Number.MAX_SAFE_INTEGER) { + validateBigIntAndThrow(`Number ${num.toString()} is greater than Number.MAX_SAFE_INTEGER.`); + } else if (Number(num) < Number.MIN_SAFE_INTEGER) { + validateBigIntAndThrow(`Number ${num.toString()} is lesser than Number.MIN_SAFE_INTEGER.`); + } + } + return { N: 
num.toString() }; +}, "convertToNumberAttr"); +var isBinary = /* @__PURE__ */ __name((data) => { + const binaryTypes = [ + "ArrayBuffer", + "Blob", + "Buffer", + "DataView", + "File", + "Int8Array", + "Uint8Array", + "Uint8ClampedArray", + "Int16Array", + "Uint16Array", + "Int32Array", + "Uint32Array", + "Float32Array", + "Float64Array", + "BigInt64Array", + "BigUint64Array" + ]; + if (data?.constructor) { + return binaryTypes.includes(data.constructor.name); + } + return false; +}, "isBinary"); + +// src/convertToNative.ts +var convertToNative = /* @__PURE__ */ __name((data, options) => { + for (const [key, value] of Object.entries(data)) { + if (value !== void 0) { + switch (key) { + case "NULL": + return null; + case "BOOL": + return Boolean(value); + case "N": + return convertNumber(value, options); + case "B": + return convertBinary(value); + case "S": + return convertString(value); + case "L": + return convertList(value, options); + case "M": + return convertMap(value, options); + case "NS": + return new Set(value.map((item) => convertNumber(item, options))); + case "BS": + return new Set(value.map(convertBinary)); + case "SS": + return new Set(value.map(convertString)); + default: + throw new Error(`Unsupported type passed: ${key}`); + } + } + } + throw new Error(`No value defined: ${JSON.stringify(data)}`); +}, "convertToNative"); +var convertNumber = /* @__PURE__ */ __name((numString, options) => { + if (typeof options?.wrapNumbers === "function") { + return options?.wrapNumbers(numString); + } + if (options?.wrapNumbers) { + return NumberValue.from(numString); + } + const num = Number(numString); + const infinityValues = [Number.POSITIVE_INFINITY, Number.NEGATIVE_INFINITY]; + const isLargeFiniteNumber = (num > Number.MAX_SAFE_INTEGER || num < Number.MIN_SAFE_INTEGER) && !infinityValues.includes(num); + if (isLargeFiniteNumber) { + if (typeof BigInt === "function") { + try { + return BigInt(numString); + } catch (error) { + throw new Error(`${numString} 
can't be converted to BigInt. Set options.wrapNumbers to get string value.`); + } + } else { + throw new Error(`${numString} is outside SAFE_INTEGER bounds. Set options.wrapNumbers to get string value.`); + } + } + return num; +}, "convertNumber"); +var convertString = /* @__PURE__ */ __name((stringValue) => stringValue, "convertString"); +var convertBinary = /* @__PURE__ */ __name((binaryValue) => binaryValue, "convertBinary"); +var convertList = /* @__PURE__ */ __name((list, options) => list.map((item) => convertToNative(item, options)), "convertList"); +var convertMap = /* @__PURE__ */ __name((map, options) => Object.entries(map).reduce( + (acc, [key, value]) => (acc[key] = convertToNative(value, options), acc), + {} +), "convertMap"); + +// src/marshall.ts +function marshall(data, options) { + const attributeValue = convertToAttr(data, options); + const [key, value] = Object.entries(attributeValue)[0]; + switch (key) { + case "M": + case "L": + return options?.convertTopLevelContainer ? 
attributeValue : value; + case "SS": + case "NS": + case "BS": + case "S": + case "N": + case "B": + case "NULL": + case "BOOL": + case "$unknown": + default: + return attributeValue; + } +} +__name(marshall, "marshall"); + +// src/unmarshall.ts +var unmarshall = /* @__PURE__ */ __name((data, options) => { + if (options?.convertWithoutMapWrapper) { + return convertToNative(data, options); + } + return convertToNative({ M: data }, options); +}, "unmarshall"); +// Annotate the CommonJS export names for ESM import in node: + +0 && (module.exports = { + NumberValueImpl, + convertToAttr, + convertToNative, + marshall, + unmarshall +}); + diff --git a/amplify/functions/deleteDocument/node_modules/@aws-sdk/util-dynamodb/dist-es/NumberValue.js b/amplify/functions/deleteDocument/node_modules/@aws-sdk/util-dynamodb/dist-es/NumberValue.js new file mode 100644 index 0000000..a9df9f8 --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@aws-sdk/util-dynamodb/dist-es/NumberValue.js @@ -0,0 +1,37 @@ +export class NumberValue { + value; + constructor(value) { + if (typeof value === "object" && "N" in value) { + this.value = String(value.N); + } + else { + this.value = String(value); + } + const valueOf = typeof value.valueOf() === "number" ? value.valueOf() : 0; + const imprecise = valueOf > Number.MAX_SAFE_INTEGER || + valueOf < Number.MIN_SAFE_INTEGER || + Math.abs(valueOf) === Infinity || + Number.isNaN(valueOf); + if (imprecise) { + throw new Error(`NumberValue should not be initialized with an imprecise number=${valueOf}. 
Use a string instead.`); + } + } + static from(value) { + return new NumberValue(value); + } + toAttributeValue() { + return { + N: this.toString(), + }; + } + toBigInt() { + const stringValue = this.toString(); + return BigInt(stringValue); + } + toString() { + return String(this.value); + } + valueOf() { + return this.toString(); + } +} diff --git a/amplify/functions/deleteDocument/node_modules/@aws-sdk/util-dynamodb/dist-es/convertToAttr.js b/amplify/functions/deleteDocument/node_modules/@aws-sdk/util-dynamodb/dist-es/convertToAttr.js new file mode 100644 index 0000000..62a888f --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@aws-sdk/util-dynamodb/dist-es/convertToAttr.js @@ -0,0 +1,175 @@ +import { NumberValue } from "./NumberValue"; +export const convertToAttr = (data, options) => { + if (data === undefined) { + throw new Error(`Pass options.removeUndefinedValues=true to remove undefined values from map/array/set.`); + } + else if (data === null && typeof data === "object") { + return convertToNullAttr(); + } + else if (Array.isArray(data)) { + return convertToListAttr(data, options); + } + else if (data?.constructor?.name === "Set") { + return convertToSetAttr(data, options); + } + else if (data?.constructor?.name === "Map") { + return convertToMapAttrFromIterable(data, options); + } + else if (data?.constructor?.name === "Object" || + (!data.constructor && typeof data === "object")) { + return convertToMapAttrFromEnumerableProps(data, options); + } + else if (isBinary(data)) { + if (data.length === 0 && options?.convertEmptyValues) { + return convertToNullAttr(); + } + return convertToBinaryAttr(data); + } + else if (typeof data === "boolean" || data?.constructor?.name === "Boolean") { + return { BOOL: data.valueOf() }; + } + else if (typeof data === "number" || data?.constructor?.name === "Number") { + return convertToNumberAttr(data, options); + } + else if (data instanceof NumberValue) { + return data.toAttributeValue(); + } + else if 
(typeof data === "bigint") { + return convertToBigIntAttr(data); + } + else if (typeof data === "string" || data?.constructor?.name === "String") { + if (data.length === 0 && options?.convertEmptyValues) { + return convertToNullAttr(); + } + return convertToStringAttr(data); + } + else if (options?.convertClassInstanceToMap && typeof data === "object") { + return convertToMapAttrFromEnumerableProps(data, options); + } + throw new Error(`Unsupported type passed: ${data}. Pass options.convertClassInstanceToMap=true to marshall typeof object as map attribute.`); +}; +const convertToListAttr = (data, options) => ({ + L: data + .filter((item) => typeof item !== "function" && + (!options?.removeUndefinedValues || (options?.removeUndefinedValues && item !== undefined))) + .map((item) => convertToAttr(item, options)), +}); +const convertToSetAttr = (set, options) => { + const setToOperate = options?.removeUndefinedValues ? new Set([...set].filter((value) => value !== undefined)) : set; + if (!options?.removeUndefinedValues && setToOperate.has(undefined)) { + throw new Error(`Pass options.removeUndefinedValues=true to remove undefined values from map/array/set.`); + } + if (setToOperate.size === 0) { + if (options?.convertEmptyValues) { + return convertToNullAttr(); + } + throw new Error(`Pass a non-empty set, or options.convertEmptyValues=true.`); + } + const item = setToOperate.values().next().value; + if (item instanceof NumberValue) { + return { + NS: Array.from(setToOperate).map((_) => _.toString()), + }; + } + else if (typeof item === "number") { + return { + NS: Array.from(setToOperate) + .map((num) => convertToNumberAttr(num, options)) + .map((item) => item.N), + }; + } + else if (typeof item === "bigint") { + return { + NS: Array.from(setToOperate) + .map(convertToBigIntAttr) + .map((item) => item.N), + }; + } + else if (typeof item === "string") { + return { + SS: Array.from(setToOperate) + .map(convertToStringAttr) + .map((item) => item.S), + }; + } + else if 
(isBinary(item)) { + return { + BS: Array.from(setToOperate) + .map(convertToBinaryAttr) + .map((item) => item.B), + }; + } + else { + throw new Error(`Only Number Set (NS), Binary Set (BS) or String Set (SS) are allowed.`); + } +}; +const convertToMapAttrFromIterable = (data, options) => ({ + M: ((data) => { + const map = {}; + for (const [key, value] of data) { + if (typeof value !== "function" && (value !== undefined || !options?.removeUndefinedValues)) { + map[key] = convertToAttr(value, options); + } + } + return map; + })(data), +}); +const convertToMapAttrFromEnumerableProps = (data, options) => ({ + M: ((data) => { + const map = {}; + for (const key in data) { + const value = data[key]; + if (typeof value !== "function" && (value !== undefined || !options?.removeUndefinedValues)) { + map[key] = convertToAttr(value, options); + } + } + return map; + })(data), +}); +const convertToNullAttr = () => ({ NULL: true }); +const convertToBinaryAttr = (data) => ({ B: data }); +const convertToStringAttr = (data) => ({ S: data.toString() }); +const convertToBigIntAttr = (data) => ({ N: data.toString() }); +const validateBigIntAndThrow = (errorPrefix) => { + throw new Error(`${errorPrefix} Use NumberValue from @aws-sdk/lib-dynamodb.`); +}; +const convertToNumberAttr = (num, options) => { + if ([Number.NaN, Number.POSITIVE_INFINITY, Number.NEGATIVE_INFINITY] + .map((val) => val.toString()) + .includes(num.toString())) { + throw new Error(`Special numeric value ${num.toString()} is not allowed`); + } + else if (!options?.allowImpreciseNumbers) { + if (Number(num) > Number.MAX_SAFE_INTEGER) { + validateBigIntAndThrow(`Number ${num.toString()} is greater than Number.MAX_SAFE_INTEGER.`); + } + else if (Number(num) < Number.MIN_SAFE_INTEGER) { + validateBigIntAndThrow(`Number ${num.toString()} is lesser than Number.MIN_SAFE_INTEGER.`); + } + } + return { N: num.toString() }; +}; +const isBinary = (data) => { + const binaryTypes = [ + "ArrayBuffer", + "Blob", + "Buffer", + 
"DataView", + "File", + "Int8Array", + "Uint8Array", + "Uint8ClampedArray", + "Int16Array", + "Uint16Array", + "Int32Array", + "Uint32Array", + "Float32Array", + "Float64Array", + "BigInt64Array", + "BigUint64Array", + ]; + if (data?.constructor) { + return binaryTypes.includes(data.constructor.name); + } + return false; +}; diff --git a/amplify/functions/deleteDocument/node_modules/@aws-sdk/util-dynamodb/dist-es/convertToNative.js b/amplify/functions/deleteDocument/node_modules/@aws-sdk/util-dynamodb/dist-es/convertToNative.js new file mode 100644 index 0000000..3e7b2c2 --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@aws-sdk/util-dynamodb/dist-es/convertToNative.js @@ -0,0 +1,61 @@ +import { NumberValue } from "./NumberValue"; +export const convertToNative = (data, options) => { + for (const [key, value] of Object.entries(data)) { + if (value !== undefined) { + switch (key) { + case "NULL": + return null; + case "BOOL": + return Boolean(value); + case "N": + return convertNumber(value, options); + case "B": + return convertBinary(value); + case "S": + return convertString(value); + case "L": + return convertList(value, options); + case "M": + return convertMap(value, options); + case "NS": + return new Set(value.map((item) => convertNumber(item, options))); + case "BS": + return new Set(value.map(convertBinary)); + case "SS": + return new Set(value.map(convertString)); + default: + throw new Error(`Unsupported type passed: ${key}`); + } + } + } + throw new Error(`No value defined: ${JSON.stringify(data)}`); +}; +const convertNumber = (numString, options) => { + if (typeof options?.wrapNumbers === "function") { + return options?.wrapNumbers(numString); + } + if (options?.wrapNumbers) { + return NumberValue.from(numString); + } + const num = Number(numString); + const infinityValues = [Number.POSITIVE_INFINITY, Number.NEGATIVE_INFINITY]; + const isLargeFiniteNumber = (num > Number.MAX_SAFE_INTEGER || num < Number.MIN_SAFE_INTEGER) && 
!infinityValues.includes(num); + if (isLargeFiniteNumber) { + if (typeof BigInt === "function") { + try { + return BigInt(numString); + } + catch (error) { + throw new Error(`${numString} can't be converted to BigInt. Set options.wrapNumbers to get string value.`); + } + } + else { + throw new Error(`${numString} is outside SAFE_INTEGER bounds. Set options.wrapNumbers to get string value.`); + } + } + return num; +}; +const convertString = (stringValue) => stringValue; +const convertBinary = (binaryValue) => binaryValue; +const convertList = (list, options) => list.map((item) => convertToNative(item, options)); +const convertMap = (map, options) => Object.entries(map).reduce((acc, [key, value]) => ((acc[key] = convertToNative(value, options)), acc), {}); diff --git a/amplify/functions/deleteDocument/node_modules/@aws-sdk/util-dynamodb/dist-es/index.js b/amplify/functions/deleteDocument/node_modules/@aws-sdk/util-dynamodb/dist-es/index.js new file mode 100644 index 0000000..6087756 --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@aws-sdk/util-dynamodb/dist-es/index.js @@ -0,0 +1,6 @@ +export { NumberValue as NumberValueImpl } from "./NumberValue"; +export * from "./convertToAttr"; +export * from "./convertToNative"; +export * from "./marshall"; +export * from "./models"; +export * from "./unmarshall"; diff --git a/amplify/functions/deleteDocument/node_modules/@aws-sdk/util-dynamodb/dist-es/marshall.js b/amplify/functions/deleteDocument/node_modules/@aws-sdk/util-dynamodb/dist-es/marshall.js new file mode 100644 index 0000000..9899c8a --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@aws-sdk/util-dynamodb/dist-es/marshall.js @@ -0,0 +1,21 @@ +import { convertToAttr } from "./convertToAttr"; +export function marshall(data, options) { + const attributeValue = convertToAttr(data, options); + const [key, value] = Object.entries(attributeValue)[0]; + switch (key) { + case "M": + case "L": + return options?.convertTopLevelContainer ? 
attributeValue : value; + case "SS": + case "NS": + case "BS": + case "S": + case "N": + case "B": + case "NULL": + case "BOOL": + case "$unknown": + default: + return attributeValue; + } +} diff --git a/amplify/functions/deleteDocument/node_modules/@aws-sdk/util-dynamodb/dist-es/models.js b/amplify/functions/deleteDocument/node_modules/@aws-sdk/util-dynamodb/dist-es/models.js new file mode 100644 index 0000000..cb0ff5c --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@aws-sdk/util-dynamodb/dist-es/models.js @@ -0,0 +1 @@ +export {}; diff --git a/amplify/functions/deleteDocument/node_modules/@aws-sdk/util-dynamodb/dist-es/unmarshall.js b/amplify/functions/deleteDocument/node_modules/@aws-sdk/util-dynamodb/dist-es/unmarshall.js new file mode 100644 index 0000000..6028656 --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@aws-sdk/util-dynamodb/dist-es/unmarshall.js @@ -0,0 +1,7 @@ +import { convertToNative } from "./convertToNative"; +export const unmarshall = (data, options) => { + if (options?.convertWithoutMapWrapper) { + return convertToNative(data, options); + } + return convertToNative({ M: data }, options); +}; diff --git a/amplify/functions/deleteDocument/node_modules/@aws-sdk/util-dynamodb/dist-types/NumberValue.d.ts b/amplify/functions/deleteDocument/node_modules/@aws-sdk/util-dynamodb/dist-types/NumberValue.d.ts new file mode 100644 index 0000000..c444ff8 --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@aws-sdk/util-dynamodb/dist-types/NumberValue.d.ts @@ -0,0 +1,55 @@ +import { NumberValue as INumberValue } from "./models"; +/** + * + * Class for storing DynamoDB numbers that exceed the scale of + * JavaScript's MAX_SAFE_INTEGER and MIN_SAFE_INTEGER, or the + * decimal precision limit. + * + * This class does not support mathematical operations in JavaScript. + * Convert the contained string value to your application-specific + * large number implementation to perform mathematical operations. 
+ * + * @public + * + */ +export declare class NumberValue implements INumberValue { + value: string; + /** + * This class does not validate that your string input is a valid number. + * + * @param value - a precise number, or any BigInt or string, or AttributeValue. + */ + constructor(value: number | Number | BigInt | string | { + N: string; + }); + /** + * This class does not validate that your string input is a valid number. + * + * @param value - a precise number, or any BigInt or string, or AttributeValue. + */ + static from(value: number | Number | BigInt | string | { + N: string; + }): NumberValue; + /** + * @returns the AttributeValue form for DynamoDB. + */ + toAttributeValue(): { + N: string; + }; + /** + * @returns BigInt representation. + * + * @throws SyntaxError if the string representation is not convertable to a BigInt. + */ + toBigInt(): bigint; + /** + * @override + * + * @returns string representation. This is the canonical format in DynamoDB. + */ + toString(): string; + /** + * @override + */ + valueOf(): string; +} diff --git a/amplify/functions/deleteDocument/node_modules/@aws-sdk/util-dynamodb/dist-types/convertToAttr.d.ts b/amplify/functions/deleteDocument/node_modules/@aws-sdk/util-dynamodb/dist-types/convertToAttr.d.ts new file mode 100644 index 0000000..7b0eae7 --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@aws-sdk/util-dynamodb/dist-types/convertToAttr.d.ts @@ -0,0 +1,10 @@ +import { AttributeValue } from "@aws-sdk/client-dynamodb"; +import { marshallOptions } from "./marshall"; +import { NativeAttributeValue } from "./models"; +/** + * Convert a JavaScript value to its equivalent DynamoDB AttributeValue type. + * + * @param data - The data to convert to a DynamoDB AttributeValue. + * @param options - An optional configuration object for `convertToAttr`. 
+ */ +export declare const convertToAttr: (data: NativeAttributeValue, options?: marshallOptions) => AttributeValue; diff --git a/amplify/functions/deleteDocument/node_modules/@aws-sdk/util-dynamodb/dist-types/convertToNative.d.ts b/amplify/functions/deleteDocument/node_modules/@aws-sdk/util-dynamodb/dist-types/convertToNative.d.ts new file mode 100644 index 0000000..4cbac6e --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@aws-sdk/util-dynamodb/dist-types/convertToNative.d.ts @@ -0,0 +1,10 @@ +import { AttributeValue } from "@aws-sdk/client-dynamodb"; +import type { NativeAttributeValue } from "./models"; +import { unmarshallOptions } from "./unmarshall"; +/** + * Convert a DynamoDB AttributeValue object to its equivalent JavaScript type. + * + * @param data - The DynamoDB record to convert to JavaScript type. + * @param options - An optional configuration object for `convertToNative`. + */ +export declare const convertToNative: (data: AttributeValue, options?: unmarshallOptions) => NativeAttributeValue; diff --git a/amplify/functions/deleteDocument/node_modules/@aws-sdk/util-dynamodb/dist-types/index.d.ts b/amplify/functions/deleteDocument/node_modules/@aws-sdk/util-dynamodb/dist-types/index.d.ts new file mode 100644 index 0000000..6087756 --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@aws-sdk/util-dynamodb/dist-types/index.d.ts @@ -0,0 +1,6 @@ +export { NumberValue as NumberValueImpl } from "./NumberValue"; +export * from "./convertToAttr"; +export * from "./convertToNative"; +export * from "./marshall"; +export * from "./models"; +export * from "./unmarshall"; diff --git a/amplify/functions/deleteDocument/node_modules/@aws-sdk/util-dynamodb/dist-types/marshall.d.ts b/amplify/functions/deleteDocument/node_modules/@aws-sdk/util-dynamodb/dist-types/marshall.d.ts new file mode 100644 index 0000000..a949240 --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@aws-sdk/util-dynamodb/dist-types/marshall.d.ts @@ -0,0 
+1,81 @@ +import { AttributeValue } from "@aws-sdk/client-dynamodb"; +import { NativeAttributeBinary, NativeAttributeValue } from "./models"; +import { NumberValue } from "./NumberValue"; +/** + * An optional configuration object for `marshall` + */ +export interface marshallOptions { + /** + * Whether to automatically convert empty strings, blobs, and sets to `null` + */ + convertEmptyValues?: boolean; + /** + * Whether to remove undefined values from JS arrays/Sets/objects + * when marshalling to DynamoDB lists/sets/maps respectively. + * + * A DynamoDB item is not itself considered a map. Only + * attributes of an item are examined. + */ + removeUndefinedValues?: boolean; + /** + * Whether to convert typeof object to map attribute. + */ + convertClassInstanceToMap?: boolean; + /** + * Whether to convert the top level container + * if it is a map or list. + * + * Default is true when using the DynamoDBDocumentClient, + * but false if directly using the marshall function (backwards compatibility). + */ + convertTopLevelContainer?: boolean; + /** + * Whether to allow numbers beyond Number.MAX_SAFE_INTEGER during marshalling. + * When set to true, allows numbers that may lose precision when converted to JavaScript numbers. + * When false (default), throws an error if a number exceeds Number.MAX_SAFE_INTEGER to prevent + * unintended loss of precision. Consider using the NumberValue type from @aws-sdk/lib-dynamodb + * for precise handling of large numbers. + */ + allowImpreciseNumbers?: boolean; +} +/** + * Convert a JavaScript object into a DynamoDB record. 
+ * + * @param data - The data to convert to a DynamoDB record + * @param options - An optional configuration object for `marshall` + * + */ +export declare function marshall(data: null, options?: marshallOptions): AttributeValue.NULLMember; +export declare function marshall(data: Set | Set | Set, options?: marshallOptions): AttributeValue.NSMember; +export declare function marshall(data: Set, options?: marshallOptions): AttributeValue.SSMember; +export declare function marshall(data: Set, options?: marshallOptions): AttributeValue.BSMember; +export declare function marshall(data: NativeAttributeBinary, options?: marshallOptions): AttributeValue.BMember; +export declare function marshall(data: boolean, options?: marshallOptions): AttributeValue.BOOLMember; +export declare function marshall(data: number | NumberValue | bigint, options?: marshallOptions): AttributeValue.NMember; +export declare function marshall(data: string, options?: marshallOptions): AttributeValue.SMember; +export declare function marshall(data: boolean, options?: marshallOptions): AttributeValue.BOOLMember; +export declare function marshall(data: NativeAttributeValue[], options: marshallOptions & O): AttributeValue.LMember; +export declare function marshall(data: NativeAttributeValue[], options: marshallOptions & O): AttributeValue[]; +export declare function marshall(data: NativeAttributeValue[], options: marshallOptions & O): AttributeValue[] | AttributeValue.LMember; +export declare function marshall(data: NativeAttributeValue[], options?: marshallOptions): AttributeValue[]; +export declare function marshall(data: Map | Record, options: marshallOptions & O): AttributeValue.MMember; +export declare function marshall(data: Map | Record, options: marshallOptions & O): Record; +export declare function marshall(data: Map | Record, options: marshallOptions & O): Record | AttributeValue.MMember; +export declare function marshall(data: Map | Record, options?: marshallOptions): Record; +export declare 
function marshall(data: any, options?: marshallOptions): any; +/** + * This signature will be unmatchable but is included for information. + */ +export declare function marshall(data: unknown, options?: marshallOptions): AttributeValue.$UnknownMember; diff --git a/amplify/functions/deleteDocument/node_modules/@aws-sdk/util-dynamodb/dist-types/models.d.ts b/amplify/functions/deleteDocument/node_modules/@aws-sdk/util-dynamodb/dist-types/models.d.ts new file mode 100644 index 0000000..7f0a963 --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@aws-sdk/util-dynamodb/dist-types/models.d.ts @@ -0,0 +1,40 @@ +/// +/// +/** + * A interface recognizable as a numeric value that stores the underlying number + * as a string. + * + * Intended to be a deserialization target for the DynamoDB Document Client when + * the `wrapNumbers` flag is set. This allows for numeric values that lose + * precision when converted to JavaScript's `number` type. + */ +export interface NumberValue { + readonly value: string; +} +/** + * @public + */ +export type NativeAttributeValue = NativeScalarAttributeValue | { + [key: string]: NativeAttributeValue; +} | NativeAttributeValue[] | Set | InstanceType<{ + new (...args: any[]): any; +}>; +/** + * @public + */ +export type NativeScalarAttributeValue = null | undefined | boolean | number | NumberValue | bigint | NativeAttributeBinary | string; +/** + * Declare File in case DOM is not added to the tsconfig lib causing + * File interface is not defined. For developers with DOM lib added, + * the File interface will be merged correctly. + */ +declare global { + interface File { + } +} +type IfDefined = {} extends T ? 
never : T; +/** + * @public + */ +export type NativeAttributeBinary = ArrayBuffer | IfDefined | IfDefined | DataView | IfDefined | Int8Array | Uint8Array | Uint8ClampedArray | Int16Array | Uint16Array | Int32Array | Uint32Array | Float32Array | Float64Array | BigInt64Array | BigUint64Array; +export {}; diff --git a/amplify/functions/deleteDocument/node_modules/@aws-sdk/util-dynamodb/dist-types/ts3.4/NumberValue.d.ts b/amplify/functions/deleteDocument/node_modules/@aws-sdk/util-dynamodb/dist-types/ts3.4/NumberValue.d.ts new file mode 100644 index 0000000..8180624 --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@aws-sdk/util-dynamodb/dist-types/ts3.4/NumberValue.d.ts @@ -0,0 +1,30 @@ +import { NumberValue as INumberValue } from "./models"; +export declare class NumberValue implements INumberValue { + value: string; + constructor( + value: + | number + | Number + | BigInt + | string + | { + N: string; + } + ); + static from( + value: + | number + | Number + | BigInt + | string + | { + N: string; + } + ): NumberValue; + toAttributeValue(): { + N: string; + }; + toBigInt(): bigint; + toString(): string; + valueOf(): string; +} diff --git a/amplify/functions/deleteDocument/node_modules/@aws-sdk/util-dynamodb/dist-types/ts3.4/convertToAttr.d.ts b/amplify/functions/deleteDocument/node_modules/@aws-sdk/util-dynamodb/dist-types/ts3.4/convertToAttr.d.ts new file mode 100644 index 0000000..d148d57 --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@aws-sdk/util-dynamodb/dist-types/ts3.4/convertToAttr.d.ts @@ -0,0 +1,7 @@ +import { AttributeValue } from "@aws-sdk/client-dynamodb"; +import { marshallOptions } from "./marshall"; +import { NativeAttributeValue } from "./models"; +export declare const convertToAttr: ( + data: NativeAttributeValue, + options?: marshallOptions +) => AttributeValue; diff --git a/amplify/functions/deleteDocument/node_modules/@aws-sdk/util-dynamodb/dist-types/ts3.4/convertToNative.d.ts 
b/amplify/functions/deleteDocument/node_modules/@aws-sdk/util-dynamodb/dist-types/ts3.4/convertToNative.d.ts new file mode 100644 index 0000000..c1a03f4 --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@aws-sdk/util-dynamodb/dist-types/ts3.4/convertToNative.d.ts @@ -0,0 +1,7 @@ +import { AttributeValue } from "@aws-sdk/client-dynamodb"; +import { NativeAttributeValue } from "./models"; +import { unmarshallOptions } from "./unmarshall"; +export declare const convertToNative: ( + data: AttributeValue, + options?: unmarshallOptions +) => NativeAttributeValue; diff --git a/amplify/functions/deleteDocument/node_modules/@aws-sdk/util-dynamodb/dist-types/ts3.4/index.d.ts b/amplify/functions/deleteDocument/node_modules/@aws-sdk/util-dynamodb/dist-types/ts3.4/index.d.ts new file mode 100644 index 0000000..6087756 --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@aws-sdk/util-dynamodb/dist-types/ts3.4/index.d.ts @@ -0,0 +1,6 @@ +export { NumberValue as NumberValueImpl } from "./NumberValue"; +export * from "./convertToAttr"; +export * from "./convertToNative"; +export * from "./marshall"; +export * from "./models"; +export * from "./unmarshall"; diff --git a/amplify/functions/deleteDocument/node_modules/@aws-sdk/util-dynamodb/dist-types/ts3.4/marshall.d.ts b/amplify/functions/deleteDocument/node_modules/@aws-sdk/util-dynamodb/dist-types/ts3.4/marshall.d.ts new file mode 100644 index 0000000..f81b876 --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@aws-sdk/util-dynamodb/dist-types/ts3.4/marshall.d.ts @@ -0,0 +1,112 @@ +import { AttributeValue } from "@aws-sdk/client-dynamodb"; +import { NativeAttributeBinary, NativeAttributeValue } from "./models"; +import { NumberValue } from "./NumberValue"; +export interface marshallOptions { + convertEmptyValues?: boolean; + removeUndefinedValues?: boolean; + convertClassInstanceToMap?: boolean; + convertTopLevelContainer?: boolean; + allowImpreciseNumbers?: boolean; +} +export declare 
function marshall( + data: null, + options?: marshallOptions +): AttributeValue.NULLMember; +export declare function marshall( + data: Set | Set | Set, + options?: marshallOptions +): AttributeValue.NSMember; +export declare function marshall( + data: Set, + options?: marshallOptions +): AttributeValue.SSMember; +export declare function marshall( + data: Set, + options?: marshallOptions +): AttributeValue.BSMember; +export declare function marshall( + data: NativeAttributeBinary, + options?: marshallOptions +): AttributeValue.BMember; +export declare function marshall( + data: boolean, + options?: marshallOptions +): AttributeValue.BOOLMember; +export declare function marshall( + data: number | NumberValue | bigint, + options?: marshallOptions +): AttributeValue.NMember; +export declare function marshall( + data: string, + options?: marshallOptions +): AttributeValue.SMember; +export declare function marshall( + data: boolean, + options?: marshallOptions +): AttributeValue.BOOLMember; +export declare function marshall< + O extends { + convertTopLevelContainer: true; + } +>( + data: NativeAttributeValue[], + options: marshallOptions & O +): AttributeValue.LMember; +export declare function marshall< + O extends { + convertTopLevelContainer: false; + } +>(data: NativeAttributeValue[], options: marshallOptions & O): AttributeValue[]; +export declare function marshall< + O extends { + convertTopLevelContainer: boolean; + } +>( + data: NativeAttributeValue[], + options: marshallOptions & O +): AttributeValue[] | AttributeValue.LMember; +export declare function marshall( + data: NativeAttributeValue[], + options?: marshallOptions +): AttributeValue[]; +export declare function marshall< + O extends { + convertTopLevelContainer: true; + } +>( + data: + | Map + | Record, + options: marshallOptions & O +): AttributeValue.MMember; +export declare function marshall< + O extends { + convertTopLevelContainer: false; + } +>( + data: + | Map + | Record, + options: marshallOptions & 
O +): Record; +export declare function marshall< + O extends { + convertTopLevelContainer: boolean; + } +>( + data: + | Map + | Record, + options: marshallOptions & O +): Record | AttributeValue.MMember; +export declare function marshall( + data: + | Map + | Record, + options?: marshallOptions +): Record; +export declare function marshall(data: any, options?: marshallOptions): any; +export declare function marshall( + data: unknown, + options?: marshallOptions +): AttributeValue.$UnknownMember; diff --git a/amplify/functions/deleteDocument/node_modules/@aws-sdk/util-dynamodb/dist-types/ts3.4/models.d.ts b/amplify/functions/deleteDocument/node_modules/@aws-sdk/util-dynamodb/dist-types/ts3.4/models.d.ts new file mode 100644 index 0000000..f2939b2 --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@aws-sdk/util-dynamodb/dist-types/ts3.4/models.d.ts @@ -0,0 +1,46 @@ +export interface NumberValue { + readonly value: string; +} +export type NativeAttributeValue = + | NativeScalarAttributeValue + | { + [key: string]: NativeAttributeValue; + } + | NativeAttributeValue[] + | Set< + number | bigint | NumberValue | string | NativeAttributeBinary | undefined + > + | InstanceType<{ + new (...args: any[]): any; + }>; +export type NativeScalarAttributeValue = + | null + | undefined + | boolean + | number + | NumberValue + | bigint + | NativeAttributeBinary + | string; +declare global { + interface File {} +} +type IfDefined = {} extends T ? 
never : T; +export type NativeAttributeBinary = + | ArrayBuffer + | IfDefined + | IfDefined + | DataView + | IfDefined + | Int8Array + | Uint8Array + | Uint8ClampedArray + | Int16Array + | Uint16Array + | Int32Array + | Uint32Array + | Float32Array + | Float64Array + | BigInt64Array + | BigUint64Array; +export {}; diff --git a/amplify/functions/deleteDocument/node_modules/@aws-sdk/util-dynamodb/dist-types/ts3.4/unmarshall.d.ts b/amplify/functions/deleteDocument/node_modules/@aws-sdk/util-dynamodb/dist-types/ts3.4/unmarshall.d.ts new file mode 100644 index 0000000..9d511e0 --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@aws-sdk/util-dynamodb/dist-types/ts3.4/unmarshall.d.ts @@ -0,0 +1,13 @@ +import { AttributeValue } from "@aws-sdk/client-dynamodb"; +import { NativeAttributeValue } from "./models"; +import { NumberValue } from "./NumberValue"; +export interface unmarshallOptions { + wrapNumbers?: + | boolean + | ((value: string) => number | bigint | NumberValue | any); + convertWithoutMapWrapper?: boolean; +} +export declare const unmarshall: ( + data: Record | AttributeValue, + options?: unmarshallOptions +) => Record; diff --git a/amplify/functions/deleteDocument/node_modules/@aws-sdk/util-dynamodb/dist-types/unmarshall.d.ts b/amplify/functions/deleteDocument/node_modules/@aws-sdk/util-dynamodb/dist-types/unmarshall.d.ts new file mode 100644 index 0000000..c477e32 --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@aws-sdk/util-dynamodb/dist-types/unmarshall.d.ts @@ -0,0 +1,31 @@ +import { AttributeValue } from "@aws-sdk/client-dynamodb"; +import { NativeAttributeValue } from "./models"; +import { NumberValue } from "./NumberValue"; +/** + * An optional configuration object for `convertToNative` + */ +export interface unmarshallOptions { + /** + * Whether to modify how numbers are unmarshalled from DynamoDB. + * When set to true, returns numbers as NumberValue instances instead of native JavaScript numbers. 
+ * This allows for the safe round-trip transport of numbers of arbitrary size. + * + * If a function is provided, it will be called with the string representation of numbers to handle + * custom conversions (e.g., using BigInt or decimal libraries). + */ + wrapNumbers?: boolean | ((value: string) => number | bigint | NumberValue | any); + /** + * When true, skip wrapping the data in `{ M: data }` before converting. + * + * Default is true when using the DynamoDBDocumentClient, + * but false if directly using the unmarshall function (backwards compatibility). + */ + convertWithoutMapWrapper?: boolean; +} +/** + * Convert a DynamoDB record into a JavaScript object. + * + * @param data - The DynamoDB record + * @param options - An optional configuration object for `unmarshall` + */ +export declare const unmarshall: (data: Record | AttributeValue, options?: unmarshallOptions) => Record; diff --git a/amplify/functions/deleteDocument/node_modules/@aws-sdk/util-dynamodb/package.json b/amplify/functions/deleteDocument/node_modules/@aws-sdk/util-dynamodb/package.json new file mode 100644 index 0000000..db385c7 --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@aws-sdk/util-dynamodb/package.json @@ -0,0 +1,57 @@ +{ + "name": "@aws-sdk/util-dynamodb", + "version": "3.803.0", + "scripts": { + "build": "concurrently 'yarn:build:cjs' 'yarn:build:es' 'yarn:build:types'", + "build:cjs": "node ../../scripts/compilation/inline util-dynamodb", + "build:es": "tsc -p tsconfig.es.json", + "build:include:deps": "lerna run --scope $npm_package_name --include-dependencies build", + "build:types": "tsc -p tsconfig.types.json", + "build:types:downlevel": "downlevel-dts dist-types dist-types/ts3.4", + "clean": "rimraf ./dist-* && rimraf *.tsbuildinfo", + "extract:docs": "api-extractor run --local", + "test": "yarn g:vitest run", + "test:watch": "yarn g:vitest watch" + }, + "main": "./dist-cjs/index.js", + "module": "./dist-es/index.js", + "types": "./dist-types/index.d.ts", + 
"author": { + "name": "AWS SDK for JavaScript Team", + "url": "https://aws.amazon.com/javascript/" + }, + "license": "Apache-2.0", + "dependencies": { + "tslib": "^2.6.2" + }, + "devDependencies": { + "@aws-sdk/client-dynamodb": "3.803.0", + "@tsconfig/recommended": "1.0.1", + "concurrently": "7.0.0", + "downlevel-dts": "0.10.1", + "rimraf": "3.0.2", + "typescript": "~5.2.2" + }, + "peerDependencies": { + "@aws-sdk/client-dynamodb": "^3.803.0" + }, + "engines": { + "node": ">=18.0.0" + }, + "typesVersions": { + "<4.0": { + "dist-types/*": [ + "dist-types/ts3.4/*" + ] + } + }, + "files": [ + "dist-*/**" + ], + "homepage": "https://github.com/aws/aws-sdk-js-v3/tree/main/packages/util-dynamodb", + "repository": { + "type": "git", + "url": "https://github.com/aws/aws-sdk-js-v3.git", + "directory": "packages/util-dynamodb" + } +} diff --git a/amplify/functions/deleteDocument/node_modules/@aws-sdk/util-endpoints/LICENSE b/amplify/functions/deleteDocument/node_modules/@aws-sdk/util-endpoints/LICENSE new file mode 100644 index 0000000..7b6491b --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@aws-sdk/util-endpoints/LICENSE @@ -0,0 +1,201 @@ +Apache License + Version 2.0, January 2004 + http://www.apache.org/licenses/ + + TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION + + 1. Definitions. + + "License" shall mean the terms and conditions for use, reproduction, + and distribution as defined by Sections 1 through 9 of this document. + + "Licensor" shall mean the copyright owner or entity authorized by + the copyright owner that is granting the License. + + "Legal Entity" shall mean the union of the acting entity and all + other entities that control, are controlled by, or are under common + control with that entity. 
For the purposes of this definition, + "control" means (i) the power, direct or indirect, to cause the + direction or management of such entity, whether by contract or + otherwise, or (ii) ownership of fifty percent (50%) or more of the + outstanding shares, or (iii) beneficial ownership of such entity. + + "You" (or "Your") shall mean an individual or Legal Entity + exercising permissions granted by this License. + + "Source" form shall mean the preferred form for making modifications, + including but not limited to software source code, documentation + source, and configuration files. + + "Object" form shall mean any form resulting from mechanical + transformation or translation of a Source form, including but + not limited to compiled object code, generated documentation, + and conversions to other media types. + + "Work" shall mean the work of authorship, whether in Source or + Object form, made available under the License, as indicated by a + copyright notice that is included in or attached to the work + (an example is provided in the Appendix below). + + "Derivative Works" shall mean any work, whether in Source or Object + form, that is based on (or derived from) the Work and for which the + editorial revisions, annotations, elaborations, or other modifications + represent, as a whole, an original work of authorship. For the purposes + of this License, Derivative Works shall not include works that remain + separable from, or merely link (or bind by name) to the interfaces of, + the Work and Derivative Works thereof. + + "Contribution" shall mean any work of authorship, including + the original version of the Work and any modifications or additions + to that Work or Derivative Works thereof, that is intentionally + submitted to Licensor for inclusion in the Work by the copyright owner + or by an individual or Legal Entity authorized to submit on behalf of + the copyright owner. 
For the purposes of this definition, "submitted" + means any form of electronic, verbal, or written communication sent + to the Licensor or its representatives, including but not limited to + communication on electronic mailing lists, source code control systems, + and issue tracking systems that are managed by, or on behalf of, the + Licensor for the purpose of discussing and improving the Work, but + excluding communication that is conspicuously marked or otherwise + designated in writing by the copyright owner as "Not a Contribution." + + "Contributor" shall mean Licensor and any individual or Legal Entity + on behalf of whom a Contribution has been received by Licensor and + subsequently incorporated within the Work. + + 2. Grant of Copyright License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + copyright license to reproduce, prepare Derivative Works of, + publicly display, publicly perform, sublicense, and distribute the + Work and such Derivative Works in Source or Object form. + + 3. Grant of Patent License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + (except as stated in this section) patent license to make, have made, + use, offer to sell, sell, import, and otherwise transfer the Work, + where such license applies only to those patent claims licensable + by such Contributor that are necessarily infringed by their + Contribution(s) alone or by combination of their Contribution(s) + with the Work to which such Contribution(s) was submitted. 
If You + institute patent litigation against any entity (including a + cross-claim or counterclaim in a lawsuit) alleging that the Work + or a Contribution incorporated within the Work constitutes direct + or contributory patent infringement, then any patent licenses + granted to You under this License for that Work shall terminate + as of the date such litigation is filed. + + 4. Redistribution. You may reproduce and distribute copies of the + Work or Derivative Works thereof in any medium, with or without + modifications, and in Source or Object form, provided that You + meet the following conditions: + + (a) You must give any other recipients of the Work or + Derivative Works a copy of this License; and + + (b) You must cause any modified files to carry prominent notices + stating that You changed the files; and + + (c) You must retain, in the Source form of any Derivative Works + that You distribute, all copyright, patent, trademark, and + attribution notices from the Source form of the Work, + excluding those notices that do not pertain to any part of + the Derivative Works; and + + (d) If the Work includes a "NOTICE" text file as part of its + distribution, then any Derivative Works that You distribute must + include a readable copy of the attribution notices contained + within such NOTICE file, excluding those notices that do not + pertain to any part of the Derivative Works, in at least one + of the following places: within a NOTICE text file distributed + as part of the Derivative Works; within the Source form or + documentation, if provided along with the Derivative Works; or, + within a display generated by the Derivative Works, if and + wherever such third-party notices normally appear. The contents + of the NOTICE file are for informational purposes only and + do not modify the License. 
You may add Your own attribution + notices within Derivative Works that You distribute, alongside + or as an addendum to the NOTICE text from the Work, provided + that such additional attribution notices cannot be construed + as modifying the License. + + You may add Your own copyright statement to Your modifications and + may provide additional or different license terms and conditions + for use, reproduction, or distribution of Your modifications, or + for any such Derivative Works as a whole, provided Your use, + reproduction, and distribution of the Work otherwise complies with + the conditions stated in this License. + + 5. Submission of Contributions. Unless You explicitly state otherwise, + any Contribution intentionally submitted for inclusion in the Work + by You to the Licensor shall be under the terms and conditions of + this License, without any additional terms or conditions. + Notwithstanding the above, nothing herein shall supersede or modify + the terms of any separate license agreement you may have executed + with Licensor regarding such Contributions. + + 6. Trademarks. This License does not grant permission to use the trade + names, trademarks, service marks, or product names of the Licensor, + except as required for reasonable and customary use in describing the + origin of the Work and reproducing the content of the NOTICE file. + + 7. Disclaimer of Warranty. Unless required by applicable law or + agreed to in writing, Licensor provides the Work (and each + Contributor provides its Contributions) on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or + implied, including, without limitation, any warranties or conditions + of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A + PARTICULAR PURPOSE. You are solely responsible for determining the + appropriateness of using or redistributing the Work and assume any + risks associated with Your exercise of permissions under this License. + + 8. 
Limitation of Liability. In no event and under no legal theory, + whether in tort (including negligence), contract, or otherwise, + unless required by applicable law (such as deliberate and grossly + negligent acts) or agreed to in writing, shall any Contributor be + liable to You for damages, including any direct, indirect, special, + incidental, or consequential damages of any character arising as a + result of this License or out of the use or inability to use the + Work (including but not limited to damages for loss of goodwill, + work stoppage, computer failure or malfunction, or any and all + other commercial damages or losses), even if such Contributor + has been advised of the possibility of such damages. + + 9. Accepting Warranty or Additional Liability. While redistributing + the Work or Derivative Works thereof, You may choose to offer, + and charge a fee for, acceptance of support, warranty, indemnity, + or other liability obligations and/or rights consistent with this + License. However, in accepting such obligations, You may act only + on Your own behalf and on Your sole responsibility, not on behalf + of any other Contributor, and only if You agree to indemnify, + defend, and hold each Contributor harmless for any liability + incurred by, or claims asserted against, such Contributor by reason + of your accepting any such warranty or additional liability. + + END OF TERMS AND CONDITIONS + + APPENDIX: How to apply the Apache License to your work. + + To apply the Apache License to your work, attach the following + boilerplate notice, with the fields enclosed by brackets "{}" + replaced with your own identifying information. (Don't include + the brackets!) The text should be enclosed in the appropriate + comment syntax for the file format. We also recommend that a + file or class name and description of purpose be included on the + same "printed page" as the copyright notice for easier + identification within third-party archives. 
+ + Copyright 2018-2020 Amazon.com, Inc. or its affiliates. All Rights Reserved. + + Licensed under the Apache License, Version 2.0 (the "License"); + you may not use this file except in compliance with the License. + You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + + Unless required by applicable law or agreed to in writing, software + distributed under the License is distributed on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + See the License for the specific language governing permissions and + limitations under the License. \ No newline at end of file diff --git a/amplify/functions/deleteDocument/node_modules/@aws-sdk/util-endpoints/README.md b/amplify/functions/deleteDocument/node_modules/@aws-sdk/util-endpoints/README.md new file mode 100644 index 0000000..641f54a --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@aws-sdk/util-endpoints/README.md @@ -0,0 +1,6 @@ +# @aws-sdk/util-endpoints + +[![NPM version](https://img.shields.io/npm/v/@aws-sdk/util-endpoints/latest.svg)](https://www.npmjs.com/package/@aws-sdk/util-endpoints) +[![NPM downloads](https://img.shields.io/npm/dm/@aws-sdk/util-endpoints.svg)](https://www.npmjs.com/package/@aws-sdk/util-endpoints) + +> An internal package diff --git a/amplify/functions/deleteDocument/node_modules/@aws-sdk/util-endpoints/dist-cjs/index.js b/amplify/functions/deleteDocument/node_modules/@aws-sdk/util-endpoints/dist-cjs/index.js new file mode 100644 index 0000000..ee0a932 --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@aws-sdk/util-endpoints/dist-cjs/index.js @@ -0,0 +1,450 @@ +"use strict"; +var __defProp = Object.defineProperty; +var __getOwnPropDesc = Object.getOwnPropertyDescriptor; +var __getOwnPropNames = Object.getOwnPropertyNames; +var __hasOwnProp = Object.prototype.hasOwnProperty; +var __name = (target, value) => __defProp(target, "name", { value, configurable: true }); +var __export = 
(target, all) => { + for (var name in all) + __defProp(target, name, { get: all[name], enumerable: true }); +}; +var __copyProps = (to, from, except, desc) => { + if (from && typeof from === "object" || typeof from === "function") { + for (let key of __getOwnPropNames(from)) + if (!__hasOwnProp.call(to, key) && key !== except) + __defProp(to, key, { get: () => from[key], enumerable: !(desc = __getOwnPropDesc(from, key)) || desc.enumerable }); + } + return to; +}; +var __toCommonJS = (mod) => __copyProps(__defProp({}, "__esModule", { value: true }), mod); + +// src/index.ts +var index_exports = {}; +__export(index_exports, { + ConditionObject: () => import_util_endpoints.ConditionObject, + DeprecatedObject: () => import_util_endpoints.DeprecatedObject, + EndpointError: () => import_util_endpoints.EndpointError, + EndpointObject: () => import_util_endpoints.EndpointObject, + EndpointObjectHeaders: () => import_util_endpoints.EndpointObjectHeaders, + EndpointObjectProperties: () => import_util_endpoints.EndpointObjectProperties, + EndpointParams: () => import_util_endpoints.EndpointParams, + EndpointResolverOptions: () => import_util_endpoints.EndpointResolverOptions, + EndpointRuleObject: () => import_util_endpoints.EndpointRuleObject, + ErrorRuleObject: () => import_util_endpoints.ErrorRuleObject, + EvaluateOptions: () => import_util_endpoints.EvaluateOptions, + Expression: () => import_util_endpoints.Expression, + FunctionArgv: () => import_util_endpoints.FunctionArgv, + FunctionObject: () => import_util_endpoints.FunctionObject, + FunctionReturn: () => import_util_endpoints.FunctionReturn, + ParameterObject: () => import_util_endpoints.ParameterObject, + ReferenceObject: () => import_util_endpoints.ReferenceObject, + ReferenceRecord: () => import_util_endpoints.ReferenceRecord, + RuleSetObject: () => import_util_endpoints.RuleSetObject, + RuleSetRules: () => import_util_endpoints.RuleSetRules, + TreeRuleObject: () => import_util_endpoints.TreeRuleObject, + 
awsEndpointFunctions: () => awsEndpointFunctions, + getUserAgentPrefix: () => getUserAgentPrefix, + isIpAddress: () => import_util_endpoints.isIpAddress, + partition: () => partition, + resolveEndpoint: () => import_util_endpoints.resolveEndpoint, + setPartitionInfo: () => setPartitionInfo, + useDefaultPartitionInfo: () => useDefaultPartitionInfo +}); +module.exports = __toCommonJS(index_exports); + +// src/aws.ts + + +// src/lib/aws/isVirtualHostableS3Bucket.ts + + +// src/lib/isIpAddress.ts +var import_util_endpoints = require("@smithy/util-endpoints"); + +// src/lib/aws/isVirtualHostableS3Bucket.ts +var isVirtualHostableS3Bucket = /* @__PURE__ */ __name((value, allowSubDomains = false) => { + if (allowSubDomains) { + for (const label of value.split(".")) { + if (!isVirtualHostableS3Bucket(label)) { + return false; + } + } + return true; + } + if (!(0, import_util_endpoints.isValidHostLabel)(value)) { + return false; + } + if (value.length < 3 || value.length > 63) { + return false; + } + if (value !== value.toLowerCase()) { + return false; + } + if ((0, import_util_endpoints.isIpAddress)(value)) { + return false; + } + return true; +}, "isVirtualHostableS3Bucket"); + +// src/lib/aws/parseArn.ts +var ARN_DELIMITER = ":"; +var RESOURCE_DELIMITER = "/"; +var parseArn = /* @__PURE__ */ __name((value) => { + const segments = value.split(ARN_DELIMITER); + if (segments.length < 6) return null; + const [arn, partition2, service, region, accountId, ...resourcePath] = segments; + if (arn !== "arn" || partition2 === "" || service === "" || resourcePath.join(ARN_DELIMITER) === "") return null; + const resourceId = resourcePath.map((resource) => resource.split(RESOURCE_DELIMITER)).flat(); + return { + partition: partition2, + service, + region, + accountId, + resourceId + }; +}, "parseArn"); + +// src/lib/aws/partitions.json +var partitions_default = { + partitions: [{ + id: "aws", + outputs: { + dnsSuffix: "amazonaws.com", + dualStackDnsSuffix: "api.aws", + 
implicitGlobalRegion: "us-east-1", + name: "aws", + supportsDualStack: true, + supportsFIPS: true + }, + regionRegex: "^(us|eu|ap|sa|ca|me|af|il|mx)\\-\\w+\\-\\d+$", + regions: { + "af-south-1": { + description: "Africa (Cape Town)" + }, + "ap-east-1": { + description: "Asia Pacific (Hong Kong)" + }, + "ap-northeast-1": { + description: "Asia Pacific (Tokyo)" + }, + "ap-northeast-2": { + description: "Asia Pacific (Seoul)" + }, + "ap-northeast-3": { + description: "Asia Pacific (Osaka)" + }, + "ap-south-1": { + description: "Asia Pacific (Mumbai)" + }, + "ap-south-2": { + description: "Asia Pacific (Hyderabad)" + }, + "ap-southeast-1": { + description: "Asia Pacific (Singapore)" + }, + "ap-southeast-2": { + description: "Asia Pacific (Sydney)" + }, + "ap-southeast-3": { + description: "Asia Pacific (Jakarta)" + }, + "ap-southeast-4": { + description: "Asia Pacific (Melbourne)" + }, + "ap-southeast-5": { + description: "Asia Pacific (Malaysia)" + }, + "ap-southeast-7": { + description: "Asia Pacific (Thailand)" + }, + "aws-global": { + description: "AWS Standard global region" + }, + "ca-central-1": { + description: "Canada (Central)" + }, + "ca-west-1": { + description: "Canada West (Calgary)" + }, + "eu-central-1": { + description: "Europe (Frankfurt)" + }, + "eu-central-2": { + description: "Europe (Zurich)" + }, + "eu-north-1": { + description: "Europe (Stockholm)" + }, + "eu-south-1": { + description: "Europe (Milan)" + }, + "eu-south-2": { + description: "Europe (Spain)" + }, + "eu-west-1": { + description: "Europe (Ireland)" + }, + "eu-west-2": { + description: "Europe (London)" + }, + "eu-west-3": { + description: "Europe (Paris)" + }, + "il-central-1": { + description: "Israel (Tel Aviv)" + }, + "me-central-1": { + description: "Middle East (UAE)" + }, + "me-south-1": { + description: "Middle East (Bahrain)" + }, + "mx-central-1": { + description: "Mexico (Central)" + }, + "sa-east-1": { + description: "South America (Sao Paulo)" + }, + "us-east-1": { + 
description: "US East (N. Virginia)" + }, + "us-east-2": { + description: "US East (Ohio)" + }, + "us-west-1": { + description: "US West (N. California)" + }, + "us-west-2": { + description: "US West (Oregon)" + } + } + }, { + id: "aws-cn", + outputs: { + dnsSuffix: "amazonaws.com.cn", + dualStackDnsSuffix: "api.amazonwebservices.com.cn", + implicitGlobalRegion: "cn-northwest-1", + name: "aws-cn", + supportsDualStack: true, + supportsFIPS: true + }, + regionRegex: "^cn\\-\\w+\\-\\d+$", + regions: { + "aws-cn-global": { + description: "AWS China global region" + }, + "cn-north-1": { + description: "China (Beijing)" + }, + "cn-northwest-1": { + description: "China (Ningxia)" + } + } + }, { + id: "aws-us-gov", + outputs: { + dnsSuffix: "amazonaws.com", + dualStackDnsSuffix: "api.aws", + implicitGlobalRegion: "us-gov-west-1", + name: "aws-us-gov", + supportsDualStack: true, + supportsFIPS: true + }, + regionRegex: "^us\\-gov\\-\\w+\\-\\d+$", + regions: { + "aws-us-gov-global": { + description: "AWS GovCloud (US) global region" + }, + "us-gov-east-1": { + description: "AWS GovCloud (US-East)" + }, + "us-gov-west-1": { + description: "AWS GovCloud (US-West)" + } + } + }, { + id: "aws-iso", + outputs: { + dnsSuffix: "c2s.ic.gov", + dualStackDnsSuffix: "c2s.ic.gov", + implicitGlobalRegion: "us-iso-east-1", + name: "aws-iso", + supportsDualStack: false, + supportsFIPS: true + }, + regionRegex: "^us\\-iso\\-\\w+\\-\\d+$", + regions: { + "aws-iso-global": { + description: "AWS ISO (US) global region" + }, + "us-iso-east-1": { + description: "US ISO East" + }, + "us-iso-west-1": { + description: "US ISO WEST" + } + } + }, { + id: "aws-iso-b", + outputs: { + dnsSuffix: "sc2s.sgov.gov", + dualStackDnsSuffix: "sc2s.sgov.gov", + implicitGlobalRegion: "us-isob-east-1", + name: "aws-iso-b", + supportsDualStack: false, + supportsFIPS: true + }, + regionRegex: "^us\\-isob\\-\\w+\\-\\d+$", + regions: { + "aws-iso-b-global": { + description: "AWS ISOB (US) global region" + }, + 
"us-isob-east-1": { + description: "US ISOB East (Ohio)" + } + } + }, { + id: "aws-iso-e", + outputs: { + dnsSuffix: "cloud.adc-e.uk", + dualStackDnsSuffix: "cloud.adc-e.uk", + implicitGlobalRegion: "eu-isoe-west-1", + name: "aws-iso-e", + supportsDualStack: false, + supportsFIPS: true + }, + regionRegex: "^eu\\-isoe\\-\\w+\\-\\d+$", + regions: { + "aws-iso-e-global": { + description: "AWS ISOE (Europe) global region" + }, + "eu-isoe-west-1": { + description: "EU ISOE West" + } + } + }, { + id: "aws-iso-f", + outputs: { + dnsSuffix: "csp.hci.ic.gov", + dualStackDnsSuffix: "csp.hci.ic.gov", + implicitGlobalRegion: "us-isof-south-1", + name: "aws-iso-f", + supportsDualStack: false, + supportsFIPS: true + }, + regionRegex: "^us\\-isof\\-\\w+\\-\\d+$", + regions: { + "aws-iso-f-global": { + description: "AWS ISOF global region" + }, + "us-isof-east-1": { + description: "US ISOF EAST" + }, + "us-isof-south-1": { + description: "US ISOF SOUTH" + } + } + }, { + id: "aws-eusc", + outputs: { + dnsSuffix: "amazonaws.eu", + dualStackDnsSuffix: "amazonaws.eu", + implicitGlobalRegion: "eusc-de-east-1", + name: "aws-eusc", + supportsDualStack: false, + supportsFIPS: true + }, + regionRegex: "^eusc\\-(de)\\-\\w+\\-\\d+$", + regions: { + "eusc-de-east-1": { + description: "EU (Germany)" + } + } + }], + version: "1.1" +}; + +// src/lib/aws/partition.ts +var selectedPartitionsInfo = partitions_default; +var selectedUserAgentPrefix = ""; +var partition = /* @__PURE__ */ __name((value) => { + const { partitions } = selectedPartitionsInfo; + for (const partition2 of partitions) { + const { regions, outputs } = partition2; + for (const [region, regionData] of Object.entries(regions)) { + if (region === value) { + return { + ...outputs, + ...regionData + }; + } + } + } + for (const partition2 of partitions) { + const { regionRegex, outputs } = partition2; + if (new RegExp(regionRegex).test(value)) { + return { + ...outputs + }; + } + } + const DEFAULT_PARTITION = 
partitions.find((partition2) => partition2.id === "aws"); + if (!DEFAULT_PARTITION) { + throw new Error( + "Provided region was not found in the partition array or regex, and default partition with id 'aws' doesn't exist." + ); + } + return { + ...DEFAULT_PARTITION.outputs + }; +}, "partition"); +var setPartitionInfo = /* @__PURE__ */ __name((partitionsInfo, userAgentPrefix = "") => { + selectedPartitionsInfo = partitionsInfo; + selectedUserAgentPrefix = userAgentPrefix; +}, "setPartitionInfo"); +var useDefaultPartitionInfo = /* @__PURE__ */ __name(() => { + setPartitionInfo(partitions_default, ""); +}, "useDefaultPartitionInfo"); +var getUserAgentPrefix = /* @__PURE__ */ __name(() => selectedUserAgentPrefix, "getUserAgentPrefix"); + +// src/aws.ts +var awsEndpointFunctions = { + isVirtualHostableS3Bucket, + parseArn, + partition +}; +import_util_endpoints.customEndpointFunctions.aws = awsEndpointFunctions; + +// src/resolveEndpoint.ts + + +// src/types/EndpointError.ts + + +// src/types/EndpointRuleObject.ts + + +// src/types/ErrorRuleObject.ts + + +// src/types/RuleSetObject.ts + + +// src/types/TreeRuleObject.ts + + +// src/types/shared.ts + +// Annotate the CommonJS export names for ESM import in node: + +0 && (module.exports = { + awsEndpointFunctions, + partition, + setPartitionInfo, + useDefaultPartitionInfo, + getUserAgentPrefix, + isIpAddress, + resolveEndpoint, + EndpointError +}); + diff --git a/amplify/functions/deleteDocument/node_modules/@aws-sdk/util-endpoints/dist-cjs/lib/aws/partitions.json b/amplify/functions/deleteDocument/node_modules/@aws-sdk/util-endpoints/dist-cjs/lib/aws/partitions.json new file mode 100644 index 0000000..a11705a --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@aws-sdk/util-endpoints/dist-cjs/lib/aws/partitions.json @@ -0,0 +1,258 @@ +{ + "partitions": [{ + "id": "aws", + "outputs": { + "dnsSuffix": "amazonaws.com", + "dualStackDnsSuffix": "api.aws", + "implicitGlobalRegion": "us-east-1", + "name": "aws", + 
"supportsDualStack": true, + "supportsFIPS": true + }, + "regionRegex": "^(us|eu|ap|sa|ca|me|af|il|mx)\\-\\w+\\-\\d+$", + "regions": { + "af-south-1": { + "description": "Africa (Cape Town)" + }, + "ap-east-1": { + "description": "Asia Pacific (Hong Kong)" + }, + "ap-northeast-1": { + "description": "Asia Pacific (Tokyo)" + }, + "ap-northeast-2": { + "description": "Asia Pacific (Seoul)" + }, + "ap-northeast-3": { + "description": "Asia Pacific (Osaka)" + }, + "ap-south-1": { + "description": "Asia Pacific (Mumbai)" + }, + "ap-south-2": { + "description": "Asia Pacific (Hyderabad)" + }, + "ap-southeast-1": { + "description": "Asia Pacific (Singapore)" + }, + "ap-southeast-2": { + "description": "Asia Pacific (Sydney)" + }, + "ap-southeast-3": { + "description": "Asia Pacific (Jakarta)" + }, + "ap-southeast-4": { + "description": "Asia Pacific (Melbourne)" + }, + "ap-southeast-5": { + "description": "Asia Pacific (Malaysia)" + }, + "ap-southeast-7": { + "description": "Asia Pacific (Thailand)" + }, + "aws-global": { + "description": "AWS Standard global region" + }, + "ca-central-1": { + "description": "Canada (Central)" + }, + "ca-west-1": { + "description": "Canada West (Calgary)" + }, + "eu-central-1": { + "description": "Europe (Frankfurt)" + }, + "eu-central-2": { + "description": "Europe (Zurich)" + }, + "eu-north-1": { + "description": "Europe (Stockholm)" + }, + "eu-south-1": { + "description": "Europe (Milan)" + }, + "eu-south-2": { + "description": "Europe (Spain)" + }, + "eu-west-1": { + "description": "Europe (Ireland)" + }, + "eu-west-2": { + "description": "Europe (London)" + }, + "eu-west-3": { + "description": "Europe (Paris)" + }, + "il-central-1": { + "description": "Israel (Tel Aviv)" + }, + "me-central-1": { + "description": "Middle East (UAE)" + }, + "me-south-1": { + "description": "Middle East (Bahrain)" + }, + "mx-central-1": { + "description": "Mexico (Central)" + }, + "sa-east-1": { + "description": "South America (Sao Paulo)" + }, + 
"us-east-1": { + "description": "US East (N. Virginia)" + }, + "us-east-2": { + "description": "US East (Ohio)" + }, + "us-west-1": { + "description": "US West (N. California)" + }, + "us-west-2": { + "description": "US West (Oregon)" + } + } + }, { + "id": "aws-cn", + "outputs": { + "dnsSuffix": "amazonaws.com.cn", + "dualStackDnsSuffix": "api.amazonwebservices.com.cn", + "implicitGlobalRegion": "cn-northwest-1", + "name": "aws-cn", + "supportsDualStack": true, + "supportsFIPS": true + }, + "regionRegex": "^cn\\-\\w+\\-\\d+$", + "regions": { + "aws-cn-global": { + "description": "AWS China global region" + }, + "cn-north-1": { + "description": "China (Beijing)" + }, + "cn-northwest-1": { + "description": "China (Ningxia)" + } + } + }, { + "id": "aws-us-gov", + "outputs": { + "dnsSuffix": "amazonaws.com", + "dualStackDnsSuffix": "api.aws", + "implicitGlobalRegion": "us-gov-west-1", + "name": "aws-us-gov", + "supportsDualStack": true, + "supportsFIPS": true + }, + "regionRegex": "^us\\-gov\\-\\w+\\-\\d+$", + "regions": { + "aws-us-gov-global": { + "description": "AWS GovCloud (US) global region" + }, + "us-gov-east-1": { + "description": "AWS GovCloud (US-East)" + }, + "us-gov-west-1": { + "description": "AWS GovCloud (US-West)" + } + } + }, { + "id": "aws-iso", + "outputs": { + "dnsSuffix": "c2s.ic.gov", + "dualStackDnsSuffix": "c2s.ic.gov", + "implicitGlobalRegion": "us-iso-east-1", + "name": "aws-iso", + "supportsDualStack": false, + "supportsFIPS": true + }, + "regionRegex": "^us\\-iso\\-\\w+\\-\\d+$", + "regions": { + "aws-iso-global": { + "description": "AWS ISO (US) global region" + }, + "us-iso-east-1": { + "description": "US ISO East" + }, + "us-iso-west-1": { + "description": "US ISO WEST" + } + } + }, { + "id": "aws-iso-b", + "outputs": { + "dnsSuffix": "sc2s.sgov.gov", + "dualStackDnsSuffix": "sc2s.sgov.gov", + "implicitGlobalRegion": "us-isob-east-1", + "name": "aws-iso-b", + "supportsDualStack": false, + "supportsFIPS": true + }, + "regionRegex": 
"^us\\-isob\\-\\w+\\-\\d+$", + "regions": { + "aws-iso-b-global": { + "description": "AWS ISOB (US) global region" + }, + "us-isob-east-1": { + "description": "US ISOB East (Ohio)" + } + } + }, { + "id": "aws-iso-e", + "outputs": { + "dnsSuffix": "cloud.adc-e.uk", + "dualStackDnsSuffix": "cloud.adc-e.uk", + "implicitGlobalRegion": "eu-isoe-west-1", + "name": "aws-iso-e", + "supportsDualStack": false, + "supportsFIPS": true + }, + "regionRegex": "^eu\\-isoe\\-\\w+\\-\\d+$", + "regions": { + "aws-iso-e-global": { + "description": "AWS ISOE (Europe) global region" + }, + "eu-isoe-west-1": { + "description": "EU ISOE West" + } + } + }, { + "id": "aws-iso-f", + "outputs": { + "dnsSuffix": "csp.hci.ic.gov", + "dualStackDnsSuffix": "csp.hci.ic.gov", + "implicitGlobalRegion": "us-isof-south-1", + "name": "aws-iso-f", + "supportsDualStack": false, + "supportsFIPS": true + }, + "regionRegex": "^us\\-isof\\-\\w+\\-\\d+$", + "regions": { + "aws-iso-f-global": { + "description": "AWS ISOF global region" + }, + "us-isof-east-1": { + "description": "US ISOF EAST" + }, + "us-isof-south-1": { + "description": "US ISOF SOUTH" + } + } + }, { + "id": "aws-eusc", + "outputs": { + "dnsSuffix": "amazonaws.eu", + "dualStackDnsSuffix": "amazonaws.eu", + "implicitGlobalRegion": "eusc-de-east-1", + "name": "aws-eusc", + "supportsDualStack": false, + "supportsFIPS": true + }, + "regionRegex": "^eusc\\-(de)\\-\\w+\\-\\d+$", + "regions": { + "eusc-de-east-1": { + "description": "EU (Germany)" + } + } + }], + "version": "1.1" +} diff --git a/amplify/functions/deleteDocument/node_modules/@aws-sdk/util-endpoints/dist-es/aws.js b/amplify/functions/deleteDocument/node_modules/@aws-sdk/util-endpoints/dist-es/aws.js new file mode 100644 index 0000000..49a408e --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@aws-sdk/util-endpoints/dist-es/aws.js @@ -0,0 +1,10 @@ +import { customEndpointFunctions } from "@smithy/util-endpoints"; +import { isVirtualHostableS3Bucket } from 
"./lib/aws/isVirtualHostableS3Bucket"; +import { parseArn } from "./lib/aws/parseArn"; +import { partition } from "./lib/aws/partition"; +export const awsEndpointFunctions = { + isVirtualHostableS3Bucket: isVirtualHostableS3Bucket, + parseArn: parseArn, + partition: partition, +}; +customEndpointFunctions.aws = awsEndpointFunctions; diff --git a/amplify/functions/deleteDocument/node_modules/@aws-sdk/util-endpoints/dist-es/index.js b/amplify/functions/deleteDocument/node_modules/@aws-sdk/util-endpoints/dist-es/index.js new file mode 100644 index 0000000..d046d90 --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@aws-sdk/util-endpoints/dist-es/index.js @@ -0,0 +1,5 @@ +export * from "./aws"; +export * from "./lib/aws/partition"; +export * from "./lib/isIpAddress"; +export * from "./resolveEndpoint"; +export * from "./types"; diff --git a/amplify/functions/deleteDocument/node_modules/@aws-sdk/util-endpoints/dist-es/lib/aws/index.js b/amplify/functions/deleteDocument/node_modules/@aws-sdk/util-endpoints/dist-es/lib/aws/index.js new file mode 100644 index 0000000..03be049 --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@aws-sdk/util-endpoints/dist-es/lib/aws/index.js @@ -0,0 +1,3 @@ +export * from "./isVirtualHostableS3Bucket"; +export * from "./parseArn"; +export * from "./partition"; diff --git a/amplify/functions/deleteDocument/node_modules/@aws-sdk/util-endpoints/dist-es/lib/aws/isVirtualHostableS3Bucket.js b/amplify/functions/deleteDocument/node_modules/@aws-sdk/util-endpoints/dist-es/lib/aws/isVirtualHostableS3Bucket.js new file mode 100644 index 0000000..f2bacc0 --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@aws-sdk/util-endpoints/dist-es/lib/aws/isVirtualHostableS3Bucket.js @@ -0,0 +1,25 @@ +import { isValidHostLabel } from "@smithy/util-endpoints"; +import { isIpAddress } from "../isIpAddress"; +export const isVirtualHostableS3Bucket = (value, allowSubDomains = false) => { + if (allowSubDomains) { + for 
(const label of value.split(".")) { + if (!isVirtualHostableS3Bucket(label)) { + return false; + } + } + return true; + } + if (!isValidHostLabel(value)) { + return false; + } + if (value.length < 3 || value.length > 63) { + return false; + } + if (value !== value.toLowerCase()) { + return false; + } + if (isIpAddress(value)) { + return false; + } + return true; +}; diff --git a/amplify/functions/deleteDocument/node_modules/@aws-sdk/util-endpoints/dist-es/lib/aws/parseArn.js b/amplify/functions/deleteDocument/node_modules/@aws-sdk/util-endpoints/dist-es/lib/aws/parseArn.js new file mode 100644 index 0000000..6b12887 --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@aws-sdk/util-endpoints/dist-es/lib/aws/parseArn.js @@ -0,0 +1,18 @@ +const ARN_DELIMITER = ":"; +const RESOURCE_DELIMITER = "/"; +export const parseArn = (value) => { + const segments = value.split(ARN_DELIMITER); + if (segments.length < 6) + return null; + const [arn, partition, service, region, accountId, ...resourcePath] = segments; + if (arn !== "arn" || partition === "" || service === "" || resourcePath.join(ARN_DELIMITER) === "") + return null; + const resourceId = resourcePath.map((resource) => resource.split(RESOURCE_DELIMITER)).flat(); + return { + partition, + service, + region, + accountId, + resourceId, + }; +}; diff --git a/amplify/functions/deleteDocument/node_modules/@aws-sdk/util-endpoints/dist-es/lib/aws/partition.js b/amplify/functions/deleteDocument/node_modules/@aws-sdk/util-endpoints/dist-es/lib/aws/partition.js new file mode 100644 index 0000000..8d39d81 --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@aws-sdk/util-endpoints/dist-es/lib/aws/partition.js @@ -0,0 +1,41 @@ +import partitionsInfo from "./partitions.json"; +let selectedPartitionsInfo = partitionsInfo; +let selectedUserAgentPrefix = ""; +export const partition = (value) => { + const { partitions } = selectedPartitionsInfo; + for (const partition of partitions) { + const { regions, 
outputs } = partition; + for (const [region, regionData] of Object.entries(regions)) { + if (region === value) { + return { + ...outputs, + ...regionData, + }; + } + } + } + for (const partition of partitions) { + const { regionRegex, outputs } = partition; + if (new RegExp(regionRegex).test(value)) { + return { + ...outputs, + }; + } + } + const DEFAULT_PARTITION = partitions.find((partition) => partition.id === "aws"); + if (!DEFAULT_PARTITION) { + throw new Error("Provided region was not found in the partition array or regex," + + " and default partition with id 'aws' doesn't exist."); + } + return { + ...DEFAULT_PARTITION.outputs, + }; +}; +export const setPartitionInfo = (partitionsInfo, userAgentPrefix = "") => { + selectedPartitionsInfo = partitionsInfo; + selectedUserAgentPrefix = userAgentPrefix; +}; +export const useDefaultPartitionInfo = () => { + setPartitionInfo(partitionsInfo, ""); +}; +export const getUserAgentPrefix = () => selectedUserAgentPrefix; diff --git a/amplify/functions/deleteDocument/node_modules/@aws-sdk/util-endpoints/dist-es/lib/aws/partitions.json b/amplify/functions/deleteDocument/node_modules/@aws-sdk/util-endpoints/dist-es/lib/aws/partitions.json new file mode 100644 index 0000000..a11705a --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@aws-sdk/util-endpoints/dist-es/lib/aws/partitions.json @@ -0,0 +1,258 @@ +{ + "partitions": [{ + "id": "aws", + "outputs": { + "dnsSuffix": "amazonaws.com", + "dualStackDnsSuffix": "api.aws", + "implicitGlobalRegion": "us-east-1", + "name": "aws", + "supportsDualStack": true, + "supportsFIPS": true + }, + "regionRegex": "^(us|eu|ap|sa|ca|me|af|il|mx)\\-\\w+\\-\\d+$", + "regions": { + "af-south-1": { + "description": "Africa (Cape Town)" + }, + "ap-east-1": { + "description": "Asia Pacific (Hong Kong)" + }, + "ap-northeast-1": { + "description": "Asia Pacific (Tokyo)" + }, + "ap-northeast-2": { + "description": "Asia Pacific (Seoul)" + }, + "ap-northeast-3": { + "description": "Asia 
Pacific (Osaka)" + }, + "ap-south-1": { + "description": "Asia Pacific (Mumbai)" + }, + "ap-south-2": { + "description": "Asia Pacific (Hyderabad)" + }, + "ap-southeast-1": { + "description": "Asia Pacific (Singapore)" + }, + "ap-southeast-2": { + "description": "Asia Pacific (Sydney)" + }, + "ap-southeast-3": { + "description": "Asia Pacific (Jakarta)" + }, + "ap-southeast-4": { + "description": "Asia Pacific (Melbourne)" + }, + "ap-southeast-5": { + "description": "Asia Pacific (Malaysia)" + }, + "ap-southeast-7": { + "description": "Asia Pacific (Thailand)" + }, + "aws-global": { + "description": "AWS Standard global region" + }, + "ca-central-1": { + "description": "Canada (Central)" + }, + "ca-west-1": { + "description": "Canada West (Calgary)" + }, + "eu-central-1": { + "description": "Europe (Frankfurt)" + }, + "eu-central-2": { + "description": "Europe (Zurich)" + }, + "eu-north-1": { + "description": "Europe (Stockholm)" + }, + "eu-south-1": { + "description": "Europe (Milan)" + }, + "eu-south-2": { + "description": "Europe (Spain)" + }, + "eu-west-1": { + "description": "Europe (Ireland)" + }, + "eu-west-2": { + "description": "Europe (London)" + }, + "eu-west-3": { + "description": "Europe (Paris)" + }, + "il-central-1": { + "description": "Israel (Tel Aviv)" + }, + "me-central-1": { + "description": "Middle East (UAE)" + }, + "me-south-1": { + "description": "Middle East (Bahrain)" + }, + "mx-central-1": { + "description": "Mexico (Central)" + }, + "sa-east-1": { + "description": "South America (Sao Paulo)" + }, + "us-east-1": { + "description": "US East (N. Virginia)" + }, + "us-east-2": { + "description": "US East (Ohio)" + }, + "us-west-1": { + "description": "US West (N. 
California)" + }, + "us-west-2": { + "description": "US West (Oregon)" + } + } + }, { + "id": "aws-cn", + "outputs": { + "dnsSuffix": "amazonaws.com.cn", + "dualStackDnsSuffix": "api.amazonwebservices.com.cn", + "implicitGlobalRegion": "cn-northwest-1", + "name": "aws-cn", + "supportsDualStack": true, + "supportsFIPS": true + }, + "regionRegex": "^cn\\-\\w+\\-\\d+$", + "regions": { + "aws-cn-global": { + "description": "AWS China global region" + }, + "cn-north-1": { + "description": "China (Beijing)" + }, + "cn-northwest-1": { + "description": "China (Ningxia)" + } + } + }, { + "id": "aws-us-gov", + "outputs": { + "dnsSuffix": "amazonaws.com", + "dualStackDnsSuffix": "api.aws", + "implicitGlobalRegion": "us-gov-west-1", + "name": "aws-us-gov", + "supportsDualStack": true, + "supportsFIPS": true + }, + "regionRegex": "^us\\-gov\\-\\w+\\-\\d+$", + "regions": { + "aws-us-gov-global": { + "description": "AWS GovCloud (US) global region" + }, + "us-gov-east-1": { + "description": "AWS GovCloud (US-East)" + }, + "us-gov-west-1": { + "description": "AWS GovCloud (US-West)" + } + } + }, { + "id": "aws-iso", + "outputs": { + "dnsSuffix": "c2s.ic.gov", + "dualStackDnsSuffix": "c2s.ic.gov", + "implicitGlobalRegion": "us-iso-east-1", + "name": "aws-iso", + "supportsDualStack": false, + "supportsFIPS": true + }, + "regionRegex": "^us\\-iso\\-\\w+\\-\\d+$", + "regions": { + "aws-iso-global": { + "description": "AWS ISO (US) global region" + }, + "us-iso-east-1": { + "description": "US ISO East" + }, + "us-iso-west-1": { + "description": "US ISO WEST" + } + } + }, { + "id": "aws-iso-b", + "outputs": { + "dnsSuffix": "sc2s.sgov.gov", + "dualStackDnsSuffix": "sc2s.sgov.gov", + "implicitGlobalRegion": "us-isob-east-1", + "name": "aws-iso-b", + "supportsDualStack": false, + "supportsFIPS": true + }, + "regionRegex": "^us\\-isob\\-\\w+\\-\\d+$", + "regions": { + "aws-iso-b-global": { + "description": "AWS ISOB (US) global region" + }, + "us-isob-east-1": { + "description": "US ISOB 
East (Ohio)" + } + } + }, { + "id": "aws-iso-e", + "outputs": { + "dnsSuffix": "cloud.adc-e.uk", + "dualStackDnsSuffix": "cloud.adc-e.uk", + "implicitGlobalRegion": "eu-isoe-west-1", + "name": "aws-iso-e", + "supportsDualStack": false, + "supportsFIPS": true + }, + "regionRegex": "^eu\\-isoe\\-\\w+\\-\\d+$", + "regions": { + "aws-iso-e-global": { + "description": "AWS ISOE (Europe) global region" + }, + "eu-isoe-west-1": { + "description": "EU ISOE West" + } + } + }, { + "id": "aws-iso-f", + "outputs": { + "dnsSuffix": "csp.hci.ic.gov", + "dualStackDnsSuffix": "csp.hci.ic.gov", + "implicitGlobalRegion": "us-isof-south-1", + "name": "aws-iso-f", + "supportsDualStack": false, + "supportsFIPS": true + }, + "regionRegex": "^us\\-isof\\-\\w+\\-\\d+$", + "regions": { + "aws-iso-f-global": { + "description": "AWS ISOF global region" + }, + "us-isof-east-1": { + "description": "US ISOF EAST" + }, + "us-isof-south-1": { + "description": "US ISOF SOUTH" + } + } + }, { + "id": "aws-eusc", + "outputs": { + "dnsSuffix": "amazonaws.eu", + "dualStackDnsSuffix": "amazonaws.eu", + "implicitGlobalRegion": "eusc-de-east-1", + "name": "aws-eusc", + "supportsDualStack": false, + "supportsFIPS": true + }, + "regionRegex": "^eusc\\-(de)\\-\\w+\\-\\d+$", + "regions": { + "eusc-de-east-1": { + "description": "EU (Germany)" + } + } + }], + "version": "1.1" +} diff --git a/amplify/functions/deleteDocument/node_modules/@aws-sdk/util-endpoints/dist-es/lib/isIpAddress.js b/amplify/functions/deleteDocument/node_modules/@aws-sdk/util-endpoints/dist-es/lib/isIpAddress.js new file mode 100644 index 0000000..59bfcd8 --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@aws-sdk/util-endpoints/dist-es/lib/isIpAddress.js @@ -0,0 +1 @@ +export { isIpAddress } from "@smithy/util-endpoints"; diff --git a/amplify/functions/deleteDocument/node_modules/@aws-sdk/util-endpoints/dist-es/resolveEndpoint.js 
b/amplify/functions/deleteDocument/node_modules/@aws-sdk/util-endpoints/dist-es/resolveEndpoint.js new file mode 100644 index 0000000..e2453f7 --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@aws-sdk/util-endpoints/dist-es/resolveEndpoint.js @@ -0,0 +1 @@ +export { resolveEndpoint } from "@smithy/util-endpoints"; diff --git a/amplify/functions/deleteDocument/node_modules/@aws-sdk/util-endpoints/dist-es/types/EndpointError.js b/amplify/functions/deleteDocument/node_modules/@aws-sdk/util-endpoints/dist-es/types/EndpointError.js new file mode 100644 index 0000000..521e688 --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@aws-sdk/util-endpoints/dist-es/types/EndpointError.js @@ -0,0 +1 @@ +export { EndpointError } from "@smithy/util-endpoints"; diff --git a/amplify/functions/deleteDocument/node_modules/@aws-sdk/util-endpoints/dist-es/types/EndpointRuleObject.js b/amplify/functions/deleteDocument/node_modules/@aws-sdk/util-endpoints/dist-es/types/EndpointRuleObject.js new file mode 100644 index 0000000..cb0ff5c --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@aws-sdk/util-endpoints/dist-es/types/EndpointRuleObject.js @@ -0,0 +1 @@ +export {}; diff --git a/amplify/functions/deleteDocument/node_modules/@aws-sdk/util-endpoints/dist-es/types/ErrorRuleObject.js b/amplify/functions/deleteDocument/node_modules/@aws-sdk/util-endpoints/dist-es/types/ErrorRuleObject.js new file mode 100644 index 0000000..cb0ff5c --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@aws-sdk/util-endpoints/dist-es/types/ErrorRuleObject.js @@ -0,0 +1 @@ +export {}; diff --git a/amplify/functions/deleteDocument/node_modules/@aws-sdk/util-endpoints/dist-es/types/RuleSetObject.js b/amplify/functions/deleteDocument/node_modules/@aws-sdk/util-endpoints/dist-es/types/RuleSetObject.js new file mode 100644 index 0000000..cb0ff5c --- /dev/null +++ 
b/amplify/functions/deleteDocument/node_modules/@aws-sdk/util-endpoints/dist-es/types/RuleSetObject.js @@ -0,0 +1 @@ +export {}; diff --git a/amplify/functions/deleteDocument/node_modules/@aws-sdk/util-endpoints/dist-es/types/TreeRuleObject.js b/amplify/functions/deleteDocument/node_modules/@aws-sdk/util-endpoints/dist-es/types/TreeRuleObject.js new file mode 100644 index 0000000..cb0ff5c --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@aws-sdk/util-endpoints/dist-es/types/TreeRuleObject.js @@ -0,0 +1 @@ +export {}; diff --git a/amplify/functions/deleteDocument/node_modules/@aws-sdk/util-endpoints/dist-es/types/index.js b/amplify/functions/deleteDocument/node_modules/@aws-sdk/util-endpoints/dist-es/types/index.js new file mode 100644 index 0000000..daba501 --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@aws-sdk/util-endpoints/dist-es/types/index.js @@ -0,0 +1,6 @@ +export * from "./EndpointError"; +export * from "./EndpointRuleObject"; +export * from "./ErrorRuleObject"; +export * from "./RuleSetObject"; +export * from "./TreeRuleObject"; +export * from "./shared"; diff --git a/amplify/functions/deleteDocument/node_modules/@aws-sdk/util-endpoints/dist-es/types/shared.js b/amplify/functions/deleteDocument/node_modules/@aws-sdk/util-endpoints/dist-es/types/shared.js new file mode 100644 index 0000000..cb0ff5c --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@aws-sdk/util-endpoints/dist-es/types/shared.js @@ -0,0 +1 @@ +export {}; diff --git a/amplify/functions/deleteDocument/node_modules/@aws-sdk/util-endpoints/dist-types/aws.d.ts b/amplify/functions/deleteDocument/node_modules/@aws-sdk/util-endpoints/dist-types/aws.d.ts new file mode 100644 index 0000000..13c64a9 --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@aws-sdk/util-endpoints/dist-types/aws.d.ts @@ -0,0 +1,2 @@ +import { EndpointFunctions } from "@smithy/util-endpoints"; +export declare const awsEndpointFunctions: EndpointFunctions; diff 
--git a/amplify/functions/deleteDocument/node_modules/@aws-sdk/util-endpoints/dist-types/index.d.ts b/amplify/functions/deleteDocument/node_modules/@aws-sdk/util-endpoints/dist-types/index.d.ts new file mode 100644 index 0000000..d046d90 --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@aws-sdk/util-endpoints/dist-types/index.d.ts @@ -0,0 +1,5 @@ +export * from "./aws"; +export * from "./lib/aws/partition"; +export * from "./lib/isIpAddress"; +export * from "./resolveEndpoint"; +export * from "./types"; diff --git a/amplify/functions/deleteDocument/node_modules/@aws-sdk/util-endpoints/dist-types/lib/aws/index.d.ts b/amplify/functions/deleteDocument/node_modules/@aws-sdk/util-endpoints/dist-types/lib/aws/index.d.ts new file mode 100644 index 0000000..03be049 --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@aws-sdk/util-endpoints/dist-types/lib/aws/index.d.ts @@ -0,0 +1,3 @@ +export * from "./isVirtualHostableS3Bucket"; +export * from "./parseArn"; +export * from "./partition"; diff --git a/amplify/functions/deleteDocument/node_modules/@aws-sdk/util-endpoints/dist-types/lib/aws/isVirtualHostableS3Bucket.d.ts b/amplify/functions/deleteDocument/node_modules/@aws-sdk/util-endpoints/dist-types/lib/aws/isVirtualHostableS3Bucket.d.ts new file mode 100644 index 0000000..25d46e4 --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@aws-sdk/util-endpoints/dist-types/lib/aws/isVirtualHostableS3Bucket.d.ts @@ -0,0 +1,5 @@ +/** + * Evaluates whether a string is a DNS compatible bucket name and can be used with + * virtual hosted style addressing. 
+ */ +export declare const isVirtualHostableS3Bucket: (value: string, allowSubDomains?: boolean) => boolean; diff --git a/amplify/functions/deleteDocument/node_modules/@aws-sdk/util-endpoints/dist-types/lib/aws/parseArn.d.ts b/amplify/functions/deleteDocument/node_modules/@aws-sdk/util-endpoints/dist-types/lib/aws/parseArn.d.ts new file mode 100644 index 0000000..fa5af83 --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@aws-sdk/util-endpoints/dist-types/lib/aws/parseArn.d.ts @@ -0,0 +1,7 @@ +import { EndpointARN } from "@smithy/types"; +/** + * Evaluates a single string argument value, and returns an object containing + * details about the parsed ARN. + * If the input was not a valid ARN, the function returns null. + */ +export declare const parseArn: (value: string) => EndpointARN | null; diff --git a/amplify/functions/deleteDocument/node_modules/@aws-sdk/util-endpoints/dist-types/lib/aws/partition.d.ts b/amplify/functions/deleteDocument/node_modules/@aws-sdk/util-endpoints/dist-types/lib/aws/partition.d.ts new file mode 100644 index 0000000..96d14e4 --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@aws-sdk/util-endpoints/dist-types/lib/aws/partition.d.ts @@ -0,0 +1,38 @@ +import { EndpointPartition } from "@smithy/types"; +export type PartitionsInfo = { + partitions: Array<{ + id: string; + outputs: { + dnsSuffix: string; + dualStackDnsSuffix: string; + name: string; + supportsDualStack: boolean; + supportsFIPS: boolean; + }; + regionRegex: string; + regions: Record; + }>; +}; +/** + * Evaluates a single string argument value as a region, and matches the + * string value to an AWS partition. + * The matcher MUST always return a successful object describing the partition + * that the region has been determined to be a part of. + */ +export declare const partition: (value: string) => EndpointPartition; +/** + * Set custom partitions.json data. 
+ * @internal + */ +export declare const setPartitionInfo: (partitionsInfo: PartitionsInfo, userAgentPrefix?: string) => void; +/** + * Reset to the default partitions.json data. + * @internal + */ +export declare const useDefaultPartitionInfo: () => void; +/** + * @internal + */ +export declare const getUserAgentPrefix: () => string; diff --git a/amplify/functions/deleteDocument/node_modules/@aws-sdk/util-endpoints/dist-types/lib/isIpAddress.d.ts b/amplify/functions/deleteDocument/node_modules/@aws-sdk/util-endpoints/dist-types/lib/isIpAddress.d.ts new file mode 100644 index 0000000..59bfcd8 --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@aws-sdk/util-endpoints/dist-types/lib/isIpAddress.d.ts @@ -0,0 +1 @@ +export { isIpAddress } from "@smithy/util-endpoints"; diff --git a/amplify/functions/deleteDocument/node_modules/@aws-sdk/util-endpoints/dist-types/resolveEndpoint.d.ts b/amplify/functions/deleteDocument/node_modules/@aws-sdk/util-endpoints/dist-types/resolveEndpoint.d.ts new file mode 100644 index 0000000..e2453f7 --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@aws-sdk/util-endpoints/dist-types/resolveEndpoint.d.ts @@ -0,0 +1 @@ +export { resolveEndpoint } from "@smithy/util-endpoints"; diff --git a/amplify/functions/deleteDocument/node_modules/@aws-sdk/util-endpoints/dist-types/ts3.4/aws.d.ts b/amplify/functions/deleteDocument/node_modules/@aws-sdk/util-endpoints/dist-types/ts3.4/aws.d.ts new file mode 100644 index 0000000..13c64a9 --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@aws-sdk/util-endpoints/dist-types/ts3.4/aws.d.ts @@ -0,0 +1,2 @@ +import { EndpointFunctions } from "@smithy/util-endpoints"; +export declare const awsEndpointFunctions: EndpointFunctions; diff --git a/amplify/functions/deleteDocument/node_modules/@aws-sdk/util-endpoints/dist-types/ts3.4/index.d.ts b/amplify/functions/deleteDocument/node_modules/@aws-sdk/util-endpoints/dist-types/ts3.4/index.d.ts new file mode 100644 index 
0000000..d046d90 --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@aws-sdk/util-endpoints/dist-types/ts3.4/index.d.ts @@ -0,0 +1,5 @@ +export * from "./aws"; +export * from "./lib/aws/partition"; +export * from "./lib/isIpAddress"; +export * from "./resolveEndpoint"; +export * from "./types"; diff --git a/amplify/functions/deleteDocument/node_modules/@aws-sdk/util-endpoints/dist-types/ts3.4/lib/aws/index.d.ts b/amplify/functions/deleteDocument/node_modules/@aws-sdk/util-endpoints/dist-types/ts3.4/lib/aws/index.d.ts new file mode 100644 index 0000000..03be049 --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@aws-sdk/util-endpoints/dist-types/ts3.4/lib/aws/index.d.ts @@ -0,0 +1,3 @@ +export * from "./isVirtualHostableS3Bucket"; +export * from "./parseArn"; +export * from "./partition"; diff --git a/amplify/functions/deleteDocument/node_modules/@aws-sdk/util-endpoints/dist-types/ts3.4/lib/aws/isVirtualHostableS3Bucket.d.ts b/amplify/functions/deleteDocument/node_modules/@aws-sdk/util-endpoints/dist-types/ts3.4/lib/aws/isVirtualHostableS3Bucket.d.ts new file mode 100644 index 0000000..5ef3296 --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@aws-sdk/util-endpoints/dist-types/ts3.4/lib/aws/isVirtualHostableS3Bucket.d.ts @@ -0,0 +1,4 @@ +export declare const isVirtualHostableS3Bucket: ( + value: string, + allowSubDomains?: boolean +) => boolean; diff --git a/amplify/functions/deleteDocument/node_modules/@aws-sdk/util-endpoints/dist-types/ts3.4/lib/aws/parseArn.d.ts b/amplify/functions/deleteDocument/node_modules/@aws-sdk/util-endpoints/dist-types/ts3.4/lib/aws/parseArn.d.ts new file mode 100644 index 0000000..690d459 --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@aws-sdk/util-endpoints/dist-types/ts3.4/lib/aws/parseArn.d.ts @@ -0,0 +1,2 @@ +import { EndpointARN } from "@smithy/types"; +export declare const parseArn: (value: string) => EndpointARN | null; diff --git 
a/amplify/functions/deleteDocument/node_modules/@aws-sdk/util-endpoints/dist-types/ts3.4/lib/aws/partition.d.ts b/amplify/functions/deleteDocument/node_modules/@aws-sdk/util-endpoints/dist-types/ts3.4/lib/aws/partition.d.ts new file mode 100644 index 0000000..0683113 --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@aws-sdk/util-endpoints/dist-types/ts3.4/lib/aws/partition.d.ts @@ -0,0 +1,28 @@ +import { EndpointPartition } from "@smithy/types"; +export type PartitionsInfo = { + partitions: Array<{ + id: string; + outputs: { + dnsSuffix: string; + dualStackDnsSuffix: string; + name: string; + supportsDualStack: boolean; + supportsFIPS: boolean; + }; + regionRegex: string; + regions: Record< + string, + | { + description?: string; + } + | undefined + >; + }>; +}; +export declare const partition: (value: string) => EndpointPartition; +export declare const setPartitionInfo: ( + partitionsInfo: PartitionsInfo, + userAgentPrefix?: string +) => void; +export declare const useDefaultPartitionInfo: () => void; +export declare const getUserAgentPrefix: () => string; diff --git a/amplify/functions/deleteDocument/node_modules/@aws-sdk/util-endpoints/dist-types/ts3.4/lib/isIpAddress.d.ts b/amplify/functions/deleteDocument/node_modules/@aws-sdk/util-endpoints/dist-types/ts3.4/lib/isIpAddress.d.ts new file mode 100644 index 0000000..59bfcd8 --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@aws-sdk/util-endpoints/dist-types/ts3.4/lib/isIpAddress.d.ts @@ -0,0 +1 @@ +export { isIpAddress } from "@smithy/util-endpoints"; diff --git a/amplify/functions/deleteDocument/node_modules/@aws-sdk/util-endpoints/dist-types/ts3.4/resolveEndpoint.d.ts b/amplify/functions/deleteDocument/node_modules/@aws-sdk/util-endpoints/dist-types/ts3.4/resolveEndpoint.d.ts new file mode 100644 index 0000000..e2453f7 --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@aws-sdk/util-endpoints/dist-types/ts3.4/resolveEndpoint.d.ts @@ -0,0 +1 @@ +export { 
resolveEndpoint } from "@smithy/util-endpoints"; diff --git a/amplify/functions/deleteDocument/node_modules/@aws-sdk/util-endpoints/dist-types/ts3.4/types/EndpointError.d.ts b/amplify/functions/deleteDocument/node_modules/@aws-sdk/util-endpoints/dist-types/ts3.4/types/EndpointError.d.ts new file mode 100644 index 0000000..521e688 --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@aws-sdk/util-endpoints/dist-types/ts3.4/types/EndpointError.d.ts @@ -0,0 +1 @@ +export { EndpointError } from "@smithy/util-endpoints"; diff --git a/amplify/functions/deleteDocument/node_modules/@aws-sdk/util-endpoints/dist-types/ts3.4/types/EndpointRuleObject.d.ts b/amplify/functions/deleteDocument/node_modules/@aws-sdk/util-endpoints/dist-types/ts3.4/types/EndpointRuleObject.d.ts new file mode 100644 index 0000000..b48af7f --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@aws-sdk/util-endpoints/dist-types/ts3.4/types/EndpointRuleObject.d.ts @@ -0,0 +1,6 @@ +export { + EndpointObjectProperties, + EndpointObjectHeaders, + EndpointObject, + EndpointRuleObject, +} from "@smithy/util-endpoints"; diff --git a/amplify/functions/deleteDocument/node_modules/@aws-sdk/util-endpoints/dist-types/ts3.4/types/ErrorRuleObject.d.ts b/amplify/functions/deleteDocument/node_modules/@aws-sdk/util-endpoints/dist-types/ts3.4/types/ErrorRuleObject.d.ts new file mode 100644 index 0000000..e7b8881 --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@aws-sdk/util-endpoints/dist-types/ts3.4/types/ErrorRuleObject.d.ts @@ -0,0 +1 @@ +export { ErrorRuleObject } from "@smithy/util-endpoints"; diff --git a/amplify/functions/deleteDocument/node_modules/@aws-sdk/util-endpoints/dist-types/ts3.4/types/RuleSetObject.d.ts b/amplify/functions/deleteDocument/node_modules/@aws-sdk/util-endpoints/dist-types/ts3.4/types/RuleSetObject.d.ts new file mode 100644 index 0000000..2a489c6 --- /dev/null +++ 
b/amplify/functions/deleteDocument/node_modules/@aws-sdk/util-endpoints/dist-types/ts3.4/types/RuleSetObject.d.ts @@ -0,0 +1,5 @@ +export { + DeprecatedObject, + ParameterObject, + RuleSetObject, +} from "@smithy/util-endpoints"; diff --git a/amplify/functions/deleteDocument/node_modules/@aws-sdk/util-endpoints/dist-types/ts3.4/types/TreeRuleObject.d.ts b/amplify/functions/deleteDocument/node_modules/@aws-sdk/util-endpoints/dist-types/ts3.4/types/TreeRuleObject.d.ts new file mode 100644 index 0000000..716ddcf --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@aws-sdk/util-endpoints/dist-types/ts3.4/types/TreeRuleObject.d.ts @@ -0,0 +1 @@ +export { RuleSetRules, TreeRuleObject } from "@smithy/util-endpoints"; diff --git a/amplify/functions/deleteDocument/node_modules/@aws-sdk/util-endpoints/dist-types/ts3.4/types/index.d.ts b/amplify/functions/deleteDocument/node_modules/@aws-sdk/util-endpoints/dist-types/ts3.4/types/index.d.ts new file mode 100644 index 0000000..daba501 --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@aws-sdk/util-endpoints/dist-types/ts3.4/types/index.d.ts @@ -0,0 +1,6 @@ +export * from "./EndpointError"; +export * from "./EndpointRuleObject"; +export * from "./ErrorRuleObject"; +export * from "./RuleSetObject"; +export * from "./TreeRuleObject"; +export * from "./shared"; diff --git a/amplify/functions/deleteDocument/node_modules/@aws-sdk/util-endpoints/dist-types/ts3.4/types/shared.d.ts b/amplify/functions/deleteDocument/node_modules/@aws-sdk/util-endpoints/dist-types/ts3.4/types/shared.d.ts new file mode 100644 index 0000000..cfd2248 --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@aws-sdk/util-endpoints/dist-types/ts3.4/types/shared.d.ts @@ -0,0 +1,12 @@ +export { + ReferenceObject, + FunctionObject, + FunctionArgv, + FunctionReturn, + ConditionObject, + Expression, + EndpointParams, + EndpointResolverOptions, + ReferenceRecord, + EvaluateOptions, +} from "@smithy/util-endpoints"; diff --git 
a/amplify/functions/deleteDocument/node_modules/@aws-sdk/util-endpoints/dist-types/types/EndpointError.d.ts b/amplify/functions/deleteDocument/node_modules/@aws-sdk/util-endpoints/dist-types/types/EndpointError.d.ts new file mode 100644 index 0000000..521e688 --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@aws-sdk/util-endpoints/dist-types/types/EndpointError.d.ts @@ -0,0 +1 @@ +export { EndpointError } from "@smithy/util-endpoints"; diff --git a/amplify/functions/deleteDocument/node_modules/@aws-sdk/util-endpoints/dist-types/types/EndpointRuleObject.d.ts b/amplify/functions/deleteDocument/node_modules/@aws-sdk/util-endpoints/dist-types/types/EndpointRuleObject.d.ts new file mode 100644 index 0000000..ef666fe --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@aws-sdk/util-endpoints/dist-types/types/EndpointRuleObject.d.ts @@ -0,0 +1 @@ +export { EndpointObjectProperties, EndpointObjectHeaders, EndpointObject, EndpointRuleObject, } from "@smithy/util-endpoints"; diff --git a/amplify/functions/deleteDocument/node_modules/@aws-sdk/util-endpoints/dist-types/types/ErrorRuleObject.d.ts b/amplify/functions/deleteDocument/node_modules/@aws-sdk/util-endpoints/dist-types/types/ErrorRuleObject.d.ts new file mode 100644 index 0000000..e7b8881 --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@aws-sdk/util-endpoints/dist-types/types/ErrorRuleObject.d.ts @@ -0,0 +1 @@ +export { ErrorRuleObject } from "@smithy/util-endpoints"; diff --git a/amplify/functions/deleteDocument/node_modules/@aws-sdk/util-endpoints/dist-types/types/RuleSetObject.d.ts b/amplify/functions/deleteDocument/node_modules/@aws-sdk/util-endpoints/dist-types/types/RuleSetObject.d.ts new file mode 100644 index 0000000..c052af0 --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@aws-sdk/util-endpoints/dist-types/types/RuleSetObject.d.ts @@ -0,0 +1 @@ +export { DeprecatedObject, ParameterObject, RuleSetObject } from "@smithy/util-endpoints"; diff 
--git a/amplify/functions/deleteDocument/node_modules/@aws-sdk/util-endpoints/dist-types/types/TreeRuleObject.d.ts b/amplify/functions/deleteDocument/node_modules/@aws-sdk/util-endpoints/dist-types/types/TreeRuleObject.d.ts new file mode 100644 index 0000000..716ddcf --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@aws-sdk/util-endpoints/dist-types/types/TreeRuleObject.d.ts @@ -0,0 +1 @@ +export { RuleSetRules, TreeRuleObject } from "@smithy/util-endpoints"; diff --git a/amplify/functions/deleteDocument/node_modules/@aws-sdk/util-endpoints/dist-types/types/index.d.ts b/amplify/functions/deleteDocument/node_modules/@aws-sdk/util-endpoints/dist-types/types/index.d.ts new file mode 100644 index 0000000..daba501 --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@aws-sdk/util-endpoints/dist-types/types/index.d.ts @@ -0,0 +1,6 @@ +export * from "./EndpointError"; +export * from "./EndpointRuleObject"; +export * from "./ErrorRuleObject"; +export * from "./RuleSetObject"; +export * from "./TreeRuleObject"; +export * from "./shared"; diff --git a/amplify/functions/deleteDocument/node_modules/@aws-sdk/util-endpoints/dist-types/types/shared.d.ts b/amplify/functions/deleteDocument/node_modules/@aws-sdk/util-endpoints/dist-types/types/shared.d.ts new file mode 100644 index 0000000..af7cc53 --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@aws-sdk/util-endpoints/dist-types/types/shared.d.ts @@ -0,0 +1 @@ +export { ReferenceObject, FunctionObject, FunctionArgv, FunctionReturn, ConditionObject, Expression, EndpointParams, EndpointResolverOptions, ReferenceRecord, EvaluateOptions, } from "@smithy/util-endpoints"; diff --git a/amplify/functions/deleteDocument/node_modules/@aws-sdk/util-endpoints/package.json b/amplify/functions/deleteDocument/node_modules/@aws-sdk/util-endpoints/package.json new file mode 100644 index 0000000..36d8d2a --- /dev/null +++ 
b/amplify/functions/deleteDocument/node_modules/@aws-sdk/util-endpoints/package.json @@ -0,0 +1,58 @@ +{ + "name": "@aws-sdk/util-endpoints", + "version": "3.787.0", + "description": "Utilities to help with endpoint resolution", + "main": "./dist-cjs/index.js", + "module": "./dist-es/index.js", + "types": "./dist-types/index.d.ts", + "scripts": { + "build": "concurrently 'yarn:build:cjs' 'yarn:build:es' 'yarn:build:types'", + "build:cjs": "node ../../scripts/compilation/inline util-endpoints", + "build:es": "tsc -p tsconfig.es.json", + "build:include:deps": "lerna run --scope $npm_package_name --include-dependencies build", + "build:types": "tsc -p tsconfig.types.json", + "build:types:downlevel": "downlevel-dts dist-types dist-types/ts3.4", + "clean": "rimraf ./dist-* && rimraf *.tsbuildinfo", + "test": "yarn g:vitest run", + "test:integration": "yarn g:vitest run -c vitest.config.integ.ts", + "test:watch": "yarn g:vitest watch", + "test:integration:watch": "yarn g:vitest watch -c vitest.config.integ.ts" + }, + "author": { + "name": "AWS SDK for JavaScript Team", + "url": "https://aws.amazon.com/javascript/" + }, + "license": "Apache-2.0", + "dependencies": { + "@aws-sdk/types": "3.775.0", + "@smithy/types": "^4.2.0", + "@smithy/util-endpoints": "^3.0.2", + "tslib": "^2.6.2" + }, + "engines": { + "node": ">=18.0.0" + }, + "typesVersions": { + "<4.0": { + "dist-types/*": [ + "dist-types/ts3.4/*" + ] + } + }, + "files": [ + "dist-*/**" + ], + "homepage": "https://github.com/aws/aws-sdk-js-v3/tree/main/packages/util-endpoints", + "repository": { + "type": "git", + "url": "https://github.com/aws/aws-sdk-js-v3.git", + "directory": "packages/util-endpoints" + }, + "devDependencies": { + "@tsconfig/recommended": "1.0.1", + "concurrently": "7.0.0", + "downlevel-dts": "0.10.1", + "rimraf": "3.0.2", + "typescript": "~5.2.2" + } +} diff --git a/amplify/functions/deleteDocument/node_modules/@aws-sdk/util-locate-window/LICENSE 
b/amplify/functions/deleteDocument/node_modules/@aws-sdk/util-locate-window/LICENSE new file mode 100644 index 0000000..7b6491b --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@aws-sdk/util-locate-window/LICENSE @@ -0,0 +1,201 @@ +Apache License + Version 2.0, January 2004 + http://www.apache.org/licenses/ + + TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION + + 1. Definitions. + + "License" shall mean the terms and conditions for use, reproduction, + and distribution as defined by Sections 1 through 9 of this document. + + "Licensor" shall mean the copyright owner or entity authorized by + the copyright owner that is granting the License. + + "Legal Entity" shall mean the union of the acting entity and all + other entities that control, are controlled by, or are under common + control with that entity. For the purposes of this definition, + "control" means (i) the power, direct or indirect, to cause the + direction or management of such entity, whether by contract or + otherwise, or (ii) ownership of fifty percent (50%) or more of the + outstanding shares, or (iii) beneficial ownership of such entity. + + "You" (or "Your") shall mean an individual or Legal Entity + exercising permissions granted by this License. + + "Source" form shall mean the preferred form for making modifications, + including but not limited to software source code, documentation + source, and configuration files. + + "Object" form shall mean any form resulting from mechanical + transformation or translation of a Source form, including but + not limited to compiled object code, generated documentation, + and conversions to other media types. + + "Work" shall mean the work of authorship, whether in Source or + Object form, made available under the License, as indicated by a + copyright notice that is included in or attached to the work + (an example is provided in the Appendix below). 
+ + "Derivative Works" shall mean any work, whether in Source or Object + form, that is based on (or derived from) the Work and for which the + editorial revisions, annotations, elaborations, or other modifications + represent, as a whole, an original work of authorship. For the purposes + of this License, Derivative Works shall not include works that remain + separable from, or merely link (or bind by name) to the interfaces of, + the Work and Derivative Works thereof. + + "Contribution" shall mean any work of authorship, including + the original version of the Work and any modifications or additions + to that Work or Derivative Works thereof, that is intentionally + submitted to Licensor for inclusion in the Work by the copyright owner + or by an individual or Legal Entity authorized to submit on behalf of + the copyright owner. For the purposes of this definition, "submitted" + means any form of electronic, verbal, or written communication sent + to the Licensor or its representatives, including but not limited to + communication on electronic mailing lists, source code control systems, + and issue tracking systems that are managed by, or on behalf of, the + Licensor for the purpose of discussing and improving the Work, but + excluding communication that is conspicuously marked or otherwise + designated in writing by the copyright owner as "Not a Contribution." + + "Contributor" shall mean Licensor and any individual or Legal Entity + on behalf of whom a Contribution has been received by Licensor and + subsequently incorporated within the Work. + + 2. Grant of Copyright License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + copyright license to reproduce, prepare Derivative Works of, + publicly display, publicly perform, sublicense, and distribute the + Work and such Derivative Works in Source or Object form. + + 3. 
Grant of Patent License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + (except as stated in this section) patent license to make, have made, + use, offer to sell, sell, import, and otherwise transfer the Work, + where such license applies only to those patent claims licensable + by such Contributor that are necessarily infringed by their + Contribution(s) alone or by combination of their Contribution(s) + with the Work to which such Contribution(s) was submitted. If You + institute patent litigation against any entity (including a + cross-claim or counterclaim in a lawsuit) alleging that the Work + or a Contribution incorporated within the Work constitutes direct + or contributory patent infringement, then any patent licenses + granted to You under this License for that Work shall terminate + as of the date such litigation is filed. + + 4. Redistribution. You may reproduce and distribute copies of the + Work or Derivative Works thereof in any medium, with or without + modifications, and in Source or Object form, provided that You + meet the following conditions: + + (a) You must give any other recipients of the Work or + Derivative Works a copy of this License; and + + (b) You must cause any modified files to carry prominent notices + stating that You changed the files; and + + (c) You must retain, in the Source form of any Derivative Works + that You distribute, all copyright, patent, trademark, and + attribution notices from the Source form of the Work, + excluding those notices that do not pertain to any part of + the Derivative Works; and + + (d) If the Work includes a "NOTICE" text file as part of its + distribution, then any Derivative Works that You distribute must + include a readable copy of the attribution notices contained + within such NOTICE file, excluding those notices that do not + pertain to any part of the Derivative 
Works, in at least one + of the following places: within a NOTICE text file distributed + as part of the Derivative Works; within the Source form or + documentation, if provided along with the Derivative Works; or, + within a display generated by the Derivative Works, if and + wherever such third-party notices normally appear. The contents + of the NOTICE file are for informational purposes only and + do not modify the License. You may add Your own attribution + notices within Derivative Works that You distribute, alongside + or as an addendum to the NOTICE text from the Work, provided + that such additional attribution notices cannot be construed + as modifying the License. + + You may add Your own copyright statement to Your modifications and + may provide additional or different license terms and conditions + for use, reproduction, or distribution of Your modifications, or + for any such Derivative Works as a whole, provided Your use, + reproduction, and distribution of the Work otherwise complies with + the conditions stated in this License. + + 5. Submission of Contributions. Unless You explicitly state otherwise, + any Contribution intentionally submitted for inclusion in the Work + by You to the Licensor shall be under the terms and conditions of + this License, without any additional terms or conditions. + Notwithstanding the above, nothing herein shall supersede or modify + the terms of any separate license agreement you may have executed + with Licensor regarding such Contributions. + + 6. Trademarks. This License does not grant permission to use the trade + names, trademarks, service marks, or product names of the Licensor, + except as required for reasonable and customary use in describing the + origin of the Work and reproducing the content of the NOTICE file. + + 7. Disclaimer of Warranty. 
Unless required by applicable law or + agreed to in writing, Licensor provides the Work (and each + Contributor provides its Contributions) on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or + implied, including, without limitation, any warranties or conditions + of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A + PARTICULAR PURPOSE. You are solely responsible for determining the + appropriateness of using or redistributing the Work and assume any + risks associated with Your exercise of permissions under this License. + + 8. Limitation of Liability. In no event and under no legal theory, + whether in tort (including negligence), contract, or otherwise, + unless required by applicable law (such as deliberate and grossly + negligent acts) or agreed to in writing, shall any Contributor be + liable to You for damages, including any direct, indirect, special, + incidental, or consequential damages of any character arising as a + result of this License or out of the use or inability to use the + Work (including but not limited to damages for loss of goodwill, + work stoppage, computer failure or malfunction, or any and all + other commercial damages or losses), even if such Contributor + has been advised of the possibility of such damages. + + 9. Accepting Warranty or Additional Liability. While redistributing + the Work or Derivative Works thereof, You may choose to offer, + and charge a fee for, acceptance of support, warranty, indemnity, + or other liability obligations and/or rights consistent with this + License. However, in accepting such obligations, You may act only + on Your own behalf and on Your sole responsibility, not on behalf + of any other Contributor, and only if You agree to indemnify, + defend, and hold each Contributor harmless for any liability + incurred by, or claims asserted against, such Contributor by reason + of your accepting any such warranty or additional liability. 
+ + END OF TERMS AND CONDITIONS + + APPENDIX: How to apply the Apache License to your work. + + To apply the Apache License to your work, attach the following + boilerplate notice, with the fields enclosed by brackets "{}" + replaced with your own identifying information. (Don't include + the brackets!) The text should be enclosed in the appropriate + comment syntax for the file format. We also recommend that a + file or class name and description of purpose be included on the + same "printed page" as the copyright notice for easier + identification within third-party archives. + + Copyright 2018-2020 Amazon.com, Inc. or its affiliates. All Rights Reserved. + + Licensed under the Apache License, Version 2.0 (the "License"); + you may not use this file except in compliance with the License. + You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + + Unless required by applicable law or agreed to in writing, software + distributed under the License is distributed on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + See the License for the specific language governing permissions and + limitations under the License. 
\ No newline at end of file diff --git a/amplify/functions/deleteDocument/node_modules/@aws-sdk/util-locate-window/README.md b/amplify/functions/deleteDocument/node_modules/@aws-sdk/util-locate-window/README.md new file mode 100644 index 0000000..cac53d3 --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@aws-sdk/util-locate-window/README.md @@ -0,0 +1,4 @@ +# @aws-sdk/util-locate-window + +[![NPM version](https://img.shields.io/npm/v/@aws-sdk/util-locate-window/latest.svg)](https://www.npmjs.com/package/@aws-sdk/util-locate-window) +[![NPM downloads](https://img.shields.io/npm/dm/@aws-sdk/util-locate-window.svg)](https://www.npmjs.com/package/@aws-sdk/util-locate-window) diff --git a/amplify/functions/deleteDocument/node_modules/@aws-sdk/util-locate-window/dist-cjs/index.js b/amplify/functions/deleteDocument/node_modules/@aws-sdk/util-locate-window/dist-cjs/index.js new file mode 100644 index 0000000..95a6423 --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@aws-sdk/util-locate-window/dist-cjs/index.js @@ -0,0 +1,42 @@ +"use strict"; +var __defProp = Object.defineProperty; +var __getOwnPropDesc = Object.getOwnPropertyDescriptor; +var __getOwnPropNames = Object.getOwnPropertyNames; +var __hasOwnProp = Object.prototype.hasOwnProperty; +var __name = (target, value) => __defProp(target, "name", { value, configurable: true }); +var __export = (target, all) => { + for (var name in all) + __defProp(target, name, { get: all[name], enumerable: true }); +}; +var __copyProps = (to, from, except, desc) => { + if (from && typeof from === "object" || typeof from === "function") { + for (let key of __getOwnPropNames(from)) + if (!__hasOwnProp.call(to, key) && key !== except) + __defProp(to, key, { get: () => from[key], enumerable: !(desc = __getOwnPropDesc(from, key)) || desc.enumerable }); + } + return to; +}; +var __toCommonJS = (mod) => __copyProps(__defProp({}, "__esModule", { value: true }), mod); + +// src/index.ts +var src_exports = {}; 
+__export(src_exports, { + locateWindow: () => locateWindow +}); +module.exports = __toCommonJS(src_exports); +var fallbackWindow = {}; +function locateWindow() { + if (typeof window !== "undefined") { + return window; + } else if (typeof self !== "undefined") { + return self; + } + return fallbackWindow; +} +__name(locateWindow, "locateWindow"); +// Annotate the CommonJS export names for ESM import in node: + +0 && (module.exports = { + locateWindow +}); + diff --git a/amplify/functions/deleteDocument/node_modules/@aws-sdk/util-locate-window/dist-es/index.js b/amplify/functions/deleteDocument/node_modules/@aws-sdk/util-locate-window/dist-es/index.js new file mode 100644 index 0000000..a51e644 --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@aws-sdk/util-locate-window/dist-es/index.js @@ -0,0 +1,10 @@ +const fallbackWindow = {}; +export function locateWindow() { + if (typeof window !== "undefined") { + return window; + } + else if (typeof self !== "undefined") { + return self; + } + return fallbackWindow; +} diff --git a/amplify/functions/deleteDocument/node_modules/@aws-sdk/util-locate-window/dist-types/index.d.ts b/amplify/functions/deleteDocument/node_modules/@aws-sdk/util-locate-window/dist-types/index.d.ts new file mode 100644 index 0000000..2b02d7f --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@aws-sdk/util-locate-window/dist-types/index.d.ts @@ -0,0 +1,6 @@ +/** + * Locates the global scope for a browser or browser-like environment. If + * neither `window` nor `self` is defined by the environment, the same object + * will be returned on each invocation. 
+ */ +export declare function locateWindow(): Window; diff --git a/amplify/functions/deleteDocument/node_modules/@aws-sdk/util-locate-window/dist-types/ts3.4/index.d.ts b/amplify/functions/deleteDocument/node_modules/@aws-sdk/util-locate-window/dist-types/ts3.4/index.d.ts new file mode 100644 index 0000000..a5bbba3 --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@aws-sdk/util-locate-window/dist-types/ts3.4/index.d.ts @@ -0,0 +1 @@ +export declare function locateWindow(): Window; diff --git a/amplify/functions/deleteDocument/node_modules/@aws-sdk/util-locate-window/package.json b/amplify/functions/deleteDocument/node_modules/@aws-sdk/util-locate-window/package.json new file mode 100644 index 0000000..2835b09 --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@aws-sdk/util-locate-window/package.json @@ -0,0 +1,53 @@ +{ + "name": "@aws-sdk/util-locate-window", + "version": "3.723.0", + "scripts": { + "build": "concurrently 'yarn:build:cjs' 'yarn:build:es' 'yarn:build:types'", + "build:cjs": "node ../../scripts/compilation/inline util-locate-window", + "build:es": "tsc -p tsconfig.es.json", + "build:include:deps": "lerna run --scope $npm_package_name --include-dependencies build", + "build:types": "tsc -p tsconfig.types.json", + "build:types:downlevel": "downlevel-dts dist-types dist-types/ts3.4", + "clean": "rimraf ./dist-* && rimraf *.tsbuildinfo", + "test": "yarn g:vitest run", + "test:watch": "yarn g:vitest watch" + }, + "author": { + "name": "AWS SDK for JavaScript Team", + "url": "https://aws.amazon.com/javascript/" + }, + "license": "Apache-2.0", + "dependencies": { + "tslib": "^2.6.2" + }, + "devDependencies": { + "@tsconfig/recommended": "1.0.1", + "@types/node": "^18.19.69", + "concurrently": "7.0.0", + "downlevel-dts": "0.10.1", + "rimraf": "3.0.2", + "typescript": "~5.2.2" + }, + "main": "./dist-cjs/index.js", + "module": "./dist-es/index.js", + "types": "./dist-types/index.d.ts", + "engines": { + "node": ">=18.0.0" + }, + 
"typesVersions": { + "<4.0": { + "dist-types/*": [ + "dist-types/ts3.4/*" + ] + } + }, + "files": [ + "dist-*/**" + ], + "homepage": "https://github.com/aws/aws-sdk-js-v3/tree/main/packages/util-locate-window", + "repository": { + "type": "git", + "url": "https://github.com/aws/aws-sdk-js-v3.git", + "directory": "packages/util-locate-window" + } +} diff --git a/amplify/functions/deleteDocument/node_modules/@aws-sdk/util-user-agent-browser/LICENSE b/amplify/functions/deleteDocument/node_modules/@aws-sdk/util-user-agent-browser/LICENSE new file mode 100644 index 0000000..dd65ae0 --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@aws-sdk/util-user-agent-browser/LICENSE @@ -0,0 +1,201 @@ + Apache License + Version 2.0, January 2004 + http://www.apache.org/licenses/ + + TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION + + 1. Definitions. + + "License" shall mean the terms and conditions for use, reproduction, + and distribution as defined by Sections 1 through 9 of this document. + + "Licensor" shall mean the copyright owner or entity authorized by + the copyright owner that is granting the License. + + "Legal Entity" shall mean the union of the acting entity and all + other entities that control, are controlled by, or are under common + control with that entity. For the purposes of this definition, + "control" means (i) the power, direct or indirect, to cause the + direction or management of such entity, whether by contract or + otherwise, or (ii) ownership of fifty percent (50%) or more of the + outstanding shares, or (iii) beneficial ownership of such entity. + + "You" (or "Your") shall mean an individual or Legal Entity + exercising permissions granted by this License. + + "Source" form shall mean the preferred form for making modifications, + including but not limited to software source code, documentation + source, and configuration files. 
+ + "Object" form shall mean any form resulting from mechanical + transformation or translation of a Source form, including but + not limited to compiled object code, generated documentation, + and conversions to other media types. + + "Work" shall mean the work of authorship, whether in Source or + Object form, made available under the License, as indicated by a + copyright notice that is included in or attached to the work + (an example is provided in the Appendix below). + + "Derivative Works" shall mean any work, whether in Source or Object + form, that is based on (or derived from) the Work and for which the + editorial revisions, annotations, elaborations, or other modifications + represent, as a whole, an original work of authorship. For the purposes + of this License, Derivative Works shall not include works that remain + separable from, or merely link (or bind by name) to the interfaces of, + the Work and Derivative Works thereof. + + "Contribution" shall mean any work of authorship, including + the original version of the Work and any modifications or additions + to that Work or Derivative Works thereof, that is intentionally + submitted to Licensor for inclusion in the Work by the copyright owner + or by an individual or Legal Entity authorized to submit on behalf of + the copyright owner. For the purposes of this definition, "submitted" + means any form of electronic, verbal, or written communication sent + to the Licensor or its representatives, including but not limited to + communication on electronic mailing lists, source code control systems, + and issue tracking systems that are managed by, or on behalf of, the + Licensor for the purpose of discussing and improving the Work, but + excluding communication that is conspicuously marked or otherwise + designated in writing by the copyright owner as "Not a Contribution." 
+ + "Contributor" shall mean Licensor and any individual or Legal Entity + on behalf of whom a Contribution has been received by Licensor and + subsequently incorporated within the Work. + + 2. Grant of Copyright License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + copyright license to reproduce, prepare Derivative Works of, + publicly display, publicly perform, sublicense, and distribute the + Work and such Derivative Works in Source or Object form. + + 3. Grant of Patent License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + (except as stated in this section) patent license to make, have made, + use, offer to sell, sell, import, and otherwise transfer the Work, + where such license applies only to those patent claims licensable + by such Contributor that are necessarily infringed by their + Contribution(s) alone or by combination of their Contribution(s) + with the Work to which such Contribution(s) was submitted. If You + institute patent litigation against any entity (including a + cross-claim or counterclaim in a lawsuit) alleging that the Work + or a Contribution incorporated within the Work constitutes direct + or contributory patent infringement, then any patent licenses + granted to You under this License for that Work shall terminate + as of the date such litigation is filed. + + 4. Redistribution. 
You may reproduce and distribute copies of the + Work or Derivative Works thereof in any medium, with or without + modifications, and in Source or Object form, provided that You + meet the following conditions: + + (a) You must give any other recipients of the Work or + Derivative Works a copy of this License; and + + (b) You must cause any modified files to carry prominent notices + stating that You changed the files; and + + (c) You must retain, in the Source form of any Derivative Works + that You distribute, all copyright, patent, trademark, and + attribution notices from the Source form of the Work, + excluding those notices that do not pertain to any part of + the Derivative Works; and + + (d) If the Work includes a "NOTICE" text file as part of its + distribution, then any Derivative Works that You distribute must + include a readable copy of the attribution notices contained + within such NOTICE file, excluding those notices that do not + pertain to any part of the Derivative Works, in at least one + of the following places: within a NOTICE text file distributed + as part of the Derivative Works; within the Source form or + documentation, if provided along with the Derivative Works; or, + within a display generated by the Derivative Works, if and + wherever such third-party notices normally appear. The contents + of the NOTICE file are for informational purposes only and + do not modify the License. You may add Your own attribution + notices within Derivative Works that You distribute, alongside + or as an addendum to the NOTICE text from the Work, provided + that such additional attribution notices cannot be construed + as modifying the License. 
+ + You may add Your own copyright statement to Your modifications and + may provide additional or different license terms and conditions + for use, reproduction, or distribution of Your modifications, or + for any such Derivative Works as a whole, provided Your use, + reproduction, and distribution of the Work otherwise complies with + the conditions stated in this License. + + 5. Submission of Contributions. Unless You explicitly state otherwise, + any Contribution intentionally submitted for inclusion in the Work + by You to the Licensor shall be under the terms and conditions of + this License, without any additional terms or conditions. + Notwithstanding the above, nothing herein shall supersede or modify + the terms of any separate license agreement you may have executed + with Licensor regarding such Contributions. + + 6. Trademarks. This License does not grant permission to use the trade + names, trademarks, service marks, or product names of the Licensor, + except as required for reasonable and customary use in describing the + origin of the Work and reproducing the content of the NOTICE file. + + 7. Disclaimer of Warranty. Unless required by applicable law or + agreed to in writing, Licensor provides the Work (and each + Contributor provides its Contributions) on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or + implied, including, without limitation, any warranties or conditions + of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A + PARTICULAR PURPOSE. You are solely responsible for determining the + appropriateness of using or redistributing the Work and assume any + risks associated with Your exercise of permissions under this License. + + 8. Limitation of Liability. 
In no event and under no legal theory, + whether in tort (including negligence), contract, or otherwise, + unless required by applicable law (such as deliberate and grossly + negligent acts) or agreed to in writing, shall any Contributor be + liable to You for damages, including any direct, indirect, special, + incidental, or consequential damages of any character arising as a + result of this License or out of the use or inability to use the + Work (including but not limited to damages for loss of goodwill, + work stoppage, computer failure or malfunction, or any and all + other commercial damages or losses), even if such Contributor + has been advised of the possibility of such damages. + + 9. Accepting Warranty or Additional Liability. While redistributing + the Work or Derivative Works thereof, You may choose to offer, + and charge a fee for, acceptance of support, warranty, indemnity, + or other liability obligations and/or rights consistent with this + License. However, in accepting such obligations, You may act only + on Your own behalf and on Your sole responsibility, not on behalf + of any other Contributor, and only if You agree to indemnify, + defend, and hold each Contributor harmless for any liability + incurred by, or claims asserted against, such Contributor by reason + of your accepting any such warranty or additional liability. + + END OF TERMS AND CONDITIONS + + APPENDIX: How to apply the Apache License to your work. + + To apply the Apache License to your work, attach the following + boilerplate notice, with the fields enclosed by brackets "{}" + replaced with your own identifying information. (Don't include + the brackets!) The text should be enclosed in the appropriate + comment syntax for the file format. We also recommend that a + file or class name and description of purpose be included on the + same "printed page" as the copyright notice for easier + identification within third-party archives. + + Copyright 2018-2020 Amazon.com, Inc. 
or its affiliates. All Rights Reserved. + + Licensed under the Apache License, Version 2.0 (the "License"); + you may not use this file except in compliance with the License. + You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + + Unless required by applicable law or agreed to in writing, software + distributed under the License is distributed on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + See the License for the specific language governing permissions and + limitations under the License. diff --git a/amplify/functions/deleteDocument/node_modules/@aws-sdk/util-user-agent-browser/README.md b/amplify/functions/deleteDocument/node_modules/@aws-sdk/util-user-agent-browser/README.md new file mode 100644 index 0000000..f2b6c62 --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@aws-sdk/util-user-agent-browser/README.md @@ -0,0 +1,10 @@ +# @aws-sdk/util-user-agent-browser + +[![NPM version](https://img.shields.io/npm/v/@aws-sdk/util-user-agent-browser/latest.svg)](https://www.npmjs.com/package/@aws-sdk/util-user-agent-browser) +[![NPM downloads](https://img.shields.io/npm/dm/@aws-sdk/util-user-agent-browser.svg)](https://www.npmjs.com/package/@aws-sdk/util-user-agent-browser) + +> An internal package + +## Usage + +You probably shouldn't, at least directly. 
diff --git a/amplify/functions/deleteDocument/node_modules/@aws-sdk/util-user-agent-browser/dist-cjs/configurations.js b/amplify/functions/deleteDocument/node_modules/@aws-sdk/util-user-agent-browser/dist-cjs/configurations.js new file mode 100644 index 0000000..c8ad2e5 --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@aws-sdk/util-user-agent-browser/dist-cjs/configurations.js @@ -0,0 +1,2 @@ +"use strict"; +Object.defineProperty(exports, "__esModule", { value: true }); diff --git a/amplify/functions/deleteDocument/node_modules/@aws-sdk/util-user-agent-browser/dist-cjs/index.js b/amplify/functions/deleteDocument/node_modules/@aws-sdk/util-user-agent-browser/dist-cjs/index.js new file mode 100644 index 0000000..aaf7621 --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@aws-sdk/util-user-agent-browser/dist-cjs/index.js @@ -0,0 +1,27 @@ +"use strict"; +Object.defineProperty(exports, "__esModule", { value: true }); +exports.defaultUserAgent = exports.createDefaultUserAgentProvider = void 0; +const tslib_1 = require("tslib"); +const bowser_1 = tslib_1.__importDefault(require("bowser")); +const createDefaultUserAgentProvider = ({ serviceId, clientVersion }) => async (config) => { + const parsedUA = typeof window !== "undefined" && window?.navigator?.userAgent + ? bowser_1.default.parse(window.navigator.userAgent) + : undefined; + const sections = [ + ["aws-sdk-js", clientVersion], + ["ua", "2.1"], + [`os/${parsedUA?.os?.name || "other"}`, parsedUA?.os?.version], + ["lang/js"], + ["md/browser", `${parsedUA?.browser?.name ?? "unknown"}_${parsedUA?.browser?.version ?? 
"unknown"}`], + ]; + if (serviceId) { + sections.push([`api/${serviceId}`, clientVersion]); + } + const appId = await config?.userAgentAppId?.(); + if (appId) { + sections.push([`app/${appId}`]); + } + return sections; +}; +exports.createDefaultUserAgentProvider = createDefaultUserAgentProvider; +exports.defaultUserAgent = exports.createDefaultUserAgentProvider; diff --git a/amplify/functions/deleteDocument/node_modules/@aws-sdk/util-user-agent-browser/dist-cjs/index.native.js b/amplify/functions/deleteDocument/node_modules/@aws-sdk/util-user-agent-browser/dist-cjs/index.native.js new file mode 100644 index 0000000..4d06e36 --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@aws-sdk/util-user-agent-browser/dist-cjs/index.native.js @@ -0,0 +1,22 @@ +"use strict"; +Object.defineProperty(exports, "__esModule", { value: true }); +exports.defaultUserAgent = exports.createDefaultUserAgentProvider = void 0; +const createDefaultUserAgentProvider = ({ serviceId, clientVersion }) => async (config) => { + const sections = [ + ["aws-sdk-js", clientVersion], + ["ua", "2.1"], + ["os/other"], + ["lang/js"], + ["md/rn"], + ]; + if (serviceId) { + sections.push([`api/${serviceId}`, clientVersion]); + } + const appId = await config?.userAgentAppId?.(); + if (appId) { + sections.push([`app/${appId}`]); + } + return sections; +}; +exports.createDefaultUserAgentProvider = createDefaultUserAgentProvider; +exports.defaultUserAgent = exports.createDefaultUserAgentProvider; diff --git a/amplify/functions/deleteDocument/node_modules/@aws-sdk/util-user-agent-browser/dist-es/configurations.js b/amplify/functions/deleteDocument/node_modules/@aws-sdk/util-user-agent-browser/dist-es/configurations.js new file mode 100644 index 0000000..cb0ff5c --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@aws-sdk/util-user-agent-browser/dist-es/configurations.js @@ -0,0 +1 @@ +export {}; diff --git 
a/amplify/functions/deleteDocument/node_modules/@aws-sdk/util-user-agent-browser/dist-es/index.js b/amplify/functions/deleteDocument/node_modules/@aws-sdk/util-user-agent-browser/dist-es/index.js new file mode 100644 index 0000000..1584d7e --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@aws-sdk/util-user-agent-browser/dist-es/index.js @@ -0,0 +1,22 @@ +import bowser from "bowser"; +export const createDefaultUserAgentProvider = ({ serviceId, clientVersion }) => async (config) => { + const parsedUA = typeof window !== "undefined" && window?.navigator?.userAgent + ? bowser.parse(window.navigator.userAgent) + : undefined; + const sections = [ + ["aws-sdk-js", clientVersion], + ["ua", "2.1"], + [`os/${parsedUA?.os?.name || "other"}`, parsedUA?.os?.version], + ["lang/js"], + ["md/browser", `${parsedUA?.browser?.name ?? "unknown"}_${parsedUA?.browser?.version ?? "unknown"}`], + ]; + if (serviceId) { + sections.push([`api/${serviceId}`, clientVersion]); + } + const appId = await config?.userAgentAppId?.(); + if (appId) { + sections.push([`app/${appId}`]); + } + return sections; +}; +export const defaultUserAgent = createDefaultUserAgentProvider; diff --git a/amplify/functions/deleteDocument/node_modules/@aws-sdk/util-user-agent-browser/dist-es/index.native.js b/amplify/functions/deleteDocument/node_modules/@aws-sdk/util-user-agent-browser/dist-es/index.native.js new file mode 100644 index 0000000..04c7ae5 --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@aws-sdk/util-user-agent-browser/dist-es/index.native.js @@ -0,0 +1,18 @@ +export const createDefaultUserAgentProvider = ({ serviceId, clientVersion }) => async (config) => { + const sections = [ + ["aws-sdk-js", clientVersion], + ["ua", "2.1"], + ["os/other"], + ["lang/js"], + ["md/rn"], + ]; + if (serviceId) { + sections.push([`api/${serviceId}`, clientVersion]); + } + const appId = await config?.userAgentAppId?.(); + if (appId) { + sections.push([`app/${appId}`]); + } + return 
sections; +}; +export const defaultUserAgent = createDefaultUserAgentProvider; diff --git a/amplify/functions/deleteDocument/node_modules/@aws-sdk/util-user-agent-browser/dist-types/configurations.d.ts b/amplify/functions/deleteDocument/node_modules/@aws-sdk/util-user-agent-browser/dist-types/configurations.d.ts new file mode 100644 index 0000000..00537a9 --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@aws-sdk/util-user-agent-browser/dist-types/configurations.d.ts @@ -0,0 +1,7 @@ +/** + * @internal + */ +export interface DefaultUserAgentOptions { + serviceId?: string; + clientVersion: string; +} diff --git a/amplify/functions/deleteDocument/node_modules/@aws-sdk/util-user-agent-browser/dist-types/index.d.ts b/amplify/functions/deleteDocument/node_modules/@aws-sdk/util-user-agent-browser/dist-types/index.d.ts new file mode 100644 index 0000000..fb107d4 --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@aws-sdk/util-user-agent-browser/dist-types/index.d.ts @@ -0,0 +1,17 @@ +import { Provider, UserAgent } from "@smithy/types"; +import { DefaultUserAgentOptions } from "./configurations"; +export interface PreviouslyResolved { + userAgentAppId: Provider; +} +/** + * @internal + * + * Default provider to the user agent in browsers. It's a best effort to infer + * the device information. 
It uses bowser library to detect the browser and version + */ +export declare const createDefaultUserAgentProvider: ({ serviceId, clientVersion }: DefaultUserAgentOptions) => (config?: PreviouslyResolved) => Promise; +/** + * @internal + * @deprecated use createDefaultUserAgentProvider + */ +export declare const defaultUserAgent: ({ serviceId, clientVersion }: DefaultUserAgentOptions) => (config?: PreviouslyResolved) => Promise; diff --git a/amplify/functions/deleteDocument/node_modules/@aws-sdk/util-user-agent-browser/dist-types/index.native.d.ts b/amplify/functions/deleteDocument/node_modules/@aws-sdk/util-user-agent-browser/dist-types/index.native.d.ts new file mode 100644 index 0000000..5b4926b --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@aws-sdk/util-user-agent-browser/dist-types/index.native.d.ts @@ -0,0 +1,17 @@ +import { Provider, UserAgent } from "@smithy/types"; +import { DefaultUserAgentOptions } from "./configurations"; +export interface PreviouslyResolved { + userAgentAppId: Provider; +} +/** + * @internal + * + * Default provider to the user agent in ReactNative. It's a best effort to infer + * the device information. 
It uses bowser library to detect the browser and virsion + */ +export declare const createDefaultUserAgentProvider: ({ serviceId, clientVersion }: DefaultUserAgentOptions) => (config?: PreviouslyResolved) => Promise; +/** + * @internal + * @deprecated use createDefaultUserAgentProvider + */ +export declare const defaultUserAgent: ({ serviceId, clientVersion }: DefaultUserAgentOptions) => (config?: PreviouslyResolved) => Promise; diff --git a/amplify/functions/deleteDocument/node_modules/@aws-sdk/util-user-agent-browser/dist-types/ts3.4/configurations.d.ts b/amplify/functions/deleteDocument/node_modules/@aws-sdk/util-user-agent-browser/dist-types/ts3.4/configurations.d.ts new file mode 100644 index 0000000..1428231 --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@aws-sdk/util-user-agent-browser/dist-types/ts3.4/configurations.d.ts @@ -0,0 +1,4 @@ +export interface DefaultUserAgentOptions { + serviceId?: string; + clientVersion: string; +} diff --git a/amplify/functions/deleteDocument/node_modules/@aws-sdk/util-user-agent-browser/dist-types/ts3.4/index.d.ts b/amplify/functions/deleteDocument/node_modules/@aws-sdk/util-user-agent-browser/dist-types/ts3.4/index.d.ts new file mode 100644 index 0000000..32e643a --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@aws-sdk/util-user-agent-browser/dist-types/ts3.4/index.d.ts @@ -0,0 +1,17 @@ +import { Provider, UserAgent } from "@smithy/types"; +import { DefaultUserAgentOptions } from "./configurations"; +export interface PreviouslyResolved { + userAgentAppId: Provider; +} +export declare const createDefaultUserAgentProvider: ({ + serviceId, + clientVersion, +}: DefaultUserAgentOptions) => ( + config?: PreviouslyResolved +) => Promise; +export declare const defaultUserAgent: ({ + serviceId, + clientVersion, +}: DefaultUserAgentOptions) => ( + config?: PreviouslyResolved +) => Promise; diff --git 
a/amplify/functions/deleteDocument/node_modules/@aws-sdk/util-user-agent-browser/dist-types/ts3.4/index.native.d.ts b/amplify/functions/deleteDocument/node_modules/@aws-sdk/util-user-agent-browser/dist-types/ts3.4/index.native.d.ts new file mode 100644 index 0000000..32e643a --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@aws-sdk/util-user-agent-browser/dist-types/ts3.4/index.native.d.ts @@ -0,0 +1,17 @@ +import { Provider, UserAgent } from "@smithy/types"; +import { DefaultUserAgentOptions } from "./configurations"; +export interface PreviouslyResolved { + userAgentAppId: Provider; +} +export declare const createDefaultUserAgentProvider: ({ + serviceId, + clientVersion, +}: DefaultUserAgentOptions) => ( + config?: PreviouslyResolved +) => Promise; +export declare const defaultUserAgent: ({ + serviceId, + clientVersion, +}: DefaultUserAgentOptions) => ( + config?: PreviouslyResolved +) => Promise; diff --git a/amplify/functions/deleteDocument/node_modules/@aws-sdk/util-user-agent-browser/package.json b/amplify/functions/deleteDocument/node_modules/@aws-sdk/util-user-agent-browser/package.json new file mode 100644 index 0000000..4065f6d --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@aws-sdk/util-user-agent-browser/package.json @@ -0,0 +1,54 @@ +{ + "name": "@aws-sdk/util-user-agent-browser", + "version": "3.775.0", + "scripts": { + "build": "concurrently 'yarn:build:cjs' 'yarn:build:es' 'yarn:build:types'", + "build:cjs": "node ../../scripts/compilation/inline util-user-agent-browser", + "build:es": "tsc -p tsconfig.es.json", + "build:include:deps": "lerna run --scope $npm_package_name --include-dependencies build", + "build:types": "tsc -p tsconfig.types.json", + "build:types:downlevel": "downlevel-dts dist-types dist-types/ts3.4", + "clean": "rimraf ./dist-* && rimraf *.tsbuildinfo", + "test": "yarn g:vitest run", + "test:watch": "yarn g:vitest watch" + }, + "main": "./dist-cjs/index.js", + "module": "./dist-es/index.js", + 
"browser": "./dist-es/index.js", + "types": "./dist-types/index.d.ts", + "author": { + "name": "AWS SDK for JavaScript Team", + "url": "https://aws.amazon.com/javascript/" + }, + "license": "Apache-2.0", + "react-native": "dist-es/index.native.js", + "dependencies": { + "@aws-sdk/types": "3.775.0", + "@smithy/types": "^4.2.0", + "bowser": "^2.11.0", + "tslib": "^2.6.2" + }, + "devDependencies": { + "@tsconfig/recommended": "1.0.1", + "concurrently": "7.0.0", + "downlevel-dts": "0.10.1", + "rimraf": "3.0.2", + "typescript": "~5.2.2" + }, + "typesVersions": { + "<4.0": { + "dist-types/*": [ + "dist-types/ts3.4/*" + ] + } + }, + "files": [ + "dist-*/**" + ], + "homepage": "https://github.com/aws/aws-sdk-js-v3/tree/main/packages/util-user-agent-browser", + "repository": { + "type": "git", + "url": "https://github.com/aws/aws-sdk-js-v3.git", + "directory": "packages/util-user-agent-browser" + } +} diff --git a/amplify/functions/deleteDocument/node_modules/@aws-sdk/util-user-agent-node/LICENSE b/amplify/functions/deleteDocument/node_modules/@aws-sdk/util-user-agent-node/LICENSE new file mode 100644 index 0000000..dd65ae0 --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@aws-sdk/util-user-agent-node/LICENSE @@ -0,0 +1,201 @@ + Apache License + Version 2.0, January 2004 + http://www.apache.org/licenses/ + + TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION + + 1. Definitions. + + "License" shall mean the terms and conditions for use, reproduction, + and distribution as defined by Sections 1 through 9 of this document. + + "Licensor" shall mean the copyright owner or entity authorized by + the copyright owner that is granting the License. + + "Legal Entity" shall mean the union of the acting entity and all + other entities that control, are controlled by, or are under common + control with that entity. 
For the purposes of this definition, + "control" means (i) the power, direct or indirect, to cause the + direction or management of such entity, whether by contract or + otherwise, or (ii) ownership of fifty percent (50%) or more of the + outstanding shares, or (iii) beneficial ownership of such entity. + + "You" (or "Your") shall mean an individual or Legal Entity + exercising permissions granted by this License. + + "Source" form shall mean the preferred form for making modifications, + including but not limited to software source code, documentation + source, and configuration files. + + "Object" form shall mean any form resulting from mechanical + transformation or translation of a Source form, including but + not limited to compiled object code, generated documentation, + and conversions to other media types. + + "Work" shall mean the work of authorship, whether in Source or + Object form, made available under the License, as indicated by a + copyright notice that is included in or attached to the work + (an example is provided in the Appendix below). + + "Derivative Works" shall mean any work, whether in Source or Object + form, that is based on (or derived from) the Work and for which the + editorial revisions, annotations, elaborations, or other modifications + represent, as a whole, an original work of authorship. For the purposes + of this License, Derivative Works shall not include works that remain + separable from, or merely link (or bind by name) to the interfaces of, + the Work and Derivative Works thereof. + + "Contribution" shall mean any work of authorship, including + the original version of the Work and any modifications or additions + to that Work or Derivative Works thereof, that is intentionally + submitted to Licensor for inclusion in the Work by the copyright owner + or by an individual or Legal Entity authorized to submit on behalf of + the copyright owner. 
For the purposes of this definition, "submitted" + means any form of electronic, verbal, or written communication sent + to the Licensor or its representatives, including but not limited to + communication on electronic mailing lists, source code control systems, + and issue tracking systems that are managed by, or on behalf of, the + Licensor for the purpose of discussing and improving the Work, but + excluding communication that is conspicuously marked or otherwise + designated in writing by the copyright owner as "Not a Contribution." + + "Contributor" shall mean Licensor and any individual or Legal Entity + on behalf of whom a Contribution has been received by Licensor and + subsequently incorporated within the Work. + + 2. Grant of Copyright License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + copyright license to reproduce, prepare Derivative Works of, + publicly display, publicly perform, sublicense, and distribute the + Work and such Derivative Works in Source or Object form. + + 3. Grant of Patent License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + (except as stated in this section) patent license to make, have made, + use, offer to sell, sell, import, and otherwise transfer the Work, + where such license applies only to those patent claims licensable + by such Contributor that are necessarily infringed by their + Contribution(s) alone or by combination of their Contribution(s) + with the Work to which such Contribution(s) was submitted. 
If You + institute patent litigation against any entity (including a + cross-claim or counterclaim in a lawsuit) alleging that the Work + or a Contribution incorporated within the Work constitutes direct + or contributory patent infringement, then any patent licenses + granted to You under this License for that Work shall terminate + as of the date such litigation is filed. + + 4. Redistribution. You may reproduce and distribute copies of the + Work or Derivative Works thereof in any medium, with or without + modifications, and in Source or Object form, provided that You + meet the following conditions: + + (a) You must give any other recipients of the Work or + Derivative Works a copy of this License; and + + (b) You must cause any modified files to carry prominent notices + stating that You changed the files; and + + (c) You must retain, in the Source form of any Derivative Works + that You distribute, all copyright, patent, trademark, and + attribution notices from the Source form of the Work, + excluding those notices that do not pertain to any part of + the Derivative Works; and + + (d) If the Work includes a "NOTICE" text file as part of its + distribution, then any Derivative Works that You distribute must + include a readable copy of the attribution notices contained + within such NOTICE file, excluding those notices that do not + pertain to any part of the Derivative Works, in at least one + of the following places: within a NOTICE text file distributed + as part of the Derivative Works; within the Source form or + documentation, if provided along with the Derivative Works; or, + within a display generated by the Derivative Works, if and + wherever such third-party notices normally appear. The contents + of the NOTICE file are for informational purposes only and + do not modify the License. 
You may add Your own attribution + notices within Derivative Works that You distribute, alongside + or as an addendum to the NOTICE text from the Work, provided + that such additional attribution notices cannot be construed + as modifying the License. + + You may add Your own copyright statement to Your modifications and + may provide additional or different license terms and conditions + for use, reproduction, or distribution of Your modifications, or + for any such Derivative Works as a whole, provided Your use, + reproduction, and distribution of the Work otherwise complies with + the conditions stated in this License. + + 5. Submission of Contributions. Unless You explicitly state otherwise, + any Contribution intentionally submitted for inclusion in the Work + by You to the Licensor shall be under the terms and conditions of + this License, without any additional terms or conditions. + Notwithstanding the above, nothing herein shall supersede or modify + the terms of any separate license agreement you may have executed + with Licensor regarding such Contributions. + + 6. Trademarks. This License does not grant permission to use the trade + names, trademarks, service marks, or product names of the Licensor, + except as required for reasonable and customary use in describing the + origin of the Work and reproducing the content of the NOTICE file. + + 7. Disclaimer of Warranty. Unless required by applicable law or + agreed to in writing, Licensor provides the Work (and each + Contributor provides its Contributions) on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or + implied, including, without limitation, any warranties or conditions + of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A + PARTICULAR PURPOSE. You are solely responsible for determining the + appropriateness of using or redistributing the Work and assume any + risks associated with Your exercise of permissions under this License. + + 8. 
Limitation of Liability. In no event and under no legal theory, + whether in tort (including negligence), contract, or otherwise, + unless required by applicable law (such as deliberate and grossly + negligent acts) or agreed to in writing, shall any Contributor be + liable to You for damages, including any direct, indirect, special, + incidental, or consequential damages of any character arising as a + result of this License or out of the use or inability to use the + Work (including but not limited to damages for loss of goodwill, + work stoppage, computer failure or malfunction, or any and all + other commercial damages or losses), even if such Contributor + has been advised of the possibility of such damages. + + 9. Accepting Warranty or Additional Liability. While redistributing + the Work or Derivative Works thereof, You may choose to offer, + and charge a fee for, acceptance of support, warranty, indemnity, + or other liability obligations and/or rights consistent with this + License. However, in accepting such obligations, You may act only + on Your own behalf and on Your sole responsibility, not on behalf + of any other Contributor, and only if You agree to indemnify, + defend, and hold each Contributor harmless for any liability + incurred by, or claims asserted against, such Contributor by reason + of your accepting any such warranty or additional liability. + + END OF TERMS AND CONDITIONS + + APPENDIX: How to apply the Apache License to your work. + + To apply the Apache License to your work, attach the following + boilerplate notice, with the fields enclosed by brackets "{}" + replaced with your own identifying information. (Don't include + the brackets!) The text should be enclosed in the appropriate + comment syntax for the file format. We also recommend that a + file or class name and description of purpose be included on the + same "printed page" as the copyright notice for easier + identification within third-party archives. 
+ + Copyright 2018-2020 Amazon.com, Inc. or its affiliates. All Rights Reserved. + + Licensed under the Apache License, Version 2.0 (the "License"); + you may not use this file except in compliance with the License. + You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + + Unless required by applicable law or agreed to in writing, software + distributed under the License is distributed on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + See the License for the specific language governing permissions and + limitations under the License. diff --git a/amplify/functions/deleteDocument/node_modules/@aws-sdk/util-user-agent-node/README.md b/amplify/functions/deleteDocument/node_modules/@aws-sdk/util-user-agent-node/README.md new file mode 100644 index 0000000..fccfbb5 --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@aws-sdk/util-user-agent-node/README.md @@ -0,0 +1,10 @@ +# @aws-sdk/util-user-agent-node + +[![NPM version](https://img.shields.io/npm/v/@aws-sdk/util-user-agent-node/latest.svg)](https://www.npmjs.com/package/@aws-sdk/util-user-agent-node) +[![NPM downloads](https://img.shields.io/npm/dm/@aws-sdk/util-user-agent-node.svg)](https://www.npmjs.com/package/@aws-sdk/util-user-agent-node) + +> An internal package + +## Usage + +You probably shouldn't, at least directly. 
diff --git a/amplify/functions/deleteDocument/node_modules/@aws-sdk/util-user-agent-node/dist-cjs/index.js b/amplify/functions/deleteDocument/node_modules/@aws-sdk/util-user-agent-node/dist-cjs/index.js new file mode 100644 index 0000000..083dccb --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@aws-sdk/util-user-agent-node/dist-cjs/index.js @@ -0,0 +1,102 @@ +"use strict"; +var __defProp = Object.defineProperty; +var __getOwnPropDesc = Object.getOwnPropertyDescriptor; +var __getOwnPropNames = Object.getOwnPropertyNames; +var __hasOwnProp = Object.prototype.hasOwnProperty; +var __name = (target, value) => __defProp(target, "name", { value, configurable: true }); +var __export = (target, all) => { + for (var name in all) + __defProp(target, name, { get: all[name], enumerable: true }); +}; +var __copyProps = (to, from, except, desc) => { + if (from && typeof from === "object" || typeof from === "function") { + for (let key of __getOwnPropNames(from)) + if (!__hasOwnProp.call(to, key) && key !== except) + __defProp(to, key, { get: () => from[key], enumerable: !(desc = __getOwnPropDesc(from, key)) || desc.enumerable }); + } + return to; +}; +var __toCommonJS = (mod) => __copyProps(__defProp({}, "__esModule", { value: true }), mod); + +// src/index.ts +var index_exports = {}; +__export(index_exports, { + NODE_APP_ID_CONFIG_OPTIONS: () => NODE_APP_ID_CONFIG_OPTIONS, + UA_APP_ID_ENV_NAME: () => UA_APP_ID_ENV_NAME, + UA_APP_ID_INI_NAME: () => UA_APP_ID_INI_NAME, + createDefaultUserAgentProvider: () => createDefaultUserAgentProvider, + crtAvailability: () => crtAvailability, + defaultUserAgent: () => defaultUserAgent +}); +module.exports = __toCommonJS(index_exports); + +// src/defaultUserAgent.ts +var import_os = require("os"); +var import_process = require("process"); + +// src/crt-availability.ts +var crtAvailability = { + isCrtAvailable: false +}; + +// src/is-crt-available.ts +var isCrtAvailable = /* @__PURE__ */ __name(() => { + if 
(crtAvailability.isCrtAvailable) { + return ["md/crt-avail"]; + } + return null; +}, "isCrtAvailable"); + +// src/defaultUserAgent.ts +var createDefaultUserAgentProvider = /* @__PURE__ */ __name(({ serviceId, clientVersion }) => { + return async (config) => { + const sections = [ + // sdk-metadata + ["aws-sdk-js", clientVersion], + // ua-metadata + ["ua", "2.1"], + // os-metadata + [`os/${(0, import_os.platform)()}`, (0, import_os.release)()], + // language-metadata + // ECMAScript edition doesn't matter in JS, so no version needed. + ["lang/js"], + ["md/nodejs", `${import_process.versions.node}`] + ]; + const crtAvailable = isCrtAvailable(); + if (crtAvailable) { + sections.push(crtAvailable); + } + if (serviceId) { + sections.push([`api/${serviceId}`, clientVersion]); + } + if (import_process.env.AWS_EXECUTION_ENV) { + sections.push([`exec-env/${import_process.env.AWS_EXECUTION_ENV}`]); + } + const appId = await config?.userAgentAppId?.(); + const resolvedUserAgent = appId ? [...sections, [`app/${appId}`]] : [...sections]; + return resolvedUserAgent; + }; +}, "createDefaultUserAgentProvider"); +var defaultUserAgent = createDefaultUserAgentProvider; + +// src/nodeAppIdConfigOptions.ts +var import_middleware_user_agent = require("@aws-sdk/middleware-user-agent"); +var UA_APP_ID_ENV_NAME = "AWS_SDK_UA_APP_ID"; +var UA_APP_ID_INI_NAME = "sdk_ua_app_id"; +var UA_APP_ID_INI_NAME_DEPRECATED = "sdk-ua-app-id"; +var NODE_APP_ID_CONFIG_OPTIONS = { + environmentVariableSelector: /* @__PURE__ */ __name((env2) => env2[UA_APP_ID_ENV_NAME], "environmentVariableSelector"), + configFileSelector: /* @__PURE__ */ __name((profile) => profile[UA_APP_ID_INI_NAME] ?? 
profile[UA_APP_ID_INI_NAME_DEPRECATED], "configFileSelector"), + default: import_middleware_user_agent.DEFAULT_UA_APP_ID +}; +// Annotate the CommonJS export names for ESM import in node: + +0 && (module.exports = { + crtAvailability, + createDefaultUserAgentProvider, + defaultUserAgent, + UA_APP_ID_ENV_NAME, + UA_APP_ID_INI_NAME, + NODE_APP_ID_CONFIG_OPTIONS +}); + diff --git a/amplify/functions/deleteDocument/node_modules/@aws-sdk/util-user-agent-node/dist-es/crt-availability.js b/amplify/functions/deleteDocument/node_modules/@aws-sdk/util-user-agent-node/dist-es/crt-availability.js new file mode 100644 index 0000000..99ebeb9 --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@aws-sdk/util-user-agent-node/dist-es/crt-availability.js @@ -0,0 +1,3 @@ +export const crtAvailability = { + isCrtAvailable: false, +}; diff --git a/amplify/functions/deleteDocument/node_modules/@aws-sdk/util-user-agent-node/dist-es/defaultUserAgent.js b/amplify/functions/deleteDocument/node_modules/@aws-sdk/util-user-agent-node/dist-es/defaultUserAgent.js new file mode 100644 index 0000000..d92681d --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@aws-sdk/util-user-agent-node/dist-es/defaultUserAgent.js @@ -0,0 +1,29 @@ +import { platform, release } from "os"; +import { env, versions } from "process"; +import { isCrtAvailable } from "./is-crt-available"; +export { crtAvailability } from "./crt-availability"; +export const createDefaultUserAgentProvider = ({ serviceId, clientVersion }) => { + return async (config) => { + const sections = [ + ["aws-sdk-js", clientVersion], + ["ua", "2.1"], + [`os/${platform()}`, release()], + ["lang/js"], + ["md/nodejs", `${versions.node}`], + ]; + const crtAvailable = isCrtAvailable(); + if (crtAvailable) { + sections.push(crtAvailable); + } + if (serviceId) { + sections.push([`api/${serviceId}`, clientVersion]); + } + if (env.AWS_EXECUTION_ENV) { + sections.push([`exec-env/${env.AWS_EXECUTION_ENV}`]); + } + const appId = 
await config?.userAgentAppId?.(); + const resolvedUserAgent = appId ? [...sections, [`app/${appId}`]] : [...sections]; + return resolvedUserAgent; + }; +}; +export const defaultUserAgent = createDefaultUserAgentProvider; diff --git a/amplify/functions/deleteDocument/node_modules/@aws-sdk/util-user-agent-node/dist-es/index.js b/amplify/functions/deleteDocument/node_modules/@aws-sdk/util-user-agent-node/dist-es/index.js new file mode 100644 index 0000000..cbf37f2 --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@aws-sdk/util-user-agent-node/dist-es/index.js @@ -0,0 +1,2 @@ +export * from "./defaultUserAgent"; +export * from "./nodeAppIdConfigOptions"; diff --git a/amplify/functions/deleteDocument/node_modules/@aws-sdk/util-user-agent-node/dist-es/is-crt-available.js b/amplify/functions/deleteDocument/node_modules/@aws-sdk/util-user-agent-node/dist-es/is-crt-available.js new file mode 100644 index 0000000..e9f8b0d --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@aws-sdk/util-user-agent-node/dist-es/is-crt-available.js @@ -0,0 +1,7 @@ +import { crtAvailability } from "./crt-availability"; +export const isCrtAvailable = () => { + if (crtAvailability.isCrtAvailable) { + return ["md/crt-avail"]; + } + return null; +}; diff --git a/amplify/functions/deleteDocument/node_modules/@aws-sdk/util-user-agent-node/dist-es/nodeAppIdConfigOptions.js b/amplify/functions/deleteDocument/node_modules/@aws-sdk/util-user-agent-node/dist-es/nodeAppIdConfigOptions.js new file mode 100644 index 0000000..f270db9 --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@aws-sdk/util-user-agent-node/dist-es/nodeAppIdConfigOptions.js @@ -0,0 +1,9 @@ +import { DEFAULT_UA_APP_ID } from "@aws-sdk/middleware-user-agent"; +export const UA_APP_ID_ENV_NAME = "AWS_SDK_UA_APP_ID"; +export const UA_APP_ID_INI_NAME = "sdk_ua_app_id"; +const UA_APP_ID_INI_NAME_DEPRECATED = "sdk-ua-app-id"; +export const NODE_APP_ID_CONFIG_OPTIONS = { + environmentVariableSelector: 
(env) => env[UA_APP_ID_ENV_NAME], + configFileSelector: (profile) => profile[UA_APP_ID_INI_NAME] ?? profile[UA_APP_ID_INI_NAME_DEPRECATED], + default: DEFAULT_UA_APP_ID, +}; diff --git a/amplify/functions/deleteDocument/node_modules/@aws-sdk/util-user-agent-node/dist-types/crt-availability.d.ts b/amplify/functions/deleteDocument/node_modules/@aws-sdk/util-user-agent-node/dist-types/crt-availability.d.ts new file mode 100644 index 0000000..c2033a0 --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@aws-sdk/util-user-agent-node/dist-types/crt-availability.d.ts @@ -0,0 +1,9 @@ +/** + * @internal + * + * If \@aws-sdk/signature-v4-crt is installed and loaded, it will register + * this value to true. + */ +export declare const crtAvailability: { + isCrtAvailable: boolean; +}; diff --git a/amplify/functions/deleteDocument/node_modules/@aws-sdk/util-user-agent-node/dist-types/defaultUserAgent.d.ts b/amplify/functions/deleteDocument/node_modules/@aws-sdk/util-user-agent-node/dist-types/defaultUserAgent.d.ts new file mode 100644 index 0000000..28537a6 --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@aws-sdk/util-user-agent-node/dist-types/defaultUserAgent.d.ts @@ -0,0 +1,23 @@ +import { Provider, UserAgent } from "@smithy/types"; +export { crtAvailability } from "./crt-availability"; +export interface DefaultUserAgentOptions { + serviceId?: string; + clientVersion: string; +} +export interface PreviouslyResolved { + userAgentAppId: Provider; +} +/** + * @internal + * + * Collect metrics from runtime to put into user agent. 
+ */ +export declare const createDefaultUserAgentProvider: ({ serviceId, clientVersion }: DefaultUserAgentOptions) => (config?: PreviouslyResolved) => Promise; +/** + * + * @internal + * + * @deprecated use createDefaultUserAgentProvider + * + */ +export declare const defaultUserAgent: ({ serviceId, clientVersion }: DefaultUserAgentOptions) => (config?: PreviouslyResolved) => Promise; diff --git a/amplify/functions/deleteDocument/node_modules/@aws-sdk/util-user-agent-node/dist-types/index.d.ts b/amplify/functions/deleteDocument/node_modules/@aws-sdk/util-user-agent-node/dist-types/index.d.ts new file mode 100644 index 0000000..cbf37f2 --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@aws-sdk/util-user-agent-node/dist-types/index.d.ts @@ -0,0 +1,2 @@ +export * from "./defaultUserAgent"; +export * from "./nodeAppIdConfigOptions"; diff --git a/amplify/functions/deleteDocument/node_modules/@aws-sdk/util-user-agent-node/dist-types/is-crt-available.d.ts b/amplify/functions/deleteDocument/node_modules/@aws-sdk/util-user-agent-node/dist-types/is-crt-available.d.ts new file mode 100644 index 0000000..675ffa8 --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@aws-sdk/util-user-agent-node/dist-types/is-crt-available.d.ts @@ -0,0 +1,5 @@ +import { UserAgentPair } from "@smithy/types"; +/** + * @internal + */ +export declare const isCrtAvailable: () => UserAgentPair | null; diff --git a/amplify/functions/deleteDocument/node_modules/@aws-sdk/util-user-agent-node/dist-types/nodeAppIdConfigOptions.d.ts b/amplify/functions/deleteDocument/node_modules/@aws-sdk/util-user-agent-node/dist-types/nodeAppIdConfigOptions.d.ts new file mode 100644 index 0000000..92a8edc --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@aws-sdk/util-user-agent-node/dist-types/nodeAppIdConfigOptions.d.ts @@ -0,0 +1,13 @@ +import { LoadedConfigSelectors } from "@smithy/node-config-provider"; +/** + * @internal + */ +export declare const UA_APP_ID_ENV_NAME = 
"AWS_SDK_UA_APP_ID"; +/** + * @internal + */ +export declare const UA_APP_ID_INI_NAME = "sdk_ua_app_id"; +/** + * @internal + */ +export declare const NODE_APP_ID_CONFIG_OPTIONS: LoadedConfigSelectors; diff --git a/amplify/functions/deleteDocument/node_modules/@aws-sdk/util-user-agent-node/dist-types/ts3.4/crt-availability.d.ts b/amplify/functions/deleteDocument/node_modules/@aws-sdk/util-user-agent-node/dist-types/ts3.4/crt-availability.d.ts new file mode 100644 index 0000000..9dccfb0 --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@aws-sdk/util-user-agent-node/dist-types/ts3.4/crt-availability.d.ts @@ -0,0 +1,3 @@ +export declare const crtAvailability: { + isCrtAvailable: boolean; +}; diff --git a/amplify/functions/deleteDocument/node_modules/@aws-sdk/util-user-agent-node/dist-types/ts3.4/defaultUserAgent.d.ts b/amplify/functions/deleteDocument/node_modules/@aws-sdk/util-user-agent-node/dist-types/ts3.4/defaultUserAgent.d.ts new file mode 100644 index 0000000..6e4884f --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@aws-sdk/util-user-agent-node/dist-types/ts3.4/defaultUserAgent.d.ts @@ -0,0 +1,21 @@ +import { Provider, UserAgent } from "@smithy/types"; +export { crtAvailability } from "./crt-availability"; +export interface DefaultUserAgentOptions { + serviceId?: string; + clientVersion: string; +} +export interface PreviouslyResolved { + userAgentAppId: Provider; +} +export declare const createDefaultUserAgentProvider: ({ + serviceId, + clientVersion, +}: DefaultUserAgentOptions) => ( + config?: PreviouslyResolved +) => Promise; +export declare const defaultUserAgent: ({ + serviceId, + clientVersion, +}: DefaultUserAgentOptions) => ( + config?: PreviouslyResolved +) => Promise; diff --git a/amplify/functions/deleteDocument/node_modules/@aws-sdk/util-user-agent-node/dist-types/ts3.4/index.d.ts b/amplify/functions/deleteDocument/node_modules/@aws-sdk/util-user-agent-node/dist-types/ts3.4/index.d.ts new file mode 100644 index 
0000000..cbf37f2 --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@aws-sdk/util-user-agent-node/dist-types/ts3.4/index.d.ts @@ -0,0 +1,2 @@ +export * from "./defaultUserAgent"; +export * from "./nodeAppIdConfigOptions"; diff --git a/amplify/functions/deleteDocument/node_modules/@aws-sdk/util-user-agent-node/dist-types/ts3.4/is-crt-available.d.ts b/amplify/functions/deleteDocument/node_modules/@aws-sdk/util-user-agent-node/dist-types/ts3.4/is-crt-available.d.ts new file mode 100644 index 0000000..d28355c --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@aws-sdk/util-user-agent-node/dist-types/ts3.4/is-crt-available.d.ts @@ -0,0 +1,2 @@ +import { UserAgentPair } from "@smithy/types"; +export declare const isCrtAvailable: () => UserAgentPair | null; diff --git a/amplify/functions/deleteDocument/node_modules/@aws-sdk/util-user-agent-node/dist-types/ts3.4/nodeAppIdConfigOptions.d.ts b/amplify/functions/deleteDocument/node_modules/@aws-sdk/util-user-agent-node/dist-types/ts3.4/nodeAppIdConfigOptions.d.ts new file mode 100644 index 0000000..b9fa123 --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@aws-sdk/util-user-agent-node/dist-types/ts3.4/nodeAppIdConfigOptions.d.ts @@ -0,0 +1,6 @@ +import { LoadedConfigSelectors } from "@smithy/node-config-provider"; +export declare const UA_APP_ID_ENV_NAME = "AWS_SDK_UA_APP_ID"; +export declare const UA_APP_ID_INI_NAME = "sdk_ua_app_id"; +export declare const NODE_APP_ID_CONFIG_OPTIONS: LoadedConfigSelectors< + string | undefined +>; diff --git a/amplify/functions/deleteDocument/node_modules/@aws-sdk/util-user-agent-node/package.json b/amplify/functions/deleteDocument/node_modules/@aws-sdk/util-user-agent-node/package.json new file mode 100644 index 0000000..14742a5 --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@aws-sdk/util-user-agent-node/package.json @@ -0,0 +1,65 @@ +{ + "name": "@aws-sdk/util-user-agent-node", + "version": "3.799.0", + "scripts": { + 
"build": "concurrently 'yarn:build:cjs' 'yarn:build:es' 'yarn:build:types'", + "build:cjs": "node ../../scripts/compilation/inline util-user-agent-node", + "build:es": "tsc -p tsconfig.es.json", + "build:include:deps": "lerna run --scope $npm_package_name --include-dependencies build", + "build:types": "tsc -p tsconfig.types.json", + "build:types:downlevel": "downlevel-dts dist-types dist-types/ts3.4", + "clean": "rimraf ./dist-* && rimraf *.tsbuildinfo", + "test": "yarn g:vitest run", + "test:watch": "yarn g:vitest watch" + }, + "main": "./dist-cjs/index.js", + "module": "./dist-es/index.js", + "types": "./dist-types/index.d.ts", + "author": { + "name": "AWS SDK for JavaScript Team", + "url": "https://aws.amazon.com/javascript/" + }, + "license": "Apache-2.0", + "dependencies": { + "@aws-sdk/middleware-user-agent": "3.799.0", + "@aws-sdk/types": "3.775.0", + "@smithy/node-config-provider": "^4.0.2", + "@smithy/types": "^4.2.0", + "tslib": "^2.6.2" + }, + "devDependencies": { + "@tsconfig/recommended": "1.0.1", + "@types/node": "^18.19.69", + "concurrently": "7.0.0", + "downlevel-dts": "0.10.1", + "rimraf": "3.0.2", + "typescript": "~5.2.2" + }, + "peerDependencies": { + "aws-crt": ">=1.0.0" + }, + "peerDependenciesMeta": { + "aws-crt": { + "optional": true + } + }, + "engines": { + "node": ">=18.0.0" + }, + "typesVersions": { + "<4.0": { + "dist-types/*": [ + "dist-types/ts3.4/*" + ] + } + }, + "files": [ + "dist-*/**" + ], + "homepage": "https://github.com/aws/aws-sdk-js-v3/tree/main/packages/util-user-agent-node", + "repository": { + "type": "git", + "url": "https://github.com/aws/aws-sdk-js-v3.git", + "directory": "packages/util-user-agent-node" + } +} diff --git a/amplify/functions/deleteDocument/node_modules/@smithy/abort-controller/LICENSE b/amplify/functions/deleteDocument/node_modules/@smithy/abort-controller/LICENSE new file mode 100644 index 0000000..7b6491b --- /dev/null +++ 
b/amplify/functions/deleteDocument/node_modules/@smithy/abort-controller/LICENSE @@ -0,0 +1,201 @@ +Apache License + Version 2.0, January 2004 + http://www.apache.org/licenses/ + + TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION + + 1. Definitions. + + "License" shall mean the terms and conditions for use, reproduction, + and distribution as defined by Sections 1 through 9 of this document. + + "Licensor" shall mean the copyright owner or entity authorized by + the copyright owner that is granting the License. + + "Legal Entity" shall mean the union of the acting entity and all + other entities that control, are controlled by, or are under common + control with that entity. For the purposes of this definition, + "control" means (i) the power, direct or indirect, to cause the + direction or management of such entity, whether by contract or + otherwise, or (ii) ownership of fifty percent (50%) or more of the + outstanding shares, or (iii) beneficial ownership of such entity. + + "You" (or "Your") shall mean an individual or Legal Entity + exercising permissions granted by this License. + + "Source" form shall mean the preferred form for making modifications, + including but not limited to software source code, documentation + source, and configuration files. + + "Object" form shall mean any form resulting from mechanical + transformation or translation of a Source form, including but + not limited to compiled object code, generated documentation, + and conversions to other media types. + + "Work" shall mean the work of authorship, whether in Source or + Object form, made available under the License, as indicated by a + copyright notice that is included in or attached to the work + (an example is provided in the Appendix below). 
+ + "Derivative Works" shall mean any work, whether in Source or Object + form, that is based on (or derived from) the Work and for which the + editorial revisions, annotations, elaborations, or other modifications + represent, as a whole, an original work of authorship. For the purposes + of this License, Derivative Works shall not include works that remain + separable from, or merely link (or bind by name) to the interfaces of, + the Work and Derivative Works thereof. + + "Contribution" shall mean any work of authorship, including + the original version of the Work and any modifications or additions + to that Work or Derivative Works thereof, that is intentionally + submitted to Licensor for inclusion in the Work by the copyright owner + or by an individual or Legal Entity authorized to submit on behalf of + the copyright owner. For the purposes of this definition, "submitted" + means any form of electronic, verbal, or written communication sent + to the Licensor or its representatives, including but not limited to + communication on electronic mailing lists, source code control systems, + and issue tracking systems that are managed by, or on behalf of, the + Licensor for the purpose of discussing and improving the Work, but + excluding communication that is conspicuously marked or otherwise + designated in writing by the copyright owner as "Not a Contribution." + + "Contributor" shall mean Licensor and any individual or Legal Entity + on behalf of whom a Contribution has been received by Licensor and + subsequently incorporated within the Work. + + 2. Grant of Copyright License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + copyright license to reproduce, prepare Derivative Works of, + publicly display, publicly perform, sublicense, and distribute the + Work and such Derivative Works in Source or Object form. + + 3. 
Grant of Patent License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + (except as stated in this section) patent license to make, have made, + use, offer to sell, sell, import, and otherwise transfer the Work, + where such license applies only to those patent claims licensable + by such Contributor that are necessarily infringed by their + Contribution(s) alone or by combination of their Contribution(s) + with the Work to which such Contribution(s) was submitted. If You + institute patent litigation against any entity (including a + cross-claim or counterclaim in a lawsuit) alleging that the Work + or a Contribution incorporated within the Work constitutes direct + or contributory patent infringement, then any patent licenses + granted to You under this License for that Work shall terminate + as of the date such litigation is filed. + + 4. Redistribution. You may reproduce and distribute copies of the + Work or Derivative Works thereof in any medium, with or without + modifications, and in Source or Object form, provided that You + meet the following conditions: + + (a) You must give any other recipients of the Work or + Derivative Works a copy of this License; and + + (b) You must cause any modified files to carry prominent notices + stating that You changed the files; and + + (c) You must retain, in the Source form of any Derivative Works + that You distribute, all copyright, patent, trademark, and + attribution notices from the Source form of the Work, + excluding those notices that do not pertain to any part of + the Derivative Works; and + + (d) If the Work includes a "NOTICE" text file as part of its + distribution, then any Derivative Works that You distribute must + include a readable copy of the attribution notices contained + within such NOTICE file, excluding those notices that do not + pertain to any part of the Derivative 
Works, in at least one + of the following places: within a NOTICE text file distributed + as part of the Derivative Works; within the Source form or + documentation, if provided along with the Derivative Works; or, + within a display generated by the Derivative Works, if and + wherever such third-party notices normally appear. The contents + of the NOTICE file are for informational purposes only and + do not modify the License. You may add Your own attribution + notices within Derivative Works that You distribute, alongside + or as an addendum to the NOTICE text from the Work, provided + that such additional attribution notices cannot be construed + as modifying the License. + + You may add Your own copyright statement to Your modifications and + may provide additional or different license terms and conditions + for use, reproduction, or distribution of Your modifications, or + for any such Derivative Works as a whole, provided Your use, + reproduction, and distribution of the Work otherwise complies with + the conditions stated in this License. + + 5. Submission of Contributions. Unless You explicitly state otherwise, + any Contribution intentionally submitted for inclusion in the Work + by You to the Licensor shall be under the terms and conditions of + this License, without any additional terms or conditions. + Notwithstanding the above, nothing herein shall supersede or modify + the terms of any separate license agreement you may have executed + with Licensor regarding such Contributions. + + 6. Trademarks. This License does not grant permission to use the trade + names, trademarks, service marks, or product names of the Licensor, + except as required for reasonable and customary use in describing the + origin of the Work and reproducing the content of the NOTICE file. + + 7. Disclaimer of Warranty. 
Unless required by applicable law or + agreed to in writing, Licensor provides the Work (and each + Contributor provides its Contributions) on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or + implied, including, without limitation, any warranties or conditions + of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A + PARTICULAR PURPOSE. You are solely responsible for determining the + appropriateness of using or redistributing the Work and assume any + risks associated with Your exercise of permissions under this License. + + 8. Limitation of Liability. In no event and under no legal theory, + whether in tort (including negligence), contract, or otherwise, + unless required by applicable law (such as deliberate and grossly + negligent acts) or agreed to in writing, shall any Contributor be + liable to You for damages, including any direct, indirect, special, + incidental, or consequential damages of any character arising as a + result of this License or out of the use or inability to use the + Work (including but not limited to damages for loss of goodwill, + work stoppage, computer failure or malfunction, or any and all + other commercial damages or losses), even if such Contributor + has been advised of the possibility of such damages. + + 9. Accepting Warranty or Additional Liability. While redistributing + the Work or Derivative Works thereof, You may choose to offer, + and charge a fee for, acceptance of support, warranty, indemnity, + or other liability obligations and/or rights consistent with this + License. However, in accepting such obligations, You may act only + on Your own behalf and on Your sole responsibility, not on behalf + of any other Contributor, and only if You agree to indemnify, + defend, and hold each Contributor harmless for any liability + incurred by, or claims asserted against, such Contributor by reason + of your accepting any such warranty or additional liability. 
+ + END OF TERMS AND CONDITIONS + + APPENDIX: How to apply the Apache License to your work. + + To apply the Apache License to your work, attach the following + boilerplate notice, with the fields enclosed by brackets "{}" + replaced with your own identifying information. (Don't include + the brackets!) The text should be enclosed in the appropriate + comment syntax for the file format. We also recommend that a + file or class name and description of purpose be included on the + same "printed page" as the copyright notice for easier + identification within third-party archives. + + Copyright 2018-2020 Amazon.com, Inc. or its affiliates. All Rights Reserved. + + Licensed under the Apache License, Version 2.0 (the "License"); + you may not use this file except in compliance with the License. + You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + + Unless required by applicable law or agreed to in writing, software + distributed under the License is distributed on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + See the License for the specific language governing permissions and + limitations under the License. 
\ No newline at end of file diff --git a/amplify/functions/deleteDocument/node_modules/@smithy/abort-controller/README.md b/amplify/functions/deleteDocument/node_modules/@smithy/abort-controller/README.md new file mode 100644 index 0000000..175bc37 --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@smithy/abort-controller/README.md @@ -0,0 +1,4 @@ +# @smithy/abort-controller + +[![NPM version](https://img.shields.io/npm/v/@smithy/abort-controller/latest.svg)](https://www.npmjs.com/package/@smithy/abort-controller) +[![NPM downloads](https://img.shields.io/npm/dm/@smithy/abort-controller.svg)](https://www.npmjs.com/package/@smithy/abort-controller) diff --git a/amplify/functions/deleteDocument/node_modules/@smithy/abort-controller/dist-cjs/AbortController.js b/amplify/functions/deleteDocument/node_modules/@smithy/abort-controller/dist-cjs/AbortController.js new file mode 100644 index 0000000..532e610 --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@smithy/abort-controller/dist-cjs/AbortController.js @@ -0,0 +1 @@ +module.exports = require("./index.js"); \ No newline at end of file diff --git a/amplify/functions/deleteDocument/node_modules/@smithy/abort-controller/dist-cjs/AbortSignal.js b/amplify/functions/deleteDocument/node_modules/@smithy/abort-controller/dist-cjs/AbortSignal.js new file mode 100644 index 0000000..532e610 --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@smithy/abort-controller/dist-cjs/AbortSignal.js @@ -0,0 +1 @@ +module.exports = require("./index.js"); \ No newline at end of file diff --git a/amplify/functions/deleteDocument/node_modules/@smithy/abort-controller/dist-cjs/index.js b/amplify/functions/deleteDocument/node_modules/@smithy/abort-controller/dist-cjs/index.js new file mode 100644 index 0000000..e2f7caa --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@smithy/abort-controller/dist-cjs/index.js @@ -0,0 +1,84 @@ +var __defProp = Object.defineProperty; +var 
__getOwnPropDesc = Object.getOwnPropertyDescriptor; +var __getOwnPropNames = Object.getOwnPropertyNames; +var __hasOwnProp = Object.prototype.hasOwnProperty; +var __name = (target, value) => __defProp(target, "name", { value, configurable: true }); +var __export = (target, all) => { + for (var name in all) + __defProp(target, name, { get: all[name], enumerable: true }); +}; +var __copyProps = (to, from, except, desc) => { + if (from && typeof from === "object" || typeof from === "function") { + for (let key of __getOwnPropNames(from)) + if (!__hasOwnProp.call(to, key) && key !== except) + __defProp(to, key, { get: () => from[key], enumerable: !(desc = __getOwnPropDesc(from, key)) || desc.enumerable }); + } + return to; +}; +var __toCommonJS = (mod) => __copyProps(__defProp({}, "__esModule", { value: true }), mod); + +// src/index.ts +var src_exports = {}; +__export(src_exports, { + AbortController: () => AbortController, + AbortHandler: () => import_types.AbortHandler, + AbortSignal: () => AbortSignal, + IAbortController: () => import_types.AbortController, + IAbortSignal: () => import_types.AbortSignal +}); +module.exports = __toCommonJS(src_exports); + +// src/AbortController.ts + + +// src/AbortSignal.ts +var import_types = require("@smithy/types"); +var AbortSignal = class { + constructor() { + this.onabort = null; + this._aborted = false; + Object.defineProperty(this, "_aborted", { + value: false, + writable: true + }); + } + static { + __name(this, "AbortSignal"); + } + /** + * Whether the associated operation has already been cancelled. 
+ */ + get aborted() { + return this._aborted; + } + /** + * @internal + */ + abort() { + this._aborted = true; + if (this.onabort) { + this.onabort(this); + this.onabort = null; + } + } +}; + +// src/AbortController.ts +var AbortController = class { + constructor() { + this.signal = new AbortSignal(); + } + static { + __name(this, "AbortController"); + } + abort() { + this.signal.abort(); + } +}; +// Annotate the CommonJS export names for ESM import in node: + +0 && (module.exports = { + AbortController, + AbortSignal +}); + diff --git a/amplify/functions/deleteDocument/node_modules/@smithy/abort-controller/dist-es/AbortController.js b/amplify/functions/deleteDocument/node_modules/@smithy/abort-controller/dist-es/AbortController.js new file mode 100644 index 0000000..696f137 --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@smithy/abort-controller/dist-es/AbortController.js @@ -0,0 +1,9 @@ +import { AbortSignal } from "./AbortSignal"; +export class AbortController { + constructor() { + this.signal = new AbortSignal(); + } + abort() { + this.signal.abort(); + } +} diff --git a/amplify/functions/deleteDocument/node_modules/@smithy/abort-controller/dist-es/AbortSignal.js b/amplify/functions/deleteDocument/node_modules/@smithy/abort-controller/dist-es/AbortSignal.js new file mode 100644 index 0000000..9fc0813 --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@smithy/abort-controller/dist-es/AbortSignal.js @@ -0,0 +1,20 @@ +export class AbortSignal { + constructor() { + this.onabort = null; + this._aborted = false; + Object.defineProperty(this, "_aborted", { + value: false, + writable: true, + }); + } + get aborted() { + return this._aborted; + } + abort() { + this._aborted = true; + if (this.onabort) { + this.onabort(this); + this.onabort = null; + } + } +} diff --git a/amplify/functions/deleteDocument/node_modules/@smithy/abort-controller/dist-es/index.js 
b/amplify/functions/deleteDocument/node_modules/@smithy/abort-controller/dist-es/index.js new file mode 100644 index 0000000..a0f47f7 --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@smithy/abort-controller/dist-es/index.js @@ -0,0 +1,2 @@ +export * from "./AbortController"; +export * from "./AbortSignal"; diff --git a/amplify/functions/deleteDocument/node_modules/@smithy/abort-controller/dist-types/AbortController.d.ts b/amplify/functions/deleteDocument/node_modules/@smithy/abort-controller/dist-types/AbortController.d.ts new file mode 100644 index 0000000..007f0f6 --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@smithy/abort-controller/dist-types/AbortController.d.ts @@ -0,0 +1,16 @@ +import { AbortController as DeprecatedAbortController } from "@smithy/types"; +import { AbortSignal } from "./AbortSignal"; +/** + * @public + */ +export { DeprecatedAbortController as IAbortController }; +/** + * @deprecated This implementation was added as Node.js didn't support AbortController prior to 15.x + * Use native implementation in browsers or Node.js \>=15.4.0. 
+ * + * @public + */ +export declare class AbortController implements DeprecatedAbortController { + readonly signal: AbortSignal; + abort(): void; +} diff --git a/amplify/functions/deleteDocument/node_modules/@smithy/abort-controller/dist-types/AbortSignal.d.ts b/amplify/functions/deleteDocument/node_modules/@smithy/abort-controller/dist-types/AbortSignal.d.ts new file mode 100644 index 0000000..a97c3dc --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@smithy/abort-controller/dist-types/AbortSignal.d.ts @@ -0,0 +1,21 @@ +import { AbortHandler, AbortSignal as DeprecatedAbortSignal } from "@smithy/types"; +/** + * @public + */ +export { AbortHandler, DeprecatedAbortSignal as IAbortSignal }; +/** + * @public + */ +export declare class AbortSignal implements DeprecatedAbortSignal { + onabort: AbortHandler | null; + private _aborted; + constructor(); + /** + * Whether the associated operation has already been cancelled. + */ + get aborted(): boolean; + /** + * @internal + */ + abort(): void; +} diff --git a/amplify/functions/deleteDocument/node_modules/@smithy/abort-controller/dist-types/index.d.ts b/amplify/functions/deleteDocument/node_modules/@smithy/abort-controller/dist-types/index.d.ts new file mode 100644 index 0000000..8788e2f --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@smithy/abort-controller/dist-types/index.d.ts @@ -0,0 +1,9 @@ +/** + * This implementation was added as Node.js didn't support AbortController prior to 15.x + * Use native implementation in browsers or Node.js \>=15.4.0. 
+ * + * @deprecated Use standard implementations in [Browsers](https://developer.mozilla.org/en-US/docs/Web/API/AbortController) and [Node.js](https://nodejs.org/docs/latest/api/globals.html#class-abortcontroller) + * @packageDocumentation + */ +export * from "./AbortController"; +export * from "./AbortSignal"; diff --git a/amplify/functions/deleteDocument/node_modules/@smithy/abort-controller/dist-types/ts3.4/AbortController.d.ts b/amplify/functions/deleteDocument/node_modules/@smithy/abort-controller/dist-types/ts3.4/AbortController.d.ts new file mode 100644 index 0000000..89457d4 --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@smithy/abort-controller/dist-types/ts3.4/AbortController.d.ts @@ -0,0 +1,16 @@ +import { AbortController as DeprecatedAbortController } from "@smithy/types"; +import { AbortSignal } from "./AbortSignal"; +/** + * @public + */ +export { DeprecatedAbortController as IAbortController }; +/** + * @deprecated This implementation was added as Node.js didn't support AbortController prior to 15.x + * Use native implementation in browsers or Node.js \>=15.4.0. 
+ * + * @public + */ +export declare class AbortController implements DeprecatedAbortController { + readonly signal: AbortSignal; + abort(): void; +} diff --git a/amplify/functions/deleteDocument/node_modules/@smithy/abort-controller/dist-types/ts3.4/AbortSignal.d.ts b/amplify/functions/deleteDocument/node_modules/@smithy/abort-controller/dist-types/ts3.4/AbortSignal.d.ts new file mode 100644 index 0000000..92130a3 --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@smithy/abort-controller/dist-types/ts3.4/AbortSignal.d.ts @@ -0,0 +1,21 @@ +import { AbortHandler, AbortSignal as DeprecatedAbortSignal } from "@smithy/types"; +/** + * @public + */ +export { AbortHandler, DeprecatedAbortSignal as IAbortSignal }; +/** + * @public + */ +export declare class AbortSignal implements DeprecatedAbortSignal { + onabort: AbortHandler | null; + private _aborted; + constructor(); + /* + * Whether the associated operation has already been cancelled. + */ + readonly aborted: boolean; + /** + * @internal + */ + abort(): void; +} diff --git a/amplify/functions/deleteDocument/node_modules/@smithy/abort-controller/dist-types/ts3.4/index.d.ts b/amplify/functions/deleteDocument/node_modules/@smithy/abort-controller/dist-types/ts3.4/index.d.ts new file mode 100644 index 0000000..5a907b0 --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@smithy/abort-controller/dist-types/ts3.4/index.d.ts @@ -0,0 +1,9 @@ +/** + * This implementation was added as Node.js didn't support AbortController prior to 15.x + * Use native implementation in browsers or Node.js \>=15.4.0. 
+ * + * @deprecated Use standard implementations in [Browsers](https://developer.mozilla.org/en-US/docs/Web/API/AbortController) and [Node.js](https://nodejs.org/docs/latest/api/globals.html#class-abortcontroller) + * @packageDocumentation + */ +export * from "./AbortController"; +export * from "./AbortSignal"; diff --git a/amplify/functions/deleteDocument/node_modules/@smithy/abort-controller/package.json b/amplify/functions/deleteDocument/node_modules/@smithy/abort-controller/package.json new file mode 100644 index 0000000..b7e5769 --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@smithy/abort-controller/package.json @@ -0,0 +1,62 @@ +{ + "name": "@smithy/abort-controller", + "version": "4.0.2", + "description": "A simple abort controller library", + "main": "./dist-cjs/index.js", + "module": "./dist-es/index.js", + "types": "./dist-types/index.d.ts", + "scripts": { + "build": "concurrently 'yarn:build:cjs' 'yarn:build:es' 'yarn:build:types && yarn build:types:downlevel'", + "build:cjs": "node ../../scripts/inline abort-controller", + "build:es": "yarn g:tsc -p tsconfig.es.json", + "build:types": "yarn g:tsc -p tsconfig.types.json", + "build:types:downlevel": "rimraf dist-types/ts3.4 && downlevel-dts dist-types dist-types/ts3.4", + "stage-release": "rimraf ./.release && yarn pack && mkdir ./.release && tar zxvf ./package.tgz --directory ./.release && rm ./package.tgz", + "clean": "rimraf ./dist-* && rimraf *.tsbuildinfo || exit 0", + "lint": "eslint -c ../../.eslintrc.js \"src/**/*.ts\"", + "format": "prettier --config ../../prettier.config.js --ignore-path ../../.prettierignore --write \"**/*.{ts,md,json}\"", + "extract:docs": "api-extractor run --local", + "test": "yarn g:vitest run", + "test:watch": "yarn g:vitest watch" + }, + "author": { + "name": "AWS SDK for JavaScript Team", + "url": "https://aws.amazon.com/javascript/" + }, + "license": "Apache-2.0", + "dependencies": { + "@smithy/types": "^4.2.0", + "tslib": "^2.6.2" + }, + "engines": { 
+ "node": ">=18.0.0" + }, + "typesVersions": { + "<4.0": { + "dist-types/*": [ + "dist-types/ts3.4/*" + ] + } + }, + "files": [ + "dist-*/**" + ], + "homepage": "https://github.com/smithy-lang/smithy-typescript/tree/main/packages/abort-controller", + "repository": { + "type": "git", + "url": "https://github.com/smithy-lang/smithy-typescript.git", + "directory": "packages/abort-controller" + }, + "devDependencies": { + "concurrently": "7.0.0", + "downlevel-dts": "0.10.1", + "rimraf": "3.0.2", + "typedoc": "0.23.23" + }, + "typedoc": { + "entryPoint": "src/index.ts" + }, + "publishConfig": { + "directory": ".release/package" + } +} \ No newline at end of file diff --git a/amplify/functions/deleteDocument/node_modules/@smithy/config-resolver/LICENSE b/amplify/functions/deleteDocument/node_modules/@smithy/config-resolver/LICENSE new file mode 100644 index 0000000..7b6491b --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@smithy/config-resolver/LICENSE @@ -0,0 +1,201 @@ +Apache License + Version 2.0, January 2004 + http://www.apache.org/licenses/ + + TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION + + 1. Definitions. + + "License" shall mean the terms and conditions for use, reproduction, + and distribution as defined by Sections 1 through 9 of this document. + + "Licensor" shall mean the copyright owner or entity authorized by + the copyright owner that is granting the License. + + "Legal Entity" shall mean the union of the acting entity and all + other entities that control, are controlled by, or are under common + control with that entity. For the purposes of this definition, + "control" means (i) the power, direct or indirect, to cause the + direction or management of such entity, whether by contract or + otherwise, or (ii) ownership of fifty percent (50%) or more of the + outstanding shares, or (iii) beneficial ownership of such entity. 
+ + "You" (or "Your") shall mean an individual or Legal Entity + exercising permissions granted by this License. + + "Source" form shall mean the preferred form for making modifications, + including but not limited to software source code, documentation + source, and configuration files. + + "Object" form shall mean any form resulting from mechanical + transformation or translation of a Source form, including but + not limited to compiled object code, generated documentation, + and conversions to other media types. + + "Work" shall mean the work of authorship, whether in Source or + Object form, made available under the License, as indicated by a + copyright notice that is included in or attached to the work + (an example is provided in the Appendix below). + + "Derivative Works" shall mean any work, whether in Source or Object + form, that is based on (or derived from) the Work and for which the + editorial revisions, annotations, elaborations, or other modifications + represent, as a whole, an original work of authorship. For the purposes + of this License, Derivative Works shall not include works that remain + separable from, or merely link (or bind by name) to the interfaces of, + the Work and Derivative Works thereof. + + "Contribution" shall mean any work of authorship, including + the original version of the Work and any modifications or additions + to that Work or Derivative Works thereof, that is intentionally + submitted to Licensor for inclusion in the Work by the copyright owner + or by an individual or Legal Entity authorized to submit on behalf of + the copyright owner. 
For the purposes of this definition, "submitted" + means any form of electronic, verbal, or written communication sent + to the Licensor or its representatives, including but not limited to + communication on electronic mailing lists, source code control systems, + and issue tracking systems that are managed by, or on behalf of, the + Licensor for the purpose of discussing and improving the Work, but + excluding communication that is conspicuously marked or otherwise + designated in writing by the copyright owner as "Not a Contribution." + + "Contributor" shall mean Licensor and any individual or Legal Entity + on behalf of whom a Contribution has been received by Licensor and + subsequently incorporated within the Work. + + 2. Grant of Copyright License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + copyright license to reproduce, prepare Derivative Works of, + publicly display, publicly perform, sublicense, and distribute the + Work and such Derivative Works in Source or Object form. + + 3. Grant of Patent License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + (except as stated in this section) patent license to make, have made, + use, offer to sell, sell, import, and otherwise transfer the Work, + where such license applies only to those patent claims licensable + by such Contributor that are necessarily infringed by their + Contribution(s) alone or by combination of their Contribution(s) + with the Work to which such Contribution(s) was submitted. 
If You + institute patent litigation against any entity (including a + cross-claim or counterclaim in a lawsuit) alleging that the Work + or a Contribution incorporated within the Work constitutes direct + or contributory patent infringement, then any patent licenses + granted to You under this License for that Work shall terminate + as of the date such litigation is filed. + + 4. Redistribution. You may reproduce and distribute copies of the + Work or Derivative Works thereof in any medium, with or without + modifications, and in Source or Object form, provided that You + meet the following conditions: + + (a) You must give any other recipients of the Work or + Derivative Works a copy of this License; and + + (b) You must cause any modified files to carry prominent notices + stating that You changed the files; and + + (c) You must retain, in the Source form of any Derivative Works + that You distribute, all copyright, patent, trademark, and + attribution notices from the Source form of the Work, + excluding those notices that do not pertain to any part of + the Derivative Works; and + + (d) If the Work includes a "NOTICE" text file as part of its + distribution, then any Derivative Works that You distribute must + include a readable copy of the attribution notices contained + within such NOTICE file, excluding those notices that do not + pertain to any part of the Derivative Works, in at least one + of the following places: within a NOTICE text file distributed + as part of the Derivative Works; within the Source form or + documentation, if provided along with the Derivative Works; or, + within a display generated by the Derivative Works, if and + wherever such third-party notices normally appear. The contents + of the NOTICE file are for informational purposes only and + do not modify the License. 
You may add Your own attribution + notices within Derivative Works that You distribute, alongside + or as an addendum to the NOTICE text from the Work, provided + that such additional attribution notices cannot be construed + as modifying the License. + + You may add Your own copyright statement to Your modifications and + may provide additional or different license terms and conditions + for use, reproduction, or distribution of Your modifications, or + for any such Derivative Works as a whole, provided Your use, + reproduction, and distribution of the Work otherwise complies with + the conditions stated in this License. + + 5. Submission of Contributions. Unless You explicitly state otherwise, + any Contribution intentionally submitted for inclusion in the Work + by You to the Licensor shall be under the terms and conditions of + this License, without any additional terms or conditions. + Notwithstanding the above, nothing herein shall supersede or modify + the terms of any separate license agreement you may have executed + with Licensor regarding such Contributions. + + 6. Trademarks. This License does not grant permission to use the trade + names, trademarks, service marks, or product names of the Licensor, + except as required for reasonable and customary use in describing the + origin of the Work and reproducing the content of the NOTICE file. + + 7. Disclaimer of Warranty. Unless required by applicable law or + agreed to in writing, Licensor provides the Work (and each + Contributor provides its Contributions) on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or + implied, including, without limitation, any warranties or conditions + of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A + PARTICULAR PURPOSE. You are solely responsible for determining the + appropriateness of using or redistributing the Work and assume any + risks associated with Your exercise of permissions under this License. + + 8. 
Limitation of Liability. In no event and under no legal theory, + whether in tort (including negligence), contract, or otherwise, + unless required by applicable law (such as deliberate and grossly + negligent acts) or agreed to in writing, shall any Contributor be + liable to You for damages, including any direct, indirect, special, + incidental, or consequential damages of any character arising as a + result of this License or out of the use or inability to use the + Work (including but not limited to damages for loss of goodwill, + work stoppage, computer failure or malfunction, or any and all + other commercial damages or losses), even if such Contributor + has been advised of the possibility of such damages. + + 9. Accepting Warranty or Additional Liability. While redistributing + the Work or Derivative Works thereof, You may choose to offer, + and charge a fee for, acceptance of support, warranty, indemnity, + or other liability obligations and/or rights consistent with this + License. However, in accepting such obligations, You may act only + on Your own behalf and on Your sole responsibility, not on behalf + of any other Contributor, and only if You agree to indemnify, + defend, and hold each Contributor harmless for any liability + incurred by, or claims asserted against, such Contributor by reason + of your accepting any such warranty or additional liability. + + END OF TERMS AND CONDITIONS + + APPENDIX: How to apply the Apache License to your work. + + To apply the Apache License to your work, attach the following + boilerplate notice, with the fields enclosed by brackets "{}" + replaced with your own identifying information. (Don't include + the brackets!) The text should be enclosed in the appropriate + comment syntax for the file format. We also recommend that a + file or class name and description of purpose be included on the + same "printed page" as the copyright notice for easier + identification within third-party archives. 
+ + Copyright 2018-2020 Amazon.com, Inc. or its affiliates. All Rights Reserved. + + Licensed under the Apache License, Version 2.0 (the "License"); + you may not use this file except in compliance with the License. + You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + + Unless required by applicable law or agreed to in writing, software + distributed under the License is distributed on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + See the License for the specific language governing permissions and + limitations under the License. \ No newline at end of file diff --git a/amplify/functions/deleteDocument/node_modules/@smithy/config-resolver/README.md b/amplify/functions/deleteDocument/node_modules/@smithy/config-resolver/README.md new file mode 100644 index 0000000..2a25da2 --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@smithy/config-resolver/README.md @@ -0,0 +1,10 @@ +# @smithy/config-resolver + +[![NPM version](https://img.shields.io/npm/v/@smithy/config-resolver/latest.svg)](https://www.npmjs.com/package/@smithy/config-resolver) +[![NPM downloads](https://img.shields.io/npm/dm/@smithy/config-resolver.svg)](https://www.npmjs.com/package/@smithy/config-resolver) + +> An internal package + +## Usage + +You probably shouldn't, at least directly. 
diff --git a/amplify/functions/deleteDocument/node_modules/@smithy/config-resolver/dist-cjs/endpointsConfig/NodeUseDualstackEndpointConfigOptions.js b/amplify/functions/deleteDocument/node_modules/@smithy/config-resolver/dist-cjs/endpointsConfig/NodeUseDualstackEndpointConfigOptions.js new file mode 100644 index 0000000..0440577 --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@smithy/config-resolver/dist-cjs/endpointsConfig/NodeUseDualstackEndpointConfigOptions.js @@ -0,0 +1 @@ +module.exports = require("../index.js"); \ No newline at end of file diff --git a/amplify/functions/deleteDocument/node_modules/@smithy/config-resolver/dist-cjs/endpointsConfig/NodeUseFipsEndpointConfigOptions.js b/amplify/functions/deleteDocument/node_modules/@smithy/config-resolver/dist-cjs/endpointsConfig/NodeUseFipsEndpointConfigOptions.js new file mode 100644 index 0000000..0440577 --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@smithy/config-resolver/dist-cjs/endpointsConfig/NodeUseFipsEndpointConfigOptions.js @@ -0,0 +1 @@ +module.exports = require("../index.js"); \ No newline at end of file diff --git a/amplify/functions/deleteDocument/node_modules/@smithy/config-resolver/dist-cjs/endpointsConfig/index.js b/amplify/functions/deleteDocument/node_modules/@smithy/config-resolver/dist-cjs/endpointsConfig/index.js new file mode 100644 index 0000000..0440577 --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@smithy/config-resolver/dist-cjs/endpointsConfig/index.js @@ -0,0 +1 @@ +module.exports = require("../index.js"); \ No newline at end of file diff --git a/amplify/functions/deleteDocument/node_modules/@smithy/config-resolver/dist-cjs/endpointsConfig/resolveCustomEndpointsConfig.js b/amplify/functions/deleteDocument/node_modules/@smithy/config-resolver/dist-cjs/endpointsConfig/resolveCustomEndpointsConfig.js new file mode 100644 index 0000000..0440577 --- /dev/null +++ 
b/amplify/functions/deleteDocument/node_modules/@smithy/config-resolver/dist-cjs/endpointsConfig/resolveCustomEndpointsConfig.js @@ -0,0 +1 @@ +module.exports = require("../index.js"); \ No newline at end of file diff --git a/amplify/functions/deleteDocument/node_modules/@smithy/config-resolver/dist-cjs/endpointsConfig/resolveEndpointsConfig.js b/amplify/functions/deleteDocument/node_modules/@smithy/config-resolver/dist-cjs/endpointsConfig/resolveEndpointsConfig.js new file mode 100644 index 0000000..0440577 --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@smithy/config-resolver/dist-cjs/endpointsConfig/resolveEndpointsConfig.js @@ -0,0 +1 @@ +module.exports = require("../index.js"); \ No newline at end of file diff --git a/amplify/functions/deleteDocument/node_modules/@smithy/config-resolver/dist-cjs/endpointsConfig/utils/getEndpointFromRegion.js b/amplify/functions/deleteDocument/node_modules/@smithy/config-resolver/dist-cjs/endpointsConfig/utils/getEndpointFromRegion.js new file mode 100644 index 0000000..8817412 --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@smithy/config-resolver/dist-cjs/endpointsConfig/utils/getEndpointFromRegion.js @@ -0,0 +1 @@ +module.exports = require("../../index.js"); \ No newline at end of file diff --git a/amplify/functions/deleteDocument/node_modules/@smithy/config-resolver/dist-cjs/index.js b/amplify/functions/deleteDocument/node_modules/@smithy/config-resolver/dist-cjs/index.js new file mode 100644 index 0000000..42f7a4c --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@smithy/config-resolver/dist-cjs/index.js @@ -0,0 +1,228 @@ +var __defProp = Object.defineProperty; +var __getOwnPropDesc = Object.getOwnPropertyDescriptor; +var __getOwnPropNames = Object.getOwnPropertyNames; +var __hasOwnProp = Object.prototype.hasOwnProperty; +var __name = (target, value) => __defProp(target, "name", { value, configurable: true }); +var __export = (target, all) => { + for (var name in all) + 
__defProp(target, name, { get: all[name], enumerable: true }); +}; +var __copyProps = (to, from, except, desc) => { + if (from && typeof from === "object" || typeof from === "function") { + for (let key of __getOwnPropNames(from)) + if (!__hasOwnProp.call(to, key) && key !== except) + __defProp(to, key, { get: () => from[key], enumerable: !(desc = __getOwnPropDesc(from, key)) || desc.enumerable }); + } + return to; +}; +var __toCommonJS = (mod) => __copyProps(__defProp({}, "__esModule", { value: true }), mod); + +// src/index.ts +var src_exports = {}; +__export(src_exports, { + CONFIG_USE_DUALSTACK_ENDPOINT: () => CONFIG_USE_DUALSTACK_ENDPOINT, + CONFIG_USE_FIPS_ENDPOINT: () => CONFIG_USE_FIPS_ENDPOINT, + DEFAULT_USE_DUALSTACK_ENDPOINT: () => DEFAULT_USE_DUALSTACK_ENDPOINT, + DEFAULT_USE_FIPS_ENDPOINT: () => DEFAULT_USE_FIPS_ENDPOINT, + ENV_USE_DUALSTACK_ENDPOINT: () => ENV_USE_DUALSTACK_ENDPOINT, + ENV_USE_FIPS_ENDPOINT: () => ENV_USE_FIPS_ENDPOINT, + NODE_REGION_CONFIG_FILE_OPTIONS: () => NODE_REGION_CONFIG_FILE_OPTIONS, + NODE_REGION_CONFIG_OPTIONS: () => NODE_REGION_CONFIG_OPTIONS, + NODE_USE_DUALSTACK_ENDPOINT_CONFIG_OPTIONS: () => NODE_USE_DUALSTACK_ENDPOINT_CONFIG_OPTIONS, + NODE_USE_FIPS_ENDPOINT_CONFIG_OPTIONS: () => NODE_USE_FIPS_ENDPOINT_CONFIG_OPTIONS, + REGION_ENV_NAME: () => REGION_ENV_NAME, + REGION_INI_NAME: () => REGION_INI_NAME, + getRegionInfo: () => getRegionInfo, + resolveCustomEndpointsConfig: () => resolveCustomEndpointsConfig, + resolveEndpointsConfig: () => resolveEndpointsConfig, + resolveRegionConfig: () => resolveRegionConfig +}); +module.exports = __toCommonJS(src_exports); + +// src/endpointsConfig/NodeUseDualstackEndpointConfigOptions.ts +var import_util_config_provider = require("@smithy/util-config-provider"); +var ENV_USE_DUALSTACK_ENDPOINT = "AWS_USE_DUALSTACK_ENDPOINT"; +var CONFIG_USE_DUALSTACK_ENDPOINT = "use_dualstack_endpoint"; +var DEFAULT_USE_DUALSTACK_ENDPOINT = false; +var NODE_USE_DUALSTACK_ENDPOINT_CONFIG_OPTIONS = { + 
environmentVariableSelector: (env) => (0, import_util_config_provider.booleanSelector)(env, ENV_USE_DUALSTACK_ENDPOINT, import_util_config_provider.SelectorType.ENV), + configFileSelector: (profile) => (0, import_util_config_provider.booleanSelector)(profile, CONFIG_USE_DUALSTACK_ENDPOINT, import_util_config_provider.SelectorType.CONFIG), + default: false +}; + +// src/endpointsConfig/NodeUseFipsEndpointConfigOptions.ts + +var ENV_USE_FIPS_ENDPOINT = "AWS_USE_FIPS_ENDPOINT"; +var CONFIG_USE_FIPS_ENDPOINT = "use_fips_endpoint"; +var DEFAULT_USE_FIPS_ENDPOINT = false; +var NODE_USE_FIPS_ENDPOINT_CONFIG_OPTIONS = { + environmentVariableSelector: (env) => (0, import_util_config_provider.booleanSelector)(env, ENV_USE_FIPS_ENDPOINT, import_util_config_provider.SelectorType.ENV), + configFileSelector: (profile) => (0, import_util_config_provider.booleanSelector)(profile, CONFIG_USE_FIPS_ENDPOINT, import_util_config_provider.SelectorType.CONFIG), + default: false +}; + +// src/endpointsConfig/resolveCustomEndpointsConfig.ts +var import_util_middleware = require("@smithy/util-middleware"); +var resolveCustomEndpointsConfig = /* @__PURE__ */ __name((input) => { + const { tls, endpoint, urlParser, useDualstackEndpoint } = input; + return Object.assign(input, { + tls: tls ?? true, + endpoint: (0, import_util_middleware.normalizeProvider)(typeof endpoint === "string" ? urlParser(endpoint) : endpoint), + isCustomEndpoint: true, + useDualstackEndpoint: (0, import_util_middleware.normalizeProvider)(useDualstackEndpoint ?? 
false) + }); +}, "resolveCustomEndpointsConfig"); + +// src/endpointsConfig/resolveEndpointsConfig.ts + + +// src/endpointsConfig/utils/getEndpointFromRegion.ts +var getEndpointFromRegion = /* @__PURE__ */ __name(async (input) => { + const { tls = true } = input; + const region = await input.region(); + const dnsHostRegex = new RegExp(/^([a-zA-Z0-9]|[a-zA-Z0-9][a-zA-Z0-9-]{0,61}[a-zA-Z0-9])$/); + if (!dnsHostRegex.test(region)) { + throw new Error("Invalid region in client config"); + } + const useDualstackEndpoint = await input.useDualstackEndpoint(); + const useFipsEndpoint = await input.useFipsEndpoint(); + const { hostname } = await input.regionInfoProvider(region, { useDualstackEndpoint, useFipsEndpoint }) ?? {}; + if (!hostname) { + throw new Error("Cannot resolve hostname from client config"); + } + return input.urlParser(`${tls ? "https:" : "http:"}//${hostname}`); +}, "getEndpointFromRegion"); + +// src/endpointsConfig/resolveEndpointsConfig.ts +var resolveEndpointsConfig = /* @__PURE__ */ __name((input) => { + const useDualstackEndpoint = (0, import_util_middleware.normalizeProvider)(input.useDualstackEndpoint ?? false); + const { endpoint, useFipsEndpoint, urlParser, tls } = input; + return Object.assign(input, { + tls: tls ?? true, + endpoint: endpoint ? (0, import_util_middleware.normalizeProvider)(typeof endpoint === "string" ? 
urlParser(endpoint) : endpoint) : () => getEndpointFromRegion({ ...input, useDualstackEndpoint, useFipsEndpoint }), + isCustomEndpoint: !!endpoint, + useDualstackEndpoint + }); +}, "resolveEndpointsConfig"); + +// src/regionConfig/config.ts +var REGION_ENV_NAME = "AWS_REGION"; +var REGION_INI_NAME = "region"; +var NODE_REGION_CONFIG_OPTIONS = { + environmentVariableSelector: (env) => env[REGION_ENV_NAME], + configFileSelector: (profile) => profile[REGION_INI_NAME], + default: () => { + throw new Error("Region is missing"); + } +}; +var NODE_REGION_CONFIG_FILE_OPTIONS = { + preferredFile: "credentials" +}; + +// src/regionConfig/isFipsRegion.ts +var isFipsRegion = /* @__PURE__ */ __name((region) => typeof region === "string" && (region.startsWith("fips-") || region.endsWith("-fips")), "isFipsRegion"); + +// src/regionConfig/getRealRegion.ts +var getRealRegion = /* @__PURE__ */ __name((region) => isFipsRegion(region) ? ["fips-aws-global", "aws-fips"].includes(region) ? "us-east-1" : region.replace(/fips-(dkr-|prod-)?|-fips/, "") : region, "getRealRegion"); + +// src/regionConfig/resolveRegionConfig.ts +var resolveRegionConfig = /* @__PURE__ */ __name((input) => { + const { region, useFipsEndpoint } = input; + if (!region) { + throw new Error("Region is missing"); + } + return Object.assign(input, { + region: async () => { + if (typeof region === "string") { + return getRealRegion(region); + } + const providedRegion = await region(); + return getRealRegion(providedRegion); + }, + useFipsEndpoint: async () => { + const providedRegion = typeof region === "string" ? region : await region(); + if (isFipsRegion(providedRegion)) { + return true; + } + return typeof useFipsEndpoint !== "function" ? 
Promise.resolve(!!useFipsEndpoint) : useFipsEndpoint(); + } + }); +}, "resolveRegionConfig"); + +// src/regionInfo/getHostnameFromVariants.ts +var getHostnameFromVariants = /* @__PURE__ */ __name((variants = [], { useFipsEndpoint, useDualstackEndpoint }) => variants.find( + ({ tags }) => useFipsEndpoint === tags.includes("fips") && useDualstackEndpoint === tags.includes("dualstack") +)?.hostname, "getHostnameFromVariants"); + +// src/regionInfo/getResolvedHostname.ts +var getResolvedHostname = /* @__PURE__ */ __name((resolvedRegion, { regionHostname, partitionHostname }) => regionHostname ? regionHostname : partitionHostname ? partitionHostname.replace("{region}", resolvedRegion) : void 0, "getResolvedHostname"); + +// src/regionInfo/getResolvedPartition.ts +var getResolvedPartition = /* @__PURE__ */ __name((region, { partitionHash }) => Object.keys(partitionHash || {}).find((key) => partitionHash[key].regions.includes(region)) ?? "aws", "getResolvedPartition"); + +// src/regionInfo/getResolvedSigningRegion.ts +var getResolvedSigningRegion = /* @__PURE__ */ __name((hostname, { signingRegion, regionRegex, useFipsEndpoint }) => { + if (signingRegion) { + return signingRegion; + } else if (useFipsEndpoint) { + const regionRegexJs = regionRegex.replace("\\\\", "\\").replace(/^\^/g, "\\.").replace(/\$$/g, "\\."); + const regionRegexmatchArray = hostname.match(regionRegexJs); + if (regionRegexmatchArray) { + return regionRegexmatchArray[0].slice(1, -1); + } + } +}, "getResolvedSigningRegion"); + +// src/regionInfo/getRegionInfo.ts +var getRegionInfo = /* @__PURE__ */ __name((region, { + useFipsEndpoint = false, + useDualstackEndpoint = false, + signingService, + regionHash, + partitionHash +}) => { + const partition = getResolvedPartition(region, { partitionHash }); + const resolvedRegion = region in regionHash ? region : partitionHash[partition]?.endpoint ?? 
region; + const hostnameOptions = { useFipsEndpoint, useDualstackEndpoint }; + const regionHostname = getHostnameFromVariants(regionHash[resolvedRegion]?.variants, hostnameOptions); + const partitionHostname = getHostnameFromVariants(partitionHash[partition]?.variants, hostnameOptions); + const hostname = getResolvedHostname(resolvedRegion, { regionHostname, partitionHostname }); + if (hostname === void 0) { + throw new Error(`Endpoint resolution failed for: ${{ resolvedRegion, useFipsEndpoint, useDualstackEndpoint }}`); + } + const signingRegion = getResolvedSigningRegion(hostname, { + signingRegion: regionHash[resolvedRegion]?.signingRegion, + regionRegex: partitionHash[partition].regionRegex, + useFipsEndpoint + }); + return { + partition, + signingService, + hostname, + ...signingRegion && { signingRegion }, + ...regionHash[resolvedRegion]?.signingService && { + signingService: regionHash[resolvedRegion].signingService + } + }; +}, "getRegionInfo"); +// Annotate the CommonJS export names for ESM import in node: + +0 && (module.exports = { + ENV_USE_DUALSTACK_ENDPOINT, + CONFIG_USE_DUALSTACK_ENDPOINT, + DEFAULT_USE_DUALSTACK_ENDPOINT, + NODE_USE_DUALSTACK_ENDPOINT_CONFIG_OPTIONS, + ENV_USE_FIPS_ENDPOINT, + CONFIG_USE_FIPS_ENDPOINT, + DEFAULT_USE_FIPS_ENDPOINT, + NODE_USE_FIPS_ENDPOINT_CONFIG_OPTIONS, + resolveCustomEndpointsConfig, + resolveEndpointsConfig, + REGION_ENV_NAME, + REGION_INI_NAME, + NODE_REGION_CONFIG_OPTIONS, + NODE_REGION_CONFIG_FILE_OPTIONS, + resolveRegionConfig, + getRegionInfo +}); + diff --git a/amplify/functions/deleteDocument/node_modules/@smithy/config-resolver/dist-cjs/regionConfig/config.js b/amplify/functions/deleteDocument/node_modules/@smithy/config-resolver/dist-cjs/regionConfig/config.js new file mode 100644 index 0000000..0440577 --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@smithy/config-resolver/dist-cjs/regionConfig/config.js @@ -0,0 +1 @@ +module.exports = require("../index.js"); \ No newline at end of 
file diff --git a/amplify/functions/deleteDocument/node_modules/@smithy/config-resolver/dist-cjs/regionConfig/getRealRegion.js b/amplify/functions/deleteDocument/node_modules/@smithy/config-resolver/dist-cjs/regionConfig/getRealRegion.js new file mode 100644 index 0000000..0440577 --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@smithy/config-resolver/dist-cjs/regionConfig/getRealRegion.js @@ -0,0 +1 @@ +module.exports = require("../index.js"); \ No newline at end of file diff --git a/amplify/functions/deleteDocument/node_modules/@smithy/config-resolver/dist-cjs/regionConfig/index.js b/amplify/functions/deleteDocument/node_modules/@smithy/config-resolver/dist-cjs/regionConfig/index.js new file mode 100644 index 0000000..0440577 --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@smithy/config-resolver/dist-cjs/regionConfig/index.js @@ -0,0 +1 @@ +module.exports = require("../index.js"); \ No newline at end of file diff --git a/amplify/functions/deleteDocument/node_modules/@smithy/config-resolver/dist-cjs/regionConfig/isFipsRegion.js b/amplify/functions/deleteDocument/node_modules/@smithy/config-resolver/dist-cjs/regionConfig/isFipsRegion.js new file mode 100644 index 0000000..0440577 --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@smithy/config-resolver/dist-cjs/regionConfig/isFipsRegion.js @@ -0,0 +1 @@ +module.exports = require("../index.js"); \ No newline at end of file diff --git a/amplify/functions/deleteDocument/node_modules/@smithy/config-resolver/dist-cjs/regionConfig/resolveRegionConfig.js b/amplify/functions/deleteDocument/node_modules/@smithy/config-resolver/dist-cjs/regionConfig/resolveRegionConfig.js new file mode 100644 index 0000000..0440577 --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@smithy/config-resolver/dist-cjs/regionConfig/resolveRegionConfig.js @@ -0,0 +1 @@ +module.exports = require("../index.js"); \ No newline at end of file diff --git 
a/amplify/functions/deleteDocument/node_modules/@smithy/config-resolver/dist-cjs/regionInfo/EndpointVariant.js b/amplify/functions/deleteDocument/node_modules/@smithy/config-resolver/dist-cjs/regionInfo/EndpointVariant.js new file mode 100644 index 0000000..0440577 --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@smithy/config-resolver/dist-cjs/regionInfo/EndpointVariant.js @@ -0,0 +1 @@ +module.exports = require("../index.js"); \ No newline at end of file diff --git a/amplify/functions/deleteDocument/node_modules/@smithy/config-resolver/dist-cjs/regionInfo/EndpointVariantTag.js b/amplify/functions/deleteDocument/node_modules/@smithy/config-resolver/dist-cjs/regionInfo/EndpointVariantTag.js new file mode 100644 index 0000000..0440577 --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@smithy/config-resolver/dist-cjs/regionInfo/EndpointVariantTag.js @@ -0,0 +1 @@ +module.exports = require("../index.js"); \ No newline at end of file diff --git a/amplify/functions/deleteDocument/node_modules/@smithy/config-resolver/dist-cjs/regionInfo/PartitionHash.js b/amplify/functions/deleteDocument/node_modules/@smithy/config-resolver/dist-cjs/regionInfo/PartitionHash.js new file mode 100644 index 0000000..0440577 --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@smithy/config-resolver/dist-cjs/regionInfo/PartitionHash.js @@ -0,0 +1 @@ +module.exports = require("../index.js"); \ No newline at end of file diff --git a/amplify/functions/deleteDocument/node_modules/@smithy/config-resolver/dist-cjs/regionInfo/RegionHash.js b/amplify/functions/deleteDocument/node_modules/@smithy/config-resolver/dist-cjs/regionInfo/RegionHash.js new file mode 100644 index 0000000..0440577 --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@smithy/config-resolver/dist-cjs/regionInfo/RegionHash.js @@ -0,0 +1 @@ +module.exports = require("../index.js"); \ No newline at end of file diff --git 
a/amplify/functions/deleteDocument/node_modules/@smithy/config-resolver/dist-cjs/regionInfo/getHostnameFromVariants.js b/amplify/functions/deleteDocument/node_modules/@smithy/config-resolver/dist-cjs/regionInfo/getHostnameFromVariants.js new file mode 100644 index 0000000..0440577 --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@smithy/config-resolver/dist-cjs/regionInfo/getHostnameFromVariants.js @@ -0,0 +1 @@ +module.exports = require("../index.js"); \ No newline at end of file diff --git a/amplify/functions/deleteDocument/node_modules/@smithy/config-resolver/dist-cjs/regionInfo/getRegionInfo.js b/amplify/functions/deleteDocument/node_modules/@smithy/config-resolver/dist-cjs/regionInfo/getRegionInfo.js new file mode 100644 index 0000000..0440577 --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@smithy/config-resolver/dist-cjs/regionInfo/getRegionInfo.js @@ -0,0 +1 @@ +module.exports = require("../index.js"); \ No newline at end of file diff --git a/amplify/functions/deleteDocument/node_modules/@smithy/config-resolver/dist-cjs/regionInfo/getResolvedHostname.js b/amplify/functions/deleteDocument/node_modules/@smithy/config-resolver/dist-cjs/regionInfo/getResolvedHostname.js new file mode 100644 index 0000000..0440577 --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@smithy/config-resolver/dist-cjs/regionInfo/getResolvedHostname.js @@ -0,0 +1 @@ +module.exports = require("../index.js"); \ No newline at end of file diff --git a/amplify/functions/deleteDocument/node_modules/@smithy/config-resolver/dist-cjs/regionInfo/getResolvedPartition.js b/amplify/functions/deleteDocument/node_modules/@smithy/config-resolver/dist-cjs/regionInfo/getResolvedPartition.js new file mode 100644 index 0000000..0440577 --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@smithy/config-resolver/dist-cjs/regionInfo/getResolvedPartition.js @@ -0,0 +1 @@ +module.exports = require("../index.js"); \ No newline at end of file diff 
--git a/amplify/functions/deleteDocument/node_modules/@smithy/config-resolver/dist-cjs/regionInfo/getResolvedSigningRegion.js b/amplify/functions/deleteDocument/node_modules/@smithy/config-resolver/dist-cjs/regionInfo/getResolvedSigningRegion.js new file mode 100644 index 0000000..0440577 --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@smithy/config-resolver/dist-cjs/regionInfo/getResolvedSigningRegion.js @@ -0,0 +1 @@ +module.exports = require("../index.js"); \ No newline at end of file diff --git a/amplify/functions/deleteDocument/node_modules/@smithy/config-resolver/dist-cjs/regionInfo/index.js b/amplify/functions/deleteDocument/node_modules/@smithy/config-resolver/dist-cjs/regionInfo/index.js new file mode 100644 index 0000000..0440577 --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@smithy/config-resolver/dist-cjs/regionInfo/index.js @@ -0,0 +1 @@ +module.exports = require("../index.js"); \ No newline at end of file diff --git a/amplify/functions/deleteDocument/node_modules/@smithy/config-resolver/dist-es/endpointsConfig/NodeUseDualstackEndpointConfigOptions.js b/amplify/functions/deleteDocument/node_modules/@smithy/config-resolver/dist-es/endpointsConfig/NodeUseDualstackEndpointConfigOptions.js new file mode 100644 index 0000000..d061567 --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@smithy/config-resolver/dist-es/endpointsConfig/NodeUseDualstackEndpointConfigOptions.js @@ -0,0 +1,9 @@ +import { booleanSelector, SelectorType } from "@smithy/util-config-provider"; +export const ENV_USE_DUALSTACK_ENDPOINT = "AWS_USE_DUALSTACK_ENDPOINT"; +export const CONFIG_USE_DUALSTACK_ENDPOINT = "use_dualstack_endpoint"; +export const DEFAULT_USE_DUALSTACK_ENDPOINT = false; +export const NODE_USE_DUALSTACK_ENDPOINT_CONFIG_OPTIONS = { + environmentVariableSelector: (env) => booleanSelector(env, ENV_USE_DUALSTACK_ENDPOINT, SelectorType.ENV), + configFileSelector: (profile) => booleanSelector(profile, 
CONFIG_USE_DUALSTACK_ENDPOINT, SelectorType.CONFIG), + default: false, +}; diff --git a/amplify/functions/deleteDocument/node_modules/@smithy/config-resolver/dist-es/endpointsConfig/NodeUseFipsEndpointConfigOptions.js b/amplify/functions/deleteDocument/node_modules/@smithy/config-resolver/dist-es/endpointsConfig/NodeUseFipsEndpointConfigOptions.js new file mode 100644 index 0000000..8cac1e9 --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@smithy/config-resolver/dist-es/endpointsConfig/NodeUseFipsEndpointConfigOptions.js @@ -0,0 +1,9 @@ +import { booleanSelector, SelectorType } from "@smithy/util-config-provider"; +export const ENV_USE_FIPS_ENDPOINT = "AWS_USE_FIPS_ENDPOINT"; +export const CONFIG_USE_FIPS_ENDPOINT = "use_fips_endpoint"; +export const DEFAULT_USE_FIPS_ENDPOINT = false; +export const NODE_USE_FIPS_ENDPOINT_CONFIG_OPTIONS = { + environmentVariableSelector: (env) => booleanSelector(env, ENV_USE_FIPS_ENDPOINT, SelectorType.ENV), + configFileSelector: (profile) => booleanSelector(profile, CONFIG_USE_FIPS_ENDPOINT, SelectorType.CONFIG), + default: false, +}; diff --git a/amplify/functions/deleteDocument/node_modules/@smithy/config-resolver/dist-es/endpointsConfig/index.js b/amplify/functions/deleteDocument/node_modules/@smithy/config-resolver/dist-es/endpointsConfig/index.js new file mode 100644 index 0000000..1424c22 --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@smithy/config-resolver/dist-es/endpointsConfig/index.js @@ -0,0 +1,4 @@ +export * from "./NodeUseDualstackEndpointConfigOptions"; +export * from "./NodeUseFipsEndpointConfigOptions"; +export * from "./resolveCustomEndpointsConfig"; +export * from "./resolveEndpointsConfig"; diff --git a/amplify/functions/deleteDocument/node_modules/@smithy/config-resolver/dist-es/endpointsConfig/resolveCustomEndpointsConfig.js b/amplify/functions/deleteDocument/node_modules/@smithy/config-resolver/dist-es/endpointsConfig/resolveCustomEndpointsConfig.js new file mode 100644 
index 0000000..7f9a953 --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@smithy/config-resolver/dist-es/endpointsConfig/resolveCustomEndpointsConfig.js @@ -0,0 +1,10 @@ +import { normalizeProvider } from "@smithy/util-middleware"; +export const resolveCustomEndpointsConfig = (input) => { + const { tls, endpoint, urlParser, useDualstackEndpoint } = input; + return Object.assign(input, { + tls: tls ?? true, + endpoint: normalizeProvider(typeof endpoint === "string" ? urlParser(endpoint) : endpoint), + isCustomEndpoint: true, + useDualstackEndpoint: normalizeProvider(useDualstackEndpoint ?? false), + }); +}; diff --git a/amplify/functions/deleteDocument/node_modules/@smithy/config-resolver/dist-es/endpointsConfig/resolveEndpointsConfig.js b/amplify/functions/deleteDocument/node_modules/@smithy/config-resolver/dist-es/endpointsConfig/resolveEndpointsConfig.js new file mode 100644 index 0000000..440657d --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@smithy/config-resolver/dist-es/endpointsConfig/resolveEndpointsConfig.js @@ -0,0 +1,14 @@ +import { normalizeProvider } from "@smithy/util-middleware"; +import { getEndpointFromRegion } from "./utils/getEndpointFromRegion"; +export const resolveEndpointsConfig = (input) => { + const useDualstackEndpoint = normalizeProvider(input.useDualstackEndpoint ?? false); + const { endpoint, useFipsEndpoint, urlParser, tls } = input; + return Object.assign(input, { + tls: tls ?? true, + endpoint: endpoint + ? normalizeProvider(typeof endpoint === "string" ? 
urlParser(endpoint) : endpoint) + : () => getEndpointFromRegion({ ...input, useDualstackEndpoint, useFipsEndpoint }), + isCustomEndpoint: !!endpoint, + useDualstackEndpoint, + }); +}; diff --git a/amplify/functions/deleteDocument/node_modules/@smithy/config-resolver/dist-es/endpointsConfig/utils/getEndpointFromRegion.js b/amplify/functions/deleteDocument/node_modules/@smithy/config-resolver/dist-es/endpointsConfig/utils/getEndpointFromRegion.js new file mode 100644 index 0000000..5627c32 --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@smithy/config-resolver/dist-es/endpointsConfig/utils/getEndpointFromRegion.js @@ -0,0 +1,15 @@ +export const getEndpointFromRegion = async (input) => { + const { tls = true } = input; + const region = await input.region(); + const dnsHostRegex = new RegExp(/^([a-zA-Z0-9]|[a-zA-Z0-9][a-zA-Z0-9-]{0,61}[a-zA-Z0-9])$/); + if (!dnsHostRegex.test(region)) { + throw new Error("Invalid region in client config"); + } + const useDualstackEndpoint = await input.useDualstackEndpoint(); + const useFipsEndpoint = await input.useFipsEndpoint(); + const { hostname } = (await input.regionInfoProvider(region, { useDualstackEndpoint, useFipsEndpoint })) ?? {}; + if (!hostname) { + throw new Error("Cannot resolve hostname from client config"); + } + return input.urlParser(`${tls ? 
"https:" : "http:"}//${hostname}`); +}; diff --git a/amplify/functions/deleteDocument/node_modules/@smithy/config-resolver/dist-es/index.js b/amplify/functions/deleteDocument/node_modules/@smithy/config-resolver/dist-es/index.js new file mode 100644 index 0000000..61456a7 --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@smithy/config-resolver/dist-es/index.js @@ -0,0 +1,3 @@ +export * from "./endpointsConfig"; +export * from "./regionConfig"; +export * from "./regionInfo"; diff --git a/amplify/functions/deleteDocument/node_modules/@smithy/config-resolver/dist-es/regionConfig/config.js b/amplify/functions/deleteDocument/node_modules/@smithy/config-resolver/dist-es/regionConfig/config.js new file mode 100644 index 0000000..7db9896 --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@smithy/config-resolver/dist-es/regionConfig/config.js @@ -0,0 +1,12 @@ +export const REGION_ENV_NAME = "AWS_REGION"; +export const REGION_INI_NAME = "region"; +export const NODE_REGION_CONFIG_OPTIONS = { + environmentVariableSelector: (env) => env[REGION_ENV_NAME], + configFileSelector: (profile) => profile[REGION_INI_NAME], + default: () => { + throw new Error("Region is missing"); + }, +}; +export const NODE_REGION_CONFIG_FILE_OPTIONS = { + preferredFile: "credentials", +}; diff --git a/amplify/functions/deleteDocument/node_modules/@smithy/config-resolver/dist-es/regionConfig/getRealRegion.js b/amplify/functions/deleteDocument/node_modules/@smithy/config-resolver/dist-es/regionConfig/getRealRegion.js new file mode 100644 index 0000000..8d1246b --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@smithy/config-resolver/dist-es/regionConfig/getRealRegion.js @@ -0,0 +1,6 @@ +import { isFipsRegion } from "./isFipsRegion"; +export const getRealRegion = (region) => isFipsRegion(region) + ? ["fips-aws-global", "aws-fips"].includes(region) + ? 
"us-east-1" + : region.replace(/fips-(dkr-|prod-)?|-fips/, "") + : region; diff --git a/amplify/functions/deleteDocument/node_modules/@smithy/config-resolver/dist-es/regionConfig/index.js b/amplify/functions/deleteDocument/node_modules/@smithy/config-resolver/dist-es/regionConfig/index.js new file mode 100644 index 0000000..83675f7 --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@smithy/config-resolver/dist-es/regionConfig/index.js @@ -0,0 +1,2 @@ +export * from "./config"; +export * from "./resolveRegionConfig"; diff --git a/amplify/functions/deleteDocument/node_modules/@smithy/config-resolver/dist-es/regionConfig/isFipsRegion.js b/amplify/functions/deleteDocument/node_modules/@smithy/config-resolver/dist-es/regionConfig/isFipsRegion.js new file mode 100644 index 0000000..d758967 --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@smithy/config-resolver/dist-es/regionConfig/isFipsRegion.js @@ -0,0 +1 @@ +export const isFipsRegion = (region) => typeof region === "string" && (region.startsWith("fips-") || region.endsWith("-fips")); diff --git a/amplify/functions/deleteDocument/node_modules/@smithy/config-resolver/dist-es/regionConfig/resolveRegionConfig.js b/amplify/functions/deleteDocument/node_modules/@smithy/config-resolver/dist-es/regionConfig/resolveRegionConfig.js new file mode 100644 index 0000000..f88e00f --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@smithy/config-resolver/dist-es/regionConfig/resolveRegionConfig.js @@ -0,0 +1,24 @@ +import { getRealRegion } from "./getRealRegion"; +import { isFipsRegion } from "./isFipsRegion"; +export const resolveRegionConfig = (input) => { + const { region, useFipsEndpoint } = input; + if (!region) { + throw new Error("Region is missing"); + } + return Object.assign(input, { + region: async () => { + if (typeof region === "string") { + return getRealRegion(region); + } + const providedRegion = await region(); + return getRealRegion(providedRegion); + }, + 
useFipsEndpoint: async () => { + const providedRegion = typeof region === "string" ? region : await region(); + if (isFipsRegion(providedRegion)) { + return true; + } + return typeof useFipsEndpoint !== "function" ? Promise.resolve(!!useFipsEndpoint) : useFipsEndpoint(); + }, + }); +}; diff --git a/amplify/functions/deleteDocument/node_modules/@smithy/config-resolver/dist-es/regionInfo/EndpointVariant.js b/amplify/functions/deleteDocument/node_modules/@smithy/config-resolver/dist-es/regionInfo/EndpointVariant.js new file mode 100644 index 0000000..cb0ff5c --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@smithy/config-resolver/dist-es/regionInfo/EndpointVariant.js @@ -0,0 +1 @@ +export {}; diff --git a/amplify/functions/deleteDocument/node_modules/@smithy/config-resolver/dist-es/regionInfo/EndpointVariantTag.js b/amplify/functions/deleteDocument/node_modules/@smithy/config-resolver/dist-es/regionInfo/EndpointVariantTag.js new file mode 100644 index 0000000..cb0ff5c --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@smithy/config-resolver/dist-es/regionInfo/EndpointVariantTag.js @@ -0,0 +1 @@ +export {}; diff --git a/amplify/functions/deleteDocument/node_modules/@smithy/config-resolver/dist-es/regionInfo/PartitionHash.js b/amplify/functions/deleteDocument/node_modules/@smithy/config-resolver/dist-es/regionInfo/PartitionHash.js new file mode 100644 index 0000000..cb0ff5c --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@smithy/config-resolver/dist-es/regionInfo/PartitionHash.js @@ -0,0 +1 @@ +export {}; diff --git a/amplify/functions/deleteDocument/node_modules/@smithy/config-resolver/dist-es/regionInfo/RegionHash.js b/amplify/functions/deleteDocument/node_modules/@smithy/config-resolver/dist-es/regionInfo/RegionHash.js new file mode 100644 index 0000000..cb0ff5c --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@smithy/config-resolver/dist-es/regionInfo/RegionHash.js @@ -0,0 +1 @@ +export {}; diff 
--git a/amplify/functions/deleteDocument/node_modules/@smithy/config-resolver/dist-es/regionInfo/getHostnameFromVariants.js b/amplify/functions/deleteDocument/node_modules/@smithy/config-resolver/dist-es/regionInfo/getHostnameFromVariants.js new file mode 100644 index 0000000..84fc50e --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@smithy/config-resolver/dist-es/regionInfo/getHostnameFromVariants.js @@ -0,0 +1 @@ +export const getHostnameFromVariants = (variants = [], { useFipsEndpoint, useDualstackEndpoint }) => variants.find(({ tags }) => useFipsEndpoint === tags.includes("fips") && useDualstackEndpoint === tags.includes("dualstack"))?.hostname; diff --git a/amplify/functions/deleteDocument/node_modules/@smithy/config-resolver/dist-es/regionInfo/getRegionInfo.js b/amplify/functions/deleteDocument/node_modules/@smithy/config-resolver/dist-es/regionInfo/getRegionInfo.js new file mode 100644 index 0000000..c39e2f7 --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@smithy/config-resolver/dist-es/regionInfo/getRegionInfo.js @@ -0,0 +1,29 @@ +import { getHostnameFromVariants } from "./getHostnameFromVariants"; +import { getResolvedHostname } from "./getResolvedHostname"; +import { getResolvedPartition } from "./getResolvedPartition"; +import { getResolvedSigningRegion } from "./getResolvedSigningRegion"; +export const getRegionInfo = (region, { useFipsEndpoint = false, useDualstackEndpoint = false, signingService, regionHash, partitionHash, }) => { + const partition = getResolvedPartition(region, { partitionHash }); + const resolvedRegion = region in regionHash ? region : partitionHash[partition]?.endpoint ?? 
region; + const hostnameOptions = { useFipsEndpoint, useDualstackEndpoint }; + const regionHostname = getHostnameFromVariants(regionHash[resolvedRegion]?.variants, hostnameOptions); + const partitionHostname = getHostnameFromVariants(partitionHash[partition]?.variants, hostnameOptions); + const hostname = getResolvedHostname(resolvedRegion, { regionHostname, partitionHostname }); + if (hostname === undefined) { + throw new Error(`Endpoint resolution failed for: ${{ resolvedRegion, useFipsEndpoint, useDualstackEndpoint }}`); + } + const signingRegion = getResolvedSigningRegion(hostname, { + signingRegion: regionHash[resolvedRegion]?.signingRegion, + regionRegex: partitionHash[partition].regionRegex, + useFipsEndpoint, + }); + return { + partition, + signingService, + hostname, + ...(signingRegion && { signingRegion }), + ...(regionHash[resolvedRegion]?.signingService && { + signingService: regionHash[resolvedRegion].signingService, + }), + }; +}; diff --git a/amplify/functions/deleteDocument/node_modules/@smithy/config-resolver/dist-es/regionInfo/getResolvedHostname.js b/amplify/functions/deleteDocument/node_modules/@smithy/config-resolver/dist-es/regionInfo/getResolvedHostname.js new file mode 100644 index 0000000..35fb988 --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@smithy/config-resolver/dist-es/regionInfo/getResolvedHostname.js @@ -0,0 +1,5 @@ +export const getResolvedHostname = (resolvedRegion, { regionHostname, partitionHostname }) => regionHostname + ? regionHostname + : partitionHostname + ? 
partitionHostname.replace("{region}", resolvedRegion) + : undefined; diff --git a/amplify/functions/deleteDocument/node_modules/@smithy/config-resolver/dist-es/regionInfo/getResolvedPartition.js b/amplify/functions/deleteDocument/node_modules/@smithy/config-resolver/dist-es/regionInfo/getResolvedPartition.js new file mode 100644 index 0000000..3d7bc55 --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@smithy/config-resolver/dist-es/regionInfo/getResolvedPartition.js @@ -0,0 +1 @@ +export const getResolvedPartition = (region, { partitionHash }) => Object.keys(partitionHash || {}).find((key) => partitionHash[key].regions.includes(region)) ?? "aws"; diff --git a/amplify/functions/deleteDocument/node_modules/@smithy/config-resolver/dist-es/regionInfo/getResolvedSigningRegion.js b/amplify/functions/deleteDocument/node_modules/@smithy/config-resolver/dist-es/regionInfo/getResolvedSigningRegion.js new file mode 100644 index 0000000..7977e00 --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@smithy/config-resolver/dist-es/regionInfo/getResolvedSigningRegion.js @@ -0,0 +1,12 @@ +export const getResolvedSigningRegion = (hostname, { signingRegion, regionRegex, useFipsEndpoint }) => { + if (signingRegion) { + return signingRegion; + } + else if (useFipsEndpoint) { + const regionRegexJs = regionRegex.replace("\\\\", "\\").replace(/^\^/g, "\\.").replace(/\$$/g, "\\."); + const regionRegexmatchArray = hostname.match(regionRegexJs); + if (regionRegexmatchArray) { + return regionRegexmatchArray[0].slice(1, -1); + } + } +}; diff --git a/amplify/functions/deleteDocument/node_modules/@smithy/config-resolver/dist-es/regionInfo/index.js b/amplify/functions/deleteDocument/node_modules/@smithy/config-resolver/dist-es/regionInfo/index.js new file mode 100644 index 0000000..e29686a --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@smithy/config-resolver/dist-es/regionInfo/index.js @@ -0,0 +1,3 @@ +export * from "./PartitionHash"; +export * 
from "./RegionHash"; +export * from "./getRegionInfo"; diff --git a/amplify/functions/deleteDocument/node_modules/@smithy/config-resolver/dist-types/endpointsConfig/NodeUseDualstackEndpointConfigOptions.d.ts b/amplify/functions/deleteDocument/node_modules/@smithy/config-resolver/dist-types/endpointsConfig/NodeUseDualstackEndpointConfigOptions.d.ts new file mode 100644 index 0000000..172d8c1 --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@smithy/config-resolver/dist-types/endpointsConfig/NodeUseDualstackEndpointConfigOptions.d.ts @@ -0,0 +1,17 @@ +import { LoadedConfigSelectors } from "@smithy/node-config-provider"; +/** + * @internal + */ +export declare const ENV_USE_DUALSTACK_ENDPOINT = "AWS_USE_DUALSTACK_ENDPOINT"; +/** + * @internal + */ +export declare const CONFIG_USE_DUALSTACK_ENDPOINT = "use_dualstack_endpoint"; +/** + * @internal + */ +export declare const DEFAULT_USE_DUALSTACK_ENDPOINT = false; +/** + * @internal + */ +export declare const NODE_USE_DUALSTACK_ENDPOINT_CONFIG_OPTIONS: LoadedConfigSelectors; diff --git a/amplify/functions/deleteDocument/node_modules/@smithy/config-resolver/dist-types/endpointsConfig/NodeUseFipsEndpointConfigOptions.d.ts b/amplify/functions/deleteDocument/node_modules/@smithy/config-resolver/dist-types/endpointsConfig/NodeUseFipsEndpointConfigOptions.d.ts new file mode 100644 index 0000000..106bbdb --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@smithy/config-resolver/dist-types/endpointsConfig/NodeUseFipsEndpointConfigOptions.d.ts @@ -0,0 +1,17 @@ +import { LoadedConfigSelectors } from "@smithy/node-config-provider"; +/** + * @internal + */ +export declare const ENV_USE_FIPS_ENDPOINT = "AWS_USE_FIPS_ENDPOINT"; +/** + * @internal + */ +export declare const CONFIG_USE_FIPS_ENDPOINT = "use_fips_endpoint"; +/** + * @internal + */ +export declare const DEFAULT_USE_FIPS_ENDPOINT = false; +/** + * @internal + */ +export declare const NODE_USE_FIPS_ENDPOINT_CONFIG_OPTIONS: LoadedConfigSelectors; 
diff --git a/amplify/functions/deleteDocument/node_modules/@smithy/config-resolver/dist-types/endpointsConfig/index.d.ts b/amplify/functions/deleteDocument/node_modules/@smithy/config-resolver/dist-types/endpointsConfig/index.d.ts new file mode 100644 index 0000000..ea1cf59 --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@smithy/config-resolver/dist-types/endpointsConfig/index.d.ts @@ -0,0 +1,16 @@ +/** + * @internal + */ +export * from "./NodeUseDualstackEndpointConfigOptions"; +/** + * @internal + */ +export * from "./NodeUseFipsEndpointConfigOptions"; +/** + * @internal + */ +export * from "./resolveCustomEndpointsConfig"; +/** + * @internal + */ +export * from "./resolveEndpointsConfig"; diff --git a/amplify/functions/deleteDocument/node_modules/@smithy/config-resolver/dist-types/endpointsConfig/resolveCustomEndpointsConfig.d.ts b/amplify/functions/deleteDocument/node_modules/@smithy/config-resolver/dist-types/endpointsConfig/resolveCustomEndpointsConfig.d.ts new file mode 100644 index 0000000..477afbc --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@smithy/config-resolver/dist-types/endpointsConfig/resolveCustomEndpointsConfig.d.ts @@ -0,0 +1,32 @@ +import { Endpoint, Provider, UrlParser } from "@smithy/types"; +import { EndpointsInputConfig, EndpointsResolvedConfig } from "./resolveEndpointsConfig"; +/** + * @public + */ +export interface CustomEndpointsInputConfig extends EndpointsInputConfig { + /** + * The fully qualified endpoint of the webservice. + */ + endpoint: string | Endpoint | Provider; +} +/** + * @internal + */ +interface PreviouslyResolved { + urlParser: UrlParser; +} +/** + * @internal + */ +export interface CustomEndpointsResolvedConfig extends EndpointsResolvedConfig { + /** + * Whether the endpoint is specified by caller. 
+ * @internal + */ + isCustomEndpoint: true; +} +/** + * @internal + */ +export declare const resolveCustomEndpointsConfig: (input: T & CustomEndpointsInputConfig & PreviouslyResolved) => T & CustomEndpointsResolvedConfig; +export {}; diff --git a/amplify/functions/deleteDocument/node_modules/@smithy/config-resolver/dist-types/endpointsConfig/resolveEndpointsConfig.d.ts b/amplify/functions/deleteDocument/node_modules/@smithy/config-resolver/dist-types/endpointsConfig/resolveEndpointsConfig.d.ts new file mode 100644 index 0000000..4cd1d8f --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@smithy/config-resolver/dist-types/endpointsConfig/resolveEndpointsConfig.d.ts @@ -0,0 +1,54 @@ +import { Endpoint, Provider, RegionInfoProvider, UrlParser } from "@smithy/types"; +/** + * @public + */ +export interface EndpointsInputConfig { + /** + * The fully qualified endpoint of the webservice. This is only required when using + * a custom endpoint (for example, when using a local version of S3). + */ + endpoint?: string | Endpoint | Provider; + /** + * Whether TLS is enabled for requests. + */ + tls?: boolean; + /** + * Enables IPv6/IPv4 dualstack endpoint. + */ + useDualstackEndpoint?: boolean | Provider; +} +/** + * @internal + */ +interface PreviouslyResolved { + regionInfoProvider: RegionInfoProvider; + urlParser: UrlParser; + region: Provider; + useFipsEndpoint: Provider; +} +/** + * @internal + */ +export interface EndpointsResolvedConfig extends Required { + /** + * Resolved value for input {@link EndpointsInputConfig.endpoint} + */ + endpoint: Provider; + /** + * Whether the endpoint is specified by caller. + * @internal + */ + isCustomEndpoint?: boolean; + /** + * Resolved value for input {@link EndpointsInputConfig.useDualstackEndpoint} + */ + useDualstackEndpoint: Provider; +} +/** + * @internal + * + * @deprecated endpoints rulesets use \@smithy/middleware-endpoint resolveEndpointConfig. 
+ * All generated clients should migrate to Endpoints 2.0 endpointRuleSet traits. + */ +export declare const resolveEndpointsConfig: (input: T & EndpointsInputConfig & PreviouslyResolved) => T & EndpointsResolvedConfig; +export {}; diff --git a/amplify/functions/deleteDocument/node_modules/@smithy/config-resolver/dist-types/endpointsConfig/utils/getEndpointFromRegion.d.ts b/amplify/functions/deleteDocument/node_modules/@smithy/config-resolver/dist-types/endpointsConfig/utils/getEndpointFromRegion.d.ts new file mode 100644 index 0000000..5ded732 --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@smithy/config-resolver/dist-types/endpointsConfig/utils/getEndpointFromRegion.d.ts @@ -0,0 +1,11 @@ +import { Provider, RegionInfoProvider, UrlParser } from "@smithy/types"; +interface GetEndpointFromRegionOptions { + region: Provider; + tls?: boolean; + regionInfoProvider: RegionInfoProvider; + urlParser: UrlParser; + useDualstackEndpoint: Provider; + useFipsEndpoint: Provider; +} +export declare const getEndpointFromRegion: (input: GetEndpointFromRegionOptions) => Promise; +export {}; diff --git a/amplify/functions/deleteDocument/node_modules/@smithy/config-resolver/dist-types/index.d.ts b/amplify/functions/deleteDocument/node_modules/@smithy/config-resolver/dist-types/index.d.ts new file mode 100644 index 0000000..fde7086 --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@smithy/config-resolver/dist-types/index.d.ts @@ -0,0 +1,12 @@ +/** + * @internal + */ +export * from "./endpointsConfig"; +/** + * @internal + */ +export * from "./regionConfig"; +/** + * @internal + */ +export * from "./regionInfo"; diff --git a/amplify/functions/deleteDocument/node_modules/@smithy/config-resolver/dist-types/regionConfig/config.d.ts b/amplify/functions/deleteDocument/node_modules/@smithy/config-resolver/dist-types/regionConfig/config.d.ts new file mode 100644 index 0000000..d203bb0 --- /dev/null +++ 
b/amplify/functions/deleteDocument/node_modules/@smithy/config-resolver/dist-types/regionConfig/config.d.ts @@ -0,0 +1,17 @@ +import { LoadedConfigSelectors, LocalConfigOptions } from "@smithy/node-config-provider"; +/** + * @internal + */ +export declare const REGION_ENV_NAME = "AWS_REGION"; +/** + * @internal + */ +export declare const REGION_INI_NAME = "region"; +/** + * @internal + */ +export declare const NODE_REGION_CONFIG_OPTIONS: LoadedConfigSelectors; +/** + * @internal + */ +export declare const NODE_REGION_CONFIG_FILE_OPTIONS: LocalConfigOptions; diff --git a/amplify/functions/deleteDocument/node_modules/@smithy/config-resolver/dist-types/regionConfig/getRealRegion.d.ts b/amplify/functions/deleteDocument/node_modules/@smithy/config-resolver/dist-types/regionConfig/getRealRegion.d.ts new file mode 100644 index 0000000..c70fb5b --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@smithy/config-resolver/dist-types/regionConfig/getRealRegion.d.ts @@ -0,0 +1,4 @@ +/** + * @internal + */ +export declare const getRealRegion: (region: string) => string; diff --git a/amplify/functions/deleteDocument/node_modules/@smithy/config-resolver/dist-types/regionConfig/index.d.ts b/amplify/functions/deleteDocument/node_modules/@smithy/config-resolver/dist-types/regionConfig/index.d.ts new file mode 100644 index 0000000..6dcf5e5 --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@smithy/config-resolver/dist-types/regionConfig/index.d.ts @@ -0,0 +1,8 @@ +/** + * @internal + */ +export * from "./config"; +/** + * @internal + */ +export * from "./resolveRegionConfig"; diff --git a/amplify/functions/deleteDocument/node_modules/@smithy/config-resolver/dist-types/regionConfig/isFipsRegion.d.ts b/amplify/functions/deleteDocument/node_modules/@smithy/config-resolver/dist-types/regionConfig/isFipsRegion.d.ts new file mode 100644 index 0000000..b42cee7 --- /dev/null +++ 
b/amplify/functions/deleteDocument/node_modules/@smithy/config-resolver/dist-types/regionConfig/isFipsRegion.d.ts @@ -0,0 +1,4 @@ +/** + * @internal + */ +export declare const isFipsRegion: (region: string) => boolean; diff --git a/amplify/functions/deleteDocument/node_modules/@smithy/config-resolver/dist-types/regionConfig/resolveRegionConfig.d.ts b/amplify/functions/deleteDocument/node_modules/@smithy/config-resolver/dist-types/regionConfig/resolveRegionConfig.d.ts new file mode 100644 index 0000000..c06c9d4 --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@smithy/config-resolver/dist-types/regionConfig/resolveRegionConfig.d.ts @@ -0,0 +1,34 @@ +import { Provider } from "@smithy/types"; +/** + * @public + */ +export interface RegionInputConfig { + /** + * The AWS region to which this client will send requests + */ + region?: string | Provider; + /** + * Enables FIPS compatible endpoints. + */ + useFipsEndpoint?: boolean | Provider; +} +interface PreviouslyResolved { +} +/** + * @internal + */ +export interface RegionResolvedConfig { + /** + * Resolved value for input config {@link RegionInputConfig.region} + */ + region: Provider; + /** + * Resolved value for input {@link RegionInputConfig.useFipsEndpoint} + */ + useFipsEndpoint: Provider; +} +/** + * @internal + */ +export declare const resolveRegionConfig: (input: T & RegionInputConfig & PreviouslyResolved) => T & RegionResolvedConfig; +export {}; diff --git a/amplify/functions/deleteDocument/node_modules/@smithy/config-resolver/dist-types/regionInfo/EndpointVariant.d.ts b/amplify/functions/deleteDocument/node_modules/@smithy/config-resolver/dist-types/regionInfo/EndpointVariant.d.ts new file mode 100644 index 0000000..9b68e93 --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@smithy/config-resolver/dist-types/regionInfo/EndpointVariant.d.ts @@ -0,0 +1,10 @@ +import { EndpointVariantTag } from "./EndpointVariantTag"; +/** + * @internal + * + * Provides hostname information for 
specific host label. + */ +export type EndpointVariant = { + hostname: string; + tags: EndpointVariantTag[]; +}; diff --git a/amplify/functions/deleteDocument/node_modules/@smithy/config-resolver/dist-types/regionInfo/EndpointVariantTag.d.ts b/amplify/functions/deleteDocument/node_modules/@smithy/config-resolver/dist-types/regionInfo/EndpointVariantTag.d.ts new file mode 100644 index 0000000..ca50e1f --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@smithy/config-resolver/dist-types/regionInfo/EndpointVariantTag.d.ts @@ -0,0 +1,7 @@ +/** + * @internal + * + * The tag which mentions which area variant is providing information for. + * Can be either "fips" or "dualstack". + */ +export type EndpointVariantTag = "fips" | "dualstack"; diff --git a/amplify/functions/deleteDocument/node_modules/@smithy/config-resolver/dist-types/regionInfo/PartitionHash.d.ts b/amplify/functions/deleteDocument/node_modules/@smithy/config-resolver/dist-types/regionInfo/PartitionHash.d.ts new file mode 100644 index 0000000..0a5be17 --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@smithy/config-resolver/dist-types/regionInfo/PartitionHash.d.ts @@ -0,0 +1,14 @@ +import { EndpointVariant } from "./EndpointVariant"; +/** + * @internal + * + * The hash of partition with the information specific to that partition. + * The information includes the list of regions belonging to that partition, + * and the hostname to be used for the partition. 
+ */ +export type PartitionHash = Record; diff --git a/amplify/functions/deleteDocument/node_modules/@smithy/config-resolver/dist-types/regionInfo/RegionHash.d.ts b/amplify/functions/deleteDocument/node_modules/@smithy/config-resolver/dist-types/regionInfo/RegionHash.d.ts new file mode 100644 index 0000000..01cd843 --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@smithy/config-resolver/dist-types/regionInfo/RegionHash.d.ts @@ -0,0 +1,12 @@ +import { EndpointVariant } from "./EndpointVariant"; +/** + * @internal + * + * The hash of region with the information specific to that region. + * The information can include hostname, signingService and signingRegion. + */ +export type RegionHash = Record; diff --git a/amplify/functions/deleteDocument/node_modules/@smithy/config-resolver/dist-types/regionInfo/getHostnameFromVariants.d.ts b/amplify/functions/deleteDocument/node_modules/@smithy/config-resolver/dist-types/regionInfo/getHostnameFromVariants.d.ts new file mode 100644 index 0000000..47bcf70 --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@smithy/config-resolver/dist-types/regionInfo/getHostnameFromVariants.d.ts @@ -0,0 +1,12 @@ +import { EndpointVariant } from "./EndpointVariant"; +/** + * @internal + */ +export interface GetHostnameFromVariantsOptions { + useFipsEndpoint: boolean; + useDualstackEndpoint: boolean; +} +/** + * @internal + */ +export declare const getHostnameFromVariants: (variants: EndpointVariant[] | undefined, { useFipsEndpoint, useDualstackEndpoint }: GetHostnameFromVariantsOptions) => string | undefined; diff --git a/amplify/functions/deleteDocument/node_modules/@smithy/config-resolver/dist-types/regionInfo/getRegionInfo.d.ts b/amplify/functions/deleteDocument/node_modules/@smithy/config-resolver/dist-types/regionInfo/getRegionInfo.d.ts new file mode 100644 index 0000000..0aaae08 --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@smithy/config-resolver/dist-types/regionInfo/getRegionInfo.d.ts 
@@ -0,0 +1,17 @@ +import { RegionInfo } from "@smithy/types"; +import { PartitionHash } from "./PartitionHash"; +import { RegionHash } from "./RegionHash"; +/** + * @internal + */ +export interface GetRegionInfoOptions { + useFipsEndpoint?: boolean; + useDualstackEndpoint?: boolean; + signingService: string; + regionHash: RegionHash; + partitionHash: PartitionHash; +} +/** + * @internal + */ +export declare const getRegionInfo: (region: string, { useFipsEndpoint, useDualstackEndpoint, signingService, regionHash, partitionHash, }: GetRegionInfoOptions) => RegionInfo; diff --git a/amplify/functions/deleteDocument/node_modules/@smithy/config-resolver/dist-types/regionInfo/getResolvedHostname.d.ts b/amplify/functions/deleteDocument/node_modules/@smithy/config-resolver/dist-types/regionInfo/getResolvedHostname.d.ts new file mode 100644 index 0000000..bf7a2b3 --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@smithy/config-resolver/dist-types/regionInfo/getResolvedHostname.d.ts @@ -0,0 +1,11 @@ +/** + * @internal + */ +export interface GetResolvedHostnameOptions { + regionHostname?: string; + partitionHostname?: string; +} +/** + * @internal + */ +export declare const getResolvedHostname: (resolvedRegion: string, { regionHostname, partitionHostname }: GetResolvedHostnameOptions) => string | undefined; diff --git a/amplify/functions/deleteDocument/node_modules/@smithy/config-resolver/dist-types/regionInfo/getResolvedPartition.d.ts b/amplify/functions/deleteDocument/node_modules/@smithy/config-resolver/dist-types/regionInfo/getResolvedPartition.d.ts new file mode 100644 index 0000000..587b4fc --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@smithy/config-resolver/dist-types/regionInfo/getResolvedPartition.d.ts @@ -0,0 +1,11 @@ +import { PartitionHash } from "./PartitionHash"; +/** + * @internal + */ +export interface GetResolvedPartitionOptions { + partitionHash: PartitionHash; +} +/** + * @internal + */ +export declare const 
getResolvedPartition: (region: string, { partitionHash }: GetResolvedPartitionOptions) => string; diff --git a/amplify/functions/deleteDocument/node_modules/@smithy/config-resolver/dist-types/regionInfo/getResolvedSigningRegion.d.ts b/amplify/functions/deleteDocument/node_modules/@smithy/config-resolver/dist-types/regionInfo/getResolvedSigningRegion.d.ts new file mode 100644 index 0000000..3f5f7af --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@smithy/config-resolver/dist-types/regionInfo/getResolvedSigningRegion.d.ts @@ -0,0 +1,12 @@ +/** + * @internal + */ +export interface GetResolvedSigningRegionOptions { + regionRegex: string; + signingRegion?: string; + useFipsEndpoint: boolean; +} +/** + * @internal + */ +export declare const getResolvedSigningRegion: (hostname: string, { signingRegion, regionRegex, useFipsEndpoint }: GetResolvedSigningRegionOptions) => string | undefined; diff --git a/amplify/functions/deleteDocument/node_modules/@smithy/config-resolver/dist-types/regionInfo/index.d.ts b/amplify/functions/deleteDocument/node_modules/@smithy/config-resolver/dist-types/regionInfo/index.d.ts new file mode 100644 index 0000000..64ef0d5 --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@smithy/config-resolver/dist-types/regionInfo/index.d.ts @@ -0,0 +1,12 @@ +/** + * @internal + */ +export * from "./PartitionHash"; +/** + * @internal + */ +export * from "./RegionHash"; +/** + * @internal + */ +export * from "./getRegionInfo"; diff --git a/amplify/functions/deleteDocument/node_modules/@smithy/config-resolver/dist-types/ts3.4/endpointsConfig/NodeUseDualstackEndpointConfigOptions.d.ts b/amplify/functions/deleteDocument/node_modules/@smithy/config-resolver/dist-types/ts3.4/endpointsConfig/NodeUseDualstackEndpointConfigOptions.d.ts new file mode 100644 index 0000000..169720a --- /dev/null +++ 
b/amplify/functions/deleteDocument/node_modules/@smithy/config-resolver/dist-types/ts3.4/endpointsConfig/NodeUseDualstackEndpointConfigOptions.d.ts @@ -0,0 +1,17 @@ +import { LoadedConfigSelectors } from "@smithy/node-config-provider"; +/** + * @internal + */ +export declare const ENV_USE_DUALSTACK_ENDPOINT = "AWS_USE_DUALSTACK_ENDPOINT"; +/** + * @internal + */ +export declare const CONFIG_USE_DUALSTACK_ENDPOINT = "use_dualstack_endpoint"; +/** + * @internal + */ +export declare const DEFAULT_USE_DUALSTACK_ENDPOINT = false; +/** + * @internal + */ +export declare const NODE_USE_DUALSTACK_ENDPOINT_CONFIG_OPTIONS: LoadedConfigSelectors; diff --git a/amplify/functions/deleteDocument/node_modules/@smithy/config-resolver/dist-types/ts3.4/endpointsConfig/NodeUseFipsEndpointConfigOptions.d.ts b/amplify/functions/deleteDocument/node_modules/@smithy/config-resolver/dist-types/ts3.4/endpointsConfig/NodeUseFipsEndpointConfigOptions.d.ts new file mode 100644 index 0000000..b17417e --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@smithy/config-resolver/dist-types/ts3.4/endpointsConfig/NodeUseFipsEndpointConfigOptions.d.ts @@ -0,0 +1,17 @@ +import { LoadedConfigSelectors } from "@smithy/node-config-provider"; +/** + * @internal + */ +export declare const ENV_USE_FIPS_ENDPOINT = "AWS_USE_FIPS_ENDPOINT"; +/** + * @internal + */ +export declare const CONFIG_USE_FIPS_ENDPOINT = "use_fips_endpoint"; +/** + * @internal + */ +export declare const DEFAULT_USE_FIPS_ENDPOINT = false; +/** + * @internal + */ +export declare const NODE_USE_FIPS_ENDPOINT_CONFIG_OPTIONS: LoadedConfigSelectors; diff --git a/amplify/functions/deleteDocument/node_modules/@smithy/config-resolver/dist-types/ts3.4/endpointsConfig/index.d.ts b/amplify/functions/deleteDocument/node_modules/@smithy/config-resolver/dist-types/ts3.4/endpointsConfig/index.d.ts new file mode 100644 index 0000000..cbabe5b --- /dev/null +++ 
b/amplify/functions/deleteDocument/node_modules/@smithy/config-resolver/dist-types/ts3.4/endpointsConfig/index.d.ts @@ -0,0 +1,16 @@ +/** + * @internal + */ +export * from "./NodeUseDualstackEndpointConfigOptions"; +/** + * @internal + */ +export * from "./NodeUseFipsEndpointConfigOptions"; +/** + * @internal + */ +export * from "./resolveCustomEndpointsConfig"; +/** + * @internal + */ +export * from "./resolveEndpointsConfig"; diff --git a/amplify/functions/deleteDocument/node_modules/@smithy/config-resolver/dist-types/ts3.4/endpointsConfig/resolveCustomEndpointsConfig.d.ts b/amplify/functions/deleteDocument/node_modules/@smithy/config-resolver/dist-types/ts3.4/endpointsConfig/resolveCustomEndpointsConfig.d.ts new file mode 100644 index 0000000..f49306e --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@smithy/config-resolver/dist-types/ts3.4/endpointsConfig/resolveCustomEndpointsConfig.d.ts @@ -0,0 +1,32 @@ +import { Endpoint, Provider, UrlParser } from "@smithy/types"; +import { EndpointsInputConfig, EndpointsResolvedConfig } from "./resolveEndpointsConfig"; +/** + * @public + */ +export interface CustomEndpointsInputConfig extends EndpointsInputConfig { + /** + * The fully qualified endpoint of the webservice. + */ + endpoint: string | Endpoint | Provider; +} +/** + * @internal + */ +interface PreviouslyResolved { + urlParser: UrlParser; +} +/** + * @internal + */ +export interface CustomEndpointsResolvedConfig extends EndpointsResolvedConfig { + /** + * Whether the endpoint is specified by caller. 
+ * @internal + */ + isCustomEndpoint: true; +} +/** + * @internal + */ +export declare const resolveCustomEndpointsConfig: (input: T & CustomEndpointsInputConfig & PreviouslyResolved) => T & CustomEndpointsResolvedConfig; +export {}; diff --git a/amplify/functions/deleteDocument/node_modules/@smithy/config-resolver/dist-types/ts3.4/endpointsConfig/resolveEndpointsConfig.d.ts b/amplify/functions/deleteDocument/node_modules/@smithy/config-resolver/dist-types/ts3.4/endpointsConfig/resolveEndpointsConfig.d.ts new file mode 100644 index 0000000..388819d --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@smithy/config-resolver/dist-types/ts3.4/endpointsConfig/resolveEndpointsConfig.d.ts @@ -0,0 +1,54 @@ +import { Endpoint, Provider, RegionInfoProvider, UrlParser } from "@smithy/types"; +/** + * @public + */ +export interface EndpointsInputConfig { + /** + * The fully qualified endpoint of the webservice. This is only required when using + * a custom endpoint (for example, when using a local version of S3). + */ + endpoint?: string | Endpoint | Provider; + /** + * Whether TLS is enabled for requests. + */ + tls?: boolean; + /** + * Enables IPv6/IPv4 dualstack endpoint. + */ + useDualstackEndpoint?: boolean | Provider; +} +/** + * @internal + */ +interface PreviouslyResolved { + regionInfoProvider: RegionInfoProvider; + urlParser: UrlParser; + region: Provider; + useFipsEndpoint: Provider; +} +/** + * @internal + */ +export interface EndpointsResolvedConfig extends Required { + /** + * Resolved value for input {@link EndpointsInputConfig.endpoint} + */ + endpoint: Provider; + /** + * Whether the endpoint is specified by caller. + * @internal + */ + isCustomEndpoint?: boolean; + /** + * Resolved value for input {@link EndpointsInputConfig.useDualstackEndpoint} + */ + useDualstackEndpoint: Provider; +} +/** + * @internal + * + * @deprecated endpoints rulesets use \@smithy/middleware-endpoint resolveEndpointConfig. 
+ * All generated clients should migrate to Endpoints 2.0 endpointRuleSet traits. + */ +export declare const resolveEndpointsConfig: (input: T & EndpointsInputConfig & PreviouslyResolved) => T & EndpointsResolvedConfig; +export {}; diff --git a/amplify/functions/deleteDocument/node_modules/@smithy/config-resolver/dist-types/ts3.4/endpointsConfig/utils/getEndpointFromRegion.d.ts b/amplify/functions/deleteDocument/node_modules/@smithy/config-resolver/dist-types/ts3.4/endpointsConfig/utils/getEndpointFromRegion.d.ts new file mode 100644 index 0000000..83d4635 --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@smithy/config-resolver/dist-types/ts3.4/endpointsConfig/utils/getEndpointFromRegion.d.ts @@ -0,0 +1,11 @@ +import { Provider, RegionInfoProvider, UrlParser } from "@smithy/types"; +interface GetEndpointFromRegionOptions { + region: Provider; + tls?: boolean; + regionInfoProvider: RegionInfoProvider; + urlParser: UrlParser; + useDualstackEndpoint: Provider; + useFipsEndpoint: Provider; +} +export declare const getEndpointFromRegion: (input: GetEndpointFromRegionOptions) => Promise; +export {}; diff --git a/amplify/functions/deleteDocument/node_modules/@smithy/config-resolver/dist-types/ts3.4/index.d.ts b/amplify/functions/deleteDocument/node_modules/@smithy/config-resolver/dist-types/ts3.4/index.d.ts new file mode 100644 index 0000000..e205411 --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@smithy/config-resolver/dist-types/ts3.4/index.d.ts @@ -0,0 +1,12 @@ +/** + * @internal + */ +export * from "./endpointsConfig"; +/** + * @internal + */ +export * from "./regionConfig"; +/** + * @internal + */ +export * from "./regionInfo"; diff --git a/amplify/functions/deleteDocument/node_modules/@smithy/config-resolver/dist-types/ts3.4/regionConfig/config.d.ts b/amplify/functions/deleteDocument/node_modules/@smithy/config-resolver/dist-types/ts3.4/regionConfig/config.d.ts new file mode 100644 index 0000000..8f3a9b2 --- /dev/null +++ 
b/amplify/functions/deleteDocument/node_modules/@smithy/config-resolver/dist-types/ts3.4/regionConfig/config.d.ts @@ -0,0 +1,17 @@ +import { LoadedConfigSelectors, LocalConfigOptions } from "@smithy/node-config-provider"; +/** + * @internal + */ +export declare const REGION_ENV_NAME = "AWS_REGION"; +/** + * @internal + */ +export declare const REGION_INI_NAME = "region"; +/** + * @internal + */ +export declare const NODE_REGION_CONFIG_OPTIONS: LoadedConfigSelectors; +/** + * @internal + */ +export declare const NODE_REGION_CONFIG_FILE_OPTIONS: LocalConfigOptions; diff --git a/amplify/functions/deleteDocument/node_modules/@smithy/config-resolver/dist-types/ts3.4/regionConfig/getRealRegion.d.ts b/amplify/functions/deleteDocument/node_modules/@smithy/config-resolver/dist-types/ts3.4/regionConfig/getRealRegion.d.ts new file mode 100644 index 0000000..6c11d4d --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@smithy/config-resolver/dist-types/ts3.4/regionConfig/getRealRegion.d.ts @@ -0,0 +1,4 @@ +/** + * @internal + */ +export declare const getRealRegion: (region: string) => string; diff --git a/amplify/functions/deleteDocument/node_modules/@smithy/config-resolver/dist-types/ts3.4/regionConfig/index.d.ts b/amplify/functions/deleteDocument/node_modules/@smithy/config-resolver/dist-types/ts3.4/regionConfig/index.d.ts new file mode 100644 index 0000000..0e6f55d --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@smithy/config-resolver/dist-types/ts3.4/regionConfig/index.d.ts @@ -0,0 +1,8 @@ +/** + * @internal + */ +export * from "./config"; +/** + * @internal + */ +export * from "./resolveRegionConfig"; diff --git a/amplify/functions/deleteDocument/node_modules/@smithy/config-resolver/dist-types/ts3.4/regionConfig/isFipsRegion.d.ts b/amplify/functions/deleteDocument/node_modules/@smithy/config-resolver/dist-types/ts3.4/regionConfig/isFipsRegion.d.ts new file mode 100644 index 0000000..1ee8bd4 --- /dev/null +++ 
b/amplify/functions/deleteDocument/node_modules/@smithy/config-resolver/dist-types/ts3.4/regionConfig/isFipsRegion.d.ts @@ -0,0 +1,4 @@ +/** + * @internal + */ +export declare const isFipsRegion: (region: string) => boolean; diff --git a/amplify/functions/deleteDocument/node_modules/@smithy/config-resolver/dist-types/ts3.4/regionConfig/resolveRegionConfig.d.ts b/amplify/functions/deleteDocument/node_modules/@smithy/config-resolver/dist-types/ts3.4/regionConfig/resolveRegionConfig.d.ts new file mode 100644 index 0000000..7aaf9e1 --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@smithy/config-resolver/dist-types/ts3.4/regionConfig/resolveRegionConfig.d.ts @@ -0,0 +1,34 @@ +import { Provider } from "@smithy/types"; +/** + * @public + */ +export interface RegionInputConfig { + /** + * The AWS region to which this client will send requests + */ + region?: string | Provider; + /** + * Enables FIPS compatible endpoints. + */ + useFipsEndpoint?: boolean | Provider; +} +interface PreviouslyResolved { +} +/** + * @internal + */ +export interface RegionResolvedConfig { + /** + * Resolved value for input config {@link RegionInputConfig.region} + */ + region: Provider; + /** + * Resolved value for input {@link RegionInputConfig.useFipsEndpoint} + */ + useFipsEndpoint: Provider; +} +/** + * @internal + */ +export declare const resolveRegionConfig: (input: T & RegionInputConfig & PreviouslyResolved) => T & RegionResolvedConfig; +export {}; diff --git a/amplify/functions/deleteDocument/node_modules/@smithy/config-resolver/dist-types/ts3.4/regionInfo/EndpointVariant.d.ts b/amplify/functions/deleteDocument/node_modules/@smithy/config-resolver/dist-types/ts3.4/regionInfo/EndpointVariant.d.ts new file mode 100644 index 0000000..e533cc7 --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@smithy/config-resolver/dist-types/ts3.4/regionInfo/EndpointVariant.d.ts @@ -0,0 +1,10 @@ +import { EndpointVariantTag } from "./EndpointVariantTag"; +/** + * @internal + 
* + * Provides hostname information for specific host label. + */ +export type EndpointVariant = { + hostname: string; + tags: EndpointVariantTag[]; +}; diff --git a/amplify/functions/deleteDocument/node_modules/@smithy/config-resolver/dist-types/ts3.4/regionInfo/EndpointVariantTag.d.ts b/amplify/functions/deleteDocument/node_modules/@smithy/config-resolver/dist-types/ts3.4/regionInfo/EndpointVariantTag.d.ts new file mode 100644 index 0000000..755bbe5 --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@smithy/config-resolver/dist-types/ts3.4/regionInfo/EndpointVariantTag.d.ts @@ -0,0 +1,7 @@ +/** + * @internal + * + * The tag which mentions which area variant is providing information for. + * Can be either "fips" or "dualstack". + */ +export type EndpointVariantTag = "fips" | "dualstack"; diff --git a/amplify/functions/deleteDocument/node_modules/@smithy/config-resolver/dist-types/ts3.4/regionInfo/PartitionHash.d.ts b/amplify/functions/deleteDocument/node_modules/@smithy/config-resolver/dist-types/ts3.4/regionInfo/PartitionHash.d.ts new file mode 100644 index 0000000..6fed65e --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@smithy/config-resolver/dist-types/ts3.4/regionInfo/PartitionHash.d.ts @@ -0,0 +1,14 @@ +import { EndpointVariant } from "./EndpointVariant"; +/** + * @internal + * + * The hash of partition with the information specific to that partition. + * The information includes the list of regions belonging to that partition, + * and the hostname to be used for the partition. 
+ */ +export type PartitionHash = Record; diff --git a/amplify/functions/deleteDocument/node_modules/@smithy/config-resolver/dist-types/ts3.4/regionInfo/RegionHash.d.ts b/amplify/functions/deleteDocument/node_modules/@smithy/config-resolver/dist-types/ts3.4/regionInfo/RegionHash.d.ts new file mode 100644 index 0000000..cd90c70 --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@smithy/config-resolver/dist-types/ts3.4/regionInfo/RegionHash.d.ts @@ -0,0 +1,12 @@ +import { EndpointVariant } from "./EndpointVariant"; +/** + * @internal + * + * The hash of region with the information specific to that region. + * The information can include hostname, signingService and signingRegion. + */ +export type RegionHash = Record; diff --git a/amplify/functions/deleteDocument/node_modules/@smithy/config-resolver/dist-types/ts3.4/regionInfo/getHostnameFromVariants.d.ts b/amplify/functions/deleteDocument/node_modules/@smithy/config-resolver/dist-types/ts3.4/regionInfo/getHostnameFromVariants.d.ts new file mode 100644 index 0000000..3d61daa --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@smithy/config-resolver/dist-types/ts3.4/regionInfo/getHostnameFromVariants.d.ts @@ -0,0 +1,12 @@ +import { EndpointVariant } from "./EndpointVariant"; +/** + * @internal + */ +export interface GetHostnameFromVariantsOptions { + useFipsEndpoint: boolean; + useDualstackEndpoint: boolean; +} +/** + * @internal + */ +export declare const getHostnameFromVariants: (variants: EndpointVariant[] | undefined, { useFipsEndpoint, useDualstackEndpoint }: GetHostnameFromVariantsOptions) => string | undefined; diff --git a/amplify/functions/deleteDocument/node_modules/@smithy/config-resolver/dist-types/ts3.4/regionInfo/getRegionInfo.d.ts b/amplify/functions/deleteDocument/node_modules/@smithy/config-resolver/dist-types/ts3.4/regionInfo/getRegionInfo.d.ts new file mode 100644 index 0000000..820a548 --- /dev/null +++ 
b/amplify/functions/deleteDocument/node_modules/@smithy/config-resolver/dist-types/ts3.4/regionInfo/getRegionInfo.d.ts @@ -0,0 +1,17 @@ +import { RegionInfo } from "@smithy/types"; +import { PartitionHash } from "./PartitionHash"; +import { RegionHash } from "./RegionHash"; +/** + * @internal + */ +export interface GetRegionInfoOptions { + useFipsEndpoint?: boolean; + useDualstackEndpoint?: boolean; + signingService: string; + regionHash: RegionHash; + partitionHash: PartitionHash; +} +/** + * @internal + */ +export declare const getRegionInfo: (region: string, { useFipsEndpoint, useDualstackEndpoint, signingService, regionHash, partitionHash, }: GetRegionInfoOptions) => RegionInfo; diff --git a/amplify/functions/deleteDocument/node_modules/@smithy/config-resolver/dist-types/ts3.4/regionInfo/getResolvedHostname.d.ts b/amplify/functions/deleteDocument/node_modules/@smithy/config-resolver/dist-types/ts3.4/regionInfo/getResolvedHostname.d.ts new file mode 100644 index 0000000..6aae405 --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@smithy/config-resolver/dist-types/ts3.4/regionInfo/getResolvedHostname.d.ts @@ -0,0 +1,11 @@ +/** + * @internal + */ +export interface GetResolvedHostnameOptions { + regionHostname?: string; + partitionHostname?: string; +} +/** + * @internal + */ +export declare const getResolvedHostname: (resolvedRegion: string, { regionHostname, partitionHostname }: GetResolvedHostnameOptions) => string | undefined; diff --git a/amplify/functions/deleteDocument/node_modules/@smithy/config-resolver/dist-types/ts3.4/regionInfo/getResolvedPartition.d.ts b/amplify/functions/deleteDocument/node_modules/@smithy/config-resolver/dist-types/ts3.4/regionInfo/getResolvedPartition.d.ts new file mode 100644 index 0000000..355c318 --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@smithy/config-resolver/dist-types/ts3.4/regionInfo/getResolvedPartition.d.ts @@ -0,0 +1,11 @@ +import { PartitionHash } from "./PartitionHash"; +/** + * 
@internal + */ +export interface GetResolvedPartitionOptions { + partitionHash: PartitionHash; +} +/** + * @internal + */ +export declare const getResolvedPartition: (region: string, { partitionHash }: GetResolvedPartitionOptions) => string; diff --git a/amplify/functions/deleteDocument/node_modules/@smithy/config-resolver/dist-types/ts3.4/regionInfo/getResolvedSigningRegion.d.ts b/amplify/functions/deleteDocument/node_modules/@smithy/config-resolver/dist-types/ts3.4/regionInfo/getResolvedSigningRegion.d.ts new file mode 100644 index 0000000..a7b1db6 --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@smithy/config-resolver/dist-types/ts3.4/regionInfo/getResolvedSigningRegion.d.ts @@ -0,0 +1,12 @@ +/** + * @internal + */ +export interface GetResolvedSigningRegionOptions { + regionRegex: string; + signingRegion?: string; + useFipsEndpoint: boolean; +} +/** + * @internal + */ +export declare const getResolvedSigningRegion: (hostname: string, { signingRegion, regionRegex, useFipsEndpoint }: GetResolvedSigningRegionOptions) => string | undefined; diff --git a/amplify/functions/deleteDocument/node_modules/@smithy/config-resolver/dist-types/ts3.4/regionInfo/index.d.ts b/amplify/functions/deleteDocument/node_modules/@smithy/config-resolver/dist-types/ts3.4/regionInfo/index.d.ts new file mode 100644 index 0000000..5826308 --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@smithy/config-resolver/dist-types/ts3.4/regionInfo/index.d.ts @@ -0,0 +1,12 @@ +/** + * @internal + */ +export * from "./PartitionHash"; +/** + * @internal + */ +export * from "./RegionHash"; +/** + * @internal + */ +export * from "./getRegionInfo"; diff --git a/amplify/functions/deleteDocument/node_modules/@smithy/config-resolver/package.json b/amplify/functions/deleteDocument/node_modules/@smithy/config-resolver/package.json new file mode 100644 index 0000000..2c4927f --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@smithy/config-resolver/package.json @@ 
-0,0 +1,64 @@ +{ + "name": "@smithy/config-resolver", + "version": "4.1.0", + "scripts": { + "build": "concurrently 'yarn:build:cjs' 'yarn:build:es' 'yarn:build:types && yarn build:types:downlevel'", + "build:cjs": "node ../../scripts/inline config-resolver", + "build:es": "yarn g:tsc -p tsconfig.es.json", + "build:types": "yarn g:tsc -p tsconfig.types.json", + "build:types:downlevel": "rimraf dist-types/ts3.4 && downlevel-dts dist-types dist-types/ts3.4", + "stage-release": "rimraf ./.release && yarn pack && mkdir ./.release && tar zxvf ./package.tgz --directory ./.release && rm ./package.tgz", + "clean": "rimraf ./dist-* && rimraf *.tsbuildinfo || exit 0", + "lint": "eslint -c ../../.eslintrc.js \"src/**/*.ts\"", + "format": "prettier --config ../../prettier.config.js --ignore-path ../../.prettierignore --write \"**/*.{ts,md,json}\"", + "test": "yarn g:vitest run", + "extract:docs": "api-extractor run --local", + "test:watch": "yarn g:vitest watch" + }, + "main": "./dist-cjs/index.js", + "module": "./dist-es/index.js", + "types": "./dist-types/index.d.ts", + "author": { + "name": "AWS SDK for JavaScript Team", + "url": "https://aws.amazon.com/javascript/" + }, + "license": "Apache-2.0", + "dependencies": { + "@smithy/node-config-provider": "^4.0.2", + "@smithy/types": "^4.2.0", + "@smithy/util-config-provider": "^4.0.0", + "@smithy/util-middleware": "^4.0.2", + "tslib": "^2.6.2" + }, + "devDependencies": { + "concurrently": "7.0.0", + "downlevel-dts": "0.10.1", + "rimraf": "3.0.2", + "typedoc": "0.23.23" + }, + "engines": { + "node": ">=18.0.0" + }, + "typesVersions": { + "<4.0": { + "dist-types/*": [ + "dist-types/ts3.4/*" + ] + } + }, + "files": [ + "dist-*/**" + ], + "homepage": "https://github.com/smithy-lang/smithy-typescript/tree/main/packages/config-resolver", + "repository": { + "type": "git", + "url": "https://github.com/smithy-lang/smithy-typescript.git", + "directory": "packages/config-resolver" + }, + "typedoc": { + "entryPoint": "src/index.ts" + }, + 
"publishConfig": { + "directory": ".release/package" + } +} \ No newline at end of file diff --git a/amplify/functions/deleteDocument/node_modules/@smithy/core/LICENSE b/amplify/functions/deleteDocument/node_modules/@smithy/core/LICENSE new file mode 100644 index 0000000..e907b58 --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@smithy/core/LICENSE @@ -0,0 +1,201 @@ + Apache License + Version 2.0, January 2004 + http://www.apache.org/licenses/ + + TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION + + 1. Definitions. + + "License" shall mean the terms and conditions for use, reproduction, + and distribution as defined by Sections 1 through 9 of this document. + + "Licensor" shall mean the copyright owner or entity authorized by + the copyright owner that is granting the License. + + "Legal Entity" shall mean the union of the acting entity and all + other entities that control, are controlled by, or are under common + control with that entity. For the purposes of this definition, + "control" means (i) the power, direct or indirect, to cause the + direction or management of such entity, whether by contract or + otherwise, or (ii) ownership of fifty percent (50%) or more of the + outstanding shares, or (iii) beneficial ownership of such entity. + + "You" (or "Your") shall mean an individual or Legal Entity + exercising permissions granted by this License. + + "Source" form shall mean the preferred form for making modifications, + including but not limited to software source code, documentation + source, and configuration files. + + "Object" form shall mean any form resulting from mechanical + transformation or translation of a Source form, including but + not limited to compiled object code, generated documentation, + and conversions to other media types. 
+ + "Work" shall mean the work of authorship, whether in Source or + Object form, made available under the License, as indicated by a + copyright notice that is included in or attached to the work + (an example is provided in the Appendix below). + + "Derivative Works" shall mean any work, whether in Source or Object + form, that is based on (or derived from) the Work and for which the + editorial revisions, annotations, elaborations, or other modifications + represent, as a whole, an original work of authorship. For the purposes + of this License, Derivative Works shall not include works that remain + separable from, or merely link (or bind by name) to the interfaces of, + the Work and Derivative Works thereof. + + "Contribution" shall mean any work of authorship, including + the original version of the Work and any modifications or additions + to that Work or Derivative Works thereof, that is intentionally + submitted to Licensor for inclusion in the Work by the copyright owner + or by an individual or Legal Entity authorized to submit on behalf of + the copyright owner. For the purposes of this definition, "submitted" + means any form of electronic, verbal, or written communication sent + to the Licensor or its representatives, including but not limited to + communication on electronic mailing lists, source code control systems, + and issue tracking systems that are managed by, or on behalf of, the + Licensor for the purpose of discussing and improving the Work, but + excluding communication that is conspicuously marked or otherwise + designated in writing by the copyright owner as "Not a Contribution." + + "Contributor" shall mean Licensor and any individual or Legal Entity + on behalf of whom a Contribution has been received by Licensor and + subsequently incorporated within the Work. + + 2. Grant of Copyright License. 
Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + copyright license to reproduce, prepare Derivative Works of, + publicly display, publicly perform, sublicense, and distribute the + Work and such Derivative Works in Source or Object form. + + 3. Grant of Patent License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + (except as stated in this section) patent license to make, have made, + use, offer to sell, sell, import, and otherwise transfer the Work, + where such license applies only to those patent claims licensable + by such Contributor that are necessarily infringed by their + Contribution(s) alone or by combination of their Contribution(s) + with the Work to which such Contribution(s) was submitted. If You + institute patent litigation against any entity (including a + cross-claim or counterclaim in a lawsuit) alleging that the Work + or a Contribution incorporated within the Work constitutes direct + or contributory patent infringement, then any patent licenses + granted to You under this License for that Work shall terminate + as of the date such litigation is filed. + + 4. Redistribution. 
You may reproduce and distribute copies of the + Work or Derivative Works thereof in any medium, with or without + modifications, and in Source or Object form, provided that You + meet the following conditions: + + (a) You must give any other recipients of the Work or + Derivative Works a copy of this License; and + + (b) You must cause any modified files to carry prominent notices + stating that You changed the files; and + + (c) You must retain, in the Source form of any Derivative Works + that You distribute, all copyright, patent, trademark, and + attribution notices from the Source form of the Work, + excluding those notices that do not pertain to any part of + the Derivative Works; and + + (d) If the Work includes a "NOTICE" text file as part of its + distribution, then any Derivative Works that You distribute must + include a readable copy of the attribution notices contained + within such NOTICE file, excluding those notices that do not + pertain to any part of the Derivative Works, in at least one + of the following places: within a NOTICE text file distributed + as part of the Derivative Works; within the Source form or + documentation, if provided along with the Derivative Works; or, + within a display generated by the Derivative Works, if and + wherever such third-party notices normally appear. The contents + of the NOTICE file are for informational purposes only and + do not modify the License. You may add Your own attribution + notices within Derivative Works that You distribute, alongside + or as an addendum to the NOTICE text from the Work, provided + that such additional attribution notices cannot be construed + as modifying the License. 
+ + You may add Your own copyright statement to Your modifications and + may provide additional or different license terms and conditions + for use, reproduction, or distribution of Your modifications, or + for any such Derivative Works as a whole, provided Your use, + reproduction, and distribution of the Work otherwise complies with + the conditions stated in this License. + + 5. Submission of Contributions. Unless You explicitly state otherwise, + any Contribution intentionally submitted for inclusion in the Work + by You to the Licensor shall be under the terms and conditions of + this License, without any additional terms or conditions. + Notwithstanding the above, nothing herein shall supersede or modify + the terms of any separate license agreement you may have executed + with Licensor regarding such Contributions. + + 6. Trademarks. This License does not grant permission to use the trade + names, trademarks, service marks, or product names of the Licensor, + except as required for reasonable and customary use in describing the + origin of the Work and reproducing the content of the NOTICE file. + + 7. Disclaimer of Warranty. Unless required by applicable law or + agreed to in writing, Licensor provides the Work (and each + Contributor provides its Contributions) on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or + implied, including, without limitation, any warranties or conditions + of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A + PARTICULAR PURPOSE. You are solely responsible for determining the + appropriateness of using or redistributing the Work and assume any + risks associated with Your exercise of permissions under this License. + + 8. Limitation of Liability. 
In no event and under no legal theory, + whether in tort (including negligence), contract, or otherwise, + unless required by applicable law (such as deliberate and grossly + negligent acts) or agreed to in writing, shall any Contributor be + liable to You for damages, including any direct, indirect, special, + incidental, or consequential damages of any character arising as a + result of this License or out of the use or inability to use the + Work (including but not limited to damages for loss of goodwill, + work stoppage, computer failure or malfunction, or any and all + other commercial damages or losses), even if such Contributor + has been advised of the possibility of such damages. + + 9. Accepting Warranty or Additional Liability. While redistributing + the Work or Derivative Works thereof, You may choose to offer, + and charge a fee for, acceptance of support, warranty, indemnity, + or other liability obligations and/or rights consistent with this + License. However, in accepting such obligations, You may act only + on Your own behalf and on Your sole responsibility, not on behalf + of any other Contributor, and only if You agree to indemnify, + defend, and hold each Contributor harmless for any liability + incurred by, or claims asserted against, such Contributor by reason + of your accepting any such warranty or additional liability. + + END OF TERMS AND CONDITIONS + + APPENDIX: How to apply the Apache License to your work. + + To apply the Apache License to your work, attach the following + boilerplate notice, with the fields enclosed by brackets "{}" + replaced with your own identifying information. (Don't include + the brackets!) The text should be enclosed in the appropriate + comment syntax for the file format. We also recommend that a + file or class name and description of purpose be included on the + same "printed page" as the copyright notice for easier + identification within third-party archives. + + Copyright 2019 Amazon.com, Inc. 
or its affiliates. All Rights Reserved. + + Licensed under the Apache License, Version 2.0 (the "License"); + you may not use this file except in compliance with the License. + You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + + Unless required by applicable law or agreed to in writing, software + distributed under the License is distributed on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + See the License for the specific language governing permissions and + limitations under the License. diff --git a/amplify/functions/deleteDocument/node_modules/@smithy/core/README.md b/amplify/functions/deleteDocument/node_modules/@smithy/core/README.md new file mode 100644 index 0000000..51f8922 --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@smithy/core/README.md @@ -0,0 +1,45 @@ +# @smithy/core + +[![NPM version](https://img.shields.io/npm/v/@smithy/core/latest.svg)](https://www.npmjs.com/package/@smithy/core) +[![NPM downloads](https://img.shields.io/npm/dm/@smithy/core.svg)](https://www.npmjs.com/package/@smithy/core) + +> An internal package. You probably shouldn't use this package, at least directly. + +This package provides common or core functionality for generic Smithy clients. + +You do not need to explicitly install this package, since it will be installed during code generation if used. + +## Development of `@smithy/core` submodules + +Core submodules are organized for distribution via the `package.json` `exports` field. + +`exports` is supported by default by the latest Node.js, webpack, and esbuild. For react-native, it can be +enabled via instructions found at [reactnative.dev/blog](https://reactnative.dev/blog/2023/06/21/package-exports-support), but we also provide a compatibility redirect. + +Think of `@smithy/core` as a mono-package within the monorepo. 
+It preserves the benefits of modularization, for example to optimize Node.js initialization speed, +while making it easier to have a consistent version of core dependencies, reducing package sprawl when +installing a Smithy runtime client. + +### Guide for submodules + +- Each `index.ts` file corresponding to the pattern `./src/submodules//index.ts` will be + published as a separate `dist-cjs` bundled submodule index using the `Inliner.js` build script. +- create a folder as `./src/submodules/` including an `index.ts` file and a `README.md` file. + - The linter will throw an error on missing submodule metadata in `package.json` and the various `tsconfig.json` files, but it will automatically fix them if possible. +- a submodule is equivalent to a standalone `@smithy/` package in that importing it in Node.js will resolve a separate bundle. +- submodules may not relatively import files from other submodules. Instead, directly use the `@scope/pkg/submodule` name as the import. + - The linter will check for this and throw an error. +- To the extent possible, correctly declaring submodule metadata is validated by the linter in `@smithy/core`. + The linter runs during `yarn build` and also as `yarn lint`. + +### When should I create an `@smithy/core/submodule` vs. `@smithy/new-package`? + +Keep in mind that the core package is installed by all downstream clients. + +If the component functionality is upstream of multiple clients, it is +a good candidate for a core submodule. For example, if `middleware-retry` had been written +after the support for submodules was added, it would have been a submodule. + +If the component's functionality is downstream of a client (rare), or only expected to be used by a very small +subset of clients, it could be written as a standalone package. 
diff --git a/amplify/functions/deleteDocument/node_modules/@smithy/core/cbor.d.ts b/amplify/functions/deleteDocument/node_modules/@smithy/core/cbor.d.ts new file mode 100644 index 0000000..c44b707 --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@smithy/core/cbor.d.ts @@ -0,0 +1,7 @@ +/** + * Do not edit: + * This is a compatibility redirect for contexts that do not understand package.json exports field. + */ +declare module "@smithy/core/cbor" { + export * from "@smithy/core/dist-types/submodules/cbor/index.d"; +} diff --git a/amplify/functions/deleteDocument/node_modules/@smithy/core/cbor.js b/amplify/functions/deleteDocument/node_modules/@smithy/core/cbor.js new file mode 100644 index 0000000..710fb79 --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@smithy/core/cbor.js @@ -0,0 +1,6 @@ + +/** + * Do not edit: + * This is a compatibility redirect for contexts that do not understand package.json exports field. + */ +module.exports = require("./dist-cjs/submodules/cbor/index.js"); diff --git a/amplify/functions/deleteDocument/node_modules/@smithy/core/dist-cjs/getSmithyContext.js b/amplify/functions/deleteDocument/node_modules/@smithy/core/dist-cjs/getSmithyContext.js new file mode 100644 index 0000000..532e610 --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@smithy/core/dist-cjs/getSmithyContext.js @@ -0,0 +1 @@ +module.exports = require("./index.js"); \ No newline at end of file diff --git a/amplify/functions/deleteDocument/node_modules/@smithy/core/dist-cjs/index.js b/amplify/functions/deleteDocument/node_modules/@smithy/core/dist-cjs/index.js new file mode 100644 index 0000000..a3735f6 --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@smithy/core/dist-cjs/index.js @@ -0,0 +1,454 @@ +var __defProp = Object.defineProperty; +var __getOwnPropDesc = Object.getOwnPropertyDescriptor; +var __getOwnPropNames = Object.getOwnPropertyNames; +var __hasOwnProp = Object.prototype.hasOwnProperty; +var __name 
= (target, value) => __defProp(target, "name", { value, configurable: true }); +var __export = (target, all) => { + for (var name in all) + __defProp(target, name, { get: all[name], enumerable: true }); +}; +var __copyProps = (to, from, except, desc) => { + if (from && typeof from === "object" || typeof from === "function") { + for (let key of __getOwnPropNames(from)) + if (!__hasOwnProp.call(to, key) && key !== except) + __defProp(to, key, { get: () => from[key], enumerable: !(desc = __getOwnPropDesc(from, key)) || desc.enumerable }); + } + return to; +}; +var __toCommonJS = (mod) => __copyProps(__defProp({}, "__esModule", { value: true }), mod); + +// src/index.ts +var src_exports = {}; +__export(src_exports, { + DefaultIdentityProviderConfig: () => DefaultIdentityProviderConfig, + EXPIRATION_MS: () => EXPIRATION_MS, + HttpApiKeyAuthSigner: () => HttpApiKeyAuthSigner, + HttpBearerAuthSigner: () => HttpBearerAuthSigner, + NoAuthSigner: () => NoAuthSigner, + createIsIdentityExpiredFunction: () => createIsIdentityExpiredFunction, + createPaginator: () => createPaginator, + doesIdentityRequireRefresh: () => doesIdentityRequireRefresh, + getHttpAuthSchemeEndpointRuleSetPlugin: () => getHttpAuthSchemeEndpointRuleSetPlugin, + getHttpAuthSchemePlugin: () => getHttpAuthSchemePlugin, + getHttpSigningPlugin: () => getHttpSigningPlugin, + getSmithyContext: () => getSmithyContext, + httpAuthSchemeEndpointRuleSetMiddlewareOptions: () => httpAuthSchemeEndpointRuleSetMiddlewareOptions, + httpAuthSchemeMiddleware: () => httpAuthSchemeMiddleware, + httpAuthSchemeMiddlewareOptions: () => httpAuthSchemeMiddlewareOptions, + httpSigningMiddleware: () => httpSigningMiddleware, + httpSigningMiddlewareOptions: () => httpSigningMiddlewareOptions, + isIdentityExpired: () => isIdentityExpired, + memoizeIdentityProvider: () => memoizeIdentityProvider, + normalizeProvider: () => normalizeProvider, + requestBuilder: () => import_protocols.requestBuilder, + setFeature: () => setFeature +}); 
+module.exports = __toCommonJS(src_exports); + +// src/getSmithyContext.ts +var import_types = require("@smithy/types"); +var getSmithyContext = /* @__PURE__ */ __name((context) => context[import_types.SMITHY_CONTEXT_KEY] || (context[import_types.SMITHY_CONTEXT_KEY] = {}), "getSmithyContext"); + +// src/middleware-http-auth-scheme/httpAuthSchemeMiddleware.ts +var import_util_middleware = require("@smithy/util-middleware"); + +// src/middleware-http-auth-scheme/resolveAuthOptions.ts +var resolveAuthOptions = /* @__PURE__ */ __name((candidateAuthOptions, authSchemePreference) => { + if (!authSchemePreference || authSchemePreference.length === 0) { + return candidateAuthOptions; + } + const preferredAuthOptions = []; + for (const preferredSchemeName of authSchemePreference) { + for (const candidateAuthOption of candidateAuthOptions) { + const candidateAuthSchemeName = candidateAuthOption.schemeId.split("#")[1]; + if (candidateAuthSchemeName === preferredSchemeName) { + preferredAuthOptions.push(candidateAuthOption); + } + } + } + for (const candidateAuthOption of candidateAuthOptions) { + if (!preferredAuthOptions.find(({ schemeId }) => schemeId === candidateAuthOption.schemeId)) { + preferredAuthOptions.push(candidateAuthOption); + } + } + return preferredAuthOptions; +}, "resolveAuthOptions"); + +// src/middleware-http-auth-scheme/httpAuthSchemeMiddleware.ts +function convertHttpAuthSchemesToMap(httpAuthSchemes) { + const map = /* @__PURE__ */ new Map(); + for (const scheme of httpAuthSchemes) { + map.set(scheme.schemeId, scheme); + } + return map; +} +__name(convertHttpAuthSchemesToMap, "convertHttpAuthSchemesToMap"); +var httpAuthSchemeMiddleware = /* @__PURE__ */ __name((config, mwOptions) => (next, context) => async (args) => { + const options = config.httpAuthSchemeProvider( + await mwOptions.httpAuthSchemeParametersProvider(config, context, args.input) + ); + const authSchemePreference = config.authSchemePreference ? 
await config.authSchemePreference() : []; + const resolvedOptions = resolveAuthOptions(options, authSchemePreference); + const authSchemes = convertHttpAuthSchemesToMap(config.httpAuthSchemes); + const smithyContext = (0, import_util_middleware.getSmithyContext)(context); + const failureReasons = []; + for (const option of resolvedOptions) { + const scheme = authSchemes.get(option.schemeId); + if (!scheme) { + failureReasons.push(`HttpAuthScheme \`${option.schemeId}\` was not enabled for this service.`); + continue; + } + const identityProvider = scheme.identityProvider(await mwOptions.identityProviderConfigProvider(config)); + if (!identityProvider) { + failureReasons.push(`HttpAuthScheme \`${option.schemeId}\` did not have an IdentityProvider configured.`); + continue; + } + const { identityProperties = {}, signingProperties = {} } = option.propertiesExtractor?.(config, context) || {}; + option.identityProperties = Object.assign(option.identityProperties || {}, identityProperties); + option.signingProperties = Object.assign(option.signingProperties || {}, signingProperties); + smithyContext.selectedHttpAuthScheme = { + httpAuthOption: option, + identity: await identityProvider(option.identityProperties), + signer: scheme.signer + }; + break; + } + if (!smithyContext.selectedHttpAuthScheme) { + throw new Error(failureReasons.join("\n")); + } + return next(args); +}, "httpAuthSchemeMiddleware"); + +// src/middleware-http-auth-scheme/getHttpAuthSchemeEndpointRuleSetPlugin.ts +var httpAuthSchemeEndpointRuleSetMiddlewareOptions = { + step: "serialize", + tags: ["HTTP_AUTH_SCHEME"], + name: "httpAuthSchemeMiddleware", + override: true, + relation: "before", + toMiddleware: "endpointV2Middleware" +}; +var getHttpAuthSchemeEndpointRuleSetPlugin = /* @__PURE__ */ __name((config, { + httpAuthSchemeParametersProvider, + identityProviderConfigProvider +}) => ({ + applyToStack: (clientStack) => { + clientStack.addRelativeTo( + httpAuthSchemeMiddleware(config, { + 
httpAuthSchemeParametersProvider, + identityProviderConfigProvider + }), + httpAuthSchemeEndpointRuleSetMiddlewareOptions + ); + } +}), "getHttpAuthSchemeEndpointRuleSetPlugin"); + +// src/middleware-http-auth-scheme/getHttpAuthSchemePlugin.ts +var import_middleware_serde = require("@smithy/middleware-serde"); +var httpAuthSchemeMiddlewareOptions = { + step: "serialize", + tags: ["HTTP_AUTH_SCHEME"], + name: "httpAuthSchemeMiddleware", + override: true, + relation: "before", + toMiddleware: import_middleware_serde.serializerMiddlewareOption.name +}; +var getHttpAuthSchemePlugin = /* @__PURE__ */ __name((config, { + httpAuthSchemeParametersProvider, + identityProviderConfigProvider +}) => ({ + applyToStack: (clientStack) => { + clientStack.addRelativeTo( + httpAuthSchemeMiddleware(config, { + httpAuthSchemeParametersProvider, + identityProviderConfigProvider + }), + httpAuthSchemeMiddlewareOptions + ); + } +}), "getHttpAuthSchemePlugin"); + +// src/middleware-http-signing/httpSigningMiddleware.ts +var import_protocol_http = require("@smithy/protocol-http"); + +var defaultErrorHandler = /* @__PURE__ */ __name((signingProperties) => (error) => { + throw error; +}, "defaultErrorHandler"); +var defaultSuccessHandler = /* @__PURE__ */ __name((httpResponse, signingProperties) => { +}, "defaultSuccessHandler"); +var httpSigningMiddleware = /* @__PURE__ */ __name((config) => (next, context) => async (args) => { + if (!import_protocol_http.HttpRequest.isInstance(args.request)) { + return next(args); + } + const smithyContext = (0, import_util_middleware.getSmithyContext)(context); + const scheme = smithyContext.selectedHttpAuthScheme; + if (!scheme) { + throw new Error(`No HttpAuthScheme was selected: unable to sign request`); + } + const { + httpAuthOption: { signingProperties = {} }, + identity, + signer + } = scheme; + const output = await next({ + ...args, + request: await signer.sign(args.request, identity, signingProperties) + }).catch((signer.errorHandler || 
defaultErrorHandler)(signingProperties)); + (signer.successHandler || defaultSuccessHandler)(output.response, signingProperties); + return output; +}, "httpSigningMiddleware"); + +// src/middleware-http-signing/getHttpSigningMiddleware.ts +var httpSigningMiddlewareOptions = { + step: "finalizeRequest", + tags: ["HTTP_SIGNING"], + name: "httpSigningMiddleware", + aliases: ["apiKeyMiddleware", "tokenMiddleware", "awsAuthMiddleware"], + override: true, + relation: "after", + toMiddleware: "retryMiddleware" +}; +var getHttpSigningPlugin = /* @__PURE__ */ __name((config) => ({ + applyToStack: (clientStack) => { + clientStack.addRelativeTo(httpSigningMiddleware(config), httpSigningMiddlewareOptions); + } +}), "getHttpSigningPlugin"); + +// src/normalizeProvider.ts +var normalizeProvider = /* @__PURE__ */ __name((input) => { + if (typeof input === "function") + return input; + const promisified = Promise.resolve(input); + return () => promisified; +}, "normalizeProvider"); + +// src/pagination/createPaginator.ts +var makePagedClientRequest = /* @__PURE__ */ __name(async (CommandCtor, client, input, withCommand = (_) => _, ...args) => { + let command = new CommandCtor(input); + command = withCommand(command) ?? command; + return await client.send(command, ...args); +}, "makePagedClientRequest"); +function createPaginator(ClientCtor, CommandCtor, inputTokenName, outputTokenName, pageSizeTokenName) { + return /* @__PURE__ */ __name(async function* paginateOperation(config, input, ...additionalArguments) { + const _input = input; + let token = config.startingToken ?? _input[inputTokenName]; + let hasNext = true; + let page; + while (hasNext) { + _input[inputTokenName] = token; + if (pageSizeTokenName) { + _input[pageSizeTokenName] = _input[pageSizeTokenName] ?? 
config.pageSize; + } + if (config.client instanceof ClientCtor) { + page = await makePagedClientRequest( + CommandCtor, + config.client, + input, + config.withCommand, + ...additionalArguments + ); + } else { + throw new Error(`Invalid client, expected instance of ${ClientCtor.name}`); + } + yield page; + const prevToken = token; + token = get(page, outputTokenName); + hasNext = !!(token && (!config.stopOnSameToken || token !== prevToken)); + } + return void 0; + }, "paginateOperation"); +} +__name(createPaginator, "createPaginator"); +var get = /* @__PURE__ */ __name((fromObject, path) => { + let cursor = fromObject; + const pathComponents = path.split("."); + for (const step of pathComponents) { + if (!cursor || typeof cursor !== "object") { + return void 0; + } + cursor = cursor[step]; + } + return cursor; +}, "get"); + +// src/protocols/requestBuilder.ts +var import_protocols = require("@smithy/core/protocols"); + +// src/setFeature.ts +function setFeature(context, feature, value) { + if (!context.__smithy_context) { + context.__smithy_context = { + features: {} + }; + } else if (!context.__smithy_context.features) { + context.__smithy_context.features = {}; + } + context.__smithy_context.features[feature] = value; +} +__name(setFeature, "setFeature"); + +// src/util-identity-and-auth/DefaultIdentityProviderConfig.ts +var DefaultIdentityProviderConfig = class { + /** + * Creates an IdentityProviderConfig with a record of scheme IDs to identity providers. 
+ * + * @param config scheme IDs and identity providers to configure + */ + constructor(config) { + this.authSchemes = /* @__PURE__ */ new Map(); + for (const [key, value] of Object.entries(config)) { + if (value !== void 0) { + this.authSchemes.set(key, value); + } + } + } + static { + __name(this, "DefaultIdentityProviderConfig"); + } + getIdentityProvider(schemeId) { + return this.authSchemes.get(schemeId); + } +}; + +// src/util-identity-and-auth/httpAuthSchemes/httpApiKeyAuth.ts + + +var HttpApiKeyAuthSigner = class { + static { + __name(this, "HttpApiKeyAuthSigner"); + } + async sign(httpRequest, identity, signingProperties) { + if (!signingProperties) { + throw new Error( + "request could not be signed with `apiKey` since the `name` and `in` signer properties are missing" + ); + } + if (!signingProperties.name) { + throw new Error("request could not be signed with `apiKey` since the `name` signer property is missing"); + } + if (!signingProperties.in) { + throw new Error("request could not be signed with `apiKey` since the `in` signer property is missing"); + } + if (!identity.apiKey) { + throw new Error("request could not be signed with `apiKey` since the `apiKey` is not defined"); + } + const clonedRequest = import_protocol_http.HttpRequest.clone(httpRequest); + if (signingProperties.in === import_types.HttpApiKeyAuthLocation.QUERY) { + clonedRequest.query[signingProperties.name] = identity.apiKey; + } else if (signingProperties.in === import_types.HttpApiKeyAuthLocation.HEADER) { + clonedRequest.headers[signingProperties.name] = signingProperties.scheme ? 
`${signingProperties.scheme} ${identity.apiKey}` : identity.apiKey; + } else { + throw new Error( + "request can only be signed with `apiKey` locations `query` or `header`, but found: `" + signingProperties.in + "`" + ); + } + return clonedRequest; + } +}; + +// src/util-identity-and-auth/httpAuthSchemes/httpBearerAuth.ts + +var HttpBearerAuthSigner = class { + static { + __name(this, "HttpBearerAuthSigner"); + } + async sign(httpRequest, identity, signingProperties) { + const clonedRequest = import_protocol_http.HttpRequest.clone(httpRequest); + if (!identity.token) { + throw new Error("request could not be signed with `token` since the `token` is not defined"); + } + clonedRequest.headers["Authorization"] = `Bearer ${identity.token}`; + return clonedRequest; + } +}; + +// src/util-identity-and-auth/httpAuthSchemes/noAuth.ts +var NoAuthSigner = class { + static { + __name(this, "NoAuthSigner"); + } + async sign(httpRequest, identity, signingProperties) { + return httpRequest; + } +}; + +// src/util-identity-and-auth/memoizeIdentityProvider.ts +var createIsIdentityExpiredFunction = /* @__PURE__ */ __name((expirationMs) => (identity) => doesIdentityRequireRefresh(identity) && identity.expiration.getTime() - Date.now() < expirationMs, "createIsIdentityExpiredFunction"); +var EXPIRATION_MS = 3e5; +var isIdentityExpired = createIsIdentityExpiredFunction(EXPIRATION_MS); +var doesIdentityRequireRefresh = /* @__PURE__ */ __name((identity) => identity.expiration !== void 0, "doesIdentityRequireRefresh"); +var memoizeIdentityProvider = /* @__PURE__ */ __name((provider, isExpired, requiresRefresh) => { + if (provider === void 0) { + return void 0; + } + const normalizedProvider = typeof provider !== "function" ? 
async () => Promise.resolve(provider) : provider; + let resolved; + let pending; + let hasResult; + let isConstant = false; + const coalesceProvider = /* @__PURE__ */ __name(async (options) => { + if (!pending) { + pending = normalizedProvider(options); + } + try { + resolved = await pending; + hasResult = true; + isConstant = false; + } finally { + pending = void 0; + } + return resolved; + }, "coalesceProvider"); + if (isExpired === void 0) { + return async (options) => { + if (!hasResult || options?.forceRefresh) { + resolved = await coalesceProvider(options); + } + return resolved; + }; + } + return async (options) => { + if (!hasResult || options?.forceRefresh) { + resolved = await coalesceProvider(options); + } + if (isConstant) { + return resolved; + } + if (!requiresRefresh(resolved)) { + isConstant = true; + return resolved; + } + if (isExpired(resolved)) { + await coalesceProvider(options); + return resolved; + } + return resolved; + }; +}, "memoizeIdentityProvider"); +// Annotate the CommonJS export names for ESM import in node: + +0 && (module.exports = { + createPaginator, + getSmithyContext, + httpAuthSchemeMiddleware, + httpAuthSchemeEndpointRuleSetMiddlewareOptions, + getHttpAuthSchemeEndpointRuleSetPlugin, + httpAuthSchemeMiddlewareOptions, + getHttpAuthSchemePlugin, + httpSigningMiddleware, + httpSigningMiddlewareOptions, + getHttpSigningPlugin, + normalizeProvider, + requestBuilder, + setFeature, + DefaultIdentityProviderConfig, + HttpApiKeyAuthSigner, + HttpBearerAuthSigner, + NoAuthSigner, + createIsIdentityExpiredFunction, + EXPIRATION_MS, + isIdentityExpired, + doesIdentityRequireRefresh, + memoizeIdentityProvider +}); + diff --git a/amplify/functions/deleteDocument/node_modules/@smithy/core/dist-cjs/middleware-http-auth-scheme/getHttpAuthSchemeEndpointRuleSetPlugin.js b/amplify/functions/deleteDocument/node_modules/@smithy/core/dist-cjs/middleware-http-auth-scheme/getHttpAuthSchemeEndpointRuleSetPlugin.js new file mode 100644 index 
0000000..0440577 --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@smithy/core/dist-cjs/middleware-http-auth-scheme/getHttpAuthSchemeEndpointRuleSetPlugin.js @@ -0,0 +1 @@ +module.exports = require("../index.js"); \ No newline at end of file diff --git a/amplify/functions/deleteDocument/node_modules/@smithy/core/dist-cjs/middleware-http-auth-scheme/getHttpAuthSchemePlugin.js b/amplify/functions/deleteDocument/node_modules/@smithy/core/dist-cjs/middleware-http-auth-scheme/getHttpAuthSchemePlugin.js new file mode 100644 index 0000000..0440577 --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@smithy/core/dist-cjs/middleware-http-auth-scheme/getHttpAuthSchemePlugin.js @@ -0,0 +1 @@ +module.exports = require("../index.js"); \ No newline at end of file diff --git a/amplify/functions/deleteDocument/node_modules/@smithy/core/dist-cjs/middleware-http-auth-scheme/httpAuthSchemeMiddleware.js b/amplify/functions/deleteDocument/node_modules/@smithy/core/dist-cjs/middleware-http-auth-scheme/httpAuthSchemeMiddleware.js new file mode 100644 index 0000000..0440577 --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@smithy/core/dist-cjs/middleware-http-auth-scheme/httpAuthSchemeMiddleware.js @@ -0,0 +1 @@ +module.exports = require("../index.js"); \ No newline at end of file diff --git a/amplify/functions/deleteDocument/node_modules/@smithy/core/dist-cjs/middleware-http-auth-scheme/index.js b/amplify/functions/deleteDocument/node_modules/@smithy/core/dist-cjs/middleware-http-auth-scheme/index.js new file mode 100644 index 0000000..0440577 --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@smithy/core/dist-cjs/middleware-http-auth-scheme/index.js @@ -0,0 +1 @@ +module.exports = require("../index.js"); \ No newline at end of file diff --git a/amplify/functions/deleteDocument/node_modules/@smithy/core/dist-cjs/middleware-http-auth-scheme/resolveAuthOptions.js 
b/amplify/functions/deleteDocument/node_modules/@smithy/core/dist-cjs/middleware-http-auth-scheme/resolveAuthOptions.js new file mode 100644 index 0000000..0440577 --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@smithy/core/dist-cjs/middleware-http-auth-scheme/resolveAuthOptions.js @@ -0,0 +1 @@ +module.exports = require("../index.js"); \ No newline at end of file diff --git a/amplify/functions/deleteDocument/node_modules/@smithy/core/dist-cjs/middleware-http-signing/getHttpSigningMiddleware.js b/amplify/functions/deleteDocument/node_modules/@smithy/core/dist-cjs/middleware-http-signing/getHttpSigningMiddleware.js new file mode 100644 index 0000000..0440577 --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@smithy/core/dist-cjs/middleware-http-signing/getHttpSigningMiddleware.js @@ -0,0 +1 @@ +module.exports = require("../index.js"); \ No newline at end of file diff --git a/amplify/functions/deleteDocument/node_modules/@smithy/core/dist-cjs/middleware-http-signing/httpSigningMiddleware.js b/amplify/functions/deleteDocument/node_modules/@smithy/core/dist-cjs/middleware-http-signing/httpSigningMiddleware.js new file mode 100644 index 0000000..0440577 --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@smithy/core/dist-cjs/middleware-http-signing/httpSigningMiddleware.js @@ -0,0 +1 @@ +module.exports = require("../index.js"); \ No newline at end of file diff --git a/amplify/functions/deleteDocument/node_modules/@smithy/core/dist-cjs/middleware-http-signing/index.js b/amplify/functions/deleteDocument/node_modules/@smithy/core/dist-cjs/middleware-http-signing/index.js new file mode 100644 index 0000000..0440577 --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@smithy/core/dist-cjs/middleware-http-signing/index.js @@ -0,0 +1 @@ +module.exports = require("../index.js"); \ No newline at end of file diff --git a/amplify/functions/deleteDocument/node_modules/@smithy/core/dist-cjs/normalizeProvider.js 
b/amplify/functions/deleteDocument/node_modules/@smithy/core/dist-cjs/normalizeProvider.js new file mode 100644 index 0000000..532e610 --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@smithy/core/dist-cjs/normalizeProvider.js @@ -0,0 +1 @@ +module.exports = require("./index.js"); \ No newline at end of file diff --git a/amplify/functions/deleteDocument/node_modules/@smithy/core/dist-cjs/pagination/createPaginator.js b/amplify/functions/deleteDocument/node_modules/@smithy/core/dist-cjs/pagination/createPaginator.js new file mode 100644 index 0000000..0440577 --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@smithy/core/dist-cjs/pagination/createPaginator.js @@ -0,0 +1 @@ +module.exports = require("../index.js"); \ No newline at end of file diff --git a/amplify/functions/deleteDocument/node_modules/@smithy/core/dist-cjs/protocols/requestBuilder.js b/amplify/functions/deleteDocument/node_modules/@smithy/core/dist-cjs/protocols/requestBuilder.js new file mode 100644 index 0000000..0440577 --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@smithy/core/dist-cjs/protocols/requestBuilder.js @@ -0,0 +1 @@ +module.exports = require("../index.js"); \ No newline at end of file diff --git a/amplify/functions/deleteDocument/node_modules/@smithy/core/dist-cjs/setFeature.js b/amplify/functions/deleteDocument/node_modules/@smithy/core/dist-cjs/setFeature.js new file mode 100644 index 0000000..532e610 --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@smithy/core/dist-cjs/setFeature.js @@ -0,0 +1 @@ +module.exports = require("./index.js"); \ No newline at end of file diff --git a/amplify/functions/deleteDocument/node_modules/@smithy/core/dist-cjs/submodules/cbor/index.js b/amplify/functions/deleteDocument/node_modules/@smithy/core/dist-cjs/submodules/cbor/index.js new file mode 100644 index 0000000..0f69723 --- /dev/null +++ 
b/amplify/functions/deleteDocument/node_modules/@smithy/core/dist-cjs/submodules/cbor/index.js @@ -0,0 +1,733 @@ +var __defProp = Object.defineProperty; +var __getOwnPropDesc = Object.getOwnPropertyDescriptor; +var __getOwnPropNames = Object.getOwnPropertyNames; +var __hasOwnProp = Object.prototype.hasOwnProperty; +var __export = (target, all) => { + for (var name in all) + __defProp(target, name, { get: all[name], enumerable: true }); +}; +var __copyProps = (to, from, except, desc) => { + if (from && typeof from === "object" || typeof from === "function") { + for (let key of __getOwnPropNames(from)) + if (!__hasOwnProp.call(to, key) && key !== except) + __defProp(to, key, { get: () => from[key], enumerable: !(desc = __getOwnPropDesc(from, key)) || desc.enumerable }); + } + return to; +}; +var __toCommonJS = (mod) => __copyProps(__defProp({}, "__esModule", { value: true }), mod); + +// src/submodules/cbor/index.ts +var cbor_exports = {}; +__export(cbor_exports, { + buildHttpRpcRequest: () => buildHttpRpcRequest, + cbor: () => cbor, + checkCborResponse: () => checkCborResponse, + dateToTag: () => dateToTag, + loadSmithyRpcV2CborErrorCode: () => loadSmithyRpcV2CborErrorCode, + parseCborBody: () => parseCborBody, + parseCborErrorBody: () => parseCborErrorBody, + tag: () => tag, + tagSymbol: () => tagSymbol +}); +module.exports = __toCommonJS(cbor_exports); + +// src/submodules/cbor/cbor-decode.ts +var import_util_utf8 = require("@smithy/util-utf8"); + +// src/submodules/cbor/cbor-types.ts +var majorUint64 = 0; +var majorNegativeInt64 = 1; +var majorUnstructuredByteString = 2; +var majorUtf8String = 3; +var majorList = 4; +var majorMap = 5; +var majorTag = 6; +var majorSpecial = 7; +var specialFalse = 20; +var specialTrue = 21; +var specialNull = 22; +var specialUndefined = 23; +var extendedOneByte = 24; +var extendedFloat16 = 25; +var extendedFloat32 = 26; +var extendedFloat64 = 27; +var minorIndefinite = 31; +function alloc(size) { + return typeof Buffer !== 
"undefined" ? Buffer.alloc(size) : new Uint8Array(size); +} +var tagSymbol = Symbol("@smithy/core/cbor::tagSymbol"); +function tag(data2) { + data2[tagSymbol] = true; + return data2; +} + +// src/submodules/cbor/cbor-decode.ts +var USE_TEXT_DECODER = typeof TextDecoder !== "undefined"; +var USE_BUFFER = typeof Buffer !== "undefined"; +var payload = alloc(0); +var dataView = new DataView(payload.buffer, payload.byteOffset, payload.byteLength); +var textDecoder = USE_TEXT_DECODER ? new TextDecoder() : null; +var _offset = 0; +function setPayload(bytes) { + payload = bytes; + dataView = new DataView(payload.buffer, payload.byteOffset, payload.byteLength); +} +function decode(at, to) { + if (at >= to) { + throw new Error("unexpected end of (decode) payload."); + } + const major = (payload[at] & 224) >> 5; + const minor = payload[at] & 31; + switch (major) { + case majorUint64: + case majorNegativeInt64: + case majorTag: + let unsignedInt; + let offset; + if (minor < 24) { + unsignedInt = minor; + offset = 1; + } else { + switch (minor) { + case extendedOneByte: + case extendedFloat16: + case extendedFloat32: + case extendedFloat64: + const countLength = minorValueToArgumentLength[minor]; + const countOffset = countLength + 1; + offset = countOffset; + if (to - at < countOffset) { + throw new Error(`countLength ${countLength} greater than remaining buf len.`); + } + const countIndex = at + 1; + if (countLength === 1) { + unsignedInt = payload[countIndex]; + } else if (countLength === 2) { + unsignedInt = dataView.getUint16(countIndex); + } else if (countLength === 4) { + unsignedInt = dataView.getUint32(countIndex); + } else { + unsignedInt = dataView.getBigUint64(countIndex); + } + break; + default: + throw new Error(`unexpected minor value ${minor}.`); + } + } + if (major === majorUint64) { + _offset = offset; + return castBigInt(unsignedInt); + } else if (major === majorNegativeInt64) { + let negativeInt; + if (typeof unsignedInt === "bigint") { + negativeInt = 
BigInt(-1) - unsignedInt; + } else { + negativeInt = -1 - unsignedInt; + } + _offset = offset; + return castBigInt(negativeInt); + } else { + const value = decode(at + offset, to); + const valueOffset = _offset; + _offset = offset + valueOffset; + return tag({ tag: castBigInt(unsignedInt), value }); + } + case majorUtf8String: + case majorMap: + case majorList: + case majorUnstructuredByteString: + if (minor === minorIndefinite) { + switch (major) { + case majorUtf8String: + return decodeUtf8StringIndefinite(at, to); + case majorMap: + return decodeMapIndefinite(at, to); + case majorList: + return decodeListIndefinite(at, to); + case majorUnstructuredByteString: + return decodeUnstructuredByteStringIndefinite(at, to); + } + } else { + switch (major) { + case majorUtf8String: + return decodeUtf8String(at, to); + case majorMap: + return decodeMap(at, to); + case majorList: + return decodeList(at, to); + case majorUnstructuredByteString: + return decodeUnstructuredByteString(at, to); + } + } + default: + return decodeSpecial(at, to); + } +} +function bytesToUtf8(bytes, at, to) { + if (USE_BUFFER && bytes.constructor?.name === "Buffer") { + return bytes.toString("utf-8", at, to); + } + if (textDecoder) { + return textDecoder.decode(bytes.subarray(at, to)); + } + return (0, import_util_utf8.toUtf8)(bytes.subarray(at, to)); +} +function demote(bigInteger) { + const num = Number(bigInteger); + if (num < Number.MIN_SAFE_INTEGER || Number.MAX_SAFE_INTEGER < num) { + console.warn(new Error(`@smithy/core/cbor - truncating BigInt(${bigInteger}) to ${num} with loss of precision.`)); + } + return num; +} +var minorValueToArgumentLength = { + [extendedOneByte]: 1, + [extendedFloat16]: 2, + [extendedFloat32]: 4, + [extendedFloat64]: 8 +}; +function bytesToFloat16(a, b) { + const sign = a >> 7; + const exponent = (a & 124) >> 2; + const fraction = (a & 3) << 8 | b; + const scalar = sign === 0 ? 
1 : -1; + let exponentComponent; + let summation; + if (exponent === 0) { + if (fraction === 0) { + return 0; + } else { + exponentComponent = Math.pow(2, 1 - 15); + summation = 0; + } + } else if (exponent === 31) { + if (fraction === 0) { + return scalar * Infinity; + } else { + return NaN; + } + } else { + exponentComponent = Math.pow(2, exponent - 15); + summation = 1; + } + summation += fraction / 1024; + return scalar * (exponentComponent * summation); +} +function decodeCount(at, to) { + const minor = payload[at] & 31; + if (minor < 24) { + _offset = 1; + return minor; + } + if (minor === extendedOneByte || minor === extendedFloat16 || minor === extendedFloat32 || minor === extendedFloat64) { + const countLength = minorValueToArgumentLength[minor]; + _offset = countLength + 1; + if (to - at < _offset) { + throw new Error(`countLength ${countLength} greater than remaining buf len.`); + } + const countIndex = at + 1; + if (countLength === 1) { + return payload[countIndex]; + } else if (countLength === 2) { + return dataView.getUint16(countIndex); + } else if (countLength === 4) { + return dataView.getUint32(countIndex); + } + return demote(dataView.getBigUint64(countIndex)); + } + throw new Error(`unexpected minor value ${minor}.`); +} +function decodeUtf8String(at, to) { + const length = decodeCount(at, to); + const offset = _offset; + at += offset; + if (to - at < length) { + throw new Error(`string len ${length} greater than remaining buf len.`); + } + const value = bytesToUtf8(payload, at, at + length); + _offset = offset + length; + return value; +} +function decodeUtf8StringIndefinite(at, to) { + at += 1; + const vector = []; + for (const base = at; at < to; ) { + if (payload[at] === 255) { + const data2 = alloc(vector.length); + data2.set(vector, 0); + _offset = at - base + 2; + return bytesToUtf8(data2, 0, data2.length); + } + const major = (payload[at] & 224) >> 5; + const minor = payload[at] & 31; + if (major !== majorUtf8String) { + throw new 
Error(`unexpected major type ${major} in indefinite string.`); + } + if (minor === minorIndefinite) { + throw new Error("nested indefinite string."); + } + const bytes = decodeUnstructuredByteString(at, to); + const length = _offset; + at += length; + for (let i = 0; i < bytes.length; ++i) { + vector.push(bytes[i]); + } + } + throw new Error("expected break marker."); +} +function decodeUnstructuredByteString(at, to) { + const length = decodeCount(at, to); + const offset = _offset; + at += offset; + if (to - at < length) { + throw new Error(`unstructured byte string len ${length} greater than remaining buf len.`); + } + const value = payload.subarray(at, at + length); + _offset = offset + length; + return value; +} +function decodeUnstructuredByteStringIndefinite(at, to) { + at += 1; + const vector = []; + for (const base = at; at < to; ) { + if (payload[at] === 255) { + const data2 = alloc(vector.length); + data2.set(vector, 0); + _offset = at - base + 2; + return data2; + } + const major = (payload[at] & 224) >> 5; + const minor = payload[at] & 31; + if (major !== majorUnstructuredByteString) { + throw new Error(`unexpected major type ${major} in indefinite string.`); + } + if (minor === minorIndefinite) { + throw new Error("nested indefinite string."); + } + const bytes = decodeUnstructuredByteString(at, to); + const length = _offset; + at += length; + for (let i = 0; i < bytes.length; ++i) { + vector.push(bytes[i]); + } + } + throw new Error("expected break marker."); +} +function decodeList(at, to) { + const listDataLength = decodeCount(at, to); + const offset = _offset; + at += offset; + const base = at; + const list = Array(listDataLength); + for (let i = 0; i < listDataLength; ++i) { + const item = decode(at, to); + const itemOffset = _offset; + list[i] = item; + at += itemOffset; + } + _offset = offset + (at - base); + return list; +} +function decodeListIndefinite(at, to) { + at += 1; + const list = []; + for (const base = at; at < to; ) { + if 
(payload[at] === 255) { + _offset = at - base + 2; + return list; + } + const item = decode(at, to); + const n = _offset; + at += n; + list.push(item); + } + throw new Error("expected break marker."); +} +function decodeMap(at, to) { + const mapDataLength = decodeCount(at, to); + const offset = _offset; + at += offset; + const base = at; + const map = {}; + for (let i = 0; i < mapDataLength; ++i) { + if (at >= to) { + throw new Error("unexpected end of map payload."); + } + const major = (payload[at] & 224) >> 5; + if (major !== majorUtf8String) { + throw new Error(`unexpected major type ${major} for map key at index ${at}.`); + } + const key = decode(at, to); + at += _offset; + const value = decode(at, to); + at += _offset; + map[key] = value; + } + _offset = offset + (at - base); + return map; +} +function decodeMapIndefinite(at, to) { + at += 1; + const base = at; + const map = {}; + for (; at < to; ) { + if (at >= to) { + throw new Error("unexpected end of map payload."); + } + if (payload[at] === 255) { + _offset = at - base + 2; + return map; + } + const major = (payload[at] & 224) >> 5; + if (major !== majorUtf8String) { + throw new Error(`unexpected major type ${major} for map key.`); + } + const key = decode(at, to); + at += _offset; + const value = decode(at, to); + at += _offset; + map[key] = value; + } + throw new Error("expected break marker."); +} +function decodeSpecial(at, to) { + const minor = payload[at] & 31; + switch (minor) { + case specialTrue: + case specialFalse: + _offset = 1; + return minor === specialTrue; + case specialNull: + _offset = 1; + return null; + case specialUndefined: + _offset = 1; + return null; + case extendedFloat16: + if (to - at < 3) { + throw new Error("incomplete float16 at end of buf."); + } + _offset = 3; + return bytesToFloat16(payload[at + 1], payload[at + 2]); + case extendedFloat32: + if (to - at < 5) { + throw new Error("incomplete float32 at end of buf."); + } + _offset = 5; + return dataView.getFloat32(at + 
1); + case extendedFloat64: + if (to - at < 9) { + throw new Error("incomplete float64 at end of buf."); + } + _offset = 9; + return dataView.getFloat64(at + 1); + default: + throw new Error(`unexpected minor value ${minor}.`); + } +} +function castBigInt(bigInt) { + if (typeof bigInt === "number") { + return bigInt; + } + const num = Number(bigInt); + if (Number.MIN_SAFE_INTEGER <= num && num <= Number.MAX_SAFE_INTEGER) { + return num; + } + return bigInt; +} + +// src/submodules/cbor/cbor-encode.ts +var import_util_utf82 = require("@smithy/util-utf8"); +var USE_BUFFER2 = typeof Buffer !== "undefined"; +var initialSize = 2048; +var data = alloc(initialSize); +var dataView2 = new DataView(data.buffer, data.byteOffset, data.byteLength); +var cursor = 0; +function ensureSpace(bytes) { + const remaining = data.byteLength - cursor; + if (remaining < bytes) { + if (cursor < 16e6) { + resize(Math.max(data.byteLength * 4, data.byteLength + bytes)); + } else { + resize(data.byteLength + bytes + 16e6); + } + } +} +function toUint8Array() { + const out = alloc(cursor); + out.set(data.subarray(0, cursor), 0); + cursor = 0; + return out; +} +function resize(size) { + const old = data; + data = alloc(size); + if (old) { + if (old.copy) { + old.copy(data, 0, 0, old.byteLength); + } else { + data.set(old, 0); + } + } + dataView2 = new DataView(data.buffer, data.byteOffset, data.byteLength); +} +function encodeHeader(major, value) { + if (value < 24) { + data[cursor++] = major << 5 | value; + } else if (value < 1 << 8) { + data[cursor++] = major << 5 | 24; + data[cursor++] = value; + } else if (value < 1 << 16) { + data[cursor++] = major << 5 | extendedFloat16; + dataView2.setUint16(cursor, value); + cursor += 2; + } else if (value < 2 ** 32) { + data[cursor++] = major << 5 | extendedFloat32; + dataView2.setUint32(cursor, value); + cursor += 4; + } else { + data[cursor++] = major << 5 | extendedFloat64; + dataView2.setBigUint64(cursor, typeof value === "bigint" ? 
value : BigInt(value)); + cursor += 8; + } +} +function encode(_input) { + const encodeStack = [_input]; + while (encodeStack.length) { + const input = encodeStack.pop(); + ensureSpace(typeof input === "string" ? input.length * 4 : 64); + if (typeof input === "string") { + if (USE_BUFFER2) { + encodeHeader(majorUtf8String, Buffer.byteLength(input)); + cursor += data.write(input, cursor); + } else { + const bytes = (0, import_util_utf82.fromUtf8)(input); + encodeHeader(majorUtf8String, bytes.byteLength); + data.set(bytes, cursor); + cursor += bytes.byteLength; + } + continue; + } else if (typeof input === "number") { + if (Number.isInteger(input)) { + const nonNegative = input >= 0; + const major = nonNegative ? majorUint64 : majorNegativeInt64; + const value = nonNegative ? input : -input - 1; + if (value < 24) { + data[cursor++] = major << 5 | value; + } else if (value < 256) { + data[cursor++] = major << 5 | 24; + data[cursor++] = value; + } else if (value < 65536) { + data[cursor++] = major << 5 | extendedFloat16; + data[cursor++] = value >> 8; + data[cursor++] = value; + } else if (value < 4294967296) { + data[cursor++] = major << 5 | extendedFloat32; + dataView2.setUint32(cursor, value); + cursor += 4; + } else { + data[cursor++] = major << 5 | extendedFloat64; + dataView2.setBigUint64(cursor, BigInt(value)); + cursor += 8; + } + continue; + } + data[cursor++] = majorSpecial << 5 | extendedFloat64; + dataView2.setFloat64(cursor, input); + cursor += 8; + continue; + } else if (typeof input === "bigint") { + const nonNegative = input >= 0; + const major = nonNegative ? majorUint64 : majorNegativeInt64; + const value = nonNegative ? 
input : -input - BigInt(1); + const n = Number(value); + if (n < 24) { + data[cursor++] = major << 5 | n; + } else if (n < 256) { + data[cursor++] = major << 5 | 24; + data[cursor++] = n; + } else if (n < 65536) { + data[cursor++] = major << 5 | extendedFloat16; + data[cursor++] = n >> 8; + data[cursor++] = n & 255; + } else if (n < 4294967296) { + data[cursor++] = major << 5 | extendedFloat32; + dataView2.setUint32(cursor, n); + cursor += 4; + } else { + data[cursor++] = major << 5 | extendedFloat64; + dataView2.setBigUint64(cursor, value); + cursor += 8; + } + continue; + } else if (input === null) { + data[cursor++] = majorSpecial << 5 | specialNull; + continue; + } else if (typeof input === "boolean") { + data[cursor++] = majorSpecial << 5 | (input ? specialTrue : specialFalse); + continue; + } else if (typeof input === "undefined") { + throw new Error("@smithy/core/cbor: client may not serialize undefined value."); + } else if (Array.isArray(input)) { + for (let i = input.length - 1; i >= 0; --i) { + encodeStack.push(input[i]); + } + encodeHeader(majorList, input.length); + continue; + } else if (typeof input.byteLength === "number") { + ensureSpace(input.length * 2); + encodeHeader(majorUnstructuredByteString, input.length); + data.set(input, cursor); + cursor += input.byteLength; + continue; + } else if (typeof input === "object") { + if (input[tagSymbol]) { + if ("tag" in input && "value" in input) { + encodeStack.push(input.value); + encodeHeader(majorTag, input.tag); + continue; + } else { + throw new Error( + "tag encountered with missing fields, need 'tag' and 'value', found: " + JSON.stringify(input) + ); + } + } + const keys = Object.keys(input); + for (let i = keys.length - 1; i >= 0; --i) { + const key = keys[i]; + encodeStack.push(input[key]); + encodeStack.push(key); + } + encodeHeader(majorMap, keys.length); + continue; + } + throw new Error(`data type ${input?.constructor?.name ?? 
typeof input} not compatible for encoding.`); + } +} + +// src/submodules/cbor/cbor.ts +var cbor = { + deserialize(payload2) { + setPayload(payload2); + return decode(0, payload2.length); + }, + serialize(input) { + try { + encode(input); + return toUint8Array(); + } catch (e) { + toUint8Array(); + throw e; + } + }, + /** + * @public + * @param size - byte length to allocate. + * + * This may be used to garbage collect the CBOR + * shared encoding buffer space, + * e.g. resizeEncodingBuffer(0); + * + * This may also be used to pre-allocate more space for + * CBOR encoding, e.g. resizeEncodingBuffer(100_000_000); + */ + resizeEncodingBuffer(size) { + resize(size); + } +}; + +// src/submodules/cbor/parseCborBody.ts +var import_protocols = require("@smithy/core/protocols"); +var import_protocol_http = require("@smithy/protocol-http"); +var import_util_body_length_browser = require("@smithy/util-body-length-browser"); +var parseCborBody = (streamBody, context) => { + return (0, import_protocols.collectBody)(streamBody, context).then(async (bytes) => { + if (bytes.length) { + try { + return cbor.deserialize(bytes); + } catch (e) { + Object.defineProperty(e, "$responseBodyText", { + value: context.utf8Encoder(bytes) + }); + throw e; + } + } + return {}; + }); +}; +var dateToTag = (date) => { + return tag({ + tag: 1, + value: date.getTime() / 1e3 + }); +}; +var parseCborErrorBody = async (errorBody, context) => { + const value = await parseCborBody(errorBody, context); + value.message = value.message ?? 
value.Message; + return value; +}; +var loadSmithyRpcV2CborErrorCode = (output, data2) => { + const sanitizeErrorCode = (rawValue) => { + let cleanValue = rawValue; + if (typeof cleanValue === "number") { + cleanValue = cleanValue.toString(); + } + if (cleanValue.indexOf(",") >= 0) { + cleanValue = cleanValue.split(",")[0]; + } + if (cleanValue.indexOf(":") >= 0) { + cleanValue = cleanValue.split(":")[0]; + } + if (cleanValue.indexOf("#") >= 0) { + cleanValue = cleanValue.split("#")[1]; + } + return cleanValue; + }; + if (data2["__type"] !== void 0) { + return sanitizeErrorCode(data2["__type"]); + } + if (data2.code !== void 0) { + return sanitizeErrorCode(data2.code); + } +}; +var checkCborResponse = (response) => { + if (String(response.headers["smithy-protocol"]).toLowerCase() !== "rpc-v2-cbor") { + throw new Error("Malformed RPCv2 CBOR response, status: " + response.statusCode); + } +}; +var buildHttpRpcRequest = async (context, headers, path, resolvedHostname, body) => { + const { hostname, protocol = "https", port, path: basePath } = await context.endpoint(); + const contents = { + protocol, + hostname, + port, + method: "POST", + path: basePath.endsWith("/") ? basePath.slice(0, -1) + path : basePath + path, + headers: { + // intentional copy. 
+ ...headers + } + }; + if (resolvedHostname !== void 0) { + contents.hostname = resolvedHostname; + } + if (body !== void 0) { + contents.body = body; + try { + contents.headers["content-length"] = String((0, import_util_body_length_browser.calculateBodyLength)(body)); + } catch (e) { + } + } + return new import_protocol_http.HttpRequest(contents); +}; +// Annotate the CommonJS export names for ESM import in node: +0 && (module.exports = { + buildHttpRpcRequest, + cbor, + checkCborResponse, + dateToTag, + loadSmithyRpcV2CborErrorCode, + parseCborBody, + parseCborErrorBody, + tag, + tagSymbol +}); diff --git a/amplify/functions/deleteDocument/node_modules/@smithy/core/dist-cjs/submodules/protocols/index.js b/amplify/functions/deleteDocument/node_modules/@smithy/core/dist-cjs/submodules/protocols/index.js new file mode 100644 index 0000000..455a5de --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@smithy/core/dist-cjs/submodules/protocols/index.js @@ -0,0 +1,164 @@ +var __defProp = Object.defineProperty; +var __getOwnPropDesc = Object.getOwnPropertyDescriptor; +var __getOwnPropNames = Object.getOwnPropertyNames; +var __hasOwnProp = Object.prototype.hasOwnProperty; +var __export = (target, all) => { + for (var name in all) + __defProp(target, name, { get: all[name], enumerable: true }); +}; +var __copyProps = (to, from, except, desc) => { + if (from && typeof from === "object" || typeof from === "function") { + for (let key of __getOwnPropNames(from)) + if (!__hasOwnProp.call(to, key) && key !== except) + __defProp(to, key, { get: () => from[key], enumerable: !(desc = __getOwnPropDesc(from, key)) || desc.enumerable }); + } + return to; +}; +var __toCommonJS = (mod) => __copyProps(__defProp({}, "__esModule", { value: true }), mod); + +// src/submodules/protocols/index.ts +var protocols_exports = {}; +__export(protocols_exports, { + RequestBuilder: () => RequestBuilder, + collectBody: () => collectBody, + extendedEncodeURIComponent: () => 
extendedEncodeURIComponent, + requestBuilder: () => requestBuilder, + resolvedPath: () => resolvedPath +}); +module.exports = __toCommonJS(protocols_exports); + +// src/submodules/protocols/collect-stream-body.ts +var import_util_stream = require("@smithy/util-stream"); +var collectBody = async (streamBody = new Uint8Array(), context) => { + if (streamBody instanceof Uint8Array) { + return import_util_stream.Uint8ArrayBlobAdapter.mutate(streamBody); + } + if (!streamBody) { + return import_util_stream.Uint8ArrayBlobAdapter.mutate(new Uint8Array()); + } + const fromContext = context.streamCollector(streamBody); + return import_util_stream.Uint8ArrayBlobAdapter.mutate(await fromContext); +}; + +// src/submodules/protocols/extended-encode-uri-component.ts +function extendedEncodeURIComponent(str) { + return encodeURIComponent(str).replace(/[!'()*]/g, function(c) { + return "%" + c.charCodeAt(0).toString(16).toUpperCase(); + }); +} + +// src/submodules/protocols/requestBuilder.ts +var import_protocol_http = require("@smithy/protocol-http"); + +// src/submodules/protocols/resolve-path.ts +var resolvedPath = (resolvedPath2, input, memberName, labelValueProvider, uriLabel, isGreedyLabel) => { + if (input != null && input[memberName] !== void 0) { + const labelValue = labelValueProvider(); + if (labelValue.length <= 0) { + throw new Error("Empty value provided for input HTTP label: " + memberName + "."); + } + resolvedPath2 = resolvedPath2.replace( + uriLabel, + isGreedyLabel ? 
labelValue.split("/").map((segment) => extendedEncodeURIComponent(segment)).join("/") : extendedEncodeURIComponent(labelValue) + ); + } else { + throw new Error("No value provided for input HTTP label: " + memberName + "."); + } + return resolvedPath2; +}; + +// src/submodules/protocols/requestBuilder.ts +function requestBuilder(input, context) { + return new RequestBuilder(input, context); +} +var RequestBuilder = class { + constructor(input, context) { + this.input = input; + this.context = context; + this.query = {}; + this.method = ""; + this.headers = {}; + this.path = ""; + this.body = null; + this.hostname = ""; + this.resolvePathStack = []; + } + async build() { + const { hostname, protocol = "https", port, path: basePath } = await this.context.endpoint(); + this.path = basePath; + for (const resolvePath of this.resolvePathStack) { + resolvePath(this.path); + } + return new import_protocol_http.HttpRequest({ + protocol, + hostname: this.hostname || hostname, + port, + method: this.method, + path: this.path, + query: this.query, + body: this.body, + headers: this.headers + }); + } + /** + * Brevity setter for "hostname". + */ + hn(hostname) { + this.hostname = hostname; + return this; + } + /** + * Brevity initial builder for "basepath". + */ + bp(uriLabel) { + this.resolvePathStack.push((basePath) => { + this.path = `${basePath?.endsWith("/") ? basePath.slice(0, -1) : basePath || ""}` + uriLabel; + }); + return this; + } + /** + * Brevity incremental builder for "path". + */ + p(memberName, labelValueProvider, uriLabel, isGreedyLabel) { + this.resolvePathStack.push((path) => { + this.path = resolvedPath(path, this.input, memberName, labelValueProvider, uriLabel, isGreedyLabel); + }); + return this; + } + /** + * Brevity setter for "headers". + */ + h(headers) { + this.headers = headers; + return this; + } + /** + * Brevity setter for "query". + */ + q(query) { + this.query = query; + return this; + } + /** + * Brevity setter for "body". 
+ */ + b(body) { + this.body = body; + return this; + } + /** + * Brevity setter for "method". + */ + m(method) { + this.method = method; + return this; + } +}; +// Annotate the CommonJS export names for ESM import in node: +0 && (module.exports = { + RequestBuilder, + collectBody, + extendedEncodeURIComponent, + requestBuilder, + resolvedPath +}); diff --git a/amplify/functions/deleteDocument/node_modules/@smithy/core/dist-cjs/submodules/serde/index.js b/amplify/functions/deleteDocument/node_modules/@smithy/core/dist-cjs/submodules/serde/index.js new file mode 100644 index 0000000..047fb9b --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@smithy/core/dist-cjs/submodules/serde/index.js @@ -0,0 +1,41 @@ +var __defProp = Object.defineProperty; +var __getOwnPropDesc = Object.getOwnPropertyDescriptor; +var __getOwnPropNames = Object.getOwnPropertyNames; +var __hasOwnProp = Object.prototype.hasOwnProperty; +var __export = (target, all) => { + for (var name in all) + __defProp(target, name, { get: all[name], enumerable: true }); +}; +var __copyProps = (to, from, except, desc) => { + if (from && typeof from === "object" || typeof from === "function") { + for (let key of __getOwnPropNames(from)) + if (!__hasOwnProp.call(to, key) && key !== except) + __defProp(to, key, { get: () => from[key], enumerable: !(desc = __getOwnPropDesc(from, key)) || desc.enumerable }); + } + return to; +}; +var __toCommonJS = (mod) => __copyProps(__defProp({}, "__esModule", { value: true }), mod); + +// src/submodules/serde/index.ts +var serde_exports = {}; +__export(serde_exports, { + NumericValue: () => NumericValue, + nv: () => nv +}); +module.exports = __toCommonJS(serde_exports); + +// src/submodules/serde/value/NumericValue.ts +var NumericValue = class { + constructor(string, type) { + this.string = string; + this.type = type; + } +}; +function nv(string) { + return new NumericValue(string, "bigDecimal"); +} +// Annotate the CommonJS export names for ESM import in node: +0 
&& (module.exports = { + NumericValue, + nv +}); diff --git a/amplify/functions/deleteDocument/node_modules/@smithy/core/dist-cjs/util-identity-and-auth/DefaultIdentityProviderConfig.js b/amplify/functions/deleteDocument/node_modules/@smithy/core/dist-cjs/util-identity-and-auth/DefaultIdentityProviderConfig.js new file mode 100644 index 0000000..0440577 --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@smithy/core/dist-cjs/util-identity-and-auth/DefaultIdentityProviderConfig.js @@ -0,0 +1 @@ +module.exports = require("../index.js"); \ No newline at end of file diff --git a/amplify/functions/deleteDocument/node_modules/@smithy/core/dist-cjs/util-identity-and-auth/httpAuthSchemes/httpApiKeyAuth.js b/amplify/functions/deleteDocument/node_modules/@smithy/core/dist-cjs/util-identity-and-auth/httpAuthSchemes/httpApiKeyAuth.js new file mode 100644 index 0000000..8817412 --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@smithy/core/dist-cjs/util-identity-and-auth/httpAuthSchemes/httpApiKeyAuth.js @@ -0,0 +1 @@ +module.exports = require("../../index.js"); \ No newline at end of file diff --git a/amplify/functions/deleteDocument/node_modules/@smithy/core/dist-cjs/util-identity-and-auth/httpAuthSchemes/httpBearerAuth.js b/amplify/functions/deleteDocument/node_modules/@smithy/core/dist-cjs/util-identity-and-auth/httpAuthSchemes/httpBearerAuth.js new file mode 100644 index 0000000..8817412 --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@smithy/core/dist-cjs/util-identity-and-auth/httpAuthSchemes/httpBearerAuth.js @@ -0,0 +1 @@ +module.exports = require("../../index.js"); \ No newline at end of file diff --git a/amplify/functions/deleteDocument/node_modules/@smithy/core/dist-cjs/util-identity-and-auth/httpAuthSchemes/index.js b/amplify/functions/deleteDocument/node_modules/@smithy/core/dist-cjs/util-identity-and-auth/httpAuthSchemes/index.js new file mode 100644 index 0000000..8817412 --- /dev/null +++ 
b/amplify/functions/deleteDocument/node_modules/@smithy/core/dist-cjs/util-identity-and-auth/httpAuthSchemes/index.js @@ -0,0 +1 @@ +module.exports = require("../../index.js"); \ No newline at end of file diff --git a/amplify/functions/deleteDocument/node_modules/@smithy/core/dist-cjs/util-identity-and-auth/httpAuthSchemes/noAuth.js b/amplify/functions/deleteDocument/node_modules/@smithy/core/dist-cjs/util-identity-and-auth/httpAuthSchemes/noAuth.js new file mode 100644 index 0000000..8817412 --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@smithy/core/dist-cjs/util-identity-and-auth/httpAuthSchemes/noAuth.js @@ -0,0 +1 @@ +module.exports = require("../../index.js"); \ No newline at end of file diff --git a/amplify/functions/deleteDocument/node_modules/@smithy/core/dist-cjs/util-identity-and-auth/index.js b/amplify/functions/deleteDocument/node_modules/@smithy/core/dist-cjs/util-identity-and-auth/index.js new file mode 100644 index 0000000..0440577 --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@smithy/core/dist-cjs/util-identity-and-auth/index.js @@ -0,0 +1 @@ +module.exports = require("../index.js"); \ No newline at end of file diff --git a/amplify/functions/deleteDocument/node_modules/@smithy/core/dist-cjs/util-identity-and-auth/memoizeIdentityProvider.js b/amplify/functions/deleteDocument/node_modules/@smithy/core/dist-cjs/util-identity-and-auth/memoizeIdentityProvider.js new file mode 100644 index 0000000..0440577 --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@smithy/core/dist-cjs/util-identity-and-auth/memoizeIdentityProvider.js @@ -0,0 +1 @@ +module.exports = require("../index.js"); \ No newline at end of file diff --git a/amplify/functions/deleteDocument/node_modules/@smithy/core/dist-es/getSmithyContext.js b/amplify/functions/deleteDocument/node_modules/@smithy/core/dist-es/getSmithyContext.js new file mode 100644 index 0000000..3848a0c --- /dev/null +++ 
b/amplify/functions/deleteDocument/node_modules/@smithy/core/dist-es/getSmithyContext.js @@ -0,0 +1,2 @@ +import { SMITHY_CONTEXT_KEY } from "@smithy/types"; +export const getSmithyContext = (context) => context[SMITHY_CONTEXT_KEY] || (context[SMITHY_CONTEXT_KEY] = {}); diff --git a/amplify/functions/deleteDocument/node_modules/@smithy/core/dist-es/index.js b/amplify/functions/deleteDocument/node_modules/@smithy/core/dist-es/index.js new file mode 100644 index 0000000..1dcdba1 --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@smithy/core/dist-es/index.js @@ -0,0 +1,8 @@ +export * from "./getSmithyContext"; +export * from "./middleware-http-auth-scheme"; +export * from "./middleware-http-signing"; +export * from "./normalizeProvider"; +export { createPaginator } from "./pagination/createPaginator"; +export * from "./protocols/requestBuilder"; +export * from "./setFeature"; +export * from "./util-identity-and-auth"; diff --git a/amplify/functions/deleteDocument/node_modules/@smithy/core/dist-es/middleware-http-auth-scheme/getHttpAuthSchemeEndpointRuleSetPlugin.js b/amplify/functions/deleteDocument/node_modules/@smithy/core/dist-es/middleware-http-auth-scheme/getHttpAuthSchemeEndpointRuleSetPlugin.js new file mode 100644 index 0000000..d0aaae6 --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@smithy/core/dist-es/middleware-http-auth-scheme/getHttpAuthSchemeEndpointRuleSetPlugin.js @@ -0,0 +1,17 @@ +import { httpAuthSchemeMiddleware } from "./httpAuthSchemeMiddleware"; +export const httpAuthSchemeEndpointRuleSetMiddlewareOptions = { + step: "serialize", + tags: ["HTTP_AUTH_SCHEME"], + name: "httpAuthSchemeMiddleware", + override: true, + relation: "before", + toMiddleware: "endpointV2Middleware", +}; +export const getHttpAuthSchemeEndpointRuleSetPlugin = (config, { httpAuthSchemeParametersProvider, identityProviderConfigProvider, }) => ({ + applyToStack: (clientStack) => { + clientStack.addRelativeTo(httpAuthSchemeMiddleware(config, { 
+ httpAuthSchemeParametersProvider, + identityProviderConfigProvider, + }), httpAuthSchemeEndpointRuleSetMiddlewareOptions); + }, +}); diff --git a/amplify/functions/deleteDocument/node_modules/@smithy/core/dist-es/middleware-http-auth-scheme/getHttpAuthSchemePlugin.js b/amplify/functions/deleteDocument/node_modules/@smithy/core/dist-es/middleware-http-auth-scheme/getHttpAuthSchemePlugin.js new file mode 100644 index 0000000..3fe03c5 --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@smithy/core/dist-es/middleware-http-auth-scheme/getHttpAuthSchemePlugin.js @@ -0,0 +1,18 @@ +import { serializerMiddlewareOption } from "@smithy/middleware-serde"; +import { httpAuthSchemeMiddleware } from "./httpAuthSchemeMiddleware"; +export const httpAuthSchemeMiddlewareOptions = { + step: "serialize", + tags: ["HTTP_AUTH_SCHEME"], + name: "httpAuthSchemeMiddleware", + override: true, + relation: "before", + toMiddleware: serializerMiddlewareOption.name, +}; +export const getHttpAuthSchemePlugin = (config, { httpAuthSchemeParametersProvider, identityProviderConfigProvider, }) => ({ + applyToStack: (clientStack) => { + clientStack.addRelativeTo(httpAuthSchemeMiddleware(config, { + httpAuthSchemeParametersProvider, + identityProviderConfigProvider, + }), httpAuthSchemeMiddlewareOptions); + }, +}); diff --git a/amplify/functions/deleteDocument/node_modules/@smithy/core/dist-es/middleware-http-auth-scheme/httpAuthSchemeMiddleware.js b/amplify/functions/deleteDocument/node_modules/@smithy/core/dist-es/middleware-http-auth-scheme/httpAuthSchemeMiddleware.js new file mode 100644 index 0000000..9869f65 --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@smithy/core/dist-es/middleware-http-auth-scheme/httpAuthSchemeMiddleware.js @@ -0,0 +1,43 @@ +import { SMITHY_CONTEXT_KEY, } from "@smithy/types"; +import { getSmithyContext } from "@smithy/util-middleware"; +import { resolveAuthOptions } from "./resolveAuthOptions"; +function 
convertHttpAuthSchemesToMap(httpAuthSchemes) { + const map = new Map(); + for (const scheme of httpAuthSchemes) { + map.set(scheme.schemeId, scheme); + } + return map; +} +export const httpAuthSchemeMiddleware = (config, mwOptions) => (next, context) => async (args) => { + const options = config.httpAuthSchemeProvider(await mwOptions.httpAuthSchemeParametersProvider(config, context, args.input)); + const authSchemePreference = config.authSchemePreference ? await config.authSchemePreference() : []; + const resolvedOptions = resolveAuthOptions(options, authSchemePreference); + const authSchemes = convertHttpAuthSchemesToMap(config.httpAuthSchemes); + const smithyContext = getSmithyContext(context); + const failureReasons = []; + for (const option of resolvedOptions) { + const scheme = authSchemes.get(option.schemeId); + if (!scheme) { + failureReasons.push(`HttpAuthScheme \`${option.schemeId}\` was not enabled for this service.`); + continue; + } + const identityProvider = scheme.identityProvider(await mwOptions.identityProviderConfigProvider(config)); + if (!identityProvider) { + failureReasons.push(`HttpAuthScheme \`${option.schemeId}\` did not have an IdentityProvider configured.`); + continue; + } + const { identityProperties = {}, signingProperties = {} } = option.propertiesExtractor?.(config, context) || {}; + option.identityProperties = Object.assign(option.identityProperties || {}, identityProperties); + option.signingProperties = Object.assign(option.signingProperties || {}, signingProperties); + smithyContext.selectedHttpAuthScheme = { + httpAuthOption: option, + identity: await identityProvider(option.identityProperties), + signer: scheme.signer, + }; + break; + } + if (!smithyContext.selectedHttpAuthScheme) { + throw new Error(failureReasons.join("\n")); + } + return next(args); +}; diff --git a/amplify/functions/deleteDocument/node_modules/@smithy/core/dist-es/middleware-http-auth-scheme/index.js 
b/amplify/functions/deleteDocument/node_modules/@smithy/core/dist-es/middleware-http-auth-scheme/index.js new file mode 100644 index 0000000..5042e7d --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@smithy/core/dist-es/middleware-http-auth-scheme/index.js @@ -0,0 +1,3 @@ +export * from "./httpAuthSchemeMiddleware"; +export * from "./getHttpAuthSchemeEndpointRuleSetPlugin"; +export * from "./getHttpAuthSchemePlugin"; diff --git a/amplify/functions/deleteDocument/node_modules/@smithy/core/dist-es/middleware-http-auth-scheme/resolveAuthOptions.js b/amplify/functions/deleteDocument/node_modules/@smithy/core/dist-es/middleware-http-auth-scheme/resolveAuthOptions.js new file mode 100644 index 0000000..8260757 --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@smithy/core/dist-es/middleware-http-auth-scheme/resolveAuthOptions.js @@ -0,0 +1,20 @@ +export const resolveAuthOptions = (candidateAuthOptions, authSchemePreference) => { + if (!authSchemePreference || authSchemePreference.length === 0) { + return candidateAuthOptions; + } + const preferredAuthOptions = []; + for (const preferredSchemeName of authSchemePreference) { + for (const candidateAuthOption of candidateAuthOptions) { + const candidateAuthSchemeName = candidateAuthOption.schemeId.split("#")[1]; + if (candidateAuthSchemeName === preferredSchemeName) { + preferredAuthOptions.push(candidateAuthOption); + } + } + } + for (const candidateAuthOption of candidateAuthOptions) { + if (!preferredAuthOptions.find(({ schemeId }) => schemeId === candidateAuthOption.schemeId)) { + preferredAuthOptions.push(candidateAuthOption); + } + } + return preferredAuthOptions; +}; diff --git a/amplify/functions/deleteDocument/node_modules/@smithy/core/dist-es/middleware-http-signing/getHttpSigningMiddleware.js b/amplify/functions/deleteDocument/node_modules/@smithy/core/dist-es/middleware-http-signing/getHttpSigningMiddleware.js new file mode 100644 index 0000000..e199712 --- /dev/null +++ 
b/amplify/functions/deleteDocument/node_modules/@smithy/core/dist-es/middleware-http-signing/getHttpSigningMiddleware.js @@ -0,0 +1,15 @@ +import { httpSigningMiddleware } from "./httpSigningMiddleware"; +export const httpSigningMiddlewareOptions = { + step: "finalizeRequest", + tags: ["HTTP_SIGNING"], + name: "httpSigningMiddleware", + aliases: ["apiKeyMiddleware", "tokenMiddleware", "awsAuthMiddleware"], + override: true, + relation: "after", + toMiddleware: "retryMiddleware", +}; +export const getHttpSigningPlugin = (config) => ({ + applyToStack: (clientStack) => { + clientStack.addRelativeTo(httpSigningMiddleware(config), httpSigningMiddlewareOptions); + }, +}); diff --git a/amplify/functions/deleteDocument/node_modules/@smithy/core/dist-es/middleware-http-signing/httpSigningMiddleware.js b/amplify/functions/deleteDocument/node_modules/@smithy/core/dist-es/middleware-http-signing/httpSigningMiddleware.js new file mode 100644 index 0000000..dbc1b28 --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@smithy/core/dist-es/middleware-http-signing/httpSigningMiddleware.js @@ -0,0 +1,24 @@ +import { HttpRequest } from "@smithy/protocol-http"; +import { SMITHY_CONTEXT_KEY, } from "@smithy/types"; +import { getSmithyContext } from "@smithy/util-middleware"; +const defaultErrorHandler = (signingProperties) => (error) => { + throw error; +}; +const defaultSuccessHandler = (httpResponse, signingProperties) => { }; +export const httpSigningMiddleware = (config) => (next, context) => async (args) => { + if (!HttpRequest.isInstance(args.request)) { + return next(args); + } + const smithyContext = getSmithyContext(context); + const scheme = smithyContext.selectedHttpAuthScheme; + if (!scheme) { + throw new Error(`No HttpAuthScheme was selected: unable to sign request`); + } + const { httpAuthOption: { signingProperties = {} }, identity, signer, } = scheme; + const output = await next({ + ...args, + request: await signer.sign(args.request, identity, 
signingProperties), + }).catch((signer.errorHandler || defaultErrorHandler)(signingProperties)); + (signer.successHandler || defaultSuccessHandler)(output.response, signingProperties); + return output; +}; diff --git a/amplify/functions/deleteDocument/node_modules/@smithy/core/dist-es/middleware-http-signing/index.js b/amplify/functions/deleteDocument/node_modules/@smithy/core/dist-es/middleware-http-signing/index.js new file mode 100644 index 0000000..7bc6cfe --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@smithy/core/dist-es/middleware-http-signing/index.js @@ -0,0 +1,2 @@ +export * from "./httpSigningMiddleware"; +export * from "./getHttpSigningMiddleware"; diff --git a/amplify/functions/deleteDocument/node_modules/@smithy/core/dist-es/normalizeProvider.js b/amplify/functions/deleteDocument/node_modules/@smithy/core/dist-es/normalizeProvider.js new file mode 100644 index 0000000..a83ea99 --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@smithy/core/dist-es/normalizeProvider.js @@ -0,0 +1,6 @@ +export const normalizeProvider = (input) => { + if (typeof input === "function") + return input; + const promisified = Promise.resolve(input); + return () => promisified; +}; diff --git a/amplify/functions/deleteDocument/node_modules/@smithy/core/dist-es/pagination/createPaginator.js b/amplify/functions/deleteDocument/node_modules/@smithy/core/dist-es/pagination/createPaginator.js new file mode 100644 index 0000000..4e8f889 --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@smithy/core/dist-es/pagination/createPaginator.js @@ -0,0 +1,41 @@ +const makePagedClientRequest = async (CommandCtor, client, input, withCommand = (_) => _, ...args) => { + let command = new CommandCtor(input); + command = withCommand(command) ?? 
command; + return await client.send(command, ...args); +}; +export function createPaginator(ClientCtor, CommandCtor, inputTokenName, outputTokenName, pageSizeTokenName) { + return async function* paginateOperation(config, input, ...additionalArguments) { + const _input = input; + let token = config.startingToken ?? _input[inputTokenName]; + let hasNext = true; + let page; + while (hasNext) { + _input[inputTokenName] = token; + if (pageSizeTokenName) { + _input[pageSizeTokenName] = _input[pageSizeTokenName] ?? config.pageSize; + } + if (config.client instanceof ClientCtor) { + page = await makePagedClientRequest(CommandCtor, config.client, input, config.withCommand, ...additionalArguments); + } + else { + throw new Error(`Invalid client, expected instance of ${ClientCtor.name}`); + } + yield page; + const prevToken = token; + token = get(page, outputTokenName); + hasNext = !!(token && (!config.stopOnSameToken || token !== prevToken)); + } + return undefined; + }; +} +const get = (fromObject, path) => { + let cursor = fromObject; + const pathComponents = path.split("."); + for (const step of pathComponents) { + if (!cursor || typeof cursor !== "object") { + return undefined; + } + cursor = cursor[step]; + } + return cursor; +}; diff --git a/amplify/functions/deleteDocument/node_modules/@smithy/core/dist-es/protocols/requestBuilder.js b/amplify/functions/deleteDocument/node_modules/@smithy/core/dist-es/protocols/requestBuilder.js new file mode 100644 index 0000000..5b790a7 --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@smithy/core/dist-es/protocols/requestBuilder.js @@ -0,0 +1 @@ +export { requestBuilder } from "@smithy/core/protocols"; diff --git a/amplify/functions/deleteDocument/node_modules/@smithy/core/dist-es/setFeature.js b/amplify/functions/deleteDocument/node_modules/@smithy/core/dist-es/setFeature.js new file mode 100644 index 0000000..a3a0303 --- /dev/null +++ 
b/amplify/functions/deleteDocument/node_modules/@smithy/core/dist-es/setFeature.js @@ -0,0 +1,11 @@ +export function setFeature(context, feature, value) { + if (!context.__smithy_context) { + context.__smithy_context = { + features: {}, + }; + } + else if (!context.__smithy_context.features) { + context.__smithy_context.features = {}; + } + context.__smithy_context.features[feature] = value; +} diff --git a/amplify/functions/deleteDocument/node_modules/@smithy/core/dist-es/submodules/cbor/cbor-decode.js b/amplify/functions/deleteDocument/node_modules/@smithy/core/dist-es/submodules/cbor/cbor-decode.js new file mode 100644 index 0000000..dca1c63 --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@smithy/core/dist-es/submodules/cbor/cbor-decode.js @@ -0,0 +1,391 @@ +import { toUtf8 } from "@smithy/util-utf8"; +import { alloc, extendedFloat16, extendedFloat32, extendedFloat64, extendedOneByte, majorList, majorMap, majorNegativeInt64, majorTag, majorUint64, majorUnstructuredByteString, majorUtf8String, minorIndefinite, specialFalse, specialNull, specialTrue, specialUndefined, tag, } from "./cbor-types"; +const USE_TEXT_DECODER = typeof TextDecoder !== "undefined"; +const USE_BUFFER = typeof Buffer !== "undefined"; +let payload = alloc(0); +let dataView = new DataView(payload.buffer, payload.byteOffset, payload.byteLength); +const textDecoder = USE_TEXT_DECODER ? 
new TextDecoder() : null; +let _offset = 0; +export function setPayload(bytes) { + payload = bytes; + dataView = new DataView(payload.buffer, payload.byteOffset, payload.byteLength); +} +export function decode(at, to) { + if (at >= to) { + throw new Error("unexpected end of (decode) payload."); + } + const major = (payload[at] & 224) >> 5; + const minor = payload[at] & 31; + switch (major) { + case majorUint64: + case majorNegativeInt64: + case majorTag: + let unsignedInt; + let offset; + if (minor < 24) { + unsignedInt = minor; + offset = 1; + } + else { + switch (minor) { + case extendedOneByte: + case extendedFloat16: + case extendedFloat32: + case extendedFloat64: + const countLength = minorValueToArgumentLength[minor]; + const countOffset = (countLength + 1); + offset = countOffset; + if (to - at < countOffset) { + throw new Error(`countLength ${countLength} greater than remaining buf len.`); + } + const countIndex = at + 1; + if (countLength === 1) { + unsignedInt = payload[countIndex]; + } + else if (countLength === 2) { + unsignedInt = dataView.getUint16(countIndex); + } + else if (countLength === 4) { + unsignedInt = dataView.getUint32(countIndex); + } + else { + unsignedInt = dataView.getBigUint64(countIndex); + } + break; + default: + throw new Error(`unexpected minor value ${minor}.`); + } + } + if (major === majorUint64) { + _offset = offset; + return castBigInt(unsignedInt); + } + else if (major === majorNegativeInt64) { + let negativeInt; + if (typeof unsignedInt === "bigint") { + negativeInt = BigInt(-1) - unsignedInt; + } + else { + negativeInt = -1 - unsignedInt; + } + _offset = offset; + return castBigInt(negativeInt); + } + else { + const value = decode(at + offset, to); + const valueOffset = _offset; + _offset = offset + valueOffset; + return tag({ tag: castBigInt(unsignedInt), value }); + } + case majorUtf8String: + case majorMap: + case majorList: + case majorUnstructuredByteString: + if (minor === minorIndefinite) { + switch (major) { + case 
majorUtf8String: + return decodeUtf8StringIndefinite(at, to); + case majorMap: + return decodeMapIndefinite(at, to); + case majorList: + return decodeListIndefinite(at, to); + case majorUnstructuredByteString: + return decodeUnstructuredByteStringIndefinite(at, to); + } + } + else { + switch (major) { + case majorUtf8String: + return decodeUtf8String(at, to); + case majorMap: + return decodeMap(at, to); + case majorList: + return decodeList(at, to); + case majorUnstructuredByteString: + return decodeUnstructuredByteString(at, to); + } + } + default: + return decodeSpecial(at, to); + } +} +function bytesToUtf8(bytes, at, to) { + if (USE_BUFFER && bytes.constructor?.name === "Buffer") { + return bytes.toString("utf-8", at, to); + } + if (textDecoder) { + return textDecoder.decode(bytes.subarray(at, to)); + } + return toUtf8(bytes.subarray(at, to)); +} +function demote(bigInteger) { + const num = Number(bigInteger); + if (num < Number.MIN_SAFE_INTEGER || Number.MAX_SAFE_INTEGER < num) { + console.warn(new Error(`@smithy/core/cbor - truncating BigInt(${bigInteger}) to ${num} with loss of precision.`)); + } + return num; +} +const minorValueToArgumentLength = { + [extendedOneByte]: 1, + [extendedFloat16]: 2, + [extendedFloat32]: 4, + [extendedFloat64]: 8, +}; +export function bytesToFloat16(a, b) { + const sign = a >> 7; + const exponent = (a & 124) >> 2; + const fraction = ((a & 3) << 8) | b; + const scalar = sign === 0 ? 
1 : -1; + let exponentComponent; + let summation; + if (exponent === 0b00000) { + if (fraction === 0) { + return 0; + } + else { + exponentComponent = Math.pow(2, 1 - 15); + summation = 0; + } + } + else if (exponent === 0b11111) { + if (fraction === 0) { + return scalar * Infinity; + } + else { + return NaN; + } + } + else { + exponentComponent = Math.pow(2, exponent - 15); + summation = 1; + } + summation += fraction / 1024; + return scalar * (exponentComponent * summation); +} +function decodeCount(at, to) { + const minor = payload[at] & 31; + if (minor < 24) { + _offset = 1; + return minor; + } + if (minor === extendedOneByte || + minor === extendedFloat16 || + minor === extendedFloat32 || + minor === extendedFloat64) { + const countLength = minorValueToArgumentLength[minor]; + _offset = (countLength + 1); + if (to - at < _offset) { + throw new Error(`countLength ${countLength} greater than remaining buf len.`); + } + const countIndex = at + 1; + if (countLength === 1) { + return payload[countIndex]; + } + else if (countLength === 2) { + return dataView.getUint16(countIndex); + } + else if (countLength === 4) { + return dataView.getUint32(countIndex); + } + return demote(dataView.getBigUint64(countIndex)); + } + throw new Error(`unexpected minor value ${minor}.`); +} +function decodeUtf8String(at, to) { + const length = decodeCount(at, to); + const offset = _offset; + at += offset; + if (to - at < length) { + throw new Error(`string len ${length} greater than remaining buf len.`); + } + const value = bytesToUtf8(payload, at, at + length); + _offset = offset + length; + return value; +} +function decodeUtf8StringIndefinite(at, to) { + at += 1; + const vector = []; + for (const base = at; at < to;) { + if (payload[at] === 255) { + const data = alloc(vector.length); + data.set(vector, 0); + _offset = at - base + 2; + return bytesToUtf8(data, 0, data.length); + } + const major = (payload[at] & 224) >> 5; + const minor = payload[at] & 31; + if (major !== 
majorUtf8String) { + throw new Error(`unexpected major type ${major} in indefinite string.`); + } + if (minor === minorIndefinite) { + throw new Error("nested indefinite string."); + } + const bytes = decodeUnstructuredByteString(at, to); + const length = _offset; + at += length; + for (let i = 0; i < bytes.length; ++i) { + vector.push(bytes[i]); + } + } + throw new Error("expected break marker."); +} +function decodeUnstructuredByteString(at, to) { + const length = decodeCount(at, to); + const offset = _offset; + at += offset; + if (to - at < length) { + throw new Error(`unstructured byte string len ${length} greater than remaining buf len.`); + } + const value = payload.subarray(at, at + length); + _offset = offset + length; + return value; +} +function decodeUnstructuredByteStringIndefinite(at, to) { + at += 1; + const vector = []; + for (const base = at; at < to;) { + if (payload[at] === 255) { + const data = alloc(vector.length); + data.set(vector, 0); + _offset = at - base + 2; + return data; + } + const major = (payload[at] & 224) >> 5; + const minor = payload[at] & 31; + if (major !== majorUnstructuredByteString) { + throw new Error(`unexpected major type ${major} in indefinite string.`); + } + if (minor === minorIndefinite) { + throw new Error("nested indefinite string."); + } + const bytes = decodeUnstructuredByteString(at, to); + const length = _offset; + at += length; + for (let i = 0; i < bytes.length; ++i) { + vector.push(bytes[i]); + } + } + throw new Error("expected break marker."); +} +function decodeList(at, to) { + const listDataLength = decodeCount(at, to); + const offset = _offset; + at += offset; + const base = at; + const list = Array(listDataLength); + for (let i = 0; i < listDataLength; ++i) { + const item = decode(at, to); + const itemOffset = _offset; + list[i] = item; + at += itemOffset; + } + _offset = offset + (at - base); + return list; +} +function decodeListIndefinite(at, to) { + at += 1; + const list = []; + for (const base = at; 
at < to;) { + if (payload[at] === 255) { + _offset = at - base + 2; + return list; + } + const item = decode(at, to); + const n = _offset; + at += n; + list.push(item); + } + throw new Error("expected break marker."); +} +function decodeMap(at, to) { + const mapDataLength = decodeCount(at, to); + const offset = _offset; + at += offset; + const base = at; + const map = {}; + for (let i = 0; i < mapDataLength; ++i) { + if (at >= to) { + throw new Error("unexpected end of map payload."); + } + const major = (payload[at] & 224) >> 5; + if (major !== majorUtf8String) { + throw new Error(`unexpected major type ${major} for map key at index ${at}.`); + } + const key = decode(at, to); + at += _offset; + const value = decode(at, to); + at += _offset; + map[key] = value; + } + _offset = offset + (at - base); + return map; +} +function decodeMapIndefinite(at, to) { + at += 1; + const base = at; + const map = {}; + for (; at < to;) { + if (at >= to) { + throw new Error("unexpected end of map payload."); + } + if (payload[at] === 255) { + _offset = at - base + 2; + return map; + } + const major = (payload[at] & 224) >> 5; + if (major !== majorUtf8String) { + throw new Error(`unexpected major type ${major} for map key.`); + } + const key = decode(at, to); + at += _offset; + const value = decode(at, to); + at += _offset; + map[key] = value; + } + throw new Error("expected break marker."); +} +function decodeSpecial(at, to) { + const minor = payload[at] & 31; + switch (minor) { + case specialTrue: + case specialFalse: + _offset = 1; + return minor === specialTrue; + case specialNull: + _offset = 1; + return null; + case specialUndefined: + _offset = 1; + return null; + case extendedFloat16: + if (to - at < 3) { + throw new Error("incomplete float16 at end of buf."); + } + _offset = 3; + return bytesToFloat16(payload[at + 1], payload[at + 2]); + case extendedFloat32: + if (to - at < 5) { + throw new Error("incomplete float32 at end of buf."); + } + _offset = 5; + return 
dataView.getFloat32(at + 1); + case extendedFloat64: + if (to - at < 9) { + throw new Error("incomplete float64 at end of buf."); + } + _offset = 9; + return dataView.getFloat64(at + 1); + default: + throw new Error(`unexpected minor value ${minor}.`); + } +} +function castBigInt(bigInt) { + if (typeof bigInt === "number") { + return bigInt; + } + const num = Number(bigInt); + if (Number.MIN_SAFE_INTEGER <= num && num <= Number.MAX_SAFE_INTEGER) { + return num; + } + return bigInt; +} diff --git a/amplify/functions/deleteDocument/node_modules/@smithy/core/dist-es/submodules/cbor/cbor-encode.js b/amplify/functions/deleteDocument/node_modules/@smithy/core/dist-es/submodules/cbor/cbor-encode.js new file mode 100644 index 0000000..17af4e2 --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@smithy/core/dist-es/submodules/cbor/cbor-encode.js @@ -0,0 +1,191 @@ +import { fromUtf8 } from "@smithy/util-utf8"; +import { extendedFloat16, extendedFloat32, extendedFloat64, majorList, majorMap, majorNegativeInt64, majorSpecial, majorTag, majorUint64, majorUnstructuredByteString, majorUtf8String, specialFalse, specialNull, specialTrue, tagSymbol, } from "./cbor-types"; +import { alloc } from "./cbor-types"; +const USE_BUFFER = typeof Buffer !== "undefined"; +const initialSize = 2048; +let data = alloc(initialSize); +let dataView = new DataView(data.buffer, data.byteOffset, data.byteLength); +let cursor = 0; +function ensureSpace(bytes) { + const remaining = data.byteLength - cursor; + if (remaining < bytes) { + if (cursor < 16000000) { + resize(Math.max(data.byteLength * 4, data.byteLength + bytes)); + } + else { + resize(data.byteLength + bytes + 16000000); + } + } +} +export function toUint8Array() { + const out = alloc(cursor); + out.set(data.subarray(0, cursor), 0); + cursor = 0; + return out; +} +export function resize(size) { + const old = data; + data = alloc(size); + if (old) { + if (old.copy) { + old.copy(data, 0, 0, old.byteLength); + } + else { + 
data.set(old, 0); + } + } + dataView = new DataView(data.buffer, data.byteOffset, data.byteLength); +} +function encodeHeader(major, value) { + if (value < 24) { + data[cursor++] = (major << 5) | value; + } + else if (value < 1 << 8) { + data[cursor++] = (major << 5) | 24; + data[cursor++] = value; + } + else if (value < 1 << 16) { + data[cursor++] = (major << 5) | extendedFloat16; + dataView.setUint16(cursor, value); + cursor += 2; + } + else if (value < 2 ** 32) { + data[cursor++] = (major << 5) | extendedFloat32; + dataView.setUint32(cursor, value); + cursor += 4; + } + else { + data[cursor++] = (major << 5) | extendedFloat64; + dataView.setBigUint64(cursor, typeof value === "bigint" ? value : BigInt(value)); + cursor += 8; + } +} +export function encode(_input) { + const encodeStack = [_input]; + while (encodeStack.length) { + const input = encodeStack.pop(); + ensureSpace(typeof input === "string" ? input.length * 4 : 64); + if (typeof input === "string") { + if (USE_BUFFER) { + encodeHeader(majorUtf8String, Buffer.byteLength(input)); + cursor += data.write(input, cursor); + } + else { + const bytes = fromUtf8(input); + encodeHeader(majorUtf8String, bytes.byteLength); + data.set(bytes, cursor); + cursor += bytes.byteLength; + } + continue; + } + else if (typeof input === "number") { + if (Number.isInteger(input)) { + const nonNegative = input >= 0; + const major = nonNegative ? majorUint64 : majorNegativeInt64; + const value = nonNegative ? 
input : -input - 1; + if (value < 24) { + data[cursor++] = (major << 5) | value; + } + else if (value < 256) { + data[cursor++] = (major << 5) | 24; + data[cursor++] = value; + } + else if (value < 65536) { + data[cursor++] = (major << 5) | extendedFloat16; + data[cursor++] = value >> 8; + data[cursor++] = value; + } + else if (value < 4294967296) { + data[cursor++] = (major << 5) | extendedFloat32; + dataView.setUint32(cursor, value); + cursor += 4; + } + else { + data[cursor++] = (major << 5) | extendedFloat64; + dataView.setBigUint64(cursor, BigInt(value)); + cursor += 8; + } + continue; + } + data[cursor++] = (majorSpecial << 5) | extendedFloat64; + dataView.setFloat64(cursor, input); + cursor += 8; + continue; + } + else if (typeof input === "bigint") { + const nonNegative = input >= 0; + const major = nonNegative ? majorUint64 : majorNegativeInt64; + const value = nonNegative ? input : -input - BigInt(1); + const n = Number(value); + if (n < 24) { + data[cursor++] = (major << 5) | n; + } + else if (n < 256) { + data[cursor++] = (major << 5) | 24; + data[cursor++] = n; + } + else if (n < 65536) { + data[cursor++] = (major << 5) | extendedFloat16; + data[cursor++] = n >> 8; + data[cursor++] = n & 255; + } + else if (n < 4294967296) { + data[cursor++] = (major << 5) | extendedFloat32; + dataView.setUint32(cursor, n); + cursor += 4; + } + else { + data[cursor++] = (major << 5) | extendedFloat64; + dataView.setBigUint64(cursor, value); + cursor += 8; + } + continue; + } + else if (input === null) { + data[cursor++] = (majorSpecial << 5) | specialNull; + continue; + } + else if (typeof input === "boolean") { + data[cursor++] = (majorSpecial << 5) | (input ? 
specialTrue : specialFalse); + continue; + } + else if (typeof input === "undefined") { + throw new Error("@smithy/core/cbor: client may not serialize undefined value."); + } + else if (Array.isArray(input)) { + for (let i = input.length - 1; i >= 0; --i) { + encodeStack.push(input[i]); + } + encodeHeader(majorList, input.length); + continue; + } + else if (typeof input.byteLength === "number") { + ensureSpace(input.length * 2); + encodeHeader(majorUnstructuredByteString, input.length); + data.set(input, cursor); + cursor += input.byteLength; + continue; + } + else if (typeof input === "object") { + if (input[tagSymbol]) { + if ("tag" in input && "value" in input) { + encodeStack.push(input.value); + encodeHeader(majorTag, input.tag); + continue; + } + else { + throw new Error("tag encountered with missing fields, need 'tag' and 'value', found: " + JSON.stringify(input)); + } + } + const keys = Object.keys(input); + for (let i = keys.length - 1; i >= 0; --i) { + const key = keys[i]; + encodeStack.push(input[key]); + encodeStack.push(key); + } + encodeHeader(majorMap, keys.length); + continue; + } + throw new Error(`data type ${input?.constructor?.name ?? 
typeof input} not compatible for encoding.`); + } +} diff --git a/amplify/functions/deleteDocument/node_modules/@smithy/core/dist-es/submodules/cbor/cbor-types.js b/amplify/functions/deleteDocument/node_modules/@smithy/core/dist-es/submodules/cbor/cbor-types.js new file mode 100644 index 0000000..a720eb7 --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@smithy/core/dist-es/submodules/cbor/cbor-types.js @@ -0,0 +1,25 @@ +export const majorUint64 = 0; +export const majorNegativeInt64 = 1; +export const majorUnstructuredByteString = 2; +export const majorUtf8String = 3; +export const majorList = 4; +export const majorMap = 5; +export const majorTag = 6; +export const majorSpecial = 7; +export const specialFalse = 20; +export const specialTrue = 21; +export const specialNull = 22; +export const specialUndefined = 23; +export const extendedOneByte = 24; +export const extendedFloat16 = 25; +export const extendedFloat32 = 26; +export const extendedFloat64 = 27; +export const minorIndefinite = 31; +export function alloc(size) { + return typeof Buffer !== "undefined" ? 
Buffer.alloc(size) : new Uint8Array(size); +} +export const tagSymbol = Symbol("@smithy/core/cbor::tagSymbol"); +export function tag(data) { + data[tagSymbol] = true; + return data; +} diff --git a/amplify/functions/deleteDocument/node_modules/@smithy/core/dist-es/submodules/cbor/cbor.js b/amplify/functions/deleteDocument/node_modules/@smithy/core/dist-es/submodules/cbor/cbor.js new file mode 100644 index 0000000..8df975f --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@smithy/core/dist-es/submodules/cbor/cbor.js @@ -0,0 +1,21 @@ +import { decode, setPayload } from "./cbor-decode"; +import { encode, resize, toUint8Array } from "./cbor-encode"; +export const cbor = { + deserialize(payload) { + setPayload(payload); + return decode(0, payload.length); + }, + serialize(input) { + try { + encode(input); + return toUint8Array(); + } + catch (e) { + toUint8Array(); + throw e; + } + }, + resizeEncodingBuffer(size) { + resize(size); + }, +}; diff --git a/amplify/functions/deleteDocument/node_modules/@smithy/core/dist-es/submodules/cbor/index.js b/amplify/functions/deleteDocument/node_modules/@smithy/core/dist-es/submodules/cbor/index.js new file mode 100644 index 0000000..0910d27 --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@smithy/core/dist-es/submodules/cbor/index.js @@ -0,0 +1,3 @@ +export { cbor } from "./cbor"; +export * from "./parseCborBody"; +export { tagSymbol, tag } from "./cbor-types"; diff --git a/amplify/functions/deleteDocument/node_modules/@smithy/core/dist-es/submodules/cbor/parseCborBody.js b/amplify/functions/deleteDocument/node_modules/@smithy/core/dist-es/submodules/cbor/parseCborBody.js new file mode 100644 index 0000000..03eeae6 --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@smithy/core/dist-es/submodules/cbor/parseCborBody.js @@ -0,0 +1,85 @@ +import { collectBody } from "@smithy/core/protocols"; +import { HttpRequest as __HttpRequest } from "@smithy/protocol-http"; +import { 
calculateBodyLength } from "@smithy/util-body-length-browser"; +import { cbor } from "./cbor"; +import { tag, tagSymbol } from "./cbor-types"; +export const parseCborBody = (streamBody, context) => { + return collectBody(streamBody, context).then(async (bytes) => { + if (bytes.length) { + try { + return cbor.deserialize(bytes); + } + catch (e) { + Object.defineProperty(e, "$responseBodyText", { + value: context.utf8Encoder(bytes), + }); + throw e; + } + } + return {}; + }); +}; +export const dateToTag = (date) => { + return tag({ + tag: 1, + value: date.getTime() / 1000, + }); +}; +export const parseCborErrorBody = async (errorBody, context) => { + const value = await parseCborBody(errorBody, context); + value.message = value.message ?? value.Message; + return value; +}; +export const loadSmithyRpcV2CborErrorCode = (output, data) => { + const sanitizeErrorCode = (rawValue) => { + let cleanValue = rawValue; + if (typeof cleanValue === "number") { + cleanValue = cleanValue.toString(); + } + if (cleanValue.indexOf(",") >= 0) { + cleanValue = cleanValue.split(",")[0]; + } + if (cleanValue.indexOf(":") >= 0) { + cleanValue = cleanValue.split(":")[0]; + } + if (cleanValue.indexOf("#") >= 0) { + cleanValue = cleanValue.split("#")[1]; + } + return cleanValue; + }; + if (data["__type"] !== undefined) { + return sanitizeErrorCode(data["__type"]); + } + if (data.code !== undefined) { + return sanitizeErrorCode(data.code); + } +}; +export const checkCborResponse = (response) => { + if (String(response.headers["smithy-protocol"]).toLowerCase() !== "rpc-v2-cbor") { + throw new Error("Malformed RPCv2 CBOR response, status: " + response.statusCode); + } +}; +export const buildHttpRpcRequest = async (context, headers, path, resolvedHostname, body) => { + const { hostname, protocol = "https", port, path: basePath } = await context.endpoint(); + const contents = { + protocol, + hostname, + port, + method: "POST", + path: basePath.endsWith("/") ? 
basePath.slice(0, -1) + path : basePath + path, + headers: { + ...headers, + }, + }; + if (resolvedHostname !== undefined) { + contents.hostname = resolvedHostname; + } + if (body !== undefined) { + contents.body = body; + try { + contents.headers["content-length"] = String(calculateBodyLength(body)); + } + catch (e) { } + } + return new __HttpRequest(contents); +}; diff --git a/amplify/functions/deleteDocument/node_modules/@smithy/core/dist-es/submodules/protocols/collect-stream-body.js b/amplify/functions/deleteDocument/node_modules/@smithy/core/dist-es/submodules/protocols/collect-stream-body.js new file mode 100644 index 0000000..b6a5c0b --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@smithy/core/dist-es/submodules/protocols/collect-stream-body.js @@ -0,0 +1,11 @@ +import { Uint8ArrayBlobAdapter } from "@smithy/util-stream"; +export const collectBody = async (streamBody = new Uint8Array(), context) => { + if (streamBody instanceof Uint8Array) { + return Uint8ArrayBlobAdapter.mutate(streamBody); + } + if (!streamBody) { + return Uint8ArrayBlobAdapter.mutate(new Uint8Array()); + } + const fromContext = context.streamCollector(streamBody); + return Uint8ArrayBlobAdapter.mutate(await fromContext); +}; diff --git a/amplify/functions/deleteDocument/node_modules/@smithy/core/dist-es/submodules/protocols/extended-encode-uri-component.js b/amplify/functions/deleteDocument/node_modules/@smithy/core/dist-es/submodules/protocols/extended-encode-uri-component.js new file mode 100644 index 0000000..5baeaf5 --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@smithy/core/dist-es/submodules/protocols/extended-encode-uri-component.js @@ -0,0 +1,5 @@ +export function extendedEncodeURIComponent(str) { + return encodeURIComponent(str).replace(/[!'()*]/g, function (c) { + return "%" + c.charCodeAt(0).toString(16).toUpperCase(); + }); +} diff --git a/amplify/functions/deleteDocument/node_modules/@smithy/core/dist-es/submodules/protocols/index.js 
b/amplify/functions/deleteDocument/node_modules/@smithy/core/dist-es/submodules/protocols/index.js new file mode 100644 index 0000000..a5de22f --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@smithy/core/dist-es/submodules/protocols/index.js @@ -0,0 +1,4 @@ +export * from "./collect-stream-body"; +export * from "./extended-encode-uri-component"; +export * from "./requestBuilder"; +export * from "./resolve-path"; diff --git a/amplify/functions/deleteDocument/node_modules/@smithy/core/dist-es/submodules/protocols/requestBuilder.js b/amplify/functions/deleteDocument/node_modules/@smithy/core/dist-es/submodules/protocols/requestBuilder.js new file mode 100644 index 0000000..3391ef2 --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@smithy/core/dist-es/submodules/protocols/requestBuilder.js @@ -0,0 +1,67 @@ +import { HttpRequest } from "@smithy/protocol-http"; +import { resolvedPath } from "./resolve-path"; +export function requestBuilder(input, context) { + return new RequestBuilder(input, context); +} +export class RequestBuilder { + constructor(input, context) { + this.input = input; + this.context = context; + this.query = {}; + this.method = ""; + this.headers = {}; + this.path = ""; + this.body = null; + this.hostname = ""; + this.resolvePathStack = []; + } + async build() { + const { hostname, protocol = "https", port, path: basePath } = await this.context.endpoint(); + this.path = basePath; + for (const resolvePath of this.resolvePathStack) { + resolvePath(this.path); + } + return new HttpRequest({ + protocol, + hostname: this.hostname || hostname, + port, + method: this.method, + path: this.path, + query: this.query, + body: this.body, + headers: this.headers, + }); + } + hn(hostname) { + this.hostname = hostname; + return this; + } + bp(uriLabel) { + this.resolvePathStack.push((basePath) => { + this.path = `${basePath?.endsWith("/") ? 
basePath.slice(0, -1) : basePath || ""}` + uriLabel; + }); + return this; + } + p(memberName, labelValueProvider, uriLabel, isGreedyLabel) { + this.resolvePathStack.push((path) => { + this.path = resolvedPath(path, this.input, memberName, labelValueProvider, uriLabel, isGreedyLabel); + }); + return this; + } + h(headers) { + this.headers = headers; + return this; + } + q(query) { + this.query = query; + return this; + } + b(body) { + this.body = body; + return this; + } + m(method) { + this.method = method; + return this; + } +} diff --git a/amplify/functions/deleteDocument/node_modules/@smithy/core/dist-es/submodules/protocols/resolve-path.js b/amplify/functions/deleteDocument/node_modules/@smithy/core/dist-es/submodules/protocols/resolve-path.js new file mode 100644 index 0000000..8483e01 --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@smithy/core/dist-es/submodules/protocols/resolve-path.js @@ -0,0 +1,19 @@ +import { extendedEncodeURIComponent } from "./extended-encode-uri-component"; +export const resolvedPath = (resolvedPath, input, memberName, labelValueProvider, uriLabel, isGreedyLabel) => { + if (input != null && input[memberName] !== undefined) { + const labelValue = labelValueProvider(); + if (labelValue.length <= 0) { + throw new Error("Empty value provided for input HTTP label: " + memberName + "."); + } + resolvedPath = resolvedPath.replace(uriLabel, isGreedyLabel + ? 
labelValue + .split("/") + .map((segment) => extendedEncodeURIComponent(segment)) + .join("/") + : extendedEncodeURIComponent(labelValue)); + } + else { + throw new Error("No value provided for input HTTP label: " + memberName + "."); + } + return resolvedPath; +}; diff --git a/amplify/functions/deleteDocument/node_modules/@smithy/core/dist-es/submodules/serde/index.js b/amplify/functions/deleteDocument/node_modules/@smithy/core/dist-es/submodules/serde/index.js new file mode 100644 index 0000000..a70d0dd --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@smithy/core/dist-es/submodules/serde/index.js @@ -0,0 +1 @@ +export * from "./value/NumericValue"; diff --git a/amplify/functions/deleteDocument/node_modules/@smithy/core/dist-es/submodules/serde/value/NumericValue.js b/amplify/functions/deleteDocument/node_modules/@smithy/core/dist-es/submodules/serde/value/NumericValue.js new file mode 100644 index 0000000..6af270f --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@smithy/core/dist-es/submodules/serde/value/NumericValue.js @@ -0,0 +1,9 @@ +export class NumericValue { + constructor(string, type) { + this.string = string; + this.type = type; + } +} +export function nv(string) { + return new NumericValue(string, "bigDecimal"); +} diff --git a/amplify/functions/deleteDocument/node_modules/@smithy/core/dist-es/util-identity-and-auth/DefaultIdentityProviderConfig.js b/amplify/functions/deleteDocument/node_modules/@smithy/core/dist-es/util-identity-and-auth/DefaultIdentityProviderConfig.js new file mode 100644 index 0000000..3bc1016 --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@smithy/core/dist-es/util-identity-and-auth/DefaultIdentityProviderConfig.js @@ -0,0 +1,13 @@ +export class DefaultIdentityProviderConfig { + constructor(config) { + this.authSchemes = new Map(); + for (const [key, value] of Object.entries(config)) { + if (value !== undefined) { + this.authSchemes.set(key, value); + } + } + } + 
getIdentityProvider(schemeId) { + return this.authSchemes.get(schemeId); + } +} diff --git a/amplify/functions/deleteDocument/node_modules/@smithy/core/dist-es/util-identity-and-auth/httpAuthSchemes/httpApiKeyAuth.js b/amplify/functions/deleteDocument/node_modules/@smithy/core/dist-es/util-identity-and-auth/httpAuthSchemes/httpApiKeyAuth.js new file mode 100644 index 0000000..8b6f598 --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@smithy/core/dist-es/util-identity-and-auth/httpAuthSchemes/httpApiKeyAuth.js @@ -0,0 +1,34 @@ +import { HttpRequest } from "@smithy/protocol-http"; +import { HttpApiKeyAuthLocation } from "@smithy/types"; +export class HttpApiKeyAuthSigner { + async sign(httpRequest, identity, signingProperties) { + if (!signingProperties) { + throw new Error("request could not be signed with `apiKey` since the `name` and `in` signer properties are missing"); + } + if (!signingProperties.name) { + throw new Error("request could not be signed with `apiKey` since the `name` signer property is missing"); + } + if (!signingProperties.in) { + throw new Error("request could not be signed with `apiKey` since the `in` signer property is missing"); + } + if (!identity.apiKey) { + throw new Error("request could not be signed with `apiKey` since the `apiKey` is not defined"); + } + const clonedRequest = HttpRequest.clone(httpRequest); + if (signingProperties.in === HttpApiKeyAuthLocation.QUERY) { + clonedRequest.query[signingProperties.name] = identity.apiKey; + } + else if (signingProperties.in === HttpApiKeyAuthLocation.HEADER) { + clonedRequest.headers[signingProperties.name] = signingProperties.scheme + ? 
`${signingProperties.scheme} ${identity.apiKey}` + : identity.apiKey; + } + else { + throw new Error("request can only be signed with `apiKey` locations `query` or `header`, " + + "but found: `" + + signingProperties.in + + "`"); + } + return clonedRequest; + } +} diff --git a/amplify/functions/deleteDocument/node_modules/@smithy/core/dist-es/util-identity-and-auth/httpAuthSchemes/httpBearerAuth.js b/amplify/functions/deleteDocument/node_modules/@smithy/core/dist-es/util-identity-and-auth/httpAuthSchemes/httpBearerAuth.js new file mode 100644 index 0000000..b92a9c3 --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@smithy/core/dist-es/util-identity-and-auth/httpAuthSchemes/httpBearerAuth.js @@ -0,0 +1,11 @@ +import { HttpRequest } from "@smithy/protocol-http"; +export class HttpBearerAuthSigner { + async sign(httpRequest, identity, signingProperties) { + const clonedRequest = HttpRequest.clone(httpRequest); + if (!identity.token) { + throw new Error("request could not be signed with `token` since the `token` is not defined"); + } + clonedRequest.headers["Authorization"] = `Bearer ${identity.token}`; + return clonedRequest; + } +} diff --git a/amplify/functions/deleteDocument/node_modules/@smithy/core/dist-es/util-identity-and-auth/httpAuthSchemes/index.js b/amplify/functions/deleteDocument/node_modules/@smithy/core/dist-es/util-identity-and-auth/httpAuthSchemes/index.js new file mode 100644 index 0000000..9d240fe --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@smithy/core/dist-es/util-identity-and-auth/httpAuthSchemes/index.js @@ -0,0 +1,3 @@ +export * from "./httpApiKeyAuth"; +export * from "./httpBearerAuth"; +export * from "./noAuth"; diff --git a/amplify/functions/deleteDocument/node_modules/@smithy/core/dist-es/util-identity-and-auth/httpAuthSchemes/noAuth.js b/amplify/functions/deleteDocument/node_modules/@smithy/core/dist-es/util-identity-and-auth/httpAuthSchemes/noAuth.js new file mode 100644 index 0000000..356193d --- 
/dev/null +++ b/amplify/functions/deleteDocument/node_modules/@smithy/core/dist-es/util-identity-and-auth/httpAuthSchemes/noAuth.js @@ -0,0 +1,5 @@ +export class NoAuthSigner { + async sign(httpRequest, identity, signingProperties) { + return httpRequest; + } +} diff --git a/amplify/functions/deleteDocument/node_modules/@smithy/core/dist-es/util-identity-and-auth/index.js b/amplify/functions/deleteDocument/node_modules/@smithy/core/dist-es/util-identity-and-auth/index.js new file mode 100644 index 0000000..87ba64b --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@smithy/core/dist-es/util-identity-and-auth/index.js @@ -0,0 +1,3 @@ +export * from "./DefaultIdentityProviderConfig"; +export * from "./httpAuthSchemes"; +export * from "./memoizeIdentityProvider"; diff --git a/amplify/functions/deleteDocument/node_modules/@smithy/core/dist-es/util-identity-and-auth/memoizeIdentityProvider.js b/amplify/functions/deleteDocument/node_modules/@smithy/core/dist-es/util-identity-and-auth/memoizeIdentityProvider.js new file mode 100644 index 0000000..8050585 --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@smithy/core/dist-es/util-identity-and-auth/memoizeIdentityProvider.js @@ -0,0 +1,53 @@ +export const createIsIdentityExpiredFunction = (expirationMs) => (identity) => doesIdentityRequireRefresh(identity) && identity.expiration.getTime() - Date.now() < expirationMs; +export const EXPIRATION_MS = 300000; +export const isIdentityExpired = createIsIdentityExpiredFunction(EXPIRATION_MS); +export const doesIdentityRequireRefresh = (identity) => identity.expiration !== undefined; +export const memoizeIdentityProvider = (provider, isExpired, requiresRefresh) => { + if (provider === undefined) { + return undefined; + } + const normalizedProvider = typeof provider !== "function" ? 
async () => Promise.resolve(provider) : provider; + let resolved; + let pending; + let hasResult; + let isConstant = false; + const coalesceProvider = async (options) => { + if (!pending) { + pending = normalizedProvider(options); + } + try { + resolved = await pending; + hasResult = true; + isConstant = false; + } + finally { + pending = undefined; + } + return resolved; + }; + if (isExpired === undefined) { + return async (options) => { + if (!hasResult || options?.forceRefresh) { + resolved = await coalesceProvider(options); + } + return resolved; + }; + } + return async (options) => { + if (!hasResult || options?.forceRefresh) { + resolved = await coalesceProvider(options); + } + if (isConstant) { + return resolved; + } + if (!requiresRefresh(resolved)) { + isConstant = true; + return resolved; + } + if (isExpired(resolved)) { + await coalesceProvider(options); + return resolved; + } + return resolved; + }; +}; diff --git a/amplify/functions/deleteDocument/node_modules/@smithy/core/dist-types/getSmithyContext.d.ts b/amplify/functions/deleteDocument/node_modules/@smithy/core/dist-types/getSmithyContext.d.ts new file mode 100644 index 0000000..523ee47 --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@smithy/core/dist-types/getSmithyContext.d.ts @@ -0,0 +1,5 @@ +import { HandlerExecutionContext } from "@smithy/types"; +/** + * @internal + */ +export declare const getSmithyContext: (context: HandlerExecutionContext) => Record; diff --git a/amplify/functions/deleteDocument/node_modules/@smithy/core/dist-types/index.d.ts b/amplify/functions/deleteDocument/node_modules/@smithy/core/dist-types/index.d.ts new file mode 100644 index 0000000..1dcdba1 --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@smithy/core/dist-types/index.d.ts @@ -0,0 +1,8 @@ +export * from "./getSmithyContext"; +export * from "./middleware-http-auth-scheme"; +export * from "./middleware-http-signing"; +export * from "./normalizeProvider"; +export { createPaginator 
} from "./pagination/createPaginator"; +export * from "./protocols/requestBuilder"; +export * from "./setFeature"; +export * from "./util-identity-and-auth"; diff --git a/amplify/functions/deleteDocument/node_modules/@smithy/core/dist-types/middleware-http-auth-scheme/getHttpAuthSchemeEndpointRuleSetPlugin.d.ts b/amplify/functions/deleteDocument/node_modules/@smithy/core/dist-types/middleware-http-auth-scheme/getHttpAuthSchemeEndpointRuleSetPlugin.d.ts new file mode 100644 index 0000000..996b0de --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@smithy/core/dist-types/middleware-http-auth-scheme/getHttpAuthSchemeEndpointRuleSetPlugin.d.ts @@ -0,0 +1,18 @@ +import { HandlerExecutionContext, HttpAuthSchemeParameters, HttpAuthSchemeParametersProvider, IdentityProviderConfig, Pluggable, RelativeMiddlewareOptions, SerializeHandlerOptions } from "@smithy/types"; +import { PreviouslyResolved } from "./httpAuthSchemeMiddleware"; +/** + * @internal + */ +export declare const httpAuthSchemeEndpointRuleSetMiddlewareOptions: SerializeHandlerOptions & RelativeMiddlewareOptions; +/** + * @internal + */ +interface HttpAuthSchemeEndpointRuleSetPluginOptions { + httpAuthSchemeParametersProvider: HttpAuthSchemeParametersProvider; + identityProviderConfigProvider: (config: TConfig) => Promise; +} +/** + * @internal + */ +export declare const getHttpAuthSchemeEndpointRuleSetPlugin: (config: TConfig & PreviouslyResolved, { httpAuthSchemeParametersProvider, identityProviderConfigProvider, }: HttpAuthSchemeEndpointRuleSetPluginOptions) => Pluggable; +export {}; diff --git a/amplify/functions/deleteDocument/node_modules/@smithy/core/dist-types/middleware-http-auth-scheme/getHttpAuthSchemePlugin.d.ts b/amplify/functions/deleteDocument/node_modules/@smithy/core/dist-types/middleware-http-auth-scheme/getHttpAuthSchemePlugin.d.ts new file mode 100644 index 0000000..2e57733 --- /dev/null +++ 
b/amplify/functions/deleteDocument/node_modules/@smithy/core/dist-types/middleware-http-auth-scheme/getHttpAuthSchemePlugin.d.ts @@ -0,0 +1,18 @@ +import { HandlerExecutionContext, HttpAuthSchemeParameters, HttpAuthSchemeParametersProvider, IdentityProviderConfig, Pluggable, RelativeMiddlewareOptions, SerializeHandlerOptions } from "@smithy/types"; +import { PreviouslyResolved } from "./httpAuthSchemeMiddleware"; +/** + * @internal + */ +export declare const httpAuthSchemeMiddlewareOptions: SerializeHandlerOptions & RelativeMiddlewareOptions; +/** + * @internal + */ +interface HttpAuthSchemePluginOptions { + httpAuthSchemeParametersProvider: HttpAuthSchemeParametersProvider; + identityProviderConfigProvider: (config: TConfig) => Promise; +} +/** + * @internal + */ +export declare const getHttpAuthSchemePlugin: (config: TConfig & PreviouslyResolved, { httpAuthSchemeParametersProvider, identityProviderConfigProvider, }: HttpAuthSchemePluginOptions) => Pluggable; +export {}; diff --git a/amplify/functions/deleteDocument/node_modules/@smithy/core/dist-types/middleware-http-auth-scheme/httpAuthSchemeMiddleware.d.ts b/amplify/functions/deleteDocument/node_modules/@smithy/core/dist-types/middleware-http-auth-scheme/httpAuthSchemeMiddleware.d.ts new file mode 100644 index 0000000..50f1ea8 --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@smithy/core/dist-types/middleware-http-auth-scheme/httpAuthSchemeMiddleware.d.ts @@ -0,0 +1,33 @@ +import { HandlerExecutionContext, HttpAuthScheme, HttpAuthSchemeParameters, HttpAuthSchemeParametersProvider, HttpAuthSchemeProvider, IdentityProviderConfig, Provider, SelectedHttpAuthScheme, SerializeMiddleware, SMITHY_CONTEXT_KEY } from "@smithy/types"; +/** + * @internal + */ +export interface PreviouslyResolved { + authSchemePreference?: Provider; + httpAuthSchemes: HttpAuthScheme[]; + httpAuthSchemeProvider: HttpAuthSchemeProvider; +} +/** + * @internal + */ +interface HttpAuthSchemeMiddlewareOptions { + 
httpAuthSchemeParametersProvider: HttpAuthSchemeParametersProvider; + identityProviderConfigProvider: (config: TConfig) => Promise; +} +/** + * @internal + */ +interface HttpAuthSchemeMiddlewareSmithyContext extends Record { + selectedHttpAuthScheme?: SelectedHttpAuthScheme; +} +/** + * @internal + */ +interface HttpAuthSchemeMiddlewareHandlerExecutionContext extends HandlerExecutionContext { + [SMITHY_CONTEXT_KEY]?: HttpAuthSchemeMiddlewareSmithyContext; +} +/** + * @internal + */ +export declare const httpAuthSchemeMiddleware: (config: TConfig & PreviouslyResolved, mwOptions: HttpAuthSchemeMiddlewareOptions) => SerializeMiddleware; +export {}; diff --git a/amplify/functions/deleteDocument/node_modules/@smithy/core/dist-types/middleware-http-auth-scheme/index.d.ts b/amplify/functions/deleteDocument/node_modules/@smithy/core/dist-types/middleware-http-auth-scheme/index.d.ts new file mode 100644 index 0000000..5042e7d --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@smithy/core/dist-types/middleware-http-auth-scheme/index.d.ts @@ -0,0 +1,3 @@ +export * from "./httpAuthSchemeMiddleware"; +export * from "./getHttpAuthSchemeEndpointRuleSetPlugin"; +export * from "./getHttpAuthSchemePlugin"; diff --git a/amplify/functions/deleteDocument/node_modules/@smithy/core/dist-types/middleware-http-auth-scheme/resolveAuthOptions.d.ts b/amplify/functions/deleteDocument/node_modules/@smithy/core/dist-types/middleware-http-auth-scheme/resolveAuthOptions.d.ts new file mode 100644 index 0000000..52fc604 --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@smithy/core/dist-types/middleware-http-auth-scheme/resolveAuthOptions.d.ts @@ -0,0 +1,10 @@ +import { HttpAuthOption } from "@smithy/types"; +/** + * Resolves list of auth options based on the supported ones, vs the preference list. + * + * @param candidateAuthOptions list of supported auth options selected by the standard + * resolution process (model-based, endpoints 2.0, etc.) 
+ * @param authSchemePreference list of auth schemes preferred by user. + * @returns + */ +export declare const resolveAuthOptions: (candidateAuthOptions: HttpAuthOption[], authSchemePreference: string[]) => HttpAuthOption[]; diff --git a/amplify/functions/deleteDocument/node_modules/@smithy/core/dist-types/middleware-http-signing/getHttpSigningMiddleware.d.ts b/amplify/functions/deleteDocument/node_modules/@smithy/core/dist-types/middleware-http-signing/getHttpSigningMiddleware.d.ts new file mode 100644 index 0000000..56c89a2 --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@smithy/core/dist-types/middleware-http-signing/getHttpSigningMiddleware.d.ts @@ -0,0 +1,9 @@ +import { FinalizeRequestHandlerOptions, Pluggable, RelativeMiddlewareOptions } from "@smithy/types"; +/** + * @internal + */ +export declare const httpSigningMiddlewareOptions: FinalizeRequestHandlerOptions & RelativeMiddlewareOptions; +/** + * @internal + */ +export declare const getHttpSigningPlugin: (config: object) => Pluggable; diff --git a/amplify/functions/deleteDocument/node_modules/@smithy/core/dist-types/middleware-http-signing/httpSigningMiddleware.d.ts b/amplify/functions/deleteDocument/node_modules/@smithy/core/dist-types/middleware-http-signing/httpSigningMiddleware.d.ts new file mode 100644 index 0000000..3b43611 --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@smithy/core/dist-types/middleware-http-signing/httpSigningMiddleware.d.ts @@ -0,0 +1,5 @@ +import { FinalizeRequestMiddleware } from "@smithy/types"; +/** + * @internal + */ +export declare const httpSigningMiddleware: (config: object) => FinalizeRequestMiddleware; diff --git a/amplify/functions/deleteDocument/node_modules/@smithy/core/dist-types/middleware-http-signing/index.d.ts b/amplify/functions/deleteDocument/node_modules/@smithy/core/dist-types/middleware-http-signing/index.d.ts new file mode 100644 index 0000000..7bc6cfe --- /dev/null +++ 
b/amplify/functions/deleteDocument/node_modules/@smithy/core/dist-types/middleware-http-signing/index.d.ts @@ -0,0 +1,2 @@ +export * from "./httpSigningMiddleware"; +export * from "./getHttpSigningMiddleware"; diff --git a/amplify/functions/deleteDocument/node_modules/@smithy/core/dist-types/normalizeProvider.d.ts b/amplify/functions/deleteDocument/node_modules/@smithy/core/dist-types/normalizeProvider.d.ts new file mode 100644 index 0000000..4fe2d9a --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@smithy/core/dist-types/normalizeProvider.d.ts @@ -0,0 +1,7 @@ +import { Provider } from "@smithy/types"; +/** + * @internal + * + * @returns a provider function for the input value if it isn't already one. + */ +export declare const normalizeProvider: (input: T | Provider) => Provider; diff --git a/amplify/functions/deleteDocument/node_modules/@smithy/core/dist-types/pagination/createPaginator.d.ts b/amplify/functions/deleteDocument/node_modules/@smithy/core/dist-types/pagination/createPaginator.d.ts new file mode 100644 index 0000000..78fcbe0 --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@smithy/core/dist-types/pagination/createPaginator.d.ts @@ -0,0 +1,7 @@ +import type { PaginationConfiguration, Paginator } from "@smithy/types"; +/** + * @internal + * + * Creates a paginator. 
+ */ +export declare function createPaginator(ClientCtor: any, CommandCtor: any, inputTokenName: string, outputTokenName: string, pageSizeTokenName?: string): (config: PaginationConfigType, input: InputType, ...additionalArguments: any[]) => Paginator; diff --git a/amplify/functions/deleteDocument/node_modules/@smithy/core/dist-types/protocols/requestBuilder.d.ts b/amplify/functions/deleteDocument/node_modules/@smithy/core/dist-types/protocols/requestBuilder.d.ts new file mode 100644 index 0000000..8e2f2ef --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@smithy/core/dist-types/protocols/requestBuilder.d.ts @@ -0,0 +1,5 @@ +/** + * @internal + * Backwards compatibility re-export. + */ +export { requestBuilder } from "@smithy/core/protocols"; diff --git a/amplify/functions/deleteDocument/node_modules/@smithy/core/dist-types/setFeature.d.ts b/amplify/functions/deleteDocument/node_modules/@smithy/core/dist-types/setFeature.d.ts new file mode 100644 index 0000000..279106c --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@smithy/core/dist-types/setFeature.d.ts @@ -0,0 +1,12 @@ +import type { HandlerExecutionContext, SmithyFeatures } from "@smithy/types"; +/** + * @internal + * Indicates to the request context that a given feature is active. + * + * @param context - handler execution context. + * @param feature - readable name of feature. + * @param value - encoding value of feature. This is required because the + * specification asks the library not to include a runtime lookup of all + * the feature identifiers. 
+ */ +export declare function setFeature(context: HandlerExecutionContext, feature: F, value: SmithyFeatures[F]): void; diff --git a/amplify/functions/deleteDocument/node_modules/@smithy/core/dist-types/submodules/cbor/cbor-decode.d.ts b/amplify/functions/deleteDocument/node_modules/@smithy/core/dist-types/submodules/cbor/cbor-decode.d.ts new file mode 100644 index 0000000..baf3961 --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@smithy/core/dist-types/submodules/cbor/cbor-decode.d.ts @@ -0,0 +1,17 @@ +import { CborValueType, Float32, Uint8, Uint32 } from "./cbor-types"; +/** + * @internal + * @param bytes - to be set as the decode source. + * + * Sets the decode bytearray source and its data view. + */ +export declare function setPayload(bytes: Uint8Array): void; +/** + * @internal + * Decodes the data between the two indices. + */ +export declare function decode(at: Uint32, to: Uint32): CborValueType; +/** + * @internal + */ +export declare function bytesToFloat16(a: Uint8, b: Uint8): Float32; diff --git a/amplify/functions/deleteDocument/node_modules/@smithy/core/dist-types/submodules/cbor/cbor-encode.d.ts b/amplify/functions/deleteDocument/node_modules/@smithy/core/dist-types/submodules/cbor/cbor-encode.d.ts new file mode 100644 index 0000000..bfc3328 --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@smithy/core/dist-types/submodules/cbor/cbor-encode.d.ts @@ -0,0 +1,9 @@ +/** + * @internal + */ +export declare function toUint8Array(): Uint8Array; +export declare function resize(size: number): void; +/** + * @param _input - JS data object. 
+ */ +export declare function encode(_input: any): void; diff --git a/amplify/functions/deleteDocument/node_modules/@smithy/core/dist-types/submodules/cbor/cbor-types.d.ts b/amplify/functions/deleteDocument/node_modules/@smithy/core/dist-types/submodules/cbor/cbor-types.d.ts new file mode 100644 index 0000000..dd41338 --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@smithy/core/dist-types/submodules/cbor/cbor-types.d.ts @@ -0,0 +1,64 @@ +/// +/// +export type CborItemType = undefined | boolean | number | bigint | [CborUnstructuredByteStringType, Uint64] | string | CborTagType; +export type CborTagType = { + tag: Uint64 | number; + value: CborValueType; + [tagSymbol]: true; +}; +export type CborUnstructuredByteStringType = Uint8Array; +export type CborListType = Array; +export type CborMapType = Record; +export type CborCollectionType = CborMapType | CborListType; +export type CborValueType = CborItemType | CborCollectionType | any; +export type CborArgumentLength = 1 | 2 | 4 | 8; +export type CborArgumentLengthOffset = 1 | 2 | 3 | 5 | 9; +export type CborOffset = number; +export type Uint8 = number; +export type Uint32 = number; +export type Uint64 = bigint; +export type Float32 = number; +export type Int64 = bigint; +export type Float16Binary = number; +export type Float32Binary = number; +export type CborMajorType = typeof majorUint64 | typeof majorNegativeInt64 | typeof majorUnstructuredByteString | typeof majorUtf8String | typeof majorList | typeof majorMap | typeof majorTag | typeof majorSpecial; +export declare const majorUint64 = 0; +export declare const majorNegativeInt64 = 1; +export declare const majorUnstructuredByteString = 2; +export declare const majorUtf8String = 3; +export declare const majorList = 4; +export declare const majorMap = 5; +export declare const majorTag = 6; +export declare const majorSpecial = 7; +export declare const specialFalse = 20; +export declare const specialTrue = 21; +export declare const specialNull = 22; 
+export declare const specialUndefined = 23; +export declare const extendedOneByte = 24; +export declare const extendedFloat16 = 25; +export declare const extendedFloat32 = 26; +export declare const extendedFloat64 = 27; +export declare const minorIndefinite = 31; +export declare function alloc(size: number): Uint8Array | Buffer; +/** + * @public + * + * The presence of this symbol as an object key indicates it should be considered a tag + * for CBOR serialization purposes. + * + * The object must also have the properties "tag" and "value". + */ +export declare const tagSymbol: unique symbol; +/** + * @public + * Applies the tag symbol to the object. + */ +export declare function tag(data: { + tag: number | bigint; + value: any; + [tagSymbol]?: true; +}): { + tag: number | bigint; + value: any; + [tagSymbol]: true; +}; diff --git a/amplify/functions/deleteDocument/node_modules/@smithy/core/dist-types/submodules/cbor/cbor.d.ts b/amplify/functions/deleteDocument/node_modules/@smithy/core/dist-types/submodules/cbor/cbor.d.ts new file mode 100644 index 0000000..7577213 --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@smithy/core/dist-types/submodules/cbor/cbor.d.ts @@ -0,0 +1,26 @@ +/** + * This implementation is synchronous and only implements the parts of CBOR + * specification used by Smithy RPCv2 CBOR protocol. + * + * This cbor serde implementation is derived from AWS SDK for Go's implementation. + * @see https://github.com/aws/smithy-go/tree/main/encoding/cbor + * + * The cbor-x implementation was also instructional: + * @see https://github.com/kriszyp/cbor-x + */ +export declare const cbor: { + deserialize(payload: Uint8Array): any; + serialize(input: any): Uint8Array; + /** + * @public + * @param size - byte length to allocate. + * + * This may be used to garbage collect the CBOR + * shared encoding buffer space, + * e.g. resizeEncodingBuffer(0); + * + * This may also be used to pre-allocate more space for + * CBOR encoding, e.g. 
resizeEncodingBuffer(100_000_000); + */ + resizeEncodingBuffer(size: number): void; +}; diff --git a/amplify/functions/deleteDocument/node_modules/@smithy/core/dist-types/submodules/cbor/index.d.ts b/amplify/functions/deleteDocument/node_modules/@smithy/core/dist-types/submodules/cbor/index.d.ts new file mode 100644 index 0000000..0910d27 --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@smithy/core/dist-types/submodules/cbor/index.d.ts @@ -0,0 +1,3 @@ +export { cbor } from "./cbor"; +export * from "./parseCborBody"; +export { tagSymbol, tag } from "./cbor-types"; diff --git a/amplify/functions/deleteDocument/node_modules/@smithy/core/dist-types/submodules/cbor/parseCborBody.d.ts b/amplify/functions/deleteDocument/node_modules/@smithy/core/dist-types/submodules/cbor/parseCborBody.d.ts new file mode 100644 index 0000000..8811679 --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@smithy/core/dist-types/submodules/cbor/parseCborBody.d.ts @@ -0,0 +1,31 @@ +import { HttpRequest as __HttpRequest } from "@smithy/protocol-http"; +import { HeaderBag as __HeaderBag, HttpResponse, SerdeContext as __SerdeContext, SerdeContext } from "@smithy/types"; +import { tag, tagSymbol } from "./cbor-types"; +/** + * @internal + */ +export declare const parseCborBody: (streamBody: any, context: SerdeContext) => any; +/** + * @internal + */ +export declare const dateToTag: (date: Date) => { + tag: number | bigint; + value: any; + [tagSymbol]: true; +}; +/** + * @internal + */ +export declare const parseCborErrorBody: (errorBody: any, context: SerdeContext) => Promise; +/** + * @internal + */ +export declare const loadSmithyRpcV2CborErrorCode: (output: HttpResponse, data: any) => string | undefined; +/** + * @internal + */ +export declare const checkCborResponse: (response: HttpResponse) => void; +/** + * @internal + */ +export declare const buildHttpRpcRequest: (context: __SerdeContext, headers: __HeaderBag, path: string, resolvedHostname: string | 
undefined, body: any) => Promise<__HttpRequest>; diff --git a/amplify/functions/deleteDocument/node_modules/@smithy/core/dist-types/submodules/protocols/collect-stream-body.d.ts b/amplify/functions/deleteDocument/node_modules/@smithy/core/dist-types/submodules/protocols/collect-stream-body.d.ts new file mode 100644 index 0000000..b555804 --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@smithy/core/dist-types/submodules/protocols/collect-stream-body.d.ts @@ -0,0 +1,10 @@ +import { SerdeContext } from "@smithy/types"; +import { Uint8ArrayBlobAdapter } from "@smithy/util-stream"; +/** + * @internal + * + * Collect low-level response body stream to Uint8Array. + */ +export declare const collectBody: (streamBody: any, context: { + streamCollector: SerdeContext["streamCollector"]; +}) => Promise; diff --git a/amplify/functions/deleteDocument/node_modules/@smithy/core/dist-types/submodules/protocols/extended-encode-uri-component.d.ts b/amplify/functions/deleteDocument/node_modules/@smithy/core/dist-types/submodules/protocols/extended-encode-uri-component.d.ts new file mode 100644 index 0000000..403e9ae --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@smithy/core/dist-types/submodules/protocols/extended-encode-uri-component.d.ts @@ -0,0 +1,7 @@ +/** + * @internal + * + * Function that wraps encodeURIComponent to encode additional characters + * to fully adhere to RFC 3986. 
+ */ +export declare function extendedEncodeURIComponent(str: string): string; diff --git a/amplify/functions/deleteDocument/node_modules/@smithy/core/dist-types/submodules/protocols/index.d.ts b/amplify/functions/deleteDocument/node_modules/@smithy/core/dist-types/submodules/protocols/index.d.ts new file mode 100644 index 0000000..a5de22f --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@smithy/core/dist-types/submodules/protocols/index.d.ts @@ -0,0 +1,4 @@ +export * from "./collect-stream-body"; +export * from "./extended-encode-uri-component"; +export * from "./requestBuilder"; +export * from "./resolve-path"; diff --git a/amplify/functions/deleteDocument/node_modules/@smithy/core/dist-types/submodules/protocols/requestBuilder.d.ts b/amplify/functions/deleteDocument/node_modules/@smithy/core/dist-types/submodules/protocols/requestBuilder.d.ts new file mode 100644 index 0000000..3013d8a --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@smithy/core/dist-types/submodules/protocols/requestBuilder.d.ts @@ -0,0 +1,51 @@ +import { HttpRequest } from "@smithy/protocol-http"; +import type { SerdeContext } from "@smithy/types"; +/** + * @internal + * used in code-generated serde. + */ +export declare function requestBuilder(input: any, context: SerdeContext): RequestBuilder; +/** + * @internal + */ +export declare class RequestBuilder { + private input; + private context; + private query; + private method; + private headers; + private path; + private body; + private hostname; + private resolvePathStack; + constructor(input: any, context: SerdeContext); + build(): Promise; + /** + * Brevity setter for "hostname". + */ + hn(hostname: string): this; + /** + * Brevity initial builder for "basepath". + */ + bp(uriLabel: string): this; + /** + * Brevity incremental builder for "path". 
+ */ + p(memberName: string, labelValueProvider: () => string | undefined, uriLabel: string, isGreedyLabel: boolean): this; + /** + * Brevity setter for "headers". + */ + h(headers: Record): this; + /** + * Brevity setter for "query". + */ + q(query: Record): this; + /** + * Brevity setter for "body". + */ + b(body: any): this; + /** + * Brevity setter for "method". + */ + m(method: string): this; +} diff --git a/amplify/functions/deleteDocument/node_modules/@smithy/core/dist-types/submodules/protocols/resolve-path.d.ts b/amplify/functions/deleteDocument/node_modules/@smithy/core/dist-types/submodules/protocols/resolve-path.d.ts new file mode 100644 index 0000000..03386d6 --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@smithy/core/dist-types/submodules/protocols/resolve-path.d.ts @@ -0,0 +1,4 @@ +/** + * @internal + */ +export declare const resolvedPath: (resolvedPath: string, input: unknown, memberName: string, labelValueProvider: () => string | undefined, uriLabel: string, isGreedyLabel: boolean) => string; diff --git a/amplify/functions/deleteDocument/node_modules/@smithy/core/dist-types/submodules/serde/index.d.ts b/amplify/functions/deleteDocument/node_modules/@smithy/core/dist-types/submodules/serde/index.d.ts new file mode 100644 index 0000000..a70d0dd --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@smithy/core/dist-types/submodules/serde/index.d.ts @@ -0,0 +1 @@ +export * from "./value/NumericValue"; diff --git a/amplify/functions/deleteDocument/node_modules/@smithy/core/dist-types/submodules/serde/value/NumericValue.d.ts b/amplify/functions/deleteDocument/node_modules/@smithy/core/dist-types/submodules/serde/value/NumericValue.d.ts new file mode 100644 index 0000000..c3736fc --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@smithy/core/dist-types/submodules/serde/value/NumericValue.d.ts @@ -0,0 +1,31 @@ +/** + * Types which may be represented by {@link NumericValue}. 
+ * + * There is currently only one option, because BigInteger and Long should + * use JS BigInt directly, and all other numeric types can be contained in JS Number. + * + * @public + */ +export type NumericType = "bigDecimal"; +/** + * Serialization container for Smithy simple types that do not have a + * direct JavaScript runtime representation. + * + * This container does not perform numeric mathematical operations. + * It is a container for discerning a value's true type. + * + * It allows storage of numeric types not representable in JS without + * making a decision on what numeric library to use. + * + * @public + */ +export declare class NumericValue { + readonly string: string; + readonly type: NumericType; + constructor(string: string, type: NumericType); +} +/** + * Serde shortcut. + * @internal + */ +export declare function nv(string: string): NumericValue; diff --git a/amplify/functions/deleteDocument/node_modules/@smithy/core/dist-types/ts3.4/getSmithyContext.d.ts b/amplify/functions/deleteDocument/node_modules/@smithy/core/dist-types/ts3.4/getSmithyContext.d.ts new file mode 100644 index 0000000..14cd7c4 --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@smithy/core/dist-types/ts3.4/getSmithyContext.d.ts @@ -0,0 +1,5 @@ +import { HandlerExecutionContext } from "@smithy/types"; +/** + * @internal + */ +export declare const getSmithyContext: (context: HandlerExecutionContext) => Record; diff --git a/amplify/functions/deleteDocument/node_modules/@smithy/core/dist-types/ts3.4/index.d.ts b/amplify/functions/deleteDocument/node_modules/@smithy/core/dist-types/ts3.4/index.d.ts new file mode 100644 index 0000000..347898d --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@smithy/core/dist-types/ts3.4/index.d.ts @@ -0,0 +1,8 @@ +export * from "./getSmithyContext"; +export * from "./middleware-http-auth-scheme"; +export * from "./middleware-http-signing"; +export * from "./normalizeProvider"; +export { createPaginator } from 
"./pagination/createPaginator"; +export * from "./protocols/requestBuilder"; +export * from "./setFeature"; +export * from "./util-identity-and-auth"; diff --git a/amplify/functions/deleteDocument/node_modules/@smithy/core/dist-types/ts3.4/middleware-http-auth-scheme/getHttpAuthSchemeEndpointRuleSetPlugin.d.ts b/amplify/functions/deleteDocument/node_modules/@smithy/core/dist-types/ts3.4/middleware-http-auth-scheme/getHttpAuthSchemeEndpointRuleSetPlugin.d.ts new file mode 100644 index 0000000..27e2e26 --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@smithy/core/dist-types/ts3.4/middleware-http-auth-scheme/getHttpAuthSchemeEndpointRuleSetPlugin.d.ts @@ -0,0 +1,18 @@ +import { HandlerExecutionContext, HttpAuthSchemeParameters, HttpAuthSchemeParametersProvider, IdentityProviderConfig, Pluggable, RelativeMiddlewareOptions, SerializeHandlerOptions } from "@smithy/types"; +import { PreviouslyResolved } from "./httpAuthSchemeMiddleware"; +/** + * @internal + */ +export declare const httpAuthSchemeEndpointRuleSetMiddlewareOptions: SerializeHandlerOptions & RelativeMiddlewareOptions; +/** + * @internal + */ +interface HttpAuthSchemeEndpointRuleSetPluginOptions { + httpAuthSchemeParametersProvider: HttpAuthSchemeParametersProvider; + identityProviderConfigProvider: (config: TConfig) => Promise; +} +/** + * @internal + */ +export declare const getHttpAuthSchemeEndpointRuleSetPlugin: (config: TConfig & PreviouslyResolved, { httpAuthSchemeParametersProvider, identityProviderConfigProvider, }: HttpAuthSchemeEndpointRuleSetPluginOptions) => Pluggable; +export {}; diff --git a/amplify/functions/deleteDocument/node_modules/@smithy/core/dist-types/ts3.4/middleware-http-auth-scheme/getHttpAuthSchemePlugin.d.ts b/amplify/functions/deleteDocument/node_modules/@smithy/core/dist-types/ts3.4/middleware-http-auth-scheme/getHttpAuthSchemePlugin.d.ts new file mode 100644 index 0000000..531e6ec --- /dev/null +++ 
b/amplify/functions/deleteDocument/node_modules/@smithy/core/dist-types/ts3.4/middleware-http-auth-scheme/getHttpAuthSchemePlugin.d.ts @@ -0,0 +1,18 @@ +import { HandlerExecutionContext, HttpAuthSchemeParameters, HttpAuthSchemeParametersProvider, IdentityProviderConfig, Pluggable, RelativeMiddlewareOptions, SerializeHandlerOptions } from "@smithy/types"; +import { PreviouslyResolved } from "./httpAuthSchemeMiddleware"; +/** + * @internal + */ +export declare const httpAuthSchemeMiddlewareOptions: SerializeHandlerOptions & RelativeMiddlewareOptions; +/** + * @internal + */ +interface HttpAuthSchemePluginOptions { + httpAuthSchemeParametersProvider: HttpAuthSchemeParametersProvider; + identityProviderConfigProvider: (config: TConfig) => Promise; +} +/** + * @internal + */ +export declare const getHttpAuthSchemePlugin: (config: TConfig & PreviouslyResolved, { httpAuthSchemeParametersProvider, identityProviderConfigProvider, }: HttpAuthSchemePluginOptions) => Pluggable; +export {}; diff --git a/amplify/functions/deleteDocument/node_modules/@smithy/core/dist-types/ts3.4/middleware-http-auth-scheme/httpAuthSchemeMiddleware.d.ts b/amplify/functions/deleteDocument/node_modules/@smithy/core/dist-types/ts3.4/middleware-http-auth-scheme/httpAuthSchemeMiddleware.d.ts new file mode 100644 index 0000000..bbeaf5f --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@smithy/core/dist-types/ts3.4/middleware-http-auth-scheme/httpAuthSchemeMiddleware.d.ts @@ -0,0 +1,33 @@ +import { HandlerExecutionContext, HttpAuthScheme, HttpAuthSchemeParameters, HttpAuthSchemeParametersProvider, HttpAuthSchemeProvider, IdentityProviderConfig, Provider, SelectedHttpAuthScheme, SerializeMiddleware, SMITHY_CONTEXT_KEY } from "@smithy/types"; +/** + * @internal + */ +export interface PreviouslyResolved { + authSchemePreference?: Provider; + httpAuthSchemes: HttpAuthScheme[]; + httpAuthSchemeProvider: HttpAuthSchemeProvider; +} +/** + * @internal + */ +interface 
HttpAuthSchemeMiddlewareOptions { + httpAuthSchemeParametersProvider: HttpAuthSchemeParametersProvider; + identityProviderConfigProvider: (config: TConfig) => Promise; +} +/** + * @internal + */ +interface HttpAuthSchemeMiddlewareSmithyContext extends Record { + selectedHttpAuthScheme?: SelectedHttpAuthScheme; +} +/** + * @internal + */ +interface HttpAuthSchemeMiddlewareHandlerExecutionContext extends HandlerExecutionContext { + [SMITHY_CONTEXT_KEY]?: HttpAuthSchemeMiddlewareSmithyContext; +} +/** + * @internal + */ +export declare const httpAuthSchemeMiddleware: (config: TConfig & PreviouslyResolved, mwOptions: HttpAuthSchemeMiddlewareOptions) => SerializeMiddleware; +export {}; diff --git a/amplify/functions/deleteDocument/node_modules/@smithy/core/dist-types/ts3.4/middleware-http-auth-scheme/index.d.ts b/amplify/functions/deleteDocument/node_modules/@smithy/core/dist-types/ts3.4/middleware-http-auth-scheme/index.d.ts new file mode 100644 index 0000000..2f275c5 --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@smithy/core/dist-types/ts3.4/middleware-http-auth-scheme/index.d.ts @@ -0,0 +1,3 @@ +export * from "./httpAuthSchemeMiddleware"; +export * from "./getHttpAuthSchemeEndpointRuleSetPlugin"; +export * from "./getHttpAuthSchemePlugin"; diff --git a/amplify/functions/deleteDocument/node_modules/@smithy/core/dist-types/ts3.4/middleware-http-auth-scheme/resolveAuthOptions.d.ts b/amplify/functions/deleteDocument/node_modules/@smithy/core/dist-types/ts3.4/middleware-http-auth-scheme/resolveAuthOptions.d.ts new file mode 100644 index 0000000..8088683 --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@smithy/core/dist-types/ts3.4/middleware-http-auth-scheme/resolveAuthOptions.d.ts @@ -0,0 +1,10 @@ +import { HttpAuthOption } from "@smithy/types"; +/** + * Resolves list of auth options based on the supported ones, vs the preference list. 
+ * + * @param candidateAuthOptions list of supported auth options selected by the standard + * resolution process (model-based, endpoints 2.0, etc.) + * @param authSchemePreference list of auth schemes preferred by user. + * @returns + */ +export declare const resolveAuthOptions: (candidateAuthOptions: HttpAuthOption[], authSchemePreference: string[]) => HttpAuthOption[]; diff --git a/amplify/functions/deleteDocument/node_modules/@smithy/core/dist-types/ts3.4/middleware-http-signing/getHttpSigningMiddleware.d.ts b/amplify/functions/deleteDocument/node_modules/@smithy/core/dist-types/ts3.4/middleware-http-signing/getHttpSigningMiddleware.d.ts new file mode 100644 index 0000000..a01bb31 --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@smithy/core/dist-types/ts3.4/middleware-http-signing/getHttpSigningMiddleware.d.ts @@ -0,0 +1,9 @@ +import { FinalizeRequestHandlerOptions, Pluggable, RelativeMiddlewareOptions } from "@smithy/types"; +/** + * @internal + */ +export declare const httpSigningMiddlewareOptions: FinalizeRequestHandlerOptions & RelativeMiddlewareOptions; +/** + * @internal + */ +export declare const getHttpSigningPlugin: (config: object) => Pluggable; diff --git a/amplify/functions/deleteDocument/node_modules/@smithy/core/dist-types/ts3.4/middleware-http-signing/httpSigningMiddleware.d.ts b/amplify/functions/deleteDocument/node_modules/@smithy/core/dist-types/ts3.4/middleware-http-signing/httpSigningMiddleware.d.ts new file mode 100644 index 0000000..7a86b0b --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@smithy/core/dist-types/ts3.4/middleware-http-signing/httpSigningMiddleware.d.ts @@ -0,0 +1,5 @@ +import { FinalizeRequestMiddleware } from "@smithy/types"; +/** + * @internal + */ +export declare const httpSigningMiddleware: (config: object) => FinalizeRequestMiddleware; diff --git a/amplify/functions/deleteDocument/node_modules/@smithy/core/dist-types/ts3.4/middleware-http-signing/index.d.ts 
b/amplify/functions/deleteDocument/node_modules/@smithy/core/dist-types/ts3.4/middleware-http-signing/index.d.ts new file mode 100644 index 0000000..578f26d --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@smithy/core/dist-types/ts3.4/middleware-http-signing/index.d.ts @@ -0,0 +1,2 @@ +export * from "./httpSigningMiddleware"; +export * from "./getHttpSigningMiddleware"; diff --git a/amplify/functions/deleteDocument/node_modules/@smithy/core/dist-types/ts3.4/normalizeProvider.d.ts b/amplify/functions/deleteDocument/node_modules/@smithy/core/dist-types/ts3.4/normalizeProvider.d.ts new file mode 100644 index 0000000..594e8fa --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@smithy/core/dist-types/ts3.4/normalizeProvider.d.ts @@ -0,0 +1,7 @@ +import { Provider } from "@smithy/types"; +/** + * @internal + * + * @returns a provider function for the input value if it isn't already one. + */ +export declare const normalizeProvider: (input: T | Provider) => Provider; diff --git a/amplify/functions/deleteDocument/node_modules/@smithy/core/dist-types/ts3.4/pagination/createPaginator.d.ts b/amplify/functions/deleteDocument/node_modules/@smithy/core/dist-types/ts3.4/pagination/createPaginator.d.ts new file mode 100644 index 0000000..50400d8 --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@smithy/core/dist-types/ts3.4/pagination/createPaginator.d.ts @@ -0,0 +1,7 @@ +import { PaginationConfiguration, Paginator } from "@smithy/types"; +/** + * @internal + * + * Creates a paginator. 
+ */ +export declare function createPaginator(ClientCtor: any, CommandCtor: any, inputTokenName: string, outputTokenName: string, pageSizeTokenName?: string): (config: PaginationConfigType, input: InputType, ...additionalArguments: any[]) => Paginator; diff --git a/amplify/functions/deleteDocument/node_modules/@smithy/core/dist-types/ts3.4/protocols/requestBuilder.d.ts b/amplify/functions/deleteDocument/node_modules/@smithy/core/dist-types/ts3.4/protocols/requestBuilder.d.ts new file mode 100644 index 0000000..25459a8 --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@smithy/core/dist-types/ts3.4/protocols/requestBuilder.d.ts @@ -0,0 +1,5 @@ +/** + * @internal + * Backwards compatibility re-export. + */ +export { requestBuilder } from "@smithy/core/protocols"; diff --git a/amplify/functions/deleteDocument/node_modules/@smithy/core/dist-types/ts3.4/setFeature.d.ts b/amplify/functions/deleteDocument/node_modules/@smithy/core/dist-types/ts3.4/setFeature.d.ts new file mode 100644 index 0000000..a1995ab --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@smithy/core/dist-types/ts3.4/setFeature.d.ts @@ -0,0 +1,12 @@ +import { HandlerExecutionContext, SmithyFeatures } from "@smithy/types"; +/** + * @internal + * Indicates to the request context that a given feature is active. + * + * @param context - handler execution context. + * @param feature - readable name of feature. + * @param value - encoding value of feature. This is required because the + * specification asks the library not to include a runtime lookup of all + * the feature identifiers. 
+ */ +export declare function setFeature(context: HandlerExecutionContext, feature: F, value: SmithyFeatures[F]): void; diff --git a/amplify/functions/deleteDocument/node_modules/@smithy/core/dist-types/ts3.4/submodules/cbor/cbor-decode.d.ts b/amplify/functions/deleteDocument/node_modules/@smithy/core/dist-types/ts3.4/submodules/cbor/cbor-decode.d.ts new file mode 100644 index 0000000..9ddc992 --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@smithy/core/dist-types/ts3.4/submodules/cbor/cbor-decode.d.ts @@ -0,0 +1,17 @@ +import { CborValueType, Float32, Uint8, Uint32 } from "./cbor-types"; +/** + * @internal + * @param bytes - to be set as the decode source. + * + * Sets the decode bytearray source and its data view. + */ +export declare function setPayload(bytes: Uint8Array): void; +/** + * @internal + * Decodes the data between the two indices. + */ +export declare function decode(at: Uint32, to: Uint32): CborValueType; +/** + * @internal + */ +export declare function bytesToFloat16(a: Uint8, b: Uint8): Float32; diff --git a/amplify/functions/deleteDocument/node_modules/@smithy/core/dist-types/ts3.4/submodules/cbor/cbor-encode.d.ts b/amplify/functions/deleteDocument/node_modules/@smithy/core/dist-types/ts3.4/submodules/cbor/cbor-encode.d.ts new file mode 100644 index 0000000..83218b5 --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@smithy/core/dist-types/ts3.4/submodules/cbor/cbor-encode.d.ts @@ -0,0 +1,9 @@ +/** + * @internal + */ +export declare function toUint8Array(): Uint8Array; +export declare function resize(size: number): void; +/** + * @param _input - JS data object. 
+ */ +export declare function encode(_input: any): void; diff --git a/amplify/functions/deleteDocument/node_modules/@smithy/core/dist-types/ts3.4/submodules/cbor/cbor-types.d.ts b/amplify/functions/deleteDocument/node_modules/@smithy/core/dist-types/ts3.4/submodules/cbor/cbor-types.d.ts new file mode 100644 index 0000000..e37a6ac --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@smithy/core/dist-types/ts3.4/submodules/cbor/cbor-types.d.ts @@ -0,0 +1,66 @@ +/// +export type CborItemType = undefined | boolean | number | bigint | [ + CborUnstructuredByteStringType, + Uint64 +] | string | CborTagType; +export type CborTagType = { + tag: Uint64 | number; + value: CborValueType; + [tagSymbol]: true; +}; +export type CborUnstructuredByteStringType = Uint8Array; +export type CborListType = Array; +export type CborMapType = Record; +export type CborCollectionType = CborMapType | CborListType; +export type CborValueType = CborItemType | CborCollectionType | any; +export type CborArgumentLength = 1 | 2 | 4 | 8; +export type CborArgumentLengthOffset = 1 | 2 | 3 | 5 | 9; +export type CborOffset = number; +export type Uint8 = number; +export type Uint32 = number; +export type Uint64 = bigint; +export type Float32 = number; +export type Int64 = bigint; +export type Float16Binary = number; +export type Float32Binary = number; +export type CborMajorType = typeof majorUint64 | typeof majorNegativeInt64 | typeof majorUnstructuredByteString | typeof majorUtf8String | typeof majorList | typeof majorMap | typeof majorTag | typeof majorSpecial; +export declare const majorUint64 = 0; +export declare const majorNegativeInt64 = 1; +export declare const majorUnstructuredByteString = 2; +export declare const majorUtf8String = 3; +export declare const majorList = 4; +export declare const majorMap = 5; +export declare const majorTag = 6; +export declare const majorSpecial = 7; +export declare const specialFalse = 20; +export declare const specialTrue = 21; +export declare const 
specialNull = 22; +export declare const specialUndefined = 23; +export declare const extendedOneByte = 24; +export declare const extendedFloat16 = 25; +export declare const extendedFloat32 = 26; +export declare const extendedFloat64 = 27; +export declare const minorIndefinite = 31; +export declare function alloc(size: number): Uint8Array | Buffer; +/** + * @public + * + * The presence of this symbol as an object key indicates it should be considered a tag + * for CBOR serialization purposes. + * + * The object must also have the properties "tag" and "value". + */ +export declare const tagSymbol: unique symbol; +/** + * @public + * Applies the tag symbol to the object. + */ +export declare function tag(data: { + tag: number | bigint; + value: any; + [tagSymbol]?: true; +}): { + tag: number | bigint; + value: any; + [tagSymbol]: true; +}; diff --git a/amplify/functions/deleteDocument/node_modules/@smithy/core/dist-types/ts3.4/submodules/cbor/cbor.d.ts b/amplify/functions/deleteDocument/node_modules/@smithy/core/dist-types/ts3.4/submodules/cbor/cbor.d.ts new file mode 100644 index 0000000..d317890 --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@smithy/core/dist-types/ts3.4/submodules/cbor/cbor.d.ts @@ -0,0 +1,26 @@ +/** + * This implementation is synchronous and only implements the parts of CBOR + * specification used by Smithy RPCv2 CBOR protocol. + * + * This cbor serde implementation is derived from AWS SDK for Go's implementation. + * @see https://github.com/aws/smithy-go/tree/main/encoding/cbor + * + * The cbor-x implementation was also instructional: + * @see https://github.com/kriszyp/cbor-x + */ +export declare const cbor: { + deserialize(payload: Uint8Array): any; + serialize(input: any): Uint8Array; + /** + * @public + * @param size - byte length to allocate. + * + * This may be used to garbage collect the CBOR + * shared encoding buffer space, + * e.g. 
resizeEncodingBuffer(0); + * + * This may also be used to pre-allocate more space for + * CBOR encoding, e.g. resizeEncodingBuffer(100_000_000); + */ + resizeEncodingBuffer(size: number): void; +}; diff --git a/amplify/functions/deleteDocument/node_modules/@smithy/core/dist-types/ts3.4/submodules/cbor/index.d.ts b/amplify/functions/deleteDocument/node_modules/@smithy/core/dist-types/ts3.4/submodules/cbor/index.d.ts new file mode 100644 index 0000000..63e2787 --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@smithy/core/dist-types/ts3.4/submodules/cbor/index.d.ts @@ -0,0 +1,3 @@ +export { cbor } from "./cbor"; +export * from "./parseCborBody"; +export { tagSymbol, tag } from "./cbor-types"; diff --git a/amplify/functions/deleteDocument/node_modules/@smithy/core/dist-types/ts3.4/submodules/cbor/parseCborBody.d.ts b/amplify/functions/deleteDocument/node_modules/@smithy/core/dist-types/ts3.4/submodules/cbor/parseCborBody.d.ts new file mode 100644 index 0000000..90676a2 --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@smithy/core/dist-types/ts3.4/submodules/cbor/parseCborBody.d.ts @@ -0,0 +1,31 @@ +import { HttpRequest as __HttpRequest } from "@smithy/protocol-http"; +import { HeaderBag as __HeaderBag, HttpResponse, SerdeContext as __SerdeContext, SerdeContext } from "@smithy/types"; +import { tag, tagSymbol } from "./cbor-types"; +/** + * @internal + */ +export declare const parseCborBody: (streamBody: any, context: SerdeContext) => any; +/** + * @internal + */ +export declare const dateToTag: (date: Date) => { + tag: number | bigint; + value: any; + [tagSymbol]: true; +}; +/** + * @internal + */ +export declare const parseCborErrorBody: (errorBody: any, context: SerdeContext) => Promise; +/** + * @internal + */ +export declare const loadSmithyRpcV2CborErrorCode: (output: HttpResponse, data: any) => string | undefined; +/** + * @internal + */ +export declare const checkCborResponse: (response: HttpResponse) => void; +/** + * @internal 
+ */ +export declare const buildHttpRpcRequest: (context: __SerdeContext, headers: __HeaderBag, path: string, resolvedHostname: string | undefined, body: any) => Promise<__HttpRequest>; diff --git a/amplify/functions/deleteDocument/node_modules/@smithy/core/dist-types/ts3.4/submodules/protocols/collect-stream-body.d.ts b/amplify/functions/deleteDocument/node_modules/@smithy/core/dist-types/ts3.4/submodules/protocols/collect-stream-body.d.ts new file mode 100644 index 0000000..9c5f471 --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@smithy/core/dist-types/ts3.4/submodules/protocols/collect-stream-body.d.ts @@ -0,0 +1,10 @@ +import { SerdeContext } from "@smithy/types"; +import { Uint8ArrayBlobAdapter } from "@smithy/util-stream"; +/** + * @internal + * + * Collect low-level response body stream to Uint8Array. + */ +export declare const collectBody: (streamBody: any, context: { + streamCollector: SerdeContext["streamCollector"]; +}) => Promise; diff --git a/amplify/functions/deleteDocument/node_modules/@smithy/core/dist-types/ts3.4/submodules/protocols/extended-encode-uri-component.d.ts b/amplify/functions/deleteDocument/node_modules/@smithy/core/dist-types/ts3.4/submodules/protocols/extended-encode-uri-component.d.ts new file mode 100644 index 0000000..98c3802 --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@smithy/core/dist-types/ts3.4/submodules/protocols/extended-encode-uri-component.d.ts @@ -0,0 +1,7 @@ +/** + * @internal + * + * Function that wraps encodeURIComponent to encode additional characters + * to fully adhere to RFC 3986. 
+ */ +export declare function extendedEncodeURIComponent(str: string): string; diff --git a/amplify/functions/deleteDocument/node_modules/@smithy/core/dist-types/ts3.4/submodules/protocols/index.d.ts b/amplify/functions/deleteDocument/node_modules/@smithy/core/dist-types/ts3.4/submodules/protocols/index.d.ts new file mode 100644 index 0000000..4ffc290 --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@smithy/core/dist-types/ts3.4/submodules/protocols/index.d.ts @@ -0,0 +1,4 @@ +export * from "./collect-stream-body"; +export * from "./extended-encode-uri-component"; +export * from "./requestBuilder"; +export * from "./resolve-path"; diff --git a/amplify/functions/deleteDocument/node_modules/@smithy/core/dist-types/ts3.4/submodules/protocols/requestBuilder.d.ts b/amplify/functions/deleteDocument/node_modules/@smithy/core/dist-types/ts3.4/submodules/protocols/requestBuilder.d.ts new file mode 100644 index 0000000..0449354 --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@smithy/core/dist-types/ts3.4/submodules/protocols/requestBuilder.d.ts @@ -0,0 +1,51 @@ +import { HttpRequest } from "@smithy/protocol-http"; +import { SerdeContext } from "@smithy/types"; +/** + * @internal + * used in code-generated serde. + */ +export declare function requestBuilder(input: any, context: SerdeContext): RequestBuilder; +/** + * @internal + */ +export declare class RequestBuilder { + private input; + private context; + private query; + private method; + private headers; + private path; + private body; + private hostname; + private resolvePathStack; + constructor(input: any, context: SerdeContext); + build(): Promise; + /** + * Brevity setter for "hostname". + */ + hn(hostname: string): this; + /** + * Brevity initial builder for "basepath". + */ + bp(uriLabel: string): this; + /** + * Brevity incremental builder for "path". 
+ */ + p(memberName: string, labelValueProvider: () => string | undefined, uriLabel: string, isGreedyLabel: boolean): this; + /** + * Brevity setter for "headers". + */ + h(headers: Record): this; + /** + * Brevity setter for "query". + */ + q(query: Record): this; + /** + * Brevity setter for "body". + */ + b(body: any): this; + /** + * Brevity setter for "method". + */ + m(method: string): this; +} diff --git a/amplify/functions/deleteDocument/node_modules/@smithy/core/dist-types/ts3.4/submodules/protocols/resolve-path.d.ts b/amplify/functions/deleteDocument/node_modules/@smithy/core/dist-types/ts3.4/submodules/protocols/resolve-path.d.ts new file mode 100644 index 0000000..4c4c443 --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@smithy/core/dist-types/ts3.4/submodules/protocols/resolve-path.d.ts @@ -0,0 +1,4 @@ +/** + * @internal + */ +export declare const resolvedPath: (resolvedPath: string, input: unknown, memberName: string, labelValueProvider: () => string | undefined, uriLabel: string, isGreedyLabel: boolean) => string; diff --git a/amplify/functions/deleteDocument/node_modules/@smithy/core/dist-types/ts3.4/submodules/serde/index.d.ts b/amplify/functions/deleteDocument/node_modules/@smithy/core/dist-types/ts3.4/submodules/serde/index.d.ts new file mode 100644 index 0000000..3e78075 --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@smithy/core/dist-types/ts3.4/submodules/serde/index.d.ts @@ -0,0 +1 @@ +export * from "./value/NumericValue"; diff --git a/amplify/functions/deleteDocument/node_modules/@smithy/core/dist-types/ts3.4/submodules/serde/value/NumericValue.d.ts b/amplify/functions/deleteDocument/node_modules/@smithy/core/dist-types/ts3.4/submodules/serde/value/NumericValue.d.ts new file mode 100644 index 0000000..00dd3b7 --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@smithy/core/dist-types/ts3.4/submodules/serde/value/NumericValue.d.ts @@ -0,0 +1,31 @@ +/** + * Types which may be represented by 
{@link NumericValue}. + * + * There is currently only one option, because BigInteger and Long should + * use JS BigInt directly, and all other numeric types can be contained in JS Number. + * + * @public + */ +export type NumericType = "bigDecimal"; +/** + * Serialization container for Smithy simple types that do not have a + * direct JavaScript runtime representation. + * + * This container does not perform numeric mathematical operations. + * It is a container for discerning a value's true type. + * + * It allows storage of numeric types not representable in JS without + * making a decision on what numeric library to use. + * + * @public + */ +export declare class NumericValue { + readonly string: string; + readonly type: NumericType; + constructor(string: string, type: NumericType); +} +/** + * Serde shortcut. + * @internal + */ +export declare function nv(string: string): NumericValue; diff --git a/amplify/functions/deleteDocument/node_modules/@smithy/core/dist-types/ts3.4/util-identity-and-auth/DefaultIdentityProviderConfig.d.ts b/amplify/functions/deleteDocument/node_modules/@smithy/core/dist-types/ts3.4/util-identity-and-auth/DefaultIdentityProviderConfig.d.ts new file mode 100644 index 0000000..7e80659 --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@smithy/core/dist-types/ts3.4/util-identity-and-auth/DefaultIdentityProviderConfig.d.ts @@ -0,0 +1,15 @@ +import { HttpAuthSchemeId, Identity, IdentityProvider, IdentityProviderConfig } from "@smithy/types"; +/** + * Default implementation of IdentityProviderConfig + * @internal + */ +export declare class DefaultIdentityProviderConfig implements IdentityProviderConfig { + private authSchemes; + /** + * Creates an IdentityProviderConfig with a record of scheme IDs to identity providers. 
+ * + * @param config scheme IDs and identity providers to configure + */ + constructor(config: Record | undefined>); + getIdentityProvider(schemeId: HttpAuthSchemeId): IdentityProvider | undefined; +} diff --git a/amplify/functions/deleteDocument/node_modules/@smithy/core/dist-types/ts3.4/util-identity-and-auth/httpAuthSchemes/httpApiKeyAuth.d.ts b/amplify/functions/deleteDocument/node_modules/@smithy/core/dist-types/ts3.4/util-identity-and-auth/httpAuthSchemes/httpApiKeyAuth.d.ts new file mode 100644 index 0000000..3981a1b --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@smithy/core/dist-types/ts3.4/util-identity-and-auth/httpAuthSchemes/httpApiKeyAuth.d.ts @@ -0,0 +1,8 @@ +import { HttpRequest } from "@smithy/protocol-http"; +import { ApiKeyIdentity, HttpRequest as IHttpRequest, HttpSigner } from "@smithy/types"; +/** + * @internal + */ +export declare class HttpApiKeyAuthSigner implements HttpSigner { + sign(httpRequest: HttpRequest, identity: ApiKeyIdentity, signingProperties: Record): Promise; +} diff --git a/amplify/functions/deleteDocument/node_modules/@smithy/core/dist-types/ts3.4/util-identity-and-auth/httpAuthSchemes/httpBearerAuth.d.ts b/amplify/functions/deleteDocument/node_modules/@smithy/core/dist-types/ts3.4/util-identity-and-auth/httpAuthSchemes/httpBearerAuth.d.ts new file mode 100644 index 0000000..9c83b1c --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@smithy/core/dist-types/ts3.4/util-identity-and-auth/httpAuthSchemes/httpBearerAuth.d.ts @@ -0,0 +1,8 @@ +import { HttpRequest } from "@smithy/protocol-http"; +import { HttpRequest as IHttpRequest, HttpSigner, TokenIdentity } from "@smithy/types"; +/** + * @internal + */ +export declare class HttpBearerAuthSigner implements HttpSigner { + sign(httpRequest: HttpRequest, identity: TokenIdentity, signingProperties: Record): Promise; +} diff --git 
a/amplify/functions/deleteDocument/node_modules/@smithy/core/dist-types/ts3.4/util-identity-and-auth/httpAuthSchemes/index.d.ts b/amplify/functions/deleteDocument/node_modules/@smithy/core/dist-types/ts3.4/util-identity-and-auth/httpAuthSchemes/index.d.ts new file mode 100644 index 0000000..aa5caa8 --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@smithy/core/dist-types/ts3.4/util-identity-and-auth/httpAuthSchemes/index.d.ts @@ -0,0 +1,3 @@ +export * from "./httpApiKeyAuth"; +export * from "./httpBearerAuth"; +export * from "./noAuth"; diff --git a/amplify/functions/deleteDocument/node_modules/@smithy/core/dist-types/ts3.4/util-identity-and-auth/httpAuthSchemes/noAuth.d.ts b/amplify/functions/deleteDocument/node_modules/@smithy/core/dist-types/ts3.4/util-identity-and-auth/httpAuthSchemes/noAuth.d.ts new file mode 100644 index 0000000..0d7b612 --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@smithy/core/dist-types/ts3.4/util-identity-and-auth/httpAuthSchemes/noAuth.d.ts @@ -0,0 +1,8 @@ +import { HttpRequest, HttpSigner, Identity } from "@smithy/types"; +/** + * Signer for the synthetic @smithy.api#noAuth auth scheme. 
+ * @internal + */ +export declare class NoAuthSigner implements HttpSigner { + sign(httpRequest: HttpRequest, identity: Identity, signingProperties: Record): Promise; +} diff --git a/amplify/functions/deleteDocument/node_modules/@smithy/core/dist-types/ts3.4/util-identity-and-auth/index.d.ts b/amplify/functions/deleteDocument/node_modules/@smithy/core/dist-types/ts3.4/util-identity-and-auth/index.d.ts new file mode 100644 index 0000000..626ade9 --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@smithy/core/dist-types/ts3.4/util-identity-and-auth/index.d.ts @@ -0,0 +1,3 @@ +export * from "./DefaultIdentityProviderConfig"; +export * from "./httpAuthSchemes"; +export * from "./memoizeIdentityProvider"; diff --git a/amplify/functions/deleteDocument/node_modules/@smithy/core/dist-types/ts3.4/util-identity-and-auth/memoizeIdentityProvider.d.ts b/amplify/functions/deleteDocument/node_modules/@smithy/core/dist-types/ts3.4/util-identity-and-auth/memoizeIdentityProvider.d.ts new file mode 100644 index 0000000..270aa71 --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@smithy/core/dist-types/ts3.4/util-identity-and-auth/memoizeIdentityProvider.d.ts @@ -0,0 +1,30 @@ +import { Identity, IdentityProvider } from "@smithy/types"; +/** + * @internal + */ +export declare const createIsIdentityExpiredFunction: (expirationMs: number) => (identity: Identity) => boolean; +/** + * @internal + * This may need to be configurable in the future, but for now it is defaulted to 5min. 
+ */ +export declare const EXPIRATION_MS = 300000; +/** + * @internal + */ +export declare const isIdentityExpired: (identity: Identity) => boolean; +/** + * @internal + */ +export declare const doesIdentityRequireRefresh: (identity: Identity) => boolean; +/** + * @internal + */ +export interface MemoizedIdentityProvider { + (options?: Record & { + forceRefresh?: boolean; + }): Promise; +} +/** + * @internal + */ +export declare const memoizeIdentityProvider: (provider: IdentityT | IdentityProvider | undefined, isExpired: (resolved: Identity) => boolean, requiresRefresh: (resolved: Identity) => boolean) => MemoizedIdentityProvider | undefined; diff --git a/amplify/functions/deleteDocument/node_modules/@smithy/core/dist-types/util-identity-and-auth/DefaultIdentityProviderConfig.d.ts b/amplify/functions/deleteDocument/node_modules/@smithy/core/dist-types/util-identity-and-auth/DefaultIdentityProviderConfig.d.ts new file mode 100644 index 0000000..0b39204 --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@smithy/core/dist-types/util-identity-and-auth/DefaultIdentityProviderConfig.d.ts @@ -0,0 +1,15 @@ +import { HttpAuthSchemeId, Identity, IdentityProvider, IdentityProviderConfig } from "@smithy/types"; +/** + * Default implementation of IdentityProviderConfig + * @internal + */ +export declare class DefaultIdentityProviderConfig implements IdentityProviderConfig { + private authSchemes; + /** + * Creates an IdentityProviderConfig with a record of scheme IDs to identity providers. 
+ * + * @param config scheme IDs and identity providers to configure + */ + constructor(config: Record | undefined>); + getIdentityProvider(schemeId: HttpAuthSchemeId): IdentityProvider | undefined; +} diff --git a/amplify/functions/deleteDocument/node_modules/@smithy/core/dist-types/util-identity-and-auth/httpAuthSchemes/httpApiKeyAuth.d.ts b/amplify/functions/deleteDocument/node_modules/@smithy/core/dist-types/util-identity-and-auth/httpAuthSchemes/httpApiKeyAuth.d.ts new file mode 100644 index 0000000..63de4bc --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@smithy/core/dist-types/util-identity-and-auth/httpAuthSchemes/httpApiKeyAuth.d.ts @@ -0,0 +1,8 @@ +import { HttpRequest } from "@smithy/protocol-http"; +import { ApiKeyIdentity, HttpRequest as IHttpRequest, HttpSigner } from "@smithy/types"; +/** + * @internal + */ +export declare class HttpApiKeyAuthSigner implements HttpSigner { + sign(httpRequest: HttpRequest, identity: ApiKeyIdentity, signingProperties: Record): Promise; +} diff --git a/amplify/functions/deleteDocument/node_modules/@smithy/core/dist-types/util-identity-and-auth/httpAuthSchemes/httpBearerAuth.d.ts b/amplify/functions/deleteDocument/node_modules/@smithy/core/dist-types/util-identity-and-auth/httpAuthSchemes/httpBearerAuth.d.ts new file mode 100644 index 0000000..0e31e7d --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@smithy/core/dist-types/util-identity-and-auth/httpAuthSchemes/httpBearerAuth.d.ts @@ -0,0 +1,8 @@ +import { HttpRequest } from "@smithy/protocol-http"; +import { HttpRequest as IHttpRequest, HttpSigner, TokenIdentity } from "@smithy/types"; +/** + * @internal + */ +export declare class HttpBearerAuthSigner implements HttpSigner { + sign(httpRequest: HttpRequest, identity: TokenIdentity, signingProperties: Record): Promise; +} diff --git a/amplify/functions/deleteDocument/node_modules/@smithy/core/dist-types/util-identity-and-auth/httpAuthSchemes/index.d.ts 
b/amplify/functions/deleteDocument/node_modules/@smithy/core/dist-types/util-identity-and-auth/httpAuthSchemes/index.d.ts new file mode 100644 index 0000000..9d240fe --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@smithy/core/dist-types/util-identity-and-auth/httpAuthSchemes/index.d.ts @@ -0,0 +1,3 @@ +export * from "./httpApiKeyAuth"; +export * from "./httpBearerAuth"; +export * from "./noAuth"; diff --git a/amplify/functions/deleteDocument/node_modules/@smithy/core/dist-types/util-identity-and-auth/httpAuthSchemes/noAuth.d.ts b/amplify/functions/deleteDocument/node_modules/@smithy/core/dist-types/util-identity-and-auth/httpAuthSchemes/noAuth.d.ts new file mode 100644 index 0000000..fc8d6b1 --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@smithy/core/dist-types/util-identity-and-auth/httpAuthSchemes/noAuth.d.ts @@ -0,0 +1,8 @@ +import { HttpRequest, HttpSigner, Identity } from "@smithy/types"; +/** + * Signer for the synthetic @smithy.api#noAuth auth scheme. 
+ * @internal + */ +export declare class NoAuthSigner implements HttpSigner { + sign(httpRequest: HttpRequest, identity: Identity, signingProperties: Record): Promise; +} diff --git a/amplify/functions/deleteDocument/node_modules/@smithy/core/dist-types/util-identity-and-auth/index.d.ts b/amplify/functions/deleteDocument/node_modules/@smithy/core/dist-types/util-identity-and-auth/index.d.ts new file mode 100644 index 0000000..87ba64b --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@smithy/core/dist-types/util-identity-and-auth/index.d.ts @@ -0,0 +1,3 @@ +export * from "./DefaultIdentityProviderConfig"; +export * from "./httpAuthSchemes"; +export * from "./memoizeIdentityProvider"; diff --git a/amplify/functions/deleteDocument/node_modules/@smithy/core/dist-types/util-identity-and-auth/memoizeIdentityProvider.d.ts b/amplify/functions/deleteDocument/node_modules/@smithy/core/dist-types/util-identity-and-auth/memoizeIdentityProvider.d.ts new file mode 100644 index 0000000..67b3be8 --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@smithy/core/dist-types/util-identity-and-auth/memoizeIdentityProvider.d.ts @@ -0,0 +1,30 @@ +import { Identity, IdentityProvider } from "@smithy/types"; +/** + * @internal + */ +export declare const createIsIdentityExpiredFunction: (expirationMs: number) => (identity: Identity) => boolean; +/** + * @internal + * This may need to be configurable in the future, but for now it is defaulted to 5min. 
+ */ +export declare const EXPIRATION_MS = 300000; +/** + * @internal + */ +export declare const isIdentityExpired: (identity: Identity) => boolean; +/** + * @internal + */ +export declare const doesIdentityRequireRefresh: (identity: Identity) => boolean; +/** + * @internal + */ +export interface MemoizedIdentityProvider { + (options?: Record & { + forceRefresh?: boolean; + }): Promise; +} +/** + * @internal + */ +export declare const memoizeIdentityProvider: (provider: IdentityT | IdentityProvider | undefined, isExpired: (resolved: Identity) => boolean, requiresRefresh: (resolved: Identity) => boolean) => MemoizedIdentityProvider | undefined; diff --git a/amplify/functions/deleteDocument/node_modules/@smithy/core/package.json b/amplify/functions/deleteDocument/node_modules/@smithy/core/package.json new file mode 100644 index 0000000..d3776e9 --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@smithy/core/package.json @@ -0,0 +1,112 @@ +{ + "name": "@smithy/core", + "version": "3.3.1", + "scripts": { + "build": "yarn lint && concurrently 'yarn:build:cjs' 'yarn:build:es' 'yarn:build:types && yarn build:types:downlevel'", + "build:cjs": "node ../../scripts/inline core", + "build:es": "yarn g:tsc -p tsconfig.es.json", + "build:types": "yarn g:tsc -p tsconfig.types.json", + "build:types:downlevel": "rimraf dist-types/ts3.4 && downlevel-dts dist-types dist-types/ts3.4", + "stage-release": "rimraf ./.release && yarn pack && mkdir ./.release && tar zxvf ./package.tgz --directory ./.release && rm ./package.tgz", + "clean": "rimraf ./dist-* && rimraf *.tsbuildinfo || exit 0", + "lint": "npx eslint -c ../../.eslintrc.js \"src/**/*.ts\" --fix && node ./scripts/lint", + "format": "prettier --config ../../prettier.config.js --ignore-path ../../.prettierignore --write \"**/*.{ts,md,json}\"", + "test": "yarn g:vitest run", + "test:cbor:perf": "node ./scripts/cbor-perf.mjs", + "test:watch": "yarn g:vitest watch" + }, + "main": "./dist-cjs/index.js", + "module": 
"./dist-es/index.js", + "types": "./dist-types/index.d.ts", + "exports": { + ".": { + "module": "./dist-es/index.js", + "node": "./dist-cjs/index.js", + "import": "./dist-es/index.js", + "require": "./dist-cjs/index.js", + "types": "./dist-types/index.d.ts" + }, + "./package.json": { + "module": "./package.json", + "node": "./package.json", + "import": "./package.json", + "require": "./package.json" + }, + "./cbor": { + "module": "./dist-es/submodules/cbor/index.js", + "node": "./dist-cjs/submodules/cbor/index.js", + "import": "./dist-es/submodules/cbor/index.js", + "require": "./dist-cjs/submodules/cbor/index.js", + "types": "./dist-types/submodules/cbor/index.d.ts" + }, + "./protocols": { + "module": "./dist-es/submodules/protocols/index.js", + "node": "./dist-cjs/submodules/protocols/index.js", + "import": "./dist-es/submodules/protocols/index.js", + "require": "./dist-cjs/submodules/protocols/index.js", + "types": "./dist-types/submodules/protocols/index.d.ts" + }, + "./serde": { + "module": "./dist-es/submodules/serde/index.js", + "node": "./dist-cjs/submodules/serde/index.js", + "import": "./dist-es/submodules/serde/index.js", + "require": "./dist-cjs/submodules/serde/index.js", + "types": "./dist-types/submodules/serde/index.d.ts" + } + }, + "author": { + "name": "AWS Smithy Team", + "email": "", + "url": "https://smithy.io" + }, + "license": "Apache-2.0", + "dependencies": { + "@smithy/middleware-serde": "^4.0.3", + "@smithy/protocol-http": "^5.1.0", + "@smithy/types": "^4.2.0", + "@smithy/util-body-length-browser": "^4.0.0", + "@smithy/util-middleware": "^4.0.2", + "@smithy/util-stream": "^4.2.0", + "@smithy/util-utf8": "^4.0.0", + "tslib": "^2.6.2" + }, + "engines": { + "node": ">=18.0.0" + }, + "typesVersions": { + "<4.0": { + "dist-types/*": [ + "dist-types/ts3.4/*" + ] + } + }, + "files": [ + "./cbor.d.ts", + "./cbor.js", + "./protocols.d.ts", + "./protocols.js", + "./serde.d.ts", + "./serde.js", + "dist-*/**" + ], + "homepage": 
"https://github.com/smithy-lang/smithy-typescript/tree/main/packages/core", + "repository": { + "type": "git", + "url": "https://github.com/smithy-lang/smithy-typescript.git", + "directory": "packages/core" + }, + "devDependencies": { + "@types/node": "^18.11.9", + "concurrently": "7.0.0", + "downlevel-dts": "0.10.1", + "json-bigint": "^1.0.0", + "rimraf": "3.0.2", + "typedoc": "0.23.23" + }, + "typedoc": { + "entryPoint": "src/index.ts" + }, + "publishConfig": { + "directory": ".release/package" + } +} \ No newline at end of file diff --git a/amplify/functions/deleteDocument/node_modules/@smithy/core/protocols.d.ts b/amplify/functions/deleteDocument/node_modules/@smithy/core/protocols.d.ts new file mode 100644 index 0000000..e0afd4e --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@smithy/core/protocols.d.ts @@ -0,0 +1,7 @@ +/** + * Do not edit: + * This is a compatibility redirect for contexts that do not understand package.json exports field. + */ +declare module "@smithy/core/protocols" { + export * from "@smithy/core/dist-types/submodules/protocols/index.d"; +} diff --git a/amplify/functions/deleteDocument/node_modules/@smithy/core/protocols.js b/amplify/functions/deleteDocument/node_modules/@smithy/core/protocols.js new file mode 100644 index 0000000..43e0c42 --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@smithy/core/protocols.js @@ -0,0 +1,6 @@ + +/** + * Do not edit: + * This is a compatibility redirect for contexts that do not understand package.json exports field. 
+ */ +module.exports = require("./dist-cjs/submodules/protocols/index.js"); diff --git a/amplify/functions/deleteDocument/node_modules/@smithy/core/serde.d.ts b/amplify/functions/deleteDocument/node_modules/@smithy/core/serde.d.ts new file mode 100644 index 0000000..9906bb0 --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@smithy/core/serde.d.ts @@ -0,0 +1,7 @@ +/** + * Do not edit: + * This is a compatibility redirect for contexts that do not understand package.json exports field. + */ +declare module "@smithy/core/serde" { + export * from "@smithy/core/dist-types/submodules/serde/index.d"; +} diff --git a/amplify/functions/deleteDocument/node_modules/@smithy/core/serde.js b/amplify/functions/deleteDocument/node_modules/@smithy/core/serde.js new file mode 100644 index 0000000..b2d727f --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@smithy/core/serde.js @@ -0,0 +1,6 @@ + +/** + * Do not edit: + * This is a compatibility redirect for contexts that do not understand package.json exports field. + */ +module.exports = require("./dist-cjs/submodules/serde/index.js"); diff --git a/amplify/functions/deleteDocument/node_modules/@smithy/credential-provider-imds/LICENSE b/amplify/functions/deleteDocument/node_modules/@smithy/credential-provider-imds/LICENSE new file mode 100644 index 0000000..7b6491b --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@smithy/credential-provider-imds/LICENSE @@ -0,0 +1,201 @@ +Apache License + Version 2.0, January 2004 + http://www.apache.org/licenses/ + + TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION + + 1. Definitions. + + "License" shall mean the terms and conditions for use, reproduction, + and distribution as defined by Sections 1 through 9 of this document. + + "Licensor" shall mean the copyright owner or entity authorized by + the copyright owner that is granting the License. 
+ + "Legal Entity" shall mean the union of the acting entity and all + other entities that control, are controlled by, or are under common + control with that entity. For the purposes of this definition, + "control" means (i) the power, direct or indirect, to cause the + direction or management of such entity, whether by contract or + otherwise, or (ii) ownership of fifty percent (50%) or more of the + outstanding shares, or (iii) beneficial ownership of such entity. + + "You" (or "Your") shall mean an individual or Legal Entity + exercising permissions granted by this License. + + "Source" form shall mean the preferred form for making modifications, + including but not limited to software source code, documentation + source, and configuration files. + + "Object" form shall mean any form resulting from mechanical + transformation or translation of a Source form, including but + not limited to compiled object code, generated documentation, + and conversions to other media types. + + "Work" shall mean the work of authorship, whether in Source or + Object form, made available under the License, as indicated by a + copyright notice that is included in or attached to the work + (an example is provided in the Appendix below). + + "Derivative Works" shall mean any work, whether in Source or Object + form, that is based on (or derived from) the Work and for which the + editorial revisions, annotations, elaborations, or other modifications + represent, as a whole, an original work of authorship. For the purposes + of this License, Derivative Works shall not include works that remain + separable from, or merely link (or bind by name) to the interfaces of, + the Work and Derivative Works thereof. 
+ + "Contribution" shall mean any work of authorship, including + the original version of the Work and any modifications or additions + to that Work or Derivative Works thereof, that is intentionally + submitted to Licensor for inclusion in the Work by the copyright owner + or by an individual or Legal Entity authorized to submit on behalf of + the copyright owner. For the purposes of this definition, "submitted" + means any form of electronic, verbal, or written communication sent + to the Licensor or its representatives, including but not limited to + communication on electronic mailing lists, source code control systems, + and issue tracking systems that are managed by, or on behalf of, the + Licensor for the purpose of discussing and improving the Work, but + excluding communication that is conspicuously marked or otherwise + designated in writing by the copyright owner as "Not a Contribution." + + "Contributor" shall mean Licensor and any individual or Legal Entity + on behalf of whom a Contribution has been received by Licensor and + subsequently incorporated within the Work. + + 2. Grant of Copyright License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + copyright license to reproduce, prepare Derivative Works of, + publicly display, publicly perform, sublicense, and distribute the + Work and such Derivative Works in Source or Object form. + + 3. Grant of Patent License. 
Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + (except as stated in this section) patent license to make, have made, + use, offer to sell, sell, import, and otherwise transfer the Work, + where such license applies only to those patent claims licensable + by such Contributor that are necessarily infringed by their + Contribution(s) alone or by combination of their Contribution(s) + with the Work to which such Contribution(s) was submitted. If You + institute patent litigation against any entity (including a + cross-claim or counterclaim in a lawsuit) alleging that the Work + or a Contribution incorporated within the Work constitutes direct + or contributory patent infringement, then any patent licenses + granted to You under this License for that Work shall terminate + as of the date such litigation is filed. + + 4. Redistribution. You may reproduce and distribute copies of the + Work or Derivative Works thereof in any medium, with or without + modifications, and in Source or Object form, provided that You + meet the following conditions: + + (a) You must give any other recipients of the Work or + Derivative Works a copy of this License; and + + (b) You must cause any modified files to carry prominent notices + stating that You changed the files; and + + (c) You must retain, in the Source form of any Derivative Works + that You distribute, all copyright, patent, trademark, and + attribution notices from the Source form of the Work, + excluding those notices that do not pertain to any part of + the Derivative Works; and + + (d) If the Work includes a "NOTICE" text file as part of its + distribution, then any Derivative Works that You distribute must + include a readable copy of the attribution notices contained + within such NOTICE file, excluding those notices that do not + pertain to any part of the Derivative Works, in at least one + of 
the following places: within a NOTICE text file distributed + as part of the Derivative Works; within the Source form or + documentation, if provided along with the Derivative Works; or, + within a display generated by the Derivative Works, if and + wherever such third-party notices normally appear. The contents + of the NOTICE file are for informational purposes only and + do not modify the License. You may add Your own attribution + notices within Derivative Works that You distribute, alongside + or as an addendum to the NOTICE text from the Work, provided + that such additional attribution notices cannot be construed + as modifying the License. + + You may add Your own copyright statement to Your modifications and + may provide additional or different license terms and conditions + for use, reproduction, or distribution of Your modifications, or + for any such Derivative Works as a whole, provided Your use, + reproduction, and distribution of the Work otherwise complies with + the conditions stated in this License. + + 5. Submission of Contributions. Unless You explicitly state otherwise, + any Contribution intentionally submitted for inclusion in the Work + by You to the Licensor shall be under the terms and conditions of + this License, without any additional terms or conditions. + Notwithstanding the above, nothing herein shall supersede or modify + the terms of any separate license agreement you may have executed + with Licensor regarding such Contributions. + + 6. Trademarks. This License does not grant permission to use the trade + names, trademarks, service marks, or product names of the Licensor, + except as required for reasonable and customary use in describing the + origin of the Work and reproducing the content of the NOTICE file. + + 7. Disclaimer of Warranty. 
Unless required by applicable law or + agreed to in writing, Licensor provides the Work (and each + Contributor provides its Contributions) on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or + implied, including, without limitation, any warranties or conditions + of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A + PARTICULAR PURPOSE. You are solely responsible for determining the + appropriateness of using or redistributing the Work and assume any + risks associated with Your exercise of permissions under this License. + + 8. Limitation of Liability. In no event and under no legal theory, + whether in tort (including negligence), contract, or otherwise, + unless required by applicable law (such as deliberate and grossly + negligent acts) or agreed to in writing, shall any Contributor be + liable to You for damages, including any direct, indirect, special, + incidental, or consequential damages of any character arising as a + result of this License or out of the use or inability to use the + Work (including but not limited to damages for loss of goodwill, + work stoppage, computer failure or malfunction, or any and all + other commercial damages or losses), even if such Contributor + has been advised of the possibility of such damages. + + 9. Accepting Warranty or Additional Liability. While redistributing + the Work or Derivative Works thereof, You may choose to offer, + and charge a fee for, acceptance of support, warranty, indemnity, + or other liability obligations and/or rights consistent with this + License. However, in accepting such obligations, You may act only + on Your own behalf and on Your sole responsibility, not on behalf + of any other Contributor, and only if You agree to indemnify, + defend, and hold each Contributor harmless for any liability + incurred by, or claims asserted against, such Contributor by reason + of your accepting any such warranty or additional liability. 
+ + END OF TERMS AND CONDITIONS + + APPENDIX: How to apply the Apache License to your work. + + To apply the Apache License to your work, attach the following + boilerplate notice, with the fields enclosed by brackets "{}" + replaced with your own identifying information. (Don't include + the brackets!) The text should be enclosed in the appropriate + comment syntax for the file format. We also recommend that a + file or class name and description of purpose be included on the + same "printed page" as the copyright notice for easier + identification within third-party archives. + + Copyright 2018-2020 Amazon.com, Inc. or its affiliates. All Rights Reserved. + + Licensed under the Apache License, Version 2.0 (the "License"); + you may not use this file except in compliance with the License. + You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + + Unless required by applicable law or agreed to in writing, software + distributed under the License is distributed on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + See the License for the specific language governing permissions and + limitations under the License. 
\ No newline at end of file diff --git a/amplify/functions/deleteDocument/node_modules/@smithy/credential-provider-imds/README.md b/amplify/functions/deleteDocument/node_modules/@smithy/credential-provider-imds/README.md new file mode 100644 index 0000000..9a8f8a5 --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@smithy/credential-provider-imds/README.md @@ -0,0 +1,11 @@ +# @smithy/credential-provider-imds + +[![NPM version](https://img.shields.io/npm/v/@smithy/credential-provider-imds/latest.svg)](https://www.npmjs.com/package/@smithy/credential-provider-imds) +[![NPM downloads](https://img.shields.io/npm/dm/@smithy/credential-provider-imds.svg)](https://www.npmjs.com/package/@smithy/credential-provider-imds) + +> An internal package + +## Usage + +You probably shouldn't, at least directly. Please use [@smithy/credential-providers](https://www.npmjs.com/package/@smithy/credential-providers) +instead. diff --git a/amplify/functions/deleteDocument/node_modules/@smithy/credential-provider-imds/dist-cjs/config/Endpoint.js b/amplify/functions/deleteDocument/node_modules/@smithy/credential-provider-imds/dist-cjs/config/Endpoint.js new file mode 100644 index 0000000..0440577 --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@smithy/credential-provider-imds/dist-cjs/config/Endpoint.js @@ -0,0 +1 @@ +module.exports = require("../index.js"); \ No newline at end of file diff --git a/amplify/functions/deleteDocument/node_modules/@smithy/credential-provider-imds/dist-cjs/config/EndpointConfigOptions.js b/amplify/functions/deleteDocument/node_modules/@smithy/credential-provider-imds/dist-cjs/config/EndpointConfigOptions.js new file mode 100644 index 0000000..0440577 --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@smithy/credential-provider-imds/dist-cjs/config/EndpointConfigOptions.js @@ -0,0 +1 @@ +module.exports = require("../index.js"); \ No newline at end of file diff --git 
a/amplify/functions/deleteDocument/node_modules/@smithy/credential-provider-imds/dist-cjs/config/EndpointMode.js b/amplify/functions/deleteDocument/node_modules/@smithy/credential-provider-imds/dist-cjs/config/EndpointMode.js new file mode 100644 index 0000000..0440577 --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@smithy/credential-provider-imds/dist-cjs/config/EndpointMode.js @@ -0,0 +1 @@ +module.exports = require("../index.js"); \ No newline at end of file diff --git a/amplify/functions/deleteDocument/node_modules/@smithy/credential-provider-imds/dist-cjs/config/EndpointModeConfigOptions.js b/amplify/functions/deleteDocument/node_modules/@smithy/credential-provider-imds/dist-cjs/config/EndpointModeConfigOptions.js new file mode 100644 index 0000000..0440577 --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@smithy/credential-provider-imds/dist-cjs/config/EndpointModeConfigOptions.js @@ -0,0 +1 @@ +module.exports = require("../index.js"); \ No newline at end of file diff --git a/amplify/functions/deleteDocument/node_modules/@smithy/credential-provider-imds/dist-cjs/error/InstanceMetadataV1FallbackError.js b/amplify/functions/deleteDocument/node_modules/@smithy/credential-provider-imds/dist-cjs/error/InstanceMetadataV1FallbackError.js new file mode 100644 index 0000000..0440577 --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@smithy/credential-provider-imds/dist-cjs/error/InstanceMetadataV1FallbackError.js @@ -0,0 +1 @@ +module.exports = require("../index.js"); \ No newline at end of file diff --git a/amplify/functions/deleteDocument/node_modules/@smithy/credential-provider-imds/dist-cjs/fromContainerMetadata.js b/amplify/functions/deleteDocument/node_modules/@smithy/credential-provider-imds/dist-cjs/fromContainerMetadata.js new file mode 100644 index 0000000..532e610 --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@smithy/credential-provider-imds/dist-cjs/fromContainerMetadata.js @@ -0,0 +1 
@@ +module.exports = require("./index.js"); \ No newline at end of file diff --git a/amplify/functions/deleteDocument/node_modules/@smithy/credential-provider-imds/dist-cjs/fromInstanceMetadata.js b/amplify/functions/deleteDocument/node_modules/@smithy/credential-provider-imds/dist-cjs/fromInstanceMetadata.js new file mode 100644 index 0000000..532e610 --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@smithy/credential-provider-imds/dist-cjs/fromInstanceMetadata.js @@ -0,0 +1 @@ +module.exports = require("./index.js"); \ No newline at end of file diff --git a/amplify/functions/deleteDocument/node_modules/@smithy/credential-provider-imds/dist-cjs/index.js b/amplify/functions/deleteDocument/node_modules/@smithy/credential-provider-imds/dist-cjs/index.js new file mode 100644 index 0000000..21b3423 --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@smithy/credential-provider-imds/dist-cjs/index.js @@ -0,0 +1,445 @@ +var __defProp = Object.defineProperty; +var __getOwnPropDesc = Object.getOwnPropertyDescriptor; +var __getOwnPropNames = Object.getOwnPropertyNames; +var __hasOwnProp = Object.prototype.hasOwnProperty; +var __name = (target, value) => __defProp(target, "name", { value, configurable: true }); +var __export = (target, all) => { + for (var name in all) + __defProp(target, name, { get: all[name], enumerable: true }); +}; +var __copyProps = (to, from, except, desc) => { + if (from && typeof from === "object" || typeof from === "function") { + for (let key of __getOwnPropNames(from)) + if (!__hasOwnProp.call(to, key) && key !== except) + __defProp(to, key, { get: () => from[key], enumerable: !(desc = __getOwnPropDesc(from, key)) || desc.enumerable }); + } + return to; +}; +var __toCommonJS = (mod) => __copyProps(__defProp({}, "__esModule", { value: true }), mod); + +// src/index.ts +var src_exports = {}; +__export(src_exports, { + DEFAULT_MAX_RETRIES: () => DEFAULT_MAX_RETRIES, + DEFAULT_TIMEOUT: () => DEFAULT_TIMEOUT, + 
ENV_CMDS_AUTH_TOKEN: () => ENV_CMDS_AUTH_TOKEN, + ENV_CMDS_FULL_URI: () => ENV_CMDS_FULL_URI, + ENV_CMDS_RELATIVE_URI: () => ENV_CMDS_RELATIVE_URI, + Endpoint: () => Endpoint, + fromContainerMetadata: () => fromContainerMetadata, + fromInstanceMetadata: () => fromInstanceMetadata, + getInstanceMetadataEndpoint: () => getInstanceMetadataEndpoint, + httpRequest: () => httpRequest, + providerConfigFromInit: () => providerConfigFromInit +}); +module.exports = __toCommonJS(src_exports); + +// src/fromContainerMetadata.ts + +var import_url = require("url"); + +// src/remoteProvider/httpRequest.ts +var import_property_provider = require("@smithy/property-provider"); +var import_buffer = require("buffer"); +var import_http = require("http"); +function httpRequest(options) { + return new Promise((resolve, reject) => { + const req = (0, import_http.request)({ + method: "GET", + ...options, + // Node.js http module doesn't accept hostname with square brackets + // Refs: https://github.com/nodejs/node/issues/39738 + hostname: options.hostname?.replace(/^\[(.+)\]$/, "$1") + }); + req.on("error", (err) => { + reject(Object.assign(new import_property_provider.ProviderError("Unable to connect to instance metadata service"), err)); + req.destroy(); + }); + req.on("timeout", () => { + reject(new import_property_provider.ProviderError("TimeoutError from instance metadata service")); + req.destroy(); + }); + req.on("response", (res) => { + const { statusCode = 400 } = res; + if (statusCode < 200 || 300 <= statusCode) { + reject( + Object.assign(new import_property_provider.ProviderError("Error response received from instance metadata service"), { statusCode }) + ); + req.destroy(); + } + const chunks = []; + res.on("data", (chunk) => { + chunks.push(chunk); + }); + res.on("end", () => { + resolve(import_buffer.Buffer.concat(chunks)); + req.destroy(); + }); + }); + req.end(); + }); +} +__name(httpRequest, "httpRequest"); + +// src/remoteProvider/ImdsCredentials.ts +var 
isImdsCredentials = /* @__PURE__ */ __name((arg) => Boolean(arg) && typeof arg === "object" && typeof arg.AccessKeyId === "string" && typeof arg.SecretAccessKey === "string" && typeof arg.Token === "string" && typeof arg.Expiration === "string", "isImdsCredentials"); +var fromImdsCredentials = /* @__PURE__ */ __name((creds) => ({ + accessKeyId: creds.AccessKeyId, + secretAccessKey: creds.SecretAccessKey, + sessionToken: creds.Token, + expiration: new Date(creds.Expiration), + ...creds.AccountId && { accountId: creds.AccountId } +}), "fromImdsCredentials"); + +// src/remoteProvider/RemoteProviderInit.ts +var DEFAULT_TIMEOUT = 1e3; +var DEFAULT_MAX_RETRIES = 0; +var providerConfigFromInit = /* @__PURE__ */ __name(({ + maxRetries = DEFAULT_MAX_RETRIES, + timeout = DEFAULT_TIMEOUT +}) => ({ maxRetries, timeout }), "providerConfigFromInit"); + +// src/remoteProvider/retry.ts +var retry = /* @__PURE__ */ __name((toRetry, maxRetries) => { + let promise = toRetry(); + for (let i = 0; i < maxRetries; i++) { + promise = promise.catch(toRetry); + } + return promise; +}, "retry"); + +// src/fromContainerMetadata.ts +var ENV_CMDS_FULL_URI = "AWS_CONTAINER_CREDENTIALS_FULL_URI"; +var ENV_CMDS_RELATIVE_URI = "AWS_CONTAINER_CREDENTIALS_RELATIVE_URI"; +var ENV_CMDS_AUTH_TOKEN = "AWS_CONTAINER_AUTHORIZATION_TOKEN"; +var fromContainerMetadata = /* @__PURE__ */ __name((init = {}) => { + const { timeout, maxRetries } = providerConfigFromInit(init); + return () => retry(async () => { + const requestOptions = await getCmdsUri({ logger: init.logger }); + const credsResponse = JSON.parse(await requestFromEcsImds(timeout, requestOptions)); + if (!isImdsCredentials(credsResponse)) { + throw new import_property_provider.CredentialsProviderError("Invalid response received from instance metadata service.", { + logger: init.logger + }); + } + return fromImdsCredentials(credsResponse); + }, maxRetries); +}, "fromContainerMetadata"); +var requestFromEcsImds = /* @__PURE__ */ __name(async (timeout, 
options) => { + if (process.env[ENV_CMDS_AUTH_TOKEN]) { + options.headers = { + ...options.headers, + Authorization: process.env[ENV_CMDS_AUTH_TOKEN] + }; + } + const buffer = await httpRequest({ + ...options, + timeout + }); + return buffer.toString(); +}, "requestFromEcsImds"); +var CMDS_IP = "169.254.170.2"; +var GREENGRASS_HOSTS = { + localhost: true, + "127.0.0.1": true +}; +var GREENGRASS_PROTOCOLS = { + "http:": true, + "https:": true +}; +var getCmdsUri = /* @__PURE__ */ __name(async ({ logger }) => { + if (process.env[ENV_CMDS_RELATIVE_URI]) { + return { + hostname: CMDS_IP, + path: process.env[ENV_CMDS_RELATIVE_URI] + }; + } + if (process.env[ENV_CMDS_FULL_URI]) { + const parsed = (0, import_url.parse)(process.env[ENV_CMDS_FULL_URI]); + if (!parsed.hostname || !(parsed.hostname in GREENGRASS_HOSTS)) { + throw new import_property_provider.CredentialsProviderError(`${parsed.hostname} is not a valid container metadata service hostname`, { + tryNextLink: false, + logger + }); + } + if (!parsed.protocol || !(parsed.protocol in GREENGRASS_PROTOCOLS)) { + throw new import_property_provider.CredentialsProviderError(`${parsed.protocol} is not a valid container metadata service protocol`, { + tryNextLink: false, + logger + }); + } + return { + ...parsed, + port: parsed.port ? 
parseInt(parsed.port, 10) : void 0 + }; + } + throw new import_property_provider.CredentialsProviderError( + `The container metadata credential provider cannot be used unless the ${ENV_CMDS_RELATIVE_URI} or ${ENV_CMDS_FULL_URI} environment variable is set`, + { + tryNextLink: false, + logger + } + ); +}, "getCmdsUri"); + +// src/fromInstanceMetadata.ts + + + +// src/error/InstanceMetadataV1FallbackError.ts + +var InstanceMetadataV1FallbackError = class _InstanceMetadataV1FallbackError extends import_property_provider.CredentialsProviderError { + constructor(message, tryNextLink = true) { + super(message, tryNextLink); + this.tryNextLink = tryNextLink; + this.name = "InstanceMetadataV1FallbackError"; + Object.setPrototypeOf(this, _InstanceMetadataV1FallbackError.prototype); + } + static { + __name(this, "InstanceMetadataV1FallbackError"); + } +}; + +// src/utils/getInstanceMetadataEndpoint.ts +var import_node_config_provider = require("@smithy/node-config-provider"); +var import_url_parser = require("@smithy/url-parser"); + +// src/config/Endpoint.ts +var Endpoint = /* @__PURE__ */ ((Endpoint2) => { + Endpoint2["IPv4"] = "http://169.254.169.254"; + Endpoint2["IPv6"] = "http://[fd00:ec2::254]"; + return Endpoint2; +})(Endpoint || {}); + +// src/config/EndpointConfigOptions.ts +var ENV_ENDPOINT_NAME = "AWS_EC2_METADATA_SERVICE_ENDPOINT"; +var CONFIG_ENDPOINT_NAME = "ec2_metadata_service_endpoint"; +var ENDPOINT_CONFIG_OPTIONS = { + environmentVariableSelector: (env) => env[ENV_ENDPOINT_NAME], + configFileSelector: (profile) => profile[CONFIG_ENDPOINT_NAME], + default: void 0 +}; + +// src/config/EndpointMode.ts +var EndpointMode = /* @__PURE__ */ ((EndpointMode2) => { + EndpointMode2["IPv4"] = "IPv4"; + EndpointMode2["IPv6"] = "IPv6"; + return EndpointMode2; +})(EndpointMode || {}); + +// src/config/EndpointModeConfigOptions.ts +var ENV_ENDPOINT_MODE_NAME = "AWS_EC2_METADATA_SERVICE_ENDPOINT_MODE"; +var CONFIG_ENDPOINT_MODE_NAME = "ec2_metadata_service_endpoint_mode"; 
+var ENDPOINT_MODE_CONFIG_OPTIONS = { + environmentVariableSelector: (env) => env[ENV_ENDPOINT_MODE_NAME], + configFileSelector: (profile) => profile[CONFIG_ENDPOINT_MODE_NAME], + default: "IPv4" /* IPv4 */ +}; + +// src/utils/getInstanceMetadataEndpoint.ts +var getInstanceMetadataEndpoint = /* @__PURE__ */ __name(async () => (0, import_url_parser.parseUrl)(await getFromEndpointConfig() || await getFromEndpointModeConfig()), "getInstanceMetadataEndpoint"); +var getFromEndpointConfig = /* @__PURE__ */ __name(async () => (0, import_node_config_provider.loadConfig)(ENDPOINT_CONFIG_OPTIONS)(), "getFromEndpointConfig"); +var getFromEndpointModeConfig = /* @__PURE__ */ __name(async () => { + const endpointMode = await (0, import_node_config_provider.loadConfig)(ENDPOINT_MODE_CONFIG_OPTIONS)(); + switch (endpointMode) { + case "IPv4" /* IPv4 */: + return "http://169.254.169.254" /* IPv4 */; + case "IPv6" /* IPv6 */: + return "http://[fd00:ec2::254]" /* IPv6 */; + default: + throw new Error(`Unsupported endpoint mode: ${endpointMode}. Select from ${Object.values(EndpointMode)}`); + } +}, "getFromEndpointModeConfig"); + +// src/utils/getExtendedInstanceMetadataCredentials.ts +var STATIC_STABILITY_REFRESH_INTERVAL_SECONDS = 5 * 60; +var STATIC_STABILITY_REFRESH_INTERVAL_JITTER_WINDOW_SECONDS = 5 * 60; +var STATIC_STABILITY_DOC_URL = "https://docs.aws.amazon.com/sdkref/latest/guide/feature-static-credentials.html"; +var getExtendedInstanceMetadataCredentials = /* @__PURE__ */ __name((credentials, logger) => { + const refreshInterval = STATIC_STABILITY_REFRESH_INTERVAL_SECONDS + Math.floor(Math.random() * STATIC_STABILITY_REFRESH_INTERVAL_JITTER_WINDOW_SECONDS); + const newExpiration = new Date(Date.now() + refreshInterval * 1e3); + logger.warn( + `Attempting credential expiration extension due to a credential service availability issue. A refresh of these credentials will be attempted after ${new Date(newExpiration)}. 
+For more information, please visit: ` + STATIC_STABILITY_DOC_URL + ); + const originalExpiration = credentials.originalExpiration ?? credentials.expiration; + return { + ...credentials, + ...originalExpiration ? { originalExpiration } : {}, + expiration: newExpiration + }; +}, "getExtendedInstanceMetadataCredentials"); + +// src/utils/staticStabilityProvider.ts +var staticStabilityProvider = /* @__PURE__ */ __name((provider, options = {}) => { + const logger = options?.logger || console; + let pastCredentials; + return async () => { + let credentials; + try { + credentials = await provider(); + if (credentials.expiration && credentials.expiration.getTime() < Date.now()) { + credentials = getExtendedInstanceMetadataCredentials(credentials, logger); + } + } catch (e) { + if (pastCredentials) { + logger.warn("Credential renew failed: ", e); + credentials = getExtendedInstanceMetadataCredentials(pastCredentials, logger); + } else { + throw e; + } + } + pastCredentials = credentials; + return credentials; + }; +}, "staticStabilityProvider"); + +// src/fromInstanceMetadata.ts +var IMDS_PATH = "/latest/meta-data/iam/security-credentials/"; +var IMDS_TOKEN_PATH = "/latest/api/token"; +var AWS_EC2_METADATA_V1_DISABLED = "AWS_EC2_METADATA_V1_DISABLED"; +var PROFILE_AWS_EC2_METADATA_V1_DISABLED = "ec2_metadata_v1_disabled"; +var X_AWS_EC2_METADATA_TOKEN = "x-aws-ec2-metadata-token"; +var fromInstanceMetadata = /* @__PURE__ */ __name((init = {}) => staticStabilityProvider(getInstanceMetadataProvider(init), { logger: init.logger }), "fromInstanceMetadata"); +var getInstanceMetadataProvider = /* @__PURE__ */ __name((init = {}) => { + let disableFetchToken = false; + const { logger, profile } = init; + const { timeout, maxRetries } = providerConfigFromInit(init); + const getCredentials = /* @__PURE__ */ __name(async (maxRetries2, options) => { + const isImdsV1Fallback = disableFetchToken || options.headers?.[X_AWS_EC2_METADATA_TOKEN] == null; + if (isImdsV1Fallback) { + let 
fallbackBlockedFromProfile = false; + let fallbackBlockedFromProcessEnv = false; + const configValue = await (0, import_node_config_provider.loadConfig)( + { + environmentVariableSelector: (env) => { + const envValue = env[AWS_EC2_METADATA_V1_DISABLED]; + fallbackBlockedFromProcessEnv = !!envValue && envValue !== "false"; + if (envValue === void 0) { + throw new import_property_provider.CredentialsProviderError( + `${AWS_EC2_METADATA_V1_DISABLED} not set in env, checking config file next.`, + { logger: init.logger } + ); + } + return fallbackBlockedFromProcessEnv; + }, + configFileSelector: (profile2) => { + const profileValue = profile2[PROFILE_AWS_EC2_METADATA_V1_DISABLED]; + fallbackBlockedFromProfile = !!profileValue && profileValue !== "false"; + return fallbackBlockedFromProfile; + }, + default: false + }, + { + profile + } + )(); + if (init.ec2MetadataV1Disabled || configValue) { + const causes = []; + if (init.ec2MetadataV1Disabled) + causes.push("credential provider initialization (runtime option ec2MetadataV1Disabled)"); + if (fallbackBlockedFromProfile) + causes.push(`config file profile (${PROFILE_AWS_EC2_METADATA_V1_DISABLED})`); + if (fallbackBlockedFromProcessEnv) + causes.push(`process environment variable (${AWS_EC2_METADATA_V1_DISABLED})`); + throw new InstanceMetadataV1FallbackError( + `AWS EC2 Metadata v1 fallback has been blocked by AWS SDK configuration in the following: [${causes.join( + ", " + )}].` + ); + } + } + const imdsProfile = (await retry(async () => { + let profile2; + try { + profile2 = await getProfile(options); + } catch (err) { + if (err.statusCode === 401) { + disableFetchToken = false; + } + throw err; + } + return profile2; + }, maxRetries2)).trim(); + return retry(async () => { + let creds; + try { + creds = await getCredentialsFromProfile(imdsProfile, options, init); + } catch (err) { + if (err.statusCode === 401) { + disableFetchToken = false; + } + throw err; + } + return creds; + }, maxRetries2); + }, "getCredentials"); 
+ return async () => { + const endpoint = await getInstanceMetadataEndpoint(); + if (disableFetchToken) { + logger?.debug("AWS SDK Instance Metadata", "using v1 fallback (no token fetch)"); + return getCredentials(maxRetries, { ...endpoint, timeout }); + } else { + let token; + try { + token = (await getMetadataToken({ ...endpoint, timeout })).toString(); + } catch (error) { + if (error?.statusCode === 400) { + throw Object.assign(error, { + message: "EC2 Metadata token request returned error" + }); + } else if (error.message === "TimeoutError" || [403, 404, 405].includes(error.statusCode)) { + disableFetchToken = true; + } + logger?.debug("AWS SDK Instance Metadata", "using v1 fallback (initial)"); + return getCredentials(maxRetries, { ...endpoint, timeout }); + } + return getCredentials(maxRetries, { + ...endpoint, + headers: { + [X_AWS_EC2_METADATA_TOKEN]: token + }, + timeout + }); + } + }; +}, "getInstanceMetadataProvider"); +var getMetadataToken = /* @__PURE__ */ __name(async (options) => httpRequest({ + ...options, + path: IMDS_TOKEN_PATH, + method: "PUT", + headers: { + "x-aws-ec2-metadata-token-ttl-seconds": "21600" + } +}), "getMetadataToken"); +var getProfile = /* @__PURE__ */ __name(async (options) => (await httpRequest({ ...options, path: IMDS_PATH })).toString(), "getProfile"); +var getCredentialsFromProfile = /* @__PURE__ */ __name(async (profile, options, init) => { + const credentialsResponse = JSON.parse( + (await httpRequest({ + ...options, + path: IMDS_PATH + profile + })).toString() + ); + if (!isImdsCredentials(credentialsResponse)) { + throw new import_property_provider.CredentialsProviderError("Invalid response received from instance metadata service.", { + logger: init.logger + }); + } + return fromImdsCredentials(credentialsResponse); +}, "getCredentialsFromProfile"); +// Annotate the CommonJS export names for ESM import in node: + +0 && (module.exports = { + httpRequest, + getInstanceMetadataEndpoint, + Endpoint, + ENV_CMDS_FULL_URI, + 
ENV_CMDS_RELATIVE_URI, + ENV_CMDS_AUTH_TOKEN, + fromContainerMetadata, + fromInstanceMetadata, + DEFAULT_TIMEOUT, + DEFAULT_MAX_RETRIES, + providerConfigFromInit +}); + diff --git a/amplify/functions/deleteDocument/node_modules/@smithy/credential-provider-imds/dist-cjs/remoteProvider/ImdsCredentials.js b/amplify/functions/deleteDocument/node_modules/@smithy/credential-provider-imds/dist-cjs/remoteProvider/ImdsCredentials.js new file mode 100644 index 0000000..0440577 --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@smithy/credential-provider-imds/dist-cjs/remoteProvider/ImdsCredentials.js @@ -0,0 +1 @@ +module.exports = require("../index.js"); \ No newline at end of file diff --git a/amplify/functions/deleteDocument/node_modules/@smithy/credential-provider-imds/dist-cjs/remoteProvider/RemoteProviderInit.js b/amplify/functions/deleteDocument/node_modules/@smithy/credential-provider-imds/dist-cjs/remoteProvider/RemoteProviderInit.js new file mode 100644 index 0000000..0440577 --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@smithy/credential-provider-imds/dist-cjs/remoteProvider/RemoteProviderInit.js @@ -0,0 +1 @@ +module.exports = require("../index.js"); \ No newline at end of file diff --git a/amplify/functions/deleteDocument/node_modules/@smithy/credential-provider-imds/dist-cjs/remoteProvider/httpRequest.js b/amplify/functions/deleteDocument/node_modules/@smithy/credential-provider-imds/dist-cjs/remoteProvider/httpRequest.js new file mode 100644 index 0000000..0440577 --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@smithy/credential-provider-imds/dist-cjs/remoteProvider/httpRequest.js @@ -0,0 +1 @@ +module.exports = require("../index.js"); \ No newline at end of file diff --git a/amplify/functions/deleteDocument/node_modules/@smithy/credential-provider-imds/dist-cjs/remoteProvider/index.js b/amplify/functions/deleteDocument/node_modules/@smithy/credential-provider-imds/dist-cjs/remoteProvider/index.js new 
file mode 100644 index 0000000..0440577 --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@smithy/credential-provider-imds/dist-cjs/remoteProvider/index.js @@ -0,0 +1 @@ +module.exports = require("../index.js"); \ No newline at end of file diff --git a/amplify/functions/deleteDocument/node_modules/@smithy/credential-provider-imds/dist-cjs/remoteProvider/retry.js b/amplify/functions/deleteDocument/node_modules/@smithy/credential-provider-imds/dist-cjs/remoteProvider/retry.js new file mode 100644 index 0000000..0440577 --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@smithy/credential-provider-imds/dist-cjs/remoteProvider/retry.js @@ -0,0 +1 @@ +module.exports = require("../index.js"); \ No newline at end of file diff --git a/amplify/functions/deleteDocument/node_modules/@smithy/credential-provider-imds/dist-cjs/types.js b/amplify/functions/deleteDocument/node_modules/@smithy/credential-provider-imds/dist-cjs/types.js new file mode 100644 index 0000000..532e610 --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@smithy/credential-provider-imds/dist-cjs/types.js @@ -0,0 +1 @@ +module.exports = require("./index.js"); \ No newline at end of file diff --git a/amplify/functions/deleteDocument/node_modules/@smithy/credential-provider-imds/dist-cjs/utils/getExtendedInstanceMetadataCredentials.js b/amplify/functions/deleteDocument/node_modules/@smithy/credential-provider-imds/dist-cjs/utils/getExtendedInstanceMetadataCredentials.js new file mode 100644 index 0000000..0440577 --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@smithy/credential-provider-imds/dist-cjs/utils/getExtendedInstanceMetadataCredentials.js @@ -0,0 +1 @@ +module.exports = require("../index.js"); \ No newline at end of file diff --git a/amplify/functions/deleteDocument/node_modules/@smithy/credential-provider-imds/dist-cjs/utils/getInstanceMetadataEndpoint.js 
b/amplify/functions/deleteDocument/node_modules/@smithy/credential-provider-imds/dist-cjs/utils/getInstanceMetadataEndpoint.js new file mode 100644 index 0000000..0440577 --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@smithy/credential-provider-imds/dist-cjs/utils/getInstanceMetadataEndpoint.js @@ -0,0 +1 @@ +module.exports = require("../index.js"); \ No newline at end of file diff --git a/amplify/functions/deleteDocument/node_modules/@smithy/credential-provider-imds/dist-cjs/utils/staticStabilityProvider.js b/amplify/functions/deleteDocument/node_modules/@smithy/credential-provider-imds/dist-cjs/utils/staticStabilityProvider.js new file mode 100644 index 0000000..0440577 --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@smithy/credential-provider-imds/dist-cjs/utils/staticStabilityProvider.js @@ -0,0 +1 @@ +module.exports = require("../index.js"); \ No newline at end of file diff --git a/amplify/functions/deleteDocument/node_modules/@smithy/credential-provider-imds/dist-es/config/Endpoint.js b/amplify/functions/deleteDocument/node_modules/@smithy/credential-provider-imds/dist-es/config/Endpoint.js new file mode 100644 index 0000000..b088eb0 --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@smithy/credential-provider-imds/dist-es/config/Endpoint.js @@ -0,0 +1,5 @@ +export var Endpoint; +(function (Endpoint) { + Endpoint["IPv4"] = "http://169.254.169.254"; + Endpoint["IPv6"] = "http://[fd00:ec2::254]"; +})(Endpoint || (Endpoint = {})); diff --git a/amplify/functions/deleteDocument/node_modules/@smithy/credential-provider-imds/dist-es/config/EndpointConfigOptions.js b/amplify/functions/deleteDocument/node_modules/@smithy/credential-provider-imds/dist-es/config/EndpointConfigOptions.js new file mode 100644 index 0000000..f043de9 --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@smithy/credential-provider-imds/dist-es/config/EndpointConfigOptions.js @@ -0,0 +1,7 @@ +export const ENV_ENDPOINT_NAME = 
"AWS_EC2_METADATA_SERVICE_ENDPOINT"; +export const CONFIG_ENDPOINT_NAME = "ec2_metadata_service_endpoint"; +export const ENDPOINT_CONFIG_OPTIONS = { + environmentVariableSelector: (env) => env[ENV_ENDPOINT_NAME], + configFileSelector: (profile) => profile[CONFIG_ENDPOINT_NAME], + default: undefined, +}; diff --git a/amplify/functions/deleteDocument/node_modules/@smithy/credential-provider-imds/dist-es/config/EndpointMode.js b/amplify/functions/deleteDocument/node_modules/@smithy/credential-provider-imds/dist-es/config/EndpointMode.js new file mode 100644 index 0000000..bace819 --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@smithy/credential-provider-imds/dist-es/config/EndpointMode.js @@ -0,0 +1,5 @@ +export var EndpointMode; +(function (EndpointMode) { + EndpointMode["IPv4"] = "IPv4"; + EndpointMode["IPv6"] = "IPv6"; +})(EndpointMode || (EndpointMode = {})); diff --git a/amplify/functions/deleteDocument/node_modules/@smithy/credential-provider-imds/dist-es/config/EndpointModeConfigOptions.js b/amplify/functions/deleteDocument/node_modules/@smithy/credential-provider-imds/dist-es/config/EndpointModeConfigOptions.js new file mode 100644 index 0000000..15b19d0 --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@smithy/credential-provider-imds/dist-es/config/EndpointModeConfigOptions.js @@ -0,0 +1,8 @@ +import { EndpointMode } from "./EndpointMode"; +export const ENV_ENDPOINT_MODE_NAME = "AWS_EC2_METADATA_SERVICE_ENDPOINT_MODE"; +export const CONFIG_ENDPOINT_MODE_NAME = "ec2_metadata_service_endpoint_mode"; +export const ENDPOINT_MODE_CONFIG_OPTIONS = { + environmentVariableSelector: (env) => env[ENV_ENDPOINT_MODE_NAME], + configFileSelector: (profile) => profile[CONFIG_ENDPOINT_MODE_NAME], + default: EndpointMode.IPv4, +}; diff --git a/amplify/functions/deleteDocument/node_modules/@smithy/credential-provider-imds/dist-es/error/InstanceMetadataV1FallbackError.js 
b/amplify/functions/deleteDocument/node_modules/@smithy/credential-provider-imds/dist-es/error/InstanceMetadataV1FallbackError.js new file mode 100644 index 0000000..29aaf50 --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@smithy/credential-provider-imds/dist-es/error/InstanceMetadataV1FallbackError.js @@ -0,0 +1,9 @@ +import { CredentialsProviderError } from "@smithy/property-provider"; +export class InstanceMetadataV1FallbackError extends CredentialsProviderError { + constructor(message, tryNextLink = true) { + super(message, tryNextLink); + this.tryNextLink = tryNextLink; + this.name = "InstanceMetadataV1FallbackError"; + Object.setPrototypeOf(this, InstanceMetadataV1FallbackError.prototype); + } +} diff --git a/amplify/functions/deleteDocument/node_modules/@smithy/credential-provider-imds/dist-es/fromContainerMetadata.js b/amplify/functions/deleteDocument/node_modules/@smithy/credential-provider-imds/dist-es/fromContainerMetadata.js new file mode 100644 index 0000000..4340e3e --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@smithy/credential-provider-imds/dist-es/fromContainerMetadata.js @@ -0,0 +1,77 @@ +import { CredentialsProviderError } from "@smithy/property-provider"; +import { parse } from "url"; +import { httpRequest } from "./remoteProvider/httpRequest"; +import { fromImdsCredentials, isImdsCredentials } from "./remoteProvider/ImdsCredentials"; +import { providerConfigFromInit } from "./remoteProvider/RemoteProviderInit"; +import { retry } from "./remoteProvider/retry"; +export const ENV_CMDS_FULL_URI = "AWS_CONTAINER_CREDENTIALS_FULL_URI"; +export const ENV_CMDS_RELATIVE_URI = "AWS_CONTAINER_CREDENTIALS_RELATIVE_URI"; +export const ENV_CMDS_AUTH_TOKEN = "AWS_CONTAINER_AUTHORIZATION_TOKEN"; +export const fromContainerMetadata = (init = {}) => { + const { timeout, maxRetries } = providerConfigFromInit(init); + return () => retry(async () => { + const requestOptions = await getCmdsUri({ logger: init.logger }); + const 
credsResponse = JSON.parse(await requestFromEcsImds(timeout, requestOptions)); + if (!isImdsCredentials(credsResponse)) { + throw new CredentialsProviderError("Invalid response received from instance metadata service.", { + logger: init.logger, + }); + } + return fromImdsCredentials(credsResponse); + }, maxRetries); +}; +const requestFromEcsImds = async (timeout, options) => { + if (process.env[ENV_CMDS_AUTH_TOKEN]) { + options.headers = { + ...options.headers, + Authorization: process.env[ENV_CMDS_AUTH_TOKEN], + }; + } + const buffer = await httpRequest({ + ...options, + timeout, + }); + return buffer.toString(); +}; +const CMDS_IP = "169.254.170.2"; +const GREENGRASS_HOSTS = { + localhost: true, + "127.0.0.1": true, +}; +const GREENGRASS_PROTOCOLS = { + "http:": true, + "https:": true, +}; +const getCmdsUri = async ({ logger }) => { + if (process.env[ENV_CMDS_RELATIVE_URI]) { + return { + hostname: CMDS_IP, + path: process.env[ENV_CMDS_RELATIVE_URI], + }; + } + if (process.env[ENV_CMDS_FULL_URI]) { + const parsed = parse(process.env[ENV_CMDS_FULL_URI]); + if (!parsed.hostname || !(parsed.hostname in GREENGRASS_HOSTS)) { + throw new CredentialsProviderError(`${parsed.hostname} is not a valid container metadata service hostname`, { + tryNextLink: false, + logger, + }); + } + if (!parsed.protocol || !(parsed.protocol in GREENGRASS_PROTOCOLS)) { + throw new CredentialsProviderError(`${parsed.protocol} is not a valid container metadata service protocol`, { + tryNextLink: false, + logger, + }); + } + return { + ...parsed, + port: parsed.port ? 
parseInt(parsed.port, 10) : undefined, + }; + } + throw new CredentialsProviderError("The container metadata credential provider cannot be used unless" + + ` the ${ENV_CMDS_RELATIVE_URI} or ${ENV_CMDS_FULL_URI} environment` + + " variable is set", { + tryNextLink: false, + logger, + }); +}; diff --git a/amplify/functions/deleteDocument/node_modules/@smithy/credential-provider-imds/dist-es/fromInstanceMetadata.js b/amplify/functions/deleteDocument/node_modules/@smithy/credential-provider-imds/dist-es/fromInstanceMetadata.js new file mode 100644 index 0000000..24ecbfd --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@smithy/credential-provider-imds/dist-es/fromInstanceMetadata.js @@ -0,0 +1,134 @@ +import { loadConfig } from "@smithy/node-config-provider"; +import { CredentialsProviderError } from "@smithy/property-provider"; +import { InstanceMetadataV1FallbackError } from "./error/InstanceMetadataV1FallbackError"; +import { httpRequest } from "./remoteProvider/httpRequest"; +import { fromImdsCredentials, isImdsCredentials } from "./remoteProvider/ImdsCredentials"; +import { providerConfigFromInit } from "./remoteProvider/RemoteProviderInit"; +import { retry } from "./remoteProvider/retry"; +import { getInstanceMetadataEndpoint } from "./utils/getInstanceMetadataEndpoint"; +import { staticStabilityProvider } from "./utils/staticStabilityProvider"; +const IMDS_PATH = "/latest/meta-data/iam/security-credentials/"; +const IMDS_TOKEN_PATH = "/latest/api/token"; +const AWS_EC2_METADATA_V1_DISABLED = "AWS_EC2_METADATA_V1_DISABLED"; +const PROFILE_AWS_EC2_METADATA_V1_DISABLED = "ec2_metadata_v1_disabled"; +const X_AWS_EC2_METADATA_TOKEN = "x-aws-ec2-metadata-token"; +export const fromInstanceMetadata = (init = {}) => staticStabilityProvider(getInstanceMetadataProvider(init), { logger: init.logger }); +const getInstanceMetadataProvider = (init = {}) => { + let disableFetchToken = false; + const { logger, profile } = init; + const { timeout, maxRetries } = 
providerConfigFromInit(init); + const getCredentials = async (maxRetries, options) => { + const isImdsV1Fallback = disableFetchToken || options.headers?.[X_AWS_EC2_METADATA_TOKEN] == null; + if (isImdsV1Fallback) { + let fallbackBlockedFromProfile = false; + let fallbackBlockedFromProcessEnv = false; + const configValue = await loadConfig({ + environmentVariableSelector: (env) => { + const envValue = env[AWS_EC2_METADATA_V1_DISABLED]; + fallbackBlockedFromProcessEnv = !!envValue && envValue !== "false"; + if (envValue === undefined) { + throw new CredentialsProviderError(`${AWS_EC2_METADATA_V1_DISABLED} not set in env, checking config file next.`, { logger: init.logger }); + } + return fallbackBlockedFromProcessEnv; + }, + configFileSelector: (profile) => { + const profileValue = profile[PROFILE_AWS_EC2_METADATA_V1_DISABLED]; + fallbackBlockedFromProfile = !!profileValue && profileValue !== "false"; + return fallbackBlockedFromProfile; + }, + default: false, + }, { + profile, + })(); + if (init.ec2MetadataV1Disabled || configValue) { + const causes = []; + if (init.ec2MetadataV1Disabled) + causes.push("credential provider initialization (runtime option ec2MetadataV1Disabled)"); + if (fallbackBlockedFromProfile) + causes.push(`config file profile (${PROFILE_AWS_EC2_METADATA_V1_DISABLED})`); + if (fallbackBlockedFromProcessEnv) + causes.push(`process environment variable (${AWS_EC2_METADATA_V1_DISABLED})`); + throw new InstanceMetadataV1FallbackError(`AWS EC2 Metadata v1 fallback has been blocked by AWS SDK configuration in the following: [${causes.join(", ")}].`); + } + } + const imdsProfile = (await retry(async () => { + let profile; + try { + profile = await getProfile(options); + } + catch (err) { + if (err.statusCode === 401) { + disableFetchToken = false; + } + throw err; + } + return profile; + }, maxRetries)).trim(); + return retry(async () => { + let creds; + try { + creds = await getCredentialsFromProfile(imdsProfile, options, init); + } + catch (err) { + 
if (err.statusCode === 401) { + disableFetchToken = false; + } + throw err; + } + return creds; + }, maxRetries); + }; + return async () => { + const endpoint = await getInstanceMetadataEndpoint(); + if (disableFetchToken) { + logger?.debug("AWS SDK Instance Metadata", "using v1 fallback (no token fetch)"); + return getCredentials(maxRetries, { ...endpoint, timeout }); + } + else { + let token; + try { + token = (await getMetadataToken({ ...endpoint, timeout })).toString(); + } + catch (error) { + if (error?.statusCode === 400) { + throw Object.assign(error, { + message: "EC2 Metadata token request returned error", + }); + } + else if (error.message === "TimeoutError" || [403, 404, 405].includes(error.statusCode)) { + disableFetchToken = true; + } + logger?.debug("AWS SDK Instance Metadata", "using v1 fallback (initial)"); + return getCredentials(maxRetries, { ...endpoint, timeout }); + } + return getCredentials(maxRetries, { + ...endpoint, + headers: { + [X_AWS_EC2_METADATA_TOKEN]: token, + }, + timeout, + }); + } + }; +}; +const getMetadataToken = async (options) => httpRequest({ + ...options, + path: IMDS_TOKEN_PATH, + method: "PUT", + headers: { + "x-aws-ec2-metadata-token-ttl-seconds": "21600", + }, +}); +const getProfile = async (options) => (await httpRequest({ ...options, path: IMDS_PATH })).toString(); +const getCredentialsFromProfile = async (profile, options, init) => { + const credentialsResponse = JSON.parse((await httpRequest({ + ...options, + path: IMDS_PATH + profile, + })).toString()); + if (!isImdsCredentials(credentialsResponse)) { + throw new CredentialsProviderError("Invalid response received from instance metadata service.", { + logger: init.logger, + }); + } + return fromImdsCredentials(credentialsResponse); +}; diff --git a/amplify/functions/deleteDocument/node_modules/@smithy/credential-provider-imds/dist-es/index.js b/amplify/functions/deleteDocument/node_modules/@smithy/credential-provider-imds/dist-es/index.js new file mode 100644 index 
0000000..5362760 --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@smithy/credential-provider-imds/dist-es/index.js @@ -0,0 +1,7 @@ +export * from "./fromContainerMetadata"; +export * from "./fromInstanceMetadata"; +export * from "./remoteProvider/RemoteProviderInit"; +export * from "./types"; +export { httpRequest } from "./remoteProvider/httpRequest"; +export { getInstanceMetadataEndpoint } from "./utils/getInstanceMetadataEndpoint"; +export { Endpoint } from "./config/Endpoint"; diff --git a/amplify/functions/deleteDocument/node_modules/@smithy/credential-provider-imds/dist-es/remoteProvider/ImdsCredentials.js b/amplify/functions/deleteDocument/node_modules/@smithy/credential-provider-imds/dist-es/remoteProvider/ImdsCredentials.js new file mode 100644 index 0000000..c559c4f --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@smithy/credential-provider-imds/dist-es/remoteProvider/ImdsCredentials.js @@ -0,0 +1,13 @@ +export const isImdsCredentials = (arg) => Boolean(arg) && + typeof arg === "object" && + typeof arg.AccessKeyId === "string" && + typeof arg.SecretAccessKey === "string" && + typeof arg.Token === "string" && + typeof arg.Expiration === "string"; +export const fromImdsCredentials = (creds) => ({ + accessKeyId: creds.AccessKeyId, + secretAccessKey: creds.SecretAccessKey, + sessionToken: creds.Token, + expiration: new Date(creds.Expiration), + ...(creds.AccountId && { accountId: creds.AccountId }), +}); diff --git a/amplify/functions/deleteDocument/node_modules/@smithy/credential-provider-imds/dist-es/remoteProvider/RemoteProviderInit.js b/amplify/functions/deleteDocument/node_modules/@smithy/credential-provider-imds/dist-es/remoteProvider/RemoteProviderInit.js new file mode 100644 index 0000000..39ace38 --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@smithy/credential-provider-imds/dist-es/remoteProvider/RemoteProviderInit.js @@ -0,0 +1,3 @@ +export const DEFAULT_TIMEOUT = 1000; +export const 
DEFAULT_MAX_RETRIES = 0; +export const providerConfigFromInit = ({ maxRetries = DEFAULT_MAX_RETRIES, timeout = DEFAULT_TIMEOUT, }) => ({ maxRetries, timeout }); diff --git a/amplify/functions/deleteDocument/node_modules/@smithy/credential-provider-imds/dist-es/remoteProvider/httpRequest.js b/amplify/functions/deleteDocument/node_modules/@smithy/credential-provider-imds/dist-es/remoteProvider/httpRequest.js new file mode 100644 index 0000000..91742d0 --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@smithy/credential-provider-imds/dist-es/remoteProvider/httpRequest.js @@ -0,0 +1,36 @@ +import { ProviderError } from "@smithy/property-provider"; +import { Buffer } from "buffer"; +import { request } from "http"; +export function httpRequest(options) { + return new Promise((resolve, reject) => { + const req = request({ + method: "GET", + ...options, + hostname: options.hostname?.replace(/^\[(.+)\]$/, "$1"), + }); + req.on("error", (err) => { + reject(Object.assign(new ProviderError("Unable to connect to instance metadata service"), err)); + req.destroy(); + }); + req.on("timeout", () => { + reject(new ProviderError("TimeoutError from instance metadata service")); + req.destroy(); + }); + req.on("response", (res) => { + const { statusCode = 400 } = res; + if (statusCode < 200 || 300 <= statusCode) { + reject(Object.assign(new ProviderError("Error response received from instance metadata service"), { statusCode })); + req.destroy(); + } + const chunks = []; + res.on("data", (chunk) => { + chunks.push(chunk); + }); + res.on("end", () => { + resolve(Buffer.concat(chunks)); + req.destroy(); + }); + }); + req.end(); + }); +} diff --git a/amplify/functions/deleteDocument/node_modules/@smithy/credential-provider-imds/dist-es/remoteProvider/index.js b/amplify/functions/deleteDocument/node_modules/@smithy/credential-provider-imds/dist-es/remoteProvider/index.js new file mode 100644 index 0000000..d4ad601 --- /dev/null +++ 
b/amplify/functions/deleteDocument/node_modules/@smithy/credential-provider-imds/dist-es/remoteProvider/index.js @@ -0,0 +1,2 @@ +export * from "./ImdsCredentials"; +export * from "./RemoteProviderInit"; diff --git a/amplify/functions/deleteDocument/node_modules/@smithy/credential-provider-imds/dist-es/remoteProvider/retry.js b/amplify/functions/deleteDocument/node_modules/@smithy/credential-provider-imds/dist-es/remoteProvider/retry.js new file mode 100644 index 0000000..22b79bb --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@smithy/credential-provider-imds/dist-es/remoteProvider/retry.js @@ -0,0 +1,7 @@ +export const retry = (toRetry, maxRetries) => { + let promise = toRetry(); + for (let i = 0; i < maxRetries; i++) { + promise = promise.catch(toRetry); + } + return promise; +}; diff --git a/amplify/functions/deleteDocument/node_modules/@smithy/credential-provider-imds/dist-es/types.js b/amplify/functions/deleteDocument/node_modules/@smithy/credential-provider-imds/dist-es/types.js new file mode 100644 index 0000000..cb0ff5c --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@smithy/credential-provider-imds/dist-es/types.js @@ -0,0 +1 @@ +export {}; diff --git a/amplify/functions/deleteDocument/node_modules/@smithy/credential-provider-imds/dist-es/utils/getExtendedInstanceMetadataCredentials.js b/amplify/functions/deleteDocument/node_modules/@smithy/credential-provider-imds/dist-es/utils/getExtendedInstanceMetadataCredentials.js new file mode 100644 index 0000000..5614692 --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@smithy/credential-provider-imds/dist-es/utils/getExtendedInstanceMetadataCredentials.js @@ -0,0 +1,17 @@ +const STATIC_STABILITY_REFRESH_INTERVAL_SECONDS = 5 * 60; +const STATIC_STABILITY_REFRESH_INTERVAL_JITTER_WINDOW_SECONDS = 5 * 60; +const STATIC_STABILITY_DOC_URL = "https://docs.aws.amazon.com/sdkref/latest/guide/feature-static-credentials.html"; +export const 
getExtendedInstanceMetadataCredentials = (credentials, logger) => { + const refreshInterval = STATIC_STABILITY_REFRESH_INTERVAL_SECONDS + + Math.floor(Math.random() * STATIC_STABILITY_REFRESH_INTERVAL_JITTER_WINDOW_SECONDS); + const newExpiration = new Date(Date.now() + refreshInterval * 1000); + logger.warn("Attempting credential expiration extension due to a credential service availability issue. A refresh of these " + + `credentials will be attempted after ${new Date(newExpiration)}.\nFor more information, please visit: ` + + STATIC_STABILITY_DOC_URL); + const originalExpiration = credentials.originalExpiration ?? credentials.expiration; + return { + ...credentials, + ...(originalExpiration ? { originalExpiration } : {}), + expiration: newExpiration, + }; +}; diff --git a/amplify/functions/deleteDocument/node_modules/@smithy/credential-provider-imds/dist-es/utils/getInstanceMetadataEndpoint.js b/amplify/functions/deleteDocument/node_modules/@smithy/credential-provider-imds/dist-es/utils/getInstanceMetadataEndpoint.js new file mode 100644 index 0000000..4c611ad --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@smithy/credential-provider-imds/dist-es/utils/getInstanceMetadataEndpoint.js @@ -0,0 +1,19 @@ +import { loadConfig } from "@smithy/node-config-provider"; +import { parseUrl } from "@smithy/url-parser"; +import { Endpoint as InstanceMetadataEndpoint } from "../config/Endpoint"; +import { ENDPOINT_CONFIG_OPTIONS } from "../config/EndpointConfigOptions"; +import { EndpointMode } from "../config/EndpointMode"; +import { ENDPOINT_MODE_CONFIG_OPTIONS, } from "../config/EndpointModeConfigOptions"; +export const getInstanceMetadataEndpoint = async () => parseUrl((await getFromEndpointConfig()) || (await getFromEndpointModeConfig())); +const getFromEndpointConfig = async () => loadConfig(ENDPOINT_CONFIG_OPTIONS)(); +const getFromEndpointModeConfig = async () => { + const endpointMode = await loadConfig(ENDPOINT_MODE_CONFIG_OPTIONS)(); + switch 
(endpointMode) { + case EndpointMode.IPv4: + return InstanceMetadataEndpoint.IPv4; + case EndpointMode.IPv6: + return InstanceMetadataEndpoint.IPv6; + default: + throw new Error(`Unsupported endpoint mode: ${endpointMode}.` + ` Select from ${Object.values(EndpointMode)}`); + } +}; diff --git a/amplify/functions/deleteDocument/node_modules/@smithy/credential-provider-imds/dist-es/utils/staticStabilityProvider.js b/amplify/functions/deleteDocument/node_modules/@smithy/credential-provider-imds/dist-es/utils/staticStabilityProvider.js new file mode 100644 index 0000000..9a1e742 --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@smithy/credential-provider-imds/dist-es/utils/staticStabilityProvider.js @@ -0,0 +1,25 @@ +import { getExtendedInstanceMetadataCredentials } from "./getExtendedInstanceMetadataCredentials"; +export const staticStabilityProvider = (provider, options = {}) => { + const logger = options?.logger || console; + let pastCredentials; + return async () => { + let credentials; + try { + credentials = await provider(); + if (credentials.expiration && credentials.expiration.getTime() < Date.now()) { + credentials = getExtendedInstanceMetadataCredentials(credentials, logger); + } + } + catch (e) { + if (pastCredentials) { + logger.warn("Credential renew failed: ", e); + credentials = getExtendedInstanceMetadataCredentials(pastCredentials, logger); + } + else { + throw e; + } + } + pastCredentials = credentials; + return credentials; + }; +}; diff --git a/amplify/functions/deleteDocument/node_modules/@smithy/credential-provider-imds/dist-types/config/Endpoint.d.ts b/amplify/functions/deleteDocument/node_modules/@smithy/credential-provider-imds/dist-types/config/Endpoint.d.ts new file mode 100644 index 0000000..000e313 --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@smithy/credential-provider-imds/dist-types/config/Endpoint.d.ts @@ -0,0 +1,7 @@ +/** + * @internal + */ +export declare enum Endpoint { + IPv4 = 
"http://169.254.169.254", + IPv6 = "http://[fd00:ec2::254]" +} diff --git a/amplify/functions/deleteDocument/node_modules/@smithy/credential-provider-imds/dist-types/config/EndpointConfigOptions.d.ts b/amplify/functions/deleteDocument/node_modules/@smithy/credential-provider-imds/dist-types/config/EndpointConfigOptions.d.ts new file mode 100644 index 0000000..c03e22c --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@smithy/credential-provider-imds/dist-types/config/EndpointConfigOptions.d.ts @@ -0,0 +1,13 @@ +import { LoadedConfigSelectors } from "@smithy/node-config-provider"; +/** + * @internal + */ +export declare const ENV_ENDPOINT_NAME = "AWS_EC2_METADATA_SERVICE_ENDPOINT"; +/** + * @internal + */ +export declare const CONFIG_ENDPOINT_NAME = "ec2_metadata_service_endpoint"; +/** + * @internal + */ +export declare const ENDPOINT_CONFIG_OPTIONS: LoadedConfigSelectors; diff --git a/amplify/functions/deleteDocument/node_modules/@smithy/credential-provider-imds/dist-types/config/EndpointMode.d.ts b/amplify/functions/deleteDocument/node_modules/@smithy/credential-provider-imds/dist-types/config/EndpointMode.d.ts new file mode 100644 index 0000000..db70619 --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@smithy/credential-provider-imds/dist-types/config/EndpointMode.d.ts @@ -0,0 +1,7 @@ +/** + * @internal + */ +export declare enum EndpointMode { + IPv4 = "IPv4", + IPv6 = "IPv6" +} diff --git a/amplify/functions/deleteDocument/node_modules/@smithy/credential-provider-imds/dist-types/config/EndpointModeConfigOptions.d.ts b/amplify/functions/deleteDocument/node_modules/@smithy/credential-provider-imds/dist-types/config/EndpointModeConfigOptions.d.ts new file mode 100644 index 0000000..c743199 --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@smithy/credential-provider-imds/dist-types/config/EndpointModeConfigOptions.d.ts @@ -0,0 +1,13 @@ +import { LoadedConfigSelectors } from "@smithy/node-config-provider"; +/** + * 
@internal + */ +export declare const ENV_ENDPOINT_MODE_NAME = "AWS_EC2_METADATA_SERVICE_ENDPOINT_MODE"; +/** + * @internal + */ +export declare const CONFIG_ENDPOINT_MODE_NAME = "ec2_metadata_service_endpoint_mode"; +/** + * @internal + */ +export declare const ENDPOINT_MODE_CONFIG_OPTIONS: LoadedConfigSelectors; diff --git a/amplify/functions/deleteDocument/node_modules/@smithy/credential-provider-imds/dist-types/error/InstanceMetadataV1FallbackError.d.ts b/amplify/functions/deleteDocument/node_modules/@smithy/credential-provider-imds/dist-types/error/InstanceMetadataV1FallbackError.d.ts new file mode 100644 index 0000000..8338ccb --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@smithy/credential-provider-imds/dist-types/error/InstanceMetadataV1FallbackError.d.ts @@ -0,0 +1,12 @@ +import { CredentialsProviderError } from "@smithy/property-provider"; +/** + * @public + * + * A specific sub-case of CredentialsProviderError, when the IMDSv1 fallback + * has been attempted but shut off by SDK configuration. 
+ */ +export declare class InstanceMetadataV1FallbackError extends CredentialsProviderError { + readonly tryNextLink: boolean; + name: string; + constructor(message: string, tryNextLink?: boolean); +} diff --git a/amplify/functions/deleteDocument/node_modules/@smithy/credential-provider-imds/dist-types/fromContainerMetadata.d.ts b/amplify/functions/deleteDocument/node_modules/@smithy/credential-provider-imds/dist-types/fromContainerMetadata.d.ts new file mode 100644 index 0000000..f6f28f0 --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@smithy/credential-provider-imds/dist-types/fromContainerMetadata.d.ts @@ -0,0 +1,21 @@ +import { AwsCredentialIdentityProvider } from "@smithy/types"; +import { RemoteProviderInit } from "./remoteProvider/RemoteProviderInit"; +/** + * @internal + */ +export declare const ENV_CMDS_FULL_URI = "AWS_CONTAINER_CREDENTIALS_FULL_URI"; +/** + * @internal + */ +export declare const ENV_CMDS_RELATIVE_URI = "AWS_CONTAINER_CREDENTIALS_RELATIVE_URI"; +/** + * @internal + */ +export declare const ENV_CMDS_AUTH_TOKEN = "AWS_CONTAINER_AUTHORIZATION_TOKEN"; +/** + * @internal + * + * Creates a credential provider that will source credentials from the ECS + * Container Metadata Service + */ +export declare const fromContainerMetadata: (init?: RemoteProviderInit) => AwsCredentialIdentityProvider; diff --git a/amplify/functions/deleteDocument/node_modules/@smithy/credential-provider-imds/dist-types/fromInstanceMetadata.d.ts b/amplify/functions/deleteDocument/node_modules/@smithy/credential-provider-imds/dist-types/fromInstanceMetadata.d.ts new file mode 100644 index 0000000..24db95a --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@smithy/credential-provider-imds/dist-types/fromInstanceMetadata.d.ts @@ -0,0 +1,10 @@ +import { Provider } from "@smithy/types"; +import { RemoteProviderInit } from "./remoteProvider/RemoteProviderInit"; +import { InstanceMetadataCredentials } from "./types"; +/** + * @internal + * + * 
Creates a credential provider that will source credentials from the EC2 + * Instance Metadata Service + */ +export declare const fromInstanceMetadata: (init?: RemoteProviderInit) => Provider; diff --git a/amplify/functions/deleteDocument/node_modules/@smithy/credential-provider-imds/dist-types/index.d.ts b/amplify/functions/deleteDocument/node_modules/@smithy/credential-provider-imds/dist-types/index.d.ts new file mode 100644 index 0000000..5a87b2f --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@smithy/credential-provider-imds/dist-types/index.d.ts @@ -0,0 +1,28 @@ +/** + * @internal + */ +export * from "./fromContainerMetadata"; +/** + * @internal + */ +export * from "./fromInstanceMetadata"; +/** + * @internal + */ +export * from "./remoteProvider/RemoteProviderInit"; +/** + * @internal + */ +export * from "./types"; +/** + * @internal + */ +export { httpRequest } from "./remoteProvider/httpRequest"; +/** + * @internal + */ +export { getInstanceMetadataEndpoint } from "./utils/getInstanceMetadataEndpoint"; +/** + * @internal + */ +export { Endpoint } from "./config/Endpoint"; diff --git a/amplify/functions/deleteDocument/node_modules/@smithy/credential-provider-imds/dist-types/remoteProvider/ImdsCredentials.d.ts b/amplify/functions/deleteDocument/node_modules/@smithy/credential-provider-imds/dist-types/remoteProvider/ImdsCredentials.d.ts new file mode 100644 index 0000000..c2c7d51 --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@smithy/credential-provider-imds/dist-types/remoteProvider/ImdsCredentials.d.ts @@ -0,0 +1,19 @@ +import { AwsCredentialIdentity } from "@smithy/types"; +/** + * @internal + */ +export interface ImdsCredentials { + AccessKeyId: string; + SecretAccessKey: string; + Token: string; + Expiration: string; + AccountId?: string; +} +/** + * @internal + */ +export declare const isImdsCredentials: (arg: any) => arg is ImdsCredentials; +/** + * @internal + */ +export declare const fromImdsCredentials: (creds: 
ImdsCredentials) => AwsCredentialIdentity; diff --git a/amplify/functions/deleteDocument/node_modules/@smithy/credential-provider-imds/dist-types/remoteProvider/RemoteProviderInit.d.ts b/amplify/functions/deleteDocument/node_modules/@smithy/credential-provider-imds/dist-types/remoteProvider/RemoteProviderInit.d.ts new file mode 100644 index 0000000..df9eff7 --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@smithy/credential-provider-imds/dist-types/remoteProvider/RemoteProviderInit.d.ts @@ -0,0 +1,40 @@ +import { Logger } from "@smithy/types"; +/** + * @internal + */ +export declare const DEFAULT_TIMEOUT = 1000; +/** + * @internal + */ +export declare const DEFAULT_MAX_RETRIES = 0; +/** + * @public + */ +export interface RemoteProviderConfig { + /** + * The connection timeout (in milliseconds) + */ + timeout: number; + /** + * The maximum number of times the HTTP connection should be retried + */ + maxRetries: number; +} +/** + * @public + */ +export interface RemoteProviderInit extends Partial { + logger?: Logger; + /** + * Only used in the IMDS credential provider. + */ + ec2MetadataV1Disabled?: boolean; + /** + * AWS_PROFILE. 
+ */ + profile?: string; +} +/** + * @internal + */ +export declare const providerConfigFromInit: ({ maxRetries, timeout, }: RemoteProviderInit) => RemoteProviderConfig; diff --git a/amplify/functions/deleteDocument/node_modules/@smithy/credential-provider-imds/dist-types/remoteProvider/httpRequest.d.ts b/amplify/functions/deleteDocument/node_modules/@smithy/credential-provider-imds/dist-types/remoteProvider/httpRequest.d.ts new file mode 100644 index 0000000..87c7d0d --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@smithy/credential-provider-imds/dist-types/remoteProvider/httpRequest.d.ts @@ -0,0 +1,9 @@ +/// +/// +/// +import { Buffer } from "buffer"; +import { RequestOptions } from "http"; +/** + * @internal + */ +export declare function httpRequest(options: RequestOptions): Promise; diff --git a/amplify/functions/deleteDocument/node_modules/@smithy/credential-provider-imds/dist-types/remoteProvider/index.d.ts b/amplify/functions/deleteDocument/node_modules/@smithy/credential-provider-imds/dist-types/remoteProvider/index.d.ts new file mode 100644 index 0000000..ed18a70 --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@smithy/credential-provider-imds/dist-types/remoteProvider/index.d.ts @@ -0,0 +1,8 @@ +/** + * @internal + */ +export * from "./ImdsCredentials"; +/** + * @internal + */ +export * from "./RemoteProviderInit"; diff --git a/amplify/functions/deleteDocument/node_modules/@smithy/credential-provider-imds/dist-types/remoteProvider/retry.d.ts b/amplify/functions/deleteDocument/node_modules/@smithy/credential-provider-imds/dist-types/remoteProvider/retry.d.ts new file mode 100644 index 0000000..4e8abc0 --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@smithy/credential-provider-imds/dist-types/remoteProvider/retry.d.ts @@ -0,0 +1,10 @@ +/** + * @internal + */ +export interface RetryableProvider { + (): Promise; +} +/** + * @internal + */ +export declare const retry: (toRetry: RetryableProvider, maxRetries: 
number) => Promise; diff --git a/amplify/functions/deleteDocument/node_modules/@smithy/credential-provider-imds/dist-types/ts3.4/config/Endpoint.d.ts b/amplify/functions/deleteDocument/node_modules/@smithy/credential-provider-imds/dist-types/ts3.4/config/Endpoint.d.ts new file mode 100644 index 0000000..b700953 --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@smithy/credential-provider-imds/dist-types/ts3.4/config/Endpoint.d.ts @@ -0,0 +1,7 @@ +/** + * @internal + */ +export declare enum Endpoint { + IPv4 = "http://169.254.169.254", + IPv6 = "http://[fd00:ec2::254]" +} diff --git a/amplify/functions/deleteDocument/node_modules/@smithy/credential-provider-imds/dist-types/ts3.4/config/EndpointConfigOptions.d.ts b/amplify/functions/deleteDocument/node_modules/@smithy/credential-provider-imds/dist-types/ts3.4/config/EndpointConfigOptions.d.ts new file mode 100644 index 0000000..dbcb243 --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@smithy/credential-provider-imds/dist-types/ts3.4/config/EndpointConfigOptions.d.ts @@ -0,0 +1,13 @@ +import { LoadedConfigSelectors } from "@smithy/node-config-provider"; +/** + * @internal + */ +export declare const ENV_ENDPOINT_NAME = "AWS_EC2_METADATA_SERVICE_ENDPOINT"; +/** + * @internal + */ +export declare const CONFIG_ENDPOINT_NAME = "ec2_metadata_service_endpoint"; +/** + * @internal + */ +export declare const ENDPOINT_CONFIG_OPTIONS: LoadedConfigSelectors; diff --git a/amplify/functions/deleteDocument/node_modules/@smithy/credential-provider-imds/dist-types/ts3.4/config/EndpointMode.d.ts b/amplify/functions/deleteDocument/node_modules/@smithy/credential-provider-imds/dist-types/ts3.4/config/EndpointMode.d.ts new file mode 100644 index 0000000..7dee86e --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@smithy/credential-provider-imds/dist-types/ts3.4/config/EndpointMode.d.ts @@ -0,0 +1,7 @@ +/** + * @internal + */ +export declare enum EndpointMode { + IPv4 = "IPv4", + IPv6 = 
"IPv6" +} diff --git a/amplify/functions/deleteDocument/node_modules/@smithy/credential-provider-imds/dist-types/ts3.4/config/EndpointModeConfigOptions.d.ts b/amplify/functions/deleteDocument/node_modules/@smithy/credential-provider-imds/dist-types/ts3.4/config/EndpointModeConfigOptions.d.ts new file mode 100644 index 0000000..1d5e458 --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@smithy/credential-provider-imds/dist-types/ts3.4/config/EndpointModeConfigOptions.d.ts @@ -0,0 +1,13 @@ +import { LoadedConfigSelectors } from "@smithy/node-config-provider"; +/** + * @internal + */ +export declare const ENV_ENDPOINT_MODE_NAME = "AWS_EC2_METADATA_SERVICE_ENDPOINT_MODE"; +/** + * @internal + */ +export declare const CONFIG_ENDPOINT_MODE_NAME = "ec2_metadata_service_endpoint_mode"; +/** + * @internal + */ +export declare const ENDPOINT_MODE_CONFIG_OPTIONS: LoadedConfigSelectors; diff --git a/amplify/functions/deleteDocument/node_modules/@smithy/credential-provider-imds/dist-types/ts3.4/error/InstanceMetadataV1FallbackError.d.ts b/amplify/functions/deleteDocument/node_modules/@smithy/credential-provider-imds/dist-types/ts3.4/error/InstanceMetadataV1FallbackError.d.ts new file mode 100644 index 0000000..93ac220 --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@smithy/credential-provider-imds/dist-types/ts3.4/error/InstanceMetadataV1FallbackError.d.ts @@ -0,0 +1,12 @@ +import { CredentialsProviderError } from "@smithy/property-provider"; +/** + * @public + * + * A specific sub-case of CredentialsProviderError, when the IMDSv1 fallback + * has been attempted but shut off by SDK configuration. 
+ */ +export declare class InstanceMetadataV1FallbackError extends CredentialsProviderError { + readonly tryNextLink: boolean; + name: string; + constructor(message: string, tryNextLink?: boolean); +} diff --git a/amplify/functions/deleteDocument/node_modules/@smithy/credential-provider-imds/dist-types/ts3.4/fromContainerMetadata.d.ts b/amplify/functions/deleteDocument/node_modules/@smithy/credential-provider-imds/dist-types/ts3.4/fromContainerMetadata.d.ts new file mode 100644 index 0000000..deb48fd --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@smithy/credential-provider-imds/dist-types/ts3.4/fromContainerMetadata.d.ts @@ -0,0 +1,21 @@ +import { AwsCredentialIdentityProvider } from "@smithy/types"; +import { RemoteProviderInit } from "./remoteProvider/RemoteProviderInit"; +/** + * @internal + */ +export declare const ENV_CMDS_FULL_URI = "AWS_CONTAINER_CREDENTIALS_FULL_URI"; +/** + * @internal + */ +export declare const ENV_CMDS_RELATIVE_URI = "AWS_CONTAINER_CREDENTIALS_RELATIVE_URI"; +/** + * @internal + */ +export declare const ENV_CMDS_AUTH_TOKEN = "AWS_CONTAINER_AUTHORIZATION_TOKEN"; +/** + * @internal + * + * Creates a credential provider that will source credentials from the ECS + * Container Metadata Service + */ +export declare const fromContainerMetadata: (init?: RemoteProviderInit) => AwsCredentialIdentityProvider; diff --git a/amplify/functions/deleteDocument/node_modules/@smithy/credential-provider-imds/dist-types/ts3.4/fromInstanceMetadata.d.ts b/amplify/functions/deleteDocument/node_modules/@smithy/credential-provider-imds/dist-types/ts3.4/fromInstanceMetadata.d.ts new file mode 100644 index 0000000..8a533f2 --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@smithy/credential-provider-imds/dist-types/ts3.4/fromInstanceMetadata.d.ts @@ -0,0 +1,10 @@ +import { Provider } from "@smithy/types"; +import { RemoteProviderInit } from "./remoteProvider/RemoteProviderInit"; +import { InstanceMetadataCredentials } from 
"./types"; +/** + * @internal + * + * Creates a credential provider that will source credentials from the EC2 + * Instance Metadata Service + */ +export declare const fromInstanceMetadata: (init?: RemoteProviderInit) => Provider; diff --git a/amplify/functions/deleteDocument/node_modules/@smithy/credential-provider-imds/dist-types/ts3.4/index.d.ts b/amplify/functions/deleteDocument/node_modules/@smithy/credential-provider-imds/dist-types/ts3.4/index.d.ts new file mode 100644 index 0000000..c0bc7e4 --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@smithy/credential-provider-imds/dist-types/ts3.4/index.d.ts @@ -0,0 +1,28 @@ +/** + * @internal + */ +export * from "./fromContainerMetadata"; +/** + * @internal + */ +export * from "./fromInstanceMetadata"; +/** + * @internal + */ +export * from "./remoteProvider/RemoteProviderInit"; +/** + * @internal + */ +export * from "./types"; +/** + * @internal + */ +export { httpRequest } from "./remoteProvider/httpRequest"; +/** + * @internal + */ +export { getInstanceMetadataEndpoint } from "./utils/getInstanceMetadataEndpoint"; +/** + * @internal + */ +export { Endpoint } from "./config/Endpoint"; diff --git a/amplify/functions/deleteDocument/node_modules/@smithy/credential-provider-imds/dist-types/ts3.4/remoteProvider/ImdsCredentials.d.ts b/amplify/functions/deleteDocument/node_modules/@smithy/credential-provider-imds/dist-types/ts3.4/remoteProvider/ImdsCredentials.d.ts new file mode 100644 index 0000000..c621e0a --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@smithy/credential-provider-imds/dist-types/ts3.4/remoteProvider/ImdsCredentials.d.ts @@ -0,0 +1,19 @@ +import { AwsCredentialIdentity } from "@smithy/types"; +/** + * @internal + */ +export interface ImdsCredentials { + AccessKeyId: string; + SecretAccessKey: string; + Token: string; + Expiration: string; + AccountId?: string; +} +/** + * @internal + */ +export declare const isImdsCredentials: (arg: any) => arg is ImdsCredentials; +/** 
+ * @internal + */ +export declare const fromImdsCredentials: (creds: ImdsCredentials) => AwsCredentialIdentity; diff --git a/amplify/functions/deleteDocument/node_modules/@smithy/credential-provider-imds/dist-types/ts3.4/remoteProvider/RemoteProviderInit.d.ts b/amplify/functions/deleteDocument/node_modules/@smithy/credential-provider-imds/dist-types/ts3.4/remoteProvider/RemoteProviderInit.d.ts new file mode 100644 index 0000000..4fe25f1 --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@smithy/credential-provider-imds/dist-types/ts3.4/remoteProvider/RemoteProviderInit.d.ts @@ -0,0 +1,40 @@ +import { Logger } from "@smithy/types"; +/** + * @internal + */ +export declare const DEFAULT_TIMEOUT = 1000; +/** + * @internal + */ +export declare const DEFAULT_MAX_RETRIES = 0; +/** + * @public + */ +export interface RemoteProviderConfig { + /** + * The connection timeout (in milliseconds) + */ + timeout: number; + /** + * The maximum number of times the HTTP connection should be retried + */ + maxRetries: number; +} +/** + * @public + */ +export interface RemoteProviderInit extends Partial { + logger?: Logger; + /** + * Only used in the IMDS credential provider. + */ + ec2MetadataV1Disabled?: boolean; + /** + * AWS_PROFILE. 
+ */ + profile?: string; +} +/** + * @internal + */ +export declare const providerConfigFromInit: ({ maxRetries, timeout, }: RemoteProviderInit) => RemoteProviderConfig; diff --git a/amplify/functions/deleteDocument/node_modules/@smithy/credential-provider-imds/dist-types/ts3.4/remoteProvider/httpRequest.d.ts b/amplify/functions/deleteDocument/node_modules/@smithy/credential-provider-imds/dist-types/ts3.4/remoteProvider/httpRequest.d.ts new file mode 100644 index 0000000..b514fef --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@smithy/credential-provider-imds/dist-types/ts3.4/remoteProvider/httpRequest.d.ts @@ -0,0 +1,7 @@ +/// +import { Buffer } from "buffer"; +import { RequestOptions } from "http"; +/** + * @internal + */ +export declare function httpRequest(options: RequestOptions): Promise; diff --git a/amplify/functions/deleteDocument/node_modules/@smithy/credential-provider-imds/dist-types/ts3.4/remoteProvider/index.d.ts b/amplify/functions/deleteDocument/node_modules/@smithy/credential-provider-imds/dist-types/ts3.4/remoteProvider/index.d.ts new file mode 100644 index 0000000..a9d6094 --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@smithy/credential-provider-imds/dist-types/ts3.4/remoteProvider/index.d.ts @@ -0,0 +1,8 @@ +/** + * @internal + */ +export * from "./ImdsCredentials"; +/** + * @internal + */ +export * from "./RemoteProviderInit"; diff --git a/amplify/functions/deleteDocument/node_modules/@smithy/credential-provider-imds/dist-types/ts3.4/remoteProvider/retry.d.ts b/amplify/functions/deleteDocument/node_modules/@smithy/credential-provider-imds/dist-types/ts3.4/remoteProvider/retry.d.ts new file mode 100644 index 0000000..d72d604 --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@smithy/credential-provider-imds/dist-types/ts3.4/remoteProvider/retry.d.ts @@ -0,0 +1,10 @@ +/** + * @internal + */ +export interface RetryableProvider { + (): Promise; +} +/** + * @internal + */ +export declare const 
retry: (toRetry: RetryableProvider, maxRetries: number) => Promise; diff --git a/amplify/functions/deleteDocument/node_modules/@smithy/credential-provider-imds/dist-types/ts3.4/types.d.ts b/amplify/functions/deleteDocument/node_modules/@smithy/credential-provider-imds/dist-types/ts3.4/types.d.ts new file mode 100644 index 0000000..2e9592b --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@smithy/credential-provider-imds/dist-types/ts3.4/types.d.ts @@ -0,0 +1,7 @@ +import { AwsCredentialIdentity } from "@smithy/types"; +/** + * @internal + */ +export interface InstanceMetadataCredentials extends AwsCredentialIdentity { + readonly originalExpiration?: Date; +} diff --git a/amplify/functions/deleteDocument/node_modules/@smithy/credential-provider-imds/dist-types/ts3.4/utils/getExtendedInstanceMetadataCredentials.d.ts b/amplify/functions/deleteDocument/node_modules/@smithy/credential-provider-imds/dist-types/ts3.4/utils/getExtendedInstanceMetadataCredentials.d.ts new file mode 100644 index 0000000..67edd2c --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@smithy/credential-provider-imds/dist-types/ts3.4/utils/getExtendedInstanceMetadataCredentials.d.ts @@ -0,0 +1,6 @@ +import { Logger } from "@smithy/types"; +import { InstanceMetadataCredentials } from "../types"; +/** + * @internal + */ +export declare const getExtendedInstanceMetadataCredentials: (credentials: InstanceMetadataCredentials, logger: Logger) => InstanceMetadataCredentials; diff --git a/amplify/functions/deleteDocument/node_modules/@smithy/credential-provider-imds/dist-types/ts3.4/utils/getInstanceMetadataEndpoint.d.ts b/amplify/functions/deleteDocument/node_modules/@smithy/credential-provider-imds/dist-types/ts3.4/utils/getInstanceMetadataEndpoint.d.ts new file mode 100644 index 0000000..1ad772d --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@smithy/credential-provider-imds/dist-types/ts3.4/utils/getInstanceMetadataEndpoint.d.ts @@ -0,0 +1,21 @@ +import 
{ Endpoint } from "@smithy/types"; +/** + * Returns the host to use for instance metadata service call. + * + * The host is read from endpoint which can be set either in + * {@link ENV_ENDPOINT_NAME} environment variable or {@link CONFIG_ENDPOINT_NAME} + * configuration property. + * + * If endpoint is not set, then endpoint mode is read either from + * {@link ENV_ENDPOINT_MODE_NAME} environment variable or {@link CONFIG_ENDPOINT_MODE_NAME} + * configuration property. If endpoint mode is not set, then default endpoint mode + * {@link EndpointMode.IPv4} is used. + * + * If endpoint mode is set to {@link EndpointMode.IPv4}, then the host is {@link Endpoint.IPv4}. + * If endpoint mode is set to {@link EndpointMode.IPv6}, then the host is {@link Endpoint.IPv6}. + * + * @returns Host to use for instance metadata service call. + * + * @internal + */ +export declare const getInstanceMetadataEndpoint: () => Promise; diff --git a/amplify/functions/deleteDocument/node_modules/@smithy/credential-provider-imds/dist-types/ts3.4/utils/staticStabilityProvider.d.ts b/amplify/functions/deleteDocument/node_modules/@smithy/credential-provider-imds/dist-types/ts3.4/utils/staticStabilityProvider.d.ts new file mode 100644 index 0000000..337091e --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@smithy/credential-provider-imds/dist-types/ts3.4/utils/staticStabilityProvider.d.ts @@ -0,0 +1,16 @@ +import { Logger, Provider } from "@smithy/types"; +import { InstanceMetadataCredentials } from "../types"; +/** + * @internal + * + * IMDS credential supports static stability feature. When used, the expiration + * of recently issued credentials is extended. The server side allows using + * the recently expired credentials. This mitigates impact when clients using + * refreshable credentials are unable to retrieve updates. 
+ * + * @param provider Credential provider + * @returns A credential provider that supports static stability + */ +export declare const staticStabilityProvider: (provider: Provider, options?: { + logger?: Logger | undefined; +}) => Provider; diff --git a/amplify/functions/deleteDocument/node_modules/@smithy/credential-provider-imds/dist-types/types.d.ts b/amplify/functions/deleteDocument/node_modules/@smithy/credential-provider-imds/dist-types/types.d.ts new file mode 100644 index 0000000..e74ec99 --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@smithy/credential-provider-imds/dist-types/types.d.ts @@ -0,0 +1,7 @@ +import { AwsCredentialIdentity } from "@smithy/types"; +/** + * @internal + */ +export interface InstanceMetadataCredentials extends AwsCredentialIdentity { + readonly originalExpiration?: Date; +} diff --git a/amplify/functions/deleteDocument/node_modules/@smithy/credential-provider-imds/dist-types/utils/getExtendedInstanceMetadataCredentials.d.ts b/amplify/functions/deleteDocument/node_modules/@smithy/credential-provider-imds/dist-types/utils/getExtendedInstanceMetadataCredentials.d.ts new file mode 100644 index 0000000..f0ed41b --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@smithy/credential-provider-imds/dist-types/utils/getExtendedInstanceMetadataCredentials.d.ts @@ -0,0 +1,6 @@ +import { Logger } from "@smithy/types"; +import { InstanceMetadataCredentials } from "../types"; +/** + * @internal + */ +export declare const getExtendedInstanceMetadataCredentials: (credentials: InstanceMetadataCredentials, logger: Logger) => InstanceMetadataCredentials; diff --git a/amplify/functions/deleteDocument/node_modules/@smithy/credential-provider-imds/dist-types/utils/getInstanceMetadataEndpoint.d.ts b/amplify/functions/deleteDocument/node_modules/@smithy/credential-provider-imds/dist-types/utils/getInstanceMetadataEndpoint.d.ts new file mode 100644 index 0000000..db6b6da --- /dev/null +++ 
b/amplify/functions/deleteDocument/node_modules/@smithy/credential-provider-imds/dist-types/utils/getInstanceMetadataEndpoint.d.ts @@ -0,0 +1,21 @@ +import { Endpoint } from "@smithy/types"; +/** + * Returns the host to use for instance metadata service call. + * + * The host is read from endpoint which can be set either in + * {@link ENV_ENDPOINT_NAME} environment variable or {@link CONFIG_ENDPOINT_NAME} + * configuration property. + * + * If endpoint is not set, then endpoint mode is read either from + * {@link ENV_ENDPOINT_MODE_NAME} environment variable or {@link CONFIG_ENDPOINT_MODE_NAME} + * configuration property. If endpoint mode is not set, then default endpoint mode + * {@link EndpointMode.IPv4} is used. + * + * If endpoint mode is set to {@link EndpointMode.IPv4}, then the host is {@link Endpoint.IPv4}. + * If endpoint mode is set to {@link EndpointMode.IPv6}, then the host is {@link Endpoint.IPv6}. + * + * @returns Host to use for instance metadata service call. + * + * @internal + */ +export declare const getInstanceMetadataEndpoint: () => Promise; diff --git a/amplify/functions/deleteDocument/node_modules/@smithy/credential-provider-imds/dist-types/utils/staticStabilityProvider.d.ts b/amplify/functions/deleteDocument/node_modules/@smithy/credential-provider-imds/dist-types/utils/staticStabilityProvider.d.ts new file mode 100644 index 0000000..6bfcb69 --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@smithy/credential-provider-imds/dist-types/utils/staticStabilityProvider.d.ts @@ -0,0 +1,16 @@ +import { Logger, Provider } from "@smithy/types"; +import { InstanceMetadataCredentials } from "../types"; +/** + * @internal + * + * IMDS credential supports static stability feature. When used, the expiration + * of recently issued credentials is extended. The server side allows using + * the recently expired credentials. This mitigates impact when clients using + * refreshable credentials are unable to retrieve updates. 
+ * + * @param provider Credential provider + * @returns A credential provider that supports static stability + */ +export declare const staticStabilityProvider: (provider: Provider, options?: { + logger?: Logger | undefined; +}) => Provider; diff --git a/amplify/functions/deleteDocument/node_modules/@smithy/credential-provider-imds/package.json b/amplify/functions/deleteDocument/node_modules/@smithy/credential-provider-imds/package.json new file mode 100644 index 0000000..8fd0824 --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@smithy/credential-provider-imds/package.json @@ -0,0 +1,69 @@ +{ + "name": "@smithy/credential-provider-imds", + "version": "4.0.2", + "description": "AWS credential provider that sources credentials from the EC2 instance metadata service and ECS container metadata service", + "main": "./dist-cjs/index.js", + "module": "./dist-es/index.js", + "scripts": { + "build": "concurrently 'yarn:build:cjs' 'yarn:build:es' 'yarn:build:types && yarn build:types:downlevel'", + "build:cjs": "node ../../scripts/inline credential-provider-imds", + "build:es": "yarn g:tsc -p tsconfig.es.json", + "build:types": "yarn g:tsc -p tsconfig.types.json", + "build:types:downlevel": "rimraf dist-types/ts3.4 && downlevel-dts dist-types dist-types/ts3.4", + "stage-release": "rimraf ./.release && yarn pack && mkdir ./.release && tar zxvf ./package.tgz --directory ./.release && rm ./package.tgz", + "clean": "rimraf ./dist-* && rimraf *.tsbuildinfo || exit 0", + "lint": "eslint -c ../../.eslintrc.js \"src/**/*.ts\"", + "format": "prettier --config ../../prettier.config.js --ignore-path ../../.prettierignore --write \"**/*.{ts,md,json}\"", + "test": "yarn g:vitest run", + "test:watch": "yarn g:vitest watch" + }, + "keywords": [ + "aws", + "credentials" + ], + "author": { + "name": "AWS SDK for JavaScript Team", + "url": "https://aws.amazon.com/javascript/" + }, + "license": "Apache-2.0", + "dependencies": { + "@smithy/node-config-provider": "^4.0.2", + 
"@smithy/property-provider": "^4.0.2", + "@smithy/types": "^4.2.0", + "@smithy/url-parser": "^4.0.2", + "tslib": "^2.6.2" + }, + "devDependencies": { + "@types/node": "^18.11.9", + "concurrently": "7.0.0", + "downlevel-dts": "0.10.1", + "rimraf": "3.0.2", + "typedoc": "0.23.23" + }, + "types": "./dist-types/index.d.ts", + "engines": { + "node": ">=18.0.0" + }, + "typesVersions": { + "<4.0": { + "dist-types/*": [ + "dist-types/ts3.4/*" + ] + } + }, + "files": [ + "dist-*/**" + ], + "homepage": "https://github.com/smithy-lang/smithy-typescript/tree/main/packages/credential-provider-imds", + "repository": { + "type": "git", + "url": "https://github.com/smithy-lang/smithy-typescript.git", + "directory": "packages/credential-provider-imds" + }, + "typedoc": { + "entryPoint": "src/index.ts" + }, + "publishConfig": { + "directory": ".release/package" + } +} \ No newline at end of file diff --git a/amplify/functions/deleteDocument/node_modules/@smithy/fetch-http-handler/LICENSE b/amplify/functions/deleteDocument/node_modules/@smithy/fetch-http-handler/LICENSE new file mode 100644 index 0000000..7b6491b --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@smithy/fetch-http-handler/LICENSE @@ -0,0 +1,201 @@ +Apache License + Version 2.0, January 2004 + http://www.apache.org/licenses/ + + TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION + + 1. Definitions. + + "License" shall mean the terms and conditions for use, reproduction, + and distribution as defined by Sections 1 through 9 of this document. + + "Licensor" shall mean the copyright owner or entity authorized by + the copyright owner that is granting the License. + + "Legal Entity" shall mean the union of the acting entity and all + other entities that control, are controlled by, or are under common + control with that entity. 
For the purposes of this definition, + "control" means (i) the power, direct or indirect, to cause the + direction or management of such entity, whether by contract or + otherwise, or (ii) ownership of fifty percent (50%) or more of the + outstanding shares, or (iii) beneficial ownership of such entity. + + "You" (or "Your") shall mean an individual or Legal Entity + exercising permissions granted by this License. + + "Source" form shall mean the preferred form for making modifications, + including but not limited to software source code, documentation + source, and configuration files. + + "Object" form shall mean any form resulting from mechanical + transformation or translation of a Source form, including but + not limited to compiled object code, generated documentation, + and conversions to other media types. + + "Work" shall mean the work of authorship, whether in Source or + Object form, made available under the License, as indicated by a + copyright notice that is included in or attached to the work + (an example is provided in the Appendix below). + + "Derivative Works" shall mean any work, whether in Source or Object + form, that is based on (or derived from) the Work and for which the + editorial revisions, annotations, elaborations, or other modifications + represent, as a whole, an original work of authorship. For the purposes + of this License, Derivative Works shall not include works that remain + separable from, or merely link (or bind by name) to the interfaces of, + the Work and Derivative Works thereof. + + "Contribution" shall mean any work of authorship, including + the original version of the Work and any modifications or additions + to that Work or Derivative Works thereof, that is intentionally + submitted to Licensor for inclusion in the Work by the copyright owner + or by an individual or Legal Entity authorized to submit on behalf of + the copyright owner. 
For the purposes of this definition, "submitted" + means any form of electronic, verbal, or written communication sent + to the Licensor or its representatives, including but not limited to + communication on electronic mailing lists, source code control systems, + and issue tracking systems that are managed by, or on behalf of, the + Licensor for the purpose of discussing and improving the Work, but + excluding communication that is conspicuously marked or otherwise + designated in writing by the copyright owner as "Not a Contribution." + + "Contributor" shall mean Licensor and any individual or Legal Entity + on behalf of whom a Contribution has been received by Licensor and + subsequently incorporated within the Work. + + 2. Grant of Copyright License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + copyright license to reproduce, prepare Derivative Works of, + publicly display, publicly perform, sublicense, and distribute the + Work and such Derivative Works in Source or Object form. + + 3. Grant of Patent License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + (except as stated in this section) patent license to make, have made, + use, offer to sell, sell, import, and otherwise transfer the Work, + where such license applies only to those patent claims licensable + by such Contributor that are necessarily infringed by their + Contribution(s) alone or by combination of their Contribution(s) + with the Work to which such Contribution(s) was submitted. 
If You + institute patent litigation against any entity (including a + cross-claim or counterclaim in a lawsuit) alleging that the Work + or a Contribution incorporated within the Work constitutes direct + or contributory patent infringement, then any patent licenses + granted to You under this License for that Work shall terminate + as of the date such litigation is filed. + + 4. Redistribution. You may reproduce and distribute copies of the + Work or Derivative Works thereof in any medium, with or without + modifications, and in Source or Object form, provided that You + meet the following conditions: + + (a) You must give any other recipients of the Work or + Derivative Works a copy of this License; and + + (b) You must cause any modified files to carry prominent notices + stating that You changed the files; and + + (c) You must retain, in the Source form of any Derivative Works + that You distribute, all copyright, patent, trademark, and + attribution notices from the Source form of the Work, + excluding those notices that do not pertain to any part of + the Derivative Works; and + + (d) If the Work includes a "NOTICE" text file as part of its + distribution, then any Derivative Works that You distribute must + include a readable copy of the attribution notices contained + within such NOTICE file, excluding those notices that do not + pertain to any part of the Derivative Works, in at least one + of the following places: within a NOTICE text file distributed + as part of the Derivative Works; within the Source form or + documentation, if provided along with the Derivative Works; or, + within a display generated by the Derivative Works, if and + wherever such third-party notices normally appear. The contents + of the NOTICE file are for informational purposes only and + do not modify the License. 
You may add Your own attribution + notices within Derivative Works that You distribute, alongside + or as an addendum to the NOTICE text from the Work, provided + that such additional attribution notices cannot be construed + as modifying the License. + + You may add Your own copyright statement to Your modifications and + may provide additional or different license terms and conditions + for use, reproduction, or distribution of Your modifications, or + for any such Derivative Works as a whole, provided Your use, + reproduction, and distribution of the Work otherwise complies with + the conditions stated in this License. + + 5. Submission of Contributions. Unless You explicitly state otherwise, + any Contribution intentionally submitted for inclusion in the Work + by You to the Licensor shall be under the terms and conditions of + this License, without any additional terms or conditions. + Notwithstanding the above, nothing herein shall supersede or modify + the terms of any separate license agreement you may have executed + with Licensor regarding such Contributions. + + 6. Trademarks. This License does not grant permission to use the trade + names, trademarks, service marks, or product names of the Licensor, + except as required for reasonable and customary use in describing the + origin of the Work and reproducing the content of the NOTICE file. + + 7. Disclaimer of Warranty. Unless required by applicable law or + agreed to in writing, Licensor provides the Work (and each + Contributor provides its Contributions) on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or + implied, including, without limitation, any warranties or conditions + of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A + PARTICULAR PURPOSE. You are solely responsible for determining the + appropriateness of using or redistributing the Work and assume any + risks associated with Your exercise of permissions under this License. + + 8. 
Limitation of Liability. In no event and under no legal theory, + whether in tort (including negligence), contract, or otherwise, + unless required by applicable law (such as deliberate and grossly + negligent acts) or agreed to in writing, shall any Contributor be + liable to You for damages, including any direct, indirect, special, + incidental, or consequential damages of any character arising as a + result of this License or out of the use or inability to use the + Work (including but not limited to damages for loss of goodwill, + work stoppage, computer failure or malfunction, or any and all + other commercial damages or losses), even if such Contributor + has been advised of the possibility of such damages. + + 9. Accepting Warranty or Additional Liability. While redistributing + the Work or Derivative Works thereof, You may choose to offer, + and charge a fee for, acceptance of support, warranty, indemnity, + or other liability obligations and/or rights consistent with this + License. However, in accepting such obligations, You may act only + on Your own behalf and on Your sole responsibility, not on behalf + of any other Contributor, and only if You agree to indemnify, + defend, and hold each Contributor harmless for any liability + incurred by, or claims asserted against, such Contributor by reason + of your accepting any such warranty or additional liability. + + END OF TERMS AND CONDITIONS + + APPENDIX: How to apply the Apache License to your work. + + To apply the Apache License to your work, attach the following + boilerplate notice, with the fields enclosed by brackets "{}" + replaced with your own identifying information. (Don't include + the brackets!) The text should be enclosed in the appropriate + comment syntax for the file format. We also recommend that a + file or class name and description of purpose be included on the + same "printed page" as the copyright notice for easier + identification within third-party archives. 
+ + Copyright 2018-2020 Amazon.com, Inc. or its affiliates. All Rights Reserved. + + Licensed under the Apache License, Version 2.0 (the "License"); + you may not use this file except in compliance with the License. + You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + + Unless required by applicable law or agreed to in writing, software + distributed under the License is distributed on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + See the License for the specific language governing permissions and + limitations under the License. \ No newline at end of file diff --git a/amplify/functions/deleteDocument/node_modules/@smithy/fetch-http-handler/README.md b/amplify/functions/deleteDocument/node_modules/@smithy/fetch-http-handler/README.md new file mode 100644 index 0000000..e52e8f1 --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@smithy/fetch-http-handler/README.md @@ -0,0 +1,11 @@ +# @smithy/fetch-http-handler + +[![NPM version](https://img.shields.io/npm/v/@smithy/fetch-http-handler/latest.svg)](https://www.npmjs.com/package/@smithy/fetch-http-handler) +[![NPM downloads](https://img.shields.io/npm/dm/@smithy/fetch-http-handler.svg)](https://www.npmjs.com/package/@smithy/fetch-http-handler) + +This is the default `requestHandler` used for browser applications. +Since Node.js introduced experimental Web Streams API in v16.5.0 and made it stable in v21.0.0, +you can consider using `fetch-http-handler` in Node.js, although it's not recommended. + +For the Node.js default `requestHandler` implementation, see instead +[`@smithy/node-http-handler`](https://www.npmjs.com/package/@smithy/node-http-handler). 
diff --git a/amplify/functions/deleteDocument/node_modules/@smithy/fetch-http-handler/dist-cjs/create-request.js b/amplify/functions/deleteDocument/node_modules/@smithy/fetch-http-handler/dist-cjs/create-request.js new file mode 100644 index 0000000..532e610 --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@smithy/fetch-http-handler/dist-cjs/create-request.js @@ -0,0 +1 @@ +module.exports = require("./index.js"); \ No newline at end of file diff --git a/amplify/functions/deleteDocument/node_modules/@smithy/fetch-http-handler/dist-cjs/fetch-http-handler.js b/amplify/functions/deleteDocument/node_modules/@smithy/fetch-http-handler/dist-cjs/fetch-http-handler.js new file mode 100644 index 0000000..532e610 --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@smithy/fetch-http-handler/dist-cjs/fetch-http-handler.js @@ -0,0 +1 @@ +module.exports = require("./index.js"); \ No newline at end of file diff --git a/amplify/functions/deleteDocument/node_modules/@smithy/fetch-http-handler/dist-cjs/index.js b/amplify/functions/deleteDocument/node_modules/@smithy/fetch-http-handler/dist-cjs/index.js new file mode 100644 index 0000000..9c9c44b --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@smithy/fetch-http-handler/dist-cjs/index.js @@ -0,0 +1,264 @@ +var __defProp = Object.defineProperty; +var __getOwnPropDesc = Object.getOwnPropertyDescriptor; +var __getOwnPropNames = Object.getOwnPropertyNames; +var __hasOwnProp = Object.prototype.hasOwnProperty; +var __name = (target, value) => __defProp(target, "name", { value, configurable: true }); +var __export = (target, all) => { + for (var name in all) + __defProp(target, name, { get: all[name], enumerable: true }); +}; +var __copyProps = (to, from, except, desc) => { + if (from && typeof from === "object" || typeof from === "function") { + for (let key of __getOwnPropNames(from)) + if (!__hasOwnProp.call(to, key) && key !== except) + __defProp(to, key, { get: () => from[key], 
enumerable: !(desc = __getOwnPropDesc(from, key)) || desc.enumerable }); + } + return to; +}; +var __toCommonJS = (mod) => __copyProps(__defProp({}, "__esModule", { value: true }), mod); + +// src/index.ts +var src_exports = {}; +__export(src_exports, { + FetchHttpHandler: () => FetchHttpHandler, + keepAliveSupport: () => keepAliveSupport, + streamCollector: () => streamCollector +}); +module.exports = __toCommonJS(src_exports); + +// src/fetch-http-handler.ts +var import_protocol_http = require("@smithy/protocol-http"); +var import_querystring_builder = require("@smithy/querystring-builder"); + +// src/create-request.ts +function createRequest(url, requestOptions) { + return new Request(url, requestOptions); +} +__name(createRequest, "createRequest"); + +// src/request-timeout.ts +function requestTimeout(timeoutInMs = 0) { + return new Promise((resolve, reject) => { + if (timeoutInMs) { + setTimeout(() => { + const timeoutError = new Error(`Request did not complete within ${timeoutInMs} ms`); + timeoutError.name = "TimeoutError"; + reject(timeoutError); + }, timeoutInMs); + } + }); +} +__name(requestTimeout, "requestTimeout"); + +// src/fetch-http-handler.ts +var keepAliveSupport = { + supported: void 0 +}; +var FetchHttpHandler = class _FetchHttpHandler { + static { + __name(this, "FetchHttpHandler"); + } + /** + * @returns the input if it is an HttpHandler of any class, + * or instantiates a new instance of this handler. + */ + static create(instanceOrOptions) { + if (typeof instanceOrOptions?.handle === "function") { + return instanceOrOptions; + } + return new _FetchHttpHandler(instanceOrOptions); + } + constructor(options) { + if (typeof options === "function") { + this.configProvider = options().then((opts) => opts || {}); + } else { + this.config = options ?? 
{}; + this.configProvider = Promise.resolve(this.config); + } + if (keepAliveSupport.supported === void 0) { + keepAliveSupport.supported = Boolean( + typeof Request !== "undefined" && "keepalive" in createRequest("https://[::1]") + ); + } + } + destroy() { + } + async handle(request, { abortSignal } = {}) { + if (!this.config) { + this.config = await this.configProvider; + } + const requestTimeoutInMs = this.config.requestTimeout; + const keepAlive = this.config.keepAlive === true; + const credentials = this.config.credentials; + if (abortSignal?.aborted) { + const abortError = new Error("Request aborted"); + abortError.name = "AbortError"; + return Promise.reject(abortError); + } + let path = request.path; + const queryString = (0, import_querystring_builder.buildQueryString)(request.query || {}); + if (queryString) { + path += `?${queryString}`; + } + if (request.fragment) { + path += `#${request.fragment}`; + } + let auth = ""; + if (request.username != null || request.password != null) { + const username = request.username ?? ""; + const password = request.password ?? ""; + auth = `${username}:${password}@`; + } + const { port, method } = request; + const url = `${request.protocol}//${auth}${request.hostname}${port ? `:${port}` : ""}${path}`; + const body = method === "GET" || method === "HEAD" ? 
void 0 : request.body; + const requestOptions = { + body, + headers: new Headers(request.headers), + method, + credentials + }; + if (this.config?.cache) { + requestOptions.cache = this.config.cache; + } + if (body) { + requestOptions.duplex = "half"; + } + if (typeof AbortController !== "undefined") { + requestOptions.signal = abortSignal; + } + if (keepAliveSupport.supported) { + requestOptions.keepalive = keepAlive; + } + if (typeof this.config.requestInit === "function") { + Object.assign(requestOptions, this.config.requestInit(request)); + } + let removeSignalEventListener = /* @__PURE__ */ __name(() => { + }, "removeSignalEventListener"); + const fetchRequest = createRequest(url, requestOptions); + const raceOfPromises = [ + fetch(fetchRequest).then((response) => { + const fetchHeaders = response.headers; + const transformedHeaders = {}; + for (const pair of fetchHeaders.entries()) { + transformedHeaders[pair[0]] = pair[1]; + } + const hasReadableStream = response.body != void 0; + if (!hasReadableStream) { + return response.blob().then((body2) => ({ + response: new import_protocol_http.HttpResponse({ + headers: transformedHeaders, + reason: response.statusText, + statusCode: response.status, + body: body2 + }) + })); + } + return { + response: new import_protocol_http.HttpResponse({ + headers: transformedHeaders, + reason: response.statusText, + statusCode: response.status, + body: response.body + }) + }; + }), + requestTimeout(requestTimeoutInMs) + ]; + if (abortSignal) { + raceOfPromises.push( + new Promise((resolve, reject) => { + const onAbort = /* @__PURE__ */ __name(() => { + const abortError = new Error("Request aborted"); + abortError.name = "AbortError"; + reject(abortError); + }, "onAbort"); + if (typeof abortSignal.addEventListener === "function") { + const signal = abortSignal; + signal.addEventListener("abort", onAbort, { once: true }); + removeSignalEventListener = /* @__PURE__ */ __name(() => signal.removeEventListener("abort", onAbort), 
"removeSignalEventListener"); + } else { + abortSignal.onabort = onAbort; + } + }) + ); + } + return Promise.race(raceOfPromises).finally(removeSignalEventListener); + } + updateHttpClientConfig(key, value) { + this.config = void 0; + this.configProvider = this.configProvider.then((config) => { + config[key] = value; + return config; + }); + } + httpHandlerConfigs() { + return this.config ?? {}; + } +}; + +// src/stream-collector.ts +var import_util_base64 = require("@smithy/util-base64"); +var streamCollector = /* @__PURE__ */ __name(async (stream) => { + if (typeof Blob === "function" && stream instanceof Blob || stream.constructor?.name === "Blob") { + if (Blob.prototype.arrayBuffer !== void 0) { + return new Uint8Array(await stream.arrayBuffer()); + } + return collectBlob(stream); + } + return collectStream(stream); +}, "streamCollector"); +async function collectBlob(blob) { + const base64 = await readToBase64(blob); + const arrayBuffer = (0, import_util_base64.fromBase64)(base64); + return new Uint8Array(arrayBuffer); +} +__name(collectBlob, "collectBlob"); +async function collectStream(stream) { + const chunks = []; + const reader = stream.getReader(); + let isDone = false; + let length = 0; + while (!isDone) { + const { done, value } = await reader.read(); + if (value) { + chunks.push(value); + length += value.length; + } + isDone = done; + } + const collected = new Uint8Array(length); + let offset = 0; + for (const chunk of chunks) { + collected.set(chunk, offset); + offset += chunk.length; + } + return collected; +} +__name(collectStream, "collectStream"); +function readToBase64(blob) { + return new Promise((resolve, reject) => { + const reader = new FileReader(); + reader.onloadend = () => { + if (reader.readyState !== 2) { + return reject(new Error("Reader aborted too early")); + } + const result = reader.result ?? ""; + const commaIndex = result.indexOf(","); + const dataOffset = commaIndex > -1 ? 
commaIndex + 1 : result.length; + resolve(result.substring(dataOffset)); + }; + reader.onabort = () => reject(new Error("Read aborted")); + reader.onerror = () => reject(reader.error); + reader.readAsDataURL(blob); + }); +} +__name(readToBase64, "readToBase64"); +// Annotate the CommonJS export names for ESM import in node: + +0 && (module.exports = { + keepAliveSupport, + FetchHttpHandler, + streamCollector +}); + diff --git a/amplify/functions/deleteDocument/node_modules/@smithy/fetch-http-handler/dist-cjs/request-timeout.js b/amplify/functions/deleteDocument/node_modules/@smithy/fetch-http-handler/dist-cjs/request-timeout.js new file mode 100644 index 0000000..532e610 --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@smithy/fetch-http-handler/dist-cjs/request-timeout.js @@ -0,0 +1 @@ +module.exports = require("./index.js"); \ No newline at end of file diff --git a/amplify/functions/deleteDocument/node_modules/@smithy/fetch-http-handler/dist-cjs/stream-collector.js b/amplify/functions/deleteDocument/node_modules/@smithy/fetch-http-handler/dist-cjs/stream-collector.js new file mode 100644 index 0000000..532e610 --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@smithy/fetch-http-handler/dist-cjs/stream-collector.js @@ -0,0 +1 @@ +module.exports = require("./index.js"); \ No newline at end of file diff --git a/amplify/functions/deleteDocument/node_modules/@smithy/fetch-http-handler/dist-es/create-request.js b/amplify/functions/deleteDocument/node_modules/@smithy/fetch-http-handler/dist-es/create-request.js new file mode 100644 index 0000000..b6f1816 --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@smithy/fetch-http-handler/dist-es/create-request.js @@ -0,0 +1,3 @@ +export function createRequest(url, requestOptions) { + return new Request(url, requestOptions); +} diff --git a/amplify/functions/deleteDocument/node_modules/@smithy/fetch-http-handler/dist-es/fetch-http-handler.js 
b/amplify/functions/deleteDocument/node_modules/@smithy/fetch-http-handler/dist-es/fetch-http-handler.js new file mode 100644 index 0000000..dd56e37 --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@smithy/fetch-http-handler/dist-es/fetch-http-handler.js @@ -0,0 +1,139 @@ +import { HttpResponse } from "@smithy/protocol-http"; +import { buildQueryString } from "@smithy/querystring-builder"; +import { createRequest } from "./create-request"; +import { requestTimeout } from "./request-timeout"; +export const keepAliveSupport = { + supported: undefined, +}; +export class FetchHttpHandler { + static create(instanceOrOptions) { + if (typeof instanceOrOptions?.handle === "function") { + return instanceOrOptions; + } + return new FetchHttpHandler(instanceOrOptions); + } + constructor(options) { + if (typeof options === "function") { + this.configProvider = options().then((opts) => opts || {}); + } + else { + this.config = options ?? {}; + this.configProvider = Promise.resolve(this.config); + } + if (keepAliveSupport.supported === undefined) { + keepAliveSupport.supported = Boolean(typeof Request !== "undefined" && "keepalive" in createRequest("https://[::1]")); + } + } + destroy() { + } + async handle(request, { abortSignal } = {}) { + if (!this.config) { + this.config = await this.configProvider; + } + const requestTimeoutInMs = this.config.requestTimeout; + const keepAlive = this.config.keepAlive === true; + const credentials = this.config.credentials; + if (abortSignal?.aborted) { + const abortError = new Error("Request aborted"); + abortError.name = "AbortError"; + return Promise.reject(abortError); + } + let path = request.path; + const queryString = buildQueryString(request.query || {}); + if (queryString) { + path += `?${queryString}`; + } + if (request.fragment) { + path += `#${request.fragment}`; + } + let auth = ""; + if (request.username != null || request.password != null) { + const username = request.username ?? 
""; + const password = request.password ?? ""; + auth = `${username}:${password}@`; + } + const { port, method } = request; + const url = `${request.protocol}//${auth}${request.hostname}${port ? `:${port}` : ""}${path}`; + const body = method === "GET" || method === "HEAD" ? undefined : request.body; + const requestOptions = { + body, + headers: new Headers(request.headers), + method: method, + credentials, + }; + if (this.config?.cache) { + requestOptions.cache = this.config.cache; + } + if (body) { + requestOptions.duplex = "half"; + } + if (typeof AbortController !== "undefined") { + requestOptions.signal = abortSignal; + } + if (keepAliveSupport.supported) { + requestOptions.keepalive = keepAlive; + } + if (typeof this.config.requestInit === "function") { + Object.assign(requestOptions, this.config.requestInit(request)); + } + let removeSignalEventListener = () => { }; + const fetchRequest = createRequest(url, requestOptions); + const raceOfPromises = [ + fetch(fetchRequest).then((response) => { + const fetchHeaders = response.headers; + const transformedHeaders = {}; + for (const pair of fetchHeaders.entries()) { + transformedHeaders[pair[0]] = pair[1]; + } + const hasReadableStream = response.body != undefined; + if (!hasReadableStream) { + return response.blob().then((body) => ({ + response: new HttpResponse({ + headers: transformedHeaders, + reason: response.statusText, + statusCode: response.status, + body, + }), + })); + } + return { + response: new HttpResponse({ + headers: transformedHeaders, + reason: response.statusText, + statusCode: response.status, + body: response.body, + }), + }; + }), + requestTimeout(requestTimeoutInMs), + ]; + if (abortSignal) { + raceOfPromises.push(new Promise((resolve, reject) => { + const onAbort = () => { + const abortError = new Error("Request aborted"); + abortError.name = "AbortError"; + reject(abortError); + }; + if (typeof abortSignal.addEventListener === "function") { + const signal = abortSignal; + 
signal.addEventListener("abort", onAbort, { once: true }); + removeSignalEventListener = () => signal.removeEventListener("abort", onAbort); + } + else { + abortSignal.onabort = onAbort; + } + })); + } + return Promise.race(raceOfPromises).finally(removeSignalEventListener); + } + updateHttpClientConfig(key, value) { + this.config = undefined; + this.configProvider = this.configProvider.then((config) => { + config[key] = value; + return config; + }); + } + httpHandlerConfigs() { + return this.config ?? {}; + } +} diff --git a/amplify/functions/deleteDocument/node_modules/@smithy/fetch-http-handler/dist-es/index.js b/amplify/functions/deleteDocument/node_modules/@smithy/fetch-http-handler/dist-es/index.js new file mode 100644 index 0000000..a0c61f1 --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@smithy/fetch-http-handler/dist-es/index.js @@ -0,0 +1,2 @@ +export * from "./fetch-http-handler"; +export * from "./stream-collector"; diff --git a/amplify/functions/deleteDocument/node_modules/@smithy/fetch-http-handler/dist-es/request-timeout.js b/amplify/functions/deleteDocument/node_modules/@smithy/fetch-http-handler/dist-es/request-timeout.js new file mode 100644 index 0000000..66b09b2 --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@smithy/fetch-http-handler/dist-es/request-timeout.js @@ -0,0 +1,11 @@ +export function requestTimeout(timeoutInMs = 0) { + return new Promise((resolve, reject) => { + if (timeoutInMs) { + setTimeout(() => { + const timeoutError = new Error(`Request did not complete within ${timeoutInMs} ms`); + timeoutError.name = "TimeoutError"; + reject(timeoutError); + }, timeoutInMs); + } + }); +} diff --git a/amplify/functions/deleteDocument/node_modules/@smithy/fetch-http-handler/dist-es/stream-collector.js b/amplify/functions/deleteDocument/node_modules/@smithy/fetch-http-handler/dist-es/stream-collector.js new file mode 100644 index 0000000..a400d9b --- /dev/null +++ 
b/amplify/functions/deleteDocument/node_modules/@smithy/fetch-http-handler/dist-es/stream-collector.js @@ -0,0 +1,53 @@ +import { fromBase64 } from "@smithy/util-base64"; +export const streamCollector = async (stream) => { + if ((typeof Blob === "function" && stream instanceof Blob) || stream.constructor?.name === "Blob") { + if (Blob.prototype.arrayBuffer !== undefined) { + return new Uint8Array(await stream.arrayBuffer()); + } + return collectBlob(stream); + } + return collectStream(stream); +}; +async function collectBlob(blob) { + const base64 = await readToBase64(blob); + const arrayBuffer = fromBase64(base64); + return new Uint8Array(arrayBuffer); +} +async function collectStream(stream) { + const chunks = []; + const reader = stream.getReader(); + let isDone = false; + let length = 0; + while (!isDone) { + const { done, value } = await reader.read(); + if (value) { + chunks.push(value); + length += value.length; + } + isDone = done; + } + const collected = new Uint8Array(length); + let offset = 0; + for (const chunk of chunks) { + collected.set(chunk, offset); + offset += chunk.length; + } + return collected; +} +function readToBase64(blob) { + return new Promise((resolve, reject) => { + const reader = new FileReader(); + reader.onloadend = () => { + if (reader.readyState !== 2) { + return reject(new Error("Reader aborted too early")); + } + const result = (reader.result ?? ""); + const commaIndex = result.indexOf(","); + const dataOffset = commaIndex > -1 ? 
commaIndex + 1 : result.length; + resolve(result.substring(dataOffset)); + }; + reader.onabort = () => reject(new Error("Read aborted")); + reader.onerror = () => reject(reader.error); + reader.readAsDataURL(blob); + }); +} diff --git a/amplify/functions/deleteDocument/node_modules/@smithy/fetch-http-handler/dist-types/create-request.d.ts b/amplify/functions/deleteDocument/node_modules/@smithy/fetch-http-handler/dist-types/create-request.d.ts new file mode 100644 index 0000000..d668b06 --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@smithy/fetch-http-handler/dist-types/create-request.d.ts @@ -0,0 +1,6 @@ +import { AdditionalRequestParameters } from "./fetch-http-handler"; +/** + * @internal + * For mocking/interception. + */ +export declare function createRequest(url: string, requestOptions?: RequestInit & AdditionalRequestParameters): Request; diff --git a/amplify/functions/deleteDocument/node_modules/@smithy/fetch-http-handler/dist-types/fetch-http-handler.d.ts b/amplify/functions/deleteDocument/node_modules/@smithy/fetch-http-handler/dist-types/fetch-http-handler.d.ts new file mode 100644 index 0000000..446301c --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@smithy/fetch-http-handler/dist-types/fetch-http-handler.d.ts @@ -0,0 +1,41 @@ +import { HttpHandler, HttpRequest, HttpResponse } from "@smithy/protocol-http"; +import type { FetchHttpHandlerOptions } from "@smithy/types"; +import { HttpHandlerOptions, Provider } from "@smithy/types"; +/** + * @public + */ +export { FetchHttpHandlerOptions }; +/** + * @internal + * Detection of keepalive support. Can be overridden for testing. + */ +export declare const keepAliveSupport: { + supported: boolean | undefined; +}; +/** + * @internal + */ +export type AdditionalRequestParameters = { + duplex?: "half"; +}; +/** + * @public + * + * HttpHandler implementation using browsers' `fetch` global function. 
+ */ +export declare class FetchHttpHandler implements HttpHandler { + private config?; + private configProvider; + /** + * @returns the input if it is an HttpHandler of any class, + * or instantiates a new instance of this handler. + */ + static create(instanceOrOptions?: HttpHandler | FetchHttpHandlerOptions | Provider): FetchHttpHandler | HttpHandler; + constructor(options?: FetchHttpHandlerOptions | Provider); + destroy(): void; + handle(request: HttpRequest, { abortSignal }?: HttpHandlerOptions): Promise<{ + response: HttpResponse; + }>; + updateHttpClientConfig(key: keyof FetchHttpHandlerOptions, value: FetchHttpHandlerOptions[typeof key]): void; + httpHandlerConfigs(): FetchHttpHandlerOptions; +} diff --git a/amplify/functions/deleteDocument/node_modules/@smithy/fetch-http-handler/dist-types/index.d.ts b/amplify/functions/deleteDocument/node_modules/@smithy/fetch-http-handler/dist-types/index.d.ts new file mode 100644 index 0000000..a0c61f1 --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@smithy/fetch-http-handler/dist-types/index.d.ts @@ -0,0 +1,2 @@ +export * from "./fetch-http-handler"; +export * from "./stream-collector"; diff --git a/amplify/functions/deleteDocument/node_modules/@smithy/fetch-http-handler/dist-types/request-timeout.d.ts b/amplify/functions/deleteDocument/node_modules/@smithy/fetch-http-handler/dist-types/request-timeout.d.ts new file mode 100644 index 0000000..28d784b --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@smithy/fetch-http-handler/dist-types/request-timeout.d.ts @@ -0,0 +1 @@ +export declare function requestTimeout(timeoutInMs?: number): Promise; diff --git a/amplify/functions/deleteDocument/node_modules/@smithy/fetch-http-handler/dist-types/stream-collector.d.ts b/amplify/functions/deleteDocument/node_modules/@smithy/fetch-http-handler/dist-types/stream-collector.d.ts new file mode 100644 index 0000000..b2ca812 --- /dev/null +++ 
b/amplify/functions/deleteDocument/node_modules/@smithy/fetch-http-handler/dist-types/stream-collector.d.ts @@ -0,0 +1,2 @@ +import { StreamCollector } from "@smithy/types"; +export declare const streamCollector: StreamCollector; diff --git a/amplify/functions/deleteDocument/node_modules/@smithy/fetch-http-handler/dist-types/ts3.4/create-request.d.ts b/amplify/functions/deleteDocument/node_modules/@smithy/fetch-http-handler/dist-types/ts3.4/create-request.d.ts new file mode 100644 index 0000000..5f0b074 --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@smithy/fetch-http-handler/dist-types/ts3.4/create-request.d.ts @@ -0,0 +1,6 @@ +import { AdditionalRequestParameters } from "./fetch-http-handler"; +/** + * @internal + * For mocking/interception. + */ +export declare function createRequest(url: string, requestOptions?: RequestInit & AdditionalRequestParameters): Request; diff --git a/amplify/functions/deleteDocument/node_modules/@smithy/fetch-http-handler/dist-types/ts3.4/fetch-http-handler.d.ts b/amplify/functions/deleteDocument/node_modules/@smithy/fetch-http-handler/dist-types/ts3.4/fetch-http-handler.d.ts new file mode 100644 index 0000000..19a2943 --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@smithy/fetch-http-handler/dist-types/ts3.4/fetch-http-handler.d.ts @@ -0,0 +1,41 @@ +import { HttpHandler, HttpRequest, HttpResponse } from "@smithy/protocol-http"; +import { FetchHttpHandlerOptions } from "@smithy/types"; +import { HttpHandlerOptions, Provider } from "@smithy/types"; +/** + * @public + */ +export { FetchHttpHandlerOptions }; +/** + * @internal + * Detection of keepalive support. Can be overridden for testing. + */ +export declare const keepAliveSupport: { + supported: boolean | undefined; +}; +/** + * @internal + */ +export type AdditionalRequestParameters = { + duplex?: "half"; +}; +/** + * @public + * + * HttpHandler implementation using browsers' `fetch` global function. 
+ */ +export declare class FetchHttpHandler implements HttpHandler { + private config?; + private configProvider; + /** + * @returns the input if it is an HttpHandler of any class, + * or instantiates a new instance of this handler. + */ + static create(instanceOrOptions?: HttpHandler | FetchHttpHandlerOptions | Provider): FetchHttpHandler | HttpHandler; + constructor(options?: FetchHttpHandlerOptions | Provider); + destroy(): void; + handle(request: HttpRequest, { abortSignal }?: HttpHandlerOptions): Promise<{ + response: HttpResponse; + }>; + updateHttpClientConfig(key: keyof FetchHttpHandlerOptions, value: FetchHttpHandlerOptions[typeof key]): void; + httpHandlerConfigs(): FetchHttpHandlerOptions; +} diff --git a/amplify/functions/deleteDocument/node_modules/@smithy/fetch-http-handler/dist-types/ts3.4/index.d.ts b/amplify/functions/deleteDocument/node_modules/@smithy/fetch-http-handler/dist-types/ts3.4/index.d.ts new file mode 100644 index 0000000..d30edab --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@smithy/fetch-http-handler/dist-types/ts3.4/index.d.ts @@ -0,0 +1,2 @@ +export * from "./fetch-http-handler"; +export * from "./stream-collector"; diff --git a/amplify/functions/deleteDocument/node_modules/@smithy/fetch-http-handler/dist-types/ts3.4/request-timeout.d.ts b/amplify/functions/deleteDocument/node_modules/@smithy/fetch-http-handler/dist-types/ts3.4/request-timeout.d.ts new file mode 100644 index 0000000..ca24128 --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@smithy/fetch-http-handler/dist-types/ts3.4/request-timeout.d.ts @@ -0,0 +1 @@ +export declare function requestTimeout(timeoutInMs?: number): Promise; diff --git a/amplify/functions/deleteDocument/node_modules/@smithy/fetch-http-handler/dist-types/ts3.4/stream-collector.d.ts b/amplify/functions/deleteDocument/node_modules/@smithy/fetch-http-handler/dist-types/ts3.4/stream-collector.d.ts new file mode 100644 index 0000000..8259097 --- /dev/null +++ 
b/amplify/functions/deleteDocument/node_modules/@smithy/fetch-http-handler/dist-types/ts3.4/stream-collector.d.ts @@ -0,0 +1,2 @@ +import { StreamCollector } from "@smithy/types"; +export declare const streamCollector: StreamCollector; diff --git a/amplify/functions/deleteDocument/node_modules/@smithy/fetch-http-handler/package.json b/amplify/functions/deleteDocument/node_modules/@smithy/fetch-http-handler/package.json new file mode 100644 index 0000000..8ebcaa1 --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@smithy/fetch-http-handler/package.json @@ -0,0 +1,68 @@ +{ + "name": "@smithy/fetch-http-handler", + "version": "5.0.2", + "description": "Provides a way to make requests", + "scripts": { + "build": "concurrently 'yarn:build:cjs' 'yarn:build:es' 'yarn:build:types && yarn build:types:downlevel'", + "build:cjs": "node ../../scripts/inline fetch-http-handler", + "build:es": "yarn g:tsc -p tsconfig.es.json", + "build:types": "yarn g:tsc -p tsconfig.types.json", + "build:types:downlevel": "rimraf dist-types/ts3.4 && downlevel-dts dist-types dist-types/ts3.4", + "stage-release": "rimraf ./.release && yarn pack && mkdir ./.release && tar zxvf ./package.tgz --directory ./.release && rm ./package.tgz", + "clean": "rimraf ./dist-* && rimraf *.tsbuildinfo || exit 0", + "lint": "eslint -c ../../.eslintrc.js \"src/**/*.ts\"", + "format": "prettier --config ../../prettier.config.js --ignore-path ../../.prettierignore --write \"**/*.{ts,md,json}\"", + "extract:docs": "api-extractor run --local", + "test": "yarn g:vitest run && yarn test:browser", + "test:watch": "yarn g:vitest watch", + "test:browser": "yarn g:vitest run -c vitest.config.browser.ts", + "test:browser:watch": "yarn g:vitest watch -c vitest.config.browser.ts" + }, + "author": { + "name": "AWS SDK for JavaScript Team", + "url": "https://aws.amazon.com/javascript/" + }, + "license": "Apache-2.0", + "main": "./dist-cjs/index.js", + "module": "./dist-es/index.js", + "types": 
"./dist-types/index.d.ts", + "dependencies": { + "@smithy/protocol-http": "^5.1.0", + "@smithy/querystring-builder": "^4.0.2", + "@smithy/types": "^4.2.0", + "@smithy/util-base64": "^4.0.0", + "tslib": "^2.6.2" + }, + "devDependencies": { + "@smithy/abort-controller": "^4.0.2", + "concurrently": "7.0.0", + "downlevel-dts": "0.10.1", + "rimraf": "3.0.2", + "typedoc": "0.23.23" + }, + "typesVersions": { + "<4.0": { + "dist-types/*": [ + "dist-types/ts3.4/*" + ] + } + }, + "files": [ + "dist-*/**" + ], + "homepage": "https://github.com/smithy-lang/smithy-typescript/tree/main/packages/fetch-http-handler", + "repository": { + "type": "git", + "url": "https://github.com/smithy-lang/smithy-typescript.git", + "directory": "packages/fetch-http-handler" + }, + "typedoc": { + "entryPoint": "src/index.ts" + }, + "publishConfig": { + "directory": ".release/package" + }, + "engines": { + "node": ">=18.0.0" + } +} \ No newline at end of file diff --git a/amplify/functions/deleteDocument/node_modules/@smithy/hash-node/LICENSE b/amplify/functions/deleteDocument/node_modules/@smithy/hash-node/LICENSE new file mode 100644 index 0000000..7b6491b --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@smithy/hash-node/LICENSE @@ -0,0 +1,201 @@ +Apache License + Version 2.0, January 2004 + http://www.apache.org/licenses/ + + TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION + + 1. Definitions. + + "License" shall mean the terms and conditions for use, reproduction, + and distribution as defined by Sections 1 through 9 of this document. + + "Licensor" shall mean the copyright owner or entity authorized by + the copyright owner that is granting the License. + + "Legal Entity" shall mean the union of the acting entity and all + other entities that control, are controlled by, or are under common + control with that entity. 
For the purposes of this definition, + "control" means (i) the power, direct or indirect, to cause the + direction or management of such entity, whether by contract or + otherwise, or (ii) ownership of fifty percent (50%) or more of the + outstanding shares, or (iii) beneficial ownership of such entity. + + "You" (or "Your") shall mean an individual or Legal Entity + exercising permissions granted by this License. + + "Source" form shall mean the preferred form for making modifications, + including but not limited to software source code, documentation + source, and configuration files. + + "Object" form shall mean any form resulting from mechanical + transformation or translation of a Source form, including but + not limited to compiled object code, generated documentation, + and conversions to other media types. + + "Work" shall mean the work of authorship, whether in Source or + Object form, made available under the License, as indicated by a + copyright notice that is included in or attached to the work + (an example is provided in the Appendix below). + + "Derivative Works" shall mean any work, whether in Source or Object + form, that is based on (or derived from) the Work and for which the + editorial revisions, annotations, elaborations, or other modifications + represent, as a whole, an original work of authorship. For the purposes + of this License, Derivative Works shall not include works that remain + separable from, or merely link (or bind by name) to the interfaces of, + the Work and Derivative Works thereof. + + "Contribution" shall mean any work of authorship, including + the original version of the Work and any modifications or additions + to that Work or Derivative Works thereof, that is intentionally + submitted to Licensor for inclusion in the Work by the copyright owner + or by an individual or Legal Entity authorized to submit on behalf of + the copyright owner. 
For the purposes of this definition, "submitted" + means any form of electronic, verbal, or written communication sent + to the Licensor or its representatives, including but not limited to + communication on electronic mailing lists, source code control systems, + and issue tracking systems that are managed by, or on behalf of, the + Licensor for the purpose of discussing and improving the Work, but + excluding communication that is conspicuously marked or otherwise + designated in writing by the copyright owner as "Not a Contribution." + + "Contributor" shall mean Licensor and any individual or Legal Entity + on behalf of whom a Contribution has been received by Licensor and + subsequently incorporated within the Work. + + 2. Grant of Copyright License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + copyright license to reproduce, prepare Derivative Works of, + publicly display, publicly perform, sublicense, and distribute the + Work and such Derivative Works in Source or Object form. + + 3. Grant of Patent License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + (except as stated in this section) patent license to make, have made, + use, offer to sell, sell, import, and otherwise transfer the Work, + where such license applies only to those patent claims licensable + by such Contributor that are necessarily infringed by their + Contribution(s) alone or by combination of their Contribution(s) + with the Work to which such Contribution(s) was submitted. 
If You + institute patent litigation against any entity (including a + cross-claim or counterclaim in a lawsuit) alleging that the Work + or a Contribution incorporated within the Work constitutes direct + or contributory patent infringement, then any patent licenses + granted to You under this License for that Work shall terminate + as of the date such litigation is filed. + + 4. Redistribution. You may reproduce and distribute copies of the + Work or Derivative Works thereof in any medium, with or without + modifications, and in Source or Object form, provided that You + meet the following conditions: + + (a) You must give any other recipients of the Work or + Derivative Works a copy of this License; and + + (b) You must cause any modified files to carry prominent notices + stating that You changed the files; and + + (c) You must retain, in the Source form of any Derivative Works + that You distribute, all copyright, patent, trademark, and + attribution notices from the Source form of the Work, + excluding those notices that do not pertain to any part of + the Derivative Works; and + + (d) If the Work includes a "NOTICE" text file as part of its + distribution, then any Derivative Works that You distribute must + include a readable copy of the attribution notices contained + within such NOTICE file, excluding those notices that do not + pertain to any part of the Derivative Works, in at least one + of the following places: within a NOTICE text file distributed + as part of the Derivative Works; within the Source form or + documentation, if provided along with the Derivative Works; or, + within a display generated by the Derivative Works, if and + wherever such third-party notices normally appear. The contents + of the NOTICE file are for informational purposes only and + do not modify the License. 
You may add Your own attribution + notices within Derivative Works that You distribute, alongside + or as an addendum to the NOTICE text from the Work, provided + that such additional attribution notices cannot be construed + as modifying the License. + + You may add Your own copyright statement to Your modifications and + may provide additional or different license terms and conditions + for use, reproduction, or distribution of Your modifications, or + for any such Derivative Works as a whole, provided Your use, + reproduction, and distribution of the Work otherwise complies with + the conditions stated in this License. + + 5. Submission of Contributions. Unless You explicitly state otherwise, + any Contribution intentionally submitted for inclusion in the Work + by You to the Licensor shall be under the terms and conditions of + this License, without any additional terms or conditions. + Notwithstanding the above, nothing herein shall supersede or modify + the terms of any separate license agreement you may have executed + with Licensor regarding such Contributions. + + 6. Trademarks. This License does not grant permission to use the trade + names, trademarks, service marks, or product names of the Licensor, + except as required for reasonable and customary use in describing the + origin of the Work and reproducing the content of the NOTICE file. + + 7. Disclaimer of Warranty. Unless required by applicable law or + agreed to in writing, Licensor provides the Work (and each + Contributor provides its Contributions) on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or + implied, including, without limitation, any warranties or conditions + of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A + PARTICULAR PURPOSE. You are solely responsible for determining the + appropriateness of using or redistributing the Work and assume any + risks associated with Your exercise of permissions under this License. + + 8. 
Limitation of Liability. In no event and under no legal theory, + whether in tort (including negligence), contract, or otherwise, + unless required by applicable law (such as deliberate and grossly + negligent acts) or agreed to in writing, shall any Contributor be + liable to You for damages, including any direct, indirect, special, + incidental, or consequential damages of any character arising as a + result of this License or out of the use or inability to use the + Work (including but not limited to damages for loss of goodwill, + work stoppage, computer failure or malfunction, or any and all + other commercial damages or losses), even if such Contributor + has been advised of the possibility of such damages. + + 9. Accepting Warranty or Additional Liability. While redistributing + the Work or Derivative Works thereof, You may choose to offer, + and charge a fee for, acceptance of support, warranty, indemnity, + or other liability obligations and/or rights consistent with this + License. However, in accepting such obligations, You may act only + on Your own behalf and on Your sole responsibility, not on behalf + of any other Contributor, and only if You agree to indemnify, + defend, and hold each Contributor harmless for any liability + incurred by, or claims asserted against, such Contributor by reason + of your accepting any such warranty or additional liability. + + END OF TERMS AND CONDITIONS + + APPENDIX: How to apply the Apache License to your work. + + To apply the Apache License to your work, attach the following + boilerplate notice, with the fields enclosed by brackets "{}" + replaced with your own identifying information. (Don't include + the brackets!) The text should be enclosed in the appropriate + comment syntax for the file format. We also recommend that a + file or class name and description of purpose be included on the + same "printed page" as the copyright notice for easier + identification within third-party archives. 
+ + Copyright 2018-2020 Amazon.com, Inc. or its affiliates. All Rights Reserved. + + Licensed under the Apache License, Version 2.0 (the "License"); + you may not use this file except in compliance with the License. + You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + + Unless required by applicable law or agreed to in writing, software + distributed under the License is distributed on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + See the License for the specific language governing permissions and + limitations under the License. \ No newline at end of file diff --git a/amplify/functions/deleteDocument/node_modules/@smithy/hash-node/README.md b/amplify/functions/deleteDocument/node_modules/@smithy/hash-node/README.md new file mode 100644 index 0000000..a160019 --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@smithy/hash-node/README.md @@ -0,0 +1,10 @@ +# @smithy/md5-node + +[![NPM version](https://img.shields.io/npm/v/@smithy/hash-node/latest.svg)](https://www.npmjs.com/package/@smithy/hash-node) +[![NPM downloads](https://img.shields.io/npm/dm/@smithy/hash-node.svg)](https://www.npmjs.com/package/@smithy/hash-node) + +> An internal package + +## Usage + +You probably shouldn't, at least directly. 
diff --git a/amplify/functions/deleteDocument/node_modules/@smithy/hash-node/dist-cjs/index.js b/amplify/functions/deleteDocument/node_modules/@smithy/hash-node/dist-cjs/index.js new file mode 100644 index 0000000..fc7f7de --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@smithy/hash-node/dist-cjs/index.js @@ -0,0 +1,67 @@ +var __defProp = Object.defineProperty; +var __getOwnPropDesc = Object.getOwnPropertyDescriptor; +var __getOwnPropNames = Object.getOwnPropertyNames; +var __hasOwnProp = Object.prototype.hasOwnProperty; +var __name = (target, value) => __defProp(target, "name", { value, configurable: true }); +var __export = (target, all) => { + for (var name in all) + __defProp(target, name, { get: all[name], enumerable: true }); +}; +var __copyProps = (to, from, except, desc) => { + if (from && typeof from === "object" || typeof from === "function") { + for (let key of __getOwnPropNames(from)) + if (!__hasOwnProp.call(to, key) && key !== except) + __defProp(to, key, { get: () => from[key], enumerable: !(desc = __getOwnPropDesc(from, key)) || desc.enumerable }); + } + return to; +}; +var __toCommonJS = (mod) => __copyProps(__defProp({}, "__esModule", { value: true }), mod); + +// src/index.ts +var src_exports = {}; +__export(src_exports, { + Hash: () => Hash +}); +module.exports = __toCommonJS(src_exports); +var import_util_buffer_from = require("@smithy/util-buffer-from"); +var import_util_utf8 = require("@smithy/util-utf8"); +var import_buffer = require("buffer"); +var import_crypto = require("crypto"); +var Hash = class { + static { + __name(this, "Hash"); + } + constructor(algorithmIdentifier, secret) { + this.algorithmIdentifier = algorithmIdentifier; + this.secret = secret; + this.reset(); + } + update(toHash, encoding) { + this.hash.update((0, import_util_utf8.toUint8Array)(castSourceData(toHash, encoding))); + } + digest() { + return Promise.resolve(this.hash.digest()); + } + reset() { + this.hash = this.secret ? 
(0, import_crypto.createHmac)(this.algorithmIdentifier, castSourceData(this.secret)) : (0, import_crypto.createHash)(this.algorithmIdentifier); + } +}; +function castSourceData(toCast, encoding) { + if (import_buffer.Buffer.isBuffer(toCast)) { + return toCast; + } + if (typeof toCast === "string") { + return (0, import_util_buffer_from.fromString)(toCast, encoding); + } + if (ArrayBuffer.isView(toCast)) { + return (0, import_util_buffer_from.fromArrayBuffer)(toCast.buffer, toCast.byteOffset, toCast.byteLength); + } + return (0, import_util_buffer_from.fromArrayBuffer)(toCast); +} +__name(castSourceData, "castSourceData"); +// Annotate the CommonJS export names for ESM import in node: + +0 && (module.exports = { + Hash +}); + diff --git a/amplify/functions/deleteDocument/node_modules/@smithy/hash-node/dist-es/index.js b/amplify/functions/deleteDocument/node_modules/@smithy/hash-node/dist-es/index.js new file mode 100644 index 0000000..718d9c6 --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@smithy/hash-node/dist-es/index.js @@ -0,0 +1,34 @@ +import { fromArrayBuffer, fromString } from "@smithy/util-buffer-from"; +import { toUint8Array } from "@smithy/util-utf8"; +import { Buffer } from "buffer"; +import { createHash, createHmac } from "crypto"; +export class Hash { + constructor(algorithmIdentifier, secret) { + this.algorithmIdentifier = algorithmIdentifier; + this.secret = secret; + this.reset(); + } + update(toHash, encoding) { + this.hash.update(toUint8Array(castSourceData(toHash, encoding))); + } + digest() { + return Promise.resolve(this.hash.digest()); + } + reset() { + this.hash = this.secret + ? 
createHmac(this.algorithmIdentifier, castSourceData(this.secret)) + : createHash(this.algorithmIdentifier); + } +} +function castSourceData(toCast, encoding) { + if (Buffer.isBuffer(toCast)) { + return toCast; + } + if (typeof toCast === "string") { + return fromString(toCast, encoding); + } + if (ArrayBuffer.isView(toCast)) { + return fromArrayBuffer(toCast.buffer, toCast.byteOffset, toCast.byteLength); + } + return fromArrayBuffer(toCast); +} diff --git a/amplify/functions/deleteDocument/node_modules/@smithy/hash-node/dist-types/index.d.ts b/amplify/functions/deleteDocument/node_modules/@smithy/hash-node/dist-types/index.d.ts new file mode 100644 index 0000000..20ed5ed --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@smithy/hash-node/dist-types/index.d.ts @@ -0,0 +1,13 @@ +import { Checksum, SourceData } from "@smithy/types"; +/** + * @internal + */ +export declare class Hash implements Checksum { + private readonly algorithmIdentifier; + private readonly secret?; + private hash; + constructor(algorithmIdentifier: string, secret?: SourceData); + update(toHash: SourceData, encoding?: "utf8" | "ascii" | "latin1"): void; + digest(): Promise; + reset(): void; +} diff --git a/amplify/functions/deleteDocument/node_modules/@smithy/hash-node/dist-types/ts3.4/index.d.ts b/amplify/functions/deleteDocument/node_modules/@smithy/hash-node/dist-types/ts3.4/index.d.ts new file mode 100644 index 0000000..313ab7e --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@smithy/hash-node/dist-types/ts3.4/index.d.ts @@ -0,0 +1,13 @@ +import { Checksum, SourceData } from "@smithy/types"; +/** + * @internal + */ +export declare class Hash implements Checksum { + private readonly algorithmIdentifier; + private readonly secret?; + private hash; + constructor(algorithmIdentifier: string, secret?: SourceData); + update(toHash: SourceData, encoding?: "utf8" | "ascii" | "latin1"): void; + digest(): Promise; + reset(): void; +} diff --git 
a/amplify/functions/deleteDocument/node_modules/@smithy/hash-node/package.json b/amplify/functions/deleteDocument/node_modules/@smithy/hash-node/package.json new file mode 100644 index 0000000..527b45a --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@smithy/hash-node/package.json @@ -0,0 +1,64 @@ +{ + "name": "@smithy/hash-node", + "version": "4.0.2", + "scripts": { + "build": "concurrently 'yarn:build:cjs' 'yarn:build:es' 'yarn:build:types && yarn build:types:downlevel'", + "build:cjs": "node ../../scripts/inline hash-node", + "build:es": "yarn g:tsc -p tsconfig.es.json", + "build:types": "yarn g:tsc -p tsconfig.types.json", + "build:types:downlevel": "rimraf dist-types/ts3.4 && downlevel-dts dist-types dist-types/ts3.4", + "stage-release": "rimraf ./.release && yarn pack && mkdir ./.release && tar zxvf ./package.tgz --directory ./.release && rm ./package.tgz", + "clean": "rimraf ./dist-* && rimraf *.tsbuildinfo || exit 0", + "lint": "eslint -c ../../.eslintrc.js \"src/**/*.ts\"", + "format": "prettier --config ../../prettier.config.js --ignore-path ../../.prettierignore --write \"**/*.{ts,md,json}\"", + "test": "yarn g:vitest run", + "test:watch": "yarn g:vitest watch" + }, + "main": "./dist-cjs/index.js", + "module": "./dist-es/index.js", + "types": "./dist-types/index.d.ts", + "author": { + "name": "AWS SDK for JavaScript Team", + "url": "https://aws.amazon.com/javascript/" + }, + "license": "Apache-2.0", + "devDependencies": { + "@types/node": "^18.11.9", + "concurrently": "7.0.0", + "downlevel-dts": "0.10.1", + "hash-test-vectors": "^1.3.2", + "rimraf": "3.0.2", + "typedoc": "0.23.23" + }, + "dependencies": { + "@smithy/types": "^4.2.0", + "@smithy/util-buffer-from": "^4.0.0", + "@smithy/util-utf8": "^4.0.0", + "tslib": "^2.6.2" + }, + "engines": { + "node": ">=18.0.0" + }, + "typesVersions": { + "<4.0": { + "dist-types/*": [ + "dist-types/ts3.4/*" + ] + } + }, + "files": [ + "dist-*/**" + ], + "homepage": 
"https://github.com/smithy-lang/smithy-typescript/tree/main/packages/hash-node", + "repository": { + "type": "git", + "url": "https://github.com/smithy-lang/smithy-typescript.git", + "directory": "packages/hash-node" + }, + "typedoc": { + "entryPoint": "src/index.ts" + }, + "publishConfig": { + "directory": ".release/package" + } +} \ No newline at end of file diff --git a/amplify/functions/deleteDocument/node_modules/@smithy/invalid-dependency/LICENSE b/amplify/functions/deleteDocument/node_modules/@smithy/invalid-dependency/LICENSE new file mode 100644 index 0000000..e907b58 --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@smithy/invalid-dependency/LICENSE @@ -0,0 +1,201 @@ + Apache License + Version 2.0, January 2004 + http://www.apache.org/licenses/ + + TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION + + 1. Definitions. + + "License" shall mean the terms and conditions for use, reproduction, + and distribution as defined by Sections 1 through 9 of this document. + + "Licensor" shall mean the copyright owner or entity authorized by + the copyright owner that is granting the License. + + "Legal Entity" shall mean the union of the acting entity and all + other entities that control, are controlled by, or are under common + control with that entity. For the purposes of this definition, + "control" means (i) the power, direct or indirect, to cause the + direction or management of such entity, whether by contract or + otherwise, or (ii) ownership of fifty percent (50%) or more of the + outstanding shares, or (iii) beneficial ownership of such entity. + + "You" (or "Your") shall mean an individual or Legal Entity + exercising permissions granted by this License. + + "Source" form shall mean the preferred form for making modifications, + including but not limited to software source code, documentation + source, and configuration files. 
+ + "Object" form shall mean any form resulting from mechanical + transformation or translation of a Source form, including but + not limited to compiled object code, generated documentation, + and conversions to other media types. + + "Work" shall mean the work of authorship, whether in Source or + Object form, made available under the License, as indicated by a + copyright notice that is included in or attached to the work + (an example is provided in the Appendix below). + + "Derivative Works" shall mean any work, whether in Source or Object + form, that is based on (or derived from) the Work and for which the + editorial revisions, annotations, elaborations, or other modifications + represent, as a whole, an original work of authorship. For the purposes + of this License, Derivative Works shall not include works that remain + separable from, or merely link (or bind by name) to the interfaces of, + the Work and Derivative Works thereof. + + "Contribution" shall mean any work of authorship, including + the original version of the Work and any modifications or additions + to that Work or Derivative Works thereof, that is intentionally + submitted to Licensor for inclusion in the Work by the copyright owner + or by an individual or Legal Entity authorized to submit on behalf of + the copyright owner. For the purposes of this definition, "submitted" + means any form of electronic, verbal, or written communication sent + to the Licensor or its representatives, including but not limited to + communication on electronic mailing lists, source code control systems, + and issue tracking systems that are managed by, or on behalf of, the + Licensor for the purpose of discussing and improving the Work, but + excluding communication that is conspicuously marked or otherwise + designated in writing by the copyright owner as "Not a Contribution." 
+ + "Contributor" shall mean Licensor and any individual or Legal Entity + on behalf of whom a Contribution has been received by Licensor and + subsequently incorporated within the Work. + + 2. Grant of Copyright License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + copyright license to reproduce, prepare Derivative Works of, + publicly display, publicly perform, sublicense, and distribute the + Work and such Derivative Works in Source or Object form. + + 3. Grant of Patent License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + (except as stated in this section) patent license to make, have made, + use, offer to sell, sell, import, and otherwise transfer the Work, + where such license applies only to those patent claims licensable + by such Contributor that are necessarily infringed by their + Contribution(s) alone or by combination of their Contribution(s) + with the Work to which such Contribution(s) was submitted. If You + institute patent litigation against any entity (including a + cross-claim or counterclaim in a lawsuit) alleging that the Work + or a Contribution incorporated within the Work constitutes direct + or contributory patent infringement, then any patent licenses + granted to You under this License for that Work shall terminate + as of the date such litigation is filed. + + 4. Redistribution. 
You may reproduce and distribute copies of the + Work or Derivative Works thereof in any medium, with or without + modifications, and in Source or Object form, provided that You + meet the following conditions: + + (a) You must give any other recipients of the Work or + Derivative Works a copy of this License; and + + (b) You must cause any modified files to carry prominent notices + stating that You changed the files; and + + (c) You must retain, in the Source form of any Derivative Works + that You distribute, all copyright, patent, trademark, and + attribution notices from the Source form of the Work, + excluding those notices that do not pertain to any part of + the Derivative Works; and + + (d) If the Work includes a "NOTICE" text file as part of its + distribution, then any Derivative Works that You distribute must + include a readable copy of the attribution notices contained + within such NOTICE file, excluding those notices that do not + pertain to any part of the Derivative Works, in at least one + of the following places: within a NOTICE text file distributed + as part of the Derivative Works; within the Source form or + documentation, if provided along with the Derivative Works; or, + within a display generated by the Derivative Works, if and + wherever such third-party notices normally appear. The contents + of the NOTICE file are for informational purposes only and + do not modify the License. You may add Your own attribution + notices within Derivative Works that You distribute, alongside + or as an addendum to the NOTICE text from the Work, provided + that such additional attribution notices cannot be construed + as modifying the License. 
+ + You may add Your own copyright statement to Your modifications and + may provide additional or different license terms and conditions + for use, reproduction, or distribution of Your modifications, or + for any such Derivative Works as a whole, provided Your use, + reproduction, and distribution of the Work otherwise complies with + the conditions stated in this License. + + 5. Submission of Contributions. Unless You explicitly state otherwise, + any Contribution intentionally submitted for inclusion in the Work + by You to the Licensor shall be under the terms and conditions of + this License, without any additional terms or conditions. + Notwithstanding the above, nothing herein shall supersede or modify + the terms of any separate license agreement you may have executed + with Licensor regarding such Contributions. + + 6. Trademarks. This License does not grant permission to use the trade + names, trademarks, service marks, or product names of the Licensor, + except as required for reasonable and customary use in describing the + origin of the Work and reproducing the content of the NOTICE file. + + 7. Disclaimer of Warranty. Unless required by applicable law or + agreed to in writing, Licensor provides the Work (and each + Contributor provides its Contributions) on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or + implied, including, without limitation, any warranties or conditions + of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A + PARTICULAR PURPOSE. You are solely responsible for determining the + appropriateness of using or redistributing the Work and assume any + risks associated with Your exercise of permissions under this License. + + 8. Limitation of Liability. 
In no event and under no legal theory, + whether in tort (including negligence), contract, or otherwise, + unless required by applicable law (such as deliberate and grossly + negligent acts) or agreed to in writing, shall any Contributor be + liable to You for damages, including any direct, indirect, special, + incidental, or consequential damages of any character arising as a + result of this License or out of the use or inability to use the + Work (including but not limited to damages for loss of goodwill, + work stoppage, computer failure or malfunction, or any and all + other commercial damages or losses), even if such Contributor + has been advised of the possibility of such damages. + + 9. Accepting Warranty or Additional Liability. While redistributing + the Work or Derivative Works thereof, You may choose to offer, + and charge a fee for, acceptance of support, warranty, indemnity, + or other liability obligations and/or rights consistent with this + License. However, in accepting such obligations, You may act only + on Your own behalf and on Your sole responsibility, not on behalf + of any other Contributor, and only if You agree to indemnify, + defend, and hold each Contributor harmless for any liability + incurred by, or claims asserted against, such Contributor by reason + of your accepting any such warranty or additional liability. + + END OF TERMS AND CONDITIONS + + APPENDIX: How to apply the Apache License to your work. + + To apply the Apache License to your work, attach the following + boilerplate notice, with the fields enclosed by brackets "{}" + replaced with your own identifying information. (Don't include + the brackets!) The text should be enclosed in the appropriate + comment syntax for the file format. We also recommend that a + file or class name and description of purpose be included on the + same "printed page" as the copyright notice for easier + identification within third-party archives. + + Copyright 2019 Amazon.com, Inc. 
or its affiliates. All Rights Reserved. + + Licensed under the Apache License, Version 2.0 (the "License"); + you may not use this file except in compliance with the License. + You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + + Unless required by applicable law or agreed to in writing, software + distributed under the License is distributed on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + See the License for the specific language governing permissions and + limitations under the License. diff --git a/amplify/functions/deleteDocument/node_modules/@smithy/invalid-dependency/README.md b/amplify/functions/deleteDocument/node_modules/@smithy/invalid-dependency/README.md new file mode 100644 index 0000000..9110465 --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@smithy/invalid-dependency/README.md @@ -0,0 +1,10 @@ +# @smithy/invalid-dependency + +[![NPM version](https://img.shields.io/npm/v/@smithy/invalid-dependency/latest.svg)](https://www.npmjs.com/package/@smithy/invalid-dependency) +[![NPM downloads](https://img.shields.io/npm/dm/@smithy/invalid-dependency.svg)](https://www.npmjs.com/package/@smithy/invalid-dependency) + +> An internal package + +## Usage + +You probably shouldn't, at least directly. 
diff --git a/amplify/functions/deleteDocument/node_modules/@smithy/invalid-dependency/dist-cjs/index.js b/amplify/functions/deleteDocument/node_modules/@smithy/invalid-dependency/dist-cjs/index.js new file mode 100644 index 0000000..8eeb1d4 --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@smithy/invalid-dependency/dist-cjs/index.js @@ -0,0 +1,41 @@ +var __defProp = Object.defineProperty; +var __getOwnPropDesc = Object.getOwnPropertyDescriptor; +var __getOwnPropNames = Object.getOwnPropertyNames; +var __hasOwnProp = Object.prototype.hasOwnProperty; +var __name = (target, value) => __defProp(target, "name", { value, configurable: true }); +var __export = (target, all) => { + for (var name in all) + __defProp(target, name, { get: all[name], enumerable: true }); +}; +var __copyProps = (to, from, except, desc) => { + if (from && typeof from === "object" || typeof from === "function") { + for (let key of __getOwnPropNames(from)) + if (!__hasOwnProp.call(to, key) && key !== except) + __defProp(to, key, { get: () => from[key], enumerable: !(desc = __getOwnPropDesc(from, key)) || desc.enumerable }); + } + return to; +}; +var __toCommonJS = (mod) => __copyProps(__defProp({}, "__esModule", { value: true }), mod); + +// src/index.ts +var src_exports = {}; +__export(src_exports, { + invalidFunction: () => invalidFunction, + invalidProvider: () => invalidProvider +}); +module.exports = __toCommonJS(src_exports); + +// src/invalidFunction.ts +var invalidFunction = /* @__PURE__ */ __name((message) => () => { + throw new Error(message); +}, "invalidFunction"); + +// src/invalidProvider.ts +var invalidProvider = /* @__PURE__ */ __name((message) => () => Promise.reject(message), "invalidProvider"); +// Annotate the CommonJS export names for ESM import in node: + +0 && (module.exports = { + invalidFunction, + invalidProvider +}); + diff --git a/amplify/functions/deleteDocument/node_modules/@smithy/invalid-dependency/dist-cjs/invalidFunction.js 
b/amplify/functions/deleteDocument/node_modules/@smithy/invalid-dependency/dist-cjs/invalidFunction.js new file mode 100644 index 0000000..532e610 --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@smithy/invalid-dependency/dist-cjs/invalidFunction.js @@ -0,0 +1 @@ +module.exports = require("./index.js"); \ No newline at end of file diff --git a/amplify/functions/deleteDocument/node_modules/@smithy/invalid-dependency/dist-cjs/invalidProvider.js b/amplify/functions/deleteDocument/node_modules/@smithy/invalid-dependency/dist-cjs/invalidProvider.js new file mode 100644 index 0000000..532e610 --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@smithy/invalid-dependency/dist-cjs/invalidProvider.js @@ -0,0 +1 @@ +module.exports = require("./index.js"); \ No newline at end of file diff --git a/amplify/functions/deleteDocument/node_modules/@smithy/invalid-dependency/dist-es/index.js b/amplify/functions/deleteDocument/node_modules/@smithy/invalid-dependency/dist-es/index.js new file mode 100644 index 0000000..fa0f1a6 --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@smithy/invalid-dependency/dist-es/index.js @@ -0,0 +1,2 @@ +export * from "./invalidFunction"; +export * from "./invalidProvider"; diff --git a/amplify/functions/deleteDocument/node_modules/@smithy/invalid-dependency/dist-es/invalidFunction.js b/amplify/functions/deleteDocument/node_modules/@smithy/invalid-dependency/dist-es/invalidFunction.js new file mode 100644 index 0000000..676f9cb --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@smithy/invalid-dependency/dist-es/invalidFunction.js @@ -0,0 +1,3 @@ +export const invalidFunction = (message) => () => { + throw new Error(message); +}; diff --git a/amplify/functions/deleteDocument/node_modules/@smithy/invalid-dependency/dist-es/invalidProvider.js b/amplify/functions/deleteDocument/node_modules/@smithy/invalid-dependency/dist-es/invalidProvider.js new file mode 100644 index 0000000..5305a0b --- 
/dev/null +++ b/amplify/functions/deleteDocument/node_modules/@smithy/invalid-dependency/dist-es/invalidProvider.js @@ -0,0 +1 @@ +export const invalidProvider = (message) => () => Promise.reject(message); diff --git a/amplify/functions/deleteDocument/node_modules/@smithy/invalid-dependency/dist-types/index.d.ts b/amplify/functions/deleteDocument/node_modules/@smithy/invalid-dependency/dist-types/index.d.ts new file mode 100644 index 0000000..1c99a56 --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@smithy/invalid-dependency/dist-types/index.d.ts @@ -0,0 +1,8 @@ +/** + * @internal + */ +export * from "./invalidFunction"; +/** + * @internal + */ +export * from "./invalidProvider"; diff --git a/amplify/functions/deleteDocument/node_modules/@smithy/invalid-dependency/dist-types/invalidFunction.d.ts b/amplify/functions/deleteDocument/node_modules/@smithy/invalid-dependency/dist-types/invalidFunction.d.ts new file mode 100644 index 0000000..2118b32 --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@smithy/invalid-dependency/dist-types/invalidFunction.d.ts @@ -0,0 +1,4 @@ +/** + * @internal + */ +export declare const invalidFunction: (message: string) => () => never; diff --git a/amplify/functions/deleteDocument/node_modules/@smithy/invalid-dependency/dist-types/invalidProvider.d.ts b/amplify/functions/deleteDocument/node_modules/@smithy/invalid-dependency/dist-types/invalidProvider.d.ts new file mode 100644 index 0000000..3e9c28c --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@smithy/invalid-dependency/dist-types/invalidProvider.d.ts @@ -0,0 +1,5 @@ +import { Provider } from "@smithy/types"; +/** + * @internal + */ +export declare const invalidProvider: (message: string) => Provider; diff --git a/amplify/functions/deleteDocument/node_modules/@smithy/invalid-dependency/dist-types/ts3.4/index.d.ts b/amplify/functions/deleteDocument/node_modules/@smithy/invalid-dependency/dist-types/ts3.4/index.d.ts new file mode 100644 
index 0000000..6818f1c --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@smithy/invalid-dependency/dist-types/ts3.4/index.d.ts @@ -0,0 +1,8 @@ +/** + * @internal + */ +export * from "./invalidFunction"; +/** + * @internal + */ +export * from "./invalidProvider"; diff --git a/amplify/functions/deleteDocument/node_modules/@smithy/invalid-dependency/dist-types/ts3.4/invalidFunction.d.ts b/amplify/functions/deleteDocument/node_modules/@smithy/invalid-dependency/dist-types/ts3.4/invalidFunction.d.ts new file mode 100644 index 0000000..b0e8f32 --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@smithy/invalid-dependency/dist-types/ts3.4/invalidFunction.d.ts @@ -0,0 +1,4 @@ +/** + * @internal + */ +export declare const invalidFunction: (message: string) => () => never; diff --git a/amplify/functions/deleteDocument/node_modules/@smithy/invalid-dependency/dist-types/ts3.4/invalidProvider.d.ts b/amplify/functions/deleteDocument/node_modules/@smithy/invalid-dependency/dist-types/ts3.4/invalidProvider.d.ts new file mode 100644 index 0000000..765ee5a --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@smithy/invalid-dependency/dist-types/ts3.4/invalidProvider.d.ts @@ -0,0 +1,5 @@ +import { Provider } from "@smithy/types"; +/** + * @internal + */ +export declare const invalidProvider: (message: string) => Provider; diff --git a/amplify/functions/deleteDocument/node_modules/@smithy/invalid-dependency/package.json b/amplify/functions/deleteDocument/node_modules/@smithy/invalid-dependency/package.json new file mode 100644 index 0000000..4782ea4 --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@smithy/invalid-dependency/package.json @@ -0,0 +1,60 @@ +{ + "name": "@smithy/invalid-dependency", + "version": "4.0.2", + "scripts": { + "build": "concurrently 'yarn:build:cjs' 'yarn:build:es' 'yarn:build:types && yarn build:types:downlevel'", + "build:cjs": "node ../../scripts/inline invalid-dependency", + "build:es": "yarn 
g:tsc -p tsconfig.es.json", + "build:types": "yarn g:tsc -p tsconfig.types.json", + "build:types:downlevel": "rimraf dist-types/ts3.4 && downlevel-dts dist-types dist-types/ts3.4", + "stage-release": "rimraf ./.release && yarn pack && mkdir ./.release && tar zxvf ./package.tgz --directory ./.release && rm ./package.tgz", + "clean": "rimraf ./dist-* && rimraf *.tsbuildinfo || exit 0", + "lint": "eslint -c ../../.eslintrc.js \"src/**/*.ts\"", + "format": "prettier --config ../../prettier.config.js --ignore-path ../../.prettierignore --write \"**/*.{ts,md,json}\"", + "test": "yarn g:vitest run", + "test:watch": "yarn g:vitest watch" + }, + "main": "./dist-cjs/index.js", + "module": "./dist-es/index.js", + "types": "./dist-types/index.d.ts", + "author": { + "name": "AWS SDK for JavaScript Team", + "url": "https://aws.amazon.com/javascript/" + }, + "license": "Apache-2.0", + "dependencies": { + "@smithy/types": "^4.2.0", + "tslib": "^2.6.2" + }, + "typesVersions": { + "<4.0": { + "dist-types/*": [ + "dist-types/ts3.4/*" + ] + } + }, + "files": [ + "dist-*/**" + ], + "homepage": "https://github.com/smithy-lang/smithy-typescript/tree/main/packages/invalid-dependency", + "repository": { + "type": "git", + "url": "https://github.com/smithy-lang/smithy-typescript.git", + "directory": "packages/invalid-dependency" + }, + "devDependencies": { + "concurrently": "7.0.0", + "downlevel-dts": "0.10.1", + "rimraf": "3.0.2", + "typedoc": "0.23.23" + }, + "typedoc": { + "entryPoint": "src/index.ts" + }, + "publishConfig": { + "directory": ".release/package" + }, + "engines": { + "node": ">=18.0.0" + } +} \ No newline at end of file diff --git a/amplify/functions/deleteDocument/node_modules/@smithy/is-array-buffer/LICENSE b/amplify/functions/deleteDocument/node_modules/@smithy/is-array-buffer/LICENSE new file mode 100644 index 0000000..7b6491b --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@smithy/is-array-buffer/LICENSE @@ -0,0 +1,201 @@ +Apache License + Version 
2.0, January 2004 + http://www.apache.org/licenses/ + + TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION + + 1. Definitions. + + "License" shall mean the terms and conditions for use, reproduction, + and distribution as defined by Sections 1 through 9 of this document. + + "Licensor" shall mean the copyright owner or entity authorized by + the copyright owner that is granting the License. + + "Legal Entity" shall mean the union of the acting entity and all + other entities that control, are controlled by, or are under common + control with that entity. For the purposes of this definition, + "control" means (i) the power, direct or indirect, to cause the + direction or management of such entity, whether by contract or + otherwise, or (ii) ownership of fifty percent (50%) or more of the + outstanding shares, or (iii) beneficial ownership of such entity. + + "You" (or "Your") shall mean an individual or Legal Entity + exercising permissions granted by this License. + + "Source" form shall mean the preferred form for making modifications, + including but not limited to software source code, documentation + source, and configuration files. + + "Object" form shall mean any form resulting from mechanical + transformation or translation of a Source form, including but + not limited to compiled object code, generated documentation, + and conversions to other media types. + + "Work" shall mean the work of authorship, whether in Source or + Object form, made available under the License, as indicated by a + copyright notice that is included in or attached to the work + (an example is provided in the Appendix below). + + "Derivative Works" shall mean any work, whether in Source or Object + form, that is based on (or derived from) the Work and for which the + editorial revisions, annotations, elaborations, or other modifications + represent, as a whole, an original work of authorship. 
For the purposes + of this License, Derivative Works shall not include works that remain + separable from, or merely link (or bind by name) to the interfaces of, + the Work and Derivative Works thereof. + + "Contribution" shall mean any work of authorship, including + the original version of the Work and any modifications or additions + to that Work or Derivative Works thereof, that is intentionally + submitted to Licensor for inclusion in the Work by the copyright owner + or by an individual or Legal Entity authorized to submit on behalf of + the copyright owner. For the purposes of this definition, "submitted" + means any form of electronic, verbal, or written communication sent + to the Licensor or its representatives, including but not limited to + communication on electronic mailing lists, source code control systems, + and issue tracking systems that are managed by, or on behalf of, the + Licensor for the purpose of discussing and improving the Work, but + excluding communication that is conspicuously marked or otherwise + designated in writing by the copyright owner as "Not a Contribution." + + "Contributor" shall mean Licensor and any individual or Legal Entity + on behalf of whom a Contribution has been received by Licensor and + subsequently incorporated within the Work. + + 2. Grant of Copyright License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + copyright license to reproduce, prepare Derivative Works of, + publicly display, publicly perform, sublicense, and distribute the + Work and such Derivative Works in Source or Object form. + + 3. Grant of Patent License. 
Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + (except as stated in this section) patent license to make, have made, + use, offer to sell, sell, import, and otherwise transfer the Work, + where such license applies only to those patent claims licensable + by such Contributor that are necessarily infringed by their + Contribution(s) alone or by combination of their Contribution(s) + with the Work to which such Contribution(s) was submitted. If You + institute patent litigation against any entity (including a + cross-claim or counterclaim in a lawsuit) alleging that the Work + or a Contribution incorporated within the Work constitutes direct + or contributory patent infringement, then any patent licenses + granted to You under this License for that Work shall terminate + as of the date such litigation is filed. + + 4. Redistribution. You may reproduce and distribute copies of the + Work or Derivative Works thereof in any medium, with or without + modifications, and in Source or Object form, provided that You + meet the following conditions: + + (a) You must give any other recipients of the Work or + Derivative Works a copy of this License; and + + (b) You must cause any modified files to carry prominent notices + stating that You changed the files; and + + (c) You must retain, in the Source form of any Derivative Works + that You distribute, all copyright, patent, trademark, and + attribution notices from the Source form of the Work, + excluding those notices that do not pertain to any part of + the Derivative Works; and + + (d) If the Work includes a "NOTICE" text file as part of its + distribution, then any Derivative Works that You distribute must + include a readable copy of the attribution notices contained + within such NOTICE file, excluding those notices that do not + pertain to any part of the Derivative Works, in at least one + of 
the following places: within a NOTICE text file distributed + as part of the Derivative Works; within the Source form or + documentation, if provided along with the Derivative Works; or, + within a display generated by the Derivative Works, if and + wherever such third-party notices normally appear. The contents + of the NOTICE file are for informational purposes only and + do not modify the License. You may add Your own attribution + notices within Derivative Works that You distribute, alongside + or as an addendum to the NOTICE text from the Work, provided + that such additional attribution notices cannot be construed + as modifying the License. + + You may add Your own copyright statement to Your modifications and + may provide additional or different license terms and conditions + for use, reproduction, or distribution of Your modifications, or + for any such Derivative Works as a whole, provided Your use, + reproduction, and distribution of the Work otherwise complies with + the conditions stated in this License. + + 5. Submission of Contributions. Unless You explicitly state otherwise, + any Contribution intentionally submitted for inclusion in the Work + by You to the Licensor shall be under the terms and conditions of + this License, without any additional terms or conditions. + Notwithstanding the above, nothing herein shall supersede or modify + the terms of any separate license agreement you may have executed + with Licensor regarding such Contributions. + + 6. Trademarks. This License does not grant permission to use the trade + names, trademarks, service marks, or product names of the Licensor, + except as required for reasonable and customary use in describing the + origin of the Work and reproducing the content of the NOTICE file. + + 7. Disclaimer of Warranty. 
Unless required by applicable law or + agreed to in writing, Licensor provides the Work (and each + Contributor provides its Contributions) on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or + implied, including, without limitation, any warranties or conditions + of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A + PARTICULAR PURPOSE. You are solely responsible for determining the + appropriateness of using or redistributing the Work and assume any + risks associated with Your exercise of permissions under this License. + + 8. Limitation of Liability. In no event and under no legal theory, + whether in tort (including negligence), contract, or otherwise, + unless required by applicable law (such as deliberate and grossly + negligent acts) or agreed to in writing, shall any Contributor be + liable to You for damages, including any direct, indirect, special, + incidental, or consequential damages of any character arising as a + result of this License or out of the use or inability to use the + Work (including but not limited to damages for loss of goodwill, + work stoppage, computer failure or malfunction, or any and all + other commercial damages or losses), even if such Contributor + has been advised of the possibility of such damages. + + 9. Accepting Warranty or Additional Liability. While redistributing + the Work or Derivative Works thereof, You may choose to offer, + and charge a fee for, acceptance of support, warranty, indemnity, + or other liability obligations and/or rights consistent with this + License. However, in accepting such obligations, You may act only + on Your own behalf and on Your sole responsibility, not on behalf + of any other Contributor, and only if You agree to indemnify, + defend, and hold each Contributor harmless for any liability + incurred by, or claims asserted against, such Contributor by reason + of your accepting any such warranty or additional liability. 
+ + END OF TERMS AND CONDITIONS + + APPENDIX: How to apply the Apache License to your work. + + To apply the Apache License to your work, attach the following + boilerplate notice, with the fields enclosed by brackets "{}" + replaced with your own identifying information. (Don't include + the brackets!) The text should be enclosed in the appropriate + comment syntax for the file format. We also recommend that a + file or class name and description of purpose be included on the + same "printed page" as the copyright notice for easier + identification within third-party archives. + + Copyright 2018-2020 Amazon.com, Inc. or its affiliates. All Rights Reserved. + + Licensed under the Apache License, Version 2.0 (the "License"); + you may not use this file except in compliance with the License. + You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + + Unless required by applicable law or agreed to in writing, software + distributed under the License is distributed on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + See the License for the specific language governing permissions and + limitations under the License. \ No newline at end of file diff --git a/amplify/functions/deleteDocument/node_modules/@smithy/is-array-buffer/README.md b/amplify/functions/deleteDocument/node_modules/@smithy/is-array-buffer/README.md new file mode 100644 index 0000000..31853f2 --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@smithy/is-array-buffer/README.md @@ -0,0 +1,10 @@ +# @smithy/is-array-buffer + +[![NPM version](https://img.shields.io/npm/v/@smithy/is-array-buffer/latest.svg)](https://www.npmjs.com/package/@smithy/is-array-buffer) +[![NPM downloads](https://img.shields.io/npm/dm/@smithy/is-array-buffer.svg)](https://www.npmjs.com/package/@smithy/is-array-buffer) + +> An internal package + +## Usage + +You probably shouldn't, at least directly. 
diff --git a/amplify/functions/deleteDocument/node_modules/@smithy/is-array-buffer/dist-cjs/index.js b/amplify/functions/deleteDocument/node_modules/@smithy/is-array-buffer/dist-cjs/index.js new file mode 100644 index 0000000..5d792e7 --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@smithy/is-array-buffer/dist-cjs/index.js @@ -0,0 +1,32 @@ +var __defProp = Object.defineProperty; +var __getOwnPropDesc = Object.getOwnPropertyDescriptor; +var __getOwnPropNames = Object.getOwnPropertyNames; +var __hasOwnProp = Object.prototype.hasOwnProperty; +var __name = (target, value) => __defProp(target, "name", { value, configurable: true }); +var __export = (target, all) => { + for (var name in all) + __defProp(target, name, { get: all[name], enumerable: true }); +}; +var __copyProps = (to, from, except, desc) => { + if (from && typeof from === "object" || typeof from === "function") { + for (let key of __getOwnPropNames(from)) + if (!__hasOwnProp.call(to, key) && key !== except) + __defProp(to, key, { get: () => from[key], enumerable: !(desc = __getOwnPropDesc(from, key)) || desc.enumerable }); + } + return to; +}; +var __toCommonJS = (mod) => __copyProps(__defProp({}, "__esModule", { value: true }), mod); + +// src/index.ts +var src_exports = {}; +__export(src_exports, { + isArrayBuffer: () => isArrayBuffer +}); +module.exports = __toCommonJS(src_exports); +var isArrayBuffer = /* @__PURE__ */ __name((arg) => typeof ArrayBuffer === "function" && arg instanceof ArrayBuffer || Object.prototype.toString.call(arg) === "[object ArrayBuffer]", "isArrayBuffer"); +// Annotate the CommonJS export names for ESM import in node: + +0 && (module.exports = { + isArrayBuffer +}); + diff --git a/amplify/functions/deleteDocument/node_modules/@smithy/is-array-buffer/dist-es/index.js b/amplify/functions/deleteDocument/node_modules/@smithy/is-array-buffer/dist-es/index.js new file mode 100644 index 0000000..8096cca --- /dev/null +++ 
b/amplify/functions/deleteDocument/node_modules/@smithy/is-array-buffer/dist-es/index.js @@ -0,0 +1,2 @@ +export const isArrayBuffer = (arg) => (typeof ArrayBuffer === "function" && arg instanceof ArrayBuffer) || + Object.prototype.toString.call(arg) === "[object ArrayBuffer]"; diff --git a/amplify/functions/deleteDocument/node_modules/@smithy/is-array-buffer/dist-types/index.d.ts b/amplify/functions/deleteDocument/node_modules/@smithy/is-array-buffer/dist-types/index.d.ts new file mode 100644 index 0000000..64f452e --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@smithy/is-array-buffer/dist-types/index.d.ts @@ -0,0 +1,4 @@ +/** + * @internal + */ +export declare const isArrayBuffer: (arg: any) => arg is ArrayBuffer; diff --git a/amplify/functions/deleteDocument/node_modules/@smithy/is-array-buffer/dist-types/ts3.4/index.d.ts b/amplify/functions/deleteDocument/node_modules/@smithy/is-array-buffer/dist-types/ts3.4/index.d.ts new file mode 100644 index 0000000..ca8fd6b --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@smithy/is-array-buffer/dist-types/ts3.4/index.d.ts @@ -0,0 +1,4 @@ +/** + * @internal + */ +export declare const isArrayBuffer: (arg: any) => arg is ArrayBuffer; diff --git a/amplify/functions/deleteDocument/node_modules/@smithy/is-array-buffer/package.json b/amplify/functions/deleteDocument/node_modules/@smithy/is-array-buffer/package.json new file mode 100644 index 0000000..93a468c --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@smithy/is-array-buffer/package.json @@ -0,0 +1,60 @@ +{ + "name": "@smithy/is-array-buffer", + "version": "4.0.0", + "description": "Provides a function for detecting if an argument is an ArrayBuffer", + "scripts": { + "build": "concurrently 'yarn:build:cjs' 'yarn:build:es' 'yarn:build:types && yarn build:types:downlevel'", + "build:cjs": "node ../../scripts/inline is-array-buffer", + "build:es": "yarn g:tsc -p tsconfig.es.json", + "build:types": "yarn g:tsc -p 
tsconfig.types.json", + "build:types:downlevel": "rimraf dist-types/ts3.4 && downlevel-dts dist-types dist-types/ts3.4", + "stage-release": "rimraf ./.release && yarn pack && mkdir ./.release && tar zxvf ./package.tgz --directory ./.release && rm ./package.tgz", + "clean": "rimraf ./dist-* && rimraf *.tsbuildinfo || exit 0", + "lint": "eslint -c ../../.eslintrc.js \"src/**/*.ts\"", + "format": "prettier --config ../../prettier.config.js --ignore-path ../.prettierignore --write \"**/*.{ts,md,json}\"", + "test": "yarn g:vitest run", + "test:watch": "yarn g:vitest watch" + }, + "author": { + "name": "AWS SDK for JavaScript Team", + "url": "https://aws.amazon.com/javascript/" + }, + "license": "Apache-2.0", + "main": "./dist-cjs/index.js", + "module": "./dist-es/index.js", + "types": "./dist-types/index.d.ts", + "dependencies": { + "tslib": "^2.6.2" + }, + "engines": { + "node": ">=18.0.0" + }, + "typesVersions": { + "<4.0": { + "dist-types/*": [ + "dist-types/ts3.4/*" + ] + } + }, + "files": [ + "dist-*/**" + ], + "homepage": "https://github.com/awslabs/smithy-typescript/tree/main/packages/is-array-buffer", + "repository": { + "type": "git", + "url": "https://github.com/awslabs/smithy-typescript.git", + "directory": "packages/is-array-buffer" + }, + "devDependencies": { + "concurrently": "7.0.0", + "downlevel-dts": "0.10.1", + "rimraf": "3.0.2", + "typedoc": "0.23.23" + }, + "typedoc": { + "entryPoint": "src/index.ts" + }, + "publishConfig": { + "directory": ".release/package" + } +} \ No newline at end of file diff --git a/amplify/functions/deleteDocument/node_modules/@smithy/middleware-content-length/LICENSE b/amplify/functions/deleteDocument/node_modules/@smithy/middleware-content-length/LICENSE new file mode 100644 index 0000000..7b6491b --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@smithy/middleware-content-length/LICENSE @@ -0,0 +1,201 @@ +Apache License + Version 2.0, January 2004 + http://www.apache.org/licenses/ + + TERMS AND CONDITIONS 
FOR USE, REPRODUCTION, AND DISTRIBUTION + + 1. Definitions. + + "License" shall mean the terms and conditions for use, reproduction, + and distribution as defined by Sections 1 through 9 of this document. + + "Licensor" shall mean the copyright owner or entity authorized by + the copyright owner that is granting the License. + + "Legal Entity" shall mean the union of the acting entity and all + other entities that control, are controlled by, or are under common + control with that entity. For the purposes of this definition, + "control" means (i) the power, direct or indirect, to cause the + direction or management of such entity, whether by contract or + otherwise, or (ii) ownership of fifty percent (50%) or more of the + outstanding shares, or (iii) beneficial ownership of such entity. + + "You" (or "Your") shall mean an individual or Legal Entity + exercising permissions granted by this License. + + "Source" form shall mean the preferred form for making modifications, + including but not limited to software source code, documentation + source, and configuration files. + + "Object" form shall mean any form resulting from mechanical + transformation or translation of a Source form, including but + not limited to compiled object code, generated documentation, + and conversions to other media types. + + "Work" shall mean the work of authorship, whether in Source or + Object form, made available under the License, as indicated by a + copyright notice that is included in or attached to the work + (an example is provided in the Appendix below). + + "Derivative Works" shall mean any work, whether in Source or Object + form, that is based on (or derived from) the Work and for which the + editorial revisions, annotations, elaborations, or other modifications + represent, as a whole, an original work of authorship. 
For the purposes + of this License, Derivative Works shall not include works that remain + separable from, or merely link (or bind by name) to the interfaces of, + the Work and Derivative Works thereof. + + "Contribution" shall mean any work of authorship, including + the original version of the Work and any modifications or additions + to that Work or Derivative Works thereof, that is intentionally + submitted to Licensor for inclusion in the Work by the copyright owner + or by an individual or Legal Entity authorized to submit on behalf of + the copyright owner. For the purposes of this definition, "submitted" + means any form of electronic, verbal, or written communication sent + to the Licensor or its representatives, including but not limited to + communication on electronic mailing lists, source code control systems, + and issue tracking systems that are managed by, or on behalf of, the + Licensor for the purpose of discussing and improving the Work, but + excluding communication that is conspicuously marked or otherwise + designated in writing by the copyright owner as "Not a Contribution." + + "Contributor" shall mean Licensor and any individual or Legal Entity + on behalf of whom a Contribution has been received by Licensor and + subsequently incorporated within the Work. + + 2. Grant of Copyright License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + copyright license to reproduce, prepare Derivative Works of, + publicly display, publicly perform, sublicense, and distribute the + Work and such Derivative Works in Source or Object form. + + 3. Grant of Patent License. 
Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + (except as stated in this section) patent license to make, have made, + use, offer to sell, sell, import, and otherwise transfer the Work, + where such license applies only to those patent claims licensable + by such Contributor that are necessarily infringed by their + Contribution(s) alone or by combination of their Contribution(s) + with the Work to which such Contribution(s) was submitted. If You + institute patent litigation against any entity (including a + cross-claim or counterclaim in a lawsuit) alleging that the Work + or a Contribution incorporated within the Work constitutes direct + or contributory patent infringement, then any patent licenses + granted to You under this License for that Work shall terminate + as of the date such litigation is filed. + + 4. Redistribution. You may reproduce and distribute copies of the + Work or Derivative Works thereof in any medium, with or without + modifications, and in Source or Object form, provided that You + meet the following conditions: + + (a) You must give any other recipients of the Work or + Derivative Works a copy of this License; and + + (b) You must cause any modified files to carry prominent notices + stating that You changed the files; and + + (c) You must retain, in the Source form of any Derivative Works + that You distribute, all copyright, patent, trademark, and + attribution notices from the Source form of the Work, + excluding those notices that do not pertain to any part of + the Derivative Works; and + + (d) If the Work includes a "NOTICE" text file as part of its + distribution, then any Derivative Works that You distribute must + include a readable copy of the attribution notices contained + within such NOTICE file, excluding those notices that do not + pertain to any part of the Derivative Works, in at least one + of 
the following places: within a NOTICE text file distributed + as part of the Derivative Works; within the Source form or + documentation, if provided along with the Derivative Works; or, + within a display generated by the Derivative Works, if and + wherever such third-party notices normally appear. The contents + of the NOTICE file are for informational purposes only and + do not modify the License. You may add Your own attribution + notices within Derivative Works that You distribute, alongside + or as an addendum to the NOTICE text from the Work, provided + that such additional attribution notices cannot be construed + as modifying the License. + + You may add Your own copyright statement to Your modifications and + may provide additional or different license terms and conditions + for use, reproduction, or distribution of Your modifications, or + for any such Derivative Works as a whole, provided Your use, + reproduction, and distribution of the Work otherwise complies with + the conditions stated in this License. + + 5. Submission of Contributions. Unless You explicitly state otherwise, + any Contribution intentionally submitted for inclusion in the Work + by You to the Licensor shall be under the terms and conditions of + this License, without any additional terms or conditions. + Notwithstanding the above, nothing herein shall supersede or modify + the terms of any separate license agreement you may have executed + with Licensor regarding such Contributions. + + 6. Trademarks. This License does not grant permission to use the trade + names, trademarks, service marks, or product names of the Licensor, + except as required for reasonable and customary use in describing the + origin of the Work and reproducing the content of the NOTICE file. + + 7. Disclaimer of Warranty. 
Unless required by applicable law or + agreed to in writing, Licensor provides the Work (and each + Contributor provides its Contributions) on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or + implied, including, without limitation, any warranties or conditions + of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A + PARTICULAR PURPOSE. You are solely responsible for determining the + appropriateness of using or redistributing the Work and assume any + risks associated with Your exercise of permissions under this License. + + 8. Limitation of Liability. In no event and under no legal theory, + whether in tort (including negligence), contract, or otherwise, + unless required by applicable law (such as deliberate and grossly + negligent acts) or agreed to in writing, shall any Contributor be + liable to You for damages, including any direct, indirect, special, + incidental, or consequential damages of any character arising as a + result of this License or out of the use or inability to use the + Work (including but not limited to damages for loss of goodwill, + work stoppage, computer failure or malfunction, or any and all + other commercial damages or losses), even if such Contributor + has been advised of the possibility of such damages. + + 9. Accepting Warranty or Additional Liability. While redistributing + the Work or Derivative Works thereof, You may choose to offer, + and charge a fee for, acceptance of support, warranty, indemnity, + or other liability obligations and/or rights consistent with this + License. However, in accepting such obligations, You may act only + on Your own behalf and on Your sole responsibility, not on behalf + of any other Contributor, and only if You agree to indemnify, + defend, and hold each Contributor harmless for any liability + incurred by, or claims asserted against, such Contributor by reason + of your accepting any such warranty or additional liability. 
+ + END OF TERMS AND CONDITIONS + + APPENDIX: How to apply the Apache License to your work. + + To apply the Apache License to your work, attach the following + boilerplate notice, with the fields enclosed by brackets "{}" + replaced with your own identifying information. (Don't include + the brackets!) The text should be enclosed in the appropriate + comment syntax for the file format. We also recommend that a + file or class name and description of purpose be included on the + same "printed page" as the copyright notice for easier + identification within third-party archives. + + Copyright 2018-2020 Amazon.com, Inc. or its affiliates. All Rights Reserved. + + Licensed under the Apache License, Version 2.0 (the "License"); + you may not use this file except in compliance with the License. + You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + + Unless required by applicable law or agreed to in writing, software + distributed under the License is distributed on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + See the License for the specific language governing permissions and + limitations under the License. 
\ No newline at end of file diff --git a/amplify/functions/deleteDocument/node_modules/@smithy/middleware-content-length/README.md b/amplify/functions/deleteDocument/node_modules/@smithy/middleware-content-length/README.md new file mode 100644 index 0000000..2d40d92 --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@smithy/middleware-content-length/README.md @@ -0,0 +1,4 @@ +# @smithy/middleware-content-length + +[![NPM version](https://img.shields.io/npm/v/@smithy/middleware-content-length/latest.svg)](https://www.npmjs.com/package/@smithy/middleware-content-length) +[![NPM downloads](https://img.shields.io/npm/dm/@smithy/middleware-content-length.svg)](https://www.npmjs.com/package/@smithy/middleware-content-length) diff --git a/amplify/functions/deleteDocument/node_modules/@smithy/middleware-content-length/dist-cjs/index.js b/amplify/functions/deleteDocument/node_modules/@smithy/middleware-content-length/dist-cjs/index.js new file mode 100644 index 0000000..9585153 --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@smithy/middleware-content-length/dist-cjs/index.js @@ -0,0 +1,71 @@ +var __defProp = Object.defineProperty; +var __getOwnPropDesc = Object.getOwnPropertyDescriptor; +var __getOwnPropNames = Object.getOwnPropertyNames; +var __hasOwnProp = Object.prototype.hasOwnProperty; +var __name = (target, value) => __defProp(target, "name", { value, configurable: true }); +var __export = (target, all) => { + for (var name in all) + __defProp(target, name, { get: all[name], enumerable: true }); +}; +var __copyProps = (to, from, except, desc) => { + if (from && typeof from === "object" || typeof from === "function") { + for (let key of __getOwnPropNames(from)) + if (!__hasOwnProp.call(to, key) && key !== except) + __defProp(to, key, { get: () => from[key], enumerable: !(desc = __getOwnPropDesc(from, key)) || desc.enumerable }); + } + return to; +}; +var __toCommonJS = (mod) => __copyProps(__defProp({}, "__esModule", { value: true }), 
mod); + +// src/index.ts +var src_exports = {}; +__export(src_exports, { + contentLengthMiddleware: () => contentLengthMiddleware, + contentLengthMiddlewareOptions: () => contentLengthMiddlewareOptions, + getContentLengthPlugin: () => getContentLengthPlugin +}); +module.exports = __toCommonJS(src_exports); +var import_protocol_http = require("@smithy/protocol-http"); +var CONTENT_LENGTH_HEADER = "content-length"; +function contentLengthMiddleware(bodyLengthChecker) { + return (next) => async (args) => { + const request = args.request; + if (import_protocol_http.HttpRequest.isInstance(request)) { + const { body, headers } = request; + if (body && Object.keys(headers).map((str) => str.toLowerCase()).indexOf(CONTENT_LENGTH_HEADER) === -1) { + try { + const length = bodyLengthChecker(body); + request.headers = { + ...request.headers, + [CONTENT_LENGTH_HEADER]: String(length) + }; + } catch (error) { + } + } + } + return next({ + ...args, + request + }); + }; +} +__name(contentLengthMiddleware, "contentLengthMiddleware"); +var contentLengthMiddlewareOptions = { + step: "build", + tags: ["SET_CONTENT_LENGTH", "CONTENT_LENGTH"], + name: "contentLengthMiddleware", + override: true +}; +var getContentLengthPlugin = /* @__PURE__ */ __name((options) => ({ + applyToStack: (clientStack) => { + clientStack.add(contentLengthMiddleware(options.bodyLengthChecker), contentLengthMiddlewareOptions); + } +}), "getContentLengthPlugin"); +// Annotate the CommonJS export names for ESM import in node: + +0 && (module.exports = { + contentLengthMiddleware, + contentLengthMiddlewareOptions, + getContentLengthPlugin +}); + diff --git a/amplify/functions/deleteDocument/node_modules/@smithy/middleware-content-length/dist-es/index.js b/amplify/functions/deleteDocument/node_modules/@smithy/middleware-content-length/dist-es/index.js new file mode 100644 index 0000000..fa18e71 --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@smithy/middleware-content-length/dist-es/index.js @@ 
-0,0 +1,39 @@ +import { HttpRequest } from "@smithy/protocol-http"; +const CONTENT_LENGTH_HEADER = "content-length"; +export function contentLengthMiddleware(bodyLengthChecker) { + return (next) => async (args) => { + const request = args.request; + if (HttpRequest.isInstance(request)) { + const { body, headers } = request; + if (body && + Object.keys(headers) + .map((str) => str.toLowerCase()) + .indexOf(CONTENT_LENGTH_HEADER) === -1) { + try { + const length = bodyLengthChecker(body); + request.headers = { + ...request.headers, + [CONTENT_LENGTH_HEADER]: String(length), + }; + } + catch (error) { + } + } + } + return next({ + ...args, + request, + }); + }; +} +export const contentLengthMiddlewareOptions = { + step: "build", + tags: ["SET_CONTENT_LENGTH", "CONTENT_LENGTH"], + name: "contentLengthMiddleware", + override: true, +}; +export const getContentLengthPlugin = (options) => ({ + applyToStack: (clientStack) => { + clientStack.add(contentLengthMiddleware(options.bodyLengthChecker), contentLengthMiddlewareOptions); + }, +}); diff --git a/amplify/functions/deleteDocument/node_modules/@smithy/middleware-content-length/dist-types/index.d.ts b/amplify/functions/deleteDocument/node_modules/@smithy/middleware-content-length/dist-types/index.d.ts new file mode 100644 index 0000000..91a7000 --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@smithy/middleware-content-length/dist-types/index.d.ts @@ -0,0 +1,6 @@ +import { BodyLengthCalculator, BuildHandlerOptions, BuildMiddleware, Pluggable } from "@smithy/types"; +export declare function contentLengthMiddleware(bodyLengthChecker: BodyLengthCalculator): BuildMiddleware; +export declare const contentLengthMiddlewareOptions: BuildHandlerOptions; +export declare const getContentLengthPlugin: (options: { + bodyLengthChecker: BodyLengthCalculator; +}) => Pluggable; diff --git a/amplify/functions/deleteDocument/node_modules/@smithy/middleware-content-length/dist-types/ts3.4/index.d.ts 
b/amplify/functions/deleteDocument/node_modules/@smithy/middleware-content-length/dist-types/ts3.4/index.d.ts new file mode 100644 index 0000000..10e1e18 --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@smithy/middleware-content-length/dist-types/ts3.4/index.d.ts @@ -0,0 +1,6 @@ +import { BodyLengthCalculator, BuildHandlerOptions, BuildMiddleware, Pluggable } from "@smithy/types"; +export declare function contentLengthMiddleware(bodyLengthChecker: BodyLengthCalculator): BuildMiddleware; +export declare const contentLengthMiddlewareOptions: BuildHandlerOptions; +export declare const getContentLengthPlugin: (options: { + bodyLengthChecker: BodyLengthCalculator; +}) => Pluggable; diff --git a/amplify/functions/deleteDocument/node_modules/@smithy/middleware-content-length/package.json b/amplify/functions/deleteDocument/node_modules/@smithy/middleware-content-length/package.json new file mode 100644 index 0000000..807c95b --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@smithy/middleware-content-length/package.json @@ -0,0 +1,63 @@ +{ + "name": "@smithy/middleware-content-length", + "version": "4.0.2", + "scripts": { + "build": "concurrently 'yarn:build:cjs' 'yarn:build:es' 'yarn:build:types && yarn build:types:downlevel'", + "build:cjs": "node ../../scripts/inline middleware-content-length", + "build:es": "yarn g:tsc -p tsconfig.es.json", + "build:types": "yarn g:tsc -p tsconfig.types.json", + "build:types:downlevel": "rimraf dist-types/ts3.4 && downlevel-dts dist-types dist-types/ts3.4", + "stage-release": "rimraf ./.release && yarn pack && mkdir ./.release && tar zxvf ./package.tgz --directory ./.release && rm ./package.tgz", + "clean": "rimraf ./dist-* && rimraf *.tsbuildinfo || exit 0", + "lint": "eslint -c ../../.eslintrc.js \"src/**/*.ts\"", + "format": "prettier --config ../../prettier.config.js --ignore-path ../../.prettierignore --write \"**/*.{ts,md,json}\"", + "test": "exit 0", + "test:integration": "yarn g:vitest run -c 
vitest.config.integ.ts", + "test:integration:watch": "yarn g:vitest watch -c vitest.config.integ.ts" + }, + "main": "./dist-cjs/index.js", + "module": "./dist-es/index.js", + "types": "./dist-types/index.d.ts", + "author": { + "name": "AWS SDK for JavaScript Team", + "url": "https://aws.amazon.com/javascript/" + }, + "license": "Apache-2.0", + "dependencies": { + "@smithy/protocol-http": "^5.1.0", + "@smithy/types": "^4.2.0", + "tslib": "^2.6.2" + }, + "engines": { + "node": ">=18.0.0" + }, + "typesVersions": { + "<4.0": { + "dist-types/*": [ + "dist-types/ts3.4/*" + ] + } + }, + "files": [ + "dist-*/**" + ], + "homepage": "https://github.com/smithy-lang/smithy-typescript/tree/main/packages/middleware-content-length", + "repository": { + "type": "git", + "url": "https://github.com/smithy-lang/smithy-typescript.git", + "directory": "packages/middleware-content-length" + }, + "devDependencies": { + "@smithy/util-test": "^0.2.8", + "concurrently": "7.0.0", + "downlevel-dts": "0.10.1", + "rimraf": "3.0.2", + "typedoc": "0.23.23" + }, + "typedoc": { + "entryPoint": "src/index.ts" + }, + "publishConfig": { + "directory": ".release/package" + } +} \ No newline at end of file diff --git a/amplify/functions/deleteDocument/node_modules/@smithy/middleware-endpoint/LICENSE b/amplify/functions/deleteDocument/node_modules/@smithy/middleware-endpoint/LICENSE new file mode 100644 index 0000000..7b6491b --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@smithy/middleware-endpoint/LICENSE @@ -0,0 +1,201 @@ +Apache License + Version 2.0, January 2004 + http://www.apache.org/licenses/ + + TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION + + 1. Definitions. + + "License" shall mean the terms and conditions for use, reproduction, + and distribution as defined by Sections 1 through 9 of this document. + + "Licensor" shall mean the copyright owner or entity authorized by + the copyright owner that is granting the License. 
+ + "Legal Entity" shall mean the union of the acting entity and all + other entities that control, are controlled by, or are under common + control with that entity. For the purposes of this definition, + "control" means (i) the power, direct or indirect, to cause the + direction or management of such entity, whether by contract or + otherwise, or (ii) ownership of fifty percent (50%) or more of the + outstanding shares, or (iii) beneficial ownership of such entity. + + "You" (or "Your") shall mean an individual or Legal Entity + exercising permissions granted by this License. + + "Source" form shall mean the preferred form for making modifications, + including but not limited to software source code, documentation + source, and configuration files. + + "Object" form shall mean any form resulting from mechanical + transformation or translation of a Source form, including but + not limited to compiled object code, generated documentation, + and conversions to other media types. + + "Work" shall mean the work of authorship, whether in Source or + Object form, made available under the License, as indicated by a + copyright notice that is included in or attached to the work + (an example is provided in the Appendix below). + + "Derivative Works" shall mean any work, whether in Source or Object + form, that is based on (or derived from) the Work and for which the + editorial revisions, annotations, elaborations, or other modifications + represent, as a whole, an original work of authorship. For the purposes + of this License, Derivative Works shall not include works that remain + separable from, or merely link (or bind by name) to the interfaces of, + the Work and Derivative Works thereof. 
+ + "Contribution" shall mean any work of authorship, including + the original version of the Work and any modifications or additions + to that Work or Derivative Works thereof, that is intentionally + submitted to Licensor for inclusion in the Work by the copyright owner + or by an individual or Legal Entity authorized to submit on behalf of + the copyright owner. For the purposes of this definition, "submitted" + means any form of electronic, verbal, or written communication sent + to the Licensor or its representatives, including but not limited to + communication on electronic mailing lists, source code control systems, + and issue tracking systems that are managed by, or on behalf of, the + Licensor for the purpose of discussing and improving the Work, but + excluding communication that is conspicuously marked or otherwise + designated in writing by the copyright owner as "Not a Contribution." + + "Contributor" shall mean Licensor and any individual or Legal Entity + on behalf of whom a Contribution has been received by Licensor and + subsequently incorporated within the Work. + + 2. Grant of Copyright License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + copyright license to reproduce, prepare Derivative Works of, + publicly display, publicly perform, sublicense, and distribute the + Work and such Derivative Works in Source or Object form. + + 3. Grant of Patent License. 
Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + (except as stated in this section) patent license to make, have made, + use, offer to sell, sell, import, and otherwise transfer the Work, + where such license applies only to those patent claims licensable + by such Contributor that are necessarily infringed by their + Contribution(s) alone or by combination of their Contribution(s) + with the Work to which such Contribution(s) was submitted. If You + institute patent litigation against any entity (including a + cross-claim or counterclaim in a lawsuit) alleging that the Work + or a Contribution incorporated within the Work constitutes direct + or contributory patent infringement, then any patent licenses + granted to You under this License for that Work shall terminate + as of the date such litigation is filed. + + 4. Redistribution. You may reproduce and distribute copies of the + Work or Derivative Works thereof in any medium, with or without + modifications, and in Source or Object form, provided that You + meet the following conditions: + + (a) You must give any other recipients of the Work or + Derivative Works a copy of this License; and + + (b) You must cause any modified files to carry prominent notices + stating that You changed the files; and + + (c) You must retain, in the Source form of any Derivative Works + that You distribute, all copyright, patent, trademark, and + attribution notices from the Source form of the Work, + excluding those notices that do not pertain to any part of + the Derivative Works; and + + (d) If the Work includes a "NOTICE" text file as part of its + distribution, then any Derivative Works that You distribute must + include a readable copy of the attribution notices contained + within such NOTICE file, excluding those notices that do not + pertain to any part of the Derivative Works, in at least one + of 
the following places: within a NOTICE text file distributed + as part of the Derivative Works; within the Source form or + documentation, if provided along with the Derivative Works; or, + within a display generated by the Derivative Works, if and + wherever such third-party notices normally appear. The contents + of the NOTICE file are for informational purposes only and + do not modify the License. You may add Your own attribution + notices within Derivative Works that You distribute, alongside + or as an addendum to the NOTICE text from the Work, provided + that such additional attribution notices cannot be construed + as modifying the License. + + You may add Your own copyright statement to Your modifications and + may provide additional or different license terms and conditions + for use, reproduction, or distribution of Your modifications, or + for any such Derivative Works as a whole, provided Your use, + reproduction, and distribution of the Work otherwise complies with + the conditions stated in this License. + + 5. Submission of Contributions. Unless You explicitly state otherwise, + any Contribution intentionally submitted for inclusion in the Work + by You to the Licensor shall be under the terms and conditions of + this License, without any additional terms or conditions. + Notwithstanding the above, nothing herein shall supersede or modify + the terms of any separate license agreement you may have executed + with Licensor regarding such Contributions. + + 6. Trademarks. This License does not grant permission to use the trade + names, trademarks, service marks, or product names of the Licensor, + except as required for reasonable and customary use in describing the + origin of the Work and reproducing the content of the NOTICE file. + + 7. Disclaimer of Warranty. 
Unless required by applicable law or + agreed to in writing, Licensor provides the Work (and each + Contributor provides its Contributions) on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or + implied, including, without limitation, any warranties or conditions + of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A + PARTICULAR PURPOSE. You are solely responsible for determining the + appropriateness of using or redistributing the Work and assume any + risks associated with Your exercise of permissions under this License. + + 8. Limitation of Liability. In no event and under no legal theory, + whether in tort (including negligence), contract, or otherwise, + unless required by applicable law (such as deliberate and grossly + negligent acts) or agreed to in writing, shall any Contributor be + liable to You for damages, including any direct, indirect, special, + incidental, or consequential damages of any character arising as a + result of this License or out of the use or inability to use the + Work (including but not limited to damages for loss of goodwill, + work stoppage, computer failure or malfunction, or any and all + other commercial damages or losses), even if such Contributor + has been advised of the possibility of such damages. + + 9. Accepting Warranty or Additional Liability. While redistributing + the Work or Derivative Works thereof, You may choose to offer, + and charge a fee for, acceptance of support, warranty, indemnity, + or other liability obligations and/or rights consistent with this + License. However, in accepting such obligations, You may act only + on Your own behalf and on Your sole responsibility, not on behalf + of any other Contributor, and only if You agree to indemnify, + defend, and hold each Contributor harmless for any liability + incurred by, or claims asserted against, such Contributor by reason + of your accepting any such warranty or additional liability. 
+ + END OF TERMS AND CONDITIONS + + APPENDIX: How to apply the Apache License to your work. + + To apply the Apache License to your work, attach the following + boilerplate notice, with the fields enclosed by brackets "{}" + replaced with your own identifying information. (Don't include + the brackets!) The text should be enclosed in the appropriate + comment syntax for the file format. We also recommend that a + file or class name and description of purpose be included on the + same "printed page" as the copyright notice for easier + identification within third-party archives. + + Copyright 2018-2020 Amazon.com, Inc. or its affiliates. All Rights Reserved. + + Licensed under the Apache License, Version 2.0 (the "License"); + you may not use this file except in compliance with the License. + You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + + Unless required by applicable law or agreed to in writing, software + distributed under the License is distributed on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + See the License for the specific language governing permissions and + limitations under the License. \ No newline at end of file diff --git a/amplify/functions/deleteDocument/node_modules/@smithy/middleware-endpoint/README.md b/amplify/functions/deleteDocument/node_modules/@smithy/middleware-endpoint/README.md new file mode 100644 index 0000000..e03cbb2 --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@smithy/middleware-endpoint/README.md @@ -0,0 +1,10 @@ +# @smithy/middleware-endpoint + +[![NPM version](https://img.shields.io/npm/v/@smithy/middleware-endpoint/latest.svg)](https://www.npmjs.com/package/@smithy/middleware-endpoint) +[![NPM downloads](https://img.shields.io/npm/dm/@smithy/middleware-endpoint.svg)](https://www.npmjs.com/package/@smithy/middleware-endpoint) + +> An internal package + +## Usage + +You probably shouldn't, at least directly. 
diff --git a/amplify/functions/deleteDocument/node_modules/@smithy/middleware-endpoint/dist-cjs/adaptors/createConfigValueProvider.js b/amplify/functions/deleteDocument/node_modules/@smithy/middleware-endpoint/dist-cjs/adaptors/createConfigValueProvider.js new file mode 100644 index 0000000..0440577 --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@smithy/middleware-endpoint/dist-cjs/adaptors/createConfigValueProvider.js @@ -0,0 +1 @@ +module.exports = require("../index.js"); \ No newline at end of file diff --git a/amplify/functions/deleteDocument/node_modules/@smithy/middleware-endpoint/dist-cjs/adaptors/getEndpointFromConfig.browser.js b/amplify/functions/deleteDocument/node_modules/@smithy/middleware-endpoint/dist-cjs/adaptors/getEndpointFromConfig.browser.js new file mode 100644 index 0000000..9b578a7 --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@smithy/middleware-endpoint/dist-cjs/adaptors/getEndpointFromConfig.browser.js @@ -0,0 +1,5 @@ +"use strict"; +Object.defineProperty(exports, "__esModule", { value: true }); +exports.getEndpointFromConfig = void 0; +const getEndpointFromConfig = async (serviceId) => undefined; +exports.getEndpointFromConfig = getEndpointFromConfig; diff --git a/amplify/functions/deleteDocument/node_modules/@smithy/middleware-endpoint/dist-cjs/adaptors/getEndpointFromConfig.js b/amplify/functions/deleteDocument/node_modules/@smithy/middleware-endpoint/dist-cjs/adaptors/getEndpointFromConfig.js new file mode 100644 index 0000000..c7c302b --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@smithy/middleware-endpoint/dist-cjs/adaptors/getEndpointFromConfig.js @@ -0,0 +1,7 @@ +"use strict"; +Object.defineProperty(exports, "__esModule", { value: true }); +exports.getEndpointFromConfig = void 0; +const node_config_provider_1 = require("@smithy/node-config-provider"); +const getEndpointUrlConfig_1 = require("./getEndpointUrlConfig"); +const getEndpointFromConfig = async (serviceId) => (0, 
node_config_provider_1.loadConfig)((0, getEndpointUrlConfig_1.getEndpointUrlConfig)(serviceId !== null && serviceId !== void 0 ? serviceId : ""))(); +exports.getEndpointFromConfig = getEndpointFromConfig; diff --git a/amplify/functions/deleteDocument/node_modules/@smithy/middleware-endpoint/dist-cjs/adaptors/getEndpointFromInstructions.js b/amplify/functions/deleteDocument/node_modules/@smithy/middleware-endpoint/dist-cjs/adaptors/getEndpointFromInstructions.js new file mode 100644 index 0000000..0440577 --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@smithy/middleware-endpoint/dist-cjs/adaptors/getEndpointFromInstructions.js @@ -0,0 +1 @@ +module.exports = require("../index.js"); \ No newline at end of file diff --git a/amplify/functions/deleteDocument/node_modules/@smithy/middleware-endpoint/dist-cjs/adaptors/getEndpointUrlConfig.js b/amplify/functions/deleteDocument/node_modules/@smithy/middleware-endpoint/dist-cjs/adaptors/getEndpointUrlConfig.js new file mode 100644 index 0000000..fe5c010 --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@smithy/middleware-endpoint/dist-cjs/adaptors/getEndpointUrlConfig.js @@ -0,0 +1,35 @@ +"use strict"; +Object.defineProperty(exports, "__esModule", { value: true }); +exports.getEndpointUrlConfig = void 0; +const shared_ini_file_loader_1 = require("@smithy/shared-ini-file-loader"); +const ENV_ENDPOINT_URL = "AWS_ENDPOINT_URL"; +const CONFIG_ENDPOINT_URL = "endpoint_url"; +const getEndpointUrlConfig = (serviceId) => ({ + environmentVariableSelector: (env) => { + const serviceSuffixParts = serviceId.split(" ").map((w) => w.toUpperCase()); + const serviceEndpointUrl = env[[ENV_ENDPOINT_URL, ...serviceSuffixParts].join("_")]; + if (serviceEndpointUrl) + return serviceEndpointUrl; + const endpointUrl = env[ENV_ENDPOINT_URL]; + if (endpointUrl) + return endpointUrl; + return undefined; + }, + configFileSelector: (profile, config) => { + if (config && profile.services) { + const servicesSection = 
config[["services", profile.services].join(shared_ini_file_loader_1.CONFIG_PREFIX_SEPARATOR)]; + if (servicesSection) { + const servicePrefixParts = serviceId.split(" ").map((w) => w.toLowerCase()); + const endpointUrl = servicesSection[[servicePrefixParts.join("_"), CONFIG_ENDPOINT_URL].join(shared_ini_file_loader_1.CONFIG_PREFIX_SEPARATOR)]; + if (endpointUrl) + return endpointUrl; + } + } + const endpointUrl = profile[CONFIG_ENDPOINT_URL]; + if (endpointUrl) + return endpointUrl; + return undefined; + }, + default: undefined, +}); +exports.getEndpointUrlConfig = getEndpointUrlConfig; diff --git a/amplify/functions/deleteDocument/node_modules/@smithy/middleware-endpoint/dist-cjs/adaptors/index.js b/amplify/functions/deleteDocument/node_modules/@smithy/middleware-endpoint/dist-cjs/adaptors/index.js new file mode 100644 index 0000000..0440577 --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@smithy/middleware-endpoint/dist-cjs/adaptors/index.js @@ -0,0 +1 @@ +module.exports = require("../index.js"); \ No newline at end of file diff --git a/amplify/functions/deleteDocument/node_modules/@smithy/middleware-endpoint/dist-cjs/adaptors/toEndpointV1.js b/amplify/functions/deleteDocument/node_modules/@smithy/middleware-endpoint/dist-cjs/adaptors/toEndpointV1.js new file mode 100644 index 0000000..0440577 --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@smithy/middleware-endpoint/dist-cjs/adaptors/toEndpointV1.js @@ -0,0 +1 @@ +module.exports = require("../index.js"); \ No newline at end of file diff --git a/amplify/functions/deleteDocument/node_modules/@smithy/middleware-endpoint/dist-cjs/endpointMiddleware.js b/amplify/functions/deleteDocument/node_modules/@smithy/middleware-endpoint/dist-cjs/endpointMiddleware.js new file mode 100644 index 0000000..532e610 --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@smithy/middleware-endpoint/dist-cjs/endpointMiddleware.js @@ -0,0 +1 @@ +module.exports = require("./index.js"); \ 
No newline at end of file diff --git a/amplify/functions/deleteDocument/node_modules/@smithy/middleware-endpoint/dist-cjs/getEndpointPlugin.js b/amplify/functions/deleteDocument/node_modules/@smithy/middleware-endpoint/dist-cjs/getEndpointPlugin.js new file mode 100644 index 0000000..532e610 --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@smithy/middleware-endpoint/dist-cjs/getEndpointPlugin.js @@ -0,0 +1 @@ +module.exports = require("./index.js"); \ No newline at end of file diff --git a/amplify/functions/deleteDocument/node_modules/@smithy/middleware-endpoint/dist-cjs/index.js b/amplify/functions/deleteDocument/node_modules/@smithy/middleware-endpoint/dist-cjs/index.js new file mode 100644 index 0000000..177fdc3 --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@smithy/middleware-endpoint/dist-cjs/index.js @@ -0,0 +1,279 @@ +var __defProp = Object.defineProperty; +var __getOwnPropDesc = Object.getOwnPropertyDescriptor; +var __getOwnPropNames = Object.getOwnPropertyNames; +var __hasOwnProp = Object.prototype.hasOwnProperty; +var __name = (target, value) => __defProp(target, "name", { value, configurable: true }); +var __export = (target, all) => { + for (var name in all) + __defProp(target, name, { get: all[name], enumerable: true }); +}; +var __copyProps = (to, from, except, desc) => { + if (from && typeof from === "object" || typeof from === "function") { + for (let key of __getOwnPropNames(from)) + if (!__hasOwnProp.call(to, key) && key !== except) + __defProp(to, key, { get: () => from[key], enumerable: !(desc = __getOwnPropDesc(from, key)) || desc.enumerable }); + } + return to; +}; +var __toCommonJS = (mod) => __copyProps(__defProp({}, "__esModule", { value: true }), mod); + +// src/index.ts +var src_exports = {}; +__export(src_exports, { + endpointMiddleware: () => endpointMiddleware, + endpointMiddlewareOptions: () => endpointMiddlewareOptions, + getEndpointFromInstructions: () => getEndpointFromInstructions, + 
getEndpointPlugin: () => getEndpointPlugin, + resolveEndpointConfig: () => resolveEndpointConfig, + resolveParams: () => resolveParams, + toEndpointV1: () => toEndpointV1 +}); +module.exports = __toCommonJS(src_exports); + +// src/service-customizations/s3.ts +var resolveParamsForS3 = /* @__PURE__ */ __name(async (endpointParams) => { + const bucket = endpointParams?.Bucket || ""; + if (typeof endpointParams.Bucket === "string") { + endpointParams.Bucket = bucket.replace(/#/g, encodeURIComponent("#")).replace(/\?/g, encodeURIComponent("?")); + } + if (isArnBucketName(bucket)) { + if (endpointParams.ForcePathStyle === true) { + throw new Error("Path-style addressing cannot be used with ARN buckets"); + } + } else if (!isDnsCompatibleBucketName(bucket) || bucket.indexOf(".") !== -1 && !String(endpointParams.Endpoint).startsWith("http:") || bucket.toLowerCase() !== bucket || bucket.length < 3) { + endpointParams.ForcePathStyle = true; + } + if (endpointParams.DisableMultiRegionAccessPoints) { + endpointParams.disableMultiRegionAccessPoints = true; + endpointParams.DisableMRAP = true; + } + return endpointParams; +}, "resolveParamsForS3"); +var DOMAIN_PATTERN = /^[a-z0-9][a-z0-9\.\-]{1,61}[a-z0-9]$/; +var IP_ADDRESS_PATTERN = /(\d+\.){3}\d+/; +var DOTS_PATTERN = /\.\./; +var isDnsCompatibleBucketName = /* @__PURE__ */ __name((bucketName) => DOMAIN_PATTERN.test(bucketName) && !IP_ADDRESS_PATTERN.test(bucketName) && !DOTS_PATTERN.test(bucketName), "isDnsCompatibleBucketName"); +var isArnBucketName = /* @__PURE__ */ __name((bucketName) => { + const [arn, partition, service, , , bucket] = bucketName.split(":"); + const isArn = arn === "arn" && bucketName.split(":").length >= 6; + const isValidArn = Boolean(isArn && partition && service && bucket); + if (isArn && !isValidArn) { + throw new Error(`Invalid ARN: ${bucketName} was an invalid ARN.`); + } + return isValidArn; +}, "isArnBucketName"); + +// src/adaptors/createConfigValueProvider.ts +var createConfigValueProvider = 
/* @__PURE__ */ __name((configKey, canonicalEndpointParamKey, config) => { + const configProvider = /* @__PURE__ */ __name(async () => { + const configValue = config[configKey] ?? config[canonicalEndpointParamKey]; + if (typeof configValue === "function") { + return configValue(); + } + return configValue; + }, "configProvider"); + if (configKey === "credentialScope" || canonicalEndpointParamKey === "CredentialScope") { + return async () => { + const credentials = typeof config.credentials === "function" ? await config.credentials() : config.credentials; + const configValue = credentials?.credentialScope ?? credentials?.CredentialScope; + return configValue; + }; + } + if (configKey === "accountId" || canonicalEndpointParamKey === "AccountId") { + return async () => { + const credentials = typeof config.credentials === "function" ? await config.credentials() : config.credentials; + const configValue = credentials?.accountId ?? credentials?.AccountId; + return configValue; + }; + } + if (configKey === "endpoint" || canonicalEndpointParamKey === "endpoint") { + return async () => { + const endpoint = await configProvider(); + if (endpoint && typeof endpoint === "object") { + if ("url" in endpoint) { + return endpoint.url.href; + } + if ("hostname" in endpoint) { + const { protocol, hostname, port, path } = endpoint; + return `${protocol}//${hostname}${port ? 
":" + port : ""}${path}`; + } + } + return endpoint; + }; + } + return configProvider; +}, "createConfigValueProvider"); + +// src/adaptors/getEndpointFromInstructions.ts +var import_getEndpointFromConfig = require("./adaptors/getEndpointFromConfig"); + +// src/adaptors/toEndpointV1.ts +var import_url_parser = require("@smithy/url-parser"); +var toEndpointV1 = /* @__PURE__ */ __name((endpoint) => { + if (typeof endpoint === "object") { + if ("url" in endpoint) { + return (0, import_url_parser.parseUrl)(endpoint.url); + } + return endpoint; + } + return (0, import_url_parser.parseUrl)(endpoint); +}, "toEndpointV1"); + +// src/adaptors/getEndpointFromInstructions.ts +var getEndpointFromInstructions = /* @__PURE__ */ __name(async (commandInput, instructionsSupplier, clientConfig, context) => { + if (!clientConfig.endpoint) { + let endpointFromConfig; + if (clientConfig.serviceConfiguredEndpoint) { + endpointFromConfig = await clientConfig.serviceConfiguredEndpoint(); + } else { + endpointFromConfig = await (0, import_getEndpointFromConfig.getEndpointFromConfig)(clientConfig.serviceId); + } + if (endpointFromConfig) { + clientConfig.endpoint = () => Promise.resolve(toEndpointV1(endpointFromConfig)); + } + } + const endpointParams = await resolveParams(commandInput, instructionsSupplier, clientConfig); + if (typeof clientConfig.endpointProvider !== "function") { + throw new Error("config.endpointProvider is not set."); + } + const endpoint = clientConfig.endpointProvider(endpointParams, context); + return endpoint; +}, "getEndpointFromInstructions"); +var resolveParams = /* @__PURE__ */ __name(async (commandInput, instructionsSupplier, clientConfig) => { + const endpointParams = {}; + const instructions = instructionsSupplier?.getEndpointParameterInstructions?.() || {}; + for (const [name, instruction] of Object.entries(instructions)) { + switch (instruction.type) { + case "staticContextParams": + endpointParams[name] = instruction.value; + break; + case 
"contextParams": + endpointParams[name] = commandInput[instruction.name]; + break; + case "clientContextParams": + case "builtInParams": + endpointParams[name] = await createConfigValueProvider(instruction.name, name, clientConfig)(); + break; + case "operationContextParams": + endpointParams[name] = instruction.get(commandInput); + break; + default: + throw new Error("Unrecognized endpoint parameter instruction: " + JSON.stringify(instruction)); + } + } + if (Object.keys(instructions).length === 0) { + Object.assign(endpointParams, clientConfig); + } + if (String(clientConfig.serviceId).toLowerCase() === "s3") { + await resolveParamsForS3(endpointParams); + } + return endpointParams; +}, "resolveParams"); + +// src/endpointMiddleware.ts +var import_core = require("@smithy/core"); +var import_util_middleware = require("@smithy/util-middleware"); +var endpointMiddleware = /* @__PURE__ */ __name(({ + config, + instructions +}) => { + return (next, context) => async (args) => { + if (config.endpoint) { + (0, import_core.setFeature)(context, "ENDPOINT_OVERRIDE", "N"); + } + const endpoint = await getEndpointFromInstructions( + args.input, + { + getEndpointParameterInstructions() { + return instructions; + } + }, + { ...config }, + context + ); + context.endpointV2 = endpoint; + context.authSchemes = endpoint.properties?.authSchemes; + const authScheme = context.authSchemes?.[0]; + if (authScheme) { + context["signing_region"] = authScheme.signingRegion; + context["signing_service"] = authScheme.signingName; + const smithyContext = (0, import_util_middleware.getSmithyContext)(context); + const httpAuthOption = smithyContext?.selectedHttpAuthScheme?.httpAuthOption; + if (httpAuthOption) { + httpAuthOption.signingProperties = Object.assign( + httpAuthOption.signingProperties || {}, + { + signing_region: authScheme.signingRegion, + signingRegion: authScheme.signingRegion, + signing_service: authScheme.signingName, + signingName: authScheme.signingName, + signingRegionSet: 
authScheme.signingRegionSet + }, + authScheme.properties + ); + } + } + return next({ + ...args + }); + }; +}, "endpointMiddleware"); + +// src/getEndpointPlugin.ts +var import_middleware_serde = require("@smithy/middleware-serde"); +var endpointMiddlewareOptions = { + step: "serialize", + tags: ["ENDPOINT_PARAMETERS", "ENDPOINT_V2", "ENDPOINT"], + name: "endpointV2Middleware", + override: true, + relation: "before", + toMiddleware: import_middleware_serde.serializerMiddlewareOption.name +}; +var getEndpointPlugin = /* @__PURE__ */ __name((config, instructions) => ({ + applyToStack: (clientStack) => { + clientStack.addRelativeTo( + endpointMiddleware({ + config, + instructions + }), + endpointMiddlewareOptions + ); + } +}), "getEndpointPlugin"); + +// src/resolveEndpointConfig.ts + +var import_getEndpointFromConfig2 = require("./adaptors/getEndpointFromConfig"); +var resolveEndpointConfig = /* @__PURE__ */ __name((input) => { + const tls = input.tls ?? true; + const { endpoint, useDualstackEndpoint, useFipsEndpoint } = input; + const customEndpointProvider = endpoint != null ? async () => toEndpointV1(await (0, import_util_middleware.normalizeProvider)(endpoint)()) : void 0; + const isCustomEndpoint = !!endpoint; + const resolvedConfig = Object.assign(input, { + endpoint: customEndpointProvider, + tls, + isCustomEndpoint, + useDualstackEndpoint: (0, import_util_middleware.normalizeProvider)(useDualstackEndpoint ?? false), + useFipsEndpoint: (0, import_util_middleware.normalizeProvider)(useFipsEndpoint ?? 
false) + }); + let configuredEndpointPromise = void 0; + resolvedConfig.serviceConfiguredEndpoint = async () => { + if (input.serviceId && !configuredEndpointPromise) { + configuredEndpointPromise = (0, import_getEndpointFromConfig2.getEndpointFromConfig)(input.serviceId); + } + return configuredEndpointPromise; + }; + return resolvedConfig; +}, "resolveEndpointConfig"); +// Annotate the CommonJS export names for ESM import in node: + +0 && (module.exports = { + getEndpointFromInstructions, + resolveParams, + toEndpointV1, + endpointMiddleware, + endpointMiddlewareOptions, + getEndpointPlugin, + resolveEndpointConfig +}); + diff --git a/amplify/functions/deleteDocument/node_modules/@smithy/middleware-endpoint/dist-cjs/resolveEndpointConfig.js b/amplify/functions/deleteDocument/node_modules/@smithy/middleware-endpoint/dist-cjs/resolveEndpointConfig.js new file mode 100644 index 0000000..532e610 --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@smithy/middleware-endpoint/dist-cjs/resolveEndpointConfig.js @@ -0,0 +1 @@ +module.exports = require("./index.js"); \ No newline at end of file diff --git a/amplify/functions/deleteDocument/node_modules/@smithy/middleware-endpoint/dist-cjs/service-customizations/index.js b/amplify/functions/deleteDocument/node_modules/@smithy/middleware-endpoint/dist-cjs/service-customizations/index.js new file mode 100644 index 0000000..0440577 --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@smithy/middleware-endpoint/dist-cjs/service-customizations/index.js @@ -0,0 +1 @@ +module.exports = require("../index.js"); \ No newline at end of file diff --git a/amplify/functions/deleteDocument/node_modules/@smithy/middleware-endpoint/dist-cjs/service-customizations/s3.js b/amplify/functions/deleteDocument/node_modules/@smithy/middleware-endpoint/dist-cjs/service-customizations/s3.js new file mode 100644 index 0000000..0440577 --- /dev/null +++ 
b/amplify/functions/deleteDocument/node_modules/@smithy/middleware-endpoint/dist-cjs/service-customizations/s3.js @@ -0,0 +1 @@ +module.exports = require("../index.js"); \ No newline at end of file diff --git a/amplify/functions/deleteDocument/node_modules/@smithy/middleware-endpoint/dist-cjs/types.js b/amplify/functions/deleteDocument/node_modules/@smithy/middleware-endpoint/dist-cjs/types.js new file mode 100644 index 0000000..532e610 --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@smithy/middleware-endpoint/dist-cjs/types.js @@ -0,0 +1 @@ +module.exports = require("./index.js"); \ No newline at end of file diff --git a/amplify/functions/deleteDocument/node_modules/@smithy/middleware-endpoint/dist-es/adaptors/createConfigValueProvider.js b/amplify/functions/deleteDocument/node_modules/@smithy/middleware-endpoint/dist-es/adaptors/createConfigValueProvider.js new file mode 100644 index 0000000..b468b83 --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@smithy/middleware-endpoint/dist-es/adaptors/createConfigValueProvider.js @@ -0,0 +1,39 @@ +export const createConfigValueProvider = (configKey, canonicalEndpointParamKey, config) => { + const configProvider = async () => { + const configValue = config[configKey] ?? config[canonicalEndpointParamKey]; + if (typeof configValue === "function") { + return configValue(); + } + return configValue; + }; + if (configKey === "credentialScope" || canonicalEndpointParamKey === "CredentialScope") { + return async () => { + const credentials = typeof config.credentials === "function" ? await config.credentials() : config.credentials; + const configValue = credentials?.credentialScope ?? credentials?.CredentialScope; + return configValue; + }; + } + if (configKey === "accountId" || canonicalEndpointParamKey === "AccountId") { + return async () => { + const credentials = typeof config.credentials === "function" ? 
await config.credentials() : config.credentials; + const configValue = credentials?.accountId ?? credentials?.AccountId; + return configValue; + }; + } + if (configKey === "endpoint" || canonicalEndpointParamKey === "endpoint") { + return async () => { + const endpoint = await configProvider(); + if (endpoint && typeof endpoint === "object") { + if ("url" in endpoint) { + return endpoint.url.href; + } + if ("hostname" in endpoint) { + const { protocol, hostname, port, path } = endpoint; + return `${protocol}//${hostname}${port ? ":" + port : ""}${path}`; + } + } + return endpoint; + }; + } + return configProvider; +}; diff --git a/amplify/functions/deleteDocument/node_modules/@smithy/middleware-endpoint/dist-es/adaptors/getEndpointFromConfig.browser.js b/amplify/functions/deleteDocument/node_modules/@smithy/middleware-endpoint/dist-es/adaptors/getEndpointFromConfig.browser.js new file mode 100644 index 0000000..75fc136 --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@smithy/middleware-endpoint/dist-es/adaptors/getEndpointFromConfig.browser.js @@ -0,0 +1 @@ +export const getEndpointFromConfig = async (serviceId) => undefined; diff --git a/amplify/functions/deleteDocument/node_modules/@smithy/middleware-endpoint/dist-es/adaptors/getEndpointFromConfig.js b/amplify/functions/deleteDocument/node_modules/@smithy/middleware-endpoint/dist-es/adaptors/getEndpointFromConfig.js new file mode 100644 index 0000000..33c1d45 --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@smithy/middleware-endpoint/dist-es/adaptors/getEndpointFromConfig.js @@ -0,0 +1,3 @@ +import { loadConfig } from "@smithy/node-config-provider"; +import { getEndpointUrlConfig } from "./getEndpointUrlConfig"; +export const getEndpointFromConfig = async (serviceId) => loadConfig(getEndpointUrlConfig(serviceId ?? 
""))(); diff --git a/amplify/functions/deleteDocument/node_modules/@smithy/middleware-endpoint/dist-es/adaptors/getEndpointFromInstructions.js b/amplify/functions/deleteDocument/node_modules/@smithy/middleware-endpoint/dist-es/adaptors/getEndpointFromInstructions.js new file mode 100644 index 0000000..e445646 --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@smithy/middleware-endpoint/dist-es/adaptors/getEndpointFromInstructions.js @@ -0,0 +1,54 @@ +import { resolveParamsForS3 } from "../service-customizations"; +import { createConfigValueProvider } from "./createConfigValueProvider"; +import { getEndpointFromConfig } from "./getEndpointFromConfig"; +import { toEndpointV1 } from "./toEndpointV1"; +export const getEndpointFromInstructions = async (commandInput, instructionsSupplier, clientConfig, context) => { + if (!clientConfig.endpoint) { + let endpointFromConfig; + if (clientConfig.serviceConfiguredEndpoint) { + endpointFromConfig = await clientConfig.serviceConfiguredEndpoint(); + } + else { + endpointFromConfig = await getEndpointFromConfig(clientConfig.serviceId); + } + if (endpointFromConfig) { + clientConfig.endpoint = () => Promise.resolve(toEndpointV1(endpointFromConfig)); + } + } + const endpointParams = await resolveParams(commandInput, instructionsSupplier, clientConfig); + if (typeof clientConfig.endpointProvider !== "function") { + throw new Error("config.endpointProvider is not set."); + } + const endpoint = clientConfig.endpointProvider(endpointParams, context); + return endpoint; +}; +export const resolveParams = async (commandInput, instructionsSupplier, clientConfig) => { + const endpointParams = {}; + const instructions = instructionsSupplier?.getEndpointParameterInstructions?.() || {}; + for (const [name, instruction] of Object.entries(instructions)) { + switch (instruction.type) { + case "staticContextParams": + endpointParams[name] = instruction.value; + break; + case "contextParams": + endpointParams[name] = 
commandInput[instruction.name]; + break; + case "clientContextParams": + case "builtInParams": + endpointParams[name] = await createConfigValueProvider(instruction.name, name, clientConfig)(); + break; + case "operationContextParams": + endpointParams[name] = instruction.get(commandInput); + break; + default: + throw new Error("Unrecognized endpoint parameter instruction: " + JSON.stringify(instruction)); + } + } + if (Object.keys(instructions).length === 0) { + Object.assign(endpointParams, clientConfig); + } + if (String(clientConfig.serviceId).toLowerCase() === "s3") { + await resolveParamsForS3(endpointParams); + } + return endpointParams; +}; diff --git a/amplify/functions/deleteDocument/node_modules/@smithy/middleware-endpoint/dist-es/adaptors/getEndpointUrlConfig.js b/amplify/functions/deleteDocument/node_modules/@smithy/middleware-endpoint/dist-es/adaptors/getEndpointUrlConfig.js new file mode 100644 index 0000000..82a1519 --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@smithy/middleware-endpoint/dist-es/adaptors/getEndpointUrlConfig.js @@ -0,0 +1,31 @@ +import { CONFIG_PREFIX_SEPARATOR } from "@smithy/shared-ini-file-loader"; +const ENV_ENDPOINT_URL = "AWS_ENDPOINT_URL"; +const CONFIG_ENDPOINT_URL = "endpoint_url"; +export const getEndpointUrlConfig = (serviceId) => ({ + environmentVariableSelector: (env) => { + const serviceSuffixParts = serviceId.split(" ").map((w) => w.toUpperCase()); + const serviceEndpointUrl = env[[ENV_ENDPOINT_URL, ...serviceSuffixParts].join("_")]; + if (serviceEndpointUrl) + return serviceEndpointUrl; + const endpointUrl = env[ENV_ENDPOINT_URL]; + if (endpointUrl) + return endpointUrl; + return undefined; + }, + configFileSelector: (profile, config) => { + if (config && profile.services) { + const servicesSection = config[["services", profile.services].join(CONFIG_PREFIX_SEPARATOR)]; + if (servicesSection) { + const servicePrefixParts = serviceId.split(" ").map((w) => w.toLowerCase()); + const endpointUrl = 
servicesSection[[servicePrefixParts.join("_"), CONFIG_ENDPOINT_URL].join(CONFIG_PREFIX_SEPARATOR)]; + if (endpointUrl) + return endpointUrl; + } + } + const endpointUrl = profile[CONFIG_ENDPOINT_URL]; + if (endpointUrl) + return endpointUrl; + return undefined; + }, + default: undefined, +}); diff --git a/amplify/functions/deleteDocument/node_modules/@smithy/middleware-endpoint/dist-es/adaptors/index.js b/amplify/functions/deleteDocument/node_modules/@smithy/middleware-endpoint/dist-es/adaptors/index.js new file mode 100644 index 0000000..17752da --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@smithy/middleware-endpoint/dist-es/adaptors/index.js @@ -0,0 +1,2 @@ +export * from "./getEndpointFromInstructions"; +export * from "./toEndpointV1"; diff --git a/amplify/functions/deleteDocument/node_modules/@smithy/middleware-endpoint/dist-es/adaptors/toEndpointV1.js b/amplify/functions/deleteDocument/node_modules/@smithy/middleware-endpoint/dist-es/adaptors/toEndpointV1.js new file mode 100644 index 0000000..83f4324 --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@smithy/middleware-endpoint/dist-es/adaptors/toEndpointV1.js @@ -0,0 +1,10 @@ +import { parseUrl } from "@smithy/url-parser"; +export const toEndpointV1 = (endpoint) => { + if (typeof endpoint === "object") { + if ("url" in endpoint) { + return parseUrl(endpoint.url); + } + return endpoint; + } + return parseUrl(endpoint); +}; diff --git a/amplify/functions/deleteDocument/node_modules/@smithy/middleware-endpoint/dist-es/endpointMiddleware.js b/amplify/functions/deleteDocument/node_modules/@smithy/middleware-endpoint/dist-es/endpointMiddleware.js new file mode 100644 index 0000000..df25795 --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@smithy/middleware-endpoint/dist-es/endpointMiddleware.js @@ -0,0 +1,36 @@ +import { setFeature } from "@smithy/core"; +import { getSmithyContext } from "@smithy/util-middleware"; +import { getEndpointFromInstructions } from 
"./adaptors/getEndpointFromInstructions"; +export const endpointMiddleware = ({ config, instructions, }) => { + return (next, context) => async (args) => { + if (config.endpoint) { + setFeature(context, "ENDPOINT_OVERRIDE", "N"); + } + const endpoint = await getEndpointFromInstructions(args.input, { + getEndpointParameterInstructions() { + return instructions; + }, + }, { ...config }, context); + context.endpointV2 = endpoint; + context.authSchemes = endpoint.properties?.authSchemes; + const authScheme = context.authSchemes?.[0]; + if (authScheme) { + context["signing_region"] = authScheme.signingRegion; + context["signing_service"] = authScheme.signingName; + const smithyContext = getSmithyContext(context); + const httpAuthOption = smithyContext?.selectedHttpAuthScheme?.httpAuthOption; + if (httpAuthOption) { + httpAuthOption.signingProperties = Object.assign(httpAuthOption.signingProperties || {}, { + signing_region: authScheme.signingRegion, + signingRegion: authScheme.signingRegion, + signing_service: authScheme.signingName, + signingName: authScheme.signingName, + signingRegionSet: authScheme.signingRegionSet, + }, authScheme.properties); + } + } + return next({ + ...args, + }); + }; +}; diff --git a/amplify/functions/deleteDocument/node_modules/@smithy/middleware-endpoint/dist-es/getEndpointPlugin.js b/amplify/functions/deleteDocument/node_modules/@smithy/middleware-endpoint/dist-es/getEndpointPlugin.js new file mode 100644 index 0000000..e2335f4 --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@smithy/middleware-endpoint/dist-es/getEndpointPlugin.js @@ -0,0 +1,18 @@ +import { serializerMiddlewareOption } from "@smithy/middleware-serde"; +import { endpointMiddleware } from "./endpointMiddleware"; +export const endpointMiddlewareOptions = { + step: "serialize", + tags: ["ENDPOINT_PARAMETERS", "ENDPOINT_V2", "ENDPOINT"], + name: "endpointV2Middleware", + override: true, + relation: "before", + toMiddleware: serializerMiddlewareOption.name, +}; 
+export const getEndpointPlugin = (config, instructions) => ({ + applyToStack: (clientStack) => { + clientStack.addRelativeTo(endpointMiddleware({ + config, + instructions, + }), endpointMiddlewareOptions); + }, +}); diff --git a/amplify/functions/deleteDocument/node_modules/@smithy/middleware-endpoint/dist-es/index.js b/amplify/functions/deleteDocument/node_modules/@smithy/middleware-endpoint/dist-es/index.js new file mode 100644 index 0000000..f89653e --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@smithy/middleware-endpoint/dist-es/index.js @@ -0,0 +1,5 @@ +export * from "./adaptors"; +export * from "./endpointMiddleware"; +export * from "./getEndpointPlugin"; +export * from "./resolveEndpointConfig"; +export * from "./types"; diff --git a/amplify/functions/deleteDocument/node_modules/@smithy/middleware-endpoint/dist-es/resolveEndpointConfig.js b/amplify/functions/deleteDocument/node_modules/@smithy/middleware-endpoint/dist-es/resolveEndpointConfig.js new file mode 100644 index 0000000..c3a0eea --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@smithy/middleware-endpoint/dist-es/resolveEndpointConfig.js @@ -0,0 +1,24 @@ +import { normalizeProvider } from "@smithy/util-middleware"; +import { getEndpointFromConfig } from "./adaptors/getEndpointFromConfig"; +import { toEndpointV1 } from "./adaptors/toEndpointV1"; +export const resolveEndpointConfig = (input) => { + const tls = input.tls ?? true; + const { endpoint, useDualstackEndpoint, useFipsEndpoint } = input; + const customEndpointProvider = endpoint != null ? async () => toEndpointV1(await normalizeProvider(endpoint)()) : undefined; + const isCustomEndpoint = !!endpoint; + const resolvedConfig = Object.assign(input, { + endpoint: customEndpointProvider, + tls, + isCustomEndpoint, + useDualstackEndpoint: normalizeProvider(useDualstackEndpoint ?? false), + useFipsEndpoint: normalizeProvider(useFipsEndpoint ?? 
false), + }); + let configuredEndpointPromise = undefined; + resolvedConfig.serviceConfiguredEndpoint = async () => { + if (input.serviceId && !configuredEndpointPromise) { + configuredEndpointPromise = getEndpointFromConfig(input.serviceId); + } + return configuredEndpointPromise; + }; + return resolvedConfig; +}; diff --git a/amplify/functions/deleteDocument/node_modules/@smithy/middleware-endpoint/dist-es/service-customizations/index.js b/amplify/functions/deleteDocument/node_modules/@smithy/middleware-endpoint/dist-es/service-customizations/index.js new file mode 100644 index 0000000..e50e107 --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@smithy/middleware-endpoint/dist-es/service-customizations/index.js @@ -0,0 +1 @@ +export * from "./s3"; diff --git a/amplify/functions/deleteDocument/node_modules/@smithy/middleware-endpoint/dist-es/service-customizations/s3.js b/amplify/functions/deleteDocument/node_modules/@smithy/middleware-endpoint/dist-es/service-customizations/s3.js new file mode 100644 index 0000000..e993fc7 --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@smithy/middleware-endpoint/dist-es/service-customizations/s3.js @@ -0,0 +1,37 @@ +export const resolveParamsForS3 = async (endpointParams) => { + const bucket = endpointParams?.Bucket || ""; + if (typeof endpointParams.Bucket === "string") { + endpointParams.Bucket = bucket.replace(/#/g, encodeURIComponent("#")).replace(/\?/g, encodeURIComponent("?")); + } + if (isArnBucketName(bucket)) { + if (endpointParams.ForcePathStyle === true) { + throw new Error("Path-style addressing cannot be used with ARN buckets"); + } + } + else if (!isDnsCompatibleBucketName(bucket) || + (bucket.indexOf(".") !== -1 && !String(endpointParams.Endpoint).startsWith("http:")) || + bucket.toLowerCase() !== bucket || + bucket.length < 3) { + endpointParams.ForcePathStyle = true; + } + if (endpointParams.DisableMultiRegionAccessPoints) { + endpointParams.disableMultiRegionAccessPoints = 
true; + endpointParams.DisableMRAP = true; + } + return endpointParams; +}; +const DOMAIN_PATTERN = /^[a-z0-9][a-z0-9\.\-]{1,61}[a-z0-9]$/; +const IP_ADDRESS_PATTERN = /(\d+\.){3}\d+/; +const DOTS_PATTERN = /\.\./; +export const DOT_PATTERN = /\./; +export const S3_HOSTNAME_PATTERN = /^(.+\.)?s3(-fips)?(\.dualstack)?[.-]([a-z0-9-]+)\./; +export const isDnsCompatibleBucketName = (bucketName) => DOMAIN_PATTERN.test(bucketName) && !IP_ADDRESS_PATTERN.test(bucketName) && !DOTS_PATTERN.test(bucketName); +export const isArnBucketName = (bucketName) => { + const [arn, partition, service, , , bucket] = bucketName.split(":"); + const isArn = arn === "arn" && bucketName.split(":").length >= 6; + const isValidArn = Boolean(isArn && partition && service && bucket); + if (isArn && !isValidArn) { + throw new Error(`Invalid ARN: ${bucketName} was an invalid ARN.`); + } + return isValidArn; +}; diff --git a/amplify/functions/deleteDocument/node_modules/@smithy/middleware-endpoint/dist-es/types.js b/amplify/functions/deleteDocument/node_modules/@smithy/middleware-endpoint/dist-es/types.js new file mode 100644 index 0000000..cb0ff5c --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@smithy/middleware-endpoint/dist-es/types.js @@ -0,0 +1 @@ +export {}; diff --git a/amplify/functions/deleteDocument/node_modules/@smithy/middleware-endpoint/dist-types/adaptors/createConfigValueProvider.d.ts b/amplify/functions/deleteDocument/node_modules/@smithy/middleware-endpoint/dist-types/adaptors/createConfigValueProvider.d.ts new file mode 100644 index 0000000..df65914 --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@smithy/middleware-endpoint/dist-types/adaptors/createConfigValueProvider.d.ts @@ -0,0 +1,13 @@ +/** + * Normalize some key of the client config to an async provider. + * @internal + * + * @param configKey - the key to look up in config. + * @param canonicalEndpointParamKey - this is the name the EndpointRuleSet uses. 
+ * it will most likely not contain the config + * value, but we use it as a fallback. + * @param config - container of the config values. + * + * @returns async function that will resolve with the value. + */ +export declare const createConfigValueProvider: >(configKey: string, canonicalEndpointParamKey: string, config: Config) => () => Promise; diff --git a/amplify/functions/deleteDocument/node_modules/@smithy/middleware-endpoint/dist-types/adaptors/getEndpointFromConfig.browser.d.ts b/amplify/functions/deleteDocument/node_modules/@smithy/middleware-endpoint/dist-types/adaptors/getEndpointFromConfig.browser.d.ts new file mode 100644 index 0000000..de05fa5 --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@smithy/middleware-endpoint/dist-types/adaptors/getEndpointFromConfig.browser.d.ts @@ -0,0 +1 @@ +export declare const getEndpointFromConfig: (serviceId: string) => Promise; diff --git a/amplify/functions/deleteDocument/node_modules/@smithy/middleware-endpoint/dist-types/adaptors/getEndpointFromConfig.d.ts b/amplify/functions/deleteDocument/node_modules/@smithy/middleware-endpoint/dist-types/adaptors/getEndpointFromConfig.d.ts new file mode 100644 index 0000000..42a3566 --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@smithy/middleware-endpoint/dist-types/adaptors/getEndpointFromConfig.d.ts @@ -0,0 +1,4 @@ +/** + * @internal + */ +export declare const getEndpointFromConfig: (serviceId?: string) => Promise; diff --git a/amplify/functions/deleteDocument/node_modules/@smithy/middleware-endpoint/dist-types/adaptors/getEndpointFromInstructions.d.ts b/amplify/functions/deleteDocument/node_modules/@smithy/middleware-endpoint/dist-types/adaptors/getEndpointFromInstructions.d.ts new file mode 100644 index 0000000..49cef2a --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@smithy/middleware-endpoint/dist-types/adaptors/getEndpointFromInstructions.d.ts @@ -0,0 +1,28 @@ +import { EndpointParameters, EndpointV2, 
HandlerExecutionContext } from "@smithy/types"; +import { EndpointResolvedConfig } from "../resolveEndpointConfig"; +import { EndpointParameterInstructions } from "../types"; +/** + * @internal + */ +export type EndpointParameterInstructionsSupplier = Partial<{ + getEndpointParameterInstructions(): EndpointParameterInstructions; +}>; +/** + * This step in the endpoint resolution process is exposed as a function + * to allow packages such as signers, lib-upload, etc. to get + * the V2 Endpoint associated to an instance of some api operation command + * without needing to send it or resolve its middleware stack. + * + * @internal + * @param commandInput - the input of the Command in question. + * @param instructionsSupplier - this is typically a Command constructor. A static function supplying the + * endpoint parameter instructions will exist for commands in services + * having an endpoints ruleset trait. + * @param clientConfig - config of the service client. + * @param context - optional context. 
+ */ +export declare const getEndpointFromInstructions: , Config extends Record>(commandInput: CommandInput, instructionsSupplier: EndpointParameterInstructionsSupplier, clientConfig: Partial> & Config, context?: HandlerExecutionContext) => Promise; +/** + * @internal + */ +export declare const resolveParams: , Config extends Record>(commandInput: CommandInput, instructionsSupplier: EndpointParameterInstructionsSupplier, clientConfig: Partial> & Config) => Promise; diff --git a/amplify/functions/deleteDocument/node_modules/@smithy/middleware-endpoint/dist-types/adaptors/getEndpointUrlConfig.d.ts b/amplify/functions/deleteDocument/node_modules/@smithy/middleware-endpoint/dist-types/adaptors/getEndpointUrlConfig.d.ts new file mode 100644 index 0000000..0971010 --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@smithy/middleware-endpoint/dist-types/adaptors/getEndpointUrlConfig.d.ts @@ -0,0 +1,2 @@ +import { LoadedConfigSelectors } from "@smithy/node-config-provider"; +export declare const getEndpointUrlConfig: (serviceId: string) => LoadedConfigSelectors; diff --git a/amplify/functions/deleteDocument/node_modules/@smithy/middleware-endpoint/dist-types/adaptors/index.d.ts b/amplify/functions/deleteDocument/node_modules/@smithy/middleware-endpoint/dist-types/adaptors/index.d.ts new file mode 100644 index 0000000..cc13488 --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@smithy/middleware-endpoint/dist-types/adaptors/index.d.ts @@ -0,0 +1,8 @@ +/** + * @internal + */ +export * from "./getEndpointFromInstructions"; +/** + * @internal + */ +export * from "./toEndpointV1"; diff --git a/amplify/functions/deleteDocument/node_modules/@smithy/middleware-endpoint/dist-types/adaptors/toEndpointV1.d.ts b/amplify/functions/deleteDocument/node_modules/@smithy/middleware-endpoint/dist-types/adaptors/toEndpointV1.d.ts new file mode 100644 index 0000000..834aabb --- /dev/null +++ 
b/amplify/functions/deleteDocument/node_modules/@smithy/middleware-endpoint/dist-types/adaptors/toEndpointV1.d.ts @@ -0,0 +1,5 @@ +import { Endpoint, EndpointV2 } from "@smithy/types"; +/** + * @internal + */ +export declare const toEndpointV1: (endpoint: string | Endpoint | EndpointV2) => Endpoint; diff --git a/amplify/functions/deleteDocument/node_modules/@smithy/middleware-endpoint/dist-types/endpointMiddleware.d.ts b/amplify/functions/deleteDocument/node_modules/@smithy/middleware-endpoint/dist-types/endpointMiddleware.d.ts new file mode 100644 index 0000000..67cee64 --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@smithy/middleware-endpoint/dist-types/endpointMiddleware.d.ts @@ -0,0 +1,10 @@ +import { EndpointParameters, SerializeMiddleware } from "@smithy/types"; +import { EndpointResolvedConfig } from "./resolveEndpointConfig"; +import { EndpointParameterInstructions } from "./types"; +/** + * @internal + */ +export declare const endpointMiddleware: ({ config, instructions, }: { + config: EndpointResolvedConfig; + instructions: EndpointParameterInstructions; +}) => SerializeMiddleware; diff --git a/amplify/functions/deleteDocument/node_modules/@smithy/middleware-endpoint/dist-types/getEndpointPlugin.d.ts b/amplify/functions/deleteDocument/node_modules/@smithy/middleware-endpoint/dist-types/getEndpointPlugin.d.ts new file mode 100644 index 0000000..910f44d --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@smithy/middleware-endpoint/dist-types/getEndpointPlugin.d.ts @@ -0,0 +1,11 @@ +import { EndpointParameters, Pluggable, RelativeMiddlewareOptions, SerializeHandlerOptions } from "@smithy/types"; +import { EndpointResolvedConfig } from "./resolveEndpointConfig"; +import { EndpointParameterInstructions } from "./types"; +/** + * @internal + */ +export declare const endpointMiddlewareOptions: SerializeHandlerOptions & RelativeMiddlewareOptions; +/** + * @internal + */ +export declare const getEndpointPlugin: (config: 
EndpointResolvedConfig, instructions: EndpointParameterInstructions) => Pluggable; diff --git a/amplify/functions/deleteDocument/node_modules/@smithy/middleware-endpoint/dist-types/index.d.ts b/amplify/functions/deleteDocument/node_modules/@smithy/middleware-endpoint/dist-types/index.d.ts new file mode 100644 index 0000000..bea06cf --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@smithy/middleware-endpoint/dist-types/index.d.ts @@ -0,0 +1,17 @@ +/** + * @internal + */ +export * from "./adaptors"; +/** + * @internal + */ +export * from "./endpointMiddleware"; +/** + * @internal + */ +export * from "./getEndpointPlugin"; +export * from "./resolveEndpointConfig"; +/** + * @internal + */ +export * from "./types"; diff --git a/amplify/functions/deleteDocument/node_modules/@smithy/middleware-endpoint/dist-types/resolveEndpointConfig.d.ts b/amplify/functions/deleteDocument/node_modules/@smithy/middleware-endpoint/dist-types/resolveEndpointConfig.d.ts new file mode 100644 index 0000000..ec7dc70 --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@smithy/middleware-endpoint/dist-types/resolveEndpointConfig.d.ts @@ -0,0 +1,107 @@ +import { Endpoint, EndpointParameters, EndpointV2, Logger, Provider, UrlParser } from "@smithy/types"; +/** + * @public + * + * Endpoint config interfaces and resolver for Endpoint v2. They live in separate package to allow per-service onboarding. + * When all services onboard Endpoint v2, the resolver in config-resolver package can be removed. + * This interface includes all the endpoint parameters with built-in bindings of "AWS::*" and "SDK::*" + */ +export interface EndpointInputConfig { + /** + * The fully qualified endpoint of the webservice. This is only for using + * a custom endpoint (for example, when using a local version of S3). + * + * Endpoint transformations such as S3 applying a bucket to the hostname are + * still applicable to this custom endpoint. 
+ */ + endpoint?: string | Endpoint | Provider | EndpointV2 | Provider; + /** + * Providing a custom endpointProvider will override + * built-in transformations of the endpoint such as S3 adding the bucket + * name to the hostname, since they are part of the default endpointProvider. + */ + endpointProvider?: (params: T, context?: { + logger?: Logger; + }) => EndpointV2; + /** + * Whether TLS is enabled for requests. + * @deprecated + */ + tls?: boolean; + /** + * Enables IPv6/IPv4 dualstack endpoint. + */ + useDualstackEndpoint?: boolean | Provider; + /** + * Enables FIPS compatible endpoints. + */ + useFipsEndpoint?: boolean | Provider; + /** + * @internal + * This field is used internally so you should not fill any value to this field. + */ + serviceConfiguredEndpoint?: never; +} +/** + * @internal + */ +interface PreviouslyResolved { + urlParser: UrlParser; + region: Provider; + endpointProvider: (params: T, context?: { + logger?: Logger; + }) => EndpointV2; + logger?: Logger; + serviceId?: string; +} +/** + * @internal + * + * This supercedes the similarly named EndpointsResolvedConfig (no parametric types) + * from resolveEndpointsConfig.ts in \@smithy/config-resolver. + */ +export interface EndpointResolvedConfig { + /** + * Custom endpoint provided by the user. + * This is normalized to a single interface from the various acceptable types. + * This field will be undefined if a custom endpoint is not provided. + */ + endpoint?: Provider; + endpointProvider: (params: T, context?: { + logger?: Logger; + }) => EndpointV2; + /** + * Whether TLS is enabled for requests. + * @deprecated + */ + tls: boolean; + /** + * Whether the endpoint is specified by caller. 
+ * @internal + * @deprecated + */ + isCustomEndpoint?: boolean; + /** + * Resolved value for input {@link EndpointsInputConfig.useDualstackEndpoint} + */ + useDualstackEndpoint: Provider; + /** + * Resolved value for input {@link EndpointsInputConfig.useFipsEndpoint} + */ + useFipsEndpoint: Provider; + /** + * Unique service identifier. + * @internal + */ + serviceId?: string; + /** + * A configured endpoint global or specific to the service from ENV or AWS SDK configuration files. + * @internal + */ + serviceConfiguredEndpoint?: Provider; +} +/** + * @internal + */ +export declare const resolveEndpointConfig: (input: T & EndpointInputConfig

& PreviouslyResolved

) => T & EndpointResolvedConfig

; +export {}; diff --git a/amplify/functions/deleteDocument/node_modules/@smithy/middleware-endpoint/dist-types/service-customizations/index.d.ts b/amplify/functions/deleteDocument/node_modules/@smithy/middleware-endpoint/dist-types/service-customizations/index.d.ts new file mode 100644 index 0000000..716a15d --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@smithy/middleware-endpoint/dist-types/service-customizations/index.d.ts @@ -0,0 +1,4 @@ +/** + * @internal + */ +export * from "./s3"; diff --git a/amplify/functions/deleteDocument/node_modules/@smithy/middleware-endpoint/dist-types/service-customizations/s3.d.ts b/amplify/functions/deleteDocument/node_modules/@smithy/middleware-endpoint/dist-types/service-customizations/s3.d.ts new file mode 100644 index 0000000..80b2e6a --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@smithy/middleware-endpoint/dist-types/service-customizations/s3.d.ts @@ -0,0 +1,26 @@ +import { EndpointParameters } from "@smithy/types"; +/** + * @internal + */ +export declare const resolveParamsForS3: (endpointParams: EndpointParameters) => Promise; +/** + * @internal + */ +export declare const DOT_PATTERN: RegExp; +/** + * @internal + */ +export declare const S3_HOSTNAME_PATTERN: RegExp; +/** + * Determines whether a given string is DNS compliant per the rules outlined by + * S3. Length, capitaization, and leading dot restrictions are enforced by the + * DOMAIN_PATTERN regular expression. 
+ * @internal + * + * @see https://docs.aws.amazon.com/AmazonS3/latest/dev/BucketRestrictions.html + */ +export declare const isDnsCompatibleBucketName: (bucketName: string) => boolean; +/** + * @internal + */ +export declare const isArnBucketName: (bucketName: string) => boolean; diff --git a/amplify/functions/deleteDocument/node_modules/@smithy/middleware-endpoint/dist-types/ts3.4/adaptors/createConfigValueProvider.d.ts b/amplify/functions/deleteDocument/node_modules/@smithy/middleware-endpoint/dist-types/ts3.4/adaptors/createConfigValueProvider.d.ts new file mode 100644 index 0000000..842f8fa --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@smithy/middleware-endpoint/dist-types/ts3.4/adaptors/createConfigValueProvider.d.ts @@ -0,0 +1,13 @@ +/** + * Normalize some key of the client config to an async provider. + * @internal + * + * @param configKey - the key to look up in config. + * @param canonicalEndpointParamKey - this is the name the EndpointRuleSet uses. + * it will most likely not contain the config + * value, but we use it as a fallback. + * @param config - container of the config values. + * + * @returns async function that will resolve with the value. 
+ */ +export declare const createConfigValueProvider: >(configKey: string, canonicalEndpointParamKey: string, config: Config) => () => Promise; diff --git a/amplify/functions/deleteDocument/node_modules/@smithy/middleware-endpoint/dist-types/ts3.4/adaptors/getEndpointFromConfig.browser.d.ts b/amplify/functions/deleteDocument/node_modules/@smithy/middleware-endpoint/dist-types/ts3.4/adaptors/getEndpointFromConfig.browser.d.ts new file mode 100644 index 0000000..1a4f6ba --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@smithy/middleware-endpoint/dist-types/ts3.4/adaptors/getEndpointFromConfig.browser.d.ts @@ -0,0 +1 @@ +export declare const getEndpointFromConfig: (serviceId: string) => Promise; diff --git a/amplify/functions/deleteDocument/node_modules/@smithy/middleware-endpoint/dist-types/ts3.4/adaptors/getEndpointFromConfig.d.ts b/amplify/functions/deleteDocument/node_modules/@smithy/middleware-endpoint/dist-types/ts3.4/adaptors/getEndpointFromConfig.d.ts new file mode 100644 index 0000000..641570c --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@smithy/middleware-endpoint/dist-types/ts3.4/adaptors/getEndpointFromConfig.d.ts @@ -0,0 +1,4 @@ +/** + * @internal + */ +export declare const getEndpointFromConfig: (serviceId?: string) => Promise; diff --git a/amplify/functions/deleteDocument/node_modules/@smithy/middleware-endpoint/dist-types/ts3.4/adaptors/getEndpointFromInstructions.d.ts b/amplify/functions/deleteDocument/node_modules/@smithy/middleware-endpoint/dist-types/ts3.4/adaptors/getEndpointFromInstructions.d.ts new file mode 100644 index 0000000..82dc8df --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@smithy/middleware-endpoint/dist-types/ts3.4/adaptors/getEndpointFromInstructions.d.ts @@ -0,0 +1,28 @@ +import { EndpointParameters, EndpointV2, HandlerExecutionContext } from "@smithy/types"; +import { EndpointResolvedConfig } from "../resolveEndpointConfig"; +import { EndpointParameterInstructions } from 
"../types"; +/** + * @internal + */ +export type EndpointParameterInstructionsSupplier = Partial<{ + getEndpointParameterInstructions(): EndpointParameterInstructions; +}>; +/** + * This step in the endpoint resolution process is exposed as a function + * to allow packages such as signers, lib-upload, etc. to get + * the V2 Endpoint associated to an instance of some api operation command + * without needing to send it or resolve its middleware stack. + * + * @internal + * @param commandInput - the input of the Command in question. + * @param instructionsSupplier - this is typically a Command constructor. A static function supplying the + * endpoint parameter instructions will exist for commands in services + * having an endpoints ruleset trait. + * @param clientConfig - config of the service client. + * @param context - optional context. + */ +export declare const getEndpointFromInstructions: , Config extends Record>(commandInput: CommandInput, instructionsSupplier: EndpointParameterInstructionsSupplier, clientConfig: Partial> & Config, context?: HandlerExecutionContext) => Promise; +/** + * @internal + */ +export declare const resolveParams: , Config extends Record>(commandInput: CommandInput, instructionsSupplier: EndpointParameterInstructionsSupplier, clientConfig: Partial> & Config) => Promise; diff --git a/amplify/functions/deleteDocument/node_modules/@smithy/middleware-endpoint/dist-types/ts3.4/adaptors/getEndpointUrlConfig.d.ts b/amplify/functions/deleteDocument/node_modules/@smithy/middleware-endpoint/dist-types/ts3.4/adaptors/getEndpointUrlConfig.d.ts new file mode 100644 index 0000000..7b9d068 --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@smithy/middleware-endpoint/dist-types/ts3.4/adaptors/getEndpointUrlConfig.d.ts @@ -0,0 +1,2 @@ +import { LoadedConfigSelectors } from "@smithy/node-config-provider"; +export declare const getEndpointUrlConfig: (serviceId: string) => LoadedConfigSelectors; diff --git 
a/amplify/functions/deleteDocument/node_modules/@smithy/middleware-endpoint/dist-types/ts3.4/adaptors/index.d.ts b/amplify/functions/deleteDocument/node_modules/@smithy/middleware-endpoint/dist-types/ts3.4/adaptors/index.d.ts new file mode 100644 index 0000000..ced0520 --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@smithy/middleware-endpoint/dist-types/ts3.4/adaptors/index.d.ts @@ -0,0 +1,8 @@ +/** + * @internal + */ +export * from "./getEndpointFromInstructions"; +/** + * @internal + */ +export * from "./toEndpointV1"; diff --git a/amplify/functions/deleteDocument/node_modules/@smithy/middleware-endpoint/dist-types/ts3.4/adaptors/toEndpointV1.d.ts b/amplify/functions/deleteDocument/node_modules/@smithy/middleware-endpoint/dist-types/ts3.4/adaptors/toEndpointV1.d.ts new file mode 100644 index 0000000..047ded8 --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@smithy/middleware-endpoint/dist-types/ts3.4/adaptors/toEndpointV1.d.ts @@ -0,0 +1,5 @@ +import { Endpoint, EndpointV2 } from "@smithy/types"; +/** + * @internal + */ +export declare const toEndpointV1: (endpoint: string | Endpoint | EndpointV2) => Endpoint; diff --git a/amplify/functions/deleteDocument/node_modules/@smithy/middleware-endpoint/dist-types/ts3.4/endpointMiddleware.d.ts b/amplify/functions/deleteDocument/node_modules/@smithy/middleware-endpoint/dist-types/ts3.4/endpointMiddleware.d.ts new file mode 100644 index 0000000..3f7e40a --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@smithy/middleware-endpoint/dist-types/ts3.4/endpointMiddleware.d.ts @@ -0,0 +1,10 @@ +import { EndpointParameters, SerializeMiddleware } from "@smithy/types"; +import { EndpointResolvedConfig } from "./resolveEndpointConfig"; +import { EndpointParameterInstructions } from "./types"; +/** + * @internal + */ +export declare const endpointMiddleware: ({ config, instructions, }: { + config: EndpointResolvedConfig; + instructions: EndpointParameterInstructions; +}) => 
SerializeMiddleware; diff --git a/amplify/functions/deleteDocument/node_modules/@smithy/middleware-endpoint/dist-types/ts3.4/getEndpointPlugin.d.ts b/amplify/functions/deleteDocument/node_modules/@smithy/middleware-endpoint/dist-types/ts3.4/getEndpointPlugin.d.ts new file mode 100644 index 0000000..39f93a9 --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@smithy/middleware-endpoint/dist-types/ts3.4/getEndpointPlugin.d.ts @@ -0,0 +1,11 @@ +import { EndpointParameters, Pluggable, RelativeMiddlewareOptions, SerializeHandlerOptions } from "@smithy/types"; +import { EndpointResolvedConfig } from "./resolveEndpointConfig"; +import { EndpointParameterInstructions } from "./types"; +/** + * @internal + */ +export declare const endpointMiddlewareOptions: SerializeHandlerOptions & RelativeMiddlewareOptions; +/** + * @internal + */ +export declare const getEndpointPlugin: (config: EndpointResolvedConfig, instructions: EndpointParameterInstructions) => Pluggable; diff --git a/amplify/functions/deleteDocument/node_modules/@smithy/middleware-endpoint/dist-types/ts3.4/index.d.ts b/amplify/functions/deleteDocument/node_modules/@smithy/middleware-endpoint/dist-types/ts3.4/index.d.ts new file mode 100644 index 0000000..2ad75b9 --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@smithy/middleware-endpoint/dist-types/ts3.4/index.d.ts @@ -0,0 +1,17 @@ +/** + * @internal + */ +export * from "./adaptors"; +/** + * @internal + */ +export * from "./endpointMiddleware"; +/** + * @internal + */ +export * from "./getEndpointPlugin"; +export * from "./resolveEndpointConfig"; +/** + * @internal + */ +export * from "./types"; diff --git a/amplify/functions/deleteDocument/node_modules/@smithy/middleware-endpoint/dist-types/ts3.4/resolveEndpointConfig.d.ts b/amplify/functions/deleteDocument/node_modules/@smithy/middleware-endpoint/dist-types/ts3.4/resolveEndpointConfig.d.ts new file mode 100644 index 0000000..875c9fc --- /dev/null +++ 
b/amplify/functions/deleteDocument/node_modules/@smithy/middleware-endpoint/dist-types/ts3.4/resolveEndpointConfig.d.ts @@ -0,0 +1,107 @@ +import { Endpoint, EndpointParameters, EndpointV2, Logger, Provider, UrlParser } from "@smithy/types"; +/** + * @public + * + * Endpoint config interfaces and resolver for Endpoint v2. They live in separate package to allow per-service onboarding. + * When all services onboard Endpoint v2, the resolver in config-resolver package can be removed. + * This interface includes all the endpoint parameters with built-in bindings of "AWS::*" and "SDK::*" + */ +export interface EndpointInputConfig { + /** + * The fully qualified endpoint of the webservice. This is only for using + * a custom endpoint (for example, when using a local version of S3). + * + * Endpoint transformations such as S3 applying a bucket to the hostname are + * still applicable to this custom endpoint. + */ + endpoint?: string | Endpoint | Provider | EndpointV2 | Provider; + /** + * Providing a custom endpointProvider will override + * built-in transformations of the endpoint such as S3 adding the bucket + * name to the hostname, since they are part of the default endpointProvider. + */ + endpointProvider?: (params: T, context?: { + logger?: Logger; + }) => EndpointV2; + /** + * Whether TLS is enabled for requests. + * @deprecated + */ + tls?: boolean; + /** + * Enables IPv6/IPv4 dualstack endpoint. + */ + useDualstackEndpoint?: boolean | Provider; + /** + * Enables FIPS compatible endpoints. + */ + useFipsEndpoint?: boolean | Provider; + /** + * @internal + * This field is used internally so you should not fill any value to this field. 
+ */ + serviceConfiguredEndpoint?: never; +} +/** + * @internal + */ +interface PreviouslyResolved { + urlParser: UrlParser; + region: Provider; + endpointProvider: (params: T, context?: { + logger?: Logger; + }) => EndpointV2; + logger?: Logger; + serviceId?: string; +} +/** + * @internal + * + * This supercedes the similarly named EndpointsResolvedConfig (no parametric types) + * from resolveEndpointsConfig.ts in \@smithy/config-resolver. + */ +export interface EndpointResolvedConfig { + /** + * Custom endpoint provided by the user. + * This is normalized to a single interface from the various acceptable types. + * This field will be undefined if a custom endpoint is not provided. + */ + endpoint?: Provider; + endpointProvider: (params: T, context?: { + logger?: Logger; + }) => EndpointV2; + /** + * Whether TLS is enabled for requests. + * @deprecated + */ + tls: boolean; + /** + * Whether the endpoint is specified by caller. + * @internal + * @deprecated + */ + isCustomEndpoint?: boolean; + /** + * Resolved value for input {@link EndpointsInputConfig.useDualstackEndpoint} + */ + useDualstackEndpoint: Provider; + /** + * Resolved value for input {@link EndpointsInputConfig.useFipsEndpoint} + */ + useFipsEndpoint: Provider; + /** + * Unique service identifier. + * @internal + */ + serviceId?: string; + /** + * A configured endpoint global or specific to the service from ENV or AWS SDK configuration files. + * @internal + */ + serviceConfiguredEndpoint?: Provider; +} +/** + * @internal + */ +export declare const resolveEndpointConfig: (input: T & EndpointInputConfig

& PreviouslyResolved

) => T & EndpointResolvedConfig

; +export {}; diff --git a/amplify/functions/deleteDocument/node_modules/@smithy/middleware-endpoint/dist-types/ts3.4/service-customizations/index.d.ts b/amplify/functions/deleteDocument/node_modules/@smithy/middleware-endpoint/dist-types/ts3.4/service-customizations/index.d.ts new file mode 100644 index 0000000..6529752 --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@smithy/middleware-endpoint/dist-types/ts3.4/service-customizations/index.d.ts @@ -0,0 +1,4 @@ +/** + * @internal + */ +export * from "./s3"; diff --git a/amplify/functions/deleteDocument/node_modules/@smithy/middleware-endpoint/dist-types/ts3.4/service-customizations/s3.d.ts b/amplify/functions/deleteDocument/node_modules/@smithy/middleware-endpoint/dist-types/ts3.4/service-customizations/s3.d.ts new file mode 100644 index 0000000..cace227 --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@smithy/middleware-endpoint/dist-types/ts3.4/service-customizations/s3.d.ts @@ -0,0 +1,26 @@ +import { EndpointParameters } from "@smithy/types"; +/** + * @internal + */ +export declare const resolveParamsForS3: (endpointParams: EndpointParameters) => Promise; +/** + * @internal + */ +export declare const DOT_PATTERN: RegExp; +/** + * @internal + */ +export declare const S3_HOSTNAME_PATTERN: RegExp; +/** + * Determines whether a given string is DNS compliant per the rules outlined by + * S3. Length, capitaization, and leading dot restrictions are enforced by the + * DOMAIN_PATTERN regular expression. 
+ * @internal + * + * @see https://docs.aws.amazon.com/AmazonS3/latest/dev/BucketRestrictions.html + */ +export declare const isDnsCompatibleBucketName: (bucketName: string) => boolean; +/** + * @internal + */ +export declare const isArnBucketName: (bucketName: string) => boolean; diff --git a/amplify/functions/deleteDocument/node_modules/@smithy/middleware-endpoint/dist-types/ts3.4/types.d.ts b/amplify/functions/deleteDocument/node_modules/@smithy/middleware-endpoint/dist-types/ts3.4/types.d.ts new file mode 100644 index 0000000..a6084c8 --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@smithy/middleware-endpoint/dist-types/ts3.4/types.d.ts @@ -0,0 +1,41 @@ +/** + * @internal + */ +export interface EndpointParameterInstructions { + [name: string]: BuiltInParamInstruction | ClientContextParamInstruction | StaticContextParamInstruction | ContextParamInstruction | OperationContextParamInstruction; +} +/** + * @internal + */ +export interface BuiltInParamInstruction { + type: "builtInParams"; + name: string; +} +/** + * @internal + */ +export interface ClientContextParamInstruction { + type: "clientContextParams"; + name: string; +} +/** + * @internal + */ +export interface StaticContextParamInstruction { + type: "staticContextParams"; + value: string | boolean; +} +/** + * @internal + */ +export interface ContextParamInstruction { + type: "contextParams"; + name: string; +} +/** + * @internal + */ +export interface OperationContextParamInstruction { + type: "operationContextParams"; + get(input: any): any; +} diff --git a/amplify/functions/deleteDocument/node_modules/@smithy/middleware-endpoint/dist-types/types.d.ts b/amplify/functions/deleteDocument/node_modules/@smithy/middleware-endpoint/dist-types/types.d.ts new file mode 100644 index 0000000..0d1d9e9 --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@smithy/middleware-endpoint/dist-types/types.d.ts @@ -0,0 +1,41 @@ +/** + * @internal + */ +export interface 
EndpointParameterInstructions { + [name: string]: BuiltInParamInstruction | ClientContextParamInstruction | StaticContextParamInstruction | ContextParamInstruction | OperationContextParamInstruction; +} +/** + * @internal + */ +export interface BuiltInParamInstruction { + type: "builtInParams"; + name: string; +} +/** + * @internal + */ +export interface ClientContextParamInstruction { + type: "clientContextParams"; + name: string; +} +/** + * @internal + */ +export interface StaticContextParamInstruction { + type: "staticContextParams"; + value: string | boolean; +} +/** + * @internal + */ +export interface ContextParamInstruction { + type: "contextParams"; + name: string; +} +/** + * @internal + */ +export interface OperationContextParamInstruction { + type: "operationContextParams"; + get(input: any): any; +} diff --git a/amplify/functions/deleteDocument/node_modules/@smithy/middleware-endpoint/package.json b/amplify/functions/deleteDocument/node_modules/@smithy/middleware-endpoint/package.json new file mode 100644 index 0000000..e95e228 --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@smithy/middleware-endpoint/package.json @@ -0,0 +1,74 @@ +{ + "name": "@smithy/middleware-endpoint", + "version": "4.1.2", + "scripts": { + "build": "concurrently 'yarn:build:cjs' 'yarn:build:es' 'yarn:build:types && yarn build:types:downlevel'", + "build:cjs": "node ../../scripts/inline middleware-endpoint", + "build:es": "yarn g:tsc -p tsconfig.es.json", + "build:types": "yarn g:tsc -p tsconfig.types.json", + "build:types:downlevel": "rimraf dist-types/ts3.4 && downlevel-dts dist-types dist-types/ts3.4", + "stage-release": "rimraf ./.release && yarn pack && mkdir ./.release && tar zxvf ./package.tgz --directory ./.release && rm ./package.tgz", + "clean": "rimraf ./dist-* && rimraf *.tsbuildinfo || exit 0", + "lint": "eslint -c ../../.eslintrc.js \"src/**/*.ts\"", + "format": "prettier --config ../../prettier.config.js --ignore-path ../../.prettierignore --write 
\"**/*.{ts,md,json}\"", + "test": "yarn g:vitest run", + "extract:docs": "api-extractor run --local", + "test:watch": "yarn g:vitest watch" + }, + "main": "./dist-cjs/index.js", + "module": "./dist-es/index.js", + "types": "./dist-types/index.d.ts", + "author": { + "name": "AWS SDK for JavaScript Team", + "url": "https://aws.amazon.com/javascript/" + }, + "license": "Apache-2.0", + "dependencies": { + "@smithy/core": "^3.3.1", + "@smithy/middleware-serde": "^4.0.3", + "@smithy/node-config-provider": "^4.0.2", + "@smithy/shared-ini-file-loader": "^4.0.2", + "@smithy/types": "^4.2.0", + "@smithy/url-parser": "^4.0.2", + "@smithy/util-middleware": "^4.0.2", + "tslib": "^2.6.2" + }, + "devDependencies": { + "concurrently": "7.0.0", + "downlevel-dts": "0.10.1", + "rimraf": "3.0.2", + "typedoc": "0.23.23" + }, + "engines": { + "node": ">=18.0.0" + }, + "typesVersions": { + "<4.0": { + "dist-types/*": [ + "dist-types/ts3.4/*" + ] + } + }, + "files": [ + "dist-*/**" + ], + "browser": { + "./dist-es/adaptors/getEndpointFromConfig": "./dist-es/adaptors/getEndpointFromConfig.browser" + }, + "react-native": { + "./dist-es/adaptors/getEndpointFromConfig": "./dist-es/adaptors/getEndpointFromConfig.browser", + "./dist-cjs/adaptors/getEndpointFromConfig": "./dist-cjs/adaptors/getEndpointFromConfig.browser" + }, + "homepage": "https://github.com/smithy-lang/smithy-typescript/tree/main/packages/middleware-endpoint", + "repository": { + "type": "git", + "url": "https://github.com/smithy-lang/smithy-typescript.git", + "directory": "packages/middleware-endpoint" + }, + "typedoc": { + "entryPoint": "src/index.ts" + }, + "publishConfig": { + "directory": ".release/package" + } +} \ No newline at end of file diff --git a/amplify/functions/deleteDocument/node_modules/@smithy/middleware-retry/LICENSE b/amplify/functions/deleteDocument/node_modules/@smithy/middleware-retry/LICENSE new file mode 100644 index 0000000..dd65ae0 --- /dev/null +++ 
b/amplify/functions/deleteDocument/node_modules/@smithy/middleware-retry/LICENSE @@ -0,0 +1,201 @@ + Apache License + Version 2.0, January 2004 + http://www.apache.org/licenses/ + + TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION + + 1. Definitions. + + "License" shall mean the terms and conditions for use, reproduction, + and distribution as defined by Sections 1 through 9 of this document. + + "Licensor" shall mean the copyright owner or entity authorized by + the copyright owner that is granting the License. + + "Legal Entity" shall mean the union of the acting entity and all + other entities that control, are controlled by, or are under common + control with that entity. For the purposes of this definition, + "control" means (i) the power, direct or indirect, to cause the + direction or management of such entity, whether by contract or + otherwise, or (ii) ownership of fifty percent (50%) or more of the + outstanding shares, or (iii) beneficial ownership of such entity. + + "You" (or "Your") shall mean an individual or Legal Entity + exercising permissions granted by this License. + + "Source" form shall mean the preferred form for making modifications, + including but not limited to software source code, documentation + source, and configuration files. + + "Object" form shall mean any form resulting from mechanical + transformation or translation of a Source form, including but + not limited to compiled object code, generated documentation, + and conversions to other media types. + + "Work" shall mean the work of authorship, whether in Source or + Object form, made available under the License, as indicated by a + copyright notice that is included in or attached to the work + (an example is provided in the Appendix below). 
+ + "Derivative Works" shall mean any work, whether in Source or Object + form, that is based on (or derived from) the Work and for which the + editorial revisions, annotations, elaborations, or other modifications + represent, as a whole, an original work of authorship. For the purposes + of this License, Derivative Works shall not include works that remain + separable from, or merely link (or bind by name) to the interfaces of, + the Work and Derivative Works thereof. + + "Contribution" shall mean any work of authorship, including + the original version of the Work and any modifications or additions + to that Work or Derivative Works thereof, that is intentionally + submitted to Licensor for inclusion in the Work by the copyright owner + or by an individual or Legal Entity authorized to submit on behalf of + the copyright owner. For the purposes of this definition, "submitted" + means any form of electronic, verbal, or written communication sent + to the Licensor or its representatives, including but not limited to + communication on electronic mailing lists, source code control systems, + and issue tracking systems that are managed by, or on behalf of, the + Licensor for the purpose of discussing and improving the Work, but + excluding communication that is conspicuously marked or otherwise + designated in writing by the copyright owner as "Not a Contribution." + + "Contributor" shall mean Licensor and any individual or Legal Entity + on behalf of whom a Contribution has been received by Licensor and + subsequently incorporated within the Work. + + 2. Grant of Copyright License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + copyright license to reproduce, prepare Derivative Works of, + publicly display, publicly perform, sublicense, and distribute the + Work and such Derivative Works in Source or Object form. + + 3. 
Grant of Patent License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + (except as stated in this section) patent license to make, have made, + use, offer to sell, sell, import, and otherwise transfer the Work, + where such license applies only to those patent claims licensable + by such Contributor that are necessarily infringed by their + Contribution(s) alone or by combination of their Contribution(s) + with the Work to which such Contribution(s) was submitted. If You + institute patent litigation against any entity (including a + cross-claim or counterclaim in a lawsuit) alleging that the Work + or a Contribution incorporated within the Work constitutes direct + or contributory patent infringement, then any patent licenses + granted to You under this License for that Work shall terminate + as of the date such litigation is filed. + + 4. Redistribution. You may reproduce and distribute copies of the + Work or Derivative Works thereof in any medium, with or without + modifications, and in Source or Object form, provided that You + meet the following conditions: + + (a) You must give any other recipients of the Work or + Derivative Works a copy of this License; and + + (b) You must cause any modified files to carry prominent notices + stating that You changed the files; and + + (c) You must retain, in the Source form of any Derivative Works + that You distribute, all copyright, patent, trademark, and + attribution notices from the Source form of the Work, + excluding those notices that do not pertain to any part of + the Derivative Works; and + + (d) If the Work includes a "NOTICE" text file as part of its + distribution, then any Derivative Works that You distribute must + include a readable copy of the attribution notices contained + within such NOTICE file, excluding those notices that do not + pertain to any part of the Derivative 
Works, in at least one + of the following places: within a NOTICE text file distributed + as part of the Derivative Works; within the Source form or + documentation, if provided along with the Derivative Works; or, + within a display generated by the Derivative Works, if and + wherever such third-party notices normally appear. The contents + of the NOTICE file are for informational purposes only and + do not modify the License. You may add Your own attribution + notices within Derivative Works that You distribute, alongside + or as an addendum to the NOTICE text from the Work, provided + that such additional attribution notices cannot be construed + as modifying the License. + + You may add Your own copyright statement to Your modifications and + may provide additional or different license terms and conditions + for use, reproduction, or distribution of Your modifications, or + for any such Derivative Works as a whole, provided Your use, + reproduction, and distribution of the Work otherwise complies with + the conditions stated in this License. + + 5. Submission of Contributions. Unless You explicitly state otherwise, + any Contribution intentionally submitted for inclusion in the Work + by You to the Licensor shall be under the terms and conditions of + this License, without any additional terms or conditions. + Notwithstanding the above, nothing herein shall supersede or modify + the terms of any separate license agreement you may have executed + with Licensor regarding such Contributions. + + 6. Trademarks. This License does not grant permission to use the trade + names, trademarks, service marks, or product names of the Licensor, + except as required for reasonable and customary use in describing the + origin of the Work and reproducing the content of the NOTICE file. + + 7. Disclaimer of Warranty. 
Unless required by applicable law or + agreed to in writing, Licensor provides the Work (and each + Contributor provides its Contributions) on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or + implied, including, without limitation, any warranties or conditions + of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A + PARTICULAR PURPOSE. You are solely responsible for determining the + appropriateness of using or redistributing the Work and assume any + risks associated with Your exercise of permissions under this License. + + 8. Limitation of Liability. In no event and under no legal theory, + whether in tort (including negligence), contract, or otherwise, + unless required by applicable law (such as deliberate and grossly + negligent acts) or agreed to in writing, shall any Contributor be + liable to You for damages, including any direct, indirect, special, + incidental, or consequential damages of any character arising as a + result of this License or out of the use or inability to use the + Work (including but not limited to damages for loss of goodwill, + work stoppage, computer failure or malfunction, or any and all + other commercial damages or losses), even if such Contributor + has been advised of the possibility of such damages. + + 9. Accepting Warranty or Additional Liability. While redistributing + the Work or Derivative Works thereof, You may choose to offer, + and charge a fee for, acceptance of support, warranty, indemnity, + or other liability obligations and/or rights consistent with this + License. However, in accepting such obligations, You may act only + on Your own behalf and on Your sole responsibility, not on behalf + of any other Contributor, and only if You agree to indemnify, + defend, and hold each Contributor harmless for any liability + incurred by, or claims asserted against, such Contributor by reason + of your accepting any such warranty or additional liability. 
+ + END OF TERMS AND CONDITIONS + + APPENDIX: How to apply the Apache License to your work. + + To apply the Apache License to your work, attach the following + boilerplate notice, with the fields enclosed by brackets "{}" + replaced with your own identifying information. (Don't include + the brackets!) The text should be enclosed in the appropriate + comment syntax for the file format. We also recommend that a + file or class name and description of purpose be included on the + same "printed page" as the copyright notice for easier + identification within third-party archives. + + Copyright 2018-2020 Amazon.com, Inc. or its affiliates. All Rights Reserved. + + Licensed under the Apache License, Version 2.0 (the "License"); + you may not use this file except in compliance with the License. + You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + + Unless required by applicable law or agreed to in writing, software + distributed under the License is distributed on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + See the License for the specific language governing permissions and + limitations under the License. 
diff --git a/amplify/functions/deleteDocument/node_modules/@smithy/middleware-retry/README.md b/amplify/functions/deleteDocument/node_modules/@smithy/middleware-retry/README.md new file mode 100644 index 0000000..21ce947 --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@smithy/middleware-retry/README.md @@ -0,0 +1,11 @@ +# @smithy/middleware-retry + +[![NPM version](https://img.shields.io/npm/v/@smithy/middleware-retry/latest.svg)](https://www.npmjs.com/package/@smithy/middleware-retry) +[![NPM downloads](https://img.shields.io/npm/dm/@smithy/middleware-retry.svg)](https://www.npmjs.com/package/@smithy/middleware-retry) + +## Usage + +See [@smithy/util-retry](https://github.com/smithy-lang/smithy-typescript/tree/main/packages/util-retry) +for retry behavior and configuration. + +See also: [AWS Documentation: Retry behavior](https://docs.aws.amazon.com/sdkref/latest/guide/feature-retry-behavior.html). diff --git a/amplify/functions/deleteDocument/node_modules/@smithy/middleware-retry/dist-cjs/AdaptiveRetryStrategy.js b/amplify/functions/deleteDocument/node_modules/@smithy/middleware-retry/dist-cjs/AdaptiveRetryStrategy.js new file mode 100644 index 0000000..532e610 --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@smithy/middleware-retry/dist-cjs/AdaptiveRetryStrategy.js @@ -0,0 +1 @@ +module.exports = require("./index.js"); \ No newline at end of file diff --git a/amplify/functions/deleteDocument/node_modules/@smithy/middleware-retry/dist-cjs/StandardRetryStrategy.js b/amplify/functions/deleteDocument/node_modules/@smithy/middleware-retry/dist-cjs/StandardRetryStrategy.js new file mode 100644 index 0000000..532e610 --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@smithy/middleware-retry/dist-cjs/StandardRetryStrategy.js @@ -0,0 +1 @@ +module.exports = require("./index.js"); \ No newline at end of file diff --git a/amplify/functions/deleteDocument/node_modules/@smithy/middleware-retry/dist-cjs/configurations.js 
b/amplify/functions/deleteDocument/node_modules/@smithy/middleware-retry/dist-cjs/configurations.js new file mode 100644 index 0000000..532e610 --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@smithy/middleware-retry/dist-cjs/configurations.js @@ -0,0 +1 @@ +module.exports = require("./index.js"); \ No newline at end of file diff --git a/amplify/functions/deleteDocument/node_modules/@smithy/middleware-retry/dist-cjs/defaultRetryQuota.js b/amplify/functions/deleteDocument/node_modules/@smithy/middleware-retry/dist-cjs/defaultRetryQuota.js new file mode 100644 index 0000000..532e610 --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@smithy/middleware-retry/dist-cjs/defaultRetryQuota.js @@ -0,0 +1 @@ +module.exports = require("./index.js"); \ No newline at end of file diff --git a/amplify/functions/deleteDocument/node_modules/@smithy/middleware-retry/dist-cjs/delayDecider.js b/amplify/functions/deleteDocument/node_modules/@smithy/middleware-retry/dist-cjs/delayDecider.js new file mode 100644 index 0000000..532e610 --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@smithy/middleware-retry/dist-cjs/delayDecider.js @@ -0,0 +1 @@ +module.exports = require("./index.js"); \ No newline at end of file diff --git a/amplify/functions/deleteDocument/node_modules/@smithy/middleware-retry/dist-cjs/index.js b/amplify/functions/deleteDocument/node_modules/@smithy/middleware-retry/dist-cjs/index.js new file mode 100644 index 0000000..c8375f0 --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@smithy/middleware-retry/dist-cjs/index.js @@ -0,0 +1,425 @@ +var __defProp = Object.defineProperty; +var __getOwnPropDesc = Object.getOwnPropertyDescriptor; +var __getOwnPropNames = Object.getOwnPropertyNames; +var __hasOwnProp = Object.prototype.hasOwnProperty; +var __name = (target, value) => __defProp(target, "name", { value, configurable: true }); +var __export = (target, all) => { + for (var name in all) + __defProp(target, 
name, { get: all[name], enumerable: true }); +}; +var __copyProps = (to, from, except, desc) => { + if (from && typeof from === "object" || typeof from === "function") { + for (let key of __getOwnPropNames(from)) + if (!__hasOwnProp.call(to, key) && key !== except) + __defProp(to, key, { get: () => from[key], enumerable: !(desc = __getOwnPropDesc(from, key)) || desc.enumerable }); + } + return to; +}; +var __toCommonJS = (mod) => __copyProps(__defProp({}, "__esModule", { value: true }), mod); + +// src/index.ts +var src_exports = {}; +__export(src_exports, { + AdaptiveRetryStrategy: () => AdaptiveRetryStrategy, + CONFIG_MAX_ATTEMPTS: () => CONFIG_MAX_ATTEMPTS, + CONFIG_RETRY_MODE: () => CONFIG_RETRY_MODE, + ENV_MAX_ATTEMPTS: () => ENV_MAX_ATTEMPTS, + ENV_RETRY_MODE: () => ENV_RETRY_MODE, + NODE_MAX_ATTEMPT_CONFIG_OPTIONS: () => NODE_MAX_ATTEMPT_CONFIG_OPTIONS, + NODE_RETRY_MODE_CONFIG_OPTIONS: () => NODE_RETRY_MODE_CONFIG_OPTIONS, + StandardRetryStrategy: () => StandardRetryStrategy, + defaultDelayDecider: () => defaultDelayDecider, + defaultRetryDecider: () => defaultRetryDecider, + getOmitRetryHeadersPlugin: () => getOmitRetryHeadersPlugin, + getRetryAfterHint: () => getRetryAfterHint, + getRetryPlugin: () => getRetryPlugin, + omitRetryHeadersMiddleware: () => omitRetryHeadersMiddleware, + omitRetryHeadersMiddlewareOptions: () => omitRetryHeadersMiddlewareOptions, + resolveRetryConfig: () => resolveRetryConfig, + retryMiddleware: () => retryMiddleware, + retryMiddlewareOptions: () => retryMiddlewareOptions +}); +module.exports = __toCommonJS(src_exports); + +// src/AdaptiveRetryStrategy.ts + + +// src/StandardRetryStrategy.ts +var import_protocol_http = require("@smithy/protocol-http"); + + +var import_uuid = require("uuid"); + +// src/defaultRetryQuota.ts +var import_util_retry = require("@smithy/util-retry"); +var getDefaultRetryQuota = /* @__PURE__ */ __name((initialRetryTokens, options) => { + const MAX_CAPACITY = initialRetryTokens; + const noRetryIncrement 
= options?.noRetryIncrement ?? import_util_retry.NO_RETRY_INCREMENT; + const retryCost = options?.retryCost ?? import_util_retry.RETRY_COST; + const timeoutRetryCost = options?.timeoutRetryCost ?? import_util_retry.TIMEOUT_RETRY_COST; + let availableCapacity = initialRetryTokens; + const getCapacityAmount = /* @__PURE__ */ __name((error) => error.name === "TimeoutError" ? timeoutRetryCost : retryCost, "getCapacityAmount"); + const hasRetryTokens = /* @__PURE__ */ __name((error) => getCapacityAmount(error) <= availableCapacity, "hasRetryTokens"); + const retrieveRetryTokens = /* @__PURE__ */ __name((error) => { + if (!hasRetryTokens(error)) { + throw new Error("No retry token available"); + } + const capacityAmount = getCapacityAmount(error); + availableCapacity -= capacityAmount; + return capacityAmount; + }, "retrieveRetryTokens"); + const releaseRetryTokens = /* @__PURE__ */ __name((capacityReleaseAmount) => { + availableCapacity += capacityReleaseAmount ?? noRetryIncrement; + availableCapacity = Math.min(availableCapacity, MAX_CAPACITY); + }, "releaseRetryTokens"); + return Object.freeze({ + hasRetryTokens, + retrieveRetryTokens, + releaseRetryTokens + }); +}, "getDefaultRetryQuota"); + +// src/delayDecider.ts + +var defaultDelayDecider = /* @__PURE__ */ __name((delayBase, attempts) => Math.floor(Math.min(import_util_retry.MAXIMUM_RETRY_DELAY, Math.random() * 2 ** attempts * delayBase)), "defaultDelayDecider"); + +// src/retryDecider.ts +var import_service_error_classification = require("@smithy/service-error-classification"); +var defaultRetryDecider = /* @__PURE__ */ __name((error) => { + if (!error) { + return false; + } + return (0, import_service_error_classification.isRetryableByTrait)(error) || (0, import_service_error_classification.isClockSkewError)(error) || (0, import_service_error_classification.isThrottlingError)(error) || (0, import_service_error_classification.isTransientError)(error); +}, "defaultRetryDecider"); + +// src/util.ts +var asSdkError 
= /* @__PURE__ */ __name((error) => { + if (error instanceof Error) + return error; + if (error instanceof Object) + return Object.assign(new Error(), error); + if (typeof error === "string") + return new Error(error); + return new Error(`AWS SDK error wrapper for ${error}`); +}, "asSdkError"); + +// src/StandardRetryStrategy.ts +var StandardRetryStrategy = class { + constructor(maxAttemptsProvider, options) { + this.maxAttemptsProvider = maxAttemptsProvider; + this.mode = import_util_retry.RETRY_MODES.STANDARD; + this.retryDecider = options?.retryDecider ?? defaultRetryDecider; + this.delayDecider = options?.delayDecider ?? defaultDelayDecider; + this.retryQuota = options?.retryQuota ?? getDefaultRetryQuota(import_util_retry.INITIAL_RETRY_TOKENS); + } + static { + __name(this, "StandardRetryStrategy"); + } + shouldRetry(error, attempts, maxAttempts) { + return attempts < maxAttempts && this.retryDecider(error) && this.retryQuota.hasRetryTokens(error); + } + async getMaxAttempts() { + let maxAttempts; + try { + maxAttempts = await this.maxAttemptsProvider(); + } catch (error) { + maxAttempts = import_util_retry.DEFAULT_MAX_ATTEMPTS; + } + return maxAttempts; + } + async retry(next, args, options) { + let retryTokenAmount; + let attempts = 0; + let totalDelay = 0; + const maxAttempts = await this.getMaxAttempts(); + const { request } = args; + if (import_protocol_http.HttpRequest.isInstance(request)) { + request.headers[import_util_retry.INVOCATION_ID_HEADER] = (0, import_uuid.v4)(); + } + while (true) { + try { + if (import_protocol_http.HttpRequest.isInstance(request)) { + request.headers[import_util_retry.REQUEST_HEADER] = `attempt=${attempts + 1}; max=${maxAttempts}`; + } + if (options?.beforeRequest) { + await options.beforeRequest(); + } + const { response, output } = await next(args); + if (options?.afterRequest) { + options.afterRequest(response); + } + this.retryQuota.releaseRetryTokens(retryTokenAmount); + output.$metadata.attempts = attempts + 1; + 
output.$metadata.totalRetryDelay = totalDelay; + return { response, output }; + } catch (e) { + const err = asSdkError(e); + attempts++; + if (this.shouldRetry(err, attempts, maxAttempts)) { + retryTokenAmount = this.retryQuota.retrieveRetryTokens(err); + const delayFromDecider = this.delayDecider( + (0, import_service_error_classification.isThrottlingError)(err) ? import_util_retry.THROTTLING_RETRY_DELAY_BASE : import_util_retry.DEFAULT_RETRY_DELAY_BASE, + attempts + ); + const delayFromResponse = getDelayFromRetryAfterHeader(err.$response); + const delay = Math.max(delayFromResponse || 0, delayFromDecider); + totalDelay += delay; + await new Promise((resolve) => setTimeout(resolve, delay)); + continue; + } + if (!err.$metadata) { + err.$metadata = {}; + } + err.$metadata.attempts = attempts; + err.$metadata.totalRetryDelay = totalDelay; + throw err; + } + } + } +}; +var getDelayFromRetryAfterHeader = /* @__PURE__ */ __name((response) => { + if (!import_protocol_http.HttpResponse.isInstance(response)) + return; + const retryAfterHeaderName = Object.keys(response.headers).find((key) => key.toLowerCase() === "retry-after"); + if (!retryAfterHeaderName) + return; + const retryAfter = response.headers[retryAfterHeaderName]; + const retryAfterSeconds = Number(retryAfter); + if (!Number.isNaN(retryAfterSeconds)) + return retryAfterSeconds * 1e3; + const retryAfterDate = new Date(retryAfter); + return retryAfterDate.getTime() - Date.now(); +}, "getDelayFromRetryAfterHeader"); + +// src/AdaptiveRetryStrategy.ts +var AdaptiveRetryStrategy = class extends StandardRetryStrategy { + static { + __name(this, "AdaptiveRetryStrategy"); + } + constructor(maxAttemptsProvider, options) { + const { rateLimiter, ...superOptions } = options ?? {}; + super(maxAttemptsProvider, superOptions); + this.rateLimiter = rateLimiter ?? 
new import_util_retry.DefaultRateLimiter(); + this.mode = import_util_retry.RETRY_MODES.ADAPTIVE; + } + async retry(next, args) { + return super.retry(next, args, { + beforeRequest: async () => { + return this.rateLimiter.getSendToken(); + }, + afterRequest: (response) => { + this.rateLimiter.updateClientSendingRate(response); + } + }); + } +}; + +// src/configurations.ts +var import_util_middleware = require("@smithy/util-middleware"); + +var ENV_MAX_ATTEMPTS = "AWS_MAX_ATTEMPTS"; +var CONFIG_MAX_ATTEMPTS = "max_attempts"; +var NODE_MAX_ATTEMPT_CONFIG_OPTIONS = { + environmentVariableSelector: (env) => { + const value = env[ENV_MAX_ATTEMPTS]; + if (!value) + return void 0; + const maxAttempt = parseInt(value); + if (Number.isNaN(maxAttempt)) { + throw new Error(`Environment variable ${ENV_MAX_ATTEMPTS} mast be a number, got "${value}"`); + } + return maxAttempt; + }, + configFileSelector: (profile) => { + const value = profile[CONFIG_MAX_ATTEMPTS]; + if (!value) + return void 0; + const maxAttempt = parseInt(value); + if (Number.isNaN(maxAttempt)) { + throw new Error(`Shared config file entry ${CONFIG_MAX_ATTEMPTS} mast be a number, got "${value}"`); + } + return maxAttempt; + }, + default: import_util_retry.DEFAULT_MAX_ATTEMPTS +}; +var resolveRetryConfig = /* @__PURE__ */ __name((input) => { + const { retryStrategy, retryMode: _retryMode, maxAttempts: _maxAttempts } = input; + const maxAttempts = (0, import_util_middleware.normalizeProvider)(_maxAttempts ?? 
import_util_retry.DEFAULT_MAX_ATTEMPTS); + return Object.assign(input, { + maxAttempts, + retryStrategy: async () => { + if (retryStrategy) { + return retryStrategy; + } + const retryMode = await (0, import_util_middleware.normalizeProvider)(_retryMode)(); + if (retryMode === import_util_retry.RETRY_MODES.ADAPTIVE) { + return new import_util_retry.AdaptiveRetryStrategy(maxAttempts); + } + return new import_util_retry.StandardRetryStrategy(maxAttempts); + } + }); +}, "resolveRetryConfig"); +var ENV_RETRY_MODE = "AWS_RETRY_MODE"; +var CONFIG_RETRY_MODE = "retry_mode"; +var NODE_RETRY_MODE_CONFIG_OPTIONS = { + environmentVariableSelector: (env) => env[ENV_RETRY_MODE], + configFileSelector: (profile) => profile[CONFIG_RETRY_MODE], + default: import_util_retry.DEFAULT_RETRY_MODE +}; + +// src/omitRetryHeadersMiddleware.ts + + +var omitRetryHeadersMiddleware = /* @__PURE__ */ __name(() => (next) => async (args) => { + const { request } = args; + if (import_protocol_http.HttpRequest.isInstance(request)) { + delete request.headers[import_util_retry.INVOCATION_ID_HEADER]; + delete request.headers[import_util_retry.REQUEST_HEADER]; + } + return next(args); +}, "omitRetryHeadersMiddleware"); +var omitRetryHeadersMiddlewareOptions = { + name: "omitRetryHeadersMiddleware", + tags: ["RETRY", "HEADERS", "OMIT_RETRY_HEADERS"], + relation: "before", + toMiddleware: "awsAuthMiddleware", + override: true +}; +var getOmitRetryHeadersPlugin = /* @__PURE__ */ __name((options) => ({ + applyToStack: (clientStack) => { + clientStack.addRelativeTo(omitRetryHeadersMiddleware(), omitRetryHeadersMiddlewareOptions); + } +}), "getOmitRetryHeadersPlugin"); + +// src/retryMiddleware.ts + + +var import_smithy_client = require("@smithy/smithy-client"); + + +var import_isStreamingPayload = require("./isStreamingPayload/isStreamingPayload"); +var retryMiddleware = /* @__PURE__ */ __name((options) => (next, context) => async (args) => { + let retryStrategy = await options.retryStrategy(); + const 
maxAttempts = await options.maxAttempts(); + if (isRetryStrategyV2(retryStrategy)) { + retryStrategy = retryStrategy; + let retryToken = await retryStrategy.acquireInitialRetryToken(context["partition_id"]); + let lastError = new Error(); + let attempts = 0; + let totalRetryDelay = 0; + const { request } = args; + const isRequest = import_protocol_http.HttpRequest.isInstance(request); + if (isRequest) { + request.headers[import_util_retry.INVOCATION_ID_HEADER] = (0, import_uuid.v4)(); + } + while (true) { + try { + if (isRequest) { + request.headers[import_util_retry.REQUEST_HEADER] = `attempt=${attempts + 1}; max=${maxAttempts}`; + } + const { response, output } = await next(args); + retryStrategy.recordSuccess(retryToken); + output.$metadata.attempts = attempts + 1; + output.$metadata.totalRetryDelay = totalRetryDelay; + return { response, output }; + } catch (e) { + const retryErrorInfo = getRetryErrorInfo(e); + lastError = asSdkError(e); + if (isRequest && (0, import_isStreamingPayload.isStreamingPayload)(request)) { + (context.logger instanceof import_smithy_client.NoOpLogger ? console : context.logger)?.warn( + "An error was encountered in a non-retryable streaming request." 
+ ); + throw lastError; + } + try { + retryToken = await retryStrategy.refreshRetryTokenForRetry(retryToken, retryErrorInfo); + } catch (refreshError) { + if (!lastError.$metadata) { + lastError.$metadata = {}; + } + lastError.$metadata.attempts = attempts + 1; + lastError.$metadata.totalRetryDelay = totalRetryDelay; + throw lastError; + } + attempts = retryToken.getRetryCount(); + const delay = retryToken.getRetryDelay(); + totalRetryDelay += delay; + await new Promise((resolve) => setTimeout(resolve, delay)); + } + } + } else { + retryStrategy = retryStrategy; + if (retryStrategy?.mode) + context.userAgent = [...context.userAgent || [], ["cfg/retry-mode", retryStrategy.mode]]; + return retryStrategy.retry(next, args); + } +}, "retryMiddleware"); +var isRetryStrategyV2 = /* @__PURE__ */ __name((retryStrategy) => typeof retryStrategy.acquireInitialRetryToken !== "undefined" && typeof retryStrategy.refreshRetryTokenForRetry !== "undefined" && typeof retryStrategy.recordSuccess !== "undefined", "isRetryStrategyV2"); +var getRetryErrorInfo = /* @__PURE__ */ __name((error) => { + const errorInfo = { + error, + errorType: getRetryErrorType(error) + }; + const retryAfterHint = getRetryAfterHint(error.$response); + if (retryAfterHint) { + errorInfo.retryAfterHint = retryAfterHint; + } + return errorInfo; +}, "getRetryErrorInfo"); +var getRetryErrorType = /* @__PURE__ */ __name((error) => { + if ((0, import_service_error_classification.isThrottlingError)(error)) + return "THROTTLING"; + if ((0, import_service_error_classification.isTransientError)(error)) + return "TRANSIENT"; + if ((0, import_service_error_classification.isServerError)(error)) + return "SERVER_ERROR"; + return "CLIENT_ERROR"; +}, "getRetryErrorType"); +var retryMiddlewareOptions = { + name: "retryMiddleware", + tags: ["RETRY"], + step: "finalizeRequest", + priority: "high", + override: true +}; +var getRetryPlugin = /* @__PURE__ */ __name((options) => ({ + applyToStack: (clientStack) => { + 
clientStack.add(retryMiddleware(options), retryMiddlewareOptions); + } +}), "getRetryPlugin"); +var getRetryAfterHint = /* @__PURE__ */ __name((response) => { + if (!import_protocol_http.HttpResponse.isInstance(response)) + return; + const retryAfterHeaderName = Object.keys(response.headers).find((key) => key.toLowerCase() === "retry-after"); + if (!retryAfterHeaderName) + return; + const retryAfter = response.headers[retryAfterHeaderName]; + const retryAfterSeconds = Number(retryAfter); + if (!Number.isNaN(retryAfterSeconds)) + return new Date(retryAfterSeconds * 1e3); + const retryAfterDate = new Date(retryAfter); + return retryAfterDate; +}, "getRetryAfterHint"); +// Annotate the CommonJS export names for ESM import in node: + +0 && (module.exports = { + AdaptiveRetryStrategy, + StandardRetryStrategy, + ENV_MAX_ATTEMPTS, + CONFIG_MAX_ATTEMPTS, + NODE_MAX_ATTEMPT_CONFIG_OPTIONS, + resolveRetryConfig, + ENV_RETRY_MODE, + CONFIG_RETRY_MODE, + NODE_RETRY_MODE_CONFIG_OPTIONS, + defaultDelayDecider, + omitRetryHeadersMiddleware, + omitRetryHeadersMiddlewareOptions, + getOmitRetryHeadersPlugin, + defaultRetryDecider, + retryMiddleware, + retryMiddlewareOptions, + getRetryPlugin, + getRetryAfterHint +}); + diff --git a/amplify/functions/deleteDocument/node_modules/@smithy/middleware-retry/dist-cjs/isStreamingPayload/isStreamingPayload.browser.js b/amplify/functions/deleteDocument/node_modules/@smithy/middleware-retry/dist-cjs/isStreamingPayload/isStreamingPayload.browser.js new file mode 100644 index 0000000..21fc19a --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@smithy/middleware-retry/dist-cjs/isStreamingPayload/isStreamingPayload.browser.js @@ -0,0 +1,5 @@ +"use strict"; +Object.defineProperty(exports, "__esModule", { value: true }); +exports.isStreamingPayload = void 0; +const isStreamingPayload = (request) => (request === null || request === void 0 ? 
void 0 : request.body) instanceof ReadableStream; +exports.isStreamingPayload = isStreamingPayload; diff --git a/amplify/functions/deleteDocument/node_modules/@smithy/middleware-retry/dist-cjs/isStreamingPayload/isStreamingPayload.js b/amplify/functions/deleteDocument/node_modules/@smithy/middleware-retry/dist-cjs/isStreamingPayload/isStreamingPayload.js new file mode 100644 index 0000000..06f420b --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@smithy/middleware-retry/dist-cjs/isStreamingPayload/isStreamingPayload.js @@ -0,0 +1,7 @@ +"use strict"; +Object.defineProperty(exports, "__esModule", { value: true }); +exports.isStreamingPayload = void 0; +const stream_1 = require("stream"); +const isStreamingPayload = (request) => (request === null || request === void 0 ? void 0 : request.body) instanceof stream_1.Readable || + (typeof ReadableStream !== "undefined" && (request === null || request === void 0 ? void 0 : request.body) instanceof ReadableStream); +exports.isStreamingPayload = isStreamingPayload; diff --git a/amplify/functions/deleteDocument/node_modules/@smithy/middleware-retry/dist-cjs/omitRetryHeadersMiddleware.js b/amplify/functions/deleteDocument/node_modules/@smithy/middleware-retry/dist-cjs/omitRetryHeadersMiddleware.js new file mode 100644 index 0000000..532e610 --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@smithy/middleware-retry/dist-cjs/omitRetryHeadersMiddleware.js @@ -0,0 +1 @@ +module.exports = require("./index.js"); \ No newline at end of file diff --git a/amplify/functions/deleteDocument/node_modules/@smithy/middleware-retry/dist-cjs/retryDecider.js b/amplify/functions/deleteDocument/node_modules/@smithy/middleware-retry/dist-cjs/retryDecider.js new file mode 100644 index 0000000..532e610 --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@smithy/middleware-retry/dist-cjs/retryDecider.js @@ -0,0 +1 @@ +module.exports = require("./index.js"); \ No newline at end of file diff --git 
a/amplify/functions/deleteDocument/node_modules/@smithy/middleware-retry/dist-cjs/retryMiddleware.js b/amplify/functions/deleteDocument/node_modules/@smithy/middleware-retry/dist-cjs/retryMiddleware.js new file mode 100644 index 0000000..532e610 --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@smithy/middleware-retry/dist-cjs/retryMiddleware.js @@ -0,0 +1 @@ +module.exports = require("./index.js"); \ No newline at end of file diff --git a/amplify/functions/deleteDocument/node_modules/@smithy/middleware-retry/dist-cjs/types.js b/amplify/functions/deleteDocument/node_modules/@smithy/middleware-retry/dist-cjs/types.js new file mode 100644 index 0000000..532e610 --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@smithy/middleware-retry/dist-cjs/types.js @@ -0,0 +1 @@ +module.exports = require("./index.js"); \ No newline at end of file diff --git a/amplify/functions/deleteDocument/node_modules/@smithy/middleware-retry/dist-cjs/util.js b/amplify/functions/deleteDocument/node_modules/@smithy/middleware-retry/dist-cjs/util.js new file mode 100644 index 0000000..532e610 --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@smithy/middleware-retry/dist-cjs/util.js @@ -0,0 +1 @@ +module.exports = require("./index.js"); \ No newline at end of file diff --git a/amplify/functions/deleteDocument/node_modules/@smithy/middleware-retry/dist-es/AdaptiveRetryStrategy.js b/amplify/functions/deleteDocument/node_modules/@smithy/middleware-retry/dist-es/AdaptiveRetryStrategy.js new file mode 100644 index 0000000..d349451 --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@smithy/middleware-retry/dist-es/AdaptiveRetryStrategy.js @@ -0,0 +1,20 @@ +import { DefaultRateLimiter, RETRY_MODES } from "@smithy/util-retry"; +import { StandardRetryStrategy } from "./StandardRetryStrategy"; +export class AdaptiveRetryStrategy extends StandardRetryStrategy { + constructor(maxAttemptsProvider, options) { + const { rateLimiter, 
...superOptions } = options ?? {}; + super(maxAttemptsProvider, superOptions); + this.rateLimiter = rateLimiter ?? new DefaultRateLimiter(); + this.mode = RETRY_MODES.ADAPTIVE; + } + async retry(next, args) { + return super.retry(next, args, { + beforeRequest: async () => { + return this.rateLimiter.getSendToken(); + }, + afterRequest: (response) => { + this.rateLimiter.updateClientSendingRate(response); + }, + }); + } +} diff --git a/amplify/functions/deleteDocument/node_modules/@smithy/middleware-retry/dist-es/StandardRetryStrategy.js b/amplify/functions/deleteDocument/node_modules/@smithy/middleware-retry/dist-es/StandardRetryStrategy.js new file mode 100644 index 0000000..e718ad6 --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@smithy/middleware-retry/dist-es/StandardRetryStrategy.js @@ -0,0 +1,90 @@ +import { HttpRequest, HttpResponse } from "@smithy/protocol-http"; +import { isThrottlingError } from "@smithy/service-error-classification"; +import { DEFAULT_MAX_ATTEMPTS, DEFAULT_RETRY_DELAY_BASE, INITIAL_RETRY_TOKENS, INVOCATION_ID_HEADER, REQUEST_HEADER, RETRY_MODES, THROTTLING_RETRY_DELAY_BASE, } from "@smithy/util-retry"; +import { v4 } from "uuid"; +import { getDefaultRetryQuota } from "./defaultRetryQuota"; +import { defaultDelayDecider } from "./delayDecider"; +import { defaultRetryDecider } from "./retryDecider"; +import { asSdkError } from "./util"; +export class StandardRetryStrategy { + constructor(maxAttemptsProvider, options) { + this.maxAttemptsProvider = maxAttemptsProvider; + this.mode = RETRY_MODES.STANDARD; + this.retryDecider = options?.retryDecider ?? defaultRetryDecider; + this.delayDecider = options?.delayDecider ?? defaultDelayDecider; + this.retryQuota = options?.retryQuota ?? 
getDefaultRetryQuota(INITIAL_RETRY_TOKENS); + } + shouldRetry(error, attempts, maxAttempts) { + return attempts < maxAttempts && this.retryDecider(error) && this.retryQuota.hasRetryTokens(error); + } + async getMaxAttempts() { + let maxAttempts; + try { + maxAttempts = await this.maxAttemptsProvider(); + } + catch (error) { + maxAttempts = DEFAULT_MAX_ATTEMPTS; + } + return maxAttempts; + } + async retry(next, args, options) { + let retryTokenAmount; + let attempts = 0; + let totalDelay = 0; + const maxAttempts = await this.getMaxAttempts(); + const { request } = args; + if (HttpRequest.isInstance(request)) { + request.headers[INVOCATION_ID_HEADER] = v4(); + } + while (true) { + try { + if (HttpRequest.isInstance(request)) { + request.headers[REQUEST_HEADER] = `attempt=${attempts + 1}; max=${maxAttempts}`; + } + if (options?.beforeRequest) { + await options.beforeRequest(); + } + const { response, output } = await next(args); + if (options?.afterRequest) { + options.afterRequest(response); + } + this.retryQuota.releaseRetryTokens(retryTokenAmount); + output.$metadata.attempts = attempts + 1; + output.$metadata.totalRetryDelay = totalDelay; + return { response, output }; + } + catch (e) { + const err = asSdkError(e); + attempts++; + if (this.shouldRetry(err, attempts, maxAttempts)) { + retryTokenAmount = this.retryQuota.retrieveRetryTokens(err); + const delayFromDecider = this.delayDecider(isThrottlingError(err) ? 
THROTTLING_RETRY_DELAY_BASE : DEFAULT_RETRY_DELAY_BASE, attempts); + const delayFromResponse = getDelayFromRetryAfterHeader(err.$response); + const delay = Math.max(delayFromResponse || 0, delayFromDecider); + totalDelay += delay; + await new Promise((resolve) => setTimeout(resolve, delay)); + continue; + } + if (!err.$metadata) { + err.$metadata = {}; + } + err.$metadata.attempts = attempts; + err.$metadata.totalRetryDelay = totalDelay; + throw err; + } + } + } +} +const getDelayFromRetryAfterHeader = (response) => { + if (!HttpResponse.isInstance(response)) + return; + const retryAfterHeaderName = Object.keys(response.headers).find((key) => key.toLowerCase() === "retry-after"); + if (!retryAfterHeaderName) + return; + const retryAfter = response.headers[retryAfterHeaderName]; + const retryAfterSeconds = Number(retryAfter); + if (!Number.isNaN(retryAfterSeconds)) + return retryAfterSeconds * 1000; + const retryAfterDate = new Date(retryAfter); + return retryAfterDate.getTime() - Date.now(); +}; diff --git a/amplify/functions/deleteDocument/node_modules/@smithy/middleware-retry/dist-es/configurations.js b/amplify/functions/deleteDocument/node_modules/@smithy/middleware-retry/dist-es/configurations.js new file mode 100644 index 0000000..ec375e3 --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@smithy/middleware-retry/dist-es/configurations.js @@ -0,0 +1,51 @@ +import { normalizeProvider } from "@smithy/util-middleware"; +import { AdaptiveRetryStrategy, DEFAULT_MAX_ATTEMPTS, DEFAULT_RETRY_MODE, RETRY_MODES, StandardRetryStrategy, } from "@smithy/util-retry"; +export const ENV_MAX_ATTEMPTS = "AWS_MAX_ATTEMPTS"; +export const CONFIG_MAX_ATTEMPTS = "max_attempts"; +export const NODE_MAX_ATTEMPT_CONFIG_OPTIONS = { + environmentVariableSelector: (env) => { + const value = env[ENV_MAX_ATTEMPTS]; + if (!value) + return undefined; + const maxAttempt = parseInt(value); + if (Number.isNaN(maxAttempt)) { + throw new Error(`Environment variable 
${ENV_MAX_ATTEMPTS} mast be a number, got "${value}"`); + } + return maxAttempt; + }, + configFileSelector: (profile) => { + const value = profile[CONFIG_MAX_ATTEMPTS]; + if (!value) + return undefined; + const maxAttempt = parseInt(value); + if (Number.isNaN(maxAttempt)) { + throw new Error(`Shared config file entry ${CONFIG_MAX_ATTEMPTS} mast be a number, got "${value}"`); + } + return maxAttempt; + }, + default: DEFAULT_MAX_ATTEMPTS, +}; +export const resolveRetryConfig = (input) => { + const { retryStrategy, retryMode: _retryMode, maxAttempts: _maxAttempts } = input; + const maxAttempts = normalizeProvider(_maxAttempts ?? DEFAULT_MAX_ATTEMPTS); + return Object.assign(input, { + maxAttempts, + retryStrategy: async () => { + if (retryStrategy) { + return retryStrategy; + } + const retryMode = await normalizeProvider(_retryMode)(); + if (retryMode === RETRY_MODES.ADAPTIVE) { + return new AdaptiveRetryStrategy(maxAttempts); + } + return new StandardRetryStrategy(maxAttempts); + }, + }); +}; +export const ENV_RETRY_MODE = "AWS_RETRY_MODE"; +export const CONFIG_RETRY_MODE = "retry_mode"; +export const NODE_RETRY_MODE_CONFIG_OPTIONS = { + environmentVariableSelector: (env) => env[ENV_RETRY_MODE], + configFileSelector: (profile) => profile[CONFIG_RETRY_MODE], + default: DEFAULT_RETRY_MODE, +}; diff --git a/amplify/functions/deleteDocument/node_modules/@smithy/middleware-retry/dist-es/defaultRetryQuota.js b/amplify/functions/deleteDocument/node_modules/@smithy/middleware-retry/dist-es/defaultRetryQuota.js new file mode 100644 index 0000000..4bf6771 --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@smithy/middleware-retry/dist-es/defaultRetryQuota.js @@ -0,0 +1,27 @@ +import { NO_RETRY_INCREMENT, RETRY_COST, TIMEOUT_RETRY_COST } from "@smithy/util-retry"; +export const getDefaultRetryQuota = (initialRetryTokens, options) => { + const MAX_CAPACITY = initialRetryTokens; + const noRetryIncrement = options?.noRetryIncrement ?? 
NO_RETRY_INCREMENT; + const retryCost = options?.retryCost ?? RETRY_COST; + const timeoutRetryCost = options?.timeoutRetryCost ?? TIMEOUT_RETRY_COST; + let availableCapacity = initialRetryTokens; + const getCapacityAmount = (error) => (error.name === "TimeoutError" ? timeoutRetryCost : retryCost); + const hasRetryTokens = (error) => getCapacityAmount(error) <= availableCapacity; + const retrieveRetryTokens = (error) => { + if (!hasRetryTokens(error)) { + throw new Error("No retry token available"); + } + const capacityAmount = getCapacityAmount(error); + availableCapacity -= capacityAmount; + return capacityAmount; + }; + const releaseRetryTokens = (capacityReleaseAmount) => { + availableCapacity += capacityReleaseAmount ?? noRetryIncrement; + availableCapacity = Math.min(availableCapacity, MAX_CAPACITY); + }; + return Object.freeze({ + hasRetryTokens, + retrieveRetryTokens, + releaseRetryTokens, + }); +}; diff --git a/amplify/functions/deleteDocument/node_modules/@smithy/middleware-retry/dist-es/delayDecider.js b/amplify/functions/deleteDocument/node_modules/@smithy/middleware-retry/dist-es/delayDecider.js new file mode 100644 index 0000000..2928506 --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@smithy/middleware-retry/dist-es/delayDecider.js @@ -0,0 +1,2 @@ +import { MAXIMUM_RETRY_DELAY } from "@smithy/util-retry"; +export const defaultDelayDecider = (delayBase, attempts) => Math.floor(Math.min(MAXIMUM_RETRY_DELAY, Math.random() * 2 ** attempts * delayBase)); diff --git a/amplify/functions/deleteDocument/node_modules/@smithy/middleware-retry/dist-es/index.js b/amplify/functions/deleteDocument/node_modules/@smithy/middleware-retry/dist-es/index.js new file mode 100644 index 0000000..9ebe326 --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@smithy/middleware-retry/dist-es/index.js @@ -0,0 +1,7 @@ +export * from "./AdaptiveRetryStrategy"; +export * from "./StandardRetryStrategy"; +export * from "./configurations"; +export * from 
"./delayDecider"; +export * from "./omitRetryHeadersMiddleware"; +export * from "./retryDecider"; +export * from "./retryMiddleware"; diff --git a/amplify/functions/deleteDocument/node_modules/@smithy/middleware-retry/dist-es/isStreamingPayload/isStreamingPayload.browser.js b/amplify/functions/deleteDocument/node_modules/@smithy/middleware-retry/dist-es/isStreamingPayload/isStreamingPayload.browser.js new file mode 100644 index 0000000..9569e92 --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@smithy/middleware-retry/dist-es/isStreamingPayload/isStreamingPayload.browser.js @@ -0,0 +1 @@ +export const isStreamingPayload = (request) => request?.body instanceof ReadableStream; diff --git a/amplify/functions/deleteDocument/node_modules/@smithy/middleware-retry/dist-es/isStreamingPayload/isStreamingPayload.js b/amplify/functions/deleteDocument/node_modules/@smithy/middleware-retry/dist-es/isStreamingPayload/isStreamingPayload.js new file mode 100644 index 0000000..7dcc687 --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@smithy/middleware-retry/dist-es/isStreamingPayload/isStreamingPayload.js @@ -0,0 +1,3 @@ +import { Readable } from "stream"; +export const isStreamingPayload = (request) => request?.body instanceof Readable || + (typeof ReadableStream !== "undefined" && request?.body instanceof ReadableStream); diff --git a/amplify/functions/deleteDocument/node_modules/@smithy/middleware-retry/dist-es/omitRetryHeadersMiddleware.js b/amplify/functions/deleteDocument/node_modules/@smithy/middleware-retry/dist-es/omitRetryHeadersMiddleware.js new file mode 100644 index 0000000..cb3c372 --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@smithy/middleware-retry/dist-es/omitRetryHeadersMiddleware.js @@ -0,0 +1,22 @@ +import { HttpRequest } from "@smithy/protocol-http"; +import { INVOCATION_ID_HEADER, REQUEST_HEADER } from "@smithy/util-retry"; +export const omitRetryHeadersMiddleware = () => (next) => async (args) => { + const 
{ request } = args; + if (HttpRequest.isInstance(request)) { + delete request.headers[INVOCATION_ID_HEADER]; + delete request.headers[REQUEST_HEADER]; + } + return next(args); +}; +export const omitRetryHeadersMiddlewareOptions = { + name: "omitRetryHeadersMiddleware", + tags: ["RETRY", "HEADERS", "OMIT_RETRY_HEADERS"], + relation: "before", + toMiddleware: "awsAuthMiddleware", + override: true, +}; +export const getOmitRetryHeadersPlugin = (options) => ({ + applyToStack: (clientStack) => { + clientStack.addRelativeTo(omitRetryHeadersMiddleware(), omitRetryHeadersMiddlewareOptions); + }, +}); diff --git a/amplify/functions/deleteDocument/node_modules/@smithy/middleware-retry/dist-es/retryDecider.js b/amplify/functions/deleteDocument/node_modules/@smithy/middleware-retry/dist-es/retryDecider.js new file mode 100644 index 0000000..b965fba --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@smithy/middleware-retry/dist-es/retryDecider.js @@ -0,0 +1,7 @@ +import { isClockSkewError, isRetryableByTrait, isThrottlingError, isTransientError, } from "@smithy/service-error-classification"; +export const defaultRetryDecider = (error) => { + if (!error) { + return false; + } + return isRetryableByTrait(error) || isClockSkewError(error) || isThrottlingError(error) || isTransientError(error); +}; diff --git a/amplify/functions/deleteDocument/node_modules/@smithy/middleware-retry/dist-es/retryMiddleware.js b/amplify/functions/deleteDocument/node_modules/@smithy/middleware-retry/dist-es/retryMiddleware.js new file mode 100644 index 0000000..a897735 --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@smithy/middleware-retry/dist-es/retryMiddleware.js @@ -0,0 +1,112 @@ +import { HttpRequest, HttpResponse } from "@smithy/protocol-http"; +import { isServerError, isThrottlingError, isTransientError } from "@smithy/service-error-classification"; +import { NoOpLogger } from "@smithy/smithy-client"; +import { INVOCATION_ID_HEADER, REQUEST_HEADER } from 
"@smithy/util-retry"; +import { v4 } from "uuid"; +import { isStreamingPayload } from "./isStreamingPayload/isStreamingPayload"; +import { asSdkError } from "./util"; +export const retryMiddleware = (options) => (next, context) => async (args) => { + let retryStrategy = await options.retryStrategy(); + const maxAttempts = await options.maxAttempts(); + if (isRetryStrategyV2(retryStrategy)) { + retryStrategy = retryStrategy; + let retryToken = await retryStrategy.acquireInitialRetryToken(context["partition_id"]); + let lastError = new Error(); + let attempts = 0; + let totalRetryDelay = 0; + const { request } = args; + const isRequest = HttpRequest.isInstance(request); + if (isRequest) { + request.headers[INVOCATION_ID_HEADER] = v4(); + } + while (true) { + try { + if (isRequest) { + request.headers[REQUEST_HEADER] = `attempt=${attempts + 1}; max=${maxAttempts}`; + } + const { response, output } = await next(args); + retryStrategy.recordSuccess(retryToken); + output.$metadata.attempts = attempts + 1; + output.$metadata.totalRetryDelay = totalRetryDelay; + return { response, output }; + } + catch (e) { + const retryErrorInfo = getRetryErrorInfo(e); + lastError = asSdkError(e); + if (isRequest && isStreamingPayload(request)) { + (context.logger instanceof NoOpLogger ? 
console : context.logger)?.warn("An error was encountered in a non-retryable streaming request."); + throw lastError; + } + try { + retryToken = await retryStrategy.refreshRetryTokenForRetry(retryToken, retryErrorInfo); + } + catch (refreshError) { + if (!lastError.$metadata) { + lastError.$metadata = {}; + } + lastError.$metadata.attempts = attempts + 1; + lastError.$metadata.totalRetryDelay = totalRetryDelay; + throw lastError; + } + attempts = retryToken.getRetryCount(); + const delay = retryToken.getRetryDelay(); + totalRetryDelay += delay; + await new Promise((resolve) => setTimeout(resolve, delay)); + } + } + } + else { + retryStrategy = retryStrategy; + if (retryStrategy?.mode) + context.userAgent = [...(context.userAgent || []), ["cfg/retry-mode", retryStrategy.mode]]; + return retryStrategy.retry(next, args); + } +}; +const isRetryStrategyV2 = (retryStrategy) => typeof retryStrategy.acquireInitialRetryToken !== "undefined" && + typeof retryStrategy.refreshRetryTokenForRetry !== "undefined" && + typeof retryStrategy.recordSuccess !== "undefined"; +const getRetryErrorInfo = (error) => { + const errorInfo = { + error, + errorType: getRetryErrorType(error), + }; + const retryAfterHint = getRetryAfterHint(error.$response); + if (retryAfterHint) { + errorInfo.retryAfterHint = retryAfterHint; + } + return errorInfo; +}; +const getRetryErrorType = (error) => { + if (isThrottlingError(error)) + return "THROTTLING"; + if (isTransientError(error)) + return "TRANSIENT"; + if (isServerError(error)) + return "SERVER_ERROR"; + return "CLIENT_ERROR"; +}; +export const retryMiddlewareOptions = { + name: "retryMiddleware", + tags: ["RETRY"], + step: "finalizeRequest", + priority: "high", + override: true, +}; +export const getRetryPlugin = (options) => ({ + applyToStack: (clientStack) => { + clientStack.add(retryMiddleware(options), retryMiddlewareOptions); + }, +}); +export const getRetryAfterHint = (response) => { + if (!HttpResponse.isInstance(response)) + return; + 
const retryAfterHeaderName = Object.keys(response.headers).find((key) => key.toLowerCase() === "retry-after"); + if (!retryAfterHeaderName) + return; + const retryAfter = response.headers[retryAfterHeaderName]; + const retryAfterSeconds = Number(retryAfter); + if (!Number.isNaN(retryAfterSeconds)) + return new Date(retryAfterSeconds * 1000); + const retryAfterDate = new Date(retryAfter); + return retryAfterDate; +}; diff --git a/amplify/functions/deleteDocument/node_modules/@smithy/middleware-retry/dist-es/types.js b/amplify/functions/deleteDocument/node_modules/@smithy/middleware-retry/dist-es/types.js new file mode 100644 index 0000000..cb0ff5c --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@smithy/middleware-retry/dist-es/types.js @@ -0,0 +1 @@ +export {}; diff --git a/amplify/functions/deleteDocument/node_modules/@smithy/middleware-retry/dist-es/util.js b/amplify/functions/deleteDocument/node_modules/@smithy/middleware-retry/dist-es/util.js new file mode 100644 index 0000000..f45e6b4 --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@smithy/middleware-retry/dist-es/util.js @@ -0,0 +1,9 @@ +export const asSdkError = (error) => { + if (error instanceof Error) + return error; + if (error instanceof Object) + return Object.assign(new Error(), error); + if (typeof error === "string") + return new Error(error); + return new Error(`AWS SDK error wrapper for ${error}`); +}; diff --git a/amplify/functions/deleteDocument/node_modules/@smithy/middleware-retry/dist-types/AdaptiveRetryStrategy.d.ts b/amplify/functions/deleteDocument/node_modules/@smithy/middleware-retry/dist-types/AdaptiveRetryStrategy.d.ts new file mode 100644 index 0000000..98a6a1d --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@smithy/middleware-retry/dist-types/AdaptiveRetryStrategy.d.ts @@ -0,0 +1,22 @@ +import { FinalizeHandler, FinalizeHandlerArguments, MetadataBearer, Provider } from "@smithy/types"; +import { RateLimiter } from 
"@smithy/util-retry"; +import { StandardRetryStrategy, StandardRetryStrategyOptions } from "./StandardRetryStrategy"; +/** + * @public + * Strategy options to be passed to AdaptiveRetryStrategy + */ +export interface AdaptiveRetryStrategyOptions extends StandardRetryStrategyOptions { + rateLimiter?: RateLimiter; +} +/** + * @public + * @deprecated use AdaptiveRetryStrategy from @smithy/util-retry + */ +export declare class AdaptiveRetryStrategy extends StandardRetryStrategy { + private rateLimiter; + constructor(maxAttemptsProvider: Provider, options?: AdaptiveRetryStrategyOptions); + retry(next: FinalizeHandler, args: FinalizeHandlerArguments): Promise<{ + response: unknown; + output: Ouput; + }>; +} diff --git a/amplify/functions/deleteDocument/node_modules/@smithy/middleware-retry/dist-types/StandardRetryStrategy.d.ts b/amplify/functions/deleteDocument/node_modules/@smithy/middleware-retry/dist-types/StandardRetryStrategy.d.ts new file mode 100644 index 0000000..7007ac3 --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@smithy/middleware-retry/dist-types/StandardRetryStrategy.d.ts @@ -0,0 +1,33 @@ +import { FinalizeHandler, FinalizeHandlerArguments, MetadataBearer, Provider, RetryStrategy } from "@smithy/types"; +import { DelayDecider, RetryDecider, RetryQuota } from "./types"; +/** + * Strategy options to be passed to StandardRetryStrategy + * @public + * @deprecated use StandardRetryStrategy from @smithy/util-retry + */ +export interface StandardRetryStrategyOptions { + retryDecider?: RetryDecider; + delayDecider?: DelayDecider; + retryQuota?: RetryQuota; +} +/** + * @public + * @deprecated use StandardRetryStrategy from @smithy/util-retry + */ +export declare class StandardRetryStrategy implements RetryStrategy { + private readonly maxAttemptsProvider; + private retryDecider; + private delayDecider; + private retryQuota; + mode: string; + constructor(maxAttemptsProvider: Provider, options?: StandardRetryStrategyOptions); + private shouldRetry; 
+ private getMaxAttempts; + retry(next: FinalizeHandler, args: FinalizeHandlerArguments, options?: { + beforeRequest: Function; + afterRequest: Function; + }): Promise<{ + response: unknown; + output: Ouput; + }>; +} diff --git a/amplify/functions/deleteDocument/node_modules/@smithy/middleware-retry/dist-types/configurations.d.ts b/amplify/functions/deleteDocument/node_modules/@smithy/middleware-retry/dist-types/configurations.d.ts new file mode 100644 index 0000000..150c2a3 --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@smithy/middleware-retry/dist-types/configurations.d.ts @@ -0,0 +1,66 @@ +import { LoadedConfigSelectors } from "@smithy/node-config-provider"; +import { Provider, RetryStrategy, RetryStrategyV2 } from "@smithy/types"; +/** + * @internal + */ +export declare const ENV_MAX_ATTEMPTS = "AWS_MAX_ATTEMPTS"; +/** + * @internal + */ +export declare const CONFIG_MAX_ATTEMPTS = "max_attempts"; +/** + * @internal + */ +export declare const NODE_MAX_ATTEMPT_CONFIG_OPTIONS: LoadedConfigSelectors; +/** + * @public + */ +export interface RetryInputConfig { + /** + * The maximum number of times requests that encounter retryable failures should be attempted. + */ + maxAttempts?: number | Provider; + /** + * The strategy to retry the request. Using built-in exponential backoff strategy by default. + */ + retryStrategy?: RetryStrategy | RetryStrategyV2; +} +/** + * @internal + */ +export interface PreviouslyResolved { + /** + * Specifies provider for retry algorithm to use. 
+ * @internal + */ + retryMode: string | Provider; +} +/** + * @internal + */ +export interface RetryResolvedConfig { + /** + * Resolved value for input config {@link RetryInputConfig.maxAttempts} + */ + maxAttempts: Provider; + /** + * Resolved value for input config {@link RetryInputConfig.retryStrategy} + */ + retryStrategy: Provider; +} +/** + * @internal + */ +export declare const resolveRetryConfig: (input: T & PreviouslyResolved & RetryInputConfig) => T & RetryResolvedConfig; +/** + * @internal + */ +export declare const ENV_RETRY_MODE = "AWS_RETRY_MODE"; +/** + * @internal + */ +export declare const CONFIG_RETRY_MODE = "retry_mode"; +/** + * @internal + */ +export declare const NODE_RETRY_MODE_CONFIG_OPTIONS: LoadedConfigSelectors; diff --git a/amplify/functions/deleteDocument/node_modules/@smithy/middleware-retry/dist-types/defaultRetryQuota.d.ts b/amplify/functions/deleteDocument/node_modules/@smithy/middleware-retry/dist-types/defaultRetryQuota.d.ts new file mode 100644 index 0000000..332a494 --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@smithy/middleware-retry/dist-types/defaultRetryQuota.d.ts @@ -0,0 +1,24 @@ +import { RetryQuota } from "./types"; +/** + * @internal + */ +export interface DefaultRetryQuotaOptions { + /** + * The total amount of retry token to be incremented from retry token balance + * if an SDK operation invocation succeeds without requiring a retry request. + */ + noRetryIncrement?: number; + /** + * The total amount of retry tokens to be decremented from retry token balance. + */ + retryCost?: number; + /** + * The total amount of retry tokens to be decremented from retry token balance + * when a throttling error is encountered. 
+ */ + timeoutRetryCost?: number; +} +/** + * @internal + */ +export declare const getDefaultRetryQuota: (initialRetryTokens: number, options?: DefaultRetryQuotaOptions) => RetryQuota; diff --git a/amplify/functions/deleteDocument/node_modules/@smithy/middleware-retry/dist-types/delayDecider.d.ts b/amplify/functions/deleteDocument/node_modules/@smithy/middleware-retry/dist-types/delayDecider.d.ts new file mode 100644 index 0000000..986ff42 --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@smithy/middleware-retry/dist-types/delayDecider.d.ts @@ -0,0 +1,5 @@ +/** + * @internal + * Calculate a capped, fully-jittered exponential backoff time. + */ +export declare const defaultDelayDecider: (delayBase: number, attempts: number) => number; diff --git a/amplify/functions/deleteDocument/node_modules/@smithy/middleware-retry/dist-types/index.d.ts b/amplify/functions/deleteDocument/node_modules/@smithy/middleware-retry/dist-types/index.d.ts new file mode 100644 index 0000000..9ebe326 --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@smithy/middleware-retry/dist-types/index.d.ts @@ -0,0 +1,7 @@ +export * from "./AdaptiveRetryStrategy"; +export * from "./StandardRetryStrategy"; +export * from "./configurations"; +export * from "./delayDecider"; +export * from "./omitRetryHeadersMiddleware"; +export * from "./retryDecider"; +export * from "./retryMiddleware"; diff --git a/amplify/functions/deleteDocument/node_modules/@smithy/middleware-retry/dist-types/isStreamingPayload/isStreamingPayload.browser.d.ts b/amplify/functions/deleteDocument/node_modules/@smithy/middleware-retry/dist-types/isStreamingPayload/isStreamingPayload.browser.d.ts new file mode 100644 index 0000000..48d70ba --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@smithy/middleware-retry/dist-types/isStreamingPayload/isStreamingPayload.browser.d.ts @@ -0,0 +1,5 @@ +import type { HttpRequest } from "@smithy/protocol-http"; +/** + * @internal + */ +export declare 
const isStreamingPayload: (request: HttpRequest) => boolean; diff --git a/amplify/functions/deleteDocument/node_modules/@smithy/middleware-retry/dist-types/isStreamingPayload/isStreamingPayload.d.ts b/amplify/functions/deleteDocument/node_modules/@smithy/middleware-retry/dist-types/isStreamingPayload/isStreamingPayload.d.ts new file mode 100644 index 0000000..48d70ba --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@smithy/middleware-retry/dist-types/isStreamingPayload/isStreamingPayload.d.ts @@ -0,0 +1,5 @@ +import type { HttpRequest } from "@smithy/protocol-http"; +/** + * @internal + */ +export declare const isStreamingPayload: (request: HttpRequest) => boolean; diff --git a/amplify/functions/deleteDocument/node_modules/@smithy/middleware-retry/dist-types/omitRetryHeadersMiddleware.d.ts b/amplify/functions/deleteDocument/node_modules/@smithy/middleware-retry/dist-types/omitRetryHeadersMiddleware.d.ts new file mode 100644 index 0000000..50c1ab6 --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@smithy/middleware-retry/dist-types/omitRetryHeadersMiddleware.d.ts @@ -0,0 +1,13 @@ +import { FinalizeHandler, MetadataBearer, Pluggable, RelativeMiddlewareOptions } from "@smithy/types"; +/** + * @internal + */ +export declare const omitRetryHeadersMiddleware: () => (next: FinalizeHandler) => FinalizeHandler; +/** + * @internal + */ +export declare const omitRetryHeadersMiddlewareOptions: RelativeMiddlewareOptions; +/** + * @internal + */ +export declare const getOmitRetryHeadersPlugin: (options: unknown) => Pluggable; diff --git a/amplify/functions/deleteDocument/node_modules/@smithy/middleware-retry/dist-types/retryDecider.d.ts b/amplify/functions/deleteDocument/node_modules/@smithy/middleware-retry/dist-types/retryDecider.d.ts new file mode 100644 index 0000000..11a4a9c --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@smithy/middleware-retry/dist-types/retryDecider.d.ts @@ -0,0 +1,6 @@ +import { SdkError } from 
"@smithy/types"; +/** + * @internal + * @deprecated this is only used in the deprecated StandardRetryStrategy. Do not use in new code. + */ +export declare const defaultRetryDecider: (error: SdkError) => boolean; diff --git a/amplify/functions/deleteDocument/node_modules/@smithy/middleware-retry/dist-types/retryMiddleware.d.ts b/amplify/functions/deleteDocument/node_modules/@smithy/middleware-retry/dist-types/retryMiddleware.d.ts new file mode 100644 index 0000000..9310301 --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@smithy/middleware-retry/dist-types/retryMiddleware.d.ts @@ -0,0 +1,18 @@ +import { AbsoluteLocation, FinalizeHandler, FinalizeRequestHandlerOptions, HandlerExecutionContext, MetadataBearer, Pluggable } from "@smithy/types"; +import { RetryResolvedConfig } from "./configurations"; +/** + * @internal + */ +export declare const retryMiddleware: (options: RetryResolvedConfig) => (next: FinalizeHandler, context: HandlerExecutionContext) => FinalizeHandler; +/** + * @internal + */ +export declare const retryMiddlewareOptions: FinalizeRequestHandlerOptions & AbsoluteLocation; +/** + * @internal + */ +export declare const getRetryPlugin: (options: RetryResolvedConfig) => Pluggable; +/** + * @internal + */ +export declare const getRetryAfterHint: (response: unknown) => Date | undefined; diff --git a/amplify/functions/deleteDocument/node_modules/@smithy/middleware-retry/dist-types/ts3.4/AdaptiveRetryStrategy.d.ts b/amplify/functions/deleteDocument/node_modules/@smithy/middleware-retry/dist-types/ts3.4/AdaptiveRetryStrategy.d.ts new file mode 100644 index 0000000..33f0416 --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@smithy/middleware-retry/dist-types/ts3.4/AdaptiveRetryStrategy.d.ts @@ -0,0 +1,22 @@ +import { FinalizeHandler, FinalizeHandlerArguments, MetadataBearer, Provider } from "@smithy/types"; +import { RateLimiter } from "@smithy/util-retry"; +import { StandardRetryStrategy, StandardRetryStrategyOptions } from 
"./StandardRetryStrategy"; +/** + * @public + * Strategy options to be passed to AdaptiveRetryStrategy + */ +export interface AdaptiveRetryStrategyOptions extends StandardRetryStrategyOptions { + rateLimiter?: RateLimiter; +} +/** + * @public + * @deprecated use AdaptiveRetryStrategy from @smithy/util-retry + */ +export declare class AdaptiveRetryStrategy extends StandardRetryStrategy { + private rateLimiter; + constructor(maxAttemptsProvider: Provider, options?: AdaptiveRetryStrategyOptions); + retry(next: FinalizeHandler, args: FinalizeHandlerArguments): Promise<{ + response: unknown; + output: Ouput; + }>; +} diff --git a/amplify/functions/deleteDocument/node_modules/@smithy/middleware-retry/dist-types/ts3.4/StandardRetryStrategy.d.ts b/amplify/functions/deleteDocument/node_modules/@smithy/middleware-retry/dist-types/ts3.4/StandardRetryStrategy.d.ts new file mode 100644 index 0000000..b4656d2 --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@smithy/middleware-retry/dist-types/ts3.4/StandardRetryStrategy.d.ts @@ -0,0 +1,33 @@ +import { FinalizeHandler, FinalizeHandlerArguments, MetadataBearer, Provider, RetryStrategy } from "@smithy/types"; +import { DelayDecider, RetryDecider, RetryQuota } from "./types"; +/** + * Strategy options to be passed to StandardRetryStrategy + * @public + * @deprecated use StandardRetryStrategy from @smithy/util-retry + */ +export interface StandardRetryStrategyOptions { + retryDecider?: RetryDecider; + delayDecider?: DelayDecider; + retryQuota?: RetryQuota; +} +/** + * @public + * @deprecated use StandardRetryStrategy from @smithy/util-retry + */ +export declare class StandardRetryStrategy implements RetryStrategy { + private readonly maxAttemptsProvider; + private retryDecider; + private delayDecider; + private retryQuota; + mode: string; + constructor(maxAttemptsProvider: Provider, options?: StandardRetryStrategyOptions); + private shouldRetry; + private getMaxAttempts; + retry(next: FinalizeHandler, args: 
FinalizeHandlerArguments, options?: { + beforeRequest: Function; + afterRequest: Function; + }): Promise<{ + response: unknown; + output: Ouput; + }>; +} diff --git a/amplify/functions/deleteDocument/node_modules/@smithy/middleware-retry/dist-types/ts3.4/configurations.d.ts b/amplify/functions/deleteDocument/node_modules/@smithy/middleware-retry/dist-types/ts3.4/configurations.d.ts new file mode 100644 index 0000000..79f8646 --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@smithy/middleware-retry/dist-types/ts3.4/configurations.d.ts @@ -0,0 +1,66 @@ +import { LoadedConfigSelectors } from "@smithy/node-config-provider"; +import { Provider, RetryStrategy, RetryStrategyV2 } from "@smithy/types"; +/** + * @internal + */ +export declare const ENV_MAX_ATTEMPTS = "AWS_MAX_ATTEMPTS"; +/** + * @internal + */ +export declare const CONFIG_MAX_ATTEMPTS = "max_attempts"; +/** + * @internal + */ +export declare const NODE_MAX_ATTEMPT_CONFIG_OPTIONS: LoadedConfigSelectors; +/** + * @public + */ +export interface RetryInputConfig { + /** + * The maximum number of times requests that encounter retryable failures should be attempted. + */ + maxAttempts?: number | Provider; + /** + * The strategy to retry the request. Using built-in exponential backoff strategy by default. + */ + retryStrategy?: RetryStrategy | RetryStrategyV2; +} +/** + * @internal + */ +export interface PreviouslyResolved { + /** + * Specifies provider for retry algorithm to use. 
+ * @internal + */ + retryMode: string | Provider; +} +/** + * @internal + */ +export interface RetryResolvedConfig { + /** + * Resolved value for input config {@link RetryInputConfig.maxAttempts} + */ + maxAttempts: Provider; + /** + * Resolved value for input config {@link RetryInputConfig.retryStrategy} + */ + retryStrategy: Provider; +} +/** + * @internal + */ +export declare const resolveRetryConfig: (input: T & PreviouslyResolved & RetryInputConfig) => T & RetryResolvedConfig; +/** + * @internal + */ +export declare const ENV_RETRY_MODE = "AWS_RETRY_MODE"; +/** + * @internal + */ +export declare const CONFIG_RETRY_MODE = "retry_mode"; +/** + * @internal + */ +export declare const NODE_RETRY_MODE_CONFIG_OPTIONS: LoadedConfigSelectors; diff --git a/amplify/functions/deleteDocument/node_modules/@smithy/middleware-retry/dist-types/ts3.4/defaultRetryQuota.d.ts b/amplify/functions/deleteDocument/node_modules/@smithy/middleware-retry/dist-types/ts3.4/defaultRetryQuota.d.ts new file mode 100644 index 0000000..704b5af --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@smithy/middleware-retry/dist-types/ts3.4/defaultRetryQuota.d.ts @@ -0,0 +1,24 @@ +import { RetryQuota } from "./types"; +/** + * @internal + */ +export interface DefaultRetryQuotaOptions { + /** + * The total amount of retry token to be incremented from retry token balance + * if an SDK operation invocation succeeds without requiring a retry request. + */ + noRetryIncrement?: number; + /** + * The total amount of retry tokens to be decremented from retry token balance. + */ + retryCost?: number; + /** + * The total amount of retry tokens to be decremented from retry token balance + * when a throttling error is encountered. 
+ */ + timeoutRetryCost?: number; +} +/** + * @internal + */ +export declare const getDefaultRetryQuota: (initialRetryTokens: number, options?: DefaultRetryQuotaOptions) => RetryQuota; diff --git a/amplify/functions/deleteDocument/node_modules/@smithy/middleware-retry/dist-types/ts3.4/delayDecider.d.ts b/amplify/functions/deleteDocument/node_modules/@smithy/middleware-retry/dist-types/ts3.4/delayDecider.d.ts new file mode 100644 index 0000000..7fa73ec --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@smithy/middleware-retry/dist-types/ts3.4/delayDecider.d.ts @@ -0,0 +1,5 @@ +/** + * @internal + * Calculate a capped, fully-jittered exponential backoff time. + */ +export declare const defaultDelayDecider: (delayBase: number, attempts: number) => number; diff --git a/amplify/functions/deleteDocument/node_modules/@smithy/middleware-retry/dist-types/ts3.4/index.d.ts b/amplify/functions/deleteDocument/node_modules/@smithy/middleware-retry/dist-types/ts3.4/index.d.ts new file mode 100644 index 0000000..e366bbb --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@smithy/middleware-retry/dist-types/ts3.4/index.d.ts @@ -0,0 +1,7 @@ +export * from "./AdaptiveRetryStrategy"; +export * from "./StandardRetryStrategy"; +export * from "./configurations"; +export * from "./delayDecider"; +export * from "./omitRetryHeadersMiddleware"; +export * from "./retryDecider"; +export * from "./retryMiddleware"; diff --git a/amplify/functions/deleteDocument/node_modules/@smithy/middleware-retry/dist-types/ts3.4/isStreamingPayload/isStreamingPayload.browser.d.ts b/amplify/functions/deleteDocument/node_modules/@smithy/middleware-retry/dist-types/ts3.4/isStreamingPayload/isStreamingPayload.browser.d.ts new file mode 100644 index 0000000..2a4d542 --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@smithy/middleware-retry/dist-types/ts3.4/isStreamingPayload/isStreamingPayload.browser.d.ts @@ -0,0 +1,5 @@ +import { HttpRequest } from 
"@smithy/protocol-http"; +/** + * @internal + */ +export declare const isStreamingPayload: (request: HttpRequest) => boolean; diff --git a/amplify/functions/deleteDocument/node_modules/@smithy/middleware-retry/dist-types/ts3.4/isStreamingPayload/isStreamingPayload.d.ts b/amplify/functions/deleteDocument/node_modules/@smithy/middleware-retry/dist-types/ts3.4/isStreamingPayload/isStreamingPayload.d.ts new file mode 100644 index 0000000..2a4d542 --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@smithy/middleware-retry/dist-types/ts3.4/isStreamingPayload/isStreamingPayload.d.ts @@ -0,0 +1,5 @@ +import { HttpRequest } from "@smithy/protocol-http"; +/** + * @internal + */ +export declare const isStreamingPayload: (request: HttpRequest) => boolean; diff --git a/amplify/functions/deleteDocument/node_modules/@smithy/middleware-retry/dist-types/ts3.4/omitRetryHeadersMiddleware.d.ts b/amplify/functions/deleteDocument/node_modules/@smithy/middleware-retry/dist-types/ts3.4/omitRetryHeadersMiddleware.d.ts new file mode 100644 index 0000000..abd8f71 --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@smithy/middleware-retry/dist-types/ts3.4/omitRetryHeadersMiddleware.d.ts @@ -0,0 +1,13 @@ +import { FinalizeHandler, MetadataBearer, Pluggable, RelativeMiddlewareOptions } from "@smithy/types"; +/** + * @internal + */ +export declare const omitRetryHeadersMiddleware: () => (next: FinalizeHandler) => FinalizeHandler; +/** + * @internal + */ +export declare const omitRetryHeadersMiddlewareOptions: RelativeMiddlewareOptions; +/** + * @internal + */ +export declare const getOmitRetryHeadersPlugin: (options: unknown) => Pluggable; diff --git a/amplify/functions/deleteDocument/node_modules/@smithy/middleware-retry/dist-types/ts3.4/retryDecider.d.ts b/amplify/functions/deleteDocument/node_modules/@smithy/middleware-retry/dist-types/ts3.4/retryDecider.d.ts new file mode 100644 index 0000000..c00661a --- /dev/null +++ 
b/amplify/functions/deleteDocument/node_modules/@smithy/middleware-retry/dist-types/ts3.4/retryDecider.d.ts @@ -0,0 +1,6 @@ +import { SdkError } from "@smithy/types"; +/** + * @internal + * @deprecated this is only used in the deprecated StandardRetryStrategy. Do not use in new code. + */ +export declare const defaultRetryDecider: (error: SdkError) => boolean; diff --git a/amplify/functions/deleteDocument/node_modules/@smithy/middleware-retry/dist-types/ts3.4/retryMiddleware.d.ts b/amplify/functions/deleteDocument/node_modules/@smithy/middleware-retry/dist-types/ts3.4/retryMiddleware.d.ts new file mode 100644 index 0000000..137dbf1 --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@smithy/middleware-retry/dist-types/ts3.4/retryMiddleware.d.ts @@ -0,0 +1,18 @@ +import { AbsoluteLocation, FinalizeHandler, FinalizeRequestHandlerOptions, HandlerExecutionContext, MetadataBearer, Pluggable } from "@smithy/types"; +import { RetryResolvedConfig } from "./configurations"; +/** + * @internal + */ +export declare const retryMiddleware: (options: RetryResolvedConfig) => (next: FinalizeHandler, context: HandlerExecutionContext) => FinalizeHandler; +/** + * @internal + */ +export declare const retryMiddlewareOptions: FinalizeRequestHandlerOptions & AbsoluteLocation; +/** + * @internal + */ +export declare const getRetryPlugin: (options: RetryResolvedConfig) => Pluggable; +/** + * @internal + */ +export declare const getRetryAfterHint: (response: unknown) => Date | undefined; diff --git a/amplify/functions/deleteDocument/node_modules/@smithy/middleware-retry/dist-types/ts3.4/types.d.ts b/amplify/functions/deleteDocument/node_modules/@smithy/middleware-retry/dist-types/ts3.4/types.d.ts new file mode 100644 index 0000000..06775c6 --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@smithy/middleware-retry/dist-types/ts3.4/types.d.ts @@ -0,0 +1,65 @@ +import { SdkError } from "@smithy/types"; +/** + * Determines whether an error is retryable based on 
the number of retries + * already attempted, the HTTP status code, and the error received (if any). + * + * @param error - The error encountered. + * + * @deprecated + * @internal + */ +export interface RetryDecider { + (error: SdkError): boolean; +} +/** + * Determines the number of milliseconds to wait before retrying an action. + * + * @param delayBase - The base delay (in milliseconds). + * @param attempts - The number of times the action has already been tried. + * + * @deprecated + * @internal + */ +export interface DelayDecider { + (delayBase: number, attempts: number): number; +} +/** + * Interface that specifies the retry quota behavior. + * @deprecated + * @internal + */ +export interface RetryQuota { + /** + * returns true if retry tokens are available from the retry quota bucket. + */ + hasRetryTokens: (error: SdkError) => boolean; + /** + * returns token amount from the retry quota bucket. + * throws error is retry tokens are not available. + */ + retrieveRetryTokens: (error: SdkError) => number; + /** + * releases tokens back to the retry quota. + */ + releaseRetryTokens: (releaseCapacityAmount?: number) => void; +} +/** + * @deprecated + * @internal + */ +export interface RateLimiter { + /** + * If there is sufficient capacity (tokens) available, it immediately returns. + * If there is not sufficient capacity, it will either sleep a certain amount + * of time until the rate limiter can retrieve a token from its token bucket + * or raise an exception indicating there is insufficient capacity. + */ + getSendToken: () => Promise; + /** + * Updates the client sending rate based on response. + * If the response was successful, the capacity and fill rate are increased. + * If the response was a throttling response, the capacity and fill rate are + * decreased. Transient errors do not affect the rate limiter. 
+ */ + updateClientSendingRate: (response: any) => void; +} diff --git a/amplify/functions/deleteDocument/node_modules/@smithy/middleware-retry/dist-types/ts3.4/util.d.ts b/amplify/functions/deleteDocument/node_modules/@smithy/middleware-retry/dist-types/ts3.4/util.d.ts new file mode 100644 index 0000000..7684a9f --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@smithy/middleware-retry/dist-types/ts3.4/util.d.ts @@ -0,0 +1,2 @@ +import { SdkError } from "@smithy/types"; +export declare const asSdkError: (error: unknown) => SdkError; diff --git a/amplify/functions/deleteDocument/node_modules/@smithy/middleware-retry/dist-types/types.d.ts b/amplify/functions/deleteDocument/node_modules/@smithy/middleware-retry/dist-types/types.d.ts new file mode 100644 index 0000000..8f22712 --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@smithy/middleware-retry/dist-types/types.d.ts @@ -0,0 +1,65 @@ +import { SdkError } from "@smithy/types"; +/** + * Determines whether an error is retryable based on the number of retries + * already attempted, the HTTP status code, and the error received (if any). + * + * @param error - The error encountered. + * + * @deprecated + * @internal + */ +export interface RetryDecider { + (error: SdkError): boolean; +} +/** + * Determines the number of milliseconds to wait before retrying an action. + * + * @param delayBase - The base delay (in milliseconds). + * @param attempts - The number of times the action has already been tried. + * + * @deprecated + * @internal + */ +export interface DelayDecider { + (delayBase: number, attempts: number): number; +} +/** + * Interface that specifies the retry quota behavior. + * @deprecated + * @internal + */ +export interface RetryQuota { + /** + * returns true if retry tokens are available from the retry quota bucket. + */ + hasRetryTokens: (error: SdkError) => boolean; + /** + * returns token amount from the retry quota bucket. 
+ * throws error is retry tokens are not available. + */ + retrieveRetryTokens: (error: SdkError) => number; + /** + * releases tokens back to the retry quota. + */ + releaseRetryTokens: (releaseCapacityAmount?: number) => void; +} +/** + * @deprecated + * @internal + */ +export interface RateLimiter { + /** + * If there is sufficient capacity (tokens) available, it immediately returns. + * If there is not sufficient capacity, it will either sleep a certain amount + * of time until the rate limiter can retrieve a token from its token bucket + * or raise an exception indicating there is insufficient capacity. + */ + getSendToken: () => Promise; + /** + * Updates the client sending rate based on response. + * If the response was successful, the capacity and fill rate are increased. + * If the response was a throttling response, the capacity and fill rate are + * decreased. Transient errors do not affect the rate limiter. + */ + updateClientSendingRate: (response: any) => void; +} diff --git a/amplify/functions/deleteDocument/node_modules/@smithy/middleware-retry/dist-types/util.d.ts b/amplify/functions/deleteDocument/node_modules/@smithy/middleware-retry/dist-types/util.d.ts new file mode 100644 index 0000000..00939b8 --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@smithy/middleware-retry/dist-types/util.d.ts @@ -0,0 +1,2 @@ +import { SdkError } from "@smithy/types"; +export declare const asSdkError: (error: unknown) => SdkError; diff --git a/amplify/functions/deleteDocument/node_modules/@smithy/middleware-retry/package.json b/amplify/functions/deleteDocument/node_modules/@smithy/middleware-retry/package.json new file mode 100644 index 0000000..b029e53 --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@smithy/middleware-retry/package.json @@ -0,0 +1,79 @@ +{ + "name": "@smithy/middleware-retry", + "version": "4.1.3", + "scripts": { + "build": "concurrently 'yarn:build:cjs' 'yarn:build:es' 'yarn:build:types && yarn 
build:types:downlevel'", + "build:cjs": "node ../../scripts/inline middleware-retry", + "build:es": "yarn g:tsc -p tsconfig.es.json", + "build:types": "yarn g:tsc -p tsconfig.types.json", + "build:types:downlevel": "rimraf dist-types/ts3.4 && downlevel-dts dist-types dist-types/ts3.4", + "stage-release": "rimraf ./.release && yarn pack && mkdir ./.release && tar zxvf ./package.tgz --directory ./.release && rm ./package.tgz", + "clean": "rimraf ./dist-* && rimraf *.tsbuildinfo || exit 0", + "lint": "eslint -c ../../.eslintrc.js \"src/**/*.ts\"", + "format": "prettier --config ../../prettier.config.js --ignore-path ../../.prettierignore --write \"**/*.{ts,md,json}\"", + "extract:docs": "api-extractor run --local", + "test": "yarn g:vitest run", + "test:integration": "yarn g:vitest run -c vitest.config.integ.ts", + "test:watch": "yarn g:vitest watch", + "test:integration:watch": "yarn g:vitest watch -c vitest.config.integ.ts" + }, + "main": "./dist-cjs/index.js", + "module": "./dist-es/index.js", + "types": "./dist-types/index.d.ts", + "browser": { + "./dist-es/isStreamingPayload/isStreamingPayload": "./dist-es/isStreamingPayload/isStreamingPayload.browser" + }, + "react-native": { + "./dist-cjs/isStreamingPayload/isStreamingPayload": "./dist-cjs/isStreamingPayload/isStreamingPayload.browser", + "./dist-es/isStreamingPayload/isStreamingPayload": "./dist-es/isStreamingPayload/isStreamingPayload.browser" + }, + "author": { + "name": "AWS SDK for JavaScript Team", + "url": "https://aws.amazon.com/javascript/" + }, + "license": "Apache-2.0", + "dependencies": { + "@smithy/node-config-provider": "^4.0.2", + "@smithy/protocol-http": "^5.1.0", + "@smithy/service-error-classification": "^4.0.3", + "@smithy/smithy-client": "^4.2.2", + "@smithy/types": "^4.2.0", + "@smithy/util-middleware": "^4.0.2", + "@smithy/util-retry": "^4.0.3", + "tslib": "^2.6.2", + "uuid": "^9.0.1" + }, + "devDependencies": { + "@smithy/util-test": "^0.2.8", + "@types/uuid": "^8.3.0", + "concurrently": 
"7.0.0", + "downlevel-dts": "0.10.1", + "rimraf": "3.0.2", + "typedoc": "0.23.23" + }, + "engines": { + "node": ">=18.0.0" + }, + "typesVersions": { + "<4.0": { + "dist-types/*": [ + "dist-types/ts3.4/*" + ] + } + }, + "files": [ + "dist-*/**" + ], + "homepage": "https://github.com/smithy-lang/smithy-typescript/tree/main/packages/middleware-retry", + "repository": { + "type": "git", + "url": "https://github.com/smithy-lang/smithy-typescript.git", + "directory": "packages/middleware-retry" + }, + "typedoc": { + "entryPoint": "src/index.ts" + }, + "publishConfig": { + "directory": ".release/package" + } +} \ No newline at end of file diff --git a/amplify/functions/deleteDocument/node_modules/@smithy/middleware-serde/LICENSE b/amplify/functions/deleteDocument/node_modules/@smithy/middleware-serde/LICENSE new file mode 100644 index 0000000..e907b58 --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@smithy/middleware-serde/LICENSE @@ -0,0 +1,201 @@ + Apache License + Version 2.0, January 2004 + http://www.apache.org/licenses/ + + TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION + + 1. Definitions. + + "License" shall mean the terms and conditions for use, reproduction, + and distribution as defined by Sections 1 through 9 of this document. + + "Licensor" shall mean the copyright owner or entity authorized by + the copyright owner that is granting the License. + + "Legal Entity" shall mean the union of the acting entity and all + other entities that control, are controlled by, or are under common + control with that entity. For the purposes of this definition, + "control" means (i) the power, direct or indirect, to cause the + direction or management of such entity, whether by contract or + otherwise, or (ii) ownership of fifty percent (50%) or more of the + outstanding shares, or (iii) beneficial ownership of such entity. + + "You" (or "Your") shall mean an individual or Legal Entity + exercising permissions granted by this License. 
+ + "Source" form shall mean the preferred form for making modifications, + including but not limited to software source code, documentation + source, and configuration files. + + "Object" form shall mean any form resulting from mechanical + transformation or translation of a Source form, including but + not limited to compiled object code, generated documentation, + and conversions to other media types. + + "Work" shall mean the work of authorship, whether in Source or + Object form, made available under the License, as indicated by a + copyright notice that is included in or attached to the work + (an example is provided in the Appendix below). + + "Derivative Works" shall mean any work, whether in Source or Object + form, that is based on (or derived from) the Work and for which the + editorial revisions, annotations, elaborations, or other modifications + represent, as a whole, an original work of authorship. For the purposes + of this License, Derivative Works shall not include works that remain + separable from, or merely link (or bind by name) to the interfaces of, + the Work and Derivative Works thereof. + + "Contribution" shall mean any work of authorship, including + the original version of the Work and any modifications or additions + to that Work or Derivative Works thereof, that is intentionally + submitted to Licensor for inclusion in the Work by the copyright owner + or by an individual or Legal Entity authorized to submit on behalf of + the copyright owner. 
For the purposes of this definition, "submitted" + means any form of electronic, verbal, or written communication sent + to the Licensor or its representatives, including but not limited to + communication on electronic mailing lists, source code control systems, + and issue tracking systems that are managed by, or on behalf of, the + Licensor for the purpose of discussing and improving the Work, but + excluding communication that is conspicuously marked or otherwise + designated in writing by the copyright owner as "Not a Contribution." + + "Contributor" shall mean Licensor and any individual or Legal Entity + on behalf of whom a Contribution has been received by Licensor and + subsequently incorporated within the Work. + + 2. Grant of Copyright License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + copyright license to reproduce, prepare Derivative Works of, + publicly display, publicly perform, sublicense, and distribute the + Work and such Derivative Works in Source or Object form. + + 3. Grant of Patent License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + (except as stated in this section) patent license to make, have made, + use, offer to sell, sell, import, and otherwise transfer the Work, + where such license applies only to those patent claims licensable + by such Contributor that are necessarily infringed by their + Contribution(s) alone or by combination of their Contribution(s) + with the Work to which such Contribution(s) was submitted. 
If You + institute patent litigation against any entity (including a + cross-claim or counterclaim in a lawsuit) alleging that the Work + or a Contribution incorporated within the Work constitutes direct + or contributory patent infringement, then any patent licenses + granted to You under this License for that Work shall terminate + as of the date such litigation is filed. + + 4. Redistribution. You may reproduce and distribute copies of the + Work or Derivative Works thereof in any medium, with or without + modifications, and in Source or Object form, provided that You + meet the following conditions: + + (a) You must give any other recipients of the Work or + Derivative Works a copy of this License; and + + (b) You must cause any modified files to carry prominent notices + stating that You changed the files; and + + (c) You must retain, in the Source form of any Derivative Works + that You distribute, all copyright, patent, trademark, and + attribution notices from the Source form of the Work, + excluding those notices that do not pertain to any part of + the Derivative Works; and + + (d) If the Work includes a "NOTICE" text file as part of its + distribution, then any Derivative Works that You distribute must + include a readable copy of the attribution notices contained + within such NOTICE file, excluding those notices that do not + pertain to any part of the Derivative Works, in at least one + of the following places: within a NOTICE text file distributed + as part of the Derivative Works; within the Source form or + documentation, if provided along with the Derivative Works; or, + within a display generated by the Derivative Works, if and + wherever such third-party notices normally appear. The contents + of the NOTICE file are for informational purposes only and + do not modify the License. 
You may add Your own attribution + notices within Derivative Works that You distribute, alongside + or as an addendum to the NOTICE text from the Work, provided + that such additional attribution notices cannot be construed + as modifying the License. + + You may add Your own copyright statement to Your modifications and + may provide additional or different license terms and conditions + for use, reproduction, or distribution of Your modifications, or + for any such Derivative Works as a whole, provided Your use, + reproduction, and distribution of the Work otherwise complies with + the conditions stated in this License. + + 5. Submission of Contributions. Unless You explicitly state otherwise, + any Contribution intentionally submitted for inclusion in the Work + by You to the Licensor shall be under the terms and conditions of + this License, without any additional terms or conditions. + Notwithstanding the above, nothing herein shall supersede or modify + the terms of any separate license agreement you may have executed + with Licensor regarding such Contributions. + + 6. Trademarks. This License does not grant permission to use the trade + names, trademarks, service marks, or product names of the Licensor, + except as required for reasonable and customary use in describing the + origin of the Work and reproducing the content of the NOTICE file. + + 7. Disclaimer of Warranty. Unless required by applicable law or + agreed to in writing, Licensor provides the Work (and each + Contributor provides its Contributions) on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or + implied, including, without limitation, any warranties or conditions + of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A + PARTICULAR PURPOSE. You are solely responsible for determining the + appropriateness of using or redistributing the Work and assume any + risks associated with Your exercise of permissions under this License. + + 8. 
Limitation of Liability. In no event and under no legal theory, + whether in tort (including negligence), contract, or otherwise, + unless required by applicable law (such as deliberate and grossly + negligent acts) or agreed to in writing, shall any Contributor be + liable to You for damages, including any direct, indirect, special, + incidental, or consequential damages of any character arising as a + result of this License or out of the use or inability to use the + Work (including but not limited to damages for loss of goodwill, + work stoppage, computer failure or malfunction, or any and all + other commercial damages or losses), even if such Contributor + has been advised of the possibility of such damages. + + 9. Accepting Warranty or Additional Liability. While redistributing + the Work or Derivative Works thereof, You may choose to offer, + and charge a fee for, acceptance of support, warranty, indemnity, + or other liability obligations and/or rights consistent with this + License. However, in accepting such obligations, You may act only + on Your own behalf and on Your sole responsibility, not on behalf + of any other Contributor, and only if You agree to indemnify, + defend, and hold each Contributor harmless for any liability + incurred by, or claims asserted against, such Contributor by reason + of your accepting any such warranty or additional liability. + + END OF TERMS AND CONDITIONS + + APPENDIX: How to apply the Apache License to your work. + + To apply the Apache License to your work, attach the following + boilerplate notice, with the fields enclosed by brackets "{}" + replaced with your own identifying information. (Don't include + the brackets!) The text should be enclosed in the appropriate + comment syntax for the file format. We also recommend that a + file or class name and description of purpose be included on the + same "printed page" as the copyright notice for easier + identification within third-party archives. 
+ + Copyright 2019 Amazon.com, Inc. or its affiliates. All Rights Reserved. + + Licensed under the Apache License, Version 2.0 (the "License"); + you may not use this file except in compliance with the License. + You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + + Unless required by applicable law or agreed to in writing, software + distributed under the License is distributed on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + See the License for the specific language governing permissions and + limitations under the License. diff --git a/amplify/functions/deleteDocument/node_modules/@smithy/middleware-serde/README.md b/amplify/functions/deleteDocument/node_modules/@smithy/middleware-serde/README.md new file mode 100644 index 0000000..d2bbfa6 --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@smithy/middleware-serde/README.md @@ -0,0 +1,4 @@ +# @smithy/middleware-serde + +[![NPM version](https://img.shields.io/npm/v/@smithy/middleware-serde/latest.svg)](https://www.npmjs.com/package/@smithy/middleware-serde) +[![NPM downloads](https://img.shields.io/npm/dm/@smithy/middleware-serde.svg)](https://www.npmjs.com/package/@smithy/middleware-serde) diff --git a/amplify/functions/deleteDocument/node_modules/@smithy/middleware-serde/dist-cjs/deserializerMiddleware.js b/amplify/functions/deleteDocument/node_modules/@smithy/middleware-serde/dist-cjs/deserializerMiddleware.js new file mode 100644 index 0000000..532e610 --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@smithy/middleware-serde/dist-cjs/deserializerMiddleware.js @@ -0,0 +1 @@ +module.exports = require("./index.js"); \ No newline at end of file diff --git a/amplify/functions/deleteDocument/node_modules/@smithy/middleware-serde/dist-cjs/index.js b/amplify/functions/deleteDocument/node_modules/@smithy/middleware-serde/dist-cjs/index.js new file mode 100644 index 0000000..04fa6f3 --- /dev/null +++ 
b/amplify/functions/deleteDocument/node_modules/@smithy/middleware-serde/dist-cjs/index.js @@ -0,0 +1,109 @@ +var __defProp = Object.defineProperty; +var __getOwnPropDesc = Object.getOwnPropertyDescriptor; +var __getOwnPropNames = Object.getOwnPropertyNames; +var __hasOwnProp = Object.prototype.hasOwnProperty; +var __name = (target, value) => __defProp(target, "name", { value, configurable: true }); +var __export = (target, all) => { + for (var name in all) + __defProp(target, name, { get: all[name], enumerable: true }); +}; +var __copyProps = (to, from, except, desc) => { + if (from && typeof from === "object" || typeof from === "function") { + for (let key of __getOwnPropNames(from)) + if (!__hasOwnProp.call(to, key) && key !== except) + __defProp(to, key, { get: () => from[key], enumerable: !(desc = __getOwnPropDesc(from, key)) || desc.enumerable }); + } + return to; +}; +var __toCommonJS = (mod) => __copyProps(__defProp({}, "__esModule", { value: true }), mod); + +// src/index.ts +var src_exports = {}; +__export(src_exports, { + deserializerMiddleware: () => deserializerMiddleware, + deserializerMiddlewareOption: () => deserializerMiddlewareOption, + getSerdePlugin: () => getSerdePlugin, + serializerMiddleware: () => serializerMiddleware, + serializerMiddlewareOption: () => serializerMiddlewareOption +}); +module.exports = __toCommonJS(src_exports); + +// src/deserializerMiddleware.ts +var deserializerMiddleware = /* @__PURE__ */ __name((options, deserializer) => (next, context) => async (args) => { + const { response } = await next(args); + try { + const parsed = await deserializer(response, options); + return { + response, + output: parsed + }; + } catch (error) { + Object.defineProperty(error, "$response", { + value: response + }); + if (!("$metadata" in error)) { + const hint = `Deserialization error: to see the raw response, inspect the hidden field {error}.$response on this object.`; + try { + error.message += "\n " + hint; + } catch (e) { + if 
(!context.logger || context.logger?.constructor?.name === "NoOpLogger") { + console.warn(hint); + } else { + context.logger?.warn?.(hint); + } + } + if (typeof error.$responseBodyText !== "undefined") { + if (error.$response) { + error.$response.body = error.$responseBodyText; + } + } + } + throw error; + } +}, "deserializerMiddleware"); + +// src/serializerMiddleware.ts +var serializerMiddleware = /* @__PURE__ */ __name((options, serializer) => (next, context) => async (args) => { + const endpoint = context.endpointV2?.url && options.urlParser ? async () => options.urlParser(context.endpointV2.url) : options.endpoint; + if (!endpoint) { + throw new Error("No valid endpoint provider available."); + } + const request = await serializer(args.input, { ...options, endpoint }); + return next({ + ...args, + request + }); +}, "serializerMiddleware"); + +// src/serdePlugin.ts +var deserializerMiddlewareOption = { + name: "deserializerMiddleware", + step: "deserialize", + tags: ["DESERIALIZER"], + override: true +}; +var serializerMiddlewareOption = { + name: "serializerMiddleware", + step: "serialize", + tags: ["SERIALIZER"], + override: true +}; +function getSerdePlugin(config, serializer, deserializer) { + return { + applyToStack: (commandStack) => { + commandStack.add(deserializerMiddleware(config, deserializer), deserializerMiddlewareOption); + commandStack.add(serializerMiddleware(config, serializer), serializerMiddlewareOption); + } + }; +} +__name(getSerdePlugin, "getSerdePlugin"); +// Annotate the CommonJS export names for ESM import in node: + +0 && (module.exports = { + deserializerMiddleware, + deserializerMiddlewareOption, + serializerMiddlewareOption, + getSerdePlugin, + serializerMiddleware +}); + diff --git a/amplify/functions/deleteDocument/node_modules/@smithy/middleware-serde/dist-cjs/serdePlugin.js b/amplify/functions/deleteDocument/node_modules/@smithy/middleware-serde/dist-cjs/serdePlugin.js new file mode 100644 index 0000000..532e610 --- /dev/null +++ 
b/amplify/functions/deleteDocument/node_modules/@smithy/middleware-serde/dist-cjs/serdePlugin.js @@ -0,0 +1 @@ +module.exports = require("./index.js"); \ No newline at end of file diff --git a/amplify/functions/deleteDocument/node_modules/@smithy/middleware-serde/dist-cjs/serializerMiddleware.js b/amplify/functions/deleteDocument/node_modules/@smithy/middleware-serde/dist-cjs/serializerMiddleware.js new file mode 100644 index 0000000..532e610 --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@smithy/middleware-serde/dist-cjs/serializerMiddleware.js @@ -0,0 +1 @@ +module.exports = require("./index.js"); \ No newline at end of file diff --git a/amplify/functions/deleteDocument/node_modules/@smithy/middleware-serde/dist-es/deserializerMiddleware.js b/amplify/functions/deleteDocument/node_modules/@smithy/middleware-serde/dist-es/deserializerMiddleware.js new file mode 100644 index 0000000..19c0c27 --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@smithy/middleware-serde/dist-es/deserializerMiddleware.js @@ -0,0 +1,35 @@ +export const deserializerMiddleware = (options, deserializer) => (next, context) => async (args) => { + const { response } = await next(args); + try { + const parsed = await deserializer(response, options); + return { + response, + output: parsed, + }; + } + catch (error) { + Object.defineProperty(error, "$response", { + value: response, + }); + if (!("$metadata" in error)) { + const hint = `Deserialization error: to see the raw response, inspect the hidden field {error}.$response on this object.`; + try { + error.message += "\n " + hint; + } + catch (e) { + if (!context.logger || context.logger?.constructor?.name === "NoOpLogger") { + console.warn(hint); + } + else { + context.logger?.warn?.(hint); + } + } + if (typeof error.$responseBodyText !== "undefined") { + if (error.$response) { + error.$response.body = error.$responseBodyText; + } + } + } + throw error; + } +}; diff --git 
a/amplify/functions/deleteDocument/node_modules/@smithy/middleware-serde/dist-es/index.js b/amplify/functions/deleteDocument/node_modules/@smithy/middleware-serde/dist-es/index.js new file mode 100644 index 0000000..166a2be --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@smithy/middleware-serde/dist-es/index.js @@ -0,0 +1,3 @@ +export * from "./deserializerMiddleware"; +export * from "./serdePlugin"; +export * from "./serializerMiddleware"; diff --git a/amplify/functions/deleteDocument/node_modules/@smithy/middleware-serde/dist-es/serdePlugin.js b/amplify/functions/deleteDocument/node_modules/@smithy/middleware-serde/dist-es/serdePlugin.js new file mode 100644 index 0000000..be2a06e --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@smithy/middleware-serde/dist-es/serdePlugin.js @@ -0,0 +1,22 @@ +import { deserializerMiddleware } from "./deserializerMiddleware"; +import { serializerMiddleware } from "./serializerMiddleware"; +export const deserializerMiddlewareOption = { + name: "deserializerMiddleware", + step: "deserialize", + tags: ["DESERIALIZER"], + override: true, +}; +export const serializerMiddlewareOption = { + name: "serializerMiddleware", + step: "serialize", + tags: ["SERIALIZER"], + override: true, +}; +export function getSerdePlugin(config, serializer, deserializer) { + return { + applyToStack: (commandStack) => { + commandStack.add(deserializerMiddleware(config, deserializer), deserializerMiddlewareOption); + commandStack.add(serializerMiddleware(config, serializer), serializerMiddlewareOption); + }, + }; +} diff --git a/amplify/functions/deleteDocument/node_modules/@smithy/middleware-serde/dist-es/serializerMiddleware.js b/amplify/functions/deleteDocument/node_modules/@smithy/middleware-serde/dist-es/serializerMiddleware.js new file mode 100644 index 0000000..b02b93d --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@smithy/middleware-serde/dist-es/serializerMiddleware.js @@ -0,0 +1,13 @@ +export 
const serializerMiddleware = (options, serializer) => (next, context) => async (args) => { + const endpoint = context.endpointV2?.url && options.urlParser + ? async () => options.urlParser(context.endpointV2.url) + : options.endpoint; + if (!endpoint) { + throw new Error("No valid endpoint provider available."); + } + const request = await serializer(args.input, { ...options, endpoint }); + return next({ + ...args, + request, + }); +}; diff --git a/amplify/functions/deleteDocument/node_modules/@smithy/middleware-serde/dist-types/deserializerMiddleware.d.ts b/amplify/functions/deleteDocument/node_modules/@smithy/middleware-serde/dist-types/deserializerMiddleware.d.ts new file mode 100644 index 0000000..4d81141 --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@smithy/middleware-serde/dist-types/deserializerMiddleware.d.ts @@ -0,0 +1,5 @@ +import { DeserializeMiddleware, ResponseDeserializer, SerdeContext, SerdeFunctions } from "@smithy/types"; +/** + * @internal + */ +export declare const deserializerMiddleware: (options: SerdeFunctions, deserializer: ResponseDeserializer) => DeserializeMiddleware; diff --git a/amplify/functions/deleteDocument/node_modules/@smithy/middleware-serde/dist-types/index.d.ts b/amplify/functions/deleteDocument/node_modules/@smithy/middleware-serde/dist-types/index.d.ts new file mode 100644 index 0000000..166a2be --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@smithy/middleware-serde/dist-types/index.d.ts @@ -0,0 +1,3 @@ +export * from "./deserializerMiddleware"; +export * from "./serdePlugin"; +export * from "./serializerMiddleware"; diff --git a/amplify/functions/deleteDocument/node_modules/@smithy/middleware-serde/dist-types/serdePlugin.d.ts b/amplify/functions/deleteDocument/node_modules/@smithy/middleware-serde/dist-types/serdePlugin.d.ts new file mode 100644 index 0000000..bf1091a --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@smithy/middleware-serde/dist-types/serdePlugin.d.ts @@ 
-0,0 +1,12 @@ +import { DeserializeHandlerOptions, Endpoint, MetadataBearer, Pluggable, Provider, RequestSerializer, ResponseDeserializer, SerdeContext, SerdeFunctions, SerializeHandlerOptions, UrlParser } from "@smithy/types"; +export declare const deserializerMiddlewareOption: DeserializeHandlerOptions; +export declare const serializerMiddlewareOption: SerializeHandlerOptions; +export type V1OrV2Endpoint = { + urlParser?: UrlParser; + endpoint?: Provider; +}; +/** + * @internal + * + */ +export declare function getSerdePlugin(config: V1OrV2Endpoint & SerdeFunctions, serializer: RequestSerializer, deserializer: ResponseDeserializer): Pluggable; diff --git a/amplify/functions/deleteDocument/node_modules/@smithy/middleware-serde/dist-types/serializerMiddleware.d.ts b/amplify/functions/deleteDocument/node_modules/@smithy/middleware-serde/dist-types/serializerMiddleware.d.ts new file mode 100644 index 0000000..5437298 --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@smithy/middleware-serde/dist-types/serializerMiddleware.d.ts @@ -0,0 +1,6 @@ +import { RequestSerializer, SerdeContext, SerdeFunctions, SerializeMiddleware } from "@smithy/types"; +import type { V1OrV2Endpoint } from "./serdePlugin"; +/** + * @internal + */ +export declare const serializerMiddleware: (options: V1OrV2Endpoint & SerdeFunctions, serializer: RequestSerializer) => SerializeMiddleware; diff --git a/amplify/functions/deleteDocument/node_modules/@smithy/middleware-serde/dist-types/ts3.4/deserializerMiddleware.d.ts b/amplify/functions/deleteDocument/node_modules/@smithy/middleware-serde/dist-types/ts3.4/deserializerMiddleware.d.ts new file mode 100644 index 0000000..b0ed492 --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@smithy/middleware-serde/dist-types/ts3.4/deserializerMiddleware.d.ts @@ -0,0 +1,5 @@ +import { DeserializeMiddleware, ResponseDeserializer, SerdeContext, SerdeFunctions } from "@smithy/types"; +/** + * @internal + */ +export declare const 
deserializerMiddleware: (options: SerdeFunctions, deserializer: ResponseDeserializer) => DeserializeMiddleware; diff --git a/amplify/functions/deleteDocument/node_modules/@smithy/middleware-serde/dist-types/ts3.4/index.d.ts b/amplify/functions/deleteDocument/node_modules/@smithy/middleware-serde/dist-types/ts3.4/index.d.ts new file mode 100644 index 0000000..ec66df4 --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@smithy/middleware-serde/dist-types/ts3.4/index.d.ts @@ -0,0 +1,3 @@ +export * from "./deserializerMiddleware"; +export * from "./serdePlugin"; +export * from "./serializerMiddleware"; diff --git a/amplify/functions/deleteDocument/node_modules/@smithy/middleware-serde/dist-types/ts3.4/serdePlugin.d.ts b/amplify/functions/deleteDocument/node_modules/@smithy/middleware-serde/dist-types/ts3.4/serdePlugin.d.ts new file mode 100644 index 0000000..c381721 --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@smithy/middleware-serde/dist-types/ts3.4/serdePlugin.d.ts @@ -0,0 +1,12 @@ +import { DeserializeHandlerOptions, Endpoint, MetadataBearer, Pluggable, Provider, RequestSerializer, ResponseDeserializer, SerdeContext, SerdeFunctions, SerializeHandlerOptions, UrlParser } from "@smithy/types"; +export declare const deserializerMiddlewareOption: DeserializeHandlerOptions; +export declare const serializerMiddlewareOption: SerializeHandlerOptions; +export type V1OrV2Endpoint = { + urlParser?: UrlParser; + endpoint?: Provider; +}; +/** + * @internal + * + */ +export declare function getSerdePlugin(config: V1OrV2Endpoint & SerdeFunctions, serializer: RequestSerializer, deserializer: ResponseDeserializer): Pluggable; diff --git a/amplify/functions/deleteDocument/node_modules/@smithy/middleware-serde/dist-types/ts3.4/serializerMiddleware.d.ts b/amplify/functions/deleteDocument/node_modules/@smithy/middleware-serde/dist-types/ts3.4/serializerMiddleware.d.ts new file mode 100644 index 0000000..914b3b6 --- /dev/null +++ 
b/amplify/functions/deleteDocument/node_modules/@smithy/middleware-serde/dist-types/ts3.4/serializerMiddleware.d.ts @@ -0,0 +1,6 @@ +import { RequestSerializer, SerdeContext, SerdeFunctions, SerializeMiddleware } from "@smithy/types"; +import { V1OrV2Endpoint } from "./serdePlugin"; +/** + * @internal + */ +export declare const serializerMiddleware: (options: V1OrV2Endpoint & SerdeFunctions, serializer: RequestSerializer) => SerializeMiddleware; diff --git a/amplify/functions/deleteDocument/node_modules/@smithy/middleware-serde/package.json b/amplify/functions/deleteDocument/node_modules/@smithy/middleware-serde/package.json new file mode 100644 index 0000000..042be08 --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@smithy/middleware-serde/package.json @@ -0,0 +1,63 @@ +{ + "name": "@smithy/middleware-serde", + "version": "4.0.3", + "scripts": { + "build": "concurrently 'yarn:build:cjs' 'yarn:build:es' 'yarn:build:types && yarn build:types:downlevel'", + "build:cjs": "node ../../scripts/inline middleware-serde", + "build:es": "yarn g:tsc -p tsconfig.es.json", + "build:types": "yarn g:tsc -p tsconfig.types.json", + "build:types:downlevel": "rimraf dist-types/ts3.4 && downlevel-dts dist-types dist-types/ts3.4", + "stage-release": "rimraf ./.release && yarn pack && mkdir ./.release && tar zxvf ./package.tgz --directory ./.release && rm ./package.tgz", + "clean": "rimraf ./dist-* && rimraf *.tsbuildinfo || exit 0", + "lint": "eslint -c ../../.eslintrc.js \"src/**/*.ts\"", + "format": "prettier --config ../../prettier.config.js --ignore-path ../../.prettierignore --write \"**/*.{ts,md,json}\"", + "test": "yarn g:vitest run", + "test:integration": "yarn g:vitest run -c vitest.config.integ.ts", + "test:watch": "yarn g:vitest watch", + "test:integration:watch": "yarn g:vitest watch -c vitest.config.integ.ts" + }, + "main": "./dist-cjs/index.js", + "module": "./dist-es/index.js", + "types": "./dist-types/index.d.ts", + "author": { + "name": "AWS SDK for 
JavaScript Team", + "url": "https://aws.amazon.com/javascript/" + }, + "license": "Apache-2.0", + "dependencies": { + "@smithy/types": "^4.2.0", + "tslib": "^2.6.2" + }, + "engines": { + "node": ">=18.0.0" + }, + "typesVersions": { + "<4.0": { + "dist-types/*": [ + "dist-types/ts3.4/*" + ] + } + }, + "files": [ + "dist-*/**" + ], + "homepage": "https://github.com/smithy-lang/smithy-typescript/tree/main/packages/middleware-serde", + "repository": { + "type": "git", + "url": "https://github.com/smithy-lang/smithy-typescript.git", + "directory": "packages/middleware-serde" + }, + "devDependencies": { + "@smithy/util-test": "^0.2.8", + "concurrently": "7.0.0", + "downlevel-dts": "0.10.1", + "rimraf": "3.0.2", + "typedoc": "0.23.23" + }, + "typedoc": { + "entryPoint": "src/index.ts" + }, + "publishConfig": { + "directory": ".release/package" + } +} \ No newline at end of file diff --git a/amplify/functions/deleteDocument/node_modules/@smithy/middleware-stack/LICENSE b/amplify/functions/deleteDocument/node_modules/@smithy/middleware-stack/LICENSE new file mode 100644 index 0000000..7b6491b --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@smithy/middleware-stack/LICENSE @@ -0,0 +1,201 @@ +Apache License + Version 2.0, January 2004 + http://www.apache.org/licenses/ + + TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION + + 1. Definitions. + + "License" shall mean the terms and conditions for use, reproduction, + and distribution as defined by Sections 1 through 9 of this document. + + "Licensor" shall mean the copyright owner or entity authorized by + the copyright owner that is granting the License. + + "Legal Entity" shall mean the union of the acting entity and all + other entities that control, are controlled by, or are under common + control with that entity. 
For the purposes of this definition, + "control" means (i) the power, direct or indirect, to cause the + direction or management of such entity, whether by contract or + otherwise, or (ii) ownership of fifty percent (50%) or more of the + outstanding shares, or (iii) beneficial ownership of such entity. + + "You" (or "Your") shall mean an individual or Legal Entity + exercising permissions granted by this License. + + "Source" form shall mean the preferred form for making modifications, + including but not limited to software source code, documentation + source, and configuration files. + + "Object" form shall mean any form resulting from mechanical + transformation or translation of a Source form, including but + not limited to compiled object code, generated documentation, + and conversions to other media types. + + "Work" shall mean the work of authorship, whether in Source or + Object form, made available under the License, as indicated by a + copyright notice that is included in or attached to the work + (an example is provided in the Appendix below). + + "Derivative Works" shall mean any work, whether in Source or Object + form, that is based on (or derived from) the Work and for which the + editorial revisions, annotations, elaborations, or other modifications + represent, as a whole, an original work of authorship. For the purposes + of this License, Derivative Works shall not include works that remain + separable from, or merely link (or bind by name) to the interfaces of, + the Work and Derivative Works thereof. + + "Contribution" shall mean any work of authorship, including + the original version of the Work and any modifications or additions + to that Work or Derivative Works thereof, that is intentionally + submitted to Licensor for inclusion in the Work by the copyright owner + or by an individual or Legal Entity authorized to submit on behalf of + the copyright owner. 
For the purposes of this definition, "submitted" + means any form of electronic, verbal, or written communication sent + to the Licensor or its representatives, including but not limited to + communication on electronic mailing lists, source code control systems, + and issue tracking systems that are managed by, or on behalf of, the + Licensor for the purpose of discussing and improving the Work, but + excluding communication that is conspicuously marked or otherwise + designated in writing by the copyright owner as "Not a Contribution." + + "Contributor" shall mean Licensor and any individual or Legal Entity + on behalf of whom a Contribution has been received by Licensor and + subsequently incorporated within the Work. + + 2. Grant of Copyright License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + copyright license to reproduce, prepare Derivative Works of, + publicly display, publicly perform, sublicense, and distribute the + Work and such Derivative Works in Source or Object form. + + 3. Grant of Patent License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + (except as stated in this section) patent license to make, have made, + use, offer to sell, sell, import, and otherwise transfer the Work, + where such license applies only to those patent claims licensable + by such Contributor that are necessarily infringed by their + Contribution(s) alone or by combination of their Contribution(s) + with the Work to which such Contribution(s) was submitted. 
If You + institute patent litigation against any entity (including a + cross-claim or counterclaim in a lawsuit) alleging that the Work + or a Contribution incorporated within the Work constitutes direct + or contributory patent infringement, then any patent licenses + granted to You under this License for that Work shall terminate + as of the date such litigation is filed. + + 4. Redistribution. You may reproduce and distribute copies of the + Work or Derivative Works thereof in any medium, with or without + modifications, and in Source or Object form, provided that You + meet the following conditions: + + (a) You must give any other recipients of the Work or + Derivative Works a copy of this License; and + + (b) You must cause any modified files to carry prominent notices + stating that You changed the files; and + + (c) You must retain, in the Source form of any Derivative Works + that You distribute, all copyright, patent, trademark, and + attribution notices from the Source form of the Work, + excluding those notices that do not pertain to any part of + the Derivative Works; and + + (d) If the Work includes a "NOTICE" text file as part of its + distribution, then any Derivative Works that You distribute must + include a readable copy of the attribution notices contained + within such NOTICE file, excluding those notices that do not + pertain to any part of the Derivative Works, in at least one + of the following places: within a NOTICE text file distributed + as part of the Derivative Works; within the Source form or + documentation, if provided along with the Derivative Works; or, + within a display generated by the Derivative Works, if and + wherever such third-party notices normally appear. The contents + of the NOTICE file are for informational purposes only and + do not modify the License. 
You may add Your own attribution + notices within Derivative Works that You distribute, alongside + or as an addendum to the NOTICE text from the Work, provided + that such additional attribution notices cannot be construed + as modifying the License. + + You may add Your own copyright statement to Your modifications and + may provide additional or different license terms and conditions + for use, reproduction, or distribution of Your modifications, or + for any such Derivative Works as a whole, provided Your use, + reproduction, and distribution of the Work otherwise complies with + the conditions stated in this License. + + 5. Submission of Contributions. Unless You explicitly state otherwise, + any Contribution intentionally submitted for inclusion in the Work + by You to the Licensor shall be under the terms and conditions of + this License, without any additional terms or conditions. + Notwithstanding the above, nothing herein shall supersede or modify + the terms of any separate license agreement you may have executed + with Licensor regarding such Contributions. + + 6. Trademarks. This License does not grant permission to use the trade + names, trademarks, service marks, or product names of the Licensor, + except as required for reasonable and customary use in describing the + origin of the Work and reproducing the content of the NOTICE file. + + 7. Disclaimer of Warranty. Unless required by applicable law or + agreed to in writing, Licensor provides the Work (and each + Contributor provides its Contributions) on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or + implied, including, without limitation, any warranties or conditions + of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A + PARTICULAR PURPOSE. You are solely responsible for determining the + appropriateness of using or redistributing the Work and assume any + risks associated with Your exercise of permissions under this License. + + 8. 
Limitation of Liability. In no event and under no legal theory, + whether in tort (including negligence), contract, or otherwise, + unless required by applicable law (such as deliberate and grossly + negligent acts) or agreed to in writing, shall any Contributor be + liable to You for damages, including any direct, indirect, special, + incidental, or consequential damages of any character arising as a + result of this License or out of the use or inability to use the + Work (including but not limited to damages for loss of goodwill, + work stoppage, computer failure or malfunction, or any and all + other commercial damages or losses), even if such Contributor + has been advised of the possibility of such damages. + + 9. Accepting Warranty or Additional Liability. While redistributing + the Work or Derivative Works thereof, You may choose to offer, + and charge a fee for, acceptance of support, warranty, indemnity, + or other liability obligations and/or rights consistent with this + License. However, in accepting such obligations, You may act only + on Your own behalf and on Your sole responsibility, not on behalf + of any other Contributor, and only if You agree to indemnify, + defend, and hold each Contributor harmless for any liability + incurred by, or claims asserted against, such Contributor by reason + of your accepting any such warranty or additional liability. + + END OF TERMS AND CONDITIONS + + APPENDIX: How to apply the Apache License to your work. + + To apply the Apache License to your work, attach the following + boilerplate notice, with the fields enclosed by brackets "{}" + replaced with your own identifying information. (Don't include + the brackets!) The text should be enclosed in the appropriate + comment syntax for the file format. We also recommend that a + file or class name and description of purpose be included on the + same "printed page" as the copyright notice for easier + identification within third-party archives. 
+ + Copyright 2018-2020 Amazon.com, Inc. or its affiliates. All Rights Reserved. + + Licensed under the Apache License, Version 2.0 (the "License"); + you may not use this file except in compliance with the License. + You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + + Unless required by applicable law or agreed to in writing, software + distributed under the License is distributed on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + See the License for the specific language governing permissions and + limitations under the License. \ No newline at end of file diff --git a/amplify/functions/deleteDocument/node_modules/@smithy/middleware-stack/README.md b/amplify/functions/deleteDocument/node_modules/@smithy/middleware-stack/README.md new file mode 100644 index 0000000..c09d4d3 --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@smithy/middleware-stack/README.md @@ -0,0 +1,78 @@ +# @smithy/middleware-stack + +[![NPM version](https://img.shields.io/npm/v/@smithy/middleware-stack/latest.svg)](https://www.npmjs.com/package/@smithy/middleware-stack) +[![NPM downloads](https://img.shields.io/npm/dm/@smithy/middleware-stack.svg)](https://www.npmjs.com/package/@smithy/middleware-stack) + +The package contains an implementation of middleware stack interface. Middleware +stack is a structure storing middleware in specified order and resolve these +middleware into a single handler. + +A middleware stack has five `Step`s, each of them represents a specific request life cycle: + +- **initialize**: The input is being prepared. Examples of typical initialization tasks include injecting default options computing derived parameters. + +- **serialize**: The input is complete and ready to be serialized. Examples of typical serialization tasks include input validation and building an HTTP request from user input. 
+ +- **build**: The input has been serialized into an HTTP request, but that request may require further modification. Any request alterations will be applied to all retries. Examples of typical build tasks include injecting HTTP headers that describe a stable aspect of the request, such as `Content-Length` or a body checksum. + +- **finalizeRequest**: The request is being prepared to be sent over the wire. The request in this stage should already be semantically complete and should therefore only be altered to match the recipient's expectations. Examples of typical finalization tasks include request signing and injecting hop-by-hop headers. + +- **deserialize**: The response has arrived, the middleware here will deserialize the raw response object to structured response + +## Adding Middleware + +There are two ways to add middleware to a middleware stack. They both add middleware to specified `Step` but they provide fine-grained location control differently. + +### Absolute Location + +You can add middleware to specified step with: + +```javascript +stack.add(middleware, { + step: "finalizeRequest", +}); +``` + +This approach works for most cases. Sometimes you want your middleware to be executed in the front of the `Step`, you can set the `Priority` to `high`. Set the `Priority` to `low` then this middleware will be executed at the end of `Step`: + +```javascript +stack.add(middleware, { + step: "finalizeRequest", + priority: "high", +}); +``` + +If multiple middleware is added to same `step` with same `priority`, the order of them is determined by the order of adding them. 
+ +### Relative Location + +In some cases, you might want to execute your middleware before some other known middleware, then you can use `addRelativeTo()`: + +```javascript +stack.add(middleware, { + step: "finalizeRequest", + name: "myMiddleware", +}); +stack.addRelativeTo(anotherMiddleware, { + relation: "before", //or 'after' + toMiddleware: "myMiddleware", +}); +``` + +## Removing Middleware + +You can remove middleware by name one at a time: + +```javascript +stack.remove("Middleware1"); +``` + +If you specify tags for middleware, you can remove multiple middleware at a time according to tag: + +```javascript +stack.add(middleware, { + step: "finalizeRequest", + tags: ["final"], +}); +stack.removeByTag("final"); +``` diff --git a/amplify/functions/deleteDocument/node_modules/@smithy/middleware-stack/dist-cjs/MiddlewareStack.js b/amplify/functions/deleteDocument/node_modules/@smithy/middleware-stack/dist-cjs/MiddlewareStack.js new file mode 100644 index 0000000..532e610 --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@smithy/middleware-stack/dist-cjs/MiddlewareStack.js @@ -0,0 +1 @@ +module.exports = require("./index.js"); \ No newline at end of file diff --git a/amplify/functions/deleteDocument/node_modules/@smithy/middleware-stack/dist-cjs/index.js b/amplify/functions/deleteDocument/node_modules/@smithy/middleware-stack/dist-cjs/index.js new file mode 100644 index 0000000..4c78597 --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@smithy/middleware-stack/dist-cjs/index.js @@ -0,0 +1,313 @@ +var __defProp = Object.defineProperty; +var __getOwnPropDesc = Object.getOwnPropertyDescriptor; +var __getOwnPropNames = Object.getOwnPropertyNames; +var __hasOwnProp = Object.prototype.hasOwnProperty; +var __name = (target, value) => __defProp(target, "name", { value, configurable: true }); +var __export = (target, all) => { + for (var name in all) + __defProp(target, name, { get: all[name], enumerable: true }); +}; +var __copyProps = 
(to, from, except, desc) => { + if (from && typeof from === "object" || typeof from === "function") { + for (let key of __getOwnPropNames(from)) + if (!__hasOwnProp.call(to, key) && key !== except) + __defProp(to, key, { get: () => from[key], enumerable: !(desc = __getOwnPropDesc(from, key)) || desc.enumerable }); + } + return to; +}; +var __toCommonJS = (mod) => __copyProps(__defProp({}, "__esModule", { value: true }), mod); + +// src/index.ts +var src_exports = {}; +__export(src_exports, { + constructStack: () => constructStack +}); +module.exports = __toCommonJS(src_exports); + +// src/MiddlewareStack.ts +var getAllAliases = /* @__PURE__ */ __name((name, aliases) => { + const _aliases = []; + if (name) { + _aliases.push(name); + } + if (aliases) { + for (const alias of aliases) { + _aliases.push(alias); + } + } + return _aliases; +}, "getAllAliases"); +var getMiddlewareNameWithAliases = /* @__PURE__ */ __name((name, aliases) => { + return `${name || "anonymous"}${aliases && aliases.length > 0 ? ` (a.k.a. 
${aliases.join(",")})` : ""}`; +}, "getMiddlewareNameWithAliases"); +var constructStack = /* @__PURE__ */ __name(() => { + let absoluteEntries = []; + let relativeEntries = []; + let identifyOnResolve = false; + const entriesNameSet = /* @__PURE__ */ new Set(); + const sort = /* @__PURE__ */ __name((entries) => entries.sort( + (a, b) => stepWeights[b.step] - stepWeights[a.step] || priorityWeights[b.priority || "normal"] - priorityWeights[a.priority || "normal"] + ), "sort"); + const removeByName = /* @__PURE__ */ __name((toRemove) => { + let isRemoved = false; + const filterCb = /* @__PURE__ */ __name((entry) => { + const aliases = getAllAliases(entry.name, entry.aliases); + if (aliases.includes(toRemove)) { + isRemoved = true; + for (const alias of aliases) { + entriesNameSet.delete(alias); + } + return false; + } + return true; + }, "filterCb"); + absoluteEntries = absoluteEntries.filter(filterCb); + relativeEntries = relativeEntries.filter(filterCb); + return isRemoved; + }, "removeByName"); + const removeByReference = /* @__PURE__ */ __name((toRemove) => { + let isRemoved = false; + const filterCb = /* @__PURE__ */ __name((entry) => { + if (entry.middleware === toRemove) { + isRemoved = true; + for (const alias of getAllAliases(entry.name, entry.aliases)) { + entriesNameSet.delete(alias); + } + return false; + } + return true; + }, "filterCb"); + absoluteEntries = absoluteEntries.filter(filterCb); + relativeEntries = relativeEntries.filter(filterCb); + return isRemoved; + }, "removeByReference"); + const cloneTo = /* @__PURE__ */ __name((toStack) => { + absoluteEntries.forEach((entry) => { + toStack.add(entry.middleware, { ...entry }); + }); + relativeEntries.forEach((entry) => { + toStack.addRelativeTo(entry.middleware, { ...entry }); + }); + toStack.identifyOnResolve?.(stack.identifyOnResolve()); + return toStack; + }, "cloneTo"); + const expandRelativeMiddlewareList = /* @__PURE__ */ __name((from) => { + const expandedMiddlewareList = []; + 
from.before.forEach((entry) => { + if (entry.before.length === 0 && entry.after.length === 0) { + expandedMiddlewareList.push(entry); + } else { + expandedMiddlewareList.push(...expandRelativeMiddlewareList(entry)); + } + }); + expandedMiddlewareList.push(from); + from.after.reverse().forEach((entry) => { + if (entry.before.length === 0 && entry.after.length === 0) { + expandedMiddlewareList.push(entry); + } else { + expandedMiddlewareList.push(...expandRelativeMiddlewareList(entry)); + } + }); + return expandedMiddlewareList; + }, "expandRelativeMiddlewareList"); + const getMiddlewareList = /* @__PURE__ */ __name((debug = false) => { + const normalizedAbsoluteEntries = []; + const normalizedRelativeEntries = []; + const normalizedEntriesNameMap = {}; + absoluteEntries.forEach((entry) => { + const normalizedEntry = { + ...entry, + before: [], + after: [] + }; + for (const alias of getAllAliases(normalizedEntry.name, normalizedEntry.aliases)) { + normalizedEntriesNameMap[alias] = normalizedEntry; + } + normalizedAbsoluteEntries.push(normalizedEntry); + }); + relativeEntries.forEach((entry) => { + const normalizedEntry = { + ...entry, + before: [], + after: [] + }; + for (const alias of getAllAliases(normalizedEntry.name, normalizedEntry.aliases)) { + normalizedEntriesNameMap[alias] = normalizedEntry; + } + normalizedRelativeEntries.push(normalizedEntry); + }); + normalizedRelativeEntries.forEach((entry) => { + if (entry.toMiddleware) { + const toMiddleware = normalizedEntriesNameMap[entry.toMiddleware]; + if (toMiddleware === void 0) { + if (debug) { + return; + } + throw new Error( + `${entry.toMiddleware} is not found when adding ${getMiddlewareNameWithAliases(entry.name, entry.aliases)} middleware ${entry.relation} ${entry.toMiddleware}` + ); + } + if (entry.relation === "after") { + toMiddleware.after.push(entry); + } + if (entry.relation === "before") { + toMiddleware.before.push(entry); + } + } + }); + const mainChain = 
sort(normalizedAbsoluteEntries).map(expandRelativeMiddlewareList).reduce( + (wholeList, expandedMiddlewareList) => { + wholeList.push(...expandedMiddlewareList); + return wholeList; + }, + [] + ); + return mainChain; + }, "getMiddlewareList"); + const stack = { + add: (middleware, options = {}) => { + const { name, override, aliases: _aliases } = options; + const entry = { + step: "initialize", + priority: "normal", + middleware, + ...options + }; + const aliases = getAllAliases(name, _aliases); + if (aliases.length > 0) { + if (aliases.some((alias) => entriesNameSet.has(alias))) { + if (!override) + throw new Error(`Duplicate middleware name '${getMiddlewareNameWithAliases(name, _aliases)}'`); + for (const alias of aliases) { + const toOverrideIndex = absoluteEntries.findIndex( + (entry2) => entry2.name === alias || entry2.aliases?.some((a) => a === alias) + ); + if (toOverrideIndex === -1) { + continue; + } + const toOverride = absoluteEntries[toOverrideIndex]; + if (toOverride.step !== entry.step || entry.priority !== toOverride.priority) { + throw new Error( + `"${getMiddlewareNameWithAliases(toOverride.name, toOverride.aliases)}" middleware with ${toOverride.priority} priority in ${toOverride.step} step cannot be overridden by "${getMiddlewareNameWithAliases(name, _aliases)}" middleware with ${entry.priority} priority in ${entry.step} step.` + ); + } + absoluteEntries.splice(toOverrideIndex, 1); + } + } + for (const alias of aliases) { + entriesNameSet.add(alias); + } + } + absoluteEntries.push(entry); + }, + addRelativeTo: (middleware, options) => { + const { name, override, aliases: _aliases } = options; + const entry = { + middleware, + ...options + }; + const aliases = getAllAliases(name, _aliases); + if (aliases.length > 0) { + if (aliases.some((alias) => entriesNameSet.has(alias))) { + if (!override) + throw new Error(`Duplicate middleware name '${getMiddlewareNameWithAliases(name, _aliases)}'`); + for (const alias of aliases) { + const toOverrideIndex = 
relativeEntries.findIndex( + (entry2) => entry2.name === alias || entry2.aliases?.some((a) => a === alias) + ); + if (toOverrideIndex === -1) { + continue; + } + const toOverride = relativeEntries[toOverrideIndex]; + if (toOverride.toMiddleware !== entry.toMiddleware || toOverride.relation !== entry.relation) { + throw new Error( + `"${getMiddlewareNameWithAliases(toOverride.name, toOverride.aliases)}" middleware ${toOverride.relation} "${toOverride.toMiddleware}" middleware cannot be overridden by "${getMiddlewareNameWithAliases(name, _aliases)}" middleware ${entry.relation} "${entry.toMiddleware}" middleware.` + ); + } + relativeEntries.splice(toOverrideIndex, 1); + } + } + for (const alias of aliases) { + entriesNameSet.add(alias); + } + } + relativeEntries.push(entry); + }, + clone: () => cloneTo(constructStack()), + use: (plugin) => { + plugin.applyToStack(stack); + }, + remove: (toRemove) => { + if (typeof toRemove === "string") + return removeByName(toRemove); + else + return removeByReference(toRemove); + }, + removeByTag: (toRemove) => { + let isRemoved = false; + const filterCb = /* @__PURE__ */ __name((entry) => { + const { tags, name, aliases: _aliases } = entry; + if (tags && tags.includes(toRemove)) { + const aliases = getAllAliases(name, _aliases); + for (const alias of aliases) { + entriesNameSet.delete(alias); + } + isRemoved = true; + return false; + } + return true; + }, "filterCb"); + absoluteEntries = absoluteEntries.filter(filterCb); + relativeEntries = relativeEntries.filter(filterCb); + return isRemoved; + }, + concat: (from) => { + const cloned = cloneTo(constructStack()); + cloned.use(from); + cloned.identifyOnResolve( + identifyOnResolve || cloned.identifyOnResolve() || (from.identifyOnResolve?.() ?? false) + ); + return cloned; + }, + applyToStack: cloneTo, + identify: () => { + return getMiddlewareList(true).map((mw) => { + const step = mw.step ?? 
mw.relation + " " + mw.toMiddleware; + return getMiddlewareNameWithAliases(mw.name, mw.aliases) + " - " + step; + }); + }, + identifyOnResolve(toggle) { + if (typeof toggle === "boolean") + identifyOnResolve = toggle; + return identifyOnResolve; + }, + resolve: (handler, context) => { + for (const middleware of getMiddlewareList().map((entry) => entry.middleware).reverse()) { + handler = middleware(handler, context); + } + if (identifyOnResolve) { + console.log(stack.identify()); + } + return handler; + } + }; + return stack; +}, "constructStack"); +var stepWeights = { + initialize: 5, + serialize: 4, + build: 3, + finalizeRequest: 2, + deserialize: 1 +}; +var priorityWeights = { + high: 3, + normal: 2, + low: 1 +}; +// Annotate the CommonJS export names for ESM import in node: + +0 && (module.exports = { + constructStack +}); + diff --git a/amplify/functions/deleteDocument/node_modules/@smithy/middleware-stack/dist-cjs/types.js b/amplify/functions/deleteDocument/node_modules/@smithy/middleware-stack/dist-cjs/types.js new file mode 100644 index 0000000..532e610 --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@smithy/middleware-stack/dist-cjs/types.js @@ -0,0 +1 @@ +module.exports = require("./index.js"); \ No newline at end of file diff --git a/amplify/functions/deleteDocument/node_modules/@smithy/middleware-stack/dist-es/MiddlewareStack.js b/amplify/functions/deleteDocument/node_modules/@smithy/middleware-stack/dist-es/MiddlewareStack.js new file mode 100644 index 0000000..2e02c73 --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@smithy/middleware-stack/dist-es/MiddlewareStack.js @@ -0,0 +1,281 @@ +const getAllAliases = (name, aliases) => { + const _aliases = []; + if (name) { + _aliases.push(name); + } + if (aliases) { + for (const alias of aliases) { + _aliases.push(alias); + } + } + return _aliases; +}; +const getMiddlewareNameWithAliases = (name, aliases) => { + return `${name || "anonymous"}${aliases && aliases.length > 0 ? 
` (a.k.a. ${aliases.join(",")})` : ""}`; +}; +export const constructStack = () => { + let absoluteEntries = []; + let relativeEntries = []; + let identifyOnResolve = false; + const entriesNameSet = new Set(); + const sort = (entries) => entries.sort((a, b) => stepWeights[b.step] - stepWeights[a.step] || + priorityWeights[b.priority || "normal"] - priorityWeights[a.priority || "normal"]); + const removeByName = (toRemove) => { + let isRemoved = false; + const filterCb = (entry) => { + const aliases = getAllAliases(entry.name, entry.aliases); + if (aliases.includes(toRemove)) { + isRemoved = true; + for (const alias of aliases) { + entriesNameSet.delete(alias); + } + return false; + } + return true; + }; + absoluteEntries = absoluteEntries.filter(filterCb); + relativeEntries = relativeEntries.filter(filterCb); + return isRemoved; + }; + const removeByReference = (toRemove) => { + let isRemoved = false; + const filterCb = (entry) => { + if (entry.middleware === toRemove) { + isRemoved = true; + for (const alias of getAllAliases(entry.name, entry.aliases)) { + entriesNameSet.delete(alias); + } + return false; + } + return true; + }; + absoluteEntries = absoluteEntries.filter(filterCb); + relativeEntries = relativeEntries.filter(filterCb); + return isRemoved; + }; + const cloneTo = (toStack) => { + absoluteEntries.forEach((entry) => { + toStack.add(entry.middleware, { ...entry }); + }); + relativeEntries.forEach((entry) => { + toStack.addRelativeTo(entry.middleware, { ...entry }); + }); + toStack.identifyOnResolve?.(stack.identifyOnResolve()); + return toStack; + }; + const expandRelativeMiddlewareList = (from) => { + const expandedMiddlewareList = []; + from.before.forEach((entry) => { + if (entry.before.length === 0 && entry.after.length === 0) { + expandedMiddlewareList.push(entry); + } + else { + expandedMiddlewareList.push(...expandRelativeMiddlewareList(entry)); + } + }); + expandedMiddlewareList.push(from); + from.after.reverse().forEach((entry) => { + if 
(entry.before.length === 0 && entry.after.length === 0) { + expandedMiddlewareList.push(entry); + } + else { + expandedMiddlewareList.push(...expandRelativeMiddlewareList(entry)); + } + }); + return expandedMiddlewareList; + }; + const getMiddlewareList = (debug = false) => { + const normalizedAbsoluteEntries = []; + const normalizedRelativeEntries = []; + const normalizedEntriesNameMap = {}; + absoluteEntries.forEach((entry) => { + const normalizedEntry = { + ...entry, + before: [], + after: [], + }; + for (const alias of getAllAliases(normalizedEntry.name, normalizedEntry.aliases)) { + normalizedEntriesNameMap[alias] = normalizedEntry; + } + normalizedAbsoluteEntries.push(normalizedEntry); + }); + relativeEntries.forEach((entry) => { + const normalizedEntry = { + ...entry, + before: [], + after: [], + }; + for (const alias of getAllAliases(normalizedEntry.name, normalizedEntry.aliases)) { + normalizedEntriesNameMap[alias] = normalizedEntry; + } + normalizedRelativeEntries.push(normalizedEntry); + }); + normalizedRelativeEntries.forEach((entry) => { + if (entry.toMiddleware) { + const toMiddleware = normalizedEntriesNameMap[entry.toMiddleware]; + if (toMiddleware === undefined) { + if (debug) { + return; + } + throw new Error(`${entry.toMiddleware} is not found when adding ` + + `${getMiddlewareNameWithAliases(entry.name, entry.aliases)} ` + + `middleware ${entry.relation} ${entry.toMiddleware}`); + } + if (entry.relation === "after") { + toMiddleware.after.push(entry); + } + if (entry.relation === "before") { + toMiddleware.before.push(entry); + } + } + }); + const mainChain = sort(normalizedAbsoluteEntries) + .map(expandRelativeMiddlewareList) + .reduce((wholeList, expandedMiddlewareList) => { + wholeList.push(...expandedMiddlewareList); + return wholeList; + }, []); + return mainChain; + }; + const stack = { + add: (middleware, options = {}) => { + const { name, override, aliases: _aliases } = options; + const entry = { + step: "initialize", + priority: 
"normal", + middleware, + ...options, + }; + const aliases = getAllAliases(name, _aliases); + if (aliases.length > 0) { + if (aliases.some((alias) => entriesNameSet.has(alias))) { + if (!override) + throw new Error(`Duplicate middleware name '${getMiddlewareNameWithAliases(name, _aliases)}'`); + for (const alias of aliases) { + const toOverrideIndex = absoluteEntries.findIndex((entry) => entry.name === alias || entry.aliases?.some((a) => a === alias)); + if (toOverrideIndex === -1) { + continue; + } + const toOverride = absoluteEntries[toOverrideIndex]; + if (toOverride.step !== entry.step || entry.priority !== toOverride.priority) { + throw new Error(`"${getMiddlewareNameWithAliases(toOverride.name, toOverride.aliases)}" middleware with ` + + `${toOverride.priority} priority in ${toOverride.step} step cannot ` + + `be overridden by "${getMiddlewareNameWithAliases(name, _aliases)}" middleware with ` + + `${entry.priority} priority in ${entry.step} step.`); + } + absoluteEntries.splice(toOverrideIndex, 1); + } + } + for (const alias of aliases) { + entriesNameSet.add(alias); + } + } + absoluteEntries.push(entry); + }, + addRelativeTo: (middleware, options) => { + const { name, override, aliases: _aliases } = options; + const entry = { + middleware, + ...options, + }; + const aliases = getAllAliases(name, _aliases); + if (aliases.length > 0) { + if (aliases.some((alias) => entriesNameSet.has(alias))) { + if (!override) + throw new Error(`Duplicate middleware name '${getMiddlewareNameWithAliases(name, _aliases)}'`); + for (const alias of aliases) { + const toOverrideIndex = relativeEntries.findIndex((entry) => entry.name === alias || entry.aliases?.some((a) => a === alias)); + if (toOverrideIndex === -1) { + continue; + } + const toOverride = relativeEntries[toOverrideIndex]; + if (toOverride.toMiddleware !== entry.toMiddleware || toOverride.relation !== entry.relation) { + throw new Error(`"${getMiddlewareNameWithAliases(toOverride.name, toOverride.aliases)}" 
middleware ` + + `${toOverride.relation} "${toOverride.toMiddleware}" middleware cannot be overridden ` + + `by "${getMiddlewareNameWithAliases(name, _aliases)}" middleware ${entry.relation} ` + + `"${entry.toMiddleware}" middleware.`); + } + relativeEntries.splice(toOverrideIndex, 1); + } + } + for (const alias of aliases) { + entriesNameSet.add(alias); + } + } + relativeEntries.push(entry); + }, + clone: () => cloneTo(constructStack()), + use: (plugin) => { + plugin.applyToStack(stack); + }, + remove: (toRemove) => { + if (typeof toRemove === "string") + return removeByName(toRemove); + else + return removeByReference(toRemove); + }, + removeByTag: (toRemove) => { + let isRemoved = false; + const filterCb = (entry) => { + const { tags, name, aliases: _aliases } = entry; + if (tags && tags.includes(toRemove)) { + const aliases = getAllAliases(name, _aliases); + for (const alias of aliases) { + entriesNameSet.delete(alias); + } + isRemoved = true; + return false; + } + return true; + }; + absoluteEntries = absoluteEntries.filter(filterCb); + relativeEntries = relativeEntries.filter(filterCb); + return isRemoved; + }, + concat: (from) => { + const cloned = cloneTo(constructStack()); + cloned.use(from); + cloned.identifyOnResolve(identifyOnResolve || cloned.identifyOnResolve() || (from.identifyOnResolve?.() ?? false)); + return cloned; + }, + applyToStack: cloneTo, + identify: () => { + return getMiddlewareList(true).map((mw) => { + const step = mw.step ?? 
+ mw.relation + + " " + + mw.toMiddleware; + return getMiddlewareNameWithAliases(mw.name, mw.aliases) + " - " + step; + }); + }, + identifyOnResolve(toggle) { + if (typeof toggle === "boolean") + identifyOnResolve = toggle; + return identifyOnResolve; + }, + resolve: (handler, context) => { + for (const middleware of getMiddlewareList() + .map((entry) => entry.middleware) + .reverse()) { + handler = middleware(handler, context); + } + if (identifyOnResolve) { + console.log(stack.identify()); + } + return handler; + }, + }; + return stack; +}; +const stepWeights = { + initialize: 5, + serialize: 4, + build: 3, + finalizeRequest: 2, + deserialize: 1, +}; +const priorityWeights = { + high: 3, + normal: 2, + low: 1, +}; diff --git a/amplify/functions/deleteDocument/node_modules/@smithy/middleware-stack/dist-es/index.js b/amplify/functions/deleteDocument/node_modules/@smithy/middleware-stack/dist-es/index.js new file mode 100644 index 0000000..16f56ce --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@smithy/middleware-stack/dist-es/index.js @@ -0,0 +1 @@ +export * from "./MiddlewareStack"; diff --git a/amplify/functions/deleteDocument/node_modules/@smithy/middleware-stack/dist-es/types.js b/amplify/functions/deleteDocument/node_modules/@smithy/middleware-stack/dist-es/types.js new file mode 100644 index 0000000..cb0ff5c --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@smithy/middleware-stack/dist-es/types.js @@ -0,0 +1 @@ +export {}; diff --git a/amplify/functions/deleteDocument/node_modules/@smithy/middleware-stack/dist-types/MiddlewareStack.d.ts b/amplify/functions/deleteDocument/node_modules/@smithy/middleware-stack/dist-types/MiddlewareStack.d.ts new file mode 100644 index 0000000..2aa088b --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@smithy/middleware-stack/dist-types/MiddlewareStack.d.ts @@ -0,0 +1,5 @@ +import { MiddlewareStack } from "@smithy/types"; +/** + * @internal + */ +export declare const 
constructStack: () => MiddlewareStack; diff --git a/amplify/functions/deleteDocument/node_modules/@smithy/middleware-stack/dist-types/index.d.ts b/amplify/functions/deleteDocument/node_modules/@smithy/middleware-stack/dist-types/index.d.ts new file mode 100644 index 0000000..16f56ce --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@smithy/middleware-stack/dist-types/index.d.ts @@ -0,0 +1 @@ +export * from "./MiddlewareStack"; diff --git a/amplify/functions/deleteDocument/node_modules/@smithy/middleware-stack/dist-types/ts3.4/MiddlewareStack.d.ts b/amplify/functions/deleteDocument/node_modules/@smithy/middleware-stack/dist-types/ts3.4/MiddlewareStack.d.ts new file mode 100644 index 0000000..d93ce93 --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@smithy/middleware-stack/dist-types/ts3.4/MiddlewareStack.d.ts @@ -0,0 +1,5 @@ +import { MiddlewareStack } from "@smithy/types"; +/** + * @internal + */ +export declare const constructStack: () => MiddlewareStack; diff --git a/amplify/functions/deleteDocument/node_modules/@smithy/middleware-stack/dist-types/ts3.4/index.d.ts b/amplify/functions/deleteDocument/node_modules/@smithy/middleware-stack/dist-types/ts3.4/index.d.ts new file mode 100644 index 0000000..d906b7d --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@smithy/middleware-stack/dist-types/ts3.4/index.d.ts @@ -0,0 +1 @@ +export * from "./MiddlewareStack"; diff --git a/amplify/functions/deleteDocument/node_modules/@smithy/middleware-stack/dist-types/ts3.4/types.d.ts b/amplify/functions/deleteDocument/node_modules/@smithy/middleware-stack/dist-types/ts3.4/types.d.ts new file mode 100644 index 0000000..38eb54c --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@smithy/middleware-stack/dist-types/ts3.4/types.d.ts @@ -0,0 +1,22 @@ +import { AbsoluteLocation, HandlerOptions, MiddlewareType, Priority, RelativeLocation, Step } from "@smithy/types"; +export interface MiddlewareEntry extends HandlerOptions { 
+ middleware: MiddlewareType; +} +export interface AbsoluteMiddlewareEntry extends MiddlewareEntry, AbsoluteLocation { + step: Step; + priority: Priority; +} +export interface RelativeMiddlewareEntry extends MiddlewareEntry, RelativeLocation { +} +export type Normalized, Input extends object = {}, Output extends object = {}> = T & { + after: Normalized, Input, Output>[]; + before: Normalized, Input, Output>[]; +}; +export interface NormalizedRelativeEntry extends HandlerOptions { + step: Step; + middleware: MiddlewareType; + next?: NormalizedRelativeEntry; + prev?: NormalizedRelativeEntry; + priority: null; +} +export type NamedMiddlewareEntriesMap = Record>; diff --git a/amplify/functions/deleteDocument/node_modules/@smithy/middleware-stack/dist-types/types.d.ts b/amplify/functions/deleteDocument/node_modules/@smithy/middleware-stack/dist-types/types.d.ts new file mode 100644 index 0000000..4aa5fc6 --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@smithy/middleware-stack/dist-types/types.d.ts @@ -0,0 +1,22 @@ +import { AbsoluteLocation, HandlerOptions, MiddlewareType, Priority, RelativeLocation, Step } from "@smithy/types"; +export interface MiddlewareEntry extends HandlerOptions { + middleware: MiddlewareType; +} +export interface AbsoluteMiddlewareEntry extends MiddlewareEntry, AbsoluteLocation { + step: Step; + priority: Priority; +} +export interface RelativeMiddlewareEntry extends MiddlewareEntry, RelativeLocation { +} +export type Normalized, Input extends object = {}, Output extends object = {}> = T & { + after: Normalized, Input, Output>[]; + before: Normalized, Input, Output>[]; +}; +export interface NormalizedRelativeEntry extends HandlerOptions { + step: Step; + middleware: MiddlewareType; + next?: NormalizedRelativeEntry; + prev?: NormalizedRelativeEntry; + priority: null; +} +export type NamedMiddlewareEntriesMap = Record>; diff --git a/amplify/functions/deleteDocument/node_modules/@smithy/middleware-stack/package.json 
b/amplify/functions/deleteDocument/node_modules/@smithy/middleware-stack/package.json new file mode 100644 index 0000000..57077ab --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@smithy/middleware-stack/package.json @@ -0,0 +1,63 @@ +{ + "name": "@smithy/middleware-stack", + "version": "4.0.2", + "description": "Provides a means for composing multiple middleware functions into a single handler", + "scripts": { + "build": "concurrently 'yarn:build:cjs' 'yarn:build:es' 'yarn:build:types && yarn build:types:downlevel'", + "build:cjs": "node ../../scripts/inline middleware-stack", + "build:es": "yarn g:tsc -p tsconfig.es.json", + "build:types": "yarn g:tsc -p tsconfig.types.json", + "build:types:downlevel": "rimraf dist-types/ts3.4 && downlevel-dts dist-types dist-types/ts3.4", + "stage-release": "rimraf ./.release && yarn pack && mkdir ./.release && tar zxvf ./package.tgz --directory ./.release && rm ./package.tgz", + "clean": "rimraf ./dist-* && rimraf *.tsbuildinfo || exit 0", + "lint": "eslint -c ../../.eslintrc.js \"src/**/*.ts\"", + "format": "prettier --config ../../prettier.config.js --ignore-path ../../.prettierignore --write \"**/*.{ts,md,json}\"", + "extract:docs": "api-extractor run --local", + "test": "yarn g:vitest run", + "test:watch": "yarn g:vitest watch" + }, + "author": { + "name": "AWS SDK for JavaScript Team", + "email": "", + "url": "https://aws.amazon.com/javascript/" + }, + "license": "Apache-2.0", + "main": "./dist-cjs/index.js", + "module": "./dist-es/index.js", + "types": "./dist-types/index.d.ts", + "dependencies": { + "@smithy/types": "^4.2.0", + "tslib": "^2.6.2" + }, + "devDependencies": { + "concurrently": "7.0.0", + "downlevel-dts": "0.10.1", + "rimraf": "3.0.2", + "typedoc": "0.23.23" + }, + "engines": { + "node": ">=18.0.0" + }, + "typesVersions": { + "<4.0": { + "dist-types/*": [ + "dist-types/ts3.4/*" + ] + } + }, + "files": [ + "dist-*/**" + ], + "homepage": 
"https://github.com/smithy-lang/smithy-typescript/tree/main/packages/middleware-stack", + "repository": { + "type": "git", + "url": "https://github.com/smithy-lang/smithy-typescript.git", + "directory": "packages/middleware-stack" + }, + "typedoc": { + "entryPoint": "src/index.ts" + }, + "publishConfig": { + "directory": ".release/package" + } +} \ No newline at end of file diff --git a/amplify/functions/deleteDocument/node_modules/@smithy/node-config-provider/LICENSE b/amplify/functions/deleteDocument/node_modules/@smithy/node-config-provider/LICENSE new file mode 100644 index 0000000..74d4e5c --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@smithy/node-config-provider/LICENSE @@ -0,0 +1,201 @@ +Apache License + Version 2.0, January 2004 + http://www.apache.org/licenses/ + + TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION + + 1. Definitions. + + "License" shall mean the terms and conditions for use, reproduction, + and distribution as defined by Sections 1 through 9 of this document. + + "Licensor" shall mean the copyright owner or entity authorized by + the copyright owner that is granting the License. + + "Legal Entity" shall mean the union of the acting entity and all + other entities that control, are controlled by, or are under common + control with that entity. For the purposes of this definition, + "control" means (i) the power, direct or indirect, to cause the + direction or management of such entity, whether by contract or + otherwise, or (ii) ownership of fifty percent (50%) or more of the + outstanding shares, or (iii) beneficial ownership of such entity. + + "You" (or "Your") shall mean an individual or Legal Entity + exercising permissions granted by this License. + + "Source" form shall mean the preferred form for making modifications, + including but not limited to software source code, documentation + source, and configuration files. 
+ + "Object" form shall mean any form resulting from mechanical + transformation or translation of a Source form, including but + not limited to compiled object code, generated documentation, + and conversions to other media types. + + "Work" shall mean the work of authorship, whether in Source or + Object form, made available under the License, as indicated by a + copyright notice that is included in or attached to the work + (an example is provided in the Appendix below). + + "Derivative Works" shall mean any work, whether in Source or Object + form, that is based on (or derived from) the Work and for which the + editorial revisions, annotations, elaborations, or other modifications + represent, as a whole, an original work of authorship. For the purposes + of this License, Derivative Works shall not include works that remain + separable from, or merely link (or bind by name) to the interfaces of, + the Work and Derivative Works thereof. + + "Contribution" shall mean any work of authorship, including + the original version of the Work and any modifications or additions + to that Work or Derivative Works thereof, that is intentionally + submitted to Licensor for inclusion in the Work by the copyright owner + or by an individual or Legal Entity authorized to submit on behalf of + the copyright owner. For the purposes of this definition, "submitted" + means any form of electronic, verbal, or written communication sent + to the Licensor or its representatives, including but not limited to + communication on electronic mailing lists, source code control systems, + and issue tracking systems that are managed by, or on behalf of, the + Licensor for the purpose of discussing and improving the Work, but + excluding communication that is conspicuously marked or otherwise + designated in writing by the copyright owner as "Not a Contribution." 
+ + "Contributor" shall mean Licensor and any individual or Legal Entity + on behalf of whom a Contribution has been received by Licensor and + subsequently incorporated within the Work. + + 2. Grant of Copyright License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + copyright license to reproduce, prepare Derivative Works of, + publicly display, publicly perform, sublicense, and distribute the + Work and such Derivative Works in Source or Object form. + + 3. Grant of Patent License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + (except as stated in this section) patent license to make, have made, + use, offer to sell, sell, import, and otherwise transfer the Work, + where such license applies only to those patent claims licensable + by such Contributor that are necessarily infringed by their + Contribution(s) alone or by combination of their Contribution(s) + with the Work to which such Contribution(s) was submitted. If You + institute patent litigation against any entity (including a + cross-claim or counterclaim in a lawsuit) alleging that the Work + or a Contribution incorporated within the Work constitutes direct + or contributory patent infringement, then any patent licenses + granted to You under this License for that Work shall terminate + as of the date such litigation is filed. + + 4. Redistribution. 
You may reproduce and distribute copies of the + Work or Derivative Works thereof in any medium, with or without + modifications, and in Source or Object form, provided that You + meet the following conditions: + + (a) You must give any other recipients of the Work or + Derivative Works a copy of this License; and + + (b) You must cause any modified files to carry prominent notices + stating that You changed the files; and + + (c) You must retain, in the Source form of any Derivative Works + that You distribute, all copyright, patent, trademark, and + attribution notices from the Source form of the Work, + excluding those notices that do not pertain to any part of + the Derivative Works; and + + (d) If the Work includes a "NOTICE" text file as part of its + distribution, then any Derivative Works that You distribute must + include a readable copy of the attribution notices contained + within such NOTICE file, excluding those notices that do not + pertain to any part of the Derivative Works, in at least one + of the following places: within a NOTICE text file distributed + as part of the Derivative Works; within the Source form or + documentation, if provided along with the Derivative Works; or, + within a display generated by the Derivative Works, if and + wherever such third-party notices normally appear. The contents + of the NOTICE file are for informational purposes only and + do not modify the License. You may add Your own attribution + notices within Derivative Works that You distribute, alongside + or as an addendum to the NOTICE text from the Work, provided + that such additional attribution notices cannot be construed + as modifying the License. 
+ + You may add Your own copyright statement to Your modifications and + may provide additional or different license terms and conditions + for use, reproduction, or distribution of Your modifications, or + for any such Derivative Works as a whole, provided Your use, + reproduction, and distribution of the Work otherwise complies with + the conditions stated in this License. + + 5. Submission of Contributions. Unless You explicitly state otherwise, + any Contribution intentionally submitted for inclusion in the Work + by You to the Licensor shall be under the terms and conditions of + this License, without any additional terms or conditions. + Notwithstanding the above, nothing herein shall supersede or modify + the terms of any separate license agreement you may have executed + with Licensor regarding such Contributions. + + 6. Trademarks. This License does not grant permission to use the trade + names, trademarks, service marks, or product names of the Licensor, + except as required for reasonable and customary use in describing the + origin of the Work and reproducing the content of the NOTICE file. + + 7. Disclaimer of Warranty. Unless required by applicable law or + agreed to in writing, Licensor provides the Work (and each + Contributor provides its Contributions) on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or + implied, including, without limitation, any warranties or conditions + of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A + PARTICULAR PURPOSE. You are solely responsible for determining the + appropriateness of using or redistributing the Work and assume any + risks associated with Your exercise of permissions under this License. + + 8. Limitation of Liability. 
In no event and under no legal theory, + whether in tort (including negligence), contract, or otherwise, + unless required by applicable law (such as deliberate and grossly + negligent acts) or agreed to in writing, shall any Contributor be + liable to You for damages, including any direct, indirect, special, + incidental, or consequential damages of any character arising as a + result of this License or out of the use or inability to use the + Work (including but not limited to damages for loss of goodwill, + work stoppage, computer failure or malfunction, or any and all + other commercial damages or losses), even if such Contributor + has been advised of the possibility of such damages. + + 9. Accepting Warranty or Additional Liability. While redistributing + the Work or Derivative Works thereof, You may choose to offer, + and charge a fee for, acceptance of support, warranty, indemnity, + or other liability obligations and/or rights consistent with this + License. However, in accepting such obligations, You may act only + on Your own behalf and on Your sole responsibility, not on behalf + of any other Contributor, and only if You agree to indemnify, + defend, and hold each Contributor harmless for any liability + incurred by, or claims asserted against, such Contributor by reason + of your accepting any such warranty or additional liability. + + END OF TERMS AND CONDITIONS + + APPENDIX: How to apply the Apache License to your work. + + To apply the Apache License to your work, attach the following + boilerplate notice, with the fields enclosed by brackets "{}" + replaced with your own identifying information. (Don't include + the brackets!) The text should be enclosed in the appropriate + comment syntax for the file format. We also recommend that a + file or class name and description of purpose be included on the + same "printed page" as the copyright notice for easier + identification within third-party archives. + + Copyright 2020 Amazon.com, Inc. 
or its affiliates. All Rights Reserved. + + Licensed under the Apache License, Version 2.0 (the "License"); + you may not use this file except in compliance with the License. + You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + + Unless required by applicable law or agreed to in writing, software + distributed under the License is distributed on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + See the License for the specific language governing permissions and + limitations under the License. \ No newline at end of file diff --git a/amplify/functions/deleteDocument/node_modules/@smithy/node-config-provider/README.md b/amplify/functions/deleteDocument/node_modules/@smithy/node-config-provider/README.md new file mode 100644 index 0000000..af591d2 --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@smithy/node-config-provider/README.md @@ -0,0 +1,4 @@ +# @smithy/node-config-provider + +[![NPM version](https://img.shields.io/npm/v/@smithy/node-config-provider/latest.svg)](https://www.npmjs.com/package/@smithy/node-config-provider) +[![NPM downloads](https://img.shields.io/npm/dm/@smithy/node-config-provider.svg)](https://www.npmjs.com/package/@smithy/node-config-provider) diff --git a/amplify/functions/deleteDocument/node_modules/@smithy/node-config-provider/dist-cjs/configLoader.js b/amplify/functions/deleteDocument/node_modules/@smithy/node-config-provider/dist-cjs/configLoader.js new file mode 100644 index 0000000..532e610 --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@smithy/node-config-provider/dist-cjs/configLoader.js @@ -0,0 +1 @@ +module.exports = require("./index.js"); \ No newline at end of file diff --git a/amplify/functions/deleteDocument/node_modules/@smithy/node-config-provider/dist-cjs/fromEnv.js b/amplify/functions/deleteDocument/node_modules/@smithy/node-config-provider/dist-cjs/fromEnv.js new file mode 100644 index 0000000..532e610 
--- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@smithy/node-config-provider/dist-cjs/fromEnv.js @@ -0,0 +1 @@ +module.exports = require("./index.js"); \ No newline at end of file diff --git a/amplify/functions/deleteDocument/node_modules/@smithy/node-config-provider/dist-cjs/fromSharedConfigFiles.js b/amplify/functions/deleteDocument/node_modules/@smithy/node-config-provider/dist-cjs/fromSharedConfigFiles.js new file mode 100644 index 0000000..532e610 --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@smithy/node-config-provider/dist-cjs/fromSharedConfigFiles.js @@ -0,0 +1 @@ +module.exports = require("./index.js"); \ No newline at end of file diff --git a/amplify/functions/deleteDocument/node_modules/@smithy/node-config-provider/dist-cjs/fromStatic.js b/amplify/functions/deleteDocument/node_modules/@smithy/node-config-provider/dist-cjs/fromStatic.js new file mode 100644 index 0000000..532e610 --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@smithy/node-config-provider/dist-cjs/fromStatic.js @@ -0,0 +1 @@ +module.exports = require("./index.js"); \ No newline at end of file diff --git a/amplify/functions/deleteDocument/node_modules/@smithy/node-config-provider/dist-cjs/getSelectorName.js b/amplify/functions/deleteDocument/node_modules/@smithy/node-config-provider/dist-cjs/getSelectorName.js new file mode 100644 index 0000000..532e610 --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@smithy/node-config-provider/dist-cjs/getSelectorName.js @@ -0,0 +1 @@ +module.exports = require("./index.js"); \ No newline at end of file diff --git a/amplify/functions/deleteDocument/node_modules/@smithy/node-config-provider/dist-cjs/index.js b/amplify/functions/deleteDocument/node_modules/@smithy/node-config-provider/dist-cjs/index.js new file mode 100644 index 0000000..8a98b1b --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@smithy/node-config-provider/dist-cjs/index.js @@ -0,0 +1,105 @@ +var 
__defProp = Object.defineProperty; +var __getOwnPropDesc = Object.getOwnPropertyDescriptor; +var __getOwnPropNames = Object.getOwnPropertyNames; +var __hasOwnProp = Object.prototype.hasOwnProperty; +var __name = (target, value) => __defProp(target, "name", { value, configurable: true }); +var __export = (target, all) => { + for (var name in all) + __defProp(target, name, { get: all[name], enumerable: true }); +}; +var __copyProps = (to, from, except, desc) => { + if (from && typeof from === "object" || typeof from === "function") { + for (let key of __getOwnPropNames(from)) + if (!__hasOwnProp.call(to, key) && key !== except) + __defProp(to, key, { get: () => from[key], enumerable: !(desc = __getOwnPropDesc(from, key)) || desc.enumerable }); + } + return to; +}; +var __toCommonJS = (mod) => __copyProps(__defProp({}, "__esModule", { value: true }), mod); + +// src/index.ts +var src_exports = {}; +__export(src_exports, { + loadConfig: () => loadConfig +}); +module.exports = __toCommonJS(src_exports); + +// src/configLoader.ts + + +// src/fromEnv.ts +var import_property_provider = require("@smithy/property-provider"); + +// src/getSelectorName.ts +function getSelectorName(functionString) { + try { + const constants = new Set(Array.from(functionString.match(/([A-Z_]){3,}/g) ?? 
[])); + constants.delete("CONFIG"); + constants.delete("CONFIG_PREFIX_SEPARATOR"); + constants.delete("ENV"); + return [...constants].join(", "); + } catch (e) { + return functionString; + } +} +__name(getSelectorName, "getSelectorName"); + +// src/fromEnv.ts +var fromEnv = /* @__PURE__ */ __name((envVarSelector, logger) => async () => { + try { + const config = envVarSelector(process.env); + if (config === void 0) { + throw new Error(); + } + return config; + } catch (e) { + throw new import_property_provider.CredentialsProviderError( + e.message || `Not found in ENV: ${getSelectorName(envVarSelector.toString())}`, + { logger } + ); + } +}, "fromEnv"); + +// src/fromSharedConfigFiles.ts + +var import_shared_ini_file_loader = require("@smithy/shared-ini-file-loader"); +var fromSharedConfigFiles = /* @__PURE__ */ __name((configSelector, { preferredFile = "config", ...init } = {}) => async () => { + const profile = (0, import_shared_ini_file_loader.getProfileName)(init); + const { configFile, credentialsFile } = await (0, import_shared_ini_file_loader.loadSharedConfigFiles)(init); + const profileFromCredentials = credentialsFile[profile] || {}; + const profileFromConfig = configFile[profile] || {}; + const mergedProfile = preferredFile === "config" ? { ...profileFromCredentials, ...profileFromConfig } : { ...profileFromConfig, ...profileFromCredentials }; + try { + const cfgFile = preferredFile === "config" ? 
configFile : credentialsFile; + const configValue = configSelector(mergedProfile, cfgFile); + if (configValue === void 0) { + throw new Error(); + } + return configValue; + } catch (e) { + throw new import_property_provider.CredentialsProviderError( + e.message || `Not found in config files w/ profile [${profile}]: ${getSelectorName(configSelector.toString())}`, + { logger: init.logger } + ); + } +}, "fromSharedConfigFiles"); + +// src/fromStatic.ts + +var isFunction = /* @__PURE__ */ __name((func) => typeof func === "function", "isFunction"); +var fromStatic = /* @__PURE__ */ __name((defaultValue) => isFunction(defaultValue) ? async () => await defaultValue() : (0, import_property_provider.fromStatic)(defaultValue), "fromStatic"); + +// src/configLoader.ts +var loadConfig = /* @__PURE__ */ __name(({ environmentVariableSelector, configFileSelector, default: defaultValue }, configuration = {}) => (0, import_property_provider.memoize)( + (0, import_property_provider.chain)( + fromEnv(environmentVariableSelector), + fromSharedConfigFiles(configFileSelector, configuration), + fromStatic(defaultValue) + ) +), "loadConfig"); +// Annotate the CommonJS export names for ESM import in node: + +0 && (module.exports = { + loadConfig +}); + diff --git a/amplify/functions/deleteDocument/node_modules/@smithy/node-config-provider/dist-es/configLoader.js b/amplify/functions/deleteDocument/node_modules/@smithy/node-config-provider/dist-es/configLoader.js new file mode 100644 index 0000000..db044dd --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@smithy/node-config-provider/dist-es/configLoader.js @@ -0,0 +1,5 @@ +import { chain, memoize } from "@smithy/property-provider"; +import { fromEnv } from "./fromEnv"; +import { fromSharedConfigFiles } from "./fromSharedConfigFiles"; +import { fromStatic } from "./fromStatic"; +export const loadConfig = ({ environmentVariableSelector, configFileSelector, default: defaultValue }, configuration = {}) => 
memoize(chain(fromEnv(environmentVariableSelector), fromSharedConfigFiles(configFileSelector, configuration), fromStatic(defaultValue))); diff --git a/amplify/functions/deleteDocument/node_modules/@smithy/node-config-provider/dist-es/fromEnv.js b/amplify/functions/deleteDocument/node_modules/@smithy/node-config-provider/dist-es/fromEnv.js new file mode 100644 index 0000000..d43edbd --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@smithy/node-config-provider/dist-es/fromEnv.js @@ -0,0 +1,14 @@ +import { CredentialsProviderError } from "@smithy/property-provider"; +import { getSelectorName } from "./getSelectorName"; +export const fromEnv = (envVarSelector, logger) => async () => { + try { + const config = envVarSelector(process.env); + if (config === undefined) { + throw new Error(); + } + return config; + } + catch (e) { + throw new CredentialsProviderError(e.message || `Not found in ENV: ${getSelectorName(envVarSelector.toString())}`, { logger }); + } +}; diff --git a/amplify/functions/deleteDocument/node_modules/@smithy/node-config-provider/dist-es/fromSharedConfigFiles.js b/amplify/functions/deleteDocument/node_modules/@smithy/node-config-provider/dist-es/fromSharedConfigFiles.js new file mode 100644 index 0000000..b6435ed --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@smithy/node-config-provider/dist-es/fromSharedConfigFiles.js @@ -0,0 +1,23 @@ +import { CredentialsProviderError } from "@smithy/property-provider"; +import { getProfileName, loadSharedConfigFiles } from "@smithy/shared-ini-file-loader"; +import { getSelectorName } from "./getSelectorName"; +export const fromSharedConfigFiles = (configSelector, { preferredFile = "config", ...init } = {}) => async () => { + const profile = getProfileName(init); + const { configFile, credentialsFile } = await loadSharedConfigFiles(init); + const profileFromCredentials = credentialsFile[profile] || {}; + const profileFromConfig = configFile[profile] || {}; + const mergedProfile = 
preferredFile === "config" + ? { ...profileFromCredentials, ...profileFromConfig } + : { ...profileFromConfig, ...profileFromCredentials }; + try { + const cfgFile = preferredFile === "config" ? configFile : credentialsFile; + const configValue = configSelector(mergedProfile, cfgFile); + if (configValue === undefined) { + throw new Error(); + } + return configValue; + } + catch (e) { + throw new CredentialsProviderError(e.message || `Not found in config files w/ profile [${profile}]: ${getSelectorName(configSelector.toString())}`, { logger: init.logger }); + } +}; diff --git a/amplify/functions/deleteDocument/node_modules/@smithy/node-config-provider/dist-es/fromStatic.js b/amplify/functions/deleteDocument/node_modules/@smithy/node-config-provider/dist-es/fromStatic.js new file mode 100644 index 0000000..c9f91ff --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@smithy/node-config-provider/dist-es/fromStatic.js @@ -0,0 +1,3 @@ +import { fromStatic as convertToProvider } from "@smithy/property-provider"; +const isFunction = (func) => typeof func === "function"; +export const fromStatic = (defaultValue) => isFunction(defaultValue) ? async () => await defaultValue() : convertToProvider(defaultValue); diff --git a/amplify/functions/deleteDocument/node_modules/@smithy/node-config-provider/dist-es/getSelectorName.js b/amplify/functions/deleteDocument/node_modules/@smithy/node-config-provider/dist-es/getSelectorName.js new file mode 100644 index 0000000..d5e0f78 --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@smithy/node-config-provider/dist-es/getSelectorName.js @@ -0,0 +1,12 @@ +export function getSelectorName(functionString) { + try { + const constants = new Set(Array.from(functionString.match(/([A-Z_]){3,}/g) ?? 
[])); + constants.delete("CONFIG"); + constants.delete("CONFIG_PREFIX_SEPARATOR"); + constants.delete("ENV"); + return [...constants].join(", "); + } + catch (e) { + return functionString; + } +} diff --git a/amplify/functions/deleteDocument/node_modules/@smithy/node-config-provider/dist-es/index.js b/amplify/functions/deleteDocument/node_modules/@smithy/node-config-provider/dist-es/index.js new file mode 100644 index 0000000..2d035d9 --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@smithy/node-config-provider/dist-es/index.js @@ -0,0 +1 @@ +export * from "./configLoader"; diff --git a/amplify/functions/deleteDocument/node_modules/@smithy/node-config-provider/dist-types/configLoader.d.ts b/amplify/functions/deleteDocument/node_modules/@smithy/node-config-provider/dist-types/configLoader.d.ts new file mode 100644 index 0000000..0d0b232 --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@smithy/node-config-provider/dist-types/configLoader.d.ts @@ -0,0 +1,31 @@ +import { Provider } from "@smithy/types"; +import { GetterFromEnv } from "./fromEnv"; +import { GetterFromConfig, SharedConfigInit } from "./fromSharedConfigFiles"; +import { FromStaticConfig } from "./fromStatic"; +/** + * @internal + */ +export type LocalConfigOptions = SharedConfigInit; +/** + * @internal + */ +export interface LoadedConfigSelectors { + /** + * A getter function getting the config values from all the environment + * variables. 
+ */ + environmentVariableSelector: GetterFromEnv; + /** + * A getter function getting config values associated with the inferred + * profile from shared INI files + */ + configFileSelector: GetterFromConfig; + /** + * Default value or getter + */ + default: FromStaticConfig; +} +/** + * @internal + */ +export declare const loadConfig: ({ environmentVariableSelector, configFileSelector, default: defaultValue }: LoadedConfigSelectors, configuration?: LocalConfigOptions) => Provider; diff --git a/amplify/functions/deleteDocument/node_modules/@smithy/node-config-provider/dist-types/fromEnv.d.ts b/amplify/functions/deleteDocument/node_modules/@smithy/node-config-provider/dist-types/fromEnv.d.ts new file mode 100644 index 0000000..b2454c6 --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@smithy/node-config-provider/dist-types/fromEnv.d.ts @@ -0,0 +1,7 @@ +import { Logger, Provider } from "@smithy/types"; +export type GetterFromEnv = (env: Record) => T | undefined; +/** + * Get config value given the environment variable name or getter from + * environment variable. + */ +export declare const fromEnv: (envVarSelector: GetterFromEnv, logger?: Logger) => Provider; diff --git a/amplify/functions/deleteDocument/node_modules/@smithy/node-config-provider/dist-types/fromSharedConfigFiles.d.ts b/amplify/functions/deleteDocument/node_modules/@smithy/node-config-provider/dist-types/fromSharedConfigFiles.d.ts new file mode 100644 index 0000000..89a8eac --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@smithy/node-config-provider/dist-types/fromSharedConfigFiles.d.ts @@ -0,0 +1,22 @@ +import { SourceProfileInit } from "@smithy/shared-ini-file-loader"; +import { ParsedIniData, Profile, Provider } from "@smithy/types"; +/** + * @internal + */ +export interface SharedConfigInit extends SourceProfileInit { + /** + * The preferred shared ini file to load the config. "config" option refers to + * the shared config file(defaults to `~/.aws/config`). 
"credentials" option + * refers to the shared credentials file(defaults to `~/.aws/credentials`) + */ + preferredFile?: "config" | "credentials"; +} +/** + * @internal + */ +export type GetterFromConfig = (profile: Profile, configFile?: ParsedIniData) => T | undefined; +/** + * Get config value from the shared config files with inferred profile name. + * @internal + */ +export declare const fromSharedConfigFiles: (configSelector: GetterFromConfig, { preferredFile, ...init }?: SharedConfigInit) => Provider; diff --git a/amplify/functions/deleteDocument/node_modules/@smithy/node-config-provider/dist-types/fromStatic.d.ts b/amplify/functions/deleteDocument/node_modules/@smithy/node-config-provider/dist-types/fromStatic.d.ts new file mode 100644 index 0000000..d2c32a4 --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@smithy/node-config-provider/dist-types/fromStatic.d.ts @@ -0,0 +1,9 @@ +import { Provider } from "@smithy/types"; +/** + * @internal + */ +export type FromStaticConfig = T | (() => T) | Provider; +/** + * @internal + */ +export declare const fromStatic: (defaultValue: FromStaticConfig) => Provider; diff --git a/amplify/functions/deleteDocument/node_modules/@smithy/node-config-provider/dist-types/getSelectorName.d.ts b/amplify/functions/deleteDocument/node_modules/@smithy/node-config-provider/dist-types/getSelectorName.d.ts new file mode 100644 index 0000000..b5f1a1b --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@smithy/node-config-provider/dist-types/getSelectorName.d.ts @@ -0,0 +1,9 @@ +/** + * Attempts to extract the name of the variable that the functional selector is looking for. + * Improves readability over the raw Function.toString() value. + * @internal + * @param functionString - function's string representation. + * + * @returns constant value used within the function. 
+ */ +export declare function getSelectorName(functionString: string): string; diff --git a/amplify/functions/deleteDocument/node_modules/@smithy/node-config-provider/dist-types/index.d.ts b/amplify/functions/deleteDocument/node_modules/@smithy/node-config-provider/dist-types/index.d.ts new file mode 100644 index 0000000..2d035d9 --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@smithy/node-config-provider/dist-types/index.d.ts @@ -0,0 +1 @@ +export * from "./configLoader"; diff --git a/amplify/functions/deleteDocument/node_modules/@smithy/node-config-provider/dist-types/ts3.4/configLoader.d.ts b/amplify/functions/deleteDocument/node_modules/@smithy/node-config-provider/dist-types/ts3.4/configLoader.d.ts new file mode 100644 index 0000000..e877731 --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@smithy/node-config-provider/dist-types/ts3.4/configLoader.d.ts @@ -0,0 +1,31 @@ +import { Provider } from "@smithy/types"; +import { GetterFromEnv } from "./fromEnv"; +import { GetterFromConfig, SharedConfigInit } from "./fromSharedConfigFiles"; +import { FromStaticConfig } from "./fromStatic"; +/** + * @internal + */ +export type LocalConfigOptions = SharedConfigInit; +/** + * @internal + */ +export interface LoadedConfigSelectors { + /** + * A getter function getting the config values from all the environment + * variables. 
+ */ + environmentVariableSelector: GetterFromEnv; + /** + * A getter function getting config values associated with the inferred + * profile from shared INI files + */ + configFileSelector: GetterFromConfig; + /** + * Default value or getter + */ + default: FromStaticConfig; +} +/** + * @internal + */ +export declare const loadConfig: ({ environmentVariableSelector, configFileSelector, default: defaultValue }: LoadedConfigSelectors, configuration?: LocalConfigOptions) => Provider; diff --git a/amplify/functions/deleteDocument/node_modules/@smithy/node-config-provider/dist-types/ts3.4/fromEnv.d.ts b/amplify/functions/deleteDocument/node_modules/@smithy/node-config-provider/dist-types/ts3.4/fromEnv.d.ts new file mode 100644 index 0000000..e0a4cc7 --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@smithy/node-config-provider/dist-types/ts3.4/fromEnv.d.ts @@ -0,0 +1,7 @@ +import { Logger, Provider } from "@smithy/types"; +export type GetterFromEnv = (env: Record) => T | undefined; +/** + * Get config value given the environment variable name or getter from + * environment variable. + */ +export declare const fromEnv: (envVarSelector: GetterFromEnv, logger?: Logger) => Provider; diff --git a/amplify/functions/deleteDocument/node_modules/@smithy/node-config-provider/dist-types/ts3.4/fromSharedConfigFiles.d.ts b/amplify/functions/deleteDocument/node_modules/@smithy/node-config-provider/dist-types/ts3.4/fromSharedConfigFiles.d.ts new file mode 100644 index 0000000..aa0efa0 --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@smithy/node-config-provider/dist-types/ts3.4/fromSharedConfigFiles.d.ts @@ -0,0 +1,22 @@ +import { SourceProfileInit } from "@smithy/shared-ini-file-loader"; +import { ParsedIniData, Profile, Provider } from "@smithy/types"; +/** + * @internal + */ +export interface SharedConfigInit extends SourceProfileInit { + /** + * The preferred shared ini file to load the config. 
"config" option refers to + * the shared config file(defaults to `~/.aws/config`). "credentials" option + * refers to the shared credentials file(defaults to `~/.aws/credentials`) + */ + preferredFile?: "config" | "credentials"; +} +/** + * @internal + */ +export type GetterFromConfig = (profile: Profile, configFile?: ParsedIniData) => T | undefined; +/** + * Get config value from the shared config files with inferred profile name. + * @internal + */ +export declare const fromSharedConfigFiles: (configSelector: GetterFromConfig, { preferredFile, ...init }?: SharedConfigInit) => Provider; diff --git a/amplify/functions/deleteDocument/node_modules/@smithy/node-config-provider/dist-types/ts3.4/fromStatic.d.ts b/amplify/functions/deleteDocument/node_modules/@smithy/node-config-provider/dist-types/ts3.4/fromStatic.d.ts new file mode 100644 index 0000000..a4bab2d --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@smithy/node-config-provider/dist-types/ts3.4/fromStatic.d.ts @@ -0,0 +1,9 @@ +import { Provider } from "@smithy/types"; +/** + * @internal + */ +export type FromStaticConfig = T | (() => T) | Provider; +/** + * @internal + */ +export declare const fromStatic: (defaultValue: FromStaticConfig) => Provider; diff --git a/amplify/functions/deleteDocument/node_modules/@smithy/node-config-provider/dist-types/ts3.4/getSelectorName.d.ts b/amplify/functions/deleteDocument/node_modules/@smithy/node-config-provider/dist-types/ts3.4/getSelectorName.d.ts new file mode 100644 index 0000000..11c5da2 --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@smithy/node-config-provider/dist-types/ts3.4/getSelectorName.d.ts @@ -0,0 +1,9 @@ +/** + * Attempts to extract the name of the variable that the functional selector is looking for. + * Improves readability over the raw Function.toString() value. + * @internal + * @param functionString - function's string representation. + * + * @returns constant value used within the function. 
+ */ +export declare function getSelectorName(functionString: string): string; diff --git a/amplify/functions/deleteDocument/node_modules/@smithy/node-config-provider/dist-types/ts3.4/index.d.ts b/amplify/functions/deleteDocument/node_modules/@smithy/node-config-provider/dist-types/ts3.4/index.d.ts new file mode 100644 index 0000000..74a76f5 --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@smithy/node-config-provider/dist-types/ts3.4/index.d.ts @@ -0,0 +1 @@ +export * from "./configLoader"; diff --git a/amplify/functions/deleteDocument/node_modules/@smithy/node-config-provider/package.json b/amplify/functions/deleteDocument/node_modules/@smithy/node-config-provider/package.json new file mode 100644 index 0000000..3002d8e --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@smithy/node-config-provider/package.json @@ -0,0 +1,65 @@ +{ + "name": "@smithy/node-config-provider", + "version": "4.0.2", + "description": "Load config default values from ini config files and environmental variable", + "scripts": { + "build": "concurrently 'yarn:build:cjs' 'yarn:build:es' 'yarn:build:types && yarn build:types:downlevel'", + "build:cjs": "node ../../scripts/inline node-config-provider", + "build:es": "yarn g:tsc -p tsconfig.es.json", + "build:types": "yarn g:tsc -p tsconfig.types.json", + "build:types:downlevel": "rimraf dist-types/ts3.4 && downlevel-dts dist-types dist-types/ts3.4", + "stage-release": "rimraf ./.release && yarn pack && mkdir ./.release && tar zxvf ./package.tgz --directory ./.release && rm ./package.tgz", + "clean": "rimraf ./dist-* && rimraf *.tsbuildinfo || exit 0", + "lint": "eslint -c ../../.eslintrc.js \"src/**/*.ts\"", + "format": "prettier --config ../../prettier.config.js --ignore-path ../../.prettierignore --write \"**/*.{ts,md,json}\"", + "test": "yarn g:vitest run", + "test:watch": "yarn g:vitest watch" + }, + "author": { + "name": "AWS SDK for JavaScript Team", + "email": "", + "url": 
"https://aws.amazon.com/javascript/" + }, + "license": "Apache-2.0", + "main": "./dist-cjs/index.js", + "module": "./dist-es/index.js", + "types": "./dist-types/index.d.ts", + "dependencies": { + "@smithy/property-provider": "^4.0.2", + "@smithy/shared-ini-file-loader": "^4.0.2", + "@smithy/types": "^4.2.0", + "tslib": "^2.6.2" + }, + "devDependencies": { + "@types/node": "^18.11.9", + "concurrently": "7.0.0", + "downlevel-dts": "0.10.1", + "rimraf": "3.0.2", + "typedoc": "0.23.23" + }, + "engines": { + "node": ">=18.0.0" + }, + "typesVersions": { + "<4.0": { + "dist-types/*": [ + "dist-types/ts3.4/*" + ] + } + }, + "files": [ + "dist-*/**" + ], + "homepage": "https://github.com/smithy-lang/smithy-typescript/tree/main/packages/node-config-provider", + "repository": { + "type": "git", + "url": "https://github.com/smithy-lang/smithy-typescript.git", + "directory": "packages/node-config-provider" + }, + "typedoc": { + "entryPoint": "src/index.ts" + }, + "publishConfig": { + "directory": ".release/package" + } +} \ No newline at end of file diff --git a/amplify/functions/deleteDocument/node_modules/@smithy/node-http-handler/LICENSE b/amplify/functions/deleteDocument/node_modules/@smithy/node-http-handler/LICENSE new file mode 100644 index 0000000..7b6491b --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@smithy/node-http-handler/LICENSE @@ -0,0 +1,201 @@ +Apache License + Version 2.0, January 2004 + http://www.apache.org/licenses/ + + TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION + + 1. Definitions. + + "License" shall mean the terms and conditions for use, reproduction, + and distribution as defined by Sections 1 through 9 of this document. + + "Licensor" shall mean the copyright owner or entity authorized by + the copyright owner that is granting the License. + + "Legal Entity" shall mean the union of the acting entity and all + other entities that control, are controlled by, or are under common + control with that entity. 
For the purposes of this definition, + "control" means (i) the power, direct or indirect, to cause the + direction or management of such entity, whether by contract or + otherwise, or (ii) ownership of fifty percent (50%) or more of the + outstanding shares, or (iii) beneficial ownership of such entity. + + "You" (or "Your") shall mean an individual or Legal Entity + exercising permissions granted by this License. + + "Source" form shall mean the preferred form for making modifications, + including but not limited to software source code, documentation + source, and configuration files. + + "Object" form shall mean any form resulting from mechanical + transformation or translation of a Source form, including but + not limited to compiled object code, generated documentation, + and conversions to other media types. + + "Work" shall mean the work of authorship, whether in Source or + Object form, made available under the License, as indicated by a + copyright notice that is included in or attached to the work + (an example is provided in the Appendix below). + + "Derivative Works" shall mean any work, whether in Source or Object + form, that is based on (or derived from) the Work and for which the + editorial revisions, annotations, elaborations, or other modifications + represent, as a whole, an original work of authorship. For the purposes + of this License, Derivative Works shall not include works that remain + separable from, or merely link (or bind by name) to the interfaces of, + the Work and Derivative Works thereof. + + "Contribution" shall mean any work of authorship, including + the original version of the Work and any modifications or additions + to that Work or Derivative Works thereof, that is intentionally + submitted to Licensor for inclusion in the Work by the copyright owner + or by an individual or Legal Entity authorized to submit on behalf of + the copyright owner. 
For the purposes of this definition, "submitted" + means any form of electronic, verbal, or written communication sent + to the Licensor or its representatives, including but not limited to + communication on electronic mailing lists, source code control systems, + and issue tracking systems that are managed by, or on behalf of, the + Licensor for the purpose of discussing and improving the Work, but + excluding communication that is conspicuously marked or otherwise + designated in writing by the copyright owner as "Not a Contribution." + + "Contributor" shall mean Licensor and any individual or Legal Entity + on behalf of whom a Contribution has been received by Licensor and + subsequently incorporated within the Work. + + 2. Grant of Copyright License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + copyright license to reproduce, prepare Derivative Works of, + publicly display, publicly perform, sublicense, and distribute the + Work and such Derivative Works in Source or Object form. + + 3. Grant of Patent License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + (except as stated in this section) patent license to make, have made, + use, offer to sell, sell, import, and otherwise transfer the Work, + where such license applies only to those patent claims licensable + by such Contributor that are necessarily infringed by their + Contribution(s) alone or by combination of their Contribution(s) + with the Work to which such Contribution(s) was submitted. 
If You + institute patent litigation against any entity (including a + cross-claim or counterclaim in a lawsuit) alleging that the Work + or a Contribution incorporated within the Work constitutes direct + or contributory patent infringement, then any patent licenses + granted to You under this License for that Work shall terminate + as of the date such litigation is filed. + + 4. Redistribution. You may reproduce and distribute copies of the + Work or Derivative Works thereof in any medium, with or without + modifications, and in Source or Object form, provided that You + meet the following conditions: + + (a) You must give any other recipients of the Work or + Derivative Works a copy of this License; and + + (b) You must cause any modified files to carry prominent notices + stating that You changed the files; and + + (c) You must retain, in the Source form of any Derivative Works + that You distribute, all copyright, patent, trademark, and + attribution notices from the Source form of the Work, + excluding those notices that do not pertain to any part of + the Derivative Works; and + + (d) If the Work includes a "NOTICE" text file as part of its + distribution, then any Derivative Works that You distribute must + include a readable copy of the attribution notices contained + within such NOTICE file, excluding those notices that do not + pertain to any part of the Derivative Works, in at least one + of the following places: within a NOTICE text file distributed + as part of the Derivative Works; within the Source form or + documentation, if provided along with the Derivative Works; or, + within a display generated by the Derivative Works, if and + wherever such third-party notices normally appear. The contents + of the NOTICE file are for informational purposes only and + do not modify the License. 
You may add Your own attribution + notices within Derivative Works that You distribute, alongside + or as an addendum to the NOTICE text from the Work, provided + that such additional attribution notices cannot be construed + as modifying the License. + + You may add Your own copyright statement to Your modifications and + may provide additional or different license terms and conditions + for use, reproduction, or distribution of Your modifications, or + for any such Derivative Works as a whole, provided Your use, + reproduction, and distribution of the Work otherwise complies with + the conditions stated in this License. + + 5. Submission of Contributions. Unless You explicitly state otherwise, + any Contribution intentionally submitted for inclusion in the Work + by You to the Licensor shall be under the terms and conditions of + this License, without any additional terms or conditions. + Notwithstanding the above, nothing herein shall supersede or modify + the terms of any separate license agreement you may have executed + with Licensor regarding such Contributions. + + 6. Trademarks. This License does not grant permission to use the trade + names, trademarks, service marks, or product names of the Licensor, + except as required for reasonable and customary use in describing the + origin of the Work and reproducing the content of the NOTICE file. + + 7. Disclaimer of Warranty. Unless required by applicable law or + agreed to in writing, Licensor provides the Work (and each + Contributor provides its Contributions) on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or + implied, including, without limitation, any warranties or conditions + of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A + PARTICULAR PURPOSE. You are solely responsible for determining the + appropriateness of using or redistributing the Work and assume any + risks associated with Your exercise of permissions under this License. + + 8. 
Limitation of Liability. In no event and under no legal theory, + whether in tort (including negligence), contract, or otherwise, + unless required by applicable law (such as deliberate and grossly + negligent acts) or agreed to in writing, shall any Contributor be + liable to You for damages, including any direct, indirect, special, + incidental, or consequential damages of any character arising as a + result of this License or out of the use or inability to use the + Work (including but not limited to damages for loss of goodwill, + work stoppage, computer failure or malfunction, or any and all + other commercial damages or losses), even if such Contributor + has been advised of the possibility of such damages. + + 9. Accepting Warranty or Additional Liability. While redistributing + the Work or Derivative Works thereof, You may choose to offer, + and charge a fee for, acceptance of support, warranty, indemnity, + or other liability obligations and/or rights consistent with this + License. However, in accepting such obligations, You may act only + on Your own behalf and on Your sole responsibility, not on behalf + of any other Contributor, and only if You agree to indemnify, + defend, and hold each Contributor harmless for any liability + incurred by, or claims asserted against, such Contributor by reason + of your accepting any such warranty or additional liability. + + END OF TERMS AND CONDITIONS + + APPENDIX: How to apply the Apache License to your work. + + To apply the Apache License to your work, attach the following + boilerplate notice, with the fields enclosed by brackets "{}" + replaced with your own identifying information. (Don't include + the brackets!) The text should be enclosed in the appropriate + comment syntax for the file format. We also recommend that a + file or class name and description of purpose be included on the + same "printed page" as the copyright notice for easier + identification within third-party archives. 
+ + Copyright 2018-2020 Amazon.com, Inc. or its affiliates. All Rights Reserved. + + Licensed under the Apache License, Version 2.0 (the "License"); + you may not use this file except in compliance with the License. + You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + + Unless required by applicable law or agreed to in writing, software + distributed under the License is distributed on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + See the License for the specific language governing permissions and + limitations under the License. \ No newline at end of file diff --git a/amplify/functions/deleteDocument/node_modules/@smithy/node-http-handler/README.md b/amplify/functions/deleteDocument/node_modules/@smithy/node-http-handler/README.md new file mode 100644 index 0000000..214719f --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@smithy/node-http-handler/README.md @@ -0,0 +1,9 @@ +# @smithy/node-http-handler + +[![NPM version](https://img.shields.io/npm/v/@smithy/node-http-handler/latest.svg)](https://www.npmjs.com/package/@smithy/node-http-handler) +[![NPM downloads](https://img.shields.io/npm/dm/@smithy/node-http-handler.svg)](https://www.npmjs.com/package/@smithy/node-http-handler) + +This package implements the default `requestHandler` for Node.js using `node:http`, `node:https`, and `node:http2`. + +For an example on how `requestHandler`s are used by Smithy generated SDK clients, refer to +the [AWS SDK for JavaScript (v3) supplemental docs](https://github.com/aws/aws-sdk-js-v3/blob/main/supplemental-docs/CLIENTS.md#request-handler-requesthandler). 
diff --git a/amplify/functions/deleteDocument/node_modules/@smithy/node-http-handler/dist-cjs/constants.js b/amplify/functions/deleteDocument/node_modules/@smithy/node-http-handler/dist-cjs/constants.js new file mode 100644 index 0000000..532e610 --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@smithy/node-http-handler/dist-cjs/constants.js @@ -0,0 +1 @@ +module.exports = require("./index.js"); \ No newline at end of file diff --git a/amplify/functions/deleteDocument/node_modules/@smithy/node-http-handler/dist-cjs/get-transformed-headers.js b/amplify/functions/deleteDocument/node_modules/@smithy/node-http-handler/dist-cjs/get-transformed-headers.js new file mode 100644 index 0000000..532e610 --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@smithy/node-http-handler/dist-cjs/get-transformed-headers.js @@ -0,0 +1 @@ +module.exports = require("./index.js"); \ No newline at end of file diff --git a/amplify/functions/deleteDocument/node_modules/@smithy/node-http-handler/dist-cjs/index.js b/amplify/functions/deleteDocument/node_modules/@smithy/node-http-handler/dist-cjs/index.js new file mode 100644 index 0000000..e31976f --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@smithy/node-http-handler/dist-cjs/index.js @@ -0,0 +1,806 @@ +var __create = Object.create; +var __defProp = Object.defineProperty; +var __getOwnPropDesc = Object.getOwnPropertyDescriptor; +var __getOwnPropNames = Object.getOwnPropertyNames; +var __getProtoOf = Object.getPrototypeOf; +var __hasOwnProp = Object.prototype.hasOwnProperty; +var __name = (target, value) => __defProp(target, "name", { value, configurable: true }); +var __export = (target, all) => { + for (var name in all) + __defProp(target, name, { get: all[name], enumerable: true }); +}; +var __copyProps = (to, from, except, desc) => { + if (from && typeof from === "object" || typeof from === "function") { + for (let key of __getOwnPropNames(from)) + if (!__hasOwnProp.call(to, key) && key 
!== except) + __defProp(to, key, { get: () => from[key], enumerable: !(desc = __getOwnPropDesc(from, key)) || desc.enumerable }); + } + return to; +}; +var __toESM = (mod, isNodeMode, target) => (target = mod != null ? __create(__getProtoOf(mod)) : {}, __copyProps( + // If the importer is in node compatibility mode or this is not an ESM + // file that has been converted to a CommonJS file using a Babel- + // compatible transform (i.e. "__esModule" has not been set), then set + // "default" to the CommonJS "module.exports" for node compatibility. + isNodeMode || !mod || !mod.__esModule ? __defProp(target, "default", { value: mod, enumerable: true }) : target, + mod +)); +var __toCommonJS = (mod) => __copyProps(__defProp({}, "__esModule", { value: true }), mod); + +// src/index.ts +var src_exports = {}; +__export(src_exports, { + DEFAULT_REQUEST_TIMEOUT: () => DEFAULT_REQUEST_TIMEOUT, + NodeHttp2Handler: () => NodeHttp2Handler, + NodeHttpHandler: () => NodeHttpHandler, + streamCollector: () => streamCollector +}); +module.exports = __toCommonJS(src_exports); + +// src/node-http-handler.ts +var import_protocol_http = require("@smithy/protocol-http"); +var import_querystring_builder = require("@smithy/querystring-builder"); +var import_http = require("http"); +var import_https = require("https"); + +// src/constants.ts +var NODEJS_TIMEOUT_ERROR_CODES = ["ECONNRESET", "EPIPE", "ETIMEDOUT"]; + +// src/get-transformed-headers.ts +var getTransformedHeaders = /* @__PURE__ */ __name((headers) => { + const transformedHeaders = {}; + for (const name of Object.keys(headers)) { + const headerValues = headers[name]; + transformedHeaders[name] = Array.isArray(headerValues) ? 
headerValues.join(",") : headerValues; + } + return transformedHeaders; +}, "getTransformedHeaders"); + +// src/timing.ts +var timing = { + setTimeout: (cb, ms) => setTimeout(cb, ms), + clearTimeout: (timeoutId) => clearTimeout(timeoutId) +}; + +// src/set-connection-timeout.ts +var DEFER_EVENT_LISTENER_TIME = 1e3; +var setConnectionTimeout = /* @__PURE__ */ __name((request, reject, timeoutInMs = 0) => { + if (!timeoutInMs) { + return -1; + } + const registerTimeout = /* @__PURE__ */ __name((offset) => { + const timeoutId = timing.setTimeout(() => { + request.destroy(); + reject( + Object.assign(new Error(`Socket timed out without establishing a connection within ${timeoutInMs} ms`), { + name: "TimeoutError" + }) + ); + }, timeoutInMs - offset); + const doWithSocket = /* @__PURE__ */ __name((socket) => { + if (socket?.connecting) { + socket.on("connect", () => { + timing.clearTimeout(timeoutId); + }); + } else { + timing.clearTimeout(timeoutId); + } + }, "doWithSocket"); + if (request.socket) { + doWithSocket(request.socket); + } else { + request.on("socket", doWithSocket); + } + }, "registerTimeout"); + if (timeoutInMs < 2e3) { + registerTimeout(0); + return 0; + } + return timing.setTimeout(registerTimeout.bind(null, DEFER_EVENT_LISTENER_TIME), DEFER_EVENT_LISTENER_TIME); +}, "setConnectionTimeout"); + +// src/set-socket-keep-alive.ts +var DEFER_EVENT_LISTENER_TIME2 = 3e3; +var setSocketKeepAlive = /* @__PURE__ */ __name((request, { keepAlive, keepAliveMsecs }, deferTimeMs = DEFER_EVENT_LISTENER_TIME2) => { + if (keepAlive !== true) { + return -1; + } + const registerListener = /* @__PURE__ */ __name(() => { + if (request.socket) { + request.socket.setKeepAlive(keepAlive, keepAliveMsecs || 0); + } else { + request.on("socket", (socket) => { + socket.setKeepAlive(keepAlive, keepAliveMsecs || 0); + }); + } + }, "registerListener"); + if (deferTimeMs === 0) { + registerListener(); + return 0; + } + return timing.setTimeout(registerListener, deferTimeMs); +}, 
"setSocketKeepAlive"); + +// src/set-socket-timeout.ts +var DEFER_EVENT_LISTENER_TIME3 = 3e3; +var setSocketTimeout = /* @__PURE__ */ __name((request, reject, timeoutInMs = DEFAULT_REQUEST_TIMEOUT) => { + const registerTimeout = /* @__PURE__ */ __name((offset) => { + const timeout = timeoutInMs - offset; + const onTimeout = /* @__PURE__ */ __name(() => { + request.destroy(); + reject(Object.assign(new Error(`Connection timed out after ${timeoutInMs} ms`), { name: "TimeoutError" })); + }, "onTimeout"); + if (request.socket) { + request.socket.setTimeout(timeout, onTimeout); + request.on("close", () => request.socket?.removeListener("timeout", onTimeout)); + } else { + request.setTimeout(timeout, onTimeout); + } + }, "registerTimeout"); + if (0 < timeoutInMs && timeoutInMs < 6e3) { + registerTimeout(0); + return 0; + } + return timing.setTimeout( + registerTimeout.bind(null, timeoutInMs === 0 ? 0 : DEFER_EVENT_LISTENER_TIME3), + DEFER_EVENT_LISTENER_TIME3 + ); +}, "setSocketTimeout"); + +// src/write-request-body.ts +var import_stream = require("stream"); +var MIN_WAIT_TIME = 6e3; +async function writeRequestBody(httpRequest, request, maxContinueTimeoutMs = MIN_WAIT_TIME) { + const headers = request.headers ?? 
{}; + const expect = headers["Expect"] || headers["expect"]; + let timeoutId = -1; + let sendBody = true; + if (expect === "100-continue") { + sendBody = await Promise.race([ + new Promise((resolve) => { + timeoutId = Number(timing.setTimeout(() => resolve(true), Math.max(MIN_WAIT_TIME, maxContinueTimeoutMs))); + }), + new Promise((resolve) => { + httpRequest.on("continue", () => { + timing.clearTimeout(timeoutId); + resolve(true); + }); + httpRequest.on("response", () => { + timing.clearTimeout(timeoutId); + resolve(false); + }); + httpRequest.on("error", () => { + timing.clearTimeout(timeoutId); + resolve(false); + }); + }) + ]); + } + if (sendBody) { + writeBody(httpRequest, request.body); + } +} +__name(writeRequestBody, "writeRequestBody"); +function writeBody(httpRequest, body) { + if (body instanceof import_stream.Readable) { + body.pipe(httpRequest); + return; + } + if (body) { + if (Buffer.isBuffer(body) || typeof body === "string") { + httpRequest.end(body); + return; + } + const uint8 = body; + if (typeof uint8 === "object" && uint8.buffer && typeof uint8.byteOffset === "number" && typeof uint8.byteLength === "number") { + httpRequest.end(Buffer.from(uint8.buffer, uint8.byteOffset, uint8.byteLength)); + return; + } + httpRequest.end(Buffer.from(body)); + return; + } + httpRequest.end(); +} +__name(writeBody, "writeBody"); + +// src/node-http-handler.ts +var DEFAULT_REQUEST_TIMEOUT = 0; +var NodeHttpHandler = class _NodeHttpHandler { + constructor(options) { + this.socketWarningTimestamp = 0; + // Node http handler is hard-coded to http/1.1: https://github.com/nodejs/node/blob/ff5664b83b89c55e4ab5d5f60068fb457f1f5872/lib/_http_server.js#L286 + this.metadata = { handlerProtocol: "http/1.1" }; + this.configProvider = new Promise((resolve, reject) => { + if (typeof options === "function") { + options().then((_options) => { + resolve(this.resolveDefaultConfig(_options)); + }).catch(reject); + } else { + resolve(this.resolveDefaultConfig(options)); + } + }); + 
} + static { + __name(this, "NodeHttpHandler"); + } + /** + * @returns the input if it is an HttpHandler of any class, + * or instantiates a new instance of this handler. + */ + static create(instanceOrOptions) { + if (typeof instanceOrOptions?.handle === "function") { + return instanceOrOptions; + } + return new _NodeHttpHandler(instanceOrOptions); + } + /** + * @internal + * + * @param agent - http(s) agent in use by the NodeHttpHandler instance. + * @param socketWarningTimestamp - last socket usage check timestamp. + * @param logger - channel for the warning. + * @returns timestamp of last emitted warning. + */ + static checkSocketUsage(agent, socketWarningTimestamp, logger = console) { + const { sockets, requests, maxSockets } = agent; + if (typeof maxSockets !== "number" || maxSockets === Infinity) { + return socketWarningTimestamp; + } + const interval = 15e3; + if (Date.now() - interval < socketWarningTimestamp) { + return socketWarningTimestamp; + } + if (sockets && requests) { + for (const origin in sockets) { + const socketsInUse = sockets[origin]?.length ?? 0; + const requestsEnqueued = requests[origin]?.length ?? 0; + if (socketsInUse >= maxSockets && requestsEnqueued >= 2 * maxSockets) { + logger?.warn?.( + `@smithy/node-http-handler:WARN - socket usage at capacity=${socketsInUse} and ${requestsEnqueued} additional requests are enqueued. +See https://docs.aws.amazon.com/sdk-for-javascript/v3/developer-guide/node-configuring-maxsockets.html +or increase socketAcquisitionWarningTimeout=(millis) in the NodeHttpHandler config.` + ); + return Date.now(); + } + } + } + return socketWarningTimestamp; + } + resolveDefaultConfig(options) { + const { requestTimeout, connectionTimeout, socketTimeout, socketAcquisitionWarningTimeout, httpAgent, httpsAgent } = options || {}; + const keepAlive = true; + const maxSockets = 50; + return { + connectionTimeout, + requestTimeout: requestTimeout ?? 
socketTimeout, + socketAcquisitionWarningTimeout, + httpAgent: (() => { + if (httpAgent instanceof import_http.Agent || typeof httpAgent?.destroy === "function") { + return httpAgent; + } + return new import_http.Agent({ keepAlive, maxSockets, ...httpAgent }); + })(), + httpsAgent: (() => { + if (httpsAgent instanceof import_https.Agent || typeof httpsAgent?.destroy === "function") { + return httpsAgent; + } + return new import_https.Agent({ keepAlive, maxSockets, ...httpsAgent }); + })(), + logger: console + }; + } + destroy() { + this.config?.httpAgent?.destroy(); + this.config?.httpsAgent?.destroy(); + } + async handle(request, { abortSignal } = {}) { + if (!this.config) { + this.config = await this.configProvider; + } + return new Promise((_resolve, _reject) => { + let writeRequestBodyPromise = void 0; + const timeouts = []; + const resolve = /* @__PURE__ */ __name(async (arg) => { + await writeRequestBodyPromise; + timeouts.forEach(timing.clearTimeout); + _resolve(arg); + }, "resolve"); + const reject = /* @__PURE__ */ __name(async (arg) => { + await writeRequestBodyPromise; + timeouts.forEach(timing.clearTimeout); + _reject(arg); + }, "reject"); + if (!this.config) { + throw new Error("Node HTTP request handler config is not resolved"); + } + if (abortSignal?.aborted) { + const abortError = new Error("Request aborted"); + abortError.name = "AbortError"; + reject(abortError); + return; + } + const isSSL = request.protocol === "https:"; + const agent = isSSL ? this.config.httpsAgent : this.config.httpAgent; + timeouts.push( + timing.setTimeout( + () => { + this.socketWarningTimestamp = _NodeHttpHandler.checkSocketUsage( + agent, + this.socketWarningTimestamp, + this.config.logger + ); + }, + this.config.socketAcquisitionWarningTimeout ?? (this.config.requestTimeout ?? 2e3) + (this.config.connectionTimeout ?? 
1e3) + ) + ); + const queryString = (0, import_querystring_builder.buildQueryString)(request.query || {}); + let auth = void 0; + if (request.username != null || request.password != null) { + const username = request.username ?? ""; + const password = request.password ?? ""; + auth = `${username}:${password}`; + } + let path = request.path; + if (queryString) { + path += `?${queryString}`; + } + if (request.fragment) { + path += `#${request.fragment}`; + } + let hostname = request.hostname ?? ""; + if (hostname[0] === "[" && hostname.endsWith("]")) { + hostname = request.hostname.slice(1, -1); + } else { + hostname = request.hostname; + } + const nodeHttpsOptions = { + headers: request.headers, + host: hostname, + method: request.method, + path, + port: request.port, + agent, + auth + }; + const requestFunc = isSSL ? import_https.request : import_http.request; + const req = requestFunc(nodeHttpsOptions, (res) => { + const httpResponse = new import_protocol_http.HttpResponse({ + statusCode: res.statusCode || -1, + reason: res.statusMessage, + headers: getTransformedHeaders(res.headers), + body: res + }); + resolve({ response: httpResponse }); + }); + req.on("error", (err) => { + if (NODEJS_TIMEOUT_ERROR_CODES.includes(err.code)) { + reject(Object.assign(err, { name: "TimeoutError" })); + } else { + reject(err); + } + }); + if (abortSignal) { + const onAbort = /* @__PURE__ */ __name(() => { + req.destroy(); + const abortError = new Error("Request aborted"); + abortError.name = "AbortError"; + reject(abortError); + }, "onAbort"); + if (typeof abortSignal.addEventListener === "function") { + const signal = abortSignal; + signal.addEventListener("abort", onAbort, { once: true }); + req.once("close", () => signal.removeEventListener("abort", onAbort)); + } else { + abortSignal.onabort = onAbort; + } + } + timeouts.push(setConnectionTimeout(req, reject, this.config.connectionTimeout)); + timeouts.push(setSocketTimeout(req, reject, this.config.requestTimeout)); + const 
httpAgent = nodeHttpsOptions.agent; + if (typeof httpAgent === "object" && "keepAlive" in httpAgent) { + timeouts.push( + setSocketKeepAlive(req, { + // @ts-expect-error keepAlive is not public on httpAgent. + keepAlive: httpAgent.keepAlive, + // @ts-expect-error keepAliveMsecs is not public on httpAgent. + keepAliveMsecs: httpAgent.keepAliveMsecs + }) + ); + } + writeRequestBodyPromise = writeRequestBody(req, request, this.config.requestTimeout).catch((e) => { + timeouts.forEach(timing.clearTimeout); + return _reject(e); + }); + }); + } + updateHttpClientConfig(key, value) { + this.config = void 0; + this.configProvider = this.configProvider.then((config) => { + return { + ...config, + [key]: value + }; + }); + } + httpHandlerConfigs() { + return this.config ?? {}; + } +}; + +// src/node-http2-handler.ts + + +var import_http22 = require("http2"); + +// src/node-http2-connection-manager.ts +var import_http2 = __toESM(require("http2")); + +// src/node-http2-connection-pool.ts +var NodeHttp2ConnectionPool = class { + constructor(sessions) { + this.sessions = []; + this.sessions = sessions ?? 
[]; + } + static { + __name(this, "NodeHttp2ConnectionPool"); + } + poll() { + if (this.sessions.length > 0) { + return this.sessions.shift(); + } + } + offerLast(session) { + this.sessions.push(session); + } + contains(session) { + return this.sessions.includes(session); + } + remove(session) { + this.sessions = this.sessions.filter((s) => s !== session); + } + [Symbol.iterator]() { + return this.sessions[Symbol.iterator](); + } + destroy(connection) { + for (const session of this.sessions) { + if (session === connection) { + if (!session.destroyed) { + session.destroy(); + } + } + } + } +}; + +// src/node-http2-connection-manager.ts +var NodeHttp2ConnectionManager = class { + constructor(config) { + this.sessionCache = /* @__PURE__ */ new Map(); + this.config = config; + if (this.config.maxConcurrency && this.config.maxConcurrency <= 0) { + throw new RangeError("maxConcurrency must be greater than zero."); + } + } + static { + __name(this, "NodeHttp2ConnectionManager"); + } + lease(requestContext, connectionConfiguration) { + const url = this.getUrlString(requestContext); + const existingPool = this.sessionCache.get(url); + if (existingPool) { + const existingSession = existingPool.poll(); + if (existingSession && !this.config.disableConcurrency) { + return existingSession; + } + } + const session = import_http2.default.connect(url); + if (this.config.maxConcurrency) { + session.settings({ maxConcurrentStreams: this.config.maxConcurrency }, (err) => { + if (err) { + throw new Error( + "Fail to set maxConcurrentStreams to " + this.config.maxConcurrency + "when creating new session for " + requestContext.destination.toString() + ); + } + }); + } + session.unref(); + const destroySessionCb = /* @__PURE__ */ __name(() => { + session.destroy(); + this.deleteSession(url, session); + }, "destroySessionCb"); + session.on("goaway", destroySessionCb); + session.on("error", destroySessionCb); + session.on("frameError", destroySessionCb); + session.on("close", () => 
this.deleteSession(url, session)); + if (connectionConfiguration.requestTimeout) { + session.setTimeout(connectionConfiguration.requestTimeout, destroySessionCb); + } + const connectionPool = this.sessionCache.get(url) || new NodeHttp2ConnectionPool(); + connectionPool.offerLast(session); + this.sessionCache.set(url, connectionPool); + return session; + } + /** + * Delete a session from the connection pool. + * @param authority The authority of the session to delete. + * @param session The session to delete. + */ + deleteSession(authority, session) { + const existingConnectionPool = this.sessionCache.get(authority); + if (!existingConnectionPool) { + return; + } + if (!existingConnectionPool.contains(session)) { + return; + } + existingConnectionPool.remove(session); + this.sessionCache.set(authority, existingConnectionPool); + } + release(requestContext, session) { + const cacheKey = this.getUrlString(requestContext); + this.sessionCache.get(cacheKey)?.offerLast(session); + } + destroy() { + for (const [key, connectionPool] of this.sessionCache) { + for (const session of connectionPool) { + if (!session.destroyed) { + session.destroy(); + } + connectionPool.remove(session); + } + this.sessionCache.delete(key); + } + } + setMaxConcurrentStreams(maxConcurrentStreams) { + if (maxConcurrentStreams && maxConcurrentStreams <= 0) { + throw new RangeError("maxConcurrentStreams must be greater than zero."); + } + this.config.maxConcurrency = maxConcurrentStreams; + } + setDisableConcurrentStreams(disableConcurrentStreams) { + this.config.disableConcurrency = disableConcurrentStreams; + } + getUrlString(request) { + return request.destination.toString(); + } +}; + +// src/node-http2-handler.ts +var NodeHttp2Handler = class _NodeHttp2Handler { + constructor(options) { + this.metadata = { handlerProtocol: "h2" }; + this.connectionManager = new NodeHttp2ConnectionManager({}); + this.configProvider = new Promise((resolve, reject) => { + if (typeof options === "function") { + 
options().then((opts) => { + resolve(opts || {}); + }).catch(reject); + } else { + resolve(options || {}); + } + }); + } + static { + __name(this, "NodeHttp2Handler"); + } + /** + * @returns the input if it is an HttpHandler of any class, + * or instantiates a new instance of this handler. + */ + static create(instanceOrOptions) { + if (typeof instanceOrOptions?.handle === "function") { + return instanceOrOptions; + } + return new _NodeHttp2Handler(instanceOrOptions); + } + destroy() { + this.connectionManager.destroy(); + } + async handle(request, { abortSignal } = {}) { + if (!this.config) { + this.config = await this.configProvider; + this.connectionManager.setDisableConcurrentStreams(this.config.disableConcurrentStreams || false); + if (this.config.maxConcurrentStreams) { + this.connectionManager.setMaxConcurrentStreams(this.config.maxConcurrentStreams); + } + } + const { requestTimeout, disableConcurrentStreams } = this.config; + return new Promise((_resolve, _reject) => { + let fulfilled = false; + let writeRequestBodyPromise = void 0; + const resolve = /* @__PURE__ */ __name(async (arg) => { + await writeRequestBodyPromise; + _resolve(arg); + }, "resolve"); + const reject = /* @__PURE__ */ __name(async (arg) => { + await writeRequestBodyPromise; + _reject(arg); + }, "reject"); + if (abortSignal?.aborted) { + fulfilled = true; + const abortError = new Error("Request aborted"); + abortError.name = "AbortError"; + reject(abortError); + return; + } + const { hostname, method, port, protocol, query } = request; + let auth = ""; + if (request.username != null || request.password != null) { + const username = request.username ?? ""; + const password = request.password ?? ""; + auth = `${username}:${password}@`; + } + const authority = `${protocol}//${auth}${hostname}${port ? 
`:${port}` : ""}`; + const requestContext = { destination: new URL(authority) }; + const session = this.connectionManager.lease(requestContext, { + requestTimeout: this.config?.sessionTimeout, + disableConcurrentStreams: disableConcurrentStreams || false + }); + const rejectWithDestroy = /* @__PURE__ */ __name((err) => { + if (disableConcurrentStreams) { + this.destroySession(session); + } + fulfilled = true; + reject(err); + }, "rejectWithDestroy"); + const queryString = (0, import_querystring_builder.buildQueryString)(query || {}); + let path = request.path; + if (queryString) { + path += `?${queryString}`; + } + if (request.fragment) { + path += `#${request.fragment}`; + } + const req = session.request({ + ...request.headers, + [import_http22.constants.HTTP2_HEADER_PATH]: path, + [import_http22.constants.HTTP2_HEADER_METHOD]: method + }); + session.ref(); + req.on("response", (headers) => { + const httpResponse = new import_protocol_http.HttpResponse({ + statusCode: headers[":status"] || -1, + headers: getTransformedHeaders(headers), + body: req + }); + fulfilled = true; + resolve({ response: httpResponse }); + if (disableConcurrentStreams) { + session.close(); + this.connectionManager.deleteSession(authority, session); + } + }); + if (requestTimeout) { + req.setTimeout(requestTimeout, () => { + req.close(); + const timeoutError = new Error(`Stream timed out because of no activity for ${requestTimeout} ms`); + timeoutError.name = "TimeoutError"; + rejectWithDestroy(timeoutError); + }); + } + if (abortSignal) { + const onAbort = /* @__PURE__ */ __name(() => { + req.close(); + const abortError = new Error("Request aborted"); + abortError.name = "AbortError"; + rejectWithDestroy(abortError); + }, "onAbort"); + if (typeof abortSignal.addEventListener === "function") { + const signal = abortSignal; + signal.addEventListener("abort", onAbort, { once: true }); + req.once("close", () => signal.removeEventListener("abort", onAbort)); + } else { + abortSignal.onabort = 
onAbort; + } + } + req.on("frameError", (type, code, id) => { + rejectWithDestroy(new Error(`Frame type id ${type} in stream id ${id} has failed with code ${code}.`)); + }); + req.on("error", rejectWithDestroy); + req.on("aborted", () => { + rejectWithDestroy( + new Error(`HTTP/2 stream is abnormally aborted in mid-communication with result code ${req.rstCode}.`) + ); + }); + req.on("close", () => { + session.unref(); + if (disableConcurrentStreams) { + session.destroy(); + } + if (!fulfilled) { + rejectWithDestroy(new Error("Unexpected error: http2 request did not get a response")); + } + }); + writeRequestBodyPromise = writeRequestBody(req, request, requestTimeout); + }); + } + updateHttpClientConfig(key, value) { + this.config = void 0; + this.configProvider = this.configProvider.then((config) => { + return { + ...config, + [key]: value + }; + }); + } + httpHandlerConfigs() { + return this.config ?? {}; + } + /** + * Destroys a session. + * @param session - the session to destroy. + */ + destroySession(session) { + if (!session.destroyed) { + session.destroy(); + } + } +}; + +// src/stream-collector/collector.ts + +var Collector = class extends import_stream.Writable { + constructor() { + super(...arguments); + this.bufferedBytes = []; + } + static { + __name(this, "Collector"); + } + _write(chunk, encoding, callback) { + this.bufferedBytes.push(chunk); + callback(); + } +}; + +// src/stream-collector/index.ts +var streamCollector = /* @__PURE__ */ __name((stream) => { + if (isReadableStreamInstance(stream)) { + return collectReadableStream(stream); + } + return new Promise((resolve, reject) => { + const collector = new Collector(); + stream.pipe(collector); + stream.on("error", (err) => { + collector.end(); + reject(err); + }); + collector.on("error", reject); + collector.on("finish", function() { + const bytes = new Uint8Array(Buffer.concat(this.bufferedBytes)); + resolve(bytes); + }); + }); +}, "streamCollector"); +var isReadableStreamInstance = /* @__PURE__ 
*/ __name((stream) => typeof ReadableStream === "function" && stream instanceof ReadableStream, "isReadableStreamInstance"); +async function collectReadableStream(stream) { + const chunks = []; + const reader = stream.getReader(); + let isDone = false; + let length = 0; + while (!isDone) { + const { done, value } = await reader.read(); + if (value) { + chunks.push(value); + length += value.length; + } + isDone = done; + } + const collected = new Uint8Array(length); + let offset = 0; + for (const chunk of chunks) { + collected.set(chunk, offset); + offset += chunk.length; + } + return collected; +} +__name(collectReadableStream, "collectReadableStream"); +// Annotate the CommonJS export names for ESM import in node: + +0 && (module.exports = { + DEFAULT_REQUEST_TIMEOUT, + NodeHttpHandler, + NodeHttp2Handler, + streamCollector +}); + diff --git a/amplify/functions/deleteDocument/node_modules/@smithy/node-http-handler/dist-cjs/node-http-handler.js b/amplify/functions/deleteDocument/node_modules/@smithy/node-http-handler/dist-cjs/node-http-handler.js new file mode 100644 index 0000000..532e610 --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@smithy/node-http-handler/dist-cjs/node-http-handler.js @@ -0,0 +1 @@ +module.exports = require("./index.js"); \ No newline at end of file diff --git a/amplify/functions/deleteDocument/node_modules/@smithy/node-http-handler/dist-cjs/node-http2-connection-manager.js b/amplify/functions/deleteDocument/node_modules/@smithy/node-http-handler/dist-cjs/node-http2-connection-manager.js new file mode 100644 index 0000000..532e610 --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@smithy/node-http-handler/dist-cjs/node-http2-connection-manager.js @@ -0,0 +1 @@ +module.exports = require("./index.js"); \ No newline at end of file diff --git a/amplify/functions/deleteDocument/node_modules/@smithy/node-http-handler/dist-cjs/node-http2-connection-pool.js 
b/amplify/functions/deleteDocument/node_modules/@smithy/node-http-handler/dist-cjs/node-http2-connection-pool.js new file mode 100644 index 0000000..532e610 --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@smithy/node-http-handler/dist-cjs/node-http2-connection-pool.js @@ -0,0 +1 @@ +module.exports = require("./index.js"); \ No newline at end of file diff --git a/amplify/functions/deleteDocument/node_modules/@smithy/node-http-handler/dist-cjs/node-http2-handler.js b/amplify/functions/deleteDocument/node_modules/@smithy/node-http-handler/dist-cjs/node-http2-handler.js new file mode 100644 index 0000000..532e610 --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@smithy/node-http-handler/dist-cjs/node-http2-handler.js @@ -0,0 +1 @@ +module.exports = require("./index.js"); \ No newline at end of file diff --git a/amplify/functions/deleteDocument/node_modules/@smithy/node-http-handler/dist-cjs/readable.mock.js b/amplify/functions/deleteDocument/node_modules/@smithy/node-http-handler/dist-cjs/readable.mock.js new file mode 100644 index 0000000..532e610 --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@smithy/node-http-handler/dist-cjs/readable.mock.js @@ -0,0 +1 @@ +module.exports = require("./index.js"); \ No newline at end of file diff --git a/amplify/functions/deleteDocument/node_modules/@smithy/node-http-handler/dist-cjs/server.mock.js b/amplify/functions/deleteDocument/node_modules/@smithy/node-http-handler/dist-cjs/server.mock.js new file mode 100644 index 0000000..532e610 --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@smithy/node-http-handler/dist-cjs/server.mock.js @@ -0,0 +1 @@ +module.exports = require("./index.js"); \ No newline at end of file diff --git a/amplify/functions/deleteDocument/node_modules/@smithy/node-http-handler/dist-cjs/set-connection-timeout.js b/amplify/functions/deleteDocument/node_modules/@smithy/node-http-handler/dist-cjs/set-connection-timeout.js new file mode 100644 
index 0000000..532e610 --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@smithy/node-http-handler/dist-cjs/set-connection-timeout.js @@ -0,0 +1 @@ +module.exports = require("./index.js"); \ No newline at end of file diff --git a/amplify/functions/deleteDocument/node_modules/@smithy/node-http-handler/dist-cjs/set-socket-keep-alive.js b/amplify/functions/deleteDocument/node_modules/@smithy/node-http-handler/dist-cjs/set-socket-keep-alive.js new file mode 100644 index 0000000..532e610 --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@smithy/node-http-handler/dist-cjs/set-socket-keep-alive.js @@ -0,0 +1 @@ +module.exports = require("./index.js"); \ No newline at end of file diff --git a/amplify/functions/deleteDocument/node_modules/@smithy/node-http-handler/dist-cjs/set-socket-timeout.js b/amplify/functions/deleteDocument/node_modules/@smithy/node-http-handler/dist-cjs/set-socket-timeout.js new file mode 100644 index 0000000..532e610 --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@smithy/node-http-handler/dist-cjs/set-socket-timeout.js @@ -0,0 +1 @@ +module.exports = require("./index.js"); \ No newline at end of file diff --git a/amplify/functions/deleteDocument/node_modules/@smithy/node-http-handler/dist-cjs/stream-collector/collector.js b/amplify/functions/deleteDocument/node_modules/@smithy/node-http-handler/dist-cjs/stream-collector/collector.js new file mode 100644 index 0000000..0440577 --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@smithy/node-http-handler/dist-cjs/stream-collector/collector.js @@ -0,0 +1 @@ +module.exports = require("../index.js"); \ No newline at end of file diff --git a/amplify/functions/deleteDocument/node_modules/@smithy/node-http-handler/dist-cjs/stream-collector/index.js b/amplify/functions/deleteDocument/node_modules/@smithy/node-http-handler/dist-cjs/stream-collector/index.js new file mode 100644 index 0000000..0440577 --- /dev/null +++ 
b/amplify/functions/deleteDocument/node_modules/@smithy/node-http-handler/dist-cjs/stream-collector/index.js @@ -0,0 +1 @@ +module.exports = require("../index.js"); \ No newline at end of file diff --git a/amplify/functions/deleteDocument/node_modules/@smithy/node-http-handler/dist-cjs/stream-collector/readable.mock.js b/amplify/functions/deleteDocument/node_modules/@smithy/node-http-handler/dist-cjs/stream-collector/readable.mock.js new file mode 100644 index 0000000..0440577 --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@smithy/node-http-handler/dist-cjs/stream-collector/readable.mock.js @@ -0,0 +1 @@ +module.exports = require("../index.js"); \ No newline at end of file diff --git a/amplify/functions/deleteDocument/node_modules/@smithy/node-http-handler/dist-cjs/timing.js b/amplify/functions/deleteDocument/node_modules/@smithy/node-http-handler/dist-cjs/timing.js new file mode 100644 index 0000000..532e610 --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@smithy/node-http-handler/dist-cjs/timing.js @@ -0,0 +1 @@ +module.exports = require("./index.js"); \ No newline at end of file diff --git a/amplify/functions/deleteDocument/node_modules/@smithy/node-http-handler/dist-cjs/write-request-body.js b/amplify/functions/deleteDocument/node_modules/@smithy/node-http-handler/dist-cjs/write-request-body.js new file mode 100644 index 0000000..532e610 --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@smithy/node-http-handler/dist-cjs/write-request-body.js @@ -0,0 +1 @@ +module.exports = require("./index.js"); \ No newline at end of file diff --git a/amplify/functions/deleteDocument/node_modules/@smithy/node-http-handler/dist-es/constants.js b/amplify/functions/deleteDocument/node_modules/@smithy/node-http-handler/dist-es/constants.js new file mode 100644 index 0000000..0619d28 --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@smithy/node-http-handler/dist-es/constants.js @@ -0,0 +1 @@ +export const 
NODEJS_TIMEOUT_ERROR_CODES = ["ECONNRESET", "EPIPE", "ETIMEDOUT"]; diff --git a/amplify/functions/deleteDocument/node_modules/@smithy/node-http-handler/dist-es/get-transformed-headers.js b/amplify/functions/deleteDocument/node_modules/@smithy/node-http-handler/dist-es/get-transformed-headers.js new file mode 100644 index 0000000..562883c --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@smithy/node-http-handler/dist-es/get-transformed-headers.js @@ -0,0 +1,9 @@ +const getTransformedHeaders = (headers) => { + const transformedHeaders = {}; + for (const name of Object.keys(headers)) { + const headerValues = headers[name]; + transformedHeaders[name] = Array.isArray(headerValues) ? headerValues.join(",") : headerValues; + } + return transformedHeaders; +}; +export { getTransformedHeaders }; diff --git a/amplify/functions/deleteDocument/node_modules/@smithy/node-http-handler/dist-es/index.js b/amplify/functions/deleteDocument/node_modules/@smithy/node-http-handler/dist-es/index.js new file mode 100644 index 0000000..09c0b9a --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@smithy/node-http-handler/dist-es/index.js @@ -0,0 +1,3 @@ +export * from "./node-http-handler"; +export * from "./node-http2-handler"; +export * from "./stream-collector"; diff --git a/amplify/functions/deleteDocument/node_modules/@smithy/node-http-handler/dist-es/node-http-handler.js b/amplify/functions/deleteDocument/node_modules/@smithy/node-http-handler/dist-es/node-http-handler.js new file mode 100644 index 0000000..f0ca1e7 --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@smithy/node-http-handler/dist-es/node-http-handler.js @@ -0,0 +1,209 @@ +import { HttpResponse } from "@smithy/protocol-http"; +import { buildQueryString } from "@smithy/querystring-builder"; +import { Agent as hAgent, request as hRequest } from "http"; +import { Agent as hsAgent, request as hsRequest } from "https"; +import { NODEJS_TIMEOUT_ERROR_CODES } from "./constants"; 
+import { getTransformedHeaders } from "./get-transformed-headers"; +import { setConnectionTimeout } from "./set-connection-timeout"; +import { setSocketKeepAlive } from "./set-socket-keep-alive"; +import { setSocketTimeout } from "./set-socket-timeout"; +import { timing } from "./timing"; +import { writeRequestBody } from "./write-request-body"; +export const DEFAULT_REQUEST_TIMEOUT = 0; +export class NodeHttpHandler { + static create(instanceOrOptions) { + if (typeof instanceOrOptions?.handle === "function") { + return instanceOrOptions; + } + return new NodeHttpHandler(instanceOrOptions); + } + static checkSocketUsage(agent, socketWarningTimestamp, logger = console) { + const { sockets, requests, maxSockets } = agent; + if (typeof maxSockets !== "number" || maxSockets === Infinity) { + return socketWarningTimestamp; + } + const interval = 15000; + if (Date.now() - interval < socketWarningTimestamp) { + return socketWarningTimestamp; + } + if (sockets && requests) { + for (const origin in sockets) { + const socketsInUse = sockets[origin]?.length ?? 0; + const requestsEnqueued = requests[origin]?.length ?? 0; + if (socketsInUse >= maxSockets && requestsEnqueued >= 2 * maxSockets) { + logger?.warn?.(`@smithy/node-http-handler:WARN - socket usage at capacity=${socketsInUse} and ${requestsEnqueued} additional requests are enqueued. 
+See https://docs.aws.amazon.com/sdk-for-javascript/v3/developer-guide/node-configuring-maxsockets.html +or increase socketAcquisitionWarningTimeout=(millis) in the NodeHttpHandler config.`); + return Date.now(); + } + } + } + return socketWarningTimestamp; + } + constructor(options) { + this.socketWarningTimestamp = 0; + this.metadata = { handlerProtocol: "http/1.1" }; + this.configProvider = new Promise((resolve, reject) => { + if (typeof options === "function") { + options() + .then((_options) => { + resolve(this.resolveDefaultConfig(_options)); + }) + .catch(reject); + } + else { + resolve(this.resolveDefaultConfig(options)); + } + }); + } + resolveDefaultConfig(options) { + const { requestTimeout, connectionTimeout, socketTimeout, socketAcquisitionWarningTimeout, httpAgent, httpsAgent } = options || {}; + const keepAlive = true; + const maxSockets = 50; + return { + connectionTimeout, + requestTimeout: requestTimeout ?? socketTimeout, + socketAcquisitionWarningTimeout, + httpAgent: (() => { + if (httpAgent instanceof hAgent || typeof httpAgent?.destroy === "function") { + return httpAgent; + } + return new hAgent({ keepAlive, maxSockets, ...httpAgent }); + })(), + httpsAgent: (() => { + if (httpsAgent instanceof hsAgent || typeof httpsAgent?.destroy === "function") { + return httpsAgent; + } + return new hsAgent({ keepAlive, maxSockets, ...httpsAgent }); + })(), + logger: console, + }; + } + destroy() { + this.config?.httpAgent?.destroy(); + this.config?.httpsAgent?.destroy(); + } + async handle(request, { abortSignal } = {}) { + if (!this.config) { + this.config = await this.configProvider; + } + return new Promise((_resolve, _reject) => { + let writeRequestBodyPromise = undefined; + const timeouts = []; + const resolve = async (arg) => { + await writeRequestBodyPromise; + timeouts.forEach(timing.clearTimeout); + _resolve(arg); + }; + const reject = async (arg) => { + await writeRequestBodyPromise; + timeouts.forEach(timing.clearTimeout); + _reject(arg); + }; 
+ if (!this.config) { + throw new Error("Node HTTP request handler config is not resolved"); + } + if (abortSignal?.aborted) { + const abortError = new Error("Request aborted"); + abortError.name = "AbortError"; + reject(abortError); + return; + } + const isSSL = request.protocol === "https:"; + const agent = isSSL ? this.config.httpsAgent : this.config.httpAgent; + timeouts.push(timing.setTimeout(() => { + this.socketWarningTimestamp = NodeHttpHandler.checkSocketUsage(agent, this.socketWarningTimestamp, this.config.logger); + }, this.config.socketAcquisitionWarningTimeout ?? + (this.config.requestTimeout ?? 2000) + (this.config.connectionTimeout ?? 1000))); + const queryString = buildQueryString(request.query || {}); + let auth = undefined; + if (request.username != null || request.password != null) { + const username = request.username ?? ""; + const password = request.password ?? ""; + auth = `${username}:${password}`; + } + let path = request.path; + if (queryString) { + path += `?${queryString}`; + } + if (request.fragment) { + path += `#${request.fragment}`; + } + let hostname = request.hostname ?? ""; + if (hostname[0] === "[" && hostname.endsWith("]")) { + hostname = request.hostname.slice(1, -1); + } + else { + hostname = request.hostname; + } + const nodeHttpsOptions = { + headers: request.headers, + host: hostname, + method: request.method, + path, + port: request.port, + agent, + auth, + }; + const requestFunc = isSSL ? 
hsRequest : hRequest; + const req = requestFunc(nodeHttpsOptions, (res) => { + const httpResponse = new HttpResponse({ + statusCode: res.statusCode || -1, + reason: res.statusMessage, + headers: getTransformedHeaders(res.headers), + body: res, + }); + resolve({ response: httpResponse }); + }); + req.on("error", (err) => { + if (NODEJS_TIMEOUT_ERROR_CODES.includes(err.code)) { + reject(Object.assign(err, { name: "TimeoutError" })); + } + else { + reject(err); + } + }); + if (abortSignal) { + const onAbort = () => { + req.destroy(); + const abortError = new Error("Request aborted"); + abortError.name = "AbortError"; + reject(abortError); + }; + if (typeof abortSignal.addEventListener === "function") { + const signal = abortSignal; + signal.addEventListener("abort", onAbort, { once: true }); + req.once("close", () => signal.removeEventListener("abort", onAbort)); + } + else { + abortSignal.onabort = onAbort; + } + } + timeouts.push(setConnectionTimeout(req, reject, this.config.connectionTimeout)); + timeouts.push(setSocketTimeout(req, reject, this.config.requestTimeout)); + const httpAgent = nodeHttpsOptions.agent; + if (typeof httpAgent === "object" && "keepAlive" in httpAgent) { + timeouts.push(setSocketKeepAlive(req, { + keepAlive: httpAgent.keepAlive, + keepAliveMsecs: httpAgent.keepAliveMsecs, + })); + } + writeRequestBodyPromise = writeRequestBody(req, request, this.config.requestTimeout).catch((e) => { + timeouts.forEach(timing.clearTimeout); + return _reject(e); + }); + }); + } + updateHttpClientConfig(key, value) { + this.config = undefined; + this.configProvider = this.configProvider.then((config) => { + return { + ...config, + [key]: value, + }; + }); + } + httpHandlerConfigs() { + return this.config ?? 
{}; + } +} diff --git a/amplify/functions/deleteDocument/node_modules/@smithy/node-http-handler/dist-es/node-http2-connection-manager.js b/amplify/functions/deleteDocument/node_modules/@smithy/node-http-handler/dist-es/node-http2-connection-manager.js new file mode 100644 index 0000000..206d94f --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@smithy/node-http-handler/dist-es/node-http2-connection-manager.js @@ -0,0 +1,86 @@ +import http2 from "http2"; +import { NodeHttp2ConnectionPool } from "./node-http2-connection-pool"; +export class NodeHttp2ConnectionManager { + constructor(config) { + this.sessionCache = new Map(); + this.config = config; + if (this.config.maxConcurrency && this.config.maxConcurrency <= 0) { + throw new RangeError("maxConcurrency must be greater than zero."); + } + } + lease(requestContext, connectionConfiguration) { + const url = this.getUrlString(requestContext); + const existingPool = this.sessionCache.get(url); + if (existingPool) { + const existingSession = existingPool.poll(); + if (existingSession && !this.config.disableConcurrency) { + return existingSession; + } + } + const session = http2.connect(url); + if (this.config.maxConcurrency) { + session.settings({ maxConcurrentStreams: this.config.maxConcurrency }, (err) => { + if (err) { + throw new Error("Fail to set maxConcurrentStreams to " + + this.config.maxConcurrency + + "when creating new session for " + + requestContext.destination.toString()); + } + }); + } + session.unref(); + const destroySessionCb = () => { + session.destroy(); + this.deleteSession(url, session); + }; + session.on("goaway", destroySessionCb); + session.on("error", destroySessionCb); + session.on("frameError", destroySessionCb); + session.on("close", () => this.deleteSession(url, session)); + if (connectionConfiguration.requestTimeout) { + session.setTimeout(connectionConfiguration.requestTimeout, destroySessionCb); + } + const connectionPool = this.sessionCache.get(url) || new 
NodeHttp2ConnectionPool(); + connectionPool.offerLast(session); + this.sessionCache.set(url, connectionPool); + return session; + } + deleteSession(authority, session) { + const existingConnectionPool = this.sessionCache.get(authority); + if (!existingConnectionPool) { + return; + } + if (!existingConnectionPool.contains(session)) { + return; + } + existingConnectionPool.remove(session); + this.sessionCache.set(authority, existingConnectionPool); + } + release(requestContext, session) { + const cacheKey = this.getUrlString(requestContext); + this.sessionCache.get(cacheKey)?.offerLast(session); + } + destroy() { + for (const [key, connectionPool] of this.sessionCache) { + for (const session of connectionPool) { + if (!session.destroyed) { + session.destroy(); + } + connectionPool.remove(session); + } + this.sessionCache.delete(key); + } + } + setMaxConcurrentStreams(maxConcurrentStreams) { + if (maxConcurrentStreams && maxConcurrentStreams <= 0) { + throw new RangeError("maxConcurrentStreams must be greater than zero."); + } + this.config.maxConcurrency = maxConcurrentStreams; + } + setDisableConcurrentStreams(disableConcurrentStreams) { + this.config.disableConcurrency = disableConcurrentStreams; + } + getUrlString(request) { + return request.destination.toString(); + } +} diff --git a/amplify/functions/deleteDocument/node_modules/@smithy/node-http-handler/dist-es/node-http2-connection-pool.js b/amplify/functions/deleteDocument/node_modules/@smithy/node-http-handler/dist-es/node-http2-connection-pool.js new file mode 100644 index 0000000..429eb49 --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@smithy/node-http-handler/dist-es/node-http2-connection-pool.js @@ -0,0 +1,32 @@ +export class NodeHttp2ConnectionPool { + constructor(sessions) { + this.sessions = []; + this.sessions = sessions ?? 
[]; + } + poll() { + if (this.sessions.length > 0) { + return this.sessions.shift(); + } + } + offerLast(session) { + this.sessions.push(session); + } + contains(session) { + return this.sessions.includes(session); + } + remove(session) { + this.sessions = this.sessions.filter((s) => s !== session); + } + [Symbol.iterator]() { + return this.sessions[Symbol.iterator](); + } + destroy(connection) { + for (const session of this.sessions) { + if (session === connection) { + if (!session.destroyed) { + session.destroy(); + } + } + } + } +} diff --git a/amplify/functions/deleteDocument/node_modules/@smithy/node-http-handler/dist-es/node-http2-handler.js b/amplify/functions/deleteDocument/node_modules/@smithy/node-http-handler/dist-es/node-http2-handler.js new file mode 100644 index 0000000..b68601e --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@smithy/node-http-handler/dist-es/node-http2-handler.js @@ -0,0 +1,167 @@ +import { HttpResponse } from "@smithy/protocol-http"; +import { buildQueryString } from "@smithy/querystring-builder"; +import { constants } from "http2"; +import { getTransformedHeaders } from "./get-transformed-headers"; +import { NodeHttp2ConnectionManager } from "./node-http2-connection-manager"; +import { writeRequestBody } from "./write-request-body"; +export class NodeHttp2Handler { + static create(instanceOrOptions) { + if (typeof instanceOrOptions?.handle === "function") { + return instanceOrOptions; + } + return new NodeHttp2Handler(instanceOrOptions); + } + constructor(options) { + this.metadata = { handlerProtocol: "h2" }; + this.connectionManager = new NodeHttp2ConnectionManager({}); + this.configProvider = new Promise((resolve, reject) => { + if (typeof options === "function") { + options() + .then((opts) => { + resolve(opts || {}); + }) + .catch(reject); + } + else { + resolve(options || {}); + } + }); + } + destroy() { + this.connectionManager.destroy(); + } + async handle(request, { abortSignal } = {}) { + if 
(!this.config) { + this.config = await this.configProvider; + this.connectionManager.setDisableConcurrentStreams(this.config.disableConcurrentStreams || false); + if (this.config.maxConcurrentStreams) { + this.connectionManager.setMaxConcurrentStreams(this.config.maxConcurrentStreams); + } + } + const { requestTimeout, disableConcurrentStreams } = this.config; + return new Promise((_resolve, _reject) => { + let fulfilled = false; + let writeRequestBodyPromise = undefined; + const resolve = async (arg) => { + await writeRequestBodyPromise; + _resolve(arg); + }; + const reject = async (arg) => { + await writeRequestBodyPromise; + _reject(arg); + }; + if (abortSignal?.aborted) { + fulfilled = true; + const abortError = new Error("Request aborted"); + abortError.name = "AbortError"; + reject(abortError); + return; + } + const { hostname, method, port, protocol, query } = request; + let auth = ""; + if (request.username != null || request.password != null) { + const username = request.username ?? ""; + const password = request.password ?? ""; + auth = `${username}:${password}@`; + } + const authority = `${protocol}//${auth}${hostname}${port ? 
`:${port}` : ""}`; + const requestContext = { destination: new URL(authority) }; + const session = this.connectionManager.lease(requestContext, { + requestTimeout: this.config?.sessionTimeout, + disableConcurrentStreams: disableConcurrentStreams || false, + }); + const rejectWithDestroy = (err) => { + if (disableConcurrentStreams) { + this.destroySession(session); + } + fulfilled = true; + reject(err); + }; + const queryString = buildQueryString(query || {}); + let path = request.path; + if (queryString) { + path += `?${queryString}`; + } + if (request.fragment) { + path += `#${request.fragment}`; + } + const req = session.request({ + ...request.headers, + [constants.HTTP2_HEADER_PATH]: path, + [constants.HTTP2_HEADER_METHOD]: method, + }); + session.ref(); + req.on("response", (headers) => { + const httpResponse = new HttpResponse({ + statusCode: headers[":status"] || -1, + headers: getTransformedHeaders(headers), + body: req, + }); + fulfilled = true; + resolve({ response: httpResponse }); + if (disableConcurrentStreams) { + session.close(); + this.connectionManager.deleteSession(authority, session); + } + }); + if (requestTimeout) { + req.setTimeout(requestTimeout, () => { + req.close(); + const timeoutError = new Error(`Stream timed out because of no activity for ${requestTimeout} ms`); + timeoutError.name = "TimeoutError"; + rejectWithDestroy(timeoutError); + }); + } + if (abortSignal) { + const onAbort = () => { + req.close(); + const abortError = new Error("Request aborted"); + abortError.name = "AbortError"; + rejectWithDestroy(abortError); + }; + if (typeof abortSignal.addEventListener === "function") { + const signal = abortSignal; + signal.addEventListener("abort", onAbort, { once: true }); + req.once("close", () => signal.removeEventListener("abort", onAbort)); + } + else { + abortSignal.onabort = onAbort; + } + } + req.on("frameError", (type, code, id) => { + rejectWithDestroy(new Error(`Frame type id ${type} in stream id ${id} has failed with code 
${code}.`)); + }); + req.on("error", rejectWithDestroy); + req.on("aborted", () => { + rejectWithDestroy(new Error(`HTTP/2 stream is abnormally aborted in mid-communication with result code ${req.rstCode}.`)); + }); + req.on("close", () => { + session.unref(); + if (disableConcurrentStreams) { + session.destroy(); + } + if (!fulfilled) { + rejectWithDestroy(new Error("Unexpected error: http2 request did not get a response")); + } + }); + writeRequestBodyPromise = writeRequestBody(req, request, requestTimeout); + }); + } + updateHttpClientConfig(key, value) { + this.config = undefined; + this.configProvider = this.configProvider.then((config) => { + return { + ...config, + [key]: value, + }; + }); + } + httpHandlerConfigs() { + return this.config ?? {}; + } + destroySession(session) { + if (!session.destroyed) { + session.destroy(); + } + } +} diff --git a/amplify/functions/deleteDocument/node_modules/@smithy/node-http-handler/dist-es/readable.mock.js b/amplify/functions/deleteDocument/node_modules/@smithy/node-http-handler/dist-es/readable.mock.js new file mode 100644 index 0000000..41fb0b6 --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@smithy/node-http-handler/dist-es/readable.mock.js @@ -0,0 +1,19 @@ +import { Readable } from "stream"; +export class ReadFromBuffers extends Readable { + constructor(options) { + super(options); + this.numBuffersRead = 0; + this.buffersToRead = options.buffers; + this.errorAfter = typeof options.errorAfter === "number" ? 
options.errorAfter : -1; + } + _read() { + if (this.errorAfter !== -1 && this.errorAfter === this.numBuffersRead) { + this.emit("error", new Error("Mock Error")); + return; + } + if (this.numBuffersRead >= this.buffersToRead.length) { + return this.push(null); + } + return this.push(this.buffersToRead[this.numBuffersRead++]); + } +} diff --git a/amplify/functions/deleteDocument/node_modules/@smithy/node-http-handler/dist-es/server.mock.js b/amplify/functions/deleteDocument/node_modules/@smithy/node-http-handler/dist-es/server.mock.js new file mode 100644 index 0000000..6a31adf --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@smithy/node-http-handler/dist-es/server.mock.js @@ -0,0 +1,88 @@ +import { readFileSync } from "fs"; +import { createServer as createHttpServer } from "http"; +import { createServer as createHttp2Server } from "http2"; +import { createServer as createHttpsServer } from "https"; +import { join } from "path"; +import { Readable } from "stream"; +import { timing } from "./timing"; +const fixturesDir = join(__dirname, "..", "fixtures"); +const setResponseHeaders = (response, headers) => { + for (const [key, value] of Object.entries(headers)) { + response.setHeader(key, value); + } +}; +const setResponseBody = (response, body) => { + if (body instanceof Readable) { + body.pipe(response); + } + else { + response.end(body); + } +}; +export const createResponseFunction = (httpResp) => (request, response) => { + response.statusCode = httpResp.statusCode; + if (httpResp.reason) { + response.statusMessage = httpResp.reason; + } + setResponseHeaders(response, httpResp.headers); + setResponseBody(response, httpResp.body); +}; +export const createResponseFunctionWithDelay = (httpResp, delay) => (request, response) => { + response.statusCode = httpResp.statusCode; + if (httpResp.reason) { + response.statusMessage = httpResp.reason; + } + setResponseHeaders(response, httpResp.headers); + timing.setTimeout(() => setResponseBody(response, 
httpResp.body), delay); +}; +export const createContinueResponseFunction = (httpResp) => (request, response) => { + response.writeContinue(); + timing.setTimeout(() => { + createResponseFunction(httpResp)(request, response); + }, 100); +}; +export const createMockHttpsServer = () => { + const server = createHttpsServer({ + key: readFileSync(join(fixturesDir, "test-server-key.pem")), + cert: readFileSync(join(fixturesDir, "test-server-cert.pem")), + }); + return server; +}; +export const createMockHttpServer = () => { + const server = createHttpServer(); + return server; +}; +export const createMockHttp2Server = () => { + const server = createHttp2Server(); + return server; +}; +export const createMirrorResponseFunction = (httpResp) => (request, response) => { + const bufs = []; + request.on("data", (chunk) => { + bufs.push(chunk); + }); + request.on("end", () => { + response.statusCode = httpResp.statusCode; + setResponseHeaders(response, httpResp.headers); + setResponseBody(response, Buffer.concat(bufs)); + }); + request.on("error", (err) => { + response.statusCode = 500; + setResponseHeaders(response, httpResp.headers); + setResponseBody(response, err.message); + }); +}; +export const getResponseBody = (response) => { + return new Promise((resolve, reject) => { + const bufs = []; + response.body.on("data", function (d) { + bufs.push(d); + }); + response.body.on("end", function () { + resolve(Buffer.concat(bufs).toString()); + }); + response.body.on("error", (err) => { + reject(err); + }); + }); +}; diff --git a/amplify/functions/deleteDocument/node_modules/@smithy/node-http-handler/dist-es/set-connection-timeout.js b/amplify/functions/deleteDocument/node_modules/@smithy/node-http-handler/dist-es/set-connection-timeout.js new file mode 100644 index 0000000..587532e --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@smithy/node-http-handler/dist-es/set-connection-timeout.js @@ -0,0 +1,36 @@ +import { timing } from "./timing"; +const 
DEFER_EVENT_LISTENER_TIME = 1000; +export const setConnectionTimeout = (request, reject, timeoutInMs = 0) => { + if (!timeoutInMs) { + return -1; + } + const registerTimeout = (offset) => { + const timeoutId = timing.setTimeout(() => { + request.destroy(); + reject(Object.assign(new Error(`Socket timed out without establishing a connection within ${timeoutInMs} ms`), { + name: "TimeoutError", + })); + }, timeoutInMs - offset); + const doWithSocket = (socket) => { + if (socket?.connecting) { + socket.on("connect", () => { + timing.clearTimeout(timeoutId); + }); + } + else { + timing.clearTimeout(timeoutId); + } + }; + if (request.socket) { + doWithSocket(request.socket); + } + else { + request.on("socket", doWithSocket); + } + }; + if (timeoutInMs < 2000) { + registerTimeout(0); + return 0; + } + return timing.setTimeout(registerTimeout.bind(null, DEFER_EVENT_LISTENER_TIME), DEFER_EVENT_LISTENER_TIME); +}; diff --git a/amplify/functions/deleteDocument/node_modules/@smithy/node-http-handler/dist-es/set-socket-keep-alive.js b/amplify/functions/deleteDocument/node_modules/@smithy/node-http-handler/dist-es/set-socket-keep-alive.js new file mode 100644 index 0000000..18391a8 --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@smithy/node-http-handler/dist-es/set-socket-keep-alive.js @@ -0,0 +1,22 @@ +import { timing } from "./timing"; +const DEFER_EVENT_LISTENER_TIME = 3000; +export const setSocketKeepAlive = (request, { keepAlive, keepAliveMsecs }, deferTimeMs = DEFER_EVENT_LISTENER_TIME) => { + if (keepAlive !== true) { + return -1; + } + const registerListener = () => { + if (request.socket) { + request.socket.setKeepAlive(keepAlive, keepAliveMsecs || 0); + } + else { + request.on("socket", (socket) => { + socket.setKeepAlive(keepAlive, keepAliveMsecs || 0); + }); + } + }; + if (deferTimeMs === 0) { + registerListener(); + return 0; + } + return timing.setTimeout(registerListener, deferTimeMs); +}; diff --git 
a/amplify/functions/deleteDocument/node_modules/@smithy/node-http-handler/dist-es/set-socket-timeout.js b/amplify/functions/deleteDocument/node_modules/@smithy/node-http-handler/dist-es/set-socket-timeout.js new file mode 100644 index 0000000..5c4456c --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@smithy/node-http-handler/dist-es/set-socket-timeout.js @@ -0,0 +1,24 @@ +import { DEFAULT_REQUEST_TIMEOUT } from "./node-http-handler"; +import { timing } from "./timing"; +const DEFER_EVENT_LISTENER_TIME = 3000; +export const setSocketTimeout = (request, reject, timeoutInMs = DEFAULT_REQUEST_TIMEOUT) => { + const registerTimeout = (offset) => { + const timeout = timeoutInMs - offset; + const onTimeout = () => { + request.destroy(); + reject(Object.assign(new Error(`Connection timed out after ${timeoutInMs} ms`), { name: "TimeoutError" })); + }; + if (request.socket) { + request.socket.setTimeout(timeout, onTimeout); + request.on("close", () => request.socket?.removeListener("timeout", onTimeout)); + } + else { + request.setTimeout(timeout, onTimeout); + } + }; + if (0 < timeoutInMs && timeoutInMs < 6000) { + registerTimeout(0); + return 0; + } + return timing.setTimeout(registerTimeout.bind(null, timeoutInMs === 0 ? 
0 : DEFER_EVENT_LISTENER_TIME), DEFER_EVENT_LISTENER_TIME); +}; diff --git a/amplify/functions/deleteDocument/node_modules/@smithy/node-http-handler/dist-es/stream-collector/collector.js b/amplify/functions/deleteDocument/node_modules/@smithy/node-http-handler/dist-es/stream-collector/collector.js new file mode 100644 index 0000000..c3737e9 --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@smithy/node-http-handler/dist-es/stream-collector/collector.js @@ -0,0 +1,11 @@ +import { Writable } from "stream"; +export class Collector extends Writable { + constructor() { + super(...arguments); + this.bufferedBytes = []; + } + _write(chunk, encoding, callback) { + this.bufferedBytes.push(chunk); + callback(); + } +} diff --git a/amplify/functions/deleteDocument/node_modules/@smithy/node-http-handler/dist-es/stream-collector/index.js b/amplify/functions/deleteDocument/node_modules/@smithy/node-http-handler/dist-es/stream-collector/index.js new file mode 100644 index 0000000..8ff09c0 --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@smithy/node-http-handler/dist-es/stream-collector/index.js @@ -0,0 +1,41 @@ +import { Collector } from "./collector"; +export const streamCollector = (stream) => { + if (isReadableStreamInstance(stream)) { + return collectReadableStream(stream); + } + return new Promise((resolve, reject) => { + const collector = new Collector(); + stream.pipe(collector); + stream.on("error", (err) => { + collector.end(); + reject(err); + }); + collector.on("error", reject); + collector.on("finish", function () { + const bytes = new Uint8Array(Buffer.concat(this.bufferedBytes)); + resolve(bytes); + }); + }); +}; +const isReadableStreamInstance = (stream) => typeof ReadableStream === "function" && stream instanceof ReadableStream; +async function collectReadableStream(stream) { + const chunks = []; + const reader = stream.getReader(); + let isDone = false; + let length = 0; + while (!isDone) { + const { done, value } = await 
reader.read(); + if (value) { + chunks.push(value); + length += value.length; + } + isDone = done; + } + const collected = new Uint8Array(length); + let offset = 0; + for (const chunk of chunks) { + collected.set(chunk, offset); + offset += chunk.length; + } + return collected; +} diff --git a/amplify/functions/deleteDocument/node_modules/@smithy/node-http-handler/dist-es/stream-collector/readable.mock.js b/amplify/functions/deleteDocument/node_modules/@smithy/node-http-handler/dist-es/stream-collector/readable.mock.js new file mode 100644 index 0000000..2f653c5 --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@smithy/node-http-handler/dist-es/stream-collector/readable.mock.js @@ -0,0 +1,19 @@ +import { Readable } from "stream"; +export class ReadFromBuffers extends Readable { + constructor(options) { + super(options); + this.numBuffersRead = 0; + this.buffersToRead = options.buffers; + this.errorAfter = typeof options.errorAfter === "number" ? options.errorAfter : -1; + } + _read(size) { + if (this.errorAfter !== -1 && this.errorAfter === this.numBuffersRead) { + this.emit("error", new Error("Mock Error")); + return; + } + if (this.numBuffersRead >= this.buffersToRead.length) { + return this.push(null); + } + return this.push(this.buffersToRead[this.numBuffersRead++]); + } +} diff --git a/amplify/functions/deleteDocument/node_modules/@smithy/node-http-handler/dist-es/timing.js b/amplify/functions/deleteDocument/node_modules/@smithy/node-http-handler/dist-es/timing.js new file mode 100644 index 0000000..792ba48 --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@smithy/node-http-handler/dist-es/timing.js @@ -0,0 +1,4 @@ +export const timing = { + setTimeout: (cb, ms) => setTimeout(cb, ms), + clearTimeout: (timeoutId) => clearTimeout(timeoutId), +}; diff --git a/amplify/functions/deleteDocument/node_modules/@smithy/node-http-handler/dist-es/write-request-body.js 
b/amplify/functions/deleteDocument/node_modules/@smithy/node-http-handler/dist-es/write-request-body.js new file mode 100644 index 0000000..36e15f9 --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@smithy/node-http-handler/dist-es/write-request-body.js @@ -0,0 +1,56 @@ +import { Readable } from "stream"; +import { timing } from "./timing"; +const MIN_WAIT_TIME = 6000; +export async function writeRequestBody(httpRequest, request, maxContinueTimeoutMs = MIN_WAIT_TIME) { + const headers = request.headers ?? {}; + const expect = headers["Expect"] || headers["expect"]; + let timeoutId = -1; + let sendBody = true; + if (expect === "100-continue") { + sendBody = await Promise.race([ + new Promise((resolve) => { + timeoutId = Number(timing.setTimeout(() => resolve(true), Math.max(MIN_WAIT_TIME, maxContinueTimeoutMs))); + }), + new Promise((resolve) => { + httpRequest.on("continue", () => { + timing.clearTimeout(timeoutId); + resolve(true); + }); + httpRequest.on("response", () => { + timing.clearTimeout(timeoutId); + resolve(false); + }); + httpRequest.on("error", () => { + timing.clearTimeout(timeoutId); + resolve(false); + }); + }), + ]); + } + if (sendBody) { + writeBody(httpRequest, request.body); + } +} +function writeBody(httpRequest, body) { + if (body instanceof Readable) { + body.pipe(httpRequest); + return; + } + if (body) { + if (Buffer.isBuffer(body) || typeof body === "string") { + httpRequest.end(body); + return; + } + const uint8 = body; + if (typeof uint8 === "object" && + uint8.buffer && + typeof uint8.byteOffset === "number" && + typeof uint8.byteLength === "number") { + httpRequest.end(Buffer.from(uint8.buffer, uint8.byteOffset, uint8.byteLength)); + return; + } + httpRequest.end(Buffer.from(body)); + return; + } + httpRequest.end(); +} diff --git a/amplify/functions/deleteDocument/node_modules/@smithy/node-http-handler/dist-types/constants.d.ts 
b/amplify/functions/deleteDocument/node_modules/@smithy/node-http-handler/dist-types/constants.d.ts new file mode 100644 index 0000000..3540461 --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@smithy/node-http-handler/dist-types/constants.d.ts @@ -0,0 +1,5 @@ +/** + * Node.js system error codes that indicate timeout. + * @deprecated use NODEJS_TIMEOUT_ERROR_CODES from @smithy/service-error-classification/constants + */ +export declare const NODEJS_TIMEOUT_ERROR_CODES: string[]; diff --git a/amplify/functions/deleteDocument/node_modules/@smithy/node-http-handler/dist-types/get-transformed-headers.d.ts b/amplify/functions/deleteDocument/node_modules/@smithy/node-http-handler/dist-types/get-transformed-headers.d.ts new file mode 100644 index 0000000..bb7cd4e --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@smithy/node-http-handler/dist-types/get-transformed-headers.d.ts @@ -0,0 +1,4 @@ +import { HeaderBag } from "@smithy/types"; +import { IncomingHttpHeaders } from "http2"; +declare const getTransformedHeaders: (headers: IncomingHttpHeaders) => HeaderBag; +export { getTransformedHeaders }; diff --git a/amplify/functions/deleteDocument/node_modules/@smithy/node-http-handler/dist-types/index.d.ts b/amplify/functions/deleteDocument/node_modules/@smithy/node-http-handler/dist-types/index.d.ts new file mode 100644 index 0000000..09c0b9a --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@smithy/node-http-handler/dist-types/index.d.ts @@ -0,0 +1,3 @@ +export * from "./node-http-handler"; +export * from "./node-http2-handler"; +export * from "./stream-collector"; diff --git a/amplify/functions/deleteDocument/node_modules/@smithy/node-http-handler/dist-types/node-http-handler.d.ts b/amplify/functions/deleteDocument/node_modules/@smithy/node-http-handler/dist-types/node-http-handler.d.ts new file mode 100644 index 0000000..b120313 --- /dev/null +++ 
b/amplify/functions/deleteDocument/node_modules/@smithy/node-http-handler/dist-types/node-http-handler.d.ts @@ -0,0 +1,47 @@ +/// +/// +import { HttpHandler, HttpRequest, HttpResponse } from "@smithy/protocol-http"; +import type { Logger, NodeHttpHandlerOptions } from "@smithy/types"; +import { HttpHandlerOptions, Provider } from "@smithy/types"; +import { Agent as hAgent } from "http"; +import { Agent as hsAgent } from "https"; +export { NodeHttpHandlerOptions }; +/** + * @public + * A default of 0 means no timeout. + */ +export declare const DEFAULT_REQUEST_TIMEOUT = 0; +/** + * @public + * A request handler that uses the Node.js http and https modules. + */ +export declare class NodeHttpHandler implements HttpHandler { + private config?; + private configProvider; + private socketWarningTimestamp; + readonly metadata: { + handlerProtocol: string; + }; + /** + * @returns the input if it is an HttpHandler of any class, + * or instantiates a new instance of this handler. + */ + static create(instanceOrOptions?: HttpHandler | NodeHttpHandlerOptions | Provider): NodeHttpHandler | HttpHandler; + /** + * @internal + * + * @param agent - http(s) agent in use by the NodeHttpHandler instance. + * @param socketWarningTimestamp - last socket usage check timestamp. + * @param logger - channel for the warning. + * @returns timestamp of last emitted warning. 
+ */ + static checkSocketUsage(agent: hAgent | hsAgent, socketWarningTimestamp: number, logger?: Logger): number; + constructor(options?: NodeHttpHandlerOptions | Provider); + private resolveDefaultConfig; + destroy(): void; + handle(request: HttpRequest, { abortSignal }?: HttpHandlerOptions): Promise<{ + response: HttpResponse; + }>; + updateHttpClientConfig(key: keyof NodeHttpHandlerOptions, value: NodeHttpHandlerOptions[typeof key]): void; + httpHandlerConfigs(): NodeHttpHandlerOptions; +} diff --git a/amplify/functions/deleteDocument/node_modules/@smithy/node-http-handler/dist-types/node-http2-connection-manager.d.ts b/amplify/functions/deleteDocument/node_modules/@smithy/node-http-handler/dist-types/node-http2-connection-manager.d.ts new file mode 100644 index 0000000..24bc3b5 --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@smithy/node-http-handler/dist-types/node-http2-connection-manager.d.ts @@ -0,0 +1,25 @@ +/// +import { RequestContext } from "@smithy/types"; +import { ConnectConfiguration } from "@smithy/types"; +import { ConnectionManager, ConnectionManagerConfiguration } from "@smithy/types"; +import { ClientHttp2Session } from "http2"; +/** + * @public + */ +export declare class NodeHttp2ConnectionManager implements ConnectionManager { + constructor(config: ConnectionManagerConfiguration); + private config; + private readonly sessionCache; + lease(requestContext: RequestContext, connectionConfiguration: ConnectConfiguration): ClientHttp2Session; + /** + * Delete a session from the connection pool. + * @param authority The authority of the session to delete. + * @param session The session to delete. 
+ */ + deleteSession(authority: string, session: ClientHttp2Session): void; + release(requestContext: RequestContext, session: ClientHttp2Session): void; + destroy(): void; + setMaxConcurrentStreams(maxConcurrentStreams: number): void; + setDisableConcurrentStreams(disableConcurrentStreams: boolean): void; + private getUrlString; +} diff --git a/amplify/functions/deleteDocument/node_modules/@smithy/node-http-handler/dist-types/node-http2-connection-pool.d.ts b/amplify/functions/deleteDocument/node_modules/@smithy/node-http-handler/dist-types/node-http2-connection-pool.d.ts new file mode 100644 index 0000000..6695893 --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@smithy/node-http-handler/dist-types/node-http2-connection-pool.d.ts @@ -0,0 +1,13 @@ +/// +import { ConnectionPool } from "@smithy/types"; +import { ClientHttp2Session } from "http2"; +export declare class NodeHttp2ConnectionPool implements ConnectionPool { + private sessions; + constructor(sessions?: ClientHttp2Session[]); + poll(): ClientHttp2Session | void; + offerLast(session: ClientHttp2Session): void; + contains(session: ClientHttp2Session): boolean; + remove(session: ClientHttp2Session): void; + [Symbol.iterator](): IterableIterator; + destroy(connection: ClientHttp2Session): void; +} diff --git a/amplify/functions/deleteDocument/node_modules/@smithy/node-http-handler/dist-types/node-http2-handler.d.ts b/amplify/functions/deleteDocument/node_modules/@smithy/node-http-handler/dist-types/node-http2-handler.d.ts new file mode 100644 index 0000000..68610a9 --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@smithy/node-http-handler/dist-types/node-http2-handler.d.ts @@ -0,0 +1,62 @@ +import { HttpHandler, HttpRequest, HttpResponse } from "@smithy/protocol-http"; +import { HttpHandlerOptions, Provider } from "@smithy/types"; +/** + * Represents the http2 options that can be passed to a node http2 client. 
+ * @public + */ +export interface NodeHttp2HandlerOptions { + /** + * The maximum time in milliseconds that a stream may remain idle before it + * is closed. + */ + requestTimeout?: number; + /** + * The maximum time in milliseconds that a session or socket may remain idle + * before it is closed. + * https://nodejs.org/docs/latest-v12.x/api/http2.html#http2_http2session_and_sockets + */ + sessionTimeout?: number; + /** + * Disables processing concurrent streams on a ClientHttp2Session instance. When set + * to true, a new session instance is created for each request to a URL. + * **Default:** false. + * https://nodejs.org/api/http2.html#http2_class_clienthttp2session + */ + disableConcurrentStreams?: boolean; + /** + * Maximum number of concurrent Http2Stream instances per ClientHttp2Session. Each session + * may have up to 2^31-1 Http2Stream instances over its lifetime. + * This value must be greater than or equal to 0. + * https://nodejs.org/api/http2.html#class-http2stream + */ + maxConcurrentStreams?: number; +} +/** + * A request handler using the node:http2 package. + * @public + */ +export declare class NodeHttp2Handler implements HttpHandler { + private config?; + private configProvider; + readonly metadata: { + handlerProtocol: string; + }; + private readonly connectionManager; + /** + * @returns the input if it is an HttpHandler of any class, + * or instantiates a new instance of this handler. + */ + static create(instanceOrOptions?: HttpHandler | NodeHttp2HandlerOptions | Provider): HttpHandler | NodeHttp2Handler; + constructor(options?: NodeHttp2HandlerOptions | Provider); + destroy(): void; + handle(request: HttpRequest, { abortSignal }?: HttpHandlerOptions): Promise<{ + response: HttpResponse; + }>; + updateHttpClientConfig(key: keyof NodeHttp2HandlerOptions, value: NodeHttp2HandlerOptions[typeof key]): void; + httpHandlerConfigs(): NodeHttp2HandlerOptions; + /** + * Destroys a session. + * @param session - the session to destroy. 
+ */ + private destroySession; +} diff --git a/amplify/functions/deleteDocument/node_modules/@smithy/node-http-handler/dist-types/readable.mock.d.ts b/amplify/functions/deleteDocument/node_modules/@smithy/node-http-handler/dist-types/readable.mock.d.ts new file mode 100644 index 0000000..cd7e77f --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@smithy/node-http-handler/dist-types/readable.mock.d.ts @@ -0,0 +1,15 @@ +/// +/// +/// +import { Readable, ReadableOptions } from "stream"; +export interface ReadFromBuffersOptions extends ReadableOptions { + buffers: Buffer[]; + errorAfter?: number; +} +export declare class ReadFromBuffers extends Readable { + private buffersToRead; + private numBuffersRead; + private errorAfter; + constructor(options: ReadFromBuffersOptions); + _read(): boolean | undefined; +} diff --git a/amplify/functions/deleteDocument/node_modules/@smithy/node-http-handler/dist-types/server.mock.d.ts b/amplify/functions/deleteDocument/node_modules/@smithy/node-http-handler/dist-types/server.mock.d.ts new file mode 100644 index 0000000..585a677 --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@smithy/node-http-handler/dist-types/server.mock.d.ts @@ -0,0 +1,12 @@ +import { HttpResponse } from "@smithy/types"; +import { IncomingMessage, Server as HttpServer, ServerResponse } from "http"; +import { Http2Server } from "http2"; +import { Server as HttpsServer } from "https"; +export declare const createResponseFunction: (httpResp: HttpResponse) => (request: IncomingMessage, response: ServerResponse) => void; +export declare const createResponseFunctionWithDelay: (httpResp: HttpResponse, delay: number) => (request: IncomingMessage, response: ServerResponse) => void; +export declare const createContinueResponseFunction: (httpResp: HttpResponse) => (request: IncomingMessage, response: ServerResponse) => void; +export declare const createMockHttpsServer: () => HttpsServer; +export declare const createMockHttpServer: () => 
HttpServer; +export declare const createMockHttp2Server: () => Http2Server; +export declare const createMirrorResponseFunction: (httpResp: HttpResponse) => (request: IncomingMessage, response: ServerResponse) => void; +export declare const getResponseBody: (response: HttpResponse) => Promise; diff --git a/amplify/functions/deleteDocument/node_modules/@smithy/node-http-handler/dist-types/set-connection-timeout.d.ts b/amplify/functions/deleteDocument/node_modules/@smithy/node-http-handler/dist-types/set-connection-timeout.d.ts new file mode 100644 index 0000000..57b811f --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@smithy/node-http-handler/dist-types/set-connection-timeout.d.ts @@ -0,0 +1,9 @@ +/// +/// +/// +/// +/// +/// +/// +import { ClientRequest } from "http"; +export declare const setConnectionTimeout: (request: ClientRequest, reject: (err: Error) => void, timeoutInMs?: number) => NodeJS.Timeout | number; diff --git a/amplify/functions/deleteDocument/node_modules/@smithy/node-http-handler/dist-types/set-socket-keep-alive.d.ts b/amplify/functions/deleteDocument/node_modules/@smithy/node-http-handler/dist-types/set-socket-keep-alive.d.ts new file mode 100644 index 0000000..80507d8 --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@smithy/node-http-handler/dist-types/set-socket-keep-alive.d.ts @@ -0,0 +1,13 @@ +/// +/// +/// +/// +/// +/// +/// +import { ClientRequest } from "http"; +export interface SocketKeepAliveOptions { + keepAlive: boolean; + keepAliveMsecs?: number; +} +export declare const setSocketKeepAlive: (request: ClientRequest, { keepAlive, keepAliveMsecs }: SocketKeepAliveOptions, deferTimeMs?: number) => NodeJS.Timeout | number; diff --git a/amplify/functions/deleteDocument/node_modules/@smithy/node-http-handler/dist-types/set-socket-timeout.d.ts b/amplify/functions/deleteDocument/node_modules/@smithy/node-http-handler/dist-types/set-socket-timeout.d.ts new file mode 100644 index 0000000..019a62b --- /dev/null 
+++ b/amplify/functions/deleteDocument/node_modules/@smithy/node-http-handler/dist-types/set-socket-timeout.d.ts @@ -0,0 +1,9 @@ +/// +/// +/// +/// +/// +/// +/// +import { ClientRequest } from "http"; +export declare const setSocketTimeout: (request: ClientRequest, reject: (err: Error) => void, timeoutInMs?: number) => NodeJS.Timeout | number; diff --git a/amplify/functions/deleteDocument/node_modules/@smithy/node-http-handler/dist-types/stream-collector/collector.d.ts b/amplify/functions/deleteDocument/node_modules/@smithy/node-http-handler/dist-types/stream-collector/collector.d.ts new file mode 100644 index 0000000..b7d4d12 --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@smithy/node-http-handler/dist-types/stream-collector/collector.d.ts @@ -0,0 +1,8 @@ +/// +/// +/// +import { Writable } from "stream"; +export declare class Collector extends Writable { + readonly bufferedBytes: Buffer[]; + _write(chunk: Buffer, encoding: string, callback: (err?: Error) => void): void; +} diff --git a/amplify/functions/deleteDocument/node_modules/@smithy/node-http-handler/dist-types/stream-collector/index.d.ts b/amplify/functions/deleteDocument/node_modules/@smithy/node-http-handler/dist-types/stream-collector/index.d.ts new file mode 100644 index 0000000..a9a9498 --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@smithy/node-http-handler/dist-types/stream-collector/index.d.ts @@ -0,0 +1,6 @@ +import { StreamCollector } from "@smithy/types"; +/** + * @internal + * Converts a stream to a byte array. 
+ */ +export declare const streamCollector: StreamCollector; diff --git a/amplify/functions/deleteDocument/node_modules/@smithy/node-http-handler/dist-types/stream-collector/readable.mock.d.ts b/amplify/functions/deleteDocument/node_modules/@smithy/node-http-handler/dist-types/stream-collector/readable.mock.d.ts new file mode 100644 index 0000000..2543a28 --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@smithy/node-http-handler/dist-types/stream-collector/readable.mock.d.ts @@ -0,0 +1,15 @@ +/// +/// +/// +import { Readable, ReadableOptions } from "stream"; +export interface ReadFromBuffersOptions extends ReadableOptions { + buffers: Buffer[]; + errorAfter?: number; +} +export declare class ReadFromBuffers extends Readable { + private buffersToRead; + private numBuffersRead; + private errorAfter; + constructor(options: ReadFromBuffersOptions); + _read(size: number): boolean | undefined; +} diff --git a/amplify/functions/deleteDocument/node_modules/@smithy/node-http-handler/dist-types/timing.d.ts b/amplify/functions/deleteDocument/node_modules/@smithy/node-http-handler/dist-types/timing.d.ts new file mode 100644 index 0000000..de5b695 --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@smithy/node-http-handler/dist-types/timing.d.ts @@ -0,0 +1,8 @@ +/** + * @internal + * For test spies. 
+ */ +export declare const timing: { + setTimeout: (cb: (...ignored: any[]) => void | unknown, ms?: number) => number; + clearTimeout: (timeoutId: string | number | undefined | unknown) => void; +}; diff --git a/amplify/functions/deleteDocument/node_modules/@smithy/node-http-handler/dist-types/ts3.4/constants.d.ts b/amplify/functions/deleteDocument/node_modules/@smithy/node-http-handler/dist-types/ts3.4/constants.d.ts new file mode 100644 index 0000000..b02b0b6 --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@smithy/node-http-handler/dist-types/ts3.4/constants.d.ts @@ -0,0 +1,5 @@ +/** + * Node.js system error codes that indicate timeout. + * @deprecated use NODEJS_TIMEOUT_ERROR_CODES from @smithy/service-error-classification/constants + */ +export declare const NODEJS_TIMEOUT_ERROR_CODES: string[]; diff --git a/amplify/functions/deleteDocument/node_modules/@smithy/node-http-handler/dist-types/ts3.4/get-transformed-headers.d.ts b/amplify/functions/deleteDocument/node_modules/@smithy/node-http-handler/dist-types/ts3.4/get-transformed-headers.d.ts new file mode 100644 index 0000000..c6f5a8b --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@smithy/node-http-handler/dist-types/ts3.4/get-transformed-headers.d.ts @@ -0,0 +1,4 @@ +import { HeaderBag } from "@smithy/types"; +import { IncomingHttpHeaders } from "http2"; +declare const getTransformedHeaders: (headers: IncomingHttpHeaders) => HeaderBag; +export { getTransformedHeaders }; diff --git a/amplify/functions/deleteDocument/node_modules/@smithy/node-http-handler/dist-types/ts3.4/index.d.ts b/amplify/functions/deleteDocument/node_modules/@smithy/node-http-handler/dist-types/ts3.4/index.d.ts new file mode 100644 index 0000000..055c48c --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@smithy/node-http-handler/dist-types/ts3.4/index.d.ts @@ -0,0 +1,3 @@ +export * from "./node-http-handler"; +export * from "./node-http2-handler"; +export * from "./stream-collector"; diff 
--git a/amplify/functions/deleteDocument/node_modules/@smithy/node-http-handler/dist-types/ts3.4/node-http-handler.d.ts b/amplify/functions/deleteDocument/node_modules/@smithy/node-http-handler/dist-types/ts3.4/node-http-handler.d.ts new file mode 100644 index 0000000..eb1da7b --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@smithy/node-http-handler/dist-types/ts3.4/node-http-handler.d.ts @@ -0,0 +1,46 @@ +/// +import { HttpHandler, HttpRequest, HttpResponse } from "@smithy/protocol-http"; +import { Logger, NodeHttpHandlerOptions } from "@smithy/types"; +import { HttpHandlerOptions, Provider } from "@smithy/types"; +import { Agent as hAgent } from "http"; +import { Agent as hsAgent } from "https"; +export { NodeHttpHandlerOptions }; +/** + * @public + * A default of 0 means no timeout. + */ +export declare const DEFAULT_REQUEST_TIMEOUT = 0; +/** + * @public + * A request handler that uses the Node.js http and https modules. + */ +export declare class NodeHttpHandler implements HttpHandler { + private config?; + private configProvider; + private socketWarningTimestamp; + readonly metadata: { + handlerProtocol: string; + }; + /** + * @returns the input if it is an HttpHandler of any class, + * or instantiates a new instance of this handler. + */ + static create(instanceOrOptions?: HttpHandler | NodeHttpHandlerOptions | Provider): NodeHttpHandler | HttpHandler; + /** + * @internal + * + * @param agent - http(s) agent in use by the NodeHttpHandler instance. + * @param socketWarningTimestamp - last socket usage check timestamp. + * @param logger - channel for the warning. + * @returns timestamp of last emitted warning. 
+ */ + static checkSocketUsage(agent: hAgent | hsAgent, socketWarningTimestamp: number, logger?: Logger): number; + constructor(options?: NodeHttpHandlerOptions | Provider); + private resolveDefaultConfig; + destroy(): void; + handle(request: HttpRequest, { abortSignal }?: HttpHandlerOptions): Promise<{ + response: HttpResponse; + }>; + updateHttpClientConfig(key: keyof NodeHttpHandlerOptions, value: NodeHttpHandlerOptions[typeof key]): void; + httpHandlerConfigs(): NodeHttpHandlerOptions; +} diff --git a/amplify/functions/deleteDocument/node_modules/@smithy/node-http-handler/dist-types/ts3.4/node-http2-connection-manager.d.ts b/amplify/functions/deleteDocument/node_modules/@smithy/node-http-handler/dist-types/ts3.4/node-http2-connection-manager.d.ts new file mode 100644 index 0000000..8aa87c1 --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@smithy/node-http-handler/dist-types/ts3.4/node-http2-connection-manager.d.ts @@ -0,0 +1,25 @@ +/// +import { RequestContext } from "@smithy/types"; +import { ConnectConfiguration } from "@smithy/types"; +import { ConnectionManager, ConnectionManagerConfiguration } from "@smithy/types"; +import { ClientHttp2Session } from "http2"; +/** + * @public + */ +export declare class NodeHttp2ConnectionManager implements ConnectionManager { + constructor(config: ConnectionManagerConfiguration); + private config; + private readonly sessionCache; + lease(requestContext: RequestContext, connectionConfiguration: ConnectConfiguration): ClientHttp2Session; + /** + * Delete a session from the connection pool. + * @param authority The authority of the session to delete. + * @param session The session to delete. 
+ */ + deleteSession(authority: string, session: ClientHttp2Session): void; + release(requestContext: RequestContext, session: ClientHttp2Session): void; + destroy(): void; + setMaxConcurrentStreams(maxConcurrentStreams: number): void; + setDisableConcurrentStreams(disableConcurrentStreams: boolean): void; + private getUrlString; +} diff --git a/amplify/functions/deleteDocument/node_modules/@smithy/node-http-handler/dist-types/ts3.4/node-http2-connection-pool.d.ts b/amplify/functions/deleteDocument/node_modules/@smithy/node-http-handler/dist-types/ts3.4/node-http2-connection-pool.d.ts new file mode 100644 index 0000000..e9116cb --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@smithy/node-http-handler/dist-types/ts3.4/node-http2-connection-pool.d.ts @@ -0,0 +1,13 @@ +/// +import { ConnectionPool } from "@smithy/types"; +import { ClientHttp2Session } from "http2"; +export declare class NodeHttp2ConnectionPool implements ConnectionPool { + private sessions; + constructor(sessions?: ClientHttp2Session[]); + poll(): ClientHttp2Session | void; + offerLast(session: ClientHttp2Session): void; + contains(session: ClientHttp2Session): boolean; + remove(session: ClientHttp2Session): void; + [Symbol.iterator](): IterableIterator; + destroy(connection: ClientHttp2Session): void; +} diff --git a/amplify/functions/deleteDocument/node_modules/@smithy/node-http-handler/dist-types/ts3.4/node-http2-handler.d.ts b/amplify/functions/deleteDocument/node_modules/@smithy/node-http-handler/dist-types/ts3.4/node-http2-handler.d.ts new file mode 100644 index 0000000..eaa24bd --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@smithy/node-http-handler/dist-types/ts3.4/node-http2-handler.d.ts @@ -0,0 +1,62 @@ +import { HttpHandler, HttpRequest, HttpResponse } from "@smithy/protocol-http"; +import { HttpHandlerOptions, Provider } from "@smithy/types"; +/** + * Represents the http2 options that can be passed to a node http2 client. 
+ * @public + */ +export interface NodeHttp2HandlerOptions { + /** + * The maximum time in milliseconds that a stream may remain idle before it + * is closed. + */ + requestTimeout?: number; + /** + * The maximum time in milliseconds that a session or socket may remain idle + * before it is closed. + * https://nodejs.org/docs/latest-v12.x/api/http2.html#http2_http2session_and_sockets + */ + sessionTimeout?: number; + /** + * Disables processing concurrent streams on a ClientHttp2Session instance. When set + * to true, a new session instance is created for each request to a URL. + * **Default:** false. + * https://nodejs.org/api/http2.html#http2_class_clienthttp2session + */ + disableConcurrentStreams?: boolean; + /** + * Maximum number of concurrent Http2Stream instances per ClientHttp2Session. Each session + * may have up to 2^31-1 Http2Stream instances over its lifetime. + * This value must be greater than or equal to 0. + * https://nodejs.org/api/http2.html#class-http2stream + */ + maxConcurrentStreams?: number; +} +/** + * A request handler using the node:http2 package. + * @public + */ +export declare class NodeHttp2Handler implements HttpHandler { + private config?; + private configProvider; + readonly metadata: { + handlerProtocol: string; + }; + private readonly connectionManager; + /** + * @returns the input if it is an HttpHandler of any class, + * or instantiates a new instance of this handler. + */ + static create(instanceOrOptions?: HttpHandler | NodeHttp2HandlerOptions | Provider): HttpHandler | NodeHttp2Handler; + constructor(options?: NodeHttp2HandlerOptions | Provider); + destroy(): void; + handle(request: HttpRequest, { abortSignal }?: HttpHandlerOptions): Promise<{ + response: HttpResponse; + }>; + updateHttpClientConfig(key: keyof NodeHttp2HandlerOptions, value: NodeHttp2HandlerOptions[typeof key]): void; + httpHandlerConfigs(): NodeHttp2HandlerOptions; + /** + * Destroys a session. + * @param session - the session to destroy. 
+ */ + private destroySession; +} diff --git a/amplify/functions/deleteDocument/node_modules/@smithy/node-http-handler/dist-types/ts3.4/readable.mock.d.ts b/amplify/functions/deleteDocument/node_modules/@smithy/node-http-handler/dist-types/ts3.4/readable.mock.d.ts new file mode 100644 index 0000000..f0492d4 --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@smithy/node-http-handler/dist-types/ts3.4/readable.mock.d.ts @@ -0,0 +1,13 @@ +/// +import { Readable, ReadableOptions } from "stream"; +export interface ReadFromBuffersOptions extends ReadableOptions { + buffers: Buffer[]; + errorAfter?: number; +} +export declare class ReadFromBuffers extends Readable { + private buffersToRead; + private numBuffersRead; + private errorAfter; + constructor(options: ReadFromBuffersOptions); + _read(): boolean | undefined; +} diff --git a/amplify/functions/deleteDocument/node_modules/@smithy/node-http-handler/dist-types/ts3.4/server.mock.d.ts b/amplify/functions/deleteDocument/node_modules/@smithy/node-http-handler/dist-types/ts3.4/server.mock.d.ts new file mode 100644 index 0000000..6a7e350 --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@smithy/node-http-handler/dist-types/ts3.4/server.mock.d.ts @@ -0,0 +1,12 @@ +import { HttpResponse } from "@smithy/types"; +import { IncomingMessage, Server as HttpServer, ServerResponse } from "http"; +import { Http2Server } from "http2"; +import { Server as HttpsServer } from "https"; +export declare const createResponseFunction: (httpResp: HttpResponse) => (request: IncomingMessage, response: ServerResponse) => void; +export declare const createResponseFunctionWithDelay: (httpResp: HttpResponse, delay: number) => (request: IncomingMessage, response: ServerResponse) => void; +export declare const createContinueResponseFunction: (httpResp: HttpResponse) => (request: IncomingMessage, response: ServerResponse) => void; +export declare const createMockHttpsServer: () => HttpsServer; +export declare const 
createMockHttpServer: () => HttpServer; +export declare const createMockHttp2Server: () => Http2Server; +export declare const createMirrorResponseFunction: (httpResp: HttpResponse) => (request: IncomingMessage, response: ServerResponse) => void; +export declare const getResponseBody: (response: HttpResponse) => Promise; diff --git a/amplify/functions/deleteDocument/node_modules/@smithy/node-http-handler/dist-types/ts3.4/set-connection-timeout.d.ts b/amplify/functions/deleteDocument/node_modules/@smithy/node-http-handler/dist-types/ts3.4/set-connection-timeout.d.ts new file mode 100644 index 0000000..96cdb66 --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@smithy/node-http-handler/dist-types/ts3.4/set-connection-timeout.d.ts @@ -0,0 +1,3 @@ +/// +import { ClientRequest } from "http"; +export declare const setConnectionTimeout: (request: ClientRequest, reject: (err: Error) => void, timeoutInMs?: number) => NodeJS.Timeout | number; diff --git a/amplify/functions/deleteDocument/node_modules/@smithy/node-http-handler/dist-types/ts3.4/set-socket-keep-alive.d.ts b/amplify/functions/deleteDocument/node_modules/@smithy/node-http-handler/dist-types/ts3.4/set-socket-keep-alive.d.ts new file mode 100644 index 0000000..3bb6ec0 --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@smithy/node-http-handler/dist-types/ts3.4/set-socket-keep-alive.d.ts @@ -0,0 +1,7 @@ +/// +import { ClientRequest } from "http"; +export interface SocketKeepAliveOptions { + keepAlive: boolean; + keepAliveMsecs?: number; +} +export declare const setSocketKeepAlive: (request: ClientRequest, { keepAlive, keepAliveMsecs }: SocketKeepAliveOptions, deferTimeMs?: number) => NodeJS.Timeout | number; diff --git a/amplify/functions/deleteDocument/node_modules/@smithy/node-http-handler/dist-types/ts3.4/set-socket-timeout.d.ts b/amplify/functions/deleteDocument/node_modules/@smithy/node-http-handler/dist-types/ts3.4/set-socket-timeout.d.ts new file mode 100644 index 0000000..57f8743 
--- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@smithy/node-http-handler/dist-types/ts3.4/set-socket-timeout.d.ts @@ -0,0 +1,3 @@ +/// +import { ClientRequest } from "http"; +export declare const setSocketTimeout: (request: ClientRequest, reject: (err: Error) => void, timeoutInMs?: number) => NodeJS.Timeout | number; diff --git a/amplify/functions/deleteDocument/node_modules/@smithy/node-http-handler/dist-types/ts3.4/stream-collector/collector.d.ts b/amplify/functions/deleteDocument/node_modules/@smithy/node-http-handler/dist-types/ts3.4/stream-collector/collector.d.ts new file mode 100644 index 0000000..c329bd4 --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@smithy/node-http-handler/dist-types/ts3.4/stream-collector/collector.d.ts @@ -0,0 +1,6 @@ +/// +import { Writable } from "stream"; +export declare class Collector extends Writable { + readonly bufferedBytes: Buffer[]; + _write(chunk: Buffer, encoding: string, callback: (err?: Error) => void): void; +} diff --git a/amplify/functions/deleteDocument/node_modules/@smithy/node-http-handler/dist-types/ts3.4/stream-collector/index.d.ts b/amplify/functions/deleteDocument/node_modules/@smithy/node-http-handler/dist-types/ts3.4/stream-collector/index.d.ts new file mode 100644 index 0000000..1022a17 --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@smithy/node-http-handler/dist-types/ts3.4/stream-collector/index.d.ts @@ -0,0 +1,6 @@ +import { StreamCollector } from "@smithy/types"; +/** + * @internal + * Converts a stream to a byte array. 
+ */ +export declare const streamCollector: StreamCollector; diff --git a/amplify/functions/deleteDocument/node_modules/@smithy/node-http-handler/dist-types/ts3.4/stream-collector/readable.mock.d.ts b/amplify/functions/deleteDocument/node_modules/@smithy/node-http-handler/dist-types/ts3.4/stream-collector/readable.mock.d.ts new file mode 100644 index 0000000..e2c0a4c --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@smithy/node-http-handler/dist-types/ts3.4/stream-collector/readable.mock.d.ts @@ -0,0 +1,13 @@ +/// +import { Readable, ReadableOptions } from "stream"; +export interface ReadFromBuffersOptions extends ReadableOptions { + buffers: Buffer[]; + errorAfter?: number; +} +export declare class ReadFromBuffers extends Readable { + private buffersToRead; + private numBuffersRead; + private errorAfter; + constructor(options: ReadFromBuffersOptions); + _read(size: number): boolean | undefined; +} diff --git a/amplify/functions/deleteDocument/node_modules/@smithy/node-http-handler/dist-types/ts3.4/timing.d.ts b/amplify/functions/deleteDocument/node_modules/@smithy/node-http-handler/dist-types/ts3.4/timing.d.ts new file mode 100644 index 0000000..c88dd2f --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@smithy/node-http-handler/dist-types/ts3.4/timing.d.ts @@ -0,0 +1,8 @@ +/** + * @internal + * For test spies. 
+ */ +export declare const timing: { + setTimeout: (cb: (...ignored: any[]) => void | unknown, ms?: number) => number; + clearTimeout: (timeoutId: string | number | undefined | unknown) => void; +}; diff --git a/amplify/functions/deleteDocument/node_modules/@smithy/node-http-handler/dist-types/ts3.4/write-request-body.d.ts b/amplify/functions/deleteDocument/node_modules/@smithy/node-http-handler/dist-types/ts3.4/write-request-body.d.ts new file mode 100644 index 0000000..0f13e96 --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@smithy/node-http-handler/dist-types/ts3.4/write-request-body.d.ts @@ -0,0 +1,12 @@ +/// +import { HttpRequest } from "@smithy/types"; +import { ClientRequest } from "http"; +import { ClientHttp2Stream } from "http2"; +/** + * This resolves when writeBody has been called. + * + * @param httpRequest - opened Node.js request. + * @param request - container with the request body. + * @param maxContinueTimeoutMs - time to wait for the continue event. + */ +export declare function writeRequestBody(httpRequest: ClientRequest | ClientHttp2Stream, request: HttpRequest, maxContinueTimeoutMs?: number): Promise; diff --git a/amplify/functions/deleteDocument/node_modules/@smithy/node-http-handler/dist-types/write-request-body.d.ts b/amplify/functions/deleteDocument/node_modules/@smithy/node-http-handler/dist-types/write-request-body.d.ts new file mode 100644 index 0000000..0c49e32 --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@smithy/node-http-handler/dist-types/write-request-body.d.ts @@ -0,0 +1,13 @@ +/// +/// +import { HttpRequest } from "@smithy/types"; +import { ClientRequest } from "http"; +import { ClientHttp2Stream } from "http2"; +/** + * This resolves when writeBody has been called. + * + * @param httpRequest - opened Node.js request. + * @param request - container with the request body. + * @param maxContinueTimeoutMs - time to wait for the continue event. 
+ */ +export declare function writeRequestBody(httpRequest: ClientRequest | ClientHttp2Stream, request: HttpRequest, maxContinueTimeoutMs?: number): Promise; diff --git a/amplify/functions/deleteDocument/node_modules/@smithy/node-http-handler/package.json b/amplify/functions/deleteDocument/node_modules/@smithy/node-http-handler/package.json new file mode 100644 index 0000000..2e4e1e3 --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@smithy/node-http-handler/package.json @@ -0,0 +1,67 @@ +{ + "name": "@smithy/node-http-handler", + "version": "4.0.4", + "description": "Provides a way to make requests", + "scripts": { + "build": "concurrently 'yarn:build:cjs' 'yarn:build:es' 'yarn:build:types && yarn build:types:downlevel'", + "build:cjs": "node ../../scripts/inline node-http-handler", + "build:es": "yarn g:tsc -p tsconfig.es.json", + "build:types": "yarn g:tsc -p tsconfig.types.json", + "build:types:downlevel": "rimraf dist-types/ts3.4 && downlevel-dts dist-types dist-types/ts3.4", + "stage-release": "rimraf ./.release && yarn pack && mkdir ./.release && tar zxvf ./package.tgz --directory ./.release && rm ./package.tgz", + "clean": "rimraf ./dist-* && rimraf *.tsbuildinfo || exit 0", + "lint": "eslint -c ../../.eslintrc.js \"src/**/*.ts\"", + "format": "prettier --config ../../prettier.config.js --ignore-path ../../.prettierignore --write \"**/*.{ts,md,json}\"", + "extract:docs": "api-extractor run --local", + "test": "yarn g:vitest run", + "test:watch": "yarn g:vitest watch" + }, + "author": { + "name": "AWS SDK for JavaScript Team", + "email": "", + "url": "https://aws.amazon.com/javascript/" + }, + "license": "Apache-2.0", + "main": "./dist-cjs/index.js", + "module": "./dist-es/index.js", + "types": "./dist-types/index.d.ts", + "dependencies": { + "@smithy/abort-controller": "^4.0.2", + "@smithy/protocol-http": "^5.1.0", + "@smithy/querystring-builder": "^4.0.2", + "@smithy/types": "^4.2.0", + "tslib": "^2.6.2" + }, + "devDependencies": { + 
"@types/node": "^18.11.9", + "concurrently": "7.0.0", + "downlevel-dts": "0.10.1", + "rimraf": "3.0.2", + "typedoc": "0.23.23" + }, + "engines": { + "node": ">=18.0.0" + }, + "typesVersions": { + "<4.0": { + "dist-types/*": [ + "dist-types/ts3.4/*" + ] + } + }, + "files": [ + "dist-*/**" + ], + "homepage": "https://github.com/smithy-lang/smithy-typescript/tree/main/packages/node-http-handler", + "repository": { + "type": "git", + "url": "https://github.com/smithy-lang/smithy-typescript.git", + "directory": "packages/node-http-handler" + }, + "typedoc": { + "entryPoint": "src/index.ts" + }, + "publishConfig": { + "directory": ".release/package" + } +} \ No newline at end of file diff --git a/amplify/functions/deleteDocument/node_modules/@smithy/property-provider/LICENSE b/amplify/functions/deleteDocument/node_modules/@smithy/property-provider/LICENSE new file mode 100644 index 0000000..7b6491b --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@smithy/property-provider/LICENSE @@ -0,0 +1,201 @@ +Apache License + Version 2.0, January 2004 + http://www.apache.org/licenses/ + + TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION + + 1. Definitions. + + "License" shall mean the terms and conditions for use, reproduction, + and distribution as defined by Sections 1 through 9 of this document. + + "Licensor" shall mean the copyright owner or entity authorized by + the copyright owner that is granting the License. + + "Legal Entity" shall mean the union of the acting entity and all + other entities that control, are controlled by, or are under common + control with that entity. For the purposes of this definition, + "control" means (i) the power, direct or indirect, to cause the + direction or management of such entity, whether by contract or + otherwise, or (ii) ownership of fifty percent (50%) or more of the + outstanding shares, or (iii) beneficial ownership of such entity. 
+ + "You" (or "Your") shall mean an individual or Legal Entity + exercising permissions granted by this License. + + "Source" form shall mean the preferred form for making modifications, + including but not limited to software source code, documentation + source, and configuration files. + + "Object" form shall mean any form resulting from mechanical + transformation or translation of a Source form, including but + not limited to compiled object code, generated documentation, + and conversions to other media types. + + "Work" shall mean the work of authorship, whether in Source or + Object form, made available under the License, as indicated by a + copyright notice that is included in or attached to the work + (an example is provided in the Appendix below). + + "Derivative Works" shall mean any work, whether in Source or Object + form, that is based on (or derived from) the Work and for which the + editorial revisions, annotations, elaborations, or other modifications + represent, as a whole, an original work of authorship. For the purposes + of this License, Derivative Works shall not include works that remain + separable from, or merely link (or bind by name) to the interfaces of, + the Work and Derivative Works thereof. + + "Contribution" shall mean any work of authorship, including + the original version of the Work and any modifications or additions + to that Work or Derivative Works thereof, that is intentionally + submitted to Licensor for inclusion in the Work by the copyright owner + or by an individual or Legal Entity authorized to submit on behalf of + the copyright owner. 
For the purposes of this definition, "submitted" + means any form of electronic, verbal, or written communication sent + to the Licensor or its representatives, including but not limited to + communication on electronic mailing lists, source code control systems, + and issue tracking systems that are managed by, or on behalf of, the + Licensor for the purpose of discussing and improving the Work, but + excluding communication that is conspicuously marked or otherwise + designated in writing by the copyright owner as "Not a Contribution." + + "Contributor" shall mean Licensor and any individual or Legal Entity + on behalf of whom a Contribution has been received by Licensor and + subsequently incorporated within the Work. + + 2. Grant of Copyright License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + copyright license to reproduce, prepare Derivative Works of, + publicly display, publicly perform, sublicense, and distribute the + Work and such Derivative Works in Source or Object form. + + 3. Grant of Patent License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + (except as stated in this section) patent license to make, have made, + use, offer to sell, sell, import, and otherwise transfer the Work, + where such license applies only to those patent claims licensable + by such Contributor that are necessarily infringed by their + Contribution(s) alone or by combination of their Contribution(s) + with the Work to which such Contribution(s) was submitted. 
If You + institute patent litigation against any entity (including a + cross-claim or counterclaim in a lawsuit) alleging that the Work + or a Contribution incorporated within the Work constitutes direct + or contributory patent infringement, then any patent licenses + granted to You under this License for that Work shall terminate + as of the date such litigation is filed. + + 4. Redistribution. You may reproduce and distribute copies of the + Work or Derivative Works thereof in any medium, with or without + modifications, and in Source or Object form, provided that You + meet the following conditions: + + (a) You must give any other recipients of the Work or + Derivative Works a copy of this License; and + + (b) You must cause any modified files to carry prominent notices + stating that You changed the files; and + + (c) You must retain, in the Source form of any Derivative Works + that You distribute, all copyright, patent, trademark, and + attribution notices from the Source form of the Work, + excluding those notices that do not pertain to any part of + the Derivative Works; and + + (d) If the Work includes a "NOTICE" text file as part of its + distribution, then any Derivative Works that You distribute must + include a readable copy of the attribution notices contained + within such NOTICE file, excluding those notices that do not + pertain to any part of the Derivative Works, in at least one + of the following places: within a NOTICE text file distributed + as part of the Derivative Works; within the Source form or + documentation, if provided along with the Derivative Works; or, + within a display generated by the Derivative Works, if and + wherever such third-party notices normally appear. The contents + of the NOTICE file are for informational purposes only and + do not modify the License. 
You may add Your own attribution + notices within Derivative Works that You distribute, alongside + or as an addendum to the NOTICE text from the Work, provided + that such additional attribution notices cannot be construed + as modifying the License. + + You may add Your own copyright statement to Your modifications and + may provide additional or different license terms and conditions + for use, reproduction, or distribution of Your modifications, or + for any such Derivative Works as a whole, provided Your use, + reproduction, and distribution of the Work otherwise complies with + the conditions stated in this License. + + 5. Submission of Contributions. Unless You explicitly state otherwise, + any Contribution intentionally submitted for inclusion in the Work + by You to the Licensor shall be under the terms and conditions of + this License, without any additional terms or conditions. + Notwithstanding the above, nothing herein shall supersede or modify + the terms of any separate license agreement you may have executed + with Licensor regarding such Contributions. + + 6. Trademarks. This License does not grant permission to use the trade + names, trademarks, service marks, or product names of the Licensor, + except as required for reasonable and customary use in describing the + origin of the Work and reproducing the content of the NOTICE file. + + 7. Disclaimer of Warranty. Unless required by applicable law or + agreed to in writing, Licensor provides the Work (and each + Contributor provides its Contributions) on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or + implied, including, without limitation, any warranties or conditions + of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A + PARTICULAR PURPOSE. You are solely responsible for determining the + appropriateness of using or redistributing the Work and assume any + risks associated with Your exercise of permissions under this License. + + 8. 
Limitation of Liability. In no event and under no legal theory, + whether in tort (including negligence), contract, or otherwise, + unless required by applicable law (such as deliberate and grossly + negligent acts) or agreed to in writing, shall any Contributor be + liable to You for damages, including any direct, indirect, special, + incidental, or consequential damages of any character arising as a + result of this License or out of the use or inability to use the + Work (including but not limited to damages for loss of goodwill, + work stoppage, computer failure or malfunction, or any and all + other commercial damages or losses), even if such Contributor + has been advised of the possibility of such damages. + + 9. Accepting Warranty or Additional Liability. While redistributing + the Work or Derivative Works thereof, You may choose to offer, + and charge a fee for, acceptance of support, warranty, indemnity, + or other liability obligations and/or rights consistent with this + License. However, in accepting such obligations, You may act only + on Your own behalf and on Your sole responsibility, not on behalf + of any other Contributor, and only if You agree to indemnify, + defend, and hold each Contributor harmless for any liability + incurred by, or claims asserted against, such Contributor by reason + of your accepting any such warranty or additional liability. + + END OF TERMS AND CONDITIONS + + APPENDIX: How to apply the Apache License to your work. + + To apply the Apache License to your work, attach the following + boilerplate notice, with the fields enclosed by brackets "{}" + replaced with your own identifying information. (Don't include + the brackets!) The text should be enclosed in the appropriate + comment syntax for the file format. We also recommend that a + file or class name and description of purpose be included on the + same "printed page" as the copyright notice for easier + identification within third-party archives. 
+ + Copyright 2018-2020 Amazon.com, Inc. or its affiliates. All Rights Reserved. + + Licensed under the Apache License, Version 2.0 (the "License"); + you may not use this file except in compliance with the License. + You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + + Unless required by applicable law or agreed to in writing, software + distributed under the License is distributed on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + See the License for the specific language governing permissions and + limitations under the License. \ No newline at end of file diff --git a/amplify/functions/deleteDocument/node_modules/@smithy/property-provider/README.md b/amplify/functions/deleteDocument/node_modules/@smithy/property-provider/README.md new file mode 100644 index 0000000..b35fafb --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@smithy/property-provider/README.md @@ -0,0 +1,10 @@ +# @smithy/property-provider + +[![NPM version](https://img.shields.io/npm/v/@smithy/property-provider/latest.svg)](https://www.npmjs.com/package/@smithy/property-provider) +[![NPM downloads](https://img.shields.io/npm/dm/@smithy/property-provider.svg)](https://www.npmjs.com/package/@smithy/property-provider) + +> An internal package + +## Usage + +You probably shouldn't, at least directly. 
diff --git a/amplify/functions/deleteDocument/node_modules/@smithy/property-provider/dist-cjs/CredentialsProviderError.js b/amplify/functions/deleteDocument/node_modules/@smithy/property-provider/dist-cjs/CredentialsProviderError.js new file mode 100644 index 0000000..532e610 --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@smithy/property-provider/dist-cjs/CredentialsProviderError.js @@ -0,0 +1 @@ +module.exports = require("./index.js"); \ No newline at end of file diff --git a/amplify/functions/deleteDocument/node_modules/@smithy/property-provider/dist-cjs/ProviderError.js b/amplify/functions/deleteDocument/node_modules/@smithy/property-provider/dist-cjs/ProviderError.js new file mode 100644 index 0000000..532e610 --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@smithy/property-provider/dist-cjs/ProviderError.js @@ -0,0 +1 @@ +module.exports = require("./index.js"); \ No newline at end of file diff --git a/amplify/functions/deleteDocument/node_modules/@smithy/property-provider/dist-cjs/TokenProviderError.js b/amplify/functions/deleteDocument/node_modules/@smithy/property-provider/dist-cjs/TokenProviderError.js new file mode 100644 index 0000000..532e610 --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@smithy/property-provider/dist-cjs/TokenProviderError.js @@ -0,0 +1 @@ +module.exports = require("./index.js"); \ No newline at end of file diff --git a/amplify/functions/deleteDocument/node_modules/@smithy/property-provider/dist-cjs/chain.js b/amplify/functions/deleteDocument/node_modules/@smithy/property-provider/dist-cjs/chain.js new file mode 100644 index 0000000..532e610 --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@smithy/property-provider/dist-cjs/chain.js @@ -0,0 +1 @@ +module.exports = require("./index.js"); \ No newline at end of file diff --git a/amplify/functions/deleteDocument/node_modules/@smithy/property-provider/dist-cjs/fromStatic.js 
b/amplify/functions/deleteDocument/node_modules/@smithy/property-provider/dist-cjs/fromStatic.js new file mode 100644 index 0000000..532e610 --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@smithy/property-provider/dist-cjs/fromStatic.js @@ -0,0 +1 @@ +module.exports = require("./index.js"); \ No newline at end of file diff --git a/amplify/functions/deleteDocument/node_modules/@smithy/property-provider/dist-cjs/index.js b/amplify/functions/deleteDocument/node_modules/@smithy/property-provider/dist-cjs/index.js new file mode 100644 index 0000000..b0fa627 --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@smithy/property-provider/dist-cjs/index.js @@ -0,0 +1,170 @@ +var __defProp = Object.defineProperty; +var __getOwnPropDesc = Object.getOwnPropertyDescriptor; +var __getOwnPropNames = Object.getOwnPropertyNames; +var __hasOwnProp = Object.prototype.hasOwnProperty; +var __name = (target, value) => __defProp(target, "name", { value, configurable: true }); +var __export = (target, all) => { + for (var name in all) + __defProp(target, name, { get: all[name], enumerable: true }); +}; +var __copyProps = (to, from, except, desc) => { + if (from && typeof from === "object" || typeof from === "function") { + for (let key of __getOwnPropNames(from)) + if (!__hasOwnProp.call(to, key) && key !== except) + __defProp(to, key, { get: () => from[key], enumerable: !(desc = __getOwnPropDesc(from, key)) || desc.enumerable }); + } + return to; +}; +var __toCommonJS = (mod) => __copyProps(__defProp({}, "__esModule", { value: true }), mod); + +// src/index.ts +var src_exports = {}; +__export(src_exports, { + CredentialsProviderError: () => CredentialsProviderError, + ProviderError: () => ProviderError, + TokenProviderError: () => TokenProviderError, + chain: () => chain, + fromStatic: () => fromStatic, + memoize: () => memoize +}); +module.exports = __toCommonJS(src_exports); + +// src/ProviderError.ts +var ProviderError = class _ProviderError extends 
Error { + constructor(message, options = true) { + let logger; + let tryNextLink = true; + if (typeof options === "boolean") { + logger = void 0; + tryNextLink = options; + } else if (options != null && typeof options === "object") { + logger = options.logger; + tryNextLink = options.tryNextLink ?? true; + } + super(message); + this.name = "ProviderError"; + this.tryNextLink = tryNextLink; + Object.setPrototypeOf(this, _ProviderError.prototype); + logger?.debug?.(`@smithy/property-provider ${tryNextLink ? "->" : "(!)"} ${message}`); + } + static { + __name(this, "ProviderError"); + } + /** + * @deprecated use new operator. + */ + static from(error, options = true) { + return Object.assign(new this(error.message, options), error); + } +}; + +// src/CredentialsProviderError.ts +var CredentialsProviderError = class _CredentialsProviderError extends ProviderError { + /** + * @override + */ + constructor(message, options = true) { + super(message, options); + this.name = "CredentialsProviderError"; + Object.setPrototypeOf(this, _CredentialsProviderError.prototype); + } + static { + __name(this, "CredentialsProviderError"); + } +}; + +// src/TokenProviderError.ts +var TokenProviderError = class _TokenProviderError extends ProviderError { + /** + * @override + */ + constructor(message, options = true) { + super(message, options); + this.name = "TokenProviderError"; + Object.setPrototypeOf(this, _TokenProviderError.prototype); + } + static { + __name(this, "TokenProviderError"); + } +}; + +// src/chain.ts +var chain = /* @__PURE__ */ __name((...providers) => async () => { + if (providers.length === 0) { + throw new ProviderError("No providers in chain"); + } + let lastProviderError; + for (const provider of providers) { + try { + const credentials = await provider(); + return credentials; + } catch (err) { + lastProviderError = err; + if (err?.tryNextLink) { + continue; + } + throw err; + } + } + throw lastProviderError; +}, "chain"); + +// src/fromStatic.ts +var 
fromStatic = /* @__PURE__ */ __name((staticValue) => () => Promise.resolve(staticValue), "fromStatic"); + +// src/memoize.ts +var memoize = /* @__PURE__ */ __name((provider, isExpired, requiresRefresh) => { + let resolved; + let pending; + let hasResult; + let isConstant = false; + const coalesceProvider = /* @__PURE__ */ __name(async () => { + if (!pending) { + pending = provider(); + } + try { + resolved = await pending; + hasResult = true; + isConstant = false; + } finally { + pending = void 0; + } + return resolved; + }, "coalesceProvider"); + if (isExpired === void 0) { + return async (options) => { + if (!hasResult || options?.forceRefresh) { + resolved = await coalesceProvider(); + } + return resolved; + }; + } + return async (options) => { + if (!hasResult || options?.forceRefresh) { + resolved = await coalesceProvider(); + } + if (isConstant) { + return resolved; + } + if (requiresRefresh && !requiresRefresh(resolved)) { + isConstant = true; + return resolved; + } + if (isExpired(resolved)) { + await coalesceProvider(); + return resolved; + } + return resolved; + }; +}, "memoize"); +// Annotate the CommonJS export names for ESM import in node: + +0 && (module.exports = { + CredentialsProviderError, + ProviderError, + TokenProviderError, + chain, + fromStatic, + memoize +}); + diff --git a/amplify/functions/deleteDocument/node_modules/@smithy/property-provider/dist-cjs/memoize.js b/amplify/functions/deleteDocument/node_modules/@smithy/property-provider/dist-cjs/memoize.js new file mode 100644 index 0000000..532e610 --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@smithy/property-provider/dist-cjs/memoize.js @@ -0,0 +1 @@ +module.exports = require("./index.js"); \ No newline at end of file diff --git a/amplify/functions/deleteDocument/node_modules/@smithy/property-provider/dist-es/CredentialsProviderError.js b/amplify/functions/deleteDocument/node_modules/@smithy/property-provider/dist-es/CredentialsProviderError.js new file mode 100644 
index 0000000..cec1f9e --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@smithy/property-provider/dist-es/CredentialsProviderError.js @@ -0,0 +1,8 @@ +import { ProviderError } from "./ProviderError"; +export class CredentialsProviderError extends ProviderError { + constructor(message, options = true) { + super(message, options); + this.name = "CredentialsProviderError"; + Object.setPrototypeOf(this, CredentialsProviderError.prototype); + } +} diff --git a/amplify/functions/deleteDocument/node_modules/@smithy/property-provider/dist-es/ProviderError.js b/amplify/functions/deleteDocument/node_modules/@smithy/property-provider/dist-es/ProviderError.js new file mode 100644 index 0000000..e0db2b8 --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@smithy/property-provider/dist-es/ProviderError.js @@ -0,0 +1,22 @@ +export class ProviderError extends Error { + constructor(message, options = true) { + let logger; + let tryNextLink = true; + if (typeof options === "boolean") { + logger = undefined; + tryNextLink = options; + } + else if (options != null && typeof options === "object") { + logger = options.logger; + tryNextLink = options.tryNextLink ?? true; + } + super(message); + this.name = "ProviderError"; + this.tryNextLink = tryNextLink; + Object.setPrototypeOf(this, ProviderError.prototype); + logger?.debug?.(`@smithy/property-provider ${tryNextLink ? 
"->" : "(!)"} ${message}`); + } + static from(error, options = true) { + return Object.assign(new this(error.message, options), error); + } +} diff --git a/amplify/functions/deleteDocument/node_modules/@smithy/property-provider/dist-es/TokenProviderError.js b/amplify/functions/deleteDocument/node_modules/@smithy/property-provider/dist-es/TokenProviderError.js new file mode 100644 index 0000000..f0e75b1 --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@smithy/property-provider/dist-es/TokenProviderError.js @@ -0,0 +1,8 @@ +import { ProviderError } from "./ProviderError"; +export class TokenProviderError extends ProviderError { + constructor(message, options = true) { + super(message, options); + this.name = "TokenProviderError"; + Object.setPrototypeOf(this, TokenProviderError.prototype); + } +} diff --git a/amplify/functions/deleteDocument/node_modules/@smithy/property-provider/dist-es/chain.js b/amplify/functions/deleteDocument/node_modules/@smithy/property-provider/dist-es/chain.js new file mode 100644 index 0000000..c389f7f --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@smithy/property-provider/dist-es/chain.js @@ -0,0 +1,21 @@ +import { ProviderError } from "./ProviderError"; +export const chain = (...providers) => async () => { + if (providers.length === 0) { + throw new ProviderError("No providers in chain"); + } + let lastProviderError; + for (const provider of providers) { + try { + const credentials = await provider(); + return credentials; + } + catch (err) { + lastProviderError = err; + if (err?.tryNextLink) { + continue; + } + throw err; + } + } + throw lastProviderError; +}; diff --git a/amplify/functions/deleteDocument/node_modules/@smithy/property-provider/dist-es/fromStatic.js b/amplify/functions/deleteDocument/node_modules/@smithy/property-provider/dist-es/fromStatic.js new file mode 100644 index 0000000..67da7a7 --- /dev/null +++ 
b/amplify/functions/deleteDocument/node_modules/@smithy/property-provider/dist-es/fromStatic.js @@ -0,0 +1 @@ +export const fromStatic = (staticValue) => () => Promise.resolve(staticValue); diff --git a/amplify/functions/deleteDocument/node_modules/@smithy/property-provider/dist-es/index.js b/amplify/functions/deleteDocument/node_modules/@smithy/property-provider/dist-es/index.js new file mode 100644 index 0000000..15d14e5 --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@smithy/property-provider/dist-es/index.js @@ -0,0 +1,6 @@ +export * from "./CredentialsProviderError"; +export * from "./ProviderError"; +export * from "./TokenProviderError"; +export * from "./chain"; +export * from "./fromStatic"; +export * from "./memoize"; diff --git a/amplify/functions/deleteDocument/node_modules/@smithy/property-provider/dist-es/memoize.js b/amplify/functions/deleteDocument/node_modules/@smithy/property-provider/dist-es/memoize.js new file mode 100644 index 0000000..e04839a --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@smithy/property-provider/dist-es/memoize.js @@ -0,0 +1,45 @@ +export const memoize = (provider, isExpired, requiresRefresh) => { + let resolved; + let pending; + let hasResult; + let isConstant = false; + const coalesceProvider = async () => { + if (!pending) { + pending = provider(); + } + try { + resolved = await pending; + hasResult = true; + isConstant = false; + } + finally { + pending = undefined; + } + return resolved; + }; + if (isExpired === undefined) { + return async (options) => { + if (!hasResult || options?.forceRefresh) { + resolved = await coalesceProvider(); + } + return resolved; + }; + } + return async (options) => { + if (!hasResult || options?.forceRefresh) { + resolved = await coalesceProvider(); + } + if (isConstant) { + return resolved; + } + if (requiresRefresh && !requiresRefresh(resolved)) { + isConstant = true; + return resolved; + } + if (isExpired(resolved)) { + await coalesceProvider(); + 
return resolved; + } + return resolved; + }; +}; diff --git a/amplify/functions/deleteDocument/node_modules/@smithy/property-provider/dist-types/CredentialsProviderError.d.ts b/amplify/functions/deleteDocument/node_modules/@smithy/property-provider/dist-types/CredentialsProviderError.d.ts new file mode 100644 index 0000000..7955dc1 --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@smithy/property-provider/dist-types/CredentialsProviderError.d.ts @@ -0,0 +1,30 @@ +import { ProviderError, ProviderErrorOptionsType } from "./ProviderError"; +/** + * @public + * + * An error representing a failure of an individual credential provider. + * + * This error class has special meaning to the {@link chain} method. If a + * provider in the chain is rejected with an error, the chain will only proceed + * to the next provider if the value of the `tryNextLink` property on the error + * is truthy. This allows individual providers to halt the chain and also + * ensures the chain will stop if an entirely unexpected error is encountered. + */ +export declare class CredentialsProviderError extends ProviderError { + name: string; + /** + * @override + * @deprecated constructor should be given a logger. + */ + constructor(message: string); + /** + * @override + * @deprecated constructor should be given a logger. + */ + constructor(message: string, tryNextLink: boolean | undefined); + /** + * @override + * This signature is preferred for logging capability. 
+ */ + constructor(message: string, options: ProviderErrorOptionsType); +} diff --git a/amplify/functions/deleteDocument/node_modules/@smithy/property-provider/dist-types/ProviderError.d.ts b/amplify/functions/deleteDocument/node_modules/@smithy/property-provider/dist-types/ProviderError.d.ts new file mode 100644 index 0000000..b87b014 --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@smithy/property-provider/dist-types/ProviderError.d.ts @@ -0,0 +1,39 @@ +import { Logger } from "@smithy/types"; +/** + * @public + */ +export type ProviderErrorOptionsType = { + tryNextLink?: boolean | undefined; + logger?: Logger; +}; +/** + * @public + * + * An error representing a failure of an individual provider. + * + * This error class has special meaning to the {@link chain} method. If a + * provider in the chain is rejected with an error, the chain will only proceed + * to the next provider if the value of the `tryNextLink` property on the error + * is truthy. This allows individual providers to halt the chain and also + * ensures the chain will stop if an entirely unexpected error is encountered. + */ +export declare class ProviderError extends Error { + name: string; + readonly tryNextLink: boolean; + /** + * @deprecated constructor should be given a logger. + */ + constructor(message: string); + /** + * @deprecated constructor should be given a logger. + */ + constructor(message: string, tryNextLink: boolean | undefined); + /** + * This signature is preferred for logging capability. + */ + constructor(message: string, options: ProviderErrorOptionsType); + /** + * @deprecated use new operator. 
+ */ + static from(error: Error, options?: boolean | ProviderErrorOptionsType): ProviderError; +} diff --git a/amplify/functions/deleteDocument/node_modules/@smithy/property-provider/dist-types/TokenProviderError.d.ts b/amplify/functions/deleteDocument/node_modules/@smithy/property-provider/dist-types/TokenProviderError.d.ts new file mode 100644 index 0000000..a2f9dd6 --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@smithy/property-provider/dist-types/TokenProviderError.d.ts @@ -0,0 +1,30 @@ +import { ProviderError, ProviderErrorOptionsType } from "./ProviderError"; +/** + * @public + * + * An error representing a failure of an individual token provider. + * + * This error class has special meaning to the {@link chain} method. If a + * provider in the chain is rejected with an error, the chain will only proceed + * to the next provider if the value of the `tryNextLink` property on the error + * is truthy. This allows individual providers to halt the chain and also + * ensures the chain will stop if an entirely unexpected error is encountered. + */ +export declare class TokenProviderError extends ProviderError { + name: string; + /** + * @override + * @deprecated constructor should be given a logger. + */ + constructor(message: string); + /** + * @override + * @deprecated constructor should be given a logger. + */ + constructor(message: string, tryNextLink: boolean | undefined); + /** + * @override + * This signature is preferred for logging capability. 
+ */ + constructor(message: string, options: ProviderErrorOptionsType); +} diff --git a/amplify/functions/deleteDocument/node_modules/@smithy/property-provider/dist-types/chain.d.ts b/amplify/functions/deleteDocument/node_modules/@smithy/property-provider/dist-types/chain.d.ts new file mode 100644 index 0000000..168df5c --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@smithy/property-provider/dist-types/chain.d.ts @@ -0,0 +1,13 @@ +import { Provider } from "@smithy/types"; +/** + * @internal + * + * Compose a single credential provider function from multiple credential + * providers. The first provider in the argument list will always be invoked; + * subsequent providers in the list will be invoked in the order in which the + * were received if the preceding provider did not successfully resolve. + * + * If no providers were received or no provider resolves successfully, the + * returned promise will be rejected. + */ +export declare const chain: (...providers: Provider[]) => Provider; diff --git a/amplify/functions/deleteDocument/node_modules/@smithy/property-provider/dist-types/fromStatic.d.ts b/amplify/functions/deleteDocument/node_modules/@smithy/property-provider/dist-types/fromStatic.d.ts new file mode 100644 index 0000000..f58bece --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@smithy/property-provider/dist-types/fromStatic.d.ts @@ -0,0 +1,5 @@ +import { Provider } from "@smithy/types"; +/** + * @internal + */ +export declare const fromStatic: (staticValue: T) => Provider; diff --git a/amplify/functions/deleteDocument/node_modules/@smithy/property-provider/dist-types/index.d.ts b/amplify/functions/deleteDocument/node_modules/@smithy/property-provider/dist-types/index.d.ts new file mode 100644 index 0000000..6326994 --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@smithy/property-provider/dist-types/index.d.ts @@ -0,0 +1,24 @@ +/** + * @internal + */ +export * from "./CredentialsProviderError"; +/** + * 
@internal + */ +export * from "./ProviderError"; +/** + * @internal + */ +export * from "./TokenProviderError"; +/** + * @internal + */ +export * from "./chain"; +/** + * @internal + */ +export * from "./fromStatic"; +/** + * @internal + */ +export * from "./memoize"; diff --git a/amplify/functions/deleteDocument/node_modules/@smithy/property-provider/dist-types/memoize.d.ts b/amplify/functions/deleteDocument/node_modules/@smithy/property-provider/dist-types/memoize.d.ts new file mode 100644 index 0000000..ce197c0 --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@smithy/property-provider/dist-types/memoize.d.ts @@ -0,0 +1,40 @@ +import { MemoizedProvider, Provider } from "@smithy/types"; +interface MemoizeOverload { + /** + * + * Decorates a provider function with either static memoization. + * + * To create a statically memoized provider, supply a provider as the only + * argument to this function. The provider will be invoked once, and all + * invocations of the provider returned by `memoize` will return the same + * promise object. + * + * @param provider The provider whose result should be cached indefinitely. + */ + (provider: Provider): MemoizedProvider; + /** + * Decorates a provider function with refreshing memoization. + * + * @param provider The provider whose result should be cached. + * @param isExpired A function that will evaluate the resolved value and + * determine if it is expired. For example, when + * memoizing AWS credential providers, this function + * should return `true` when the credential's + * expiration is in the past (or very near future) and + * `false` otherwise. + * @param requiresRefresh A function that will evaluate the resolved value and + * determine if it represents static value or one that + * will eventually need to be refreshed. 
For example, + * AWS credentials that have no defined expiration will + * never need to be refreshed, so this function would + * return `true` if the credentials resolved by the + * underlying provider had an expiration and `false` + * otherwise. + */ + (provider: Provider, isExpired: (resolved: T) => boolean, requiresRefresh?: (resolved: T) => boolean): MemoizedProvider; +} +/** + * @internal + */ +export declare const memoize: MemoizeOverload; +export {}; diff --git a/amplify/functions/deleteDocument/node_modules/@smithy/property-provider/dist-types/ts3.4/CredentialsProviderError.d.ts b/amplify/functions/deleteDocument/node_modules/@smithy/property-provider/dist-types/ts3.4/CredentialsProviderError.d.ts new file mode 100644 index 0000000..11e4aea --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@smithy/property-provider/dist-types/ts3.4/CredentialsProviderError.d.ts @@ -0,0 +1,30 @@ +import { ProviderError, ProviderErrorOptionsType } from "./ProviderError"; +/** + * @public + * + * An error representing a failure of an individual credential provider. + * + * This error class has special meaning to the {@link chain} method. If a + * provider in the chain is rejected with an error, the chain will only proceed + * to the next provider if the value of the `tryNextLink` property on the error + * is truthy. This allows individual providers to halt the chain and also + * ensures the chain will stop if an entirely unexpected error is encountered. + */ +export declare class CredentialsProviderError extends ProviderError { + name: string; + /** + * @override + * @deprecated constructor should be given a logger. + */ + constructor(message: string); + /** + * @override + * @deprecated constructor should be given a logger. + */ + constructor(message: string, tryNextLink: boolean | undefined); + /** + * @override + * This signature is preferred for logging capability. 
+ */ + constructor(message: string, options: ProviderErrorOptionsType); +} diff --git a/amplify/functions/deleteDocument/node_modules/@smithy/property-provider/dist-types/ts3.4/ProviderError.d.ts b/amplify/functions/deleteDocument/node_modules/@smithy/property-provider/dist-types/ts3.4/ProviderError.d.ts new file mode 100644 index 0000000..daf499c --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@smithy/property-provider/dist-types/ts3.4/ProviderError.d.ts @@ -0,0 +1,39 @@ +import { Logger } from "@smithy/types"; +/** + * @public + */ +export type ProviderErrorOptionsType = { + tryNextLink?: boolean | undefined; + logger?: Logger; +}; +/** + * @public + * + * An error representing a failure of an individual provider. + * + * This error class has special meaning to the {@link chain} method. If a + * provider in the chain is rejected with an error, the chain will only proceed + * to the next provider if the value of the `tryNextLink` property on the error + * is truthy. This allows individual providers to halt the chain and also + * ensures the chain will stop if an entirely unexpected error is encountered. + */ +export declare class ProviderError extends Error { + name: string; + readonly tryNextLink: boolean; + /** + * @deprecated constructor should be given a logger. + */ + constructor(message: string); + /** + * @deprecated constructor should be given a logger. + */ + constructor(message: string, tryNextLink: boolean | undefined); + /** + * This signature is preferred for logging capability. + */ + constructor(message: string, options: ProviderErrorOptionsType); + /** + * @deprecated use new operator. 
+ */ + static from(error: Error, options?: boolean | ProviderErrorOptionsType): ProviderError; +} diff --git a/amplify/functions/deleteDocument/node_modules/@smithy/property-provider/dist-types/ts3.4/TokenProviderError.d.ts b/amplify/functions/deleteDocument/node_modules/@smithy/property-provider/dist-types/ts3.4/TokenProviderError.d.ts new file mode 100644 index 0000000..6f67fd5 --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@smithy/property-provider/dist-types/ts3.4/TokenProviderError.d.ts @@ -0,0 +1,30 @@ +import { ProviderError, ProviderErrorOptionsType } from "./ProviderError"; +/** + * @public + * + * An error representing a failure of an individual token provider. + * + * This error class has special meaning to the {@link chain} method. If a + * provider in the chain is rejected with an error, the chain will only proceed + * to the next provider if the value of the `tryNextLink` property on the error + * is truthy. This allows individual providers to halt the chain and also + * ensures the chain will stop if an entirely unexpected error is encountered. + */ +export declare class TokenProviderError extends ProviderError { + name: string; + /** + * @override + * @deprecated constructor should be given a logger. + */ + constructor(message: string); + /** + * @override + * @deprecated constructor should be given a logger. + */ + constructor(message: string, tryNextLink: boolean | undefined); + /** + * @override + * This signature is preferred for logging capability. 
+ */ + constructor(message: string, options: ProviderErrorOptionsType); +} diff --git a/amplify/functions/deleteDocument/node_modules/@smithy/property-provider/dist-types/ts3.4/chain.d.ts b/amplify/functions/deleteDocument/node_modules/@smithy/property-provider/dist-types/ts3.4/chain.d.ts new file mode 100644 index 0000000..44390b8 --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@smithy/property-provider/dist-types/ts3.4/chain.d.ts @@ -0,0 +1,13 @@ +import { Provider } from "@smithy/types"; +/** + * @internal + * + * Compose a single credential provider function from multiple credential + * providers. The first provider in the argument list will always be invoked; + * subsequent providers in the list will be invoked in the order in which the + * were received if the preceding provider did not successfully resolve. + * + * If no providers were received or no provider resolves successfully, the + * returned promise will be rejected. + */ +export declare const chain: (...providers: Provider[]) => Provider; diff --git a/amplify/functions/deleteDocument/node_modules/@smithy/property-provider/dist-types/ts3.4/fromStatic.d.ts b/amplify/functions/deleteDocument/node_modules/@smithy/property-provider/dist-types/ts3.4/fromStatic.d.ts new file mode 100644 index 0000000..0df6309 --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@smithy/property-provider/dist-types/ts3.4/fromStatic.d.ts @@ -0,0 +1,5 @@ +import { Provider } from "@smithy/types"; +/** + * @internal + */ +export declare const fromStatic: (staticValue: T) => Provider; diff --git a/amplify/functions/deleteDocument/node_modules/@smithy/property-provider/dist-types/ts3.4/index.d.ts b/amplify/functions/deleteDocument/node_modules/@smithy/property-provider/dist-types/ts3.4/index.d.ts new file mode 100644 index 0000000..e28099d --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@smithy/property-provider/dist-types/ts3.4/index.d.ts @@ -0,0 +1,24 @@ +/** + * @internal + */ 
+export * from "./CredentialsProviderError"; +/** + * @internal + */ +export * from "./ProviderError"; +/** + * @internal + */ +export * from "./TokenProviderError"; +/** + * @internal + */ +export * from "./chain"; +/** + * @internal + */ +export * from "./fromStatic"; +/** + * @internal + */ +export * from "./memoize"; diff --git a/amplify/functions/deleteDocument/node_modules/@smithy/property-provider/dist-types/ts3.4/memoize.d.ts b/amplify/functions/deleteDocument/node_modules/@smithy/property-provider/dist-types/ts3.4/memoize.d.ts new file mode 100644 index 0000000..29ce53d --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@smithy/property-provider/dist-types/ts3.4/memoize.d.ts @@ -0,0 +1,40 @@ +import { MemoizedProvider, Provider } from "@smithy/types"; +interface MemoizeOverload { + /** + * + * Decorates a provider function with either static memoization. + * + * To create a statically memoized provider, supply a provider as the only + * argument to this function. The provider will be invoked once, and all + * invocations of the provider returned by `memoize` will return the same + * promise object. + * + * @param provider The provider whose result should be cached indefinitely. + */ + (provider: Provider): MemoizedProvider; + /** + * Decorates a provider function with refreshing memoization. + * + * @param provider The provider whose result should be cached. + * @param isExpired A function that will evaluate the resolved value and + * determine if it is expired. For example, when + * memoizing AWS credential providers, this function + * should return `true` when the credential's + * expiration is in the past (or very near future) and + * `false` otherwise. + * @param requiresRefresh A function that will evaluate the resolved value and + * determine if it represents static value or one that + * will eventually need to be refreshed. 
For example, + * AWS credentials that have no defined expiration will + * never need to be refreshed, so this function would + * return `true` if the credentials resolved by the + * underlying provider had an expiration and `false` + * otherwise. + */ + (provider: Provider, isExpired: (resolved: T) => boolean, requiresRefresh?: (resolved: T) => boolean): MemoizedProvider; +} +/** + * @internal + */ +export declare const memoize: MemoizeOverload; +export {}; diff --git a/amplify/functions/deleteDocument/node_modules/@smithy/property-provider/package.json b/amplify/functions/deleteDocument/node_modules/@smithy/property-provider/package.json new file mode 100644 index 0000000..b2e7fc6 --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@smithy/property-provider/package.json @@ -0,0 +1,60 @@ +{ + "name": "@smithy/property-provider", + "version": "4.0.2", + "scripts": { + "build": "concurrently 'yarn:build:cjs' 'yarn:build:es' 'yarn:build:types && yarn build:types:downlevel'", + "build:cjs": "node ../../scripts/inline property-provider", + "build:es": "yarn g:tsc -p tsconfig.es.json", + "build:types": "yarn g:tsc -p tsconfig.types.json", + "build:types:downlevel": "rimraf dist-types/ts3.4 && downlevel-dts dist-types dist-types/ts3.4", + "stage-release": "rimraf ./.release && yarn pack && mkdir ./.release && tar zxvf ./package.tgz --directory ./.release && rm ./package.tgz", + "clean": "rimraf ./dist-* && rimraf *.tsbuildinfo || exit 0", + "lint": "eslint -c ../../.eslintrc.js \"src/**/*.ts\"", + "format": "prettier --config ../../prettier.config.js --ignore-path ../../.prettierignore --write \"**/*.{ts,md,json}\"", + "test": "yarn g:vitest run", + "test:watch": "yarn g:vitest watch" + }, + "main": "./dist-cjs/index.js", + "module": "./dist-es/index.js", + "types": "./dist-types/index.d.ts", + "author": { + "name": "AWS SDK for JavaScript Team", + "url": "https://aws.amazon.com/javascript/" + }, + "license": "Apache-2.0", + "dependencies": { + 
"@smithy/types": "^4.2.0", + "tslib": "^2.6.2" + }, + "engines": { + "node": ">=18.0.0" + }, + "typesVersions": { + "<4.0": { + "dist-types/*": [ + "dist-types/ts3.4/*" + ] + } + }, + "files": [ + "dist-*/**" + ], + "homepage": "https://github.com/smithy-lang/smithy-typescript/tree/main/packages/property-provider", + "repository": { + "type": "git", + "url": "https://github.com/smithy-lang/smithy-typescript.git", + "directory": "packages/property-provider" + }, + "devDependencies": { + "concurrently": "7.0.0", + "downlevel-dts": "0.10.1", + "rimraf": "3.0.2", + "typedoc": "0.23.23" + }, + "typedoc": { + "entryPoint": "src/index.ts" + }, + "publishConfig": { + "directory": ".release/package" + } +} \ No newline at end of file diff --git a/amplify/functions/deleteDocument/node_modules/@smithy/protocol-http/LICENSE b/amplify/functions/deleteDocument/node_modules/@smithy/protocol-http/LICENSE new file mode 100644 index 0000000..e907b58 --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@smithy/protocol-http/LICENSE @@ -0,0 +1,201 @@ + Apache License + Version 2.0, January 2004 + http://www.apache.org/licenses/ + + TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION + + 1. Definitions. + + "License" shall mean the terms and conditions for use, reproduction, + and distribution as defined by Sections 1 through 9 of this document. + + "Licensor" shall mean the copyright owner or entity authorized by + the copyright owner that is granting the License. + + "Legal Entity" shall mean the union of the acting entity and all + other entities that control, are controlled by, or are under common + control with that entity. For the purposes of this definition, + "control" means (i) the power, direct or indirect, to cause the + direction or management of such entity, whether by contract or + otherwise, or (ii) ownership of fifty percent (50%) or more of the + outstanding shares, or (iii) beneficial ownership of such entity. 
+ + "You" (or "Your") shall mean an individual or Legal Entity + exercising permissions granted by this License. + + "Source" form shall mean the preferred form for making modifications, + including but not limited to software source code, documentation + source, and configuration files. + + "Object" form shall mean any form resulting from mechanical + transformation or translation of a Source form, including but + not limited to compiled object code, generated documentation, + and conversions to other media types. + + "Work" shall mean the work of authorship, whether in Source or + Object form, made available under the License, as indicated by a + copyright notice that is included in or attached to the work + (an example is provided in the Appendix below). + + "Derivative Works" shall mean any work, whether in Source or Object + form, that is based on (or derived from) the Work and for which the + editorial revisions, annotations, elaborations, or other modifications + represent, as a whole, an original work of authorship. For the purposes + of this License, Derivative Works shall not include works that remain + separable from, or merely link (or bind by name) to the interfaces of, + the Work and Derivative Works thereof. + + "Contribution" shall mean any work of authorship, including + the original version of the Work and any modifications or additions + to that Work or Derivative Works thereof, that is intentionally + submitted to Licensor for inclusion in the Work by the copyright owner + or by an individual or Legal Entity authorized to submit on behalf of + the copyright owner. 
For the purposes of this definition, "submitted" + means any form of electronic, verbal, or written communication sent + to the Licensor or its representatives, including but not limited to + communication on electronic mailing lists, source code control systems, + and issue tracking systems that are managed by, or on behalf of, the + Licensor for the purpose of discussing and improving the Work, but + excluding communication that is conspicuously marked or otherwise + designated in writing by the copyright owner as "Not a Contribution." + + "Contributor" shall mean Licensor and any individual or Legal Entity + on behalf of whom a Contribution has been received by Licensor and + subsequently incorporated within the Work. + + 2. Grant of Copyright License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + copyright license to reproduce, prepare Derivative Works of, + publicly display, publicly perform, sublicense, and distribute the + Work and such Derivative Works in Source or Object form. + + 3. Grant of Patent License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + (except as stated in this section) patent license to make, have made, + use, offer to sell, sell, import, and otherwise transfer the Work, + where such license applies only to those patent claims licensable + by such Contributor that are necessarily infringed by their + Contribution(s) alone or by combination of their Contribution(s) + with the Work to which such Contribution(s) was submitted. 
If You + institute patent litigation against any entity (including a + cross-claim or counterclaim in a lawsuit) alleging that the Work + or a Contribution incorporated within the Work constitutes direct + or contributory patent infringement, then any patent licenses + granted to You under this License for that Work shall terminate + as of the date such litigation is filed. + + 4. Redistribution. You may reproduce and distribute copies of the + Work or Derivative Works thereof in any medium, with or without + modifications, and in Source or Object form, provided that You + meet the following conditions: + + (a) You must give any other recipients of the Work or + Derivative Works a copy of this License; and + + (b) You must cause any modified files to carry prominent notices + stating that You changed the files; and + + (c) You must retain, in the Source form of any Derivative Works + that You distribute, all copyright, patent, trademark, and + attribution notices from the Source form of the Work, + excluding those notices that do not pertain to any part of + the Derivative Works; and + + (d) If the Work includes a "NOTICE" text file as part of its + distribution, then any Derivative Works that You distribute must + include a readable copy of the attribution notices contained + within such NOTICE file, excluding those notices that do not + pertain to any part of the Derivative Works, in at least one + of the following places: within a NOTICE text file distributed + as part of the Derivative Works; within the Source form or + documentation, if provided along with the Derivative Works; or, + within a display generated by the Derivative Works, if and + wherever such third-party notices normally appear. The contents + of the NOTICE file are for informational purposes only and + do not modify the License. 
You may add Your own attribution + notices within Derivative Works that You distribute, alongside + or as an addendum to the NOTICE text from the Work, provided + that such additional attribution notices cannot be construed + as modifying the License. + + You may add Your own copyright statement to Your modifications and + may provide additional or different license terms and conditions + for use, reproduction, or distribution of Your modifications, or + for any such Derivative Works as a whole, provided Your use, + reproduction, and distribution of the Work otherwise complies with + the conditions stated in this License. + + 5. Submission of Contributions. Unless You explicitly state otherwise, + any Contribution intentionally submitted for inclusion in the Work + by You to the Licensor shall be under the terms and conditions of + this License, without any additional terms or conditions. + Notwithstanding the above, nothing herein shall supersede or modify + the terms of any separate license agreement you may have executed + with Licensor regarding such Contributions. + + 6. Trademarks. This License does not grant permission to use the trade + names, trademarks, service marks, or product names of the Licensor, + except as required for reasonable and customary use in describing the + origin of the Work and reproducing the content of the NOTICE file. + + 7. Disclaimer of Warranty. Unless required by applicable law or + agreed to in writing, Licensor provides the Work (and each + Contributor provides its Contributions) on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or + implied, including, without limitation, any warranties or conditions + of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A + PARTICULAR PURPOSE. You are solely responsible for determining the + appropriateness of using or redistributing the Work and assume any + risks associated with Your exercise of permissions under this License. + + 8. 
Limitation of Liability. In no event and under no legal theory, + whether in tort (including negligence), contract, or otherwise, + unless required by applicable law (such as deliberate and grossly + negligent acts) or agreed to in writing, shall any Contributor be + liable to You for damages, including any direct, indirect, special, + incidental, or consequential damages of any character arising as a + result of this License or out of the use or inability to use the + Work (including but not limited to damages for loss of goodwill, + work stoppage, computer failure or malfunction, or any and all + other commercial damages or losses), even if such Contributor + has been advised of the possibility of such damages. + + 9. Accepting Warranty or Additional Liability. While redistributing + the Work or Derivative Works thereof, You may choose to offer, + and charge a fee for, acceptance of support, warranty, indemnity, + or other liability obligations and/or rights consistent with this + License. However, in accepting such obligations, You may act only + on Your own behalf and on Your sole responsibility, not on behalf + of any other Contributor, and only if You agree to indemnify, + defend, and hold each Contributor harmless for any liability + incurred by, or claims asserted against, such Contributor by reason + of your accepting any such warranty or additional liability. + + END OF TERMS AND CONDITIONS + + APPENDIX: How to apply the Apache License to your work. + + To apply the Apache License to your work, attach the following + boilerplate notice, with the fields enclosed by brackets "{}" + replaced with your own identifying information. (Don't include + the brackets!) The text should be enclosed in the appropriate + comment syntax for the file format. We also recommend that a + file or class name and description of purpose be included on the + same "printed page" as the copyright notice for easier + identification within third-party archives. 
+ + Copyright 2019 Amazon.com, Inc. or its affiliates. All Rights Reserved. + + Licensed under the Apache License, Version 2.0 (the "License"); + you may not use this file except in compliance with the License. + You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + + Unless required by applicable law or agreed to in writing, software + distributed under the License is distributed on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + See the License for the specific language governing permissions and + limitations under the License. diff --git a/amplify/functions/deleteDocument/node_modules/@smithy/protocol-http/README.md b/amplify/functions/deleteDocument/node_modules/@smithy/protocol-http/README.md new file mode 100644 index 0000000..a547ab0 --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@smithy/protocol-http/README.md @@ -0,0 +1,4 @@ +# @smithy/protocol-http + +[![NPM version](https://img.shields.io/npm/v/@smithy/protocol-http/latest.svg)](https://www.npmjs.com/package/@smithy/protocol-http) +[![NPM downloads](https://img.shields.io/npm/dm/@smithy/protocol-http.svg)](https://www.npmjs.com/package/@smithy/protocol-http) diff --git a/amplify/functions/deleteDocument/node_modules/@smithy/protocol-http/dist-cjs/Field.js b/amplify/functions/deleteDocument/node_modules/@smithy/protocol-http/dist-cjs/Field.js new file mode 100644 index 0000000..532e610 --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@smithy/protocol-http/dist-cjs/Field.js @@ -0,0 +1 @@ +module.exports = require("./index.js"); \ No newline at end of file diff --git a/amplify/functions/deleteDocument/node_modules/@smithy/protocol-http/dist-cjs/Fields.js b/amplify/functions/deleteDocument/node_modules/@smithy/protocol-http/dist-cjs/Fields.js new file mode 100644 index 0000000..532e610 --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@smithy/protocol-http/dist-cjs/Fields.js 
@@ -0,0 +1 @@ +module.exports = require("./index.js"); \ No newline at end of file diff --git a/amplify/functions/deleteDocument/node_modules/@smithy/protocol-http/dist-cjs/extensions/httpExtensionConfiguration.js b/amplify/functions/deleteDocument/node_modules/@smithy/protocol-http/dist-cjs/extensions/httpExtensionConfiguration.js new file mode 100644 index 0000000..0440577 --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@smithy/protocol-http/dist-cjs/extensions/httpExtensionConfiguration.js @@ -0,0 +1 @@ +module.exports = require("../index.js"); \ No newline at end of file diff --git a/amplify/functions/deleteDocument/node_modules/@smithy/protocol-http/dist-cjs/extensions/index.js b/amplify/functions/deleteDocument/node_modules/@smithy/protocol-http/dist-cjs/extensions/index.js new file mode 100644 index 0000000..0440577 --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@smithy/protocol-http/dist-cjs/extensions/index.js @@ -0,0 +1 @@ +module.exports = require("../index.js"); \ No newline at end of file diff --git a/amplify/functions/deleteDocument/node_modules/@smithy/protocol-http/dist-cjs/httpHandler.js b/amplify/functions/deleteDocument/node_modules/@smithy/protocol-http/dist-cjs/httpHandler.js new file mode 100644 index 0000000..532e610 --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@smithy/protocol-http/dist-cjs/httpHandler.js @@ -0,0 +1 @@ +module.exports = require("./index.js"); \ No newline at end of file diff --git a/amplify/functions/deleteDocument/node_modules/@smithy/protocol-http/dist-cjs/httpRequest.js b/amplify/functions/deleteDocument/node_modules/@smithy/protocol-http/dist-cjs/httpRequest.js new file mode 100644 index 0000000..532e610 --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@smithy/protocol-http/dist-cjs/httpRequest.js @@ -0,0 +1 @@ +module.exports = require("./index.js"); \ No newline at end of file diff --git 
a/amplify/functions/deleteDocument/node_modules/@smithy/protocol-http/dist-cjs/httpResponse.js b/amplify/functions/deleteDocument/node_modules/@smithy/protocol-http/dist-cjs/httpResponse.js new file mode 100644 index 0000000..532e610 --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@smithy/protocol-http/dist-cjs/httpResponse.js @@ -0,0 +1 @@ +module.exports = require("./index.js"); \ No newline at end of file diff --git a/amplify/functions/deleteDocument/node_modules/@smithy/protocol-http/dist-cjs/index.js b/amplify/functions/deleteDocument/node_modules/@smithy/protocol-http/dist-cjs/index.js new file mode 100644 index 0000000..df37109 --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@smithy/protocol-http/dist-cjs/index.js @@ -0,0 +1,262 @@ +var __defProp = Object.defineProperty; +var __getOwnPropDesc = Object.getOwnPropertyDescriptor; +var __getOwnPropNames = Object.getOwnPropertyNames; +var __hasOwnProp = Object.prototype.hasOwnProperty; +var __name = (target, value) => __defProp(target, "name", { value, configurable: true }); +var __export = (target, all) => { + for (var name in all) + __defProp(target, name, { get: all[name], enumerable: true }); +}; +var __copyProps = (to, from, except, desc) => { + if (from && typeof from === "object" || typeof from === "function") { + for (let key of __getOwnPropNames(from)) + if (!__hasOwnProp.call(to, key) && key !== except) + __defProp(to, key, { get: () => from[key], enumerable: !(desc = __getOwnPropDesc(from, key)) || desc.enumerable }); + } + return to; +}; +var __toCommonJS = (mod) => __copyProps(__defProp({}, "__esModule", { value: true }), mod); + +// src/index.ts +var src_exports = {}; +__export(src_exports, { + Field: () => Field, + Fields: () => Fields, + HttpRequest: () => HttpRequest, + HttpResponse: () => HttpResponse, + IHttpRequest: () => import_types.HttpRequest, + getHttpHandlerExtensionConfiguration: () => getHttpHandlerExtensionConfiguration, + isValidHostname: () => 
isValidHostname, + resolveHttpHandlerRuntimeConfig: () => resolveHttpHandlerRuntimeConfig +}); +module.exports = __toCommonJS(src_exports); + +// src/extensions/httpExtensionConfiguration.ts +var getHttpHandlerExtensionConfiguration = /* @__PURE__ */ __name((runtimeConfig) => { + return { + setHttpHandler(handler) { + runtimeConfig.httpHandler = handler; + }, + httpHandler() { + return runtimeConfig.httpHandler; + }, + updateHttpClientConfig(key, value) { + runtimeConfig.httpHandler?.updateHttpClientConfig(key, value); + }, + httpHandlerConfigs() { + return runtimeConfig.httpHandler.httpHandlerConfigs(); + } + }; +}, "getHttpHandlerExtensionConfiguration"); +var resolveHttpHandlerRuntimeConfig = /* @__PURE__ */ __name((httpHandlerExtensionConfiguration) => { + return { + httpHandler: httpHandlerExtensionConfiguration.httpHandler() + }; +}, "resolveHttpHandlerRuntimeConfig"); + +// src/Field.ts +var import_types = require("@smithy/types"); +var Field = class { + static { + __name(this, "Field"); + } + constructor({ name, kind = import_types.FieldPosition.HEADER, values = [] }) { + this.name = name; + this.kind = kind; + this.values = values; + } + /** + * Appends a value to the field. + * + * @param value The value to append. + */ + add(value) { + this.values.push(value); + } + /** + * Overwrite existing field values. + * + * @param values The new field values. + */ + set(values) { + this.values = values; + } + /** + * Remove all matching entries from list. + * + * @param value Value to remove. + */ + remove(value) { + this.values = this.values.filter((v) => v !== value); + } + /** + * Get comma-delimited string. + * + * @returns String representation of {@link Field}. + */ + toString() { + return this.values.map((v) => v.includes(",") || v.includes(" ") ? `"${v}"` : v).join(", "); + } + /** + * Get string values as a list + * + * @returns Values in {@link Field} as a list. 
+ */ + get() { + return this.values; + } +}; + +// src/Fields.ts +var Fields = class { + constructor({ fields = [], encoding = "utf-8" }) { + this.entries = {}; + fields.forEach(this.setField.bind(this)); + this.encoding = encoding; + } + static { + __name(this, "Fields"); + } + /** + * Set entry for a {@link Field} name. The `name` + * attribute will be used to key the collection. + * + * @param field The {@link Field} to set. + */ + setField(field) { + this.entries[field.name.toLowerCase()] = field; + } + /** + * Retrieve {@link Field} entry by name. + * + * @param name The name of the {@link Field} entry + * to retrieve + * @returns The {@link Field} if it exists. + */ + getField(name) { + return this.entries[name.toLowerCase()]; + } + /** + * Delete entry from collection. + * + * @param name Name of the entry to delete. + */ + removeField(name) { + delete this.entries[name.toLowerCase()]; + } + /** + * Helper function for retrieving specific types of fields. + * Used to grab all headers or all trailers. + * + * @param kind {@link FieldPosition} of entries to retrieve. + * @returns The {@link Field} entries with the specified + * {@link FieldPosition}. + */ + getByType(kind) { + return Object.values(this.entries).filter((field) => field.kind === kind); + } +}; + +// src/httpRequest.ts + +var HttpRequest = class _HttpRequest { + static { + __name(this, "HttpRequest"); + } + constructor(options) { + this.method = options.method || "GET"; + this.hostname = options.hostname || "localhost"; + this.port = options.port; + this.query = options.query || {}; + this.headers = options.headers || {}; + this.body = options.body; + this.protocol = options.protocol ? options.protocol.slice(-1) !== ":" ? `${options.protocol}:` : options.protocol : "https:"; + this.path = options.path ? options.path.charAt(0) !== "/" ? 
`/${options.path}` : options.path : "/"; + this.username = options.username; + this.password = options.password; + this.fragment = options.fragment; + } + /** + * Note: this does not deep-clone the body. + */ + static clone(request) { + const cloned = new _HttpRequest({ + ...request, + headers: { ...request.headers } + }); + if (cloned.query) { + cloned.query = cloneQuery(cloned.query); + } + return cloned; + } + /** + * This method only actually asserts that request is the interface {@link IHttpRequest}, + * and not necessarily this concrete class. Left in place for API stability. + * + * Do not call instance methods on the input of this function, and + * do not assume it has the HttpRequest prototype. + */ + static isInstance(request) { + if (!request) { + return false; + } + const req = request; + return "method" in req && "protocol" in req && "hostname" in req && "path" in req && typeof req["query"] === "object" && typeof req["headers"] === "object"; + } + /** + * @deprecated use static HttpRequest.clone(request) instead. It's not safe to call + * this method because {@link HttpRequest.isInstance} incorrectly + * asserts that IHttpRequest (interface) objects are of type HttpRequest (class). + */ + clone() { + return _HttpRequest.clone(this); + } +}; +function cloneQuery(query) { + return Object.keys(query).reduce((carry, paramName) => { + const param = query[paramName]; + return { + ...carry, + [paramName]: Array.isArray(param) ? 
[...param] : param + }; + }, {}); +} +__name(cloneQuery, "cloneQuery"); + +// src/httpResponse.ts +var HttpResponse = class { + static { + __name(this, "HttpResponse"); + } + constructor(options) { + this.statusCode = options.statusCode; + this.reason = options.reason; + this.headers = options.headers || {}; + this.body = options.body; + } + static isInstance(response) { + if (!response) + return false; + const resp = response; + return typeof resp.statusCode === "number" && typeof resp.headers === "object"; + } +}; + +// src/isValidHostname.ts +function isValidHostname(hostname) { + const hostPattern = /^[a-z0-9][a-z0-9\.\-]*[a-z0-9]$/; + return hostPattern.test(hostname); +} +__name(isValidHostname, "isValidHostname"); +// Annotate the CommonJS export names for ESM import in node: + +0 && (module.exports = { + getHttpHandlerExtensionConfiguration, + resolveHttpHandlerRuntimeConfig, + Field, + Fields, + HttpRequest, + HttpResponse, + isValidHostname +}); + diff --git a/amplify/functions/deleteDocument/node_modules/@smithy/protocol-http/dist-cjs/isValidHostname.js b/amplify/functions/deleteDocument/node_modules/@smithy/protocol-http/dist-cjs/isValidHostname.js new file mode 100644 index 0000000..532e610 --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@smithy/protocol-http/dist-cjs/isValidHostname.js @@ -0,0 +1 @@ +module.exports = require("./index.js"); \ No newline at end of file diff --git a/amplify/functions/deleteDocument/node_modules/@smithy/protocol-http/dist-cjs/types.js b/amplify/functions/deleteDocument/node_modules/@smithy/protocol-http/dist-cjs/types.js new file mode 100644 index 0000000..532e610 --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@smithy/protocol-http/dist-cjs/types.js @@ -0,0 +1 @@ +module.exports = require("./index.js"); \ No newline at end of file diff --git a/amplify/functions/deleteDocument/node_modules/@smithy/protocol-http/dist-es/Field.js 
b/amplify/functions/deleteDocument/node_modules/@smithy/protocol-http/dist-es/Field.js new file mode 100644 index 0000000..918c883 --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@smithy/protocol-http/dist-es/Field.js @@ -0,0 +1,23 @@ +import { FieldPosition } from "@smithy/types"; +export class Field { + constructor({ name, kind = FieldPosition.HEADER, values = [] }) { + this.name = name; + this.kind = kind; + this.values = values; + } + add(value) { + this.values.push(value); + } + set(values) { + this.values = values; + } + remove(value) { + this.values = this.values.filter((v) => v !== value); + } + toString() { + return this.values.map((v) => (v.includes(",") || v.includes(" ") ? `"${v}"` : v)).join(", "); + } + get() { + return this.values; + } +} diff --git a/amplify/functions/deleteDocument/node_modules/@smithy/protocol-http/dist-es/Fields.js b/amplify/functions/deleteDocument/node_modules/@smithy/protocol-http/dist-es/Fields.js new file mode 100644 index 0000000..efa591f --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@smithy/protocol-http/dist-es/Fields.js @@ -0,0 +1,19 @@ +export class Fields { + constructor({ fields = [], encoding = "utf-8" }) { + this.entries = {}; + fields.forEach(this.setField.bind(this)); + this.encoding = encoding; + } + setField(field) { + this.entries[field.name.toLowerCase()] = field; + } + getField(name) { + return this.entries[name.toLowerCase()]; + } + removeField(name) { + delete this.entries[name.toLowerCase()]; + } + getByType(kind) { + return Object.values(this.entries).filter((field) => field.kind === kind); + } +} diff --git a/amplify/functions/deleteDocument/node_modules/@smithy/protocol-http/dist-es/extensions/httpExtensionConfiguration.js b/amplify/functions/deleteDocument/node_modules/@smithy/protocol-http/dist-es/extensions/httpExtensionConfiguration.js new file mode 100644 index 0000000..1a5aa0c --- /dev/null +++ 
b/amplify/functions/deleteDocument/node_modules/@smithy/protocol-http/dist-es/extensions/httpExtensionConfiguration.js @@ -0,0 +1,21 @@ +export const getHttpHandlerExtensionConfiguration = (runtimeConfig) => { + return { + setHttpHandler(handler) { + runtimeConfig.httpHandler = handler; + }, + httpHandler() { + return runtimeConfig.httpHandler; + }, + updateHttpClientConfig(key, value) { + runtimeConfig.httpHandler?.updateHttpClientConfig(key, value); + }, + httpHandlerConfigs() { + return runtimeConfig.httpHandler.httpHandlerConfigs(); + }, + }; +}; +export const resolveHttpHandlerRuntimeConfig = (httpHandlerExtensionConfiguration) => { + return { + httpHandler: httpHandlerExtensionConfiguration.httpHandler(), + }; +}; diff --git a/amplify/functions/deleteDocument/node_modules/@smithy/protocol-http/dist-es/extensions/index.js b/amplify/functions/deleteDocument/node_modules/@smithy/protocol-http/dist-es/extensions/index.js new file mode 100644 index 0000000..a215a4a --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@smithy/protocol-http/dist-es/extensions/index.js @@ -0,0 +1 @@ +export * from "./httpExtensionConfiguration"; diff --git a/amplify/functions/deleteDocument/node_modules/@smithy/protocol-http/dist-es/httpHandler.js b/amplify/functions/deleteDocument/node_modules/@smithy/protocol-http/dist-es/httpHandler.js new file mode 100644 index 0000000..cb0ff5c --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@smithy/protocol-http/dist-es/httpHandler.js @@ -0,0 +1 @@ +export {}; diff --git a/amplify/functions/deleteDocument/node_modules/@smithy/protocol-http/dist-es/httpRequest.js b/amplify/functions/deleteDocument/node_modules/@smithy/protocol-http/dist-es/httpRequest.js new file mode 100644 index 0000000..fd426ab --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@smithy/protocol-http/dist-es/httpRequest.js @@ -0,0 +1,53 @@ +export class HttpRequest { + constructor(options) { + this.method = options.method || "GET"; 
+ this.hostname = options.hostname || "localhost"; + this.port = options.port; + this.query = options.query || {}; + this.headers = options.headers || {}; + this.body = options.body; + this.protocol = options.protocol + ? options.protocol.slice(-1) !== ":" + ? `${options.protocol}:` + : options.protocol + : "https:"; + this.path = options.path ? (options.path.charAt(0) !== "/" ? `/${options.path}` : options.path) : "/"; + this.username = options.username; + this.password = options.password; + this.fragment = options.fragment; + } + static clone(request) { + const cloned = new HttpRequest({ + ...request, + headers: { ...request.headers }, + }); + if (cloned.query) { + cloned.query = cloneQuery(cloned.query); + } + return cloned; + } + static isInstance(request) { + if (!request) { + return false; + } + const req = request; + return ("method" in req && + "protocol" in req && + "hostname" in req && + "path" in req && + typeof req["query"] === "object" && + typeof req["headers"] === "object"); + } + clone() { + return HttpRequest.clone(this); + } +} +function cloneQuery(query) { + return Object.keys(query).reduce((carry, paramName) => { + const param = query[paramName]; + return { + ...carry, + [paramName]: Array.isArray(param) ? 
[...param] : param, + }; + }, {}); +} diff --git a/amplify/functions/deleteDocument/node_modules/@smithy/protocol-http/dist-es/httpResponse.js b/amplify/functions/deleteDocument/node_modules/@smithy/protocol-http/dist-es/httpResponse.js new file mode 100644 index 0000000..75f470f --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@smithy/protocol-http/dist-es/httpResponse.js @@ -0,0 +1,14 @@ +export class HttpResponse { + constructor(options) { + this.statusCode = options.statusCode; + this.reason = options.reason; + this.headers = options.headers || {}; + this.body = options.body; + } + static isInstance(response) { + if (!response) + return false; + const resp = response; + return typeof resp.statusCode === "number" && typeof resp.headers === "object"; + } +} diff --git a/amplify/functions/deleteDocument/node_modules/@smithy/protocol-http/dist-es/index.js b/amplify/functions/deleteDocument/node_modules/@smithy/protocol-http/dist-es/index.js new file mode 100644 index 0000000..8ff7f26 --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@smithy/protocol-http/dist-es/index.js @@ -0,0 +1,8 @@ +export * from "./extensions"; +export * from "./Field"; +export * from "./Fields"; +export * from "./httpHandler"; +export * from "./httpRequest"; +export * from "./httpResponse"; +export * from "./isValidHostname"; +export * from "./types"; diff --git a/amplify/functions/deleteDocument/node_modules/@smithy/protocol-http/dist-es/isValidHostname.js b/amplify/functions/deleteDocument/node_modules/@smithy/protocol-http/dist-es/isValidHostname.js new file mode 100644 index 0000000..464c7db --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@smithy/protocol-http/dist-es/isValidHostname.js @@ -0,0 +1,4 @@ +export function isValidHostname(hostname) { + const hostPattern = /^[a-z0-9][a-z0-9\.\-]*[a-z0-9]$/; + return hostPattern.test(hostname); +} diff --git a/amplify/functions/deleteDocument/node_modules/@smithy/protocol-http/dist-es/types.js 
b/amplify/functions/deleteDocument/node_modules/@smithy/protocol-http/dist-es/types.js new file mode 100644 index 0000000..cb0ff5c --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@smithy/protocol-http/dist-es/types.js @@ -0,0 +1 @@ +export {}; diff --git a/amplify/functions/deleteDocument/node_modules/@smithy/protocol-http/dist-types/Field.d.ts b/amplify/functions/deleteDocument/node_modules/@smithy/protocol-http/dist-types/Field.d.ts new file mode 100644 index 0000000..2d1613a --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@smithy/protocol-http/dist-types/Field.d.ts @@ -0,0 +1,49 @@ +import { FieldOptions, FieldPosition } from "@smithy/types"; +/** + * A name-value pair representing a single field + * transmitted in an HTTP Request or Response. + * + * The kind will dictate metadata placement within + * an HTTP message. + * + * All field names are case insensitive and + * case-variance must be treated as equivalent. + * Names MAY be normalized but SHOULD be preserved + * for accuracy during transmission. + */ +export declare class Field { + readonly name: string; + readonly kind: FieldPosition; + values: string[]; + constructor({ name, kind, values }: FieldOptions); + /** + * Appends a value to the field. + * + * @param value The value to append. + */ + add(value: string): void; + /** + * Overwrite existing field values. + * + * @param values The new field values. + */ + set(values: string[]): void; + /** + * Remove all matching entries from list. + * + * @param value Value to remove. + */ + remove(value: string): void; + /** + * Get comma-delimited string. + * + * @returns String representation of {@link Field}. + */ + toString(): string; + /** + * Get string values as a list + * + * @returns Values in {@link Field} as a list. 
+ */ + get(): string[]; +} diff --git a/amplify/functions/deleteDocument/node_modules/@smithy/protocol-http/dist-types/Fields.d.ts b/amplify/functions/deleteDocument/node_modules/@smithy/protocol-http/dist-types/Fields.d.ts new file mode 100644 index 0000000..8915826 --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@smithy/protocol-http/dist-types/Fields.d.ts @@ -0,0 +1,44 @@ +import { FieldPosition } from "@smithy/types"; +import { Field } from "./Field"; +export type FieldsOptions = { + fields?: Field[]; + encoding?: string; +}; +/** + * Collection of Field entries mapped by name. + */ +export declare class Fields { + private readonly entries; + private readonly encoding; + constructor({ fields, encoding }: FieldsOptions); + /** + * Set entry for a {@link Field} name. The `name` + * attribute will be used to key the collection. + * + * @param field The {@link Field} to set. + */ + setField(field: Field): void; + /** + * Retrieve {@link Field} entry by name. + * + * @param name The name of the {@link Field} entry + * to retrieve + * @returns The {@link Field} if it exists. + */ + getField(name: string): Field | undefined; + /** + * Delete entry from collection. + * + * @param name Name of the entry to delete. + */ + removeField(name: string): void; + /** + * Helper function for retrieving specific types of fields. + * Used to grab all headers or all trailers. + * + * @param kind {@link FieldPosition} of entries to retrieve. + * @returns The {@link Field} entries with the specified + * {@link FieldPosition}. 
+ */ + getByType(kind: FieldPosition): Field[]; +} diff --git a/amplify/functions/deleteDocument/node_modules/@smithy/protocol-http/dist-types/extensions/httpExtensionConfiguration.d.ts b/amplify/functions/deleteDocument/node_modules/@smithy/protocol-http/dist-types/extensions/httpExtensionConfiguration.d.ts new file mode 100644 index 0000000..bfe452d --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@smithy/protocol-http/dist-types/extensions/httpExtensionConfiguration.d.ts @@ -0,0 +1,37 @@ +import { HttpHandler } from "../httpHandler"; +/** + * @internal + */ +export interface HttpHandlerExtensionConfiguration { + setHttpHandler(handler: HttpHandler): void; + httpHandler(): HttpHandler; + updateHttpClientConfig(key: keyof HandlerConfig, value: HandlerConfig[typeof key]): void; + httpHandlerConfigs(): HandlerConfig; +} +/** + * @internal + */ +export type HttpHandlerExtensionConfigType = Partial<{ + httpHandler: HttpHandler; +}>; +/** + * @internal + * + * Helper function to resolve default extension configuration from runtime config + */ +export declare const getHttpHandlerExtensionConfiguration: (runtimeConfig: Partial<{ + httpHandler: HttpHandler; +}>) => { + setHttpHandler(handler: HttpHandler): void; + httpHandler(): HttpHandler; + updateHttpClientConfig(key: keyof HandlerConfig, value: HandlerConfig[keyof HandlerConfig]): void; + httpHandlerConfigs(): HandlerConfig; +}; +/** + * @internal + * + * Helper function to resolve runtime config from default extension configuration + */ +export declare const resolveHttpHandlerRuntimeConfig: (httpHandlerExtensionConfiguration: HttpHandlerExtensionConfiguration) => Partial<{ + httpHandler: HttpHandler; +}>; diff --git a/amplify/functions/deleteDocument/node_modules/@smithy/protocol-http/dist-types/extensions/index.d.ts b/amplify/functions/deleteDocument/node_modules/@smithy/protocol-http/dist-types/extensions/index.d.ts new file mode 100644 index 0000000..a215a4a --- /dev/null +++ 
b/amplify/functions/deleteDocument/node_modules/@smithy/protocol-http/dist-types/extensions/index.d.ts @@ -0,0 +1 @@ +export * from "./httpExtensionConfiguration"; diff --git a/amplify/functions/deleteDocument/node_modules/@smithy/protocol-http/dist-types/httpHandler.d.ts b/amplify/functions/deleteDocument/node_modules/@smithy/protocol-http/dist-types/httpHandler.d.ts new file mode 100644 index 0000000..8dc8d32 --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@smithy/protocol-http/dist-types/httpHandler.d.ts @@ -0,0 +1,35 @@ +import type { FetchHttpHandlerOptions, HttpHandlerOptions, NodeHttpHandlerOptions, RequestHandler } from "@smithy/types"; +import type { HttpRequest } from "./httpRequest"; +import type { HttpResponse } from "./httpResponse"; +/** + * @internal + */ +export type HttpHandler = RequestHandler & { + /** + * @internal + */ + updateHttpClientConfig(key: keyof HttpHandlerConfig, value: HttpHandlerConfig[typeof key]): void; + /** + * @internal + */ + httpHandlerConfigs(): HttpHandlerConfig; +}; +/** + * @public + * + * A type representing the accepted user inputs for the `requestHandler` field + * of a client's constructor object. + * + * You may provide an instance of an HttpHandler, or alternatively + * provide the constructor arguments as an object which will be passed + * to the constructor of the default request handler. + * + * The default class constructor to which your arguments will be passed + * varies. The Node.js default is the NodeHttpHandler and the browser/react-native + * default is the FetchHttpHandler. In rarer cases specific clients may be + * configured to use other default implementations such as Websocket or HTTP2. + * + * The fallback type Record is part of the union to allow + * passing constructor params to an unknown requestHandler type. 
+ */ +export type HttpHandlerUserInput = HttpHandler | NodeHttpHandlerOptions | FetchHttpHandlerOptions | Record; diff --git a/amplify/functions/deleteDocument/node_modules/@smithy/protocol-http/dist-types/httpRequest.d.ts b/amplify/functions/deleteDocument/node_modules/@smithy/protocol-http/dist-types/httpRequest.d.ts new file mode 100644 index 0000000..8b64ff6 --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@smithy/protocol-http/dist-types/httpRequest.d.ts @@ -0,0 +1,55 @@ +import { HeaderBag, HttpMessage, HttpRequest as IHttpRequest, QueryParameterBag, URI } from "@smithy/types"; +type HttpRequestOptions = Partial & Partial & { + method?: string; +}; +/** + * Use the distinct IHttpRequest interface from \@smithy/types instead. + * This should not be used due to + * overlapping with the concrete class' name. + * + * This is not marked deprecated since that would mark the concrete class + * deprecated as well. + * + * @internal + */ +export interface HttpRequest extends IHttpRequest { +} +/** + * @public + */ +export { IHttpRequest }; +/** + * @public + */ +export declare class HttpRequest implements HttpMessage, URI { + method: string; + protocol: string; + hostname: string; + port?: number; + path: string; + query: QueryParameterBag; + headers: HeaderBag; + username?: string; + password?: string; + fragment?: string; + body?: any; + constructor(options: HttpRequestOptions); + /** + * Note: this does not deep-clone the body. + */ + static clone(request: IHttpRequest): HttpRequest; + /** + * This method only actually asserts that request is the interface {@link IHttpRequest}, + * and not necessarily this concrete class. Left in place for API stability. + * + * Do not call instance methods on the input of this function, and + * do not assume it has the HttpRequest prototype. + */ + static isInstance(request: unknown): request is HttpRequest; + /** + * @deprecated use static HttpRequest.clone(request) instead. 
It's not safe to call + * this method because {@link HttpRequest.isInstance} incorrectly + * asserts that IHttpRequest (interface) objects are of type HttpRequest (class). + */ + clone(): HttpRequest; +} diff --git a/amplify/functions/deleteDocument/node_modules/@smithy/protocol-http/dist-types/httpResponse.d.ts b/amplify/functions/deleteDocument/node_modules/@smithy/protocol-http/dist-types/httpResponse.d.ts new file mode 100644 index 0000000..e51f18b --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@smithy/protocol-http/dist-types/httpResponse.d.ts @@ -0,0 +1,29 @@ +import { HeaderBag, HttpMessage, HttpResponse as IHttpResponse } from "@smithy/types"; +type HttpResponseOptions = Partial & { + statusCode: number; + reason?: string; +}; +/** + * Use the distinct IHttpResponse interface from \@smithy/types instead. + * This should not be used due to + * overlapping with the concrete class' name. + * + * This is not marked deprecated since that would mark the concrete class + * deprecated as well. 
+ * + * @internal + */ +export interface HttpResponse extends IHttpResponse { +} +/** + * @public + */ +export declare class HttpResponse { + statusCode: number; + reason?: string; + headers: HeaderBag; + body?: any; + constructor(options: HttpResponseOptions); + static isInstance(response: unknown): response is HttpResponse; +} +export {}; diff --git a/amplify/functions/deleteDocument/node_modules/@smithy/protocol-http/dist-types/index.d.ts b/amplify/functions/deleteDocument/node_modules/@smithy/protocol-http/dist-types/index.d.ts new file mode 100644 index 0000000..8ff7f26 --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@smithy/protocol-http/dist-types/index.d.ts @@ -0,0 +1,8 @@ +export * from "./extensions"; +export * from "./Field"; +export * from "./Fields"; +export * from "./httpHandler"; +export * from "./httpRequest"; +export * from "./httpResponse"; +export * from "./isValidHostname"; +export * from "./types"; diff --git a/amplify/functions/deleteDocument/node_modules/@smithy/protocol-http/dist-types/isValidHostname.d.ts b/amplify/functions/deleteDocument/node_modules/@smithy/protocol-http/dist-types/isValidHostname.d.ts new file mode 100644 index 0000000..6fb5bcb --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@smithy/protocol-http/dist-types/isValidHostname.d.ts @@ -0,0 +1 @@ +export declare function isValidHostname(hostname: string): boolean; diff --git a/amplify/functions/deleteDocument/node_modules/@smithy/protocol-http/dist-types/ts3.4/Field.d.ts b/amplify/functions/deleteDocument/node_modules/@smithy/protocol-http/dist-types/ts3.4/Field.d.ts new file mode 100644 index 0000000..faa4b70 --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@smithy/protocol-http/dist-types/ts3.4/Field.d.ts @@ -0,0 +1,49 @@ +import { FieldOptions, FieldPosition } from "@smithy/types"; +/** + * A name-value pair representing a single field + * transmitted in an HTTP Request or Response. 
+ * + * The kind will dictate metadata placement within + * an HTTP message. + * + * All field names are case insensitive and + * case-variance must be treated as equivalent. + * Names MAY be normalized but SHOULD be preserved + * for accuracy during transmission. + */ +export declare class Field { + readonly name: string; + readonly kind: FieldPosition; + values: string[]; + constructor({ name, kind, values }: FieldOptions); + /** + * Appends a value to the field. + * + * @param value The value to append. + */ + add(value: string): void; + /** + * Overwrite existing field values. + * + * @param values The new field values. + */ + set(values: string[]): void; + /** + * Remove all matching entries from list. + * + * @param value Value to remove. + */ + remove(value: string): void; + /** + * Get comma-delimited string. + * + * @returns String representation of {@link Field}. + */ + toString(): string; + /** + * Get string values as a list + * + * @returns Values in {@link Field} as a list. + */ + get(): string[]; +} diff --git a/amplify/functions/deleteDocument/node_modules/@smithy/protocol-http/dist-types/ts3.4/Fields.d.ts b/amplify/functions/deleteDocument/node_modules/@smithy/protocol-http/dist-types/ts3.4/Fields.d.ts new file mode 100644 index 0000000..616f55e --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@smithy/protocol-http/dist-types/ts3.4/Fields.d.ts @@ -0,0 +1,44 @@ +import { FieldPosition } from "@smithy/types"; +import { Field } from "./Field"; +export type FieldsOptions = { + fields?: Field[]; + encoding?: string; +}; +/** + * Collection of Field entries mapped by name. + */ +export declare class Fields { + private readonly entries; + private readonly encoding; + constructor({ fields, encoding }: FieldsOptions); + /** + * Set entry for a {@link Field} name. The `name` + * attribute will be used to key the collection. + * + * @param field The {@link Field} to set. 
+ */ + setField(field: Field): void; + /** + * Retrieve {@link Field} entry by name. + * + * @param name The name of the {@link Field} entry + * to retrieve + * @returns The {@link Field} if it exists. + */ + getField(name: string): Field | undefined; + /** + * Delete entry from collection. + * + * @param name Name of the entry to delete. + */ + removeField(name: string): void; + /** + * Helper function for retrieving specific types of fields. + * Used to grab all headers or all trailers. + * + * @param kind {@link FieldPosition} of entries to retrieve. + * @returns The {@link Field} entries with the specified + * {@link FieldPosition}. + */ + getByType(kind: FieldPosition): Field[]; +} diff --git a/amplify/functions/deleteDocument/node_modules/@smithy/protocol-http/dist-types/ts3.4/extensions/httpExtensionConfiguration.d.ts b/amplify/functions/deleteDocument/node_modules/@smithy/protocol-http/dist-types/ts3.4/extensions/httpExtensionConfiguration.d.ts new file mode 100644 index 0000000..3cd2cf6 --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@smithy/protocol-http/dist-types/ts3.4/extensions/httpExtensionConfiguration.d.ts @@ -0,0 +1,37 @@ +import { HttpHandler } from "../httpHandler"; +/** + * @internal + */ +export interface HttpHandlerExtensionConfiguration { + setHttpHandler(handler: HttpHandler): void; + httpHandler(): HttpHandler; + updateHttpClientConfig(key: keyof HandlerConfig, value: HandlerConfig[typeof key]): void; + httpHandlerConfigs(): HandlerConfig; +} +/** + * @internal + */ +export type HttpHandlerExtensionConfigType = Partial<{ + httpHandler: HttpHandler; +}>; +/** + * @internal + * + * Helper function to resolve default extension configuration from runtime config + */ +export declare const getHttpHandlerExtensionConfiguration: (runtimeConfig: Partial<{ + httpHandler: HttpHandler; +}>) => { + setHttpHandler(handler: HttpHandler): void; + httpHandler(): HttpHandler; + updateHttpClientConfig(key: keyof HandlerConfig, value: 
HandlerConfig[keyof HandlerConfig]): void; + httpHandlerConfigs(): HandlerConfig; +}; +/** + * @internal + * + * Helper function to resolve runtime config from default extension configuration + */ +export declare const resolveHttpHandlerRuntimeConfig: (httpHandlerExtensionConfiguration: HttpHandlerExtensionConfiguration) => Partial<{ + httpHandler: HttpHandler; +}>; diff --git a/amplify/functions/deleteDocument/node_modules/@smithy/protocol-http/dist-types/ts3.4/extensions/index.d.ts b/amplify/functions/deleteDocument/node_modules/@smithy/protocol-http/dist-types/ts3.4/extensions/index.d.ts new file mode 100644 index 0000000..e0f765b --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@smithy/protocol-http/dist-types/ts3.4/extensions/index.d.ts @@ -0,0 +1 @@ +export * from "./httpExtensionConfiguration"; diff --git a/amplify/functions/deleteDocument/node_modules/@smithy/protocol-http/dist-types/ts3.4/httpHandler.d.ts b/amplify/functions/deleteDocument/node_modules/@smithy/protocol-http/dist-types/ts3.4/httpHandler.d.ts new file mode 100644 index 0000000..b8f1978 --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@smithy/protocol-http/dist-types/ts3.4/httpHandler.d.ts @@ -0,0 +1,35 @@ +import { FetchHttpHandlerOptions, HttpHandlerOptions, NodeHttpHandlerOptions, RequestHandler } from "@smithy/types"; +import { HttpRequest } from "./httpRequest"; +import { HttpResponse } from "./httpResponse"; +/** + * @internal + */ +export type HttpHandler = RequestHandler & { + /** + * @internal + */ + updateHttpClientConfig(key: keyof HttpHandlerConfig, value: HttpHandlerConfig[typeof key]): void; + /** + * @internal + */ + httpHandlerConfigs(): HttpHandlerConfig; +}; +/** + * @public + * + * A type representing the accepted user inputs for the `requestHandler` field + * of a client's constructor object. 
+ * + * You may provide an instance of an HttpHandler, or alternatively + * provide the constructor arguments as an object which will be passed + * to the constructor of the default request handler. + * + * The default class constructor to which your arguments will be passed + * varies. The Node.js default is the NodeHttpHandler and the browser/react-native + * default is the FetchHttpHandler. In rarer cases specific clients may be + * configured to use other default implementations such as Websocket or HTTP2. + * + * The fallback type Record is part of the union to allow + * passing constructor params to an unknown requestHandler type. + */ +export type HttpHandlerUserInput = HttpHandler | NodeHttpHandlerOptions | FetchHttpHandlerOptions | Record; diff --git a/amplify/functions/deleteDocument/node_modules/@smithy/protocol-http/dist-types/ts3.4/httpRequest.d.ts b/amplify/functions/deleteDocument/node_modules/@smithy/protocol-http/dist-types/ts3.4/httpRequest.d.ts new file mode 100644 index 0000000..cdcf38b --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@smithy/protocol-http/dist-types/ts3.4/httpRequest.d.ts @@ -0,0 +1,55 @@ +import { HeaderBag, HttpMessage, HttpRequest as IHttpRequest, QueryParameterBag, URI } from "@smithy/types"; +type HttpRequestOptions = Partial & Partial & { + method?: string; +}; +/** + * Use the distinct IHttpRequest interface from \@smithy/types instead. + * This should not be used due to + * overlapping with the concrete class' name. + * + * This is not marked deprecated since that would mark the concrete class + * deprecated as well. 
+ * + * @internal + */ +export interface HttpRequest extends IHttpRequest { +} +/** + * @public + */ +export { IHttpRequest }; +/** + * @public + */ +export declare class HttpRequest implements HttpMessage, URI { + method: string; + protocol: string; + hostname: string; + port?: number; + path: string; + query: QueryParameterBag; + headers: HeaderBag; + username?: string; + password?: string; + fragment?: string; + body?: any; + constructor(options: HttpRequestOptions); + /** + * Note: this does not deep-clone the body. + */ + static clone(request: IHttpRequest): HttpRequest; + /** + * This method only actually asserts that request is the interface {@link IHttpRequest}, + * and not necessarily this concrete class. Left in place for API stability. + * + * Do not call instance methods on the input of this function, and + * do not assume it has the HttpRequest prototype. + */ + static isInstance(request: unknown): request is HttpRequest; + /** + * @deprecated use static HttpRequest.clone(request) instead. It's not safe to call + * this method because {@link HttpRequest.isInstance} incorrectly + * asserts that IHttpRequest (interface) objects are of type HttpRequest (class). + */ + clone(): HttpRequest; +} diff --git a/amplify/functions/deleteDocument/node_modules/@smithy/protocol-http/dist-types/ts3.4/httpResponse.d.ts b/amplify/functions/deleteDocument/node_modules/@smithy/protocol-http/dist-types/ts3.4/httpResponse.d.ts new file mode 100644 index 0000000..8babc91 --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@smithy/protocol-http/dist-types/ts3.4/httpResponse.d.ts @@ -0,0 +1,29 @@ +import { HeaderBag, HttpMessage, HttpResponse as IHttpResponse } from "@smithy/types"; +type HttpResponseOptions = Partial & { + statusCode: number; + reason?: string; +}; +/** + * Use the distinct IHttpResponse interface from \@smithy/types instead. + * This should not be used due to + * overlapping with the concrete class' name. 
+ * + * This is not marked deprecated since that would mark the concrete class + * deprecated as well. + * + * @internal + */ +export interface HttpResponse extends IHttpResponse { +} +/** + * @public + */ +export declare class HttpResponse { + statusCode: number; + reason?: string; + headers: HeaderBag; + body?: any; + constructor(options: HttpResponseOptions); + static isInstance(response: unknown): response is HttpResponse; +} +export {}; diff --git a/amplify/functions/deleteDocument/node_modules/@smithy/protocol-http/dist-types/ts3.4/index.d.ts b/amplify/functions/deleteDocument/node_modules/@smithy/protocol-http/dist-types/ts3.4/index.d.ts new file mode 100644 index 0000000..08feffa --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@smithy/protocol-http/dist-types/ts3.4/index.d.ts @@ -0,0 +1,8 @@ +export * from "./extensions"; +export * from "./Field"; +export * from "./Fields"; +export * from "./httpHandler"; +export * from "./httpRequest"; +export * from "./httpResponse"; +export * from "./isValidHostname"; +export * from "./types"; diff --git a/amplify/functions/deleteDocument/node_modules/@smithy/protocol-http/dist-types/ts3.4/isValidHostname.d.ts b/amplify/functions/deleteDocument/node_modules/@smithy/protocol-http/dist-types/ts3.4/isValidHostname.d.ts new file mode 100644 index 0000000..7b85b36 --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@smithy/protocol-http/dist-types/ts3.4/isValidHostname.d.ts @@ -0,0 +1 @@ +export declare function isValidHostname(hostname: string): boolean; diff --git a/amplify/functions/deleteDocument/node_modules/@smithy/protocol-http/dist-types/ts3.4/types.d.ts b/amplify/functions/deleteDocument/node_modules/@smithy/protocol-http/dist-types/ts3.4/types.d.ts new file mode 100644 index 0000000..42e3c66 --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@smithy/protocol-http/dist-types/ts3.4/types.d.ts @@ -0,0 +1,21 @@ +import { FieldOptions as __FieldOptions, FieldPosition as 
__FieldPosition, HeaderBag as __HeaderBag, HttpHandlerOptions as __HttpHandlerOptions, HttpMessage as __HttpMessage } from "@smithy/types"; +/** + * @deprecated Use FieldOptions from `@smithy/types` instead + */ +export type FieldOptions = __FieldOptions; +/** + * @deprecated Use FieldPosition from `@smithy/types` instead + */ +export type FieldPosition = __FieldPosition; +/** + * @deprecated Use HeaderBag from `@smithy/types` instead + */ +export type HeaderBag = __HeaderBag; +/** + * @deprecated Use HttpMessage from `@smithy/types` instead + */ +export type HttpMessage = __HttpMessage; +/** + * @deprecated Use HttpHandlerOptions from `@smithy/types` instead + */ +export type HttpHandlerOptions = __HttpHandlerOptions; diff --git a/amplify/functions/deleteDocument/node_modules/@smithy/protocol-http/dist-types/types.d.ts b/amplify/functions/deleteDocument/node_modules/@smithy/protocol-http/dist-types/types.d.ts new file mode 100644 index 0000000..0d597b9 --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@smithy/protocol-http/dist-types/types.d.ts @@ -0,0 +1,21 @@ +import { FieldOptions as __FieldOptions, FieldPosition as __FieldPosition, HeaderBag as __HeaderBag, HttpHandlerOptions as __HttpHandlerOptions, HttpMessage as __HttpMessage } from "@smithy/types"; +/** + * @deprecated Use FieldOptions from `@smithy/types` instead + */ +export type FieldOptions = __FieldOptions; +/** + * @deprecated Use FieldPosition from `@smithy/types` instead + */ +export type FieldPosition = __FieldPosition; +/** + * @deprecated Use HeaderBag from `@smithy/types` instead + */ +export type HeaderBag = __HeaderBag; +/** + * @deprecated Use HttpMessage from `@smithy/types` instead + */ +export type HttpMessage = __HttpMessage; +/** + * @deprecated Use HttpHandlerOptions from `@smithy/types` instead + */ +export type HttpHandlerOptions = __HttpHandlerOptions; diff --git a/amplify/functions/deleteDocument/node_modules/@smithy/protocol-http/package.json 
b/amplify/functions/deleteDocument/node_modules/@smithy/protocol-http/package.json new file mode 100644 index 0000000..549711a --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@smithy/protocol-http/package.json @@ -0,0 +1,61 @@ +{ + "name": "@smithy/protocol-http", + "version": "5.1.0", + "scripts": { + "build": "concurrently 'yarn:build:cjs' 'yarn:build:es' 'yarn:build:types && yarn build:types:downlevel'", + "build:cjs": "node ../../scripts/inline protocol-http", + "build:es": "yarn g:tsc -p tsconfig.es.json", + "build:types": "yarn g:tsc -p tsconfig.types.json", + "build:types:downlevel": "rimraf dist-types/ts3.4 && downlevel-dts dist-types dist-types/ts3.4", + "stage-release": "rimraf ./.release && yarn pack && mkdir ./.release && tar zxvf ./package.tgz --directory ./.release && rm ./package.tgz", + "clean": "rimraf ./dist-* && rimraf *.tsbuildinfo || exit 0", + "lint": "eslint -c ../../.eslintrc.js \"src/**/*.ts\"", + "format": "prettier --config ../../prettier.config.js --ignore-path ../../.prettierignore --write \"**/*.{ts,md,json}\"", + "test": "yarn g:vitest run", + "test:watch": "yarn g:vitest watch" + }, + "main": "./dist-cjs/index.js", + "module": "./dist-es/index.js", + "types": "./dist-types/index.d.ts", + "author": { + "name": "AWS Smithy Team", + "email": "", + "url": "https://smithy.io" + }, + "license": "Apache-2.0", + "dependencies": { + "@smithy/types": "^4.2.0", + "tslib": "^2.6.2" + }, + "engines": { + "node": ">=18.0.0" + }, + "typesVersions": { + "<4.0": { + "dist-types/*": [ + "dist-types/ts3.4/*" + ] + } + }, + "files": [ + "dist-*/**" + ], + "homepage": "https://github.com/smithy-lang/smithy-typescript/tree/main/packages/protocol-http", + "repository": { + "type": "git", + "url": "https://github.com/smithy-lang/smithy-typescript.git", + "directory": "packages/protocol-http" + }, + "devDependencies": { + "concurrently": "7.0.0", + "downlevel-dts": "0.10.1", + "rimraf": "3.0.2", + "typedoc": "0.23.23" + }, + "typedoc": { + 
"entryPoint": "src/index.ts" + }, + "publishConfig": { + "directory": ".release/package" + } +} \ No newline at end of file diff --git a/amplify/functions/deleteDocument/node_modules/@smithy/querystring-builder/LICENSE b/amplify/functions/deleteDocument/node_modules/@smithy/querystring-builder/LICENSE new file mode 100644 index 0000000..dd65ae0 --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@smithy/querystring-builder/LICENSE @@ -0,0 +1,201 @@ + Apache License + Version 2.0, January 2004 + http://www.apache.org/licenses/ + + TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION + + 1. Definitions. + + "License" shall mean the terms and conditions for use, reproduction, + and distribution as defined by Sections 1 through 9 of this document. + + "Licensor" shall mean the copyright owner or entity authorized by + the copyright owner that is granting the License. + + "Legal Entity" shall mean the union of the acting entity and all + other entities that control, are controlled by, or are under common + control with that entity. For the purposes of this definition, + "control" means (i) the power, direct or indirect, to cause the + direction or management of such entity, whether by contract or + otherwise, or (ii) ownership of fifty percent (50%) or more of the + outstanding shares, or (iii) beneficial ownership of such entity. + + "You" (or "Your") shall mean an individual or Legal Entity + exercising permissions granted by this License. + + "Source" form shall mean the preferred form for making modifications, + including but not limited to software source code, documentation + source, and configuration files. + + "Object" form shall mean any form resulting from mechanical + transformation or translation of a Source form, including but + not limited to compiled object code, generated documentation, + and conversions to other media types. 
+ + "Work" shall mean the work of authorship, whether in Source or + Object form, made available under the License, as indicated by a + copyright notice that is included in or attached to the work + (an example is provided in the Appendix below). + + "Derivative Works" shall mean any work, whether in Source or Object + form, that is based on (or derived from) the Work and for which the + editorial revisions, annotations, elaborations, or other modifications + represent, as a whole, an original work of authorship. For the purposes + of this License, Derivative Works shall not include works that remain + separable from, or merely link (or bind by name) to the interfaces of, + the Work and Derivative Works thereof. + + "Contribution" shall mean any work of authorship, including + the original version of the Work and any modifications or additions + to that Work or Derivative Works thereof, that is intentionally + submitted to Licensor for inclusion in the Work by the copyright owner + or by an individual or Legal Entity authorized to submit on behalf of + the copyright owner. For the purposes of this definition, "submitted" + means any form of electronic, verbal, or written communication sent + to the Licensor or its representatives, including but not limited to + communication on electronic mailing lists, source code control systems, + and issue tracking systems that are managed by, or on behalf of, the + Licensor for the purpose of discussing and improving the Work, but + excluding communication that is conspicuously marked or otherwise + designated in writing by the copyright owner as "Not a Contribution." + + "Contributor" shall mean Licensor and any individual or Legal Entity + on behalf of whom a Contribution has been received by Licensor and + subsequently incorporated within the Work. + + 2. Grant of Copyright License. 
Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + copyright license to reproduce, prepare Derivative Works of, + publicly display, publicly perform, sublicense, and distribute the + Work and such Derivative Works in Source or Object form. + + 3. Grant of Patent License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + (except as stated in this section) patent license to make, have made, + use, offer to sell, sell, import, and otherwise transfer the Work, + where such license applies only to those patent claims licensable + by such Contributor that are necessarily infringed by their + Contribution(s) alone or by combination of their Contribution(s) + with the Work to which such Contribution(s) was submitted. If You + institute patent litigation against any entity (including a + cross-claim or counterclaim in a lawsuit) alleging that the Work + or a Contribution incorporated within the Work constitutes direct + or contributory patent infringement, then any patent licenses + granted to You under this License for that Work shall terminate + as of the date such litigation is filed. + + 4. Redistribution. 
You may reproduce and distribute copies of the + Work or Derivative Works thereof in any medium, with or without + modifications, and in Source or Object form, provided that You + meet the following conditions: + + (a) You must give any other recipients of the Work or + Derivative Works a copy of this License; and + + (b) You must cause any modified files to carry prominent notices + stating that You changed the files; and + + (c) You must retain, in the Source form of any Derivative Works + that You distribute, all copyright, patent, trademark, and + attribution notices from the Source form of the Work, + excluding those notices that do not pertain to any part of + the Derivative Works; and + + (d) If the Work includes a "NOTICE" text file as part of its + distribution, then any Derivative Works that You distribute must + include a readable copy of the attribution notices contained + within such NOTICE file, excluding those notices that do not + pertain to any part of the Derivative Works, in at least one + of the following places: within a NOTICE text file distributed + as part of the Derivative Works; within the Source form or + documentation, if provided along with the Derivative Works; or, + within a display generated by the Derivative Works, if and + wherever such third-party notices normally appear. The contents + of the NOTICE file are for informational purposes only and + do not modify the License. You may add Your own attribution + notices within Derivative Works that You distribute, alongside + or as an addendum to the NOTICE text from the Work, provided + that such additional attribution notices cannot be construed + as modifying the License. 
+ + You may add Your own copyright statement to Your modifications and + may provide additional or different license terms and conditions + for use, reproduction, or distribution of Your modifications, or + for any such Derivative Works as a whole, provided Your use, + reproduction, and distribution of the Work otherwise complies with + the conditions stated in this License. + + 5. Submission of Contributions. Unless You explicitly state otherwise, + any Contribution intentionally submitted for inclusion in the Work + by You to the Licensor shall be under the terms and conditions of + this License, without any additional terms or conditions. + Notwithstanding the above, nothing herein shall supersede or modify + the terms of any separate license agreement you may have executed + with Licensor regarding such Contributions. + + 6. Trademarks. This License does not grant permission to use the trade + names, trademarks, service marks, or product names of the Licensor, + except as required for reasonable and customary use in describing the + origin of the Work and reproducing the content of the NOTICE file. + + 7. Disclaimer of Warranty. Unless required by applicable law or + agreed to in writing, Licensor provides the Work (and each + Contributor provides its Contributions) on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or + implied, including, without limitation, any warranties or conditions + of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A + PARTICULAR PURPOSE. You are solely responsible for determining the + appropriateness of using or redistributing the Work and assume any + risks associated with Your exercise of permissions under this License. + + 8. Limitation of Liability. 
In no event and under no legal theory, + whether in tort (including negligence), contract, or otherwise, + unless required by applicable law (such as deliberate and grossly + negligent acts) or agreed to in writing, shall any Contributor be + liable to You for damages, including any direct, indirect, special, + incidental, or consequential damages of any character arising as a + result of this License or out of the use or inability to use the + Work (including but not limited to damages for loss of goodwill, + work stoppage, computer failure or malfunction, or any and all + other commercial damages or losses), even if such Contributor + has been advised of the possibility of such damages. + + 9. Accepting Warranty or Additional Liability. While redistributing + the Work or Derivative Works thereof, You may choose to offer, + and charge a fee for, acceptance of support, warranty, indemnity, + or other liability obligations and/or rights consistent with this + License. However, in accepting such obligations, You may act only + on Your own behalf and on Your sole responsibility, not on behalf + of any other Contributor, and only if You agree to indemnify, + defend, and hold each Contributor harmless for any liability + incurred by, or claims asserted against, such Contributor by reason + of your accepting any such warranty or additional liability. + + END OF TERMS AND CONDITIONS + + APPENDIX: How to apply the Apache License to your work. + + To apply the Apache License to your work, attach the following + boilerplate notice, with the fields enclosed by brackets "{}" + replaced with your own identifying information. (Don't include + the brackets!) The text should be enclosed in the appropriate + comment syntax for the file format. We also recommend that a + file or class name and description of purpose be included on the + same "printed page" as the copyright notice for easier + identification within third-party archives. + + Copyright 2018-2020 Amazon.com, Inc. 
or its affiliates. All Rights Reserved. + + Licensed under the Apache License, Version 2.0 (the "License"); + you may not use this file except in compliance with the License. + You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + + Unless required by applicable law or agreed to in writing, software + distributed under the License is distributed on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + See the License for the specific language governing permissions and + limitations under the License. diff --git a/amplify/functions/deleteDocument/node_modules/@smithy/querystring-builder/README.md b/amplify/functions/deleteDocument/node_modules/@smithy/querystring-builder/README.md new file mode 100644 index 0000000..00275da --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@smithy/querystring-builder/README.md @@ -0,0 +1,10 @@ +# @smithy/querystring-builder + +[![NPM version](https://img.shields.io/npm/v/@smithy/querystring-builder/latest.svg)](https://www.npmjs.com/package/@smithy/querystring-builder) +[![NPM downloads](https://img.shields.io/npm/dm/@smithy/querystring-builder.svg)](https://www.npmjs.com/package/@smithy/querystring-builder) + +> An internal package + +## Usage + +You probably shouldn't, at least directly. 
diff --git a/amplify/functions/deleteDocument/node_modules/@smithy/querystring-builder/dist-cjs/index.js b/amplify/functions/deleteDocument/node_modules/@smithy/querystring-builder/dist-cjs/index.js new file mode 100644 index 0000000..7030242 --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@smithy/querystring-builder/dist-cjs/index.js @@ -0,0 +1,52 @@ +var __defProp = Object.defineProperty; +var __getOwnPropDesc = Object.getOwnPropertyDescriptor; +var __getOwnPropNames = Object.getOwnPropertyNames; +var __hasOwnProp = Object.prototype.hasOwnProperty; +var __name = (target, value) => __defProp(target, "name", { value, configurable: true }); +var __export = (target, all) => { + for (var name in all) + __defProp(target, name, { get: all[name], enumerable: true }); +}; +var __copyProps = (to, from, except, desc) => { + if (from && typeof from === "object" || typeof from === "function") { + for (let key of __getOwnPropNames(from)) + if (!__hasOwnProp.call(to, key) && key !== except) + __defProp(to, key, { get: () => from[key], enumerable: !(desc = __getOwnPropDesc(from, key)) || desc.enumerable }); + } + return to; +}; +var __toCommonJS = (mod) => __copyProps(__defProp({}, "__esModule", { value: true }), mod); + +// src/index.ts +var src_exports = {}; +__export(src_exports, { + buildQueryString: () => buildQueryString +}); +module.exports = __toCommonJS(src_exports); +var import_util_uri_escape = require("@smithy/util-uri-escape"); +function buildQueryString(query) { + const parts = []; + for (let key of Object.keys(query).sort()) { + const value = query[key]; + key = (0, import_util_uri_escape.escapeUri)(key); + if (Array.isArray(value)) { + for (let i = 0, iLen = value.length; i < iLen; i++) { + parts.push(`${key}=${(0, import_util_uri_escape.escapeUri)(value[i])}`); + } + } else { + let qsEntry = key; + if (value || typeof value === "string") { + qsEntry += `=${(0, import_util_uri_escape.escapeUri)(value)}`; + } + parts.push(qsEntry); + } + } + 
return parts.join("&"); +} +__name(buildQueryString, "buildQueryString"); +// Annotate the CommonJS export names for ESM import in node: + +0 && (module.exports = { + buildQueryString +}); + diff --git a/amplify/functions/deleteDocument/node_modules/@smithy/querystring-builder/dist-es/index.js b/amplify/functions/deleteDocument/node_modules/@smithy/querystring-builder/dist-es/index.js new file mode 100644 index 0000000..fbc7684 --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@smithy/querystring-builder/dist-es/index.js @@ -0,0 +1,21 @@ +import { escapeUri } from "@smithy/util-uri-escape"; +export function buildQueryString(query) { + const parts = []; + for (let key of Object.keys(query).sort()) { + const value = query[key]; + key = escapeUri(key); + if (Array.isArray(value)) { + for (let i = 0, iLen = value.length; i < iLen; i++) { + parts.push(`${key}=${escapeUri(value[i])}`); + } + } + else { + let qsEntry = key; + if (value || typeof value === "string") { + qsEntry += `=${escapeUri(value)}`; + } + parts.push(qsEntry); + } + } + return parts.join("&"); +} diff --git a/amplify/functions/deleteDocument/node_modules/@smithy/querystring-builder/dist-types/index.d.ts b/amplify/functions/deleteDocument/node_modules/@smithy/querystring-builder/dist-types/index.d.ts new file mode 100644 index 0000000..538b1b0 --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@smithy/querystring-builder/dist-types/index.d.ts @@ -0,0 +1,5 @@ +import { QueryParameterBag } from "@smithy/types"; +/** + * @internal + */ +export declare function buildQueryString(query: QueryParameterBag): string; diff --git a/amplify/functions/deleteDocument/node_modules/@smithy/querystring-builder/dist-types/ts3.4/index.d.ts b/amplify/functions/deleteDocument/node_modules/@smithy/querystring-builder/dist-types/ts3.4/index.d.ts new file mode 100644 index 0000000..1f866f3 --- /dev/null +++ 
b/amplify/functions/deleteDocument/node_modules/@smithy/querystring-builder/dist-types/ts3.4/index.d.ts @@ -0,0 +1,5 @@ +import { QueryParameterBag } from "@smithy/types"; +/** + * @internal + */ +export declare function buildQueryString(query: QueryParameterBag): string; diff --git a/amplify/functions/deleteDocument/node_modules/@smithy/querystring-builder/package.json b/amplify/functions/deleteDocument/node_modules/@smithy/querystring-builder/package.json new file mode 100644 index 0000000..d144f0a --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@smithy/querystring-builder/package.json @@ -0,0 +1,60 @@ +{ + "name": "@smithy/querystring-builder", + "version": "4.0.2", + "scripts": { + "build": "concurrently 'yarn:build:cjs' 'yarn:build:es' 'yarn:build:types && yarn build:types:downlevel'", + "build:cjs": "node ../../scripts/inline querystring-builder", + "build:es": "yarn g:tsc -p tsconfig.es.json", + "build:types": "yarn g:tsc -p tsconfig.types.json", + "build:types:downlevel": "rimraf dist-types/ts3.4 && downlevel-dts dist-types dist-types/ts3.4", + "stage-release": "rimraf ./.release && yarn pack && mkdir ./.release && tar zxvf ./package.tgz --directory ./.release && rm ./package.tgz", + "clean": "rimraf ./dist-* && rimraf *.tsbuildinfo || exit 0", + "lint": "eslint -c ../../.eslintrc.js \"src/**/*.ts\"", + "format": "prettier --config ../../prettier.config.js --ignore-path ../../.prettierignore --write \"**/*.{ts,md,json}\"", + "test": "exit 0" + }, + "main": "./dist-cjs/index.js", + "module": "./dist-es/index.js", + "types": "./dist-types/index.d.ts", + "author": { + "name": "AWS SDK for JavaScript Team", + "url": "https://aws.amazon.com/javascript/" + }, + "license": "Apache-2.0", + "dependencies": { + "@smithy/types": "^4.2.0", + "@smithy/util-uri-escape": "^4.0.0", + "tslib": "^2.6.2" + }, + "engines": { + "node": ">=18.0.0" + }, + "typesVersions": { + "<4.0": { + "dist-types/*": [ + "dist-types/ts3.4/*" + ] + } + }, + "files": [ + 
"dist-*/**" + ], + "homepage": "https://github.com/smithy-lang/smithy-typescript/tree/main/packages/querystring-builder", + "repository": { + "type": "git", + "url": "https://github.com/smithy-lang/smithy-typescript.git", + "directory": "packages/querystring-builder" + }, + "devDependencies": { + "concurrently": "7.0.0", + "downlevel-dts": "0.10.1", + "rimraf": "3.0.2", + "typedoc": "0.23.23" + }, + "typedoc": { + "entryPoint": "src/index.ts" + }, + "publishConfig": { + "directory": ".release/package" + } +} \ No newline at end of file diff --git a/amplify/functions/deleteDocument/node_modules/@smithy/querystring-parser/LICENSE b/amplify/functions/deleteDocument/node_modules/@smithy/querystring-parser/LICENSE new file mode 100644 index 0000000..dd65ae0 --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@smithy/querystring-parser/LICENSE @@ -0,0 +1,201 @@ + Apache License + Version 2.0, January 2004 + http://www.apache.org/licenses/ + + TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION + + 1. Definitions. + + "License" shall mean the terms and conditions for use, reproduction, + and distribution as defined by Sections 1 through 9 of this document. + + "Licensor" shall mean the copyright owner or entity authorized by + the copyright owner that is granting the License. + + "Legal Entity" shall mean the union of the acting entity and all + other entities that control, are controlled by, or are under common + control with that entity. For the purposes of this definition, + "control" means (i) the power, direct or indirect, to cause the + direction or management of such entity, whether by contract or + otherwise, or (ii) ownership of fifty percent (50%) or more of the + outstanding shares, or (iii) beneficial ownership of such entity. + + "You" (or "Your") shall mean an individual or Legal Entity + exercising permissions granted by this License. 
+ + "Source" form shall mean the preferred form for making modifications, + including but not limited to software source code, documentation + source, and configuration files. + + "Object" form shall mean any form resulting from mechanical + transformation or translation of a Source form, including but + not limited to compiled object code, generated documentation, + and conversions to other media types. + + "Work" shall mean the work of authorship, whether in Source or + Object form, made available under the License, as indicated by a + copyright notice that is included in or attached to the work + (an example is provided in the Appendix below). + + "Derivative Works" shall mean any work, whether in Source or Object + form, that is based on (or derived from) the Work and for which the + editorial revisions, annotations, elaborations, or other modifications + represent, as a whole, an original work of authorship. For the purposes + of this License, Derivative Works shall not include works that remain + separable from, or merely link (or bind by name) to the interfaces of, + the Work and Derivative Works thereof. + + "Contribution" shall mean any work of authorship, including + the original version of the Work and any modifications or additions + to that Work or Derivative Works thereof, that is intentionally + submitted to Licensor for inclusion in the Work by the copyright owner + or by an individual or Legal Entity authorized to submit on behalf of + the copyright owner. 
For the purposes of this definition, "submitted" + means any form of electronic, verbal, or written communication sent + to the Licensor or its representatives, including but not limited to + communication on electronic mailing lists, source code control systems, + and issue tracking systems that are managed by, or on behalf of, the + Licensor for the purpose of discussing and improving the Work, but + excluding communication that is conspicuously marked or otherwise + designated in writing by the copyright owner as "Not a Contribution." + + "Contributor" shall mean Licensor and any individual or Legal Entity + on behalf of whom a Contribution has been received by Licensor and + subsequently incorporated within the Work. + + 2. Grant of Copyright License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + copyright license to reproduce, prepare Derivative Works of, + publicly display, publicly perform, sublicense, and distribute the + Work and such Derivative Works in Source or Object form. + + 3. Grant of Patent License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + (except as stated in this section) patent license to make, have made, + use, offer to sell, sell, import, and otherwise transfer the Work, + where such license applies only to those patent claims licensable + by such Contributor that are necessarily infringed by their + Contribution(s) alone or by combination of their Contribution(s) + with the Work to which such Contribution(s) was submitted. 
If You + institute patent litigation against any entity (including a + cross-claim or counterclaim in a lawsuit) alleging that the Work + or a Contribution incorporated within the Work constitutes direct + or contributory patent infringement, then any patent licenses + granted to You under this License for that Work shall terminate + as of the date such litigation is filed. + + 4. Redistribution. You may reproduce and distribute copies of the + Work or Derivative Works thereof in any medium, with or without + modifications, and in Source or Object form, provided that You + meet the following conditions: + + (a) You must give any other recipients of the Work or + Derivative Works a copy of this License; and + + (b) You must cause any modified files to carry prominent notices + stating that You changed the files; and + + (c) You must retain, in the Source form of any Derivative Works + that You distribute, all copyright, patent, trademark, and + attribution notices from the Source form of the Work, + excluding those notices that do not pertain to any part of + the Derivative Works; and + + (d) If the Work includes a "NOTICE" text file as part of its + distribution, then any Derivative Works that You distribute must + include a readable copy of the attribution notices contained + within such NOTICE file, excluding those notices that do not + pertain to any part of the Derivative Works, in at least one + of the following places: within a NOTICE text file distributed + as part of the Derivative Works; within the Source form or + documentation, if provided along with the Derivative Works; or, + within a display generated by the Derivative Works, if and + wherever such third-party notices normally appear. The contents + of the NOTICE file are for informational purposes only and + do not modify the License. 
You may add Your own attribution + notices within Derivative Works that You distribute, alongside + or as an addendum to the NOTICE text from the Work, provided + that such additional attribution notices cannot be construed + as modifying the License. + + You may add Your own copyright statement to Your modifications and + may provide additional or different license terms and conditions + for use, reproduction, or distribution of Your modifications, or + for any such Derivative Works as a whole, provided Your use, + reproduction, and distribution of the Work otherwise complies with + the conditions stated in this License. + + 5. Submission of Contributions. Unless You explicitly state otherwise, + any Contribution intentionally submitted for inclusion in the Work + by You to the Licensor shall be under the terms and conditions of + this License, without any additional terms or conditions. + Notwithstanding the above, nothing herein shall supersede or modify + the terms of any separate license agreement you may have executed + with Licensor regarding such Contributions. + + 6. Trademarks. This License does not grant permission to use the trade + names, trademarks, service marks, or product names of the Licensor, + except as required for reasonable and customary use in describing the + origin of the Work and reproducing the content of the NOTICE file. + + 7. Disclaimer of Warranty. Unless required by applicable law or + agreed to in writing, Licensor provides the Work (and each + Contributor provides its Contributions) on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or + implied, including, without limitation, any warranties or conditions + of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A + PARTICULAR PURPOSE. You are solely responsible for determining the + appropriateness of using or redistributing the Work and assume any + risks associated with Your exercise of permissions under this License. + + 8. 
Limitation of Liability. In no event and under no legal theory, + whether in tort (including negligence), contract, or otherwise, + unless required by applicable law (such as deliberate and grossly + negligent acts) or agreed to in writing, shall any Contributor be + liable to You for damages, including any direct, indirect, special, + incidental, or consequential damages of any character arising as a + result of this License or out of the use or inability to use the + Work (including but not limited to damages for loss of goodwill, + work stoppage, computer failure or malfunction, or any and all + other commercial damages or losses), even if such Contributor + has been advised of the possibility of such damages. + + 9. Accepting Warranty or Additional Liability. While redistributing + the Work or Derivative Works thereof, You may choose to offer, + and charge a fee for, acceptance of support, warranty, indemnity, + or other liability obligations and/or rights consistent with this + License. However, in accepting such obligations, You may act only + on Your own behalf and on Your sole responsibility, not on behalf + of any other Contributor, and only if You agree to indemnify, + defend, and hold each Contributor harmless for any liability + incurred by, or claims asserted against, such Contributor by reason + of your accepting any such warranty or additional liability. + + END OF TERMS AND CONDITIONS + + APPENDIX: How to apply the Apache License to your work. + + To apply the Apache License to your work, attach the following + boilerplate notice, with the fields enclosed by brackets "{}" + replaced with your own identifying information. (Don't include + the brackets!) The text should be enclosed in the appropriate + comment syntax for the file format. We also recommend that a + file or class name and description of purpose be included on the + same "printed page" as the copyright notice for easier + identification within third-party archives. 
+ + Copyright 2018-2020 Amazon.com, Inc. or its affiliates. All Rights Reserved. + + Licensed under the Apache License, Version 2.0 (the "License"); + you may not use this file except in compliance with the License. + You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + + Unless required by applicable law or agreed to in writing, software + distributed under the License is distributed on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + See the License for the specific language governing permissions and + limitations under the License. diff --git a/amplify/functions/deleteDocument/node_modules/@smithy/querystring-parser/README.md b/amplify/functions/deleteDocument/node_modules/@smithy/querystring-parser/README.md new file mode 100644 index 0000000..02dcf51 --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@smithy/querystring-parser/README.md @@ -0,0 +1,10 @@ +# @smithy/querystring-parser + +[![NPM version](https://img.shields.io/npm/v/@smithy/querystring-parser/latest.svg)](https://www.npmjs.com/package/@smithy/querystring-parser) +[![NPM downloads](https://img.shields.io/npm/dm/@smithy/querystring-parser.svg)](https://www.npmjs.com/package/@smithy/querystring-parser) + +> An internal package + +## Usage + +You probably shouldn't, at least directly. 
diff --git a/amplify/functions/deleteDocument/node_modules/@smithy/querystring-parser/dist-cjs/index.js b/amplify/functions/deleteDocument/node_modules/@smithy/querystring-parser/dist-cjs/index.js new file mode 100644 index 0000000..924647c --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@smithy/querystring-parser/dist-cjs/index.js @@ -0,0 +1,53 @@ +var __defProp = Object.defineProperty; +var __getOwnPropDesc = Object.getOwnPropertyDescriptor; +var __getOwnPropNames = Object.getOwnPropertyNames; +var __hasOwnProp = Object.prototype.hasOwnProperty; +var __name = (target, value) => __defProp(target, "name", { value, configurable: true }); +var __export = (target, all) => { + for (var name in all) + __defProp(target, name, { get: all[name], enumerable: true }); +}; +var __copyProps = (to, from, except, desc) => { + if (from && typeof from === "object" || typeof from === "function") { + for (let key of __getOwnPropNames(from)) + if (!__hasOwnProp.call(to, key) && key !== except) + __defProp(to, key, { get: () => from[key], enumerable: !(desc = __getOwnPropDesc(from, key)) || desc.enumerable }); + } + return to; +}; +var __toCommonJS = (mod) => __copyProps(__defProp({}, "__esModule", { value: true }), mod); + +// src/index.ts +var src_exports = {}; +__export(src_exports, { + parseQueryString: () => parseQueryString +}); +module.exports = __toCommonJS(src_exports); +function parseQueryString(querystring) { + const query = {}; + querystring = querystring.replace(/^\?/, ""); + if (querystring) { + for (const pair of querystring.split("&")) { + let [key, value = null] = pair.split("="); + key = decodeURIComponent(key); + if (value) { + value = decodeURIComponent(value); + } + if (!(key in query)) { + query[key] = value; + } else if (Array.isArray(query[key])) { + query[key].push(value); + } else { + query[key] = [query[key], value]; + } + } + } + return query; +} +__name(parseQueryString, "parseQueryString"); +// Annotate the CommonJS export names for ESM 
import in node: + +0 && (module.exports = { + parseQueryString +}); + diff --git a/amplify/functions/deleteDocument/node_modules/@smithy/querystring-parser/dist-es/index.js b/amplify/functions/deleteDocument/node_modules/@smithy/querystring-parser/dist-es/index.js new file mode 100644 index 0000000..bd7bf00 --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@smithy/querystring-parser/dist-es/index.js @@ -0,0 +1,23 @@ +export function parseQueryString(querystring) { + const query = {}; + querystring = querystring.replace(/^\?/, ""); + if (querystring) { + for (const pair of querystring.split("&")) { + let [key, value = null] = pair.split("="); + key = decodeURIComponent(key); + if (value) { + value = decodeURIComponent(value); + } + if (!(key in query)) { + query[key] = value; + } + else if (Array.isArray(query[key])) { + query[key].push(value); + } + else { + query[key] = [query[key], value]; + } + } + } + return query; +} diff --git a/amplify/functions/deleteDocument/node_modules/@smithy/querystring-parser/dist-types/index.d.ts b/amplify/functions/deleteDocument/node_modules/@smithy/querystring-parser/dist-types/index.d.ts new file mode 100644 index 0000000..fdc1ba5 --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@smithy/querystring-parser/dist-types/index.d.ts @@ -0,0 +1,5 @@ +import { QueryParameterBag } from "@smithy/types"; +/** + * @internal + */ +export declare function parseQueryString(querystring: string): QueryParameterBag; diff --git a/amplify/functions/deleteDocument/node_modules/@smithy/querystring-parser/dist-types/ts3.4/index.d.ts b/amplify/functions/deleteDocument/node_modules/@smithy/querystring-parser/dist-types/ts3.4/index.d.ts new file mode 100644 index 0000000..8bb747d --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@smithy/querystring-parser/dist-types/ts3.4/index.d.ts @@ -0,0 +1,5 @@ +import { QueryParameterBag } from "@smithy/types"; +/** + * @internal + */ +export declare function 
parseQueryString(querystring: string): QueryParameterBag; diff --git a/amplify/functions/deleteDocument/node_modules/@smithy/querystring-parser/package.json b/amplify/functions/deleteDocument/node_modules/@smithy/querystring-parser/package.json new file mode 100644 index 0000000..9a27e7e --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@smithy/querystring-parser/package.json @@ -0,0 +1,60 @@ +{ + "name": "@smithy/querystring-parser", + "version": "4.0.2", + "scripts": { + "build": "concurrently 'yarn:build:cjs' 'yarn:build:es' 'yarn:build:types && yarn build:types:downlevel'", + "build:cjs": "node ../../scripts/inline querystring-parser", + "build:es": "yarn g:tsc -p tsconfig.es.json", + "build:types": "yarn g:tsc -p tsconfig.types.json", + "build:types:downlevel": "rimraf dist-types/ts3.4 && downlevel-dts dist-types dist-types/ts3.4", + "stage-release": "rimraf ./.release && yarn pack && mkdir ./.release && tar zxvf ./package.tgz --directory ./.release && rm ./package.tgz", + "clean": "rimraf ./dist-* && rimraf *.tsbuildinfo || exit 0", + "lint": "eslint -c ../../.eslintrc.js \"src/**/*.ts\"", + "format": "prettier --config ../../prettier.config.js --ignore-path ../../.prettierignore --write \"**/*.{ts,md,json}\"", + "test": "yarn g:vitest run", + "test:watch": "yarn g:vitest watch" + }, + "main": "./dist-cjs/index.js", + "module": "./dist-es/index.js", + "types": "./dist-types/index.d.ts", + "author": { + "name": "AWS SDK for JavaScript Team", + "url": "https://aws.amazon.com/javascript/" + }, + "license": "Apache-2.0", + "dependencies": { + "@smithy/types": "^4.2.0", + "tslib": "^2.6.2" + }, + "engines": { + "node": ">=18.0.0" + }, + "typesVersions": { + "<4.0": { + "dist-types/*": [ + "dist-types/ts3.4/*" + ] + } + }, + "files": [ + "dist-*/**" + ], + "homepage": "https://github.com/smithy-lang/smithy-typescript/tree/main/packages/querystring-parser", + "repository": { + "type": "git", + "url": 
"https://github.com/smithy-lang/smithy-typescript.git", + "directory": "packages/querystring-parser" + }, + "devDependencies": { + "concurrently": "7.0.0", + "downlevel-dts": "0.10.1", + "rimraf": "3.0.2", + "typedoc": "0.23.23" + }, + "typedoc": { + "entryPoint": "src/index.ts" + }, + "publishConfig": { + "directory": ".release/package" + } +} \ No newline at end of file diff --git a/amplify/functions/deleteDocument/node_modules/@smithy/service-error-classification/LICENSE b/amplify/functions/deleteDocument/node_modules/@smithy/service-error-classification/LICENSE new file mode 100644 index 0000000..dd65ae0 --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@smithy/service-error-classification/LICENSE @@ -0,0 +1,201 @@ + Apache License + Version 2.0, January 2004 + http://www.apache.org/licenses/ + + TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION + + 1. Definitions. + + "License" shall mean the terms and conditions for use, reproduction, + and distribution as defined by Sections 1 through 9 of this document. + + "Licensor" shall mean the copyright owner or entity authorized by + the copyright owner that is granting the License. + + "Legal Entity" shall mean the union of the acting entity and all + other entities that control, are controlled by, or are under common + control with that entity. For the purposes of this definition, + "control" means (i) the power, direct or indirect, to cause the + direction or management of such entity, whether by contract or + otherwise, or (ii) ownership of fifty percent (50%) or more of the + outstanding shares, or (iii) beneficial ownership of such entity. + + "You" (or "Your") shall mean an individual or Legal Entity + exercising permissions granted by this License. + + "Source" form shall mean the preferred form for making modifications, + including but not limited to software source code, documentation + source, and configuration files. 
+ + "Object" form shall mean any form resulting from mechanical + transformation or translation of a Source form, including but + not limited to compiled object code, generated documentation, + and conversions to other media types. + + "Work" shall mean the work of authorship, whether in Source or + Object form, made available under the License, as indicated by a + copyright notice that is included in or attached to the work + (an example is provided in the Appendix below). + + "Derivative Works" shall mean any work, whether in Source or Object + form, that is based on (or derived from) the Work and for which the + editorial revisions, annotations, elaborations, or other modifications + represent, as a whole, an original work of authorship. For the purposes + of this License, Derivative Works shall not include works that remain + separable from, or merely link (or bind by name) to the interfaces of, + the Work and Derivative Works thereof. + + "Contribution" shall mean any work of authorship, including + the original version of the Work and any modifications or additions + to that Work or Derivative Works thereof, that is intentionally + submitted to Licensor for inclusion in the Work by the copyright owner + or by an individual or Legal Entity authorized to submit on behalf of + the copyright owner. For the purposes of this definition, "submitted" + means any form of electronic, verbal, or written communication sent + to the Licensor or its representatives, including but not limited to + communication on electronic mailing lists, source code control systems, + and issue tracking systems that are managed by, or on behalf of, the + Licensor for the purpose of discussing and improving the Work, but + excluding communication that is conspicuously marked or otherwise + designated in writing by the copyright owner as "Not a Contribution." 
+ + "Contributor" shall mean Licensor and any individual or Legal Entity + on behalf of whom a Contribution has been received by Licensor and + subsequently incorporated within the Work. + + 2. Grant of Copyright License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + copyright license to reproduce, prepare Derivative Works of, + publicly display, publicly perform, sublicense, and distribute the + Work and such Derivative Works in Source or Object form. + + 3. Grant of Patent License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + (except as stated in this section) patent license to make, have made, + use, offer to sell, sell, import, and otherwise transfer the Work, + where such license applies only to those patent claims licensable + by such Contributor that are necessarily infringed by their + Contribution(s) alone or by combination of their Contribution(s) + with the Work to which such Contribution(s) was submitted. If You + institute patent litigation against any entity (including a + cross-claim or counterclaim in a lawsuit) alleging that the Work + or a Contribution incorporated within the Work constitutes direct + or contributory patent infringement, then any patent licenses + granted to You under this License for that Work shall terminate + as of the date such litigation is filed. + + 4. Redistribution. 
You may reproduce and distribute copies of the + Work or Derivative Works thereof in any medium, with or without + modifications, and in Source or Object form, provided that You + meet the following conditions: + + (a) You must give any other recipients of the Work or + Derivative Works a copy of this License; and + + (b) You must cause any modified files to carry prominent notices + stating that You changed the files; and + + (c) You must retain, in the Source form of any Derivative Works + that You distribute, all copyright, patent, trademark, and + attribution notices from the Source form of the Work, + excluding those notices that do not pertain to any part of + the Derivative Works; and + + (d) If the Work includes a "NOTICE" text file as part of its + distribution, then any Derivative Works that You distribute must + include a readable copy of the attribution notices contained + within such NOTICE file, excluding those notices that do not + pertain to any part of the Derivative Works, in at least one + of the following places: within a NOTICE text file distributed + as part of the Derivative Works; within the Source form or + documentation, if provided along with the Derivative Works; or, + within a display generated by the Derivative Works, if and + wherever such third-party notices normally appear. The contents + of the NOTICE file are for informational purposes only and + do not modify the License. You may add Your own attribution + notices within Derivative Works that You distribute, alongside + or as an addendum to the NOTICE text from the Work, provided + that such additional attribution notices cannot be construed + as modifying the License. 
+ + You may add Your own copyright statement to Your modifications and + may provide additional or different license terms and conditions + for use, reproduction, or distribution of Your modifications, or + for any such Derivative Works as a whole, provided Your use, + reproduction, and distribution of the Work otherwise complies with + the conditions stated in this License. + + 5. Submission of Contributions. Unless You explicitly state otherwise, + any Contribution intentionally submitted for inclusion in the Work + by You to the Licensor shall be under the terms and conditions of + this License, without any additional terms or conditions. + Notwithstanding the above, nothing herein shall supersede or modify + the terms of any separate license agreement you may have executed + with Licensor regarding such Contributions. + + 6. Trademarks. This License does not grant permission to use the trade + names, trademarks, service marks, or product names of the Licensor, + except as required for reasonable and customary use in describing the + origin of the Work and reproducing the content of the NOTICE file. + + 7. Disclaimer of Warranty. Unless required by applicable law or + agreed to in writing, Licensor provides the Work (and each + Contributor provides its Contributions) on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or + implied, including, without limitation, any warranties or conditions + of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A + PARTICULAR PURPOSE. You are solely responsible for determining the + appropriateness of using or redistributing the Work and assume any + risks associated with Your exercise of permissions under this License. + + 8. Limitation of Liability. 
In no event and under no legal theory, + whether in tort (including negligence), contract, or otherwise, + unless required by applicable law (such as deliberate and grossly + negligent acts) or agreed to in writing, shall any Contributor be + liable to You for damages, including any direct, indirect, special, + incidental, or consequential damages of any character arising as a + result of this License or out of the use or inability to use the + Work (including but not limited to damages for loss of goodwill, + work stoppage, computer failure or malfunction, or any and all + other commercial damages or losses), even if such Contributor + has been advised of the possibility of such damages. + + 9. Accepting Warranty or Additional Liability. While redistributing + the Work or Derivative Works thereof, You may choose to offer, + and charge a fee for, acceptance of support, warranty, indemnity, + or other liability obligations and/or rights consistent with this + License. However, in accepting such obligations, You may act only + on Your own behalf and on Your sole responsibility, not on behalf + of any other Contributor, and only if You agree to indemnify, + defend, and hold each Contributor harmless for any liability + incurred by, or claims asserted against, such Contributor by reason + of your accepting any such warranty or additional liability. + + END OF TERMS AND CONDITIONS + + APPENDIX: How to apply the Apache License to your work. + + To apply the Apache License to your work, attach the following + boilerplate notice, with the fields enclosed by brackets "{}" + replaced with your own identifying information. (Don't include + the brackets!) The text should be enclosed in the appropriate + comment syntax for the file format. We also recommend that a + file or class name and description of purpose be included on the + same "printed page" as the copyright notice for easier + identification within third-party archives. + + Copyright 2018-2020 Amazon.com, Inc. 
or its affiliates. All Rights Reserved. + + Licensed under the Apache License, Version 2.0 (the "License"); + you may not use this file except in compliance with the License. + You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + + Unless required by applicable law or agreed to in writing, software + distributed under the License is distributed on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + See the License for the specific language governing permissions and + limitations under the License. diff --git a/amplify/functions/deleteDocument/node_modules/@smithy/service-error-classification/README.md b/amplify/functions/deleteDocument/node_modules/@smithy/service-error-classification/README.md new file mode 100644 index 0000000..902dd43 --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@smithy/service-error-classification/README.md @@ -0,0 +1,4 @@ +# @smithy/service-error-classification + +[![NPM version](https://img.shields.io/npm/v/@smithy/service-error-classification/latest.svg)](https://www.npmjs.com/package/@smithy/service-error-classification) +[![NPM downloads](https://img.shields.io/npm/dm/@smithy/service-error-classification.svg)](https://www.npmjs.com/package/@smithy/service-error-classification) diff --git a/amplify/functions/deleteDocument/node_modules/@smithy/service-error-classification/dist-cjs/constants.js b/amplify/functions/deleteDocument/node_modules/@smithy/service-error-classification/dist-cjs/constants.js new file mode 100644 index 0000000..532e610 --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@smithy/service-error-classification/dist-cjs/constants.js @@ -0,0 +1 @@ +module.exports = require("./index.js"); \ No newline at end of file diff --git a/amplify/functions/deleteDocument/node_modules/@smithy/service-error-classification/dist-cjs/index.js 
b/amplify/functions/deleteDocument/node_modules/@smithy/service-error-classification/dist-cjs/index.js new file mode 100644 index 0000000..bcca2b3 --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@smithy/service-error-classification/dist-cjs/index.js @@ -0,0 +1,109 @@ +var __defProp = Object.defineProperty; +var __getOwnPropDesc = Object.getOwnPropertyDescriptor; +var __getOwnPropNames = Object.getOwnPropertyNames; +var __hasOwnProp = Object.prototype.hasOwnProperty; +var __name = (target, value) => __defProp(target, "name", { value, configurable: true }); +var __export = (target, all) => { + for (var name in all) + __defProp(target, name, { get: all[name], enumerable: true }); +}; +var __copyProps = (to, from, except, desc) => { + if (from && typeof from === "object" || typeof from === "function") { + for (let key of __getOwnPropNames(from)) + if (!__hasOwnProp.call(to, key) && key !== except) + __defProp(to, key, { get: () => from[key], enumerable: !(desc = __getOwnPropDesc(from, key)) || desc.enumerable }); + } + return to; +}; +var __toCommonJS = (mod) => __copyProps(__defProp({}, "__esModule", { value: true }), mod); + +// src/index.ts +var src_exports = {}; +__export(src_exports, { + isBrowserNetworkError: () => isBrowserNetworkError, + isClockSkewCorrectedError: () => isClockSkewCorrectedError, + isClockSkewError: () => isClockSkewError, + isRetryableByTrait: () => isRetryableByTrait, + isServerError: () => isServerError, + isThrottlingError: () => isThrottlingError, + isTransientError: () => isTransientError +}); +module.exports = __toCommonJS(src_exports); + +// src/constants.ts +var CLOCK_SKEW_ERROR_CODES = [ + "AuthFailure", + "InvalidSignatureException", + "RequestExpired", + "RequestInTheFuture", + "RequestTimeTooSkewed", + "SignatureDoesNotMatch" +]; +var THROTTLING_ERROR_CODES = [ + "BandwidthLimitExceeded", + "EC2ThrottledException", + "LimitExceededException", + "PriorRequestNotComplete", + "ProvisionedThroughputExceededException", 
+ "RequestLimitExceeded", + "RequestThrottled", + "RequestThrottledException", + "SlowDown", + "ThrottledException", + "Throttling", + "ThrottlingException", + "TooManyRequestsException", + "TransactionInProgressException" + // DynamoDB +]; +var TRANSIENT_ERROR_CODES = ["TimeoutError", "RequestTimeout", "RequestTimeoutException"]; +var TRANSIENT_ERROR_STATUS_CODES = [500, 502, 503, 504]; +var NODEJS_TIMEOUT_ERROR_CODES = ["ECONNRESET", "ECONNREFUSED", "EPIPE", "ETIMEDOUT"]; + +// src/index.ts +var isRetryableByTrait = /* @__PURE__ */ __name((error) => error.$retryable !== void 0, "isRetryableByTrait"); +var isClockSkewError = /* @__PURE__ */ __name((error) => CLOCK_SKEW_ERROR_CODES.includes(error.name), "isClockSkewError"); +var isClockSkewCorrectedError = /* @__PURE__ */ __name((error) => error.$metadata?.clockSkewCorrected, "isClockSkewCorrectedError"); +var isBrowserNetworkError = /* @__PURE__ */ __name((error) => { + const errorMessages = /* @__PURE__ */ new Set([ + "Failed to fetch", + // Chrome + "NetworkError when attempting to fetch resource", + // Firefox + "The Internet connection appears to be offline", + // Safari 16 + "Load failed", + // Safari 17+ + "Network request failed" + // `cross-fetch` + ]); + const isValid = error && error instanceof TypeError; + if (!isValid) { + return false; + } + return errorMessages.has(error.message); +}, "isBrowserNetworkError"); +var isThrottlingError = /* @__PURE__ */ __name((error) => error.$metadata?.httpStatusCode === 429 || THROTTLING_ERROR_CODES.includes(error.name) || error.$retryable?.throttling == true, "isThrottlingError"); +var isTransientError = /* @__PURE__ */ __name((error, depth = 0) => isClockSkewCorrectedError(error) || TRANSIENT_ERROR_CODES.includes(error.name) || NODEJS_TIMEOUT_ERROR_CODES.includes(error?.code || "") || TRANSIENT_ERROR_STATUS_CODES.includes(error.$metadata?.httpStatusCode || 0) || isBrowserNetworkError(error) || error.cause !== void 0 && depth <= 10 && isTransientError(error.cause, 
depth + 1), "isTransientError"); +var isServerError = /* @__PURE__ */ __name((error) => { + if (error.$metadata?.httpStatusCode !== void 0) { + const statusCode = error.$metadata.httpStatusCode; + if (500 <= statusCode && statusCode <= 599 && !isTransientError(error)) { + return true; + } + return false; + } + return false; +}, "isServerError"); +// Annotate the CommonJS export names for ESM import in node: + +0 && (module.exports = { + isRetryableByTrait, + isClockSkewError, + isClockSkewCorrectedError, + isBrowserNetworkError, + isThrottlingError, + isTransientError, + isServerError +}); + diff --git a/amplify/functions/deleteDocument/node_modules/@smithy/service-error-classification/dist-es/constants.js b/amplify/functions/deleteDocument/node_modules/@smithy/service-error-classification/dist-es/constants.js new file mode 100644 index 0000000..267443b --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@smithy/service-error-classification/dist-es/constants.js @@ -0,0 +1,27 @@ +export const CLOCK_SKEW_ERROR_CODES = [ + "AuthFailure", + "InvalidSignatureException", + "RequestExpired", + "RequestInTheFuture", + "RequestTimeTooSkewed", + "SignatureDoesNotMatch", +]; +export const THROTTLING_ERROR_CODES = [ + "BandwidthLimitExceeded", + "EC2ThrottledException", + "LimitExceededException", + "PriorRequestNotComplete", + "ProvisionedThroughputExceededException", + "RequestLimitExceeded", + "RequestThrottled", + "RequestThrottledException", + "SlowDown", + "ThrottledException", + "Throttling", + "ThrottlingException", + "TooManyRequestsException", + "TransactionInProgressException", +]; +export const TRANSIENT_ERROR_CODES = ["TimeoutError", "RequestTimeout", "RequestTimeoutException"]; +export const TRANSIENT_ERROR_STATUS_CODES = [500, 502, 503, 504]; +export const NODEJS_TIMEOUT_ERROR_CODES = ["ECONNRESET", "ECONNREFUSED", "EPIPE", "ETIMEDOUT"]; diff --git a/amplify/functions/deleteDocument/node_modules/@smithy/service-error-classification/dist-es/index.js 
b/amplify/functions/deleteDocument/node_modules/@smithy/service-error-classification/dist-es/index.js new file mode 100644 index 0000000..1da4aa9 --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@smithy/service-error-classification/dist-es/index.js @@ -0,0 +1,37 @@ +import { CLOCK_SKEW_ERROR_CODES, NODEJS_TIMEOUT_ERROR_CODES, THROTTLING_ERROR_CODES, TRANSIENT_ERROR_CODES, TRANSIENT_ERROR_STATUS_CODES, } from "./constants"; +export const isRetryableByTrait = (error) => error.$retryable !== undefined; +export const isClockSkewError = (error) => CLOCK_SKEW_ERROR_CODES.includes(error.name); +export const isClockSkewCorrectedError = (error) => error.$metadata?.clockSkewCorrected; +export const isBrowserNetworkError = (error) => { + const errorMessages = new Set([ + "Failed to fetch", + "NetworkError when attempting to fetch resource", + "The Internet connection appears to be offline", + "Load failed", + "Network request failed", + ]); + const isValid = error && error instanceof TypeError; + if (!isValid) { + return false; + } + return errorMessages.has(error.message); +}; +export const isThrottlingError = (error) => error.$metadata?.httpStatusCode === 429 || + THROTTLING_ERROR_CODES.includes(error.name) || + error.$retryable?.throttling == true; +export const isTransientError = (error, depth = 0) => isClockSkewCorrectedError(error) || + TRANSIENT_ERROR_CODES.includes(error.name) || + NODEJS_TIMEOUT_ERROR_CODES.includes(error?.code || "") || + TRANSIENT_ERROR_STATUS_CODES.includes(error.$metadata?.httpStatusCode || 0) || + isBrowserNetworkError(error) || + (error.cause !== undefined && depth <= 10 && isTransientError(error.cause, depth + 1)); +export const isServerError = (error) => { + if (error.$metadata?.httpStatusCode !== undefined) { + const statusCode = error.$metadata.httpStatusCode; + if (500 <= statusCode && statusCode <= 599 && !isTransientError(error)) { + return true; + } + return false; + } + return false; +}; diff --git 
a/amplify/functions/deleteDocument/node_modules/@smithy/service-error-classification/dist-types/constants.d.ts b/amplify/functions/deleteDocument/node_modules/@smithy/service-error-classification/dist-types/constants.d.ts new file mode 100644 index 0000000..f07663b --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@smithy/service-error-classification/dist-types/constants.d.ts @@ -0,0 +1,26 @@ +/** + * Errors encountered when the client clock and server clock cannot agree on the + * current time. + * + * These errors are retryable, assuming the SDK has enabled clock skew + * correction. + */ +export declare const CLOCK_SKEW_ERROR_CODES: string[]; +/** + * Errors that indicate the SDK is being throttled. + * + * These errors are always retryable. + */ +export declare const THROTTLING_ERROR_CODES: string[]; +/** + * Error codes that indicate transient issues + */ +export declare const TRANSIENT_ERROR_CODES: string[]; +/** + * Error codes that indicate transient issues + */ +export declare const TRANSIENT_ERROR_STATUS_CODES: number[]; +/** + * Node.js system error codes that indicate timeout. + */ +export declare const NODEJS_TIMEOUT_ERROR_CODES: string[]; diff --git a/amplify/functions/deleteDocument/node_modules/@smithy/service-error-classification/dist-types/index.d.ts b/amplify/functions/deleteDocument/node_modules/@smithy/service-error-classification/dist-types/index.d.ts new file mode 100644 index 0000000..6aad102 --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@smithy/service-error-classification/dist-types/index.d.ts @@ -0,0 +1,24 @@ +import { SdkError } from "@smithy/types"; +export declare const isRetryableByTrait: (error: SdkError) => boolean; +/** + * @deprecated use isClockSkewCorrectedError. This is only used in deprecated code. + */ +export declare const isClockSkewError: (error: SdkError) => boolean; +/** + * @returns whether the error resulted in a systemClockOffset aka clock skew correction. 
+ */ +export declare const isClockSkewCorrectedError: (error: SdkError) => true | undefined; +/** + * + * @internal + */ +export declare const isBrowserNetworkError: (error: SdkError) => boolean; +export declare const isThrottlingError: (error: SdkError) => boolean; +/** + * Though NODEJS_TIMEOUT_ERROR_CODES are platform specific, they are + * included here because there is an error scenario with unknown root + * cause where the NodeHttpHandler does not decorate the Error with + * the name "TimeoutError" to be checked by the TRANSIENT_ERROR_CODES condition. + */ +export declare const isTransientError: (error: SdkError, depth?: number) => boolean; +export declare const isServerError: (error: SdkError) => boolean; diff --git a/amplify/functions/deleteDocument/node_modules/@smithy/service-error-classification/dist-types/ts3.4/constants.d.ts b/amplify/functions/deleteDocument/node_modules/@smithy/service-error-classification/dist-types/ts3.4/constants.d.ts new file mode 100644 index 0000000..74c4858 --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@smithy/service-error-classification/dist-types/ts3.4/constants.d.ts @@ -0,0 +1,26 @@ +/** + * Errors encountered when the client clock and server clock cannot agree on the + * current time. + * + * These errors are retryable, assuming the SDK has enabled clock skew + * correction. + */ +export declare const CLOCK_SKEW_ERROR_CODES: string[]; +/** + * Errors that indicate the SDK is being throttled. + * + * These errors are always retryable. + */ +export declare const THROTTLING_ERROR_CODES: string[]; +/** + * Error codes that indicate transient issues + */ +export declare const TRANSIENT_ERROR_CODES: string[]; +/** + * Error codes that indicate transient issues + */ +export declare const TRANSIENT_ERROR_STATUS_CODES: number[]; +/** + * Node.js system error codes that indicate timeout. 
+ */ +export declare const NODEJS_TIMEOUT_ERROR_CODES: string[]; diff --git a/amplify/functions/deleteDocument/node_modules/@smithy/service-error-classification/dist-types/ts3.4/index.d.ts b/amplify/functions/deleteDocument/node_modules/@smithy/service-error-classification/dist-types/ts3.4/index.d.ts new file mode 100644 index 0000000..c7909ae --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@smithy/service-error-classification/dist-types/ts3.4/index.d.ts @@ -0,0 +1,24 @@ +import { SdkError } from "@smithy/types"; +export declare const isRetryableByTrait: (error: SdkError) => boolean; +/** + * @deprecated use isClockSkewCorrectedError. This is only used in deprecated code. + */ +export declare const isClockSkewError: (error: SdkError) => boolean; +/** + * @returns whether the error resulted in a systemClockOffset aka clock skew correction. + */ +export declare const isClockSkewCorrectedError: (error: SdkError) => true | undefined; +/** + * + * @internal + */ +export declare const isBrowserNetworkError: (error: SdkError) => boolean; +export declare const isThrottlingError: (error: SdkError) => boolean; +/** + * Though NODEJS_TIMEOUT_ERROR_CODES are platform specific, they are + * included here because there is an error scenario with unknown root + * cause where the NodeHttpHandler does not decorate the Error with + * the name "TimeoutError" to be checked by the TRANSIENT_ERROR_CODES condition. 
+ */ +export declare const isTransientError: (error: SdkError, depth?: number) => boolean; +export declare const isServerError: (error: SdkError) => boolean; diff --git a/amplify/functions/deleteDocument/node_modules/@smithy/service-error-classification/package.json b/amplify/functions/deleteDocument/node_modules/@smithy/service-error-classification/package.json new file mode 100644 index 0000000..a568aee --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@smithy/service-error-classification/package.json @@ -0,0 +1,59 @@ +{ + "name": "@smithy/service-error-classification", + "version": "4.0.3", + "scripts": { + "build": "concurrently 'yarn:build:cjs' 'yarn:build:es' 'yarn:build:types && yarn build:types:downlevel'", + "build:cjs": "node ../../scripts/inline service-error-classification", + "build:es": "yarn g:tsc -p tsconfig.es.json", + "build:types": "yarn g:tsc -p tsconfig.types.json", + "build:types:downlevel": "rimraf dist-types/ts3.4 && downlevel-dts dist-types dist-types/ts3.4", + "stage-release": "rimraf ./.release && yarn pack && mkdir ./.release && tar zxvf ./package.tgz --directory ./.release && rm ./package.tgz", + "clean": "rimraf ./dist-* && rimraf *.tsbuildinfo || exit 0", + "lint": "eslint -c ../../.eslintrc.js \"src/**/*.ts\"", + "format": "prettier --config ../../prettier.config.js --ignore-path ../../.prettierignore --write \"**/*.{ts,md,json}\"", + "test": "yarn g:vitest run", + "test:watch": "yarn g:vitest watch" + }, + "main": "./dist-cjs/index.js", + "module": "./dist-es/index.js", + "types": "./dist-types/index.d.ts", + "author": { + "name": "AWS SDK for JavaScript Team", + "url": "https://aws.amazon.com/javascript/" + }, + "license": "Apache-2.0", + "devDependencies": { + "concurrently": "7.0.0", + "downlevel-dts": "0.10.1", + "rimraf": "3.0.2", + "typedoc": "0.23.23" + }, + "engines": { + "node": ">=18.0.0" + }, + "typesVersions": { + "<4.0": { + "dist-types/*": [ + "dist-types/ts3.4/*" + ] + } + }, + "files": [ + "dist-*/**" 
+ ], + "homepage": "https://github.com/smithy-lang/smithy-typescript/tree/main/packages/service-error-classification", + "repository": { + "type": "git", + "url": "https://github.com/smithy-lang/smithy-typescript.git", + "directory": "packages/service-error-classification" + }, + "typedoc": { + "entryPoint": "src/index.ts" + }, + "publishConfig": { + "directory": ".release/package" + }, + "dependencies": { + "@smithy/types": "^4.2.0" + } +} \ No newline at end of file diff --git a/amplify/functions/deleteDocument/node_modules/@smithy/shared-ini-file-loader/LICENSE b/amplify/functions/deleteDocument/node_modules/@smithy/shared-ini-file-loader/LICENSE new file mode 100644 index 0000000..7b6491b --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@smithy/shared-ini-file-loader/LICENSE @@ -0,0 +1,201 @@ +Apache License + Version 2.0, January 2004 + http://www.apache.org/licenses/ + + TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION + + 1. Definitions. + + "License" shall mean the terms and conditions for use, reproduction, + and distribution as defined by Sections 1 through 9 of this document. + + "Licensor" shall mean the copyright owner or entity authorized by + the copyright owner that is granting the License. + + "Legal Entity" shall mean the union of the acting entity and all + other entities that control, are controlled by, or are under common + control with that entity. For the purposes of this definition, + "control" means (i) the power, direct or indirect, to cause the + direction or management of such entity, whether by contract or + otherwise, or (ii) ownership of fifty percent (50%) or more of the + outstanding shares, or (iii) beneficial ownership of such entity. + + "You" (or "Your") shall mean an individual or Legal Entity + exercising permissions granted by this License. 
+ + "Source" form shall mean the preferred form for making modifications, + including but not limited to software source code, documentation + source, and configuration files. + + "Object" form shall mean any form resulting from mechanical + transformation or translation of a Source form, including but + not limited to compiled object code, generated documentation, + and conversions to other media types. + + "Work" shall mean the work of authorship, whether in Source or + Object form, made available under the License, as indicated by a + copyright notice that is included in or attached to the work + (an example is provided in the Appendix below). + + "Derivative Works" shall mean any work, whether in Source or Object + form, that is based on (or derived from) the Work and for which the + editorial revisions, annotations, elaborations, or other modifications + represent, as a whole, an original work of authorship. For the purposes + of this License, Derivative Works shall not include works that remain + separable from, or merely link (or bind by name) to the interfaces of, + the Work and Derivative Works thereof. + + "Contribution" shall mean any work of authorship, including + the original version of the Work and any modifications or additions + to that Work or Derivative Works thereof, that is intentionally + submitted to Licensor for inclusion in the Work by the copyright owner + or by an individual or Legal Entity authorized to submit on behalf of + the copyright owner. 
For the purposes of this definition, "submitted" + means any form of electronic, verbal, or written communication sent + to the Licensor or its representatives, including but not limited to + communication on electronic mailing lists, source code control systems, + and issue tracking systems that are managed by, or on behalf of, the + Licensor for the purpose of discussing and improving the Work, but + excluding communication that is conspicuously marked or otherwise + designated in writing by the copyright owner as "Not a Contribution." + + "Contributor" shall mean Licensor and any individual or Legal Entity + on behalf of whom a Contribution has been received by Licensor and + subsequently incorporated within the Work. + + 2. Grant of Copyright License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + copyright license to reproduce, prepare Derivative Works of, + publicly display, publicly perform, sublicense, and distribute the + Work and such Derivative Works in Source or Object form. + + 3. Grant of Patent License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + (except as stated in this section) patent license to make, have made, + use, offer to sell, sell, import, and otherwise transfer the Work, + where such license applies only to those patent claims licensable + by such Contributor that are necessarily infringed by their + Contribution(s) alone or by combination of their Contribution(s) + with the Work to which such Contribution(s) was submitted. 
If You + institute patent litigation against any entity (including a + cross-claim or counterclaim in a lawsuit) alleging that the Work + or a Contribution incorporated within the Work constitutes direct + or contributory patent infringement, then any patent licenses + granted to You under this License for that Work shall terminate + as of the date such litigation is filed. + + 4. Redistribution. You may reproduce and distribute copies of the + Work or Derivative Works thereof in any medium, with or without + modifications, and in Source or Object form, provided that You + meet the following conditions: + + (a) You must give any other recipients of the Work or + Derivative Works a copy of this License; and + + (b) You must cause any modified files to carry prominent notices + stating that You changed the files; and + + (c) You must retain, in the Source form of any Derivative Works + that You distribute, all copyright, patent, trademark, and + attribution notices from the Source form of the Work, + excluding those notices that do not pertain to any part of + the Derivative Works; and + + (d) If the Work includes a "NOTICE" text file as part of its + distribution, then any Derivative Works that You distribute must + include a readable copy of the attribution notices contained + within such NOTICE file, excluding those notices that do not + pertain to any part of the Derivative Works, in at least one + of the following places: within a NOTICE text file distributed + as part of the Derivative Works; within the Source form or + documentation, if provided along with the Derivative Works; or, + within a display generated by the Derivative Works, if and + wherever such third-party notices normally appear. The contents + of the NOTICE file are for informational purposes only and + do not modify the License. 
You may add Your own attribution + notices within Derivative Works that You distribute, alongside + or as an addendum to the NOTICE text from the Work, provided + that such additional attribution notices cannot be construed + as modifying the License. + + You may add Your own copyright statement to Your modifications and + may provide additional or different license terms and conditions + for use, reproduction, or distribution of Your modifications, or + for any such Derivative Works as a whole, provided Your use, + reproduction, and distribution of the Work otherwise complies with + the conditions stated in this License. + + 5. Submission of Contributions. Unless You explicitly state otherwise, + any Contribution intentionally submitted for inclusion in the Work + by You to the Licensor shall be under the terms and conditions of + this License, without any additional terms or conditions. + Notwithstanding the above, nothing herein shall supersede or modify + the terms of any separate license agreement you may have executed + with Licensor regarding such Contributions. + + 6. Trademarks. This License does not grant permission to use the trade + names, trademarks, service marks, or product names of the Licensor, + except as required for reasonable and customary use in describing the + origin of the Work and reproducing the content of the NOTICE file. + + 7. Disclaimer of Warranty. Unless required by applicable law or + agreed to in writing, Licensor provides the Work (and each + Contributor provides its Contributions) on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or + implied, including, without limitation, any warranties or conditions + of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A + PARTICULAR PURPOSE. You are solely responsible for determining the + appropriateness of using or redistributing the Work and assume any + risks associated with Your exercise of permissions under this License. + + 8. 
Limitation of Liability. In no event and under no legal theory, + whether in tort (including negligence), contract, or otherwise, + unless required by applicable law (such as deliberate and grossly + negligent acts) or agreed to in writing, shall any Contributor be + liable to You for damages, including any direct, indirect, special, + incidental, or consequential damages of any character arising as a + result of this License or out of the use or inability to use the + Work (including but not limited to damages for loss of goodwill, + work stoppage, computer failure or malfunction, or any and all + other commercial damages or losses), even if such Contributor + has been advised of the possibility of such damages. + + 9. Accepting Warranty or Additional Liability. While redistributing + the Work or Derivative Works thereof, You may choose to offer, + and charge a fee for, acceptance of support, warranty, indemnity, + or other liability obligations and/or rights consistent with this + License. However, in accepting such obligations, You may act only + on Your own behalf and on Your sole responsibility, not on behalf + of any other Contributor, and only if You agree to indemnify, + defend, and hold each Contributor harmless for any liability + incurred by, or claims asserted against, such Contributor by reason + of your accepting any such warranty or additional liability. + + END OF TERMS AND CONDITIONS + + APPENDIX: How to apply the Apache License to your work. + + To apply the Apache License to your work, attach the following + boilerplate notice, with the fields enclosed by brackets "{}" + replaced with your own identifying information. (Don't include + the brackets!) The text should be enclosed in the appropriate + comment syntax for the file format. We also recommend that a + file or class name and description of purpose be included on the + same "printed page" as the copyright notice for easier + identification within third-party archives. 
+ + Copyright 2018-2020 Amazon.com, Inc. or its affiliates. All Rights Reserved. + + Licensed under the Apache License, Version 2.0 (the "License"); + you may not use this file except in compliance with the License. + You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + + Unless required by applicable law or agreed to in writing, software + distributed under the License is distributed on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + See the License for the specific language governing permissions and + limitations under the License. \ No newline at end of file diff --git a/amplify/functions/deleteDocument/node_modules/@smithy/shared-ini-file-loader/README.md b/amplify/functions/deleteDocument/node_modules/@smithy/shared-ini-file-loader/README.md new file mode 100644 index 0000000..45a4b2e --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@smithy/shared-ini-file-loader/README.md @@ -0,0 +1,105 @@ +# @smithy/shared-ini-file-loader + +[![NPM version](https://img.shields.io/npm/v/@smithy/shared-ini-file-loader/latest.svg)](https://www.npmjs.com/package/@smithy/shared-ini-file-loader) +[![NPM downloads](https://img.shields.io/npm/dm/@smithy/shared-ini-file-loader.svg)](https://www.npmjs.com/package/@smithy/shared-ini-file-loader) + +## AWS Shared Configuration File Loader + +This module provides a function that reads from AWS SDK configuration files and +returns a promise that will resolve with a hash of the parsed contents of the +AWS credentials file and of the AWS config file. 
Given the [sample +files](#sample-files) below, the promise returned by `loadSharedConfigFiles` +would resolve with: + +```javascript +{ + configFile: { + 'default': { + aws_access_key_id: 'foo', + aws_secret_access_key: 'bar', + }, + dev: { + aws_access_key_id: 'foo1', + aws_secret_access_key: 'bar1', + }, + prod: { + aws_access_key_id: 'foo2', + aws_secret_access_key: 'bar2', + }, + 'testing host': { + aws_access_key_id: 'foo4', + aws_secret_access_key: 'bar4', + } + }, + credentialsFile: { + 'default': { + aws_access_key_id: 'foo', + aws_secret_access_key: 'bar', + }, + dev: { + aws_access_key_id: 'foo1', + aws_secret_access_key: 'bar1', + }, + prod: { + aws_access_key_id: 'foo2', + aws_secret_access_key: 'bar2', + } + }, +} +``` + +If a file is not found, its key (`configFile` or `credentialsFile`) will instead +have a value of an empty object. + +## Supported configuration + +You may customize how the files are loaded by providing an options hash to the +`loadSharedConfigFiles` function. The following options are supported: + +- `filepath` - The path to the shared credentials file. If not specified, the + provider will use the value in the `AWS_SHARED_CREDENTIALS_FILE` environment + variable or a default of `~/.aws/credentials`. +- `configFilepath` - The path to the shared config file. If not specified, the + provider will use the value in the `AWS_CONFIG_FILE` environment variable or a + default of `~/.aws/config`. +- `ignoreCache` - The provider will normally cache the contents of the files it + loads. This option will force the provider to reload the files from disk. + Defaults to `false`. 
+ +## Sample files + +### `~/.aws/credentials` + +```ini +[default] +aws_access_key_id=foo +aws_secret_access_key=bar + +[dev] +aws_access_key_id=foo2 +aws_secret_access_key=bar2 + +[prod] +aws_access_key_id=foo3 +aws_secret_access_key=bar3 +``` + +### `~/.aws/config` + +```ini +[default] +aws_access_key_id=foo +aws_secret_access_key=bar + +[profile dev] +aws_access_key_id=foo2 +aws_secret_access_key=bar2 + +[profile prod] +aws_access_key_id=foo3 +aws_secret_access_key=bar3 + +[profile "testing host"] +aws_access_key_id=foo4 +aws_secret_access_key=bar4 +``` diff --git a/amplify/functions/deleteDocument/node_modules/@smithy/shared-ini-file-loader/dist-cjs/getConfigData.js b/amplify/functions/deleteDocument/node_modules/@smithy/shared-ini-file-loader/dist-cjs/getConfigData.js new file mode 100644 index 0000000..532e610 --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@smithy/shared-ini-file-loader/dist-cjs/getConfigData.js @@ -0,0 +1 @@ +module.exports = require("./index.js"); \ No newline at end of file diff --git a/amplify/functions/deleteDocument/node_modules/@smithy/shared-ini-file-loader/dist-cjs/getConfigFilepath.js b/amplify/functions/deleteDocument/node_modules/@smithy/shared-ini-file-loader/dist-cjs/getConfigFilepath.js new file mode 100644 index 0000000..532e610 --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@smithy/shared-ini-file-loader/dist-cjs/getConfigFilepath.js @@ -0,0 +1 @@ +module.exports = require("./index.js"); \ No newline at end of file diff --git a/amplify/functions/deleteDocument/node_modules/@smithy/shared-ini-file-loader/dist-cjs/getCredentialsFilepath.js b/amplify/functions/deleteDocument/node_modules/@smithy/shared-ini-file-loader/dist-cjs/getCredentialsFilepath.js new file mode 100644 index 0000000..532e610 --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@smithy/shared-ini-file-loader/dist-cjs/getCredentialsFilepath.js @@ -0,0 +1 @@ +module.exports = require("./index.js"); \ No 
newline at end of file diff --git a/amplify/functions/deleteDocument/node_modules/@smithy/shared-ini-file-loader/dist-cjs/getHomeDir.js b/amplify/functions/deleteDocument/node_modules/@smithy/shared-ini-file-loader/dist-cjs/getHomeDir.js new file mode 100644 index 0000000..2a4f737 --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@smithy/shared-ini-file-loader/dist-cjs/getHomeDir.js @@ -0,0 +1,26 @@ +"use strict"; +Object.defineProperty(exports, "__esModule", { value: true }); +exports.getHomeDir = void 0; +const os_1 = require("os"); +const path_1 = require("path"); +const homeDirCache = {}; +const getHomeDirCacheKey = () => { + if (process && process.geteuid) { + return `${process.geteuid()}`; + } + return "DEFAULT"; +}; +const getHomeDir = () => { + const { HOME, USERPROFILE, HOMEPATH, HOMEDRIVE = `C:${path_1.sep}` } = process.env; + if (HOME) + return HOME; + if (USERPROFILE) + return USERPROFILE; + if (HOMEPATH) + return `${HOMEDRIVE}${HOMEPATH}`; + const homeDirCacheKey = getHomeDirCacheKey(); + if (!homeDirCache[homeDirCacheKey]) + homeDirCache[homeDirCacheKey] = (0, os_1.homedir)(); + return homeDirCache[homeDirCacheKey]; +}; +exports.getHomeDir = getHomeDir; diff --git a/amplify/functions/deleteDocument/node_modules/@smithy/shared-ini-file-loader/dist-cjs/getProfileName.js b/amplify/functions/deleteDocument/node_modules/@smithy/shared-ini-file-loader/dist-cjs/getProfileName.js new file mode 100644 index 0000000..532e610 --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@smithy/shared-ini-file-loader/dist-cjs/getProfileName.js @@ -0,0 +1 @@ +module.exports = require("./index.js"); \ No newline at end of file diff --git a/amplify/functions/deleteDocument/node_modules/@smithy/shared-ini-file-loader/dist-cjs/getSSOTokenFilepath.js b/amplify/functions/deleteDocument/node_modules/@smithy/shared-ini-file-loader/dist-cjs/getSSOTokenFilepath.js new file mode 100644 index 0000000..30d97b3 --- /dev/null +++ 
b/amplify/functions/deleteDocument/node_modules/@smithy/shared-ini-file-loader/dist-cjs/getSSOTokenFilepath.js @@ -0,0 +1,12 @@ +"use strict"; +Object.defineProperty(exports, "__esModule", { value: true }); +exports.getSSOTokenFilepath = void 0; +const crypto_1 = require("crypto"); +const path_1 = require("path"); +const getHomeDir_1 = require("./getHomeDir"); +const getSSOTokenFilepath = (id) => { + const hasher = (0, crypto_1.createHash)("sha1"); + const cacheName = hasher.update(id).digest("hex"); + return (0, path_1.join)((0, getHomeDir_1.getHomeDir)(), ".aws", "sso", "cache", `${cacheName}.json`); +}; +exports.getSSOTokenFilepath = getSSOTokenFilepath; diff --git a/amplify/functions/deleteDocument/node_modules/@smithy/shared-ini-file-loader/dist-cjs/getSSOTokenFromFile.js b/amplify/functions/deleteDocument/node_modules/@smithy/shared-ini-file-loader/dist-cjs/getSSOTokenFromFile.js new file mode 100644 index 0000000..688accb --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@smithy/shared-ini-file-loader/dist-cjs/getSSOTokenFromFile.js @@ -0,0 +1,12 @@ +"use strict"; +Object.defineProperty(exports, "__esModule", { value: true }); +exports.getSSOTokenFromFile = void 0; +const fs_1 = require("fs"); +const getSSOTokenFilepath_1 = require("./getSSOTokenFilepath"); +const { readFile } = fs_1.promises; +const getSSOTokenFromFile = async (id) => { + const ssoTokenFilepath = (0, getSSOTokenFilepath_1.getSSOTokenFilepath)(id); + const ssoTokenText = await readFile(ssoTokenFilepath, "utf8"); + return JSON.parse(ssoTokenText); +}; +exports.getSSOTokenFromFile = getSSOTokenFromFile; diff --git a/amplify/functions/deleteDocument/node_modules/@smithy/shared-ini-file-loader/dist-cjs/getSsoSessionData.js b/amplify/functions/deleteDocument/node_modules/@smithy/shared-ini-file-loader/dist-cjs/getSsoSessionData.js new file mode 100644 index 0000000..532e610 --- /dev/null +++ 
b/amplify/functions/deleteDocument/node_modules/@smithy/shared-ini-file-loader/dist-cjs/getSsoSessionData.js @@ -0,0 +1 @@ +module.exports = require("./index.js"); \ No newline at end of file diff --git a/amplify/functions/deleteDocument/node_modules/@smithy/shared-ini-file-loader/dist-cjs/index.js b/amplify/functions/deleteDocument/node_modules/@smithy/shared-ini-file-loader/dist-cjs/index.js new file mode 100644 index 0000000..de59bfa --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@smithy/shared-ini-file-loader/dist-cjs/index.js @@ -0,0 +1,206 @@ +var __defProp = Object.defineProperty; +var __getOwnPropDesc = Object.getOwnPropertyDescriptor; +var __getOwnPropNames = Object.getOwnPropertyNames; +var __hasOwnProp = Object.prototype.hasOwnProperty; +var __name = (target, value) => __defProp(target, "name", { value, configurable: true }); +var __export = (target, all) => { + for (var name in all) + __defProp(target, name, { get: all[name], enumerable: true }); +}; +var __copyProps = (to, from, except, desc) => { + if (from && typeof from === "object" || typeof from === "function") { + for (let key of __getOwnPropNames(from)) + if (!__hasOwnProp.call(to, key) && key !== except) + __defProp(to, key, { get: () => from[key], enumerable: !(desc = __getOwnPropDesc(from, key)) || desc.enumerable }); + } + return to; +}; +var __reExport = (target, mod, secondTarget) => (__copyProps(target, mod, "default"), secondTarget && __copyProps(secondTarget, mod, "default")); +var __toCommonJS = (mod) => __copyProps(__defProp({}, "__esModule", { value: true }), mod); + +// src/index.ts +var src_exports = {}; +__export(src_exports, { + CONFIG_PREFIX_SEPARATOR: () => CONFIG_PREFIX_SEPARATOR, + DEFAULT_PROFILE: () => DEFAULT_PROFILE, + ENV_PROFILE: () => ENV_PROFILE, + getProfileName: () => getProfileName, + loadSharedConfigFiles: () => loadSharedConfigFiles, + loadSsoSessionData: () => loadSsoSessionData, + parseKnownFiles: () => parseKnownFiles +}); +module.exports = 
__toCommonJS(src_exports); +__reExport(src_exports, require("././getHomeDir"), module.exports); + +// src/getProfileName.ts +var ENV_PROFILE = "AWS_PROFILE"; +var DEFAULT_PROFILE = "default"; +var getProfileName = /* @__PURE__ */ __name((init) => init.profile || process.env[ENV_PROFILE] || DEFAULT_PROFILE, "getProfileName"); + +// src/index.ts +__reExport(src_exports, require("././getSSOTokenFilepath"), module.exports); +__reExport(src_exports, require("././getSSOTokenFromFile"), module.exports); + +// src/loadSharedConfigFiles.ts + + +// src/getConfigData.ts +var import_types = require("@smithy/types"); +var getConfigData = /* @__PURE__ */ __name((data) => Object.entries(data).filter(([key]) => { + const indexOfSeparator = key.indexOf(CONFIG_PREFIX_SEPARATOR); + if (indexOfSeparator === -1) { + return false; + } + return Object.values(import_types.IniSectionType).includes(key.substring(0, indexOfSeparator)); +}).reduce( + (acc, [key, value]) => { + const indexOfSeparator = key.indexOf(CONFIG_PREFIX_SEPARATOR); + const updatedKey = key.substring(0, indexOfSeparator) === import_types.IniSectionType.PROFILE ? key.substring(indexOfSeparator + 1) : key; + acc[updatedKey] = value; + return acc; + }, + { + // Populate default profile, if present. 
+ ...data.default && { default: data.default } + } +), "getConfigData"); + +// src/getConfigFilepath.ts +var import_path = require("path"); +var import_getHomeDir = require("././getHomeDir"); +var ENV_CONFIG_PATH = "AWS_CONFIG_FILE"; +var getConfigFilepath = /* @__PURE__ */ __name(() => process.env[ENV_CONFIG_PATH] || (0, import_path.join)((0, import_getHomeDir.getHomeDir)(), ".aws", "config"), "getConfigFilepath"); + +// src/getCredentialsFilepath.ts + +var import_getHomeDir2 = require("././getHomeDir"); +var ENV_CREDENTIALS_PATH = "AWS_SHARED_CREDENTIALS_FILE"; +var getCredentialsFilepath = /* @__PURE__ */ __name(() => process.env[ENV_CREDENTIALS_PATH] || (0, import_path.join)((0, import_getHomeDir2.getHomeDir)(), ".aws", "credentials"), "getCredentialsFilepath"); + +// src/loadSharedConfigFiles.ts +var import_getHomeDir3 = require("././getHomeDir"); + +// src/parseIni.ts + +var prefixKeyRegex = /^([\w-]+)\s(["'])?([\w-@\+\.%:/]+)\2$/; +var profileNameBlockList = ["__proto__", "profile __proto__"]; +var parseIni = /* @__PURE__ */ __name((iniData) => { + const map = {}; + let currentSection; + let currentSubSection; + for (const iniLine of iniData.split(/\r?\n/)) { + const trimmedLine = iniLine.split(/(^|\s)[;#]/)[0].trim(); + const isSection = trimmedLine[0] === "[" && trimmedLine[trimmedLine.length - 1] === "]"; + if (isSection) { + currentSection = void 0; + currentSubSection = void 0; + const sectionName = trimmedLine.substring(1, trimmedLine.length - 1); + const matches = prefixKeyRegex.exec(sectionName); + if (matches) { + const [, prefix, , name] = matches; + if (Object.values(import_types.IniSectionType).includes(prefix)) { + currentSection = [prefix, name].join(CONFIG_PREFIX_SEPARATOR); + } + } else { + currentSection = sectionName; + } + if (profileNameBlockList.includes(sectionName)) { + throw new Error(`Found invalid profile name "${sectionName}"`); + } + } else if (currentSection) { + const indexOfEqualsSign = trimmedLine.indexOf("="); + if (![0, 
-1].includes(indexOfEqualsSign)) { + const [name, value] = [ + trimmedLine.substring(0, indexOfEqualsSign).trim(), + trimmedLine.substring(indexOfEqualsSign + 1).trim() + ]; + if (value === "") { + currentSubSection = name; + } else { + if (currentSubSection && iniLine.trimStart() === iniLine) { + currentSubSection = void 0; + } + map[currentSection] = map[currentSection] || {}; + const key = currentSubSection ? [currentSubSection, name].join(CONFIG_PREFIX_SEPARATOR) : name; + map[currentSection][key] = value; + } + } + } + } + return map; +}, "parseIni"); + +// src/loadSharedConfigFiles.ts +var import_slurpFile = require("././slurpFile"); +var swallowError = /* @__PURE__ */ __name(() => ({}), "swallowError"); +var CONFIG_PREFIX_SEPARATOR = "."; +var loadSharedConfigFiles = /* @__PURE__ */ __name(async (init = {}) => { + const { filepath = getCredentialsFilepath(), configFilepath = getConfigFilepath() } = init; + const homeDir = (0, import_getHomeDir3.getHomeDir)(); + const relativeHomeDirPrefix = "~/"; + let resolvedFilepath = filepath; + if (filepath.startsWith(relativeHomeDirPrefix)) { + resolvedFilepath = (0, import_path.join)(homeDir, filepath.slice(2)); + } + let resolvedConfigFilepath = configFilepath; + if (configFilepath.startsWith(relativeHomeDirPrefix)) { + resolvedConfigFilepath = (0, import_path.join)(homeDir, configFilepath.slice(2)); + } + const parsedFiles = await Promise.all([ + (0, import_slurpFile.slurpFile)(resolvedConfigFilepath, { + ignoreCache: init.ignoreCache + }).then(parseIni).then(getConfigData).catch(swallowError), + (0, import_slurpFile.slurpFile)(resolvedFilepath, { + ignoreCache: init.ignoreCache + }).then(parseIni).catch(swallowError) + ]); + return { + configFile: parsedFiles[0], + credentialsFile: parsedFiles[1] + }; +}, "loadSharedConfigFiles"); + +// src/getSsoSessionData.ts + +var getSsoSessionData = /* @__PURE__ */ __name((data) => Object.entries(data).filter(([key]) => key.startsWith(import_types.IniSectionType.SSO_SESSION + 
CONFIG_PREFIX_SEPARATOR)).reduce((acc, [key, value]) => ({ ...acc, [key.substring(key.indexOf(CONFIG_PREFIX_SEPARATOR) + 1)]: value }), {}), "getSsoSessionData"); + +// src/loadSsoSessionData.ts +var import_slurpFile2 = require("././slurpFile"); +var swallowError2 = /* @__PURE__ */ __name(() => ({}), "swallowError"); +var loadSsoSessionData = /* @__PURE__ */ __name(async (init = {}) => (0, import_slurpFile2.slurpFile)(init.configFilepath ?? getConfigFilepath()).then(parseIni).then(getSsoSessionData).catch(swallowError2), "loadSsoSessionData"); + +// src/mergeConfigFiles.ts +var mergeConfigFiles = /* @__PURE__ */ __name((...files) => { + const merged = {}; + for (const file of files) { + for (const [key, values] of Object.entries(file)) { + if (merged[key] !== void 0) { + Object.assign(merged[key], values); + } else { + merged[key] = values; + } + } + } + return merged; +}, "mergeConfigFiles"); + +// src/parseKnownFiles.ts +var parseKnownFiles = /* @__PURE__ */ __name(async (init) => { + const parsedFiles = await loadSharedConfigFiles(init); + return mergeConfigFiles(parsedFiles.configFile, parsedFiles.credentialsFile); +}, "parseKnownFiles"); +// Annotate the CommonJS export names for ESM import in node: + +0 && (module.exports = { + getHomeDir, + ENV_PROFILE, + DEFAULT_PROFILE, + getProfileName, + getSSOTokenFilepath, + getSSOTokenFromFile, + CONFIG_PREFIX_SEPARATOR, + loadSharedConfigFiles, + loadSsoSessionData, + parseKnownFiles +}); + diff --git a/amplify/functions/deleteDocument/node_modules/@smithy/shared-ini-file-loader/dist-cjs/loadSharedConfigFiles.js b/amplify/functions/deleteDocument/node_modules/@smithy/shared-ini-file-loader/dist-cjs/loadSharedConfigFiles.js new file mode 100644 index 0000000..532e610 --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@smithy/shared-ini-file-loader/dist-cjs/loadSharedConfigFiles.js @@ -0,0 +1 @@ +module.exports = require("./index.js"); \ No newline at end of file diff --git 
a/amplify/functions/deleteDocument/node_modules/@smithy/shared-ini-file-loader/dist-cjs/loadSsoSessionData.js b/amplify/functions/deleteDocument/node_modules/@smithy/shared-ini-file-loader/dist-cjs/loadSsoSessionData.js new file mode 100644 index 0000000..532e610 --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@smithy/shared-ini-file-loader/dist-cjs/loadSsoSessionData.js @@ -0,0 +1 @@ +module.exports = require("./index.js"); \ No newline at end of file diff --git a/amplify/functions/deleteDocument/node_modules/@smithy/shared-ini-file-loader/dist-cjs/mergeConfigFiles.js b/amplify/functions/deleteDocument/node_modules/@smithy/shared-ini-file-loader/dist-cjs/mergeConfigFiles.js new file mode 100644 index 0000000..532e610 --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@smithy/shared-ini-file-loader/dist-cjs/mergeConfigFiles.js @@ -0,0 +1 @@ +module.exports = require("./index.js"); \ No newline at end of file diff --git a/amplify/functions/deleteDocument/node_modules/@smithy/shared-ini-file-loader/dist-cjs/parseIni.js b/amplify/functions/deleteDocument/node_modules/@smithy/shared-ini-file-loader/dist-cjs/parseIni.js new file mode 100644 index 0000000..532e610 --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@smithy/shared-ini-file-loader/dist-cjs/parseIni.js @@ -0,0 +1 @@ +module.exports = require("./index.js"); \ No newline at end of file diff --git a/amplify/functions/deleteDocument/node_modules/@smithy/shared-ini-file-loader/dist-cjs/parseKnownFiles.js b/amplify/functions/deleteDocument/node_modules/@smithy/shared-ini-file-loader/dist-cjs/parseKnownFiles.js new file mode 100644 index 0000000..532e610 --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@smithy/shared-ini-file-loader/dist-cjs/parseKnownFiles.js @@ -0,0 +1 @@ +module.exports = require("./index.js"); \ No newline at end of file diff --git a/amplify/functions/deleteDocument/node_modules/@smithy/shared-ini-file-loader/dist-cjs/slurpFile.js 
b/amplify/functions/deleteDocument/node_modules/@smithy/shared-ini-file-loader/dist-cjs/slurpFile.js new file mode 100644 index 0000000..82d7d65 --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@smithy/shared-ini-file-loader/dist-cjs/slurpFile.js @@ -0,0 +1,13 @@ +"use strict"; +Object.defineProperty(exports, "__esModule", { value: true }); +exports.slurpFile = void 0; +const fs_1 = require("fs"); +const { readFile } = fs_1.promises; +const filePromisesHash = {}; +const slurpFile = (path, options) => { + if (!filePromisesHash[path] || (options === null || options === void 0 ? void 0 : options.ignoreCache)) { + filePromisesHash[path] = readFile(path, "utf8"); + } + return filePromisesHash[path]; +}; +exports.slurpFile = slurpFile; diff --git a/amplify/functions/deleteDocument/node_modules/@smithy/shared-ini-file-loader/dist-cjs/types.js b/amplify/functions/deleteDocument/node_modules/@smithy/shared-ini-file-loader/dist-cjs/types.js new file mode 100644 index 0000000..532e610 --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@smithy/shared-ini-file-loader/dist-cjs/types.js @@ -0,0 +1 @@ +module.exports = require("./index.js"); \ No newline at end of file diff --git a/amplify/functions/deleteDocument/node_modules/@smithy/shared-ini-file-loader/dist-es/getConfigData.js b/amplify/functions/deleteDocument/node_modules/@smithy/shared-ini-file-loader/dist-es/getConfigData.js new file mode 100644 index 0000000..4579286 --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@smithy/shared-ini-file-loader/dist-es/getConfigData.js @@ -0,0 +1,18 @@ +import { IniSectionType } from "@smithy/types"; +import { CONFIG_PREFIX_SEPARATOR } from "./loadSharedConfigFiles"; +export const getConfigData = (data) => Object.entries(data) + .filter(([key]) => { + const indexOfSeparator = key.indexOf(CONFIG_PREFIX_SEPARATOR); + if (indexOfSeparator === -1) { + return false; + } + return Object.values(IniSectionType).includes(key.substring(0, 
indexOfSeparator)); +}) + .reduce((acc, [key, value]) => { + const indexOfSeparator = key.indexOf(CONFIG_PREFIX_SEPARATOR); + const updatedKey = key.substring(0, indexOfSeparator) === IniSectionType.PROFILE ? key.substring(indexOfSeparator + 1) : key; + acc[updatedKey] = value; + return acc; +}, { + ...(data.default && { default: data.default }), +}); diff --git a/amplify/functions/deleteDocument/node_modules/@smithy/shared-ini-file-loader/dist-es/getConfigFilepath.js b/amplify/functions/deleteDocument/node_modules/@smithy/shared-ini-file-loader/dist-es/getConfigFilepath.js new file mode 100644 index 0000000..ca07c2d --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@smithy/shared-ini-file-loader/dist-es/getConfigFilepath.js @@ -0,0 +1,4 @@ +import { join } from "path"; +import { getHomeDir } from "./getHomeDir"; +export const ENV_CONFIG_PATH = "AWS_CONFIG_FILE"; +export const getConfigFilepath = () => process.env[ENV_CONFIG_PATH] || join(getHomeDir(), ".aws", "config"); diff --git a/amplify/functions/deleteDocument/node_modules/@smithy/shared-ini-file-loader/dist-es/getCredentialsFilepath.js b/amplify/functions/deleteDocument/node_modules/@smithy/shared-ini-file-loader/dist-es/getCredentialsFilepath.js new file mode 100644 index 0000000..393c0ae --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@smithy/shared-ini-file-loader/dist-es/getCredentialsFilepath.js @@ -0,0 +1,4 @@ +import { join } from "path"; +import { getHomeDir } from "./getHomeDir"; +export const ENV_CREDENTIALS_PATH = "AWS_SHARED_CREDENTIALS_FILE"; +export const getCredentialsFilepath = () => process.env[ENV_CREDENTIALS_PATH] || join(getHomeDir(), ".aws", "credentials"); diff --git a/amplify/functions/deleteDocument/node_modules/@smithy/shared-ini-file-loader/dist-es/getHomeDir.js b/amplify/functions/deleteDocument/node_modules/@smithy/shared-ini-file-loader/dist-es/getHomeDir.js new file mode 100644 index 0000000..58772af --- /dev/null +++ 
b/amplify/functions/deleteDocument/node_modules/@smithy/shared-ini-file-loader/dist-es/getHomeDir.js @@ -0,0 +1,22 @@ +import { homedir } from "os"; +import { sep } from "path"; +const homeDirCache = {}; +const getHomeDirCacheKey = () => { + if (process && process.geteuid) { + return `${process.geteuid()}`; + } + return "DEFAULT"; +}; +export const getHomeDir = () => { + const { HOME, USERPROFILE, HOMEPATH, HOMEDRIVE = `C:${sep}` } = process.env; + if (HOME) + return HOME; + if (USERPROFILE) + return USERPROFILE; + if (HOMEPATH) + return `${HOMEDRIVE}${HOMEPATH}`; + const homeDirCacheKey = getHomeDirCacheKey(); + if (!homeDirCache[homeDirCacheKey]) + homeDirCache[homeDirCacheKey] = homedir(); + return homeDirCache[homeDirCacheKey]; +}; diff --git a/amplify/functions/deleteDocument/node_modules/@smithy/shared-ini-file-loader/dist-es/getProfileName.js b/amplify/functions/deleteDocument/node_modules/@smithy/shared-ini-file-loader/dist-es/getProfileName.js new file mode 100644 index 0000000..acc29f0 --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@smithy/shared-ini-file-loader/dist-es/getProfileName.js @@ -0,0 +1,3 @@ +export const ENV_PROFILE = "AWS_PROFILE"; +export const DEFAULT_PROFILE = "default"; +export const getProfileName = (init) => init.profile || process.env[ENV_PROFILE] || DEFAULT_PROFILE; diff --git a/amplify/functions/deleteDocument/node_modules/@smithy/shared-ini-file-loader/dist-es/getSSOTokenFilepath.js b/amplify/functions/deleteDocument/node_modules/@smithy/shared-ini-file-loader/dist-es/getSSOTokenFilepath.js new file mode 100644 index 0000000..a44b4ad --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@smithy/shared-ini-file-loader/dist-es/getSSOTokenFilepath.js @@ -0,0 +1,8 @@ +import { createHash } from "crypto"; +import { join } from "path"; +import { getHomeDir } from "./getHomeDir"; +export const getSSOTokenFilepath = (id) => { + const hasher = createHash("sha1"); + const cacheName = 
hasher.update(id).digest("hex"); + return join(getHomeDir(), ".aws", "sso", "cache", `${cacheName}.json`); +}; diff --git a/amplify/functions/deleteDocument/node_modules/@smithy/shared-ini-file-loader/dist-es/getSSOTokenFromFile.js b/amplify/functions/deleteDocument/node_modules/@smithy/shared-ini-file-loader/dist-es/getSSOTokenFromFile.js new file mode 100644 index 0000000..42659db --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@smithy/shared-ini-file-loader/dist-es/getSSOTokenFromFile.js @@ -0,0 +1,8 @@ +import { promises as fsPromises } from "fs"; +import { getSSOTokenFilepath } from "./getSSOTokenFilepath"; +const { readFile } = fsPromises; +export const getSSOTokenFromFile = async (id) => { + const ssoTokenFilepath = getSSOTokenFilepath(id); + const ssoTokenText = await readFile(ssoTokenFilepath, "utf8"); + return JSON.parse(ssoTokenText); +}; diff --git a/amplify/functions/deleteDocument/node_modules/@smithy/shared-ini-file-loader/dist-es/getSsoSessionData.js b/amplify/functions/deleteDocument/node_modules/@smithy/shared-ini-file-loader/dist-es/getSsoSessionData.js new file mode 100644 index 0000000..f2df194 --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@smithy/shared-ini-file-loader/dist-es/getSsoSessionData.js @@ -0,0 +1,5 @@ +import { IniSectionType } from "@smithy/types"; +import { CONFIG_PREFIX_SEPARATOR } from "./loadSharedConfigFiles"; +export const getSsoSessionData = (data) => Object.entries(data) + .filter(([key]) => key.startsWith(IniSectionType.SSO_SESSION + CONFIG_PREFIX_SEPARATOR)) + .reduce((acc, [key, value]) => ({ ...acc, [key.substring(key.indexOf(CONFIG_PREFIX_SEPARATOR) + 1)]: value }), {}); diff --git a/amplify/functions/deleteDocument/node_modules/@smithy/shared-ini-file-loader/dist-es/index.js b/amplify/functions/deleteDocument/node_modules/@smithy/shared-ini-file-loader/dist-es/index.js new file mode 100644 index 0000000..3e8b2c7 --- /dev/null +++ 
b/amplify/functions/deleteDocument/node_modules/@smithy/shared-ini-file-loader/dist-es/index.js @@ -0,0 +1,8 @@ +export * from "./getHomeDir"; +export * from "./getProfileName"; +export * from "./getSSOTokenFilepath"; +export * from "./getSSOTokenFromFile"; +export * from "./loadSharedConfigFiles"; +export * from "./loadSsoSessionData"; +export * from "./parseKnownFiles"; +export * from "./types"; diff --git a/amplify/functions/deleteDocument/node_modules/@smithy/shared-ini-file-loader/dist-es/loadSharedConfigFiles.js b/amplify/functions/deleteDocument/node_modules/@smithy/shared-ini-file-loader/dist-es/loadSharedConfigFiles.js new file mode 100644 index 0000000..77ee32c --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@smithy/shared-ini-file-loader/dist-es/loadSharedConfigFiles.js @@ -0,0 +1,39 @@ +import { join } from "path"; +import { getConfigData } from "./getConfigData"; +import { getConfigFilepath } from "./getConfigFilepath"; +import { getCredentialsFilepath } from "./getCredentialsFilepath"; +import { getHomeDir } from "./getHomeDir"; +import { parseIni } from "./parseIni"; +import { slurpFile } from "./slurpFile"; +const swallowError = () => ({}); +export const CONFIG_PREFIX_SEPARATOR = "."; +export const loadSharedConfigFiles = async (init = {}) => { + const { filepath = getCredentialsFilepath(), configFilepath = getConfigFilepath() } = init; + const homeDir = getHomeDir(); + const relativeHomeDirPrefix = "~/"; + let resolvedFilepath = filepath; + if (filepath.startsWith(relativeHomeDirPrefix)) { + resolvedFilepath = join(homeDir, filepath.slice(2)); + } + let resolvedConfigFilepath = configFilepath; + if (configFilepath.startsWith(relativeHomeDirPrefix)) { + resolvedConfigFilepath = join(homeDir, configFilepath.slice(2)); + } + const parsedFiles = await Promise.all([ + slurpFile(resolvedConfigFilepath, { + ignoreCache: init.ignoreCache, + }) + .then(parseIni) + .then(getConfigData) + .catch(swallowError), + slurpFile(resolvedFilepath, { 
+ ignoreCache: init.ignoreCache, + }) + .then(parseIni) + .catch(swallowError), + ]); + return { + configFile: parsedFiles[0], + credentialsFile: parsedFiles[1], + }; +}; diff --git a/amplify/functions/deleteDocument/node_modules/@smithy/shared-ini-file-loader/dist-es/loadSsoSessionData.js b/amplify/functions/deleteDocument/node_modules/@smithy/shared-ini-file-loader/dist-es/loadSsoSessionData.js new file mode 100644 index 0000000..3bd730b --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@smithy/shared-ini-file-loader/dist-es/loadSsoSessionData.js @@ -0,0 +1,9 @@ +import { getConfigFilepath } from "./getConfigFilepath"; +import { getSsoSessionData } from "./getSsoSessionData"; +import { parseIni } from "./parseIni"; +import { slurpFile } from "./slurpFile"; +const swallowError = () => ({}); +export const loadSsoSessionData = async (init = {}) => slurpFile(init.configFilepath ?? getConfigFilepath()) + .then(parseIni) + .then(getSsoSessionData) + .catch(swallowError); diff --git a/amplify/functions/deleteDocument/node_modules/@smithy/shared-ini-file-loader/dist-es/mergeConfigFiles.js b/amplify/functions/deleteDocument/node_modules/@smithy/shared-ini-file-loader/dist-es/mergeConfigFiles.js new file mode 100644 index 0000000..58576f7 --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@smithy/shared-ini-file-loader/dist-es/mergeConfigFiles.js @@ -0,0 +1,14 @@ +export const mergeConfigFiles = (...files) => { + const merged = {}; + for (const file of files) { + for (const [key, values] of Object.entries(file)) { + if (merged[key] !== undefined) { + Object.assign(merged[key], values); + } + else { + merged[key] = values; + } + } + } + return merged; +}; diff --git a/amplify/functions/deleteDocument/node_modules/@smithy/shared-ini-file-loader/dist-es/parseIni.js b/amplify/functions/deleteDocument/node_modules/@smithy/shared-ini-file-loader/dist-es/parseIni.js new file mode 100644 index 0000000..7af4a6a --- /dev/null +++ 
b/amplify/functions/deleteDocument/node_modules/@smithy/shared-ini-file-loader/dist-es/parseIni.js @@ -0,0 +1,52 @@ +import { IniSectionType } from "@smithy/types"; +import { CONFIG_PREFIX_SEPARATOR } from "./loadSharedConfigFiles"; +const prefixKeyRegex = /^([\w-]+)\s(["'])?([\w-@\+\.%:/]+)\2$/; +const profileNameBlockList = ["__proto__", "profile __proto__"]; +export const parseIni = (iniData) => { + const map = {}; + let currentSection; + let currentSubSection; + for (const iniLine of iniData.split(/\r?\n/)) { + const trimmedLine = iniLine.split(/(^|\s)[;#]/)[0].trim(); + const isSection = trimmedLine[0] === "[" && trimmedLine[trimmedLine.length - 1] === "]"; + if (isSection) { + currentSection = undefined; + currentSubSection = undefined; + const sectionName = trimmedLine.substring(1, trimmedLine.length - 1); + const matches = prefixKeyRegex.exec(sectionName); + if (matches) { + const [, prefix, , name] = matches; + if (Object.values(IniSectionType).includes(prefix)) { + currentSection = [prefix, name].join(CONFIG_PREFIX_SEPARATOR); + } + } + else { + currentSection = sectionName; + } + if (profileNameBlockList.includes(sectionName)) { + throw new Error(`Found invalid profile name "${sectionName}"`); + } + } + else if (currentSection) { + const indexOfEqualsSign = trimmedLine.indexOf("="); + if (![0, -1].includes(indexOfEqualsSign)) { + const [name, value] = [ + trimmedLine.substring(0, indexOfEqualsSign).trim(), + trimmedLine.substring(indexOfEqualsSign + 1).trim(), + ]; + if (value === "") { + currentSubSection = name; + } + else { + if (currentSubSection && iniLine.trimStart() === iniLine) { + currentSubSection = undefined; + } + map[currentSection] = map[currentSection] || {}; + const key = currentSubSection ? 
[currentSubSection, name].join(CONFIG_PREFIX_SEPARATOR) : name; + map[currentSection][key] = value; + } + } + } + } + return map; +}; diff --git a/amplify/functions/deleteDocument/node_modules/@smithy/shared-ini-file-loader/dist-es/parseKnownFiles.js b/amplify/functions/deleteDocument/node_modules/@smithy/shared-ini-file-loader/dist-es/parseKnownFiles.js new file mode 100644 index 0000000..4920e28 --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@smithy/shared-ini-file-loader/dist-es/parseKnownFiles.js @@ -0,0 +1,6 @@ +import { loadSharedConfigFiles } from "./loadSharedConfigFiles"; +import { mergeConfigFiles } from "./mergeConfigFiles"; +export const parseKnownFiles = async (init) => { + const parsedFiles = await loadSharedConfigFiles(init); + return mergeConfigFiles(parsedFiles.configFile, parsedFiles.credentialsFile); +}; diff --git a/amplify/functions/deleteDocument/node_modules/@smithy/shared-ini-file-loader/dist-es/slurpFile.js b/amplify/functions/deleteDocument/node_modules/@smithy/shared-ini-file-loader/dist-es/slurpFile.js new file mode 100644 index 0000000..7b360cc --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@smithy/shared-ini-file-loader/dist-es/slurpFile.js @@ -0,0 +1,9 @@ +import { promises as fsPromises } from "fs"; +const { readFile } = fsPromises; +const filePromisesHash = {}; +export const slurpFile = (path, options) => { + if (!filePromisesHash[path] || options?.ignoreCache) { + filePromisesHash[path] = readFile(path, "utf8"); + } + return filePromisesHash[path]; +}; diff --git a/amplify/functions/deleteDocument/node_modules/@smithy/shared-ini-file-loader/dist-es/types.js b/amplify/functions/deleteDocument/node_modules/@smithy/shared-ini-file-loader/dist-es/types.js new file mode 100644 index 0000000..cb0ff5c --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@smithy/shared-ini-file-loader/dist-es/types.js @@ -0,0 +1 @@ +export {}; diff --git 
a/amplify/functions/deleteDocument/node_modules/@smithy/shared-ini-file-loader/dist-types/getConfigData.d.ts b/amplify/functions/deleteDocument/node_modules/@smithy/shared-ini-file-loader/dist-types/getConfigData.d.ts new file mode 100644 index 0000000..4259831 --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@smithy/shared-ini-file-loader/dist-types/getConfigData.d.ts @@ -0,0 +1,8 @@ +import { ParsedIniData } from "@smithy/types"; +/** + * Returns the config data from parsed ini data. + * * Returns data for `default` + * * Returns profile name without prefix. + * * Returns non-profiles as is. + */ +export declare const getConfigData: (data: ParsedIniData) => ParsedIniData; diff --git a/amplify/functions/deleteDocument/node_modules/@smithy/shared-ini-file-loader/dist-types/getConfigFilepath.d.ts b/amplify/functions/deleteDocument/node_modules/@smithy/shared-ini-file-loader/dist-types/getConfigFilepath.d.ts new file mode 100644 index 0000000..1d123be --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@smithy/shared-ini-file-loader/dist-types/getConfigFilepath.d.ts @@ -0,0 +1,2 @@ +export declare const ENV_CONFIG_PATH = "AWS_CONFIG_FILE"; +export declare const getConfigFilepath: () => string; diff --git a/amplify/functions/deleteDocument/node_modules/@smithy/shared-ini-file-loader/dist-types/getCredentialsFilepath.d.ts b/amplify/functions/deleteDocument/node_modules/@smithy/shared-ini-file-loader/dist-types/getCredentialsFilepath.d.ts new file mode 100644 index 0000000..26fda4a --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@smithy/shared-ini-file-loader/dist-types/getCredentialsFilepath.d.ts @@ -0,0 +1,2 @@ +export declare const ENV_CREDENTIALS_PATH = "AWS_SHARED_CREDENTIALS_FILE"; +export declare const getCredentialsFilepath: () => string; diff --git a/amplify/functions/deleteDocument/node_modules/@smithy/shared-ini-file-loader/dist-types/getHomeDir.d.ts 
b/amplify/functions/deleteDocument/node_modules/@smithy/shared-ini-file-loader/dist-types/getHomeDir.d.ts new file mode 100644 index 0000000..5d15bf1 --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@smithy/shared-ini-file-loader/dist-types/getHomeDir.d.ts @@ -0,0 +1,6 @@ +/** + * Get the HOME directory for the current runtime. + * + * @internal + */ +export declare const getHomeDir: () => string; diff --git a/amplify/functions/deleteDocument/node_modules/@smithy/shared-ini-file-loader/dist-types/getProfileName.d.ts b/amplify/functions/deleteDocument/node_modules/@smithy/shared-ini-file-loader/dist-types/getProfileName.d.ts new file mode 100644 index 0000000..5a608b2 --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@smithy/shared-ini-file-loader/dist-types/getProfileName.d.ts @@ -0,0 +1,15 @@ +/** + * @internal + */ +export declare const ENV_PROFILE = "AWS_PROFILE"; +/** + * @internal + */ +export declare const DEFAULT_PROFILE = "default"; +/** + * Returns profile with priority order code - ENV - default. + * @internal + */ +export declare const getProfileName: (init: { + profile?: string; +}) => string; diff --git a/amplify/functions/deleteDocument/node_modules/@smithy/shared-ini-file-loader/dist-types/getSSOTokenFilepath.d.ts b/amplify/functions/deleteDocument/node_modules/@smithy/shared-ini-file-loader/dist-types/getSSOTokenFilepath.d.ts new file mode 100644 index 0000000..44a4030 --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@smithy/shared-ini-file-loader/dist-types/getSSOTokenFilepath.d.ts @@ -0,0 +1,5 @@ +/** + * Returns the filepath of the file where SSO token is stored. 
+ * @internal + */ +export declare const getSSOTokenFilepath: (id: string) => string; diff --git a/amplify/functions/deleteDocument/node_modules/@smithy/shared-ini-file-loader/dist-types/getSSOTokenFromFile.d.ts b/amplify/functions/deleteDocument/node_modules/@smithy/shared-ini-file-loader/dist-types/getSSOTokenFromFile.d.ts new file mode 100644 index 0000000..18199ac --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@smithy/shared-ini-file-loader/dist-types/getSSOTokenFromFile.d.ts @@ -0,0 +1,46 @@ +/** + * Cached SSO token retrieved from SSO login flow. + * @public + */ +export interface SSOToken { + /** + * A base64 encoded string returned by the sso-oidc service. + */ + accessToken: string; + /** + * The expiration time of the accessToken as an RFC 3339 formatted timestamp. + */ + expiresAt: string; + /** + * The token used to obtain an access token in the event that the accessToken is invalid or expired. + */ + refreshToken?: string; + /** + * The unique identifier string for each client. The client ID generated when performing the registration + * portion of the OIDC authorization flow. This is used to refresh the accessToken. + */ + clientId?: string; + /** + * A secret string generated when performing the registration portion of the OIDC authorization flow. + * This is used to refresh the accessToken. + */ + clientSecret?: string; + /** + * The expiration time of the client registration (clientId and clientSecret) as an RFC 3339 formatted timestamp. + */ + registrationExpiresAt?: string; + /** + * The configured sso_region for the profile that credentials are being resolved for. + */ + region?: string; + /** + * The configured sso_start_url for the profile that credentials are being resolved for. + */ + startUrl?: string; +} +/** + * @internal + * @param id - can be either a start URL or the SSO session name. + * Returns the SSO token from the file system. 
+ */ +export declare const getSSOTokenFromFile: (id: string) => Promise; diff --git a/amplify/functions/deleteDocument/node_modules/@smithy/shared-ini-file-loader/dist-types/getSsoSessionData.d.ts b/amplify/functions/deleteDocument/node_modules/@smithy/shared-ini-file-loader/dist-types/getSsoSessionData.d.ts new file mode 100644 index 0000000..9be020f --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@smithy/shared-ini-file-loader/dist-types/getSsoSessionData.d.ts @@ -0,0 +1,6 @@ +import { ParsedIniData } from "@smithy/types"; +/** + * Returns the sso-session data from parsed ini data by reading + * ssoSessionName after sso-session prefix including/excluding quotes + */ +export declare const getSsoSessionData: (data: ParsedIniData) => ParsedIniData; diff --git a/amplify/functions/deleteDocument/node_modules/@smithy/shared-ini-file-loader/dist-types/index.d.ts b/amplify/functions/deleteDocument/node_modules/@smithy/shared-ini-file-loader/dist-types/index.d.ts new file mode 100644 index 0000000..3e8b2c7 --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@smithy/shared-ini-file-loader/dist-types/index.d.ts @@ -0,0 +1,8 @@ +export * from "./getHomeDir"; +export * from "./getProfileName"; +export * from "./getSSOTokenFilepath"; +export * from "./getSSOTokenFromFile"; +export * from "./loadSharedConfigFiles"; +export * from "./loadSsoSessionData"; +export * from "./parseKnownFiles"; +export * from "./types"; diff --git a/amplify/functions/deleteDocument/node_modules/@smithy/shared-ini-file-loader/dist-types/loadSharedConfigFiles.d.ts b/amplify/functions/deleteDocument/node_modules/@smithy/shared-ini-file-loader/dist-types/loadSharedConfigFiles.d.ts new file mode 100644 index 0000000..3897ac3 --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@smithy/shared-ini-file-loader/dist-types/loadSharedConfigFiles.d.ts @@ -0,0 +1,36 @@ +import { Logger, SharedConfigFiles } from "@smithy/types"; +/** + * @public + */ +export interface 
SharedConfigInit { + /** + * The path at which to locate the ini credentials file. Defaults to the + * value of the `AWS_SHARED_CREDENTIALS_FILE` environment variable (if + * defined) or `~/.aws/credentials` otherwise. + */ + filepath?: string; + /** + * The path at which to locate the ini config file. Defaults to the value of + * the `AWS_CONFIG_FILE` environment variable (if defined) or + * `~/.aws/config` otherwise. + */ + configFilepath?: string; + /** + * Configuration files are normally cached after the first time they are loaded. When this + * property is set, the provider will always reload any configuration files loaded before. + */ + ignoreCache?: boolean; + /** + * For credential resolution trace logging. + */ + logger?: Logger; +} +/** + * @internal + */ +export declare const CONFIG_PREFIX_SEPARATOR = "."; +/** + * Loads the config and credentials files. + * @internal + */ +export declare const loadSharedConfigFiles: (init?: SharedConfigInit) => Promise; diff --git a/amplify/functions/deleteDocument/node_modules/@smithy/shared-ini-file-loader/dist-types/loadSsoSessionData.d.ts b/amplify/functions/deleteDocument/node_modules/@smithy/shared-ini-file-loader/dist-types/loadSsoSessionData.d.ts new file mode 100644 index 0000000..ed6c367 --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@smithy/shared-ini-file-loader/dist-types/loadSsoSessionData.d.ts @@ -0,0 +1,17 @@ +import { ParsedIniData } from "@smithy/types"; +/** + * Subset of {@link SharedConfigInit}. + * @internal + */ +export interface SsoSessionInit { + /** + * The path at which to locate the ini config file. Defaults to the value of + * the `AWS_CONFIG_FILE` environment variable (if defined) or + * `~/.aws/config` otherwise. 
+ */ + configFilepath?: string; +} +/** + * @internal + */ +export declare const loadSsoSessionData: (init?: SsoSessionInit) => Promise; diff --git a/amplify/functions/deleteDocument/node_modules/@smithy/shared-ini-file-loader/dist-types/mergeConfigFiles.d.ts b/amplify/functions/deleteDocument/node_modules/@smithy/shared-ini-file-loader/dist-types/mergeConfigFiles.d.ts new file mode 100644 index 0000000..46b8965 --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@smithy/shared-ini-file-loader/dist-types/mergeConfigFiles.d.ts @@ -0,0 +1,7 @@ +import { ParsedIniData } from "@smithy/types"; +/** + * Merge multiple profile config files such that settings each file are kept together + * + * @internal + */ +export declare const mergeConfigFiles: (...files: ParsedIniData[]) => ParsedIniData; diff --git a/amplify/functions/deleteDocument/node_modules/@smithy/shared-ini-file-loader/dist-types/parseIni.d.ts b/amplify/functions/deleteDocument/node_modules/@smithy/shared-ini-file-loader/dist-types/parseIni.d.ts new file mode 100644 index 0000000..0ae5851 --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@smithy/shared-ini-file-loader/dist-types/parseIni.d.ts @@ -0,0 +1,2 @@ +import { ParsedIniData } from "@smithy/types"; +export declare const parseIni: (iniData: string) => ParsedIniData; diff --git a/amplify/functions/deleteDocument/node_modules/@smithy/shared-ini-file-loader/dist-types/parseKnownFiles.d.ts b/amplify/functions/deleteDocument/node_modules/@smithy/shared-ini-file-loader/dist-types/parseKnownFiles.d.ts new file mode 100644 index 0000000..d5fcafa --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@smithy/shared-ini-file-loader/dist-types/parseKnownFiles.d.ts @@ -0,0 +1,18 @@ +import { ParsedIniData } from "@smithy/types"; +import { SharedConfigInit } from "./loadSharedConfigFiles"; +/** + * @public + */ +export interface SourceProfileInit extends SharedConfigInit { + /** + * The configuration profile to use. 
+ */ + profile?: string; +} +/** + * Load profiles from credentials and config INI files and normalize them into a + * single profile list. + * + * @internal + */ +export declare const parseKnownFiles: (init: SourceProfileInit) => Promise; diff --git a/amplify/functions/deleteDocument/node_modules/@smithy/shared-ini-file-loader/dist-types/slurpFile.d.ts b/amplify/functions/deleteDocument/node_modules/@smithy/shared-ini-file-loader/dist-types/slurpFile.d.ts new file mode 100644 index 0000000..a3bc84c --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@smithy/shared-ini-file-loader/dist-types/slurpFile.d.ts @@ -0,0 +1,5 @@ +interface SlurpFileOptions { + ignoreCache?: boolean; +} +export declare const slurpFile: (path: string, options?: SlurpFileOptions) => Promise; +export {}; diff --git a/amplify/functions/deleteDocument/node_modules/@smithy/shared-ini-file-loader/dist-types/ts3.4/getConfigData.d.ts b/amplify/functions/deleteDocument/node_modules/@smithy/shared-ini-file-loader/dist-types/ts3.4/getConfigData.d.ts new file mode 100644 index 0000000..c6b7588 --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@smithy/shared-ini-file-loader/dist-types/ts3.4/getConfigData.d.ts @@ -0,0 +1,8 @@ +import { ParsedIniData } from "@smithy/types"; +/** + * Returns the config data from parsed ini data. + * * Returns data for `default` + * * Returns profile name without prefix. + * * Returns non-profiles as is. 
+ */ +export declare const getConfigData: (data: ParsedIniData) => ParsedIniData; diff --git a/amplify/functions/deleteDocument/node_modules/@smithy/shared-ini-file-loader/dist-types/ts3.4/getConfigFilepath.d.ts b/amplify/functions/deleteDocument/node_modules/@smithy/shared-ini-file-loader/dist-types/ts3.4/getConfigFilepath.d.ts new file mode 100644 index 0000000..dc3699b --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@smithy/shared-ini-file-loader/dist-types/ts3.4/getConfigFilepath.d.ts @@ -0,0 +1,2 @@ +export declare const ENV_CONFIG_PATH = "AWS_CONFIG_FILE"; +export declare const getConfigFilepath: () => string; diff --git a/amplify/functions/deleteDocument/node_modules/@smithy/shared-ini-file-loader/dist-types/ts3.4/getCredentialsFilepath.d.ts b/amplify/functions/deleteDocument/node_modules/@smithy/shared-ini-file-loader/dist-types/ts3.4/getCredentialsFilepath.d.ts new file mode 100644 index 0000000..f2c95b4 --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@smithy/shared-ini-file-loader/dist-types/ts3.4/getCredentialsFilepath.d.ts @@ -0,0 +1,2 @@ +export declare const ENV_CREDENTIALS_PATH = "AWS_SHARED_CREDENTIALS_FILE"; +export declare const getCredentialsFilepath: () => string; diff --git a/amplify/functions/deleteDocument/node_modules/@smithy/shared-ini-file-loader/dist-types/ts3.4/getHomeDir.d.ts b/amplify/functions/deleteDocument/node_modules/@smithy/shared-ini-file-loader/dist-types/ts3.4/getHomeDir.d.ts new file mode 100644 index 0000000..4c1bd7a --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@smithy/shared-ini-file-loader/dist-types/ts3.4/getHomeDir.d.ts @@ -0,0 +1,6 @@ +/** + * Get the HOME directory for the current runtime. 
+ * + * @internal + */ +export declare const getHomeDir: () => string; diff --git a/amplify/functions/deleteDocument/node_modules/@smithy/shared-ini-file-loader/dist-types/ts3.4/getProfileName.d.ts b/amplify/functions/deleteDocument/node_modules/@smithy/shared-ini-file-loader/dist-types/ts3.4/getProfileName.d.ts new file mode 100644 index 0000000..91cb16b --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@smithy/shared-ini-file-loader/dist-types/ts3.4/getProfileName.d.ts @@ -0,0 +1,15 @@ +/** + * @internal + */ +export declare const ENV_PROFILE = "AWS_PROFILE"; +/** + * @internal + */ +export declare const DEFAULT_PROFILE = "default"; +/** + * Returns profile with priority order code - ENV - default. + * @internal + */ +export declare const getProfileName: (init: { + profile?: string; +}) => string; diff --git a/amplify/functions/deleteDocument/node_modules/@smithy/shared-ini-file-loader/dist-types/ts3.4/getSSOTokenFilepath.d.ts b/amplify/functions/deleteDocument/node_modules/@smithy/shared-ini-file-loader/dist-types/ts3.4/getSSOTokenFilepath.d.ts new file mode 100644 index 0000000..e549daa --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@smithy/shared-ini-file-loader/dist-types/ts3.4/getSSOTokenFilepath.d.ts @@ -0,0 +1,5 @@ +/** + * Returns the filepath of the file where SSO token is stored. + * @internal + */ +export declare const getSSOTokenFilepath: (id: string) => string; diff --git a/amplify/functions/deleteDocument/node_modules/@smithy/shared-ini-file-loader/dist-types/ts3.4/getSSOTokenFromFile.d.ts b/amplify/functions/deleteDocument/node_modules/@smithy/shared-ini-file-loader/dist-types/ts3.4/getSSOTokenFromFile.d.ts new file mode 100644 index 0000000..140979c --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@smithy/shared-ini-file-loader/dist-types/ts3.4/getSSOTokenFromFile.d.ts @@ -0,0 +1,46 @@ +/** + * Cached SSO token retrieved from SSO login flow. 
+ * @public + */ +export interface SSOToken { + /** + * A base64 encoded string returned by the sso-oidc service. + */ + accessToken: string; + /** + * The expiration time of the accessToken as an RFC 3339 formatted timestamp. + */ + expiresAt: string; + /** + * The token used to obtain an access token in the event that the accessToken is invalid or expired. + */ + refreshToken?: string; + /** + * The unique identifier string for each client. The client ID generated when performing the registration + * portion of the OIDC authorization flow. This is used to refresh the accessToken. + */ + clientId?: string; + /** + * A secret string generated when performing the registration portion of the OIDC authorization flow. + * This is used to refresh the accessToken. + */ + clientSecret?: string; + /** + * The expiration time of the client registration (clientId and clientSecret) as an RFC 3339 formatted timestamp. + */ + registrationExpiresAt?: string; + /** + * The configured sso_region for the profile that credentials are being resolved for. + */ + region?: string; + /** + * The configured sso_start_url for the profile that credentials are being resolved for. + */ + startUrl?: string; +} +/** + * @internal + * @param id - can be either a start URL or the SSO session name. + * Returns the SSO token from the file system. 
+ */ +export declare const getSSOTokenFromFile: (id: string) => Promise; diff --git a/amplify/functions/deleteDocument/node_modules/@smithy/shared-ini-file-loader/dist-types/ts3.4/getSsoSessionData.d.ts b/amplify/functions/deleteDocument/node_modules/@smithy/shared-ini-file-loader/dist-types/ts3.4/getSsoSessionData.d.ts new file mode 100644 index 0000000..04a1a99 --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@smithy/shared-ini-file-loader/dist-types/ts3.4/getSsoSessionData.d.ts @@ -0,0 +1,6 @@ +import { ParsedIniData } from "@smithy/types"; +/** + * Returns the sso-session data from parsed ini data by reading + * ssoSessionName after sso-session prefix including/excluding quotes + */ +export declare const getSsoSessionData: (data: ParsedIniData) => ParsedIniData; diff --git a/amplify/functions/deleteDocument/node_modules/@smithy/shared-ini-file-loader/dist-types/ts3.4/index.d.ts b/amplify/functions/deleteDocument/node_modules/@smithy/shared-ini-file-loader/dist-types/ts3.4/index.d.ts new file mode 100644 index 0000000..12ed3bb --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@smithy/shared-ini-file-loader/dist-types/ts3.4/index.d.ts @@ -0,0 +1,8 @@ +export * from "./getHomeDir"; +export * from "./getProfileName"; +export * from "./getSSOTokenFilepath"; +export * from "./getSSOTokenFromFile"; +export * from "./loadSharedConfigFiles"; +export * from "./loadSsoSessionData"; +export * from "./parseKnownFiles"; +export * from "./types"; diff --git a/amplify/functions/deleteDocument/node_modules/@smithy/shared-ini-file-loader/dist-types/ts3.4/loadSharedConfigFiles.d.ts b/amplify/functions/deleteDocument/node_modules/@smithy/shared-ini-file-loader/dist-types/ts3.4/loadSharedConfigFiles.d.ts new file mode 100644 index 0000000..969254e --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@smithy/shared-ini-file-loader/dist-types/ts3.4/loadSharedConfigFiles.d.ts @@ -0,0 +1,36 @@ +import { Logger, SharedConfigFiles } from 
"@smithy/types"; +/** + * @public + */ +export interface SharedConfigInit { + /** + * The path at which to locate the ini credentials file. Defaults to the + * value of the `AWS_SHARED_CREDENTIALS_FILE` environment variable (if + * defined) or `~/.aws/credentials` otherwise. + */ + filepath?: string; + /** + * The path at which to locate the ini config file. Defaults to the value of + * the `AWS_CONFIG_FILE` environment variable (if defined) or + * `~/.aws/config` otherwise. + */ + configFilepath?: string; + /** + * Configuration files are normally cached after the first time they are loaded. When this + * property is set, the provider will always reload any configuration files loaded before. + */ + ignoreCache?: boolean; + /** + * For credential resolution trace logging. + */ + logger?: Logger; +} +/** + * @internal + */ +export declare const CONFIG_PREFIX_SEPARATOR = "."; +/** + * Loads the config and credentials files. + * @internal + */ +export declare const loadSharedConfigFiles: (init?: SharedConfigInit) => Promise; diff --git a/amplify/functions/deleteDocument/node_modules/@smithy/shared-ini-file-loader/dist-types/ts3.4/loadSsoSessionData.d.ts b/amplify/functions/deleteDocument/node_modules/@smithy/shared-ini-file-loader/dist-types/ts3.4/loadSsoSessionData.d.ts new file mode 100644 index 0000000..08e265e --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@smithy/shared-ini-file-loader/dist-types/ts3.4/loadSsoSessionData.d.ts @@ -0,0 +1,17 @@ +import { ParsedIniData } from "@smithy/types"; +/** + * Subset of {@link SharedConfigInit}. + * @internal + */ +export interface SsoSessionInit { + /** + * The path at which to locate the ini config file. Defaults to the value of + * the `AWS_CONFIG_FILE` environment variable (if defined) or + * `~/.aws/config` otherwise. 
+ */ + configFilepath?: string; +} +/** + * @internal + */ +export declare const loadSsoSessionData: (init?: SsoSessionInit) => Promise; diff --git a/amplify/functions/deleteDocument/node_modules/@smithy/shared-ini-file-loader/dist-types/ts3.4/mergeConfigFiles.d.ts b/amplify/functions/deleteDocument/node_modules/@smithy/shared-ini-file-loader/dist-types/ts3.4/mergeConfigFiles.d.ts new file mode 100644 index 0000000..f94e725 --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@smithy/shared-ini-file-loader/dist-types/ts3.4/mergeConfigFiles.d.ts @@ -0,0 +1,7 @@ +import { ParsedIniData } from "@smithy/types"; +/** + * Merge multiple profile config files such that settings each file are kept together + * + * @internal + */ +export declare const mergeConfigFiles: (...files: ParsedIniData[]) => ParsedIniData; diff --git a/amplify/functions/deleteDocument/node_modules/@smithy/shared-ini-file-loader/dist-types/ts3.4/parseIni.d.ts b/amplify/functions/deleteDocument/node_modules/@smithy/shared-ini-file-loader/dist-types/ts3.4/parseIni.d.ts new file mode 100644 index 0000000..4e58d0e --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@smithy/shared-ini-file-loader/dist-types/ts3.4/parseIni.d.ts @@ -0,0 +1,2 @@ +import { ParsedIniData } from "@smithy/types"; +export declare const parseIni: (iniData: string) => ParsedIniData; diff --git a/amplify/functions/deleteDocument/node_modules/@smithy/shared-ini-file-loader/dist-types/ts3.4/parseKnownFiles.d.ts b/amplify/functions/deleteDocument/node_modules/@smithy/shared-ini-file-loader/dist-types/ts3.4/parseKnownFiles.d.ts new file mode 100644 index 0000000..46ba24b --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@smithy/shared-ini-file-loader/dist-types/ts3.4/parseKnownFiles.d.ts @@ -0,0 +1,18 @@ +import { ParsedIniData } from "@smithy/types"; +import { SharedConfigInit } from "./loadSharedConfigFiles"; +/** + * @public + */ +export interface SourceProfileInit extends SharedConfigInit { 
+ /** + * The configuration profile to use. + */ + profile?: string; +} +/** + * Load profiles from credentials and config INI files and normalize them into a + * single profile list. + * + * @internal + */ +export declare const parseKnownFiles: (init: SourceProfileInit) => Promise; diff --git a/amplify/functions/deleteDocument/node_modules/@smithy/shared-ini-file-loader/dist-types/ts3.4/slurpFile.d.ts b/amplify/functions/deleteDocument/node_modules/@smithy/shared-ini-file-loader/dist-types/ts3.4/slurpFile.d.ts new file mode 100644 index 0000000..33e7a0c --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@smithy/shared-ini-file-loader/dist-types/ts3.4/slurpFile.d.ts @@ -0,0 +1,5 @@ +interface SlurpFileOptions { + ignoreCache?: boolean; +} +export declare const slurpFile: (path: string, options?: SlurpFileOptions) => Promise; +export {}; diff --git a/amplify/functions/deleteDocument/node_modules/@smithy/shared-ini-file-loader/dist-types/ts3.4/types.d.ts b/amplify/functions/deleteDocument/node_modules/@smithy/shared-ini-file-loader/dist-types/ts3.4/types.d.ts new file mode 100644 index 0000000..6d6acbd --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@smithy/shared-ini-file-loader/dist-types/ts3.4/types.d.ts @@ -0,0 +1,16 @@ +import { ParsedIniData as __ParsedIniData, Profile as __Profile, SharedConfigFiles as __SharedConfigFiles } from "@smithy/types"; +/** + * @internal + * @deprecated Use Profile from "\@smithy/types" instead + */ +export type Profile = __Profile; +/** + * @internal + * @deprecated Use ParsedIniData from "\@smithy/types" instead + */ +export type ParsedIniData = __ParsedIniData; +/** + * @internal + * @deprecated Use SharedConfigFiles from "\@smithy/types" instead + */ +export type SharedConfigFiles = __SharedConfigFiles; diff --git a/amplify/functions/deleteDocument/node_modules/@smithy/shared-ini-file-loader/dist-types/types.d.ts 
b/amplify/functions/deleteDocument/node_modules/@smithy/shared-ini-file-loader/dist-types/types.d.ts new file mode 100644 index 0000000..c7c8d92 --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@smithy/shared-ini-file-loader/dist-types/types.d.ts @@ -0,0 +1,16 @@ +import { ParsedIniData as __ParsedIniData, Profile as __Profile, SharedConfigFiles as __SharedConfigFiles } from "@smithy/types"; +/** + * @internal + * @deprecated Use Profile from "\@smithy/types" instead + */ +export type Profile = __Profile; +/** + * @internal + * @deprecated Use ParsedIniData from "\@smithy/types" instead + */ +export type ParsedIniData = __ParsedIniData; +/** + * @internal + * @deprecated Use SharedConfigFiles from "\@smithy/types" instead + */ +export type SharedConfigFiles = __SharedConfigFiles; diff --git a/amplify/functions/deleteDocument/node_modules/@smithy/shared-ini-file-loader/package.json b/amplify/functions/deleteDocument/node_modules/@smithy/shared-ini-file-loader/package.json new file mode 100644 index 0000000..e6dfbfe --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@smithy/shared-ini-file-loader/package.json @@ -0,0 +1,72 @@ +{ + "name": "@smithy/shared-ini-file-loader", + "version": "4.0.2", + "dependencies": { + "@smithy/types": "^4.2.0", + "tslib": "^2.6.2" + }, + "devDependencies": { + "@types/node": "^18.11.9", + "concurrently": "7.0.0", + "downlevel-dts": "0.10.1", + "rimraf": "3.0.2", + "typedoc": "0.23.23" + }, + "scripts": { + "build": "concurrently 'yarn:build:cjs' 'yarn:build:es' 'yarn:build:types && yarn build:types:downlevel'", + "build:cjs": "node ../../scripts/inline shared-ini-file-loader", + "build:es": "yarn g:tsc -p tsconfig.es.json", + "build:types": "yarn g:tsc -p tsconfig.types.json", + "build:types:downlevel": "rimraf dist-types/ts3.4 && downlevel-dts dist-types dist-types/ts3.4", + "stage-release": "rimraf ./.release && yarn pack && mkdir ./.release && tar zxvf ./package.tgz --directory ./.release && rm 
./package.tgz", + "clean": "rimraf ./dist-* && rimraf *.tsbuildinfo || exit 0", + "lint": "eslint -c ../../.eslintrc.js \"src/**/*.ts\"", + "format": "prettier --config ../../prettier.config.js --ignore-path ../../.prettierignore --write \"**/*.{ts,md,json}\"", + "extract:docs": "api-extractor run --local", + "test": "yarn g:vitest run", + "test:watch": "yarn g:vitest watch" + }, + "author": { + "name": "AWS SDK for JavaScript Team", + "url": "https://aws.amazon.com/javascript/" + }, + "license": "Apache-2.0", + "main": "./dist-cjs/index.js", + "module": "./dist-es/index.js", + "types": "./dist-types/index.d.ts", + "browser": { + "./dist-es/getSSOTokenFromFile": false, + "./dist-es/slurpFile": false + }, + "react-native": { + "./dist-cjs/getSSOTokenFromFile": false, + "./dist-cjs/slurpFile": false, + "./dist-es/getSSOTokenFromFile": false, + "./dist-es/slurpFile": false + }, + "engines": { + "node": ">=18.0.0" + }, + "typesVersions": { + "<4.0": { + "dist-types/*": [ + "dist-types/ts3.4/*" + ] + } + }, + "files": [ + "dist-*/**" + ], + "homepage": "https://github.com/smithy-lang/smithy-typescript/tree/main/packages/shared-ini-file-loader", + "repository": { + "type": "git", + "url": "https://github.com/smithy-lang/smithy-typescript.git", + "directory": "packages/shared-ini-file-loader" + }, + "typedoc": { + "entryPoint": "src/index.ts" + }, + "publishConfig": { + "directory": ".release/package" + } +} \ No newline at end of file diff --git a/amplify/functions/deleteDocument/node_modules/@smithy/signature-v4/LICENSE b/amplify/functions/deleteDocument/node_modules/@smithy/signature-v4/LICENSE new file mode 100644 index 0000000..7b6491b --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@smithy/signature-v4/LICENSE @@ -0,0 +1,201 @@ +Apache License + Version 2.0, January 2004 + http://www.apache.org/licenses/ + + TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION + + 1. Definitions. 
+ + "License" shall mean the terms and conditions for use, reproduction, + and distribution as defined by Sections 1 through 9 of this document. + + "Licensor" shall mean the copyright owner or entity authorized by + the copyright owner that is granting the License. + + "Legal Entity" shall mean the union of the acting entity and all + other entities that control, are controlled by, or are under common + control with that entity. For the purposes of this definition, + "control" means (i) the power, direct or indirect, to cause the + direction or management of such entity, whether by contract or + otherwise, or (ii) ownership of fifty percent (50%) or more of the + outstanding shares, or (iii) beneficial ownership of such entity. + + "You" (or "Your") shall mean an individual or Legal Entity + exercising permissions granted by this License. + + "Source" form shall mean the preferred form for making modifications, + including but not limited to software source code, documentation + source, and configuration files. + + "Object" form shall mean any form resulting from mechanical + transformation or translation of a Source form, including but + not limited to compiled object code, generated documentation, + and conversions to other media types. + + "Work" shall mean the work of authorship, whether in Source or + Object form, made available under the License, as indicated by a + copyright notice that is included in or attached to the work + (an example is provided in the Appendix below). + + "Derivative Works" shall mean any work, whether in Source or Object + form, that is based on (or derived from) the Work and for which the + editorial revisions, annotations, elaborations, or other modifications + represent, as a whole, an original work of authorship. For the purposes + of this License, Derivative Works shall not include works that remain + separable from, or merely link (or bind by name) to the interfaces of, + the Work and Derivative Works thereof. 
+ + "Contribution" shall mean any work of authorship, including + the original version of the Work and any modifications or additions + to that Work or Derivative Works thereof, that is intentionally + submitted to Licensor for inclusion in the Work by the copyright owner + or by an individual or Legal Entity authorized to submit on behalf of + the copyright owner. For the purposes of this definition, "submitted" + means any form of electronic, verbal, or written communication sent + to the Licensor or its representatives, including but not limited to + communication on electronic mailing lists, source code control systems, + and issue tracking systems that are managed by, or on behalf of, the + Licensor for the purpose of discussing and improving the Work, but + excluding communication that is conspicuously marked or otherwise + designated in writing by the copyright owner as "Not a Contribution." + + "Contributor" shall mean Licensor and any individual or Legal Entity + on behalf of whom a Contribution has been received by Licensor and + subsequently incorporated within the Work. + + 2. Grant of Copyright License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + copyright license to reproduce, prepare Derivative Works of, + publicly display, publicly perform, sublicense, and distribute the + Work and such Derivative Works in Source or Object form. + + 3. Grant of Patent License. 
Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + (except as stated in this section) patent license to make, have made, + use, offer to sell, sell, import, and otherwise transfer the Work, + where such license applies only to those patent claims licensable + by such Contributor that are necessarily infringed by their + Contribution(s) alone or by combination of their Contribution(s) + with the Work to which such Contribution(s) was submitted. If You + institute patent litigation against any entity (including a + cross-claim or counterclaim in a lawsuit) alleging that the Work + or a Contribution incorporated within the Work constitutes direct + or contributory patent infringement, then any patent licenses + granted to You under this License for that Work shall terminate + as of the date such litigation is filed. + + 4. Redistribution. You may reproduce and distribute copies of the + Work or Derivative Works thereof in any medium, with or without + modifications, and in Source or Object form, provided that You + meet the following conditions: + + (a) You must give any other recipients of the Work or + Derivative Works a copy of this License; and + + (b) You must cause any modified files to carry prominent notices + stating that You changed the files; and + + (c) You must retain, in the Source form of any Derivative Works + that You distribute, all copyright, patent, trademark, and + attribution notices from the Source form of the Work, + excluding those notices that do not pertain to any part of + the Derivative Works; and + + (d) If the Work includes a "NOTICE" text file as part of its + distribution, then any Derivative Works that You distribute must + include a readable copy of the attribution notices contained + within such NOTICE file, excluding those notices that do not + pertain to any part of the Derivative Works, in at least one + of 
the following places: within a NOTICE text file distributed + as part of the Derivative Works; within the Source form or + documentation, if provided along with the Derivative Works; or, + within a display generated by the Derivative Works, if and + wherever such third-party notices normally appear. The contents + of the NOTICE file are for informational purposes only and + do not modify the License. You may add Your own attribution + notices within Derivative Works that You distribute, alongside + or as an addendum to the NOTICE text from the Work, provided + that such additional attribution notices cannot be construed + as modifying the License. + + You may add Your own copyright statement to Your modifications and + may provide additional or different license terms and conditions + for use, reproduction, or distribution of Your modifications, or + for any such Derivative Works as a whole, provided Your use, + reproduction, and distribution of the Work otherwise complies with + the conditions stated in this License. + + 5. Submission of Contributions. Unless You explicitly state otherwise, + any Contribution intentionally submitted for inclusion in the Work + by You to the Licensor shall be under the terms and conditions of + this License, without any additional terms or conditions. + Notwithstanding the above, nothing herein shall supersede or modify + the terms of any separate license agreement you may have executed + with Licensor regarding such Contributions. + + 6. Trademarks. This License does not grant permission to use the trade + names, trademarks, service marks, or product names of the Licensor, + except as required for reasonable and customary use in describing the + origin of the Work and reproducing the content of the NOTICE file. + + 7. Disclaimer of Warranty. 
Unless required by applicable law or + agreed to in writing, Licensor provides the Work (and each + Contributor provides its Contributions) on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or + implied, including, without limitation, any warranties or conditions + of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A + PARTICULAR PURPOSE. You are solely responsible for determining the + appropriateness of using or redistributing the Work and assume any + risks associated with Your exercise of permissions under this License. + + 8. Limitation of Liability. In no event and under no legal theory, + whether in tort (including negligence), contract, or otherwise, + unless required by applicable law (such as deliberate and grossly + negligent acts) or agreed to in writing, shall any Contributor be + liable to You for damages, including any direct, indirect, special, + incidental, or consequential damages of any character arising as a + result of this License or out of the use or inability to use the + Work (including but not limited to damages for loss of goodwill, + work stoppage, computer failure or malfunction, or any and all + other commercial damages or losses), even if such Contributor + has been advised of the possibility of such damages. + + 9. Accepting Warranty or Additional Liability. While redistributing + the Work or Derivative Works thereof, You may choose to offer, + and charge a fee for, acceptance of support, warranty, indemnity, + or other liability obligations and/or rights consistent with this + License. However, in accepting such obligations, You may act only + on Your own behalf and on Your sole responsibility, not on behalf + of any other Contributor, and only if You agree to indemnify, + defend, and hold each Contributor harmless for any liability + incurred by, or claims asserted against, such Contributor by reason + of your accepting any such warranty or additional liability. 
+ + END OF TERMS AND CONDITIONS + + APPENDIX: How to apply the Apache License to your work. + + To apply the Apache License to your work, attach the following + boilerplate notice, with the fields enclosed by brackets "{}" + replaced with your own identifying information. (Don't include + the brackets!) The text should be enclosed in the appropriate + comment syntax for the file format. We also recommend that a + file or class name and description of purpose be included on the + same "printed page" as the copyright notice for easier + identification within third-party archives. + + Copyright 2018-2020 Amazon.com, Inc. or its affiliates. All Rights Reserved. + + Licensed under the Apache License, Version 2.0 (the "License"); + you may not use this file except in compliance with the License. + You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + + Unless required by applicable law or agreed to in writing, software + distributed under the License is distributed on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + See the License for the specific language governing permissions and + limitations under the License. 
\ No newline at end of file diff --git a/amplify/functions/deleteDocument/node_modules/@smithy/signature-v4/README.md b/amplify/functions/deleteDocument/node_modules/@smithy/signature-v4/README.md new file mode 100644 index 0000000..3bc9a17 --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@smithy/signature-v4/README.md @@ -0,0 +1,11 @@ +# @smithy/signature-v4 + +[![NPM version](https://img.shields.io/npm/v/@smithy/signature-v4/latest.svg)](https://www.npmjs.com/package/@smithy/signature-v4) +[![NPM downloads](https://img.shields.io/npm/dm/@smithy/signature-v4.svg)](https://www.npmjs.com/package/@smithy/signature-v4) + +This package contains an implementation of the [AWS Signature Version 4](https://docs.aws.amazon.com/AmazonS3/latest/API/sig-v4-authenticating-requests.html) +authentication scheme. + +It is internal to Smithy-TypeScript generated clients, and not generally intended for standalone usage outside this context. + +For custom usage, inspect the interface of the SignatureV4 class. 
diff --git a/amplify/functions/deleteDocument/node_modules/@smithy/signature-v4/dist-cjs/HeaderFormatter.js b/amplify/functions/deleteDocument/node_modules/@smithy/signature-v4/dist-cjs/HeaderFormatter.js new file mode 100644 index 0000000..532e610 --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@smithy/signature-v4/dist-cjs/HeaderFormatter.js @@ -0,0 +1 @@ +module.exports = require("./index.js"); \ No newline at end of file diff --git a/amplify/functions/deleteDocument/node_modules/@smithy/signature-v4/dist-cjs/SignatureV4.js b/amplify/functions/deleteDocument/node_modules/@smithy/signature-v4/dist-cjs/SignatureV4.js new file mode 100644 index 0000000..532e610 --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@smithy/signature-v4/dist-cjs/SignatureV4.js @@ -0,0 +1 @@ +module.exports = require("./index.js"); \ No newline at end of file diff --git a/amplify/functions/deleteDocument/node_modules/@smithy/signature-v4/dist-cjs/SignatureV4Base.js b/amplify/functions/deleteDocument/node_modules/@smithy/signature-v4/dist-cjs/SignatureV4Base.js new file mode 100644 index 0000000..532e610 --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@smithy/signature-v4/dist-cjs/SignatureV4Base.js @@ -0,0 +1 @@ +module.exports = require("./index.js"); \ No newline at end of file diff --git a/amplify/functions/deleteDocument/node_modules/@smithy/signature-v4/dist-cjs/constants.js b/amplify/functions/deleteDocument/node_modules/@smithy/signature-v4/dist-cjs/constants.js new file mode 100644 index 0000000..532e610 --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@smithy/signature-v4/dist-cjs/constants.js @@ -0,0 +1 @@ +module.exports = require("./index.js"); \ No newline at end of file diff --git a/amplify/functions/deleteDocument/node_modules/@smithy/signature-v4/dist-cjs/credentialDerivation.js b/amplify/functions/deleteDocument/node_modules/@smithy/signature-v4/dist-cjs/credentialDerivation.js new file mode 100644 index 
0000000..532e610 --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@smithy/signature-v4/dist-cjs/credentialDerivation.js @@ -0,0 +1 @@ +module.exports = require("./index.js"); \ No newline at end of file diff --git a/amplify/functions/deleteDocument/node_modules/@smithy/signature-v4/dist-cjs/getCanonicalHeaders.js b/amplify/functions/deleteDocument/node_modules/@smithy/signature-v4/dist-cjs/getCanonicalHeaders.js new file mode 100644 index 0000000..532e610 --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@smithy/signature-v4/dist-cjs/getCanonicalHeaders.js @@ -0,0 +1 @@ +module.exports = require("./index.js"); \ No newline at end of file diff --git a/amplify/functions/deleteDocument/node_modules/@smithy/signature-v4/dist-cjs/getCanonicalQuery.js b/amplify/functions/deleteDocument/node_modules/@smithy/signature-v4/dist-cjs/getCanonicalQuery.js new file mode 100644 index 0000000..532e610 --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@smithy/signature-v4/dist-cjs/getCanonicalQuery.js @@ -0,0 +1 @@ +module.exports = require("./index.js"); \ No newline at end of file diff --git a/amplify/functions/deleteDocument/node_modules/@smithy/signature-v4/dist-cjs/getPayloadHash.js b/amplify/functions/deleteDocument/node_modules/@smithy/signature-v4/dist-cjs/getPayloadHash.js new file mode 100644 index 0000000..532e610 --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@smithy/signature-v4/dist-cjs/getPayloadHash.js @@ -0,0 +1 @@ +module.exports = require("./index.js"); \ No newline at end of file diff --git a/amplify/functions/deleteDocument/node_modules/@smithy/signature-v4/dist-cjs/headerUtil.js b/amplify/functions/deleteDocument/node_modules/@smithy/signature-v4/dist-cjs/headerUtil.js new file mode 100644 index 0000000..532e610 --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@smithy/signature-v4/dist-cjs/headerUtil.js @@ -0,0 +1 @@ +module.exports = require("./index.js"); \ No newline at end 
of file diff --git a/amplify/functions/deleteDocument/node_modules/@smithy/signature-v4/dist-cjs/index.js b/amplify/functions/deleteDocument/node_modules/@smithy/signature-v4/dist-cjs/index.js new file mode 100644 index 0000000..41f93bb --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@smithy/signature-v4/dist-cjs/index.js @@ -0,0 +1,682 @@ +var __defProp = Object.defineProperty; +var __getOwnPropDesc = Object.getOwnPropertyDescriptor; +var __getOwnPropNames = Object.getOwnPropertyNames; +var __hasOwnProp = Object.prototype.hasOwnProperty; +var __name = (target, value) => __defProp(target, "name", { value, configurable: true }); +var __export = (target, all) => { + for (var name in all) + __defProp(target, name, { get: all[name], enumerable: true }); +}; +var __copyProps = (to, from, except, desc) => { + if (from && typeof from === "object" || typeof from === "function") { + for (let key of __getOwnPropNames(from)) + if (!__hasOwnProp.call(to, key) && key !== except) + __defProp(to, key, { get: () => from[key], enumerable: !(desc = __getOwnPropDesc(from, key)) || desc.enumerable }); + } + return to; +}; +var __toCommonJS = (mod) => __copyProps(__defProp({}, "__esModule", { value: true }), mod); + +// src/index.ts +var src_exports = {}; +__export(src_exports, { + ALGORITHM_IDENTIFIER: () => ALGORITHM_IDENTIFIER, + ALGORITHM_IDENTIFIER_V4A: () => ALGORITHM_IDENTIFIER_V4A, + ALGORITHM_QUERY_PARAM: () => ALGORITHM_QUERY_PARAM, + ALWAYS_UNSIGNABLE_HEADERS: () => ALWAYS_UNSIGNABLE_HEADERS, + AMZ_DATE_HEADER: () => AMZ_DATE_HEADER, + AMZ_DATE_QUERY_PARAM: () => AMZ_DATE_QUERY_PARAM, + AUTH_HEADER: () => AUTH_HEADER, + CREDENTIAL_QUERY_PARAM: () => CREDENTIAL_QUERY_PARAM, + DATE_HEADER: () => DATE_HEADER, + EVENT_ALGORITHM_IDENTIFIER: () => EVENT_ALGORITHM_IDENTIFIER, + EXPIRES_QUERY_PARAM: () => EXPIRES_QUERY_PARAM, + GENERATED_HEADERS: () => GENERATED_HEADERS, + HOST_HEADER: () => HOST_HEADER, + KEY_TYPE_IDENTIFIER: () => KEY_TYPE_IDENTIFIER, + 
MAX_CACHE_SIZE: () => MAX_CACHE_SIZE, + MAX_PRESIGNED_TTL: () => MAX_PRESIGNED_TTL, + PROXY_HEADER_PATTERN: () => PROXY_HEADER_PATTERN, + REGION_SET_PARAM: () => REGION_SET_PARAM, + SEC_HEADER_PATTERN: () => SEC_HEADER_PATTERN, + SHA256_HEADER: () => SHA256_HEADER, + SIGNATURE_HEADER: () => SIGNATURE_HEADER, + SIGNATURE_QUERY_PARAM: () => SIGNATURE_QUERY_PARAM, + SIGNED_HEADERS_QUERY_PARAM: () => SIGNED_HEADERS_QUERY_PARAM, + SignatureV4: () => SignatureV4, + SignatureV4Base: () => SignatureV4Base, + TOKEN_HEADER: () => TOKEN_HEADER, + TOKEN_QUERY_PARAM: () => TOKEN_QUERY_PARAM, + UNSIGNABLE_PATTERNS: () => UNSIGNABLE_PATTERNS, + UNSIGNED_PAYLOAD: () => UNSIGNED_PAYLOAD, + clearCredentialCache: () => clearCredentialCache, + createScope: () => createScope, + getCanonicalHeaders: () => getCanonicalHeaders, + getCanonicalQuery: () => getCanonicalQuery, + getPayloadHash: () => getPayloadHash, + getSigningKey: () => getSigningKey, + hasHeader: () => hasHeader, + moveHeadersToQuery: () => moveHeadersToQuery, + prepareRequest: () => prepareRequest, + signatureV4aContainer: () => signatureV4aContainer +}); +module.exports = __toCommonJS(src_exports); + +// src/SignatureV4.ts + +var import_util_utf85 = require("@smithy/util-utf8"); + +// src/constants.ts +var ALGORITHM_QUERY_PARAM = "X-Amz-Algorithm"; +var CREDENTIAL_QUERY_PARAM = "X-Amz-Credential"; +var AMZ_DATE_QUERY_PARAM = "X-Amz-Date"; +var SIGNED_HEADERS_QUERY_PARAM = "X-Amz-SignedHeaders"; +var EXPIRES_QUERY_PARAM = "X-Amz-Expires"; +var SIGNATURE_QUERY_PARAM = "X-Amz-Signature"; +var TOKEN_QUERY_PARAM = "X-Amz-Security-Token"; +var REGION_SET_PARAM = "X-Amz-Region-Set"; +var AUTH_HEADER = "authorization"; +var AMZ_DATE_HEADER = AMZ_DATE_QUERY_PARAM.toLowerCase(); +var DATE_HEADER = "date"; +var GENERATED_HEADERS = [AUTH_HEADER, AMZ_DATE_HEADER, DATE_HEADER]; +var SIGNATURE_HEADER = SIGNATURE_QUERY_PARAM.toLowerCase(); +var SHA256_HEADER = "x-amz-content-sha256"; +var TOKEN_HEADER = TOKEN_QUERY_PARAM.toLowerCase(); 
+var HOST_HEADER = "host"; +var ALWAYS_UNSIGNABLE_HEADERS = { + authorization: true, + "cache-control": true, + connection: true, + expect: true, + from: true, + "keep-alive": true, + "max-forwards": true, + pragma: true, + referer: true, + te: true, + trailer: true, + "transfer-encoding": true, + upgrade: true, + "user-agent": true, + "x-amzn-trace-id": true +}; +var PROXY_HEADER_PATTERN = /^proxy-/; +var SEC_HEADER_PATTERN = /^sec-/; +var UNSIGNABLE_PATTERNS = [/^proxy-/i, /^sec-/i]; +var ALGORITHM_IDENTIFIER = "AWS4-HMAC-SHA256"; +var ALGORITHM_IDENTIFIER_V4A = "AWS4-ECDSA-P256-SHA256"; +var EVENT_ALGORITHM_IDENTIFIER = "AWS4-HMAC-SHA256-PAYLOAD"; +var UNSIGNED_PAYLOAD = "UNSIGNED-PAYLOAD"; +var MAX_CACHE_SIZE = 50; +var KEY_TYPE_IDENTIFIER = "aws4_request"; +var MAX_PRESIGNED_TTL = 60 * 60 * 24 * 7; + +// src/credentialDerivation.ts +var import_util_hex_encoding = require("@smithy/util-hex-encoding"); +var import_util_utf8 = require("@smithy/util-utf8"); +var signingKeyCache = {}; +var cacheQueue = []; +var createScope = /* @__PURE__ */ __name((shortDate, region, service) => `${shortDate}/${region}/${service}/${KEY_TYPE_IDENTIFIER}`, "createScope"); +var getSigningKey = /* @__PURE__ */ __name(async (sha256Constructor, credentials, shortDate, region, service) => { + const credsHash = await hmac(sha256Constructor, credentials.secretAccessKey, credentials.accessKeyId); + const cacheKey = `${shortDate}:${region}:${service}:${(0, import_util_hex_encoding.toHex)(credsHash)}:${credentials.sessionToken}`; + if (cacheKey in signingKeyCache) { + return signingKeyCache[cacheKey]; + } + cacheQueue.push(cacheKey); + while (cacheQueue.length > MAX_CACHE_SIZE) { + delete signingKeyCache[cacheQueue.shift()]; + } + let key = `AWS4${credentials.secretAccessKey}`; + for (const signable of [shortDate, region, service, KEY_TYPE_IDENTIFIER]) { + key = await hmac(sha256Constructor, key, signable); + } + return signingKeyCache[cacheKey] = key; +}, "getSigningKey"); +var 
clearCredentialCache = /* @__PURE__ */ __name(() => { + cacheQueue.length = 0; + Object.keys(signingKeyCache).forEach((cacheKey) => { + delete signingKeyCache[cacheKey]; + }); +}, "clearCredentialCache"); +var hmac = /* @__PURE__ */ __name((ctor, secret, data) => { + const hash = new ctor(secret); + hash.update((0, import_util_utf8.toUint8Array)(data)); + return hash.digest(); +}, "hmac"); + +// src/getCanonicalHeaders.ts +var getCanonicalHeaders = /* @__PURE__ */ __name(({ headers }, unsignableHeaders, signableHeaders) => { + const canonical = {}; + for (const headerName of Object.keys(headers).sort()) { + if (headers[headerName] == void 0) { + continue; + } + const canonicalHeaderName = headerName.toLowerCase(); + if (canonicalHeaderName in ALWAYS_UNSIGNABLE_HEADERS || unsignableHeaders?.has(canonicalHeaderName) || PROXY_HEADER_PATTERN.test(canonicalHeaderName) || SEC_HEADER_PATTERN.test(canonicalHeaderName)) { + if (!signableHeaders || signableHeaders && !signableHeaders.has(canonicalHeaderName)) { + continue; + } + } + canonical[canonicalHeaderName] = headers[headerName].trim().replace(/\s+/g, " "); + } + return canonical; +}, "getCanonicalHeaders"); + +// src/getPayloadHash.ts +var import_is_array_buffer = require("@smithy/is-array-buffer"); + +var import_util_utf82 = require("@smithy/util-utf8"); +var getPayloadHash = /* @__PURE__ */ __name(async ({ headers, body }, hashConstructor) => { + for (const headerName of Object.keys(headers)) { + if (headerName.toLowerCase() === SHA256_HEADER) { + return headers[headerName]; + } + } + if (body == void 0) { + return "e3b0c44298fc1c149afbf4c8996fb92427ae41e4649b934ca495991b7852b855"; + } else if (typeof body === "string" || ArrayBuffer.isView(body) || (0, import_is_array_buffer.isArrayBuffer)(body)) { + const hashCtor = new hashConstructor(); + hashCtor.update((0, import_util_utf82.toUint8Array)(body)); + return (0, import_util_hex_encoding.toHex)(await hashCtor.digest()); + } + return UNSIGNED_PAYLOAD; +}, 
"getPayloadHash"); + +// src/HeaderFormatter.ts + +var import_util_utf83 = require("@smithy/util-utf8"); +var HeaderFormatter = class { + static { + __name(this, "HeaderFormatter"); + } + format(headers) { + const chunks = []; + for (const headerName of Object.keys(headers)) { + const bytes = (0, import_util_utf83.fromUtf8)(headerName); + chunks.push(Uint8Array.from([bytes.byteLength]), bytes, this.formatHeaderValue(headers[headerName])); + } + const out = new Uint8Array(chunks.reduce((carry, bytes) => carry + bytes.byteLength, 0)); + let position = 0; + for (const chunk of chunks) { + out.set(chunk, position); + position += chunk.byteLength; + } + return out; + } + formatHeaderValue(header) { + switch (header.type) { + case "boolean": + return Uint8Array.from([header.value ? 0 /* boolTrue */ : 1 /* boolFalse */]); + case "byte": + return Uint8Array.from([2 /* byte */, header.value]); + case "short": + const shortView = new DataView(new ArrayBuffer(3)); + shortView.setUint8(0, 3 /* short */); + shortView.setInt16(1, header.value, false); + return new Uint8Array(shortView.buffer); + case "integer": + const intView = new DataView(new ArrayBuffer(5)); + intView.setUint8(0, 4 /* integer */); + intView.setInt32(1, header.value, false); + return new Uint8Array(intView.buffer); + case "long": + const longBytes = new Uint8Array(9); + longBytes[0] = 5 /* long */; + longBytes.set(header.value.bytes, 1); + return longBytes; + case "binary": + const binView = new DataView(new ArrayBuffer(3 + header.value.byteLength)); + binView.setUint8(0, 6 /* byteArray */); + binView.setUint16(1, header.value.byteLength, false); + const binBytes = new Uint8Array(binView.buffer); + binBytes.set(header.value, 3); + return binBytes; + case "string": + const utf8Bytes = (0, import_util_utf83.fromUtf8)(header.value); + const strView = new DataView(new ArrayBuffer(3 + utf8Bytes.byteLength)); + strView.setUint8(0, 7 /* string */); + strView.setUint16(1, utf8Bytes.byteLength, false); + const 
strBytes = new Uint8Array(strView.buffer); + strBytes.set(utf8Bytes, 3); + return strBytes; + case "timestamp": + const tsBytes = new Uint8Array(9); + tsBytes[0] = 8 /* timestamp */; + tsBytes.set(Int64.fromNumber(header.value.valueOf()).bytes, 1); + return tsBytes; + case "uuid": + if (!UUID_PATTERN.test(header.value)) { + throw new Error(`Invalid UUID received: ${header.value}`); + } + const uuidBytes = new Uint8Array(17); + uuidBytes[0] = 9 /* uuid */; + uuidBytes.set((0, import_util_hex_encoding.fromHex)(header.value.replace(/\-/g, "")), 1); + return uuidBytes; + } + } +}; +var UUID_PATTERN = /^[a-f0-9]{8}-[a-f0-9]{4}-[a-f0-9]{4}-[a-f0-9]{4}-[a-f0-9]{12}$/; +var Int64 = class _Int64 { + constructor(bytes) { + this.bytes = bytes; + if (bytes.byteLength !== 8) { + throw new Error("Int64 buffers must be exactly 8 bytes"); + } + } + static { + __name(this, "Int64"); + } + static fromNumber(number) { + if (number > 9223372036854776e3 || number < -9223372036854776e3) { + throw new Error(`${number} is too large (or, if negative, too small) to represent as an Int64`); + } + const bytes = new Uint8Array(8); + for (let i = 7, remaining = Math.abs(Math.round(number)); i > -1 && remaining > 0; i--, remaining /= 256) { + bytes[i] = remaining; + } + if (number < 0) { + negate(bytes); + } + return new _Int64(bytes); + } + /** + * Called implicitly by infix arithmetic operators. + */ + valueOf() { + const bytes = this.bytes.slice(0); + const negative = bytes[0] & 128; + if (negative) { + negate(bytes); + } + return parseInt((0, import_util_hex_encoding.toHex)(bytes), 16) * (negative ? 
-1 : 1); + } + toString() { + return String(this.valueOf()); + } +}; +function negate(bytes) { + for (let i = 0; i < 8; i++) { + bytes[i] ^= 255; + } + for (let i = 7; i > -1; i--) { + bytes[i]++; + if (bytes[i] !== 0) + break; + } +} +__name(negate, "negate"); + +// src/headerUtil.ts +var hasHeader = /* @__PURE__ */ __name((soughtHeader, headers) => { + soughtHeader = soughtHeader.toLowerCase(); + for (const headerName of Object.keys(headers)) { + if (soughtHeader === headerName.toLowerCase()) { + return true; + } + } + return false; +}, "hasHeader"); + +// src/moveHeadersToQuery.ts +var import_protocol_http = require("@smithy/protocol-http"); +var moveHeadersToQuery = /* @__PURE__ */ __name((request, options = {}) => { + const { headers, query = {} } = import_protocol_http.HttpRequest.clone(request); + for (const name of Object.keys(headers)) { + const lname = name.toLowerCase(); + if (lname.slice(0, 6) === "x-amz-" && !options.unhoistableHeaders?.has(lname) || options.hoistableHeaders?.has(lname)) { + query[name] = headers[name]; + delete headers[name]; + } + } + return { + ...request, + headers, + query + }; +}, "moveHeadersToQuery"); + +// src/prepareRequest.ts + +var prepareRequest = /* @__PURE__ */ __name((request) => { + request = import_protocol_http.HttpRequest.clone(request); + for (const headerName of Object.keys(request.headers)) { + if (GENERATED_HEADERS.indexOf(headerName.toLowerCase()) > -1) { + delete request.headers[headerName]; + } + } + return request; +}, "prepareRequest"); + +// src/SignatureV4Base.ts + +var import_util_middleware = require("@smithy/util-middleware"); + +var import_util_utf84 = require("@smithy/util-utf8"); + +// src/getCanonicalQuery.ts +var import_util_uri_escape = require("@smithy/util-uri-escape"); +var getCanonicalQuery = /* @__PURE__ */ __name(({ query = {} }) => { + const keys = []; + const serialized = {}; + for (const key of Object.keys(query)) { + if (key.toLowerCase() === SIGNATURE_HEADER) { + continue; + } + const 
encodedKey = (0, import_util_uri_escape.escapeUri)(key); + keys.push(encodedKey); + const value = query[key]; + if (typeof value === "string") { + serialized[encodedKey] = `${encodedKey}=${(0, import_util_uri_escape.escapeUri)(value)}`; + } else if (Array.isArray(value)) { + serialized[encodedKey] = value.slice(0).reduce((encoded, value2) => encoded.concat([`${encodedKey}=${(0, import_util_uri_escape.escapeUri)(value2)}`]), []).sort().join("&"); + } + } + return keys.sort().map((key) => serialized[key]).filter((serialized2) => serialized2).join("&"); +}, "getCanonicalQuery"); + +// src/utilDate.ts +var iso8601 = /* @__PURE__ */ __name((time) => toDate(time).toISOString().replace(/\.\d{3}Z$/, "Z"), "iso8601"); +var toDate = /* @__PURE__ */ __name((time) => { + if (typeof time === "number") { + return new Date(time * 1e3); + } + if (typeof time === "string") { + if (Number(time)) { + return new Date(Number(time) * 1e3); + } + return new Date(time); + } + return time; +}, "toDate"); + +// src/SignatureV4Base.ts +var SignatureV4Base = class { + static { + __name(this, "SignatureV4Base"); + } + constructor({ + applyChecksum, + credentials, + region, + service, + sha256, + uriEscapePath = true + }) { + this.service = service; + this.sha256 = sha256; + this.uriEscapePath = uriEscapePath; + this.applyChecksum = typeof applyChecksum === "boolean" ? 
applyChecksum : true; + this.regionProvider = (0, import_util_middleware.normalizeProvider)(region); + this.credentialProvider = (0, import_util_middleware.normalizeProvider)(credentials); + } + createCanonicalRequest(request, canonicalHeaders, payloadHash) { + const sortedHeaders = Object.keys(canonicalHeaders).sort(); + return `${request.method} +${this.getCanonicalPath(request)} +${getCanonicalQuery(request)} +${sortedHeaders.map((name) => `${name}:${canonicalHeaders[name]}`).join("\n")} + +${sortedHeaders.join(";")} +${payloadHash}`; + } + async createStringToSign(longDate, credentialScope, canonicalRequest, algorithmIdentifier) { + const hash = new this.sha256(); + hash.update((0, import_util_utf84.toUint8Array)(canonicalRequest)); + const hashedRequest = await hash.digest(); + return `${algorithmIdentifier} +${longDate} +${credentialScope} +${(0, import_util_hex_encoding.toHex)(hashedRequest)}`; + } + getCanonicalPath({ path }) { + if (this.uriEscapePath) { + const normalizedPathSegments = []; + for (const pathSegment of path.split("/")) { + if (pathSegment?.length === 0) + continue; + if (pathSegment === ".") + continue; + if (pathSegment === "..") { + normalizedPathSegments.pop(); + } else { + normalizedPathSegments.push(pathSegment); + } + } + const normalizedPath = `${path?.startsWith("/") ? "/" : ""}${normalizedPathSegments.join("/")}${normalizedPathSegments.length > 0 && path?.endsWith("/") ? 
"/" : ""}`; + const doubleEncoded = (0, import_util_uri_escape.escapeUri)(normalizedPath); + return doubleEncoded.replace(/%2F/g, "/"); + } + return path; + } + validateResolvedCredentials(credentials) { + if (typeof credentials !== "object" || // @ts-expect-error: Property 'accessKeyId' does not exist on type 'object'.ts(2339) + typeof credentials.accessKeyId !== "string" || // @ts-expect-error: Property 'secretAccessKey' does not exist on type 'object'.ts(2339) + typeof credentials.secretAccessKey !== "string") { + throw new Error("Resolved credential object is not valid"); + } + } + formatDate(now) { + const longDate = iso8601(now).replace(/[\-:]/g, ""); + return { + longDate, + shortDate: longDate.slice(0, 8) + }; + } + getCanonicalHeaderList(headers) { + return Object.keys(headers).sort().join(";"); + } +}; + +// src/SignatureV4.ts +var SignatureV4 = class extends SignatureV4Base { + constructor({ + applyChecksum, + credentials, + region, + service, + sha256, + uriEscapePath = true + }) { + super({ + applyChecksum, + credentials, + region, + service, + sha256, + uriEscapePath + }); + this.headerFormatter = new HeaderFormatter(); + } + static { + __name(this, "SignatureV4"); + } + async presign(originalRequest, options = {}) { + const { + signingDate = /* @__PURE__ */ new Date(), + expiresIn = 3600, + unsignableHeaders, + unhoistableHeaders, + signableHeaders, + hoistableHeaders, + signingRegion, + signingService + } = options; + const credentials = await this.credentialProvider(); + this.validateResolvedCredentials(credentials); + const region = signingRegion ?? await this.regionProvider(); + const { longDate, shortDate } = this.formatDate(signingDate); + if (expiresIn > MAX_PRESIGNED_TTL) { + return Promise.reject( + "Signature version 4 presigned URLs must have an expiration date less than one week in the future" + ); + } + const scope = createScope(shortDate, region, signingService ?? 
this.service); + const request = moveHeadersToQuery(prepareRequest(originalRequest), { unhoistableHeaders, hoistableHeaders }); + if (credentials.sessionToken) { + request.query[TOKEN_QUERY_PARAM] = credentials.sessionToken; + } + request.query[ALGORITHM_QUERY_PARAM] = ALGORITHM_IDENTIFIER; + request.query[CREDENTIAL_QUERY_PARAM] = `${credentials.accessKeyId}/${scope}`; + request.query[AMZ_DATE_QUERY_PARAM] = longDate; + request.query[EXPIRES_QUERY_PARAM] = expiresIn.toString(10); + const canonicalHeaders = getCanonicalHeaders(request, unsignableHeaders, signableHeaders); + request.query[SIGNED_HEADERS_QUERY_PARAM] = this.getCanonicalHeaderList(canonicalHeaders); + request.query[SIGNATURE_QUERY_PARAM] = await this.getSignature( + longDate, + scope, + this.getSigningKey(credentials, region, shortDate, signingService), + this.createCanonicalRequest(request, canonicalHeaders, await getPayloadHash(originalRequest, this.sha256)) + ); + return request; + } + async sign(toSign, options) { + if (typeof toSign === "string") { + return this.signString(toSign, options); + } else if (toSign.headers && toSign.payload) { + return this.signEvent(toSign, options); + } else if (toSign.message) { + return this.signMessage(toSign, options); + } else { + return this.signRequest(toSign, options); + } + } + async signEvent({ headers, payload }, { signingDate = /* @__PURE__ */ new Date(), priorSignature, signingRegion, signingService }) { + const region = signingRegion ?? await this.regionProvider(); + const { shortDate, longDate } = this.formatDate(signingDate); + const scope = createScope(shortDate, region, signingService ?? 
this.service); + const hashedPayload = await getPayloadHash({ headers: {}, body: payload }, this.sha256); + const hash = new this.sha256(); + hash.update(headers); + const hashedHeaders = (0, import_util_hex_encoding.toHex)(await hash.digest()); + const stringToSign = [ + EVENT_ALGORITHM_IDENTIFIER, + longDate, + scope, + priorSignature, + hashedHeaders, + hashedPayload + ].join("\n"); + return this.signString(stringToSign, { signingDate, signingRegion: region, signingService }); + } + async signMessage(signableMessage, { signingDate = /* @__PURE__ */ new Date(), signingRegion, signingService }) { + const promise = this.signEvent( + { + headers: this.headerFormatter.format(signableMessage.message.headers), + payload: signableMessage.message.body + }, + { + signingDate, + signingRegion, + signingService, + priorSignature: signableMessage.priorSignature + } + ); + return promise.then((signature) => { + return { message: signableMessage.message, signature }; + }); + } + async signString(stringToSign, { signingDate = /* @__PURE__ */ new Date(), signingRegion, signingService } = {}) { + const credentials = await this.credentialProvider(); + this.validateResolvedCredentials(credentials); + const region = signingRegion ?? await this.regionProvider(); + const { shortDate } = this.formatDate(signingDate); + const hash = new this.sha256(await this.getSigningKey(credentials, region, shortDate, signingService)); + hash.update((0, import_util_utf85.toUint8Array)(stringToSign)); + return (0, import_util_hex_encoding.toHex)(await hash.digest()); + } + async signRequest(requestToSign, { + signingDate = /* @__PURE__ */ new Date(), + signableHeaders, + unsignableHeaders, + signingRegion, + signingService + } = {}) { + const credentials = await this.credentialProvider(); + this.validateResolvedCredentials(credentials); + const region = signingRegion ?? 
await this.regionProvider(); + const request = prepareRequest(requestToSign); + const { longDate, shortDate } = this.formatDate(signingDate); + const scope = createScope(shortDate, region, signingService ?? this.service); + request.headers[AMZ_DATE_HEADER] = longDate; + if (credentials.sessionToken) { + request.headers[TOKEN_HEADER] = credentials.sessionToken; + } + const payloadHash = await getPayloadHash(request, this.sha256); + if (!hasHeader(SHA256_HEADER, request.headers) && this.applyChecksum) { + request.headers[SHA256_HEADER] = payloadHash; + } + const canonicalHeaders = getCanonicalHeaders(request, unsignableHeaders, signableHeaders); + const signature = await this.getSignature( + longDate, + scope, + this.getSigningKey(credentials, region, shortDate, signingService), + this.createCanonicalRequest(request, canonicalHeaders, payloadHash) + ); + request.headers[AUTH_HEADER] = `${ALGORITHM_IDENTIFIER} Credential=${credentials.accessKeyId}/${scope}, SignedHeaders=${this.getCanonicalHeaderList(canonicalHeaders)}, Signature=${signature}`; + return request; + } + async getSignature(longDate, credentialScope, keyPromise, canonicalRequest) { + const stringToSign = await this.createStringToSign( + longDate, + credentialScope, + canonicalRequest, + ALGORITHM_IDENTIFIER + ); + const hash = new this.sha256(await keyPromise); + hash.update((0, import_util_utf85.toUint8Array)(stringToSign)); + return (0, import_util_hex_encoding.toHex)(await hash.digest()); + } + getSigningKey(credentials, region, shortDate, service) { + return getSigningKey(this.sha256, credentials, shortDate, region, service || this.service); + } +}; + +// src/signature-v4a-container.ts +var signatureV4aContainer = { + SignatureV4a: null +}; +// Annotate the CommonJS export names for ESM import in node: + +0 && (module.exports = { + getCanonicalHeaders, + getCanonicalQuery, + getPayloadHash, + moveHeadersToQuery, + prepareRequest, + SignatureV4Base, + hasHeader, + SignatureV4, + ALGORITHM_QUERY_PARAM, 
+ CREDENTIAL_QUERY_PARAM, + AMZ_DATE_QUERY_PARAM, + SIGNED_HEADERS_QUERY_PARAM, + EXPIRES_QUERY_PARAM, + SIGNATURE_QUERY_PARAM, + TOKEN_QUERY_PARAM, + REGION_SET_PARAM, + AUTH_HEADER, + AMZ_DATE_HEADER, + DATE_HEADER, + GENERATED_HEADERS, + SIGNATURE_HEADER, + SHA256_HEADER, + TOKEN_HEADER, + HOST_HEADER, + ALWAYS_UNSIGNABLE_HEADERS, + PROXY_HEADER_PATTERN, + SEC_HEADER_PATTERN, + UNSIGNABLE_PATTERNS, + ALGORITHM_IDENTIFIER, + ALGORITHM_IDENTIFIER_V4A, + EVENT_ALGORITHM_IDENTIFIER, + UNSIGNED_PAYLOAD, + MAX_CACHE_SIZE, + KEY_TYPE_IDENTIFIER, + MAX_PRESIGNED_TTL, + createScope, + getSigningKey, + clearCredentialCache, + signatureV4aContainer +}); + diff --git a/amplify/functions/deleteDocument/node_modules/@smithy/signature-v4/dist-cjs/moveHeadersToQuery.js b/amplify/functions/deleteDocument/node_modules/@smithy/signature-v4/dist-cjs/moveHeadersToQuery.js new file mode 100644 index 0000000..532e610 --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@smithy/signature-v4/dist-cjs/moveHeadersToQuery.js @@ -0,0 +1 @@ +module.exports = require("./index.js"); \ No newline at end of file diff --git a/amplify/functions/deleteDocument/node_modules/@smithy/signature-v4/dist-cjs/prepareRequest.js b/amplify/functions/deleteDocument/node_modules/@smithy/signature-v4/dist-cjs/prepareRequest.js new file mode 100644 index 0000000..532e610 --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@smithy/signature-v4/dist-cjs/prepareRequest.js @@ -0,0 +1 @@ +module.exports = require("./index.js"); \ No newline at end of file diff --git a/amplify/functions/deleteDocument/node_modules/@smithy/signature-v4/dist-cjs/signature-v4a-container.js b/amplify/functions/deleteDocument/node_modules/@smithy/signature-v4/dist-cjs/signature-v4a-container.js new file mode 100644 index 0000000..532e610 --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@smithy/signature-v4/dist-cjs/signature-v4a-container.js @@ -0,0 +1 @@ +module.exports = require("./index.js"); 
\ No newline at end of file diff --git a/amplify/functions/deleteDocument/node_modules/@smithy/signature-v4/dist-cjs/suite.fixture.js b/amplify/functions/deleteDocument/node_modules/@smithy/signature-v4/dist-cjs/suite.fixture.js new file mode 100644 index 0000000..532e610 --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@smithy/signature-v4/dist-cjs/suite.fixture.js @@ -0,0 +1 @@ +module.exports = require("./index.js"); \ No newline at end of file diff --git a/amplify/functions/deleteDocument/node_modules/@smithy/signature-v4/dist-cjs/utilDate.js b/amplify/functions/deleteDocument/node_modules/@smithy/signature-v4/dist-cjs/utilDate.js new file mode 100644 index 0000000..532e610 --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@smithy/signature-v4/dist-cjs/utilDate.js @@ -0,0 +1 @@ +module.exports = require("./index.js"); \ No newline at end of file diff --git a/amplify/functions/deleteDocument/node_modules/@smithy/signature-v4/dist-es/HeaderFormatter.js b/amplify/functions/deleteDocument/node_modules/@smithy/signature-v4/dist-es/HeaderFormatter.js new file mode 100644 index 0000000..4edc4b4 --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@smithy/signature-v4/dist-es/HeaderFormatter.js @@ -0,0 +1,125 @@ +import { fromHex, toHex } from "@smithy/util-hex-encoding"; +import { fromUtf8 } from "@smithy/util-utf8"; +export class HeaderFormatter { + format(headers) { + const chunks = []; + for (const headerName of Object.keys(headers)) { + const bytes = fromUtf8(headerName); + chunks.push(Uint8Array.from([bytes.byteLength]), bytes, this.formatHeaderValue(headers[headerName])); + } + const out = new Uint8Array(chunks.reduce((carry, bytes) => carry + bytes.byteLength, 0)); + let position = 0; + for (const chunk of chunks) { + out.set(chunk, position); + position += chunk.byteLength; + } + return out; + } + formatHeaderValue(header) { + switch (header.type) { + case "boolean": + return Uint8Array.from([header.value ? 
0 : 1]); + case "byte": + return Uint8Array.from([2, header.value]); + case "short": + const shortView = new DataView(new ArrayBuffer(3)); + shortView.setUint8(0, 3); + shortView.setInt16(1, header.value, false); + return new Uint8Array(shortView.buffer); + case "integer": + const intView = new DataView(new ArrayBuffer(5)); + intView.setUint8(0, 4); + intView.setInt32(1, header.value, false); + return new Uint8Array(intView.buffer); + case "long": + const longBytes = new Uint8Array(9); + longBytes[0] = 5; + longBytes.set(header.value.bytes, 1); + return longBytes; + case "binary": + const binView = new DataView(new ArrayBuffer(3 + header.value.byteLength)); + binView.setUint8(0, 6); + binView.setUint16(1, header.value.byteLength, false); + const binBytes = new Uint8Array(binView.buffer); + binBytes.set(header.value, 3); + return binBytes; + case "string": + const utf8Bytes = fromUtf8(header.value); + const strView = new DataView(new ArrayBuffer(3 + utf8Bytes.byteLength)); + strView.setUint8(0, 7); + strView.setUint16(1, utf8Bytes.byteLength, false); + const strBytes = new Uint8Array(strView.buffer); + strBytes.set(utf8Bytes, 3); + return strBytes; + case "timestamp": + const tsBytes = new Uint8Array(9); + tsBytes[0] = 8; + tsBytes.set(Int64.fromNumber(header.value.valueOf()).bytes, 1); + return tsBytes; + case "uuid": + if (!UUID_PATTERN.test(header.value)) { + throw new Error(`Invalid UUID received: ${header.value}`); + } + const uuidBytes = new Uint8Array(17); + uuidBytes[0] = 9; + uuidBytes.set(fromHex(header.value.replace(/\-/g, "")), 1); + return uuidBytes; + } + } +} +var HEADER_VALUE_TYPE; +(function (HEADER_VALUE_TYPE) { + HEADER_VALUE_TYPE[HEADER_VALUE_TYPE["boolTrue"] = 0] = "boolTrue"; + HEADER_VALUE_TYPE[HEADER_VALUE_TYPE["boolFalse"] = 1] = "boolFalse"; + HEADER_VALUE_TYPE[HEADER_VALUE_TYPE["byte"] = 2] = "byte"; + HEADER_VALUE_TYPE[HEADER_VALUE_TYPE["short"] = 3] = "short"; + HEADER_VALUE_TYPE[HEADER_VALUE_TYPE["integer"] = 4] = "integer"; + 
HEADER_VALUE_TYPE[HEADER_VALUE_TYPE["long"] = 5] = "long"; + HEADER_VALUE_TYPE[HEADER_VALUE_TYPE["byteArray"] = 6] = "byteArray"; + HEADER_VALUE_TYPE[HEADER_VALUE_TYPE["string"] = 7] = "string"; + HEADER_VALUE_TYPE[HEADER_VALUE_TYPE["timestamp"] = 8] = "timestamp"; + HEADER_VALUE_TYPE[HEADER_VALUE_TYPE["uuid"] = 9] = "uuid"; +})(HEADER_VALUE_TYPE || (HEADER_VALUE_TYPE = {})); +const UUID_PATTERN = /^[a-f0-9]{8}-[a-f0-9]{4}-[a-f0-9]{4}-[a-f0-9]{4}-[a-f0-9]{12}$/; +export class Int64 { + constructor(bytes) { + this.bytes = bytes; + if (bytes.byteLength !== 8) { + throw new Error("Int64 buffers must be exactly 8 bytes"); + } + } + static fromNumber(number) { + if (number > 9223372036854776000 || number < -9223372036854776000) { + throw new Error(`${number} is too large (or, if negative, too small) to represent as an Int64`); + } + const bytes = new Uint8Array(8); + for (let i = 7, remaining = Math.abs(Math.round(number)); i > -1 && remaining > 0; i--, remaining /= 256) { + bytes[i] = remaining; + } + if (number < 0) { + negate(bytes); + } + return new Int64(bytes); + } + valueOf() { + const bytes = this.bytes.slice(0); + const negative = bytes[0] & 0b10000000; + if (negative) { + negate(bytes); + } + return parseInt(toHex(bytes), 16) * (negative ? 
-1 : 1); + } + toString() { + return String(this.valueOf()); + } +} +function negate(bytes) { + for (let i = 0; i < 8; i++) { + bytes[i] ^= 0xff; + } + for (let i = 7; i > -1; i--) { + bytes[i]++; + if (bytes[i] !== 0) + break; + } +} diff --git a/amplify/functions/deleteDocument/node_modules/@smithy/signature-v4/dist-es/SignatureV4.js b/amplify/functions/deleteDocument/node_modules/@smithy/signature-v4/dist-es/SignatureV4.js new file mode 100644 index 0000000..d149132 --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@smithy/signature-v4/dist-es/SignatureV4.js @@ -0,0 +1,135 @@ +import { toHex } from "@smithy/util-hex-encoding"; +import { toUint8Array } from "@smithy/util-utf8"; +import { ALGORITHM_IDENTIFIER, ALGORITHM_QUERY_PARAM, AMZ_DATE_HEADER, AMZ_DATE_QUERY_PARAM, AUTH_HEADER, CREDENTIAL_QUERY_PARAM, EVENT_ALGORITHM_IDENTIFIER, EXPIRES_QUERY_PARAM, MAX_PRESIGNED_TTL, SHA256_HEADER, SIGNATURE_QUERY_PARAM, SIGNED_HEADERS_QUERY_PARAM, TOKEN_HEADER, TOKEN_QUERY_PARAM, } from "./constants"; +import { createScope, getSigningKey } from "./credentialDerivation"; +import { getCanonicalHeaders } from "./getCanonicalHeaders"; +import { getPayloadHash } from "./getPayloadHash"; +import { HeaderFormatter } from "./HeaderFormatter"; +import { hasHeader } from "./headerUtil"; +import { moveHeadersToQuery } from "./moveHeadersToQuery"; +import { prepareRequest } from "./prepareRequest"; +import { SignatureV4Base } from "./SignatureV4Base"; +export class SignatureV4 extends SignatureV4Base { + constructor({ applyChecksum, credentials, region, service, sha256, uriEscapePath = true, }) { + super({ + applyChecksum, + credentials, + region, + service, + sha256, + uriEscapePath, + }); + this.headerFormatter = new HeaderFormatter(); + } + async presign(originalRequest, options = {}) { + const { signingDate = new Date(), expiresIn = 3600, unsignableHeaders, unhoistableHeaders, signableHeaders, hoistableHeaders, signingRegion, signingService, } = options; + const 
credentials = await this.credentialProvider(); + this.validateResolvedCredentials(credentials); + const region = signingRegion ?? (await this.regionProvider()); + const { longDate, shortDate } = this.formatDate(signingDate); + if (expiresIn > MAX_PRESIGNED_TTL) { + return Promise.reject("Signature version 4 presigned URLs" + " must have an expiration date less than one week in" + " the future"); + } + const scope = createScope(shortDate, region, signingService ?? this.service); + const request = moveHeadersToQuery(prepareRequest(originalRequest), { unhoistableHeaders, hoistableHeaders }); + if (credentials.sessionToken) { + request.query[TOKEN_QUERY_PARAM] = credentials.sessionToken; + } + request.query[ALGORITHM_QUERY_PARAM] = ALGORITHM_IDENTIFIER; + request.query[CREDENTIAL_QUERY_PARAM] = `${credentials.accessKeyId}/${scope}`; + request.query[AMZ_DATE_QUERY_PARAM] = longDate; + request.query[EXPIRES_QUERY_PARAM] = expiresIn.toString(10); + const canonicalHeaders = getCanonicalHeaders(request, unsignableHeaders, signableHeaders); + request.query[SIGNED_HEADERS_QUERY_PARAM] = this.getCanonicalHeaderList(canonicalHeaders); + request.query[SIGNATURE_QUERY_PARAM] = await this.getSignature(longDate, scope, this.getSigningKey(credentials, region, shortDate, signingService), this.createCanonicalRequest(request, canonicalHeaders, await getPayloadHash(originalRequest, this.sha256))); + return request; + } + async sign(toSign, options) { + if (typeof toSign === "string") { + return this.signString(toSign, options); + } + else if (toSign.headers && toSign.payload) { + return this.signEvent(toSign, options); + } + else if (toSign.message) { + return this.signMessage(toSign, options); + } + else { + return this.signRequest(toSign, options); + } + } + async signEvent({ headers, payload }, { signingDate = new Date(), priorSignature, signingRegion, signingService }) { + const region = signingRegion ?? 
(await this.regionProvider()); + const { shortDate, longDate } = this.formatDate(signingDate); + const scope = createScope(shortDate, region, signingService ?? this.service); + const hashedPayload = await getPayloadHash({ headers: {}, body: payload }, this.sha256); + const hash = new this.sha256(); + hash.update(headers); + const hashedHeaders = toHex(await hash.digest()); + const stringToSign = [ + EVENT_ALGORITHM_IDENTIFIER, + longDate, + scope, + priorSignature, + hashedHeaders, + hashedPayload, + ].join("\n"); + return this.signString(stringToSign, { signingDate, signingRegion: region, signingService }); + } + async signMessage(signableMessage, { signingDate = new Date(), signingRegion, signingService }) { + const promise = this.signEvent({ + headers: this.headerFormatter.format(signableMessage.message.headers), + payload: signableMessage.message.body, + }, { + signingDate, + signingRegion, + signingService, + priorSignature: signableMessage.priorSignature, + }); + return promise.then((signature) => { + return { message: signableMessage.message, signature }; + }); + } + async signString(stringToSign, { signingDate = new Date(), signingRegion, signingService } = {}) { + const credentials = await this.credentialProvider(); + this.validateResolvedCredentials(credentials); + const region = signingRegion ?? (await this.regionProvider()); + const { shortDate } = this.formatDate(signingDate); + const hash = new this.sha256(await this.getSigningKey(credentials, region, shortDate, signingService)); + hash.update(toUint8Array(stringToSign)); + return toHex(await hash.digest()); + } + async signRequest(requestToSign, { signingDate = new Date(), signableHeaders, unsignableHeaders, signingRegion, signingService, } = {}) { + const credentials = await this.credentialProvider(); + this.validateResolvedCredentials(credentials); + const region = signingRegion ?? 
(await this.regionProvider()); + const request = prepareRequest(requestToSign); + const { longDate, shortDate } = this.formatDate(signingDate); + const scope = createScope(shortDate, region, signingService ?? this.service); + request.headers[AMZ_DATE_HEADER] = longDate; + if (credentials.sessionToken) { + request.headers[TOKEN_HEADER] = credentials.sessionToken; + } + const payloadHash = await getPayloadHash(request, this.sha256); + if (!hasHeader(SHA256_HEADER, request.headers) && this.applyChecksum) { + request.headers[SHA256_HEADER] = payloadHash; + } + const canonicalHeaders = getCanonicalHeaders(request, unsignableHeaders, signableHeaders); + const signature = await this.getSignature(longDate, scope, this.getSigningKey(credentials, region, shortDate, signingService), this.createCanonicalRequest(request, canonicalHeaders, payloadHash)); + request.headers[AUTH_HEADER] = + `${ALGORITHM_IDENTIFIER} ` + + `Credential=${credentials.accessKeyId}/${scope}, ` + + `SignedHeaders=${this.getCanonicalHeaderList(canonicalHeaders)}, ` + + `Signature=${signature}`; + return request; + } + async getSignature(longDate, credentialScope, keyPromise, canonicalRequest) { + const stringToSign = await this.createStringToSign(longDate, credentialScope, canonicalRequest, ALGORITHM_IDENTIFIER); + const hash = new this.sha256(await keyPromise); + hash.update(toUint8Array(stringToSign)); + return toHex(await hash.digest()); + } + getSigningKey(credentials, region, shortDate, service) { + return getSigningKey(this.sha256, credentials, shortDate, region, service || this.service); + } +} diff --git a/amplify/functions/deleteDocument/node_modules/@smithy/signature-v4/dist-es/SignatureV4Base.js b/amplify/functions/deleteDocument/node_modules/@smithy/signature-v4/dist-es/SignatureV4Base.js new file mode 100644 index 0000000..857ff0c --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@smithy/signature-v4/dist-es/SignatureV4Base.js @@ -0,0 +1,73 @@ +import { toHex } from 
"@smithy/util-hex-encoding"; +import { normalizeProvider } from "@smithy/util-middleware"; +import { escapeUri } from "@smithy/util-uri-escape"; +import { toUint8Array } from "@smithy/util-utf8"; +import { getCanonicalQuery } from "./getCanonicalQuery"; +import { iso8601 } from "./utilDate"; +export class SignatureV4Base { + constructor({ applyChecksum, credentials, region, service, sha256, uriEscapePath = true, }) { + this.service = service; + this.sha256 = sha256; + this.uriEscapePath = uriEscapePath; + this.applyChecksum = typeof applyChecksum === "boolean" ? applyChecksum : true; + this.regionProvider = normalizeProvider(region); + this.credentialProvider = normalizeProvider(credentials); + } + createCanonicalRequest(request, canonicalHeaders, payloadHash) { + const sortedHeaders = Object.keys(canonicalHeaders).sort(); + return `${request.method} +${this.getCanonicalPath(request)} +${getCanonicalQuery(request)} +${sortedHeaders.map((name) => `${name}:${canonicalHeaders[name]}`).join("\n")} + +${sortedHeaders.join(";")} +${payloadHash}`; + } + async createStringToSign(longDate, credentialScope, canonicalRequest, algorithmIdentifier) { + const hash = new this.sha256(); + hash.update(toUint8Array(canonicalRequest)); + const hashedRequest = await hash.digest(); + return `${algorithmIdentifier} +${longDate} +${credentialScope} +${toHex(hashedRequest)}`; + } + getCanonicalPath({ path }) { + if (this.uriEscapePath) { + const normalizedPathSegments = []; + for (const pathSegment of path.split("/")) { + if (pathSegment?.length === 0) + continue; + if (pathSegment === ".") + continue; + if (pathSegment === "..") { + normalizedPathSegments.pop(); + } + else { + normalizedPathSegments.push(pathSegment); + } + } + const normalizedPath = `${path?.startsWith("/") ? "/" : ""}${normalizedPathSegments.join("/")}${normalizedPathSegments.length > 0 && path?.endsWith("/") ? 
"/" : ""}`; + const doubleEncoded = escapeUri(normalizedPath); + return doubleEncoded.replace(/%2F/g, "/"); + } + return path; + } + validateResolvedCredentials(credentials) { + if (typeof credentials !== "object" || + typeof credentials.accessKeyId !== "string" || + typeof credentials.secretAccessKey !== "string") { + throw new Error("Resolved credential object is not valid"); + } + } + formatDate(now) { + const longDate = iso8601(now).replace(/[\-:]/g, ""); + return { + longDate, + shortDate: longDate.slice(0, 8), + }; + } + getCanonicalHeaderList(headers) { + return Object.keys(headers).sort().join(";"); + } +} diff --git a/amplify/functions/deleteDocument/node_modules/@smithy/signature-v4/dist-es/constants.js b/amplify/functions/deleteDocument/node_modules/@smithy/signature-v4/dist-es/constants.js new file mode 100644 index 0000000..602728a --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@smithy/signature-v4/dist-es/constants.js @@ -0,0 +1,43 @@ +export const ALGORITHM_QUERY_PARAM = "X-Amz-Algorithm"; +export const CREDENTIAL_QUERY_PARAM = "X-Amz-Credential"; +export const AMZ_DATE_QUERY_PARAM = "X-Amz-Date"; +export const SIGNED_HEADERS_QUERY_PARAM = "X-Amz-SignedHeaders"; +export const EXPIRES_QUERY_PARAM = "X-Amz-Expires"; +export const SIGNATURE_QUERY_PARAM = "X-Amz-Signature"; +export const TOKEN_QUERY_PARAM = "X-Amz-Security-Token"; +export const REGION_SET_PARAM = "X-Amz-Region-Set"; +export const AUTH_HEADER = "authorization"; +export const AMZ_DATE_HEADER = AMZ_DATE_QUERY_PARAM.toLowerCase(); +export const DATE_HEADER = "date"; +export const GENERATED_HEADERS = [AUTH_HEADER, AMZ_DATE_HEADER, DATE_HEADER]; +export const SIGNATURE_HEADER = SIGNATURE_QUERY_PARAM.toLowerCase(); +export const SHA256_HEADER = "x-amz-content-sha256"; +export const TOKEN_HEADER = TOKEN_QUERY_PARAM.toLowerCase(); +export const HOST_HEADER = "host"; +export const ALWAYS_UNSIGNABLE_HEADERS = { + authorization: true, + "cache-control": true, + connection: true, + 
expect: true, + from: true, + "keep-alive": true, + "max-forwards": true, + pragma: true, + referer: true, + te: true, + trailer: true, + "transfer-encoding": true, + upgrade: true, + "user-agent": true, + "x-amzn-trace-id": true, +}; +export const PROXY_HEADER_PATTERN = /^proxy-/; +export const SEC_HEADER_PATTERN = /^sec-/; +export const UNSIGNABLE_PATTERNS = [/^proxy-/i, /^sec-/i]; +export const ALGORITHM_IDENTIFIER = "AWS4-HMAC-SHA256"; +export const ALGORITHM_IDENTIFIER_V4A = "AWS4-ECDSA-P256-SHA256"; +export const EVENT_ALGORITHM_IDENTIFIER = "AWS4-HMAC-SHA256-PAYLOAD"; +export const UNSIGNED_PAYLOAD = "UNSIGNED-PAYLOAD"; +export const MAX_CACHE_SIZE = 50; +export const KEY_TYPE_IDENTIFIER = "aws4_request"; +export const MAX_PRESIGNED_TTL = 60 * 60 * 24 * 7; diff --git a/amplify/functions/deleteDocument/node_modules/@smithy/signature-v4/dist-es/credentialDerivation.js b/amplify/functions/deleteDocument/node_modules/@smithy/signature-v4/dist-es/credentialDerivation.js new file mode 100644 index 0000000..b16ab8c --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@smithy/signature-v4/dist-es/credentialDerivation.js @@ -0,0 +1,33 @@ +import { toHex } from "@smithy/util-hex-encoding"; +import { toUint8Array } from "@smithy/util-utf8"; +import { KEY_TYPE_IDENTIFIER, MAX_CACHE_SIZE } from "./constants"; +const signingKeyCache = {}; +const cacheQueue = []; +export const createScope = (shortDate, region, service) => `${shortDate}/${region}/${service}/${KEY_TYPE_IDENTIFIER}`; +export const getSigningKey = async (sha256Constructor, credentials, shortDate, region, service) => { + const credsHash = await hmac(sha256Constructor, credentials.secretAccessKey, credentials.accessKeyId); + const cacheKey = `${shortDate}:${region}:${service}:${toHex(credsHash)}:${credentials.sessionToken}`; + if (cacheKey in signingKeyCache) { + return signingKeyCache[cacheKey]; + } + cacheQueue.push(cacheKey); + while (cacheQueue.length > MAX_CACHE_SIZE) { + delete 
signingKeyCache[cacheQueue.shift()]; + } + let key = `AWS4${credentials.secretAccessKey}`; + for (const signable of [shortDate, region, service, KEY_TYPE_IDENTIFIER]) { + key = await hmac(sha256Constructor, key, signable); + } + return (signingKeyCache[cacheKey] = key); +}; +export const clearCredentialCache = () => { + cacheQueue.length = 0; + Object.keys(signingKeyCache).forEach((cacheKey) => { + delete signingKeyCache[cacheKey]; + }); +}; +const hmac = (ctor, secret, data) => { + const hash = new ctor(secret); + hash.update(toUint8Array(data)); + return hash.digest(); +}; diff --git a/amplify/functions/deleteDocument/node_modules/@smithy/signature-v4/dist-es/getCanonicalHeaders.js b/amplify/functions/deleteDocument/node_modules/@smithy/signature-v4/dist-es/getCanonicalHeaders.js new file mode 100644 index 0000000..3321125 --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@smithy/signature-v4/dist-es/getCanonicalHeaders.js @@ -0,0 +1,20 @@ +import { ALWAYS_UNSIGNABLE_HEADERS, PROXY_HEADER_PATTERN, SEC_HEADER_PATTERN } from "./constants"; +export const getCanonicalHeaders = ({ headers }, unsignableHeaders, signableHeaders) => { + const canonical = {}; + for (const headerName of Object.keys(headers).sort()) { + if (headers[headerName] == undefined) { + continue; + } + const canonicalHeaderName = headerName.toLowerCase(); + if (canonicalHeaderName in ALWAYS_UNSIGNABLE_HEADERS || + unsignableHeaders?.has(canonicalHeaderName) || + PROXY_HEADER_PATTERN.test(canonicalHeaderName) || + SEC_HEADER_PATTERN.test(canonicalHeaderName)) { + if (!signableHeaders || (signableHeaders && !signableHeaders.has(canonicalHeaderName))) { + continue; + } + } + canonical[canonicalHeaderName] = headers[headerName].trim().replace(/\s+/g, " "); + } + return canonical; +}; diff --git a/amplify/functions/deleteDocument/node_modules/@smithy/signature-v4/dist-es/getCanonicalQuery.js b/amplify/functions/deleteDocument/node_modules/@smithy/signature-v4/dist-es/getCanonicalQuery.js 
new file mode 100644 index 0000000..0623f1a --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@smithy/signature-v4/dist-es/getCanonicalQuery.js @@ -0,0 +1,29 @@ +import { escapeUri } from "@smithy/util-uri-escape"; +import { SIGNATURE_HEADER } from "./constants"; +export const getCanonicalQuery = ({ query = {} }) => { + const keys = []; + const serialized = {}; + for (const key of Object.keys(query)) { + if (key.toLowerCase() === SIGNATURE_HEADER) { + continue; + } + const encodedKey = escapeUri(key); + keys.push(encodedKey); + const value = query[key]; + if (typeof value === "string") { + serialized[encodedKey] = `${encodedKey}=${escapeUri(value)}`; + } + else if (Array.isArray(value)) { + serialized[encodedKey] = value + .slice(0) + .reduce((encoded, value) => encoded.concat([`${encodedKey}=${escapeUri(value)}`]), []) + .sort() + .join("&"); + } + } + return keys + .sort() + .map((key) => serialized[key]) + .filter((serialized) => serialized) + .join("&"); +}; diff --git a/amplify/functions/deleteDocument/node_modules/@smithy/signature-v4/dist-es/getPayloadHash.js b/amplify/functions/deleteDocument/node_modules/@smithy/signature-v4/dist-es/getPayloadHash.js new file mode 100644 index 0000000..cba165c --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@smithy/signature-v4/dist-es/getPayloadHash.js @@ -0,0 +1,20 @@ +import { isArrayBuffer } from "@smithy/is-array-buffer"; +import { toHex } from "@smithy/util-hex-encoding"; +import { toUint8Array } from "@smithy/util-utf8"; +import { SHA256_HEADER, UNSIGNED_PAYLOAD } from "./constants"; +export const getPayloadHash = async ({ headers, body }, hashConstructor) => { + for (const headerName of Object.keys(headers)) { + if (headerName.toLowerCase() === SHA256_HEADER) { + return headers[headerName]; + } + } + if (body == undefined) { + return "e3b0c44298fc1c149afbf4c8996fb92427ae41e4649b934ca495991b7852b855"; + } + else if (typeof body === "string" || ArrayBuffer.isView(body) || 
isArrayBuffer(body)) { + const hashCtor = new hashConstructor(); + hashCtor.update(toUint8Array(body)); + return toHex(await hashCtor.digest()); + } + return UNSIGNED_PAYLOAD; +}; diff --git a/amplify/functions/deleteDocument/node_modules/@smithy/signature-v4/dist-es/headerUtil.js b/amplify/functions/deleteDocument/node_modules/@smithy/signature-v4/dist-es/headerUtil.js new file mode 100644 index 0000000..e502cbb --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@smithy/signature-v4/dist-es/headerUtil.js @@ -0,0 +1,26 @@ +export const hasHeader = (soughtHeader, headers) => { + soughtHeader = soughtHeader.toLowerCase(); + for (const headerName of Object.keys(headers)) { + if (soughtHeader === headerName.toLowerCase()) { + return true; + } + } + return false; +}; +export const getHeaderValue = (soughtHeader, headers) => { + soughtHeader = soughtHeader.toLowerCase(); + for (const headerName of Object.keys(headers)) { + if (soughtHeader === headerName.toLowerCase()) { + return headers[headerName]; + } + } + return undefined; +}; +export const deleteHeader = (soughtHeader, headers) => { + soughtHeader = soughtHeader.toLowerCase(); + for (const headerName of Object.keys(headers)) { + if (soughtHeader === headerName.toLowerCase()) { + delete headers[headerName]; + } + } +}; diff --git a/amplify/functions/deleteDocument/node_modules/@smithy/signature-v4/dist-es/index.js b/amplify/functions/deleteDocument/node_modules/@smithy/signature-v4/dist-es/index.js new file mode 100644 index 0000000..062752d --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@smithy/signature-v4/dist-es/index.js @@ -0,0 +1,11 @@ +export * from "./SignatureV4"; +export * from "./constants"; +export { getCanonicalHeaders } from "./getCanonicalHeaders"; +export { getCanonicalQuery } from "./getCanonicalQuery"; +export { getPayloadHash } from "./getPayloadHash"; +export { moveHeadersToQuery } from "./moveHeadersToQuery"; +export { prepareRequest } from "./prepareRequest"; 
+export * from "./credentialDerivation"; +export { SignatureV4Base } from "./SignatureV4Base"; +export { hasHeader } from "./headerUtil"; +export * from "./signature-v4a-container"; diff --git a/amplify/functions/deleteDocument/node_modules/@smithy/signature-v4/dist-es/moveHeadersToQuery.js b/amplify/functions/deleteDocument/node_modules/@smithy/signature-v4/dist-es/moveHeadersToQuery.js new file mode 100644 index 0000000..806703a --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@smithy/signature-v4/dist-es/moveHeadersToQuery.js @@ -0,0 +1,17 @@ +import { HttpRequest } from "@smithy/protocol-http"; +export const moveHeadersToQuery = (request, options = {}) => { + const { headers, query = {} } = HttpRequest.clone(request); + for (const name of Object.keys(headers)) { + const lname = name.toLowerCase(); + if ((lname.slice(0, 6) === "x-amz-" && !options.unhoistableHeaders?.has(lname)) || + options.hoistableHeaders?.has(lname)) { + query[name] = headers[name]; + delete headers[name]; + } + } + return { + ...request, + headers, + query, + }; +}; diff --git a/amplify/functions/deleteDocument/node_modules/@smithy/signature-v4/dist-es/prepareRequest.js b/amplify/functions/deleteDocument/node_modules/@smithy/signature-v4/dist-es/prepareRequest.js new file mode 100644 index 0000000..7fe5136 --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@smithy/signature-v4/dist-es/prepareRequest.js @@ -0,0 +1,11 @@ +import { HttpRequest } from "@smithy/protocol-http"; +import { GENERATED_HEADERS } from "./constants"; +export const prepareRequest = (request) => { + request = HttpRequest.clone(request); + for (const headerName of Object.keys(request.headers)) { + if (GENERATED_HEADERS.indexOf(headerName.toLowerCase()) > -1) { + delete request.headers[headerName]; + } + } + return request; +}; diff --git a/amplify/functions/deleteDocument/node_modules/@smithy/signature-v4/dist-es/signature-v4a-container.js 
b/amplify/functions/deleteDocument/node_modules/@smithy/signature-v4/dist-es/signature-v4a-container.js new file mode 100644 index 0000000..a309b0a --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@smithy/signature-v4/dist-es/signature-v4a-container.js @@ -0,0 +1,3 @@ +export const signatureV4aContainer = { + SignatureV4a: null, +}; diff --git a/amplify/functions/deleteDocument/node_modules/@smithy/signature-v4/dist-es/suite.fixture.js b/amplify/functions/deleteDocument/node_modules/@smithy/signature-v4/dist-es/suite.fixture.js new file mode 100644 index 0000000..bb704a9 --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@smithy/signature-v4/dist-es/suite.fixture.js @@ -0,0 +1,399 @@ +export const region = "us-east-1"; +export const service = "service"; +export const credentials = { + accessKeyId: "AKIDEXAMPLE", + secretAccessKey: "wJalrXUtnFEMI/K7MDENG+bPxRfiCYEXAMPLEKEY", +}; +export const signingDate = new Date("2015-08-30T12:36:00Z"); +export const requests = [ + { + name: "get-header-key-duplicate", + request: { + protocol: "https:", + method: "GET", + hostname: "example.amazonaws.com", + query: {}, + headers: { + host: "example.amazonaws.com", + "my-header1": "value2,value2,value1", + "x-amz-date": "20150830T123600Z", + }, + path: "/", + }, + authorization: "AWS4-HMAC-SHA256 Credential=AKIDEXAMPLE/20150830/us-east-1/service/aws4_request, SignedHeaders=host;my-header1;x-amz-date, Signature=c9d5ea9f3f72853aea855b47ea873832890dbdd183b4468f858259531a5138ea", + }, + { + name: "get-header-value-multiline", + request: { + protocol: "https:", + method: "GET", + hostname: "example.amazonaws.com", + query: {}, + headers: { + host: "example.amazonaws.com", + "my-header1": "value1,value2,value3", + "x-amz-date": "20150830T123600Z", + }, + path: "/", + }, + authorization: "AWS4-HMAC-SHA256 Credential=AKIDEXAMPLE/20150830/us-east-1/service/aws4_request, SignedHeaders=host;my-header1;x-amz-date, 
Signature=ba17b383a53190154eb5fa66a1b836cc297cc0a3d70a5d00705980573d8ff790", + }, + { + name: "get-header-value-order", + request: { + protocol: "https:", + method: "GET", + hostname: "example.amazonaws.com", + query: {}, + headers: { + host: "example.amazonaws.com", + "my-header1": "value4,value1,value3,value2", + "x-amz-date": "20150830T123600Z", + }, + path: "/", + }, + authorization: "AWS4-HMAC-SHA256 Credential=AKIDEXAMPLE/20150830/us-east-1/service/aws4_request, SignedHeaders=host;my-header1;x-amz-date, Signature=08c7e5a9acfcfeb3ab6b2185e75ce8b1deb5e634ec47601a50643f830c755c01", + }, + { + name: "get-header-value-trim", + request: { + protocol: "https:", + method: "GET", + hostname: "example.amazonaws.com", + query: {}, + headers: { + host: "example.amazonaws.com", + "my-header1": "value1", + "my-header2": '"a b c"', + "x-amz-date": "20150830T123600Z", + }, + path: "/", + }, + authorization: "AWS4-HMAC-SHA256 Credential=AKIDEXAMPLE/20150830/us-east-1/service/aws4_request, SignedHeaders=host;my-header1;my-header2;x-amz-date, Signature=acc3ed3afb60bb290fc8d2dd0098b9911fcaa05412b367055dee359757a9c736", + }, + { + name: "get-unreserved", + request: { + protocol: "https:", + method: "GET", + hostname: "example.amazonaws.com", + query: {}, + headers: { + host: "example.amazonaws.com", + "x-amz-date": "20150830T123600Z", + }, + path: "/-._~0123456789ABCDEFGHIJKLMNOPQRSTUVWXYZabcdefghijklmnopqrstuvwxyz", + }, + authorization: "AWS4-HMAC-SHA256 Credential=AKIDEXAMPLE/20150830/us-east-1/service/aws4_request, SignedHeaders=host;x-amz-date, Signature=07ef7494c76fa4850883e2b006601f940f8a34d404d0cfa977f52a65bbf5f24f", + }, + { + name: "get-utf8", + request: { + protocol: "https:", + method: "GET", + hostname: "example.amazonaws.com", + query: {}, + headers: { + host: "example.amazonaws.com", + "x-amz-date": "20150830T123600Z", + }, + path: "/ሴ", + }, + authorization: "AWS4-HMAC-SHA256 Credential=AKIDEXAMPLE/20150830/us-east-1/service/aws4_request, 
SignedHeaders=host;x-amz-date, Signature=8318018e0b0f223aa2bbf98705b62bb787dc9c0e678f255a891fd03141be5d85", + }, + { + name: "get-vanilla", + request: { + protocol: "https:", + method: "GET", + hostname: "example.amazonaws.com", + query: {}, + headers: { + host: "example.amazonaws.com", + "x-amz-date": "20150830T123600Z", + }, + path: "/", + }, + authorization: "AWS4-HMAC-SHA256 Credential=AKIDEXAMPLE/20150830/us-east-1/service/aws4_request, SignedHeaders=host;x-amz-date, Signature=5fa00fa31553b73ebf1942676e86291e8372ff2a2260956d9b8aae1d763fbf31", + }, + { + name: "get-vanilla-empty-query-key", + request: { + protocol: "https:", + method: "GET", + hostname: "example.amazonaws.com", + query: { + Param1: "value1", + }, + headers: { + host: "example.amazonaws.com", + "x-amz-date": "20150830T123600Z", + }, + path: "/", + }, + authorization: "AWS4-HMAC-SHA256 Credential=AKIDEXAMPLE/20150830/us-east-1/service/aws4_request, SignedHeaders=host;x-amz-date, Signature=a67d582fa61cc504c4bae71f336f98b97f1ea3c7a6bfe1b6e45aec72011b9aeb", + }, + { + name: "get-vanilla-query", + request: { + protocol: "https:", + method: "GET", + hostname: "example.amazonaws.com", + query: {}, + headers: { + host: "example.amazonaws.com", + "x-amz-date": "20150830T123600Z", + }, + path: "/", + }, + authorization: "AWS4-HMAC-SHA256 Credential=AKIDEXAMPLE/20150830/us-east-1/service/aws4_request, SignedHeaders=host;x-amz-date, Signature=5fa00fa31553b73ebf1942676e86291e8372ff2a2260956d9b8aae1d763fbf31", + }, + { + name: "get-vanilla-query-order-key-case", + request: { + protocol: "https:", + method: "GET", + hostname: "example.amazonaws.com", + query: { + Param2: "value2", + Param1: "value1", + }, + headers: { + host: "example.amazonaws.com", + "x-amz-date": "20150830T123600Z", + }, + path: "/", + }, + authorization: "AWS4-HMAC-SHA256 Credential=AKIDEXAMPLE/20150830/us-east-1/service/aws4_request, SignedHeaders=host;x-amz-date, 
Signature=b97d918cfa904a5beff61c982a1b6f458b799221646efd99d3219ec94cdf2500", + }, + { + name: "get-vanilla-query-unreserved", + request: { + protocol: "https:", + method: "GET", + hostname: "example.amazonaws.com", + query: { + "-._~0123456789ABCDEFGHIJKLMNOPQRSTUVWXYZabcdefghijklmnopqrstuvwxyz": "-._~0123456789ABCDEFGHIJKLMNOPQRSTUVWXYZabcdefghijklmnopqrstuvwxyz", + }, + headers: { + host: "example.amazonaws.com", + "x-amz-date": "20150830T123600Z", + }, + path: "/", + }, + authorization: "AWS4-HMAC-SHA256 Credential=AKIDEXAMPLE/20150830/us-east-1/service/aws4_request, SignedHeaders=host;x-amz-date, Signature=9c3e54bfcdf0b19771a7f523ee5669cdf59bc7cc0884027167c21bb143a40197", + }, + { + name: "get-vanilla-utf8-query", + request: { + protocol: "https:", + method: "GET", + hostname: "example.amazonaws.com", + query: { + ሴ: "bar", + }, + headers: { + host: "example.amazonaws.com", + "x-amz-date": "20150830T123600Z", + }, + path: "/", + }, + authorization: "AWS4-HMAC-SHA256 Credential=AKIDEXAMPLE/20150830/us-east-1/service/aws4_request, SignedHeaders=host;x-amz-date, Signature=2cdec8eed098649ff3a119c94853b13c643bcf08f8b0a1d91e12c9027818dd04", + }, + { + name: "post-header-key-case", + request: { + protocol: "https:", + method: "POST", + hostname: "example.amazonaws.com", + query: {}, + headers: { + host: "example.amazonaws.com", + "x-amz-date": "20150830T123600Z", + }, + path: "/", + }, + authorization: "AWS4-HMAC-SHA256 Credential=AKIDEXAMPLE/20150830/us-east-1/service/aws4_request, SignedHeaders=host;x-amz-date, Signature=5da7c1a2acd57cee7505fc6676e4e544621c30862966e37dddb68e92efbe5d6b", + }, + { + name: "post-header-key-sort", + request: { + protocol: "https:", + method: "POST", + hostname: "example.amazonaws.com", + query: {}, + headers: { + host: "example.amazonaws.com", + "my-header1": "value1", + "x-amz-date": "20150830T123600Z", + }, + path: "/", + }, + authorization: "AWS4-HMAC-SHA256 Credential=AKIDEXAMPLE/20150830/us-east-1/service/aws4_request, 
SignedHeaders=host;my-header1;x-amz-date, Signature=c5410059b04c1ee005303aed430f6e6645f61f4dc9e1461ec8f8916fdf18852c", + }, + { + name: "post-header-value-case", + request: { + protocol: "https:", + method: "POST", + hostname: "example.amazonaws.com", + query: {}, + headers: { + host: "example.amazonaws.com", + "my-header1": "VALUE1", + "x-amz-date": "20150830T123600Z", + }, + path: "/", + }, + authorization: "AWS4-HMAC-SHA256 Credential=AKIDEXAMPLE/20150830/us-east-1/service/aws4_request, SignedHeaders=host;my-header1;x-amz-date, Signature=cdbc9802e29d2942e5e10b5bccfdd67c5f22c7c4e8ae67b53629efa58b974b7d", + }, + { + name: "post-sts-header-after", + request: { + protocol: "https:", + method: "POST", + hostname: "example.amazonaws.com", + query: {}, + headers: { + host: "example.amazonaws.com", + "x-amz-date": "20150830T123600Z", + }, + path: "/", + }, + authorization: "AWS4-HMAC-SHA256 Credential=AKIDEXAMPLE/20150830/us-east-1/service/aws4_request, SignedHeaders=host;x-amz-date, Signature=5da7c1a2acd57cee7505fc6676e4e544621c30862966e37dddb68e92efbe5d6b", + }, + { + name: "post-sts-header-before", + request: { + protocol: "https:", + method: "POST", + hostname: "example.amazonaws.com", + query: {}, + headers: { + host: "example.amazonaws.com", + "x-amz-date": "20150830T123600Z", + "x-amz-security-token": "AQoDYXdzEPT//////////wEXAMPLEtc764bNrC9SAPBSM22wDOk4x4HIZ8j4FZTwdQWLWsKWHGBuFqwAeMicRXmxfpSPfIeoIYRqTflfKD8YUuwthAx7mSEI/qkPpKPi/kMcGdQrmGdeehM4IC1NtBmUpp2wUE8phUZampKsburEDy0KPkyQDYwT7WZ0wq5VSXDvp75YU9HFvlRd8Tx6q6fE8YQcHNVXAkiY9q6d+xo0rKwT38xVqr7ZD0u0iPPkUL64lIZbqBAz+scqKmlzm8FDrypNC9Yjc8fPOLn9FX9KSYvKTr4rvx3iSIlTJabIQwj2ICCR/oLxBA==", + }, + path: "/", + }, + authorization: "AWS4-HMAC-SHA256 Credential=AKIDEXAMPLE/20150830/us-east-1/service/aws4_request, SignedHeaders=host;x-amz-date;x-amz-security-token, Signature=85d96828115b5dc0cfc3bd16ad9e210dd772bbebba041836c64533a82be05ead", + }, + { + name: "post-vanilla", + request: { + protocol: "https:", + method: 
"POST", + hostname: "example.amazonaws.com", + query: {}, + headers: { + host: "example.amazonaws.com", + "x-amz-date": "20150830T123600Z", + }, + path: "/", + }, + authorization: "AWS4-HMAC-SHA256 Credential=AKIDEXAMPLE/20150830/us-east-1/service/aws4_request, SignedHeaders=host;x-amz-date, Signature=5da7c1a2acd57cee7505fc6676e4e544621c30862966e37dddb68e92efbe5d6b", + }, + { + name: "post-vanilla-empty-query-value", + request: { + protocol: "https:", + method: "POST", + hostname: "example.amazonaws.com", + query: { + Param1: "value1", + }, + headers: { + host: "example.amazonaws.com", + "x-amz-date": "20150830T123600Z", + }, + path: "/", + }, + authorization: "AWS4-HMAC-SHA256 Credential=AKIDEXAMPLE/20150830/us-east-1/service/aws4_request, SignedHeaders=host;x-amz-date, Signature=28038455d6de14eafc1f9222cf5aa6f1a96197d7deb8263271d420d138af7f11", + }, + { + name: "post-vanilla-query", + request: { + protocol: "https:", + method: "POST", + hostname: "example.amazonaws.com", + query: { + Param1: "value1", + }, + headers: { + host: "example.amazonaws.com", + "x-amz-date": "20150830T123600Z", + }, + path: "/", + }, + authorization: "AWS4-HMAC-SHA256 Credential=AKIDEXAMPLE/20150830/us-east-1/service/aws4_request, SignedHeaders=host;x-amz-date, Signature=28038455d6de14eafc1f9222cf5aa6f1a96197d7deb8263271d420d138af7f11", + }, + { + name: "post-vanilla-query-nonunreserved", + request: { + protocol: "https:", + method: "POST", + hostname: "example.amazonaws.com", + query: { + "@#$%^": "", + "+": '/,?><`";:\\|][{}', + }, + headers: { + host: "example.amazonaws.com", + "x-amz-date": "20150830T123600Z", + }, + path: "/", + }, + authorization: "AWS4-HMAC-SHA256 Credential=AKIDEXAMPLE/20150830/us-east-1/service/aws4_request, SignedHeaders=host;x-amz-date, Signature=66c82657c86e26fb25238d0e69f011edc4c6df5ae71119d7cb98ed9b87393c1e", + }, + { + name: "post-vanilla-query-space", + request: { + protocol: "https:", + method: "POST", + hostname: "example.amazonaws.com", + query: { + p: 
"", + }, + headers: { + host: "example.amazonaws.com", + "x-amz-date": "20150830T123600Z", + }, + path: "/", + }, + authorization: "AWS4-HMAC-SHA256 Credential=AKIDEXAMPLE/20150830/us-east-1/service/aws4_request, SignedHeaders=host;x-amz-date, Signature=e71688addb58a26418614085fb730ba3faa623b461c17f48f2fbdb9361b94a9b", + }, + { + name: "post-x-www-form-urlencoded", + request: { + protocol: "https:", + method: "POST", + hostname: "example.amazonaws.com", + query: {}, + headers: { + "content-type": "application/x-www-form-urlencoded", + host: "example.amazonaws.com", + "x-amz-date": "20150830T123600Z", + }, + body: "Param1=value1", + path: "/", + }, + authorization: "AWS4-HMAC-SHA256 Credential=AKIDEXAMPLE/20150830/us-east-1/service/aws4_request, SignedHeaders=content-type;host;x-amz-date, Signature=ff11897932ad3f4e8b18135d722051e5ac45fc38421b1da7b9d196a0fe09473a", + }, + { + name: "post-x-www-form-urlencoded-parameters", + request: { + protocol: "https:", + method: "POST", + hostname: "example.amazonaws.com", + query: {}, + headers: { + "content-type": "application/x-www-form-urlencoded; charset=utf8", + host: "example.amazonaws.com", + "x-amz-date": "20150830T123600Z", + }, + body: "Param1=value1", + path: "/", + }, + authorization: "AWS4-HMAC-SHA256 Credential=AKIDEXAMPLE/20150830/us-east-1/service/aws4_request, SignedHeaders=content-type;host;x-amz-date, Signature=1a72ec8f64bd914b0e42e42607c7fbce7fb2c7465f63e3092b3b0d39fa77a6fe", + }, +]; diff --git a/amplify/functions/deleteDocument/node_modules/@smithy/signature-v4/dist-es/utilDate.js b/amplify/functions/deleteDocument/node_modules/@smithy/signature-v4/dist-es/utilDate.js new file mode 100644 index 0000000..4aad623 --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@smithy/signature-v4/dist-es/utilDate.js @@ -0,0 +1,15 @@ +export const iso8601 = (time) => toDate(time) + .toISOString() + .replace(/\.\d{3}Z$/, "Z"); +export const toDate = (time) => { + if (typeof time === "number") { + return new 
Date(time * 1000); + } + if (typeof time === "string") { + if (Number(time)) { + return new Date(Number(time) * 1000); + } + return new Date(time); + } + return time; +}; diff --git a/amplify/functions/deleteDocument/node_modules/@smithy/signature-v4/dist-types/HeaderFormatter.d.ts b/amplify/functions/deleteDocument/node_modules/@smithy/signature-v4/dist-types/HeaderFormatter.d.ts new file mode 100644 index 0000000..92056a6 --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@smithy/signature-v4/dist-types/HeaderFormatter.d.ts @@ -0,0 +1,24 @@ +import type { Int64 as IInt64, MessageHeaders } from "@smithy/types"; +/** + * @internal + * TODO: duplicated from @smithy/eventstream-codec to break large dependency. + * TODO: This should be moved to its own deduped submodule in @smithy/core when submodules are implemented. + */ +export declare class HeaderFormatter { + format(headers: MessageHeaders): Uint8Array; + private formatHeaderValue; +} +/** + * TODO: duplicated from @smithy/eventstream-codec to break large dependency. + * TODO: This should be moved to its own deduped submodule in @smithy/core when submodules are implemented. + */ +export declare class Int64 implements IInt64 { + readonly bytes: Uint8Array; + constructor(bytes: Uint8Array); + static fromNumber(number: number): Int64; + /** + * Called implicitly by infix arithmetic operators. 
+ */ + valueOf(): number; + toString(): string; +} diff --git a/amplify/functions/deleteDocument/node_modules/@smithy/signature-v4/dist-types/SignatureV4.d.ts b/amplify/functions/deleteDocument/node_modules/@smithy/signature-v4/dist-types/SignatureV4.d.ts new file mode 100644 index 0000000..99499d4 --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@smithy/signature-v4/dist-types/SignatureV4.d.ts @@ -0,0 +1,20 @@ +import { EventSigner, EventSigningArguments, FormattedEvent, HttpRequest, MessageSigner, RequestPresigner, RequestPresigningArguments, RequestSigner, RequestSigningArguments, SignableMessage, SignedMessage, SigningArguments, StringSigner } from "@smithy/types"; +import { SignatureV4Base, SignatureV4CryptoInit, SignatureV4Init } from "./SignatureV4Base"; +/** + * @public + */ +export declare class SignatureV4 extends SignatureV4Base implements RequestPresigner, RequestSigner, StringSigner, EventSigner, MessageSigner { + private readonly headerFormatter; + constructor({ applyChecksum, credentials, region, service, sha256, uriEscapePath, }: SignatureV4Init & SignatureV4CryptoInit); + presign(originalRequest: HttpRequest, options?: RequestPresigningArguments): Promise; + sign(stringToSign: string, options?: SigningArguments): Promise; + sign(event: FormattedEvent, options: EventSigningArguments): Promise; + sign(event: SignableMessage, options: SigningArguments): Promise; + sign(requestToSign: HttpRequest, options?: RequestSigningArguments): Promise; + private signEvent; + signMessage(signableMessage: SignableMessage, { signingDate, signingRegion, signingService }: SigningArguments): Promise; + private signString; + private signRequest; + private getSignature; + private getSigningKey; +} diff --git a/amplify/functions/deleteDocument/node_modules/@smithy/signature-v4/dist-types/SignatureV4Base.d.ts b/amplify/functions/deleteDocument/node_modules/@smithy/signature-v4/dist-types/SignatureV4Base.d.ts new file mode 100644 index 0000000..9a0e6ad --- 
/dev/null +++ b/amplify/functions/deleteDocument/node_modules/@smithy/signature-v4/dist-types/SignatureV4Base.d.ts @@ -0,0 +1,69 @@ +import { AwsCredentialIdentity, ChecksumConstructor, DateInput, HashConstructor, HeaderBag, HttpRequest, Provider } from "@smithy/types"; +/** + * @public + */ +export interface SignatureV4Init { + /** + * The service signing name. + */ + service: string; + /** + * The region name or a function that returns a promise that will be + * resolved with the region name. + */ + region: string | Provider; + /** + * The credentials with which the request should be signed or a function + * that returns a promise that will be resolved with credentials. + */ + credentials: AwsCredentialIdentity | Provider; + /** + * A constructor function for a hash object that will calculate SHA-256 HMAC + * checksums. + */ + sha256?: ChecksumConstructor | HashConstructor; + /** + * Whether to uri-escape the request URI path as part of computing the + * canonical request string. This is required for every AWS service, except + * Amazon S3, as of late 2017. + * + * @default [true] + */ + uriEscapePath?: boolean; + /** + * Whether to calculate a checksum of the request body and include it as + * either a request header (when signing) or as a query string parameter + * (when presigning). This is required for AWS Glacier and Amazon S3 and optional for + * every other AWS service as of late 2017. 
+ * + * @default [true] + */ + applyChecksum?: boolean; +} +/** + * @public + */ +export interface SignatureV4CryptoInit { + sha256: ChecksumConstructor | HashConstructor; +} +/** + * @internal + */ +export declare abstract class SignatureV4Base { + protected readonly service: string; + protected readonly regionProvider: Provider; + protected readonly credentialProvider: Provider; + protected readonly sha256: ChecksumConstructor | HashConstructor; + private readonly uriEscapePath; + protected readonly applyChecksum: boolean; + protected constructor({ applyChecksum, credentials, region, service, sha256, uriEscapePath, }: SignatureV4Init & SignatureV4CryptoInit); + protected createCanonicalRequest(request: HttpRequest, canonicalHeaders: HeaderBag, payloadHash: string): string; + protected createStringToSign(longDate: string, credentialScope: string, canonicalRequest: string, algorithmIdentifier: string): Promise; + private getCanonicalPath; + protected validateResolvedCredentials(credentials: unknown): void; + protected formatDate(now: DateInput): { + longDate: string; + shortDate: string; + }; + protected getCanonicalHeaderList(headers: object): string; +} diff --git a/amplify/functions/deleteDocument/node_modules/@smithy/signature-v4/dist-types/constants.d.ts b/amplify/functions/deleteDocument/node_modules/@smithy/signature-v4/dist-types/constants.d.ts new file mode 100644 index 0000000..ea1cfb5 --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@smithy/signature-v4/dist-types/constants.d.ts @@ -0,0 +1,43 @@ +export declare const ALGORITHM_QUERY_PARAM = "X-Amz-Algorithm"; +export declare const CREDENTIAL_QUERY_PARAM = "X-Amz-Credential"; +export declare const AMZ_DATE_QUERY_PARAM = "X-Amz-Date"; +export declare const SIGNED_HEADERS_QUERY_PARAM = "X-Amz-SignedHeaders"; +export declare const EXPIRES_QUERY_PARAM = "X-Amz-Expires"; +export declare const SIGNATURE_QUERY_PARAM = "X-Amz-Signature"; +export declare const TOKEN_QUERY_PARAM = 
"X-Amz-Security-Token"; +export declare const REGION_SET_PARAM = "X-Amz-Region-Set"; +export declare const AUTH_HEADER = "authorization"; +export declare const AMZ_DATE_HEADER: string; +export declare const DATE_HEADER = "date"; +export declare const GENERATED_HEADERS: string[]; +export declare const SIGNATURE_HEADER: string; +export declare const SHA256_HEADER = "x-amz-content-sha256"; +export declare const TOKEN_HEADER: string; +export declare const HOST_HEADER = "host"; +export declare const ALWAYS_UNSIGNABLE_HEADERS: { + authorization: boolean; + "cache-control": boolean; + connection: boolean; + expect: boolean; + from: boolean; + "keep-alive": boolean; + "max-forwards": boolean; + pragma: boolean; + referer: boolean; + te: boolean; + trailer: boolean; + "transfer-encoding": boolean; + upgrade: boolean; + "user-agent": boolean; + "x-amzn-trace-id": boolean; +}; +export declare const PROXY_HEADER_PATTERN: RegExp; +export declare const SEC_HEADER_PATTERN: RegExp; +export declare const UNSIGNABLE_PATTERNS: RegExp[]; +export declare const ALGORITHM_IDENTIFIER = "AWS4-HMAC-SHA256"; +export declare const ALGORITHM_IDENTIFIER_V4A = "AWS4-ECDSA-P256-SHA256"; +export declare const EVENT_ALGORITHM_IDENTIFIER = "AWS4-HMAC-SHA256-PAYLOAD"; +export declare const UNSIGNED_PAYLOAD = "UNSIGNED-PAYLOAD"; +export declare const MAX_CACHE_SIZE = 50; +export declare const KEY_TYPE_IDENTIFIER = "aws4_request"; +export declare const MAX_PRESIGNED_TTL: number; diff --git a/amplify/functions/deleteDocument/node_modules/@smithy/signature-v4/dist-types/credentialDerivation.d.ts b/amplify/functions/deleteDocument/node_modules/@smithy/signature-v4/dist-types/credentialDerivation.d.ts new file mode 100644 index 0000000..a560c2c --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@smithy/signature-v4/dist-types/credentialDerivation.d.ts @@ -0,0 +1,30 @@ +import { AwsCredentialIdentity, ChecksumConstructor, HashConstructor } from "@smithy/types"; +/** + * Create a string 
describing the scope of credentials used to sign a request. + * + * @internal + * + * @param shortDate - the current calendar date in the form YYYYMMDD. + * @param region - the AWS region in which the service resides. + * @param service - the service to which the signed request is being sent. + */ +export declare const createScope: (shortDate: string, region: string, service: string) => string; +/** + * Derive a signing key from its composite parts. + * + * @internal + * + * @param sha256Constructor - a constructor function that can instantiate SHA-256 + * hash objects. + * @param credentials - the credentials with which the request will be + * signed. + * @param shortDate - the current calendar date in the form YYYYMMDD. + * @param region - the AWS region in which the service resides. + * @param service - the service to which the signed request is being + * sent. + */ +export declare const getSigningKey: (sha256Constructor: ChecksumConstructor | HashConstructor, credentials: AwsCredentialIdentity, shortDate: string, region: string, service: string) => Promise; +/** + * @internal + */ +export declare const clearCredentialCache: () => void; diff --git a/amplify/functions/deleteDocument/node_modules/@smithy/signature-v4/dist-types/getCanonicalHeaders.d.ts b/amplify/functions/deleteDocument/node_modules/@smithy/signature-v4/dist-types/getCanonicalHeaders.d.ts new file mode 100644 index 0000000..efc417c --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@smithy/signature-v4/dist-types/getCanonicalHeaders.d.ts @@ -0,0 +1,5 @@ +import { HeaderBag, HttpRequest } from "@smithy/types"; +/** + * @internal + */ +export declare const getCanonicalHeaders: ({ headers }: HttpRequest, unsignableHeaders?: Set, signableHeaders?: Set) => HeaderBag; diff --git a/amplify/functions/deleteDocument/node_modules/@smithy/signature-v4/dist-types/getCanonicalQuery.d.ts b/amplify/functions/deleteDocument/node_modules/@smithy/signature-v4/dist-types/getCanonicalQuery.d.ts new 
file mode 100644 index 0000000..a8e1800 --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@smithy/signature-v4/dist-types/getCanonicalQuery.d.ts @@ -0,0 +1,5 @@ +import { HttpRequest } from "@smithy/types"; +/** + * @internal + */ +export declare const getCanonicalQuery: ({ query }: HttpRequest) => string; diff --git a/amplify/functions/deleteDocument/node_modules/@smithy/signature-v4/dist-types/getPayloadHash.d.ts b/amplify/functions/deleteDocument/node_modules/@smithy/signature-v4/dist-types/getPayloadHash.d.ts new file mode 100644 index 0000000..2de0858 --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@smithy/signature-v4/dist-types/getPayloadHash.d.ts @@ -0,0 +1,5 @@ +import { ChecksumConstructor, HashConstructor, HttpRequest } from "@smithy/types"; +/** + * @internal + */ +export declare const getPayloadHash: ({ headers, body }: HttpRequest, hashConstructor: ChecksumConstructor | HashConstructor) => Promise; diff --git a/amplify/functions/deleteDocument/node_modules/@smithy/signature-v4/dist-types/headerUtil.d.ts b/amplify/functions/deleteDocument/node_modules/@smithy/signature-v4/dist-types/headerUtil.d.ts new file mode 100644 index 0000000..c0b66eb --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@smithy/signature-v4/dist-types/headerUtil.d.ts @@ -0,0 +1,4 @@ +import { HeaderBag } from "@smithy/types"; +export declare const hasHeader: (soughtHeader: string, headers: HeaderBag) => boolean; +export declare const getHeaderValue: (soughtHeader: string, headers: HeaderBag) => string | undefined; +export declare const deleteHeader: (soughtHeader: string, headers: HeaderBag) => void; diff --git a/amplify/functions/deleteDocument/node_modules/@smithy/signature-v4/dist-types/index.d.ts b/amplify/functions/deleteDocument/node_modules/@smithy/signature-v4/dist-types/index.d.ts new file mode 100644 index 0000000..9305cf3 --- /dev/null +++ 
b/amplify/functions/deleteDocument/node_modules/@smithy/signature-v4/dist-types/index.d.ts @@ -0,0 +1,11 @@ +export * from "./SignatureV4"; +export * from "./constants"; +export { getCanonicalHeaders } from "./getCanonicalHeaders"; +export { getCanonicalQuery } from "./getCanonicalQuery"; +export { getPayloadHash } from "./getPayloadHash"; +export { moveHeadersToQuery } from "./moveHeadersToQuery"; +export { prepareRequest } from "./prepareRequest"; +export * from "./credentialDerivation"; +export { SignatureV4Init, SignatureV4CryptoInit, SignatureV4Base } from "./SignatureV4Base"; +export { hasHeader } from "./headerUtil"; +export * from "./signature-v4a-container"; diff --git a/amplify/functions/deleteDocument/node_modules/@smithy/signature-v4/dist-types/moveHeadersToQuery.d.ts b/amplify/functions/deleteDocument/node_modules/@smithy/signature-v4/dist-types/moveHeadersToQuery.d.ts new file mode 100644 index 0000000..e2c31e0 --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@smithy/signature-v4/dist-types/moveHeadersToQuery.d.ts @@ -0,0 +1,10 @@ +import type { HttpRequest as IHttpRequest, QueryParameterBag } from "@smithy/types"; +/** + * @internal + */ +export declare const moveHeadersToQuery: (request: IHttpRequest, options?: { + unhoistableHeaders?: Set; + hoistableHeaders?: Set; +}) => IHttpRequest & { + query: QueryParameterBag; +}; diff --git a/amplify/functions/deleteDocument/node_modules/@smithy/signature-v4/dist-types/prepareRequest.d.ts b/amplify/functions/deleteDocument/node_modules/@smithy/signature-v4/dist-types/prepareRequest.d.ts new file mode 100644 index 0000000..b20e0e3 --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@smithy/signature-v4/dist-types/prepareRequest.d.ts @@ -0,0 +1,5 @@ +import type { HttpRequest as IHttpRequest } from "@smithy/types"; +/** + * @internal + */ +export declare const prepareRequest: (request: IHttpRequest) => IHttpRequest; diff --git 
a/amplify/functions/deleteDocument/node_modules/@smithy/signature-v4/dist-types/signature-v4a-container.d.ts b/amplify/functions/deleteDocument/node_modules/@smithy/signature-v4/dist-types/signature-v4a-container.d.ts new file mode 100644 index 0000000..8901036 --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@smithy/signature-v4/dist-types/signature-v4a-container.d.ts @@ -0,0 +1,24 @@ +import type { RequestSigner } from "@smithy/types"; +/** + * @public + */ +export type OptionalSigV4aSigner = { + /** + * This constructor is not typed so as not to require a type import + * from the signature-v4a package. + * + * The true type is SignatureV4a from @smithy/signature-v4a. + */ + new (options: any): RequestSigner; +}; +/** + * @public + * + * \@smithy/signature-v4a will install the constructor in this + * container if it's installed. + * + * This avoids a runtime-require being interpreted statically by bundlers. + */ +export declare const signatureV4aContainer: { + SignatureV4a: null | OptionalSigV4aSigner; +}; diff --git a/amplify/functions/deleteDocument/node_modules/@smithy/signature-v4/dist-types/suite.fixture.d.ts b/amplify/functions/deleteDocument/node_modules/@smithy/signature-v4/dist-types/suite.fixture.d.ts new file mode 100644 index 0000000..383bc35 --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@smithy/signature-v4/dist-types/suite.fixture.d.ts @@ -0,0 +1,14 @@ +import { HttpRequest } from "@smithy/types"; +export interface TestCase { + name: string; + request: HttpRequest; + authorization: string; +} +export declare const region = "us-east-1"; +export declare const service = "service"; +export declare const credentials: { + accessKeyId: string; + secretAccessKey: string; +}; +export declare const signingDate: Date; +export declare const requests: Array; diff --git a/amplify/functions/deleteDocument/node_modules/@smithy/signature-v4/dist-types/ts3.4/HeaderFormatter.d.ts 
b/amplify/functions/deleteDocument/node_modules/@smithy/signature-v4/dist-types/ts3.4/HeaderFormatter.d.ts new file mode 100644 index 0000000..6c294c3 --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@smithy/signature-v4/dist-types/ts3.4/HeaderFormatter.d.ts @@ -0,0 +1,24 @@ +import { Int64 as IInt64, MessageHeaders } from "@smithy/types"; +/** + * @internal + * TODO: duplicated from @smithy/eventstream-codec to break large dependency. + * TODO: This should be moved to its own deduped submodule in @smithy/core when submodules are implemented. + */ +export declare class HeaderFormatter { + format(headers: MessageHeaders): Uint8Array; + private formatHeaderValue; +} +/** + * TODO: duplicated from @smithy/eventstream-codec to break large dependency. + * TODO: This should be moved to its own deduped submodule in @smithy/core when submodules are implemented. + */ +export declare class Int64 implements IInt64 { + readonly bytes: Uint8Array; + constructor(bytes: Uint8Array); + static fromNumber(number: number): Int64; + /** + * Called implicitly by infix arithmetic operators. 
+ */ + valueOf(): number; + toString(): string; +} diff --git a/amplify/functions/deleteDocument/node_modules/@smithy/signature-v4/dist-types/ts3.4/SignatureV4.d.ts b/amplify/functions/deleteDocument/node_modules/@smithy/signature-v4/dist-types/ts3.4/SignatureV4.d.ts new file mode 100644 index 0000000..c613753 --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@smithy/signature-v4/dist-types/ts3.4/SignatureV4.d.ts @@ -0,0 +1,20 @@ +import { EventSigner, EventSigningArguments, FormattedEvent, HttpRequest, MessageSigner, RequestPresigner, RequestPresigningArguments, RequestSigner, RequestSigningArguments, SignableMessage, SignedMessage, SigningArguments, StringSigner } from "@smithy/types"; +import { SignatureV4Base, SignatureV4CryptoInit, SignatureV4Init } from "./SignatureV4Base"; +/** + * @public + */ +export declare class SignatureV4 extends SignatureV4Base implements RequestPresigner, RequestSigner, StringSigner, EventSigner, MessageSigner { + private readonly headerFormatter; + constructor({ applyChecksum, credentials, region, service, sha256, uriEscapePath, }: SignatureV4Init & SignatureV4CryptoInit); + presign(originalRequest: HttpRequest, options?: RequestPresigningArguments): Promise; + sign(stringToSign: string, options?: SigningArguments): Promise; + sign(event: FormattedEvent, options: EventSigningArguments): Promise; + sign(event: SignableMessage, options: SigningArguments): Promise; + sign(requestToSign: HttpRequest, options?: RequestSigningArguments): Promise; + private signEvent; + signMessage(signableMessage: SignableMessage, { signingDate, signingRegion, signingService }: SigningArguments): Promise; + private signString; + private signRequest; + private getSignature; + private getSigningKey; +} diff --git a/amplify/functions/deleteDocument/node_modules/@smithy/signature-v4/dist-types/ts3.4/SignatureV4Base.d.ts b/amplify/functions/deleteDocument/node_modules/@smithy/signature-v4/dist-types/ts3.4/SignatureV4Base.d.ts new file mode 
100644 index 0000000..be1da1f --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@smithy/signature-v4/dist-types/ts3.4/SignatureV4Base.d.ts @@ -0,0 +1,69 @@ +import { AwsCredentialIdentity, ChecksumConstructor, DateInput, HashConstructor, HeaderBag, HttpRequest, Provider } from "@smithy/types"; +/** + * @public + */ +export interface SignatureV4Init { + /** + * The service signing name. + */ + service: string; + /** + * The region name or a function that returns a promise that will be + * resolved with the region name. + */ + region: string | Provider; + /** + * The credentials with which the request should be signed or a function + * that returns a promise that will be resolved with credentials. + */ + credentials: AwsCredentialIdentity | Provider; + /** + * A constructor function for a hash object that will calculate SHA-256 HMAC + * checksums. + */ + sha256?: ChecksumConstructor | HashConstructor; + /** + * Whether to uri-escape the request URI path as part of computing the + * canonical request string. This is required for every AWS service, except + * Amazon S3, as of late 2017. + * + * @default [true] + */ + uriEscapePath?: boolean; + /** + * Whether to calculate a checksum of the request body and include it as + * either a request header (when signing) or as a query string parameter + * (when presigning). This is required for AWS Glacier and Amazon S3 and optional for + * every other AWS service as of late 2017. 
+ * + * @default [true] + */ + applyChecksum?: boolean; +} +/** + * @public + */ +export interface SignatureV4CryptoInit { + sha256: ChecksumConstructor | HashConstructor; +} +/** + * @internal + */ +export declare abstract class SignatureV4Base { + protected readonly service: string; + protected readonly regionProvider: Provider; + protected readonly credentialProvider: Provider; + protected readonly sha256: ChecksumConstructor | HashConstructor; + private readonly uriEscapePath; + protected readonly applyChecksum: boolean; + protected constructor({ applyChecksum, credentials, region, service, sha256, uriEscapePath, }: SignatureV4Init & SignatureV4CryptoInit); + protected createCanonicalRequest(request: HttpRequest, canonicalHeaders: HeaderBag, payloadHash: string): string; + protected createStringToSign(longDate: string, credentialScope: string, canonicalRequest: string, algorithmIdentifier: string): Promise; + private getCanonicalPath; + protected validateResolvedCredentials(credentials: unknown): void; + protected formatDate(now: DateInput): { + longDate: string; + shortDate: string; + }; + protected getCanonicalHeaderList(headers: object): string; +} diff --git a/amplify/functions/deleteDocument/node_modules/@smithy/signature-v4/dist-types/ts3.4/constants.d.ts b/amplify/functions/deleteDocument/node_modules/@smithy/signature-v4/dist-types/ts3.4/constants.d.ts new file mode 100644 index 0000000..ff54b67 --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@smithy/signature-v4/dist-types/ts3.4/constants.d.ts @@ -0,0 +1,43 @@ +export declare const ALGORITHM_QUERY_PARAM = "X-Amz-Algorithm"; +export declare const CREDENTIAL_QUERY_PARAM = "X-Amz-Credential"; +export declare const AMZ_DATE_QUERY_PARAM = "X-Amz-Date"; +export declare const SIGNED_HEADERS_QUERY_PARAM = "X-Amz-SignedHeaders"; +export declare const EXPIRES_QUERY_PARAM = "X-Amz-Expires"; +export declare const SIGNATURE_QUERY_PARAM = "X-Amz-Signature"; +export declare const TOKEN_QUERY_PARAM = 
"X-Amz-Security-Token"; +export declare const REGION_SET_PARAM = "X-Amz-Region-Set"; +export declare const AUTH_HEADER = "authorization"; +export declare const AMZ_DATE_HEADER: string; +export declare const DATE_HEADER = "date"; +export declare const GENERATED_HEADERS: string[]; +export declare const SIGNATURE_HEADER: string; +export declare const SHA256_HEADER = "x-amz-content-sha256"; +export declare const TOKEN_HEADER: string; +export declare const HOST_HEADER = "host"; +export declare const ALWAYS_UNSIGNABLE_HEADERS: { + authorization: boolean; + "cache-control": boolean; + connection: boolean; + expect: boolean; + from: boolean; + "keep-alive": boolean; + "max-forwards": boolean; + pragma: boolean; + referer: boolean; + te: boolean; + trailer: boolean; + "transfer-encoding": boolean; + upgrade: boolean; + "user-agent": boolean; + "x-amzn-trace-id": boolean; +}; +export declare const PROXY_HEADER_PATTERN: RegExp; +export declare const SEC_HEADER_PATTERN: RegExp; +export declare const UNSIGNABLE_PATTERNS: RegExp[]; +export declare const ALGORITHM_IDENTIFIER = "AWS4-HMAC-SHA256"; +export declare const ALGORITHM_IDENTIFIER_V4A = "AWS4-ECDSA-P256-SHA256"; +export declare const EVENT_ALGORITHM_IDENTIFIER = "AWS4-HMAC-SHA256-PAYLOAD"; +export declare const UNSIGNED_PAYLOAD = "UNSIGNED-PAYLOAD"; +export declare const MAX_CACHE_SIZE = 50; +export declare const KEY_TYPE_IDENTIFIER = "aws4_request"; +export declare const MAX_PRESIGNED_TTL: number; diff --git a/amplify/functions/deleteDocument/node_modules/@smithy/signature-v4/dist-types/ts3.4/credentialDerivation.d.ts b/amplify/functions/deleteDocument/node_modules/@smithy/signature-v4/dist-types/ts3.4/credentialDerivation.d.ts new file mode 100644 index 0000000..6cba9b6 --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@smithy/signature-v4/dist-types/ts3.4/credentialDerivation.d.ts @@ -0,0 +1,30 @@ +import { AwsCredentialIdentity, ChecksumConstructor, HashConstructor } from "@smithy/types"; +/** + * 
Create a string describing the scope of credentials used to sign a request. + * + * @internal + * + * @param shortDate - the current calendar date in the form YYYYMMDD. + * @param region - the AWS region in which the service resides. + * @param service - the service to which the signed request is being sent. + */ +export declare const createScope: (shortDate: string, region: string, service: string) => string; +/** + * Derive a signing key from its composite parts. + * + * @internal + * + * @param sha256Constructor - a constructor function that can instantiate SHA-256 + * hash objects. + * @param credentials - the credentials with which the request will be + * signed. + * @param shortDate - the current calendar date in the form YYYYMMDD. + * @param region - the AWS region in which the service resides. + * @param service - the service to which the signed request is being + * sent. + */ +export declare const getSigningKey: (sha256Constructor: ChecksumConstructor | HashConstructor, credentials: AwsCredentialIdentity, shortDate: string, region: string, service: string) => Promise; +/** + * @internal + */ +export declare const clearCredentialCache: () => void; diff --git a/amplify/functions/deleteDocument/node_modules/@smithy/signature-v4/dist-types/ts3.4/getCanonicalHeaders.d.ts b/amplify/functions/deleteDocument/node_modules/@smithy/signature-v4/dist-types/ts3.4/getCanonicalHeaders.d.ts new file mode 100644 index 0000000..e8f2e98 --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@smithy/signature-v4/dist-types/ts3.4/getCanonicalHeaders.d.ts @@ -0,0 +1,5 @@ +import { HeaderBag, HttpRequest } from "@smithy/types"; +/** + * @internal + */ +export declare const getCanonicalHeaders: ({ headers }: HttpRequest, unsignableHeaders?: Set, signableHeaders?: Set) => HeaderBag; diff --git a/amplify/functions/deleteDocument/node_modules/@smithy/signature-v4/dist-types/ts3.4/getCanonicalQuery.d.ts 
b/amplify/functions/deleteDocument/node_modules/@smithy/signature-v4/dist-types/ts3.4/getCanonicalQuery.d.ts new file mode 100644 index 0000000..6a2d4fa --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@smithy/signature-v4/dist-types/ts3.4/getCanonicalQuery.d.ts @@ -0,0 +1,5 @@ +import { HttpRequest } from "@smithy/types"; +/** + * @internal + */ +export declare const getCanonicalQuery: ({ query }: HttpRequest) => string; diff --git a/amplify/functions/deleteDocument/node_modules/@smithy/signature-v4/dist-types/ts3.4/getPayloadHash.d.ts b/amplify/functions/deleteDocument/node_modules/@smithy/signature-v4/dist-types/ts3.4/getPayloadHash.d.ts new file mode 100644 index 0000000..c14a46d --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@smithy/signature-v4/dist-types/ts3.4/getPayloadHash.d.ts @@ -0,0 +1,5 @@ +import { ChecksumConstructor, HashConstructor, HttpRequest } from "@smithy/types"; +/** + * @internal + */ +export declare const getPayloadHash: ({ headers, body }: HttpRequest, hashConstructor: ChecksumConstructor | HashConstructor) => Promise; diff --git a/amplify/functions/deleteDocument/node_modules/@smithy/signature-v4/dist-types/ts3.4/headerUtil.d.ts b/amplify/functions/deleteDocument/node_modules/@smithy/signature-v4/dist-types/ts3.4/headerUtil.d.ts new file mode 100644 index 0000000..41ca217 --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@smithy/signature-v4/dist-types/ts3.4/headerUtil.d.ts @@ -0,0 +1,4 @@ +import { HeaderBag } from "@smithy/types"; +export declare const hasHeader: (soughtHeader: string, headers: HeaderBag) => boolean; +export declare const getHeaderValue: (soughtHeader: string, headers: HeaderBag) => string | undefined; +export declare const deleteHeader: (soughtHeader: string, headers: HeaderBag) => void; diff --git a/amplify/functions/deleteDocument/node_modules/@smithy/signature-v4/dist-types/ts3.4/index.d.ts 
b/amplify/functions/deleteDocument/node_modules/@smithy/signature-v4/dist-types/ts3.4/index.d.ts new file mode 100644 index 0000000..c9fa5f6 --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@smithy/signature-v4/dist-types/ts3.4/index.d.ts @@ -0,0 +1,11 @@ +export * from "./SignatureV4"; +export * from "./constants"; +export { getCanonicalHeaders } from "./getCanonicalHeaders"; +export { getCanonicalQuery } from "./getCanonicalQuery"; +export { getPayloadHash } from "./getPayloadHash"; +export { moveHeadersToQuery } from "./moveHeadersToQuery"; +export { prepareRequest } from "./prepareRequest"; +export * from "./credentialDerivation"; +export { SignatureV4Init, SignatureV4CryptoInit, SignatureV4Base } from "./SignatureV4Base"; +export { hasHeader } from "./headerUtil"; +export * from "./signature-v4a-container"; diff --git a/amplify/functions/deleteDocument/node_modules/@smithy/signature-v4/dist-types/ts3.4/moveHeadersToQuery.d.ts b/amplify/functions/deleteDocument/node_modules/@smithy/signature-v4/dist-types/ts3.4/moveHeadersToQuery.d.ts new file mode 100644 index 0000000..2017f3b --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@smithy/signature-v4/dist-types/ts3.4/moveHeadersToQuery.d.ts @@ -0,0 +1,10 @@ +import { HttpRequest as IHttpRequest, QueryParameterBag } from "@smithy/types"; +/** + * @internal + */ +export declare const moveHeadersToQuery: (request: IHttpRequest, options?: { + unhoistableHeaders?: Set; + hoistableHeaders?: Set; +}) => IHttpRequest & { + query: QueryParameterBag; +}; diff --git a/amplify/functions/deleteDocument/node_modules/@smithy/signature-v4/dist-types/ts3.4/prepareRequest.d.ts b/amplify/functions/deleteDocument/node_modules/@smithy/signature-v4/dist-types/ts3.4/prepareRequest.d.ts new file mode 100644 index 0000000..57cf782 --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@smithy/signature-v4/dist-types/ts3.4/prepareRequest.d.ts @@ -0,0 +1,5 @@ +import { HttpRequest as IHttpRequest 
} from "@smithy/types"; +/** + * @internal + */ +export declare const prepareRequest: (request: IHttpRequest) => IHttpRequest; diff --git a/amplify/functions/deleteDocument/node_modules/@smithy/signature-v4/dist-types/ts3.4/signature-v4a-container.d.ts b/amplify/functions/deleteDocument/node_modules/@smithy/signature-v4/dist-types/ts3.4/signature-v4a-container.d.ts new file mode 100644 index 0000000..f1adc97 --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@smithy/signature-v4/dist-types/ts3.4/signature-v4a-container.d.ts @@ -0,0 +1,24 @@ +import { RequestSigner } from "@smithy/types"; +/** + * @public + */ +export type OptionalSigV4aSigner = { + /** + * This constructor is not typed so as not to require a type import + * from the signature-v4a package. + * + * The true type is SignatureV4a from @smithy/signature-v4a. + */ + new (options: any): RequestSigner; +}; +/** + * @public + * + * \@smithy/signature-v4a will install the constructor in this + * container if it's installed. + * + * This avoids a runtime-require being interpreted statically by bundlers. 
+ */ +export declare const signatureV4aContainer: { + SignatureV4a: null | OptionalSigV4aSigner; +}; diff --git a/amplify/functions/deleteDocument/node_modules/@smithy/signature-v4/dist-types/ts3.4/suite.fixture.d.ts b/amplify/functions/deleteDocument/node_modules/@smithy/signature-v4/dist-types/ts3.4/suite.fixture.d.ts new file mode 100644 index 0000000..9363eeb --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@smithy/signature-v4/dist-types/ts3.4/suite.fixture.d.ts @@ -0,0 +1,14 @@ +import { HttpRequest } from "@smithy/types"; +export interface TestCase { + name: string; + request: HttpRequest; + authorization: string; +} +export declare const region = "us-east-1"; +export declare const service = "service"; +export declare const credentials: { + accessKeyId: string; + secretAccessKey: string; +}; +export declare const signingDate: Date; +export declare const requests: Array; diff --git a/amplify/functions/deleteDocument/node_modules/@smithy/signature-v4/dist-types/ts3.4/utilDate.d.ts b/amplify/functions/deleteDocument/node_modules/@smithy/signature-v4/dist-types/ts3.4/utilDate.d.ts new file mode 100644 index 0000000..9a6f383 --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@smithy/signature-v4/dist-types/ts3.4/utilDate.d.ts @@ -0,0 +1,2 @@ +export declare const iso8601: (time: number | string | Date) => string; +export declare const toDate: (time: number | string | Date) => Date; diff --git a/amplify/functions/deleteDocument/node_modules/@smithy/signature-v4/dist-types/utilDate.d.ts b/amplify/functions/deleteDocument/node_modules/@smithy/signature-v4/dist-types/utilDate.d.ts new file mode 100644 index 0000000..e8c6a68 --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@smithy/signature-v4/dist-types/utilDate.d.ts @@ -0,0 +1,2 @@ +export declare const iso8601: (time: number | string | Date) => string; +export declare const toDate: (time: number | string | Date) => Date; diff --git 
a/amplify/functions/deleteDocument/node_modules/@smithy/signature-v4/package.json b/amplify/functions/deleteDocument/node_modules/@smithy/signature-v4/package.json new file mode 100644 index 0000000..56e028c --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@smithy/signature-v4/package.json @@ -0,0 +1,69 @@ +{ + "name": "@smithy/signature-v4", + "version": "5.1.0", + "description": "A standalone implementation of the AWS Signature V4 request signing algorithm", + "main": "./dist-cjs/index.js", + "module": "./dist-es/index.js", + "types": "./dist-types/index.d.ts", + "scripts": { + "build": "concurrently 'yarn:build:cjs' 'yarn:build:es' 'yarn:build:types && yarn build:types:downlevel'", + "build:cjs": "node ../../scripts/inline signature-v4", + "build:es": "yarn g:tsc -p tsconfig.es.json", + "build:types": "yarn g:tsc -p tsconfig.types.json", + "build:types:downlevel": "rimraf dist-types/ts3.4 && downlevel-dts dist-types dist-types/ts3.4", + "stage-release": "rimraf ./.release && yarn pack && mkdir ./.release && tar zxvf ./package.tgz --directory ./.release && rm ./package.tgz", + "clean": "rimraf ./dist-* && rimraf *.tsbuildinfo || exit 0", + "lint": "eslint -c ../../.eslintrc.js \"src/**/*.ts\"", + "format": "prettier --config ../../prettier.config.js --ignore-path ../../.prettierignore --write \"**/*.{ts,md,json}\"", + "extract:docs": "api-extractor run --local", + "test": "yarn g:vitest run", + "test:watch": "yarn g:vitest watch" + }, + "author": { + "name": "AWS SDK for JavaScript Team", + "url": "https://aws.amazon.com/javascript/" + }, + "license": "Apache-2.0", + "dependencies": { + "@smithy/is-array-buffer": "^4.0.0", + "@smithy/protocol-http": "^5.1.0", + "@smithy/types": "^4.2.0", + "@smithy/util-hex-encoding": "^4.0.0", + "@smithy/util-middleware": "^4.0.2", + "@smithy/util-uri-escape": "^4.0.0", + "@smithy/util-utf8": "^4.0.0", + "tslib": "^2.6.2" + }, + "devDependencies": { + "@aws-crypto/sha256-js": "5.2.0", + "concurrently": "7.0.0", 
+ "downlevel-dts": "0.10.1", + "rimraf": "3.0.2", + "typedoc": "0.23.23" + }, + "engines": { + "node": ">=18.0.0" + }, + "typesVersions": { + "<4.0": { + "dist-types/*": [ + "dist-types/ts3.4/*" + ] + } + }, + "files": [ + "dist-*/**" + ], + "homepage": "https://github.com/smithy-lang/smithy-typescript/tree/main/packages/signature-v4", + "repository": { + "type": "git", + "url": "https://github.com/smithy-lang/smithy-typescript.git", + "directory": "packages/signature-v4" + }, + "typedoc": { + "entryPoint": "src/index.ts" + }, + "publishConfig": { + "directory": ".release/package" + } +} \ No newline at end of file diff --git a/amplify/functions/deleteDocument/node_modules/@smithy/smithy-client/LICENSE b/amplify/functions/deleteDocument/node_modules/@smithy/smithy-client/LICENSE new file mode 100644 index 0000000..e907b58 --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@smithy/smithy-client/LICENSE @@ -0,0 +1,201 @@ + Apache License + Version 2.0, January 2004 + http://www.apache.org/licenses/ + + TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION + + 1. Definitions. + + "License" shall mean the terms and conditions for use, reproduction, + and distribution as defined by Sections 1 through 9 of this document. + + "Licensor" shall mean the copyright owner or entity authorized by + the copyright owner that is granting the License. + + "Legal Entity" shall mean the union of the acting entity and all + other entities that control, are controlled by, or are under common + control with that entity. For the purposes of this definition, + "control" means (i) the power, direct or indirect, to cause the + direction or management of such entity, whether by contract or + otherwise, or (ii) ownership of fifty percent (50%) or more of the + outstanding shares, or (iii) beneficial ownership of such entity. + + "You" (or "Your") shall mean an individual or Legal Entity + exercising permissions granted by this License. 
+ + "Source" form shall mean the preferred form for making modifications, + including but not limited to software source code, documentation + source, and configuration files. + + "Object" form shall mean any form resulting from mechanical + transformation or translation of a Source form, including but + not limited to compiled object code, generated documentation, + and conversions to other media types. + + "Work" shall mean the work of authorship, whether in Source or + Object form, made available under the License, as indicated by a + copyright notice that is included in or attached to the work + (an example is provided in the Appendix below). + + "Derivative Works" shall mean any work, whether in Source or Object + form, that is based on (or derived from) the Work and for which the + editorial revisions, annotations, elaborations, or other modifications + represent, as a whole, an original work of authorship. For the purposes + of this License, Derivative Works shall not include works that remain + separable from, or merely link (or bind by name) to the interfaces of, + the Work and Derivative Works thereof. + + "Contribution" shall mean any work of authorship, including + the original version of the Work and any modifications or additions + to that Work or Derivative Works thereof, that is intentionally + submitted to Licensor for inclusion in the Work by the copyright owner + or by an individual or Legal Entity authorized to submit on behalf of + the copyright owner. 
For the purposes of this definition, "submitted" + means any form of electronic, verbal, or written communication sent + to the Licensor or its representatives, including but not limited to + communication on electronic mailing lists, source code control systems, + and issue tracking systems that are managed by, or on behalf of, the + Licensor for the purpose of discussing and improving the Work, but + excluding communication that is conspicuously marked or otherwise + designated in writing by the copyright owner as "Not a Contribution." + + "Contributor" shall mean Licensor and any individual or Legal Entity + on behalf of whom a Contribution has been received by Licensor and + subsequently incorporated within the Work. + + 2. Grant of Copyright License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + copyright license to reproduce, prepare Derivative Works of, + publicly display, publicly perform, sublicense, and distribute the + Work and such Derivative Works in Source or Object form. + + 3. Grant of Patent License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + (except as stated in this section) patent license to make, have made, + use, offer to sell, sell, import, and otherwise transfer the Work, + where such license applies only to those patent claims licensable + by such Contributor that are necessarily infringed by their + Contribution(s) alone or by combination of their Contribution(s) + with the Work to which such Contribution(s) was submitted. 
If You + institute patent litigation against any entity (including a + cross-claim or counterclaim in a lawsuit) alleging that the Work + or a Contribution incorporated within the Work constitutes direct + or contributory patent infringement, then any patent licenses + granted to You under this License for that Work shall terminate + as of the date such litigation is filed. + + 4. Redistribution. You may reproduce and distribute copies of the + Work or Derivative Works thereof in any medium, with or without + modifications, and in Source or Object form, provided that You + meet the following conditions: + + (a) You must give any other recipients of the Work or + Derivative Works a copy of this License; and + + (b) You must cause any modified files to carry prominent notices + stating that You changed the files; and + + (c) You must retain, in the Source form of any Derivative Works + that You distribute, all copyright, patent, trademark, and + attribution notices from the Source form of the Work, + excluding those notices that do not pertain to any part of + the Derivative Works; and + + (d) If the Work includes a "NOTICE" text file as part of its + distribution, then any Derivative Works that You distribute must + include a readable copy of the attribution notices contained + within such NOTICE file, excluding those notices that do not + pertain to any part of the Derivative Works, in at least one + of the following places: within a NOTICE text file distributed + as part of the Derivative Works; within the Source form or + documentation, if provided along with the Derivative Works; or, + within a display generated by the Derivative Works, if and + wherever such third-party notices normally appear. The contents + of the NOTICE file are for informational purposes only and + do not modify the License. 
You may add Your own attribution + notices within Derivative Works that You distribute, alongside + or as an addendum to the NOTICE text from the Work, provided + that such additional attribution notices cannot be construed + as modifying the License. + + You may add Your own copyright statement to Your modifications and + may provide additional or different license terms and conditions + for use, reproduction, or distribution of Your modifications, or + for any such Derivative Works as a whole, provided Your use, + reproduction, and distribution of the Work otherwise complies with + the conditions stated in this License. + + 5. Submission of Contributions. Unless You explicitly state otherwise, + any Contribution intentionally submitted for inclusion in the Work + by You to the Licensor shall be under the terms and conditions of + this License, without any additional terms or conditions. + Notwithstanding the above, nothing herein shall supersede or modify + the terms of any separate license agreement you may have executed + with Licensor regarding such Contributions. + + 6. Trademarks. This License does not grant permission to use the trade + names, trademarks, service marks, or product names of the Licensor, + except as required for reasonable and customary use in describing the + origin of the Work and reproducing the content of the NOTICE file. + + 7. Disclaimer of Warranty. Unless required by applicable law or + agreed to in writing, Licensor provides the Work (and each + Contributor provides its Contributions) on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or + implied, including, without limitation, any warranties or conditions + of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A + PARTICULAR PURPOSE. You are solely responsible for determining the + appropriateness of using or redistributing the Work and assume any + risks associated with Your exercise of permissions under this License. + + 8. 
Limitation of Liability. In no event and under no legal theory, + whether in tort (including negligence), contract, or otherwise, + unless required by applicable law (such as deliberate and grossly + negligent acts) or agreed to in writing, shall any Contributor be + liable to You for damages, including any direct, indirect, special, + incidental, or consequential damages of any character arising as a + result of this License or out of the use or inability to use the + Work (including but not limited to damages for loss of goodwill, + work stoppage, computer failure or malfunction, or any and all + other commercial damages or losses), even if such Contributor + has been advised of the possibility of such damages. + + 9. Accepting Warranty or Additional Liability. While redistributing + the Work or Derivative Works thereof, You may choose to offer, + and charge a fee for, acceptance of support, warranty, indemnity, + or other liability obligations and/or rights consistent with this + License. However, in accepting such obligations, You may act only + on Your own behalf and on Your sole responsibility, not on behalf + of any other Contributor, and only if You agree to indemnify, + defend, and hold each Contributor harmless for any liability + incurred by, or claims asserted against, such Contributor by reason + of your accepting any such warranty or additional liability. + + END OF TERMS AND CONDITIONS + + APPENDIX: How to apply the Apache License to your work. + + To apply the Apache License to your work, attach the following + boilerplate notice, with the fields enclosed by brackets "{}" + replaced with your own identifying information. (Don't include + the brackets!) The text should be enclosed in the appropriate + comment syntax for the file format. We also recommend that a + file or class name and description of purpose be included on the + same "printed page" as the copyright notice for easier + identification within third-party archives. 
+ + Copyright 2019 Amazon.com, Inc. or its affiliates. All Rights Reserved. + + Licensed under the Apache License, Version 2.0 (the "License"); + you may not use this file except in compliance with the License. + You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + + Unless required by applicable law or agreed to in writing, software + distributed under the License is distributed on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + See the License for the specific language governing permissions and + limitations under the License. diff --git a/amplify/functions/deleteDocument/node_modules/@smithy/smithy-client/README.md b/amplify/functions/deleteDocument/node_modules/@smithy/smithy-client/README.md new file mode 100644 index 0000000..365cd62 --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@smithy/smithy-client/README.md @@ -0,0 +1,10 @@ +# @smithy/smithy-client + +[![NPM version](https://img.shields.io/npm/v/@smithy/smithy-client/latest.svg)](https://www.npmjs.com/package/@smithy/smithy-client) +[![NPM downloads](https://img.shields.io/npm/dm/@smithy/smithy-client.svg)](https://www.npmjs.com/package/@smithy/smithy-client) + +> An internal package + +## Usage + +You probably shouldn't, at least directly. 
diff --git a/amplify/functions/deleteDocument/node_modules/@smithy/smithy-client/dist-cjs/NoOpLogger.js b/amplify/functions/deleteDocument/node_modules/@smithy/smithy-client/dist-cjs/NoOpLogger.js new file mode 100644 index 0000000..532e610 --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@smithy/smithy-client/dist-cjs/NoOpLogger.js @@ -0,0 +1 @@ +module.exports = require("./index.js"); \ No newline at end of file diff --git a/amplify/functions/deleteDocument/node_modules/@smithy/smithy-client/dist-cjs/client.js b/amplify/functions/deleteDocument/node_modules/@smithy/smithy-client/dist-cjs/client.js new file mode 100644 index 0000000..532e610 --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@smithy/smithy-client/dist-cjs/client.js @@ -0,0 +1 @@ +module.exports = require("./index.js"); \ No newline at end of file diff --git a/amplify/functions/deleteDocument/node_modules/@smithy/smithy-client/dist-cjs/collect-stream-body.js b/amplify/functions/deleteDocument/node_modules/@smithy/smithy-client/dist-cjs/collect-stream-body.js new file mode 100644 index 0000000..532e610 --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@smithy/smithy-client/dist-cjs/collect-stream-body.js @@ -0,0 +1 @@ +module.exports = require("./index.js"); \ No newline at end of file diff --git a/amplify/functions/deleteDocument/node_modules/@smithy/smithy-client/dist-cjs/command.js b/amplify/functions/deleteDocument/node_modules/@smithy/smithy-client/dist-cjs/command.js new file mode 100644 index 0000000..532e610 --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@smithy/smithy-client/dist-cjs/command.js @@ -0,0 +1 @@ +module.exports = require("./index.js"); \ No newline at end of file diff --git a/amplify/functions/deleteDocument/node_modules/@smithy/smithy-client/dist-cjs/constants.js b/amplify/functions/deleteDocument/node_modules/@smithy/smithy-client/dist-cjs/constants.js new file mode 100644 index 0000000..532e610 --- /dev/null 
+++ b/amplify/functions/deleteDocument/node_modules/@smithy/smithy-client/dist-cjs/constants.js @@ -0,0 +1 @@ +module.exports = require("./index.js"); \ No newline at end of file diff --git a/amplify/functions/deleteDocument/node_modules/@smithy/smithy-client/dist-cjs/create-aggregated-client.js b/amplify/functions/deleteDocument/node_modules/@smithy/smithy-client/dist-cjs/create-aggregated-client.js new file mode 100644 index 0000000..532e610 --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@smithy/smithy-client/dist-cjs/create-aggregated-client.js @@ -0,0 +1 @@ +module.exports = require("./index.js"); \ No newline at end of file diff --git a/amplify/functions/deleteDocument/node_modules/@smithy/smithy-client/dist-cjs/date-utils.js b/amplify/functions/deleteDocument/node_modules/@smithy/smithy-client/dist-cjs/date-utils.js new file mode 100644 index 0000000..532e610 --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@smithy/smithy-client/dist-cjs/date-utils.js @@ -0,0 +1 @@ +module.exports = require("./index.js"); \ No newline at end of file diff --git a/amplify/functions/deleteDocument/node_modules/@smithy/smithy-client/dist-cjs/default-error-handler.js b/amplify/functions/deleteDocument/node_modules/@smithy/smithy-client/dist-cjs/default-error-handler.js new file mode 100644 index 0000000..532e610 --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@smithy/smithy-client/dist-cjs/default-error-handler.js @@ -0,0 +1 @@ +module.exports = require("./index.js"); \ No newline at end of file diff --git a/amplify/functions/deleteDocument/node_modules/@smithy/smithy-client/dist-cjs/defaults-mode.js b/amplify/functions/deleteDocument/node_modules/@smithy/smithy-client/dist-cjs/defaults-mode.js new file mode 100644 index 0000000..532e610 --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@smithy/smithy-client/dist-cjs/defaults-mode.js @@ -0,0 +1 @@ +module.exports = require("./index.js"); \ No newline at end of 
file diff --git a/amplify/functions/deleteDocument/node_modules/@smithy/smithy-client/dist-cjs/emitWarningIfUnsupportedVersion.js b/amplify/functions/deleteDocument/node_modules/@smithy/smithy-client/dist-cjs/emitWarningIfUnsupportedVersion.js new file mode 100644 index 0000000..532e610 --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@smithy/smithy-client/dist-cjs/emitWarningIfUnsupportedVersion.js @@ -0,0 +1 @@ +module.exports = require("./index.js"); \ No newline at end of file diff --git a/amplify/functions/deleteDocument/node_modules/@smithy/smithy-client/dist-cjs/exceptions.js b/amplify/functions/deleteDocument/node_modules/@smithy/smithy-client/dist-cjs/exceptions.js new file mode 100644 index 0000000..532e610 --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@smithy/smithy-client/dist-cjs/exceptions.js @@ -0,0 +1 @@ +module.exports = require("./index.js"); \ No newline at end of file diff --git a/amplify/functions/deleteDocument/node_modules/@smithy/smithy-client/dist-cjs/extended-encode-uri-component.js b/amplify/functions/deleteDocument/node_modules/@smithy/smithy-client/dist-cjs/extended-encode-uri-component.js new file mode 100644 index 0000000..532e610 --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@smithy/smithy-client/dist-cjs/extended-encode-uri-component.js @@ -0,0 +1 @@ +module.exports = require("./index.js"); \ No newline at end of file diff --git a/amplify/functions/deleteDocument/node_modules/@smithy/smithy-client/dist-cjs/extensions/checksum.js b/amplify/functions/deleteDocument/node_modules/@smithy/smithy-client/dist-cjs/extensions/checksum.js new file mode 100644 index 0000000..0440577 --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@smithy/smithy-client/dist-cjs/extensions/checksum.js @@ -0,0 +1 @@ +module.exports = require("../index.js"); \ No newline at end of file diff --git 
a/amplify/functions/deleteDocument/node_modules/@smithy/smithy-client/dist-cjs/extensions/defaultExtensionConfiguration.js b/amplify/functions/deleteDocument/node_modules/@smithy/smithy-client/dist-cjs/extensions/defaultExtensionConfiguration.js new file mode 100644 index 0000000..0440577 --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@smithy/smithy-client/dist-cjs/extensions/defaultExtensionConfiguration.js @@ -0,0 +1 @@ +module.exports = require("../index.js"); \ No newline at end of file diff --git a/amplify/functions/deleteDocument/node_modules/@smithy/smithy-client/dist-cjs/extensions/index.js b/amplify/functions/deleteDocument/node_modules/@smithy/smithy-client/dist-cjs/extensions/index.js new file mode 100644 index 0000000..0440577 --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@smithy/smithy-client/dist-cjs/extensions/index.js @@ -0,0 +1 @@ +module.exports = require("../index.js"); \ No newline at end of file diff --git a/amplify/functions/deleteDocument/node_modules/@smithy/smithy-client/dist-cjs/extensions/retry.js b/amplify/functions/deleteDocument/node_modules/@smithy/smithy-client/dist-cjs/extensions/retry.js new file mode 100644 index 0000000..0440577 --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@smithy/smithy-client/dist-cjs/extensions/retry.js @@ -0,0 +1 @@ +module.exports = require("../index.js"); \ No newline at end of file diff --git a/amplify/functions/deleteDocument/node_modules/@smithy/smithy-client/dist-cjs/get-array-if-single-item.js b/amplify/functions/deleteDocument/node_modules/@smithy/smithy-client/dist-cjs/get-array-if-single-item.js new file mode 100644 index 0000000..532e610 --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@smithy/smithy-client/dist-cjs/get-array-if-single-item.js @@ -0,0 +1 @@ +module.exports = require("./index.js"); \ No newline at end of file diff --git 
a/amplify/functions/deleteDocument/node_modules/@smithy/smithy-client/dist-cjs/get-value-from-text-node.js b/amplify/functions/deleteDocument/node_modules/@smithy/smithy-client/dist-cjs/get-value-from-text-node.js new file mode 100644 index 0000000..532e610 --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@smithy/smithy-client/dist-cjs/get-value-from-text-node.js @@ -0,0 +1 @@ +module.exports = require("./index.js"); \ No newline at end of file diff --git a/amplify/functions/deleteDocument/node_modules/@smithy/smithy-client/dist-cjs/index.js b/amplify/functions/deleteDocument/node_modules/@smithy/smithy-client/dist-cjs/index.js new file mode 100644 index 0000000..c410d8d --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@smithy/smithy-client/dist-cjs/index.js @@ -0,0 +1,1321 @@ +var __defProp = Object.defineProperty; +var __getOwnPropDesc = Object.getOwnPropertyDescriptor; +var __getOwnPropNames = Object.getOwnPropertyNames; +var __hasOwnProp = Object.prototype.hasOwnProperty; +var __name = (target, value) => __defProp(target, "name", { value, configurable: true }); +var __export = (target, all) => { + for (var name in all) + __defProp(target, name, { get: all[name], enumerable: true }); +}; +var __copyProps = (to, from, except, desc) => { + if (from && typeof from === "object" || typeof from === "function") { + for (let key of __getOwnPropNames(from)) + if (!__hasOwnProp.call(to, key) && key !== except) + __defProp(to, key, { get: () => from[key], enumerable: !(desc = __getOwnPropDesc(from, key)) || desc.enumerable }); + } + return to; +}; +var __toCommonJS = (mod) => __copyProps(__defProp({}, "__esModule", { value: true }), mod); + +// src/index.ts +var src_exports = {}; +__export(src_exports, { + Client: () => Client, + Command: () => Command, + LazyJsonString: () => LazyJsonString, + NoOpLogger: () => NoOpLogger, + SENSITIVE_STRING: () => SENSITIVE_STRING, + ServiceException: () => ServiceException, + _json: () => _json, + 
collectBody: () => import_protocols.collectBody, + convertMap: () => convertMap, + createAggregatedClient: () => createAggregatedClient, + dateToUtcString: () => dateToUtcString, + decorateServiceException: () => decorateServiceException, + emitWarningIfUnsupportedVersion: () => emitWarningIfUnsupportedVersion, + expectBoolean: () => expectBoolean, + expectByte: () => expectByte, + expectFloat32: () => expectFloat32, + expectInt: () => expectInt, + expectInt32: () => expectInt32, + expectLong: () => expectLong, + expectNonNull: () => expectNonNull, + expectNumber: () => expectNumber, + expectObject: () => expectObject, + expectShort: () => expectShort, + expectString: () => expectString, + expectUnion: () => expectUnion, + extendedEncodeURIComponent: () => import_protocols.extendedEncodeURIComponent, + getArrayIfSingleItem: () => getArrayIfSingleItem, + getDefaultClientConfiguration: () => getDefaultClientConfiguration, + getDefaultExtensionConfiguration: () => getDefaultExtensionConfiguration, + getValueFromTextNode: () => getValueFromTextNode, + handleFloat: () => handleFloat, + isSerializableHeaderValue: () => isSerializableHeaderValue, + limitedParseDouble: () => limitedParseDouble, + limitedParseFloat: () => limitedParseFloat, + limitedParseFloat32: () => limitedParseFloat32, + loadConfigsForDefaultMode: () => loadConfigsForDefaultMode, + logger: () => logger, + map: () => map, + parseBoolean: () => parseBoolean, + parseEpochTimestamp: () => parseEpochTimestamp, + parseRfc3339DateTime: () => parseRfc3339DateTime, + parseRfc3339DateTimeWithOffset: () => parseRfc3339DateTimeWithOffset, + parseRfc7231DateTime: () => parseRfc7231DateTime, + quoteHeader: () => quoteHeader, + resolveDefaultRuntimeConfig: () => resolveDefaultRuntimeConfig, + resolvedPath: () => import_protocols.resolvedPath, + serializeDateTime: () => serializeDateTime, + serializeFloat: () => serializeFloat, + splitEvery: () => splitEvery, + splitHeader: () => splitHeader, + strictParseByte: () => 
strictParseByte, + strictParseDouble: () => strictParseDouble, + strictParseFloat: () => strictParseFloat, + strictParseFloat32: () => strictParseFloat32, + strictParseInt: () => strictParseInt, + strictParseInt32: () => strictParseInt32, + strictParseLong: () => strictParseLong, + strictParseShort: () => strictParseShort, + take: () => take, + throwDefaultError: () => throwDefaultError, + withBaseException: () => withBaseException +}); +module.exports = __toCommonJS(src_exports); + +// src/client.ts +var import_middleware_stack = require("@smithy/middleware-stack"); +var Client = class { + constructor(config) { + this.config = config; + this.middlewareStack = (0, import_middleware_stack.constructStack)(); + } + static { + __name(this, "Client"); + } + send(command, optionsOrCb, cb) { + const options = typeof optionsOrCb !== "function" ? optionsOrCb : void 0; + const callback = typeof optionsOrCb === "function" ? optionsOrCb : cb; + const useHandlerCache = options === void 0 && this.config.cacheMiddleware === true; + let handler; + if (useHandlerCache) { + if (!this.handlers) { + this.handlers = /* @__PURE__ */ new WeakMap(); + } + const handlers = this.handlers; + if (handlers.has(command.constructor)) { + handler = handlers.get(command.constructor); + } else { + handler = command.resolveMiddleware(this.middlewareStack, this.config, options); + handlers.set(command.constructor, handler); + } + } else { + delete this.handlers; + handler = command.resolveMiddleware(this.middlewareStack, this.config, options); + } + if (callback) { + handler(command).then( + (result) => callback(null, result.output), + (err) => callback(err) + ).catch( + // prevent any errors thrown in the callback from triggering an + // unhandled promise rejection + () => { + } + ); + } else { + return handler(command).then((result) => result.output); + } + } + destroy() { + this.config?.requestHandler?.destroy?.(); + delete this.handlers; + } +}; + +// src/collect-stream-body.ts +var 
import_protocols = require("@smithy/core/protocols"); + +// src/command.ts + +var import_types = require("@smithy/types"); +var Command = class { + constructor() { + this.middlewareStack = (0, import_middleware_stack.constructStack)(); + } + static { + __name(this, "Command"); + } + /** + * Factory for Command ClassBuilder. + * @internal + */ + static classBuilder() { + return new ClassBuilder(); + } + /** + * @internal + */ + resolveMiddlewareWithContext(clientStack, configuration, options, { + middlewareFn, + clientName, + commandName, + inputFilterSensitiveLog, + outputFilterSensitiveLog, + smithyContext, + additionalContext, + CommandCtor + }) { + for (const mw of middlewareFn.bind(this)(CommandCtor, clientStack, configuration, options)) { + this.middlewareStack.use(mw); + } + const stack = clientStack.concat(this.middlewareStack); + const { logger: logger2 } = configuration; + const handlerExecutionContext = { + logger: logger2, + clientName, + commandName, + inputFilterSensitiveLog, + outputFilterSensitiveLog, + [import_types.SMITHY_CONTEXT_KEY]: { + commandInstance: this, + ...smithyContext + }, + ...additionalContext + }; + const { requestHandler } = configuration; + return stack.resolve( + (request) => requestHandler.handle(request.request, options || {}), + handlerExecutionContext + ); + } +}; +var ClassBuilder = class { + constructor() { + this._init = () => { + }; + this._ep = {}; + this._middlewareFn = () => []; + this._commandName = ""; + this._clientName = ""; + this._additionalContext = {}; + this._smithyContext = {}; + this._inputFilterSensitiveLog = (_) => _; + this._outputFilterSensitiveLog = (_) => _; + this._serializer = null; + this._deserializer = null; + } + static { + __name(this, "ClassBuilder"); + } + /** + * Optional init callback. + */ + init(cb) { + this._init = cb; + } + /** + * Set the endpoint parameter instructions. 
+ */ + ep(endpointParameterInstructions) { + this._ep = endpointParameterInstructions; + return this; + } + /** + * Add any number of middleware. + */ + m(middlewareSupplier) { + this._middlewareFn = middlewareSupplier; + return this; + } + /** + * Set the initial handler execution context Smithy field. + */ + s(service, operation, smithyContext = {}) { + this._smithyContext = { + service, + operation, + ...smithyContext + }; + return this; + } + /** + * Set the initial handler execution context. + */ + c(additionalContext = {}) { + this._additionalContext = additionalContext; + return this; + } + /** + * Set constant string identifiers for the operation. + */ + n(clientName, commandName) { + this._clientName = clientName; + this._commandName = commandName; + return this; + } + /** + * Set the input and output sensistive log filters. + */ + f(inputFilter = (_) => _, outputFilter = (_) => _) { + this._inputFilterSensitiveLog = inputFilter; + this._outputFilterSensitiveLog = outputFilter; + return this; + } + /** + * Sets the serializer. + */ + ser(serializer) { + this._serializer = serializer; + return this; + } + /** + * Sets the deserializer. + */ + de(deserializer) { + this._deserializer = deserializer; + return this; + } + /** + * @returns a Command class with the classBuilder properties. + */ + build() { + const closure = this; + let CommandRef; + return CommandRef = class extends Command { + /** + * @public + */ + constructor(...[input]) { + super(); + /** + * @internal + */ + // @ts-ignore used in middlewareFn closure. + this.serialize = closure._serializer; + /** + * @internal + */ + // @ts-ignore used in middlewareFn closure. + this.deserialize = closure._deserializer; + this.input = input ?? 
{}; + closure._init(this); + } + static { + __name(this, "CommandRef"); + } + /** + * @public + */ + static getEndpointParameterInstructions() { + return closure._ep; + } + /** + * @internal + */ + resolveMiddleware(stack, configuration, options) { + return this.resolveMiddlewareWithContext(stack, configuration, options, { + CommandCtor: CommandRef, + middlewareFn: closure._middlewareFn, + clientName: closure._clientName, + commandName: closure._commandName, + inputFilterSensitiveLog: closure._inputFilterSensitiveLog, + outputFilterSensitiveLog: closure._outputFilterSensitiveLog, + smithyContext: closure._smithyContext, + additionalContext: closure._additionalContext + }); + } + }; + } +}; + +// src/constants.ts +var SENSITIVE_STRING = "***SensitiveInformation***"; + +// src/create-aggregated-client.ts +var createAggregatedClient = /* @__PURE__ */ __name((commands, Client2) => { + for (const command of Object.keys(commands)) { + const CommandCtor = commands[command]; + const methodImpl = /* @__PURE__ */ __name(async function(args, optionsOrCb, cb) { + const command2 = new CommandCtor(args); + if (typeof optionsOrCb === "function") { + this.send(command2, optionsOrCb); + } else if (typeof cb === "function") { + if (typeof optionsOrCb !== "object") + throw new Error(`Expected http options but got ${typeof optionsOrCb}`); + this.send(command2, optionsOrCb || {}, cb); + } else { + return this.send(command2, optionsOrCb); + } + }, "methodImpl"); + const methodName = (command[0].toLowerCase() + command.slice(1)).replace(/Command$/, ""); + Client2.prototype[methodName] = methodImpl; + } +}, "createAggregatedClient"); + +// src/parse-utils.ts +var parseBoolean = /* @__PURE__ */ __name((value) => { + switch (value) { + case "true": + return true; + case "false": + return false; + default: + throw new Error(`Unable to parse boolean value "${value}"`); + } +}, "parseBoolean"); +var expectBoolean = /* @__PURE__ */ __name((value) => { + if (value === null || value === void 0) { 
+ return void 0; + } + if (typeof value === "number") { + if (value === 0 || value === 1) { + logger.warn(stackTraceWarning(`Expected boolean, got ${typeof value}: ${value}`)); + } + if (value === 0) { + return false; + } + if (value === 1) { + return true; + } + } + if (typeof value === "string") { + const lower = value.toLowerCase(); + if (lower === "false" || lower === "true") { + logger.warn(stackTraceWarning(`Expected boolean, got ${typeof value}: ${value}`)); + } + if (lower === "false") { + return false; + } + if (lower === "true") { + return true; + } + } + if (typeof value === "boolean") { + return value; + } + throw new TypeError(`Expected boolean, got ${typeof value}: ${value}`); +}, "expectBoolean"); +var expectNumber = /* @__PURE__ */ __name((value) => { + if (value === null || value === void 0) { + return void 0; + } + if (typeof value === "string") { + const parsed = parseFloat(value); + if (!Number.isNaN(parsed)) { + if (String(parsed) !== String(value)) { + logger.warn(stackTraceWarning(`Expected number but observed string: ${value}`)); + } + return parsed; + } + } + if (typeof value === "number") { + return value; + } + throw new TypeError(`Expected number, got ${typeof value}: ${value}`); +}, "expectNumber"); +var MAX_FLOAT = Math.ceil(2 ** 127 * (2 - 2 ** -23)); +var expectFloat32 = /* @__PURE__ */ __name((value) => { + const expected = expectNumber(value); + if (expected !== void 0 && !Number.isNaN(expected) && expected !== Infinity && expected !== -Infinity) { + if (Math.abs(expected) > MAX_FLOAT) { + throw new TypeError(`Expected 32-bit float, got ${value}`); + } + } + return expected; +}, "expectFloat32"); +var expectLong = /* @__PURE__ */ __name((value) => { + if (value === null || value === void 0) { + return void 0; + } + if (Number.isInteger(value) && !Number.isNaN(value)) { + return value; + } + throw new TypeError(`Expected integer, got ${typeof value}: ${value}`); +}, "expectLong"); +var expectInt = expectLong; +var expectInt32 = /* 
@__PURE__ */ __name((value) => expectSizedInt(value, 32), "expectInt32"); +var expectShort = /* @__PURE__ */ __name((value) => expectSizedInt(value, 16), "expectShort"); +var expectByte = /* @__PURE__ */ __name((value) => expectSizedInt(value, 8), "expectByte"); +var expectSizedInt = /* @__PURE__ */ __name((value, size) => { + const expected = expectLong(value); + if (expected !== void 0 && castInt(expected, size) !== expected) { + throw new TypeError(`Expected ${size}-bit integer, got ${value}`); + } + return expected; +}, "expectSizedInt"); +var castInt = /* @__PURE__ */ __name((value, size) => { + switch (size) { + case 32: + return Int32Array.of(value)[0]; + case 16: + return Int16Array.of(value)[0]; + case 8: + return Int8Array.of(value)[0]; + } +}, "castInt"); +var expectNonNull = /* @__PURE__ */ __name((value, location) => { + if (value === null || value === void 0) { + if (location) { + throw new TypeError(`Expected a non-null value for ${location}`); + } + throw new TypeError("Expected a non-null value"); + } + return value; +}, "expectNonNull"); +var expectObject = /* @__PURE__ */ __name((value) => { + if (value === null || value === void 0) { + return void 0; + } + if (typeof value === "object" && !Array.isArray(value)) { + return value; + } + const receivedType = Array.isArray(value) ? 
"array" : typeof value; + throw new TypeError(`Expected object, got ${receivedType}: ${value}`); +}, "expectObject"); +var expectString = /* @__PURE__ */ __name((value) => { + if (value === null || value === void 0) { + return void 0; + } + if (typeof value === "string") { + return value; + } + if (["boolean", "number", "bigint"].includes(typeof value)) { + logger.warn(stackTraceWarning(`Expected string, got ${typeof value}: ${value}`)); + return String(value); + } + throw new TypeError(`Expected string, got ${typeof value}: ${value}`); +}, "expectString"); +var expectUnion = /* @__PURE__ */ __name((value) => { + if (value === null || value === void 0) { + return void 0; + } + const asObject = expectObject(value); + const setKeys = Object.entries(asObject).filter(([, v]) => v != null).map(([k]) => k); + if (setKeys.length === 0) { + throw new TypeError(`Unions must have exactly one non-null member. None were found.`); + } + if (setKeys.length > 1) { + throw new TypeError(`Unions must have exactly one non-null member. 
Keys ${setKeys} were not null.`); + } + return asObject; +}, "expectUnion"); +var strictParseDouble = /* @__PURE__ */ __name((value) => { + if (typeof value == "string") { + return expectNumber(parseNumber(value)); + } + return expectNumber(value); +}, "strictParseDouble"); +var strictParseFloat = strictParseDouble; +var strictParseFloat32 = /* @__PURE__ */ __name((value) => { + if (typeof value == "string") { + return expectFloat32(parseNumber(value)); + } + return expectFloat32(value); +}, "strictParseFloat32"); +var NUMBER_REGEX = /(-?(?:0|[1-9]\d*)(?:\.\d+)?(?:[eE][+-]?\d+)?)|(-?Infinity)|(NaN)/g; +var parseNumber = /* @__PURE__ */ __name((value) => { + const matches = value.match(NUMBER_REGEX); + if (matches === null || matches[0].length !== value.length) { + throw new TypeError(`Expected real number, got implicit NaN`); + } + return parseFloat(value); +}, "parseNumber"); +var limitedParseDouble = /* @__PURE__ */ __name((value) => { + if (typeof value == "string") { + return parseFloatString(value); + } + return expectNumber(value); +}, "limitedParseDouble"); +var handleFloat = limitedParseDouble; +var limitedParseFloat = limitedParseDouble; +var limitedParseFloat32 = /* @__PURE__ */ __name((value) => { + if (typeof value == "string") { + return parseFloatString(value); + } + return expectFloat32(value); +}, "limitedParseFloat32"); +var parseFloatString = /* @__PURE__ */ __name((value) => { + switch (value) { + case "NaN": + return NaN; + case "Infinity": + return Infinity; + case "-Infinity": + return -Infinity; + default: + throw new Error(`Unable to parse float value: ${value}`); + } +}, "parseFloatString"); +var strictParseLong = /* @__PURE__ */ __name((value) => { + if (typeof value === "string") { + return expectLong(parseNumber(value)); + } + return expectLong(value); +}, "strictParseLong"); +var strictParseInt = strictParseLong; +var strictParseInt32 = /* @__PURE__ */ __name((value) => { + if (typeof value === "string") { + return 
expectInt32(parseNumber(value)); + } + return expectInt32(value); +}, "strictParseInt32"); +var strictParseShort = /* @__PURE__ */ __name((value) => { + if (typeof value === "string") { + return expectShort(parseNumber(value)); + } + return expectShort(value); +}, "strictParseShort"); +var strictParseByte = /* @__PURE__ */ __name((value) => { + if (typeof value === "string") { + return expectByte(parseNumber(value)); + } + return expectByte(value); +}, "strictParseByte"); +var stackTraceWarning = /* @__PURE__ */ __name((message) => { + return String(new TypeError(message).stack || message).split("\n").slice(0, 5).filter((s) => !s.includes("stackTraceWarning")).join("\n"); +}, "stackTraceWarning"); +var logger = { + warn: console.warn +}; + +// src/date-utils.ts +var DAYS = ["Sun", "Mon", "Tue", "Wed", "Thu", "Fri", "Sat"]; +var MONTHS = ["Jan", "Feb", "Mar", "Apr", "May", "Jun", "Jul", "Aug", "Sep", "Oct", "Nov", "Dec"]; +function dateToUtcString(date) { + const year = date.getUTCFullYear(); + const month = date.getUTCMonth(); + const dayOfWeek = date.getUTCDay(); + const dayOfMonthInt = date.getUTCDate(); + const hoursInt = date.getUTCHours(); + const minutesInt = date.getUTCMinutes(); + const secondsInt = date.getUTCSeconds(); + const dayOfMonthString = dayOfMonthInt < 10 ? `0${dayOfMonthInt}` : `${dayOfMonthInt}`; + const hoursString = hoursInt < 10 ? `0${hoursInt}` : `${hoursInt}`; + const minutesString = minutesInt < 10 ? `0${minutesInt}` : `${minutesInt}`; + const secondsString = secondsInt < 10 ? 
`0${secondsInt}` : `${secondsInt}`; + return `${DAYS[dayOfWeek]}, ${dayOfMonthString} ${MONTHS[month]} ${year} ${hoursString}:${minutesString}:${secondsString} GMT`; +} +__name(dateToUtcString, "dateToUtcString"); +var RFC3339 = new RegExp(/^(\d{4})-(\d{2})-(\d{2})[tT](\d{2}):(\d{2}):(\d{2})(?:\.(\d+))?[zZ]$/); +var parseRfc3339DateTime = /* @__PURE__ */ __name((value) => { + if (value === null || value === void 0) { + return void 0; + } + if (typeof value !== "string") { + throw new TypeError("RFC-3339 date-times must be expressed as strings"); + } + const match = RFC3339.exec(value); + if (!match) { + throw new TypeError("Invalid RFC-3339 date-time value"); + } + const [_, yearStr, monthStr, dayStr, hours, minutes, seconds, fractionalMilliseconds] = match; + const year = strictParseShort(stripLeadingZeroes(yearStr)); + const month = parseDateValue(monthStr, "month", 1, 12); + const day = parseDateValue(dayStr, "day", 1, 31); + return buildDate(year, month, day, { hours, minutes, seconds, fractionalMilliseconds }); +}, "parseRfc3339DateTime"); +var RFC3339_WITH_OFFSET = new RegExp( + /^(\d{4})-(\d{2})-(\d{2})[tT](\d{2}):(\d{2}):(\d{2})(?:\.(\d+))?(([-+]\d{2}\:\d{2})|[zZ])$/ +); +var parseRfc3339DateTimeWithOffset = /* @__PURE__ */ __name((value) => { + if (value === null || value === void 0) { + return void 0; + } + if (typeof value !== "string") { + throw new TypeError("RFC-3339 date-times must be expressed as strings"); + } + const match = RFC3339_WITH_OFFSET.exec(value); + if (!match) { + throw new TypeError("Invalid RFC-3339 date-time value"); + } + const [_, yearStr, monthStr, dayStr, hours, minutes, seconds, fractionalMilliseconds, offsetStr] = match; + const year = strictParseShort(stripLeadingZeroes(yearStr)); + const month = parseDateValue(monthStr, "month", 1, 12); + const day = parseDateValue(dayStr, "day", 1, 31); + const date = buildDate(year, month, day, { hours, minutes, seconds, fractionalMilliseconds }); + if (offsetStr.toUpperCase() != "Z") { + 
date.setTime(date.getTime() - parseOffsetToMilliseconds(offsetStr)); + } + return date; +}, "parseRfc3339DateTimeWithOffset"); +var IMF_FIXDATE = new RegExp( + /^(?:Mon|Tue|Wed|Thu|Fri|Sat|Sun), (\d{2}) (Jan|Feb|Mar|Apr|May|Jun|Jul|Aug|Sep|Oct|Nov|Dec) (\d{4}) (\d{1,2}):(\d{2}):(\d{2})(?:\.(\d+))? GMT$/ +); +var RFC_850_DATE = new RegExp( + /^(?:Monday|Tuesday|Wednesday|Thursday|Friday|Saturday|Sunday), (\d{2})-(Jan|Feb|Mar|Apr|May|Jun|Jul|Aug|Sep|Oct|Nov|Dec)-(\d{2}) (\d{1,2}):(\d{2}):(\d{2})(?:\.(\d+))? GMT$/ +); +var ASC_TIME = new RegExp( + /^(?:Mon|Tue|Wed|Thu|Fri|Sat|Sun) (Jan|Feb|Mar|Apr|May|Jun|Jul|Aug|Sep|Oct|Nov|Dec) ( [1-9]|\d{2}) (\d{1,2}):(\d{2}):(\d{2})(?:\.(\d+))? (\d{4})$/ +); +var parseRfc7231DateTime = /* @__PURE__ */ __name((value) => { + if (value === null || value === void 0) { + return void 0; + } + if (typeof value !== "string") { + throw new TypeError("RFC-7231 date-times must be expressed as strings"); + } + let match = IMF_FIXDATE.exec(value); + if (match) { + const [_, dayStr, monthStr, yearStr, hours, minutes, seconds, fractionalMilliseconds] = match; + return buildDate( + strictParseShort(stripLeadingZeroes(yearStr)), + parseMonthByShortName(monthStr), + parseDateValue(dayStr, "day", 1, 31), + { hours, minutes, seconds, fractionalMilliseconds } + ); + } + match = RFC_850_DATE.exec(value); + if (match) { + const [_, dayStr, monthStr, yearStr, hours, minutes, seconds, fractionalMilliseconds] = match; + return adjustRfc850Year( + buildDate(parseTwoDigitYear(yearStr), parseMonthByShortName(monthStr), parseDateValue(dayStr, "day", 1, 31), { + hours, + minutes, + seconds, + fractionalMilliseconds + }) + ); + } + match = ASC_TIME.exec(value); + if (match) { + const [_, monthStr, dayStr, hours, minutes, seconds, fractionalMilliseconds, yearStr] = match; + return buildDate( + strictParseShort(stripLeadingZeroes(yearStr)), + parseMonthByShortName(monthStr), + parseDateValue(dayStr.trimLeft(), "day", 1, 31), + { hours, minutes, seconds, 
fractionalMilliseconds } + ); + } + throw new TypeError("Invalid RFC-7231 date-time value"); +}, "parseRfc7231DateTime"); +var parseEpochTimestamp = /* @__PURE__ */ __name((value) => { + if (value === null || value === void 0) { + return void 0; + } + let valueAsDouble; + if (typeof value === "number") { + valueAsDouble = value; + } else if (typeof value === "string") { + valueAsDouble = strictParseDouble(value); + } else if (typeof value === "object" && value.tag === 1) { + valueAsDouble = value.value; + } else { + throw new TypeError("Epoch timestamps must be expressed as floating point numbers or their string representation"); + } + if (Number.isNaN(valueAsDouble) || valueAsDouble === Infinity || valueAsDouble === -Infinity) { + throw new TypeError("Epoch timestamps must be valid, non-Infinite, non-NaN numerics"); + } + return new Date(Math.round(valueAsDouble * 1e3)); +}, "parseEpochTimestamp"); +var buildDate = /* @__PURE__ */ __name((year, month, day, time) => { + const adjustedMonth = month - 1; + validateDayOfMonth(year, adjustedMonth, day); + return new Date( + Date.UTC( + year, + adjustedMonth, + day, + parseDateValue(time.hours, "hour", 0, 23), + parseDateValue(time.minutes, "minute", 0, 59), + // seconds can go up to 60 for leap seconds + parseDateValue(time.seconds, "seconds", 0, 60), + parseMilliseconds(time.fractionalMilliseconds) + ) + ); +}, "buildDate"); +var parseTwoDigitYear = /* @__PURE__ */ __name((value) => { + const thisYear = (/* @__PURE__ */ new Date()).getUTCFullYear(); + const valueInThisCentury = Math.floor(thisYear / 100) * 100 + strictParseShort(stripLeadingZeroes(value)); + if (valueInThisCentury < thisYear) { + return valueInThisCentury + 100; + } + return valueInThisCentury; +}, "parseTwoDigitYear"); +var FIFTY_YEARS_IN_MILLIS = 50 * 365 * 24 * 60 * 60 * 1e3; +var adjustRfc850Year = /* @__PURE__ */ __name((input) => { + if (input.getTime() - (/* @__PURE__ */ new Date()).getTime() > FIFTY_YEARS_IN_MILLIS) { + return new Date( + 
Date.UTC( + input.getUTCFullYear() - 100, + input.getUTCMonth(), + input.getUTCDate(), + input.getUTCHours(), + input.getUTCMinutes(), + input.getUTCSeconds(), + input.getUTCMilliseconds() + ) + ); + } + return input; +}, "adjustRfc850Year"); +var parseMonthByShortName = /* @__PURE__ */ __name((value) => { + const monthIdx = MONTHS.indexOf(value); + if (monthIdx < 0) { + throw new TypeError(`Invalid month: ${value}`); + } + return monthIdx + 1; +}, "parseMonthByShortName"); +var DAYS_IN_MONTH = [31, 28, 31, 30, 31, 30, 31, 31, 30, 31, 30, 31]; +var validateDayOfMonth = /* @__PURE__ */ __name((year, month, day) => { + let maxDays = DAYS_IN_MONTH[month]; + if (month === 1 && isLeapYear(year)) { + maxDays = 29; + } + if (day > maxDays) { + throw new TypeError(`Invalid day for ${MONTHS[month]} in ${year}: ${day}`); + } +}, "validateDayOfMonth"); +var isLeapYear = /* @__PURE__ */ __name((year) => { + return year % 4 === 0 && (year % 100 !== 0 || year % 400 === 0); +}, "isLeapYear"); +var parseDateValue = /* @__PURE__ */ __name((value, type, lower, upper) => { + const dateVal = strictParseByte(stripLeadingZeroes(value)); + if (dateVal < lower || dateVal > upper) { + throw new TypeError(`${type} must be between ${lower} and ${upper}, inclusive`); + } + return dateVal; +}, "parseDateValue"); +var parseMilliseconds = /* @__PURE__ */ __name((value) => { + if (value === null || value === void 0) { + return 0; + } + return strictParseFloat32("0." 
+ value) * 1e3; +}, "parseMilliseconds"); +var parseOffsetToMilliseconds = /* @__PURE__ */ __name((value) => { + const directionStr = value[0]; + let direction = 1; + if (directionStr == "+") { + direction = 1; + } else if (directionStr == "-") { + direction = -1; + } else { + throw new TypeError(`Offset direction, ${directionStr}, must be "+" or "-"`); + } + const hour = Number(value.substring(1, 3)); + const minute = Number(value.substring(4, 6)); + return direction * (hour * 60 + minute) * 60 * 1e3; +}, "parseOffsetToMilliseconds"); +var stripLeadingZeroes = /* @__PURE__ */ __name((value) => { + let idx = 0; + while (idx < value.length - 1 && value.charAt(idx) === "0") { + idx++; + } + if (idx === 0) { + return value; + } + return value.slice(idx); +}, "stripLeadingZeroes"); + +// src/exceptions.ts +var ServiceException = class _ServiceException extends Error { + static { + __name(this, "ServiceException"); + } + constructor(options) { + super(options.message); + Object.setPrototypeOf(this, Object.getPrototypeOf(this).constructor.prototype); + this.name = options.name; + this.$fault = options.$fault; + this.$metadata = options.$metadata; + } + /** + * Checks if a value is an instance of ServiceException (duck typed) + */ + static isInstance(value) { + if (!value) + return false; + const candidate = value; + return _ServiceException.prototype.isPrototypeOf(candidate) || Boolean(candidate.$fault) && Boolean(candidate.$metadata) && (candidate.$fault === "client" || candidate.$fault === "server"); + } + /** + * Custom instanceof check to support the operator for ServiceException base class + */ + static [Symbol.hasInstance](instance) { + if (!instance) + return false; + const candidate = instance; + if (this === _ServiceException) { + return _ServiceException.isInstance(instance); + } + if (_ServiceException.isInstance(instance)) { + if (candidate.name && this.name) { + return this.prototype.isPrototypeOf(instance) || candidate.name === this.name; + } + return 
this.prototype.isPrototypeOf(instance); + } + return false; + } +}; +var decorateServiceException = /* @__PURE__ */ __name((exception, additions = {}) => { + Object.entries(additions).filter(([, v]) => v !== void 0).forEach(([k, v]) => { + if (exception[k] == void 0 || exception[k] === "") { + exception[k] = v; + } + }); + const message = exception.message || exception.Message || "UnknownError"; + exception.message = message; + delete exception.Message; + return exception; +}, "decorateServiceException"); + +// src/default-error-handler.ts +var throwDefaultError = /* @__PURE__ */ __name(({ output, parsedBody, exceptionCtor, errorCode }) => { + const $metadata = deserializeMetadata(output); + const statusCode = $metadata.httpStatusCode ? $metadata.httpStatusCode + "" : void 0; + const response = new exceptionCtor({ + name: parsedBody?.code || parsedBody?.Code || errorCode || statusCode || "UnknownError", + $fault: "client", + $metadata + }); + throw decorateServiceException(response, parsedBody); +}, "throwDefaultError"); +var withBaseException = /* @__PURE__ */ __name((ExceptionCtor) => { + return ({ output, parsedBody, errorCode }) => { + throwDefaultError({ output, parsedBody, exceptionCtor: ExceptionCtor, errorCode }); + }; +}, "withBaseException"); +var deserializeMetadata = /* @__PURE__ */ __name((output) => ({ + httpStatusCode: output.statusCode, + requestId: output.headers["x-amzn-requestid"] ?? output.headers["x-amzn-request-id"] ?? 
output.headers["x-amz-request-id"], + extendedRequestId: output.headers["x-amz-id-2"], + cfId: output.headers["x-amz-cf-id"] +}), "deserializeMetadata"); + +// src/defaults-mode.ts +var loadConfigsForDefaultMode = /* @__PURE__ */ __name((mode) => { + switch (mode) { + case "standard": + return { + retryMode: "standard", + connectionTimeout: 3100 + }; + case "in-region": + return { + retryMode: "standard", + connectionTimeout: 1100 + }; + case "cross-region": + return { + retryMode: "standard", + connectionTimeout: 3100 + }; + case "mobile": + return { + retryMode: "standard", + connectionTimeout: 3e4 + }; + default: + return {}; + } +}, "loadConfigsForDefaultMode"); + +// src/emitWarningIfUnsupportedVersion.ts +var warningEmitted = false; +var emitWarningIfUnsupportedVersion = /* @__PURE__ */ __name((version) => { + if (version && !warningEmitted && parseInt(version.substring(1, version.indexOf("."))) < 16) { + warningEmitted = true; + } +}, "emitWarningIfUnsupportedVersion"); + +// src/extended-encode-uri-component.ts + + +// src/extensions/checksum.ts + +var getChecksumConfiguration = /* @__PURE__ */ __name((runtimeConfig) => { + const checksumAlgorithms = []; + for (const id in import_types.AlgorithmId) { + const algorithmId = import_types.AlgorithmId[id]; + if (runtimeConfig[algorithmId] === void 0) { + continue; + } + checksumAlgorithms.push({ + algorithmId: () => algorithmId, + checksumConstructor: () => runtimeConfig[algorithmId] + }); + } + return { + addChecksumAlgorithm(algo) { + checksumAlgorithms.push(algo); + }, + checksumAlgorithms() { + return checksumAlgorithms; + } + }; +}, "getChecksumConfiguration"); +var resolveChecksumRuntimeConfig = /* @__PURE__ */ __name((clientConfig) => { + const runtimeConfig = {}; + clientConfig.checksumAlgorithms().forEach((checksumAlgorithm) => { + runtimeConfig[checksumAlgorithm.algorithmId()] = checksumAlgorithm.checksumConstructor(); + }); + return runtimeConfig; +}, "resolveChecksumRuntimeConfig"); + +// 
src/extensions/retry.ts +var getRetryConfiguration = /* @__PURE__ */ __name((runtimeConfig) => { + return { + setRetryStrategy(retryStrategy) { + runtimeConfig.retryStrategy = retryStrategy; + }, + retryStrategy() { + return runtimeConfig.retryStrategy; + } + }; +}, "getRetryConfiguration"); +var resolveRetryRuntimeConfig = /* @__PURE__ */ __name((retryStrategyConfiguration) => { + const runtimeConfig = {}; + runtimeConfig.retryStrategy = retryStrategyConfiguration.retryStrategy(); + return runtimeConfig; +}, "resolveRetryRuntimeConfig"); + +// src/extensions/defaultExtensionConfiguration.ts +var getDefaultExtensionConfiguration = /* @__PURE__ */ __name((runtimeConfig) => { + return Object.assign(getChecksumConfiguration(runtimeConfig), getRetryConfiguration(runtimeConfig)); +}, "getDefaultExtensionConfiguration"); +var getDefaultClientConfiguration = getDefaultExtensionConfiguration; +var resolveDefaultRuntimeConfig = /* @__PURE__ */ __name((config) => { + return Object.assign(resolveChecksumRuntimeConfig(config), resolveRetryRuntimeConfig(config)); +}, "resolveDefaultRuntimeConfig"); + +// src/get-array-if-single-item.ts +var getArrayIfSingleItem = /* @__PURE__ */ __name((mayBeArray) => Array.isArray(mayBeArray) ? 
mayBeArray : [mayBeArray], "getArrayIfSingleItem"); + +// src/get-value-from-text-node.ts +var getValueFromTextNode = /* @__PURE__ */ __name((obj) => { + const textNodeName = "#text"; + for (const key in obj) { + if (obj.hasOwnProperty(key) && obj[key][textNodeName] !== void 0) { + obj[key] = obj[key][textNodeName]; + } else if (typeof obj[key] === "object" && obj[key] !== null) { + obj[key] = getValueFromTextNode(obj[key]); + } + } + return obj; +}, "getValueFromTextNode"); + +// src/is-serializable-header-value.ts +var isSerializableHeaderValue = /* @__PURE__ */ __name((value) => { + return value != null; +}, "isSerializableHeaderValue"); + +// src/lazy-json.ts +var LazyJsonString = /* @__PURE__ */ __name(function LazyJsonString2(val) { + const str = Object.assign(new String(val), { + deserializeJSON() { + return JSON.parse(String(val)); + }, + toString() { + return String(val); + }, + toJSON() { + return String(val); + } + }); + return str; +}, "LazyJsonString"); +LazyJsonString.from = (object) => { + if (object && typeof object === "object" && (object instanceof LazyJsonString || "deserializeJSON" in object)) { + return object; + } else if (typeof object === "string" || Object.getPrototypeOf(object) === String.prototype) { + return LazyJsonString(String(object)); + } + return LazyJsonString(JSON.stringify(object)); +}; +LazyJsonString.fromObject = LazyJsonString.from; + +// src/NoOpLogger.ts +var NoOpLogger = class { + static { + __name(this, "NoOpLogger"); + } + trace() { + } + debug() { + } + info() { + } + warn() { + } + error() { + } +}; + +// src/object-mapping.ts +function map(arg0, arg1, arg2) { + let target; + let filter; + let instructions; + if (typeof arg1 === "undefined" && typeof arg2 === "undefined") { + target = {}; + instructions = arg0; + } else { + target = arg0; + if (typeof arg1 === "function") { + filter = arg1; + instructions = arg2; + return mapWithFilter(target, filter, instructions); + } else { + instructions = arg1; + } + } + for 
(const key of Object.keys(instructions)) { + if (!Array.isArray(instructions[key])) { + target[key] = instructions[key]; + continue; + } + applyInstruction(target, null, instructions, key); + } + return target; +} +__name(map, "map"); +var convertMap = /* @__PURE__ */ __name((target) => { + const output = {}; + for (const [k, v] of Object.entries(target || {})) { + output[k] = [, v]; + } + return output; +}, "convertMap"); +var take = /* @__PURE__ */ __name((source, instructions) => { + const out = {}; + for (const key in instructions) { + applyInstruction(out, source, instructions, key); + } + return out; +}, "take"); +var mapWithFilter = /* @__PURE__ */ __name((target, filter, instructions) => { + return map( + target, + Object.entries(instructions).reduce( + (_instructions, [key, value]) => { + if (Array.isArray(value)) { + _instructions[key] = value; + } else { + if (typeof value === "function") { + _instructions[key] = [filter, value()]; + } else { + _instructions[key] = [filter, value]; + } + } + return _instructions; + }, + {} + ) + ); +}, "mapWithFilter"); +var applyInstruction = /* @__PURE__ */ __name((target, source, instructions, targetKey) => { + if (source !== null) { + let instruction = instructions[targetKey]; + if (typeof instruction === "function") { + instruction = [, instruction]; + } + const [filter2 = nonNullish, valueFn = pass, sourceKey = targetKey] = instruction; + if (typeof filter2 === "function" && filter2(source[sourceKey]) || typeof filter2 !== "function" && !!filter2) { + target[targetKey] = valueFn(source[sourceKey]); + } + return; + } + let [filter, value] = instructions[targetKey]; + if (typeof value === "function") { + let _value; + const defaultFilterPassed = filter === void 0 && (_value = value()) != null; + const customFilterPassed = typeof filter === "function" && !!filter(void 0) || typeof filter !== "function" && !!filter; + if (defaultFilterPassed) { + target[targetKey] = _value; + } else if (customFilterPassed) { + 
target[targetKey] = value(); + } + } else { + const defaultFilterPassed = filter === void 0 && value != null; + const customFilterPassed = typeof filter === "function" && !!filter(value) || typeof filter !== "function" && !!filter; + if (defaultFilterPassed || customFilterPassed) { + target[targetKey] = value; + } + } +}, "applyInstruction"); +var nonNullish = /* @__PURE__ */ __name((_) => _ != null, "nonNullish"); +var pass = /* @__PURE__ */ __name((_) => _, "pass"); + +// src/quote-header.ts +function quoteHeader(part) { + if (part.includes(",") || part.includes('"')) { + part = `"${part.replace(/"/g, '\\"')}"`; + } + return part; +} +__name(quoteHeader, "quoteHeader"); + +// src/resolve-path.ts + + +// src/ser-utils.ts +var serializeFloat = /* @__PURE__ */ __name((value) => { + if (value !== value) { + return "NaN"; + } + switch (value) { + case Infinity: + return "Infinity"; + case -Infinity: + return "-Infinity"; + default: + return value; + } +}, "serializeFloat"); +var serializeDateTime = /* @__PURE__ */ __name((date) => date.toISOString().replace(".000Z", "Z"), "serializeDateTime"); + +// src/serde-json.ts +var _json = /* @__PURE__ */ __name((obj) => { + if (obj == null) { + return {}; + } + if (Array.isArray(obj)) { + return obj.filter((_) => _ != null).map(_json); + } + if (typeof obj === "object") { + const target = {}; + for (const key of Object.keys(obj)) { + if (obj[key] == null) { + continue; + } + target[key] = _json(obj[key]); + } + return target; + } + return obj; +}, "_json"); + +// src/split-every.ts +function splitEvery(value, delimiter, numDelimiters) { + if (numDelimiters <= 0 || !Number.isInteger(numDelimiters)) { + throw new Error("Invalid number of delimiters (" + numDelimiters + ") for splitEvery."); + } + const segments = value.split(delimiter); + if (numDelimiters === 1) { + return segments; + } + const compoundSegments = []; + let currentSegment = ""; + for (let i = 0; i < segments.length; i++) { + if (currentSegment === "") { + 
currentSegment = segments[i]; + } else { + currentSegment += delimiter + segments[i]; + } + if ((i + 1) % numDelimiters === 0) { + compoundSegments.push(currentSegment); + currentSegment = ""; + } + } + if (currentSegment !== "") { + compoundSegments.push(currentSegment); + } + return compoundSegments; +} +__name(splitEvery, "splitEvery"); + +// src/split-header.ts +var splitHeader = /* @__PURE__ */ __name((value) => { + const z = value.length; + const values = []; + let withinQuotes = false; + let prevChar = void 0; + let anchor = 0; + for (let i = 0; i < z; ++i) { + const char = value[i]; + switch (char) { + case `"`: + if (prevChar !== "\\") { + withinQuotes = !withinQuotes; + } + break; + case ",": + if (!withinQuotes) { + values.push(value.slice(anchor, i)); + anchor = i + 1; + } + break; + default: + } + prevChar = char; + } + values.push(value.slice(anchor)); + return values.map((v) => { + v = v.trim(); + const z2 = v.length; + if (z2 < 2) { + return v; + } + if (v[0] === `"` && v[z2 - 1] === `"`) { + v = v.slice(1, z2 - 1); + } + return v.replace(/\\"/g, '"'); + }); +}, "splitHeader"); +// Annotate the CommonJS export names for ESM import in node: + +0 && (module.exports = { + Client, + collectBody, + Command, + SENSITIVE_STRING, + createAggregatedClient, + dateToUtcString, + parseRfc3339DateTime, + parseRfc3339DateTimeWithOffset, + parseRfc7231DateTime, + parseEpochTimestamp, + throwDefaultError, + withBaseException, + loadConfigsForDefaultMode, + emitWarningIfUnsupportedVersion, + ServiceException, + decorateServiceException, + extendedEncodeURIComponent, + getDefaultExtensionConfiguration, + getDefaultClientConfiguration, + resolveDefaultRuntimeConfig, + getArrayIfSingleItem, + getValueFromTextNode, + isSerializableHeaderValue, + LazyJsonString, + NoOpLogger, + map, + convertMap, + take, + parseBoolean, + expectBoolean, + expectNumber, + expectFloat32, + expectLong, + expectInt, + expectInt32, + expectShort, + expectByte, + expectNonNull, + expectObject, 
+ expectString, + expectUnion, + strictParseDouble, + strictParseFloat, + strictParseFloat32, + limitedParseDouble, + handleFloat, + limitedParseFloat, + limitedParseFloat32, + strictParseLong, + strictParseInt, + strictParseInt32, + strictParseShort, + strictParseByte, + logger, + quoteHeader, + resolvedPath, + serializeFloat, + serializeDateTime, + _json, + splitEvery, + splitHeader +}); + diff --git a/amplify/functions/deleteDocument/node_modules/@smithy/smithy-client/dist-cjs/is-serializable-header-value.js b/amplify/functions/deleteDocument/node_modules/@smithy/smithy-client/dist-cjs/is-serializable-header-value.js new file mode 100644 index 0000000..532e610 --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@smithy/smithy-client/dist-cjs/is-serializable-header-value.js @@ -0,0 +1 @@ +module.exports = require("./index.js"); \ No newline at end of file diff --git a/amplify/functions/deleteDocument/node_modules/@smithy/smithy-client/dist-cjs/lazy-json.js b/amplify/functions/deleteDocument/node_modules/@smithy/smithy-client/dist-cjs/lazy-json.js new file mode 100644 index 0000000..532e610 --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@smithy/smithy-client/dist-cjs/lazy-json.js @@ -0,0 +1 @@ +module.exports = require("./index.js"); \ No newline at end of file diff --git a/amplify/functions/deleteDocument/node_modules/@smithy/smithy-client/dist-cjs/object-mapping.js b/amplify/functions/deleteDocument/node_modules/@smithy/smithy-client/dist-cjs/object-mapping.js new file mode 100644 index 0000000..532e610 --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@smithy/smithy-client/dist-cjs/object-mapping.js @@ -0,0 +1 @@ +module.exports = require("./index.js"); \ No newline at end of file diff --git a/amplify/functions/deleteDocument/node_modules/@smithy/smithy-client/dist-cjs/parse-utils.js b/amplify/functions/deleteDocument/node_modules/@smithy/smithy-client/dist-cjs/parse-utils.js new file mode 100644 index 
0000000..532e610 --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@smithy/smithy-client/dist-cjs/parse-utils.js @@ -0,0 +1 @@ +module.exports = require("./index.js"); \ No newline at end of file diff --git a/amplify/functions/deleteDocument/node_modules/@smithy/smithy-client/dist-cjs/quote-header.js b/amplify/functions/deleteDocument/node_modules/@smithy/smithy-client/dist-cjs/quote-header.js new file mode 100644 index 0000000..532e610 --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@smithy/smithy-client/dist-cjs/quote-header.js @@ -0,0 +1 @@ +module.exports = require("./index.js"); \ No newline at end of file diff --git a/amplify/functions/deleteDocument/node_modules/@smithy/smithy-client/dist-cjs/resolve-path.js b/amplify/functions/deleteDocument/node_modules/@smithy/smithy-client/dist-cjs/resolve-path.js new file mode 100644 index 0000000..532e610 --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@smithy/smithy-client/dist-cjs/resolve-path.js @@ -0,0 +1 @@ +module.exports = require("./index.js"); \ No newline at end of file diff --git a/amplify/functions/deleteDocument/node_modules/@smithy/smithy-client/dist-cjs/ser-utils.js b/amplify/functions/deleteDocument/node_modules/@smithy/smithy-client/dist-cjs/ser-utils.js new file mode 100644 index 0000000..532e610 --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@smithy/smithy-client/dist-cjs/ser-utils.js @@ -0,0 +1 @@ +module.exports = require("./index.js"); \ No newline at end of file diff --git a/amplify/functions/deleteDocument/node_modules/@smithy/smithy-client/dist-cjs/serde-json.js b/amplify/functions/deleteDocument/node_modules/@smithy/smithy-client/dist-cjs/serde-json.js new file mode 100644 index 0000000..532e610 --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@smithy/smithy-client/dist-cjs/serde-json.js @@ -0,0 +1 @@ +module.exports = require("./index.js"); \ No newline at end of file diff --git 
a/amplify/functions/deleteDocument/node_modules/@smithy/smithy-client/dist-cjs/split-every.js b/amplify/functions/deleteDocument/node_modules/@smithy/smithy-client/dist-cjs/split-every.js new file mode 100644 index 0000000..532e610 --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@smithy/smithy-client/dist-cjs/split-every.js @@ -0,0 +1 @@ +module.exports = require("./index.js"); \ No newline at end of file diff --git a/amplify/functions/deleteDocument/node_modules/@smithy/smithy-client/dist-cjs/split-header.js b/amplify/functions/deleteDocument/node_modules/@smithy/smithy-client/dist-cjs/split-header.js new file mode 100644 index 0000000..532e610 --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@smithy/smithy-client/dist-cjs/split-header.js @@ -0,0 +1 @@ +module.exports = require("./index.js"); \ No newline at end of file diff --git a/amplify/functions/deleteDocument/node_modules/@smithy/smithy-client/dist-es/NoOpLogger.js b/amplify/functions/deleteDocument/node_modules/@smithy/smithy-client/dist-es/NoOpLogger.js new file mode 100644 index 0000000..73cd076 --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@smithy/smithy-client/dist-es/NoOpLogger.js @@ -0,0 +1,7 @@ +export class NoOpLogger { + trace() { } + debug() { } + info() { } + warn() { } + error() { } +} diff --git a/amplify/functions/deleteDocument/node_modules/@smithy/smithy-client/dist-es/client.js b/amplify/functions/deleteDocument/node_modules/@smithy/smithy-client/dist-es/client.js new file mode 100644 index 0000000..56cbc2e --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@smithy/smithy-client/dist-es/client.js @@ -0,0 +1,42 @@ +import { constructStack } from "@smithy/middleware-stack"; +export class Client { + constructor(config) { + this.config = config; + this.middlewareStack = constructStack(); + } + send(command, optionsOrCb, cb) { + const options = typeof optionsOrCb !== "function" ? 
optionsOrCb : undefined; + const callback = typeof optionsOrCb === "function" ? optionsOrCb : cb; + const useHandlerCache = options === undefined && this.config.cacheMiddleware === true; + let handler; + if (useHandlerCache) { + if (!this.handlers) { + this.handlers = new WeakMap(); + } + const handlers = this.handlers; + if (handlers.has(command.constructor)) { + handler = handlers.get(command.constructor); + } + else { + handler = command.resolveMiddleware(this.middlewareStack, this.config, options); + handlers.set(command.constructor, handler); + } + } + else { + delete this.handlers; + handler = command.resolveMiddleware(this.middlewareStack, this.config, options); + } + if (callback) { + handler(command) + .then((result) => callback(null, result.output), (err) => callback(err)) + .catch(() => { }); + } + else { + return handler(command).then((result) => result.output); + } + } + destroy() { + this.config?.requestHandler?.destroy?.(); + delete this.handlers; + } +} diff --git a/amplify/functions/deleteDocument/node_modules/@smithy/smithy-client/dist-es/collect-stream-body.js b/amplify/functions/deleteDocument/node_modules/@smithy/smithy-client/dist-es/collect-stream-body.js new file mode 100644 index 0000000..2b76f0a --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@smithy/smithy-client/dist-es/collect-stream-body.js @@ -0,0 +1 @@ +export { collectBody } from "@smithy/core/protocols"; diff --git a/amplify/functions/deleteDocument/node_modules/@smithy/smithy-client/dist-es/command.js b/amplify/functions/deleteDocument/node_modules/@smithy/smithy-client/dist-es/command.js new file mode 100644 index 0000000..078eecb --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@smithy/smithy-client/dist-es/command.js @@ -0,0 +1,115 @@ +import { constructStack } from "@smithy/middleware-stack"; +import { SMITHY_CONTEXT_KEY } from "@smithy/types"; +export class Command { + constructor() { + this.middlewareStack = constructStack(); + } + static 
classBuilder() { + return new ClassBuilder(); + } + resolveMiddlewareWithContext(clientStack, configuration, options, { middlewareFn, clientName, commandName, inputFilterSensitiveLog, outputFilterSensitiveLog, smithyContext, additionalContext, CommandCtor, }) { + for (const mw of middlewareFn.bind(this)(CommandCtor, clientStack, configuration, options)) { + this.middlewareStack.use(mw); + } + const stack = clientStack.concat(this.middlewareStack); + const { logger } = configuration; + const handlerExecutionContext = { + logger, + clientName, + commandName, + inputFilterSensitiveLog, + outputFilterSensitiveLog, + [SMITHY_CONTEXT_KEY]: { + commandInstance: this, + ...smithyContext, + }, + ...additionalContext, + }; + const { requestHandler } = configuration; + return stack.resolve((request) => requestHandler.handle(request.request, options || {}), handlerExecutionContext); + } +} +class ClassBuilder { + constructor() { + this._init = () => { }; + this._ep = {}; + this._middlewareFn = () => []; + this._commandName = ""; + this._clientName = ""; + this._additionalContext = {}; + this._smithyContext = {}; + this._inputFilterSensitiveLog = (_) => _; + this._outputFilterSensitiveLog = (_) => _; + this._serializer = null; + this._deserializer = null; + } + init(cb) { + this._init = cb; + } + ep(endpointParameterInstructions) { + this._ep = endpointParameterInstructions; + return this; + } + m(middlewareSupplier) { + this._middlewareFn = middlewareSupplier; + return this; + } + s(service, operation, smithyContext = {}) { + this._smithyContext = { + service, + operation, + ...smithyContext, + }; + return this; + } + c(additionalContext = {}) { + this._additionalContext = additionalContext; + return this; + } + n(clientName, commandName) { + this._clientName = clientName; + this._commandName = commandName; + return this; + } + f(inputFilter = (_) => _, outputFilter = (_) => _) { + this._inputFilterSensitiveLog = inputFilter; + this._outputFilterSensitiveLog = outputFilter; + 
return this; + } + ser(serializer) { + this._serializer = serializer; + return this; + } + de(deserializer) { + this._deserializer = deserializer; + return this; + } + build() { + const closure = this; + let CommandRef; + return (CommandRef = class extends Command { + static getEndpointParameterInstructions() { + return closure._ep; + } + constructor(...[input]) { + super(); + this.serialize = closure._serializer; + this.deserialize = closure._deserializer; + this.input = input ?? {}; + closure._init(this); + } + resolveMiddleware(stack, configuration, options) { + return this.resolveMiddlewareWithContext(stack, configuration, options, { + CommandCtor: CommandRef, + middlewareFn: closure._middlewareFn, + clientName: closure._clientName, + commandName: closure._commandName, + inputFilterSensitiveLog: closure._inputFilterSensitiveLog, + outputFilterSensitiveLog: closure._outputFilterSensitiveLog, + smithyContext: closure._smithyContext, + additionalContext: closure._additionalContext, + }); + } + }); + } +} diff --git a/amplify/functions/deleteDocument/node_modules/@smithy/smithy-client/dist-es/constants.js b/amplify/functions/deleteDocument/node_modules/@smithy/smithy-client/dist-es/constants.js new file mode 100644 index 0000000..9b193d7 --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@smithy/smithy-client/dist-es/constants.js @@ -0,0 +1 @@ +export const SENSITIVE_STRING = "***SensitiveInformation***"; diff --git a/amplify/functions/deleteDocument/node_modules/@smithy/smithy-client/dist-es/create-aggregated-client.js b/amplify/functions/deleteDocument/node_modules/@smithy/smithy-client/dist-es/create-aggregated-client.js new file mode 100644 index 0000000..44cf4dc --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@smithy/smithy-client/dist-es/create-aggregated-client.js @@ -0,0 +1,21 @@ +export const createAggregatedClient = (commands, Client) => { + for (const command of Object.keys(commands)) { + const CommandCtor = 
commands[command]; + const methodImpl = async function (args, optionsOrCb, cb) { + const command = new CommandCtor(args); + if (typeof optionsOrCb === "function") { + this.send(command, optionsOrCb); + } + else if (typeof cb === "function") { + if (typeof optionsOrCb !== "object") + throw new Error(`Expected http options but got ${typeof optionsOrCb}`); + this.send(command, optionsOrCb || {}, cb); + } + else { + return this.send(command, optionsOrCb); + } + }; + const methodName = (command[0].toLowerCase() + command.slice(1)).replace(/Command$/, ""); + Client.prototype[methodName] = methodImpl; + } +}; diff --git a/amplify/functions/deleteDocument/node_modules/@smithy/smithy-client/dist-es/date-utils.js b/amplify/functions/deleteDocument/node_modules/@smithy/smithy-client/dist-es/date-utils.js new file mode 100644 index 0000000..0d0abf2 --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@smithy/smithy-client/dist-es/date-utils.js @@ -0,0 +1,190 @@ +import { strictParseByte, strictParseDouble, strictParseFloat32, strictParseShort } from "./parse-utils"; +const DAYS = ["Sun", "Mon", "Tue", "Wed", "Thu", "Fri", "Sat"]; +const MONTHS = ["Jan", "Feb", "Mar", "Apr", "May", "Jun", "Jul", "Aug", "Sep", "Oct", "Nov", "Dec"]; +export function dateToUtcString(date) { + const year = date.getUTCFullYear(); + const month = date.getUTCMonth(); + const dayOfWeek = date.getUTCDay(); + const dayOfMonthInt = date.getUTCDate(); + const hoursInt = date.getUTCHours(); + const minutesInt = date.getUTCMinutes(); + const secondsInt = date.getUTCSeconds(); + const dayOfMonthString = dayOfMonthInt < 10 ? `0${dayOfMonthInt}` : `${dayOfMonthInt}`; + const hoursString = hoursInt < 10 ? `0${hoursInt}` : `${hoursInt}`; + const minutesString = minutesInt < 10 ? `0${minutesInt}` : `${minutesInt}`; + const secondsString = secondsInt < 10 ? 
`0${secondsInt}` : `${secondsInt}`; + return `${DAYS[dayOfWeek]}, ${dayOfMonthString} ${MONTHS[month]} ${year} ${hoursString}:${minutesString}:${secondsString} GMT`; +} +const RFC3339 = new RegExp(/^(\d{4})-(\d{2})-(\d{2})[tT](\d{2}):(\d{2}):(\d{2})(?:\.(\d+))?[zZ]$/); +export const parseRfc3339DateTime = (value) => { + if (value === null || value === undefined) { + return undefined; + } + if (typeof value !== "string") { + throw new TypeError("RFC-3339 date-times must be expressed as strings"); + } + const match = RFC3339.exec(value); + if (!match) { + throw new TypeError("Invalid RFC-3339 date-time value"); + } + const [_, yearStr, monthStr, dayStr, hours, minutes, seconds, fractionalMilliseconds] = match; + const year = strictParseShort(stripLeadingZeroes(yearStr)); + const month = parseDateValue(monthStr, "month", 1, 12); + const day = parseDateValue(dayStr, "day", 1, 31); + return buildDate(year, month, day, { hours, minutes, seconds, fractionalMilliseconds }); +}; +const RFC3339_WITH_OFFSET = new RegExp(/^(\d{4})-(\d{2})-(\d{2})[tT](\d{2}):(\d{2}):(\d{2})(?:\.(\d+))?(([-+]\d{2}\:\d{2})|[zZ])$/); +export const parseRfc3339DateTimeWithOffset = (value) => { + if (value === null || value === undefined) { + return undefined; + } + if (typeof value !== "string") { + throw new TypeError("RFC-3339 date-times must be expressed as strings"); + } + const match = RFC3339_WITH_OFFSET.exec(value); + if (!match) { + throw new TypeError("Invalid RFC-3339 date-time value"); + } + const [_, yearStr, monthStr, dayStr, hours, minutes, seconds, fractionalMilliseconds, offsetStr] = match; + const year = strictParseShort(stripLeadingZeroes(yearStr)); + const month = parseDateValue(monthStr, "month", 1, 12); + const day = parseDateValue(dayStr, "day", 1, 31); + const date = buildDate(year, month, day, { hours, minutes, seconds, fractionalMilliseconds }); + if (offsetStr.toUpperCase() != "Z") { + date.setTime(date.getTime() - parseOffsetToMilliseconds(offsetStr)); + } + return date; 
+}; +const IMF_FIXDATE = new RegExp(/^(?:Mon|Tue|Wed|Thu|Fri|Sat|Sun), (\d{2}) (Jan|Feb|Mar|Apr|May|Jun|Jul|Aug|Sep|Oct|Nov|Dec) (\d{4}) (\d{1,2}):(\d{2}):(\d{2})(?:\.(\d+))? GMT$/); +const RFC_850_DATE = new RegExp(/^(?:Monday|Tuesday|Wednesday|Thursday|Friday|Saturday|Sunday), (\d{2})-(Jan|Feb|Mar|Apr|May|Jun|Jul|Aug|Sep|Oct|Nov|Dec)-(\d{2}) (\d{1,2}):(\d{2}):(\d{2})(?:\.(\d+))? GMT$/); +const ASC_TIME = new RegExp(/^(?:Mon|Tue|Wed|Thu|Fri|Sat|Sun) (Jan|Feb|Mar|Apr|May|Jun|Jul|Aug|Sep|Oct|Nov|Dec) ( [1-9]|\d{2}) (\d{1,2}):(\d{2}):(\d{2})(?:\.(\d+))? (\d{4})$/); +export const parseRfc7231DateTime = (value) => { + if (value === null || value === undefined) { + return undefined; + } + if (typeof value !== "string") { + throw new TypeError("RFC-7231 date-times must be expressed as strings"); + } + let match = IMF_FIXDATE.exec(value); + if (match) { + const [_, dayStr, monthStr, yearStr, hours, minutes, seconds, fractionalMilliseconds] = match; + return buildDate(strictParseShort(stripLeadingZeroes(yearStr)), parseMonthByShortName(monthStr), parseDateValue(dayStr, "day", 1, 31), { hours, minutes, seconds, fractionalMilliseconds }); + } + match = RFC_850_DATE.exec(value); + if (match) { + const [_, dayStr, monthStr, yearStr, hours, minutes, seconds, fractionalMilliseconds] = match; + return adjustRfc850Year(buildDate(parseTwoDigitYear(yearStr), parseMonthByShortName(monthStr), parseDateValue(dayStr, "day", 1, 31), { + hours, + minutes, + seconds, + fractionalMilliseconds, + })); + } + match = ASC_TIME.exec(value); + if (match) { + const [_, monthStr, dayStr, hours, minutes, seconds, fractionalMilliseconds, yearStr] = match; + return buildDate(strictParseShort(stripLeadingZeroes(yearStr)), parseMonthByShortName(monthStr), parseDateValue(dayStr.trimLeft(), "day", 1, 31), { hours, minutes, seconds, fractionalMilliseconds }); + } + throw new TypeError("Invalid RFC-7231 date-time value"); +}; +export const parseEpochTimestamp = (value) => { + if (value === null || value === 
undefined) { + return undefined; + } + let valueAsDouble; + if (typeof value === "number") { + valueAsDouble = value; + } + else if (typeof value === "string") { + valueAsDouble = strictParseDouble(value); + } + else if (typeof value === "object" && value.tag === 1) { + valueAsDouble = value.value; + } + else { + throw new TypeError("Epoch timestamps must be expressed as floating point numbers or their string representation"); + } + if (Number.isNaN(valueAsDouble) || valueAsDouble === Infinity || valueAsDouble === -Infinity) { + throw new TypeError("Epoch timestamps must be valid, non-Infinite, non-NaN numerics"); + } + return new Date(Math.round(valueAsDouble * 1000)); +}; +const buildDate = (year, month, day, time) => { + const adjustedMonth = month - 1; + validateDayOfMonth(year, adjustedMonth, day); + return new Date(Date.UTC(year, adjustedMonth, day, parseDateValue(time.hours, "hour", 0, 23), parseDateValue(time.minutes, "minute", 0, 59), parseDateValue(time.seconds, "seconds", 0, 60), parseMilliseconds(time.fractionalMilliseconds))); +}; +const parseTwoDigitYear = (value) => { + const thisYear = new Date().getUTCFullYear(); + const valueInThisCentury = Math.floor(thisYear / 100) * 100 + strictParseShort(stripLeadingZeroes(value)); + if (valueInThisCentury < thisYear) { + return valueInThisCentury + 100; + } + return valueInThisCentury; +}; +const FIFTY_YEARS_IN_MILLIS = 50 * 365 * 24 * 60 * 60 * 1000; +const adjustRfc850Year = (input) => { + if (input.getTime() - new Date().getTime() > FIFTY_YEARS_IN_MILLIS) { + return new Date(Date.UTC(input.getUTCFullYear() - 100, input.getUTCMonth(), input.getUTCDate(), input.getUTCHours(), input.getUTCMinutes(), input.getUTCSeconds(), input.getUTCMilliseconds())); + } + return input; +}; +const parseMonthByShortName = (value) => { + const monthIdx = MONTHS.indexOf(value); + if (monthIdx < 0) { + throw new TypeError(`Invalid month: ${value}`); + } + return monthIdx + 1; +}; +const DAYS_IN_MONTH = [31, 28, 31, 30, 31, 30, 
31, 31, 30, 31, 30, 31]; +const validateDayOfMonth = (year, month, day) => { + let maxDays = DAYS_IN_MONTH[month]; + if (month === 1 && isLeapYear(year)) { + maxDays = 29; + } + if (day > maxDays) { + throw new TypeError(`Invalid day for ${MONTHS[month]} in ${year}: ${day}`); + } +}; +const isLeapYear = (year) => { + return year % 4 === 0 && (year % 100 !== 0 || year % 400 === 0); +}; +const parseDateValue = (value, type, lower, upper) => { + const dateVal = strictParseByte(stripLeadingZeroes(value)); + if (dateVal < lower || dateVal > upper) { + throw new TypeError(`${type} must be between ${lower} and ${upper}, inclusive`); + } + return dateVal; +}; +const parseMilliseconds = (value) => { + if (value === null || value === undefined) { + return 0; + } + return strictParseFloat32("0." + value) * 1000; +}; +const parseOffsetToMilliseconds = (value) => { + const directionStr = value[0]; + let direction = 1; + if (directionStr == "+") { + direction = 1; + } + else if (directionStr == "-") { + direction = -1; + } + else { + throw new TypeError(`Offset direction, ${directionStr}, must be "+" or "-"`); + } + const hour = Number(value.substring(1, 3)); + const minute = Number(value.substring(4, 6)); + return direction * (hour * 60 + minute) * 60 * 1000; +}; +const stripLeadingZeroes = (value) => { + let idx = 0; + while (idx < value.length - 1 && value.charAt(idx) === "0") { + idx++; + } + if (idx === 0) { + return value; + } + return value.slice(idx); +}; diff --git a/amplify/functions/deleteDocument/node_modules/@smithy/smithy-client/dist-es/default-error-handler.js b/amplify/functions/deleteDocument/node_modules/@smithy/smithy-client/dist-es/default-error-handler.js new file mode 100644 index 0000000..7da1091 --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@smithy/smithy-client/dist-es/default-error-handler.js @@ -0,0 +1,22 @@ +import { decorateServiceException } from "./exceptions"; +export const throwDefaultError = ({ output, parsedBody, 
exceptionCtor, errorCode }) => { + const $metadata = deserializeMetadata(output); + const statusCode = $metadata.httpStatusCode ? $metadata.httpStatusCode + "" : undefined; + const response = new exceptionCtor({ + name: parsedBody?.code || parsedBody?.Code || errorCode || statusCode || "UnknownError", + $fault: "client", + $metadata, + }); + throw decorateServiceException(response, parsedBody); +}; +export const withBaseException = (ExceptionCtor) => { + return ({ output, parsedBody, errorCode }) => { + throwDefaultError({ output, parsedBody, exceptionCtor: ExceptionCtor, errorCode }); + }; +}; +const deserializeMetadata = (output) => ({ + httpStatusCode: output.statusCode, + requestId: output.headers["x-amzn-requestid"] ?? output.headers["x-amzn-request-id"] ?? output.headers["x-amz-request-id"], + extendedRequestId: output.headers["x-amz-id-2"], + cfId: output.headers["x-amz-cf-id"], +}); diff --git a/amplify/functions/deleteDocument/node_modules/@smithy/smithy-client/dist-es/defaults-mode.js b/amplify/functions/deleteDocument/node_modules/@smithy/smithy-client/dist-es/defaults-mode.js new file mode 100644 index 0000000..f19079c --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@smithy/smithy-client/dist-es/defaults-mode.js @@ -0,0 +1,26 @@ +export const loadConfigsForDefaultMode = (mode) => { + switch (mode) { + case "standard": + return { + retryMode: "standard", + connectionTimeout: 3100, + }; + case "in-region": + return { + retryMode: "standard", + connectionTimeout: 1100, + }; + case "cross-region": + return { + retryMode: "standard", + connectionTimeout: 3100, + }; + case "mobile": + return { + retryMode: "standard", + connectionTimeout: 30000, + }; + default: + return {}; + } +}; diff --git a/amplify/functions/deleteDocument/node_modules/@smithy/smithy-client/dist-es/emitWarningIfUnsupportedVersion.js b/amplify/functions/deleteDocument/node_modules/@smithy/smithy-client/dist-es/emitWarningIfUnsupportedVersion.js new file mode 100644 index 
0000000..7b30893 --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@smithy/smithy-client/dist-es/emitWarningIfUnsupportedVersion.js @@ -0,0 +1,6 @@ +let warningEmitted = false; +export const emitWarningIfUnsupportedVersion = (version) => { + if (version && !warningEmitted && parseInt(version.substring(1, version.indexOf("."))) < 16) { + warningEmitted = true; + } +}; diff --git a/amplify/functions/deleteDocument/node_modules/@smithy/smithy-client/dist-es/exceptions.js b/amplify/functions/deleteDocument/node_modules/@smithy/smithy-client/dist-es/exceptions.js new file mode 100644 index 0000000..db6a801 --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@smithy/smithy-client/dist-es/exceptions.js @@ -0,0 +1,46 @@ +export class ServiceException extends Error { + constructor(options) { + super(options.message); + Object.setPrototypeOf(this, Object.getPrototypeOf(this).constructor.prototype); + this.name = options.name; + this.$fault = options.$fault; + this.$metadata = options.$metadata; + } + static isInstance(value) { + if (!value) + return false; + const candidate = value; + return (ServiceException.prototype.isPrototypeOf(candidate) || + (Boolean(candidate.$fault) && + Boolean(candidate.$metadata) && + (candidate.$fault === "client" || candidate.$fault === "server"))); + } + static [Symbol.hasInstance](instance) { + if (!instance) + return false; + const candidate = instance; + if (this === ServiceException) { + return ServiceException.isInstance(instance); + } + if (ServiceException.isInstance(instance)) { + if (candidate.name && this.name) { + return this.prototype.isPrototypeOf(instance) || candidate.name === this.name; + } + return this.prototype.isPrototypeOf(instance); + } + return false; + } +} +export const decorateServiceException = (exception, additions = {}) => { + Object.entries(additions) + .filter(([, v]) => v !== undefined) + .forEach(([k, v]) => { + if (exception[k] == undefined || exception[k] === "") { + exception[k] 
= v; + } + }); + const message = exception.message || exception.Message || "UnknownError"; + exception.message = message; + delete exception.Message; + return exception; +}; diff --git a/amplify/functions/deleteDocument/node_modules/@smithy/smithy-client/dist-es/extended-encode-uri-component.js b/amplify/functions/deleteDocument/node_modules/@smithy/smithy-client/dist-es/extended-encode-uri-component.js new file mode 100644 index 0000000..cb4f991 --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@smithy/smithy-client/dist-es/extended-encode-uri-component.js @@ -0,0 +1 @@ +export { extendedEncodeURIComponent } from "@smithy/core/protocols"; diff --git a/amplify/functions/deleteDocument/node_modules/@smithy/smithy-client/dist-es/extensions/checksum.js b/amplify/functions/deleteDocument/node_modules/@smithy/smithy-client/dist-es/extensions/checksum.js new file mode 100644 index 0000000..f3831ee --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@smithy/smithy-client/dist-es/extensions/checksum.js @@ -0,0 +1,30 @@ +import { AlgorithmId } from "@smithy/types"; +export { AlgorithmId }; +export const getChecksumConfiguration = (runtimeConfig) => { + const checksumAlgorithms = []; + for (const id in AlgorithmId) { + const algorithmId = AlgorithmId[id]; + if (runtimeConfig[algorithmId] === undefined) { + continue; + } + checksumAlgorithms.push({ + algorithmId: () => algorithmId, + checksumConstructor: () => runtimeConfig[algorithmId], + }); + } + return { + addChecksumAlgorithm(algo) { + checksumAlgorithms.push(algo); + }, + checksumAlgorithms() { + return checksumAlgorithms; + }, + }; +}; +export const resolveChecksumRuntimeConfig = (clientConfig) => { + const runtimeConfig = {}; + clientConfig.checksumAlgorithms().forEach((checksumAlgorithm) => { + runtimeConfig[checksumAlgorithm.algorithmId()] = checksumAlgorithm.checksumConstructor(); + }); + return runtimeConfig; +}; diff --git 
a/amplify/functions/deleteDocument/node_modules/@smithy/smithy-client/dist-es/extensions/defaultExtensionConfiguration.js b/amplify/functions/deleteDocument/node_modules/@smithy/smithy-client/dist-es/extensions/defaultExtensionConfiguration.js new file mode 100644 index 0000000..272cd3a --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@smithy/smithy-client/dist-es/extensions/defaultExtensionConfiguration.js @@ -0,0 +1,9 @@ +import { getChecksumConfiguration, resolveChecksumRuntimeConfig } from "./checksum"; +import { getRetryConfiguration, resolveRetryRuntimeConfig } from "./retry"; +export const getDefaultExtensionConfiguration = (runtimeConfig) => { + return Object.assign(getChecksumConfiguration(runtimeConfig), getRetryConfiguration(runtimeConfig)); +}; +export const getDefaultClientConfiguration = getDefaultExtensionConfiguration; +export const resolveDefaultRuntimeConfig = (config) => { + return Object.assign(resolveChecksumRuntimeConfig(config), resolveRetryRuntimeConfig(config)); +}; diff --git a/amplify/functions/deleteDocument/node_modules/@smithy/smithy-client/dist-es/extensions/index.js b/amplify/functions/deleteDocument/node_modules/@smithy/smithy-client/dist-es/extensions/index.js new file mode 100644 index 0000000..f1b8074 --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@smithy/smithy-client/dist-es/extensions/index.js @@ -0,0 +1 @@ +export * from "./defaultExtensionConfiguration"; diff --git a/amplify/functions/deleteDocument/node_modules/@smithy/smithy-client/dist-es/extensions/retry.js b/amplify/functions/deleteDocument/node_modules/@smithy/smithy-client/dist-es/extensions/retry.js new file mode 100644 index 0000000..2c18b0a --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@smithy/smithy-client/dist-es/extensions/retry.js @@ -0,0 +1,15 @@ +export const getRetryConfiguration = (runtimeConfig) => { + return { + setRetryStrategy(retryStrategy) { + runtimeConfig.retryStrategy = retryStrategy; + }, + 
retryStrategy() { + return runtimeConfig.retryStrategy; + }, + }; +}; +export const resolveRetryRuntimeConfig = (retryStrategyConfiguration) => { + const runtimeConfig = {}; + runtimeConfig.retryStrategy = retryStrategyConfiguration.retryStrategy(); + return runtimeConfig; +}; diff --git a/amplify/functions/deleteDocument/node_modules/@smithy/smithy-client/dist-es/get-array-if-single-item.js b/amplify/functions/deleteDocument/node_modules/@smithy/smithy-client/dist-es/get-array-if-single-item.js new file mode 100644 index 0000000..25d9432 --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@smithy/smithy-client/dist-es/get-array-if-single-item.js @@ -0,0 +1 @@ +export const getArrayIfSingleItem = (mayBeArray) => Array.isArray(mayBeArray) ? mayBeArray : [mayBeArray]; diff --git a/amplify/functions/deleteDocument/node_modules/@smithy/smithy-client/dist-es/get-value-from-text-node.js b/amplify/functions/deleteDocument/node_modules/@smithy/smithy-client/dist-es/get-value-from-text-node.js new file mode 100644 index 0000000..aa0f827 --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@smithy/smithy-client/dist-es/get-value-from-text-node.js @@ -0,0 +1,12 @@ +export const getValueFromTextNode = (obj) => { + const textNodeName = "#text"; + for (const key in obj) { + if (obj.hasOwnProperty(key) && obj[key][textNodeName] !== undefined) { + obj[key] = obj[key][textNodeName]; + } + else if (typeof obj[key] === "object" && obj[key] !== null) { + obj[key] = getValueFromTextNode(obj[key]); + } + } + return obj; +}; diff --git a/amplify/functions/deleteDocument/node_modules/@smithy/smithy-client/dist-es/index.js b/amplify/functions/deleteDocument/node_modules/@smithy/smithy-client/dist-es/index.js new file mode 100644 index 0000000..b05ab01 --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@smithy/smithy-client/dist-es/index.js @@ -0,0 +1,25 @@ +export * from "./client"; +export * from "./collect-stream-body"; +export * from 
"./command"; +export * from "./constants"; +export * from "./create-aggregated-client"; +export * from "./date-utils"; +export * from "./default-error-handler"; +export * from "./defaults-mode"; +export * from "./emitWarningIfUnsupportedVersion"; +export * from "./exceptions"; +export * from "./extended-encode-uri-component"; +export * from "./extensions"; +export * from "./get-array-if-single-item"; +export * from "./get-value-from-text-node"; +export * from "./is-serializable-header-value"; +export * from "./lazy-json"; +export * from "./NoOpLogger"; +export * from "./object-mapping"; +export * from "./parse-utils"; +export * from "./quote-header"; +export * from "./resolve-path"; +export * from "./ser-utils"; +export * from "./serde-json"; +export * from "./split-every"; +export * from "./split-header"; diff --git a/amplify/functions/deleteDocument/node_modules/@smithy/smithy-client/dist-es/is-serializable-header-value.js b/amplify/functions/deleteDocument/node_modules/@smithy/smithy-client/dist-es/is-serializable-header-value.js new file mode 100644 index 0000000..cb117ca --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@smithy/smithy-client/dist-es/is-serializable-header-value.js @@ -0,0 +1,3 @@ +export const isSerializableHeaderValue = (value) => { + return value != null; +}; diff --git a/amplify/functions/deleteDocument/node_modules/@smithy/smithy-client/dist-es/lazy-json.js b/amplify/functions/deleteDocument/node_modules/@smithy/smithy-client/dist-es/lazy-json.js new file mode 100644 index 0000000..9bddfce --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@smithy/smithy-client/dist-es/lazy-json.js @@ -0,0 +1,24 @@ +export const LazyJsonString = function LazyJsonString(val) { + const str = Object.assign(new String(val), { + deserializeJSON() { + return JSON.parse(String(val)); + }, + toString() { + return String(val); + }, + toJSON() { + return String(val); + }, + }); + return str; +}; +LazyJsonString.from = (object) => { + 
if (object && typeof object === "object" && (object instanceof LazyJsonString || "deserializeJSON" in object)) { + return object; + } + else if (typeof object === "string" || Object.getPrototypeOf(object) === String.prototype) { + return LazyJsonString(String(object)); + } + return LazyJsonString(JSON.stringify(object)); +}; +LazyJsonString.fromObject = LazyJsonString.from; diff --git a/amplify/functions/deleteDocument/node_modules/@smithy/smithy-client/dist-es/object-mapping.js b/amplify/functions/deleteDocument/node_modules/@smithy/smithy-client/dist-es/object-mapping.js new file mode 100644 index 0000000..84a1f26 --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@smithy/smithy-client/dist-es/object-mapping.js @@ -0,0 +1,92 @@ +export function map(arg0, arg1, arg2) { + let target; + let filter; + let instructions; + if (typeof arg1 === "undefined" && typeof arg2 === "undefined") { + target = {}; + instructions = arg0; + } + else { + target = arg0; + if (typeof arg1 === "function") { + filter = arg1; + instructions = arg2; + return mapWithFilter(target, filter, instructions); + } + else { + instructions = arg1; + } + } + for (const key of Object.keys(instructions)) { + if (!Array.isArray(instructions[key])) { + target[key] = instructions[key]; + continue; + } + applyInstruction(target, null, instructions, key); + } + return target; +} +export const convertMap = (target) => { + const output = {}; + for (const [k, v] of Object.entries(target || {})) { + output[k] = [, v]; + } + return output; +}; +export const take = (source, instructions) => { + const out = {}; + for (const key in instructions) { + applyInstruction(out, source, instructions, key); + } + return out; +}; +const mapWithFilter = (target, filter, instructions) => { + return map(target, Object.entries(instructions).reduce((_instructions, [key, value]) => { + if (Array.isArray(value)) { + _instructions[key] = value; + } + else { + if (typeof value === "function") { + _instructions[key] = 
[filter, value()]; + } + else { + _instructions[key] = [filter, value]; + } + } + return _instructions; + }, {})); +}; +const applyInstruction = (target, source, instructions, targetKey) => { + if (source !== null) { + let instruction = instructions[targetKey]; + if (typeof instruction === "function") { + instruction = [, instruction]; + } + const [filter = nonNullish, valueFn = pass, sourceKey = targetKey] = instruction; + if ((typeof filter === "function" && filter(source[sourceKey])) || (typeof filter !== "function" && !!filter)) { + target[targetKey] = valueFn(source[sourceKey]); + } + return; + } + let [filter, value] = instructions[targetKey]; + if (typeof value === "function") { + let _value; + const defaultFilterPassed = filter === undefined && (_value = value()) != null; + const customFilterPassed = (typeof filter === "function" && !!filter(void 0)) || (typeof filter !== "function" && !!filter); + if (defaultFilterPassed) { + target[targetKey] = _value; + } + else if (customFilterPassed) { + target[targetKey] = value(); + } + } + else { + const defaultFilterPassed = filter === undefined && value != null; + const customFilterPassed = (typeof filter === "function" && !!filter(value)) || (typeof filter !== "function" && !!filter); + if (defaultFilterPassed || customFilterPassed) { + target[targetKey] = value; + } + } +}; +const nonNullish = (_) => _ != null; +const pass = (_) => _; diff --git a/amplify/functions/deleteDocument/node_modules/@smithy/smithy-client/dist-es/parse-utils.js b/amplify/functions/deleteDocument/node_modules/@smithy/smithy-client/dist-es/parse-utils.js new file mode 100644 index 0000000..209db79 --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@smithy/smithy-client/dist-es/parse-utils.js @@ -0,0 +1,230 @@ +export const parseBoolean = (value) => { + switch (value) { + case "true": + return true; + case "false": + return false; + default: + throw new Error(`Unable to parse boolean value "${value}"`); + } +}; +export const 
expectBoolean = (value) => { + if (value === null || value === undefined) { + return undefined; + } + if (typeof value === "number") { + if (value === 0 || value === 1) { + logger.warn(stackTraceWarning(`Expected boolean, got ${typeof value}: ${value}`)); + } + if (value === 0) { + return false; + } + if (value === 1) { + return true; + } + } + if (typeof value === "string") { + const lower = value.toLowerCase(); + if (lower === "false" || lower === "true") { + logger.warn(stackTraceWarning(`Expected boolean, got ${typeof value}: ${value}`)); + } + if (lower === "false") { + return false; + } + if (lower === "true") { + return true; + } + } + if (typeof value === "boolean") { + return value; + } + throw new TypeError(`Expected boolean, got ${typeof value}: ${value}`); +}; +export const expectNumber = (value) => { + if (value === null || value === undefined) { + return undefined; + } + if (typeof value === "string") { + const parsed = parseFloat(value); + if (!Number.isNaN(parsed)) { + if (String(parsed) !== String(value)) { + logger.warn(stackTraceWarning(`Expected number but observed string: ${value}`)); + } + return parsed; + } + } + if (typeof value === "number") { + return value; + } + throw new TypeError(`Expected number, got ${typeof value}: ${value}`); +}; +const MAX_FLOAT = Math.ceil(2 ** 127 * (2 - 2 ** -23)); +export const expectFloat32 = (value) => { + const expected = expectNumber(value); + if (expected !== undefined && !Number.isNaN(expected) && expected !== Infinity && expected !== -Infinity) { + if (Math.abs(expected) > MAX_FLOAT) { + throw new TypeError(`Expected 32-bit float, got ${value}`); + } + } + return expected; +}; +export const expectLong = (value) => { + if (value === null || value === undefined) { + return undefined; + } + if (Number.isInteger(value) && !Number.isNaN(value)) { + return value; + } + throw new TypeError(`Expected integer, got ${typeof value}: ${value}`); +}; +export const expectInt = expectLong; +export const expectInt32 = 
(value) => expectSizedInt(value, 32); +export const expectShort = (value) => expectSizedInt(value, 16); +export const expectByte = (value) => expectSizedInt(value, 8); +const expectSizedInt = (value, size) => { + const expected = expectLong(value); + if (expected !== undefined && castInt(expected, size) !== expected) { + throw new TypeError(`Expected ${size}-bit integer, got ${value}`); + } + return expected; +}; +const castInt = (value, size) => { + switch (size) { + case 32: + return Int32Array.of(value)[0]; + case 16: + return Int16Array.of(value)[0]; + case 8: + return Int8Array.of(value)[0]; + } +}; +export const expectNonNull = (value, location) => { + if (value === null || value === undefined) { + if (location) { + throw new TypeError(`Expected a non-null value for ${location}`); + } + throw new TypeError("Expected a non-null value"); + } + return value; +}; +export const expectObject = (value) => { + if (value === null || value === undefined) { + return undefined; + } + if (typeof value === "object" && !Array.isArray(value)) { + return value; + } + const receivedType = Array.isArray(value) ? "array" : typeof value; + throw new TypeError(`Expected object, got ${receivedType}: ${value}`); +}; +export const expectString = (value) => { + if (value === null || value === undefined) { + return undefined; + } + if (typeof value === "string") { + return value; + } + if (["boolean", "number", "bigint"].includes(typeof value)) { + logger.warn(stackTraceWarning(`Expected string, got ${typeof value}: ${value}`)); + return String(value); + } + throw new TypeError(`Expected string, got ${typeof value}: ${value}`); +}; +export const expectUnion = (value) => { + if (value === null || value === undefined) { + return undefined; + } + const asObject = expectObject(value); + const setKeys = Object.entries(asObject) + .filter(([, v]) => v != null) + .map(([k]) => k); + if (setKeys.length === 0) { + throw new TypeError(`Unions must have exactly one non-null member. 
None were found.`); + } + if (setKeys.length > 1) { + throw new TypeError(`Unions must have exactly one non-null member. Keys ${setKeys} were not null.`); + } + return asObject; +}; +export const strictParseDouble = (value) => { + if (typeof value == "string") { + return expectNumber(parseNumber(value)); + } + return expectNumber(value); +}; +export const strictParseFloat = strictParseDouble; +export const strictParseFloat32 = (value) => { + if (typeof value == "string") { + return expectFloat32(parseNumber(value)); + } + return expectFloat32(value); +}; +const NUMBER_REGEX = /(-?(?:0|[1-9]\d*)(?:\.\d+)?(?:[eE][+-]?\d+)?)|(-?Infinity)|(NaN)/g; +const parseNumber = (value) => { + const matches = value.match(NUMBER_REGEX); + if (matches === null || matches[0].length !== value.length) { + throw new TypeError(`Expected real number, got implicit NaN`); + } + return parseFloat(value); +}; +export const limitedParseDouble = (value) => { + if (typeof value == "string") { + return parseFloatString(value); + } + return expectNumber(value); +}; +export const handleFloat = limitedParseDouble; +export const limitedParseFloat = limitedParseDouble; +export const limitedParseFloat32 = (value) => { + if (typeof value == "string") { + return parseFloatString(value); + } + return expectFloat32(value); +}; +const parseFloatString = (value) => { + switch (value) { + case "NaN": + return NaN; + case "Infinity": + return Infinity; + case "-Infinity": + return -Infinity; + default: + throw new Error(`Unable to parse float value: ${value}`); + } +}; +export const strictParseLong = (value) => { + if (typeof value === "string") { + return expectLong(parseNumber(value)); + } + return expectLong(value); +}; +export const strictParseInt = strictParseLong; +export const strictParseInt32 = (value) => { + if (typeof value === "string") { + return expectInt32(parseNumber(value)); + } + return expectInt32(value); +}; +export const strictParseShort = (value) => { + if (typeof value === "string") { + 
return expectShort(parseNumber(value)); + } + return expectShort(value); +}; +export const strictParseByte = (value) => { + if (typeof value === "string") { + return expectByte(parseNumber(value)); + } + return expectByte(value); +}; +const stackTraceWarning = (message) => { + return String(new TypeError(message).stack || message) + .split("\n") + .slice(0, 5) + .filter((s) => !s.includes("stackTraceWarning")) + .join("\n"); +}; +export const logger = { + warn: console.warn, +}; diff --git a/amplify/functions/deleteDocument/node_modules/@smithy/smithy-client/dist-es/quote-header.js b/amplify/functions/deleteDocument/node_modules/@smithy/smithy-client/dist-es/quote-header.js new file mode 100644 index 0000000..d0ddf67 --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@smithy/smithy-client/dist-es/quote-header.js @@ -0,0 +1,6 @@ +export function quoteHeader(part) { + if (part.includes(",") || part.includes('"')) { + part = `"${part.replace(/"/g, '\\"')}"`; + } + return part; +} diff --git a/amplify/functions/deleteDocument/node_modules/@smithy/smithy-client/dist-es/resolve-path.js b/amplify/functions/deleteDocument/node_modules/@smithy/smithy-client/dist-es/resolve-path.js new file mode 100644 index 0000000..6c70cb3 --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@smithy/smithy-client/dist-es/resolve-path.js @@ -0,0 +1 @@ +export { resolvedPath } from "@smithy/core/protocols"; diff --git a/amplify/functions/deleteDocument/node_modules/@smithy/smithy-client/dist-es/ser-utils.js b/amplify/functions/deleteDocument/node_modules/@smithy/smithy-client/dist-es/ser-utils.js new file mode 100644 index 0000000..207437f --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@smithy/smithy-client/dist-es/ser-utils.js @@ -0,0 +1,14 @@ +export const serializeFloat = (value) => { + if (value !== value) { + return "NaN"; + } + switch (value) { + case Infinity: + return "Infinity"; + case -Infinity: + return "-Infinity"; + default: + 
return value; + } +}; +export const serializeDateTime = (date) => date.toISOString().replace(".000Z", "Z"); diff --git a/amplify/functions/deleteDocument/node_modules/@smithy/smithy-client/dist-es/serde-json.js b/amplify/functions/deleteDocument/node_modules/@smithy/smithy-client/dist-es/serde-json.js new file mode 100644 index 0000000..babb7c1 --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@smithy/smithy-client/dist-es/serde-json.js @@ -0,0 +1,19 @@ +export const _json = (obj) => { + if (obj == null) { + return {}; + } + if (Array.isArray(obj)) { + return obj.filter((_) => _ != null).map(_json); + } + if (typeof obj === "object") { + const target = {}; + for (const key of Object.keys(obj)) { + if (obj[key] == null) { + continue; + } + target[key] = _json(obj[key]); + } + return target; + } + return obj; +}; diff --git a/amplify/functions/deleteDocument/node_modules/@smithy/smithy-client/dist-es/split-every.js b/amplify/functions/deleteDocument/node_modules/@smithy/smithy-client/dist-es/split-every.js new file mode 100644 index 0000000..1d78dca --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@smithy/smithy-client/dist-es/split-every.js @@ -0,0 +1,27 @@ +export function splitEvery(value, delimiter, numDelimiters) { + if (numDelimiters <= 0 || !Number.isInteger(numDelimiters)) { + throw new Error("Invalid number of delimiters (" + numDelimiters + ") for splitEvery."); + } + const segments = value.split(delimiter); + if (numDelimiters === 1) { + return segments; + } + const compoundSegments = []; + let currentSegment = ""; + for (let i = 0; i < segments.length; i++) { + if (currentSegment === "") { + currentSegment = segments[i]; + } + else { + currentSegment += delimiter + segments[i]; + } + if ((i + 1) % numDelimiters === 0) { + compoundSegments.push(currentSegment); + currentSegment = ""; + } + } + if (currentSegment !== "") { + compoundSegments.push(currentSegment); + } + return compoundSegments; +} diff --git 
a/amplify/functions/deleteDocument/node_modules/@smithy/smithy-client/dist-es/split-header.js b/amplify/functions/deleteDocument/node_modules/@smithy/smithy-client/dist-es/split-header.js new file mode 100644 index 0000000..518e77f --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@smithy/smithy-client/dist-es/split-header.js @@ -0,0 +1,37 @@ +export const splitHeader = (value) => { + const z = value.length; + const values = []; + let withinQuotes = false; + let prevChar = undefined; + let anchor = 0; + for (let i = 0; i < z; ++i) { + const char = value[i]; + switch (char) { + case `"`: + if (prevChar !== "\\") { + withinQuotes = !withinQuotes; + } + break; + case ",": + if (!withinQuotes) { + values.push(value.slice(anchor, i)); + anchor = i + 1; + } + break; + default: + } + prevChar = char; + } + values.push(value.slice(anchor)); + return values.map((v) => { + v = v.trim(); + const z = v.length; + if (z < 2) { + return v; + } + if (v[0] === `"` && v[z - 1] === `"`) { + v = v.slice(1, z - 1); + } + return v.replace(/\\"/g, '"'); + }); +}; diff --git a/amplify/functions/deleteDocument/node_modules/@smithy/smithy-client/dist-types/NoOpLogger.d.ts b/amplify/functions/deleteDocument/node_modules/@smithy/smithy-client/dist-types/NoOpLogger.d.ts new file mode 100644 index 0000000..93ebff4 --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@smithy/smithy-client/dist-types/NoOpLogger.d.ts @@ -0,0 +1,11 @@ +import { Logger } from "@smithy/types"; +/** + * @internal + */ +export declare class NoOpLogger implements Logger { + trace(): void; + debug(): void; + info(): void; + warn(): void; + error(): void; +} diff --git a/amplify/functions/deleteDocument/node_modules/@smithy/smithy-client/dist-types/client.d.ts b/amplify/functions/deleteDocument/node_modules/@smithy/smithy-client/dist-types/client.d.ts new file mode 100644 index 0000000..6f155e5 --- /dev/null +++ 
b/amplify/functions/deleteDocument/node_modules/@smithy/smithy-client/dist-types/client.d.ts @@ -0,0 +1,61 @@ +import { Client as IClient, Command, FetchHttpHandlerOptions, MetadataBearer, MiddlewareStack, NodeHttpHandlerOptions, RequestHandler } from "@smithy/types"; +/** + * @public + */ +export interface SmithyConfiguration { + requestHandler: RequestHandler | NodeHttpHandlerOptions | FetchHttpHandlerOptions | Record; + /** + * The API version set internally by the SDK, and is + * not planned to be used by customer code. + * @internal + */ + readonly apiVersion: string; + /** + * @public + * + * Default false. + * + * When true, the client will only resolve the middleware stack once per + * Command class. This means modifying the middlewareStack of the + * command or client after requests have been made will not be + * recognized. + * + * Calling client.destroy() also clears this cache. + * + * Enable this only if needing the additional time saved (0-1ms per request) + * and not needing middleware modifications between requests. + */ + cacheMiddleware?: boolean; +} +/** + * @internal + */ +export type SmithyResolvedConfiguration = { + requestHandler: RequestHandler; + readonly apiVersion: string; + cacheMiddleware?: boolean; +}; +/** + * @public + */ +export declare class Client> implements IClient { + readonly config: ResolvedClientConfiguration; + middlewareStack: MiddlewareStack; + /** + * Holds an object reference to the initial configuration object. + * Used to check that the config resolver stack does not create + * dangling instances of an intermediate form of the configuration object. + * + * @internal + */ + initConfig?: object; + /** + * May be used to cache the resolved handler function for a Command class. 
+ */ + private handlers?; + constructor(config: ResolvedClientConfiguration); + send(command: Command>, options?: HandlerOptions): Promise; + send(command: Command>, cb: (err: any, data?: OutputType) => void): void; + send(command: Command>, options: HandlerOptions, cb: (err: any, data?: OutputType) => void): void; + destroy(): void; +} diff --git a/amplify/functions/deleteDocument/node_modules/@smithy/smithy-client/dist-types/collect-stream-body.d.ts b/amplify/functions/deleteDocument/node_modules/@smithy/smithy-client/dist-types/collect-stream-body.d.ts new file mode 100644 index 0000000..33378b8 --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@smithy/smithy-client/dist-types/collect-stream-body.d.ts @@ -0,0 +1,5 @@ +/** + * @internal + * Backwards compatibility re-export. + */ +export { collectBody } from "@smithy/core/protocols"; diff --git a/amplify/functions/deleteDocument/node_modules/@smithy/smithy-client/dist-types/command.d.ts b/amplify/functions/deleteDocument/node_modules/@smithy/smithy-client/dist-types/command.d.ts new file mode 100644 index 0000000..3625b88 --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@smithy/smithy-client/dist-types/command.d.ts @@ -0,0 +1,113 @@ +import type { EndpointParameterInstructions } from "@smithy/middleware-endpoint"; +import type { Command as ICommand, Handler, HandlerExecutionContext, HttpRequest as IHttpRequest, HttpResponse as IHttpResponse, Logger, MetadataBearer, MiddlewareStack as IMiddlewareStack, OptionalParameter, Pluggable, RequestHandler, SerdeContext } from "@smithy/types"; +/** + * @public + */ +export declare abstract class Command implements ICommand { + abstract input: Input; + readonly middlewareStack: IMiddlewareStack; + /** + * Factory for Command ClassBuilder. 
+ * @internal + */ + static classBuilder; + }, SI extends object = any, SO extends MetadataBearer = any>(): ClassBuilder; + abstract resolveMiddleware(stack: IMiddlewareStack, configuration: ResolvedClientConfiguration, options: any): Handler; + /** + * @internal + */ + resolveMiddlewareWithContext(clientStack: IMiddlewareStack, configuration: { + logger: Logger; + requestHandler: RequestHandler; + }, options: any, { middlewareFn, clientName, commandName, inputFilterSensitiveLog, outputFilterSensitiveLog, smithyContext, additionalContext, CommandCtor, }: ResolveMiddlewareContextArgs): import("@smithy/types").InitializeHandler; +} +/** + * @internal + */ +type ResolveMiddlewareContextArgs = { + middlewareFn: (CommandCtor: any, clientStack: any, config: any, options: any) => Pluggable[]; + clientName: string; + commandName: string; + smithyContext: Record; + additionalContext: HandlerExecutionContext; + inputFilterSensitiveLog: (_: any) => any; + outputFilterSensitiveLog: (_: any) => any; + CommandCtor: any; +}; +/** + * @internal + */ +declare class ClassBuilder; +}, SI extends object = any, SO extends MetadataBearer = any> { + private _init; + private _ep; + private _middlewareFn; + private _commandName; + private _clientName; + private _additionalContext; + private _smithyContext; + private _inputFilterSensitiveLog; + private _outputFilterSensitiveLog; + private _serializer; + private _deserializer; + /** + * Optional init callback. + */ + init(cb: (_: Command) => void): void; + /** + * Set the endpoint parameter instructions. + */ + ep(endpointParameterInstructions: EndpointParameterInstructions): ClassBuilder; + /** + * Add any number of middleware. + */ + m(middlewareSupplier: (CommandCtor: any, clientStack: any, config: any, options: any) => Pluggable[]): ClassBuilder; + /** + * Set the initial handler execution context Smithy field. 
+ */ + s(service: string, operation: string, smithyContext?: Record): ClassBuilder; + /** + * Set the initial handler execution context. + */ + c(additionalContext?: HandlerExecutionContext): ClassBuilder; + /** + * Set constant string identifiers for the operation. + */ + n(clientName: string, commandName: string): ClassBuilder; + /** + * Set the input and output sensistive log filters. + */ + f(inputFilter?: (_: any) => any, outputFilter?: (_: any) => any): ClassBuilder; + /** + * Sets the serializer. + */ + ser(serializer: (input: I, context?: SerdeContext | any) => Promise): ClassBuilder; + /** + * Sets the deserializer. + */ + de(deserializer: (output: IHttpResponse, context?: SerdeContext | any) => Promise): ClassBuilder; + /** + * @returns a Command class with the classBuilder properties. + */ + build(): { + new (input: I): CommandImpl; + new (...[input]: OptionalParameter): CommandImpl; + getEndpointParameterInstructions(): EndpointParameterInstructions; + }; +} +/** + * A concrete implementation of ICommand with no abstract members. 
+ * @public + */ +export interface CommandImpl; +}, SI extends object = any, SO extends MetadataBearer = any> extends Command { + readonly input: I; + resolveMiddleware(stack: IMiddlewareStack, configuration: C, options: any): Handler; +} +export {}; diff --git a/amplify/functions/deleteDocument/node_modules/@smithy/smithy-client/dist-types/constants.d.ts b/amplify/functions/deleteDocument/node_modules/@smithy/smithy-client/dist-types/constants.d.ts new file mode 100644 index 0000000..c17e1c8 --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@smithy/smithy-client/dist-types/constants.d.ts @@ -0,0 +1,4 @@ +/** + * @internal + */ +export declare const SENSITIVE_STRING = "***SensitiveInformation***"; diff --git a/amplify/functions/deleteDocument/node_modules/@smithy/smithy-client/dist-types/create-aggregated-client.d.ts b/amplify/functions/deleteDocument/node_modules/@smithy/smithy-client/dist-types/create-aggregated-client.d.ts new file mode 100644 index 0000000..00e23d8 --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@smithy/smithy-client/dist-types/create-aggregated-client.d.ts @@ -0,0 +1,9 @@ +import { Client } from "./client"; +/** + * @internal + * + * @param commands - command lookup container. + * @param client - client instance on which to add aggregated methods. + * @returns an aggregated client with dynamically created methods. 
+ */ +export declare const createAggregatedClient: (commands: Record, Client: new (...args: any) => Client) => void; diff --git a/amplify/functions/deleteDocument/node_modules/@smithy/smithy-client/dist-types/date-utils.d.ts b/amplify/functions/deleteDocument/node_modules/@smithy/smithy-client/dist-types/date-utils.d.ts new file mode 100644 index 0000000..99c55f4 --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@smithy/smithy-client/dist-types/date-utils.d.ts @@ -0,0 +1,73 @@ +/** + * @internal + * + * Builds a proper UTC HttpDate timestamp from a Date object + * since not all environments will have this as the expected + * format. + * + * @see {@link https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Global_Objects/Date/toUTCString} + * - Prior to ECMAScript 2018, the format of the return value + * - varied according to the platform. The most common return + * - value was an RFC-1123 formatted date stamp, which is a + * - slightly updated version of RFC-822 date stamps. + */ +export declare function dateToUtcString(date: Date): string; +/** + * @internal + * + * Parses a value into a Date. Returns undefined if the input is null or + * undefined, throws an error if the input is not a string that can be parsed + * as an RFC 3339 date. + * + * Input strings must conform to RFC3339 section 5.6, and cannot have a UTC + * offset. Fractional precision is supported. + * + * @see {@link https://xml2rfc.tools.ietf.org/public/rfc/html/rfc3339.html#anchor14} + * + * @param value - the value to parse + * @returns a Date or undefined + */ +export declare const parseRfc3339DateTime: (value: unknown) => Date | undefined; +/** + * @internal + * + * Parses a value into a Date. Returns undefined if the input is null or + * undefined, throws an error if the input is not a string that can be parsed + * as an RFC 3339 date. + * + * Input strings must conform to RFC3339 section 5.6, and can have a UTC + * offset. Fractional precision is supported. 
+ * + * @see {@link https://xml2rfc.tools.ietf.org/public/rfc/html/rfc3339.html#anchor14} + * + * @param value - the value to parse + * @returns a Date or undefined + */ +export declare const parseRfc3339DateTimeWithOffset: (value: unknown) => Date | undefined; +/** + * @internal + * + * Parses a value into a Date. Returns undefined if the input is null or + * undefined, throws an error if the input is not a string that can be parsed + * as an RFC 7231 IMF-fixdate or obs-date. + * + * Input strings must conform to RFC7231 section 7.1.1.1. Fractional seconds are supported. + * + * @see {@link https://datatracker.ietf.org/doc/html/rfc7231.html#section-7.1.1.1} + * + * @param value - the value to parse + * @returns a Date or undefined + */ +export declare const parseRfc7231DateTime: (value: unknown) => Date | undefined; +/** + * @internal + * + * Parses a value into a Date. Returns undefined if the input is null or + * undefined, throws an error if the input is not a number or a parseable string. + * + * Input strings must be an integer or floating point number. Fractional seconds are supported. + * + * @param value - the value to parse + * @returns a Date or undefined + */ +export declare const parseEpochTimestamp: (value: unknown) => Date | undefined; diff --git a/amplify/functions/deleteDocument/node_modules/@smithy/smithy-client/dist-types/default-error-handler.d.ts b/amplify/functions/deleteDocument/node_modules/@smithy/smithy-client/dist-types/default-error-handler.d.ts new file mode 100644 index 0000000..fd4b52d --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@smithy/smithy-client/dist-types/default-error-handler.d.ts @@ -0,0 +1,13 @@ +/** + * Always throws an error with the given `exceptionCtor` and other arguments. + * This is only called from an error handling code path. 
+ * + * @internal + */ +export declare const throwDefaultError: ({ output, parsedBody, exceptionCtor, errorCode }: any) => never; +/** + * @internal + * + * Creates {@link throwDefaultError} with bound ExceptionCtor. + */ +export declare const withBaseException: (ExceptionCtor: new (...args: any) => any) => any; diff --git a/amplify/functions/deleteDocument/node_modules/@smithy/smithy-client/dist-types/defaults-mode.d.ts b/amplify/functions/deleteDocument/node_modules/@smithy/smithy-client/dist-types/defaults-mode.d.ts new file mode 100644 index 0000000..1ddb6f0 --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@smithy/smithy-client/dist-types/defaults-mode.d.ts @@ -0,0 +1,28 @@ +/** + * @internal + */ +export declare const loadConfigsForDefaultMode: (mode: ResolvedDefaultsMode) => DefaultsModeConfigs; +/** + * Option determining how certain default configuration options are resolved in the SDK. It can be one of the value listed below: + * * `"standard"`:

The STANDARD mode provides the latest recommended default values that should be safe to run in most scenarios

Note that the default values vended from this mode might change as best practices may evolve. As a result, it is encouraged to perform tests when upgrading the SDK

+ * * `"in-region"`:

The IN_REGION mode builds on the standard mode and includes optimization tailored for applications which call AWS services from within the same AWS region

Note that the default values vended from this mode might change as best practices may evolve. As a result, it is encouraged to perform tests when upgrading the SDK

+ * * `"cross-region"`:

The CROSS_REGION mode builds on the standard mode and includes optimization tailored for applications which call AWS services in a different region

Note that the default values vended from this mode might change as best practices may evolve. As a result, it is encouraged to perform tests when upgrading the SDK

+ * * `"mobile"`:

The MOBILE mode builds on the standard mode and includes optimization tailored for mobile applications

Note that the default values vended from this mode might change as best practices may evolve. As a result, it is encouraged to perform tests when upgrading the SDK

+ * * `"auto"`:

The AUTO mode is an experimental mode that builds on the standard mode. The SDK will attempt to discover the execution environment to determine the appropriate settings automatically.

Note that the auto detection is heuristics-based and does not guarantee 100% accuracy. STANDARD mode will be used if the execution environment cannot be determined. The auto detection might query EC2 Instance Metadata service, which might introduce latency. Therefore we recommend choosing an explicit defaults_mode instead if startup latency is critical to your application

+ * * `"legacy"`:

The LEGACY mode provides default settings that vary per SDK and were used prior to establishment of defaults_mode

+ * + * @defaultValue "legacy" + */ +export type DefaultsMode = "standard" | "in-region" | "cross-region" | "mobile" | "auto" | "legacy"; +/** + * @internal + */ +export type ResolvedDefaultsMode = Exclude; +/** + * @internal + */ +export interface DefaultsModeConfigs { + retryMode?: string; + connectionTimeout?: number; + requestTimeout?: number; +} diff --git a/amplify/functions/deleteDocument/node_modules/@smithy/smithy-client/dist-types/emitWarningIfUnsupportedVersion.d.ts b/amplify/functions/deleteDocument/node_modules/@smithy/smithy-client/dist-types/emitWarningIfUnsupportedVersion.d.ts new file mode 100644 index 0000000..8fc02ce --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@smithy/smithy-client/dist-types/emitWarningIfUnsupportedVersion.d.ts @@ -0,0 +1,8 @@ +/** + * @internal + * + * Emits warning if the provided Node.js version string is pending deprecation. + * + * @param version - The Node.js version string. + */ +export declare const emitWarningIfUnsupportedVersion: (version: string) => void; diff --git a/amplify/functions/deleteDocument/node_modules/@smithy/smithy-client/dist-types/exceptions.d.ts b/amplify/functions/deleteDocument/node_modules/@smithy/smithy-client/dist-types/exceptions.d.ts new file mode 100644 index 0000000..0a362c6 --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@smithy/smithy-client/dist-types/exceptions.d.ts @@ -0,0 +1,42 @@ +import { HttpResponse, MetadataBearer, ResponseMetadata, RetryableTrait, SmithyException } from "@smithy/types"; +/** + * The type of the exception class constructor parameter. The returned type contains the properties + * in the `ExceptionType` but not in the `BaseExceptionType`. If the `BaseExceptionType` contains + * `$metadata` and `message` properties, it's also included in the returned type. 
+ * @internal + */ +export type ExceptionOptionType = Omit>; +/** + * @public + */ +export interface ServiceExceptionOptions extends SmithyException, MetadataBearer { + message?: string; +} +/** + * @public + * + * Base exception class for the exceptions from the server-side. + */ +export declare class ServiceException extends Error implements SmithyException, MetadataBearer { + readonly $fault: "client" | "server"; + $response?: HttpResponse; + $retryable?: RetryableTrait; + $metadata: ResponseMetadata; + constructor(options: ServiceExceptionOptions); + /** + * Checks if a value is an instance of ServiceException (duck typed) + */ + static isInstance(value: unknown): value is ServiceException; + /** + * Custom instanceof check to support the operator for ServiceException base class + */ + static [Symbol.hasInstance](instance: unknown): boolean; +} +/** + * This method inject unmodeled member to a deserialized SDK exception, + * and load the error message from different possible keys('message', + * 'Message'). + * + * @internal + */ +export declare const decorateServiceException: (exception: E, additions?: Record) => E; diff --git a/amplify/functions/deleteDocument/node_modules/@smithy/smithy-client/dist-types/extended-encode-uri-component.d.ts b/amplify/functions/deleteDocument/node_modules/@smithy/smithy-client/dist-types/extended-encode-uri-component.d.ts new file mode 100644 index 0000000..ced666a --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@smithy/smithy-client/dist-types/extended-encode-uri-component.d.ts @@ -0,0 +1,5 @@ +/** + * @internal + * Backwards compatibility re-export. 
+ */ +export { extendedEncodeURIComponent } from "@smithy/core/protocols"; diff --git a/amplify/functions/deleteDocument/node_modules/@smithy/smithy-client/dist-types/extensions/checksum.d.ts b/amplify/functions/deleteDocument/node_modules/@smithy/smithy-client/dist-types/extensions/checksum.d.ts new file mode 100644 index 0000000..8b5dd7b --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@smithy/smithy-client/dist-types/extensions/checksum.d.ts @@ -0,0 +1,24 @@ +import type { ChecksumAlgorithm, ChecksumConfiguration, ChecksumConstructor, HashConstructor } from "@smithy/types"; +import { AlgorithmId } from "@smithy/types"; +export { AlgorithmId, ChecksumAlgorithm, ChecksumConfiguration }; +/** + * @internal + */ +export type PartialChecksumRuntimeConfigType = Partial<{ + sha256: ChecksumConstructor | HashConstructor; + md5: ChecksumConstructor | HashConstructor; + crc32: ChecksumConstructor | HashConstructor; + crc32c: ChecksumConstructor | HashConstructor; + sha1: ChecksumConstructor | HashConstructor; +}>; +/** + * @internal + */ +export declare const getChecksumConfiguration: (runtimeConfig: PartialChecksumRuntimeConfigType) => { + addChecksumAlgorithm(algo: ChecksumAlgorithm): void; + checksumAlgorithms(): ChecksumAlgorithm[]; +}; +/** + * @internal + */ +export declare const resolveChecksumRuntimeConfig: (clientConfig: ChecksumConfiguration) => PartialChecksumRuntimeConfigType; diff --git a/amplify/functions/deleteDocument/node_modules/@smithy/smithy-client/dist-types/extensions/defaultExtensionConfiguration.d.ts b/amplify/functions/deleteDocument/node_modules/@smithy/smithy-client/dist-types/extensions/defaultExtensionConfiguration.d.ts new file mode 100644 index 0000000..42de409 --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@smithy/smithy-client/dist-types/extensions/defaultExtensionConfiguration.d.ts @@ -0,0 +1,38 @@ +import type { DefaultExtensionConfiguration } from "@smithy/types"; +import { 
PartialChecksumRuntimeConfigType } from "./checksum"; +import { PartialRetryRuntimeConfigType } from "./retry"; +/** + * @internal + */ +export type DefaultExtensionRuntimeConfigType = PartialRetryRuntimeConfigType & PartialChecksumRuntimeConfigType; +/** + * @internal + * + * Helper function to resolve default extension configuration from runtime config + */ +export declare const getDefaultExtensionConfiguration: (runtimeConfig: DefaultExtensionRuntimeConfigType) => { + addChecksumAlgorithm(algo: import("@smithy/types").ChecksumAlgorithm): void; + checksumAlgorithms(): import("@smithy/types").ChecksumAlgorithm[]; +} & { + setRetryStrategy(retryStrategy: import("@smithy/types").Provider): void; + retryStrategy(): import("@smithy/types").Provider; +}; +/** + * @deprecated use getDefaultExtensionConfiguration + * @internal + * + * Helper function to resolve default extension configuration from runtime config + */ +export declare const getDefaultClientConfiguration: (runtimeConfig: DefaultExtensionRuntimeConfigType) => { + addChecksumAlgorithm(algo: import("@smithy/types").ChecksumAlgorithm): void; + checksumAlgorithms(): import("@smithy/types").ChecksumAlgorithm[]; +} & { + setRetryStrategy(retryStrategy: import("@smithy/types").Provider): void; + retryStrategy(): import("@smithy/types").Provider; +}; +/** + * @internal + * + * Helper function to resolve runtime config from default extension configuration + */ +export declare const resolveDefaultRuntimeConfig: (config: DefaultExtensionConfiguration) => DefaultExtensionRuntimeConfigType; diff --git a/amplify/functions/deleteDocument/node_modules/@smithy/smithy-client/dist-types/extensions/index.d.ts b/amplify/functions/deleteDocument/node_modules/@smithy/smithy-client/dist-types/extensions/index.d.ts new file mode 100644 index 0000000..f1b8074 --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@smithy/smithy-client/dist-types/extensions/index.d.ts @@ -0,0 +1 @@ +export * from 
"./defaultExtensionConfiguration"; diff --git a/amplify/functions/deleteDocument/node_modules/@smithy/smithy-client/dist-types/extensions/retry.d.ts b/amplify/functions/deleteDocument/node_modules/@smithy/smithy-client/dist-types/extensions/retry.d.ts new file mode 100644 index 0000000..6e28827 --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@smithy/smithy-client/dist-types/extensions/retry.d.ts @@ -0,0 +1,18 @@ +import { Provider, RetryStrategy, RetryStrategyConfiguration, RetryStrategyV2 } from "@smithy/types"; +/** + * @internal + */ +export type PartialRetryRuntimeConfigType = Partial<{ + retryStrategy: Provider; +}>; +/** + * @internal + */ +export declare const getRetryConfiguration: (runtimeConfig: PartialRetryRuntimeConfigType) => { + setRetryStrategy(retryStrategy: Provider): void; + retryStrategy(): Provider; +}; +/** + * @internal + */ +export declare const resolveRetryRuntimeConfig: (retryStrategyConfiguration: RetryStrategyConfiguration) => PartialRetryRuntimeConfigType; diff --git a/amplify/functions/deleteDocument/node_modules/@smithy/smithy-client/dist-types/get-array-if-single-item.d.ts b/amplify/functions/deleteDocument/node_modules/@smithy/smithy-client/dist-types/get-array-if-single-item.d.ts new file mode 100644 index 0000000..6468b91 --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@smithy/smithy-client/dist-types/get-array-if-single-item.d.ts @@ -0,0 +1,7 @@ +/** + * @internal + * + * The XML parser will set one K:V for a member that could + * return multiple entries but only has one. 
+ */ +export declare const getArrayIfSingleItem: (mayBeArray: T) => T | T[]; diff --git a/amplify/functions/deleteDocument/node_modules/@smithy/smithy-client/dist-types/get-value-from-text-node.d.ts b/amplify/functions/deleteDocument/node_modules/@smithy/smithy-client/dist-types/get-value-from-text-node.d.ts new file mode 100644 index 0000000..7163e5a --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@smithy/smithy-client/dist-types/get-value-from-text-node.d.ts @@ -0,0 +1,7 @@ +/** + * @internal + * + * Recursively parses object and populates value is node from + * "#text" key if it's available + */ +export declare const getValueFromTextNode: (obj: any) => any; diff --git a/amplify/functions/deleteDocument/node_modules/@smithy/smithy-client/dist-types/index.d.ts b/amplify/functions/deleteDocument/node_modules/@smithy/smithy-client/dist-types/index.d.ts new file mode 100644 index 0000000..4a4ac19 --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@smithy/smithy-client/dist-types/index.d.ts @@ -0,0 +1,26 @@ +export type { DocumentType, SdkError, SmithyException } from "@smithy/types"; +export * from "./client"; +export * from "./collect-stream-body"; +export * from "./command"; +export * from "./constants"; +export * from "./create-aggregated-client"; +export * from "./date-utils"; +export * from "./default-error-handler"; +export * from "./defaults-mode"; +export * from "./emitWarningIfUnsupportedVersion"; +export * from "./exceptions"; +export * from "./extended-encode-uri-component"; +export * from "./extensions"; +export * from "./get-array-if-single-item"; +export * from "./get-value-from-text-node"; +export * from "./is-serializable-header-value"; +export * from "./lazy-json"; +export * from "./NoOpLogger"; +export * from "./object-mapping"; +export * from "./parse-utils"; +export * from "./quote-header"; +export * from "./resolve-path"; +export * from "./ser-utils"; +export * from "./serde-json"; +export * from "./split-every"; 
+export * from "./split-header"; diff --git a/amplify/functions/deleteDocument/node_modules/@smithy/smithy-client/dist-types/is-serializable-header-value.d.ts b/amplify/functions/deleteDocument/node_modules/@smithy/smithy-client/dist-types/is-serializable-header-value.d.ts new file mode 100644 index 0000000..a35a23a --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@smithy/smithy-client/dist-types/is-serializable-header-value.d.ts @@ -0,0 +1,5 @@ +/** + * @internal + * @returns whether the header value is serializable. + */ +export declare const isSerializableHeaderValue: (value: any) => boolean; diff --git a/amplify/functions/deleteDocument/node_modules/@smithy/smithy-client/dist-types/lazy-json.d.ts b/amplify/functions/deleteDocument/node_modules/@smithy/smithy-client/dist-types/lazy-json.d.ts new file mode 100644 index 0000000..df7eb51 --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@smithy/smithy-client/dist-types/lazy-json.d.ts @@ -0,0 +1,46 @@ +/** + * @public + * + * A model field with this type means that you may provide a JavaScript + * object in lieu of a JSON string, and it will be serialized to JSON + * automatically before being sent in a request. + * + * For responses, you will receive a "LazyJsonString", which is a boxed String object + * with additional mixin methods. + * To get the string value, call `.toString()`, or to get the JSON object value, + * call `.deserializeJSON()` or parse it yourself. + */ +export type AutomaticJsonStringConversion = Parameters[0] | LazyJsonString; +/** + * @internal + * + */ +export interface LazyJsonString extends String { + /** + * @returns the JSON parsing of the string value. + */ + deserializeJSON(): any; + /** + * @returns the original string value rather than a JSON.stringified value. 
+ */ + toJSON(): string; +} +/** + * @internal + * + * Extension of the native String class in the previous implementation + * has negative global performance impact on method dispatch for strings, + * and is generally discouraged. + * + * This current implementation may look strange, but is necessary to preserve the interface and + * behavior of extending the String class. + */ +export declare const LazyJsonString: { + (s: string): LazyJsonString; + new (s: string): LazyJsonString; + from(s: any): LazyJsonString; + /** + * @deprecated use #from. + */ + fromObject(s: any): LazyJsonString; +}; diff --git a/amplify/functions/deleteDocument/node_modules/@smithy/smithy-client/dist-types/object-mapping.d.ts b/amplify/functions/deleteDocument/node_modules/@smithy/smithy-client/dist-types/object-mapping.d.ts new file mode 100644 index 0000000..97e28e5 --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@smithy/smithy-client/dist-types/object-mapping.d.ts @@ -0,0 +1,162 @@ +/** + * @internal + * + * A set of instructions for multiple keys. + * The aim is to provide a concise yet readable way to map and filter values + * onto a target object. 
+ * + * @example + * ```javascript + * const example: ObjectMappingInstructions = { + * lazyValue1: [, () => 1], + * lazyValue2: [, () => 2], + * lazyValue3: [, () => 3], + * lazyConditionalValue1: [() => true, () => 4], + * lazyConditionalValue2: [() => true, () => 5], + * lazyConditionalValue3: [true, () => 6], + * lazyConditionalValue4: [false, () => 44], + * lazyConditionalValue5: [() => false, () => 55], + * lazyConditionalValue6: ["", () => 66], + * simpleValue1: [, 7], + * simpleValue2: [, 8], + * simpleValue3: [, 9], + * conditionalValue1: [() => true, 10], + * conditionalValue2: [() => true, 11], + * conditionalValue3: [{}, 12], + * conditionalValue4: [false, 110], + * conditionalValue5: [() => false, 121], + * conditionalValue6: ["", 132], + * }; + * + * const exampleResult: Record = { + * lazyValue1: 1, + * lazyValue2: 2, + * lazyValue3: 3, + * lazyConditionalValue1: 4, + * lazyConditionalValue2: 5, + * lazyConditionalValue3: 6, + * simpleValue1: 7, + * simpleValue2: 8, + * simpleValue3: 9, + * conditionalValue1: 10, + * conditionalValue2: 11, + * conditionalValue3: 12, + * }; + * ``` + */ +export type ObjectMappingInstructions = Record; +/** + * @internal + * + * A variant of the object mapping instruction for the `take` function. + * In this case, the source value is provided to the value function, turning it + * from a supplier into a mapper. + */ +export type SourceMappingInstructions = Record; +/** + * @internal + * + * An instruction set for assigning a value to a target object. 
+ */ +export type ObjectMappingInstruction = LazyValueInstruction | ConditionalLazyValueInstruction | SimpleValueInstruction | ConditionalValueInstruction | UnfilteredValue; +/** + * @internal + * + * non-array + */ +export type UnfilteredValue = any; +/** + * @internal + */ +export type LazyValueInstruction = [FilterStatus, ValueSupplier]; +/** + * @internal + */ +export type ConditionalLazyValueInstruction = [FilterStatusSupplier, ValueSupplier]; +/** + * @internal + */ +export type SimpleValueInstruction = [FilterStatus, Value]; +/** + * @internal + */ +export type ConditionalValueInstruction = [ValueFilteringFunction, Value]; +/** + * @internal + */ +export type SourceMappingInstruction = [(ValueFilteringFunction | FilterStatus)?, ValueMapper?, string?]; +/** + * @internal + * + * Filter is considered passed if + * 1. It is a boolean true. + * 2. It is not undefined and is itself truthy. + * 3. It is undefined and the corresponding _value_ is neither null nor undefined. + */ +export type FilterStatus = boolean | unknown | void; +/** + * @internal + * + * Supplies the filter check but not against any value as input. + */ +export type FilterStatusSupplier = () => boolean; +/** + * @internal + * + * Filter check with the given value. + */ +export type ValueFilteringFunction = (value: any) => boolean; +/** + * @internal + * + * Supplies the value for lazy evaluation. + */ +export type ValueSupplier = () => any; +/** + * @internal + * + * A function that maps the source value to the target value. + * Defaults to pass-through with nullish check. + */ +export type ValueMapper = (value: any) => any; +/** + * @internal + * + * A non-function value. + */ +export type Value = any; +/** + * @internal + * Internal/Private, for codegen use only. + * + * Transfer a set of keys from [instructions] to [target]. + * + * For each instruction in the record, the target key will be the instruction key. + * The target assignment will be conditional on the instruction's filter. 
+ * The target assigned value will be supplied by the instructions as an evaluable function or non-function value. + * + * @see ObjectMappingInstructions for an example. + */ +export declare function map(target: any, filter: (value: any) => boolean, instructions: Record): typeof target; +/** + * @internal + */ +export declare function map(instructions: ObjectMappingInstructions): any; +/** + * @internal + */ +export declare function map(target: any, instructions: ObjectMappingInstructions): typeof target; +/** + * Convert a regular object `{ k: v }` to `{ k: [, v] }` mapping instruction set with default + * filter. + * + * @internal + */ +export declare const convertMap: (target: any) => Record; +/** + * @param source - original object with data. + * @param instructions - how to map the data. + * @returns new object mapped from the source object. + * @internal + */ +export declare const take: (source: any, instructions: SourceMappingInstructions) => any; diff --git a/amplify/functions/deleteDocument/node_modules/@smithy/smithy-client/dist-types/parse-utils.d.ts b/amplify/functions/deleteDocument/node_modules/@smithy/smithy-client/dist-types/parse-utils.d.ts new file mode 100644 index 0000000..b5ded6f --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@smithy/smithy-client/dist-types/parse-utils.d.ts @@ -0,0 +1,270 @@ +/** + * @internal + * + * Give an input string, strictly parses a boolean value. + * + * @param value - The boolean string to parse. + * @returns true for "true", false for "false", otherwise an error is thrown. + */ +export declare const parseBoolean: (value: string) => boolean; +/** + * @internal + * + * Asserts a value is a boolean and returns it. + * Casts strings and numbers with a warning if there is evidence that they were + * intended to be booleans. + * + * @param value - A value that is expected to be a boolean. + * @returns The value if it's a boolean, undefined if it's null/undefined, + * otherwise an error is thrown. 
+ */ +export declare const expectBoolean: (value: any) => boolean | undefined; +/** + * @internal + * + * Asserts a value is a number and returns it. + * Casts strings with a warning if the string is a parseable number. + * This is to unblock slight API definition/implementation inconsistencies. + * + * @param value - A value that is expected to be a number. + * @returns The value if it's a number, undefined if it's null/undefined, + * otherwise an error is thrown. + */ +export declare const expectNumber: (value: any) => number | undefined; +/** + * @internal + * + * Asserts a value is a 32-bit float and returns it. + * + * @param value - A value that is expected to be a 32-bit float. + * @returns The value if it's a float, undefined if it's null/undefined, + * otherwise an error is thrown. + */ +export declare const expectFloat32: (value: any) => number | undefined; +/** + * @internal + * + * Asserts a value is an integer and returns it. + * + * @param value - A value that is expected to be an integer. + * @returns The value if it's an integer, undefined if it's null/undefined, + * otherwise an error is thrown. + */ +export declare const expectLong: (value: any) => number | undefined; +/** + * @internal + * + * @deprecated Use expectLong + */ +export declare const expectInt: (value: any) => number | undefined; +/** + * @internal + * + * Asserts a value is a 32-bit integer and returns it. + * + * @param value - A value that is expected to be an integer. + * @returns The value if it's an integer, undefined if it's null/undefined, + * otherwise an error is thrown. + */ +export declare const expectInt32: (value: any) => number | undefined; +/** + * @internal + * + * Asserts a value is a 16-bit integer and returns it. + * + * @param value - A value that is expected to be an integer. + * @returns The value if it's an integer, undefined if it's null/undefined, + * otherwise an error is thrown. 
+ */ +export declare const expectShort: (value: any) => number | undefined; +/** + * @internal + * + * Asserts a value is an 8-bit integer and returns it. + * + * @param value - A value that is expected to be an integer. + * @returns The value if it's an integer, undefined if it's null/undefined, + * otherwise an error is thrown. + */ +export declare const expectByte: (value: any) => number | undefined; +/** + * @internal + * + * Asserts a value is not null or undefined and returns it, or throws an error. + * + * @param value - A value that is expected to be defined + * @param location - The location where we're expecting to find a defined object (optional) + * @returns The value if it's not undefined, otherwise throws an error + */ +export declare const expectNonNull: (value: T | null | undefined, location?: string) => T; +/** + * @internal + * + * Asserts a value is an JSON-like object and returns it. This is expected to be used + * with values parsed from JSON (arrays, objects, numbers, strings, booleans). + * + * @param value - A value that is expected to be an object + * @returns The value if it's an object, undefined if it's null/undefined, + * otherwise an error is thrown. + */ +export declare const expectObject: (value: any) => Record | undefined; +/** + * @internal + * + * Asserts a value is a string and returns it. + * Numbers and boolean will be cast to strings with a warning. + * + * @param value - A value that is expected to be a string. + * @returns The value if it's a string, undefined if it's null/undefined, + * otherwise an error is thrown. + */ +export declare const expectString: (value: any) => string | undefined; +/** + * @internal + * + * Asserts a value is a JSON-like object with only one non-null/non-undefined key and + * returns it. + * + * @param value - A value that is expected to be an object with exactly one non-null, + * non-undefined key. 
+ * @returns the value if it's a union, undefined if it's null/undefined, otherwise + * an error is thrown. + */ +export declare const expectUnion: (value: unknown) => Record | undefined; +/** + * @internal + * + * Parses a value into a double. If the value is null or undefined, undefined + * will be returned. If the value is a string, it will be parsed by the standard + * parseFloat with one exception: NaN may only be explicitly set as the string + * "NaN", any implicit Nan values will result in an error being thrown. If any + * other type is provided, an exception will be thrown. + * + * @param value - A number or string representation of a double. + * @returns The value as a number, or undefined if it's null/undefined. + */ +export declare const strictParseDouble: (value: string | number) => number | undefined; +/** + * @internal + * + * @deprecated Use strictParseDouble + */ +export declare const strictParseFloat: (value: string | number) => number | undefined; +/** + * @internal + * + * Parses a value into a float. If the value is null or undefined, undefined + * will be returned. If the value is a string, it will be parsed by the standard + * parseFloat with one exception: NaN may only be explicitly set as the string + * "NaN", any implicit Nan values will result in an error being thrown. If any + * other type is provided, an exception will be thrown. + * + * @param value - A number or string representation of a float. + * @returns The value as a number, or undefined if it's null/undefined. + */ +export declare const strictParseFloat32: (value: string | number) => number | undefined; +/** + * @internal + * + * Asserts a value is a number and returns it. If the value is a string + * representation of a non-numeric number type (NaN, Infinity, -Infinity), + * the value will be parsed. Any other string value will result in an exception + * being thrown. Null or undefined will be returned as undefined. Any other + * type will result in an exception being thrown. 
+ * + * @param value - A number or string representation of a non-numeric float. + * @returns The value as a number, or undefined if it's null/undefined. + */ +export declare const limitedParseDouble: (value: string | number) => number | undefined; +/** + * @internal + * + * @deprecated Use limitedParseDouble + */ +export declare const handleFloat: (value: string | number) => number | undefined; +/** + * @internal + * + * @deprecated Use limitedParseDouble + */ +export declare const limitedParseFloat: (value: string | number) => number | undefined; +/** + * @internal + * + * Asserts a value is a 32-bit float and returns it. If the value is a string + * representation of a non-numeric number type (NaN, Infinity, -Infinity), + * the value will be parsed. Any other string value will result in an exception + * being thrown. Null or undefined will be returned as undefined. Any other + * type will result in an exception being thrown. + * + * @param value - A number or string representation of a non-numeric float. + * @returns The value as a number, or undefined if it's null/undefined. + */ +export declare const limitedParseFloat32: (value: string | number) => number | undefined; +/** + * @internal + * + * Parses a value into an integer. If the value is null or undefined, undefined + * will be returned. If the value is a string, it will be parsed by parseFloat + * and the result will be asserted to be an integer. If the parsed value is not + * an integer, or the raw value is any type other than a string or number, an + * exception will be thrown. + * + * @param value - A number or string representation of an integer. + * @returns The value as a number, or undefined if it's null/undefined. 
+ */ +export declare const strictParseLong: (value: string | number) => number | undefined; +/** + * @internal + * + * @deprecated Use strictParseLong + */ +export declare const strictParseInt: (value: string | number) => number | undefined; +/** + * @internal + * + * Parses a value into a 32-bit integer. If the value is null or undefined, undefined + * will be returned. If the value is a string, it will be parsed by parseFloat + * and the result will be asserted to be an integer. If the parsed value is not + * an integer, or the raw value is any type other than a string or number, an + * exception will be thrown. + * + * @param value - A number or string representation of a 32-bit integer. + * @returns The value as a number, or undefined if it's null/undefined. + */ +export declare const strictParseInt32: (value: string | number) => number | undefined; +/** + * @internal + * + * Parses a value into a 16-bit integer. If the value is null or undefined, undefined + * will be returned. If the value is a string, it will be parsed by parseFloat + * and the result will be asserted to be an integer. If the parsed value is not + * an integer, or the raw value is any type other than a string or number, an + * exception will be thrown. + * + * @param value - A number or string representation of a 16-bit integer. + * @returns The value as a number, or undefined if it's null/undefined. + */ +export declare const strictParseShort: (value: string | number) => number | undefined; +/** + * @internal + * + * Parses a value into an 8-bit integer. If the value is null or undefined, undefined + * will be returned. If the value is a string, it will be parsed by parseFloat + * and the result will be asserted to be an integer. If the parsed value is not + * an integer, or the raw value is any type other than a string or number, an + * exception will be thrown. + * + * @param value - A number or string representation of an 8-bit integer. 
+ * @returns The value as a number, or undefined if it's null/undefined. + */ +export declare const strictParseByte: (value: string | number) => number | undefined; +/** + * @internal + */ +export declare const logger: { + warn: { + (...data: any[]): void; + (message?: any, ...optionalParams: any[]): void; + }; +}; diff --git a/amplify/functions/deleteDocument/node_modules/@smithy/smithy-client/dist-types/quote-header.d.ts b/amplify/functions/deleteDocument/node_modules/@smithy/smithy-client/dist-types/quote-header.d.ts new file mode 100644 index 0000000..73d6c16 --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@smithy/smithy-client/dist-types/quote-header.d.ts @@ -0,0 +1,6 @@ +/** + * @public + * @param part - header list element + * @returns quoted string if part contains delimiter. + */ +export declare function quoteHeader(part: string): string; diff --git a/amplify/functions/deleteDocument/node_modules/@smithy/smithy-client/dist-types/resolve-path.d.ts b/amplify/functions/deleteDocument/node_modules/@smithy/smithy-client/dist-types/resolve-path.d.ts new file mode 100644 index 0000000..2a3204f --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@smithy/smithy-client/dist-types/resolve-path.d.ts @@ -0,0 +1,5 @@ +/** + * @internal + * Backwards compatibility re-export. + */ +export { resolvedPath } from "@smithy/core/protocols"; diff --git a/amplify/functions/deleteDocument/node_modules/@smithy/smithy-client/dist-types/ser-utils.d.ts b/amplify/functions/deleteDocument/node_modules/@smithy/smithy-client/dist-types/ser-utils.d.ts new file mode 100644 index 0000000..ae03c61 --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@smithy/smithy-client/dist-types/ser-utils.d.ts @@ -0,0 +1,15 @@ +/** + * @internal + * + * Serializes a number, turning non-numeric values into strings. + * + * @param value - The number to serialize. + * @returns A number, or a string if the given number was non-numeric. 
+ */ +export declare const serializeFloat: (value: number) => string | number; +/** + * @internal + * @param date - to be serialized. + * @returns https://smithy.io/2.0/spec/protocol-traits.html#timestampformat-trait date-time format. + */ +export declare const serializeDateTime: (date: Date) => string; diff --git a/amplify/functions/deleteDocument/node_modules/@smithy/smithy-client/dist-types/serde-json.d.ts b/amplify/functions/deleteDocument/node_modules/@smithy/smithy-client/dist-types/serde-json.d.ts new file mode 100644 index 0000000..96ac476 --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@smithy/smithy-client/dist-types/serde-json.d.ts @@ -0,0 +1,12 @@ +/** + * @internal + * + * Maps an object through the default JSON serde behavior. + * This means removing nullish fields and un-sparsifying lists. + * + * This is also used by Smithy RPCv2 CBOR as the default serde behavior. + * + * @param obj - to be checked. + * @returns same object with default serde behavior applied. + */ +export declare const _json: (obj: any) => any; diff --git a/amplify/functions/deleteDocument/node_modules/@smithy/smithy-client/dist-types/split-every.d.ts b/amplify/functions/deleteDocument/node_modules/@smithy/smithy-client/dist-types/split-every.d.ts new file mode 100644 index 0000000..45a0229 --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@smithy/smithy-client/dist-types/split-every.d.ts @@ -0,0 +1,11 @@ +/** + * @internal + * + * Given an input string, splits based on the delimiter after a given + * number of delimiters has been encountered. + * + * @param value - The input string to split. + * @param delimiter - The delimiter to split on. + * @param numDelimiters - The number of delimiters to have encountered to split. 
+ */ +export declare function splitEvery(value: string, delimiter: string, numDelimiters: number): Array; diff --git a/amplify/functions/deleteDocument/node_modules/@smithy/smithy-client/dist-types/split-header.d.ts b/amplify/functions/deleteDocument/node_modules/@smithy/smithy-client/dist-types/split-header.d.ts new file mode 100644 index 0000000..0f51651 --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@smithy/smithy-client/dist-types/split-header.d.ts @@ -0,0 +1,5 @@ +/** + * @param value - header string value. + * @returns value split by commas that aren't in quotes. + */ +export declare const splitHeader: (value: string) => string[]; diff --git a/amplify/functions/deleteDocument/node_modules/@smithy/smithy-client/dist-types/ts3.4/NoOpLogger.d.ts b/amplify/functions/deleteDocument/node_modules/@smithy/smithy-client/dist-types/ts3.4/NoOpLogger.d.ts new file mode 100644 index 0000000..a9a1062 --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@smithy/smithy-client/dist-types/ts3.4/NoOpLogger.d.ts @@ -0,0 +1,11 @@ +import { Logger } from "@smithy/types"; +/** + * @internal + */ +export declare class NoOpLogger implements Logger { + trace(): void; + debug(): void; + info(): void; + warn(): void; + error(): void; +} diff --git a/amplify/functions/deleteDocument/node_modules/@smithy/smithy-client/dist-types/ts3.4/client.d.ts b/amplify/functions/deleteDocument/node_modules/@smithy/smithy-client/dist-types/ts3.4/client.d.ts new file mode 100644 index 0000000..578541e --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@smithy/smithy-client/dist-types/ts3.4/client.d.ts @@ -0,0 +1,61 @@ +import { Client as IClient, Command, FetchHttpHandlerOptions, MetadataBearer, MiddlewareStack, NodeHttpHandlerOptions, RequestHandler } from "@smithy/types"; +/** + * @public + */ +export interface SmithyConfiguration { + requestHandler: RequestHandler | NodeHttpHandlerOptions | FetchHttpHandlerOptions | Record; + /** + * The API version set 
internally by the SDK, and is + * not planned to be used by customer code. + * @internal + */ + readonly apiVersion: string; + /** + * @public + * + * Default false. + * + * When true, the client will only resolve the middleware stack once per + * Command class. This means modifying the middlewareStack of the + * command or client after requests have been made will not be + * recognized. + * + * Calling client.destroy() also clears this cache. + * + * Enable this only if needing the additional time saved (0-1ms per request) + * and not needing middleware modifications between requests. + */ + cacheMiddleware?: boolean; +} +/** + * @internal + */ +export type SmithyResolvedConfiguration = { + requestHandler: RequestHandler; + readonly apiVersion: string; + cacheMiddleware?: boolean; +}; +/** + * @public + */ +export declare class Client> implements IClient { + readonly config: ResolvedClientConfiguration; + middlewareStack: MiddlewareStack; + /** + * Holds an object reference to the initial configuration object. + * Used to check that the config resolver stack does not create + * dangling instances of an intermediate form of the configuration object. + * + * @internal + */ + initConfig?: object; + /** + * May be used to cache the resolved handler function for a Command class. 
+ */ + private handlers?; + constructor(config: ResolvedClientConfiguration); + send(command: Command>, options?: HandlerOptions): Promise; + send(command: Command>, cb: (err: any, data?: OutputType) => void): void; + send(command: Command>, options: HandlerOptions, cb: (err: any, data?: OutputType) => void): void; + destroy(): void; +} diff --git a/amplify/functions/deleteDocument/node_modules/@smithy/smithy-client/dist-types/ts3.4/collect-stream-body.d.ts b/amplify/functions/deleteDocument/node_modules/@smithy/smithy-client/dist-types/ts3.4/collect-stream-body.d.ts new file mode 100644 index 0000000..c53a1e3 --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@smithy/smithy-client/dist-types/ts3.4/collect-stream-body.d.ts @@ -0,0 +1,5 @@ +/** + * @internal + * Backwards compatibility re-export. + */ +export { collectBody } from "@smithy/core/protocols"; diff --git a/amplify/functions/deleteDocument/node_modules/@smithy/smithy-client/dist-types/ts3.4/command.d.ts b/amplify/functions/deleteDocument/node_modules/@smithy/smithy-client/dist-types/ts3.4/command.d.ts new file mode 100644 index 0000000..8b42ff6 --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@smithy/smithy-client/dist-types/ts3.4/command.d.ts @@ -0,0 +1,113 @@ +import { EndpointParameterInstructions } from "@smithy/middleware-endpoint"; +import { Command as ICommand, Handler, HandlerExecutionContext, HttpRequest as IHttpRequest, HttpResponse as IHttpResponse, Logger, MetadataBearer, MiddlewareStack as IMiddlewareStack, OptionalParameter, Pluggable, RequestHandler, SerdeContext } from "@smithy/types"; +/** + * @public + */ +export declare abstract class Command implements ICommand { + abstract input: Input; + readonly middlewareStack: IMiddlewareStack; + /** + * Factory for Command ClassBuilder. 
+ * @internal + */ + static classBuilder; + }, SI extends object = any, SO extends MetadataBearer = any>(): ClassBuilder; + abstract resolveMiddleware(stack: IMiddlewareStack, configuration: ResolvedClientConfiguration, options: any): Handler; + /** + * @internal + */ + resolveMiddlewareWithContext(clientStack: IMiddlewareStack, configuration: { + logger: Logger; + requestHandler: RequestHandler; + }, options: any, { middlewareFn, clientName, commandName, inputFilterSensitiveLog, outputFilterSensitiveLog, smithyContext, additionalContext, CommandCtor, }: ResolveMiddlewareContextArgs): import("@smithy/types").InitializeHandler; +} +/** + * @internal + */ +type ResolveMiddlewareContextArgs = { + middlewareFn: (CommandCtor: any, clientStack: any, config: any, options: any) => Pluggable[]; + clientName: string; + commandName: string; + smithyContext: Record; + additionalContext: HandlerExecutionContext; + inputFilterSensitiveLog: (_: any) => any; + outputFilterSensitiveLog: (_: any) => any; + CommandCtor: any; +}; +/** + * @internal + */ +declare class ClassBuilder; +}, SI extends object = any, SO extends MetadataBearer = any> { + private _init; + private _ep; + private _middlewareFn; + private _commandName; + private _clientName; + private _additionalContext; + private _smithyContext; + private _inputFilterSensitiveLog; + private _outputFilterSensitiveLog; + private _serializer; + private _deserializer; + /** + * Optional init callback. + */ + init(cb: (_: Command) => void): void; + /** + * Set the endpoint parameter instructions. + */ + ep(endpointParameterInstructions: EndpointParameterInstructions): ClassBuilder; + /** + * Add any number of middleware. + */ + m(middlewareSupplier: (CommandCtor: any, clientStack: any, config: any, options: any) => Pluggable[]): ClassBuilder; + /** + * Set the initial handler execution context Smithy field. 
+ */ + s(service: string, operation: string, smithyContext?: Record): ClassBuilder; + /** + * Set the initial handler execution context. + */ + c(additionalContext?: HandlerExecutionContext): ClassBuilder; + /** + * Set constant string identifiers for the operation. + */ + n(clientName: string, commandName: string): ClassBuilder; + /** + * Set the input and output sensistive log filters. + */ + f(inputFilter?: (_: any) => any, outputFilter?: (_: any) => any): ClassBuilder; + /** + * Sets the serializer. + */ + ser(serializer: (input: I, context?: SerdeContext | any) => Promise): ClassBuilder; + /** + * Sets the deserializer. + */ + de(deserializer: (output: IHttpResponse, context?: SerdeContext | any) => Promise): ClassBuilder; + /** + * @returns a Command class with the classBuilder properties. + */ + build(): { + new (input: I): CommandImpl; + new (...[input]: OptionalParameter): CommandImpl; + getEndpointParameterInstructions(): EndpointParameterInstructions; + }; +} +/** + * A concrete implementation of ICommand with no abstract members. 
+ * @public + */ +export interface CommandImpl; +}, SI extends object = any, SO extends MetadataBearer = any> extends Command { + readonly input: I; + resolveMiddleware(stack: IMiddlewareStack, configuration: C, options: any): Handler; +} +export {}; diff --git a/amplify/functions/deleteDocument/node_modules/@smithy/smithy-client/dist-types/ts3.4/constants.d.ts b/amplify/functions/deleteDocument/node_modules/@smithy/smithy-client/dist-types/ts3.4/constants.d.ts new file mode 100644 index 0000000..eab978f --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@smithy/smithy-client/dist-types/ts3.4/constants.d.ts @@ -0,0 +1,4 @@ +/** + * @internal + */ +export declare const SENSITIVE_STRING = "***SensitiveInformation***"; diff --git a/amplify/functions/deleteDocument/node_modules/@smithy/smithy-client/dist-types/ts3.4/create-aggregated-client.d.ts b/amplify/functions/deleteDocument/node_modules/@smithy/smithy-client/dist-types/ts3.4/create-aggregated-client.d.ts new file mode 100644 index 0000000..ded1999 --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@smithy/smithy-client/dist-types/ts3.4/create-aggregated-client.d.ts @@ -0,0 +1,9 @@ +import { Client } from "./client"; +/** + * @internal + * + * @param commands - command lookup container. + * @param client - client instance on which to add aggregated methods. + * @returns an aggregated client with dynamically created methods. 
+ */ +export declare const createAggregatedClient: (commands: Record, Client: new (...args: any) => Client) => void; diff --git a/amplify/functions/deleteDocument/node_modules/@smithy/smithy-client/dist-types/ts3.4/date-utils.d.ts b/amplify/functions/deleteDocument/node_modules/@smithy/smithy-client/dist-types/ts3.4/date-utils.d.ts new file mode 100644 index 0000000..41071c2 --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@smithy/smithy-client/dist-types/ts3.4/date-utils.d.ts @@ -0,0 +1,73 @@ +/** + * @internal + * + * Builds a proper UTC HttpDate timestamp from a Date object + * since not all environments will have this as the expected + * format. + * + * @see {@link https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Global_Objects/Date/toUTCString} + * - Prior to ECMAScript 2018, the format of the return value + * - varied according to the platform. The most common return + * - value was an RFC-1123 formatted date stamp, which is a + * - slightly updated version of RFC-822 date stamps. + */ +export declare function dateToUtcString(date: Date): string; +/** + * @internal + * + * Parses a value into a Date. Returns undefined if the input is null or + * undefined, throws an error if the input is not a string that can be parsed + * as an RFC 3339 date. + * + * Input strings must conform to RFC3339 section 5.6, and cannot have a UTC + * offset. Fractional precision is supported. + * + * @see {@link https://xml2rfc.tools.ietf.org/public/rfc/html/rfc3339.html#anchor14} + * + * @param value - the value to parse + * @returns a Date or undefined + */ +export declare const parseRfc3339DateTime: (value: unknown) => Date | undefined; +/** + * @internal + * + * Parses a value into a Date. Returns undefined if the input is null or + * undefined, throws an error if the input is not a string that can be parsed + * as an RFC 3339 date. + * + * Input strings must conform to RFC3339 section 5.6, and can have a UTC + * offset. 
Fractional precision is supported. + * + * @see {@link https://xml2rfc.tools.ietf.org/public/rfc/html/rfc3339.html#anchor14} + * + * @param value - the value to parse + * @returns a Date or undefined + */ +export declare const parseRfc3339DateTimeWithOffset: (value: unknown) => Date | undefined; +/** + * @internal + * + * Parses a value into a Date. Returns undefined if the input is null or + * undefined, throws an error if the input is not a string that can be parsed + * as an RFC 7231 IMF-fixdate or obs-date. + * + * Input strings must conform to RFC7231 section 7.1.1.1. Fractional seconds are supported. + * + * @see {@link https://datatracker.ietf.org/doc/html/rfc7231.html#section-7.1.1.1} + * + * @param value - the value to parse + * @returns a Date or undefined + */ +export declare const parseRfc7231DateTime: (value: unknown) => Date | undefined; +/** + * @internal + * + * Parses a value into a Date. Returns undefined if the input is null or + * undefined, throws an error if the input is not a number or a parseable string. + * + * Input strings must be an integer or floating point number. Fractional seconds are supported. + * + * @param value - the value to parse + * @returns a Date or undefined + */ +export declare const parseEpochTimestamp: (value: unknown) => Date | undefined; diff --git a/amplify/functions/deleteDocument/node_modules/@smithy/smithy-client/dist-types/ts3.4/default-error-handler.d.ts b/amplify/functions/deleteDocument/node_modules/@smithy/smithy-client/dist-types/ts3.4/default-error-handler.d.ts new file mode 100644 index 0000000..e9852ba --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@smithy/smithy-client/dist-types/ts3.4/default-error-handler.d.ts @@ -0,0 +1,13 @@ +/** + * Always throws an error with the given `exceptionCtor` and other arguments. + * This is only called from an error handling code path. 
+ * + * @internal + */ +export declare const throwDefaultError: ({ output, parsedBody, exceptionCtor, errorCode }: any) => never; +/** + * @internal + * + * Creates {@link throwDefaultError} with bound ExceptionCtor. + */ +export declare const withBaseException: (ExceptionCtor: new (...args: any) => any) => any; diff --git a/amplify/functions/deleteDocument/node_modules/@smithy/smithy-client/dist-types/ts3.4/defaults-mode.d.ts b/amplify/functions/deleteDocument/node_modules/@smithy/smithy-client/dist-types/ts3.4/defaults-mode.d.ts new file mode 100644 index 0000000..c8a89ed --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@smithy/smithy-client/dist-types/ts3.4/defaults-mode.d.ts @@ -0,0 +1,28 @@ +/** + * @internal + */ +export declare const loadConfigsForDefaultMode: (mode: ResolvedDefaultsMode) => DefaultsModeConfigs; +/** + * Option determining how certain default configuration options are resolved in the SDK. It can be one of the value listed below: + * * `"standard"`:

The STANDARD mode provides the latest recommended default values that should be safe to run in most scenarios

Note that the default values vended from this mode might change as best practices may evolve. As a result, it is encouraged to perform tests when upgrading the SDK

+ * * `"in-region"`:

The IN_REGION mode builds on the standard mode and includes optimization tailored for applications which call AWS services from within the same AWS region

Note that the default values vended from this mode might change as best practices may evolve. As a result, it is encouraged to perform tests when upgrading the SDK

+ * * `"cross-region"`:

The CROSS_REGION mode builds on the standard mode and includes optimization tailored for applications which call AWS services in a different region

Note that the default values vended from this mode might change as best practices may evolve. As a result, it is encouraged to perform tests when upgrading the SDK

+ * * `"mobile"`:

The MOBILE mode builds on the standard mode and includes optimization tailored for mobile applications

Note that the default values vended from this mode might change as best practices may evolve. As a result, it is encouraged to perform tests when upgrading the SDK

+ * * `"auto"`:

The AUTO mode is an experimental mode that builds on the standard mode. The SDK will attempt to discover the execution environment to determine the appropriate settings automatically.

Note that the auto detection is heuristics-based and does not guarantee 100% accuracy. STANDARD mode will be used if the execution environment cannot be determined. The auto detection might query EC2 Instance Metadata service, which might introduce latency. Therefore we recommend choosing an explicit defaults_mode instead if startup latency is critical to your application

+ * * `"legacy"`:

The LEGACY mode provides default settings that vary per SDK and were used prior to establishment of defaults_mode

+ * + * @defaultValue "legacy" + */ +export type DefaultsMode = "standard" | "in-region" | "cross-region" | "mobile" | "auto" | "legacy"; +/** + * @internal + */ +export type ResolvedDefaultsMode = Exclude; +/** + * @internal + */ +export interface DefaultsModeConfigs { + retryMode?: string; + connectionTimeout?: number; + requestTimeout?: number; +} diff --git a/amplify/functions/deleteDocument/node_modules/@smithy/smithy-client/dist-types/ts3.4/emitWarningIfUnsupportedVersion.d.ts b/amplify/functions/deleteDocument/node_modules/@smithy/smithy-client/dist-types/ts3.4/emitWarningIfUnsupportedVersion.d.ts new file mode 100644 index 0000000..f0284ef --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@smithy/smithy-client/dist-types/ts3.4/emitWarningIfUnsupportedVersion.d.ts @@ -0,0 +1,8 @@ +/** + * @internal + * + * Emits warning if the provided Node.js version string is pending deprecation. + * + * @param version - The Node.js version string. + */ +export declare const emitWarningIfUnsupportedVersion: (version: string) => void; diff --git a/amplify/functions/deleteDocument/node_modules/@smithy/smithy-client/dist-types/ts3.4/exceptions.d.ts b/amplify/functions/deleteDocument/node_modules/@smithy/smithy-client/dist-types/ts3.4/exceptions.d.ts new file mode 100644 index 0000000..675354a --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@smithy/smithy-client/dist-types/ts3.4/exceptions.d.ts @@ -0,0 +1,42 @@ +import { HttpResponse, MetadataBearer, ResponseMetadata, RetryableTrait, SmithyException } from "@smithy/types"; +/** + * The type of the exception class constructor parameter. The returned type contains the properties + * in the `ExceptionType` but not in the `BaseExceptionType`. If the `BaseExceptionType` contains + * `$metadata` and `message` properties, it's also included in the returned type. 
+ * @internal + */ +export type ExceptionOptionType = Pick>>; +/** + * @public + */ +export interface ServiceExceptionOptions extends SmithyException, MetadataBearer { + message?: string; +} +/** + * @public + * + * Base exception class for the exceptions from the server-side. + */ +export declare class ServiceException extends Error implements SmithyException, MetadataBearer { + readonly $fault: "client" | "server"; + $response?: HttpResponse; + $retryable?: RetryableTrait; + $metadata: ResponseMetadata; + constructor(options: ServiceExceptionOptions); + /** + * Checks if a value is an instance of ServiceException (duck typed) + */ + static isInstance(value: unknown): value is ServiceException; + /** + * Custom instanceof check to support the operator for ServiceException base class + */ + static [Symbol.hasInstance](instance: unknown): boolean; +} +/** + * This method inject unmodeled member to a deserialized SDK exception, + * and load the error message from different possible keys('message', + * 'Message'). + * + * @internal + */ +export declare const decorateServiceException: (exception: E, additions?: Record) => E; diff --git a/amplify/functions/deleteDocument/node_modules/@smithy/smithy-client/dist-types/ts3.4/extended-encode-uri-component.d.ts b/amplify/functions/deleteDocument/node_modules/@smithy/smithy-client/dist-types/ts3.4/extended-encode-uri-component.d.ts new file mode 100644 index 0000000..4e510cf --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@smithy/smithy-client/dist-types/ts3.4/extended-encode-uri-component.d.ts @@ -0,0 +1,5 @@ +/** + * @internal + * Backwards compatibility re-export. 
+ */ +export { extendedEncodeURIComponent } from "@smithy/core/protocols"; diff --git a/amplify/functions/deleteDocument/node_modules/@smithy/smithy-client/dist-types/ts3.4/extensions/checksum.d.ts b/amplify/functions/deleteDocument/node_modules/@smithy/smithy-client/dist-types/ts3.4/extensions/checksum.d.ts new file mode 100644 index 0000000..c5f06b8 --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@smithy/smithy-client/dist-types/ts3.4/extensions/checksum.d.ts @@ -0,0 +1,24 @@ +import { ChecksumAlgorithm, ChecksumConfiguration, ChecksumConstructor, HashConstructor } from "@smithy/types"; +import { AlgorithmId } from "@smithy/types"; +export { AlgorithmId, ChecksumAlgorithm, ChecksumConfiguration }; +/** + * @internal + */ +export type PartialChecksumRuntimeConfigType = Partial<{ + sha256: ChecksumConstructor | HashConstructor; + md5: ChecksumConstructor | HashConstructor; + crc32: ChecksumConstructor | HashConstructor; + crc32c: ChecksumConstructor | HashConstructor; + sha1: ChecksumConstructor | HashConstructor; +}>; +/** + * @internal + */ +export declare const getChecksumConfiguration: (runtimeConfig: PartialChecksumRuntimeConfigType) => { + addChecksumAlgorithm(algo: ChecksumAlgorithm): void; + checksumAlgorithms(): ChecksumAlgorithm[]; +}; +/** + * @internal + */ +export declare const resolveChecksumRuntimeConfig: (clientConfig: ChecksumConfiguration) => PartialChecksumRuntimeConfigType; diff --git a/amplify/functions/deleteDocument/node_modules/@smithy/smithy-client/dist-types/ts3.4/extensions/defaultExtensionConfiguration.d.ts b/amplify/functions/deleteDocument/node_modules/@smithy/smithy-client/dist-types/ts3.4/extensions/defaultExtensionConfiguration.d.ts new file mode 100644 index 0000000..d8c05bb --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@smithy/smithy-client/dist-types/ts3.4/extensions/defaultExtensionConfiguration.d.ts @@ -0,0 +1,38 @@ +import { DefaultExtensionConfiguration } from "@smithy/types"; +import { 
PartialChecksumRuntimeConfigType } from "./checksum"; +import { PartialRetryRuntimeConfigType } from "./retry"; +/** + * @internal + */ +export type DefaultExtensionRuntimeConfigType = PartialRetryRuntimeConfigType & PartialChecksumRuntimeConfigType; +/** + * @internal + * + * Helper function to resolve default extension configuration from runtime config + */ +export declare const getDefaultExtensionConfiguration: (runtimeConfig: DefaultExtensionRuntimeConfigType) => { + addChecksumAlgorithm(algo: import("@smithy/types").ChecksumAlgorithm): void; + checksumAlgorithms(): import("@smithy/types").ChecksumAlgorithm[]; +} & { + setRetryStrategy(retryStrategy: import("@smithy/types").Provider): void; + retryStrategy(): import("@smithy/types").Provider; +}; +/** + * @deprecated use getDefaultExtensionConfiguration + * @internal + * + * Helper function to resolve default extension configuration from runtime config + */ +export declare const getDefaultClientConfiguration: (runtimeConfig: DefaultExtensionRuntimeConfigType) => { + addChecksumAlgorithm(algo: import("@smithy/types").ChecksumAlgorithm): void; + checksumAlgorithms(): import("@smithy/types").ChecksumAlgorithm[]; +} & { + setRetryStrategy(retryStrategy: import("@smithy/types").Provider): void; + retryStrategy(): import("@smithy/types").Provider; +}; +/** + * @internal + * + * Helper function to resolve runtime config from default extension configuration + */ +export declare const resolveDefaultRuntimeConfig: (config: DefaultExtensionConfiguration) => DefaultExtensionRuntimeConfigType; diff --git a/amplify/functions/deleteDocument/node_modules/@smithy/smithy-client/dist-types/ts3.4/extensions/index.d.ts b/amplify/functions/deleteDocument/node_modules/@smithy/smithy-client/dist-types/ts3.4/extensions/index.d.ts new file mode 100644 index 0000000..04e3c83 --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@smithy/smithy-client/dist-types/ts3.4/extensions/index.d.ts @@ -0,0 +1 @@ +export * from 
"./defaultExtensionConfiguration"; diff --git a/amplify/functions/deleteDocument/node_modules/@smithy/smithy-client/dist-types/ts3.4/extensions/retry.d.ts b/amplify/functions/deleteDocument/node_modules/@smithy/smithy-client/dist-types/ts3.4/extensions/retry.d.ts new file mode 100644 index 0000000..b41fa3c --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@smithy/smithy-client/dist-types/ts3.4/extensions/retry.d.ts @@ -0,0 +1,18 @@ +import { Provider, RetryStrategy, RetryStrategyConfiguration, RetryStrategyV2 } from "@smithy/types"; +/** + * @internal + */ +export type PartialRetryRuntimeConfigType = Partial<{ + retryStrategy: Provider; +}>; +/** + * @internal + */ +export declare const getRetryConfiguration: (runtimeConfig: PartialRetryRuntimeConfigType) => { + setRetryStrategy(retryStrategy: Provider): void; + retryStrategy(): Provider; +}; +/** + * @internal + */ +export declare const resolveRetryRuntimeConfig: (retryStrategyConfiguration: RetryStrategyConfiguration) => PartialRetryRuntimeConfigType; diff --git a/amplify/functions/deleteDocument/node_modules/@smithy/smithy-client/dist-types/ts3.4/get-array-if-single-item.d.ts b/amplify/functions/deleteDocument/node_modules/@smithy/smithy-client/dist-types/ts3.4/get-array-if-single-item.d.ts new file mode 100644 index 0000000..dbbd280 --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@smithy/smithy-client/dist-types/ts3.4/get-array-if-single-item.d.ts @@ -0,0 +1,7 @@ +/** + * @internal + * + * The XML parser will set one K:V for a member that could + * return multiple entries but only has one. 
+ */ +export declare const getArrayIfSingleItem: (mayBeArray: T) => T | T[]; diff --git a/amplify/functions/deleteDocument/node_modules/@smithy/smithy-client/dist-types/ts3.4/get-value-from-text-node.d.ts b/amplify/functions/deleteDocument/node_modules/@smithy/smithy-client/dist-types/ts3.4/get-value-from-text-node.d.ts new file mode 100644 index 0000000..d56771e --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@smithy/smithy-client/dist-types/ts3.4/get-value-from-text-node.d.ts @@ -0,0 +1,7 @@ +/** + * @internal + * + * Recursively parses object and populates value is node from + * "#text" key if it's available + */ +export declare const getValueFromTextNode: (obj: any) => any; diff --git a/amplify/functions/deleteDocument/node_modules/@smithy/smithy-client/dist-types/ts3.4/index.d.ts b/amplify/functions/deleteDocument/node_modules/@smithy/smithy-client/dist-types/ts3.4/index.d.ts new file mode 100644 index 0000000..684c977 --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@smithy/smithy-client/dist-types/ts3.4/index.d.ts @@ -0,0 +1,26 @@ +export { DocumentType, SdkError, SmithyException } from "@smithy/types"; +export * from "./client"; +export * from "./collect-stream-body"; +export * from "./command"; +export * from "./constants"; +export * from "./create-aggregated-client"; +export * from "./date-utils"; +export * from "./default-error-handler"; +export * from "./defaults-mode"; +export * from "./emitWarningIfUnsupportedVersion"; +export * from "./exceptions"; +export * from "./extended-encode-uri-component"; +export * from "./extensions"; +export * from "./get-array-if-single-item"; +export * from "./get-value-from-text-node"; +export * from "./is-serializable-header-value"; +export * from "./lazy-json"; +export * from "./NoOpLogger"; +export * from "./object-mapping"; +export * from "./parse-utils"; +export * from "./quote-header"; +export * from "./resolve-path"; +export * from "./ser-utils"; +export * from "./serde-json"; 
+export * from "./split-every"; +export * from "./split-header"; diff --git a/amplify/functions/deleteDocument/node_modules/@smithy/smithy-client/dist-types/ts3.4/is-serializable-header-value.d.ts b/amplify/functions/deleteDocument/node_modules/@smithy/smithy-client/dist-types/ts3.4/is-serializable-header-value.d.ts new file mode 100644 index 0000000..4d53109 --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@smithy/smithy-client/dist-types/ts3.4/is-serializable-header-value.d.ts @@ -0,0 +1,5 @@ +/** + * @internal + * @returns whether the header value is serializable. + */ +export declare const isSerializableHeaderValue: (value: any) => boolean; diff --git a/amplify/functions/deleteDocument/node_modules/@smithy/smithy-client/dist-types/ts3.4/lazy-json.d.ts b/amplify/functions/deleteDocument/node_modules/@smithy/smithy-client/dist-types/ts3.4/lazy-json.d.ts new file mode 100644 index 0000000..3a41bf3 --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@smithy/smithy-client/dist-types/ts3.4/lazy-json.d.ts @@ -0,0 +1,46 @@ +/** + * @public + * + * A model field with this type means that you may provide a JavaScript + * object in lieu of a JSON string, and it will be serialized to JSON + * automatically before being sent in a request. + * + * For responses, you will receive a "LazyJsonString", which is a boxed String object + * with additional mixin methods. + * To get the string value, call `.toString()`, or to get the JSON object value, + * call `.deserializeJSON()` or parse it yourself. + */ +export type AutomaticJsonStringConversion = Parameters[0] | LazyJsonString; +/** + * @internal + * + */ +export interface LazyJsonString extends String { + /** + * @returns the JSON parsing of the string value. + */ + deserializeJSON(): any; + /** + * @returns the original string value rather than a JSON.stringified value. 
+ */ + toJSON(): string; +} +/** + * @internal + * + * Extension of the native String class in the previous implementation + * has negative global performance impact on method dispatch for strings, + * and is generally discouraged. + * + * This current implementation may look strange, but is necessary to preserve the interface and + * behavior of extending the String class. + */ +export declare const LazyJsonString: { + (s: string): LazyJsonString; + new (s: string): LazyJsonString; + from(s: any): LazyJsonString; + /** + * @deprecated use #from. + */ + fromObject(s: any): LazyJsonString; +}; diff --git a/amplify/functions/deleteDocument/node_modules/@smithy/smithy-client/dist-types/ts3.4/object-mapping.d.ts b/amplify/functions/deleteDocument/node_modules/@smithy/smithy-client/dist-types/ts3.4/object-mapping.d.ts new file mode 100644 index 0000000..d658c16 --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@smithy/smithy-client/dist-types/ts3.4/object-mapping.d.ts @@ -0,0 +1,178 @@ +/** + * @internal + * + * A set of instructions for multiple keys. + * The aim is to provide a concise yet readable way to map and filter values + * onto a target object. 
+ * + * @example + * ```javascript + * const example: ObjectMappingInstructions = { + * lazyValue1: [, () => 1], + * lazyValue2: [, () => 2], + * lazyValue3: [, () => 3], + * lazyConditionalValue1: [() => true, () => 4], + * lazyConditionalValue2: [() => true, () => 5], + * lazyConditionalValue3: [true, () => 6], + * lazyConditionalValue4: [false, () => 44], + * lazyConditionalValue5: [() => false, () => 55], + * lazyConditionalValue6: ["", () => 66], + * simpleValue1: [, 7], + * simpleValue2: [, 8], + * simpleValue3: [, 9], + * conditionalValue1: [() => true, 10], + * conditionalValue2: [() => true, 11], + * conditionalValue3: [{}, 12], + * conditionalValue4: [false, 110], + * conditionalValue5: [() => false, 121], + * conditionalValue6: ["", 132], + * }; + * + * const exampleResult: Record = { + * lazyValue1: 1, + * lazyValue2: 2, + * lazyValue3: 3, + * lazyConditionalValue1: 4, + * lazyConditionalValue2: 5, + * lazyConditionalValue3: 6, + * simpleValue1: 7, + * simpleValue2: 8, + * simpleValue3: 9, + * conditionalValue1: 10, + * conditionalValue2: 11, + * conditionalValue3: 12, + * }; + * ``` + */ +export type ObjectMappingInstructions = Record; +/** + * @internal + * + * A variant of the object mapping instruction for the `take` function. + * In this case, the source value is provided to the value function, turning it + * from a supplier into a mapper. + */ +export type SourceMappingInstructions = Record; +/** + * @internal + * + * An instruction set for assigning a value to a target object. 
+ */ +export type ObjectMappingInstruction = LazyValueInstruction | ConditionalLazyValueInstruction | SimpleValueInstruction | ConditionalValueInstruction | UnfilteredValue; +/** + * @internal + * + * non-array + */ +export type UnfilteredValue = any; +/** + * @internal + */ +export type LazyValueInstruction = [ + FilterStatus, + ValueSupplier +]; +/** + * @internal + */ +export type ConditionalLazyValueInstruction = [ + FilterStatusSupplier, + ValueSupplier +]; +/** + * @internal + */ +export type SimpleValueInstruction = [ + FilterStatus, + Value +]; +/** + * @internal + */ +export type ConditionalValueInstruction = [ + ValueFilteringFunction, + Value +]; +/** + * @internal + */ +export type SourceMappingInstruction = [ + (ValueFilteringFunction | FilterStatus)?, + ValueMapper?, + string? +]; +/** + * @internal + * + * Filter is considered passed if + * 1. It is a boolean true. + * 2. It is not undefined and is itself truthy. + * 3. It is undefined and the corresponding _value_ is neither null nor undefined. + */ +export type FilterStatus = boolean | unknown | void; +/** + * @internal + * + * Supplies the filter check but not against any value as input. + */ +export type FilterStatusSupplier = () => boolean; +/** + * @internal + * + * Filter check with the given value. + */ +export type ValueFilteringFunction = (value: any) => boolean; +/** + * @internal + * + * Supplies the value for lazy evaluation. + */ +export type ValueSupplier = () => any; +/** + * @internal + * + * A function that maps the source value to the target value. + * Defaults to pass-through with nullish check. + */ +export type ValueMapper = (value: any) => any; +/** + * @internal + * + * A non-function value. + */ +export type Value = any; +/** + * @internal + * Internal/Private, for codegen use only. + * + * Transfer a set of keys from [instructions] to [target]. + * + * For each instruction in the record, the target key will be the instruction key. 
+ * The target assignment will be conditional on the instruction's filter. + * The target assigned value will be supplied by the instructions as an evaluable function or non-function value. + * + * @see ObjectMappingInstructions for an example. + */ +export declare function map(target: any, filter: (value: any) => boolean, instructions: Record): typeof target; +/** + * @internal + */ +export declare function map(instructions: ObjectMappingInstructions): any; +/** + * @internal + */ +export declare function map(target: any, instructions: ObjectMappingInstructions): typeof target; +/** + * Convert a regular object `{ k: v }` to `{ k: [, v] }` mapping instruction set with default + * filter. + * + * @internal + */ +export declare const convertMap: (target: any) => Record; +/** + * @param source - original object with data. + * @param instructions - how to map the data. + * @returns new object mapped from the source object. + * @internal + */ +export declare const take: (source: any, instructions: SourceMappingInstructions) => any; diff --git a/amplify/functions/deleteDocument/node_modules/@smithy/smithy-client/dist-types/ts3.4/parse-utils.d.ts b/amplify/functions/deleteDocument/node_modules/@smithy/smithy-client/dist-types/ts3.4/parse-utils.d.ts new file mode 100644 index 0000000..e4c8aef --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@smithy/smithy-client/dist-types/ts3.4/parse-utils.d.ts @@ -0,0 +1,270 @@ +/** + * @internal + * + * Give an input string, strictly parses a boolean value. + * + * @param value - The boolean string to parse. + * @returns true for "true", false for "false", otherwise an error is thrown. + */ +export declare const parseBoolean: (value: string) => boolean; +/** + * @internal + * + * Asserts a value is a boolean and returns it. + * Casts strings and numbers with a warning if there is evidence that they were + * intended to be booleans. + * + * @param value - A value that is expected to be a boolean. 
+ * @returns The value if it's a boolean, undefined if it's null/undefined, + * otherwise an error is thrown. + */ +export declare const expectBoolean: (value: any) => boolean | undefined; +/** + * @internal + * + * Asserts a value is a number and returns it. + * Casts strings with a warning if the string is a parseable number. + * This is to unblock slight API definition/implementation inconsistencies. + * + * @param value - A value that is expected to be a number. + * @returns The value if it's a number, undefined if it's null/undefined, + * otherwise an error is thrown. + */ +export declare const expectNumber: (value: any) => number | undefined; +/** + * @internal + * + * Asserts a value is a 32-bit float and returns it. + * + * @param value - A value that is expected to be a 32-bit float. + * @returns The value if it's a float, undefined if it's null/undefined, + * otherwise an error is thrown. + */ +export declare const expectFloat32: (value: any) => number | undefined; +/** + * @internal + * + * Asserts a value is an integer and returns it. + * + * @param value - A value that is expected to be an integer. + * @returns The value if it's an integer, undefined if it's null/undefined, + * otherwise an error is thrown. + */ +export declare const expectLong: (value: any) => number | undefined; +/** + * @internal + * + * @deprecated Use expectLong + */ +export declare const expectInt: (value: any) => number | undefined; +/** + * @internal + * + * Asserts a value is a 32-bit integer and returns it. + * + * @param value - A value that is expected to be an integer. + * @returns The value if it's an integer, undefined if it's null/undefined, + * otherwise an error is thrown. + */ +export declare const expectInt32: (value: any) => number | undefined; +/** + * @internal + * + * Asserts a value is a 16-bit integer and returns it. + * + * @param value - A value that is expected to be an integer. 
+ * @returns The value if it's an integer, undefined if it's null/undefined, + * otherwise an error is thrown. + */ +export declare const expectShort: (value: any) => number | undefined; +/** + * @internal + * + * Asserts a value is an 8-bit integer and returns it. + * + * @param value - A value that is expected to be an integer. + * @returns The value if it's an integer, undefined if it's null/undefined, + * otherwise an error is thrown. + */ +export declare const expectByte: (value: any) => number | undefined; +/** + * @internal + * + * Asserts a value is not null or undefined and returns it, or throws an error. + * + * @param value - A value that is expected to be defined + * @param location - The location where we're expecting to find a defined object (optional) + * @returns The value if it's not undefined, otherwise throws an error + */ +export declare const expectNonNull: (value: T | null | undefined, location?: string) => T; +/** + * @internal + * + * Asserts a value is an JSON-like object and returns it. This is expected to be used + * with values parsed from JSON (arrays, objects, numbers, strings, booleans). + * + * @param value - A value that is expected to be an object + * @returns The value if it's an object, undefined if it's null/undefined, + * otherwise an error is thrown. + */ +export declare const expectObject: (value: any) => Record | undefined; +/** + * @internal + * + * Asserts a value is a string and returns it. + * Numbers and boolean will be cast to strings with a warning. + * + * @param value - A value that is expected to be a string. + * @returns The value if it's a string, undefined if it's null/undefined, + * otherwise an error is thrown. + */ +export declare const expectString: (value: any) => string | undefined; +/** + * @internal + * + * Asserts a value is a JSON-like object with only one non-null/non-undefined key and + * returns it. 
+ * + * @param value - A value that is expected to be an object with exactly one non-null, + * non-undefined key. + * @returns the value if it's a union, undefined if it's null/undefined, otherwise + * an error is thrown. + */ +export declare const expectUnion: (value: unknown) => Record | undefined; +/** + * @internal + * + * Parses a value into a double. If the value is null or undefined, undefined + * will be returned. If the value is a string, it will be parsed by the standard + * parseFloat with one exception: NaN may only be explicitly set as the string + * "NaN", any implicit Nan values will result in an error being thrown. If any + * other type is provided, an exception will be thrown. + * + * @param value - A number or string representation of a double. + * @returns The value as a number, or undefined if it's null/undefined. + */ +export declare const strictParseDouble: (value: string | number) => number | undefined; +/** + * @internal + * + * @deprecated Use strictParseDouble + */ +export declare const strictParseFloat: (value: string | number) => number | undefined; +/** + * @internal + * + * Parses a value into a float. If the value is null or undefined, undefined + * will be returned. If the value is a string, it will be parsed by the standard + * parseFloat with one exception: NaN may only be explicitly set as the string + * "NaN", any implicit Nan values will result in an error being thrown. If any + * other type is provided, an exception will be thrown. + * + * @param value - A number or string representation of a float. + * @returns The value as a number, or undefined if it's null/undefined. + */ +export declare const strictParseFloat32: (value: string | number) => number | undefined; +/** + * @internal + * + * Asserts a value is a number and returns it. If the value is a string + * representation of a non-numeric number type (NaN, Infinity, -Infinity), + * the value will be parsed. 
Any other string value will result in an exception + * being thrown. Null or undefined will be returned as undefined. Any other + * type will result in an exception being thrown. + * + * @param value - A number or string representation of a non-numeric float. + * @returns The value as a number, or undefined if it's null/undefined. + */ +export declare const limitedParseDouble: (value: string | number) => number | undefined; +/** + * @internal + * + * @deprecated Use limitedParseDouble + */ +export declare const handleFloat: (value: string | number) => number | undefined; +/** + * @internal + * + * @deprecated Use limitedParseDouble + */ +export declare const limitedParseFloat: (value: string | number) => number | undefined; +/** + * @internal + * + * Asserts a value is a 32-bit float and returns it. If the value is a string + * representation of a non-numeric number type (NaN, Infinity, -Infinity), + * the value will be parsed. Any other string value will result in an exception + * being thrown. Null or undefined will be returned as undefined. Any other + * type will result in an exception being thrown. + * + * @param value - A number or string representation of a non-numeric float. + * @returns The value as a number, or undefined if it's null/undefined. + */ +export declare const limitedParseFloat32: (value: string | number) => number | undefined; +/** + * @internal + * + * Parses a value into an integer. If the value is null or undefined, undefined + * will be returned. If the value is a string, it will be parsed by parseFloat + * and the result will be asserted to be an integer. If the parsed value is not + * an integer, or the raw value is any type other than a string or number, an + * exception will be thrown. + * + * @param value - A number or string representation of an integer. + * @returns The value as a number, or undefined if it's null/undefined. 
+ */ +export declare const strictParseLong: (value: string | number) => number | undefined; +/** + * @internal + * + * @deprecated Use strictParseLong + */ +export declare const strictParseInt: (value: string | number) => number | undefined; +/** + * @internal + * + * Parses a value into a 32-bit integer. If the value is null or undefined, undefined + * will be returned. If the value is a string, it will be parsed by parseFloat + * and the result will be asserted to be an integer. If the parsed value is not + * an integer, or the raw value is any type other than a string or number, an + * exception will be thrown. + * + * @param value - A number or string representation of a 32-bit integer. + * @returns The value as a number, or undefined if it's null/undefined. + */ +export declare const strictParseInt32: (value: string | number) => number | undefined; +/** + * @internal + * + * Parses a value into a 16-bit integer. If the value is null or undefined, undefined + * will be returned. If the value is a string, it will be parsed by parseFloat + * and the result will be asserted to be an integer. If the parsed value is not + * an integer, or the raw value is any type other than a string or number, an + * exception will be thrown. + * + * @param value - A number or string representation of a 16-bit integer. + * @returns The value as a number, or undefined if it's null/undefined. + */ +export declare const strictParseShort: (value: string | number) => number | undefined; +/** + * @internal + * + * Parses a value into an 8-bit integer. If the value is null or undefined, undefined + * will be returned. If the value is a string, it will be parsed by parseFloat + * and the result will be asserted to be an integer. If the parsed value is not + * an integer, or the raw value is any type other than a string or number, an + * exception will be thrown. + * + * @param value - A number or string representation of an 8-bit integer. 
+ * @returns The value as a number, or undefined if it's null/undefined. + */ +export declare const strictParseByte: (value: string | number) => number | undefined; +/** + * @internal + */ +export declare const logger: { + warn: { + (...data: any[]): void; + (message?: any, ...optionalParams: any[]): void; + }; +}; diff --git a/amplify/functions/deleteDocument/node_modules/@smithy/smithy-client/dist-types/ts3.4/quote-header.d.ts b/amplify/functions/deleteDocument/node_modules/@smithy/smithy-client/dist-types/ts3.4/quote-header.d.ts new file mode 100644 index 0000000..c2f12e9 --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@smithy/smithy-client/dist-types/ts3.4/quote-header.d.ts @@ -0,0 +1,6 @@ +/** + * @public + * @param part - header list element + * @returns quoted string if part contains delimiter. + */ +export declare function quoteHeader(part: string): string; diff --git a/amplify/functions/deleteDocument/node_modules/@smithy/smithy-client/dist-types/ts3.4/resolve-path.d.ts b/amplify/functions/deleteDocument/node_modules/@smithy/smithy-client/dist-types/ts3.4/resolve-path.d.ts new file mode 100644 index 0000000..5432be7 --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@smithy/smithy-client/dist-types/ts3.4/resolve-path.d.ts @@ -0,0 +1,5 @@ +/** + * @internal + * Backwards compatibility re-export. + */ +export { resolvedPath } from "@smithy/core/protocols"; diff --git a/amplify/functions/deleteDocument/node_modules/@smithy/smithy-client/dist-types/ts3.4/ser-utils.d.ts b/amplify/functions/deleteDocument/node_modules/@smithy/smithy-client/dist-types/ts3.4/ser-utils.d.ts new file mode 100644 index 0000000..355f829 --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@smithy/smithy-client/dist-types/ts3.4/ser-utils.d.ts @@ -0,0 +1,15 @@ +/** + * @internal + * + * Serializes a number, turning non-numeric values into strings. + * + * @param value - The number to serialize. 
+ * @returns A number, or a string if the given number was non-numeric. + */ +export declare const serializeFloat: (value: number) => string | number; +/** + * @internal + * @param date - to be serialized. + * @returns https://smithy.io/2.0/spec/protocol-traits.html#timestampformat-trait date-time format. + */ +export declare const serializeDateTime: (date: Date) => string; diff --git a/amplify/functions/deleteDocument/node_modules/@smithy/smithy-client/dist-types/ts3.4/serde-json.d.ts b/amplify/functions/deleteDocument/node_modules/@smithy/smithy-client/dist-types/ts3.4/serde-json.d.ts new file mode 100644 index 0000000..499409f --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@smithy/smithy-client/dist-types/ts3.4/serde-json.d.ts @@ -0,0 +1,12 @@ +/** + * @internal + * + * Maps an object through the default JSON serde behavior. + * This means removing nullish fields and un-sparsifying lists. + * + * This is also used by Smithy RPCv2 CBOR as the default serde behavior. + * + * @param obj - to be checked. + * @returns same object with default serde behavior applied. + */ +export declare const _json: (obj: any) => any; diff --git a/amplify/functions/deleteDocument/node_modules/@smithy/smithy-client/dist-types/ts3.4/split-every.d.ts b/amplify/functions/deleteDocument/node_modules/@smithy/smithy-client/dist-types/ts3.4/split-every.d.ts new file mode 100644 index 0000000..2280f3e --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@smithy/smithy-client/dist-types/ts3.4/split-every.d.ts @@ -0,0 +1,11 @@ +/** + * @internal + * + * Given an input string, splits based on the delimiter after a given + * number of delimiters has been encountered. + * + * @param value - The input string to split. + * @param delimiter - The delimiter to split on. + * @param numDelimiters - The number of delimiters to have encountered to split. 
+ */ +export declare function splitEvery(value: string, delimiter: string, numDelimiters: number): Array; diff --git a/amplify/functions/deleteDocument/node_modules/@smithy/smithy-client/dist-types/ts3.4/split-header.d.ts b/amplify/functions/deleteDocument/node_modules/@smithy/smithy-client/dist-types/ts3.4/split-header.d.ts new file mode 100644 index 0000000..7cf54c6 --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@smithy/smithy-client/dist-types/ts3.4/split-header.d.ts @@ -0,0 +1,5 @@ +/** + * @param value - header string value. + * @returns value split by commas that aren't in quotes. + */ +export declare const splitHeader: (value: string) => string[]; diff --git a/amplify/functions/deleteDocument/node_modules/@smithy/smithy-client/package.json b/amplify/functions/deleteDocument/node_modules/@smithy/smithy-client/package.json new file mode 100644 index 0000000..cbab653 --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@smithy/smithy-client/package.json @@ -0,0 +1,67 @@ +{ + "name": "@smithy/smithy-client", + "version": "4.2.2", + "scripts": { + "build": "concurrently 'yarn:build:cjs' 'yarn:build:es' 'yarn:build:types && yarn build:types:downlevel'", + "build:cjs": "node ../../scripts/inline smithy-client", + "build:es": "yarn g:tsc -p tsconfig.es.json", + "build:types": "yarn g:tsc -p tsconfig.types.json", + "build:types:downlevel": "rimraf dist-types/ts3.4 && downlevel-dts dist-types dist-types/ts3.4", + "stage-release": "rimraf ./.release && yarn pack && mkdir ./.release && tar zxvf ./package.tgz --directory ./.release && rm ./package.tgz", + "clean": "rimraf ./dist-* && rimraf *.tsbuildinfo || exit 0", + "lint": "eslint -c ../../.eslintrc.js \"src/**/*.ts\"", + "format": "prettier --config ../../prettier.config.js --ignore-path ../../.prettierignore --write \"**/*.{ts,md,json}\"", + "extract:docs": "api-extractor run --local", + "test": "yarn g:vitest run", + "test:watch": "yarn g:vitest watch" + }, + "main": 
"./dist-cjs/index.js", + "module": "./dist-es/index.js", + "types": "./dist-types/index.d.ts", + "author": { + "name": "AWS SDK for JavaScript Team", + "url": "https://aws.amazon.com/javascript/" + }, + "license": "Apache-2.0", + "dependencies": { + "@smithy/core": "^3.3.1", + "@smithy/middleware-endpoint": "^4.1.2", + "@smithy/middleware-stack": "^4.0.2", + "@smithy/protocol-http": "^5.1.0", + "@smithy/types": "^4.2.0", + "@smithy/util-stream": "^4.2.0", + "tslib": "^2.6.2" + }, + "engines": { + "node": ">=18.0.0" + }, + "typesVersions": { + "<4.0": { + "dist-types/*": [ + "dist-types/ts3.4/*" + ] + } + }, + "files": [ + "dist-*/**" + ], + "homepage": "https://github.com/smithy-lang/smithy-typescript/tree/main/packages/smithy-client", + "repository": { + "type": "git", + "url": "https://github.com/smithy-lang/smithy-typescript.git", + "directory": "packages/smithy-client" + }, + "devDependencies": { + "@types/node": "^18.11.9", + "concurrently": "7.0.0", + "downlevel-dts": "0.10.1", + "rimraf": "3.0.2", + "typedoc": "0.23.23" + }, + "typedoc": { + "entryPoint": "src/index.ts" + }, + "publishConfig": { + "directory": ".release/package" + } +} \ No newline at end of file diff --git a/amplify/functions/deleteDocument/node_modules/@smithy/types/LICENSE b/amplify/functions/deleteDocument/node_modules/@smithy/types/LICENSE new file mode 100644 index 0000000..e907b58 --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@smithy/types/LICENSE @@ -0,0 +1,201 @@ + Apache License + Version 2.0, January 2004 + http://www.apache.org/licenses/ + + TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION + + 1. Definitions. + + "License" shall mean the terms and conditions for use, reproduction, + and distribution as defined by Sections 1 through 9 of this document. + + "Licensor" shall mean the copyright owner or entity authorized by + the copyright owner that is granting the License. 
+ + "Legal Entity" shall mean the union of the acting entity and all + other entities that control, are controlled by, or are under common + control with that entity. For the purposes of this definition, + "control" means (i) the power, direct or indirect, to cause the + direction or management of such entity, whether by contract or + otherwise, or (ii) ownership of fifty percent (50%) or more of the + outstanding shares, or (iii) beneficial ownership of such entity. + + "You" (or "Your") shall mean an individual or Legal Entity + exercising permissions granted by this License. + + "Source" form shall mean the preferred form for making modifications, + including but not limited to software source code, documentation + source, and configuration files. + + "Object" form shall mean any form resulting from mechanical + transformation or translation of a Source form, including but + not limited to compiled object code, generated documentation, + and conversions to other media types. + + "Work" shall mean the work of authorship, whether in Source or + Object form, made available under the License, as indicated by a + copyright notice that is included in or attached to the work + (an example is provided in the Appendix below). + + "Derivative Works" shall mean any work, whether in Source or Object + form, that is based on (or derived from) the Work and for which the + editorial revisions, annotations, elaborations, or other modifications + represent, as a whole, an original work of authorship. For the purposes + of this License, Derivative Works shall not include works that remain + separable from, or merely link (or bind by name) to the interfaces of, + the Work and Derivative Works thereof. 
+ + "Contribution" shall mean any work of authorship, including + the original version of the Work and any modifications or additions + to that Work or Derivative Works thereof, that is intentionally + submitted to Licensor for inclusion in the Work by the copyright owner + or by an individual or Legal Entity authorized to submit on behalf of + the copyright owner. For the purposes of this definition, "submitted" + means any form of electronic, verbal, or written communication sent + to the Licensor or its representatives, including but not limited to + communication on electronic mailing lists, source code control systems, + and issue tracking systems that are managed by, or on behalf of, the + Licensor for the purpose of discussing and improving the Work, but + excluding communication that is conspicuously marked or otherwise + designated in writing by the copyright owner as "Not a Contribution." + + "Contributor" shall mean Licensor and any individual or Legal Entity + on behalf of whom a Contribution has been received by Licensor and + subsequently incorporated within the Work. + + 2. Grant of Copyright License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + copyright license to reproduce, prepare Derivative Works of, + publicly display, publicly perform, sublicense, and distribute the + Work and such Derivative Works in Source or Object form. + + 3. Grant of Patent License. 
Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + (except as stated in this section) patent license to make, have made, + use, offer to sell, sell, import, and otherwise transfer the Work, + where such license applies only to those patent claims licensable + by such Contributor that are necessarily infringed by their + Contribution(s) alone or by combination of their Contribution(s) + with the Work to which such Contribution(s) was submitted. If You + institute patent litigation against any entity (including a + cross-claim or counterclaim in a lawsuit) alleging that the Work + or a Contribution incorporated within the Work constitutes direct + or contributory patent infringement, then any patent licenses + granted to You under this License for that Work shall terminate + as of the date such litigation is filed. + + 4. Redistribution. You may reproduce and distribute copies of the + Work or Derivative Works thereof in any medium, with or without + modifications, and in Source or Object form, provided that You + meet the following conditions: + + (a) You must give any other recipients of the Work or + Derivative Works a copy of this License; and + + (b) You must cause any modified files to carry prominent notices + stating that You changed the files; and + + (c) You must retain, in the Source form of any Derivative Works + that You distribute, all copyright, patent, trademark, and + attribution notices from the Source form of the Work, + excluding those notices that do not pertain to any part of + the Derivative Works; and + + (d) If the Work includes a "NOTICE" text file as part of its + distribution, then any Derivative Works that You distribute must + include a readable copy of the attribution notices contained + within such NOTICE file, excluding those notices that do not + pertain to any part of the Derivative Works, in at least one + of 
the following places: within a NOTICE text file distributed + as part of the Derivative Works; within the Source form or + documentation, if provided along with the Derivative Works; or, + within a display generated by the Derivative Works, if and + wherever such third-party notices normally appear. The contents + of the NOTICE file are for informational purposes only and + do not modify the License. You may add Your own attribution + notices within Derivative Works that You distribute, alongside + or as an addendum to the NOTICE text from the Work, provided + that such additional attribution notices cannot be construed + as modifying the License. + + You may add Your own copyright statement to Your modifications and + may provide additional or different license terms and conditions + for use, reproduction, or distribution of Your modifications, or + for any such Derivative Works as a whole, provided Your use, + reproduction, and distribution of the Work otherwise complies with + the conditions stated in this License. + + 5. Submission of Contributions. Unless You explicitly state otherwise, + any Contribution intentionally submitted for inclusion in the Work + by You to the Licensor shall be under the terms and conditions of + this License, without any additional terms or conditions. + Notwithstanding the above, nothing herein shall supersede or modify + the terms of any separate license agreement you may have executed + with Licensor regarding such Contributions. + + 6. Trademarks. This License does not grant permission to use the trade + names, trademarks, service marks, or product names of the Licensor, + except as required for reasonable and customary use in describing the + origin of the Work and reproducing the content of the NOTICE file. + + 7. Disclaimer of Warranty. 
Unless required by applicable law or + agreed to in writing, Licensor provides the Work (and each + Contributor provides its Contributions) on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or + implied, including, without limitation, any warranties or conditions + of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A + PARTICULAR PURPOSE. You are solely responsible for determining the + appropriateness of using or redistributing the Work and assume any + risks associated with Your exercise of permissions under this License. + + 8. Limitation of Liability. In no event and under no legal theory, + whether in tort (including negligence), contract, or otherwise, + unless required by applicable law (such as deliberate and grossly + negligent acts) or agreed to in writing, shall any Contributor be + liable to You for damages, including any direct, indirect, special, + incidental, or consequential damages of any character arising as a + result of this License or out of the use or inability to use the + Work (including but not limited to damages for loss of goodwill, + work stoppage, computer failure or malfunction, or any and all + other commercial damages or losses), even if such Contributor + has been advised of the possibility of such damages. + + 9. Accepting Warranty or Additional Liability. While redistributing + the Work or Derivative Works thereof, You may choose to offer, + and charge a fee for, acceptance of support, warranty, indemnity, + or other liability obligations and/or rights consistent with this + License. However, in accepting such obligations, You may act only + on Your own behalf and on Your sole responsibility, not on behalf + of any other Contributor, and only if You agree to indemnify, + defend, and hold each Contributor harmless for any liability + incurred by, or claims asserted against, such Contributor by reason + of your accepting any such warranty or additional liability. 
+ + END OF TERMS AND CONDITIONS + + APPENDIX: How to apply the Apache License to your work. + + To apply the Apache License to your work, attach the following + boilerplate notice, with the fields enclosed by brackets "{}" + replaced with your own identifying information. (Don't include + the brackets!) The text should be enclosed in the appropriate + comment syntax for the file format. We also recommend that a + file or class name and description of purpose be included on the + same "printed page" as the copyright notice for easier + identification within third-party archives. + + Copyright 2019 Amazon.com, Inc. or its affiliates. All Rights Reserved. + + Licensed under the Apache License, Version 2.0 (the "License"); + you may not use this file except in compliance with the License. + You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + + Unless required by applicable law or agreed to in writing, software + distributed under the License is distributed on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + See the License for the specific language governing permissions and + limitations under the License. diff --git a/amplify/functions/deleteDocument/node_modules/@smithy/types/README.md b/amplify/functions/deleteDocument/node_modules/@smithy/types/README.md new file mode 100644 index 0000000..7ab3ccd --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@smithy/types/README.md @@ -0,0 +1,115 @@ +# @smithy/types + +[![NPM version](https://img.shields.io/npm/v/@smithy/types/latest.svg)](https://www.npmjs.com/package/@smithy/types) +[![NPM downloads](https://img.shields.io/npm/dm/@smithy/types.svg)](https://www.npmjs.com/package/@smithy/types) + +## Usage + +This package is mostly used internally by generated clients. +Some public components have independent applications. 
+ +--- + +### Scenario: Removing `| undefined` from input and output structures + +Generated shapes' members are unioned with `undefined` for +input shapes, and are `?` (optional) for output shapes. + +- for inputs, this defers the validation to the service. +- for outputs, this strongly suggests that you should runtime-check the output data. + +If you would like to skip these steps, use the `AssertiveClient` or +`UncheckedClient` type helpers. + +Using AWS S3 as an example: + +```ts +import { S3 } from "@aws-sdk/client-s3"; +import type { AssertiveClient, UncheckedClient } from "@smithy/types"; + +const s3a = new S3({}) as AssertiveClient; +const s3b = new S3({}) as UncheckedClient; + +// AssertiveClient enforces required inputs are not undefined +// and required outputs are not undefined. +const get = await s3a.getObject({ + Bucket: "", + // @ts-expect-error (undefined not assignable to string) + Key: undefined, +}); + +// UncheckedClient makes output fields non-nullable. +// You should still perform type checks as you deem +// necessary, but the SDK will no longer prompt you +// with nullability errors. +const body = await ( + await s3b.getObject({ + Bucket: "", + Key: "", + }) +).Body.transformToString(); +``` + +When using the transform on non-aggregated client with the `Command` syntax, +the input cannot be validated because it goes through another class. + +```ts +import { S3Client, ListBucketsCommand, GetObjectCommand, GetObjectCommandInput } from "@aws-sdk/client-s3"; +import type { AssertiveClient, UncheckedClient, NoUndefined } from "@smithy/types"; + +const s3 = new S3Client({}) as UncheckedClient; + +const list = await s3.send( + new ListBucketsCommand({ + // command inputs are not validated by the type transform. + // because this is a separate class. + }) +); + +/** + * Although less ergonomic, you can use the NoUndefined + * transform on the input type. 
+ */ +const getObjectInput: NoUndefined = { + Bucket: "undefined", + // @ts-expect-error (undefined not assignable to string) + Key: undefined, + // optional params can still be undefined. + SSECustomerAlgorithm: undefined, +}; + +const get = s3.send(new GetObjectCommand(getObjectInput)); + +// outputs are still transformed. +await get.Body.TransformToString(); +``` + +### Scenario: Narrowing a smithy-typescript generated client's output payload blob types + +This is mostly relevant to operations with streaming bodies such as within +the S3Client in the AWS SDK for JavaScript v3. + +Because blob payload types are platform dependent, you may wish to indicate in your application that a client is running in a specific +environment. This narrows the blob payload types. + +```typescript +import { GetObjectCommand, S3Client } from "@aws-sdk/client-s3"; +import type { NodeJsClient, SdkStream, StreamingBlobPayloadOutputTypes } from "@smithy/types"; +import type { IncomingMessage } from "node:http"; + +// default client init. +const s3Default = new S3Client({}); + +// client init with type narrowing. +const s3NarrowType = new S3Client({}) as NodeJsClient; + +// The default type of blob payloads is a wide union type including multiple possible +// request handlers. +const body1: StreamingBlobPayloadOutputTypes = (await s3Default.send(new GetObjectCommand({ Key: "", Bucket: "" }))) + .Body!; + +// This is of the narrower type SdkStream representing +// blob payload responses using specifically the node:http request handler. 
+const body2: SdkStream = (await s3NarrowType.send(new GetObjectCommand({ Key: "", Bucket: "" }))) + .Body!; +``` diff --git a/amplify/functions/deleteDocument/node_modules/@smithy/types/dist-cjs/abort-handler.js b/amplify/functions/deleteDocument/node_modules/@smithy/types/dist-cjs/abort-handler.js new file mode 100644 index 0000000..532e610 --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@smithy/types/dist-cjs/abort-handler.js @@ -0,0 +1 @@ +module.exports = require("./index.js"); \ No newline at end of file diff --git a/amplify/functions/deleteDocument/node_modules/@smithy/types/dist-cjs/abort.js b/amplify/functions/deleteDocument/node_modules/@smithy/types/dist-cjs/abort.js new file mode 100644 index 0000000..532e610 --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@smithy/types/dist-cjs/abort.js @@ -0,0 +1 @@ +module.exports = require("./index.js"); \ No newline at end of file diff --git a/amplify/functions/deleteDocument/node_modules/@smithy/types/dist-cjs/auth/HttpApiKeyAuth.js b/amplify/functions/deleteDocument/node_modules/@smithy/types/dist-cjs/auth/HttpApiKeyAuth.js new file mode 100644 index 0000000..0440577 --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@smithy/types/dist-cjs/auth/HttpApiKeyAuth.js @@ -0,0 +1 @@ +module.exports = require("../index.js"); \ No newline at end of file diff --git a/amplify/functions/deleteDocument/node_modules/@smithy/types/dist-cjs/auth/HttpAuthScheme.js b/amplify/functions/deleteDocument/node_modules/@smithy/types/dist-cjs/auth/HttpAuthScheme.js new file mode 100644 index 0000000..0440577 --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@smithy/types/dist-cjs/auth/HttpAuthScheme.js @@ -0,0 +1 @@ +module.exports = require("../index.js"); \ No newline at end of file diff --git a/amplify/functions/deleteDocument/node_modules/@smithy/types/dist-cjs/auth/HttpAuthSchemeProvider.js 
b/amplify/functions/deleteDocument/node_modules/@smithy/types/dist-cjs/auth/HttpAuthSchemeProvider.js new file mode 100644 index 0000000..0440577 --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@smithy/types/dist-cjs/auth/HttpAuthSchemeProvider.js @@ -0,0 +1 @@ +module.exports = require("../index.js"); \ No newline at end of file diff --git a/amplify/functions/deleteDocument/node_modules/@smithy/types/dist-cjs/auth/HttpSigner.js b/amplify/functions/deleteDocument/node_modules/@smithy/types/dist-cjs/auth/HttpSigner.js new file mode 100644 index 0000000..0440577 --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@smithy/types/dist-cjs/auth/HttpSigner.js @@ -0,0 +1 @@ +module.exports = require("../index.js"); \ No newline at end of file diff --git a/amplify/functions/deleteDocument/node_modules/@smithy/types/dist-cjs/auth/IdentityProviderConfig.js b/amplify/functions/deleteDocument/node_modules/@smithy/types/dist-cjs/auth/IdentityProviderConfig.js new file mode 100644 index 0000000..0440577 --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@smithy/types/dist-cjs/auth/IdentityProviderConfig.js @@ -0,0 +1 @@ +module.exports = require("../index.js"); \ No newline at end of file diff --git a/amplify/functions/deleteDocument/node_modules/@smithy/types/dist-cjs/auth/auth.js b/amplify/functions/deleteDocument/node_modules/@smithy/types/dist-cjs/auth/auth.js new file mode 100644 index 0000000..0440577 --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@smithy/types/dist-cjs/auth/auth.js @@ -0,0 +1 @@ +module.exports = require("../index.js"); \ No newline at end of file diff --git a/amplify/functions/deleteDocument/node_modules/@smithy/types/dist-cjs/auth/index.js b/amplify/functions/deleteDocument/node_modules/@smithy/types/dist-cjs/auth/index.js new file mode 100644 index 0000000..0440577 --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@smithy/types/dist-cjs/auth/index.js @@ -0,0 +1 @@ 
+module.exports = require("../index.js"); \ No newline at end of file diff --git a/amplify/functions/deleteDocument/node_modules/@smithy/types/dist-cjs/blob/blob-payload-input-types.js b/amplify/functions/deleteDocument/node_modules/@smithy/types/dist-cjs/blob/blob-payload-input-types.js new file mode 100644 index 0000000..0440577 --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@smithy/types/dist-cjs/blob/blob-payload-input-types.js @@ -0,0 +1 @@ +module.exports = require("../index.js"); \ No newline at end of file diff --git a/amplify/functions/deleteDocument/node_modules/@smithy/types/dist-cjs/checksum.js b/amplify/functions/deleteDocument/node_modules/@smithy/types/dist-cjs/checksum.js new file mode 100644 index 0000000..532e610 --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@smithy/types/dist-cjs/checksum.js @@ -0,0 +1 @@ +module.exports = require("./index.js"); \ No newline at end of file diff --git a/amplify/functions/deleteDocument/node_modules/@smithy/types/dist-cjs/client.js b/amplify/functions/deleteDocument/node_modules/@smithy/types/dist-cjs/client.js new file mode 100644 index 0000000..532e610 --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@smithy/types/dist-cjs/client.js @@ -0,0 +1 @@ +module.exports = require("./index.js"); \ No newline at end of file diff --git a/amplify/functions/deleteDocument/node_modules/@smithy/types/dist-cjs/command.js b/amplify/functions/deleteDocument/node_modules/@smithy/types/dist-cjs/command.js new file mode 100644 index 0000000..532e610 --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@smithy/types/dist-cjs/command.js @@ -0,0 +1 @@ +module.exports = require("./index.js"); \ No newline at end of file diff --git a/amplify/functions/deleteDocument/node_modules/@smithy/types/dist-cjs/connection/config.js b/amplify/functions/deleteDocument/node_modules/@smithy/types/dist-cjs/connection/config.js new file mode 100644 index 0000000..0440577 --- /dev/null 
+++ b/amplify/functions/deleteDocument/node_modules/@smithy/types/dist-cjs/connection/config.js @@ -0,0 +1 @@ +module.exports = require("../index.js"); \ No newline at end of file diff --git a/amplify/functions/deleteDocument/node_modules/@smithy/types/dist-cjs/connection/index.js b/amplify/functions/deleteDocument/node_modules/@smithy/types/dist-cjs/connection/index.js new file mode 100644 index 0000000..0440577 --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@smithy/types/dist-cjs/connection/index.js @@ -0,0 +1 @@ +module.exports = require("../index.js"); \ No newline at end of file diff --git a/amplify/functions/deleteDocument/node_modules/@smithy/types/dist-cjs/connection/manager.js b/amplify/functions/deleteDocument/node_modules/@smithy/types/dist-cjs/connection/manager.js new file mode 100644 index 0000000..0440577 --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@smithy/types/dist-cjs/connection/manager.js @@ -0,0 +1 @@ +module.exports = require("../index.js"); \ No newline at end of file diff --git a/amplify/functions/deleteDocument/node_modules/@smithy/types/dist-cjs/connection/pool.js b/amplify/functions/deleteDocument/node_modules/@smithy/types/dist-cjs/connection/pool.js new file mode 100644 index 0000000..0440577 --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@smithy/types/dist-cjs/connection/pool.js @@ -0,0 +1 @@ +module.exports = require("../index.js"); \ No newline at end of file diff --git a/amplify/functions/deleteDocument/node_modules/@smithy/types/dist-cjs/crypto.js b/amplify/functions/deleteDocument/node_modules/@smithy/types/dist-cjs/crypto.js new file mode 100644 index 0000000..532e610 --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@smithy/types/dist-cjs/crypto.js @@ -0,0 +1 @@ +module.exports = require("./index.js"); \ No newline at end of file diff --git a/amplify/functions/deleteDocument/node_modules/@smithy/types/dist-cjs/downlevel-ts3.4/transform/type-transform.js 
b/amplify/functions/deleteDocument/node_modules/@smithy/types/dist-cjs/downlevel-ts3.4/transform/type-transform.js new file mode 100644 index 0000000..8817412 --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@smithy/types/dist-cjs/downlevel-ts3.4/transform/type-transform.js @@ -0,0 +1 @@ +module.exports = require("../../index.js"); \ No newline at end of file diff --git a/amplify/functions/deleteDocument/node_modules/@smithy/types/dist-cjs/encode.js b/amplify/functions/deleteDocument/node_modules/@smithy/types/dist-cjs/encode.js new file mode 100644 index 0000000..532e610 --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@smithy/types/dist-cjs/encode.js @@ -0,0 +1 @@ +module.exports = require("./index.js"); \ No newline at end of file diff --git a/amplify/functions/deleteDocument/node_modules/@smithy/types/dist-cjs/endpoint.js b/amplify/functions/deleteDocument/node_modules/@smithy/types/dist-cjs/endpoint.js new file mode 100644 index 0000000..532e610 --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@smithy/types/dist-cjs/endpoint.js @@ -0,0 +1 @@ +module.exports = require("./index.js"); \ No newline at end of file diff --git a/amplify/functions/deleteDocument/node_modules/@smithy/types/dist-cjs/endpoints/EndpointRuleObject.js b/amplify/functions/deleteDocument/node_modules/@smithy/types/dist-cjs/endpoints/EndpointRuleObject.js new file mode 100644 index 0000000..0440577 --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@smithy/types/dist-cjs/endpoints/EndpointRuleObject.js @@ -0,0 +1 @@ +module.exports = require("../index.js"); \ No newline at end of file diff --git a/amplify/functions/deleteDocument/node_modules/@smithy/types/dist-cjs/endpoints/ErrorRuleObject.js b/amplify/functions/deleteDocument/node_modules/@smithy/types/dist-cjs/endpoints/ErrorRuleObject.js new file mode 100644 index 0000000..0440577 --- /dev/null +++ 
b/amplify/functions/deleteDocument/node_modules/@smithy/types/dist-cjs/endpoints/ErrorRuleObject.js @@ -0,0 +1 @@ +module.exports = require("../index.js"); \ No newline at end of file diff --git a/amplify/functions/deleteDocument/node_modules/@smithy/types/dist-cjs/endpoints/RuleSetObject.js b/amplify/functions/deleteDocument/node_modules/@smithy/types/dist-cjs/endpoints/RuleSetObject.js new file mode 100644 index 0000000..0440577 --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@smithy/types/dist-cjs/endpoints/RuleSetObject.js @@ -0,0 +1 @@ +module.exports = require("../index.js"); \ No newline at end of file diff --git a/amplify/functions/deleteDocument/node_modules/@smithy/types/dist-cjs/endpoints/TreeRuleObject.js b/amplify/functions/deleteDocument/node_modules/@smithy/types/dist-cjs/endpoints/TreeRuleObject.js new file mode 100644 index 0000000..0440577 --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@smithy/types/dist-cjs/endpoints/TreeRuleObject.js @@ -0,0 +1 @@ +module.exports = require("../index.js"); \ No newline at end of file diff --git a/amplify/functions/deleteDocument/node_modules/@smithy/types/dist-cjs/endpoints/index.js b/amplify/functions/deleteDocument/node_modules/@smithy/types/dist-cjs/endpoints/index.js new file mode 100644 index 0000000..0440577 --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@smithy/types/dist-cjs/endpoints/index.js @@ -0,0 +1 @@ +module.exports = require("../index.js"); \ No newline at end of file diff --git a/amplify/functions/deleteDocument/node_modules/@smithy/types/dist-cjs/endpoints/shared.js b/amplify/functions/deleteDocument/node_modules/@smithy/types/dist-cjs/endpoints/shared.js new file mode 100644 index 0000000..0440577 --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@smithy/types/dist-cjs/endpoints/shared.js @@ -0,0 +1 @@ +module.exports = require("../index.js"); \ No newline at end of file diff --git 
a/amplify/functions/deleteDocument/node_modules/@smithy/types/dist-cjs/eventStream.js b/amplify/functions/deleteDocument/node_modules/@smithy/types/dist-cjs/eventStream.js new file mode 100644 index 0000000..532e610 --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@smithy/types/dist-cjs/eventStream.js @@ -0,0 +1 @@ +module.exports = require("./index.js"); \ No newline at end of file diff --git a/amplify/functions/deleteDocument/node_modules/@smithy/types/dist-cjs/extensions/checksum.js b/amplify/functions/deleteDocument/node_modules/@smithy/types/dist-cjs/extensions/checksum.js new file mode 100644 index 0000000..0440577 --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@smithy/types/dist-cjs/extensions/checksum.js @@ -0,0 +1 @@ +module.exports = require("../index.js"); \ No newline at end of file diff --git a/amplify/functions/deleteDocument/node_modules/@smithy/types/dist-cjs/extensions/defaultClientConfiguration.js b/amplify/functions/deleteDocument/node_modules/@smithy/types/dist-cjs/extensions/defaultClientConfiguration.js new file mode 100644 index 0000000..0440577 --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@smithy/types/dist-cjs/extensions/defaultClientConfiguration.js @@ -0,0 +1 @@ +module.exports = require("../index.js"); \ No newline at end of file diff --git a/amplify/functions/deleteDocument/node_modules/@smithy/types/dist-cjs/extensions/defaultExtensionConfiguration.js b/amplify/functions/deleteDocument/node_modules/@smithy/types/dist-cjs/extensions/defaultExtensionConfiguration.js new file mode 100644 index 0000000..0440577 --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@smithy/types/dist-cjs/extensions/defaultExtensionConfiguration.js @@ -0,0 +1 @@ +module.exports = require("../index.js"); \ No newline at end of file diff --git a/amplify/functions/deleteDocument/node_modules/@smithy/types/dist-cjs/extensions/index.js 
b/amplify/functions/deleteDocument/node_modules/@smithy/types/dist-cjs/extensions/index.js new file mode 100644 index 0000000..0440577 --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@smithy/types/dist-cjs/extensions/index.js @@ -0,0 +1 @@ +module.exports = require("../index.js"); \ No newline at end of file diff --git a/amplify/functions/deleteDocument/node_modules/@smithy/types/dist-cjs/extensions/retry.js b/amplify/functions/deleteDocument/node_modules/@smithy/types/dist-cjs/extensions/retry.js new file mode 100644 index 0000000..0440577 --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@smithy/types/dist-cjs/extensions/retry.js @@ -0,0 +1 @@ +module.exports = require("../index.js"); \ No newline at end of file diff --git a/amplify/functions/deleteDocument/node_modules/@smithy/types/dist-cjs/externals-check/browser-externals-check.js b/amplify/functions/deleteDocument/node_modules/@smithy/types/dist-cjs/externals-check/browser-externals-check.js new file mode 100644 index 0000000..0440577 --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@smithy/types/dist-cjs/externals-check/browser-externals-check.js @@ -0,0 +1 @@ +module.exports = require("../index.js"); \ No newline at end of file diff --git a/amplify/functions/deleteDocument/node_modules/@smithy/types/dist-cjs/feature-ids.js b/amplify/functions/deleteDocument/node_modules/@smithy/types/dist-cjs/feature-ids.js new file mode 100644 index 0000000..532e610 --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@smithy/types/dist-cjs/feature-ids.js @@ -0,0 +1 @@ +module.exports = require("./index.js"); \ No newline at end of file diff --git a/amplify/functions/deleteDocument/node_modules/@smithy/types/dist-cjs/http.js b/amplify/functions/deleteDocument/node_modules/@smithy/types/dist-cjs/http.js new file mode 100644 index 0000000..532e610 --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@smithy/types/dist-cjs/http.js @@ -0,0 +1 @@ 
+module.exports = require("./index.js"); \ No newline at end of file diff --git a/amplify/functions/deleteDocument/node_modules/@smithy/types/dist-cjs/http/httpHandlerInitialization.js b/amplify/functions/deleteDocument/node_modules/@smithy/types/dist-cjs/http/httpHandlerInitialization.js new file mode 100644 index 0000000..0440577 --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@smithy/types/dist-cjs/http/httpHandlerInitialization.js @@ -0,0 +1 @@ +module.exports = require("../index.js"); \ No newline at end of file diff --git a/amplify/functions/deleteDocument/node_modules/@smithy/types/dist-cjs/identity/apiKeyIdentity.js b/amplify/functions/deleteDocument/node_modules/@smithy/types/dist-cjs/identity/apiKeyIdentity.js new file mode 100644 index 0000000..0440577 --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@smithy/types/dist-cjs/identity/apiKeyIdentity.js @@ -0,0 +1 @@ +module.exports = require("../index.js"); \ No newline at end of file diff --git a/amplify/functions/deleteDocument/node_modules/@smithy/types/dist-cjs/identity/awsCredentialIdentity.js b/amplify/functions/deleteDocument/node_modules/@smithy/types/dist-cjs/identity/awsCredentialIdentity.js new file mode 100644 index 0000000..0440577 --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@smithy/types/dist-cjs/identity/awsCredentialIdentity.js @@ -0,0 +1 @@ +module.exports = require("../index.js"); \ No newline at end of file diff --git a/amplify/functions/deleteDocument/node_modules/@smithy/types/dist-cjs/identity/identity.js b/amplify/functions/deleteDocument/node_modules/@smithy/types/dist-cjs/identity/identity.js new file mode 100644 index 0000000..0440577 --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@smithy/types/dist-cjs/identity/identity.js @@ -0,0 +1 @@ +module.exports = require("../index.js"); \ No newline at end of file diff --git a/amplify/functions/deleteDocument/node_modules/@smithy/types/dist-cjs/identity/index.js 
b/amplify/functions/deleteDocument/node_modules/@smithy/types/dist-cjs/identity/index.js new file mode 100644 index 0000000..0440577 --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@smithy/types/dist-cjs/identity/index.js @@ -0,0 +1 @@ +module.exports = require("../index.js"); \ No newline at end of file diff --git a/amplify/functions/deleteDocument/node_modules/@smithy/types/dist-cjs/identity/tokenIdentity.js b/amplify/functions/deleteDocument/node_modules/@smithy/types/dist-cjs/identity/tokenIdentity.js new file mode 100644 index 0000000..0440577 --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@smithy/types/dist-cjs/identity/tokenIdentity.js @@ -0,0 +1 @@ +module.exports = require("../index.js"); \ No newline at end of file diff --git a/amplify/functions/deleteDocument/node_modules/@smithy/types/dist-cjs/index.js b/amplify/functions/deleteDocument/node_modules/@smithy/types/dist-cjs/index.js new file mode 100644 index 0000000..0849f2b --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@smithy/types/dist-cjs/index.js @@ -0,0 +1,144 @@ +var __defProp = Object.defineProperty; +var __getOwnPropDesc = Object.getOwnPropertyDescriptor; +var __getOwnPropNames = Object.getOwnPropertyNames; +var __hasOwnProp = Object.prototype.hasOwnProperty; +var __name = (target, value) => __defProp(target, "name", { value, configurable: true }); +var __export = (target, all) => { + for (var name in all) + __defProp(target, name, { get: all[name], enumerable: true }); +}; +var __copyProps = (to, from, except, desc) => { + if (from && typeof from === "object" || typeof from === "function") { + for (let key of __getOwnPropNames(from)) + if (!__hasOwnProp.call(to, key) && key !== except) + __defProp(to, key, { get: () => from[key], enumerable: !(desc = __getOwnPropDesc(from, key)) || desc.enumerable }); + } + return to; +}; +var __toCommonJS = (mod) => __copyProps(__defProp({}, "__esModule", { value: true }), mod); + +// src/index.ts +var 
src_exports = {}; +__export(src_exports, { + AlgorithmId: () => AlgorithmId, + EndpointURLScheme: () => EndpointURLScheme, + FieldPosition: () => FieldPosition, + HttpApiKeyAuthLocation: () => HttpApiKeyAuthLocation, + HttpAuthLocation: () => HttpAuthLocation, + IniSectionType: () => IniSectionType, + RequestHandlerProtocol: () => RequestHandlerProtocol, + SMITHY_CONTEXT_KEY: () => SMITHY_CONTEXT_KEY, + getDefaultClientConfiguration: () => getDefaultClientConfiguration, + resolveDefaultRuntimeConfig: () => resolveDefaultRuntimeConfig +}); +module.exports = __toCommonJS(src_exports); + +// src/auth/auth.ts +var HttpAuthLocation = /* @__PURE__ */ ((HttpAuthLocation2) => { + HttpAuthLocation2["HEADER"] = "header"; + HttpAuthLocation2["QUERY"] = "query"; + return HttpAuthLocation2; +})(HttpAuthLocation || {}); + +// src/auth/HttpApiKeyAuth.ts +var HttpApiKeyAuthLocation = /* @__PURE__ */ ((HttpApiKeyAuthLocation2) => { + HttpApiKeyAuthLocation2["HEADER"] = "header"; + HttpApiKeyAuthLocation2["QUERY"] = "query"; + return HttpApiKeyAuthLocation2; +})(HttpApiKeyAuthLocation || {}); + +// src/endpoint.ts +var EndpointURLScheme = /* @__PURE__ */ ((EndpointURLScheme2) => { + EndpointURLScheme2["HTTP"] = "http"; + EndpointURLScheme2["HTTPS"] = "https"; + return EndpointURLScheme2; +})(EndpointURLScheme || {}); + +// src/extensions/checksum.ts +var AlgorithmId = /* @__PURE__ */ ((AlgorithmId2) => { + AlgorithmId2["MD5"] = "md5"; + AlgorithmId2["CRC32"] = "crc32"; + AlgorithmId2["CRC32C"] = "crc32c"; + AlgorithmId2["SHA1"] = "sha1"; + AlgorithmId2["SHA256"] = "sha256"; + return AlgorithmId2; +})(AlgorithmId || {}); +var getChecksumConfiguration = /* @__PURE__ */ __name((runtimeConfig) => { + const checksumAlgorithms = []; + if (runtimeConfig.sha256 !== void 0) { + checksumAlgorithms.push({ + algorithmId: () => "sha256" /* SHA256 */, + checksumConstructor: () => runtimeConfig.sha256 + }); + } + if (runtimeConfig.md5 != void 0) { + checksumAlgorithms.push({ + algorithmId: () => 
"md5" /* MD5 */, + checksumConstructor: () => runtimeConfig.md5 + }); + } + return { + addChecksumAlgorithm(algo) { + checksumAlgorithms.push(algo); + }, + checksumAlgorithms() { + return checksumAlgorithms; + } + }; +}, "getChecksumConfiguration"); +var resolveChecksumRuntimeConfig = /* @__PURE__ */ __name((clientConfig) => { + const runtimeConfig = {}; + clientConfig.checksumAlgorithms().forEach((checksumAlgorithm) => { + runtimeConfig[checksumAlgorithm.algorithmId()] = checksumAlgorithm.checksumConstructor(); + }); + return runtimeConfig; +}, "resolveChecksumRuntimeConfig"); + +// src/extensions/defaultClientConfiguration.ts +var getDefaultClientConfiguration = /* @__PURE__ */ __name((runtimeConfig) => { + return getChecksumConfiguration(runtimeConfig); +}, "getDefaultClientConfiguration"); +var resolveDefaultRuntimeConfig = /* @__PURE__ */ __name((config) => { + return resolveChecksumRuntimeConfig(config); +}, "resolveDefaultRuntimeConfig"); + +// src/http.ts +var FieldPosition = /* @__PURE__ */ ((FieldPosition2) => { + FieldPosition2[FieldPosition2["HEADER"] = 0] = "HEADER"; + FieldPosition2[FieldPosition2["TRAILER"] = 1] = "TRAILER"; + return FieldPosition2; +})(FieldPosition || {}); + +// src/middleware.ts +var SMITHY_CONTEXT_KEY = "__smithy_context"; + +// src/profile.ts +var IniSectionType = /* @__PURE__ */ ((IniSectionType2) => { + IniSectionType2["PROFILE"] = "profile"; + IniSectionType2["SSO_SESSION"] = "sso-session"; + IniSectionType2["SERVICES"] = "services"; + return IniSectionType2; +})(IniSectionType || {}); + +// src/transfer.ts +var RequestHandlerProtocol = /* @__PURE__ */ ((RequestHandlerProtocol2) => { + RequestHandlerProtocol2["HTTP_0_9"] = "http/0.9"; + RequestHandlerProtocol2["HTTP_1_0"] = "http/1.0"; + RequestHandlerProtocol2["TDS_8_0"] = "tds/8.0"; + return RequestHandlerProtocol2; +})(RequestHandlerProtocol || {}); +// Annotate the CommonJS export names for ESM import in node: + +0 && (module.exports = { + HttpAuthLocation, + 
HttpApiKeyAuthLocation, + EndpointURLScheme, + AlgorithmId, + getDefaultClientConfiguration, + resolveDefaultRuntimeConfig, + FieldPosition, + SMITHY_CONTEXT_KEY, + IniSectionType, + RequestHandlerProtocol +}); + diff --git a/amplify/functions/deleteDocument/node_modules/@smithy/types/dist-cjs/logger.js b/amplify/functions/deleteDocument/node_modules/@smithy/types/dist-cjs/logger.js new file mode 100644 index 0000000..532e610 --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@smithy/types/dist-cjs/logger.js @@ -0,0 +1 @@ +module.exports = require("./index.js"); \ No newline at end of file diff --git a/amplify/functions/deleteDocument/node_modules/@smithy/types/dist-cjs/middleware.js b/amplify/functions/deleteDocument/node_modules/@smithy/types/dist-cjs/middleware.js new file mode 100644 index 0000000..532e610 --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@smithy/types/dist-cjs/middleware.js @@ -0,0 +1 @@ +module.exports = require("./index.js"); \ No newline at end of file diff --git a/amplify/functions/deleteDocument/node_modules/@smithy/types/dist-cjs/pagination.js b/amplify/functions/deleteDocument/node_modules/@smithy/types/dist-cjs/pagination.js new file mode 100644 index 0000000..532e610 --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@smithy/types/dist-cjs/pagination.js @@ -0,0 +1 @@ +module.exports = require("./index.js"); \ No newline at end of file diff --git a/amplify/functions/deleteDocument/node_modules/@smithy/types/dist-cjs/profile.js b/amplify/functions/deleteDocument/node_modules/@smithy/types/dist-cjs/profile.js new file mode 100644 index 0000000..532e610 --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@smithy/types/dist-cjs/profile.js @@ -0,0 +1 @@ +module.exports = require("./index.js"); \ No newline at end of file diff --git a/amplify/functions/deleteDocument/node_modules/@smithy/types/dist-cjs/response.js 
b/amplify/functions/deleteDocument/node_modules/@smithy/types/dist-cjs/response.js new file mode 100644 index 0000000..532e610 --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@smithy/types/dist-cjs/response.js @@ -0,0 +1 @@ +module.exports = require("./index.js"); \ No newline at end of file diff --git a/amplify/functions/deleteDocument/node_modules/@smithy/types/dist-cjs/retry.js b/amplify/functions/deleteDocument/node_modules/@smithy/types/dist-cjs/retry.js new file mode 100644 index 0000000..532e610 --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@smithy/types/dist-cjs/retry.js @@ -0,0 +1 @@ +module.exports = require("./index.js"); \ No newline at end of file diff --git a/amplify/functions/deleteDocument/node_modules/@smithy/types/dist-cjs/serde.js b/amplify/functions/deleteDocument/node_modules/@smithy/types/dist-cjs/serde.js new file mode 100644 index 0000000..532e610 --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@smithy/types/dist-cjs/serde.js @@ -0,0 +1 @@ +module.exports = require("./index.js"); \ No newline at end of file diff --git a/amplify/functions/deleteDocument/node_modules/@smithy/types/dist-cjs/shapes.js b/amplify/functions/deleteDocument/node_modules/@smithy/types/dist-cjs/shapes.js new file mode 100644 index 0000000..532e610 --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@smithy/types/dist-cjs/shapes.js @@ -0,0 +1 @@ +module.exports = require("./index.js"); \ No newline at end of file diff --git a/amplify/functions/deleteDocument/node_modules/@smithy/types/dist-cjs/signature.js b/amplify/functions/deleteDocument/node_modules/@smithy/types/dist-cjs/signature.js new file mode 100644 index 0000000..532e610 --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@smithy/types/dist-cjs/signature.js @@ -0,0 +1 @@ +module.exports = require("./index.js"); \ No newline at end of file diff --git 
a/amplify/functions/deleteDocument/node_modules/@smithy/types/dist-cjs/stream.js b/amplify/functions/deleteDocument/node_modules/@smithy/types/dist-cjs/stream.js new file mode 100644 index 0000000..532e610 --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@smithy/types/dist-cjs/stream.js @@ -0,0 +1 @@ +module.exports = require("./index.js"); \ No newline at end of file diff --git a/amplify/functions/deleteDocument/node_modules/@smithy/types/dist-cjs/streaming-payload/streaming-blob-common-types.js b/amplify/functions/deleteDocument/node_modules/@smithy/types/dist-cjs/streaming-payload/streaming-blob-common-types.js new file mode 100644 index 0000000..0440577 --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@smithy/types/dist-cjs/streaming-payload/streaming-blob-common-types.js @@ -0,0 +1 @@ +module.exports = require("../index.js"); \ No newline at end of file diff --git a/amplify/functions/deleteDocument/node_modules/@smithy/types/dist-cjs/streaming-payload/streaming-blob-payload-input-types.js b/amplify/functions/deleteDocument/node_modules/@smithy/types/dist-cjs/streaming-payload/streaming-blob-payload-input-types.js new file mode 100644 index 0000000..0440577 --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@smithy/types/dist-cjs/streaming-payload/streaming-blob-payload-input-types.js @@ -0,0 +1 @@ +module.exports = require("../index.js"); \ No newline at end of file diff --git a/amplify/functions/deleteDocument/node_modules/@smithy/types/dist-cjs/streaming-payload/streaming-blob-payload-output-types.js b/amplify/functions/deleteDocument/node_modules/@smithy/types/dist-cjs/streaming-payload/streaming-blob-payload-output-types.js new file mode 100644 index 0000000..0440577 --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@smithy/types/dist-cjs/streaming-payload/streaming-blob-payload-output-types.js @@ -0,0 +1 @@ +module.exports = require("../index.js"); \ No newline at end of file diff --git 
a/amplify/functions/deleteDocument/node_modules/@smithy/types/dist-cjs/transfer.js b/amplify/functions/deleteDocument/node_modules/@smithy/types/dist-cjs/transfer.js new file mode 100644 index 0000000..532e610 --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@smithy/types/dist-cjs/transfer.js @@ -0,0 +1 @@ +module.exports = require("./index.js"); \ No newline at end of file diff --git a/amplify/functions/deleteDocument/node_modules/@smithy/types/dist-cjs/transform/client-method-transforms.js b/amplify/functions/deleteDocument/node_modules/@smithy/types/dist-cjs/transform/client-method-transforms.js new file mode 100644 index 0000000..0440577 --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@smithy/types/dist-cjs/transform/client-method-transforms.js @@ -0,0 +1 @@ +module.exports = require("../index.js"); \ No newline at end of file diff --git a/amplify/functions/deleteDocument/node_modules/@smithy/types/dist-cjs/transform/client-payload-blob-type-narrow.js b/amplify/functions/deleteDocument/node_modules/@smithy/types/dist-cjs/transform/client-payload-blob-type-narrow.js new file mode 100644 index 0000000..0440577 --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@smithy/types/dist-cjs/transform/client-payload-blob-type-narrow.js @@ -0,0 +1 @@ +module.exports = require("../index.js"); \ No newline at end of file diff --git a/amplify/functions/deleteDocument/node_modules/@smithy/types/dist-cjs/transform/exact.js b/amplify/functions/deleteDocument/node_modules/@smithy/types/dist-cjs/transform/exact.js new file mode 100644 index 0000000..0440577 --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@smithy/types/dist-cjs/transform/exact.js @@ -0,0 +1 @@ +module.exports = require("../index.js"); \ No newline at end of file diff --git a/amplify/functions/deleteDocument/node_modules/@smithy/types/dist-cjs/transform/no-undefined.js 
b/amplify/functions/deleteDocument/node_modules/@smithy/types/dist-cjs/transform/no-undefined.js new file mode 100644 index 0000000..0440577 --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@smithy/types/dist-cjs/transform/no-undefined.js @@ -0,0 +1 @@ +module.exports = require("../index.js"); \ No newline at end of file diff --git a/amplify/functions/deleteDocument/node_modules/@smithy/types/dist-cjs/transform/type-transform.js b/amplify/functions/deleteDocument/node_modules/@smithy/types/dist-cjs/transform/type-transform.js new file mode 100644 index 0000000..0440577 --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@smithy/types/dist-cjs/transform/type-transform.js @@ -0,0 +1 @@ +module.exports = require("../index.js"); \ No newline at end of file diff --git a/amplify/functions/deleteDocument/node_modules/@smithy/types/dist-cjs/uri.js b/amplify/functions/deleteDocument/node_modules/@smithy/types/dist-cjs/uri.js new file mode 100644 index 0000000..532e610 --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@smithy/types/dist-cjs/uri.js @@ -0,0 +1 @@ +module.exports = require("./index.js"); \ No newline at end of file diff --git a/amplify/functions/deleteDocument/node_modules/@smithy/types/dist-cjs/util.js b/amplify/functions/deleteDocument/node_modules/@smithy/types/dist-cjs/util.js new file mode 100644 index 0000000..532e610 --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@smithy/types/dist-cjs/util.js @@ -0,0 +1 @@ +module.exports = require("./index.js"); \ No newline at end of file diff --git a/amplify/functions/deleteDocument/node_modules/@smithy/types/dist-cjs/waiter.js b/amplify/functions/deleteDocument/node_modules/@smithy/types/dist-cjs/waiter.js new file mode 100644 index 0000000..532e610 --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@smithy/types/dist-cjs/waiter.js @@ -0,0 +1 @@ +module.exports = require("./index.js"); \ No newline at end of file diff --git 
a/amplify/functions/deleteDocument/node_modules/@smithy/types/dist-es/abort-handler.js b/amplify/functions/deleteDocument/node_modules/@smithy/types/dist-es/abort-handler.js new file mode 100644 index 0000000..cb0ff5c --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@smithy/types/dist-es/abort-handler.js @@ -0,0 +1 @@ +export {}; diff --git a/amplify/functions/deleteDocument/node_modules/@smithy/types/dist-es/abort.js b/amplify/functions/deleteDocument/node_modules/@smithy/types/dist-es/abort.js new file mode 100644 index 0000000..cb0ff5c --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@smithy/types/dist-es/abort.js @@ -0,0 +1 @@ +export {}; diff --git a/amplify/functions/deleteDocument/node_modules/@smithy/types/dist-es/auth/HttpApiKeyAuth.js b/amplify/functions/deleteDocument/node_modules/@smithy/types/dist-es/auth/HttpApiKeyAuth.js new file mode 100644 index 0000000..4c02f24 --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@smithy/types/dist-es/auth/HttpApiKeyAuth.js @@ -0,0 +1,5 @@ +export var HttpApiKeyAuthLocation; +(function (HttpApiKeyAuthLocation) { + HttpApiKeyAuthLocation["HEADER"] = "header"; + HttpApiKeyAuthLocation["QUERY"] = "query"; +})(HttpApiKeyAuthLocation || (HttpApiKeyAuthLocation = {})); diff --git a/amplify/functions/deleteDocument/node_modules/@smithy/types/dist-es/auth/HttpAuthScheme.js b/amplify/functions/deleteDocument/node_modules/@smithy/types/dist-es/auth/HttpAuthScheme.js new file mode 100644 index 0000000..cb0ff5c --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@smithy/types/dist-es/auth/HttpAuthScheme.js @@ -0,0 +1 @@ +export {}; diff --git a/amplify/functions/deleteDocument/node_modules/@smithy/types/dist-es/auth/HttpAuthSchemeProvider.js b/amplify/functions/deleteDocument/node_modules/@smithy/types/dist-es/auth/HttpAuthSchemeProvider.js new file mode 100644 index 0000000..cb0ff5c --- /dev/null +++ 
b/amplify/functions/deleteDocument/node_modules/@smithy/types/dist-es/auth/HttpAuthSchemeProvider.js @@ -0,0 +1 @@ +export {}; diff --git a/amplify/functions/deleteDocument/node_modules/@smithy/types/dist-es/auth/HttpSigner.js b/amplify/functions/deleteDocument/node_modules/@smithy/types/dist-es/auth/HttpSigner.js new file mode 100644 index 0000000..cb0ff5c --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@smithy/types/dist-es/auth/HttpSigner.js @@ -0,0 +1 @@ +export {}; diff --git a/amplify/functions/deleteDocument/node_modules/@smithy/types/dist-es/auth/IdentityProviderConfig.js b/amplify/functions/deleteDocument/node_modules/@smithy/types/dist-es/auth/IdentityProviderConfig.js new file mode 100644 index 0000000..cb0ff5c --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@smithy/types/dist-es/auth/IdentityProviderConfig.js @@ -0,0 +1 @@ +export {}; diff --git a/amplify/functions/deleteDocument/node_modules/@smithy/types/dist-es/auth/auth.js b/amplify/functions/deleteDocument/node_modules/@smithy/types/dist-es/auth/auth.js new file mode 100644 index 0000000..bd3b2df --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@smithy/types/dist-es/auth/auth.js @@ -0,0 +1,5 @@ +export var HttpAuthLocation; +(function (HttpAuthLocation) { + HttpAuthLocation["HEADER"] = "header"; + HttpAuthLocation["QUERY"] = "query"; +})(HttpAuthLocation || (HttpAuthLocation = {})); diff --git a/amplify/functions/deleteDocument/node_modules/@smithy/types/dist-es/auth/index.js b/amplify/functions/deleteDocument/node_modules/@smithy/types/dist-es/auth/index.js new file mode 100644 index 0000000..7436030 --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@smithy/types/dist-es/auth/index.js @@ -0,0 +1,6 @@ +export * from "./auth"; +export * from "./HttpApiKeyAuth"; +export * from "./HttpAuthScheme"; +export * from "./HttpAuthSchemeProvider"; +export * from "./HttpSigner"; +export * from "./IdentityProviderConfig"; diff --git 
a/amplify/functions/deleteDocument/node_modules/@smithy/types/dist-es/blob/blob-payload-input-types.js b/amplify/functions/deleteDocument/node_modules/@smithy/types/dist-es/blob/blob-payload-input-types.js new file mode 100644 index 0000000..cb0ff5c --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@smithy/types/dist-es/blob/blob-payload-input-types.js @@ -0,0 +1 @@ +export {}; diff --git a/amplify/functions/deleteDocument/node_modules/@smithy/types/dist-es/checksum.js b/amplify/functions/deleteDocument/node_modules/@smithy/types/dist-es/checksum.js new file mode 100644 index 0000000..cb0ff5c --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@smithy/types/dist-es/checksum.js @@ -0,0 +1 @@ +export {}; diff --git a/amplify/functions/deleteDocument/node_modules/@smithy/types/dist-es/client.js b/amplify/functions/deleteDocument/node_modules/@smithy/types/dist-es/client.js new file mode 100644 index 0000000..cb0ff5c --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@smithy/types/dist-es/client.js @@ -0,0 +1 @@ +export {}; diff --git a/amplify/functions/deleteDocument/node_modules/@smithy/types/dist-es/command.js b/amplify/functions/deleteDocument/node_modules/@smithy/types/dist-es/command.js new file mode 100644 index 0000000..cb0ff5c --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@smithy/types/dist-es/command.js @@ -0,0 +1 @@ +export {}; diff --git a/amplify/functions/deleteDocument/node_modules/@smithy/types/dist-es/connection/config.js b/amplify/functions/deleteDocument/node_modules/@smithy/types/dist-es/connection/config.js new file mode 100644 index 0000000..cb0ff5c --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@smithy/types/dist-es/connection/config.js @@ -0,0 +1 @@ +export {}; diff --git a/amplify/functions/deleteDocument/node_modules/@smithy/types/dist-es/connection/index.js b/amplify/functions/deleteDocument/node_modules/@smithy/types/dist-es/connection/index.js new file mode 
100644 index 0000000..c6c3ea8 --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@smithy/types/dist-es/connection/index.js @@ -0,0 +1,3 @@ +export * from "./config"; +export * from "./manager"; +export * from "./pool"; diff --git a/amplify/functions/deleteDocument/node_modules/@smithy/types/dist-es/connection/manager.js b/amplify/functions/deleteDocument/node_modules/@smithy/types/dist-es/connection/manager.js new file mode 100644 index 0000000..cb0ff5c --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@smithy/types/dist-es/connection/manager.js @@ -0,0 +1 @@ +export {}; diff --git a/amplify/functions/deleteDocument/node_modules/@smithy/types/dist-es/connection/pool.js b/amplify/functions/deleteDocument/node_modules/@smithy/types/dist-es/connection/pool.js new file mode 100644 index 0000000..cb0ff5c --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@smithy/types/dist-es/connection/pool.js @@ -0,0 +1 @@ +export {}; diff --git a/amplify/functions/deleteDocument/node_modules/@smithy/types/dist-es/crypto.js b/amplify/functions/deleteDocument/node_modules/@smithy/types/dist-es/crypto.js new file mode 100644 index 0000000..cb0ff5c --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@smithy/types/dist-es/crypto.js @@ -0,0 +1 @@ +export {}; diff --git a/amplify/functions/deleteDocument/node_modules/@smithy/types/dist-es/downlevel-ts3.4/transform/type-transform.js b/amplify/functions/deleteDocument/node_modules/@smithy/types/dist-es/downlevel-ts3.4/transform/type-transform.js new file mode 100644 index 0000000..cb0ff5c --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@smithy/types/dist-es/downlevel-ts3.4/transform/type-transform.js @@ -0,0 +1 @@ +export {}; diff --git a/amplify/functions/deleteDocument/node_modules/@smithy/types/dist-es/encode.js b/amplify/functions/deleteDocument/node_modules/@smithy/types/dist-es/encode.js new file mode 100644 index 0000000..cb0ff5c --- /dev/null +++ 
b/amplify/functions/deleteDocument/node_modules/@smithy/types/dist-es/encode.js @@ -0,0 +1 @@ +export {}; diff --git a/amplify/functions/deleteDocument/node_modules/@smithy/types/dist-es/endpoint.js b/amplify/functions/deleteDocument/node_modules/@smithy/types/dist-es/endpoint.js new file mode 100644 index 0000000..4ae601f --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@smithy/types/dist-es/endpoint.js @@ -0,0 +1,5 @@ +export var EndpointURLScheme; +(function (EndpointURLScheme) { + EndpointURLScheme["HTTP"] = "http"; + EndpointURLScheme["HTTPS"] = "https"; +})(EndpointURLScheme || (EndpointURLScheme = {})); diff --git a/amplify/functions/deleteDocument/node_modules/@smithy/types/dist-es/endpoints/EndpointRuleObject.js b/amplify/functions/deleteDocument/node_modules/@smithy/types/dist-es/endpoints/EndpointRuleObject.js new file mode 100644 index 0000000..cb0ff5c --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@smithy/types/dist-es/endpoints/EndpointRuleObject.js @@ -0,0 +1 @@ +export {}; diff --git a/amplify/functions/deleteDocument/node_modules/@smithy/types/dist-es/endpoints/ErrorRuleObject.js b/amplify/functions/deleteDocument/node_modules/@smithy/types/dist-es/endpoints/ErrorRuleObject.js new file mode 100644 index 0000000..cb0ff5c --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@smithy/types/dist-es/endpoints/ErrorRuleObject.js @@ -0,0 +1 @@ +export {}; diff --git a/amplify/functions/deleteDocument/node_modules/@smithy/types/dist-es/endpoints/RuleSetObject.js b/amplify/functions/deleteDocument/node_modules/@smithy/types/dist-es/endpoints/RuleSetObject.js new file mode 100644 index 0000000..cb0ff5c --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@smithy/types/dist-es/endpoints/RuleSetObject.js @@ -0,0 +1 @@ +export {}; diff --git a/amplify/functions/deleteDocument/node_modules/@smithy/types/dist-es/endpoints/TreeRuleObject.js 
b/amplify/functions/deleteDocument/node_modules/@smithy/types/dist-es/endpoints/TreeRuleObject.js new file mode 100644 index 0000000..cb0ff5c --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@smithy/types/dist-es/endpoints/TreeRuleObject.js @@ -0,0 +1 @@ +export {}; diff --git a/amplify/functions/deleteDocument/node_modules/@smithy/types/dist-es/endpoints/index.js b/amplify/functions/deleteDocument/node_modules/@smithy/types/dist-es/endpoints/index.js new file mode 100644 index 0000000..64d85cf --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@smithy/types/dist-es/endpoints/index.js @@ -0,0 +1,5 @@ +export * from "./EndpointRuleObject"; +export * from "./ErrorRuleObject"; +export * from "./RuleSetObject"; +export * from "./shared"; +export * from "./TreeRuleObject"; diff --git a/amplify/functions/deleteDocument/node_modules/@smithy/types/dist-es/endpoints/shared.js b/amplify/functions/deleteDocument/node_modules/@smithy/types/dist-es/endpoints/shared.js new file mode 100644 index 0000000..cb0ff5c --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@smithy/types/dist-es/endpoints/shared.js @@ -0,0 +1 @@ +export {}; diff --git a/amplify/functions/deleteDocument/node_modules/@smithy/types/dist-es/eventStream.js b/amplify/functions/deleteDocument/node_modules/@smithy/types/dist-es/eventStream.js new file mode 100644 index 0000000..cb0ff5c --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@smithy/types/dist-es/eventStream.js @@ -0,0 +1 @@ +export {}; diff --git a/amplify/functions/deleteDocument/node_modules/@smithy/types/dist-es/extensions/checksum.js b/amplify/functions/deleteDocument/node_modules/@smithy/types/dist-es/extensions/checksum.js new file mode 100644 index 0000000..5a7939e --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@smithy/types/dist-es/extensions/checksum.js @@ -0,0 +1,38 @@ +export var AlgorithmId; +(function (AlgorithmId) { + AlgorithmId["MD5"] = "md5"; + 
AlgorithmId["CRC32"] = "crc32"; + AlgorithmId["CRC32C"] = "crc32c"; + AlgorithmId["SHA1"] = "sha1"; + AlgorithmId["SHA256"] = "sha256"; +})(AlgorithmId || (AlgorithmId = {})); +export const getChecksumConfiguration = (runtimeConfig) => { + const checksumAlgorithms = []; + if (runtimeConfig.sha256 !== undefined) { + checksumAlgorithms.push({ + algorithmId: () => AlgorithmId.SHA256, + checksumConstructor: () => runtimeConfig.sha256, + }); + } + if (runtimeConfig.md5 != undefined) { + checksumAlgorithms.push({ + algorithmId: () => AlgorithmId.MD5, + checksumConstructor: () => runtimeConfig.md5, + }); + } + return { + addChecksumAlgorithm(algo) { + checksumAlgorithms.push(algo); + }, + checksumAlgorithms() { + return checksumAlgorithms; + }, + }; +}; +export const resolveChecksumRuntimeConfig = (clientConfig) => { + const runtimeConfig = {}; + clientConfig.checksumAlgorithms().forEach((checksumAlgorithm) => { + runtimeConfig[checksumAlgorithm.algorithmId()] = checksumAlgorithm.checksumConstructor(); + }); + return runtimeConfig; +}; diff --git a/amplify/functions/deleteDocument/node_modules/@smithy/types/dist-es/extensions/defaultClientConfiguration.js b/amplify/functions/deleteDocument/node_modules/@smithy/types/dist-es/extensions/defaultClientConfiguration.js new file mode 100644 index 0000000..4e3eb91 --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@smithy/types/dist-es/extensions/defaultClientConfiguration.js @@ -0,0 +1,7 @@ +import { getChecksumConfiguration, resolveChecksumRuntimeConfig } from "./checksum"; +export const getDefaultClientConfiguration = (runtimeConfig) => { + return getChecksumConfiguration(runtimeConfig); +}; +export const resolveDefaultRuntimeConfig = (config) => { + return resolveChecksumRuntimeConfig(config); +}; diff --git a/amplify/functions/deleteDocument/node_modules/@smithy/types/dist-es/extensions/defaultExtensionConfiguration.js 
b/amplify/functions/deleteDocument/node_modules/@smithy/types/dist-es/extensions/defaultExtensionConfiguration.js new file mode 100644 index 0000000..cb0ff5c --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@smithy/types/dist-es/extensions/defaultExtensionConfiguration.js @@ -0,0 +1 @@ +export {}; diff --git a/amplify/functions/deleteDocument/node_modules/@smithy/types/dist-es/extensions/index.js b/amplify/functions/deleteDocument/node_modules/@smithy/types/dist-es/extensions/index.js new file mode 100644 index 0000000..0fa92d9 --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@smithy/types/dist-es/extensions/index.js @@ -0,0 +1,3 @@ +export * from "./defaultClientConfiguration"; +export * from "./defaultExtensionConfiguration"; +export { AlgorithmId } from "./checksum"; diff --git a/amplify/functions/deleteDocument/node_modules/@smithy/types/dist-es/extensions/retry.js b/amplify/functions/deleteDocument/node_modules/@smithy/types/dist-es/extensions/retry.js new file mode 100644 index 0000000..cb0ff5c --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@smithy/types/dist-es/extensions/retry.js @@ -0,0 +1 @@ +export {}; diff --git a/amplify/functions/deleteDocument/node_modules/@smithy/types/dist-es/externals-check/browser-externals-check.js b/amplify/functions/deleteDocument/node_modules/@smithy/types/dist-es/externals-check/browser-externals-check.js new file mode 100644 index 0000000..cb0ff5c --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@smithy/types/dist-es/externals-check/browser-externals-check.js @@ -0,0 +1 @@ +export {}; diff --git a/amplify/functions/deleteDocument/node_modules/@smithy/types/dist-es/feature-ids.js b/amplify/functions/deleteDocument/node_modules/@smithy/types/dist-es/feature-ids.js new file mode 100644 index 0000000..cb0ff5c --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@smithy/types/dist-es/feature-ids.js @@ -0,0 +1 @@ +export {}; diff --git 
a/amplify/functions/deleteDocument/node_modules/@smithy/types/dist-es/http.js b/amplify/functions/deleteDocument/node_modules/@smithy/types/dist-es/http.js new file mode 100644 index 0000000..27b22f0 --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@smithy/types/dist-es/http.js @@ -0,0 +1,5 @@ +export var FieldPosition; +(function (FieldPosition) { + FieldPosition[FieldPosition["HEADER"] = 0] = "HEADER"; + FieldPosition[FieldPosition["TRAILER"] = 1] = "TRAILER"; +})(FieldPosition || (FieldPosition = {})); diff --git a/amplify/functions/deleteDocument/node_modules/@smithy/types/dist-es/http/httpHandlerInitialization.js b/amplify/functions/deleteDocument/node_modules/@smithy/types/dist-es/http/httpHandlerInitialization.js new file mode 100644 index 0000000..cb0ff5c --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@smithy/types/dist-es/http/httpHandlerInitialization.js @@ -0,0 +1 @@ +export {}; diff --git a/amplify/functions/deleteDocument/node_modules/@smithy/types/dist-es/identity/apiKeyIdentity.js b/amplify/functions/deleteDocument/node_modules/@smithy/types/dist-es/identity/apiKeyIdentity.js new file mode 100644 index 0000000..cb0ff5c --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@smithy/types/dist-es/identity/apiKeyIdentity.js @@ -0,0 +1 @@ +export {}; diff --git a/amplify/functions/deleteDocument/node_modules/@smithy/types/dist-es/identity/awsCredentialIdentity.js b/amplify/functions/deleteDocument/node_modules/@smithy/types/dist-es/identity/awsCredentialIdentity.js new file mode 100644 index 0000000..cb0ff5c --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@smithy/types/dist-es/identity/awsCredentialIdentity.js @@ -0,0 +1 @@ +export {}; diff --git a/amplify/functions/deleteDocument/node_modules/@smithy/types/dist-es/identity/identity.js b/amplify/functions/deleteDocument/node_modules/@smithy/types/dist-es/identity/identity.js new file mode 100644 index 0000000..cb0ff5c --- /dev/null +++ 
b/amplify/functions/deleteDocument/node_modules/@smithy/types/dist-es/identity/identity.js @@ -0,0 +1 @@ +export {}; diff --git a/amplify/functions/deleteDocument/node_modules/@smithy/types/dist-es/identity/index.js b/amplify/functions/deleteDocument/node_modules/@smithy/types/dist-es/identity/index.js new file mode 100644 index 0000000..3360320 --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@smithy/types/dist-es/identity/index.js @@ -0,0 +1,4 @@ +export * from "./apiKeyIdentity"; +export * from "./awsCredentialIdentity"; +export * from "./identity"; +export * from "./tokenIdentity"; diff --git a/amplify/functions/deleteDocument/node_modules/@smithy/types/dist-es/identity/tokenIdentity.js b/amplify/functions/deleteDocument/node_modules/@smithy/types/dist-es/identity/tokenIdentity.js new file mode 100644 index 0000000..cb0ff5c --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@smithy/types/dist-es/identity/tokenIdentity.js @@ -0,0 +1 @@ +export {}; diff --git a/amplify/functions/deleteDocument/node_modules/@smithy/types/dist-es/index.js b/amplify/functions/deleteDocument/node_modules/@smithy/types/dist-es/index.js new file mode 100644 index 0000000..c370335 --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@smithy/types/dist-es/index.js @@ -0,0 +1,37 @@ +export * from "./abort"; +export * from "./auth"; +export * from "./blob/blob-payload-input-types"; +export * from "./checksum"; +export * from "./client"; +export * from "./command"; +export * from "./connection"; +export * from "./crypto"; +export * from "./encode"; +export * from "./endpoint"; +export * from "./endpoints"; +export * from "./eventStream"; +export * from "./extensions"; +export * from "./feature-ids"; +export * from "./http"; +export * from "./http/httpHandlerInitialization"; +export * from "./identity"; +export * from "./logger"; +export * from "./middleware"; +export * from "./pagination"; +export * from "./profile"; +export * from "./response"; 
+export * from "./retry"; +export * from "./serde"; +export * from "./shapes"; +export * from "./signature"; +export * from "./stream"; +export * from "./streaming-payload/streaming-blob-common-types"; +export * from "./streaming-payload/streaming-blob-payload-input-types"; +export * from "./streaming-payload/streaming-blob-payload-output-types"; +export * from "./transfer"; +export * from "./transform/client-payload-blob-type-narrow"; +export * from "./transform/no-undefined"; +export * from "./transform/type-transform"; +export * from "./uri"; +export * from "./util"; +export * from "./waiter"; diff --git a/amplify/functions/deleteDocument/node_modules/@smithy/types/dist-es/logger.js b/amplify/functions/deleteDocument/node_modules/@smithy/types/dist-es/logger.js new file mode 100644 index 0000000..cb0ff5c --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@smithy/types/dist-es/logger.js @@ -0,0 +1 @@ +export {}; diff --git a/amplify/functions/deleteDocument/node_modules/@smithy/types/dist-es/middleware.js b/amplify/functions/deleteDocument/node_modules/@smithy/types/dist-es/middleware.js new file mode 100644 index 0000000..7d0d050 --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@smithy/types/dist-es/middleware.js @@ -0,0 +1 @@ +export const SMITHY_CONTEXT_KEY = "__smithy_context"; diff --git a/amplify/functions/deleteDocument/node_modules/@smithy/types/dist-es/pagination.js b/amplify/functions/deleteDocument/node_modules/@smithy/types/dist-es/pagination.js new file mode 100644 index 0000000..cb0ff5c --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@smithy/types/dist-es/pagination.js @@ -0,0 +1 @@ +export {}; diff --git a/amplify/functions/deleteDocument/node_modules/@smithy/types/dist-es/profile.js b/amplify/functions/deleteDocument/node_modules/@smithy/types/dist-es/profile.js new file mode 100644 index 0000000..9d56c8d --- /dev/null +++ 
b/amplify/functions/deleteDocument/node_modules/@smithy/types/dist-es/profile.js @@ -0,0 +1,6 @@ +export var IniSectionType; +(function (IniSectionType) { + IniSectionType["PROFILE"] = "profile"; + IniSectionType["SSO_SESSION"] = "sso-session"; + IniSectionType["SERVICES"] = "services"; +})(IniSectionType || (IniSectionType = {})); diff --git a/amplify/functions/deleteDocument/node_modules/@smithy/types/dist-es/response.js b/amplify/functions/deleteDocument/node_modules/@smithy/types/dist-es/response.js new file mode 100644 index 0000000..cb0ff5c --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@smithy/types/dist-es/response.js @@ -0,0 +1 @@ +export {}; diff --git a/amplify/functions/deleteDocument/node_modules/@smithy/types/dist-es/retry.js b/amplify/functions/deleteDocument/node_modules/@smithy/types/dist-es/retry.js new file mode 100644 index 0000000..cb0ff5c --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@smithy/types/dist-es/retry.js @@ -0,0 +1 @@ +export {}; diff --git a/amplify/functions/deleteDocument/node_modules/@smithy/types/dist-es/serde.js b/amplify/functions/deleteDocument/node_modules/@smithy/types/dist-es/serde.js new file mode 100644 index 0000000..cb0ff5c --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@smithy/types/dist-es/serde.js @@ -0,0 +1 @@ +export {}; diff --git a/amplify/functions/deleteDocument/node_modules/@smithy/types/dist-es/shapes.js b/amplify/functions/deleteDocument/node_modules/@smithy/types/dist-es/shapes.js new file mode 100644 index 0000000..cb0ff5c --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@smithy/types/dist-es/shapes.js @@ -0,0 +1 @@ +export {}; diff --git a/amplify/functions/deleteDocument/node_modules/@smithy/types/dist-es/signature.js b/amplify/functions/deleteDocument/node_modules/@smithy/types/dist-es/signature.js new file mode 100644 index 0000000..cb0ff5c --- /dev/null +++ 
b/amplify/functions/deleteDocument/node_modules/@smithy/types/dist-es/signature.js @@ -0,0 +1 @@ +export {}; diff --git a/amplify/functions/deleteDocument/node_modules/@smithy/types/dist-es/stream.js b/amplify/functions/deleteDocument/node_modules/@smithy/types/dist-es/stream.js new file mode 100644 index 0000000..cb0ff5c --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@smithy/types/dist-es/stream.js @@ -0,0 +1 @@ +export {}; diff --git a/amplify/functions/deleteDocument/node_modules/@smithy/types/dist-es/streaming-payload/streaming-blob-common-types.js b/amplify/functions/deleteDocument/node_modules/@smithy/types/dist-es/streaming-payload/streaming-blob-common-types.js new file mode 100644 index 0000000..cb0ff5c --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@smithy/types/dist-es/streaming-payload/streaming-blob-common-types.js @@ -0,0 +1 @@ +export {}; diff --git a/amplify/functions/deleteDocument/node_modules/@smithy/types/dist-es/streaming-payload/streaming-blob-payload-input-types.js b/amplify/functions/deleteDocument/node_modules/@smithy/types/dist-es/streaming-payload/streaming-blob-payload-input-types.js new file mode 100644 index 0000000..cb0ff5c --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@smithy/types/dist-es/streaming-payload/streaming-blob-payload-input-types.js @@ -0,0 +1 @@ +export {}; diff --git a/amplify/functions/deleteDocument/node_modules/@smithy/types/dist-es/streaming-payload/streaming-blob-payload-output-types.js b/amplify/functions/deleteDocument/node_modules/@smithy/types/dist-es/streaming-payload/streaming-blob-payload-output-types.js new file mode 100644 index 0000000..cb0ff5c --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@smithy/types/dist-es/streaming-payload/streaming-blob-payload-output-types.js @@ -0,0 +1 @@ +export {}; diff --git a/amplify/functions/deleteDocument/node_modules/@smithy/types/dist-es/transfer.js 
b/amplify/functions/deleteDocument/node_modules/@smithy/types/dist-es/transfer.js new file mode 100644 index 0000000..f776151 --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@smithy/types/dist-es/transfer.js @@ -0,0 +1,6 @@ +export var RequestHandlerProtocol; +(function (RequestHandlerProtocol) { + RequestHandlerProtocol["HTTP_0_9"] = "http/0.9"; + RequestHandlerProtocol["HTTP_1_0"] = "http/1.0"; + RequestHandlerProtocol["TDS_8_0"] = "tds/8.0"; +})(RequestHandlerProtocol || (RequestHandlerProtocol = {})); diff --git a/amplify/functions/deleteDocument/node_modules/@smithy/types/dist-es/transform/client-method-transforms.js b/amplify/functions/deleteDocument/node_modules/@smithy/types/dist-es/transform/client-method-transforms.js new file mode 100644 index 0000000..cb0ff5c --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@smithy/types/dist-es/transform/client-method-transforms.js @@ -0,0 +1 @@ +export {}; diff --git a/amplify/functions/deleteDocument/node_modules/@smithy/types/dist-es/transform/client-payload-blob-type-narrow.js b/amplify/functions/deleteDocument/node_modules/@smithy/types/dist-es/transform/client-payload-blob-type-narrow.js new file mode 100644 index 0000000..cb0ff5c --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@smithy/types/dist-es/transform/client-payload-blob-type-narrow.js @@ -0,0 +1 @@ +export {}; diff --git a/amplify/functions/deleteDocument/node_modules/@smithy/types/dist-es/transform/exact.js b/amplify/functions/deleteDocument/node_modules/@smithy/types/dist-es/transform/exact.js new file mode 100644 index 0000000..cb0ff5c --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@smithy/types/dist-es/transform/exact.js @@ -0,0 +1 @@ +export {}; diff --git a/amplify/functions/deleteDocument/node_modules/@smithy/types/dist-es/transform/no-undefined.js b/amplify/functions/deleteDocument/node_modules/@smithy/types/dist-es/transform/no-undefined.js new file mode 100644 index 
0000000..cb0ff5c --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@smithy/types/dist-es/transform/no-undefined.js @@ -0,0 +1 @@ +export {}; diff --git a/amplify/functions/deleteDocument/node_modules/@smithy/types/dist-es/transform/type-transform.js b/amplify/functions/deleteDocument/node_modules/@smithy/types/dist-es/transform/type-transform.js new file mode 100644 index 0000000..cb0ff5c --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@smithy/types/dist-es/transform/type-transform.js @@ -0,0 +1 @@ +export {}; diff --git a/amplify/functions/deleteDocument/node_modules/@smithy/types/dist-es/uri.js b/amplify/functions/deleteDocument/node_modules/@smithy/types/dist-es/uri.js new file mode 100644 index 0000000..cb0ff5c --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@smithy/types/dist-es/uri.js @@ -0,0 +1 @@ +export {}; diff --git a/amplify/functions/deleteDocument/node_modules/@smithy/types/dist-es/util.js b/amplify/functions/deleteDocument/node_modules/@smithy/types/dist-es/util.js new file mode 100644 index 0000000..cb0ff5c --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@smithy/types/dist-es/util.js @@ -0,0 +1 @@ +export {}; diff --git a/amplify/functions/deleteDocument/node_modules/@smithy/types/dist-es/waiter.js b/amplify/functions/deleteDocument/node_modules/@smithy/types/dist-es/waiter.js new file mode 100644 index 0000000..cb0ff5c --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@smithy/types/dist-es/waiter.js @@ -0,0 +1 @@ +export {}; diff --git a/amplify/functions/deleteDocument/node_modules/@smithy/types/dist-types/abort-handler.d.ts b/amplify/functions/deleteDocument/node_modules/@smithy/types/dist-types/abort-handler.d.ts new file mode 100644 index 0000000..09a0544 --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@smithy/types/dist-types/abort-handler.d.ts @@ -0,0 +1,7 @@ +import type { AbortSignal as DeprecatedAbortSignal } from "./abort"; +/** + * 
@public + */ +export interface AbortHandler { + (this: AbortSignal | DeprecatedAbortSignal, ev: any): any; +} diff --git a/amplify/functions/deleteDocument/node_modules/@smithy/types/dist-types/abort.d.ts b/amplify/functions/deleteDocument/node_modules/@smithy/types/dist-types/abort.d.ts new file mode 100644 index 0000000..80fc87f --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@smithy/types/dist-types/abort.d.ts @@ -0,0 +1,50 @@ +import type { AbortHandler } from "./abort-handler"; +/** + * @public + */ +export { AbortHandler }; +/** + * @public + * @deprecated use platform (global) type for AbortSignal. + * + * Holders of an AbortSignal object may query if the associated operation has + * been aborted and register an onabort handler. + * + * @see https://developer.mozilla.org/en-US/docs/Web/API/AbortSignal + */ +export interface AbortSignal { + /** + * Whether the action represented by this signal has been cancelled. + */ + readonly aborted: boolean; + /** + * A function to be invoked when the action represented by this signal has + * been cancelled. + */ + onabort: AbortHandler | Function | null; +} +/** + * @public + * @deprecated use platform (global) type for AbortController. + * + * The AWS SDK uses a Controller/Signal model to allow for cooperative + * cancellation of asynchronous operations. When initiating such an operation, + * the caller can create an AbortController and then provide linked signal to + * subtasks. This allows a single source to communicate to multiple consumers + * that an action has been aborted without dictating how that cancellation + * should be handled. + * + * @see https://developer.mozilla.org/en-US/docs/Web/API/AbortController + */ +export interface AbortController { + /** + * An object that reports whether the action associated with this + * `AbortController` has been cancelled. 
+ */ + readonly signal: AbortSignal; + /** + * Declares the operation associated with this AbortController to have been + * cancelled. + */ + abort(): void; +} diff --git a/amplify/functions/deleteDocument/node_modules/@smithy/types/dist-types/auth/HttpApiKeyAuth.d.ts b/amplify/functions/deleteDocument/node_modules/@smithy/types/dist-types/auth/HttpApiKeyAuth.d.ts new file mode 100644 index 0000000..5d74340 --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@smithy/types/dist-types/auth/HttpApiKeyAuth.d.ts @@ -0,0 +1,7 @@ +/** + * @internal + */ +export declare enum HttpApiKeyAuthLocation { + HEADER = "header", + QUERY = "query" +} diff --git a/amplify/functions/deleteDocument/node_modules/@smithy/types/dist-types/auth/HttpAuthScheme.d.ts b/amplify/functions/deleteDocument/node_modules/@smithy/types/dist-types/auth/HttpAuthScheme.d.ts new file mode 100644 index 0000000..c5be532 --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@smithy/types/dist-types/auth/HttpAuthScheme.d.ts @@ -0,0 +1,49 @@ +import { Identity, IdentityProvider } from "../identity/identity"; +import { HandlerExecutionContext } from "../middleware"; +import { HttpSigner } from "./HttpSigner"; +import { IdentityProviderConfig } from "./IdentityProviderConfig"; +/** + * ID for {@link HttpAuthScheme} + * @internal + */ +export type HttpAuthSchemeId = string; +/** + * Interface that defines an HttpAuthScheme + * @internal + */ +export interface HttpAuthScheme { + /** + * ID for an HttpAuthScheme, typically the absolute shape ID of a Smithy auth trait. + */ + schemeId: HttpAuthSchemeId; + /** + * Gets the IdentityProvider corresponding to an HttpAuthScheme. + */ + identityProvider(config: IdentityProviderConfig): IdentityProvider | undefined; + /** + * HttpSigner corresponding to an HttpAuthScheme. + */ + signer: HttpSigner; +} +/** + * Interface that defines the identity and signing properties when selecting + * an HttpAuthScheme. 
+ * @internal + */ +export interface HttpAuthOption { + schemeId: HttpAuthSchemeId; + identityProperties?: Record; + signingProperties?: Record; + propertiesExtractor?: (config: TConfig, context: TContext) => { + identityProperties?: Record; + signingProperties?: Record; + }; +} +/** + * @internal + */ +export interface SelectedHttpAuthScheme { + httpAuthOption: HttpAuthOption; + identity: Identity; + signer: HttpSigner; +} diff --git a/amplify/functions/deleteDocument/node_modules/@smithy/types/dist-types/auth/HttpAuthSchemeProvider.d.ts b/amplify/functions/deleteDocument/node_modules/@smithy/types/dist-types/auth/HttpAuthSchemeProvider.d.ts new file mode 100644 index 0000000..710dc8f --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@smithy/types/dist-types/auth/HttpAuthSchemeProvider.d.ts @@ -0,0 +1,20 @@ +import { HandlerExecutionContext } from "../middleware"; +import { HttpAuthOption } from "./HttpAuthScheme"; +/** + * @internal + */ +export interface HttpAuthSchemeParameters { + operation?: string; +} +/** + * @internal + */ +export interface HttpAuthSchemeProvider { + (authParameters: TParameters): HttpAuthOption[]; +} +/** + * @internal + */ +export interface HttpAuthSchemeParametersProvider { + (config: TConfig, context: TContext, input: TInput): Promise; +} diff --git a/amplify/functions/deleteDocument/node_modules/@smithy/types/dist-types/auth/HttpSigner.d.ts b/amplify/functions/deleteDocument/node_modules/@smithy/types/dist-types/auth/HttpSigner.d.ts new file mode 100644 index 0000000..ea2969c --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@smithy/types/dist-types/auth/HttpSigner.d.ts @@ -0,0 +1,41 @@ +import { HttpRequest, HttpResponse } from "../http"; +import { Identity } from "../identity/identity"; +/** + * @internal + */ +export interface ErrorHandler { + (signingProperties: Record): (error: E) => never; +} +/** + * @internal + */ +export interface SuccessHandler { + (httpResponse: HttpResponse | unknown, 
signingProperties: Record): void; +} +/** + * Interface to sign identity and signing properties. + * @internal + */ +export interface HttpSigner { + /** + * Signs an HttpRequest with an identity and signing properties. + * @param httpRequest request to sign + * @param identity identity to sing the request with + * @param signingProperties property bag for signing + * @returns signed request in a promise + */ + sign(httpRequest: HttpRequest, identity: Identity, signingProperties: Record): Promise; + /** + * Handler that executes after the {@link HttpSigner.sign} invocation and corresponding + * middleware throws an error. + * The error handler is expected to throw the error it receives, so the return type of the error handler is `never`. + * @internal + */ + errorHandler?: ErrorHandler; + /** + * Handler that executes after the {@link HttpSigner.sign} invocation and corresponding + * middleware succeeds. + * @internal + */ + successHandler?: SuccessHandler; +} diff --git a/amplify/functions/deleteDocument/node_modules/@smithy/types/dist-types/auth/IdentityProviderConfig.d.ts b/amplify/functions/deleteDocument/node_modules/@smithy/types/dist-types/auth/IdentityProviderConfig.d.ts new file mode 100644 index 0000000..663d2ec --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@smithy/types/dist-types/auth/IdentityProviderConfig.d.ts @@ -0,0 +1,14 @@ +import { Identity, IdentityProvider } from "../identity/identity"; +import { HttpAuthSchemeId } from "./HttpAuthScheme"; +/** + * Interface to get an IdentityProvider for a specified HttpAuthScheme + * @internal + */ +export interface IdentityProviderConfig { + /** + * Get the IdentityProvider for a specified HttpAuthScheme. 
+ * @param schemeId schemeId of the HttpAuthScheme + * @returns IdentityProvider or undefined if HttpAuthScheme is not found + */ + getIdentityProvider(schemeId: HttpAuthSchemeId): IdentityProvider | undefined; +} diff --git a/amplify/functions/deleteDocument/node_modules/@smithy/types/dist-types/auth/auth.d.ts b/amplify/functions/deleteDocument/node_modules/@smithy/types/dist-types/auth/auth.d.ts new file mode 100644 index 0000000..2aaabbc --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@smithy/types/dist-types/auth/auth.d.ts @@ -0,0 +1,57 @@ +/** + * @internal + * + * Authentication schemes represent a way that the service will authenticate the customer’s identity. + */ +export interface AuthScheme { + /** + * @example "sigv4a" or "sigv4" + */ + name: "sigv4" | "sigv4a" | string; + /** + * @example "s3" + */ + signingName: string; + /** + * @example "us-east-1" + */ + signingRegion: string; + /** + * @example ["*"] + * @example ["us-west-2", "us-east-1"] + */ + signingRegionSet?: string[]; + /** + * @deprecated this field was renamed to signingRegion. + */ + signingScope?: never; + properties: Record; +} +/** + * @internal + * @deprecated + */ +export interface HttpAuthDefinition { + /** + * Defines the location of where the Auth is serialized. + */ + in: HttpAuthLocation; + /** + * Defines the name of the HTTP header or query string parameter + * that contains the Auth. + */ + name: string; + /** + * Defines the security scheme to use on the `Authorization` header value. + * This can only be set if the "in" property is set to {@link HttpAuthLocation.HEADER}. 
+ */ + scheme?: string; +} +/** + * @internal + * @deprecated + */ +export declare enum HttpAuthLocation { + HEADER = "header", + QUERY = "query" +} diff --git a/amplify/functions/deleteDocument/node_modules/@smithy/types/dist-types/auth/index.d.ts b/amplify/functions/deleteDocument/node_modules/@smithy/types/dist-types/auth/index.d.ts new file mode 100644 index 0000000..7436030 --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@smithy/types/dist-types/auth/index.d.ts @@ -0,0 +1,6 @@ +export * from "./auth"; +export * from "./HttpApiKeyAuth"; +export * from "./HttpAuthScheme"; +export * from "./HttpAuthSchemeProvider"; +export * from "./HttpSigner"; +export * from "./IdentityProviderConfig"; diff --git a/amplify/functions/deleteDocument/node_modules/@smithy/types/dist-types/blob/blob-payload-input-types.d.ts b/amplify/functions/deleteDocument/node_modules/@smithy/types/dist-types/blob/blob-payload-input-types.d.ts new file mode 100644 index 0000000..e468bae --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@smithy/types/dist-types/blob/blob-payload-input-types.d.ts @@ -0,0 +1,43 @@ +/// +/// +/// +import { Readable } from "stream"; +import type { BlobOptionalType, ReadableStreamOptionalType } from "../externals-check/browser-externals-check"; +/** + * @public + * + * A union of types that can be used as inputs for the service model + * "blob" type when it represents the request's entire payload or body. + * + * For example, in Lambda::invoke, the payload is modeled as a blob type + * and this union applies to it. + * In contrast, in Lambda::createFunction the Zip file option is a blob type, + * but is not the (entire) payload and this union does not apply. + * + * Note: not all types are signable by the standard SignatureV4 signer when + * used as the request body. For example, in Node.js a Readable stream + * is not signable by the default signer. 
+ * They are included in the union because it may work in some cases, + * but the expected types are primarily string and Uint8Array. + * + * Additional details may be found in the internal + * function "getPayloadHash" in the SignatureV4 module. + */ +export type BlobPayloadInputTypes = string | ArrayBuffer | ArrayBufferView | Uint8Array | NodeJsRuntimeBlobTypes | BrowserRuntimeBlobTypes; +/** + * @public + * + * Additional blob types for the Node.js environment. + */ +export type NodeJsRuntimeBlobTypes = Readable | Buffer; +/** + * @public + * + * Additional blob types for the browser environment. + */ +export type BrowserRuntimeBlobTypes = BlobOptionalType | ReadableStreamOptionalType; +/** + * @internal + * @deprecated renamed to BlobPayloadInputTypes. + */ +export type BlobTypes = BlobPayloadInputTypes; diff --git a/amplify/functions/deleteDocument/node_modules/@smithy/types/dist-types/checksum.d.ts b/amplify/functions/deleteDocument/node_modules/@smithy/types/dist-types/checksum.d.ts new file mode 100644 index 0000000..1906009 --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@smithy/types/dist-types/checksum.d.ts @@ -0,0 +1,63 @@ +import { SourceData } from "./crypto"; +/** + * @public + * + * An object that provides a checksum of data provided in chunks to `update`. + * The checksum may be performed incrementally as chunks are received or all + * at once when the checksum is finalized, depending on the underlying + * implementation. + * + * It's recommended to compute checksum incrementally to avoid reading the + * entire payload in memory. + * + * A class that implements this interface may accept an optional secret key in its + * constructor while computing checksum value, when using HMAC. If provided, + * this secret key would be used when computing checksum. + */ +export interface Checksum { + /** + * Constant length of the digest created by the algorithm in bytes. 
+ */ + digestLength?: number; + /** + * Creates a new checksum object that contains a deep copy of the internal + * state of the current `Checksum` object. + */ + copy?(): Checksum; + /** + * Returns the digest of all of the data passed. + */ + digest(): Promise; + /** + * Allows marking a checksum for checksums that support the ability + * to mark and reset. + * + * @param readLimit - The maximum limit of bytes that can be read + * before the mark position becomes invalid. + */ + mark?(readLimit: number): void; + /** + * Resets the checksum to its initial value. + */ + reset(): void; + /** + * Adds a chunk of data for which checksum needs to be computed. + * This can be called many times with new data as it is streamed. + * + * Implementations may override this method which passes second param + * which makes Checksum object stateless. + * + * @param chunk - The buffer to update checksum with. + */ + update(chunk: Uint8Array): void; +} +/** + * @public + * + * A constructor for a Checksum that may be used to calculate an HMAC. Implementing + * classes should not directly hold the provided key in memory beyond the + * lexical scope of the constructor. + */ +export interface ChecksumConstructor { + new (secret?: SourceData): Checksum; +} diff --git a/amplify/functions/deleteDocument/node_modules/@smithy/types/dist-types/client.d.ts b/amplify/functions/deleteDocument/node_modules/@smithy/types/dist-types/client.d.ts new file mode 100644 index 0000000..8bd8f7e --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@smithy/types/dist-types/client.d.ts @@ -0,0 +1,57 @@ +import { Command } from "./command"; +import { MiddlewareStack } from "./middleware"; +import { MetadataBearer } from "./response"; +import { OptionalParameter } from "./util"; +/** + * @public + * + * A type which checks if the client configuration is optional. + * If all entries of the client configuration are optional, it allows client creation without passing any config. 
+ */ +export type CheckOptionalClientConfig = OptionalParameter; +/** + * @public + * + * function definition for different overrides of client's 'send' function. + */ +export interface InvokeFunction { + (command: Command, options?: any): Promise; + (command: Command, cb: (err: any, data?: OutputType) => void): void; + (command: Command, options: any, cb: (err: any, data?: OutputType) => void): void; + (command: Command, options?: any, cb?: (err: any, data?: OutputType) => void): Promise | void; +} +/** + * @public + * + * Signature that appears on aggregated clients' methods. + */ +export interface InvokeMethod { + (input: InputType, options?: any): Promise; + (input: InputType, cb: (err: any, data?: OutputType) => void): void; + (input: InputType, options: any, cb: (err: any, data?: OutputType) => void): void; + (input: InputType, options?: any, cb?: (err: any, data?: OutputType) => void): Promise | void; +} +/** + * @public + * + * Signature that appears on aggregated clients' methods when argument is optional. + */ +export interface InvokeMethodOptionalArgs { + (): Promise; + (input: InputType, options?: any): Promise; + (input: InputType, cb: (err: any, data?: OutputType) => void): void; + (input: InputType, options: any, cb: (err: any, data?: OutputType) => void): void; + (input: InputType, options?: any, cb?: (err: any, data?: OutputType) => void): Promise | void; +} +/** + * A general interface for service clients, idempotent to browser or node clients + * This type corresponds to SmithyClient(https://github.com/aws/aws-sdk-js-v3/blob/main/packages/smithy-client/src/client.ts). + * It's provided for using without importing the SmithyClient class. 
+ * @internal + */ +export interface Client { + readonly config: ResolvedClientConfiguration; + middlewareStack: MiddlewareStack; + send: InvokeFunction; + destroy: () => void; +} diff --git a/amplify/functions/deleteDocument/node_modules/@smithy/types/dist-types/command.d.ts b/amplify/functions/deleteDocument/node_modules/@smithy/types/dist-types/command.d.ts new file mode 100644 index 0000000..3a71ee7 --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@smithy/types/dist-types/command.d.ts @@ -0,0 +1,23 @@ +import { Handler, MiddlewareStack } from "./middleware"; +import { MetadataBearer } from "./response"; +/** + * @public + */ +export interface Command extends CommandIO { + readonly input: InputType; + readonly middlewareStack: MiddlewareStack; + resolveMiddleware(stack: MiddlewareStack, configuration: ResolvedConfiguration, options: any): Handler; +} +/** + * @internal + * + * This is a subset of the Command type used only to detect the i/o types. + */ +export interface CommandIO { + readonly input: InputType; + resolveMiddleware(stack: any, configuration: any, options: any): Handler; +} +/** + * @internal + */ +export type GetOutputType = Command extends CommandIO ? O : never; diff --git a/amplify/functions/deleteDocument/node_modules/@smithy/types/dist-types/connection/config.d.ts b/amplify/functions/deleteDocument/node_modules/@smithy/types/dist-types/connection/config.d.ts new file mode 100644 index 0000000..f9d4632 --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@smithy/types/dist-types/connection/config.d.ts @@ -0,0 +1,10 @@ +/** + * @public + */ +export interface ConnectConfiguration { + /** + * The maximum time in milliseconds that the connection phase of a request + * may take before the connection attempt is abandoned. 
+ */ + requestTimeout?: number; +} diff --git a/amplify/functions/deleteDocument/node_modules/@smithy/types/dist-types/connection/index.d.ts b/amplify/functions/deleteDocument/node_modules/@smithy/types/dist-types/connection/index.d.ts new file mode 100644 index 0000000..c6c3ea8 --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@smithy/types/dist-types/connection/index.d.ts @@ -0,0 +1,3 @@ +export * from "./config"; +export * from "./manager"; +export * from "./pool"; diff --git a/amplify/functions/deleteDocument/node_modules/@smithy/types/dist-types/connection/manager.d.ts b/amplify/functions/deleteDocument/node_modules/@smithy/types/dist-types/connection/manager.d.ts new file mode 100644 index 0000000..5b1a837 --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@smithy/types/dist-types/connection/manager.d.ts @@ -0,0 +1,34 @@ +import { RequestContext } from "../transfer"; +import { ConnectConfiguration } from "./config"; +/** + * @public + */ +export interface ConnectionManagerConfiguration { + /** + * Maximum number of allowed concurrent requests per connection. + */ + maxConcurrency?: number; + /** + * Disables concurrent requests per connection. + */ + disableConcurrency?: boolean; +} +/** + * @public + */ +export interface ConnectionManager { + /** + * Retrieves a connection from the connection pool if available, + * otherwise establish a new connection + */ + lease(requestContext: RequestContext, connectionConfiguration: ConnectConfiguration): T; + /** + * Releases the connection back to the pool making it potentially + * re-usable by other requests. + */ + release(requestContext: RequestContext, connection: T): void; + /** + * Destroys the connection manager. All connections will be closed. 
+ */ + destroy(): void; +} diff --git a/amplify/functions/deleteDocument/node_modules/@smithy/types/dist-types/connection/pool.d.ts b/amplify/functions/deleteDocument/node_modules/@smithy/types/dist-types/connection/pool.d.ts new file mode 100644 index 0000000..d43530a --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@smithy/types/dist-types/connection/pool.d.ts @@ -0,0 +1,32 @@ +/** + * @public + */ +export interface ConnectionPool { + /** + * Retrieve the first connection in the pool + */ + poll(): T | void; + /** + * Release the connection back to the pool making it potentially + * re-usable by other requests. + */ + offerLast(connection: T): void; + /** + * Removes the connection from the pool, and destroys it. + */ + destroy(connection: T): void; + /** + * Implements the iterable protocol and allows arrays to be consumed + * by most syntaxes expecting iterables, such as the spread syntax + * and for...of loops + */ + [Symbol.iterator](): Iterator; +} +/** + * Unused. + * @internal + * @deprecated + */ +export interface CacheKey { + destination: string; +} diff --git a/amplify/functions/deleteDocument/node_modules/@smithy/types/dist-types/crypto.d.ts b/amplify/functions/deleteDocument/node_modules/@smithy/types/dist-types/crypto.d.ts new file mode 100644 index 0000000..874320e --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@smithy/types/dist-types/crypto.d.ts @@ -0,0 +1,60 @@ +/** + * @public + */ +export type SourceData = string | ArrayBuffer | ArrayBufferView; +/** + * @public + * + * An object that provides a hash of data provided in chunks to `update`. The + * hash may be performed incrementally as chunks are received or all at once + * when the hash is finalized, depending on the underlying implementation. + * + * @deprecated use {@link Checksum} + */ +export interface Hash { + /** + * Adds a chunk of data to the hash. If a buffer is provided, the `encoding` + * argument will be ignored. 
If a string is provided without a specified + * encoding, implementations must assume UTF-8 encoding. + * + * Not all encodings are supported on all platforms, though all must support + * UTF-8. + */ + update(toHash: SourceData, encoding?: "utf8" | "ascii" | "latin1"): void; + /** + * Finalizes the hash and provides a promise that will be fulfilled with the + * raw bytes of the calculated hash. + */ + digest(): Promise; +} +/** + * @public + * + * A constructor for a hash that may be used to calculate an HMAC. Implementing + * classes should not directly hold the provided key in memory beyond the + * lexical scope of the constructor. + * + * @deprecated use {@link ChecksumConstructor} + */ +export interface HashConstructor { + new (secret?: SourceData): Hash; +} +/** + * @public + * + * A function that calculates the hash of a data stream. Determining the hash + * will consume the stream, so only replayable streams should be provided to an + * implementation of this interface. + */ +export interface StreamHasher { + (hashCtor: HashConstructor, stream: StreamType): Promise; +} +/** + * @public + * + * A function that returns a promise fulfilled with bytes from a + * cryptographically secure pseudorandom number generator. + */ +export interface randomValues { + (byteLength: number): Promise; +} diff --git a/amplify/functions/deleteDocument/node_modules/@smithy/types/dist-types/downlevel-ts3.4/transform/type-transform.d.ts b/amplify/functions/deleteDocument/node_modules/@smithy/types/dist-types/downlevel-ts3.4/transform/type-transform.d.ts new file mode 100644 index 0000000..312ae6e --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@smithy/types/dist-types/downlevel-ts3.4/transform/type-transform.d.ts @@ -0,0 +1,25 @@ +/** + * @public + * + * Transforms any members of the object T having type FromType + * to ToType. This applies only to exact type matches. 
+ * + * This is for the case where FromType is a union and only those fields + * matching the same union should be transformed. + */ +export type Transform = RecursiveTransformExact; +/** + * @internal + * + * Returns ToType if T matches exactly with FromType. + */ +type TransformExact = [T] extends [FromType] ? ([FromType] extends [T] ? ToType : T) : T; +/** + * @internal + * + * Applies TransformExact to members of an object recursively. + */ +type RecursiveTransformExact = T extends Function ? T : T extends object ? { + [key in keyof T]: [T[key]] extends [FromType] ? [FromType] extends [T[key]] ? ToType : RecursiveTransformExact : RecursiveTransformExact; +} : TransformExact; +export {}; diff --git a/amplify/functions/deleteDocument/node_modules/@smithy/types/dist-types/encode.d.ts b/amplify/functions/deleteDocument/node_modules/@smithy/types/dist-types/encode.d.ts new file mode 100644 index 0000000..27d3a18 --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@smithy/types/dist-types/encode.d.ts @@ -0,0 +1,31 @@ +import { Message } from "./eventStream"; +/** + * @public + */ +export interface MessageEncoder { + encode(message: Message): Uint8Array; +} +/** + * @public + */ +export interface MessageDecoder { + decode(message: ArrayBufferView): Message; + feed(message: ArrayBufferView): void; + endOfStream(): void; + getMessage(): AvailableMessage; + getAvailableMessages(): AvailableMessages; +} +/** + * @public + */ +export interface AvailableMessage { + getMessage(): Message | undefined; + isEndOfStream(): boolean; +} +/** + * @public + */ +export interface AvailableMessages { + getMessages(): Message[]; + isEndOfStream(): boolean; +} diff --git a/amplify/functions/deleteDocument/node_modules/@smithy/types/dist-types/endpoint.d.ts b/amplify/functions/deleteDocument/node_modules/@smithy/types/dist-types/endpoint.d.ts new file mode 100644 index 0000000..4e93733 --- /dev/null +++ 
b/amplify/functions/deleteDocument/node_modules/@smithy/types/dist-types/endpoint.d.ts @@ -0,0 +1,77 @@ +import { AuthScheme } from "./auth/auth"; +/** + * @public + */ +export interface EndpointPartition { + name: string; + dnsSuffix: string; + dualStackDnsSuffix: string; + supportsFIPS: boolean; + supportsDualStack: boolean; +} +/** + * @public + */ +export interface EndpointARN { + partition: string; + service: string; + region: string; + accountId: string; + resourceId: Array; +} +/** + * @public + */ +export declare enum EndpointURLScheme { + HTTP = "http", + HTTPS = "https" +} +/** + * @public + */ +export interface EndpointURL { + /** + * The URL scheme such as http or https. + */ + scheme: EndpointURLScheme; + /** + * The authority is the host and optional port component of the URL. + */ + authority: string; + /** + * The parsed path segment of the URL. + * This value is as-is as provided by the user. + */ + path: string; + /** + * The parsed path segment of the URL. + * This value is guranteed to start and end with a "/". + */ + normalizedPath: string; + /** + * A boolean indicating whether the authority is an IP address. 
+ */ + isIp: boolean; +} +/** + * @public + */ +export type EndpointObjectProperty = string | boolean | { + [key: string]: EndpointObjectProperty; +} | EndpointObjectProperty[]; +/** + * @public + */ +export interface EndpointV2 { + url: URL; + properties?: { + authSchemes?: AuthScheme[]; + } & Record; + headers?: Record; +} +/** + * @public + */ +export type EndpointParameters = { + [name: string]: undefined | boolean | string | string[]; +}; diff --git a/amplify/functions/deleteDocument/node_modules/@smithy/types/dist-types/endpoints/EndpointRuleObject.d.ts b/amplify/functions/deleteDocument/node_modules/@smithy/types/dist-types/endpoints/EndpointRuleObject.d.ts new file mode 100644 index 0000000..349558e --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@smithy/types/dist-types/endpoints/EndpointRuleObject.d.ts @@ -0,0 +1,27 @@ +import { EndpointObjectProperty } from "../endpoint"; +import { ConditionObject, Expression } from "./shared"; +/** + * @public + */ +export type EndpointObjectProperties = Record; +/** + * @public + */ +export type EndpointObjectHeaders = Record; +/** + * @public + */ +export type EndpointObject = { + url: Expression; + properties?: EndpointObjectProperties; + headers?: EndpointObjectHeaders; +}; +/** + * @public + */ +export type EndpointRuleObject = { + type: "endpoint"; + conditions?: ConditionObject[]; + endpoint: EndpointObject; + documentation?: string; +}; diff --git a/amplify/functions/deleteDocument/node_modules/@smithy/types/dist-types/endpoints/ErrorRuleObject.d.ts b/amplify/functions/deleteDocument/node_modules/@smithy/types/dist-types/endpoints/ErrorRuleObject.d.ts new file mode 100644 index 0000000..9ce0733 --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@smithy/types/dist-types/endpoints/ErrorRuleObject.d.ts @@ -0,0 +1,10 @@ +import { ConditionObject, Expression } from "./shared"; +/** + * @public + */ +export type ErrorRuleObject = { + type: "error"; + conditions?: ConditionObject[]; + 
error: Expression; + documentation?: string; +}; diff --git a/amplify/functions/deleteDocument/node_modules/@smithy/types/dist-types/endpoints/RuleSetObject.d.ts b/amplify/functions/deleteDocument/node_modules/@smithy/types/dist-types/endpoints/RuleSetObject.d.ts new file mode 100644 index 0000000..669b591 --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@smithy/types/dist-types/endpoints/RuleSetObject.d.ts @@ -0,0 +1,28 @@ +import { RuleSetRules } from "./TreeRuleObject"; +/** + * @public + */ +export type DeprecatedObject = { + message?: string; + since?: string; +}; +/** + * @public + */ +export type ParameterObject = { + type: "String" | "string" | "Boolean" | "boolean"; + default?: string | boolean; + required?: boolean; + documentation?: string; + builtIn?: string; + deprecated?: DeprecatedObject; +}; +/** + * @public + */ +export type RuleSetObject = { + version: string; + serviceId?: string; + parameters: Record; + rules: RuleSetRules; +}; diff --git a/amplify/functions/deleteDocument/node_modules/@smithy/types/dist-types/endpoints/TreeRuleObject.d.ts b/amplify/functions/deleteDocument/node_modules/@smithy/types/dist-types/endpoints/TreeRuleObject.d.ts new file mode 100644 index 0000000..180d306 --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@smithy/types/dist-types/endpoints/TreeRuleObject.d.ts @@ -0,0 +1,16 @@ +import { EndpointRuleObject } from "./EndpointRuleObject"; +import { ErrorRuleObject } from "./ErrorRuleObject"; +import { ConditionObject } from "./shared"; +/** + * @public + */ +export type RuleSetRules = Array; +/** + * @public + */ +export type TreeRuleObject = { + type: "tree"; + conditions?: ConditionObject[]; + rules: RuleSetRules; + documentation?: string; +}; diff --git a/amplify/functions/deleteDocument/node_modules/@smithy/types/dist-types/endpoints/index.d.ts b/amplify/functions/deleteDocument/node_modules/@smithy/types/dist-types/endpoints/index.d.ts new file mode 100644 index 0000000..64d85cf --- 
/dev/null +++ b/amplify/functions/deleteDocument/node_modules/@smithy/types/dist-types/endpoints/index.d.ts @@ -0,0 +1,5 @@ +export * from "./EndpointRuleObject"; +export * from "./ErrorRuleObject"; +export * from "./RuleSetObject"; +export * from "./shared"; +export * from "./TreeRuleObject"; diff --git a/amplify/functions/deleteDocument/node_modules/@smithy/types/dist-types/endpoints/shared.d.ts b/amplify/functions/deleteDocument/node_modules/@smithy/types/dist-types/endpoints/shared.d.ts new file mode 100644 index 0000000..bd11393 --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@smithy/types/dist-types/endpoints/shared.d.ts @@ -0,0 +1,55 @@ +import { Logger } from "../logger"; +/** + * @public + */ +export type ReferenceObject = { + ref: string; +}; +/** + * @public + */ +export type FunctionObject = { + fn: string; + argv: FunctionArgv; +}; +/** + * @public + */ +export type FunctionArgv = Array; +/** + * @public + */ +export type FunctionReturn = string | boolean | number | { + [key: string]: FunctionReturn; +}; +/** + * @public + */ +export type ConditionObject = FunctionObject & { + assign?: string; +}; +/** + * @public + */ +export type Expression = string | ReferenceObject | FunctionObject; +/** + * @public + */ +export type EndpointParams = Record; +/** + * @public + */ +export type EndpointResolverOptions = { + endpointParams: EndpointParams; + logger?: Logger; +}; +/** + * @public + */ +export type ReferenceRecord = Record; +/** + * @public + */ +export type EvaluateOptions = EndpointResolverOptions & { + referenceRecord: ReferenceRecord; +}; diff --git a/amplify/functions/deleteDocument/node_modules/@smithy/types/dist-types/eventStream.d.ts b/amplify/functions/deleteDocument/node_modules/@smithy/types/dist-types/eventStream.d.ts new file mode 100644 index 0000000..7b9af6c --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@smithy/types/dist-types/eventStream.d.ts @@ -0,0 +1,137 @@ +import { HttpRequest } from "./http"; 
+import { FinalizeHandler, FinalizeHandlerArguments, FinalizeHandlerOutput, HandlerExecutionContext } from "./middleware"; +import { MetadataBearer } from "./response"; +/** + * @public + * + * An event stream message. The headers and body properties will always be + * defined, with empty headers represented as an object with no keys and an + * empty body represented as a zero-length Uint8Array. + */ +export interface Message { + headers: MessageHeaders; + body: Uint8Array; +} +/** + * @public + */ +export type MessageHeaders = Record; +/** + * @public + */ +export type HeaderValue = { + type: K; + value: V; +}; +/** + * @public + */ +export type BooleanHeaderValue = HeaderValue<"boolean", boolean>; +/** + * @public + */ +export type ByteHeaderValue = HeaderValue<"byte", number>; +/** + * @public + */ +export type ShortHeaderValue = HeaderValue<"short", number>; +/** + * @public + */ +export type IntegerHeaderValue = HeaderValue<"integer", number>; +/** + * @public + */ +export type LongHeaderValue = HeaderValue<"long", Int64>; +/** + * @public + */ +export type BinaryHeaderValue = HeaderValue<"binary", Uint8Array>; +/** + * @public + */ +export type StringHeaderValue = HeaderValue<"string", string>; +/** + * @public + */ +export type TimestampHeaderValue = HeaderValue<"timestamp", Date>; +/** + * @public + */ +export type UuidHeaderValue = HeaderValue<"uuid", string>; +/** + * @public + */ +export type MessageHeaderValue = BooleanHeaderValue | ByteHeaderValue | ShortHeaderValue | IntegerHeaderValue | LongHeaderValue | BinaryHeaderValue | StringHeaderValue | TimestampHeaderValue | UuidHeaderValue; +/** + * @public + */ +export interface Int64 { + readonly bytes: Uint8Array; + valueOf: () => number; + toString: () => string; +} +/** + * @public + * + * Util functions for serializing or deserializing event stream + */ +export interface EventStreamSerdeContext { + eventStreamMarshaller: EventStreamMarshaller; +} +/** + * @public + * + * A function which deserializes 
binary event stream message into modeled shape. + */ +export interface EventStreamMarshallerDeserFn { + (body: StreamType, deserializer: (input: Record) => Promise): AsyncIterable; +} +/** + * @public + * + * A function that serializes modeled shape into binary stream message. + */ +export interface EventStreamMarshallerSerFn { + (input: AsyncIterable, serializer: (event: T) => Message): StreamType; +} +/** + * @public + * + * An interface which provides functions for serializing and deserializing binary event stream + * to/from corresponsing modeled shape. + */ +export interface EventStreamMarshaller { + deserialize: EventStreamMarshallerDeserFn; + serialize: EventStreamMarshallerSerFn; +} +/** + * @public + */ +export interface EventStreamRequestSigner { + sign(request: HttpRequest): Promise; +} +/** + * @public + */ +export interface EventStreamPayloadHandler { + handle: (next: FinalizeHandler, args: FinalizeHandlerArguments, context?: HandlerExecutionContext) => Promise>; +} +/** + * @public + */ +export interface EventStreamPayloadHandlerProvider { + (options: any): EventStreamPayloadHandler; +} +/** + * @public + */ +export interface EventStreamSerdeProvider { + (options: any): EventStreamMarshaller; +} +/** + * @public + */ +export interface EventStreamSignerProvider { + (options: any): EventStreamRequestSigner; +} diff --git a/amplify/functions/deleteDocument/node_modules/@smithy/types/dist-types/extensions/checksum.d.ts b/amplify/functions/deleteDocument/node_modules/@smithy/types/dist-types/extensions/checksum.d.ts new file mode 100644 index 0000000..88995b9 --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@smithy/types/dist-types/extensions/checksum.d.ts @@ -0,0 +1,58 @@ +import { ChecksumConstructor } from "../checksum"; +import { HashConstructor } from "../crypto"; +/** + * @internal + */ +export declare enum AlgorithmId { + MD5 = "md5", + CRC32 = "crc32", + CRC32C = "crc32c", + SHA1 = "sha1", + SHA256 = "sha256" +} +/** + * @internal 
+ */ +export interface ChecksumAlgorithm { + algorithmId(): AlgorithmId; + checksumConstructor(): ChecksumConstructor | HashConstructor; +} +/** + * @deprecated unused. + * @internal + */ +type ChecksumConfigurationLegacy = { + [other in string | number]: any; +}; +/** + * @internal + */ +export interface ChecksumConfiguration extends ChecksumConfigurationLegacy { + addChecksumAlgorithm(algo: ChecksumAlgorithm): void; + checksumAlgorithms(): ChecksumAlgorithm[]; +} +/** + * @deprecated will be removed for implicit type. + * @internal + */ +type GetChecksumConfigurationType = (runtimeConfig: Partial<{ + sha256: ChecksumConstructor | HashConstructor; + md5: ChecksumConstructor | HashConstructor; +}>) => ChecksumConfiguration; +/** + * @internal + * @deprecated will be moved to smithy-client. + */ +export declare const getChecksumConfiguration: GetChecksumConfigurationType; +/** + * @internal + * @deprecated will be removed for implicit type. + */ +type ResolveChecksumRuntimeConfigType = (clientConfig: ChecksumConfiguration) => any; +/** + * @internal + * + * @deprecated will be moved to smithy-client. + */ +export declare const resolveChecksumRuntimeConfig: ResolveChecksumRuntimeConfigType; +export {}; diff --git a/amplify/functions/deleteDocument/node_modules/@smithy/types/dist-types/extensions/defaultClientConfiguration.d.ts b/amplify/functions/deleteDocument/node_modules/@smithy/types/dist-types/extensions/defaultClientConfiguration.d.ts new file mode 100644 index 0000000..12eb924 --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@smithy/types/dist-types/extensions/defaultClientConfiguration.d.ts @@ -0,0 +1,33 @@ +import { ChecksumConfiguration } from "./checksum"; +/** + * @deprecated will be replaced by DefaultExtensionConfiguration. 
+ * @internal + * + * Default client configuration consisting various configurations for modifying a service client + */ +export interface DefaultClientConfiguration extends ChecksumConfiguration { +} +/** + * @deprecated will be removed for implicit type. + */ +type GetDefaultConfigurationType = (runtimeConfig: any) => DefaultClientConfiguration; +/** + * @deprecated moving to @smithy/smithy-client. + * @internal + * + * Helper function to resolve default client configuration from runtime config + * + */ +export declare const getDefaultClientConfiguration: GetDefaultConfigurationType; +/** + * @deprecated will be removed for implicit type. + */ +type ResolveDefaultRuntimeConfigType = (clientConfig: DefaultClientConfiguration) => any; +/** + * @deprecated moving to @smithy/smithy-client. + * @internal + * + * Helper function to resolve runtime config from default client configuration + */ +export declare const resolveDefaultRuntimeConfig: ResolveDefaultRuntimeConfigType; +export {}; diff --git a/amplify/functions/deleteDocument/node_modules/@smithy/types/dist-types/extensions/defaultExtensionConfiguration.d.ts b/amplify/functions/deleteDocument/node_modules/@smithy/types/dist-types/extensions/defaultExtensionConfiguration.d.ts new file mode 100644 index 0000000..0e6fa0d --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@smithy/types/dist-types/extensions/defaultExtensionConfiguration.d.ts @@ -0,0 +1,9 @@ +import { ChecksumConfiguration } from "./checksum"; +import { RetryStrategyConfiguration } from "./retry"; +/** + * @internal + * + * Default extension configuration consisting various configurations for modifying a service client + */ +export interface DefaultExtensionConfiguration extends ChecksumConfiguration, RetryStrategyConfiguration { +} diff --git a/amplify/functions/deleteDocument/node_modules/@smithy/types/dist-types/extensions/index.d.ts b/amplify/functions/deleteDocument/node_modules/@smithy/types/dist-types/extensions/index.d.ts new 
file mode 100644 index 0000000..cce65a1 --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@smithy/types/dist-types/extensions/index.d.ts @@ -0,0 +1,4 @@ +export * from "./defaultClientConfiguration"; +export * from "./defaultExtensionConfiguration"; +export { AlgorithmId, ChecksumAlgorithm, ChecksumConfiguration } from "./checksum"; +export { RetryStrategyConfiguration } from "./retry"; diff --git a/amplify/functions/deleteDocument/node_modules/@smithy/types/dist-types/extensions/retry.d.ts b/amplify/functions/deleteDocument/node_modules/@smithy/types/dist-types/extensions/retry.d.ts new file mode 100644 index 0000000..8b91f1c --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@smithy/types/dist-types/extensions/retry.d.ts @@ -0,0 +1,18 @@ +import { RetryStrategyV2 } from "../retry"; +import { Provider, RetryStrategy } from "../util"; +/** + * A configuration interface with methods called by runtime extension + * @internal + */ +export interface RetryStrategyConfiguration { + /** + * Set retry strategy used for all http requests + * @param retryStrategy + */ + setRetryStrategy(retryStrategy: Provider): void; + /** + * Get retry strategy used for all http requests + * @param retryStrategy + */ + retryStrategy(): Provider; +} diff --git a/amplify/functions/deleteDocument/node_modules/@smithy/types/dist-types/externals-check/browser-externals-check.d.ts b/amplify/functions/deleteDocument/node_modules/@smithy/types/dist-types/externals-check/browser-externals-check.d.ts new file mode 100644 index 0000000..0de7f8f --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@smithy/types/dist-types/externals-check/browser-externals-check.d.ts @@ -0,0 +1,35 @@ +import type { Exact } from "../transform/exact"; +/** + * @public + * + * A checked type that resolves to Blob if it is defined as more than a stub, otherwise + * resolves to 'never' so as not to widen the type of unions containing Blob + * excessively. 
+ */ +export type BlobOptionalType = BlobDefined extends true ? Blob : Unavailable; +/** + * @public + * + * A checked type that resolves to ReadableStream if it is defined as more than a stub, otherwise + * resolves to 'never' so as not to widen the type of unions containing ReadableStream + * excessively. + */ +export type ReadableStreamOptionalType = ReadableStreamDefined extends true ? ReadableStream : Unavailable; +/** + * @public + * + * Indicates a type is unavailable if it resolves to this. + */ +export type Unavailable = never; +/** + * @internal + * + * Whether the global types define more than a stub for ReadableStream. + */ +export type ReadableStreamDefined = Exact extends true ? false : true; +/** + * @internal + * + * Whether the global types define more than a stub for Blob. + */ +export type BlobDefined = Exact extends true ? false : true; diff --git a/amplify/functions/deleteDocument/node_modules/@smithy/types/dist-types/feature-ids.d.ts b/amplify/functions/deleteDocument/node_modules/@smithy/types/dist-types/feature-ids.d.ts new file mode 100644 index 0000000..19e4bd2 --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@smithy/types/dist-types/feature-ids.d.ts @@ -0,0 +1,16 @@ +/** + * @internal + */ +export type SmithyFeatures = Partial<{ + RESOURCE_MODEL: "A"; + WAITER: "B"; + PAGINATOR: "C"; + RETRY_MODE_LEGACY: "D"; + RETRY_MODE_STANDARD: "E"; + RETRY_MODE_ADAPTIVE: "F"; + GZIP_REQUEST_COMPRESSION: "L"; + PROTOCOL_RPC_V2_CBOR: "M"; + ENDPOINT_OVERRIDE: "N"; + SIGV4A_SIGNING: "S"; + CREDENTIALS_CODE: "e"; +}>; diff --git a/amplify/functions/deleteDocument/node_modules/@smithy/types/dist-types/http.d.ts b/amplify/functions/deleteDocument/node_modules/@smithy/types/dist-types/http.d.ts new file mode 100644 index 0000000..76c6cb2 --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@smithy/types/dist-types/http.d.ts @@ -0,0 +1,112 @@ +import { AbortSignal as DeprecatedAbortSignal } from "./abort"; +import { URI } from 
"./uri"; +/** + * @public + * + * @deprecated use {@link EndpointV2} from `@smithy/types`. + */ +export interface Endpoint { + protocol: string; + hostname: string; + port?: number; + path: string; + query?: QueryParameterBag; +} +/** + * @public + * + * Interface an HTTP request class. Contains + * addressing information in addition to standard message properties. + */ +export interface HttpRequest extends HttpMessage, URI { + method: string; +} +/** + * @public + * + * Represents an HTTP message as received in reply to a request. Contains a + * numeric status code in addition to standard message properties. + */ +export interface HttpResponse extends HttpMessage { + statusCode: number; + reason?: string; +} +/** + * @public + * + * Represents an HTTP message with headers and an optional static or streaming + * body. body: ArrayBuffer | ArrayBufferView | string | Uint8Array | Readable | ReadableStream; + */ +export interface HttpMessage { + headers: HeaderBag; + body?: any; +} +/** + * @public + * + * A mapping of query parameter names to strings or arrays of strings, with the + * second being used when a parameter contains a list of values. Value can be set + * to null when query is not in key-value pairs shape + */ +export type QueryParameterBag = Record | null>; +/** + * @public + */ +export type FieldOptions = { + name: string; + kind?: FieldPosition; + values?: string[]; +}; +/** + * @public + */ +export declare enum FieldPosition { + HEADER = 0, + TRAILER = 1 +} +/** + * @public + * + * A mapping of header names to string values. Multiple values for the same + * header should be represented as a single string with values separated by + * `, `. + * + * Keys should be considered case insensitive, even if this is not enforced by a + * particular implementation. 
For example, given the following HeaderBag, where + * keys differ only in case: + * + * ```json + * { + * 'x-request-date': '2000-01-01T00:00:00Z', + * 'X-Request-Date': '2001-01-01T00:00:00Z' + * } + * ``` + * + * The SDK may at any point during processing remove one of the object + * properties in favor of the other. The headers may or may not be combined, and + * the SDK will not deterministically select which header candidate to use. + */ +export type HeaderBag = Record; +/** + * @public + * + * Represents an HTTP message with headers and an optional static or streaming + * body. bode: ArrayBuffer | ArrayBufferView | string | Uint8Array | Readable | ReadableStream; + */ +export interface HttpMessage { + headers: HeaderBag; + body?: any; +} +/** + * @public + * + * Represents the options that may be passed to an Http Handler. + */ +export interface HttpHandlerOptions { + abortSignal?: AbortSignal | DeprecatedAbortSignal; + /** + * The maximum time in milliseconds that the connection phase of a request + * may take before the connection attempt is abandoned. + */ + requestTimeout?: number; +} diff --git a/amplify/functions/deleteDocument/node_modules/@smithy/types/dist-types/http/httpHandlerInitialization.d.ts b/amplify/functions/deleteDocument/node_modules/@smithy/types/dist-types/http/httpHandlerInitialization.d.ts new file mode 100644 index 0000000..bca0851 --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@smithy/types/dist-types/http/httpHandlerInitialization.d.ts @@ -0,0 +1,123 @@ +/// +/// +import type { Agent as hAgent, AgentOptions as hAgentOptions } from "http"; +import type { Agent as hsAgent, AgentOptions as hsAgentOptions } from "https"; +import { HttpRequest as IHttpRequest } from "../http"; +import { Logger } from "../logger"; +/** + * + * This type represents an alternate client constructor option for the entry + * "requestHandler". 
Instead of providing an instance of a requestHandler, the user + * may provide the requestHandler's constructor options for either the + * NodeHttpHandler or FetchHttpHandler. + * + * For other RequestHandlers like HTTP2 or WebSocket, + * constructor parameter passthrough is not currently available. + * + * @public + */ +export type RequestHandlerParams = NodeHttpHandlerOptions | FetchHttpHandlerOptions; +/** + * Represents the http options that can be passed to a node http client. + * @public + */ +export interface NodeHttpHandlerOptions { + /** + * The maximum time in milliseconds that the connection phase of a request + * may take before the connection attempt is abandoned. + * + * Defaults to 0, which disables the timeout. + */ + connectionTimeout?: number; + /** + * The number of milliseconds a request can take before automatically being terminated. + * Defaults to 0, which disables the timeout. + */ + requestTimeout?: number; + /** + * Delay before the NodeHttpHandler checks for socket exhaustion, + * and emits a warning if the active sockets and enqueued request count is greater than + * 2x the maxSockets count. + * + * Defaults to connectionTimeout + requestTimeout or 3000ms if those are not set. + */ + socketAcquisitionWarningTimeout?: number; + /** + * This field is deprecated, and requestTimeout should be used instead. + * The maximum time in milliseconds that a socket may remain idle before it + * is closed. + * + * @deprecated Use {@link requestTimeout} + */ + socketTimeout?: number; + /** + * You can pass http.Agent or its constructor options. + */ + httpAgent?: hAgent | hAgentOptions; + /** + * You can pass https.Agent or its constructor options. + */ + httpsAgent?: hsAgent | hsAgentOptions; + /** + * Optional logger. + */ + logger?: Logger; +} +/** + * Represents the http options that can be passed to a browser http client. 
+ * @public + */ +export interface FetchHttpHandlerOptions { + /** + * The number of milliseconds a request can take before being automatically + * terminated. + */ + requestTimeout?: number; + /** + * Whether to allow the request to outlive the page. Default value is false. + * + * There may be limitations to the payload size, number of concurrent requests, + * request duration etc. when using keepalive in browsers. + * + * These may change over time, so look for up to date information about + * these limitations before enabling keepalive. + */ + keepAlive?: boolean; + /** + * A string indicating whether credentials will be sent with the request always, never, or + * only when sent to a same-origin URL. + * @see https://developer.mozilla.org/en-US/docs/Web/API/Request/credentials + */ + credentials?: "include" | "omit" | "same-origin" | undefined | string; + /** + * Cache settings for fetch. + * @see https://developer.mozilla.org/en-US/docs/Web/API/Request/cache + */ + cache?: "default" | "force-cache" | "no-cache" | "no-store" | "only-if-cached" | "reload"; + /** + * An optional function that produces additional RequestInit + * parameters for each httpRequest. + * + * This is applied last via merging with Object.assign() and overwrites other values + * set from other sources. + * + * @example + * ```js + * new Client({ + * requestHandler: { + * requestInit(httpRequest) { + * return { cache: "no-store" }; + * } + * } + * }); + * ``` + */ + requestInit?: (httpRequest: IHttpRequest) => RequestInit; +} +declare global { + /** + * interface merging stub. 
+ */ + interface RequestInit { + } +} diff --git a/amplify/functions/deleteDocument/node_modules/@smithy/types/dist-types/identity/apiKeyIdentity.d.ts b/amplify/functions/deleteDocument/node_modules/@smithy/types/dist-types/identity/apiKeyIdentity.d.ts new file mode 100644 index 0000000..27750d4 --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@smithy/types/dist-types/identity/apiKeyIdentity.d.ts @@ -0,0 +1,14 @@ +import { Identity, IdentityProvider } from "../identity/identity"; +/** + * @public + */ +export interface ApiKeyIdentity extends Identity { + /** + * The literal API Key + */ + readonly apiKey: string; +} +/** + * @public + */ +export type ApiKeyIdentityProvider = IdentityProvider; diff --git a/amplify/functions/deleteDocument/node_modules/@smithy/types/dist-types/identity/awsCredentialIdentity.d.ts b/amplify/functions/deleteDocument/node_modules/@smithy/types/dist-types/identity/awsCredentialIdentity.d.ts new file mode 100644 index 0000000..7aa5a4b --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@smithy/types/dist-types/identity/awsCredentialIdentity.d.ts @@ -0,0 +1,31 @@ +import { Identity, IdentityProvider } from "./identity"; +/** + * @public + */ +export interface AwsCredentialIdentity extends Identity { + /** + * AWS access key ID + */ + readonly accessKeyId: string; + /** + * AWS secret access key + */ + readonly secretAccessKey: string; + /** + * A security or session token to use with these credentials. Usually + * present for temporary credentials. + */ + readonly sessionToken?: string; + /** + * AWS credential scope for this set of credentials. + */ + readonly credentialScope?: string; + /** + * AWS accountId. 
+ */ + readonly accountId?: string; +} +/** + * @public + */ +export type AwsCredentialIdentityProvider = IdentityProvider; diff --git a/amplify/functions/deleteDocument/node_modules/@smithy/types/dist-types/identity/identity.d.ts b/amplify/functions/deleteDocument/node_modules/@smithy/types/dist-types/identity/identity.d.ts new file mode 100644 index 0000000..c6fd0d1 --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@smithy/types/dist-types/identity/identity.d.ts @@ -0,0 +1,15 @@ +/** + * @public + */ +export interface Identity { + /** + * A `Date` when the identity or credential will no longer be accepted. + */ + readonly expiration?: Date; +} +/** + * @public + */ +export interface IdentityProvider { + (identityProperties?: Record): Promise; +} diff --git a/amplify/functions/deleteDocument/node_modules/@smithy/types/dist-types/identity/index.d.ts b/amplify/functions/deleteDocument/node_modules/@smithy/types/dist-types/identity/index.d.ts new file mode 100644 index 0000000..3360320 --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@smithy/types/dist-types/identity/index.d.ts @@ -0,0 +1,4 @@ +export * from "./apiKeyIdentity"; +export * from "./awsCredentialIdentity"; +export * from "./identity"; +export * from "./tokenIdentity"; diff --git a/amplify/functions/deleteDocument/node_modules/@smithy/types/dist-types/identity/tokenIdentity.d.ts b/amplify/functions/deleteDocument/node_modules/@smithy/types/dist-types/identity/tokenIdentity.d.ts new file mode 100644 index 0000000..84a74ff --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@smithy/types/dist-types/identity/tokenIdentity.d.ts @@ -0,0 +1,14 @@ +import { Identity, IdentityProvider } from "../identity/identity"; +/** + * @internal + */ +export interface TokenIdentity extends Identity { + /** + * The literal token string + */ + readonly token: string; +} +/** + * @internal + */ +export type TokenIdentityProvider = IdentityProvider; diff --git 
a/amplify/functions/deleteDocument/node_modules/@smithy/types/dist-types/index.d.ts b/amplify/functions/deleteDocument/node_modules/@smithy/types/dist-types/index.d.ts new file mode 100644 index 0000000..c370335 --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@smithy/types/dist-types/index.d.ts @@ -0,0 +1,37 @@ +export * from "./abort"; +export * from "./auth"; +export * from "./blob/blob-payload-input-types"; +export * from "./checksum"; +export * from "./client"; +export * from "./command"; +export * from "./connection"; +export * from "./crypto"; +export * from "./encode"; +export * from "./endpoint"; +export * from "./endpoints"; +export * from "./eventStream"; +export * from "./extensions"; +export * from "./feature-ids"; +export * from "./http"; +export * from "./http/httpHandlerInitialization"; +export * from "./identity"; +export * from "./logger"; +export * from "./middleware"; +export * from "./pagination"; +export * from "./profile"; +export * from "./response"; +export * from "./retry"; +export * from "./serde"; +export * from "./shapes"; +export * from "./signature"; +export * from "./stream"; +export * from "./streaming-payload/streaming-blob-common-types"; +export * from "./streaming-payload/streaming-blob-payload-input-types"; +export * from "./streaming-payload/streaming-blob-payload-output-types"; +export * from "./transfer"; +export * from "./transform/client-payload-blob-type-narrow"; +export * from "./transform/no-undefined"; +export * from "./transform/type-transform"; +export * from "./uri"; +export * from "./util"; +export * from "./waiter"; diff --git a/amplify/functions/deleteDocument/node_modules/@smithy/types/dist-types/logger.d.ts b/amplify/functions/deleteDocument/node_modules/@smithy/types/dist-types/logger.d.ts new file mode 100644 index 0000000..f66a664 --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@smithy/types/dist-types/logger.d.ts @@ -0,0 +1,13 @@ +/** + * @public + * + * Represents a logger 
object that is available in HandlerExecutionContext + * throughout the middleware stack. + */ +export interface Logger { + trace?: (...content: any[]) => void; + debug: (...content: any[]) => void; + info: (...content: any[]) => void; + warn: (...content: any[]) => void; + error: (...content: any[]) => void; +} diff --git a/amplify/functions/deleteDocument/node_modules/@smithy/types/dist-types/middleware.d.ts b/amplify/functions/deleteDocument/node_modules/@smithy/types/dist-types/middleware.d.ts new file mode 100644 index 0000000..cc20098 --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@smithy/types/dist-types/middleware.d.ts @@ -0,0 +1,534 @@ +import type { AuthScheme, HttpAuthDefinition } from "./auth/auth"; +import type { SelectedHttpAuthScheme } from "./auth/HttpAuthScheme"; +import type { Command } from "./command"; +import type { EndpointV2 } from "./endpoint"; +import type { SmithyFeatures } from "./feature-ids"; +import type { Logger } from "./logger"; +import type { UserAgent } from "./util"; +/** + * @public + */ +export interface InitializeHandlerArguments { + /** + * User input to a command. Reflects the userland representation of the + * union of data types the command can effectively handle. + */ + input: Input; +} +/** + * @public + */ +export interface InitializeHandlerOutput extends DeserializeHandlerOutput { + output: Output; +} +/** + * @public + */ +export interface SerializeHandlerArguments extends InitializeHandlerArguments { + /** + * The user input serialized as a request object. The request object is unknown, + * so you cannot modify it directly. When work with request, you need to guard its + * type to e.g. HttpRequest with 'instanceof' operand + * + * During the build phase of the execution of a middleware stack, a built + * request may or may not be available. 
+ */ + request?: unknown; +} +/** + * @public + */ +export interface SerializeHandlerOutput extends InitializeHandlerOutput { +} +/** + * @public + */ +export interface BuildHandlerArguments extends FinalizeHandlerArguments { +} +/** + * @public + */ +export interface BuildHandlerOutput extends InitializeHandlerOutput { +} +/** + * @public + */ +export interface FinalizeHandlerArguments extends SerializeHandlerArguments { + /** + * The user input serialized as a request. + */ + request: unknown; +} +/** + * @public + */ +export interface FinalizeHandlerOutput extends InitializeHandlerOutput { +} +/** + * @public + */ +export interface DeserializeHandlerArguments extends FinalizeHandlerArguments { +} +/** + * @public + */ +export interface DeserializeHandlerOutput { + /** + * The raw response object from runtime is deserialized to structured output object. + * The response object is unknown so you cannot modify it directly. When work with + * response, you need to guard its type to e.g. HttpResponse with 'instanceof' operand. + * + * During the deserialize phase of the execution of a middleware stack, a deserialized + * response may or may not be available + */ + response: unknown; + output?: Output; +} +/** + * @public + */ +export interface InitializeHandler { + /** + * Asynchronously converts an input object into an output object. + * + * @param args - An object containing a input to the command as well as any + * associated or previously generated execution artifacts. + */ + (args: InitializeHandlerArguments): Promise>; +} +/** + * @public + */ +export type Handler = InitializeHandler; +/** + * @public + */ +export interface SerializeHandler { + /** + * Asynchronously converts an input object into an output object. + * + * @param args - An object containing a input to the command as well as any + * associated or previously generated execution artifacts. 
+ */ + (args: SerializeHandlerArguments): Promise>; +} +/** + * @public + */ +export interface FinalizeHandler { + /** + * Asynchronously converts an input object into an output object. + * + * @param args - An object containing a input to the command as well as any + * associated or previously generated execution artifacts. + */ + (args: FinalizeHandlerArguments): Promise>; +} +/** + * @public + */ +export interface BuildHandler { + (args: BuildHandlerArguments): Promise>; +} +/** + * @public + */ +export interface DeserializeHandler { + (args: DeserializeHandlerArguments): Promise>; +} +/** + * @public + * + * A factory function that creates functions implementing the `Handler` + * interface. + */ +export interface InitializeMiddleware { + /** + * @param next - The handler to invoke after this middleware has operated on + * the user input and before this middleware operates on the output. + * + * @param context - Invariant data and functions for use by the handler. + */ + (next: InitializeHandler, context: HandlerExecutionContext): InitializeHandler; +} +/** + * @public + * + * A factory function that creates functions implementing the `BuildHandler` + * interface. + */ +export interface SerializeMiddleware { + /** + * @param next - The handler to invoke after this middleware has operated on + * the user input and before this middleware operates on the output. + * + * @param context - Invariant data and functions for use by the handler. + */ + (next: SerializeHandler, context: HandlerExecutionContext): SerializeHandler; +} +/** + * @public + * + * A factory function that creates functions implementing the `FinalizeHandler` + * interface. + */ +export interface FinalizeRequestMiddleware { + /** + * @param next - The handler to invoke after this middleware has operated on + * the user input and before this middleware operates on the output. + * + * @param context - Invariant data and functions for use by the handler. 
+ */ + (next: FinalizeHandler, context: HandlerExecutionContext): FinalizeHandler; +} +/** + * @public + */ +export interface BuildMiddleware { + (next: BuildHandler, context: HandlerExecutionContext): BuildHandler; +} +/** + * @public + */ +export interface DeserializeMiddleware { + (next: DeserializeHandler, context: HandlerExecutionContext): DeserializeHandler; +} +/** + * @public + */ +export type MiddlewareType = InitializeMiddleware | SerializeMiddleware | BuildMiddleware | FinalizeRequestMiddleware | DeserializeMiddleware; +/** + * @public + * + * A factory function that creates the terminal handler atop which a middleware + * stack sits. + */ +export interface Terminalware { + (context: HandlerExecutionContext): DeserializeHandler; +} +/** + * @public + */ +export type Step = "initialize" | "serialize" | "build" | "finalizeRequest" | "deserialize"; +/** + * @public + */ +export type Priority = "high" | "normal" | "low"; +/** + * @public + */ +export interface HandlerOptions { + /** + * Handlers are ordered using a "step" that describes the stage of command + * execution at which the handler will be executed. The available steps are: + * + * - initialize: The input is being prepared. Examples of typical + * initialization tasks include injecting default options computing + * derived parameters. + * - serialize: The input is complete and ready to be serialized. Examples + * of typical serialization tasks include input validation and building + * an HTTP request from user input. + * - build: The input has been serialized into an HTTP request, but that + * request may require further modification. Any request alterations + * will be applied to all retries. Examples of typical build tasks + * include injecting HTTP headers that describe a stable aspect of the + * request, such as `Content-Length` or a body checksum. + * - finalizeRequest: The request is being prepared to be sent over the wire. 
The + * request in this stage should already be semantically complete and + * should therefore only be altered as match the recipient's + * expectations. Examples of typical finalization tasks include request + * signing and injecting hop-by-hop headers. + * - deserialize: The response has arrived, the middleware here will deserialize + * the raw response object to structured response + * + * Unlike initialization and build handlers, which are executed once + * per operation execution, finalization and deserialize handlers will be + * executed foreach HTTP request sent. + * + * @defaultValue 'initialize' + */ + step?: Step; + /** + * A list of strings to any that identify the general purpose or important + * characteristics of a given handler. + */ + tags?: Array; + /** + * A unique name to refer to a middleware + */ + name?: string; + /** + * @internal + * Aliases allows for middleware to be found by multiple names besides {@link HandlerOptions.name}. + * This allows for references to replaced middleware to continue working, e.g. replacing + * multiple auth-specific middleware with a single generic auth middleware. + */ + aliases?: Array; + /** + * A flag to override the existing middleware with the same name. Without + * setting it, adding middleware with duplicated name will throw an exception. + * @internal + */ + override?: boolean; +} +/** + * @public + */ +export interface AbsoluteLocation { + /** + * By default middleware will be added to individual step in un-guaranteed order. + * In the case that + * + * @defaultValue 'normal' + */ + priority?: Priority; +} +/** + * @public + */ +export type Relation = "before" | "after"; +/** + * @public + */ +export interface RelativeLocation { + /** + * Specify the relation to be before or after a know middleware. + */ + relation: Relation; + /** + * A known middleware name to indicate inserting middleware's location. 
+ */ + toMiddleware: string; +} +/** + * @public + */ +export type RelativeMiddlewareOptions = RelativeLocation & Omit; +/** + * @public + */ +export interface InitializeHandlerOptions extends HandlerOptions { + step?: "initialize"; +} +/** + * @public + */ +export interface SerializeHandlerOptions extends HandlerOptions { + step: "serialize"; +} +/** + * @public + */ +export interface BuildHandlerOptions extends HandlerOptions { + step: "build"; +} +/** + * @public + */ +export interface FinalizeRequestHandlerOptions extends HandlerOptions { + step: "finalizeRequest"; +} +/** + * @public + */ +export interface DeserializeHandlerOptions extends HandlerOptions { + step: "deserialize"; +} +/** + * @public + * + * A stack storing middleware. It can be resolved into a handler. It supports 2 + * approaches for adding middleware: + * 1. Adding middleware to specific step with `add()`. The order of middleware + * added into same step is determined by order of adding them. If one middleware + * needs to be executed at the front of the step or at the end of step, set + * `priority` options to `high` or `low`. + * 2. Adding middleware to location relative to known middleware with `addRelativeTo()`. + * This is useful when given middleware must be executed before or after specific + * middleware(`toMiddleware`). You can add a middleware relatively to another + * middleware which also added relatively. But eventually, this relative middleware + * chain **must** be 'anchored' by a middleware that added using `add()` API + * with absolute `step` and `priority`. This mothod will throw if specified + * `toMiddleware` is not found. 
+ */ +export interface MiddlewareStack extends Pluggable { + /** + * Add middleware to the stack to be executed during the "initialize" step, + * optionally specifying a priority, tags and name + */ + add(middleware: InitializeMiddleware, options?: InitializeHandlerOptions & AbsoluteLocation): void; + /** + * Add middleware to the stack to be executed during the "serialize" step, + * optionally specifying a priority, tags and name + */ + add(middleware: SerializeMiddleware, options: SerializeHandlerOptions & AbsoluteLocation): void; + /** + * Add middleware to the stack to be executed during the "build" step, + * optionally specifying a priority, tags and name + */ + add(middleware: BuildMiddleware, options: BuildHandlerOptions & AbsoluteLocation): void; + /** + * Add middleware to the stack to be executed during the "finalizeRequest" step, + * optionally specifying a priority, tags and name + */ + add(middleware: FinalizeRequestMiddleware, options: FinalizeRequestHandlerOptions & AbsoluteLocation): void; + /** + * Add middleware to the stack to be executed during the "deserialize" step, + * optionally specifying a priority, tags and name + */ + add(middleware: DeserializeMiddleware, options: DeserializeHandlerOptions & AbsoluteLocation): void; + /** + * Add middleware to a stack position before or after a known middleware,optionally + * specifying name and tags. + */ + addRelativeTo(middleware: MiddlewareType, options: RelativeMiddlewareOptions): void; + /** + * Apply a customization function to mutate the middleware stack, often + * used for customizations that requires mutating multiple middleware. + */ + use(pluggable: Pluggable): void; + /** + * Create a shallow clone of this stack. Step bindings and handler priorities + * and tags are preserved in the copy. + */ + clone(): MiddlewareStack; + /** + * Removes middleware from the stack. + * + * If a string is provided, it will be treated as middleware name. 
If a middleware + * is inserted with the given name, it will be removed. + * + * If a middleware class is provided, all usages thereof will be removed. + */ + remove(toRemove: MiddlewareType | string): boolean; + /** + * Removes middleware that contains given tag + * + * Multiple middleware will potentially be removed + */ + removeByTag(toRemove: string): boolean; + /** + * Create a stack containing the middlewares in this stack as well as the + * middlewares in the `from` stack. Neither source is modified, and step + * bindings and handler priorities and tags are preserved in the copy. + */ + concat(from: MiddlewareStack): MiddlewareStack; + /** + * Returns a list of the current order of middleware in the stack. + * This does not execute the middleware functions, nor does it + * provide a reference to the stack itself. + */ + identify(): string[]; + /** + * @internal + * + * When an operation is called using this stack, + * it will log its list of middleware to the console using + * the identify function. + * + * @param toggle - set whether to log on resolve. + * If no argument given, returns the current value. + */ + identifyOnResolve(toggle?: boolean): boolean; + /** + * Builds a single handler function from zero or more middleware classes and + * a core handler. The core handler is meant to send command objects to AWS + * services and return promises that will resolve with the operation result + * or be rejected with an error. + * + * When a composed handler is invoked, the arguments will pass through all + * middleware in a defined order, and the return from the innermost handler + * will pass through all middleware in the reverse of that order. 
+ */ + resolve(handler: DeserializeHandler, context: HandlerExecutionContext): InitializeHandler; +} +/** + * @internal + */ +export declare const SMITHY_CONTEXT_KEY = "__smithy_context"; +/** + * @public + * + * Data and helper objects that are not expected to change from one execution of + * a composed handler to another. + */ +export interface HandlerExecutionContext { + /** + * A logger that may be invoked by any handler during execution of an + * operation. + */ + logger?: Logger; + /** + * Name of the service the operation is being sent to. + */ + clientName?: string; + /** + * Name of the operation being executed. + */ + commandName?: string; + /** + * Additional user agent that inferred by middleware. It can be used to save + * the internal user agent sections without overriding the `customUserAgent` + * config in clients. + */ + userAgent?: UserAgent; + /** + * Resolved by the endpointMiddleware function of `@smithy/middleware-endpoint` + * in the serialization stage. + */ + endpointV2?: EndpointV2; + /** + * Set at the same time as endpointV2. + */ + authSchemes?: AuthScheme[]; + /** + * The current auth configuration that has been set by any auth middleware and + * that will prevent from being set more than once. + */ + currentAuthConfig?: HttpAuthDefinition; + /** + * @deprecated do not extend this field, it is a carryover from AWS SDKs. + * Used by DynamoDbDocumentClient. + */ + dynamoDbDocumentClientOptions?: Partial<{ + overrideInputFilterSensitiveLog(...args: any[]): string | void; + overrideOutputFilterSensitiveLog(...args: any[]): string | void; + }>; + /** + * @internal + * Context for Smithy properties. + */ + [SMITHY_CONTEXT_KEY]?: { + service?: string; + operation?: string; + commandInstance?: Command; + selectedHttpAuthScheme?: SelectedHttpAuthScheme; + features?: SmithyFeatures; + /** + * @deprecated + * Do not assign arbitrary members to the Smithy Context, + * fields should be explicitly declared here to avoid collisions. 
+ */ + [key: string]: unknown; + }; + /** + * @deprecated + * Do not assign arbitrary members to the context, since + * they can interfere with existing functionality. + * + * Additional members should instead be declared on the SMITHY_CONTEXT_KEY + * or other reserved keys. + */ + [key: string]: any; +} +/** + * @public + */ +export interface Pluggable { + /** + * A function that mutate the passed in middleware stack. Functions implementing + * this interface can add, remove, modify existing middleware stack from clients + * or commands + */ + applyToStack: (stack: MiddlewareStack) => void; +} diff --git a/amplify/functions/deleteDocument/node_modules/@smithy/types/dist-types/pagination.d.ts b/amplify/functions/deleteDocument/node_modules/@smithy/types/dist-types/pagination.d.ts new file mode 100644 index 0000000..e10fdda --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@smithy/types/dist-types/pagination.d.ts @@ -0,0 +1,33 @@ +import type { Client } from "./client"; +import type { Command } from "./command"; +/** + * @public + * + * Expected type definition of a paginator. + */ +export type Paginator = AsyncGenerator; +/** + * @public + * + * Expected paginator configuration passed to an operation. Services will extend + * this interface definition and may type client further. + */ +export interface PaginationConfiguration { + client: Client; + pageSize?: number; + startingToken?: any; + /** + * For some APIs, such as CloudWatchLogs events, the next page token will always + * be present. + * + * When true, this config field will have the paginator stop when the token doesn't change + * instead of when it is not present. + */ + stopOnSameToken?: boolean; + /** + * @param command - reference to the instantiated command. This callback is executed + * prior to sending the command with the paginator's client. + * @returns the original command or a replacement, defaulting to the original command object. 
+ */ + withCommand?: (command: Command) => typeof command | undefined; +} diff --git a/amplify/functions/deleteDocument/node_modules/@smithy/types/dist-types/profile.d.ts b/amplify/functions/deleteDocument/node_modules/@smithy/types/dist-types/profile.d.ts new file mode 100644 index 0000000..b7885d9 --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@smithy/types/dist-types/profile.d.ts @@ -0,0 +1,30 @@ +/** + * @public + */ +export declare enum IniSectionType { + PROFILE = "profile", + SSO_SESSION = "sso-session", + SERVICES = "services" +} +/** + * @public + */ +export type IniSection = Record; +/** + * @public + * + * @deprecated Please use {@link IniSection} + */ +export interface Profile extends IniSection { +} +/** + * @public + */ +export type ParsedIniData = Record; +/** + * @public + */ +export interface SharedConfigFiles { + credentialsFile: ParsedIniData; + configFile: ParsedIniData; +} diff --git a/amplify/functions/deleteDocument/node_modules/@smithy/types/dist-types/response.d.ts b/amplify/functions/deleteDocument/node_modules/@smithy/types/dist-types/response.d.ts new file mode 100644 index 0000000..afcfe8f --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@smithy/types/dist-types/response.d.ts @@ -0,0 +1,40 @@ +/** + * @public + */ +export interface ResponseMetadata { + /** + * The status code of the last HTTP response received for this operation. + */ + httpStatusCode?: number; + /** + * A unique identifier for the last request sent for this operation. Often + * requested by AWS service teams to aid in debugging. + */ + requestId?: string; + /** + * A secondary identifier for the last request sent. Used for debugging. + */ + extendedRequestId?: string; + /** + * A tertiary identifier for the last request sent. Used for debugging. + */ + cfId?: string; + /** + * The number of times this operation was attempted. 
+ */ + attempts?: number; + /** + * The total amount of time (in milliseconds) that was spent waiting between + * retry attempts. + */ + totalRetryDelay?: number; +} +/** + * @public + */ +export interface MetadataBearer { + /** + * Metadata pertaining to this request. + */ + $metadata: ResponseMetadata; +} diff --git a/amplify/functions/deleteDocument/node_modules/@smithy/types/dist-types/retry.d.ts b/amplify/functions/deleteDocument/node_modules/@smithy/types/dist-types/retry.d.ts new file mode 100644 index 0000000..7bb5881 --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@smithy/types/dist-types/retry.d.ts @@ -0,0 +1,133 @@ +import { SdkError } from "./shapes"; +/** + * @public + */ +export type RetryErrorType = +/** + * This is a connection level error such as a socket timeout, socket connect + * error, tls negotiation timeout etc... + * Typically these should never be applied for non-idempotent request types + * since in this scenario, it's impossible to know whether the operation had + * a side effect on the server. + */ +"TRANSIENT" +/** + * This is an error where the server explicitly told the client to back off, + * such as a 429 or 503 Http error. + */ + | "THROTTLING" +/** + * This is a server error that isn't explicitly throttling but is considered + * by the client to be something that should be retried. + */ + | "SERVER_ERROR" +/** + * Doesn't count against any budgets. This could be something like a 401 + * challenge in Http. + */ + | "CLIENT_ERROR"; +/** + * @public + */ +export interface RetryErrorInfo { + /** + * The error thrown during the initial request, if available. + */ + error?: SdkError; + errorType: RetryErrorType; + /** + * Protocol hint. This could come from Http's 'retry-after' header or + * something from MQTT or any other protocol that has the ability to convey + * retry info from a peer. + * + * The Date after which a retry should be attempted. 
+ */ + retryAfterHint?: Date; +} +/** + * @public + */ +export interface RetryBackoffStrategy { + /** + * @returns the number of milliseconds to wait before retrying an action. + */ + computeNextBackoffDelay(retryAttempt: number): number; +} +/** + * @public + */ +export interface StandardRetryBackoffStrategy extends RetryBackoffStrategy { + /** + * Sets the delayBase used to compute backoff delays. + * @param delayBase - + */ + setDelayBase(delayBase: number): void; +} +/** + * @public + */ +export interface RetryStrategyOptions { + backoffStrategy: RetryBackoffStrategy; + maxRetriesBase: number; +} +/** + * @public + */ +export interface RetryToken { + /** + * @returns the current count of retry. + */ + getRetryCount(): number; + /** + * @returns the number of milliseconds to wait before retrying an action. + */ + getRetryDelay(): number; +} +/** + * @public + */ +export interface StandardRetryToken extends RetryToken { + /** + * @returns the cost of the last retry attempt. + */ + getRetryCost(): number | undefined; +} +/** + * @public + */ +export interface RetryStrategyV2 { + /** + * Called before any retries (for the first call to the operation). It either + * returns a retry token or an error upon the failure to acquire a token prior. + * + * tokenScope is arbitrary and out of scope for this component. However, + * adding it here offers us a lot of future flexibility for outage detection. + * For example, it could be "us-east-1" on a shared retry strategy, or + * "us-west-2-c:dynamodb". + */ + acquireInitialRetryToken(retryTokenScope: string): Promise; + /** + * After a failed operation call, this function is invoked to refresh the + * retryToken returned by acquireInitialRetryToken(). This function can + * either choose to allow another retry and send a new or updated token, + * or reject the retry attempt and report the error either in an exception + * or returning an error. 
+ */ + refreshRetryTokenForRetry(tokenToRenew: RetryToken, errorInfo: RetryErrorInfo): Promise; + /** + * Upon successful completion of the operation, this function is called + * to record that the operation was successful. + */ + recordSuccess(token: RetryToken): void; +} +/** + * @public + */ +export type ExponentialBackoffJitterType = "DEFAULT" | "NONE" | "FULL" | "DECORRELATED"; +/** + * @public + */ +export interface ExponentialBackoffStrategyOptions { + jitterType: ExponentialBackoffJitterType; + backoffScaleValue?: number; +} diff --git a/amplify/functions/deleteDocument/node_modules/@smithy/types/dist-types/serde.d.ts b/amplify/functions/deleteDocument/node_modules/@smithy/types/dist-types/serde.d.ts new file mode 100644 index 0000000..a81314f --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@smithy/types/dist-types/serde.d.ts @@ -0,0 +1,112 @@ +import { Endpoint } from "./http"; +import { RequestHandler } from "./transfer"; +import { Decoder, Encoder, Provider } from "./util"; +/** + * @public + * + * Interface for object requires an Endpoint set. + */ +export interface EndpointBearer { + endpoint: Provider; +} +/** + * @public + */ +export interface StreamCollector { + /** + * A function that converts a stream into an array of bytes. + * + * @param stream - The low-level native stream from browser or Nodejs runtime + */ + (stream: any): Promise; +} +/** + * @public + * + * Request and Response serde util functions and settings for AWS services + */ +export interface SerdeContext extends SerdeFunctions, EndpointBearer { + requestHandler: RequestHandler; + disableHostPrefix: boolean; +} +/** + * @public + * + * Serde functions from the client config. 
+ */ +export interface SerdeFunctions { + base64Encoder: Encoder; + base64Decoder: Decoder; + utf8Encoder: Encoder; + utf8Decoder: Decoder; + streamCollector: StreamCollector; +} +/** + * @public + */ +export interface RequestSerializer { + /** + * Converts the provided `input` into a request object + * + * @param input - The user input to serialize. + * + * @param context - Context containing runtime-specific util functions. + */ + (input: any, context: Context): Promise; +} +/** + * @public + */ +export interface ResponseDeserializer { + /** + * Converts the output of an operation into JavaScript types. + * + * @param output - The HTTP response received from the service + * + * @param context - context containing runtime-specific util functions. + */ + (output: ResponseType, context: Context): Promise; +} +/** + * The interface contains mix-in utility functions to transfer the runtime-specific + * stream implementation to specified format. Each stream can ONLY be transformed + * once. + * @public + */ +export interface SdkStreamMixin { + transformToByteArray: () => Promise; + transformToString: (encoding?: string) => Promise; + transformToWebStream: () => ReadableStream; +} +/** + * @public + * + * The type describing a runtime-specific stream implementation with mix-in + * utility functions. + */ +export type SdkStream = BaseStream & SdkStreamMixin; +/** + * @public + * + * Indicates that the member of type T with + * key StreamKey have been extended + * with the SdkStreamMixin helper methods. + */ +export type WithSdkStreamMixin = { + [key in keyof T]: key extends StreamKey ? 
SdkStream : T[key]; +}; +/** + * Interface for internal function to inject stream utility functions + * implementation + * + * @internal + */ +export interface SdkStreamMixinInjector { + (stream: unknown): SdkStreamMixin; +} +/** + * @internal + */ +export interface SdkStreamSerdeContext { + sdkStreamMixin: SdkStreamMixinInjector; +} diff --git a/amplify/functions/deleteDocument/node_modules/@smithy/types/dist-types/shapes.d.ts b/amplify/functions/deleteDocument/node_modules/@smithy/types/dist-types/shapes.d.ts new file mode 100644 index 0000000..a4812fb --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@smithy/types/dist-types/shapes.d.ts @@ -0,0 +1,82 @@ +import { HttpResponse } from "./http"; +import { MetadataBearer } from "./response"; +/** + * @public + * + * A document type represents an untyped JSON-like value. + * + * Not all protocols support document types, and the serialization format of a + * document type is protocol specific. All JSON protocols SHOULD support + * document types and they SHOULD serialize document types inline as normal + * JSON values. + */ +export type DocumentType = null | boolean | number | string | DocumentType[] | { + [prop: string]: DocumentType; +}; +/** + * @public + * + * A structure shape with the error trait. + * https://smithy.io/2.0/spec/behavior-traits.html#smithy-api-retryable-trait + */ +export interface RetryableTrait { + /** + * Indicates that the error is a retryable throttling error. + */ + readonly throttling?: boolean; +} +/** + * @public + * + * Type that is implemented by all Smithy shapes marked with the + * error trait. + * @deprecated + */ +export interface SmithyException { + /** + * The shape ID name of the exception. + */ + readonly name: string; + /** + * Whether the client or server are at fault. + */ + readonly $fault: "client" | "server"; + /** + * The service that encountered the exception. 
+ */ + readonly $service?: string; + /** + * Indicates that an error MAY be retried by the client. + */ + readonly $retryable?: RetryableTrait; + /** + * Reference to low-level HTTP response object. + */ + readonly $response?: HttpResponse; +} +/** + * @public + * + * @deprecated See {@link https://aws.amazon.com/blogs/developer/service-error-handling-modular-aws-sdk-js/} + * + * This type should not be used in your application. + * Users of the AWS SDK for JavaScript v3 service clients should prefer to + * use the specific Exception classes corresponding to each operation. + * These can be found as code in the deserializer for the operation's Command class, + * or as declarations in the service model file in codegen/sdk-codegen/aws-models. + * + * If no exceptions are enumerated by a particular Command operation, + * the base exception for the service should be used. Each client exports + * a base ServiceException prefixed with the service name. + */ +export type SdkError = Error & Partial & Partial & { + $metadata?: Partial["$metadata"] & { + /** + * If present, will have value of true and indicates that the error resulted in a + * correction of the clock skew, a.k.a. config.systemClockOffset. + * This is specific to AWS SDK and sigv4. + */ + readonly clockSkewCorrected?: true; + }; + cause?: Error; +}; diff --git a/amplify/functions/deleteDocument/node_modules/@smithy/types/dist-types/signature.d.ts b/amplify/functions/deleteDocument/node_modules/@smithy/types/dist-types/signature.d.ts new file mode 100644 index 0000000..db0039d --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@smithy/types/dist-types/signature.d.ts @@ -0,0 +1,155 @@ +import { Message } from "./eventStream"; +import { HttpRequest } from "./http"; +/** + * @public + * + * A `Date` object, a unix (epoch) timestamp in seconds, or a string that can be + * understood by the JavaScript `Date` constructor. 
+ */ +export type DateInput = number | string | Date; +/** + * @public + */ +export interface SigningArguments { + /** + * The date and time to be used as signature metadata. This value should be + * a Date object, a unix (epoch) timestamp, or a string that can be + * understood by the JavaScript `Date` constructor.If not supplied, the + * value returned by `new Date()` will be used. + */ + signingDate?: DateInput; + /** + * The service signing name. It will override the service name of the signer + * in current invocation + */ + signingService?: string; + /** + * The region name to sign the request. It will override the signing region of the + * signer in current invocation + */ + signingRegion?: string; +} +/** + * @public + */ +export interface RequestSigningArguments extends SigningArguments { + /** + * A set of strings whose members represents headers that cannot be signed. + * All headers in the provided request will have their names converted to + * lower case and then checked for existence in the unsignableHeaders set. + */ + unsignableHeaders?: Set; + /** + * A set of strings whose members represents headers that should be signed. + * Any values passed here will override those provided via unsignableHeaders, + * allowing them to be signed. + * + * All headers in the provided request will have their names converted to + * lower case before signing. + */ + signableHeaders?: Set; +} +/** + * @public + */ +export interface RequestPresigningArguments extends RequestSigningArguments { + /** + * The number of seconds before the presigned URL expires + */ + expiresIn?: number; + /** + * A set of strings whose representing headers that should not be hoisted + * to presigned request's query string. If not supplied, the presigner + * moves all the AWS-specific headers (starting with `x-amz-`) to the request + * query string. If supplied, these headers remain in the presigned request's + * header. 
+ * All headers in the provided request will have their names converted to + * lower case and then checked for existence in the unhoistableHeaders set. + */ + unhoistableHeaders?: Set; + /** + * This overrides any headers with the same name(s) set by unhoistableHeaders. + * These headers will be hoisted into the query string and signed. + */ + hoistableHeaders?: Set; +} +/** + * @public + */ +export interface EventSigningArguments extends SigningArguments { + priorSignature: string; +} +/** + * @public + */ +export interface RequestPresigner { + /** + * Signs a request for future use. + * + * The request will be valid until either the provided `expiration` time has + * passed or the underlying credentials have expired. + * + * @param requestToSign - The request that should be signed. + * @param options - Additional signing options. + */ + presign(requestToSign: HttpRequest, options?: RequestPresigningArguments): Promise; +} +/** + * @public + * + * An object that signs request objects with AWS credentials using one of the + * AWS authentication protocols. + */ +export interface RequestSigner { + /** + * Sign the provided request for immediate dispatch. + */ + sign(requestToSign: HttpRequest, options?: RequestSigningArguments): Promise; +} +/** + * @public + */ +export interface StringSigner { + /** + * Sign the provided `stringToSign` for use outside of the context of + * request signing. Typical uses include signed policy generation. + */ + sign(stringToSign: string, options?: SigningArguments): Promise; +} +/** + * @public + */ +export interface FormattedEvent { + headers: Uint8Array; + payload: Uint8Array; +} +/** + * @public + */ +export interface EventSigner { + /** + * Sign the individual event of the event stream. 
+ */ + sign(event: FormattedEvent, options: EventSigningArguments): Promise; +} +/** + * @public + */ +export interface SignableMessage { + message: Message; + priorSignature: string; +} +/** + * @public + */ +export interface SignedMessage { + message: Message; + signature: string; +} +/** + * @public + */ +export interface MessageSigner { + signMessage(message: SignableMessage, args: SigningArguments): Promise; + sign(event: SignableMessage, options: SigningArguments): Promise; +} diff --git a/amplify/functions/deleteDocument/node_modules/@smithy/types/dist-types/stream.d.ts b/amplify/functions/deleteDocument/node_modules/@smithy/types/dist-types/stream.d.ts new file mode 100644 index 0000000..f305dd9 --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@smithy/types/dist-types/stream.d.ts @@ -0,0 +1,22 @@ +import { ChecksumConstructor } from "./checksum"; +import { HashConstructor, StreamHasher } from "./crypto"; +import { BodyLengthCalculator, Encoder } from "./util"; +/** + * @public + */ +export interface GetAwsChunkedEncodingStreamOptions { + base64Encoder?: Encoder; + bodyLengthChecker: BodyLengthCalculator; + checksumAlgorithmFn?: ChecksumConstructor | HashConstructor; + checksumLocationName?: string; + streamHasher?: StreamHasher; +} +/** + * @public + * + * A function that returns Readable Stream which follows aws-chunked encoding stream. + * It optionally adds checksum if options are provided. 
+ */ +export interface GetAwsChunkedEncodingStream { + (readableStream: StreamType, options: GetAwsChunkedEncodingStreamOptions): StreamType; +} diff --git a/amplify/functions/deleteDocument/node_modules/@smithy/types/dist-types/streaming-payload/streaming-blob-common-types.d.ts b/amplify/functions/deleteDocument/node_modules/@smithy/types/dist-types/streaming-payload/streaming-blob-common-types.d.ts new file mode 100644 index 0000000..92c52da --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@smithy/types/dist-types/streaming-payload/streaming-blob-common-types.d.ts @@ -0,0 +1,33 @@ +/// +import type { Readable } from "stream"; +import type { BlobOptionalType, ReadableStreamOptionalType } from "../externals-check/browser-externals-check"; +/** + * @public + * + * This is the union representing the modeled blob type with streaming trait + * in a generic format that does not relate to HTTP input or output payloads. + * + * Note: the non-streaming blob type is represented by Uint8Array, but because + * the streaming blob type is always in the request/response paylod, it has + * historically been handled with different types. + * + * @see https://smithy.io/2.0/spec/simple-types.html#blob + * + * For compatibility with its historical representation, it must contain at least + * Readble (Node.js), Blob (browser), and ReadableStream (browser). + * + * @see StreamingPayloadInputTypes for FAQ about mixing types from multiple environments. + */ +export type StreamingBlobTypes = NodeJsRuntimeStreamingBlobTypes | BrowserRuntimeStreamingBlobTypes; +/** + * @public + * + * Node.js streaming blob type. + */ +export type NodeJsRuntimeStreamingBlobTypes = Readable; +/** + * @public + * + * Browser streaming blob types. 
+ */ +export type BrowserRuntimeStreamingBlobTypes = ReadableStreamOptionalType | BlobOptionalType; diff --git a/amplify/functions/deleteDocument/node_modules/@smithy/types/dist-types/streaming-payload/streaming-blob-payload-input-types.d.ts b/amplify/functions/deleteDocument/node_modules/@smithy/types/dist-types/streaming-payload/streaming-blob-payload-input-types.d.ts new file mode 100644 index 0000000..9bcc164 --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@smithy/types/dist-types/streaming-payload/streaming-blob-payload-input-types.d.ts @@ -0,0 +1,63 @@ +/// +/// +/// +import type { Readable } from "stream"; +import type { BlobOptionalType, ReadableStreamOptionalType } from "../externals-check/browser-externals-check"; +/** + * @public + * + * This union represents a superset of the compatible types you + * can use for streaming payload inputs. + * + * FAQ: + * Why does the type union mix mutually exclusive runtime types, namely + * Node.js and browser types? + * + * There are several reasons: + * 1. For backwards compatibility. + * 2. As a convenient compromise solution so that users in either environment may use the types + * without customization. + * 3. The SDK does not have static type information about the exact implementation + * of the HTTP RequestHandler being used in your client(s) (e.g. fetch, XHR, node:http, or node:http2), + * given that it is chosen at runtime. There are multiple possible request handlers + * in both the Node.js and browser runtime environments. + * + * Rather than restricting the type to a known common format (Uint8Array, for example) + * which doesn't include a universal streaming format in the currently supported Node.js versions, + * the type declaration is widened to multiple possible formats. + * It is up to the user to ultimately select a compatible format with the + * runtime and HTTP handler implementation they are using. 
+ * + * Usage: + * The typical solution we expect users to have is to manually narrow the + * type when needed, picking the appropriate one out of the union according to the + * runtime environment and specific request handler. + * There is also the type utility "NodeJsClient", "BrowserClient" and more + * exported from this package. These can be applied at the client level + * to pre-narrow these streaming payload blobs. For usage see the readme.md + * in the root of the \@smithy/types NPM package. + */ +export type StreamingBlobPayloadInputTypes = NodeJsRuntimeStreamingBlobPayloadInputTypes | BrowserRuntimeStreamingBlobPayloadInputTypes; +/** + * @public + * + * Streaming payload input types in the Node.js environment. + * These are derived from the types compatible with the request body used by node:http. + * + * Note: not all types are signable by the standard SignatureV4 signer when + * used as the request body. For example, in Node.js a Readable stream + * is not signable by the default signer. + * They are included in the union because it may be intended in some cases, + * but the expected types are primarily string, Uint8Array, and Buffer. + * + * Additional details may be found in the internal + * function "getPayloadHash" in the SignatureV4 module. + */ +export type NodeJsRuntimeStreamingBlobPayloadInputTypes = string | Uint8Array | Buffer | Readable; +/** + * @public + * + * Streaming payload input types in the browser environment. + * These are derived from the types compatible with fetch's Request.body. 
+ */ +export type BrowserRuntimeStreamingBlobPayloadInputTypes = string | Uint8Array | ReadableStreamOptionalType | BlobOptionalType; diff --git a/amplify/functions/deleteDocument/node_modules/@smithy/types/dist-types/streaming-payload/streaming-blob-payload-output-types.d.ts b/amplify/functions/deleteDocument/node_modules/@smithy/types/dist-types/streaming-payload/streaming-blob-payload-output-types.d.ts new file mode 100644 index 0000000..b64a878 --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@smithy/types/dist-types/streaming-payload/streaming-blob-payload-output-types.d.ts @@ -0,0 +1,53 @@ +/// +/// +import type { IncomingMessage } from "http"; +import type { Readable } from "stream"; +import type { BlobOptionalType, ReadableStreamOptionalType } from "../externals-check/browser-externals-check"; +import type { SdkStream } from "../serde"; +/** + * @public + * + * This union represents a superset of the types you may receive + * in streaming payload outputs. + * + * @see StreamingPayloadInputTypes for FAQ about mixing types from multiple environments. + * + * To highlight the upstream docs about the SdkStream mixin: + * + * The interface contains mix-in (via Object.assign) methods to transform the runtime-specific + * stream implementation to specified format. Each stream can ONLY be transformed + * once. + * + * The available methods are described on the SdkStream type via SdkStreamMixin. + */ +export type StreamingBlobPayloadOutputTypes = NodeJsRuntimeStreamingBlobPayloadOutputTypes | BrowserRuntimeStreamingBlobPayloadOutputTypes; +/** + * @public + * + * Streaming payload output types in the Node.js environment. + * + * This is by default the IncomingMessage type from node:http responses when + * using the default node-http-handler in Node.js environments. + * + * It can be other Readable types like node:http2's ClientHttp2Stream + * such as when using the node-http2-handler. 
+ * + * The SdkStreamMixin adds methods on this type to help transform (collect) it to + * other formats. + */ +export type NodeJsRuntimeStreamingBlobPayloadOutputTypes = SdkStream; +/** + * @public + * + * Streaming payload output types in the browser environment. + * + * This is by default fetch's Response.body type (ReadableStream) when using + * the default fetch-http-handler in browser-like environments. + * + * It may be a Blob, such as when using the XMLHttpRequest handler + * and receiving an arraybuffer response body. + * + * The SdkStreamMixin adds methods on this type to help transform (collect) it to + * other formats. + */ +export type BrowserRuntimeStreamingBlobPayloadOutputTypes = SdkStream; diff --git a/amplify/functions/deleteDocument/node_modules/@smithy/types/dist-types/transfer.d.ts b/amplify/functions/deleteDocument/node_modules/@smithy/types/dist-types/transfer.d.ts new file mode 100644 index 0000000..462ee23 --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@smithy/types/dist-types/transfer.d.ts @@ -0,0 +1,41 @@ +/** + * @public + */ +export type RequestHandlerOutput = { + response: ResponseType; +}; +/** + * @public + */ +export interface RequestHandler { + /** + * metadata contains information of a handler. For example + * 'h2' refers this handler is for handling HTTP/2 requests, + * whereas 'h1' refers handling HTTP1 requests + */ + metadata?: RequestHandlerMetadata; + destroy?: () => void; + handle: (request: RequestType, handlerOptions?: HandlerOptions) => Promise>; +} +/** + * @public + */ +export interface RequestHandlerMetadata { + handlerProtocol: RequestHandlerProtocol | string; +} +/** + * @public + * Values from ALPN Protocol IDs. 
+ * @see https://www.iana.org/assignments/tls-extensiontype-values/tls-extensiontype-values.xhtml#alpn-protocol-ids + */ +export declare enum RequestHandlerProtocol { + HTTP_0_9 = "http/0.9", + HTTP_1_0 = "http/1.0", + TDS_8_0 = "tds/8.0" +} +/** + * @public + */ +export interface RequestContext { + destination: URL; +} diff --git a/amplify/functions/deleteDocument/node_modules/@smithy/types/dist-types/transform/client-method-transforms.d.ts b/amplify/functions/deleteDocument/node_modules/@smithy/types/dist-types/transform/client-method-transforms.d.ts new file mode 100644 index 0000000..f9424c4 --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@smithy/types/dist-types/transform/client-method-transforms.d.ts @@ -0,0 +1,26 @@ +import type { CommandIO } from "../command"; +import type { MetadataBearer } from "../response"; +import type { StreamingBlobPayloadOutputTypes } from "../streaming-payload/streaming-blob-payload-output-types"; +import type { Transform } from "./type-transform"; +/** + * @internal + * + * Narrowed version of InvokeFunction used in Client::send. + */ +export interface NarrowedInvokeFunction { + (command: CommandIO, options?: HttpHandlerOptions): Promise>; + (command: CommandIO, cb: (err: unknown, data?: Transform) => void): void; + (command: CommandIO, options: HttpHandlerOptions, cb: (err: unknown, data?: Transform) => void): void; + (command: CommandIO, options?: HttpHandlerOptions, cb?: (err: unknown, data?: Transform) => void): Promise> | void; +} +/** + * @internal + * + * Narrowed version of InvokeMethod used in aggregated Client methods. 
+ */ +export interface NarrowedInvokeMethod { + (input: InputType, options?: HttpHandlerOptions): Promise>; + (input: InputType, cb: (err: unknown, data?: Transform) => void): void; + (input: InputType, options: HttpHandlerOptions, cb: (err: unknown, data?: Transform) => void): void; + (input: InputType, options?: HttpHandlerOptions, cb?: (err: unknown, data?: OutputType) => void): Promise> | void; +} diff --git a/amplify/functions/deleteDocument/node_modules/@smithy/types/dist-types/transform/client-payload-blob-type-narrow.d.ts b/amplify/functions/deleteDocument/node_modules/@smithy/types/dist-types/transform/client-payload-blob-type-narrow.d.ts new file mode 100644 index 0000000..243a40f --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@smithy/types/dist-types/transform/client-payload-blob-type-narrow.d.ts @@ -0,0 +1,79 @@ +/// +/// +import type { IncomingMessage } from "http"; +import type { ClientHttp2Stream } from "http2"; +import type { InvokeMethod } from "../client"; +import type { GetOutputType } from "../command"; +import type { HttpHandlerOptions } from "../http"; +import type { SdkStream } from "../serde"; +import type { BrowserRuntimeStreamingBlobPayloadInputTypes, NodeJsRuntimeStreamingBlobPayloadInputTypes, StreamingBlobPayloadInputTypes } from "../streaming-payload/streaming-blob-payload-input-types"; +import type { StreamingBlobPayloadOutputTypes } from "../streaming-payload/streaming-blob-payload-output-types"; +import type { NarrowedInvokeMethod } from "./client-method-transforms"; +import type { Transform } from "./type-transform"; +/** + * @public + * + * Creates a type with a given client type that narrows payload blob output + * types to SdkStream. + * + * This can be used for clients with the NodeHttpHandler requestHandler, + * the default in Node.js when not using HTTP2. 
+ * + * Usage example: + * ```typescript + * const client = new YourClient({}) as NodeJsClient; + * ``` + */ +export type NodeJsClient = NarrowPayloadBlobTypes, ClientType>; +/** + * @public + * Variant of NodeJsClient for node:http2. + */ +export type NodeJsHttp2Client = NarrowPayloadBlobTypes, ClientType>; +/** + * @public + * + * Creates a type with a given client type that narrows payload blob output + * types to SdkStream. + * + * This can be used for clients with the FetchHttpHandler requestHandler, + * which is the default in browser environments. + * + * Usage example: + * ```typescript + * const client = new YourClient({}) as BrowserClient; + * ``` + */ +export type BrowserClient = NarrowPayloadBlobTypes, ClientType>; +/** + * @public + * + * Variant of BrowserClient for XMLHttpRequest. + */ +export type BrowserXhrClient = NarrowPayloadBlobTypes, ClientType>; +/** + * @public + * + * @deprecated use NarrowPayloadBlobTypes. + * + * Narrow a given Client's blob payload outputs to the given type T. + */ +export type NarrowPayloadBlobOutputType = { + [key in keyof ClientType]: [ClientType[key]] extends [ + InvokeMethod + ] ? NarrowedInvokeMethod : ClientType[key]; +} & { + send(command: Command, options?: any): Promise, StreamingBlobPayloadOutputTypes | undefined, T>>; +}; +/** + * @public + * + * Narrow a Client's blob payload input and output types to I and O. + */ +export type NarrowPayloadBlobTypes = { + [key in keyof ClientType]: [ClientType[key]] extends [ + InvokeMethod + ] ? 
NarrowedInvokeMethod, FunctionOutputTypes> : ClientType[key]; +} & { + send(command: Command, options?: any): Promise, StreamingBlobPayloadOutputTypes | undefined, O>>; +}; diff --git a/amplify/functions/deleteDocument/node_modules/@smithy/types/dist-types/transform/exact.d.ts b/amplify/functions/deleteDocument/node_modules/@smithy/types/dist-types/transform/exact.d.ts new file mode 100644 index 0000000..c8a15d8 --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@smithy/types/dist-types/transform/exact.d.ts @@ -0,0 +1,6 @@ +/** + * @internal + * + * Checks that A and B extend each other. + */ +export type Exact = [A] extends [B] ? ([B] extends [A] ? true : false) : false; diff --git a/amplify/functions/deleteDocument/node_modules/@smithy/types/dist-types/transform/no-undefined.d.ts b/amplify/functions/deleteDocument/node_modules/@smithy/types/dist-types/transform/no-undefined.d.ts new file mode 100644 index 0000000..a0ec72e --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@smithy/types/dist-types/transform/no-undefined.d.ts @@ -0,0 +1,68 @@ +import type { InvokeMethod, InvokeMethodOptionalArgs } from "../client"; +import type { GetOutputType } from "../command"; +import type { DocumentType } from "../shapes"; +/** + * @public + * + * This type is intended as a type helper for generated clients. + * When initializing client, cast it to this type by passing + * the client constructor type as the type parameter. + * + * It will then recursively remove "undefined" as a union type from all + * input and output shapes' members. Note, this does not affect + * any member that is optional (?) such as outputs with no required members. 
+ * + * @example + * ```ts + * const client = new Client({}) as AssertiveClient; + * ``` + */ +export type AssertiveClient = NarrowClientIOTypes; +/** + * @public + * + * This is similar to AssertiveClient but additionally changes all + * output types to (recursive) Required so as to bypass all output nullability guards. + */ +export type UncheckedClient = UncheckedClientOutputTypes; +/** + * @internal + * + * Excludes undefined recursively. + */ +export type NoUndefined = T extends Function ? T : T extends DocumentType ? T : [T] extends [object] ? { + [key in keyof T]: NoUndefined; +} : Exclude; +/** + * @internal + * + * Excludes undefined and optional recursively. + */ +export type RecursiveRequired = T extends Function ? T : T extends DocumentType ? T : [T] extends [object] ? { + [key in keyof T]-?: RecursiveRequired; +} : Exclude; +/** + * @internal + * + * Removes undefined from unions. + */ +type NarrowClientIOTypes = { + [key in keyof ClientType]: [ClientType[key]] extends [ + InvokeMethodOptionalArgs + ] ? InvokeMethodOptionalArgs, NoUndefined> : [ClientType[key]] extends [InvokeMethod] ? InvokeMethod, NoUndefined> : ClientType[key]; +} & { + send(command: Command, options?: any): Promise>>; +}; +/** + * @internal + * + * Removes undefined from unions and adds yolo output types. + */ +type UncheckedClientOutputTypes = { + [key in keyof ClientType]: [ClientType[key]] extends [ + InvokeMethodOptionalArgs + ] ? InvokeMethodOptionalArgs, RecursiveRequired> : [ClientType[key]] extends [InvokeMethod] ? 
InvokeMethod, RecursiveRequired> : ClientType[key]; +} & { + send(command: Command, options?: any): Promise>>>; +}; +export {}; diff --git a/amplify/functions/deleteDocument/node_modules/@smithy/types/dist-types/transform/type-transform.d.ts b/amplify/functions/deleteDocument/node_modules/@smithy/types/dist-types/transform/type-transform.d.ts new file mode 100644 index 0000000..90373fb --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@smithy/types/dist-types/transform/type-transform.d.ts @@ -0,0 +1,34 @@ +/** + * @public + * + * Transforms any members of the object T having type FromType + * to ToType. This applies only to exact type matches. + * + * This is for the case where FromType is a union and only those fields + * matching the same union should be transformed. + */ +export type Transform = ConditionalRecursiveTransformExact; +/** + * @internal + * + * Returns ToType if T matches exactly with FromType. + */ +type TransformExact = [T] extends [FromType] ? ([FromType] extends [T] ? ToType : T) : T; +/** + * @internal + * + * Applies TransformExact to members of an object recursively. + */ +type RecursiveTransformExact = T extends Function ? T : T extends object ? { + [key in keyof T]: [T[key]] extends [FromType] ? [FromType] extends [T[key]] ? ToType : ConditionalRecursiveTransformExact : ConditionalRecursiveTransformExact; +} : TransformExact; +/** + * @internal + * + * Same as RecursiveTransformExact but does not assign to an object + * unless there is a matching transformed member. + */ +type ConditionalRecursiveTransformExact = [T] extends [ + RecursiveTransformExact +] ? [RecursiveTransformExact] extends [T] ? 
T : RecursiveTransformExact : RecursiveTransformExact; +export {}; diff --git a/amplify/functions/deleteDocument/node_modules/@smithy/types/dist-types/ts3.4/abort-handler.d.ts b/amplify/functions/deleteDocument/node_modules/@smithy/types/dist-types/ts3.4/abort-handler.d.ts new file mode 100644 index 0000000..26c068c --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@smithy/types/dist-types/ts3.4/abort-handler.d.ts @@ -0,0 +1,7 @@ +import { AbortSignal as DeprecatedAbortSignal } from "./abort"; +/** + * @public + */ +export interface AbortHandler { + (this: AbortSignal | DeprecatedAbortSignal, ev: any): any; +} diff --git a/amplify/functions/deleteDocument/node_modules/@smithy/types/dist-types/ts3.4/abort.d.ts b/amplify/functions/deleteDocument/node_modules/@smithy/types/dist-types/ts3.4/abort.d.ts new file mode 100644 index 0000000..00741af --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@smithy/types/dist-types/ts3.4/abort.d.ts @@ -0,0 +1,50 @@ +import { AbortHandler } from "./abort-handler"; +/** + * @public + */ +export { AbortHandler }; +/** + * @public + * @deprecated use platform (global) type for AbortSignal. + * + * Holders of an AbortSignal object may query if the associated operation has + * been aborted and register an onabort handler. + * + * @see https://developer.mozilla.org/en-US/docs/Web/API/AbortSignal + */ +export interface AbortSignal { + /** + * Whether the action represented by this signal has been cancelled. + */ + readonly aborted: boolean; + /** + * A function to be invoked when the action represented by this signal has + * been cancelled. + */ + onabort: AbortHandler | Function | null; +} +/** + * @public + * @deprecated use platform (global) type for AbortController. + * + * The AWS SDK uses a Controller/Signal model to allow for cooperative + * cancellation of asynchronous operations. 
When initiating such an operation, + * the caller can create an AbortController and then provide linked signal to + * subtasks. This allows a single source to communicate to multiple consumers + * that an action has been aborted without dictating how that cancellation + * should be handled. + * + * @see https://developer.mozilla.org/en-US/docs/Web/API/AbortController + */ +export interface AbortController { + /** + * An object that reports whether the action associated with this + * `AbortController` has been cancelled. + */ + readonly signal: AbortSignal; + /** + * Declares the operation associated with this AbortController to have been + * cancelled. + */ + abort(): void; +} diff --git a/amplify/functions/deleteDocument/node_modules/@smithy/types/dist-types/ts3.4/auth/HttpApiKeyAuth.d.ts b/amplify/functions/deleteDocument/node_modules/@smithy/types/dist-types/ts3.4/auth/HttpApiKeyAuth.d.ts new file mode 100644 index 0000000..380c8fc --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@smithy/types/dist-types/ts3.4/auth/HttpApiKeyAuth.d.ts @@ -0,0 +1,7 @@ +/** + * @internal + */ +export declare enum HttpApiKeyAuthLocation { + HEADER = "header", + QUERY = "query" +} diff --git a/amplify/functions/deleteDocument/node_modules/@smithy/types/dist-types/ts3.4/auth/HttpAuthScheme.d.ts b/amplify/functions/deleteDocument/node_modules/@smithy/types/dist-types/ts3.4/auth/HttpAuthScheme.d.ts new file mode 100644 index 0000000..e0d939e --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@smithy/types/dist-types/ts3.4/auth/HttpAuthScheme.d.ts @@ -0,0 +1,49 @@ +import { Identity, IdentityProvider } from "../identity/identity"; +import { HandlerExecutionContext } from "../middleware"; +import { HttpSigner } from "./HttpSigner"; +import { IdentityProviderConfig } from "./IdentityProviderConfig"; +/** + * ID for {@link HttpAuthScheme} + * @internal + */ +export type HttpAuthSchemeId = string; +/** + * Interface that defines an HttpAuthScheme + * 
@internal + */ +export interface HttpAuthScheme { + /** + * ID for an HttpAuthScheme, typically the absolute shape ID of a Smithy auth trait. + */ + schemeId: HttpAuthSchemeId; + /** + * Gets the IdentityProvider corresponding to an HttpAuthScheme. + */ + identityProvider(config: IdentityProviderConfig): IdentityProvider | undefined; + /** + * HttpSigner corresponding to an HttpAuthScheme. + */ + signer: HttpSigner; +} +/** + * Interface that defines the identity and signing properties when selecting + * an HttpAuthScheme. + * @internal + */ +export interface HttpAuthOption { + schemeId: HttpAuthSchemeId; + identityProperties?: Record; + signingProperties?: Record; + propertiesExtractor?: (config: TConfig, context: TContext) => { + identityProperties?: Record; + signingProperties?: Record; + }; +} +/** + * @internal + */ +export interface SelectedHttpAuthScheme { + httpAuthOption: HttpAuthOption; + identity: Identity; + signer: HttpSigner; +} diff --git a/amplify/functions/deleteDocument/node_modules/@smithy/types/dist-types/ts3.4/auth/HttpAuthSchemeProvider.d.ts b/amplify/functions/deleteDocument/node_modules/@smithy/types/dist-types/ts3.4/auth/HttpAuthSchemeProvider.d.ts new file mode 100644 index 0000000..d417aaf --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@smithy/types/dist-types/ts3.4/auth/HttpAuthSchemeProvider.d.ts @@ -0,0 +1,20 @@ +import { HandlerExecutionContext } from "../middleware"; +import { HttpAuthOption } from "./HttpAuthScheme"; +/** + * @internal + */ +export interface HttpAuthSchemeParameters { + operation?: string; +} +/** + * @internal + */ +export interface HttpAuthSchemeProvider { + (authParameters: TParameters): HttpAuthOption[]; +} +/** + * @internal + */ +export interface HttpAuthSchemeParametersProvider { + (config: TConfig, context: TContext, input: TInput): Promise; +} diff --git a/amplify/functions/deleteDocument/node_modules/@smithy/types/dist-types/ts3.4/auth/HttpSigner.d.ts 
b/amplify/functions/deleteDocument/node_modules/@smithy/types/dist-types/ts3.4/auth/HttpSigner.d.ts new file mode 100644 index 0000000..7abcf84 --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@smithy/types/dist-types/ts3.4/auth/HttpSigner.d.ts @@ -0,0 +1,41 @@ +import { HttpRequest, HttpResponse } from "../http"; +import { Identity } from "../identity/identity"; +/** + * @internal + */ +export interface ErrorHandler { + (signingProperties: Record): (error: E) => never; +} +/** + * @internal + */ +export interface SuccessHandler { + (httpResponse: HttpResponse | unknown, signingProperties: Record): void; +} +/** + * Interface to sign identity and signing properties. + * @internal + */ +export interface HttpSigner { + /** + * Signs an HttpRequest with an identity and signing properties. + * @param httpRequest request to sign + * @param identity identity to sing the request with + * @param signingProperties property bag for signing + * @returns signed request in a promise + */ + sign(httpRequest: HttpRequest, identity: Identity, signingProperties: Record): Promise; + /** + * Handler that executes after the {@link HttpSigner.sign} invocation and corresponding + * middleware throws an error. + * The error handler is expected to throw the error it receives, so the return type of the error handler is `never`. + * @internal + */ + errorHandler?: ErrorHandler; + /** + * Handler that executes after the {@link HttpSigner.sign} invocation and corresponding + * middleware succeeds. 
+ * @internal + */ + successHandler?: SuccessHandler; +} diff --git a/amplify/functions/deleteDocument/node_modules/@smithy/types/dist-types/ts3.4/auth/IdentityProviderConfig.d.ts b/amplify/functions/deleteDocument/node_modules/@smithy/types/dist-types/ts3.4/auth/IdentityProviderConfig.d.ts new file mode 100644 index 0000000..6a50f65 --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@smithy/types/dist-types/ts3.4/auth/IdentityProviderConfig.d.ts @@ -0,0 +1,14 @@ +import { Identity, IdentityProvider } from "../identity/identity"; +import { HttpAuthSchemeId } from "./HttpAuthScheme"; +/** + * Interface to get an IdentityProvider for a specified HttpAuthScheme + * @internal + */ +export interface IdentityProviderConfig { + /** + * Get the IdentityProvider for a specified HttpAuthScheme. + * @param schemeId schemeId of the HttpAuthScheme + * @returns IdentityProvider or undefined if HttpAuthScheme is not found + */ + getIdentityProvider(schemeId: HttpAuthSchemeId): IdentityProvider | undefined; +} diff --git a/amplify/functions/deleteDocument/node_modules/@smithy/types/dist-types/ts3.4/auth/auth.d.ts b/amplify/functions/deleteDocument/node_modules/@smithy/types/dist-types/ts3.4/auth/auth.d.ts new file mode 100644 index 0000000..8241fe3 --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@smithy/types/dist-types/ts3.4/auth/auth.d.ts @@ -0,0 +1,57 @@ +/** + * @internal + * + * Authentication schemes represent a way that the service will authenticate the customer’s identity. + */ +export interface AuthScheme { + /** + * @example "sigv4a" or "sigv4" + */ + name: "sigv4" | "sigv4a" | string; + /** + * @example "s3" + */ + signingName: string; + /** + * @example "us-east-1" + */ + signingRegion: string; + /** + * @example ["*"] + * @example ["us-west-2", "us-east-1"] + */ + signingRegionSet?: string[]; + /** + * @deprecated this field was renamed to signingRegion. 
+ */ + signingScope?: never; + properties: Record; +} +/** + * @internal + * @deprecated + */ +export interface HttpAuthDefinition { + /** + * Defines the location of where the Auth is serialized. + */ + in: HttpAuthLocation; + /** + * Defines the name of the HTTP header or query string parameter + * that contains the Auth. + */ + name: string; + /** + * Defines the security scheme to use on the `Authorization` header value. + * This can only be set if the "in" property is set to {@link HttpAuthLocation.HEADER}. + */ + scheme?: string; +} +/** + * @internal + * @deprecated + */ +export declare enum HttpAuthLocation { + HEADER = "header", + QUERY = "query" +} diff --git a/amplify/functions/deleteDocument/node_modules/@smithy/types/dist-types/ts3.4/auth/index.d.ts b/amplify/functions/deleteDocument/node_modules/@smithy/types/dist-types/ts3.4/auth/index.d.ts new file mode 100644 index 0000000..fbb845d --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@smithy/types/dist-types/ts3.4/auth/index.d.ts @@ -0,0 +1,6 @@ +export * from "./auth"; +export * from "./HttpApiKeyAuth"; +export * from "./HttpAuthScheme"; +export * from "./HttpAuthSchemeProvider"; +export * from "./HttpSigner"; +export * from "./IdentityProviderConfig"; diff --git a/amplify/functions/deleteDocument/node_modules/@smithy/types/dist-types/ts3.4/blob/blob-payload-input-types.d.ts b/amplify/functions/deleteDocument/node_modules/@smithy/types/dist-types/ts3.4/blob/blob-payload-input-types.d.ts new file mode 100644 index 0000000..465c9a3 --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@smithy/types/dist-types/ts3.4/blob/blob-payload-input-types.d.ts @@ -0,0 +1,41 @@ +/// +import { Readable } from "stream"; +import { BlobOptionalType, ReadableStreamOptionalType } from "../externals-check/browser-externals-check"; +/** + * @public + * + * A union of types that can be used as inputs for the service model + * "blob" type when it represents the request's entire payload or body. 
+ * + * For example, in Lambda::invoke, the payload is modeled as a blob type + * and this union applies to it. + * In contrast, in Lambda::createFunction the Zip file option is a blob type, + * but is not the (entire) payload and this union does not apply. + * + * Note: not all types are signable by the standard SignatureV4 signer when + * used as the request body. For example, in Node.js a Readable stream + * is not signable by the default signer. + * They are included in the union because it may work in some cases, + * but the expected types are primarily string and Uint8Array. + * + * Additional details may be found in the internal + * function "getPayloadHash" in the SignatureV4 module. + */ +export type BlobPayloadInputTypes = string | ArrayBuffer | ArrayBufferView | Uint8Array | NodeJsRuntimeBlobTypes | BrowserRuntimeBlobTypes; +/** + * @public + * + * Additional blob types for the Node.js environment. + */ +export type NodeJsRuntimeBlobTypes = Readable | Buffer; +/** + * @public + * + * Additional blob types for the browser environment. + */ +export type BrowserRuntimeBlobTypes = BlobOptionalType | ReadableStreamOptionalType; +/** + * @internal + * @deprecated renamed to BlobPayloadInputTypes. + */ +export type BlobTypes = BlobPayloadInputTypes; diff --git a/amplify/functions/deleteDocument/node_modules/@smithy/types/dist-types/ts3.4/checksum.d.ts b/amplify/functions/deleteDocument/node_modules/@smithy/types/dist-types/ts3.4/checksum.d.ts new file mode 100644 index 0000000..dbfff0c --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@smithy/types/dist-types/ts3.4/checksum.d.ts @@ -0,0 +1,63 @@ +import { SourceData } from "./crypto"; +/** + * @public + * + * An object that provides a checksum of data provided in chunks to `update`. + * The checksum may be performed incrementally as chunks are received or all + * at once when the checksum is finalized, depending on the underlying + * implementation. 
+ * + * It's recommended to compute checksum incrementally to avoid reading the + * entire payload in memory. + * + * A class that implements this interface may accept an optional secret key in its + * constructor while computing checksum value, when using HMAC. If provided, + * this secret key would be used when computing checksum. + */ +export interface Checksum { + /** + * Constant length of the digest created by the algorithm in bytes. + */ + digestLength?: number; + /** + * Creates a new checksum object that contains a deep copy of the internal + * state of the current `Checksum` object. + */ + copy?(): Checksum; + /** + * Returns the digest of all of the data passed. + */ + digest(): Promise; + /** + * Allows marking a checksum for checksums that support the ability + * to mark and reset. + * + * @param readLimit - The maximum limit of bytes that can be read + * before the mark position becomes invalid. + */ + mark?(readLimit: number): void; + /** + * Resets the checksum to its initial value. + */ + reset(): void; + /** + * Adds a chunk of data for which checksum needs to be computed. + * This can be called many times with new data as it is streamed. + * + * Implementations may override this method which passes second param + * which makes Checksum object stateless. + * + * @param chunk - The buffer to update checksum with. + */ + update(chunk: Uint8Array): void; +} +/** + * @public + * + * A constructor for a Checksum that may be used to calculate an HMAC. Implementing + * classes should not directly hold the provided key in memory beyond the + * lexical scope of the constructor. 
+ */ +export interface ChecksumConstructor { + new (secret?: SourceData): Checksum; +} diff --git a/amplify/functions/deleteDocument/node_modules/@smithy/types/dist-types/ts3.4/client.d.ts b/amplify/functions/deleteDocument/node_modules/@smithy/types/dist-types/ts3.4/client.d.ts new file mode 100644 index 0000000..1d05c04 --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@smithy/types/dist-types/ts3.4/client.d.ts @@ -0,0 +1,57 @@ +import { Command } from "./command"; +import { MiddlewareStack } from "./middleware"; +import { MetadataBearer } from "./response"; +import { OptionalParameter } from "./util"; +/** + * @public + * + * A type which checks if the client configuration is optional. + * If all entries of the client configuration are optional, it allows client creation without passing any config. + */ +export type CheckOptionalClientConfig = OptionalParameter; +/** + * @public + * + * function definition for different overrides of client's 'send' function. + */ +export interface InvokeFunction { + (command: Command, options?: any): Promise; + (command: Command, cb: (err: any, data?: OutputType) => void): void; + (command: Command, options: any, cb: (err: any, data?: OutputType) => void): void; + (command: Command, options?: any, cb?: (err: any, data?: OutputType) => void): Promise | void; +} +/** + * @public + * + * Signature that appears on aggregated clients' methods. + */ +export interface InvokeMethod { + (input: InputType, options?: any): Promise; + (input: InputType, cb: (err: any, data?: OutputType) => void): void; + (input: InputType, options: any, cb: (err: any, data?: OutputType) => void): void; + (input: InputType, options?: any, cb?: (err: any, data?: OutputType) => void): Promise | void; +} +/** + * @public + * + * Signature that appears on aggregated clients' methods when argument is optional. 
+ */ +export interface InvokeMethodOptionalArgs { + (): Promise; + (input: InputType, options?: any): Promise; + (input: InputType, cb: (err: any, data?: OutputType) => void): void; + (input: InputType, options: any, cb: (err: any, data?: OutputType) => void): void; + (input: InputType, options?: any, cb?: (err: any, data?: OutputType) => void): Promise | void; +} +/** + * A general interface for service clients, idempotent to browser or node clients + * This type corresponds to SmithyClient(https://github.com/aws/aws-sdk-js-v3/blob/main/packages/smithy-client/src/client.ts). + * It's provided for using without importing the SmithyClient class. + * @internal + */ +export interface Client { + readonly config: ResolvedClientConfiguration; + middlewareStack: MiddlewareStack; + send: InvokeFunction; + destroy: () => void; +} diff --git a/amplify/functions/deleteDocument/node_modules/@smithy/types/dist-types/ts3.4/command.d.ts b/amplify/functions/deleteDocument/node_modules/@smithy/types/dist-types/ts3.4/command.d.ts new file mode 100644 index 0000000..fb7c5b6 --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@smithy/types/dist-types/ts3.4/command.d.ts @@ -0,0 +1,23 @@ +import { Handler, MiddlewareStack } from "./middleware"; +import { MetadataBearer } from "./response"; +/** + * @public + */ +export interface Command extends CommandIO { + readonly input: InputType; + readonly middlewareStack: MiddlewareStack; + resolveMiddleware(stack: MiddlewareStack, configuration: ResolvedConfiguration, options: any): Handler; +} +/** + * @internal + * + * This is a subset of the Command type used only to detect the i/o types. + */ +export interface CommandIO { + readonly input: InputType; + resolveMiddleware(stack: any, configuration: any, options: any): Handler; +} +/** + * @internal + */ +export type GetOutputType = Command extends CommandIO ? 
O : never; diff --git a/amplify/functions/deleteDocument/node_modules/@smithy/types/dist-types/ts3.4/connection/config.d.ts b/amplify/functions/deleteDocument/node_modules/@smithy/types/dist-types/ts3.4/connection/config.d.ts new file mode 100644 index 0000000..09ed18b --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@smithy/types/dist-types/ts3.4/connection/config.d.ts @@ -0,0 +1,10 @@ +/** + * @public + */ +export interface ConnectConfiguration { + /** + * The maximum time in milliseconds that the connection phase of a request + * may take before the connection attempt is abandoned. + */ + requestTimeout?: number; +} diff --git a/amplify/functions/deleteDocument/node_modules/@smithy/types/dist-types/ts3.4/connection/index.d.ts b/amplify/functions/deleteDocument/node_modules/@smithy/types/dist-types/ts3.4/connection/index.d.ts new file mode 100644 index 0000000..eaacf8b --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@smithy/types/dist-types/ts3.4/connection/index.d.ts @@ -0,0 +1,3 @@ +export * from "./config"; +export * from "./manager"; +export * from "./pool"; diff --git a/amplify/functions/deleteDocument/node_modules/@smithy/types/dist-types/ts3.4/connection/manager.d.ts b/amplify/functions/deleteDocument/node_modules/@smithy/types/dist-types/ts3.4/connection/manager.d.ts new file mode 100644 index 0000000..7245028 --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@smithy/types/dist-types/ts3.4/connection/manager.d.ts @@ -0,0 +1,34 @@ +import { RequestContext } from "../transfer"; +import { ConnectConfiguration } from "./config"; +/** + * @public + */ +export interface ConnectionManagerConfiguration { + /** + * Maximum number of allowed concurrent requests per connection. + */ + maxConcurrency?: number; + /** + * Disables concurrent requests per connection. 
+ */ + disableConcurrency?: boolean; +} +/** + * @public + */ +export interface ConnectionManager { + /** + * Retrieves a connection from the connection pool if available, + * otherwise establish a new connection + */ + lease(requestContext: RequestContext, connectionConfiguration: ConnectConfiguration): T; + /** + * Releases the connection back to the pool making it potentially + * re-usable by other requests. + */ + release(requestContext: RequestContext, connection: T): void; + /** + * Destroys the connection manager. All connections will be closed. + */ + destroy(): void; +} diff --git a/amplify/functions/deleteDocument/node_modules/@smithy/types/dist-types/ts3.4/connection/pool.d.ts b/amplify/functions/deleteDocument/node_modules/@smithy/types/dist-types/ts3.4/connection/pool.d.ts new file mode 100644 index 0000000..161094f --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@smithy/types/dist-types/ts3.4/connection/pool.d.ts @@ -0,0 +1,32 @@ +/** + * @public + */ +export interface ConnectionPool { + /** + * Retrieve the first connection in the pool + */ + poll(): T | void; + /** + * Release the connection back to the pool making it potentially + * re-usable by other requests. + */ + offerLast(connection: T): void; + /** + * Removes the connection from the pool, and destroys it. + */ + destroy(connection: T): void; + /** + * Implements the iterable protocol and allows arrays to be consumed + * by most syntaxes expecting iterables, such as the spread syntax + * and for...of loops + */ + [Symbol.iterator](): Iterator; +} +/** + * Unused. 
+ * @internal + * @deprecated + */ +export interface CacheKey { + destination: string; +} diff --git a/amplify/functions/deleteDocument/node_modules/@smithy/types/dist-types/ts3.4/crypto.d.ts b/amplify/functions/deleteDocument/node_modules/@smithy/types/dist-types/ts3.4/crypto.d.ts new file mode 100644 index 0000000..467ec86 --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@smithy/types/dist-types/ts3.4/crypto.d.ts @@ -0,0 +1,60 @@ +/** + * @public + */ +export type SourceData = string | ArrayBuffer | ArrayBufferView; +/** + * @public + * + * An object that provides a hash of data provided in chunks to `update`. The + * hash may be performed incrementally as chunks are received or all at once + * when the hash is finalized, depending on the underlying implementation. + * + * @deprecated use {@link Checksum} + */ +export interface Hash { + /** + * Adds a chunk of data to the hash. If a buffer is provided, the `encoding` + * argument will be ignored. If a string is provided without a specified + * encoding, implementations must assume UTF-8 encoding. + * + * Not all encodings are supported on all platforms, though all must support + * UTF-8. + */ + update(toHash: SourceData, encoding?: "utf8" | "ascii" | "latin1"): void; + /** + * Finalizes the hash and provides a promise that will be fulfilled with the + * raw bytes of the calculated hash. + */ + digest(): Promise; +} +/** + * @public + * + * A constructor for a hash that may be used to calculate an HMAC. Implementing + * classes should not directly hold the provided key in memory beyond the + * lexical scope of the constructor. + * + * @deprecated use {@link ChecksumConstructor} + */ +export interface HashConstructor { + new (secret?: SourceData): Hash; +} +/** + * @public + * + * A function that calculates the hash of a data stream. Determining the hash + * will consume the stream, so only replayable streams should be provided to an + * implementation of this interface. 
+ */ +export interface StreamHasher { + (hashCtor: HashConstructor, stream: StreamType): Promise; +} +/** + * @public + * + * A function that returns a promise fulfilled with bytes from a + * cryptographically secure pseudorandom number generator. + */ +export interface randomValues { + (byteLength: number): Promise; +} diff --git a/amplify/functions/deleteDocument/node_modules/@smithy/types/dist-types/ts3.4/downlevel-ts3.4/transform/type-transform.d.ts b/amplify/functions/deleteDocument/node_modules/@smithy/types/dist-types/ts3.4/downlevel-ts3.4/transform/type-transform.d.ts new file mode 100644 index 0000000..547303f --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@smithy/types/dist-types/ts3.4/downlevel-ts3.4/transform/type-transform.d.ts @@ -0,0 +1,41 @@ +/** + * @public + * + * Transforms any members of the object T having type FromType + * to ToType. This applies only to exact type matches. + * + * This is for the case where FromType is a union and only those fields + * matching the same union should be transformed. + */ +export type Transform = RecursiveTransformExact; +/** + * @internal + * + * Returns ToType if T matches exactly with FromType. + */ +type TransformExact = [ + T +] extends [ + FromType +] ? ([ + FromType +] extends [ + T +] ? ToType : T) : T; +/** + * @internal + * + * Applies TransformExact to members of an object recursively. + */ +type RecursiveTransformExact = T extends Function ? T : T extends object ? { + [key in keyof T]: [ + T[key] + ] extends [ + FromType + ] ? [ + FromType + ] extends [ + T[key] + ] ? 
ToType : RecursiveTransformExact : RecursiveTransformExact; +} : TransformExact; +export {}; diff --git a/amplify/functions/deleteDocument/node_modules/@smithy/types/dist-types/ts3.4/encode.d.ts b/amplify/functions/deleteDocument/node_modules/@smithy/types/dist-types/ts3.4/encode.d.ts new file mode 100644 index 0000000..4714bf9 --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@smithy/types/dist-types/ts3.4/encode.d.ts @@ -0,0 +1,31 @@ +import { Message } from "./eventStream"; +/** + * @public + */ +export interface MessageEncoder { + encode(message: Message): Uint8Array; +} +/** + * @public + */ +export interface MessageDecoder { + decode(message: ArrayBufferView): Message; + feed(message: ArrayBufferView): void; + endOfStream(): void; + getMessage(): AvailableMessage; + getAvailableMessages(): AvailableMessages; +} +/** + * @public + */ +export interface AvailableMessage { + getMessage(): Message | undefined; + isEndOfStream(): boolean; +} +/** + * @public + */ +export interface AvailableMessages { + getMessages(): Message[]; + isEndOfStream(): boolean; +} diff --git a/amplify/functions/deleteDocument/node_modules/@smithy/types/dist-types/ts3.4/endpoint.d.ts b/amplify/functions/deleteDocument/node_modules/@smithy/types/dist-types/ts3.4/endpoint.d.ts new file mode 100644 index 0000000..a1221ee --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@smithy/types/dist-types/ts3.4/endpoint.d.ts @@ -0,0 +1,77 @@ +import { AuthScheme } from "./auth/auth"; +/** + * @public + */ +export interface EndpointPartition { + name: string; + dnsSuffix: string; + dualStackDnsSuffix: string; + supportsFIPS: boolean; + supportsDualStack: boolean; +} +/** + * @public + */ +export interface EndpointARN { + partition: string; + service: string; + region: string; + accountId: string; + resourceId: Array; +} +/** + * @public + */ +export declare enum EndpointURLScheme { + HTTP = "http", + HTTPS = "https" +} +/** + * @public + */ +export interface EndpointURL { 
+ /** + * The URL scheme such as http or https. + */ + scheme: EndpointURLScheme; + /** + * The authority is the host and optional port component of the URL. + */ + authority: string; + /** + * The parsed path segment of the URL. + * This value is as-is as provided by the user. + */ + path: string; + /** + * The parsed path segment of the URL. + * This value is guranteed to start and end with a "/". + */ + normalizedPath: string; + /** + * A boolean indicating whether the authority is an IP address. + */ + isIp: boolean; +} +/** + * @public + */ +export type EndpointObjectProperty = string | boolean | { + [key: string]: EndpointObjectProperty; +} | EndpointObjectProperty[]; +/** + * @public + */ +export interface EndpointV2 { + url: URL; + properties?: { + authSchemes?: AuthScheme[]; + } & Record; + headers?: Record; +} +/** + * @public + */ +export type EndpointParameters = { + [name: string]: undefined | boolean | string | string[]; +}; diff --git a/amplify/functions/deleteDocument/node_modules/@smithy/types/dist-types/ts3.4/endpoints/EndpointRuleObject.d.ts b/amplify/functions/deleteDocument/node_modules/@smithy/types/dist-types/ts3.4/endpoints/EndpointRuleObject.d.ts new file mode 100644 index 0000000..2c8026b --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@smithy/types/dist-types/ts3.4/endpoints/EndpointRuleObject.d.ts @@ -0,0 +1,27 @@ +import { EndpointObjectProperty } from "../endpoint"; +import { ConditionObject, Expression } from "./shared"; +/** + * @public + */ +export type EndpointObjectProperties = Record; +/** + * @public + */ +export type EndpointObjectHeaders = Record; +/** + * @public + */ +export type EndpointObject = { + url: Expression; + properties?: EndpointObjectProperties; + headers?: EndpointObjectHeaders; +}; +/** + * @public + */ +export type EndpointRuleObject = { + type: "endpoint"; + conditions?: ConditionObject[]; + endpoint: EndpointObject; + documentation?: string; +}; diff --git 
a/amplify/functions/deleteDocument/node_modules/@smithy/types/dist-types/ts3.4/endpoints/ErrorRuleObject.d.ts b/amplify/functions/deleteDocument/node_modules/@smithy/types/dist-types/ts3.4/endpoints/ErrorRuleObject.d.ts new file mode 100644 index 0000000..98fc7a8 --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@smithy/types/dist-types/ts3.4/endpoints/ErrorRuleObject.d.ts @@ -0,0 +1,10 @@ +import { ConditionObject, Expression } from "./shared"; +/** + * @public + */ +export type ErrorRuleObject = { + type: "error"; + conditions?: ConditionObject[]; + error: Expression; + documentation?: string; +}; diff --git a/amplify/functions/deleteDocument/node_modules/@smithy/types/dist-types/ts3.4/endpoints/RuleSetObject.d.ts b/amplify/functions/deleteDocument/node_modules/@smithy/types/dist-types/ts3.4/endpoints/RuleSetObject.d.ts new file mode 100644 index 0000000..e749fba --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@smithy/types/dist-types/ts3.4/endpoints/RuleSetObject.d.ts @@ -0,0 +1,28 @@ +import { RuleSetRules } from "./TreeRuleObject"; +/** + * @public + */ +export type DeprecatedObject = { + message?: string; + since?: string; +}; +/** + * @public + */ +export type ParameterObject = { + type: "String" | "string" | "Boolean" | "boolean"; + default?: string | boolean; + required?: boolean; + documentation?: string; + builtIn?: string; + deprecated?: DeprecatedObject; +}; +/** + * @public + */ +export type RuleSetObject = { + version: string; + serviceId?: string; + parameters: Record; + rules: RuleSetRules; +}; diff --git a/amplify/functions/deleteDocument/node_modules/@smithy/types/dist-types/ts3.4/endpoints/TreeRuleObject.d.ts b/amplify/functions/deleteDocument/node_modules/@smithy/types/dist-types/ts3.4/endpoints/TreeRuleObject.d.ts new file mode 100644 index 0000000..c203eed --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@smithy/types/dist-types/ts3.4/endpoints/TreeRuleObject.d.ts @@ -0,0 +1,16 @@ +import { 
EndpointRuleObject } from "./EndpointRuleObject"; +import { ErrorRuleObject } from "./ErrorRuleObject"; +import { ConditionObject } from "./shared"; +/** + * @public + */ +export type RuleSetRules = Array; +/** + * @public + */ +export type TreeRuleObject = { + type: "tree"; + conditions?: ConditionObject[]; + rules: RuleSetRules; + documentation?: string; +}; diff --git a/amplify/functions/deleteDocument/node_modules/@smithy/types/dist-types/ts3.4/endpoints/index.d.ts b/amplify/functions/deleteDocument/node_modules/@smithy/types/dist-types/ts3.4/endpoints/index.d.ts new file mode 100644 index 0000000..8a29789 --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@smithy/types/dist-types/ts3.4/endpoints/index.d.ts @@ -0,0 +1,5 @@ +export * from "./EndpointRuleObject"; +export * from "./ErrorRuleObject"; +export * from "./RuleSetObject"; +export * from "./shared"; +export * from "./TreeRuleObject"; diff --git a/amplify/functions/deleteDocument/node_modules/@smithy/types/dist-types/ts3.4/endpoints/shared.d.ts b/amplify/functions/deleteDocument/node_modules/@smithy/types/dist-types/ts3.4/endpoints/shared.d.ts new file mode 100644 index 0000000..1c5d4b6 --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@smithy/types/dist-types/ts3.4/endpoints/shared.d.ts @@ -0,0 +1,55 @@ +import { Logger } from "../logger"; +/** + * @public + */ +export type ReferenceObject = { + ref: string; +}; +/** + * @public + */ +export type FunctionObject = { + fn: string; + argv: FunctionArgv; +}; +/** + * @public + */ +export type FunctionArgv = Array; +/** + * @public + */ +export type FunctionReturn = string | boolean | number | { + [key: string]: FunctionReturn; +}; +/** + * @public + */ +export type ConditionObject = FunctionObject & { + assign?: string; +}; +/** + * @public + */ +export type Expression = string | ReferenceObject | FunctionObject; +/** + * @public + */ +export type EndpointParams = Record; +/** + * @public + */ +export type 
EndpointResolverOptions = { + endpointParams: EndpointParams; + logger?: Logger; +}; +/** + * @public + */ +export type ReferenceRecord = Record; +/** + * @public + */ +export type EvaluateOptions = EndpointResolverOptions & { + referenceRecord: ReferenceRecord; +}; diff --git a/amplify/functions/deleteDocument/node_modules/@smithy/types/dist-types/ts3.4/eventStream.d.ts b/amplify/functions/deleteDocument/node_modules/@smithy/types/dist-types/ts3.4/eventStream.d.ts new file mode 100644 index 0000000..49c37c7 --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@smithy/types/dist-types/ts3.4/eventStream.d.ts @@ -0,0 +1,137 @@ +import { HttpRequest } from "./http"; +import { FinalizeHandler, FinalizeHandlerArguments, FinalizeHandlerOutput, HandlerExecutionContext } from "./middleware"; +import { MetadataBearer } from "./response"; +/** + * @public + * + * An event stream message. The headers and body properties will always be + * defined, with empty headers represented as an object with no keys and an + * empty body represented as a zero-length Uint8Array. 
+ */ +export interface Message { + headers: MessageHeaders; + body: Uint8Array; +} +/** + * @public + */ +export type MessageHeaders = Record; +/** + * @public + */ +export type HeaderValue = { + type: K; + value: V; +}; +/** + * @public + */ +export type BooleanHeaderValue = HeaderValue<"boolean", boolean>; +/** + * @public + */ +export type ByteHeaderValue = HeaderValue<"byte", number>; +/** + * @public + */ +export type ShortHeaderValue = HeaderValue<"short", number>; +/** + * @public + */ +export type IntegerHeaderValue = HeaderValue<"integer", number>; +/** + * @public + */ +export type LongHeaderValue = HeaderValue<"long", Int64>; +/** + * @public + */ +export type BinaryHeaderValue = HeaderValue<"binary", Uint8Array>; +/** + * @public + */ +export type StringHeaderValue = HeaderValue<"string", string>; +/** + * @public + */ +export type TimestampHeaderValue = HeaderValue<"timestamp", Date>; +/** + * @public + */ +export type UuidHeaderValue = HeaderValue<"uuid", string>; +/** + * @public + */ +export type MessageHeaderValue = BooleanHeaderValue | ByteHeaderValue | ShortHeaderValue | IntegerHeaderValue | LongHeaderValue | BinaryHeaderValue | StringHeaderValue | TimestampHeaderValue | UuidHeaderValue; +/** + * @public + */ +export interface Int64 { + readonly bytes: Uint8Array; + valueOf: () => number; + toString: () => string; +} +/** + * @public + * + * Util functions for serializing or deserializing event stream + */ +export interface EventStreamSerdeContext { + eventStreamMarshaller: EventStreamMarshaller; +} +/** + * @public + * + * A function which deserializes binary event stream message into modeled shape. + */ +export interface EventStreamMarshallerDeserFn { + (body: StreamType, deserializer: (input: Record) => Promise): AsyncIterable; +} +/** + * @public + * + * A function that serializes modeled shape into binary stream message. 
+ */ +export interface EventStreamMarshallerSerFn { + (input: AsyncIterable, serializer: (event: T) => Message): StreamType; +} +/** + * @public + * + * An interface which provides functions for serializing and deserializing binary event stream + * to/from corresponsing modeled shape. + */ +export interface EventStreamMarshaller { + deserialize: EventStreamMarshallerDeserFn; + serialize: EventStreamMarshallerSerFn; +} +/** + * @public + */ +export interface EventStreamRequestSigner { + sign(request: HttpRequest): Promise; +} +/** + * @public + */ +export interface EventStreamPayloadHandler { + handle: (next: FinalizeHandler, args: FinalizeHandlerArguments, context?: HandlerExecutionContext) => Promise>; +} +/** + * @public + */ +export interface EventStreamPayloadHandlerProvider { + (options: any): EventStreamPayloadHandler; +} +/** + * @public + */ +export interface EventStreamSerdeProvider { + (options: any): EventStreamMarshaller; +} +/** + * @public + */ +export interface EventStreamSignerProvider { + (options: any): EventStreamRequestSigner; +} diff --git a/amplify/functions/deleteDocument/node_modules/@smithy/types/dist-types/ts3.4/extensions/checksum.d.ts b/amplify/functions/deleteDocument/node_modules/@smithy/types/dist-types/ts3.4/extensions/checksum.d.ts new file mode 100644 index 0000000..8ebbf00 --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@smithy/types/dist-types/ts3.4/extensions/checksum.d.ts @@ -0,0 +1,58 @@ +import { ChecksumConstructor } from "../checksum"; +import { HashConstructor } from "../crypto"; +/** + * @internal + */ +export declare enum AlgorithmId { + MD5 = "md5", + CRC32 = "crc32", + CRC32C = "crc32c", + SHA1 = "sha1", + SHA256 = "sha256" +} +/** + * @internal + */ +export interface ChecksumAlgorithm { + algorithmId(): AlgorithmId; + checksumConstructor(): ChecksumConstructor | HashConstructor; +} +/** + * @deprecated unused. 
+ * @internal + */ +type ChecksumConfigurationLegacy = { + [other in string | number]: any; +}; +/** + * @internal + */ +export interface ChecksumConfiguration extends ChecksumConfigurationLegacy { + addChecksumAlgorithm(algo: ChecksumAlgorithm): void; + checksumAlgorithms(): ChecksumAlgorithm[]; +} +/** + * @deprecated will be removed for implicit type. + * @internal + */ +type GetChecksumConfigurationType = (runtimeConfig: Partial<{ + sha256: ChecksumConstructor | HashConstructor; + md5: ChecksumConstructor | HashConstructor; +}>) => ChecksumConfiguration; +/** + * @internal + * @deprecated will be moved to smithy-client. + */ +export declare const getChecksumConfiguration: GetChecksumConfigurationType; +/** + * @internal + * @deprecated will be removed for implicit type. + */ +type ResolveChecksumRuntimeConfigType = (clientConfig: ChecksumConfiguration) => any; +/** + * @internal + * + * @deprecated will be moved to smithy-client. + */ +export declare const resolveChecksumRuntimeConfig: ResolveChecksumRuntimeConfigType; +export {}; diff --git a/amplify/functions/deleteDocument/node_modules/@smithy/types/dist-types/ts3.4/extensions/defaultClientConfiguration.d.ts b/amplify/functions/deleteDocument/node_modules/@smithy/types/dist-types/ts3.4/extensions/defaultClientConfiguration.d.ts new file mode 100644 index 0000000..40458b4 --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@smithy/types/dist-types/ts3.4/extensions/defaultClientConfiguration.d.ts @@ -0,0 +1,33 @@ +import { ChecksumConfiguration } from "./checksum"; +/** + * @deprecated will be replaced by DefaultExtensionConfiguration. + * @internal + * + * Default client configuration consisting various configurations for modifying a service client + */ +export interface DefaultClientConfiguration extends ChecksumConfiguration { +} +/** + * @deprecated will be removed for implicit type. 
+ */ +type GetDefaultConfigurationType = (runtimeConfig: any) => DefaultClientConfiguration; +/** + * @deprecated moving to @smithy/smithy-client. + * @internal + * + * Helper function to resolve default client configuration from runtime config + * + */ +export declare const getDefaultClientConfiguration: GetDefaultConfigurationType; +/** + * @deprecated will be removed for implicit type. + */ +type ResolveDefaultRuntimeConfigType = (clientConfig: DefaultClientConfiguration) => any; +/** + * @deprecated moving to @smithy/smithy-client. + * @internal + * + * Helper function to resolve runtime config from default client configuration + */ +export declare const resolveDefaultRuntimeConfig: ResolveDefaultRuntimeConfigType; +export {}; diff --git a/amplify/functions/deleteDocument/node_modules/@smithy/types/dist-types/ts3.4/extensions/defaultExtensionConfiguration.d.ts b/amplify/functions/deleteDocument/node_modules/@smithy/types/dist-types/ts3.4/extensions/defaultExtensionConfiguration.d.ts new file mode 100644 index 0000000..55f5137 --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@smithy/types/dist-types/ts3.4/extensions/defaultExtensionConfiguration.d.ts @@ -0,0 +1,9 @@ +import { ChecksumConfiguration } from "./checksum"; +import { RetryStrategyConfiguration } from "./retry"; +/** + * @internal + * + * Default extension configuration consisting various configurations for modifying a service client + */ +export interface DefaultExtensionConfiguration extends ChecksumConfiguration, RetryStrategyConfiguration { +} diff --git a/amplify/functions/deleteDocument/node_modules/@smithy/types/dist-types/ts3.4/extensions/index.d.ts b/amplify/functions/deleteDocument/node_modules/@smithy/types/dist-types/ts3.4/extensions/index.d.ts new file mode 100644 index 0000000..55edb16 --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@smithy/types/dist-types/ts3.4/extensions/index.d.ts @@ -0,0 +1,4 @@ +export * from "./defaultClientConfiguration"; +export 
* from "./defaultExtensionConfiguration"; +export { AlgorithmId, ChecksumAlgorithm, ChecksumConfiguration } from "./checksum"; +export { RetryStrategyConfiguration } from "./retry"; diff --git a/amplify/functions/deleteDocument/node_modules/@smithy/types/dist-types/ts3.4/extensions/retry.d.ts b/amplify/functions/deleteDocument/node_modules/@smithy/types/dist-types/ts3.4/extensions/retry.d.ts new file mode 100644 index 0000000..3471d08 --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@smithy/types/dist-types/ts3.4/extensions/retry.d.ts @@ -0,0 +1,18 @@ +import { RetryStrategyV2 } from "../retry"; +import { Provider, RetryStrategy } from "../util"; +/** + * A configuration interface with methods called by runtime extension + * @internal + */ +export interface RetryStrategyConfiguration { + /** + * Set retry strategy used for all http requests + * @param retryStrategy + */ + setRetryStrategy(retryStrategy: Provider): void; + /** + * Get retry strategy used for all http requests + * @param retryStrategy + */ + retryStrategy(): Provider; +} diff --git a/amplify/functions/deleteDocument/node_modules/@smithy/types/dist-types/ts3.4/externals-check/browser-externals-check.d.ts b/amplify/functions/deleteDocument/node_modules/@smithy/types/dist-types/ts3.4/externals-check/browser-externals-check.d.ts new file mode 100644 index 0000000..b709d7f --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@smithy/types/dist-types/ts3.4/externals-check/browser-externals-check.d.ts @@ -0,0 +1,35 @@ +import { Exact } from "../transform/exact"; +/** + * @public + * + * A checked type that resolves to Blob if it is defined as more than a stub, otherwise + * resolves to 'never' so as not to widen the type of unions containing Blob + * excessively. + */ +export type BlobOptionalType = BlobDefined extends true ? 
Blob : Unavailable; +/** + * @public + * + * A checked type that resolves to ReadableStream if it is defined as more than a stub, otherwise + * resolves to 'never' so as not to widen the type of unions containing ReadableStream + * excessively. + */ +export type ReadableStreamOptionalType = ReadableStreamDefined extends true ? ReadableStream : Unavailable; +/** + * @public + * + * Indicates a type is unavailable if it resolves to this. + */ +export type Unavailable = never; +/** + * @internal + * + * Whether the global types define more than a stub for ReadableStream. + */ +export type ReadableStreamDefined = Exact extends true ? false : true; +/** + * @internal + * + * Whether the global types define more than a stub for Blob. + */ +export type BlobDefined = Exact extends true ? false : true; diff --git a/amplify/functions/deleteDocument/node_modules/@smithy/types/dist-types/ts3.4/feature-ids.d.ts b/amplify/functions/deleteDocument/node_modules/@smithy/types/dist-types/ts3.4/feature-ids.d.ts new file mode 100644 index 0000000..1a2c157 --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@smithy/types/dist-types/ts3.4/feature-ids.d.ts @@ -0,0 +1,16 @@ +/** + * @internal + */ +export type SmithyFeatures = Partial<{ + RESOURCE_MODEL: "A"; + WAITER: "B"; + PAGINATOR: "C"; + RETRY_MODE_LEGACY: "D"; + RETRY_MODE_STANDARD: "E"; + RETRY_MODE_ADAPTIVE: "F"; + GZIP_REQUEST_COMPRESSION: "L"; + PROTOCOL_RPC_V2_CBOR: "M"; + ENDPOINT_OVERRIDE: "N"; + SIGV4A_SIGNING: "S"; + CREDENTIALS_CODE: "e"; +}>; diff --git a/amplify/functions/deleteDocument/node_modules/@smithy/types/dist-types/ts3.4/http.d.ts b/amplify/functions/deleteDocument/node_modules/@smithy/types/dist-types/ts3.4/http.d.ts new file mode 100644 index 0000000..1e47e4e --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@smithy/types/dist-types/ts3.4/http.d.ts @@ -0,0 +1,112 @@ +import { AbortSignal as DeprecatedAbortSignal } from "./abort"; +import { URI } from "./uri"; +/** + * @public + * 
+ * @deprecated use {@link EndpointV2} from `@smithy/types`. + */ +export interface Endpoint { + protocol: string; + hostname: string; + port?: number; + path: string; + query?: QueryParameterBag; +} +/** + * @public + * + * Interface an HTTP request class. Contains + * addressing information in addition to standard message properties. + */ +export interface HttpRequest extends HttpMessage, URI { + method: string; +} +/** + * @public + * + * Represents an HTTP message as received in reply to a request. Contains a + * numeric status code in addition to standard message properties. + */ +export interface HttpResponse extends HttpMessage { + statusCode: number; + reason?: string; +} +/** + * @public + * + * Represents an HTTP message with headers and an optional static or streaming + * body. body: ArrayBuffer | ArrayBufferView | string | Uint8Array | Readable | ReadableStream; + */ +export interface HttpMessage { + headers: HeaderBag; + body?: any; +} +/** + * @public + * + * A mapping of query parameter names to strings or arrays of strings, with the + * second being used when a parameter contains a list of values. Value can be set + * to null when query is not in key-value pairs shape + */ +export type QueryParameterBag = Record | null>; +/** + * @public + */ +export type FieldOptions = { + name: string; + kind?: FieldPosition; + values?: string[]; +}; +/** + * @public + */ +export declare enum FieldPosition { + HEADER = 0, + TRAILER = 1 +} +/** + * @public + * + * A mapping of header names to string values. Multiple values for the same + * header should be represented as a single string with values separated by + * `, `. + * + * Keys should be considered case insensitive, even if this is not enforced by a + * particular implementation. 
For example, given the following HeaderBag, where + * keys differ only in case: + * + * ```json + * { + * 'x-request-date': '2000-01-01T00:00:00Z', + * 'X-Request-Date': '2001-01-01T00:00:00Z' + * } + * ``` + * + * The SDK may at any point during processing remove one of the object + * properties in favor of the other. The headers may or may not be combined, and + * the SDK will not deterministically select which header candidate to use. + */ +export type HeaderBag = Record; +/** + * @public + * + * Represents an HTTP message with headers and an optional static or streaming + * body. bode: ArrayBuffer | ArrayBufferView | string | Uint8Array | Readable | ReadableStream; + */ +export interface HttpMessage { + headers: HeaderBag; + body?: any; +} +/** + * @public + * + * Represents the options that may be passed to an Http Handler. + */ +export interface HttpHandlerOptions { + abortSignal?: AbortSignal | DeprecatedAbortSignal; + /** + * The maximum time in milliseconds that the connection phase of a request + * may take before the connection attempt is abandoned. + */ + requestTimeout?: number; +} diff --git a/amplify/functions/deleteDocument/node_modules/@smithy/types/dist-types/ts3.4/http/httpHandlerInitialization.d.ts b/amplify/functions/deleteDocument/node_modules/@smithy/types/dist-types/ts3.4/http/httpHandlerInitialization.d.ts new file mode 100644 index 0000000..0ee18e4 --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@smithy/types/dist-types/ts3.4/http/httpHandlerInitialization.d.ts @@ -0,0 +1,122 @@ +/// +import { Agent as hAgent, AgentOptions as hAgentOptions } from "http"; +import { Agent as hsAgent, AgentOptions as hsAgentOptions } from "https"; +import { HttpRequest as IHttpRequest } from "../http"; +import { Logger } from "../logger"; +/** + * + * This type represents an alternate client constructor option for the entry + * "requestHandler". 
Instead of providing an instance of a requestHandler, the user + * may provide the requestHandler's constructor options for either the + * NodeHttpHandler or FetchHttpHandler. + * + * For other RequestHandlers like HTTP2 or WebSocket, + * constructor parameter passthrough is not currently available. + * + * @public + */ +export type RequestHandlerParams = NodeHttpHandlerOptions | FetchHttpHandlerOptions; +/** + * Represents the http options that can be passed to a node http client. + * @public + */ +export interface NodeHttpHandlerOptions { + /** + * The maximum time in milliseconds that the connection phase of a request + * may take before the connection attempt is abandoned. + * + * Defaults to 0, which disables the timeout. + */ + connectionTimeout?: number; + /** + * The number of milliseconds a request can take before automatically being terminated. + * Defaults to 0, which disables the timeout. + */ + requestTimeout?: number; + /** + * Delay before the NodeHttpHandler checks for socket exhaustion, + * and emits a warning if the active sockets and enqueued request count is greater than + * 2x the maxSockets count. + * + * Defaults to connectionTimeout + requestTimeout or 3000ms if those are not set. + */ + socketAcquisitionWarningTimeout?: number; + /** + * This field is deprecated, and requestTimeout should be used instead. + * The maximum time in milliseconds that a socket may remain idle before it + * is closed. + * + * @deprecated Use {@link requestTimeout} + */ + socketTimeout?: number; + /** + * You can pass http.Agent or its constructor options. + */ + httpAgent?: hAgent | hAgentOptions; + /** + * You can pass https.Agent or its constructor options. + */ + httpsAgent?: hsAgent | hsAgentOptions; + /** + * Optional logger. + */ + logger?: Logger; +} +/** + * Represents the http options that can be passed to a browser http client. 
+ * @public + */ +export interface FetchHttpHandlerOptions { + /** + * The number of milliseconds a request can take before being automatically + * terminated. + */ + requestTimeout?: number; + /** + * Whether to allow the request to outlive the page. Default value is false. + * + * There may be limitations to the payload size, number of concurrent requests, + * request duration etc. when using keepalive in browsers. + * + * These may change over time, so look for up to date information about + * these limitations before enabling keepalive. + */ + keepAlive?: boolean; + /** + * A string indicating whether credentials will be sent with the request always, never, or + * only when sent to a same-origin URL. + * @see https://developer.mozilla.org/en-US/docs/Web/API/Request/credentials + */ + credentials?: "include" | "omit" | "same-origin" | undefined | string; + /** + * Cache settings for fetch. + * @see https://developer.mozilla.org/en-US/docs/Web/API/Request/cache + */ + cache?: "default" | "force-cache" | "no-cache" | "no-store" | "only-if-cached" | "reload"; + /** + * An optional function that produces additional RequestInit + * parameters for each httpRequest. + * + * This is applied last via merging with Object.assign() and overwrites other values + * set from other sources. + * + * @example + * ```js + * new Client({ + * requestHandler: { + * requestInit(httpRequest) { + * return { cache: "no-store" }; + * } + * } + * }); + * ``` + */ + requestInit?: (httpRequest: IHttpRequest) => RequestInit; +} +declare global { + /** + * interface merging stub. 
+ */ + interface RequestInit { + } +} diff --git a/amplify/functions/deleteDocument/node_modules/@smithy/types/dist-types/ts3.4/identity/apiKeyIdentity.d.ts b/amplify/functions/deleteDocument/node_modules/@smithy/types/dist-types/ts3.4/identity/apiKeyIdentity.d.ts new file mode 100644 index 0000000..4aee7a2 --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@smithy/types/dist-types/ts3.4/identity/apiKeyIdentity.d.ts @@ -0,0 +1,14 @@ +import { Identity, IdentityProvider } from "../identity/identity"; +/** + * @public + */ +export interface ApiKeyIdentity extends Identity { + /** + * The literal API Key + */ + readonly apiKey: string; +} +/** + * @public + */ +export type ApiKeyIdentityProvider = IdentityProvider; diff --git a/amplify/functions/deleteDocument/node_modules/@smithy/types/dist-types/ts3.4/identity/awsCredentialIdentity.d.ts b/amplify/functions/deleteDocument/node_modules/@smithy/types/dist-types/ts3.4/identity/awsCredentialIdentity.d.ts new file mode 100644 index 0000000..9605e4d --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@smithy/types/dist-types/ts3.4/identity/awsCredentialIdentity.d.ts @@ -0,0 +1,31 @@ +import { Identity, IdentityProvider } from "./identity"; +/** + * @public + */ +export interface AwsCredentialIdentity extends Identity { + /** + * AWS access key ID + */ + readonly accessKeyId: string; + /** + * AWS secret access key + */ + readonly secretAccessKey: string; + /** + * A security or session token to use with these credentials. Usually + * present for temporary credentials. + */ + readonly sessionToken?: string; + /** + * AWS credential scope for this set of credentials. + */ + readonly credentialScope?: string; + /** + * AWS accountId. 
+ */ + readonly accountId?: string; +} +/** + * @public + */ +export type AwsCredentialIdentityProvider = IdentityProvider; diff --git a/amplify/functions/deleteDocument/node_modules/@smithy/types/dist-types/ts3.4/identity/identity.d.ts b/amplify/functions/deleteDocument/node_modules/@smithy/types/dist-types/ts3.4/identity/identity.d.ts new file mode 100644 index 0000000..eaa7e5d --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@smithy/types/dist-types/ts3.4/identity/identity.d.ts @@ -0,0 +1,15 @@ +/** + * @public + */ +export interface Identity { + /** + * A `Date` when the identity or credential will no longer be accepted. + */ + readonly expiration?: Date; +} +/** + * @public + */ +export interface IdentityProvider { + (identityProperties?: Record): Promise; +} diff --git a/amplify/functions/deleteDocument/node_modules/@smithy/types/dist-types/ts3.4/identity/index.d.ts b/amplify/functions/deleteDocument/node_modules/@smithy/types/dist-types/ts3.4/identity/index.d.ts new file mode 100644 index 0000000..031a0fe --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@smithy/types/dist-types/ts3.4/identity/index.d.ts @@ -0,0 +1,4 @@ +export * from "./apiKeyIdentity"; +export * from "./awsCredentialIdentity"; +export * from "./identity"; +export * from "./tokenIdentity"; diff --git a/amplify/functions/deleteDocument/node_modules/@smithy/types/dist-types/ts3.4/identity/tokenIdentity.d.ts b/amplify/functions/deleteDocument/node_modules/@smithy/types/dist-types/ts3.4/identity/tokenIdentity.d.ts new file mode 100644 index 0000000..33783eb --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@smithy/types/dist-types/ts3.4/identity/tokenIdentity.d.ts @@ -0,0 +1,14 @@ +import { Identity, IdentityProvider } from "../identity/identity"; +/** + * @internal + */ +export interface TokenIdentity extends Identity { + /** + * The literal token string + */ + readonly token: string; +} +/** + * @internal + */ +export type TokenIdentityProvider 
= IdentityProvider; diff --git a/amplify/functions/deleteDocument/node_modules/@smithy/types/dist-types/ts3.4/index.d.ts b/amplify/functions/deleteDocument/node_modules/@smithy/types/dist-types/ts3.4/index.d.ts new file mode 100644 index 0000000..85b4e44 --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@smithy/types/dist-types/ts3.4/index.d.ts @@ -0,0 +1,37 @@ +export * from "./abort"; +export * from "./auth"; +export * from "./blob/blob-payload-input-types"; +export * from "./checksum"; +export * from "./client"; +export * from "./command"; +export * from "./connection"; +export * from "./crypto"; +export * from "./encode"; +export * from "./endpoint"; +export * from "./endpoints"; +export * from "./eventStream"; +export * from "./extensions"; +export * from "./feature-ids"; +export * from "./http"; +export * from "./http/httpHandlerInitialization"; +export * from "./identity"; +export * from "./logger"; +export * from "./middleware"; +export * from "./pagination"; +export * from "./profile"; +export * from "./response"; +export * from "./retry"; +export * from "./serde"; +export * from "./shapes"; +export * from "./signature"; +export * from "./stream"; +export * from "./streaming-payload/streaming-blob-common-types"; +export * from "./streaming-payload/streaming-blob-payload-input-types"; +export * from "./streaming-payload/streaming-blob-payload-output-types"; +export * from "./transfer"; +export * from "./transform/client-payload-blob-type-narrow"; +export * from "./transform/no-undefined"; +export * from "./transform/type-transform"; +export * from "./uri"; +export * from "./util"; +export * from "./waiter"; diff --git a/amplify/functions/deleteDocument/node_modules/@smithy/types/dist-types/ts3.4/logger.d.ts b/amplify/functions/deleteDocument/node_modules/@smithy/types/dist-types/ts3.4/logger.d.ts new file mode 100644 index 0000000..cc69a11 --- /dev/null +++ 
b/amplify/functions/deleteDocument/node_modules/@smithy/types/dist-types/ts3.4/logger.d.ts @@ -0,0 +1,13 @@ +/** + * @public + * + * Represents a logger object that is available in HandlerExecutionContext + * throughout the middleware stack. + */ +export interface Logger { + trace?: (...content: any[]) => void; + debug: (...content: any[]) => void; + info: (...content: any[]) => void; + warn: (...content: any[]) => void; + error: (...content: any[]) => void; +} diff --git a/amplify/functions/deleteDocument/node_modules/@smithy/types/dist-types/ts3.4/middleware.d.ts b/amplify/functions/deleteDocument/node_modules/@smithy/types/dist-types/ts3.4/middleware.d.ts new file mode 100644 index 0000000..8b35bbe --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@smithy/types/dist-types/ts3.4/middleware.d.ts @@ -0,0 +1,534 @@ +import { AuthScheme, HttpAuthDefinition } from "./auth/auth"; +import { SelectedHttpAuthScheme } from "./auth/HttpAuthScheme"; +import { Command } from "./command"; +import { EndpointV2 } from "./endpoint"; +import { SmithyFeatures } from "./feature-ids"; +import { Logger } from "./logger"; +import { UserAgent } from "./util"; +/** + * @public + */ +export interface InitializeHandlerArguments { + /** + * User input to a command. Reflects the userland representation of the + * union of data types the command can effectively handle. + */ + input: Input; +} +/** + * @public + */ +export interface InitializeHandlerOutput extends DeserializeHandlerOutput { + output: Output; +} +/** + * @public + */ +export interface SerializeHandlerArguments extends InitializeHandlerArguments { + /** + * The user input serialized as a request object. The request object is unknown, + * so you cannot modify it directly. When work with request, you need to guard its + * type to e.g. HttpRequest with 'instanceof' operand + * + * During the build phase of the execution of a middleware stack, a built + * request may or may not be available. 
+ */ + request?: unknown; +} +/** + * @public + */ +export interface SerializeHandlerOutput extends InitializeHandlerOutput { +} +/** + * @public + */ +export interface BuildHandlerArguments extends FinalizeHandlerArguments { +} +/** + * @public + */ +export interface BuildHandlerOutput extends InitializeHandlerOutput { +} +/** + * @public + */ +export interface FinalizeHandlerArguments extends SerializeHandlerArguments { + /** + * The user input serialized as a request. + */ + request: unknown; +} +/** + * @public + */ +export interface FinalizeHandlerOutput extends InitializeHandlerOutput { +} +/** + * @public + */ +export interface DeserializeHandlerArguments extends FinalizeHandlerArguments { +} +/** + * @public + */ +export interface DeserializeHandlerOutput { + /** + * The raw response object from runtime is deserialized to structured output object. + * The response object is unknown so you cannot modify it directly. When work with + * response, you need to guard its type to e.g. HttpResponse with 'instanceof' operand. + * + * During the deserialize phase of the execution of a middleware stack, a deserialized + * response may or may not be available + */ + response: unknown; + output?: Output; +} +/** + * @public + */ +export interface InitializeHandler { + /** + * Asynchronously converts an input object into an output object. + * + * @param args - An object containing a input to the command as well as any + * associated or previously generated execution artifacts. + */ + (args: InitializeHandlerArguments): Promise>; +} +/** + * @public + */ +export type Handler = InitializeHandler; +/** + * @public + */ +export interface SerializeHandler { + /** + * Asynchronously converts an input object into an output object. + * + * @param args - An object containing a input to the command as well as any + * associated or previously generated execution artifacts. 
+ */ + (args: SerializeHandlerArguments): Promise>; +} +/** + * @public + */ +export interface FinalizeHandler { + /** + * Asynchronously converts an input object into an output object. + * + * @param args - An object containing a input to the command as well as any + * associated or previously generated execution artifacts. + */ + (args: FinalizeHandlerArguments): Promise>; +} +/** + * @public + */ +export interface BuildHandler { + (args: BuildHandlerArguments): Promise>; +} +/** + * @public + */ +export interface DeserializeHandler { + (args: DeserializeHandlerArguments): Promise>; +} +/** + * @public + * + * A factory function that creates functions implementing the `Handler` + * interface. + */ +export interface InitializeMiddleware { + /** + * @param next - The handler to invoke after this middleware has operated on + * the user input and before this middleware operates on the output. + * + * @param context - Invariant data and functions for use by the handler. + */ + (next: InitializeHandler, context: HandlerExecutionContext): InitializeHandler; +} +/** + * @public + * + * A factory function that creates functions implementing the `BuildHandler` + * interface. + */ +export interface SerializeMiddleware { + /** + * @param next - The handler to invoke after this middleware has operated on + * the user input and before this middleware operates on the output. + * + * @param context - Invariant data and functions for use by the handler. + */ + (next: SerializeHandler, context: HandlerExecutionContext): SerializeHandler; +} +/** + * @public + * + * A factory function that creates functions implementing the `FinalizeHandler` + * interface. + */ +export interface FinalizeRequestMiddleware { + /** + * @param next - The handler to invoke after this middleware has operated on + * the user input and before this middleware operates on the output. + * + * @param context - Invariant data and functions for use by the handler. 
+ */ + (next: FinalizeHandler, context: HandlerExecutionContext): FinalizeHandler; +} +/** + * @public + */ +export interface BuildMiddleware { + (next: BuildHandler, context: HandlerExecutionContext): BuildHandler; +} +/** + * @public + */ +export interface DeserializeMiddleware { + (next: DeserializeHandler, context: HandlerExecutionContext): DeserializeHandler; +} +/** + * @public + */ +export type MiddlewareType = InitializeMiddleware | SerializeMiddleware | BuildMiddleware | FinalizeRequestMiddleware | DeserializeMiddleware; +/** + * @public + * + * A factory function that creates the terminal handler atop which a middleware + * stack sits. + */ +export interface Terminalware { + (context: HandlerExecutionContext): DeserializeHandler; +} +/** + * @public + */ +export type Step = "initialize" | "serialize" | "build" | "finalizeRequest" | "deserialize"; +/** + * @public + */ +export type Priority = "high" | "normal" | "low"; +/** + * @public + */ +export interface HandlerOptions { + /** + * Handlers are ordered using a "step" that describes the stage of command + * execution at which the handler will be executed. The available steps are: + * + * - initialize: The input is being prepared. Examples of typical + * initialization tasks include injecting default options computing + * derived parameters. + * - serialize: The input is complete and ready to be serialized. Examples + * of typical serialization tasks include input validation and building + * an HTTP request from user input. + * - build: The input has been serialized into an HTTP request, but that + * request may require further modification. Any request alterations + * will be applied to all retries. Examples of typical build tasks + * include injecting HTTP headers that describe a stable aspect of the + * request, such as `Content-Length` or a body checksum. + * - finalizeRequest: The request is being prepared to be sent over the wire. 
The + * request in this stage should already be semantically complete and + * should therefore only be altered as match the recipient's + * expectations. Examples of typical finalization tasks include request + * signing and injecting hop-by-hop headers. + * - deserialize: The response has arrived, the middleware here will deserialize + * the raw response object to structured response + * + * Unlike initialization and build handlers, which are executed once + * per operation execution, finalization and deserialize handlers will be + * executed foreach HTTP request sent. + * + * @defaultValue 'initialize' + */ + step?: Step; + /** + * A list of strings to any that identify the general purpose or important + * characteristics of a given handler. + */ + tags?: Array; + /** + * A unique name to refer to a middleware + */ + name?: string; + /** + * @internal + * Aliases allows for middleware to be found by multiple names besides {@link HandlerOptions.name}. + * This allows for references to replaced middleware to continue working, e.g. replacing + * multiple auth-specific middleware with a single generic auth middleware. + */ + aliases?: Array; + /** + * A flag to override the existing middleware with the same name. Without + * setting it, adding middleware with duplicated name will throw an exception. + * @internal + */ + override?: boolean; +} +/** + * @public + */ +export interface AbsoluteLocation { + /** + * By default middleware will be added to individual step in un-guaranteed order. + * In the case that + * + * @defaultValue 'normal' + */ + priority?: Priority; +} +/** + * @public + */ +export type Relation = "before" | "after"; +/** + * @public + */ +export interface RelativeLocation { + /** + * Specify the relation to be before or after a know middleware. + */ + relation: Relation; + /** + * A known middleware name to indicate inserting middleware's location. 
+ */ + toMiddleware: string; +} +/** + * @public + */ +export type RelativeMiddlewareOptions = RelativeLocation & Pick>; +/** + * @public + */ +export interface InitializeHandlerOptions extends HandlerOptions { + step?: "initialize"; +} +/** + * @public + */ +export interface SerializeHandlerOptions extends HandlerOptions { + step: "serialize"; +} +/** + * @public + */ +export interface BuildHandlerOptions extends HandlerOptions { + step: "build"; +} +/** + * @public + */ +export interface FinalizeRequestHandlerOptions extends HandlerOptions { + step: "finalizeRequest"; +} +/** + * @public + */ +export interface DeserializeHandlerOptions extends HandlerOptions { + step: "deserialize"; +} +/** + * @public + * + * A stack storing middleware. It can be resolved into a handler. It supports 2 + * approaches for adding middleware: + * 1. Adding middleware to specific step with `add()`. The order of middleware + * added into same step is determined by order of adding them. If one middleware + * needs to be executed at the front of the step or at the end of step, set + * `priority` options to `high` or `low`. + * 2. Adding middleware to location relative to known middleware with `addRelativeTo()`. + * This is useful when given middleware must be executed before or after specific + * middleware(`toMiddleware`). You can add a middleware relatively to another + * middleware which also added relatively. But eventually, this relative middleware + * chain **must** be 'anchored' by a middleware that added using `add()` API + * with absolute `step` and `priority`. This mothod will throw if specified + * `toMiddleware` is not found. 
+ */ +export interface MiddlewareStack extends Pluggable { + /** + * Add middleware to the stack to be executed during the "initialize" step, + * optionally specifying a priority, tags and name + */ + add(middleware: InitializeMiddleware, options?: InitializeHandlerOptions & AbsoluteLocation): void; + /** + * Add middleware to the stack to be executed during the "serialize" step, + * optionally specifying a priority, tags and name + */ + add(middleware: SerializeMiddleware, options: SerializeHandlerOptions & AbsoluteLocation): void; + /** + * Add middleware to the stack to be executed during the "build" step, + * optionally specifying a priority, tags and name + */ + add(middleware: BuildMiddleware, options: BuildHandlerOptions & AbsoluteLocation): void; + /** + * Add middleware to the stack to be executed during the "finalizeRequest" step, + * optionally specifying a priority, tags and name + */ + add(middleware: FinalizeRequestMiddleware, options: FinalizeRequestHandlerOptions & AbsoluteLocation): void; + /** + * Add middleware to the stack to be executed during the "deserialize" step, + * optionally specifying a priority, tags and name + */ + add(middleware: DeserializeMiddleware, options: DeserializeHandlerOptions & AbsoluteLocation): void; + /** + * Add middleware to a stack position before or after a known middleware,optionally + * specifying name and tags. + */ + addRelativeTo(middleware: MiddlewareType, options: RelativeMiddlewareOptions): void; + /** + * Apply a customization function to mutate the middleware stack, often + * used for customizations that requires mutating multiple middleware. + */ + use(pluggable: Pluggable): void; + /** + * Create a shallow clone of this stack. Step bindings and handler priorities + * and tags are preserved in the copy. + */ + clone(): MiddlewareStack; + /** + * Removes middleware from the stack. + * + * If a string is provided, it will be treated as middleware name. 
If a middleware + * is inserted with the given name, it will be removed. + * + * If a middleware class is provided, all usages thereof will be removed. + */ + remove(toRemove: MiddlewareType | string): boolean; + /** + * Removes middleware that contains given tag + * + * Multiple middleware will potentially be removed + */ + removeByTag(toRemove: string): boolean; + /** + * Create a stack containing the middlewares in this stack as well as the + * middlewares in the `from` stack. Neither source is modified, and step + * bindings and handler priorities and tags are preserved in the copy. + */ + concat(from: MiddlewareStack): MiddlewareStack; + /** + * Returns a list of the current order of middleware in the stack. + * This does not execute the middleware functions, nor does it + * provide a reference to the stack itself. + */ + identify(): string[]; + /** + * @internal + * + * When an operation is called using this stack, + * it will log its list of middleware to the console using + * the identify function. + * + * @param toggle - set whether to log on resolve. + * If no argument given, returns the current value. + */ + identifyOnResolve(toggle?: boolean): boolean; + /** + * Builds a single handler function from zero or more middleware classes and + * a core handler. The core handler is meant to send command objects to AWS + * services and return promises that will resolve with the operation result + * or be rejected with an error. + * + * When a composed handler is invoked, the arguments will pass through all + * middleware in a defined order, and the return from the innermost handler + * will pass through all middleware in the reverse of that order. 
+ */ + resolve(handler: DeserializeHandler, context: HandlerExecutionContext): InitializeHandler; +} +/** + * @internal + */ +export declare const SMITHY_CONTEXT_KEY = "__smithy_context"; +/** + * @public + * + * Data and helper objects that are not expected to change from one execution of + * a composed handler to another. + */ +export interface HandlerExecutionContext { + /** + * A logger that may be invoked by any handler during execution of an + * operation. + */ + logger?: Logger; + /** + * Name of the service the operation is being sent to. + */ + clientName?: string; + /** + * Name of the operation being executed. + */ + commandName?: string; + /** + * Additional user agent that inferred by middleware. It can be used to save + * the internal user agent sections without overriding the `customUserAgent` + * config in clients. + */ + userAgent?: UserAgent; + /** + * Resolved by the endpointMiddleware function of `@smithy/middleware-endpoint` + * in the serialization stage. + */ + endpointV2?: EndpointV2; + /** + * Set at the same time as endpointV2. + */ + authSchemes?: AuthScheme[]; + /** + * The current auth configuration that has been set by any auth middleware and + * that will prevent from being set more than once. + */ + currentAuthConfig?: HttpAuthDefinition; + /** + * @deprecated do not extend this field, it is a carryover from AWS SDKs. + * Used by DynamoDbDocumentClient. + */ + dynamoDbDocumentClientOptions?: Partial<{ + overrideInputFilterSensitiveLog(...args: any[]): string | void; + overrideOutputFilterSensitiveLog(...args: any[]): string | void; + }>; + /** + * @internal + * Context for Smithy properties. + */ + [SMITHY_CONTEXT_KEY]?: { + service?: string; + operation?: string; + commandInstance?: Command; + selectedHttpAuthScheme?: SelectedHttpAuthScheme; + features?: SmithyFeatures; + /** + * @deprecated + * Do not assign arbitrary members to the Smithy Context, + * fields should be explicitly declared here to avoid collisions. 
+ */ + [key: string]: unknown; + }; + /** + * @deprecated + * Do not assign arbitrary members to the context, since + * they can interfere with existing functionality. + * + * Additional members should instead be declared on the SMITHY_CONTEXT_KEY + * or other reserved keys. + */ + [key: string]: any; +} +/** + * @public + */ +export interface Pluggable { + /** + * A function that mutate the passed in middleware stack. Functions implementing + * this interface can add, remove, modify existing middleware stack from clients + * or commands + */ + applyToStack: (stack: MiddlewareStack) => void; +} diff --git a/amplify/functions/deleteDocument/node_modules/@smithy/types/dist-types/ts3.4/pagination.d.ts b/amplify/functions/deleteDocument/node_modules/@smithy/types/dist-types/ts3.4/pagination.d.ts new file mode 100644 index 0000000..c9d1c92 --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@smithy/types/dist-types/ts3.4/pagination.d.ts @@ -0,0 +1,33 @@ +import { Client } from "./client"; +import { Command } from "./command"; +/** + * @public + * + * Expected type definition of a paginator. + */ +export type Paginator = AsyncGenerator; +/** + * @public + * + * Expected paginator configuration passed to an operation. Services will extend + * this interface definition and may type client further. + */ +export interface PaginationConfiguration { + client: Client; + pageSize?: number; + startingToken?: any; + /** + * For some APIs, such as CloudWatchLogs events, the next page token will always + * be present. + * + * When true, this config field will have the paginator stop when the token doesn't change + * instead of when it is not present. + */ + stopOnSameToken?: boolean; + /** + * @param command - reference to the instantiated command. This callback is executed + * prior to sending the command with the paginator's client. + * @returns the original command or a replacement, defaulting to the original command object. 
+ */ + withCommand?: (command: Command) => typeof command | undefined; +} diff --git a/amplify/functions/deleteDocument/node_modules/@smithy/types/dist-types/ts3.4/profile.d.ts b/amplify/functions/deleteDocument/node_modules/@smithy/types/dist-types/ts3.4/profile.d.ts new file mode 100644 index 0000000..1b3dba7 --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@smithy/types/dist-types/ts3.4/profile.d.ts @@ -0,0 +1,30 @@ +/** + * @public + */ +export declare enum IniSectionType { + PROFILE = "profile", + SSO_SESSION = "sso-session", + SERVICES = "services" +} +/** + * @public + */ +export type IniSection = Record; +/** + * @public + * + * @deprecated Please use {@link IniSection} + */ +export interface Profile extends IniSection { +} +/** + * @public + */ +export type ParsedIniData = Record; +/** + * @public + */ +export interface SharedConfigFiles { + credentialsFile: ParsedIniData; + configFile: ParsedIniData; +} diff --git a/amplify/functions/deleteDocument/node_modules/@smithy/types/dist-types/ts3.4/response.d.ts b/amplify/functions/deleteDocument/node_modules/@smithy/types/dist-types/ts3.4/response.d.ts new file mode 100644 index 0000000..3d8a45a --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@smithy/types/dist-types/ts3.4/response.d.ts @@ -0,0 +1,40 @@ +/** + * @public + */ +export interface ResponseMetadata { + /** + * The status code of the last HTTP response received for this operation. + */ + httpStatusCode?: number; + /** + * A unique identifier for the last request sent for this operation. Often + * requested by AWS service teams to aid in debugging. + */ + requestId?: string; + /** + * A secondary identifier for the last request sent. Used for debugging. + */ + extendedRequestId?: string; + /** + * A tertiary identifier for the last request sent. Used for debugging. + */ + cfId?: string; + /** + * The number of times this operation was attempted. 
+ */ + attempts?: number; + /** + * The total amount of time (in milliseconds) that was spent waiting between + * retry attempts. + */ + totalRetryDelay?: number; +} +/** + * @public + */ +export interface MetadataBearer { + /** + * Metadata pertaining to this request. + */ + $metadata: ResponseMetadata; +} diff --git a/amplify/functions/deleteDocument/node_modules/@smithy/types/dist-types/ts3.4/retry.d.ts b/amplify/functions/deleteDocument/node_modules/@smithy/types/dist-types/ts3.4/retry.d.ts new file mode 100644 index 0000000..8436c9a --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@smithy/types/dist-types/ts3.4/retry.d.ts @@ -0,0 +1,133 @@ +import { SdkError } from "./shapes"; +/** + * @public + */ +export type RetryErrorType = +/** + * This is a connection level error such as a socket timeout, socket connect + * error, tls negotiation timeout etc... + * Typically these should never be applied for non-idempotent request types + * since in this scenario, it's impossible to know whether the operation had + * a side effect on the server. + */ +"TRANSIENT" +/** + * This is an error where the server explicitly told the client to back off, + * such as a 429 or 503 Http error. + */ + | "THROTTLING" +/** + * This is a server error that isn't explicitly throttling but is considered + * by the client to be something that should be retried. + */ + | "SERVER_ERROR" +/** + * Doesn't count against any budgets. This could be something like a 401 + * challenge in Http. + */ + | "CLIENT_ERROR"; +/** + * @public + */ +export interface RetryErrorInfo { + /** + * The error thrown during the initial request, if available. + */ + error?: SdkError; + errorType: RetryErrorType; + /** + * Protocol hint. This could come from Http's 'retry-after' header or + * something from MQTT or any other protocol that has the ability to convey + * retry info from a peer. + * + * The Date after which a retry should be attempted. 
+ */ + retryAfterHint?: Date; +} +/** + * @public + */ +export interface RetryBackoffStrategy { + /** + * @returns the number of milliseconds to wait before retrying an action. + */ + computeNextBackoffDelay(retryAttempt: number): number; +} +/** + * @public + */ +export interface StandardRetryBackoffStrategy extends RetryBackoffStrategy { + /** + * Sets the delayBase used to compute backoff delays. + * @param delayBase - + */ + setDelayBase(delayBase: number): void; +} +/** + * @public + */ +export interface RetryStrategyOptions { + backoffStrategy: RetryBackoffStrategy; + maxRetriesBase: number; +} +/** + * @public + */ +export interface RetryToken { + /** + * @returns the current count of retry. + */ + getRetryCount(): number; + /** + * @returns the number of milliseconds to wait before retrying an action. + */ + getRetryDelay(): number; +} +/** + * @public + */ +export interface StandardRetryToken extends RetryToken { + /** + * @returns the cost of the last retry attempt. + */ + getRetryCost(): number | undefined; +} +/** + * @public + */ +export interface RetryStrategyV2 { + /** + * Called before any retries (for the first call to the operation). It either + * returns a retry token or an error upon the failure to acquire a token prior. + * + * tokenScope is arbitrary and out of scope for this component. However, + * adding it here offers us a lot of future flexibility for outage detection. + * For example, it could be "us-east-1" on a shared retry strategy, or + * "us-west-2-c:dynamodb". + */ + acquireInitialRetryToken(retryTokenScope: string): Promise; + /** + * After a failed operation call, this function is invoked to refresh the + * retryToken returned by acquireInitialRetryToken(). This function can + * either choose to allow another retry and send a new or updated token, + * or reject the retry attempt and report the error either in an exception + * or returning an error. 
+ */ + refreshRetryTokenForRetry(tokenToRenew: RetryToken, errorInfo: RetryErrorInfo): Promise; + /** + * Upon successful completion of the operation, this function is called + * to record that the operation was successful. + */ + recordSuccess(token: RetryToken): void; +} +/** + * @public + */ +export type ExponentialBackoffJitterType = "DEFAULT" | "NONE" | "FULL" | "DECORRELATED"; +/** + * @public + */ +export interface ExponentialBackoffStrategyOptions { + jitterType: ExponentialBackoffJitterType; + backoffScaleValue?: number; +} diff --git a/amplify/functions/deleteDocument/node_modules/@smithy/types/dist-types/ts3.4/serde.d.ts b/amplify/functions/deleteDocument/node_modules/@smithy/types/dist-types/ts3.4/serde.d.ts new file mode 100644 index 0000000..d2d7ea9 --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@smithy/types/dist-types/ts3.4/serde.d.ts @@ -0,0 +1,112 @@ +import { Endpoint } from "./http"; +import { RequestHandler } from "./transfer"; +import { Decoder, Encoder, Provider } from "./util"; +/** + * @public + * + * Interface for object requires an Endpoint set. + */ +export interface EndpointBearer { + endpoint: Provider; +} +/** + * @public + */ +export interface StreamCollector { + /** + * A function that converts a stream into an array of bytes. + * + * @param stream - The low-level native stream from browser or Nodejs runtime + */ + (stream: any): Promise; +} +/** + * @public + * + * Request and Response serde util functions and settings for AWS services + */ +export interface SerdeContext extends SerdeFunctions, EndpointBearer { + requestHandler: RequestHandler; + disableHostPrefix: boolean; +} +/** + * @public + * + * Serde functions from the client config. 
+ */ +export interface SerdeFunctions { + base64Encoder: Encoder; + base64Decoder: Decoder; + utf8Encoder: Encoder; + utf8Decoder: Decoder; + streamCollector: StreamCollector; +} +/** + * @public + */ +export interface RequestSerializer { + /** + * Converts the provided `input` into a request object + * + * @param input - The user input to serialize. + * + * @param context - Context containing runtime-specific util functions. + */ + (input: any, context: Context): Promise; +} +/** + * @public + */ +export interface ResponseDeserializer { + /** + * Converts the output of an operation into JavaScript types. + * + * @param output - The HTTP response received from the service + * + * @param context - context containing runtime-specific util functions. + */ + (output: ResponseType, context: Context): Promise; +} +/** + * The interface contains mix-in utility functions to transfer the runtime-specific + * stream implementation to specified format. Each stream can ONLY be transformed + * once. + * @public + */ +export interface SdkStreamMixin { + transformToByteArray: () => Promise; + transformToString: (encoding?: string) => Promise; + transformToWebStream: () => ReadableStream; +} +/** + * @public + * + * The type describing a runtime-specific stream implementation with mix-in + * utility functions. + */ +export type SdkStream = BaseStream & SdkStreamMixin; +/** + * @public + * + * Indicates that the member of type T with + * key StreamKey have been extended + * with the SdkStreamMixin helper methods. + */ +export type WithSdkStreamMixin = { + [key in keyof T]: key extends StreamKey ? 
SdkStream : T[key]; +}; +/** + * Interface for internal function to inject stream utility functions + * implementation + * + * @internal + */ +export interface SdkStreamMixinInjector { + (stream: unknown): SdkStreamMixin; +} +/** + * @internal + */ +export interface SdkStreamSerdeContext { + sdkStreamMixin: SdkStreamMixinInjector; +} diff --git a/amplify/functions/deleteDocument/node_modules/@smithy/types/dist-types/ts3.4/shapes.d.ts b/amplify/functions/deleteDocument/node_modules/@smithy/types/dist-types/ts3.4/shapes.d.ts new file mode 100644 index 0000000..a81cbf1 --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@smithy/types/dist-types/ts3.4/shapes.d.ts @@ -0,0 +1,82 @@ +import { HttpResponse } from "./http"; +import { MetadataBearer } from "./response"; +/** + * @public + * + * A document type represents an untyped JSON-like value. + * + * Not all protocols support document types, and the serialization format of a + * document type is protocol specific. All JSON protocols SHOULD support + * document types and they SHOULD serialize document types inline as normal + * JSON values. + */ +export type DocumentType = null | boolean | number | string | DocumentType[] | { + [prop: string]: DocumentType; +}; +/** + * @public + * + * A structure shape with the error trait. + * https://smithy.io/2.0/spec/behavior-traits.html#smithy-api-retryable-trait + */ +export interface RetryableTrait { + /** + * Indicates that the error is a retryable throttling error. + */ + readonly throttling?: boolean; +} +/** + * @public + * + * Type that is implemented by all Smithy shapes marked with the + * error trait. + * @deprecated + */ +export interface SmithyException { + /** + * The shape ID name of the exception. + */ + readonly name: string; + /** + * Whether the client or server are at fault. + */ + readonly $fault: "client" | "server"; + /** + * The service that encountered the exception. 
+ */ + readonly $service?: string; + /** + * Indicates that an error MAY be retried by the client. + */ + readonly $retryable?: RetryableTrait; + /** + * Reference to low-level HTTP response object. + */ + readonly $response?: HttpResponse; +} +/** + * @public + * + * @deprecated See {@link https://aws.amazon.com/blogs/developer/service-error-handling-modular-aws-sdk-js/} + * + * This type should not be used in your application. + * Users of the AWS SDK for JavaScript v3 service clients should prefer to + * use the specific Exception classes corresponding to each operation. + * These can be found as code in the deserializer for the operation's Command class, + * or as declarations in the service model file in codegen/sdk-codegen/aws-models. + * + * If no exceptions are enumerated by a particular Command operation, + * the base exception for the service should be used. Each client exports + * a base ServiceException prefixed with the service name. + */ +export type SdkError = Error & Partial & Partial & { + $metadata?: Partial["$metadata"] & { + /** + * If present, will have value of true and indicates that the error resulted in a + * correction of the clock skew, a.k.a. config.systemClockOffset. + * This is specific to AWS SDK and sigv4. + */ + readonly clockSkewCorrected?: true; + }; + cause?: Error; +}; diff --git a/amplify/functions/deleteDocument/node_modules/@smithy/types/dist-types/ts3.4/signature.d.ts b/amplify/functions/deleteDocument/node_modules/@smithy/types/dist-types/ts3.4/signature.d.ts new file mode 100644 index 0000000..bbaecde --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@smithy/types/dist-types/ts3.4/signature.d.ts @@ -0,0 +1,155 @@ +import { Message } from "./eventStream"; +import { HttpRequest } from "./http"; +/** + * @public + * + * A `Date` object, a unix (epoch) timestamp in seconds, or a string that can be + * understood by the JavaScript `Date` constructor. 
+ */ +export type DateInput = number | string | Date; +/** + * @public + */ +export interface SigningArguments { + /** + * The date and time to be used as signature metadata. This value should be + * a Date object, a unix (epoch) timestamp, or a string that can be + * understood by the JavaScript `Date` constructor.If not supplied, the + * value returned by `new Date()` will be used. + */ + signingDate?: DateInput; + /** + * The service signing name. It will override the service name of the signer + * in current invocation + */ + signingService?: string; + /** + * The region name to sign the request. It will override the signing region of the + * signer in current invocation + */ + signingRegion?: string; +} +/** + * @public + */ +export interface RequestSigningArguments extends SigningArguments { + /** + * A set of strings whose members represents headers that cannot be signed. + * All headers in the provided request will have their names converted to + * lower case and then checked for existence in the unsignableHeaders set. + */ + unsignableHeaders?: Set; + /** + * A set of strings whose members represents headers that should be signed. + * Any values passed here will override those provided via unsignableHeaders, + * allowing them to be signed. + * + * All headers in the provided request will have their names converted to + * lower case before signing. + */ + signableHeaders?: Set; +} +/** + * @public + */ +export interface RequestPresigningArguments extends RequestSigningArguments { + /** + * The number of seconds before the presigned URL expires + */ + expiresIn?: number; + /** + * A set of strings whose representing headers that should not be hoisted + * to presigned request's query string. If not supplied, the presigner + * moves all the AWS-specific headers (starting with `x-amz-`) to the request + * query string. If supplied, these headers remain in the presigned request's + * header. 
+ * All headers in the provided request will have their names converted to + * lower case and then checked for existence in the unhoistableHeaders set. + */ + unhoistableHeaders?: Set; + /** + * This overrides any headers with the same name(s) set by unhoistableHeaders. + * These headers will be hoisted into the query string and signed. + */ + hoistableHeaders?: Set; +} +/** + * @public + */ +export interface EventSigningArguments extends SigningArguments { + priorSignature: string; +} +/** + * @public + */ +export interface RequestPresigner { + /** + * Signs a request for future use. + * + * The request will be valid until either the provided `expiration` time has + * passed or the underlying credentials have expired. + * + * @param requestToSign - The request that should be signed. + * @param options - Additional signing options. + */ + presign(requestToSign: HttpRequest, options?: RequestPresigningArguments): Promise; +} +/** + * @public + * + * An object that signs request objects with AWS credentials using one of the + * AWS authentication protocols. + */ +export interface RequestSigner { + /** + * Sign the provided request for immediate dispatch. + */ + sign(requestToSign: HttpRequest, options?: RequestSigningArguments): Promise; +} +/** + * @public + */ +export interface StringSigner { + /** + * Sign the provided `stringToSign` for use outside of the context of + * request signing. Typical uses include signed policy generation. + */ + sign(stringToSign: string, options?: SigningArguments): Promise; +} +/** + * @public + */ +export interface FormattedEvent { + headers: Uint8Array; + payload: Uint8Array; +} +/** + * @public + */ +export interface EventSigner { + /** + * Sign the individual event of the event stream. 
+ */ + sign(event: FormattedEvent, options: EventSigningArguments): Promise; +} +/** + * @public + */ +export interface SignableMessage { + message: Message; + priorSignature: string; +} +/** + * @public + */ +export interface SignedMessage { + message: Message; + signature: string; +} +/** + * @public + */ +export interface MessageSigner { + signMessage(message: SignableMessage, args: SigningArguments): Promise; + sign(event: SignableMessage, options: SigningArguments): Promise; +} diff --git a/amplify/functions/deleteDocument/node_modules/@smithy/types/dist-types/ts3.4/stream.d.ts b/amplify/functions/deleteDocument/node_modules/@smithy/types/dist-types/ts3.4/stream.d.ts new file mode 100644 index 0000000..1e2b85d --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@smithy/types/dist-types/ts3.4/stream.d.ts @@ -0,0 +1,22 @@ +import { ChecksumConstructor } from "./checksum"; +import { HashConstructor, StreamHasher } from "./crypto"; +import { BodyLengthCalculator, Encoder } from "./util"; +/** + * @public + */ +export interface GetAwsChunkedEncodingStreamOptions { + base64Encoder?: Encoder; + bodyLengthChecker: BodyLengthCalculator; + checksumAlgorithmFn?: ChecksumConstructor | HashConstructor; + checksumLocationName?: string; + streamHasher?: StreamHasher; +} +/** + * @public + * + * A function that returns Readable Stream which follows aws-chunked encoding stream. + * It optionally adds checksum if options are provided. 
+ */ +export interface GetAwsChunkedEncodingStream { + (readableStream: StreamType, options: GetAwsChunkedEncodingStreamOptions): StreamType; +} diff --git a/amplify/functions/deleteDocument/node_modules/@smithy/types/dist-types/ts3.4/streaming-payload/streaming-blob-common-types.d.ts b/amplify/functions/deleteDocument/node_modules/@smithy/types/dist-types/ts3.4/streaming-payload/streaming-blob-common-types.d.ts new file mode 100644 index 0000000..27088db --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@smithy/types/dist-types/ts3.4/streaming-payload/streaming-blob-common-types.d.ts @@ -0,0 +1,33 @@ +/// +import { Readable } from "stream"; +import { BlobOptionalType, ReadableStreamOptionalType } from "../externals-check/browser-externals-check"; +/** + * @public + * + * This is the union representing the modeled blob type with streaming trait + * in a generic format that does not relate to HTTP input or output payloads. + * + * Note: the non-streaming blob type is represented by Uint8Array, but because + * the streaming blob type is always in the request/response paylod, it has + * historically been handled with different types. + * + * @see https://smithy.io/2.0/spec/simple-types.html#blob + * + * For compatibility with its historical representation, it must contain at least + * Readble (Node.js), Blob (browser), and ReadableStream (browser). + * + * @see StreamingPayloadInputTypes for FAQ about mixing types from multiple environments. + */ +export type StreamingBlobTypes = NodeJsRuntimeStreamingBlobTypes | BrowserRuntimeStreamingBlobTypes; +/** + * @public + * + * Node.js streaming blob type. + */ +export type NodeJsRuntimeStreamingBlobTypes = Readable; +/** + * @public + * + * Browser streaming blob types. 
+ */ +export type BrowserRuntimeStreamingBlobTypes = ReadableStreamOptionalType | BlobOptionalType; diff --git a/amplify/functions/deleteDocument/node_modules/@smithy/types/dist-types/ts3.4/streaming-payload/streaming-blob-payload-input-types.d.ts b/amplify/functions/deleteDocument/node_modules/@smithy/types/dist-types/ts3.4/streaming-payload/streaming-blob-payload-input-types.d.ts new file mode 100644 index 0000000..1a86dea --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@smithy/types/dist-types/ts3.4/streaming-payload/streaming-blob-payload-input-types.d.ts @@ -0,0 +1,61 @@ +/// +import { Readable } from "stream"; +import { BlobOptionalType, ReadableStreamOptionalType } from "../externals-check/browser-externals-check"; +/** + * @public + * + * This union represents a superset of the compatible types you + * can use for streaming payload inputs. + * + * FAQ: + * Why does the type union mix mutually exclusive runtime types, namely + * Node.js and browser types? + * + * There are several reasons: + * 1. For backwards compatibility. + * 2. As a convenient compromise solution so that users in either environment may use the types + * without customization. + * 3. The SDK does not have static type information about the exact implementation + * of the HTTP RequestHandler being used in your client(s) (e.g. fetch, XHR, node:http, or node:http2), + * given that it is chosen at runtime. There are multiple possible request handlers + * in both the Node.js and browser runtime environments. + * + * Rather than restricting the type to a known common format (Uint8Array, for example) + * which doesn't include a universal streaming format in the currently supported Node.js versions, + * the type declaration is widened to multiple possible formats. + * It is up to the user to ultimately select a compatible format with the + * runtime and HTTP handler implementation they are using. 
+ * + * Usage: + * The typical solution we expect users to have is to manually narrow the + * type when needed, picking the appropriate one out of the union according to the + * runtime environment and specific request handler. + * There is also the type utility "NodeJsClient", "BrowserClient" and more + * exported from this package. These can be applied at the client level + * to pre-narrow these streaming payload blobs. For usage see the readme.md + * in the root of the \@smithy/types NPM package. + */ +export type StreamingBlobPayloadInputTypes = NodeJsRuntimeStreamingBlobPayloadInputTypes | BrowserRuntimeStreamingBlobPayloadInputTypes; +/** + * @public + * + * Streaming payload input types in the Node.js environment. + * These are derived from the types compatible with the request body used by node:http. + * + * Note: not all types are signable by the standard SignatureV4 signer when + * used as the request body. For example, in Node.js a Readable stream + * is not signable by the default signer. + * They are included in the union because it may be intended in some cases, + * but the expected types are primarily string, Uint8Array, and Buffer. + * + * Additional details may be found in the internal + * function "getPayloadHash" in the SignatureV4 module. + */ +export type NodeJsRuntimeStreamingBlobPayloadInputTypes = string | Uint8Array | Buffer | Readable; +/** + * @public + * + * Streaming payload input types in the browser environment. + * These are derived from the types compatible with fetch's Request.body. 
+ */ +export type BrowserRuntimeStreamingBlobPayloadInputTypes = string | Uint8Array | ReadableStreamOptionalType | BlobOptionalType; diff --git a/amplify/functions/deleteDocument/node_modules/@smithy/types/dist-types/ts3.4/streaming-payload/streaming-blob-payload-output-types.d.ts b/amplify/functions/deleteDocument/node_modules/@smithy/types/dist-types/ts3.4/streaming-payload/streaming-blob-payload-output-types.d.ts new file mode 100644 index 0000000..e344a46 --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@smithy/types/dist-types/ts3.4/streaming-payload/streaming-blob-payload-output-types.d.ts @@ -0,0 +1,52 @@ +/// +import { IncomingMessage } from "http"; +import { Readable } from "stream"; +import { BlobOptionalType, ReadableStreamOptionalType } from "../externals-check/browser-externals-check"; +import { SdkStream } from "../serde"; +/** + * @public + * + * This union represents a superset of the types you may receive + * in streaming payload outputs. + * + * @see StreamingPayloadInputTypes for FAQ about mixing types from multiple environments. + * + * To highlight the upstream docs about the SdkStream mixin: + * + * The interface contains mix-in (via Object.assign) methods to transform the runtime-specific + * stream implementation to specified format. Each stream can ONLY be transformed + * once. + * + * The available methods are described on the SdkStream type via SdkStreamMixin. + */ +export type StreamingBlobPayloadOutputTypes = NodeJsRuntimeStreamingBlobPayloadOutputTypes | BrowserRuntimeStreamingBlobPayloadOutputTypes; +/** + * @public + * + * Streaming payload output types in the Node.js environment. + * + * This is by default the IncomingMessage type from node:http responses when + * using the default node-http-handler in Node.js environments. + * + * It can be other Readable types like node:http2's ClientHttp2Stream + * such as when using the node-http2-handler. 
+ * + * The SdkStreamMixin adds methods on this type to help transform (collect) it to + * other formats. + */ +export type NodeJsRuntimeStreamingBlobPayloadOutputTypes = SdkStream; +/** + * @public + * + * Streaming payload output types in the browser environment. + * + * This is by default fetch's Response.body type (ReadableStream) when using + * the default fetch-http-handler in browser-like environments. + * + * It may be a Blob, such as when using the XMLHttpRequest handler + * and receiving an arraybuffer response body. + * + * The SdkStreamMixin adds methods on this type to help transform (collect) it to + * other formats. + */ +export type BrowserRuntimeStreamingBlobPayloadOutputTypes = SdkStream; diff --git a/amplify/functions/deleteDocument/node_modules/@smithy/types/dist-types/ts3.4/transfer.d.ts b/amplify/functions/deleteDocument/node_modules/@smithy/types/dist-types/ts3.4/transfer.d.ts new file mode 100644 index 0000000..f37ddb7 --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@smithy/types/dist-types/ts3.4/transfer.d.ts @@ -0,0 +1,41 @@ +/** + * @public + */ +export type RequestHandlerOutput = { + response: ResponseType; +}; +/** + * @public + */ +export interface RequestHandler { + /** + * metadata contains information of a handler. For example + * 'h2' refers this handler is for handling HTTP/2 requests, + * whereas 'h1' refers handling HTTP1 requests + */ + metadata?: RequestHandlerMetadata; + destroy?: () => void; + handle: (request: RequestType, handlerOptions?: HandlerOptions) => Promise>; +} +/** + * @public + */ +export interface RequestHandlerMetadata { + handlerProtocol: RequestHandlerProtocol | string; +} +/** + * @public + * Values from ALPN Protocol IDs. 
+ * @see https://www.iana.org/assignments/tls-extensiontype-values/tls-extensiontype-values.xhtml#alpn-protocol-ids + */ +export declare enum RequestHandlerProtocol { + HTTP_0_9 = "http/0.9", + HTTP_1_0 = "http/1.0", + TDS_8_0 = "tds/8.0" +} +/** + * @public + */ +export interface RequestContext { + destination: URL; +} diff --git a/amplify/functions/deleteDocument/node_modules/@smithy/types/dist-types/ts3.4/transform/client-method-transforms.d.ts b/amplify/functions/deleteDocument/node_modules/@smithy/types/dist-types/ts3.4/transform/client-method-transforms.d.ts new file mode 100644 index 0000000..f1aecf3 --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@smithy/types/dist-types/ts3.4/transform/client-method-transforms.d.ts @@ -0,0 +1,26 @@ +import { CommandIO } from "../command"; +import { MetadataBearer } from "../response"; +import { StreamingBlobPayloadOutputTypes } from "../streaming-payload/streaming-blob-payload-output-types"; +import { Transform } from "./type-transform"; +/** + * @internal + * + * Narrowed version of InvokeFunction used in Client::send. + */ +export interface NarrowedInvokeFunction { + (command: CommandIO, options?: HttpHandlerOptions): Promise>; + (command: CommandIO, cb: (err: unknown, data?: Transform) => void): void; + (command: CommandIO, options: HttpHandlerOptions, cb: (err: unknown, data?: Transform) => void): void; + (command: CommandIO, options?: HttpHandlerOptions, cb?: (err: unknown, data?: Transform) => void): Promise> | void; +} +/** + * @internal + * + * Narrowed version of InvokeMethod used in aggregated Client methods. 
+ */ +export interface NarrowedInvokeMethod { + (input: InputType, options?: HttpHandlerOptions): Promise>; + (input: InputType, cb: (err: unknown, data?: Transform) => void): void; + (input: InputType, options: HttpHandlerOptions, cb: (err: unknown, data?: Transform) => void): void; + (input: InputType, options?: HttpHandlerOptions, cb?: (err: unknown, data?: OutputType) => void): Promise> | void; +} diff --git a/amplify/functions/deleteDocument/node_modules/@smithy/types/dist-types/ts3.4/transform/client-payload-blob-type-narrow.d.ts b/amplify/functions/deleteDocument/node_modules/@smithy/types/dist-types/ts3.4/transform/client-payload-blob-type-narrow.d.ts new file mode 100644 index 0000000..e9516e2 --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@smithy/types/dist-types/ts3.4/transform/client-payload-blob-type-narrow.d.ts @@ -0,0 +1,82 @@ +/// +import { IncomingMessage } from "http"; +import { ClientHttp2Stream } from "http2"; +import { InvokeMethod } from "../client"; +import { GetOutputType } from "../command"; +import { HttpHandlerOptions } from "../http"; +import { SdkStream } from "../serde"; +import { BrowserRuntimeStreamingBlobPayloadInputTypes, NodeJsRuntimeStreamingBlobPayloadInputTypes, StreamingBlobPayloadInputTypes } from "../streaming-payload/streaming-blob-payload-input-types"; +import { StreamingBlobPayloadOutputTypes } from "../streaming-payload/streaming-blob-payload-output-types"; +import { NarrowedInvokeMethod } from "./client-method-transforms"; +import { Transform } from "./type-transform"; +/** + * @public + * + * Creates a type with a given client type that narrows payload blob output + * types to SdkStream. + * + * This can be used for clients with the NodeHttpHandler requestHandler, + * the default in Node.js when not using HTTP2. 
+ * + * Usage example: + * ```typescript + * const client = new YourClient({}) as NodeJsClient; + * ``` + */ +export type NodeJsClient = NarrowPayloadBlobTypes, ClientType>; +/** + * @public + * Variant of NodeJsClient for node:http2. + */ +export type NodeJsHttp2Client = NarrowPayloadBlobTypes, ClientType>; +/** + * @public + * + * Creates a type with a given client type that narrows payload blob output + * types to SdkStream. + * + * This can be used for clients with the FetchHttpHandler requestHandler, + * which is the default in browser environments. + * + * Usage example: + * ```typescript + * const client = new YourClient({}) as BrowserClient; + * ``` + */ +export type BrowserClient = NarrowPayloadBlobTypes, ClientType>; +/** + * @public + * + * Variant of BrowserClient for XMLHttpRequest. + */ +export type BrowserXhrClient = NarrowPayloadBlobTypes, ClientType>; +/** + * @public + * + * @deprecated use NarrowPayloadBlobTypes. + * + * Narrow a given Client's blob payload outputs to the given type T. + */ +export type NarrowPayloadBlobOutputType = { + [key in keyof ClientType]: [ + ClientType[key] + ] extends [ + InvokeMethod + ] ? NarrowedInvokeMethod : ClientType[key]; +} & { + send(command: Command, options?: any): Promise, StreamingBlobPayloadOutputTypes | undefined, T>>; +}; +/** + * @public + * + * Narrow a Client's blob payload input and output types to I and O. + */ +export type NarrowPayloadBlobTypes = { + [key in keyof ClientType]: [ + ClientType[key] + ] extends [ + InvokeMethod + ] ? 
NarrowedInvokeMethod, FunctionOutputTypes> : ClientType[key]; +} & { + send(command: Command, options?: any): Promise, StreamingBlobPayloadOutputTypes | undefined, O>>; +}; diff --git a/amplify/functions/deleteDocument/node_modules/@smithy/types/dist-types/ts3.4/transform/exact.d.ts b/amplify/functions/deleteDocument/node_modules/@smithy/types/dist-types/ts3.4/transform/exact.d.ts new file mode 100644 index 0000000..3a812df --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@smithy/types/dist-types/ts3.4/transform/exact.d.ts @@ -0,0 +1,14 @@ +/** + * @internal + * + * Checks that A and B extend each other. + */ +export type Exact = [ + A +] extends [ + B +] ? ([ + B +] extends [ + A +] ? true : false) : false; diff --git a/amplify/functions/deleteDocument/node_modules/@smithy/types/dist-types/ts3.4/transform/no-undefined.d.ts b/amplify/functions/deleteDocument/node_modules/@smithy/types/dist-types/ts3.4/transform/no-undefined.d.ts new file mode 100644 index 0000000..6a7f6d8 --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@smithy/types/dist-types/ts3.4/transform/no-undefined.d.ts @@ -0,0 +1,88 @@ +import { InvokeMethod, InvokeMethodOptionalArgs } from "../client"; +import { GetOutputType } from "../command"; +import { DocumentType } from "../shapes"; +/** + * @public + * + * This type is intended as a type helper for generated clients. + * When initializing client, cast it to this type by passing + * the client constructor type as the type parameter. + * + * It will then recursively remove "undefined" as a union type from all + * input and output shapes' members. Note, this does not affect + * any member that is optional (?) such as outputs with no required members. 
+ * + * @example + * ```ts + * const client = new Client({}) as AssertiveClient; + * ``` + */ +export type AssertiveClient = NarrowClientIOTypes; +/** + * @public + * + * This is similar to AssertiveClient but additionally changes all + * output types to (recursive) Required so as to bypass all output nullability guards. + */ +export type UncheckedClient = UncheckedClientOutputTypes; +/** + * @internal + * + * Excludes undefined recursively. + */ +export type NoUndefined = T extends Function ? T : T extends DocumentType ? T : [ + T +] extends [ + object +] ? { + [key in keyof T]: NoUndefined; +} : Exclude; +/** + * @internal + * + * Excludes undefined and optional recursively. + */ +export type RecursiveRequired = T extends Function ? T : T extends DocumentType ? T : [ + T +] extends [ + object +] ? { + [key in keyof T]-?: RecursiveRequired; +} : Exclude; +/** + * @internal + * + * Removes undefined from unions. + */ +type NarrowClientIOTypes = { + [key in keyof ClientType]: [ + ClientType[key] + ] extends [ + InvokeMethodOptionalArgs + ] ? InvokeMethodOptionalArgs, NoUndefined> : [ + ClientType[key] + ] extends [ + InvokeMethod + ] ? InvokeMethod, NoUndefined> : ClientType[key]; +} & { + send(command: Command, options?: any): Promise>>; +}; +/** + * @internal + * + * Removes undefined from unions and adds yolo output types. + */ +type UncheckedClientOutputTypes = { + [key in keyof ClientType]: [ + ClientType[key] + ] extends [ + InvokeMethodOptionalArgs + ] ? InvokeMethodOptionalArgs, RecursiveRequired> : [ + ClientType[key] + ] extends [ + InvokeMethod + ] ? 
InvokeMethod, RecursiveRequired> : ClientType[key]; +} & { + send(command: Command, options?: any): Promise>>>; +}; +export {}; diff --git a/amplify/functions/deleteDocument/node_modules/@smithy/types/dist-types/ts3.4/transform/type-transform.d.ts b/amplify/functions/deleteDocument/node_modules/@smithy/types/dist-types/ts3.4/transform/type-transform.d.ts new file mode 100644 index 0000000..547303f --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@smithy/types/dist-types/ts3.4/transform/type-transform.d.ts @@ -0,0 +1,41 @@ +/** + * @public + * + * Transforms any members of the object T having type FromType + * to ToType. This applies only to exact type matches. + * + * This is for the case where FromType is a union and only those fields + * matching the same union should be transformed. + */ +export type Transform = RecursiveTransformExact; +/** + * @internal + * + * Returns ToType if T matches exactly with FromType. + */ +type TransformExact = [ + T +] extends [ + FromType +] ? ([ + FromType +] extends [ + T +] ? ToType : T) : T; +/** + * @internal + * + * Applies TransformExact to members of an object recursively. + */ +type RecursiveTransformExact = T extends Function ? T : T extends object ? { + [key in keyof T]: [ + T[key] + ] extends [ + FromType + ] ? [ + FromType + ] extends [ + T[key] + ] ? ToType : RecursiveTransformExact : RecursiveTransformExact; +} : TransformExact; +export {}; diff --git a/amplify/functions/deleteDocument/node_modules/@smithy/types/dist-types/ts3.4/uri.d.ts b/amplify/functions/deleteDocument/node_modules/@smithy/types/dist-types/ts3.4/uri.d.ts new file mode 100644 index 0000000..4e7adb4 --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@smithy/types/dist-types/ts3.4/uri.d.ts @@ -0,0 +1,17 @@ +import { QueryParameterBag } from "./http"; +/** + * @internal + * + * Represents the components parts of a Uniform Resource Identifier used to + * construct the target location of a Request. 
+ */ +export type URI = { + protocol: string; + hostname: string; + port?: number; + path: string; + query?: QueryParameterBag; + username?: string; + password?: string; + fragment?: string; +}; diff --git a/amplify/functions/deleteDocument/node_modules/@smithy/types/dist-types/ts3.4/util.d.ts b/amplify/functions/deleteDocument/node_modules/@smithy/types/dist-types/ts3.4/util.d.ts new file mode 100644 index 0000000..7c700af --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@smithy/types/dist-types/ts3.4/util.d.ts @@ -0,0 +1,192 @@ +import { Endpoint } from "./http"; +import { FinalizeHandler, FinalizeHandlerArguments, FinalizeHandlerOutput } from "./middleware"; +import { MetadataBearer } from "./response"; +/** + * @public + * + * A generic which checks if Type1 is exactly same as Type2. + */ +export type Exact = [ + Type1 +] extends [ + Type2 +] ? ([ + Type2 +] extends [ + Type1 +] ? true : false) : false; +/** + * @public + * + * A function that, given a Uint8Array of bytes, can produce a string + * representation thereof. The function may optionally attempt to + * convert other input types to Uint8Array before encoding. + * + * @example An encoder function that converts bytes to hexadecimal + * representation would return `'hello'` when given + * `new Uint8Array([104, 101, 108, 108, 111])`. + */ +export interface Encoder { + /** + * Caution: the `any` type on the input is for backwards compatibility. + * Runtime support is limited to Uint8Array and string by default. + * + * You may choose to support more encoder input types if overriding the default + * implementations. + */ + (input: Uint8Array | string | any): string; +} +/** + * @public + * + * A function that, given a string, can derive the bytes represented by that + * string. + * + * @example A decoder function that converts bytes to hexadecimal + * representation would return `new Uint8Array([104, 101, 108, 108, 111])` when + * given the string `'hello'`. 
+ */ +export interface Decoder { + (input: string): Uint8Array; +} +/** + * @public + * + * A function that, when invoked, returns a promise that will be fulfilled with + * a value of type T. + * + * @example A function that reads credentials from shared SDK configuration + * files, assuming roles and collecting MFA tokens as necessary. + */ +export interface Provider { + (): Promise; +} +/** + * @public + * + * A tuple that represents an API name and optional version + * of a library built using the AWS SDK. + */ +export type UserAgentPair = [ + /*name*/ string, + /*version*/ string +]; +/** + * @public + * + * User agent data that to be put into the request's user + * agent. + */ +export type UserAgent = UserAgentPair[]; +/** + * @public + * + * Parses a URL in string form into an Endpoint object. + */ +export interface UrlParser { + (url: string | URL): Endpoint; +} +/** + * @public + * + * A function that, when invoked, returns a promise that will be fulfilled with + * a value of type T. It memoizes the result from the previous invocation + * instead of calling the underlying resources every time. + * + * You can force the provider to refresh the memoized value by invoke the + * function with optional parameter hash with `forceRefresh` boolean key and + * value `true`. + * + * @example A function that reads credentials from IMDS service that could + * return expired credentials. The SDK will keep using the expired credentials + * until an unretryable service error requiring a force refresh of the + * credentials. + */ +export interface MemoizedProvider { + (options?: { + forceRefresh?: boolean; + }): Promise; +} +/** + * @public + * + * A function that, given a request body, determines the + * length of the body. This is used to determine the Content-Length + * that should be sent with a request. + * + * @example A function that reads a file stream and calculates + * the size of the file. 
+ */ +export interface BodyLengthCalculator { + (body: any): number | undefined; +} +/** + * @public + * + * Object containing regionalization information of + * AWS services. + */ +export interface RegionInfo { + hostname: string; + partition: string; + path?: string; + signingService?: string; + signingRegion?: string; +} +/** + * @public + * + * Options to pass when calling {@link RegionInfoProvider} + */ +export interface RegionInfoProviderOptions { + /** + * Enables IPv6/IPv4 dualstack endpoint. + * @defaultValue false + */ + useDualstackEndpoint: boolean; + /** + * Enables FIPS compatible endpoints. + * @defaultValue false + */ + useFipsEndpoint: boolean; +} +/** + * @public + * + * Function returns designated service's regionalization + * information from given region. Each service client + * comes with its regionalization provider. it serves + * to provide the default values of related configurations + */ +export interface RegionInfoProvider { + (region: string, options?: RegionInfoProviderOptions): Promise; +} +/** + * @public + * + * Interface that specifies the retry behavior + */ +export interface RetryStrategy { + /** + * The retry mode describing how the retry strategy control the traffic flow. + */ + mode?: string; + /** + * the retry behavior the will invoke the next handler and handle the retry accordingly. + * This function should also update the $metadata from the response accordingly. + * @see {@link ResponseMetadata} + */ + retry: (next: FinalizeHandler, args: FinalizeHandlerArguments) => Promise>; +} +/** + * @public + * + * Indicates the parameter may be omitted if the parameter object T + * is equivalent to a Partial, i.e. all properties optional. + */ +export type OptionalParameter = Exact, T> extends true ? 
[ +] | [ + T +] : [ + T +]; diff --git a/amplify/functions/deleteDocument/node_modules/@smithy/types/dist-types/ts3.4/waiter.d.ts b/amplify/functions/deleteDocument/node_modules/@smithy/types/dist-types/ts3.4/waiter.d.ts new file mode 100644 index 0000000..2cc2fff --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@smithy/types/dist-types/ts3.4/waiter.d.ts @@ -0,0 +1,35 @@ +import { AbortController as DeprecatedAbortController } from "./abort"; +/** + * @public + */ +export interface WaiterConfiguration { + /** + * Required service client + */ + client: Client; + /** + * The amount of time in seconds a user is willing to wait for a waiter to complete. + */ + maxWaitTime: number; + /** + * @deprecated Use abortSignal + * Abort controller. Used for ending the waiter early. + */ + abortController?: AbortController | DeprecatedAbortController; + /** + * Abort Signal. Used for ending the waiter early. + */ + abortSignal?: AbortController["signal"] | DeprecatedAbortController["signal"]; + /** + * The minimum amount of time to delay between retries in seconds. This is the + * floor of the exponential backoff. This value defaults to service default + * if not specified. This value MUST be less than or equal to maxDelay and greater than 0. + */ + minDelay?: number; + /** + * The maximum amount of time to delay between retries in seconds. This is the + * ceiling of the exponential backoff. This value defaults to service default + * if not specified. If specified, this value MUST be greater than or equal to 1. 
+ */ + maxDelay?: number; +} diff --git a/amplify/functions/deleteDocument/node_modules/@smithy/types/dist-types/uri.d.ts b/amplify/functions/deleteDocument/node_modules/@smithy/types/dist-types/uri.d.ts new file mode 100644 index 0000000..d7b874c --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@smithy/types/dist-types/uri.d.ts @@ -0,0 +1,17 @@ +import { QueryParameterBag } from "./http"; +/** + * @internal + * + * Represents the components parts of a Uniform Resource Identifier used to + * construct the target location of a Request. + */ +export type URI = { + protocol: string; + hostname: string; + port?: number; + path: string; + query?: QueryParameterBag; + username?: string; + password?: string; + fragment?: string; +}; diff --git a/amplify/functions/deleteDocument/node_modules/@smithy/types/dist-types/util.d.ts b/amplify/functions/deleteDocument/node_modules/@smithy/types/dist-types/util.d.ts new file mode 100644 index 0000000..b15045c --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@smithy/types/dist-types/util.d.ts @@ -0,0 +1,176 @@ +import { Endpoint } from "./http"; +import { FinalizeHandler, FinalizeHandlerArguments, FinalizeHandlerOutput } from "./middleware"; +import { MetadataBearer } from "./response"; +/** + * @public + * + * A generic which checks if Type1 is exactly same as Type2. + */ +export type Exact = [Type1] extends [Type2] ? ([Type2] extends [Type1] ? true : false) : false; +/** + * @public + * + * A function that, given a Uint8Array of bytes, can produce a string + * representation thereof. The function may optionally attempt to + * convert other input types to Uint8Array before encoding. + * + * @example An encoder function that converts bytes to hexadecimal + * representation would return `'hello'` when given + * `new Uint8Array([104, 101, 108, 108, 111])`. + */ +export interface Encoder { + /** + * Caution: the `any` type on the input is for backwards compatibility. 
+ * Runtime support is limited to Uint8Array and string by default. + * + * You may choose to support more encoder input types if overriding the default + * implementations. + */ + (input: Uint8Array | string | any): string; +} +/** + * @public + * + * A function that, given a string, can derive the bytes represented by that + * string. + * + * @example A decoder function that converts bytes to hexadecimal + * representation would return `new Uint8Array([104, 101, 108, 108, 111])` when + * given the string `'hello'`. + */ +export interface Decoder { + (input: string): Uint8Array; +} +/** + * @public + * + * A function that, when invoked, returns a promise that will be fulfilled with + * a value of type T. + * + * @example A function that reads credentials from shared SDK configuration + * files, assuming roles and collecting MFA tokens as necessary. + */ +export interface Provider { + (): Promise; +} +/** + * @public + * + * A tuple that represents an API name and optional version + * of a library built using the AWS SDK. + */ +export type UserAgentPair = [name: string, version?: string]; +/** + * @public + * + * User agent data that to be put into the request's user + * agent. + */ +export type UserAgent = UserAgentPair[]; +/** + * @public + * + * Parses a URL in string form into an Endpoint object. + */ +export interface UrlParser { + (url: string | URL): Endpoint; +} +/** + * @public + * + * A function that, when invoked, returns a promise that will be fulfilled with + * a value of type T. It memoizes the result from the previous invocation + * instead of calling the underlying resources every time. + * + * You can force the provider to refresh the memoized value by invoke the + * function with optional parameter hash with `forceRefresh` boolean key and + * value `true`. + * + * @example A function that reads credentials from IMDS service that could + * return expired credentials. 
The SDK will keep using the expired credentials + * until an unretryable service error requiring a force refresh of the + * credentials. + */ +export interface MemoizedProvider { + (options?: { + forceRefresh?: boolean; + }): Promise; +} +/** + * @public + * + * A function that, given a request body, determines the + * length of the body. This is used to determine the Content-Length + * that should be sent with a request. + * + * @example A function that reads a file stream and calculates + * the size of the file. + */ +export interface BodyLengthCalculator { + (body: any): number | undefined; +} +/** + * @public + * + * Object containing regionalization information of + * AWS services. + */ +export interface RegionInfo { + hostname: string; + partition: string; + path?: string; + signingService?: string; + signingRegion?: string; +} +/** + * @public + * + * Options to pass when calling {@link RegionInfoProvider} + */ +export interface RegionInfoProviderOptions { + /** + * Enables IPv6/IPv4 dualstack endpoint. + * @defaultValue false + */ + useDualstackEndpoint: boolean; + /** + * Enables FIPS compatible endpoints. + * @defaultValue false + */ + useFipsEndpoint: boolean; +} +/** + * @public + * + * Function returns designated service's regionalization + * information from given region. Each service client + * comes with its regionalization provider. it serves + * to provide the default values of related configurations + */ +export interface RegionInfoProvider { + (region: string, options?: RegionInfoProviderOptions): Promise; +} +/** + * @public + * + * Interface that specifies the retry behavior + */ +export interface RetryStrategy { + /** + * The retry mode describing how the retry strategy control the traffic flow. + */ + mode?: string; + /** + * the retry behavior the will invoke the next handler and handle the retry accordingly. + * This function should also update the $metadata from the response accordingly. 
+ * @see {@link ResponseMetadata} + */ + retry: (next: FinalizeHandler, args: FinalizeHandlerArguments) => Promise>; +} +/** + * @public + * + * Indicates the parameter may be omitted if the parameter object T + * is equivalent to a Partial, i.e. all properties optional. + */ +export type OptionalParameter = Exact, T> extends true ? [] | [T] : [T]; diff --git a/amplify/functions/deleteDocument/node_modules/@smithy/types/dist-types/waiter.d.ts b/amplify/functions/deleteDocument/node_modules/@smithy/types/dist-types/waiter.d.ts new file mode 100644 index 0000000..5941832 --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@smithy/types/dist-types/waiter.d.ts @@ -0,0 +1,35 @@ +import { AbortController as DeprecatedAbortController } from "./abort"; +/** + * @public + */ +export interface WaiterConfiguration { + /** + * Required service client + */ + client: Client; + /** + * The amount of time in seconds a user is willing to wait for a waiter to complete. + */ + maxWaitTime: number; + /** + * @deprecated Use abortSignal + * Abort controller. Used for ending the waiter early. + */ + abortController?: AbortController | DeprecatedAbortController; + /** + * Abort Signal. Used for ending the waiter early. + */ + abortSignal?: AbortController["signal"] | DeprecatedAbortController["signal"]; + /** + * The minimum amount of time to delay between retries in seconds. This is the + * floor of the exponential backoff. This value defaults to service default + * if not specified. This value MUST be less than or equal to maxDelay and greater than 0. + */ + minDelay?: number; + /** + * The maximum amount of time to delay between retries in seconds. This is the + * ceiling of the exponential backoff. This value defaults to service default + * if not specified. If specified, this value MUST be greater than or equal to 1. 
+ */ + maxDelay?: number; +} diff --git a/amplify/functions/deleteDocument/node_modules/@smithy/types/package.json b/amplify/functions/deleteDocument/node_modules/@smithy/types/package.json new file mode 100644 index 0000000..87c5ad0 --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@smithy/types/package.json @@ -0,0 +1,60 @@ +{ + "name": "@smithy/types", + "version": "4.2.0", + "scripts": { + "build": "concurrently 'yarn:build:cjs' 'yarn:build:es' 'yarn:build:types && yarn build:types:downlevel'", + "build:cjs": "node ../../scripts/inline types", + "build:es": "yarn g:tsc -p tsconfig.es.json", + "build:types": "yarn g:tsc -p tsconfig.types.json", + "build:types:downlevel": "rimraf dist-types/ts3.4 && downlevel-dts dist-types dist-types/ts3.4 && node scripts/downlevel", + "stage-release": "rimraf ./.release && yarn pack && mkdir ./.release && tar zxvf ./package.tgz --directory ./.release && rm ./package.tgz", + "clean": "rimraf ./dist-* && rimraf *.tsbuildinfo || exit 0", + "lint": "eslint -c ../../.eslintrc.js \"src/**/*.ts\"", + "format": "prettier --config ../../prettier.config.js --ignore-path ../../.prettierignore --write \"**/*.{ts,md,json}\"", + "test": "yarn g:tsc -p tsconfig.test.json", + "extract:docs": "api-extractor run --local" + }, + "main": "./dist-cjs/index.js", + "module": "./dist-es/index.js", + "types": "./dist-types/index.d.ts", + "author": { + "name": "AWS Smithy Team", + "email": "", + "url": "https://smithy.io" + }, + "license": "Apache-2.0", + "dependencies": { + "tslib": "^2.6.2" + }, + "engines": { + "node": ">=18.0.0" + }, + "typesVersions": { + "<=4.0": { + "dist-types/*": [ + "dist-types/ts3.4/*" + ] + } + }, + "files": [ + "dist-*/**" + ], + "homepage": "https://github.com/smithy-lang/smithy-typescript/tree/main/packages/types", + "repository": { + "type": "git", + "url": "https://github.com/smithy-lang/smithy-typescript.git", + "directory": "packages/types" + }, + "devDependencies": { + "concurrently": "7.0.0", + 
"downlevel-dts": "0.10.1", + "rimraf": "3.0.2", + "typedoc": "0.23.23" + }, + "typedoc": { + "entryPoint": "src/index.ts" + }, + "publishConfig": { + "directory": ".release/package" + } +} \ No newline at end of file diff --git a/amplify/functions/deleteDocument/node_modules/@smithy/url-parser/LICENSE b/amplify/functions/deleteDocument/node_modules/@smithy/url-parser/LICENSE new file mode 100644 index 0000000..dd65ae0 --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@smithy/url-parser/LICENSE @@ -0,0 +1,201 @@ + Apache License + Version 2.0, January 2004 + http://www.apache.org/licenses/ + + TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION + + 1. Definitions. + + "License" shall mean the terms and conditions for use, reproduction, + and distribution as defined by Sections 1 through 9 of this document. + + "Licensor" shall mean the copyright owner or entity authorized by + the copyright owner that is granting the License. + + "Legal Entity" shall mean the union of the acting entity and all + other entities that control, are controlled by, or are under common + control with that entity. For the purposes of this definition, + "control" means (i) the power, direct or indirect, to cause the + direction or management of such entity, whether by contract or + otherwise, or (ii) ownership of fifty percent (50%) or more of the + outstanding shares, or (iii) beneficial ownership of such entity. + + "You" (or "Your") shall mean an individual or Legal Entity + exercising permissions granted by this License. + + "Source" form shall mean the preferred form for making modifications, + including but not limited to software source code, documentation + source, and configuration files. + + "Object" form shall mean any form resulting from mechanical + transformation or translation of a Source form, including but + not limited to compiled object code, generated documentation, + and conversions to other media types. 
+ + "Work" shall mean the work of authorship, whether in Source or + Object form, made available under the License, as indicated by a + copyright notice that is included in or attached to the work + (an example is provided in the Appendix below). + + "Derivative Works" shall mean any work, whether in Source or Object + form, that is based on (or derived from) the Work and for which the + editorial revisions, annotations, elaborations, or other modifications + represent, as a whole, an original work of authorship. For the purposes + of this License, Derivative Works shall not include works that remain + separable from, or merely link (or bind by name) to the interfaces of, + the Work and Derivative Works thereof. + + "Contribution" shall mean any work of authorship, including + the original version of the Work and any modifications or additions + to that Work or Derivative Works thereof, that is intentionally + submitted to Licensor for inclusion in the Work by the copyright owner + or by an individual or Legal Entity authorized to submit on behalf of + the copyright owner. For the purposes of this definition, "submitted" + means any form of electronic, verbal, or written communication sent + to the Licensor or its representatives, including but not limited to + communication on electronic mailing lists, source code control systems, + and issue tracking systems that are managed by, or on behalf of, the + Licensor for the purpose of discussing and improving the Work, but + excluding communication that is conspicuously marked or otherwise + designated in writing by the copyright owner as "Not a Contribution." + + "Contributor" shall mean Licensor and any individual or Legal Entity + on behalf of whom a Contribution has been received by Licensor and + subsequently incorporated within the Work. + + 2. Grant of Copyright License. 
Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + copyright license to reproduce, prepare Derivative Works of, + publicly display, publicly perform, sublicense, and distribute the + Work and such Derivative Works in Source or Object form. + + 3. Grant of Patent License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + (except as stated in this section) patent license to make, have made, + use, offer to sell, sell, import, and otherwise transfer the Work, + where such license applies only to those patent claims licensable + by such Contributor that are necessarily infringed by their + Contribution(s) alone or by combination of their Contribution(s) + with the Work to which such Contribution(s) was submitted. If You + institute patent litigation against any entity (including a + cross-claim or counterclaim in a lawsuit) alleging that the Work + or a Contribution incorporated within the Work constitutes direct + or contributory patent infringement, then any patent licenses + granted to You under this License for that Work shall terminate + as of the date such litigation is filed. + + 4. Redistribution. 
You may reproduce and distribute copies of the + Work or Derivative Works thereof in any medium, with or without + modifications, and in Source or Object form, provided that You + meet the following conditions: + + (a) You must give any other recipients of the Work or + Derivative Works a copy of this License; and + + (b) You must cause any modified files to carry prominent notices + stating that You changed the files; and + + (c) You must retain, in the Source form of any Derivative Works + that You distribute, all copyright, patent, trademark, and + attribution notices from the Source form of the Work, + excluding those notices that do not pertain to any part of + the Derivative Works; and + + (d) If the Work includes a "NOTICE" text file as part of its + distribution, then any Derivative Works that You distribute must + include a readable copy of the attribution notices contained + within such NOTICE file, excluding those notices that do not + pertain to any part of the Derivative Works, in at least one + of the following places: within a NOTICE text file distributed + as part of the Derivative Works; within the Source form or + documentation, if provided along with the Derivative Works; or, + within a display generated by the Derivative Works, if and + wherever such third-party notices normally appear. The contents + of the NOTICE file are for informational purposes only and + do not modify the License. You may add Your own attribution + notices within Derivative Works that You distribute, alongside + or as an addendum to the NOTICE text from the Work, provided + that such additional attribution notices cannot be construed + as modifying the License. 
+ + You may add Your own copyright statement to Your modifications and + may provide additional or different license terms and conditions + for use, reproduction, or distribution of Your modifications, or + for any such Derivative Works as a whole, provided Your use, + reproduction, and distribution of the Work otherwise complies with + the conditions stated in this License. + + 5. Submission of Contributions. Unless You explicitly state otherwise, + any Contribution intentionally submitted for inclusion in the Work + by You to the Licensor shall be under the terms and conditions of + this License, without any additional terms or conditions. + Notwithstanding the above, nothing herein shall supersede or modify + the terms of any separate license agreement you may have executed + with Licensor regarding such Contributions. + + 6. Trademarks. This License does not grant permission to use the trade + names, trademarks, service marks, or product names of the Licensor, + except as required for reasonable and customary use in describing the + origin of the Work and reproducing the content of the NOTICE file. + + 7. Disclaimer of Warranty. Unless required by applicable law or + agreed to in writing, Licensor provides the Work (and each + Contributor provides its Contributions) on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or + implied, including, without limitation, any warranties or conditions + of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A + PARTICULAR PURPOSE. You are solely responsible for determining the + appropriateness of using or redistributing the Work and assume any + risks associated with Your exercise of permissions under this License. + + 8. Limitation of Liability. 
In no event and under no legal theory, + whether in tort (including negligence), contract, or otherwise, + unless required by applicable law (such as deliberate and grossly + negligent acts) or agreed to in writing, shall any Contributor be + liable to You for damages, including any direct, indirect, special, + incidental, or consequential damages of any character arising as a + result of this License or out of the use or inability to use the + Work (including but not limited to damages for loss of goodwill, + work stoppage, computer failure or malfunction, or any and all + other commercial damages or losses), even if such Contributor + has been advised of the possibility of such damages. + + 9. Accepting Warranty or Additional Liability. While redistributing + the Work or Derivative Works thereof, You may choose to offer, + and charge a fee for, acceptance of support, warranty, indemnity, + or other liability obligations and/or rights consistent with this + License. However, in accepting such obligations, You may act only + on Your own behalf and on Your sole responsibility, not on behalf + of any other Contributor, and only if You agree to indemnify, + defend, and hold each Contributor harmless for any liability + incurred by, or claims asserted against, such Contributor by reason + of your accepting any such warranty or additional liability. + + END OF TERMS AND CONDITIONS + + APPENDIX: How to apply the Apache License to your work. + + To apply the Apache License to your work, attach the following + boilerplate notice, with the fields enclosed by brackets "{}" + replaced with your own identifying information. (Don't include + the brackets!) The text should be enclosed in the appropriate + comment syntax for the file format. We also recommend that a + file or class name and description of purpose be included on the + same "printed page" as the copyright notice for easier + identification within third-party archives. + + Copyright 2018-2020 Amazon.com, Inc. 
or its affiliates. All Rights Reserved. + + Licensed under the Apache License, Version 2.0 (the "License"); + you may not use this file except in compliance with the License. + You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + + Unless required by applicable law or agreed to in writing, software + distributed under the License is distributed on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + See the License for the specific language governing permissions and + limitations under the License. diff --git a/amplify/functions/deleteDocument/node_modules/@smithy/url-parser/README.md b/amplify/functions/deleteDocument/node_modules/@smithy/url-parser/README.md new file mode 100644 index 0000000..0d8d61e --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@smithy/url-parser/README.md @@ -0,0 +1,10 @@ +# @smithy/url-parser + +[![NPM version](https://img.shields.io/npm/v/@smithy/url-parser/latest.svg)](https://www.npmjs.com/package/@smithy/url-parser) +[![NPM downloads](https://img.shields.io/npm/dm/@smithy/url-parser.svg)](https://www.npmjs.com/package/@smithy/url-parser) + +> An internal package + +## Usage + +You probably shouldn't, at least directly. 
diff --git a/amplify/functions/deleteDocument/node_modules/@smithy/url-parser/dist-cjs/index.js b/amplify/functions/deleteDocument/node_modules/@smithy/url-parser/dist-cjs/index.js new file mode 100644 index 0000000..ab81787 --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@smithy/url-parser/dist-cjs/index.js @@ -0,0 +1,49 @@ +var __defProp = Object.defineProperty; +var __getOwnPropDesc = Object.getOwnPropertyDescriptor; +var __getOwnPropNames = Object.getOwnPropertyNames; +var __hasOwnProp = Object.prototype.hasOwnProperty; +var __name = (target, value) => __defProp(target, "name", { value, configurable: true }); +var __export = (target, all) => { + for (var name in all) + __defProp(target, name, { get: all[name], enumerable: true }); +}; +var __copyProps = (to, from, except, desc) => { + if (from && typeof from === "object" || typeof from === "function") { + for (let key of __getOwnPropNames(from)) + if (!__hasOwnProp.call(to, key) && key !== except) + __defProp(to, key, { get: () => from[key], enumerable: !(desc = __getOwnPropDesc(from, key)) || desc.enumerable }); + } + return to; +}; +var __toCommonJS = (mod) => __copyProps(__defProp({}, "__esModule", { value: true }), mod); + +// src/index.ts +var src_exports = {}; +__export(src_exports, { + parseUrl: () => parseUrl +}); +module.exports = __toCommonJS(src_exports); +var import_querystring_parser = require("@smithy/querystring-parser"); +var parseUrl = /* @__PURE__ */ __name((url) => { + if (typeof url === "string") { + return parseUrl(new URL(url)); + } + const { hostname, pathname, port, protocol, search } = url; + let query; + if (search) { + query = (0, import_querystring_parser.parseQueryString)(search); + } + return { + hostname, + port: port ? 
parseInt(port) : void 0, + protocol, + path: pathname, + query + }; +}, "parseUrl"); +// Annotate the CommonJS export names for ESM import in node: + +0 && (module.exports = { + parseUrl +}); + diff --git a/amplify/functions/deleteDocument/node_modules/@smithy/url-parser/dist-es/index.js b/amplify/functions/deleteDocument/node_modules/@smithy/url-parser/dist-es/index.js new file mode 100644 index 0000000..811f8bf --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@smithy/url-parser/dist-es/index.js @@ -0,0 +1,18 @@ +import { parseQueryString } from "@smithy/querystring-parser"; +export const parseUrl = (url) => { + if (typeof url === "string") { + return parseUrl(new URL(url)); + } + const { hostname, pathname, port, protocol, search } = url; + let query; + if (search) { + query = parseQueryString(search); + } + return { + hostname, + port: port ? parseInt(port) : undefined, + protocol, + path: pathname, + query, + }; +}; diff --git a/amplify/functions/deleteDocument/node_modules/@smithy/url-parser/dist-types/index.d.ts b/amplify/functions/deleteDocument/node_modules/@smithy/url-parser/dist-types/index.d.ts new file mode 100644 index 0000000..b0d91c9 --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@smithy/url-parser/dist-types/index.d.ts @@ -0,0 +1,5 @@ +import { UrlParser } from "@smithy/types"; +/** + * @internal + */ +export declare const parseUrl: UrlParser; diff --git a/amplify/functions/deleteDocument/node_modules/@smithy/url-parser/dist-types/ts3.4/index.d.ts b/amplify/functions/deleteDocument/node_modules/@smithy/url-parser/dist-types/ts3.4/index.d.ts new file mode 100644 index 0000000..d6f0ec5 --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@smithy/url-parser/dist-types/ts3.4/index.d.ts @@ -0,0 +1,5 @@ +import { UrlParser } from "@smithy/types"; +/** + * @internal + */ +export declare const parseUrl: UrlParser; diff --git a/amplify/functions/deleteDocument/node_modules/@smithy/url-parser/package.json 
b/amplify/functions/deleteDocument/node_modules/@smithy/url-parser/package.json new file mode 100644 index 0000000..10aebb8 --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@smithy/url-parser/package.json @@ -0,0 +1,61 @@ +{ + "name": "@smithy/url-parser", + "version": "4.0.2", + "scripts": { + "build": "concurrently 'yarn:build:cjs' 'yarn:build:es' 'yarn:build:types && yarn build:types:downlevel'", + "build:cjs": "node ../../scripts/inline url-parser", + "build:es": "yarn g:tsc -p tsconfig.es.json", + "build:types": "yarn g:tsc -p tsconfig.types.json", + "build:types:downlevel": "rimraf dist-types/ts3.4 && downlevel-dts dist-types dist-types/ts3.4", + "stage-release": "rimraf ./.release && yarn pack && mkdir ./.release && tar zxvf ./package.tgz --directory ./.release && rm ./package.tgz", + "clean": "rimraf ./dist-* && rimraf *.tsbuildinfo || exit 0", + "lint": "eslint -c ../../.eslintrc.js \"src/**/*.ts\"", + "format": "prettier --config ../../prettier.config.js --ignore-path ../../.prettierignore --write \"**/*.{ts,md,json}\"", + "test": "yarn g:vitest run", + "test:watch": "yarn g:vitest watch" + }, + "main": "./dist-cjs/index.js", + "module": "./dist-es/index.js", + "types": "./dist-types/index.d.ts", + "author": { + "name": "AWS SDK for JavaScript Team", + "url": "https://aws.amazon.com/javascript/" + }, + "license": "Apache-2.0", + "dependencies": { + "@smithy/querystring-parser": "^4.0.2", + "@smithy/types": "^4.2.0", + "tslib": "^2.6.2" + }, + "typesVersions": { + "<4.0": { + "dist-types/*": [ + "dist-types/ts3.4/*" + ] + } + }, + "files": [ + "dist-*/**" + ], + "homepage": "https://github.com/smithy-lang/smithy-typescript/tree/main/packages/url-parser", + "repository": { + "type": "git", + "url": "https://github.com/smithy-lang/smithy-typescript.git", + "directory": "packages/url-parser" + }, + "devDependencies": { + "concurrently": "7.0.0", + "downlevel-dts": "0.10.1", + "rimraf": "3.0.2", + "typedoc": "0.23.23" + }, + "typedoc": { + 
"entryPoint": "src/index.ts" + }, + "publishConfig": { + "directory": ".release/package" + }, + "engines": { + "node": ">=18.0.0" + } +} \ No newline at end of file diff --git a/amplify/functions/deleteDocument/node_modules/@smithy/util-base64/LICENSE b/amplify/functions/deleteDocument/node_modules/@smithy/util-base64/LICENSE new file mode 100644 index 0000000..7b6491b --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@smithy/util-base64/LICENSE @@ -0,0 +1,201 @@ +Apache License + Version 2.0, January 2004 + http://www.apache.org/licenses/ + + TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION + + 1. Definitions. + + "License" shall mean the terms and conditions for use, reproduction, + and distribution as defined by Sections 1 through 9 of this document. + + "Licensor" shall mean the copyright owner or entity authorized by + the copyright owner that is granting the License. + + "Legal Entity" shall mean the union of the acting entity and all + other entities that control, are controlled by, or are under common + control with that entity. For the purposes of this definition, + "control" means (i) the power, direct or indirect, to cause the + direction or management of such entity, whether by contract or + otherwise, or (ii) ownership of fifty percent (50%) or more of the + outstanding shares, or (iii) beneficial ownership of such entity. + + "You" (or "Your") shall mean an individual or Legal Entity + exercising permissions granted by this License. + + "Source" form shall mean the preferred form for making modifications, + including but not limited to software source code, documentation + source, and configuration files. + + "Object" form shall mean any form resulting from mechanical + transformation or translation of a Source form, including but + not limited to compiled object code, generated documentation, + and conversions to other media types. 
+ + "Work" shall mean the work of authorship, whether in Source or + Object form, made available under the License, as indicated by a + copyright notice that is included in or attached to the work + (an example is provided in the Appendix below). + + "Derivative Works" shall mean any work, whether in Source or Object + form, that is based on (or derived from) the Work and for which the + editorial revisions, annotations, elaborations, or other modifications + represent, as a whole, an original work of authorship. For the purposes + of this License, Derivative Works shall not include works that remain + separable from, or merely link (or bind by name) to the interfaces of, + the Work and Derivative Works thereof. + + "Contribution" shall mean any work of authorship, including + the original version of the Work and any modifications or additions + to that Work or Derivative Works thereof, that is intentionally + submitted to Licensor for inclusion in the Work by the copyright owner + or by an individual or Legal Entity authorized to submit on behalf of + the copyright owner. For the purposes of this definition, "submitted" + means any form of electronic, verbal, or written communication sent + to the Licensor or its representatives, including but not limited to + communication on electronic mailing lists, source code control systems, + and issue tracking systems that are managed by, or on behalf of, the + Licensor for the purpose of discussing and improving the Work, but + excluding communication that is conspicuously marked or otherwise + designated in writing by the copyright owner as "Not a Contribution." + + "Contributor" shall mean Licensor and any individual or Legal Entity + on behalf of whom a Contribution has been received by Licensor and + subsequently incorporated within the Work. + + 2. Grant of Copyright License. 
Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + copyright license to reproduce, prepare Derivative Works of, + publicly display, publicly perform, sublicense, and distribute the + Work and such Derivative Works in Source or Object form. + + 3. Grant of Patent License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + (except as stated in this section) patent license to make, have made, + use, offer to sell, sell, import, and otherwise transfer the Work, + where such license applies only to those patent claims licensable + by such Contributor that are necessarily infringed by their + Contribution(s) alone or by combination of their Contribution(s) + with the Work to which such Contribution(s) was submitted. If You + institute patent litigation against any entity (including a + cross-claim or counterclaim in a lawsuit) alleging that the Work + or a Contribution incorporated within the Work constitutes direct + or contributory patent infringement, then any patent licenses + granted to You under this License for that Work shall terminate + as of the date such litigation is filed. + + 4. Redistribution. 
You may reproduce and distribute copies of the + Work or Derivative Works thereof in any medium, with or without + modifications, and in Source or Object form, provided that You + meet the following conditions: + + (a) You must give any other recipients of the Work or + Derivative Works a copy of this License; and + + (b) You must cause any modified files to carry prominent notices + stating that You changed the files; and + + (c) You must retain, in the Source form of any Derivative Works + that You distribute, all copyright, patent, trademark, and + attribution notices from the Source form of the Work, + excluding those notices that do not pertain to any part of + the Derivative Works; and + + (d) If the Work includes a "NOTICE" text file as part of its + distribution, then any Derivative Works that You distribute must + include a readable copy of the attribution notices contained + within such NOTICE file, excluding those notices that do not + pertain to any part of the Derivative Works, in at least one + of the following places: within a NOTICE text file distributed + as part of the Derivative Works; within the Source form or + documentation, if provided along with the Derivative Works; or, + within a display generated by the Derivative Works, if and + wherever such third-party notices normally appear. The contents + of the NOTICE file are for informational purposes only and + do not modify the License. You may add Your own attribution + notices within Derivative Works that You distribute, alongside + or as an addendum to the NOTICE text from the Work, provided + that such additional attribution notices cannot be construed + as modifying the License. 
+ + You may add Your own copyright statement to Your modifications and + may provide additional or different license terms and conditions + for use, reproduction, or distribution of Your modifications, or + for any such Derivative Works as a whole, provided Your use, + reproduction, and distribution of the Work otherwise complies with + the conditions stated in this License. + + 5. Submission of Contributions. Unless You explicitly state otherwise, + any Contribution intentionally submitted for inclusion in the Work + by You to the Licensor shall be under the terms and conditions of + this License, without any additional terms or conditions. + Notwithstanding the above, nothing herein shall supersede or modify + the terms of any separate license agreement you may have executed + with Licensor regarding such Contributions. + + 6. Trademarks. This License does not grant permission to use the trade + names, trademarks, service marks, or product names of the Licensor, + except as required for reasonable and customary use in describing the + origin of the Work and reproducing the content of the NOTICE file. + + 7. Disclaimer of Warranty. Unless required by applicable law or + agreed to in writing, Licensor provides the Work (and each + Contributor provides its Contributions) on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or + implied, including, without limitation, any warranties or conditions + of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A + PARTICULAR PURPOSE. You are solely responsible for determining the + appropriateness of using or redistributing the Work and assume any + risks associated with Your exercise of permissions under this License. + + 8. Limitation of Liability. 
In no event and under no legal theory, + whether in tort (including negligence), contract, or otherwise, + unless required by applicable law (such as deliberate and grossly + negligent acts) or agreed to in writing, shall any Contributor be + liable to You for damages, including any direct, indirect, special, + incidental, or consequential damages of any character arising as a + result of this License or out of the use or inability to use the + Work (including but not limited to damages for loss of goodwill, + work stoppage, computer failure or malfunction, or any and all + other commercial damages or losses), even if such Contributor + has been advised of the possibility of such damages. + + 9. Accepting Warranty or Additional Liability. While redistributing + the Work or Derivative Works thereof, You may choose to offer, + and charge a fee for, acceptance of support, warranty, indemnity, + or other liability obligations and/or rights consistent with this + License. However, in accepting such obligations, You may act only + on Your own behalf and on Your sole responsibility, not on behalf + of any other Contributor, and only if You agree to indemnify, + defend, and hold each Contributor harmless for any liability + incurred by, or claims asserted against, such Contributor by reason + of your accepting any such warranty or additional liability. + + END OF TERMS AND CONDITIONS + + APPENDIX: How to apply the Apache License to your work. + + To apply the Apache License to your work, attach the following + boilerplate notice, with the fields enclosed by brackets "{}" + replaced with your own identifying information. (Don't include + the brackets!) The text should be enclosed in the appropriate + comment syntax for the file format. We also recommend that a + file or class name and description of purpose be included on the + same "printed page" as the copyright notice for easier + identification within third-party archives. + + Copyright 2018-2020 Amazon.com, Inc. 
or its affiliates. All Rights Reserved. + + Licensed under the Apache License, Version 2.0 (the "License"); + you may not use this file except in compliance with the License. + You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + + Unless required by applicable law or agreed to in writing, software + distributed under the License is distributed on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + See the License for the specific language governing permissions and + limitations under the License. \ No newline at end of file diff --git a/amplify/functions/deleteDocument/node_modules/@smithy/util-base64/README.md b/amplify/functions/deleteDocument/node_modules/@smithy/util-base64/README.md new file mode 100644 index 0000000..c9b6c87 --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@smithy/util-base64/README.md @@ -0,0 +1,4 @@ +# @smithy/util-base64 + +[![NPM version](https://img.shields.io/npm/v/@smithy/util-base64/latest.svg)](https://www.npmjs.com/package/@smithy/util-base64) +[![NPM downloads](https://img.shields.io/npm/dm/@smithy/util-base64.svg)](https://www.npmjs.com/package/@smithy/util-base64) diff --git a/amplify/functions/deleteDocument/node_modules/@smithy/util-base64/dist-cjs/constants.browser.js b/amplify/functions/deleteDocument/node_modules/@smithy/util-base64/dist-cjs/constants.browser.js new file mode 100644 index 0000000..d35d09f --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@smithy/util-base64/dist-cjs/constants.browser.js @@ -0,0 +1,35 @@ +"use strict"; +Object.defineProperty(exports, "__esModule", { value: true }); +exports.maxLetterValue = exports.bitsPerByte = exports.bitsPerLetter = exports.alphabetByValue = exports.alphabetByEncoding = void 0; +const alphabetByEncoding = {}; +exports.alphabetByEncoding = alphabetByEncoding; +const alphabetByValue = new Array(64); +exports.alphabetByValue = alphabetByValue; +for (let i = 0, 
start = "A".charCodeAt(0), limit = "Z".charCodeAt(0); i + start <= limit; i++) { + const char = String.fromCharCode(i + start); + alphabetByEncoding[char] = i; + alphabetByValue[i] = char; +} +for (let i = 0, start = "a".charCodeAt(0), limit = "z".charCodeAt(0); i + start <= limit; i++) { + const char = String.fromCharCode(i + start); + const index = i + 26; + alphabetByEncoding[char] = index; + alphabetByValue[index] = char; +} +for (let i = 0; i < 10; i++) { + alphabetByEncoding[i.toString(10)] = i + 52; + const char = i.toString(10); + const index = i + 52; + alphabetByEncoding[char] = index; + alphabetByValue[index] = char; +} +alphabetByEncoding["+"] = 62; +alphabetByValue[62] = "+"; +alphabetByEncoding["/"] = 63; +alphabetByValue[63] = "/"; +const bitsPerLetter = 6; +exports.bitsPerLetter = bitsPerLetter; +const bitsPerByte = 8; +exports.bitsPerByte = bitsPerByte; +const maxLetterValue = 0b111111; +exports.maxLetterValue = maxLetterValue; diff --git a/amplify/functions/deleteDocument/node_modules/@smithy/util-base64/dist-cjs/fromBase64.browser.js b/amplify/functions/deleteDocument/node_modules/@smithy/util-base64/dist-cjs/fromBase64.browser.js new file mode 100644 index 0000000..a5baffd --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@smithy/util-base64/dist-cjs/fromBase64.browser.js @@ -0,0 +1,40 @@ +"use strict"; +Object.defineProperty(exports, "__esModule", { value: true }); +exports.fromBase64 = void 0; +const constants_browser_1 = require("./constants.browser"); +const fromBase64 = (input) => { + let totalByteLength = (input.length / 4) * 3; + if (input.slice(-2) === "==") { + totalByteLength -= 2; + } + else if (input.slice(-1) === "=") { + totalByteLength--; + } + const out = new ArrayBuffer(totalByteLength); + const dataView = new DataView(out); + for (let i = 0; i < input.length; i += 4) { + let bits = 0; + let bitLength = 0; + for (let j = i, limit = i + 3; j <= limit; j++) { + if (input[j] !== "=") { + if (!(input[j] in 
constants_browser_1.alphabetByEncoding)) { + throw new TypeError(`Invalid character ${input[j]} in base64 string.`); + } + bits |= constants_browser_1.alphabetByEncoding[input[j]] << ((limit - j) * constants_browser_1.bitsPerLetter); + bitLength += constants_browser_1.bitsPerLetter; + } + else { + bits >>= constants_browser_1.bitsPerLetter; + } + } + const chunkOffset = (i / 4) * 3; + bits >>= bitLength % constants_browser_1.bitsPerByte; + const byteLength = Math.floor(bitLength / constants_browser_1.bitsPerByte); + for (let k = 0; k < byteLength; k++) { + const offset = (byteLength - k - 1) * constants_browser_1.bitsPerByte; + dataView.setUint8(chunkOffset + k, (bits & (255 << offset)) >> offset); + } + } + return new Uint8Array(out); +}; +exports.fromBase64 = fromBase64; diff --git a/amplify/functions/deleteDocument/node_modules/@smithy/util-base64/dist-cjs/fromBase64.js b/amplify/functions/deleteDocument/node_modules/@smithy/util-base64/dist-cjs/fromBase64.js new file mode 100644 index 0000000..b06a7b8 --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@smithy/util-base64/dist-cjs/fromBase64.js @@ -0,0 +1,16 @@ +"use strict"; +Object.defineProperty(exports, "__esModule", { value: true }); +exports.fromBase64 = void 0; +const util_buffer_from_1 = require("@smithy/util-buffer-from"); +const BASE64_REGEX = /^[A-Za-z0-9+/]*={0,2}$/; +const fromBase64 = (input) => { + if ((input.length * 3) % 4 !== 0) { + throw new TypeError(`Incorrect padding on base64 string.`); + } + if (!BASE64_REGEX.exec(input)) { + throw new TypeError(`Invalid base64 string.`); + } + const buffer = (0, util_buffer_from_1.fromString)(input, "base64"); + return new Uint8Array(buffer.buffer, buffer.byteOffset, buffer.byteLength); +}; +exports.fromBase64 = fromBase64; diff --git a/amplify/functions/deleteDocument/node_modules/@smithy/util-base64/dist-cjs/index.js b/amplify/functions/deleteDocument/node_modules/@smithy/util-base64/dist-cjs/index.js new file mode 100644 index 
0000000..02848d0 --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@smithy/util-base64/dist-cjs/index.js @@ -0,0 +1,27 @@ +var __defProp = Object.defineProperty; +var __getOwnPropDesc = Object.getOwnPropertyDescriptor; +var __getOwnPropNames = Object.getOwnPropertyNames; +var __hasOwnProp = Object.prototype.hasOwnProperty; +var __copyProps = (to, from, except, desc) => { + if (from && typeof from === "object" || typeof from === "function") { + for (let key of __getOwnPropNames(from)) + if (!__hasOwnProp.call(to, key) && key !== except) + __defProp(to, key, { get: () => from[key], enumerable: !(desc = __getOwnPropDesc(from, key)) || desc.enumerable }); + } + return to; +}; +var __reExport = (target, mod, secondTarget) => (__copyProps(target, mod, "default"), secondTarget && __copyProps(secondTarget, mod, "default")); +var __toCommonJS = (mod) => __copyProps(__defProp({}, "__esModule", { value: true }), mod); + +// src/index.ts +var src_exports = {}; +module.exports = __toCommonJS(src_exports); +__reExport(src_exports, require("././fromBase64"), module.exports); +__reExport(src_exports, require("././toBase64"), module.exports); +// Annotate the CommonJS export names for ESM import in node: + +0 && (module.exports = { + fromBase64, + toBase64 +}); + diff --git a/amplify/functions/deleteDocument/node_modules/@smithy/util-base64/dist-cjs/toBase64.browser.js b/amplify/functions/deleteDocument/node_modules/@smithy/util-base64/dist-cjs/toBase64.browser.js new file mode 100644 index 0000000..e294f3f --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@smithy/util-base64/dist-cjs/toBase64.browser.js @@ -0,0 +1,39 @@ +"use strict"; +Object.defineProperty(exports, "__esModule", { value: true }); +exports.toBase64 = void 0; +const util_utf8_1 = require("@smithy/util-utf8"); +const constants_browser_1 = require("./constants.browser"); +function toBase64(_input) { + let input; + if (typeof _input === "string") { + input = (0, 
util_utf8_1.fromUtf8)(_input); + } + else { + input = _input; + } + const isArrayLike = typeof input === "object" && typeof input.length === "number"; + const isUint8Array = typeof input === "object" && + typeof input.byteOffset === "number" && + typeof input.byteLength === "number"; + if (!isArrayLike && !isUint8Array) { + throw new Error("@smithy/util-base64: toBase64 encoder function only accepts string | Uint8Array."); + } + let str = ""; + for (let i = 0; i < input.length; i += 3) { + let bits = 0; + let bitLength = 0; + for (let j = i, limit = Math.min(i + 3, input.length); j < limit; j++) { + bits |= input[j] << ((limit - j - 1) * constants_browser_1.bitsPerByte); + bitLength += constants_browser_1.bitsPerByte; + } + const bitClusterCount = Math.ceil(bitLength / constants_browser_1.bitsPerLetter); + bits <<= bitClusterCount * constants_browser_1.bitsPerLetter - bitLength; + for (let k = 1; k <= bitClusterCount; k++) { + const offset = (bitClusterCount - k) * constants_browser_1.bitsPerLetter; + str += constants_browser_1.alphabetByValue[(bits & (constants_browser_1.maxLetterValue << offset)) >> offset]; + } + str += "==".slice(0, 4 - bitClusterCount); + } + return str; +} +exports.toBase64 = toBase64; diff --git a/amplify/functions/deleteDocument/node_modules/@smithy/util-base64/dist-cjs/toBase64.js b/amplify/functions/deleteDocument/node_modules/@smithy/util-base64/dist-cjs/toBase64.js new file mode 100644 index 0000000..0590ce3 --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@smithy/util-base64/dist-cjs/toBase64.js @@ -0,0 +1,19 @@ +"use strict"; +Object.defineProperty(exports, "__esModule", { value: true }); +exports.toBase64 = void 0; +const util_buffer_from_1 = require("@smithy/util-buffer-from"); +const util_utf8_1 = require("@smithy/util-utf8"); +const toBase64 = (_input) => { + let input; + if (typeof _input === "string") { + input = (0, util_utf8_1.fromUtf8)(_input); + } + else { + input = _input; + } + if (typeof input !== 
"object" || typeof input.byteOffset !== "number" || typeof input.byteLength !== "number") { + throw new Error("@smithy/util-base64: toBase64 encoder function only accepts string | Uint8Array."); + } + return (0, util_buffer_from_1.fromArrayBuffer)(input.buffer, input.byteOffset, input.byteLength).toString("base64"); +}; +exports.toBase64 = toBase64; diff --git a/amplify/functions/deleteDocument/node_modules/@smithy/util-base64/dist-es/constants.browser.js b/amplify/functions/deleteDocument/node_modules/@smithy/util-base64/dist-es/constants.browser.js new file mode 100644 index 0000000..fd4df4d --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@smithy/util-base64/dist-es/constants.browser.js @@ -0,0 +1,28 @@ +const alphabetByEncoding = {}; +const alphabetByValue = new Array(64); +for (let i = 0, start = "A".charCodeAt(0), limit = "Z".charCodeAt(0); i + start <= limit; i++) { + const char = String.fromCharCode(i + start); + alphabetByEncoding[char] = i; + alphabetByValue[i] = char; +} +for (let i = 0, start = "a".charCodeAt(0), limit = "z".charCodeAt(0); i + start <= limit; i++) { + const char = String.fromCharCode(i + start); + const index = i + 26; + alphabetByEncoding[char] = index; + alphabetByValue[index] = char; +} +for (let i = 0; i < 10; i++) { + alphabetByEncoding[i.toString(10)] = i + 52; + const char = i.toString(10); + const index = i + 52; + alphabetByEncoding[char] = index; + alphabetByValue[index] = char; +} +alphabetByEncoding["+"] = 62; +alphabetByValue[62] = "+"; +alphabetByEncoding["/"] = 63; +alphabetByValue[63] = "/"; +const bitsPerLetter = 6; +const bitsPerByte = 8; +const maxLetterValue = 0b111111; +export { alphabetByEncoding, alphabetByValue, bitsPerLetter, bitsPerByte, maxLetterValue }; diff --git a/amplify/functions/deleteDocument/node_modules/@smithy/util-base64/dist-es/fromBase64.browser.js b/amplify/functions/deleteDocument/node_modules/@smithy/util-base64/dist-es/fromBase64.browser.js new file mode 100644 index 
0000000..c2c6a66 --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@smithy/util-base64/dist-es/fromBase64.browser.js @@ -0,0 +1,36 @@ +import { alphabetByEncoding, bitsPerByte, bitsPerLetter } from "./constants.browser"; +export const fromBase64 = (input) => { + let totalByteLength = (input.length / 4) * 3; + if (input.slice(-2) === "==") { + totalByteLength -= 2; + } + else if (input.slice(-1) === "=") { + totalByteLength--; + } + const out = new ArrayBuffer(totalByteLength); + const dataView = new DataView(out); + for (let i = 0; i < input.length; i += 4) { + let bits = 0; + let bitLength = 0; + for (let j = i, limit = i + 3; j <= limit; j++) { + if (input[j] !== "=") { + if (!(input[j] in alphabetByEncoding)) { + throw new TypeError(`Invalid character ${input[j]} in base64 string.`); + } + bits |= alphabetByEncoding[input[j]] << ((limit - j) * bitsPerLetter); + bitLength += bitsPerLetter; + } + else { + bits >>= bitsPerLetter; + } + } + const chunkOffset = (i / 4) * 3; + bits >>= bitLength % bitsPerByte; + const byteLength = Math.floor(bitLength / bitsPerByte); + for (let k = 0; k < byteLength; k++) { + const offset = (byteLength - k - 1) * bitsPerByte; + dataView.setUint8(chunkOffset + k, (bits & (255 << offset)) >> offset); + } + } + return new Uint8Array(out); +}; diff --git a/amplify/functions/deleteDocument/node_modules/@smithy/util-base64/dist-es/fromBase64.js b/amplify/functions/deleteDocument/node_modules/@smithy/util-base64/dist-es/fromBase64.js new file mode 100644 index 0000000..5197e93 --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@smithy/util-base64/dist-es/fromBase64.js @@ -0,0 +1,12 @@ +import { fromString } from "@smithy/util-buffer-from"; +const BASE64_REGEX = /^[A-Za-z0-9+/]*={0,2}$/; +export const fromBase64 = (input) => { + if ((input.length * 3) % 4 !== 0) { + throw new TypeError(`Incorrect padding on base64 string.`); + } + if (!BASE64_REGEX.exec(input)) { + throw new TypeError(`Invalid base64 string.`); 
+ } + const buffer = fromString(input, "base64"); + return new Uint8Array(buffer.buffer, buffer.byteOffset, buffer.byteLength); +}; diff --git a/amplify/functions/deleteDocument/node_modules/@smithy/util-base64/dist-es/index.js b/amplify/functions/deleteDocument/node_modules/@smithy/util-base64/dist-es/index.js new file mode 100644 index 0000000..594bd43 --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@smithy/util-base64/dist-es/index.js @@ -0,0 +1,2 @@ +export * from "./fromBase64"; +export * from "./toBase64"; diff --git a/amplify/functions/deleteDocument/node_modules/@smithy/util-base64/dist-es/toBase64.browser.js b/amplify/functions/deleteDocument/node_modules/@smithy/util-base64/dist-es/toBase64.browser.js new file mode 100644 index 0000000..2a03a9d --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@smithy/util-base64/dist-es/toBase64.browser.js @@ -0,0 +1,35 @@ +import { fromUtf8 } from "@smithy/util-utf8"; +import { alphabetByValue, bitsPerByte, bitsPerLetter, maxLetterValue } from "./constants.browser"; +export function toBase64(_input) { + let input; + if (typeof _input === "string") { + input = fromUtf8(_input); + } + else { + input = _input; + } + const isArrayLike = typeof input === "object" && typeof input.length === "number"; + const isUint8Array = typeof input === "object" && + typeof input.byteOffset === "number" && + typeof input.byteLength === "number"; + if (!isArrayLike && !isUint8Array) { + throw new Error("@smithy/util-base64: toBase64 encoder function only accepts string | Uint8Array."); + } + let str = ""; + for (let i = 0; i < input.length; i += 3) { + let bits = 0; + let bitLength = 0; + for (let j = i, limit = Math.min(i + 3, input.length); j < limit; j++) { + bits |= input[j] << ((limit - j - 1) * bitsPerByte); + bitLength += bitsPerByte; + } + const bitClusterCount = Math.ceil(bitLength / bitsPerLetter); + bits <<= bitClusterCount * bitsPerLetter - bitLength; + for (let k = 1; k <= bitClusterCount; k++) 
{ + const offset = (bitClusterCount - k) * bitsPerLetter; + str += alphabetByValue[(bits & (maxLetterValue << offset)) >> offset]; + } + str += "==".slice(0, 4 - bitClusterCount); + } + return str; +} diff --git a/amplify/functions/deleteDocument/node_modules/@smithy/util-base64/dist-es/toBase64.js b/amplify/functions/deleteDocument/node_modules/@smithy/util-base64/dist-es/toBase64.js new file mode 100644 index 0000000..61f03ce --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@smithy/util-base64/dist-es/toBase64.js @@ -0,0 +1,15 @@ +import { fromArrayBuffer } from "@smithy/util-buffer-from"; +import { fromUtf8 } from "@smithy/util-utf8"; +export const toBase64 = (_input) => { + let input; + if (typeof _input === "string") { + input = fromUtf8(_input); + } + else { + input = _input; + } + if (typeof input !== "object" || typeof input.byteOffset !== "number" || typeof input.byteLength !== "number") { + throw new Error("@smithy/util-base64: toBase64 encoder function only accepts string | Uint8Array."); + } + return fromArrayBuffer(input.buffer, input.byteOffset, input.byteLength).toString("base64"); +}; diff --git a/amplify/functions/deleteDocument/node_modules/@smithy/util-base64/dist-types/constants.browser.d.ts b/amplify/functions/deleteDocument/node_modules/@smithy/util-base64/dist-types/constants.browser.d.ts new file mode 100644 index 0000000..eb750ea --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@smithy/util-base64/dist-types/constants.browser.d.ts @@ -0,0 +1,6 @@ +declare const alphabetByEncoding: Record; +declare const alphabetByValue: Array; +declare const bitsPerLetter = 6; +declare const bitsPerByte = 8; +declare const maxLetterValue = 63; +export { alphabetByEncoding, alphabetByValue, bitsPerLetter, bitsPerByte, maxLetterValue }; diff --git a/amplify/functions/deleteDocument/node_modules/@smithy/util-base64/dist-types/fromBase64.browser.d.ts 
b/amplify/functions/deleteDocument/node_modules/@smithy/util-base64/dist-types/fromBase64.browser.d.ts new file mode 100644 index 0000000..6a640f1 --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@smithy/util-base64/dist-types/fromBase64.browser.d.ts @@ -0,0 +1,8 @@ +/** + * Converts a base-64 encoded string to a Uint8Array of bytes. + * + * @param input The base-64 encoded string + * + * @see https://tools.ietf.org/html/rfc4648#section-4 + */ +export declare const fromBase64: (input: string) => Uint8Array; diff --git a/amplify/functions/deleteDocument/node_modules/@smithy/util-base64/dist-types/fromBase64.d.ts b/amplify/functions/deleteDocument/node_modules/@smithy/util-base64/dist-types/fromBase64.d.ts new file mode 100644 index 0000000..1878a89 --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@smithy/util-base64/dist-types/fromBase64.d.ts @@ -0,0 +1,7 @@ +/** + * Converts a base-64 encoded string to a Uint8Array of bytes using Node.JS's + * `buffer` module. 
+ * + * @param input The base-64 encoded string + */ +export declare const fromBase64: (input: string) => Uint8Array; diff --git a/amplify/functions/deleteDocument/node_modules/@smithy/util-base64/dist-types/index.d.ts b/amplify/functions/deleteDocument/node_modules/@smithy/util-base64/dist-types/index.d.ts new file mode 100644 index 0000000..594bd43 --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@smithy/util-base64/dist-types/index.d.ts @@ -0,0 +1,2 @@ +export * from "./fromBase64"; +export * from "./toBase64"; diff --git a/amplify/functions/deleteDocument/node_modules/@smithy/util-base64/dist-types/toBase64.browser.d.ts b/amplify/functions/deleteDocument/node_modules/@smithy/util-base64/dist-types/toBase64.browser.d.ts new file mode 100644 index 0000000..5f5615e --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@smithy/util-base64/dist-types/toBase64.browser.d.ts @@ -0,0 +1,9 @@ +/** + * Converts a Uint8Array of binary data or a utf-8 string to a base-64 encoded string. + * + * @param _input - the binary data or string to encode. + * @returns base64 string. + * + * @see https://tools.ietf.org/html/rfc4648#section-4 + */ +export declare function toBase64(_input: Uint8Array | string): string; diff --git a/amplify/functions/deleteDocument/node_modules/@smithy/util-base64/dist-types/toBase64.d.ts b/amplify/functions/deleteDocument/node_modules/@smithy/util-base64/dist-types/toBase64.d.ts new file mode 100644 index 0000000..96bd0ed --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@smithy/util-base64/dist-types/toBase64.d.ts @@ -0,0 +1,8 @@ +/** + * Converts a Uint8Array of binary data or a utf-8 string to a base-64 encoded string using + * Node.JS's `buffer` module. + * + * @param _input - the binary data or string to encode. + * @returns base64 string. 
+ */ +export declare const toBase64: (_input: Uint8Array | string) => string; diff --git a/amplify/functions/deleteDocument/node_modules/@smithy/util-base64/dist-types/ts3.4/constants.browser.d.ts b/amplify/functions/deleteDocument/node_modules/@smithy/util-base64/dist-types/ts3.4/constants.browser.d.ts new file mode 100644 index 0000000..61c36c8 --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@smithy/util-base64/dist-types/ts3.4/constants.browser.d.ts @@ -0,0 +1,6 @@ +declare const alphabetByEncoding: Record; +declare const alphabetByValue: Array; +declare const bitsPerLetter = 6; +declare const bitsPerByte = 8; +declare const maxLetterValue = 63; +export { alphabetByEncoding, alphabetByValue, bitsPerLetter, bitsPerByte, maxLetterValue }; diff --git a/amplify/functions/deleteDocument/node_modules/@smithy/util-base64/dist-types/ts3.4/fromBase64.browser.d.ts b/amplify/functions/deleteDocument/node_modules/@smithy/util-base64/dist-types/ts3.4/fromBase64.browser.d.ts new file mode 100644 index 0000000..3a50006 --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@smithy/util-base64/dist-types/ts3.4/fromBase64.browser.d.ts @@ -0,0 +1,8 @@ +/** + * Converts a base-64 encoded string to a Uint8Array of bytes. + * + * @param input The base-64 encoded string + * + * @see https://tools.ietf.org/html/rfc4648#section-4 + */ +export declare const fromBase64: (input: string) => Uint8Array; diff --git a/amplify/functions/deleteDocument/node_modules/@smithy/util-base64/dist-types/ts3.4/fromBase64.d.ts b/amplify/functions/deleteDocument/node_modules/@smithy/util-base64/dist-types/ts3.4/fromBase64.d.ts new file mode 100644 index 0000000..f84c7c6 --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@smithy/util-base64/dist-types/ts3.4/fromBase64.d.ts @@ -0,0 +1,7 @@ +/** + * Converts a base-64 encoded string to a Uint8Array of bytes using Node.JS's + * `buffer` module. 
+ * + * @param input The base-64 encoded string + */ +export declare const fromBase64: (input: string) => Uint8Array; diff --git a/amplify/functions/deleteDocument/node_modules/@smithy/util-base64/dist-types/ts3.4/index.d.ts b/amplify/functions/deleteDocument/node_modules/@smithy/util-base64/dist-types/ts3.4/index.d.ts new file mode 100644 index 0000000..c4e1d03 --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@smithy/util-base64/dist-types/ts3.4/index.d.ts @@ -0,0 +1,2 @@ +export * from "./fromBase64"; +export * from "./toBase64"; diff --git a/amplify/functions/deleteDocument/node_modules/@smithy/util-base64/dist-types/ts3.4/toBase64.browser.d.ts b/amplify/functions/deleteDocument/node_modules/@smithy/util-base64/dist-types/ts3.4/toBase64.browser.d.ts new file mode 100644 index 0000000..260f696 --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@smithy/util-base64/dist-types/ts3.4/toBase64.browser.d.ts @@ -0,0 +1,9 @@ +/** + * Converts a Uint8Array of binary data or a utf-8 string to a base-64 encoded string. + * + * @param _input - the binary data or string to encode. + * @returns base64 string. + * + * @see https://tools.ietf.org/html/rfc4648#section-4 + */ +export declare function toBase64(_input: Uint8Array | string): string; diff --git a/amplify/functions/deleteDocument/node_modules/@smithy/util-base64/dist-types/ts3.4/toBase64.d.ts b/amplify/functions/deleteDocument/node_modules/@smithy/util-base64/dist-types/ts3.4/toBase64.d.ts new file mode 100644 index 0000000..7e8bb70 --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@smithy/util-base64/dist-types/ts3.4/toBase64.d.ts @@ -0,0 +1,8 @@ +/** + * Converts a Uint8Array of binary data or a utf-8 string to a base-64 encoded string using + * Node.JS's `buffer` module. + * + * @param _input - the binary data or string to encode. + * @returns base64 string. 
+ */ +export declare const toBase64: (_input: Uint8Array | string) => string; diff --git a/amplify/functions/deleteDocument/node_modules/@smithy/util-base64/package.json b/amplify/functions/deleteDocument/node_modules/@smithy/util-base64/package.json new file mode 100644 index 0000000..e122233 --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@smithy/util-base64/package.json @@ -0,0 +1,73 @@ +{ + "name": "@smithy/util-base64", + "version": "4.0.0", + "description": "A Base64 <-> UInt8Array converter", + "main": "./dist-cjs/index.js", + "module": "./dist-es/index.js", + "scripts": { + "build": "concurrently 'yarn:build:cjs' 'yarn:build:es' 'yarn:build:types && yarn build:types:downlevel'", + "build:cjs": "node ../../scripts/inline util-base64", + "build:es": "yarn g:tsc -p tsconfig.es.json", + "build:types": "yarn g:tsc -p tsconfig.types.json", + "build:types:downlevel": "rimraf dist-types/ts3.4 && downlevel-dts dist-types dist-types/ts3.4", + "stage-release": "rimraf ./.release && yarn pack && mkdir ./.release && tar zxvf ./package.tgz --directory ./.release && rm ./package.tgz", + "clean": "rimraf ./dist-* && rimraf *.tsbuildinfo || exit 0", + "lint": "eslint -c ../../.eslintrc.js \"src/**/*.ts\"", + "format": "prettier --config ../../prettier.config.js --ignore-path ../.prettierignore --write \"**/*.{ts,md,json}\"", + "test": "yarn g:vitest run", + "test:watch": "yarn g:vitest watch" + }, + "author": { + "name": "AWS SDK for JavaScript Team", + "url": "https://aws.amazon.com/javascript/" + }, + "license": "Apache-2.0", + "dependencies": { + "@smithy/util-buffer-from": "^4.0.0", + "@smithy/util-utf8": "^4.0.0", + "tslib": "^2.6.2" + }, + "devDependencies": { + "@types/node": "^18.11.9", + "concurrently": "7.0.0", + "downlevel-dts": "0.10.1", + "rimraf": "3.0.2", + "typedoc": "0.23.23" + }, + "types": "./dist-types/index.d.ts", + "engines": { + "node": ">=18.0.0" + }, + "typesVersions": { + "<4.0": { + "dist-types/*": [ + "dist-types/ts3.4/*" + ] + 
} + }, + "files": [ + "dist-*/**" + ], + "browser": { + "./dist-es/fromBase64": "./dist-es/fromBase64.browser", + "./dist-es/toBase64": "./dist-es/toBase64.browser" + }, + "react-native": { + "./dist-es/fromBase64": "./dist-es/fromBase64.browser", + "./dist-es/toBase64": "./dist-es/toBase64.browser", + "./dist-cjs/fromBase64": "./dist-cjs/fromBase64.browser", + "./dist-cjs/toBase64": "./dist-cjs/toBase64.browser" + }, + "homepage": "https://github.com/awslabs/smithy-typescript/tree/main/packages/util-base64", + "repository": { + "type": "git", + "url": "https://github.com/awslabs/smithy-typescript.git", + "directory": "packages/util-base64" + }, + "typedoc": { + "entryPoint": "src/index.ts" + }, + "publishConfig": { + "directory": ".release/package" + } +} \ No newline at end of file diff --git a/amplify/functions/deleteDocument/node_modules/@smithy/util-body-length-browser/LICENSE b/amplify/functions/deleteDocument/node_modules/@smithy/util-body-length-browser/LICENSE new file mode 100644 index 0000000..7b6491b --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@smithy/util-body-length-browser/LICENSE @@ -0,0 +1,201 @@ +Apache License + Version 2.0, January 2004 + http://www.apache.org/licenses/ + + TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION + + 1. Definitions. + + "License" shall mean the terms and conditions for use, reproduction, + and distribution as defined by Sections 1 through 9 of this document. + + "Licensor" shall mean the copyright owner or entity authorized by + the copyright owner that is granting the License. + + "Legal Entity" shall mean the union of the acting entity and all + other entities that control, are controlled by, or are under common + control with that entity. 
For the purposes of this definition, + "control" means (i) the power, direct or indirect, to cause the + direction or management of such entity, whether by contract or + otherwise, or (ii) ownership of fifty percent (50%) or more of the + outstanding shares, or (iii) beneficial ownership of such entity. + + "You" (or "Your") shall mean an individual or Legal Entity + exercising permissions granted by this License. + + "Source" form shall mean the preferred form for making modifications, + including but not limited to software source code, documentation + source, and configuration files. + + "Object" form shall mean any form resulting from mechanical + transformation or translation of a Source form, including but + not limited to compiled object code, generated documentation, + and conversions to other media types. + + "Work" shall mean the work of authorship, whether in Source or + Object form, made available under the License, as indicated by a + copyright notice that is included in or attached to the work + (an example is provided in the Appendix below). + + "Derivative Works" shall mean any work, whether in Source or Object + form, that is based on (or derived from) the Work and for which the + editorial revisions, annotations, elaborations, or other modifications + represent, as a whole, an original work of authorship. For the purposes + of this License, Derivative Works shall not include works that remain + separable from, or merely link (or bind by name) to the interfaces of, + the Work and Derivative Works thereof. + + "Contribution" shall mean any work of authorship, including + the original version of the Work and any modifications or additions + to that Work or Derivative Works thereof, that is intentionally + submitted to Licensor for inclusion in the Work by the copyright owner + or by an individual or Legal Entity authorized to submit on behalf of + the copyright owner. 
For the purposes of this definition, "submitted" + means any form of electronic, verbal, or written communication sent + to the Licensor or its representatives, including but not limited to + communication on electronic mailing lists, source code control systems, + and issue tracking systems that are managed by, or on behalf of, the + Licensor for the purpose of discussing and improving the Work, but + excluding communication that is conspicuously marked or otherwise + designated in writing by the copyright owner as "Not a Contribution." + + "Contributor" shall mean Licensor and any individual or Legal Entity + on behalf of whom a Contribution has been received by Licensor and + subsequently incorporated within the Work. + + 2. Grant of Copyright License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + copyright license to reproduce, prepare Derivative Works of, + publicly display, publicly perform, sublicense, and distribute the + Work and such Derivative Works in Source or Object form. + + 3. Grant of Patent License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + (except as stated in this section) patent license to make, have made, + use, offer to sell, sell, import, and otherwise transfer the Work, + where such license applies only to those patent claims licensable + by such Contributor that are necessarily infringed by their + Contribution(s) alone or by combination of their Contribution(s) + with the Work to which such Contribution(s) was submitted. 
If You + institute patent litigation against any entity (including a + cross-claim or counterclaim in a lawsuit) alleging that the Work + or a Contribution incorporated within the Work constitutes direct + or contributory patent infringement, then any patent licenses + granted to You under this License for that Work shall terminate + as of the date such litigation is filed. + + 4. Redistribution. You may reproduce and distribute copies of the + Work or Derivative Works thereof in any medium, with or without + modifications, and in Source or Object form, provided that You + meet the following conditions: + + (a) You must give any other recipients of the Work or + Derivative Works a copy of this License; and + + (b) You must cause any modified files to carry prominent notices + stating that You changed the files; and + + (c) You must retain, in the Source form of any Derivative Works + that You distribute, all copyright, patent, trademark, and + attribution notices from the Source form of the Work, + excluding those notices that do not pertain to any part of + the Derivative Works; and + + (d) If the Work includes a "NOTICE" text file as part of its + distribution, then any Derivative Works that You distribute must + include a readable copy of the attribution notices contained + within such NOTICE file, excluding those notices that do not + pertain to any part of the Derivative Works, in at least one + of the following places: within a NOTICE text file distributed + as part of the Derivative Works; within the Source form or + documentation, if provided along with the Derivative Works; or, + within a display generated by the Derivative Works, if and + wherever such third-party notices normally appear. The contents + of the NOTICE file are for informational purposes only and + do not modify the License. 
You may add Your own attribution + notices within Derivative Works that You distribute, alongside + or as an addendum to the NOTICE text from the Work, provided + that such additional attribution notices cannot be construed + as modifying the License. + + You may add Your own copyright statement to Your modifications and + may provide additional or different license terms and conditions + for use, reproduction, or distribution of Your modifications, or + for any such Derivative Works as a whole, provided Your use, + reproduction, and distribution of the Work otherwise complies with + the conditions stated in this License. + + 5. Submission of Contributions. Unless You explicitly state otherwise, + any Contribution intentionally submitted for inclusion in the Work + by You to the Licensor shall be under the terms and conditions of + this License, without any additional terms or conditions. + Notwithstanding the above, nothing herein shall supersede or modify + the terms of any separate license agreement you may have executed + with Licensor regarding such Contributions. + + 6. Trademarks. This License does not grant permission to use the trade + names, trademarks, service marks, or product names of the Licensor, + except as required for reasonable and customary use in describing the + origin of the Work and reproducing the content of the NOTICE file. + + 7. Disclaimer of Warranty. Unless required by applicable law or + agreed to in writing, Licensor provides the Work (and each + Contributor provides its Contributions) on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or + implied, including, without limitation, any warranties or conditions + of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A + PARTICULAR PURPOSE. You are solely responsible for determining the + appropriateness of using or redistributing the Work and assume any + risks associated with Your exercise of permissions under this License. + + 8. 
Limitation of Liability. In no event and under no legal theory, + whether in tort (including negligence), contract, or otherwise, + unless required by applicable law (such as deliberate and grossly + negligent acts) or agreed to in writing, shall any Contributor be + liable to You for damages, including any direct, indirect, special, + incidental, or consequential damages of any character arising as a + result of this License or out of the use or inability to use the + Work (including but not limited to damages for loss of goodwill, + work stoppage, computer failure or malfunction, or any and all + other commercial damages or losses), even if such Contributor + has been advised of the possibility of such damages. + + 9. Accepting Warranty or Additional Liability. While redistributing + the Work or Derivative Works thereof, You may choose to offer, + and charge a fee for, acceptance of support, warranty, indemnity, + or other liability obligations and/or rights consistent with this + License. However, in accepting such obligations, You may act only + on Your own behalf and on Your sole responsibility, not on behalf + of any other Contributor, and only if You agree to indemnify, + defend, and hold each Contributor harmless for any liability + incurred by, or claims asserted against, such Contributor by reason + of your accepting any such warranty or additional liability. + + END OF TERMS AND CONDITIONS + + APPENDIX: How to apply the Apache License to your work. + + To apply the Apache License to your work, attach the following + boilerplate notice, with the fields enclosed by brackets "{}" + replaced with your own identifying information. (Don't include + the brackets!) The text should be enclosed in the appropriate + comment syntax for the file format. We also recommend that a + file or class name and description of purpose be included on the + same "printed page" as the copyright notice for easier + identification within third-party archives. 
+ + Copyright 2018-2020 Amazon.com, Inc. or its affiliates. All Rights Reserved. + + Licensed under the Apache License, Version 2.0 (the "License"); + you may not use this file except in compliance with the License. + You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + + Unless required by applicable law or agreed to in writing, software + distributed under the License is distributed on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + See the License for the specific language governing permissions and + limitations under the License. \ No newline at end of file diff --git a/amplify/functions/deleteDocument/node_modules/@smithy/util-body-length-browser/README.md b/amplify/functions/deleteDocument/node_modules/@smithy/util-body-length-browser/README.md new file mode 100644 index 0000000..460d092 --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@smithy/util-body-length-browser/README.md @@ -0,0 +1,12 @@ +# @smithy/util-body-length-browser + +[![NPM version](https://img.shields.io/npm/v/@smithy/util-body-length-browser/latest.svg)](https://www.npmjs.com/package/@smithy/util-body-length-browser) +[![NPM downloads](https://img.shields.io/npm/dm/@smithy/util-body-length-browser.svg)](https://www.npmjs.com/package/@smithy/util-body-length-browser) + +Determines the length of a request body in browsers + +> An internal package + +## Usage + +You probably shouldn't, at least directly. 
diff --git a/amplify/functions/deleteDocument/node_modules/@smithy/util-body-length-browser/dist-cjs/calculateBodyLength.js b/amplify/functions/deleteDocument/node_modules/@smithy/util-body-length-browser/dist-cjs/calculateBodyLength.js new file mode 100644 index 0000000..532e610 --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@smithy/util-body-length-browser/dist-cjs/calculateBodyLength.js @@ -0,0 +1 @@ +module.exports = require("./index.js"); \ No newline at end of file diff --git a/amplify/functions/deleteDocument/node_modules/@smithy/util-body-length-browser/dist-cjs/index.js b/amplify/functions/deleteDocument/node_modules/@smithy/util-body-length-browser/dist-cjs/index.js new file mode 100644 index 0000000..9e872bc --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@smithy/util-body-length-browser/dist-cjs/index.js @@ -0,0 +1,57 @@ +var __defProp = Object.defineProperty; +var __getOwnPropDesc = Object.getOwnPropertyDescriptor; +var __getOwnPropNames = Object.getOwnPropertyNames; +var __hasOwnProp = Object.prototype.hasOwnProperty; +var __name = (target, value) => __defProp(target, "name", { value, configurable: true }); +var __export = (target, all) => { + for (var name in all) + __defProp(target, name, { get: all[name], enumerable: true }); +}; +var __copyProps = (to, from, except, desc) => { + if (from && typeof from === "object" || typeof from === "function") { + for (let key of __getOwnPropNames(from)) + if (!__hasOwnProp.call(to, key) && key !== except) + __defProp(to, key, { get: () => from[key], enumerable: !(desc = __getOwnPropDesc(from, key)) || desc.enumerable }); + } + return to; +}; +var __toCommonJS = (mod) => __copyProps(__defProp({}, "__esModule", { value: true }), mod); + +// src/index.ts +var src_exports = {}; +__export(src_exports, { + calculateBodyLength: () => calculateBodyLength +}); +module.exports = __toCommonJS(src_exports); + +// src/calculateBodyLength.ts +var TEXT_ENCODER = typeof TextEncoder == 
"function" ? new TextEncoder() : null; +var calculateBodyLength = /* @__PURE__ */ __name((body) => { + if (typeof body === "string") { + if (TEXT_ENCODER) { + return TEXT_ENCODER.encode(body).byteLength; + } + let len = body.length; + for (let i = len - 1; i >= 0; i--) { + const code = body.charCodeAt(i); + if (code > 127 && code <= 2047) + len++; + else if (code > 2047 && code <= 65535) + len += 2; + if (code >= 56320 && code <= 57343) + i--; + } + return len; + } else if (typeof body.byteLength === "number") { + return body.byteLength; + } else if (typeof body.size === "number") { + return body.size; + } + throw new Error(`Body Length computation failed for ${body}`); +}, "calculateBodyLength"); +// Annotate the CommonJS export names for ESM import in node: + +0 && (module.exports = { + calculateBodyLength +}); + diff --git a/amplify/functions/deleteDocument/node_modules/@smithy/util-body-length-browser/dist-es/calculateBodyLength.js b/amplify/functions/deleteDocument/node_modules/@smithy/util-body-length-browser/dist-es/calculateBodyLength.js new file mode 100644 index 0000000..6b994ca --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@smithy/util-body-length-browser/dist-es/calculateBodyLength.js @@ -0,0 +1,26 @@ +const TEXT_ENCODER = typeof TextEncoder == "function" ? 
new TextEncoder() : null; +export const calculateBodyLength = (body) => { + if (typeof body === "string") { + if (TEXT_ENCODER) { + return TEXT_ENCODER.encode(body).byteLength; + } + let len = body.length; + for (let i = len - 1; i >= 0; i--) { + const code = body.charCodeAt(i); + if (code > 0x7f && code <= 0x7ff) + len++; + else if (code > 0x7ff && code <= 0xffff) + len += 2; + if (code >= 0xdc00 && code <= 0xdfff) + i--; + } + return len; + } + else if (typeof body.byteLength === "number") { + return body.byteLength; + } + else if (typeof body.size === "number") { + return body.size; + } + throw new Error(`Body Length computation failed for ${body}`); +}; diff --git a/amplify/functions/deleteDocument/node_modules/@smithy/util-body-length-browser/dist-es/index.js b/amplify/functions/deleteDocument/node_modules/@smithy/util-body-length-browser/dist-es/index.js new file mode 100644 index 0000000..16ba478 --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@smithy/util-body-length-browser/dist-es/index.js @@ -0,0 +1 @@ +export * from "./calculateBodyLength"; diff --git a/amplify/functions/deleteDocument/node_modules/@smithy/util-body-length-browser/dist-types/calculateBodyLength.d.ts b/amplify/functions/deleteDocument/node_modules/@smithy/util-body-length-browser/dist-types/calculateBodyLength.d.ts new file mode 100644 index 0000000..8e1bdb0 --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@smithy/util-body-length-browser/dist-types/calculateBodyLength.d.ts @@ -0,0 +1,4 @@ +/** + * @internal + */ +export declare const calculateBodyLength: (body: any) => number | undefined; diff --git a/amplify/functions/deleteDocument/node_modules/@smithy/util-body-length-browser/dist-types/index.d.ts b/amplify/functions/deleteDocument/node_modules/@smithy/util-body-length-browser/dist-types/index.d.ts new file mode 100644 index 0000000..7b4a0d7 --- /dev/null +++ 
b/amplify/functions/deleteDocument/node_modules/@smithy/util-body-length-browser/dist-types/index.d.ts @@ -0,0 +1,4 @@ +/** + * @internal + */ +export * from "./calculateBodyLength"; diff --git a/amplify/functions/deleteDocument/node_modules/@smithy/util-body-length-browser/dist-types/ts3.4/calculateBodyLength.d.ts b/amplify/functions/deleteDocument/node_modules/@smithy/util-body-length-browser/dist-types/ts3.4/calculateBodyLength.d.ts new file mode 100644 index 0000000..3260536 --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@smithy/util-body-length-browser/dist-types/ts3.4/calculateBodyLength.d.ts @@ -0,0 +1,4 @@ +/** + * @internal + */ +export declare const calculateBodyLength: (body: any) => number | undefined; diff --git a/amplify/functions/deleteDocument/node_modules/@smithy/util-body-length-browser/dist-types/ts3.4/index.d.ts b/amplify/functions/deleteDocument/node_modules/@smithy/util-body-length-browser/dist-types/ts3.4/index.d.ts new file mode 100644 index 0000000..ab6cb83 --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@smithy/util-body-length-browser/dist-types/ts3.4/index.d.ts @@ -0,0 +1,4 @@ +/** + * @internal + */ +export * from "./calculateBodyLength"; diff --git a/amplify/functions/deleteDocument/node_modules/@smithy/util-body-length-browser/package.json b/amplify/functions/deleteDocument/node_modules/@smithy/util-body-length-browser/package.json new file mode 100644 index 0000000..b571489 --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@smithy/util-body-length-browser/package.json @@ -0,0 +1,60 @@ +{ + "name": "@smithy/util-body-length-browser", + "description": "Determines the length of a request body in browsers", + "version": "4.0.0", + "scripts": { + "build": "concurrently 'yarn:build:cjs' 'yarn:build:es' 'yarn:build:types && yarn build:types:downlevel'", + "build:cjs": "node ../../scripts/inline util-body-length-browser", + "build:es": "yarn g:tsc -p tsconfig.es.json", + "build:types": 
"yarn g:tsc -p tsconfig.types.json", + "build:types:downlevel": "rimraf dist-types/ts3.4 && downlevel-dts dist-types dist-types/ts3.4", + "stage-release": "rimraf ./.release && yarn pack && mkdir ./.release && tar zxvf ./package.tgz --directory ./.release && rm ./package.tgz", + "clean": "rimraf ./dist-* && rimraf *.tsbuildinfo || exit 0", + "lint": "eslint -c ../../.eslintrc.js \"src/**/*.ts\"", + "format": "prettier --config ../../prettier.config.js --ignore-path ../.prettierignore --write \"**/*.{ts,md,json}\"", + "test": "yarn g:vitest run", + "test:watch": "yarn g:vitest watch" + }, + "main": "./dist-cjs/index.js", + "module": "./dist-es/index.js", + "types": "./dist-types/index.d.ts", + "author": { + "name": "AWS SDK for JavaScript Team", + "url": "https://aws.amazon.com/javascript/" + }, + "license": "Apache-2.0", + "dependencies": { + "tslib": "^2.6.2" + }, + "typesVersions": { + "<4.0": { + "dist-types/*": [ + "dist-types/ts3.4/*" + ] + } + }, + "files": [ + "dist-*/**" + ], + "homepage": "https://github.com/awslabs/smithy-typescript/tree/main/packages/util-body-length-browser", + "repository": { + "type": "git", + "url": "https://github.com/awslabs/smithy-typescript.git", + "directory": "packages/util-body-length-browser" + }, + "devDependencies": { + "concurrently": "7.0.0", + "downlevel-dts": "0.10.1", + "rimraf": "3.0.2", + "typedoc": "0.23.23" + }, + "typedoc": { + "entryPoint": "src/index.ts" + }, + "publishConfig": { + "directory": ".release/package" + }, + "engines": { + "node": ">=18.0.0" + } +} \ No newline at end of file diff --git a/amplify/functions/deleteDocument/node_modules/@smithy/util-body-length-node/LICENSE b/amplify/functions/deleteDocument/node_modules/@smithy/util-body-length-node/LICENSE new file mode 100644 index 0000000..7b6491b --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@smithy/util-body-length-node/LICENSE @@ -0,0 +1,201 @@ +Apache License + Version 2.0, January 2004 + http://www.apache.org/licenses/ + + 
TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION + + 1. Definitions. + + "License" shall mean the terms and conditions for use, reproduction, + and distribution as defined by Sections 1 through 9 of this document. + + "Licensor" shall mean the copyright owner or entity authorized by + the copyright owner that is granting the License. + + "Legal Entity" shall mean the union of the acting entity and all + other entities that control, are controlled by, or are under common + control with that entity. For the purposes of this definition, + "control" means (i) the power, direct or indirect, to cause the + direction or management of such entity, whether by contract or + otherwise, or (ii) ownership of fifty percent (50%) or more of the + outstanding shares, or (iii) beneficial ownership of such entity. + + "You" (or "Your") shall mean an individual or Legal Entity + exercising permissions granted by this License. + + "Source" form shall mean the preferred form for making modifications, + including but not limited to software source code, documentation + source, and configuration files. + + "Object" form shall mean any form resulting from mechanical + transformation or translation of a Source form, including but + not limited to compiled object code, generated documentation, + and conversions to other media types. + + "Work" shall mean the work of authorship, whether in Source or + Object form, made available under the License, as indicated by a + copyright notice that is included in or attached to the work + (an example is provided in the Appendix below). + + "Derivative Works" shall mean any work, whether in Source or Object + form, that is based on (or derived from) the Work and for which the + editorial revisions, annotations, elaborations, or other modifications + represent, as a whole, an original work of authorship. 
For the purposes + of this License, Derivative Works shall not include works that remain + separable from, or merely link (or bind by name) to the interfaces of, + the Work and Derivative Works thereof. + + "Contribution" shall mean any work of authorship, including + the original version of the Work and any modifications or additions + to that Work or Derivative Works thereof, that is intentionally + submitted to Licensor for inclusion in the Work by the copyright owner + or by an individual or Legal Entity authorized to submit on behalf of + the copyright owner. For the purposes of this definition, "submitted" + means any form of electronic, verbal, or written communication sent + to the Licensor or its representatives, including but not limited to + communication on electronic mailing lists, source code control systems, + and issue tracking systems that are managed by, or on behalf of, the + Licensor for the purpose of discussing and improving the Work, but + excluding communication that is conspicuously marked or otherwise + designated in writing by the copyright owner as "Not a Contribution." + + "Contributor" shall mean Licensor and any individual or Legal Entity + on behalf of whom a Contribution has been received by Licensor and + subsequently incorporated within the Work. + + 2. Grant of Copyright License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + copyright license to reproduce, prepare Derivative Works of, + publicly display, publicly perform, sublicense, and distribute the + Work and such Derivative Works in Source or Object form. + + 3. Grant of Patent License. 
Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + (except as stated in this section) patent license to make, have made, + use, offer to sell, sell, import, and otherwise transfer the Work, + where such license applies only to those patent claims licensable + by such Contributor that are necessarily infringed by their + Contribution(s) alone or by combination of their Contribution(s) + with the Work to which such Contribution(s) was submitted. If You + institute patent litigation against any entity (including a + cross-claim or counterclaim in a lawsuit) alleging that the Work + or a Contribution incorporated within the Work constitutes direct + or contributory patent infringement, then any patent licenses + granted to You under this License for that Work shall terminate + as of the date such litigation is filed. + + 4. Redistribution. You may reproduce and distribute copies of the + Work or Derivative Works thereof in any medium, with or without + modifications, and in Source or Object form, provided that You + meet the following conditions: + + (a) You must give any other recipients of the Work or + Derivative Works a copy of this License; and + + (b) You must cause any modified files to carry prominent notices + stating that You changed the files; and + + (c) You must retain, in the Source form of any Derivative Works + that You distribute, all copyright, patent, trademark, and + attribution notices from the Source form of the Work, + excluding those notices that do not pertain to any part of + the Derivative Works; and + + (d) If the Work includes a "NOTICE" text file as part of its + distribution, then any Derivative Works that You distribute must + include a readable copy of the attribution notices contained + within such NOTICE file, excluding those notices that do not + pertain to any part of the Derivative Works, in at least one + of 
the following places: within a NOTICE text file distributed + as part of the Derivative Works; within the Source form or + documentation, if provided along with the Derivative Works; or, + within a display generated by the Derivative Works, if and + wherever such third-party notices normally appear. The contents + of the NOTICE file are for informational purposes only and + do not modify the License. You may add Your own attribution + notices within Derivative Works that You distribute, alongside + or as an addendum to the NOTICE text from the Work, provided + that such additional attribution notices cannot be construed + as modifying the License. + + You may add Your own copyright statement to Your modifications and + may provide additional or different license terms and conditions + for use, reproduction, or distribution of Your modifications, or + for any such Derivative Works as a whole, provided Your use, + reproduction, and distribution of the Work otherwise complies with + the conditions stated in this License. + + 5. Submission of Contributions. Unless You explicitly state otherwise, + any Contribution intentionally submitted for inclusion in the Work + by You to the Licensor shall be under the terms and conditions of + this License, without any additional terms or conditions. + Notwithstanding the above, nothing herein shall supersede or modify + the terms of any separate license agreement you may have executed + with Licensor regarding such Contributions. + + 6. Trademarks. This License does not grant permission to use the trade + names, trademarks, service marks, or product names of the Licensor, + except as required for reasonable and customary use in describing the + origin of the Work and reproducing the content of the NOTICE file. + + 7. Disclaimer of Warranty. 
Unless required by applicable law or + agreed to in writing, Licensor provides the Work (and each + Contributor provides its Contributions) on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or + implied, including, without limitation, any warranties or conditions + of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A + PARTICULAR PURPOSE. You are solely responsible for determining the + appropriateness of using or redistributing the Work and assume any + risks associated with Your exercise of permissions under this License. + + 8. Limitation of Liability. In no event and under no legal theory, + whether in tort (including negligence), contract, or otherwise, + unless required by applicable law (such as deliberate and grossly + negligent acts) or agreed to in writing, shall any Contributor be + liable to You for damages, including any direct, indirect, special, + incidental, or consequential damages of any character arising as a + result of this License or out of the use or inability to use the + Work (including but not limited to damages for loss of goodwill, + work stoppage, computer failure or malfunction, or any and all + other commercial damages or losses), even if such Contributor + has been advised of the possibility of such damages. + + 9. Accepting Warranty or Additional Liability. While redistributing + the Work or Derivative Works thereof, You may choose to offer, + and charge a fee for, acceptance of support, warranty, indemnity, + or other liability obligations and/or rights consistent with this + License. However, in accepting such obligations, You may act only + on Your own behalf and on Your sole responsibility, not on behalf + of any other Contributor, and only if You agree to indemnify, + defend, and hold each Contributor harmless for any liability + incurred by, or claims asserted against, such Contributor by reason + of your accepting any such warranty or additional liability. 
+ + END OF TERMS AND CONDITIONS + + APPENDIX: How to apply the Apache License to your work. + + To apply the Apache License to your work, attach the following + boilerplate notice, with the fields enclosed by brackets "{}" + replaced with your own identifying information. (Don't include + the brackets!) The text should be enclosed in the appropriate + comment syntax for the file format. We also recommend that a + file or class name and description of purpose be included on the + same "printed page" as the copyright notice for easier + identification within third-party archives. + + Copyright 2018-2020 Amazon.com, Inc. or its affiliates. All Rights Reserved. + + Licensed under the Apache License, Version 2.0 (the "License"); + you may not use this file except in compliance with the License. + You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + + Unless required by applicable law or agreed to in writing, software + distributed under the License is distributed on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + See the License for the specific language governing permissions and + limitations under the License. 
\ No newline at end of file diff --git a/amplify/functions/deleteDocument/node_modules/@smithy/util-body-length-node/README.md b/amplify/functions/deleteDocument/node_modules/@smithy/util-body-length-node/README.md new file mode 100644 index 0000000..9a80efe --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@smithy/util-body-length-node/README.md @@ -0,0 +1,12 @@ +# @smithy/util-body-length-node + +[![NPM version](https://img.shields.io/npm/v/@smithy/util-body-length-node/latest.svg)](https://www.npmjs.com/package/@smithy/util-body-length-node) +[![NPM downloads](https://img.shields.io/npm/dm/@smithy/util-body-length-node.svg)](https://www.npmjs.com/package/@smithy/util-body-length-node) + +Determines the length of a request body in node.js + +> An internal package + +## Usage + +You probably shouldn't, at least directly. diff --git a/amplify/functions/deleteDocument/node_modules/@smithy/util-body-length-node/dist-cjs/calculateBodyLength.js b/amplify/functions/deleteDocument/node_modules/@smithy/util-body-length-node/dist-cjs/calculateBodyLength.js new file mode 100644 index 0000000..532e610 --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@smithy/util-body-length-node/dist-cjs/calculateBodyLength.js @@ -0,0 +1 @@ +module.exports = require("./index.js"); \ No newline at end of file diff --git a/amplify/functions/deleteDocument/node_modules/@smithy/util-body-length-node/dist-cjs/index.js b/amplify/functions/deleteDocument/node_modules/@smithy/util-body-length-node/dist-cjs/index.js new file mode 100644 index 0000000..1ecdc79 --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@smithy/util-body-length-node/dist-cjs/index.js @@ -0,0 +1,53 @@ +var __defProp = Object.defineProperty; +var __getOwnPropDesc = Object.getOwnPropertyDescriptor; +var __getOwnPropNames = Object.getOwnPropertyNames; +var __hasOwnProp = Object.prototype.hasOwnProperty; +var __name = (target, value) => __defProp(target, "name", { value, configurable: 
true }); +var __export = (target, all) => { + for (var name in all) + __defProp(target, name, { get: all[name], enumerable: true }); +}; +var __copyProps = (to, from, except, desc) => { + if (from && typeof from === "object" || typeof from === "function") { + for (let key of __getOwnPropNames(from)) + if (!__hasOwnProp.call(to, key) && key !== except) + __defProp(to, key, { get: () => from[key], enumerable: !(desc = __getOwnPropDesc(from, key)) || desc.enumerable }); + } + return to; +}; +var __toCommonJS = (mod) => __copyProps(__defProp({}, "__esModule", { value: true }), mod); + +// src/index.ts +var src_exports = {}; +__export(src_exports, { + calculateBodyLength: () => calculateBodyLength +}); +module.exports = __toCommonJS(src_exports); + +// src/calculateBodyLength.ts +var import_fs = require("fs"); +var calculateBodyLength = /* @__PURE__ */ __name((body) => { + if (!body) { + return 0; + } + if (typeof body === "string") { + return Buffer.byteLength(body); + } else if (typeof body.byteLength === "number") { + return body.byteLength; + } else if (typeof body.size === "number") { + return body.size; + } else if (typeof body.start === "number" && typeof body.end === "number") { + return body.end + 1 - body.start; + } else if (typeof body.path === "string" || Buffer.isBuffer(body.path)) { + return (0, import_fs.lstatSync)(body.path).size; + } else if (typeof body.fd === "number") { + return (0, import_fs.fstatSync)(body.fd).size; + } + throw new Error(`Body Length computation failed for ${body}`); +}, "calculateBodyLength"); +// Annotate the CommonJS export names for ESM import in node: + +0 && (module.exports = { + calculateBodyLength +}); + diff --git a/amplify/functions/deleteDocument/node_modules/@smithy/util-body-length-node/dist-es/calculateBodyLength.js b/amplify/functions/deleteDocument/node_modules/@smithy/util-body-length-node/dist-es/calculateBodyLength.js new file mode 100644 index 0000000..857cff5 --- /dev/null +++ 
b/amplify/functions/deleteDocument/node_modules/@smithy/util-body-length-node/dist-es/calculateBodyLength.js @@ -0,0 +1,25 @@ +import { fstatSync, lstatSync } from "fs"; +export const calculateBodyLength = (body) => { + if (!body) { + return 0; + } + if (typeof body === "string") { + return Buffer.byteLength(body); + } + else if (typeof body.byteLength === "number") { + return body.byteLength; + } + else if (typeof body.size === "number") { + return body.size; + } + else if (typeof body.start === "number" && typeof body.end === "number") { + return body.end + 1 - body.start; + } + else if (typeof body.path === "string" || Buffer.isBuffer(body.path)) { + return lstatSync(body.path).size; + } + else if (typeof body.fd === "number") { + return fstatSync(body.fd).size; + } + throw new Error(`Body Length computation failed for ${body}`); +}; diff --git a/amplify/functions/deleteDocument/node_modules/@smithy/util-body-length-node/dist-es/index.js b/amplify/functions/deleteDocument/node_modules/@smithy/util-body-length-node/dist-es/index.js new file mode 100644 index 0000000..16ba478 --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@smithy/util-body-length-node/dist-es/index.js @@ -0,0 +1 @@ +export * from "./calculateBodyLength"; diff --git a/amplify/functions/deleteDocument/node_modules/@smithy/util-body-length-node/dist-types/calculateBodyLength.d.ts b/amplify/functions/deleteDocument/node_modules/@smithy/util-body-length-node/dist-types/calculateBodyLength.d.ts new file mode 100644 index 0000000..8e1bdb0 --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@smithy/util-body-length-node/dist-types/calculateBodyLength.d.ts @@ -0,0 +1,4 @@ +/** + * @internal + */ +export declare const calculateBodyLength: (body: any) => number | undefined; diff --git a/amplify/functions/deleteDocument/node_modules/@smithy/util-body-length-node/dist-types/index.d.ts 
b/amplify/functions/deleteDocument/node_modules/@smithy/util-body-length-node/dist-types/index.d.ts new file mode 100644 index 0000000..7b4a0d7 --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@smithy/util-body-length-node/dist-types/index.d.ts @@ -0,0 +1,4 @@ +/** + * @internal + */ +export * from "./calculateBodyLength"; diff --git a/amplify/functions/deleteDocument/node_modules/@smithy/util-body-length-node/dist-types/ts3.4/calculateBodyLength.d.ts b/amplify/functions/deleteDocument/node_modules/@smithy/util-body-length-node/dist-types/ts3.4/calculateBodyLength.d.ts new file mode 100644 index 0000000..3260536 --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@smithy/util-body-length-node/dist-types/ts3.4/calculateBodyLength.d.ts @@ -0,0 +1,4 @@ +/** + * @internal + */ +export declare const calculateBodyLength: (body: any) => number | undefined; diff --git a/amplify/functions/deleteDocument/node_modules/@smithy/util-body-length-node/dist-types/ts3.4/index.d.ts b/amplify/functions/deleteDocument/node_modules/@smithy/util-body-length-node/dist-types/ts3.4/index.d.ts new file mode 100644 index 0000000..ab6cb83 --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@smithy/util-body-length-node/dist-types/ts3.4/index.d.ts @@ -0,0 +1,4 @@ +/** + * @internal + */ +export * from "./calculateBodyLength"; diff --git a/amplify/functions/deleteDocument/node_modules/@smithy/util-body-length-node/package.json b/amplify/functions/deleteDocument/node_modules/@smithy/util-body-length-node/package.json new file mode 100644 index 0000000..25b0f7a --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@smithy/util-body-length-node/package.json @@ -0,0 +1,61 @@ +{ + "name": "@smithy/util-body-length-node", + "description": "Determines the length of a request body in node.js", + "version": "4.0.0", + "scripts": { + "build": "concurrently 'yarn:build:cjs' 'yarn:build:es' 'yarn:build:types && yarn build:types:downlevel'", + 
"build:cjs": "node ../../scripts/inline util-body-length-node", + "build:es": "yarn g:tsc -p tsconfig.es.json", + "build:types": "yarn g:tsc -p tsconfig.types.json", + "build:types:downlevel": "rimraf dist-types/ts3.4 && downlevel-dts dist-types dist-types/ts3.4", + "stage-release": "rimraf ./.release && yarn pack && mkdir ./.release && tar zxvf ./package.tgz --directory ./.release && rm ./package.tgz", + "clean": "rimraf ./dist-* && rimraf *.tsbuildinfo || exit 0", + "lint": "eslint -c ../../.eslintrc.js \"src/**/*.ts\"", + "format": "prettier --config ../../prettier.config.js --ignore-path ../.prettierignore --write \"**/*.{ts,md,json}\"", + "test": "yarn g:vitest run", + "test:watch": "yarn g:vitest watch" + }, + "devDependencies": { + "@types/node": "^18.11.9", + "concurrently": "7.0.0", + "downlevel-dts": "0.10.1", + "rimraf": "3.0.2", + "typedoc": "0.23.23" + }, + "main": "./dist-cjs/index.js", + "module": "./dist-es/index.js", + "types": "./dist-types/index.d.ts", + "author": { + "name": "AWS SDK for JavaScript Team", + "url": "https://aws.amazon.com/javascript/" + }, + "license": "Apache-2.0", + "dependencies": { + "tslib": "^2.6.2" + }, + "engines": { + "node": ">=18.0.0" + }, + "typesVersions": { + "<4.0": { + "dist-types/*": [ + "dist-types/ts3.4/*" + ] + } + }, + "files": [ + "dist-*/**" + ], + "homepage": "https://github.com/awslabs/smithy-typescript/tree/main/packages/util-body-length-node", + "repository": { + "type": "git", + "url": "https://github.com/awslabs/smithy-typescript.git", + "directory": "packages/util-body-length-node" + }, + "typedoc": { + "entryPoint": "src/index.ts" + }, + "publishConfig": { + "directory": ".release/package" + } +} \ No newline at end of file diff --git a/amplify/functions/deleteDocument/node_modules/@smithy/util-buffer-from/LICENSE b/amplify/functions/deleteDocument/node_modules/@smithy/util-buffer-from/LICENSE new file mode 100644 index 0000000..7b6491b --- /dev/null +++ 
b/amplify/functions/deleteDocument/node_modules/@smithy/util-buffer-from/LICENSE @@ -0,0 +1,201 @@ +Apache License + Version 2.0, January 2004 + http://www.apache.org/licenses/ + + TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION + + 1. Definitions. + + "License" shall mean the terms and conditions for use, reproduction, + and distribution as defined by Sections 1 through 9 of this document. + + "Licensor" shall mean the copyright owner or entity authorized by + the copyright owner that is granting the License. + + "Legal Entity" shall mean the union of the acting entity and all + other entities that control, are controlled by, or are under common + control with that entity. For the purposes of this definition, + "control" means (i) the power, direct or indirect, to cause the + direction or management of such entity, whether by contract or + otherwise, or (ii) ownership of fifty percent (50%) or more of the + outstanding shares, or (iii) beneficial ownership of such entity. + + "You" (or "Your") shall mean an individual or Legal Entity + exercising permissions granted by this License. + + "Source" form shall mean the preferred form for making modifications, + including but not limited to software source code, documentation + source, and configuration files. + + "Object" form shall mean any form resulting from mechanical + transformation or translation of a Source form, including but + not limited to compiled object code, generated documentation, + and conversions to other media types. + + "Work" shall mean the work of authorship, whether in Source or + Object form, made available under the License, as indicated by a + copyright notice that is included in or attached to the work + (an example is provided in the Appendix below). 
+ + "Derivative Works" shall mean any work, whether in Source or Object + form, that is based on (or derived from) the Work and for which the + editorial revisions, annotations, elaborations, or other modifications + represent, as a whole, an original work of authorship. For the purposes + of this License, Derivative Works shall not include works that remain + separable from, or merely link (or bind by name) to the interfaces of, + the Work and Derivative Works thereof. + + "Contribution" shall mean any work of authorship, including + the original version of the Work and any modifications or additions + to that Work or Derivative Works thereof, that is intentionally + submitted to Licensor for inclusion in the Work by the copyright owner + or by an individual or Legal Entity authorized to submit on behalf of + the copyright owner. For the purposes of this definition, "submitted" + means any form of electronic, verbal, or written communication sent + to the Licensor or its representatives, including but not limited to + communication on electronic mailing lists, source code control systems, + and issue tracking systems that are managed by, or on behalf of, the + Licensor for the purpose of discussing and improving the Work, but + excluding communication that is conspicuously marked or otherwise + designated in writing by the copyright owner as "Not a Contribution." + + "Contributor" shall mean Licensor and any individual or Legal Entity + on behalf of whom a Contribution has been received by Licensor and + subsequently incorporated within the Work. + + 2. Grant of Copyright License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + copyright license to reproduce, prepare Derivative Works of, + publicly display, publicly perform, sublicense, and distribute the + Work and such Derivative Works in Source or Object form. + + 3. 
Grant of Patent License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + (except as stated in this section) patent license to make, have made, + use, offer to sell, sell, import, and otherwise transfer the Work, + where such license applies only to those patent claims licensable + by such Contributor that are necessarily infringed by their + Contribution(s) alone or by combination of their Contribution(s) + with the Work to which such Contribution(s) was submitted. If You + institute patent litigation against any entity (including a + cross-claim or counterclaim in a lawsuit) alleging that the Work + or a Contribution incorporated within the Work constitutes direct + or contributory patent infringement, then any patent licenses + granted to You under this License for that Work shall terminate + as of the date such litigation is filed. + + 4. Redistribution. You may reproduce and distribute copies of the + Work or Derivative Works thereof in any medium, with or without + modifications, and in Source or Object form, provided that You + meet the following conditions: + + (a) You must give any other recipients of the Work or + Derivative Works a copy of this License; and + + (b) You must cause any modified files to carry prominent notices + stating that You changed the files; and + + (c) You must retain, in the Source form of any Derivative Works + that You distribute, all copyright, patent, trademark, and + attribution notices from the Source form of the Work, + excluding those notices that do not pertain to any part of + the Derivative Works; and + + (d) If the Work includes a "NOTICE" text file as part of its + distribution, then any Derivative Works that You distribute must + include a readable copy of the attribution notices contained + within such NOTICE file, excluding those notices that do not + pertain to any part of the Derivative 
Works, in at least one + of the following places: within a NOTICE text file distributed + as part of the Derivative Works; within the Source form or + documentation, if provided along with the Derivative Works; or, + within a display generated by the Derivative Works, if and + wherever such third-party notices normally appear. The contents + of the NOTICE file are for informational purposes only and + do not modify the License. You may add Your own attribution + notices within Derivative Works that You distribute, alongside + or as an addendum to the NOTICE text from the Work, provided + that such additional attribution notices cannot be construed + as modifying the License. + + You may add Your own copyright statement to Your modifications and + may provide additional or different license terms and conditions + for use, reproduction, or distribution of Your modifications, or + for any such Derivative Works as a whole, provided Your use, + reproduction, and distribution of the Work otherwise complies with + the conditions stated in this License. + + 5. Submission of Contributions. Unless You explicitly state otherwise, + any Contribution intentionally submitted for inclusion in the Work + by You to the Licensor shall be under the terms and conditions of + this License, without any additional terms or conditions. + Notwithstanding the above, nothing herein shall supersede or modify + the terms of any separate license agreement you may have executed + with Licensor regarding such Contributions. + + 6. Trademarks. This License does not grant permission to use the trade + names, trademarks, service marks, or product names of the Licensor, + except as required for reasonable and customary use in describing the + origin of the Work and reproducing the content of the NOTICE file. + + 7. Disclaimer of Warranty. 
Unless required by applicable law or + agreed to in writing, Licensor provides the Work (and each + Contributor provides its Contributions) on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or + implied, including, without limitation, any warranties or conditions + of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A + PARTICULAR PURPOSE. You are solely responsible for determining the + appropriateness of using or redistributing the Work and assume any + risks associated with Your exercise of permissions under this License. + + 8. Limitation of Liability. In no event and under no legal theory, + whether in tort (including negligence), contract, or otherwise, + unless required by applicable law (such as deliberate and grossly + negligent acts) or agreed to in writing, shall any Contributor be + liable to You for damages, including any direct, indirect, special, + incidental, or consequential damages of any character arising as a + result of this License or out of the use or inability to use the + Work (including but not limited to damages for loss of goodwill, + work stoppage, computer failure or malfunction, or any and all + other commercial damages or losses), even if such Contributor + has been advised of the possibility of such damages. + + 9. Accepting Warranty or Additional Liability. While redistributing + the Work or Derivative Works thereof, You may choose to offer, + and charge a fee for, acceptance of support, warranty, indemnity, + or other liability obligations and/or rights consistent with this + License. However, in accepting such obligations, You may act only + on Your own behalf and on Your sole responsibility, not on behalf + of any other Contributor, and only if You agree to indemnify, + defend, and hold each Contributor harmless for any liability + incurred by, or claims asserted against, such Contributor by reason + of your accepting any such warranty or additional liability. 
+ + END OF TERMS AND CONDITIONS + + APPENDIX: How to apply the Apache License to your work. + + To apply the Apache License to your work, attach the following + boilerplate notice, with the fields enclosed by brackets "{}" + replaced with your own identifying information. (Don't include + the brackets!) The text should be enclosed in the appropriate + comment syntax for the file format. We also recommend that a + file or class name and description of purpose be included on the + same "printed page" as the copyright notice for easier + identification within third-party archives. + + Copyright 2018-2020 Amazon.com, Inc. or its affiliates. All Rights Reserved. + + Licensed under the Apache License, Version 2.0 (the "License"); + you may not use this file except in compliance with the License. + You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + + Unless required by applicable law or agreed to in writing, software + distributed under the License is distributed on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + See the License for the specific language governing permissions and + limitations under the License. \ No newline at end of file diff --git a/amplify/functions/deleteDocument/node_modules/@smithy/util-buffer-from/README.md b/amplify/functions/deleteDocument/node_modules/@smithy/util-buffer-from/README.md new file mode 100644 index 0000000..c896b04 --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@smithy/util-buffer-from/README.md @@ -0,0 +1,10 @@ +# @smithy/util-buffer-from + +[![NPM version](https://img.shields.io/npm/v/@smithy/util-buffer-from/latest.svg)](https://www.npmjs.com/package/@smithy/util-buffer-from) +[![NPM downloads](https://img.shields.io/npm/dm/@smithy/util-buffer-from.svg)](https://www.npmjs.com/package/@smithy/util-buffer-from) + +> An internal package + +## Usage + +You probably shouldn't, at least directly. 
diff --git a/amplify/functions/deleteDocument/node_modules/@smithy/util-buffer-from/dist-cjs/index.js b/amplify/functions/deleteDocument/node_modules/@smithy/util-buffer-from/dist-cjs/index.js new file mode 100644 index 0000000..c6738d9 --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@smithy/util-buffer-from/dist-cjs/index.js @@ -0,0 +1,47 @@ +var __defProp = Object.defineProperty; +var __getOwnPropDesc = Object.getOwnPropertyDescriptor; +var __getOwnPropNames = Object.getOwnPropertyNames; +var __hasOwnProp = Object.prototype.hasOwnProperty; +var __name = (target, value) => __defProp(target, "name", { value, configurable: true }); +var __export = (target, all) => { + for (var name in all) + __defProp(target, name, { get: all[name], enumerable: true }); +}; +var __copyProps = (to, from, except, desc) => { + if (from && typeof from === "object" || typeof from === "function") { + for (let key of __getOwnPropNames(from)) + if (!__hasOwnProp.call(to, key) && key !== except) + __defProp(to, key, { get: () => from[key], enumerable: !(desc = __getOwnPropDesc(from, key)) || desc.enumerable }); + } + return to; +}; +var __toCommonJS = (mod) => __copyProps(__defProp({}, "__esModule", { value: true }), mod); + +// src/index.ts +var src_exports = {}; +__export(src_exports, { + fromArrayBuffer: () => fromArrayBuffer, + fromString: () => fromString +}); +module.exports = __toCommonJS(src_exports); +var import_is_array_buffer = require("@smithy/is-array-buffer"); +var import_buffer = require("buffer"); +var fromArrayBuffer = /* @__PURE__ */ __name((input, offset = 0, length = input.byteLength - offset) => { + if (!(0, import_is_array_buffer.isArrayBuffer)(input)) { + throw new TypeError(`The "input" argument must be ArrayBuffer. 
Received type ${typeof input} (${input})`); + } + return import_buffer.Buffer.from(input, offset, length); +}, "fromArrayBuffer"); +var fromString = /* @__PURE__ */ __name((input, encoding) => { + if (typeof input !== "string") { + throw new TypeError(`The "input" argument must be of type string. Received type ${typeof input} (${input})`); + } + return encoding ? import_buffer.Buffer.from(input, encoding) : import_buffer.Buffer.from(input); +}, "fromString"); +// Annotate the CommonJS export names for ESM import in node: + +0 && (module.exports = { + fromArrayBuffer, + fromString +}); + diff --git a/amplify/functions/deleteDocument/node_modules/@smithy/util-buffer-from/dist-es/index.js b/amplify/functions/deleteDocument/node_modules/@smithy/util-buffer-from/dist-es/index.js new file mode 100644 index 0000000..718f831 --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@smithy/util-buffer-from/dist-es/index.js @@ -0,0 +1,14 @@ +import { isArrayBuffer } from "@smithy/is-array-buffer"; +import { Buffer } from "buffer"; +export const fromArrayBuffer = (input, offset = 0, length = input.byteLength - offset) => { + if (!isArrayBuffer(input)) { + throw new TypeError(`The "input" argument must be ArrayBuffer. Received type ${typeof input} (${input})`); + } + return Buffer.from(input, offset, length); +}; +export const fromString = (input, encoding) => { + if (typeof input !== "string") { + throw new TypeError(`The "input" argument must be of type string. Received type ${typeof input} (${input})`); + } + return encoding ? 
Buffer.from(input, encoding) : Buffer.from(input); +}; diff --git a/amplify/functions/deleteDocument/node_modules/@smithy/util-buffer-from/dist-types/index.d.ts b/amplify/functions/deleteDocument/node_modules/@smithy/util-buffer-from/dist-types/index.d.ts new file mode 100644 index 0000000..a523134 --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@smithy/util-buffer-from/dist-types/index.d.ts @@ -0,0 +1,13 @@ +import { Buffer } from "buffer"; +/** + * @internal + */ +export declare const fromArrayBuffer: (input: ArrayBuffer, offset?: number, length?: number) => Buffer; +/** + * @internal + */ +export type StringEncoding = "ascii" | "utf8" | "utf16le" | "ucs2" | "base64" | "latin1" | "binary" | "hex"; +/** + * @internal + */ +export declare const fromString: (input: string, encoding?: StringEncoding) => Buffer; diff --git a/amplify/functions/deleteDocument/node_modules/@smithy/util-buffer-from/dist-types/ts3.4/index.d.ts b/amplify/functions/deleteDocument/node_modules/@smithy/util-buffer-from/dist-types/ts3.4/index.d.ts new file mode 100644 index 0000000..f9173f7 --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@smithy/util-buffer-from/dist-types/ts3.4/index.d.ts @@ -0,0 +1,13 @@ +import { Buffer } from "buffer"; +/** + * @internal + */ +export declare const fromArrayBuffer: (input: ArrayBuffer, offset?: number, length?: number) => Buffer; +/** + * @internal + */ +export type StringEncoding = "ascii" | "utf8" | "utf16le" | "ucs2" | "base64" | "latin1" | "binary" | "hex"; +/** + * @internal + */ +export declare const fromString: (input: string, encoding?: StringEncoding) => Buffer; diff --git a/amplify/functions/deleteDocument/node_modules/@smithy/util-buffer-from/package.json b/amplify/functions/deleteDocument/node_modules/@smithy/util-buffer-from/package.json new file mode 100644 index 0000000..0869899 --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@smithy/util-buffer-from/package.json @@ -0,0 +1,61 @@ +{ + 
"name": "@smithy/util-buffer-from", + "version": "4.0.0", + "scripts": { + "build": "concurrently 'yarn:build:cjs' 'yarn:build:es' 'yarn:build:types && yarn build:types:downlevel'", + "build:cjs": "node ../../scripts/inline util-buffer-from", + "build:es": "yarn g:tsc -p tsconfig.es.json", + "build:types": "yarn g:tsc -p tsconfig.types.json", + "build:types:downlevel": "rimraf dist-types/ts3.4 && downlevel-dts dist-types dist-types/ts3.4", + "stage-release": "rimraf ./.release && yarn pack && mkdir ./.release && tar zxvf ./package.tgz --directory ./.release && rm ./package.tgz", + "clean": "rimraf ./dist-* && rimraf *.tsbuildinfo || exit 0", + "lint": "eslint -c ../../.eslintrc.js \"src/**/*.ts\"", + "format": "prettier --config ../../prettier.config.js --ignore-path ../.prettierignore --write \"**/*.{ts,md,json}\"", + "test": "yarn g:vitest run", + "test:watch": "yarn g:vitest watch" + }, + "author": { + "name": "AWS SDK for JavaScript Team", + "url": "https://aws.amazon.com/javascript/" + }, + "license": "Apache-2.0", + "dependencies": { + "@smithy/is-array-buffer": "^4.0.0", + "tslib": "^2.6.2" + }, + "devDependencies": { + "@types/node": "^18.11.9", + "concurrently": "7.0.0", + "downlevel-dts": "0.10.1", + "rimraf": "3.0.2", + "typedoc": "0.23.23" + }, + "main": "./dist-cjs/index.js", + "module": "./dist-es/index.js", + "types": "./dist-types/index.d.ts", + "engines": { + "node": ">=18.0.0" + }, + "typesVersions": { + "<4.0": { + "dist-types/*": [ + "dist-types/ts3.4/*" + ] + } + }, + "files": [ + "dist-*/**" + ], + "homepage": "https://github.com/awslabs/smithy-typescript/tree/main/packages/util-buffer-from", + "repository": { + "type": "git", + "url": "https://github.com/awslabs/smithy-typescript.git", + "directory": "packages/util-buffer-from" + }, + "typedoc": { + "entryPoint": "src/index.ts" + }, + "publishConfig": { + "directory": ".release/package" + } +} \ No newline at end of file diff --git 
a/amplify/functions/deleteDocument/node_modules/@smithy/util-config-provider/LICENSE b/amplify/functions/deleteDocument/node_modules/@smithy/util-config-provider/LICENSE new file mode 100644 index 0000000..74d4e5c --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@smithy/util-config-provider/LICENSE @@ -0,0 +1,201 @@ +Apache License + Version 2.0, January 2004 + http://www.apache.org/licenses/ + + TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION + + 1. Definitions. + + "License" shall mean the terms and conditions for use, reproduction, + and distribution as defined by Sections 1 through 9 of this document. + + "Licensor" shall mean the copyright owner or entity authorized by + the copyright owner that is granting the License. + + "Legal Entity" shall mean the union of the acting entity and all + other entities that control, are controlled by, or are under common + control with that entity. For the purposes of this definition, + "control" means (i) the power, direct or indirect, to cause the + direction or management of such entity, whether by contract or + otherwise, or (ii) ownership of fifty percent (50%) or more of the + outstanding shares, or (iii) beneficial ownership of such entity. + + "You" (or "Your") shall mean an individual or Legal Entity + exercising permissions granted by this License. + + "Source" form shall mean the preferred form for making modifications, + including but not limited to software source code, documentation + source, and configuration files. + + "Object" form shall mean any form resulting from mechanical + transformation or translation of a Source form, including but + not limited to compiled object code, generated documentation, + and conversions to other media types. 
+ + "Work" shall mean the work of authorship, whether in Source or + Object form, made available under the License, as indicated by a + copyright notice that is included in or attached to the work + (an example is provided in the Appendix below). + + "Derivative Works" shall mean any work, whether in Source or Object + form, that is based on (or derived from) the Work and for which the + editorial revisions, annotations, elaborations, or other modifications + represent, as a whole, an original work of authorship. For the purposes + of this License, Derivative Works shall not include works that remain + separable from, or merely link (or bind by name) to the interfaces of, + the Work and Derivative Works thereof. + + "Contribution" shall mean any work of authorship, including + the original version of the Work and any modifications or additions + to that Work or Derivative Works thereof, that is intentionally + submitted to Licensor for inclusion in the Work by the copyright owner + or by an individual or Legal Entity authorized to submit on behalf of + the copyright owner. For the purposes of this definition, "submitted" + means any form of electronic, verbal, or written communication sent + to the Licensor or its representatives, including but not limited to + communication on electronic mailing lists, source code control systems, + and issue tracking systems that are managed by, or on behalf of, the + Licensor for the purpose of discussing and improving the Work, but + excluding communication that is conspicuously marked or otherwise + designated in writing by the copyright owner as "Not a Contribution." + + "Contributor" shall mean Licensor and any individual or Legal Entity + on behalf of whom a Contribution has been received by Licensor and + subsequently incorporated within the Work. + + 2. Grant of Copyright License. 
Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + copyright license to reproduce, prepare Derivative Works of, + publicly display, publicly perform, sublicense, and distribute the + Work and such Derivative Works in Source or Object form. + + 3. Grant of Patent License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + (except as stated in this section) patent license to make, have made, + use, offer to sell, sell, import, and otherwise transfer the Work, + where such license applies only to those patent claims licensable + by such Contributor that are necessarily infringed by their + Contribution(s) alone or by combination of their Contribution(s) + with the Work to which such Contribution(s) was submitted. If You + institute patent litigation against any entity (including a + cross-claim or counterclaim in a lawsuit) alleging that the Work + or a Contribution incorporated within the Work constitutes direct + or contributory patent infringement, then any patent licenses + granted to You under this License for that Work shall terminate + as of the date such litigation is filed. + + 4. Redistribution. 
You may reproduce and distribute copies of the + Work or Derivative Works thereof in any medium, with or without + modifications, and in Source or Object form, provided that You + meet the following conditions: + + (a) You must give any other recipients of the Work or + Derivative Works a copy of this License; and + + (b) You must cause any modified files to carry prominent notices + stating that You changed the files; and + + (c) You must retain, in the Source form of any Derivative Works + that You distribute, all copyright, patent, trademark, and + attribution notices from the Source form of the Work, + excluding those notices that do not pertain to any part of + the Derivative Works; and + + (d) If the Work includes a "NOTICE" text file as part of its + distribution, then any Derivative Works that You distribute must + include a readable copy of the attribution notices contained + within such NOTICE file, excluding those notices that do not + pertain to any part of the Derivative Works, in at least one + of the following places: within a NOTICE text file distributed + as part of the Derivative Works; within the Source form or + documentation, if provided along with the Derivative Works; or, + within a display generated by the Derivative Works, if and + wherever such third-party notices normally appear. The contents + of the NOTICE file are for informational purposes only and + do not modify the License. You may add Your own attribution + notices within Derivative Works that You distribute, alongside + or as an addendum to the NOTICE text from the Work, provided + that such additional attribution notices cannot be construed + as modifying the License. 
+ + You may add Your own copyright statement to Your modifications and + may provide additional or different license terms and conditions + for use, reproduction, or distribution of Your modifications, or + for any such Derivative Works as a whole, provided Your use, + reproduction, and distribution of the Work otherwise complies with + the conditions stated in this License. + + 5. Submission of Contributions. Unless You explicitly state otherwise, + any Contribution intentionally submitted for inclusion in the Work + by You to the Licensor shall be under the terms and conditions of + this License, without any additional terms or conditions. + Notwithstanding the above, nothing herein shall supersede or modify + the terms of any separate license agreement you may have executed + with Licensor regarding such Contributions. + + 6. Trademarks. This License does not grant permission to use the trade + names, trademarks, service marks, or product names of the Licensor, + except as required for reasonable and customary use in describing the + origin of the Work and reproducing the content of the NOTICE file. + + 7. Disclaimer of Warranty. Unless required by applicable law or + agreed to in writing, Licensor provides the Work (and each + Contributor provides its Contributions) on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or + implied, including, without limitation, any warranties or conditions + of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A + PARTICULAR PURPOSE. You are solely responsible for determining the + appropriateness of using or redistributing the Work and assume any + risks associated with Your exercise of permissions under this License. + + 8. Limitation of Liability. 
In no event and under no legal theory, + whether in tort (including negligence), contract, or otherwise, + unless required by applicable law (such as deliberate and grossly + negligent acts) or agreed to in writing, shall any Contributor be + liable to You for damages, including any direct, indirect, special, + incidental, or consequential damages of any character arising as a + result of this License or out of the use or inability to use the + Work (including but not limited to damages for loss of goodwill, + work stoppage, computer failure or malfunction, or any and all + other commercial damages or losses), even if such Contributor + has been advised of the possibility of such damages. + + 9. Accepting Warranty or Additional Liability. While redistributing + the Work or Derivative Works thereof, You may choose to offer, + and charge a fee for, acceptance of support, warranty, indemnity, + or other liability obligations and/or rights consistent with this + License. However, in accepting such obligations, You may act only + on Your own behalf and on Your sole responsibility, not on behalf + of any other Contributor, and only if You agree to indemnify, + defend, and hold each Contributor harmless for any liability + incurred by, or claims asserted against, such Contributor by reason + of your accepting any such warranty or additional liability. + + END OF TERMS AND CONDITIONS + + APPENDIX: How to apply the Apache License to your work. + + To apply the Apache License to your work, attach the following + boilerplate notice, with the fields enclosed by brackets "{}" + replaced with your own identifying information. (Don't include + the brackets!) The text should be enclosed in the appropriate + comment syntax for the file format. We also recommend that a + file or class name and description of purpose be included on the + same "printed page" as the copyright notice for easier + identification within third-party archives. + + Copyright 2020 Amazon.com, Inc. 
or its affiliates. All Rights Reserved. + + Licensed under the Apache License, Version 2.0 (the "License"); + you may not use this file except in compliance with the License. + You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + + Unless required by applicable law or agreed to in writing, software + distributed under the License is distributed on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + See the License for the specific language governing permissions and + limitations under the License. \ No newline at end of file diff --git a/amplify/functions/deleteDocument/node_modules/@smithy/util-config-provider/README.md b/amplify/functions/deleteDocument/node_modules/@smithy/util-config-provider/README.md new file mode 100644 index 0000000..5b0341d --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@smithy/util-config-provider/README.md @@ -0,0 +1,4 @@ +# @smithy/util-config-provider + +[![NPM version](https://img.shields.io/npm/v/@smithy/util-config-provider/latest.svg)](https://www.npmjs.com/package/@smithy/util-config-provider) +[![NPM downloads](https://img.shields.io/npm/dm/@smithy/util-config-provider.svg)](https://www.npmjs.com/package/@smithy/util-config-provider) diff --git a/amplify/functions/deleteDocument/node_modules/@smithy/util-config-provider/dist-cjs/booleanSelector.js b/amplify/functions/deleteDocument/node_modules/@smithy/util-config-provider/dist-cjs/booleanSelector.js new file mode 100644 index 0000000..532e610 --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@smithy/util-config-provider/dist-cjs/booleanSelector.js @@ -0,0 +1 @@ +module.exports = require("./index.js"); \ No newline at end of file diff --git a/amplify/functions/deleteDocument/node_modules/@smithy/util-config-provider/dist-cjs/index.js b/amplify/functions/deleteDocument/node_modules/@smithy/util-config-provider/dist-cjs/index.js new file mode 100644 index 
0000000..210d40d --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@smithy/util-config-provider/dist-cjs/index.js @@ -0,0 +1,64 @@ +var __defProp = Object.defineProperty; +var __getOwnPropDesc = Object.getOwnPropertyDescriptor; +var __getOwnPropNames = Object.getOwnPropertyNames; +var __hasOwnProp = Object.prototype.hasOwnProperty; +var __name = (target, value) => __defProp(target, "name", { value, configurable: true }); +var __export = (target, all) => { + for (var name in all) + __defProp(target, name, { get: all[name], enumerable: true }); +}; +var __copyProps = (to, from, except, desc) => { + if (from && typeof from === "object" || typeof from === "function") { + for (let key of __getOwnPropNames(from)) + if (!__hasOwnProp.call(to, key) && key !== except) + __defProp(to, key, { get: () => from[key], enumerable: !(desc = __getOwnPropDesc(from, key)) || desc.enumerable }); + } + return to; +}; +var __toCommonJS = (mod) => __copyProps(__defProp({}, "__esModule", { value: true }), mod); + +// src/index.ts +var src_exports = {}; +__export(src_exports, { + SelectorType: () => SelectorType, + booleanSelector: () => booleanSelector, + numberSelector: () => numberSelector +}); +module.exports = __toCommonJS(src_exports); + +// src/booleanSelector.ts +var booleanSelector = /* @__PURE__ */ __name((obj, key, type) => { + if (!(key in obj)) + return void 0; + if (obj[key] === "true") + return true; + if (obj[key] === "false") + return false; + throw new Error(`Cannot load ${type} "${key}". Expected "true" or "false", got ${obj[key]}.`); +}, "booleanSelector"); + +// src/numberSelector.ts +var numberSelector = /* @__PURE__ */ __name((obj, key, type) => { + if (!(key in obj)) + return void 0; + const numberValue = parseInt(obj[key], 10); + if (Number.isNaN(numberValue)) { + throw new TypeError(`Cannot load ${type} '${key}'. 
Expected number, got '${obj[key]}'.`); + } + return numberValue; +}, "numberSelector"); + +// src/types.ts +var SelectorType = /* @__PURE__ */ ((SelectorType2) => { + SelectorType2["ENV"] = "env"; + SelectorType2["CONFIG"] = "shared config entry"; + return SelectorType2; +})(SelectorType || {}); +// Annotate the CommonJS export names for ESM import in node: + +0 && (module.exports = { + booleanSelector, + numberSelector, + SelectorType +}); + diff --git a/amplify/functions/deleteDocument/node_modules/@smithy/util-config-provider/dist-cjs/numberSelector.js b/amplify/functions/deleteDocument/node_modules/@smithy/util-config-provider/dist-cjs/numberSelector.js new file mode 100644 index 0000000..532e610 --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@smithy/util-config-provider/dist-cjs/numberSelector.js @@ -0,0 +1 @@ +module.exports = require("./index.js"); \ No newline at end of file diff --git a/amplify/functions/deleteDocument/node_modules/@smithy/util-config-provider/dist-cjs/types.js b/amplify/functions/deleteDocument/node_modules/@smithy/util-config-provider/dist-cjs/types.js new file mode 100644 index 0000000..532e610 --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@smithy/util-config-provider/dist-cjs/types.js @@ -0,0 +1 @@ +module.exports = require("./index.js"); \ No newline at end of file diff --git a/amplify/functions/deleteDocument/node_modules/@smithy/util-config-provider/dist-es/booleanSelector.js b/amplify/functions/deleteDocument/node_modules/@smithy/util-config-provider/dist-es/booleanSelector.js new file mode 100644 index 0000000..6ba2261 --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@smithy/util-config-provider/dist-es/booleanSelector.js @@ -0,0 +1,9 @@ +export const booleanSelector = (obj, key, type) => { + if (!(key in obj)) + return undefined; + if (obj[key] === "true") + return true; + if (obj[key] === "false") + return false; + throw new Error(`Cannot load ${type} "${key}". 
Expected "true" or "false", got ${obj[key]}.`); +}; diff --git a/amplify/functions/deleteDocument/node_modules/@smithy/util-config-provider/dist-es/index.js b/amplify/functions/deleteDocument/node_modules/@smithy/util-config-provider/dist-es/index.js new file mode 100644 index 0000000..a926de8 --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@smithy/util-config-provider/dist-es/index.js @@ -0,0 +1,3 @@ +export * from "./booleanSelector"; +export * from "./numberSelector"; +export * from "./types"; diff --git a/amplify/functions/deleteDocument/node_modules/@smithy/util-config-provider/dist-es/numberSelector.js b/amplify/functions/deleteDocument/node_modules/@smithy/util-config-provider/dist-es/numberSelector.js new file mode 100644 index 0000000..81cfe40 --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@smithy/util-config-provider/dist-es/numberSelector.js @@ -0,0 +1,9 @@ +export const numberSelector = (obj, key, type) => { + if (!(key in obj)) + return undefined; + const numberValue = parseInt(obj[key], 10); + if (Number.isNaN(numberValue)) { + throw new TypeError(`Cannot load ${type} '${key}'. 
Expected number, got '${obj[key]}'.`); + } + return numberValue; +}; diff --git a/amplify/functions/deleteDocument/node_modules/@smithy/util-config-provider/dist-es/types.js b/amplify/functions/deleteDocument/node_modules/@smithy/util-config-provider/dist-es/types.js new file mode 100644 index 0000000..5b10fb5 --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@smithy/util-config-provider/dist-es/types.js @@ -0,0 +1,5 @@ +export var SelectorType; +(function (SelectorType) { + SelectorType["ENV"] = "env"; + SelectorType["CONFIG"] = "shared config entry"; +})(SelectorType || (SelectorType = {})); diff --git a/amplify/functions/deleteDocument/node_modules/@smithy/util-config-provider/dist-types/booleanSelector.d.ts b/amplify/functions/deleteDocument/node_modules/@smithy/util-config-provider/dist-types/booleanSelector.d.ts new file mode 100644 index 0000000..d4977cb --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@smithy/util-config-provider/dist-types/booleanSelector.d.ts @@ -0,0 +1,10 @@ +import { SelectorType } from "./types"; +/** + * Returns boolean value true/false for string value "true"/"false", + * if the string is defined in obj[key] + * Returns undefined, if obj[key] is not defined. + * Throws error for all other cases. 
+ * + * @internal + */ +export declare const booleanSelector: (obj: Record, key: string, type: SelectorType) => boolean | undefined; diff --git a/amplify/functions/deleteDocument/node_modules/@smithy/util-config-provider/dist-types/index.d.ts b/amplify/functions/deleteDocument/node_modules/@smithy/util-config-provider/dist-types/index.d.ts new file mode 100644 index 0000000..a926de8 --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@smithy/util-config-provider/dist-types/index.d.ts @@ -0,0 +1,3 @@ +export * from "./booleanSelector"; +export * from "./numberSelector"; +export * from "./types"; diff --git a/amplify/functions/deleteDocument/node_modules/@smithy/util-config-provider/dist-types/numberSelector.d.ts b/amplify/functions/deleteDocument/node_modules/@smithy/util-config-provider/dist-types/numberSelector.d.ts new file mode 100644 index 0000000..9e0cbf9 --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@smithy/util-config-provider/dist-types/numberSelector.d.ts @@ -0,0 +1,9 @@ +import { SelectorType } from "./types"; +/** + * Returns number value for string value, if the string is defined in obj[key]. + * Returns undefined, if obj[key] is not defined. + * Throws error for all other cases. 
+ * + * @internal + */ +export declare const numberSelector: (obj: Record, key: string, type: SelectorType) => number | undefined; diff --git a/amplify/functions/deleteDocument/node_modules/@smithy/util-config-provider/dist-types/ts3.4/booleanSelector.d.ts b/amplify/functions/deleteDocument/node_modules/@smithy/util-config-provider/dist-types/ts3.4/booleanSelector.d.ts new file mode 100644 index 0000000..0b85452 --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@smithy/util-config-provider/dist-types/ts3.4/booleanSelector.d.ts @@ -0,0 +1,10 @@ +import { SelectorType } from "./types"; +/** + * Returns boolean value true/false for string value "true"/"false", + * if the string is defined in obj[key] + * Returns undefined, if obj[key] is not defined. + * Throws error for all other cases. + * + * @internal + */ +export declare const booleanSelector: (obj: Record, key: string, type: SelectorType) => boolean | undefined; diff --git a/amplify/functions/deleteDocument/node_modules/@smithy/util-config-provider/dist-types/ts3.4/index.d.ts b/amplify/functions/deleteDocument/node_modules/@smithy/util-config-provider/dist-types/ts3.4/index.d.ts new file mode 100644 index 0000000..02fd81d --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@smithy/util-config-provider/dist-types/ts3.4/index.d.ts @@ -0,0 +1,3 @@ +export * from "./booleanSelector"; +export * from "./numberSelector"; +export * from "./types"; diff --git a/amplify/functions/deleteDocument/node_modules/@smithy/util-config-provider/dist-types/ts3.4/numberSelector.d.ts b/amplify/functions/deleteDocument/node_modules/@smithy/util-config-provider/dist-types/ts3.4/numberSelector.d.ts new file mode 100644 index 0000000..3a34671 --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@smithy/util-config-provider/dist-types/ts3.4/numberSelector.d.ts @@ -0,0 +1,9 @@ +import { SelectorType } from "./types"; +/** + * Returns number value for string value, if the string is defined in 
obj[key]. + * Returns undefined, if obj[key] is not defined. + * Throws error for all other cases. + * + * @internal + */ +export declare const numberSelector: (obj: Record, key: string, type: SelectorType) => number | undefined; diff --git a/amplify/functions/deleteDocument/node_modules/@smithy/util-config-provider/dist-types/ts3.4/types.d.ts b/amplify/functions/deleteDocument/node_modules/@smithy/util-config-provider/dist-types/ts3.4/types.d.ts new file mode 100644 index 0000000..e01c128 --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@smithy/util-config-provider/dist-types/ts3.4/types.d.ts @@ -0,0 +1,4 @@ +export declare enum SelectorType { + ENV = "env", + CONFIG = "shared config entry" +} diff --git a/amplify/functions/deleteDocument/node_modules/@smithy/util-config-provider/dist-types/types.d.ts b/amplify/functions/deleteDocument/node_modules/@smithy/util-config-provider/dist-types/types.d.ts new file mode 100644 index 0000000..caa65d7 --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@smithy/util-config-provider/dist-types/types.d.ts @@ -0,0 +1,4 @@ +export declare enum SelectorType { + ENV = "env", + CONFIG = "shared config entry" +} diff --git a/amplify/functions/deleteDocument/node_modules/@smithy/util-config-provider/package.json b/amplify/functions/deleteDocument/node_modules/@smithy/util-config-provider/package.json new file mode 100644 index 0000000..50796be --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@smithy/util-config-provider/package.json @@ -0,0 +1,62 @@ +{ + "name": "@smithy/util-config-provider", + "version": "4.0.0", + "description": "Utilities package for configuration providers", + "scripts": { + "build": "concurrently 'yarn:build:cjs' 'yarn:build:es' 'yarn:build:types && yarn build:types:downlevel'", + "build:cjs": "node ../../scripts/inline util-config-provider", + "build:es": "yarn g:tsc -p tsconfig.es.json", + "build:types": "yarn g:tsc -p tsconfig.types.json", + 
"build:types:downlevel": "rimraf dist-types/ts3.4 && downlevel-dts dist-types dist-types/ts3.4", + "stage-release": "rimraf ./.release && yarn pack && mkdir ./.release && tar zxvf ./package.tgz --directory ./.release && rm ./package.tgz", + "clean": "rimraf ./dist-* && rimraf *.tsbuildinfo || exit 0", + "lint": "eslint -c ../../.eslintrc.js \"src/**/*.ts\"", + "format": "prettier --config ../../prettier.config.js --ignore-path ../.prettierignore --write \"**/*.{ts,md,json}\"", + "test": "yarn g:vitest run", + "test:watch": "yarn g:vitest watch" + }, + "author": { + "name": "AWS SDK for JavaScript Team", + "email": "", + "url": "https://aws.amazon.com/javascript/" + }, + "license": "Apache-2.0", + "main": "./dist-cjs/index.js", + "module": "./dist-es/index.js", + "types": "./dist-types/index.d.ts", + "dependencies": { + "tslib": "^2.6.2" + }, + "devDependencies": { + "@types/node": "^18.11.9", + "concurrently": "7.0.0", + "downlevel-dts": "0.10.1", + "rimraf": "3.0.2", + "typedoc": "0.23.23" + }, + "engines": { + "node": ">=18.0.0" + }, + "typesVersions": { + "<4.0": { + "dist-types/*": [ + "dist-types/ts3.4/*" + ] + } + }, + "files": [ + "dist-*/**" + ], + "homepage": "https://github.com/awslabs/smithy-typescript/tree/main/packages/util-config-provider", + "repository": { + "type": "git", + "url": "https://github.com/awslabs/smithy-typescript.git", + "directory": "packages/util-config-provider" + }, + "typedoc": { + "entryPoint": "src/index.ts" + }, + "publishConfig": { + "directory": ".release/package" + } +} \ No newline at end of file diff --git a/amplify/functions/deleteDocument/node_modules/@smithy/util-defaults-mode-browser/LICENSE b/amplify/functions/deleteDocument/node_modules/@smithy/util-defaults-mode-browser/LICENSE new file mode 100644 index 0000000..dd65ae0 --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@smithy/util-defaults-mode-browser/LICENSE @@ -0,0 +1,201 @@ + Apache License + Version 2.0, January 2004 + 
http://www.apache.org/licenses/ + + TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION + + 1. Definitions. + + "License" shall mean the terms and conditions for use, reproduction, + and distribution as defined by Sections 1 through 9 of this document. + + "Licensor" shall mean the copyright owner or entity authorized by + the copyright owner that is granting the License. + + "Legal Entity" shall mean the union of the acting entity and all + other entities that control, are controlled by, or are under common + control with that entity. For the purposes of this definition, + "control" means (i) the power, direct or indirect, to cause the + direction or management of such entity, whether by contract or + otherwise, or (ii) ownership of fifty percent (50%) or more of the + outstanding shares, or (iii) beneficial ownership of such entity. + + "You" (or "Your") shall mean an individual or Legal Entity + exercising permissions granted by this License. + + "Source" form shall mean the preferred form for making modifications, + including but not limited to software source code, documentation + source, and configuration files. + + "Object" form shall mean any form resulting from mechanical + transformation or translation of a Source form, including but + not limited to compiled object code, generated documentation, + and conversions to other media types. + + "Work" shall mean the work of authorship, whether in Source or + Object form, made available under the License, as indicated by a + copyright notice that is included in or attached to the work + (an example is provided in the Appendix below). + + "Derivative Works" shall mean any work, whether in Source or Object + form, that is based on (or derived from) the Work and for which the + editorial revisions, annotations, elaborations, or other modifications + represent, as a whole, an original work of authorship. 
For the purposes + of this License, Derivative Works shall not include works that remain + separable from, or merely link (or bind by name) to the interfaces of, + the Work and Derivative Works thereof. + + "Contribution" shall mean any work of authorship, including + the original version of the Work and any modifications or additions + to that Work or Derivative Works thereof, that is intentionally + submitted to Licensor for inclusion in the Work by the copyright owner + or by an individual or Legal Entity authorized to submit on behalf of + the copyright owner. For the purposes of this definition, "submitted" + means any form of electronic, verbal, or written communication sent + to the Licensor or its representatives, including but not limited to + communication on electronic mailing lists, source code control systems, + and issue tracking systems that are managed by, or on behalf of, the + Licensor for the purpose of discussing and improving the Work, but + excluding communication that is conspicuously marked or otherwise + designated in writing by the copyright owner as "Not a Contribution." + + "Contributor" shall mean Licensor and any individual or Legal Entity + on behalf of whom a Contribution has been received by Licensor and + subsequently incorporated within the Work. + + 2. Grant of Copyright License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + copyright license to reproduce, prepare Derivative Works of, + publicly display, publicly perform, sublicense, and distribute the + Work and such Derivative Works in Source or Object form. + + 3. Grant of Patent License. 
Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + (except as stated in this section) patent license to make, have made, + use, offer to sell, sell, import, and otherwise transfer the Work, + where such license applies only to those patent claims licensable + by such Contributor that are necessarily infringed by their + Contribution(s) alone or by combination of their Contribution(s) + with the Work to which such Contribution(s) was submitted. If You + institute patent litigation against any entity (including a + cross-claim or counterclaim in a lawsuit) alleging that the Work + or a Contribution incorporated within the Work constitutes direct + or contributory patent infringement, then any patent licenses + granted to You under this License for that Work shall terminate + as of the date such litigation is filed. + + 4. Redistribution. You may reproduce and distribute copies of the + Work or Derivative Works thereof in any medium, with or without + modifications, and in Source or Object form, provided that You + meet the following conditions: + + (a) You must give any other recipients of the Work or + Derivative Works a copy of this License; and + + (b) You must cause any modified files to carry prominent notices + stating that You changed the files; and + + (c) You must retain, in the Source form of any Derivative Works + that You distribute, all copyright, patent, trademark, and + attribution notices from the Source form of the Work, + excluding those notices that do not pertain to any part of + the Derivative Works; and + + (d) If the Work includes a "NOTICE" text file as part of its + distribution, then any Derivative Works that You distribute must + include a readable copy of the attribution notices contained + within such NOTICE file, excluding those notices that do not + pertain to any part of the Derivative Works, in at least one + of 
the following places: within a NOTICE text file distributed + as part of the Derivative Works; within the Source form or + documentation, if provided along with the Derivative Works; or, + within a display generated by the Derivative Works, if and + wherever such third-party notices normally appear. The contents + of the NOTICE file are for informational purposes only and + do not modify the License. You may add Your own attribution + notices within Derivative Works that You distribute, alongside + or as an addendum to the NOTICE text from the Work, provided + that such additional attribution notices cannot be construed + as modifying the License. + + You may add Your own copyright statement to Your modifications and + may provide additional or different license terms and conditions + for use, reproduction, or distribution of Your modifications, or + for any such Derivative Works as a whole, provided Your use, + reproduction, and distribution of the Work otherwise complies with + the conditions stated in this License. + + 5. Submission of Contributions. Unless You explicitly state otherwise, + any Contribution intentionally submitted for inclusion in the Work + by You to the Licensor shall be under the terms and conditions of + this License, without any additional terms or conditions. + Notwithstanding the above, nothing herein shall supersede or modify + the terms of any separate license agreement you may have executed + with Licensor regarding such Contributions. + + 6. Trademarks. This License does not grant permission to use the trade + names, trademarks, service marks, or product names of the Licensor, + except as required for reasonable and customary use in describing the + origin of the Work and reproducing the content of the NOTICE file. + + 7. Disclaimer of Warranty. 
Unless required by applicable law or + agreed to in writing, Licensor provides the Work (and each + Contributor provides its Contributions) on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or + implied, including, without limitation, any warranties or conditions + of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A + PARTICULAR PURPOSE. You are solely responsible for determining the + appropriateness of using or redistributing the Work and assume any + risks associated with Your exercise of permissions under this License. + + 8. Limitation of Liability. In no event and under no legal theory, + whether in tort (including negligence), contract, or otherwise, + unless required by applicable law (such as deliberate and grossly + negligent acts) or agreed to in writing, shall any Contributor be + liable to You for damages, including any direct, indirect, special, + incidental, or consequential damages of any character arising as a + result of this License or out of the use or inability to use the + Work (including but not limited to damages for loss of goodwill, + work stoppage, computer failure or malfunction, or any and all + other commercial damages or losses), even if such Contributor + has been advised of the possibility of such damages. + + 9. Accepting Warranty or Additional Liability. While redistributing + the Work or Derivative Works thereof, You may choose to offer, + and charge a fee for, acceptance of support, warranty, indemnity, + or other liability obligations and/or rights consistent with this + License. However, in accepting such obligations, You may act only + on Your own behalf and on Your sole responsibility, not on behalf + of any other Contributor, and only if You agree to indemnify, + defend, and hold each Contributor harmless for any liability + incurred by, or claims asserted against, such Contributor by reason + of your accepting any such warranty or additional liability. 
+ + END OF TERMS AND CONDITIONS + + APPENDIX: How to apply the Apache License to your work. + + To apply the Apache License to your work, attach the following + boilerplate notice, with the fields enclosed by brackets "{}" + replaced with your own identifying information. (Don't include + the brackets!) The text should be enclosed in the appropriate + comment syntax for the file format. We also recommend that a + file or class name and description of purpose be included on the + same "printed page" as the copyright notice for easier + identification within third-party archives. + + Copyright 2018-2020 Amazon.com, Inc. or its affiliates. All Rights Reserved. + + Licensed under the Apache License, Version 2.0 (the "License"); + you may not use this file except in compliance with the License. + You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + + Unless required by applicable law or agreed to in writing, software + distributed under the License is distributed on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + See the License for the specific language governing permissions and + limitations under the License. 
diff --git a/amplify/functions/deleteDocument/node_modules/@smithy/util-defaults-mode-browser/README.md b/amplify/functions/deleteDocument/node_modules/@smithy/util-defaults-mode-browser/README.md new file mode 100644 index 0000000..f2f1cc0 --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@smithy/util-defaults-mode-browser/README.md @@ -0,0 +1,10 @@ +# @smithy/util-defaults-mode-browser + +[![NPM version](https://img.shields.io/npm/v/@smithy/util-defaults-mode-browser/latest.svg)](https://www.npmjs.com/package/@smithy/util-defaults-mode-browser) +[![NPM downloads](https://img.shields.io/npm/dm/@smithy/util-defaults-mode-browser.svg)](https://www.npmjs.com/package/@smithy/util-defaults-mode-browser) + +> An internal package + +## Usage + +You probably shouldn't, at least directly. diff --git a/amplify/functions/deleteDocument/node_modules/@smithy/util-defaults-mode-browser/dist-cjs/constants.js b/amplify/functions/deleteDocument/node_modules/@smithy/util-defaults-mode-browser/dist-cjs/constants.js new file mode 100644 index 0000000..3733506 --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@smithy/util-defaults-mode-browser/dist-cjs/constants.js @@ -0,0 +1,4 @@ +"use strict"; +Object.defineProperty(exports, "__esModule", { value: true }); +exports.DEFAULTS_MODE_OPTIONS = void 0; +exports.DEFAULTS_MODE_OPTIONS = ["in-region", "cross-region", "mobile", "standard", "legacy"]; diff --git a/amplify/functions/deleteDocument/node_modules/@smithy/util-defaults-mode-browser/dist-cjs/index.js b/amplify/functions/deleteDocument/node_modules/@smithy/util-defaults-mode-browser/dist-cjs/index.js new file mode 100644 index 0000000..4624ef1 --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@smithy/util-defaults-mode-browser/dist-cjs/index.js @@ -0,0 +1,25 @@ +var __defProp = Object.defineProperty; +var __getOwnPropDesc = Object.getOwnPropertyDescriptor; +var __getOwnPropNames = Object.getOwnPropertyNames; +var __hasOwnProp = 
Object.prototype.hasOwnProperty; +var __copyProps = (to, from, except, desc) => { + if (from && typeof from === "object" || typeof from === "function") { + for (let key of __getOwnPropNames(from)) + if (!__hasOwnProp.call(to, key) && key !== except) + __defProp(to, key, { get: () => from[key], enumerable: !(desc = __getOwnPropDesc(from, key)) || desc.enumerable }); + } + return to; +}; +var __reExport = (target, mod, secondTarget) => (__copyProps(target, mod, "default"), secondTarget && __copyProps(secondTarget, mod, "default")); +var __toCommonJS = (mod) => __copyProps(__defProp({}, "__esModule", { value: true }), mod); + +// src/index.ts +var src_exports = {}; +module.exports = __toCommonJS(src_exports); +__reExport(src_exports, require("././resolveDefaultsModeConfig"), module.exports); +// Annotate the CommonJS export names for ESM import in node: + +0 && (module.exports = { + resolveDefaultsModeConfig +}); + diff --git a/amplify/functions/deleteDocument/node_modules/@smithy/util-defaults-mode-browser/dist-cjs/resolveDefaultsModeConfig.js b/amplify/functions/deleteDocument/node_modules/@smithy/util-defaults-mode-browser/dist-cjs/resolveDefaultsModeConfig.js new file mode 100644 index 0000000..f23368c --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@smithy/util-defaults-mode-browser/dist-cjs/resolveDefaultsModeConfig.js @@ -0,0 +1,33 @@ +"use strict"; +Object.defineProperty(exports, "__esModule", { value: true }); +exports.resolveDefaultsModeConfig = void 0; +const tslib_1 = require("tslib"); +const property_provider_1 = require("@smithy/property-provider"); +const bowser_1 = tslib_1.__importDefault(require("bowser")); +const constants_1 = require("./constants"); +const resolveDefaultsModeConfig = ({ defaultsMode, } = {}) => (0, property_provider_1.memoize)(async () => { + const mode = typeof defaultsMode === "function" ? await defaultsMode() : defaultsMode; + switch (mode === null || mode === void 0 ? 
void 0 : mode.toLowerCase()) { + case "auto": + return Promise.resolve(isMobileBrowser() ? "mobile" : "standard"); + case "mobile": + case "in-region": + case "cross-region": + case "standard": + case "legacy": + return Promise.resolve(mode === null || mode === void 0 ? void 0 : mode.toLocaleLowerCase()); + case undefined: + return Promise.resolve("legacy"); + default: + throw new Error(`Invalid parameter for "defaultsMode", expect ${constants_1.DEFAULTS_MODE_OPTIONS.join(", ")}, got ${mode}`); + } +}); +exports.resolveDefaultsModeConfig = resolveDefaultsModeConfig; +const isMobileBrowser = () => { + var _a, _b; + const parsedUA = typeof window !== "undefined" && ((_a = window === null || window === void 0 ? void 0 : window.navigator) === null || _a === void 0 ? void 0 : _a.userAgent) + ? bowser_1.default.parse(window.navigator.userAgent) + : undefined; + const platform = (_b = parsedUA === null || parsedUA === void 0 ? void 0 : parsedUA.platform) === null || _b === void 0 ? void 0 : _b.type; + return platform === "tablet" || platform === "mobile"; +}; diff --git a/amplify/functions/deleteDocument/node_modules/@smithy/util-defaults-mode-browser/dist-cjs/resolveDefaultsModeConfig.native.js b/amplify/functions/deleteDocument/node_modules/@smithy/util-defaults-mode-browser/dist-cjs/resolveDefaultsModeConfig.native.js new file mode 100644 index 0000000..fc6be33 --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@smithy/util-defaults-mode-browser/dist-cjs/resolveDefaultsModeConfig.native.js @@ -0,0 +1,23 @@ +"use strict"; +Object.defineProperty(exports, "__esModule", { value: true }); +exports.resolveDefaultsModeConfig = void 0; +const property_provider_1 = require("@smithy/property-provider"); +const constants_1 = require("./constants"); +const resolveDefaultsModeConfig = ({ defaultsMode, } = {}) => (0, property_provider_1.memoize)(async () => { + const mode = typeof defaultsMode === "function" ? 
await defaultsMode() : defaultsMode; + switch (mode === null || mode === void 0 ? void 0 : mode.toLowerCase()) { + case "auto": + return Promise.resolve("mobile"); + case "mobile": + case "in-region": + case "cross-region": + case "standard": + case "legacy": + return Promise.resolve(mode === null || mode === void 0 ? void 0 : mode.toLocaleLowerCase()); + case undefined: + return Promise.resolve("legacy"); + default: + throw new Error(`Invalid parameter for "defaultsMode", expect ${constants_1.DEFAULTS_MODE_OPTIONS.join(", ")}, got ${mode}`); + } +}); +exports.resolveDefaultsModeConfig = resolveDefaultsModeConfig; diff --git a/amplify/functions/deleteDocument/node_modules/@smithy/util-defaults-mode-browser/dist-es/constants.js b/amplify/functions/deleteDocument/node_modules/@smithy/util-defaults-mode-browser/dist-es/constants.js new file mode 100644 index 0000000..d58e11f --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@smithy/util-defaults-mode-browser/dist-es/constants.js @@ -0,0 +1 @@ +export const DEFAULTS_MODE_OPTIONS = ["in-region", "cross-region", "mobile", "standard", "legacy"]; diff --git a/amplify/functions/deleteDocument/node_modules/@smithy/util-defaults-mode-browser/dist-es/index.js b/amplify/functions/deleteDocument/node_modules/@smithy/util-defaults-mode-browser/dist-es/index.js new file mode 100644 index 0000000..05aa818 --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@smithy/util-defaults-mode-browser/dist-es/index.js @@ -0,0 +1 @@ +export * from "./resolveDefaultsModeConfig"; diff --git a/amplify/functions/deleteDocument/node_modules/@smithy/util-defaults-mode-browser/dist-es/resolveDefaultsModeConfig.js b/amplify/functions/deleteDocument/node_modules/@smithy/util-defaults-mode-browser/dist-es/resolveDefaultsModeConfig.js new file mode 100644 index 0000000..940ab63 --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@smithy/util-defaults-mode-browser/dist-es/resolveDefaultsModeConfig.js @@ -0,0 
+1,27 @@ +import { memoize } from "@smithy/property-provider"; +import bowser from "bowser"; +import { DEFAULTS_MODE_OPTIONS } from "./constants"; +export const resolveDefaultsModeConfig = ({ defaultsMode, } = {}) => memoize(async () => { + const mode = typeof defaultsMode === "function" ? await defaultsMode() : defaultsMode; + switch (mode?.toLowerCase()) { + case "auto": + return Promise.resolve(isMobileBrowser() ? "mobile" : "standard"); + case "mobile": + case "in-region": + case "cross-region": + case "standard": + case "legacy": + return Promise.resolve(mode?.toLocaleLowerCase()); + case undefined: + return Promise.resolve("legacy"); + default: + throw new Error(`Invalid parameter for "defaultsMode", expect ${DEFAULTS_MODE_OPTIONS.join(", ")}, got ${mode}`); + } +}); +const isMobileBrowser = () => { + const parsedUA = typeof window !== "undefined" && window?.navigator?.userAgent + ? bowser.parse(window.navigator.userAgent) + : undefined; + const platform = parsedUA?.platform?.type; + return platform === "tablet" || platform === "mobile"; +}; diff --git a/amplify/functions/deleteDocument/node_modules/@smithy/util-defaults-mode-browser/dist-es/resolveDefaultsModeConfig.native.js b/amplify/functions/deleteDocument/node_modules/@smithy/util-defaults-mode-browser/dist-es/resolveDefaultsModeConfig.native.js new file mode 100644 index 0000000..3164191 --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@smithy/util-defaults-mode-browser/dist-es/resolveDefaultsModeConfig.native.js @@ -0,0 +1,19 @@ +import { memoize } from "@smithy/property-provider"; +import { DEFAULTS_MODE_OPTIONS } from "./constants"; +export const resolveDefaultsModeConfig = ({ defaultsMode, } = {}) => memoize(async () => { + const mode = typeof defaultsMode === "function" ? 
await defaultsMode() : defaultsMode; + switch (mode?.toLowerCase()) { + case "auto": + return Promise.resolve("mobile"); + case "mobile": + case "in-region": + case "cross-region": + case "standard": + case "legacy": + return Promise.resolve(mode?.toLocaleLowerCase()); + case undefined: + return Promise.resolve("legacy"); + default: + throw new Error(`Invalid parameter for "defaultsMode", expect ${DEFAULTS_MODE_OPTIONS.join(", ")}, got ${mode}`); + } +}); diff --git a/amplify/functions/deleteDocument/node_modules/@smithy/util-defaults-mode-browser/dist-types/constants.d.ts b/amplify/functions/deleteDocument/node_modules/@smithy/util-defaults-mode-browser/dist-types/constants.d.ts new file mode 100644 index 0000000..18dbe6c --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@smithy/util-defaults-mode-browser/dist-types/constants.d.ts @@ -0,0 +1,12 @@ +import type { DefaultsMode } from "@smithy/smithy-client"; +import type { Provider } from "@smithy/types"; +/** + * @internal + */ +export declare const DEFAULTS_MODE_OPTIONS: string[]; +/** + * @internal + */ +export interface ResolveDefaultsModeConfigOptions { + defaultsMode?: DefaultsMode | Provider; +} diff --git a/amplify/functions/deleteDocument/node_modules/@smithy/util-defaults-mode-browser/dist-types/index.d.ts b/amplify/functions/deleteDocument/node_modules/@smithy/util-defaults-mode-browser/dist-types/index.d.ts new file mode 100644 index 0000000..003de26 --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@smithy/util-defaults-mode-browser/dist-types/index.d.ts @@ -0,0 +1,4 @@ +/** + * @internal + */ +export * from "./resolveDefaultsModeConfig"; diff --git a/amplify/functions/deleteDocument/node_modules/@smithy/util-defaults-mode-browser/dist-types/resolveDefaultsModeConfig.d.ts b/amplify/functions/deleteDocument/node_modules/@smithy/util-defaults-mode-browser/dist-types/resolveDefaultsModeConfig.d.ts new file mode 100644 index 0000000..e4cc1b7 --- /dev/null +++ 
b/amplify/functions/deleteDocument/node_modules/@smithy/util-defaults-mode-browser/dist-types/resolveDefaultsModeConfig.d.ts @@ -0,0 +1,17 @@ +import type { DefaultsMode, ResolvedDefaultsMode } from "@smithy/smithy-client"; +import type { Provider } from "@smithy/types"; +/** + * @internal + */ +export interface ResolveDefaultsModeConfigOptions { + defaultsMode?: DefaultsMode | Provider; +} +/** + * Validate the defaultsMode configuration. If the value is set to "auto", it + * resolves the value to "mobile" if the app is running in a mobile browser, + * otherwise it resolves to "standard". + * + * @default "legacy" + * @internal + */ +export declare const resolveDefaultsModeConfig: ({ defaultsMode, }?: ResolveDefaultsModeConfigOptions) => Provider; diff --git a/amplify/functions/deleteDocument/node_modules/@smithy/util-defaults-mode-browser/dist-types/resolveDefaultsModeConfig.native.d.ts b/amplify/functions/deleteDocument/node_modules/@smithy/util-defaults-mode-browser/dist-types/resolveDefaultsModeConfig.native.d.ts new file mode 100644 index 0000000..6c48ad8 --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@smithy/util-defaults-mode-browser/dist-types/resolveDefaultsModeConfig.native.d.ts @@ -0,0 +1,16 @@ +import type { DefaultsMode, ResolvedDefaultsMode } from "@smithy/smithy-client"; +import type { Provider } from "@smithy/types"; +/** + * @internal + */ +export interface ResolveDefaultsModeConfigOptions { + defaultsMode?: DefaultsMode | Provider; +} +/** + * Validate the defaultsMode configuration. If the value is set to "auto", it + * resolves the value to "mobile". 
+ * + * @default "legacy" + * @internal + */ +export declare const resolveDefaultsModeConfig: ({ defaultsMode, }?: ResolveDefaultsModeConfigOptions) => Provider; diff --git a/amplify/functions/deleteDocument/node_modules/@smithy/util-defaults-mode-browser/dist-types/ts3.4/constants.d.ts b/amplify/functions/deleteDocument/node_modules/@smithy/util-defaults-mode-browser/dist-types/ts3.4/constants.d.ts new file mode 100644 index 0000000..fc88602 --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@smithy/util-defaults-mode-browser/dist-types/ts3.4/constants.d.ts @@ -0,0 +1,12 @@ +import { DefaultsMode } from "@smithy/smithy-client"; +import { Provider } from "@smithy/types"; +/** + * @internal + */ +export declare const DEFAULTS_MODE_OPTIONS: string[]; +/** + * @internal + */ +export interface ResolveDefaultsModeConfigOptions { + defaultsMode?: DefaultsMode | Provider; +} diff --git a/amplify/functions/deleteDocument/node_modules/@smithy/util-defaults-mode-browser/dist-types/ts3.4/index.d.ts b/amplify/functions/deleteDocument/node_modules/@smithy/util-defaults-mode-browser/dist-types/ts3.4/index.d.ts new file mode 100644 index 0000000..4ab48b4 --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@smithy/util-defaults-mode-browser/dist-types/ts3.4/index.d.ts @@ -0,0 +1,4 @@ +/** + * @internal + */ +export * from "./resolveDefaultsModeConfig"; diff --git a/amplify/functions/deleteDocument/node_modules/@smithy/util-defaults-mode-browser/dist-types/ts3.4/resolveDefaultsModeConfig.d.ts b/amplify/functions/deleteDocument/node_modules/@smithy/util-defaults-mode-browser/dist-types/ts3.4/resolveDefaultsModeConfig.d.ts new file mode 100644 index 0000000..d468478 --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@smithy/util-defaults-mode-browser/dist-types/ts3.4/resolveDefaultsModeConfig.d.ts @@ -0,0 +1,17 @@ +import { DefaultsMode, ResolvedDefaultsMode } from "@smithy/smithy-client"; +import { Provider } from "@smithy/types"; +/** + * 
@internal + */ +export interface ResolveDefaultsModeConfigOptions { + defaultsMode?: DefaultsMode | Provider; +} +/** + * Validate the defaultsMode configuration. If the value is set to "auto", it + * resolves the value to "mobile" if the app is running in a mobile browser, + * otherwise it resolves to "standard". + * + * @default "legacy" + * @internal + */ +export declare const resolveDefaultsModeConfig: ({ defaultsMode, }?: ResolveDefaultsModeConfigOptions) => Provider; diff --git a/amplify/functions/deleteDocument/node_modules/@smithy/util-defaults-mode-browser/dist-types/ts3.4/resolveDefaultsModeConfig.native.d.ts b/amplify/functions/deleteDocument/node_modules/@smithy/util-defaults-mode-browser/dist-types/ts3.4/resolveDefaultsModeConfig.native.d.ts new file mode 100644 index 0000000..86fe4b7 --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@smithy/util-defaults-mode-browser/dist-types/ts3.4/resolveDefaultsModeConfig.native.d.ts @@ -0,0 +1,16 @@ +import { DefaultsMode, ResolvedDefaultsMode } from "@smithy/smithy-client"; +import { Provider } from "@smithy/types"; +/** + * @internal + */ +export interface ResolveDefaultsModeConfigOptions { + defaultsMode?: DefaultsMode | Provider; +} +/** + * Validate the defaultsMode configuration. If the value is set to "auto", it + * resolves the value to "mobile". 
+ * + * @default "legacy" + * @internal + */ +export declare const resolveDefaultsModeConfig: ({ defaultsMode, }?: ResolveDefaultsModeConfigOptions) => Provider; diff --git a/amplify/functions/deleteDocument/node_modules/@smithy/util-defaults-mode-browser/package.json b/amplify/functions/deleteDocument/node_modules/@smithy/util-defaults-mode-browser/package.json new file mode 100644 index 0000000..f06f246 --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@smithy/util-defaults-mode-browser/package.json @@ -0,0 +1,66 @@ +{ + "name": "@smithy/util-defaults-mode-browser", + "version": "4.0.10", + "scripts": { + "build": "concurrently 'yarn:build:cjs' 'yarn:build:es' 'yarn:build:types && yarn build:types:downlevel'", + "build:cjs": "node ../../scripts/inline util-defaults-mode-browser", + "build:es": "yarn g:tsc -p tsconfig.es.json", + "build:types": "yarn g:tsc -p tsconfig.types.json", + "build:types:downlevel": "rimraf dist-types/ts3.4 && downlevel-dts dist-types dist-types/ts3.4", + "stage-release": "rimraf ./.release && yarn pack && mkdir ./.release && tar zxvf ./package.tgz --directory ./.release && rm ./package.tgz", + "clean": "rimraf ./dist-* && rimraf *.tsbuildinfo || exit 0", + "lint": "eslint -c ../../.eslintrc.js \"src/**/*.ts\"", + "format": "prettier --config ../../prettier.config.js --ignore-path ../../.prettierignore --write \"**/*.{ts,md,json}\"", + "test": "yarn g:vitest run", + "test:watch": "yarn g:vitest watch" + }, + "main": "./dist-cjs/index.js", + "module": "./dist-es/index.js", + "types": "./dist-types/index.d.ts", + "author": { + "name": "AWS SDK for JavaScript Team", + "url": "https://aws.amazon.com/javascript/" + }, + "license": "Apache-2.0", + "dependencies": { + "@smithy/property-provider": "^4.0.2", + "@smithy/smithy-client": "^4.2.2", + "@smithy/types": "^4.2.0", + "bowser": "^2.11.0", + "tslib": "^2.6.2" + }, + "devDependencies": { + "@types/node": "^18.11.9", + "concurrently": "7.0.0", + "downlevel-dts": "0.10.1", + 
"rimraf": "3.0.2", + "typedoc": "0.23.23" + }, + "engines": { + "node": ">=18.0.0" + }, + "typesVersions": { + "<4.0": { + "dist-types/*": [ + "dist-types/ts3.4/*" + ] + } + }, + "files": [ + "dist-*/**" + ], + "react-native": {}, + "browser": {}, + "homepage": "https://github.com/smithy-lang/smithy-typescript/tree/main/packages/util-defaults-mode-node", + "repository": { + "type": "git", + "url": "https://github.com/smithy-lang/smithy-typescript.git", + "directory": "packages/util-defaults-mode-node" + }, + "typedoc": { + "entryPoint": "src/index.ts" + }, + "publishConfig": { + "directory": ".release/package" + } +} \ No newline at end of file diff --git a/amplify/functions/deleteDocument/node_modules/@smithy/util-defaults-mode-node/LICENSE b/amplify/functions/deleteDocument/node_modules/@smithy/util-defaults-mode-node/LICENSE new file mode 100644 index 0000000..dd65ae0 --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@smithy/util-defaults-mode-node/LICENSE @@ -0,0 +1,201 @@ + Apache License + Version 2.0, January 2004 + http://www.apache.org/licenses/ + + TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION + + 1. Definitions. + + "License" shall mean the terms and conditions for use, reproduction, + and distribution as defined by Sections 1 through 9 of this document. + + "Licensor" shall mean the copyright owner or entity authorized by + the copyright owner that is granting the License. + + "Legal Entity" shall mean the union of the acting entity and all + other entities that control, are controlled by, or are under common + control with that entity. For the purposes of this definition, + "control" means (i) the power, direct or indirect, to cause the + direction or management of such entity, whether by contract or + otherwise, or (ii) ownership of fifty percent (50%) or more of the + outstanding shares, or (iii) beneficial ownership of such entity. 
+ + "You" (or "Your") shall mean an individual or Legal Entity + exercising permissions granted by this License. + + "Source" form shall mean the preferred form for making modifications, + including but not limited to software source code, documentation + source, and configuration files. + + "Object" form shall mean any form resulting from mechanical + transformation or translation of a Source form, including but + not limited to compiled object code, generated documentation, + and conversions to other media types. + + "Work" shall mean the work of authorship, whether in Source or + Object form, made available under the License, as indicated by a + copyright notice that is included in or attached to the work + (an example is provided in the Appendix below). + + "Derivative Works" shall mean any work, whether in Source or Object + form, that is based on (or derived from) the Work and for which the + editorial revisions, annotations, elaborations, or other modifications + represent, as a whole, an original work of authorship. For the purposes + of this License, Derivative Works shall not include works that remain + separable from, or merely link (or bind by name) to the interfaces of, + the Work and Derivative Works thereof. + + "Contribution" shall mean any work of authorship, including + the original version of the Work and any modifications or additions + to that Work or Derivative Works thereof, that is intentionally + submitted to Licensor for inclusion in the Work by the copyright owner + or by an individual or Legal Entity authorized to submit on behalf of + the copyright owner. 
For the purposes of this definition, "submitted" + means any form of electronic, verbal, or written communication sent + to the Licensor or its representatives, including but not limited to + communication on electronic mailing lists, source code control systems, + and issue tracking systems that are managed by, or on behalf of, the + Licensor for the purpose of discussing and improving the Work, but + excluding communication that is conspicuously marked or otherwise + designated in writing by the copyright owner as "Not a Contribution." + + "Contributor" shall mean Licensor and any individual or Legal Entity + on behalf of whom a Contribution has been received by Licensor and + subsequently incorporated within the Work. + + 2. Grant of Copyright License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + copyright license to reproduce, prepare Derivative Works of, + publicly display, publicly perform, sublicense, and distribute the + Work and such Derivative Works in Source or Object form. + + 3. Grant of Patent License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + (except as stated in this section) patent license to make, have made, + use, offer to sell, sell, import, and otherwise transfer the Work, + where such license applies only to those patent claims licensable + by such Contributor that are necessarily infringed by their + Contribution(s) alone or by combination of their Contribution(s) + with the Work to which such Contribution(s) was submitted. 
If You + institute patent litigation against any entity (including a + cross-claim or counterclaim in a lawsuit) alleging that the Work + or a Contribution incorporated within the Work constitutes direct + or contributory patent infringement, then any patent licenses + granted to You under this License for that Work shall terminate + as of the date such litigation is filed. + + 4. Redistribution. You may reproduce and distribute copies of the + Work or Derivative Works thereof in any medium, with or without + modifications, and in Source or Object form, provided that You + meet the following conditions: + + (a) You must give any other recipients of the Work or + Derivative Works a copy of this License; and + + (b) You must cause any modified files to carry prominent notices + stating that You changed the files; and + + (c) You must retain, in the Source form of any Derivative Works + that You distribute, all copyright, patent, trademark, and + attribution notices from the Source form of the Work, + excluding those notices that do not pertain to any part of + the Derivative Works; and + + (d) If the Work includes a "NOTICE" text file as part of its + distribution, then any Derivative Works that You distribute must + include a readable copy of the attribution notices contained + within such NOTICE file, excluding those notices that do not + pertain to any part of the Derivative Works, in at least one + of the following places: within a NOTICE text file distributed + as part of the Derivative Works; within the Source form or + documentation, if provided along with the Derivative Works; or, + within a display generated by the Derivative Works, if and + wherever such third-party notices normally appear. The contents + of the NOTICE file are for informational purposes only and + do not modify the License. 
You may add Your own attribution + notices within Derivative Works that You distribute, alongside + or as an addendum to the NOTICE text from the Work, provided + that such additional attribution notices cannot be construed + as modifying the License. + + You may add Your own copyright statement to Your modifications and + may provide additional or different license terms and conditions + for use, reproduction, or distribution of Your modifications, or + for any such Derivative Works as a whole, provided Your use, + reproduction, and distribution of the Work otherwise complies with + the conditions stated in this License. + + 5. Submission of Contributions. Unless You explicitly state otherwise, + any Contribution intentionally submitted for inclusion in the Work + by You to the Licensor shall be under the terms and conditions of + this License, without any additional terms or conditions. + Notwithstanding the above, nothing herein shall supersede or modify + the terms of any separate license agreement you may have executed + with Licensor regarding such Contributions. + + 6. Trademarks. This License does not grant permission to use the trade + names, trademarks, service marks, or product names of the Licensor, + except as required for reasonable and customary use in describing the + origin of the Work and reproducing the content of the NOTICE file. + + 7. Disclaimer of Warranty. Unless required by applicable law or + agreed to in writing, Licensor provides the Work (and each + Contributor provides its Contributions) on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or + implied, including, without limitation, any warranties or conditions + of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A + PARTICULAR PURPOSE. You are solely responsible for determining the + appropriateness of using or redistributing the Work and assume any + risks associated with Your exercise of permissions under this License. + + 8. 
Limitation of Liability. In no event and under no legal theory, + whether in tort (including negligence), contract, or otherwise, + unless required by applicable law (such as deliberate and grossly + negligent acts) or agreed to in writing, shall any Contributor be + liable to You for damages, including any direct, indirect, special, + incidental, or consequential damages of any character arising as a + result of this License or out of the use or inability to use the + Work (including but not limited to damages for loss of goodwill, + work stoppage, computer failure or malfunction, or any and all + other commercial damages or losses), even if such Contributor + has been advised of the possibility of such damages. + + 9. Accepting Warranty or Additional Liability. While redistributing + the Work or Derivative Works thereof, You may choose to offer, + and charge a fee for, acceptance of support, warranty, indemnity, + or other liability obligations and/or rights consistent with this + License. However, in accepting such obligations, You may act only + on Your own behalf and on Your sole responsibility, not on behalf + of any other Contributor, and only if You agree to indemnify, + defend, and hold each Contributor harmless for any liability + incurred by, or claims asserted against, such Contributor by reason + of your accepting any such warranty or additional liability. + + END OF TERMS AND CONDITIONS + + APPENDIX: How to apply the Apache License to your work. + + To apply the Apache License to your work, attach the following + boilerplate notice, with the fields enclosed by brackets "{}" + replaced with your own identifying information. (Don't include + the brackets!) The text should be enclosed in the appropriate + comment syntax for the file format. We also recommend that a + file or class name and description of purpose be included on the + same "printed page" as the copyright notice for easier + identification within third-party archives. 
+ + Copyright 2018-2020 Amazon.com, Inc. or its affiliates. All Rights Reserved. + + Licensed under the Apache License, Version 2.0 (the "License"); + you may not use this file except in compliance with the License. + You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + + Unless required by applicable law or agreed to in writing, software + distributed under the License is distributed on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + See the License for the specific language governing permissions and + limitations under the License. diff --git a/amplify/functions/deleteDocument/node_modules/@smithy/util-defaults-mode-node/README.md b/amplify/functions/deleteDocument/node_modules/@smithy/util-defaults-mode-node/README.md new file mode 100644 index 0000000..bfae0bd --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@smithy/util-defaults-mode-node/README.md @@ -0,0 +1,10 @@ +# @smithy/util-defaults-mode-node + +[![NPM version](https://img.shields.io/npm/v/@smithy/util-defaults-mode-node/latest.svg)](https://www.npmjs.com/package/@smithy/util-defaults-mode-node) +[![NPM downloads](https://img.shields.io/npm/dm/@smithy/util-defaults-mode-node.svg)](https://www.npmjs.com/package/@smithy/util-defaults-mode-node) + +> An internal package + +## Usage + +You probably shouldn't, at least directly. 
diff --git a/amplify/functions/deleteDocument/node_modules/@smithy/util-defaults-mode-node/dist-cjs/constants.js b/amplify/functions/deleteDocument/node_modules/@smithy/util-defaults-mode-node/dist-cjs/constants.js new file mode 100644 index 0000000..532e610 --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@smithy/util-defaults-mode-node/dist-cjs/constants.js @@ -0,0 +1 @@ +module.exports = require("./index.js"); \ No newline at end of file diff --git a/amplify/functions/deleteDocument/node_modules/@smithy/util-defaults-mode-node/dist-cjs/defaultsModeConfig.js b/amplify/functions/deleteDocument/node_modules/@smithy/util-defaults-mode-node/dist-cjs/defaultsModeConfig.js new file mode 100644 index 0000000..532e610 --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@smithy/util-defaults-mode-node/dist-cjs/defaultsModeConfig.js @@ -0,0 +1 @@ +module.exports = require("./index.js"); \ No newline at end of file diff --git a/amplify/functions/deleteDocument/node_modules/@smithy/util-defaults-mode-node/dist-cjs/index.js b/amplify/functions/deleteDocument/node_modules/@smithy/util-defaults-mode-node/dist-cjs/index.js new file mode 100644 index 0000000..ddd0684 --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@smithy/util-defaults-mode-node/dist-cjs/index.js @@ -0,0 +1,119 @@ +var __create = Object.create; +var __defProp = Object.defineProperty; +var __getOwnPropDesc = Object.getOwnPropertyDescriptor; +var __getOwnPropNames = Object.getOwnPropertyNames; +var __getProtoOf = Object.getPrototypeOf; +var __hasOwnProp = Object.prototype.hasOwnProperty; +var __name = (target, value) => __defProp(target, "name", { value, configurable: true }); +var __export = (target, all) => { + for (var name in all) + __defProp(target, name, { get: all[name], enumerable: true }); +}; +var __copyProps = (to, from, except, desc) => { + if (from && typeof from === "object" || typeof from === "function") { + for (let key of __getOwnPropNames(from)) + 
if (!__hasOwnProp.call(to, key) && key !== except) + __defProp(to, key, { get: () => from[key], enumerable: !(desc = __getOwnPropDesc(from, key)) || desc.enumerable }); + } + return to; +}; +var __toESM = (mod, isNodeMode, target) => (target = mod != null ? __create(__getProtoOf(mod)) : {}, __copyProps( + // If the importer is in node compatibility mode or this is not an ESM + // file that has been converted to a CommonJS file using a Babel- + // compatible transform (i.e. "__esModule" has not been set), then set + // "default" to the CommonJS "module.exports" for node compatibility. + isNodeMode || !mod || !mod.__esModule ? __defProp(target, "default", { value: mod, enumerable: true }) : target, + mod +)); +var __toCommonJS = (mod) => __copyProps(__defProp({}, "__esModule", { value: true }), mod); + +// src/index.ts +var src_exports = {}; +__export(src_exports, { + resolveDefaultsModeConfig: () => resolveDefaultsModeConfig +}); +module.exports = __toCommonJS(src_exports); + +// src/resolveDefaultsModeConfig.ts +var import_config_resolver = require("@smithy/config-resolver"); +var import_node_config_provider = require("@smithy/node-config-provider"); +var import_property_provider = require("@smithy/property-provider"); + +// src/constants.ts +var AWS_EXECUTION_ENV = "AWS_EXECUTION_ENV"; +var AWS_REGION_ENV = "AWS_REGION"; +var AWS_DEFAULT_REGION_ENV = "AWS_DEFAULT_REGION"; +var ENV_IMDS_DISABLED = "AWS_EC2_METADATA_DISABLED"; +var DEFAULTS_MODE_OPTIONS = ["in-region", "cross-region", "mobile", "standard", "legacy"]; +var IMDS_REGION_PATH = "/latest/meta-data/placement/region"; + +// src/defaultsModeConfig.ts +var AWS_DEFAULTS_MODE_ENV = "AWS_DEFAULTS_MODE"; +var AWS_DEFAULTS_MODE_CONFIG = "defaults_mode"; +var NODE_DEFAULTS_MODE_CONFIG_OPTIONS = { + environmentVariableSelector: (env) => { + return env[AWS_DEFAULTS_MODE_ENV]; + }, + configFileSelector: (profile) => { + return profile[AWS_DEFAULTS_MODE_CONFIG]; + }, + default: "legacy" +}; + +// 
src/resolveDefaultsModeConfig.ts +var resolveDefaultsModeConfig = /* @__PURE__ */ __name(({ + region = (0, import_node_config_provider.loadConfig)(import_config_resolver.NODE_REGION_CONFIG_OPTIONS), + defaultsMode = (0, import_node_config_provider.loadConfig)(NODE_DEFAULTS_MODE_CONFIG_OPTIONS) +} = {}) => (0, import_property_provider.memoize)(async () => { + const mode = typeof defaultsMode === "function" ? await defaultsMode() : defaultsMode; + switch (mode?.toLowerCase()) { + case "auto": + return resolveNodeDefaultsModeAuto(region); + case "in-region": + case "cross-region": + case "mobile": + case "standard": + case "legacy": + return Promise.resolve(mode?.toLocaleLowerCase()); + case void 0: + return Promise.resolve("legacy"); + default: + throw new Error( + `Invalid parameter for "defaultsMode", expect ${DEFAULTS_MODE_OPTIONS.join(", ")}, got ${mode}` + ); + } +}), "resolveDefaultsModeConfig"); +var resolveNodeDefaultsModeAuto = /* @__PURE__ */ __name(async (clientRegion) => { + if (clientRegion) { + const resolvedRegion = typeof clientRegion === "function" ? await clientRegion() : clientRegion; + const inferredRegion = await inferPhysicalRegion(); + if (!inferredRegion) { + return "standard"; + } + if (resolvedRegion === inferredRegion) { + return "in-region"; + } else { + return "cross-region"; + } + } + return "standard"; +}, "resolveNodeDefaultsModeAuto"); +var inferPhysicalRegion = /* @__PURE__ */ __name(async () => { + if (process.env[AWS_EXECUTION_ENV] && (process.env[AWS_REGION_ENV] || process.env[AWS_DEFAULT_REGION_ENV])) { + return process.env[AWS_REGION_ENV] ?? 
process.env[AWS_DEFAULT_REGION_ENV]; + } + if (!process.env[ENV_IMDS_DISABLED]) { + try { + const { getInstanceMetadataEndpoint, httpRequest } = await Promise.resolve().then(() => __toESM(require("@smithy/credential-provider-imds"))); + const endpoint = await getInstanceMetadataEndpoint(); + return (await httpRequest({ ...endpoint, path: IMDS_REGION_PATH })).toString(); + } catch (e) { + } + } +}, "inferPhysicalRegion"); +// Annotate the CommonJS export names for ESM import in node: + +0 && (module.exports = { + resolveDefaultsModeConfig +}); + diff --git a/amplify/functions/deleteDocument/node_modules/@smithy/util-defaults-mode-node/dist-cjs/resolveDefaultsModeConfig.js b/amplify/functions/deleteDocument/node_modules/@smithy/util-defaults-mode-node/dist-cjs/resolveDefaultsModeConfig.js new file mode 100644 index 0000000..532e610 --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@smithy/util-defaults-mode-node/dist-cjs/resolveDefaultsModeConfig.js @@ -0,0 +1 @@ +module.exports = require("./index.js"); \ No newline at end of file diff --git a/amplify/functions/deleteDocument/node_modules/@smithy/util-defaults-mode-node/dist-es/constants.js b/amplify/functions/deleteDocument/node_modules/@smithy/util-defaults-mode-node/dist-es/constants.js new file mode 100644 index 0000000..69361a3 --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@smithy/util-defaults-mode-node/dist-es/constants.js @@ -0,0 +1,6 @@ +export const AWS_EXECUTION_ENV = "AWS_EXECUTION_ENV"; +export const AWS_REGION_ENV = "AWS_REGION"; +export const AWS_DEFAULT_REGION_ENV = "AWS_DEFAULT_REGION"; +export const ENV_IMDS_DISABLED = "AWS_EC2_METADATA_DISABLED"; +export const DEFAULTS_MODE_OPTIONS = ["in-region", "cross-region", "mobile", "standard", "legacy"]; +export const IMDS_REGION_PATH = "/latest/meta-data/placement/region"; diff --git a/amplify/functions/deleteDocument/node_modules/@smithy/util-defaults-mode-node/dist-es/defaultsModeConfig.js 
b/amplify/functions/deleteDocument/node_modules/@smithy/util-defaults-mode-node/dist-es/defaultsModeConfig.js new file mode 100644 index 0000000..f43b570 --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@smithy/util-defaults-mode-node/dist-es/defaultsModeConfig.js @@ -0,0 +1,11 @@ +const AWS_DEFAULTS_MODE_ENV = "AWS_DEFAULTS_MODE"; +const AWS_DEFAULTS_MODE_CONFIG = "defaults_mode"; +export const NODE_DEFAULTS_MODE_CONFIG_OPTIONS = { + environmentVariableSelector: (env) => { + return env[AWS_DEFAULTS_MODE_ENV]; + }, + configFileSelector: (profile) => { + return profile[AWS_DEFAULTS_MODE_CONFIG]; + }, + default: "legacy", +}; diff --git a/amplify/functions/deleteDocument/node_modules/@smithy/util-defaults-mode-node/dist-es/index.js b/amplify/functions/deleteDocument/node_modules/@smithy/util-defaults-mode-node/dist-es/index.js new file mode 100644 index 0000000..05aa818 --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@smithy/util-defaults-mode-node/dist-es/index.js @@ -0,0 +1 @@ +export * from "./resolveDefaultsModeConfig"; diff --git a/amplify/functions/deleteDocument/node_modules/@smithy/util-defaults-mode-node/dist-es/resolveDefaultsModeConfig.js b/amplify/functions/deleteDocument/node_modules/@smithy/util-defaults-mode-node/dist-es/resolveDefaultsModeConfig.js new file mode 100644 index 0000000..8c9d050 --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@smithy/util-defaults-mode-node/dist-es/resolveDefaultsModeConfig.js @@ -0,0 +1,52 @@ +import { NODE_REGION_CONFIG_OPTIONS } from "@smithy/config-resolver"; +import { loadConfig } from "@smithy/node-config-provider"; +import { memoize } from "@smithy/property-provider"; +import { AWS_DEFAULT_REGION_ENV, AWS_EXECUTION_ENV, AWS_REGION_ENV, DEFAULTS_MODE_OPTIONS, ENV_IMDS_DISABLED, IMDS_REGION_PATH, } from "./constants"; +import { NODE_DEFAULTS_MODE_CONFIG_OPTIONS } from "./defaultsModeConfig"; +export const resolveDefaultsModeConfig = ({ region = 
loadConfig(NODE_REGION_CONFIG_OPTIONS), defaultsMode = loadConfig(NODE_DEFAULTS_MODE_CONFIG_OPTIONS), } = {}) => memoize(async () => { + const mode = typeof defaultsMode === "function" ? await defaultsMode() : defaultsMode; + switch (mode?.toLowerCase()) { + case "auto": + return resolveNodeDefaultsModeAuto(region); + case "in-region": + case "cross-region": + case "mobile": + case "standard": + case "legacy": + return Promise.resolve(mode?.toLocaleLowerCase()); + case undefined: + return Promise.resolve("legacy"); + default: + throw new Error(`Invalid parameter for "defaultsMode", expect ${DEFAULTS_MODE_OPTIONS.join(", ")}, got ${mode}`); + } +}); +const resolveNodeDefaultsModeAuto = async (clientRegion) => { + if (clientRegion) { + const resolvedRegion = typeof clientRegion === "function" ? await clientRegion() : clientRegion; + const inferredRegion = await inferPhysicalRegion(); + if (!inferredRegion) { + return "standard"; + } + if (resolvedRegion === inferredRegion) { + return "in-region"; + } + else { + return "cross-region"; + } + } + return "standard"; +}; +const inferPhysicalRegion = async () => { + if (process.env[AWS_EXECUTION_ENV] && (process.env[AWS_REGION_ENV] || process.env[AWS_DEFAULT_REGION_ENV])) { + return process.env[AWS_REGION_ENV] ?? 
process.env[AWS_DEFAULT_REGION_ENV]; + } + if (!process.env[ENV_IMDS_DISABLED]) { + try { + const { getInstanceMetadataEndpoint, httpRequest } = await import("@smithy/credential-provider-imds"); + const endpoint = await getInstanceMetadataEndpoint(); + return (await httpRequest({ ...endpoint, path: IMDS_REGION_PATH })).toString(); + } + catch (e) { + } + } +}; diff --git a/amplify/functions/deleteDocument/node_modules/@smithy/util-defaults-mode-node/dist-types/constants.d.ts b/amplify/functions/deleteDocument/node_modules/@smithy/util-defaults-mode-node/dist-types/constants.d.ts new file mode 100644 index 0000000..a2db283 --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@smithy/util-defaults-mode-node/dist-types/constants.d.ts @@ -0,0 +1,24 @@ +/** + * @internal + */ +export declare const AWS_EXECUTION_ENV = "AWS_EXECUTION_ENV"; +/** + * @internal + */ +export declare const AWS_REGION_ENV = "AWS_REGION"; +/** + * @internal + */ +export declare const AWS_DEFAULT_REGION_ENV = "AWS_DEFAULT_REGION"; +/** + * @internal + */ +export declare const ENV_IMDS_DISABLED = "AWS_EC2_METADATA_DISABLED"; +/** + * @internal + */ +export declare const DEFAULTS_MODE_OPTIONS: string[]; +/** + * @internal + */ +export declare const IMDS_REGION_PATH = "/latest/meta-data/placement/region"; diff --git a/amplify/functions/deleteDocument/node_modules/@smithy/util-defaults-mode-node/dist-types/defaultsModeConfig.d.ts b/amplify/functions/deleteDocument/node_modules/@smithy/util-defaults-mode-node/dist-types/defaultsModeConfig.d.ts new file mode 100644 index 0000000..12f4dae --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@smithy/util-defaults-mode-node/dist-types/defaultsModeConfig.d.ts @@ -0,0 +1,6 @@ +import { LoadedConfigSelectors } from "@smithy/node-config-provider"; +import type { DefaultsMode } from "@smithy/smithy-client"; +/** + * @internal + */ +export declare const NODE_DEFAULTS_MODE_CONFIG_OPTIONS: LoadedConfigSelectors; diff --git 
a/amplify/functions/deleteDocument/node_modules/@smithy/util-defaults-mode-node/dist-types/index.d.ts b/amplify/functions/deleteDocument/node_modules/@smithy/util-defaults-mode-node/dist-types/index.d.ts new file mode 100644 index 0000000..003de26 --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@smithy/util-defaults-mode-node/dist-types/index.d.ts @@ -0,0 +1,4 @@ +/** + * @internal + */ +export * from "./resolveDefaultsModeConfig"; diff --git a/amplify/functions/deleteDocument/node_modules/@smithy/util-defaults-mode-node/dist-types/resolveDefaultsModeConfig.d.ts b/amplify/functions/deleteDocument/node_modules/@smithy/util-defaults-mode-node/dist-types/resolveDefaultsModeConfig.d.ts new file mode 100644 index 0000000..8f34371 --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@smithy/util-defaults-mode-node/dist-types/resolveDefaultsModeConfig.d.ts @@ -0,0 +1,17 @@ +import type { DefaultsMode, ResolvedDefaultsMode } from "@smithy/smithy-client"; +import type { Provider } from "@smithy/types"; +/** + * @internal + */ +export interface ResolveDefaultsModeConfigOptions { + defaultsMode?: DefaultsMode | Provider; + region?: string | Provider; +} +/** + * Validate the defaultsMode configuration. If the value is set to "auto", it + * resolves the value to "in-region", "cross-region", or "standard". 
+ * + * @default "legacy" + * @internal + */ +export declare const resolveDefaultsModeConfig: ({ region, defaultsMode, }?: ResolveDefaultsModeConfigOptions) => Provider; diff --git a/amplify/functions/deleteDocument/node_modules/@smithy/util-defaults-mode-node/dist-types/ts3.4/constants.d.ts b/amplify/functions/deleteDocument/node_modules/@smithy/util-defaults-mode-node/dist-types/ts3.4/constants.d.ts new file mode 100644 index 0000000..b847dc2 --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@smithy/util-defaults-mode-node/dist-types/ts3.4/constants.d.ts @@ -0,0 +1,24 @@ +/** + * @internal + */ +export declare const AWS_EXECUTION_ENV = "AWS_EXECUTION_ENV"; +/** + * @internal + */ +export declare const AWS_REGION_ENV = "AWS_REGION"; +/** + * @internal + */ +export declare const AWS_DEFAULT_REGION_ENV = "AWS_DEFAULT_REGION"; +/** + * @internal + */ +export declare const ENV_IMDS_DISABLED = "AWS_EC2_METADATA_DISABLED"; +/** + * @internal + */ +export declare const DEFAULTS_MODE_OPTIONS: string[]; +/** + * @internal + */ +export declare const IMDS_REGION_PATH = "/latest/meta-data/placement/region"; diff --git a/amplify/functions/deleteDocument/node_modules/@smithy/util-defaults-mode-node/dist-types/ts3.4/defaultsModeConfig.d.ts b/amplify/functions/deleteDocument/node_modules/@smithy/util-defaults-mode-node/dist-types/ts3.4/defaultsModeConfig.d.ts new file mode 100644 index 0000000..76c3d0d --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@smithy/util-defaults-mode-node/dist-types/ts3.4/defaultsModeConfig.d.ts @@ -0,0 +1,6 @@ +import { LoadedConfigSelectors } from "@smithy/node-config-provider"; +import { DefaultsMode } from "@smithy/smithy-client"; +/** + * @internal + */ +export declare const NODE_DEFAULTS_MODE_CONFIG_OPTIONS: LoadedConfigSelectors; diff --git a/amplify/functions/deleteDocument/node_modules/@smithy/util-defaults-mode-node/dist-types/ts3.4/index.d.ts 
b/amplify/functions/deleteDocument/node_modules/@smithy/util-defaults-mode-node/dist-types/ts3.4/index.d.ts new file mode 100644 index 0000000..4ab48b4 --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@smithy/util-defaults-mode-node/dist-types/ts3.4/index.d.ts @@ -0,0 +1,4 @@ +/** + * @internal + */ +export * from "./resolveDefaultsModeConfig"; diff --git a/amplify/functions/deleteDocument/node_modules/@smithy/util-defaults-mode-node/dist-types/ts3.4/resolveDefaultsModeConfig.d.ts b/amplify/functions/deleteDocument/node_modules/@smithy/util-defaults-mode-node/dist-types/ts3.4/resolveDefaultsModeConfig.d.ts new file mode 100644 index 0000000..4daa927 --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@smithy/util-defaults-mode-node/dist-types/ts3.4/resolveDefaultsModeConfig.d.ts @@ -0,0 +1,17 @@ +import { DefaultsMode, ResolvedDefaultsMode } from "@smithy/smithy-client"; +import { Provider } from "@smithy/types"; +/** + * @internal + */ +export interface ResolveDefaultsModeConfigOptions { + defaultsMode?: DefaultsMode | Provider; + region?: string | Provider; +} +/** + * Validate the defaultsMode configuration. If the value is set to "auto", it + * resolves the value to "in-region", "cross-region", or "standard". 
+ * + * @default "legacy" + * @internal + */ +export declare const resolveDefaultsModeConfig: ({ region, defaultsMode, }?: ResolveDefaultsModeConfigOptions) => Provider; diff --git a/amplify/functions/deleteDocument/node_modules/@smithy/util-defaults-mode-node/package.json b/amplify/functions/deleteDocument/node_modules/@smithy/util-defaults-mode-node/package.json new file mode 100644 index 0000000..0a69079 --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@smithy/util-defaults-mode-node/package.json @@ -0,0 +1,66 @@ +{ + "name": "@smithy/util-defaults-mode-node", + "version": "4.0.10", + "scripts": { + "build": "concurrently 'yarn:build:cjs' 'yarn:build:es' 'yarn:build:types && yarn build:types:downlevel'", + "build:cjs": "node ../../scripts/inline util-defaults-mode-node", + "build:es": "yarn g:tsc -p tsconfig.es.json", + "build:types": "yarn g:tsc -p tsconfig.types.json", + "build:types:downlevel": "rimraf dist-types/ts3.4 && downlevel-dts dist-types dist-types/ts3.4", + "stage-release": "rimraf ./.release && yarn pack && mkdir ./.release && tar zxvf ./package.tgz --directory ./.release && rm ./package.tgz", + "clean": "rimraf ./dist-* && rimraf *.tsbuildinfo || exit 0", + "lint": "eslint -c ../../.eslintrc.js \"src/**/*.ts\"", + "format": "prettier --config ../../prettier.config.js --ignore-path ../../.prettierignore --write \"**/*.{ts,md,json}\"", + "test": "yarn g:vitest run", + "test:watch": "yarn g:vitest watch" + }, + "main": "./dist-cjs/index.js", + "module": "./dist-es/index.js", + "types": "./dist-types/index.d.ts", + "author": { + "name": "AWS SDK for JavaScript Team", + "url": "https://aws.amazon.com/javascript/" + }, + "license": "Apache-2.0", + "dependencies": { + "@smithy/config-resolver": "^4.1.0", + "@smithy/credential-provider-imds": "^4.0.2", + "@smithy/node-config-provider": "^4.0.2", + "@smithy/property-provider": "^4.0.2", + "@smithy/smithy-client": "^4.2.2", + "@smithy/types": "^4.2.0", + "tslib": "^2.6.2" + }, + 
"devDependencies": { + "@types/node": "^18.11.9", + "concurrently": "7.0.0", + "downlevel-dts": "0.10.1", + "rimraf": "3.0.2", + "typedoc": "0.23.23" + }, + "engines": { + "node": ">=18.0.0" + }, + "typesVersions": { + "<4.0": { + "dist-types/*": [ + "dist-types/ts3.4/*" + ] + } + }, + "files": [ + "dist-*/**" + ], + "homepage": "https://github.com/smithy-lang/smithy-typescript/tree/main/packages/util-defaults-mode-node", + "repository": { + "type": "git", + "url": "https://github.com/smithy-lang/smithy-typescript.git", + "directory": "packages/util-defaults-mode-node" + }, + "typedoc": { + "entryPoint": "src/index.ts" + }, + "publishConfig": { + "directory": ".release/package" + } +} \ No newline at end of file diff --git a/amplify/functions/deleteDocument/node_modules/@smithy/util-endpoints/LICENSE b/amplify/functions/deleteDocument/node_modules/@smithy/util-endpoints/LICENSE new file mode 100644 index 0000000..a1895fa --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@smithy/util-endpoints/LICENSE @@ -0,0 +1,201 @@ +Apache License + Version 2.0, January 2004 + http://www.apache.org/licenses/ + + TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION + + 1. Definitions. + + "License" shall mean the terms and conditions for use, reproduction, + and distribution as defined by Sections 1 through 9 of this document. + + "Licensor" shall mean the copyright owner or entity authorized by + the copyright owner that is granting the License. + + "Legal Entity" shall mean the union of the acting entity and all + other entities that control, are controlled by, or are under common + control with that entity. For the purposes of this definition, + "control" means (i) the power, direct or indirect, to cause the + direction or management of such entity, whether by contract or + otherwise, or (ii) ownership of fifty percent (50%) or more of the + outstanding shares, or (iii) beneficial ownership of such entity. 
+ + "You" (or "Your") shall mean an individual or Legal Entity + exercising permissions granted by this License. + + "Source" form shall mean the preferred form for making modifications, + including but not limited to software source code, documentation + source, and configuration files. + + "Object" form shall mean any form resulting from mechanical + transformation or translation of a Source form, including but + not limited to compiled object code, generated documentation, + and conversions to other media types. + + "Work" shall mean the work of authorship, whether in Source or + Object form, made available under the License, as indicated by a + copyright notice that is included in or attached to the work + (an example is provided in the Appendix below). + + "Derivative Works" shall mean any work, whether in Source or Object + form, that is based on (or derived from) the Work and for which the + editorial revisions, annotations, elaborations, or other modifications + represent, as a whole, an original work of authorship. For the purposes + of this License, Derivative Works shall not include works that remain + separable from, or merely link (or bind by name) to the interfaces of, + the Work and Derivative Works thereof. + + "Contribution" shall mean any work of authorship, including + the original version of the Work and any modifications or additions + to that Work or Derivative Works thereof, that is intentionally + submitted to Licensor for inclusion in the Work by the copyright owner + or by an individual or Legal Entity authorized to submit on behalf of + the copyright owner. 
For the purposes of this definition, "submitted" + means any form of electronic, verbal, or written communication sent + to the Licensor or its representatives, including but not limited to + communication on electronic mailing lists, source code control systems, + and issue tracking systems that are managed by, or on behalf of, the + Licensor for the purpose of discussing and improving the Work, but + excluding communication that is conspicuously marked or otherwise + designated in writing by the copyright owner as "Not a Contribution." + + "Contributor" shall mean Licensor and any individual or Legal Entity + on behalf of whom a Contribution has been received by Licensor and + subsequently incorporated within the Work. + + 2. Grant of Copyright License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + copyright license to reproduce, prepare Derivative Works of, + publicly display, publicly perform, sublicense, and distribute the + Work and such Derivative Works in Source or Object form. + + 3. Grant of Patent License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + (except as stated in this section) patent license to make, have made, + use, offer to sell, sell, import, and otherwise transfer the Work, + where such license applies only to those patent claims licensable + by such Contributor that are necessarily infringed by their + Contribution(s) alone or by combination of their Contribution(s) + with the Work to which such Contribution(s) was submitted. 
If You + institute patent litigation against any entity (including a + cross-claim or counterclaim in a lawsuit) alleging that the Work + or a Contribution incorporated within the Work constitutes direct + or contributory patent infringement, then any patent licenses + granted to You under this License for that Work shall terminate + as of the date such litigation is filed. + + 4. Redistribution. You may reproduce and distribute copies of the + Work or Derivative Works thereof in any medium, with or without + modifications, and in Source or Object form, provided that You + meet the following conditions: + + (a) You must give any other recipients of the Work or + Derivative Works a copy of this License; and + + (b) You must cause any modified files to carry prominent notices + stating that You changed the files; and + + (c) You must retain, in the Source form of any Derivative Works + that You distribute, all copyright, patent, trademark, and + attribution notices from the Source form of the Work, + excluding those notices that do not pertain to any part of + the Derivative Works; and + + (d) If the Work includes a "NOTICE" text file as part of its + distribution, then any Derivative Works that You distribute must + include a readable copy of the attribution notices contained + within such NOTICE file, excluding those notices that do not + pertain to any part of the Derivative Works, in at least one + of the following places: within a NOTICE text file distributed + as part of the Derivative Works; within the Source form or + documentation, if provided along with the Derivative Works; or, + within a display generated by the Derivative Works, if and + wherever such third-party notices normally appear. The contents + of the NOTICE file are for informational purposes only and + do not modify the License. 
You may add Your own attribution + notices within Derivative Works that You distribute, alongside + or as an addendum to the NOTICE text from the Work, provided + that such additional attribution notices cannot be construed + as modifying the License. + + You may add Your own copyright statement to Your modifications and + may provide additional or different license terms and conditions + for use, reproduction, or distribution of Your modifications, or + for any such Derivative Works as a whole, provided Your use, + reproduction, and distribution of the Work otherwise complies with + the conditions stated in this License. + + 5. Submission of Contributions. Unless You explicitly state otherwise, + any Contribution intentionally submitted for inclusion in the Work + by You to the Licensor shall be under the terms and conditions of + this License, without any additional terms or conditions. + Notwithstanding the above, nothing herein shall supersede or modify + the terms of any separate license agreement you may have executed + with Licensor regarding such Contributions. + + 6. Trademarks. This License does not grant permission to use the trade + names, trademarks, service marks, or product names of the Licensor, + except as required for reasonable and customary use in describing the + origin of the Work and reproducing the content of the NOTICE file. + + 7. Disclaimer of Warranty. Unless required by applicable law or + agreed to in writing, Licensor provides the Work (and each + Contributor provides its Contributions) on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or + implied, including, without limitation, any warranties or conditions + of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A + PARTICULAR PURPOSE. You are solely responsible for determining the + appropriateness of using or redistributing the Work and assume any + risks associated with Your exercise of permissions under this License. + + 8. 
Limitation of Liability. In no event and under no legal theory, + whether in tort (including negligence), contract, or otherwise, + unless required by applicable law (such as deliberate and grossly + negligent acts) or agreed to in writing, shall any Contributor be + liable to You for damages, including any direct, indirect, special, + incidental, or consequential damages of any character arising as a + result of this License or out of the use or inability to use the + Work (including but not limited to damages for loss of goodwill, + work stoppage, computer failure or malfunction, or any and all + other commercial damages or losses), even if such Contributor + has been advised of the possibility of such damages. + + 9. Accepting Warranty or Additional Liability. While redistributing + the Work or Derivative Works thereof, You may choose to offer, + and charge a fee for, acceptance of support, warranty, indemnity, + or other liability obligations and/or rights consistent with this + License. However, in accepting such obligations, You may act only + on Your own behalf and on Your sole responsibility, not on behalf + of any other Contributor, and only if You agree to indemnify, + defend, and hold each Contributor harmless for any liability + incurred by, or claims asserted against, such Contributor by reason + of your accepting any such warranty or additional liability. + + END OF TERMS AND CONDITIONS + + APPENDIX: How to apply the Apache License to your work. + + To apply the Apache License to your work, attach the following + boilerplate notice, with the fields enclosed by brackets "{}" + replaced with your own identifying information. (Don't include + the brackets!) The text should be enclosed in the appropriate + comment syntax for the file format. We also recommend that a + file or class name and description of purpose be included on the + same "printed page" as the copyright notice for easier + identification within third-party archives. 
+ + Copyright 2021 Amazon.com, Inc. or its affiliates. All Rights Reserved. + + Licensed under the Apache License, Version 2.0 (the "License"); + you may not use this file except in compliance with the License. + You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + + Unless required by applicable law or agreed to in writing, software + distributed under the License is distributed on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + See the License for the specific language governing permissions and + limitations under the License. \ No newline at end of file diff --git a/amplify/functions/deleteDocument/node_modules/@smithy/util-endpoints/README.md b/amplify/functions/deleteDocument/node_modules/@smithy/util-endpoints/README.md new file mode 100644 index 0000000..85d60b3 --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@smithy/util-endpoints/README.md @@ -0,0 +1,10 @@ +# @smithy/util-endpoints + +[![NPM version](https://img.shields.io/npm/v/@smithy/util-endpoints/latest.svg)](https://www.npmjs.com/package/@smithy/util-endpoints) +[![NPM downloads](https://img.shields.io/npm/dm/@smithy/util-endpoints.svg)](https://www.npmjs.com/package/@smithy/util-endpoints) + +> An internal package + +## Usage + +You probably shouldn't, at least directly. 
diff --git a/amplify/functions/deleteDocument/node_modules/@smithy/util-endpoints/dist-cjs/cache/EndpointCache.js b/amplify/functions/deleteDocument/node_modules/@smithy/util-endpoints/dist-cjs/cache/EndpointCache.js new file mode 100644 index 0000000..0440577 --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@smithy/util-endpoints/dist-cjs/cache/EndpointCache.js @@ -0,0 +1 @@ +module.exports = require("../index.js"); \ No newline at end of file diff --git a/amplify/functions/deleteDocument/node_modules/@smithy/util-endpoints/dist-cjs/debug/debugId.js b/amplify/functions/deleteDocument/node_modules/@smithy/util-endpoints/dist-cjs/debug/debugId.js new file mode 100644 index 0000000..0440577 --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@smithy/util-endpoints/dist-cjs/debug/debugId.js @@ -0,0 +1 @@ +module.exports = require("../index.js"); \ No newline at end of file diff --git a/amplify/functions/deleteDocument/node_modules/@smithy/util-endpoints/dist-cjs/debug/index.js b/amplify/functions/deleteDocument/node_modules/@smithy/util-endpoints/dist-cjs/debug/index.js new file mode 100644 index 0000000..0440577 --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@smithy/util-endpoints/dist-cjs/debug/index.js @@ -0,0 +1 @@ +module.exports = require("../index.js"); \ No newline at end of file diff --git a/amplify/functions/deleteDocument/node_modules/@smithy/util-endpoints/dist-cjs/debug/toDebugString.js b/amplify/functions/deleteDocument/node_modules/@smithy/util-endpoints/dist-cjs/debug/toDebugString.js new file mode 100644 index 0000000..0440577 --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@smithy/util-endpoints/dist-cjs/debug/toDebugString.js @@ -0,0 +1 @@ +module.exports = require("../index.js"); \ No newline at end of file diff --git a/amplify/functions/deleteDocument/node_modules/@smithy/util-endpoints/dist-cjs/getEndpointUrlConfig.js 
b/amplify/functions/deleteDocument/node_modules/@smithy/util-endpoints/dist-cjs/getEndpointUrlConfig.js new file mode 100644 index 0000000..532e610 --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@smithy/util-endpoints/dist-cjs/getEndpointUrlConfig.js @@ -0,0 +1 @@ +module.exports = require("./index.js"); \ No newline at end of file diff --git a/amplify/functions/deleteDocument/node_modules/@smithy/util-endpoints/dist-cjs/index.js b/amplify/functions/deleteDocument/node_modules/@smithy/util-endpoints/dist-cjs/index.js new file mode 100644 index 0000000..3bc5a7d --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@smithy/util-endpoints/dist-cjs/index.js @@ -0,0 +1,544 @@ +var __defProp = Object.defineProperty; +var __getOwnPropDesc = Object.getOwnPropertyDescriptor; +var __getOwnPropNames = Object.getOwnPropertyNames; +var __hasOwnProp = Object.prototype.hasOwnProperty; +var __name = (target, value) => __defProp(target, "name", { value, configurable: true }); +var __export = (target, all) => { + for (var name in all) + __defProp(target, name, { get: all[name], enumerable: true }); +}; +var __copyProps = (to, from, except, desc) => { + if (from && typeof from === "object" || typeof from === "function") { + for (let key of __getOwnPropNames(from)) + if (!__hasOwnProp.call(to, key) && key !== except) + __defProp(to, key, { get: () => from[key], enumerable: !(desc = __getOwnPropDesc(from, key)) || desc.enumerable }); + } + return to; +}; +var __toCommonJS = (mod) => __copyProps(__defProp({}, "__esModule", { value: true }), mod); + +// src/index.ts +var src_exports = {}; +__export(src_exports, { + EndpointCache: () => EndpointCache, + EndpointError: () => EndpointError, + customEndpointFunctions: () => customEndpointFunctions, + isIpAddress: () => isIpAddress, + isValidHostLabel: () => isValidHostLabel, + resolveEndpoint: () => resolveEndpoint +}); +module.exports = __toCommonJS(src_exports); + +// src/cache/EndpointCache.ts +var 
EndpointCache = class { + /** + * @param [size] - desired average maximum capacity. A buffer of 10 additional keys will be allowed + * before keys are dropped. + * @param [params] - list of params to consider as part of the cache key. + * + * If the params list is not populated, no caching will happen. + * This may be out of order depending on how the object is created and arrives to this class. + */ + constructor({ size, params }) { + this.data = /* @__PURE__ */ new Map(); + this.parameters = []; + this.capacity = size ?? 50; + if (params) { + this.parameters = params; + } + } + static { + __name(this, "EndpointCache"); + } + /** + * @param endpointParams - query for endpoint. + * @param resolver - provider of the value if not present. + * @returns endpoint corresponding to the query. + */ + get(endpointParams, resolver) { + const key = this.hash(endpointParams); + if (key === false) { + return resolver(); + } + if (!this.data.has(key)) { + if (this.data.size > this.capacity + 10) { + const keys = this.data.keys(); + let i = 0; + while (true) { + const { value, done } = keys.next(); + this.data.delete(value); + if (done || ++i > 10) { + break; + } + } + } + this.data.set(key, resolver()); + } + return this.data.get(key); + } + size() { + return this.data.size; + } + /** + * @returns cache key or false if not cachable. + */ + hash(endpointParams) { + let buffer = ""; + const { parameters } = this; + if (parameters.length === 0) { + return false; + } + for (const param of parameters) { + const val = String(endpointParams[param] ?? 
""); + if (val.includes("|;")) { + return false; + } + buffer += val + "|;"; + } + return buffer; + } +}; + +// src/lib/isIpAddress.ts +var IP_V4_REGEX = new RegExp( + `^(?:25[0-5]|2[0-4]\\d|1\\d\\d|[1-9]\\d|\\d)(?:\\.(?:25[0-5]|2[0-4]\\d|1\\d\\d|[1-9]\\d|\\d)){3}$` +); +var isIpAddress = /* @__PURE__ */ __name((value) => IP_V4_REGEX.test(value) || value.startsWith("[") && value.endsWith("]"), "isIpAddress"); + +// src/lib/isValidHostLabel.ts +var VALID_HOST_LABEL_REGEX = new RegExp(`^(?!.*-$)(?!-)[a-zA-Z0-9-]{1,63}$`); +var isValidHostLabel = /* @__PURE__ */ __name((value, allowSubDomains = false) => { + if (!allowSubDomains) { + return VALID_HOST_LABEL_REGEX.test(value); + } + const labels = value.split("."); + for (const label of labels) { + if (!isValidHostLabel(label)) { + return false; + } + } + return true; +}, "isValidHostLabel"); + +// src/utils/customEndpointFunctions.ts +var customEndpointFunctions = {}; + +// src/debug/debugId.ts +var debugId = "endpoints"; + +// src/debug/toDebugString.ts +function toDebugString(input) { + if (typeof input !== "object" || input == null) { + return input; + } + if ("ref" in input) { + return `$${toDebugString(input.ref)}`; + } + if ("fn" in input) { + return `${input.fn}(${(input.argv || []).map(toDebugString).join(", ")})`; + } + return JSON.stringify(input, null, 2); +} +__name(toDebugString, "toDebugString"); + +// src/types/EndpointError.ts +var EndpointError = class extends Error { + static { + __name(this, "EndpointError"); + } + constructor(message) { + super(message); + this.name = "EndpointError"; + } +}; + +// src/lib/booleanEquals.ts +var booleanEquals = /* @__PURE__ */ __name((value1, value2) => value1 === value2, "booleanEquals"); + +// src/lib/getAttrPathList.ts +var getAttrPathList = /* @__PURE__ */ __name((path) => { + const parts = path.split("."); + const pathList = []; + for (const part of parts) { + const squareBracketIndex = part.indexOf("["); + if (squareBracketIndex !== -1) { + if 
(part.indexOf("]") !== part.length - 1) { + throw new EndpointError(`Path: '${path}' does not end with ']'`); + } + const arrayIndex = part.slice(squareBracketIndex + 1, -1); + if (Number.isNaN(parseInt(arrayIndex))) { + throw new EndpointError(`Invalid array index: '${arrayIndex}' in path: '${path}'`); + } + if (squareBracketIndex !== 0) { + pathList.push(part.slice(0, squareBracketIndex)); + } + pathList.push(arrayIndex); + } else { + pathList.push(part); + } + } + return pathList; +}, "getAttrPathList"); + +// src/lib/getAttr.ts +var getAttr = /* @__PURE__ */ __name((value, path) => getAttrPathList(path).reduce((acc, index) => { + if (typeof acc !== "object") { + throw new EndpointError(`Index '${index}' in '${path}' not found in '${JSON.stringify(value)}'`); + } else if (Array.isArray(acc)) { + return acc[parseInt(index)]; + } + return acc[index]; +}, value), "getAttr"); + +// src/lib/isSet.ts +var isSet = /* @__PURE__ */ __name((value) => value != null, "isSet"); + +// src/lib/not.ts +var not = /* @__PURE__ */ __name((value) => !value, "not"); + +// src/lib/parseURL.ts +var import_types3 = require("@smithy/types"); +var DEFAULT_PORTS = { + [import_types3.EndpointURLScheme.HTTP]: 80, + [import_types3.EndpointURLScheme.HTTPS]: 443 +}; +var parseURL = /* @__PURE__ */ __name((value) => { + const whatwgURL = (() => { + try { + if (value instanceof URL) { + return value; + } + if (typeof value === "object" && "hostname" in value) { + const { hostname: hostname2, port, protocol: protocol2 = "", path = "", query = {} } = value; + const url = new URL(`${protocol2}//${hostname2}${port ? 
`:${port}` : ""}${path}`); + url.search = Object.entries(query).map(([k, v]) => `${k}=${v}`).join("&"); + return url; + } + return new URL(value); + } catch (error) { + return null; + } + })(); + if (!whatwgURL) { + console.error(`Unable to parse ${JSON.stringify(value)} as a whatwg URL.`); + return null; + } + const urlString = whatwgURL.href; + const { host, hostname, pathname, protocol, search } = whatwgURL; + if (search) { + return null; + } + const scheme = protocol.slice(0, -1); + if (!Object.values(import_types3.EndpointURLScheme).includes(scheme)) { + return null; + } + const isIp = isIpAddress(hostname); + const inputContainsDefaultPort = urlString.includes(`${host}:${DEFAULT_PORTS[scheme]}`) || typeof value === "string" && value.includes(`${host}:${DEFAULT_PORTS[scheme]}`); + const authority = `${host}${inputContainsDefaultPort ? `:${DEFAULT_PORTS[scheme]}` : ``}`; + return { + scheme, + authority, + path: pathname, + normalizedPath: pathname.endsWith("/") ? pathname : `${pathname}/`, + isIp + }; +}, "parseURL"); + +// src/lib/stringEquals.ts +var stringEquals = /* @__PURE__ */ __name((value1, value2) => value1 === value2, "stringEquals"); + +// src/lib/substring.ts +var substring = /* @__PURE__ */ __name((input, start, stop, reverse) => { + if (start >= stop || input.length < stop) { + return null; + } + if (!reverse) { + return input.substring(start, stop); + } + return input.substring(input.length - stop, input.length - start); +}, "substring"); + +// src/lib/uriEncode.ts +var uriEncode = /* @__PURE__ */ __name((value) => encodeURIComponent(value).replace(/[!*'()]/g, (c) => `%${c.charCodeAt(0).toString(16).toUpperCase()}`), "uriEncode"); + +// src/utils/endpointFunctions.ts +var endpointFunctions = { + booleanEquals, + getAttr, + isSet, + isValidHostLabel, + not, + parseURL, + stringEquals, + substring, + uriEncode +}; + +// src/utils/evaluateTemplate.ts +var evaluateTemplate = /* @__PURE__ */ __name((template, options) => { + const 
evaluatedTemplateArr = []; + const templateContext = { + ...options.endpointParams, + ...options.referenceRecord + }; + let currentIndex = 0; + while (currentIndex < template.length) { + const openingBraceIndex = template.indexOf("{", currentIndex); + if (openingBraceIndex === -1) { + evaluatedTemplateArr.push(template.slice(currentIndex)); + break; + } + evaluatedTemplateArr.push(template.slice(currentIndex, openingBraceIndex)); + const closingBraceIndex = template.indexOf("}", openingBraceIndex); + if (closingBraceIndex === -1) { + evaluatedTemplateArr.push(template.slice(openingBraceIndex)); + break; + } + if (template[openingBraceIndex + 1] === "{" && template[closingBraceIndex + 1] === "}") { + evaluatedTemplateArr.push(template.slice(openingBraceIndex + 1, closingBraceIndex)); + currentIndex = closingBraceIndex + 2; + } + const parameterName = template.substring(openingBraceIndex + 1, closingBraceIndex); + if (parameterName.includes("#")) { + const [refName, attrName] = parameterName.split("#"); + evaluatedTemplateArr.push(getAttr(templateContext[refName], attrName)); + } else { + evaluatedTemplateArr.push(templateContext[parameterName]); + } + currentIndex = closingBraceIndex + 1; + } + return evaluatedTemplateArr.join(""); +}, "evaluateTemplate"); + +// src/utils/getReferenceValue.ts +var getReferenceValue = /* @__PURE__ */ __name(({ ref }, options) => { + const referenceRecord = { + ...options.endpointParams, + ...options.referenceRecord + }; + return referenceRecord[ref]; +}, "getReferenceValue"); + +// src/utils/evaluateExpression.ts +var evaluateExpression = /* @__PURE__ */ __name((obj, keyName, options) => { + if (typeof obj === "string") { + return evaluateTemplate(obj, options); + } else if (obj["fn"]) { + return callFunction(obj, options); + } else if (obj["ref"]) { + return getReferenceValue(obj, options); + } + throw new EndpointError(`'${keyName}': ${String(obj)} is not a string, function or reference.`); +}, "evaluateExpression"); + +// 
src/utils/callFunction.ts +var callFunction = /* @__PURE__ */ __name(({ fn, argv }, options) => { + const evaluatedArgs = argv.map( + (arg) => ["boolean", "number"].includes(typeof arg) ? arg : evaluateExpression(arg, "arg", options) + ); + const fnSegments = fn.split("."); + if (fnSegments[0] in customEndpointFunctions && fnSegments[1] != null) { + return customEndpointFunctions[fnSegments[0]][fnSegments[1]](...evaluatedArgs); + } + return endpointFunctions[fn](...evaluatedArgs); +}, "callFunction"); + +// src/utils/evaluateCondition.ts +var evaluateCondition = /* @__PURE__ */ __name(({ assign, ...fnArgs }, options) => { + if (assign && assign in options.referenceRecord) { + throw new EndpointError(`'${assign}' is already defined in Reference Record.`); + } + const value = callFunction(fnArgs, options); + options.logger?.debug?.(`${debugId} evaluateCondition: ${toDebugString(fnArgs)} = ${toDebugString(value)}`); + return { + result: value === "" ? true : !!value, + ...assign != null && { toAssign: { name: assign, value } } + }; +}, "evaluateCondition"); + +// src/utils/evaluateConditions.ts +var evaluateConditions = /* @__PURE__ */ __name((conditions = [], options) => { + const conditionsReferenceRecord = {}; + for (const condition of conditions) { + const { result, toAssign } = evaluateCondition(condition, { + ...options, + referenceRecord: { + ...options.referenceRecord, + ...conditionsReferenceRecord + } + }); + if (!result) { + return { result }; + } + if (toAssign) { + conditionsReferenceRecord[toAssign.name] = toAssign.value; + options.logger?.debug?.(`${debugId} assign: ${toAssign.name} := ${toDebugString(toAssign.value)}`); + } + } + return { result: true, referenceRecord: conditionsReferenceRecord }; +}, "evaluateConditions"); + +// src/utils/getEndpointHeaders.ts +var getEndpointHeaders = /* @__PURE__ */ __name((headers, options) => Object.entries(headers).reduce( + (acc, [headerKey, headerVal]) => ({ + ...acc, + [headerKey]: 
headerVal.map((headerValEntry) => { + const processedExpr = evaluateExpression(headerValEntry, "Header value entry", options); + if (typeof processedExpr !== "string") { + throw new EndpointError(`Header '${headerKey}' value '${processedExpr}' is not a string`); + } + return processedExpr; + }) + }), + {} +), "getEndpointHeaders"); + +// src/utils/getEndpointProperty.ts +var getEndpointProperty = /* @__PURE__ */ __name((property, options) => { + if (Array.isArray(property)) { + return property.map((propertyEntry) => getEndpointProperty(propertyEntry, options)); + } + switch (typeof property) { + case "string": + return evaluateTemplate(property, options); + case "object": + if (property === null) { + throw new EndpointError(`Unexpected endpoint property: ${property}`); + } + return getEndpointProperties(property, options); + case "boolean": + return property; + default: + throw new EndpointError(`Unexpected endpoint property type: ${typeof property}`); + } +}, "getEndpointProperty"); + +// src/utils/getEndpointProperties.ts +var getEndpointProperties = /* @__PURE__ */ __name((properties, options) => Object.entries(properties).reduce( + (acc, [propertyKey, propertyVal]) => ({ + ...acc, + [propertyKey]: getEndpointProperty(propertyVal, options) + }), + {} +), "getEndpointProperties"); + +// src/utils/getEndpointUrl.ts +var getEndpointUrl = /* @__PURE__ */ __name((endpointUrl, options) => { + const expression = evaluateExpression(endpointUrl, "Endpoint URL", options); + if (typeof expression === "string") { + try { + return new URL(expression); + } catch (error) { + console.error(`Failed to construct URL with ${expression}`, error); + throw error; + } + } + throw new EndpointError(`Endpoint URL must be a string, got ${typeof expression}`); +}, "getEndpointUrl"); + +// src/utils/evaluateEndpointRule.ts +var evaluateEndpointRule = /* @__PURE__ */ __name((endpointRule, options) => { + const { conditions, endpoint } = endpointRule; + const { result, referenceRecord } = 
evaluateConditions(conditions, options); + if (!result) { + return; + } + const endpointRuleOptions = { + ...options, + referenceRecord: { ...options.referenceRecord, ...referenceRecord } + }; + const { url, properties, headers } = endpoint; + options.logger?.debug?.(`${debugId} Resolving endpoint from template: ${toDebugString(endpoint)}`); + return { + ...headers != void 0 && { + headers: getEndpointHeaders(headers, endpointRuleOptions) + }, + ...properties != void 0 && { + properties: getEndpointProperties(properties, endpointRuleOptions) + }, + url: getEndpointUrl(url, endpointRuleOptions) + }; +}, "evaluateEndpointRule"); + +// src/utils/evaluateErrorRule.ts +var evaluateErrorRule = /* @__PURE__ */ __name((errorRule, options) => { + const { conditions, error } = errorRule; + const { result, referenceRecord } = evaluateConditions(conditions, options); + if (!result) { + return; + } + throw new EndpointError( + evaluateExpression(error, "Error", { + ...options, + referenceRecord: { ...options.referenceRecord, ...referenceRecord } + }) + ); +}, "evaluateErrorRule"); + +// src/utils/evaluateTreeRule.ts +var evaluateTreeRule = /* @__PURE__ */ __name((treeRule, options) => { + const { conditions, rules } = treeRule; + const { result, referenceRecord } = evaluateConditions(conditions, options); + if (!result) { + return; + } + return evaluateRules(rules, { + ...options, + referenceRecord: { ...options.referenceRecord, ...referenceRecord } + }); +}, "evaluateTreeRule"); + +// src/utils/evaluateRules.ts +var evaluateRules = /* @__PURE__ */ __name((rules, options) => { + for (const rule of rules) { + if (rule.type === "endpoint") { + const endpointOrUndefined = evaluateEndpointRule(rule, options); + if (endpointOrUndefined) { + return endpointOrUndefined; + } + } else if (rule.type === "error") { + evaluateErrorRule(rule, options); + } else if (rule.type === "tree") { + const endpointOrUndefined = evaluateTreeRule(rule, options); + if (endpointOrUndefined) { + return 
endpointOrUndefined; + } + } else { + throw new EndpointError(`Unknown endpoint rule: ${rule}`); + } + } + throw new EndpointError(`Rules evaluation failed`); +}, "evaluateRules"); + +// src/resolveEndpoint.ts +var resolveEndpoint = /* @__PURE__ */ __name((ruleSetObject, options) => { + const { endpointParams, logger } = options; + const { parameters, rules } = ruleSetObject; + options.logger?.debug?.(`${debugId} Initial EndpointParams: ${toDebugString(endpointParams)}`); + const paramsWithDefault = Object.entries(parameters).filter(([, v]) => v.default != null).map(([k, v]) => [k, v.default]); + if (paramsWithDefault.length > 0) { + for (const [paramKey, paramDefaultValue] of paramsWithDefault) { + endpointParams[paramKey] = endpointParams[paramKey] ?? paramDefaultValue; + } + } + const requiredParams = Object.entries(parameters).filter(([, v]) => v.required).map(([k]) => k); + for (const requiredParam of requiredParams) { + if (endpointParams[requiredParam] == null) { + throw new EndpointError(`Missing required parameter: '${requiredParam}'`); + } + } + const endpoint = evaluateRules(rules, { endpointParams, logger, referenceRecord: {} }); + options.logger?.debug?.(`${debugId} Resolved endpoint: ${toDebugString(endpoint)}`); + return endpoint; +}, "resolveEndpoint"); +// Annotate the CommonJS export names for ESM import in node: + +0 && (module.exports = { + EndpointCache, + isIpAddress, + isValidHostLabel, + customEndpointFunctions, + resolveEndpoint, + EndpointError +}); + diff --git a/amplify/functions/deleteDocument/node_modules/@smithy/util-endpoints/dist-cjs/lib/booleanEquals.js b/amplify/functions/deleteDocument/node_modules/@smithy/util-endpoints/dist-cjs/lib/booleanEquals.js new file mode 100644 index 0000000..0440577 --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@smithy/util-endpoints/dist-cjs/lib/booleanEquals.js @@ -0,0 +1 @@ +module.exports = require("../index.js"); \ No newline at end of file diff --git 
a/amplify/functions/deleteDocument/node_modules/@smithy/util-endpoints/dist-cjs/lib/getAttr.js b/amplify/functions/deleteDocument/node_modules/@smithy/util-endpoints/dist-cjs/lib/getAttr.js new file mode 100644 index 0000000..0440577 --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@smithy/util-endpoints/dist-cjs/lib/getAttr.js @@ -0,0 +1 @@ +module.exports = require("../index.js"); \ No newline at end of file diff --git a/amplify/functions/deleteDocument/node_modules/@smithy/util-endpoints/dist-cjs/lib/getAttrPathList.js b/amplify/functions/deleteDocument/node_modules/@smithy/util-endpoints/dist-cjs/lib/getAttrPathList.js new file mode 100644 index 0000000..0440577 --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@smithy/util-endpoints/dist-cjs/lib/getAttrPathList.js @@ -0,0 +1 @@ +module.exports = require("../index.js"); \ No newline at end of file diff --git a/amplify/functions/deleteDocument/node_modules/@smithy/util-endpoints/dist-cjs/lib/index.js b/amplify/functions/deleteDocument/node_modules/@smithy/util-endpoints/dist-cjs/lib/index.js new file mode 100644 index 0000000..0440577 --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@smithy/util-endpoints/dist-cjs/lib/index.js @@ -0,0 +1 @@ +module.exports = require("../index.js"); \ No newline at end of file diff --git a/amplify/functions/deleteDocument/node_modules/@smithy/util-endpoints/dist-cjs/lib/isIpAddress.js b/amplify/functions/deleteDocument/node_modules/@smithy/util-endpoints/dist-cjs/lib/isIpAddress.js new file mode 100644 index 0000000..0440577 --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@smithy/util-endpoints/dist-cjs/lib/isIpAddress.js @@ -0,0 +1 @@ +module.exports = require("../index.js"); \ No newline at end of file diff --git a/amplify/functions/deleteDocument/node_modules/@smithy/util-endpoints/dist-cjs/lib/isSet.js b/amplify/functions/deleteDocument/node_modules/@smithy/util-endpoints/dist-cjs/lib/isSet.js new file mode 
100644 index 0000000..0440577 --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@smithy/util-endpoints/dist-cjs/lib/isSet.js @@ -0,0 +1 @@ +module.exports = require("../index.js"); \ No newline at end of file diff --git a/amplify/functions/deleteDocument/node_modules/@smithy/util-endpoints/dist-cjs/lib/isValidHostLabel.js b/amplify/functions/deleteDocument/node_modules/@smithy/util-endpoints/dist-cjs/lib/isValidHostLabel.js new file mode 100644 index 0000000..0440577 --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@smithy/util-endpoints/dist-cjs/lib/isValidHostLabel.js @@ -0,0 +1 @@ +module.exports = require("../index.js"); \ No newline at end of file diff --git a/amplify/functions/deleteDocument/node_modules/@smithy/util-endpoints/dist-cjs/lib/not.js b/amplify/functions/deleteDocument/node_modules/@smithy/util-endpoints/dist-cjs/lib/not.js new file mode 100644 index 0000000..0440577 --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@smithy/util-endpoints/dist-cjs/lib/not.js @@ -0,0 +1 @@ +module.exports = require("../index.js"); \ No newline at end of file diff --git a/amplify/functions/deleteDocument/node_modules/@smithy/util-endpoints/dist-cjs/lib/parseURL.js b/amplify/functions/deleteDocument/node_modules/@smithy/util-endpoints/dist-cjs/lib/parseURL.js new file mode 100644 index 0000000..0440577 --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@smithy/util-endpoints/dist-cjs/lib/parseURL.js @@ -0,0 +1 @@ +module.exports = require("../index.js"); \ No newline at end of file diff --git a/amplify/functions/deleteDocument/node_modules/@smithy/util-endpoints/dist-cjs/lib/stringEquals.js b/amplify/functions/deleteDocument/node_modules/@smithy/util-endpoints/dist-cjs/lib/stringEquals.js new file mode 100644 index 0000000..0440577 --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@smithy/util-endpoints/dist-cjs/lib/stringEquals.js @@ -0,0 +1 @@ +module.exports = require("../index.js"); \ 
No newline at end of file diff --git a/amplify/functions/deleteDocument/node_modules/@smithy/util-endpoints/dist-cjs/lib/substring.js b/amplify/functions/deleteDocument/node_modules/@smithy/util-endpoints/dist-cjs/lib/substring.js new file mode 100644 index 0000000..0440577 --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@smithy/util-endpoints/dist-cjs/lib/substring.js @@ -0,0 +1 @@ +module.exports = require("../index.js"); \ No newline at end of file diff --git a/amplify/functions/deleteDocument/node_modules/@smithy/util-endpoints/dist-cjs/lib/uriEncode.js b/amplify/functions/deleteDocument/node_modules/@smithy/util-endpoints/dist-cjs/lib/uriEncode.js new file mode 100644 index 0000000..0440577 --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@smithy/util-endpoints/dist-cjs/lib/uriEncode.js @@ -0,0 +1 @@ +module.exports = require("../index.js"); \ No newline at end of file diff --git a/amplify/functions/deleteDocument/node_modules/@smithy/util-endpoints/dist-cjs/resolveEndpoint.js b/amplify/functions/deleteDocument/node_modules/@smithy/util-endpoints/dist-cjs/resolveEndpoint.js new file mode 100644 index 0000000..532e610 --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@smithy/util-endpoints/dist-cjs/resolveEndpoint.js @@ -0,0 +1 @@ +module.exports = require("./index.js"); \ No newline at end of file diff --git a/amplify/functions/deleteDocument/node_modules/@smithy/util-endpoints/dist-cjs/types/EndpointError.js b/amplify/functions/deleteDocument/node_modules/@smithy/util-endpoints/dist-cjs/types/EndpointError.js new file mode 100644 index 0000000..0440577 --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@smithy/util-endpoints/dist-cjs/types/EndpointError.js @@ -0,0 +1 @@ +module.exports = require("../index.js"); \ No newline at end of file diff --git a/amplify/functions/deleteDocument/node_modules/@smithy/util-endpoints/dist-cjs/types/EndpointFunctions.js 
b/amplify/functions/deleteDocument/node_modules/@smithy/util-endpoints/dist-cjs/types/EndpointFunctions.js new file mode 100644 index 0000000..0440577 --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@smithy/util-endpoints/dist-cjs/types/EndpointFunctions.js @@ -0,0 +1 @@ +module.exports = require("../index.js"); \ No newline at end of file diff --git a/amplify/functions/deleteDocument/node_modules/@smithy/util-endpoints/dist-cjs/types/EndpointRuleObject.js b/amplify/functions/deleteDocument/node_modules/@smithy/util-endpoints/dist-cjs/types/EndpointRuleObject.js new file mode 100644 index 0000000..0440577 --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@smithy/util-endpoints/dist-cjs/types/EndpointRuleObject.js @@ -0,0 +1 @@ +module.exports = require("../index.js"); \ No newline at end of file diff --git a/amplify/functions/deleteDocument/node_modules/@smithy/util-endpoints/dist-cjs/types/ErrorRuleObject.js b/amplify/functions/deleteDocument/node_modules/@smithy/util-endpoints/dist-cjs/types/ErrorRuleObject.js new file mode 100644 index 0000000..0440577 --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@smithy/util-endpoints/dist-cjs/types/ErrorRuleObject.js @@ -0,0 +1 @@ +module.exports = require("../index.js"); \ No newline at end of file diff --git a/amplify/functions/deleteDocument/node_modules/@smithy/util-endpoints/dist-cjs/types/RuleSetObject.js b/amplify/functions/deleteDocument/node_modules/@smithy/util-endpoints/dist-cjs/types/RuleSetObject.js new file mode 100644 index 0000000..0440577 --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@smithy/util-endpoints/dist-cjs/types/RuleSetObject.js @@ -0,0 +1 @@ +module.exports = require("../index.js"); \ No newline at end of file diff --git a/amplify/functions/deleteDocument/node_modules/@smithy/util-endpoints/dist-cjs/types/TreeRuleObject.js b/amplify/functions/deleteDocument/node_modules/@smithy/util-endpoints/dist-cjs/types/TreeRuleObject.js 
new file mode 100644 index 0000000..0440577 --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@smithy/util-endpoints/dist-cjs/types/TreeRuleObject.js @@ -0,0 +1 @@ +module.exports = require("../index.js"); \ No newline at end of file diff --git a/amplify/functions/deleteDocument/node_modules/@smithy/util-endpoints/dist-cjs/types/index.js b/amplify/functions/deleteDocument/node_modules/@smithy/util-endpoints/dist-cjs/types/index.js new file mode 100644 index 0000000..0440577 --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@smithy/util-endpoints/dist-cjs/types/index.js @@ -0,0 +1 @@ +module.exports = require("../index.js"); \ No newline at end of file diff --git a/amplify/functions/deleteDocument/node_modules/@smithy/util-endpoints/dist-cjs/types/shared.js b/amplify/functions/deleteDocument/node_modules/@smithy/util-endpoints/dist-cjs/types/shared.js new file mode 100644 index 0000000..0440577 --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@smithy/util-endpoints/dist-cjs/types/shared.js @@ -0,0 +1 @@ +module.exports = require("../index.js"); \ No newline at end of file diff --git a/amplify/functions/deleteDocument/node_modules/@smithy/util-endpoints/dist-cjs/utils/callFunction.js b/amplify/functions/deleteDocument/node_modules/@smithy/util-endpoints/dist-cjs/utils/callFunction.js new file mode 100644 index 0000000..0440577 --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@smithy/util-endpoints/dist-cjs/utils/callFunction.js @@ -0,0 +1 @@ +module.exports = require("../index.js"); \ No newline at end of file diff --git a/amplify/functions/deleteDocument/node_modules/@smithy/util-endpoints/dist-cjs/utils/customEndpointFunctions.js b/amplify/functions/deleteDocument/node_modules/@smithy/util-endpoints/dist-cjs/utils/customEndpointFunctions.js new file mode 100644 index 0000000..0440577 --- /dev/null +++ 
b/amplify/functions/deleteDocument/node_modules/@smithy/util-endpoints/dist-cjs/utils/customEndpointFunctions.js @@ -0,0 +1 @@ +module.exports = require("../index.js"); \ No newline at end of file diff --git a/amplify/functions/deleteDocument/node_modules/@smithy/util-endpoints/dist-cjs/utils/endpointFunctions.js b/amplify/functions/deleteDocument/node_modules/@smithy/util-endpoints/dist-cjs/utils/endpointFunctions.js new file mode 100644 index 0000000..0440577 --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@smithy/util-endpoints/dist-cjs/utils/endpointFunctions.js @@ -0,0 +1 @@ +module.exports = require("../index.js"); \ No newline at end of file diff --git a/amplify/functions/deleteDocument/node_modules/@smithy/util-endpoints/dist-cjs/utils/evaluateCondition.js b/amplify/functions/deleteDocument/node_modules/@smithy/util-endpoints/dist-cjs/utils/evaluateCondition.js new file mode 100644 index 0000000..0440577 --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@smithy/util-endpoints/dist-cjs/utils/evaluateCondition.js @@ -0,0 +1 @@ +module.exports = require("../index.js"); \ No newline at end of file diff --git a/amplify/functions/deleteDocument/node_modules/@smithy/util-endpoints/dist-cjs/utils/evaluateConditions.js b/amplify/functions/deleteDocument/node_modules/@smithy/util-endpoints/dist-cjs/utils/evaluateConditions.js new file mode 100644 index 0000000..0440577 --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@smithy/util-endpoints/dist-cjs/utils/evaluateConditions.js @@ -0,0 +1 @@ +module.exports = require("../index.js"); \ No newline at end of file diff --git a/amplify/functions/deleteDocument/node_modules/@smithy/util-endpoints/dist-cjs/utils/evaluateEndpointRule.js b/amplify/functions/deleteDocument/node_modules/@smithy/util-endpoints/dist-cjs/utils/evaluateEndpointRule.js new file mode 100644 index 0000000..0440577 --- /dev/null +++ 
b/amplify/functions/deleteDocument/node_modules/@smithy/util-endpoints/dist-cjs/utils/evaluateEndpointRule.js @@ -0,0 +1 @@ +module.exports = require("../index.js"); \ No newline at end of file diff --git a/amplify/functions/deleteDocument/node_modules/@smithy/util-endpoints/dist-cjs/utils/evaluateErrorRule.js b/amplify/functions/deleteDocument/node_modules/@smithy/util-endpoints/dist-cjs/utils/evaluateErrorRule.js new file mode 100644 index 0000000..0440577 --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@smithy/util-endpoints/dist-cjs/utils/evaluateErrorRule.js @@ -0,0 +1 @@ +module.exports = require("../index.js"); \ No newline at end of file diff --git a/amplify/functions/deleteDocument/node_modules/@smithy/util-endpoints/dist-cjs/utils/evaluateExpression.js b/amplify/functions/deleteDocument/node_modules/@smithy/util-endpoints/dist-cjs/utils/evaluateExpression.js new file mode 100644 index 0000000..0440577 --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@smithy/util-endpoints/dist-cjs/utils/evaluateExpression.js @@ -0,0 +1 @@ +module.exports = require("../index.js"); \ No newline at end of file diff --git a/amplify/functions/deleteDocument/node_modules/@smithy/util-endpoints/dist-cjs/utils/evaluateRules.js b/amplify/functions/deleteDocument/node_modules/@smithy/util-endpoints/dist-cjs/utils/evaluateRules.js new file mode 100644 index 0000000..0440577 --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@smithy/util-endpoints/dist-cjs/utils/evaluateRules.js @@ -0,0 +1 @@ +module.exports = require("../index.js"); \ No newline at end of file diff --git a/amplify/functions/deleteDocument/node_modules/@smithy/util-endpoints/dist-cjs/utils/evaluateTemplate.js b/amplify/functions/deleteDocument/node_modules/@smithy/util-endpoints/dist-cjs/utils/evaluateTemplate.js new file mode 100644 index 0000000..0440577 --- /dev/null +++ 
b/amplify/functions/deleteDocument/node_modules/@smithy/util-endpoints/dist-cjs/utils/evaluateTemplate.js @@ -0,0 +1 @@ +module.exports = require("../index.js"); \ No newline at end of file diff --git a/amplify/functions/deleteDocument/node_modules/@smithy/util-endpoints/dist-cjs/utils/evaluateTreeRule.js b/amplify/functions/deleteDocument/node_modules/@smithy/util-endpoints/dist-cjs/utils/evaluateTreeRule.js new file mode 100644 index 0000000..0440577 --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@smithy/util-endpoints/dist-cjs/utils/evaluateTreeRule.js @@ -0,0 +1 @@ +module.exports = require("../index.js"); \ No newline at end of file diff --git a/amplify/functions/deleteDocument/node_modules/@smithy/util-endpoints/dist-cjs/utils/getEndpointHeaders.js b/amplify/functions/deleteDocument/node_modules/@smithy/util-endpoints/dist-cjs/utils/getEndpointHeaders.js new file mode 100644 index 0000000..0440577 --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@smithy/util-endpoints/dist-cjs/utils/getEndpointHeaders.js @@ -0,0 +1 @@ +module.exports = require("../index.js"); \ No newline at end of file diff --git a/amplify/functions/deleteDocument/node_modules/@smithy/util-endpoints/dist-cjs/utils/getEndpointProperties.js b/amplify/functions/deleteDocument/node_modules/@smithy/util-endpoints/dist-cjs/utils/getEndpointProperties.js new file mode 100644 index 0000000..0440577 --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@smithy/util-endpoints/dist-cjs/utils/getEndpointProperties.js @@ -0,0 +1 @@ +module.exports = require("../index.js"); \ No newline at end of file diff --git a/amplify/functions/deleteDocument/node_modules/@smithy/util-endpoints/dist-cjs/utils/getEndpointProperty.js b/amplify/functions/deleteDocument/node_modules/@smithy/util-endpoints/dist-cjs/utils/getEndpointProperty.js new file mode 100644 index 0000000..0440577 --- /dev/null +++ 
b/amplify/functions/deleteDocument/node_modules/@smithy/util-endpoints/dist-cjs/utils/getEndpointProperty.js @@ -0,0 +1 @@ +module.exports = require("../index.js"); \ No newline at end of file diff --git a/amplify/functions/deleteDocument/node_modules/@smithy/util-endpoints/dist-cjs/utils/getEndpointUrl.js b/amplify/functions/deleteDocument/node_modules/@smithy/util-endpoints/dist-cjs/utils/getEndpointUrl.js new file mode 100644 index 0000000..0440577 --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@smithy/util-endpoints/dist-cjs/utils/getEndpointUrl.js @@ -0,0 +1 @@ +module.exports = require("../index.js"); \ No newline at end of file diff --git a/amplify/functions/deleteDocument/node_modules/@smithy/util-endpoints/dist-cjs/utils/getReferenceValue.js b/amplify/functions/deleteDocument/node_modules/@smithy/util-endpoints/dist-cjs/utils/getReferenceValue.js new file mode 100644 index 0000000..0440577 --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@smithy/util-endpoints/dist-cjs/utils/getReferenceValue.js @@ -0,0 +1 @@ +module.exports = require("../index.js"); \ No newline at end of file diff --git a/amplify/functions/deleteDocument/node_modules/@smithy/util-endpoints/dist-cjs/utils/index.js b/amplify/functions/deleteDocument/node_modules/@smithy/util-endpoints/dist-cjs/utils/index.js new file mode 100644 index 0000000..0440577 --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@smithy/util-endpoints/dist-cjs/utils/index.js @@ -0,0 +1 @@ +module.exports = require("../index.js"); \ No newline at end of file diff --git a/amplify/functions/deleteDocument/node_modules/@smithy/util-endpoints/dist-es/cache/EndpointCache.js b/amplify/functions/deleteDocument/node_modules/@smithy/util-endpoints/dist-es/cache/EndpointCache.js new file mode 100644 index 0000000..ddc7b0d --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@smithy/util-endpoints/dist-es/cache/EndpointCache.js @@ -0,0 +1,49 @@ +export class 
EndpointCache { + constructor({ size, params }) { + this.data = new Map(); + this.parameters = []; + this.capacity = size ?? 50; + if (params) { + this.parameters = params; + } + } + get(endpointParams, resolver) { + const key = this.hash(endpointParams); + if (key === false) { + return resolver(); + } + if (!this.data.has(key)) { + if (this.data.size > this.capacity + 10) { + const keys = this.data.keys(); + let i = 0; + while (true) { + const { value, done } = keys.next(); + this.data.delete(value); + if (done || ++i > 10) { + break; + } + } + } + this.data.set(key, resolver()); + } + return this.data.get(key); + } + size() { + return this.data.size; + } + hash(endpointParams) { + let buffer = ""; + const { parameters } = this; + if (parameters.length === 0) { + return false; + } + for (const param of parameters) { + const val = String(endpointParams[param] ?? ""); + if (val.includes("|;")) { + return false; + } + buffer += val + "|;"; + } + return buffer; + } +} diff --git a/amplify/functions/deleteDocument/node_modules/@smithy/util-endpoints/dist-es/debug/debugId.js b/amplify/functions/deleteDocument/node_modules/@smithy/util-endpoints/dist-es/debug/debugId.js new file mode 100644 index 0000000..0d4e27e --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@smithy/util-endpoints/dist-es/debug/debugId.js @@ -0,0 +1 @@ +export const debugId = "endpoints"; diff --git a/amplify/functions/deleteDocument/node_modules/@smithy/util-endpoints/dist-es/debug/index.js b/amplify/functions/deleteDocument/node_modules/@smithy/util-endpoints/dist-es/debug/index.js new file mode 100644 index 0000000..70d3b15 --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@smithy/util-endpoints/dist-es/debug/index.js @@ -0,0 +1,2 @@ +export * from "./debugId"; +export * from "./toDebugString"; diff --git a/amplify/functions/deleteDocument/node_modules/@smithy/util-endpoints/dist-es/debug/toDebugString.js 
b/amplify/functions/deleteDocument/node_modules/@smithy/util-endpoints/dist-es/debug/toDebugString.js new file mode 100644 index 0000000..33c8fcb --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@smithy/util-endpoints/dist-es/debug/toDebugString.js @@ -0,0 +1,12 @@ +export function toDebugString(input) { + if (typeof input !== "object" || input == null) { + return input; + } + if ("ref" in input) { + return `$${toDebugString(input.ref)}`; + } + if ("fn" in input) { + return `${input.fn}(${(input.argv || []).map(toDebugString).join(", ")})`; + } + return JSON.stringify(input, null, 2); +} diff --git a/amplify/functions/deleteDocument/node_modules/@smithy/util-endpoints/dist-es/getEndpointUrlConfig.js b/amplify/functions/deleteDocument/node_modules/@smithy/util-endpoints/dist-es/getEndpointUrlConfig.js new file mode 100644 index 0000000..5069030 --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@smithy/util-endpoints/dist-es/getEndpointUrlConfig.js @@ -0,0 +1,21 @@ +const ENV_ENDPOINT_URL = "AWS_ENDPOINT_URL"; +const CONFIG_ENDPOINT_URL = "endpoint_url"; +export const getEndpointUrlConfig = (serviceId) => ({ + environmentVariableSelector: (env) => { + const serviceEndpointUrlSections = [ENV_ENDPOINT_URL, ...serviceId.split(" ").map((w) => w.toUpperCase())]; + const serviceEndpointUrl = env[serviceEndpointUrlSections.join("_")]; + if (serviceEndpointUrl) + return serviceEndpointUrl; + const endpointUrl = env[ENV_ENDPOINT_URL]; + if (endpointUrl) + return endpointUrl; + return undefined; + }, + configFileSelector: (profile) => { + const endpointUrl = profile[CONFIG_ENDPOINT_URL]; + if (endpointUrl) + return endpointUrl; + return undefined; + }, + default: undefined, +}); diff --git a/amplify/functions/deleteDocument/node_modules/@smithy/util-endpoints/dist-es/index.js b/amplify/functions/deleteDocument/node_modules/@smithy/util-endpoints/dist-es/index.js new file mode 100644 index 0000000..c39ed2b --- /dev/null +++ 
b/amplify/functions/deleteDocument/node_modules/@smithy/util-endpoints/dist-es/index.js @@ -0,0 +1,6 @@ +export * from "./cache/EndpointCache"; +export * from "./lib/isIpAddress"; +export * from "./lib/isValidHostLabel"; +export * from "./utils/customEndpointFunctions"; +export * from "./resolveEndpoint"; +export * from "./types"; diff --git a/amplify/functions/deleteDocument/node_modules/@smithy/util-endpoints/dist-es/lib/booleanEquals.js b/amplify/functions/deleteDocument/node_modules/@smithy/util-endpoints/dist-es/lib/booleanEquals.js new file mode 100644 index 0000000..730cbd3 --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@smithy/util-endpoints/dist-es/lib/booleanEquals.js @@ -0,0 +1 @@ +export const booleanEquals = (value1, value2) => value1 === value2; diff --git a/amplify/functions/deleteDocument/node_modules/@smithy/util-endpoints/dist-es/lib/getAttr.js b/amplify/functions/deleteDocument/node_modules/@smithy/util-endpoints/dist-es/lib/getAttr.js new file mode 100644 index 0000000..d77f165 --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@smithy/util-endpoints/dist-es/lib/getAttr.js @@ -0,0 +1,11 @@ +import { EndpointError } from "../types"; +import { getAttrPathList } from "./getAttrPathList"; +export const getAttr = (value, path) => getAttrPathList(path).reduce((acc, index) => { + if (typeof acc !== "object") { + throw new EndpointError(`Index '${index}' in '${path}' not found in '${JSON.stringify(value)}'`); + } + else if (Array.isArray(acc)) { + return acc[parseInt(index)]; + } + return acc[index]; +}, value); diff --git a/amplify/functions/deleteDocument/node_modules/@smithy/util-endpoints/dist-es/lib/getAttrPathList.js b/amplify/functions/deleteDocument/node_modules/@smithy/util-endpoints/dist-es/lib/getAttrPathList.js new file mode 100644 index 0000000..5817a2d --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@smithy/util-endpoints/dist-es/lib/getAttrPathList.js @@ -0,0 +1,25 @@ +import { 
EndpointError } from "../types"; +export const getAttrPathList = (path) => { + const parts = path.split("."); + const pathList = []; + for (const part of parts) { + const squareBracketIndex = part.indexOf("["); + if (squareBracketIndex !== -1) { + if (part.indexOf("]") !== part.length - 1) { + throw new EndpointError(`Path: '${path}' does not end with ']'`); + } + const arrayIndex = part.slice(squareBracketIndex + 1, -1); + if (Number.isNaN(parseInt(arrayIndex))) { + throw new EndpointError(`Invalid array index: '${arrayIndex}' in path: '${path}'`); + } + if (squareBracketIndex !== 0) { + pathList.push(part.slice(0, squareBracketIndex)); + } + pathList.push(arrayIndex); + } + else { + pathList.push(part); + } + } + return pathList; +}; diff --git a/amplify/functions/deleteDocument/node_modules/@smithy/util-endpoints/dist-es/lib/index.js b/amplify/functions/deleteDocument/node_modules/@smithy/util-endpoints/dist-es/lib/index.js new file mode 100644 index 0000000..99a0844 --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@smithy/util-endpoints/dist-es/lib/index.js @@ -0,0 +1,9 @@ +export * from "./booleanEquals"; +export * from "./getAttr"; +export * from "./isSet"; +export * from "./isValidHostLabel"; +export * from "./not"; +export * from "./parseURL"; +export * from "./stringEquals"; +export * from "./substring"; +export * from "./uriEncode"; diff --git a/amplify/functions/deleteDocument/node_modules/@smithy/util-endpoints/dist-es/lib/isIpAddress.js b/amplify/functions/deleteDocument/node_modules/@smithy/util-endpoints/dist-es/lib/isIpAddress.js new file mode 100644 index 0000000..20be5a3 --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@smithy/util-endpoints/dist-es/lib/isIpAddress.js @@ -0,0 +1,2 @@ +const IP_V4_REGEX = new RegExp(`^(?:25[0-5]|2[0-4]\\d|1\\d\\d|[1-9]\\d|\\d)(?:\\.(?:25[0-5]|2[0-4]\\d|1\\d\\d|[1-9]\\d|\\d)){3}$`); +export const isIpAddress = (value) => IP_V4_REGEX.test(value) || (value.startsWith("[") && 
value.endsWith("]")); diff --git a/amplify/functions/deleteDocument/node_modules/@smithy/util-endpoints/dist-es/lib/isSet.js b/amplify/functions/deleteDocument/node_modules/@smithy/util-endpoints/dist-es/lib/isSet.js new file mode 100644 index 0000000..83ccc7a --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@smithy/util-endpoints/dist-es/lib/isSet.js @@ -0,0 +1 @@ +export const isSet = (value) => value != null; diff --git a/amplify/functions/deleteDocument/node_modules/@smithy/util-endpoints/dist-es/lib/isValidHostLabel.js b/amplify/functions/deleteDocument/node_modules/@smithy/util-endpoints/dist-es/lib/isValidHostLabel.js new file mode 100644 index 0000000..7858598 --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@smithy/util-endpoints/dist-es/lib/isValidHostLabel.js @@ -0,0 +1,13 @@ +const VALID_HOST_LABEL_REGEX = new RegExp(`^(?!.*-$)(?!-)[a-zA-Z0-9-]{1,63}$`); +export const isValidHostLabel = (value, allowSubDomains = false) => { + if (!allowSubDomains) { + return VALID_HOST_LABEL_REGEX.test(value); + } + const labels = value.split("."); + for (const label of labels) { + if (!isValidHostLabel(label)) { + return false; + } + } + return true; +}; diff --git a/amplify/functions/deleteDocument/node_modules/@smithy/util-endpoints/dist-es/lib/not.js b/amplify/functions/deleteDocument/node_modules/@smithy/util-endpoints/dist-es/lib/not.js new file mode 100644 index 0000000..180e5dd --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@smithy/util-endpoints/dist-es/lib/not.js @@ -0,0 +1 @@ +export const not = (value) => !value; diff --git a/amplify/functions/deleteDocument/node_modules/@smithy/util-endpoints/dist-es/lib/parseURL.js b/amplify/functions/deleteDocument/node_modules/@smithy/util-endpoints/dist-es/lib/parseURL.js new file mode 100644 index 0000000..79f9b24 --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@smithy/util-endpoints/dist-es/lib/parseURL.js @@ -0,0 +1,51 @@ +import { 
EndpointURLScheme } from "@smithy/types"; +import { isIpAddress } from "./isIpAddress"; +const DEFAULT_PORTS = { + [EndpointURLScheme.HTTP]: 80, + [EndpointURLScheme.HTTPS]: 443, +}; +export const parseURL = (value) => { + const whatwgURL = (() => { + try { + if (value instanceof URL) { + return value; + } + if (typeof value === "object" && "hostname" in value) { + const { hostname, port, protocol = "", path = "", query = {} } = value; + const url = new URL(`${protocol}//${hostname}${port ? `:${port}` : ""}${path}`); + url.search = Object.entries(query) + .map(([k, v]) => `${k}=${v}`) + .join("&"); + return url; + } + return new URL(value); + } + catch (error) { + return null; + } + })(); + if (!whatwgURL) { + console.error(`Unable to parse ${JSON.stringify(value)} as a whatwg URL.`); + return null; + } + const urlString = whatwgURL.href; + const { host, hostname, pathname, protocol, search } = whatwgURL; + if (search) { + return null; + } + const scheme = protocol.slice(0, -1); + if (!Object.values(EndpointURLScheme).includes(scheme)) { + return null; + } + const isIp = isIpAddress(hostname); + const inputContainsDefaultPort = urlString.includes(`${host}:${DEFAULT_PORTS[scheme]}`) || + (typeof value === "string" && value.includes(`${host}:${DEFAULT_PORTS[scheme]}`)); + const authority = `${host}${inputContainsDefaultPort ? `:${DEFAULT_PORTS[scheme]}` : ``}`; + return { + scheme, + authority, + path: pathname, + normalizedPath: pathname.endsWith("/") ? 
pathname : `${pathname}/`, + isIp, + }; +}; diff --git a/amplify/functions/deleteDocument/node_modules/@smithy/util-endpoints/dist-es/lib/stringEquals.js b/amplify/functions/deleteDocument/node_modules/@smithy/util-endpoints/dist-es/lib/stringEquals.js new file mode 100644 index 0000000..ee41426 --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@smithy/util-endpoints/dist-es/lib/stringEquals.js @@ -0,0 +1 @@ +export const stringEquals = (value1, value2) => value1 === value2; diff --git a/amplify/functions/deleteDocument/node_modules/@smithy/util-endpoints/dist-es/lib/substring.js b/amplify/functions/deleteDocument/node_modules/@smithy/util-endpoints/dist-es/lib/substring.js new file mode 100644 index 0000000..942dde4 --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@smithy/util-endpoints/dist-es/lib/substring.js @@ -0,0 +1,9 @@ +export const substring = (input, start, stop, reverse) => { + if (start >= stop || input.length < stop) { + return null; + } + if (!reverse) { + return input.substring(start, stop); + } + return input.substring(input.length - stop, input.length - start); +}; diff --git a/amplify/functions/deleteDocument/node_modules/@smithy/util-endpoints/dist-es/lib/uriEncode.js b/amplify/functions/deleteDocument/node_modules/@smithy/util-endpoints/dist-es/lib/uriEncode.js new file mode 100644 index 0000000..ae226dc --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@smithy/util-endpoints/dist-es/lib/uriEncode.js @@ -0,0 +1 @@ +export const uriEncode = (value) => encodeURIComponent(value).replace(/[!*'()]/g, (c) => `%${c.charCodeAt(0).toString(16).toUpperCase()}`); diff --git a/amplify/functions/deleteDocument/node_modules/@smithy/util-endpoints/dist-es/resolveEndpoint.js b/amplify/functions/deleteDocument/node_modules/@smithy/util-endpoints/dist-es/resolveEndpoint.js new file mode 100644 index 0000000..ac12096 --- /dev/null +++ 
b/amplify/functions/deleteDocument/node_modules/@smithy/util-endpoints/dist-es/resolveEndpoint.js @@ -0,0 +1,27 @@ +import { debugId, toDebugString } from "./debug"; +import { EndpointError } from "./types"; +import { evaluateRules } from "./utils"; +export const resolveEndpoint = (ruleSetObject, options) => { + const { endpointParams, logger } = options; + const { parameters, rules } = ruleSetObject; + options.logger?.debug?.(`${debugId} Initial EndpointParams: ${toDebugString(endpointParams)}`); + const paramsWithDefault = Object.entries(parameters) + .filter(([, v]) => v.default != null) + .map(([k, v]) => [k, v.default]); + if (paramsWithDefault.length > 0) { + for (const [paramKey, paramDefaultValue] of paramsWithDefault) { + endpointParams[paramKey] = endpointParams[paramKey] ?? paramDefaultValue; + } + } + const requiredParams = Object.entries(parameters) + .filter(([, v]) => v.required) + .map(([k]) => k); + for (const requiredParam of requiredParams) { + if (endpointParams[requiredParam] == null) { + throw new EndpointError(`Missing required parameter: '${requiredParam}'`); + } + } + const endpoint = evaluateRules(rules, { endpointParams, logger, referenceRecord: {} }); + options.logger?.debug?.(`${debugId} Resolved endpoint: ${toDebugString(endpoint)}`); + return endpoint; +}; diff --git a/amplify/functions/deleteDocument/node_modules/@smithy/util-endpoints/dist-es/types/EndpointError.js b/amplify/functions/deleteDocument/node_modules/@smithy/util-endpoints/dist-es/types/EndpointError.js new file mode 100644 index 0000000..1ce597d --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@smithy/util-endpoints/dist-es/types/EndpointError.js @@ -0,0 +1,6 @@ +export class EndpointError extends Error { + constructor(message) { + super(message); + this.name = "EndpointError"; + } +} diff --git a/amplify/functions/deleteDocument/node_modules/@smithy/util-endpoints/dist-es/types/EndpointFunctions.js 
b/amplify/functions/deleteDocument/node_modules/@smithy/util-endpoints/dist-es/types/EndpointFunctions.js new file mode 100644 index 0000000..cb0ff5c --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@smithy/util-endpoints/dist-es/types/EndpointFunctions.js @@ -0,0 +1 @@ +export {}; diff --git a/amplify/functions/deleteDocument/node_modules/@smithy/util-endpoints/dist-es/types/EndpointRuleObject.js b/amplify/functions/deleteDocument/node_modules/@smithy/util-endpoints/dist-es/types/EndpointRuleObject.js new file mode 100644 index 0000000..cb0ff5c --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@smithy/util-endpoints/dist-es/types/EndpointRuleObject.js @@ -0,0 +1 @@ +export {}; diff --git a/amplify/functions/deleteDocument/node_modules/@smithy/util-endpoints/dist-es/types/ErrorRuleObject.js b/amplify/functions/deleteDocument/node_modules/@smithy/util-endpoints/dist-es/types/ErrorRuleObject.js new file mode 100644 index 0000000..cb0ff5c --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@smithy/util-endpoints/dist-es/types/ErrorRuleObject.js @@ -0,0 +1 @@ +export {}; diff --git a/amplify/functions/deleteDocument/node_modules/@smithy/util-endpoints/dist-es/types/RuleSetObject.js b/amplify/functions/deleteDocument/node_modules/@smithy/util-endpoints/dist-es/types/RuleSetObject.js new file mode 100644 index 0000000..cb0ff5c --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@smithy/util-endpoints/dist-es/types/RuleSetObject.js @@ -0,0 +1 @@ +export {}; diff --git a/amplify/functions/deleteDocument/node_modules/@smithy/util-endpoints/dist-es/types/TreeRuleObject.js b/amplify/functions/deleteDocument/node_modules/@smithy/util-endpoints/dist-es/types/TreeRuleObject.js new file mode 100644 index 0000000..cb0ff5c --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@smithy/util-endpoints/dist-es/types/TreeRuleObject.js @@ -0,0 +1 @@ +export {}; diff --git 
a/amplify/functions/deleteDocument/node_modules/@smithy/util-endpoints/dist-es/types/index.js b/amplify/functions/deleteDocument/node_modules/@smithy/util-endpoints/dist-es/types/index.js new file mode 100644 index 0000000..a49f984 --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@smithy/util-endpoints/dist-es/types/index.js @@ -0,0 +1,7 @@ +export * from "./EndpointError"; +export * from "./EndpointFunctions"; +export * from "./EndpointRuleObject"; +export * from "./ErrorRuleObject"; +export * from "./RuleSetObject"; +export * from "./TreeRuleObject"; +export * from "./shared"; diff --git a/amplify/functions/deleteDocument/node_modules/@smithy/util-endpoints/dist-es/types/shared.js b/amplify/functions/deleteDocument/node_modules/@smithy/util-endpoints/dist-es/types/shared.js new file mode 100644 index 0000000..cb0ff5c --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@smithy/util-endpoints/dist-es/types/shared.js @@ -0,0 +1 @@ +export {}; diff --git a/amplify/functions/deleteDocument/node_modules/@smithy/util-endpoints/dist-es/utils/callFunction.js b/amplify/functions/deleteDocument/node_modules/@smithy/util-endpoints/dist-es/utils/callFunction.js new file mode 100644 index 0000000..bf0747a --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@smithy/util-endpoints/dist-es/utils/callFunction.js @@ -0,0 +1,11 @@ +import { customEndpointFunctions } from "./customEndpointFunctions"; +import { endpointFunctions } from "./endpointFunctions"; +import { evaluateExpression } from "./evaluateExpression"; +export const callFunction = ({ fn, argv }, options) => { + const evaluatedArgs = argv.map((arg) => ["boolean", "number"].includes(typeof arg) ? 
arg : evaluateExpression(arg, "arg", options)); + const fnSegments = fn.split("."); + if (fnSegments[0] in customEndpointFunctions && fnSegments[1] != null) { + return customEndpointFunctions[fnSegments[0]][fnSegments[1]](...evaluatedArgs); + } + return endpointFunctions[fn](...evaluatedArgs); +}; diff --git a/amplify/functions/deleteDocument/node_modules/@smithy/util-endpoints/dist-es/utils/customEndpointFunctions.js b/amplify/functions/deleteDocument/node_modules/@smithy/util-endpoints/dist-es/utils/customEndpointFunctions.js new file mode 100644 index 0000000..0c26493 --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@smithy/util-endpoints/dist-es/utils/customEndpointFunctions.js @@ -0,0 +1 @@ +export const customEndpointFunctions = {}; diff --git a/amplify/functions/deleteDocument/node_modules/@smithy/util-endpoints/dist-es/utils/endpointFunctions.js b/amplify/functions/deleteDocument/node_modules/@smithy/util-endpoints/dist-es/utils/endpointFunctions.js new file mode 100644 index 0000000..e2215ff --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@smithy/util-endpoints/dist-es/utils/endpointFunctions.js @@ -0,0 +1,12 @@ +import { booleanEquals, getAttr, isSet, isValidHostLabel, not, parseURL, stringEquals, substring, uriEncode, } from "../lib"; +export const endpointFunctions = { + booleanEquals, + getAttr, + isSet, + isValidHostLabel, + not, + parseURL, + stringEquals, + substring, + uriEncode, +}; diff --git a/amplify/functions/deleteDocument/node_modules/@smithy/util-endpoints/dist-es/utils/evaluateCondition.js b/amplify/functions/deleteDocument/node_modules/@smithy/util-endpoints/dist-es/utils/evaluateCondition.js new file mode 100644 index 0000000..8e84f08 --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@smithy/util-endpoints/dist-es/utils/evaluateCondition.js @@ -0,0 +1,14 @@ +import { debugId, toDebugString } from "../debug"; +import { EndpointError } from "../types"; +import { callFunction } from 
"./callFunction"; +export const evaluateCondition = ({ assign, ...fnArgs }, options) => { + if (assign && assign in options.referenceRecord) { + throw new EndpointError(`'${assign}' is already defined in Reference Record.`); + } + const value = callFunction(fnArgs, options); + options.logger?.debug?.(`${debugId} evaluateCondition: ${toDebugString(fnArgs)} = ${toDebugString(value)}`); + return { + result: value === "" ? true : !!value, + ...(assign != null && { toAssign: { name: assign, value } }), + }; +}; diff --git a/amplify/functions/deleteDocument/node_modules/@smithy/util-endpoints/dist-es/utils/evaluateConditions.js b/amplify/functions/deleteDocument/node_modules/@smithy/util-endpoints/dist-es/utils/evaluateConditions.js new file mode 100644 index 0000000..5542076 --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@smithy/util-endpoints/dist-es/utils/evaluateConditions.js @@ -0,0 +1,22 @@ +import { debugId, toDebugString } from "../debug"; +import { evaluateCondition } from "./evaluateCondition"; +export const evaluateConditions = (conditions = [], options) => { + const conditionsReferenceRecord = {}; + for (const condition of conditions) { + const { result, toAssign } = evaluateCondition(condition, { + ...options, + referenceRecord: { + ...options.referenceRecord, + ...conditionsReferenceRecord, + }, + }); + if (!result) { + return { result }; + } + if (toAssign) { + conditionsReferenceRecord[toAssign.name] = toAssign.value; + options.logger?.debug?.(`${debugId} assign: ${toAssign.name} := ${toDebugString(toAssign.value)}`); + } + } + return { result: true, referenceRecord: conditionsReferenceRecord }; +}; diff --git a/amplify/functions/deleteDocument/node_modules/@smithy/util-endpoints/dist-es/utils/evaluateEndpointRule.js b/amplify/functions/deleteDocument/node_modules/@smithy/util-endpoints/dist-es/utils/evaluateEndpointRule.js new file mode 100644 index 0000000..ba6307b --- /dev/null +++ 
b/amplify/functions/deleteDocument/node_modules/@smithy/util-endpoints/dist-es/utils/evaluateEndpointRule.js @@ -0,0 +1,27 @@ +import { debugId, toDebugString } from "../debug"; +import { evaluateConditions } from "./evaluateConditions"; +import { getEndpointHeaders } from "./getEndpointHeaders"; +import { getEndpointProperties } from "./getEndpointProperties"; +import { getEndpointUrl } from "./getEndpointUrl"; +export const evaluateEndpointRule = (endpointRule, options) => { + const { conditions, endpoint } = endpointRule; + const { result, referenceRecord } = evaluateConditions(conditions, options); + if (!result) { + return; + } + const endpointRuleOptions = { + ...options, + referenceRecord: { ...options.referenceRecord, ...referenceRecord }, + }; + const { url, properties, headers } = endpoint; + options.logger?.debug?.(`${debugId} Resolving endpoint from template: ${toDebugString(endpoint)}`); + return { + ...(headers != undefined && { + headers: getEndpointHeaders(headers, endpointRuleOptions), + }), + ...(properties != undefined && { + properties: getEndpointProperties(properties, endpointRuleOptions), + }), + url: getEndpointUrl(url, endpointRuleOptions), + }; +}; diff --git a/amplify/functions/deleteDocument/node_modules/@smithy/util-endpoints/dist-es/utils/evaluateErrorRule.js b/amplify/functions/deleteDocument/node_modules/@smithy/util-endpoints/dist-es/utils/evaluateErrorRule.js new file mode 100644 index 0000000..1a57860 --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@smithy/util-endpoints/dist-es/utils/evaluateErrorRule.js @@ -0,0 +1,14 @@ +import { EndpointError } from "../types"; +import { evaluateConditions } from "./evaluateConditions"; +import { evaluateExpression } from "./evaluateExpression"; +export const evaluateErrorRule = (errorRule, options) => { + const { conditions, error } = errorRule; + const { result, referenceRecord } = evaluateConditions(conditions, options); + if (!result) { + return; + } + throw new 
EndpointError(evaluateExpression(error, "Error", { + ...options, + referenceRecord: { ...options.referenceRecord, ...referenceRecord }, + })); +}; diff --git a/amplify/functions/deleteDocument/node_modules/@smithy/util-endpoints/dist-es/utils/evaluateExpression.js b/amplify/functions/deleteDocument/node_modules/@smithy/util-endpoints/dist-es/utils/evaluateExpression.js new file mode 100644 index 0000000..7f69658 --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@smithy/util-endpoints/dist-es/utils/evaluateExpression.js @@ -0,0 +1,16 @@ +import { EndpointError } from "../types"; +import { callFunction } from "./callFunction"; +import { evaluateTemplate } from "./evaluateTemplate"; +import { getReferenceValue } from "./getReferenceValue"; +export const evaluateExpression = (obj, keyName, options) => { + if (typeof obj === "string") { + return evaluateTemplate(obj, options); + } + else if (obj["fn"]) { + return callFunction(obj, options); + } + else if (obj["ref"]) { + return getReferenceValue(obj, options); + } + throw new EndpointError(`'${keyName}': ${String(obj)} is not a string, function or reference.`); +}; diff --git a/amplify/functions/deleteDocument/node_modules/@smithy/util-endpoints/dist-es/utils/evaluateRules.js b/amplify/functions/deleteDocument/node_modules/@smithy/util-endpoints/dist-es/utils/evaluateRules.js new file mode 100644 index 0000000..58a40a0 --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@smithy/util-endpoints/dist-es/utils/evaluateRules.js @@ -0,0 +1,27 @@ +import { EndpointError } from "../types"; +import { evaluateEndpointRule } from "./evaluateEndpointRule"; +import { evaluateErrorRule } from "./evaluateErrorRule"; +import { evaluateTreeRule } from "./evaluateTreeRule"; +export const evaluateRules = (rules, options) => { + for (const rule of rules) { + if (rule.type === "endpoint") { + const endpointOrUndefined = evaluateEndpointRule(rule, options); + if (endpointOrUndefined) { + return 
endpointOrUndefined; + } + } + else if (rule.type === "error") { + evaluateErrorRule(rule, options); + } + else if (rule.type === "tree") { + const endpointOrUndefined = evaluateTreeRule(rule, options); + if (endpointOrUndefined) { + return endpointOrUndefined; + } + } + else { + throw new EndpointError(`Unknown endpoint rule: ${rule}`); + } + } + throw new EndpointError(`Rules evaluation failed`); +}; diff --git a/amplify/functions/deleteDocument/node_modules/@smithy/util-endpoints/dist-es/utils/evaluateTemplate.js b/amplify/functions/deleteDocument/node_modules/@smithy/util-endpoints/dist-es/utils/evaluateTemplate.js new file mode 100644 index 0000000..7005809 --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@smithy/util-endpoints/dist-es/utils/evaluateTemplate.js @@ -0,0 +1,36 @@ +import { getAttr } from "../lib"; +export const evaluateTemplate = (template, options) => { + const evaluatedTemplateArr = []; + const templateContext = { + ...options.endpointParams, + ...options.referenceRecord, + }; + let currentIndex = 0; + while (currentIndex < template.length) { + const openingBraceIndex = template.indexOf("{", currentIndex); + if (openingBraceIndex === -1) { + evaluatedTemplateArr.push(template.slice(currentIndex)); + break; + } + evaluatedTemplateArr.push(template.slice(currentIndex, openingBraceIndex)); + const closingBraceIndex = template.indexOf("}", openingBraceIndex); + if (closingBraceIndex === -1) { + evaluatedTemplateArr.push(template.slice(openingBraceIndex)); + break; + } + if (template[openingBraceIndex + 1] === "{" && template[closingBraceIndex + 1] === "}") { + evaluatedTemplateArr.push(template.slice(openingBraceIndex + 1, closingBraceIndex)); + currentIndex = closingBraceIndex + 2; + } + const parameterName = template.substring(openingBraceIndex + 1, closingBraceIndex); + if (parameterName.includes("#")) { + const [refName, attrName] = parameterName.split("#"); + evaluatedTemplateArr.push(getAttr(templateContext[refName], 
attrName)); + } + else { + evaluatedTemplateArr.push(templateContext[parameterName]); + } + currentIndex = closingBraceIndex + 1; + } + return evaluatedTemplateArr.join(""); +}; diff --git a/amplify/functions/deleteDocument/node_modules/@smithy/util-endpoints/dist-es/utils/evaluateTreeRule.js b/amplify/functions/deleteDocument/node_modules/@smithy/util-endpoints/dist-es/utils/evaluateTreeRule.js new file mode 100644 index 0000000..427c1fa --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@smithy/util-endpoints/dist-es/utils/evaluateTreeRule.js @@ -0,0 +1,13 @@ +import { evaluateConditions } from "./evaluateConditions"; +import { evaluateRules } from "./evaluateRules"; +export const evaluateTreeRule = (treeRule, options) => { + const { conditions, rules } = treeRule; + const { result, referenceRecord } = evaluateConditions(conditions, options); + if (!result) { + return; + } + return evaluateRules(rules, { + ...options, + referenceRecord: { ...options.referenceRecord, ...referenceRecord }, + }); +}; diff --git a/amplify/functions/deleteDocument/node_modules/@smithy/util-endpoints/dist-es/utils/getEndpointHeaders.js b/amplify/functions/deleteDocument/node_modules/@smithy/util-endpoints/dist-es/utils/getEndpointHeaders.js new file mode 100644 index 0000000..f94cf55 --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@smithy/util-endpoints/dist-es/utils/getEndpointHeaders.js @@ -0,0 +1,12 @@ +import { EndpointError } from "../types"; +import { evaluateExpression } from "./evaluateExpression"; +export const getEndpointHeaders = (headers, options) => Object.entries(headers).reduce((acc, [headerKey, headerVal]) => ({ + ...acc, + [headerKey]: headerVal.map((headerValEntry) => { + const processedExpr = evaluateExpression(headerValEntry, "Header value entry", options); + if (typeof processedExpr !== "string") { + throw new EndpointError(`Header '${headerKey}' value '${processedExpr}' is not a string`); + } + return processedExpr; + }), +}), {}); 
diff --git a/amplify/functions/deleteDocument/node_modules/@smithy/util-endpoints/dist-es/utils/getEndpointProperties.js b/amplify/functions/deleteDocument/node_modules/@smithy/util-endpoints/dist-es/utils/getEndpointProperties.js new file mode 100644 index 0000000..e7afe88 --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@smithy/util-endpoints/dist-es/utils/getEndpointProperties.js @@ -0,0 +1,5 @@ +import { getEndpointProperty } from "./getEndpointProperty"; +export const getEndpointProperties = (properties, options) => Object.entries(properties).reduce((acc, [propertyKey, propertyVal]) => ({ + ...acc, + [propertyKey]: getEndpointProperty(propertyVal, options), +}), {}); diff --git a/amplify/functions/deleteDocument/node_modules/@smithy/util-endpoints/dist-es/utils/getEndpointProperty.js b/amplify/functions/deleteDocument/node_modules/@smithy/util-endpoints/dist-es/utils/getEndpointProperty.js new file mode 100644 index 0000000..0600969 --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@smithy/util-endpoints/dist-es/utils/getEndpointProperty.js @@ -0,0 +1,21 @@ +import { EndpointError } from "../types"; +import { evaluateTemplate } from "./evaluateTemplate"; +import { getEndpointProperties } from "./getEndpointProperties"; +export const getEndpointProperty = (property, options) => { + if (Array.isArray(property)) { + return property.map((propertyEntry) => getEndpointProperty(propertyEntry, options)); + } + switch (typeof property) { + case "string": + return evaluateTemplate(property, options); + case "object": + if (property === null) { + throw new EndpointError(`Unexpected endpoint property: ${property}`); + } + return getEndpointProperties(property, options); + case "boolean": + return property; + default: + throw new EndpointError(`Unexpected endpoint property type: ${typeof property}`); + } +}; diff --git a/amplify/functions/deleteDocument/node_modules/@smithy/util-endpoints/dist-es/utils/getEndpointUrl.js 
b/amplify/functions/deleteDocument/node_modules/@smithy/util-endpoints/dist-es/utils/getEndpointUrl.js new file mode 100644 index 0000000..8f1301e --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@smithy/util-endpoints/dist-es/utils/getEndpointUrl.js @@ -0,0 +1,15 @@ +import { EndpointError } from "../types"; +import { evaluateExpression } from "./evaluateExpression"; +export const getEndpointUrl = (endpointUrl, options) => { + const expression = evaluateExpression(endpointUrl, "Endpoint URL", options); + if (typeof expression === "string") { + try { + return new URL(expression); + } + catch (error) { + console.error(`Failed to construct URL with ${expression}`, error); + throw error; + } + } + throw new EndpointError(`Endpoint URL must be a string, got ${typeof expression}`); +}; diff --git a/amplify/functions/deleteDocument/node_modules/@smithy/util-endpoints/dist-es/utils/getReferenceValue.js b/amplify/functions/deleteDocument/node_modules/@smithy/util-endpoints/dist-es/utils/getReferenceValue.js new file mode 100644 index 0000000..759f4d4 --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@smithy/util-endpoints/dist-es/utils/getReferenceValue.js @@ -0,0 +1,7 @@ +export const getReferenceValue = ({ ref }, options) => { + const referenceRecord = { + ...options.endpointParams, + ...options.referenceRecord, + }; + return referenceRecord[ref]; +}; diff --git a/amplify/functions/deleteDocument/node_modules/@smithy/util-endpoints/dist-es/utils/index.js b/amplify/functions/deleteDocument/node_modules/@smithy/util-endpoints/dist-es/utils/index.js new file mode 100644 index 0000000..b571d02 --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@smithy/util-endpoints/dist-es/utils/index.js @@ -0,0 +1,2 @@ +export * from "./customEndpointFunctions"; +export * from "./evaluateRules"; diff --git a/amplify/functions/deleteDocument/node_modules/@smithy/util-endpoints/dist-types/cache/EndpointCache.d.ts 
b/amplify/functions/deleteDocument/node_modules/@smithy/util-endpoints/dist-types/cache/EndpointCache.d.ts new file mode 100644 index 0000000..19a338f --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@smithy/util-endpoints/dist-types/cache/EndpointCache.d.ts @@ -0,0 +1,34 @@ +import type { EndpointParams, EndpointV2 } from "@smithy/types"; +/** + * @internal + * + * Cache for endpoint ruleSet resolution. + */ +export declare class EndpointCache { + private capacity; + private data; + private parameters; + /** + * @param [size] - desired average maximum capacity. A buffer of 10 additional keys will be allowed + * before keys are dropped. + * @param [params] - list of params to consider as part of the cache key. + * + * If the params list is not populated, no caching will happen. + * This may be out of order depending on how the object is created and arrives to this class. + */ + constructor({ size, params }: { + size?: number; + params?: string[]; + }); + /** + * @param endpointParams - query for endpoint. + * @param resolver - provider of the value if not present. + * @returns endpoint corresponding to the query. + */ + get(endpointParams: EndpointParams, resolver: () => EndpointV2): EndpointV2; + size(): number; + /** + * @returns cache key or false if not cachable. 
+ */ + private hash; +} diff --git a/amplify/functions/deleteDocument/node_modules/@smithy/util-endpoints/dist-types/debug/debugId.d.ts b/amplify/functions/deleteDocument/node_modules/@smithy/util-endpoints/dist-types/debug/debugId.d.ts new file mode 100644 index 0000000..d39f408 --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@smithy/util-endpoints/dist-types/debug/debugId.d.ts @@ -0,0 +1 @@ +export declare const debugId = "endpoints"; diff --git a/amplify/functions/deleteDocument/node_modules/@smithy/util-endpoints/dist-types/debug/index.d.ts b/amplify/functions/deleteDocument/node_modules/@smithy/util-endpoints/dist-types/debug/index.d.ts new file mode 100644 index 0000000..70d3b15 --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@smithy/util-endpoints/dist-types/debug/index.d.ts @@ -0,0 +1,2 @@ +export * from "./debugId"; +export * from "./toDebugString"; diff --git a/amplify/functions/deleteDocument/node_modules/@smithy/util-endpoints/dist-types/debug/toDebugString.d.ts b/amplify/functions/deleteDocument/node_modules/@smithy/util-endpoints/dist-types/debug/toDebugString.d.ts new file mode 100644 index 0000000..6bf1d3a --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@smithy/util-endpoints/dist-types/debug/toDebugString.d.ts @@ -0,0 +1,9 @@ +import { EndpointParameters, EndpointV2 } from "@smithy/types"; +import { GetAttrValue } from "../lib"; +import { EndpointObject, FunctionObject, FunctionReturn } from "../types"; +export declare function toDebugString(input: EndpointParameters): string; +export declare function toDebugString(input: EndpointV2): string; +export declare function toDebugString(input: GetAttrValue): string; +export declare function toDebugString(input: FunctionObject): string; +export declare function toDebugString(input: FunctionReturn): string; +export declare function toDebugString(input: EndpointObject): string; diff --git 
a/amplify/functions/deleteDocument/node_modules/@smithy/util-endpoints/dist-types/getEndpointUrlConfig.d.ts b/amplify/functions/deleteDocument/node_modules/@smithy/util-endpoints/dist-types/getEndpointUrlConfig.d.ts new file mode 100644 index 0000000..0971010 --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@smithy/util-endpoints/dist-types/getEndpointUrlConfig.d.ts @@ -0,0 +1,2 @@ +import { LoadedConfigSelectors } from "@smithy/node-config-provider"; +export declare const getEndpointUrlConfig: (serviceId: string) => LoadedConfigSelectors; diff --git a/amplify/functions/deleteDocument/node_modules/@smithy/util-endpoints/dist-types/index.d.ts b/amplify/functions/deleteDocument/node_modules/@smithy/util-endpoints/dist-types/index.d.ts new file mode 100644 index 0000000..c39ed2b --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@smithy/util-endpoints/dist-types/index.d.ts @@ -0,0 +1,6 @@ +export * from "./cache/EndpointCache"; +export * from "./lib/isIpAddress"; +export * from "./lib/isValidHostLabel"; +export * from "./utils/customEndpointFunctions"; +export * from "./resolveEndpoint"; +export * from "./types"; diff --git a/amplify/functions/deleteDocument/node_modules/@smithy/util-endpoints/dist-types/lib/booleanEquals.d.ts b/amplify/functions/deleteDocument/node_modules/@smithy/util-endpoints/dist-types/lib/booleanEquals.d.ts new file mode 100644 index 0000000..7eac561 --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@smithy/util-endpoints/dist-types/lib/booleanEquals.d.ts @@ -0,0 +1,5 @@ +/** + * Evaluates two boolean values value1 and value2 for equality and returns + * true if both values match. 
+ */ +export declare const booleanEquals: (value1: boolean, value2: boolean) => boolean; diff --git a/amplify/functions/deleteDocument/node_modules/@smithy/util-endpoints/dist-types/lib/getAttr.d.ts b/amplify/functions/deleteDocument/node_modules/@smithy/util-endpoints/dist-types/lib/getAttr.d.ts new file mode 100644 index 0000000..a8088c5 --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@smithy/util-endpoints/dist-types/lib/getAttr.d.ts @@ -0,0 +1,7 @@ +export type GetAttrValue = string | boolean | { + [key: string]: GetAttrValue; +} | Array; +/** + * Returns value corresponding to pathing string for an array or object. + */ +export declare const getAttr: (value: GetAttrValue, path: string) => GetAttrValue; diff --git a/amplify/functions/deleteDocument/node_modules/@smithy/util-endpoints/dist-types/lib/getAttrPathList.d.ts b/amplify/functions/deleteDocument/node_modules/@smithy/util-endpoints/dist-types/lib/getAttrPathList.d.ts new file mode 100644 index 0000000..e6c4979 --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@smithy/util-endpoints/dist-types/lib/getAttrPathList.d.ts @@ -0,0 +1,4 @@ +/** + * Parses path as a getAttr expression, returning a list of strings. 
+ */ +export declare const getAttrPathList: (path: string) => Array; diff --git a/amplify/functions/deleteDocument/node_modules/@smithy/util-endpoints/dist-types/lib/index.d.ts b/amplify/functions/deleteDocument/node_modules/@smithy/util-endpoints/dist-types/lib/index.d.ts new file mode 100644 index 0000000..99a0844 --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@smithy/util-endpoints/dist-types/lib/index.d.ts @@ -0,0 +1,9 @@ +export * from "./booleanEquals"; +export * from "./getAttr"; +export * from "./isSet"; +export * from "./isValidHostLabel"; +export * from "./not"; +export * from "./parseURL"; +export * from "./stringEquals"; +export * from "./substring"; +export * from "./uriEncode"; diff --git a/amplify/functions/deleteDocument/node_modules/@smithy/util-endpoints/dist-types/lib/isIpAddress.d.ts b/amplify/functions/deleteDocument/node_modules/@smithy/util-endpoints/dist-types/lib/isIpAddress.d.ts new file mode 100644 index 0000000..28aba97 --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@smithy/util-endpoints/dist-types/lib/isIpAddress.d.ts @@ -0,0 +1,4 @@ +/** + * Validates if the provided value is an IP address. + */ +export declare const isIpAddress: (value: string) => boolean; diff --git a/amplify/functions/deleteDocument/node_modules/@smithy/util-endpoints/dist-types/lib/isSet.d.ts b/amplify/functions/deleteDocument/node_modules/@smithy/util-endpoints/dist-types/lib/isSet.d.ts new file mode 100644 index 0000000..7c74ec5 --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@smithy/util-endpoints/dist-types/lib/isSet.d.ts @@ -0,0 +1,5 @@ +/** + * Evaluates whether a value is set (aka not null or undefined). + * Returns true if the value is set, otherwise returns false. 
+ */ +export declare const isSet: (value: unknown) => boolean; diff --git a/amplify/functions/deleteDocument/node_modules/@smithy/util-endpoints/dist-types/lib/isValidHostLabel.d.ts b/amplify/functions/deleteDocument/node_modules/@smithy/util-endpoints/dist-types/lib/isValidHostLabel.d.ts new file mode 100644 index 0000000..c05f9e9 --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@smithy/util-endpoints/dist-types/lib/isValidHostLabel.d.ts @@ -0,0 +1,7 @@ +/** + * Evaluates whether one or more string values are valid host labels per RFC 1123. + * + * If allowSubDomains is true, then the provided value may be zero or more dotted + * subdomains which are each validated per RFC 1123. + */ +export declare const isValidHostLabel: (value: string, allowSubDomains?: boolean) => boolean; diff --git a/amplify/functions/deleteDocument/node_modules/@smithy/util-endpoints/dist-types/lib/not.d.ts b/amplify/functions/deleteDocument/node_modules/@smithy/util-endpoints/dist-types/lib/not.d.ts new file mode 100644 index 0000000..1e8e728 --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@smithy/util-endpoints/dist-types/lib/not.d.ts @@ -0,0 +1,5 @@ +/** + * Performs logical negation on the provided boolean value, + * returning the negated value. + */ +export declare const not: (value: boolean) => boolean; diff --git a/amplify/functions/deleteDocument/node_modules/@smithy/util-endpoints/dist-types/lib/parseURL.d.ts b/amplify/functions/deleteDocument/node_modules/@smithy/util-endpoints/dist-types/lib/parseURL.d.ts new file mode 100644 index 0000000..3e0dce3 --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@smithy/util-endpoints/dist-types/lib/parseURL.d.ts @@ -0,0 +1,5 @@ +import { Endpoint, EndpointURL } from "@smithy/types"; +/** + * Parses a string, URL, or Endpoint into it’s Endpoint URL components. 
+ */ +export declare const parseURL: (value: string | URL | Endpoint) => EndpointURL | null; diff --git a/amplify/functions/deleteDocument/node_modules/@smithy/util-endpoints/dist-types/lib/stringEquals.d.ts b/amplify/functions/deleteDocument/node_modules/@smithy/util-endpoints/dist-types/lib/stringEquals.d.ts new file mode 100644 index 0000000..bdfc98d --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@smithy/util-endpoints/dist-types/lib/stringEquals.d.ts @@ -0,0 +1,5 @@ +/** + * Evaluates two string values value1 and value2 for equality and returns + * true if both values match. + */ +export declare const stringEquals: (value1: string, value2: string) => boolean; diff --git a/amplify/functions/deleteDocument/node_modules/@smithy/util-endpoints/dist-types/lib/substring.d.ts b/amplify/functions/deleteDocument/node_modules/@smithy/util-endpoints/dist-types/lib/substring.d.ts new file mode 100644 index 0000000..5d70035 --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@smithy/util-endpoints/dist-types/lib/substring.d.ts @@ -0,0 +1,7 @@ +/** + * Computes the substring of a given string, conditionally indexing from the end of the string. + * When the string is long enough to fully include the substring, return the substring. + * Otherwise, return None. The start index is inclusive and the stop index is exclusive. + * The length of the returned string will always be stop-start. 
+ */ +export declare const substring: (input: string, start: number, stop: number, reverse: boolean) => string | null; diff --git a/amplify/functions/deleteDocument/node_modules/@smithy/util-endpoints/dist-types/lib/uriEncode.d.ts b/amplify/functions/deleteDocument/node_modules/@smithy/util-endpoints/dist-types/lib/uriEncode.d.ts new file mode 100644 index 0000000..c2a720c --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@smithy/util-endpoints/dist-types/lib/uriEncode.d.ts @@ -0,0 +1,4 @@ +/** + * Performs percent-encoding per RFC3986 section 2.1 + */ +export declare const uriEncode: (value: string) => string; diff --git a/amplify/functions/deleteDocument/node_modules/@smithy/util-endpoints/dist-types/resolveEndpoint.d.ts b/amplify/functions/deleteDocument/node_modules/@smithy/util-endpoints/dist-types/resolveEndpoint.d.ts new file mode 100644 index 0000000..b02188b --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@smithy/util-endpoints/dist-types/resolveEndpoint.d.ts @@ -0,0 +1,6 @@ +import { EndpointV2 } from "@smithy/types"; +import { EndpointResolverOptions, RuleSetObject } from "./types"; +/** + * Resolves an endpoint URL by processing the endpoints ruleset and options. + */ +export declare const resolveEndpoint: (ruleSetObject: RuleSetObject, options: EndpointResolverOptions) => EndpointV2; diff --git a/amplify/functions/deleteDocument/node_modules/@smithy/util-endpoints/dist-types/ts3.4/cache/EndpointCache.d.ts b/amplify/functions/deleteDocument/node_modules/@smithy/util-endpoints/dist-types/ts3.4/cache/EndpointCache.d.ts new file mode 100644 index 0000000..9d622ae --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@smithy/util-endpoints/dist-types/ts3.4/cache/EndpointCache.d.ts @@ -0,0 +1,34 @@ +import { EndpointParams, EndpointV2 } from "@smithy/types"; +/** + * @internal + * + * Cache for endpoint ruleSet resolution. 
+ */ +export declare class EndpointCache { + private capacity; + private data; + private parameters; + /** + * @param [size] - desired average maximum capacity. A buffer of 10 additional keys will be allowed + * before keys are dropped. + * @param [params] - list of params to consider as part of the cache key. + * + * If the params list is not populated, no caching will happen. + * This may be out of order depending on how the object is created and arrives to this class. + */ + constructor({ size, params }: { + size?: number; + params?: string[]; + }); + /** + * @param endpointParams - query for endpoint. + * @param resolver - provider of the value if not present. + * @returns endpoint corresponding to the query. + */ + get(endpointParams: EndpointParams, resolver: () => EndpointV2): EndpointV2; + size(): number; + /** + * @returns cache key or false if not cachable. + */ + private hash; +} diff --git a/amplify/functions/deleteDocument/node_modules/@smithy/util-endpoints/dist-types/ts3.4/debug/debugId.d.ts b/amplify/functions/deleteDocument/node_modules/@smithy/util-endpoints/dist-types/ts3.4/debug/debugId.d.ts new file mode 100644 index 0000000..f674b8a --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@smithy/util-endpoints/dist-types/ts3.4/debug/debugId.d.ts @@ -0,0 +1 @@ +export declare const debugId = "endpoints"; diff --git a/amplify/functions/deleteDocument/node_modules/@smithy/util-endpoints/dist-types/ts3.4/debug/index.d.ts b/amplify/functions/deleteDocument/node_modules/@smithy/util-endpoints/dist-types/ts3.4/debug/index.d.ts new file mode 100644 index 0000000..1eb0bf4 --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@smithy/util-endpoints/dist-types/ts3.4/debug/index.d.ts @@ -0,0 +1,2 @@ +export * from "./debugId"; +export * from "./toDebugString"; diff --git a/amplify/functions/deleteDocument/node_modules/@smithy/util-endpoints/dist-types/ts3.4/debug/toDebugString.d.ts 
b/amplify/functions/deleteDocument/node_modules/@smithy/util-endpoints/dist-types/ts3.4/debug/toDebugString.d.ts new file mode 100644 index 0000000..e295ca0 --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@smithy/util-endpoints/dist-types/ts3.4/debug/toDebugString.d.ts @@ -0,0 +1,9 @@ +import { EndpointParameters, EndpointV2 } from "@smithy/types"; +import { GetAttrValue } from "../lib"; +import { EndpointObject, FunctionObject, FunctionReturn } from "../types"; +export declare function toDebugString(input: EndpointParameters): string; +export declare function toDebugString(input: EndpointV2): string; +export declare function toDebugString(input: GetAttrValue): string; +export declare function toDebugString(input: FunctionObject): string; +export declare function toDebugString(input: FunctionReturn): string; +export declare function toDebugString(input: EndpointObject): string; diff --git a/amplify/functions/deleteDocument/node_modules/@smithy/util-endpoints/dist-types/ts3.4/getEndpointUrlConfig.d.ts b/amplify/functions/deleteDocument/node_modules/@smithy/util-endpoints/dist-types/ts3.4/getEndpointUrlConfig.d.ts new file mode 100644 index 0000000..7b9d068 --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@smithy/util-endpoints/dist-types/ts3.4/getEndpointUrlConfig.d.ts @@ -0,0 +1,2 @@ +import { LoadedConfigSelectors } from "@smithy/node-config-provider"; +export declare const getEndpointUrlConfig: (serviceId: string) => LoadedConfigSelectors; diff --git a/amplify/functions/deleteDocument/node_modules/@smithy/util-endpoints/dist-types/ts3.4/index.d.ts b/amplify/functions/deleteDocument/node_modules/@smithy/util-endpoints/dist-types/ts3.4/index.d.ts new file mode 100644 index 0000000..7b367cf --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@smithy/util-endpoints/dist-types/ts3.4/index.d.ts @@ -0,0 +1,6 @@ +export * from "./cache/EndpointCache"; +export * from "./lib/isIpAddress"; +export * from 
"./lib/isValidHostLabel"; +export * from "./utils/customEndpointFunctions"; +export * from "./resolveEndpoint"; +export * from "./types"; diff --git a/amplify/functions/deleteDocument/node_modules/@smithy/util-endpoints/dist-types/ts3.4/lib/booleanEquals.d.ts b/amplify/functions/deleteDocument/node_modules/@smithy/util-endpoints/dist-types/ts3.4/lib/booleanEquals.d.ts new file mode 100644 index 0000000..7aec001 --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@smithy/util-endpoints/dist-types/ts3.4/lib/booleanEquals.d.ts @@ -0,0 +1,5 @@ +/** + * Evaluates two boolean values value1 and value2 for equality and returns + * true if both values match. + */ +export declare const booleanEquals: (value1: boolean, value2: boolean) => boolean; diff --git a/amplify/functions/deleteDocument/node_modules/@smithy/util-endpoints/dist-types/ts3.4/lib/getAttr.d.ts b/amplify/functions/deleteDocument/node_modules/@smithy/util-endpoints/dist-types/ts3.4/lib/getAttr.d.ts new file mode 100644 index 0000000..e2f5b43 --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@smithy/util-endpoints/dist-types/ts3.4/lib/getAttr.d.ts @@ -0,0 +1,7 @@ +export type GetAttrValue = string | boolean | { + [key: string]: GetAttrValue; +} | Array; +/** + * Returns value corresponding to pathing string for an array or object. + */ +export declare const getAttr: (value: GetAttrValue, path: string) => GetAttrValue; diff --git a/amplify/functions/deleteDocument/node_modules/@smithy/util-endpoints/dist-types/ts3.4/lib/getAttrPathList.d.ts b/amplify/functions/deleteDocument/node_modules/@smithy/util-endpoints/dist-types/ts3.4/lib/getAttrPathList.d.ts new file mode 100644 index 0000000..93bbf31 --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@smithy/util-endpoints/dist-types/ts3.4/lib/getAttrPathList.d.ts @@ -0,0 +1,4 @@ +/** + * Parses path as a getAttr expression, returning a list of strings. 
+ */ +export declare const getAttrPathList: (path: string) => Array; diff --git a/amplify/functions/deleteDocument/node_modules/@smithy/util-endpoints/dist-types/ts3.4/lib/index.d.ts b/amplify/functions/deleteDocument/node_modules/@smithy/util-endpoints/dist-types/ts3.4/lib/index.d.ts new file mode 100644 index 0000000..a28ecaa --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@smithy/util-endpoints/dist-types/ts3.4/lib/index.d.ts @@ -0,0 +1,9 @@ +export * from "./booleanEquals"; +export * from "./getAttr"; +export * from "./isSet"; +export * from "./isValidHostLabel"; +export * from "./not"; +export * from "./parseURL"; +export * from "./stringEquals"; +export * from "./substring"; +export * from "./uriEncode"; diff --git a/amplify/functions/deleteDocument/node_modules/@smithy/util-endpoints/dist-types/ts3.4/lib/isIpAddress.d.ts b/amplify/functions/deleteDocument/node_modules/@smithy/util-endpoints/dist-types/ts3.4/lib/isIpAddress.d.ts new file mode 100644 index 0000000..9f37893 --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@smithy/util-endpoints/dist-types/ts3.4/lib/isIpAddress.d.ts @@ -0,0 +1,4 @@ +/** + * Validates if the provided value is an IP address. + */ +export declare const isIpAddress: (value: string) => boolean; diff --git a/amplify/functions/deleteDocument/node_modules/@smithy/util-endpoints/dist-types/ts3.4/lib/isSet.d.ts b/amplify/functions/deleteDocument/node_modules/@smithy/util-endpoints/dist-types/ts3.4/lib/isSet.d.ts new file mode 100644 index 0000000..6b102dd --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@smithy/util-endpoints/dist-types/ts3.4/lib/isSet.d.ts @@ -0,0 +1,5 @@ +/** + * Evaluates whether a value is set (aka not null or undefined). + * Returns true if the value is set, otherwise returns false. 
+ */ +export declare const isSet: (value: unknown) => boolean; diff --git a/amplify/functions/deleteDocument/node_modules/@smithy/util-endpoints/dist-types/ts3.4/lib/isValidHostLabel.d.ts b/amplify/functions/deleteDocument/node_modules/@smithy/util-endpoints/dist-types/ts3.4/lib/isValidHostLabel.d.ts new file mode 100644 index 0000000..01f7eb9 --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@smithy/util-endpoints/dist-types/ts3.4/lib/isValidHostLabel.d.ts @@ -0,0 +1,7 @@ +/** + * Evaluates whether one or more string values are valid host labels per RFC 1123. + * + * If allowSubDomains is true, then the provided value may be zero or more dotted + * subdomains which are each validated per RFC 1123. + */ +export declare const isValidHostLabel: (value: string, allowSubDomains?: boolean) => boolean; diff --git a/amplify/functions/deleteDocument/node_modules/@smithy/util-endpoints/dist-types/ts3.4/lib/not.d.ts b/amplify/functions/deleteDocument/node_modules/@smithy/util-endpoints/dist-types/ts3.4/lib/not.d.ts new file mode 100644 index 0000000..b4e84ac --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@smithy/util-endpoints/dist-types/ts3.4/lib/not.d.ts @@ -0,0 +1,5 @@ +/** + * Performs logical negation on the provided boolean value, + * returning the negated value. + */ +export declare const not: (value: boolean) => boolean; diff --git a/amplify/functions/deleteDocument/node_modules/@smithy/util-endpoints/dist-types/ts3.4/lib/parseURL.d.ts b/amplify/functions/deleteDocument/node_modules/@smithy/util-endpoints/dist-types/ts3.4/lib/parseURL.d.ts new file mode 100644 index 0000000..0f54066 --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@smithy/util-endpoints/dist-types/ts3.4/lib/parseURL.d.ts @@ -0,0 +1,5 @@ +import { Endpoint, EndpointURL } from "@smithy/types"; +/** + * Parses a string, URL, or Endpoint into it’s Endpoint URL components. 
+ */ +export declare const parseURL: (value: string | URL | Endpoint) => EndpointURL | null; diff --git a/amplify/functions/deleteDocument/node_modules/@smithy/util-endpoints/dist-types/ts3.4/lib/stringEquals.d.ts b/amplify/functions/deleteDocument/node_modules/@smithy/util-endpoints/dist-types/ts3.4/lib/stringEquals.d.ts new file mode 100644 index 0000000..9acb10c --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@smithy/util-endpoints/dist-types/ts3.4/lib/stringEquals.d.ts @@ -0,0 +1,5 @@ +/** + * Evaluates two string values value1 and value2 for equality and returns + * true if both values match. + */ +export declare const stringEquals: (value1: string, value2: string) => boolean; diff --git a/amplify/functions/deleteDocument/node_modules/@smithy/util-endpoints/dist-types/ts3.4/lib/substring.d.ts b/amplify/functions/deleteDocument/node_modules/@smithy/util-endpoints/dist-types/ts3.4/lib/substring.d.ts new file mode 100644 index 0000000..a99025c --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@smithy/util-endpoints/dist-types/ts3.4/lib/substring.d.ts @@ -0,0 +1,7 @@ +/** + * Computes the substring of a given string, conditionally indexing from the end of the string. + * When the string is long enough to fully include the substring, return the substring. + * Otherwise, return None. The start index is inclusive and the stop index is exclusive. + * The length of the returned string will always be stop-start. 
+ */ +export declare const substring: (input: string, start: number, stop: number, reverse: boolean) => string | null; diff --git a/amplify/functions/deleteDocument/node_modules/@smithy/util-endpoints/dist-types/ts3.4/lib/uriEncode.d.ts b/amplify/functions/deleteDocument/node_modules/@smithy/util-endpoints/dist-types/ts3.4/lib/uriEncode.d.ts new file mode 100644 index 0000000..acb75bb --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@smithy/util-endpoints/dist-types/ts3.4/lib/uriEncode.d.ts @@ -0,0 +1,4 @@ +/** + * Performs percent-encoding per RFC3986 section 2.1 + */ +export declare const uriEncode: (value: string) => string; diff --git a/amplify/functions/deleteDocument/node_modules/@smithy/util-endpoints/dist-types/ts3.4/resolveEndpoint.d.ts b/amplify/functions/deleteDocument/node_modules/@smithy/util-endpoints/dist-types/ts3.4/resolveEndpoint.d.ts new file mode 100644 index 0000000..5469fa2 --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@smithy/util-endpoints/dist-types/ts3.4/resolveEndpoint.d.ts @@ -0,0 +1,6 @@ +import { EndpointV2 } from "@smithy/types"; +import { EndpointResolverOptions, RuleSetObject } from "./types"; +/** + * Resolves an endpoint URL by processing the endpoints ruleset and options. 
+ */ +export declare const resolveEndpoint: (ruleSetObject: RuleSetObject, options: EndpointResolverOptions) => EndpointV2; diff --git a/amplify/functions/deleteDocument/node_modules/@smithy/util-endpoints/dist-types/ts3.4/types/EndpointError.d.ts b/amplify/functions/deleteDocument/node_modules/@smithy/util-endpoints/dist-types/ts3.4/types/EndpointError.d.ts new file mode 100644 index 0000000..4f3c538 --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@smithy/util-endpoints/dist-types/ts3.4/types/EndpointError.d.ts @@ -0,0 +1,3 @@ +export declare class EndpointError extends Error { + constructor(message: string); +} diff --git a/amplify/functions/deleteDocument/node_modules/@smithy/util-endpoints/dist-types/ts3.4/types/EndpointFunctions.d.ts b/amplify/functions/deleteDocument/node_modules/@smithy/util-endpoints/dist-types/ts3.4/types/EndpointFunctions.d.ts new file mode 100644 index 0000000..7b3cf42 --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@smithy/util-endpoints/dist-types/ts3.4/types/EndpointFunctions.d.ts @@ -0,0 +1,2 @@ +import { FunctionReturn } from "./shared"; +export type EndpointFunctions = Record FunctionReturn>; diff --git a/amplify/functions/deleteDocument/node_modules/@smithy/util-endpoints/dist-types/ts3.4/types/EndpointRuleObject.d.ts b/amplify/functions/deleteDocument/node_modules/@smithy/util-endpoints/dist-types/ts3.4/types/EndpointRuleObject.d.ts new file mode 100644 index 0000000..436001e --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@smithy/util-endpoints/dist-types/ts3.4/types/EndpointRuleObject.d.ts @@ -0,0 +1,5 @@ +import { EndpointObject as __EndpointObject, EndpointObjectHeaders as __EndpointObjectHeaders, EndpointObjectProperties as __EndpointObjectProperties, EndpointRuleObject as __EndpointRuleObject } from "@smithy/types"; +export type EndpointObjectProperties = __EndpointObjectProperties; +export type EndpointObjectHeaders = __EndpointObjectHeaders; +export type EndpointObject 
= __EndpointObject; +export type EndpointRuleObject = __EndpointRuleObject; diff --git a/amplify/functions/deleteDocument/node_modules/@smithy/util-endpoints/dist-types/ts3.4/types/ErrorRuleObject.d.ts b/amplify/functions/deleteDocument/node_modules/@smithy/util-endpoints/dist-types/ts3.4/types/ErrorRuleObject.d.ts new file mode 100644 index 0000000..1540835 --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@smithy/util-endpoints/dist-types/ts3.4/types/ErrorRuleObject.d.ts @@ -0,0 +1,2 @@ +import { ErrorRuleObject as __ErrorRuleObject } from "@smithy/types"; +export type ErrorRuleObject = __ErrorRuleObject; diff --git a/amplify/functions/deleteDocument/node_modules/@smithy/util-endpoints/dist-types/ts3.4/types/RuleSetObject.d.ts b/amplify/functions/deleteDocument/node_modules/@smithy/util-endpoints/dist-types/ts3.4/types/RuleSetObject.d.ts new file mode 100644 index 0000000..227b269 --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@smithy/util-endpoints/dist-types/ts3.4/types/RuleSetObject.d.ts @@ -0,0 +1,4 @@ +import { DeprecatedObject as __DeprecatedObject, ParameterObject as __ParameterObject, RuleSetObject as __RuleSetObject } from "@smithy/types"; +export type DeprecatedObject = __DeprecatedObject; +export type ParameterObject = __ParameterObject; +export type RuleSetObject = __RuleSetObject; diff --git a/amplify/functions/deleteDocument/node_modules/@smithy/util-endpoints/dist-types/ts3.4/types/TreeRuleObject.d.ts b/amplify/functions/deleteDocument/node_modules/@smithy/util-endpoints/dist-types/ts3.4/types/TreeRuleObject.d.ts new file mode 100644 index 0000000..ecdb6b4 --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@smithy/util-endpoints/dist-types/ts3.4/types/TreeRuleObject.d.ts @@ -0,0 +1,3 @@ +import { RuleSetRules as __RuleSetRules, TreeRuleObject as __TreeRuleObject } from "@smithy/types"; +export type RuleSetRules = __RuleSetRules; +export type TreeRuleObject = __TreeRuleObject; diff --git 
a/amplify/functions/deleteDocument/node_modules/@smithy/util-endpoints/dist-types/ts3.4/types/index.d.ts b/amplify/functions/deleteDocument/node_modules/@smithy/util-endpoints/dist-types/ts3.4/types/index.d.ts new file mode 100644 index 0000000..f89fb63 --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@smithy/util-endpoints/dist-types/ts3.4/types/index.d.ts @@ -0,0 +1,7 @@ +export * from "./EndpointError"; +export * from "./EndpointFunctions"; +export * from "./EndpointRuleObject"; +export * from "./ErrorRuleObject"; +export * from "./RuleSetObject"; +export * from "./TreeRuleObject"; +export * from "./shared"; diff --git a/amplify/functions/deleteDocument/node_modules/@smithy/util-endpoints/dist-types/ts3.4/types/shared.d.ts b/amplify/functions/deleteDocument/node_modules/@smithy/util-endpoints/dist-types/ts3.4/types/shared.d.ts new file mode 100644 index 0000000..052dcf3 --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@smithy/util-endpoints/dist-types/ts3.4/types/shared.d.ts @@ -0,0 +1,25 @@ +import { EndpointARN, EndpointPartition, Logger } from "@smithy/types"; +export type ReferenceObject = { + ref: string; +}; +export type FunctionObject = { + fn: string; + argv: FunctionArgv; +}; +export type FunctionArgv = Array; +export type FunctionReturn = string | boolean | number | EndpointARN | EndpointPartition | { + [key: string]: FunctionReturn; +} | null; +export type ConditionObject = FunctionObject & { + assign?: string; +}; +export type Expression = string | ReferenceObject | FunctionObject; +export type EndpointParams = Record; +export type EndpointResolverOptions = { + endpointParams: EndpointParams; + logger?: Logger; +}; +export type ReferenceRecord = Record; +export type EvaluateOptions = EndpointResolverOptions & { + referenceRecord: ReferenceRecord; +}; diff --git a/amplify/functions/deleteDocument/node_modules/@smithy/util-endpoints/dist-types/ts3.4/utils/callFunction.d.ts 
b/amplify/functions/deleteDocument/node_modules/@smithy/util-endpoints/dist-types/ts3.4/utils/callFunction.d.ts new file mode 100644 index 0000000..bfdf543 --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@smithy/util-endpoints/dist-types/ts3.4/utils/callFunction.d.ts @@ -0,0 +1,2 @@ +import { EvaluateOptions, FunctionObject, FunctionReturn } from "../types"; +export declare const callFunction: ({ fn, argv }: FunctionObject, options: EvaluateOptions) => FunctionReturn; diff --git a/amplify/functions/deleteDocument/node_modules/@smithy/util-endpoints/dist-types/ts3.4/utils/customEndpointFunctions.d.ts b/amplify/functions/deleteDocument/node_modules/@smithy/util-endpoints/dist-types/ts3.4/utils/customEndpointFunctions.d.ts new file mode 100644 index 0000000..1cd2240 --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@smithy/util-endpoints/dist-types/ts3.4/utils/customEndpointFunctions.d.ts @@ -0,0 +1,4 @@ +import { EndpointFunctions } from "../types/EndpointFunctions"; +export declare const customEndpointFunctions: { + [key: string]: EndpointFunctions; +}; diff --git a/amplify/functions/deleteDocument/node_modules/@smithy/util-endpoints/dist-types/ts3.4/utils/endpointFunctions.d.ts b/amplify/functions/deleteDocument/node_modules/@smithy/util-endpoints/dist-types/ts3.4/utils/endpointFunctions.d.ts new file mode 100644 index 0000000..cde57d1 --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@smithy/util-endpoints/dist-types/ts3.4/utils/endpointFunctions.d.ts @@ -0,0 +1,11 @@ +export declare const endpointFunctions: { + booleanEquals: (value1: boolean, value2: boolean) => boolean; + getAttr: (value: import("../lib").GetAttrValue, path: string) => import("../lib").GetAttrValue; + isSet: (value: unknown) => boolean; + isValidHostLabel: (value: string, allowSubDomains?: boolean) => boolean; + not: (value: boolean) => boolean; + parseURL: (value: string | URL | import("@smithy/types").Endpoint) => 
import("@smithy/types").EndpointURL | null; + stringEquals: (value1: string, value2: string) => boolean; + substring: (input: string, start: number, stop: number, reverse: boolean) => string | null; + uriEncode: (value: string) => string; +}; diff --git a/amplify/functions/deleteDocument/node_modules/@smithy/util-endpoints/dist-types/ts3.4/utils/evaluateCondition.d.ts b/amplify/functions/deleteDocument/node_modules/@smithy/util-endpoints/dist-types/ts3.4/utils/evaluateCondition.d.ts new file mode 100644 index 0000000..ba2c0be --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@smithy/util-endpoints/dist-types/ts3.4/utils/evaluateCondition.d.ts @@ -0,0 +1,8 @@ +import { ConditionObject, EvaluateOptions } from "../types"; +export declare const evaluateCondition: ({ assign, ...fnArgs }: ConditionObject, options: EvaluateOptions) => { + toAssign?: { + name: string; + value: import("../types").FunctionReturn; + } | undefined; + result: boolean; +}; diff --git a/amplify/functions/deleteDocument/node_modules/@smithy/util-endpoints/dist-types/ts3.4/utils/evaluateConditions.d.ts b/amplify/functions/deleteDocument/node_modules/@smithy/util-endpoints/dist-types/ts3.4/utils/evaluateConditions.d.ts new file mode 100644 index 0000000..a7fbc5f --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@smithy/util-endpoints/dist-types/ts3.4/utils/evaluateConditions.d.ts @@ -0,0 +1,8 @@ +import { ConditionObject, EvaluateOptions, FunctionReturn } from "../types"; +export declare const evaluateConditions: (conditions: ConditionObject[] | undefined, options: EvaluateOptions) => { + result: false; + referenceRecord?: undefined; +} | { + result: boolean; + referenceRecord: Record; +}; diff --git a/amplify/functions/deleteDocument/node_modules/@smithy/util-endpoints/dist-types/ts3.4/utils/evaluateEndpointRule.d.ts b/amplify/functions/deleteDocument/node_modules/@smithy/util-endpoints/dist-types/ts3.4/utils/evaluateEndpointRule.d.ts new file mode 100644 index 
0000000..32f23ff --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@smithy/util-endpoints/dist-types/ts3.4/utils/evaluateEndpointRule.d.ts @@ -0,0 +1,3 @@ +import { EndpointV2 } from "@smithy/types"; +import { EndpointRuleObject, EvaluateOptions } from "../types"; +export declare const evaluateEndpointRule: (endpointRule: EndpointRuleObject, options: EvaluateOptions) => EndpointV2 | undefined; diff --git a/amplify/functions/deleteDocument/node_modules/@smithy/util-endpoints/dist-types/ts3.4/utils/evaluateErrorRule.d.ts b/amplify/functions/deleteDocument/node_modules/@smithy/util-endpoints/dist-types/ts3.4/utils/evaluateErrorRule.d.ts new file mode 100644 index 0000000..eef15e3 --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@smithy/util-endpoints/dist-types/ts3.4/utils/evaluateErrorRule.d.ts @@ -0,0 +1,2 @@ +import { ErrorRuleObject, EvaluateOptions } from "../types"; +export declare const evaluateErrorRule: (errorRule: ErrorRuleObject, options: EvaluateOptions) => void; diff --git a/amplify/functions/deleteDocument/node_modules/@smithy/util-endpoints/dist-types/ts3.4/utils/evaluateExpression.d.ts b/amplify/functions/deleteDocument/node_modules/@smithy/util-endpoints/dist-types/ts3.4/utils/evaluateExpression.d.ts new file mode 100644 index 0000000..8bbd358 --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@smithy/util-endpoints/dist-types/ts3.4/utils/evaluateExpression.d.ts @@ -0,0 +1,2 @@ +import { EvaluateOptions, Expression } from "../types"; +export declare const evaluateExpression: (obj: Expression, keyName: string, options: EvaluateOptions) => import("../types").FunctionReturn; diff --git a/amplify/functions/deleteDocument/node_modules/@smithy/util-endpoints/dist-types/ts3.4/utils/evaluateRules.d.ts b/amplify/functions/deleteDocument/node_modules/@smithy/util-endpoints/dist-types/ts3.4/utils/evaluateRules.d.ts new file mode 100644 index 0000000..a37fe07 --- /dev/null +++ 
b/amplify/functions/deleteDocument/node_modules/@smithy/util-endpoints/dist-types/ts3.4/utils/evaluateRules.d.ts @@ -0,0 +1,3 @@ +import { EndpointV2 } from "@smithy/types"; +import { EvaluateOptions } from "../types"; +export declare const evaluateRules: (rules: import("@smithy/types").RuleSetRules, options: EvaluateOptions) => EndpointV2; diff --git a/amplify/functions/deleteDocument/node_modules/@smithy/util-endpoints/dist-types/ts3.4/utils/evaluateTemplate.d.ts b/amplify/functions/deleteDocument/node_modules/@smithy/util-endpoints/dist-types/ts3.4/utils/evaluateTemplate.d.ts new file mode 100644 index 0000000..e6ae9c3 --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@smithy/util-endpoints/dist-types/ts3.4/utils/evaluateTemplate.d.ts @@ -0,0 +1,2 @@ +import { EvaluateOptions } from "../types"; +export declare const evaluateTemplate: (template: string, options: EvaluateOptions) => string; diff --git a/amplify/functions/deleteDocument/node_modules/@smithy/util-endpoints/dist-types/ts3.4/utils/evaluateTreeRule.d.ts b/amplify/functions/deleteDocument/node_modules/@smithy/util-endpoints/dist-types/ts3.4/utils/evaluateTreeRule.d.ts new file mode 100644 index 0000000..8518f7b --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@smithy/util-endpoints/dist-types/ts3.4/utils/evaluateTreeRule.d.ts @@ -0,0 +1,3 @@ +import { EndpointV2 } from "@smithy/types"; +import { EvaluateOptions, TreeRuleObject } from "../types"; +export declare const evaluateTreeRule: (treeRule: TreeRuleObject, options: EvaluateOptions) => EndpointV2 | undefined; diff --git a/amplify/functions/deleteDocument/node_modules/@smithy/util-endpoints/dist-types/ts3.4/utils/getEndpointHeaders.d.ts b/amplify/functions/deleteDocument/node_modules/@smithy/util-endpoints/dist-types/ts3.4/utils/getEndpointHeaders.d.ts new file mode 100644 index 0000000..2775159 --- /dev/null +++ 
b/amplify/functions/deleteDocument/node_modules/@smithy/util-endpoints/dist-types/ts3.4/utils/getEndpointHeaders.d.ts @@ -0,0 +1,2 @@ +import { EndpointObjectHeaders, EvaluateOptions } from "../types"; +export declare const getEndpointHeaders: (headers: EndpointObjectHeaders, options: EvaluateOptions) => {}; diff --git a/amplify/functions/deleteDocument/node_modules/@smithy/util-endpoints/dist-types/ts3.4/utils/getEndpointProperties.d.ts b/amplify/functions/deleteDocument/node_modules/@smithy/util-endpoints/dist-types/ts3.4/utils/getEndpointProperties.d.ts new file mode 100644 index 0000000..944b39d --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@smithy/util-endpoints/dist-types/ts3.4/utils/getEndpointProperties.d.ts @@ -0,0 +1,2 @@ +import { EndpointObjectProperties, EvaluateOptions } from "../types"; +export declare const getEndpointProperties: (properties: EndpointObjectProperties, options: EvaluateOptions) => {}; diff --git a/amplify/functions/deleteDocument/node_modules/@smithy/util-endpoints/dist-types/ts3.4/utils/getEndpointProperty.d.ts b/amplify/functions/deleteDocument/node_modules/@smithy/util-endpoints/dist-types/ts3.4/utils/getEndpointProperty.d.ts new file mode 100644 index 0000000..5002377 --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@smithy/util-endpoints/dist-types/ts3.4/utils/getEndpointProperty.d.ts @@ -0,0 +1,3 @@ +import { EndpointObjectProperty } from "@smithy/types"; +import { EvaluateOptions } from "../types"; +export declare const getEndpointProperty: (property: EndpointObjectProperty, options: EvaluateOptions) => EndpointObjectProperty; diff --git a/amplify/functions/deleteDocument/node_modules/@smithy/util-endpoints/dist-types/ts3.4/utils/getEndpointUrl.d.ts b/amplify/functions/deleteDocument/node_modules/@smithy/util-endpoints/dist-types/ts3.4/utils/getEndpointUrl.d.ts new file mode 100644 index 0000000..9c93422 --- /dev/null +++ 
b/amplify/functions/deleteDocument/node_modules/@smithy/util-endpoints/dist-types/ts3.4/utils/getEndpointUrl.d.ts @@ -0,0 +1,2 @@ +import { EvaluateOptions, Expression } from "../types"; +export declare const getEndpointUrl: (endpointUrl: Expression, options: EvaluateOptions) => URL; diff --git a/amplify/functions/deleteDocument/node_modules/@smithy/util-endpoints/dist-types/ts3.4/utils/getReferenceValue.d.ts b/amplify/functions/deleteDocument/node_modules/@smithy/util-endpoints/dist-types/ts3.4/utils/getReferenceValue.d.ts new file mode 100644 index 0000000..2ebfda3 --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@smithy/util-endpoints/dist-types/ts3.4/utils/getReferenceValue.d.ts @@ -0,0 +1,2 @@ +import { EvaluateOptions, ReferenceObject } from "../types"; +export declare const getReferenceValue: ({ ref }: ReferenceObject, options: EvaluateOptions) => import("../types").FunctionReturn; diff --git a/amplify/functions/deleteDocument/node_modules/@smithy/util-endpoints/dist-types/ts3.4/utils/index.d.ts b/amplify/functions/deleteDocument/node_modules/@smithy/util-endpoints/dist-types/ts3.4/utils/index.d.ts new file mode 100644 index 0000000..bd481df --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@smithy/util-endpoints/dist-types/ts3.4/utils/index.d.ts @@ -0,0 +1,2 @@ +export * from "./customEndpointFunctions"; +export * from "./evaluateRules"; diff --git a/amplify/functions/deleteDocument/node_modules/@smithy/util-endpoints/dist-types/types/EndpointError.d.ts b/amplify/functions/deleteDocument/node_modules/@smithy/util-endpoints/dist-types/types/EndpointError.d.ts new file mode 100644 index 0000000..89132f2 --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@smithy/util-endpoints/dist-types/types/EndpointError.d.ts @@ -0,0 +1,3 @@ +export declare class EndpointError extends Error { + constructor(message: string); +} diff --git 
a/amplify/functions/deleteDocument/node_modules/@smithy/util-endpoints/dist-types/types/EndpointFunctions.d.ts b/amplify/functions/deleteDocument/node_modules/@smithy/util-endpoints/dist-types/types/EndpointFunctions.d.ts new file mode 100644 index 0000000..33b1a0b --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@smithy/util-endpoints/dist-types/types/EndpointFunctions.d.ts @@ -0,0 +1,2 @@ +import { FunctionReturn } from "./shared"; +export type EndpointFunctions = Record FunctionReturn>; diff --git a/amplify/functions/deleteDocument/node_modules/@smithy/util-endpoints/dist-types/types/EndpointRuleObject.d.ts b/amplify/functions/deleteDocument/node_modules/@smithy/util-endpoints/dist-types/types/EndpointRuleObject.d.ts new file mode 100644 index 0000000..d24545f --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@smithy/util-endpoints/dist-types/types/EndpointRuleObject.d.ts @@ -0,0 +1,5 @@ +import { EndpointObject as __EndpointObject, EndpointObjectHeaders as __EndpointObjectHeaders, EndpointObjectProperties as __EndpointObjectProperties, EndpointRuleObject as __EndpointRuleObject } from "@smithy/types"; +export type EndpointObjectProperties = __EndpointObjectProperties; +export type EndpointObjectHeaders = __EndpointObjectHeaders; +export type EndpointObject = __EndpointObject; +export type EndpointRuleObject = __EndpointRuleObject; diff --git a/amplify/functions/deleteDocument/node_modules/@smithy/util-endpoints/dist-types/types/ErrorRuleObject.d.ts b/amplify/functions/deleteDocument/node_modules/@smithy/util-endpoints/dist-types/types/ErrorRuleObject.d.ts new file mode 100644 index 0000000..51fe138 --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@smithy/util-endpoints/dist-types/types/ErrorRuleObject.d.ts @@ -0,0 +1,2 @@ +import { ErrorRuleObject as __ErrorRuleObject } from "@smithy/types"; +export type ErrorRuleObject = __ErrorRuleObject; diff --git 
a/amplify/functions/deleteDocument/node_modules/@smithy/util-endpoints/dist-types/types/RuleSetObject.d.ts b/amplify/functions/deleteDocument/node_modules/@smithy/util-endpoints/dist-types/types/RuleSetObject.d.ts new file mode 100644 index 0000000..3335b80 --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@smithy/util-endpoints/dist-types/types/RuleSetObject.d.ts @@ -0,0 +1,4 @@ +import { DeprecatedObject as __DeprecatedObject, ParameterObject as __ParameterObject, RuleSetObject as __RuleSetObject } from "@smithy/types"; +export type DeprecatedObject = __DeprecatedObject; +export type ParameterObject = __ParameterObject; +export type RuleSetObject = __RuleSetObject; diff --git a/amplify/functions/deleteDocument/node_modules/@smithy/util-endpoints/dist-types/types/TreeRuleObject.d.ts b/amplify/functions/deleteDocument/node_modules/@smithy/util-endpoints/dist-types/types/TreeRuleObject.d.ts new file mode 100644 index 0000000..3d902d0 --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@smithy/util-endpoints/dist-types/types/TreeRuleObject.d.ts @@ -0,0 +1,3 @@ +import { RuleSetRules as __RuleSetRules, TreeRuleObject as __TreeRuleObject } from "@smithy/types"; +export type RuleSetRules = __RuleSetRules; +export type TreeRuleObject = __TreeRuleObject; diff --git a/amplify/functions/deleteDocument/node_modules/@smithy/util-endpoints/dist-types/types/index.d.ts b/amplify/functions/deleteDocument/node_modules/@smithy/util-endpoints/dist-types/types/index.d.ts new file mode 100644 index 0000000..a49f984 --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@smithy/util-endpoints/dist-types/types/index.d.ts @@ -0,0 +1,7 @@ +export * from "./EndpointError"; +export * from "./EndpointFunctions"; +export * from "./EndpointRuleObject"; +export * from "./ErrorRuleObject"; +export * from "./RuleSetObject"; +export * from "./TreeRuleObject"; +export * from "./shared"; diff --git 
a/amplify/functions/deleteDocument/node_modules/@smithy/util-endpoints/dist-types/types/shared.d.ts b/amplify/functions/deleteDocument/node_modules/@smithy/util-endpoints/dist-types/types/shared.d.ts new file mode 100644 index 0000000..8351a92 --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@smithy/util-endpoints/dist-types/types/shared.d.ts @@ -0,0 +1,25 @@ +import { EndpointARN, EndpointPartition, Logger } from "@smithy/types"; +export type ReferenceObject = { + ref: string; +}; +export type FunctionObject = { + fn: string; + argv: FunctionArgv; +}; +export type FunctionArgv = Array; +export type FunctionReturn = string | boolean | number | EndpointARN | EndpointPartition | { + [key: string]: FunctionReturn; +} | null; +export type ConditionObject = FunctionObject & { + assign?: string; +}; +export type Expression = string | ReferenceObject | FunctionObject; +export type EndpointParams = Record; +export type EndpointResolverOptions = { + endpointParams: EndpointParams; + logger?: Logger; +}; +export type ReferenceRecord = Record; +export type EvaluateOptions = EndpointResolverOptions & { + referenceRecord: ReferenceRecord; +}; diff --git a/amplify/functions/deleteDocument/node_modules/@smithy/util-endpoints/dist-types/utils/callFunction.d.ts b/amplify/functions/deleteDocument/node_modules/@smithy/util-endpoints/dist-types/utils/callFunction.d.ts new file mode 100644 index 0000000..729a206 --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@smithy/util-endpoints/dist-types/utils/callFunction.d.ts @@ -0,0 +1,2 @@ +import { EvaluateOptions, FunctionObject, FunctionReturn } from "../types"; +export declare const callFunction: ({ fn, argv }: FunctionObject, options: EvaluateOptions) => FunctionReturn; diff --git a/amplify/functions/deleteDocument/node_modules/@smithy/util-endpoints/dist-types/utils/customEndpointFunctions.d.ts 
b/amplify/functions/deleteDocument/node_modules/@smithy/util-endpoints/dist-types/utils/customEndpointFunctions.d.ts new file mode 100644 index 0000000..d8971d0 --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@smithy/util-endpoints/dist-types/utils/customEndpointFunctions.d.ts @@ -0,0 +1,4 @@ +import { EndpointFunctions } from "../types/EndpointFunctions"; +export declare const customEndpointFunctions: { + [key: string]: EndpointFunctions; +}; diff --git a/amplify/functions/deleteDocument/node_modules/@smithy/util-endpoints/dist-types/utils/endpointFunctions.d.ts b/amplify/functions/deleteDocument/node_modules/@smithy/util-endpoints/dist-types/utils/endpointFunctions.d.ts new file mode 100644 index 0000000..12d75b9 --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@smithy/util-endpoints/dist-types/utils/endpointFunctions.d.ts @@ -0,0 +1,11 @@ +export declare const endpointFunctions: { + booleanEquals: (value1: boolean, value2: boolean) => boolean; + getAttr: (value: import("../lib").GetAttrValue, path: string) => import("../lib").GetAttrValue; + isSet: (value: unknown) => boolean; + isValidHostLabel: (value: string, allowSubDomains?: boolean) => boolean; + not: (value: boolean) => boolean; + parseURL: (value: string | URL | import("@smithy/types").Endpoint) => import("@smithy/types").EndpointURL | null; + stringEquals: (value1: string, value2: string) => boolean; + substring: (input: string, start: number, stop: number, reverse: boolean) => string | null; + uriEncode: (value: string) => string; +}; diff --git a/amplify/functions/deleteDocument/node_modules/@smithy/util-endpoints/dist-types/utils/evaluateCondition.d.ts b/amplify/functions/deleteDocument/node_modules/@smithy/util-endpoints/dist-types/utils/evaluateCondition.d.ts new file mode 100644 index 0000000..5fbe59f --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@smithy/util-endpoints/dist-types/utils/evaluateCondition.d.ts @@ -0,0 +1,8 @@ +import { 
ConditionObject, EvaluateOptions } from "../types"; +export declare const evaluateCondition: ({ assign, ...fnArgs }: ConditionObject, options: EvaluateOptions) => { + toAssign?: { + name: string; + value: import("../types").FunctionReturn; + } | undefined; + result: boolean; +}; diff --git a/amplify/functions/deleteDocument/node_modules/@smithy/util-endpoints/dist-types/utils/evaluateConditions.d.ts b/amplify/functions/deleteDocument/node_modules/@smithy/util-endpoints/dist-types/utils/evaluateConditions.d.ts new file mode 100644 index 0000000..4131beb --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@smithy/util-endpoints/dist-types/utils/evaluateConditions.d.ts @@ -0,0 +1,8 @@ +import { ConditionObject, EvaluateOptions, FunctionReturn } from "../types"; +export declare const evaluateConditions: (conditions: ConditionObject[] | undefined, options: EvaluateOptions) => { + result: false; + referenceRecord?: undefined; +} | { + result: boolean; + referenceRecord: Record; +}; diff --git a/amplify/functions/deleteDocument/node_modules/@smithy/util-endpoints/dist-types/utils/evaluateEndpointRule.d.ts b/amplify/functions/deleteDocument/node_modules/@smithy/util-endpoints/dist-types/utils/evaluateEndpointRule.d.ts new file mode 100644 index 0000000..da9496e --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@smithy/util-endpoints/dist-types/utils/evaluateEndpointRule.d.ts @@ -0,0 +1,3 @@ +import { EndpointV2 } from "@smithy/types"; +import { EndpointRuleObject, EvaluateOptions } from "../types"; +export declare const evaluateEndpointRule: (endpointRule: EndpointRuleObject, options: EvaluateOptions) => EndpointV2 | undefined; diff --git a/amplify/functions/deleteDocument/node_modules/@smithy/util-endpoints/dist-types/utils/evaluateErrorRule.d.ts b/amplify/functions/deleteDocument/node_modules/@smithy/util-endpoints/dist-types/utils/evaluateErrorRule.d.ts new file mode 100644 index 0000000..df4973d --- /dev/null +++ 
b/amplify/functions/deleteDocument/node_modules/@smithy/util-endpoints/dist-types/utils/evaluateErrorRule.d.ts @@ -0,0 +1,2 @@ +import { ErrorRuleObject, EvaluateOptions } from "../types"; +export declare const evaluateErrorRule: (errorRule: ErrorRuleObject, options: EvaluateOptions) => void; diff --git a/amplify/functions/deleteDocument/node_modules/@smithy/util-endpoints/dist-types/utils/evaluateExpression.d.ts b/amplify/functions/deleteDocument/node_modules/@smithy/util-endpoints/dist-types/utils/evaluateExpression.d.ts new file mode 100644 index 0000000..2541960 --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@smithy/util-endpoints/dist-types/utils/evaluateExpression.d.ts @@ -0,0 +1,2 @@ +import { EvaluateOptions, Expression } from "../types"; +export declare const evaluateExpression: (obj: Expression, keyName: string, options: EvaluateOptions) => import("../types").FunctionReturn; diff --git a/amplify/functions/deleteDocument/node_modules/@smithy/util-endpoints/dist-types/utils/evaluateRules.d.ts b/amplify/functions/deleteDocument/node_modules/@smithy/util-endpoints/dist-types/utils/evaluateRules.d.ts new file mode 100644 index 0000000..d38c8be --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@smithy/util-endpoints/dist-types/utils/evaluateRules.d.ts @@ -0,0 +1,3 @@ +import { EndpointV2 } from "@smithy/types"; +import { EvaluateOptions } from "../types"; +export declare const evaluateRules: (rules: import("@smithy/types").RuleSetRules, options: EvaluateOptions) => EndpointV2; diff --git a/amplify/functions/deleteDocument/node_modules/@smithy/util-endpoints/dist-types/utils/evaluateTemplate.d.ts b/amplify/functions/deleteDocument/node_modules/@smithy/util-endpoints/dist-types/utils/evaluateTemplate.d.ts new file mode 100644 index 0000000..9b0b9ad --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@smithy/util-endpoints/dist-types/utils/evaluateTemplate.d.ts @@ -0,0 +1,2 @@ +import { EvaluateOptions } from 
"../types"; +export declare const evaluateTemplate: (template: string, options: EvaluateOptions) => string; diff --git a/amplify/functions/deleteDocument/node_modules/@smithy/util-endpoints/dist-types/utils/evaluateTreeRule.d.ts b/amplify/functions/deleteDocument/node_modules/@smithy/util-endpoints/dist-types/utils/evaluateTreeRule.d.ts new file mode 100644 index 0000000..2564388 --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@smithy/util-endpoints/dist-types/utils/evaluateTreeRule.d.ts @@ -0,0 +1,3 @@ +import { EndpointV2 } from "@smithy/types"; +import { EvaluateOptions, TreeRuleObject } from "../types"; +export declare const evaluateTreeRule: (treeRule: TreeRuleObject, options: EvaluateOptions) => EndpointV2 | undefined; diff --git a/amplify/functions/deleteDocument/node_modules/@smithy/util-endpoints/dist-types/utils/getEndpointHeaders.d.ts b/amplify/functions/deleteDocument/node_modules/@smithy/util-endpoints/dist-types/utils/getEndpointHeaders.d.ts new file mode 100644 index 0000000..a802565 --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@smithy/util-endpoints/dist-types/utils/getEndpointHeaders.d.ts @@ -0,0 +1,2 @@ +import { EndpointObjectHeaders, EvaluateOptions } from "../types"; +export declare const getEndpointHeaders: (headers: EndpointObjectHeaders, options: EvaluateOptions) => {}; diff --git a/amplify/functions/deleteDocument/node_modules/@smithy/util-endpoints/dist-types/utils/getEndpointProperties.d.ts b/amplify/functions/deleteDocument/node_modules/@smithy/util-endpoints/dist-types/utils/getEndpointProperties.d.ts new file mode 100644 index 0000000..9c83bb0 --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@smithy/util-endpoints/dist-types/utils/getEndpointProperties.d.ts @@ -0,0 +1,2 @@ +import { EndpointObjectProperties, EvaluateOptions } from "../types"; +export declare const getEndpointProperties: (properties: EndpointObjectProperties, options: EvaluateOptions) => {}; diff --git 
a/amplify/functions/deleteDocument/node_modules/@smithy/util-endpoints/dist-types/utils/getEndpointProperty.d.ts b/amplify/functions/deleteDocument/node_modules/@smithy/util-endpoints/dist-types/utils/getEndpointProperty.d.ts new file mode 100644 index 0000000..7bc5b82 --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@smithy/util-endpoints/dist-types/utils/getEndpointProperty.d.ts @@ -0,0 +1,3 @@ +import { EndpointObjectProperty } from "@smithy/types"; +import { EvaluateOptions } from "../types"; +export declare const getEndpointProperty: (property: EndpointObjectProperty, options: EvaluateOptions) => EndpointObjectProperty; diff --git a/amplify/functions/deleteDocument/node_modules/@smithy/util-endpoints/dist-types/utils/getEndpointUrl.d.ts b/amplify/functions/deleteDocument/node_modules/@smithy/util-endpoints/dist-types/utils/getEndpointUrl.d.ts new file mode 100644 index 0000000..4ab2289 --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@smithy/util-endpoints/dist-types/utils/getEndpointUrl.d.ts @@ -0,0 +1,2 @@ +import { EvaluateOptions, Expression } from "../types"; +export declare const getEndpointUrl: (endpointUrl: Expression, options: EvaluateOptions) => URL; diff --git a/amplify/functions/deleteDocument/node_modules/@smithy/util-endpoints/dist-types/utils/getReferenceValue.d.ts b/amplify/functions/deleteDocument/node_modules/@smithy/util-endpoints/dist-types/utils/getReferenceValue.d.ts new file mode 100644 index 0000000..3699ec1 --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@smithy/util-endpoints/dist-types/utils/getReferenceValue.d.ts @@ -0,0 +1,2 @@ +import { EvaluateOptions, ReferenceObject } from "../types"; +export declare const getReferenceValue: ({ ref }: ReferenceObject, options: EvaluateOptions) => import("../types").FunctionReturn; diff --git a/amplify/functions/deleteDocument/node_modules/@smithy/util-endpoints/dist-types/utils/index.d.ts 
b/amplify/functions/deleteDocument/node_modules/@smithy/util-endpoints/dist-types/utils/index.d.ts new file mode 100644 index 0000000..b571d02 --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@smithy/util-endpoints/dist-types/utils/index.d.ts @@ -0,0 +1,2 @@ +export * from "./customEndpointFunctions"; +export * from "./evaluateRules"; diff --git a/amplify/functions/deleteDocument/node_modules/@smithy/util-endpoints/package.json b/amplify/functions/deleteDocument/node_modules/@smithy/util-endpoints/package.json new file mode 100644 index 0000000..7c6f9b1 --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@smithy/util-endpoints/package.json @@ -0,0 +1,68 @@ +{ + "name": "@smithy/util-endpoints", + "version": "3.0.2", + "description": "Utilities to help with endpoint resolution.", + "main": "./dist-cjs/index.js", + "module": "./dist-es/index.js", + "scripts": { + "build": "concurrently 'yarn:build:cjs' 'yarn:build:es' 'yarn:build:types && yarn build:types:downlevel'", + "build:cjs": "node ../../scripts/inline util-endpoints", + "build:es": "yarn g:tsc -p tsconfig.es.json", + "build:types": "yarn g:tsc -p tsconfig.types.json", + "build:types:downlevel": "rimraf dist-types/ts3.4 && downlevel-dts dist-types dist-types/ts3.4", + "stage-release": "rimraf ./.release && yarn pack && mkdir ./.release && tar zxvf ./package.tgz --directory ./.release && rm ./package.tgz", + "clean": "rimraf ./dist-* && rimraf *.tsbuildinfo || exit 0", + "lint": "eslint -c ../../.eslintrc.js \"src/**/*.ts\"", + "format": "prettier --config ../../prettier.config.js --ignore-path ../../.prettierignore --write \"**/*.{ts,md,json}\"", + "test": "yarn g:vitest run", + "test:integration": "yarn g:vitest run -c vitest.config.integ.ts", + "test:watch": "yarn g:vitest watch", + "test:integration:watch": "yarn g:vitest watch -c vitest.config.integ.ts" + }, + "keywords": [ + "endpoint" + ], + "author": { + "name": "AWS SDK for JavaScript Team", + "url": 
"https://aws.amazon.com/javascript/" + }, + "license": "Apache-2.0", + "dependencies": { + "@smithy/node-config-provider": "^4.0.2", + "@smithy/types": "^4.2.0", + "tslib": "^2.6.2" + }, + "devDependencies": { + "@types/node": "^18.11.9", + "concurrently": "7.0.0", + "downlevel-dts": "0.10.1", + "rimraf": "3.0.2", + "typedoc": "0.23.23" + }, + "types": "./dist-types/index.d.ts", + "engines": { + "node": ">=18.0.0" + }, + "typesVersions": { + "<4.0": { + "types/*": [ + "types/ts3.4/*" + ] + } + }, + "files": [ + "dist-*/**" + ], + "homepage": "https://github.com/smithy-lang/smithy-typescript/tree/master/packages/util-endpoints", + "repository": { + "type": "git", + "url": "https://github.com/smithy-lang/smithy-typescript.git", + "directory": "packages/util-endpoints" + }, + "typedoc": { + "entryPoint": "src/index.ts" + }, + "publishConfig": { + "directory": ".release/package" + } +} \ No newline at end of file diff --git a/amplify/functions/deleteDocument/node_modules/@smithy/util-hex-encoding/LICENSE b/amplify/functions/deleteDocument/node_modules/@smithy/util-hex-encoding/LICENSE new file mode 100644 index 0000000..7b6491b --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@smithy/util-hex-encoding/LICENSE @@ -0,0 +1,201 @@ +Apache License + Version 2.0, January 2004 + http://www.apache.org/licenses/ + + TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION + + 1. Definitions. + + "License" shall mean the terms and conditions for use, reproduction, + and distribution as defined by Sections 1 through 9 of this document. + + "Licensor" shall mean the copyright owner or entity authorized by + the copyright owner that is granting the License. + + "Legal Entity" shall mean the union of the acting entity and all + other entities that control, are controlled by, or are under common + control with that entity. 
For the purposes of this definition, + "control" means (i) the power, direct or indirect, to cause the + direction or management of such entity, whether by contract or + otherwise, or (ii) ownership of fifty percent (50%) or more of the + outstanding shares, or (iii) beneficial ownership of such entity. + + "You" (or "Your") shall mean an individual or Legal Entity + exercising permissions granted by this License. + + "Source" form shall mean the preferred form for making modifications, + including but not limited to software source code, documentation + source, and configuration files. + + "Object" form shall mean any form resulting from mechanical + transformation or translation of a Source form, including but + not limited to compiled object code, generated documentation, + and conversions to other media types. + + "Work" shall mean the work of authorship, whether in Source or + Object form, made available under the License, as indicated by a + copyright notice that is included in or attached to the work + (an example is provided in the Appendix below). + + "Derivative Works" shall mean any work, whether in Source or Object + form, that is based on (or derived from) the Work and for which the + editorial revisions, annotations, elaborations, or other modifications + represent, as a whole, an original work of authorship. For the purposes + of this License, Derivative Works shall not include works that remain + separable from, or merely link (or bind by name) to the interfaces of, + the Work and Derivative Works thereof. + + "Contribution" shall mean any work of authorship, including + the original version of the Work and any modifications or additions + to that Work or Derivative Works thereof, that is intentionally + submitted to Licensor for inclusion in the Work by the copyright owner + or by an individual or Legal Entity authorized to submit on behalf of + the copyright owner. 
For the purposes of this definition, "submitted" + means any form of electronic, verbal, or written communication sent + to the Licensor or its representatives, including but not limited to + communication on electronic mailing lists, source code control systems, + and issue tracking systems that are managed by, or on behalf of, the + Licensor for the purpose of discussing and improving the Work, but + excluding communication that is conspicuously marked or otherwise + designated in writing by the copyright owner as "Not a Contribution." + + "Contributor" shall mean Licensor and any individual or Legal Entity + on behalf of whom a Contribution has been received by Licensor and + subsequently incorporated within the Work. + + 2. Grant of Copyright License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + copyright license to reproduce, prepare Derivative Works of, + publicly display, publicly perform, sublicense, and distribute the + Work and such Derivative Works in Source or Object form. + + 3. Grant of Patent License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + (except as stated in this section) patent license to make, have made, + use, offer to sell, sell, import, and otherwise transfer the Work, + where such license applies only to those patent claims licensable + by such Contributor that are necessarily infringed by their + Contribution(s) alone or by combination of their Contribution(s) + with the Work to which such Contribution(s) was submitted. 
If You + institute patent litigation against any entity (including a + cross-claim or counterclaim in a lawsuit) alleging that the Work + or a Contribution incorporated within the Work constitutes direct + or contributory patent infringement, then any patent licenses + granted to You under this License for that Work shall terminate + as of the date such litigation is filed. + + 4. Redistribution. You may reproduce and distribute copies of the + Work or Derivative Works thereof in any medium, with or without + modifications, and in Source or Object form, provided that You + meet the following conditions: + + (a) You must give any other recipients of the Work or + Derivative Works a copy of this License; and + + (b) You must cause any modified files to carry prominent notices + stating that You changed the files; and + + (c) You must retain, in the Source form of any Derivative Works + that You distribute, all copyright, patent, trademark, and + attribution notices from the Source form of the Work, + excluding those notices that do not pertain to any part of + the Derivative Works; and + + (d) If the Work includes a "NOTICE" text file as part of its + distribution, then any Derivative Works that You distribute must + include a readable copy of the attribution notices contained + within such NOTICE file, excluding those notices that do not + pertain to any part of the Derivative Works, in at least one + of the following places: within a NOTICE text file distributed + as part of the Derivative Works; within the Source form or + documentation, if provided along with the Derivative Works; or, + within a display generated by the Derivative Works, if and + wherever such third-party notices normally appear. The contents + of the NOTICE file are for informational purposes only and + do not modify the License. 
You may add Your own attribution + notices within Derivative Works that You distribute, alongside + or as an addendum to the NOTICE text from the Work, provided + that such additional attribution notices cannot be construed + as modifying the License. + + You may add Your own copyright statement to Your modifications and + may provide additional or different license terms and conditions + for use, reproduction, or distribution of Your modifications, or + for any such Derivative Works as a whole, provided Your use, + reproduction, and distribution of the Work otherwise complies with + the conditions stated in this License. + + 5. Submission of Contributions. Unless You explicitly state otherwise, + any Contribution intentionally submitted for inclusion in the Work + by You to the Licensor shall be under the terms and conditions of + this License, without any additional terms or conditions. + Notwithstanding the above, nothing herein shall supersede or modify + the terms of any separate license agreement you may have executed + with Licensor regarding such Contributions. + + 6. Trademarks. This License does not grant permission to use the trade + names, trademarks, service marks, or product names of the Licensor, + except as required for reasonable and customary use in describing the + origin of the Work and reproducing the content of the NOTICE file. + + 7. Disclaimer of Warranty. Unless required by applicable law or + agreed to in writing, Licensor provides the Work (and each + Contributor provides its Contributions) on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or + implied, including, without limitation, any warranties or conditions + of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A + PARTICULAR PURPOSE. You are solely responsible for determining the + appropriateness of using or redistributing the Work and assume any + risks associated with Your exercise of permissions under this License. + + 8. 
Limitation of Liability. In no event and under no legal theory, + whether in tort (including negligence), contract, or otherwise, + unless required by applicable law (such as deliberate and grossly + negligent acts) or agreed to in writing, shall any Contributor be + liable to You for damages, including any direct, indirect, special, + incidental, or consequential damages of any character arising as a + result of this License or out of the use or inability to use the + Work (including but not limited to damages for loss of goodwill, + work stoppage, computer failure or malfunction, or any and all + other commercial damages or losses), even if such Contributor + has been advised of the possibility of such damages. + + 9. Accepting Warranty or Additional Liability. While redistributing + the Work or Derivative Works thereof, You may choose to offer, + and charge a fee for, acceptance of support, warranty, indemnity, + or other liability obligations and/or rights consistent with this + License. However, in accepting such obligations, You may act only + on Your own behalf and on Your sole responsibility, not on behalf + of any other Contributor, and only if You agree to indemnify, + defend, and hold each Contributor harmless for any liability + incurred by, or claims asserted against, such Contributor by reason + of your accepting any such warranty or additional liability. + + END OF TERMS AND CONDITIONS + + APPENDIX: How to apply the Apache License to your work. + + To apply the Apache License to your work, attach the following + boilerplate notice, with the fields enclosed by brackets "{}" + replaced with your own identifying information. (Don't include + the brackets!) The text should be enclosed in the appropriate + comment syntax for the file format. We also recommend that a + file or class name and description of purpose be included on the + same "printed page" as the copyright notice for easier + identification within third-party archives. 
+ + Copyright 2018-2020 Amazon.com, Inc. or its affiliates. All Rights Reserved. + + Licensed under the Apache License, Version 2.0 (the "License"); + you may not use this file except in compliance with the License. + You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + + Unless required by applicable law or agreed to in writing, software + distributed under the License is distributed on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + See the License for the specific language governing permissions and + limitations under the License. \ No newline at end of file diff --git a/amplify/functions/deleteDocument/node_modules/@smithy/util-hex-encoding/README.md b/amplify/functions/deleteDocument/node_modules/@smithy/util-hex-encoding/README.md new file mode 100644 index 0000000..67e4499 --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@smithy/util-hex-encoding/README.md @@ -0,0 +1,4 @@ +# @smithy/util-hex-encoding + +[![NPM version](https://img.shields.io/npm/v/@smithy/util-hex-encoding/latest.svg)](https://www.npmjs.com/package/@smithy/util-hex-encoding) +[![NPM downloads](https://img.shields.io/npm/dm/@smithy/util-hex-encoding.svg)](https://www.npmjs.com/package/@smithy/util-hex-encoding) diff --git a/amplify/functions/deleteDocument/node_modules/@smithy/util-hex-encoding/dist-cjs/index.js b/amplify/functions/deleteDocument/node_modules/@smithy/util-hex-encoding/dist-cjs/index.js new file mode 100644 index 0000000..78a59ea --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@smithy/util-hex-encoding/dist-cjs/index.js @@ -0,0 +1,67 @@ +var __defProp = Object.defineProperty; +var __getOwnPropDesc = Object.getOwnPropertyDescriptor; +var __getOwnPropNames = Object.getOwnPropertyNames; +var __hasOwnProp = Object.prototype.hasOwnProperty; +var __name = (target, value) => __defProp(target, "name", { value, configurable: true }); +var __export = (target, all) => { + 
for (var name in all) + __defProp(target, name, { get: all[name], enumerable: true }); +}; +var __copyProps = (to, from, except, desc) => { + if (from && typeof from === "object" || typeof from === "function") { + for (let key of __getOwnPropNames(from)) + if (!__hasOwnProp.call(to, key) && key !== except) + __defProp(to, key, { get: () => from[key], enumerable: !(desc = __getOwnPropDesc(from, key)) || desc.enumerable }); + } + return to; +}; +var __toCommonJS = (mod) => __copyProps(__defProp({}, "__esModule", { value: true }), mod); + +// src/index.ts +var src_exports = {}; +__export(src_exports, { + fromHex: () => fromHex, + toHex: () => toHex +}); +module.exports = __toCommonJS(src_exports); +var SHORT_TO_HEX = {}; +var HEX_TO_SHORT = {}; +for (let i = 0; i < 256; i++) { + let encodedByte = i.toString(16).toLowerCase(); + if (encodedByte.length === 1) { + encodedByte = `0${encodedByte}`; + } + SHORT_TO_HEX[i] = encodedByte; + HEX_TO_SHORT[encodedByte] = i; +} +function fromHex(encoded) { + if (encoded.length % 2 !== 0) { + throw new Error("Hex encoded strings must have an even number length"); + } + const out = new Uint8Array(encoded.length / 2); + for (let i = 0; i < encoded.length; i += 2) { + const encodedByte = encoded.slice(i, i + 2).toLowerCase(); + if (encodedByte in HEX_TO_SHORT) { + out[i / 2] = HEX_TO_SHORT[encodedByte]; + } else { + throw new Error(`Cannot decode unrecognized sequence ${encodedByte} as hexadecimal`); + } + } + return out; +} +__name(fromHex, "fromHex"); +function toHex(bytes) { + let out = ""; + for (let i = 0; i < bytes.byteLength; i++) { + out += SHORT_TO_HEX[bytes[i]]; + } + return out; +} +__name(toHex, "toHex"); +// Annotate the CommonJS export names for ESM import in node: + +0 && (module.exports = { + fromHex, + toHex +}); + diff --git a/amplify/functions/deleteDocument/node_modules/@smithy/util-hex-encoding/dist-es/index.js b/amplify/functions/deleteDocument/node_modules/@smithy/util-hex-encoding/dist-es/index.js new file mode 
100644 index 0000000..e47b3aa --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@smithy/util-hex-encoding/dist-es/index.js @@ -0,0 +1,33 @@ +const SHORT_TO_HEX = {}; +const HEX_TO_SHORT = {}; +for (let i = 0; i < 256; i++) { + let encodedByte = i.toString(16).toLowerCase(); + if (encodedByte.length === 1) { + encodedByte = `0${encodedByte}`; + } + SHORT_TO_HEX[i] = encodedByte; + HEX_TO_SHORT[encodedByte] = i; +} +export function fromHex(encoded) { + if (encoded.length % 2 !== 0) { + throw new Error("Hex encoded strings must have an even number length"); + } + const out = new Uint8Array(encoded.length / 2); + for (let i = 0; i < encoded.length; i += 2) { + const encodedByte = encoded.slice(i, i + 2).toLowerCase(); + if (encodedByte in HEX_TO_SHORT) { + out[i / 2] = HEX_TO_SHORT[encodedByte]; + } + else { + throw new Error(`Cannot decode unrecognized sequence ${encodedByte} as hexadecimal`); + } + } + return out; +} +export function toHex(bytes) { + let out = ""; + for (let i = 0; i < bytes.byteLength; i++) { + out += SHORT_TO_HEX[bytes[i]]; + } + return out; +} diff --git a/amplify/functions/deleteDocument/node_modules/@smithy/util-hex-encoding/dist-types/index.d.ts b/amplify/functions/deleteDocument/node_modules/@smithy/util-hex-encoding/dist-types/index.d.ts new file mode 100644 index 0000000..9d4307a --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@smithy/util-hex-encoding/dist-types/index.d.ts @@ -0,0 +1,12 @@ +/** + * Converts a hexadecimal encoded string to a Uint8Array of bytes. + * + * @param encoded The hexadecimal encoded string + */ +export declare function fromHex(encoded: string): Uint8Array; +/** + * Converts a Uint8Array of binary data to a hexadecimal encoded string. 
+ * + * @param bytes The binary data to encode + */ +export declare function toHex(bytes: Uint8Array): string; diff --git a/amplify/functions/deleteDocument/node_modules/@smithy/util-hex-encoding/dist-types/ts3.4/index.d.ts b/amplify/functions/deleteDocument/node_modules/@smithy/util-hex-encoding/dist-types/ts3.4/index.d.ts new file mode 100644 index 0000000..02a8848 --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@smithy/util-hex-encoding/dist-types/ts3.4/index.d.ts @@ -0,0 +1,12 @@ +/** + * Converts a hexadecimal encoded string to a Uint8Array of bytes. + * + * @param encoded The hexadecimal encoded string + */ +export declare function fromHex(encoded: string): Uint8Array; +/** + * Converts a Uint8Array of binary data to a hexadecimal encoded string. + * + * @param bytes The binary data to encode + */ +export declare function toHex(bytes: Uint8Array): string; diff --git a/amplify/functions/deleteDocument/node_modules/@smithy/util-hex-encoding/package.json b/amplify/functions/deleteDocument/node_modules/@smithy/util-hex-encoding/package.json new file mode 100644 index 0000000..2c1ba3d --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@smithy/util-hex-encoding/package.json @@ -0,0 +1,60 @@ +{ + "name": "@smithy/util-hex-encoding", + "version": "4.0.0", + "description": "Converts binary buffers to and from lowercase hexadecimal encoding", + "scripts": { + "build": "concurrently 'yarn:build:cjs' 'yarn:build:es' 'yarn:build:types && yarn build:types:downlevel'", + "build:cjs": "node ../../scripts/inline util-hex-encoding", + "build:es": "yarn g:tsc -p tsconfig.es.json", + "build:types": "yarn g:tsc -p tsconfig.types.json", + "build:types:downlevel": "rimraf dist-types/ts3.4 && downlevel-dts dist-types dist-types/ts3.4", + "stage-release": "rimraf ./.release && yarn pack && mkdir ./.release && tar zxvf ./package.tgz --directory ./.release && rm ./package.tgz", + "clean": "rimraf ./dist-* && rimraf *.tsbuildinfo || exit 0", + "lint": 
"eslint -c ../../.eslintrc.js \"src/**/*.ts\"", + "format": "prettier --config ../../prettier.config.js --ignore-path ../.prettierignore --write \"**/*.{ts,md,json}\"", + "test": "yarn g:vitest run", + "test:watch": "yarn g:vitest watch" + }, + "author": { + "name": "AWS SDK for JavaScript Team", + "url": "https://aws.amazon.com/javascript/" + }, + "license": "Apache-2.0", + "main": "./dist-cjs/index.js", + "module": "./dist-es/index.js", + "dependencies": { + "tslib": "^2.6.2" + }, + "types": "./dist-types/index.d.ts", + "engines": { + "node": ">=18.0.0" + }, + "typesVersions": { + "<4.0": { + "dist-types/*": [ + "dist-types/ts3.4/*" + ] + } + }, + "files": [ + "dist-*/**" + ], + "homepage": "https://github.com/awslabs/smithy-typescript/tree/main/packages/util-hex-encoding", + "repository": { + "type": "git", + "url": "https://github.com/awslabs/smithy-typescript.git", + "directory": "packages/util-hex-encoding" + }, + "devDependencies": { + "concurrently": "7.0.0", + "downlevel-dts": "0.10.1", + "rimraf": "3.0.2", + "typedoc": "0.23.23" + }, + "typedoc": { + "entryPoint": "src/index.ts" + }, + "publishConfig": { + "directory": ".release/package" + } +} \ No newline at end of file diff --git a/amplify/functions/deleteDocument/node_modules/@smithy/util-middleware/LICENSE b/amplify/functions/deleteDocument/node_modules/@smithy/util-middleware/LICENSE new file mode 100644 index 0000000..a1895fa --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@smithy/util-middleware/LICENSE @@ -0,0 +1,201 @@ +Apache License + Version 2.0, January 2004 + http://www.apache.org/licenses/ + + TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION + + 1. Definitions. + + "License" shall mean the terms and conditions for use, reproduction, + and distribution as defined by Sections 1 through 9 of this document. + + "Licensor" shall mean the copyright owner or entity authorized by + the copyright owner that is granting the License. 
+ + "Legal Entity" shall mean the union of the acting entity and all + other entities that control, are controlled by, or are under common + control with that entity. For the purposes of this definition, + "control" means (i) the power, direct or indirect, to cause the + direction or management of such entity, whether by contract or + otherwise, or (ii) ownership of fifty percent (50%) or more of the + outstanding shares, or (iii) beneficial ownership of such entity. + + "You" (or "Your") shall mean an individual or Legal Entity + exercising permissions granted by this License. + + "Source" form shall mean the preferred form for making modifications, + including but not limited to software source code, documentation + source, and configuration files. + + "Object" form shall mean any form resulting from mechanical + transformation or translation of a Source form, including but + not limited to compiled object code, generated documentation, + and conversions to other media types. + + "Work" shall mean the work of authorship, whether in Source or + Object form, made available under the License, as indicated by a + copyright notice that is included in or attached to the work + (an example is provided in the Appendix below). + + "Derivative Works" shall mean any work, whether in Source or Object + form, that is based on (or derived from) the Work and for which the + editorial revisions, annotations, elaborations, or other modifications + represent, as a whole, an original work of authorship. For the purposes + of this License, Derivative Works shall not include works that remain + separable from, or merely link (or bind by name) to the interfaces of, + the Work and Derivative Works thereof. 
+ + "Contribution" shall mean any work of authorship, including + the original version of the Work and any modifications or additions + to that Work or Derivative Works thereof, that is intentionally + submitted to Licensor for inclusion in the Work by the copyright owner + or by an individual or Legal Entity authorized to submit on behalf of + the copyright owner. For the purposes of this definition, "submitted" + means any form of electronic, verbal, or written communication sent + to the Licensor or its representatives, including but not limited to + communication on electronic mailing lists, source code control systems, + and issue tracking systems that are managed by, or on behalf of, the + Licensor for the purpose of discussing and improving the Work, but + excluding communication that is conspicuously marked or otherwise + designated in writing by the copyright owner as "Not a Contribution." + + "Contributor" shall mean Licensor and any individual or Legal Entity + on behalf of whom a Contribution has been received by Licensor and + subsequently incorporated within the Work. + + 2. Grant of Copyright License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + copyright license to reproduce, prepare Derivative Works of, + publicly display, publicly perform, sublicense, and distribute the + Work and such Derivative Works in Source or Object form. + + 3. Grant of Patent License. 
Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + (except as stated in this section) patent license to make, have made, + use, offer to sell, sell, import, and otherwise transfer the Work, + where such license applies only to those patent claims licensable + by such Contributor that are necessarily infringed by their + Contribution(s) alone or by combination of their Contribution(s) + with the Work to which such Contribution(s) was submitted. If You + institute patent litigation against any entity (including a + cross-claim or counterclaim in a lawsuit) alleging that the Work + or a Contribution incorporated within the Work constitutes direct + or contributory patent infringement, then any patent licenses + granted to You under this License for that Work shall terminate + as of the date such litigation is filed. + + 4. Redistribution. You may reproduce and distribute copies of the + Work or Derivative Works thereof in any medium, with or without + modifications, and in Source or Object form, provided that You + meet the following conditions: + + (a) You must give any other recipients of the Work or + Derivative Works a copy of this License; and + + (b) You must cause any modified files to carry prominent notices + stating that You changed the files; and + + (c) You must retain, in the Source form of any Derivative Works + that You distribute, all copyright, patent, trademark, and + attribution notices from the Source form of the Work, + excluding those notices that do not pertain to any part of + the Derivative Works; and + + (d) If the Work includes a "NOTICE" text file as part of its + distribution, then any Derivative Works that You distribute must + include a readable copy of the attribution notices contained + within such NOTICE file, excluding those notices that do not + pertain to any part of the Derivative Works, in at least one + of 
the following places: within a NOTICE text file distributed + as part of the Derivative Works; within the Source form or + documentation, if provided along with the Derivative Works; or, + within a display generated by the Derivative Works, if and + wherever such third-party notices normally appear. The contents + of the NOTICE file are for informational purposes only and + do not modify the License. You may add Your own attribution + notices within Derivative Works that You distribute, alongside + or as an addendum to the NOTICE text from the Work, provided + that such additional attribution notices cannot be construed + as modifying the License. + + You may add Your own copyright statement to Your modifications and + may provide additional or different license terms and conditions + for use, reproduction, or distribution of Your modifications, or + for any such Derivative Works as a whole, provided Your use, + reproduction, and distribution of the Work otherwise complies with + the conditions stated in this License. + + 5. Submission of Contributions. Unless You explicitly state otherwise, + any Contribution intentionally submitted for inclusion in the Work + by You to the Licensor shall be under the terms and conditions of + this License, without any additional terms or conditions. + Notwithstanding the above, nothing herein shall supersede or modify + the terms of any separate license agreement you may have executed + with Licensor regarding such Contributions. + + 6. Trademarks. This License does not grant permission to use the trade + names, trademarks, service marks, or product names of the Licensor, + except as required for reasonable and customary use in describing the + origin of the Work and reproducing the content of the NOTICE file. + + 7. Disclaimer of Warranty. 
Unless required by applicable law or + agreed to in writing, Licensor provides the Work (and each + Contributor provides its Contributions) on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or + implied, including, without limitation, any warranties or conditions + of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A + PARTICULAR PURPOSE. You are solely responsible for determining the + appropriateness of using or redistributing the Work and assume any + risks associated with Your exercise of permissions under this License. + + 8. Limitation of Liability. In no event and under no legal theory, + whether in tort (including negligence), contract, or otherwise, + unless required by applicable law (such as deliberate and grossly + negligent acts) or agreed to in writing, shall any Contributor be + liable to You for damages, including any direct, indirect, special, + incidental, or consequential damages of any character arising as a + result of this License or out of the use or inability to use the + Work (including but not limited to damages for loss of goodwill, + work stoppage, computer failure or malfunction, or any and all + other commercial damages or losses), even if such Contributor + has been advised of the possibility of such damages. + + 9. Accepting Warranty or Additional Liability. While redistributing + the Work or Derivative Works thereof, You may choose to offer, + and charge a fee for, acceptance of support, warranty, indemnity, + or other liability obligations and/or rights consistent with this + License. However, in accepting such obligations, You may act only + on Your own behalf and on Your sole responsibility, not on behalf + of any other Contributor, and only if You agree to indemnify, + defend, and hold each Contributor harmless for any liability + incurred by, or claims asserted against, such Contributor by reason + of your accepting any such warranty or additional liability. 
+ + END OF TERMS AND CONDITIONS + + APPENDIX: How to apply the Apache License to your work. + + To apply the Apache License to your work, attach the following + boilerplate notice, with the fields enclosed by brackets "{}" + replaced with your own identifying information. (Don't include + the brackets!) The text should be enclosed in the appropriate + comment syntax for the file format. We also recommend that a + file or class name and description of purpose be included on the + same "printed page" as the copyright notice for easier + identification within third-party archives. + + Copyright 2021 Amazon.com, Inc. or its affiliates. All Rights Reserved. + + Licensed under the Apache License, Version 2.0 (the "License"); + you may not use this file except in compliance with the License. + You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + + Unless required by applicable law or agreed to in writing, software + distributed under the License is distributed on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + See the License for the specific language governing permissions and + limitations under the License. \ No newline at end of file diff --git a/amplify/functions/deleteDocument/node_modules/@smithy/util-middleware/README.md b/amplify/functions/deleteDocument/node_modules/@smithy/util-middleware/README.md new file mode 100644 index 0000000..f043cfa --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@smithy/util-middleware/README.md @@ -0,0 +1,12 @@ +# @smithy/util-middleware + +[![NPM version](https://img.shields.io/npm/v/@smithy/util-middleware/latest.svg)](https://www.npmjs.com/package/@smithy/util-middleware) +[![NPM downloads](https://img.shields.io/npm/dm/@smithy/util-middleware.svg)](https://www.npmjs.com/package/@smithy/util-middleware) + +> An internal package + +This package provides shared utilities for middleware. 
+ +## Usage + +You probably shouldn't, at least directly. diff --git a/amplify/functions/deleteDocument/node_modules/@smithy/util-middleware/dist-cjs/getSmithyContext.js b/amplify/functions/deleteDocument/node_modules/@smithy/util-middleware/dist-cjs/getSmithyContext.js new file mode 100644 index 0000000..532e610 --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@smithy/util-middleware/dist-cjs/getSmithyContext.js @@ -0,0 +1 @@ +module.exports = require("./index.js"); \ No newline at end of file diff --git a/amplify/functions/deleteDocument/node_modules/@smithy/util-middleware/dist-cjs/index.js b/amplify/functions/deleteDocument/node_modules/@smithy/util-middleware/dist-cjs/index.js new file mode 100644 index 0000000..dfccf17 --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@smithy/util-middleware/dist-cjs/index.js @@ -0,0 +1,45 @@ +var __defProp = Object.defineProperty; +var __getOwnPropDesc = Object.getOwnPropertyDescriptor; +var __getOwnPropNames = Object.getOwnPropertyNames; +var __hasOwnProp = Object.prototype.hasOwnProperty; +var __name = (target, value) => __defProp(target, "name", { value, configurable: true }); +var __export = (target, all) => { + for (var name in all) + __defProp(target, name, { get: all[name], enumerable: true }); +}; +var __copyProps = (to, from, except, desc) => { + if (from && typeof from === "object" || typeof from === "function") { + for (let key of __getOwnPropNames(from)) + if (!__hasOwnProp.call(to, key) && key !== except) + __defProp(to, key, { get: () => from[key], enumerable: !(desc = __getOwnPropDesc(from, key)) || desc.enumerable }); + } + return to; +}; +var __toCommonJS = (mod) => __copyProps(__defProp({}, "__esModule", { value: true }), mod); + +// src/index.ts +var src_exports = {}; +__export(src_exports, { + getSmithyContext: () => getSmithyContext, + normalizeProvider: () => normalizeProvider +}); +module.exports = __toCommonJS(src_exports); + +// src/getSmithyContext.ts +var 
import_types = require("@smithy/types"); +var getSmithyContext = /* @__PURE__ */ __name((context) => context[import_types.SMITHY_CONTEXT_KEY] || (context[import_types.SMITHY_CONTEXT_KEY] = {}), "getSmithyContext"); + +// src/normalizeProvider.ts +var normalizeProvider = /* @__PURE__ */ __name((input) => { + if (typeof input === "function") + return input; + const promisified = Promise.resolve(input); + return () => promisified; +}, "normalizeProvider"); +// Annotate the CommonJS export names for ESM import in node: + +0 && (module.exports = { + getSmithyContext, + normalizeProvider +}); + diff --git a/amplify/functions/deleteDocument/node_modules/@smithy/util-middleware/dist-cjs/normalizeProvider.js b/amplify/functions/deleteDocument/node_modules/@smithy/util-middleware/dist-cjs/normalizeProvider.js new file mode 100644 index 0000000..532e610 --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@smithy/util-middleware/dist-cjs/normalizeProvider.js @@ -0,0 +1 @@ +module.exports = require("./index.js"); \ No newline at end of file diff --git a/amplify/functions/deleteDocument/node_modules/@smithy/util-middleware/dist-es/getSmithyContext.js b/amplify/functions/deleteDocument/node_modules/@smithy/util-middleware/dist-es/getSmithyContext.js new file mode 100644 index 0000000..3848a0c --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@smithy/util-middleware/dist-es/getSmithyContext.js @@ -0,0 +1,2 @@ +import { SMITHY_CONTEXT_KEY } from "@smithy/types"; +export const getSmithyContext = (context) => context[SMITHY_CONTEXT_KEY] || (context[SMITHY_CONTEXT_KEY] = {}); diff --git a/amplify/functions/deleteDocument/node_modules/@smithy/util-middleware/dist-es/index.js b/amplify/functions/deleteDocument/node_modules/@smithy/util-middleware/dist-es/index.js new file mode 100644 index 0000000..484290d --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@smithy/util-middleware/dist-es/index.js @@ -0,0 +1,2 @@ +export * from 
"./getSmithyContext"; +export * from "./normalizeProvider"; diff --git a/amplify/functions/deleteDocument/node_modules/@smithy/util-middleware/dist-es/normalizeProvider.js b/amplify/functions/deleteDocument/node_modules/@smithy/util-middleware/dist-es/normalizeProvider.js new file mode 100644 index 0000000..a83ea99 --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@smithy/util-middleware/dist-es/normalizeProvider.js @@ -0,0 +1,6 @@ +export const normalizeProvider = (input) => { + if (typeof input === "function") + return input; + const promisified = Promise.resolve(input); + return () => promisified; +}; diff --git a/amplify/functions/deleteDocument/node_modules/@smithy/util-middleware/dist-types/getSmithyContext.d.ts b/amplify/functions/deleteDocument/node_modules/@smithy/util-middleware/dist-types/getSmithyContext.d.ts new file mode 100644 index 0000000..523ee47 --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@smithy/util-middleware/dist-types/getSmithyContext.d.ts @@ -0,0 +1,5 @@ +import { HandlerExecutionContext } from "@smithy/types"; +/** + * @internal + */ +export declare const getSmithyContext: (context: HandlerExecutionContext) => Record; diff --git a/amplify/functions/deleteDocument/node_modules/@smithy/util-middleware/dist-types/index.d.ts b/amplify/functions/deleteDocument/node_modules/@smithy/util-middleware/dist-types/index.d.ts new file mode 100644 index 0000000..3869284 --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@smithy/util-middleware/dist-types/index.d.ts @@ -0,0 +1,8 @@ +/** + * @internal + */ +export * from "./getSmithyContext"; +/** + * @internal + */ +export * from "./normalizeProvider"; diff --git a/amplify/functions/deleteDocument/node_modules/@smithy/util-middleware/dist-types/normalizeProvider.d.ts b/amplify/functions/deleteDocument/node_modules/@smithy/util-middleware/dist-types/normalizeProvider.d.ts new file mode 100644 index 0000000..4fe2d9a --- /dev/null +++ 
b/amplify/functions/deleteDocument/node_modules/@smithy/util-middleware/dist-types/normalizeProvider.d.ts @@ -0,0 +1,7 @@ +import { Provider } from "@smithy/types"; +/** + * @internal + * + * @returns a provider function for the input value if it isn't already one. + */ +export declare const normalizeProvider: (input: T | Provider) => Provider; diff --git a/amplify/functions/deleteDocument/node_modules/@smithy/util-middleware/dist-types/ts3.4/getSmithyContext.d.ts b/amplify/functions/deleteDocument/node_modules/@smithy/util-middleware/dist-types/ts3.4/getSmithyContext.d.ts new file mode 100644 index 0000000..14cd7c4 --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@smithy/util-middleware/dist-types/ts3.4/getSmithyContext.d.ts @@ -0,0 +1,5 @@ +import { HandlerExecutionContext } from "@smithy/types"; +/** + * @internal + */ +export declare const getSmithyContext: (context: HandlerExecutionContext) => Record; diff --git a/amplify/functions/deleteDocument/node_modules/@smithy/util-middleware/dist-types/ts3.4/index.d.ts b/amplify/functions/deleteDocument/node_modules/@smithy/util-middleware/dist-types/ts3.4/index.d.ts new file mode 100644 index 0000000..ab07159 --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@smithy/util-middleware/dist-types/ts3.4/index.d.ts @@ -0,0 +1,8 @@ +/** + * @internal + */ +export * from "./getSmithyContext"; +/** + * @internal + */ +export * from "./normalizeProvider"; diff --git a/amplify/functions/deleteDocument/node_modules/@smithy/util-middleware/dist-types/ts3.4/normalizeProvider.d.ts b/amplify/functions/deleteDocument/node_modules/@smithy/util-middleware/dist-types/ts3.4/normalizeProvider.d.ts new file mode 100644 index 0000000..594e8fa --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@smithy/util-middleware/dist-types/ts3.4/normalizeProvider.d.ts @@ -0,0 +1,7 @@ +import { Provider } from "@smithy/types"; +/** + * @internal + * + * @returns a provider function for the input value if it 
isn't already one. + */ +export declare const normalizeProvider: (input: T | Provider) => Provider; diff --git a/amplify/functions/deleteDocument/node_modules/@smithy/util-middleware/package.json b/amplify/functions/deleteDocument/node_modules/@smithy/util-middleware/package.json new file mode 100644 index 0000000..b9aa172 --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@smithy/util-middleware/package.json @@ -0,0 +1,66 @@ +{ + "name": "@smithy/util-middleware", + "version": "4.0.2", + "description": "Shared utilities for to be used in middleware packages.", + "main": "./dist-cjs/index.js", + "module": "./dist-es/index.js", + "scripts": { + "build": "concurrently 'yarn:build:cjs' 'yarn:build:es' 'yarn:build:types && yarn build:types:downlevel'", + "build:cjs": "node ../../scripts/inline util-middleware", + "build:es": "yarn g:tsc -p tsconfig.es.json", + "build:types": "yarn g:tsc -p tsconfig.types.json", + "build:types:downlevel": "rimraf dist-types/ts3.4 && downlevel-dts dist-types dist-types/ts3.4", + "stage-release": "rimraf ./.release && yarn pack && mkdir ./.release && tar zxvf ./package.tgz --directory ./.release && rm ./package.tgz", + "clean": "rimraf ./dist-* && rimraf *.tsbuildinfo || exit 0", + "lint": "eslint -c ../../.eslintrc.js \"src/**/*.ts\"", + "format": "prettier --config ../../prettier.config.js --ignore-path ../../.prettierignore --write \"**/*.{ts,md,json}\"", + "test": "yarn g:vitest run", + "test:watch": "yarn g:vitest watch" + }, + "keywords": [ + "aws", + "middleware" + ], + "author": { + "name": "AWS SDK for JavaScript Team", + "url": "https://aws.amazon.com/javascript/" + }, + "license": "Apache-2.0", + "dependencies": { + "@smithy/types": "^4.2.0", + "tslib": "^2.6.2" + }, + "devDependencies": { + "@types/node": "^18.11.9", + "concurrently": "7.0.0", + "downlevel-dts": "0.10.1", + "rimraf": "3.0.2", + "typedoc": "0.23.23" + }, + "types": "./dist-types/index.d.ts", + "engines": { + "node": ">=18.0.0" + }, + 
"typesVersions": { + "<4.0": { + "types/*": [ + "types/ts3.4/*" + ] + } + }, + "files": [ + "dist-*/**" + ], + "homepage": "https://github.com/smithy-lang/smithy-typescript/tree/master/packages/util-middleware", + "repository": { + "type": "git", + "url": "https://github.com/smithy-lang/smithy-typescript.git", + "directory": "packages/util-middleware" + }, + "typedoc": { + "entryPoint": "src/index.ts" + }, + "publishConfig": { + "directory": ".release/package" + } +} \ No newline at end of file diff --git a/amplify/functions/deleteDocument/node_modules/@smithy/util-retry/LICENSE b/amplify/functions/deleteDocument/node_modules/@smithy/util-retry/LICENSE new file mode 100644 index 0000000..a1895fa --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@smithy/util-retry/LICENSE @@ -0,0 +1,201 @@ +Apache License + Version 2.0, January 2004 + http://www.apache.org/licenses/ + + TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION + + 1. Definitions. + + "License" shall mean the terms and conditions for use, reproduction, + and distribution as defined by Sections 1 through 9 of this document. + + "Licensor" shall mean the copyright owner or entity authorized by + the copyright owner that is granting the License. + + "Legal Entity" shall mean the union of the acting entity and all + other entities that control, are controlled by, or are under common + control with that entity. For the purposes of this definition, + "control" means (i) the power, direct or indirect, to cause the + direction or management of such entity, whether by contract or + otherwise, or (ii) ownership of fifty percent (50%) or more of the + outstanding shares, or (iii) beneficial ownership of such entity. + + "You" (or "Your") shall mean an individual or Legal Entity + exercising permissions granted by this License. 
+ + "Source" form shall mean the preferred form for making modifications, + including but not limited to software source code, documentation + source, and configuration files. + + "Object" form shall mean any form resulting from mechanical + transformation or translation of a Source form, including but + not limited to compiled object code, generated documentation, + and conversions to other media types. + + "Work" shall mean the work of authorship, whether in Source or + Object form, made available under the License, as indicated by a + copyright notice that is included in or attached to the work + (an example is provided in the Appendix below). + + "Derivative Works" shall mean any work, whether in Source or Object + form, that is based on (or derived from) the Work and for which the + editorial revisions, annotations, elaborations, or other modifications + represent, as a whole, an original work of authorship. For the purposes + of this License, Derivative Works shall not include works that remain + separable from, or merely link (or bind by name) to the interfaces of, + the Work and Derivative Works thereof. + + "Contribution" shall mean any work of authorship, including + the original version of the Work and any modifications or additions + to that Work or Derivative Works thereof, that is intentionally + submitted to Licensor for inclusion in the Work by the copyright owner + or by an individual or Legal Entity authorized to submit on behalf of + the copyright owner. 
For the purposes of this definition, "submitted" + means any form of electronic, verbal, or written communication sent + to the Licensor or its representatives, including but not limited to + communication on electronic mailing lists, source code control systems, + and issue tracking systems that are managed by, or on behalf of, the + Licensor for the purpose of discussing and improving the Work, but + excluding communication that is conspicuously marked or otherwise + designated in writing by the copyright owner as "Not a Contribution." + + "Contributor" shall mean Licensor and any individual or Legal Entity + on behalf of whom a Contribution has been received by Licensor and + subsequently incorporated within the Work. + + 2. Grant of Copyright License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + copyright license to reproduce, prepare Derivative Works of, + publicly display, publicly perform, sublicense, and distribute the + Work and such Derivative Works in Source or Object form. + + 3. Grant of Patent License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + (except as stated in this section) patent license to make, have made, + use, offer to sell, sell, import, and otherwise transfer the Work, + where such license applies only to those patent claims licensable + by such Contributor that are necessarily infringed by their + Contribution(s) alone or by combination of their Contribution(s) + with the Work to which such Contribution(s) was submitted. 
If You + institute patent litigation against any entity (including a + cross-claim or counterclaim in a lawsuit) alleging that the Work + or a Contribution incorporated within the Work constitutes direct + or contributory patent infringement, then any patent licenses + granted to You under this License for that Work shall terminate + as of the date such litigation is filed. + + 4. Redistribution. You may reproduce and distribute copies of the + Work or Derivative Works thereof in any medium, with or without + modifications, and in Source or Object form, provided that You + meet the following conditions: + + (a) You must give any other recipients of the Work or + Derivative Works a copy of this License; and + + (b) You must cause any modified files to carry prominent notices + stating that You changed the files; and + + (c) You must retain, in the Source form of any Derivative Works + that You distribute, all copyright, patent, trademark, and + attribution notices from the Source form of the Work, + excluding those notices that do not pertain to any part of + the Derivative Works; and + + (d) If the Work includes a "NOTICE" text file as part of its + distribution, then any Derivative Works that You distribute must + include a readable copy of the attribution notices contained + within such NOTICE file, excluding those notices that do not + pertain to any part of the Derivative Works, in at least one + of the following places: within a NOTICE text file distributed + as part of the Derivative Works; within the Source form or + documentation, if provided along with the Derivative Works; or, + within a display generated by the Derivative Works, if and + wherever such third-party notices normally appear. The contents + of the NOTICE file are for informational purposes only and + do not modify the License. 
You may add Your own attribution + notices within Derivative Works that You distribute, alongside + or as an addendum to the NOTICE text from the Work, provided + that such additional attribution notices cannot be construed + as modifying the License. + + You may add Your own copyright statement to Your modifications and + may provide additional or different license terms and conditions + for use, reproduction, or distribution of Your modifications, or + for any such Derivative Works as a whole, provided Your use, + reproduction, and distribution of the Work otherwise complies with + the conditions stated in this License. + + 5. Submission of Contributions. Unless You explicitly state otherwise, + any Contribution intentionally submitted for inclusion in the Work + by You to the Licensor shall be under the terms and conditions of + this License, without any additional terms or conditions. + Notwithstanding the above, nothing herein shall supersede or modify + the terms of any separate license agreement you may have executed + with Licensor regarding such Contributions. + + 6. Trademarks. This License does not grant permission to use the trade + names, trademarks, service marks, or product names of the Licensor, + except as required for reasonable and customary use in describing the + origin of the Work and reproducing the content of the NOTICE file. + + 7. Disclaimer of Warranty. Unless required by applicable law or + agreed to in writing, Licensor provides the Work (and each + Contributor provides its Contributions) on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or + implied, including, without limitation, any warranties or conditions + of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A + PARTICULAR PURPOSE. You are solely responsible for determining the + appropriateness of using or redistributing the Work and assume any + risks associated with Your exercise of permissions under this License. + + 8. 
Limitation of Liability. In no event and under no legal theory, + whether in tort (including negligence), contract, or otherwise, + unless required by applicable law (such as deliberate and grossly + negligent acts) or agreed to in writing, shall any Contributor be + liable to You for damages, including any direct, indirect, special, + incidental, or consequential damages of any character arising as a + result of this License or out of the use or inability to use the + Work (including but not limited to damages for loss of goodwill, + work stoppage, computer failure or malfunction, or any and all + other commercial damages or losses), even if such Contributor + has been advised of the possibility of such damages. + + 9. Accepting Warranty or Additional Liability. While redistributing + the Work or Derivative Works thereof, You may choose to offer, + and charge a fee for, acceptance of support, warranty, indemnity, + or other liability obligations and/or rights consistent with this + License. However, in accepting such obligations, You may act only + on Your own behalf and on Your sole responsibility, not on behalf + of any other Contributor, and only if You agree to indemnify, + defend, and hold each Contributor harmless for any liability + incurred by, or claims asserted against, such Contributor by reason + of your accepting any such warranty or additional liability. + + END OF TERMS AND CONDITIONS + + APPENDIX: How to apply the Apache License to your work. + + To apply the Apache License to your work, attach the following + boilerplate notice, with the fields enclosed by brackets "{}" + replaced with your own identifying information. (Don't include + the brackets!) The text should be enclosed in the appropriate + comment syntax for the file format. We also recommend that a + file or class name and description of purpose be included on the + same "printed page" as the copyright notice for easier + identification within third-party archives. 
+ + Copyright 2021 Amazon.com, Inc. or its affiliates. All Rights Reserved. + + Licensed under the Apache License, Version 2.0 (the "License"); + you may not use this file except in compliance with the License. + You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + + Unless required by applicable law or agreed to in writing, software + distributed under the License is distributed on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + See the License for the specific language governing permissions and + limitations under the License. \ No newline at end of file diff --git a/amplify/functions/deleteDocument/node_modules/@smithy/util-retry/README.md b/amplify/functions/deleteDocument/node_modules/@smithy/util-retry/README.md new file mode 100644 index 0000000..bcf11a9 --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@smithy/util-retry/README.md @@ -0,0 +1,78 @@ +# @smithy/util-retry + +[![NPM version](https://img.shields.io/npm/v/@smithy/util-retry/latest.svg)](https://www.npmjs.com/package/@smithy/util-retry) +[![NPM downloads](https://img.shields.io/npm/dm/@smithy/util-retry.svg)](https://www.npmjs.com/package/@smithy/util-retry) + +This package provides shared utilities for retries. + +## Usage + +### Default + +By default, each client already has a default retry strategy. The default retry count is 3, and +only retryable errors will be retried. + +[AWS Documentation: Retry behavior](https://docs.aws.amazon.com/sdkref/latest/guide/feature-retry-behavior.html). + +```js +import { S3Client } from "@aws-sdk/client-s3"; + +const client = new S3Client({}); // default retry strategy included. +``` + +### MaxAttempts + +If you want to change the number of attempts, you can provide `maxAttempts` configuration during client creation. 
+ +```js +import { S3Client } from "@aws-sdk/client-s3"; + +const client = new S3Client({ maxAttempts: 4 }); +``` + +This is recommended because the `StandardRetryStrategy` includes backoff calculation, +deciding whether an error should be retried, and a retry token counter. + +### MaxAttempts and BackoffComputation + +If you want to change the number of attempts and use a custom delay +computation, you can use the `ConfiguredRetryStrategy` from `@smithy/util-retry`. + +```js +import { S3Client } from "@aws-sdk/client-s3"; +import { ConfiguredRetryStrategy } from "@smithy/util-retry"; + +const client = new S3Client({ + retryStrategy: new ConfiguredRetryStrategy( + 4, // max attempts. + (attempt: number) => 100 + attempt * 1000 // backoff function. + ), +}); +``` + +This example sets the backoff at 100ms plus 1s per attempt. + +### MaxAttempts and RetryStrategy + +If you provide both `maxAttempts` and `retryStrategy`, the `retryStrategy` will +get precedence as it's more specific. + +```js +import { S3Client } from "@aws-sdk/client-s3"; +import { ConfiguredRetryStrategy } from "@smithy/util-retry"; + +const client = new S3Client({ + maxAttempts: 2, // ignored. + retryStrategy: new ConfiguredRetryStrategy( + 4, // used. + (attempt: number) => 100 + attempt * 1000 // backoff function. + ), +}); +``` + +### Further customization + +You can implement the `RetryStrategyV2` interface. 
+ +Source: https://github.com/smithy-lang/smithy-typescript/blob/main/packages/types/src/retry.ts +API Docs: https://docs.aws.amazon.com/AWSJavaScriptSDK/v3/latest/Package/-smithy-types/Interface/RetryStrategyV2/ diff --git a/amplify/functions/deleteDocument/node_modules/@smithy/util-retry/dist-cjs/AdaptiveRetryStrategy.js b/amplify/functions/deleteDocument/node_modules/@smithy/util-retry/dist-cjs/AdaptiveRetryStrategy.js new file mode 100644 index 0000000..532e610 --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@smithy/util-retry/dist-cjs/AdaptiveRetryStrategy.js @@ -0,0 +1 @@ +module.exports = require("./index.js"); \ No newline at end of file diff --git a/amplify/functions/deleteDocument/node_modules/@smithy/util-retry/dist-cjs/ConfiguredRetryStrategy.js b/amplify/functions/deleteDocument/node_modules/@smithy/util-retry/dist-cjs/ConfiguredRetryStrategy.js new file mode 100644 index 0000000..532e610 --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@smithy/util-retry/dist-cjs/ConfiguredRetryStrategy.js @@ -0,0 +1 @@ +module.exports = require("./index.js"); \ No newline at end of file diff --git a/amplify/functions/deleteDocument/node_modules/@smithy/util-retry/dist-cjs/DefaultRateLimiter.js b/amplify/functions/deleteDocument/node_modules/@smithy/util-retry/dist-cjs/DefaultRateLimiter.js new file mode 100644 index 0000000..532e610 --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@smithy/util-retry/dist-cjs/DefaultRateLimiter.js @@ -0,0 +1 @@ +module.exports = require("./index.js"); \ No newline at end of file diff --git a/amplify/functions/deleteDocument/node_modules/@smithy/util-retry/dist-cjs/StandardRetryStrategy.js b/amplify/functions/deleteDocument/node_modules/@smithy/util-retry/dist-cjs/StandardRetryStrategy.js new file mode 100644 index 0000000..532e610 --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@smithy/util-retry/dist-cjs/StandardRetryStrategy.js @@ -0,0 +1 @@ +module.exports = 
require("./index.js"); \ No newline at end of file diff --git a/amplify/functions/deleteDocument/node_modules/@smithy/util-retry/dist-cjs/config.js b/amplify/functions/deleteDocument/node_modules/@smithy/util-retry/dist-cjs/config.js new file mode 100644 index 0000000..532e610 --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@smithy/util-retry/dist-cjs/config.js @@ -0,0 +1 @@ +module.exports = require("./index.js"); \ No newline at end of file diff --git a/amplify/functions/deleteDocument/node_modules/@smithy/util-retry/dist-cjs/constants.js b/amplify/functions/deleteDocument/node_modules/@smithy/util-retry/dist-cjs/constants.js new file mode 100644 index 0000000..532e610 --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@smithy/util-retry/dist-cjs/constants.js @@ -0,0 +1 @@ +module.exports = require("./index.js"); \ No newline at end of file diff --git a/amplify/functions/deleteDocument/node_modules/@smithy/util-retry/dist-cjs/defaultRetryBackoffStrategy.js b/amplify/functions/deleteDocument/node_modules/@smithy/util-retry/dist-cjs/defaultRetryBackoffStrategy.js new file mode 100644 index 0000000..532e610 --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@smithy/util-retry/dist-cjs/defaultRetryBackoffStrategy.js @@ -0,0 +1 @@ +module.exports = require("./index.js"); \ No newline at end of file diff --git a/amplify/functions/deleteDocument/node_modules/@smithy/util-retry/dist-cjs/defaultRetryToken.js b/amplify/functions/deleteDocument/node_modules/@smithy/util-retry/dist-cjs/defaultRetryToken.js new file mode 100644 index 0000000..532e610 --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@smithy/util-retry/dist-cjs/defaultRetryToken.js @@ -0,0 +1 @@ +module.exports = require("./index.js"); \ No newline at end of file diff --git a/amplify/functions/deleteDocument/node_modules/@smithy/util-retry/dist-cjs/index.js b/amplify/functions/deleteDocument/node_modules/@smithy/util-retry/dist-cjs/index.js new 
file mode 100644 index 0000000..699447a --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@smithy/util-retry/dist-cjs/index.js @@ -0,0 +1,358 @@ +var __defProp = Object.defineProperty; +var __getOwnPropDesc = Object.getOwnPropertyDescriptor; +var __getOwnPropNames = Object.getOwnPropertyNames; +var __hasOwnProp = Object.prototype.hasOwnProperty; +var __name = (target, value) => __defProp(target, "name", { value, configurable: true }); +var __export = (target, all) => { + for (var name in all) + __defProp(target, name, { get: all[name], enumerable: true }); +}; +var __copyProps = (to, from, except, desc) => { + if (from && typeof from === "object" || typeof from === "function") { + for (let key of __getOwnPropNames(from)) + if (!__hasOwnProp.call(to, key) && key !== except) + __defProp(to, key, { get: () => from[key], enumerable: !(desc = __getOwnPropDesc(from, key)) || desc.enumerable }); + } + return to; +}; +var __toCommonJS = (mod) => __copyProps(__defProp({}, "__esModule", { value: true }), mod); + +// src/index.ts +var src_exports = {}; +__export(src_exports, { + AdaptiveRetryStrategy: () => AdaptiveRetryStrategy, + ConfiguredRetryStrategy: () => ConfiguredRetryStrategy, + DEFAULT_MAX_ATTEMPTS: () => DEFAULT_MAX_ATTEMPTS, + DEFAULT_RETRY_DELAY_BASE: () => DEFAULT_RETRY_DELAY_BASE, + DEFAULT_RETRY_MODE: () => DEFAULT_RETRY_MODE, + DefaultRateLimiter: () => DefaultRateLimiter, + INITIAL_RETRY_TOKENS: () => INITIAL_RETRY_TOKENS, + INVOCATION_ID_HEADER: () => INVOCATION_ID_HEADER, + MAXIMUM_RETRY_DELAY: () => MAXIMUM_RETRY_DELAY, + NO_RETRY_INCREMENT: () => NO_RETRY_INCREMENT, + REQUEST_HEADER: () => REQUEST_HEADER, + RETRY_COST: () => RETRY_COST, + RETRY_MODES: () => RETRY_MODES, + StandardRetryStrategy: () => StandardRetryStrategy, + THROTTLING_RETRY_DELAY_BASE: () => THROTTLING_RETRY_DELAY_BASE, + TIMEOUT_RETRY_COST: () => TIMEOUT_RETRY_COST +}); +module.exports = __toCommonJS(src_exports); + +// src/config.ts +var RETRY_MODES = /* @__PURE__ */ 
((RETRY_MODES2) => { + RETRY_MODES2["STANDARD"] = "standard"; + RETRY_MODES2["ADAPTIVE"] = "adaptive"; + return RETRY_MODES2; +})(RETRY_MODES || {}); +var DEFAULT_MAX_ATTEMPTS = 3; +var DEFAULT_RETRY_MODE = "standard" /* STANDARD */; + +// src/DefaultRateLimiter.ts +var import_service_error_classification = require("@smithy/service-error-classification"); +var DefaultRateLimiter = class _DefaultRateLimiter { + constructor(options) { + // Pre-set state variables + this.currentCapacity = 0; + this.enabled = false; + this.lastMaxRate = 0; + this.measuredTxRate = 0; + this.requestCount = 0; + this.lastTimestamp = 0; + this.timeWindow = 0; + this.beta = options?.beta ?? 0.7; + this.minCapacity = options?.minCapacity ?? 1; + this.minFillRate = options?.minFillRate ?? 0.5; + this.scaleConstant = options?.scaleConstant ?? 0.4; + this.smooth = options?.smooth ?? 0.8; + const currentTimeInSeconds = this.getCurrentTimeInSeconds(); + this.lastThrottleTime = currentTimeInSeconds; + this.lastTxRateBucket = Math.floor(this.getCurrentTimeInSeconds()); + this.fillRate = this.minFillRate; + this.maxCapacity = this.minCapacity; + } + static { + __name(this, "DefaultRateLimiter"); + } + static { + /** + * Only used in testing. 
+ */ + this.setTimeoutFn = setTimeout; + } + getCurrentTimeInSeconds() { + return Date.now() / 1e3; + } + async getSendToken() { + return this.acquireTokenBucket(1); + } + async acquireTokenBucket(amount) { + if (!this.enabled) { + return; + } + this.refillTokenBucket(); + if (amount > this.currentCapacity) { + const delay = (amount - this.currentCapacity) / this.fillRate * 1e3; + await new Promise((resolve) => _DefaultRateLimiter.setTimeoutFn(resolve, delay)); + } + this.currentCapacity = this.currentCapacity - amount; + } + refillTokenBucket() { + const timestamp = this.getCurrentTimeInSeconds(); + if (!this.lastTimestamp) { + this.lastTimestamp = timestamp; + return; + } + const fillAmount = (timestamp - this.lastTimestamp) * this.fillRate; + this.currentCapacity = Math.min(this.maxCapacity, this.currentCapacity + fillAmount); + this.lastTimestamp = timestamp; + } + updateClientSendingRate(response) { + let calculatedRate; + this.updateMeasuredRate(); + if ((0, import_service_error_classification.isThrottlingError)(response)) { + const rateToUse = !this.enabled ? 
this.measuredTxRate : Math.min(this.measuredTxRate, this.fillRate); + this.lastMaxRate = rateToUse; + this.calculateTimeWindow(); + this.lastThrottleTime = this.getCurrentTimeInSeconds(); + calculatedRate = this.cubicThrottle(rateToUse); + this.enableTokenBucket(); + } else { + this.calculateTimeWindow(); + calculatedRate = this.cubicSuccess(this.getCurrentTimeInSeconds()); + } + const newRate = Math.min(calculatedRate, 2 * this.measuredTxRate); + this.updateTokenBucketRate(newRate); + } + calculateTimeWindow() { + this.timeWindow = this.getPrecise(Math.pow(this.lastMaxRate * (1 - this.beta) / this.scaleConstant, 1 / 3)); + } + cubicThrottle(rateToUse) { + return this.getPrecise(rateToUse * this.beta); + } + cubicSuccess(timestamp) { + return this.getPrecise( + this.scaleConstant * Math.pow(timestamp - this.lastThrottleTime - this.timeWindow, 3) + this.lastMaxRate + ); + } + enableTokenBucket() { + this.enabled = true; + } + updateTokenBucketRate(newRate) { + this.refillTokenBucket(); + this.fillRate = Math.max(newRate, this.minFillRate); + this.maxCapacity = Math.max(newRate, this.minCapacity); + this.currentCapacity = Math.min(this.currentCapacity, this.maxCapacity); + } + updateMeasuredRate() { + const t = this.getCurrentTimeInSeconds(); + const timeBucket = Math.floor(t * 2) / 2; + this.requestCount++; + if (timeBucket > this.lastTxRateBucket) { + const currentRate = this.requestCount / (timeBucket - this.lastTxRateBucket); + this.measuredTxRate = this.getPrecise(currentRate * this.smooth + this.measuredTxRate * (1 - this.smooth)); + this.requestCount = 0; + this.lastTxRateBucket = timeBucket; + } + } + getPrecise(num) { + return parseFloat(num.toFixed(8)); + } +}; + +// src/constants.ts +var DEFAULT_RETRY_DELAY_BASE = 100; +var MAXIMUM_RETRY_DELAY = 20 * 1e3; +var THROTTLING_RETRY_DELAY_BASE = 500; +var INITIAL_RETRY_TOKENS = 500; +var RETRY_COST = 5; +var TIMEOUT_RETRY_COST = 10; +var NO_RETRY_INCREMENT = 1; +var INVOCATION_ID_HEADER = 
"amz-sdk-invocation-id"; +var REQUEST_HEADER = "amz-sdk-request"; + +// src/defaultRetryBackoffStrategy.ts +var getDefaultRetryBackoffStrategy = /* @__PURE__ */ __name(() => { + let delayBase = DEFAULT_RETRY_DELAY_BASE; + const computeNextBackoffDelay = /* @__PURE__ */ __name((attempts) => { + return Math.floor(Math.min(MAXIMUM_RETRY_DELAY, Math.random() * 2 ** attempts * delayBase)); + }, "computeNextBackoffDelay"); + const setDelayBase = /* @__PURE__ */ __name((delay) => { + delayBase = delay; + }, "setDelayBase"); + return { + computeNextBackoffDelay, + setDelayBase + }; +}, "getDefaultRetryBackoffStrategy"); + +// src/defaultRetryToken.ts +var createDefaultRetryToken = /* @__PURE__ */ __name(({ + retryDelay, + retryCount, + retryCost +}) => { + const getRetryCount = /* @__PURE__ */ __name(() => retryCount, "getRetryCount"); + const getRetryDelay = /* @__PURE__ */ __name(() => Math.min(MAXIMUM_RETRY_DELAY, retryDelay), "getRetryDelay"); + const getRetryCost = /* @__PURE__ */ __name(() => retryCost, "getRetryCost"); + return { + getRetryCount, + getRetryDelay, + getRetryCost + }; +}, "createDefaultRetryToken"); + +// src/StandardRetryStrategy.ts +var StandardRetryStrategy = class { + constructor(maxAttempts) { + this.maxAttempts = maxAttempts; + this.mode = "standard" /* STANDARD */; + this.capacity = INITIAL_RETRY_TOKENS; + this.retryBackoffStrategy = getDefaultRetryBackoffStrategy(); + this.maxAttemptsProvider = typeof maxAttempts === "function" ? 
maxAttempts : async () => maxAttempts; + } + static { + __name(this, "StandardRetryStrategy"); + } + // eslint-disable-next-line @typescript-eslint/no-unused-vars + async acquireInitialRetryToken(retryTokenScope) { + return createDefaultRetryToken({ + retryDelay: DEFAULT_RETRY_DELAY_BASE, + retryCount: 0 + }); + } + async refreshRetryTokenForRetry(token, errorInfo) { + const maxAttempts = await this.getMaxAttempts(); + if (this.shouldRetry(token, errorInfo, maxAttempts)) { + const errorType = errorInfo.errorType; + this.retryBackoffStrategy.setDelayBase( + errorType === "THROTTLING" ? THROTTLING_RETRY_DELAY_BASE : DEFAULT_RETRY_DELAY_BASE + ); + const delayFromErrorType = this.retryBackoffStrategy.computeNextBackoffDelay(token.getRetryCount()); + const retryDelay = errorInfo.retryAfterHint ? Math.max(errorInfo.retryAfterHint.getTime() - Date.now() || 0, delayFromErrorType) : delayFromErrorType; + const capacityCost = this.getCapacityCost(errorType); + this.capacity -= capacityCost; + return createDefaultRetryToken({ + retryDelay, + retryCount: token.getRetryCount() + 1, + retryCost: capacityCost + }); + } + throw new Error("No retry token available"); + } + recordSuccess(token) { + this.capacity = Math.max(INITIAL_RETRY_TOKENS, this.capacity + (token.getRetryCost() ?? NO_RETRY_INCREMENT)); + } + /** + * @returns the current available retry capacity. + * + * This number decreases when retries are executed and refills when requests or retries succeed. + */ + getCapacity() { + return this.capacity; + } + async getMaxAttempts() { + try { + return await this.maxAttemptsProvider(); + } catch (error) { + console.warn(`Max attempts provider could not resolve. 
Using default of ${DEFAULT_MAX_ATTEMPTS}`); + return DEFAULT_MAX_ATTEMPTS; + } + } + shouldRetry(tokenToRenew, errorInfo, maxAttempts) { + const attempts = tokenToRenew.getRetryCount() + 1; + return attempts < maxAttempts && this.capacity >= this.getCapacityCost(errorInfo.errorType) && this.isRetryableError(errorInfo.errorType); + } + getCapacityCost(errorType) { + return errorType === "TRANSIENT" ? TIMEOUT_RETRY_COST : RETRY_COST; + } + isRetryableError(errorType) { + return errorType === "THROTTLING" || errorType === "TRANSIENT"; + } +}; + +// src/AdaptiveRetryStrategy.ts +var AdaptiveRetryStrategy = class { + constructor(maxAttemptsProvider, options) { + this.maxAttemptsProvider = maxAttemptsProvider; + this.mode = "adaptive" /* ADAPTIVE */; + const { rateLimiter } = options ?? {}; + this.rateLimiter = rateLimiter ?? new DefaultRateLimiter(); + this.standardRetryStrategy = new StandardRetryStrategy(maxAttemptsProvider); + } + static { + __name(this, "AdaptiveRetryStrategy"); + } + async acquireInitialRetryToken(retryTokenScope) { + await this.rateLimiter.getSendToken(); + return this.standardRetryStrategy.acquireInitialRetryToken(retryTokenScope); + } + async refreshRetryTokenForRetry(tokenToRenew, errorInfo) { + this.rateLimiter.updateClientSendingRate(errorInfo); + return this.standardRetryStrategy.refreshRetryTokenForRetry(tokenToRenew, errorInfo); + } + recordSuccess(token) { + this.rateLimiter.updateClientSendingRate({}); + this.standardRetryStrategy.recordSuccess(token); + } +}; + +// src/ConfiguredRetryStrategy.ts +var ConfiguredRetryStrategy = class extends StandardRetryStrategy { + static { + __name(this, "ConfiguredRetryStrategy"); + } + /** + * @param maxAttempts - the maximum number of retry attempts allowed. + * e.g., if set to 3, then 4 total requests are possible. + * @param computeNextBackoffDelay - a millisecond delay for each retry or a function that takes the retry attempt + * and returns the delay. + * + * @example exponential backoff. 
+ * ```js + * new Client({ + * retryStrategy: new ConfiguredRetryStrategy(3, (attempt) => attempt ** 2) + * }); + * ``` + * @example constant delay. + * ```js + * new Client({ + * retryStrategy: new ConfiguredRetryStrategy(3, 2000) + * }); + * ``` + */ + constructor(maxAttempts, computeNextBackoffDelay = DEFAULT_RETRY_DELAY_BASE) { + super(typeof maxAttempts === "function" ? maxAttempts : async () => maxAttempts); + if (typeof computeNextBackoffDelay === "number") { + this.computeNextBackoffDelay = () => computeNextBackoffDelay; + } else { + this.computeNextBackoffDelay = computeNextBackoffDelay; + } + } + async refreshRetryTokenForRetry(tokenToRenew, errorInfo) { + const token = await super.refreshRetryTokenForRetry(tokenToRenew, errorInfo); + token.getRetryDelay = () => this.computeNextBackoffDelay(token.getRetryCount()); + return token; + } +}; +// Annotate the CommonJS export names for ESM import in node: + +0 && (module.exports = { + AdaptiveRetryStrategy, + ConfiguredRetryStrategy, + DefaultRateLimiter, + StandardRetryStrategy, + RETRY_MODES, + DEFAULT_MAX_ATTEMPTS, + DEFAULT_RETRY_MODE, + DEFAULT_RETRY_DELAY_BASE, + MAXIMUM_RETRY_DELAY, + THROTTLING_RETRY_DELAY_BASE, + INITIAL_RETRY_TOKENS, + RETRY_COST, + TIMEOUT_RETRY_COST, + NO_RETRY_INCREMENT, + INVOCATION_ID_HEADER, + REQUEST_HEADER +}); + diff --git a/amplify/functions/deleteDocument/node_modules/@smithy/util-retry/dist-cjs/types.js b/amplify/functions/deleteDocument/node_modules/@smithy/util-retry/dist-cjs/types.js new file mode 100644 index 0000000..532e610 --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@smithy/util-retry/dist-cjs/types.js @@ -0,0 +1 @@ +module.exports = require("./index.js"); \ No newline at end of file diff --git a/amplify/functions/deleteDocument/node_modules/@smithy/util-retry/dist-es/AdaptiveRetryStrategy.js b/amplify/functions/deleteDocument/node_modules/@smithy/util-retry/dist-es/AdaptiveRetryStrategy.js new file mode 100644 index 0000000..e20cf0f --- 
/dev/null +++ b/amplify/functions/deleteDocument/node_modules/@smithy/util-retry/dist-es/AdaptiveRetryStrategy.js @@ -0,0 +1,24 @@ +import { RETRY_MODES } from "./config"; +import { DefaultRateLimiter } from "./DefaultRateLimiter"; +import { StandardRetryStrategy } from "./StandardRetryStrategy"; +export class AdaptiveRetryStrategy { + constructor(maxAttemptsProvider, options) { + this.maxAttemptsProvider = maxAttemptsProvider; + this.mode = RETRY_MODES.ADAPTIVE; + const { rateLimiter } = options ?? {}; + this.rateLimiter = rateLimiter ?? new DefaultRateLimiter(); + this.standardRetryStrategy = new StandardRetryStrategy(maxAttemptsProvider); + } + async acquireInitialRetryToken(retryTokenScope) { + await this.rateLimiter.getSendToken(); + return this.standardRetryStrategy.acquireInitialRetryToken(retryTokenScope); + } + async refreshRetryTokenForRetry(tokenToRenew, errorInfo) { + this.rateLimiter.updateClientSendingRate(errorInfo); + return this.standardRetryStrategy.refreshRetryTokenForRetry(tokenToRenew, errorInfo); + } + recordSuccess(token) { + this.rateLimiter.updateClientSendingRate({}); + this.standardRetryStrategy.recordSuccess(token); + } +} diff --git a/amplify/functions/deleteDocument/node_modules/@smithy/util-retry/dist-es/ConfiguredRetryStrategy.js b/amplify/functions/deleteDocument/node_modules/@smithy/util-retry/dist-es/ConfiguredRetryStrategy.js new file mode 100644 index 0000000..541bdb2 --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@smithy/util-retry/dist-es/ConfiguredRetryStrategy.js @@ -0,0 +1,18 @@ +import { DEFAULT_RETRY_DELAY_BASE } from "./constants"; +import { StandardRetryStrategy } from "./StandardRetryStrategy"; +export class ConfiguredRetryStrategy extends StandardRetryStrategy { + constructor(maxAttempts, computeNextBackoffDelay = DEFAULT_RETRY_DELAY_BASE) { + super(typeof maxAttempts === "function" ? 
maxAttempts : async () => maxAttempts); + if (typeof computeNextBackoffDelay === "number") { + this.computeNextBackoffDelay = () => computeNextBackoffDelay; + } + else { + this.computeNextBackoffDelay = computeNextBackoffDelay; + } + } + async refreshRetryTokenForRetry(tokenToRenew, errorInfo) { + const token = await super.refreshRetryTokenForRetry(tokenToRenew, errorInfo); + token.getRetryDelay = () => this.computeNextBackoffDelay(token.getRetryCount()); + return token; + } +} diff --git a/amplify/functions/deleteDocument/node_modules/@smithy/util-retry/dist-es/DefaultRateLimiter.js b/amplify/functions/deleteDocument/node_modules/@smithy/util-retry/dist-es/DefaultRateLimiter.js new file mode 100644 index 0000000..15240c8 --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@smithy/util-retry/dist-es/DefaultRateLimiter.js @@ -0,0 +1,100 @@ +import { isThrottlingError } from "@smithy/service-error-classification"; +export class DefaultRateLimiter { + constructor(options) { + this.currentCapacity = 0; + this.enabled = false; + this.lastMaxRate = 0; + this.measuredTxRate = 0; + this.requestCount = 0; + this.lastTimestamp = 0; + this.timeWindow = 0; + this.beta = options?.beta ?? 0.7; + this.minCapacity = options?.minCapacity ?? 1; + this.minFillRate = options?.minFillRate ?? 0.5; + this.scaleConstant = options?.scaleConstant ?? 0.4; + this.smooth = options?.smooth ?? 
0.8; + const currentTimeInSeconds = this.getCurrentTimeInSeconds(); + this.lastThrottleTime = currentTimeInSeconds; + this.lastTxRateBucket = Math.floor(this.getCurrentTimeInSeconds()); + this.fillRate = this.minFillRate; + this.maxCapacity = this.minCapacity; + } + getCurrentTimeInSeconds() { + return Date.now() / 1000; + } + async getSendToken() { + return this.acquireTokenBucket(1); + } + async acquireTokenBucket(amount) { + if (!this.enabled) { + return; + } + this.refillTokenBucket(); + if (amount > this.currentCapacity) { + const delay = ((amount - this.currentCapacity) / this.fillRate) * 1000; + await new Promise((resolve) => DefaultRateLimiter.setTimeoutFn(resolve, delay)); + } + this.currentCapacity = this.currentCapacity - amount; + } + refillTokenBucket() { + const timestamp = this.getCurrentTimeInSeconds(); + if (!this.lastTimestamp) { + this.lastTimestamp = timestamp; + return; + } + const fillAmount = (timestamp - this.lastTimestamp) * this.fillRate; + this.currentCapacity = Math.min(this.maxCapacity, this.currentCapacity + fillAmount); + this.lastTimestamp = timestamp; + } + updateClientSendingRate(response) { + let calculatedRate; + this.updateMeasuredRate(); + if (isThrottlingError(response)) { + const rateToUse = !this.enabled ? 
this.measuredTxRate : Math.min(this.measuredTxRate, this.fillRate); + this.lastMaxRate = rateToUse; + this.calculateTimeWindow(); + this.lastThrottleTime = this.getCurrentTimeInSeconds(); + calculatedRate = this.cubicThrottle(rateToUse); + this.enableTokenBucket(); + } + else { + this.calculateTimeWindow(); + calculatedRate = this.cubicSuccess(this.getCurrentTimeInSeconds()); + } + const newRate = Math.min(calculatedRate, 2 * this.measuredTxRate); + this.updateTokenBucketRate(newRate); + } + calculateTimeWindow() { + this.timeWindow = this.getPrecise(Math.pow((this.lastMaxRate * (1 - this.beta)) / this.scaleConstant, 1 / 3)); + } + cubicThrottle(rateToUse) { + return this.getPrecise(rateToUse * this.beta); + } + cubicSuccess(timestamp) { + return this.getPrecise(this.scaleConstant * Math.pow(timestamp - this.lastThrottleTime - this.timeWindow, 3) + this.lastMaxRate); + } + enableTokenBucket() { + this.enabled = true; + } + updateTokenBucketRate(newRate) { + this.refillTokenBucket(); + this.fillRate = Math.max(newRate, this.minFillRate); + this.maxCapacity = Math.max(newRate, this.minCapacity); + this.currentCapacity = Math.min(this.currentCapacity, this.maxCapacity); + } + updateMeasuredRate() { + const t = this.getCurrentTimeInSeconds(); + const timeBucket = Math.floor(t * 2) / 2; + this.requestCount++; + if (timeBucket > this.lastTxRateBucket) { + const currentRate = this.requestCount / (timeBucket - this.lastTxRateBucket); + this.measuredTxRate = this.getPrecise(currentRate * this.smooth + this.measuredTxRate * (1 - this.smooth)); + this.requestCount = 0; + this.lastTxRateBucket = timeBucket; + } + } + getPrecise(num) { + return parseFloat(num.toFixed(8)); + } +} +DefaultRateLimiter.setTimeoutFn = setTimeout; diff --git a/amplify/functions/deleteDocument/node_modules/@smithy/util-retry/dist-es/StandardRetryStrategy.js b/amplify/functions/deleteDocument/node_modules/@smithy/util-retry/dist-es/StandardRetryStrategy.js new file mode 100644 index 0000000..07adde0 
--- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@smithy/util-retry/dist-es/StandardRetryStrategy.js @@ -0,0 +1,65 @@ +import { DEFAULT_MAX_ATTEMPTS, RETRY_MODES } from "./config"; +import { DEFAULT_RETRY_DELAY_BASE, INITIAL_RETRY_TOKENS, NO_RETRY_INCREMENT, RETRY_COST, THROTTLING_RETRY_DELAY_BASE, TIMEOUT_RETRY_COST, } from "./constants"; +import { getDefaultRetryBackoffStrategy } from "./defaultRetryBackoffStrategy"; +import { createDefaultRetryToken } from "./defaultRetryToken"; +export class StandardRetryStrategy { + constructor(maxAttempts) { + this.maxAttempts = maxAttempts; + this.mode = RETRY_MODES.STANDARD; + this.capacity = INITIAL_RETRY_TOKENS; + this.retryBackoffStrategy = getDefaultRetryBackoffStrategy(); + this.maxAttemptsProvider = typeof maxAttempts === "function" ? maxAttempts : async () => maxAttempts; + } + async acquireInitialRetryToken(retryTokenScope) { + return createDefaultRetryToken({ + retryDelay: DEFAULT_RETRY_DELAY_BASE, + retryCount: 0, + }); + } + async refreshRetryTokenForRetry(token, errorInfo) { + const maxAttempts = await this.getMaxAttempts(); + if (this.shouldRetry(token, errorInfo, maxAttempts)) { + const errorType = errorInfo.errorType; + this.retryBackoffStrategy.setDelayBase(errorType === "THROTTLING" ? THROTTLING_RETRY_DELAY_BASE : DEFAULT_RETRY_DELAY_BASE); + const delayFromErrorType = this.retryBackoffStrategy.computeNextBackoffDelay(token.getRetryCount()); + const retryDelay = errorInfo.retryAfterHint + ? Math.max(errorInfo.retryAfterHint.getTime() - Date.now() || 0, delayFromErrorType) + : delayFromErrorType; + const capacityCost = this.getCapacityCost(errorType); + this.capacity -= capacityCost; + return createDefaultRetryToken({ + retryDelay, + retryCount: token.getRetryCount() + 1, + retryCost: capacityCost, + }); + } + throw new Error("No retry token available"); + } + recordSuccess(token) { + this.capacity = Math.max(INITIAL_RETRY_TOKENS, this.capacity + (token.getRetryCost() ?? 
NO_RETRY_INCREMENT)); + } + getCapacity() { + return this.capacity; + } + async getMaxAttempts() { + try { + return await this.maxAttemptsProvider(); + } + catch (error) { + console.warn(`Max attempts provider could not resolve. Using default of ${DEFAULT_MAX_ATTEMPTS}`); + return DEFAULT_MAX_ATTEMPTS; + } + } + shouldRetry(tokenToRenew, errorInfo, maxAttempts) { + const attempts = tokenToRenew.getRetryCount() + 1; + return (attempts < maxAttempts && + this.capacity >= this.getCapacityCost(errorInfo.errorType) && + this.isRetryableError(errorInfo.errorType)); + } + getCapacityCost(errorType) { + return errorType === "TRANSIENT" ? TIMEOUT_RETRY_COST : RETRY_COST; + } + isRetryableError(errorType) { + return errorType === "THROTTLING" || errorType === "TRANSIENT"; + } +} diff --git a/amplify/functions/deleteDocument/node_modules/@smithy/util-retry/dist-es/config.js b/amplify/functions/deleteDocument/node_modules/@smithy/util-retry/dist-es/config.js new file mode 100644 index 0000000..438d42d --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@smithy/util-retry/dist-es/config.js @@ -0,0 +1,7 @@ +export var RETRY_MODES; +(function (RETRY_MODES) { + RETRY_MODES["STANDARD"] = "standard"; + RETRY_MODES["ADAPTIVE"] = "adaptive"; +})(RETRY_MODES || (RETRY_MODES = {})); +export const DEFAULT_MAX_ATTEMPTS = 3; +export const DEFAULT_RETRY_MODE = RETRY_MODES.STANDARD; diff --git a/amplify/functions/deleteDocument/node_modules/@smithy/util-retry/dist-es/constants.js b/amplify/functions/deleteDocument/node_modules/@smithy/util-retry/dist-es/constants.js new file mode 100644 index 0000000..0876f8e --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@smithy/util-retry/dist-es/constants.js @@ -0,0 +1,9 @@ +export const DEFAULT_RETRY_DELAY_BASE = 100; +export const MAXIMUM_RETRY_DELAY = 20 * 1000; +export const THROTTLING_RETRY_DELAY_BASE = 500; +export const INITIAL_RETRY_TOKENS = 500; +export const RETRY_COST = 5; +export const TIMEOUT_RETRY_COST = 10; 
+export const NO_RETRY_INCREMENT = 1; +export const INVOCATION_ID_HEADER = "amz-sdk-invocation-id"; +export const REQUEST_HEADER = "amz-sdk-request"; diff --git a/amplify/functions/deleteDocument/node_modules/@smithy/util-retry/dist-es/defaultRetryBackoffStrategy.js b/amplify/functions/deleteDocument/node_modules/@smithy/util-retry/dist-es/defaultRetryBackoffStrategy.js new file mode 100644 index 0000000..ce04bc5 --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@smithy/util-retry/dist-es/defaultRetryBackoffStrategy.js @@ -0,0 +1,14 @@ +import { DEFAULT_RETRY_DELAY_BASE, MAXIMUM_RETRY_DELAY } from "./constants"; +export const getDefaultRetryBackoffStrategy = () => { + let delayBase = DEFAULT_RETRY_DELAY_BASE; + const computeNextBackoffDelay = (attempts) => { + return Math.floor(Math.min(MAXIMUM_RETRY_DELAY, Math.random() * 2 ** attempts * delayBase)); + }; + const setDelayBase = (delay) => { + delayBase = delay; + }; + return { + computeNextBackoffDelay, + setDelayBase, + }; +}; diff --git a/amplify/functions/deleteDocument/node_modules/@smithy/util-retry/dist-es/defaultRetryToken.js b/amplify/functions/deleteDocument/node_modules/@smithy/util-retry/dist-es/defaultRetryToken.js new file mode 100644 index 0000000..203bb66 --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@smithy/util-retry/dist-es/defaultRetryToken.js @@ -0,0 +1,11 @@ +import { MAXIMUM_RETRY_DELAY } from "./constants"; +export const createDefaultRetryToken = ({ retryDelay, retryCount, retryCost, }) => { + const getRetryCount = () => retryCount; + const getRetryDelay = () => Math.min(MAXIMUM_RETRY_DELAY, retryDelay); + const getRetryCost = () => retryCost; + return { + getRetryCount, + getRetryDelay, + getRetryCost, + }; +}; diff --git a/amplify/functions/deleteDocument/node_modules/@smithy/util-retry/dist-es/index.js b/amplify/functions/deleteDocument/node_modules/@smithy/util-retry/dist-es/index.js new file mode 100644 index 0000000..8637ced --- /dev/null +++ 
b/amplify/functions/deleteDocument/node_modules/@smithy/util-retry/dist-es/index.js @@ -0,0 +1,7 @@ +export * from "./AdaptiveRetryStrategy"; +export * from "./ConfiguredRetryStrategy"; +export * from "./DefaultRateLimiter"; +export * from "./StandardRetryStrategy"; +export * from "./config"; +export * from "./constants"; +export * from "./types"; diff --git a/amplify/functions/deleteDocument/node_modules/@smithy/util-retry/dist-es/types.js b/amplify/functions/deleteDocument/node_modules/@smithy/util-retry/dist-es/types.js new file mode 100644 index 0000000..cb0ff5c --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@smithy/util-retry/dist-es/types.js @@ -0,0 +1 @@ +export {}; diff --git a/amplify/functions/deleteDocument/node_modules/@smithy/util-retry/dist-types/AdaptiveRetryStrategy.d.ts b/amplify/functions/deleteDocument/node_modules/@smithy/util-retry/dist-types/AdaptiveRetryStrategy.d.ts new file mode 100644 index 0000000..8092519 --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@smithy/util-retry/dist-types/AdaptiveRetryStrategy.d.ts @@ -0,0 +1,33 @@ +import { Provider, RetryErrorInfo, RetryStrategyV2, RetryToken, StandardRetryToken } from "@smithy/types"; +import { RateLimiter } from "./types"; +/** + * @public + * + * Strategy options to be passed to AdaptiveRetryStrategy + */ +export interface AdaptiveRetryStrategyOptions { + rateLimiter?: RateLimiter; +} +/** + * @public + * + * The AdaptiveRetryStrategy is a retry strategy for executing against a very + * resource constrained set of resources. Care should be taken when using this + * retry strategy. By default, it uses a dynamic backoff delay based on load + * currently perceived against the downstream resource and performs circuit + * breaking to disable retries in the event of high downstream failures using + * the DefaultRateLimiter. 
+ * + * @see {@link StandardRetryStrategy} + * @see {@link DefaultRateLimiter } + */ +export declare class AdaptiveRetryStrategy implements RetryStrategyV2 { + private readonly maxAttemptsProvider; + private rateLimiter; + private standardRetryStrategy; + readonly mode: string; + constructor(maxAttemptsProvider: Provider, options?: AdaptiveRetryStrategyOptions); + acquireInitialRetryToken(retryTokenScope: string): Promise; + refreshRetryTokenForRetry(tokenToRenew: StandardRetryToken, errorInfo: RetryErrorInfo): Promise; + recordSuccess(token: StandardRetryToken): void; +} diff --git a/amplify/functions/deleteDocument/node_modules/@smithy/util-retry/dist-types/ConfiguredRetryStrategy.d.ts b/amplify/functions/deleteDocument/node_modules/@smithy/util-retry/dist-types/ConfiguredRetryStrategy.d.ts new file mode 100644 index 0000000..3250c6d --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@smithy/util-retry/dist-types/ConfiguredRetryStrategy.d.ts @@ -0,0 +1,32 @@ +import type { Provider, RetryBackoffStrategy, RetryErrorInfo, RetryStrategyV2, StandardRetryToken } from "@smithy/types"; +import { StandardRetryStrategy } from "./StandardRetryStrategy"; +/** + * @public + * + * This extension of the StandardRetryStrategy allows customizing the + * backoff computation. + */ +export declare class ConfiguredRetryStrategy extends StandardRetryStrategy implements RetryStrategyV2 { + private readonly computeNextBackoffDelay; + /** + * @param maxAttempts - the maximum number of retry attempts allowed. + * e.g., if set to 3, then 4 total requests are possible. + * @param computeNextBackoffDelay - a millisecond delay for each retry or a function that takes the retry attempt + * and returns the delay. + * + * @example exponential backoff. + * ```js + * new Client({ + * retryStrategy: new ConfiguredRetryStrategy(3, (attempt) => attempt ** 2) + * }); + * ``` + * @example constant delay. 
+ * ```js + * new Client({ + * retryStrategy: new ConfiguredRetryStrategy(3, 2000) + * }); + * ``` + */ + constructor(maxAttempts: number | Provider, computeNextBackoffDelay?: number | RetryBackoffStrategy["computeNextBackoffDelay"]); + refreshRetryTokenForRetry(tokenToRenew: StandardRetryToken, errorInfo: RetryErrorInfo): Promise; +} diff --git a/amplify/functions/deleteDocument/node_modules/@smithy/util-retry/dist-types/DefaultRateLimiter.d.ts b/amplify/functions/deleteDocument/node_modules/@smithy/util-retry/dist-types/DefaultRateLimiter.d.ts new file mode 100644 index 0000000..9d689fc --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@smithy/util-retry/dist-types/DefaultRateLimiter.d.ts @@ -0,0 +1,49 @@ +import { RateLimiter } from "./types"; +/** + * @public + */ +export interface DefaultRateLimiterOptions { + beta?: number; + minCapacity?: number; + minFillRate?: number; + scaleConstant?: number; + smooth?: number; +} +/** + * @public + */ +export declare class DefaultRateLimiter implements RateLimiter { + /** + * Only used in testing. 
+ */ + private static setTimeoutFn; + private beta; + private minCapacity; + private minFillRate; + private scaleConstant; + private smooth; + private currentCapacity; + private enabled; + private lastMaxRate; + private measuredTxRate; + private requestCount; + private fillRate; + private lastThrottleTime; + private lastTimestamp; + private lastTxRateBucket; + private maxCapacity; + private timeWindow; + constructor(options?: DefaultRateLimiterOptions); + private getCurrentTimeInSeconds; + getSendToken(): Promise; + private acquireTokenBucket; + private refillTokenBucket; + updateClientSendingRate(response: any): void; + private calculateTimeWindow; + private cubicThrottle; + private cubicSuccess; + private enableTokenBucket; + private updateTokenBucketRate; + private updateMeasuredRate; + private getPrecise; +} diff --git a/amplify/functions/deleteDocument/node_modules/@smithy/util-retry/dist-types/StandardRetryStrategy.d.ts b/amplify/functions/deleteDocument/node_modules/@smithy/util-retry/dist-types/StandardRetryStrategy.d.ts new file mode 100644 index 0000000..c100ebc --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@smithy/util-retry/dist-types/StandardRetryStrategy.d.ts @@ -0,0 +1,26 @@ +import { Provider, RetryErrorInfo, RetryStrategyV2, StandardRetryToken } from "@smithy/types"; +/** + * @public + */ +export declare class StandardRetryStrategy implements RetryStrategyV2 { + private readonly maxAttempts; + readonly mode: string; + private capacity; + private readonly retryBackoffStrategy; + private readonly maxAttemptsProvider; + constructor(maxAttempts: number); + constructor(maxAttemptsProvider: Provider); + acquireInitialRetryToken(retryTokenScope: string): Promise; + refreshRetryTokenForRetry(token: StandardRetryToken, errorInfo: RetryErrorInfo): Promise; + recordSuccess(token: StandardRetryToken): void; + /** + * @returns the current available retry capacity. 
+ * + * This number decreases when retries are executed and refills when requests or retries succeed. + */ + getCapacity(): number; + private getMaxAttempts; + private shouldRetry; + private getCapacityCost; + private isRetryableError; +} diff --git a/amplify/functions/deleteDocument/node_modules/@smithy/util-retry/dist-types/config.d.ts b/amplify/functions/deleteDocument/node_modules/@smithy/util-retry/dist-types/config.d.ts new file mode 100644 index 0000000..e4e74b3 --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@smithy/util-retry/dist-types/config.d.ts @@ -0,0 +1,20 @@ +/** + * @public + */ +export declare enum RETRY_MODES { + STANDARD = "standard", + ADAPTIVE = "adaptive" +} +/** + * @public + * + * The default value for how many HTTP requests an SDK should make for a + * single SDK operation invocation before giving up + */ +export declare const DEFAULT_MAX_ATTEMPTS = 3; +/** + * @public + * + * The default retry algorithm to use. + */ +export declare const DEFAULT_RETRY_MODE = RETRY_MODES.STANDARD; diff --git a/amplify/functions/deleteDocument/node_modules/@smithy/util-retry/dist-types/constants.d.ts b/amplify/functions/deleteDocument/node_modules/@smithy/util-retry/dist-types/constants.d.ts new file mode 100644 index 0000000..bc7fec8 --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@smithy/util-retry/dist-types/constants.d.ts @@ -0,0 +1,59 @@ +/** + * @public + * + * The base number of milliseconds to use in calculating a suitable cool-down + * time when a retryable error is encountered. + */ +export declare const DEFAULT_RETRY_DELAY_BASE = 100; +/** + * @public + * + * The maximum amount of time (in milliseconds) that will be used as a delay + * between retry attempts. + */ +export declare const MAXIMUM_RETRY_DELAY: number; +/** + * @public + * + * The retry delay base (in milliseconds) to use when a throttling error is + * encountered. 
+ */ +export declare const THROTTLING_RETRY_DELAY_BASE = 500; +/** + * @public + * + * Initial number of retry tokens in Retry Quota + */ +export declare const INITIAL_RETRY_TOKENS = 500; +/** + * @public + * + * The total amount of retry tokens to be decremented from retry token balance. + */ +export declare const RETRY_COST = 5; +/** + * @public + * + * The total amount of retry tokens to be decremented from retry token balance + * when a throttling error is encountered. + */ +export declare const TIMEOUT_RETRY_COST = 10; +/** + * @public + * + * The total amount of retry token to be incremented from retry token balance + * if an SDK operation invocation succeeds without requiring a retry request. + */ +export declare const NO_RETRY_INCREMENT = 1; +/** + * @public + * + * Header name for SDK invocation ID + */ +export declare const INVOCATION_ID_HEADER = "amz-sdk-invocation-id"; +/** + * @public + * + * Header name for request retry information. + */ +export declare const REQUEST_HEADER = "amz-sdk-request"; diff --git a/amplify/functions/deleteDocument/node_modules/@smithy/util-retry/dist-types/defaultRetryBackoffStrategy.d.ts b/amplify/functions/deleteDocument/node_modules/@smithy/util-retry/dist-types/defaultRetryBackoffStrategy.d.ts new file mode 100644 index 0000000..b70eb2d --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@smithy/util-retry/dist-types/defaultRetryBackoffStrategy.d.ts @@ -0,0 +1,5 @@ +import { StandardRetryBackoffStrategy } from "@smithy/types"; +/** + * @internal + */ +export declare const getDefaultRetryBackoffStrategy: () => StandardRetryBackoffStrategy; diff --git a/amplify/functions/deleteDocument/node_modules/@smithy/util-retry/dist-types/defaultRetryToken.d.ts b/amplify/functions/deleteDocument/node_modules/@smithy/util-retry/dist-types/defaultRetryToken.d.ts new file mode 100644 index 0000000..947b68f --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@smithy/util-retry/dist-types/defaultRetryToken.d.ts 
@@ -0,0 +1,9 @@ +import { StandardRetryToken } from "@smithy/types"; +/** + * @internal + */ +export declare const createDefaultRetryToken: ({ retryDelay, retryCount, retryCost, }: { + retryDelay: number; + retryCount: number; + retryCost?: number | undefined; +}) => StandardRetryToken; diff --git a/amplify/functions/deleteDocument/node_modules/@smithy/util-retry/dist-types/index.d.ts b/amplify/functions/deleteDocument/node_modules/@smithy/util-retry/dist-types/index.d.ts new file mode 100644 index 0000000..8637ced --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@smithy/util-retry/dist-types/index.d.ts @@ -0,0 +1,7 @@ +export * from "./AdaptiveRetryStrategy"; +export * from "./ConfiguredRetryStrategy"; +export * from "./DefaultRateLimiter"; +export * from "./StandardRetryStrategy"; +export * from "./config"; +export * from "./constants"; +export * from "./types"; diff --git a/amplify/functions/deleteDocument/node_modules/@smithy/util-retry/dist-types/ts3.4/AdaptiveRetryStrategy.d.ts b/amplify/functions/deleteDocument/node_modules/@smithy/util-retry/dist-types/ts3.4/AdaptiveRetryStrategy.d.ts new file mode 100644 index 0000000..f6b0ef4 --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@smithy/util-retry/dist-types/ts3.4/AdaptiveRetryStrategy.d.ts @@ -0,0 +1,33 @@ +import { Provider, RetryErrorInfo, RetryStrategyV2, RetryToken, StandardRetryToken } from "@smithy/types"; +import { RateLimiter } from "./types"; +/** + * @public + * + * Strategy options to be passed to AdaptiveRetryStrategy + */ +export interface AdaptiveRetryStrategyOptions { + rateLimiter?: RateLimiter; +} +/** + * @public + * + * The AdaptiveRetryStrategy is a retry strategy for executing against a very + * resource constrained set of resources. Care should be taken when using this + * retry strategy. 
By default, it uses a dynamic backoff delay based on load + * currently perceived against the downstream resource and performs circuit + * breaking to disable retries in the event of high downstream failures using + * the DefaultRateLimiter. + * + * @see {@link StandardRetryStrategy} + * @see {@link DefaultRateLimiter } + */ +export declare class AdaptiveRetryStrategy implements RetryStrategyV2 { + private readonly maxAttemptsProvider; + private rateLimiter; + private standardRetryStrategy; + readonly mode: string; + constructor(maxAttemptsProvider: Provider, options?: AdaptiveRetryStrategyOptions); + acquireInitialRetryToken(retryTokenScope: string): Promise; + refreshRetryTokenForRetry(tokenToRenew: StandardRetryToken, errorInfo: RetryErrorInfo): Promise; + recordSuccess(token: StandardRetryToken): void; +} diff --git a/amplify/functions/deleteDocument/node_modules/@smithy/util-retry/dist-types/ts3.4/ConfiguredRetryStrategy.d.ts b/amplify/functions/deleteDocument/node_modules/@smithy/util-retry/dist-types/ts3.4/ConfiguredRetryStrategy.d.ts new file mode 100644 index 0000000..7df2983 --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@smithy/util-retry/dist-types/ts3.4/ConfiguredRetryStrategy.d.ts @@ -0,0 +1,32 @@ +import { Provider, RetryBackoffStrategy, RetryErrorInfo, RetryStrategyV2, StandardRetryToken } from "@smithy/types"; +import { StandardRetryStrategy } from "./StandardRetryStrategy"; +/** + * @public + * + * This extension of the StandardRetryStrategy allows customizing the + * backoff computation. + */ +export declare class ConfiguredRetryStrategy extends StandardRetryStrategy implements RetryStrategyV2 { + private readonly computeNextBackoffDelay; + /** + * @param maxAttempts - the maximum number of retry attempts allowed. + * e.g., if set to 3, then 4 total requests are possible. + * @param computeNextBackoffDelay - a millisecond delay for each retry or a function that takes the retry attempt + * and returns the delay. 
+ * + * @example exponential backoff. + * ```js + * new Client({ + * retryStrategy: new ConfiguredRetryStrategy(3, (attempt) => attempt ** 2) + * }); + * ``` + * @example constant delay. + * ```js + * new Client({ + * retryStrategy: new ConfiguredRetryStrategy(3, 2000) + * }); + * ``` + */ + constructor(maxAttempts: number | Provider, computeNextBackoffDelay?: number | RetryBackoffStrategy["computeNextBackoffDelay"]); + refreshRetryTokenForRetry(tokenToRenew: StandardRetryToken, errorInfo: RetryErrorInfo): Promise; +} diff --git a/amplify/functions/deleteDocument/node_modules/@smithy/util-retry/dist-types/ts3.4/DefaultRateLimiter.d.ts b/amplify/functions/deleteDocument/node_modules/@smithy/util-retry/dist-types/ts3.4/DefaultRateLimiter.d.ts new file mode 100644 index 0000000..9c239d6 --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@smithy/util-retry/dist-types/ts3.4/DefaultRateLimiter.d.ts @@ -0,0 +1,49 @@ +import { RateLimiter } from "./types"; +/** + * @public + */ +export interface DefaultRateLimiterOptions { + beta?: number; + minCapacity?: number; + minFillRate?: number; + scaleConstant?: number; + smooth?: number; +} +/** + * @public + */ +export declare class DefaultRateLimiter implements RateLimiter { + /** + * Only used in testing. 
+ */ + private static setTimeoutFn; + private beta; + private minCapacity; + private minFillRate; + private scaleConstant; + private smooth; + private currentCapacity; + private enabled; + private lastMaxRate; + private measuredTxRate; + private requestCount; + private fillRate; + private lastThrottleTime; + private lastTimestamp; + private lastTxRateBucket; + private maxCapacity; + private timeWindow; + constructor(options?: DefaultRateLimiterOptions); + private getCurrentTimeInSeconds; + getSendToken(): Promise; + private acquireTokenBucket; + private refillTokenBucket; + updateClientSendingRate(response: any): void; + private calculateTimeWindow; + private cubicThrottle; + private cubicSuccess; + private enableTokenBucket; + private updateTokenBucketRate; + private updateMeasuredRate; + private getPrecise; +} diff --git a/amplify/functions/deleteDocument/node_modules/@smithy/util-retry/dist-types/ts3.4/StandardRetryStrategy.d.ts b/amplify/functions/deleteDocument/node_modules/@smithy/util-retry/dist-types/ts3.4/StandardRetryStrategy.d.ts new file mode 100644 index 0000000..c22f8b8 --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@smithy/util-retry/dist-types/ts3.4/StandardRetryStrategy.d.ts @@ -0,0 +1,26 @@ +import { Provider, RetryErrorInfo, RetryStrategyV2, StandardRetryToken } from "@smithy/types"; +/** + * @public + */ +export declare class StandardRetryStrategy implements RetryStrategyV2 { + private readonly maxAttempts; + readonly mode: string; + private capacity; + private readonly retryBackoffStrategy; + private readonly maxAttemptsProvider; + constructor(maxAttempts: number); + constructor(maxAttemptsProvider: Provider); + acquireInitialRetryToken(retryTokenScope: string): Promise; + refreshRetryTokenForRetry(token: StandardRetryToken, errorInfo: RetryErrorInfo): Promise; + recordSuccess(token: StandardRetryToken): void; + /** + * @returns the current available retry capacity. 
+ * + * This number decreases when retries are executed and refills when requests or retries succeed. + */ + getCapacity(): number; + private getMaxAttempts; + private shouldRetry; + private getCapacityCost; + private isRetryableError; +} diff --git a/amplify/functions/deleteDocument/node_modules/@smithy/util-retry/dist-types/ts3.4/config.d.ts b/amplify/functions/deleteDocument/node_modules/@smithy/util-retry/dist-types/ts3.4/config.d.ts new file mode 100644 index 0000000..6727a38 --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@smithy/util-retry/dist-types/ts3.4/config.d.ts @@ -0,0 +1,20 @@ +/** + * @public + */ +export declare enum RETRY_MODES { + STANDARD = "standard", + ADAPTIVE = "adaptive" +} +/** + * @public + * + * The default value for how many HTTP requests an SDK should make for a + * single SDK operation invocation before giving up + */ +export declare const DEFAULT_MAX_ATTEMPTS = 3; +/** + * @public + * + * The default retry algorithm to use. + */ +export declare const DEFAULT_RETRY_MODE = RETRY_MODES.STANDARD; diff --git a/amplify/functions/deleteDocument/node_modules/@smithy/util-retry/dist-types/ts3.4/constants.d.ts b/amplify/functions/deleteDocument/node_modules/@smithy/util-retry/dist-types/ts3.4/constants.d.ts new file mode 100644 index 0000000..5c1a5ce --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@smithy/util-retry/dist-types/ts3.4/constants.d.ts @@ -0,0 +1,59 @@ +/** + * @public + * + * The base number of milliseconds to use in calculating a suitable cool-down + * time when a retryable error is encountered. + */ +export declare const DEFAULT_RETRY_DELAY_BASE = 100; +/** + * @public + * + * The maximum amount of time (in milliseconds) that will be used as a delay + * between retry attempts. + */ +export declare const MAXIMUM_RETRY_DELAY: number; +/** + * @public + * + * The retry delay base (in milliseconds) to use when a throttling error is + * encountered. 
+ */ +export declare const THROTTLING_RETRY_DELAY_BASE = 500; +/** + * @public + * + * Initial number of retry tokens in Retry Quota + */ +export declare const INITIAL_RETRY_TOKENS = 500; +/** + * @public + * + * The total amount of retry tokens to be decremented from retry token balance. + */ +export declare const RETRY_COST = 5; +/** + * @public + * + * The total amount of retry tokens to be decremented from retry token balance + * when a throttling error is encountered. + */ +export declare const TIMEOUT_RETRY_COST = 10; +/** + * @public + * + * The total amount of retry token to be incremented from retry token balance + * if an SDK operation invocation succeeds without requiring a retry request. + */ +export declare const NO_RETRY_INCREMENT = 1; +/** + * @public + * + * Header name for SDK invocation ID + */ +export declare const INVOCATION_ID_HEADER = "amz-sdk-invocation-id"; +/** + * @public + * + * Header name for request retry information. + */ +export declare const REQUEST_HEADER = "amz-sdk-request"; diff --git a/amplify/functions/deleteDocument/node_modules/@smithy/util-retry/dist-types/ts3.4/defaultRetryBackoffStrategy.d.ts b/amplify/functions/deleteDocument/node_modules/@smithy/util-retry/dist-types/ts3.4/defaultRetryBackoffStrategy.d.ts new file mode 100644 index 0000000..1d632ca --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@smithy/util-retry/dist-types/ts3.4/defaultRetryBackoffStrategy.d.ts @@ -0,0 +1,5 @@ +import { StandardRetryBackoffStrategy } from "@smithy/types"; +/** + * @internal + */ +export declare const getDefaultRetryBackoffStrategy: () => StandardRetryBackoffStrategy; diff --git a/amplify/functions/deleteDocument/node_modules/@smithy/util-retry/dist-types/ts3.4/defaultRetryToken.d.ts b/amplify/functions/deleteDocument/node_modules/@smithy/util-retry/dist-types/ts3.4/defaultRetryToken.d.ts new file mode 100644 index 0000000..fd4b75e --- /dev/null +++ 
b/amplify/functions/deleteDocument/node_modules/@smithy/util-retry/dist-types/ts3.4/defaultRetryToken.d.ts @@ -0,0 +1,9 @@ +import { StandardRetryToken } from "@smithy/types"; +/** + * @internal + */ +export declare const createDefaultRetryToken: ({ retryDelay, retryCount, retryCost, }: { + retryDelay: number; + retryCount: number; + retryCost?: number | undefined; +}) => StandardRetryToken; diff --git a/amplify/functions/deleteDocument/node_modules/@smithy/util-retry/dist-types/ts3.4/index.d.ts b/amplify/functions/deleteDocument/node_modules/@smithy/util-retry/dist-types/ts3.4/index.d.ts new file mode 100644 index 0000000..de9af3d --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@smithy/util-retry/dist-types/ts3.4/index.d.ts @@ -0,0 +1,7 @@ +export * from "./AdaptiveRetryStrategy"; +export * from "./ConfiguredRetryStrategy"; +export * from "./DefaultRateLimiter"; +export * from "./StandardRetryStrategy"; +export * from "./config"; +export * from "./constants"; +export * from "./types"; diff --git a/amplify/functions/deleteDocument/node_modules/@smithy/util-retry/dist-types/ts3.4/types.d.ts b/amplify/functions/deleteDocument/node_modules/@smithy/util-retry/dist-types/ts3.4/types.d.ts new file mode 100644 index 0000000..5a20c01 --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@smithy/util-retry/dist-types/ts3.4/types.d.ts @@ -0,0 +1,19 @@ +/** + * @internal + */ +export interface RateLimiter { + /** + * If there is sufficient capacity (tokens) available, it immediately returns. + * If there is not sufficient capacity, it will either sleep a certain amount + * of time until the rate limiter can retrieve a token from its token bucket + * or raise an exception indicating there is insufficient capacity. + */ + getSendToken: () => Promise; + /** + * Updates the client sending rate based on response. + * If the response was successful, the capacity and fill rate are increased. 
+ * If the response was a throttling response, the capacity and fill rate are + * decreased. Transient errors do not affect the rate limiter. + */ + updateClientSendingRate: (response: any) => void; +} diff --git a/amplify/functions/deleteDocument/node_modules/@smithy/util-retry/dist-types/types.d.ts b/amplify/functions/deleteDocument/node_modules/@smithy/util-retry/dist-types/types.d.ts new file mode 100644 index 0000000..b3f2bd1 --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@smithy/util-retry/dist-types/types.d.ts @@ -0,0 +1,19 @@ +/** + * @internal + */ +export interface RateLimiter { + /** + * If there is sufficient capacity (tokens) available, it immediately returns. + * If there is not sufficient capacity, it will either sleep a certain amount + * of time until the rate limiter can retrieve a token from its token bucket + * or raise an exception indicating there is insufficient capacity. + */ + getSendToken: () => Promise; + /** + * Updates the client sending rate based on response. + * If the response was successful, the capacity and fill rate are increased. + * If the response was a throttling response, the capacity and fill rate are + * decreased. Transient errors do not affect the rate limiter. 
+ */ + updateClientSendingRate: (response: any) => void; +} diff --git a/amplify/functions/deleteDocument/node_modules/@smithy/util-retry/package.json b/amplify/functions/deleteDocument/node_modules/@smithy/util-retry/package.json new file mode 100644 index 0000000..6379727 --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@smithy/util-retry/package.json @@ -0,0 +1,68 @@ +{ + "name": "@smithy/util-retry", + "version": "4.0.3", + "description": "Shared retry utilities to be used in middleware packages.", + "main": "./dist-cjs/index.js", + "module": "./dist-es/index.js", + "scripts": { + "build": "concurrently 'yarn:build:cjs' 'yarn:build:es' 'yarn:build:types && yarn build:types:downlevel'", + "build:cjs": "node ../../scripts/inline util-retry", + "build:es": "yarn g:tsc -p tsconfig.es.json", + "build:types": "yarn g:tsc -p tsconfig.types.json", + "build:types:downlevel": "rimraf dist-types/ts3.4 && downlevel-dts dist-types dist-types/ts3.4", + "stage-release": "rimraf ./.release && yarn pack && mkdir ./.release && tar zxvf ./package.tgz --directory ./.release && rm ./package.tgz", + "clean": "rimraf ./dist-* && rimraf *.tsbuildinfo || exit 0", + "lint": "eslint -c ../../.eslintrc.js \"src/**/*.ts\"", + "format": "prettier --config ../../prettier.config.js --ignore-path ../../.prettierignore --write \"**/*.{ts,md,json}\"", + "extract:docs": "api-extractor run --local", + "test": "yarn g:vitest run", + "test:watch": "yarn g:vitest watch" + }, + "keywords": [ + "aws", + "retry" + ], + "author": { + "name": "AWS SDK for JavaScript Team", + "url": "https://aws.amazon.com/javascript/" + }, + "license": "Apache-2.0", + "dependencies": { + "@smithy/service-error-classification": "^4.0.3", + "@smithy/types": "^4.2.0", + "tslib": "^2.6.2" + }, + "devDependencies": { + "@types/node": "^18.11.9", + "concurrently": "7.0.0", + "downlevel-dts": "0.10.1", + "rimraf": "3.0.2", + "typedoc": "0.23.23" + }, + "types": "./dist-types/index.d.ts", + "engines": { + "node": 
">=18.0.0" + }, + "typesVersions": { + "<4.0": { + "types/*": [ + "types/ts3.4/*" + ] + } + }, + "files": [ + "dist-*/**" + ], + "homepage": "https://github.com/smithy-lang/smithy-typescript/tree/master/packages/util-retry", + "repository": { + "type": "git", + "url": "https://github.com/smithy-lang/smithy-typescript.git", + "directory": "packages/util-retry" + }, + "typedoc": { + "entryPoint": "src/index.ts" + }, + "publishConfig": { + "directory": ".release/package" + } +} \ No newline at end of file diff --git a/amplify/functions/deleteDocument/node_modules/@smithy/util-stream/LICENSE b/amplify/functions/deleteDocument/node_modules/@smithy/util-stream/LICENSE new file mode 100644 index 0000000..7b6491b --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@smithy/util-stream/LICENSE @@ -0,0 +1,201 @@ +Apache License + Version 2.0, January 2004 + http://www.apache.org/licenses/ + + TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION + + 1. Definitions. + + "License" shall mean the terms and conditions for use, reproduction, + and distribution as defined by Sections 1 through 9 of this document. + + "Licensor" shall mean the copyright owner or entity authorized by + the copyright owner that is granting the License. + + "Legal Entity" shall mean the union of the acting entity and all + other entities that control, are controlled by, or are under common + control with that entity. For the purposes of this definition, + "control" means (i) the power, direct or indirect, to cause the + direction or management of such entity, whether by contract or + otherwise, or (ii) ownership of fifty percent (50%) or more of the + outstanding shares, or (iii) beneficial ownership of such entity. + + "You" (or "Your") shall mean an individual or Legal Entity + exercising permissions granted by this License. 
+ + "Source" form shall mean the preferred form for making modifications, + including but not limited to software source code, documentation + source, and configuration files. + + "Object" form shall mean any form resulting from mechanical + transformation or translation of a Source form, including but + not limited to compiled object code, generated documentation, + and conversions to other media types. + + "Work" shall mean the work of authorship, whether in Source or + Object form, made available under the License, as indicated by a + copyright notice that is included in or attached to the work + (an example is provided in the Appendix below). + + "Derivative Works" shall mean any work, whether in Source or Object + form, that is based on (or derived from) the Work and for which the + editorial revisions, annotations, elaborations, or other modifications + represent, as a whole, an original work of authorship. For the purposes + of this License, Derivative Works shall not include works that remain + separable from, or merely link (or bind by name) to the interfaces of, + the Work and Derivative Works thereof. + + "Contribution" shall mean any work of authorship, including + the original version of the Work and any modifications or additions + to that Work or Derivative Works thereof, that is intentionally + submitted to Licensor for inclusion in the Work by the copyright owner + or by an individual or Legal Entity authorized to submit on behalf of + the copyright owner. 
For the purposes of this definition, "submitted" + means any form of electronic, verbal, or written communication sent + to the Licensor or its representatives, including but not limited to + communication on electronic mailing lists, source code control systems, + and issue tracking systems that are managed by, or on behalf of, the + Licensor for the purpose of discussing and improving the Work, but + excluding communication that is conspicuously marked or otherwise + designated in writing by the copyright owner as "Not a Contribution." + + "Contributor" shall mean Licensor and any individual or Legal Entity + on behalf of whom a Contribution has been received by Licensor and + subsequently incorporated within the Work. + + 2. Grant of Copyright License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + copyright license to reproduce, prepare Derivative Works of, + publicly display, publicly perform, sublicense, and distribute the + Work and such Derivative Works in Source or Object form. + + 3. Grant of Patent License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + (except as stated in this section) patent license to make, have made, + use, offer to sell, sell, import, and otherwise transfer the Work, + where such license applies only to those patent claims licensable + by such Contributor that are necessarily infringed by their + Contribution(s) alone or by combination of their Contribution(s) + with the Work to which such Contribution(s) was submitted. 
If You + institute patent litigation against any entity (including a + cross-claim or counterclaim in a lawsuit) alleging that the Work + or a Contribution incorporated within the Work constitutes direct + or contributory patent infringement, then any patent licenses + granted to You under this License for that Work shall terminate + as of the date such litigation is filed. + + 4. Redistribution. You may reproduce and distribute copies of the + Work or Derivative Works thereof in any medium, with or without + modifications, and in Source or Object form, provided that You + meet the following conditions: + + (a) You must give any other recipients of the Work or + Derivative Works a copy of this License; and + + (b) You must cause any modified files to carry prominent notices + stating that You changed the files; and + + (c) You must retain, in the Source form of any Derivative Works + that You distribute, all copyright, patent, trademark, and + attribution notices from the Source form of the Work, + excluding those notices that do not pertain to any part of + the Derivative Works; and + + (d) If the Work includes a "NOTICE" text file as part of its + distribution, then any Derivative Works that You distribute must + include a readable copy of the attribution notices contained + within such NOTICE file, excluding those notices that do not + pertain to any part of the Derivative Works, in at least one + of the following places: within a NOTICE text file distributed + as part of the Derivative Works; within the Source form or + documentation, if provided along with the Derivative Works; or, + within a display generated by the Derivative Works, if and + wherever such third-party notices normally appear. The contents + of the NOTICE file are for informational purposes only and + do not modify the License. 
You may add Your own attribution + notices within Derivative Works that You distribute, alongside + or as an addendum to the NOTICE text from the Work, provided + that such additional attribution notices cannot be construed + as modifying the License. + + You may add Your own copyright statement to Your modifications and + may provide additional or different license terms and conditions + for use, reproduction, or distribution of Your modifications, or + for any such Derivative Works as a whole, provided Your use, + reproduction, and distribution of the Work otherwise complies with + the conditions stated in this License. + + 5. Submission of Contributions. Unless You explicitly state otherwise, + any Contribution intentionally submitted for inclusion in the Work + by You to the Licensor shall be under the terms and conditions of + this License, without any additional terms or conditions. + Notwithstanding the above, nothing herein shall supersede or modify + the terms of any separate license agreement you may have executed + with Licensor regarding such Contributions. + + 6. Trademarks. This License does not grant permission to use the trade + names, trademarks, service marks, or product names of the Licensor, + except as required for reasonable and customary use in describing the + origin of the Work and reproducing the content of the NOTICE file. + + 7. Disclaimer of Warranty. Unless required by applicable law or + agreed to in writing, Licensor provides the Work (and each + Contributor provides its Contributions) on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or + implied, including, without limitation, any warranties or conditions + of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A + PARTICULAR PURPOSE. You are solely responsible for determining the + appropriateness of using or redistributing the Work and assume any + risks associated with Your exercise of permissions under this License. + + 8. 
Limitation of Liability. In no event and under no legal theory, + whether in tort (including negligence), contract, or otherwise, + unless required by applicable law (such as deliberate and grossly + negligent acts) or agreed to in writing, shall any Contributor be + liable to You for damages, including any direct, indirect, special, + incidental, or consequential damages of any character arising as a + result of this License or out of the use or inability to use the + Work (including but not limited to damages for loss of goodwill, + work stoppage, computer failure or malfunction, or any and all + other commercial damages or losses), even if such Contributor + has been advised of the possibility of such damages. + + 9. Accepting Warranty or Additional Liability. While redistributing + the Work or Derivative Works thereof, You may choose to offer, + and charge a fee for, acceptance of support, warranty, indemnity, + or other liability obligations and/or rights consistent with this + License. However, in accepting such obligations, You may act only + on Your own behalf and on Your sole responsibility, not on behalf + of any other Contributor, and only if You agree to indemnify, + defend, and hold each Contributor harmless for any liability + incurred by, or claims asserted against, such Contributor by reason + of your accepting any such warranty or additional liability. + + END OF TERMS AND CONDITIONS + + APPENDIX: How to apply the Apache License to your work. + + To apply the Apache License to your work, attach the following + boilerplate notice, with the fields enclosed by brackets "{}" + replaced with your own identifying information. (Don't include + the brackets!) The text should be enclosed in the appropriate + comment syntax for the file format. We also recommend that a + file or class name and description of purpose be included on the + same "printed page" as the copyright notice for easier + identification within third-party archives. 
+ + Copyright 2018-2020 Amazon.com, Inc. or its affiliates. All Rights Reserved. + + Licensed under the Apache License, Version 2.0 (the "License"); + you may not use this file except in compliance with the License. + You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + + Unless required by applicable law or agreed to in writing, software + distributed under the License is distributed on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + See the License for the specific language governing permissions and + limitations under the License. \ No newline at end of file diff --git a/amplify/functions/deleteDocument/node_modules/@smithy/util-stream/README.md b/amplify/functions/deleteDocument/node_modules/@smithy/util-stream/README.md new file mode 100644 index 0000000..6fcd9f6 --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@smithy/util-stream/README.md @@ -0,0 +1,6 @@ +# @smithy/util-stream + +[![NPM version](https://img.shields.io/npm/v/@smithy/util-stream/latest.svg)](https://www.npmjs.com/package/@smithy/util-stream) +[![NPM downloads](https://img.shields.io/npm/dm/@smithy/util-stream.svg)](https://www.npmjs.com/package/@smithy/util-stream) + +Package with utilities to operate on streams. 
diff --git a/amplify/functions/deleteDocument/node_modules/@smithy/util-stream/dist-cjs/ByteArrayCollector.js b/amplify/functions/deleteDocument/node_modules/@smithy/util-stream/dist-cjs/ByteArrayCollector.js new file mode 100644 index 0000000..ea8baac --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@smithy/util-stream/dist-cjs/ByteArrayCollector.js @@ -0,0 +1,35 @@ +"use strict"; +Object.defineProperty(exports, "__esModule", { value: true }); +exports.ByteArrayCollector = void 0; +class ByteArrayCollector { + constructor(allocByteArray) { + this.allocByteArray = allocByteArray; + this.byteLength = 0; + this.byteArrays = []; + } + push(byteArray) { + this.byteArrays.push(byteArray); + this.byteLength += byteArray.byteLength; + } + flush() { + if (this.byteArrays.length === 1) { + const bytes = this.byteArrays[0]; + this.reset(); + return bytes; + } + const aggregation = this.allocByteArray(this.byteLength); + let cursor = 0; + for (let i = 0; i < this.byteArrays.length; ++i) { + const bytes = this.byteArrays[i]; + aggregation.set(bytes, cursor); + cursor += bytes.byteLength; + } + this.reset(); + return aggregation; + } + reset() { + this.byteArrays = []; + this.byteLength = 0; + } +} +exports.ByteArrayCollector = ByteArrayCollector; diff --git a/amplify/functions/deleteDocument/node_modules/@smithy/util-stream/dist-cjs/blob/Uint8ArrayBlobAdapter.js b/amplify/functions/deleteDocument/node_modules/@smithy/util-stream/dist-cjs/blob/Uint8ArrayBlobAdapter.js new file mode 100644 index 0000000..0440577 --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@smithy/util-stream/dist-cjs/blob/Uint8ArrayBlobAdapter.js @@ -0,0 +1 @@ +module.exports = require("../index.js"); \ No newline at end of file diff --git a/amplify/functions/deleteDocument/node_modules/@smithy/util-stream/dist-cjs/blob/transforms.js b/amplify/functions/deleteDocument/node_modules/@smithy/util-stream/dist-cjs/blob/transforms.js new file mode 100644 index 0000000..0440577 
--- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@smithy/util-stream/dist-cjs/blob/transforms.js @@ -0,0 +1 @@ +module.exports = require("../index.js"); \ No newline at end of file diff --git a/amplify/functions/deleteDocument/node_modules/@smithy/util-stream/dist-cjs/checksum/ChecksumStream.browser.js b/amplify/functions/deleteDocument/node_modules/@smithy/util-stream/dist-cjs/checksum/ChecksumStream.browser.js new file mode 100644 index 0000000..b73363a --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@smithy/util-stream/dist-cjs/checksum/ChecksumStream.browser.js @@ -0,0 +1,7 @@ +"use strict"; +Object.defineProperty(exports, "__esModule", { value: true }); +exports.ChecksumStream = void 0; +const ReadableStreamRef = typeof ReadableStream === "function" ? ReadableStream : function () { }; +class ChecksumStream extends ReadableStreamRef { +} +exports.ChecksumStream = ChecksumStream; diff --git a/amplify/functions/deleteDocument/node_modules/@smithy/util-stream/dist-cjs/checksum/ChecksumStream.js b/amplify/functions/deleteDocument/node_modules/@smithy/util-stream/dist-cjs/checksum/ChecksumStream.js new file mode 100644 index 0000000..92d0bc0 --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@smithy/util-stream/dist-cjs/checksum/ChecksumStream.js @@ -0,0 +1,49 @@ +"use strict"; +Object.defineProperty(exports, "__esModule", { value: true }); +exports.ChecksumStream = void 0; +const util_base64_1 = require("@smithy/util-base64"); +const stream_1 = require("stream"); +class ChecksumStream extends stream_1.Duplex { + constructor({ expectedChecksum, checksum, source, checksumSourceLocation, base64Encoder, }) { + var _a, _b; + super(); + if (typeof source.pipe === "function") { + this.source = source; + } + else { + throw new Error(`@smithy/util-stream: unsupported source type ${(_b = (_a = source === null || source === void 0 ? void 0 : source.constructor) === null || _a === void 0 ? 
void 0 : _a.name) !== null && _b !== void 0 ? _b : source} in ChecksumStream.`); + } + this.base64Encoder = base64Encoder !== null && base64Encoder !== void 0 ? base64Encoder : util_base64_1.toBase64; + this.expectedChecksum = expectedChecksum; + this.checksum = checksum; + this.checksumSourceLocation = checksumSourceLocation; + this.source.pipe(this); + } + _read(size) { } + _write(chunk, encoding, callback) { + try { + this.checksum.update(chunk); + this.push(chunk); + } + catch (e) { + return callback(e); + } + return callback(); + } + async _final(callback) { + try { + const digest = await this.checksum.digest(); + const received = this.base64Encoder(digest); + if (this.expectedChecksum !== received) { + return callback(new Error(`Checksum mismatch: expected "${this.expectedChecksum}" but received "${received}"` + + ` in response header "${this.checksumSourceLocation}".`)); + } + } + catch (e) { + return callback(e); + } + this.push(null); + return callback(); + } +} +exports.ChecksumStream = ChecksumStream; diff --git a/amplify/functions/deleteDocument/node_modules/@smithy/util-stream/dist-cjs/checksum/createChecksumStream.browser.js b/amplify/functions/deleteDocument/node_modules/@smithy/util-stream/dist-cjs/checksum/createChecksumStream.browser.js new file mode 100644 index 0000000..2f6cf12 --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@smithy/util-stream/dist-cjs/checksum/createChecksumStream.browser.js @@ -0,0 +1,40 @@ +"use strict"; +Object.defineProperty(exports, "__esModule", { value: true }); +exports.createChecksumStream = void 0; +const util_base64_1 = require("@smithy/util-base64"); +const stream_type_check_1 = require("../stream-type-check"); +const ChecksumStream_browser_1 = require("./ChecksumStream.browser"); +const createChecksumStream = ({ expectedChecksum, checksum, source, checksumSourceLocation, base64Encoder, }) => { + var _a, _b; + if (!(0, stream_type_check_1.isReadableStream)(source)) { + throw new 
Error(`@smithy/util-stream: unsupported source type ${(_b = (_a = source === null || source === void 0 ? void 0 : source.constructor) === null || _a === void 0 ? void 0 : _a.name) !== null && _b !== void 0 ? _b : source} in ChecksumStream.`); + } + const encoder = base64Encoder !== null && base64Encoder !== void 0 ? base64Encoder : util_base64_1.toBase64; + if (typeof TransformStream !== "function") { + throw new Error("@smithy/util-stream: unable to instantiate ChecksumStream because API unavailable: ReadableStream/TransformStream."); + } + const transform = new TransformStream({ + start() { }, + async transform(chunk, controller) { + checksum.update(chunk); + controller.enqueue(chunk); + }, + async flush(controller) { + const digest = await checksum.digest(); + const received = encoder(digest); + if (expectedChecksum !== received) { + const error = new Error(`Checksum mismatch: expected "${expectedChecksum}" but received "${received}"` + + ` in response header "${checksumSourceLocation}".`); + controller.error(error); + } + else { + controller.terminate(); + } + }, + }); + source.pipeThrough(transform); + const readable = transform.readable; + Object.setPrototypeOf(readable, ChecksumStream_browser_1.ChecksumStream.prototype); + return readable; +}; +exports.createChecksumStream = createChecksumStream; diff --git a/amplify/functions/deleteDocument/node_modules/@smithy/util-stream/dist-cjs/checksum/createChecksumStream.js b/amplify/functions/deleteDocument/node_modules/@smithy/util-stream/dist-cjs/checksum/createChecksumStream.js new file mode 100644 index 0000000..57e2a2f --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@smithy/util-stream/dist-cjs/checksum/createChecksumStream.js @@ -0,0 +1,13 @@ +"use strict"; +Object.defineProperty(exports, "__esModule", { value: true }); +exports.createChecksumStream = void 0; +const stream_type_check_1 = require("../stream-type-check"); +const ChecksumStream_1 = require("./ChecksumStream"); +const 
createChecksumStream_browser_1 = require("./createChecksumStream.browser"); +function createChecksumStream(init) { + if (typeof ReadableStream === "function" && (0, stream_type_check_1.isReadableStream)(init.source)) { + return (0, createChecksumStream_browser_1.createChecksumStream)(init); + } + return new ChecksumStream_1.ChecksumStream(init); +} +exports.createChecksumStream = createChecksumStream; diff --git a/amplify/functions/deleteDocument/node_modules/@smithy/util-stream/dist-cjs/createBufferedReadable.js b/amplify/functions/deleteDocument/node_modules/@smithy/util-stream/dist-cjs/createBufferedReadable.js new file mode 100644 index 0000000..4c10847 --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@smithy/util-stream/dist-cjs/createBufferedReadable.js @@ -0,0 +1,61 @@ +"use strict"; +Object.defineProperty(exports, "__esModule", { value: true }); +exports.createBufferedReadable = void 0; +const node_stream_1 = require("node:stream"); +const ByteArrayCollector_1 = require("./ByteArrayCollector"); +const createBufferedReadableStream_1 = require("./createBufferedReadableStream"); +const stream_type_check_1 = require("./stream-type-check"); +function createBufferedReadable(upstream, size, logger) { + if ((0, stream_type_check_1.isReadableStream)(upstream)) { + return (0, createBufferedReadableStream_1.createBufferedReadableStream)(upstream, size, logger); + } + const downstream = new node_stream_1.Readable({ read() { } }); + let streamBufferingLoggedWarning = false; + let bytesSeen = 0; + const buffers = [ + "", + new ByteArrayCollector_1.ByteArrayCollector((size) => new Uint8Array(size)), + new ByteArrayCollector_1.ByteArrayCollector((size) => Buffer.from(new Uint8Array(size))), + ]; + let mode = -1; + upstream.on("data", (chunk) => { + const chunkMode = (0, createBufferedReadableStream_1.modeOf)(chunk, true); + if (mode !== chunkMode) { + if (mode >= 0) { + downstream.push((0, createBufferedReadableStream_1.flush)(buffers, mode)); + } + mode = 
chunkMode; + } + if (mode === -1) { + downstream.push(chunk); + return; + } + const chunkSize = (0, createBufferedReadableStream_1.sizeOf)(chunk); + bytesSeen += chunkSize; + const bufferSize = (0, createBufferedReadableStream_1.sizeOf)(buffers[mode]); + if (chunkSize >= size && bufferSize === 0) { + downstream.push(chunk); + } + else { + const newSize = (0, createBufferedReadableStream_1.merge)(buffers, mode, chunk); + if (!streamBufferingLoggedWarning && bytesSeen > size * 2) { + streamBufferingLoggedWarning = true; + logger === null || logger === void 0 ? void 0 : logger.warn(`@smithy/util-stream - stream chunk size ${chunkSize} is below threshold of ${size}, automatically buffering.`); + } + if (newSize >= size) { + downstream.push((0, createBufferedReadableStream_1.flush)(buffers, mode)); + } + } + }); + upstream.on("end", () => { + if (mode !== -1) { + const remainder = (0, createBufferedReadableStream_1.flush)(buffers, mode); + if ((0, createBufferedReadableStream_1.sizeOf)(remainder) > 0) { + downstream.push(remainder); + } + } + downstream.push(null); + }); + return downstream; +} +exports.createBufferedReadable = createBufferedReadable; diff --git a/amplify/functions/deleteDocument/node_modules/@smithy/util-stream/dist-cjs/createBufferedReadableStream.js b/amplify/functions/deleteDocument/node_modules/@smithy/util-stream/dist-cjs/createBufferedReadableStream.js new file mode 100644 index 0000000..2cd72aa --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@smithy/util-stream/dist-cjs/createBufferedReadableStream.js @@ -0,0 +1,104 @@ +"use strict"; +Object.defineProperty(exports, "__esModule", { value: true }); +exports.modeOf = exports.sizeOf = exports.flush = exports.merge = exports.createBufferedReadable = exports.createBufferedReadableStream = void 0; +const ByteArrayCollector_1 = require("./ByteArrayCollector"); +function createBufferedReadableStream(upstream, size, logger) { + const reader = upstream.getReader(); + let 
streamBufferingLoggedWarning = false; + let bytesSeen = 0; + const buffers = ["", new ByteArrayCollector_1.ByteArrayCollector((size) => new Uint8Array(size))]; + let mode = -1; + const pull = async (controller) => { + const { value, done } = await reader.read(); + const chunk = value; + if (done) { + if (mode !== -1) { + const remainder = flush(buffers, mode); + if (sizeOf(remainder) > 0) { + controller.enqueue(remainder); + } + } + controller.close(); + } + else { + const chunkMode = modeOf(chunk, false); + if (mode !== chunkMode) { + if (mode >= 0) { + controller.enqueue(flush(buffers, mode)); + } + mode = chunkMode; + } + if (mode === -1) { + controller.enqueue(chunk); + return; + } + const chunkSize = sizeOf(chunk); + bytesSeen += chunkSize; + const bufferSize = sizeOf(buffers[mode]); + if (chunkSize >= size && bufferSize === 0) { + controller.enqueue(chunk); + } + else { + const newSize = merge(buffers, mode, chunk); + if (!streamBufferingLoggedWarning && bytesSeen > size * 2) { + streamBufferingLoggedWarning = true; + logger === null || logger === void 0 ? 
void 0 : logger.warn(`@smithy/util-stream - stream chunk size ${chunkSize} is below threshold of ${size}, automatically buffering.`); + } + if (newSize >= size) { + controller.enqueue(flush(buffers, mode)); + } + else { + await pull(controller); + } + } + } + }; + return new ReadableStream({ + pull, + }); +} +exports.createBufferedReadableStream = createBufferedReadableStream; +exports.createBufferedReadable = createBufferedReadableStream; +function merge(buffers, mode, chunk) { + switch (mode) { + case 0: + buffers[0] += chunk; + return sizeOf(buffers[0]); + case 1: + case 2: + buffers[mode].push(chunk); + return sizeOf(buffers[mode]); + } +} +exports.merge = merge; +function flush(buffers, mode) { + switch (mode) { + case 0: + const s = buffers[0]; + buffers[0] = ""; + return s; + case 1: + case 2: + return buffers[mode].flush(); + } + throw new Error(`@smithy/util-stream - invalid index ${mode} given to flush()`); +} +exports.flush = flush; +function sizeOf(chunk) { + var _a, _b; + return (_b = (_a = chunk === null || chunk === void 0 ? void 0 : chunk.byteLength) !== null && _a !== void 0 ? _a : chunk === null || chunk === void 0 ? void 0 : chunk.length) !== null && _b !== void 0 ? 
_b : 0; +} +exports.sizeOf = sizeOf; +function modeOf(chunk, allowBuffer = true) { + if (allowBuffer && typeof Buffer !== "undefined" && chunk instanceof Buffer) { + return 2; + } + if (chunk instanceof Uint8Array) { + return 1; + } + if (typeof chunk === "string") { + return 0; + } + return -1; +} +exports.modeOf = modeOf; diff --git a/amplify/functions/deleteDocument/node_modules/@smithy/util-stream/dist-cjs/getAwsChunkedEncodingStream.browser.js b/amplify/functions/deleteDocument/node_modules/@smithy/util-stream/dist-cjs/getAwsChunkedEncodingStream.browser.js new file mode 100644 index 0000000..d8e540c --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@smithy/util-stream/dist-cjs/getAwsChunkedEncodingStream.browser.js @@ -0,0 +1,31 @@ +"use strict"; +Object.defineProperty(exports, "__esModule", { value: true }); +exports.getAwsChunkedEncodingStream = void 0; +const getAwsChunkedEncodingStream = (readableStream, options) => { + const { base64Encoder, bodyLengthChecker, checksumAlgorithmFn, checksumLocationName, streamHasher } = options; + const checksumRequired = base64Encoder !== undefined && + bodyLengthChecker !== undefined && + checksumAlgorithmFn !== undefined && + checksumLocationName !== undefined && + streamHasher !== undefined; + const digest = checksumRequired ? 
streamHasher(checksumAlgorithmFn, readableStream) : undefined; + const reader = readableStream.getReader(); + return new ReadableStream({ + async pull(controller) { + const { value, done } = await reader.read(); + if (done) { + controller.enqueue(`0\r\n`); + if (checksumRequired) { + const checksum = base64Encoder(await digest); + controller.enqueue(`${checksumLocationName}:${checksum}\r\n`); + controller.enqueue(`\r\n`); + } + controller.close(); + } + else { + controller.enqueue(`${(bodyLengthChecker(value) || 0).toString(16)}\r\n${value}\r\n`); + } + }, + }); +}; +exports.getAwsChunkedEncodingStream = getAwsChunkedEncodingStream; diff --git a/amplify/functions/deleteDocument/node_modules/@smithy/util-stream/dist-cjs/getAwsChunkedEncodingStream.js b/amplify/functions/deleteDocument/node_modules/@smithy/util-stream/dist-cjs/getAwsChunkedEncodingStream.js new file mode 100644 index 0000000..4f3f9e7 --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@smithy/util-stream/dist-cjs/getAwsChunkedEncodingStream.js @@ -0,0 +1,30 @@ +"use strict"; +Object.defineProperty(exports, "__esModule", { value: true }); +exports.getAwsChunkedEncodingStream = void 0; +const stream_1 = require("stream"); +const getAwsChunkedEncodingStream = (readableStream, options) => { + const { base64Encoder, bodyLengthChecker, checksumAlgorithmFn, checksumLocationName, streamHasher } = options; + const checksumRequired = base64Encoder !== undefined && + checksumAlgorithmFn !== undefined && + checksumLocationName !== undefined && + streamHasher !== undefined; + const digest = checksumRequired ? 
streamHasher(checksumAlgorithmFn, readableStream) : undefined; + const awsChunkedEncodingStream = new stream_1.Readable({ read: () => { } }); + readableStream.on("data", (data) => { + const length = bodyLengthChecker(data) || 0; + awsChunkedEncodingStream.push(`${length.toString(16)}\r\n`); + awsChunkedEncodingStream.push(data); + awsChunkedEncodingStream.push("\r\n"); + }); + readableStream.on("end", async () => { + awsChunkedEncodingStream.push(`0\r\n`); + if (checksumRequired) { + const checksum = base64Encoder(await digest); + awsChunkedEncodingStream.push(`${checksumLocationName}:${checksum}\r\n`); + awsChunkedEncodingStream.push(`\r\n`); + } + awsChunkedEncodingStream.push(null); + }); + return awsChunkedEncodingStream; +}; +exports.getAwsChunkedEncodingStream = getAwsChunkedEncodingStream; diff --git a/amplify/functions/deleteDocument/node_modules/@smithy/util-stream/dist-cjs/headStream.browser.js b/amplify/functions/deleteDocument/node_modules/@smithy/util-stream/dist-cjs/headStream.browser.js new file mode 100644 index 0000000..38512c1 --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@smithy/util-stream/dist-cjs/headStream.browser.js @@ -0,0 +1,36 @@ +"use strict"; +Object.defineProperty(exports, "__esModule", { value: true }); +exports.headStream = void 0; +async function headStream(stream, bytes) { + var _a; + let byteLengthCounter = 0; + const chunks = []; + const reader = stream.getReader(); + let isDone = false; + while (!isDone) { + const { done, value } = await reader.read(); + if (value) { + chunks.push(value); + byteLengthCounter += (_a = value === null || value === void 0 ? void 0 : value.byteLength) !== null && _a !== void 0 ? 
_a : 0; + } + if (byteLengthCounter >= bytes) { + break; + } + isDone = done; + } + reader.releaseLock(); + const collected = new Uint8Array(Math.min(bytes, byteLengthCounter)); + let offset = 0; + for (const chunk of chunks) { + if (chunk.byteLength > collected.byteLength - offset) { + collected.set(chunk.subarray(0, collected.byteLength - offset), offset); + break; + } + else { + collected.set(chunk, offset); + } + offset += chunk.length; + } + return collected; +} +exports.headStream = headStream; diff --git a/amplify/functions/deleteDocument/node_modules/@smithy/util-stream/dist-cjs/headStream.js b/amplify/functions/deleteDocument/node_modules/@smithy/util-stream/dist-cjs/headStream.js new file mode 100644 index 0000000..86103b3 --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@smithy/util-stream/dist-cjs/headStream.js @@ -0,0 +1,46 @@ +"use strict"; +Object.defineProperty(exports, "__esModule", { value: true }); +exports.headStream = void 0; +const stream_1 = require("stream"); +const headStream_browser_1 = require("./headStream.browser"); +const stream_type_check_1 = require("./stream-type-check"); +const headStream = (stream, bytes) => { + if ((0, stream_type_check_1.isReadableStream)(stream)) { + return (0, headStream_browser_1.headStream)(stream, bytes); + } + return new Promise((resolve, reject) => { + const collector = new Collector(); + collector.limit = bytes; + stream.pipe(collector); + stream.on("error", (err) => { + collector.end(); + reject(err); + }); + collector.on("error", reject); + collector.on("finish", function () { + const bytes = new Uint8Array(Buffer.concat(this.buffers)); + resolve(bytes); + }); + }); +}; +exports.headStream = headStream; +class Collector extends stream_1.Writable { + constructor() { + super(...arguments); + this.buffers = []; + this.limit = Infinity; + this.bytesBuffered = 0; + } + _write(chunk, encoding, callback) { + var _a; + this.buffers.push(chunk); + this.bytesBuffered += (_a = chunk.byteLength) 
!== null && _a !== void 0 ? _a : 0; + if (this.bytesBuffered >= this.limit) { + const excess = this.bytesBuffered - this.limit; + const tailBuffer = this.buffers[this.buffers.length - 1]; + this.buffers[this.buffers.length - 1] = tailBuffer.subarray(0, tailBuffer.byteLength - excess); + this.emit("finish"); + } + callback(); + } +} diff --git a/amplify/functions/deleteDocument/node_modules/@smithy/util-stream/dist-cjs/index.js b/amplify/functions/deleteDocument/node_modules/@smithy/util-stream/dist-cjs/index.js new file mode 100644 index 0000000..d42fe10 --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@smithy/util-stream/dist-cjs/index.js @@ -0,0 +1,103 @@ +var __defProp = Object.defineProperty; +var __getOwnPropDesc = Object.getOwnPropertyDescriptor; +var __getOwnPropNames = Object.getOwnPropertyNames; +var __hasOwnProp = Object.prototype.hasOwnProperty; +var __name = (target, value) => __defProp(target, "name", { value, configurable: true }); +var __export = (target, all) => { + for (var name in all) + __defProp(target, name, { get: all[name], enumerable: true }); +}; +var __copyProps = (to, from, except, desc) => { + if (from && typeof from === "object" || typeof from === "function") { + for (let key of __getOwnPropNames(from)) + if (!__hasOwnProp.call(to, key) && key !== except) + __defProp(to, key, { get: () => from[key], enumerable: !(desc = __getOwnPropDesc(from, key)) || desc.enumerable }); + } + return to; +}; +var __reExport = (target, mod, secondTarget) => (__copyProps(target, mod, "default"), secondTarget && __copyProps(secondTarget, mod, "default")); +var __toCommonJS = (mod) => __copyProps(__defProp({}, "__esModule", { value: true }), mod); + +// src/index.ts +var src_exports = {}; +__export(src_exports, { + Uint8ArrayBlobAdapter: () => Uint8ArrayBlobAdapter +}); +module.exports = __toCommonJS(src_exports); + +// src/blob/transforms.ts +var import_util_base64 = require("@smithy/util-base64"); +var import_util_utf8 = 
require("@smithy/util-utf8"); +function transformToString(payload, encoding = "utf-8") { + if (encoding === "base64") { + return (0, import_util_base64.toBase64)(payload); + } + return (0, import_util_utf8.toUtf8)(payload); +} +__name(transformToString, "transformToString"); +function transformFromString(str, encoding) { + if (encoding === "base64") { + return Uint8ArrayBlobAdapter.mutate((0, import_util_base64.fromBase64)(str)); + } + return Uint8ArrayBlobAdapter.mutate((0, import_util_utf8.fromUtf8)(str)); +} +__name(transformFromString, "transformFromString"); + +// src/blob/Uint8ArrayBlobAdapter.ts +var Uint8ArrayBlobAdapter = class _Uint8ArrayBlobAdapter extends Uint8Array { + static { + __name(this, "Uint8ArrayBlobAdapter"); + } + /** + * @param source - such as a string or Stream. + * @returns a new Uint8ArrayBlobAdapter extending Uint8Array. + */ + static fromString(source, encoding = "utf-8") { + switch (typeof source) { + case "string": + return transformFromString(source, encoding); + default: + throw new Error(`Unsupported conversion from ${typeof source} to Uint8ArrayBlobAdapter.`); + } + } + /** + * @param source - Uint8Array to be mutated. + * @returns the same Uint8Array but with prototype switched to Uint8ArrayBlobAdapter. + */ + static mutate(source) { + Object.setPrototypeOf(source, _Uint8ArrayBlobAdapter.prototype); + return source; + } + /** + * @param encoding - default 'utf-8'. + * @returns the blob as string. 
+ */ + transformToString(encoding = "utf-8") { + return transformToString(this, encoding); + } +}; + +// src/index.ts +__reExport(src_exports, require("./checksum/ChecksumStream"), module.exports); +__reExport(src_exports, require("./checksum/createChecksumStream"), module.exports); +__reExport(src_exports, require("././createBufferedReadable"), module.exports); +__reExport(src_exports, require("././getAwsChunkedEncodingStream"), module.exports); +__reExport(src_exports, require("././headStream"), module.exports); +__reExport(src_exports, require("././sdk-stream-mixin"), module.exports); +__reExport(src_exports, require("././splitStream"), module.exports); +__reExport(src_exports, require("././stream-type-check"), module.exports); +// Annotate the CommonJS export names for ESM import in node: + +0 && (module.exports = { + Uint8ArrayBlobAdapter, + ChecksumStream, + createChecksumStream, + createBufferedReadable, + getAwsChunkedEncodingStream, + headStream, + sdkStreamMixin, + splitStream, + isReadableStream, + isBlob +}); + diff --git a/amplify/functions/deleteDocument/node_modules/@smithy/util-stream/dist-cjs/sdk-stream-mixin.browser.js b/amplify/functions/deleteDocument/node_modules/@smithy/util-stream/dist-cjs/sdk-stream-mixin.browser.js new file mode 100644 index 0000000..9309af1 --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@smithy/util-stream/dist-cjs/sdk-stream-mixin.browser.js @@ -0,0 +1,69 @@ +"use strict"; +Object.defineProperty(exports, "__esModule", { value: true }); +exports.sdkStreamMixin = void 0; +const fetch_http_handler_1 = require("@smithy/fetch-http-handler"); +const util_base64_1 = require("@smithy/util-base64"); +const util_hex_encoding_1 = require("@smithy/util-hex-encoding"); +const util_utf8_1 = require("@smithy/util-utf8"); +const stream_type_check_1 = require("./stream-type-check"); +const ERR_MSG_STREAM_HAS_BEEN_TRANSFORMED = "The stream has already been transformed."; +const sdkStreamMixin = (stream) => { + var _a, 
_b; + if (!isBlobInstance(stream) && !(0, stream_type_check_1.isReadableStream)(stream)) { + const name = ((_b = (_a = stream === null || stream === void 0 ? void 0 : stream.__proto__) === null || _a === void 0 ? void 0 : _a.constructor) === null || _b === void 0 ? void 0 : _b.name) || stream; + throw new Error(`Unexpected stream implementation, expect Blob or ReadableStream, got ${name}`); + } + let transformed = false; + const transformToByteArray = async () => { + if (transformed) { + throw new Error(ERR_MSG_STREAM_HAS_BEEN_TRANSFORMED); + } + transformed = true; + return await (0, fetch_http_handler_1.streamCollector)(stream); + }; + const blobToWebStream = (blob) => { + if (typeof blob.stream !== "function") { + throw new Error("Cannot transform payload Blob to web stream. Please make sure the Blob.stream() is polyfilled.\n" + + "If you are using React Native, this API is not yet supported, see: https://react-native.canny.io/feature-requests/p/fetch-streaming-body"); + } + return blob.stream(); + }; + return Object.assign(stream, { + transformToByteArray: transformToByteArray, + transformToString: async (encoding) => { + const buf = await transformToByteArray(); + if (encoding === "base64") { + return (0, util_base64_1.toBase64)(buf); + } + else if (encoding === "hex") { + return (0, util_hex_encoding_1.toHex)(buf); + } + else if (encoding === undefined || encoding === "utf8" || encoding === "utf-8") { + return (0, util_utf8_1.toUtf8)(buf); + } + else if (typeof TextDecoder === "function") { + return new TextDecoder(encoding).decode(buf); + } + else { + throw new Error("TextDecoder is not available, please make sure polyfill is provided."); + } + }, + transformToWebStream: () => { + if (transformed) { + throw new Error(ERR_MSG_STREAM_HAS_BEEN_TRANSFORMED); + } + transformed = true; + if (isBlobInstance(stream)) { + return blobToWebStream(stream); + } + else if ((0, stream_type_check_1.isReadableStream)(stream)) { + return stream; + } + else { + throw new 
Error(`Cannot transform payload to web stream, got ${stream}`); + } + }, + }); +}; +exports.sdkStreamMixin = sdkStreamMixin; +const isBlobInstance = (stream) => typeof Blob === "function" && stream instanceof Blob; diff --git a/amplify/functions/deleteDocument/node_modules/@smithy/util-stream/dist-cjs/sdk-stream-mixin.js b/amplify/functions/deleteDocument/node_modules/@smithy/util-stream/dist-cjs/sdk-stream-mixin.js new file mode 100644 index 0000000..0817eac --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@smithy/util-stream/dist-cjs/sdk-stream-mixin.js @@ -0,0 +1,55 @@ +"use strict"; +Object.defineProperty(exports, "__esModule", { value: true }); +exports.sdkStreamMixin = void 0; +const node_http_handler_1 = require("@smithy/node-http-handler"); +const util_buffer_from_1 = require("@smithy/util-buffer-from"); +const stream_1 = require("stream"); +const sdk_stream_mixin_browser_1 = require("./sdk-stream-mixin.browser"); +const ERR_MSG_STREAM_HAS_BEEN_TRANSFORMED = "The stream has already been transformed."; +const sdkStreamMixin = (stream) => { + var _a, _b; + if (!(stream instanceof stream_1.Readable)) { + try { + return (0, sdk_stream_mixin_browser_1.sdkStreamMixin)(stream); + } + catch (e) { + const name = ((_b = (_a = stream === null || stream === void 0 ? void 0 : stream.__proto__) === null || _a === void 0 ? void 0 : _a.constructor) === null || _b === void 0 ? 
void 0 : _b.name) || stream; + throw new Error(`Unexpected stream implementation, expect Stream.Readable instance, got ${name}`); + } + } + let transformed = false; + const transformToByteArray = async () => { + if (transformed) { + throw new Error(ERR_MSG_STREAM_HAS_BEEN_TRANSFORMED); + } + transformed = true; + return await (0, node_http_handler_1.streamCollector)(stream); + }; + return Object.assign(stream, { + transformToByteArray, + transformToString: async (encoding) => { + const buf = await transformToByteArray(); + if (encoding === undefined || Buffer.isEncoding(encoding)) { + return (0, util_buffer_from_1.fromArrayBuffer)(buf.buffer, buf.byteOffset, buf.byteLength).toString(encoding); + } + else { + const decoder = new TextDecoder(encoding); + return decoder.decode(buf); + } + }, + transformToWebStream: () => { + if (transformed) { + throw new Error(ERR_MSG_STREAM_HAS_BEEN_TRANSFORMED); + } + if (stream.readableFlowing !== null) { + throw new Error("The stream has been consumed by other callbacks."); + } + if (typeof stream_1.Readable.toWeb !== "function") { + throw new Error("Readable.toWeb() is not supported. 
Please ensure a polyfill is available."); + } + transformed = true; + return stream_1.Readable.toWeb(stream); + }, + }); +}; +exports.sdkStreamMixin = sdkStreamMixin; diff --git a/amplify/functions/deleteDocument/node_modules/@smithy/util-stream/dist-cjs/splitStream.browser.js b/amplify/functions/deleteDocument/node_modules/@smithy/util-stream/dist-cjs/splitStream.browser.js new file mode 100644 index 0000000..eb890cc --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@smithy/util-stream/dist-cjs/splitStream.browser.js @@ -0,0 +1,11 @@ +"use strict"; +Object.defineProperty(exports, "__esModule", { value: true }); +exports.splitStream = void 0; +async function splitStream(stream) { + if (typeof stream.stream === "function") { + stream = stream.stream(); + } + const readableStream = stream; + return readableStream.tee(); +} +exports.splitStream = splitStream; diff --git a/amplify/functions/deleteDocument/node_modules/@smithy/util-stream/dist-cjs/splitStream.js b/amplify/functions/deleteDocument/node_modules/@smithy/util-stream/dist-cjs/splitStream.js new file mode 100644 index 0000000..c55b628 --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@smithy/util-stream/dist-cjs/splitStream.js @@ -0,0 +1,17 @@ +"use strict"; +Object.defineProperty(exports, "__esModule", { value: true }); +exports.splitStream = void 0; +const stream_1 = require("stream"); +const splitStream_browser_1 = require("./splitStream.browser"); +const stream_type_check_1 = require("./stream-type-check"); +async function splitStream(stream) { + if ((0, stream_type_check_1.isReadableStream)(stream) || (0, stream_type_check_1.isBlob)(stream)) { + return (0, splitStream_browser_1.splitStream)(stream); + } + const stream1 = new stream_1.PassThrough(); + const stream2 = new stream_1.PassThrough(); + stream.pipe(stream1); + stream.pipe(stream2); + return [stream1, stream2]; +} +exports.splitStream = splitStream; diff --git 
a/amplify/functions/deleteDocument/node_modules/@smithy/util-stream/dist-cjs/stream-type-check.js b/amplify/functions/deleteDocument/node_modules/@smithy/util-stream/dist-cjs/stream-type-check.js new file mode 100644 index 0000000..a4a6138 --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@smithy/util-stream/dist-cjs/stream-type-check.js @@ -0,0 +1,14 @@ +"use strict"; +Object.defineProperty(exports, "__esModule", { value: true }); +exports.isBlob = exports.isReadableStream = void 0; +const isReadableStream = (stream) => { + var _a; + return typeof ReadableStream === "function" && + (((_a = stream === null || stream === void 0 ? void 0 : stream.constructor) === null || _a === void 0 ? void 0 : _a.name) === ReadableStream.name || stream instanceof ReadableStream); +}; +exports.isReadableStream = isReadableStream; +const isBlob = (blob) => { + var _a; + return typeof Blob === "function" && (((_a = blob === null || blob === void 0 ? void 0 : blob.constructor) === null || _a === void 0 ? 
void 0 : _a.name) === Blob.name || blob instanceof Blob); +}; +exports.isBlob = isBlob; diff --git a/amplify/functions/deleteDocument/node_modules/@smithy/util-stream/dist-es/ByteArrayCollector.js b/amplify/functions/deleteDocument/node_modules/@smithy/util-stream/dist-es/ByteArrayCollector.js new file mode 100644 index 0000000..39af48f --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@smithy/util-stream/dist-es/ByteArrayCollector.js @@ -0,0 +1,31 @@ +export class ByteArrayCollector { + constructor(allocByteArray) { + this.allocByteArray = allocByteArray; + this.byteLength = 0; + this.byteArrays = []; + } + push(byteArray) { + this.byteArrays.push(byteArray); + this.byteLength += byteArray.byteLength; + } + flush() { + if (this.byteArrays.length === 1) { + const bytes = this.byteArrays[0]; + this.reset(); + return bytes; + } + const aggregation = this.allocByteArray(this.byteLength); + let cursor = 0; + for (let i = 0; i < this.byteArrays.length; ++i) { + const bytes = this.byteArrays[i]; + aggregation.set(bytes, cursor); + cursor += bytes.byteLength; + } + this.reset(); + return aggregation; + } + reset() { + this.byteArrays = []; + this.byteLength = 0; + } +} diff --git a/amplify/functions/deleteDocument/node_modules/@smithy/util-stream/dist-es/blob/Uint8ArrayBlobAdapter.js b/amplify/functions/deleteDocument/node_modules/@smithy/util-stream/dist-es/blob/Uint8ArrayBlobAdapter.js new file mode 100644 index 0000000..41746b1 --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@smithy/util-stream/dist-es/blob/Uint8ArrayBlobAdapter.js @@ -0,0 +1,18 @@ +import { transformFromString, transformToString } from "./transforms"; +export class Uint8ArrayBlobAdapter extends Uint8Array { + static fromString(source, encoding = "utf-8") { + switch (typeof source) { + case "string": + return transformFromString(source, encoding); + default: + throw new Error(`Unsupported conversion from ${typeof source} to Uint8ArrayBlobAdapter.`); + } + } + static 
mutate(source) { + Object.setPrototypeOf(source, Uint8ArrayBlobAdapter.prototype); + return source; + } + transformToString(encoding = "utf-8") { + return transformToString(this, encoding); + } +} diff --git a/amplify/functions/deleteDocument/node_modules/@smithy/util-stream/dist-es/blob/transforms.js b/amplify/functions/deleteDocument/node_modules/@smithy/util-stream/dist-es/blob/transforms.js new file mode 100644 index 0000000..0d1f74a --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@smithy/util-stream/dist-es/blob/transforms.js @@ -0,0 +1,15 @@ +import { fromBase64, toBase64 } from "@smithy/util-base64"; +import { fromUtf8, toUtf8 } from "@smithy/util-utf8"; +import { Uint8ArrayBlobAdapter } from "./Uint8ArrayBlobAdapter"; +export function transformToString(payload, encoding = "utf-8") { + if (encoding === "base64") { + return toBase64(payload); + } + return toUtf8(payload); +} +export function transformFromString(str, encoding) { + if (encoding === "base64") { + return Uint8ArrayBlobAdapter.mutate(fromBase64(str)); + } + return Uint8ArrayBlobAdapter.mutate(fromUtf8(str)); +} diff --git a/amplify/functions/deleteDocument/node_modules/@smithy/util-stream/dist-es/checksum/ChecksumStream.browser.js b/amplify/functions/deleteDocument/node_modules/@smithy/util-stream/dist-es/checksum/ChecksumStream.browser.js new file mode 100644 index 0000000..afcf529 --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@smithy/util-stream/dist-es/checksum/ChecksumStream.browser.js @@ -0,0 +1,3 @@ +const ReadableStreamRef = typeof ReadableStream === "function" ? 
ReadableStream : function () { }; +export class ChecksumStream extends ReadableStreamRef { +} diff --git a/amplify/functions/deleteDocument/node_modules/@smithy/util-stream/dist-es/checksum/ChecksumStream.js b/amplify/functions/deleteDocument/node_modules/@smithy/util-stream/dist-es/checksum/ChecksumStream.js new file mode 100644 index 0000000..e623a09 --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@smithy/util-stream/dist-es/checksum/ChecksumStream.js @@ -0,0 +1,44 @@ +import { toBase64 } from "@smithy/util-base64"; +import { Duplex } from "stream"; +export class ChecksumStream extends Duplex { + constructor({ expectedChecksum, checksum, source, checksumSourceLocation, base64Encoder, }) { + super(); + if (typeof source.pipe === "function") { + this.source = source; + } + else { + throw new Error(`@smithy/util-stream: unsupported source type ${source?.constructor?.name ?? source} in ChecksumStream.`); + } + this.base64Encoder = base64Encoder ?? toBase64; + this.expectedChecksum = expectedChecksum; + this.checksum = checksum; + this.checksumSourceLocation = checksumSourceLocation; + this.source.pipe(this); + } + _read(size) { } + _write(chunk, encoding, callback) { + try { + this.checksum.update(chunk); + this.push(chunk); + } + catch (e) { + return callback(e); + } + return callback(); + } + async _final(callback) { + try { + const digest = await this.checksum.digest(); + const received = this.base64Encoder(digest); + if (this.expectedChecksum !== received) { + return callback(new Error(`Checksum mismatch: expected "${this.expectedChecksum}" but received "${received}"` + + ` in response header "${this.checksumSourceLocation}".`)); + } + } + catch (e) { + return callback(e); + } + this.push(null); + return callback(); + } +} diff --git a/amplify/functions/deleteDocument/node_modules/@smithy/util-stream/dist-es/checksum/createChecksumStream.browser.js 
b/amplify/functions/deleteDocument/node_modules/@smithy/util-stream/dist-es/checksum/createChecksumStream.browser.js new file mode 100644 index 0000000..6a41c12 --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@smithy/util-stream/dist-es/checksum/createChecksumStream.browser.js @@ -0,0 +1,35 @@ +import { toBase64 } from "@smithy/util-base64"; +import { isReadableStream } from "../stream-type-check"; +import { ChecksumStream } from "./ChecksumStream.browser"; +export const createChecksumStream = ({ expectedChecksum, checksum, source, checksumSourceLocation, base64Encoder, }) => { + if (!isReadableStream(source)) { + throw new Error(`@smithy/util-stream: unsupported source type ${source?.constructor?.name ?? source} in ChecksumStream.`); + } + const encoder = base64Encoder ?? toBase64; + if (typeof TransformStream !== "function") { + throw new Error("@smithy/util-stream: unable to instantiate ChecksumStream because API unavailable: ReadableStream/TransformStream."); + } + const transform = new TransformStream({ + start() { }, + async transform(chunk, controller) { + checksum.update(chunk); + controller.enqueue(chunk); + }, + async flush(controller) { + const digest = await checksum.digest(); + const received = encoder(digest); + if (expectedChecksum !== received) { + const error = new Error(`Checksum mismatch: expected "${expectedChecksum}" but received "${received}"` + + ` in response header "${checksumSourceLocation}".`); + controller.error(error); + } + else { + controller.terminate(); + } + }, + }); + source.pipeThrough(transform); + const readable = transform.readable; + Object.setPrototypeOf(readable, ChecksumStream.prototype); + return readable; +}; diff --git a/amplify/functions/deleteDocument/node_modules/@smithy/util-stream/dist-es/checksum/createChecksumStream.js b/amplify/functions/deleteDocument/node_modules/@smithy/util-stream/dist-es/checksum/createChecksumStream.js new file mode 100644 index 0000000..d205b82 --- /dev/null +++ 
b/amplify/functions/deleteDocument/node_modules/@smithy/util-stream/dist-es/checksum/createChecksumStream.js @@ -0,0 +1,9 @@ +import { isReadableStream } from "../stream-type-check"; +import { ChecksumStream } from "./ChecksumStream"; +import { createChecksumStream as createChecksumStreamWeb } from "./createChecksumStream.browser"; +export function createChecksumStream(init) { + if (typeof ReadableStream === "function" && isReadableStream(init.source)) { + return createChecksumStreamWeb(init); + } + return new ChecksumStream(init); +} diff --git a/amplify/functions/deleteDocument/node_modules/@smithy/util-stream/dist-es/createBufferedReadable.js b/amplify/functions/deleteDocument/node_modules/@smithy/util-stream/dist-es/createBufferedReadable.js new file mode 100644 index 0000000..0e3bbce --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@smithy/util-stream/dist-es/createBufferedReadable.js @@ -0,0 +1,57 @@ +import { Readable } from "node:stream"; +import { ByteArrayCollector } from "./ByteArrayCollector"; +import { createBufferedReadableStream, flush, merge, modeOf, sizeOf } from "./createBufferedReadableStream"; +import { isReadableStream } from "./stream-type-check"; +export function createBufferedReadable(upstream, size, logger) { + if (isReadableStream(upstream)) { + return createBufferedReadableStream(upstream, size, logger); + } + const downstream = new Readable({ read() { } }); + let streamBufferingLoggedWarning = false; + let bytesSeen = 0; + const buffers = [ + "", + new ByteArrayCollector((size) => new Uint8Array(size)), + new ByteArrayCollector((size) => Buffer.from(new Uint8Array(size))), + ]; + let mode = -1; + upstream.on("data", (chunk) => { + const chunkMode = modeOf(chunk, true); + if (mode !== chunkMode) { + if (mode >= 0) { + downstream.push(flush(buffers, mode)); + } + mode = chunkMode; + } + if (mode === -1) { + downstream.push(chunk); + return; + } + const chunkSize = sizeOf(chunk); + bytesSeen += chunkSize; + const bufferSize 
= sizeOf(buffers[mode]); + if (chunkSize >= size && bufferSize === 0) { + downstream.push(chunk); + } + else { + const newSize = merge(buffers, mode, chunk); + if (!streamBufferingLoggedWarning && bytesSeen > size * 2) { + streamBufferingLoggedWarning = true; + logger?.warn(`@smithy/util-stream - stream chunk size ${chunkSize} is below threshold of ${size}, automatically buffering.`); + } + if (newSize >= size) { + downstream.push(flush(buffers, mode)); + } + } + }); + upstream.on("end", () => { + if (mode !== -1) { + const remainder = flush(buffers, mode); + if (sizeOf(remainder) > 0) { + downstream.push(remainder); + } + } + downstream.push(null); + }); + return downstream; +} diff --git a/amplify/functions/deleteDocument/node_modules/@smithy/util-stream/dist-es/createBufferedReadableStream.js b/amplify/functions/deleteDocument/node_modules/@smithy/util-stream/dist-es/createBufferedReadableStream.js new file mode 100644 index 0000000..698a757 --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@smithy/util-stream/dist-es/createBufferedReadableStream.js @@ -0,0 +1,95 @@ +import { ByteArrayCollector } from "./ByteArrayCollector"; +export function createBufferedReadableStream(upstream, size, logger) { + const reader = upstream.getReader(); + let streamBufferingLoggedWarning = false; + let bytesSeen = 0; + const buffers = ["", new ByteArrayCollector((size) => new Uint8Array(size))]; + let mode = -1; + const pull = async (controller) => { + const { value, done } = await reader.read(); + const chunk = value; + if (done) { + if (mode !== -1) { + const remainder = flush(buffers, mode); + if (sizeOf(remainder) > 0) { + controller.enqueue(remainder); + } + } + controller.close(); + } + else { + const chunkMode = modeOf(chunk, false); + if (mode !== chunkMode) { + if (mode >= 0) { + controller.enqueue(flush(buffers, mode)); + } + mode = chunkMode; + } + if (mode === -1) { + controller.enqueue(chunk); + return; + } + const chunkSize = sizeOf(chunk); + bytesSeen 
+= chunkSize; + const bufferSize = sizeOf(buffers[mode]); + if (chunkSize >= size && bufferSize === 0) { + controller.enqueue(chunk); + } + else { + const newSize = merge(buffers, mode, chunk); + if (!streamBufferingLoggedWarning && bytesSeen > size * 2) { + streamBufferingLoggedWarning = true; + logger?.warn(`@smithy/util-stream - stream chunk size ${chunkSize} is below threshold of ${size}, automatically buffering.`); + } + if (newSize >= size) { + controller.enqueue(flush(buffers, mode)); + } + else { + await pull(controller); + } + } + } + }; + return new ReadableStream({ + pull, + }); +} +export const createBufferedReadable = createBufferedReadableStream; +export function merge(buffers, mode, chunk) { + switch (mode) { + case 0: + buffers[0] += chunk; + return sizeOf(buffers[0]); + case 1: + case 2: + buffers[mode].push(chunk); + return sizeOf(buffers[mode]); + } +} +export function flush(buffers, mode) { + switch (mode) { + case 0: + const s = buffers[0]; + buffers[0] = ""; + return s; + case 1: + case 2: + return buffers[mode].flush(); + } + throw new Error(`@smithy/util-stream - invalid index ${mode} given to flush()`); +} +export function sizeOf(chunk) { + return chunk?.byteLength ?? chunk?.length ?? 
0; +} +export function modeOf(chunk, allowBuffer = true) { + if (allowBuffer && typeof Buffer !== "undefined" && chunk instanceof Buffer) { + return 2; + } + if (chunk instanceof Uint8Array) { + return 1; + } + if (typeof chunk === "string") { + return 0; + } + return -1; +} diff --git a/amplify/functions/deleteDocument/node_modules/@smithy/util-stream/dist-es/getAwsChunkedEncodingStream.browser.js b/amplify/functions/deleteDocument/node_modules/@smithy/util-stream/dist-es/getAwsChunkedEncodingStream.browser.js new file mode 100644 index 0000000..b5d5fa4 --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@smithy/util-stream/dist-es/getAwsChunkedEncodingStream.browser.js @@ -0,0 +1,27 @@ +export const getAwsChunkedEncodingStream = (readableStream, options) => { + const { base64Encoder, bodyLengthChecker, checksumAlgorithmFn, checksumLocationName, streamHasher } = options; + const checksumRequired = base64Encoder !== undefined && + bodyLengthChecker !== undefined && + checksumAlgorithmFn !== undefined && + checksumLocationName !== undefined && + streamHasher !== undefined; + const digest = checksumRequired ? 
streamHasher(checksumAlgorithmFn, readableStream) : undefined; + const reader = readableStream.getReader(); + return new ReadableStream({ + async pull(controller) { + const { value, done } = await reader.read(); + if (done) { + controller.enqueue(`0\r\n`); + if (checksumRequired) { + const checksum = base64Encoder(await digest); + controller.enqueue(`${checksumLocationName}:${checksum}\r\n`); + controller.enqueue(`\r\n`); + } + controller.close(); + } + else { + controller.enqueue(`${(bodyLengthChecker(value) || 0).toString(16)}\r\n${value}\r\n`); + } + }, + }); +}; diff --git a/amplify/functions/deleteDocument/node_modules/@smithy/util-stream/dist-es/getAwsChunkedEncodingStream.js b/amplify/functions/deleteDocument/node_modules/@smithy/util-stream/dist-es/getAwsChunkedEncodingStream.js new file mode 100644 index 0000000..7c55116 --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@smithy/util-stream/dist-es/getAwsChunkedEncodingStream.js @@ -0,0 +1,26 @@ +import { Readable } from "stream"; +export const getAwsChunkedEncodingStream = (readableStream, options) => { + const { base64Encoder, bodyLengthChecker, checksumAlgorithmFn, checksumLocationName, streamHasher } = options; + const checksumRequired = base64Encoder !== undefined && + checksumAlgorithmFn !== undefined && + checksumLocationName !== undefined && + streamHasher !== undefined; + const digest = checksumRequired ? 
streamHasher(checksumAlgorithmFn, readableStream) : undefined; + const awsChunkedEncodingStream = new Readable({ read: () => { } }); + readableStream.on("data", (data) => { + const length = bodyLengthChecker(data) || 0; + awsChunkedEncodingStream.push(`${length.toString(16)}\r\n`); + awsChunkedEncodingStream.push(data); + awsChunkedEncodingStream.push("\r\n"); + }); + readableStream.on("end", async () => { + awsChunkedEncodingStream.push(`0\r\n`); + if (checksumRequired) { + const checksum = base64Encoder(await digest); + awsChunkedEncodingStream.push(`${checksumLocationName}:${checksum}\r\n`); + awsChunkedEncodingStream.push(`\r\n`); + } + awsChunkedEncodingStream.push(null); + }); + return awsChunkedEncodingStream; +}; diff --git a/amplify/functions/deleteDocument/node_modules/@smithy/util-stream/dist-es/headStream.browser.js b/amplify/functions/deleteDocument/node_modules/@smithy/util-stream/dist-es/headStream.browser.js new file mode 100644 index 0000000..4e7f864 --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@smithy/util-stream/dist-es/headStream.browser.js @@ -0,0 +1,31 @@ +export async function headStream(stream, bytes) { + let byteLengthCounter = 0; + const chunks = []; + const reader = stream.getReader(); + let isDone = false; + while (!isDone) { + const { done, value } = await reader.read(); + if (value) { + chunks.push(value); + byteLengthCounter += value?.byteLength ?? 
0; + } + if (byteLengthCounter >= bytes) { + break; + } + isDone = done; + } + reader.releaseLock(); + const collected = new Uint8Array(Math.min(bytes, byteLengthCounter)); + let offset = 0; + for (const chunk of chunks) { + if (chunk.byteLength > collected.byteLength - offset) { + collected.set(chunk.subarray(0, collected.byteLength - offset), offset); + break; + } + else { + collected.set(chunk, offset); + } + offset += chunk.length; + } + return collected; +} diff --git a/amplify/functions/deleteDocument/node_modules/@smithy/util-stream/dist-es/headStream.js b/amplify/functions/deleteDocument/node_modules/@smithy/util-stream/dist-es/headStream.js new file mode 100644 index 0000000..27b28ea --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@smithy/util-stream/dist-es/headStream.js @@ -0,0 +1,41 @@ +import { Writable } from "stream"; +import { headStream as headWebStream } from "./headStream.browser"; +import { isReadableStream } from "./stream-type-check"; +export const headStream = (stream, bytes) => { + if (isReadableStream(stream)) { + return headWebStream(stream, bytes); + } + return new Promise((resolve, reject) => { + const collector = new Collector(); + collector.limit = bytes; + stream.pipe(collector); + stream.on("error", (err) => { + collector.end(); + reject(err); + }); + collector.on("error", reject); + collector.on("finish", function () { + const bytes = new Uint8Array(Buffer.concat(this.buffers)); + resolve(bytes); + }); + }); +}; +class Collector extends Writable { + constructor() { + super(...arguments); + this.buffers = []; + this.limit = Infinity; + this.bytesBuffered = 0; + } + _write(chunk, encoding, callback) { + this.buffers.push(chunk); + this.bytesBuffered += chunk.byteLength ?? 
0; + if (this.bytesBuffered >= this.limit) { + const excess = this.bytesBuffered - this.limit; + const tailBuffer = this.buffers[this.buffers.length - 1]; + this.buffers[this.buffers.length - 1] = tailBuffer.subarray(0, tailBuffer.byteLength - excess); + this.emit("finish"); + } + callback(); + } +} diff --git a/amplify/functions/deleteDocument/node_modules/@smithy/util-stream/dist-es/index.js b/amplify/functions/deleteDocument/node_modules/@smithy/util-stream/dist-es/index.js new file mode 100644 index 0000000..1b5b599 --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@smithy/util-stream/dist-es/index.js @@ -0,0 +1,9 @@ +export * from "./blob/Uint8ArrayBlobAdapter"; +export * from "./checksum/ChecksumStream"; +export * from "./checksum/createChecksumStream"; +export * from "./createBufferedReadable"; +export * from "./getAwsChunkedEncodingStream"; +export * from "./headStream"; +export * from "./sdk-stream-mixin"; +export * from "./splitStream"; +export * from "./stream-type-check"; diff --git a/amplify/functions/deleteDocument/node_modules/@smithy/util-stream/dist-es/sdk-stream-mixin.browser.js b/amplify/functions/deleteDocument/node_modules/@smithy/util-stream/dist-es/sdk-stream-mixin.browser.js new file mode 100644 index 0000000..f21ff66 --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@smithy/util-stream/dist-es/sdk-stream-mixin.browser.js @@ -0,0 +1,64 @@ +import { streamCollector } from "@smithy/fetch-http-handler"; +import { toBase64 } from "@smithy/util-base64"; +import { toHex } from "@smithy/util-hex-encoding"; +import { toUtf8 } from "@smithy/util-utf8"; +import { isReadableStream } from "./stream-type-check"; +const ERR_MSG_STREAM_HAS_BEEN_TRANSFORMED = "The stream has already been transformed."; +export const sdkStreamMixin = (stream) => { + if (!isBlobInstance(stream) && !isReadableStream(stream)) { + const name = stream?.__proto__?.constructor?.name || stream; + throw new Error(`Unexpected stream implementation, 
expect Blob or ReadableStream, got ${name}`); + } + let transformed = false; + const transformToByteArray = async () => { + if (transformed) { + throw new Error(ERR_MSG_STREAM_HAS_BEEN_TRANSFORMED); + } + transformed = true; + return await streamCollector(stream); + }; + const blobToWebStream = (blob) => { + if (typeof blob.stream !== "function") { + throw new Error("Cannot transform payload Blob to web stream. Please make sure the Blob.stream() is polyfilled.\n" + + "If you are using React Native, this API is not yet supported, see: https://react-native.canny.io/feature-requests/p/fetch-streaming-body"); + } + return blob.stream(); + }; + return Object.assign(stream, { + transformToByteArray: transformToByteArray, + transformToString: async (encoding) => { + const buf = await transformToByteArray(); + if (encoding === "base64") { + return toBase64(buf); + } + else if (encoding === "hex") { + return toHex(buf); + } + else if (encoding === undefined || encoding === "utf8" || encoding === "utf-8") { + return toUtf8(buf); + } + else if (typeof TextDecoder === "function") { + return new TextDecoder(encoding).decode(buf); + } + else { + throw new Error("TextDecoder is not available, please make sure polyfill is provided."); + } + }, + transformToWebStream: () => { + if (transformed) { + throw new Error(ERR_MSG_STREAM_HAS_BEEN_TRANSFORMED); + } + transformed = true; + if (isBlobInstance(stream)) { + return blobToWebStream(stream); + } + else if (isReadableStream(stream)) { + return stream; + } + else { + throw new Error(`Cannot transform payload to web stream, got ${stream}`); + } + }, + }); +}; +const isBlobInstance = (stream) => typeof Blob === "function" && stream instanceof Blob; diff --git a/amplify/functions/deleteDocument/node_modules/@smithy/util-stream/dist-es/sdk-stream-mixin.js b/amplify/functions/deleteDocument/node_modules/@smithy/util-stream/dist-es/sdk-stream-mixin.js new file mode 100644 index 0000000..4731333 --- /dev/null +++ 
b/amplify/functions/deleteDocument/node_modules/@smithy/util-stream/dist-es/sdk-stream-mixin.js @@ -0,0 +1,50 @@ +import { streamCollector } from "@smithy/node-http-handler"; +import { fromArrayBuffer } from "@smithy/util-buffer-from"; +import { Readable } from "stream"; +import { sdkStreamMixin as sdkStreamMixinReadableStream } from "./sdk-stream-mixin.browser"; +const ERR_MSG_STREAM_HAS_BEEN_TRANSFORMED = "The stream has already been transformed."; +export const sdkStreamMixin = (stream) => { + if (!(stream instanceof Readable)) { + try { + return sdkStreamMixinReadableStream(stream); + } + catch (e) { + const name = stream?.__proto__?.constructor?.name || stream; + throw new Error(`Unexpected stream implementation, expect Stream.Readable instance, got ${name}`); + } + } + let transformed = false; + const transformToByteArray = async () => { + if (transformed) { + throw new Error(ERR_MSG_STREAM_HAS_BEEN_TRANSFORMED); + } + transformed = true; + return await streamCollector(stream); + }; + return Object.assign(stream, { + transformToByteArray, + transformToString: async (encoding) => { + const buf = await transformToByteArray(); + if (encoding === undefined || Buffer.isEncoding(encoding)) { + return fromArrayBuffer(buf.buffer, buf.byteOffset, buf.byteLength).toString(encoding); + } + else { + const decoder = new TextDecoder(encoding); + return decoder.decode(buf); + } + }, + transformToWebStream: () => { + if (transformed) { + throw new Error(ERR_MSG_STREAM_HAS_BEEN_TRANSFORMED); + } + if (stream.readableFlowing !== null) { + throw new Error("The stream has been consumed by other callbacks."); + } + if (typeof Readable.toWeb !== "function") { + throw new Error("Readable.toWeb() is not supported. 
Please ensure a polyfill is available."); + } + transformed = true; + return Readable.toWeb(stream); + }, + }); +}; diff --git a/amplify/functions/deleteDocument/node_modules/@smithy/util-stream/dist-es/splitStream.browser.js b/amplify/functions/deleteDocument/node_modules/@smithy/util-stream/dist-es/splitStream.browser.js new file mode 100644 index 0000000..6f06b0e --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@smithy/util-stream/dist-es/splitStream.browser.js @@ -0,0 +1,7 @@ +export async function splitStream(stream) { + if (typeof stream.stream === "function") { + stream = stream.stream(); + } + const readableStream = stream; + return readableStream.tee(); +} diff --git a/amplify/functions/deleteDocument/node_modules/@smithy/util-stream/dist-es/splitStream.js b/amplify/functions/deleteDocument/node_modules/@smithy/util-stream/dist-es/splitStream.js new file mode 100644 index 0000000..1a8c032 --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@smithy/util-stream/dist-es/splitStream.js @@ -0,0 +1,13 @@ +import { PassThrough } from "stream"; +import { splitStream as splitWebStream } from "./splitStream.browser"; +import { isBlob, isReadableStream } from "./stream-type-check"; +export async function splitStream(stream) { + if (isReadableStream(stream) || isBlob(stream)) { + return splitWebStream(stream); + } + const stream1 = new PassThrough(); + const stream2 = new PassThrough(); + stream.pipe(stream1); + stream.pipe(stream2); + return [stream1, stream2]; +} diff --git a/amplify/functions/deleteDocument/node_modules/@smithy/util-stream/dist-es/stream-type-check.js b/amplify/functions/deleteDocument/node_modules/@smithy/util-stream/dist-es/stream-type-check.js new file mode 100644 index 0000000..6ee93a3 --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@smithy/util-stream/dist-es/stream-type-check.js @@ -0,0 +1,5 @@ +export const isReadableStream = (stream) => typeof ReadableStream === "function" && + 
(stream?.constructor?.name === ReadableStream.name || stream instanceof ReadableStream); +export const isBlob = (blob) => { + return typeof Blob === "function" && (blob?.constructor?.name === Blob.name || blob instanceof Blob); +}; diff --git a/amplify/functions/deleteDocument/node_modules/@smithy/util-stream/dist-types/ByteArrayCollector.d.ts b/amplify/functions/deleteDocument/node_modules/@smithy/util-stream/dist-types/ByteArrayCollector.d.ts new file mode 100644 index 0000000..a1bbd53 --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@smithy/util-stream/dist-types/ByteArrayCollector.d.ts @@ -0,0 +1,13 @@ +/** + * Aggregates byteArrays on demand. + * @internal + */ +export declare class ByteArrayCollector { + readonly allocByteArray: (size: number) => Uint8Array; + byteLength: number; + private byteArrays; + constructor(allocByteArray: (size: number) => Uint8Array); + push(byteArray: Uint8Array): void; + flush(): Uint8Array; + private reset; +} diff --git a/amplify/functions/deleteDocument/node_modules/@smithy/util-stream/dist-types/blob/Uint8ArrayBlobAdapter.d.ts b/amplify/functions/deleteDocument/node_modules/@smithy/util-stream/dist-types/blob/Uint8ArrayBlobAdapter.d.ts new file mode 100644 index 0000000..c3d994d --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@smithy/util-stream/dist-types/blob/Uint8ArrayBlobAdapter.d.ts @@ -0,0 +1,21 @@ +/** + * Adapter for conversions of the native Uint8Array type. + * @public + */ +export declare class Uint8ArrayBlobAdapter extends Uint8Array { + /** + * @param source - such as a string or Stream. + * @returns a new Uint8ArrayBlobAdapter extending Uint8Array. + */ + static fromString(source: string, encoding?: string): Uint8ArrayBlobAdapter; + /** + * @param source - Uint8Array to be mutated. + * @returns the same Uint8Array but with prototype switched to Uint8ArrayBlobAdapter. + */ + static mutate(source: Uint8Array): Uint8ArrayBlobAdapter; + /** + * @param encoding - default 'utf-8'. 
+ * @returns the blob as string. + */ + transformToString(encoding?: string): string; +} diff --git a/amplify/functions/deleteDocument/node_modules/@smithy/util-stream/dist-types/blob/transforms.d.ts b/amplify/functions/deleteDocument/node_modules/@smithy/util-stream/dist-types/blob/transforms.d.ts new file mode 100644 index 0000000..c54a18b --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@smithy/util-stream/dist-types/blob/transforms.d.ts @@ -0,0 +1,9 @@ +import { Uint8ArrayBlobAdapter } from "./Uint8ArrayBlobAdapter"; +/** + * @internal + */ +export declare function transformToString(payload: Uint8Array, encoding?: string): string; +/** + * @internal + */ +export declare function transformFromString(str: string, encoding?: string): Uint8ArrayBlobAdapter; diff --git a/amplify/functions/deleteDocument/node_modules/@smithy/util-stream/dist-types/checksum/ChecksumStream.browser.d.ts b/amplify/functions/deleteDocument/node_modules/@smithy/util-stream/dist-types/checksum/ChecksumStream.browser.d.ts new file mode 100644 index 0000000..0c5fbd4 --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@smithy/util-stream/dist-types/checksum/ChecksumStream.browser.d.ts @@ -0,0 +1,37 @@ +import { Checksum, Encoder } from "@smithy/types"; +/** + * @internal + */ +export interface ChecksumStreamInit { + /** + * Base64 value of the expected checksum. + */ + expectedChecksum: string; + /** + * For error messaging, the location from which the checksum value was read. + */ + checksumSourceLocation: string; + /** + * The checksum calculator. + */ + checksum: Checksum; + /** + * The stream to be checked. + */ + source: ReadableStream; + /** + * Optional base 64 encoder if calling from a request context. + */ + base64Encoder?: Encoder; +} +declare const ChecksumStream_base: any; +/** + * This stub exists so that the readable returned by createChecksumStream + * identifies as "ChecksumStream" in alignment with the Node.js + * implementation. 
+ * + * @extends ReadableStream + */ +export declare class ChecksumStream extends ChecksumStream_base { +} +export {}; diff --git a/amplify/functions/deleteDocument/node_modules/@smithy/util-stream/dist-types/checksum/ChecksumStream.d.ts b/amplify/functions/deleteDocument/node_modules/@smithy/util-stream/dist-types/checksum/ChecksumStream.d.ts new file mode 100644 index 0000000..6893e55 --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@smithy/util-stream/dist-types/checksum/ChecksumStream.d.ts @@ -0,0 +1,62 @@ +/// +/// +/// +import { Checksum, Encoder } from "@smithy/types"; +import { Duplex, Readable } from "stream"; +/** + * @internal + */ +export interface ChecksumStreamInit { + /** + * Base64 value of the expected checksum. + */ + expectedChecksum: string; + /** + * For error messaging, the location from which the checksum value was read. + */ + checksumSourceLocation: string; + /** + * The checksum calculator. + */ + checksum: Checksum; + /** + * The stream to be checked. + */ + source: T; + /** + * Optional base 64 encoder if calling from a request context. + */ + base64Encoder?: Encoder; +} +/** + * @internal + * + * Wrapper for throwing checksum errors for streams without + * buffering the stream. + * + */ +export declare class ChecksumStream extends Duplex { + private expectedChecksum; + private checksumSourceLocation; + private checksum; + private source?; + private base64Encoder; + constructor({ expectedChecksum, checksum, source, checksumSourceLocation, base64Encoder, }: ChecksumStreamInit); + /** + * @internal do not call this directly. + */ + _read(size: number): void; + /** + * @internal do not call this directly. + * + * When the upstream source flows data to this stream, + * calculate a step update of the checksum. + */ + _write(chunk: Buffer, encoding: string, callback: (err?: Error) => void): void; + /** + * @internal do not call this directly. + * + * When the upstream source finishes, perform the checksum comparison. 
+ */ + _final(callback: (err?: Error) => void): Promise; +} diff --git a/amplify/functions/deleteDocument/node_modules/@smithy/util-stream/dist-types/checksum/createChecksumStream.browser.d.ts b/amplify/functions/deleteDocument/node_modules/@smithy/util-stream/dist-types/checksum/createChecksumStream.browser.d.ts new file mode 100644 index 0000000..1874987 --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@smithy/util-stream/dist-types/checksum/createChecksumStream.browser.d.ts @@ -0,0 +1,15 @@ +import { ChecksumStreamInit } from "./ChecksumStream.browser"; +/** + * @internal + * Alias prevents compiler from turning + * ReadableStream into ReadableStream, which is incompatible + * with the NodeJS.ReadableStream global type. + */ +export type ReadableStreamType = ReadableStream; +/** + * @internal + * + * Creates a stream adapter for throwing checksum errors for streams without + * buffering the stream. + */ +export declare const createChecksumStream: ({ expectedChecksum, checksum, source, checksumSourceLocation, base64Encoder, }: ChecksumStreamInit) => ReadableStreamType; diff --git a/amplify/functions/deleteDocument/node_modules/@smithy/util-stream/dist-types/checksum/createChecksumStream.d.ts b/amplify/functions/deleteDocument/node_modules/@smithy/util-stream/dist-types/checksum/createChecksumStream.d.ts new file mode 100644 index 0000000..db09f80 --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@smithy/util-stream/dist-types/checksum/createChecksumStream.d.ts @@ -0,0 +1,12 @@ +/// +import { Readable } from "stream"; +import { ChecksumStreamInit } from "./ChecksumStream"; +import { ReadableStreamType } from "./createChecksumStream.browser"; +/** + * @internal + * + * Creates a stream mirroring the input stream's interface, but + * performs checksumming when reading to the end of the stream. 
+ */ +export declare function createChecksumStream(init: ChecksumStreamInit): ReadableStreamType; +export declare function createChecksumStream(init: ChecksumStreamInit): Readable; diff --git a/amplify/functions/deleteDocument/node_modules/@smithy/util-stream/dist-types/createBufferedReadable.d.ts b/amplify/functions/deleteDocument/node_modules/@smithy/util-stream/dist-types/createBufferedReadable.d.ts new file mode 100644 index 0000000..b173636 --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@smithy/util-stream/dist-types/createBufferedReadable.d.ts @@ -0,0 +1,13 @@ +/// +import type { Logger } from "@smithy/types"; +import { Readable } from "node:stream"; +/** + * @internal + * @param upstream - any Readable or ReadableStream. + * @param size - byte or character length minimum. Buffering occurs when a chunk fails to meet this value. + * @param onBuffer - for emitting warnings when buffering occurs. + * @returns another stream of the same data and stream class, but buffers chunks until + * the minimum size is met, except for the last chunk. 
+ */ +export declare function createBufferedReadable(upstream: Readable, size: number, logger?: Logger): Readable; +export declare function createBufferedReadable(upstream: ReadableStream, size: number, logger?: Logger): ReadableStream; diff --git a/amplify/functions/deleteDocument/node_modules/@smithy/util-stream/dist-types/createBufferedReadableStream.d.ts b/amplify/functions/deleteDocument/node_modules/@smithy/util-stream/dist-types/createBufferedReadableStream.d.ts new file mode 100644 index 0000000..9f6cdbd --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@smithy/util-stream/dist-types/createBufferedReadableStream.d.ts @@ -0,0 +1,50 @@ +import type { Logger } from "@smithy/types"; +import { ByteArrayCollector } from "./ByteArrayCollector"; +export type BufferStore = [string, ByteArrayCollector, ByteArrayCollector?]; +export type BufferUnion = string | Uint8Array; +export type Modes = 0 | 1 | 2; +/** + * @internal + * @param upstream - any ReadableStream. + * @param size - byte or character length minimum. Buffering occurs when a chunk fails to meet this value. + * @param logger - for emitting warnings when buffering occurs. + * @returns another stream of the same data, but buffers chunks until + * the minimum size is met, except for the last chunk. + */ +export declare function createBufferedReadableStream(upstream: ReadableStream, size: number, logger?: Logger): ReadableStream; +/** + * Replaces R/RS polymorphic implementation in environments with only ReadableStream. + * @internal + */ +export declare const createBufferedReadable: typeof createBufferedReadableStream; +/** + * @internal + * @param buffers + * @param mode + * @param chunk + * @returns the new buffer size after merging the chunk with its appropriate buffer. + */ +export declare function merge(buffers: BufferStore, mode: Modes, chunk: string | Uint8Array): number; +/** + * @internal + * @param buffers + * @param mode + * @returns the buffer matching the mode. 
+ */ +export declare function flush(buffers: BufferStore, mode: Modes | -1): BufferUnion; +/** + * @internal + * @param chunk + * @returns size of the chunk in bytes or characters. + */ +export declare function sizeOf(chunk?: { + byteLength?: number; + length?: number; +}): number; +/** + * @internal + * @param chunk - from upstream Readable. + * @param allowBuffer - allow mode 2 (Buffer), otherwise Buffer will return mode 1. + * @returns type index of the chunk. + */ +export declare function modeOf(chunk: BufferUnion, allowBuffer?: boolean): Modes | -1; diff --git a/amplify/functions/deleteDocument/node_modules/@smithy/util-stream/dist-types/getAwsChunkedEncodingStream.browser.d.ts b/amplify/functions/deleteDocument/node_modules/@smithy/util-stream/dist-types/getAwsChunkedEncodingStream.browser.d.ts new file mode 100644 index 0000000..f767f77 --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@smithy/util-stream/dist-types/getAwsChunkedEncodingStream.browser.d.ts @@ -0,0 +1,5 @@ +import { GetAwsChunkedEncodingStream } from "@smithy/types"; +/** + * @internal + */ +export declare const getAwsChunkedEncodingStream: GetAwsChunkedEncodingStream; diff --git a/amplify/functions/deleteDocument/node_modules/@smithy/util-stream/dist-types/getAwsChunkedEncodingStream.d.ts b/amplify/functions/deleteDocument/node_modules/@smithy/util-stream/dist-types/getAwsChunkedEncodingStream.d.ts new file mode 100644 index 0000000..d3997d0 --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@smithy/util-stream/dist-types/getAwsChunkedEncodingStream.d.ts @@ -0,0 +1,7 @@ +/// +import { GetAwsChunkedEncodingStream } from "@smithy/types"; +import { Readable } from "stream"; +/** + * @internal + */ +export declare const getAwsChunkedEncodingStream: GetAwsChunkedEncodingStream; diff --git a/amplify/functions/deleteDocument/node_modules/@smithy/util-stream/dist-types/headStream.browser.d.ts 
b/amplify/functions/deleteDocument/node_modules/@smithy/util-stream/dist-types/headStream.browser.d.ts new file mode 100644 index 0000000..80ad267 --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@smithy/util-stream/dist-types/headStream.browser.d.ts @@ -0,0 +1,8 @@ +/** + * @internal + * @param stream + * @param bytes - read head bytes from the stream and discard the rest of it. + * + * Caution: the input stream must be destroyed separately, this function does not do so. + */ +export declare function headStream(stream: ReadableStream, bytes: number): Promise; diff --git a/amplify/functions/deleteDocument/node_modules/@smithy/util-stream/dist-types/headStream.d.ts b/amplify/functions/deleteDocument/node_modules/@smithy/util-stream/dist-types/headStream.d.ts new file mode 100644 index 0000000..7ab9714 --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@smithy/util-stream/dist-types/headStream.d.ts @@ -0,0 +1,9 @@ +import { Readable } from "stream"; +/** + * @internal + * @param stream - to be read. + * @param bytes - read head bytes from the stream and discard the rest of it. + * + * Caution: the input stream must be destroyed separately, this function does not do so. 
+ */ +export declare const headStream: (stream: Readable | ReadableStream, bytes: number) => Promise; diff --git a/amplify/functions/deleteDocument/node_modules/@smithy/util-stream/dist-types/index.d.ts b/amplify/functions/deleteDocument/node_modules/@smithy/util-stream/dist-types/index.d.ts new file mode 100644 index 0000000..1b5b599 --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@smithy/util-stream/dist-types/index.d.ts @@ -0,0 +1,9 @@ +export * from "./blob/Uint8ArrayBlobAdapter"; +export * from "./checksum/ChecksumStream"; +export * from "./checksum/createChecksumStream"; +export * from "./createBufferedReadable"; +export * from "./getAwsChunkedEncodingStream"; +export * from "./headStream"; +export * from "./sdk-stream-mixin"; +export * from "./splitStream"; +export * from "./stream-type-check"; diff --git a/amplify/functions/deleteDocument/node_modules/@smithy/util-stream/dist-types/sdk-stream-mixin.browser.d.ts b/amplify/functions/deleteDocument/node_modules/@smithy/util-stream/dist-types/sdk-stream-mixin.browser.d.ts new file mode 100644 index 0000000..400c0b2 --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@smithy/util-stream/dist-types/sdk-stream-mixin.browser.d.ts @@ -0,0 +1,7 @@ +import { SdkStream } from "@smithy/types"; +/** + * The stream handling utility functions for browsers and React Native + * + * @internal + */ +export declare const sdkStreamMixin: (stream: unknown) => SdkStream; diff --git a/amplify/functions/deleteDocument/node_modules/@smithy/util-stream/dist-types/sdk-stream-mixin.d.ts b/amplify/functions/deleteDocument/node_modules/@smithy/util-stream/dist-types/sdk-stream-mixin.d.ts new file mode 100644 index 0000000..34fcb6f --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@smithy/util-stream/dist-types/sdk-stream-mixin.d.ts @@ -0,0 +1,8 @@ +import { SdkStream } from "@smithy/types"; +import { Readable } from "stream"; +/** + * The function that mixes in the utility functions to help 
consuming runtime-specific payload stream. + * + * @internal + */ +export declare const sdkStreamMixin: (stream: unknown) => SdkStream | SdkStream; diff --git a/amplify/functions/deleteDocument/node_modules/@smithy/util-stream/dist-types/splitStream.browser.d.ts b/amplify/functions/deleteDocument/node_modules/@smithy/util-stream/dist-types/splitStream.browser.d.ts new file mode 100644 index 0000000..506c23a --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@smithy/util-stream/dist-types/splitStream.browser.d.ts @@ -0,0 +1,5 @@ +/** + * @param stream + * @returns stream split into two identical streams. + */ +export declare function splitStream(stream: ReadableStream | Blob): Promise<[ReadableStream, ReadableStream]>; diff --git a/amplify/functions/deleteDocument/node_modules/@smithy/util-stream/dist-types/splitStream.d.ts b/amplify/functions/deleteDocument/node_modules/@smithy/util-stream/dist-types/splitStream.d.ts new file mode 100644 index 0000000..8a8a48c --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@smithy/util-stream/dist-types/splitStream.d.ts @@ -0,0 +1,9 @@ +/// +import type { Readable } from "stream"; +/** + * @internal + * @param stream - to be split. + * @returns stream split into two identical streams. 
+ */ +export declare function splitStream(stream: Readable): Promise<[Readable, Readable]>; +export declare function splitStream(stream: ReadableStream): Promise<[ReadableStream, ReadableStream]>; diff --git a/amplify/functions/deleteDocument/node_modules/@smithy/util-stream/dist-types/stream-type-check.d.ts b/amplify/functions/deleteDocument/node_modules/@smithy/util-stream/dist-types/stream-type-check.d.ts new file mode 100644 index 0000000..5607088 --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@smithy/util-stream/dist-types/stream-type-check.d.ts @@ -0,0 +1,16 @@ +/** + * @internal + * Alias prevents compiler from turning + * ReadableStream into ReadableStream, which is incompatible + * with the NodeJS.ReadableStream global type. + */ +type ReadableStreamType = ReadableStream; +/** + * @internal + */ +export declare const isReadableStream: (stream: unknown) => stream is ReadableStreamType; +/** + * @internal + */ +export declare const isBlob: (blob: unknown) => blob is Blob; +export {}; diff --git a/amplify/functions/deleteDocument/node_modules/@smithy/util-stream/dist-types/ts3.4/ByteArrayCollector.d.ts b/amplify/functions/deleteDocument/node_modules/@smithy/util-stream/dist-types/ts3.4/ByteArrayCollector.d.ts new file mode 100644 index 0000000..c309a6c --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@smithy/util-stream/dist-types/ts3.4/ByteArrayCollector.d.ts @@ -0,0 +1,13 @@ +/** + * Aggregates byteArrays on demand. 
+ * @internal + */ +export declare class ByteArrayCollector { + readonly allocByteArray: (size: number) => Uint8Array; + byteLength: number; + private byteArrays; + constructor(allocByteArray: (size: number) => Uint8Array); + push(byteArray: Uint8Array): void; + flush(): Uint8Array; + private reset; +} diff --git a/amplify/functions/deleteDocument/node_modules/@smithy/util-stream/dist-types/ts3.4/blob/Uint8ArrayBlobAdapter.d.ts b/amplify/functions/deleteDocument/node_modules/@smithy/util-stream/dist-types/ts3.4/blob/Uint8ArrayBlobAdapter.d.ts new file mode 100644 index 0000000..e0338a2 --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@smithy/util-stream/dist-types/ts3.4/blob/Uint8ArrayBlobAdapter.d.ts @@ -0,0 +1,21 @@ +/** + * Adapter for conversions of the native Uint8Array type. + * @public + */ +export declare class Uint8ArrayBlobAdapter extends Uint8Array { + /** + * @param source - such as a string or Stream. + * @returns a new Uint8ArrayBlobAdapter extending Uint8Array. + */ + static fromString(source: string, encoding?: string): Uint8ArrayBlobAdapter; + /** + * @param source - Uint8Array to be mutated. + * @returns the same Uint8Array but with prototype switched to Uint8ArrayBlobAdapter. + */ + static mutate(source: Uint8Array): Uint8ArrayBlobAdapter; + /** + * @param encoding - default 'utf-8'. + * @returns the blob as string. 
+ */ + transformToString(encoding?: string): string; +} diff --git a/amplify/functions/deleteDocument/node_modules/@smithy/util-stream/dist-types/ts3.4/blob/transforms.d.ts b/amplify/functions/deleteDocument/node_modules/@smithy/util-stream/dist-types/ts3.4/blob/transforms.d.ts new file mode 100644 index 0000000..6e3ee0a --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@smithy/util-stream/dist-types/ts3.4/blob/transforms.d.ts @@ -0,0 +1,9 @@ +import { Uint8ArrayBlobAdapter } from "./Uint8ArrayBlobAdapter"; +/** + * @internal + */ +export declare function transformToString(payload: Uint8Array, encoding?: string): string; +/** + * @internal + */ +export declare function transformFromString(str: string, encoding?: string): Uint8ArrayBlobAdapter; diff --git a/amplify/functions/deleteDocument/node_modules/@smithy/util-stream/dist-types/ts3.4/checksum/ChecksumStream.browser.d.ts b/amplify/functions/deleteDocument/node_modules/@smithy/util-stream/dist-types/ts3.4/checksum/ChecksumStream.browser.d.ts new file mode 100644 index 0000000..902a9b2 --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@smithy/util-stream/dist-types/ts3.4/checksum/ChecksumStream.browser.d.ts @@ -0,0 +1,37 @@ +import { Checksum, Encoder } from "@smithy/types"; +/** + * @internal + */ +export interface ChecksumStreamInit { + /** + * Base64 value of the expected checksum. + */ + expectedChecksum: string; + /** + * For error messaging, the location from which the checksum value was read. + */ + checksumSourceLocation: string; + /** + * The checksum calculator. + */ + checksum: Checksum; + /** + * The stream to be checked. + */ + source: ReadableStream; + /** + * Optional base 64 encoder if calling from a request context. + */ + base64Encoder?: Encoder; +} +declare const ChecksumStream_base: any; +/** + * This stub exists so that the readable returned by createChecksumStream + * identifies as "ChecksumStream" in alignment with the Node.js + * implementation. 
+ * + * @extends ReadableStream + */ +export declare class ChecksumStream extends ChecksumStream_base { +} +export {}; diff --git a/amplify/functions/deleteDocument/node_modules/@smithy/util-stream/dist-types/ts3.4/checksum/ChecksumStream.d.ts b/amplify/functions/deleteDocument/node_modules/@smithy/util-stream/dist-types/ts3.4/checksum/ChecksumStream.d.ts new file mode 100644 index 0000000..7151034 --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@smithy/util-stream/dist-types/ts3.4/checksum/ChecksumStream.d.ts @@ -0,0 +1,60 @@ +/// +import { Checksum, Encoder } from "@smithy/types"; +import { Duplex, Readable } from "stream"; +/** + * @internal + */ +export interface ChecksumStreamInit { + /** + * Base64 value of the expected checksum. + */ + expectedChecksum: string; + /** + * For error messaging, the location from which the checksum value was read. + */ + checksumSourceLocation: string; + /** + * The checksum calculator. + */ + checksum: Checksum; + /** + * The stream to be checked. + */ + source: T; + /** + * Optional base 64 encoder if calling from a request context. + */ + base64Encoder?: Encoder; +} +/** + * @internal + * + * Wrapper for throwing checksum errors for streams without + * buffering the stream. + * + */ +export declare class ChecksumStream extends Duplex { + private expectedChecksum; + private checksumSourceLocation; + private checksum; + private source?; + private base64Encoder; + constructor({ expectedChecksum, checksum, source, checksumSourceLocation, base64Encoder, }: ChecksumStreamInit); + /** + * @internal do not call this directly. + */ + _read(size: number): void; + /** + * @internal do not call this directly. + * + * When the upstream source flows data to this stream, + * calculate a step update of the checksum. + */ + _write(chunk: Buffer, encoding: string, callback: (err?: Error) => void): void; + /** + * @internal do not call this directly. + * + * When the upstream source finishes, perform the checksum comparison. 
+ */ + _final(callback: (err?: Error) => void): Promise; +} diff --git a/amplify/functions/deleteDocument/node_modules/@smithy/util-stream/dist-types/ts3.4/checksum/createChecksumStream.browser.d.ts b/amplify/functions/deleteDocument/node_modules/@smithy/util-stream/dist-types/ts3.4/checksum/createChecksumStream.browser.d.ts new file mode 100644 index 0000000..bd3c004 --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@smithy/util-stream/dist-types/ts3.4/checksum/createChecksumStream.browser.d.ts @@ -0,0 +1,15 @@ +import { ChecksumStreamInit } from "./ChecksumStream.browser"; +/** + * @internal + * Alias prevents compiler from turning + * ReadableStream into ReadableStream, which is incompatible + * with the NodeJS.ReadableStream global type. + */ +export type ReadableStreamType = ReadableStream; +/** + * @internal + * + * Creates a stream adapter for throwing checksum errors for streams without + * buffering the stream. + */ +export declare const createChecksumStream: ({ expectedChecksum, checksum, source, checksumSourceLocation, base64Encoder, }: ChecksumStreamInit) => ReadableStreamType; diff --git a/amplify/functions/deleteDocument/node_modules/@smithy/util-stream/dist-types/ts3.4/checksum/createChecksumStream.d.ts b/amplify/functions/deleteDocument/node_modules/@smithy/util-stream/dist-types/ts3.4/checksum/createChecksumStream.d.ts new file mode 100644 index 0000000..dc36418 --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@smithy/util-stream/dist-types/ts3.4/checksum/createChecksumStream.d.ts @@ -0,0 +1,12 @@ +/// +import { Readable } from "stream"; +import { ChecksumStreamInit } from "./ChecksumStream"; +import { ReadableStreamType } from "./createChecksumStream.browser"; +/** + * @internal + * + * Creates a stream mirroring the input stream's interface, but + * performs checksumming when reading to the end of the stream. 
+ */ +export declare function createChecksumStream(init: ChecksumStreamInit): ReadableStreamType; +export declare function createChecksumStream(init: ChecksumStreamInit): Readable; diff --git a/amplify/functions/deleteDocument/node_modules/@smithy/util-stream/dist-types/ts3.4/createBufferedReadable.d.ts b/amplify/functions/deleteDocument/node_modules/@smithy/util-stream/dist-types/ts3.4/createBufferedReadable.d.ts new file mode 100644 index 0000000..f62c741 --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@smithy/util-stream/dist-types/ts3.4/createBufferedReadable.d.ts @@ -0,0 +1,13 @@ +/// +import { Logger } from "@smithy/types"; +import { Readable } from "node:stream"; +/** + * @internal + * @param upstream - any Readable or ReadableStream. + * @param size - byte or character length minimum. Buffering occurs when a chunk fails to meet this value. + * @param onBuffer - for emitting warnings when buffering occurs. + * @returns another stream of the same data and stream class, but buffers chunks until + * the minimum size is met, except for the last chunk. + */ +export declare function createBufferedReadable(upstream: Readable, size: number, logger?: Logger): Readable; +export declare function createBufferedReadable(upstream: ReadableStream, size: number, logger?: Logger): ReadableStream; diff --git a/amplify/functions/deleteDocument/node_modules/@smithy/util-stream/dist-types/ts3.4/createBufferedReadableStream.d.ts b/amplify/functions/deleteDocument/node_modules/@smithy/util-stream/dist-types/ts3.4/createBufferedReadableStream.d.ts new file mode 100644 index 0000000..7b4effd --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@smithy/util-stream/dist-types/ts3.4/createBufferedReadableStream.d.ts @@ -0,0 +1,54 @@ +import { Logger } from "@smithy/types"; +import { ByteArrayCollector } from "./ByteArrayCollector"; +export type BufferStore = [ + string, + ByteArrayCollector, + ByteArrayCollector? 
+]; +export type BufferUnion = string | Uint8Array; +export type Modes = 0 | 1 | 2; +/** + * @internal + * @param upstream - any ReadableStream. + * @param size - byte or character length minimum. Buffering occurs when a chunk fails to meet this value. + * @param logger - for emitting warnings when buffering occurs. + * @returns another stream of the same data, but buffers chunks until + * the minimum size is met, except for the last chunk. + */ +export declare function createBufferedReadableStream(upstream: ReadableStream, size: number, logger?: Logger): ReadableStream; +/** + * Replaces R/RS polymorphic implementation in environments with only ReadableStream. + * @internal + */ +export declare const createBufferedReadable: typeof createBufferedReadableStream; +/** + * @internal + * @param buffers + * @param mode + * @param chunk + * @returns the new buffer size after merging the chunk with its appropriate buffer. + */ +export declare function merge(buffers: BufferStore, mode: Modes, chunk: string | Uint8Array): number; +/** + * @internal + * @param buffers + * @param mode + * @returns the buffer matching the mode. + */ +export declare function flush(buffers: BufferStore, mode: Modes | -1): BufferUnion; +/** + * @internal + * @param chunk + * @returns size of the chunk in bytes or characters. + */ +export declare function sizeOf(chunk?: { + byteLength?: number; + length?: number; +}): number; +/** + * @internal + * @param chunk - from upstream Readable. + * @param allowBuffer - allow mode 2 (Buffer), otherwise Buffer will return mode 1. + * @returns type index of the chunk. 
+ */ +export declare function modeOf(chunk: BufferUnion, allowBuffer?: boolean): Modes | -1; diff --git a/amplify/functions/deleteDocument/node_modules/@smithy/util-stream/dist-types/ts3.4/getAwsChunkedEncodingStream.browser.d.ts b/amplify/functions/deleteDocument/node_modules/@smithy/util-stream/dist-types/ts3.4/getAwsChunkedEncodingStream.browser.d.ts new file mode 100644 index 0000000..5979078 --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@smithy/util-stream/dist-types/ts3.4/getAwsChunkedEncodingStream.browser.d.ts @@ -0,0 +1,5 @@ +import { GetAwsChunkedEncodingStream } from "@smithy/types"; +/** + * @internal + */ +export declare const getAwsChunkedEncodingStream: GetAwsChunkedEncodingStream; diff --git a/amplify/functions/deleteDocument/node_modules/@smithy/util-stream/dist-types/ts3.4/getAwsChunkedEncodingStream.d.ts b/amplify/functions/deleteDocument/node_modules/@smithy/util-stream/dist-types/ts3.4/getAwsChunkedEncodingStream.d.ts new file mode 100644 index 0000000..a100381 --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@smithy/util-stream/dist-types/ts3.4/getAwsChunkedEncodingStream.d.ts @@ -0,0 +1,7 @@ +/// +import { GetAwsChunkedEncodingStream } from "@smithy/types"; +import { Readable } from "stream"; +/** + * @internal + */ +export declare const getAwsChunkedEncodingStream: GetAwsChunkedEncodingStream; diff --git a/amplify/functions/deleteDocument/node_modules/@smithy/util-stream/dist-types/ts3.4/headStream.browser.d.ts b/amplify/functions/deleteDocument/node_modules/@smithy/util-stream/dist-types/ts3.4/headStream.browser.d.ts new file mode 100644 index 0000000..d8654c3 --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@smithy/util-stream/dist-types/ts3.4/headStream.browser.d.ts @@ -0,0 +1,8 @@ +/** + * @internal + * @param stream + * @param bytes - read head bytes from the stream and discard the rest of it. 
+ * + * Caution: the input stream must be destroyed separately, this function does not do so. + */ +export declare function headStream(stream: ReadableStream, bytes: number): Promise; diff --git a/amplify/functions/deleteDocument/node_modules/@smithy/util-stream/dist-types/ts3.4/headStream.d.ts b/amplify/functions/deleteDocument/node_modules/@smithy/util-stream/dist-types/ts3.4/headStream.d.ts new file mode 100644 index 0000000..7037715 --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@smithy/util-stream/dist-types/ts3.4/headStream.d.ts @@ -0,0 +1,9 @@ +import { Readable } from "stream"; +/** + * @internal + * @param stream - to be read. + * @param bytes - read head bytes from the stream and discard the rest of it. + * + * Caution: the input stream must be destroyed separately, this function does not do so. + */ +export declare const headStream: (stream: Readable | ReadableStream, bytes: number) => Promise; diff --git a/amplify/functions/deleteDocument/node_modules/@smithy/util-stream/dist-types/ts3.4/index.d.ts b/amplify/functions/deleteDocument/node_modules/@smithy/util-stream/dist-types/ts3.4/index.d.ts new file mode 100644 index 0000000..c7c4c3f --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@smithy/util-stream/dist-types/ts3.4/index.d.ts @@ -0,0 +1,9 @@ +export * from "./blob/Uint8ArrayBlobAdapter"; +export * from "./checksum/ChecksumStream"; +export * from "./checksum/createChecksumStream"; +export * from "./createBufferedReadable"; +export * from "./getAwsChunkedEncodingStream"; +export * from "./headStream"; +export * from "./sdk-stream-mixin"; +export * from "./splitStream"; +export * from "./stream-type-check"; diff --git a/amplify/functions/deleteDocument/node_modules/@smithy/util-stream/dist-types/ts3.4/sdk-stream-mixin.browser.d.ts b/amplify/functions/deleteDocument/node_modules/@smithy/util-stream/dist-types/ts3.4/sdk-stream-mixin.browser.d.ts new file mode 100644 index 0000000..99dea40 --- /dev/null +++ 
b/amplify/functions/deleteDocument/node_modules/@smithy/util-stream/dist-types/ts3.4/sdk-stream-mixin.browser.d.ts @@ -0,0 +1,7 @@ +import { SdkStream } from "@smithy/types"; +/** + * The stream handling utility functions for browsers and React Native + * + * @internal + */ +export declare const sdkStreamMixin: (stream: unknown) => SdkStream; diff --git a/amplify/functions/deleteDocument/node_modules/@smithy/util-stream/dist-types/ts3.4/sdk-stream-mixin.d.ts b/amplify/functions/deleteDocument/node_modules/@smithy/util-stream/dist-types/ts3.4/sdk-stream-mixin.d.ts new file mode 100644 index 0000000..c05518a --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@smithy/util-stream/dist-types/ts3.4/sdk-stream-mixin.d.ts @@ -0,0 +1,8 @@ +import { SdkStream } from "@smithy/types"; +import { Readable } from "stream"; +/** + * The function that mixes in the utility functions to help consuming runtime-specific payload stream. + * + * @internal + */ +export declare const sdkStreamMixin: (stream: unknown) => SdkStream | SdkStream; diff --git a/amplify/functions/deleteDocument/node_modules/@smithy/util-stream/dist-types/ts3.4/splitStream.browser.d.ts b/amplify/functions/deleteDocument/node_modules/@smithy/util-stream/dist-types/ts3.4/splitStream.browser.d.ts new file mode 100644 index 0000000..25c8549 --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@smithy/util-stream/dist-types/ts3.4/splitStream.browser.d.ts @@ -0,0 +1,8 @@ +/** + * @param stream + * @returns stream split into two identical streams. 
+ */ +export declare function splitStream(stream: ReadableStream | Blob): Promise<[ + ReadableStream, + ReadableStream +]>; diff --git a/amplify/functions/deleteDocument/node_modules/@smithy/util-stream/dist-types/ts3.4/splitStream.d.ts b/amplify/functions/deleteDocument/node_modules/@smithy/util-stream/dist-types/ts3.4/splitStream.d.ts new file mode 100644 index 0000000..61a7620 --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@smithy/util-stream/dist-types/ts3.4/splitStream.d.ts @@ -0,0 +1,15 @@ +/// +import { Readable } from "stream"; +/** + * @internal + * @param stream - to be split. + * @returns stream split into two identical streams. + */ +export declare function splitStream(stream: Readable): Promise<[ + Readable, + Readable +]>; +export declare function splitStream(stream: ReadableStream): Promise<[ + ReadableStream, + ReadableStream +]>; diff --git a/amplify/functions/deleteDocument/node_modules/@smithy/util-stream/dist-types/ts3.4/stream-type-check.d.ts b/amplify/functions/deleteDocument/node_modules/@smithy/util-stream/dist-types/ts3.4/stream-type-check.d.ts new file mode 100644 index 0000000..11be8f1 --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@smithy/util-stream/dist-types/ts3.4/stream-type-check.d.ts @@ -0,0 +1,16 @@ +/** + * @internal + * Alias prevents compiler from turning + * ReadableStream into ReadableStream, which is incompatible + * with the NodeJS.ReadableStream global type. 
+ */ +type ReadableStreamType = ReadableStream; +/** + * @internal + */ +export declare const isReadableStream: (stream: unknown) => stream is ReadableStreamType; +/** + * @internal + */ +export declare const isBlob: (blob: unknown) => blob is Blob; +export {}; diff --git a/amplify/functions/deleteDocument/node_modules/@smithy/util-stream/package.json b/amplify/functions/deleteDocument/node_modules/@smithy/util-stream/package.json new file mode 100644 index 0000000..769bfc1 --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@smithy/util-stream/package.json @@ -0,0 +1,98 @@ +{ + "name": "@smithy/util-stream", + "version": "4.2.0", + "scripts": { + "build": "concurrently 'yarn:build:cjs' 'yarn:build:es' 'yarn:build:types && yarn build:types:downlevel'", + "build:cjs": "node ../../scripts/inline util-stream", + "build:es": "yarn g:tsc -p tsconfig.es.json", + "build:types": "yarn g:tsc -p tsconfig.types.json", + "build:types:downlevel": "rimraf dist-types/ts3.4 && downlevel-dts dist-types dist-types/ts3.4", + "stage-release": "rimraf ./.release && yarn pack && mkdir ./.release && tar zxvf ./package.tgz --directory ./.release && rm ./package.tgz", + "clean": "rimraf ./dist-* && rimraf *.tsbuildinfo || exit 0", + "lint": "eslint -c ../../.eslintrc.js \"src/**/*.ts\"", + "format": "prettier --config ../../prettier.config.js --ignore-path ../../.prettierignore --write \"**/*.{ts,md,json}\"", + "extract:docs": "api-extractor run --local", + "test": "yarn g:vitest run && yarn test:browser", + "test:integration": "yarn g:vitest run -c vitest.config.integ.ts", + "test:watch": "yarn g:vitest watch", + "test:integration:watch": "yarn g:vitest watch -c vitest.config.integ.ts", + "test:browser": "yarn g:vitest run -c vitest.config.browser.ts", + "test:browser:watch": "yarn g:vitest watch -c vitest.config.browser.ts" + }, + "main": "./dist-cjs/index.js", + "module": "./dist-es/index.js", + "types": "./dist-types/index.d.ts", + "author": { + "name": "AWS SDK for 
JavaScript Team", + "url": "https://aws.amazon.com/javascript/" + }, + "license": "Apache-2.0", + "dependencies": { + "@smithy/fetch-http-handler": "^5.0.2", + "@smithy/node-http-handler": "^4.0.4", + "@smithy/types": "^4.2.0", + "@smithy/util-base64": "^4.0.0", + "@smithy/util-buffer-from": "^4.0.0", + "@smithy/util-hex-encoding": "^4.0.0", + "@smithy/util-utf8": "^4.0.0", + "tslib": "^2.6.2" + }, + "devDependencies": { + "@smithy/util-test": "^0.2.8", + "@types/node": "^18.11.9", + "concurrently": "7.0.0", + "downlevel-dts": "0.10.1", + "rimraf": "3.0.2", + "typedoc": "0.23.23" + }, + "engines": { + "node": ">=18.0.0" + }, + "typesVersions": { + "<4.0": { + "dist-types/*": [ + "dist-types/ts3.4/*" + ] + } + }, + "files": [ + "dist-*/**" + ], + "browser": { + "./dist-es/checksum/ChecksumStream": "./dist-es/checksum/ChecksumStream.browser", + "./dist-es/checksum/createChecksumStream": "./dist-es/checksum/createChecksumStream.browser", + "./dist-es/createBufferedReadable": "./dist-es/createBufferedReadableStream", + "./dist-es/getAwsChunkedEncodingStream": "./dist-es/getAwsChunkedEncodingStream.browser", + "./dist-es/headStream": "./dist-es/headStream.browser", + "./dist-es/sdk-stream-mixin": "./dist-es/sdk-stream-mixin.browser", + "./dist-es/splitStream": "./dist-es/splitStream.browser" + }, + "react-native": { + "./dist-es/checksum/createChecksumStream": "./dist-es/checksum/createChecksumStream.browser", + "./dist-es/checksum/ChecksumStream": "./dist-es/checksum/ChecksumStream.browser", + "./dist-es/getAwsChunkedEncodingStream": "./dist-es/getAwsChunkedEncodingStream.browser", + "./dist-es/sdk-stream-mixin": "./dist-es/sdk-stream-mixin.browser", + "./dist-es/headStream": "./dist-es/headStream.browser", + "./dist-es/splitStream": "./dist-es/splitStream.browser", + "./dist-es/createBufferedReadable": "./dist-es/createBufferedReadableStream", + "./dist-cjs/checksum/createChecksumStream": "./dist-cjs/checksum/createChecksumStream.browser", + 
"./dist-cjs/checksum/ChecksumStream": "./dist-cjs/checksum/ChecksumStream.browser", + "./dist-cjs/getAwsChunkedEncodingStream": "./dist-cjs/getAwsChunkedEncodingStream.browser", + "./dist-cjs/sdk-stream-mixin": "./dist-cjs/sdk-stream-mixin.browser", + "./dist-cjs/headStream": "./dist-cjs/headStream.browser", + "./dist-cjs/splitStream": "./dist-cjs/splitStream.browser", + "./dist-cjs/createBufferedReadable": "./dist-cjs/createBufferedReadableStream" + }, + "homepage": "https://github.com/smithy-lang/smithy-typescript/tree/main/packages/util-stream", + "repository": { + "type": "git", + "url": "https://github.com/smithy-lang/smithy-typescript.git", + "directory": "packages/util-stream" + }, + "typedoc": { + "entryPoint": "src/index.ts" + }, + "publishConfig": { + "directory": ".release/package" + } +} \ No newline at end of file diff --git a/amplify/functions/deleteDocument/node_modules/@smithy/util-uri-escape/LICENSE b/amplify/functions/deleteDocument/node_modules/@smithy/util-uri-escape/LICENSE new file mode 100644 index 0000000..7b6491b --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@smithy/util-uri-escape/LICENSE @@ -0,0 +1,201 @@ +Apache License + Version 2.0, January 2004 + http://www.apache.org/licenses/ + + TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION + + 1. Definitions. + + "License" shall mean the terms and conditions for use, reproduction, + and distribution as defined by Sections 1 through 9 of this document. + + "Licensor" shall mean the copyright owner or entity authorized by + the copyright owner that is granting the License. + + "Legal Entity" shall mean the union of the acting entity and all + other entities that control, are controlled by, or are under common + control with that entity. 
For the purposes of this definition, + "control" means (i) the power, direct or indirect, to cause the + direction or management of such entity, whether by contract or + otherwise, or (ii) ownership of fifty percent (50%) or more of the + outstanding shares, or (iii) beneficial ownership of such entity. + + "You" (or "Your") shall mean an individual or Legal Entity + exercising permissions granted by this License. + + "Source" form shall mean the preferred form for making modifications, + including but not limited to software source code, documentation + source, and configuration files. + + "Object" form shall mean any form resulting from mechanical + transformation or translation of a Source form, including but + not limited to compiled object code, generated documentation, + and conversions to other media types. + + "Work" shall mean the work of authorship, whether in Source or + Object form, made available under the License, as indicated by a + copyright notice that is included in or attached to the work + (an example is provided in the Appendix below). + + "Derivative Works" shall mean any work, whether in Source or Object + form, that is based on (or derived from) the Work and for which the + editorial revisions, annotations, elaborations, or other modifications + represent, as a whole, an original work of authorship. For the purposes + of this License, Derivative Works shall not include works that remain + separable from, or merely link (or bind by name) to the interfaces of, + the Work and Derivative Works thereof. + + "Contribution" shall mean any work of authorship, including + the original version of the Work and any modifications or additions + to that Work or Derivative Works thereof, that is intentionally + submitted to Licensor for inclusion in the Work by the copyright owner + or by an individual or Legal Entity authorized to submit on behalf of + the copyright owner. 
For the purposes of this definition, "submitted" + means any form of electronic, verbal, or written communication sent + to the Licensor or its representatives, including but not limited to + communication on electronic mailing lists, source code control systems, + and issue tracking systems that are managed by, or on behalf of, the + Licensor for the purpose of discussing and improving the Work, but + excluding communication that is conspicuously marked or otherwise + designated in writing by the copyright owner as "Not a Contribution." + + "Contributor" shall mean Licensor and any individual or Legal Entity + on behalf of whom a Contribution has been received by Licensor and + subsequently incorporated within the Work. + + 2. Grant of Copyright License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + copyright license to reproduce, prepare Derivative Works of, + publicly display, publicly perform, sublicense, and distribute the + Work and such Derivative Works in Source or Object form. + + 3. Grant of Patent License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + (except as stated in this section) patent license to make, have made, + use, offer to sell, sell, import, and otherwise transfer the Work, + where such license applies only to those patent claims licensable + by such Contributor that are necessarily infringed by their + Contribution(s) alone or by combination of their Contribution(s) + with the Work to which such Contribution(s) was submitted. 
If You + institute patent litigation against any entity (including a + cross-claim or counterclaim in a lawsuit) alleging that the Work + or a Contribution incorporated within the Work constitutes direct + or contributory patent infringement, then any patent licenses + granted to You under this License for that Work shall terminate + as of the date such litigation is filed. + + 4. Redistribution. You may reproduce and distribute copies of the + Work or Derivative Works thereof in any medium, with or without + modifications, and in Source or Object form, provided that You + meet the following conditions: + + (a) You must give any other recipients of the Work or + Derivative Works a copy of this License; and + + (b) You must cause any modified files to carry prominent notices + stating that You changed the files; and + + (c) You must retain, in the Source form of any Derivative Works + that You distribute, all copyright, patent, trademark, and + attribution notices from the Source form of the Work, + excluding those notices that do not pertain to any part of + the Derivative Works; and + + (d) If the Work includes a "NOTICE" text file as part of its + distribution, then any Derivative Works that You distribute must + include a readable copy of the attribution notices contained + within such NOTICE file, excluding those notices that do not + pertain to any part of the Derivative Works, in at least one + of the following places: within a NOTICE text file distributed + as part of the Derivative Works; within the Source form or + documentation, if provided along with the Derivative Works; or, + within a display generated by the Derivative Works, if and + wherever such third-party notices normally appear. The contents + of the NOTICE file are for informational purposes only and + do not modify the License. 
You may add Your own attribution + notices within Derivative Works that You distribute, alongside + or as an addendum to the NOTICE text from the Work, provided + that such additional attribution notices cannot be construed + as modifying the License. + + You may add Your own copyright statement to Your modifications and + may provide additional or different license terms and conditions + for use, reproduction, or distribution of Your modifications, or + for any such Derivative Works as a whole, provided Your use, + reproduction, and distribution of the Work otherwise complies with + the conditions stated in this License. + + 5. Submission of Contributions. Unless You explicitly state otherwise, + any Contribution intentionally submitted for inclusion in the Work + by You to the Licensor shall be under the terms and conditions of + this License, without any additional terms or conditions. + Notwithstanding the above, nothing herein shall supersede or modify + the terms of any separate license agreement you may have executed + with Licensor regarding such Contributions. + + 6. Trademarks. This License does not grant permission to use the trade + names, trademarks, service marks, or product names of the Licensor, + except as required for reasonable and customary use in describing the + origin of the Work and reproducing the content of the NOTICE file. + + 7. Disclaimer of Warranty. Unless required by applicable law or + agreed to in writing, Licensor provides the Work (and each + Contributor provides its Contributions) on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or + implied, including, without limitation, any warranties or conditions + of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A + PARTICULAR PURPOSE. You are solely responsible for determining the + appropriateness of using or redistributing the Work and assume any + risks associated with Your exercise of permissions under this License. + + 8. 
Limitation of Liability. In no event and under no legal theory, + whether in tort (including negligence), contract, or otherwise, + unless required by applicable law (such as deliberate and grossly + negligent acts) or agreed to in writing, shall any Contributor be + liable to You for damages, including any direct, indirect, special, + incidental, or consequential damages of any character arising as a + result of this License or out of the use or inability to use the + Work (including but not limited to damages for loss of goodwill, + work stoppage, computer failure or malfunction, or any and all + other commercial damages or losses), even if such Contributor + has been advised of the possibility of such damages. + + 9. Accepting Warranty or Additional Liability. While redistributing + the Work or Derivative Works thereof, You may choose to offer, + and charge a fee for, acceptance of support, warranty, indemnity, + or other liability obligations and/or rights consistent with this + License. However, in accepting such obligations, You may act only + on Your own behalf and on Your sole responsibility, not on behalf + of any other Contributor, and only if You agree to indemnify, + defend, and hold each Contributor harmless for any liability + incurred by, or claims asserted against, such Contributor by reason + of your accepting any such warranty or additional liability. + + END OF TERMS AND CONDITIONS + + APPENDIX: How to apply the Apache License to your work. + + To apply the Apache License to your work, attach the following + boilerplate notice, with the fields enclosed by brackets "{}" + replaced with your own identifying information. (Don't include + the brackets!) The text should be enclosed in the appropriate + comment syntax for the file format. We also recommend that a + file or class name and description of purpose be included on the + same "printed page" as the copyright notice for easier + identification within third-party archives. 
+ + Copyright 2018-2020 Amazon.com, Inc. or its affiliates. All Rights Reserved. + + Licensed under the Apache License, Version 2.0 (the "License"); + you may not use this file except in compliance with the License. + You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + + Unless required by applicable law or agreed to in writing, software + distributed under the License is distributed on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + See the License for the specific language governing permissions and + limitations under the License. \ No newline at end of file diff --git a/amplify/functions/deleteDocument/node_modules/@smithy/util-uri-escape/README.md b/amplify/functions/deleteDocument/node_modules/@smithy/util-uri-escape/README.md new file mode 100644 index 0000000..22e939a --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@smithy/util-uri-escape/README.md @@ -0,0 +1,10 @@ +# @smithy/util-uri-escape + +[![NPM version](https://img.shields.io/npm/v/@smithy/util-uri-escape/latest.svg)](https://www.npmjs.com/package/@smithy/util-uri-escape) +[![NPM downloads](https://img.shields.io/npm/dm/@smithy/util-uri-escape.svg)](https://www.npmjs.com/package/@smithy/util-uri-escape) + +> An internal package + +## Usage + +You probably shouldn't, at least directly. 
diff --git a/amplify/functions/deleteDocument/node_modules/@smithy/util-uri-escape/dist-cjs/escape-uri-path.js b/amplify/functions/deleteDocument/node_modules/@smithy/util-uri-escape/dist-cjs/escape-uri-path.js new file mode 100644 index 0000000..532e610 --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@smithy/util-uri-escape/dist-cjs/escape-uri-path.js @@ -0,0 +1 @@ +module.exports = require("./index.js"); \ No newline at end of file diff --git a/amplify/functions/deleteDocument/node_modules/@smithy/util-uri-escape/dist-cjs/escape-uri.js b/amplify/functions/deleteDocument/node_modules/@smithy/util-uri-escape/dist-cjs/escape-uri.js new file mode 100644 index 0000000..532e610 --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@smithy/util-uri-escape/dist-cjs/escape-uri.js @@ -0,0 +1 @@ +module.exports = require("./index.js"); \ No newline at end of file diff --git a/amplify/functions/deleteDocument/node_modules/@smithy/util-uri-escape/dist-cjs/index.js b/amplify/functions/deleteDocument/node_modules/@smithy/util-uri-escape/dist-cjs/index.js new file mode 100644 index 0000000..51001ef --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@smithy/util-uri-escape/dist-cjs/index.js @@ -0,0 +1,43 @@ +var __defProp = Object.defineProperty; +var __getOwnPropDesc = Object.getOwnPropertyDescriptor; +var __getOwnPropNames = Object.getOwnPropertyNames; +var __hasOwnProp = Object.prototype.hasOwnProperty; +var __name = (target, value) => __defProp(target, "name", { value, configurable: true }); +var __export = (target, all) => { + for (var name in all) + __defProp(target, name, { get: all[name], enumerable: true }); +}; +var __copyProps = (to, from, except, desc) => { + if (from && typeof from === "object" || typeof from === "function") { + for (let key of __getOwnPropNames(from)) + if (!__hasOwnProp.call(to, key) && key !== except) + __defProp(to, key, { get: () => from[key], enumerable: !(desc = __getOwnPropDesc(from, key)) || 
desc.enumerable }); + } + return to; +}; +var __toCommonJS = (mod) => __copyProps(__defProp({}, "__esModule", { value: true }), mod); + +// src/index.ts +var src_exports = {}; +__export(src_exports, { + escapeUri: () => escapeUri, + escapeUriPath: () => escapeUriPath +}); +module.exports = __toCommonJS(src_exports); + +// src/escape-uri.ts +var escapeUri = /* @__PURE__ */ __name((uri) => ( + // AWS percent-encodes some extra non-standard characters in a URI + encodeURIComponent(uri).replace(/[!'()*]/g, hexEncode) +), "escapeUri"); +var hexEncode = /* @__PURE__ */ __name((c) => `%${c.charCodeAt(0).toString(16).toUpperCase()}`, "hexEncode"); + +// src/escape-uri-path.ts +var escapeUriPath = /* @__PURE__ */ __name((uri) => uri.split("/").map(escapeUri).join("/"), "escapeUriPath"); +// Annotate the CommonJS export names for ESM import in node: + +0 && (module.exports = { + escapeUri, + escapeUriPath +}); + diff --git a/amplify/functions/deleteDocument/node_modules/@smithy/util-uri-escape/dist-es/escape-uri-path.js b/amplify/functions/deleteDocument/node_modules/@smithy/util-uri-escape/dist-es/escape-uri-path.js new file mode 100644 index 0000000..81b3fe3 --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@smithy/util-uri-escape/dist-es/escape-uri-path.js @@ -0,0 +1,2 @@ +import { escapeUri } from "./escape-uri"; +export const escapeUriPath = (uri) => uri.split("/").map(escapeUri).join("/"); diff --git a/amplify/functions/deleteDocument/node_modules/@smithy/util-uri-escape/dist-es/escape-uri.js b/amplify/functions/deleteDocument/node_modules/@smithy/util-uri-escape/dist-es/escape-uri.js new file mode 100644 index 0000000..8990be1 --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@smithy/util-uri-escape/dist-es/escape-uri.js @@ -0,0 +1,2 @@ +export const escapeUri = (uri) => encodeURIComponent(uri).replace(/[!'()*]/g, hexEncode); +const hexEncode = (c) => `%${c.charCodeAt(0).toString(16).toUpperCase()}`; diff --git 
a/amplify/functions/deleteDocument/node_modules/@smithy/util-uri-escape/dist-es/index.js b/amplify/functions/deleteDocument/node_modules/@smithy/util-uri-escape/dist-es/index.js new file mode 100644 index 0000000..ed402e1 --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@smithy/util-uri-escape/dist-es/index.js @@ -0,0 +1,2 @@ +export * from "./escape-uri"; +export * from "./escape-uri-path"; diff --git a/amplify/functions/deleteDocument/node_modules/@smithy/util-uri-escape/dist-types/escape-uri-path.d.ts b/amplify/functions/deleteDocument/node_modules/@smithy/util-uri-escape/dist-types/escape-uri-path.d.ts new file mode 100644 index 0000000..b547ff9 --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@smithy/util-uri-escape/dist-types/escape-uri-path.d.ts @@ -0,0 +1,4 @@ +/** + * @internal + */ +export declare const escapeUriPath: (uri: string) => string; diff --git a/amplify/functions/deleteDocument/node_modules/@smithy/util-uri-escape/dist-types/escape-uri.d.ts b/amplify/functions/deleteDocument/node_modules/@smithy/util-uri-escape/dist-types/escape-uri.d.ts new file mode 100644 index 0000000..3f14d2c --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@smithy/util-uri-escape/dist-types/escape-uri.d.ts @@ -0,0 +1,4 @@ +/** + * @internal + */ +export declare const escapeUri: (uri: string) => string; diff --git a/amplify/functions/deleteDocument/node_modules/@smithy/util-uri-escape/dist-types/index.d.ts b/amplify/functions/deleteDocument/node_modules/@smithy/util-uri-escape/dist-types/index.d.ts new file mode 100644 index 0000000..1913825 --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@smithy/util-uri-escape/dist-types/index.d.ts @@ -0,0 +1,8 @@ +/** + * @internal + */ +export * from "./escape-uri"; +/** + * @internal + */ +export * from "./escape-uri-path"; diff --git a/amplify/functions/deleteDocument/node_modules/@smithy/util-uri-escape/dist-types/ts3.4/escape-uri-path.d.ts 
b/amplify/functions/deleteDocument/node_modules/@smithy/util-uri-escape/dist-types/ts3.4/escape-uri-path.d.ts new file mode 100644 index 0000000..a7e19ca --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@smithy/util-uri-escape/dist-types/ts3.4/escape-uri-path.d.ts @@ -0,0 +1,4 @@ +/** + * @internal + */ +export declare const escapeUriPath: (uri: string) => string; diff --git a/amplify/functions/deleteDocument/node_modules/@smithy/util-uri-escape/dist-types/ts3.4/escape-uri.d.ts b/amplify/functions/deleteDocument/node_modules/@smithy/util-uri-escape/dist-types/ts3.4/escape-uri.d.ts new file mode 100644 index 0000000..13cc372 --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@smithy/util-uri-escape/dist-types/ts3.4/escape-uri.d.ts @@ -0,0 +1,4 @@ +/** + * @internal + */ +export declare const escapeUri: (uri: string) => string; diff --git a/amplify/functions/deleteDocument/node_modules/@smithy/util-uri-escape/dist-types/ts3.4/index.d.ts b/amplify/functions/deleteDocument/node_modules/@smithy/util-uri-escape/dist-types/ts3.4/index.d.ts new file mode 100644 index 0000000..ad719fe --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@smithy/util-uri-escape/dist-types/ts3.4/index.d.ts @@ -0,0 +1,8 @@ +/** + * @internal + */ +export * from "./escape-uri"; +/** + * @internal + */ +export * from "./escape-uri-path"; diff --git a/amplify/functions/deleteDocument/node_modules/@smithy/util-uri-escape/package.json b/amplify/functions/deleteDocument/node_modules/@smithy/util-uri-escape/package.json new file mode 100644 index 0000000..4ca6fd9 --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@smithy/util-uri-escape/package.json @@ -0,0 +1,59 @@ +{ + "name": "@smithy/util-uri-escape", + "version": "4.0.0", + "scripts": { + "build": "concurrently 'yarn:build:cjs' 'yarn:build:es' 'yarn:build:types && yarn build:types:downlevel'", + "build:cjs": "node ../../scripts/inline util-uri-escape", + "build:es": "yarn g:tsc -p 
tsconfig.es.json", + "build:types": "yarn g:tsc -p tsconfig.types.json", + "build:types:downlevel": "rimraf dist-types/ts3.4 && downlevel-dts dist-types dist-types/ts3.4", + "stage-release": "rimraf ./.release && yarn pack && mkdir ./.release && tar zxvf ./package.tgz --directory ./.release && rm ./package.tgz", + "clean": "rimraf ./dist-* && rimraf *.tsbuildinfo || exit 0", + "lint": "eslint -c ../../.eslintrc.js \"src/**/*.ts\"", + "format": "prettier --config ../../prettier.config.js --ignore-path ../.prettierignore --write \"**/*.{ts,md,json}\"", + "test": "yarn g:vitest run", + "test:watch": "yarn g:vitest watch" + }, + "main": "./dist-cjs/index.js", + "module": "./dist-es/index.js", + "types": "./dist-types/index.d.ts", + "author": { + "name": "AWS SDK for JavaScript Team", + "url": "https://aws.amazon.com/javascript/" + }, + "license": "Apache-2.0", + "dependencies": { + "tslib": "^2.6.2" + }, + "engines": { + "node": ">=18.0.0" + }, + "typesVersions": { + "<4.0": { + "dist-types/*": [ + "dist-types/ts3.4/*" + ] + } + }, + "files": [ + "dist-*/**" + ], + "homepage": "https://github.com/awslabs/smithy-typescript/tree/main/packages/util-uri-escape", + "repository": { + "type": "git", + "url": "https://github.com/awslabs/smithy-typescript.git", + "directory": "packages/util-uri-escape" + }, + "devDependencies": { + "concurrently": "7.0.0", + "downlevel-dts": "0.10.1", + "rimraf": "3.0.2", + "typedoc": "0.23.23" + }, + "typedoc": { + "entryPoint": "src/index.ts" + }, + "publishConfig": { + "directory": ".release/package" + } +} \ No newline at end of file diff --git a/amplify/functions/deleteDocument/node_modules/@smithy/util-utf8/LICENSE b/amplify/functions/deleteDocument/node_modules/@smithy/util-utf8/LICENSE new file mode 100644 index 0000000..7b6491b --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@smithy/util-utf8/LICENSE @@ -0,0 +1,201 @@ +Apache License + Version 2.0, January 2004 + http://www.apache.org/licenses/ + + TERMS AND 
CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION + + 1. Definitions. + + "License" shall mean the terms and conditions for use, reproduction, + and distribution as defined by Sections 1 through 9 of this document. + + "Licensor" shall mean the copyright owner or entity authorized by + the copyright owner that is granting the License. + + "Legal Entity" shall mean the union of the acting entity and all + other entities that control, are controlled by, or are under common + control with that entity. For the purposes of this definition, + "control" means (i) the power, direct or indirect, to cause the + direction or management of such entity, whether by contract or + otherwise, or (ii) ownership of fifty percent (50%) or more of the + outstanding shares, or (iii) beneficial ownership of such entity. + + "You" (or "Your") shall mean an individual or Legal Entity + exercising permissions granted by this License. + + "Source" form shall mean the preferred form for making modifications, + including but not limited to software source code, documentation + source, and configuration files. + + "Object" form shall mean any form resulting from mechanical + transformation or translation of a Source form, including but + not limited to compiled object code, generated documentation, + and conversions to other media types. + + "Work" shall mean the work of authorship, whether in Source or + Object form, made available under the License, as indicated by a + copyright notice that is included in or attached to the work + (an example is provided in the Appendix below). + + "Derivative Works" shall mean any work, whether in Source or Object + form, that is based on (or derived from) the Work and for which the + editorial revisions, annotations, elaborations, or other modifications + represent, as a whole, an original work of authorship. 
For the purposes + of this License, Derivative Works shall not include works that remain + separable from, or merely link (or bind by name) to the interfaces of, + the Work and Derivative Works thereof. + + "Contribution" shall mean any work of authorship, including + the original version of the Work and any modifications or additions + to that Work or Derivative Works thereof, that is intentionally + submitted to Licensor for inclusion in the Work by the copyright owner + or by an individual or Legal Entity authorized to submit on behalf of + the copyright owner. For the purposes of this definition, "submitted" + means any form of electronic, verbal, or written communication sent + to the Licensor or its representatives, including but not limited to + communication on electronic mailing lists, source code control systems, + and issue tracking systems that are managed by, or on behalf of, the + Licensor for the purpose of discussing and improving the Work, but + excluding communication that is conspicuously marked or otherwise + designated in writing by the copyright owner as "Not a Contribution." + + "Contributor" shall mean Licensor and any individual or Legal Entity + on behalf of whom a Contribution has been received by Licensor and + subsequently incorporated within the Work. + + 2. Grant of Copyright License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + copyright license to reproduce, prepare Derivative Works of, + publicly display, publicly perform, sublicense, and distribute the + Work and such Derivative Works in Source or Object form. + + 3. Grant of Patent License. 
Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + (except as stated in this section) patent license to make, have made, + use, offer to sell, sell, import, and otherwise transfer the Work, + where such license applies only to those patent claims licensable + by such Contributor that are necessarily infringed by their + Contribution(s) alone or by combination of their Contribution(s) + with the Work to which such Contribution(s) was submitted. If You + institute patent litigation against any entity (including a + cross-claim or counterclaim in a lawsuit) alleging that the Work + or a Contribution incorporated within the Work constitutes direct + or contributory patent infringement, then any patent licenses + granted to You under this License for that Work shall terminate + as of the date such litigation is filed. + + 4. Redistribution. You may reproduce and distribute copies of the + Work or Derivative Works thereof in any medium, with or without + modifications, and in Source or Object form, provided that You + meet the following conditions: + + (a) You must give any other recipients of the Work or + Derivative Works a copy of this License; and + + (b) You must cause any modified files to carry prominent notices + stating that You changed the files; and + + (c) You must retain, in the Source form of any Derivative Works + that You distribute, all copyright, patent, trademark, and + attribution notices from the Source form of the Work, + excluding those notices that do not pertain to any part of + the Derivative Works; and + + (d) If the Work includes a "NOTICE" text file as part of its + distribution, then any Derivative Works that You distribute must + include a readable copy of the attribution notices contained + within such NOTICE file, excluding those notices that do not + pertain to any part of the Derivative Works, in at least one + of 
the following places: within a NOTICE text file distributed + as part of the Derivative Works; within the Source form or + documentation, if provided along with the Derivative Works; or, + within a display generated by the Derivative Works, if and + wherever such third-party notices normally appear. The contents + of the NOTICE file are for informational purposes only and + do not modify the License. You may add Your own attribution + notices within Derivative Works that You distribute, alongside + or as an addendum to the NOTICE text from the Work, provided + that such additional attribution notices cannot be construed + as modifying the License. + + You may add Your own copyright statement to Your modifications and + may provide additional or different license terms and conditions + for use, reproduction, or distribution of Your modifications, or + for any such Derivative Works as a whole, provided Your use, + reproduction, and distribution of the Work otherwise complies with + the conditions stated in this License. + + 5. Submission of Contributions. Unless You explicitly state otherwise, + any Contribution intentionally submitted for inclusion in the Work + by You to the Licensor shall be under the terms and conditions of + this License, without any additional terms or conditions. + Notwithstanding the above, nothing herein shall supersede or modify + the terms of any separate license agreement you may have executed + with Licensor regarding such Contributions. + + 6. Trademarks. This License does not grant permission to use the trade + names, trademarks, service marks, or product names of the Licensor, + except as required for reasonable and customary use in describing the + origin of the Work and reproducing the content of the NOTICE file. + + 7. Disclaimer of Warranty. 
Unless required by applicable law or + agreed to in writing, Licensor provides the Work (and each + Contributor provides its Contributions) on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or + implied, including, without limitation, any warranties or conditions + of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A + PARTICULAR PURPOSE. You are solely responsible for determining the + appropriateness of using or redistributing the Work and assume any + risks associated with Your exercise of permissions under this License. + + 8. Limitation of Liability. In no event and under no legal theory, + whether in tort (including negligence), contract, or otherwise, + unless required by applicable law (such as deliberate and grossly + negligent acts) or agreed to in writing, shall any Contributor be + liable to You for damages, including any direct, indirect, special, + incidental, or consequential damages of any character arising as a + result of this License or out of the use or inability to use the + Work (including but not limited to damages for loss of goodwill, + work stoppage, computer failure or malfunction, or any and all + other commercial damages or losses), even if such Contributor + has been advised of the possibility of such damages. + + 9. Accepting Warranty or Additional Liability. While redistributing + the Work or Derivative Works thereof, You may choose to offer, + and charge a fee for, acceptance of support, warranty, indemnity, + or other liability obligations and/or rights consistent with this + License. However, in accepting such obligations, You may act only + on Your own behalf and on Your sole responsibility, not on behalf + of any other Contributor, and only if You agree to indemnify, + defend, and hold each Contributor harmless for any liability + incurred by, or claims asserted against, such Contributor by reason + of your accepting any such warranty or additional liability. 
+ + END OF TERMS AND CONDITIONS + + APPENDIX: How to apply the Apache License to your work. + + To apply the Apache License to your work, attach the following + boilerplate notice, with the fields enclosed by brackets "{}" + replaced with your own identifying information. (Don't include + the brackets!) The text should be enclosed in the appropriate + comment syntax for the file format. We also recommend that a + file or class name and description of purpose be included on the + same "printed page" as the copyright notice for easier + identification within third-party archives. + + Copyright 2018-2020 Amazon.com, Inc. or its affiliates. All Rights Reserved. + + Licensed under the Apache License, Version 2.0 (the "License"); + you may not use this file except in compliance with the License. + You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + + Unless required by applicable law or agreed to in writing, software + distributed under the License is distributed on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + See the License for the specific language governing permissions and + limitations under the License. 
\ No newline at end of file diff --git a/amplify/functions/deleteDocument/node_modules/@smithy/util-utf8/README.md b/amplify/functions/deleteDocument/node_modules/@smithy/util-utf8/README.md new file mode 100644 index 0000000..fc5db6d --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@smithy/util-utf8/README.md @@ -0,0 +1,4 @@ +# @smithy/util-utf8 + +[![NPM version](https://img.shields.io/npm/v/@smithy/util-utf8/latest.svg)](https://www.npmjs.com/package/@smithy/util-utf8) +[![NPM downloads](https://img.shields.io/npm/dm/@smithy/util-utf8.svg)](https://www.npmjs.com/package/@smithy/util-utf8) diff --git a/amplify/functions/deleteDocument/node_modules/@smithy/util-utf8/dist-cjs/fromUtf8.browser.js b/amplify/functions/deleteDocument/node_modules/@smithy/util-utf8/dist-cjs/fromUtf8.browser.js new file mode 100644 index 0000000..532e610 --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@smithy/util-utf8/dist-cjs/fromUtf8.browser.js @@ -0,0 +1 @@ +module.exports = require("./index.js"); \ No newline at end of file diff --git a/amplify/functions/deleteDocument/node_modules/@smithy/util-utf8/dist-cjs/fromUtf8.js b/amplify/functions/deleteDocument/node_modules/@smithy/util-utf8/dist-cjs/fromUtf8.js new file mode 100644 index 0000000..532e610 --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@smithy/util-utf8/dist-cjs/fromUtf8.js @@ -0,0 +1 @@ +module.exports = require("./index.js"); \ No newline at end of file diff --git a/amplify/functions/deleteDocument/node_modules/@smithy/util-utf8/dist-cjs/index.js b/amplify/functions/deleteDocument/node_modules/@smithy/util-utf8/dist-cjs/index.js new file mode 100644 index 0000000..0b22680 --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@smithy/util-utf8/dist-cjs/index.js @@ -0,0 +1,65 @@ +var __defProp = Object.defineProperty; +var __getOwnPropDesc = Object.getOwnPropertyDescriptor; +var __getOwnPropNames = Object.getOwnPropertyNames; +var __hasOwnProp = 
Object.prototype.hasOwnProperty; +var __name = (target, value) => __defProp(target, "name", { value, configurable: true }); +var __export = (target, all) => { + for (var name in all) + __defProp(target, name, { get: all[name], enumerable: true }); +}; +var __copyProps = (to, from, except, desc) => { + if (from && typeof from === "object" || typeof from === "function") { + for (let key of __getOwnPropNames(from)) + if (!__hasOwnProp.call(to, key) && key !== except) + __defProp(to, key, { get: () => from[key], enumerable: !(desc = __getOwnPropDesc(from, key)) || desc.enumerable }); + } + return to; +}; +var __toCommonJS = (mod) => __copyProps(__defProp({}, "__esModule", { value: true }), mod); + +// src/index.ts +var src_exports = {}; +__export(src_exports, { + fromUtf8: () => fromUtf8, + toUint8Array: () => toUint8Array, + toUtf8: () => toUtf8 +}); +module.exports = __toCommonJS(src_exports); + +// src/fromUtf8.ts +var import_util_buffer_from = require("@smithy/util-buffer-from"); +var fromUtf8 = /* @__PURE__ */ __name((input) => { + const buf = (0, import_util_buffer_from.fromString)(input, "utf8"); + return new Uint8Array(buf.buffer, buf.byteOffset, buf.byteLength / Uint8Array.BYTES_PER_ELEMENT); +}, "fromUtf8"); + +// src/toUint8Array.ts +var toUint8Array = /* @__PURE__ */ __name((data) => { + if (typeof data === "string") { + return fromUtf8(data); + } + if (ArrayBuffer.isView(data)) { + return new Uint8Array(data.buffer, data.byteOffset, data.byteLength / Uint8Array.BYTES_PER_ELEMENT); + } + return new Uint8Array(data); +}, "toUint8Array"); + +// src/toUtf8.ts + +var toUtf8 = /* @__PURE__ */ __name((input) => { + if (typeof input === "string") { + return input; + } + if (typeof input !== "object" || typeof input.byteOffset !== "number" || typeof input.byteLength !== "number") { + throw new Error("@smithy/util-utf8: toUtf8 encoder function only accepts string | Uint8Array."); + } + return (0, import_util_buffer_from.fromArrayBuffer)(input.buffer, 
input.byteOffset, input.byteLength).toString("utf8"); +}, "toUtf8"); +// Annotate the CommonJS export names for ESM import in node: + +0 && (module.exports = { + fromUtf8, + toUint8Array, + toUtf8 +}); + diff --git a/amplify/functions/deleteDocument/node_modules/@smithy/util-utf8/dist-cjs/toUint8Array.js b/amplify/functions/deleteDocument/node_modules/@smithy/util-utf8/dist-cjs/toUint8Array.js new file mode 100644 index 0000000..532e610 --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@smithy/util-utf8/dist-cjs/toUint8Array.js @@ -0,0 +1 @@ +module.exports = require("./index.js"); \ No newline at end of file diff --git a/amplify/functions/deleteDocument/node_modules/@smithy/util-utf8/dist-cjs/toUtf8.browser.js b/amplify/functions/deleteDocument/node_modules/@smithy/util-utf8/dist-cjs/toUtf8.browser.js new file mode 100644 index 0000000..532e610 --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@smithy/util-utf8/dist-cjs/toUtf8.browser.js @@ -0,0 +1 @@ +module.exports = require("./index.js"); \ No newline at end of file diff --git a/amplify/functions/deleteDocument/node_modules/@smithy/util-utf8/dist-cjs/toUtf8.js b/amplify/functions/deleteDocument/node_modules/@smithy/util-utf8/dist-cjs/toUtf8.js new file mode 100644 index 0000000..532e610 --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@smithy/util-utf8/dist-cjs/toUtf8.js @@ -0,0 +1 @@ +module.exports = require("./index.js"); \ No newline at end of file diff --git a/amplify/functions/deleteDocument/node_modules/@smithy/util-utf8/dist-es/fromUtf8.browser.js b/amplify/functions/deleteDocument/node_modules/@smithy/util-utf8/dist-es/fromUtf8.browser.js new file mode 100644 index 0000000..7344190 --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@smithy/util-utf8/dist-es/fromUtf8.browser.js @@ -0,0 +1 @@ +export const fromUtf8 = (input) => new TextEncoder().encode(input); diff --git 
a/amplify/functions/deleteDocument/node_modules/@smithy/util-utf8/dist-es/fromUtf8.js b/amplify/functions/deleteDocument/node_modules/@smithy/util-utf8/dist-es/fromUtf8.js new file mode 100644 index 0000000..6dc438b --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@smithy/util-utf8/dist-es/fromUtf8.js @@ -0,0 +1,5 @@ +import { fromString } from "@smithy/util-buffer-from"; +export const fromUtf8 = (input) => { + const buf = fromString(input, "utf8"); + return new Uint8Array(buf.buffer, buf.byteOffset, buf.byteLength / Uint8Array.BYTES_PER_ELEMENT); +}; diff --git a/amplify/functions/deleteDocument/node_modules/@smithy/util-utf8/dist-es/index.js b/amplify/functions/deleteDocument/node_modules/@smithy/util-utf8/dist-es/index.js new file mode 100644 index 0000000..00ba465 --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@smithy/util-utf8/dist-es/index.js @@ -0,0 +1,3 @@ +export * from "./fromUtf8"; +export * from "./toUint8Array"; +export * from "./toUtf8"; diff --git a/amplify/functions/deleteDocument/node_modules/@smithy/util-utf8/dist-es/toUint8Array.js b/amplify/functions/deleteDocument/node_modules/@smithy/util-utf8/dist-es/toUint8Array.js new file mode 100644 index 0000000..2cd36f7 --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@smithy/util-utf8/dist-es/toUint8Array.js @@ -0,0 +1,10 @@ +import { fromUtf8 } from "./fromUtf8"; +export const toUint8Array = (data) => { + if (typeof data === "string") { + return fromUtf8(data); + } + if (ArrayBuffer.isView(data)) { + return new Uint8Array(data.buffer, data.byteOffset, data.byteLength / Uint8Array.BYTES_PER_ELEMENT); + } + return new Uint8Array(data); +}; diff --git a/amplify/functions/deleteDocument/node_modules/@smithy/util-utf8/dist-es/toUtf8.browser.js b/amplify/functions/deleteDocument/node_modules/@smithy/util-utf8/dist-es/toUtf8.browser.js new file mode 100644 index 0000000..c292127 --- /dev/null +++ 
b/amplify/functions/deleteDocument/node_modules/@smithy/util-utf8/dist-es/toUtf8.browser.js @@ -0,0 +1,9 @@ +export const toUtf8 = (input) => { + if (typeof input === "string") { + return input; + } + if (typeof input !== "object" || typeof input.byteOffset !== "number" || typeof input.byteLength !== "number") { + throw new Error("@smithy/util-utf8: toUtf8 encoder function only accepts string | Uint8Array."); + } + return new TextDecoder("utf-8").decode(input); +}; diff --git a/amplify/functions/deleteDocument/node_modules/@smithy/util-utf8/dist-es/toUtf8.js b/amplify/functions/deleteDocument/node_modules/@smithy/util-utf8/dist-es/toUtf8.js new file mode 100644 index 0000000..7be8745 --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@smithy/util-utf8/dist-es/toUtf8.js @@ -0,0 +1,10 @@ +import { fromArrayBuffer } from "@smithy/util-buffer-from"; +export const toUtf8 = (input) => { + if (typeof input === "string") { + return input; + } + if (typeof input !== "object" || typeof input.byteOffset !== "number" || typeof input.byteLength !== "number") { + throw new Error("@smithy/util-utf8: toUtf8 encoder function only accepts string | Uint8Array."); + } + return fromArrayBuffer(input.buffer, input.byteOffset, input.byteLength).toString("utf8"); +}; diff --git a/amplify/functions/deleteDocument/node_modules/@smithy/util-utf8/dist-types/fromUtf8.browser.d.ts b/amplify/functions/deleteDocument/node_modules/@smithy/util-utf8/dist-types/fromUtf8.browser.d.ts new file mode 100644 index 0000000..dd91981 --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@smithy/util-utf8/dist-types/fromUtf8.browser.d.ts @@ -0,0 +1 @@ +export declare const fromUtf8: (input: string) => Uint8Array; diff --git a/amplify/functions/deleteDocument/node_modules/@smithy/util-utf8/dist-types/fromUtf8.d.ts b/amplify/functions/deleteDocument/node_modules/@smithy/util-utf8/dist-types/fromUtf8.d.ts new file mode 100644 index 0000000..dd91981 --- /dev/null +++ 
b/amplify/functions/deleteDocument/node_modules/@smithy/util-utf8/dist-types/fromUtf8.d.ts @@ -0,0 +1 @@ +export declare const fromUtf8: (input: string) => Uint8Array; diff --git a/amplify/functions/deleteDocument/node_modules/@smithy/util-utf8/dist-types/index.d.ts b/amplify/functions/deleteDocument/node_modules/@smithy/util-utf8/dist-types/index.d.ts new file mode 100644 index 0000000..00ba465 --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@smithy/util-utf8/dist-types/index.d.ts @@ -0,0 +1,3 @@ +export * from "./fromUtf8"; +export * from "./toUint8Array"; +export * from "./toUtf8"; diff --git a/amplify/functions/deleteDocument/node_modules/@smithy/util-utf8/dist-types/toUint8Array.d.ts b/amplify/functions/deleteDocument/node_modules/@smithy/util-utf8/dist-types/toUint8Array.d.ts new file mode 100644 index 0000000..11b6342 --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@smithy/util-utf8/dist-types/toUint8Array.d.ts @@ -0,0 +1 @@ +export declare const toUint8Array: (data: string | ArrayBuffer | ArrayBufferView) => Uint8Array; diff --git a/amplify/functions/deleteDocument/node_modules/@smithy/util-utf8/dist-types/toUtf8.browser.d.ts b/amplify/functions/deleteDocument/node_modules/@smithy/util-utf8/dist-types/toUtf8.browser.d.ts new file mode 100644 index 0000000..8494acd --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@smithy/util-utf8/dist-types/toUtf8.browser.d.ts @@ -0,0 +1,7 @@ +/** + * + * This does not convert non-utf8 strings to utf8, it only passes through strings if + * a string is received instead of a Uint8Array. 
+ * + */ +export declare const toUtf8: (input: Uint8Array | string) => string; diff --git a/amplify/functions/deleteDocument/node_modules/@smithy/util-utf8/dist-types/toUtf8.d.ts b/amplify/functions/deleteDocument/node_modules/@smithy/util-utf8/dist-types/toUtf8.d.ts new file mode 100644 index 0000000..8494acd --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@smithy/util-utf8/dist-types/toUtf8.d.ts @@ -0,0 +1,7 @@ +/** + * + * This does not convert non-utf8 strings to utf8, it only passes through strings if + * a string is received instead of a Uint8Array. + * + */ +export declare const toUtf8: (input: Uint8Array | string) => string; diff --git a/amplify/functions/deleteDocument/node_modules/@smithy/util-utf8/dist-types/ts3.4/fromUtf8.browser.d.ts b/amplify/functions/deleteDocument/node_modules/@smithy/util-utf8/dist-types/ts3.4/fromUtf8.browser.d.ts new file mode 100644 index 0000000..39f3d6d --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@smithy/util-utf8/dist-types/ts3.4/fromUtf8.browser.d.ts @@ -0,0 +1 @@ +export declare const fromUtf8: (input: string) => Uint8Array; diff --git a/amplify/functions/deleteDocument/node_modules/@smithy/util-utf8/dist-types/ts3.4/fromUtf8.d.ts b/amplify/functions/deleteDocument/node_modules/@smithy/util-utf8/dist-types/ts3.4/fromUtf8.d.ts new file mode 100644 index 0000000..39f3d6d --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@smithy/util-utf8/dist-types/ts3.4/fromUtf8.d.ts @@ -0,0 +1 @@ +export declare const fromUtf8: (input: string) => Uint8Array; diff --git a/amplify/functions/deleteDocument/node_modules/@smithy/util-utf8/dist-types/ts3.4/index.d.ts b/amplify/functions/deleteDocument/node_modules/@smithy/util-utf8/dist-types/ts3.4/index.d.ts new file mode 100644 index 0000000..ef9761d --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@smithy/util-utf8/dist-types/ts3.4/index.d.ts @@ -0,0 +1,3 @@ +export * from "./fromUtf8"; +export * from "./toUint8Array"; 
+export * from "./toUtf8"; diff --git a/amplify/functions/deleteDocument/node_modules/@smithy/util-utf8/dist-types/ts3.4/toUint8Array.d.ts b/amplify/functions/deleteDocument/node_modules/@smithy/util-utf8/dist-types/ts3.4/toUint8Array.d.ts new file mode 100644 index 0000000..562fe10 --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@smithy/util-utf8/dist-types/ts3.4/toUint8Array.d.ts @@ -0,0 +1 @@ +export declare const toUint8Array: (data: string | ArrayBuffer | ArrayBufferView) => Uint8Array; diff --git a/amplify/functions/deleteDocument/node_modules/@smithy/util-utf8/dist-types/ts3.4/toUtf8.browser.d.ts b/amplify/functions/deleteDocument/node_modules/@smithy/util-utf8/dist-types/ts3.4/toUtf8.browser.d.ts new file mode 100644 index 0000000..33511ad --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@smithy/util-utf8/dist-types/ts3.4/toUtf8.browser.d.ts @@ -0,0 +1,7 @@ +/** + * + * This does not convert non-utf8 strings to utf8, it only passes through strings if + * a string is received instead of a Uint8Array. + * + */ +export declare const toUtf8: (input: Uint8Array | string) => string; diff --git a/amplify/functions/deleteDocument/node_modules/@smithy/util-utf8/dist-types/ts3.4/toUtf8.d.ts b/amplify/functions/deleteDocument/node_modules/@smithy/util-utf8/dist-types/ts3.4/toUtf8.d.ts new file mode 100644 index 0000000..33511ad --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@smithy/util-utf8/dist-types/ts3.4/toUtf8.d.ts @@ -0,0 +1,7 @@ +/** + * + * This does not convert non-utf8 strings to utf8, it only passes through strings if + * a string is received instead of a Uint8Array. 
+ * + */ +export declare const toUtf8: (input: Uint8Array | string) => string; diff --git a/amplify/functions/deleteDocument/node_modules/@smithy/util-utf8/package.json b/amplify/functions/deleteDocument/node_modules/@smithy/util-utf8/package.json new file mode 100644 index 0000000..e33060d --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@smithy/util-utf8/package.json @@ -0,0 +1,66 @@ +{ + "name": "@smithy/util-utf8", + "version": "4.0.0", + "description": "A UTF-8 string <-> UInt8Array converter", + "main": "./dist-cjs/index.js", + "module": "./dist-es/index.js", + "scripts": { + "build": "concurrently 'yarn:build:cjs' 'yarn:build:es' 'yarn:build:types && yarn build:types:downlevel'", + "build:cjs": "node ../../scripts/inline util-utf8", + "build:es": "yarn g:tsc -p tsconfig.es.json", + "build:types": "yarn g:tsc -p tsconfig.types.json", + "build:types:downlevel": "rimraf dist-types/ts3.4 && downlevel-dts dist-types dist-types/ts3.4", + "stage-release": "rimraf ./.release && yarn pack && mkdir ./.release && tar zxvf ./package.tgz --directory ./.release && rm ./package.tgz", + "clean": "rimraf ./dist-* && rimraf *.tsbuildinfo || exit 0", + "lint": "eslint -c ../../.eslintrc.js \"src/**/*.ts\"", + "format": "prettier --config ../../prettier.config.js --ignore-path ../.prettierignore --write \"**/*.{ts,md,json}\"", + "test": "yarn g:vitest run", + "test:watch": "yarn g:vitest watch" + }, + "author": { + "name": "AWS SDK for JavaScript Team", + "url": "https://aws.amazon.com/javascript/" + }, + "license": "Apache-2.0", + "dependencies": { + "@smithy/util-buffer-from": "^4.0.0", + "tslib": "^2.6.2" + }, + "devDependencies": { + "concurrently": "7.0.0", + "downlevel-dts": "0.10.1", + "rimraf": "3.0.2", + "typedoc": "0.23.23" + }, + "types": "./dist-types/index.d.ts", + "engines": { + "node": ">=18.0.0" + }, + "typesVersions": { + "<4.0": { + "dist-types/*": [ + "dist-types/ts3.4/*" + ] + } + }, + "files": [ + "dist-*/**" + ], + "browser": { + 
"./dist-es/fromUtf8": "./dist-es/fromUtf8.browser", + "./dist-es/toUtf8": "./dist-es/toUtf8.browser" + }, + "react-native": {}, + "homepage": "https://github.com/awslabs/smithy-typescript/tree/main/packages/util-utf8", + "repository": { + "type": "git", + "url": "https://github.com/awslabs/smithy-typescript.git", + "directory": "packages/util-utf8" + }, + "typedoc": { + "entryPoint": "src/index.ts" + }, + "publishConfig": { + "directory": ".release/package" + } +} \ No newline at end of file diff --git a/amplify/functions/deleteDocument/node_modules/@smithy/util-waiter/LICENSE b/amplify/functions/deleteDocument/node_modules/@smithy/util-waiter/LICENSE new file mode 100644 index 0000000..7b6491b --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@smithy/util-waiter/LICENSE @@ -0,0 +1,201 @@ +Apache License + Version 2.0, January 2004 + http://www.apache.org/licenses/ + + TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION + + 1. Definitions. + + "License" shall mean the terms and conditions for use, reproduction, + and distribution as defined by Sections 1 through 9 of this document. + + "Licensor" shall mean the copyright owner or entity authorized by + the copyright owner that is granting the License. + + "Legal Entity" shall mean the union of the acting entity and all + other entities that control, are controlled by, or are under common + control with that entity. For the purposes of this definition, + "control" means (i) the power, direct or indirect, to cause the + direction or management of such entity, whether by contract or + otherwise, or (ii) ownership of fifty percent (50%) or more of the + outstanding shares, or (iii) beneficial ownership of such entity. + + "You" (or "Your") shall mean an individual or Legal Entity + exercising permissions granted by this License. 
+ + "Source" form shall mean the preferred form for making modifications, + including but not limited to software source code, documentation + source, and configuration files. + + "Object" form shall mean any form resulting from mechanical + transformation or translation of a Source form, including but + not limited to compiled object code, generated documentation, + and conversions to other media types. + + "Work" shall mean the work of authorship, whether in Source or + Object form, made available under the License, as indicated by a + copyright notice that is included in or attached to the work + (an example is provided in the Appendix below). + + "Derivative Works" shall mean any work, whether in Source or Object + form, that is based on (or derived from) the Work and for which the + editorial revisions, annotations, elaborations, or other modifications + represent, as a whole, an original work of authorship. For the purposes + of this License, Derivative Works shall not include works that remain + separable from, or merely link (or bind by name) to the interfaces of, + the Work and Derivative Works thereof. + + "Contribution" shall mean any work of authorship, including + the original version of the Work and any modifications or additions + to that Work or Derivative Works thereof, that is intentionally + submitted to Licensor for inclusion in the Work by the copyright owner + or by an individual or Legal Entity authorized to submit on behalf of + the copyright owner. 
For the purposes of this definition, "submitted" + means any form of electronic, verbal, or written communication sent + to the Licensor or its representatives, including but not limited to + communication on electronic mailing lists, source code control systems, + and issue tracking systems that are managed by, or on behalf of, the + Licensor for the purpose of discussing and improving the Work, but + excluding communication that is conspicuously marked or otherwise + designated in writing by the copyright owner as "Not a Contribution." + + "Contributor" shall mean Licensor and any individual or Legal Entity + on behalf of whom a Contribution has been received by Licensor and + subsequently incorporated within the Work. + + 2. Grant of Copyright License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + copyright license to reproduce, prepare Derivative Works of, + publicly display, publicly perform, sublicense, and distribute the + Work and such Derivative Works in Source or Object form. + + 3. Grant of Patent License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + (except as stated in this section) patent license to make, have made, + use, offer to sell, sell, import, and otherwise transfer the Work, + where such license applies only to those patent claims licensable + by such Contributor that are necessarily infringed by their + Contribution(s) alone or by combination of their Contribution(s) + with the Work to which such Contribution(s) was submitted. 
If You + institute patent litigation against any entity (including a + cross-claim or counterclaim in a lawsuit) alleging that the Work + or a Contribution incorporated within the Work constitutes direct + or contributory patent infringement, then any patent licenses + granted to You under this License for that Work shall terminate + as of the date such litigation is filed. + + 4. Redistribution. You may reproduce and distribute copies of the + Work or Derivative Works thereof in any medium, with or without + modifications, and in Source or Object form, provided that You + meet the following conditions: + + (a) You must give any other recipients of the Work or + Derivative Works a copy of this License; and + + (b) You must cause any modified files to carry prominent notices + stating that You changed the files; and + + (c) You must retain, in the Source form of any Derivative Works + that You distribute, all copyright, patent, trademark, and + attribution notices from the Source form of the Work, + excluding those notices that do not pertain to any part of + the Derivative Works; and + + (d) If the Work includes a "NOTICE" text file as part of its + distribution, then any Derivative Works that You distribute must + include a readable copy of the attribution notices contained + within such NOTICE file, excluding those notices that do not + pertain to any part of the Derivative Works, in at least one + of the following places: within a NOTICE text file distributed + as part of the Derivative Works; within the Source form or + documentation, if provided along with the Derivative Works; or, + within a display generated by the Derivative Works, if and + wherever such third-party notices normally appear. The contents + of the NOTICE file are for informational purposes only and + do not modify the License. 
You may add Your own attribution + notices within Derivative Works that You distribute, alongside + or as an addendum to the NOTICE text from the Work, provided + that such additional attribution notices cannot be construed + as modifying the License. + + You may add Your own copyright statement to Your modifications and + may provide additional or different license terms and conditions + for use, reproduction, or distribution of Your modifications, or + for any such Derivative Works as a whole, provided Your use, + reproduction, and distribution of the Work otherwise complies with + the conditions stated in this License. + + 5. Submission of Contributions. Unless You explicitly state otherwise, + any Contribution intentionally submitted for inclusion in the Work + by You to the Licensor shall be under the terms and conditions of + this License, without any additional terms or conditions. + Notwithstanding the above, nothing herein shall supersede or modify + the terms of any separate license agreement you may have executed + with Licensor regarding such Contributions. + + 6. Trademarks. This License does not grant permission to use the trade + names, trademarks, service marks, or product names of the Licensor, + except as required for reasonable and customary use in describing the + origin of the Work and reproducing the content of the NOTICE file. + + 7. Disclaimer of Warranty. Unless required by applicable law or + agreed to in writing, Licensor provides the Work (and each + Contributor provides its Contributions) on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or + implied, including, without limitation, any warranties or conditions + of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A + PARTICULAR PURPOSE. You are solely responsible for determining the + appropriateness of using or redistributing the Work and assume any + risks associated with Your exercise of permissions under this License. + + 8. 
Limitation of Liability. In no event and under no legal theory, + whether in tort (including negligence), contract, or otherwise, + unless required by applicable law (such as deliberate and grossly + negligent acts) or agreed to in writing, shall any Contributor be + liable to You for damages, including any direct, indirect, special, + incidental, or consequential damages of any character arising as a + result of this License or out of the use or inability to use the + Work (including but not limited to damages for loss of goodwill, + work stoppage, computer failure or malfunction, or any and all + other commercial damages or losses), even if such Contributor + has been advised of the possibility of such damages. + + 9. Accepting Warranty or Additional Liability. While redistributing + the Work or Derivative Works thereof, You may choose to offer, + and charge a fee for, acceptance of support, warranty, indemnity, + or other liability obligations and/or rights consistent with this + License. However, in accepting such obligations, You may act only + on Your own behalf and on Your sole responsibility, not on behalf + of any other Contributor, and only if You agree to indemnify, + defend, and hold each Contributor harmless for any liability + incurred by, or claims asserted against, such Contributor by reason + of your accepting any such warranty or additional liability. + + END OF TERMS AND CONDITIONS + + APPENDIX: How to apply the Apache License to your work. + + To apply the Apache License to your work, attach the following + boilerplate notice, with the fields enclosed by brackets "{}" + replaced with your own identifying information. (Don't include + the brackets!) The text should be enclosed in the appropriate + comment syntax for the file format. We also recommend that a + file or class name and description of purpose be included on the + same "printed page" as the copyright notice for easier + identification within third-party archives. 
+ + Copyright 2018-2020 Amazon.com, Inc. or its affiliates. All Rights Reserved. + + Licensed under the Apache License, Version 2.0 (the "License"); + you may not use this file except in compliance with the License. + You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + + Unless required by applicable law or agreed to in writing, software + distributed under the License is distributed on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + See the License for the specific language governing permissions and + limitations under the License. \ No newline at end of file diff --git a/amplify/functions/deleteDocument/node_modules/@smithy/util-waiter/README.md b/amplify/functions/deleteDocument/node_modules/@smithy/util-waiter/README.md new file mode 100644 index 0000000..17169a8 --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@smithy/util-waiter/README.md @@ -0,0 +1,10 @@ +# @smithy/util-waiter + +[![NPM version](https://img.shields.io/npm/v/@smithy/util-waiter/latest.svg)](https://www.npmjs.com/package/@smithy/util-waiter) +[![NPM downloads](https://img.shields.io/npm/dm/@smithy/util-waiter.svg)](https://www.npmjs.com/package/@smithy/util-waiter) + +> An internal package + +## Usage + +You probably shouldn't, at least directly. 
diff --git a/amplify/functions/deleteDocument/node_modules/@smithy/util-waiter/dist-cjs/createWaiter.js b/amplify/functions/deleteDocument/node_modules/@smithy/util-waiter/dist-cjs/createWaiter.js new file mode 100644 index 0000000..532e610 --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@smithy/util-waiter/dist-cjs/createWaiter.js @@ -0,0 +1 @@ +module.exports = require("./index.js"); \ No newline at end of file diff --git a/amplify/functions/deleteDocument/node_modules/@smithy/util-waiter/dist-cjs/index.js b/amplify/functions/deleteDocument/node_modules/@smithy/util-waiter/dist-cjs/index.js new file mode 100644 index 0000000..c038e3b --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@smithy/util-waiter/dist-cjs/index.js @@ -0,0 +1,185 @@ +var __defProp = Object.defineProperty; +var __getOwnPropDesc = Object.getOwnPropertyDescriptor; +var __getOwnPropNames = Object.getOwnPropertyNames; +var __hasOwnProp = Object.prototype.hasOwnProperty; +var __name = (target, value) => __defProp(target, "name", { value, configurable: true }); +var __export = (target, all) => { + for (var name in all) + __defProp(target, name, { get: all[name], enumerable: true }); +}; +var __copyProps = (to, from, except, desc) => { + if (from && typeof from === "object" || typeof from === "function") { + for (let key of __getOwnPropNames(from)) + if (!__hasOwnProp.call(to, key) && key !== except) + __defProp(to, key, { get: () => from[key], enumerable: !(desc = __getOwnPropDesc(from, key)) || desc.enumerable }); + } + return to; +}; +var __toCommonJS = (mod) => __copyProps(__defProp({}, "__esModule", { value: true }), mod); + +// src/index.ts +var src_exports = {}; +__export(src_exports, { + WaiterState: () => WaiterState, + checkExceptions: () => checkExceptions, + createWaiter: () => createWaiter, + waiterServiceDefaults: () => waiterServiceDefaults +}); +module.exports = __toCommonJS(src_exports); + +// src/utils/sleep.ts +var sleep = /* @__PURE__ */ 
__name((seconds) => { + return new Promise((resolve) => setTimeout(resolve, seconds * 1e3)); +}, "sleep"); + +// src/waiter.ts +var waiterServiceDefaults = { + minDelay: 2, + maxDelay: 120 +}; +var WaiterState = /* @__PURE__ */ ((WaiterState2) => { + WaiterState2["ABORTED"] = "ABORTED"; + WaiterState2["FAILURE"] = "FAILURE"; + WaiterState2["SUCCESS"] = "SUCCESS"; + WaiterState2["RETRY"] = "RETRY"; + WaiterState2["TIMEOUT"] = "TIMEOUT"; + return WaiterState2; +})(WaiterState || {}); +var checkExceptions = /* @__PURE__ */ __name((result) => { + if (result.state === "ABORTED" /* ABORTED */) { + const abortError = new Error( + `${JSON.stringify({ + ...result, + reason: "Request was aborted" + })}` + ); + abortError.name = "AbortError"; + throw abortError; + } else if (result.state === "TIMEOUT" /* TIMEOUT */) { + const timeoutError = new Error( + `${JSON.stringify({ + ...result, + reason: "Waiter has timed out" + })}` + ); + timeoutError.name = "TimeoutError"; + throw timeoutError; + } else if (result.state !== "SUCCESS" /* SUCCESS */) { + throw new Error(`${JSON.stringify(result)}`); + } + return result; +}, "checkExceptions"); + +// src/poller.ts +var exponentialBackoffWithJitter = /* @__PURE__ */ __name((minDelay, maxDelay, attemptCeiling, attempt) => { + if (attempt > attemptCeiling) + return maxDelay; + const delay = minDelay * 2 ** (attempt - 1); + return randomInRange(minDelay, delay); +}, "exponentialBackoffWithJitter"); +var randomInRange = /* @__PURE__ */ __name((min, max) => min + Math.random() * (max - min), "randomInRange"); +var runPolling = /* @__PURE__ */ __name(async ({ minDelay, maxDelay, maxWaitTime, abortController, client, abortSignal }, input, acceptorChecks) => { + const observedResponses = {}; + const { state, reason } = await acceptorChecks(client, input); + if (reason) { + const message = createMessageFromResponse(reason); + observedResponses[message] |= 0; + observedResponses[message] += 1; + } + if (state !== "RETRY" /* RETRY */) { + return 
{ state, reason, observedResponses }; + } + let currentAttempt = 1; + const waitUntil = Date.now() + maxWaitTime * 1e3; + const attemptCeiling = Math.log(maxDelay / minDelay) / Math.log(2) + 1; + while (true) { + if (abortController?.signal?.aborted || abortSignal?.aborted) { + const message = "AbortController signal aborted."; + observedResponses[message] |= 0; + observedResponses[message] += 1; + return { state: "ABORTED" /* ABORTED */, observedResponses }; + } + const delay = exponentialBackoffWithJitter(minDelay, maxDelay, attemptCeiling, currentAttempt); + if (Date.now() + delay * 1e3 > waitUntil) { + return { state: "TIMEOUT" /* TIMEOUT */, observedResponses }; + } + await sleep(delay); + const { state: state2, reason: reason2 } = await acceptorChecks(client, input); + if (reason2) { + const message = createMessageFromResponse(reason2); + observedResponses[message] |= 0; + observedResponses[message] += 1; + } + if (state2 !== "RETRY" /* RETRY */) { + return { state: state2, reason: reason2, observedResponses }; + } + currentAttempt += 1; + } +}, "runPolling"); +var createMessageFromResponse = /* @__PURE__ */ __name((reason) => { + if (reason?.$responseBodyText) { + return `Deserialization error for body: ${reason.$responseBodyText}`; + } + if (reason?.$metadata?.httpStatusCode) { + if (reason.$response || reason.message) { + return `${reason.$response.statusCode ?? reason.$metadata.httpStatusCode ?? "Unknown"}: ${reason.message}`; + } + return `${reason.$metadata.httpStatusCode}: OK`; + } + return String(reason?.message ?? JSON.stringify(reason) ?? 
"Unknown"); +}, "createMessageFromResponse"); + +// src/utils/validate.ts +var validateWaiterOptions = /* @__PURE__ */ __name((options) => { + if (options.maxWaitTime <= 0) { + throw new Error(`WaiterConfiguration.maxWaitTime must be greater than 0`); + } else if (options.minDelay <= 0) { + throw new Error(`WaiterConfiguration.minDelay must be greater than 0`); + } else if (options.maxDelay <= 0) { + throw new Error(`WaiterConfiguration.maxDelay must be greater than 0`); + } else if (options.maxWaitTime <= options.minDelay) { + throw new Error( + `WaiterConfiguration.maxWaitTime [${options.maxWaitTime}] must be greater than WaiterConfiguration.minDelay [${options.minDelay}] for this waiter` + ); + } else if (options.maxDelay < options.minDelay) { + throw new Error( + `WaiterConfiguration.maxDelay [${options.maxDelay}] must be greater than WaiterConfiguration.minDelay [${options.minDelay}] for this waiter` + ); + } +}, "validateWaiterOptions"); + +// src/createWaiter.ts +var abortTimeout = /* @__PURE__ */ __name(async (abortSignal) => { + return new Promise((resolve) => { + const onAbort = /* @__PURE__ */ __name(() => resolve({ state: "ABORTED" /* ABORTED */ }), "onAbort"); + if (typeof abortSignal.addEventListener === "function") { + abortSignal.addEventListener("abort", onAbort); + } else { + abortSignal.onabort = onAbort; + } + }); +}, "abortTimeout"); +var createWaiter = /* @__PURE__ */ __name(async (options, input, acceptorChecks) => { + const params = { + ...waiterServiceDefaults, + ...options + }; + validateWaiterOptions(params); + const exitConditions = [runPolling(params, input, acceptorChecks)]; + if (options.abortController) { + exitConditions.push(abortTimeout(options.abortController.signal)); + } + if (options.abortSignal) { + exitConditions.push(abortTimeout(options.abortSignal)); + } + return Promise.race(exitConditions); +}, "createWaiter"); +// Annotate the CommonJS export names for ESM import in node: + +0 && (module.exports = { + createWaiter, + 
waiterServiceDefaults, + WaiterState, + checkExceptions +}); + diff --git a/amplify/functions/deleteDocument/node_modules/@smithy/util-waiter/dist-cjs/poller.js b/amplify/functions/deleteDocument/node_modules/@smithy/util-waiter/dist-cjs/poller.js new file mode 100644 index 0000000..532e610 --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@smithy/util-waiter/dist-cjs/poller.js @@ -0,0 +1 @@ +module.exports = require("./index.js"); \ No newline at end of file diff --git a/amplify/functions/deleteDocument/node_modules/@smithy/util-waiter/dist-cjs/utils/index.js b/amplify/functions/deleteDocument/node_modules/@smithy/util-waiter/dist-cjs/utils/index.js new file mode 100644 index 0000000..0440577 --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@smithy/util-waiter/dist-cjs/utils/index.js @@ -0,0 +1 @@ +module.exports = require("../index.js"); \ No newline at end of file diff --git a/amplify/functions/deleteDocument/node_modules/@smithy/util-waiter/dist-cjs/utils/sleep.js b/amplify/functions/deleteDocument/node_modules/@smithy/util-waiter/dist-cjs/utils/sleep.js new file mode 100644 index 0000000..0440577 --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@smithy/util-waiter/dist-cjs/utils/sleep.js @@ -0,0 +1 @@ +module.exports = require("../index.js"); \ No newline at end of file diff --git a/amplify/functions/deleteDocument/node_modules/@smithy/util-waiter/dist-cjs/utils/validate.js b/amplify/functions/deleteDocument/node_modules/@smithy/util-waiter/dist-cjs/utils/validate.js new file mode 100644 index 0000000..0440577 --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@smithy/util-waiter/dist-cjs/utils/validate.js @@ -0,0 +1 @@ +module.exports = require("../index.js"); \ No newline at end of file diff --git a/amplify/functions/deleteDocument/node_modules/@smithy/util-waiter/dist-cjs/waiter.js b/amplify/functions/deleteDocument/node_modules/@smithy/util-waiter/dist-cjs/waiter.js new file mode 100644 index 
0000000..532e610 --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@smithy/util-waiter/dist-cjs/waiter.js @@ -0,0 +1 @@ +module.exports = require("./index.js"); \ No newline at end of file diff --git a/amplify/functions/deleteDocument/node_modules/@smithy/util-waiter/dist-es/createWaiter.js b/amplify/functions/deleteDocument/node_modules/@smithy/util-waiter/dist-es/createWaiter.js new file mode 100644 index 0000000..59bfdb9 --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@smithy/util-waiter/dist-es/createWaiter.js @@ -0,0 +1,29 @@ +import { runPolling } from "./poller"; +import { validateWaiterOptions } from "./utils"; +import { waiterServiceDefaults, WaiterState } from "./waiter"; +const abortTimeout = async (abortSignal) => { + return new Promise((resolve) => { + const onAbort = () => resolve({ state: WaiterState.ABORTED }); + if (typeof abortSignal.addEventListener === "function") { + abortSignal.addEventListener("abort", onAbort); + } + else { + abortSignal.onabort = onAbort; + } + }); +}; +export const createWaiter = async (options, input, acceptorChecks) => { + const params = { + ...waiterServiceDefaults, + ...options, + }; + validateWaiterOptions(params); + const exitConditions = [runPolling(params, input, acceptorChecks)]; + if (options.abortController) { + exitConditions.push(abortTimeout(options.abortController.signal)); + } + if (options.abortSignal) { + exitConditions.push(abortTimeout(options.abortSignal)); + } + return Promise.race(exitConditions); +}; diff --git a/amplify/functions/deleteDocument/node_modules/@smithy/util-waiter/dist-es/index.js b/amplify/functions/deleteDocument/node_modules/@smithy/util-waiter/dist-es/index.js new file mode 100644 index 0000000..d77f139 --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@smithy/util-waiter/dist-es/index.js @@ -0,0 +1,2 @@ +export * from "./createWaiter"; +export * from "./waiter"; diff --git 
a/amplify/functions/deleteDocument/node_modules/@smithy/util-waiter/dist-es/poller.js b/amplify/functions/deleteDocument/node_modules/@smithy/util-waiter/dist-es/poller.js new file mode 100644 index 0000000..d1a0ec0 --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@smithy/util-waiter/dist-es/poller.js @@ -0,0 +1,59 @@ +import { sleep } from "./utils/sleep"; +import { WaiterState } from "./waiter"; +const exponentialBackoffWithJitter = (minDelay, maxDelay, attemptCeiling, attempt) => { + if (attempt > attemptCeiling) + return maxDelay; + const delay = minDelay * 2 ** (attempt - 1); + return randomInRange(minDelay, delay); +}; +const randomInRange = (min, max) => min + Math.random() * (max - min); +export const runPolling = async ({ minDelay, maxDelay, maxWaitTime, abortController, client, abortSignal }, input, acceptorChecks) => { + const observedResponses = {}; + const { state, reason } = await acceptorChecks(client, input); + if (reason) { + const message = createMessageFromResponse(reason); + observedResponses[message] |= 0; + observedResponses[message] += 1; + } + if (state !== WaiterState.RETRY) { + return { state, reason, observedResponses }; + } + let currentAttempt = 1; + const waitUntil = Date.now() + maxWaitTime * 1000; + const attemptCeiling = Math.log(maxDelay / minDelay) / Math.log(2) + 1; + while (true) { + if (abortController?.signal?.aborted || abortSignal?.aborted) { + const message = "AbortController signal aborted."; + observedResponses[message] |= 0; + observedResponses[message] += 1; + return { state: WaiterState.ABORTED, observedResponses }; + } + const delay = exponentialBackoffWithJitter(minDelay, maxDelay, attemptCeiling, currentAttempt); + if (Date.now() + delay * 1000 > waitUntil) { + return { state: WaiterState.TIMEOUT, observedResponses }; + } + await sleep(delay); + const { state, reason } = await acceptorChecks(client, input); + if (reason) { + const message = createMessageFromResponse(reason); + 
observedResponses[message] |= 0; + observedResponses[message] += 1; + } + if (state !== WaiterState.RETRY) { + return { state, reason, observedResponses }; + } + currentAttempt += 1; + } +}; +const createMessageFromResponse = (reason) => { + if (reason?.$responseBodyText) { + return `Deserialization error for body: ${reason.$responseBodyText}`; + } + if (reason?.$metadata?.httpStatusCode) { + if (reason.$response || reason.message) { + return `${reason.$response.statusCode ?? reason.$metadata.httpStatusCode ?? "Unknown"}: ${reason.message}`; + } + return `${reason.$metadata.httpStatusCode}: OK`; + } + return String(reason?.message ?? JSON.stringify(reason) ?? "Unknown"); +}; diff --git a/amplify/functions/deleteDocument/node_modules/@smithy/util-waiter/dist-es/utils/index.js b/amplify/functions/deleteDocument/node_modules/@smithy/util-waiter/dist-es/utils/index.js new file mode 100644 index 0000000..e15a156 --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@smithy/util-waiter/dist-es/utils/index.js @@ -0,0 +1,2 @@ +export * from "./sleep"; +export * from "./validate"; diff --git a/amplify/functions/deleteDocument/node_modules/@smithy/util-waiter/dist-es/utils/sleep.js b/amplify/functions/deleteDocument/node_modules/@smithy/util-waiter/dist-es/utils/sleep.js new file mode 100644 index 0000000..789205d --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@smithy/util-waiter/dist-es/utils/sleep.js @@ -0,0 +1,3 @@ +export const sleep = (seconds) => { + return new Promise((resolve) => setTimeout(resolve, seconds * 1000)); +}; diff --git a/amplify/functions/deleteDocument/node_modules/@smithy/util-waiter/dist-es/utils/validate.js b/amplify/functions/deleteDocument/node_modules/@smithy/util-waiter/dist-es/utils/validate.js new file mode 100644 index 0000000..e094ea7 --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@smithy/util-waiter/dist-es/utils/validate.js @@ -0,0 +1,17 @@ +export const validateWaiterOptions = (options) => 
{ + if (options.maxWaitTime <= 0) { + throw new Error(`WaiterConfiguration.maxWaitTime must be greater than 0`); + } + else if (options.minDelay <= 0) { + throw new Error(`WaiterConfiguration.minDelay must be greater than 0`); + } + else if (options.maxDelay <= 0) { + throw new Error(`WaiterConfiguration.maxDelay must be greater than 0`); + } + else if (options.maxWaitTime <= options.minDelay) { + throw new Error(`WaiterConfiguration.maxWaitTime [${options.maxWaitTime}] must be greater than WaiterConfiguration.minDelay [${options.minDelay}] for this waiter`); + } + else if (options.maxDelay < options.minDelay) { + throw new Error(`WaiterConfiguration.maxDelay [${options.maxDelay}] must be greater than WaiterConfiguration.minDelay [${options.minDelay}] for this waiter`); + } +}; diff --git a/amplify/functions/deleteDocument/node_modules/@smithy/util-waiter/dist-es/waiter.js b/amplify/functions/deleteDocument/node_modules/@smithy/util-waiter/dist-es/waiter.js new file mode 100644 index 0000000..158c46a --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@smithy/util-waiter/dist-es/waiter.js @@ -0,0 +1,34 @@ +export const waiterServiceDefaults = { + minDelay: 2, + maxDelay: 120, +}; +export var WaiterState; +(function (WaiterState) { + WaiterState["ABORTED"] = "ABORTED"; + WaiterState["FAILURE"] = "FAILURE"; + WaiterState["SUCCESS"] = "SUCCESS"; + WaiterState["RETRY"] = "RETRY"; + WaiterState["TIMEOUT"] = "TIMEOUT"; +})(WaiterState || (WaiterState = {})); +export const checkExceptions = (result) => { + if (result.state === WaiterState.ABORTED) { + const abortError = new Error(`${JSON.stringify({ + ...result, + reason: "Request was aborted", + })}`); + abortError.name = "AbortError"; + throw abortError; + } + else if (result.state === WaiterState.TIMEOUT) { + const timeoutError = new Error(`${JSON.stringify({ + ...result, + reason: "Waiter has timed out", + })}`); + timeoutError.name = "TimeoutError"; + throw timeoutError; + } + else if (result.state !== 
WaiterState.SUCCESS) { + throw new Error(`${JSON.stringify(result)}`); + } + return result; +}; diff --git a/amplify/functions/deleteDocument/node_modules/@smithy/util-waiter/dist-types/createWaiter.d.ts b/amplify/functions/deleteDocument/node_modules/@smithy/util-waiter/dist-types/createWaiter.d.ts new file mode 100644 index 0000000..1695802 --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@smithy/util-waiter/dist-types/createWaiter.d.ts @@ -0,0 +1,11 @@ +import { WaiterOptions, WaiterResult } from "./waiter"; +/** + * Create a waiter promise that only resolves when: + * 1. Abort controller is signaled + * 2. Max wait time is reached + * 3. `acceptorChecks` succeeds, or fails + * Otherwise, it invokes `acceptorChecks` with exponential-backoff delay. + * + * @internal + */ +export declare const createWaiter: (options: WaiterOptions, input: Input, acceptorChecks: (client: Client, input: Input) => Promise) => Promise; diff --git a/amplify/functions/deleteDocument/node_modules/@smithy/util-waiter/dist-types/index.d.ts b/amplify/functions/deleteDocument/node_modules/@smithy/util-waiter/dist-types/index.d.ts new file mode 100644 index 0000000..d77f139 --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@smithy/util-waiter/dist-types/index.d.ts @@ -0,0 +1,2 @@ +export * from "./createWaiter"; +export * from "./waiter"; diff --git a/amplify/functions/deleteDocument/node_modules/@smithy/util-waiter/dist-types/poller.d.ts b/amplify/functions/deleteDocument/node_modules/@smithy/util-waiter/dist-types/poller.d.ts new file mode 100644 index 0000000..4008957 --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@smithy/util-waiter/dist-types/poller.d.ts @@ -0,0 +1,10 @@ +import { WaiterOptions, WaiterResult } from "./waiter"; +/** + * Function that runs polling as part of waiters. This will make one inital attempt and then + * subsequent attempts with an increasing delay. + * @param params - options passed to the waiter. 
+ * @param client - AWS SDK Client + * @param input - client input + * @param stateChecker - function that checks the acceptor states on each poll. + */ +export declare const runPolling: ({ minDelay, maxDelay, maxWaitTime, abortController, client, abortSignal }: WaiterOptions, input: Input, acceptorChecks: (client: Client, input: Input) => Promise) => Promise; diff --git a/amplify/functions/deleteDocument/node_modules/@smithy/util-waiter/dist-types/ts3.4/createWaiter.d.ts b/amplify/functions/deleteDocument/node_modules/@smithy/util-waiter/dist-types/ts3.4/createWaiter.d.ts new file mode 100644 index 0000000..f9b3242 --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@smithy/util-waiter/dist-types/ts3.4/createWaiter.d.ts @@ -0,0 +1,11 @@ +import { WaiterOptions, WaiterResult } from "./waiter"; +/** + * Create a waiter promise that only resolves when: + * 1. Abort controller is signaled + * 2. Max wait time is reached + * 3. `acceptorChecks` succeeds, or fails + * Otherwise, it invokes `acceptorChecks` with exponential-backoff delay. 
+ * + * @internal + */ +export declare const createWaiter: (options: WaiterOptions, input: Input, acceptorChecks: (client: Client, input: Input) => Promise) => Promise; diff --git a/amplify/functions/deleteDocument/node_modules/@smithy/util-waiter/dist-types/ts3.4/index.d.ts b/amplify/functions/deleteDocument/node_modules/@smithy/util-waiter/dist-types/ts3.4/index.d.ts new file mode 100644 index 0000000..be143d5 --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@smithy/util-waiter/dist-types/ts3.4/index.d.ts @@ -0,0 +1,2 @@ +export * from "./createWaiter"; +export * from "./waiter"; diff --git a/amplify/functions/deleteDocument/node_modules/@smithy/util-waiter/dist-types/ts3.4/poller.d.ts b/amplify/functions/deleteDocument/node_modules/@smithy/util-waiter/dist-types/ts3.4/poller.d.ts new file mode 100644 index 0000000..8b33c94 --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@smithy/util-waiter/dist-types/ts3.4/poller.d.ts @@ -0,0 +1,10 @@ +import { WaiterOptions, WaiterResult } from "./waiter"; +/** + * Function that runs polling as part of waiters. This will make one inital attempt and then + * subsequent attempts with an increasing delay. + * @param params - options passed to the waiter. + * @param client - AWS SDK Client + * @param input - client input + * @param stateChecker - function that checks the acceptor states on each poll. 
+ */ +export declare const runPolling: ({ minDelay, maxDelay, maxWaitTime, abortController, client, abortSignal }: WaiterOptions, input: Input, acceptorChecks: (client: Client, input: Input) => Promise) => Promise; diff --git a/amplify/functions/deleteDocument/node_modules/@smithy/util-waiter/dist-types/ts3.4/utils/index.d.ts b/amplify/functions/deleteDocument/node_modules/@smithy/util-waiter/dist-types/ts3.4/utils/index.d.ts new file mode 100644 index 0000000..974384c --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@smithy/util-waiter/dist-types/ts3.4/utils/index.d.ts @@ -0,0 +1,8 @@ +/** + * @internal + */ +export * from "./sleep"; +/** + * @internal + */ +export * from "./validate"; diff --git a/amplify/functions/deleteDocument/node_modules/@smithy/util-waiter/dist-types/ts3.4/utils/sleep.d.ts b/amplify/functions/deleteDocument/node_modules/@smithy/util-waiter/dist-types/ts3.4/utils/sleep.d.ts new file mode 100644 index 0000000..f53553b --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@smithy/util-waiter/dist-types/ts3.4/utils/sleep.d.ts @@ -0,0 +1,4 @@ +/** + * @internal + */ +export declare const sleep: (seconds: number) => Promise; diff --git a/amplify/functions/deleteDocument/node_modules/@smithy/util-waiter/dist-types/ts3.4/utils/validate.d.ts b/amplify/functions/deleteDocument/node_modules/@smithy/util-waiter/dist-types/ts3.4/utils/validate.d.ts new file mode 100644 index 0000000..73d79b0 --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@smithy/util-waiter/dist-types/ts3.4/utils/validate.d.ts @@ -0,0 +1,8 @@ +import { WaiterOptions } from "../waiter"; +/** + * @internal + * + * Validates that waiter options are passed correctly + * @param options - a waiter configuration object + */ +export declare const validateWaiterOptions: (options: WaiterOptions) => void; diff --git a/amplify/functions/deleteDocument/node_modules/@smithy/util-waiter/dist-types/ts3.4/waiter.d.ts 
b/amplify/functions/deleteDocument/node_modules/@smithy/util-waiter/dist-types/ts3.4/waiter.d.ts new file mode 100644 index 0000000..f685ce4 --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@smithy/util-waiter/dist-types/ts3.4/waiter.d.ts @@ -0,0 +1,49 @@ +import { WaiterConfiguration as WaiterConfiguration__ } from "@smithy/types"; +/** + * @internal + */ +export interface WaiterConfiguration extends WaiterConfiguration__ { +} +/** + * @internal + */ +export declare const waiterServiceDefaults: { + minDelay: number; + maxDelay: number; +}; +/** + * @internal + */ +export type WaiterOptions = WaiterConfiguration & Required, "minDelay" | "maxDelay">>; +/** + * @internal + */ +export declare enum WaiterState { + ABORTED = "ABORTED", + FAILURE = "FAILURE", + SUCCESS = "SUCCESS", + RETRY = "RETRY", + TIMEOUT = "TIMEOUT" +} +/** + * @internal + */ +export type WaiterResult = { + state: WaiterState; + /** + * (optional) Indicates a reason for why a waiter has reached its state. + */ + reason?: any; + /** + * Responses observed by the waiter during its polling, where the value + * is the count. 
+ */ + observedResponses?: Record; +}; +/** + * @internal + * + * Handles and throws exceptions resulting from the waiterResult + * @param result - WaiterResult + */ +export declare const checkExceptions: (result: WaiterResult) => WaiterResult; diff --git a/amplify/functions/deleteDocument/node_modules/@smithy/util-waiter/dist-types/utils/index.d.ts b/amplify/functions/deleteDocument/node_modules/@smithy/util-waiter/dist-types/utils/index.d.ts new file mode 100644 index 0000000..b9a3205 --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@smithy/util-waiter/dist-types/utils/index.d.ts @@ -0,0 +1,8 @@ +/** + * @internal + */ +export * from "./sleep"; +/** + * @internal + */ +export * from "./validate"; diff --git a/amplify/functions/deleteDocument/node_modules/@smithy/util-waiter/dist-types/utils/sleep.d.ts b/amplify/functions/deleteDocument/node_modules/@smithy/util-waiter/dist-types/utils/sleep.d.ts new file mode 100644 index 0000000..e5d9f73 --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@smithy/util-waiter/dist-types/utils/sleep.d.ts @@ -0,0 +1,4 @@ +/** + * @internal + */ +export declare const sleep: (seconds: number) => Promise; diff --git a/amplify/functions/deleteDocument/node_modules/@smithy/util-waiter/dist-types/utils/validate.d.ts b/amplify/functions/deleteDocument/node_modules/@smithy/util-waiter/dist-types/utils/validate.d.ts new file mode 100644 index 0000000..a847eee --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@smithy/util-waiter/dist-types/utils/validate.d.ts @@ -0,0 +1,8 @@ +import { WaiterOptions } from "../waiter"; +/** + * @internal + * + * Validates that waiter options are passed correctly + * @param options - a waiter configuration object + */ +export declare const validateWaiterOptions: (options: WaiterOptions) => void; diff --git a/amplify/functions/deleteDocument/node_modules/@smithy/util-waiter/dist-types/waiter.d.ts 
b/amplify/functions/deleteDocument/node_modules/@smithy/util-waiter/dist-types/waiter.d.ts new file mode 100644 index 0000000..e0c690f --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@smithy/util-waiter/dist-types/waiter.d.ts @@ -0,0 +1,49 @@ +import { WaiterConfiguration as WaiterConfiguration__ } from "@smithy/types"; +/** + * @internal + */ +export interface WaiterConfiguration extends WaiterConfiguration__ { +} +/** + * @internal + */ +export declare const waiterServiceDefaults: { + minDelay: number; + maxDelay: number; +}; +/** + * @internal + */ +export type WaiterOptions = WaiterConfiguration & Required, "minDelay" | "maxDelay">>; +/** + * @internal + */ +export declare enum WaiterState { + ABORTED = "ABORTED", + FAILURE = "FAILURE", + SUCCESS = "SUCCESS", + RETRY = "RETRY", + TIMEOUT = "TIMEOUT" +} +/** + * @internal + */ +export type WaiterResult = { + state: WaiterState; + /** + * (optional) Indicates a reason for why a waiter has reached its state. + */ + reason?: any; + /** + * Responses observed by the waiter during its polling, where the value + * is the count. 
+ */ + observedResponses?: Record; +}; +/** + * @internal + * + * Handles and throws exceptions resulting from the waiterResult + * @param result - WaiterResult + */ +export declare const checkExceptions: (result: WaiterResult) => WaiterResult; diff --git a/amplify/functions/deleteDocument/node_modules/@smithy/util-waiter/package.json b/amplify/functions/deleteDocument/node_modules/@smithy/util-waiter/package.json new file mode 100644 index 0000000..2706fd7 --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@smithy/util-waiter/package.json @@ -0,0 +1,62 @@ +{ + "name": "@smithy/util-waiter", + "version": "4.0.3", + "description": "Shared utilities for client waiters for the AWS SDK", + "dependencies": { + "@smithy/abort-controller": "^4.0.2", + "@smithy/types": "^4.2.0", + "tslib": "^2.6.2" + }, + "scripts": { + "build": "concurrently 'yarn:build:cjs' 'yarn:build:es' 'yarn:build:types && yarn build:types:downlevel'", + "build:cjs": "node ../../scripts/inline util-waiter", + "build:es": "yarn g:tsc -p tsconfig.es.json", + "build:types": "yarn g:tsc -p tsconfig.types.json", + "build:types:downlevel": "rimraf dist-types/ts3.4 && downlevel-dts dist-types dist-types/ts3.4", + "stage-release": "rimraf ./.release && yarn pack && mkdir ./.release && tar zxvf ./package.tgz --directory ./.release && rm ./package.tgz", + "clean": "rimraf ./dist-* && rimraf *.tsbuildinfo || exit 0", + "lint": "eslint -c ../../.eslintrc.js \"src/**/*.ts\"", + "format": "prettier --config ../../prettier.config.js --ignore-path ../../.prettierignore --write \"**/*.{ts,md,json}\"", + "test": "yarn g:vitest run", + "test:watch": "yarn g:vitest watch" + }, + "author": { + "name": "AWS SDK for JavaScript Team", + "url": "https://aws.amazon.com/javascript/" + }, + "license": "Apache-2.0", + "main": "./dist-cjs/index.js", + "module": "./dist-es/index.js", + "types": "./dist-types/index.d.ts", + "engines": { + "node": ">=18.0.0" + }, + "typesVersions": { + "<4.0": { + "dist-types/*": [ + 
"dist-types/ts3.4/*" + ] + } + }, + "files": [ + "dist-*/**" + ], + "homepage": "https://github.com/smithy-lang/smithy-typescript/tree/main/packages/util-waiter", + "repository": { + "type": "git", + "url": "https://github.com/smithy-lang/smithy-typescript.git", + "directory": "packages/util-waiter" + }, + "devDependencies": { + "concurrently": "7.0.0", + "downlevel-dts": "0.10.1", + "rimraf": "3.0.2", + "typedoc": "0.23.23" + }, + "typedoc": { + "entryPoint": "src/index.ts" + }, + "publishConfig": { + "directory": ".release/package" + } +} \ No newline at end of file diff --git a/amplify/functions/deleteDocument/node_modules/@types/uuid/LICENSE b/amplify/functions/deleteDocument/node_modules/@types/uuid/LICENSE new file mode 100644 index 0000000..9e841e7 --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@types/uuid/LICENSE @@ -0,0 +1,21 @@ + MIT License + + Copyright (c) Microsoft Corporation. + + Permission is hereby granted, free of charge, to any person obtaining a copy + of this software and associated documentation files (the "Software"), to deal + in the Software without restriction, including without limitation the rights + to use, copy, modify, merge, publish, distribute, sublicense, and/or sell + copies of the Software, and to permit persons to whom the Software is + furnished to do so, subject to the following conditions: + + The above copyright notice and this permission notice shall be included in all + copies or substantial portions of the Software. + + THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR + IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, + FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. 
IN NO EVENT SHALL THE + AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER + LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, + OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE + SOFTWARE diff --git a/amplify/functions/deleteDocument/node_modules/@types/uuid/README.md b/amplify/functions/deleteDocument/node_modules/@types/uuid/README.md new file mode 100644 index 0000000..4cd2a58 --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@types/uuid/README.md @@ -0,0 +1,15 @@ +# Installation +> `npm install --save @types/uuid` + +# Summary +This package contains type definitions for uuid (https://github.com/uuidjs/uuid). + +# Details +Files were exported from https://github.com/DefinitelyTyped/DefinitelyTyped/tree/master/types/uuid. + +### Additional Details + * Last updated: Thu, 25 Jan 2024 23:07:19 GMT + * Dependencies: none + +# Credits +These definitions were written by [Oliver Hoffmann](https://github.com/iamolivinius), [Felipe Ochoa](https://github.com/felipeochoa), [Chris Barth](https://github.com/cjbarth), [Linus Unnebäck](https://github.com/LinusU), and [Christoph Tavan](https://github.com/ctavan). 
diff --git a/amplify/functions/deleteDocument/node_modules/@types/uuid/index.d.mts b/amplify/functions/deleteDocument/node_modules/@types/uuid/index.d.mts new file mode 100644 index 0000000..47a6599 --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@types/uuid/index.d.mts @@ -0,0 +1,12 @@ +import uuid from "./index.js"; +export import v1 = uuid.v1; +export import v3 = uuid.v3; +export import v4 = uuid.v4; +export import v5 = uuid.v5; +export import NIL = uuid.NIL; +export import version = uuid.version; +export import validate = uuid.validate; +export import stringify = uuid.stringify; +export import parse = uuid.parse; +export import V1Options = uuid.V1Options; +export import V4Options = uuid.V4Options; diff --git a/amplify/functions/deleteDocument/node_modules/@types/uuid/index.d.ts b/amplify/functions/deleteDocument/node_modules/@types/uuid/index.d.ts new file mode 100644 index 0000000..2f7d813 --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@types/uuid/index.d.ts @@ -0,0 +1,86 @@ +// disable automatic export +export {}; + +// Uses ArrayLike to admit Uint8 and co. 
+type OutputBuffer = ArrayLike; +type InputBuffer = ArrayLike; + +interface RandomOptions { + /** `Array` of 16 random bytes (0-255) */ + random?: InputBuffer | undefined; +} +interface RngOptions { + /** Alternative to `options.random`, a `Function` that returns an `Array` of 16 random bytes (0-255) */ + rng?: (() => InputBuffer) | undefined; +} + +interface V1BaseOptions { + /** RFC "node" field as an `Array[6]` of byte values (per 4.1.6) */ + node?: InputBuffer | undefined; + /** RFC "clock sequence" as a `Number` between 0 - 0x3fff */ + clockseq?: number | undefined; + /** RFC "timestamp" field (`Number` of milliseconds, unix epoch) */ + msecs?: number | Date | undefined; + /** RFC "timestamp" field (`Number` of nanoseconds to add to msecs, should be 0-10,000) */ + nsecs?: number | undefined; +} +interface V1RandomOptions extends V1BaseOptions, RandomOptions {} +interface V1RngOptions extends V1BaseOptions, RngOptions {} + +export type V1Options = V1RandomOptions | V1RngOptions; +export type V4Options = RandomOptions | RngOptions; + +type v1String = (options?: V1Options) => string; +type v1Buffer = (options: V1Options | null | undefined, buffer: T, offset?: number) => T; +type v1 = v1Buffer & v1String; + +type v4String = (options?: V4Options) => string; +type v4Buffer = (options: V4Options | null | undefined, buffer: T, offset?: number) => T; +type v4 = v4Buffer & v4String; + +type v3String = (name: string | InputBuffer, namespace: string | InputBuffer) => string; +type v3Buffer = ( + name: string | InputBuffer, + namespace: string | InputBuffer, + buffer: T, + offset?: number, +) => T; +interface v3Static { + // https://github.com/uuidjs/uuid/blob/master/src/v35.js#L16 + DNS: string; + // https://github.com/uuidjs/uuid/blob/master/src/v35.js#L17 + URL: string; +} +type v3 = v3Buffer & v3String & v3Static; + +type v5String = (name: string | InputBuffer, namespace: string | InputBuffer) => string; +type v5Buffer = ( + name: string | InputBuffer, + namespace: 
string | InputBuffer, + buffer: T, + offset?: number, +) => T; +interface v5Static { + // https://github.com/uuidjs/uuid/blob/master/src/v35.js#L16 + DNS: string; + // https://github.com/uuidjs/uuid/blob/master/src/v35.js#L17 + URL: string; +} +type v5 = v5Buffer & v5String & v5Static; + +type NIL = string; + +type parse = (uuid: string) => Uint8Array; +type stringify = (buffer: InputBuffer, offset?: number) => string; +type validate = (uuid: string) => boolean; +type version = (uuid: string) => number; + +export const NIL: NIL; +export const parse: parse; +export const stringify: stringify; +export const v1: v1; +export const v3: v3; +export const v4: v4; +export const v5: v5; +export const validate: validate; +export const version: version; diff --git a/amplify/functions/deleteDocument/node_modules/@types/uuid/package.json b/amplify/functions/deleteDocument/node_modules/@types/uuid/package.json new file mode 100644 index 0000000..09959ce --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/@types/uuid/package.json @@ -0,0 +1,54 @@ +{ + "name": "@types/uuid", + "version": "9.0.8", + "description": "TypeScript definitions for uuid", + "homepage": "https://github.com/DefinitelyTyped/DefinitelyTyped/tree/master/types/uuid", + "license": "MIT", + "contributors": [ + { + "name": "Oliver Hoffmann", + "githubUsername": "iamolivinius", + "url": "https://github.com/iamolivinius" + }, + { + "name": "Felipe Ochoa", + "githubUsername": "felipeochoa", + "url": "https://github.com/felipeochoa" + }, + { + "name": "Chris Barth", + "githubUsername": "cjbarth", + "url": "https://github.com/cjbarth" + }, + { + "name": "Linus Unnebäck", + "githubUsername": "LinusU", + "url": "https://github.com/LinusU" + }, + { + "name": "Christoph Tavan", + "githubUsername": "ctavan", + "url": "https://github.com/ctavan" + } + ], + "main": "", + "types": "index.d.ts", + "exports": { + "./package.json": "./package.json", + ".": { + "types": { + "import": "./index.d.mts", + "default": 
"./index.d.ts" + } + } + }, + "repository": { + "type": "git", + "url": "https://github.com/DefinitelyTyped/DefinitelyTyped.git", + "directory": "types/uuid" + }, + "scripts": {}, + "dependencies": {}, + "typesPublisherContentHash": "ee6ba7ad17fbbead7a508faf213a9ad0f49c12929e8c6b0f05fb35129bc72d61", + "typeScriptVersion": "4.6" +} \ No newline at end of file diff --git a/amplify/functions/deleteDocument/node_modules/bowser/CHANGELOG.md b/amplify/functions/deleteDocument/node_modules/bowser/CHANGELOG.md new file mode 100644 index 0000000..260a03d --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/bowser/CHANGELOG.md @@ -0,0 +1,218 @@ +# Bowser Changelog + +### 2.11.0 (Sep 12, 2020) +- [ADD] Added support for aliases in `Parser#is` method (#437) +- [ADD] Added more typings (#438, #427) +- [ADD] Added support for MIUI Browserr (#436) + +### 2.10.0 (Jul 9, 2020) +- [FIX] Fix for Firefox detection on iOS 13 [#415] +- [FIX] Fixes for typings.d.ts [#409] +- [FIX] Updated development dependencies + +### 2.9.0 (Jan 28, 2020) +- [ADD] Export more methods and constants via .d.ts [#388], [#390] + +### 2.8.1 (Dec 26, 2019) +- [FIX] Reverted [#382] as it broke build + +### 2.8.0 (Dec 26, 2019) +- [ADD] Add polyfills for Array.find & Object.assign [#383] +- [ADD] Export constants with types.d.ts [#382] +- [FIX] Add support for WeChat on Windows [#381] +- [FIX] Fix detection of Firefox on iPad [#379] +- [FIX] Add detection of Electron [#375] +- [FIX] Updated dev-dependencies + +### 2.7.0 (Oct 2, 2019) +- [FIX] Add support for QQ Browser [#362] +- [FIX] Add support for GSA [#364] +- [FIX] Updated dependencies + +### 2.6.0 (Sep 6, 2019) +- [ADD] Define "module" export in package.json [#354] +- [FIX] Fix Tablet PC detection [#334] + +### 2.5.4 (Sep 2, 2019) +- [FIX] Exclude docs from the npm package [#349] + +### 2.5.3 (Aug 4, 2019) +- [FIX] Add MacOS names support [#338] +- [FIX] Point typings.d.ts from package.json [#341] +- [FIX] Upgrade dependencies + +### 2.5.2 
(July 17, 2019) +- [FIX] Fixes the bug undefined method because of failed build (#335) + +### 2.5.1 (July 17, 2019) +- [FIX] Fixes the bug with a custom Error class (#335) +- [FIX] Fixes the settings for Babel to reduce the bundle size (#259) + +### 2.5.0 (July 16, 2019) +- [ADD] Add constant output so that users can quickly get all types (#325) +- [FIX] Add support for Roku OS (#332) +- [FIX] Update devDependencies +- [FIX] Fix docs, README and added funding information + +### 2.4.0 (May 3, 2019) +- [FIX] Update regexp for generic browsers (#310) +- [FIX] Fix issues with module.exports (#318) +- [FIX] Update devDependencies (#316, #321, #322) +- [FIX] Fix docs (#320) + +### 2.3.0 (April 14, 2019) +- [ADD] Add support for Blink-based MS Edge (#311) +- [ADD] Add more types for TS (#289) +- [FIX] Update dev-dependencies +- [FIX] Update docs + +### 2.2.1 (April 12, 2019) +- [ADD] Add an alias for Samsung Internet +- [FIX] Fix browser name detection for browsers without an alias (#313) + +### 2.2.0 (April 7, 2019) +- [ADD] Add short aliases for browser names (#295) +- [FIX] Fix Yandex Browser version detection (#308) + +### 2.1.2 (March 6, 2019) +- [FIX] Fix buggy `getFirstMatch` reference + +### 2.1.1 (March 6, 2019) +- [ADD] Add detection of PlayStation 4 (#291) +- [ADD] Deploy docs on GH Pages (#293) +- [FIX] Fix files extensions for importing (#294) +- [FIX] Fix docs (#295) + +### 2.1.0 (January 24, 2019) +- [ADD] Add new `Parser.getEngineName()` method (#288) +- [ADD] Add detection of ChromeOS (#287) +- [FIX] Fix README + +### 2.0.0 (January 19, 2019) +- [ADD] Support a non strict equality in `Parser.satisfies()` (#275) +- [ADD] Add Android versions names (#276) +- [ADD] Add a typings file (#277) +- [ADD] Added support for Googlebot recognition (#278) +- [FIX] Update building tools, avoid security issues + +### 2.0.0-beta.3 (September 15, 2018) +- [FIX] Fix Chrome Mobile detection (#253) +- [FIX] Use built bowser for CI (#252) +- [FIX] Update 
babel-plugin-add-module-exports (#251) + +### 2.0.0-beta.2 (September 9, 2018) +- [FIX] Fix failing comparing version through `Parser.satisfies` (#243) +- [FIX] Fix travis testing, include eslint into CI testing +- [FIX] Add support for Maxthon desktop browser (#246) +- [FIX] Add support for Swing browser (#248) +- [DOCS] Regenerate docs + +### 2.0.0-beta.1 (August 18, 2018) +- [ADD] Add loose version comparison to `Parser.compareVersion()` and `Parser.satisfies()` +- [CHORE] Add CONTRIBUTING.md +- [DOCS] Regenerate docs + +### 2.0.0-alpha.4 (August 2, 2018) +- [DOCS] Fix usage docs (#238) +- [CHANGE] Make `./es5.js` the main file of the package (#239) + +### 2.0.0-alpha.3 (July 22, 2018) +- [CHANGE] Rename split and rename `compiled.js` to `es5.js` and `bundled.js` (#231, #236, #237) +- [ADD] Add `Parser.some` (#235) + +### 2.0.0-alpha.2 (July 17, 2018) +- [CHANGE] Make `src/bowser` main file instead of the bundled one +- [CHANGE] Move the bundled file to the root of the package to make it possible to `require('bowser/compiled')` (#231) +- [REMOVE] Remove `typings.d.ts` before stable release (#232) +- [FIX] Improve Nexus devices detection (#233) + +### 2.0.0-alpha.1 (July 9, 2018) +- [ADD] `Bowser.getParser()` +- [ADD] `Bowser.parse` +- [ADD] `Parser` class which describes parsing process +- [CHANGE] Change bowser's returning object +- [REMOVE] Remove bower support + +### 1.9.4 (June 28, 2018) +- [FIX] Fix NAVER Whale browser detection (#220) +- [FIX] Fix MZ Browser browser detection (#219) +- [FIX] Fix Firefox Focus browser detection (#191) +- [FIX] Fix webOS browser detection (#186) + +### 1.9.3 (March 12, 2018) +- [FIX] Fix `typings.d.ts` — add `ipad`, `iphone`, `ipod` flags to the interface + +### 1.9.2 (February 5, 2018) +- [FIX] Fix `typings.d.ts` — add `osname` flag to the interface + +### 1.9.1 (December 22, 2017) +- [FIX] Fix `typings.d.ts` — add `chromium` flag to the interface + +### 1.9.0 (December 20, 2017) +- [ADD] Add a public method `.detect()` 
(#205) +- [DOCS] Fix description of `chromium` flag in docs (#206) + +### 1.8.1 (October 7, 2017) +- [FIX] Fix detection of MS Edge on Android and iOS (#201) + +### 1.8.0 (October 7, 2017) +- [ADD] Add `osname` into result object (#200) + +### 1.7.3 (August 30, 2017) +- [FIX] Fix detection of Chrome on Android 8 OPR6 (#193) + +### 1.7.2 (August 17, 2017) +- [FIX] Fix typings.d.ts according to #185 + +### 1.7.1 (July 13, 2017) +- [ADD] Fix detecting of Tablet PC as tablet (#183) + +### 1.7.0 (May 18, 2017) +- [ADD] Add OS version support for Windows and macOS (#178) + +### 1.6.0 (December 5, 2016) +- [ADD] Add some tests for Windows devices (#89) +- [ADD] Add `root` to initialization process (#170) +- [FIX] Upgrade .travis.yml config + +### 1.5.0 (October 31, 2016) +- [ADD] Throw an error when `minVersion` map has not a string as a version and fix readme (#165) +- [FIX] Fix truly detection of Windows Phones (#167) + +### 1.4.6 (September 19, 2016) +- [FIX] Fix mobile Opera's version detection on Android +- [FIX] Fix typescript typings — add `mobile` and `tablet` flags +- [DOC] Fix description of `bowser.check` + +### 1.4.5 (August 30, 2016) + +- [FIX] Add support of Samsung Internet for Android +- [FIX] Fix case when `navigator.userAgent` is `undefined` +- [DOC] Add information about `strictMode` in `check` function +- [DOC] Consistent use of `bowser` variable in the README + +### 1.4.4 (August 10, 2016) + +- [FIX] Fix AMD `define` call — pass name to the function + +### 1.4.3 (July 27, 2016) + +- [FIX] Fix error `Object doesn't support this property or method` on IE8 + +### 1.4.2 (July 26, 2016) + +- [FIX] Fix missing `isUnsupportedBrowser` in typings description +- [DOC] Fix `check`'s declaration in README + +### 1.4.1 (July 7, 2016) + +- [FIX] Fix `strictMode` logic for `isUnsupportedBrowser` + +### 1.4.0 (June 28, 2016) + +- [FEATURE] Add `bowser.compareVersions` method +- [FEATURE] Add `bowser.isUnsupportedBrowser` method +- [FEATURE] Add `bowser.check` method 
+- [DOC] Changelog started +- [DOC] Add API section to README +- [FIX] Fix detection of browser type (A/C/X) for Chromium diff --git a/amplify/functions/deleteDocument/node_modules/bowser/LICENSE b/amplify/functions/deleteDocument/node_modules/bowser/LICENSE new file mode 100644 index 0000000..94085f0 --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/bowser/LICENSE @@ -0,0 +1,39 @@ +Copyright 2015, Dustin Diaz (the "Original Author") +All rights reserved. + +MIT License + +Permission is hereby granted, free of charge, to any person +obtaining a copy of this software and associated documentation +files (the "Software"), to deal in the Software without +restriction, including without limitation the rights to use, +copy, modify, merge, publish, distribute, sublicense, and/or sell +copies of the Software, and to permit persons to whom the +Software is furnished to do so, subject to the following +conditions: + +The above copyright notice and this permission notice shall be +included in all copies or substantial portions of the Software. + +Distributions of all or part of the Software intended to be used +by the recipients as they would use the unmodified Software, +containing modifications that substantially alter, remove, or +disable functionality of the Software, outside of the documented +configuration mechanisms provided by the Software, shall be +modified such that the Original Author's bug reporting email +addresses and urls are either replaced with the contact information +of the parties responsible for the changes, or removed entirely. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, +EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES +OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND +NONINFRINGEMENT. 
IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT +HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, +WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING +FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR +OTHER DEALINGS IN THE SOFTWARE. + + +Except where noted, this license applies to any and all software +programs and associated documentation files created by the +Original Author, when distributed with the Software. diff --git a/amplify/functions/deleteDocument/node_modules/bowser/README.md b/amplify/functions/deleteDocument/node_modules/bowser/README.md new file mode 100644 index 0000000..8f5f915 --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/bowser/README.md @@ -0,0 +1,179 @@ +## Bowser +A small, fast and rich-API browser/platform/engine detector for both browser and node. +- **Small.** Use plain ES5-version which is ~4.8kB gzipped. +- **Optimized.** Use only those parsers you need — it doesn't do useless work. +- **Multi-platform.** It's browser- and node-ready, so you can use it in any environment. + +Don't hesitate to support the project on Github or [OpenCollective](https://opencollective.com/bowser) if you like it ❤️ Also, contributors are always welcome! 
+ +[![Financial Contributors on Open Collective](https://opencollective.com/bowser/all/badge.svg?label=financial+contributors)](https://opencollective.com/bowser) [![Build Status](https://travis-ci.org/lancedikson/bowser.svg?branch=master)](https://travis-ci.org/lancedikson/bowser/) [![Greenkeeper badge](https://badges.greenkeeper.io/lancedikson/bowser.svg)](https://greenkeeper.io/) [![Coverage Status](https://coveralls.io/repos/github/lancedikson/bowser/badge.svg?branch=master)](https://coveralls.io/github/lancedikson/bowser?branch=master) ![Downloads](https://img.shields.io/npm/dm/bowser) + +# Contents +- [Overview](#overview) +- [Use cases](#use-cases) +- [Advanced usage](#advanced-usage) +- [How can I help?](#contributing) + +# Overview + +The library is made to help to detect what browser your user has and gives you a convenient API to filter the users somehow depending on their browsers. Check it out on this page: https://bowser-js.github.io/bowser-online/. + +### ⚠️ Version 2.0 breaking changes ⚠️ + +Version 2.0 has drastically changed the API. All available methods are on the [docs page](https://lancedikson.github.io/bowser/docs). + +_For legacy code, check out the [1.x](https://github.com/lancedikson/bowser/tree/v1.x) branch and install it through `npm install bowser@1.9.4`._ + +# Use cases + +First of all, require the library. This is a UMD Module, so it will work for AMD, TypeScript, ES6, and CommonJS module systems. + +```javascript +const Bowser = require("bowser"); // CommonJS + +import * as Bowser from "bowser"; // TypeScript + +import Bowser from "bowser"; // ES6 (and TypeScript with --esModuleInterop enabled) +``` + +By default, the exported version is the *ES5 transpiled version*, which **do not** include any polyfills. + +In case you don't use your own `babel-polyfill` you may need to have pre-built bundle with all needed polyfills. +So, for you it's suitable to require bowser like this: `require('bowser/bundled')`. 
+As the result, you get a ES5 version of bowser with `babel-polyfill` bundled together. + +You may need to use the source files, so they will be available in the package as well. + +## Browser props detection + +Often we need to pick users' browser properties such as the name, the version, the rendering engine and so on. Here is an example how to do it with Bowser: + +```javascript +const browser = Bowser.getParser(window.navigator.userAgent); + +console.log(`The current browser name is "${browser.getBrowserName()}"`); +// The current browser name is "Internet Explorer" +``` + +or + +```javascript +const browser = Bowser.getParser(window.navigator.userAgent); +console.log(browser.getBrowser()); + +// outputs +{ + name: "Internet Explorer" + version: "11.0" +} +``` + +or + +```javascript +console.log(Bowser.parse(window.navigator.userAgent)); + +// outputs +{ + browser: { + name: "Internet Explorer" + version: "11.0" + }, + os: { + name: "Windows" + version: "NT 6.3" + versionName: "8.1" + }, + platform: { + type: "desktop" + }, + engine: { + name: "Trident" + version: "7.0" + } +} +``` + + +## Filtering browsers + +You could want to filter some particular browsers to provide any special support for them or make any workarounds. 
+It could look like this: + +```javascript +const browser = Bowser.getParser(window.navigator.userAgent); +const isValidBrowser = browser.satisfies({ + // declare browsers per OS + windows: { + "internet explorer": ">10", + }, + macos: { + safari: ">10.1" + }, + + // per platform (mobile, desktop or tablet) + mobile: { + safari: '>=9', + 'android browser': '>3.10' + }, + + // or in general + chrome: "~20.1.1432", + firefox: ">31", + opera: ">=22", + + // also supports equality operator + chrome: "=20.1.1432", // will match particular build only + + // and loose-equality operator + chrome: "~20", // will match any 20.* sub-version + chrome: "~20.1" // will match any 20.1.* sub-version (20.1.19 as well as 20.1.12.42-alpha.1) +}); +``` + +Settings for any particular OS or platform has more priority and redefines settings of standalone browsers. +Thus, you can define OS or platform specific rules and they will have more priority in the end. + +More of API and possibilities you will find in the `docs` folder. + +### Browser names for `.satisfies()` + +By default you are supposed to use the full browser name for `.satisfies`. +But, there's a short way to define a browser using short aliases. The full +list of aliases can be found in [the file](src/constants.js). + +## Similar Projects +* [Kong](https://github.com/BigBadBleuCheese/Kong) - A C# port of Bowser. + +## Contributors + +### Code Contributors + +This project exists thanks to all the people who contribute. [[Contribute](CONTRIBUTING.md)]. + + +### Financial Contributors + +Become a financial contributor and help us sustain our community. [[Contribute](https://opencollective.com/bowser/contribute)] + +#### Individuals + + + +#### Organizations + +Support this project with your organization. Your logo will show up here with a link to your website. [[Contribute](https://opencollective.com/bowser/contribute)] + + + + + + + + + + + + +## License +Licensed as MIT. 
All rights not explicitly granted in the MIT license are reserved. See the included LICENSE file for more details. diff --git a/amplify/functions/deleteDocument/node_modules/bowser/bundled.js b/amplify/functions/deleteDocument/node_modules/bowser/bundled.js new file mode 100644 index 0000000..066ac40 --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/bowser/bundled.js @@ -0,0 +1 @@ +!function(t,n){"object"==typeof exports&&"object"==typeof module?module.exports=n():"function"==typeof define&&define.amd?define([],n):"object"==typeof exports?exports.bowser=n():t.bowser=n()}(this,(function(){return function(t){var n={};function e(r){if(n[r])return n[r].exports;var i=n[r]={i:r,l:!1,exports:{}};return t[r].call(i.exports,i,i.exports,e),i.l=!0,i.exports}return e.m=t,e.c=n,e.d=function(t,n,r){e.o(t,n)||Object.defineProperty(t,n,{enumerable:!0,get:r})},e.r=function(t){"undefined"!=typeof Symbol&&Symbol.toStringTag&&Object.defineProperty(t,Symbol.toStringTag,{value:"Module"}),Object.defineProperty(t,"__esModule",{value:!0})},e.t=function(t,n){if(1&n&&(t=e(t)),8&n)return t;if(4&n&&"object"==typeof t&&t&&t.__esModule)return t;var r=Object.create(null);if(e.r(r),Object.defineProperty(r,"default",{enumerable:!0,value:t}),2&n&&"string"!=typeof t)for(var i in t)e.d(r,i,function(n){return t[n]}.bind(null,i));return r},e.n=function(t){var n=t&&t.__esModule?function(){return t.default}:function(){return t};return e.d(n,"a",n),n},e.o=function(t,n){return Object.prototype.hasOwnProperty.call(t,n)},e.p="",e(e.s=129)}([function(t,n,e){var r=e(1),i=e(7),o=e(14),u=e(11),a=e(19),c=function(t,n,e){var s,f,l,h,d=t&c.F,p=t&c.G,v=t&c.S,g=t&c.P,y=t&c.B,m=p?r:v?r[n]||(r[n]={}):(r[n]||{}).prototype,b=p?i:i[n]||(i[n]={}),S=b.prototype||(b.prototype={});for(s in p&&(e=n),e)l=((f=!d&&m&&void 0!==m[s])?m:e)[s],h=y&&f?a(l,r):g&&"function"==typeof 
l?a(Function.call,l):l,m&&u(m,s,l,t&c.U),b[s]!=l&&o(b,s,h),g&&S[s]!=l&&(S[s]=l)};r.core=i,c.F=1,c.G=2,c.S=4,c.P=8,c.B=16,c.W=32,c.U=64,c.R=128,t.exports=c},function(t,n){var e=t.exports="undefined"!=typeof window&&window.Math==Math?window:"undefined"!=typeof self&&self.Math==Math?self:Function("return this")();"number"==typeof __g&&(__g=e)},function(t,n){t.exports=function(t){try{return!!t()}catch(t){return!0}}},function(t,n,e){var r=e(4);t.exports=function(t){if(!r(t))throw TypeError(t+" is not an object!");return t}},function(t,n){t.exports=function(t){return"object"==typeof t?null!==t:"function"==typeof t}},function(t,n,e){var r=e(50)("wks"),i=e(31),o=e(1).Symbol,u="function"==typeof o;(t.exports=function(t){return r[t]||(r[t]=u&&o[t]||(u?o:i)("Symbol."+t))}).store=r},function(t,n,e){var r=e(21),i=Math.min;t.exports=function(t){return t>0?i(r(t),9007199254740991):0}},function(t,n){var e=t.exports={version:"2.6.9"};"number"==typeof __e&&(__e=e)},function(t,n,e){t.exports=!e(2)((function(){return 7!=Object.defineProperty({},"a",{get:function(){return 7}}).a}))},function(t,n,e){var r=e(3),i=e(96),o=e(28),u=Object.defineProperty;n.f=e(8)?Object.defineProperty:function(t,n,e){if(r(t),n=o(n,!0),r(e),i)try{return u(t,n,e)}catch(t){}if("get"in e||"set"in e)throw TypeError("Accessors not supported!");return"value"in e&&(t[n]=e.value),t}},function(t,n,e){var r=e(26);t.exports=function(t){return Object(r(t))}},function(t,n,e){var r=e(1),i=e(14),o=e(13),u=e(31)("src"),a=e(134),c=(""+a).split("toString");e(7).inspectSource=function(t){return a.call(t)},(t.exports=function(t,n,e,a){var s="function"==typeof e;s&&(o(e,"name")||i(e,"name",n)),t[n]!==e&&(s&&(o(e,u)||i(e,u,t[n]?""+t[n]:c.join(String(n)))),t===r?t[n]=e:a?t[n]?t[n]=e:i(t,n,e):(delete t[n],i(t,n,e)))})(Function.prototype,"toString",(function(){return"function"==typeof this&&this[u]||a.call(this)}))},function(t,n,e){var r=e(0),i=e(2),o=e(26),u=/"/g,a=function(t,n,e,r){var i=String(o(t)),a="<"+n;return""!==e&&(a+=" 
"+e+'="'+String(r).replace(u,""")+'"'),a+">"+i+""};t.exports=function(t,n){var e={};e[t]=n(a),r(r.P+r.F*i((function(){var n=""[t]('"');return n!==n.toLowerCase()||n.split('"').length>3})),"String",e)}},function(t,n){var e={}.hasOwnProperty;t.exports=function(t,n){return e.call(t,n)}},function(t,n,e){var r=e(9),i=e(30);t.exports=e(8)?function(t,n,e){return r.f(t,n,i(1,e))}:function(t,n,e){return t[n]=e,t}},function(t,n,e){var r=e(46),i=e(26);t.exports=function(t){return r(i(t))}},function(t,n,e){"use strict";var r=e(2);t.exports=function(t,n){return!!t&&r((function(){n?t.call(null,(function(){}),1):t.call(null)}))}},function(t,n,e){"use strict";n.__esModule=!0,n.default=void 0;var r=e(18),i=function(){function t(){}return t.getFirstMatch=function(t,n){var e=n.match(t);return e&&e.length>0&&e[1]||""},t.getSecondMatch=function(t,n){var e=n.match(t);return e&&e.length>1&&e[2]||""},t.matchAndReturnConst=function(t,n,e){if(t.test(n))return e},t.getWindowsVersionName=function(t){switch(t){case"NT":return"NT";case"XP":return"XP";case"NT 5.0":return"2000";case"NT 5.1":return"XP";case"NT 5.2":return"2003";case"NT 6.0":return"Vista";case"NT 6.1":return"7";case"NT 6.2":return"8";case"NT 6.3":return"8.1";case"NT 10.0":return"10";default:return}},t.getMacOSVersionName=function(t){var n=t.split(".").splice(0,2).map((function(t){return parseInt(t,10)||0}));if(n.push(0),10===n[0])switch(n[1]){case 5:return"Leopard";case 6:return"Snow Leopard";case 7:return"Lion";case 8:return"Mountain Lion";case 9:return"Mavericks";case 10:return"Yosemite";case 11:return"El Capitan";case 12:return"Sierra";case 13:return"High Sierra";case 14:return"Mojave";case 15:return"Catalina";default:return}},t.getAndroidVersionName=function(t){var n=t.split(".").splice(0,2).map((function(t){return parseInt(t,10)||0}));if(n.push(0),!(1===n[0]&&n[1]<5))return 
1===n[0]&&n[1]<6?"Cupcake":1===n[0]&&n[1]>=6?"Donut":2===n[0]&&n[1]<2?"Eclair":2===n[0]&&2===n[1]?"Froyo":2===n[0]&&n[1]>2?"Gingerbread":3===n[0]?"Honeycomb":4===n[0]&&n[1]<1?"Ice Cream Sandwich":4===n[0]&&n[1]<4?"Jelly Bean":4===n[0]&&n[1]>=4?"KitKat":5===n[0]?"Lollipop":6===n[0]?"Marshmallow":7===n[0]?"Nougat":8===n[0]?"Oreo":9===n[0]?"Pie":void 0},t.getVersionPrecision=function(t){return t.split(".").length},t.compareVersions=function(n,e,r){void 0===r&&(r=!1);var i=t.getVersionPrecision(n),o=t.getVersionPrecision(e),u=Math.max(i,o),a=0,c=t.map([n,e],(function(n){var e=u-t.getVersionPrecision(n),r=n+new Array(e+1).join(".0");return t.map(r.split("."),(function(t){return new Array(20-t.length).join("0")+t})).reverse()}));for(r&&(a=u-Math.min(i,o)),u-=1;u>=a;){if(c[0][u]>c[1][u])return 1;if(c[0][u]===c[1][u]){if(u===a)return 0;u-=1}else if(c[0][u]1?i-1:0),u=1;u0?r:e)(t)}},function(t,n,e){var r=e(47),i=e(30),o=e(15),u=e(28),a=e(13),c=e(96),s=Object.getOwnPropertyDescriptor;n.f=e(8)?s:function(t,n){if(t=o(t),n=u(n,!0),c)try{return s(t,n)}catch(t){}if(a(t,n))return i(!r.f.call(t,n),t[n])}},function(t,n,e){var r=e(0),i=e(7),o=e(2);t.exports=function(t,n){var e=(i.Object||{})[t]||Object[t],u={};u[t]=n(e),r(r.S+r.F*o((function(){e(1)})),"Object",u)}},function(t,n,e){var r=e(19),i=e(46),o=e(10),u=e(6),a=e(112);t.exports=function(t,n){var e=1==t,c=2==t,s=3==t,f=4==t,l=6==t,h=5==t||l,d=n||a;return function(n,a,p){for(var v,g,y=o(n),m=i(y),b=r(a,p,3),S=u(m.length),w=0,_=e?d(n,S):c?d(n,0):void 0;S>w;w++)if((h||w in m)&&(g=b(v=m[w],w,y),t))if(e)_[w]=g;else if(g)switch(t){case 3:return!0;case 5:return v;case 6:return w;case 2:_.push(v)}else if(f)return!1;return l?-1:s||f?f:_}}},function(t,n){var e={}.toString;t.exports=function(t){return e.call(t).slice(8,-1)}},function(t,n){t.exports=function(t){if(null==t)throw TypeError("Can't call method on "+t);return t}},function(t,n,e){"use strict";if(e(8)){var 
r=e(32),i=e(1),o=e(2),u=e(0),a=e(61),c=e(86),s=e(19),f=e(44),l=e(30),h=e(14),d=e(45),p=e(21),v=e(6),g=e(123),y=e(34),m=e(28),b=e(13),S=e(48),w=e(4),_=e(10),M=e(78),x=e(35),P=e(37),O=e(36).f,F=e(80),A=e(31),E=e(5),N=e(24),R=e(51),k=e(49),T=e(82),I=e(42),j=e(54),L=e(43),B=e(81),C=e(114),W=e(9),V=e(22),G=W.f,D=V.f,U=i.RangeError,z=i.TypeError,q=i.Uint8Array,K=Array.prototype,Y=c.ArrayBuffer,Q=c.DataView,H=N(0),J=N(2),X=N(3),Z=N(4),$=N(5),tt=N(6),nt=R(!0),et=R(!1),rt=T.values,it=T.keys,ot=T.entries,ut=K.lastIndexOf,at=K.reduce,ct=K.reduceRight,st=K.join,ft=K.sort,lt=K.slice,ht=K.toString,dt=K.toLocaleString,pt=E("iterator"),vt=E("toStringTag"),gt=A("typed_constructor"),yt=A("def_constructor"),mt=a.CONSTR,bt=a.TYPED,St=a.VIEW,wt=N(1,(function(t,n){return Ot(k(t,t[yt]),n)})),_t=o((function(){return 1===new q(new Uint16Array([1]).buffer)[0]})),Mt=!!q&&!!q.prototype.set&&o((function(){new q(1).set({})})),xt=function(t,n){var e=p(t);if(e<0||e%n)throw U("Wrong offset!");return e},Pt=function(t){if(w(t)&&bt in t)return t;throw z(t+" is not a typed array!")},Ot=function(t,n){if(!(w(t)&> in t))throw z("It is not a typed array constructor!");return new t(n)},Ft=function(t,n){return At(k(t,t[yt]),n)},At=function(t,n){for(var e=0,r=n.length,i=Ot(t,r);r>e;)i[e]=n[e++];return i},Et=function(t,n,e){G(t,n,{get:function(){return this._d[e]}})},Nt=function(t){var n,e,r,i,o,u,a=_(t),c=arguments.length,f=c>1?arguments[1]:void 0,l=void 0!==f,h=F(a);if(null!=h&&!M(h)){for(u=h.call(a),r=[],n=0;!(o=u.next()).done;n++)r.push(o.value);a=r}for(l&&c>2&&(f=s(f,arguments[2],2)),n=0,e=v(a.length),i=Ot(this,e);e>n;n++)i[n]=l?f(a[n],n):a[n];return i},Rt=function(){for(var t=0,n=arguments.length,e=Ot(this,n);n>t;)e[t]=arguments[t++];return e},kt=!!q&&o((function(){dt.call(new q(1))})),Tt=function(){return dt.apply(kt?lt.call(Pt(this)):Pt(this),arguments)},It={copyWithin:function(t,n){return C.call(Pt(this),t,n,arguments.length>2?arguments[2]:void 0)},every:function(t){return 
Z(Pt(this),t,arguments.length>1?arguments[1]:void 0)},fill:function(t){return B.apply(Pt(this),arguments)},filter:function(t){return Ft(this,J(Pt(this),t,arguments.length>1?arguments[1]:void 0))},find:function(t){return $(Pt(this),t,arguments.length>1?arguments[1]:void 0)},findIndex:function(t){return tt(Pt(this),t,arguments.length>1?arguments[1]:void 0)},forEach:function(t){H(Pt(this),t,arguments.length>1?arguments[1]:void 0)},indexOf:function(t){return et(Pt(this),t,arguments.length>1?arguments[1]:void 0)},includes:function(t){return nt(Pt(this),t,arguments.length>1?arguments[1]:void 0)},join:function(t){return st.apply(Pt(this),arguments)},lastIndexOf:function(t){return ut.apply(Pt(this),arguments)},map:function(t){return wt(Pt(this),t,arguments.length>1?arguments[1]:void 0)},reduce:function(t){return at.apply(Pt(this),arguments)},reduceRight:function(t){return ct.apply(Pt(this),arguments)},reverse:function(){for(var t,n=Pt(this).length,e=Math.floor(n/2),r=0;r1?arguments[1]:void 0)},sort:function(t){return ft.call(Pt(this),t)},subarray:function(t,n){var e=Pt(this),r=e.length,i=y(t,r);return new(k(e,e[yt]))(e.buffer,e.byteOffset+i*e.BYTES_PER_ELEMENT,v((void 0===n?r:y(n,r))-i))}},jt=function(t,n){return Ft(this,lt.call(Pt(this),t,n))},Lt=function(t){Pt(this);var n=xt(arguments[1],1),e=this.length,r=_(t),i=v(r.length),o=0;if(i+n>e)throw U("Wrong length!");for(;o255?255:255&r),i.v[d](e*n+i.o,r,_t)}(this,e,t)},enumerable:!0})};b?(p=e((function(t,e,r,i){f(t,p,s,"_d");var o,u,a,c,l=0,d=0;if(w(e)){if(!(e instanceof Y||"ArrayBuffer"==(c=S(e))||"SharedArrayBuffer"==c))return bt in e?At(p,e):Nt.call(p,e);o=e,d=xt(r,n);var y=e.byteLength;if(void 0===i){if(y%n)throw U("Wrong length!");if((u=y-d)<0)throw U("Wrong length!")}else if((u=v(i)*n)+d>y)throw U("Wrong length!");a=u/n}else a=g(e),o=new Y(u=a*n);for(h(t,"_d",{b:o,o:d,l:u,e:a,v:new Q(o)});ldocument.F=Object<\/script>"),t.close(),c=t.F;r--;)delete c.prototype[o[r]];return c()};t.exports=Object.create||function(t,n){var 
e;return null!==t?(a.prototype=r(t),e=new a,a.prototype=null,e[u]=t):e=c(),void 0===n?e:i(e,n)}},function(t,n,e){var r=e(98),i=e(65).concat("length","prototype");n.f=Object.getOwnPropertyNames||function(t){return r(t,i)}},function(t,n,e){var r=e(13),i=e(10),o=e(64)("IE_PROTO"),u=Object.prototype;t.exports=Object.getPrototypeOf||function(t){return t=i(t),r(t,o)?t[o]:"function"==typeof t.constructor&&t instanceof t.constructor?t.constructor.prototype:t instanceof Object?u:null}},function(t,n,e){var r=e(5)("unscopables"),i=Array.prototype;null==i[r]&&e(14)(i,r,{}),t.exports=function(t){i[r][t]=!0}},function(t,n,e){var r=e(4);t.exports=function(t,n){if(!r(t)||t._t!==n)throw TypeError("Incompatible receiver, "+n+" required!");return t}},function(t,n,e){var r=e(9).f,i=e(13),o=e(5)("toStringTag");t.exports=function(t,n,e){t&&!i(t=e?t:t.prototype,o)&&r(t,o,{configurable:!0,value:n})}},function(t,n,e){var r=e(0),i=e(26),o=e(2),u=e(68),a="["+u+"]",c=RegExp("^"+a+a+"*"),s=RegExp(a+a+"*$"),f=function(t,n,e){var i={},a=o((function(){return!!u[t]()||"​…"!="​…"[t]()})),c=i[t]=a?n(l):u[t];e&&(i[e]=c),r(r.P+r.F*a,"String",i)},l=f.trim=function(t,n){return t=String(i(t)),1&n&&(t=t.replace(c,"")),2&n&&(t=t.replace(s,"")),t};t.exports=f},function(t,n){t.exports={}},function(t,n,e){"use strict";var r=e(1),i=e(9),o=e(8),u=e(5)("species");t.exports=function(t){var n=r[t];o&&n&&!n[u]&&i.f(n,u,{configurable:!0,get:function(){return this}})}},function(t,n){t.exports=function(t,n,e,r){if(!(t instanceof n)||void 0!==r&&r in t)throw TypeError(e+": incorrect invocation!");return t}},function(t,n,e){var r=e(11);t.exports=function(t,n,e){for(var i in n)r(t,i,n[i],e);return t}},function(t,n,e){var r=e(25);t.exports=Object("z").propertyIsEnumerable(0)?Object:function(t){return"String"==r(t)?t.split(""):Object(t)}},function(t,n){n.f={}.propertyIsEnumerable},function(t,n,e){var r=e(25),i=e(5)("toStringTag"),o="Arguments"==r(function(){return arguments}());t.exports=function(t){var n,e,u;return void 
0===t?"Undefined":null===t?"Null":"string"==typeof(e=function(t,n){try{return t[n]}catch(t){}}(n=Object(t),i))?e:o?r(n):"Object"==(u=r(n))&&"function"==typeof n.callee?"Arguments":u}},function(t,n,e){var r=e(3),i=e(20),o=e(5)("species");t.exports=function(t,n){var e,u=r(t).constructor;return void 0===u||null==(e=r(u)[o])?n:i(e)}},function(t,n,e){var r=e(7),i=e(1),o=i["__core-js_shared__"]||(i["__core-js_shared__"]={});(t.exports=function(t,n){return o[t]||(o[t]=void 0!==n?n:{})})("versions",[]).push({version:r.version,mode:e(32)?"pure":"global",copyright:"© 2019 Denis Pushkarev (zloirock.ru)"})},function(t,n,e){var r=e(15),i=e(6),o=e(34);t.exports=function(t){return function(n,e,u){var a,c=r(n),s=i(c.length),f=o(u,s);if(t&&e!=e){for(;s>f;)if((a=c[f++])!=a)return!0}else for(;s>f;f++)if((t||f in c)&&c[f]===e)return t||f||0;return!t&&-1}}},function(t,n){n.f=Object.getOwnPropertySymbols},function(t,n,e){var r=e(25);t.exports=Array.isArray||function(t){return"Array"==r(t)}},function(t,n,e){var r=e(5)("iterator"),i=!1;try{var o=[7][r]();o.return=function(){i=!0},Array.from(o,(function(){throw 2}))}catch(t){}t.exports=function(t,n){if(!n&&!i)return!1;var e=!1;try{var o=[7],u=o[r]();u.next=function(){return{done:e=!0}},o[r]=function(){return u},t(o)}catch(t){}return e}},function(t,n,e){"use strict";var r=e(3);t.exports=function(){var t=r(this),n="";return t.global&&(n+="g"),t.ignoreCase&&(n+="i"),t.multiline&&(n+="m"),t.unicode&&(n+="u"),t.sticky&&(n+="y"),n}},function(t,n,e){"use strict";var r=e(48),i=RegExp.prototype.exec;t.exports=function(t,n){var e=t.exec;if("function"==typeof e){var o=e.call(t,n);if("object"!=typeof o)throw new TypeError("RegExp exec method returned something other than an Object or null");return o}if("RegExp"!==r(t))throw new TypeError("RegExp#exec called on incompatible receiver");return i.call(t,n)}},function(t,n,e){"use strict";e(116);var r=e(11),i=e(14),o=e(2),u=e(26),a=e(5),c=e(83),s=a("species"),f=!o((function(){var t=/./;return 
t.exec=function(){var t=[];return t.groups={a:"7"},t},"7"!=="".replace(t,"$")})),l=function(){var t=/(?:)/,n=t.exec;t.exec=function(){return n.apply(this,arguments)};var e="ab".split(t);return 2===e.length&&"a"===e[0]&&"b"===e[1]}();t.exports=function(t,n,e){var h=a(t),d=!o((function(){var n={};return n[h]=function(){return 7},7!=""[t](n)})),p=d?!o((function(){var n=!1,e=/a/;return e.exec=function(){return n=!0,null},"split"===t&&(e.constructor={},e.constructor[s]=function(){return e}),e[h](""),!n})):void 0;if(!d||!p||"replace"===t&&!f||"split"===t&&!l){var v=/./[h],g=e(u,h,""[t],(function(t,n,e,r,i){return n.exec===c?d&&!i?{done:!0,value:v.call(n,e,r)}:{done:!0,value:t.call(e,n,r)}:{done:!1}})),y=g[0],m=g[1];r(String.prototype,t,y),i(RegExp.prototype,h,2==n?function(t,n){return m.call(t,this,n)}:function(t){return m.call(t,this)})}}},function(t,n,e){var r=e(19),i=e(111),o=e(78),u=e(3),a=e(6),c=e(80),s={},f={};(n=t.exports=function(t,n,e,l,h){var d,p,v,g,y=h?function(){return t}:c(t),m=r(e,l,n?2:1),b=0;if("function"!=typeof y)throw TypeError(t+" is not iterable!");if(o(y)){for(d=a(t.length);d>b;b++)if((g=n?m(u(p=t[b])[0],p[1]):m(t[b]))===s||g===f)return g}else for(v=y.call(t);!(p=v.next()).done;)if((g=i(v,m,p.value,n))===s||g===f)return g}).BREAK=s,n.RETURN=f},function(t,n,e){var r=e(1).navigator;t.exports=r&&r.userAgent||""},function(t,n,e){"use strict";var r=e(1),i=e(0),o=e(11),u=e(45),a=e(29),c=e(58),s=e(44),f=e(4),l=e(2),h=e(54),d=e(40),p=e(69);t.exports=function(t,n,e,v,g,y){var m=r[t],b=m,S=g?"set":"add",w=b&&b.prototype,_={},M=function(t){var n=w[t];o(w,t,"delete"==t?function(t){return!(y&&!f(t))&&n.call(this,0===t?0:t)}:"has"==t?function(t){return!(y&&!f(t))&&n.call(this,0===t?0:t)}:"get"==t?function(t){return y&&!f(t)?void 0:n.call(this,0===t?0:t)}:"add"==t?function(t){return n.call(this,0===t?0:t),this}:function(t,e){return n.call(this,0===t?0:t,e),this})};if("function"==typeof b&&(y||w.forEach&&!l((function(){(new b).entries().next()})))){var x=new 
b,P=x[S](y?{}:-0,1)!=x,O=l((function(){x.has(1)})),F=h((function(t){new b(t)})),A=!y&&l((function(){for(var t=new b,n=5;n--;)t[S](n,n);return!t.has(-0)}));F||((b=n((function(n,e){s(n,b,t);var r=p(new m,n,b);return null!=e&&c(e,g,r[S],r),r}))).prototype=w,w.constructor=b),(O||A)&&(M("delete"),M("has"),g&&M("get")),(A||P)&&M(S),y&&w.clear&&delete w.clear}else b=v.getConstructor(n,t,g,S),u(b.prototype,e),a.NEED=!0;return d(b,t),_[t]=b,i(i.G+i.W+i.F*(b!=m),_),y||v.setStrong(b,t,g),b}},function(t,n,e){for(var r,i=e(1),o=e(14),u=e(31),a=u("typed_array"),c=u("view"),s=!(!i.ArrayBuffer||!i.DataView),f=s,l=0,h="Int8Array,Uint8Array,Uint8ClampedArray,Int16Array,Uint16Array,Int32Array,Uint32Array,Float32Array,Float64Array".split(",");l<9;)(r=i[h[l++]])?(o(r.prototype,a,!0),o(r.prototype,c,!0)):f=!1;t.exports={ABV:s,CONSTR:f,TYPED:a,VIEW:c}},function(t,n,e){var r=e(4),i=e(1).document,o=r(i)&&r(i.createElement);t.exports=function(t){return o?i.createElement(t):{}}},function(t,n,e){n.f=e(5)},function(t,n,e){var r=e(50)("keys"),i=e(31);t.exports=function(t){return r[t]||(r[t]=i(t))}},function(t,n){t.exports="constructor,hasOwnProperty,isPrototypeOf,propertyIsEnumerable,toLocaleString,toString,valueOf".split(",")},function(t,n,e){var r=e(1).document;t.exports=r&&r.documentElement},function(t,n,e){var r=e(4),i=e(3),o=function(t,n){if(i(t),!r(n)&&null!==n)throw TypeError(n+": can't set as prototype!")};t.exports={set:Object.setPrototypeOf||("__proto__"in{}?function(t,n,r){try{(r=e(19)(Function.call,e(22).f(Object.prototype,"__proto__").set,2))(t,[]),n=!(t instanceof Array)}catch(t){n=!0}return function(t,e){return o(t,e),n?t.__proto__=e:r(t,e),t}}({},!1):void 0),check:o}},function(t,n){t.exports="\t\n\v\f\r   ᠎              \u2028\u2029\ufeff"},function(t,n,e){var r=e(4),i=e(67).set;t.exports=function(t,n,e){var o,u=n.constructor;return u!==e&&"function"==typeof u&&(o=u.prototype)!==e.prototype&&r(o)&&i&&i(t,o),t}},function(t,n,e){"use strict";var 
r=e(21),i=e(26);t.exports=function(t){var n=String(i(this)),e="",o=r(t);if(o<0||o==1/0)throw RangeError("Count can't be negative");for(;o>0;(o>>>=1)&&(n+=n))1&o&&(e+=n);return e}},function(t,n){t.exports=Math.sign||function(t){return 0==(t=+t)||t!=t?t:t<0?-1:1}},function(t,n){var e=Math.expm1;t.exports=!e||e(10)>22025.465794806718||e(10)<22025.465794806718||-2e-17!=e(-2e-17)?function(t){return 0==(t=+t)?t:t>-1e-6&&t<1e-6?t+t*t/2:Math.exp(t)-1}:e},function(t,n,e){var r=e(21),i=e(26);t.exports=function(t){return function(n,e){var o,u,a=String(i(n)),c=r(e),s=a.length;return c<0||c>=s?t?"":void 0:(o=a.charCodeAt(c))<55296||o>56319||c+1===s||(u=a.charCodeAt(c+1))<56320||u>57343?t?a.charAt(c):o:t?a.slice(c,c+2):u-56320+(o-55296<<10)+65536}}},function(t,n,e){"use strict";var r=e(32),i=e(0),o=e(11),u=e(14),a=e(42),c=e(110),s=e(40),f=e(37),l=e(5)("iterator"),h=!([].keys&&"next"in[].keys()),d=function(){return this};t.exports=function(t,n,e,p,v,g,y){c(e,n,p);var m,b,S,w=function(t){if(!h&&t in P)return P[t];switch(t){case"keys":case"values":return function(){return new e(this,t)}}return function(){return new e(this,t)}},_=n+" Iterator",M="values"==v,x=!1,P=t.prototype,O=P[l]||P["@@iterator"]||v&&P[v],F=O||w(v),A=v?M?w("entries"):F:void 0,E="Array"==n&&P.entries||O;if(E&&(S=f(E.call(new t)))!==Object.prototype&&S.next&&(s(S,_,!0),r||"function"==typeof S[l]||u(S,l,d)),M&&O&&"values"!==O.name&&(x=!0,F=function(){return O.call(this)}),r&&!y||!h&&!x&&P[l]||u(P,l,F),a[n]=F,a[_]=d,v)if(m={values:M?F:w("values"),keys:g?F:w("keys"),entries:A},y)for(b in m)b in P||o(P,b,m[b]);else i(i.P+i.F*(h||x),n,m);return m}},function(t,n,e){var r=e(76),i=e(26);t.exports=function(t,n,e){if(r(n))throw TypeError("String#"+e+" doesn't accept regex!");return String(i(t))}},function(t,n,e){var r=e(4),i=e(25),o=e(5)("match");t.exports=function(t){var n;return r(t)&&(void 0!==(n=t[o])?!!n:"RegExp"==i(t))}},function(t,n,e){var r=e(5)("match");t.exports=function(t){var 
n=/./;try{"/./"[t](n)}catch(e){try{return n[r]=!1,!"/./"[t](n)}catch(t){}}return!0}},function(t,n,e){var r=e(42),i=e(5)("iterator"),o=Array.prototype;t.exports=function(t){return void 0!==t&&(r.Array===t||o[i]===t)}},function(t,n,e){"use strict";var r=e(9),i=e(30);t.exports=function(t,n,e){n in t?r.f(t,n,i(0,e)):t[n]=e}},function(t,n,e){var r=e(48),i=e(5)("iterator"),o=e(42);t.exports=e(7).getIteratorMethod=function(t){if(null!=t)return t[i]||t["@@iterator"]||o[r(t)]}},function(t,n,e){"use strict";var r=e(10),i=e(34),o=e(6);t.exports=function(t){for(var n=r(this),e=o(n.length),u=arguments.length,a=i(u>1?arguments[1]:void 0,e),c=u>2?arguments[2]:void 0,s=void 0===c?e:i(c,e);s>a;)n[a++]=t;return n}},function(t,n,e){"use strict";var r=e(38),i=e(115),o=e(42),u=e(15);t.exports=e(74)(Array,"Array",(function(t,n){this._t=u(t),this._i=0,this._k=n}),(function(){var t=this._t,n=this._k,e=this._i++;return!t||e>=t.length?(this._t=void 0,i(1)):i(0,"keys"==n?e:"values"==n?t[e]:[e,t[e]])}),"values"),o.Arguments=o.Array,r("keys"),r("values"),r("entries")},function(t,n,e){"use strict";var r,i,o=e(55),u=RegExp.prototype.exec,a=String.prototype.replace,c=u,s=(r=/a/,i=/b*/g,u.call(r,"a"),u.call(i,"a"),0!==r.lastIndex||0!==i.lastIndex),f=void 0!==/()??/.exec("")[1];(s||f)&&(c=function(t){var n,e,r,i,c=this;return f&&(e=new RegExp("^"+c.source+"$(?!\\s)",o.call(c))),s&&(n=c.lastIndex),r=u.call(c,t),s&&r&&(c.lastIndex=c.global?r.index+r[0].length:n),f&&r&&r.length>1&&a.call(r[0],e,(function(){for(i=1;ie;)n.push(arguments[e++]);return y[++g]=function(){a("function"==typeof t?t:Function(t),n)},r(g),g},d=function(t){delete y[t]},"process"==e(25)(l)?r=function(t){l.nextTick(u(m,t,1))}:v&&v.now?r=function(t){v.now(u(m,t,1))}:p?(o=(i=new p).port2,i.port1.onmessage=b,r=u(o.postMessage,o,1)):f.addEventListener&&"function"==typeof postMessage&&!f.importScripts?(r=function(t){f.postMessage(t+"","*")},f.addEventListener("message",b,!1)):r="onreadystatechange"in 
s("script")?function(t){c.appendChild(s("script")).onreadystatechange=function(){c.removeChild(this),m.call(t)}}:function(t){setTimeout(u(m,t,1),0)}),t.exports={set:h,clear:d}},function(t,n,e){"use strict";var r=e(1),i=e(8),o=e(32),u=e(61),a=e(14),c=e(45),s=e(2),f=e(44),l=e(21),h=e(6),d=e(123),p=e(36).f,v=e(9).f,g=e(81),y=e(40),m="prototype",b="Wrong index!",S=r.ArrayBuffer,w=r.DataView,_=r.Math,M=r.RangeError,x=r.Infinity,P=S,O=_.abs,F=_.pow,A=_.floor,E=_.log,N=_.LN2,R=i?"_b":"buffer",k=i?"_l":"byteLength",T=i?"_o":"byteOffset";function I(t,n,e){var r,i,o,u=new Array(e),a=8*e-n-1,c=(1<>1,f=23===n?F(2,-24)-F(2,-77):0,l=0,h=t<0||0===t&&1/t<0?1:0;for((t=O(t))!=t||t===x?(i=t!=t?1:0,r=c):(r=A(E(t)/N),t*(o=F(2,-r))<1&&(r--,o*=2),(t+=r+s>=1?f/o:f*F(2,1-s))*o>=2&&(r++,o/=2),r+s>=c?(i=0,r=c):r+s>=1?(i=(t*o-1)*F(2,n),r+=s):(i=t*F(2,s-1)*F(2,n),r=0));n>=8;u[l++]=255&i,i/=256,n-=8);for(r=r<0;u[l++]=255&r,r/=256,a-=8);return u[--l]|=128*h,u}function j(t,n,e){var r,i=8*e-n-1,o=(1<>1,a=i-7,c=e-1,s=t[c--],f=127&s;for(s>>=7;a>0;f=256*f+t[c],c--,a-=8);for(r=f&(1<<-a)-1,f>>=-a,a+=n;a>0;r=256*r+t[c],c--,a-=8);if(0===f)f=1-u;else{if(f===o)return r?NaN:s?-x:x;r+=F(2,n),f-=u}return(s?-1:1)*r*F(2,f-n)}function L(t){return t[3]<<24|t[2]<<16|t[1]<<8|t[0]}function B(t){return[255&t]}function C(t){return[255&t,t>>8&255]}function W(t){return[255&t,t>>8&255,t>>16&255,t>>24&255]}function V(t){return I(t,52,8)}function G(t){return I(t,23,4)}function D(t,n,e){v(t[m],n,{get:function(){return this[e]}})}function U(t,n,e,r){var i=d(+e);if(i+n>t[k])throw M(b);var o=t[R]._b,u=i+t[T],a=o.slice(u,u+n);return r?a:a.reverse()}function z(t,n,e,r,i,o){var u=d(+e);if(u+n>t[k])throw M(b);for(var a=t[R]._b,c=u+t[T],s=r(+i),f=0;fQ;)(q=Y[Q++])in S||a(S,q,P[q]);o||(K.constructor=S)}var H=new w(new S(2)),J=w[m].setInt8;H.setInt8(0,2147483648),H.setInt8(1,2147483649),!H.getInt8(0)&&H.getInt8(1)||c(w[m],{setInt8:function(t,n){J.call(this,t,n<<24>>24)},setUint8:function(t,n){J.call(this,t,n<<24>>24)}},!0)}else 
S=function(t){f(this,S,"ArrayBuffer");var n=d(t);this._b=g.call(new Array(n),0),this[k]=n},w=function(t,n,e){f(this,w,"DataView"),f(t,S,"DataView");var r=t[k],i=l(n);if(i<0||i>r)throw M("Wrong offset!");if(i+(e=void 0===e?r-i:h(e))>r)throw M("Wrong length!");this[R]=t,this[T]=i,this[k]=e},i&&(D(S,"byteLength","_l"),D(w,"buffer","_b"),D(w,"byteLength","_l"),D(w,"byteOffset","_o")),c(w[m],{getInt8:function(t){return U(this,1,t)[0]<<24>>24},getUint8:function(t){return U(this,1,t)[0]},getInt16:function(t){var n=U(this,2,t,arguments[1]);return(n[1]<<8|n[0])<<16>>16},getUint16:function(t){var n=U(this,2,t,arguments[1]);return n[1]<<8|n[0]},getInt32:function(t){return L(U(this,4,t,arguments[1]))},getUint32:function(t){return L(U(this,4,t,arguments[1]))>>>0},getFloat32:function(t){return j(U(this,4,t,arguments[1]),23,4)},getFloat64:function(t){return j(U(this,8,t,arguments[1]),52,8)},setInt8:function(t,n){z(this,1,t,B,n)},setUint8:function(t,n){z(this,1,t,B,n)},setInt16:function(t,n){z(this,2,t,C,n,arguments[2])},setUint16:function(t,n){z(this,2,t,C,n,arguments[2])},setInt32:function(t,n){z(this,4,t,W,n,arguments[2])},setUint32:function(t,n){z(this,4,t,W,n,arguments[2])},setFloat32:function(t,n){z(this,4,t,G,n,arguments[2])},setFloat64:function(t,n){z(this,8,t,V,n,arguments[2])}});y(S,"ArrayBuffer"),y(w,"DataView"),a(w[m],u.VIEW,!0),n.ArrayBuffer=S,n.DataView=w},function(t,n){var e=t.exports="undefined"!=typeof window&&window.Math==Math?window:"undefined"!=typeof self&&self.Math==Math?self:Function("return this")();"number"==typeof __g&&(__g=e)},function(t,n){t.exports=function(t){return"object"==typeof t?null!==t:"function"==typeof t}},function(t,n,e){t.exports=!e(128)((function(){return 7!=Object.defineProperty({},"a",{get:function(){return 7}}).a}))},function(t,n,e){"use strict";n.__esModule=!0,n.default=void 0;var r,i=(r=e(91))&&r.__esModule?r:{default:r},o=e(18);function u(t,n){for(var e=0;e0){var u=Object.keys(e),c=a.default.find(u,(function(t){return 
n.isOS(t)}));if(c){var s=this.satisfies(e[c]);if(void 0!==s)return s}var f=a.default.find(u,(function(t){return n.isPlatform(t)}));if(f){var l=this.satisfies(e[f]);if(void 0!==l)return l}}if(o>0){var h=Object.keys(i),d=a.default.find(h,(function(t){return n.isBrowser(t,!0)}));if(void 0!==d)return this.compareVersion(i[d])}},n.isBrowser=function(t,n){void 0===n&&(n=!1);var e=this.getBrowserName().toLowerCase(),r=t.toLowerCase(),i=a.default.getBrowserTypeByAlias(r);return n&&i&&(r=i.toLowerCase()),r===e},n.compareVersion=function(t){var n=[0],e=t,r=!1,i=this.getBrowserVersion();if("string"==typeof i)return">"===t[0]||"<"===t[0]?(e=t.substr(1),"="===t[1]?(r=!0,e=t.substr(2)):n=[],">"===t[0]?n.push(1):n.push(-1)):"="===t[0]?e=t.substr(1):"~"===t[0]&&(r=!0,e=t.substr(1)),n.indexOf(a.default.compareVersions(i,e,r))>-1},n.isOS=function(t){return this.getOSName(!0)===String(t).toLowerCase()},n.isPlatform=function(t){return this.getPlatformType(!0)===String(t).toLowerCase()},n.isEngine=function(t){return this.getEngineName(!0)===String(t).toLowerCase()},n.is=function(t,n){return void 0===n&&(n=!1),this.isBrowser(t,n)||this.isOS(t)||this.isPlatform(t)},n.some=function(t){var n=this;return void 0===t&&(t=[]),t.some((function(t){return n.is(t)}))},t}();n.default=s,t.exports=n.default},function(t,n,e){"use strict";n.__esModule=!0,n.default=void 0;var r,i=(r=e(17))&&r.__esModule?r:{default:r};var o=/version\/(\d+(\.?_?\d+)+)/i,u=[{test:[/googlebot/i],describe:function(t){var n={name:"Googlebot"},e=i.default.getFirstMatch(/googlebot\/(\d+(\.\d+))/i,t)||i.default.getFirstMatch(o,t);return e&&(n.version=e),n}},{test:[/opera/i],describe:function(t){var n={name:"Opera"},e=i.default.getFirstMatch(o,t)||i.default.getFirstMatch(/(?:opera)[\s/](\d+(\.?_?\d+)+)/i,t);return e&&(n.version=e),n}},{test:[/opr\/|opios/i],describe:function(t){var n={name:"Opera"},e=i.default.getFirstMatch(/(?:opr|opios)[\s/](\S+)/i,t)||i.default.getFirstMatch(o,t);return 
e&&(n.version=e),n}},{test:[/SamsungBrowser/i],describe:function(t){var n={name:"Samsung Internet for Android"},e=i.default.getFirstMatch(o,t)||i.default.getFirstMatch(/(?:SamsungBrowser)[\s/](\d+(\.?_?\d+)+)/i,t);return e&&(n.version=e),n}},{test:[/Whale/i],describe:function(t){var n={name:"NAVER Whale Browser"},e=i.default.getFirstMatch(o,t)||i.default.getFirstMatch(/(?:whale)[\s/](\d+(?:\.\d+)+)/i,t);return e&&(n.version=e),n}},{test:[/MZBrowser/i],describe:function(t){var n={name:"MZ Browser"},e=i.default.getFirstMatch(/(?:MZBrowser)[\s/](\d+(?:\.\d+)+)/i,t)||i.default.getFirstMatch(o,t);return e&&(n.version=e),n}},{test:[/focus/i],describe:function(t){var n={name:"Focus"},e=i.default.getFirstMatch(/(?:focus)[\s/](\d+(?:\.\d+)+)/i,t)||i.default.getFirstMatch(o,t);return e&&(n.version=e),n}},{test:[/swing/i],describe:function(t){var n={name:"Swing"},e=i.default.getFirstMatch(/(?:swing)[\s/](\d+(?:\.\d+)+)/i,t)||i.default.getFirstMatch(o,t);return e&&(n.version=e),n}},{test:[/coast/i],describe:function(t){var n={name:"Opera Coast"},e=i.default.getFirstMatch(o,t)||i.default.getFirstMatch(/(?:coast)[\s/](\d+(\.?_?\d+)+)/i,t);return e&&(n.version=e),n}},{test:[/opt\/\d+(?:.?_?\d+)+/i],describe:function(t){var n={name:"Opera Touch"},e=i.default.getFirstMatch(/(?:opt)[\s/](\d+(\.?_?\d+)+)/i,t)||i.default.getFirstMatch(o,t);return e&&(n.version=e),n}},{test:[/yabrowser/i],describe:function(t){var n={name:"Yandex Browser"},e=i.default.getFirstMatch(/(?:yabrowser)[\s/](\d+(\.?_?\d+)+)/i,t)||i.default.getFirstMatch(o,t);return e&&(n.version=e),n}},{test:[/ucbrowser/i],describe:function(t){var n={name:"UC Browser"},e=i.default.getFirstMatch(o,t)||i.default.getFirstMatch(/(?:ucbrowser)[\s/](\d+(\.?_?\d+)+)/i,t);return e&&(n.version=e),n}},{test:[/Maxthon|mxios/i],describe:function(t){var n={name:"Maxthon"},e=i.default.getFirstMatch(o,t)||i.default.getFirstMatch(/(?:Maxthon|mxios)[\s/](\d+(\.?_?\d+)+)/i,t);return 
e&&(n.version=e),n}},{test:[/epiphany/i],describe:function(t){var n={name:"Epiphany"},e=i.default.getFirstMatch(o,t)||i.default.getFirstMatch(/(?:epiphany)[\s/](\d+(\.?_?\d+)+)/i,t);return e&&(n.version=e),n}},{test:[/puffin/i],describe:function(t){var n={name:"Puffin"},e=i.default.getFirstMatch(o,t)||i.default.getFirstMatch(/(?:puffin)[\s/](\d+(\.?_?\d+)+)/i,t);return e&&(n.version=e),n}},{test:[/sleipnir/i],describe:function(t){var n={name:"Sleipnir"},e=i.default.getFirstMatch(o,t)||i.default.getFirstMatch(/(?:sleipnir)[\s/](\d+(\.?_?\d+)+)/i,t);return e&&(n.version=e),n}},{test:[/k-meleon/i],describe:function(t){var n={name:"K-Meleon"},e=i.default.getFirstMatch(o,t)||i.default.getFirstMatch(/(?:k-meleon)[\s/](\d+(\.?_?\d+)+)/i,t);return e&&(n.version=e),n}},{test:[/micromessenger/i],describe:function(t){var n={name:"WeChat"},e=i.default.getFirstMatch(/(?:micromessenger)[\s/](\d+(\.?_?\d+)+)/i,t)||i.default.getFirstMatch(o,t);return e&&(n.version=e),n}},{test:[/qqbrowser/i],describe:function(t){var n={name:/qqbrowserlite/i.test(t)?"QQ Browser Lite":"QQ Browser"},e=i.default.getFirstMatch(/(?:qqbrowserlite|qqbrowser)[/](\d+(\.?_?\d+)+)/i,t)||i.default.getFirstMatch(o,t);return e&&(n.version=e),n}},{test:[/msie|trident/i],describe:function(t){var n={name:"Internet Explorer"},e=i.default.getFirstMatch(/(?:msie |rv:)(\d+(\.?_?\d+)+)/i,t);return e&&(n.version=e),n}},{test:[/\sedg\//i],describe:function(t){var n={name:"Microsoft Edge"},e=i.default.getFirstMatch(/\sedg\/(\d+(\.?_?\d+)+)/i,t);return e&&(n.version=e),n}},{test:[/edg([ea]|ios)/i],describe:function(t){var n={name:"Microsoft Edge"},e=i.default.getSecondMatch(/edg([ea]|ios)\/(\d+(\.?_?\d+)+)/i,t);return e&&(n.version=e),n}},{test:[/vivaldi/i],describe:function(t){var n={name:"Vivaldi"},e=i.default.getFirstMatch(/vivaldi\/(\d+(\.?_?\d+)+)/i,t);return e&&(n.version=e),n}},{test:[/seamonkey/i],describe:function(t){var n={name:"SeaMonkey"},e=i.default.getFirstMatch(/seamonkey\/(\d+(\.?_?\d+)+)/i,t);return 
e&&(n.version=e),n}},{test:[/sailfish/i],describe:function(t){var n={name:"Sailfish"},e=i.default.getFirstMatch(/sailfish\s?browser\/(\d+(\.\d+)?)/i,t);return e&&(n.version=e),n}},{test:[/silk/i],describe:function(t){var n={name:"Amazon Silk"},e=i.default.getFirstMatch(/silk\/(\d+(\.?_?\d+)+)/i,t);return e&&(n.version=e),n}},{test:[/phantom/i],describe:function(t){var n={name:"PhantomJS"},e=i.default.getFirstMatch(/phantomjs\/(\d+(\.?_?\d+)+)/i,t);return e&&(n.version=e),n}},{test:[/slimerjs/i],describe:function(t){var n={name:"SlimerJS"},e=i.default.getFirstMatch(/slimerjs\/(\d+(\.?_?\d+)+)/i,t);return e&&(n.version=e),n}},{test:[/blackberry|\bbb\d+/i,/rim\stablet/i],describe:function(t){var n={name:"BlackBerry"},e=i.default.getFirstMatch(o,t)||i.default.getFirstMatch(/blackberry[\d]+\/(\d+(\.?_?\d+)+)/i,t);return e&&(n.version=e),n}},{test:[/(web|hpw)[o0]s/i],describe:function(t){var n={name:"WebOS Browser"},e=i.default.getFirstMatch(o,t)||i.default.getFirstMatch(/w(?:eb)?[o0]sbrowser\/(\d+(\.?_?\d+)+)/i,t);return e&&(n.version=e),n}},{test:[/bada/i],describe:function(t){var n={name:"Bada"},e=i.default.getFirstMatch(/dolfin\/(\d+(\.?_?\d+)+)/i,t);return e&&(n.version=e),n}},{test:[/tizen/i],describe:function(t){var n={name:"Tizen"},e=i.default.getFirstMatch(/(?:tizen\s?)?browser\/(\d+(\.?_?\d+)+)/i,t)||i.default.getFirstMatch(o,t);return e&&(n.version=e),n}},{test:[/qupzilla/i],describe:function(t){var n={name:"QupZilla"},e=i.default.getFirstMatch(/(?:qupzilla)[\s/](\d+(\.?_?\d+)+)/i,t)||i.default.getFirstMatch(o,t);return e&&(n.version=e),n}},{test:[/firefox|iceweasel|fxios/i],describe:function(t){var n={name:"Firefox"},e=i.default.getFirstMatch(/(?:firefox|iceweasel|fxios)[\s/](\d+(\.?_?\d+)+)/i,t);return e&&(n.version=e),n}},{test:[/electron/i],describe:function(t){var n={name:"Electron"},e=i.default.getFirstMatch(/(?:electron)\/(\d+(\.?_?\d+)+)/i,t);return e&&(n.version=e),n}},{test:[/MiuiBrowser/i],describe:function(t){var 
n={name:"Miui"},e=i.default.getFirstMatch(/(?:MiuiBrowser)[\s/](\d+(\.?_?\d+)+)/i,t);return e&&(n.version=e),n}},{test:[/chromium/i],describe:function(t){var n={name:"Chromium"},e=i.default.getFirstMatch(/(?:chromium)[\s/](\d+(\.?_?\d+)+)/i,t)||i.default.getFirstMatch(o,t);return e&&(n.version=e),n}},{test:[/chrome|crios|crmo/i],describe:function(t){var n={name:"Chrome"},e=i.default.getFirstMatch(/(?:chrome|crios|crmo)\/(\d+(\.?_?\d+)+)/i,t);return e&&(n.version=e),n}},{test:[/GSA/i],describe:function(t){var n={name:"Google Search"},e=i.default.getFirstMatch(/(?:GSA)\/(\d+(\.?_?\d+)+)/i,t);return e&&(n.version=e),n}},{test:function(t){var n=!t.test(/like android/i),e=t.test(/android/i);return n&&e},describe:function(t){var n={name:"Android Browser"},e=i.default.getFirstMatch(o,t);return e&&(n.version=e),n}},{test:[/playstation 4/i],describe:function(t){var n={name:"PlayStation 4"},e=i.default.getFirstMatch(o,t);return e&&(n.version=e),n}},{test:[/safari|applewebkit/i],describe:function(t){var n={name:"Safari"},e=i.default.getFirstMatch(o,t);return e&&(n.version=e),n}},{test:[/.*/i],describe:function(t){var n=-1!==t.search("\\(")?/^(.*)\/(.*)[ \t]\((.*)/:/^(.*)\/(.*) /;return{name:i.default.getFirstMatch(n,t),version:i.default.getSecondMatch(n,t)}}}];n.default=u,t.exports=n.default},function(t,n,e){"use strict";n.__esModule=!0,n.default=void 0;var r,i=(r=e(17))&&r.__esModule?r:{default:r},o=e(18);var u=[{test:[/Roku\/DVP/],describe:function(t){var n=i.default.getFirstMatch(/Roku\/DVP-(\d+\.\d+)/i,t);return{name:o.OS_MAP.Roku,version:n}}},{test:[/windows phone/i],describe:function(t){var n=i.default.getFirstMatch(/windows phone (?:os)?\s?(\d+(\.\d+)*)/i,t);return{name:o.OS_MAP.WindowsPhone,version:n}}},{test:[/windows /i],describe:function(t){var n=i.default.getFirstMatch(/Windows ((NT|XP)( \d\d?.\d)?)/i,t),e=i.default.getWindowsVersionName(n);return{name:o.OS_MAP.Windows,version:n,versionName:e}}},{test:[/Macintosh(.*?) 
FxiOS(.*?)\//],describe:function(t){var n={name:o.OS_MAP.iOS},e=i.default.getSecondMatch(/(Version\/)(\d[\d.]+)/,t);return e&&(n.version=e),n}},{test:[/macintosh/i],describe:function(t){var n=i.default.getFirstMatch(/mac os x (\d+(\.?_?\d+)+)/i,t).replace(/[_\s]/g,"."),e=i.default.getMacOSVersionName(n),r={name:o.OS_MAP.MacOS,version:n};return e&&(r.versionName=e),r}},{test:[/(ipod|iphone|ipad)/i],describe:function(t){var n=i.default.getFirstMatch(/os (\d+([_\s]\d+)*) like mac os x/i,t).replace(/[_\s]/g,".");return{name:o.OS_MAP.iOS,version:n}}},{test:function(t){var n=!t.test(/like android/i),e=t.test(/android/i);return n&&e},describe:function(t){var n=i.default.getFirstMatch(/android[\s/-](\d+(\.\d+)*)/i,t),e=i.default.getAndroidVersionName(n),r={name:o.OS_MAP.Android,version:n};return e&&(r.versionName=e),r}},{test:[/(web|hpw)[o0]s/i],describe:function(t){var n=i.default.getFirstMatch(/(?:web|hpw)[o0]s\/(\d+(\.\d+)*)/i,t),e={name:o.OS_MAP.WebOS};return n&&n.length&&(e.version=n),e}},{test:[/blackberry|\bbb\d+/i,/rim\stablet/i],describe:function(t){var n=i.default.getFirstMatch(/rim\stablet\sos\s(\d+(\.\d+)*)/i,t)||i.default.getFirstMatch(/blackberry\d+\/(\d+([_\s]\d+)*)/i,t)||i.default.getFirstMatch(/\bbb(\d+)/i,t);return{name:o.OS_MAP.BlackBerry,version:n}}},{test:[/bada/i],describe:function(t){var n=i.default.getFirstMatch(/bada\/(\d+(\.\d+)*)/i,t);return{name:o.OS_MAP.Bada,version:n}}},{test:[/tizen/i],describe:function(t){var n=i.default.getFirstMatch(/tizen[/\s](\d+(\.\d+)*)/i,t);return{name:o.OS_MAP.Tizen,version:n}}},{test:[/linux/i],describe:function(){return{name:o.OS_MAP.Linux}}},{test:[/CrOS/],describe:function(){return{name:o.OS_MAP.ChromeOS}}},{test:[/PlayStation 4/],describe:function(t){var n=i.default.getFirstMatch(/PlayStation 4[/\s](\d+(\.\d+)*)/i,t);return{name:o.OS_MAP.PlayStation4,version:n}}}];n.default=u,t.exports=n.default},function(t,n,e){"use strict";n.__esModule=!0,n.default=void 0;var 
r,i=(r=e(17))&&r.__esModule?r:{default:r},o=e(18);var u=[{test:[/googlebot/i],describe:function(){return{type:"bot",vendor:"Google"}}},{test:[/huawei/i],describe:function(t){var n=i.default.getFirstMatch(/(can-l01)/i,t)&&"Nova",e={type:o.PLATFORMS_MAP.mobile,vendor:"Huawei"};return n&&(e.model=n),e}},{test:[/nexus\s*(?:7|8|9|10).*/i],describe:function(){return{type:o.PLATFORMS_MAP.tablet,vendor:"Nexus"}}},{test:[/ipad/i],describe:function(){return{type:o.PLATFORMS_MAP.tablet,vendor:"Apple",model:"iPad"}}},{test:[/Macintosh(.*?) FxiOS(.*?)\//],describe:function(){return{type:o.PLATFORMS_MAP.tablet,vendor:"Apple",model:"iPad"}}},{test:[/kftt build/i],describe:function(){return{type:o.PLATFORMS_MAP.tablet,vendor:"Amazon",model:"Kindle Fire HD 7"}}},{test:[/silk/i],describe:function(){return{type:o.PLATFORMS_MAP.tablet,vendor:"Amazon"}}},{test:[/tablet(?! pc)/i],describe:function(){return{type:o.PLATFORMS_MAP.tablet}}},{test:function(t){var n=t.test(/ipod|iphone/i),e=t.test(/like (ipod|iphone)/i);return n&&!e},describe:function(t){var n=i.default.getFirstMatch(/(ipod|iphone)/i,t);return{type:o.PLATFORMS_MAP.mobile,vendor:"Apple",model:n}}},{test:[/nexus\s*[0-6].*/i,/galaxy nexus/i],describe:function(){return{type:o.PLATFORMS_MAP.mobile,vendor:"Nexus"}}},{test:[/[^-]mobi/i],describe:function(){return{type:o.PLATFORMS_MAP.mobile}}},{test:function(t){return"blackberry"===t.getBrowserName(!0)},describe:function(){return{type:o.PLATFORMS_MAP.mobile,vendor:"BlackBerry"}}},{test:function(t){return"bada"===t.getBrowserName(!0)},describe:function(){return{type:o.PLATFORMS_MAP.mobile}}},{test:function(t){return"windows phone"===t.getBrowserName()},describe:function(){return{type:o.PLATFORMS_MAP.mobile,vendor:"Microsoft"}}},{test:function(t){var 
n=Number(String(t.getOSVersion()).split(".")[0]);return"android"===t.getOSName(!0)&&n>=3},describe:function(){return{type:o.PLATFORMS_MAP.tablet}}},{test:function(t){return"android"===t.getOSName(!0)},describe:function(){return{type:o.PLATFORMS_MAP.mobile}}},{test:function(t){return"macos"===t.getOSName(!0)},describe:function(){return{type:o.PLATFORMS_MAP.desktop,vendor:"Apple"}}},{test:function(t){return"windows"===t.getOSName(!0)},describe:function(){return{type:o.PLATFORMS_MAP.desktop}}},{test:function(t){return"linux"===t.getOSName(!0)},describe:function(){return{type:o.PLATFORMS_MAP.desktop}}},{test:function(t){return"playstation 4"===t.getOSName(!0)},describe:function(){return{type:o.PLATFORMS_MAP.tv}}},{test:function(t){return"roku"===t.getOSName(!0)},describe:function(){return{type:o.PLATFORMS_MAP.tv}}}];n.default=u,t.exports=n.default},function(t,n,e){"use strict";n.__esModule=!0,n.default=void 0;var r,i=(r=e(17))&&r.__esModule?r:{default:r},o=e(18);var u=[{test:function(t){return"microsoft edge"===t.getBrowserName(!0)},describe:function(t){if(/\sedg\//i.test(t))return{name:o.ENGINE_MAP.Blink};var n=i.default.getFirstMatch(/edge\/(\d+(\.?_?\d+)+)/i,t);return{name:o.ENGINE_MAP.EdgeHTML,version:n}}},{test:[/trident/i],describe:function(t){var n={name:o.ENGINE_MAP.Trident},e=i.default.getFirstMatch(/trident\/(\d+(\.?_?\d+)+)/i,t);return e&&(n.version=e),n}},{test:function(t){return t.test(/presto/i)},describe:function(t){var n={name:o.ENGINE_MAP.Presto},e=i.default.getFirstMatch(/presto\/(\d+(\.?_?\d+)+)/i,t);return e&&(n.version=e),n}},{test:function(t){var n=t.test(/gecko/i),e=t.test(/like gecko/i);return n&&!e},describe:function(t){var n={name:o.ENGINE_MAP.Gecko},e=i.default.getFirstMatch(/gecko\/(\d+(\.?_?\d+)+)/i,t);return e&&(n.version=e),n}},{test:[/(apple)?webkit\/537\.36/i],describe:function(){return{name:o.ENGINE_MAP.Blink}}},{test:[/(apple)?webkit/i],describe:function(t){var 
n={name:o.ENGINE_MAP.WebKit},e=i.default.getFirstMatch(/webkit\/(\d+(\.?_?\d+)+)/i,t);return e&&(n.version=e),n}}];n.default=u,t.exports=n.default},function(t,n,e){t.exports=!e(8)&&!e(2)((function(){return 7!=Object.defineProperty(e(62)("div"),"a",{get:function(){return 7}}).a}))},function(t,n,e){var r=e(1),i=e(7),o=e(32),u=e(63),a=e(9).f;t.exports=function(t){var n=i.Symbol||(i.Symbol=o?{}:r.Symbol||{});"_"==t.charAt(0)||t in n||a(n,t,{value:u.f(t)})}},function(t,n,e){var r=e(13),i=e(15),o=e(51)(!1),u=e(64)("IE_PROTO");t.exports=function(t,n){var e,a=i(t),c=0,s=[];for(e in a)e!=u&&r(a,e)&&s.push(e);for(;n.length>c;)r(a,e=n[c++])&&(~o(s,e)||s.push(e));return s}},function(t,n,e){var r=e(9),i=e(3),o=e(33);t.exports=e(8)?Object.defineProperties:function(t,n){i(t);for(var e,u=o(n),a=u.length,c=0;a>c;)r.f(t,e=u[c++],n[e]);return t}},function(t,n,e){var r=e(15),i=e(36).f,o={}.toString,u="object"==typeof window&&window&&Object.getOwnPropertyNames?Object.getOwnPropertyNames(window):[];t.exports.f=function(t){return u&&"[object Window]"==o.call(t)?function(t){try{return i(t)}catch(t){return u.slice()}}(t):i(r(t))}},function(t,n,e){"use strict";var r=e(8),i=e(33),o=e(52),u=e(47),a=e(10),c=e(46),s=Object.assign;t.exports=!s||e(2)((function(){var t={},n={},e=Symbol(),r="abcdefghijklmnopqrst";return t[e]=7,r.split("").forEach((function(t){n[t]=t})),7!=s({},t)[e]||Object.keys(s({},n)).join("")!=r}))?function(t,n){for(var e=a(t),s=arguments.length,f=1,l=o.f,h=u.f;s>f;)for(var d,p=c(arguments[f++]),v=l?i(p).concat(l(p)):i(p),g=v.length,y=0;g>y;)d=v[y++],r&&!h.call(p,d)||(e[d]=p[d]);return e}:s},function(t,n){t.exports=Object.is||function(t,n){return t===n?0!==t||1/t==1/n:t!=t&&n!=n}},function(t,n,e){"use strict";var r=e(20),i=e(4),o=e(104),u=[].slice,a={},c=function(t,n,e){if(!(n in a)){for(var r=[],i=0;i>>0||(u.test(e)?16:10))}:r},function(t,n,e){var r=e(1).parseFloat,i=e(41).trim;t.exports=1/r(e(68)+"-0")!=-1/0?function(t){var n=i(String(t),3),e=r(n);return 
0===e&&"-"==n.charAt(0)?-0:e}:r},function(t,n,e){var r=e(25);t.exports=function(t,n){if("number"!=typeof t&&"Number"!=r(t))throw TypeError(n);return+t}},function(t,n,e){var r=e(4),i=Math.floor;t.exports=function(t){return!r(t)&&isFinite(t)&&i(t)===t}},function(t,n){t.exports=Math.log1p||function(t){return(t=+t)>-1e-8&&t<1e-8?t-t*t/2:Math.log(1+t)}},function(t,n,e){"use strict";var r=e(35),i=e(30),o=e(40),u={};e(14)(u,e(5)("iterator"),(function(){return this})),t.exports=function(t,n,e){t.prototype=r(u,{next:i(1,e)}),o(t,n+" Iterator")}},function(t,n,e){var r=e(3);t.exports=function(t,n,e,i){try{return i?n(r(e)[0],e[1]):n(e)}catch(n){var o=t.return;throw void 0!==o&&r(o.call(t)),n}}},function(t,n,e){var r=e(224);t.exports=function(t,n){return new(r(t))(n)}},function(t,n,e){var r=e(20),i=e(10),o=e(46),u=e(6);t.exports=function(t,n,e,a,c){r(n);var s=i(t),f=o(s),l=u(s.length),h=c?l-1:0,d=c?-1:1;if(e<2)for(;;){if(h in f){a=f[h],h+=d;break}if(h+=d,c?h<0:l<=h)throw TypeError("Reduce of empty array with no initial value")}for(;c?h>=0:l>h;h+=d)h in f&&(a=n(a,f[h],h,s));return a}},function(t,n,e){"use strict";var r=e(10),i=e(34),o=e(6);t.exports=[].copyWithin||function(t,n){var e=r(this),u=o(e.length),a=i(t,u),c=i(n,u),s=arguments.length>2?arguments[2]:void 0,f=Math.min((void 0===s?u:i(s,u))-c,u-a),l=1;for(c0;)c in e?e[a]=e[c]:delete e[a],a+=l,c+=l;return e}},function(t,n){t.exports=function(t,n){return{value:n,done:!!t}}},function(t,n,e){"use strict";var r=e(83);e(0)({target:"RegExp",proto:!0,forced:r!==/./.exec},{exec:r})},function(t,n,e){e(8)&&"g"!=/./g.flags&&e(9).f(RegExp.prototype,"flags",{configurable:!0,get:e(55)})},function(t,n,e){"use strict";var r,i,o,u,a=e(32),c=e(1),s=e(19),f=e(48),l=e(0),h=e(4),d=e(20),p=e(44),v=e(58),g=e(49),y=e(85).set,m=e(244)(),b=e(119),S=e(245),w=e(59),_=e(120),M=c.TypeError,x=c.process,P=x&&x.versions,O=P&&P.v8||"",F=c.Promise,A="process"==f(x),E=function(){},N=i=b.f,R=!!function(){try{var 
t=F.resolve(1),n=(t.constructor={})[e(5)("species")]=function(t){t(E,E)};return(A||"function"==typeof PromiseRejectionEvent)&&t.then(E)instanceof n&&0!==O.indexOf("6.6")&&-1===w.indexOf("Chrome/66")}catch(t){}}(),k=function(t){var n;return!(!h(t)||"function"!=typeof(n=t.then))&&n},T=function(t,n){if(!t._n){t._n=!0;var e=t._c;m((function(){for(var r=t._v,i=1==t._s,o=0,u=function(n){var e,o,u,a=i?n.ok:n.fail,c=n.resolve,s=n.reject,f=n.domain;try{a?(i||(2==t._h&&L(t),t._h=1),!0===a?e=r:(f&&f.enter(),e=a(r),f&&(f.exit(),u=!0)),e===n.promise?s(M("Promise-chain cycle")):(o=k(e))?o.call(e,c,s):c(e)):s(r)}catch(t){f&&!u&&f.exit(),s(t)}};e.length>o;)u(e[o++]);t._c=[],t._n=!1,n&&!t._h&&I(t)}))}},I=function(t){y.call(c,(function(){var n,e,r,i=t._v,o=j(t);if(o&&(n=S((function(){A?x.emit("unhandledRejection",i,t):(e=c.onunhandledrejection)?e({promise:t,reason:i}):(r=c.console)&&r.error&&r.error("Unhandled promise rejection",i)})),t._h=A||j(t)?2:1),t._a=void 0,o&&n.e)throw n.v}))},j=function(t){return 1!==t._h&&0===(t._a||t._c).length},L=function(t){y.call(c,(function(){var n;A?x.emit("rejectionHandled",t):(n=c.onrejectionhandled)&&n({promise:t,reason:t._v})}))},B=function(t){var n=this;n._d||(n._d=!0,(n=n._w||n)._v=t,n._s=2,n._a||(n._a=n._c.slice()),T(n,!0))},C=function(t){var n,e=this;if(!e._d){e._d=!0,e=e._w||e;try{if(e===t)throw M("Promise can't be resolved itself");(n=k(t))?m((function(){var r={_w:e,_d:!1};try{n.call(t,s(C,r,1),s(B,r,1))}catch(t){B.call(r,t)}})):(e._v=t,e._s=1,T(e,!1))}catch(t){B.call({_w:e,_d:!1},t)}}};R||(F=function(t){p(this,F,"Promise","_h"),d(t),r.call(this);try{t(s(C,this,1),s(B,this,1))}catch(t){B.call(this,t)}},(r=function(t){this._c=[],this._a=void 0,this._s=0,this._d=!1,this._v=void 0,this._h=0,this._n=!1}).prototype=e(45)(F.prototype,{then:function(t,n){var e=N(g(this,F));return e.ok="function"!=typeof t||t,e.fail="function"==typeof n&&n,e.domain=A?x.domain:void 
0,this._c.push(e),this._a&&this._a.push(e),this._s&&T(this,!1),e.promise},catch:function(t){return this.then(void 0,t)}}),o=function(){var t=new r;this.promise=t,this.resolve=s(C,t,1),this.reject=s(B,t,1)},b.f=N=function(t){return t===F||t===u?new o(t):i(t)}),l(l.G+l.W+l.F*!R,{Promise:F}),e(40)(F,"Promise"),e(43)("Promise"),u=e(7).Promise,l(l.S+l.F*!R,"Promise",{reject:function(t){var n=N(this);return(0,n.reject)(t),n.promise}}),l(l.S+l.F*(a||!R),"Promise",{resolve:function(t){return _(a&&this===u?F:this,t)}}),l(l.S+l.F*!(R&&e(54)((function(t){F.all(t).catch(E)}))),"Promise",{all:function(t){var n=this,e=N(n),r=e.resolve,i=e.reject,o=S((function(){var e=[],o=0,u=1;v(t,!1,(function(t){var a=o++,c=!1;e.push(void 0),u++,n.resolve(t).then((function(t){c||(c=!0,e[a]=t,--u||r(e))}),i)})),--u||r(e)}));return o.e&&i(o.v),e.promise},race:function(t){var n=this,e=N(n),r=e.reject,i=S((function(){v(t,!1,(function(t){n.resolve(t).then(e.resolve,r)}))}));return i.e&&r(i.v),e.promise}})},function(t,n,e){"use strict";var r=e(20);function i(t){var n,e;this.promise=new t((function(t,r){if(void 0!==n||void 0!==e)throw TypeError("Bad Promise constructor");n=t,e=r})),this.resolve=r(n),this.reject=r(e)}t.exports.f=function(t){return new i(t)}},function(t,n,e){var r=e(3),i=e(4),o=e(119);t.exports=function(t,n){if(r(t),i(n)&&n.constructor===t)return n;var e=o.f(t);return(0,e.resolve)(n),e.promise}},function(t,n,e){"use strict";var r=e(9).f,i=e(35),o=e(45),u=e(19),a=e(44),c=e(58),s=e(74),f=e(115),l=e(43),h=e(8),d=e(29).fastKey,p=e(39),v=h?"_s":"size",g=function(t,n){var e,r=d(n);if("F"!==r)return t._i[r];for(e=t._f;e;e=e.n)if(e.k==n)return e};t.exports={getConstructor:function(t,n,e,s){var f=t((function(t,r){a(t,f,n,"_i"),t._t=n,t._i=i(null),t._f=void 0,t._l=void 0,t[v]=0,null!=r&&c(r,e,t[s],t)}));return o(f.prototype,{clear:function(){for(var t=p(this,n),e=t._i,r=t._f;r;r=r.n)r.r=!0,r.p&&(r.p=r.p.n=void 0),delete e[r.i];t._f=t._l=void 0,t[v]=0},delete:function(t){var 
e=p(this,n),r=g(e,t);if(r){var i=r.n,o=r.p;delete e._i[r.i],r.r=!0,o&&(o.n=i),i&&(i.p=o),e._f==r&&(e._f=i),e._l==r&&(e._l=o),e[v]--}return!!r},forEach:function(t){p(this,n);for(var e,r=u(t,arguments.length>1?arguments[1]:void 0,3);e=e?e.n:this._f;)for(r(e.v,e.k,this);e&&e.r;)e=e.p},has:function(t){return!!g(p(this,n),t)}}),h&&r(f.prototype,"size",{get:function(){return p(this,n)[v]}}),f},def:function(t,n,e){var r,i,o=g(t,n);return o?o.v=e:(t._l=o={i:i=d(n,!0),k:n,v:e,p:r=t._l,n:void 0,r:!1},t._f||(t._f=o),r&&(r.n=o),t[v]++,"F"!==i&&(t._i[i]=o)),t},getEntry:g,setStrong:function(t,n,e){s(t,n,(function(t,e){this._t=p(t,n),this._k=e,this._l=void 0}),(function(){for(var t=this._k,n=this._l;n&&n.r;)n=n.p;return this._t&&(this._l=n=n?n.n:this._t._f)?f(0,"keys"==t?n.k:"values"==t?n.v:[n.k,n.v]):(this._t=void 0,f(1))}),e?"entries":"values",!e,!0),l(n)}}},function(t,n,e){"use strict";var r=e(45),i=e(29).getWeak,o=e(3),u=e(4),a=e(44),c=e(58),s=e(24),f=e(13),l=e(39),h=s(5),d=s(6),p=0,v=function(t){return t._l||(t._l=new g)},g=function(){this.a=[]},y=function(t,n){return h(t.a,(function(t){return t[0]===n}))};g.prototype={get:function(t){var n=y(this,t);if(n)return n[1]},has:function(t){return!!y(this,t)},set:function(t,n){var e=y(this,t);e?e[1]=n:this.a.push([t,n])},delete:function(t){var n=d(this.a,(function(n){return n[0]===t}));return~n&&this.a.splice(n,1),!!~n}},t.exports={getConstructor:function(t,n,e,o){var s=t((function(t,r){a(t,s,n,"_i"),t._t=n,t._i=p++,t._l=void 0,null!=r&&c(r,e,t[o],t)}));return r(s.prototype,{delete:function(t){if(!u(t))return!1;var e=i(t);return!0===e?v(l(this,n)).delete(t):e&&f(e,this._i)&&delete e[this._i]},has:function(t){if(!u(t))return!1;var e=i(t);return!0===e?v(l(this,n)).has(t):e&&f(e,this._i)}}),s},def:function(t,n,e){var r=i(o(n),!0);return!0===r?v(t).set(n,e):r[t._i]=e,t},ufstore:v}},function(t,n,e){var r=e(21),i=e(6);t.exports=function(t){if(void 0===t)return 0;var n=r(t),e=i(n);if(n!==e)throw RangeError("Wrong length!");return 
e}},function(t,n,e){var r=e(36),i=e(52),o=e(3),u=e(1).Reflect;t.exports=u&&u.ownKeys||function(t){var n=r.f(o(t)),e=i.f;return e?n.concat(e(t)):n}},function(t,n,e){var r=e(6),i=e(70),o=e(26);t.exports=function(t,n,e,u){var a=String(o(t)),c=a.length,s=void 0===e?" ":String(e),f=r(n);if(f<=c||""==s)return a;var l=f-c,h=i.call(s,Math.ceil(l/s.length));return h.length>l&&(h=h.slice(0,l)),u?h+a:a+h}},function(t,n,e){var r=e(8),i=e(33),o=e(15),u=e(47).f;t.exports=function(t){return function(n){for(var e,a=o(n),c=i(a),s=c.length,f=0,l=[];s>f;)e=c[f++],r&&!u.call(a,e)||l.push(t?[e,a[e]]:a[e]);return l}}},function(t,n){var e=t.exports={version:"2.6.9"};"number"==typeof __e&&(__e=e)},function(t,n){t.exports=function(t){try{return!!t()}catch(t){return!0}}},function(t,n,e){e(130),t.exports=e(90)},function(t,n,e){"use strict";e(131);var r,i=(r=e(303))&&r.__esModule?r:{default:r};i.default._babelPolyfill&&"undefined"!=typeof console&&console.warn&&console.warn("@babel/polyfill is loaded more than once on this page. This is probably not desirable/intended and may have consequences if different versions of the polyfills are applied sequentially. 
If you do need to load the polyfill more than once, use @babel/polyfill/noConflict instead to bypass the warning."),i.default._babelPolyfill=!0},function(t,n,e){"use strict";e(132),e(275),e(277),e(280),e(282),e(284),e(286),e(288),e(290),e(292),e(294),e(296),e(298),e(302)},function(t,n,e){e(133),e(136),e(137),e(138),e(139),e(140),e(141),e(142),e(143),e(144),e(145),e(146),e(147),e(148),e(149),e(150),e(151),e(152),e(153),e(154),e(155),e(156),e(157),e(158),e(159),e(160),e(161),e(162),e(163),e(164),e(165),e(166),e(167),e(168),e(169),e(170),e(171),e(172),e(173),e(174),e(175),e(176),e(177),e(179),e(180),e(181),e(182),e(183),e(184),e(185),e(186),e(187),e(188),e(189),e(190),e(191),e(192),e(193),e(194),e(195),e(196),e(197),e(198),e(199),e(200),e(201),e(202),e(203),e(204),e(205),e(206),e(207),e(208),e(209),e(210),e(211),e(212),e(214),e(215),e(217),e(218),e(219),e(220),e(221),e(222),e(223),e(225),e(226),e(227),e(228),e(229),e(230),e(231),e(232),e(233),e(234),e(235),e(236),e(237),e(82),e(238),e(116),e(239),e(117),e(240),e(241),e(242),e(243),e(118),e(246),e(247),e(248),e(249),e(250),e(251),e(252),e(253),e(254),e(255),e(256),e(257),e(258),e(259),e(260),e(261),e(262),e(263),e(264),e(265),e(266),e(267),e(268),e(269),e(270),e(271),e(272),e(273),e(274),t.exports=e(7)},function(t,n,e){"use strict";var r=e(1),i=e(13),o=e(8),u=e(0),a=e(11),c=e(29).KEY,s=e(2),f=e(50),l=e(40),h=e(31),d=e(5),p=e(63),v=e(97),g=e(135),y=e(53),m=e(3),b=e(4),S=e(10),w=e(15),_=e(28),M=e(30),x=e(35),P=e(100),O=e(22),F=e(52),A=e(9),E=e(33),N=O.f,R=A.f,k=P.f,T=r.Symbol,I=r.JSON,j=I&&I.stringify,L=d("_hidden"),B=d("toPrimitive"),C={}.propertyIsEnumerable,W=f("symbol-registry"),V=f("symbols"),G=f("op-symbols"),D=Object.prototype,U="function"==typeof T&&!!F.f,z=r.QObject,q=!z||!z.prototype||!z.prototype.findChild,K=o&&s((function(){return 7!=x(R({},"a",{get:function(){return R(this,"a",{value:7}).a}})).a}))?function(t,n,e){var r=N(D,n);r&&delete D[n],R(t,n,e),r&&t!==D&&R(D,n,r)}:R,Y=function(t){var 
n=V[t]=x(T.prototype);return n._k=t,n},Q=U&&"symbol"==typeof T.iterator?function(t){return"symbol"==typeof t}:function(t){return t instanceof T},H=function(t,n,e){return t===D&&H(G,n,e),m(t),n=_(n,!0),m(e),i(V,n)?(e.enumerable?(i(t,L)&&t[L][n]&&(t[L][n]=!1),e=x(e,{enumerable:M(0,!1)})):(i(t,L)||R(t,L,M(1,{})),t[L][n]=!0),K(t,n,e)):R(t,n,e)},J=function(t,n){m(t);for(var e,r=g(n=w(n)),i=0,o=r.length;o>i;)H(t,e=r[i++],n[e]);return t},X=function(t){var n=C.call(this,t=_(t,!0));return!(this===D&&i(V,t)&&!i(G,t))&&(!(n||!i(this,t)||!i(V,t)||i(this,L)&&this[L][t])||n)},Z=function(t,n){if(t=w(t),n=_(n,!0),t!==D||!i(V,n)||i(G,n)){var e=N(t,n);return!e||!i(V,n)||i(t,L)&&t[L][n]||(e.enumerable=!0),e}},$=function(t){for(var n,e=k(w(t)),r=[],o=0;e.length>o;)i(V,n=e[o++])||n==L||n==c||r.push(n);return r},tt=function(t){for(var n,e=t===D,r=k(e?G:w(t)),o=[],u=0;r.length>u;)!i(V,n=r[u++])||e&&!i(D,n)||o.push(V[n]);return o};U||(a((T=function(){if(this instanceof T)throw TypeError("Symbol is not a constructor!");var t=h(arguments.length>0?arguments[0]:void 0),n=function(e){this===D&&n.call(G,e),i(this,L)&&i(this[L],t)&&(this[L][t]=!1),K(this,t,M(1,e))};return o&&q&&K(D,t,{configurable:!0,set:n}),Y(t)}).prototype,"toString",(function(){return this._k})),O.f=Z,A.f=H,e(36).f=P.f=$,e(47).f=X,F.f=tt,o&&!e(32)&&a(D,"propertyIsEnumerable",X,!0),p.f=function(t){return Y(d(t))}),u(u.G+u.W+u.F*!U,{Symbol:T});for(var nt="hasInstance,isConcatSpreadable,iterator,match,replace,search,species,split,toPrimitive,toStringTag,unscopables".split(","),et=0;nt.length>et;)d(nt[et++]);for(var rt=E(d.store),it=0;rt.length>it;)v(rt[it++]);u(u.S+u.F*!U,"Symbol",{for:function(t){return i(W,t+="")?W[t]:W[t]=T(t)},keyFor:function(t){if(!Q(t))throw TypeError(t+" is not a symbol!");for(var n in W)if(W[n]===t)return n},useSetter:function(){q=!0},useSimple:function(){q=!1}}),u(u.S+u.F*!U,"Object",{create:function(t,n){return void 
0===n?x(t):J(x(t),n)},defineProperty:H,defineProperties:J,getOwnPropertyDescriptor:Z,getOwnPropertyNames:$,getOwnPropertySymbols:tt});var ot=s((function(){F.f(1)}));u(u.S+u.F*ot,"Object",{getOwnPropertySymbols:function(t){return F.f(S(t))}}),I&&u(u.S+u.F*(!U||s((function(){var t=T();return"[null]"!=j([t])||"{}"!=j({a:t})||"{}"!=j(Object(t))}))),"JSON",{stringify:function(t){for(var n,e,r=[t],i=1;arguments.length>i;)r.push(arguments[i++]);if(e=n=r[1],(b(n)||void 0!==t)&&!Q(t))return y(n)||(n=function(t,n){if("function"==typeof e&&(n=e.call(this,t,n)),!Q(n))return n}),r[1]=n,j.apply(I,r)}}),T.prototype[B]||e(14)(T.prototype,B,T.prototype.valueOf),l(T,"Symbol"),l(Math,"Math",!0),l(r.JSON,"JSON",!0)},function(t,n,e){t.exports=e(50)("native-function-to-string",Function.toString)},function(t,n,e){var r=e(33),i=e(52),o=e(47);t.exports=function(t){var n=r(t),e=i.f;if(e)for(var u,a=e(t),c=o.f,s=0;a.length>s;)c.call(t,u=a[s++])&&n.push(u);return n}},function(t,n,e){var r=e(0);r(r.S,"Object",{create:e(35)})},function(t,n,e){var r=e(0);r(r.S+r.F*!e(8),"Object",{defineProperty:e(9).f})},function(t,n,e){var r=e(0);r(r.S+r.F*!e(8),"Object",{defineProperties:e(99)})},function(t,n,e){var r=e(15),i=e(22).f;e(23)("getOwnPropertyDescriptor",(function(){return function(t,n){return i(r(t),n)}}))},function(t,n,e){var r=e(10),i=e(37);e(23)("getPrototypeOf",(function(){return function(t){return i(r(t))}}))},function(t,n,e){var r=e(10),i=e(33);e(23)("keys",(function(){return function(t){return i(r(t))}}))},function(t,n,e){e(23)("getOwnPropertyNames",(function(){return e(100).f}))},function(t,n,e){var r=e(4),i=e(29).onFreeze;e(23)("freeze",(function(t){return function(n){return t&&r(n)?t(i(n)):n}}))},function(t,n,e){var r=e(4),i=e(29).onFreeze;e(23)("seal",(function(t){return function(n){return t&&r(n)?t(i(n)):n}}))},function(t,n,e){var r=e(4),i=e(29).onFreeze;e(23)("preventExtensions",(function(t){return function(n){return t&&r(n)?t(i(n)):n}}))},function(t,n,e){var 
r=e(4);e(23)("isFrozen",(function(t){return function(n){return!r(n)||!!t&&t(n)}}))},function(t,n,e){var r=e(4);e(23)("isSealed",(function(t){return function(n){return!r(n)||!!t&&t(n)}}))},function(t,n,e){var r=e(4);e(23)("isExtensible",(function(t){return function(n){return!!r(n)&&(!t||t(n))}}))},function(t,n,e){var r=e(0);r(r.S+r.F,"Object",{assign:e(101)})},function(t,n,e){var r=e(0);r(r.S,"Object",{is:e(102)})},function(t,n,e){var r=e(0);r(r.S,"Object",{setPrototypeOf:e(67).set})},function(t,n,e){"use strict";var r=e(48),i={};i[e(5)("toStringTag")]="z",i+""!="[object z]"&&e(11)(Object.prototype,"toString",(function(){return"[object "+r(this)+"]"}),!0)},function(t,n,e){var r=e(0);r(r.P,"Function",{bind:e(103)})},function(t,n,e){var r=e(9).f,i=Function.prototype,o=/^\s*function ([^ (]*)/;"name"in i||e(8)&&r(i,"name",{configurable:!0,get:function(){try{return(""+this).match(o)[1]}catch(t){return""}}})},function(t,n,e){"use strict";var r=e(4),i=e(37),o=e(5)("hasInstance"),u=Function.prototype;o in u||e(9).f(u,o,{value:function(t){if("function"!=typeof this||!r(t))return!1;if(!r(this.prototype))return t instanceof this;for(;t=i(t);)if(this.prototype===t)return!0;return!1}})},function(t,n,e){var r=e(0),i=e(105);r(r.G+r.F*(parseInt!=i),{parseInt:i})},function(t,n,e){var r=e(0),i=e(106);r(r.G+r.F*(parseFloat!=i),{parseFloat:i})},function(t,n,e){"use strict";var r=e(1),i=e(13),o=e(25),u=e(69),a=e(28),c=e(2),s=e(36).f,f=e(22).f,l=e(9).f,h=e(41).trim,d=r.Number,p=d,v=d.prototype,g="Number"==o(e(35)(v)),y="trim"in String.prototype,m=function(t){var n=a(t,!1);if("string"==typeof n&&n.length>2){var e,r,i,o=(n=y?n.trim():h(n,3)).charCodeAt(0);if(43===o||45===o){if(88===(e=n.charCodeAt(2))||120===e)return NaN}else if(48===o){switch(n.charCodeAt(1)){case 66:case 98:r=2,i=49;break;case 79:case 111:r=8,i=55;break;default:return+n}for(var u,c=n.slice(2),s=0,f=c.length;si)return NaN;return parseInt(c,r)}}return+n};if(!d(" 0o1")||!d("0b1")||d("+0x1")){d=function(t){var 
n=arguments.length<1?0:t,e=this;return e instanceof d&&(g?c((function(){v.valueOf.call(e)})):"Number"!=o(e))?u(new p(m(n)),e,d):m(n)};for(var b,S=e(8)?s(p):"MAX_VALUE,MIN_VALUE,NaN,NEGATIVE_INFINITY,POSITIVE_INFINITY,EPSILON,isFinite,isInteger,isNaN,isSafeInteger,MAX_SAFE_INTEGER,MIN_SAFE_INTEGER,parseFloat,parseInt,isInteger".split(","),w=0;S.length>w;w++)i(p,b=S[w])&&!i(d,b)&&l(d,b,f(p,b));d.prototype=v,v.constructor=d,e(11)(r,"Number",d)}},function(t,n,e){"use strict";var r=e(0),i=e(21),o=e(107),u=e(70),a=1..toFixed,c=Math.floor,s=[0,0,0,0,0,0],f="Number.toFixed: incorrect invocation!",l=function(t,n){for(var e=-1,r=n;++e<6;)r+=t*s[e],s[e]=r%1e7,r=c(r/1e7)},h=function(t){for(var n=6,e=0;--n>=0;)e+=s[n],s[n]=c(e/t),e=e%t*1e7},d=function(){for(var t=6,n="";--t>=0;)if(""!==n||0===t||0!==s[t]){var e=String(s[t]);n=""===n?e:n+u.call("0",7-e.length)+e}return n},p=function(t,n,e){return 0===n?e:n%2==1?p(t,n-1,e*t):p(t*t,n/2,e)};r(r.P+r.F*(!!a&&("0.000"!==8e-5.toFixed(3)||"1"!==.9.toFixed(0)||"1.25"!==1.255.toFixed(2)||"1000000000000000128"!==(0xde0b6b3a7640080).toFixed(0))||!e(2)((function(){a.call({})}))),"Number",{toFixed:function(t){var n,e,r,a,c=o(this,f),s=i(t),v="",g="0";if(s<0||s>20)throw RangeError(f);if(c!=c)return"NaN";if(c<=-1e21||c>=1e21)return String(c);if(c<0&&(v="-",c=-c),c>1e-21)if(e=(n=function(t){for(var n=0,e=t;e>=4096;)n+=12,e/=4096;for(;e>=2;)n+=1,e/=2;return n}(c*p(2,69,1))-69)<0?c*p(2,-n,1):c/p(2,n,1),e*=4503599627370496,(n=52-n)>0){for(l(0,e),r=s;r>=7;)l(1e7,0),r-=7;for(l(p(10,r,1),0),r=n-1;r>=23;)h(1<<23),r-=23;h(1<0?v+((a=g.length)<=s?"0."+u.call("0",s-a)+g:g.slice(0,a-s)+"."+g.slice(a-s)):v+g}})},function(t,n,e){"use strict";var r=e(0),i=e(2),o=e(107),u=1..toPrecision;r(r.P+r.F*(i((function(){return"1"!==u.call(1,void 0)}))||!i((function(){u.call({})}))),"Number",{toPrecision:function(t){var n=o(this,"Number#toPrecision: incorrect invocation!");return void 0===t?u.call(n):u.call(n,t)}})},function(t,n,e){var 
r=e(0);r(r.S,"Number",{EPSILON:Math.pow(2,-52)})},function(t,n,e){var r=e(0),i=e(1).isFinite;r(r.S,"Number",{isFinite:function(t){return"number"==typeof t&&i(t)}})},function(t,n,e){var r=e(0);r(r.S,"Number",{isInteger:e(108)})},function(t,n,e){var r=e(0);r(r.S,"Number",{isNaN:function(t){return t!=t}})},function(t,n,e){var r=e(0),i=e(108),o=Math.abs;r(r.S,"Number",{isSafeInteger:function(t){return i(t)&&o(t)<=9007199254740991}})},function(t,n,e){var r=e(0);r(r.S,"Number",{MAX_SAFE_INTEGER:9007199254740991})},function(t,n,e){var r=e(0);r(r.S,"Number",{MIN_SAFE_INTEGER:-9007199254740991})},function(t,n,e){var r=e(0),i=e(106);r(r.S+r.F*(Number.parseFloat!=i),"Number",{parseFloat:i})},function(t,n,e){var r=e(0),i=e(105);r(r.S+r.F*(Number.parseInt!=i),"Number",{parseInt:i})},function(t,n,e){var r=e(0),i=e(109),o=Math.sqrt,u=Math.acosh;r(r.S+r.F*!(u&&710==Math.floor(u(Number.MAX_VALUE))&&u(1/0)==1/0),"Math",{acosh:function(t){return(t=+t)<1?NaN:t>94906265.62425156?Math.log(t)+Math.LN2:i(t-1+o(t-1)*o(t+1))}})},function(t,n,e){var r=e(0),i=Math.asinh;r(r.S+r.F*!(i&&1/i(0)>0),"Math",{asinh:function t(n){return isFinite(n=+n)&&0!=n?n<0?-t(-n):Math.log(n+Math.sqrt(n*n+1)):n}})},function(t,n,e){var r=e(0),i=Math.atanh;r(r.S+r.F*!(i&&1/i(-0)<0),"Math",{atanh:function(t){return 0==(t=+t)?t:Math.log((1+t)/(1-t))/2}})},function(t,n,e){var r=e(0),i=e(71);r(r.S,"Math",{cbrt:function(t){return i(t=+t)*Math.pow(Math.abs(t),1/3)}})},function(t,n,e){var r=e(0);r(r.S,"Math",{clz32:function(t){return(t>>>=0)?31-Math.floor(Math.log(t+.5)*Math.LOG2E):32}})},function(t,n,e){var r=e(0),i=Math.exp;r(r.S,"Math",{cosh:function(t){return(i(t=+t)+i(-t))/2}})},function(t,n,e){var r=e(0),i=e(72);r(r.S+r.F*(i!=Math.expm1),"Math",{expm1:i})},function(t,n,e){var r=e(0);r(r.S,"Math",{fround:e(178)})},function(t,n,e){var r=e(71),i=Math.pow,o=i(2,-52),u=i(2,-23),a=i(2,127)*(2-u),c=i(2,-126);t.exports=Math.fround||function(t){var n,e,i=Math.abs(t),s=r(t);return ia||e!=e?s*(1/0):s*e}},function(t,n,e){var 
r=e(0),i=Math.abs;r(r.S,"Math",{hypot:function(t,n){for(var e,r,o=0,u=0,a=arguments.length,c=0;u0?(r=e/c)*r:e;return c===1/0?1/0:c*Math.sqrt(o)}})},function(t,n,e){var r=e(0),i=Math.imul;r(r.S+r.F*e(2)((function(){return-5!=i(4294967295,5)||2!=i.length})),"Math",{imul:function(t,n){var e=+t,r=+n,i=65535&e,o=65535&r;return 0|i*o+((65535&e>>>16)*o+i*(65535&r>>>16)<<16>>>0)}})},function(t,n,e){var r=e(0);r(r.S,"Math",{log10:function(t){return Math.log(t)*Math.LOG10E}})},function(t,n,e){var r=e(0);r(r.S,"Math",{log1p:e(109)})},function(t,n,e){var r=e(0);r(r.S,"Math",{log2:function(t){return Math.log(t)/Math.LN2}})},function(t,n,e){var r=e(0);r(r.S,"Math",{sign:e(71)})},function(t,n,e){var r=e(0),i=e(72),o=Math.exp;r(r.S+r.F*e(2)((function(){return-2e-17!=!Math.sinh(-2e-17)})),"Math",{sinh:function(t){return Math.abs(t=+t)<1?(i(t)-i(-t))/2:(o(t-1)-o(-t-1))*(Math.E/2)}})},function(t,n,e){var r=e(0),i=e(72),o=Math.exp;r(r.S,"Math",{tanh:function(t){var n=i(t=+t),e=i(-t);return n==1/0?1:e==1/0?-1:(n-e)/(o(t)+o(-t))}})},function(t,n,e){var r=e(0);r(r.S,"Math",{trunc:function(t){return(t>0?Math.floor:Math.ceil)(t)}})},function(t,n,e){var r=e(0),i=e(34),o=String.fromCharCode,u=String.fromCodePoint;r(r.S+r.F*(!!u&&1!=u.length),"String",{fromCodePoint:function(t){for(var n,e=[],r=arguments.length,u=0;r>u;){if(n=+arguments[u++],i(n,1114111)!==n)throw RangeError(n+" is not a valid code point");e.push(n<65536?o(n):o(55296+((n-=65536)>>10),n%1024+56320))}return e.join("")}})},function(t,n,e){var r=e(0),i=e(15),o=e(6);r(r.S,"String",{raw:function(t){for(var n=i(t.raw),e=o(n.length),r=arguments.length,u=[],a=0;e>a;)u.push(String(n[a++])),a=n.length?{value:void 0,done:!0}:(t=r(n,e),this._i+=t.length,{value:t,done:!1})}))},function(t,n,e){"use strict";var r=e(0),i=e(73)(!1);r(r.P,"String",{codePointAt:function(t){return i(this,t)}})},function(t,n,e){"use strict";var r=e(0),i=e(6),o=e(75),u="".endsWith;r(r.P+r.F*e(77)("endsWith"),"String",{endsWith:function(t){var 
n=o(this,t,"endsWith"),e=arguments.length>1?arguments[1]:void 0,r=i(n.length),a=void 0===e?r:Math.min(i(e),r),c=String(t);return u?u.call(n,c,a):n.slice(a-c.length,a)===c}})},function(t,n,e){"use strict";var r=e(0),i=e(75);r(r.P+r.F*e(77)("includes"),"String",{includes:function(t){return!!~i(this,t,"includes").indexOf(t,arguments.length>1?arguments[1]:void 0)}})},function(t,n,e){var r=e(0);r(r.P,"String",{repeat:e(70)})},function(t,n,e){"use strict";var r=e(0),i=e(6),o=e(75),u="".startsWith;r(r.P+r.F*e(77)("startsWith"),"String",{startsWith:function(t){var n=o(this,t,"startsWith"),e=i(Math.min(arguments.length>1?arguments[1]:void 0,n.length)),r=String(t);return u?u.call(n,r,e):n.slice(e,e+r.length)===r}})},function(t,n,e){"use strict";e(12)("anchor",(function(t){return function(n){return t(this,"a","name",n)}}))},function(t,n,e){"use strict";e(12)("big",(function(t){return function(){return t(this,"big","","")}}))},function(t,n,e){"use strict";e(12)("blink",(function(t){return function(){return t(this,"blink","","")}}))},function(t,n,e){"use strict";e(12)("bold",(function(t){return function(){return t(this,"b","","")}}))},function(t,n,e){"use strict";e(12)("fixed",(function(t){return function(){return t(this,"tt","","")}}))},function(t,n,e){"use strict";e(12)("fontcolor",(function(t){return function(n){return t(this,"font","color",n)}}))},function(t,n,e){"use strict";e(12)("fontsize",(function(t){return function(n){return t(this,"font","size",n)}}))},function(t,n,e){"use strict";e(12)("italics",(function(t){return function(){return t(this,"i","","")}}))},function(t,n,e){"use strict";e(12)("link",(function(t){return function(n){return t(this,"a","href",n)}}))},function(t,n,e){"use strict";e(12)("small",(function(t){return function(){return t(this,"small","","")}}))},function(t,n,e){"use strict";e(12)("strike",(function(t){return function(){return t(this,"strike","","")}}))},function(t,n,e){"use strict";e(12)("sub",(function(t){return function(){return 
t(this,"sub","","")}}))},function(t,n,e){"use strict";e(12)("sup",(function(t){return function(){return t(this,"sup","","")}}))},function(t,n,e){var r=e(0);r(r.S,"Date",{now:function(){return(new Date).getTime()}})},function(t,n,e){"use strict";var r=e(0),i=e(10),o=e(28);r(r.P+r.F*e(2)((function(){return null!==new Date(NaN).toJSON()||1!==Date.prototype.toJSON.call({toISOString:function(){return 1}})})),"Date",{toJSON:function(t){var n=i(this),e=o(n);return"number"!=typeof e||isFinite(e)?n.toISOString():null}})},function(t,n,e){var r=e(0),i=e(213);r(r.P+r.F*(Date.prototype.toISOString!==i),"Date",{toISOString:i})},function(t,n,e){"use strict";var r=e(2),i=Date.prototype.getTime,o=Date.prototype.toISOString,u=function(t){return t>9?t:"0"+t};t.exports=r((function(){return"0385-07-25T07:06:39.999Z"!=o.call(new Date(-5e13-1))}))||!r((function(){o.call(new Date(NaN))}))?function(){if(!isFinite(i.call(this)))throw RangeError("Invalid time value");var t=this,n=t.getUTCFullYear(),e=t.getUTCMilliseconds(),r=n<0?"-":n>9999?"+":"";return r+("00000"+Math.abs(n)).slice(r?-6:-4)+"-"+u(t.getUTCMonth()+1)+"-"+u(t.getUTCDate())+"T"+u(t.getUTCHours())+":"+u(t.getUTCMinutes())+":"+u(t.getUTCSeconds())+"."+(e>99?e:"0"+u(e))+"Z"}:o},function(t,n,e){var r=Date.prototype,i=r.toString,o=r.getTime;new Date(NaN)+""!="Invalid Date"&&e(11)(r,"toString",(function(){var t=o.call(this);return t==t?i.call(this):"Invalid Date"}))},function(t,n,e){var r=e(5)("toPrimitive"),i=Date.prototype;r in i||e(14)(i,r,e(216))},function(t,n,e){"use strict";var r=e(3),i=e(28);t.exports=function(t){if("string"!==t&&"number"!==t&&"default"!==t)throw TypeError("Incorrect hint");return i(r(this),"number"!=t)}},function(t,n,e){var r=e(0);r(r.S,"Array",{isArray:e(53)})},function(t,n,e){"use strict";var r=e(19),i=e(0),o=e(10),u=e(111),a=e(78),c=e(6),s=e(79),f=e(80);i(i.S+i.F*!e(54)((function(t){Array.from(t)})),"Array",{from:function(t){var n,e,i,l,h=o(t),d="function"==typeof 
this?this:Array,p=arguments.length,v=p>1?arguments[1]:void 0,g=void 0!==v,y=0,m=f(h);if(g&&(v=r(v,p>2?arguments[2]:void 0,2)),null==m||d==Array&&a(m))for(e=new d(n=c(h.length));n>y;y++)s(e,y,g?v(h[y],y):h[y]);else for(l=m.call(h),e=new d;!(i=l.next()).done;y++)s(e,y,g?u(l,v,[i.value,y],!0):i.value);return e.length=y,e}})},function(t,n,e){"use strict";var r=e(0),i=e(79);r(r.S+r.F*e(2)((function(){function t(){}return!(Array.of.call(t)instanceof t)})),"Array",{of:function(){for(var t=0,n=arguments.length,e=new("function"==typeof this?this:Array)(n);n>t;)i(e,t,arguments[t++]);return e.length=n,e}})},function(t,n,e){"use strict";var r=e(0),i=e(15),o=[].join;r(r.P+r.F*(e(46)!=Object||!e(16)(o)),"Array",{join:function(t){return o.call(i(this),void 0===t?",":t)}})},function(t,n,e){"use strict";var r=e(0),i=e(66),o=e(25),u=e(34),a=e(6),c=[].slice;r(r.P+r.F*e(2)((function(){i&&c.call(i)})),"Array",{slice:function(t,n){var e=a(this.length),r=o(this);if(n=void 0===n?e:n,"Array"==r)return c.call(this,t,n);for(var i=u(t,e),s=u(n,e),f=a(s-i),l=new Array(f),h=0;h1&&(r=Math.min(r,o(arguments[1]))),r<0&&(r=e+r);r>=0;r--)if(r in n&&n[r]===t)return r||0;return-1}})},function(t,n,e){var r=e(0);r(r.P,"Array",{copyWithin:e(114)}),e(38)("copyWithin")},function(t,n,e){var r=e(0);r(r.P,"Array",{fill:e(81)}),e(38)("fill")},function(t,n,e){"use strict";var r=e(0),i=e(24)(5),o=!0;"find"in[]&&Array(1).find((function(){o=!1})),r(r.P+r.F*o,"Array",{find:function(t){return i(this,t,arguments.length>1?arguments[1]:void 0)}}),e(38)("find")},function(t,n,e){"use strict";var r=e(0),i=e(24)(6),o="findIndex",u=!0;o in[]&&Array(1)[o]((function(){u=!1})),r(r.P+r.F*u,"Array",{findIndex:function(t){return i(this,t,arguments.length>1?arguments[1]:void 0)}}),e(38)(o)},function(t,n,e){e(43)("Array")},function(t,n,e){var r=e(1),i=e(69),o=e(9).f,u=e(36).f,a=e(76),c=e(55),s=r.RegExp,f=s,l=s.prototype,h=/a/g,d=/a/g,p=new s(h)!==h;if(e(8)&&(!p||e(2)((function(){return 
d[e(5)("match")]=!1,s(h)!=h||s(d)==d||"/a/i"!=s(h,"i")})))){s=function(t,n){var e=this instanceof s,r=a(t),o=void 0===n;return!e&&r&&t.constructor===s&&o?t:i(p?new f(r&&!o?t.source:t,n):f((r=t instanceof s)?t.source:t,r&&o?c.call(t):n),e?this:l,s)};for(var v=function(t){t in s||o(s,t,{configurable:!0,get:function(){return f[t]},set:function(n){f[t]=n}})},g=u(f),y=0;g.length>y;)v(g[y++]);l.constructor=s,s.prototype=l,e(11)(r,"RegExp",s)}e(43)("RegExp")},function(t,n,e){"use strict";e(117);var r=e(3),i=e(55),o=e(8),u=/./.toString,a=function(t){e(11)(RegExp.prototype,"toString",t,!0)};e(2)((function(){return"/a/b"!=u.call({source:"a",flags:"b"})}))?a((function(){var t=r(this);return"/".concat(t.source,"/","flags"in t?t.flags:!o&&t instanceof RegExp?i.call(t):void 0)})):"toString"!=u.name&&a((function(){return u.call(this)}))},function(t,n,e){"use strict";var r=e(3),i=e(6),o=e(84),u=e(56);e(57)("match",1,(function(t,n,e,a){return[function(e){var r=t(this),i=null==e?void 0:e[n];return void 0!==i?i.call(e,r):new RegExp(e)[n](String(r))},function(t){var n=a(e,t,this);if(n.done)return n.value;var c=r(t),s=String(this);if(!c.global)return u(c,s);var f=c.unicode;c.lastIndex=0;for(var l,h=[],d=0;null!==(l=u(c,s));){var p=String(l[0]);h[d]=p,""===p&&(c.lastIndex=o(s,i(c.lastIndex),f)),d++}return 0===d?null:h}]}))},function(t,n,e){"use strict";var r=e(3),i=e(10),o=e(6),u=e(21),a=e(84),c=e(56),s=Math.max,f=Math.min,l=Math.floor,h=/\$([$&`']|\d\d?|<[^>]*>)/g,d=/\$([$&`']|\d\d?)/g;e(57)("replace",2,(function(t,n,e,p){return[function(r,i){var o=t(this),u=null==r?void 0:r[n];return void 0!==u?u.call(r,o,i):e.call(String(o),r,i)},function(t,n){var i=p(e,t,this,n);if(i.done)return i.value;var l=r(t),h=String(this),d="function"==typeof n;d||(n=String(n));var g=l.global;if(g){var y=l.unicode;l.lastIndex=0}for(var m=[];;){var b=c(l,h);if(null===b)break;if(m.push(b),!g)break;""===String(b[0])&&(l.lastIndex=a(h,o(l.lastIndex),y))}for(var 
S,w="",_=0,M=0;M=_&&(w+=h.slice(_,P)+N,_=P+x.length)}return w+h.slice(_)}];function v(t,n,r,o,u,a){var c=r+t.length,s=o.length,f=d;return void 0!==u&&(u=i(u),f=h),e.call(a,f,(function(e,i){var a;switch(i.charAt(0)){case"$":return"$";case"&":return t;case"`":return n.slice(0,r);case"'":return n.slice(c);case"<":a=u[i.slice(1,-1)];break;default:var f=+i;if(0===f)return e;if(f>s){var h=l(f/10);return 0===h?e:h<=s?void 0===o[h-1]?i.charAt(1):o[h-1]+i.charAt(1):e}a=o[f-1]}return void 0===a?"":a}))}}))},function(t,n,e){"use strict";var r=e(3),i=e(102),o=e(56);e(57)("search",1,(function(t,n,e,u){return[function(e){var r=t(this),i=null==e?void 0:e[n];return void 0!==i?i.call(e,r):new RegExp(e)[n](String(r))},function(t){var n=u(e,t,this);if(n.done)return n.value;var a=r(t),c=String(this),s=a.lastIndex;i(s,0)||(a.lastIndex=0);var f=o(a,c);return i(a.lastIndex,s)||(a.lastIndex=s),null===f?-1:f.index}]}))},function(t,n,e){"use strict";var r=e(76),i=e(3),o=e(49),u=e(84),a=e(6),c=e(56),s=e(83),f=e(2),l=Math.min,h=[].push,d=!f((function(){RegExp(4294967295,"y")}));e(57)("split",2,(function(t,n,e,f){var p;return p="c"=="abbc".split(/(b)*/)[1]||4!="test".split(/(?:)/,-1).length||2!="ab".split(/(?:ab)*/).length||4!=".".split(/(.?)(.?)/).length||".".split(/()()/).length>1||"".split(/.?/).length?function(t,n){var i=String(this);if(void 0===t&&0===n)return[];if(!r(t))return e.call(i,t,n);for(var o,u,a,c=[],f=(t.ignoreCase?"i":"")+(t.multiline?"m":"")+(t.unicode?"u":"")+(t.sticky?"y":""),l=0,d=void 0===n?4294967295:n>>>0,p=new RegExp(t.source,f+"g");(o=s.call(p,i))&&!((u=p.lastIndex)>l&&(c.push(i.slice(l,o.index)),o.length>1&&o.index=d));)p.lastIndex===o.index&&p.lastIndex++;return l===i.length?!a&&p.test("")||c.push(""):c.push(i.slice(l)),c.length>d?c.slice(0,d):c}:"0".split(void 0,0).length?function(t,n){return void 0===t&&0===n?[]:e.call(this,t,n)}:e,[function(e,r){var i=t(this),o=null==e?void 0:e[n];return void 0!==o?o.call(e,i,r):p.call(String(i),e,r)},function(t,n){var 
r=f(p,t,this,n,p!==e);if(r.done)return r.value;var s=i(t),h=String(this),v=o(s,RegExp),g=s.unicode,y=(s.ignoreCase?"i":"")+(s.multiline?"m":"")+(s.unicode?"u":"")+(d?"y":"g"),m=new v(d?s:"^(?:"+s.source+")",y),b=void 0===n?4294967295:n>>>0;if(0===b)return[];if(0===h.length)return null===c(m,h)?[h]:[];for(var S=0,w=0,_=[];w0?arguments[0]:void 0)}}),{get:function(t){var n=r.getEntry(i(this,"Map"),t);return n&&n.v},set:function(t,n){return r.def(i(this,"Map"),0===t?0:t,n)}},r,!0)},function(t,n,e){"use strict";var r=e(121),i=e(39);t.exports=e(60)("Set",(function(t){return function(){return t(this,arguments.length>0?arguments[0]:void 0)}}),{add:function(t){return r.def(i(this,"Set"),t=0===t?0:t,t)}},r)},function(t,n,e){"use strict";var r,i=e(1),o=e(24)(0),u=e(11),a=e(29),c=e(101),s=e(122),f=e(4),l=e(39),h=e(39),d=!i.ActiveXObject&&"ActiveXObject"in i,p=a.getWeak,v=Object.isExtensible,g=s.ufstore,y=function(t){return function(){return t(this,arguments.length>0?arguments[0]:void 0)}},m={get:function(t){if(f(t)){var n=p(t);return!0===n?g(l(this,"WeakMap")).get(t):n?n[this._i]:void 0}},set:function(t,n){return s.def(l(this,"WeakMap"),t,n)}},b=t.exports=e(60)("WeakMap",y,m,s,!0,!0);h&&d&&(c((r=s.getConstructor(y,"WeakMap")).prototype,m),a.NEED=!0,o(["delete","has","get","set"],(function(t){var n=b.prototype,e=n[t];u(n,t,(function(n,i){if(f(n)&&!v(n)){this._f||(this._f=new r);var o=this._f[t](n,i);return"set"==t?this:o}return e.call(this,n,i)}))})))},function(t,n,e){"use strict";var r=e(122),i=e(39);e(60)("WeakSet",(function(t){return function(){return t(this,arguments.length>0?arguments[0]:void 0)}}),{add:function(t){return r.def(i(this,"WeakSet"),t,!0)}},r,!1,!0)},function(t,n,e){"use strict";var r=e(0),i=e(61),o=e(86),u=e(3),a=e(34),c=e(6),s=e(4),f=e(1).ArrayBuffer,l=e(49),h=o.ArrayBuffer,d=o.DataView,p=i.ABV&&f.isView,v=h.prototype.slice,g=i.VIEW;r(r.G+r.W+r.F*(f!==h),{ArrayBuffer:h}),r(r.S+r.F*!i.CONSTR,"ArrayBuffer",{isView:function(t){return p&&p(t)||s(t)&&g in 
t}}),r(r.P+r.U+r.F*e(2)((function(){return!new h(2).slice(1,void 0).byteLength})),"ArrayBuffer",{slice:function(t,n){if(void 0!==v&&void 0===n)return v.call(u(this),t);for(var e=u(this).byteLength,r=a(t,e),i=a(void 0===n?e:n,e),o=new(l(this,h))(c(i-r)),s=new d(this),f=new d(o),p=0;r=n.length)return{value:void 0,done:!0}}while(!((t=n[this._i++])in this._t));return{value:t,done:!1}})),r(r.S,"Reflect",{enumerate:function(t){return new o(t)}})},function(t,n,e){var r=e(22),i=e(37),o=e(13),u=e(0),a=e(4),c=e(3);u(u.S,"Reflect",{get:function t(n,e){var u,s,f=arguments.length<3?n:arguments[2];return c(n)===f?n[e]:(u=r.f(n,e))?o(u,"value")?u.value:void 0!==u.get?u.get.call(f):void 0:a(s=i(n))?t(s,e,f):void 0}})},function(t,n,e){var r=e(22),i=e(0),o=e(3);i(i.S,"Reflect",{getOwnPropertyDescriptor:function(t,n){return r.f(o(t),n)}})},function(t,n,e){var r=e(0),i=e(37),o=e(3);r(r.S,"Reflect",{getPrototypeOf:function(t){return i(o(t))}})},function(t,n,e){var r=e(0);r(r.S,"Reflect",{has:function(t,n){return n in t}})},function(t,n,e){var r=e(0),i=e(3),o=Object.isExtensible;r(r.S,"Reflect",{isExtensible:function(t){return i(t),!o||o(t)}})},function(t,n,e){var r=e(0);r(r.S,"Reflect",{ownKeys:e(124)})},function(t,n,e){var r=e(0),i=e(3),o=Object.preventExtensions;r(r.S,"Reflect",{preventExtensions:function(t){i(t);try{return o&&o(t),!0}catch(t){return!1}}})},function(t,n,e){var r=e(9),i=e(22),o=e(37),u=e(13),a=e(0),c=e(30),s=e(3),f=e(4);a(a.S,"Reflect",{set:function t(n,e,a){var l,h,d=arguments.length<4?n:arguments[3],p=i.f(s(n),e);if(!p){if(f(h=o(n)))return t(h,e,a,d);p=c(0)}if(u(p,"value")){if(!1===p.writable||!f(d))return!1;if(l=i.f(d,e)){if(l.get||l.set||!1===l.writable)return!1;l.value=a,r.f(d,e,l)}else r.f(d,e,c(0,a));return!0}return void 0!==p.set&&(p.set.call(d,a),!0)}})},function(t,n,e){var r=e(0),i=e(67);i&&r(r.S,"Reflect",{setPrototypeOf:function(t,n){i.check(t,n);try{return 
i.set(t,n),!0}catch(t){return!1}}})},function(t,n,e){e(276),t.exports=e(7).Array.includes},function(t,n,e){"use strict";var r=e(0),i=e(51)(!0);r(r.P,"Array",{includes:function(t){return i(this,t,arguments.length>1?arguments[1]:void 0)}}),e(38)("includes")},function(t,n,e){e(278),t.exports=e(7).Array.flatMap},function(t,n,e){"use strict";var r=e(0),i=e(279),o=e(10),u=e(6),a=e(20),c=e(112);r(r.P,"Array",{flatMap:function(t){var n,e,r=o(this);return a(t),n=u(r.length),e=c(r,0),i(e,r,r,n,0,1,t,arguments[1]),e}}),e(38)("flatMap")},function(t,n,e){"use strict";var r=e(53),i=e(4),o=e(6),u=e(19),a=e(5)("isConcatSpreadable");t.exports=function t(n,e,c,s,f,l,h,d){for(var p,v,g=f,y=0,m=!!h&&u(h,d,3);y0)g=t(n,e,p,o(p.length),g,l-1)-1;else{if(g>=9007199254740991)throw TypeError();n[g]=p}g++}y++}return g}},function(t,n,e){e(281),t.exports=e(7).String.padStart},function(t,n,e){"use strict";var r=e(0),i=e(125),o=e(59),u=/Version\/10\.\d+(\.\d+)?( Mobile\/\w+)? Safari\//.test(o);r(r.P+r.F*u,"String",{padStart:function(t){return i(this,t,arguments.length>1?arguments[1]:void 0,!0)}})},function(t,n,e){e(283),t.exports=e(7).String.padEnd},function(t,n,e){"use strict";var r=e(0),i=e(125),o=e(59),u=/Version\/10\.\d+(\.\d+)?( Mobile\/\w+)? 
Safari\//.test(o);r(r.P+r.F*u,"String",{padEnd:function(t){return i(this,t,arguments.length>1?arguments[1]:void 0,!1)}})},function(t,n,e){e(285),t.exports=e(7).String.trimLeft},function(t,n,e){"use strict";e(41)("trimLeft",(function(t){return function(){return t(this,1)}}),"trimStart")},function(t,n,e){e(287),t.exports=e(7).String.trimRight},function(t,n,e){"use strict";e(41)("trimRight",(function(t){return function(){return t(this,2)}}),"trimEnd")},function(t,n,e){e(289),t.exports=e(63).f("asyncIterator")},function(t,n,e){e(97)("asyncIterator")},function(t,n,e){e(291),t.exports=e(7).Object.getOwnPropertyDescriptors},function(t,n,e){var r=e(0),i=e(124),o=e(15),u=e(22),a=e(79);r(r.S,"Object",{getOwnPropertyDescriptors:function(t){for(var n,e,r=o(t),c=u.f,s=i(r),f={},l=0;s.length>l;)void 0!==(e=c(r,n=s[l++]))&&a(f,n,e);return f}})},function(t,n,e){e(293),t.exports=e(7).Object.values},function(t,n,e){var r=e(0),i=e(126)(!1);r(r.S,"Object",{values:function(t){return i(t)}})},function(t,n,e){e(295),t.exports=e(7).Object.entries},function(t,n,e){var r=e(0),i=e(126)(!0);r(r.S,"Object",{entries:function(t){return i(t)}})},function(t,n,e){"use strict";e(118),e(297),t.exports=e(7).Promise.finally},function(t,n,e){"use strict";var r=e(0),i=e(7),o=e(1),u=e(49),a=e(120);r(r.P+r.R,"Promise",{finally:function(t){var n=u(this,i.Promise||o.Promise),e="function"==typeof t;return this.then(e?function(e){return a(n,t()).then((function(){return e}))}:t,e?function(e){return a(n,t()).then((function(){throw e}))}:t)}})},function(t,n,e){e(299),e(300),e(301),t.exports=e(7)},function(t,n,e){var r=e(1),i=e(0),o=e(59),u=[].slice,a=/MSIE .\./.test(o),c=function(t){return function(n,e){var r=arguments.length>2,i=!!r&&u.call(arguments,2);return t(r?function(){("function"==typeof n?n:Function(n)).apply(this,i)}:n,e)}};i(i.G+i.B+i.F*a,{setTimeout:c(r.setTimeout),setInterval:c(r.setInterval)})},function(t,n,e){var 
r=e(0),i=e(85);r(r.G+r.B,{setImmediate:i.set,clearImmediate:i.clear})},function(t,n,e){for(var r=e(82),i=e(33),o=e(11),u=e(1),a=e(14),c=e(42),s=e(5),f=s("iterator"),l=s("toStringTag"),h=c.Array,d={CSSRuleList:!0,CSSStyleDeclaration:!1,CSSValueList:!1,ClientRectList:!1,DOMRectList:!1,DOMStringList:!1,DOMTokenList:!0,DataTransferItemList:!1,FileList:!1,HTMLAllCollection:!1,HTMLCollection:!1,HTMLFormElement:!1,HTMLSelectElement:!1,MediaList:!0,MimeTypeArray:!1,NamedNodeMap:!1,NodeList:!0,PaintRequestList:!1,Plugin:!1,PluginArray:!1,SVGLengthList:!1,SVGNumberList:!1,SVGPathSegList:!1,SVGPointList:!1,SVGStringList:!1,SVGTransformList:!1,SourceBufferList:!1,StyleSheetList:!0,TextTrackCueList:!1,TextTrackList:!1,TouchList:!1},p=i(d),v=0;v=0;--o){var u=this.tryEntries[o],a=u.completion;if("root"===u.tryLoc)return i("end");if(u.tryLoc<=this.prev){var c=r.call(u,"catchLoc"),s=r.call(u,"finallyLoc");if(c&&s){if(this.prev=0;--e){var i=this.tryEntries[e];if(i.tryLoc<=this.prev&&r.call(i,"finallyLoc")&&this.prev=0;--n){var e=this.tryEntries[n];if(e.finallyLoc===t)return this.complete(e.completion,e.afterLoc),O(e),p}},catch:function(t){for(var n=this.tryEntries.length-1;n>=0;--n){var e=this.tryEntries[n];if(e.tryLoc===t){var r=e.completion;if("throw"===r.type){var i=r.arg;O(e)}return i}}throw new Error("illegal catch attempt")},delegateYield:function(t,e,r){return this.delegate={iterator:A(t),resultName:e,nextLoc:r},"next"===this.method&&(this.arg=n),p}},t}(t.exports);try{regeneratorRuntime=r}catch(t){Function("r","regeneratorRuntime = r")(r)}},function(t,n,e){e(304),t.exports=e(127).global},function(t,n,e){var r=e(305);r(r.G,{global:e(87)})},function(t,n,e){var r=e(87),i=e(127),o=e(306),u=e(308),a=e(315),c=function(t,n,e){var s,f,l,h=t&c.F,d=t&c.G,p=t&c.S,v=t&c.P,g=t&c.B,y=t&c.W,m=d?i:i[n]||(i[n]={}),b=m.prototype,S=d?r:p?r[n]:(r[n]||{}).prototype;for(s in d&&(e=n),e)(f=!h&&S&&void 0!==S[s])&&a(m,s)||(l=f?S[s]:e[s],m[s]=d&&"function"!=typeof 
S[s]?e[s]:g&&f?o(l,r):y&&S[s]==l?function(t){var n=function(n,e,r){if(this instanceof t){switch(arguments.length){case 0:return new t;case 1:return new t(n);case 2:return new t(n,e)}return new t(n,e,r)}return t.apply(this,arguments)};return n.prototype=t.prototype,n}(l):v&&"function"==typeof l?o(Function.call,l):l,v&&((m.virtual||(m.virtual={}))[s]=l,t&c.R&&b&&!b[s]&&u(b,s,l)))};c.F=1,c.G=2,c.S=4,c.P=8,c.B=16,c.W=32,c.U=64,c.R=128,t.exports=c},function(t,n,e){var r=e(307);t.exports=function(t,n,e){if(r(t),void 0===n)return t;switch(e){case 1:return function(e){return t.call(n,e)};case 2:return function(e,r){return t.call(n,e,r)};case 3:return function(e,r,i){return t.call(n,e,r,i)}}return function(){return t.apply(n,arguments)}}},function(t,n){t.exports=function(t){if("function"!=typeof t)throw TypeError(t+" is not a function!");return t}},function(t,n,e){var r=e(309),i=e(314);t.exports=e(89)?function(t,n,e){return r.f(t,n,i(1,e))}:function(t,n,e){return t[n]=e,t}},function(t,n,e){var r=e(310),i=e(311),o=e(313),u=Object.defineProperty;n.f=e(89)?Object.defineProperty:function(t,n,e){if(r(t),n=o(n,!0),r(e),i)try{return u(t,n,e)}catch(t){}if("get"in e||"set"in e)throw TypeError("Accessors not supported!");return"value"in e&&(t[n]=e.value),t}},function(t,n,e){var r=e(88);t.exports=function(t){if(!r(t))throw TypeError(t+" is not an object!");return t}},function(t,n,e){t.exports=!e(89)&&!e(128)((function(){return 7!=Object.defineProperty(e(312)("div"),"a",{get:function(){return 7}}).a}))},function(t,n,e){var r=e(88),i=e(87).document,o=r(i)&&r(i.createElement);t.exports=function(t){return o?i.createElement(t):{}}},function(t,n,e){var r=e(88);t.exports=function(t,n){if(!r(t))return t;var e,i;if(n&&"function"==typeof(e=t.toString)&&!r(i=e.call(t)))return i;if("function"==typeof(e=t.valueOf)&&!r(i=e.call(t)))return i;if(!n&&"function"==typeof(e=t.toString)&&!r(i=e.call(t)))return i;throw TypeError("Can't convert object to primitive 
value")}},function(t,n){t.exports=function(t,n){return{enumerable:!(1&t),configurable:!(2&t),writable:!(4&t),value:n}}},function(t,n){var e={}.hasOwnProperty;t.exports=function(t,n){return e.call(t,n)}}])})); \ No newline at end of file diff --git a/amplify/functions/deleteDocument/node_modules/bowser/es5.js b/amplify/functions/deleteDocument/node_modules/bowser/es5.js new file mode 100644 index 0000000..bb8ec3d --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/bowser/es5.js @@ -0,0 +1 @@ +!function(e,t){"object"==typeof exports&&"object"==typeof module?module.exports=t():"function"==typeof define&&define.amd?define([],t):"object"==typeof exports?exports.bowser=t():e.bowser=t()}(this,(function(){return function(e){var t={};function r(n){if(t[n])return t[n].exports;var i=t[n]={i:n,l:!1,exports:{}};return e[n].call(i.exports,i,i.exports,r),i.l=!0,i.exports}return r.m=e,r.c=t,r.d=function(e,t,n){r.o(e,t)||Object.defineProperty(e,t,{enumerable:!0,get:n})},r.r=function(e){"undefined"!=typeof Symbol&&Symbol.toStringTag&&Object.defineProperty(e,Symbol.toStringTag,{value:"Module"}),Object.defineProperty(e,"__esModule",{value:!0})},r.t=function(e,t){if(1&t&&(e=r(e)),8&t)return e;if(4&t&&"object"==typeof e&&e&&e.__esModule)return e;var n=Object.create(null);if(r.r(n),Object.defineProperty(n,"default",{enumerable:!0,value:e}),2&t&&"string"!=typeof e)for(var i in e)r.d(n,i,function(t){return e[t]}.bind(null,i));return n},r.n=function(e){var t=e&&e.__esModule?function(){return e.default}:function(){return e};return r.d(t,"a",t),t},r.o=function(e,t){return Object.prototype.hasOwnProperty.call(e,t)},r.p="",r(r.s=90)}({17:function(e,t,r){"use strict";t.__esModule=!0,t.default=void 0;var n=r(18),i=function(){function e(){}return e.getFirstMatch=function(e,t){var r=t.match(e);return r&&r.length>0&&r[1]||""},e.getSecondMatch=function(e,t){var r=t.match(e);return r&&r.length>1&&r[2]||""},e.matchAndReturnConst=function(e,t,r){if(e.test(t))return 
r},e.getWindowsVersionName=function(e){switch(e){case"NT":return"NT";case"XP":return"XP";case"NT 5.0":return"2000";case"NT 5.1":return"XP";case"NT 5.2":return"2003";case"NT 6.0":return"Vista";case"NT 6.1":return"7";case"NT 6.2":return"8";case"NT 6.3":return"8.1";case"NT 10.0":return"10";default:return}},e.getMacOSVersionName=function(e){var t=e.split(".").splice(0,2).map((function(e){return parseInt(e,10)||0}));if(t.push(0),10===t[0])switch(t[1]){case 5:return"Leopard";case 6:return"Snow Leopard";case 7:return"Lion";case 8:return"Mountain Lion";case 9:return"Mavericks";case 10:return"Yosemite";case 11:return"El Capitan";case 12:return"Sierra";case 13:return"High Sierra";case 14:return"Mojave";case 15:return"Catalina";default:return}},e.getAndroidVersionName=function(e){var t=e.split(".").splice(0,2).map((function(e){return parseInt(e,10)||0}));if(t.push(0),!(1===t[0]&&t[1]<5))return 1===t[0]&&t[1]<6?"Cupcake":1===t[0]&&t[1]>=6?"Donut":2===t[0]&&t[1]<2?"Eclair":2===t[0]&&2===t[1]?"Froyo":2===t[0]&&t[1]>2?"Gingerbread":3===t[0]?"Honeycomb":4===t[0]&&t[1]<1?"Ice Cream Sandwich":4===t[0]&&t[1]<4?"Jelly Bean":4===t[0]&&t[1]>=4?"KitKat":5===t[0]?"Lollipop":6===t[0]?"Marshmallow":7===t[0]?"Nougat":8===t[0]?"Oreo":9===t[0]?"Pie":void 0},e.getVersionPrecision=function(e){return e.split(".").length},e.compareVersions=function(t,r,n){void 0===n&&(n=!1);var i=e.getVersionPrecision(t),s=e.getVersionPrecision(r),a=Math.max(i,s),o=0,u=e.map([t,r],(function(t){var r=a-e.getVersionPrecision(t),n=t+new Array(r+1).join(".0");return e.map(n.split("."),(function(e){return new Array(20-e.length).join("0")+e})).reverse()}));for(n&&(o=a-Math.min(i,s)),a-=1;a>=o;){if(u[0][a]>u[1][a])return 1;if(u[0][a]===u[1][a]){if(a===o)return 0;a-=1}else if(u[0][a]1?i-1:0),a=1;a0){var a=Object.keys(r),u=o.default.find(a,(function(e){return t.isOS(e)}));if(u){var d=this.satisfies(r[u]);if(void 0!==d)return d}var c=o.default.find(a,(function(e){return t.isPlatform(e)}));if(c){var 
f=this.satisfies(r[c]);if(void 0!==f)return f}}if(s>0){var l=Object.keys(i),h=o.default.find(l,(function(e){return t.isBrowser(e,!0)}));if(void 0!==h)return this.compareVersion(i[h])}},t.isBrowser=function(e,t){void 0===t&&(t=!1);var r=this.getBrowserName().toLowerCase(),n=e.toLowerCase(),i=o.default.getBrowserTypeByAlias(n);return t&&i&&(n=i.toLowerCase()),n===r},t.compareVersion=function(e){var t=[0],r=e,n=!1,i=this.getBrowserVersion();if("string"==typeof i)return">"===e[0]||"<"===e[0]?(r=e.substr(1),"="===e[1]?(n=!0,r=e.substr(2)):t=[],">"===e[0]?t.push(1):t.push(-1)):"="===e[0]?r=e.substr(1):"~"===e[0]&&(n=!0,r=e.substr(1)),t.indexOf(o.default.compareVersions(i,r,n))>-1},t.isOS=function(e){return this.getOSName(!0)===String(e).toLowerCase()},t.isPlatform=function(e){return this.getPlatformType(!0)===String(e).toLowerCase()},t.isEngine=function(e){return this.getEngineName(!0)===String(e).toLowerCase()},t.is=function(e,t){return void 0===t&&(t=!1),this.isBrowser(e,t)||this.isOS(e)||this.isPlatform(e)},t.some=function(e){var t=this;return void 0===e&&(e=[]),e.some((function(e){return t.is(e)}))},e}();t.default=d,e.exports=t.default},92:function(e,t,r){"use strict";t.__esModule=!0,t.default=void 0;var n,i=(n=r(17))&&n.__esModule?n:{default:n};var s=/version\/(\d+(\.?_?\d+)+)/i,a=[{test:[/googlebot/i],describe:function(e){var t={name:"Googlebot"},r=i.default.getFirstMatch(/googlebot\/(\d+(\.\d+))/i,e)||i.default.getFirstMatch(s,e);return r&&(t.version=r),t}},{test:[/opera/i],describe:function(e){var t={name:"Opera"},r=i.default.getFirstMatch(s,e)||i.default.getFirstMatch(/(?:opera)[\s/](\d+(\.?_?\d+)+)/i,e);return r&&(t.version=r),t}},{test:[/opr\/|opios/i],describe:function(e){var t={name:"Opera"},r=i.default.getFirstMatch(/(?:opr|opios)[\s/](\S+)/i,e)||i.default.getFirstMatch(s,e);return r&&(t.version=r),t}},{test:[/SamsungBrowser/i],describe:function(e){var t={name:"Samsung Internet for 
Android"},r=i.default.getFirstMatch(s,e)||i.default.getFirstMatch(/(?:SamsungBrowser)[\s/](\d+(\.?_?\d+)+)/i,e);return r&&(t.version=r),t}},{test:[/Whale/i],describe:function(e){var t={name:"NAVER Whale Browser"},r=i.default.getFirstMatch(s,e)||i.default.getFirstMatch(/(?:whale)[\s/](\d+(?:\.\d+)+)/i,e);return r&&(t.version=r),t}},{test:[/MZBrowser/i],describe:function(e){var t={name:"MZ Browser"},r=i.default.getFirstMatch(/(?:MZBrowser)[\s/](\d+(?:\.\d+)+)/i,e)||i.default.getFirstMatch(s,e);return r&&(t.version=r),t}},{test:[/focus/i],describe:function(e){var t={name:"Focus"},r=i.default.getFirstMatch(/(?:focus)[\s/](\d+(?:\.\d+)+)/i,e)||i.default.getFirstMatch(s,e);return r&&(t.version=r),t}},{test:[/swing/i],describe:function(e){var t={name:"Swing"},r=i.default.getFirstMatch(/(?:swing)[\s/](\d+(?:\.\d+)+)/i,e)||i.default.getFirstMatch(s,e);return r&&(t.version=r),t}},{test:[/coast/i],describe:function(e){var t={name:"Opera Coast"},r=i.default.getFirstMatch(s,e)||i.default.getFirstMatch(/(?:coast)[\s/](\d+(\.?_?\d+)+)/i,e);return r&&(t.version=r),t}},{test:[/opt\/\d+(?:.?_?\d+)+/i],describe:function(e){var t={name:"Opera Touch"},r=i.default.getFirstMatch(/(?:opt)[\s/](\d+(\.?_?\d+)+)/i,e)||i.default.getFirstMatch(s,e);return r&&(t.version=r),t}},{test:[/yabrowser/i],describe:function(e){var t={name:"Yandex Browser"},r=i.default.getFirstMatch(/(?:yabrowser)[\s/](\d+(\.?_?\d+)+)/i,e)||i.default.getFirstMatch(s,e);return r&&(t.version=r),t}},{test:[/ucbrowser/i],describe:function(e){var t={name:"UC Browser"},r=i.default.getFirstMatch(s,e)||i.default.getFirstMatch(/(?:ucbrowser)[\s/](\d+(\.?_?\d+)+)/i,e);return r&&(t.version=r),t}},{test:[/Maxthon|mxios/i],describe:function(e){var t={name:"Maxthon"},r=i.default.getFirstMatch(s,e)||i.default.getFirstMatch(/(?:Maxthon|mxios)[\s/](\d+(\.?_?\d+)+)/i,e);return r&&(t.version=r),t}},{test:[/epiphany/i],describe:function(e){var 
t={name:"Epiphany"},r=i.default.getFirstMatch(s,e)||i.default.getFirstMatch(/(?:epiphany)[\s/](\d+(\.?_?\d+)+)/i,e);return r&&(t.version=r),t}},{test:[/puffin/i],describe:function(e){var t={name:"Puffin"},r=i.default.getFirstMatch(s,e)||i.default.getFirstMatch(/(?:puffin)[\s/](\d+(\.?_?\d+)+)/i,e);return r&&(t.version=r),t}},{test:[/sleipnir/i],describe:function(e){var t={name:"Sleipnir"},r=i.default.getFirstMatch(s,e)||i.default.getFirstMatch(/(?:sleipnir)[\s/](\d+(\.?_?\d+)+)/i,e);return r&&(t.version=r),t}},{test:[/k-meleon/i],describe:function(e){var t={name:"K-Meleon"},r=i.default.getFirstMatch(s,e)||i.default.getFirstMatch(/(?:k-meleon)[\s/](\d+(\.?_?\d+)+)/i,e);return r&&(t.version=r),t}},{test:[/micromessenger/i],describe:function(e){var t={name:"WeChat"},r=i.default.getFirstMatch(/(?:micromessenger)[\s/](\d+(\.?_?\d+)+)/i,e)||i.default.getFirstMatch(s,e);return r&&(t.version=r),t}},{test:[/qqbrowser/i],describe:function(e){var t={name:/qqbrowserlite/i.test(e)?"QQ Browser Lite":"QQ Browser"},r=i.default.getFirstMatch(/(?:qqbrowserlite|qqbrowser)[/](\d+(\.?_?\d+)+)/i,e)||i.default.getFirstMatch(s,e);return r&&(t.version=r),t}},{test:[/msie|trident/i],describe:function(e){var t={name:"Internet Explorer"},r=i.default.getFirstMatch(/(?:msie |rv:)(\d+(\.?_?\d+)+)/i,e);return r&&(t.version=r),t}},{test:[/\sedg\//i],describe:function(e){var t={name:"Microsoft Edge"},r=i.default.getFirstMatch(/\sedg\/(\d+(\.?_?\d+)+)/i,e);return r&&(t.version=r),t}},{test:[/edg([ea]|ios)/i],describe:function(e){var t={name:"Microsoft Edge"},r=i.default.getSecondMatch(/edg([ea]|ios)\/(\d+(\.?_?\d+)+)/i,e);return r&&(t.version=r),t}},{test:[/vivaldi/i],describe:function(e){var t={name:"Vivaldi"},r=i.default.getFirstMatch(/vivaldi\/(\d+(\.?_?\d+)+)/i,e);return r&&(t.version=r),t}},{test:[/seamonkey/i],describe:function(e){var t={name:"SeaMonkey"},r=i.default.getFirstMatch(/seamonkey\/(\d+(\.?_?\d+)+)/i,e);return r&&(t.version=r),t}},{test:[/sailfish/i],describe:function(e){var 
t={name:"Sailfish"},r=i.default.getFirstMatch(/sailfish\s?browser\/(\d+(\.\d+)?)/i,e);return r&&(t.version=r),t}},{test:[/silk/i],describe:function(e){var t={name:"Amazon Silk"},r=i.default.getFirstMatch(/silk\/(\d+(\.?_?\d+)+)/i,e);return r&&(t.version=r),t}},{test:[/phantom/i],describe:function(e){var t={name:"PhantomJS"},r=i.default.getFirstMatch(/phantomjs\/(\d+(\.?_?\d+)+)/i,e);return r&&(t.version=r),t}},{test:[/slimerjs/i],describe:function(e){var t={name:"SlimerJS"},r=i.default.getFirstMatch(/slimerjs\/(\d+(\.?_?\d+)+)/i,e);return r&&(t.version=r),t}},{test:[/blackberry|\bbb\d+/i,/rim\stablet/i],describe:function(e){var t={name:"BlackBerry"},r=i.default.getFirstMatch(s,e)||i.default.getFirstMatch(/blackberry[\d]+\/(\d+(\.?_?\d+)+)/i,e);return r&&(t.version=r),t}},{test:[/(web|hpw)[o0]s/i],describe:function(e){var t={name:"WebOS Browser"},r=i.default.getFirstMatch(s,e)||i.default.getFirstMatch(/w(?:eb)?[o0]sbrowser\/(\d+(\.?_?\d+)+)/i,e);return r&&(t.version=r),t}},{test:[/bada/i],describe:function(e){var t={name:"Bada"},r=i.default.getFirstMatch(/dolfin\/(\d+(\.?_?\d+)+)/i,e);return r&&(t.version=r),t}},{test:[/tizen/i],describe:function(e){var t={name:"Tizen"},r=i.default.getFirstMatch(/(?:tizen\s?)?browser\/(\d+(\.?_?\d+)+)/i,e)||i.default.getFirstMatch(s,e);return r&&(t.version=r),t}},{test:[/qupzilla/i],describe:function(e){var t={name:"QupZilla"},r=i.default.getFirstMatch(/(?:qupzilla)[\s/](\d+(\.?_?\d+)+)/i,e)||i.default.getFirstMatch(s,e);return r&&(t.version=r),t}},{test:[/firefox|iceweasel|fxios/i],describe:function(e){var t={name:"Firefox"},r=i.default.getFirstMatch(/(?:firefox|iceweasel|fxios)[\s/](\d+(\.?_?\d+)+)/i,e);return r&&(t.version=r),t}},{test:[/electron/i],describe:function(e){var t={name:"Electron"},r=i.default.getFirstMatch(/(?:electron)\/(\d+(\.?_?\d+)+)/i,e);return r&&(t.version=r),t}},{test:[/MiuiBrowser/i],describe:function(e){var t={name:"Miui"},r=i.default.getFirstMatch(/(?:MiuiBrowser)[\s/](\d+(\.?_?\d+)+)/i,e);return 
r&&(t.version=r),t}},{test:[/chromium/i],describe:function(e){var t={name:"Chromium"},r=i.default.getFirstMatch(/(?:chromium)[\s/](\d+(\.?_?\d+)+)/i,e)||i.default.getFirstMatch(s,e);return r&&(t.version=r),t}},{test:[/chrome|crios|crmo/i],describe:function(e){var t={name:"Chrome"},r=i.default.getFirstMatch(/(?:chrome|crios|crmo)\/(\d+(\.?_?\d+)+)/i,e);return r&&(t.version=r),t}},{test:[/GSA/i],describe:function(e){var t={name:"Google Search"},r=i.default.getFirstMatch(/(?:GSA)\/(\d+(\.?_?\d+)+)/i,e);return r&&(t.version=r),t}},{test:function(e){var t=!e.test(/like android/i),r=e.test(/android/i);return t&&r},describe:function(e){var t={name:"Android Browser"},r=i.default.getFirstMatch(s,e);return r&&(t.version=r),t}},{test:[/playstation 4/i],describe:function(e){var t={name:"PlayStation 4"},r=i.default.getFirstMatch(s,e);return r&&(t.version=r),t}},{test:[/safari|applewebkit/i],describe:function(e){var t={name:"Safari"},r=i.default.getFirstMatch(s,e);return r&&(t.version=r),t}},{test:[/.*/i],describe:function(e){var t=-1!==e.search("\\(")?/^(.*)\/(.*)[ \t]\((.*)/:/^(.*)\/(.*) /;return{name:i.default.getFirstMatch(t,e),version:i.default.getSecondMatch(t,e)}}}];t.default=a,e.exports=t.default},93:function(e,t,r){"use strict";t.__esModule=!0,t.default=void 0;var n,i=(n=r(17))&&n.__esModule?n:{default:n},s=r(18);var a=[{test:[/Roku\/DVP/],describe:function(e){var t=i.default.getFirstMatch(/Roku\/DVP-(\d+\.\d+)/i,e);return{name:s.OS_MAP.Roku,version:t}}},{test:[/windows phone/i],describe:function(e){var t=i.default.getFirstMatch(/windows phone (?:os)?\s?(\d+(\.\d+)*)/i,e);return{name:s.OS_MAP.WindowsPhone,version:t}}},{test:[/windows /i],describe:function(e){var t=i.default.getFirstMatch(/Windows ((NT|XP)( \d\d?.\d)?)/i,e),r=i.default.getWindowsVersionName(t);return{name:s.OS_MAP.Windows,version:t,versionName:r}}},{test:[/Macintosh(.*?) 
FxiOS(.*?)\//],describe:function(e){var t={name:s.OS_MAP.iOS},r=i.default.getSecondMatch(/(Version\/)(\d[\d.]+)/,e);return r&&(t.version=r),t}},{test:[/macintosh/i],describe:function(e){var t=i.default.getFirstMatch(/mac os x (\d+(\.?_?\d+)+)/i,e).replace(/[_\s]/g,"."),r=i.default.getMacOSVersionName(t),n={name:s.OS_MAP.MacOS,version:t};return r&&(n.versionName=r),n}},{test:[/(ipod|iphone|ipad)/i],describe:function(e){var t=i.default.getFirstMatch(/os (\d+([_\s]\d+)*) like mac os x/i,e).replace(/[_\s]/g,".");return{name:s.OS_MAP.iOS,version:t}}},{test:function(e){var t=!e.test(/like android/i),r=e.test(/android/i);return t&&r},describe:function(e){var t=i.default.getFirstMatch(/android[\s/-](\d+(\.\d+)*)/i,e),r=i.default.getAndroidVersionName(t),n={name:s.OS_MAP.Android,version:t};return r&&(n.versionName=r),n}},{test:[/(web|hpw)[o0]s/i],describe:function(e){var t=i.default.getFirstMatch(/(?:web|hpw)[o0]s\/(\d+(\.\d+)*)/i,e),r={name:s.OS_MAP.WebOS};return t&&t.length&&(r.version=t),r}},{test:[/blackberry|\bbb\d+/i,/rim\stablet/i],describe:function(e){var t=i.default.getFirstMatch(/rim\stablet\sos\s(\d+(\.\d+)*)/i,e)||i.default.getFirstMatch(/blackberry\d+\/(\d+([_\s]\d+)*)/i,e)||i.default.getFirstMatch(/\bbb(\d+)/i,e);return{name:s.OS_MAP.BlackBerry,version:t}}},{test:[/bada/i],describe:function(e){var t=i.default.getFirstMatch(/bada\/(\d+(\.\d+)*)/i,e);return{name:s.OS_MAP.Bada,version:t}}},{test:[/tizen/i],describe:function(e){var t=i.default.getFirstMatch(/tizen[/\s](\d+(\.\d+)*)/i,e);return{name:s.OS_MAP.Tizen,version:t}}},{test:[/linux/i],describe:function(){return{name:s.OS_MAP.Linux}}},{test:[/CrOS/],describe:function(){return{name:s.OS_MAP.ChromeOS}}},{test:[/PlayStation 4/],describe:function(e){var t=i.default.getFirstMatch(/PlayStation 4[/\s](\d+(\.\d+)*)/i,e);return{name:s.OS_MAP.PlayStation4,version:t}}}];t.default=a,e.exports=t.default},94:function(e,t,r){"use strict";t.__esModule=!0,t.default=void 0;var 
n,i=(n=r(17))&&n.__esModule?n:{default:n},s=r(18);var a=[{test:[/googlebot/i],describe:function(){return{type:"bot",vendor:"Google"}}},{test:[/huawei/i],describe:function(e){var t=i.default.getFirstMatch(/(can-l01)/i,e)&&"Nova",r={type:s.PLATFORMS_MAP.mobile,vendor:"Huawei"};return t&&(r.model=t),r}},{test:[/nexus\s*(?:7|8|9|10).*/i],describe:function(){return{type:s.PLATFORMS_MAP.tablet,vendor:"Nexus"}}},{test:[/ipad/i],describe:function(){return{type:s.PLATFORMS_MAP.tablet,vendor:"Apple",model:"iPad"}}},{test:[/Macintosh(.*?) FxiOS(.*?)\//],describe:function(){return{type:s.PLATFORMS_MAP.tablet,vendor:"Apple",model:"iPad"}}},{test:[/kftt build/i],describe:function(){return{type:s.PLATFORMS_MAP.tablet,vendor:"Amazon",model:"Kindle Fire HD 7"}}},{test:[/silk/i],describe:function(){return{type:s.PLATFORMS_MAP.tablet,vendor:"Amazon"}}},{test:[/tablet(?! pc)/i],describe:function(){return{type:s.PLATFORMS_MAP.tablet}}},{test:function(e){var t=e.test(/ipod|iphone/i),r=e.test(/like (ipod|iphone)/i);return t&&!r},describe:function(e){var t=i.default.getFirstMatch(/(ipod|iphone)/i,e);return{type:s.PLATFORMS_MAP.mobile,vendor:"Apple",model:t}}},{test:[/nexus\s*[0-6].*/i,/galaxy nexus/i],describe:function(){return{type:s.PLATFORMS_MAP.mobile,vendor:"Nexus"}}},{test:[/[^-]mobi/i],describe:function(){return{type:s.PLATFORMS_MAP.mobile}}},{test:function(e){return"blackberry"===e.getBrowserName(!0)},describe:function(){return{type:s.PLATFORMS_MAP.mobile,vendor:"BlackBerry"}}},{test:function(e){return"bada"===e.getBrowserName(!0)},describe:function(){return{type:s.PLATFORMS_MAP.mobile}}},{test:function(e){return"windows phone"===e.getBrowserName()},describe:function(){return{type:s.PLATFORMS_MAP.mobile,vendor:"Microsoft"}}},{test:function(e){var 
t=Number(String(e.getOSVersion()).split(".")[0]);return"android"===e.getOSName(!0)&&t>=3},describe:function(){return{type:s.PLATFORMS_MAP.tablet}}},{test:function(e){return"android"===e.getOSName(!0)},describe:function(){return{type:s.PLATFORMS_MAP.mobile}}},{test:function(e){return"macos"===e.getOSName(!0)},describe:function(){return{type:s.PLATFORMS_MAP.desktop,vendor:"Apple"}}},{test:function(e){return"windows"===e.getOSName(!0)},describe:function(){return{type:s.PLATFORMS_MAP.desktop}}},{test:function(e){return"linux"===e.getOSName(!0)},describe:function(){return{type:s.PLATFORMS_MAP.desktop}}},{test:function(e){return"playstation 4"===e.getOSName(!0)},describe:function(){return{type:s.PLATFORMS_MAP.tv}}},{test:function(e){return"roku"===e.getOSName(!0)},describe:function(){return{type:s.PLATFORMS_MAP.tv}}}];t.default=a,e.exports=t.default},95:function(e,t,r){"use strict";t.__esModule=!0,t.default=void 0;var n,i=(n=r(17))&&n.__esModule?n:{default:n},s=r(18);var a=[{test:function(e){return"microsoft edge"===e.getBrowserName(!0)},describe:function(e){if(/\sedg\//i.test(e))return{name:s.ENGINE_MAP.Blink};var t=i.default.getFirstMatch(/edge\/(\d+(\.?_?\d+)+)/i,e);return{name:s.ENGINE_MAP.EdgeHTML,version:t}}},{test:[/trident/i],describe:function(e){var t={name:s.ENGINE_MAP.Trident},r=i.default.getFirstMatch(/trident\/(\d+(\.?_?\d+)+)/i,e);return r&&(t.version=r),t}},{test:function(e){return e.test(/presto/i)},describe:function(e){var t={name:s.ENGINE_MAP.Presto},r=i.default.getFirstMatch(/presto\/(\d+(\.?_?\d+)+)/i,e);return r&&(t.version=r),t}},{test:function(e){var t=e.test(/gecko/i),r=e.test(/like gecko/i);return t&&!r},describe:function(e){var t={name:s.ENGINE_MAP.Gecko},r=i.default.getFirstMatch(/gecko\/(\d+(\.?_?\d+)+)/i,e);return r&&(t.version=r),t}},{test:[/(apple)?webkit\/537\.36/i],describe:function(){return{name:s.ENGINE_MAP.Blink}}},{test:[/(apple)?webkit/i],describe:function(e){var 
t={name:s.ENGINE_MAP.WebKit},r=i.default.getFirstMatch(/webkit\/(\d+(\.?_?\d+)+)/i,e);return r&&(t.version=r),t}}];t.default=a,e.exports=t.default}})})); \ No newline at end of file diff --git a/amplify/functions/deleteDocument/node_modules/bowser/index.d.ts b/amplify/functions/deleteDocument/node_modules/bowser/index.d.ts new file mode 100644 index 0000000..d95656a --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/bowser/index.d.ts @@ -0,0 +1,250 @@ +// Type definitions for Bowser v2 +// Project: https://github.com/lancedikson/bowser +// Definitions by: Alexander P. Cerutti , + +export = Bowser; +export as namespace Bowser; + +declare namespace Bowser { + /** + * Creates a Parser instance + * @param {string} UA - User agent string + * @param {boolean} skipParsing + */ + + function getParser(UA: string, skipParsing?: boolean): Parser.Parser; + + /** + * Creates a Parser instance and runs Parser.getResult immediately + * @param UA - User agent string + * @returns {Parser.ParsedResult} + */ + + function parse(UA: string): Parser.ParsedResult; + + /** + * Constants exposed via bowser getters + */ + const BROWSER_MAP: Record; + const ENGINE_MAP: Record; + const OS_MAP: Record; + const PLATFORMS_MAP: Record; + + namespace Parser { + interface Parser { + constructor(UA: string, skipParsing?: boolean): Parser.Parser; + + /** + * Get parsed browser object + * @return {BrowserDetails} Browser's details + */ + + getBrowser(): BrowserDetails; + + /** + * Get browser's name + * @param {Boolean} [toLowerCase] return lower-cased value + * @return {String} Browser's name or an empty string + */ + + getBrowserName(toLowerCase?: boolean): string; + + /** + * Get browser's version + * @return {String} version of browser + */ + + getBrowserVersion(): string; + + /** + * Get OS + * @return {OSDetails} - OS Details + * + * @example + * this.getOS(); // { + * // name: 'macOS', + * // version: '10.11.12', + * // } + */ + + getOS(): OSDetails; + + /** + * Get OS name + * 
@param {Boolean} [toLowerCase] return lower-cased value + * @return {String} name of the OS — macOS, Windows, Linux, etc. + */ + + getOSName(toLowerCase?: boolean): string; + + /** + * Get OS version + * @return {String} full version with dots ('10.11.12', '5.6', etc) + */ + + getOSVersion(): string; + + /** + * Get parsed platform + * @returns {PlatformDetails} + */ + + getPlatform(): PlatformDetails; + + /** + * Get platform name + * @param {boolean} toLowerCase + */ + + getPlatformType(toLowerCase?: boolean): string; + + /** + * Get parsed engine + * @returns {EngineDetails} + */ + + getEngine(): EngineDetails; + + /** + * Get parsed engine's name + * @returns {String} Engine's name or an empty string + */ + + getEngineName(): string; + + /** + * Get parsed result + * @return {ParsedResult} + */ + + getResult(): ParsedResult; + + /** + * Get UserAgent string of current Parser instance + * @return {String} User-Agent String of the current object + */ + + getUA(): string; + + /** + * Is anything? 
Check if the browser is called "anything", + * the OS called "anything" or the platform called "anything" + * @param {String} anything + * @returns {Boolean} + */ + + is(anything: any): boolean; + + /** + * Parse full information about the browser + * @returns {Parser.Parser} + */ + + parse(): Parser.Parser; + + /** + * Get parsed browser object + * @returns {BrowserDetails} + */ + + parseBrowser(): BrowserDetails; + + /** + * Get parsed engine + * @returns {EngineDetails} + */ + + parseEngine(): EngineDetails; + + /** + * Parse OS and save it to this.parsedResult.os + * @returns {OSDetails} + */ + + parseOS(): OSDetails; + + /** + * Get parsed platform + * @returns {PlatformDetails} + */ + + parsePlatform(): PlatformDetails; + + /** + * Check if parsed browser matches certain conditions + * + * @param {checkTree} checkTree It's one or two layered object, + * which can include a platform or an OS on the first layer + * and should have browsers specs on the bottom-laying layer + * + * @returns {Boolean|undefined} Whether the browser satisfies the set conditions or not. + * Returns `undefined` when the browser is no described in the checkTree object. 
+ * + * @example + * const browser = new Bowser(UA); + * if (browser.check({chrome: '>118.01.1322' })) + * // or with os + * if (browser.check({windows: { chrome: '>118.01.1322' } })) + * // or with platforms + * if (browser.check({desktop: { chrome: '>118.01.1322' } })) + */ + + satisfies(checkTree: checkTree): boolean | undefined; + + /** + * Check if the browser name equals the passed string + * @param browserName The string to compare with the browser name + * @param [includingAlias=false] The flag showing whether alias will be included into comparison + * @returns {boolean} + */ + + + isBrowser(browserName: string, includingAlias?: boolean): boolean; + + /** + * Check if any of the given values satifies `.is(anything)` + * @param {string[]} anythings + * @returns {boolean} true if at least one condition is satisfied, false otherwise. + */ + + some(anythings: string[]): boolean | undefined; + + /** + * Test a UA string for a regexp + * @param regex + * @returns {boolean} true if the regex matches the UA, false otherwise. 
+ */ + + test(regex: RegExp): boolean; + } + + interface ParsedResult { + browser: BrowserDetails; + os: OSDetails; + platform: PlatformDetails; + engine: EngineDetails; + } + + interface Details { + name?: string; + version?: string; + } + + interface OSDetails extends Details { + versionName?: string; + } + + interface PlatformDetails { + type?: string; + vendor?: string; + model?: string; + } + + type BrowserDetails = Details; + type EngineDetails = Details; + + interface checkTree { + [key: string]: any; + } + } +} diff --git a/amplify/functions/deleteDocument/node_modules/bowser/package.json b/amplify/functions/deleteDocument/node_modules/bowser/package.json new file mode 100644 index 0000000..3fb7c83 --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/bowser/package.json @@ -0,0 +1,83 @@ +{ + "name": "bowser", + "version": "2.11.0", + "description": "Lightweight browser detector", + "keywords": [ + "browser", + "useragent", + "user-agent", + "parser", + "ua", + "detection", + "ender", + "sniff" + ], + "homepage": "https://github.com/lancedikson/bowser", + "author": "Dustin Diaz (http://dustindiaz.com)", + "contributors": [ + { + "name": "Denis Demchenko", + "url": "http://twitter.com/lancedikson" + } + ], + "main": "es5.js", + "browser": "es5.js", + "module": "src/bowser.js", + "types": "index.d.ts", + "repository": { + "type": "git", + "url": "git+https://github.com/lancedikson/bowser.git" + }, + "devDependencies": { + "@babel/cli": "^7.11.6", + "@babel/core": "^7.8.0", + "@babel/polyfill": "^7.8.3", + "@babel/preset-env": "^7.8.2", + "@babel/register": "^7.8.3", + "ava": "^3.0.0", + "babel-eslint": "^10.0.3", + "babel-loader": "^8.0.6", + "babel-plugin-add-module-exports": "^1.0.2", + "babel-plugin-istanbul": "^6.0.0", + "compression-webpack-plugin": "^4.0.0", + "coveralls": "^3.0.6", + "docdash": "^1.1.1", + "eslint": "^6.5.1", + "eslint-config-airbnb-base": "^13.2.0", + "eslint-plugin-ava": "^10.0.0", + "eslint-plugin-import": "^2.18.2", + 
"gh-pages": "^3.0.0", + "jsdoc": "^3.6.3", + "nyc": "^15.0.0", + "sinon": "^9.0.0", + "testem": "^3.0.0", + "webpack": "^4.41.0", + "webpack-bundle-analyzer": "^3.5.2", + "webpack-cli": "^3.3.9", + "yamljs": "^0.3.0" + }, + "ava": { + "require": [ + "@babel/register" + ] + }, + "bugs": { + "url": "https://github.com/lancedikson/bowser/issues" + }, + "directories": { + "test": "test" + }, + "scripts": { + "build": "webpack --config webpack.config.js", + "generate-and-deploy-docs": "npm run generate-docs && gh-pages --dist docs --dest docs", + "watch": "webpack --watch --config webpack.config.js", + "prepublishOnly": "npm run build", + "lint": "eslint ./src", + "testem": "testem", + "test": "nyc --reporter=html --reporter=text ava", + "test:watch": "ava --watch", + "coverage": "nyc report --reporter=text-lcov | coveralls", + "generate-docs": "jsdoc -c jsdoc.json" + }, + "license": "MIT" +} diff --git a/amplify/functions/deleteDocument/node_modules/bowser/src/bowser.js b/amplify/functions/deleteDocument/node_modules/bowser/src/bowser.js new file mode 100644 index 0000000..f79e6e0 --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/bowser/src/bowser.js @@ -0,0 +1,77 @@ +/*! + * Bowser - a browser detector + * https://github.com/lancedikson/bowser + * MIT License | (c) Dustin Diaz 2012-2015 + * MIT License | (c) Denis Demchenko 2015-2019 + */ +import Parser from './parser.js'; +import { + BROWSER_MAP, + ENGINE_MAP, + OS_MAP, + PLATFORMS_MAP, +} from './constants.js'; + +/** + * Bowser class. + * Keep it simple as much as it can be. + * It's supposed to work with collections of {@link Parser} instances + * rather then solve one-instance problems. + * All the one-instance stuff is located in Parser class. 
+ * + * @class + * @classdesc Bowser is a static object, that provides an API to the Parsers + * @hideconstructor + */ +class Bowser { + /** + * Creates a {@link Parser} instance + * + * @param {String} UA UserAgent string + * @param {Boolean} [skipParsing=false] Will make the Parser postpone parsing until you ask it + * explicitly. Same as `skipParsing` for {@link Parser}. + * @returns {Parser} + * @throws {Error} when UA is not a String + * + * @example + * const parser = Bowser.getParser(window.navigator.userAgent); + * const result = parser.getResult(); + */ + static getParser(UA, skipParsing = false) { + if (typeof UA !== 'string') { + throw new Error('UserAgent should be a string'); + } + return new Parser(UA, skipParsing); + } + + /** + * Creates a {@link Parser} instance and runs {@link Parser.getResult} immediately + * + * @param UA + * @return {ParsedResult} + * + * @example + * const result = Bowser.parse(window.navigator.userAgent); + */ + static parse(UA) { + return (new Parser(UA)).getResult(); + } + + static get BROWSER_MAP() { + return BROWSER_MAP; + } + + static get ENGINE_MAP() { + return ENGINE_MAP; + } + + static get OS_MAP() { + return OS_MAP; + } + + static get PLATFORMS_MAP() { + return PLATFORMS_MAP; + } +} + +export default Bowser; diff --git a/amplify/functions/deleteDocument/node_modules/bowser/src/constants.js b/amplify/functions/deleteDocument/node_modules/bowser/src/constants.js new file mode 100644 index 0000000..f335032 --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/bowser/src/constants.js @@ -0,0 +1,116 @@ +// NOTE: this list must be up-to-date with browsers listed in +// test/acceptance/useragentstrings.yml +export const BROWSER_ALIASES_MAP = { + 'Amazon Silk': 'amazon_silk', + 'Android Browser': 'android', + Bada: 'bada', + BlackBerry: 'blackberry', + Chrome: 'chrome', + Chromium: 'chromium', + Electron: 'electron', + Epiphany: 'epiphany', + Firefox: 'firefox', + Focus: 'focus', + Generic: 'generic', + 'Google 
Search': 'google_search', + Googlebot: 'googlebot', + 'Internet Explorer': 'ie', + 'K-Meleon': 'k_meleon', + Maxthon: 'maxthon', + 'Microsoft Edge': 'edge', + 'MZ Browser': 'mz', + 'NAVER Whale Browser': 'naver', + Opera: 'opera', + 'Opera Coast': 'opera_coast', + PhantomJS: 'phantomjs', + Puffin: 'puffin', + QupZilla: 'qupzilla', + QQ: 'qq', + QQLite: 'qqlite', + Safari: 'safari', + Sailfish: 'sailfish', + 'Samsung Internet for Android': 'samsung_internet', + SeaMonkey: 'seamonkey', + Sleipnir: 'sleipnir', + Swing: 'swing', + Tizen: 'tizen', + 'UC Browser': 'uc', + Vivaldi: 'vivaldi', + 'WebOS Browser': 'webos', + WeChat: 'wechat', + 'Yandex Browser': 'yandex', + Roku: 'roku', +}; + +export const BROWSER_MAP = { + amazon_silk: 'Amazon Silk', + android: 'Android Browser', + bada: 'Bada', + blackberry: 'BlackBerry', + chrome: 'Chrome', + chromium: 'Chromium', + electron: 'Electron', + epiphany: 'Epiphany', + firefox: 'Firefox', + focus: 'Focus', + generic: 'Generic', + googlebot: 'Googlebot', + google_search: 'Google Search', + ie: 'Internet Explorer', + k_meleon: 'K-Meleon', + maxthon: 'Maxthon', + edge: 'Microsoft Edge', + mz: 'MZ Browser', + naver: 'NAVER Whale Browser', + opera: 'Opera', + opera_coast: 'Opera Coast', + phantomjs: 'PhantomJS', + puffin: 'Puffin', + qupzilla: 'QupZilla', + qq: 'QQ Browser', + qqlite: 'QQ Browser Lite', + safari: 'Safari', + sailfish: 'Sailfish', + samsung_internet: 'Samsung Internet for Android', + seamonkey: 'SeaMonkey', + sleipnir: 'Sleipnir', + swing: 'Swing', + tizen: 'Tizen', + uc: 'UC Browser', + vivaldi: 'Vivaldi', + webos: 'WebOS Browser', + wechat: 'WeChat', + yandex: 'Yandex Browser', +}; + +export const PLATFORMS_MAP = { + tablet: 'tablet', + mobile: 'mobile', + desktop: 'desktop', + tv: 'tv', +}; + +export const OS_MAP = { + WindowsPhone: 'Windows Phone', + Windows: 'Windows', + MacOS: 'macOS', + iOS: 'iOS', + Android: 'Android', + WebOS: 'WebOS', + BlackBerry: 'BlackBerry', + Bada: 'Bada', + Tizen: 'Tizen', + Linux: 
'Linux', + ChromeOS: 'Chrome OS', + PlayStation4: 'PlayStation 4', + Roku: 'Roku', +}; + +export const ENGINE_MAP = { + EdgeHTML: 'EdgeHTML', + Blink: 'Blink', + Trident: 'Trident', + Presto: 'Presto', + Gecko: 'Gecko', + WebKit: 'WebKit', +}; diff --git a/amplify/functions/deleteDocument/node_modules/bowser/src/parser-browsers.js b/amplify/functions/deleteDocument/node_modules/bowser/src/parser-browsers.js new file mode 100644 index 0000000..ee7840c --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/bowser/src/parser-browsers.js @@ -0,0 +1,700 @@ +/** + * Browsers' descriptors + * + * The idea of descriptors is simple. You should know about them two simple things: + * 1. Every descriptor has a method or property called `test` and a `describe` method. + * 2. Order of descriptors is important. + * + * More details: + * 1. Method or property `test` serves as a way to detect whether the UA string + * matches some certain browser or not. The `describe` method helps to make a result + * object with params that show some browser-specific things: name, version, etc. + * 2. Order of descriptors is important because a Parser goes through them one by one + * in course. For example, if you insert Chrome's descriptor as the first one, + * more then a half of browsers will be described as Chrome, because they will pass + * the Chrome descriptor's test. + * + * Descriptor's `test` could be a property with an array of RegExps, where every RegExp + * will be applied to a UA string to test it whether it matches or not. + * If a descriptor has two or more regexps in the `test` array it tests them one by one + * with a logical sum operation. Parser stops if it has found any RegExp that matches the UA. + * + * Or `test` could be a method. In that case it gets a Parser instance and should + * return true/false to get the Parser know if this browser descriptor matches the UA or not. 
+ */ + +import Utils from './utils.js'; + +const commonVersionIdentifier = /version\/(\d+(\.?_?\d+)+)/i; + +const browsersList = [ + /* Googlebot */ + { + test: [/googlebot/i], + describe(ua) { + const browser = { + name: 'Googlebot', + }; + const version = Utils.getFirstMatch(/googlebot\/(\d+(\.\d+))/i, ua) || Utils.getFirstMatch(commonVersionIdentifier, ua); + + if (version) { + browser.version = version; + } + + return browser; + }, + }, + + /* Opera < 13.0 */ + { + test: [/opera/i], + describe(ua) { + const browser = { + name: 'Opera', + }; + const version = Utils.getFirstMatch(commonVersionIdentifier, ua) || Utils.getFirstMatch(/(?:opera)[\s/](\d+(\.?_?\d+)+)/i, ua); + + if (version) { + browser.version = version; + } + + return browser; + }, + }, + + /* Opera > 13.0 */ + { + test: [/opr\/|opios/i], + describe(ua) { + const browser = { + name: 'Opera', + }; + const version = Utils.getFirstMatch(/(?:opr|opios)[\s/](\S+)/i, ua) || Utils.getFirstMatch(commonVersionIdentifier, ua); + + if (version) { + browser.version = version; + } + + return browser; + }, + }, + { + test: [/SamsungBrowser/i], + describe(ua) { + const browser = { + name: 'Samsung Internet for Android', + }; + const version = Utils.getFirstMatch(commonVersionIdentifier, ua) || Utils.getFirstMatch(/(?:SamsungBrowser)[\s/](\d+(\.?_?\d+)+)/i, ua); + + if (version) { + browser.version = version; + } + + return browser; + }, + }, + { + test: [/Whale/i], + describe(ua) { + const browser = { + name: 'NAVER Whale Browser', + }; + const version = Utils.getFirstMatch(commonVersionIdentifier, ua) || Utils.getFirstMatch(/(?:whale)[\s/](\d+(?:\.\d+)+)/i, ua); + + if (version) { + browser.version = version; + } + + return browser; + }, + }, + { + test: [/MZBrowser/i], + describe(ua) { + const browser = { + name: 'MZ Browser', + }; + const version = Utils.getFirstMatch(/(?:MZBrowser)[\s/](\d+(?:\.\d+)+)/i, ua) || Utils.getFirstMatch(commonVersionIdentifier, ua); + + if (version) { + browser.version = version; + 
} + + return browser; + }, + }, + { + test: [/focus/i], + describe(ua) { + const browser = { + name: 'Focus', + }; + const version = Utils.getFirstMatch(/(?:focus)[\s/](\d+(?:\.\d+)+)/i, ua) || Utils.getFirstMatch(commonVersionIdentifier, ua); + + if (version) { + browser.version = version; + } + + return browser; + }, + }, + { + test: [/swing/i], + describe(ua) { + const browser = { + name: 'Swing', + }; + const version = Utils.getFirstMatch(/(?:swing)[\s/](\d+(?:\.\d+)+)/i, ua) || Utils.getFirstMatch(commonVersionIdentifier, ua); + + if (version) { + browser.version = version; + } + + return browser; + }, + }, + { + test: [/coast/i], + describe(ua) { + const browser = { + name: 'Opera Coast', + }; + const version = Utils.getFirstMatch(commonVersionIdentifier, ua) || Utils.getFirstMatch(/(?:coast)[\s/](\d+(\.?_?\d+)+)/i, ua); + + if (version) { + browser.version = version; + } + + return browser; + }, + }, + { + test: [/opt\/\d+(?:.?_?\d+)+/i], + describe(ua) { + const browser = { + name: 'Opera Touch', + }; + const version = Utils.getFirstMatch(/(?:opt)[\s/](\d+(\.?_?\d+)+)/i, ua) || Utils.getFirstMatch(commonVersionIdentifier, ua); + + if (version) { + browser.version = version; + } + + return browser; + }, + }, + { + test: [/yabrowser/i], + describe(ua) { + const browser = { + name: 'Yandex Browser', + }; + const version = Utils.getFirstMatch(/(?:yabrowser)[\s/](\d+(\.?_?\d+)+)/i, ua) || Utils.getFirstMatch(commonVersionIdentifier, ua); + + if (version) { + browser.version = version; + } + + return browser; + }, + }, + { + test: [/ucbrowser/i], + describe(ua) { + const browser = { + name: 'UC Browser', + }; + const version = Utils.getFirstMatch(commonVersionIdentifier, ua) || Utils.getFirstMatch(/(?:ucbrowser)[\s/](\d+(\.?_?\d+)+)/i, ua); + + if (version) { + browser.version = version; + } + + return browser; + }, + }, + { + test: [/Maxthon|mxios/i], + describe(ua) { + const browser = { + name: 'Maxthon', + }; + const version = 
Utils.getFirstMatch(commonVersionIdentifier, ua) || Utils.getFirstMatch(/(?:Maxthon|mxios)[\s/](\d+(\.?_?\d+)+)/i, ua); + + if (version) { + browser.version = version; + } + + return browser; + }, + }, + { + test: [/epiphany/i], + describe(ua) { + const browser = { + name: 'Epiphany', + }; + const version = Utils.getFirstMatch(commonVersionIdentifier, ua) || Utils.getFirstMatch(/(?:epiphany)[\s/](\d+(\.?_?\d+)+)/i, ua); + + if (version) { + browser.version = version; + } + + return browser; + }, + }, + { + test: [/puffin/i], + describe(ua) { + const browser = { + name: 'Puffin', + }; + const version = Utils.getFirstMatch(commonVersionIdentifier, ua) || Utils.getFirstMatch(/(?:puffin)[\s/](\d+(\.?_?\d+)+)/i, ua); + + if (version) { + browser.version = version; + } + + return browser; + }, + }, + { + test: [/sleipnir/i], + describe(ua) { + const browser = { + name: 'Sleipnir', + }; + const version = Utils.getFirstMatch(commonVersionIdentifier, ua) || Utils.getFirstMatch(/(?:sleipnir)[\s/](\d+(\.?_?\d+)+)/i, ua); + + if (version) { + browser.version = version; + } + + return browser; + }, + }, + { + test: [/k-meleon/i], + describe(ua) { + const browser = { + name: 'K-Meleon', + }; + const version = Utils.getFirstMatch(commonVersionIdentifier, ua) || Utils.getFirstMatch(/(?:k-meleon)[\s/](\d+(\.?_?\d+)+)/i, ua); + + if (version) { + browser.version = version; + } + + return browser; + }, + }, + { + test: [/micromessenger/i], + describe(ua) { + const browser = { + name: 'WeChat', + }; + const version = Utils.getFirstMatch(/(?:micromessenger)[\s/](\d+(\.?_?\d+)+)/i, ua) || Utils.getFirstMatch(commonVersionIdentifier, ua); + + if (version) { + browser.version = version; + } + + return browser; + }, + }, + { + test: [/qqbrowser/i], + describe(ua) { + const browser = { + name: (/qqbrowserlite/i).test(ua) ? 
'QQ Browser Lite' : 'QQ Browser', + }; + const version = Utils.getFirstMatch(/(?:qqbrowserlite|qqbrowser)[/](\d+(\.?_?\d+)+)/i, ua) || Utils.getFirstMatch(commonVersionIdentifier, ua); + + if (version) { + browser.version = version; + } + + return browser; + }, + }, + { + test: [/msie|trident/i], + describe(ua) { + const browser = { + name: 'Internet Explorer', + }; + const version = Utils.getFirstMatch(/(?:msie |rv:)(\d+(\.?_?\d+)+)/i, ua); + + if (version) { + browser.version = version; + } + + return browser; + }, + }, + { + test: [/\sedg\//i], + describe(ua) { + const browser = { + name: 'Microsoft Edge', + }; + + const version = Utils.getFirstMatch(/\sedg\/(\d+(\.?_?\d+)+)/i, ua); + + if (version) { + browser.version = version; + } + + return browser; + }, + }, + { + test: [/edg([ea]|ios)/i], + describe(ua) { + const browser = { + name: 'Microsoft Edge', + }; + + const version = Utils.getSecondMatch(/edg([ea]|ios)\/(\d+(\.?_?\d+)+)/i, ua); + + if (version) { + browser.version = version; + } + + return browser; + }, + }, + { + test: [/vivaldi/i], + describe(ua) { + const browser = { + name: 'Vivaldi', + }; + const version = Utils.getFirstMatch(/vivaldi\/(\d+(\.?_?\d+)+)/i, ua); + + if (version) { + browser.version = version; + } + + return browser; + }, + }, + { + test: [/seamonkey/i], + describe(ua) { + const browser = { + name: 'SeaMonkey', + }; + const version = Utils.getFirstMatch(/seamonkey\/(\d+(\.?_?\d+)+)/i, ua); + + if (version) { + browser.version = version; + } + + return browser; + }, + }, + { + test: [/sailfish/i], + describe(ua) { + const browser = { + name: 'Sailfish', + }; + + const version = Utils.getFirstMatch(/sailfish\s?browser\/(\d+(\.\d+)?)/i, ua); + + if (version) { + browser.version = version; + } + + return browser; + }, + }, + { + test: [/silk/i], + describe(ua) { + const browser = { + name: 'Amazon Silk', + }; + const version = Utils.getFirstMatch(/silk\/(\d+(\.?_?\d+)+)/i, ua); + + if (version) { + browser.version = version; + } + + 
return browser; + }, + }, + { + test: [/phantom/i], + describe(ua) { + const browser = { + name: 'PhantomJS', + }; + const version = Utils.getFirstMatch(/phantomjs\/(\d+(\.?_?\d+)+)/i, ua); + + if (version) { + browser.version = version; + } + + return browser; + }, + }, + { + test: [/slimerjs/i], + describe(ua) { + const browser = { + name: 'SlimerJS', + }; + const version = Utils.getFirstMatch(/slimerjs\/(\d+(\.?_?\d+)+)/i, ua); + + if (version) { + browser.version = version; + } + + return browser; + }, + }, + { + test: [/blackberry|\bbb\d+/i, /rim\stablet/i], + describe(ua) { + const browser = { + name: 'BlackBerry', + }; + const version = Utils.getFirstMatch(commonVersionIdentifier, ua) || Utils.getFirstMatch(/blackberry[\d]+\/(\d+(\.?_?\d+)+)/i, ua); + + if (version) { + browser.version = version; + } + + return browser; + }, + }, + { + test: [/(web|hpw)[o0]s/i], + describe(ua) { + const browser = { + name: 'WebOS Browser', + }; + const version = Utils.getFirstMatch(commonVersionIdentifier, ua) || Utils.getFirstMatch(/w(?:eb)?[o0]sbrowser\/(\d+(\.?_?\d+)+)/i, ua); + + if (version) { + browser.version = version; + } + + return browser; + }, + }, + { + test: [/bada/i], + describe(ua) { + const browser = { + name: 'Bada', + }; + const version = Utils.getFirstMatch(/dolfin\/(\d+(\.?_?\d+)+)/i, ua); + + if (version) { + browser.version = version; + } + + return browser; + }, + }, + { + test: [/tizen/i], + describe(ua) { + const browser = { + name: 'Tizen', + }; + const version = Utils.getFirstMatch(/(?:tizen\s?)?browser\/(\d+(\.?_?\d+)+)/i, ua) || Utils.getFirstMatch(commonVersionIdentifier, ua); + + if (version) { + browser.version = version; + } + + return browser; + }, + }, + { + test: [/qupzilla/i], + describe(ua) { + const browser = { + name: 'QupZilla', + }; + const version = Utils.getFirstMatch(/(?:qupzilla)[\s/](\d+(\.?_?\d+)+)/i, ua) || Utils.getFirstMatch(commonVersionIdentifier, ua); + + if (version) { + browser.version = version; + } + + return 
browser; + }, + }, + { + test: [/firefox|iceweasel|fxios/i], + describe(ua) { + const browser = { + name: 'Firefox', + }; + const version = Utils.getFirstMatch(/(?:firefox|iceweasel|fxios)[\s/](\d+(\.?_?\d+)+)/i, ua); + + if (version) { + browser.version = version; + } + + return browser; + }, + }, + { + test: [/electron/i], + describe(ua) { + const browser = { + name: 'Electron', + }; + const version = Utils.getFirstMatch(/(?:electron)\/(\d+(\.?_?\d+)+)/i, ua); + + if (version) { + browser.version = version; + } + + return browser; + }, + }, + { + test: [/MiuiBrowser/i], + describe(ua) { + const browser = { + name: 'Miui', + }; + const version = Utils.getFirstMatch(/(?:MiuiBrowser)[\s/](\d+(\.?_?\d+)+)/i, ua); + + if (version) { + browser.version = version; + } + + return browser; + }, + }, + { + test: [/chromium/i], + describe(ua) { + const browser = { + name: 'Chromium', + }; + const version = Utils.getFirstMatch(/(?:chromium)[\s/](\d+(\.?_?\d+)+)/i, ua) || Utils.getFirstMatch(commonVersionIdentifier, ua); + + if (version) { + browser.version = version; + } + + return browser; + }, + }, + { + test: [/chrome|crios|crmo/i], + describe(ua) { + const browser = { + name: 'Chrome', + }; + const version = Utils.getFirstMatch(/(?:chrome|crios|crmo)\/(\d+(\.?_?\d+)+)/i, ua); + + if (version) { + browser.version = version; + } + + return browser; + }, + }, + { + test: [/GSA/i], + describe(ua) { + const browser = { + name: 'Google Search', + }; + const version = Utils.getFirstMatch(/(?:GSA)\/(\d+(\.?_?\d+)+)/i, ua); + + if (version) { + browser.version = version; + } + + return browser; + }, + }, + + /* Android Browser */ + { + test(parser) { + const notLikeAndroid = !parser.test(/like android/i); + const butAndroid = parser.test(/android/i); + return notLikeAndroid && butAndroid; + }, + describe(ua) { + const browser = { + name: 'Android Browser', + }; + const version = Utils.getFirstMatch(commonVersionIdentifier, ua); + + if (version) { + browser.version = version; + } + 
+ return browser; + }, + }, + + /* PlayStation 4 */ + { + test: [/playstation 4/i], + describe(ua) { + const browser = { + name: 'PlayStation 4', + }; + const version = Utils.getFirstMatch(commonVersionIdentifier, ua); + + if (version) { + browser.version = version; + } + + return browser; + }, + }, + + /* Safari */ + { + test: [/safari|applewebkit/i], + describe(ua) { + const browser = { + name: 'Safari', + }; + const version = Utils.getFirstMatch(commonVersionIdentifier, ua); + + if (version) { + browser.version = version; + } + + return browser; + }, + }, + + /* Something else */ + { + test: [/.*/i], + describe(ua) { + /* Here we try to make sure that there are explicit details about the device + * in order to decide what regexp exactly we want to apply + * (as there is a specific decision based on that conclusion) + */ + const regexpWithoutDeviceSpec = /^(.*)\/(.*) /; + const regexpWithDeviceSpec = /^(.*)\/(.*)[ \t]\((.*)/; + const hasDeviceSpec = ua.search('\\(') !== -1; + const regexp = hasDeviceSpec ? 
regexpWithDeviceSpec : regexpWithoutDeviceSpec; + return { + name: Utils.getFirstMatch(regexp, ua), + version: Utils.getSecondMatch(regexp, ua), + }; + }, + }, +]; + +export default browsersList; diff --git a/amplify/functions/deleteDocument/node_modules/bowser/src/parser-engines.js b/amplify/functions/deleteDocument/node_modules/bowser/src/parser-engines.js new file mode 100644 index 0000000..d46d0e5 --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/bowser/src/parser-engines.js @@ -0,0 +1,120 @@ +import Utils from './utils.js'; +import { ENGINE_MAP } from './constants.js'; + +/* + * More specific goes first + */ +export default [ + /* EdgeHTML */ + { + test(parser) { + return parser.getBrowserName(true) === 'microsoft edge'; + }, + describe(ua) { + const isBlinkBased = /\sedg\//i.test(ua); + + // return blink if it's blink-based one + if (isBlinkBased) { + return { + name: ENGINE_MAP.Blink, + }; + } + + // otherwise match the version and return EdgeHTML + const version = Utils.getFirstMatch(/edge\/(\d+(\.?_?\d+)+)/i, ua); + + return { + name: ENGINE_MAP.EdgeHTML, + version, + }; + }, + }, + + /* Trident */ + { + test: [/trident/i], + describe(ua) { + const engine = { + name: ENGINE_MAP.Trident, + }; + + const version = Utils.getFirstMatch(/trident\/(\d+(\.?_?\d+)+)/i, ua); + + if (version) { + engine.version = version; + } + + return engine; + }, + }, + + /* Presto */ + { + test(parser) { + return parser.test(/presto/i); + }, + describe(ua) { + const engine = { + name: ENGINE_MAP.Presto, + }; + + const version = Utils.getFirstMatch(/presto\/(\d+(\.?_?\d+)+)/i, ua); + + if (version) { + engine.version = version; + } + + return engine; + }, + }, + + /* Gecko */ + { + test(parser) { + const isGecko = parser.test(/gecko/i); + const likeGecko = parser.test(/like gecko/i); + return isGecko && !likeGecko; + }, + describe(ua) { + const engine = { + name: ENGINE_MAP.Gecko, + }; + + const version = Utils.getFirstMatch(/gecko\/(\d+(\.?_?\d+)+)/i, ua); + + if 
(version) { + engine.version = version; + } + + return engine; + }, + }, + + /* Blink */ + { + test: [/(apple)?webkit\/537\.36/i], + describe() { + return { + name: ENGINE_MAP.Blink, + }; + }, + }, + + /* WebKit */ + { + test: [/(apple)?webkit/i], + describe(ua) { + const engine = { + name: ENGINE_MAP.WebKit, + }; + + const version = Utils.getFirstMatch(/webkit\/(\d+(\.?_?\d+)+)/i, ua); + + if (version) { + engine.version = version; + } + + return engine; + }, + }, +]; diff --git a/amplify/functions/deleteDocument/node_modules/bowser/src/parser-os.js b/amplify/functions/deleteDocument/node_modules/bowser/src/parser-os.js new file mode 100644 index 0000000..4c516dd --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/bowser/src/parser-os.js @@ -0,0 +1,199 @@ +import Utils from './utils.js'; +import { OS_MAP } from './constants.js'; + +export default [ + /* Roku */ + { + test: [/Roku\/DVP/], + describe(ua) { + const version = Utils.getFirstMatch(/Roku\/DVP-(\d+\.\d+)/i, ua); + return { + name: OS_MAP.Roku, + version, + }; + }, + }, + + /* Windows Phone */ + { + test: [/windows phone/i], + describe(ua) { + const version = Utils.getFirstMatch(/windows phone (?:os)?\s?(\d+(\.\d+)*)/i, ua); + return { + name: OS_MAP.WindowsPhone, + version, + }; + }, + }, + + /* Windows */ + { + test: [/windows /i], + describe(ua) { + const version = Utils.getFirstMatch(/Windows ((NT|XP)( \d\d?.\d)?)/i, ua); + const versionName = Utils.getWindowsVersionName(version); + + return { + name: OS_MAP.Windows, + version, + versionName, + }; + }, + }, + + /* Firefox on iPad */ + { + test: [/Macintosh(.*?) 
FxiOS(.*?)\//], + describe(ua) { + const result = { + name: OS_MAP.iOS, + }; + const version = Utils.getSecondMatch(/(Version\/)(\d[\d.]+)/, ua); + if (version) { + result.version = version; + } + return result; + }, + }, + + /* macOS */ + { + test: [/macintosh/i], + describe(ua) { + const version = Utils.getFirstMatch(/mac os x (\d+(\.?_?\d+)+)/i, ua).replace(/[_\s]/g, '.'); + const versionName = Utils.getMacOSVersionName(version); + + const os = { + name: OS_MAP.MacOS, + version, + }; + if (versionName) { + os.versionName = versionName; + } + return os; + }, + }, + + /* iOS */ + { + test: [/(ipod|iphone|ipad)/i], + describe(ua) { + const version = Utils.getFirstMatch(/os (\d+([_\s]\d+)*) like mac os x/i, ua).replace(/[_\s]/g, '.'); + + return { + name: OS_MAP.iOS, + version, + }; + }, + }, + + /* Android */ + { + test(parser) { + const notLikeAndroid = !parser.test(/like android/i); + const butAndroid = parser.test(/android/i); + return notLikeAndroid && butAndroid; + }, + describe(ua) { + const version = Utils.getFirstMatch(/android[\s/-](\d+(\.\d+)*)/i, ua); + const versionName = Utils.getAndroidVersionName(version); + const os = { + name: OS_MAP.Android, + version, + }; + if (versionName) { + os.versionName = versionName; + } + return os; + }, + }, + + /* WebOS */ + { + test: [/(web|hpw)[o0]s/i], + describe(ua) { + const version = Utils.getFirstMatch(/(?:web|hpw)[o0]s\/(\d+(\.\d+)*)/i, ua); + const os = { + name: OS_MAP.WebOS, + }; + + if (version && version.length) { + os.version = version; + } + return os; + }, + }, + + /* BlackBerry */ + { + test: [/blackberry|\bbb\d+/i, /rim\stablet/i], + describe(ua) { + const version = Utils.getFirstMatch(/rim\stablet\sos\s(\d+(\.\d+)*)/i, ua) + || Utils.getFirstMatch(/blackberry\d+\/(\d+([_\s]\d+)*)/i, ua) + || Utils.getFirstMatch(/\bbb(\d+)/i, ua); + + return { + name: OS_MAP.BlackBerry, + version, + }; + }, + }, + + /* Bada */ + { + test: [/bada/i], + describe(ua) { + const version = 
Utils.getFirstMatch(/bada\/(\d+(\.\d+)*)/i, ua); + + return { + name: OS_MAP.Bada, + version, + }; + }, + }, + + /* Tizen */ + { + test: [/tizen/i], + describe(ua) { + const version = Utils.getFirstMatch(/tizen[/\s](\d+(\.\d+)*)/i, ua); + + return { + name: OS_MAP.Tizen, + version, + }; + }, + }, + + /* Linux */ + { + test: [/linux/i], + describe() { + return { + name: OS_MAP.Linux, + }; + }, + }, + + /* Chrome OS */ + { + test: [/CrOS/], + describe() { + return { + name: OS_MAP.ChromeOS, + }; + }, + }, + + /* Playstation 4 */ + { + test: [/PlayStation 4/], + describe(ua) { + const version = Utils.getFirstMatch(/PlayStation 4[/\s](\d+(\.\d+)*)/i, ua); + return { + name: OS_MAP.PlayStation4, + version, + }; + }, + }, +]; diff --git a/amplify/functions/deleteDocument/node_modules/bowser/src/parser-platforms.js b/amplify/functions/deleteDocument/node_modules/bowser/src/parser-platforms.js new file mode 100644 index 0000000..48b1eb1 --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/bowser/src/parser-platforms.js @@ -0,0 +1,266 @@ +import Utils from './utils.js'; +import { PLATFORMS_MAP } from './constants.js'; + +/* + * Tablets go first since usually they have more specific + * signs to detect. 
+ */ + +export default [ + /* Googlebot */ + { + test: [/googlebot/i], + describe() { + return { + type: 'bot', + vendor: 'Google', + }; + }, + }, + + /* Huawei */ + { + test: [/huawei/i], + describe(ua) { + const model = Utils.getFirstMatch(/(can-l01)/i, ua) && 'Nova'; + const platform = { + type: PLATFORMS_MAP.mobile, + vendor: 'Huawei', + }; + if (model) { + platform.model = model; + } + return platform; + }, + }, + + /* Nexus Tablet */ + { + test: [/nexus\s*(?:7|8|9|10).*/i], + describe() { + return { + type: PLATFORMS_MAP.tablet, + vendor: 'Nexus', + }; + }, + }, + + /* iPad */ + { + test: [/ipad/i], + describe() { + return { + type: PLATFORMS_MAP.tablet, + vendor: 'Apple', + model: 'iPad', + }; + }, + }, + + /* Firefox on iPad */ + { + test: [/Macintosh(.*?) FxiOS(.*?)\//], + describe() { + return { + type: PLATFORMS_MAP.tablet, + vendor: 'Apple', + model: 'iPad', + }; + }, + }, + + /* Amazon Kindle Fire */ + { + test: [/kftt build/i], + describe() { + return { + type: PLATFORMS_MAP.tablet, + vendor: 'Amazon', + model: 'Kindle Fire HD 7', + }; + }, + }, + + /* Another Amazon Tablet with Silk */ + { + test: [/silk/i], + describe() { + return { + type: PLATFORMS_MAP.tablet, + vendor: 'Amazon', + }; + }, + }, + + /* Tablet */ + { + test: [/tablet(?! 
pc)/i], + describe() { + return { + type: PLATFORMS_MAP.tablet, + }; + }, + }, + + /* iPod/iPhone */ + { + test(parser) { + const iDevice = parser.test(/ipod|iphone/i); + const likeIDevice = parser.test(/like (ipod|iphone)/i); + return iDevice && !likeIDevice; + }, + describe(ua) { + const model = Utils.getFirstMatch(/(ipod|iphone)/i, ua); + return { + type: PLATFORMS_MAP.mobile, + vendor: 'Apple', + model, + }; + }, + }, + + /* Nexus Mobile */ + { + test: [/nexus\s*[0-6].*/i, /galaxy nexus/i], + describe() { + return { + type: PLATFORMS_MAP.mobile, + vendor: 'Nexus', + }; + }, + }, + + /* Mobile */ + { + test: [/[^-]mobi/i], + describe() { + return { + type: PLATFORMS_MAP.mobile, + }; + }, + }, + + /* BlackBerry */ + { + test(parser) { + return parser.getBrowserName(true) === 'blackberry'; + }, + describe() { + return { + type: PLATFORMS_MAP.mobile, + vendor: 'BlackBerry', + }; + }, + }, + + /* Bada */ + { + test(parser) { + return parser.getBrowserName(true) === 'bada'; + }, + describe() { + return { + type: PLATFORMS_MAP.mobile, + }; + }, + }, + + /* Windows Phone */ + { + test(parser) { + return parser.getBrowserName() === 'windows phone'; + }, + describe() { + return { + type: PLATFORMS_MAP.mobile, + vendor: 'Microsoft', + }; + }, + }, + + /* Android Tablet */ + { + test(parser) { + const osMajorVersion = Number(String(parser.getOSVersion()).split('.')[0]); + return parser.getOSName(true) === 'android' && (osMajorVersion >= 3); + }, + describe() { + return { + type: PLATFORMS_MAP.tablet, + }; + }, + }, + + /* Android Mobile */ + { + test(parser) { + return parser.getOSName(true) === 'android'; + }, + describe() { + return { + type: PLATFORMS_MAP.mobile, + }; + }, + }, + + /* desktop */ + { + test(parser) { + return parser.getOSName(true) === 'macos'; + }, + describe() { + return { + type: PLATFORMS_MAP.desktop, + vendor: 'Apple', + }; + }, + }, + + /* Windows */ + { + test(parser) { + return parser.getOSName(true) === 'windows'; + }, + describe() { + return { 
+ type: PLATFORMS_MAP.desktop, + }; + }, + }, + + /* Linux */ + { + test(parser) { + return parser.getOSName(true) === 'linux'; + }, + describe() { + return { + type: PLATFORMS_MAP.desktop, + }; + }, + }, + + /* PlayStation 4 */ + { + test(parser) { + return parser.getOSName(true) === 'playstation 4'; + }, + describe() { + return { + type: PLATFORMS_MAP.tv, + }; + }, + }, + + /* Roku */ + { + test(parser) { + return parser.getOSName(true) === 'roku'; + }, + describe() { + return { + type: PLATFORMS_MAP.tv, + }; + }, + }, +]; diff --git a/amplify/functions/deleteDocument/node_modules/bowser/src/parser.js b/amplify/functions/deleteDocument/node_modules/bowser/src/parser.js new file mode 100644 index 0000000..2f9f39f --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/bowser/src/parser.js @@ -0,0 +1,496 @@ +import browserParsersList from './parser-browsers.js'; +import osParsersList from './parser-os.js'; +import platformParsersList from './parser-platforms.js'; +import enginesParsersList from './parser-engines.js'; +import Utils from './utils.js'; + +/** + * The main class that arranges the whole parsing process. 
+ */ +class Parser { + /** + * Create instance of Parser + * + * @param {String} UA User-Agent string + * @param {Boolean} [skipParsing=false] parser can skip parsing in purpose of performance + * improvements if you need to make a more particular parsing + * like {@link Parser#parseBrowser} or {@link Parser#parsePlatform} + * + * @throw {Error} in case of empty UA String + * + * @constructor + */ + constructor(UA, skipParsing = false) { + if (UA === void (0) || UA === null || UA === '') { + throw new Error("UserAgent parameter can't be empty"); + } + + this._ua = UA; + + /** + * @typedef ParsedResult + * @property {Object} browser + * @property {String|undefined} [browser.name] + * Browser name, like `"Chrome"` or `"Internet Explorer"` + * @property {String|undefined} [browser.version] Browser version as a String `"12.01.45334.10"` + * @property {Object} os + * @property {String|undefined} [os.name] OS name, like `"Windows"` or `"macOS"` + * @property {String|undefined} [os.version] OS version, like `"NT 5.1"` or `"10.11.1"` + * @property {String|undefined} [os.versionName] OS name, like `"XP"` or `"High Sierra"` + * @property {Object} platform + * @property {String|undefined} [platform.type] + * platform type, can be either `"desktop"`, `"tablet"` or `"mobile"` + * @property {String|undefined} [platform.vendor] Vendor of the device, + * like `"Apple"` or `"Samsung"` + * @property {String|undefined} [platform.model] Device model, + * like `"iPhone"` or `"Kindle Fire HD 7"` + * @property {Object} engine + * @property {String|undefined} [engine.name] + * Can be any of this: `WebKit`, `Blink`, `Gecko`, `Trident`, `Presto`, `EdgeHTML` + * @property {String|undefined} [engine.version] String version of the engine + */ + this.parsedResult = {}; + + if (skipParsing !== true) { + this.parse(); + } + } + + /** + * Get UserAgent string of current Parser instance + * @return {String} User-Agent String of the current object + * + * @public + */ + getUA() { + return this._ua; 
+ } + + /** + * Test a UA string for a regexp + * @param {RegExp} regex + * @return {Boolean} + */ + test(regex) { + return regex.test(this._ua); + } + + /** + * Get parsed browser object + * @return {Object} + */ + parseBrowser() { + this.parsedResult.browser = {}; + + const browserDescriptor = Utils.find(browserParsersList, (_browser) => { + if (typeof _browser.test === 'function') { + return _browser.test(this); + } + + if (_browser.test instanceof Array) { + return _browser.test.some(condition => this.test(condition)); + } + + throw new Error("Browser's test function is not valid"); + }); + + if (browserDescriptor) { + this.parsedResult.browser = browserDescriptor.describe(this.getUA()); + } + + return this.parsedResult.browser; + } + + /** + * Get parsed browser object + * @return {Object} + * + * @public + */ + getBrowser() { + if (this.parsedResult.browser) { + return this.parsedResult.browser; + } + + return this.parseBrowser(); + } + + /** + * Get browser's name + * @return {String} Browser's name or an empty string + * + * @public + */ + getBrowserName(toLowerCase) { + if (toLowerCase) { + return String(this.getBrowser().name).toLowerCase() || ''; + } + return this.getBrowser().name || ''; + } + + + /** + * Get browser's version + * @return {String} version of browser + * + * @public + */ + getBrowserVersion() { + return this.getBrowser().version; + } + + /** + * Get OS + * @return {Object} + * + * @example + * this.getOS(); + * { + * name: 'macOS', + * version: '10.11.12' + * } + */ + getOS() { + if (this.parsedResult.os) { + return this.parsedResult.os; + } + + return this.parseOS(); + } + + /** + * Parse OS and save it to this.parsedResult.os + * @return {*|{}} + */ + parseOS() { + this.parsedResult.os = {}; + + const os = Utils.find(osParsersList, (_os) => { + if (typeof _os.test === 'function') { + return _os.test(this); + } + + if (_os.test instanceof Array) { + return _os.test.some(condition => this.test(condition)); + } + + throw new 
Error("Browser's test function is not valid"); + }); + + if (os) { + this.parsedResult.os = os.describe(this.getUA()); + } + + return this.parsedResult.os; + } + + /** + * Get OS name + * @param {Boolean} [toLowerCase] return lower-cased value + * @return {String} name of the OS — macOS, Windows, Linux, etc. + */ + getOSName(toLowerCase) { + const { name } = this.getOS(); + + if (toLowerCase) { + return String(name).toLowerCase() || ''; + } + + return name || ''; + } + + /** + * Get OS version + * @return {String} full version with dots ('10.11.12', '5.6', etc) + */ + getOSVersion() { + return this.getOS().version; + } + + /** + * Get parsed platform + * @return {{}} + */ + getPlatform() { + if (this.parsedResult.platform) { + return this.parsedResult.platform; + } + + return this.parsePlatform(); + } + + /** + * Get platform name + * @param {Boolean} [toLowerCase=false] + * @return {*} + */ + getPlatformType(toLowerCase = false) { + const { type } = this.getPlatform(); + + if (toLowerCase) { + return String(type).toLowerCase() || ''; + } + + return type || ''; + } + + /** + * Get parsed platform + * @return {{}} + */ + parsePlatform() { + this.parsedResult.platform = {}; + + const platform = Utils.find(platformParsersList, (_platform) => { + if (typeof _platform.test === 'function') { + return _platform.test(this); + } + + if (_platform.test instanceof Array) { + return _platform.test.some(condition => this.test(condition)); + } + + throw new Error("Browser's test function is not valid"); + }); + + if (platform) { + this.parsedResult.platform = platform.describe(this.getUA()); + } + + return this.parsedResult.platform; + } + + /** + * Get parsed engine + * @return {{}} + */ + getEngine() { + if (this.parsedResult.engine) { + return this.parsedResult.engine; + } + + return this.parseEngine(); + } + + /** + * Get engines's name + * @return {String} Engines's name or an empty string + * + * @public + */ + getEngineName(toLowerCase) { + if (toLowerCase) { + return 
String(this.getEngine().name).toLowerCase() || ''; + } + return this.getEngine().name || ''; + } + + /** + * Get parsed platform + * @return {{}} + */ + parseEngine() { + this.parsedResult.engine = {}; + + const engine = Utils.find(enginesParsersList, (_engine) => { + if (typeof _engine.test === 'function') { + return _engine.test(this); + } + + if (_engine.test instanceof Array) { + return _engine.test.some(condition => this.test(condition)); + } + + throw new Error("Browser's test function is not valid"); + }); + + if (engine) { + this.parsedResult.engine = engine.describe(this.getUA()); + } + + return this.parsedResult.engine; + } + + /** + * Parse full information about the browser + * @returns {Parser} + */ + parse() { + this.parseBrowser(); + this.parseOS(); + this.parsePlatform(); + this.parseEngine(); + + return this; + } + + /** + * Get parsed result + * @return {ParsedResult} + */ + getResult() { + return Utils.assign({}, this.parsedResult); + } + + /** + * Check if parsed browser matches certain conditions + * + * @param {Object} checkTree It's one or two layered object, + * which can include a platform or an OS on the first layer + * and should have browsers specs on the bottom-laying layer + * + * @returns {Boolean|undefined} Whether the browser satisfies the set conditions or not. + * Returns `undefined` when the browser is no described in the checkTree object. 
+ * + * @example + * const browser = Bowser.getParser(window.navigator.userAgent); + * if (browser.satisfies({chrome: '>118.01.1322' })) + * // or with os + * if (browser.satisfies({windows: { chrome: '>118.01.1322' } })) + * // or with platforms + * if (browser.satisfies({desktop: { chrome: '>118.01.1322' } })) + */ + satisfies(checkTree) { + const platformsAndOSes = {}; + let platformsAndOSCounter = 0; + const browsers = {}; + let browsersCounter = 0; + + const allDefinitions = Object.keys(checkTree); + + allDefinitions.forEach((key) => { + const currentDefinition = checkTree[key]; + if (typeof currentDefinition === 'string') { + browsers[key] = currentDefinition; + browsersCounter += 1; + } else if (typeof currentDefinition === 'object') { + platformsAndOSes[key] = currentDefinition; + platformsAndOSCounter += 1; + } + }); + + if (platformsAndOSCounter > 0) { + const platformsAndOSNames = Object.keys(platformsAndOSes); + const OSMatchingDefinition = Utils.find(platformsAndOSNames, name => (this.isOS(name))); + + if (OSMatchingDefinition) { + const osResult = this.satisfies(platformsAndOSes[OSMatchingDefinition]); + + if (osResult !== void 0) { + return osResult; + } + } + + const platformMatchingDefinition = Utils.find( + platformsAndOSNames, + name => (this.isPlatform(name)), + ); + if (platformMatchingDefinition) { + const platformResult = this.satisfies(platformsAndOSes[platformMatchingDefinition]); + + if (platformResult !== void 0) { + return platformResult; + } + } + } + + if (browsersCounter > 0) { + const browserNames = Object.keys(browsers); + const matchingDefinition = Utils.find(browserNames, name => (this.isBrowser(name, true))); + + if (matchingDefinition !== void 0) { + return this.compareVersion(browsers[matchingDefinition]); + } + } + + return undefined; + } + + /** + * Check if the browser name equals the passed string + * @param browserName The string to compare with the browser name + * @param [includingAlias=false] The flag showing whether 
alias will be included into comparison + * @returns {boolean} + */ + isBrowser(browserName, includingAlias = false) { + const defaultBrowserName = this.getBrowserName().toLowerCase(); + let browserNameLower = browserName.toLowerCase(); + const alias = Utils.getBrowserTypeByAlias(browserNameLower); + + if (includingAlias && alias) { + browserNameLower = alias.toLowerCase(); + } + return browserNameLower === defaultBrowserName; + } + + compareVersion(version) { + let expectedResults = [0]; + let comparableVersion = version; + let isLoose = false; + + const currentBrowserVersion = this.getBrowserVersion(); + + if (typeof currentBrowserVersion !== 'string') { + return void 0; + } + + if (version[0] === '>' || version[0] === '<') { + comparableVersion = version.substr(1); + if (version[1] === '=') { + isLoose = true; + comparableVersion = version.substr(2); + } else { + expectedResults = []; + } + if (version[0] === '>') { + expectedResults.push(1); + } else { + expectedResults.push(-1); + } + } else if (version[0] === '=') { + comparableVersion = version.substr(1); + } else if (version[0] === '~') { + isLoose = true; + comparableVersion = version.substr(1); + } + + return expectedResults.indexOf( + Utils.compareVersions(currentBrowserVersion, comparableVersion, isLoose), + ) > -1; + } + + isOS(osName) { + return this.getOSName(true) === String(osName).toLowerCase(); + } + + isPlatform(platformType) { + return this.getPlatformType(true) === String(platformType).toLowerCase(); + } + + isEngine(engineName) { + return this.getEngineName(true) === String(engineName).toLowerCase(); + } + + /** + * Is anything? 
Check if the browser is called "anything", + * the OS called "anything" or the platform called "anything" + * @param {String} anything + * @param [includingAlias=false] The flag showing whether alias will be included into comparison + * @returns {Boolean} + */ + is(anything, includingAlias = false) { + return this.isBrowser(anything, includingAlias) || this.isOS(anything) + || this.isPlatform(anything); + } + + /** + * Check if any of the given values satisfies this.is(anything) + * @param {String[]} anythings + * @returns {Boolean} + */ + some(anythings = []) { + return anythings.some(anything => this.is(anything)); + } +} + +export default Parser; diff --git a/amplify/functions/deleteDocument/node_modules/bowser/src/utils.js b/amplify/functions/deleteDocument/node_modules/bowser/src/utils.js new file mode 100644 index 0000000..d1174bf --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/bowser/src/utils.js @@ -0,0 +1,309 @@ +import { BROWSER_MAP, BROWSER_ALIASES_MAP } from './constants.js'; + +export default class Utils { + /** + * Get first matched item for a string + * @param {RegExp} regexp + * @param {String} ua + * @return {Array|{index: number, input: string}|*|boolean|string} + */ + static getFirstMatch(regexp, ua) { + const match = ua.match(regexp); + return (match && match.length > 0 && match[1]) || ''; + } + + /** + * Get second matched item for a string + * @param regexp + * @param {String} ua + * @return {Array|{index: number, input: string}|*|boolean|string} + */ + static getSecondMatch(regexp, ua) { + const match = ua.match(regexp); + return (match && match.length > 1 && match[2]) || ''; + } + + /** + * Match a regexp and return a constant or undefined + * @param {RegExp} regexp + * @param {String} ua + * @param {*} _const Any const that will be returned if regexp matches the string + * @return {*} + */ + static matchAndReturnConst(regexp, ua, _const) { + if (regexp.test(ua)) { + return _const; + } + return void (0); + } + + static 
getWindowsVersionName(version) { + switch (version) { + case 'NT': return 'NT'; + case 'XP': return 'XP'; + case 'NT 5.0': return '2000'; + case 'NT 5.1': return 'XP'; + case 'NT 5.2': return '2003'; + case 'NT 6.0': return 'Vista'; + case 'NT 6.1': return '7'; + case 'NT 6.2': return '8'; + case 'NT 6.3': return '8.1'; + case 'NT 10.0': return '10'; + default: return undefined; + } + } + + /** + * Get macOS version name + * 10.5 - Leopard + * 10.6 - Snow Leopard + * 10.7 - Lion + * 10.8 - Mountain Lion + * 10.9 - Mavericks + * 10.10 - Yosemite + * 10.11 - El Capitan + * 10.12 - Sierra + * 10.13 - High Sierra + * 10.14 - Mojave + * 10.15 - Catalina + * + * @example + * getMacOSVersionName("10.14") // 'Mojave' + * + * @param {string} version + * @return {string} versionName + */ + static getMacOSVersionName(version) { + const v = version.split('.').splice(0, 2).map(s => parseInt(s, 10) || 0); + v.push(0); + if (v[0] !== 10) return undefined; + switch (v[1]) { + case 5: return 'Leopard'; + case 6: return 'Snow Leopard'; + case 7: return 'Lion'; + case 8: return 'Mountain Lion'; + case 9: return 'Mavericks'; + case 10: return 'Yosemite'; + case 11: return 'El Capitan'; + case 12: return 'Sierra'; + case 13: return 'High Sierra'; + case 14: return 'Mojave'; + case 15: return 'Catalina'; + default: return undefined; + } + } + + /** + * Get Android version name + * 1.5 - Cupcake + * 1.6 - Donut + * 2.0 - Eclair + * 2.1 - Eclair + * 2.2 - Froyo + * 2.x - Gingerbread + * 3.x - Honeycomb + * 4.0 - Ice Cream Sandwich + * 4.1 - Jelly Bean + * 4.4 - KitKat + * 5.x - Lollipop + * 6.x - Marshmallow + * 7.x - Nougat + * 8.x - Oreo + * 9.x - Pie + * + * @example + * getAndroidVersionName("7.0") // 'Nougat' + * + * @param {string} version + * @return {string} versionName + */ + static getAndroidVersionName(version) { + const v = version.split('.').splice(0, 2).map(s => parseInt(s, 10) || 0); + v.push(0); + if (v[0] === 1 && v[1] < 5) return undefined; + if (v[0] === 1 && v[1] < 6) 
return 'Cupcake'; + if (v[0] === 1 && v[1] >= 6) return 'Donut'; + if (v[0] === 2 && v[1] < 2) return 'Eclair'; + if (v[0] === 2 && v[1] === 2) return 'Froyo'; + if (v[0] === 2 && v[1] > 2) return 'Gingerbread'; + if (v[0] === 3) return 'Honeycomb'; + if (v[0] === 4 && v[1] < 1) return 'Ice Cream Sandwich'; + if (v[0] === 4 && v[1] < 4) return 'Jelly Bean'; + if (v[0] === 4 && v[1] >= 4) return 'KitKat'; + if (v[0] === 5) return 'Lollipop'; + if (v[0] === 6) return 'Marshmallow'; + if (v[0] === 7) return 'Nougat'; + if (v[0] === 8) return 'Oreo'; + if (v[0] === 9) return 'Pie'; + return undefined; + } + + /** + * Get version precisions count + * + * @example + * getVersionPrecision("1.10.3") // 3 + * + * @param {string} version + * @return {number} + */ + static getVersionPrecision(version) { + return version.split('.').length; + } + + /** + * Calculate browser version weight + * + * @example + * compareVersions('1.10.2.1', '1.8.2.1.90') // 1 + * compareVersions('1.010.2.1', '1.09.2.1.90'); // 1 + * compareVersions('1.10.2.1', '1.10.2.1'); // 0 + * compareVersions('1.10.2.1', '1.0800.2'); // -1 + * compareVersions('1.10.2.1', '1.10', true); // 0 + * + * @param {String} versionA versions versions to compare + * @param {String} versionB versions versions to compare + * @param {boolean} [isLoose] enable loose comparison + * @return {Number} comparison result: -1 when versionA is lower, + * 1 when versionA is bigger, 0 when both equal + */ + /* eslint consistent-return: 1 */ + static compareVersions(versionA, versionB, isLoose = false) { + // 1) get common precision for both versions, for example for "10.0" and "9" it should be 2 + const versionAPrecision = Utils.getVersionPrecision(versionA); + const versionBPrecision = Utils.getVersionPrecision(versionB); + + let precision = Math.max(versionAPrecision, versionBPrecision); + let lastPrecision = 0; + + const chunks = Utils.map([versionA, versionB], (version) => { + const delta = precision - 
Utils.getVersionPrecision(version); + + // 2) "9" -> "9.0" (for precision = 2) + const _version = version + new Array(delta + 1).join('.0'); + + // 3) "9.0" -> ["000000000"", "000000009"] + return Utils.map(_version.split('.'), chunk => new Array(20 - chunk.length).join('0') + chunk).reverse(); + }); + + // adjust precision for loose comparison + if (isLoose) { + lastPrecision = precision - Math.min(versionAPrecision, versionBPrecision); + } + + // iterate in reverse order by reversed chunks array + precision -= 1; + while (precision >= lastPrecision) { + // 4) compare: "000000009" > "000000010" = false (but "9" > "10" = true) + if (chunks[0][precision] > chunks[1][precision]) { + return 1; + } + + if (chunks[0][precision] === chunks[1][precision]) { + if (precision === lastPrecision) { + // all version chunks are same + return 0; + } + + precision -= 1; + } else if (chunks[0][precision] < chunks[1][precision]) { + return -1; + } + } + + return undefined; + } + + /** + * Array::map polyfill + * + * @param {Array} arr + * @param {Function} iterator + * @return {Array} + */ + static map(arr, iterator) { + const result = []; + let i; + if (Array.prototype.map) { + return Array.prototype.map.call(arr, iterator); + } + for (i = 0; i < arr.length; i += 1) { + result.push(iterator(arr[i])); + } + return result; + } + + /** + * Array::find polyfill + * + * @param {Array} arr + * @param {Function} predicate + * @return {Array} + */ + static find(arr, predicate) { + let i; + let l; + if (Array.prototype.find) { + return Array.prototype.find.call(arr, predicate); + } + for (i = 0, l = arr.length; i < l; i += 1) { + const value = arr[i]; + if (predicate(value, i)) { + return value; + } + } + return undefined; + } + + /** + * Object::assign polyfill + * + * @param {Object} obj + * @param {Object} ...objs + * @return {Object} + */ + static assign(obj, ...assigners) { + const result = obj; + let i; + let l; + if (Object.assign) { + return Object.assign(obj, ...assigners); + } + 
for (i = 0, l = assigners.length; i < l; i += 1) { + const assigner = assigners[i]; + if (typeof assigner === 'object' && assigner !== null) { + const keys = Object.keys(assigner); + keys.forEach((key) => { + result[key] = assigner[key]; + }); + } + } + return obj; + } + + /** + * Get short version/alias for a browser name + * + * @example + * getBrowserAlias('Microsoft Edge') // edge + * + * @param {string} browserName + * @return {string} + */ + static getBrowserAlias(browserName) { + return BROWSER_ALIASES_MAP[browserName]; + } + + /** + * Get short version/alias for a browser name + * + * @example + * getBrowserAlias('edge') // Microsoft Edge + * + * @param {string} browserAlias + * @return {string} + */ + static getBrowserTypeByAlias(browserAlias) { + return BROWSER_MAP[browserAlias] || ''; + } +} diff --git a/amplify/functions/deleteDocument/node_modules/fast-xml-parser/CHANGELOG.md b/amplify/functions/deleteDocument/node_modules/fast-xml-parser/CHANGELOG.md new file mode 100644 index 0000000..021eab6 --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/fast-xml-parser/CHANGELOG.md @@ -0,0 +1,594 @@ +Note: If you find missing information about particular minor version, that version must have been changed without any functional change in this library. + +**4.4.1 / 2024-07-28** +- v5 fix: maximum length limit to currency value +- fix #634: build attributes with oneListGroup and attributesGroupName (#653)(By [Andreas Naziris](https://github.com/a-rasin)) +- fix: get oneListGroup to work as expected for array of strings (#662)(By [Andreas Naziris](https://github.com/a-rasin)) + +**4.4.0 / 2024-05-18** +- fix #654: parse attribute list correctly for self closing stop node. +- fix: validator bug when closing tag is not opened. 
(#647) (By [Ryosuke Fukatani](https://github.com/RyosukeFukatani)) +- fix #581: typings; return type of `tagValueProcessor` & `attributeValueProcessor` (#582) (By [monholm]()) + +**4.3.6 / 2024-03-16** +- Add support for parsing HTML numeric entities (#645) (By [Jonas Schade ](https://github.com/DerZade)) + +**4.3.5 / 2024-02-24** +- code for v5 is added for experimental use + +**4.3.4 / 2024-01-10** +- fix: Don't escape entities in CDATA sections (#633) (By [wackbyte](https://github.com/wackbyte)) + +**4.3.3 / 2024-01-10** +- Remove unnecessary regex + +**4.3.2 / 2023-10-02** +- fix `jObj.hasOwnProperty` when give input is null (By [Arda TANRIKULU](https://github.com/ardatan)) + +**4.3.1 / 2023-09-24** +- revert back "Fix typings for builder and parser to make return type generic" to avoid failure of existing projects. Need to decide a common approach. + +**4.3.0 / 2023-09-20** +- Fix stopNodes to work with removeNSPrefix (#607) (#608) (By [Craig Andrews]https://github.com/candrews)) +- Fix #610 ignore properties set to Object.prototype +- Fix typings for builder and parser to make return type generic (By [Sarah Dayan](https://github.com/sarahdayan)) + +**4.2.7 / 2023-07-30** +- Fix: builder should set text node correctly when only textnode is present (#589) (By [qianqing](https://github.com/joneqian)) +- Fix: Fix for null and undefined attributes when building xml (#585) (#598). A null or undefined value should be ignored. (By [Eugenio Ceschia](https://github.com/cecia234)) + +**4.2.6 / 2023-07-17** +- Fix: Remove trailing slash from jPath for self-closing tags (#595) (By [Maciej Radzikowski](https://github.com/m-radzikowski)) + +**4.2.5 / 2023-06-22** +- change code implementation + +**4.2.4 / 2023-06-06** +- fix security bug + +**4.2.3 / 2023-06-05** +- fix security bug + +**4.2.2 / 2023-04-18** +- fix #562: fix unpaired tag when it comes in last of a nested tag. 
Also throw error when unpaired tag is used as closing tag + +**4.2.1 / 2023-04-18** +- fix: jpath after unpaired tags + +**4.2.0 / 2023-04-09** +- support `updateTag` parser property + +**4.1.4 / 2023-04-08** +- update typings to let user create XMLBuilder instance without options (#556) (By [Patrick](https://github.com/omggga)) +- fix: IsArray option isn't parsing tags with 0 as value correctly #490 (#557) (By [Aleksandr Murashkin](https://github.com/p-kuen)) +- feature: support `oneListGroup` to group repeated children tags udder single group + +**4.1.3 / 2023-02-26** +- fix #546: Support complex entity value + +**4.1.2 / 2023-02-12** +- Security Fix + +**4.1.1 / 2023-02-03** +- Fix #540: ignoreAttributes breaks unpairedTags +- Refactor XML builder code + +**4.1.0 / 2023-02-02** +- Fix '<' or '>' in DTD comment throwing an error. (#533) (By [Adam Baker](https://github.com/Cwazywierdo)) +- Set "eNotation" to 'true' as default + +**4.0.15 / 2023-01-25** +- make "eNotation" optional + +**4.0.14 / 2023-01-22** +- fixed: add missed typing "eNotation" to parse values + +**4.0.13 / 2023-01-07** +- preserveorder formatting (By [mdeknowis](https://github.com/mdeknowis)) +- support `transformAttributeName` (By [Erik Rothoff Andersson](https://github.com/erkie)) + +**4.0.12 / 2022-11-19** +- fix typescript + +**4.0.11 / 2022-10-05** +- fix #501: parse for entities only once + +**4.0.10 / 2022-09-14** +- fix broken links in demo site (By [Yannick Lang](https://github.com/layaxx)) +- fix #491: tagValueProcessor type definition (By [Andrea Francesco Speziale](https://github.com/andreafspeziale)) +- Add jsdocs for tagValueProcessor + + +**4.0.9 / 2022-07-10** +- fix #470: stop-tag can have self-closing tag with same name +- fix #472: stopNode can have any special tag inside +- Allow !ATTLIST and !NOTATION with DOCTYPE +- Add transformTagName option to transform tag names when parsing (#469) (By [Erik Rothoff Andersson](https://github.com/erkie)) + +**4.0.8 / 2022-05-28** +- Fix 
CDATA parsing returning empty string when value = 0 (#451) (By [ndelanou](https://github.com/ndelanou)) +- Fix stopNodes when same tag appears inside node (#456) (By [patrickshipe](https://github.com/patrickshipe)) +- fix #468: prettify own properties only + +**4.0.7 / 2022-03-18** +- support CDATA even if tag order is not preserved +- support Comments even if tag order is not preserved +- fix #446: XMLbuilder should not indent XML declaration + +**4.0.6 / 2022-03-08** +- fix: call tagValueProcessor only once for array items +- fix: missing changed for #437 + +**4.0.5 / 2022-03-06** +- fix #437: call tagValueProcessor from XML builder + +**4.0.4 / 2022-03-03** +- fix #435: should skip unpaired and self-closing nodes when set as stopnodes + +**4.0.3 / 2022-02-15** +- fix: ReferenceError when Bundled with Strict (#431) (By [Andreas Heissenberger](https://github.com/aheissenberger)) + + +**4.0.2 / 2022-02-04** +- builder supports `suppressUnpairedNode` +- parser supports `ignoreDeclaration` and `ignorePiTags` +- fix: when comment is parsed as text value if given as ` ...` #423 +- builder supports decoding `&` + +**4.0.1 / 2022-01-08** +- fix builder for pi tag +- fix: support suppressBooleanAttrs by builder + +**4.0.0 / 2022-01-06** +- Generating different combined, parser only, builder only, validator only browser bundles +- Keeping cjs modules as they can be imported in cjs and esm modules both. Otherwise refer `esm` branch. + +**4.0.0-beta.8 / 2021-12-13** +- call tagValueProcessor for stop nodes + +**4.0.0-beta.7 / 2021-12-09** +- fix Validator bug when an attribute has no value but '=' only +- XML Builder should suppress unpaired tags by default. +- documents update for missing features +- refactoring to use Object.assign +- refactoring to remove repeated code + +**4.0.0-beta.6 / 2021-12-05** +- Support PI Tags processing +- Support `suppressBooleanAttributes` by XML Builder for attributes with value `true`. 
+ +**4.0.0-beta.5 / 2021-12-04** +- fix: when a tag with name "attributes" + +**4.0.0-beta.4 / 2021-12-02** +- Support HTML document parsing +- skip stop nodes parsing when building the XML from JS object +- Support external entites without DOCTYPE +- update dev dependency: strnum v1.0.5 to fix long number issue + +**4.0.0-beta.3 / 2021-11-30** +- support global stopNodes expression like "*.stop" +- support self-closing and paired unpaired tags +- fix: CDATA should not be parsed. +- Fix typings for XMLBuilder (#396)(By [Anders Emil Salvesen](https://github.com/andersem)) +- supports XML entities, HTML entities, DOCTYPE entities + +**⚠️ 4.0.0-beta.2 / 2021-11-19** +- rename `attrMap` to `attibutes` in parser output when `preserveOrder:true` +- supports unpairedTags + +**⚠️ 4.0.0-beta.1 / 2021-11-18** +- Parser returns an array now + - to make the structure common + - and to return root level detail +- renamed `cdataTagName` to `cdataPropName` +- Added `commentPropName` +- fix typings + +**⚠️ 4.0.0-beta.0 / 2021-11-16** +- Name change of many configuration properties. + - `attrNodeName` to `attributesGroupName` + - `attrValueProcessor` to `attributeValueProcessor` + - `parseNodeValue` to `parseTagValue` + - `ignoreNameSpace` to `removeNSPrefix` + - `numParseOptions` to `numberParseOptions` + - spelling correction for `suppressEmptyNode` +- Name change of cli and browser bundle to **fxparser** +- `isArray` option is added to parse a tag into array +- `preserveOrder` option is added to render XML in such a way that the result js Object maintains the order of properties same as in XML. +- Processing behaviour of `tagValueProcessor` and `attributeValueProcessor` are changes with extra input parameters +- j2xparser is renamed to XMLBuilder. +- You need to build XML parser instance for given options first before parsing XML. 
+- fix #327, #336: throw error when extra text after XML content +- fix #330: attribute value can have '\n', +- fix #350: attrbiutes can be separated by '\n' from tagname + +3.21.1 / 2021-10-31 +- Correctly format JSON elements with a text prop but no attribute props ( By [haddadnj](https://github.com/haddadnj) ) + +3.21.0 / 2021-10-25 + - feat: added option `rootNodeName` to set tag name for array input when converting js object to XML. + - feat: added option `alwaysCreateTextNode` to force text node creation (by: *@massimo-ua*) + - ⚠️ feat: Better error location for unclosed tags. (by *@Gei0r*) + - Some error messages would be changed when validating XML. Eg + - `{ InvalidXml: "Invalid '[ \"rootNode\"]' found." }` → `{InvalidTag: "Unclosed tag 'rootNode'."}` + - `{ InvalidTag: "Closing tag 'rootNode' is expected inplace of 'rootnode'." }` → `{ InvalidTag: "Expected closing tag 'rootNode' (opened in line 1) instead of closing tag 'rootnode'."}` + - ⚠️ feat: Column in error response when validating XML +```js +{ + "code": "InvalidAttr", + "msg": "Attribute 'abc' is repeated.", + "line": 1, + "col": 22 +} +``` + +3.20.1 / 2021-09-25 + - update strnum package + +3.20.0 / 2021-09-10 + - Use strnum npm package to parse string to number + - breaking change: long number will be parsed to scientific notation. 
+ +3.19.0 / 2021-03-14 + - License changed to MIT original + - Fix #321 : namespace tag parsing + +3.18.0 / 2021-02-05 + - Support RegEx and function in arrayMode option + - Fix #317 : validate nested PI tags + +3.17.4 / 2020-06-07 + - Refactor some code to support IE11 + - Fix: `` space as attribute string + +3.17.3 / 2020-05-23 + - Fix: tag name separated by \n \t + - Fix: throw error for unclosed tags + +3.17.2 / 2020-05-23 + - Fixed an issue in processing doctype tag + - Fixed tagName where it should not have whitespace chars + +3.17.1 / 2020-05-19 + - Fixed an issue in checking opening tag + +3.17.0 / 2020-05-18 + - parser: fix '<' issue when it comes in aatr value + - parser: refactoring to remove dependency from regex + - validator: fix IE 11 issue for error messages + - updated dev dependencies + - separated benchmark module to sub-module + - breaking change: comments will not be removed from CDATA data + +3.16.0 / 2020-01-12 + - validaor: fix for ampersand characters (#215) + - refactoring to support unicode chars in tag name + - update typing for validator error + +3.15.1 / 2019-12-09 + - validaor: fix multiple roots are not allowed + +3.15.0 / 2019-11-23 + - validaor: improve error messaging + - validator: add line number in case of error + - validator: add more error scenarios to make it more descriptive + +3.14.0 / 2019-10-25 + - arrayMode for XML to JS obj parsing + +3.13.0 / 2019-10-02 + - pass tag/attr name to tag/attr value processor + - inbuilt optional validation with XML parser + +3.12.21 / 2019-10-02 + - Fix validator for unclosed XMLs + - move nimnjs dependency to dev dependency + - update dependencies + +3.12.20 / 2019-08-16 + - Revert: Fix #167: '>' in attribute value as it is causing high performance degrade. + +3.12.19 / 2019-07-28 + - Fix js to xml parser should work for date values. 
(broken: `tagValueProcessor` will receive the original value instead of string always) (breaking change) + +3.12.18 / 2019-07-27 + - remove configstore dependency + +3.12.17 / 2019-07-14 + - Fix #167: '>' in attribute value + +3.12.16 / 2019-03-23 + - Support a new option "stopNodes". (#150) +Accept the list of tags which are not required to be parsed. Instead, all the nested tag and data will be assigned as string. + - Don't show post-install message + +3.12.12 / 2019-01-11 + - fix : IE parseInt, parseFloat error + +3.12.11 / 2018-12-24 + - fix #132: "/" should not be parsed as boolean attr in case of self closing tags + +3.12.9 / 2018-11-23 + - fix #129 : validator should not fail when an atrribute name is 'length' + +3.12.8 / 2018-11-22 + - fix #128 : use 'attrValueProcessor' to process attribute value in json2xml parser + +3.12.6 / 2018-11-10 + - Fix #126: check for type + +3.12.4 / 2018-09-12 + - Fix: include tasks in npm package + +3.12.3 / 2018-09-12 + - Fix CLI issue raised in last PR + +3.12.2 / 2018-09-11 + - Fix formatting for JSON to XML output + - Migrate to webpack (PR merged) + - fix cli (PR merged) + +3.12.0 / 2018-08-06 + - Support hexadecimal values + - Support true number parsing + +3.11.2 / 2018-07-23 + - Update Demo for more options + - Update license information + - Update readme for formatting, users, and spelling mistakes + - Add missing typescript definition for j2xParser + - refactoring: change filenames + +3.11.1 / 2018-06-05 + - fix #93: read the text after self closing tag + +3.11.0 / 2018-05-20 + - return defaultOptions if there are not options in buildOptions function + - added localeRange declaration in parser.d.ts + - Added support of cyrillic characters in validator XML + - fixed bug in validator work when XML data with byte order marker + +3.10.0 / 2018-05-13 + - Added support of cyrillic characters in parsing XML to JSON + +3.9.11 / 2018-05-09 + - fix https://github.com/NaturalIntelligence/fast-xml-parser/issues/80 fix nimn chars 
+ - update package information + - fix https://github.com/NaturalIntelligence/fast-xml-parser/issues/86: json 2 xml parser : property with null value should be parsed to self closing tag. + - update online demo + - revert zombiejs to old version to support old version of node + - update dependencies + +3.3.10 / 2018-04-23 + - fix #77 : parse even if closing tag has space before '>' + - include all css & js lib in demo app + - remove babel dependencies until needed + +3.3.9 / 2018-04-18 + - fix #74 : TS2314 TypeScript compiler error + +3.3.8 / 2018-04-17 + - fix #73 : IE doesn't support Object.assign + +3.3.7 / 2018-04-14 + - fix: use let insted of const in for loop of validator + - Merge pull request + https://github.com/NaturalIntelligence/fast-xml-parser/issues/71 from bb/master + first draft of typings for typescript + https://github.com/NaturalIntelligence/fast-xml-parser/issues/69 + - Merge pull request + https://github.com/NaturalIntelligence/fast-xml-parser/issues/70 from bb/patch-1 + fix some typos in readme + +3.3.6 / 2018-03-21 + - change arrow functions to full notation for IE compatibility + +3.3.5 / 2018-03-15 + - fix https://github.com/NaturalIntelligence/fast-xml-parser/issues/67 : attrNodeName invalid behavior + - fix: remove decodeHTML char condition + +3.3.4 / 2018-03-14 + - remove dependency on "he" package + - refactor code to separate methods in separate files. + - draft code for transforming XML to json string. It is not officially documented due to performance issue. + +3.3.0 / 2018-03-05 + - use common default options for XML parsing for consistency. And add `parseToNimn` method. 
+ - update nexttodo + - update README about XML to Nimn transformation and remove special notes about 3.x release + - update CONTRIBUTING.ms mentioning nexttodo + - add negative case for XML PIs + - validate xml processing instruction tags https://github.com/NaturalIntelligence/fast-xml-parser/issues/62 + - nimndata: handle array with object + - nimndata: node with nested node and text node + - nimndata: handle attributes and text node + - nimndata: add options, handle array + - add xml to nimn data converter + - x2j: direct access property with tagname + - update changelog + - fix validator when single quote presents in value enclosed with double quotes or vice versa + - Revert "remove unneded nimnjs dependency, move opencollective to devDependencies and replace it + with more light opencollective-postinstall" + This reverts commit d47aa7181075d82db4fee97fd8ea32b056fe3f46. + - Merge pull request: https://github.com/NaturalIntelligence/fast-xml-parser/issues/63 from HaroldPutman/suppress-undefined + Keep undefined nodes out of the XML output : This is useful when you are deleting nodes from the JSON and rewriting XML. 
+ +3.2.4 / 2018-03-01 + - fix #59 fix in validator when open quote presents in attribute value + - Create nexttodo.md + - exclude static from bitHound tests + - add package lock + +3.2.3 / 2018-02-28 + - Merge pull request from Delagen/master: fix namespaces can contain the same characters as xml names + +3.2.2 / 2018-02-22 + - fix: attribute xmlns should not be removed if ignoreNameSpace is false + - create CONTRIBUTING.md + +3.2.1 / 2018-02-17 + - fix: empty attribute should be parsed + +3.2.0 / 2018-02-16 + - Merge pull request : Dev to Master + - Update README and version + - j2x:add performance test + - j2x: Remove extra empty line before closing tag + - j2x: suppress empty nodes to self closing node if configured + - j2x: provide option to give indentation depth + - j2x: make optional formatting + - j2x: encodeHTMLchat + - j2x: handle cdata tag + - j2x: handle grouped attributes + - convert json to xml + - nested object + - array + - attributes + - text value + - small refactoring + - Merge pull request: Update cli.js to let user validate XML file or data + - Add option for rendering CDATA as separate property + +3.0.1 / 2018-02-09 + - fix CRLF: replace it with single space in attributes value only. 
+ +3.0.0 / 2018-02-08 + - change online tool with new changes + - update info about new options + - separate tag value processing to separate function + - make HTML decoding optional + - give an option to allow boolean attributes + - change cli options as per v3 + - Correct comparison table format on README + - update v3 information + - some performance improvement changes + - Make regex object local to the method and move some common methods to util + - Change parser to + - handle multiple instances of CDATA + - make triming of value optionals + - HTML decode attribute and text value + - refactor code to separate files + - Ignore newline chars without RE (in validator) + - validate for XML prolog + - Validate DOCTYPE without RE + - Update validator to return error response + - Update README to add detail about V3 + - Separate xmlNode model class + - include vscode debug config + - fix for repeated object + - fix attribute regex for boolean attributes + - Fix validator for invalid attributes +2.9.4 / 2018-02-02 + - Merge pull request: Decode HTML characters + - refactor source folder name + - ignore bundle / browser js to be published to npm +2.9.3 / 2018-01-26 + - Merge pull request: Correctly remove CRLF line breaks + - Enable to parse attribute in online editor + - Fix testing demo app test + - Describe parsing options + - Add options for online demo +2.9.2 / 2018-01-18 + - Remove check if tag starting with "XML" + - Fix: when there are spaces before / after CDATA + +2.9.1 / 2018-01-16 + - Fix: newline should be replaced with single space + - Fix: for single and multiline comments + - validate xml with CDATA + - Fix: the issue when there is no space between 2 attributes + - Fix: https://github.com/NaturalIntelligence/fast-xml-parser/issues/33: when there is newline char in attr val, it doesn't parse + - Merge pull request: fix ignoreNamespace + - fix: don't wrap attributes if only namespace attrs + - fix: use portfinder for run tests, update deps + - fix: don't 
treat namespaces as attributes when ignoreNamespace enabled + +2.9.0 / 2018-01-10 + - Rewrite the validator to handle large files. + Ignore DOCTYPE validation. + - Fix: When attribute value has equal sign + +2.8.3 / 2017-12-15 + - Fix: when a tag has value along with subtags + +2.8.2 / 2017-12-04 + - Fix value parsing for IE + +2.8.1 / 2017-12-01 + - fix: validator should return false instead of err when invalid XML + +2.8.0 / 2017-11-29 + - Add CLI option to ignore value conversion + - Fix variable name when filename is given on CLI + - Update CLI help text + - Merge pull request: xml2js: Accept standard input + - Test Node 8 + - Update dependencies + - Bundle readToEnd + - Add ability to read from standard input + +2.7.4 / 2017-09-22 + - Merge pull request: Allow wrap attributes with subobject to compatible with other parsers output + +2.7.3 / 2017-08-02 + - fix: handle CDATA with regx + +2.7.2 / 2017-07-30 + - Change travis config for yarn caching + - fix validator: when tag property is same as array property + - Merge pull request: Failing test case in validator for valid SVG + +2.7.1 / 2017-07-26 + - Fix: Handle val 0 + +2.7.0 / 2017-07-25 + - Fix test for arrayMode + - Merge pull request: Add arrayMode option to parse any nodes as arrays + +2.6.0 / 2017-07-14 + - code improvement + - Add unit tests for value conversion for attr + - Merge pull request: option of an attribute value conversion to a number (textAttrConversion) the same way as the textNodeConversion option does. Default value is false. 
+ +2.5.1 / 2017-07-01 + - Fix XML element name pattern + - Fix XML element name pattern while parsing + - Fix validation for xml tag element + +2.5.0 / 2017-06-25 + - Improve Validator performance + - update attr matching regex + - Add perf tests + - Improve atrr regex to handle all cases + +2.4.4 / 2017-06-08 + - Bug fix: when an attribute has single or double quote in value + +2.4.3 / 2017-06-05 + - Bug fix: when multiple CDATA tags are given + - Merge pull request: add option "textNodeConversion" + - add option "textNodeConversion" + +2.4.1 / 2017-04-14 + - fix tests + - Bug fix: preserve initial space of node value + - Handle CDATA + +2.3.1 / 2017-03-15 + - Bug fix: when single self closing tag + - Merge pull request: fix .codeclimate.yml + - Update .codeclimate.yml - Fixed config so it does not error anymore. + - Update .codeclimate.yml + +2.3.0 / 2017-02-26 + - Code improvement + - add bithound config + - Update usage + - Update travis to generate bundle js before running tests + - 1.Browserify, 2. 
add more tests for validator + - Add validator + - Fix CLI default parameter bug + +2.2.1 / 2017-02-05 + - Bug fix: CLI default option diff --git a/amplify/functions/deleteDocument/node_modules/fast-xml-parser/LICENSE b/amplify/functions/deleteDocument/node_modules/fast-xml-parser/LICENSE new file mode 100644 index 0000000..d7da622 --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/fast-xml-parser/LICENSE @@ -0,0 +1,21 @@ +MIT License + +Copyright (c) 2017 Amit Kumar Gupta + +Permission is hereby granted, free of charge, to any person obtaining a copy +of this software and associated documentation files (the "Software"), to deal +in the Software without restriction, including without limitation the rights +to use, copy, modify, merge, publish, distribute, sublicense, and/or sell +copies of the Software, and to permit persons to whom the Software is +furnished to do so, subject to the following conditions: + +The above copyright notice and this permission notice shall be included in all +copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, +OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE +SOFTWARE. 
diff --git a/amplify/functions/deleteDocument/node_modules/fast-xml-parser/README.md b/amplify/functions/deleteDocument/node_modules/fast-xml-parser/README.md new file mode 100644 index 0000000..1891838 --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/fast-xml-parser/README.md @@ -0,0 +1,236 @@ +# [fast-xml-parser](https://www.npmjs.com/package/fast-xml-parser) +[![NPM quality][quality-image]][quality-url] +[![Coverage Status](https://coveralls.io/repos/github/NaturalIntelligence/fast-xml-parser/badge.svg?branch=master)](https://coveralls.io/github/NaturalIntelligence/fast-xml-parser?branch=master) +[Try me](https://naturalintelligence.github.io/fast-xml-parser/) +[![NPM total downloads](https://img.shields.io/npm/dt/fast-xml-parser.svg)](https://npm.im/fast-xml-parser) + + +Validate XML, Parse XML to JS Object, or Build XML from JS Object without C/C++ based libraries and no callback. + +--- + +ads-thePowerGlassesBook +I had recently published a book, The Power Glasses. Please have a look. Your feedback would be helpful. You can [mail](githubissues@proton.me) me for a free copy. +
+ +Sponsor this project + + + + + + + + Stubmatic donate button +
+
+
+ + + +![fxp_sponsors](https://github.com/NaturalIntelligence/fast-xml-parser/assets/7692328/c9367497-d67e-410a-90a6-66e3808be929) + +## Users + + + + + + + + + + + + + + + + + + + +[more](./USERs.md) + +The list of users are mostly published by Github or communicated directly. Feel free to contact if you find any information wrong. + +--- + +## Main Features + +FXP logo + +* Validate XML data syntactically +* Parse XML to JS Object +* Build XML from JS Object +* Compatible to node js packages, in browser, and in CLI (click try me button above for demo) +* Faster than any other pure JS implementation. +* It can handle big files (tested up to 100mb). +* Controlled parsing using various options +* XML Entities, HTML entities, and DOCTYPE entites are supported. +* unpaired tags (Eg `
` in HTML), stop nodes (Eg ` +: + +``` + +Bundle size + +| Bundle Name | Size | +| ------------------ | ---- | +| fxbuilder.min.js | 6.5K | +| fxparser.min.js | 20K | +| fxp.min.js | 26K | +| fxvalidator.min.js | 5.7K | + +### Documents + + + + + + + +
v3v4v5
+ documents +
    +
  1. Getting Started
  2. +
  3. XML Parser
  4. +
  5. XML Builder
  6. +
  7. XML Validator
  8. +
  9. Entities
  10. +
  11. HTML Document Parsing
  12. +
  13. PI Tag processing
  14. +
    +
  1. Getting Started +
  2. Features
  3. +
  4. Options
  5. +
  6. Output Builders
  7. +
  8. Value Parsers
  9. +
+ +**note**: version 5 is released with version 4 tfor experimental use. Based on it's demand, it'll be developed and the features can be different in final release. + +## Performance +negative means error + +### XML Parser + + + + +* Y-axis: requests per second +* X-axis: File size + +### XML Builder + + +* Y-axis: requests per second + + + + + + +## Usage Trend + +[Usage Trend of fast-xml-parser](https://npm-compare.com/fast-xml-parser#timeRange=THREE_YEARS) + + + NPM Usage Trend of fast-xml-parser + + +## Supporters +### Contributors + +This project exists thanks to [all](graphs/contributors) the people who contribute. [[Contribute](docs/CONTRIBUTING.md)]. + + + + +### Backers + +Thank you to all our backers! 🙏 [[Become a backer](https://opencollective.com/fast-xml-parser#backer)] + + + + + +# License +* MIT License + +![Donate $5](static/img/donation_quote.png) diff --git a/amplify/functions/deleteDocument/node_modules/fast-xml-parser/package.json b/amplify/functions/deleteDocument/node_modules/fast-xml-parser/package.json new file mode 100644 index 0000000..1fd52c1 --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/fast-xml-parser/package.json @@ -0,0 +1,74 @@ +{ + "name": "fast-xml-parser", + "version": "4.4.1", + "description": "Validate XML, Parse XML, Build XML without C/C++ based libraries", + "main": "./src/fxp.js", + "scripts": { + "test": "nyc --reporter=lcov --reporter=text jasmine spec/*spec.js", + "test-types": "tsc --noEmit spec/typings/typings-test.ts", + "unit": "jasmine", + "coverage": "nyc report --reporter html --reporter text -t .nyc_output --report-dir .nyc_output/summary", + "perf": "node ./benchmark/perfTest3.js", + "lint": "eslint src/*.js spec/*.js", + "bundle": "webpack --config webpack-prod.config.js", + "prettier": "prettier --write src/**/*.js", + "publish-please": "publish-please", + "checkReadiness": "publish-please --dry-run" + }, + "bin": { + "fxparser": "./src/cli/cli.js" + }, + "repository": { + "type": "git", + 
"url": "https://github.com/NaturalIntelligence/fast-xml-parser" + }, + "keywords": [ + "fast", + "xml", + "json", + "parser", + "xml2js", + "x2js", + "xml2json", + "js", + "cli", + "validator", + "validate", + "transformer", + "assert", + "js2xml", + "json2xml", + "html" + ], + "author": "Amit Gupta (https://solothought.com)", + "license": "MIT", + "devDependencies": { + "@babel/core": "^7.13.10", + "@babel/plugin-transform-runtime": "^7.13.10", + "@babel/preset-env": "^7.13.10", + "@babel/register": "^7.13.8", + "@types/node": "20", + "babel-loader": "^8.2.2", + "cytorus": "^0.2.9", + "eslint": "^8.3.0", + "he": "^1.2.0", + "jasmine": "^3.6.4", + "nyc": "^15.1.0", + "prettier": "^1.19.1", + "publish-please": "^5.5.2", + "typescript": "5", + "webpack": "^5.64.4", + "webpack-cli": "^4.9.1" + }, + "typings": "src/fxp.d.ts", + "funding": [{ + "type": "github", + "url": "https://github.com/sponsors/NaturalIntelligence" + },{ + "type": "paypal", + "url": "https://paypal.me/naturalintelligence" + }], + "dependencies": { + "strnum": "^1.0.5" + } +} diff --git a/amplify/functions/deleteDocument/node_modules/fast-xml-parser/src/cli/cli.js b/amplify/functions/deleteDocument/node_modules/fast-xml-parser/src/cli/cli.js new file mode 100755 index 0000000..984534c --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/fast-xml-parser/src/cli/cli.js @@ -0,0 +1,93 @@ +#!/usr/bin/env node +'use strict'; +/*eslint-disable no-console*/ +const fs = require('fs'); +const path = require('path'); +const {XMLParser, XMLValidator} = require("../fxp"); +const readToEnd = require('./read').readToEnd; + +const version = require('./../../package.json').version; +if (process.argv[2] === '--help' || process.argv[2] === '-h') { + console.log(require("./man")); +} else if (process.argv[2] === '--version') { + console.log(version); +} else { + const options = { + removeNSPrefix: true, + ignoreAttributes: false, + parseTagValue: true, + parseAttributeValue: true, + }; + let fileName = ''; 
+ let outputFileName; + let validate = false; + let validateOnly = false; + for (let i = 2; i < process.argv.length; i++) { + if (process.argv[i] === '-ns') { + options.removeNSPrefix = false; + } else if (process.argv[i] === '-a') { + options.ignoreAttributes = true; + } else if (process.argv[i] === '-c') { + options.parseTagValue = false; + options.parseAttributeValue = false; + } else if (process.argv[i] === '-o') { + outputFileName = process.argv[++i]; + } else if (process.argv[i] === '-v') { + validate = true; + } else if (process.argv[i] === '-V') { + validateOnly = true; + } else { + //filename + fileName = process.argv[i]; + } + } + + const callback = function(xmlData) { + let output = ''; + if (validate) { + const parser = new XMLParser(options); + output = parser.parse(xmlData,validate); + } else if (validateOnly) { + output = XMLValidator.validate(xmlData); + process.exitCode = output === true ? 0 : 1; + } else { + const parser = new XMLParser(options); + output = JSON.stringify(parser.parse(xmlData,validate), null, 4); + } + if (outputFileName) { + writeToFile(outputFileName, output); + } else { + console.log(output); + } + }; + + try { + + if (!fileName) { + readToEnd(process.stdin, function(err, data) { + if (err) { + throw err; + } + callback(data.toString()); + }); + } else { + fs.readFile(fileName, function(err, data) { + if (err) { + throw err; + } + callback(data.toString()); + }); + } + } catch (e) { + console.log('Seems an invalid file or stream.' 
+ e); + } +} + +function writeToFile(fileName, data) { + fs.writeFile(fileName, data, function(err) { + if (err) { + throw err; + } + console.log('JSON output has been written to ' + fileName); + }); +} diff --git a/amplify/functions/deleteDocument/node_modules/fast-xml-parser/src/cli/man.js b/amplify/functions/deleteDocument/node_modules/fast-xml-parser/src/cli/man.js new file mode 100644 index 0000000..89947cc --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/fast-xml-parser/src/cli/man.js @@ -0,0 +1,12 @@ +module.exports = `Fast XML Parser 4.0.0 +---------------- +$ fxparser [-ns|-a|-c|-v|-V] [-o outputfile.json] +$ cat xmlfile.xml | fxparser [-ns|-a|-c|-v|-V] [-o outputfile.json] + +Options +---------------- +-ns: remove namespace from tag and atrribute name. +-a: don't parse attributes. +-c: parse values to premitive type. +-v: validate before parsing. +-V: validate only.` \ No newline at end of file diff --git a/amplify/functions/deleteDocument/node_modules/fast-xml-parser/src/cli/read.js b/amplify/functions/deleteDocument/node_modules/fast-xml-parser/src/cli/read.js new file mode 100644 index 0000000..642da52 --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/fast-xml-parser/src/cli/read.js @@ -0,0 +1,92 @@ +'use strict'; + +// Copyright 2013 Timothy J Fontaine +// +// Permission is hereby granted, free of charge, to any person obtaining a copy +// of this software and associated documentation files (the 'Software'), to deal +// in the Software without restriction, including without limitation the rights +// to use, copy, modify, merge, publish, distribute, sublicense, and/or sell +// copies of the Software, and to permit persons to whom the Software is +// furnished to do so, subject to the following conditions: +// +// The above copyright notice and this permission notice shall be included in +// all copies or substantial portions of the Software. 
+// +// THE SOFTWARE IS PROVIDED 'AS IS', WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +// IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +// FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +// AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +// LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, +// OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN +// THE SOFTWARE + +/* + +Read any stream all the way to the end and trigger a single cb + +const http = require('http'); + +const rte = require('readtoend'); + +http.get('http://nodejs.org', function(response) { + rte.readToEnd(response, function(err, body) { + console.log(body); + }); +}); + +*/ + +let stream = require('stream'); +const util = require('util'); + +if (!stream.Transform) { + stream = require('readable-stream'); +} + +function ReadToEnd(opts) { + if (!(this instanceof ReadToEnd)) { + return new ReadToEnd(opts); + } + + stream.Transform.call(this, opts); + + this._rte_encoding = opts.encoding || 'utf8'; + + this._buff = ''; +} + +module.exports = ReadToEnd; +util.inherits(ReadToEnd, stream.Transform); + +ReadToEnd.prototype._transform = function(chunk, encoding, done) { + this._buff += chunk.toString(this._rte_encoding); + this.push(chunk); + done(); +}; + +ReadToEnd.prototype._flush = function(done) { + this.emit('complete', undefined, this._buff); + done(); +}; + +ReadToEnd.readToEnd = function(stream, options, cb) { + if (!cb) { + cb = options; + options = {}; + } + + const dest = new ReadToEnd(options); + + stream.pipe(dest); + + stream.on('error', function(err) { + stream.unpipe(dest); + cb(err); + }); + + dest.on('complete', cb); + + dest.resume(); + + return dest; +}; diff --git a/amplify/functions/deleteDocument/node_modules/fast-xml-parser/src/fxp.d.ts b/amplify/functions/deleteDocument/node_modules/fast-xml-parser/src/fxp.d.ts new file mode 100644 index 0000000..bddcfef --- 
/dev/null +++ b/amplify/functions/deleteDocument/node_modules/fast-xml-parser/src/fxp.d.ts @@ -0,0 +1,402 @@ +type X2jOptions = { + /** + * Preserve the order of tags in resulting JS object + * + * Defaults to `false` + */ + preserveOrder?: boolean; + + /** + * Give a prefix to the attribute name in the resulting JS object + * + * Defaults to '@_' + */ + attributeNamePrefix?: string; + + /** + * A name to group all attributes of a tag under, or `false` to disable + * + * Defaults to `false` + */ + attributesGroupName?: false | string; + + /** + * The name of the next node in the resulting JS + * + * Defaults to `#text` + */ + textNodeName?: string; + + /** + * Whether to ignore attributes when parsing + * + * Defaults to `true` + */ + ignoreAttributes?: boolean; + + /** + * Whether to remove namespace string from tag and attribute names + * + * Defaults to `false` + */ + removeNSPrefix?: boolean; + + /** + * Whether to allow attributes without value + * + * Defaults to `false` + */ + allowBooleanAttributes?: boolean; + + /** + * Whether to parse tag value with `strnum` package + * + * Defaults to `true` + */ + parseTagValue?: boolean; + + /** + * Whether to parse tag value with `strnum` package + * + * Defaults to `false` + */ + parseAttributeValue?: boolean; + + /** + * Whether to remove surrounding whitespace from tag or attribute value + * + * Defaults to `true` + */ + trimValues?: boolean; + + /** + * Give a property name to set CDATA values to instead of merging to tag's text value + * + * Defaults to `false` + */ + cdataPropName?: false | string; + + /** + * If set, parse comments and set as this property + * + * Defaults to `false` + */ + commentPropName?: false | string; + + /** + * Control how tag value should be parsed. Called only if tag value is not empty + * + * @returns {undefined|null} `undefined` or `null` to set original value. + * @returns {unknown} + * + * 1. Different value or value with different data type to set new value. + * 2. 
Same value to set parsed value if `parseTagValue: true`. + * + * Defaults to `(tagName, val, jPath, hasAttributes, isLeafNode) => val` + */ + tagValueProcessor?: (tagName: string, tagValue: string, jPath: string, hasAttributes: boolean, isLeafNode: boolean) => unknown; + + /** + * Control how attribute value should be parsed + * + * @param attrName + * @param attrValue + * @param jPath + * @returns {undefined|null} `undefined` or `null` to set original value + * @returns {unknown} + * + * Defaults to `(attrName, val, jPath) => val` + */ + attributeValueProcessor?: (attrName: string, attrValue: string, jPath: string) => unknown; + + /** + * Options to pass to `strnum` for parsing numbers + * + * Defaults to `{ hex: true, leadingZeros: true, eNotation: true }` + */ + numberParseOptions?: strnumOptions; + + /** + * Nodes to stop parsing at + * + * Defaults to `[]` + */ + stopNodes?: string[]; + + /** + * List of tags without closing tags + * + * Defaults to `[]` + */ + unpairedTags?: string[]; + + /** + * Whether to always create a text node + * + * Defaults to `false` + */ + alwaysCreateTextNode?: boolean; + + /** + * Determine whether a tag should be parsed as an array + * + * @param tagName + * @param jPath + * @param isLeafNode + * @param isAttribute + * @returns {boolean} + * + * Defaults to `() => false` + */ + isArray?: (tagName: string, jPath: string, isLeafNode: boolean, isAttribute: boolean) => boolean; + + /** + * Whether to process default and DOCTYPE entities + * + * Defaults to `true` + */ + processEntities?: boolean; + + /** + * Whether to process HTML entities + * + * Defaults to `false` + */ + htmlEntities?: boolean; + + /** + * Whether to ignore the declaration tag from output + * + * Defaults to `false` + */ + ignoreDeclaration?: boolean; + + /** + * Whether to ignore Pi tags + * + * Defaults to `false` + */ + ignorePiTags?: boolean; + + /** + * Transform tag names + * + * Defaults to `false` + */ + transformTagName?: ((tagName: string) => string) | 
false; + + /** + * Transform attribute names + * + * Defaults to `false` + */ + transformAttributeName?: ((attributeName: string) => string) | false; + + /** + * Change the tag name when a different name is returned. Skip the tag from parsed result when false is returned. + * Modify `attrs` object to control attributes for the given tag. + * + * @returns {string} new tag name. + * @returns false to skip the tag + * + * Defaults to `(tagName, jPath, attrs) => tagName` + */ + updateTag?: (tagName: string, jPath: string, attrs: {[k: string]: string}) => string | boolean; +}; + +type strnumOptions = { + hex: boolean; + leadingZeros: boolean, + skipLike?: RegExp, + eNotation?: boolean +} + +type validationOptions = { + /** + * Whether to allow attributes without value + * + * Defaults to `false` + */ + allowBooleanAttributes?: boolean; + + /** + * List of tags without closing tags + * + * Defaults to `[]` + */ + unpairedTags?: string[]; +}; + +type XmlBuilderOptions = { + /** + * Give a prefix to the attribute name in the resulting JS object + * + * Defaults to '@_' + */ + attributeNamePrefix?: string; + + /** + * A name to group all attributes of a tag under, or `false` to disable + * + * Defaults to `false` + */ + attributesGroupName?: false | string; + + /** + * The name of the next node in the resulting JS + * + * Defaults to `#text` + */ + textNodeName?: string; + + /** + * Whether to ignore attributes when parsing + * + * Defaults to `true` + */ + ignoreAttributes?: boolean; + + /** + * Give a property name to set CDATA values to instead of merging to tag's text value + * + * Defaults to `false` + */ + cdataPropName?: false | string; + + /** + * If set, parse comments and set as this property + * + * Defaults to `false` + */ + commentPropName?: false | string; + + /** + * Whether to make output pretty instead of single line + * + * Defaults to `false` + */ + format?: boolean; + + + /** + * If `format` is set to `true`, sets the indent string + * + * Defaults to ` 
` + */ + indentBy?: string; + + /** + * Give a name to a top-level array + * + * Defaults to `undefined` + */ + arrayNodeName?: string; + + /** + * Create empty tags for tags with no text value + * + * Defaults to `false` + */ + suppressEmptyNode?: boolean; + + /** + * Suppress an unpaired tag + * + * Defaults to `true` + */ + suppressUnpairedNode?: boolean; + + /** + * Don't put a value for boolean attributes + * + * Defaults to `true` + */ + suppressBooleanAttributes?: boolean; + + /** + * Preserve the order of tags in resulting JS object + * + * Defaults to `false` + */ + preserveOrder?: boolean; + + /** + * List of tags without closing tags + * + * Defaults to `[]` + */ + unpairedTags?: string[]; + + /** + * Nodes to stop parsing at + * + * Defaults to `[]` + */ + stopNodes?: string[]; + + /** + * Control how tag value should be parsed. Called only if tag value is not empty + * + * @returns {undefined|null} `undefined` or `null` to set original value. + * @returns {unknown} + * + * 1. Different value or value with different data type to set new value. + * 2. Same value to set parsed value if `parseTagValue: true`. 
+ * + * Defaults to `(tagName, val, jPath, hasAttributes, isLeafNode) => val` + */ + tagValueProcessor?: (name: string, value: unknown) => unknown; + + /** + * Control how attribute value should be parsed + * + * @param attrName + * @param attrValue + * @param jPath + * @returns {undefined|null} `undefined` or `null` to set original value + * @returns {unknown} + * + * Defaults to `(attrName, val, jPath) => val` + */ + attributeValueProcessor?: (name: string, value: unknown) => unknown; + + /** + * Whether to process default and DOCTYPE entities + * + * Defaults to `true` + */ + processEntities?: boolean; + + + oneListGroup?: boolean; +}; + +type ESchema = string | object | Array; + +type ValidationError = { + err: { + code: string; + msg: string, + line: number, + col: number + }; +}; + +export class XMLParser { + constructor(options?: X2jOptions); + parse(xmlData: string | Buffer ,validationOptions?: validationOptions | boolean): any; + /** + * Add Entity which is not by default supported by this library + * @param entityIdentifier {string} Eg: 'ent' for &ent; + * @param entityValue {string} Eg: '\r' + */ + addEntity(entityIdentifier: string, entityValue: string): void; +} + +export class XMLValidator{ + static validate( xmlData: string, options?: validationOptions): true | ValidationError; +} +export class XMLBuilder { + constructor(options?: XmlBuilderOptions); + build(jObj: any): any; +} diff --git a/amplify/functions/deleteDocument/node_modules/fast-xml-parser/src/fxp.js b/amplify/functions/deleteDocument/node_modules/fast-xml-parser/src/fxp.js new file mode 100644 index 0000000..9cfa0ac --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/fast-xml-parser/src/fxp.js @@ -0,0 +1,11 @@ +'use strict'; + +const validator = require('./validator'); +const XMLParser = require('./xmlparser/XMLParser'); +const XMLBuilder = require('./xmlbuilder/json2xml'); + +module.exports = { + XMLParser: XMLParser, + XMLValidator: validator, + XMLBuilder: XMLBuilder +} 
\ No newline at end of file diff --git a/amplify/functions/deleteDocument/node_modules/fast-xml-parser/src/util.js b/amplify/functions/deleteDocument/node_modules/fast-xml-parser/src/util.js new file mode 100644 index 0000000..df0a60d --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/fast-xml-parser/src/util.js @@ -0,0 +1,72 @@ +'use strict'; + +const nameStartChar = ':A-Za-z_\\u00C0-\\u00D6\\u00D8-\\u00F6\\u00F8-\\u02FF\\u0370-\\u037D\\u037F-\\u1FFF\\u200C-\\u200D\\u2070-\\u218F\\u2C00-\\u2FEF\\u3001-\\uD7FF\\uF900-\\uFDCF\\uFDF0-\\uFFFD'; +const nameChar = nameStartChar + '\\-.\\d\\u00B7\\u0300-\\u036F\\u203F-\\u2040'; +const nameRegexp = '[' + nameStartChar + '][' + nameChar + ']*' +const regexName = new RegExp('^' + nameRegexp + '$'); + +const getAllMatches = function(string, regex) { + const matches = []; + let match = regex.exec(string); + while (match) { + const allmatches = []; + allmatches.startIndex = regex.lastIndex - match[0].length; + const len = match.length; + for (let index = 0; index < len; index++) { + allmatches.push(match[index]); + } + matches.push(allmatches); + match = regex.exec(string); + } + return matches; +}; + +const isName = function(string) { + const match = regexName.exec(string); + return !(match === null || typeof match === 'undefined'); +}; + +exports.isExist = function(v) { + return typeof v !== 'undefined'; +}; + +exports.isEmptyObject = function(obj) { + return Object.keys(obj).length === 0; +}; + +/** + * Copy all the properties of a into b. 
+ * @param {*} target + * @param {*} a + */ +exports.merge = function(target, a, arrayMode) { + if (a) { + const keys = Object.keys(a); // will return an array of own properties + const len = keys.length; //don't make it inline + for (let i = 0; i < len; i++) { + if (arrayMode === 'strict') { + target[keys[i]] = [ a[keys[i]] ]; + } else { + target[keys[i]] = a[keys[i]]; + } + } + } +}; +/* exports.merge =function (b,a){ + return Object.assign(b,a); +} */ + +exports.getValue = function(v) { + if (exports.isExist(v)) { + return v; + } else { + return ''; + } +}; + +// const fakeCall = function(a) {return a;}; +// const fakeCallNoReturn = function() {}; + +exports.isName = isName; +exports.getAllMatches = getAllMatches; +exports.nameRegexp = nameRegexp; diff --git a/amplify/functions/deleteDocument/node_modules/fast-xml-parser/src/v5/CharsSymbol.js b/amplify/functions/deleteDocument/node_modules/fast-xml-parser/src/v5/CharsSymbol.js new file mode 100644 index 0000000..fa5ce9e --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/fast-xml-parser/src/v5/CharsSymbol.js @@ -0,0 +1,16 @@ +modules.export = { + "<" : "<", //tag start + ">" : ">", //tag end + "/" : "/", //close tag + "!" : "!", //comment or docttype + "!--" : "!--", //comment + "-->" : "-->", //comment end + "?" 
: "?", //pi + "?>" : "?>", //pi end + "?xml" : "?xml", //pi end + "![" : "![", //cdata + "]]>" : "]]>", //cdata end + "[" : "[", + "-" : "-", + "D" : "D", +} \ No newline at end of file diff --git a/amplify/functions/deleteDocument/node_modules/fast-xml-parser/src/v5/EntitiesParser.js b/amplify/functions/deleteDocument/node_modules/fast-xml-parser/src/v5/EntitiesParser.js new file mode 100644 index 0000000..62cc02f --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/fast-xml-parser/src/v5/EntitiesParser.js @@ -0,0 +1,107 @@ +const ampEntity = { regex: /&(amp|#38|#x26);/g, val : "&"}; +const htmlEntities = { + "space": { regex: /&(nbsp|#160);/g, val: " " }, + // "lt" : { regex: /&(lt|#60);/g, val: "<" }, + // "gt" : { regex: /&(gt|#62);/g, val: ">" }, + // "amp" : { regex: /&(amp|#38);/g, val: "&" }, + // "quot" : { regex: /&(quot|#34);/g, val: "\"" }, + // "apos" : { regex: /&(apos|#39);/g, val: "'" }, + "cent" : { regex: /&(cent|#162);/g, val: "¢" }, + "pound" : { regex: /&(pound|#163);/g, val: "£" }, + "yen" : { regex: /&(yen|#165);/g, val: "¥" }, + "euro" : { regex: /&(euro|#8364);/g, val: "€" }, + "copyright" : { regex: /&(copy|#169);/g, val: "©" }, + "reg" : { regex: /&(reg|#174);/g, val: "®" }, + "inr" : { regex: /&(inr|#8377);/g, val: "₹" }, + "num_dec": { regex: /&#([0-9]{1,7});/g, val : (_, str) => String.fromCharCode(Number.parseInt(str, 10)) }, + "num_hex": { regex: /&#x([0-9a-fA-F]{1,6});/g, val : (_, str) => String.fromCharCode(Number.parseInt(str, 16)) }, +}; + +class EntitiesParser{ + constructor(replaceHtmlEntities) { + this.replaceHtmlEntities = replaceHtmlEntities; + this.docTypeEntities = {}; + this.lastEntities = { + "apos" : { regex: /&(apos|#39|#x27);/g, val : "'"}, + "gt" : { regex: /&(gt|#62|#x3E);/g, val : ">"}, + "lt" : { regex: /&(lt|#60|#x3C);/g, val : "<"}, + "quot" : { regex: /&(quot|#34|#x22);/g, val : "\""}, + }; + } + + addExternalEntities(externalEntities){ + const entKeys = Object.keys(externalEntities); + for (let i 
= 0; i < entKeys.length; i++) { + const ent = entKeys[i]; + this.addExternalEntity(ent,externalEntities[ent]) + } + } + addExternalEntity(key,val){ + validateEntityName(key); + if(val.indexOf("&") !== -1) { + reportWarning(`Entity ${key} is not added as '&' is found in value;`) + return; + }else{ + this.lastEntities[ent] = { + regex: new RegExp("&"+key+";","g"), + val : val + } + } + } + + addDocTypeEntities(entities){ + const entKeys = Object.keys(entities); + for (let i = 0; i < entKeys.length; i++) { + const ent = entKeys[i]; + this.docTypeEntities[ent] = { + regex: new RegExp("&"+ent+";","g"), + val : entities[ent] + } + } + } + + parse(val){ + return this.replaceEntitiesValue(val) + } + + /** + * 1. Replace DOCTYPE entities + * 2. Replace external entities + * 3. Replace HTML entities if asked + * @param {string} val + */ + replaceEntitiesValue(val){ + if(typeof val === "string" && val.length > 0){ + for(let entityName in this.docTypeEntities){ + const entity = this.docTypeEntities[entityName]; + val = val.replace( entity.regx, entity.val); + } + for(let entityName in this.lastEntities){ + const entity = this.lastEntities[entityName]; + val = val.replace( entity.regex, entity.val); + } + if(this.replaceHtmlEntities){ + for(let entityName in htmlEntities){ + const entity = htmlEntities[entityName]; + val = val.replace( entity.regex, entity.val); + } + } + val = val.replace( ampEntity.regex, ampEntity.val); + } + return val; + } +}; + +//an entity name should not contains special characters that may be used in regex +//Eg !?\\\/[]$%{}^&*()<> +const specialChar = "!?\\\/[]$%{}^&*()<>|+"; + +function validateEntityName(name){ + for (let i = 0; i < specialChar.length; i++) { + const ch = specialChar[i]; + if(name.indexOf(ch) !== -1) throw new Error(`Invalid character ${ch} in entity name`); + } + return name; +} + +module.exports = EntitiesParser; \ No newline at end of file diff --git 
a/amplify/functions/deleteDocument/node_modules/fast-xml-parser/src/v5/OptionsBuilder.js b/amplify/functions/deleteDocument/node_modules/fast-xml-parser/src/v5/OptionsBuilder.js new file mode 100755 index 0000000..be1f1d4 --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/fast-xml-parser/src/v5/OptionsBuilder.js @@ -0,0 +1,64 @@ + +const JsObjOutputBuilder = require("./OutputBuilders/JsObjBuilder"); + +const defaultOptions = { + preserveOrder: false, + removeNSPrefix: false, // remove NS from tag name or attribute name if true + //ignoreRootElement : false, + stopNodes: [], //nested tags will not be parsed even for errors + // isArray: () => false, //User will set it + htmlEntities: false, + // skipEmptyListItem: false + tags:{ + unpaired: [], + nameFor:{ + cdata: false, + comment: false, + text: '#text' + }, + separateTextProperty: false, + }, + attributes:{ + ignore: false, + booleanType: true, + entities: true, + }, + + // select: ["img[src]"], + // stop: ["anim", "[ads]"] + only: [], // rest tags will be skipped. It will result in flat array + hierarchy: false, //will be used when a particular tag is set to be parsed. + skip: [], // will be skipped from parse result. on('skip') will be triggered + + select: [], // on('select', tag => tag ) will be called if match + stop: [], //given tagPath will not be parsed. innerXML will be set as string value + OutputBuilder: new JsObjOutputBuilder(), +}; + +const buildOptions = function(options) { + const finalOptions = { ... 
defaultOptions}; + copyProperties(finalOptions,options) + return finalOptions; +}; + +function copyProperties(target, source) { + for (let key in source) { + if (source.hasOwnProperty(key)) { + if (key === 'OutputBuilder') { + target[key] = source[key]; + }else if (typeof source[key] === 'object' && !Array.isArray(source[key])) { + // Recursively copy nested properties + if (typeof target[key] === 'undefined') { + target[key] = {}; + } + copyProperties(target[key], source[key]); + } else { + // Copy non-nested properties + target[key] = source[key]; + } + } + } +} + +exports.buildOptions = buildOptions; +exports.defaultOptions = defaultOptions; \ No newline at end of file diff --git a/amplify/functions/deleteDocument/node_modules/fast-xml-parser/src/v5/OutputBuilders/BaseOutputBuilder.js b/amplify/functions/deleteDocument/node_modules/fast-xml-parser/src/v5/OutputBuilders/BaseOutputBuilder.js new file mode 100644 index 0000000..be2d478 --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/fast-xml-parser/src/v5/OutputBuilders/BaseOutputBuilder.js @@ -0,0 +1,71 @@ +class BaseOutputBuilder{ + constructor(){ + // this.attributes = {}; + } + + addAttribute(name, value){ + if(this.options.onAttribute){ + //TODO: better to pass tag path + const v = this.options.onAttribute(name, value, this.tagName); + if(v) this.attributes[v.name] = v.value; + }else{ + name = this.options.attributes.prefix + name + this.options.attributes.suffix; + this.attributes[name] = this.parseValue(value, this.options.attributes.valueParsers); + } + } + + /** + * parse value by chain of parsers + * @param {string} val + * @returns {any} parsed value if matching parser found + */ + parseValue = function(val, valParsers){ + for (let i = 0; i < valParsers.length; i++) { + let valParser = valParsers[i]; + if(typeof valParser === "string"){ + valParser = this.registeredParsers[valParser]; + } + if(valParser){ + val = valParser.parse(val); + } + } + return val; + } + + /** + * To add a 
nested empty tag. + * @param {string} key + * @param {any} val + */ + _addChild(key, val){} + + /** + * skip the comment if property is not set + */ + addComment(text){ + if(this.options.nameFor.comment) + this._addChild(this.options.nameFor.comment, text); + } + + //store CDATA separately if property is set + //otherwise add to tag's value + addCdata(text){ + if (this.options.nameFor.cdata) { + this._addChild(this.options.nameFor.cdata, text); + } else { + this.addRawValue(text || ""); + } + } + + addRawValue = text => this.addValue(text); + + addDeclaration(){ + if(!this.options.declaration){ + }else{ + this.addPi("?xml"); + } + this.attributes = {} + } +} + +module.exports = BaseOutputBuilder; \ No newline at end of file diff --git a/amplify/functions/deleteDocument/node_modules/fast-xml-parser/src/v5/OutputBuilders/JsArrBuilder.js b/amplify/functions/deleteDocument/node_modules/fast-xml-parser/src/v5/OutputBuilders/JsArrBuilder.js new file mode 100644 index 0000000..c63f627 --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/fast-xml-parser/src/v5/OutputBuilders/JsArrBuilder.js @@ -0,0 +1,103 @@ +const {buildOptions,registerCommonValueParsers} = require("./ParserOptionsBuilder"); + +class OutputBuilder{ + constructor(options){ + this.options = buildOptions(options); + this.registeredParsers = registerCommonValueParsers(this.options); + } + + registerValueParser(name,parserInstance){//existing name will override the parser without warning + this.registeredParsers[name] = parserInstance; + } + + getInstance(parserOptions){ + return new JsArrBuilder(parserOptions, this.options, this.registeredParsers); + } +} + +const rootName = '!js_arr'; +const BaseOutputBuilder = require("./BaseOutputBuilder"); + +class JsArrBuilder extends BaseOutputBuilder{ + + constructor(parserOptions, options,registeredParsers) { + super(); + this.tagsStack = []; + this.parserOptions = parserOptions; + this.options = options; + this.registeredParsers = registeredParsers; + + 
this.root = new Node(rootName); + this.currentNode = this.root; + this.attributes = {}; + } + + addTag(tag){ + //when a new tag is added, it should be added as child of current node + //TODO: shift this check to the parser + if(tag.name === "__proto__") tag.name = "#__proto__"; + + this.tagsStack.push(this.currentNode); + this.currentNode = new Node(tag.name, this.attributes); + this.attributes = {}; + } + + /** + * Check if the node should be added by checking user's preference + * @param {Node} node + * @returns boolean: true if the node should not be added + */ + closeTag(){ + const node = this.currentNode; + this.currentNode = this.tagsStack.pop(); //set parent node in scope + if(this.options.onClose !== undefined){ + //TODO TagPathMatcher + const resultTag = this.options.onClose(node, + new TagPathMatcher(this.tagsStack,node)); + + if(resultTag) return; + } + this.currentNode.child.push(node); //to parent node + } + + //Called by parent class methods + _addChild(key, val){ + // if(key === "__proto__") tagName = "#__proto__"; + this.currentNode.child.push( {[key]: val }); + // this.currentNode.leafType = false; + } + + /** + * Add text value child node + * @param {string} text + */ + addValue(text){ + this.currentNode.child.push( {[this.options.nameFor.text]: this.parseValue(text, this.options.tags.valueParsers) }); + } + + addPi(name){ + //TODO: set pi flag + if(!this.options.ignorePiTags){ + const node = new Node(name, this.attributes); + this.currentNode[":@"] = this.attributes; + this.currentNode.child.push(node); + } + this.attributes = {}; + } + getOutput(){ + return this.root.child[0]; + } +} + + + +class Node{ + constructor(tagname, attributes){ + this.tagname = tagname; + this.child = []; //nested tags, text, cdata, comments + if(attributes && Object.keys(attributes).length > 0) + this[":@"] = attributes; + } +} + +module.exports = OutputBuilder; \ No newline at end of file diff --git 
a/amplify/functions/deleteDocument/node_modules/fast-xml-parser/src/v5/OutputBuilders/JsMinArrBuilder.js b/amplify/functions/deleteDocument/node_modules/fast-xml-parser/src/v5/OutputBuilders/JsMinArrBuilder.js new file mode 100644 index 0000000..e0dc1e9 --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/fast-xml-parser/src/v5/OutputBuilders/JsMinArrBuilder.js @@ -0,0 +1,102 @@ +const {buildOptions,registerCommonValueParsers} = require("./ParserOptionsBuilder"); + +class OutputBuilder{ + constructor(options){ + this.options = buildOptions(options); + this.registeredParsers = registerCommonValueParsers(this.options); + } + + registerValueParser(name,parserInstance){//existing name will override the parser without warning + this.registeredParsers[name] = parserInstance; + } + + getInstance(parserOptions){ + return new JsMinArrBuilder(parserOptions, this.options, this.registeredParsers); + } +} + +const BaseOutputBuilder = require("./BaseOutputBuilder"); +const rootName = '^'; + +class JsMinArrBuilder extends BaseOutputBuilder{ + + constructor(parserOptions, options,registeredParsers) { + super(); + this.tagsStack = []; + this.parserOptions = parserOptions; + this.options = options; + this.registeredParsers = registeredParsers; + + this.root = {[rootName]: []}; + this.currentNode = this.root; + this.currentNodeTagName = rootName; + this.attributes = {}; + } + + addTag(tag){ + //when a new tag is added, it should be added as child of current node + //TODO: shift this check to the parser + if(tag.name === "__proto__") tag.name = "#__proto__"; + + this.tagsStack.push([this.currentNodeTagName,this.currentNode]); //this.currentNode is parent node here + this.currentNodeTagName = tag.name; + this.currentNode = { [tag.name]:[]} + if(Object.keys(this.attributes).length > 0){ + this.currentNode[":@"] = this.attributes; + this.attributes = {}; + } + } + + /** + * Check if the node should be added by checking user's preference + * @param {Node} node + * @returns 
boolean: true if the node should not be added + */ + closeTag(){ + const node = this.currentNode; + const nodeName = this.currentNodeTagName; + const arr = this.tagsStack.pop(); //set parent node in scope + this.currentNodeTagName = arr[0]; + this.currentNode = arr[1]; + + if(this.options.onClose !== undefined){ + //TODO TagPathMatcher + const resultTag = this.options.onClose(node, + new TagPathMatcher(this.tagsStack,node)); + + if(resultTag) return; + } + this.currentNode[this.currentNodeTagName].push(node); //to parent node + } + + //Called by parent class methods + _addChild(key, val){ + // if(key === "__proto__") tagName = "#__proto__"; + this.currentNode.push( {[key]: val }); + // this.currentNode.leafType = false; + } + + /** + * Add text value child node + * @param {string} text + */ + addValue(text){ + this.currentNode[this.currentNodeTagName].push( {[this.options.nameFor.text]: this.parseValue(text, this.options.tags.valueParsers) }); + } + + addPi(name){ + if(!this.options.ignorePiTags){ + const node = { [name]:[]} + if(this.attributes){ + node[":@"] = this.attributes; + } + this.currentNode.push(node); + } + this.attributes = {}; + } + getOutput(){ + return this.root[rootName]; + } +} + +module.exports = OutputBuilder; \ No newline at end of file diff --git a/amplify/functions/deleteDocument/node_modules/fast-xml-parser/src/v5/OutputBuilders/JsObjBuilder.js b/amplify/functions/deleteDocument/node_modules/fast-xml-parser/src/v5/OutputBuilders/JsObjBuilder.js new file mode 100644 index 0000000..37036c5 --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/fast-xml-parser/src/v5/OutputBuilders/JsObjBuilder.js @@ -0,0 +1,156 @@ + + +const {buildOptions,registerCommonValueParsers} = require("./ParserOptionsBuilder"); + +class OutputBuilder{ + constructor(builderOptions){ + this.options = buildOptions(builderOptions); + this.registeredParsers = registerCommonValueParsers(this.options); + } + + registerValueParser(name,parserInstance){//existing 
name will override the parser without warning + this.registeredParsers[name] = parserInstance; + } + + getInstance(parserOptions){ + return new JsObjBuilder(parserOptions, this.options, this.registeredParsers); + } +} + +const BaseOutputBuilder = require("./BaseOutputBuilder"); +const rootName = '^'; + +class JsObjBuilder extends BaseOutputBuilder{ + + constructor(parserOptions, builderOptions,registeredParsers) { + super(); + //hold the raw detail of a tag and sequence with reference to the output + this.tagsStack = []; + this.parserOptions = parserOptions; + this.options = builderOptions; + this.registeredParsers = registeredParsers; + + this.root = {}; + this.parent = this.root; + this.tagName = rootName; + this.value = {}; + this.textValue = ""; + this.attributes = {}; + } + + addTag(tag){ + + let value = ""; + if( !isEmpty(this.attributes)){ + value = {}; + if(this.options.attributes.groupBy){ + value[this.options.attributes.groupBy] = this.attributes; + }else{ + value = this.attributes; + } + } + + this.tagsStack.push([this.tagName, this.textValue, this.value]); //parent tag, parent text value, parent tag value (jsobj) + this.tagName = tag.name; + this.value = value; + this.textValue = ""; + this.attributes = {}; + } + + /** + * Check if the node should be added by checking user's preference + * @param {Node} node + * @returns boolean: true if the node should not be added + */ + closeTag(){ + const tagName = this.tagName; + let value = this.value; + let textValue = this.textValue; + + //update tag text value + if(typeof value !== "object" && !Array.isArray(value)){ + value = this.parseValue(textValue.trim(), this.options.tags.valueParsers); + }else if(textValue.length > 0){ + value[this.options.nameFor.text] = this.parseValue(textValue.trim(), this.options.tags.valueParsers); + } + + + let resultTag= { + tagName: tagName, + value: value + }; + + if(this.options.onTagClose !== undefined){ + //TODO TagPathMatcher + resultTag = this.options.onClose(tagName, 
value, this.textValue, new TagPathMatcher(this.tagsStack,node)); + + if(!resultTag) return; + } + + //set parent node in scope + let arr = this.tagsStack.pop(); + let parentTag = arr[2]; + parentTag=this._addChildTo(resultTag.tagName, resultTag.value, parentTag); + + this.tagName = arr[0]; + this.textValue = arr[1]; + this.value = parentTag; + } + + _addChild(key, val){ + if(typeof this.value === "string"){ + this.value = { [this.options.nameFor.text] : this.value }; + } + + this._addChildTo(key, val, this.value); + // this.currentNode.leafType = false; + this.attributes = {}; + } + + _addChildTo(key, val, node){ + if(typeof node === 'string') node = {}; + if(!node[key]){ + node[key] = val; + }else{ //Repeated + if(!Array.isArray(node[key])){ //but not stored as array + node[key] = [node[key]]; + } + node[key].push(val); + } + return node; + } + + + /** + * Add text value child node + * @param {string} text + */ + addValue(text){ + //TODO: use bytes join + if(this.textValue.length > 0) this.textValue += " " + text; + else this.textValue = text; + } + + addPi(name){ + let value = ""; + if( !isEmpty(this.attributes)){ + value = {}; + if(this.options.attributes.groupBy){ + value[this.options.attributes.groupBy] = this.attributes; + }else{ + value = this.attributes; + } + } + this._addChild(name, value); + + } + getOutput(){ + return this.value; + } +} + +function isEmpty(obj) { + return Object.keys(obj).length === 0; +} + +module.exports = OutputBuilder; \ No newline at end of file diff --git a/amplify/functions/deleteDocument/node_modules/fast-xml-parser/src/v5/OutputBuilders/ParserOptionsBuilder.js b/amplify/functions/deleteDocument/node_modules/fast-xml-parser/src/v5/OutputBuilders/ParserOptionsBuilder.js new file mode 100644 index 0000000..c71ea94 --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/fast-xml-parser/src/v5/OutputBuilders/ParserOptionsBuilder.js @@ -0,0 +1,99 @@ +const trimParser = require("../valueParsers/trim") +const booleanParser = 
require("../valueParsers/booleanParser") +const currencyParser = require("../valueParsers/currency") +const numberParser = require("../valueParsers/number") + +const defaultOptions={ + nameFor:{ + text: "#text", + comment: "", + cdata: "", + }, + // onTagClose: () => {}, + // onAttribute: () => {}, + piTag: false, + declaration: false, //"?xml" + tags: { + valueParsers: [ + // "trim", + // "boolean", + // "number", + // "currency", + // "date", + ] + }, + attributes:{ + prefix: "@_", + suffix: "", + groupBy: "", + + valueParsers: [ + // "trim", + // "boolean", + // "number", + // "currency", + // "date", + ] + }, + dataType:{ + + } +} + +//TODO +const withJoin = ["trim","join", /*"entities",*/"number","boolean","currency"/*, "date"*/] +const withoutJoin = ["trim", /*"entities",*/"number","boolean","currency"/*, "date"*/] + +function buildOptions(options){ + //clone + const finalOptions = { ... defaultOptions}; + + //add config missed in cloning + finalOptions.tags.valueParsers.push(...withJoin) + if(!this.preserveOrder) + finalOptions.tags.valueParsers.push(...withoutJoin); + + //add config missed in cloning + finalOptions.attributes.valueParsers.push(...withJoin) + + //override configuration + copyProperties(finalOptions,options); + return finalOptions; +} + +function copyProperties(target, source) { + for (let key in source) { + if (source.hasOwnProperty(key)) { + if (typeof source[key] === 'object' && !Array.isArray(source[key])) { + // Recursively copy nested properties + if (typeof target[key] === 'undefined') { + target[key] = {}; + } + copyProperties(target[key], source[key]); + } else { + // Copy non-nested properties + target[key] = source[key]; + } + } + } +} + +function registerCommonValueParsers(options){ + return { + "trim": new trimParser(), + // "join": this.entityParser.parse, + "boolean": new booleanParser(), + "number": new numberParser({ + hex: true, + leadingZeros: true, + eNotation: true + }), + "currency": new currencyParser(), + // "date": 
this.entityParser.parse, + } +} + +module.exports = { + buildOptions : buildOptions, + registerCommonValueParsers: registerCommonValueParsers +} \ No newline at end of file diff --git a/amplify/functions/deleteDocument/node_modules/fast-xml-parser/src/v5/Report.js b/amplify/functions/deleteDocument/node_modules/fast-xml-parser/src/v5/Report.js new file mode 100644 index 0000000..e69de29 diff --git a/amplify/functions/deleteDocument/node_modules/fast-xml-parser/src/v5/TagPath.js b/amplify/functions/deleteDocument/node_modules/fast-xml-parser/src/v5/TagPath.js new file mode 100644 index 0000000..d901cc3 --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/fast-xml-parser/src/v5/TagPath.js @@ -0,0 +1,81 @@ +class TagPath{ + constructor(pathStr){ + let text = ""; + let tName = ""; + let pos; + let aName = ""; + let aVal = ""; + this.stack = [] + + for (let i = 0; i < pathStr.length; i++) { + let ch = pathStr[i]; + if(ch === " ") { + if(text.length === 0) continue; + tName = text; text = ""; + }else if(ch === "["){ + if(tName.length === 0){ + tName = text; text = ""; + } + i++; + for (; i < pathStr.length; i++) { + ch = pathStr[i]; + if(ch=== "=") continue; + else if(ch=== "]") {aName = text.trim(); text=""; break; i--;} + else if(ch === "'" || ch === '"'){ + let attrEnd = pathStr.indexOf(ch,i+1); + aVal = pathStr.substring(i+1, attrEnd); + i = attrEnd; + }else{ + text +=ch; + } + } + }else if(ch !== " " && text.length === 0 && tName.length > 0){//reading tagName + //save previous tag + this.stack.push(new TagPathNode(tName,pos,aName,aVal)); + text = ch; tName = ""; aName = ""; aVal = ""; + }else{ + text+=ch; + } + } + + //last tag in the path + if(tName.length >0 || text.length>0){ + this.stack.push(new TagPathNode(text||tName,pos,aName,aVal)); + } + } + + match(tagStack,node){ + if(this.stack[0].name !== "*"){ + if(this.stack.length !== tagStack.length +1) return false; + + //loop through tagPath and tagStack and match + for (let i = 0; i < 
this.tagStack.length; i++) { + if(!this.stack[i].match(tagStack[i])) return false; + } + } + if(!this.stack[this.stack.length - 1].match(node)) return false; + return true; + } +} + +class TagPathNode{ + constructor(name,position,attrName,attrVal){ + this.name = name; + this.position = position; + this.attrName = attrName, + this.attrVal = attrVal; + } + + match(node){ + let matching = true; + matching = node.name === this.name; + if(this.position) matching = node.position === this.position; + if(this.attrName) matching = node.attrs[this.attrName !== undefined]; + if(this.attrVal) matching = node.attrs[this.attrName !== this.attrVal]; + return matching; + } +} + +// console.log((new TagPath("* b[b]")).stack); +// console.log((new TagPath("a[a] b[b] c")).stack); +// console.log((new TagPath(" b [ b= 'cf sdadwa' ] a ")).stack); \ No newline at end of file diff --git a/amplify/functions/deleteDocument/node_modules/fast-xml-parser/src/v5/TagPathMatcher.js b/amplify/functions/deleteDocument/node_modules/fast-xml-parser/src/v5/TagPathMatcher.js new file mode 100644 index 0000000..af23607 --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/fast-xml-parser/src/v5/TagPathMatcher.js @@ -0,0 +1,15 @@ +const TagPath = require("./TagPath"); + +class TagPathMatcher{ + constructor(stack,node){ + this.stack = stack; + this.node= node; + } + + match(path){ + const tagPath = new TagPath(path); + return tagPath.match(this.stack, this.node); + } +} + +module.exports = TagPathMatcher; \ No newline at end of file diff --git a/amplify/functions/deleteDocument/node_modules/fast-xml-parser/src/v5/XMLParser.js b/amplify/functions/deleteDocument/node_modules/fast-xml-parser/src/v5/XMLParser.js new file mode 100755 index 0000000..6de58ed --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/fast-xml-parser/src/v5/XMLParser.js @@ -0,0 +1,85 @@ +const { buildOptions} = require("./OptionsBuilder"); +const Xml2JsParser = require("./Xml2JsParser"); + +class XMLParser{ + + 
constructor(options){ + this.externalEntities = {}; + this.options = buildOptions(options); + // console.log(this.options) + } + /** + * Parse XML data string to JS object + * @param {string|Buffer} xmlData + * @param {boolean|Object} validationOption + */ + parse(xmlData){ + if(Array.isArray(xmlData) && xmlData.byteLength !== undefined){ + return this.parse(xmlData); + }else if( xmlData.toString){ + xmlData = xmlData.toString(); + }else{ + throw new Error("XML data is accepted in String or Bytes[] form.") + } + // if( validationOption){ + // if(validationOption === true) validationOption = {}; //validate with default options + + // const result = validator.validate(xmlData, validationOption); + // if (result !== true) { + // throw Error( `${result.err.msg}:${result.err.line}:${result.err.col}` ) + // } + // } + const parser = new Xml2JsParser(this.options); + parser.entityParser.addExternalEntities(this.externalEntities); + return parser.parse(xmlData); + } + /** + * Parse XML data buffer to JS object + * @param {string|Buffer} xmlData + * @param {boolean|Object} validationOption + */ + parseBytesArr(xmlData){ + if(Array.isArray(xmlData) && xmlData.byteLength !== undefined){ + }else{ + throw new Error("XML data is accepted in Bytes[] form.") + } + const parser = new Xml2JsParser(this.options); + parser.entityParser.addExternalEntities(this.externalEntities); + return parser.parseBytesArr(xmlData); + } + /** + * Parse XML data stream to JS object + * @param {fs.ReadableStream} xmlDataStream + */ + parseStream(xmlDataStream){ + if(!isStream(xmlDataStream)) throw new Error("FXP: Invalid stream input"); + + const orderedObjParser = new Xml2JsParser(this.options); + orderedObjParser.entityParser.addExternalEntities(this.externalEntities); + return orderedObjParser.parseStream(xmlDataStream); + } + + /** + * Add Entity which is not by default supported by this library + * @param {string} key + * @param {string} value + */ + addEntity(key, value){ + if(value.indexOf("&") 
!== -1){ + throw new Error("Entity value can't have '&'") + }else if(key.indexOf("&") !== -1 || key.indexOf(";") !== -1){ + throw new Error("An entity must be set without '&' and ';'. Eg. use '#xD' for ' '") + }else if(value === "&"){ + throw new Error("An entity with value '&' is not permitted"); + }else{ + this.externalEntities[key] = value; + } + } +} + +function isStream(stream){ + if(stream && typeof stream.read === "function" && typeof stream.on === "function" && typeof stream.readableEnded === "boolean") return true; + return false; +} + +module.exports = XMLParser; \ No newline at end of file diff --git a/amplify/functions/deleteDocument/node_modules/fast-xml-parser/src/v5/Xml2JsParser.js b/amplify/functions/deleteDocument/node_modules/fast-xml-parser/src/v5/Xml2JsParser.js new file mode 100644 index 0000000..c4baab4 --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/fast-xml-parser/src/v5/Xml2JsParser.js @@ -0,0 +1,237 @@ +const StringSource = require("./inputSource/StringSource"); +const BufferSource = require("./inputSource/BufferSource"); +const {readTagExp,readClosingTagName} = require("./XmlPartReader"); +const {readComment, readCdata,readDocType,readPiTag} = require("./XmlSpecialTagsReader"); +const TagPath = require("./TagPath"); +const TagPathMatcher = require("./TagPathMatcher"); +const EntitiesParser = require('./EntitiesParser'); + +//To hold the data of current tag +//This is usually used to compare jpath expression against current tag +class TagDetail{ + constructor(name){ + this.name = name; + this.position = 0; + // this.attributes = {}; + } +} + +class Xml2JsParser { + constructor(options) { + this.options = options; + + this.currentTagDetail = null; + this.tagTextData = ""; + this.tagsStack = []; + this.entityParser = new EntitiesParser(options.htmlEntities); + this.stopNodes = []; + for (let i = 0; i < this.options.stopNodes.length; i++) { + this.stopNodes.push(new TagPath(this.options.stopNodes[i])); + } + } + + 
parse(strData) { + this.source = new StringSource(strData); + this.parseXml(); + return this.outputBuilder.getOutput(); + } + parseBytesArr(data) { + this.source = new BufferSource(data ); + this.parseXml(); + return this.outputBuilder.getOutput(); + } + + parseXml() { + //TODO: Separate TagValueParser as separate class. So no scope issue in node builder class + + //OutputBuilder should be set in XML Parser + this.outputBuilder = this.options.OutputBuilder.getInstance(this.options); + this.root = { root: true}; + this.currentTagDetail = this.root; + + while(this.source.canRead()){ + let ch = this.source.readCh(); + if (ch === "") break; + + if(ch === "<"){//tagStart + let nextChar = this.source.readChAt(0); + if (nextChar === "" ) throw new Error("Unexpected end of source"); + + + if(nextChar === "!" || nextChar === "?"){ + this.source.updateBufferBoundary(); + //previously collected text should be added to current node + this.addTextNode(); + + this.readSpecialTag(nextChar);// Read DOCTYPE, comment, CDATA, PI tag + }else if(nextChar === "/"){ + this.source.updateBufferBoundary(); + this.readClosingTag(); + // console.log(this.source.buffer.length, this.source.readable); + // console.log(this.tagsStack.length); + }else{//opening tag + this.readOpeningTag(); + } + }else{ + this.tagTextData += ch; + } + }//End While loop + if(this.tagsStack.length > 0 || ( this.tagTextData !== "undefined" && this.tagTextData.trimEnd().length > 0) ) throw new Error("Unexpected data in the end of document"); + } + + /** + * read closing paired tag. Set parent tag in scope. + * skip a node on user's choice + */ + readClosingTag(){ + const tagName = this.processTagName(readClosingTagName(this.source)); + // console.log(tagName, this.tagsStack.length); + this.validateClosingTag(tagName); + // All the text data collected, belongs to current tag. 
+ if(!this.currentTagDetail.root) this.addTextNode(); + this.outputBuilder.closeTag(); + // Since the tag is closed now, parent tag comes in scope + this.currentTagDetail = this.tagsStack.pop(); + } + + validateClosingTag(tagName){ + // This can't be unpaired tag, or a stop tag. + if(this.isUnpaired(tagName) || this.isStopNode(tagName)) throw new Error(`Unexpected closing tag '${tagName}'`); + // This must match with last opening tag + else if(tagName !== this.currentTagDetail.name) + throw new Error(`Unexpected closing tag '${tagName}' expecting '${this.currentTagDetail.name}'`) + } + + /** + * Read paired, unpaired, self-closing, stop and special tags. + * Create a new node + * Push paired tag in stack. + */ + readOpeningTag(){ + //save previously collected text data to current node + this.addTextNode(); + + //create new tag + let tagExp = readTagExp(this, ">" ); + + // process and skip from tagsStack For unpaired tag, self closing tag, and stop node + const tagDetail = new TagDetail(tagExp.tagName); + if(this.isUnpaired(tagExp.tagName)) { + //TODO: this will lead 2 extra stack operation + this.outputBuilder.addTag(tagDetail); + this.outputBuilder.closeTag(); + } else if(tagExp.selfClosing){ + this.outputBuilder.addTag(tagDetail); + this.outputBuilder.closeTag(); + } else if(this.isStopNode(this.currentTagDetail)){ + // TODO: let's user set a stop node boundary detector for complex contents like script tag + //TODO: pass tag name only to avoid string operations + const content = source.readUptoCloseTag(` 0){ + //TODO: shift parsing to output builder + + this.outputBuilder.addValue(this.replaceEntities(this.tagTextData)); + } + this.tagTextData = ""; + } + // } + } + + processAttrName(name){ + if(name === "__proto__") name = "#__proto__"; + name = resolveNameSpace(name, this.removeNSPrefix); + return name; + } + + processTagName(name){ + if(name === "__proto__") name = "#__proto__"; + name = resolveNameSpace(name, this.removeNSPrefix); + return name; + } + + /** + 
* Generate tags path from tagsStack + */ + tagsPath(tagName){ + //TODO: return TagPath Object. User can call match method with path + return ""; + } + + isUnpaired(tagName){ + return this.options.tags.unpaired.indexOf(tagName) !== -1; + } + + /** + * valid expressions are + * tag nested + * * nested + * tag nested[attribute] + * tag nested[attribute=""] + * tag nested[attribute!=""] + * tag nested:0 //for future + * @param {string} tagName + * @returns + */ + isStopNode(node){ + for (let i = 0; i < this.stopNodes.length; i++) { + const givenPath = this.stopNodes[i]; + if(givenPath.match(this.tagsStack, node)) return true; + } + return false + } + + replaceEntities(text){ + //TODO: if option is set then replace entities + return this.entityParser.parse(text) + } +} + +function resolveNameSpace(name, removeNSPrefix) { + if (removeNSPrefix) { + const parts = name.split(':'); + if(parts.length === 2){ + if (parts[0] === 'xmlns') return ''; + else return parts[1]; + }else reportError(`Multiple namespaces ${name}`) + } + return name; +} + +module.exports = Xml2JsParser; \ No newline at end of file diff --git a/amplify/functions/deleteDocument/node_modules/fast-xml-parser/src/v5/XmlPartReader.js b/amplify/functions/deleteDocument/node_modules/fast-xml-parser/src/v5/XmlPartReader.js new file mode 100644 index 0000000..56b180e --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/fast-xml-parser/src/v5/XmlPartReader.js @@ -0,0 +1,212 @@ +'use strict'; + +/** + * find paired tag for a stop node + * @param {string} xmlDoc + * @param {string} tagName + * @param {number} i : start index + */ +function readStopNode(xmlDoc, tagName, i){ + const startIndex = i; + // Starting at 1 since we already have an open tag + let openTagCount = 1; + + for (; i < xmlDoc.length; i++) { + if( xmlDoc[i] === "<"){ + if (xmlDoc[i+1] === "/") {//close tag + const closeIndex = findSubStrIndex(xmlDoc, ">", i, `${tagName} is not closed`); + let closeTagName = 
xmlDoc.substring(i+2,closeIndex).trim(); + if(closeTagName === tagName){ + openTagCount--; + if (openTagCount === 0) { + return { + tagContent: xmlDoc.substring(startIndex, i), + i : closeIndex + } + } + } + i=closeIndex; + } else if(xmlDoc[i+1] === '?') { + const closeIndex = findSubStrIndex(xmlDoc, "?>", i+1, "StopNode is not closed.") + i=closeIndex; + } else if(xmlDoc.substr(i + 1, 3) === '!--') { + const closeIndex = findSubStrIndex(xmlDoc, "-->", i+3, "StopNode is not closed.") + i=closeIndex; + } else if(xmlDoc.substr(i + 1, 2) === '![') { + const closeIndex = findSubStrIndex(xmlDoc, "]]>", i, "StopNode is not closed.") - 2; + i=closeIndex; + } else { + const tagData = readTagExp(xmlDoc, i, '>') + + if (tagData) { + const openTagName = tagData && tagData.tagName; + if (openTagName === tagName && tagData.tagExp[tagData.tagExp.length-1] !== "/") { + openTagCount++; + } + i=tagData.closeIndex; + } + } + } + }//end for loop +} + +/** + * Read closing tag name + * @param {Source} source + * @returns tag name + */ +function readClosingTagName(source){ + let text = ""; //temporary data + while(source.canRead()){ + let ch = source.readCh(); + // if (ch === null || ch === undefined) break; + // source.updateBuffer(); + + if (ch === ">") return text.trimEnd(); + else text += ch; + } + throw new Error(`Unexpected end of source. Reading '${substr}'`); +} + +/** + * Read XML tag and build attributes map + * This function can be used to read normal tag, pi tag. + * This function can't be used to read comment, CDATA, DOCTYPE. 
+ * Eg + * @param {string} xmlDoc + * @param {number} startIndex starting index + * @returns tag expression includes tag name & attribute string + */ +function readTagExp(parser) { + let inSingleQuotes = false; + let inDoubleQuotes = false; + let i; + let EOE = false; + + for (i = 0; parser.source.canRead(i); i++) { + const char = parser.source.readChAt(i); + + if (char === "'" && !inDoubleQuotes) { + inSingleQuotes = !inSingleQuotes; + } else if (char === '"' && !inSingleQuotes) { + inDoubleQuotes = !inDoubleQuotes; + } else if (char === '>' && !inSingleQuotes && !inDoubleQuotes) { + // If not inside quotes, stop reading at '>' + EOE = true; + break; + } + + } + if(inSingleQuotes || inDoubleQuotes){ + throw new Error("Invalid attribute expression. Quote is not properly closed"); + }else if(!EOE) throw new Error("Unexpected closing of source. Waiting for '>'"); + + + const exp = parser.source.readStr(i); + parser.source.updateBufferBoundary(i + 1); + return buildTagExpObj(exp, parser) +} + +function readPiExp(parser) { + let inSingleQuotes = false; + let inDoubleQuotes = false; + let i; + let EOE = false; + + for (i = 0; parser.source.canRead(i) ; i++) { + const currentChar = parser.source.readChAt(i); + const nextChar = parser.source.readChAt(i+1); + + if (currentChar === "'" && !inDoubleQuotes) { + inSingleQuotes = !inSingleQuotes; + } else if (currentChar === '"' && !inSingleQuotes) { + inDoubleQuotes = !inDoubleQuotes; + } + + if (!inSingleQuotes && !inDoubleQuotes) { + if (currentChar === '?' && nextChar === '>') { + EOE = true; + break; // Exit the loop when '?>' is found + } + } + } + if(inSingleQuotes || inDoubleQuotes){ + throw new Error("Invalid attribute expression. Quote is not properly closed in PI tag expression"); + }else if(!EOE) throw new Error("Unexpected closing of source. 
Waiting for '?>'"); + + if(!parser.options.attributes.ignore){ + //TODO: use regex to verify attributes if not set to ignore + } + + const exp = parser.source.readStr(i); + parser.source.updateBufferBoundary(i + 1); + return buildTagExpObj(exp, parser) +} + +function buildTagExpObj(exp, parser){ + const tagExp = { + tagName: "", + selfClosing: false + }; + let attrsExp = ""; + + if(exp[exp.length -1] === "/") tagExp.selfClosing = true; + + //separate tag name + let i = 0; + for (; i < exp.length; i++) { + const char = exp[i]; + if(char === " "){ + tagExp.tagName = exp.substring(0, i); + attrsExp = exp.substring(i + 1); + break; + } + } + //only tag + if(tagExp.tagName.length === 0 && i === exp.length)tagExp.tagName = exp; + + tagExp.tagName = tagExp.tagName.trimEnd(); + + if(!parser.options.attributes.ignore && attrsExp.length > 0){ + parseAttributesExp(attrsExp,parser) + } + + return tagExp; +} + +const attrsRegx = new RegExp('([^\\s=]+)\\s*(=\\s*([\'"])([\\s\\S]*?)\\3)?', 'gm'); + +function parseAttributesExp(attrStr, parser) { + const matches = getAllMatches(attrStr, attrsRegx); + const len = matches.length; //don't make it inline + for (let i = 0; i < len; i++) { + let attrName = parser.processAttrName(matches[i][1]); + let attrVal = parser.replaceEntities(matches[i][4] || true); + + parser.outputBuilder.addAttribute(attrName, attrVal); + } +} + + +const getAllMatches = function(string, regex) { + const matches = []; + let match = regex.exec(string); + while (match) { + const allmatches = []; + allmatches.startIndex = regex.lastIndex - match[0].length; + const len = match.length; + for (let index = 0; index < len; index++) { + allmatches.push(match[index]); + } + matches.push(allmatches); + match = regex.exec(string); + } + return matches; +}; + +module.exports = { + readStopNode: readStopNode, + readClosingTagName: readClosingTagName, + readTagExp: readTagExp, + readPiExp: readPiExp, +} \ No newline at end of file diff --git 
a/amplify/functions/deleteDocument/node_modules/fast-xml-parser/src/v5/XmlSpecialTagsReader.js b/amplify/functions/deleteDocument/node_modules/fast-xml-parser/src/v5/XmlSpecialTagsReader.js new file mode 100644 index 0000000..0fba196 --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/fast-xml-parser/src/v5/XmlSpecialTagsReader.js @@ -0,0 +1,118 @@ +const {readPiExp} = require("./XmlPartReader"); + +function readCdata(parser){ + //"); + parser.outputBuilder.addCdata(text); +} +function readPiTag(parser){ + //"); + if(!tagExp) throw new Error("Invalid Pi Tag expression."); + + if (tagExp.tagName === "?xml") {//TODO: test if tagName is just xml + parser.outputBuilder.addDeclaration(); + } else { + parser.outputBuilder.addPi("?"+tagExp.tagName); + } +} + +function readComment(parser){ + //"); + parser.outputBuilder.addComment(text); +} + +const DOCTYPE_tags = { + "EL":/^EMENT\s+([^\s>]+)\s+(ANY|EMPTY|\(.+\)\s*$)/m, + "AT":/^TLIST\s+[^\s]+\s+[^\s]+\s+[^\s]+\s+[^\s]+\s+$/m, + "NO":/^TATION.+$/m +} +function readDocType(parser){ + //"); + const regx = DOCTYPE_tags[str]; + if(regx){ + const match = dTagExp.match(regx); + if(!match) throw new Error("Invalid DOCTYPE"); + }else throw new Error("Invalid DOCTYPE"); + } + }else if( ch === '>' && lastch === "]"){//end of doctype + return; + } + }else if( ch === '>'){//end of doctype + return; + }else if( ch === '['){ + hasBody = true; + }else{ + lastch = ch; + } + }//End While loop + +} + +function registerEntity(parser){ + //read Entity + let attrBoundary=""; + let name ="", val =""; + while(source.canRead()){ + let ch = source.readCh(); + + if(attrBoundary){ + if (ch === attrBoundary){ + val = text; + text = "" + } + }else if(ch === " " || ch === "\t"){ + if(!name){ + name = text.trimStart(); + text = ""; + } + }else if (ch === '"' || ch === "'") {//start of attrBoundary + attrBoundary = ch; + }else if(ch === ">"){ + parser.entityParser.addExternalEntity(name,val); + return; + }else{ + text+=ch; + } + } +} + 
+module.exports = { + readCdata: readCdata, + readComment:readComment, + readDocType:readDocType, + readPiTag:readPiTag +} \ No newline at end of file diff --git a/amplify/functions/deleteDocument/node_modules/fast-xml-parser/src/v5/inputSource/BufferSource.js b/amplify/functions/deleteDocument/node_modules/fast-xml-parser/src/v5/inputSource/BufferSource.js new file mode 100644 index 0000000..b83ce46 --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/fast-xml-parser/src/v5/inputSource/BufferSource.js @@ -0,0 +1,118 @@ +const Constants = { + space: 32, + tab: 9 +} +class BufferSource{ + constructor(bytesArr){ + this.line = 1; + this.cols = 0; + this.buffer = bytesArr; + this.startIndex = 0; + } + + + + readCh() { + return String.fromCharCode(this.buffer[this.startIndex++]); + } + + readChAt(index) { + return String.fromCharCode(this.buffer[this.startIndex+index]); + } + + readStr(n,from){ + if(typeof from === "undefined") from = this.startIndex; + return this.buffer.slice(from, from + n).toString(); + } + + readUpto(stopStr) { + const inputLength = this.buffer.length; + const stopLength = stopStr.length; + const stopBuffer = Buffer.from(stopStr); + + for (let i = this.startIndex; i < inputLength; i++) { + let match = true; + for (let j = 0; j < stopLength; j++) { + if (this.buffer[i + j] !== stopBuffer[j]) { + match = false; + break; + } + } + + if (match) { + const result = this.buffer.slice(this.startIndex, i).toString(); + this.startIndex = i + stopLength; + return result; + } + } + + throw new Error(`Unexpected end of source. 
Reading '${stopStr}'`); +} + +readUptoCloseTag(stopStr) { //stopStr: "'){ //TODO: if it should be equivalent ASCII + match = 2; + //tag boundary found + // this.startIndex + } + }else{ + match = 1; + for (let j = 0; j < stopLength; j++) { + if (this.buffer[i + j] !== stopBuffer[j]) { + match = 0; + break; + } + } + } + if (match === 2) {//matched closing part + const result = this.buffer.slice(this.startIndex, stopIndex - 1 ).toString(); + this.startIndex = i + 1; + return result; + } + } + + throw new Error(`Unexpected end of source. Reading '${stopStr}'`); +} + + readFromBuffer(n, shouldUpdate) { + let ch; + if (n === 1) { + ch = this.buffer[this.startIndex]; + if (ch === 10) { + this.line++; + this.cols = 1; + } else { + this.cols++; + } + ch = String.fromCharCode(ch); + } else { + this.cols += n; + ch = this.buffer.slice(this.startIndex, this.startIndex + n).toString(); + } + if (shouldUpdate) this.updateBuffer(n); + return ch; + } + + updateBufferBoundary(n = 1) { //n: number of characters read + this.startIndex += n; + } + + canRead(n){ + n = n || this.startIndex; + return this.buffer.length - n + 1 > 0; + } + +} + +module.exports = BufferSource; \ No newline at end of file diff --git a/amplify/functions/deleteDocument/node_modules/fast-xml-parser/src/v5/inputSource/StringSource.js b/amplify/functions/deleteDocument/node_modules/fast-xml-parser/src/v5/inputSource/StringSource.js new file mode 100644 index 0000000..a996528 --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/fast-xml-parser/src/v5/inputSource/StringSource.js @@ -0,0 +1,123 @@ +const whiteSpaces = [" ", "\n", "\t"]; + + +class StringSource{ + constructor(str){ + this.line = 1; + this.cols = 0; + this.buffer = str; + //a boundary pointer to indicate where from the buffer dat should be read + // data before this pointer can be deleted to free the memory + this.startIndex = 0; + } + + readCh() { + return this.buffer[this.startIndex++]; + } + + readChAt(index) { + return 
this.buffer[this.startIndex+index]; + } + + readStr(n,from){ + if(typeof from === "undefined") from = this.startIndex; + return this.buffer.substring(from, from + n); + } + + readUpto(stopStr) { + const inputLength = this.buffer.length; + const stopLength = stopStr.length; + + for (let i = this.startIndex; i < inputLength; i++) { + let match = true; + for (let j = 0; j < stopLength; j++) { + if (this.buffer[i + j] !== stopStr[j]) { + match = false; + break; + } + } + + if (match) { + const result = this.buffer.substring(this.startIndex, i); + this.startIndex = i + stopLength; + return result; + } + } + + throw new Error(`Unexpected end of source. Reading '${stopStr}'`); + } + + readUptoCloseTag(stopStr) { //stopStr: "'){ + match = 2; + //tag boundary found + // this.startIndex + } + }else{ + match = 1; + for (let j = 0; j < stopLength; j++) { + if (this.buffer[i + j] !== stopStr[j]) { + match = 0; + break; + } + } + } + if (match === 2) {//matched closing part + const result = this.buffer.substring(this.startIndex, stopIndex - 1 ); + this.startIndex = i + 1; + return result; + } + } + + throw new Error(`Unexpected end of source. 
Reading '${stopStr}'`); + } + + readFromBuffer(n, updateIndex){ + let ch; + if(n===1){ + ch = this.buffer[this.startIndex]; + // if(ch === "\n") { + // this.line++; + // this.cols = 1; + // }else{ + // this.cols++; + // } + }else{ + ch = this.buffer.substring(this.startIndex, this.startIndex + n); + // if("".indexOf("\n") !== -1){ + // //TODO: handle the scenario when there are multiple lines + // //TODO: col should be set to number of chars after last '\n' + // // this.cols = 1; + // }else{ + // this.cols += n; + + // } + } + if(updateIndex) this.updateBufferBoundary(n); + return ch; + } + + //TODO: rename to updateBufferReadIndex + + updateBufferBoundary(n = 1) { //n: number of characters read + this.startIndex += n; + } + + canRead(n){ + n = n || this.startIndex; + return this.buffer.length - n + 1 > 0; + } + +} + +module.exports = StringSource; \ No newline at end of file diff --git a/amplify/functions/deleteDocument/node_modules/fast-xml-parser/src/v5/valueParsers/EntitiesParser.js b/amplify/functions/deleteDocument/node_modules/fast-xml-parser/src/v5/valueParsers/EntitiesParser.js new file mode 100644 index 0000000..62cc02f --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/fast-xml-parser/src/v5/valueParsers/EntitiesParser.js @@ -0,0 +1,107 @@ +const ampEntity = { regex: /&(amp|#38|#x26);/g, val : "&"}; +const htmlEntities = { + "space": { regex: /&(nbsp|#160);/g, val: " " }, + // "lt" : { regex: /&(lt|#60);/g, val: "<" }, + // "gt" : { regex: /&(gt|#62);/g, val: ">" }, + // "amp" : { regex: /&(amp|#38);/g, val: "&" }, + // "quot" : { regex: /&(quot|#34);/g, val: "\"" }, + // "apos" : { regex: /&(apos|#39);/g, val: "'" }, + "cent" : { regex: /&(cent|#162);/g, val: "¢" }, + "pound" : { regex: /&(pound|#163);/g, val: "£" }, + "yen" : { regex: /&(yen|#165);/g, val: "¥" }, + "euro" : { regex: /&(euro|#8364);/g, val: "€" }, + "copyright" : { regex: /&(copy|#169);/g, val: "©" }, + "reg" : { regex: /&(reg|#174);/g, val: "®" }, + "inr" : { regex: 
/&(inr|#8377);/g, val: "₹" }, + "num_dec": { regex: /&#([0-9]{1,7});/g, val : (_, str) => String.fromCharCode(Number.parseInt(str, 10)) }, + "num_hex": { regex: /&#x([0-9a-fA-F]{1,6});/g, val : (_, str) => String.fromCharCode(Number.parseInt(str, 16)) }, +}; + +class EntitiesParser{ + constructor(replaceHtmlEntities) { + this.replaceHtmlEntities = replaceHtmlEntities; + this.docTypeEntities = {}; + this.lastEntities = { + "apos" : { regex: /&(apos|#39|#x27);/g, val : "'"}, + "gt" : { regex: /&(gt|#62|#x3E);/g, val : ">"}, + "lt" : { regex: /&(lt|#60|#x3C);/g, val : "<"}, + "quot" : { regex: /&(quot|#34|#x22);/g, val : "\""}, + }; + } + + addExternalEntities(externalEntities){ + const entKeys = Object.keys(externalEntities); + for (let i = 0; i < entKeys.length; i++) { + const ent = entKeys[i]; + this.addExternalEntity(ent,externalEntities[ent]) + } + } + addExternalEntity(key,val){ + validateEntityName(key); + if(val.indexOf("&") !== -1) { + reportWarning(`Entity ${key} is not added as '&' is found in value;`) + return; + }else{ + this.lastEntities[ent] = { + regex: new RegExp("&"+key+";","g"), + val : val + } + } + } + + addDocTypeEntities(entities){ + const entKeys = Object.keys(entities); + for (let i = 0; i < entKeys.length; i++) { + const ent = entKeys[i]; + this.docTypeEntities[ent] = { + regex: new RegExp("&"+ent+";","g"), + val : entities[ent] + } + } + } + + parse(val){ + return this.replaceEntitiesValue(val) + } + + /** + * 1. Replace DOCTYPE entities + * 2. Replace external entities + * 3. 
Replace HTML entities if asked + * @param {string} val + */ + replaceEntitiesValue(val){ + if(typeof val === "string" && val.length > 0){ + for(let entityName in this.docTypeEntities){ + const entity = this.docTypeEntities[entityName]; + val = val.replace( entity.regx, entity.val); + } + for(let entityName in this.lastEntities){ + const entity = this.lastEntities[entityName]; + val = val.replace( entity.regex, entity.val); + } + if(this.replaceHtmlEntities){ + for(let entityName in htmlEntities){ + const entity = htmlEntities[entityName]; + val = val.replace( entity.regex, entity.val); + } + } + val = val.replace( ampEntity.regex, ampEntity.val); + } + return val; + } +}; + +//an entity name should not contains special characters that may be used in regex +//Eg !?\\\/[]$%{}^&*()<> +const specialChar = "!?\\\/[]$%{}^&*()<>|+"; + +function validateEntityName(name){ + for (let i = 0; i < specialChar.length; i++) { + const ch = specialChar[i]; + if(name.indexOf(ch) !== -1) throw new Error(`Invalid character ${ch} in entity name`); + } + return name; +} + +module.exports = EntitiesParser; \ No newline at end of file diff --git a/amplify/functions/deleteDocument/node_modules/fast-xml-parser/src/v5/valueParsers/booleanParser.js b/amplify/functions/deleteDocument/node_modules/fast-xml-parser/src/v5/valueParsers/booleanParser.js new file mode 100644 index 0000000..f8f5d12 --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/fast-xml-parser/src/v5/valueParsers/booleanParser.js @@ -0,0 +1,23 @@ +class boolParser{ + constructor(trueList, falseList){ + if(trueList) + this.trueList = trueList; + else + this.trueList = ["true"]; + + if(falseList) + this.falseList = falseList; + else + this.falseList = ["false"]; + } + parse(val){ + if (typeof val === 'string') { + //TODO: performance: don't convert + const temp = val.toLowerCase(); + if(this.trueList.indexOf(temp) !== -1) return true; + else if(this.falseList.indexOf(temp) !== -1 ) return false; + } + return val; + } 
+} +module.exports = boolParser; \ No newline at end of file diff --git a/amplify/functions/deleteDocument/node_modules/fast-xml-parser/src/v5/valueParsers/booleanParserExt.js b/amplify/functions/deleteDocument/node_modules/fast-xml-parser/src/v5/valueParsers/booleanParserExt.js new file mode 100644 index 0000000..21b8050 --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/fast-xml-parser/src/v5/valueParsers/booleanParserExt.js @@ -0,0 +1,20 @@ +function boolParserExt(val){ + if(isArray(val)){ + for (let i = 0; i < val.length; i++) { + val[i] = parse(val[i]) + } + }else{ + val = parse(val) + } + return val; +} + +function parse(val){ + if (typeof val === 'string') { + const temp = val.toLowerCase(); + if(temp === 'true' || temp ==="yes" || temp==="1") return true; + else if(temp === 'false' || temp ==="no" || temp==="0") return false; + } + return val; +} +module.exports = boolParserExt; \ No newline at end of file diff --git a/amplify/functions/deleteDocument/node_modules/fast-xml-parser/src/v5/valueParsers/currency.js b/amplify/functions/deleteDocument/node_modules/fast-xml-parser/src/v5/valueParsers/currency.js new file mode 100644 index 0000000..82e21e7 --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/fast-xml-parser/src/v5/valueParsers/currency.js @@ -0,0 +1,40 @@ +const defaultOptions = { + maxLength: 200, + // locale: "en-IN" +} +const localeMap = { + "$":"en-US", + "€":"de-DE", + "£":"en-GB", + "¥":"ja-JP", + "₹":"en-IN", +} +const sign = "(?:-|\+)?"; +const digitsAndSeparator = "(?:\d+|\d{1,3}(?:,\d{3})+)"; +const decimalPart = "(?:\.\d{1,2})?"; +const symbol = "(?:\$|€|¥|₹)?"; + +const currencyCheckRegex = /^\s*(?:-|\+)?(?:\d+|\d{1,3}(?:,\d{3})+)?(?:\.\d{1,2})?\s*(?:\$|€|¥|₹)?\s*$/u; + +class CurrencyParser{ + constructor(options){ + this.options = options || defaultOptions; + } + parse(val){ + if (typeof val === 'string' && val.length <= this.options.maxLength) { + if(val.indexOf(",,") !== -1 && val.indexOf(".." 
!== -1)){ + const match = val.match(currencyCheckRegex); + if(match){ + const locale = this.options.locale || localeMap[match[2]||match[5]||"₹"]; + const formatter = new Intl.NumberFormat(locale) + val = val.replace(/[^0-9,.]/g, '').trim(); + val = Number(val.replace(formatter.format(1000)[1], '')); + } + } + } + return val; + } +} +CurrencyParser.defaultOptions = defaultOptions; + +module.exports = CurrencyParser; \ No newline at end of file diff --git a/amplify/functions/deleteDocument/node_modules/fast-xml-parser/src/v5/valueParsers/join.js b/amplify/functions/deleteDocument/node_modules/fast-xml-parser/src/v5/valueParsers/join.js new file mode 100644 index 0000000..d7f2027 --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/fast-xml-parser/src/v5/valueParsers/join.js @@ -0,0 +1,14 @@ +/** + * + * @param {array} val + * @param {string} by + * @returns + */ +function join(val, by=" "){ + if(isArray(val)){ + val.join(by) + } + return val; +} + +module.exports = join; \ No newline at end of file diff --git a/amplify/functions/deleteDocument/node_modules/fast-xml-parser/src/v5/valueParsers/number.js b/amplify/functions/deleteDocument/node_modules/fast-xml-parser/src/v5/valueParsers/number.js new file mode 100644 index 0000000..bef3803 --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/fast-xml-parser/src/v5/valueParsers/number.js @@ -0,0 +1,16 @@ +const toNumber = require("strnum"); + + +class numParser{ + constructor(options){ + this.options = options; + } + parse(val){ + if (typeof val === 'string') { + val = toNumber(val,this.options); + } + return val; + } +} + +module.exports = numParser; \ No newline at end of file diff --git a/amplify/functions/deleteDocument/node_modules/fast-xml-parser/src/v5/valueParsers/trim.js b/amplify/functions/deleteDocument/node_modules/fast-xml-parser/src/v5/valueParsers/trim.js new file mode 100644 index 0000000..ecce49a --- /dev/null +++ 
b/amplify/functions/deleteDocument/node_modules/fast-xml-parser/src/v5/valueParsers/trim.js @@ -0,0 +1,8 @@ +class trimmer{ + parse(val){ + if(typeof val === "string") return val.trim(); + else return val; + } +} + +module.exports = trimmer; \ No newline at end of file diff --git a/amplify/functions/deleteDocument/node_modules/fast-xml-parser/src/validator.js b/amplify/functions/deleteDocument/node_modules/fast-xml-parser/src/validator.js new file mode 100644 index 0000000..3b1b2ef --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/fast-xml-parser/src/validator.js @@ -0,0 +1,425 @@ +'use strict'; + +const util = require('./util'); + +const defaultOptions = { + allowBooleanAttributes: false, //A tag can have attributes without any value + unpairedTags: [] +}; + +//const tagsPattern = new RegExp("<\\/?([\\w:\\-_\.]+)\\s*\/?>","g"); +exports.validate = function (xmlData, options) { + options = Object.assign({}, defaultOptions, options); + + //xmlData = xmlData.replace(/(\r\n|\n|\r)/gm,"");//make it single line + //xmlData = xmlData.replace(/(^\s*<\?xml.*?\?>)/g,"");//Remove XML starting tag + //xmlData = xmlData.replace(/()/g,"");//Remove DOCTYPE + const tags = []; + let tagFound = false; + + //indicates that the root tag has been closed (aka. 
depth 0 has been reached) + let reachedRoot = false; + + if (xmlData[0] === '\ufeff') { + // check for byte order mark (BOM) + xmlData = xmlData.substr(1); + } + + for (let i = 0; i < xmlData.length; i++) { + + if (xmlData[i] === '<' && xmlData[i+1] === '?') { + i+=2; + i = readPI(xmlData,i); + if (i.err) return i; + }else if (xmlData[i] === '<') { + //starting of tag + //read until you reach to '>' avoiding any '>' in attribute value + let tagStartPos = i; + i++; + + if (xmlData[i] === '!') { + i = readCommentAndCDATA(xmlData, i); + continue; + } else { + let closingTag = false; + if (xmlData[i] === '/') { + //closing tag + closingTag = true; + i++; + } + //read tagname + let tagName = ''; + for (; i < xmlData.length && + xmlData[i] !== '>' && + xmlData[i] !== ' ' && + xmlData[i] !== '\t' && + xmlData[i] !== '\n' && + xmlData[i] !== '\r'; i++ + ) { + tagName += xmlData[i]; + } + tagName = tagName.trim(); + //console.log(tagName); + + if (tagName[tagName.length - 1] === '/') { + //self closing tag without attributes + tagName = tagName.substring(0, tagName.length - 1); + //continue; + i--; + } + if (!validateTagName(tagName)) { + let msg; + if (tagName.trim().length === 0) { + msg = "Invalid space after '<'."; + } else { + msg = "Tag '"+tagName+"' is an invalid name."; + } + return getErrorObject('InvalidTag', msg, getLineNumberForPosition(xmlData, i)); + } + + const result = readAttributeStr(xmlData, i); + if (result === false) { + return getErrorObject('InvalidAttr', "Attributes for '"+tagName+"' have open quote.", getLineNumberForPosition(xmlData, i)); + } + let attrStr = result.value; + i = result.index; + + if (attrStr[attrStr.length - 1] === '/') { + //self closing tag + const attrStrStart = i - attrStr.length; + attrStr = attrStr.substring(0, attrStr.length - 1); + const isValid = validateAttributeString(attrStr, options); + if (isValid === true) { + tagFound = true; + //continue; //text may presents after self closing tag + } else { + //the result from the 
nested function returns the position of the error within the attribute + //in order to get the 'true' error line, we need to calculate the position where the attribute begins (i - attrStr.length) and then add the position within the attribute + //this gives us the absolute index in the entire xml, which we can use to find the line at last + return getErrorObject(isValid.err.code, isValid.err.msg, getLineNumberForPosition(xmlData, attrStrStart + isValid.err.line)); + } + } else if (closingTag) { + if (!result.tagClosed) { + return getErrorObject('InvalidTag', "Closing tag '"+tagName+"' doesn't have proper closing.", getLineNumberForPosition(xmlData, i)); + } else if (attrStr.trim().length > 0) { + return getErrorObject('InvalidTag', "Closing tag '"+tagName+"' can't have attributes or invalid starting.", getLineNumberForPosition(xmlData, tagStartPos)); + } else if (tags.length === 0) { + return getErrorObject('InvalidTag', "Closing tag '"+tagName+"' has not been opened.", getLineNumberForPosition(xmlData, tagStartPos)); + } else { + const otg = tags.pop(); + if (tagName !== otg.tagName) { + let openPos = getLineNumberForPosition(xmlData, otg.tagStartPos); + return getErrorObject('InvalidTag', + "Expected closing tag '"+otg.tagName+"' (opened in line "+openPos.line+", col "+openPos.col+") instead of closing tag '"+tagName+"'.", + getLineNumberForPosition(xmlData, tagStartPos)); + } + + //when there are no more tags, we reached the root level. 
+ if (tags.length == 0) { + reachedRoot = true; + } + } + } else { + const isValid = validateAttributeString(attrStr, options); + if (isValid !== true) { + //the result from the nested function returns the position of the error within the attribute + //in order to get the 'true' error line, we need to calculate the position where the attribute begins (i - attrStr.length) and then add the position within the attribute + //this gives us the absolute index in the entire xml, which we can use to find the line at last + return getErrorObject(isValid.err.code, isValid.err.msg, getLineNumberForPosition(xmlData, i - attrStr.length + isValid.err.line)); + } + + //if the root level has been reached before ... + if (reachedRoot === true) { + return getErrorObject('InvalidXml', 'Multiple possible root nodes found.', getLineNumberForPosition(xmlData, i)); + } else if(options.unpairedTags.indexOf(tagName) !== -1){ + //don't push into stack + } else { + tags.push({tagName, tagStartPos}); + } + tagFound = true; + } + + //skip tag text value + //It may include comments and CDATA value + for (i++; i < xmlData.length; i++) { + if (xmlData[i] === '<') { + if (xmlData[i + 1] === '!') { + //comment or CADATA + i++; + i = readCommentAndCDATA(xmlData, i); + continue; + } else if (xmlData[i+1] === '?') { + i = readPI(xmlData, ++i); + if (i.err) return i; + } else{ + break; + } + } else if (xmlData[i] === '&') { + const afterAmp = validateAmpersand(xmlData, i); + if (afterAmp == -1) + return getErrorObject('InvalidChar', "char '&' is not expected.", getLineNumberForPosition(xmlData, i)); + i = afterAmp; + }else{ + if (reachedRoot === true && !isWhiteSpace(xmlData[i])) { + return getErrorObject('InvalidXml', "Extra text at the end", getLineNumberForPosition(xmlData, i)); + } + } + } //end of reading tag text value + if (xmlData[i] === '<') { + i--; + } + } + } else { + if ( isWhiteSpace(xmlData[i])) { + continue; + } + return getErrorObject('InvalidChar', "char '"+xmlData[i]+"' is not 
expected.", getLineNumberForPosition(xmlData, i)); + } + } + + if (!tagFound) { + return getErrorObject('InvalidXml', 'Start tag expected.', 1); + }else if (tags.length == 1) { + return getErrorObject('InvalidTag', "Unclosed tag '"+tags[0].tagName+"'.", getLineNumberForPosition(xmlData, tags[0].tagStartPos)); + }else if (tags.length > 0) { + return getErrorObject('InvalidXml', "Invalid '"+ + JSON.stringify(tags.map(t => t.tagName), null, 4).replace(/\r?\n/g, '')+ + "' found.", {line: 1, col: 1}); + } + + return true; +}; + +function isWhiteSpace(char){ + return char === ' ' || char === '\t' || char === '\n' || char === '\r'; +} +/** + * Read Processing insstructions and skip + * @param {*} xmlData + * @param {*} i + */ +function readPI(xmlData, i) { + const start = i; + for (; i < xmlData.length; i++) { + if (xmlData[i] == '?' || xmlData[i] == ' ') { + //tagname + const tagname = xmlData.substr(start, i - start); + if (i > 5 && tagname === 'xml') { + return getErrorObject('InvalidXml', 'XML declaration allowed only at the start of the document.', getLineNumberForPosition(xmlData, i)); + } else if (xmlData[i] == '?' 
&& xmlData[i + 1] == '>') { + //check if valid attribut string + i++; + break; + } else { + continue; + } + } + } + return i; +} + +function readCommentAndCDATA(xmlData, i) { + if (xmlData.length > i + 5 && xmlData[i + 1] === '-' && xmlData[i + 2] === '-') { + //comment + for (i += 3; i < xmlData.length; i++) { + if (xmlData[i] === '-' && xmlData[i + 1] === '-' && xmlData[i + 2] === '>') { + i += 2; + break; + } + } + } else if ( + xmlData.length > i + 8 && + xmlData[i + 1] === 'D' && + xmlData[i + 2] === 'O' && + xmlData[i + 3] === 'C' && + xmlData[i + 4] === 'T' && + xmlData[i + 5] === 'Y' && + xmlData[i + 6] === 'P' && + xmlData[i + 7] === 'E' + ) { + let angleBracketsCount = 1; + for (i += 8; i < xmlData.length; i++) { + if (xmlData[i] === '<') { + angleBracketsCount++; + } else if (xmlData[i] === '>') { + angleBracketsCount--; + if (angleBracketsCount === 0) { + break; + } + } + } + } else if ( + xmlData.length > i + 9 && + xmlData[i + 1] === '[' && + xmlData[i + 2] === 'C' && + xmlData[i + 3] === 'D' && + xmlData[i + 4] === 'A' && + xmlData[i + 5] === 'T' && + xmlData[i + 6] === 'A' && + xmlData[i + 7] === '[' + ) { + for (i += 8; i < xmlData.length; i++) { + if (xmlData[i] === ']' && xmlData[i + 1] === ']' && xmlData[i + 2] === '>') { + i += 2; + break; + } + } + } + + return i; +} + +const doubleQuote = '"'; +const singleQuote = "'"; + +/** + * Keep reading xmlData until '<' is found outside the attribute value. 
+ * @param {string} xmlData + * @param {number} i + */ +function readAttributeStr(xmlData, i) { + let attrStr = ''; + let startChar = ''; + let tagClosed = false; + for (; i < xmlData.length; i++) { + if (xmlData[i] === doubleQuote || xmlData[i] === singleQuote) { + if (startChar === '') { + startChar = xmlData[i]; + } else if (startChar !== xmlData[i]) { + //if vaue is enclosed with double quote then single quotes are allowed inside the value and vice versa + } else { + startChar = ''; + } + } else if (xmlData[i] === '>') { + if (startChar === '') { + tagClosed = true; + break; + } + } + attrStr += xmlData[i]; + } + if (startChar !== '') { + return false; + } + + return { + value: attrStr, + index: i, + tagClosed: tagClosed + }; +} + +/** + * Select all the attributes whether valid or invalid. + */ +const validAttrStrRegxp = new RegExp('(\\s*)([^\\s=]+)(\\s*=)?(\\s*([\'"])(([\\s\\S])*?)\\5)?', 'g'); + +//attr, ="sd", a="amit's", a="sd"b="saf", ab cd="" + +function validateAttributeString(attrStr, options) { + //console.log("start:"+attrStr+":end"); + + //if(attrStr.trim().length === 0) return true; //empty string + + const matches = util.getAllMatches(attrStr, validAttrStrRegxp); + const attrNames = {}; + + for (let i = 0; i < matches.length; i++) { + if (matches[i][1].length === 0) { + //nospace before attribute name: a="sd"b="saf" + return getErrorObject('InvalidAttr', "Attribute '"+matches[i][2]+"' has no space in starting.", getPositionFromMatch(matches[i])) + } else if (matches[i][3] !== undefined && matches[i][4] === undefined) { + return getErrorObject('InvalidAttr', "Attribute '"+matches[i][2]+"' is without value.", getPositionFromMatch(matches[i])); + } else if (matches[i][3] === undefined && !options.allowBooleanAttributes) { + //independent attribute: ab + return getErrorObject('InvalidAttr', "boolean attribute '"+matches[i][2]+"' is not allowed.", getPositionFromMatch(matches[i])); + } + /* else if(matches[i][6] === undefined){//attribute without 
value: ab= + return { err: { code:"InvalidAttr",msg:"attribute " + matches[i][2] + " has no value assigned."}}; + } */ + const attrName = matches[i][2]; + if (!validateAttrName(attrName)) { + return getErrorObject('InvalidAttr', "Attribute '"+attrName+"' is an invalid name.", getPositionFromMatch(matches[i])); + } + if (!attrNames.hasOwnProperty(attrName)) { + //check for duplicate attribute. + attrNames[attrName] = 1; + } else { + return getErrorObject('InvalidAttr', "Attribute '"+attrName+"' is repeated.", getPositionFromMatch(matches[i])); + } + } + + return true; +} + +function validateNumberAmpersand(xmlData, i) { + let re = /\d/; + if (xmlData[i] === 'x') { + i++; + re = /[\da-fA-F]/; + } + for (; i < xmlData.length; i++) { + if (xmlData[i] === ';') + return i; + if (!xmlData[i].match(re)) + break; + } + return -1; +} + +function validateAmpersand(xmlData, i) { + // https://www.w3.org/TR/xml/#dt-charref + i++; + if (xmlData[i] === ';') + return -1; + if (xmlData[i] === '#') { + i++; + return validateNumberAmpersand(xmlData, i); + } + let count = 0; + for (; i < xmlData.length; i++, count++) { + if (xmlData[i].match(/\w/) && count < 20) + continue; + if (xmlData[i] === ';') + break; + return -1; + } + return i; +} + +function getErrorObject(code, message, lineNumber) { + return { + err: { + code: code, + msg: message, + line: lineNumber.line || lineNumber, + col: lineNumber.col, + }, + }; +} + +function validateAttrName(attrName) { + return util.isName(attrName); +} + +// const startsWithXML = /^xml/i; + +function validateTagName(tagname) { + return util.isName(tagname) /* && !tagname.match(startsWithXML) */; +} + +//this function returns the line number for the character at the given index +function getLineNumberForPosition(xmlData, index) { + const lines = xmlData.substring(0, index).split(/\r?\n/); + return { + line: lines.length, + + // column number is last line's length + 1, because column numbering starts at 1: + col: lines[lines.length - 1].length + 1 
+ }; +} + +//this function returns the position of the first character of match within attrStr +function getPositionFromMatch(match) { + return match.startIndex + match[1].length; +} diff --git a/amplify/functions/deleteDocument/node_modules/fast-xml-parser/src/xmlbuilder/json2xml.js b/amplify/functions/deleteDocument/node_modules/fast-xml-parser/src/xmlbuilder/json2xml.js new file mode 100644 index 0000000..f30604a --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/fast-xml-parser/src/xmlbuilder/json2xml.js @@ -0,0 +1,281 @@ +'use strict'; +//parse Empty Node as self closing node +const buildFromOrderedJs = require('./orderedJs2Xml'); + +const defaultOptions = { + attributeNamePrefix: '@_', + attributesGroupName: false, + textNodeName: '#text', + ignoreAttributes: true, + cdataPropName: false, + format: false, + indentBy: ' ', + suppressEmptyNode: false, + suppressUnpairedNode: true, + suppressBooleanAttributes: true, + tagValueProcessor: function(key, a) { + return a; + }, + attributeValueProcessor: function(attrName, a) { + return a; + }, + preserveOrder: false, + commentPropName: false, + unpairedTags: [], + entities: [ + { regex: new RegExp("&", "g"), val: "&" },//it must be on top + { regex: new RegExp(">", "g"), val: ">" }, + { regex: new RegExp("<", "g"), val: "<" }, + { regex: new RegExp("\'", "g"), val: "'" }, + { regex: new RegExp("\"", "g"), val: """ } + ], + processEntities: true, + stopNodes: [], + // transformTagName: false, + // transformAttributeName: false, + oneListGroup: false +}; + +function Builder(options) { + this.options = Object.assign({}, defaultOptions, options); + if (this.options.ignoreAttributes || this.options.attributesGroupName) { + this.isAttribute = function(/*a*/) { + return false; + }; + } else { + this.attrPrefixLen = this.options.attributeNamePrefix.length; + this.isAttribute = isAttribute; + } + + this.processTextOrObjNode = processTextOrObjNode + + if (this.options.format) { + this.indentate = indentate; + 
this.tagEndChar = '>\n'; + this.newLine = '\n'; + } else { + this.indentate = function() { + return ''; + }; + this.tagEndChar = '>'; + this.newLine = ''; + } +} + +Builder.prototype.build = function(jObj) { + if(this.options.preserveOrder){ + return buildFromOrderedJs(jObj, this.options); + }else { + if(Array.isArray(jObj) && this.options.arrayNodeName && this.options.arrayNodeName.length > 1){ + jObj = { + [this.options.arrayNodeName] : jObj + } + } + return this.j2x(jObj, 0).val; + } +}; + +Builder.prototype.j2x = function(jObj, level) { + let attrStr = ''; + let val = ''; + for (let key in jObj) { + if(!Object.prototype.hasOwnProperty.call(jObj, key)) continue; + if (typeof jObj[key] === 'undefined') { + // supress undefined node only if it is not an attribute + if (this.isAttribute(key)) { + val += ''; + } + } else if (jObj[key] === null) { + // null attribute should be ignored by the attribute list, but should not cause the tag closing + if (this.isAttribute(key)) { + val += ''; + } else if (key[0] === '?') { + val += this.indentate(level) + '<' + key + '?' 
+ this.tagEndChar; + } else { + val += this.indentate(level) + '<' + key + '/' + this.tagEndChar; + } + // val += this.indentate(level) + '<' + key + '/' + this.tagEndChar; + } else if (jObj[key] instanceof Date) { + val += this.buildTextValNode(jObj[key], key, '', level); + } else if (typeof jObj[key] !== 'object') { + //premitive type + const attr = this.isAttribute(key); + if (attr) { + attrStr += this.buildAttrPairStr(attr, '' + jObj[key]); + }else { + //tag value + if (key === this.options.textNodeName) { + let newval = this.options.tagValueProcessor(key, '' + jObj[key]); + val += this.replaceEntitiesValue(newval); + } else { + val += this.buildTextValNode(jObj[key], key, '', level); + } + } + } else if (Array.isArray(jObj[key])) { + //repeated nodes + const arrLen = jObj[key].length; + let listTagVal = ""; + let listTagAttr = ""; + for (let j = 0; j < arrLen; j++) { + const item = jObj[key][j]; + if (typeof item === 'undefined') { + // supress undefined node + } else if (item === null) { + if(key[0] === "?") val += this.indentate(level) + '<' + key + '?' 
+ this.tagEndChar; + else val += this.indentate(level) + '<' + key + '/' + this.tagEndChar; + // val += this.indentate(level) + '<' + key + '/' + this.tagEndChar; + } else if (typeof item === 'object') { + if(this.options.oneListGroup){ + const result = this.j2x(item, level + 1); + listTagVal += result.val; + if (this.options.attributesGroupName && item.hasOwnProperty(this.options.attributesGroupName)) { + listTagAttr += result.attrStr + } + }else{ + listTagVal += this.processTextOrObjNode(item, key, level) + } + } else { + if (this.options.oneListGroup) { + let textValue = this.options.tagValueProcessor(key, item); + textValue = this.replaceEntitiesValue(textValue); + listTagVal += textValue; + } else { + listTagVal += this.buildTextValNode(item, key, '', level); + } + } + } + if(this.options.oneListGroup){ + listTagVal = this.buildObjectNode(listTagVal, key, listTagAttr, level); + } + val += listTagVal; + } else { + //nested node + if (this.options.attributesGroupName && key === this.options.attributesGroupName) { + const Ks = Object.keys(jObj[key]); + const L = Ks.length; + for (let j = 0; j < L; j++) { + attrStr += this.buildAttrPairStr(Ks[j], '' + jObj[key][Ks[j]]); + } + } else { + val += this.processTextOrObjNode(jObj[key], key, level) + } + } + } + return {attrStr: attrStr, val: val}; +}; + +Builder.prototype.buildAttrPairStr = function(attrName, val){ + val = this.options.attributeValueProcessor(attrName, '' + val); + val = this.replaceEntitiesValue(val); + if (this.options.suppressBooleanAttributes && val === "true") { + return ' ' + attrName; + } else return ' ' + attrName + '="' + val + '"'; +} + +function processTextOrObjNode (object, key, level) { + const result = this.j2x(object, level + 1); + if (object[this.options.textNodeName] !== undefined && Object.keys(object).length === 1) { + return this.buildTextValNode(object[this.options.textNodeName], key, result.attrStr, level); + } else { + return this.buildObjectNode(result.val, key, result.attrStr, 
level); + } +} + +Builder.prototype.buildObjectNode = function(val, key, attrStr, level) { + if(val === ""){ + if(key[0] === "?") return this.indentate(level) + '<' + key + attrStr+ '?' + this.tagEndChar; + else { + return this.indentate(level) + '<' + key + attrStr + this.closeTag(key) + this.tagEndChar; + } + }else{ + + let tagEndExp = '' + val + tagEndExp ); + } else if (this.options.commentPropName !== false && key === this.options.commentPropName && piClosingChar.length === 0) { + return this.indentate(level) + `` + this.newLine; + }else { + return ( + this.indentate(level) + '<' + key + attrStr + piClosingChar + this.tagEndChar + + val + + this.indentate(level) + tagEndExp ); + } + } +} + +Builder.prototype.closeTag = function(key){ + let closeTag = ""; + if(this.options.unpairedTags.indexOf(key) !== -1){ //unpaired + if(!this.options.suppressUnpairedNode) closeTag = "/" + }else if(this.options.suppressEmptyNode){ //empty + closeTag = "/"; + }else{ + closeTag = `>` + this.newLine; + }else if (this.options.commentPropName !== false && key === this.options.commentPropName) { + return this.indentate(level) + `` + this.newLine; + }else if(key[0] === "?") {//PI tag + return this.indentate(level) + '<' + key + attrStr+ '?' 
+ this.tagEndChar; + }else{ + let textValue = this.options.tagValueProcessor(key, val); + textValue = this.replaceEntitiesValue(textValue); + + if( textValue === ''){ + return this.indentate(level) + '<' + key + attrStr + this.closeTag(key) + this.tagEndChar; + }else{ + return this.indentate(level) + '<' + key + attrStr + '>' + + textValue + + ' 0 && this.options.processEntities){ + for (let i=0; i 0) { + indentation = EOL; + } + return arrToStr(jArray, options, "", indentation); +} + +function arrToStr(arr, options, jPath, indentation) { + let xmlStr = ""; + let isPreviousElementTag = false; + + for (let i = 0; i < arr.length; i++) { + const tagObj = arr[i]; + const tagName = propName(tagObj); + if(tagName === undefined) continue; + + let newJPath = ""; + if (jPath.length === 0) newJPath = tagName + else newJPath = `${jPath}.${tagName}`; + + if (tagName === options.textNodeName) { + let tagText = tagObj[tagName]; + if (!isStopNode(newJPath, options)) { + tagText = options.tagValueProcessor(tagName, tagText); + tagText = replaceEntitiesValue(tagText, options); + } + if (isPreviousElementTag) { + xmlStr += indentation; + } + xmlStr += tagText; + isPreviousElementTag = false; + continue; + } else if (tagName === options.cdataPropName) { + if (isPreviousElementTag) { + xmlStr += indentation; + } + xmlStr += ``; + isPreviousElementTag = false; + continue; + } else if (tagName === options.commentPropName) { + xmlStr += indentation + ``; + isPreviousElementTag = true; + continue; + } else if (tagName[0] === "?") { + const attStr = attr_to_str(tagObj[":@"], options); + const tempInd = tagName === "?xml" ? "" : indentation; + let piTextNodeName = tagObj[tagName][0][options.textNodeName]; + piTextNodeName = piTextNodeName.length !== 0 ? 
" " + piTextNodeName : ""; //remove extra spacing + xmlStr += tempInd + `<${tagName}${piTextNodeName}${attStr}?>`; + isPreviousElementTag = true; + continue; + } + let newIdentation = indentation; + if (newIdentation !== "") { + newIdentation += options.indentBy; + } + const attStr = attr_to_str(tagObj[":@"], options); + const tagStart = indentation + `<${tagName}${attStr}`; + const tagValue = arrToStr(tagObj[tagName], options, newJPath, newIdentation); + if (options.unpairedTags.indexOf(tagName) !== -1) { + if (options.suppressUnpairedNode) xmlStr += tagStart + ">"; + else xmlStr += tagStart + "/>"; + } else if ((!tagValue || tagValue.length === 0) && options.suppressEmptyNode) { + xmlStr += tagStart + "/>"; + } else if (tagValue && tagValue.endsWith(">")) { + xmlStr += tagStart + `>${tagValue}${indentation}`; + } else { + xmlStr += tagStart + ">"; + if (tagValue && indentation !== "" && (tagValue.includes("/>") || tagValue.includes("`; + } + isPreviousElementTag = true; + } + + return xmlStr; +} + +function propName(obj) { + const keys = Object.keys(obj); + for (let i = 0; i < keys.length; i++) { + const key = keys[i]; + if(!obj.hasOwnProperty(key)) continue; + if (key !== ":@") return key; + } +} + +function attr_to_str(attrMap, options) { + let attrStr = ""; + if (attrMap && !options.ignoreAttributes) { + for (let attr in attrMap) { + if(!attrMap.hasOwnProperty(attr)) continue; + let attrVal = options.attributeValueProcessor(attr, attrMap[attr]); + attrVal = replaceEntitiesValue(attrVal, options); + if (attrVal === true && options.suppressBooleanAttributes) { + attrStr += ` ${attr.substr(options.attributeNamePrefix.length)}`; + } else { + attrStr += ` ${attr.substr(options.attributeNamePrefix.length)}="${attrVal}"`; + } + } + } + return attrStr; +} + +function isStopNode(jPath, options) { + jPath = jPath.substr(0, jPath.length - options.textNodeName.length - 1); + let tagName = jPath.substr(jPath.lastIndexOf(".") + 1); + for (let index in options.stopNodes) { + 
if (options.stopNodes[index] === jPath || options.stopNodes[index] === "*." + tagName) return true; + } + return false; +} + +function replaceEntitiesValue(textValue, options) { + if (textValue && textValue.length > 0 && options.processEntities) { + for (let i = 0; i < options.entities.length; i++) { + const entity = options.entities[i]; + textValue = textValue.replace(entity.regex, entity.val); + } + } + return textValue; +} +module.exports = toXml; diff --git a/amplify/functions/deleteDocument/node_modules/fast-xml-parser/src/xmlbuilder/prettifyJs2Xml.js b/amplify/functions/deleteDocument/node_modules/fast-xml-parser/src/xmlbuilder/prettifyJs2Xml.js new file mode 100644 index 0000000..e69de29 diff --git a/amplify/functions/deleteDocument/node_modules/fast-xml-parser/src/xmlparser/DocTypeReader.js b/amplify/functions/deleteDocument/node_modules/fast-xml-parser/src/xmlparser/DocTypeReader.js new file mode 100644 index 0000000..bcf9dee --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/fast-xml-parser/src/xmlparser/DocTypeReader.js @@ -0,0 +1,152 @@ +const util = require('../util'); + +//TODO: handle comments +function readDocType(xmlData, i){ + + const entities = {}; + if( xmlData[i + 3] === 'O' && + xmlData[i + 4] === 'C' && + xmlData[i + 5] === 'T' && + xmlData[i + 6] === 'Y' && + xmlData[i + 7] === 'P' && + xmlData[i + 8] === 'E') + { + i = i+9; + let angleBracketsCount = 1; + let hasBody = false, comment = false; + let exp = ""; + for(;i') { //Read tag content + if(comment){ + if( xmlData[i - 1] === "-" && xmlData[i - 2] === "-"){ + comment = false; + angleBracketsCount--; + } + }else{ + angleBracketsCount--; + } + if (angleBracketsCount === 0) { + break; + } + }else if( xmlData[i] === '['){ + hasBody = true; + }else{ + exp += xmlData[i]; + } + } + if(angleBracketsCount !== 0){ + throw new Error(`Unclosed DOCTYPE`); + } + }else{ + throw new Error(`Invalid Tag instead of DOCTYPE`); + } + return {entities, i}; +} + +function 
readEntityExp(xmlData,i){ + //External entities are not supported + // + + //Parameter entities are not supported + // + + //Internal entities are supported + // + + //read EntityName + let entityName = ""; + for (; i < xmlData.length && (xmlData[i] !== "'" && xmlData[i] !== '"' ); i++) { + // if(xmlData[i] === " ") continue; + // else + entityName += xmlData[i]; + } + entityName = entityName.trim(); + if(entityName.indexOf(" ") !== -1) throw new Error("External entites are not supported"); + + //read Entity Value + const startChar = xmlData[i++]; + let val = "" + for (; i < xmlData.length && xmlData[i] !== startChar ; i++) { + val += xmlData[i]; + } + return [entityName, val, i]; +} + +function isComment(xmlData, i){ + if(xmlData[i+1] === '!' && + xmlData[i+2] === '-' && + xmlData[i+3] === '-') return true + return false +} +function isEntity(xmlData, i){ + if(xmlData[i+1] === '!' && + xmlData[i+2] === 'E' && + xmlData[i+3] === 'N' && + xmlData[i+4] === 'T' && + xmlData[i+5] === 'I' && + xmlData[i+6] === 'T' && + xmlData[i+7] === 'Y') return true + return false +} +function isElement(xmlData, i){ + if(xmlData[i+1] === '!' && + xmlData[i+2] === 'E' && + xmlData[i+3] === 'L' && + xmlData[i+4] === 'E' && + xmlData[i+5] === 'M' && + xmlData[i+6] === 'E' && + xmlData[i+7] === 'N' && + xmlData[i+8] === 'T') return true + return false +} + +function isAttlist(xmlData, i){ + if(xmlData[i+1] === '!' && + xmlData[i+2] === 'A' && + xmlData[i+3] === 'T' && + xmlData[i+4] === 'T' && + xmlData[i+5] === 'L' && + xmlData[i+6] === 'I' && + xmlData[i+7] === 'S' && + xmlData[i+8] === 'T') return true + return false +} +function isNotation(xmlData, i){ + if(xmlData[i+1] === '!' 
&& + xmlData[i+2] === 'N' && + xmlData[i+3] === 'O' && + xmlData[i+4] === 'T' && + xmlData[i+5] === 'A' && + xmlData[i+6] === 'T' && + xmlData[i+7] === 'I' && + xmlData[i+8] === 'O' && + xmlData[i+9] === 'N') return true + return false +} + +function validateEntityName(name){ + if (util.isName(name)) + return name; + else + throw new Error(`Invalid entity name ${name}`); +} + +module.exports = readDocType; diff --git a/amplify/functions/deleteDocument/node_modules/fast-xml-parser/src/xmlparser/OptionsBuilder.js b/amplify/functions/deleteDocument/node_modules/fast-xml-parser/src/xmlparser/OptionsBuilder.js new file mode 100644 index 0000000..bca3776 --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/fast-xml-parser/src/xmlparser/OptionsBuilder.js @@ -0,0 +1,48 @@ + +const defaultOptions = { + preserveOrder: false, + attributeNamePrefix: '@_', + attributesGroupName: false, + textNodeName: '#text', + ignoreAttributes: true, + removeNSPrefix: false, // remove NS from tag name or attribute name if true + allowBooleanAttributes: false, //a tag can have attributes without any value + //ignoreRootElement : false, + parseTagValue: true, + parseAttributeValue: false, + trimValues: true, //Trim string values of tag and attributes + cdataPropName: false, + numberParseOptions: { + hex: true, + leadingZeros: true, + eNotation: true + }, + tagValueProcessor: function(tagName, val) { + return val; + }, + attributeValueProcessor: function(attrName, val) { + return val; + }, + stopNodes: [], //nested tags will not be parsed even for errors + alwaysCreateTextNode: false, + isArray: () => false, + commentPropName: false, + unpairedTags: [], + processEntities: true, + htmlEntities: false, + ignoreDeclaration: false, + ignorePiTags: false, + transformTagName: false, + transformAttributeName: false, + updateTag: function(tagName, jPath, attrs){ + return tagName + }, + // skipEmptyListItem: false +}; + +const buildOptions = function(options) { + return Object.assign({}, 
defaultOptions, options); +}; + +exports.buildOptions = buildOptions; +exports.defaultOptions = defaultOptions; \ No newline at end of file diff --git a/amplify/functions/deleteDocument/node_modules/fast-xml-parser/src/xmlparser/OrderedObjParser.js b/amplify/functions/deleteDocument/node_modules/fast-xml-parser/src/xmlparser/OrderedObjParser.js new file mode 100644 index 0000000..ffd3f24 --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/fast-xml-parser/src/xmlparser/OrderedObjParser.js @@ -0,0 +1,601 @@ +'use strict'; +///@ts-check + +const util = require('../util'); +const xmlNode = require('./xmlNode'); +const readDocType = require("./DocTypeReader"); +const toNumber = require("strnum"); + +// const regx = +// '<((!\\[CDATA\\[([\\s\\S]*?)(]]>))|((NAME:)?(NAME))([^>]*)>|((\\/)(NAME)\\s*>))([^<]*)' +// .replace(/NAME/g, util.nameRegexp); + +//const tagsRegx = new RegExp("<(\\/?[\\w:\\-\._]+)([^>]*)>(\\s*"+cdataRegx+")*([^<]+)?","g"); +//const tagsRegx = new RegExp("<(\\/?)((\\w*:)?([\\w:\\-\._]+))([^>]*)>([^<]*)("+cdataRegx+"([^<]*))*([^<]+)?","g"); + +class OrderedObjParser{ + constructor(options){ + this.options = options; + this.currentNode = null; + this.tagsNodeStack = []; + this.docTypeEntities = {}; + this.lastEntities = { + "apos" : { regex: /&(apos|#39|#x27);/g, val : "'"}, + "gt" : { regex: /&(gt|#62|#x3E);/g, val : ">"}, + "lt" : { regex: /&(lt|#60|#x3C);/g, val : "<"}, + "quot" : { regex: /&(quot|#34|#x22);/g, val : "\""}, + }; + this.ampEntity = { regex: /&(amp|#38|#x26);/g, val : "&"}; + this.htmlEntities = { + "space": { regex: /&(nbsp|#160);/g, val: " " }, + // "lt" : { regex: /&(lt|#60);/g, val: "<" }, + // "gt" : { regex: /&(gt|#62);/g, val: ">" }, + // "amp" : { regex: /&(amp|#38);/g, val: "&" }, + // "quot" : { regex: /&(quot|#34);/g, val: "\"" }, + // "apos" : { regex: /&(apos|#39);/g, val: "'" }, + "cent" : { regex: /&(cent|#162);/g, val: "¢" }, + "pound" : { regex: /&(pound|#163);/g, val: "£" }, + "yen" : { regex: 
/&(yen|#165);/g, val: "¥" }, + "euro" : { regex: /&(euro|#8364);/g, val: "€" }, + "copyright" : { regex: /&(copy|#169);/g, val: "©" }, + "reg" : { regex: /&(reg|#174);/g, val: "®" }, + "inr" : { regex: /&(inr|#8377);/g, val: "₹" }, + "num_dec": { regex: /&#([0-9]{1,7});/g, val : (_, str) => String.fromCharCode(Number.parseInt(str, 10)) }, + "num_hex": { regex: /&#x([0-9a-fA-F]{1,6});/g, val : (_, str) => String.fromCharCode(Number.parseInt(str, 16)) }, + }; + this.addExternalEntities = addExternalEntities; + this.parseXml = parseXml; + this.parseTextData = parseTextData; + this.resolveNameSpace = resolveNameSpace; + this.buildAttributesMap = buildAttributesMap; + this.isItStopNode = isItStopNode; + this.replaceEntitiesValue = replaceEntitiesValue; + this.readStopNodeData = readStopNodeData; + this.saveTextToParentTag = saveTextToParentTag; + this.addChild = addChild; + } + +} + +function addExternalEntities(externalEntities){ + const entKeys = Object.keys(externalEntities); + for (let i = 0; i < entKeys.length; i++) { + const ent = entKeys[i]; + this.lastEntities[ent] = { + regex: new RegExp("&"+ent+";","g"), + val : externalEntities[ent] + } + } +} + +/** + * @param {string} val + * @param {string} tagName + * @param {string} jPath + * @param {boolean} dontTrim + * @param {boolean} hasAttributes + * @param {boolean} isLeafNode + * @param {boolean} escapeEntities + */ +function parseTextData(val, tagName, jPath, dontTrim, hasAttributes, isLeafNode, escapeEntities) { + if (val !== undefined) { + if (this.options.trimValues && !dontTrim) { + val = val.trim(); + } + if(val.length > 0){ + if(!escapeEntities) val = this.replaceEntitiesValue(val); + + const newval = this.options.tagValueProcessor(tagName, val, jPath, hasAttributes, isLeafNode); + if(newval === null || newval === undefined){ + //don't parse + return val; + }else if(typeof newval !== typeof val || newval !== val){ + //overwrite + return newval; + }else if(this.options.trimValues){ + return parseValue(val, 
this.options.parseTagValue, this.options.numberParseOptions); + }else{ + const trimmedVal = val.trim(); + if(trimmedVal === val){ + return parseValue(val, this.options.parseTagValue, this.options.numberParseOptions); + }else{ + return val; + } + } + } + } +} + +function resolveNameSpace(tagname) { + if (this.options.removeNSPrefix) { + const tags = tagname.split(':'); + const prefix = tagname.charAt(0) === '/' ? '/' : ''; + if (tags[0] === 'xmlns') { + return ''; + } + if (tags.length === 2) { + tagname = prefix + tags[1]; + } + } + return tagname; +} + +//TODO: change regex to capture NS +//const attrsRegx = new RegExp("([\\w\\-\\.\\:]+)\\s*=\\s*(['\"])((.|\n)*?)\\2","gm"); +const attrsRegx = new RegExp('([^\\s=]+)\\s*(=\\s*([\'"])([\\s\\S]*?)\\3)?', 'gm'); + +function buildAttributesMap(attrStr, jPath, tagName) { + if (!this.options.ignoreAttributes && typeof attrStr === 'string') { + // attrStr = attrStr.replace(/\r?\n/g, ' '); + //attrStr = attrStr || attrStr.trim(); + + const matches = util.getAllMatches(attrStr, attrsRegx); + const len = matches.length; //don't make it inline + const attrs = {}; + for (let i = 0; i < len; i++) { + const attrName = this.resolveNameSpace(matches[i][1]); + let oldVal = matches[i][4]; + let aName = this.options.attributeNamePrefix + attrName; + if (attrName.length) { + if (this.options.transformAttributeName) { + aName = this.options.transformAttributeName(aName); + } + if(aName === "__proto__") aName = "#__proto__"; + if (oldVal !== undefined) { + if (this.options.trimValues) { + oldVal = oldVal.trim(); + } + oldVal = this.replaceEntitiesValue(oldVal); + const newVal = this.options.attributeValueProcessor(attrName, oldVal, jPath); + if(newVal === null || newVal === undefined){ + //don't parse + attrs[aName] = oldVal; + }else if(typeof newVal !== typeof oldVal || newVal !== oldVal){ + //overwrite + attrs[aName] = newVal; + }else{ + //parse + attrs[aName] = parseValue( + oldVal, + this.options.parseAttributeValue, + 
this.options.numberParseOptions + ); + } + } else if (this.options.allowBooleanAttributes) { + attrs[aName] = true; + } + } + } + if (!Object.keys(attrs).length) { + return; + } + if (this.options.attributesGroupName) { + const attrCollection = {}; + attrCollection[this.options.attributesGroupName] = attrs; + return attrCollection; + } + return attrs + } +} + +const parseXml = function(xmlData) { + xmlData = xmlData.replace(/\r\n?/g, "\n"); //TODO: remove this line + const xmlObj = new xmlNode('!xml'); + let currentNode = xmlObj; + let textData = ""; + let jPath = ""; + for(let i=0; i< xmlData.length; i++){//for each char in XML data + const ch = xmlData[i]; + if(ch === '<'){ + // const nextIndex = i+1; + // const _2ndChar = xmlData[nextIndex]; + if( xmlData[i+1] === '/') {//Closing Tag + const closeIndex = findClosingIndex(xmlData, ">", i, "Closing Tag is not closed.") + let tagName = xmlData.substring(i+2,closeIndex).trim(); + + if(this.options.removeNSPrefix){ + const colonIndex = tagName.indexOf(":"); + if(colonIndex !== -1){ + tagName = tagName.substr(colonIndex+1); + } + } + + if(this.options.transformTagName) { + tagName = this.options.transformTagName(tagName); + } + + if(currentNode){ + textData = this.saveTextToParentTag(textData, currentNode, jPath); + } + + //check if last tag of nested tag was unpaired tag + const lastTagName = jPath.substring(jPath.lastIndexOf(".")+1); + if(tagName && this.options.unpairedTags.indexOf(tagName) !== -1 ){ + throw new Error(`Unpaired tag can not be used as closing tag: `); + } + let propIndex = 0 + if(lastTagName && this.options.unpairedTags.indexOf(lastTagName) !== -1 ){ + propIndex = jPath.lastIndexOf('.', jPath.lastIndexOf('.')-1) + this.tagsNodeStack.pop(); + }else{ + propIndex = jPath.lastIndexOf("."); + } + jPath = jPath.substring(0, propIndex); + + currentNode = this.tagsNodeStack.pop();//avoid recursion, set the parent tag scope + textData = ""; + i = closeIndex; + } else if( xmlData[i+1] === '?') { + + let 
tagData = readTagExp(xmlData,i, false, "?>"); + if(!tagData) throw new Error("Pi Tag is not closed."); + + textData = this.saveTextToParentTag(textData, currentNode, jPath); + if( (this.options.ignoreDeclaration && tagData.tagName === "?xml") || this.options.ignorePiTags){ + + }else{ + + const childNode = new xmlNode(tagData.tagName); + childNode.add(this.options.textNodeName, ""); + + if(tagData.tagName !== tagData.tagExp && tagData.attrExpPresent){ + childNode[":@"] = this.buildAttributesMap(tagData.tagExp, jPath, tagData.tagName); + } + this.addChild(currentNode, childNode, jPath) + + } + + + i = tagData.closeIndex + 1; + } else if(xmlData.substr(i + 1, 3) === '!--') { + const endIndex = findClosingIndex(xmlData, "-->", i+4, "Comment is not closed.") + if(this.options.commentPropName){ + const comment = xmlData.substring(i + 4, endIndex - 2); + + textData = this.saveTextToParentTag(textData, currentNode, jPath); + + currentNode.add(this.options.commentPropName, [ { [this.options.textNodeName] : comment } ]); + } + i = endIndex; + } else if( xmlData.substr(i + 1, 2) === '!D') { + const result = readDocType(xmlData, i); + this.docTypeEntities = result.entities; + i = result.i; + }else if(xmlData.substr(i + 1, 2) === '![') { + const closeIndex = findClosingIndex(xmlData, "]]>", i, "CDATA is not closed.") - 2; + const tagExp = xmlData.substring(i + 9,closeIndex); + + textData = this.saveTextToParentTag(textData, currentNode, jPath); + + let val = this.parseTextData(tagExp, currentNode.tagname, jPath, true, false, true, true); + if(val == undefined) val = ""; + + //cdata should be set even if it is 0 length string + if(this.options.cdataPropName){ + currentNode.add(this.options.cdataPropName, [ { [this.options.textNodeName] : tagExp } ]); + }else{ + currentNode.add(this.options.textNodeName, val); + } + + i = closeIndex + 2; + }else {//Opening tag + let result = readTagExp(xmlData,i, this.options.removeNSPrefix); + let tagName= result.tagName; + const rawTagName = 
result.rawTagName; + let tagExp = result.tagExp; + let attrExpPresent = result.attrExpPresent; + let closeIndex = result.closeIndex; + + if (this.options.transformTagName) { + tagName = this.options.transformTagName(tagName); + } + + //save text as child node + if (currentNode && textData) { + if(currentNode.tagname !== '!xml'){ + //when nested tag is found + textData = this.saveTextToParentTag(textData, currentNode, jPath, false); + } + } + + //check if last tag was unpaired tag + const lastTag = currentNode; + if(lastTag && this.options.unpairedTags.indexOf(lastTag.tagname) !== -1 ){ + currentNode = this.tagsNodeStack.pop(); + jPath = jPath.substring(0, jPath.lastIndexOf(".")); + } + if(tagName !== xmlObj.tagname){ + jPath += jPath ? "." + tagName : tagName; + } + if (this.isItStopNode(this.options.stopNodes, jPath, tagName)) { + let tagContent = ""; + //self-closing tag + if(tagExp.length > 0 && tagExp.lastIndexOf("/") === tagExp.length - 1){ + if(tagName[tagName.length - 1] === "/"){ //remove trailing '/' + tagName = tagName.substr(0, tagName.length - 1); + jPath = jPath.substr(0, jPath.length - 1); + tagExp = tagName; + }else{ + tagExp = tagExp.substr(0, tagExp.length - 1); + } + i = result.closeIndex; + } + //unpaired tag + else if(this.options.unpairedTags.indexOf(tagName) !== -1){ + + i = result.closeIndex; + } + //normal tag + else{ + //read until closing tag is found + const result = this.readStopNodeData(xmlData, rawTagName, closeIndex + 1); + if(!result) throw new Error(`Unexpected end of ${rawTagName}`); + i = result.i; + tagContent = result.tagContent; + } + + const childNode = new xmlNode(tagName); + if(tagName !== tagExp && attrExpPresent){ + childNode[":@"] = this.buildAttributesMap(tagExp, jPath, tagName); + } + if(tagContent) { + tagContent = this.parseTextData(tagContent, tagName, jPath, true, attrExpPresent, true, true); + } + + jPath = jPath.substr(0, jPath.lastIndexOf(".")); + childNode.add(this.options.textNodeName, tagContent); + + 
this.addChild(currentNode, childNode, jPath) + }else{ + //selfClosing tag + if(tagExp.length > 0 && tagExp.lastIndexOf("/") === tagExp.length - 1){ + if(tagName[tagName.length - 1] === "/"){ //remove trailing '/' + tagName = tagName.substr(0, tagName.length - 1); + jPath = jPath.substr(0, jPath.length - 1); + tagExp = tagName; + }else{ + tagExp = tagExp.substr(0, tagExp.length - 1); + } + + if(this.options.transformTagName) { + tagName = this.options.transformTagName(tagName); + } + + const childNode = new xmlNode(tagName); + if(tagName !== tagExp && attrExpPresent){ + childNode[":@"] = this.buildAttributesMap(tagExp, jPath, tagName); + } + this.addChild(currentNode, childNode, jPath) + jPath = jPath.substr(0, jPath.lastIndexOf(".")); + } + //opening tag + else{ + const childNode = new xmlNode( tagName); + this.tagsNodeStack.push(currentNode); + + if(tagName !== tagExp && attrExpPresent){ + childNode[":@"] = this.buildAttributesMap(tagExp, jPath, tagName); + } + this.addChild(currentNode, childNode, jPath) + currentNode = childNode; + } + textData = ""; + i = closeIndex; + } + } + }else{ + textData += xmlData[i]; + } + } + return xmlObj.child; +} + +function addChild(currentNode, childNode, jPath){ + const result = this.options.updateTag(childNode.tagname, jPath, childNode[":@"]) + if(result === false){ + }else if(typeof result === "string"){ + childNode.tagname = result + currentNode.addChild(childNode); + }else{ + currentNode.addChild(childNode); + } +} + +const replaceEntitiesValue = function(val){ + + if(this.options.processEntities){ + for(let entityName in this.docTypeEntities){ + const entity = this.docTypeEntities[entityName]; + val = val.replace( entity.regx, entity.val); + } + for(let entityName in this.lastEntities){ + const entity = this.lastEntities[entityName]; + val = val.replace( entity.regex, entity.val); + } + if(this.options.htmlEntities){ + for(let entityName in this.htmlEntities){ + const entity = this.htmlEntities[entityName]; + val = 
val.replace( entity.regex, entity.val); + } + } + val = val.replace( this.ampEntity.regex, this.ampEntity.val); + } + return val; +} +function saveTextToParentTag(textData, currentNode, jPath, isLeafNode) { + if (textData) { //store previously collected data as textNode + if(isLeafNode === undefined) isLeafNode = Object.keys(currentNode.child).length === 0 + + textData = this.parseTextData(textData, + currentNode.tagname, + jPath, + false, + currentNode[":@"] ? Object.keys(currentNode[":@"]).length !== 0 : false, + isLeafNode); + + if (textData !== undefined && textData !== "") + currentNode.add(this.options.textNodeName, textData); + textData = ""; + } + return textData; +} + +//TODO: use jPath to simplify the logic +/** + * + * @param {string[]} stopNodes + * @param {string} jPath + * @param {string} currentTagName + */ +function isItStopNode(stopNodes, jPath, currentTagName){ + const allNodesExp = "*." + currentTagName; + for (const stopNodePath in stopNodes) { + const stopNodeExp = stopNodes[stopNodePath]; + if( allNodesExp === stopNodeExp || jPath === stopNodeExp ) return true; + } + return false; +} + +/** + * Returns the tag Expression and where it is ending handling single-double quotes situation + * @param {string} xmlData + * @param {number} i starting index + * @returns + */ +function tagExpWithClosingIndex(xmlData, i, closingChar = ">"){ + let attrBoundary; + let tagExp = ""; + for (let index = i; index < xmlData.length; index++) { + let ch = xmlData[index]; + if (attrBoundary) { + if (ch === attrBoundary) attrBoundary = "";//reset + } else if (ch === '"' || ch === "'") { + attrBoundary = ch; + } else if (ch === closingChar[0]) { + if(closingChar[1]){ + if(xmlData[index + 1] === closingChar[1]){ + return { + data: tagExp, + index: index + } + } + }else{ + return { + data: tagExp, + index: index + } + } + } else if (ch === '\t') { + ch = " " + } + tagExp += ch; + } +} + +function findClosingIndex(xmlData, str, i, errMsg){ + const closingIndex = 
xmlData.indexOf(str, i); + if(closingIndex === -1){ + throw new Error(errMsg) + }else{ + return closingIndex + str.length - 1; + } +} + +function readTagExp(xmlData,i, removeNSPrefix, closingChar = ">"){ + const result = tagExpWithClosingIndex(xmlData, i+1, closingChar); + if(!result) return; + let tagExp = result.data; + const closeIndex = result.index; + const separatorIndex = tagExp.search(/\s/); + let tagName = tagExp; + let attrExpPresent = true; + if(separatorIndex !== -1){//separate tag name and attributes expression + tagName = tagExp.substring(0, separatorIndex); + tagExp = tagExp.substring(separatorIndex + 1).trimStart(); + } + + const rawTagName = tagName; + if(removeNSPrefix){ + const colonIndex = tagName.indexOf(":"); + if(colonIndex !== -1){ + tagName = tagName.substr(colonIndex+1); + attrExpPresent = tagName !== result.data.substr(colonIndex + 1); + } + } + + return { + tagName: tagName, + tagExp: tagExp, + closeIndex: closeIndex, + attrExpPresent: attrExpPresent, + rawTagName: rawTagName, + } +} +/** + * find paired tag for a stop node + * @param {string} xmlData + * @param {string} tagName + * @param {number} i + */ +function readStopNodeData(xmlData, tagName, i){ + const startIndex = i; + // Starting at 1 since we already have an open tag + let openTagCount = 1; + + for (; i < xmlData.length; i++) { + if( xmlData[i] === "<"){ + if (xmlData[i+1] === "/") {//close tag + const closeIndex = findClosingIndex(xmlData, ">", i, `${tagName} is not closed`); + let closeTagName = xmlData.substring(i+2,closeIndex).trim(); + if(closeTagName === tagName){ + openTagCount--; + if (openTagCount === 0) { + return { + tagContent: xmlData.substring(startIndex, i), + i : closeIndex + } + } + } + i=closeIndex; + } else if(xmlData[i+1] === '?') { + const closeIndex = findClosingIndex(xmlData, "?>", i+1, "StopNode is not closed.") + i=closeIndex; + } else if(xmlData.substr(i + 1, 3) === '!--') { + const closeIndex = findClosingIndex(xmlData, "-->", i+3, "StopNode is not 
closed.") + i=closeIndex; + } else if(xmlData.substr(i + 1, 2) === '![') { + const closeIndex = findClosingIndex(xmlData, "]]>", i, "StopNode is not closed.") - 2; + i=closeIndex; + } else { + const tagData = readTagExp(xmlData, i, '>') + + if (tagData) { + const openTagName = tagData && tagData.tagName; + if (openTagName === tagName && tagData.tagExp[tagData.tagExp.length-1] !== "/") { + openTagCount++; + } + i=tagData.closeIndex; + } + } + } + }//end for loop +} + +function parseValue(val, shouldParse, options) { + if (shouldParse && typeof val === 'string') { + //console.log(options) + const newval = val.trim(); + if(newval === 'true' ) return true; + else if(newval === 'false' ) return false; + else return toNumber(val, options); + } else { + if (util.isExist(val)) { + return val; + } else { + return ''; + } + } +} + + +module.exports = OrderedObjParser; diff --git a/amplify/functions/deleteDocument/node_modules/fast-xml-parser/src/xmlparser/XMLParser.js b/amplify/functions/deleteDocument/node_modules/fast-xml-parser/src/xmlparser/XMLParser.js new file mode 100644 index 0000000..ffaf59b --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/fast-xml-parser/src/xmlparser/XMLParser.js @@ -0,0 +1,58 @@ +const { buildOptions} = require("./OptionsBuilder"); +const OrderedObjParser = require("./OrderedObjParser"); +const { prettify} = require("./node2json"); +const validator = require('../validator'); + +class XMLParser{ + + constructor(options){ + this.externalEntities = {}; + this.options = buildOptions(options); + + } + /** + * Parse XML dats to JS object + * @param {string|Buffer} xmlData + * @param {boolean|Object} validationOption + */ + parse(xmlData,validationOption){ + if(typeof xmlData === "string"){ + }else if( xmlData.toString){ + xmlData = xmlData.toString(); + }else{ + throw new Error("XML data is accepted in String or Bytes[] form.") + } + if( validationOption){ + if(validationOption === true) validationOption = {}; //validate with default 
options + + const result = validator.validate(xmlData, validationOption); + if (result !== true) { + throw Error( `${result.err.msg}:${result.err.line}:${result.err.col}` ) + } + } + const orderedObjParser = new OrderedObjParser(this.options); + orderedObjParser.addExternalEntities(this.externalEntities); + const orderedResult = orderedObjParser.parseXml(xmlData); + if(this.options.preserveOrder || orderedResult === undefined) return orderedResult; + else return prettify(orderedResult, this.options); + } + + /** + * Add Entity which is not by default supported by this library + * @param {string} key + * @param {string} value + */ + addEntity(key, value){ + if(value.indexOf("&") !== -1){ + throw new Error("Entity value can't have '&'") + }else if(key.indexOf("&") !== -1 || key.indexOf(";") !== -1){ + throw new Error("An entity must be set without '&' and ';'. Eg. use '#xD' for ' '") + }else if(value === "&"){ + throw new Error("An entity with value '&' is not permitted"); + }else{ + this.externalEntities[key] = value; + } + } +} + +module.exports = XMLParser; \ No newline at end of file diff --git a/amplify/functions/deleteDocument/node_modules/fast-xml-parser/src/xmlparser/node2json.js b/amplify/functions/deleteDocument/node_modules/fast-xml-parser/src/xmlparser/node2json.js new file mode 100644 index 0000000..3045573 --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/fast-xml-parser/src/xmlparser/node2json.js @@ -0,0 +1,113 @@ +'use strict'; + +/** + * + * @param {array} node + * @param {any} options + * @returns + */ +function prettify(node, options){ + return compress( node, options); +} + +/** + * + * @param {array} arr + * @param {object} options + * @param {string} jPath + * @returns object + */ +function compress(arr, options, jPath){ + let text; + const compressedObj = {}; + for (let i = 0; i < arr.length; i++) { + const tagObj = arr[i]; + const property = propName(tagObj); + let newJpath = ""; + if(jPath === undefined) newJpath = property; + 
else newJpath = jPath + "." + property; + + if(property === options.textNodeName){ + if(text === undefined) text = tagObj[property]; + else text += "" + tagObj[property]; + }else if(property === undefined){ + continue; + }else if(tagObj[property]){ + + let val = compress(tagObj[property], options, newJpath); + const isLeaf = isLeafTag(val, options); + + if(tagObj[":@"]){ + assignAttributes( val, tagObj[":@"], newJpath, options); + }else if(Object.keys(val).length === 1 && val[options.textNodeName] !== undefined && !options.alwaysCreateTextNode){ + val = val[options.textNodeName]; + }else if(Object.keys(val).length === 0){ + if(options.alwaysCreateTextNode) val[options.textNodeName] = ""; + else val = ""; + } + + if(compressedObj[property] !== undefined && compressedObj.hasOwnProperty(property)) { + if(!Array.isArray(compressedObj[property])) { + compressedObj[property] = [ compressedObj[property] ]; + } + compressedObj[property].push(val); + }else{ + //TODO: if a node is not an array, then check if it should be an array + //also determine if it is a leaf node + if (options.isArray(property, newJpath, isLeaf )) { + compressedObj[property] = [val]; + }else{ + compressedObj[property] = val; + } + } + } + + } + // if(text && text.length > 0) compressedObj[options.textNodeName] = text; + if(typeof text === "string"){ + if(text.length > 0) compressedObj[options.textNodeName] = text; + }else if(text !== undefined) compressedObj[options.textNodeName] = text; + return compressedObj; +} + +function propName(obj){ + const keys = Object.keys(obj); + for (let i = 0; i < keys.length; i++) { + const key = keys[i]; + if(key !== ":@") return key; + } +} + +function assignAttributes(obj, attrMap, jpath, options){ + if (attrMap) { + const keys = Object.keys(attrMap); + const len = keys.length; //don't make it inline + for (let i = 0; i < len; i++) { + const atrrName = keys[i]; + if (options.isArray(atrrName, jpath + "." 
+ atrrName, true, true)) { + obj[atrrName] = [ attrMap[atrrName] ]; + } else { + obj[atrrName] = attrMap[atrrName]; + } + } + } +} + +function isLeafTag(obj, options){ + const { textNodeName } = options; + const propCount = Object.keys(obj).length; + + if (propCount === 0) { + return true; + } + + if ( + propCount === 1 && + (obj[textNodeName] || typeof obj[textNodeName] === "boolean" || obj[textNodeName] === 0) + ) { + return true; + } + + return false; +} +exports.prettify = prettify; diff --git a/amplify/functions/deleteDocument/node_modules/fast-xml-parser/src/xmlparser/xmlNode.js b/amplify/functions/deleteDocument/node_modules/fast-xml-parser/src/xmlparser/xmlNode.js new file mode 100644 index 0000000..9319524 --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/fast-xml-parser/src/xmlparser/xmlNode.js @@ -0,0 +1,25 @@ +'use strict'; + +class XmlNode{ + constructor(tagname) { + this.tagname = tagname; + this.child = []; //nested tags, text, cdata, comments in order + this[":@"] = {}; //attributes map + } + add(key,val){ + // this.child.push( {name : key, val: val, isCdata: isCdata }); + if(key === "__proto__") key = "#__proto__"; + this.child.push( {[key]: val }); + } + addChild(node) { + if(node.tagname === "__proto__") node.tagname = "#__proto__"; + if(node[":@"] && Object.keys(node[":@"]).length > 0){ + this.child.push( { [node.tagname]: node.child, [":@"]: node[":@"] }); + }else{ + this.child.push( { [node.tagname]: node.child }); + } + }; +}; + + +module.exports = XmlNode; \ No newline at end of file diff --git a/amplify/functions/deleteDocument/node_modules/mnemonist/CHANGELOG.md b/amplify/functions/deleteDocument/node_modules/mnemonist/CHANGELOG.md new file mode 100644 index 0000000..286c2fe --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/mnemonist/CHANGELOG.md @@ -0,0 +1,305 @@ +# Changelog + +## 0.38.3 + +* Refactoring `VPTree` memory layout. +* Fixing `VPTree.nearestNeighbors` edge case. 
+* Various `VPTree` optimizations. + +## 0.38.2 + +* Fixing `Heap.replace` & `Heap.pusphpop` types (@wholenews). + +## 0.38.1 + +* Fixing `SparseQueueSet` deopt. + +## 0.38.0 + +* Adding `TrieMap.update` (@wholenews). + +## 0.37.0 + +* Adding `DefaultWeakMap` (@yoursunny). + +## 0.36.1 + +* Improved typings for iteration methods (@yoursunny). + +## 0.36.0 + +* Adding `SparseQueueSet`. + +## 0.35.0 + +* Adding `SparseMap`. +* Enhancing `SparseSet` performance. + +## 0.34.0 + +* Adding `set.overlap`. + +## 0.33.1 + +* Fixing build by including missing `sort` folder. + +## 0.33.0 + +* Adding `KDTree`. +* Adding `set.intersectionSize`. +* Adding `set.unionSize`. +* Adding `set.jaccard`. +* Adding `FixedReverseHeap.peek`. + +## 0.32.0 + +* Adding `PassjoinIndex`. + +## 0.31.3 + +* Fixing `Heap.nsmallest` & `Heap.nlargest` docs & typings. +* Fixing `Heap.nsmallest` & `Heap.nlargest` not using custom comparator function when `n = 1`. + +## 0.31.2 + +* Fixing `BitSet` & `BitVector` iteration methods edge case. +* Fixing `BitSet` & `BitVector` `#.select` method. + +## 0.31.1 + +* Fixing `BitSet` & `BitVector` `#.size` caching edge case. + +## 0.31.0 + +* Adding `DefaultMap.peek`. +* Fixing some error messages. +* Fixing `BitSet` & `BitVector` `#.size` caching. + +## 0.30.0 + +* Stricter TS definitions (`--noImplicitAny`, `--noImplicitReturns`) (@pbadenski). + +## 0.29.0 + +* Adding `LRUCache.setpop` and `LRUMap.setpop` (@veggiesaurus). + +## 0.28.0 + +* Adding `LRUCache.peek` and `LRUMap.peek` (@veggiesaurus). + +## 0.27.2 + +* Fixing usage with TypeScript. + +## 0.27.1 + +* Fixing `CircularBuffer` and `FixedDeque` types. + +## 0.27.0 + +* Adding `FixedDeque`. +* Adding `CircularBuffer.unshift`. +* Changing `CircularBuffer` semantics to now overwrite values when wrapping around. + +## 0.26.0 + +* Adding the `DefaultMap.autoIncrement` factory. +* Removing the `IncrementalMap`. +* Fixing `Vector` typings. +* Fixing `BitVector` typings. 
+ +## 0.25.1 + +* Fixing custom inspect methods for node >= 10. + +## 0.25.0 + +* Adding `LRUCache`. +* Adding `LRUMap`. + +## 0.24.0 + +* Adding `#.forEachMultiplicity` to `MultiSet`. +* Adding `#.forEachAssociation` to `MultiMap`. +* Adding `DefaultMap`. + +## 0.23.0 + +* Adding `FixedReverseHeap`. +* Adding `Heap.nsmallest` & `Heap.nlargest`. +* Adding `MultiSet.isSubset` & `MultiSet.isSuperset`. +* Adding `#.top` to `MultiSet`. +* Adding missing `Heap` types. +* Renaming `FiniteStack` to `FixedStack`. + +## 0.22.0 + +* Adding `FuzzyMultiMap.dimension`. +* Adding `#.consume` to `Heap`. +* Adding `#.replace` to `Heap`. +* Adding `#.pushpop` to `Heap`. +* Improving `BitSet` and `BitVector` `#.toJSON`. +* Improving `FiniteStack.from` & `CircularBuffer.from` performance when handling arrays. +* `Heap.from` is now linear time. +* Refactoring `Heap` inner logic. +* Fixing `CircularBuffer`'s `#.unshift` to `#.shift`. +* Fixing `SparseSet.delete` return consistency. + +## 0.21.0 + +* Library is now fully typed. +* Adding `CircularBuffer`. +* Adding `#.toArray` to `Heap`. + +## 0.20.0 + +* Adding `TrieMap`. +* Reworking the `Trie` considerably. + +## 0.19.0 + +* Adding `StaticIntervalTree`. +* Adding `PointerVector`. +* Adding `Queue.of`. +* Adding `Stack.of`. +* Improving `Vector` & `BitVector` reallocation performance. +* Improving `InvertedIndex` performance. + +## 0.18.O + +* Adding `FiniteStack`. +* Adding `#.keys` to `MultiSet`. +* Adding `#.count` alias to `MultiSet`. +* Adding `#.count` alias to `MultiMap`. +* Adding `#.remove` to `MultiMap`. +* Adding `Vector.from`. +* Adding `#.values` to `Vector`. +* Adding `#.entries` to `Vector`. +* Fixing bug when feeding invalid values to a `MultiSet`. +* Fixing `.from` static methods not taking byte arrays into account. +* Fixing bugs related to `Stack.pop` edge cases. +* Optimizing `Stack` performance. + +## 0.17.0 + +* Adding `HashedArrayTree`. +* Adding `BitVector`. +* Adding `#.frequency` to `MultiSet`. 
+* Adding `#.grow` to `DynamicArray`. +* Adding `#.reallocate` to `DynamicArray`. +* Adding `#.resize` to `DynamicArray`. +* Fixing several `MultiSet` issues. +* Renaming `DynamicArray` to `Vector`. +* Renaming the `DynamicArray.initialLength` option to `initialCapacity`. +* Renaming `DynamicArray.allocated` to `capacity`. +* Optimizing `MultiSet` performance. +* Optimizing `SparseSet` memory consumption. + +## 0.16.0 + +* Adding `#.has` to `FuzzyMap`. +* Adding `#.has` to `FuzzyMultiMap`. +* Adding `#.multiplicity` to `MultiMap`. +* Renaming `RangeMap` to `IncrementalMap`. +* Renaming `Index` to `FuzzyMap`. +* Renaming `MultiIndex` to `FuzzyMultiMap`. +* Renaming `DynamicArray` `initialSize` option to `initialLength`. +* Improving `MultiMap.set` performance. +* Improving `BitSet.reset` performance. +* Improving `Set.isSubset` & `Set.isSuperset` performance. + +## 0.15.0 + +* Adding `RangeMap`. +* Improving `MultiSet`. +* Out-of-bound `DynamicArray.set` will now correctly grow the array. +* Fixing `StaticDisjointSet.find` complexity. + +## O.14.0 + +* Adding `DynamicArray`. +* Adding `SparseSet`. +* Adding `StaticDisjointSet`. +* Adding iterator methods to `BitSet`. +* Adding `#.rank` & `#.select` to `BitSet`. +* `BitSet` now relies on `Uint32Array` rather than `Uint8Array`. +* Improving `BitSet` performances. +* Using `obliterator` to handle iterators. + +## 0.13.0 + +* Adding `BiMap`. +* Adding `BitSet`. +* Fixing universal iterator. + +## 0.12.0 + +* Adding `InvertedIndex`. + +## 0.11.0 + +* Adding bunch of set functions. + +## 0.10.2 + +* Fixing error in `Trie.get`. +* Fixing error related to `Trie.size`. + +## 0.10.1 + +* Fixing an error in `VPTree.neighbors`. + +## 0.10.0 + +* Adding `Index`. +* Adding `MultiIndex`. +* Adding `MultiMap`. +* Adding `MultiSet`. +* Adding `SymSpell`. + +## 0.9.0 + +* Adding `VPTree`. + +## 0.8.0 + +* Adding `BKTree`. + +## 0.7.0 + +* Adding `BloomFilter`. +* Adding static `#.from` method to all relevant structures. 
+* Adding iterators to all relevant structures. +* Removing the `MultiSet` until proper API is found. + +## 0.6.0 + +* Adding `MultiSet`. + +## 0.5.0 + +* Adding `SuffixArray` & `GeneralizedSuffixArray`. +* Better `Trie` sentinel. + +## 0.4.0 + +* Adding `Queue`. +* Adding possibility to pass custom comparator to `Heap` & `FibonacciHeap`. + +## 0.3.0 + +* Adding `FibonacciHeap`. +* Fixing bug related to `Heap`. + +## 0.2.0 + +* Adding `Trie`. + +## 0.1.0 + +* Adding `Heap`. + +## 0.0.1 + +* Adding `LinkedList`. +* Adding `Stack`. diff --git a/amplify/functions/deleteDocument/node_modules/mnemonist/LICENSE.txt b/amplify/functions/deleteDocument/node_modules/mnemonist/LICENSE.txt new file mode 100644 index 0000000..2d8d205 --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/mnemonist/LICENSE.txt @@ -0,0 +1,21 @@ +The MIT License (MIT) + +Copyright (c) 2016 Guillaume Plique (Yomguithereal) + +Permission is hereby granted, free of charge, to any person obtaining a copy +of this software and associated documentation files (the "Software"), to deal +in the Software without restriction, including without limitation the rights +to use, copy, modify, merge, publish, distribute, sublicense, and/or sell +copies of the Software, and to permit persons to whom the Software is +furnished to do so, subject to the following conditions: + +The above copyright notice and this permission notice shall be included in +all copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, +OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN +THE SOFTWARE. 
diff --git a/amplify/functions/deleteDocument/node_modules/mnemonist/README.md b/amplify/functions/deleteDocument/node_modules/mnemonist/README.md new file mode 100644 index 0000000..dffc9ef --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/mnemonist/README.md @@ -0,0 +1,112 @@ +[![Build Status](https://travis-ci.org/Yomguithereal/mnemonist.svg)](https://travis-ci.org/Yomguithereal/mnemonist) + +# Mnemonist + +Mnemonist is a curated collection of data structures for the JavaScript language. + +It gathers classic data structures (think heap, trie etc.) as well as more exotic ones such as Buckhard-Keller trees etc. + +It strives at being: + +* As performant as possible for a high-level language. +* Completely modular (don't need to import the whole library just to use a simple heap). +* Simple & straightforward to use and consistent with JavaScript standard objects' API. +* Completely typed and comfortably usable with Typescript. + +## Installation + +``` +npm install --save mnemonist +``` + +## Documentation + +Full documentation for the library can be found [here](https://yomguithereal.github.io/mnemonist). 
+ +**Classics** + +* [Heap](https://yomguithereal.github.io/mnemonist/heap) +* [Linked List](https://yomguithereal.github.io/mnemonist/linked-list) +* [LRUCache](https://yomguithereal.github.io/mnemonist/lru-cache), [LRUMap](https://yomguithereal.github.io/mnemonist/lru-map) +* [MultiMap](https://yomguithereal.github.io/mnemonist/multi-map) +* [MultiSet](https://yomguithereal.github.io/mnemonist/multi-set) +* [Queue](https://yomguithereal.github.io/mnemonist/queue) +* [Set (helpers)](https://yomguithereal.github.io/mnemonist/set) +* [Stack](https://yomguithereal.github.io/mnemonist/stack) +* [Trie](https://yomguithereal.github.io/mnemonist/trie) +* [TrieMap](https://yomguithereal.github.io/mnemonist/trie-map) + +**Low-level & structures for very specific use cases** + +* [Circular Buffer](https://yomguithereal.github.io/mnemonist/circular-buffer) +* [Fixed Deque](https://yomguithereal.github.io/mnemonist/fixed-deque) +* [Fibonacci Heap](https://yomguithereal.github.io/mnemonist/fibonacci-heap) +* [Fixed Reverse Heap](https://yomguithereal.github.io/mnemonist/fixed-reverse-heap) +* [Fixed Stack](https://yomguithereal.github.io/mnemonist/fixed-stack) +* [Hashed Array Tree](https://yomguithereal.github.io/mnemonist/hashed-array-tree) +* [Static DisjointSet](https://yomguithereal.github.io/mnemonist/static-disjoint-set) +* [SparseQueueSet](https://yomguithereal.github.io/mnemonist/sparse-queue-set) +* [SparseMap](https://yomguithereal.github.io/mnemonist/sparse-map) +* [SparseSet](https://yomguithereal.github.io/mnemonist/sparse-set) +* [Suffix Array](https://yomguithereal.github.io/mnemonist/suffix-array) +* [Generalized Suffix Array](https://yomguithereal.github.io/mnemonist/generalized-suffix-array) +* [Vector](https://yomguithereal.github.io/mnemonist/vector) + +**Information retrieval & Natural language processing** + +* [Fuzzy Map](https://yomguithereal.github.io/mnemonist/fuzzy-map) +* [Fuzzy MultiMap](https://yomguithereal.github.io/mnemonist/fuzzy-multi-map) 
+* [Inverted Index](https://yomguithereal.github.io/mnemonist/inverted-index) +* [Passjoin Index](https://yomguithereal.github.io/mnemonist/passjoin-index) +* [SymSpell](https://yomguithereal.github.io/mnemonist/symspell) + +**Space & time indexation** + +* [Static IntervalTree](https://yomguithereal.github.io/mnemonist/static-interval-tree) +* [KD-Tree](https://yomguithereal.github.io/mnemonist/kd-tree) + +**Metric space indexation** + +* [Burkhard-Keller Tree](https://yomguithereal.github.io/mnemonist/bk-tree) +* [Vantage Point Tree](https://yomguithereal.github.io/mnemonist/vp-tree) + +**Probabilistic & succinct data structures** + +* [BitSet](https://yomguithereal.github.io/mnemonist/bit-set) +* [BitVector](https://yomguithereal.github.io/mnemonist/bit-vector) +* [Bloom Filter](https://yomguithereal.github.io/mnemonist/bloom-filter) + +**Utility classes** + +* [BiMap](https://yomguithereal.github.io/mnemonist/bi-map) +* [DefaultMap](https://yomguithereal.github.io/mnemonist/default-map) +* [DefaultWeakMap](https://yomguithereal.github.io/mnemonist/default-weak-map) + +--- + +Note that this list does not include a `Graph` data structure, whose implementation is usually far too complex for the scope of this library. + +However, we advise the reader to take a look at the [`graphology`](https://graphology.github.io/) library instead. + +Don't find the data structure you need? Maybe we can work it out [together](https://github.com/Yomguithereal/mnemonist/issues). + +## Contribution + +Contributions are obviously welcome. Be sure to lint the code & add relevant unit tests. 
+
+```
+# Installing
+git clone git@github.com:Yomguithereal/mnemonist.git
+cd mnemonist
+npm install
+
+# Linting
+npm run lint
+
+# Running the unit tests
+npm test
+```
+
+## License
+
+[MIT](LICENSE.txt)
diff --git a/amplify/functions/deleteDocument/node_modules/mnemonist/bi-map.d.ts b/amplify/functions/deleteDocument/node_modules/mnemonist/bi-map.d.ts
new file mode 100644
index 0000000..d0c2f76
--- /dev/null
+++ b/amplify/functions/deleteDocument/node_modules/mnemonist/bi-map.d.ts
@@ -0,0 +1,46 @@
+/**
+ * Mnemonist BiMap Typings
+ * ========================
+ */
+export class InverseMap<K, V> implements Iterable<[K, V]> {
+
+ // Members
+ size: number;
+ inverse: BiMap<V, K>;
+
+ // Methods
+ clear(): void;
+ set(key: K, value: V): this;
+ delete(key: K): boolean;
+ has(key: K): boolean;
+ get(key: K): V;
+ forEach(callback: (value: V, key: K, map: this) => void, scope?: any): void;
+ keys(): IterableIterator<K>;
+ values(): IterableIterator<V>;
+ entries(): IterableIterator<[K, V]>;
+ [Symbol.iterator](): IterableIterator<[K, V]>;
+ inspect(): any;
+}
+
+export default class BiMap<K, V> implements Iterable<[K, V]> {
+
+ // Members
+ size: number;
+ inverse: InverseMap<V, K>;
+
+ // Methods
+ clear(): void;
+ set(key: K, value: V): this;
+ delete(key: K): boolean;
+ has(key: K): boolean;
+ get(key: K): V;
+ forEach(callback: (value: V, key: K, map: this) => void, scope?: any): void;
+ keys(): IterableIterator<K>;
+ values(): IterableIterator<V>;
+ entries(): IterableIterator<[K, V]>;
+ [Symbol.iterator](): IterableIterator<[K, V]>;
+ inspect(): any;
+
+ // Statics
+ static from<I, J>(iterable: Iterable<[I, J]> | {[key: string]: J}): BiMap<I, J>;
+}
diff --git a/amplify/functions/deleteDocument/node_modules/mnemonist/bi-map.js b/amplify/functions/deleteDocument/node_modules/mnemonist/bi-map.js
new file mode 100644
index 0000000..3d5d03f
--- /dev/null
+++ b/amplify/functions/deleteDocument/node_modules/mnemonist/bi-map.js
@@ -0,0 +1,195 @@
+/**
+ * Mnemonist BiMap
+ * ================
+ *
+ * JavaScript 
implementation of a BiMap. + */ +var forEach = require('obliterator/foreach'); + +/** + * Inverse Map. + * + * @constructor + */ +function InverseMap(original) { + + this.size = 0; + this.items = new Map(); + this.inverse = original; +} + +/** + * BiMap. + * + * @constructor + */ +function BiMap() { + + this.size = 0; + this.items = new Map(); + this.inverse = new InverseMap(this); +} + +/** + * Method used to clear the map. + * + * @return {undefined} + */ +function clear() { + this.size = 0; + this.items.clear(); + this.inverse.items.clear(); +} + +BiMap.prototype.clear = clear; +InverseMap.prototype.clear = clear; + +/** + * Method used to set a relation. + * + * @param {any} key - Key. + * @param {any} value - Value. + * @return {BiMap|InverseMap} + */ +function set(key, value) { + + // First we need to attempt to see if the relation is not flawed + if (this.items.has(key)) { + var currentValue = this.items.get(key); + + // The relation already exists, we do nothing + if (currentValue === value) + return this; + else + this.inverse.items.delete(currentValue); + } + + if (this.inverse.items.has(value)) { + var currentKey = this.inverse.items.get(value); + + if (currentKey === key) + return this; + else + this.items.delete(currentKey); + } + + // Here we actually add the relation + this.items.set(key, value); + this.inverse.items.set(value, key); + + // Size + this.size = this.items.size; + this.inverse.size = this.inverse.items.size; + + return this; +} + +BiMap.prototype.set = set; +InverseMap.prototype.set = set; + +/** + * Method used to delete a relation. + * + * @param {any} key - Key. 
+ * @return {boolean} + */ +function del(key) { + if (this.items.has(key)) { + var currentValue = this.items.get(key); + + this.items.delete(key); + this.inverse.items.delete(currentValue); + + // Size + this.size = this.items.size; + this.inverse.size = this.inverse.items.size; + + return true; + } + + return false; +} + +BiMap.prototype.delete = del; +InverseMap.prototype.delete = del; + +/** + * Mapping some Map prototype function unto our two classes. + */ +var METHODS = ['has', 'get', 'forEach', 'keys', 'values', 'entries']; + +METHODS.forEach(function(name) { + BiMap.prototype[name] = InverseMap.prototype[name] = function() { + return Map.prototype[name].apply(this.items, arguments); + }; +}); + +/** + * Attaching the #.values method to Symbol.iterator if possible. + */ +if (typeof Symbol !== 'undefined') { + BiMap.prototype[Symbol.iterator] = BiMap.prototype.entries; + InverseMap.prototype[Symbol.iterator] = InverseMap.prototype.entries; +} + +/** + * Convenience known methods. + */ +BiMap.prototype.inspect = function() { + var dummy = { + left: this.items, + right: this.inverse.items + }; + + // Trick so that node displays the name of the constructor + Object.defineProperty(dummy, 'constructor', { + value: BiMap, + enumerable: false + }); + + return dummy; +}; + +if (typeof Symbol !== 'undefined') + BiMap.prototype[Symbol.for('nodejs.util.inspect.custom')] = BiMap.prototype.inspect; + +InverseMap.prototype.inspect = function() { + var dummy = { + left: this.inverse.items, + right: this.items + }; + + // Trick so that node displays the name of the constructor + Object.defineProperty(dummy, 'constructor', { + value: InverseMap, + enumerable: false + }); + + return dummy; +}; + +if (typeof Symbol !== 'undefined') + InverseMap.prototype[Symbol.for('nodejs.util.inspect.custom')] = InverseMap.prototype.inspect; + + +/** + * Static @.from function taking an arbitrary iterable & converting it into + * a bimap. + * + * @param {Iterable} iterable - Target iterable. 
+ * @return {BiMap}
+ */
+BiMap.from = function(iterable) {
+ var bimap = new BiMap();
+
+ forEach(iterable, function(value, key) {
+ bimap.set(key, value);
+ });
+
+ return bimap;
+};
+
+/**
+ * Exporting.
+ */
+module.exports = BiMap;
diff --git a/amplify/functions/deleteDocument/node_modules/mnemonist/bit-set.d.ts b/amplify/functions/deleteDocument/node_modules/mnemonist/bit-set.d.ts
new file mode 100644
index 0000000..cfeb0d1
--- /dev/null
+++ b/amplify/functions/deleteDocument/node_modules/mnemonist/bit-set.d.ts
@@ -0,0 +1,29 @@
+/**
+ * Mnemonist BitSet Typings
+ * =========================
+ */
+export default class BitSet implements Iterable<number> {
+
+ // Members
+ length: number;
+ size: number;
+
+ // Constructor
+ constructor(length: number);
+
+ // Methods
+ clear(): void;
+ set(index: number, value?: boolean | number): void;
+ reset(index: number, value: boolean | number): void;
+ flip(index: number, value: boolean | number): void;
+ get(index: number): number;
+ test(index: number): boolean;
+ rank(r: number): number;
+ select(r: number): number;
+ forEach(callback: (index: number, value: number, set: this) => void, scope?: any): void;
+ values(): IterableIterator<number>;
+ entries(): IterableIterator<[number, number]>;
+ [Symbol.iterator](): IterableIterator<number>;
+ inspect(): any;
+ toJSON(): Array<number>;
+}
diff --git a/amplify/functions/deleteDocument/node_modules/mnemonist/bit-set.js b/amplify/functions/deleteDocument/node_modules/mnemonist/bit-set.js
new file mode 100644
index 0000000..f2445a0
--- /dev/null
+++ b/amplify/functions/deleteDocument/node_modules/mnemonist/bit-set.js
@@ -0,0 +1,379 @@
+/**
+ * Mnemonist BitSet
+ * =================
+ *
+ * JavaScript implementation of a fixed-size BitSet based upon a Uint32Array. 
+ * + * Notes: + * - (i >> 5) is the same as ((i / 32) | 0) + * - (i & 0x0000001f) is the same as (i % 32) + * - I could use a Float64Array to store more in less blocks but I would lose + * the benefits of byte comparison to keep track of size without popcounts. + */ +var Iterator = require('obliterator/iterator'), + bitwise = require('./utils/bitwise.js'); + +/** + * BitSet. + * + * @constructor + */ +function BitSet(length) { + + // Properties + this.length = length; + this.clear(); + + // Methods + + // Statics +} + +/** + * Method used to clear the bit set. + * + * @return {undefined} + */ +BitSet.prototype.clear = function() { + + // Properties + this.size = 0; + this.array = new Uint32Array(Math.ceil(this.length / 32)); +}; + +/** + * Method used to set the given bit's value. + * + * @param {number} index - Target bit index. + * @param {number} value - Value to set. + * @return {BitSet} + */ +BitSet.prototype.set = function(index, value) { + var byteIndex = index >> 5, + pos = index & 0x0000001f, + oldBytes = this.array[byteIndex], + newBytes; + + if (value === 0 || value === false) + newBytes = this.array[byteIndex] &= ~(1 << pos); + else + newBytes = this.array[byteIndex] |= (1 << pos); + + // The operands of all bitwise operators are converted to *signed* 32-bit integers. + // Source: https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Operators/Bitwise_Operators#Signed_32-bit_integers + // Shifting by 31 changes the sign (i.e. 1 << 31 = -2147483648). + // Therefore, get unsigned representation by applying '>>> 0'. + newBytes = newBytes >>> 0; + + // Updating size + if (newBytes > oldBytes) + this.size++; + else if (newBytes < oldBytes) + this.size--; + + return this; +}; + +/** +* Method used to reset the given bit's value. +* +* @param {number} index - Target bit index. 
+* @return {BitSet} +*/ +BitSet.prototype.reset = function(index) { + var byteIndex = index >> 5, + pos = index & 0x0000001f, + oldBytes = this.array[byteIndex], + newBytes; + + newBytes = this.array[byteIndex] &= ~(1 << pos); + + // Updating size + if (newBytes < oldBytes) + this.size--; + + return this; +}; + +/** + * Method used to flip the value of the given bit. + * + * @param {number} index - Target bit index. + * @return {BitSet} + */ +BitSet.prototype.flip = function(index) { + var byteIndex = index >> 5, + pos = index & 0x0000001f, + oldBytes = this.array[byteIndex]; + + var newBytes = this.array[byteIndex] ^= (1 << pos); + + // Get unsigned representation. + newBytes = newBytes >>> 0; + + // Updating size + if (newBytes > oldBytes) + this.size++; + else if (newBytes < oldBytes) + this.size--; + + return this; +}; + +/** + * Method used to get the given bit's value. + * + * @param {number} index - Target bit index. + * @return {number} + */ +BitSet.prototype.get = function(index) { + var byteIndex = index >> 5, + pos = index & 0x0000001f; + + return (this.array[byteIndex] >> pos) & 1; +}; + +/** + * Method used to test the given bit's value. + * + * @param {number} index - Target bit index. + * @return {BitSet} + */ +BitSet.prototype.test = function(index) { + return Boolean(this.get(index)); +}; + +/** + * Method used to return the number of 1 from the beginning of the set up to + * the ith index. + * + * @param {number} i - Ith index (cannot be > length). 
+ * @return {number} + */ +BitSet.prototype.rank = function(i) { + if (this.size === 0) + return 0; + + var byteIndex = i >> 5, + pos = i & 0x0000001f, + r = 0; + + // Accessing the bytes before the last one + for (var j = 0; j < byteIndex; j++) + r += bitwise.table8Popcount(this.array[j]); + + // Handling masked last byte + var maskedByte = this.array[byteIndex] & ((1 << pos) - 1); + + r += bitwise.table8Popcount(maskedByte); + + return r; +}; + +/** + * Method used to return the position of the rth 1 in the set or -1 if the + * set is empty. + * + * Note: usually select is implemented using binary search over rank but I + * tend to think the following linear implementation is faster since here + * rank is O(n) anyway. + * + * @param {number} r - Rth 1 to select (should be < length). + * @return {number} + */ +BitSet.prototype.select = function(r) { + if (this.size === 0) + return -1; + + // TODO: throw? + if (r >= this.length) + return -1; + + var byte, + b = 32, + p = 0, + c = 0; + + for (var i = 0, l = this.array.length; i < l; i++) { + byte = this.array[i]; + + // The byte is empty, let's continue + if (byte === 0) + continue; + + // TODO: This branching might not be useful here + if (i === l - 1) + b = this.length % 32 || 32; + + // TODO: popcount should speed things up here + + for (var j = 0; j < b; j++, p++) { + c += (byte >> j) & 1; + + if (c === r) + return p; + } + } +}; + +/** + * Method used to iterate over the bit set's values. + * + * @param {function} callback - Function to call for each item. + * @param {object} scope - Optional scope. + * @return {undefined} + */ +BitSet.prototype.forEach = function(callback, scope) { + scope = arguments.length > 1 ? 
scope : this; + + var length = this.length, + byte, + bit, + b = 32; + + for (var i = 0, l = this.array.length; i < l; i++) { + byte = this.array[i]; + + if (i === l - 1) + b = length % 32 || 32; + + for (var j = 0; j < b; j++) { + bit = (byte >> j) & 1; + + callback.call(scope, bit, i * 32 + j); + } + } +}; + +/** + * Method used to create an iterator over a set's values. + * + * @return {Iterator} + */ +BitSet.prototype.values = function() { + var length = this.length, + inner = false, + byte, + bit, + array = this.array, + l = array.length, + i = 0, + j = -1, + b = 32; + + return new Iterator(function next() { + if (!inner) { + + if (i >= l) + return { + done: true + }; + + if (i === l - 1) + b = length % 32 || 32; + + byte = array[i++]; + inner = true; + j = -1; + } + + j++; + + if (j >= b) { + inner = false; + return next(); + } + + bit = (byte >> j) & 1; + + return { + value: bit + }; + }); +}; + +/** + * Method used to create an iterator over a set's entries. + * + * @return {Iterator} + */ +BitSet.prototype.entries = function() { + var length = this.length, + inner = false, + byte, + bit, + array = this.array, + index, + l = array.length, + i = 0, + j = -1, + b = 32; + + return new Iterator(function next() { + if (!inner) { + + if (i >= l) + return { + done: true + }; + + if (i === l - 1) + b = length % 32 || 32; + + byte = array[i++]; + inner = true; + j = -1; + } + + j++; + index = (~-i) * 32 + j; + + if (j >= b) { + inner = false; + return next(); + } + + bit = (byte >> j) & 1; + + return { + value: [index, bit] + }; + }); +}; + +/** + * Attaching the #.values method to Symbol.iterator if possible. + */ +if (typeof Symbol !== 'undefined') + BitSet.prototype[Symbol.iterator] = BitSet.prototype.values; + +/** + * Convenience known methods. 
+ */
+BitSet.prototype.inspect = function() {
+ var proxy = new Uint8Array(this.length);
+
+ this.forEach(function(bit, i) {
+ proxy[i] = bit;
+ });
+
+ // Trick so that node displays the name of the constructor
+ Object.defineProperty(proxy, 'constructor', {
+ value: BitSet,
+ enumerable: false
+ });
+
+ return proxy;
+};
+
+if (typeof Symbol !== 'undefined')
+ BitSet.prototype[Symbol.for('nodejs.util.inspect.custom')] = BitSet.prototype.inspect;
+
+BitSet.prototype.toJSON = function() {
+ return Array.from(this.array);
+};
+
+/**
+ * Exporting.
+ */
+module.exports = BitSet;
diff --git a/amplify/functions/deleteDocument/node_modules/mnemonist/bit-vector.d.ts b/amplify/functions/deleteDocument/node_modules/mnemonist/bit-vector.d.ts
new file mode 100644
index 0000000..4005d3c
--- /dev/null
+++ b/amplify/functions/deleteDocument/node_modules/mnemonist/bit-vector.d.ts
@@ -0,0 +1,42 @@
+/**
+ * Mnemonist BitVector Typings
+ * ============================
+ */
+type BitVectorOptions = {
+ initialLength?: number;
+ initialCapacity?: number;
+ policy?: (capacity: number) => number;
+}
+
+export default class BitVector implements Iterable<number> {
+
+ // Members
+ capacity: number;
+ length: number;
+ size: number;
+
+ // Constructor
+ constructor(length: number);
+ constructor(options: BitVectorOptions);
+
+ // Methods
+ clear(): void;
+ set(index: number, value?: boolean | number): this;
+ reset(index: number, value: boolean | number): void;
+ flip(index: number, value: boolean | number): void;
+ reallocate(capacity: number): this;
+ grow(capacity?: number): this;
+ resize(length: number): this;
+ push(value: boolean | number): number;
+ pop(): number | undefined;
+ get(index: number): number;
+ test(index: number): boolean;
+ rank(r: number): number;
+ select(r: number): number;
+ forEach(callback: (index: number, value: number, set: this) => void, scope?: any): void;
+ values(): IterableIterator<number>;
+ entries(): IterableIterator<[number, number]>;
+ [Symbol.iterator](): 
IterableIterator<number>;
+ inspect(): any;
+ toJSON(): Array<number>;
+}
diff --git a/amplify/functions/deleteDocument/node_modules/mnemonist/bit-vector.js b/amplify/functions/deleteDocument/node_modules/mnemonist/bit-vector.js
new file mode 100644
index 0000000..5ee01e6
--- /dev/null
+++ b/amplify/functions/deleteDocument/node_modules/mnemonist/bit-vector.js
@@ -0,0 +1,550 @@
+/**
+ * Mnemonist BitVector
+ * ====================
+ *
+ * JavaScript implementation of a dynamic BitSet based upon a Uint32Array.
+ *
+ * Notes:
+ * - (i >> 5) is the same as ((i / 32) | 0)
+ * - (i & 0x0000001f) is the same as (i % 32)
+ * - I could use a Float64Array to store more in less blocks but I would lose
+ * the benefits of byte comparison to keep track of size without popcounts.
+ */
+var Iterator = require('obliterator/iterator'),
+ bitwise = require('./utils/bitwise.js');
+
+/**
+ * Constants.
+ */
+var DEFAULT_GROWING_POLICY = function(capacity) {
+ return Math.max(1, Math.ceil(capacity * 1.5));
+};
+
+/**
+ * Helpers.
+ */
+function createByteArray(capacity) {
+ return new Uint32Array(Math.ceil(capacity / 32));
+}
+
+/**
+ * BitVector.
+ *
+ * @constructor
+ */
+function BitVector(initialLengthOrOptions) {
+ var initialLength = initialLengthOrOptions || 0,
+ policy = DEFAULT_GROWING_POLICY;
+
+ if (typeof initialLengthOrOptions === 'object') {
+ initialLength = (
+ initialLengthOrOptions.initialLength ||
+ initialLengthOrOptions.initialCapacity ||
+ 0
+ );
+ policy = initialLengthOrOptions.policy || policy;
+ }
+
+ this.size = 0;
+ this.length = initialLength;
+ this.capacity = Math.ceil(this.length / 32) * 32;
+ this.policy = policy;
+ this.array = createByteArray(this.capacity);
+}
+
+/**
+ * Method used to set the given bit's value.
+ *
+ * @param {number} index - Target bit index.
+ * @param {number|boolean} value - Value to set.
+ * @return {BitVector}
+ */
+BitVector.prototype.set = function(index, value) {
+
+ // Out of bounds? 
+ if (this.length < index) + throw new Error('BitVector.set: index out of bounds.'); + + var byteIndex = index >> 5, + pos = index & 0x0000001f, + oldBytes = this.array[byteIndex], + newBytes; + + if (value === 0 || value === false) + newBytes = this.array[byteIndex] &= ~(1 << pos); + else + newBytes = this.array[byteIndex] |= (1 << pos); + + // Get unsigned representation. + newBytes = newBytes >>> 0; + + // Updating size + if (newBytes > oldBytes) + this.size++; + else if (newBytes < oldBytes) + this.size--; + + return this; +}; + +/** +* Method used to reset the given bit's value. +* +* @param {number} index - Target bit index. +* @return {BitVector} +*/ +BitVector.prototype.reset = function(index) { + var byteIndex = index >> 5, + pos = index & 0x0000001f, + oldBytes = this.array[byteIndex], + newBytes; + + newBytes = this.array[byteIndex] &= ~(1 << pos); + + // Updating size + if (newBytes < oldBytes) + this.size--; + + return this; +}; + +/** + * Method used to flip the value of the given bit. + * + * @param {number} index - Target bit index. + * @return {BitVector} + */ +BitVector.prototype.flip = function(index) { + var byteIndex = index >> 5, + pos = index & 0x0000001f, + oldBytes = this.array[byteIndex]; + + var newBytes = this.array[byteIndex] ^= (1 << pos); + + // Get unsigned representation. + newBytes = newBytes >>> 0; + + // Updating size + if (newBytes > oldBytes) + this.size++; + else if (newBytes < oldBytes) + this.size--; + + return this; +}; + +/** + * Method used to apply the growing policy. + * + * @param {number} [override] - Override capacity. 
+ * @return {number} + */ +BitVector.prototype.applyPolicy = function(override) { + var newCapacity = this.policy(override || this.capacity); + + if (typeof newCapacity !== 'number' || newCapacity < 0) + throw new Error('mnemonist/bit-vector.applyPolicy: policy returned an invalid value (expecting a positive integer).'); + + if (newCapacity <= this.capacity) + throw new Error('mnemonist/bit-vector.applyPolicy: policy returned a less or equal capacity to allocate.'); + + // TODO: we should probably check that the returned number is an integer + + // Ceil to nearest 32 + return Math.ceil(newCapacity / 32) * 32; +}; + +/** + * Method used to reallocate the underlying array. + * + * @param {number} capacity - Target capacity. + * @return {BitVector} + */ +BitVector.prototype.reallocate = function(capacity) { + var virtualCapacity = capacity; + + capacity = Math.ceil(capacity / 32) * 32; + + if (virtualCapacity < this.length) + this.length = virtualCapacity; + + if (capacity === this.capacity) + return this; + + var oldArray = this.array; + + var storageLength = capacity / 32; + + if (storageLength === this.array.length) + return this; + + if (storageLength > this.array.length) { + this.array = new Uint32Array(storageLength); + this.array.set(oldArray, 0); + } + else { + this.array = oldArray.slice(0, storageLength); + } + + this.capacity = capacity; + + return this; +}; + +/** + * Method used to grow the array. + * + * @param {number} [capacity] - Optional capacity to match. 
+ * @return {BitVector} + */ +BitVector.prototype.grow = function(capacity) { + var newCapacity; + + if (typeof capacity === 'number') { + + if (this.capacity >= capacity) + return this; + + // We need to match the given capacity + newCapacity = this.capacity; + + while (newCapacity < capacity) + newCapacity = this.applyPolicy(newCapacity); + + this.reallocate(newCapacity); + + return this; + } + + // We need to run the policy once + newCapacity = this.applyPolicy(); + this.reallocate(newCapacity); + + return this; +}; + +/** + * Method used to resize the array. Won't deallocate. + * + * @param {number} length - Target length. + * @return {BitVector} + */ +BitVector.prototype.resize = function(length) { + if (length === this.length) + return this; + + if (length < this.length) { + this.length = length; + return this; + } + + this.length = length; + this.reallocate(length); + + return this; +}; + +/** + * Method used to push a value in the set. + * + * @param {number|boolean} value + * @return {BitVector} + */ +BitVector.prototype.push = function(value) { + if (this.capacity === this.length) + this.grow(); + + if (value === 0 || value === false) + return ++this.length; + + this.size++; + + var index = this.length++, + byteIndex = index >> 5, + pos = index & 0x0000001f; + + this.array[byteIndex] |= (1 << pos); + + return this.length; +}; + +/** + * Method used to pop the last value of the set. + * + * @return {number} - The popped value. + */ +BitVector.prototype.pop = function() { + if (this.length === 0) + return; + + var index = --this.length; + + var byteIndex = index >> 5, + pos = index & 0x0000001f; + + return (this.array[byteIndex] >> pos) & 1; +}; + +/** + * Method used to get the given bit's value. + * + * @param {number} index - Target bit index. 
+ * @return {number} + */ +BitVector.prototype.get = function(index) { + if (this.length < index) + return undefined; + + var byteIndex = index >> 5, + pos = index & 0x0000001f; + + return (this.array[byteIndex] >> pos) & 1; +}; + +/** + * Method used to test the given bit's value. + * + * @param {number} index - Target bit index. + * @return {BitVector} + */ +BitVector.prototype.test = function(index) { + if (this.length < index) + return false; + + return Boolean(this.get(index)); +}; + +/** + * Method used to return the number of 1 from the beginning of the set up to + * the ith index. + * + * @param {number} i - Ith index (cannot be > length). + * @return {number} + */ +BitVector.prototype.rank = function(i) { + if (this.size === 0) + return 0; + + var byteIndex = i >> 5, + pos = i & 0x0000001f, + r = 0; + + // Accessing the bytes before the last one + for (var j = 0; j < byteIndex; j++) + r += bitwise.table8Popcount(this.array[j]); + + // Handling masked last byte + var maskedByte = this.array[byteIndex] & ((1 << pos) - 1); + + r += bitwise.table8Popcount(maskedByte); + + return r; +}; + +/** + * Method used to return the position of the rth 1 in the set or -1 if the + * set is empty. + * + * Note: usually select is implemented using binary search over rank but I + * tend to think the following linear implementation is faster since here + * rank is O(n) anyway. + * + * @param {number} r - Rth 1 to select (should be < length). + * @return {number} + */ +BitVector.prototype.select = function(r) { + if (this.size === 0) + return -1; + + // TODO: throw? 
+ if (r >= this.length) + return -1; + + var byte, + b = 32, + p = 0, + c = 0; + + for (var i = 0, l = this.array.length; i < l; i++) { + byte = this.array[i]; + + // The byte is empty, let's continue + if (byte === 0) + continue; + + // TODO: This branching might not be useful here + if (i === l - 1) + b = this.length % 32 || 32; + + // TODO: popcount should speed things up here + + for (var j = 0; j < b; j++, p++) { + c += (byte >> j) & 1; + + if (c === r) + return p; + } + } +}; + +/** + * Method used to iterate over the bit set's values. + * + * @param {function} callback - Function to call for each item. + * @param {object} scope - Optional scope. + * @return {undefined} + */ +BitVector.prototype.forEach = function(callback, scope) { + scope = arguments.length > 1 ? scope : this; + + var length = this.length, + byte, + bit, + b = 32; + + for (var i = 0, l = this.array.length; i < l; i++) { + byte = this.array[i]; + + if (i === l - 1) + b = length % 32 || 32; + + for (var j = 0; j < b; j++) { + bit = (byte >> j) & 1; + + callback.call(scope, bit, i * 32 + j); + } + } +}; + +/** + * Method used to create an iterator over a set's values. + * + * @return {Iterator} + */ +BitVector.prototype.values = function() { + var length = this.length, + inner = false, + byte, + bit, + array = this.array, + l = array.length, + i = 0, + j = -1, + b = 32; + + return new Iterator(function next() { + if (!inner) { + + if (i >= l) + return { + done: true + }; + + if (i === l - 1) + b = length % 32 || 32; + + byte = array[i++]; + inner = true; + j = -1; + } + + j++; + + if (j >= b) { + inner = false; + return next(); + } + + bit = (byte >> j) & 1; + + return { + value: bit + }; + }); +}; + +/** + * Method used to create an iterator over a set's entries. 
+ * + * @return {Iterator} + */ +BitVector.prototype.entries = function() { + var length = this.length, + inner = false, + byte, + bit, + array = this.array, + index, + l = array.length, + i = 0, + j = -1, + b = 32; + + return new Iterator(function next() { + if (!inner) { + + if (i >= l) + return { + done: true + }; + + if (i === l - 1) + b = length % 32 || 32; + + byte = array[i++]; + inner = true; + j = -1; + } + + j++; + index = (~-i) * 32 + j; + + if (j >= b) { + inner = false; + return next(); + } + + bit = (byte >> j) & 1; + + return { + value: [index, bit] + }; + }); +}; + +/** + * Attaching the #.values method to Symbol.iterator if possible. + */ +if (typeof Symbol !== 'undefined') + BitVector.prototype[Symbol.iterator] = BitVector.prototype.values; + +/** + * Convenience known methods. + */ +BitVector.prototype.inspect = function() { + var proxy = new Uint8Array(this.length); + + this.forEach(function(bit, i) { + proxy[i] = bit; + }); + + // Trick so that node displays the name of the constructor + Object.defineProperty(proxy, 'constructor', { + value: BitVector, + enumerable: false + }); + + return proxy; +}; + +if (typeof Symbol !== 'undefined') + BitVector.prototype[Symbol.for('nodejs.util.inspect.custom')] = BitVector.prototype.inspect; + +BitVector.prototype.toJSON = function() { + return Array.from(this.array.slice(0, (this.length >> 5) + 1)); +}; + +/** + * Exporting. 
+ */
+module.exports = BitVector;
diff --git a/amplify/functions/deleteDocument/node_modules/mnemonist/bk-tree.d.ts b/amplify/functions/deleteDocument/node_modules/mnemonist/bk-tree.d.ts
new file mode 100644
index 0000000..f158dfd
--- /dev/null
+++ b/amplify/functions/deleteDocument/node_modules/mnemonist/bk-tree.d.ts
@@ -0,0 +1,24 @@
+/**
+ * Mnemonist BKTree Typings
+ * =========================
+ */
+type DistanceFunction<T> = (a: T, b: T) => number;
+
+export default class BKTree<T> {
+
+ // Members
+ distance: DistanceFunction<T>;
+ size: number;
+
+ // Constructor
+ constructor(distance: DistanceFunction<T>);
+
+ // Methods
+ add(item: T): this;
+ search(n: number, query: T): Array<{item: T, distance: number}>;
+ toJSON(): object;
+ inspect(): any;
+
+ // Statics
+ static from<I>(iterable: Iterable<I> | {[key: string] : I}, distance: DistanceFunction<I>): BKTree<I>;
+}
\ No newline at end of file
diff --git a/amplify/functions/deleteDocument/node_modules/mnemonist/bk-tree.js b/amplify/functions/deleteDocument/node_modules/mnemonist/bk-tree.js
new file mode 100644
index 0000000..9c9792d
--- /dev/null
+++ b/amplify/functions/deleteDocument/node_modules/mnemonist/bk-tree.js
@@ -0,0 +1,180 @@
+/* eslint no-constant-condition: 0 */
+/**
+ * Mnemonist BK Tree
+ * ==================
+ *
+ * Implementation of a Burkhard-Keller tree, allowing fast lookups of words
+ * that lie within a specified distance of the query word.
+ *
+ * [Reference]:
+ * https://en.wikipedia.org/wiki/BK-tree
+ *
+ * [Article]:
+ * W. Burkhard and R. Keller. Some approaches to best-match file searching,
+ * CACM, 1973
+ */
+var forEach = require('obliterator/foreach');
+
+/**
+ * BK Tree.
+ *
+ * @constructor
+ * @param {function} distance - Distance function to use. 
+ */ +function BKTree(distance) { + + if (typeof distance !== 'function') + throw new Error('mnemonist/BKTree.constructor: given `distance` should be a function.'); + + this.distance = distance; + this.clear(); +} + +/** + * Method used to add an item to the tree. + * + * @param {any} item - Item to add. + * @return {BKTree} + */ +BKTree.prototype.add = function(item) { + + // Initializing the tree with the first given word + if (!this.root) { + this.root = { + item: item, + children: {} + }; + + this.size++; + return this; + } + + var node = this.root, + d; + + while (true) { + d = this.distance(item, node.item); + + if (!node.children[d]) + break; + + node = node.children[d]; + } + + node.children[d] = { + item: item, + children: {} + }; + + this.size++; + return this; +}; + +/** + * Method used to query the tree. + * + * @param {number} n - Maximum distance between query & item. + * @param {any} query - Query + * @return {BKTree} + */ +BKTree.prototype.search = function(n, query) { + if (!this.root) + return []; + + var found = [], + stack = [this.root], + node, + child, + d, + i, + l; + + while (stack.length) { + node = stack.pop(); + d = this.distance(query, node.item); + + if (d <= n) + found.push({item: node.item, distance: d}); + + for (i = d - n, l = d + n + 1; i < l; i++) { + child = node.children[i]; + + if (child) + stack.push(child); + } + } + + return found; +}; + +/** + * Method used to clear the tree. + * + * @return {undefined} + */ +BKTree.prototype.clear = function() { + + // Properties + this.size = 0; + this.root = null; +}; + +/** + * Convenience known methods. 
+ */ +BKTree.prototype.toJSON = function() { + return this.root; +}; + +BKTree.prototype.inspect = function() { + var array = [], + stack = [this.root], + node, + d; + + while (stack.length) { + node = stack.pop(); + + if (!node) + continue; + + array.push(node.item); + + for (d in node.children) + stack.push(node.children[d]); + } + + // Trick so that node displays the name of the constructor + Object.defineProperty(array, 'constructor', { + value: BKTree, + enumerable: false + }); + + return array; +}; + +if (typeof Symbol !== 'undefined') + BKTree.prototype[Symbol.for('nodejs.util.inspect.custom')] = BKTree.prototype.inspect; + +/** + * Static @.from function taking an arbitrary iterable & converting it into + * a tree. + * + * @param {Iterable} iterable - Target iterable. + * @param {function} distance - Distance function. + * @return {Heap} + */ +BKTree.from = function(iterable, distance) { + var tree = new BKTree(distance); + + forEach(iterable, function(value) { + tree.add(value); + }); + + return tree; +}; + +/** + * Exporting. 
+ */ +module.exports = BKTree; diff --git a/amplify/functions/deleteDocument/node_modules/mnemonist/bloom-filter.d.ts b/amplify/functions/deleteDocument/node_modules/mnemonist/bloom-filter.d.ts new file mode 100644 index 0000000..dc9b2fa --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/mnemonist/bloom-filter.d.ts @@ -0,0 +1,29 @@ +/** + * Mnemonist BloomFilter Typings + * ============================== + */ +type BloomFilterOptions = { + capacity: number; + errorRate?: number; +} + +export default class BloomFilter { + + // Members + capacity: number; + errorRate: number; + hashFunctions: number; + + // Constructor + constructor(capacity: number); + constructor(options: BloomFilterOptions); + + // Methods + clear(): void; + add(string: string): this; + test(string: string): boolean; + toJSON(): Uint8Array; + + // Statics + from(iterable: Iterable, options?: number | BloomFilterOptions): BloomFilter; +} diff --git a/amplify/functions/deleteDocument/node_modules/mnemonist/bloom-filter.js b/amplify/functions/deleteDocument/node_modules/mnemonist/bloom-filter.js new file mode 100644 index 0000000..ba3ee76 --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/mnemonist/bloom-filter.js @@ -0,0 +1,186 @@ +/** + * Mnemonist Bloom Filter + * ======================= + * + * Bloom Filter implementation relying on MurmurHash3. + */ +var murmurhash3 = require('./utils/murmurhash3.js'), + forEach = require('obliterator/foreach'); + +/** + * Constants. + */ +var LN2_SQUARED = Math.LN2 * Math.LN2; + +/** + * Defaults. + */ +var DEFAULTS = { + errorRate: 0.005 +}; + +/** + * Function used to convert a string into a Uint16 byte array. + * + * @param {string} string - Target string. 
+ * @return {Uint16Array} + */ +function stringToByteArray(string) { + var array = new Uint16Array(string.length), + i, + l; + + for (i = 0, l = string.length; i < l; i++) + array[i] = string.charCodeAt(i); + + return array; +} + +/** + * Function used to hash the given byte array. + * + * @param {number} length - Length of the filter's byte array. + * @param {number} seed - Seed to use for the hash function. + * @param {Uint16Array} - Byte array representing the string. + * @return {number} - The hash. + * + * @note length * 8 should probably already be computed as well as seeds. + */ +function hashArray(length, seed, array) { + var hash = murmurhash3((seed * 0xFBA4C795) & 0xFFFFFFFF, array); + + return hash % (length * 8); +} + +/** + * Bloom Filter. + * + * @constructor + * @param {number|object} capacityOrOptions - Capacity or options. + */ +function BloomFilter(capacityOrOptions) { + var options = {}; + + if (!capacityOrOptions) + throw new Error('mnemonist/BloomFilter.constructor: a BloomFilter must be created with a capacity.'); + + if (typeof capacityOrOptions === 'object') + options = capacityOrOptions; + else + options.capacity = capacityOrOptions; + + // Handling capacity + if (typeof options.capacity !== 'number' || options.capacity <= 0) + throw new Error('mnemonist/BloomFilter.constructor: `capacity` option should be a positive integer.'); + + this.capacity = options.capacity; + + // Handling error rate + this.errorRate = options.errorRate || DEFAULTS.errorRate; + + if (typeof this.errorRate !== 'number' || options.errorRate <= 0) + throw new Error('mnemonist/BloomFilter.constructor: `errorRate` option should be a positive float.'); + + this.clear(); +} + +/** + * Method used to clear the filter. 
+ * + * @return {undefined} + */ +BloomFilter.prototype.clear = function() { + + // Optimizing number of bits & number of hash functions + var bits = -1 / LN2_SQUARED * this.capacity * Math.log(this.errorRate), + length = (bits / 8) | 0; + + this.hashFunctions = (length * 8 / this.capacity * Math.LN2) | 0; + + // Creating the data array + this.data = new Uint8Array(length); + + return; +}; + +/** + * Method used to add an string to the filter. + * + * @param {string} string - Item to add. + * @return {BloomFilter} + * + * @note Should probably create a hash function working directly on a string. + */ +BloomFilter.prototype.add = function(string) { + + // Converting the string to a byte array + var array = stringToByteArray(string); + + // Applying the n hash functions + for (var i = 0, l = this.hashFunctions; i < l; i++) { + var index = hashArray(this.data.length, i, array), + position = (1 << (7 & index)); + + this.data[index >> 3] |= position; + } + + return this; +}; + +/** + * Method used to test the given string. + * + * @param {string} string - Item to test. + * @return {boolean} + */ +BloomFilter.prototype.test = function(string) { + + // Converting the string to a byte array + var array = stringToByteArray(string); + + // Applying the n hash functions + for (var i = 0, l = this.hashFunctions; i < l; i++) { + var index = hashArray(this.data.length, i, array); + + if (!(this.data[index >> 3] & (1 << (7 & index)))) + return false; + } + + return true; +}; + +/** + * Convenience known methods. + */ +BloomFilter.prototype.toJSON = function() { + return this.data; +}; + +/** + * Static @.from function taking an arbitrary iterable & converting it into + * a filter. + * + * @param {Iterable} iterable - Target iterable. 
+ * @return {BloomFilter} + */ +BloomFilter.from = function(iterable, options) { + if (!options) { + options = iterable.length || iterable.size; + + if (typeof options !== 'number') + throw new Error('BloomFilter.from: could not infer the filter\'s capacity. Try passing it as second argument.'); + } + + var filter = new BloomFilter(options); + + forEach(iterable, function(value) { + filter.add(value); + }); + + return filter; +}; + +/** + * Exporting. + */ +module.exports = BloomFilter; diff --git a/amplify/functions/deleteDocument/node_modules/mnemonist/circular-buffer.d.ts b/amplify/functions/deleteDocument/node_modules/mnemonist/circular-buffer.d.ts new file mode 100644 index 0000000..ec1fa4c --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/mnemonist/circular-buffer.d.ts @@ -0,0 +1,34 @@ +/** + * Mnemonist CircularBuffer Typings + * ================================= + */ +import {IArrayLikeConstructor} from './utils/types'; + +export default class CircularBuffer implements Iterable { + + // Members + capacity: number; + size: number; + + // Constructor + constructor(ArrayClass: IArrayLikeConstructor, capacity: number); + + // Methods + clear(): void; + push(item: T): number; + unshift(item: T): number; + pop(): T | undefined; + shift(): T | undefined; + peekFirst(): T | undefined; + peekLast(): T | undefined; + get(index: number): T | undefined; + forEach(callback: (item: T, index: number, buffer: this) => void, scope?: any): void; + toArray(): Iterable; + values(): IterableIterator; + entries(): IterableIterator<[number, T]>; + [Symbol.iterator](): IterableIterator; + inspect(): any; + + // Statics + static from(iterable: Iterable | {[key: string] : I}, ArrayClass: IArrayLikeConstructor, capacity?: number): CircularBuffer; +} diff --git a/amplify/functions/deleteDocument/node_modules/mnemonist/circular-buffer.js b/amplify/functions/deleteDocument/node_modules/mnemonist/circular-buffer.js new file mode 100644 index 0000000..d3ef950 --- /dev/null 
+++ b/amplify/functions/deleteDocument/node_modules/mnemonist/circular-buffer.js @@ -0,0 +1,131 @@ +/** + * Mnemonist CircularBuffer + * ========================= + * + * Circular buffer implementation fit to use as a finite deque. + */ +var iterables = require('./utils/iterables.js'), + FixedDeque = require('./fixed-deque'); + +/** + * CircularBuffer. + * + * @constructor + */ +function CircularBuffer(ArrayClass, capacity) { + + if (arguments.length < 2) + throw new Error('mnemonist/circular-buffer: expecting an Array class and a capacity.'); + + if (typeof capacity !== 'number' || capacity <= 0) + throw new Error('mnemonist/circular-buffer: `capacity` should be a positive number.'); + + this.ArrayClass = ArrayClass; + this.capacity = capacity; + this.items = new ArrayClass(this.capacity); + this.clear(); +} + +/** + * Pasting most of the prototype from FixedDeque. + */ +function paste(name) { + CircularBuffer.prototype[name] = FixedDeque.prototype[name]; +} + +Object.keys(FixedDeque.prototype).forEach(paste); + +if (typeof Symbol !== 'undefined') + Object.getOwnPropertySymbols(FixedDeque.prototype).forEach(paste); + +/** + * Method used to append a value to the buffer. + * + * @param {any} item - Item to append. + * @return {number} - Returns the new size of the buffer. + */ +CircularBuffer.prototype.push = function(item) { + var index = (this.start + this.size) % this.capacity; + + this.items[index] = item; + + // Overwriting? + if (this.size === this.capacity) { + + // If start is at the end, we wrap around the buffer + this.start = (index + 1) % this.capacity; + + return this.size; + } + + return ++this.size; +}; + +/** + * Method used to prepend a value to the buffer. + * + * @param {any} item - Item to prepend. + * @return {number} - Returns the new size of the buffer. 
+ */ +CircularBuffer.prototype.unshift = function(item) { + var index = this.start - 1; + + if (this.start === 0) + index = this.capacity - 1; + + this.items[index] = item; + + // Overwriting + if (this.size === this.capacity) { + + this.start = index; + + return this.size; + } + + this.start = index; + + return ++this.size; +}; + +/** + * Static @.from function taking an arbitrary iterable & converting it into + * a circular buffer. + * + * @param {Iterable} iterable - Target iterable. + * @param {function} ArrayClass - Array class to use. + * @param {number} capacity - Desired capacity. + * @return {FiniteStack} + */ +CircularBuffer.from = function(iterable, ArrayClass, capacity) { + if (arguments.length < 3) { + capacity = iterables.guessLength(iterable); + + if (typeof capacity !== 'number') + throw new Error('mnemonist/circular-buffer.from: could not guess iterable length. Please provide desired capacity as last argument.'); + } + + var buffer = new CircularBuffer(ArrayClass, capacity); + + if (iterables.isArrayLike(iterable)) { + var i, l; + + for (i = 0, l = iterable.length; i < l; i++) + buffer.items[i] = iterable[i]; + + buffer.size = l; + + return buffer; + } + + iterables.forEach(iterable, function(value) { + buffer.push(value); + }); + + return buffer; +}; + +/** + * Exporting. + */ +module.exports = CircularBuffer; diff --git a/amplify/functions/deleteDocument/node_modules/mnemonist/critbit-tree-map.js b/amplify/functions/deleteDocument/node_modules/mnemonist/critbit-tree-map.js new file mode 100644 index 0000000..1c41a9a --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/mnemonist/critbit-tree-map.js @@ -0,0 +1,515 @@ +/* eslint no-constant-condition: 0 */ +/** + * Mnemonist CritBitTreeMap + * ========================= + * + * JavaScript implementation of a crit-bit tree, also called PATRICIA tree. + * This tree is a basically a bitwise radix tree and is supposedly much more + * efficient than a standard Trie. 
+ * + * [References]: + * https://cr.yp.to/critbit.html + * https://www.imperialviolet.org/binary/critbit.pdf + */ +var bitwise = require('./utils/bitwise.js'); + +/** + * Helpers. + */ + +/** + * Helper returning the direction we need to take given a key and an + * encoded critbit. + * + * @param {string} key - Target key. + * @param {number} critbit - Packed address of byte + mask. + * @return {number} - 0, left or 1, right. + */ +function getDirection(key, critbit) { + var byteIndex = critbit >> 8; + + if (byteIndex > key.length - 1) + return 0; + + var byte = key.charCodeAt(byteIndex), + mask = critbit & 0xff; + + return (1 + (byte | mask)) >> 8; +} + +/** + * Helper returning the packed address of byte + mask or -1 if strings + * are identical. + * + * @param {string} a - First key. + * @param {string} b - Second key. + * @return {number} - Packed address of byte + mask. + */ +function findCriticalBit(a, b) { + var i = 0, + tmp; + + // Swapping so a is the shortest + if (a.length > b.length) { + tmp = b; + b = a; + a = tmp; + } + + var l = a.length, + mask; + + while (i < l) { + if (a[i] !== b[i]) { + mask = bitwise.criticalBit8Mask( + a.charCodeAt(i), + b.charCodeAt(i) + ); + + return (i << 8) | mask; + } + + i++; + } + + // Strings are identical + if (a.length === b.length) + return -1; + + // NOTE: x ^ 0 is the same as x + mask = bitwise.criticalBit8Mask(b.charCodeAt(i)); + + return (i << 8) | mask; +} + +/** + * Class representing a crit-bit tree's internal node. + * + * @constructor + * @param {number} critbit - Packed address of byte + mask. + */ +function InternalNode(critbit) { + this.critbit = critbit; + this.left = null; + this.right = null; +} + +/** + * Class representing a crit-bit tree's external node. + * Note that it is possible to replace those nodes by flat arrays. + * + * @constructor + * @param {string} key - Node's key. + * @param {any} value - Arbitrary value. 
+ */ +function ExternalNode(key, value) { + this.key = key; + this.value = value; +} + +/** + * CritBitTreeMap. + * + * @constructor + */ +function CritBitTreeMap() { + + // Properties + this.root = null; + this.size = 0; + + this.clear(); +} + +/** + * Method used to clear the CritBitTreeMap. + * + * @return {undefined} + */ +CritBitTreeMap.prototype.clear = function() { + + // Properties + this.root = null; + this.size = 0; +}; + +/** + * Method used to set the value of the given key in the trie. + * + * @param {string} key - Key to set. + * @param {any} value - Arbitrary value. + * @return {CritBitTreeMap} + */ +CritBitTreeMap.prototype.set = function(key, value) { + + // Tree is empty + if (this.size === 0) { + this.root = new ExternalNode(key, value); + this.size++; + + return this; + } + + // Walk state + var node = this.root, + ancestors = [], + path = [], + ancestor, + parent, + child, + critbit, + internal, + left, + leftPath, + best, + dir, + i, + l; + + // Walking the tree + while (true) { + + // Traversing an internal node + if (node instanceof InternalNode) { + dir = getDirection(key, node.critbit); + + // Going left & creating key if not yet there + if (dir === 0) { + if (!node.left) { + node.left = new ExternalNode(key, value); + return this; + } + + ancestors.push(node); + path.push(true); + + node = node.left; + } + + // Going right & creating key if not yet there + else { + if (!node.right) { + node.right = new ExternalNode(key, value); + return this; + } + + ancestors.push(node); + path.push(false); + + node = node.right; + } + } + + // Reaching an external node + else { + + // 1. 
Creating a new external node + critbit = findCriticalBit(key, node.key); + + // Key is identical, we just replace the value + if (critbit === -1) { + node.value = value; + return this; + } + + this.size++; + + internal = new InternalNode(critbit); + + left = getDirection(key, critbit) === 0; + + // TODO: maybe setting opposite pointer is not necessary + if (left) { + internal.left = new ExternalNode(key, value); + internal.right = node; + } + else { + internal.left = node; + internal.right = new ExternalNode(key, value); + } + + // 2. Bubbling up + best = -1; + l = ancestors.length; + + for (i = l - 1; i >= 0; i--) { + ancestor = ancestors[i]; + + if (ancestor.critbit > critbit) + continue; + + best = i; + break; + } + + // Do we need to attach to the root? + if (best < 0) { + this.root = internal; + + // Need to rewire parent as child? + if (l > 0) { + parent = ancestors[0]; + + if (left) + internal.right = parent; + else + internal.left = parent; + } + } + + // Simple case without rotation + else if (best === l - 1) { + parent = ancestors[best]; + leftPath = path[best]; + + if (leftPath) + parent.left = internal; + else + parent.right = internal; + } + + // Full rotation + else { + parent = ancestors[best]; + leftPath = path[best]; + child = ancestors[best + 1]; + + if (leftPath) + parent.left = internal; + else + parent.right = internal; + + if (left) + internal.right = child; + else + internal.left = child; + } + + return this; + } + } +}; + +/** + * Method used to get the value attached to the given key in the tree or + * undefined if not found. + * + * @param {string} key - Key to get. + * @return {any} + */ +CritBitTreeMap.prototype.get = function(key) { + + // Walk state + var node = this.root, + dir; + + // Walking the tree + while (true) { + + // Dead end + if (node === null) + return; + + // Traversing an internal node + if (node instanceof InternalNode) { + dir = getDirection(key, node.critbit); + + node = dir ? 
node.right : node.left; + } + + // Reaching an external node + else { + if (node.key !== key) + return; + + return node.value; + } + } +}; + +/** + * Method used to return whether the given key exists in the tree. + * + * @param {string} key - Key to test. + * @return {boolean} + */ +CritBitTreeMap.prototype.has = function(key) { + + // Walk state + var node = this.root, + dir; + + // Walking the tree + while (true) { + + // Dead end + if (node === null) + return false; + + // Traversing an internal node + if (node instanceof InternalNode) { + dir = getDirection(key, node.critbit); + + node = dir ? node.right : node.left; + } + + // Reaching an external node + else { + return node.key === key; + } + } +}; + +/** + * Method used to delete the given key from the tree and return whether the + * key did exist or not. + * + * @param {string} key - Key to delete. + * @return {boolean} + */ +CritBitTreeMap.prototype.delete = function(key) { + + // Walk state + var node = this.root, + dir; + + var parent = null, + grandParent = null, + wentLeftForParent = false, + wentLeftForGrandparent = false; + + // Walking the tree + while (true) { + + // Dead end + if (node === null) + return false; + + // Traversing an internal node + if (node instanceof InternalNode) { + dir = getDirection(key, node.critbit); + + if (dir === 0) { + grandParent = parent; + wentLeftForGrandparent = wentLeftForParent; + parent = node; + wentLeftForParent = true; + + node = node.left; + } + else { + grandParent = parent; + wentLeftForGrandparent = wentLeftForParent; + parent = node; + wentLeftForParent = false; + + node = node.right; + } + } + + // Reaching an external node + else { + if (key !== node.key) + return false; + + this.size--; + + // Rewiring + if (parent === null) { + this.root = null; + } + + else if (grandParent === null) { + if (wentLeftForParent) + this.root = parent.right; + else + this.root = parent.left; + } + + else { + if (wentLeftForGrandparent) { + if (wentLeftForParent) { + 
grandParent.left = parent.right; + } + else { + grandParent.left = parent.left; + } + } + else { + if (wentLeftForParent) { + grandParent.right = parent.right; + } + else { + grandParent.right = parent.left; + } + } + } + + return true; + } + } +}; + +/** + * Method used to iterate over the tree in key order. + * + * @param {function} callback - Function to call for each item. + * @param {object} scope - Optional scope. + * @return {undefined} + */ +CritBitTreeMap.prototype.forEach = function(callback, scope) { + scope = arguments.length > 1 ? scope : this; + + // Inorder traversal of the tree + var current = this.root, + stack = []; + + while (true) { + + if (current !== null) { + stack.push(current); + + current = current instanceof InternalNode ? current.left : null; + } + + else { + if (stack.length > 0) { + current = stack.pop(); + + if (current instanceof ExternalNode) + callback.call(scope, current.value, current.key); + + current = current instanceof InternalNode ? current.right : null; + } + else { + break; + } + } + } +}; + +/** + * Convenience known methods. + */ +CritBitTreeMap.prototype.inspect = function() { + return this; +}; + +if (typeof Symbol !== 'undefined') + CritBitTreeMap.prototype[Symbol.for('nodejs.util.inspect.custom')] = CritBitTreeMap.prototype.inspect; + +/** + * Static @.from function taking an arbitrary iterable & converting it into + * a CritBitTreeMap. + * + * @param {Iterable} iterable - Target iterable. + * @return {CritBitTreeMap} + */ +// CritBitTreeMap.from = function(iterable) { + +// }; + +/** + * Exporting. 
+ */ +module.exports = CritBitTreeMap; diff --git a/amplify/functions/deleteDocument/node_modules/mnemonist/default-map.d.ts b/amplify/functions/deleteDocument/node_modules/mnemonist/default-map.d.ts new file mode 100644 index 0000000..186878c --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/mnemonist/default-map.d.ts @@ -0,0 +1,29 @@ +/** + * Mnemonist DefaultMap Typings + * ============================= + */ +export default class DefaultMap implements Iterable<[K, V]> { + + // Members + size: number; + + // Constructor + constructor(factory: (key: K, index: number) => V); + + // Methods + clear(): void; + set(key: K, value: V): this; + delete(key: K): boolean; + has(key: K): boolean; + get(key: K): V; + peek(key: K): V | undefined; + forEach(callback: (value: V, key: K, map: this) => void, scope?: any): void; + keys(): IterableIterator; + values(): IterableIterator; + entries(): IterableIterator<[K, V]>; + [Symbol.iterator](): IterableIterator<[K, V]>; + inspect(): any; + + // Statics + static autoIncrement(): number; +} diff --git a/amplify/functions/deleteDocument/node_modules/mnemonist/default-map.js b/amplify/functions/deleteDocument/node_modules/mnemonist/default-map.js new file mode 100644 index 0000000..dbe41d7 --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/mnemonist/default-map.js @@ -0,0 +1,162 @@ +/** + * Mnemonist DefaultMap + * ===================== + * + * JavaScript implementation of a default map that will return a constructed + * value any time one tries to access an inexisting key. It's quite similar + * to python's defaultdict. + */ + +/** + * DefaultMap. + * + * @constructor + */ +function DefaultMap(factory) { + if (typeof factory !== 'function') + throw new Error('mnemonist/DefaultMap.constructor: expecting a function.'); + + this.items = new Map(); + this.factory = factory; + this.size = 0; +} + +/** + * Method used to clear the structure. 
+ * + * @return {undefined} + */ +DefaultMap.prototype.clear = function() { + + // Properties + this.items.clear(); + this.size = 0; +}; + +/** + * Method used to get the value set for given key. If the key does not exist, + * the value will be created using the provided factory. + * + * @param {any} key - Target key. + * @return {any} + */ +DefaultMap.prototype.get = function(key) { + var value = this.items.get(key); + + if (typeof value === 'undefined') { + value = this.factory(key, this.size); + this.items.set(key, value); + this.size++; + } + + return value; +}; + +/** + * Method used to get the value set for given key. If the key does not exist, + * a value won't be created. + * + * @param {any} key - Target key. + * @return {any} + */ +DefaultMap.prototype.peek = function(key) { + return this.items.get(key); +}; + +/** + * Method used to set a value for given key. + * + * @param {any} key - Target key. + * @param {any} value - Value. + * @return {DefaultMap} + */ +DefaultMap.prototype.set = function(key, value) { + this.items.set(key, value); + this.size = this.items.size; + + return this; +}; + +/** + * Method used to test the existence of a key in the map. + * + * @param {any} key - Target key. + * @return {boolean} + */ +DefaultMap.prototype.has = function(key) { + return this.items.has(key); +}; + +/** + * Method used to delete target key. + * + * @param {any} key - Target key. + * @return {boolean} + */ +DefaultMap.prototype.delete = function(key) { + var deleted = this.items.delete(key); + + this.size = this.items.size; + + return deleted; +}; + +/** + * Method used to iterate over each of the key/value pairs. + * + * @param {function} callback - Function to call for each item. + * @param {object} scope - Optional scope. + * @return {undefined} + */ +DefaultMap.prototype.forEach = function(callback, scope) { + scope = arguments.length > 1 ? scope : this; + + this.items.forEach(callback, scope); +}; + +/** + * Iterators. 
+ */ +DefaultMap.prototype.entries = function() { + return this.items.entries(); +}; + +DefaultMap.prototype.keys = function() { + return this.items.keys(); +}; + +DefaultMap.prototype.values = function() { + return this.items.values(); +}; + +/** + * Attaching the #.entries method to Symbol.iterator if possible. + */ +if (typeof Symbol !== 'undefined') + DefaultMap.prototype[Symbol.iterator] = DefaultMap.prototype.entries; + +/** + * Convenience known methods. + */ +DefaultMap.prototype.inspect = function() { + return this.items; +}; + +if (typeof Symbol !== 'undefined') + DefaultMap.prototype[Symbol.for('nodejs.util.inspect.custom')] = DefaultMap.prototype.inspect; + +/** + * Typical factories. + */ +DefaultMap.autoIncrement = function() { + var i = 0; + + return function() { + return i++; + }; +}; + +/** + * Exporting. + */ +module.exports = DefaultMap; diff --git a/amplify/functions/deleteDocument/node_modules/mnemonist/default-weak-map.d.ts b/amplify/functions/deleteDocument/node_modules/mnemonist/default-weak-map.d.ts new file mode 100644 index 0000000..579a883 --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/mnemonist/default-weak-map.d.ts @@ -0,0 +1,18 @@ +/** + * Mnemonist DefaultWeakMap Typings + * ================================ + */ +export default class DefaultWeakMap { + + // Constructor + constructor(factory: (key: K) => V); + + // Methods + clear(): void; + set(key: K, value: V): this; + delete(key: K): boolean; + has(key: K): boolean; + get(key: K): V; + peek(key: K): V | undefined; + inspect(): any; +} diff --git a/amplify/functions/deleteDocument/node_modules/mnemonist/default-weak-map.js b/amplify/functions/deleteDocument/node_modules/mnemonist/default-weak-map.js new file mode 100644 index 0000000..aa8931c --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/mnemonist/default-weak-map.js @@ -0,0 +1,108 @@ +/** + * Mnemonist DefaultWeakMap + * ========================= + * + * JavaScript implementation of a default 
weak map that will return a constructed + * value any time one tries to access an non-existing key. It is similar to + * DefaultMap but uses ES6 WeakMap that only holds weak reference to keys. + */ + +/** + * DefaultWeakMap. + * + * @constructor + */ +function DefaultWeakMap(factory) { + if (typeof factory !== 'function') + throw new Error('mnemonist/DefaultWeakMap.constructor: expecting a function.'); + + this.items = new WeakMap(); + this.factory = factory; +} + +/** + * Method used to clear the structure. + * + * @return {undefined} + */ +DefaultWeakMap.prototype.clear = function() { + + // Properties + this.items = new WeakMap(); +}; + +/** + * Method used to get the value set for given key. If the key does not exist, + * the value will be created using the provided factory. + * + * @param {any} key - Target key. + * @return {any} + */ +DefaultWeakMap.prototype.get = function(key) { + var value = this.items.get(key); + + if (typeof value === 'undefined') { + value = this.factory(key); + this.items.set(key, value); + } + + return value; +}; + +/** + * Method used to get the value set for given key. If the key does not exist, + * a value won't be created. + * + * @param {any} key - Target key. + * @return {any} + */ +DefaultWeakMap.prototype.peek = function(key) { + return this.items.get(key); +}; + +/** + * Method used to set a value for given key. + * + * @param {any} key - Target key. + * @param {any} value - Value. + * @return {DefaultMap} + */ +DefaultWeakMap.prototype.set = function(key, value) { + this.items.set(key, value); + return this; +}; + +/** + * Method used to test the existence of a key in the map. + * + * @param {any} key - Target key. + * @return {boolean} + */ +DefaultWeakMap.prototype.has = function(key) { + return this.items.has(key); +}; + +/** + * Method used to delete target key. + * + * @param {any} key - Target key. 
+ * @return {boolean} + */ +DefaultWeakMap.prototype.delete = function(key) { + return this.items.delete(key); +}; + +/** + * Convenience known methods. + */ +DefaultWeakMap.prototype.inspect = function() { + return this.items; +}; + +if (typeof Symbol !== 'undefined') + DefaultWeakMap.prototype[Symbol.for('nodejs.util.inspect.custom')] = DefaultWeakMap.prototype.inspect; + +/** + * Exporting. + */ +module.exports = DefaultWeakMap; diff --git a/amplify/functions/deleteDocument/node_modules/mnemonist/fibonacci-heap.d.ts b/amplify/functions/deleteDocument/node_modules/mnemonist/fibonacci-heap.d.ts new file mode 100644 index 0000000..cb15ab0 --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/mnemonist/fibonacci-heap.d.ts @@ -0,0 +1,65 @@ +/** + * Mnemonist FibonacciHeap Typings + * ================================ + */ +type FibonacciHeapComparator = (a: T, b: T) => number; + +export default class FibonacciHeap { + + // Members + size: number; + + // Constructor + constructor(comparator?: FibonacciHeapComparator); + + // Methods + clear(): void; + push(item: T): number; + peek(): T | undefined; + pop(): T | undefined; + inspect(): any; + + // Statics + static from( + iterable: Iterable | {[key: string] : I}, + comparator?: FibonacciHeapComparator + ): FibonacciHeap; +} + +export class MinFibonacciHeap { + + // Members + size: number; + + // Constructor + constructor(comparator?: FibonacciHeapComparator); + + // Methods + clear(): void; + push(item: T): number; + peek(): T | undefined; + pop(): T | undefined; + inspect(): any; + + // Statics + static from(iterable: Iterable | {[key: string] : I}): FibonacciHeap; +} + +export class MaxFibonacciHeap { + + // Members + size: number; + + // Constructor + constructor(comparator?: FibonacciHeapComparator); + + // Methods + clear(): void; + push(item: T): number; + peek(): T | undefined; + pop(): T | undefined; + inspect(): any; + + // Statics + static from(iterable: Iterable | {[key: string] : I}): 
FibonacciHeap; +} diff --git a/amplify/functions/deleteDocument/node_modules/mnemonist/fibonacci-heap.js b/amplify/functions/deleteDocument/node_modules/mnemonist/fibonacci-heap.js new file mode 100644 index 0000000..f41334f --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/mnemonist/fibonacci-heap.js @@ -0,0 +1,320 @@ +/* eslint no-constant-condition: 0 */ +/** + * Mnemonist Fibonacci Heap + * ========================= + * + * Fibonacci heap implementation. + */ +var comparators = require('./utils/comparators.js'), + forEach = require('obliterator/foreach'); + +var DEFAULT_COMPARATOR = comparators.DEFAULT_COMPARATOR, + reverseComparator = comparators.reverseComparator; + +/** + * Fibonacci Heap. + * + * @constructor + */ +function FibonacciHeap(comparator) { + this.clear(); + this.comparator = comparator || DEFAULT_COMPARATOR; + + if (typeof this.comparator !== 'function') + throw new Error('mnemonist/FibonacciHeap.constructor: given comparator should be a function.'); +} + +/** + * Method used to clear the heap. + * + * @return {undefined} + */ +FibonacciHeap.prototype.clear = function() { + + // Properties + this.root = null; + this.min = null; + this.size = 0; +}; + +/** + * Function used to create a node. + * + * @param {any} item - Target item. + * @return {object} + */ +function createNode(item) { + return { + item: item, + degree: 0 + }; +} + +/** + * Function used to merge the given node with the root list. + * + * @param {FibonacciHeap} heap - Target heap. + * @param {Node} node - Target node. + */ +function mergeWithRoot(heap, node) { + if (!heap.root) { + heap.root = node; + } + else { + node.right = heap.root.right; + node.left = heap.root; + heap.root.right.left = node; + heap.root.right = node; + } +} + +/** + * Method used to push an item into the heap. + * + * @param {any} item - Item to push. 
+ * @return {number} + */ +FibonacciHeap.prototype.push = function(item) { + var node = createNode(item); + node.left = node; + node.right = node; + mergeWithRoot(this, node); + + if (!this.min || this.comparator(node.item, this.min.item) <= 0) + this.min = node; + + return ++this.size; +}; + +/** + * Method used to get the "first" item of the heap. + * + * @return {any} + */ +FibonacciHeap.prototype.peek = function() { + return this.min ? this.min.item : undefined; +}; + +/** + * Function used to consume the given linked list. + * + * @param {Node} head - Head node. + * @param {array} + */ +function consumeLinkedList(head) { + var nodes = [], + node = head, + flag = false; + + while (true) { + if (node === head && flag) + break; + else if (node === head) + flag = true; + + nodes.push(node); + node = node.right; + } + + return nodes; +} + +/** + * Function used to remove the target node from the root list. + * + * @param {FibonacciHeap} heap - Target heap. + * @param {Node} node - Target node. + */ +function removeFromRoot(heap, node) { + if (heap.root === node) + heap.root = node.right; + node.left.right = node.right; + node.right.left = node.left; +} + +/** + * Function used to merge the given node with the child list of a root node. + * + * @param {Node} parent - Parent node. + * @param {Node} node - Target node. + */ +function mergeWithChild(parent, node) { + if (!parent.child) { + parent.child = node; + } + else { + node.right = parent.child.right; + node.left = parent.child; + parent.child.right.left = node; + parent.child.right = node; + } +} + +/** + * Function used to link one node to another in the root list. + * + * @param {FibonacciHeap} heap - Target heap. + * @param {Node} y - Y node. + * @param {Node} x - X node. + */ +function link(heap, y, x) { + removeFromRoot(heap, y); + y.left = y; + y.right = y; + mergeWithChild(x, y); + x.degree++; + y.parent = x; +} + +/** + * Function used to consolidate the heap. 
+ * + * @param {FibonacciHeap} heap - Target heap. + */ +function consolidate(heap) { + var A = new Array(heap.size), + nodes = consumeLinkedList(heap.root), + i, l, x, y, d, t; + + for (i = 0, l = nodes.length; i < l; i++) { + x = nodes[i]; + d = x.degree; + + while (A[d]) { + y = A[d]; + + if (heap.comparator(x.item, y.item) > 0) { + t = x; + x = y; + y = t; + } + + link(heap, y, x); + A[d] = null; + d++; + } + + A[d] = x; + } + + for (i = 0; i < heap.size; i++) { + if (A[i] && heap.comparator(A[i].item, heap.min.item) <= 0) + heap.min = A[i]; + } +} + +/** + * Method used to retrieve & remove the "first" item of the heap. + * + * @return {any} + */ +FibonacciHeap.prototype.pop = function() { + if (!this.size) + return undefined; + + var z = this.min; + + if (z.child) { + var nodes = consumeLinkedList(z.child), + node, + i, + l; + + for (i = 0, l = nodes.length; i < l; i++) { + node = nodes[i]; + + mergeWithRoot(this, node); + delete node.parent; + } + } + + removeFromRoot(this, z); + + if (z === z.right) { + this.min = null; + this.root = null; + } + else { + this.min = z.right; + consolidate(this); + } + + this.size--; + + return z.item; +}; + +/** + * Convenience known methods. + */ +FibonacciHeap.prototype.inspect = function() { + var proxy = { + size: this.size + }; + + if (this.min && 'item' in this.min) + proxy.top = this.min.item; + + // Trick so that node displays the name of the constructor + Object.defineProperty(proxy, 'constructor', { + value: FibonacciHeap, + enumerable: false + }); + + return proxy; +}; + +if (typeof Symbol !== 'undefined') + FibonacciHeap.prototype[Symbol.for('nodejs.util.inspect.custom')] = FibonacciHeap.prototype.inspect; + +/** + * Fibonacci Maximum Heap. 
+ * + * @constructor + */ +function MaxFibonacciHeap(comparator) { + this.clear(); + this.comparator = comparator || DEFAULT_COMPARATOR; + + if (typeof this.comparator !== 'function') + throw new Error('mnemonist/FibonacciHeap.constructor: given comparator should be a function.'); + + this.comparator = reverseComparator(this.comparator); +} + +MaxFibonacciHeap.prototype = FibonacciHeap.prototype; + +/** + * Static @.from function taking an arbitrary iterable & converting it into + * a heap. + * + * @param {Iterable} iterable - Target iterable. + * @param {function} comparator - Custom comparator function. + * @return {FibonacciHeap} + */ +FibonacciHeap.from = function(iterable, comparator) { + var heap = new FibonacciHeap(comparator); + + forEach(iterable, function(value) { + heap.push(value); + }); + + return heap; +}; + +MaxFibonacciHeap.from = function(iterable, comparator) { + var heap = new MaxFibonacciHeap(comparator); + + forEach(iterable, function(value) { + heap.push(value); + }); + + return heap; +}; + +/** + * Exporting. + */ +FibonacciHeap.MinFibonacciHeap = FibonacciHeap; +FibonacciHeap.MaxFibonacciHeap = MaxFibonacciHeap; +module.exports = FibonacciHeap; diff --git a/amplify/functions/deleteDocument/node_modules/mnemonist/fixed-critbit-tree-map.js b/amplify/functions/deleteDocument/node_modules/mnemonist/fixed-critbit-tree-map.js new file mode 100644 index 0000000..9658fee --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/mnemonist/fixed-critbit-tree-map.js @@ -0,0 +1,427 @@ +/* eslint no-constant-condition: 0 */ + +/* eslint-disable */ + +/** + * Mnemonist FixedFixedCritBitTreeMap + * =================================== + * + * TODO... + * + * [References]: + * https://cr.yp.to/critbit.html + * https://www.imperialviolet.org/binary/critbit.pdf + */ +var bitwise = require('./utils/bitwise.js'), + typed = require('./utils/typed-arrays.js'); + +/** + * Helpers. 
+ */ + +/** + * Helper returning the direction we need to take given a key and an + * encoded critbit. + * + * @param {string} key - Target key. + * @param {number} critbit - Packed address of byte + mask. + * @return {number} - 0, left or 1, right. + */ +function getDirection(key, critbit) { + var byteIndex = critbit >> 8; + + if (byteIndex > key.length - 1) + return 0; + + var byte = key.charCodeAt(byteIndex), + mask = critbit & 0xff; + + return byte & mask; +} + +/** + * Helper returning the packed address of byte + mask or -1 if strings + * are identical. + * + * @param {string} a - First key. + * @param {string} b - Second key. + * @return {number} - Packed address of byte + mask. + */ +function findCriticalBit(a, b) { + var i = 0, + tmp; + + // Swapping so a is the shortest + if (a.length > b.length) { + tmp = b; + b = a; + a = tmp; + } + + var l = a.length, + mask; + + while (i < l) { + if (a[i] !== b[i]) { + mask = bitwise.msb8( + a.charCodeAt(i) ^ b.charCodeAt(i) + ); + + return (i << 8) | mask; + } + + i++; + } + + // Strings are identical + if (a.length === b.length) + return -1; + + // NOTE: x ^ 0 is the same as x + mask = bitwise.msb8(b.charCodeAt(i)); + + return (i << 8) | mask; +} + +/** + * FixedCritBitTreeMap. + * + * @constructor + */ +function FixedCritBitTreeMap(capacity) { + + if (typeof capacity !== 'number' || capacity <= 0) + throw new Error('mnemonist/fixed-critbit-tree-map: `capacity` should be a positive number.'); + + // Properties + this.capacity = capacity; + this.offset = 0; + this.root = 0; + this.size = 0; + + var PointerArray = typed.getSignedPointerArray(capacity + 1); + + this.keys = new Array(capacity); + this.values = new Array(capacity); + this.lefts = new PointerArray(capacity - 1); + this.rights = new PointerArray(capacity - 1); + this.critbits = new Uint32Array(capacity); +} + +/** + * Method used to clear the FixedCritBitTreeMap. 
+ * + * @return {undefined} + */ +FixedCritBitTreeMap.prototype.clear = function() { + + // Properties + // TODO... + this.root = null; + this.size = 0; +}; + +/** + * Method used to set the value of the given key in the trie. + * + * @param {string} key - Key to set. + * @param {any} value - Arbitrary value. + * @return {FixedCritBitTreeMap} + */ +FixedCritBitTreeMap.prototype.set = function(key, value) { + var pointer; + + // TODO: yell if capacity is already full! + + // Tree is empty + if (this.size === 0) { + this.keys[0] = key; + this.values[0] = value; + + this.size++; + + this.root = -1; + + return this; + } + + // Walk state + var pointer = this.root, + newPointer, + leftOrRight, + opposite, + ancestors = [], + path = [], + ancestor, + parent, + child, + critbit, + internal, + best, + dir, + i, + l; + + // Walking the tree + while (true) { + + // Traversing an internal node + if (pointer > 0) { + pointer -= 1; + + // Choosing the correct direction + dir = getDirection(key, this.critbits[pointer]); + + leftOrRight = dir === 0 ? this.lefts : this.rights; + newPointer = leftOrRight[pointer]; + + if (newPointer === 0) { + + // Creating a fitting external node + pointer = this.size++; + leftOrRight[newPointer] = -(pointer + 1); + this.keys[pointer] = key; + this.values[pointer] = value; + return this; + } + + ancestors.push(pointer); + path.push(dir); + pointer = newPointer; + } + + // Reaching an external node + else { + pointer = -pointer; + pointer -= 1; + + // 1. Creating a new external node + critbit = findCriticalBit(key, this.keys[pointer]); + + // Key is identical, we just replace the value + if (critbit === -1) { + this.values[pointer] = value; + return this; + } + + internal = this.offset++; + newPointer = this.size++; + + this.keys[newPointer] = key; + this.values[newPointer] = value; + + this.critbits[internal] = critbit; + + dir = getDirection(key, critbit); + leftOrRight = dir === 0 ? this.lefts : this.rights; + opposite = dir === 0 ? 
this.rights : this.lefts; + + leftOrRight[internal] = -(newPointer + 1); + opposite[internal] = -(pointer + 1); + + // 2. Bubbling up + best = -1; + l = ancestors.length; + + for (i = l - 1; i >= 0; i--) { + ancestor = ancestors[i]; + + // TODO: this can be made faster + if ((this.critbits[ancestor] >> 8) > (critbit >> 8)) { + continue; + } + else if ((this.critbits[ancestor] >> 8) === (critbit >> 8)) { + if ((this.critbits[ancestor] & 0xff) < (critbit & 0xff)) + continue; + } + + best = i; + break; + } + + // Do we need to attach to the root? + if (best < 0) { + this.root = internal + 1; + + // Need to rewire parent as child? + if (l > 0) { + parent = ancestors[0]; + + opposite[internal] = parent + 1; + } + } + + // Simple case without rotation + else if (best === l - 1) { + parent = ancestors[best]; + dir = path[best]; + + leftOrRight = dir === 0 ? this.lefts : this.rights; + + leftOrRight[parent] = internal + 1; + } + + // Full rotation + else { + parent = ancestors[best]; + dir = path[best]; + child = ancestors[best + 1]; + + opposite[internal] = child + 1; + + leftOrRight = dir === 0 ? this.lefts : this.rights; + + leftOrRight[parent] = internal + 1; + } + + return this; + } + } +}; + +/** + * Method used to get the value attached to the given key in the tree or + * undefined if not found. + * + * @param {string} key - Key to get. + * @return {any} + */ +FixedCritBitTreeMap.prototype.get = function(key) { + + // Walk state + var pointer = this.root, + dir; + + // Walking the tree + while (true) { + + // Dead end + if (pointer === 0) + return; + + // Traversing an internal node + if (pointer > 0) { + pointer -= 1; + dir = getDirection(key, this.critbits[pointer]); + + pointer = dir === 0 ? 
this.lefts[pointer] : this.rights[pointer]; + } + + // Reaching an external node + else { + pointer = -pointer; + pointer -= 1; + + if (this.keys[pointer] !== key) + return; + + return this.values[pointer]; + } + } +}; + +/** + * Method used to return whether the given key exists in the tree. + * + * @param {string} key - Key to test. + * @return {boolean} + */ +FixedCritBitTreeMap.prototype.has = function(key) { + + // Walk state + var pointer = this.root, + dir; + + // Walking the tree + while (true) { + + // Dead end + if (pointer === 0) + return false; + + // Traversing an internal node + if (pointer > 0) { + pointer -= 1; + dir = getDirection(key, this.critbits[pointer]); + + pointer = dir === 0 ? this.lefts[pointer] : this.rights[pointer]; + } + + // Reaching an external node + else { + pointer = -pointer; + pointer -= 1; + + return this.keys[pointer] === key; + } + } +}; + +/** + * Method used to iterate over the tree in key order. + * + * @param {function} callback - Function to call for each item. + * @param {object} scope - Optional scope. + * @return {undefined} + */ +FixedCritBitTreeMap.prototype.forEach = function(callback, scope) { + scope = arguments.length > 1 ? scope : this; + + // Inorder traversal of the tree + var current = this.root, + stack = [], + p; + + while (true) { + + if (current !== 0) { + stack.push(current); + + current = current > 0 ? this.lefts[current - 1] : 0; + } + + else { + if (stack.length > 0) { + current = stack.pop(); + + if (current < 0) { + p = -current; + p -= 1; + + callback.call(scope, this.values[p], this.keys[p]); + } + + current = current > 0 ? this.rights[current - 1] : 0; + } + else { + break; + } + } + } +}; + +/** + * Convenience known methods. 
+ */ +FixedCritBitTreeMap.prototype.inspect = function() { + return this; +}; + +if (typeof Symbol !== 'undefined') + FixedCritBitTreeMap.prototype[Symbol.for('nodejs.util.inspect.custom')] = FixedCritBitTreeMap.prototype.inspect; + +/** + * Static @.from function taking an arbitrary iterable & converting it into + * a FixedCritBitTreeMap. + * + * @param {Iterable} iterable - Target iterable. + * @return {FixedCritBitTreeMap} + */ +// FixedCritBitTreeMap.from = function(iterable) { + +// }; + +/** + * Exporting. + */ +module.exports = FixedCritBitTreeMap; diff --git a/amplify/functions/deleteDocument/node_modules/mnemonist/fixed-deque.d.ts b/amplify/functions/deleteDocument/node_modules/mnemonist/fixed-deque.d.ts new file mode 100644 index 0000000..6e6b908 --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/mnemonist/fixed-deque.d.ts @@ -0,0 +1,34 @@ +/** + * Mnemonist FixedDeque Typings + * ============================= + */ +import {IArrayLikeConstructor} from './utils/types'; + +export default class FixedDeque implements Iterable { + + // Members + capacity: number; + size: number; + + // Constructor + constructor(ArrayClass: IArrayLikeConstructor, capacity: number); + + // Methods + clear(): void; + push(item: T): number; + unshift(item: T): number; + pop(): T | undefined; + shift(): T | undefined; + peekFirst(): T | undefined; + peekLast(): T | undefined; + get(index: number): T | undefined; + forEach(callback: (item: T, index: number, buffer: this) => void, scope?: any): void; + toArray(): Iterable; + values(): IterableIterator; + entries(): IterableIterator<[number, T]>; + [Symbol.iterator](): IterableIterator; + inspect(): any; + + // Statics + static from(iterable: Iterable | {[key: string] : I}, ArrayClass: IArrayLikeConstructor, capacity?: number): FixedDeque; +} diff --git a/amplify/functions/deleteDocument/node_modules/mnemonist/fixed-deque.js b/amplify/functions/deleteDocument/node_modules/mnemonist/fixed-deque.js new file mode 100644 
index 0000000..7b29858 --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/mnemonist/fixed-deque.js @@ -0,0 +1,351 @@ +/** + * Mnemonist FixedDeque + * ===================== + * + * Fixed capacity double-ended queue implemented as ring deque. + */ +var iterables = require('./utils/iterables.js'), + Iterator = require('obliterator/iterator'); + +/** + * FixedDeque. + * + * @constructor + */ +function FixedDeque(ArrayClass, capacity) { + + if (arguments.length < 2) + throw new Error('mnemonist/fixed-deque: expecting an Array class and a capacity.'); + + if (typeof capacity !== 'number' || capacity <= 0) + throw new Error('mnemonist/fixed-deque: `capacity` should be a positive number.'); + + this.ArrayClass = ArrayClass; + this.capacity = capacity; + this.items = new ArrayClass(this.capacity); + this.clear(); +} + +/** + * Method used to clear the structure. + * + * @return {undefined} + */ +FixedDeque.prototype.clear = function() { + + // Properties + this.start = 0; + this.size = 0; +}; + +/** + * Method used to append a value to the deque. + * + * @param {any} item - Item to append. + * @return {number} - Returns the new size of the deque. + */ +FixedDeque.prototype.push = function(item) { + if (this.size === this.capacity) + throw new Error('mnemonist/fixed-deque.push: deque capacity (' + this.capacity + ') exceeded!'); + + var index = (this.start + this.size) % this.capacity; + + this.items[index] = item; + + return ++this.size; +}; + +/** + * Method used to prepend a value to the deque. + * + * @param {any} item - Item to prepend. + * @return {number} - Returns the new size of the deque. 
+ */ +FixedDeque.prototype.unshift = function(item) { + if (this.size === this.capacity) + throw new Error('mnemonist/fixed-deque.unshift: deque capacity (' + this.capacity + ') exceeded!'); + + var index = this.start - 1; + + if (this.start === 0) + index = this.capacity - 1; + + this.items[index] = item; + this.start = index; + + return ++this.size; +}; + +/** + * Method used to pop the deque. + * + * @return {any} - Returns the popped item. + */ +FixedDeque.prototype.pop = function() { + if (this.size === 0) + return; + + const index = (this.start + this.size - 1) % this.capacity; + + this.size--; + + return this.items[index]; +}; + +/** + * Method used to shift the deque. + * + * @return {any} - Returns the shifted item. + */ +FixedDeque.prototype.shift = function() { + if (this.size === 0) + return; + + var index = this.start; + + this.size--; + this.start++; + + if (this.start === this.capacity) + this.start = 0; + + return this.items[index]; +}; + +/** + * Method used to peek the first value of the deque. + * + * @return {any} + */ +FixedDeque.prototype.peekFirst = function() { + if (this.size === 0) + return; + + return this.items[this.start]; +}; + +/** + * Method used to peek the last value of the deque. + * + * @return {any} + */ +FixedDeque.prototype.peekLast = function() { + if (this.size === 0) + return; + + var index = this.start + this.size - 1; + + if (index > this.capacity) + index -= this.capacity; + + return this.items[index]; +}; + +/** + * Method used to get the desired value of the deque. + * + * @param {number} index + * @return {any} + */ +FixedDeque.prototype.get = function(index) { + if (this.size === 0) + return; + + index = this.start + index; + + if (index > this.capacity) + index -= this.capacity; + + return this.items[index]; +}; + +/** + * Method used to iterate over the deque. + * + * @param {function} callback - Function to call for each item. + * @param {object} scope - Optional scope. 
+ * @return {undefined} + */ +FixedDeque.prototype.forEach = function(callback, scope) { + scope = arguments.length > 1 ? scope : this; + + var c = this.capacity, + l = this.size, + i = this.start, + j = 0; + + while (j < l) { + callback.call(scope, this.items[i], j, this); + i++; + j++; + + if (i === c) + i = 0; + } +}; + +/** + * Method used to convert the deque to a JavaScript array. + * + * @return {array} + */ +// TODO: optional array class as argument? +FixedDeque.prototype.toArray = function() { + + // Optimization + var offset = this.start + this.size; + + if (offset < this.capacity) + return this.items.slice(this.start, offset); + + var array = new this.ArrayClass(this.size), + c = this.capacity, + l = this.size, + i = this.start, + j = 0; + + while (j < l) { + array[j] = this.items[i]; + i++; + j++; + + if (i === c) + i = 0; + } + + return array; +}; + +/** + * Method used to create an iterator over the deque's values. + * + * @return {Iterator} + */ +FixedDeque.prototype.values = function() { + var items = this.items, + c = this.capacity, + l = this.size, + i = this.start, + j = 0; + + return new Iterator(function() { + if (j >= l) + return { + done: true + }; + + var value = items[i]; + + i++; + j++; + + if (i === c) + i = 0; + + return { + value: value, + done: false + }; + }); +}; + +/** + * Method used to create an iterator over the deque's entries. + * + * @return {Iterator} + */ +FixedDeque.prototype.entries = function() { + var items = this.items, + c = this.capacity, + l = this.size, + i = this.start, + j = 0; + + return new Iterator(function() { + if (j >= l) + return { + done: true + }; + + var value = items[i]; + + i++; + + if (i === c) + i = 0; + + return { + value: [j++, value], + done: false + }; + }); +}; + +/** + * Attaching the #.values method to Symbol.iterator if possible. + */ +if (typeof Symbol !== 'undefined') + FixedDeque.prototype[Symbol.iterator] = FixedDeque.prototype.values; + +/** + * Convenience known methods. 
+ */ +FixedDeque.prototype.inspect = function() { + var array = this.toArray(); + + array.type = this.ArrayClass.name; + array.capacity = this.capacity; + + // Trick so that node displays the name of the constructor + Object.defineProperty(array, 'constructor', { + value: FixedDeque, + enumerable: false + }); + + return array; +}; + +if (typeof Symbol !== 'undefined') + FixedDeque.prototype[Symbol.for('nodejs.util.inspect.custom')] = FixedDeque.prototype.inspect; + +/** + * Static @.from function taking an arbitrary iterable & converting it into + * a deque. + * + * @param {Iterable} iterable - Target iterable. + * @param {function} ArrayClass - Array class to use. + * @param {number} capacity - Desired capacity. + * @return {FiniteStack} + */ +FixedDeque.from = function(iterable, ArrayClass, capacity) { + if (arguments.length < 3) { + capacity = iterables.guessLength(iterable); + + if (typeof capacity !== 'number') + throw new Error('mnemonist/fixed-deque.from: could not guess iterable length. Please provide desired capacity as last argument.'); + } + + var deque = new FixedDeque(ArrayClass, capacity); + + if (iterables.isArrayLike(iterable)) { + var i, l; + + for (i = 0, l = iterable.length; i < l; i++) + deque.items[i] = iterable[i]; + + deque.size = l; + + return deque; + } + + iterables.forEach(iterable, function(value) { + deque.push(value); + }); + + return deque; +}; + +/** + * Exporting. 
+ */ +module.exports = FixedDeque; diff --git a/amplify/functions/deleteDocument/node_modules/mnemonist/fixed-reverse-heap.d.ts b/amplify/functions/deleteDocument/node_modules/mnemonist/fixed-reverse-heap.d.ts new file mode 100644 index 0000000..668c556 --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/mnemonist/fixed-reverse-heap.d.ts @@ -0,0 +1,25 @@ +/** + * Mnemonist FixedReverseHeap Typings + * =================================== + */ +import {IArrayLikeConstructor} from './utils/types'; + +type HeapComparator = (a: T, b: T) => number; + +export default class FixedReverseHeap { + + // Members + capacity: number; + size: number; + + // Constructor + constructor(ArrayClass: IArrayLikeConstructor, comparator: HeapComparator, capacity: number); + constructor(ArrayClass: IArrayLikeConstructor, capacity: number); + + // Methods + clear(): void; + push(item: T): number; + consume(): Iterable; + toArray(): Iterable; + inspect(): any; +} diff --git a/amplify/functions/deleteDocument/node_modules/mnemonist/fixed-reverse-heap.js b/amplify/functions/deleteDocument/node_modules/mnemonist/fixed-reverse-heap.js new file mode 100644 index 0000000..197aac4 --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/mnemonist/fixed-reverse-heap.js @@ -0,0 +1,209 @@ +/** + * Mnemonist Fixed Reverse Heap + * ============================= + * + * Static heap implementation with fixed capacity. It's a "reverse" heap + * because it stores the elements in reverse so we can replace the worst + * item in logarithmic time. As such, one cannot pop this heap but can only + * consume it at the end. This structure is very efficient when trying to + * find the n smallest/largest items from a larger query (k nearest neigbors + * for instance). + */ +var comparators = require('./utils/comparators.js'), + Heap = require('./heap.js'); + +var DEFAULT_COMPARATOR = comparators.DEFAULT_COMPARATOR, + reverseComparator = comparators.reverseComparator; + +/** + * Helper functions. 
+ */ + +/** + * Function used to sift up. + * + * @param {function} compare - Comparison function. + * @param {array} heap - Array storing the heap's data. + * @param {number} size - Heap's true size. + * @param {number} i - Index. + */ +function siftUp(compare, heap, size, i) { + var endIndex = size, + startIndex = i, + item = heap[i], + childIndex = 2 * i + 1, + rightIndex; + + while (childIndex < endIndex) { + rightIndex = childIndex + 1; + + if ( + rightIndex < endIndex && + compare(heap[childIndex], heap[rightIndex]) >= 0 + ) { + childIndex = rightIndex; + } + + heap[i] = heap[childIndex]; + i = childIndex; + childIndex = 2 * i + 1; + } + + heap[i] = item; + Heap.siftDown(compare, heap, startIndex, i); +} + +/** + * Fully consumes the given heap. + * + * @param {function} ArrayClass - Array class to use. + * @param {function} compare - Comparison function. + * @param {array} heap - Array storing the heap's data. + * @param {number} size - True size of the heap. + * @return {array} + */ +function consume(ArrayClass, compare, heap, size) { + var l = size, + i = l; + + var array = new ArrayClass(size), + lastItem, + item; + + while (i > 0) { + lastItem = heap[--i]; + + if (i !== 0) { + item = heap[0]; + heap[0] = lastItem; + siftUp(compare, heap, --size, 0); + lastItem = item; + } + + array[i] = lastItem; + } + + return array; +} + +/** + * Binary Minimum FixedReverseHeap. + * + * @constructor + * @param {function} ArrayClass - The class of array to use. + * @param {function} comparator - Comparator function. + * @param {number} capacity - Maximum number of items to keep. 
+ */ +function FixedReverseHeap(ArrayClass, comparator, capacity) { + + // Comparator can be omitted + if (arguments.length === 2) { + capacity = comparator; + comparator = null; + } + + this.ArrayClass = ArrayClass; + this.capacity = capacity; + + this.items = new ArrayClass(capacity); + this.clear(); + this.comparator = comparator || DEFAULT_COMPARATOR; + + if (typeof capacity !== 'number' && capacity <= 0) + throw new Error('mnemonist/FixedReverseHeap.constructor: capacity should be a number > 0.'); + + if (typeof this.comparator !== 'function') + throw new Error('mnemonist/FixedReverseHeap.constructor: given comparator should be a function.'); + + this.comparator = reverseComparator(this.comparator); +} + +/** + * Method used to clear the heap. + * + * @return {undefined} + */ +FixedReverseHeap.prototype.clear = function() { + + // Properties + this.size = 0; +}; + +/** + * Method used to push an item into the heap. + * + * @param {any} item - Item to push. + * @return {number} + */ +FixedReverseHeap.prototype.push = function(item) { + + // Still some place + if (this.size < this.capacity) { + this.items[this.size] = item; + Heap.siftDown(this.comparator, this.items, 0, this.size); + this.size++; + } + + // Heap is full, we need to replace worst item + else { + + if (this.comparator(item, this.items[0]) > 0) + Heap.replace(this.comparator, this.items, item); + } + + return this.size; +}; + +/** + * Method used to peek the worst item in the heap. + * + * @return {any} + */ +FixedReverseHeap.prototype.peek = function() { + return this.items[0]; +}; + +/** + * Method used to consume the heap fully and return its items as a sorted array. + * + * @return {array} + */ +FixedReverseHeap.prototype.consume = function() { + var items = consume(this.ArrayClass, this.comparator, this.items, this.size); + this.size = 0; + + return items; +}; + +/** + * Method used to convert the heap to an array. Note that it basically clone + * the heap and consumes it completely. 
This is hardly performant. + * + * @return {array} + */ +FixedReverseHeap.prototype.toArray = function() { + return consume(this.ArrayClass, this.comparator, this.items.slice(0, this.size), this.size); +}; + +/** + * Convenience known methods. + */ +FixedReverseHeap.prototype.inspect = function() { + var proxy = this.toArray(); + + // Trick so that node displays the name of the constructor + Object.defineProperty(proxy, 'constructor', { + value: FixedReverseHeap, + enumerable: false + }); + + return proxy; +}; + +if (typeof Symbol !== 'undefined') + FixedReverseHeap.prototype[Symbol.for('nodejs.util.inspect.custom')] = FixedReverseHeap.prototype.inspect; + +/** + * Exporting. + */ +module.exports = FixedReverseHeap; diff --git a/amplify/functions/deleteDocument/node_modules/mnemonist/fixed-stack.d.ts b/amplify/functions/deleteDocument/node_modules/mnemonist/fixed-stack.d.ts new file mode 100644 index 0000000..9965853 --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/mnemonist/fixed-stack.d.ts @@ -0,0 +1,36 @@ +/** + * Mnemonist FixedStack Typings + * ============================= + */ +import {IArrayLikeConstructor} from './utils/types'; + +export default class FixedStack implements Iterable { + + // Members + capacity: number; + size: number; + + // Constructor + constructor(ArrayClass: IArrayLikeConstructor, capacity: number); + + // Methods + clear(): void; + push(item: T): number; + pop(): T | undefined; + peek(): T | undefined; + forEach(callback: (item: T, index: number, stack: this) => void, scope?: any): void; + toArray(): Iterable; + values(): IterableIterator; + entries(): IterableIterator<[number, T]>; + [Symbol.iterator](): IterableIterator; + toString(): string; + toJSON(): Iterable; + inspect(): any; + + // Statics + static from( + iterable: Iterable | {[key: string] : I}, + ArrayClass: IArrayLikeConstructor, + capacity?: number + ): FixedStack; +} diff --git a/amplify/functions/deleteDocument/node_modules/mnemonist/fixed-stack.js 
b/amplify/functions/deleteDocument/node_modules/mnemonist/fixed-stack.js new file mode 100644 index 0000000..c5b5f48 --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/mnemonist/fixed-stack.js @@ -0,0 +1,242 @@ +/** + * Mnemonist FixedStack + * ===================== + * + * The fixed stack is a stack whose capacity is defined beforehand and that + * cannot be exceeded. This class is really useful when combined with + * byte arrays to save up some memory and avoid memory re-allocation, hence + * speeding up computations. + * + * This has however a downside: you need to know the maximum size you stack + * can have during your iteration (which is not too difficult to compute when + * performing, say, a DFS on a balanced binary tree). + */ +var Iterator = require('obliterator/iterator'), + iterables = require('./utils/iterables.js'); + +/** + * FixedStack + * + * @constructor + * @param {function} ArrayClass - Array class to use. + * @param {number} capacity - Desired capacity. + */ +function FixedStack(ArrayClass, capacity) { + + if (arguments.length < 2) + throw new Error('mnemonist/fixed-stack: expecting an Array class and a capacity.'); + + if (typeof capacity !== 'number' || capacity <= 0) + throw new Error('mnemonist/fixed-stack: `capacity` should be a positive number.'); + + this.capacity = capacity; + this.ArrayClass = ArrayClass; + this.items = new this.ArrayClass(this.capacity); + this.clear(); +} + +/** + * Method used to clear the stack. + * + * @return {undefined} + */ +FixedStack.prototype.clear = function() { + + // Properties + this.size = 0; +}; + +/** + * Method used to add an item to the stack. + * + * @param {any} item - Item to add. 
+ * @return {number} + */ +FixedStack.prototype.push = function(item) { + if (this.size === this.capacity) + throw new Error('mnemonist/fixed-stack.push: stack capacity (' + this.capacity + ') exceeded!'); + + this.items[this.size++] = item; + return this.size; +}; + +/** + * Method used to retrieve & remove the last item of the stack. + * + * @return {any} + */ +FixedStack.prototype.pop = function() { + if (this.size === 0) + return; + + return this.items[--this.size]; +}; + +/** + * Method used to get the last item of the stack. + * + * @return {any} + */ +FixedStack.prototype.peek = function() { + return this.items[this.size - 1]; +}; + +/** + * Method used to iterate over the stack. + * + * @param {function} callback - Function to call for each item. + * @param {object} scope - Optional scope. + * @return {undefined} + */ +FixedStack.prototype.forEach = function(callback, scope) { + scope = arguments.length > 1 ? scope : this; + + for (var i = 0, l = this.items.length; i < l; i++) + callback.call(scope, this.items[l - i - 1], i, this); +}; + +/** + * Method used to convert the stack to a JavaScript array. + * + * @return {array} + */ +FixedStack.prototype.toArray = function() { + var array = new this.ArrayClass(this.size), + l = this.size - 1, + i = this.size; + + while (i--) + array[i] = this.items[l - i]; + + return array; +}; + +/** + * Method used to create an iterator over a stack's values. + * + * @return {Iterator} + */ +FixedStack.prototype.values = function() { + var items = this.items, + l = this.size, + i = 0; + + return new Iterator(function() { + if (i >= l) + return { + done: true + }; + + var value = items[l - i - 1]; + i++; + + return { + value: value, + done: false + }; + }); +}; + +/** + * Method used to create an iterator over a stack's entries. 
+ * + * @return {Iterator} + */ +FixedStack.prototype.entries = function() { + var items = this.items, + l = this.size, + i = 0; + + return new Iterator(function() { + if (i >= l) + return { + done: true + }; + + var value = items[l - i - 1]; + + return { + value: [i++, value], + done: false + }; + }); +}; + +/** + * Attaching the #.values method to Symbol.iterator if possible. + */ +if (typeof Symbol !== 'undefined') + FixedStack.prototype[Symbol.iterator] = FixedStack.prototype.values; + + +/** + * Convenience known methods. + */ +FixedStack.prototype.toString = function() { + return this.toArray().join(','); +}; + +FixedStack.prototype.toJSON = function() { + return this.toArray(); +}; + +FixedStack.prototype.inspect = function() { + var array = this.toArray(); + + array.type = this.ArrayClass.name; + array.capacity = this.capacity; + + // Trick so that node displays the name of the constructor + Object.defineProperty(array, 'constructor', { + value: FixedStack, + enumerable: false + }); + + return array; +}; + +if (typeof Symbol !== 'undefined') + FixedStack.prototype[Symbol.for('nodejs.util.inspect.custom')] = FixedStack.prototype.inspect; + +/** + * Static @.from function taking an arbitrary iterable & converting it into + * a stack. + * + * @param {Iterable} iterable - Target iterable. + * @param {function} ArrayClass - Array class to use. + * @param {number} capacity - Desired capacity. + * @return {FixedStack} + */ +FixedStack.from = function(iterable, ArrayClass, capacity) { + + if (arguments.length < 3) { + capacity = iterables.guessLength(iterable); + + if (typeof capacity !== 'number') + throw new Error('mnemonist/fixed-stack.from: could not guess iterable length. 
Please provide desired capacity as last argument.'); + } + + var stack = new FixedStack(ArrayClass, capacity); + + if (iterables.isArrayLike(iterable)) { + var i, l; + + for (i = 0, l = iterable.length; i < l; i++) + stack.items[i] = iterable[i]; + + stack.size = l; + + return stack; + } + + iterables.forEach(iterable, function(value) { + stack.push(value); + }); + + return stack; +}; + +/** + * Exporting. + */ +module.exports = FixedStack; diff --git a/amplify/functions/deleteDocument/node_modules/mnemonist/fuzzy-map.d.ts b/amplify/functions/deleteDocument/node_modules/mnemonist/fuzzy-map.d.ts new file mode 100644 index 0000000..7a1644d --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/mnemonist/fuzzy-map.d.ts @@ -0,0 +1,33 @@ +/** + * Mnemonist FuzzyMap Typings + * ========================== + */ +type HashFunction = (key: any) => K; +type HashFunctionsTuple = [HashFunction, HashFunction]; + +export default class FuzzyMap implements Iterable { + + // Members + size: number; + + // Constructor + constructor(hashFunction: HashFunction); + constructor(hashFunctionsTuple: HashFunctionsTuple); + + // Methods + clear(): void; + add(key: V): this; + set(key: K, value: V): this; + get(key: any): V | undefined; + has(key: any): boolean; + forEach(callback: (value: V, key: V) => void, scope?: this): void; + values(): IterableIterator; + [Symbol.iterator](): IterableIterator; + inspect(): any; + + // Statics + static from( + iterable: Iterable<[I, J]> | {[key: string]: J}, + hashFunction: HashFunction | HashFunctionsTuple, + ): FuzzyMap; +} \ No newline at end of file diff --git a/amplify/functions/deleteDocument/node_modules/mnemonist/fuzzy-map.js b/amplify/functions/deleteDocument/node_modules/mnemonist/fuzzy-map.js new file mode 100644 index 0000000..b0d52e1 --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/mnemonist/fuzzy-map.js @@ -0,0 +1,185 @@ +/** + * Mnemonist Fuzzy Map + * ==================== + * + * The fuzzy map is a map whose 
keys are processed by a function before + * read/write operations. This can often result in multiple keys accessing + * the same resource (example: a map with lowercased keys). + */ +var forEach = require('obliterator/foreach'); + +var identity = function(x) { + return x; +}; + +/** + * FuzzyMap. + * + * @constructor + * @param {array|function} descriptor - Hash functions descriptor. + */ +function FuzzyMap(descriptor) { + this.items = new Map(); + this.clear(); + + if (Array.isArray(descriptor)) { + this.writeHashFunction = descriptor[0]; + this.readHashFunction = descriptor[1]; + } + else { + this.writeHashFunction = descriptor; + this.readHashFunction = descriptor; + } + + if (!this.writeHashFunction) + this.writeHashFunction = identity; + if (!this.readHashFunction) + this.readHashFunction = identity; + + if (typeof this.writeHashFunction !== 'function') + throw new Error('mnemonist/FuzzyMap.constructor: invalid hash function given.'); + + if (typeof this.readHashFunction !== 'function') + throw new Error('mnemonist/FuzzyMap.constructor: invalid hash function given.'); +} + +/** + * Method used to clear the structure. + * + * @return {undefined} + */ +FuzzyMap.prototype.clear = function() { + this.items.clear(); + + // Properties + this.size = 0; +}; + +/** + * Method used to add an item to the FuzzyMap. + * + * @param {any} item - Item to add. + * @return {FuzzyMap} + */ +FuzzyMap.prototype.add = function(item) { + var key = this.writeHashFunction(item); + + this.items.set(key, item); + this.size = this.items.size; + + return this; +}; + +/** + * Method used to set an item in the FuzzyMap using the given key. + * + * @param {any} key - Key to use. + * @param {any} item - Item to add. + * @return {FuzzyMap} + */ +FuzzyMap.prototype.set = function(key, item) { + key = this.writeHashFunction(key); + + this.items.set(key, item); + this.size = this.items.size; + + return this; +}; + +/** + * Method used to retrieve an item from the FuzzyMap. 
+ * + * @param {any} key - Key to use. + * @return {any} + */ +FuzzyMap.prototype.get = function(key) { + key = this.readHashFunction(key); + + return this.items.get(key); +}; + +/** + * Method used to test the existence of an item in the map. + * + * @param {any} key - Key to check. + * @return {boolean} + */ +FuzzyMap.prototype.has = function(key) { + key = this.readHashFunction(key); + + return this.items.has(key); +}; + +/** + * Method used to iterate over each of the FuzzyMap's values. + * + * @param {function} callback - Function to call for each item. + * @param {object} scope - Optional scope. + * @return {undefined} + */ +FuzzyMap.prototype.forEach = function(callback, scope) { + scope = arguments.length > 1 ? scope : this; + + this.items.forEach(function(value) { + callback.call(scope, value, value); + }); +}; + +/** + * Method returning an iterator over the FuzzyMap's values. + * + * @return {FuzzyMapIterator} + */ +FuzzyMap.prototype.values = function() { + return this.items.values(); +}; + +/** + * Attaching the #.values method to Symbol.iterator if possible. + */ +if (typeof Symbol !== 'undefined') + FuzzyMap.prototype[Symbol.iterator] = FuzzyMap.prototype.values; + +/** + * Convenience known method. + */ +FuzzyMap.prototype.inspect = function() { + var array = Array.from(this.items.values()); + + Object.defineProperty(array, 'constructor', { + value: FuzzyMap, + enumerable: false + }); + + return array; +}; + +if (typeof Symbol !== 'undefined') + FuzzyMap.prototype[Symbol.for('nodejs.util.inspect.custom')] = FuzzyMap.prototype.inspect; + +/** + * Static @.from function taking an arbitrary iterable & converting it into + * a structure. + * + * @param {Iterable} iterable - Target iterable. + * @param {array|function} descriptor - Hash functions descriptor. 
+ * @param {boolean} useSet - Whether to use #.set or #.add + * @return {FuzzyMap} + */ +FuzzyMap.from = function(iterable, descriptor, useSet) { + var map = new FuzzyMap(descriptor); + + forEach(iterable, function(value, key) { + if (useSet) + map.set(key, value); + else + map.add(value); + }); + + return map; +}; + +/** + * Exporting. + */ +module.exports = FuzzyMap; diff --git a/amplify/functions/deleteDocument/node_modules/mnemonist/fuzzy-multi-map.d.ts b/amplify/functions/deleteDocument/node_modules/mnemonist/fuzzy-multi-map.d.ts new file mode 100644 index 0000000..62b8250 --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/mnemonist/fuzzy-multi-map.d.ts @@ -0,0 +1,36 @@ +/** + * Mnemonist FuzzyMultiMap Typings + * ================================ + */ +type HashFunction = (key: any) => K; +type HashFunctionsTuple = [HashFunction, HashFunction]; +type FuzzyMultiMapContainer = ArrayConstructor | SetConstructor; + +export default class FuzzyMultiMap implements Iterable { + + // Members + dimension: number; + size: number; + + // Constructor + constructor(hashFunction: HashFunction, Container?: FuzzyMultiMapContainer); + constructor(hashFunctions: HashFunctionsTuple, Container?: FuzzyMultiMapContainer); + + // Methods + clear(): void; + add(value: V): this; + set(key: K, value: V): this; + get(key: any): Array | Set | undefined; + has(key: any): boolean; + forEach(callback: (value: V, key: V) => void, scope?: any): void; + values(): IterableIterator; + [Symbol.iterator](): IterableIterator; + inspect(): any; + + // Statics + static from( + iterable: Iterable<[I, J]> | {[key: string]: J}, + hashFunction: HashFunction | HashFunctionsTuple, + Container?: FuzzyMultiMapContainer + ): FuzzyMultiMap; +} diff --git a/amplify/functions/deleteDocument/node_modules/mnemonist/fuzzy-multi-map.js b/amplify/functions/deleteDocument/node_modules/mnemonist/fuzzy-multi-map.js new file mode 100644 index 0000000..78b2b08 --- /dev/null +++ 
b/amplify/functions/deleteDocument/node_modules/mnemonist/fuzzy-multi-map.js @@ -0,0 +1,196 @@ +/** + * Mnemonist FuzzyMultiMap + * ======================== + * + * Same as the fuzzy map but relying on a MultiMap rather than a Map. + */ +var MultiMap = require('./multi-map.js'), + forEach = require('obliterator/foreach'); + +var identity = function(x) { + return x; +}; + +/** + * FuzzyMultiMap. + * + * @constructor + * @param {array|function} descriptor - Hash functions descriptor. + * @param {function} Container - Container to use. + */ +function FuzzyMultiMap(descriptor, Container) { + this.items = new MultiMap(Container); + this.clear(); + + if (Array.isArray(descriptor)) { + this.writeHashFunction = descriptor[0]; + this.readHashFunction = descriptor[1]; + } + else { + this.writeHashFunction = descriptor; + this.readHashFunction = descriptor; + } + + if (!this.writeHashFunction) + this.writeHashFunction = identity; + if (!this.readHashFunction) + this.readHashFunction = identity; + + if (typeof this.writeHashFunction !== 'function') + throw new Error('mnemonist/FuzzyMultiMap.constructor: invalid hash function given.'); + + if (typeof this.readHashFunction !== 'function') + throw new Error('mnemonist/FuzzyMultiMap.constructor: invalid hash function given.'); +} + +/** + * Method used to clear the structure. + * + * @return {undefined} + */ +FuzzyMultiMap.prototype.clear = function() { + this.items.clear(); + + // Properties + this.size = 0; + this.dimension = 0; +}; + +/** + * Method used to add an item to the index. + * + * @param {any} item - Item to add. + * @return {FuzzyMultiMap} + */ +FuzzyMultiMap.prototype.add = function(item) { + var key = this.writeHashFunction(item); + + this.items.set(key, item); + this.size = this.items.size; + this.dimension = this.items.dimension; + + return this; +}; + +/** + * Method used to set an item in the index using the given key. + * + * @param {any} key - Key to use. + * @param {any} item - Item to add. 
+ * @return {FuzzyMultiMap} + */ +FuzzyMultiMap.prototype.set = function(key, item) { + key = this.writeHashFunction(key); + + this.items.set(key, item); + this.size = this.items.size; + this.dimension = this.items.dimension; + + return this; +}; + +/** + * Method used to retrieve an item from the index. + * + * @param {any} key - Key to use. + * @return {any} + */ +FuzzyMultiMap.prototype.get = function(key) { + key = this.readHashFunction(key); + + return this.items.get(key); +}; + +/** + * Method used to test the existence of an item in the map. + * + * @param {any} key - Key to check. + * @return {boolean} + */ +FuzzyMultiMap.prototype.has = function(key) { + key = this.readHashFunction(key); + + return this.items.has(key); +}; + +/** + * Method used to iterate over each of the index's values. + * + * @param {function} callback - Function to call for each item. + * @param {object} scope - Optional scope. + * @return {undefined} + */ +FuzzyMultiMap.prototype.forEach = function(callback, scope) { + scope = arguments.length > 1 ? scope : this; + + this.items.forEach(function(value) { + callback.call(scope, value, value); + }); +}; + +/** + * Method returning an iterator over the index's values. + * + * @return {FuzzyMultiMapIterator} + */ +FuzzyMultiMap.prototype.values = function() { + return this.items.values(); +}; + +/** + * Attaching the #.values method to Symbol.iterator if possible. + */ +if (typeof Symbol !== 'undefined') + FuzzyMultiMap.prototype[Symbol.iterator] = FuzzyMultiMap.prototype.values; + +/** + * Convenience known method. 
+ */ +FuzzyMultiMap.prototype.inspect = function() { + var array = Array.from(this); + + Object.defineProperty(array, 'constructor', { + value: FuzzyMultiMap, + enumerable: false + }); + + return array; +}; + +if (typeof Symbol !== 'undefined') + FuzzyMultiMap.prototype[Symbol.for('nodejs.util.inspect.custom')] = FuzzyMultiMap.prototype.inspect; + +/** + * Static @.from function taking an arbitrary iterable & converting it into + * a structure. + * + * @param {Iterable} iterable - Target iterable. + * @param {array|function} descriptor - Hash functions descriptor. + * @param {function} Container - Container to use. + * @param {boolean} useSet - Whether to use #.set or #.add + * @return {FuzzyMultiMap} + */ +FuzzyMultiMap.from = function(iterable, descriptor, Container, useSet) { + if (arguments.length === 3) { + if (typeof Container === 'boolean') { + useSet = Container; + Container = Array; + } + } + + var map = new FuzzyMultiMap(descriptor, Container); + + forEach(iterable, function(value, key) { + if (useSet) + map.set(key, value); + else + map.add(value); + }); + + return map; +}; + +/** + * Exporting. 
+ */ +module.exports = FuzzyMultiMap; diff --git a/amplify/functions/deleteDocument/node_modules/mnemonist/hashed-array-tree.d.ts b/amplify/functions/deleteDocument/node_modules/mnemonist/hashed-array-tree.d.ts new file mode 100644 index 0000000..eb56f7c --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/mnemonist/hashed-array-tree.d.ts @@ -0,0 +1,32 @@ +/** + * Mnemonist HashedArrayTree Typings + * ================================== + */ +import {IArrayLikeConstructor} from './utils/types'; + +type HashedArrayTreeOptions = { + initialCapacity?: number; + initialLength?: number; + blockSize?: number; +} + +export default class HashedArrayTree { + + // Members + blockSize: number; + capacity: number; + length: number; + + // Constructor + constructor(ArrayClass: IArrayLikeConstructor, capacity: number); + constructor(ArrayClass: IArrayLikeConstructor, options: HashedArrayTreeOptions); + + // Methods + set(index: number, value: T): this; + get(index: number): T | undefined; + grow(capacity: number): this; + resize(length: number): this; + push(value: T): number; + pop(): T | undefined; + inspect(): any; +} diff --git a/amplify/functions/deleteDocument/node_modules/mnemonist/hashed-array-tree.js b/amplify/functions/deleteDocument/node_modules/mnemonist/hashed-array-tree.js new file mode 100644 index 0000000..a51667c --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/mnemonist/hashed-array-tree.js @@ -0,0 +1,209 @@ +/** + * Mnemonist HashedArrayTree + * ========================== + * + * Abstract implementation of a hashed array tree representing arrays growing + * dynamically. + */ + +/** + * Defaults. + */ +var DEFAULT_BLOCK_SIZE = 1024; + +/** + * Helpers. + */ +function powerOfTwo(x) { + return (x & (x - 1)) === 0; +} + +/** + * HashedArrayTree. + * + * @constructor + * @param {function} ArrayClass - An array constructor. + * @param {number|object} initialCapacityOrOptions - Self-explanatory. 
+ */ +function HashedArrayTree(ArrayClass, initialCapacityOrOptions) { + if (arguments.length < 1) + throw new Error('mnemonist/hashed-array-tree: expecting at least a byte array constructor.'); + + var initialCapacity = initialCapacityOrOptions || 0, + blockSize = DEFAULT_BLOCK_SIZE, + initialLength = 0; + + if (typeof initialCapacityOrOptions === 'object') { + initialCapacity = initialCapacityOrOptions.initialCapacity || 0; + initialLength = initialCapacityOrOptions.initialLength || 0; + blockSize = initialCapacityOrOptions.blockSize || DEFAULT_BLOCK_SIZE; + } + + if (!blockSize || !powerOfTwo(blockSize)) + throw new Error('mnemonist/hashed-array-tree: block size should be a power of two.'); + + var capacity = Math.max(initialLength, initialCapacity), + initialBlocks = Math.ceil(capacity / blockSize); + + this.ArrayClass = ArrayClass; + this.length = initialLength; + this.capacity = initialBlocks * blockSize; + this.blockSize = blockSize; + this.offsetMask = blockSize - 1; + this.blockMask = Math.log2(blockSize); + + // Allocating initial blocks + this.blocks = new Array(initialBlocks); + + for (var i = 0; i < initialBlocks; i++) + this.blocks[i] = new this.ArrayClass(this.blockSize); +} + +/** + * Method used to set a value. + * + * @param {number} index - Index to edit. + * @param {any} value - Value. + * @return {HashedArrayTree} + */ +HashedArrayTree.prototype.set = function(index, value) { + + // Out of bounds? + if (this.length < index) + throw new Error('HashedArrayTree(' + this.ArrayClass.name + ').set: index out of bounds.'); + + var block = index >> this.blockMask, + i = index & this.offsetMask; + + this.blocks[block][i] = value; + + return this; +}; + +/** + * Method used to get a value. + * + * @param {number} index - Index to retrieve. 
+ * @return {any} + */ +HashedArrayTree.prototype.get = function(index) { + if (this.length < index) + return; + + var block = index >> this.blockMask, + i = index & this.offsetMask; + + return this.blocks[block][i]; +}; + +/** + * Method used to grow the array. + * + * @param {number} capacity - Optional capacity to accomodate. + * @return {HashedArrayTree} + */ +HashedArrayTree.prototype.grow = function(capacity) { + if (typeof capacity !== 'number') + capacity = this.capacity + this.blockSize; + + if (this.capacity >= capacity) + return this; + + while (this.capacity < capacity) { + this.blocks.push(new this.ArrayClass(this.blockSize)); + this.capacity += this.blockSize; + } + + return this; +}; + +/** + * Method used to resize the array. Won't deallocate. + * + * @param {number} length - Target length. + * @return {HashedArrayTree} + */ +HashedArrayTree.prototype.resize = function(length) { + if (length === this.length) + return this; + + if (length < this.length) { + this.length = length; + return this; + } + + this.length = length; + this.grow(length); + + return this; +}; + +/** + * Method used to push a value into the array. + * + * @param {any} value - Value to push. + * @return {number} - Length of the array. + */ +HashedArrayTree.prototype.push = function(value) { + if (this.capacity === this.length) + this.grow(); + + var index = this.length; + + var block = index >> this.blockMask, + i = index & this.offsetMask; + + this.blocks[block][i] = value; + + return ++this.length; +}; + +/** + * Method used to pop the last value of the array. + * + * @return {number} - The popped value. + */ +HashedArrayTree.prototype.pop = function() { + if (this.length === 0) + return; + + var lastBlock = this.blocks[this.blocks.length - 1]; + + var i = (--this.length) & this.offsetMask; + + return lastBlock[i]; +}; + +/** + * Convenience known methods. 
+ */ +HashedArrayTree.prototype.inspect = function() { + var proxy = new this.ArrayClass(this.length), + block; + + for (var i = 0, l = this.length; i < l; i++) { + block = i >> this.blockMask; + proxy[i] = this.blocks[block][i & this.offsetMask]; + } + + proxy.type = this.ArrayClass.name; + proxy.items = this.length; + proxy.capacity = this.capacity; + proxy.blockSize = this.blockSize; + + // Trick so that node displays the name of the constructor + Object.defineProperty(proxy, 'constructor', { + value: HashedArrayTree, + enumerable: false + }); + + return proxy; +}; + +if (typeof Symbol !== 'undefined') + HashedArrayTree.prototype[Symbol.for('nodejs.util.inspect.custom')] = HashedArrayTree.prototype.inspect; + +/** + * Exporting. + */ +module.exports = HashedArrayTree; diff --git a/amplify/functions/deleteDocument/node_modules/mnemonist/heap.d.ts b/amplify/functions/deleteDocument/node_modules/mnemonist/heap.d.ts new file mode 100644 index 0000000..c6aa219 --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/mnemonist/heap.d.ts @@ -0,0 +1,84 @@ +/** + * Mnemonist Heap Typings + * ======================= + */ +type HeapComparator = (a: T, b: T) => number; + +export default class Heap { + + // Members + size: number; + + // Constructor + constructor(comparator?: HeapComparator); + + // Methods + clear(): void; + push(item: T): number; + peek(): T | undefined; + pop(): T | undefined; + replace(item: T): T | undefined; + pushpop(item: T): T | undefined; + toArray(): Array; + consume(): Array; + inspect(): any; + + // Statics + static from( + iterable: Iterable | {[key: string] : I}, + comparator?: HeapComparator + ): Heap; +} + +export class MinHeap { + + // Members + size: number; + + // Constructor + constructor(comparator?: HeapComparator); + + // Methods + clear(): void; + push(item: T): number; + peek(): T | undefined; + pop(): T | undefined; + replace(item: T): T | undefined; + pushpop(item: T): T | undefined; + toArray(): Array; + consume(): 
Array; + inspect(): any; +} + +export class MaxHeap { + + // Members + size: number; + + // Constructor + constructor(comparator?: HeapComparator); + + // Methods + clear(): void; + push(item: T): number; + peek(): T | undefined; + pop(): T | undefined; + replace(item: T): T | undefined; + pushpop(item: T): T | undefined; + toArray(): Array; + consume(): Array; + inspect(): any; +} + +// Static helpers +export function push(comparator: HeapComparator, heap: Array, item: T): void; +export function pop(comparator: HeapComparator, heap: Array): T; +export function replace(comparator: HeapComparator, heap: Array, item: T): T; +export function pushpop(comparator: HeapComparator, heap: Array, item: T): T; +export function heapify(comparator: HeapComparator, array: Array): void; +export function consume(comparator: HeapComparator, heap: Array): Array; + +export function nsmallest(comparator: HeapComparator, n: number, values: Iterable): Array; +export function nsmallest(n: number, values: Iterable): Array; +export function nlargest(comparator: HeapComparator, n: number, values: Iterable): Array; +export function nlargest(n: number, values: Iterable): Array; diff --git a/amplify/functions/deleteDocument/node_modules/mnemonist/heap.js b/amplify/functions/deleteDocument/node_modules/mnemonist/heap.js new file mode 100644 index 0000000..90eb971 --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/mnemonist/heap.js @@ -0,0 +1,576 @@ +/** + * Mnemonist Binary Heap + * ====================== + * + * Binary heap implementation. + */ +var forEach = require('obliterator/foreach'), + comparators = require('./utils/comparators.js'), + iterables = require('./utils/iterables.js'); + +var DEFAULT_COMPARATOR = comparators.DEFAULT_COMPARATOR, + reverseComparator = comparators.reverseComparator; + +/** + * Heap helper functions. + */ + +/** + * Function used to sift down. + * + * @param {function} compare - Comparison function. 
+ * @param {array} heap - Array storing the heap's data. + * @param {number} startIndex - Starting index. + * @param {number} i - Index. + */ +function siftDown(compare, heap, startIndex, i) { + var item = heap[i], + parentIndex, + parent; + + while (i > startIndex) { + parentIndex = (i - 1) >> 1; + parent = heap[parentIndex]; + + if (compare(item, parent) < 0) { + heap[i] = parent; + i = parentIndex; + continue; + } + + break; + } + + heap[i] = item; +} + +/** + * Function used to sift up. + * + * @param {function} compare - Comparison function. + * @param {array} heap - Array storing the heap's data. + * @param {number} i - Index. + */ +function siftUp(compare, heap, i) { + var endIndex = heap.length, + startIndex = i, + item = heap[i], + childIndex = 2 * i + 1, + rightIndex; + + while (childIndex < endIndex) { + rightIndex = childIndex + 1; + + if ( + rightIndex < endIndex && + compare(heap[childIndex], heap[rightIndex]) >= 0 + ) { + childIndex = rightIndex; + } + + heap[i] = heap[childIndex]; + i = childIndex; + childIndex = 2 * i + 1; + } + + heap[i] = item; + siftDown(compare, heap, startIndex, i); +} + +/** + * Function used to push an item into a heap represented by a raw array. + * + * @param {function} compare - Comparison function. + * @param {array} heap - Array storing the heap's data. + * @param {any} item - Item to push. + */ +function push(compare, heap, item) { + heap.push(item); + siftDown(compare, heap, 0, heap.length - 1); +} + +/** + * Function used to pop an item from a heap represented by a raw array. + * + * @param {function} compare - Comparison function. + * @param {array} heap - Array storing the heap's data. + * @return {any} + */ +function pop(compare, heap) { + var lastItem = heap.pop(); + + if (heap.length !== 0) { + var item = heap[0]; + heap[0] = lastItem; + siftUp(compare, heap, 0); + + return item; + } + + return lastItem; +} + +/** + * Function used to pop the heap then push a new value into it, thus "replacing" + * it. 
+ * + * @param {function} compare - Comparison function. + * @param {array} heap - Array storing the heap's data. + * @param {any} item - The item to push. + * @return {any} + */ +function replace(compare, heap, item) { + if (heap.length === 0) + throw new Error('mnemonist/heap.replace: cannot pop an empty heap.'); + + var popped = heap[0]; + heap[0] = item; + siftUp(compare, heap, 0); + + return popped; +} + +/** + * Function used to push an item in the heap then pop the heap and return the + * popped value. + * + * @param {function} compare - Comparison function. + * @param {array} heap - Array storing the heap's data. + * @param {any} item - The item to push. + * @return {any} + */ +function pushpop(compare, heap, item) { + var tmp; + + if (heap.length !== 0 && compare(heap[0], item) < 0) { + tmp = heap[0]; + heap[0] = item; + item = tmp; + siftUp(compare, heap, 0); + } + + return item; +} + +/** + * Converts and array into an abstract heap in linear time. + * + * @param {function} compare - Comparison function. + * @param {array} array - Target array. + */ +function heapify(compare, array) { + var n = array.length, + l = n >> 1, + i = l; + + while (--i >= 0) + siftUp(compare, array, i); +} + +/** + * Fully consumes the given heap. + * + * @param {function} compare - Comparison function. + * @param {array} heap - Array storing the heap's data. + * @return {array} + */ +function consume(compare, heap) { + var l = heap.length, + i = 0; + + var array = new Array(l); + + while (i < l) + array[i++] = pop(compare, heap); + + return array; +} + +/** + * Function used to retrieve the n smallest items from the given iterable. + * + * @param {function} compare - Comparison function. + * @param {number} n - Number of top items to retrieve. + * @param {any} iterable - Arbitrary iterable. 
+ * @param {array} + */ +function nsmallest(compare, n, iterable) { + if (arguments.length === 2) { + iterable = n; + n = compare; + compare = DEFAULT_COMPARATOR; + } + + var reverseCompare = reverseComparator(compare); + + var i, l, v; + + var min = Infinity; + + var result; + + // If n is equal to 1, it's just a matter of finding the minimum + if (n === 1) { + if (iterables.isArrayLike(iterable)) { + for (i = 0, l = iterable.length; i < l; i++) { + v = iterable[i]; + + if (min === Infinity || compare(v, min) < 0) + min = v; + } + + result = new iterable.constructor(1); + result[0] = min; + + return result; + } + + forEach(iterable, function(value) { + if (min === Infinity || compare(value, min) < 0) + min = value; + }); + + return [min]; + } + + if (iterables.isArrayLike(iterable)) { + + // If n > iterable length, we just clone and sort + if (n >= iterable.length) + return iterable.slice().sort(compare); + + result = iterable.slice(0, n); + heapify(reverseCompare, result); + + for (i = n, l = iterable.length; i < l; i++) + if (reverseCompare(iterable[i], result[0]) > 0) + replace(reverseCompare, result, iterable[i]); + + // NOTE: if n is over some number, it becomes faster to consume the heap + return result.sort(compare); + } + + // Correct for size + var size = iterables.guessLength(iterable); + + if (size !== null && size < n) + n = size; + + result = new Array(n); + i = 0; + + forEach(iterable, function(value) { + if (i < n) { + result[i] = value; + } + else { + if (i === n) + heapify(reverseCompare, result); + + if (reverseCompare(value, result[0]) > 0) + replace(reverseCompare, result, value); + } + + i++; + }); + + if (result.length > i) + result.length = i; + + // NOTE: if n is over some number, it becomes faster to consume the heap + return result.sort(compare); +} + +/** + * Function used to retrieve the n largest items from the given iterable. + * + * @param {function} compare - Comparison function. 
+ * @param {number} n - Number of top items to retrieve. + * @param {any} iterable - Arbitrary iterable. + * @param {array} + */ +function nlargest(compare, n, iterable) { + if (arguments.length === 2) { + iterable = n; + n = compare; + compare = DEFAULT_COMPARATOR; + } + + var reverseCompare = reverseComparator(compare); + + var i, l, v; + + var max = -Infinity; + + var result; + + // If n is equal to 1, it's just a matter of finding the maximum + if (n === 1) { + if (iterables.isArrayLike(iterable)) { + for (i = 0, l = iterable.length; i < l; i++) { + v = iterable[i]; + + if (max === -Infinity || compare(v, max) > 0) + max = v; + } + + result = new iterable.constructor(1); + result[0] = max; + + return result; + } + + forEach(iterable, function(value) { + if (max === -Infinity || compare(value, max) > 0) + max = value; + }); + + return [max]; + } + + if (iterables.isArrayLike(iterable)) { + + // If n > iterable length, we just clone and sort + if (n >= iterable.length) + return iterable.slice().sort(reverseCompare); + + result = iterable.slice(0, n); + heapify(compare, result); + + for (i = n, l = iterable.length; i < l; i++) + if (compare(iterable[i], result[0]) > 0) + replace(compare, result, iterable[i]); + + // NOTE: if n is over some number, it becomes faster to consume the heap + return result.sort(reverseCompare); + } + + // Correct for size + var size = iterables.guessLength(iterable); + + if (size !== null && size < n) + n = size; + + result = new Array(n); + i = 0; + + forEach(iterable, function(value) { + if (i < n) { + result[i] = value; + } + else { + if (i === n) + heapify(compare, result); + + if (compare(value, result[0]) > 0) + replace(compare, result, value); + } + + i++; + }); + + if (result.length > i) + result.length = i; + + // NOTE: if n is over some number, it becomes faster to consume the heap + return result.sort(reverseCompare); +} + +/** + * Binary Minimum Heap. 
+ * + * @constructor + * @param {function} comparator - Comparator function to use. + */ +function Heap(comparator) { + this.clear(); + this.comparator = comparator || DEFAULT_COMPARATOR; + + if (typeof this.comparator !== 'function') + throw new Error('mnemonist/Heap.constructor: given comparator should be a function.'); +} + +/** + * Method used to clear the heap. + * + * @return {undefined} + */ +Heap.prototype.clear = function() { + + // Properties + this.items = []; + this.size = 0; +}; + +/** + * Method used to push an item into the heap. + * + * @param {any} item - Item to push. + * @return {number} + */ +Heap.prototype.push = function(item) { + push(this.comparator, this.items, item); + return ++this.size; +}; + +/** + * Method used to retrieve the "first" item of the heap. + * + * @return {any} + */ +Heap.prototype.peek = function() { + return this.items[0]; +}; + +/** + * Method used to retrieve & remove the "first" item of the heap. + * + * @return {any} + */ +Heap.prototype.pop = function() { + if (this.size !== 0) + this.size--; + + return pop(this.comparator, this.items); +}; + +/** + * Method used to pop the heap, then push an item and return the popped + * item. + * + * @param {any} item - Item to push into the heap. + * @return {any} + */ +Heap.prototype.replace = function(item) { + return replace(this.comparator, this.items, item); +}; + +/** + * Method used to push the heap, the pop it and return the pooped item. + * + * @param {any} item - Item to push into the heap. + * @return {any} + */ +Heap.prototype.pushpop = function(item) { + return pushpop(this.comparator, this.items, item); +}; + +/** + * Method used to consume the heap fully and return its items as a sorted array. + * + * @return {array} + */ +Heap.prototype.consume = function() { + this.size = 0; + return consume(this.comparator, this.items); +}; + +/** + * Method used to convert the heap to an array. Note that it basically clone + * the heap and consumes it completely. 
This is hardly performant. + * + * @return {array} + */ +Heap.prototype.toArray = function() { + return consume(this.comparator, this.items.slice()); +}; + +/** + * Convenience known methods. + */ +Heap.prototype.inspect = function() { + var proxy = this.toArray(); + + // Trick so that node displays the name of the constructor + Object.defineProperty(proxy, 'constructor', { + value: Heap, + enumerable: false + }); + + return proxy; +}; + +if (typeof Symbol !== 'undefined') + Heap.prototype[Symbol.for('nodejs.util.inspect.custom')] = Heap.prototype.inspect; + +/** + * Binary Maximum Heap. + * + * @constructor + * @param {function} comparator - Comparator function to use. + */ +function MaxHeap(comparator) { + this.clear(); + this.comparator = comparator || DEFAULT_COMPARATOR; + + if (typeof this.comparator !== 'function') + throw new Error('mnemonist/MaxHeap.constructor: given comparator should be a function.'); + + this.comparator = reverseComparator(this.comparator); +} + +MaxHeap.prototype = Heap.prototype; + +/** + * Static @.from function taking an arbitrary iterable & converting it into + * a heap. + * + * @param {Iterable} iterable - Target iterable. + * @param {function} comparator - Custom comparator function. 
+ * @return {Heap} + */ +Heap.from = function(iterable, comparator) { + var heap = new Heap(comparator); + + var items; + + // If iterable is an array, we can be clever about it + if (iterables.isArrayLike(iterable)) + items = iterable.slice(); + else + items = iterables.toArray(iterable); + + heapify(heap.comparator, items); + heap.items = items; + heap.size = items.length; + + return heap; +}; + +MaxHeap.from = function(iterable, comparator) { + var heap = new MaxHeap(comparator); + + var items; + + // If iterable is an array, we can be clever about it + if (iterables.isArrayLike(iterable)) + items = iterable.slice(); + else + items = iterables.toArray(iterable); + + heapify(heap.comparator, items); + heap.items = items; + heap.size = items.length; + + return heap; +}; + +/** + * Exporting. + */ +Heap.siftUp = siftUp; +Heap.siftDown = siftDown; +Heap.push = push; +Heap.pop = pop; +Heap.replace = replace; +Heap.pushpop = pushpop; +Heap.heapify = heapify; +Heap.consume = consume; + +Heap.nsmallest = nsmallest; +Heap.nlargest = nlargest; + +Heap.MinHeap = Heap; +Heap.MaxHeap = MaxHeap; + +module.exports = Heap; diff --git a/amplify/functions/deleteDocument/node_modules/mnemonist/index.d.ts b/amplify/functions/deleteDocument/node_modules/mnemonist/index.d.ts new file mode 100644 index 0000000..cbdc86c --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/mnemonist/index.d.ts @@ -0,0 +1,46 @@ +/** + * Mnemonist Typings + * ================== + * + * Gathering the library's typings. 
+ */ +import * as set from './set'; + +export {default as BiMap, InverseMap} from './bi-map'; +export {default as BitSet} from './bit-set'; +export {default as BitVector} from './bit-vector'; +export {default as BKTree} from './bk-tree'; +export {default as BloomFilter} from './bloom-filter'; +export {default as CircularBuffer} from './circular-buffer'; +export {default as DefaultMap} from './default-map'; +export {default as DefaultWeakMap} from './default-weak-map'; +export {default as FixedDeque} from './fixed-deque'; +export {default as FibonacciHeap, MinFibonacciHeap, MaxFibonacciHeap} from './fibonacci-heap'; +export {default as FixedReverseHeap} from './fixed-reverse-heap'; +export {default as FixedStack} from './fixed-stack'; +export {default as FuzzyMap} from './fuzzy-map'; +export {default as FuzzyMultiMap} from './fuzzy-multi-map'; +export {default as HashedArrayTree} from './hashed-array-tree'; +export {default as Heap, MinHeap, MaxHeap} from './heap'; +export {default as InvertedIndex} from './inverted-index'; +export {default as KDTree} from './kd-tree'; +export {default as LinkedList} from './linked-list'; +export {default as LRUCache} from './lru-cache'; +export {default as LRUMap} from './lru-map'; +export {default as MultiMap} from './multi-map'; +export {default as MultiSet} from './multi-set'; +export {default as PassjoinIndex} from './passjoin-index'; +export {default as Queue} from './queue'; +export {set}; +export {default as SparseQueueSet} from './sparse-queue-set'; +export {default as SparseMap} from './sparse-map'; +export {default as SparseSet} from './sparse-set'; +export {default as Stack} from './stack'; +export {default as StaticDisjointSet} from './static-disjoint-set'; +export {default as StaticIntervalTree} from './static-interval-tree'; +export {default as SuffixArray, GeneralizedSuffixArray} from './suffix-array'; +export {default as SymSpell} from './symspell'; +export {default as Trie} from './trie'; +export {default as 
TrieMap} from './trie-map'; +export {default as Vector, Uint8Vector, Uint8ClampedVector, Int8Vector, Uint16Vector, Int16Vector, Uint32Vector, Int32Vector, Float32Vector, Float64Array} from './vector'; +export {default as VPTree} from './vp-tree'; diff --git a/amplify/functions/deleteDocument/node_modules/mnemonist/index.js b/amplify/functions/deleteDocument/node_modules/mnemonist/index.js new file mode 100644 index 0000000..80c18d4 --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/mnemonist/index.js @@ -0,0 +1,56 @@ +/** + * Mnemonist Library Endpoint + * =========================== + * + * Exporting every data structure through a unified endpoint. Consumers + * of this library should prefer the modular access though. + */ +var Heap = require('./heap.js'), + FibonacciHeap = require('./fibonacci-heap.js'), + SuffixArray = require('./suffix-array.js'); + +module.exports = { + BiMap: require('./bi-map.js'), + BitSet: require('./bit-set.js'), + BitVector: require('./bit-vector.js'), + BloomFilter: require('./bloom-filter.js'), + BKTree: require('./bk-tree.js'), + CircularBuffer: require('./circular-buffer.js'), + DefaultMap: require('./default-map.js'), + DefaultWeakMap: require('./default-weak-map.js'), + FixedDeque: require('./fixed-deque.js'), + StaticDisjointSet: require('./static-disjoint-set.js'), + FibonacciHeap: FibonacciHeap, + MinFibonacciHeap: FibonacciHeap.MinFibonacciHeap, + MaxFibonacciHeap: FibonacciHeap.MaxFibonacciHeap, + FixedReverseHeap: require('./fixed-reverse-heap.js'), + FuzzyMap: require('./fuzzy-map.js'), + FuzzyMultiMap: require('./fuzzy-multi-map.js'), + HashedArrayTree: require('./hashed-array-tree.js'), + Heap: Heap, + MinHeap: Heap.MinHeap, + MaxHeap: Heap.MaxHeap, + StaticIntervalTree: require('./static-interval-tree.js'), + InvertedIndex: require('./inverted-index.js'), + KDTree: require('./kd-tree.js'), + LinkedList: require('./linked-list.js'), + LRUCache: require('./lru-cache.js'), + LRUMap: require('./lru-map.js'), + 
MultiMap: require('./multi-map.js'), + MultiSet: require('./multi-set.js'), + PassjoinIndex: require('./passjoin-index.js'), + Queue: require('./queue.js'), + FixedStack: require('./fixed-stack.js'), + Stack: require('./stack.js'), + SuffixArray: SuffixArray, + GeneralizedSuffixArray: SuffixArray.GeneralizedSuffixArray, + Set: require('./set.js'), + SparseQueueSet: require('./sparse-queue-set.js'), + SparseMap: require('./sparse-map.js'), + SparseSet: require('./sparse-set.js'), + SymSpell: require('./symspell.js'), + Trie: require('./trie.js'), + TrieMap: require('./trie-map.js'), + Vector: require('./vector.js'), + VPTree: require('./vp-tree.js') +}; diff --git a/amplify/functions/deleteDocument/node_modules/mnemonist/inverted-index.d.ts b/amplify/functions/deleteDocument/node_modules/mnemonist/inverted-index.d.ts new file mode 100644 index 0000000..4596ff8 --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/mnemonist/inverted-index.d.ts @@ -0,0 +1,33 @@ +/** + * Mnemonist InvertedIndex Typings + * ================================ + */ +type Tokenizer = (key: any) => Array; +type TokenizersTuple = [Tokenizer, Tokenizer]; + +export default class InvertedIndex implements Iterable { + + // Members + dimension: number; + size: number; + + // Constructor + constructor(tokenizer?: Tokenizer); + constructor(tokenizers?: TokenizersTuple); + + // Methods + clear(): void; + add(document: D): this; + get(query: any): Array; + forEach(callback: (document: D, index: number, invertedIndex: this) => void, scope?: any): void; + documents(): IterableIterator; + tokens(): IterableIterator; + [Symbol.iterator](): IterableIterator; + inspect(): any; + + // Statics + static from( + iterable: Iterable | {[key: string] : I}, + tokenizer?: Tokenizer | TokenizersTuple + ): InvertedIndex; +} \ No newline at end of file diff --git a/amplify/functions/deleteDocument/node_modules/mnemonist/inverted-index.js 
b/amplify/functions/deleteDocument/node_modules/mnemonist/inverted-index.js new file mode 100644 index 0000000..a352d19 --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/mnemonist/inverted-index.js @@ -0,0 +1,249 @@ +/** + * Mnemonist Inverted Index + * ========================= + * + * JavaScript implementation of an inverted index. + */ +var Iterator = require('obliterator/iterator'), + forEach = require('obliterator/foreach'), + helpers = require('./utils/merge.js'); + +function identity(x) { + return x; +} + +/** + * InvertedIndex. + * + * @constructor + * @param {function} tokenizer - Tokenizer function. + */ +function InvertedIndex(descriptor) { + this.clear(); + + if (Array.isArray(descriptor)) { + this.documentTokenizer = descriptor[0]; + this.queryTokenizer = descriptor[1]; + } + else { + this.documentTokenizer = descriptor; + this.queryTokenizer = descriptor; + } + + if (!this.documentTokenizer) + this.documentTokenizer = identity; + if (!this.queryTokenizer) + this.queryTokenizer = identity; + + if (typeof this.documentTokenizer !== 'function') + throw new Error('mnemonist/InvertedIndex.constructor: document tokenizer is not a function.'); + + if (typeof this.queryTokenizer !== 'function') + throw new Error('mnemonist/InvertedIndex.constructor: query tokenizer is not a function.'); +} + +/** + * Method used to clear the InvertedIndex. + * + * @return {undefined} + */ +InvertedIndex.prototype.clear = function() { + + // Properties + this.items = []; + this.mapping = new Map(); + this.size = 0; + this.dimension = 0; +}; + +/** + * Method used to add a document to the index. + * + * @param {any} doc - Item to add. 
+ * @return {InvertedIndex} + */ +InvertedIndex.prototype.add = function(doc) { + + // Increasing size + this.size++; + + // Storing document + var key = this.items.length; + this.items.push(doc); + + // Tokenizing the document + var tokens = this.documentTokenizer(doc); + + if (!Array.isArray(tokens)) + throw new Error('mnemonist/InvertedIndex.add: tokenizer function should return an array of tokens.'); + + // Indexing + var done = new Set(), + token, + container; + + for (var i = 0, l = tokens.length; i < l; i++) { + token = tokens[i]; + + if (done.has(token)) + continue; + + done.add(token); + + container = this.mapping.get(token); + + if (!container) { + container = []; + this.mapping.set(token, container); + } + + container.push(key); + } + + this.dimension = this.mapping.size; + + return this; +}; + +/** + * Method used to query the index in a AND fashion. + * + * @param {any} query - Query + * @return {Set} - Intersection of documents matching the query. + */ +InvertedIndex.prototype.get = function(query) { + + // Early termination + if (!this.size) + return []; + + // First we need to tokenize the query + var tokens = this.queryTokenizer(query); + + if (!Array.isArray(tokens)) + throw new Error('mnemonist/InvertedIndex.query: tokenizer function should return an array of tokens.'); + + if (!tokens.length) + return []; + + var results = this.mapping.get(tokens[0]), + c, + i, + l; + + if (typeof results === 'undefined' || results.length === 0) + return []; + + if (tokens.length > 1) { + for (i = 1, l = tokens.length; i < l; i++) { + c = this.mapping.get(tokens[i]); + + if (typeof c === 'undefined' || c.length === 0) + return []; + + results = helpers.intersectionUniqueArrays(results, c); + } + } + + var docs = new Array(results.length); + + for (i = 0, l = docs.length; i < l; i++) + docs[i] = this.items[results[i]]; + + return docs; +}; + +/** + * Method used to iterate over each of the documents. 
+ * + * @param {function} callback - Function to call for each item. + * @param {object} scope - Optional scope. + * @return {undefined} + */ +InvertedIndex.prototype.forEach = function(callback, scope) { + scope = arguments.length > 1 ? scope : this; + + for (var i = 0, l = this.documents.length; i < l; i++) + callback.call(scope, this.documents[i], i, this); +}; + +/** + * Method returning an iterator over the index's documents. + * + * @return {Iterator} + */ +InvertedIndex.prototype.documents = function() { + var documents = this.items, + l = documents.length, + i = 0; + + return new Iterator(function() { + if (i >= l) + return { + done: true + }; + + var value = documents[i++]; + + return { + value: value, + done: false + }; + }); +}; + +/** + * Method returning an iterator over the index's tokens. + * + * @return {Iterator} + */ +InvertedIndex.prototype.tokens = function() { + return this.mapping.keys(); +}; + +/** + * Attaching the #.values method to Symbol.iterator if possible. + */ +if (typeof Symbol !== 'undefined') + InvertedIndex.prototype[Symbol.iterator] = InvertedIndex.prototype.documents; + +/** + * Convenience known methods. + */ +InvertedIndex.prototype.inspect = function() { + var array = this.items.slice(); + + // Trick so that node displays the name of the constructor + Object.defineProperty(array, 'constructor', { + value: InvertedIndex, + enumerable: false + }); + + return array; +}; + +if (typeof Symbol !== 'undefined') + InvertedIndex.prototype[Symbol.for('nodejs.util.inspect.custom')] = InvertedIndex.prototype.inspect; + +/** + * Static @.from function taking an arbitrary iterable & converting it into + * a InvertedIndex. + * + * @param {Iterable} iterable - Target iterable. + * @param {function} tokenizer - Tokenizer function. 
+ * @return {InvertedIndex} + */ +InvertedIndex.from = function(iterable, descriptor) { + var index = new InvertedIndex(descriptor); + + forEach(iterable, function(doc) { + index.add(doc); + }); + + return index; +}; + +/** + * Exporting. + */ +module.exports = InvertedIndex; diff --git a/amplify/functions/deleteDocument/node_modules/mnemonist/kd-tree.d.ts b/amplify/functions/deleteDocument/node_modules/mnemonist/kd-tree.d.ts new file mode 100644 index 0000000..10294f3 --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/mnemonist/kd-tree.d.ts @@ -0,0 +1,25 @@ +/** + * Mnemonist KDTree Typings + * ========================= + */ +import {IArrayLike} from './utils/types'; + +export default class KDTree { + + // Members + dimensions: number; + size: number; + visited: number; + + // Methods + nearestNeighbor(point: Array): V; + kNearestNeighbors(k: number, point: Array): Array; + linearKNearestNeighbors(k: number, point: Array): Array; + inspect(): any; + + // Statics + static from(iterable: Iterable<[I, Array]>, dimensions: number): KDTree; + static from(axes: IArrayLike): KDTree; + static from(axes: IArrayLike, labels: Array): KDTree; +} + diff --git a/amplify/functions/deleteDocument/node_modules/mnemonist/kd-tree.js b/amplify/functions/deleteDocument/node_modules/mnemonist/kd-tree.js new file mode 100644 index 0000000..fe5d1ca --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/mnemonist/kd-tree.js @@ -0,0 +1,447 @@ +/** + * Mnemonist KDTree + * ================= + * + * Low-level JavaScript implementation of a k-dimensional tree. 
+ */ +var iterables = require('./utils/iterables.js'); +var typed = require('./utils/typed-arrays.js'); +var createTupleComparator = require('./utils/comparators.js').createTupleComparator; +var FixedReverseHeap = require('./fixed-reverse-heap.js'); +var inplaceQuickSortIndices = require('./sort/quick.js').inplaceQuickSortIndices; + +/** + * Helper function used to compute the squared distance between a query point + * and an indexed points whose values are stored in a tree's axes. + * + * Note that squared distance is used instead of euclidean to avoid + * costly sqrt computations. + * + * @param {number} dimensions - Number of dimensions. + * @param {array} axes - Axes data. + * @param {number} pivot - Pivot. + * @param {array} point - Query point. + * @return {number} + */ +function squaredDistanceAxes(dimensions, axes, pivot, b) { + var d; + + var dist = 0, + step; + + for (d = 0; d < dimensions; d++) { + step = axes[d][pivot] - b[d]; + dist += step * step; + } + + return dist; +} + +/** + * Helper function used to reshape input data into low-level axes data. + * + * @param {number} dimensions - Number of dimensions. + * @param {array} data - Data in the shape [label, [x, y, z...]] + * @return {object} + */ +function reshapeIntoAxes(dimensions, data) { + var l = data.length; + + var axes = new Array(dimensions), + labels = new Array(l), + axis; + + var PointerArray = typed.getPointerArray(l); + + var ids = new PointerArray(l); + + var d, i, row; + + var f = true; + + for (d = 0; d < dimensions; d++) { + axis = new Float64Array(l); + + for (i = 0; i < l; i++) { + row = data[i]; + axis[i] = row[1][d]; + + if (f) { + labels[i] = row[0]; + ids[i] = i; + } + } + + f = false; + axes[d] = axis; + } + + return {axes: axes, ids: ids, labels: labels}; +} + +/** + * Helper function used to build a kd-tree from axes data. + * + * @param {number} dimensions - Number of dimensions. + * @param {array} axes - Axes. + * @param {array} ids - Indices to sort. 
+ * @param {array} labels - Point labels. + * @return {object} + */ +function buildTree(dimensions, axes, ids, labels) { + var l = labels.length; + + // NOTE: +1 because we need to keep 0 as null pointer + var PointerArray = typed.getPointerArray(l + 1); + + // Building the tree + var pivots = new PointerArray(l), + lefts = new PointerArray(l), + rights = new PointerArray(l); + + var stack = [[0, 0, ids.length, -1, 0]], + step, + parent, + direction, + median, + pivot, + lo, + hi; + + var d, i = 0; + + while (stack.length !== 0) { + step = stack.pop(); + + d = step[0]; + lo = step[1]; + hi = step[2]; + parent = step[3]; + direction = step[4]; + + inplaceQuickSortIndices(axes[d], ids, lo, hi); + + l = hi - lo; + median = lo + (l >>> 1); // Fancy floor(l / 2) + pivot = ids[median]; + pivots[i] = pivot; + + if (parent > -1) { + if (direction === 0) + lefts[parent] = i + 1; + else + rights[parent] = i + 1; + } + + d = (d + 1) % dimensions; + + // Right + if (median !== lo && median !== hi - 1) { + stack.push([d, median + 1, hi, i, 1]); + } + + // Left + if (median !== lo) { + stack.push([d, lo, median, i, 0]); + } + + i++; + } + + return { + axes: axes, + labels: labels, + pivots: pivots, + lefts: lefts, + rights: rights + }; +} + +/** + * KDTree. + * + * @constructor + */ +function KDTree(dimensions, build) { + this.dimensions = dimensions; + this.visited = 0; + + this.axes = build.axes; + this.labels = build.labels; + + this.pivots = build.pivots; + this.lefts = build.lefts; + this.rights = build.rights; + + this.size = this.labels.length; +} + +/** + * Method returning the query's nearest neighbor. + * + * @param {array} query - Query point. 
+ * @return {any} + */ +KDTree.prototype.nearestNeighbor = function(query) { + var bestDistance = Infinity, + best = null; + + var dimensions = this.dimensions, + axes = this.axes, + pivots = this.pivots, + lefts = this.lefts, + rights = this.rights; + + var visited = 0; + + function recurse(d, node) { + visited++; + + var left = lefts[node], + right = rights[node], + pivot = pivots[node]; + + var dist = squaredDistanceAxes( + dimensions, + axes, + pivot, + query + ); + + if (dist < bestDistance) { + best = pivot; + bestDistance = dist; + + if (dist === 0) + return; + } + + var dx = axes[d][pivot] - query[d]; + + d = (d + 1) % dimensions; + + // Going the correct way? + if (dx > 0) { + if (left !== 0) + recurse(d, left - 1); + } + else { + if (right !== 0) + recurse(d, right - 1); + } + + // Going the other way? + if (dx * dx < bestDistance) { + if (dx > 0) { + if (right !== 0) + recurse(d, right - 1); + } + else { + if (left !== 0) + recurse(d, left - 1); + } + } + } + + recurse(0, 0); + + this.visited = visited; + return this.labels[best]; +}; + +var KNN_HEAP_COMPARATOR_3 = createTupleComparator(3); +var KNN_HEAP_COMPARATOR_2 = createTupleComparator(2); + +/** + * Method returning the query's k nearest neighbors. + * + * @param {number} k - Number of nearest neighbor to retrieve. + * @param {array} query - Query point. 
+ * @return {array} + */ + +// TODO: can do better by improving upon static-kdtree here +KDTree.prototype.kNearestNeighbors = function(k, query) { + if (k <= 0) + throw new Error('mnemonist/kd-tree.kNearestNeighbors: k should be a positive number.'); + + k = Math.min(k, this.size); + + if (k === 1) + return [this.nearestNeighbor(query)]; + + var heap = new FixedReverseHeap(Array, KNN_HEAP_COMPARATOR_3, k); + + var dimensions = this.dimensions, + axes = this.axes, + pivots = this.pivots, + lefts = this.lefts, + rights = this.rights; + + var visited = 0; + + function recurse(d, node) { + var left = lefts[node], + right = rights[node], + pivot = pivots[node]; + + var dist = squaredDistanceAxes( + dimensions, + axes, + pivot, + query + ); + + heap.push([dist, visited++, pivot]); + + var point = query[d], + split = axes[d][pivot], + dx = point - split; + + d = (d + 1) % dimensions; + + // Going the correct way? + if (point < split) { + if (left !== 0) { + recurse(d, left - 1); + } + } + else { + if (right !== 0) { + recurse(d, right - 1); + } + } + + // Going the other way? + if (dx * dx < heap.peek()[0] || heap.size < k) { + if (point < split) { + if (right !== 0) { + recurse(d, right - 1); + } + } + else { + if (left !== 0) { + recurse(d, left - 1); + } + } + } + } + + recurse(0, 0); + + this.visited = visited; + + var best = heap.consume(); + + for (var i = 0; i < best.length; i++) + best[i] = this.labels[best[i][2]]; + + return best; +}; + +/** + * Method returning the query's k nearest neighbors by linear search. + * + * @param {number} k - Number of nearest neighbor to retrieve. + * @param {array} query - Query point. 
+ * @return {array} + */ +KDTree.prototype.linearKNearestNeighbors = function(k, query) { + if (k <= 0) + throw new Error('mnemonist/kd-tree.kNearestNeighbors: k should be a positive number.'); + + k = Math.min(k, this.size); + + var heap = new FixedReverseHeap(Array, KNN_HEAP_COMPARATOR_2, k); + + var i, l, dist; + + for (i = 0, l = this.size; i < l; i++) { + dist = squaredDistanceAxes( + this.dimensions, + this.axes, + this.pivots[i], + query + ); + + heap.push([dist, i]); + } + + var best = heap.consume(); + + for (i = 0; i < best.length; i++) + best[i] = this.labels[this.pivots[best[i][1]]]; + + return best; +}; + +/** + * Convenience known methods. + */ +KDTree.prototype.inspect = function() { + var dummy = new Map(); + + dummy.dimensions = this.dimensions; + + Object.defineProperty(dummy, 'constructor', { + value: KDTree, + enumerable: false + }); + + var i, j, point; + + for (i = 0; i < this.size; i++) { + point = new Array(this.dimensions); + + for (j = 0; j < this.dimensions; j++) + point[j] = this.axes[j][i]; + + dummy.set(this.labels[i], point); + } + + return dummy; +}; + +if (typeof Symbol !== 'undefined') + KDTree.prototype[Symbol.for('nodejs.util.inspect.custom')] = KDTree.prototype.inspect; + +/** + * Static @.from function taking an arbitrary iterable & converting it into + * a structure. + * + * @param {Iterable} iterable - Target iterable. + * @param {number} dimensions - Space dimensions. + * @return {KDTree} + */ +KDTree.from = function(iterable, dimensions) { + var data = iterables.toArray(iterable); + + var reshaped = reshapeIntoAxes(dimensions, data); + + var result = buildTree(dimensions, reshaped.axes, reshaped.ids, reshaped.labels); + + return new KDTree(dimensions, result); +}; + +/** + * Static @.from function building a KDTree from given axes. + * + * @param {Iterable} iterable - Target iterable. + * @param {number} dimensions - Space dimensions. 
+ * @return {KDTree} + */ +KDTree.fromAxes = function(axes, labels) { + if (!labels) + labels = typed.indices(axes[0].length); + + var dimensions = axes.length; + + var result = buildTree(axes.length, axes, typed.indices(labels.length), labels); + + return new KDTree(dimensions, result); +}; + +/** + * Exporting. + */ +module.exports = KDTree; diff --git a/amplify/functions/deleteDocument/node_modules/mnemonist/linked-list.d.ts b/amplify/functions/deleteDocument/node_modules/mnemonist/linked-list.d.ts new file mode 100644 index 0000000..4eec48c --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/mnemonist/linked-list.d.ts @@ -0,0 +1,29 @@ +/** + * Mnemonist LinkedList Typings + * ============================= + */ +export default class LinkedList implements Iterable { + + // Members + size: number; + + // Methods + clear(): void; + first(): T | undefined; + last(): T | undefined; + peek(): T | undefined; + push(value: T): number; + shift(): T | undefined; + unshift(value: T): number; + forEach(callback: (value: T, index: number, list: this) => void, scope?: any): void; + toArray(): Array; + values(): IterableIterator; + entries(): IterableIterator<[number, T]>; + [Symbol.iterator](): IterableIterator; + toString(): string; + toJSON(): Array; + inspect(): any; + + // Statics + static from(iterable: Iterable | {[key: string] : I}): LinkedList; +} \ No newline at end of file diff --git a/amplify/functions/deleteDocument/node_modules/mnemonist/linked-list.js b/amplify/functions/deleteDocument/node_modules/mnemonist/linked-list.js new file mode 100644 index 0000000..17dca06 --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/mnemonist/linked-list.js @@ -0,0 +1,261 @@ +/** + * Mnemonist Linked List + * ====================== + * + * Singly linked list implementation. Uses raw JavaScript objects as nodes + * as benchmarks proved it was the fastest thing to do. 
+ */ +var Iterator = require('obliterator/iterator'), + forEach = require('obliterator/foreach'); + +/** + * Linked List. + * + * @constructor + */ +function LinkedList() { + this.clear(); +} + +/** + * Method used to clear the list. + * + * @return {undefined} + */ +LinkedList.prototype.clear = function() { + + // Properties + this.head = null; + this.tail = null; + this.size = 0; +}; + +/** + * Method used to get the first item of the list. + * + * @return {any} + */ +LinkedList.prototype.first = function() { + return this.head ? this.head.item : undefined; +}; +LinkedList.prototype.peek = LinkedList.prototype.first; + +/** + * Method used to get the last item of the list. + * + * @return {any} + */ +LinkedList.prototype.last = function() { + return this.tail ? this.tail.item : undefined; +}; + +/** + * Method used to add an item at the end of the list. + * + * @param {any} item - The item to add. + * @return {number} + */ +LinkedList.prototype.push = function(item) { + var node = {item: item, next: null}; + + if (!this.head) { + this.head = node; + this.tail = node; + } + else { + this.tail.next = node; + this.tail = node; + } + + this.size++; + + return this.size; +}; + +/** + * Method used to add an item at the beginning of the list. + * + * @param {any} item - The item to add. + * @return {number} + */ +LinkedList.prototype.unshift = function(item) { + var node = {item: item, next: null}; + + if (!this.head) { + this.head = node; + this.tail = node; + } + else { + if (!this.head.next) + this.tail = this.head; + node.next = this.head; + this.head = node; + } + + this.size++; + + return this.size; +}; + +/** + * Method used to retrieve & remove the first item of the list. + * + * @return {any} + */ +LinkedList.prototype.shift = function() { + if (!this.size) + return undefined; + + var node = this.head; + + this.head = node.next; + this.size--; + + return node.item; +}; + +/** + * Method used to iterate over the list. 
+ * + * @param {function} callback - Function to call for each item. + * @param {object} scope - Optional scope. + * @return {undefined} + */ +LinkedList.prototype.forEach = function(callback, scope) { + if (!this.size) + return; + + scope = arguments.length > 1 ? scope : this; + + var n = this.head, + i = 0; + + while (n) { + callback.call(scope, n.item, i, this); + n = n.next; + i++; + } +}; + +/** + * Method used to convert the list into an array. + * + * @return {array} + */ +LinkedList.prototype.toArray = function() { + if (!this.size) + return []; + + var array = new Array(this.size); + + for (var i = 0, l = this.size, n = this.head; i < l; i++) { + array[i] = n.item; + n = n.next; + } + + return array; +}; + +/** + * Method used to create an iterator over a list's values. + * + * @return {Iterator} + */ +LinkedList.prototype.values = function() { + var n = this.head; + + return new Iterator(function() { + if (!n) + return { + done: true + }; + + var value = n.item; + n = n.next; + + return { + value: value, + done: false + }; + }); +}; + +/** + * Method used to create an iterator over a list's entries. + * + * @return {Iterator} + */ +LinkedList.prototype.entries = function() { + var n = this.head, + i = 0; + + return new Iterator(function() { + if (!n) + return { + done: true + }; + + var value = n.item; + n = n.next; + i++; + + return { + value: [i - 1, value], + done: false + }; + }); +}; + +/** + * Attaching the #.values method to Symbol.iterator if possible. + */ +if (typeof Symbol !== 'undefined') + LinkedList.prototype[Symbol.iterator] = LinkedList.prototype.values; + +/** + * Convenience known methods. 
+ */ +LinkedList.prototype.toString = function() { + return this.toArray().join(','); +}; + +LinkedList.prototype.toJSON = function() { + return this.toArray(); +}; + +LinkedList.prototype.inspect = function() { + var array = this.toArray(); + + // Trick so that node displays the name of the constructor + Object.defineProperty(array, 'constructor', { + value: LinkedList, + enumerable: false + }); + + return array; +}; + +if (typeof Symbol !== 'undefined') + LinkedList.prototype[Symbol.for('nodejs.util.inspect.custom')] = LinkedList.prototype.inspect; + +/** + * Static @.from function taking an arbitrary iterable & converting it into + * a list. + * + * @param {Iterable} iterable - Target iterable. + * @return {LinkedList} + */ +LinkedList.from = function(iterable) { + var list = new LinkedList(); + + forEach(iterable, function(value) { + list.push(value); + }); + + return list; +}; + +/** + * Exporting. + */ +module.exports = LinkedList; diff --git a/amplify/functions/deleteDocument/node_modules/mnemonist/lru-cache.d.ts b/amplify/functions/deleteDocument/node_modules/mnemonist/lru-cache.d.ts new file mode 100644 index 0000000..45b61e0 --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/mnemonist/lru-cache.d.ts @@ -0,0 +1,43 @@ +/** + * Mnemonist LRUCache Typings + * =========================== + */ +import {IArrayLikeConstructor} from './utils/types'; + +export default class LRUCache implements Iterable<[K, V]> { + + // Members + capacity: number; + size: number; + + // Constructor + constructor(capacity: number); + constructor(KeyArrayClass: IArrayLikeConstructor, ValueArrayClass: IArrayLikeConstructor, capacity: number); + + // Methods + clear(): void; + set(key: K, value: V): this; + setpop(key: K, value: V): {evicted: boolean, key: K, value: V}; + get(key: K): V | undefined; + peek(key: K): V | undefined; + has(key: K): boolean; + forEach(callback: (value: V, key: K, cache: this) => void, scope?: any): void; + keys(): IterableIterator; + values(): 
IterableIterator; + entries(): IterableIterator<[K, V]>; + [Symbol.iterator](): IterableIterator<[K, V]>; + inspect(): any; + + // Statics + static from( + iterable: Iterable<[I, J]> | {[key: string]: J}, + KeyArrayClass: IArrayLikeConstructor, + ValueArrayClass: IArrayLikeConstructor, + capacity?: number + ): LRUCache; + + static from( + iterable: Iterable<[I, J]> | {[key: string]: J}, + capacity?: number + ): LRUCache; +} diff --git a/amplify/functions/deleteDocument/node_modules/mnemonist/lru-cache.js b/amplify/functions/deleteDocument/node_modules/mnemonist/lru-cache.js new file mode 100644 index 0000000..9cab8bc --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/mnemonist/lru-cache.js @@ -0,0 +1,433 @@ +/** + * Mnemonist LRUCache + * =================== + * + * JavaScript implementation of the LRU Cache data structure. To save up + * memory and allocations this implementation represents its underlying + * doubly-linked list as static arrays and pointers. Thus, memory is allocated + * only once at instantiation and JS objects are never created to serve as + * pointers. This also means this implementation does not trigger too many + * garbage collections. + * + * Note that to save up memory, a LRU Cache can be implemented using a singly + * linked list by storing predecessors' pointers as hashmap values. + * However, this means more hashmap lookups and would probably slow the whole + * thing down. What's more, pointers are not the things taking most space in + * memory. + */ +var Iterator = require('obliterator/iterator'), + forEach = require('obliterator/foreach'), + typed = require('./utils/typed-arrays.js'), + iterables = require('./utils/iterables.js'); + +/** + * LRUCache. + * + * @constructor + * @param {function} Keys - Array class for storing keys. + * @param {function} Values - Array class for storing values. + * @param {number} capacity - Desired capacity. 
+ */ +function LRUCache(Keys, Values, capacity) { + if (arguments.length < 2) { + capacity = Keys; + Keys = null; + Values = null; + } + + this.capacity = capacity; + + if (typeof this.capacity !== 'number' || this.capacity <= 0) + throw new Error('mnemonist/lru-cache: capacity should be positive number.'); + + var PointerArray = typed.getPointerArray(capacity); + + this.forward = new PointerArray(capacity); + this.backward = new PointerArray(capacity); + this.K = typeof Keys === 'function' ? new Keys(capacity) : new Array(capacity); + this.V = typeof Values === 'function' ? new Values(capacity) : new Array(capacity); + + // Properties + this.size = 0; + this.head = 0; + this.tail = 0; + this.items = {}; +} + +/** + * Method used to clear the structure. + * + * @return {undefined} + */ +LRUCache.prototype.clear = function() { + this.size = 0; + this.head = 0; + this.tail = 0; + this.items = {}; +}; + +/** + * Method used to splay a value on top. + * + * @param {number} pointer - Pointer of the value to splay on top. + * @return {LRUCache} + */ +LRUCache.prototype.splayOnTop = function(pointer) { + var oldHead = this.head; + + if (this.head === pointer) + return this; + + var previous = this.backward[pointer], + next = this.forward[pointer]; + + if (this.tail === pointer) { + this.tail = previous; + } + else { + this.backward[next] = previous; + } + + this.forward[previous] = next; + + this.backward[oldHead] = pointer; + this.head = pointer; + this.forward[pointer] = oldHead; + + return this; +}; + +/** + * Method used to set the value for the given key in the cache. + * + * @param {any} key - Key. + * @param {any} value - Value. 
+ * @return {undefined} + */ +LRUCache.prototype.set = function(key, value) { + + // The key already exists, we just need to update the value and splay on top + var pointer = this.items[key]; + + if (typeof pointer !== 'undefined') { + this.splayOnTop(pointer); + this.V[pointer] = value; + + return; + } + + // The cache is not yet full + if (this.size < this.capacity) { + pointer = this.size++; + } + + // Cache is full, we need to drop the last value + else { + pointer = this.tail; + this.tail = this.backward[pointer]; + delete this.items[this.K[pointer]]; + } + + // Storing key & value + this.items[key] = pointer; + this.K[pointer] = key; + this.V[pointer] = value; + + // Moving the item at the front of the list + this.forward[pointer] = this.head; + this.backward[this.head] = pointer; + this.head = pointer; +}; + +/** + * Method used to set the value for the given key in the cache + * + * @param {any} key - Key. + * @param {any} value - Value. + * @return {{evicted: boolean, key: any, value: any}} An object containing the + * key and value of an item that was overwritten or evicted in the set + * operation, as well as a boolean indicating whether it was evicted due to + * limited capacity. Return value is null if nothing was evicted or overwritten + * during the set operation. 
+ */ +LRUCache.prototype.setpop = function(key, value) { + var oldValue = null; + var oldKey = null; + // The key already exists, we just need to update the value and splay on top + var pointer = this.items[key]; + + if (typeof pointer !== 'undefined') { + this.splayOnTop(pointer); + oldValue = this.V[pointer]; + this.V[pointer] = value; + return {evicted: false, key: key, value: oldValue}; + } + + // The cache is not yet full + if (this.size < this.capacity) { + pointer = this.size++; + } + + // Cache is full, we need to drop the last value + else { + pointer = this.tail; + this.tail = this.backward[pointer]; + oldValue = this.V[pointer]; + oldKey = this.K[pointer]; + delete this.items[this.K[pointer]]; + } + + // Storing key & value + this.items[key] = pointer; + this.K[pointer] = key; + this.V[pointer] = value; + + // Moving the item at the front of the list + this.forward[pointer] = this.head; + this.backward[this.head] = pointer; + this.head = pointer; + + // Return object if eviction took place, otherwise return null + if (oldKey) { + return {evicted: true, key: oldKey, value: oldValue}; + } + else { + return null; + } +}; + +/** + * Method used to check whether the key exists in the cache. + * + * @param {any} key - Key. + * @return {boolean} + */ +LRUCache.prototype.has = function(key) { + return key in this.items; +}; + +/** + * Method used to get the value attached to the given key. Will move the + * related key to the front of the underlying linked list. + * + * @param {any} key - Key. + * @return {any} + */ +LRUCache.prototype.get = function(key) { + var pointer = this.items[key]; + + if (typeof pointer === 'undefined') + return; + + this.splayOnTop(pointer); + + return this.V[pointer]; +}; + +/** + * Method used to get the value attached to the given key. Does not modify + * the ordering of the underlying linked list. + * + * @param {any} key - Key. 
+ * @return {any} + */ +LRUCache.prototype.peek = function(key) { + var pointer = this.items[key]; + + if (typeof pointer === 'undefined') + return; + + return this.V[pointer]; +}; + +/** + * Method used to iterate over the cache's entries using a callback. + * + * @param {function} callback - Function to call for each item. + * @param {object} scope - Optional scope. + * @return {undefined} + */ +LRUCache.prototype.forEach = function(callback, scope) { + scope = arguments.length > 1 ? scope : this; + + var i = 0, + l = this.size; + + var pointer = this.head, + keys = this.K, + values = this.V, + forward = this.forward; + + while (i < l) { + + callback.call(scope, values[pointer], keys[pointer], this); + pointer = forward[pointer]; + + i++; + } +}; + +/** + * Method used to create an iterator over the cache's keys from most + * recently used to least recently used. + * + * @return {Iterator} + */ +LRUCache.prototype.keys = function() { + var i = 0, + l = this.size; + + var pointer = this.head, + keys = this.K, + forward = this.forward; + + return new Iterator(function() { + if (i >= l) + return {done: true}; + + var key = keys[pointer]; + + i++; + + if (i < l) + pointer = forward[pointer]; + + return { + done: false, + value: key + }; + }); +}; + +/** + * Method used to create an iterator over the cache's values from most + * recently used to least recently used. + * + * @return {Iterator} + */ +LRUCache.prototype.values = function() { + var i = 0, + l = this.size; + + var pointer = this.head, + values = this.V, + forward = this.forward; + + return new Iterator(function() { + if (i >= l) + return {done: true}; + + var value = values[pointer]; + + i++; + + if (i < l) + pointer = forward[pointer]; + + return { + done: false, + value: value + }; + }); +}; + +/** + * Method used to create an iterator over the cache's entries from most + * recently used to least recently used. 
+ * + * @return {Iterator} + */ +LRUCache.prototype.entries = function() { + var i = 0, + l = this.size; + + var pointer = this.head, + keys = this.K, + values = this.V, + forward = this.forward; + + return new Iterator(function() { + if (i >= l) + return {done: true}; + + var key = keys[pointer], + value = values[pointer]; + + i++; + + if (i < l) + pointer = forward[pointer]; + + return { + done: false, + value: [key, value] + }; + }); +}; + +/** + * Attaching the #.entries method to Symbol.iterator if possible. + */ +if (typeof Symbol !== 'undefined') + LRUCache.prototype[Symbol.iterator] = LRUCache.prototype.entries; + +/** + * Convenience known methods. + */ +LRUCache.prototype.inspect = function() { + var proxy = new Map(); + + var iterator = this.entries(), + step; + + while ((step = iterator.next(), !step.done)) + proxy.set(step.value[0], step.value[1]); + + // Trick so that node displays the name of the constructor + Object.defineProperty(proxy, 'constructor', { + value: LRUCache, + enumerable: false + }); + + return proxy; +}; + +if (typeof Symbol !== 'undefined') + LRUCache.prototype[Symbol.for('nodejs.util.inspect.custom')] = LRUCache.prototype.inspect; + +/** + * Static @.from function taking an arbitrary iterable & converting it into + * a structure. + * + * @param {Iterable} iterable - Target iterable. + * @param {function} Keys - Array class for storing keys. + * @param {function} Values - Array class for storing values. + * @param {number} capacity - Cache's capacity. + * @return {LRUCache} + */ +LRUCache.from = function(iterable, Keys, Values, capacity) { + if (arguments.length < 2) { + capacity = iterables.guessLength(iterable); + + if (typeof capacity !== 'number') + throw new Error('mnemonist/lru-cache.from: could not guess iterable length. 
Please provide desired capacity as last argument.'); + } + else if (arguments.length === 2) { + capacity = Keys; + Keys = null; + Values = null; + } + + var cache = new LRUCache(Keys, Values, capacity); + + forEach(iterable, function(value, key) { + cache.set(key, value); + }); + + return cache; +}; + +/** + * Exporting. + */ +module.exports = LRUCache; diff --git a/amplify/functions/deleteDocument/node_modules/mnemonist/lru-map.d.ts b/amplify/functions/deleteDocument/node_modules/mnemonist/lru-map.d.ts new file mode 100644 index 0000000..0943543 --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/mnemonist/lru-map.d.ts @@ -0,0 +1,43 @@ +/** + * Mnemonist LRUMap Typings + * ========================= + */ +import {IArrayLikeConstructor} from './utils/types'; + +export default class LRUMap implements Iterable<[K, V]> { + + // Members + capacity: number; + size: number; + + // Constructor + constructor(capacity: number); + constructor(KeyArrayClass: IArrayLikeConstructor, ValueArrayClass: IArrayLikeConstructor, capacity: number); + + // Methods + clear(): void; + set(key: K, value: V): this; + setpop(key: K, value: V): {evicted: boolean, key: K, value: V}; + get(key: K): V | undefined; + peek(key: K): V | undefined; + has(key: K): boolean; + forEach(callback: (value: V, key: K, cache: this) => void, scope?: any): void; + keys(): IterableIterator; + values(): IterableIterator; + entries(): IterableIterator<[K, V]>; + [Symbol.iterator](): IterableIterator<[K, V]>; + inspect(): any; + + // Statics + static from( + iterable: Iterable<[I, J]> | {[key: string]: J}, + KeyArrayClass: IArrayLikeConstructor, + ValueArrayClass: IArrayLikeConstructor, + capacity?: number + ): LRUMap; + + static from( + iterable: Iterable<[I, J]> | {[key: string]: J}, + capacity?: number + ): LRUMap; +} diff --git a/amplify/functions/deleteDocument/node_modules/mnemonist/lru-map.js b/amplify/functions/deleteDocument/node_modules/mnemonist/lru-map.js new file mode 100644 index 
0000000..26afe27 --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/mnemonist/lru-map.js @@ -0,0 +1,258 @@ +/** + * Mnemonist LRUMap + * ================= + * + * Variant of the LRUCache class that leverages an ES6 Map instead of an object. + * It might be faster for some use case but it is still hard to understand + * when a Map can outperform an object in v8. + */ +var LRUCache = require('./lru-cache.js'), + forEach = require('obliterator/foreach'), + typed = require('./utils/typed-arrays.js'), + iterables = require('./utils/iterables.js'); + +/** + * LRUMap. + * + * @constructor + * @param {function} Keys - Array class for storing keys. + * @param {function} Values - Array class for storing values. + * @param {number} capacity - Desired capacity. + */ +function LRUMap(Keys, Values, capacity) { + if (arguments.length < 2) { + capacity = Keys; + Keys = null; + Values = null; + } + + this.capacity = capacity; + + if (typeof this.capacity !== 'number' || this.capacity <= 0) + throw new Error('mnemonist/lru-map: capacity should be positive number.'); + + var PointerArray = typed.getPointerArray(capacity); + + this.forward = new PointerArray(capacity); + this.backward = new PointerArray(capacity); + this.K = typeof Keys === 'function' ? new Keys(capacity) : new Array(capacity); + this.V = typeof Values === 'function' ? new Values(capacity) : new Array(capacity); + + // Properties + this.size = 0; + this.head = 0; + this.tail = 0; + this.items = new Map(); +} + +/** + * Method used to clear the structure. + * + * @return {undefined} + */ +LRUMap.prototype.clear = function() { + this.size = 0; + this.head = 0; + this.tail = 0; + this.items.clear(); +}; + +/** + * Method used to set the value for the given key in the cache. + * + * @param {any} key - Key. + * @param {any} value - Value. 
+ * @return {undefined} + */ +LRUMap.prototype.set = function(key, value) { + + // The key already exists, we just need to update the value and splay on top + var pointer = this.items.get(key); + + if (typeof pointer !== 'undefined') { + this.splayOnTop(pointer); + this.V[pointer] = value; + + return; + } + + // The cache is not yet full + if (this.size < this.capacity) { + pointer = this.size++; + } + + // Cache is full, we need to drop the last value + else { + pointer = this.tail; + this.tail = this.backward[pointer]; + this.items.delete(this.K[pointer]); + } + + // Storing key & value + this.items.set(key, pointer); + this.K[pointer] = key; + this.V[pointer] = value; + + // Moving the item at the front of the list + this.forward[pointer] = this.head; + this.backward[this.head] = pointer; + this.head = pointer; +}; + +/** + * Method used to set the value for the given key in the cache. + * + * @param {any} key - Key. + * @param {any} value - Value. + * @return {{evicted: boolean, key: any, value: any}} An object containing the + * key and value of an item that was overwritten or evicted in the set + * operation, as well as a boolean indicating whether it was evicted due to + * limited capacity. Return value is null if nothing was evicted or overwritten + * during the set operation. 
+ */ +LRUMap.prototype.setpop = function(key, value) { + var oldValue = null; + var oldKey = null; + // The key already exists, we just need to update the value and splay on top + var pointer = this.items.get(key); + + if (typeof pointer !== 'undefined') { + this.splayOnTop(pointer); + oldValue = this.V[pointer]; + this.V[pointer] = value; + return {evicted: false, key: key, value: oldValue}; + } + + // The cache is not yet full + if (this.size < this.capacity) { + pointer = this.size++; + } + + // Cache is full, we need to drop the last value + else { + pointer = this.tail; + this.tail = this.backward[pointer]; + oldValue = this.V[pointer]; + oldKey = this.K[pointer]; + this.items.delete(this.K[pointer]); + } + + // Storing key & value + this.items.set(key, pointer); + this.K[pointer] = key; + this.V[pointer] = value; + + // Moving the item at the front of the list + this.forward[pointer] = this.head; + this.backward[this.head] = pointer; + this.head = pointer; + + // Return object if eviction took place, otherwise return null + if (oldKey) { + return {evicted: true, key: oldKey, value: oldValue}; + } + else { + return null; + } +}; + +/** + * Method used to check whether the key exists in the cache. + * + * @param {any} key - Key. + * @return {boolean} + */ +LRUMap.prototype.has = function(key) { + return this.items.has(key); +}; + +/** + * Method used to get the value attached to the given key. Will move the + * related key to the front of the underlying linked list. + * + * @param {any} key - Key. + * @return {any} + */ +LRUMap.prototype.get = function(key) { + var pointer = this.items.get(key); + + if (typeof pointer === 'undefined') + return; + + this.splayOnTop(pointer); + + return this.V[pointer]; +}; + +/** + * Method used to get the value attached to the given key. Does not modify + * the ordering of the underlying linked list. + * + * @param {any} key - Key. 
+ * @return {any} + */ +LRUMap.prototype.peek = function(key) { + var pointer = this.items.get(key); + + if (typeof pointer === 'undefined') + return; + + return this.V[pointer]; +}; + +/** + * Methods that can be reused as-is from LRUCache. + */ +LRUMap.prototype.splayOnTop = LRUCache.prototype.splayOnTop; +LRUMap.prototype.forEach = LRUCache.prototype.forEach; +LRUMap.prototype.keys = LRUCache.prototype.keys; +LRUMap.prototype.values = LRUCache.prototype.values; +LRUMap.prototype.entries = LRUCache.prototype.entries; + +/** + * Attaching the #.entries method to Symbol.iterator if possible. + */ +if (typeof Symbol !== 'undefined') + LRUMap.prototype[Symbol.iterator] = LRUMap.prototype.entries; + +/** + * Convenience known methods. + */ +LRUMap.prototype.inspect = LRUCache.prototype.inspect; + +/** + * Static @.from function taking an arbitrary iterable & converting it into + * a structure. + * + * @param {Iterable} iterable - Target iterable. + * @param {function} Keys - Array class for storing keys. + * @param {function} Values - Array class for storing values. + * @param {number} capacity - Cache's capacity. + * @return {LRUMap} + */ +LRUMap.from = function(iterable, Keys, Values, capacity) { + if (arguments.length < 2) { + capacity = iterables.guessLength(iterable); + + if (typeof capacity !== 'number') + throw new Error('mnemonist/lru-cache.from: could not guess iterable length. Please provide desired capacity as last argument.'); + } + else if (arguments.length === 2) { + capacity = Keys; + Keys = null; + Values = null; + } + + var cache = new LRUMap(Keys, Values, capacity); + + forEach(iterable, function(value, key) { + cache.set(key, value); + }); + + return cache; +}; + +/** + * Exporting. 
+ */ +module.exports = LRUMap; diff --git a/amplify/functions/deleteDocument/node_modules/mnemonist/multi-array.js b/amplify/functions/deleteDocument/node_modules/mnemonist/multi-array.js new file mode 100644 index 0000000..c165b55 --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/mnemonist/multi-array.js @@ -0,0 +1,447 @@ +/** + * Mnemonist MultiArray + * ===================== + * + * Memory-efficient representation of an array of arrays. In JavaScript and + * most high-level languages, creating objects has a cost. This implementation + * is therefore able to represent nested containers without needing to create + * objects. This works by storing singly linked lists in a single flat array. + * However, this means that this structure comes with some read/write + * overhead but consume very few memory. + * + * This structure should be particularly suited to indices that will need to + * merge arrays anyway when queried and that are quite heavily hit (such as + * an inverted index or a quad tree). + * + * Note: the implementation does not require to keep track of head pointers + * but this comes with some advantages such as not needing to offset pointers + * by 1 and being able to perform in-order iteration. This remains quite lean + * in memory and does not hinder performance whatsoever. + */ +var typed = require('./utils/typed-arrays.js'), + Vector = require('./vector.js'), + Iterator = require('obliterator/iterator'); + +var PointerVector = Vector.PointerVector; + +/** + * MultiArray. + * + * @constructor + */ +function MultiArray(Container, capacity) { + this.capacity = capacity || null; + this.Container = Container || Array; + this.hasFixedCapacity = this.capacity !== null; + + if (typeof this.Container !== 'function') + throw new Error('mnemonist/multi-array.constructor: container should be a function.'); + + this.clear(); +} + +/** + * Method used to clear the structure. 
+ * + * @return {undefined} + */ +MultiArray.prototype.clear = function() { + + // Properties + this.size = 0; + this.dimension = 0; + + // NOTE: #.heads, #.tails & #.lengths have a length equal to the dimension of + // the array, while #.pointers has a length equal to its size. + + // Storage + if (this.hasFixedCapacity) { + var capacity = this.capacity; + + var PointerArray = typed.getPointerArray(capacity); + + var policy = function(currentCapacity) { + var newCapacity = Math.max(1, Math.ceil(currentCapacity * 1.5)); + + // Clamping max allocation + return Math.min(newCapacity, capacity); + }; + + var initialCapacity = Math.max(8, capacity); + + this.tails = new Vector(PointerArray, {policy: policy, initialCapacity: initialCapacity}); + this.lengths = new Vector(PointerArray, {policy: policy, initialCapacity: initialCapacity}); + this.pointers = new PointerArray(capacity); + + this.items = new this.Container(capacity); + } + else { + + this.tails = new PointerVector(); + this.lengths = new PointerVector(); + this.pointers = new PointerVector(); + + this.items = new this.Container(); + } +}; + +/** + * Method used to add an item to the container at the given index. + * + * @param {number} index - Index of the container. + * @param {any} item - Item to add. + * @return {MultiArray} + */ +MultiArray.prototype.set = function(index, item) { + var pointer = this.size; + + // TODO: this can be factorized! + + if (this.hasFixedCapacity) { + + if (index >= this.capacity || this.size === this.capacity) + throw new Error('mnemonist/multi-array: attempting to allocate further than capacity.'); + + // This linked list does not exist yet. 
Let's create it + if (index >= this.dimension) { + + // We may be required to grow the vectors + this.dimension = index + 1; + this.tails.grow(this.dimension); + this.lengths.grow(this.dimension); + + this.tails.resize(this.dimension); + this.lengths.resize(this.dimension); + + this.lengths.array[index] = 1; + } + + // Appending to the list + else { + this.pointers[pointer] = this.tails.array[index]; + this.lengths.array[index]++; + } + + this.tails.array[index] = pointer; + this.items[pointer] = item; + } + else { + + // This linked list does not exist yet. Let's create it + if (index >= this.dimension) { + + // We may be required to grow the vectors + this.dimension = index + 1; + this.tails.grow(this.dimension); + this.lengths.grow(this.dimension); + + this.tails.resize(this.dimension); + this.lengths.resize(this.dimension); + + this.pointers.push(0); + this.lengths.array[index] = 1; + } + + // Appending to the list + else { + this.pointers.push(this.tails.array[index]); + this.lengths.array[index]++; + } + + this.tails.array[index] = pointer; + this.items.push(item); + } + + this.size++; + + return this; +}; + +/** + * Method used to push a new container holding the given value. + * Note: it might be useful to make this function able to take an iterable + * or variadic someday. For the time being it's just a convenience for + * implementing compact multi maps and such. + * + * @param {any} item - Item to add. 
+ * @return {MultiArray} + */ +MultiArray.prototype.push = function(item) { + var pointer = this.size, + index = this.dimension; + + if (this.hasFixedCapacity) { + + if (index >= this.capacity || this.size === this.capacity) + throw new Error('mnemonist/multi-array: attempting to allocate further than capacity.'); + + this.items[pointer] = item; + } + else { + this.items.push(item); + this.pointers.push(0); + } + + this.lengths.push(1); + this.tails.push(pointer); + + this.dimension++; + this.size++; + + return this; +}; + +/** + * Method used to get the desired container. + * + * @param {number} index - Index of the container. + * @return {array} + */ +MultiArray.prototype.get = function(index) { + if (index >= this.dimension) + return; + + var pointers = this.hasFixedCapacity ? this.pointers : this.pointers.array; + + var pointer = this.tails.array[index], + length = this.lengths.array[index], + i = length; + + var array = new this.Container(length); + + while (i !== 0) { + array[--i] = this.items[pointer]; + pointer = pointers[pointer]; + } + + return array; +}; + +/** + * Method used to check if a container exists at the given index. + * + * @param {number} index - Index of the container. + * @return {boolean} + */ +MultiArray.prototype.has = function(index) { + return index < this.dimension; +}; + +/** + * Method used to get the size of the container stored at given index. + * + * @param {number} index - Index of the container. + * @return {number} + */ +MultiArray.prototype.multiplicity = function(index) { + if (index >= this.dimension) + return 0; + + return this.lengths.array[index]; +}; +MultiArray.prototype.count = MultiArray.prototype.multiplicity; + +/** + * Method used to iterate over the structure's containers. 
+ * + * @return {Iterator} + */ +MultiArray.prototype.containers = function() { + var self = this, + l = this.dimension, + i = 0; + + return new Iterator(function() { + if (i >= l) + return {done: true}; + + return {value: self.get(i++)}; + }); +}; + +/** + * Method used to iterate over the structure's associations. + * + * @return {Iterator} + */ +MultiArray.prototype.associations = function() { + var self = this, + l = this.dimension, + i = 0; + + return new Iterator(function() { + if (i >= l) + return {done: true}; + + var data = {value: [i, self.get(i)]}; + + i++; + + return data; + }); +}; + +/** + * Method used to iterate over the structure's values in the global insertion + * order. + * + * @param {number} [index] - Optionally, iterate over the values of a single + * container at index. + * @return {Iterator} + */ +MultiArray.prototype.values = function(index) { + var items = this.items, + length, + i = 0; + + if (typeof index === 'number') { + if (index >= this.dimension) + return Iterator.empty(); + + length = this.lengths.array[index]; + items = this.items; + + var pointers = this.hasFixedCapacity ? this.pointers : this.pointers.array; + + if (length === 0) + return Iterator.empty(); + + var pointer = this.tails.array[index], + v; + + return new Iterator(function() { + if (i === length) + return {done: true}; + + i++; + v = items[pointer]; + pointer = pointers[pointer]; + + return {done: false, value: v}; + }); + } + + length = this.size; + + return new Iterator(function() { + if (i >= length) + return {done: true}; + + return {done: false, value: items[i++]}; + }); +}; + +/** + * Method used to iterate over the structure's entries. + * + * @return {Iterator} + */ +MultiArray.prototype.entries = function() { + if (this.size === 0) + return Iterator.empty(); + + var inContainer = false, + pointer, + length, + i = 0, + j = 0, + l = this.dimension, + v; + + var pointers = this.hasFixedCapacity ? 
this.pointers : this.pointers.array, + items = this.items, + tails = this.tails.array, + lengths = this.lengths.array; + + var iterator = new Iterator(function next() { + if (!inContainer) { + + if (i >= l) + return {done: true}; + + length = lengths[i]; + pointer = tails[i]; + i++; + + if (length === 0) + return next(); + + j = 0; + inContainer = true; + } + + if (j === length) { + inContainer = false; + return next(); + } + + v = items[pointer]; + + // TODO: guard for out-of-bounds + pointer = pointers[pointer]; + + j++; + + return { + done: false, + value: [i - 1, v] + }; + }); + + return iterator; +}; + +/** + * Method used to iterate over the structure's keys. + * + * @return {Iterator} + */ +MultiArray.prototype.keys = function() { + var i = 0, + l = this.dimension; + + return new Iterator(function() { + if (i >= l) + return {done: true}; + + return {done: false, value: i++}; + }); +}; + +/** + * Convenience known methods. + */ +MultiArray.prototype.inspect = function() { + var proxy = new Array(this.dimension), + i, + l; + + for (i = 0, l = this.dimension; i < l; i++) + proxy[i] = Array.from(this.get(i)); + + if (this.hasFixedCapacity) { + proxy.type = this.Container.name; + proxy.capacity = this.capacity; + } + + proxy.size = this.size; + proxy.dimension = this.dimension; + + // Trick so that node displays the name of the constructor + Object.defineProperty(proxy, 'constructor', { + value: MultiArray, + enumerable: false + }); + + return proxy; +}; + +if (typeof Symbol !== 'undefined') + MultiArray.prototype[Symbol.for('nodejs.util.inspect.custom')] = MultiArray.prototype.inspect; + +// TODO: .from + +/** + * Exporting. 
+ */ +module.exports = MultiArray; diff --git a/amplify/functions/deleteDocument/node_modules/mnemonist/multi-map.d.ts b/amplify/functions/deleteDocument/node_modules/mnemonist/multi-map.d.ts new file mode 100644 index 0000000..e4c8543 --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/mnemonist/multi-map.d.ts @@ -0,0 +1,47 @@ +/** + * Mnemonist MultiMap Typings + * =========================== + */ + +interface MultiMap = V[]> extends Iterable<[K, V]> { + + // Members + dimension: number; + size: number; + + // Methods + clear(): void; + set(key: K, value: V): this; + delete(key: K): boolean; + remove(key: K, value: V): boolean; + has(key: K): boolean; + get(key: K): C | undefined; + multiplicity(key: K): number; + forEach(callback: (value: V, key: K, map: this) => void, scope?: any): void; + forEachAssociation(callback: (value: C, key: K, map: this) => void, scope?: any): void; + keys(): IterableIterator; + values(): IterableIterator; + entries(): IterableIterator<[K, V]>; + containers(): IterableIterator; + associations(): IterableIterator<[K, C]>; + [Symbol.iterator](): IterableIterator<[K, V]>; + inspect(): any; + toJSON(): any; +} + +interface MultiMapConstructor { + new (container: SetConstructor): MultiMap>; + new (container?: ArrayConstructor): MultiMap; + + from( + iterable: Iterable<[K, V]> | {[key: string]: V}, + Container: SetConstructor + ): MultiMap>; + from( + iterable: Iterable<[K, V]> | {[key: string]: V}, + Container?: ArrayConstructor + ): MultiMap; +} + +declare const MultiMap: MultiMapConstructor; +export default MultiMap; \ No newline at end of file diff --git a/amplify/functions/deleteDocument/node_modules/mnemonist/multi-map.js b/amplify/functions/deleteDocument/node_modules/mnemonist/multi-map.js new file mode 100644 index 0000000..0b36e15 --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/mnemonist/multi-map.js @@ -0,0 +1,408 @@ +/** + * Mnemonist MultiMap + * =================== + * + * Implementation of a 
MultiMap with custom container. + */ +var Iterator = require('obliterator/iterator'), + forEach = require('obliterator/foreach'); + +/** + * MultiMap. + * + * @constructor + */ +function MultiMap(Container) { + + this.Container = Container || Array; + this.items = new Map(); + this.clear(); + + Object.defineProperty(this.items, 'constructor', { + value: MultiMap, + enumerable: false + }); +} + +/** + * Method used to clear the structure. + * + * @return {undefined} + */ +MultiMap.prototype.clear = function() { + + // Properties + this.size = 0; + this.dimension = 0; + this.items.clear(); +}; + +/** + * Method used to set a value. + * + * @param {any} key - Key. + * @param {any} value - Value to add. + * @return {MultiMap} + */ +MultiMap.prototype.set = function(key, value) { + var container = this.items.get(key), + sizeBefore; + + if (!container) { + this.dimension++; + container = new this.Container(); + this.items.set(key, container); + } + + if (this.Container === Set) { + sizeBefore = container.size; + container.add(value); + + if (sizeBefore < container.size) + this.size++; + } + else { + container.push(value); + this.size++; + } + + return this; +}; + +/** + * Method used to delete the given key. + * + * @param {any} key - Key to delete. + * @return {boolean} + */ +MultiMap.prototype.delete = function(key) { + var container = this.items.get(key); + + if (!container) + return false; + + this.size -= (this.Container === Set ? container.size : container.length); + this.dimension--; + this.items.delete(key); + + return true; +}; + +/** + * Method used to delete the remove an item in the container stored at the + * given key. + * + * @param {any} key - Key to delete. 
+ * @return {boolean} + */ +MultiMap.prototype.remove = function(key, value) { + var container = this.items.get(key), + wasDeleted, + index; + + if (!container) + return false; + + if (this.Container === Set) { + wasDeleted = container.delete(value); + + if (wasDeleted) + this.size--; + + if (container.size === 0) { + this.items.delete(key); + this.dimension--; + } + + return wasDeleted; + } + else { + index = container.indexOf(value); + + if (index === -1) + return false; + + this.size--; + + if (container.length === 1) { + this.items.delete(key); + this.dimension--; + + return true; + } + + container.splice(index, 1); + + return true; + } +}; + +/** + * Method used to return whether the given keys exists in the map. + * + * @param {any} key - Key to check. + * @return {boolean} + */ +MultiMap.prototype.has = function(key) { + return this.items.has(key); +}; + +/** + * Method used to return the container stored at the given key or `undefined`. + * + * @param {any} key - Key to get. + * @return {boolean} + */ +MultiMap.prototype.get = function(key) { + return this.items.get(key); +}; + +/** + * Method used to return the multiplicity of the given key, meaning the number + * of times it is set, or, more trivially, the size of the attached container. + * + * @param {any} key - Key to check. + * @return {number} + */ +MultiMap.prototype.multiplicity = function(key) { + var container = this.items.get(key); + + if (typeof container === 'undefined') + return 0; + + return this.Container === Set ? container.size : container.length; +}; +MultiMap.prototype.count = MultiMap.prototype.multiplicity; + +/** + * Method used to iterate over each of the key/value pairs. + * + * @param {function} callback - Function to call for each item. + * @param {object} scope - Optional scope. + * @return {undefined} + */ +MultiMap.prototype.forEach = function(callback, scope) { + scope = arguments.length > 1 ? 
scope : this; + + // Inner iteration function is created here to avoid creating it in the loop + var key; + function inner(value) { + callback.call(scope, value, key); + } + + this.items.forEach(function(container, k) { + key = k; + container.forEach(inner); + }); +}; + +/** + * Method used to iterate over each of the associations. + * + * @param {function} callback - Function to call for each item. + * @param {object} scope - Optional scope. + * @return {undefined} + */ +MultiMap.prototype.forEachAssociation = function(callback, scope) { + scope = arguments.length > 1 ? scope : this; + + this.items.forEach(callback, scope); +}; + +/** + * Method returning an iterator over the map's keys. + * + * @return {Iterator} + */ +MultiMap.prototype.keys = function() { + return this.items.keys(); +}; + +/** + * Method returning an iterator over the map's keys. + * + * @return {Iterator} + */ +MultiMap.prototype.values = function() { + var iterator = this.items.values(), + inContainer = false, + countainer, + step, + i, + l; + + if (this.Container === Set) + return new Iterator(function next() { + if (!inContainer) { + step = iterator.next(); + + if (step.done) + return {done: true}; + + inContainer = true; + countainer = step.value.values(); + } + + step = countainer.next(); + + if (step.done) { + inContainer = false; + return next(); + } + + return { + done: false, + value: step.value + }; + }); + + return new Iterator(function next() { + if (!inContainer) { + step = iterator.next(); + + if (step.done) + return {done: true}; + + inContainer = true; + countainer = step.value; + i = 0; + l = countainer.length; + } + + if (i >= l) { + inContainer = false; + return next(); + } + + return { + done: false, + value: countainer[i++] + }; + }); +}; + +/** + * Method returning an iterator over the map's entries. 
+ * + * @return {Iterator} + */ +MultiMap.prototype.entries = function() { + var iterator = this.items.entries(), + inContainer = false, + countainer, + step, + key, + i, + l; + + if (this.Container === Set) + return new Iterator(function next() { + if (!inContainer) { + step = iterator.next(); + + if (step.done) + return {done: true}; + + inContainer = true; + key = step.value[0]; + countainer = step.value[1].values(); + } + + step = countainer.next(); + + if (step.done) { + inContainer = false; + return next(); + } + + return { + done: false, + value: [key, step.value] + }; + }); + + return new Iterator(function next() { + if (!inContainer) { + step = iterator.next(); + + if (step.done) + return {done: true}; + + inContainer = true; + key = step.value[0]; + countainer = step.value[1]; + i = 0; + l = countainer.length; + } + + if (i >= l) { + inContainer = false; + return next(); + } + + return { + done: false, + value: [key, countainer[i++]] + }; + }); +}; + +/** + * Method returning an iterator over the map's containers. + * + * @return {Iterator} + */ +MultiMap.prototype.containers = function() { + return this.items.values(); +}; + +/** + * Method returning an iterator over the map's associations. + * + * @return {Iterator} + */ +MultiMap.prototype.associations = function() { + return this.items.entries(); +}; + +/** + * Attaching the #.entries method to Symbol.iterator if possible. + */ +if (typeof Symbol !== 'undefined') + MultiMap.prototype[Symbol.iterator] = MultiMap.prototype.entries; + +/** + * Convenience known methods. + */ +MultiMap.prototype.inspect = function() { + return this.items; +}; + +if (typeof Symbol !== 'undefined') + MultiMap.prototype[Symbol.for('nodejs.util.inspect.custom')] = MultiMap.prototype.inspect; +MultiMap.prototype.toJSON = function() { + return this.items; +}; + +/** + * Static @.from function taking an arbitrary iterable & converting it into + * a structure. + * + * @param {Iterable} iterable - Target iterable. 
+ * @param {Class} Container - Container. + * @return {MultiMap} + */ +MultiMap.from = function(iterable, Container) { + var map = new MultiMap(Container); + + forEach(iterable, function(value, key) { + map.set(key, value); + }); + + return map; +}; + +/** + * Exporting. + */ +module.exports = MultiMap; diff --git a/amplify/functions/deleteDocument/node_modules/mnemonist/multi-set.d.ts b/amplify/functions/deleteDocument/node_modules/mnemonist/multi-set.d.ts new file mode 100644 index 0000000..0e40bc4 --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/mnemonist/multi-set.d.ts @@ -0,0 +1,37 @@ +/** + * Mnemonist MultiSet Typings + * =========================== + */ +export default class MultiSet implements Iterable { + + // Members + dimension: number; + size: number; + + // Methods + clear(): void; + add(key: K, count?: number): this; + set(key: K, count: number): this; + has(key: K): boolean; + delete(key: K): boolean; + remove(key: K, count?: number): void; + edit(a: K, b: K): this; + multiplicity(key: K): number; + count(key: K): number; + get(key: K): number; + frequency(key: K): number; + top(n: number): Array<[K, number]>; + forEach(callback: (value: K, key: K, set: this) => void, scope?: any): void; + forEachMultiplicity(callback: (value: number, key: K, set: this) => void, scope?: any): void; + keys(): IterableIterator; + values(): IterableIterator; + multiplicities(): IterableIterator<[K, number]>; + [Symbol.iterator](): IterableIterator; + inspect(): any; + toJSON(): any; + + // Statics + static from(iterable: Iterable | {[key: string]: I}): MultiSet; + static isSubset(a: MultiSet, b: MultiSet): boolean; + static isSuperset(a: MultiSet, b: MultiSet): boolean; +} diff --git a/amplify/functions/deleteDocument/node_modules/mnemonist/multi-set.js b/amplify/functions/deleteDocument/node_modules/mnemonist/multi-set.js new file mode 100644 index 0000000..3206af7 --- /dev/null +++ 
b/amplify/functions/deleteDocument/node_modules/mnemonist/multi-set.js @@ -0,0 +1,440 @@ +/** + * Mnemonist MultiSet + * ==================== + * + * JavaScript implementation of a MultiSet. + */ +var Iterator = require('obliterator/iterator'), + forEach = require('obliterator/foreach'), + FixedReverseHeap = require('./fixed-reverse-heap.js'); + +/** + * Helpers. + */ +var MULTISET_ITEM_COMPARATOR = function(a, b) { + if (a[1] > b[1]) + return -1; + if (a[1] < b[1]) + return 1; + + return 0; +}; + +// TODO: helper functions: union, intersection, sum, difference, subtract + +/** + * MultiSet. + * + * @constructor + */ +function MultiSet() { + this.items = new Map(); + + Object.defineProperty(this.items, 'constructor', { + value: MultiSet, + enumerable: false + }); + + this.clear(); +} + +/** + * Method used to clear the structure. + * + * @return {undefined} + */ +MultiSet.prototype.clear = function() { + + // Properties + this.size = 0; + this.dimension = 0; + this.items.clear(); +}; + +/** + * Method used to add an item to the set. + * + * @param {any} item - Item to add. + * @param {number} count - Optional count. + * @return {MultiSet} + */ +MultiSet.prototype.add = function(item, count) { + if (count === 0) + return this; + + if (count < 0) + return this.remove(item, -count); + + count = count || 1; + + if (typeof count !== 'number') + throw new Error('mnemonist/multi-set.add: given count should be a number.'); + + this.size += count; + + const currentCount = this.items.get(item); + + if (currentCount === undefined) + this.dimension++; + else + count += currentCount; + + this.items.set(item, count); + + return this; +}; + +/** + * Method used to set the multiplicity of an item in the set. + * + * @param {any} item - Target item. + * @param {number} count - Desired multiplicity. 
+ * @return {MultiSet} + */ +MultiSet.prototype.set = function(item, count) { + var currentCount; + + if (typeof count !== 'number') + throw new Error('mnemonist/multi-set.set: given count should be a number.'); + + // Setting an item to 0 or to a negative number means deleting it from the set + if (count <= 0) { + currentCount = this.items.get(item); + + if (typeof currentCount !== 'undefined') { + this.size -= currentCount; + this.dimension--; + } + + this.items.delete(item); + return this; + } + + count = count || 1; + + currentCount = this.items.get(item); + + if (typeof currentCount === 'number') { + this.items.set(item, currentCount + count); + } + else { + this.dimension++; + this.items.set(item, count); + } + + this.size += count; + + return this; +}; + +/** + * Method used to return whether the item exists in the set. + * + * @param {any} item - Item to check. + * @return {boolan} + */ +MultiSet.prototype.has = function(item) { + return this.items.has(item); +}; + +/** + * Method used to delete an item from the set. + * + * @param {any} item - Item to delete. + * @return {boolan} + */ +MultiSet.prototype.delete = function(item) { + var count = this.items.get(item); + + if (count === 0) + return false; + + this.size -= count; + this.dimension--; + this.items.delete(item); + + return true; +}; + +/** + * Method used to remove an item from the set. + * + * @param {any} item - Item to delete. + * @param {number} count - Optional count. 
+ * @return {undefined} + */ +MultiSet.prototype.remove = function(item, count) { + if (count === 0) + return; + + if (count < 0) + return this.add(item, -count); + + count = count || 1; + + if (typeof count !== 'number') + throw new Error('mnemonist/multi-set.remove: given count should be a number.'); + + var currentCount = this.multiplicity(item), + newCount = Math.max(0, currentCount - count); + + if (newCount === 0) { + this.delete(item); + } + else { + this.items.set(item, newCount); + this.size -= (currentCount - newCount); + } + + return; +}; + +/** + * Method used to change a key into another one, merging counts if the target + * key already exists. + * + * @param {any} a - From key. + * @param {any} b - To key. + * @return {MultiSet} + */ +MultiSet.prototype.edit = function(a, b) { + var am = this.multiplicity(a); + + // If a does not exist in the set, we can stop right there + if (am === 0) + return; + + var bm = this.multiplicity(b); + + this.items.set(b, am + bm); + this.items.delete(a); + + return this; +}; + +/** + * Method used to return the multiplicity of the given item. + * + * @param {any} item - Item to get. + * @return {number} + */ +MultiSet.prototype.multiplicity = function(item) { + var count = this.items.get(item); + + if (typeof count === 'undefined') + return 0; + + return count; +}; +MultiSet.prototype.get = MultiSet.prototype.multiplicity; +MultiSet.prototype.count = MultiSet.prototype.multiplicity; + +/** + * Method used to return the frequency of the given item in the set. + * + * @param {any} item - Item to get. + * @return {number} + */ +MultiSet.prototype.frequency = function(item) { + if (this.size === 0) + return 0; + + var count = this.multiplicity(item); + + return count / this.size; +}; + +/** + * Method used to return the n most common items from the set. + * + * @param {number} n - Number of items to retrieve. 
+ * @return {array} + */ +MultiSet.prototype.top = function(n) { + if (typeof n !== 'number' || n <= 0) + throw new Error('mnemonist/multi-set.top: n must be a number > 0.'); + + var heap = new FixedReverseHeap(Array, MULTISET_ITEM_COMPARATOR, n); + + var iterator = this.items.entries(), + step; + + while ((step = iterator.next(), !step.done)) + heap.push(step.value); + + return heap.consume(); +}; + +/** + * Method used to iterate over the set's values. + * + * @param {function} callback - Function to call for each item. + * @param {object} scope - Optional scope. + * @return {undefined} + */ +MultiSet.prototype.forEach = function(callback, scope) { + scope = arguments.length > 1 ? scope : this; + + var i; + + this.items.forEach(function(multiplicity, value) { + + for (i = 0; i < multiplicity; i++) + callback.call(scope, value, value); + }); +}; + +/** + * Method used to iterate over the set's multiplicities. + * + * @param {function} callback - Function to call for each multiplicity. + * @param {object} scope - Optional scope. + * @return {undefined} + */ +MultiSet.prototype.forEachMultiplicity = function(callback, scope) { + scope = arguments.length > 1 ? scope : this; + + this.items.forEach(callback, scope); +}; + +/** + * Method returning an iterator over the set's keys. I.e. its unique values, + * in a sense. + * + * @return {Iterator} + */ +MultiSet.prototype.keys = function() { + return this.items.keys(); +}; + +/** + * Method returning an iterator over the set's values. 
+ * + * @return {Iterator} + */ +MultiSet.prototype.values = function() { + var iterator = this.items.entries(), + inContainer = false, + step, + value, + multiplicity, + i; + + return new Iterator(function next() { + if (!inContainer) { + step = iterator.next(); + + if (step.done) + return {done: true}; + + inContainer = true; + value = step.value[0]; + multiplicity = step.value[1]; + i = 0; + } + + if (i >= multiplicity) { + inContainer = false; + return next(); + } + + i++; + + return { + done: false, + value: value + }; + }); +}; + +/** + * Method returning an iterator over the set's multiplicities. + * + * @return {Iterator} + */ +MultiSet.prototype.multiplicities = function() { + return this.items.entries(); +}; + +/** + * Attaching the #.entries method to Symbol.iterator if possible. + */ +if (typeof Symbol !== 'undefined') + MultiSet.prototype[Symbol.iterator] = MultiSet.prototype.values; + +/** + * Convenience known methods. + */ +MultiSet.prototype.inspect = function() { + return this.items; +}; + +if (typeof Symbol !== 'undefined') + MultiSet.prototype[Symbol.for('nodejs.util.inspect.custom')] = MultiSet.prototype.inspect; +MultiSet.prototype.toJSON = function() { + return this.items; +}; + +/** + * Static @.from function taking an arbitrary iterable & converting it into + * a structure. + * + * @param {Iterable} iterable - Target iterable. + * @return {MultiSet} + */ +MultiSet.from = function(iterable) { + var set = new MultiSet(); + + forEach(iterable, function(value) { + set.add(value); + }); + + return set; +}; + +/** + * Function returning whether the multiset A is a subset of the multiset B. + * + * @param {MultiSet} A - First set. + * @param {MultiSet} B - Second set. 
+ * @return {boolean} + */ +MultiSet.isSubset = function(A, B) { + var iterator = A.multiplicities(), + step, + key, + mA; + + // Shortcuts + if (A === B) + return true; + + if (A.dimension > B.dimension) + return false; + + while ((step = iterator.next(), !step.done)) { + key = step.value[0]; + mA = step.value[1]; + + if (B.multiplicity(key) < mA) + return false; + } + + return true; +}; + +/** + * Function returning whether the multiset A is a superset of the multiset B. + * + * @param {MultiSet} A - First set. + * @param {MultiSet} B - Second set. + * @return {boolean} + */ +MultiSet.isSuperset = function(A, B) { + return MultiSet.isSubset(B, A); +}; + +/** + * Exporting. + */ +module.exports = MultiSet; diff --git a/amplify/functions/deleteDocument/node_modules/mnemonist/package.json b/amplify/functions/deleteDocument/node_modules/mnemonist/package.json new file mode 100644 index 0000000..79e8f19 --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/mnemonist/package.json @@ -0,0 +1,119 @@ +{ + "name": "mnemonist", + "version": "0.38.3", + "description": "Curated collection of data structures for the JavaScript language.", + "scripts": { + "lint": "eslint ./*.js ./utils ./test", + "prepublish": "npm run lint && npm test && npm run test:types", + "test": "mocha", + "test:types": "tsc --target es2015 --noEmit --noImplicitAny --noImplicitReturns ./test/types.ts" + }, + "main": "./index.js", + "types": "./index.d.ts", + "files": [ + "sort", + "utils", + "*.d.ts", + "*.js" + ], + "repository": { + "type": "git", + "url": "git+https://github.com/yomguithereal/mnemonist.git" + }, + "keywords": [ + "bag", + "bimap", + "bit array", + "bit set", + "bit vector", + "bitset", + "bk tree", + "burkhard-keller tree", + "cache", + "circular buffer", + "counter", + "data structures", + "default map", + "deque", + "disjoint set", + "fibonacci heap", + "fuzzy map", + "hashed array tree", + "heap", + "interval tree", + "inverted index", + "kd tree", + "linked list", + 
"lru", + "lru cache", + "multimap", + "multiset", + "passjoin", + "queue", + "sparse map", + "sparse set", + "stack", + "structures", + "suffix tree", + "symspell", + "trie", + "union find", + "vantage point tree", + "vector", + "vp tree" + ], + "author": { + "name": "Guillaume Plique", + "url": "http://github.com/Yomguithereal" + }, + "license": "MIT", + "bugs": { + "url": "https://github.com/yomguithereal/mnemonist/issues" + }, + "homepage": "https://github.com/yomguithereal/mnemonist#readme", + "dependencies": { + "obliterator": "^1.6.1" + }, + "devDependencies": { + "@yomguithereal/eslint-config": "^4.0.0", + "asciitree": "^1.0.2", + "damerau-levenshtein": "^1.0.6", + "eslint": "^7.21.0", + "leven": "^3.1.0", + "lodash": "^4.17.21", + "matcha": "^0.7.0", + "mocha": "^8.3.0", + "pandemonium": "^2.0.0", + "seedrandom": "^3.0.5", + "static-kdtree": "^1.0.2", + "typescript": "^4.2.2" + }, + "eslintConfig": { + "extends": "@yomguithereal/eslint-config", + "globals": { + "Set": true, + "Map": true, + "WeakMap": true, + "Symbol": true, + "ArrayBuffer": true, + "Uint8Array": true, + "Uint8ClampedArray": true, + "Uint16Array": true, + "Uint32Array": true, + "Int8Array": true, + "Int16Array": true, + "Int32Array": true, + "Float32Array": true, + "Float64Array": true + }, + "parserOptions": { + "ecmaVersion": 6, + "ecmaFeatures": { + "forOf": true + } + }, + "rules": { + "no-new": 0 + } + } +} diff --git a/amplify/functions/deleteDocument/node_modules/mnemonist/passjoin-index.d.ts b/amplify/functions/deleteDocument/node_modules/mnemonist/passjoin-index.d.ts new file mode 100644 index 0000000..4d91746 --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/mnemonist/passjoin-index.d.ts @@ -0,0 +1,54 @@ +/** + * Mnemonist PassjoinIndex Typings + * ================================ + */ +type LevenshteinDistanceFunction = (a: T, b: T) => number; + +export default class PassjoinIndex implements Iterable { + + // Members + size: number; + + // Constructor + 
constructor(levenshtein: LevenshteinDistanceFunction, k: number); + + // Methods + add(value: T): this; + search(query: T): Set; + clear(): void; + forEach(callback: (value: T, index: number, self: this) => void, scope?: any): void; + values(): IterableIterator; + [Symbol.iterator](): IterableIterator; + inspect(): any; + + // Statics + static from( + iterable: Iterable | {[key: string] : I}, + levenshtein: LevenshteinDistanceFunction, + k: number + ): PassjoinIndex; +} + +export function countKeys(k: number, s: number): number; +export function comparator(a: T, b: T): number; +export function partition(k: number, l: number): Array<[number, number]>; +export function segments(k: number, string: T): Array; +export function segmentPos(k: number, i: number, string: T): number; + +export function multiMatchAwareInterval( + k: number, + delta: number, + i: number, + s: number, + pi: number, + li: number +): [number, number]; + +export function multiMatchAwareSubstrings( + k: number, + string: T, + l: number, + i: number, + pi: number, + li: number +): Array; diff --git a/amplify/functions/deleteDocument/node_modules/mnemonist/passjoin-index.js b/amplify/functions/deleteDocument/node_modules/mnemonist/passjoin-index.js new file mode 100644 index 0000000..652d614 --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/mnemonist/passjoin-index.js @@ -0,0 +1,518 @@ +/** + * Mnemonist PassjoinIndex + * ======================== + * + * The PassjoinIndex is an index leveraging the "passjoin" algorithm as a mean + * to index strings for Levenshtein distance queries. It features a complexity + * related to the Levenshtein query threshold k rather than the number of + * strings to test (roughly O(k^3)). + * + * [References]: + * Jiang, Yu, Dong Deng, Jiannan Wang, Guoliang Li, et Jianhua Feng. + * « Efficient Parallel Partition-Based Algorithms for Similarity Search and Join + * with Edit Distance Constraints ». 
In Proceedings of the Joint EDBT/ICDT 2013 + * Workshops on - EDBT ’13, 341. Genoa, Italy: ACM Press, 2013. + * https://doi.org/10.1145/2457317.2457382. + * + * Li, Guoliang, Dong Deng, et Jianhua Feng. « A Partition-Based Method for + * String Similarity Joins with Edit-Distance Constraints ». ACM Transactions on + * Database Systems 38, no 2 (1 juin 2013): 1‑33. + * https://doi.org/10.1145/2487259.2487261. + * + * [Urls]: + * http://people.csail.mit.edu/dongdeng/projects/passjoin/index.html + */ +var Iterator = require('obliterator/iterator'), + forEach = require('obliterator/foreach'); + +// TODO: leveraging BagDistance as an upper bound of Levenshtein +// TODO: leverage n-grams recursive indexing +// TODO: try the MultiArray as a memory backend +// TODO: what about damerau levenshtein + +/** + * Helpers. + */ + +/** + * Function returning the number of substrings that will be selected by the + * multi-match-aware selection scheme for theshold `k`, for a string of length + * `s` to match strings of length `l`. + * + * @param {number} k - Levenshtein distance threshold. + * @param {number} s - Length of target strings. + * @param {number} l - Length of strings to match. + * @returns {number} - The number of selected substrings. + */ +function countSubstringsL(k, s, l) { + return (((Math.pow(k, 2) - Math.pow(Math.abs(s - l), 2)) / 2) | 0) + k + 1; +} + +/** + * Function returning the minimum number of substrings that will be selected by + * the multi-match-aware selection scheme for theshold `k`, for a string of + * length `s` to match any string of relevant length. + * + * @param {number} k - Levenshtein distance threshold. + * @param {number} s - Length of target strings. + * @returns {number} - The number of selected substrings. 
+ */ +function countKeys(k, s) { + var c = 0; + + for (var l = 0, m = s + 1; l < m; l++) + c += countSubstringsL(k, s, l); + + return c; +} + +/** + * Function used to compare two keys in order to sort them first by decreasing + * length and then alphabetically as per the "4.2 Effective Indexing Strategy" + * point of the paper. + * + * @param {number} k - Levenshtein distance threshold. + * @param {number} s - Length of target strings. + * @returns {number} - The number of selected substrings. + */ +function comparator(a, b) { + if (a.length > b.length) + return -1; + if (a.length < b.length) + return 1; + + if (a < b) + return -1; + if (a > b) + return 1; + + return 0; +} + +/** + * Function partitioning a string into k + 1 uneven segments, the shorter + * ones, then the longer ones. + * + * @param {number} k - Levenshtein distance threshold. + * @param {number} l - Length of the string. + * @returns {Array} - The partition tuples (start, length). + */ +function partition(k, l) { + var m = k + 1, + a = (l / m) | 0, + b = a + 1, + i, + j; + + var largeSegments = l - a * m, + smallSegments = m - largeSegments; + + var tuples = new Array(k + 1); + + for (i = 0; i < smallSegments; i++) + tuples[i] = [i * a, a]; + + var offset = (i - 1) * a + a; + + for (j = 0; j < largeSegments; j++) + tuples[i + j] = [offset + j * b, b]; + + return tuples; +} + +/** + * Function yielding a string's k + 1 passjoin segments to index. + * + * @param {number} k - Levenshtein distance threshold. + * @param {string} string - Target string. + * @returns {Array} - The string's segments. 
+ */ +function segments(k, string) { + var l = string.length, + m = k + 1, + a = (l / m) | 0, + b = a + 1, + o, + i, + j; + + var largeSegments = l - a * m, + smallSegments = m - largeSegments; + + var S = new Array(k + 1); + + for (i = 0; i < smallSegments; i++) { + o = i * a; + S[i] = string.slice(o, o + a); + } + + var offset = (i - 1) * a + a; + + for (j = 0; j < largeSegments; j++) { + o = offset + j * b; + S[i + j] = string.slice(o, o + b); + } + + return S; +} + +// TODO: jsdocs +function segmentPos(k, i, string) { + if (i === 0) + return 0; + + var l = string.length; + + var m = k + 1, + a = (l / m) | 0, + b = a + 1; + + var largeSegments = l - a * m, + smallSegments = m - largeSegments; + + if (i <= smallSegments - 1) + return i * a; + + var offset = i - smallSegments; + + return smallSegments * a + offset * b; +} + +/** + * Function returning the interval of relevant substrings to lookup using the + * multi-match-aware substring selection scheme described in the paper. + * + * @param {number} k - Levenshtein distance threshold. + * @param {number} delta - Signed length difference between both considered strings. + * @param {number} i - k + 1 segment index. + * @param {number} s - String's length. + * @param {number} pi - k + 1 segment position in target string. + * @param {number} li - k + 1 segment length. + * @returns {Array} - The interval (start, stop). + */ +function multiMatchAwareInterval(k, delta, i, s, pi, li) { + var start1 = pi - i, + end1 = pi + i; + + var o = k - i; + + var start2 = pi + delta - o, + end2 = pi + delta + o; + + var end3 = s - li; + + return [Math.max(0, start1, start2), Math.min(end1, end2, end3)]; +} + +/** + * Function yielding relevant substrings to lookup using the multi-match-aware + * substring selection scheme described in the paper. + * + * @param {number} k - Levenshtein distance threshold. + * @param {string} string - Target string. + * @param {number} l - Length of strings to match. 
+ * @param {number} i - k + 1 segment index. + * @param {number} pi - k + 1 segment position in target string. + * @param {number} li - k + 1 segment length. + * @returns {Array} - The contiguous substrings. + */ +function multiMatchAwareSubstrings(k, string, l, i, pi, li) { + var s = string.length; + + // Note that we need to keep the non-absolute delta for this function + // to work in both directions, up & down + var delta = s - l; + + var interval = multiMatchAwareInterval(k, delta, i, s, pi, li); + + var start = interval[0], + stop = interval[1]; + + var currentSubstring = ''; + + var substrings = []; + + var substring, j, m; + + for (j = start, m = stop + 1; j < m; j++) { + substring = string.slice(j, j + li); + + // We skip identical consecutive substrings (to avoid repetition in case + // of contiguous letter duplication) + if (substring === currentSubstring) + continue; + + substrings.push(substring); + + currentSubstring = substring; + } + + return substrings; +} + +/** + * PassjoinIndex. + * + * @note I tried to apply the paper's optimizations regarding Levenshtein + * distance computations but it did not provide a performance boost, quite + * the contrary. This is because since we are mostly using the index for small k + * here, most of the strings we work on are quite small and the bookkeeping + * induced by Ukkonen's method and the paper's one are slowing us down more than + * they actually help us go faster. + * + * @note This implementation does not try to ensure that you add the same string + * more than once. + * + * @constructor + * @param {function} levenshtein - Levenshtein distance function. + * @param {number} k - Levenshtein distance threshold. 
+ */ +function PassjoinIndex(levenshtein, k) { + if (typeof levenshtein !== 'function') + throw new Error('mnemonist/passjoin-index: `levenshtein` should be a function returning edit distance between two strings.'); + + if (typeof k !== 'number' || k < 1) + throw new Error('mnemonist/passjoin-index: `k` should be a number > 0'); + + this.levenshtein = levenshtein; + this.k = k; + this.clear(); +} + +/** + * Method used to clear the structure. + * + * @return {undefined} + */ +PassjoinIndex.prototype.clear = function() { + + // Properties + this.size = 0; + this.strings = []; + this.invertedIndices = {}; +}; + +/** + * Method used to add a new value to the index. + * + * @param {string|Array} value - Value to add. + * @return {PassjoinIndex} + */ +PassjoinIndex.prototype.add = function(value) { + var l = value.length; + + var stringIndex = this.size; + + this.strings.push(value); + this.size++; + + var S = segments(this.k, value); + + var Ll = this.invertedIndices[l]; + + if (typeof Ll === 'undefined') { + Ll = {}; + this.invertedIndices[l] = Ll; + } + + var segment, + matches, + key, + i, + m; + + for (i = 0, m = S.length; i < m; i++) { + segment = S[i]; + key = segment + i; + matches = Ll[key]; + + if (typeof matches === 'undefined') { + matches = [stringIndex]; + Ll[key] = matches; + } + else { + matches.push(stringIndex); + } + } + + return this; +}; + +/** + * Method used to search for string matching the given query. + * + * @param {string|Array} query - Query string. 
+ * @return {Array} + */ +PassjoinIndex.prototype.search = function(query) { + var s = query.length, + k = this.k; + + var M = new Set(); + + var candidates, + candidate, + queryPos, + querySegmentLength, + key, + S, + P, + l, + m, + i, + n1, + j, + n2, + y, + n3; + + for (l = Math.max(0, s - k), m = s + k + 1; l < m; l++) { + var Ll = this.invertedIndices[l]; + + if (typeof Ll === 'undefined') + continue; + + P = partition(k, l); + + for (i = 0, n1 = P.length; i < n1; i++) { + queryPos = P[i][0]; + querySegmentLength = P[i][1]; + + S = multiMatchAwareSubstrings( + k, + query, + l, + i, + queryPos, + querySegmentLength + ); + + // Empty string edge case + if (!S.length) + S = ['']; + + for (j = 0, n2 = S.length; j < n2; j++) { + key = S[j] + i; + candidates = Ll[key]; + + if (typeof candidates === 'undefined') + continue; + + for (y = 0, n3 = candidates.length; y < n3; y++) { + candidate = this.strings[candidates[y]]; + + // NOTE: first condition is here not to compute Levenshtein + // distance for tiny strings + + // NOTE: maintaining a Set of rejected candidate is not really useful + // because it consumes more memory and because non-matches are + // less likely to be candidates agains + if ( + s <= k && l <= k || + ( + !M.has(candidate) && + this.levenshtein(query, candidate) <= k + ) + ) + M.add(candidate); + } + } + } + } + + return M; +}; + +/** + * Method used to iterate over the index. + * + * @param {function} callback - Function to call for each item. + * @param {object} scope - Optional scope. + * @return {undefined} + */ +PassjoinIndex.prototype.forEach = function(callback, scope) { + scope = arguments.length > 1 ? scope : this; + + for (var i = 0, l = this.strings.length; i < l; i++) + callback.call(scope, this.strings[i], i, this); +}; + +/** + * Method used to create an iterator over a index's values. 
+ * + * @return {Iterator} + */ +PassjoinIndex.prototype.values = function() { + var strings = this.strings, + l = strings.length, + i = 0; + + return new Iterator(function() { + if (i >= l) + return { + done: true + }; + + var value = strings[i]; + i++; + + return { + value: value, + done: false + }; + }); +}; + +/** + * Attaching the #.values method to Symbol.iterator if possible. + */ +if (typeof Symbol !== 'undefined') + PassjoinIndex.prototype[Symbol.iterator] = PassjoinIndex.prototype.values; + +/** + * Convenience known methods. + */ +PassjoinIndex.prototype.inspect = function() { + var array = this.strings.slice(); + + // Trick so that node displays the name of the constructor + Object.defineProperty(array, 'constructor', { + value: PassjoinIndex, + enumerable: false + }); + + return array; +}; + +if (typeof Symbol !== 'undefined') + PassjoinIndex.prototype[Symbol.for('nodejs.util.inspect.custom')] = PassjoinIndex.prototype.inspect; + +/** + * Static @.from function taking an arbitrary iterable & converting it into + * a structure. + * + * @param {Iterable} iterable - Target iterable. + * @return {PassjoinIndex} + */ +PassjoinIndex.from = function(iterable, levenshtein, k) { + var index = new PassjoinIndex(levenshtein, k); + + forEach(iterable, function(string) { + index.add(string); + }); + + return index; +}; + +/** + * Exporting. 
+ */ +PassjoinIndex.countKeys = countKeys; +PassjoinIndex.comparator = comparator; +PassjoinIndex.partition = partition; +PassjoinIndex.segments = segments; +PassjoinIndex.segmentPos = segmentPos; +PassjoinIndex.multiMatchAwareInterval = multiMatchAwareInterval; +PassjoinIndex.multiMatchAwareSubstrings = multiMatchAwareSubstrings; + +module.exports = PassjoinIndex; diff --git a/amplify/functions/deleteDocument/node_modules/mnemonist/queue.d.ts b/amplify/functions/deleteDocument/node_modules/mnemonist/queue.d.ts new file mode 100644 index 0000000..2d3e434 --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/mnemonist/queue.d.ts @@ -0,0 +1,27 @@ +/** + * Mnemonist Queue Typings + * ======================== + */ +export default class Queue implements Iterable { + + // Members + size: number; + + // Methods + clear(): void; + enqueue(item: T): number; + dequeue(): T | undefined; + peek(): T | undefined; + forEach(callback: (item: T, index: number, queue: this) => void, scope?: any): void; + toArray(): Array; + values(): IterableIterator; + entries(): IterableIterator<[number, T]>; + [Symbol.iterator](): IterableIterator; + toString(): string; + toJSON(): Array; + inspect(): any; + + // Statics + static from(iterable: Iterable | {[key: string] : I}): Queue; + static of(...items: Array): Queue; +} diff --git a/amplify/functions/deleteDocument/node_modules/mnemonist/queue.js b/amplify/functions/deleteDocument/node_modules/mnemonist/queue.js new file mode 100644 index 0000000..aa554b6 --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/mnemonist/queue.js @@ -0,0 +1,215 @@ +/** + * Mnemonist Queue + * ================ + * + * Queue implementation based on the ideas of Queue.js that seems to beat + * a LinkedList one in performance. 
+ */ +var Iterator = require('obliterator/iterator'), + forEach = require('obliterator/foreach'); + +/** + * Queue + * + * @constructor + */ +function Queue() { + this.clear(); +} + +/** + * Method used to clear the queue. + * + * @return {undefined} + */ +Queue.prototype.clear = function() { + + // Properties + this.items = []; + this.offset = 0; + this.size = 0; +}; + +/** + * Method used to add an item to the queue. + * + * @param {any} item - Item to enqueue. + * @return {number} + */ +Queue.prototype.enqueue = function(item) { + + this.items.push(item); + return ++this.size; +}; + +/** + * Method used to retrieve & remove the first item of the queue. + * + * @return {any} + */ +Queue.prototype.dequeue = function() { + if (!this.size) + return; + + var item = this.items[this.offset]; + + if (++this.offset * 2 >= this.items.length) { + this.items = this.items.slice(this.offset); + this.offset = 0; + } + + this.size--; + + return item; +}; + +/** + * Method used to retrieve the first item of the queue. + * + * @return {any} + */ +Queue.prototype.peek = function() { + if (!this.size) + return; + + return this.items[this.offset]; +}; + +/** + * Method used to iterate over the queue. + * + * @param {function} callback - Function to call for each item. + * @param {object} scope - Optional scope. + * @return {undefined} + */ +Queue.prototype.forEach = function(callback, scope) { + scope = arguments.length > 1 ? scope : this; + + for (var i = this.offset, j = 0, l = this.items.length; i < l; i++, j++) + callback.call(scope, this.items[i], j, this); +}; + +/* + * Method used to convert the queue to a JavaScript array. + * + * @return {array} + */ +Queue.prototype.toArray = function() { + return this.items.slice(this.offset); +}; + +/** + * Method used to create an iterator over a queue's values. 
+ * + * @return {Iterator} + */ +Queue.prototype.values = function() { + var items = this.items, + i = this.offset; + + return new Iterator(function() { + if (i >= items.length) + return { + done: true + }; + + var value = items[i]; + i++; + + return { + value: value, + done: false + }; + }); +}; + +/** + * Method used to create an iterator over a queue's entries. + * + * @return {Iterator} + */ +Queue.prototype.entries = function() { + var items = this.items, + i = this.offset, + j = 0; + + return new Iterator(function() { + if (i >= items.length) + return { + done: true + }; + + var value = items[i]; + i++; + + return { + value: [j++, value], + done: false + }; + }); +}; + +/** + * Attaching the #.values method to Symbol.iterator if possible. + */ +if (typeof Symbol !== 'undefined') + Queue.prototype[Symbol.iterator] = Queue.prototype.values; + +/** + * Convenience known methods. + */ +Queue.prototype.toString = function() { + return this.toArray().join(','); +}; + +Queue.prototype.toJSON = function() { + return this.toArray(); +}; + +Queue.prototype.inspect = function() { + var array = this.toArray(); + + // Trick so that node displays the name of the constructor + Object.defineProperty(array, 'constructor', { + value: Queue, + enumerable: false + }); + + return array; +}; + +if (typeof Symbol !== 'undefined') + Queue.prototype[Symbol.for('nodejs.util.inspect.custom')] = Queue.prototype.inspect; + +/** + * Static @.from function taking an arbitrary iterable & converting it into + * a queue. + * + * @param {Iterable} iterable - Target iterable. + * @return {Queue} + */ +Queue.from = function(iterable) { + var queue = new Queue(); + + forEach(iterable, function(value) { + queue.enqueue(value); + }); + + return queue; +}; + +/** + * Static @.of function taking an arbitrary number of arguments & converting it + * into a queue. + * + * @param {...any} args + * @return {Queue} + */ +Queue.of = function() { + return Queue.from(arguments); +}; + +/** + * Exporting. 
+ */ +module.exports = Queue; diff --git a/amplify/functions/deleteDocument/node_modules/mnemonist/semi-dynamic-trie.js b/amplify/functions/deleteDocument/node_modules/mnemonist/semi-dynamic-trie.js new file mode 100644 index 0000000..6627d34 --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/mnemonist/semi-dynamic-trie.js @@ -0,0 +1,251 @@ +/* eslint no-constant-condition: 0 */ +/** + * Mnemonist SemiDynamicTrie + * ========================== + * + * Lowlevel Trie working at character level, storing information in typed + * array and organizing its children in linked lists. + * + * This implementation also uses a "fat node" strategy to boost access to some + * bloated node's children when the number of children rises above a certain + * threshold. + */ +var Vector = require('./vector.js'); + +// TODO: rename => ternary search tree + +/** + * Constants. + */ +const MAX_LINKED = 7; + +/** + * SemiDynamicTrie. + * + * @constructor + */ +function SemiDynamicTrie() { + + // Properties + + // TODO: make it 16 bits + this.characters = new Vector.Uint8Vector(256); + this.nextPointers = new Vector.Int32Vector(256); + this.childPointers = new Vector.Uint32Vector(256); + this.maps = new Vector.Uint32Vector(256); +} + +/** + * Method used to clear the structure. + * + * @return {undefined} + */ +SemiDynamicTrie.prototype.clear = function() { + + // Properties +}; + +SemiDynamicTrie.prototype.ensureSibling = function(block, character) { + var nextCharacter, + nextBlock, + newBlock; + + // Do we have a root? + if (this.characters.length === 0) { + + this.nextPointers.push(0); + this.childPointers.push(0); + this.characters.push(character); + + return block; + } + + // Are we traversing a fat node? 
+ var fatNode = this.nextPointers.array[block]; + + if (fatNode < 0) { + var mapIndex = -fatNode + character; + + nextBlock = this.maps.array[mapIndex]; + + if (nextBlock !== 0) + return nextBlock; + + newBlock = this.characters.length; + + this.nextPointers.push(0); + this.childPointers.push(0); + this.characters.push(character); + + this.maps.set(mapIndex, newBlock); + + return newBlock; + } + + var listLength = 1, + startingBlock = block; + + while (true) { + nextCharacter = this.characters.array[block]; + + if (nextCharacter === character) + return block; + + nextBlock = this.nextPointers.array[block]; + + if (nextBlock === 0) + break; + + listLength++; + block = nextBlock; + } + + // If the list is too long, we create a fat node + if (listLength > MAX_LINKED) { + block = startingBlock; + + var offset = this.maps.length; + + this.maps.resize(offset + 255); + this.maps.set(offset + 255, 0); + + while (true) { + nextBlock = this.nextPointers.array[block]; + + if (nextBlock === 0) + break; + + nextCharacter = this.characters.array[nextBlock]; + this.maps.set(offset + nextCharacter, nextBlock); + + block = nextBlock; + } + + this.nextPointers.set(startingBlock, -offset); + + newBlock = this.characters.length; + + this.nextPointers.push(0); + this.childPointers.push(0); + this.characters.push(character); + + this.maps.set(offset + character, newBlock); + + return newBlock; + } + + // Else, we append the character to the list + newBlock = this.characters.length; + + this.nextPointers.push(0); + this.childPointers.push(0); + this.nextPointers.set(block, newBlock); + this.characters.push(character); + + return newBlock; +}; + +SemiDynamicTrie.prototype.findSibling = function(block, character) { + var nextCharacter; + + // Do we have a fat node? 
+ var fatNode = this.nextPointers.array[block]; + + if (fatNode < 0) { + var mapIndex = -fatNode + character; + + var nextBlock = this.maps.array[mapIndex]; + + if (nextBlock === 0) + return -1; + + return nextBlock; + } + + while (true) { + nextCharacter = this.characters.array[block]; + + if (nextCharacter === character) + return block; + + block = this.nextPointers.array[block]; + + if (block === 0) + return -1; + } +}; + +SemiDynamicTrie.prototype.add = function(key) { + var keyCharacter, + childBlock, + block = 0; + + var i = 0, l = key.length; + + // Going as far as possible + while (i < l) { + keyCharacter = key.charCodeAt(i); + + // Ensuring a correct sibling exists + block = this.ensureSibling(block, keyCharacter); + + i++; + + if (i < l) { + + // Descending + childBlock = this.childPointers.array[block]; + + if (childBlock === 0) + break; + + block = childBlock; + } + } + + // Adding as many blocks as necessary + while (i < l) { + + childBlock = this.characters.length; + this.characters.push(key.charCodeAt(i)); + + this.childPointers.push(0); + this.nextPointers.push(0); + this.childPointers.set(block, childBlock); + + block = childBlock; + + i++; + } +}; + +SemiDynamicTrie.prototype.has = function(key) { + var i, l; + + var block = 0, + siblingBlock; + + for (i = 0, l = key.length; i < l; i++) { + siblingBlock = this.findSibling(block, key.charCodeAt(i)); + + if (siblingBlock === -1) + return false; + + // TODO: be sure + if (i === l - 1) + return true; + + block = this.childPointers.array[siblingBlock]; + + if (block === 0) + return false; + } + + // TODO: fix, should have a leaf pointer somehow + return true; +}; + +/** + * Exporting. 
+ */ +module.exports = SemiDynamicTrie; diff --git a/amplify/functions/deleteDocument/node_modules/mnemonist/set.d.ts b/amplify/functions/deleteDocument/node_modules/mnemonist/set.d.ts new file mode 100644 index 0000000..fc8dae8 --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/mnemonist/set.d.ts @@ -0,0 +1,18 @@ +/** + * Mnemonist Set Typings + * ====================== + */ +export function intersection(...set: Array>): Set; +export function union(...set: Array>): Set; +export function difference(a: Set, b: Set): Set; +export function symmetricDifference(a: Set, b: Set): Set; +export function isSubset(a: Set, b: Set): boolean; +export function isSuperset(a: Set, b: Set): boolean; +export function add(a: Set, b: Set): void; +export function subtract(a: Set, b: Set): void; +export function intersect(a: Set, b: Set): void; +export function disjunct(a: Set, b: Set): void; +export function intersectionSize(a: Set, b:Set): number; +export function unionSize(a: Set, b:Set): number; +export function jaccard(a: Set, b:Set): number; +export function overlap(a: Set, b: Set): number; diff --git a/amplify/functions/deleteDocument/node_modules/mnemonist/set.js b/amplify/functions/deleteDocument/node_modules/mnemonist/set.js new file mode 100644 index 0000000..e0d020b --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/mnemonist/set.js @@ -0,0 +1,356 @@ +/** + * Mnemonist Set + * ============== + * + * Useful function related to sets such as union, intersection and so on... + */ + +// TODO: optimize versions for less variadicities + +/** + * Variadic function computing the intersection of multiple sets. + * + * @param {...Set} sets - Sets to intersect. + * @return {Set} - The intesection. 
+ */ +exports.intersection = function() { + if (arguments.length < 2) + throw new Error('mnemonist/Set.intersection: needs at least two arguments.'); + + var I = new Set(); + + // First we need to find the smallest set + var smallestSize = Infinity, + smallestSet = null; + + var s, i, l = arguments.length; + + for (i = 0; i < l; i++) { + s = arguments[i]; + + // If one of the set has no items, we can stop right there + if (s.size === 0) + return I; + + if (s.size < smallestSize) { + smallestSize = s.size; + smallestSet = s; + } + } + + // Now we need to intersect this set with the others + var iterator = smallestSet.values(), + step, + item, + add, + set; + + // TODO: we can optimize by iterating each next time over the current intersection + // but this probably means more RAM to consume since we'll create n-1 sets rather than + // only the one. + while ((step = iterator.next(), !step.done)) { + item = step.value; + add = true; + + for (i = 0; i < l; i++) { + set = arguments[i]; + + if (set === smallestSet) + continue; + + if (!set.has(item)) { + add = false; + break; + } + } + + if (add) + I.add(item); + } + + return I; +}; + +/** + * Variadic function computing the union of multiple sets. + * + * @param {...Set} sets - Sets to unite. + * @return {Set} - The union. + */ +exports.union = function() { + if (arguments.length < 2) + throw new Error('mnemonist/Set.union: needs at least two arguments.'); + + var U = new Set(); + + var i, l = arguments.length; + + var iterator, + step; + + for (i = 0; i < l; i++) { + iterator = arguments[i].values(); + + while ((step = iterator.next(), !step.done)) + U.add(step.value); + } + + return U; +}; + +/** + * Function computing the difference between two sets. + * + * @param {Set} A - First set. + * @param {Set} B - Second set. + * @return {Set} - The difference. 
+ */ +exports.difference = function(A, B) { + + // If first set is empty + if (!A.size) + return new Set(); + + if (!B.size) + return new Set(A); + + var D = new Set(); + + var iterator = A.values(), + step; + + while ((step = iterator.next(), !step.done)) { + if (!B.has(step.value)) + D.add(step.value); + } + + return D; +}; + +/** + * Function computing the symmetric difference between two sets. + * + * @param {Set} A - First set. + * @param {Set} B - Second set. + * @return {Set} - The symmetric difference. + */ +exports.symmetricDifference = function(A, B) { + var S = new Set(); + + var iterator = A.values(), + step; + + while ((step = iterator.next(), !step.done)) { + if (!B.has(step.value)) + S.add(step.value); + } + + iterator = B.values(); + + while ((step = iterator.next(), !step.done)) { + if (!A.has(step.value)) + S.add(step.value); + } + + return S; +}; + +/** + * Function returning whether A is a subset of B. + * + * @param {Set} A - First set. + * @param {Set} B - Second set. + * @return {boolean} + */ +exports.isSubset = function(A, B) { + var iterator = A.values(), + step; + + // Shortcuts + if (A === B) + return true; + + if (A.size > B.size) + return false; + + while ((step = iterator.next(), !step.done)) { + if (!B.has(step.value)) + return false; + } + + return true; +}; + +/** + * Function returning whether A is a superset of B. + * + * @param {Set} A - First set. + * @param {Set} B - Second set. + * @return {boolean} + */ +exports.isSuperset = function(A, B) { + return exports.isSubset(B, A); +}; + +/** + * Function adding the items of set B to the set A. + * + * @param {Set} A - First set. + * @param {Set} B - Second set. + */ +exports.add = function(A, B) { + var iterator = B.values(), + step; + + while ((step = iterator.next(), !step.done)) + A.add(step.value); + + return; +}; + +/** + * Function subtracting the items of set B from the set A. + * + * @param {Set} A - First set. + * @param {Set} B - Second set. 
+ */ +exports.subtract = function(A, B) { + var iterator = B.values(), + step; + + while ((step = iterator.next(), !step.done)) + A.delete(step.value); + + return; +}; + +/** + * Function intersecting the items of A & B. + * + * @param {Set} A - First set. + * @param {Set} B - Second set. + */ +exports.intersect = function(A, B) { + var iterator = A.values(), + step; + + while ((step = iterator.next(), !step.done)) { + if (!B.has(step.value)) + A.delete(step.value); + } + + return; +}; + +/** + * Function disjuncting the items of A & B. + * + * @param {Set} A - First set. + * @param {Set} B - Second set. + */ +exports.disjunct = function(A, B) { + var iterator = A.values(), + step; + + var toRemove = []; + + while ((step = iterator.next(), !step.done)) { + if (B.has(step.value)) + toRemove.push(step.value); + } + + iterator = B.values(); + + while ((step = iterator.next(), !step.done)) { + if (!A.has(step.value)) + A.add(step.value); + } + + for (var i = 0, l = toRemove.length; i < l; i++) + A.delete(toRemove[i]); + + return; +}; + +/** + * Function returning the size of the intersection of A & B. + * + * @param {Set} A - First set. + * @param {Set} B - Second set. + * @return {number} + */ +exports.intersectionSize = function(A, B) { + var tmp; + + // We need to know the smallest set + if (A.size > B.size) { + tmp = A; + A = B; + B = tmp; + } + + if (A.size === 0) + return 0; + + if (A === B) + return A.size; + + var iterator = A.values(), + step; + + var I = 0; + + while ((step = iterator.next(), !step.done)) { + if (B.has(step.value)) + I++; + } + + return I; +}; + +/** + * Function returning the size of the union of A & B. + * + * @param {Set} A - First set. + * @param {Set} B - Second set. + * @return {number} + */ +exports.unionSize = function(A, B) { + var I = exports.intersectionSize(A, B); + + return A.size + B.size - I; +}; + +/** + * Function returning the Jaccard similarity between A & B. + * + * @param {Set} A - First set. 
+ * @param {Set} B - Second set. + * @return {number} + */ +exports.jaccard = function(A, B) { + var I = exports.intersectionSize(A, B); + + if (I === 0) + return 0; + + var U = A.size + B.size - I; + + return I / U; +}; + +/** + * Function returning the overlap coefficient between A & B. + * + * @param {Set} A - First set. + * @param {Set} B - Second set. + * @return {number} + */ +exports.overlap = function(A, B) { + var I = exports.intersectionSize(A, B); + + if (I === 0) + return 0; + + return I / Math.min(A.size, B.size); +}; diff --git a/amplify/functions/deleteDocument/node_modules/mnemonist/sort/insertion.d.ts b/amplify/functions/deleteDocument/node_modules/mnemonist/sort/insertion.d.ts new file mode 100644 index 0000000..db22f9b --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/mnemonist/sort/insertion.d.ts @@ -0,0 +1,4 @@ +import {IArrayLike} from '../utils/types'; + +export function inplaceInsertionSort(array: IArrayLike, lo: number, hi: number): IArrayLike; +export function inplaceInsertionSortIndices(array: IArrayLike, indices: IArrayLike, lo: number, hi: number): IArrayLike; diff --git a/amplify/functions/deleteDocument/node_modules/mnemonist/sort/insertion.js b/amplify/functions/deleteDocument/node_modules/mnemonist/sort/insertion.js new file mode 100644 index 0000000..aebd1ad --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/mnemonist/sort/insertion.js @@ -0,0 +1,50 @@ +/** + * Mnemonist Insertion Sort + * ========================= + * + * Insertion sort related functions. 
+ */ +function inplaceInsertionSort(array, lo, hi) { + i = lo + 1; + + var j, k; + + for (; i < hi; i++) { + k = array[i]; + j = i - 1; + + while (j >= lo && array[j] > k) { + array[j + 1] = array[j]; + j--; + } + + array[j + 1] = k; + } + + return array; +} + +exports.inplaceInsertionSort = inplaceInsertionSort; + +function inplaceInsertionSortIndices(array, indices, lo, hi) { + i = lo + 1; + + var j, k, t; + + for (; i < hi; i++) { + t = indices[i]; + k = array[t]; + j = i - 1; + + while (j >= lo && array[indices[j]] > k) { + indices[j + 1] = indices[j]; + j--; + } + + indices[j + 1] = t; + } + + return indices; +} + +exports.inplaceInsertionSortIndices = inplaceInsertionSortIndices; diff --git a/amplify/functions/deleteDocument/node_modules/mnemonist/sort/quick.d.ts b/amplify/functions/deleteDocument/node_modules/mnemonist/sort/quick.d.ts new file mode 100644 index 0000000..5e6c90d --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/mnemonist/sort/quick.d.ts @@ -0,0 +1,4 @@ +import {IArrayLike} from '../utils/types'; + +export function inplaceQuickSort(array: IArrayLike, lo: number, hi: number): IArrayLike; +export function inplaceQuickSortIndices(array: IArrayLike, indices: IArrayLike, lo: number, hi: number): IArrayLike; diff --git a/amplify/functions/deleteDocument/node_modules/mnemonist/sort/quick.js b/amplify/functions/deleteDocument/node_modules/mnemonist/sort/quick.js new file mode 100644 index 0000000..008d0fd --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/mnemonist/sort/quick.js @@ -0,0 +1,116 @@ +/** + * Mnemonist Quick Sort + * ===================== + * + * Quick sort related functions. 
+ * Adapted from: https://alienryderflex.com/quicksort/ + */ +var LOS = new Float64Array(64), + HIS = new Float64Array(64); + +function inplaceQuickSort(array, lo, hi) { + var p, i, l, r, swap; + + LOS[0] = lo; + HIS[0] = hi; + i = 0; + + while (i >= 0) { + l = LOS[i]; + r = HIS[i] - 1; + + if (l < r) { + p = array[l]; + + while (l < r) { + while (array[r] >= p && l < r) + r--; + + if (l < r) + array[l++] = array[r]; + + while (array[l] <= p && l < r) + l++; + + if (l < r) + array[r--] = array[l]; + } + + array[l] = p; + LOS[i + 1] = l + 1; + HIS[i + 1] = HIS[i]; + HIS[i++] = l; + + if (HIS[i] - LOS[i] > HIS[i - 1] - LOS[i - 1]) { + swap = LOS[i]; + LOS[i] = LOS[i - 1]; + LOS[i - 1] = swap; + + swap = HIS[i]; + HIS[i] = HIS[i - 1]; + HIS[i - 1] = swap; + } + } + else { + i--; + } + } + + return array; +} + +exports.inplaceQuickSort = inplaceQuickSort; + +function inplaceQuickSortIndices(array, indices, lo, hi) { + var p, i, l, r, t, swap; + + LOS[0] = lo; + HIS[0] = hi; + i = 0; + + while (i >= 0) { + l = LOS[i]; + r = HIS[i] - 1; + + if (l < r) { + t = indices[l]; + p = array[t]; + + while (l < r) { + while (array[indices[r]] >= p && l < r) + r--; + + if (l < r) + indices[l++] = indices[r]; + + while (array[indices[l]] <= p && l < r) + l++; + + if (l < r) + indices[r--] = indices[l]; + } + + indices[l] = t; + LOS[i + 1] = l + 1; + HIS[i + 1] = HIS[i]; + HIS[i++] = l; + + if (HIS[i] - LOS[i] > HIS[i - 1] - LOS[i - 1]) { + swap = LOS[i]; + LOS[i] = LOS[i - 1]; + LOS[i - 1] = swap; + + swap = HIS[i]; + HIS[i] = HIS[i - 1]; + HIS[i - 1] = swap; + } + } + else { + i--; + } + } + + return indices; +} + +exports.inplaceQuickSortIndices = inplaceQuickSortIndices; diff --git a/amplify/functions/deleteDocument/node_modules/mnemonist/sparse-map.d.ts b/amplify/functions/deleteDocument/node_modules/mnemonist/sparse-map.d.ts new file mode 100644 index 0000000..0b22f90 --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/mnemonist/sparse-map.d.ts @@ -0,0 +1,26 @@ 
+/** + * Mnemonist SparseMap Typings + * ============================ + */ +export default class SparseMap implements Iterable<[number, V]> { + + // Members + length: number; + size: number; + + // Constructor + constructor(length: number); + + // Methods + clear(): void; + has(key: number): boolean; + get(key: number): V | undefined; + set(key: number, value: V): this; + delete(key: number): boolean; + forEach(callback: (value: V, key: number, set: this) => void, scope?: any): void; + keys(): IterableIterator; + values(): IterableIterator; + entries(): IterableIterator<[number, V]>; + [Symbol.iterator](): IterableIterator<[number, V]>; + inspect(): any; +} diff --git a/amplify/functions/deleteDocument/node_modules/mnemonist/sparse-map.js b/amplify/functions/deleteDocument/node_modules/mnemonist/sparse-map.js new file mode 100644 index 0000000..d5cf20d --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/mnemonist/sparse-map.js @@ -0,0 +1,243 @@ +/** + * Mnemonist SparseMap + * ==================== + * + * JavaScript sparse map implemented on top of byte arrays. + * + * [Reference]: https://research.swtch.com/sparse + */ +var Iterator = require('obliterator/iterator'), + getPointerArray = require('./utils/typed-arrays.js').getPointerArray; + +/** + * SparseMap. + * + * @constructor + */ +function SparseMap(Values, length) { + if (arguments.length < 2) { + length = Values; + Values = Array; + } + + var ByteArray = getPointerArray(length); + + // Properties + this.size = 0; + this.length = length; + this.dense = new ByteArray(length); + this.sparse = new ByteArray(length); + this.vals = new Values(length); +} + +/** + * Method used to clear the structure. + * + * @return {undefined} + */ +SparseMap.prototype.clear = function() { + this.size = 0; +}; + +/** + * Method used to check the existence of a member in the set. + * + * @param {number} member - Member to test. 
+ * @return {SparseMap} + */ +SparseMap.prototype.has = function(member) { + var index = this.sparse[member]; + + return ( + index < this.size && + this.dense[index] === member + ); +}; + +/** + * Method used to get the value associated to a member in the set. + * + * @param {number} member - Member to test. + * @return {any} + */ +SparseMap.prototype.get = function(member) { + var index = this.sparse[member]; + + if (index < this.size && this.dense[index] === member) + return this.vals[index]; + + return; +}; + +/** + * Method used to set a value into the map. + * + * @param {number} member - Member to set. + * @param {any} value - Associated value. + * @return {SparseMap} + */ +SparseMap.prototype.set = function(member, value) { + var index = this.sparse[member]; + + if (index < this.size && this.dense[index] === member) { + this.vals[index] = value; + return this; + } + + this.dense[this.size] = member; + this.sparse[member] = this.size; + this.vals[this.size] = value; + this.size++; + + return this; +}; + +/** + * Method used to remove a member from the set. + * + * @param {number} member - Member to delete. + * @return {boolean} + */ +SparseMap.prototype.delete = function(member) { + var index = this.sparse[member]; + + if (index >= this.size || this.dense[index] !== member) + return false; + + index = this.dense[this.size - 1]; + this.dense[this.sparse[member]] = index; + this.sparse[index] = this.sparse[member]; + this.size--; + + return true; +}; + +/** + * Method used to iterate over the set's values. + * + * @param {function} callback - Function to call for each item. + * @param {object} scope - Optional scope. + * @return {undefined} + */ +SparseMap.prototype.forEach = function(callback, scope) { + scope = arguments.length > 1 ? scope : this; + + for (var i = 0; i < this.size; i++) + callback.call(scope, this.vals[i], this.dense[i]); +}; + +/** + * Method used to create an iterator over a set's members. 
+ * + * @return {Iterator} + */ +SparseMap.prototype.keys = function() { + var size = this.size, + dense = this.dense, + i = 0; + + return new Iterator(function() { + if (i < size) { + var item = dense[i]; + i++; + + return { + value: item + }; + } + + return { + done: true + }; + }); +}; + +/** + * Method used to create an iterator over a set's values. + * + * @return {Iterator} + */ +SparseMap.prototype.values = function() { + var size = this.size, + values = this.vals, + i = 0; + + return new Iterator(function() { + if (i < size) { + var item = values[i]; + i++; + + return { + value: item + }; + } + + return { + done: true + }; + }); +}; + +/** + * Method used to create an iterator over a set's entries. + * + * @return {Iterator} + */ +SparseMap.prototype.entries = function() { + var size = this.size, + dense = this.dense, + values = this.vals, + i = 0; + + return new Iterator(function() { + if (i < size) { + var item = [dense[i], values[i]]; + i++; + + return { + value: item + }; + } + + return { + done: true + }; + }); +}; + +/** + * Attaching the #.entries method to Symbol.iterator if possible. + */ +if (typeof Symbol !== 'undefined') + SparseMap.prototype[Symbol.iterator] = SparseMap.prototype.entries; + +/** + * Convenience known methods. + */ +SparseMap.prototype.inspect = function() { + var proxy = new Map(); + + for (var i = 0; i < this.size; i++) + proxy.set(this.dense[i], this.vals[i]); + + // Trick so that node displays the name of the constructor + Object.defineProperty(proxy, 'constructor', { + value: SparseMap, + enumerable: false + }); + + proxy.length = this.length; + + if (this.vals.constructor !== Array) + proxy.type = this.vals.constructor.name; + + return proxy; +}; + +if (typeof Symbol !== 'undefined') + SparseMap.prototype[Symbol.for('nodejs.util.inspect.custom')] = SparseMap.prototype.inspect; + +/** + * Exporting. 
+ */ +module.exports = SparseMap; diff --git a/amplify/functions/deleteDocument/node_modules/mnemonist/sparse-queue-set.d.ts b/amplify/functions/deleteDocument/node_modules/mnemonist/sparse-queue-set.d.ts new file mode 100644 index 0000000..e7463bf --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/mnemonist/sparse-queue-set.d.ts @@ -0,0 +1,24 @@ +/** + * Mnemonist SparseQueueSet Typings + * ================================= + */ +export default class SparseQueueSet implements Iterable { + + // Members + capacity: number; + start: number; + size: number; + + // Constructor + constructor(length: number); + + // Methods + clear(): void; + has(value: number): boolean; + enqueue(value: number): this; + dequeue(): number | undefined; + forEach(callback: (value: number, key: number, set: this) => void, scope?: any): void; + values(): IterableIterator; + [Symbol.iterator](): IterableIterator; + inspect(): any; +} diff --git a/amplify/functions/deleteDocument/node_modules/mnemonist/sparse-queue-set.js b/amplify/functions/deleteDocument/node_modules/mnemonist/sparse-queue-set.js new file mode 100644 index 0000000..b5f42b3 --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/mnemonist/sparse-queue-set.js @@ -0,0 +1,218 @@ +/** + * Mnemonist SparseQueueSet + * ========================= + * + * JavaScript sparse queue set implemented on top of byte arrays. + * + * [Reference]: https://research.swtch.com/sparse + */ +var Iterator = require('obliterator/iterator'), + getPointerArray = require('./utils/typed-arrays.js').getPointerArray; + +/** + * SparseQueueSet. + * + * @constructor + */ +function SparseQueueSet(capacity) { + + var ByteArray = getPointerArray(capacity); + + // Properties + this.start = 0; + this.size = 0; + this.capacity = capacity; + this.dense = new ByteArray(capacity); + this.sparse = new ByteArray(capacity); +} + +/** + * Method used to clear the structure. 
+ * + * @return {undefined} + */ +SparseQueueSet.prototype.clear = function() { + this.start = 0; + this.size = 0; +}; + +/** + * Method used to check the existence of a member in the queue. + * + * @param {number} member - Member to test. + * @return {SparseQueueSet} + */ +SparseQueueSet.prototype.has = function(member) { + if (this.size === 0) + return false; + + var index = this.sparse[member]; + + var inBounds = ( + index < this.capacity && + ( + index >= this.start && + index < this.start + this.size + ) || + ( + index < ((this.start + this.size) % this.capacity) + ) + ); + + return ( + inBounds && + this.dense[index] === member + ); +}; + +/** + * Method used to add a member to the queue. + * + * @param {number} member - Member to add. + * @return {SparseQueueSet} + */ +SparseQueueSet.prototype.enqueue = function(member) { + var index = this.sparse[member]; + + if (this.size !== 0) { + var inBounds = ( + index < this.capacity && + ( + index >= this.start && + index < this.start + this.size + ) || + ( + index < ((this.start + this.size) % this.capacity) + ) + ); + + if (inBounds && this.dense[index] === member) + return this; + } + + index = (this.start + this.size) % this.capacity; + + this.dense[index] = member; + this.sparse[member] = index; + this.size++; + + return this; +}; + +/** + * Method used to remove the next member from the queue. + * + * @param {number} member - Member to delete. + * @return {boolean} + */ +SparseQueueSet.prototype.dequeue = function() { + if (this.size === 0) + return; + + var index = this.start; + + this.size--; + this.start++; + + if (this.start === this.capacity) + this.start = 0; + + var member = this.dense[index]; + + this.sparse[member] = this.capacity; + + return member; +}; + +/** + * Method used to iterate over the queue's values. + * + * @param {function} callback - Function to call for each item. + * @param {object} scope - Optional scope. 
+ * @return {undefined} + */ +SparseQueueSet.prototype.forEach = function(callback, scope) { + scope = arguments.length > 1 ? scope : this; + + var c = this.capacity, + l = this.size, + i = this.start, + j = 0; + + while (j < l) { + callback.call(scope, this.dense[i], j, this); + i++; + j++; + + if (i === c) + i = 0; + } +}; + +/** + * Method used to create an iterator over a set's values. + * + * @return {Iterator} + */ +SparseQueueSet.prototype.values = function() { + var dense = this.dense, + c = this.capacity, + l = this.size, + i = this.start, + j = 0; + + return new Iterator(function() { + if (j >= l) + return { + done: true + }; + + var value = dense[i]; + + i++; + j++; + + if (i === c) + i = 0; + + return { + value: value, + done: false + }; + }); +}; + +/** + * Attaching the #.values method to Symbol.iterator if possible. + */ +if (typeof Symbol !== 'undefined') + SparseQueueSet.prototype[Symbol.iterator] = SparseQueueSet.prototype.values; + +/** + * Convenience known methods. + */ +SparseQueueSet.prototype.inspect = function() { + var proxy = []; + + this.forEach(function(member) { + proxy.push(member); + }); + + // Trick so that node displays the name of the constructor + Object.defineProperty(proxy, 'constructor', { + value: SparseQueueSet, + enumerable: false + }); + + proxy.capacity = this.capacity; + + return proxy; +}; + +if (typeof Symbol !== 'undefined') + SparseQueueSet.prototype[Symbol.for('nodejs.util.inspect.custom')] = SparseQueueSet.prototype.inspect; + +/** + * Exporting. 
+ */ +module.exports = SparseQueueSet; diff --git a/amplify/functions/deleteDocument/node_modules/mnemonist/sparse-set.d.ts b/amplify/functions/deleteDocument/node_modules/mnemonist/sparse-set.d.ts new file mode 100644 index 0000000..99fe655 --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/mnemonist/sparse-set.d.ts @@ -0,0 +1,23 @@ +/** + * Mnemonist SparseSet Typings + * ============================ + */ +export default class SparseSet implements Iterable { + + // Members + length: number; + size: number; + + // Constructor + constructor(length: number); + + // Methods + clear(): void; + has(value: number): boolean; + add(value: number): this; + delete(value: number): boolean; + forEach(callback: (value: number, key: number, set: this) => void, scope?: any): void; + values(): IterableIterator; + [Symbol.iterator](): IterableIterator; + inspect(): any; +} diff --git a/amplify/functions/deleteDocument/node_modules/mnemonist/sparse-set.js b/amplify/functions/deleteDocument/node_modules/mnemonist/sparse-set.js new file mode 100644 index 0000000..7498f33 --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/mnemonist/sparse-set.js @@ -0,0 +1,168 @@ +/** + * Mnemonist SparseSet + * ==================== + * + * JavaScript sparse set implemented on top of byte arrays. + * + * [Reference]: https://research.swtch.com/sparse + */ +var Iterator = require('obliterator/iterator'), + getPointerArray = require('./utils/typed-arrays.js').getPointerArray; + +/** + * SparseSet. + * + * @constructor + */ +function SparseSet(length) { + + var ByteArray = getPointerArray(length); + + // Properties + this.size = 0; + this.length = length; + this.dense = new ByteArray(length); + this.sparse = new ByteArray(length); +} + +/** + * Method used to clear the structure. + * + * @return {undefined} + */ +SparseSet.prototype.clear = function() { + this.size = 0; +}; + +/** + * Method used to check the existence of a member in the set. 
+ * + * @param {number} member - Member to test. + * @return {SparseSet} + */ +SparseSet.prototype.has = function(member) { + var index = this.sparse[member]; + + return ( + index < this.size && + this.dense[index] === member + ); +}; + +/** + * Method used to add a member to the set. + * + * @param {number} member - Member to add. + * @return {SparseSet} + */ +SparseSet.prototype.add = function(member) { + var index = this.sparse[member]; + + if (index < this.size && this.dense[index] === member) + return this; + + this.dense[this.size] = member; + this.sparse[member] = this.size; + this.size++; + + return this; +}; + +/** + * Method used to remove a member from the set. + * + * @param {number} member - Member to delete. + * @return {boolean} + */ +SparseSet.prototype.delete = function(member) { + var index = this.sparse[member]; + + if (index >= this.size || this.dense[index] !== member) + return false; + + index = this.dense[this.size - 1]; + this.dense[this.sparse[member]] = index; + this.sparse[index] = this.sparse[member]; + this.size--; + + return true; +}; + +/** + * Method used to iterate over the set's values. + * + * @param {function} callback - Function to call for each item. + * @param {object} scope - Optional scope. + * @return {undefined} + */ +SparseSet.prototype.forEach = function(callback, scope) { + scope = arguments.length > 1 ? scope : this; + + var item; + + for (var i = 0; i < this.size; i++) { + item = this.dense[i]; + + callback.call(scope, item, item); + } +}; + +/** + * Method used to create an iterator over a set's values. + * + * @return {Iterator} + */ +SparseSet.prototype.values = function() { + var size = this.size, + dense = this.dense, + i = 0; + + return new Iterator(function() { + if (i < size) { + var item = dense[i]; + i++; + + return { + value: item + }; + } + + return { + done: true + }; + }); +}; + +/** + * Attaching the #.values method to Symbol.iterator if possible. 
+ */ +if (typeof Symbol !== 'undefined') + SparseSet.prototype[Symbol.iterator] = SparseSet.prototype.values; + +/** + * Convenience known methods. + */ +SparseSet.prototype.inspect = function() { + var proxy = new Set(); + + for (var i = 0; i < this.size; i++) + proxy.add(this.dense[i]); + + // Trick so that node displays the name of the constructor + Object.defineProperty(proxy, 'constructor', { + value: SparseSet, + enumerable: false + }); + + proxy.length = this.length; + + return proxy; +}; + +if (typeof Symbol !== 'undefined') + SparseSet.prototype[Symbol.for('nodejs.util.inspect.custom')] = SparseSet.prototype.inspect; + +/** + * Exporting. + */ +module.exports = SparseSet; diff --git a/amplify/functions/deleteDocument/node_modules/mnemonist/stack.d.ts b/amplify/functions/deleteDocument/node_modules/mnemonist/stack.d.ts new file mode 100644 index 0000000..fa6998b --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/mnemonist/stack.d.ts @@ -0,0 +1,27 @@ +/** + * Mnemonist Stack Typings + * ======================== + */ +export default class Stack implements Iterable { + + // Members + size: number; + + // Methods + clear(): void; + push(item: T): number; + pop(): T | undefined; + peek(): T | undefined; + forEach(callback: (item: T, index: number, stack: this) => void, scope?: any): void; + toArray(): Array; + values(): IterableIterator; + entries(): IterableIterator<[number, T]>; + [Symbol.iterator](): IterableIterator; + toString(): string; + toJSON(): Array; + inspect(): any; + + // Statics + static from(iterable: Iterable | {[key: string] : I}): Stack; + static of(...items: Array): Stack; +} diff --git a/amplify/functions/deleteDocument/node_modules/mnemonist/stack.js b/amplify/functions/deleteDocument/node_modules/mnemonist/stack.js new file mode 100644 index 0000000..9e83519 --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/mnemonist/stack.js @@ -0,0 +1,210 @@ +/** + * Mnemonist Stack + * ================ + * + * Stack 
implementation relying on JavaScript arrays, which are fast enough & + * correctly optimized for this kind of work. + */ +var Iterator = require('obliterator/iterator'), + forEach = require('obliterator/foreach'); + +/** + * Stack + * + * @constructor + */ +function Stack() { + this.clear(); +} + +/** + * Method used to clear the stack. + * + * @return {undefined} + */ +Stack.prototype.clear = function() { + + // Properties + this.items = []; + this.size = 0; +}; + +/** + * Method used to add an item to the stack. + * + * @param {any} item - Item to add. + * @return {number} + */ +Stack.prototype.push = function(item) { + this.items.push(item); + return ++this.size; +}; + +/** + * Method used to retrieve & remove the last item of the stack. + * + * @return {any} + */ +Stack.prototype.pop = function() { + if (this.size === 0) + return; + + this.size--; + return this.items.pop(); +}; + +/** + * Method used to get the last item of the stack. + * + * @return {any} + */ +Stack.prototype.peek = function() { + return this.items[this.size - 1]; +}; + +/** + * Method used to iterate over the stack. + * + * @param {function} callback - Function to call for each item. + * @param {object} scope - Optional scope. + * @return {undefined} + */ +Stack.prototype.forEach = function(callback, scope) { + scope = arguments.length > 1 ? scope : this; + + for (var i = 0, l = this.items.length; i < l; i++) + callback.call(scope, this.items[l - i - 1], i, this); +}; + +/** + * Method used to convert the stack to a JavaScript array. + * + * @return {array} + */ +Stack.prototype.toArray = function() { + var array = new Array(this.size), + l = this.size - 1, + i = this.size; + + while (i--) + array[i] = this.items[l - i]; + + return array; +}; + +/** + * Method used to create an iterator over a stack's values. 
+ * + * @return {Iterator} + */ +Stack.prototype.values = function() { + var items = this.items, + l = items.length, + i = 0; + + return new Iterator(function() { + if (i >= l) + return { + done: true + }; + + var value = items[l - i - 1]; + i++; + + return { + value: value, + done: false + }; + }); +}; + +/** + * Method used to create an iterator over a stack's entries. + * + * @return {Iterator} + */ +Stack.prototype.entries = function() { + var items = this.items, + l = items.length, + i = 0; + + return new Iterator(function() { + if (i >= l) + return { + done: true + }; + + var value = items[l - i - 1]; + + return { + value: [i++, value], + done: false + }; + }); +}; + +/** + * Attaching the #.values method to Symbol.iterator if possible. + */ +if (typeof Symbol !== 'undefined') + Stack.prototype[Symbol.iterator] = Stack.prototype.values; + + +/** + * Convenience known methods. + */ +Stack.prototype.toString = function() { + return this.toArray().join(','); +}; + +Stack.prototype.toJSON = function() { + return this.toArray(); +}; + +Stack.prototype.inspect = function() { + var array = this.toArray(); + + // Trick so that node displays the name of the constructor + Object.defineProperty(array, 'constructor', { + value: Stack, + enumerable: false + }); + + return array; +}; + +if (typeof Symbol !== 'undefined') + Stack.prototype[Symbol.for('nodejs.util.inspect.custom')] = Stack.prototype.inspect; + +/** + * Static @.from function taking an arbitrary iterable & converting it into + * a stack. + * + * @param {Iterable} iterable - Target iterable. + * @return {Stack} + */ +Stack.from = function(iterable) { + var stack = new Stack(); + + forEach(iterable, function(value) { + stack.push(value); + }); + + return stack; +}; + +/** + * Static @.of function taking an arbitrary number of arguments & converting it + * into a stack. + * + * @param {...any} args + * @return {Stack} + */ +Stack.of = function() { + return Stack.from(arguments); +}; + +/** + * Exporting. 
+ */ +module.exports = Stack; diff --git a/amplify/functions/deleteDocument/node_modules/mnemonist/static-disjoint-set.d.ts b/amplify/functions/deleteDocument/node_modules/mnemonist/static-disjoint-set.d.ts new file mode 100644 index 0000000..3e808da --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/mnemonist/static-disjoint-set.d.ts @@ -0,0 +1,23 @@ +/** + * Mnemonist StaticDisjointSet Typings + * ==================================== + */ +import {ArrayLike} from './utils/types'; + +export default class StaticDisjointSet { + + // Members + dimension: number; + size: number; + + // Constructor + constructor(size: number); + + // Methods + find(x: number): number; + union(x: number, y: number): this; + connected(x: number, y: number): boolean; + mapping(): ArrayLike; + compile(): Array>; + inspect(): any; +} \ No newline at end of file diff --git a/amplify/functions/deleteDocument/node_modules/mnemonist/static-disjoint-set.js b/amplify/functions/deleteDocument/node_modules/mnemonist/static-disjoint-set.js new file mode 100644 index 0000000..7a84b93 --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/mnemonist/static-disjoint-set.js @@ -0,0 +1,195 @@ +/* eslint no-constant-condition: 0 */ +/** + * Mnemonist StaticDisjointSet + * ============================ + * + * JavaScript implementation of a static disjoint set (union-find). + * + * Note that to remain performant, this implementation needs to know a size + * beforehand. + */ +var helpers = require('./utils/typed-arrays.js'); + +/** + * StaticDisjointSet. 
 *
 * @constructor
 * @param {number} size - Fixed number of items in the universe.
 */
function StaticDisjointSet(size) {

  // Optimizing the typed array types
  // Ranks are bounded by log2(size), so they fit a much smaller pointer type.
  var ParentsTypedArray = helpers.getPointerArray(size),
      RanksTypedArray = helpers.getPointerArray(Math.log2(size));

  // Properties
  this.size = size;
  this.dimension = size;
  this.parents = new ParentsTypedArray(size);
  this.ranks = new RanksTypedArray(size);

  // Initializing parents: each item starts as its own singleton root.
  for (var i = 0; i < size; i++)
    this.parents[i] = i;
}

/**
 * Method used to find the root of the given item.
 *
 * @param  {number} x - Target item.
 * @return {number} - Root of the item's set.
 */
StaticDisjointSet.prototype.find = function(x) {
  var y = x;

  var c, p;

  // First pass: walk up to the root.
  while (true) {
    c = this.parents[y];

    if (y === c)
      break;

    y = c;
  }

  // Path compression
  // Second pass: repoint every node on the walked path directly at the root.
  while (true) {
    p = this.parents[x];

    if (p === y)
      break;

    this.parents[x] = y;
    x = p;
  }

  return y;
};

/**
 * Method used to perform the union of two items.
 *
 * @param  {number} x - First item.
 * @param  {number} y - Second item.
 * @return {StaticDisjointSet}
 */
StaticDisjointSet.prototype.union = function(x, y) {
  var xRoot = this.find(x),
      yRoot = this.find(y);

  // x and y are already in the same set
  if (xRoot === yRoot)
    return this;

  // One fewer distinct set after the merge.
  this.dimension--;

  // x and y are not in the same set, we merge them
  // NOTE(review): ranks are read at x/y rather than at xRoot/yRoot. This
  // only affects tree balance (and thus speed), never correctness of the
  // partition — confirm against upstream before changing.
  var xRank = this.ranks[x],
      yRank = this.ranks[y];

  if (xRank < yRank) {
    this.parents[xRoot] = yRoot;
  }
  else if (xRank > yRank) {
    this.parents[yRoot] = xRoot;
  }
  else {
    this.parents[yRoot] = xRoot;
    this.ranks[xRoot]++;
  }

  return this;
};

/**
 * Method returning whether two items are connected.
 *
 * @param  {number} x - First item.
 * @param  {number} y - Second item.
 * @return {boolean}
 */
StaticDisjointSet.prototype.connected = function(x, y) {
  var xRoot = this.find(x);

  return xRoot === this.find(y);
};

/**
 * Method returning the set mapping.
 *
 * @return {TypedArray} - For each item, the compact id of its set.
 */
StaticDisjointSet.prototype.mapping = function() {
  var MappingClass = helpers.getPointerArray(this.dimension);

  // `ids` assigns a compact id, in first-seen order, to each distinct root.
  var ids = {},
      mapping = new MappingClass(this.size),
      c = 0;

  var r;

  for (var i = 0, l = this.parents.length; i < l; i++) {
    r = this.find(i);

    if (typeof ids[r] === 'undefined') {
      mapping[i] = c;
      ids[r] = c++;
    }
    else {
      mapping[i] = ids[r];
    }
  }

  return mapping;
};

/**
 * Method used to compile the disjoint set into an array of arrays.
 *
 * @return {array} - One array of member indices per distinct set.
 */
StaticDisjointSet.prototype.compile = function() {
  var ids = {},
      result = new Array(this.dimension),
      c = 0;

  var r;

  for (var i = 0, l = this.parents.length; i < l; i++) {
    r = this.find(i);

    if (typeof ids[r] === 'undefined') {
      result[c] = [i];
      ids[r] = c++;
    }
    else {
      result[ids[r]].push(i);
    }
  }

  return result;
};

/**
 * Convenience known methods.
 */
StaticDisjointSet.prototype.inspect = function() {
  var array = this.compile();

  // Trick so that node displays the name of the constructor
  Object.defineProperty(array, 'constructor', {
    value: StaticDisjointSet,
    enumerable: false
  });

  return array;
};

if (typeof Symbol !== 'undefined')
  StaticDisjointSet.prototype[Symbol.for('nodejs.util.inspect.custom')] = StaticDisjointSet.prototype.inspect;


/**
 * Exporting.
+ */ +module.exports = StaticDisjointSet; diff --git a/amplify/functions/deleteDocument/node_modules/mnemonist/static-interval-tree.d.ts b/amplify/functions/deleteDocument/node_modules/mnemonist/static-interval-tree.d.ts new file mode 100644 index 0000000..5302f1e --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/mnemonist/static-interval-tree.d.ts @@ -0,0 +1,24 @@ +/** + * Mnemonist StaticIntervalTree Typings + * ===================================== + */ +type StaticIntervalTreeGetter = (item: T) => number; +type StaticIntervalTreeGettersTuple = [StaticIntervalTreeGetter, StaticIntervalTreeGetter]; + +export default class StaticIntervalTree { + + // Members + height: number; + size: number; + + // Constructor + constructor(intervals: Array, getters?: StaticIntervalTreeGettersTuple); + + // Methods + intervalsContainingPoint(point: number): Array; + intervalsOverlappingInterval(interval: T): Array; + inspect(): any; + + // Statics + static from(iterable: Iterable | {[key: string] : I}): StaticIntervalTree; +} \ No newline at end of file diff --git a/amplify/functions/deleteDocument/node_modules/mnemonist/static-interval-tree.js b/amplify/functions/deleteDocument/node_modules/mnemonist/static-interval-tree.js new file mode 100644 index 0000000..41452f8 --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/mnemonist/static-interval-tree.js @@ -0,0 +1,387 @@ +/* + * Mnemonist StaticIntervalTree + * ============================= + * + * JavaScript implementation of a static interval tree. This tree is static in + * that you are required to know all its items beforehand and to built it + * from an iterable. + * + * This implementation represents the interval tree as an augmented balanced + * binary search tree. It works by sorting the intervals by startpoint first + * then proceeds building the augmented balanced BST bottom-up from the + * sorted list. 
 *
 * Note that this implementation considers every given intervals as closed for
 * simplicity's sake.
 *
 * For more information: https://en.wikipedia.org/wiki/Interval_tree
 */
var iterables = require('./utils/iterables.js'),
    typed = require('./utils/typed-arrays.js');

var FixedStack = require('./fixed-stack.js');


// TODO: pass index to getters
// TODO: custom comparison
// TODO: possibility to pass offset buffer

// TODO: intervals() => Symbol.iterator
// TODO: dfs()

/**
 * Helpers.
 */

/**
 * Recursive function building the BST from the sorted list of interval
 * indices.
 *
 * Layout notes: `tree` stores interval-index + 1 so that 0 can mean "empty
 * slot"; children of BST slot i live at 2i+1 / 2i+2. `augmentations[k]`
 * ends up holding the index of the interval with the maximum end in the
 * subtree rooted at interval k.
 *
 * @param  {array}    intervals     - Array of intervals to index.
 * @param  {function} endGetter     - Getter function for end of intervals.
 * @param  {array}    sortedIndices - Sorted indices of the intervals.
 * @param  {array}    tree          - BST memory.
 * @param  {array}    augmentations - Array of node augmentations.
 * @param  {number}   i             - BST index of current node.
 * @param  {number}   low           - Dichotomy low index.
 * @param  {number}   high          - Dichotomy high index.
 * @return {number}                 - Created node augmentation value.
 */
function buildBST(
  intervals,
  endGetter,
  sortedIndices,
  tree,
  augmentations,
  i,
  low,
  high
) {
  // Bit tricks: `~-mid` is mid - 1 and `-~mid` is mid + 1.
  var mid = (low + (high - low) / 2) | 0,
      midMinusOne = ~-mid,
      midPlusOne = -~mid;

  var current = sortedIndices[mid];
  tree[i] = current + 1;

  var end = endGetter ? endGetter(intervals[current]) : intervals[current][1];

  var left = i * 2 + 1,
      right = i * 2 + 2;

  var leftEnd = -Infinity,
      rightEnd = -Infinity;

  if (low <= midMinusOne) {
    leftEnd = buildBST(
      intervals,
      endGetter,
      sortedIndices,
      tree,
      augmentations,
      left,
      low,
      midMinusOne
    );
  }

  if (midPlusOne <= high) {
    rightEnd = buildBST(
      intervals,
      endGetter,
      sortedIndices,
      tree,
      augmentations,
      right,
      midPlusOne,
      high
    );
  }

  // Max end over this node and both subtrees, plus a pointer to the
  // interval realizing that max.
  var augmentation = Math.max(end, leftEnd, rightEnd);

  var augmentationPointer = current;

  if (augmentation === leftEnd)
    augmentationPointer = augmentations[tree[left] - 1];
  else if (augmentation === rightEnd)
    augmentationPointer = augmentations[tree[right] - 1];

  augmentations[current] = augmentationPointer;

  return augmentation;
}

/**
 * StaticIntervalTree.
 *
 * @constructor
 * @param {array} intervals - Array of intervals to index.
 * @param {array} getters   - Optional [startGetter, endGetter] pair.
 */
function StaticIntervalTree(intervals, getters) {

  // Properties
  this.size = intervals.length;
  this.intervals = intervals;

  var startGetter = null,
      endGetter = null;

  if (Array.isArray(getters)) {
    startGetter = getters[0];
    endGetter = getters[1];
  }

  // Building the indices array
  var length = intervals.length;

  // +1 because the tree stores indices shifted by one (0 = empty slot).
  var IndicesArray = typed.getPointerArray(length + 1);

  var indices = new IndicesArray(length);

  var i;

  // Starting at 1: indices[0] is already 0 in a fresh typed array.
  for (i = 1; i < length; i++)
    indices[i] = i;

  // Sorting indices array
  // TODO: check if some version of radix sort can outperform this part
  indices.sort(function(a, b) {
    a = intervals[a];
    b = intervals[b];

    if (startGetter) {
      a = startGetter(a);
      b = startGetter(b);
    }
    else {
      a = a[0];
      b = b[0];
    }

    if (a < b)
      return -1;

    if (a > b)
      return 1;

    // TODO: use getters
    // TODO: this ordering has the following invariant: if query interval
    // contains [nodeStart, max], then whole right subtree can be collected
    // a = a[1];
    // b = b[1];

    // if (a < b)
    //   return 1;

    // if (a > b)
    //   return -1;

    return 0;
  });

  // Building the binary tree
  var height = Math.ceil(Math.log2(length + 1)),
      treeSize = Math.pow(2, height) - 1;

  var tree = new IndicesArray(treeSize);

  var augmentations = new IndicesArray(length);

  buildBST(
    intervals,
    endGetter,
    indices,
    tree,
    augmentations,
    0,
    0,
    length - 1
  );

  // Dropping indices
  indices = null;

  // Storing necessary information
  this.height = height;
  this.tree = tree;
  this.augmentations = augmentations;
  this.startGetter = startGetter;
  this.endGetter = endGetter;

  // Initializing DFS stack
  // A fixed stack of `height` slots suffices for the iterative traversals.
  this.stack = new FixedStack(IndicesArray, this.height);
}

/**
 * Method returning a list of intervals containing the given point.
 *
 * @param  {any} point - Target point.
 * @return {array}
 */
StaticIntervalTree.prototype.intervalsContainingPoint = function(point) {
  var matches = [];

  var stack = this.stack;

  stack.clear();
  stack.push(0);

  var l = this.tree.length;

  var bstIndex,
      intervalIndex,
      interval,
      maxInterval,
      start,
      end,
      max,
      left,
      right;

  while (stack.size) {
    bstIndex = stack.pop();
    intervalIndex = this.tree[bstIndex] - 1;
    interval = this.intervals[intervalIndex];
    maxInterval = this.intervals[this.augmentations[intervalIndex]];

    max = this.endGetter ? this.endGetter(maxInterval) : maxInterval[1];

    // No possible match, point is farther right than the max end value
    if (point > max)
      continue;

    // Searching left
    left = bstIndex * 2 + 1;

    if (left < l && this.tree[left] !== 0)
      stack.push(left);

    start = this.startGetter ? this.startGetter(interval) : interval[0];
    end = this.endGetter ? this.endGetter(interval) : interval[1];

    // Checking current node (intervals are treated as closed)
    if (point >= start && point <= end)
      matches.push(interval);

    // If the point is to the left of the start of the current interval,
    // then it cannot be in the right child
    if (point < start)
      continue;

    // Searching right
    right = bstIndex * 2 + 2;

    if (right < l && this.tree[right] !== 0)
      stack.push(right);
  }

  return matches;
};

/**
 * Method returning a list of intervals overlapping the given interval.
 *
 * @param  {any} interval - Target interval.
 * @return {array}
 */
StaticIntervalTree.prototype.intervalsOverlappingInterval = function(interval) {
  var intervalStart = this.startGetter ? this.startGetter(interval) : interval[0],
      intervalEnd = this.endGetter ? this.endGetter(interval) : interval[1];

  var matches = [];

  var stack = this.stack;

  stack.clear();
  stack.push(0);

  var l = this.tree.length;

  var bstIndex,
      intervalIndex,
      currentInterval,
      maxInterval,
      start,
      end,
      max,
      left,
      right;

  while (stack.size) {
    bstIndex = stack.pop();
    intervalIndex = this.tree[bstIndex] - 1;
    currentInterval = this.intervals[intervalIndex];
    maxInterval = this.intervals[this.augmentations[intervalIndex]];

    max = this.endGetter ? this.endGetter(maxInterval) : maxInterval[1];

    // No possible match, start is farther right than the max end value
    if (intervalStart > max)
      continue;

    // Searching left
    left = bstIndex * 2 + 1;

    if (left < l && this.tree[left] !== 0)
      stack.push(left);

    start = this.startGetter ? this.startGetter(currentInterval) : currentInterval[0];
    end = this.endGetter ? this.endGetter(currentInterval) : currentInterval[1];

    // Checking current node (closed-interval overlap test)
    if (intervalEnd >= start && intervalStart <= end)
      matches.push(currentInterval);

    // If the end is to the left of the start of the current interval,
    // then it cannot be in the right child
    if (intervalEnd < start)
      continue;

    // Searching right
    right = bstIndex * 2 + 2;

    if (right < l && this.tree[right] !== 0)
      stack.push(right);
  }

  return matches;
};

/**
 * Convenience known methods.
 */
StaticIntervalTree.prototype.inspect = function() {
  var proxy = this.intervals.slice();

  // Trick so that node displays the name of the constructor
  Object.defineProperty(proxy, 'constructor', {
    value: StaticIntervalTree,
    enumerable: false
  });

  return proxy;
};

if (typeof Symbol !== 'undefined')
  StaticIntervalTree.prototype[Symbol.for('nodejs.util.inspect.custom')] = StaticIntervalTree.prototype.inspect;

/**
 * Static @.from function taking an arbitrary iterable & converting it into
 * a structure.
 *
 * @param  {Iterable} iterable - Target iterable.
 * @return {StaticIntervalTree}
 */
StaticIntervalTree.from = function(iterable, getters) {
  if (iterables.isArrayLike(iterable))
    return new StaticIntervalTree(iterable, getters);

  return new StaticIntervalTree(Array.from(iterable), getters);
};

/**
 * Exporting.
+ */ +module.exports = StaticIntervalTree; diff --git a/amplify/functions/deleteDocument/node_modules/mnemonist/suffix-array.d.ts b/amplify/functions/deleteDocument/node_modules/mnemonist/suffix-array.d.ts new file mode 100644 index 0000000..b959403 --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/mnemonist/suffix-array.d.ts @@ -0,0 +1,37 @@ +/** + * Mnemonist SuffixArray Typings + * ============================== + */ +export default class SuffixArray { + + // Members + array: Array; + length: number; + string: string | Array; + + // Constructor + constructor(string: string | Array); + + // Methods + toString(): string; + toJSON(): Array; + inspect(): any; +} + +export class GeneralizedSuffixArray { + + // Members + array: Array; + length: number; + size: number; + text: string | Array; + + // Constructor + constructor(strings: Array | Array>); + + // Methods + longestCommonSubsequence(): string | Array; + toString(): string; + toJSON(): Array; + inspect(): any; +} \ No newline at end of file diff --git a/amplify/functions/deleteDocument/node_modules/mnemonist/suffix-array.js b/amplify/functions/deleteDocument/node_modules/mnemonist/suffix-array.js new file mode 100644 index 0000000..14990f4 --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/mnemonist/suffix-array.js @@ -0,0 +1,352 @@ +/** + * Mnemonist Suffix Array + * ======================= + * + * Linear time implementation of a suffix array using the recursive + * method by Karkkainen and Sanders. + * + * [References]: + * https://www.cs.helsinki.fi/u/tpkarkka/publications/jacm05-revised.pdf + * http://people.mpi-inf.mpg.de/~sanders/programs/suffix/ + * http://citeseerx.ist.psu.edu/viewdoc/download?doi=10.1.1.184.442&rep=rep1&type=pdf + * + * [Article]: + * "Simple Linear Work Suffix Array Construction", Karkkainen and Sanders. + * + * [Note]: + * A paper by Simon J. Puglisi, William F. 
Smyth & Andrew Turpin named + * "The Performance of Linear Time Suffix Sorting Algorithms" seems to + * prove that supralinear algorithm are in fact better faring for + * "real" world use cases. It would be nice to check this out in JavaScript + * because the high level of the language could change a lot to the fact. + * + * The current code is largely inspired by the following: + * https://github.com/tixxit/suffixarray/blob/master/suffixarray.js + */ + +/** + * Constants. + */ +var SEPARATOR = '\u0001'; + +/** + * Function used to sort the triples. + * + * @param {string|array} string - Padded sequence. + * @param {array} array - Array to sort (will be mutated). + * @param {number} offset - Index offset. + */ +function sort(string, array, offset) { + var l = array.length, + buckets = [], + i = l, + j = -1, + b, + d = 0, + bits; + + while (i--) + j = Math.max(string[array[i] + offset], j); + + bits = j >> 24 && 32 || j >> 16 && 24 || j >> 8 && 16 || 8; + + for (; d < bits; d += 4) { + for (i = 16; i--;) + buckets[i] = []; + for (i = l; i--;) + buckets[((string[array[i] + offset]) >> d) & 15].push(array[i]); + for (b = 0; b < 16; b++) { + for (j = buckets[b].length; j--;) + array[++i] = buckets[b][j]; + } + } +} + +/** + * Comparison helper. + */ +function compare(string, lookup, m, n) { + return ( + (string[m] - string[n]) || + (m % 3 === 2 ? + (string[m + 1] - string[n + 1]) || (lookup[m + 2] - lookup[n + 2]) : + (lookup[m + 1] - lookup[n + 1])) + ); +} + +/** + * Recursive function used to build the suffix tree in linear time. + * + * @param {string|array} string - Padded sequence. + * @param {number} l - True length of sequence (unpadded). 
+ * @return {array} + */ +function build(string, l) { + var a = [], + b = [], + al = (2 * l / 3) | 0, + bl = l - al, + r = (al + 1) >> 1, + i = al, + j = 0, + k, + lookup = [], + result = []; + + if (l === 1) + return [0]; + + while (i--) + a[i] = ((i * 3) >> 1) + 1; + + for (i = 3; i--;) + sort(string, a, i); + + j = b[((a[0] / 3) | 0) + (a[0] % 3 === 1 ? 0 : r)] = 1; + + for (i = 1; i < al; i++) { + if (string[a[i]] !== string[a[i - 1]] || + string[a[i] + 1] !== string[a[i - 1] + 1] || + string[a[i] + 2] !== string[a[i - 1] + 2]) + j++; + + b[((a[i] / 3) | 0) + (a[i] % 3 === 1 ? 0 : r)] = j; + } + + if (j < al) { + b = build(b, al); + + for (i = al; i--;) + a[i] = b[i] < r ? b[i] * 3 + 1 : ((b[i] - r) * 3 + 2); + } + + for (i = al; i--;) + lookup[a[i]] = i; + lookup[l] = -1; + lookup[l + 1] = -2; + + b = l % 3 === 1 ? [l - 1] : []; + + for (i = 0; i < al; i++) { + if (a[i] % 3 === 1) + b.push(a[i] - 1); + } + + sort(string, b, 0); + + for (i = 0, j = 0, k = 0; i < al && j < bl;) + result[k++] = ( + compare(string, lookup, a[i], b[j]) < 0 ? + a[i++] : + b[j++] + ); + + while (i < al) + result[k++] = a[i++]; + + while (j < bl) + result[k++] = b[j++]; + + return result; +} + +/** + * Function used to create the array we are going to work on. + * + * @param {string|array} target - Target sequence. 
+ * @return {array} + */ +function convert(target) { + + // Creating the alphabet array + var length = target.length, + paddingOffset = length % 3, + array = new Array(length + paddingOffset), + l, + i; + + // If we have an arbitrary sequence, we need to transform it + if (typeof target !== 'string') { + var uniqueTokens = Object.create(null); + + for (i = 0; i < length; i++) { + if (!uniqueTokens[target[i]]) + uniqueTokens[target[i]] = true; + } + + var alphabet = Object.create(null), + sortedUniqueTokens = Object.keys(uniqueTokens).sort(); + + for (i = 0, l = sortedUniqueTokens.length; i < l; i++) + alphabet[sortedUniqueTokens[i]] = i + 1; + + for (i = 0; i < length; i++) { + array[i] = alphabet[target[i]]; + } + } + else { + for (i = 0; i < length; i++) + array[i] = target.charCodeAt(i); + } + + // Padding the array + for (; i < paddingOffset; i++) + array[i] = 0; + + return array; +} + +/** + * Suffix Array. + * + * @constructor + * @param {string|array} string - Sequence for which to build the suffix array. + */ +function SuffixArray(string) { + + // Properties + this.hasArbitrarySequence = typeof string !== 'string'; + this.string = string; + this.length = string.length; + + // Building the array + this.array = build(convert(string), this.length); +} + +/** + * Convenience known methods. 
 */
SuffixArray.prototype.toString = function() {
  return this.array.join(',');
};

SuffixArray.prototype.toJSON = function() {
  return this.array;
};

SuffixArray.prototype.inspect = function() {
  // Displays the actual suffixes, in suffix-array order.
  var array = new Array(this.length);

  for (var i = 0; i < this.length; i++)
    array[i] = this.string.slice(this.array[i]);

  // Trick so that node displays the name of the constructor
  Object.defineProperty(array, 'constructor', {
    value: SuffixArray,
    enumerable: false
  });

  return array;
};

if (typeof Symbol !== 'undefined')
  SuffixArray.prototype[Symbol.for('nodejs.util.inspect.custom')] = SuffixArray.prototype.inspect;

/**
 * Generalized Suffix Array.
 *
 * Concatenates the given strings (or arbitrary token sequences) with a
 * SEPARATOR sentinel between them and builds a single suffix array over
 * the result.
 *
 * @constructor
 * @param {array} strings - Array of strings, or array of token arrays.
 */
function GeneralizedSuffixArray(strings) {

  // Properties
  this.hasArbitrarySequence = typeof strings[0] !== 'string';
  this.size = strings.length;

  if (this.hasArbitrarySequence) {
    this.text = [];

    for (var i = 0, l = this.size; i < l; i++) {
      this.text.push.apply(this.text, strings[i]);

      if (i < l - 1)
        this.text.push(SEPARATOR);
    }
  }
  else {
    this.text = strings.join(SEPARATOR);
  }

  // Length of the first string, used to tell which side of the separator a
  // suffix starts on.
  this.firstLength = strings[0].length;
  this.length = this.text.length;

  // Building the array
  this.array = build(convert(this.text), this.length);
}

/**
 * Method used to retrieve the longest common subsequence of the generalized
 * suffix array.
 *
 * @return {string|array}
 */
GeneralizedSuffixArray.prototype.longestCommonSubsequence = function() {
  var lcs = this.hasArbitrarySequence ? [] : '',
      lcp,
      i,
      j,
      s,
      t;

  // Scanning adjacent suffix pairs that start in different source strings
  // and keeping the longest common prefix found.
  // NOTE(review): a suffix starting exactly at `firstLength` (the separator
  // position) passes both guards below — confirm this boundary is intended.
  for (i = 1; i < this.length; i++) {
    s = this.array[i];
    t = this.array[i - 1];

    if (s < this.firstLength &&
        t < this.firstLength)
      continue;

    if (s > this.firstLength &&
        t > this.firstLength)
      continue;

    lcp = Math.min(this.length - s, this.length - t);

    for (j = 0; j < lcp; j++) {
      if (this.text[s + j] !== this.text[t + j]) {
        lcp = j;
        break;
      }
    }

    if (lcp > lcs.length)
      lcs = this.text.slice(s, s + lcp);
  }

  return lcs;
};

/**
 * Convenience known methods.
 */
GeneralizedSuffixArray.prototype.toString = function() {
  return this.array.join(',');
};

GeneralizedSuffixArray.prototype.toJSON = function() {
  return this.array;
};

GeneralizedSuffixArray.prototype.inspect = function() {
  var array = new Array(this.length);

  for (var i = 0; i < this.length; i++)
    array[i] = this.text.slice(this.array[i]);

  // Trick so that node displays the name of the constructor
  Object.defineProperty(array, 'constructor', {
    value: GeneralizedSuffixArray,
    enumerable: false
  });

  return array;
};

if (typeof Symbol !== 'undefined')
  GeneralizedSuffixArray.prototype[Symbol.for('nodejs.util.inspect.custom')] = GeneralizedSuffixArray.prototype.inspect;

/**
 * Exporting.
+ */ +SuffixArray.GeneralizedSuffixArray = GeneralizedSuffixArray; +module.exports = SuffixArray; diff --git a/amplify/functions/deleteDocument/node_modules/mnemonist/symspell.d.ts b/amplify/functions/deleteDocument/node_modules/mnemonist/symspell.d.ts new file mode 100644 index 0000000..0e926d0 --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/mnemonist/symspell.d.ts @@ -0,0 +1,33 @@ +/** + * Mnemonist SymSpell Typings + * =========================== + */ +type SymSpellVerbosity = 0 | 1 | 2; + +type SymSpellOptions = { + maxDistance?: number; + verbosity?: SymSpellVerbosity +}; + +type SymSpellMatch = { + term: string; + distance: number; + count: number; +} + +export default class SymSpell { + + // Members + size: number; + + // Constructor + constructor(options?: SymSpellOptions); + + // Methods + clear(): void; + add(string: string): this; + search(query: string): Array; + + // Statics + static from(strings: Iterable | {[key: string]: string}, options?: SymSpellOptions): SymSpell; +} diff --git a/amplify/functions/deleteDocument/node_modules/mnemonist/symspell.js b/amplify/functions/deleteDocument/node_modules/mnemonist/symspell.js new file mode 100644 index 0000000..365ee43 --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/mnemonist/symspell.js @@ -0,0 +1,547 @@ +/* eslint no-loop-func: 0 */ +/** + * Mnemonist SymSpell + * =================== + * + * JavaScript implementation of the Symmetric Delete Spelling dictionary to + * efficiently index & query expression based on edit distance. + * Note that the current implementation target the v3.0 of the algorithm. + * + * [Reference]: + * http://blog.faroo.com/2012/06/07/improved-edit-distance-based-spelling-correction/ + * https://github.com/wolfgarbe/symspell + * + * [Author]: + * Wolf Garbe + */ +var forEach = require('obliterator/foreach'); + +/** + * Constants. 
+ */ +var DEFAULT_MAX_DISTANCE = 2, + DEFAULT_VERBOSITY = 2; + +var VERBOSITY = new Set([ + // Returns only the top suggestion + 0, + // Returns suggestions with the smallest edit distance + 1, + // Returns every suggestion (no early termination) + 2 +]); + +var VERBOSITY_EXPLANATIONS = { + 0: 'Returns only the top suggestion', + 1: 'Returns suggestions with the smallest edit distance', + 2: 'Returns every suggestion (no early termination)' +}; + +/** + * Functions. + */ + +/** + * Function creating a dictionary item. + * + * @param {number} [value] - An optional suggestion. + * @return {object} - The created item. + */ +function createDictionaryItem(value) { + var suggestions = new Set(); + + if (typeof value === 'number') + suggestions.add(value); + + return { + suggestions, + count: 0 + }; +} + +/** + * Function creating a suggestion item. + * + * @return {object} - The created item. + */ +function createSuggestionItem(term, distance, count) { + return { + term: term || '', + distance: distance || 0, + count: count || 0 + }; +} + +/** + * Simplified edit function. + * + * @param {string} word - Target word. + * @param {number} distance - Distance. + * @param {number} max - Max distance. + * @param {Set} [deletes] - Set mutated to store deletes. + */ +function edits(word, distance, max, deletes) { + deletes = deletes || new Set(); + distance++; + + var deletedItem, + l = word.length, + i; + + if (l > 1) { + for (i = 0; i < l; i++) { + deletedItem = word.substring(0, i) + word.substring(i + 1); + + if (!deletes.has(deletedItem)) { + deletes.add(deletedItem); + + if (distance < max) + edits(deletedItem, distance, max, deletes); + } + } + } + + return deletes; +} + +/** + * Function used to conditionally add suggestions. + * + * @param {array} words - Words list. + * @param {number} verbosity - Verbosity level. + * @param {object} item - The target item. + * @param {string} suggestion - The target suggestion. + * @param {number} int - Integer key of the word. 
+ * @param {object} deletedItem - Considered deleted item. + * @param {SymSpell} + */ +function addLowestDistance(words, verbosity, item, suggestion, int, deletedItem) { + var first = item.suggestions.values().next().value; + + if (verbosity < 2 && + item.suggestions.size > 0 && + words[first].length - deletedItem.length > suggestion.length - deletedItem.length) { + item.suggestions = new Set(); + item.count = 0; + } + + if (verbosity === 2 || + !item.suggestions.size || + words[first].length - deletedItem.length >= suggestion.length - deletedItem.length) { + item.suggestions.add(int); + } +} + +/** + * Custom Damerau-Levenshtein used by the algorithm. + * + * @param {string} source - First string. + * @param {string} target - Second string. + * @return {number} - The distance. + */ +function damerauLevenshtein(source, target) { + var m = source.length, + n = target.length, + H = [[]], + INF = m + n, + sd = new Map(), + i, + l, + j; + + H[0][0] = INF; + + for (i = 0; i <= m; i++) { + if (!H[i + 1]) + H[i + 1] = []; + H[i + 1][1] = i; + H[i + 1][0] = INF; + } + + for (j = 0; j <= n; j++) { + H[1][j + 1] = j; + H[0][j + 1] = INF; + } + + var st = source + target, + letter; + + for (i = 0, l = st.length; i < l; i++) { + letter = st[i]; + + if (!sd.has(letter)) + sd.set(letter, 0); + } + + // Iterating + for (i = 1; i <= m; i++) { + var DB = 0; + + for (j = 1; j <= n; j++) { + var i1 = sd.get(target[j - 1]), + j1 = DB; + + if (source[i - 1] === target[j - 1]) { + H[i + 1][j + 1] = H[i][j]; + DB = j; + } + else { + H[i + 1][j + 1] = Math.min( + H[i][j], + H[i + 1][j], + H[i][j + 1] + ) + 1; + } + + H[i + 1][j + 1] = Math.min( + H[i + 1][j + 1], + H[i1][j1] + (i - i1 - 1) + 1 + (j - j1 - 1) + ); + } + + sd.set(source[i - 1], i); + } + + return H[m + 1][n + 1]; +} + +/** + * Lookup function. + * + * @param {object} dictionary - A SymSpell dictionary. + * @param {array} words - Unique words list. + * @param {number} verbosity - Verbosity level. 
+ * @param {number} maxDistance - Maximum distance. + * @param {number} maxLength - Maximum word length in the dictionary. + * @param {string} input - Input string. + * @return {array} - The list of suggestions. + */ +function lookup(dictionary, words, verbosity, maxDistance, maxLength, input) { + var length = input.length; + + if (length - maxDistance > maxLength) + return []; + + var candidates = [input], + candidateSet = new Set(), + suggestionSet = new Set(); + + var suggestions = [], + candidate, + item; + + // Exhausting every candidates + while (candidates.length > 0) { + candidate = candidates.shift(); + + // Early termination + if ( + verbosity < 2 && + suggestions.length > 0 && + length - candidate.length > suggestions[0].distance + ) + break; + + item = dictionary[candidate]; + + if (item !== undefined) { + if (typeof item === 'number') + item = createDictionaryItem(item); + + if (item.count > 0 && !suggestionSet.has(candidate)) { + suggestionSet.add(candidate); + + var suggestItem = createSuggestionItem( + candidate, + length - candidate.length, + item.count + ); + + suggestions.push(suggestItem); + + // Another early termination + if (verbosity < 2 && length - candidate.length === 0) + break; + } + + // Iterating over the item's suggestions + item.suggestions.forEach(index => { + var suggestion = words[index]; + + // Do we already have this suggestion? 
+ if (suggestionSet.has(suggestion)) + return; + + suggestionSet.add(suggestion); + + // Computing distance between candidate & suggestion + var distance = 0; + + if (input !== suggestion) { + if (suggestion.length === candidate.length) { + distance = length - candidate.length; + } + else if (length === candidate.length) { + distance = suggestion.length - candidate.length; + } + else { + var ii = 0, + jj = 0; + + var l = suggestion.length; + + while ( + ii < l && + ii < length && + suggestion[ii] === input[ii] + ) { + ii++; + } + + while ( + jj < l - ii && + jj < length && + suggestion[l - jj - 1] === input[length - jj - 1] + ) { + jj++; + } + + if (ii > 0 || jj > 0) { + distance = damerauLevenshtein( + suggestion.substr(ii, l - ii - jj), + input.substr(ii, length - ii - jj) + ); + } + else { + distance = damerauLevenshtein(suggestion, input); + } + } + } + + // Removing suggestions of higher distance + if (verbosity < 2 && + suggestions.length > 0 && + suggestions[0].distance > distance) { + suggestions = []; + } + + if (verbosity < 2 && + suggestions.length > 0 && + distance > suggestions[0].distance) { + return; + } + + if (distance <= maxDistance) { + var target = dictionary[suggestion]; + + if (target !== undefined) { + suggestions.push(createSuggestionItem( + suggestion, + distance, + target.count + )); + } + } + }); + } + + // Adding edits + if (length - candidate.length < maxDistance) { + + if (verbosity < 2 && + suggestions.length > 0 && + length - candidate.length >= suggestions[0].distance) + continue; + + for (var i = 0, l = candidate.length; i < l; i++) { + var deletedItem = ( + candidate.substring(0, i) + + candidate.substring(i + 1) + ); + + if (!candidateSet.has(deletedItem)) { + candidateSet.add(deletedItem); + candidates.push(deletedItem); + } + } + } + } + + if (verbosity === 0) + return suggestions.slice(0, 1); + + return suggestions; +} + +/** + * SymSpell. 
+ * + * @constructor + */ +function SymSpell(options) { + options = options || {}; + + this.clear(); + + // Properties + this.maxDistance = typeof options.maxDistance === 'number' ? + options.maxDistance : + DEFAULT_MAX_DISTANCE; + this.verbosity = typeof options.verbosity === 'number' ? + options.verbosity : + DEFAULT_VERBOSITY; + + // Sanity checks + if (typeof this.maxDistance !== 'number' || this.maxDistance <= 0) + throw Error('mnemonist/SymSpell.constructor: invalid `maxDistance` option. Should be a integer greater than 0.'); + + if (!VERBOSITY.has(this.verbosity)) + throw Error('mnemonist/SymSpell.constructor: invalid `verbosity` option. Should be either 0, 1 or 2.'); +} + +/** + * Method used to clear the structure. + * + * @return {undefined} + */ +SymSpell.prototype.clear = function() { + + // Properties + this.size = 0; + this.dictionary = Object.create(null); + this.maxLength = 0; + this.words = []; +}; + +/** + * Method used to add a word to the index. + * + * @param {string} word - Word to add. 
+ * @param {SymSpell} + */ +SymSpell.prototype.add = function(word) { + var item = this.dictionary[word]; + + if (item !== undefined) { + if (typeof item === 'number') { + item = createDictionaryItem(item); + this.dictionary[word] = item; + } + + item.count++; + } + + else { + item = createDictionaryItem(); + item.count++; + + this.dictionary[word] = item; + + if (word.length > this.maxLength) + this.maxLength = word.length; + } + + if (item.count === 1) { + var number = this.words.length; + this.words.push(word); + + var deletes = edits(word, 0, this.maxDistance); + + deletes.forEach(deletedItem => { + var target = this.dictionary[deletedItem]; + + if (target !== undefined) { + if (typeof target === 'number') { + target = createDictionaryItem(target); + + this.dictionary[deletedItem] = target; + } + + if (!target.suggestions.has(number)) { + addLowestDistance( + this.words, + this.verbosity, + target, + word, + number, + deletedItem + ); + } + } + else { + this.dictionary[deletedItem] = number; + } + }); + } + + this.size++; + + return this; +}; + +/** + * Method used to search the index. + * + * @param {string} input - Input query. + * @return {array} - The found suggestions. + */ +SymSpell.prototype.search = function(input) { + return lookup( + this.dictionary, + this.words, + this.verbosity, + this.maxDistance, + this.maxLength, + input + ); +}; + +/** + * Convenience known methods. 
+ */ +SymSpell.prototype.inspect = function() { + var array = []; + + array.size = this.size; + array.maxDistance = this.maxDistance; + array.verbosity = this.verbosity; + array.behavior = VERBOSITY_EXPLANATIONS[this.verbosity]; + + for (var k in this.dictionary) { + if (typeof this.dictionary[k] === 'object' && this.dictionary[k].count) + array.push([k, this.dictionary[k].count]); + } + + // Trick so that node displays the name of the constructor + Object.defineProperty(array, 'constructor', { + value: SymSpell, + enumerable: false + }); + + return array; +}; + +if (typeof Symbol !== 'undefined') + SymSpell.prototype[Symbol.for('nodejs.util.inspect.custom')] = SymSpell.prototype.inspect; + +/** + * Static @.from function taking an arbitrary iterable & converting it into + * a structure. + * + * @param {Iterable} iterable - Target iterable. + * @return {SymSpell} + */ +SymSpell.from = function(iterable, options) { + var index = new SymSpell(options); + + forEach(iterable, function(value) { + index.add(value); + }); + + return index; +}; + +/** + * Exporting. 
+ */ +module.exports = SymSpell; diff --git a/amplify/functions/deleteDocument/node_modules/mnemonist/trie-map.d.ts b/amplify/functions/deleteDocument/node_modules/mnemonist/trie-map.d.ts new file mode 100644 index 0000000..b083304 --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/mnemonist/trie-map.d.ts @@ -0,0 +1,30 @@ +/** + * Mnemonist TrieMap Typings + * ========================== + */ +export default class TrieMap implements Iterable<[K, V]> { + + // Members + size: number; + + // Constructor + constructor(Token?: new () => K); + + // Methods + clear(): void; + set(prefix: K, value: V): this; + update(prefix: K, updateFunction: (oldValue: V | undefined) => V): this + get(prefix: K): V; + delete(prefix: K): boolean; + has(prefix: K): boolean; + find(prefix: K): Array<[K, V]>; + values(): IterableIterator; + prefixes(): IterableIterator; + keys(): IterableIterator; + entries(): IterableIterator<[K, V]>; + [Symbol.iterator](): IterableIterator<[K, V]>; + inspect(): any; + + // Statics + static from(iterable: Iterable<[I, J]> | {[key: string]: J}): TrieMap; +} diff --git a/amplify/functions/deleteDocument/node_modules/mnemonist/trie-map.js b/amplify/functions/deleteDocument/node_modules/mnemonist/trie-map.js new file mode 100644 index 0000000..d601448 --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/mnemonist/trie-map.js @@ -0,0 +1,477 @@ +/** + * Mnemonist TrieMap + * ================== + * + * JavaScript TrieMap implementation based upon plain objects. As such this + * structure is more a convenience building upon the trie's advantages than + * a real performant alternative to already existing structures. + * + * Note that the Trie is based upon the TrieMap since the underlying machine + * is the very same. The Trie just does not let you set values and only + * considers the existence of the given prefixes. + */ +var forEach = require('obliterator/foreach'), + Iterator = require('obliterator/iterator'); + +/** + * Constants. 
+ */ +var SENTINEL = String.fromCharCode(0); + +/** + * TrieMap. + * + * @constructor + */ +function TrieMap(Token) { + this.mode = Token === Array ? 'array' : 'string'; + this.clear(); +} + +/** + * Method used to clear the trie. + * + * @return {undefined} + */ +TrieMap.prototype.clear = function() { + + // Properties + this.root = {}; + this.size = 0; +}; + +/** + * Method used to set the value of the given prefix in the trie. + * + * @param {string|array} prefix - Prefix to follow. + * @param {any} value - Value for the prefix. + * @return {TrieMap} + */ +TrieMap.prototype.set = function(prefix, value) { + var node = this.root, + token; + + for (var i = 0, l = prefix.length; i < l; i++) { + token = prefix[i]; + + node = node[token] || (node[token] = {}); + } + + // Do we need to increase size? + if (!(SENTINEL in node)) + this.size++; + + node[SENTINEL] = value; + + return this; +}; + +/** + * Method used to update the value of the given prefix in the trie. + * + * @param {string|array} prefix - Prefix to follow. + * @param {(oldValue: any | undefined) => any} updateFunction - Update value visitor callback. + * @return {TrieMap} + */ +TrieMap.prototype.update = function(prefix, updateFunction) { + var node = this.root, + token; + + for (var i = 0, l = prefix.length; i < l; i++) { + token = prefix[i]; + + node = node[token] || (node[token] = {}); + } + + // Do we need to increase size? + if (!(SENTINEL in node)) + this.size++; + + node[SENTINEL] = updateFunction(node[SENTINEL]); + + return this; +}; + +/** + * Method used to return the value sitting at the end of the given prefix or + * undefined if none exist. + * + * @param {string|array} prefix - Prefix to follow. 
+ * @return {any|undefined} + */ +TrieMap.prototype.get = function(prefix) { + var node = this.root, + token, + i, + l; + + for (i = 0, l = prefix.length; i < l; i++) { + token = prefix[i]; + node = node[token]; + + // Prefix does not exist + if (typeof node === 'undefined') + return; + } + + if (!(SENTINEL in node)) + return; + + return node[SENTINEL]; +}; + +/** + * Method used to delete a prefix from the trie. + * + * @param {string|array} prefix - Prefix to delete. + * @return {boolean} + */ +TrieMap.prototype.delete = function(prefix) { + var node = this.root, + toPrune = null, + tokenToPrune = null, + parent, + token, + i, + l; + + for (i = 0, l = prefix.length; i < l; i++) { + token = prefix[i]; + parent = node; + node = node[token]; + + // Prefix does not exist + if (typeof node === 'undefined') + return false; + + // Keeping track of a potential branch to prune + if (toPrune !== null) { + if (Object.keys(node).length > 1) { + toPrune = null; + tokenToPrune = null; + } + } + else { + if (Object.keys(node).length < 2) { + toPrune = parent; + tokenToPrune = token; + } + } + } + + if (!(SENTINEL in node)) + return false; + + this.size--; + + if (toPrune) + delete toPrune[tokenToPrune]; + else + delete node[SENTINEL]; + + return true; +}; + +// TODO: add #.prune? + +/** + * Method used to assert whether the given prefix exists in the TrieMap. + * + * @param {string|array} prefix - Prefix to check. + * @return {boolean} + */ +TrieMap.prototype.has = function(prefix) { + var node = this.root, + token; + + for (var i = 0, l = prefix.length; i < l; i++) { + token = prefix[i]; + node = node[token]; + + if (typeof node === 'undefined') + return false; + } + + return SENTINEL in node; +}; + +/** + * Method used to retrieve every item in the trie with the given prefix. + * + * @param {string|array} prefix - Prefix to query. 
+ * @return {array} + */ +TrieMap.prototype.find = function(prefix) { + var isString = typeof prefix === 'string'; + + var node = this.root, + matches = [], + token, + i, + l; + + for (i = 0, l = prefix.length; i < l; i++) { + token = prefix[i]; + node = node[token]; + + if (typeof node === 'undefined') + return matches; + } + + // Performing DFS from prefix + var nodeStack = [node], + prefixStack = [prefix], + k; + + while (nodeStack.length) { + prefix = prefixStack.pop(); + node = nodeStack.pop(); + + for (k in node) { + if (k === SENTINEL) { + matches.push([prefix, node[SENTINEL]]); + continue; + } + + nodeStack.push(node[k]); + prefixStack.push(isString ? prefix + k : prefix.concat(k)); + } + } + + return matches; +}; + +/** + * Method returning an iterator over the trie's values. + * + * @param {string|array} [prefix] - Optional starting prefix. + * @return {Iterator} + */ +TrieMap.prototype.values = function(prefix) { + var node = this.root, + nodeStack = [], + token, + i, + l; + + // Resolving initial prefix + if (prefix) { + for (i = 0, l = prefix.length; i < l; i++) { + token = prefix[i]; + node = node[token]; + + // If the prefix does not exist, we return an empty iterator + if (typeof node === 'undefined') + return Iterator.empty(); + } + } + + nodeStack.push(node); + + return new Iterator(function() { + var currentNode, + hasValue = false, + k; + + while (nodeStack.length) { + currentNode = nodeStack.pop(); + + for (k in currentNode) { + if (k === SENTINEL) { + hasValue = true; + continue; + } + + nodeStack.push(currentNode[k]); + } + + if (hasValue) + return {done: false, value: currentNode[SENTINEL]}; + } + + return {done: true}; + }); +}; + +/** + * Method returning an iterator over the trie's prefixes. + * + * @param {string|array} [prefix] - Optional starting prefix. 
+ * @return {Iterator} + */ +TrieMap.prototype.prefixes = function(prefix) { + var node = this.root, + nodeStack = [], + prefixStack = [], + token, + i, + l; + + var isString = this.mode === 'string'; + + // Resolving initial prefix + if (prefix) { + for (i = 0, l = prefix.length; i < l; i++) { + token = prefix[i]; + node = node[token]; + + // If the prefix does not exist, we return an empty iterator + if (typeof node === 'undefined') + return Iterator.empty(); + } + } + else { + prefix = isString ? '' : []; + } + + nodeStack.push(node); + prefixStack.push(prefix); + + return new Iterator(function() { + var currentNode, + currentPrefix, + hasValue = false, + k; + + while (nodeStack.length) { + currentNode = nodeStack.pop(); + currentPrefix = prefixStack.pop(); + + for (k in currentNode) { + if (k === SENTINEL) { + hasValue = true; + continue; + } + + nodeStack.push(currentNode[k]); + prefixStack.push(isString ? currentPrefix + k : currentPrefix.concat(k)); + } + + if (hasValue) + return {done: false, value: currentPrefix}; + } + + return {done: true}; + }); +}; +TrieMap.prototype.keys = TrieMap.prototype.prefixes; + +/** + * Method returning an iterator over the trie's entries. + * + * @param {string|array} [prefix] - Optional starting prefix. + * @return {Iterator} + */ +TrieMap.prototype.entries = function(prefix) { + var node = this.root, + nodeStack = [], + prefixStack = [], + token, + i, + l; + + var isString = this.mode === 'string'; + + // Resolving initial prefix + if (prefix) { + for (i = 0, l = prefix.length; i < l; i++) { + token = prefix[i]; + node = node[token]; + + // If the prefix does not exist, we return an empty iterator + if (typeof node === 'undefined') + return Iterator.empty(); + } + } + else { + prefix = isString ? 
'' : []; + } + + nodeStack.push(node); + prefixStack.push(prefix); + + return new Iterator(function() { + var currentNode, + currentPrefix, + hasValue = false, + k; + + while (nodeStack.length) { + currentNode = nodeStack.pop(); + currentPrefix = prefixStack.pop(); + + for (k in currentNode) { + if (k === SENTINEL) { + hasValue = true; + continue; + } + + nodeStack.push(currentNode[k]); + prefixStack.push(isString ? currentPrefix + k : currentPrefix.concat(k)); + } + + if (hasValue) + return {done: false, value: [currentPrefix, currentNode[SENTINEL]]}; + } + + return {done: true}; + }); +}; + +/** + * Attaching the #.entries method to Symbol.iterator if possible. + */ +if (typeof Symbol !== 'undefined') + TrieMap.prototype[Symbol.iterator] = TrieMap.prototype.entries; + +/** + * Convenience known methods. + */ +TrieMap.prototype.inspect = function() { + var proxy = new Array(this.size); + + var iterator = this.entries(), + step, + i = 0; + + while ((step = iterator.next(), !step.done)) + proxy[i++] = step.value; + + // Trick so that node displays the name of the constructor + Object.defineProperty(proxy, 'constructor', { + value: TrieMap, + enumerable: false + }); + + return proxy; +}; + +if (typeof Symbol !== 'undefined') + TrieMap.prototype[Symbol.for('nodejs.util.inspect.custom')] = TrieMap.prototype.inspect; + +TrieMap.prototype.toJSON = function() { + return this.root; +}; + +/** + * Static @.from function taking an arbitrary iterable & converting it into + * a trie. + * + * @param {Iterable} iterable - Target iterable. + * @return {TrieMap} + */ +TrieMap.from = function(iterable) { + var trie = new TrieMap(); + + forEach(iterable, function(value, key) { + trie.set(key, value); + }); + + return trie; +}; + +/** + * Exporting. 
+ */ +TrieMap.SENTINEL = SENTINEL; +module.exports = TrieMap; diff --git a/amplify/functions/deleteDocument/node_modules/mnemonist/trie.d.ts b/amplify/functions/deleteDocument/node_modules/mnemonist/trie.d.ts new file mode 100644 index 0000000..4b2a202 --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/mnemonist/trie.d.ts @@ -0,0 +1,26 @@ +/** + * Mnemonist Trie Typings + * ======================= + */ +export default class Trie implements Iterable { + + // Members + size: number; + + // Constructor + constructor(Token?: new () => T); + + // Methods + clear(): void; + add(prefix: T): this; + delete(prefix: T): boolean; + has(prefix: T): boolean; + find(prefix: T): Array; + prefixes(): IterableIterator; + keys(): IterableIterator; + [Symbol.iterator](): IterableIterator; + inspect(): any; + + // Statics + static from(iterable: Iterable | {[key: string]: I}): Trie; +} diff --git a/amplify/functions/deleteDocument/node_modules/mnemonist/trie.js b/amplify/functions/deleteDocument/node_modules/mnemonist/trie.js new file mode 100644 index 0000000..9562aef --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/mnemonist/trie.js @@ -0,0 +1,167 @@ +/** + * Mnemonist Trie + * =============== + * + * JavaScript Trie implementation based upon plain objects. As such this + * structure is more a convenience building upon the trie's advantages than + * a real performant alternative to already existing structures. + * + * Note that the Trie is based upon the TrieMap since the underlying machine + * is the very same. The Trie just does not let you set values and only + * considers the existence of the given prefixes. + */ +var forEach = require('obliterator/foreach'), + TrieMap = require('./trie-map.js'); + +/** + * Constants. + */ +var SENTINEL = String.fromCharCode(0); + +/** + * Trie. + * + * @constructor + */ +function Trie(Token) { + this.mode = Token === Array ? 
'array' : 'string'; + this.clear(); +} + +// Re-using TrieMap's prototype +for (var methodName in TrieMap.prototype) + Trie.prototype[methodName] = TrieMap.prototype[methodName]; + +// Dropping irrelevant methods +delete Trie.prototype.set; +delete Trie.prototype.get; +delete Trie.prototype.values; +delete Trie.prototype.entries; + +/** + * Method used to add the given prefix to the trie. + * + * @param {string|array} prefix - Prefix to follow. + * @return {TrieMap} + */ +Trie.prototype.add = function(prefix) { + var node = this.root, + token; + + for (var i = 0, l = prefix.length; i < l; i++) { + token = prefix[i]; + + node = node[token] || (node[token] = {}); + } + + // Do we need to increase size? + if (!(SENTINEL in node)) + this.size++; + + node[SENTINEL] = true; + + return this; +}; + +/** + * Method used to retrieve every item in the trie with the given prefix. + * + * @param {string|array} prefix - Prefix to query. + * @return {array} + */ +Trie.prototype.find = function(prefix) { + var isString = typeof prefix === 'string'; + + var node = this.root, + matches = [], + token, + i, + l; + + for (i = 0, l = prefix.length; i < l; i++) { + token = prefix[i]; + node = node[token]; + + if (typeof node === 'undefined') + return matches; + } + + // Performing DFS from prefix + var nodeStack = [node], + prefixStack = [prefix], + k; + + while (nodeStack.length) { + prefix = prefixStack.pop(); + node = nodeStack.pop(); + + for (k in node) { + if (k === SENTINEL) { + matches.push(prefix); + continue; + } + + nodeStack.push(node[k]); + prefixStack.push(isString ? prefix + k : prefix.concat(k)); + } + } + + return matches; +}; + +/** + * Attaching the #.keys method to Symbol.iterator if possible. + */ +if (typeof Symbol !== 'undefined') + Trie.prototype[Symbol.iterator] = Trie.prototype.keys; + +/** + * Convenience known methods. 
+ */ +Trie.prototype.inspect = function() { + var proxy = new Set(); + + var iterator = this.keys(), + step; + + while ((step = iterator.next(), !step.done)) + proxy.add(step.value); + + // Trick so that node displays the name of the constructor + Object.defineProperty(proxy, 'constructor', { + value: Trie, + enumerable: false + }); + + return proxy; +}; + +if (typeof Symbol !== 'undefined') + Trie.prototype[Symbol.for('nodejs.util.inspect.custom')] = Trie.prototype.inspect; + +Trie.prototype.toJSON = function() { + return this.root; +}; + +/** + * Static @.from function taking an arbitrary iterable & converting it into + * a trie. + * + * @param {Iterable} iterable - Target iterable. + * @return {Trie} + */ +Trie.from = function(iterable) { + var trie = new Trie(); + + forEach(iterable, function(value) { + trie.add(value); + }); + + return trie; +}; + +/** + * Exporting. + */ +Trie.SENTINEL = SENTINEL; +module.exports = Trie; diff --git a/amplify/functions/deleteDocument/node_modules/mnemonist/utils/binary-search.js b/amplify/functions/deleteDocument/node_modules/mnemonist/utils/binary-search.js new file mode 100644 index 0000000..0666c82 --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/mnemonist/utils/binary-search.js @@ -0,0 +1,216 @@ +/** + * Mnemonist Binary Search Helpers + * ================================ + * + * Typical binary search functions. + */ + +/** + * Function returning the index of the search value in the array or `-1` if + * not found. + * + * @param {array} array - Haystack. + * @param {any} value - Needle. + * @return {number} + */ +exports.search = function(array, value, lo, hi) { + var mid = 0; + + lo = typeof lo !== 'undefined' ? lo : 0; + hi = typeof hi !== 'undefined' ? 
hi : array.length; + + hi--; + + var current; + + while (lo <= hi) { + mid = (lo + hi) >>> 1; + + current = array[mid]; + + if (current > value) { + hi = ~-mid; + } + else if (current < value) { + lo = -~mid; + } + else { + return mid; + } + } + + return -1; +}; + +/** + * Same as above, but can use a custom comparator function. + * + * @param {function} comparator - Custom comparator function. + * @param {array} array - Haystack. + * @param {any} value - Needle. + * @return {number} + */ +exports.searchWithComparator = function(comparator, array, value) { + var mid = 0, + lo = 0, + hi = ~-array.length, + comparison; + + while (lo <= hi) { + mid = (lo + hi) >>> 1; + + comparison = comparator(array[mid], value); + + if (comparison > 0) { + hi = ~-mid; + } + else if (comparison < 0) { + lo = -~mid; + } + else { + return mid; + } + } + + return -1; +}; + +/** + * Function returning the lower bound of the given value in the array. + * + * @param {array} array - Haystack. + * @param {any} value - Needle. + * @param {number} [lo] - Start index. + * @param {numner} [hi] - End index. + * @return {number} + */ +exports.lowerBound = function(array, value, lo, hi) { + var mid = 0; + + lo = typeof lo !== 'undefined' ? lo : 0; + hi = typeof hi !== 'undefined' ? hi : array.length; + + while (lo < hi) { + mid = (lo + hi) >>> 1; + + if (value <= array[mid]) { + hi = mid; + } + else { + lo = -~mid; + } + } + + return lo; +}; + +/** + * Same as above, but can use a custom comparator function. + * + * @param {function} comparator - Custom comparator function. + * @param {array} array - Haystack. + * @param {any} value - Needle. + * @return {number} + */ +exports.lowerBoundWithComparator = function(comparator, array, value) { + var mid = 0, + lo = 0, + hi = array.length; + + while (lo < hi) { + mid = (lo + hi) >>> 1; + + if (comparator(value, array[mid]) <= 0) { + hi = mid; + } + else { + lo = -~mid; + } + } + + return lo; +}; + +/** + * Same as above, but can work on sorted indices. 
+ * + * @param {array} array - Haystack. + * @param {array} array - Indices. + * @param {any} value - Needle. + * @return {number} + */ +exports.lowerBoundIndices = function(array, indices, value, lo, hi) { + var mid = 0; + + lo = typeof lo !== 'undefined' ? lo : 0; + hi = typeof hi !== 'undefined' ? hi : array.length; + + while (lo < hi) { + mid = (lo + hi) >>> 1; + + if (value <= array[indices[mid]]) { + hi = mid; + } + else { + lo = -~mid; + } + } + + return lo; +}; + +/** + * Function returning the upper bound of the given value in the array. + * + * @param {array} array - Haystack. + * @param {any} value - Needle. + * @param {number} [lo] - Start index. + * @param {numner} [hi] - End index. + * @return {number} + */ +exports.upperBound = function(array, value, lo, hi) { + var mid = 0; + + lo = typeof lo !== 'undefined' ? lo : 0; + hi = typeof hi !== 'undefined' ? hi : array.length; + + while (lo < hi) { + mid = (lo + hi) >>> 1; + + if (value >= array[mid]) { + lo = -~mid; + } + else { + hi = mid; + } + } + + return lo; +}; + +/** + * Same as above, but can use a custom comparator function. + * + * @param {function} comparator - Custom comparator function. + * @param {array} array - Haystack. + * @param {any} value - Needle. + * @return {number} + */ +exports.upperBoundWithComparator = function(comparator, array, value) { + var mid = 0, + lo = 0, + hi = array.length; + + while (lo < hi) { + mid = (lo + hi) >>> 1; + + if (comparator(value, array[mid]) >= 0) { + lo = -~mid; + } + else { + hi = mid; + } + } + + return lo; +}; diff --git a/amplify/functions/deleteDocument/node_modules/mnemonist/utils/bitwise.js b/amplify/functions/deleteDocument/node_modules/mnemonist/utils/bitwise.js new file mode 100644 index 0000000..191dfc2 --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/mnemonist/utils/bitwise.js @@ -0,0 +1,109 @@ +/** + * Mnemonist Bitwise Helpers + * ========================== + * + * Miscellaneous helpers helping with bitwise operations. 
+ */ + +/** + * Takes a 32 bits integer and returns its MSB using SWAR strategy. + * + * @param {number} x - Target number. + * @return {number} + */ +function msb32(x) { + x |= (x >> 1); + x |= (x >> 2); + x |= (x >> 4); + x |= (x >> 8); + x |= (x >> 16); + + return (x & ~(x >> 1)); +} +exports.msb32 = msb32; + +/** + * Takes a byte and returns its MSB using SWAR strategy. + * + * @param {number} x - Target number. + * @return {number} + */ +function msb8(x) { + x |= (x >> 1); + x |= (x >> 2); + x |= (x >> 4); + + return (x & ~(x >> 1)); +} +exports.msb8 = msb8; + +/** + * Takes a number and return bit at position. + * + * @param {number} x - Target number. + * @param {number} pos - Position. + * @return {number} + */ +exports.test = function(x, pos) { + return (x >> pos) & 1; +}; + +/** + * Compare two bytes and return their critical bit. + * + * @param {number} a - First byte. + * @param {number} b - Second byte. + * @return {number} + */ +exports.criticalBit8 = function(a, b) { + return msb8(a ^ b); +}; + +exports.criticalBit8Mask = function(a, b) { + return (~msb8(a ^ b) >>> 0) & 0xff; +}; + +exports.testCriticalBit8 = function(x, mask) { + return (1 + (x | mask)) >> 8; +}; + +exports.criticalBit32Mask = function(a, b) { + return (~msb32(a ^ b) >>> 0) & 0xffffffff; +}; + +/** + * Takes a 32 bits integer and returns its population count (number of 1 of + * the binary representation). + * + * @param {number} x - Target number. + * @return {number} + */ +exports.popcount = function(x) { + x -= x >> 1 & 0x55555555; + x = (x & 0x33333333) + (x >> 2 & 0x33333333); + x = x + (x >> 4) & 0x0f0f0f0f; + x += x >> 8; + x += x >> 16; + return x & 0x7f; +}; + +/** + * Slightly faster popcount function based on a precomputed table of 8bits + * words. + * + * @param {number} x - Target number. 
+ * @return {number} + */ +var TABLE8 = new Uint8Array(Math.pow(2, 8)); + +for (var i = 0, l = TABLE8.length; i < l; i++) + TABLE8[i] = exports.popcount(i); + +exports.table8Popcount = function(x) { + return ( + TABLE8[x & 0xff] + + TABLE8[(x >> 8) & 0xff] + + TABLE8[(x >> 16) & 0xff] + + TABLE8[(x >> 24) & 0xff] + ); +}; diff --git a/amplify/functions/deleteDocument/node_modules/mnemonist/utils/comparators.js b/amplify/functions/deleteDocument/node_modules/mnemonist/utils/comparators.js new file mode 100644 index 0000000..498b4a6 --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/mnemonist/utils/comparators.js @@ -0,0 +1,79 @@ +/** + * Mnemonist Heap Comparators + * =========================== + * + * Default comparators & functions dealing with comparators reversing etc. + */ +var DEFAULT_COMPARATOR = function(a, b) { + if (a < b) + return -1; + if (a > b) + return 1; + + return 0; +}; + +var DEFAULT_REVERSE_COMPARATOR = function(a, b) { + if (a < b) + return 1; + if (a > b) + return -1; + + return 0; +}; + +/** + * Function used to reverse a comparator. + */ +function reverseComparator(comparator) { + return function(a, b) { + return comparator(b, a); + }; +} + +/** + * Function returning a tuple comparator. + */ +function createTupleComparator(size) { + if (size === 2) { + return function(a, b) { + if (a[0] < b[0]) + return -1; + + if (a[0] > b[0]) + return 1; + + if (a[1] < b[1]) + return -1; + + if (a[1] > b[1]) + return 1; + + return 0; + }; + } + + return function(a, b) { + var i = 0; + + while (i < size) { + if (a[i] < b[i]) + return -1; + + if (a[i] > b[i]) + return 1; + + i++; + } + + return 0; + }; +} + +/** + * Exporting. 
+ */ +exports.DEFAULT_COMPARATOR = DEFAULT_COMPARATOR; +exports.DEFAULT_REVERSE_COMPARATOR = DEFAULT_REVERSE_COMPARATOR; +exports.reverseComparator = reverseComparator; +exports.createTupleComparator = createTupleComparator; diff --git a/amplify/functions/deleteDocument/node_modules/mnemonist/utils/hash-tables.js b/amplify/functions/deleteDocument/node_modules/mnemonist/utils/hash-tables.js new file mode 100644 index 0000000..dfed95e --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/mnemonist/utils/hash-tables.js @@ -0,0 +1,107 @@ +/* eslint no-constant-condition: 0 */ +/** + * Mnemonist Hashtable Helpers + * ============================ + * + * Miscellaneous helpers helper function dealing with hashtables. + */ +function jenkinsInt32(a) { + + a = (a + 0x7ed55d16) + (a << 12); + a = (a ^ 0xc761c23c) ^ (a >> 19); + a = (a + 0x165667b1) + (a << 5); + a = (a + 0xd3a2646c) ^ (a << 9); + a = (a + 0xfd7046c5) + (a << 3); + a = (a ^ 0xb55a4f09) ^ (a >> 16); + + return a; +} + +function linearProbingGet(hash, keys, values, key) { + var n = keys.length, + j = hash(key) & (n - 1), + i = j; + + var c; + + while (true) { + c = keys[i]; + + if (c === key) + return values[i]; + + else if (c === 0) + return; + + // Handling wrapping around + i += 1; + i %= n; + + // Full turn + if (i === j) + return; + } +} + +function linearProbingHas(hash, keys, key) { + var n = keys.length, + j = hash(key) & (n - 1), + i = j; + + var c; + + while (true) { + c = keys[i]; + + if (c === key) + return true; + + else if (c === 0) + return false; + + // Handling wrapping around + i += 1; + i %= n; + + // Full turn + if (i === j) + return false; + } +} + +function linearProbingSet(hash, keys, values, key, value) { + var n = keys.length, + j = hash(key) & (n - 1), + i = j; + + var c; + + while (true) { + c = keys[i]; + + if (c === 0 || c === key) + break; + + // Handling wrapping around + i += 1; + i %= n; + + // Full turn + if (i === j) + throw new 
Error('mnemonist/utils/hash-tables.linearProbingSet: table is full.'); + } + + keys[i] = key; + values[i] = value; +} + +module.exports = { + hashes: { + jenkinsInt32: jenkinsInt32 + }, + linearProbing: { + get: linearProbingGet, + has: linearProbingHas, + set: linearProbingSet + } +}; diff --git a/amplify/functions/deleteDocument/node_modules/mnemonist/utils/iterables.js b/amplify/functions/deleteDocument/node_modules/mnemonist/utils/iterables.js new file mode 100644 index 0000000..d95f701 --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/mnemonist/utils/iterables.js @@ -0,0 +1,93 @@ +/** + * Mnemonist Iterable Function + * ============================ + * + * Harmonized iteration helpers over mixed iterable targets. + */ +var forEach = require('obliterator/foreach'); + +var typed = require('./typed-arrays.js'); + +/** + * Function used to determine whether the given object supports array-like + * random access. + * + * @param {any} target - Target object. + * @return {boolean} + */ +function isArrayLike(target) { + return Array.isArray(target) || typed.isTypedArray(target); +} + +/** + * Function used to guess the length of the structure over which we are going + * to iterate. + * + * @param {any} target - Target object. + * @return {number|undefined} + */ +function guessLength(target) { + if (typeof target.length === 'number') + return target.length; + + if (typeof target.size === 'number') + return target.size; + + return; +} + +/** + * Function used to convert an iterable to an array. + * + * @param {any} target - Iteration target. + * @return {array} + */ +function toArray(target) { + var l = guessLength(target); + + var array = typeof l === 'number' ? new Array(l) : []; + + var i = 0; + + // TODO: we could optimize when given target is array like + forEach(target, function(value) { + array[i++] = value; + }); + + return array; +} + +/** + * Same as above but returns a supplementary indices array. 
+ * + * @param {any} target - Iteration target. + * @return {array} + */ +function toArrayWithIndices(target) { + var l = guessLength(target); + + var IndexArray = typeof l === 'number' ? + typed.getPointerArray(l) : + Array; + + var array = typeof l === 'number' ? new Array(l) : []; + var indices = typeof l === 'number' ? new IndexArray(l) : []; + + var i = 0; + + // TODO: we could optimize when given target is array like + forEach(target, function(value) { + array[i] = value; + indices[i] = i++; + }); + + return [array, indices]; +} + +/** + * Exporting. + */ +exports.isArrayLike = isArrayLike; +exports.guessLength = guessLength; +exports.toArray = toArray; +exports.toArrayWithIndices = toArrayWithIndices; diff --git a/amplify/functions/deleteDocument/node_modules/mnemonist/utils/merge.js b/amplify/functions/deleteDocument/node_modules/mnemonist/utils/merge.js new file mode 100644 index 0000000..bf40d45 --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/mnemonist/utils/merge.js @@ -0,0 +1,563 @@ +/* eslint no-constant-condition: 0 */ +/** + * Mnemonist Merge Helpers + * ======================== + * + * Various merge algorithms used to handle sorted lists. Note that the given + * functions are optimized and won't accept mixed arguments. + * + * Note: maybe this piece of code belong to sortilege, along with binary-search. + */ +var typed = require('./typed-arrays.js'), + isArrayLike = require('./iterables.js').isArrayLike, + binarySearch = require('./binary-search.js'), + FibonacciHeap = require('../fibonacci-heap.js'); + +// TODO: update to use exponential search +// TODO: when not knowing final length => should use plain arrays rather than +// same type as input + +/** + * Merge two sorted array-like structures into one. + * + * @param {array} a - First array. + * @param {array} b - Second array. 
+ * @return {array} + */ +function mergeArrays(a, b) { + + // One of the arrays is empty + if (a.length === 0) + return b.slice(); + if (b.length === 0) + return a.slice(); + + // Finding min array + var tmp; + + if (a[0] > b[0]) { + tmp = a; + a = b; + b = tmp; + } + + // If array have non overlapping ranges, we can just concatenate them + var aEnd = a[a.length - 1], + bStart = b[0]; + + if (aEnd <= bStart) { + if (typed.isTypedArray(a)) + return typed.concat(a, b); + return a.concat(b); + } + + // Initializing target + var array = new a.constructor(a.length + b.length); + + // Iterating until we overlap + var i, l, v; + + for (i = 0, l = a.length; i < l; i++) { + v = a[i]; + + if (v <= bStart) + array[i] = v; + else + break; + } + + // Handling overlap + var aPointer = i, + aLength = a.length, + bPointer = 0, + bLength = b.length, + aHead, + bHead; + + while (aPointer < aLength && bPointer < bLength) { + aHead = a[aPointer]; + bHead = b[bPointer]; + + if (aHead <= bHead) { + array[i++] = aHead; + aPointer++; + } + else { + array[i++] = bHead; + bPointer++; + } + } + + // Filling + while (aPointer < aLength) + array[i++] = a[aPointer++]; + while (bPointer < bLength) + array[i++] = b[bPointer++]; + + return array; +} + +/** + * Perform the union of two already unique sorted array-like structures into one. + * + * @param {array} a - First array. + * @param {array} b - Second array. 
+ * @return {array} + */ +function unionUniqueArrays(a, b) { + + // One of the arrays is empty + if (a.length === 0) + return b.slice(); + if (b.length === 0) + return a.slice(); + + // Finding min array + var tmp; + + if (a[0] > b[0]) { + tmp = a; + a = b; + b = tmp; + } + + // If array have non overlapping ranges, we can just concatenate them + var aEnd = a[a.length - 1], + bStart = b[0]; + + if (aEnd < bStart) { + if (typed.isTypedArray(a)) + return typed.concat(a, b); + return a.concat(b); + } + + // Initializing target + var array = new a.constructor(); + + // Iterating until we overlap + var i, l, v; + + for (i = 0, l = a.length; i < l; i++) { + v = a[i]; + + if (v < bStart) + array.push(v); + else + break; + } + + // Handling overlap + var aPointer = i, + aLength = a.length, + bPointer = 0, + bLength = b.length, + aHead, + bHead; + + while (aPointer < aLength && bPointer < bLength) { + aHead = a[aPointer]; + bHead = b[bPointer]; + + if (aHead <= bHead) { + + if (array.length === 0 || array[array.length - 1] !== aHead) + array.push(aHead); + + aPointer++; + } + else { + if (array.length === 0 || array[array.length - 1] !== bHead) + array.push(bHead); + + bPointer++; + } + } + + // Filling + // TODO: it's possible to optimize a bit here, since the condition is only + // relevant the first time + while (aPointer < aLength) { + aHead = a[aPointer++]; + + if (array.length === 0 || array[array.length - 1] !== aHead) + array.push(aHead); + } + while (bPointer < bLength) { + bHead = b[bPointer++]; + + if (array.length === 0 || array[array.length - 1] !== bHead) + array.push(bHead); + } + + return array; +} + +/** + * Perform the intersection of two already unique sorted array-like structures into one. + * + * @param {array} a - First array. + * @param {array} b - Second array. 
+ * @return {array} + */ +exports.intersectionUniqueArrays = function(a, b) { + + // One of the arrays is empty + if (a.length === 0 || b.length === 0) + return new a.constructor(0); + + // Finding min array + var tmp; + + if (a[0] > b[0]) { + tmp = a; + a = b; + b = tmp; + } + + // If array have non overlapping ranges, there is no intersection + var aEnd = a[a.length - 1], + bStart = b[0]; + + if (aEnd < bStart) + return new a.constructor(0); + + // Initializing target + var array = new a.constructor(); + + // Handling overlap + var aPointer = binarySearch.lowerBound(a, bStart), + aLength = a.length, + bPointer = 0, + bLength = binarySearch.upperBound(b, aEnd), + aHead, + bHead; + + while (aPointer < aLength && bPointer < bLength) { + aHead = a[aPointer]; + bHead = b[bPointer]; + + if (aHead < bHead) { + aPointer = binarySearch.lowerBound(a, bHead, aPointer + 1); + } + else if (aHead > bHead) { + bPointer = binarySearch.lowerBound(b, aHead, bPointer + 1); + } + else { + array.push(aHead); + aPointer++; + bPointer++; + } + } + + return array; +}; + +/** + * Merge k sorted array-like structures into one. + * + * @param {array} arrays - Arrays to merge. + * @return {array} + */ +function kWayMergeArrays(arrays) { + var length = 0, + max = -Infinity, + al, + i, + l; + + var filtered = []; + + for (i = 0, l = arrays.length; i < l; i++) { + al = arrays[i].length; + + if (al === 0) + continue; + + filtered.push(arrays[i]); + + length += al; + + if (al > max) + max = al; + } + + if (filtered.length === 0) + return new arrays[0].constructor(0); + + if (filtered.length === 1) + return filtered[0].slice(); + + if (filtered.length === 2) + return mergeArrays(filtered[0], filtered[1]); + + arrays = filtered; + + var array = new arrays[0].constructor(length); + + var PointerArray = typed.getPointerArray(max); + + var pointers = new PointerArray(arrays.length); + + // TODO: benchmark vs. 
a binomial heap + var heap = new FibonacciHeap(function(a, b) { + a = arrays[a][pointers[a]]; + b = arrays[b][pointers[b]]; + + if (a < b) + return -1; + + if (a > b) + return 1; + + return 0; + }); + + for (i = 0; i < l; i++) + heap.push(i); + + i = 0; + + var p, + v; + + while (heap.size) { + p = heap.pop(); + v = arrays[p][pointers[p]++]; + array[i++] = v; + + if (pointers[p] < arrays[p].length) + heap.push(p); + } + + return array; +} + +/** + * Perform the union of k sorted unique array-like structures into one. + * + * @param {array} arrays - Arrays to merge. + * @return {array} + */ +function kWayUnionUniqueArrays(arrays) { + var max = -Infinity, + al, + i, + l; + + var filtered = []; + + for (i = 0, l = arrays.length; i < l; i++) { + al = arrays[i].length; + + if (al === 0) + continue; + + filtered.push(arrays[i]); + + if (al > max) + max = al; + } + + if (filtered.length === 0) + return new arrays[0].constructor(0); + + if (filtered.length === 1) + return filtered[0].slice(); + + if (filtered.length === 2) + return unionUniqueArrays(filtered[0], filtered[1]); + + arrays = filtered; + + var array = new arrays[0].constructor(); + + var PointerArray = typed.getPointerArray(max); + + var pointers = new PointerArray(arrays.length); + + // TODO: benchmark vs. a binomial heap + var heap = new FibonacciHeap(function(a, b) { + a = arrays[a][pointers[a]]; + b = arrays[b][pointers[b]]; + + if (a < b) + return -1; + + if (a > b) + return 1; + + return 0; + }); + + for (i = 0; i < l; i++) + heap.push(i); + + var p, + v; + + while (heap.size) { + p = heap.pop(); + v = arrays[p][pointers[p]++]; + + if (array.length === 0 || array[array.length - 1] !== v) + array.push(v); + + if (pointers[p] < arrays[p].length) + heap.push(p); + } + + return array; +} + +/** + * Perform the intersection of k sorted array-like structures into one. + * + * @param {array} arrays - Arrays to merge. 
+ * @return {array} + */ +exports.kWayIntersectionUniqueArrays = function(arrays) { + var max = -Infinity, + maxStart = -Infinity, + minEnd = Infinity, + first, + last, + al, + i, + l; + + for (i = 0, l = arrays.length; i < l; i++) { + al = arrays[i].length; + + // If one of the arrays is empty, so is the intersection + if (al === 0) + return []; + + if (al > max) + max = al; + + first = arrays[i][0]; + last = arrays[i][al - 1]; + + if (first > maxStart) + maxStart = first; + + if (last < minEnd) + minEnd = last; + } + + // Full overlap is impossible + if (maxStart > minEnd) + return []; + + // Only one value + if (maxStart === minEnd) + return [maxStart]; + + // NOTE: trying to outsmart I(D,I(C,I(A,B))) is pointless unfortunately... + // NOTE: I tried to be very clever about bounds but it does not seem + // to improve the performance of the algorithm. + var a, b, + array = arrays[0], + aPointer, + bPointer, + aLimit, + bLimit, + aHead, + bHead, + start = maxStart; + + for (i = 1; i < l; i++) { + a = array; + b = arrays[i]; + + // Change that to `[]` and observe some perf drops on V8... + array = new Array(); + + aPointer = 0; + bPointer = binarySearch.lowerBound(b, start); + + aLimit = a.length; + bLimit = b.length; + + while (aPointer < aLimit && bPointer < bLimit) { + aHead = a[aPointer]; + bHead = b[bPointer]; + + if (aHead < bHead) { + aPointer = binarySearch.lowerBound(a, bHead, aPointer + 1); + } + else if (aHead > bHead) { + bPointer = binarySearch.lowerBound(b, aHead, bPointer + 1); + } + else { + array.push(aHead); + aPointer++; + bPointer++; + } + } + + if (array.length === 0) + return array; + + start = array[0]; + } + + return array; +}; + +/** + * Variadic merging all of the given arrays. 
+ * + * @param {...array} + * @return {array} + */ +exports.merge = function() { + if (arguments.length === 2) { + if (isArrayLike(arguments[0])) + return mergeArrays(arguments[0], arguments[1]); + } + else { + if (isArrayLike(arguments[0])) + return kWayMergeArrays(arguments); + } + + return null; +}; + +/** + * Variadic function performing the union of all the given unique arrays. + * + * @param {...array} + * @return {array} + */ +exports.unionUnique = function() { + if (arguments.length === 2) { + if (isArrayLike(arguments[0])) + return unionUniqueArrays(arguments[0], arguments[1]); + } + else { + if (isArrayLike(arguments[0])) + return kWayUnionUniqueArrays(arguments); + } + + return null; +}; + +/** + * Variadic function performing the intersection of all the given unique arrays. + * + * @param {...array} + * @return {array} + */ +exports.intersectionUnique = function() { + if (arguments.length === 2) { + if (isArrayLike(arguments[0])) + return exports.intersectionUniqueArrays(arguments[0], arguments[1]); + } + else { + if (isArrayLike(arguments[0])) + return exports.kWayIntersectionUniqueArrays(arguments); + } + + return null; +}; diff --git a/amplify/functions/deleteDocument/node_modules/mnemonist/utils/murmurhash3.js b/amplify/functions/deleteDocument/node_modules/mnemonist/utils/murmurhash3.js new file mode 100644 index 0000000..c09ec8a --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/mnemonist/utils/murmurhash3.js @@ -0,0 +1,87 @@ +/* eslint no-fallthrough: 0 */ +/** + * Mnemonist MurmurHash 3 + * ======================= + * + * Straightforward implementation of the third version of MurmurHash. + * + * Note: this piece of code belong to haschisch. + */ + +/** + * Various helpers. 
+ */ +function mul32(a, b) { + return (a & 0xffff) * b + (((a >>> 16) * b & 0xffff) << 16) & 0xffffffff; +} + +function sum32(a, b) { + return (a & 0xffff) + (b >>> 16) + (((a >>> 16) + b & 0xffff) << 16) & 0xffffffff; +} + +function rotl32(a, b) { + return (a << b) | (a >>> (32 - b)); +} + +/** + * MumurHash3 function. + * + * @param {number} seed - Seed. + * @param {ByteArray} data - Data. + */ +module.exports = function murmurhash3(seed, data) { + var c1 = 0xcc9e2d51, + c2 = 0x1b873593, + r1 = 15, + r2 = 13, + m = 5, + n = 0x6b64e654; + + var hash = seed, + k1, + i, + l; + + for (i = 0, l = data.length - 4; i <= l; i += 4) { + k1 = ( + data[i] | + (data[i + 1] << 8) | + (data[i + 2] << 16) | + (data[i + 3] << 24) + ); + + k1 = mul32(k1, c1); + k1 = rotl32(k1, r1); + k1 = mul32(k1, c2); + + hash ^= k1; + hash = rotl32(hash, r2); + hash = mul32(hash, m); + hash = sum32(hash, n); + } + + k1 = 0; + + switch (data.length & 3) { + case 3: + k1 ^= data[i + 2] << 16; + case 2: + k1 ^= data[i + 1] << 8; + case 1: + k1 ^= data[i]; + k1 = mul32(k1, c1); + k1 = rotl32(k1, r1); + k1 = mul32(k1, c2); + hash ^= k1; + default: + } + + hash ^= data.length; + hash ^= hash >>> 16; + hash = mul32(hash, 0x85ebca6b); + hash ^= hash >>> 13; + hash = mul32(hash, 0xc2b2ae35); + hash ^= hash >>> 16; + + return hash >>> 0; +}; diff --git a/amplify/functions/deleteDocument/node_modules/mnemonist/utils/typed-arrays.js b/amplify/functions/deleteDocument/node_modules/mnemonist/utils/typed-arrays.js new file mode 100644 index 0000000..474a2cb --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/mnemonist/utils/typed-arrays.js @@ -0,0 +1,187 @@ +/** + * Mnemonist Typed Array Helpers + * ============================== + * + * Miscellaneous helpers related to typed arrays. + */ + +/** + * When using an unsigned integer array to store pointers, one might want to + * choose the optimal word size in regards to the actual numbers of pointers + * to store. 
+ * + * This helpers does just that. + * + * @param {number} size - Expected size of the array to map. + * @return {TypedArray} + */ +var MAX_8BIT_INTEGER = Math.pow(2, 8) - 1, + MAX_16BIT_INTEGER = Math.pow(2, 16) - 1, + MAX_32BIT_INTEGER = Math.pow(2, 32) - 1; + +var MAX_SIGNED_8BIT_INTEGER = Math.pow(2, 7) - 1, + MAX_SIGNED_16BIT_INTEGER = Math.pow(2, 15) - 1, + MAX_SIGNED_32BIT_INTEGER = Math.pow(2, 31) - 1; + +exports.getPointerArray = function(size) { + var maxIndex = size - 1; + + if (maxIndex <= MAX_8BIT_INTEGER) + return Uint8Array; + + if (maxIndex <= MAX_16BIT_INTEGER) + return Uint16Array; + + if (maxIndex <= MAX_32BIT_INTEGER) + return Uint32Array; + + return Float64Array; +}; + +exports.getSignedPointerArray = function(size) { + var maxIndex = size - 1; + + if (maxIndex <= MAX_SIGNED_8BIT_INTEGER) + return Int8Array; + + if (maxIndex <= MAX_SIGNED_16BIT_INTEGER) + return Int16Array; + + if (maxIndex <= MAX_SIGNED_32BIT_INTEGER) + return Int32Array; + + return Float64Array; +}; + +/** + * Function returning the minimal type able to represent the given number. + * + * @param {number} value - Value to test. + * @return {TypedArrayClass} + */ +exports.getNumberType = function(value) { + + // <= 32 bits itnteger? + if (value === (value | 0)) { + + // Negative + if (Math.sign(value) === -1) { + if (value <= 127 && value >= -128) + return Int8Array; + + if (value <= 32767 && value >= -32768) + return Int16Array; + + return Int32Array; + } + else { + + if (value <= 255) + return Uint8Array; + + if (value <= 65535) + return Uint16Array; + + return Uint32Array; + } + } + + // 53 bits integer & floats + // NOTE: it's kinda hard to tell whether we could use 32bits or not... + return Float64Array; +}; + +/** + * Function returning the minimal type able to represent the given array + * of JavaScript numbers. + * + * @param {array} array - Array to represent. + * @param {function} getter - Optional getter. 
+ * @return {TypedArrayClass} + */ +var TYPE_PRIORITY = { + Uint8Array: 1, + Int8Array: 2, + Uint16Array: 3, + Int16Array: 4, + Uint32Array: 5, + Int32Array: 6, + Float32Array: 7, + Float64Array: 8 +}; + +// TODO: make this a one-shot for one value +exports.getMinimalRepresentation = function(array, getter) { + var maxType = null, + maxPriority = 0, + p, + t, + v, + i, + l; + + for (i = 0, l = array.length; i < l; i++) { + v = getter ? getter(array[i]) : array[i]; + t = exports.getNumberType(v); + p = TYPE_PRIORITY[t.name]; + + if (p > maxPriority) { + maxPriority = p; + maxType = t; + } + } + + return maxType; +}; + +/** + * Function returning whether the given value is a typed array. + * + * @param {any} value - Value to test. + * @return {boolean} + */ +exports.isTypedArray = function(value) { + return typeof ArrayBuffer !== 'undefined' && ArrayBuffer.isView(value); +}; + +/** + * Function used to concat byte arrays. + * + * @param {...ByteArray} + * @return {ByteArray} + */ +exports.concat = function() { + var length = 0, + i, + o, + l; + + for (i = 0, l = arguments.length; i < l; i++) + length += arguments[i].length; + + var array = new (arguments[0].constructor)(length); + + for (i = 0, o = 0; i < l; i++) { + array.set(arguments[i], o); + o += arguments[i].length; + } + + return array; +}; + +/** + * Function used to initialize a byte array of indices. + * + * @param {number} length - Length of target. 
+ * @return {ByteArray} + */ +exports.indices = function(length) { + var PointerArray = exports.getPointerArray(length); + + var array = new PointerArray(length); + + for (var i = 0; i < length; i++) + array[i] = i; + + return array; +}; diff --git a/amplify/functions/deleteDocument/node_modules/mnemonist/utils/types.d.ts b/amplify/functions/deleteDocument/node_modules/mnemonist/utils/types.d.ts new file mode 100644 index 0000000..1a199d6 --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/mnemonist/utils/types.d.ts @@ -0,0 +1,16 @@ +/** + * Mnemonist Generic Types + * ======================== + * + * Collection of types used throughout the library. + */ +export interface IArrayLike { + length: number; + slice(from: number, to?: number): IArrayLike; +} + +export type ArrayLike = IArrayLike | ArrayBuffer; + +export interface IArrayLikeConstructor { + new(...args: any[]): ArrayLike; +} diff --git a/amplify/functions/deleteDocument/node_modules/mnemonist/vector.d.ts b/amplify/functions/deleteDocument/node_modules/mnemonist/vector.d.ts new file mode 100644 index 0000000..414f969 --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/mnemonist/vector.d.ts @@ -0,0 +1,81 @@ +/** + * Mnemonist Vector Typings + * ========================= + */ +import {IArrayLikeConstructor} from './utils/types'; + +type VectorOptions = { + initialLength?: number; + initialCapacity?: number; + policy?: (capacity: number) => number; +} + +export default class Vector implements Iterable { + + // Members + capacity: number; + length: number; + size: number; + + // Constructor + constructor(ArrayClass: IArrayLikeConstructor, length: number | VectorOptions); + + // Methods + clear(): void; + set(index: number, value: number): this; + reallocate(capacity: number): this; + grow(capacity?: number): this; + resize(length: number): this; + push(value: number): number; + pop(): number | undefined; + get(index: number): number; + forEach(callback: (index: number, value: number, 
set: this) => void, scope?: any): void; + values(): IterableIterator; + entries(): IterableIterator<[number, number]>; + [Symbol.iterator](): IterableIterator; + inspect(): any; + toJSON(): any; + + // Statics + static from(iterable: Iterable | {[key: string] : I}, ArrayClass: IArrayLikeConstructor, capacity?: number): Vector; +} + +declare class TypedVector implements Iterable { + + // Members + capacity: number; + length: number; + size: number; + + // Constructor + constructor(length: number | VectorOptions); + + // Methods + clear(): void; + set(index: number, value: number): this; + reallocate(capacity: number): this; + grow(capacity?: number): this; + resize(length: number): this; + push(value: number): number; + pop(): number | undefined; + get(index: number): number; + forEach(callback: (index: number, value: number, set: this) => void, scope?: any): void; + values(): IterableIterator; + entries(): IterableIterator<[number, number]>; + [Symbol.iterator](): IterableIterator; + inspect(): any; + toJSON(): any; + + // Statics + static from(iterable: Iterable | {[key: string] : I}, capacity?: number): TypedVector; +} + +export class Int8Vector extends TypedVector {} +export class Uint8Vector extends TypedVector {} +export class Uint8ClampedVector extends TypedVector {} +export class Int16Vector extends TypedVector {} +export class Uint16Vector extends TypedVector {} +export class Int32Vector extends TypedVector {} +export class Uint32Vector extends TypedVector {} +export class Float32Vector extends TypedVector {} +export class Float64Array extends TypedVector {} diff --git a/amplify/functions/deleteDocument/node_modules/mnemonist/vector.js b/amplify/functions/deleteDocument/node_modules/mnemonist/vector.js new file mode 100644 index 0000000..467bf20 --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/mnemonist/vector.js @@ -0,0 +1,373 @@ +/** + * Mnemonist Vector + * ================= + * + * Abstract implementation of a growing array that can be 
used with JavaScript + * typed arrays and other array-like structures. + * + * Note: should try and use ArrayBuffer.transfer when it will be available. + */ +var Iterator = require('obliterator/iterator'), + forEach = require('obliterator/foreach'), + iterables = require('./utils/iterables.js'), + typed = require('./utils/typed-arrays.js'); + +/** + * Defaults. + */ +var DEFAULT_GROWING_POLICY = function(currentCapacity) { + return Math.max(1, Math.ceil(currentCapacity * 1.5)); +}; + +var pointerArrayFactory = function(capacity) { + var PointerArray = typed.getPointerArray(capacity); + + return new PointerArray(capacity); +}; + +/** + * Vector. + * + * @constructor + * @param {function} ArrayClass - An array constructor. + * @param {number|object} initialCapacityOrOptions - Self-explanatory: + * @param {number} initialCapacity - Initial capacity. + * @param {number} initialLength - Initial length. + * @param {function} policy - Allocation policy. + */ +function Vector(ArrayClass, initialCapacityOrOptions) { + if (arguments.length < 1) + throw new Error('mnemonist/vector: expecting at least a byte array constructor.'); + + var initialCapacity = initialCapacityOrOptions || 0, + policy = DEFAULT_GROWING_POLICY, + initialLength = 0, + factory = false; + + if (typeof initialCapacityOrOptions === 'object') { + initialCapacity = initialCapacityOrOptions.initialCapacity || 0; + initialLength = initialCapacityOrOptions.initialLength || 0; + policy = initialCapacityOrOptions.policy || policy; + factory = initialCapacityOrOptions.factory === true; + } + + this.factory = factory ? ArrayClass : null; + this.ArrayClass = ArrayClass; + this.length = initialLength; + this.capacity = Math.max(initialLength, initialCapacity); + this.policy = policy; + this.array = new ArrayClass(this.capacity); +} + +/** + * Method used to set a value. + * + * @param {number} index - Index to edit. + * @param {any} value - Value. 
+ * @return {Vector} + */ +Vector.prototype.set = function(index, value) { + + // Out of bounds? + if (this.length < index) + throw new Error('Vector(' + this.ArrayClass.name + ').set: index out of bounds.'); + + // Updating value + this.array[index] = value; + + return this; +}; + +/** + * Method used to get a value. + * + * @param {number} index - Index to retrieve. + * @return {any} + */ +Vector.prototype.get = function(index) { + if (this.length < index) + return undefined; + + return this.array[index]; +}; + +/** + * Method used to apply the growing policy. + * + * @param {number} [override] - Override capacity. + * @return {number} + */ +Vector.prototype.applyPolicy = function(override) { + var newCapacity = this.policy(override || this.capacity); + + if (typeof newCapacity !== 'number' || newCapacity < 0) + throw new Error('mnemonist/vector.applyPolicy: policy returned an invalid value (expecting a positive integer).'); + + if (newCapacity <= this.capacity) + throw new Error('mnemonist/vector.applyPolicy: policy returned a less or equal capacity to allocate.'); + + // TODO: we should probably check that the returned number is an integer + return newCapacity; +}; + +/** + * Method used to reallocate the underlying array. + * + * @param {number} capacity - Target capacity. + * @return {Vector} + */ +Vector.prototype.reallocate = function(capacity) { + if (capacity === this.capacity) + return this; + + var oldArray = this.array; + + if (capacity < this.length) + this.length = capacity; + + if (capacity > this.capacity) { + if (this.factory === null) + this.array = new this.ArrayClass(capacity); + else + this.array = this.factory(capacity); + + if (typed.isTypedArray(this.array)) { + this.array.set(oldArray, 0); + } + else { + for (var i = 0, l = this.length; i < l; i++) + this.array[i] = oldArray[i]; + } + } + else { + this.array = oldArray.slice(0, capacity); + } + + this.capacity = capacity; + + return this; +}; + +/** + * Method used to grow the array. 
+ * + * @param {number} [capacity] - Optional capacity to match. + * @return {Vector} + */ +Vector.prototype.grow = function(capacity) { + var newCapacity; + + if (typeof capacity === 'number') { + + if (this.capacity >= capacity) + return this; + + // We need to match the given capacity + newCapacity = this.capacity; + + while (newCapacity < capacity) + newCapacity = this.applyPolicy(newCapacity); + + this.reallocate(newCapacity); + + return this; + } + + // We need to run the policy once + newCapacity = this.applyPolicy(); + this.reallocate(newCapacity); + + return this; +}; + +/** + * Method used to resize the array. Won't deallocate. + * + * @param {number} length - Target length. + * @return {Vector} + */ +Vector.prototype.resize = function(length) { + if (length === this.length) + return this; + + if (length < this.length) { + this.length = length; + return this; + } + + this.length = length; + this.reallocate(length); + + return this; +}; + +/** + * Method used to push a value into the array. + * + * @param {any} value - Value to push. + * @return {number} - Length of the array. + */ +Vector.prototype.push = function(value) { + if (this.capacity === this.length) + this.grow(); + + this.array[this.length++] = value; + + return this.length; +}; + +/** + * Method used to pop the last value of the array. + * + * @return {number} - The popped value. + */ +Vector.prototype.pop = function() { + if (this.length === 0) + return; + + return this.array[--this.length]; +}; + +/** + * Method used to create an iterator over a vector's values. + * + * @return {Iterator} + */ +Vector.prototype.values = function() { + var items = this.array, + l = this.length, + i = 0; + + return new Iterator(function() { + if (i >= l) + return { + done: true + }; + + var value = items[i]; + i++; + + return { + value: value, + done: false + }; + }); +}; + +/** + * Method used to create an iterator over a vector's entries. 
+ * + * @return {Iterator} + */ +Vector.prototype.entries = function() { + var items = this.array, + l = this.length, + i = 0; + + return new Iterator(function() { + if (i >= l) + return { + done: true + }; + + var value = items[i]; + + return { + value: [i++, value], + done: false + }; + }); +}; + +/** + * Attaching the #.values method to Symbol.iterator if possible. + */ +if (typeof Symbol !== 'undefined') + Vector.prototype[Symbol.iterator] = Vector.prototype.values; + +/** + * Convenience known methods. + */ +Vector.prototype.inspect = function() { + var proxy = this.array.slice(0, this.length); + + proxy.type = this.array.constructor.name; + proxy.items = this.length; + proxy.capacity = this.capacity; + + // Trick so that node displays the name of the constructor + Object.defineProperty(proxy, 'constructor', { + value: Vector, + enumerable: false + }); + + return proxy; +}; + +if (typeof Symbol !== 'undefined') + Vector.prototype[Symbol.for('nodejs.util.inspect.custom')] = Vector.prototype.inspect; + +/** + * Static @.from function taking an arbitrary iterable & converting it into + * a vector. + * + * @param {Iterable} iterable - Target iterable. + * @param {function} ArrayClass - Byte array class. + * @param {number} capacity - Desired capacity. + * @return {Vector} + */ +Vector.from = function(iterable, ArrayClass, capacity) { + + if (arguments.length < 3) { + + // Attempting to guess the needed capacity + capacity = iterables.guessLength(iterable); + + if (typeof capacity !== 'number') + throw new Error('mnemonist/vector.from: could not guess iterable length. Please provide desired capacity as last argument.'); + } + + var vector = new Vector(ArrayClass, capacity); + + forEach(iterable, function(value) { + vector.push(value); + }); + + return vector; +}; + +/** + * Exporting. 
+ */ +function subClass(ArrayClass) { + var SubClass = function(initialCapacityOrOptions) { + Vector.call(this, ArrayClass, initialCapacityOrOptions); + }; + + for (var k in Vector.prototype) { + if (Vector.prototype.hasOwnProperty(k)) + SubClass.prototype[k] = Vector.prototype[k]; + } + + SubClass.from = function(iterable, capacity) { + return Vector.from(iterable, ArrayClass, capacity); + }; + + if (typeof Symbol !== 'undefined') + SubClass.prototype[Symbol.iterator] = SubClass.prototype.values; + + return SubClass; +} + +Vector.Int8Vector = subClass(Int8Array); +Vector.Uint8Vector = subClass(Uint8Array); +Vector.Uint8ClampedVector = subClass(Uint8ClampedArray); +Vector.Int16Vector = subClass(Int16Array); +Vector.Uint16Vector = subClass(Uint16Array); +Vector.Int32Vector = subClass(Int32Array); +Vector.Uint32Vector = subClass(Uint32Array); +Vector.Float32Vector = subClass(Float32Array); +Vector.Float64Vector = subClass(Float64Array); +Vector.PointerVector = subClass(pointerArrayFactory); + +module.exports = Vector; diff --git a/amplify/functions/deleteDocument/node_modules/mnemonist/vp-tree.d.ts b/amplify/functions/deleteDocument/node_modules/mnemonist/vp-tree.d.ts new file mode 100644 index 0000000..2c03354 --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/mnemonist/vp-tree.d.ts @@ -0,0 +1,27 @@ +/** + * Mnemonist VPTree Typings + * ========================= + */ +type DistanceFunction = (a: T, b: T) => number; +type QueryMatch = {distance: number, item: T}; + +export default class VPTree { + + // Members + distance: DistanceFunction; + size: number; + D: number; + + // Constructor + constructor(distance: DistanceFunction, items: Iterable); + + // Methods + nearestNeighbors(k: number, query: T): Array>; + neighbors(radius: number, query: T): Array>; + + // Statics + static from( + iterable: Iterable | {[key: string] : I}, + distance: DistanceFunction + ): VPTree; +} diff --git a/amplify/functions/deleteDocument/node_modules/mnemonist/vp-tree.js 
b/amplify/functions/deleteDocument/node_modules/mnemonist/vp-tree.js new file mode 100644 index 0000000..2acd01e --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/mnemonist/vp-tree.js @@ -0,0 +1,367 @@ +/** + * Mnemonist Vantage Point Tree + * ============================= + * + * JavaScript implementation of the Vantage Point Tree storing the binary + * tree as a flat byte array. + * + * Note that a VPTree has worst cases and is likely not to be perfectly + * balanced because of median ambiguity. It is therefore not suitable + * for hairballs and tiny datasets. + * + * [Reference]: + * https://en.wikipedia.org/wiki/Vantage-point_tree + */ +var iterables = require('./utils/iterables.js'), + typed = require('./utils/typed-arrays.js'), + inplaceQuickSortIndices = require('./sort/quick.js').inplaceQuickSortIndices, + lowerBoundIndices = require('./utils/binary-search.js').lowerBoundIndices, + Heap = require('./heap.js'); + +var getPointerArray = typed.getPointerArray; + +// TODO: implement vantage point selection techniques (by swapping with last) +// TODO: is this required to implement early termination for k <= size? + +/** + * Heap comparator used by the #.nearestNeighbors method. + */ +function comparator(a, b) { + if (a.distance < b.distance) + return 1; + + if (a.distance > b.distance) + return -1; + + return 0; +} + +/** + * Function used to create the binary tree. + * + * @param {function} distance - Distance function to use. + * @param {array} items - Items to index (will be mutated). + * @param {array} indices - Indexes of the items. + * @return {Float64Array} - The flat binary tree. 
+ */ +function createBinaryTree(distance, items, indices) { + var N = indices.length; + + var PointerArray = getPointerArray(N); + + var C = 0, + nodes = new PointerArray(N), + lefts = new PointerArray(N), + rights = new PointerArray(N), + mus = new Float64Array(N), + stack = [0, 0, N], + distances = new Float64Array(N), + nodeIndex, + vantagePoint, + medianIndex, + lo, + hi, + mid, + mu, + i, + l; + + while (stack.length) { + hi = stack.pop(); + lo = stack.pop(); + nodeIndex = stack.pop(); + + // Getting our vantage point + vantagePoint = indices[hi - 1]; + hi--; + + l = hi - lo; + + // Storing vantage point + nodes[nodeIndex] = vantagePoint; + + // We are in a leaf + if (l === 0) + continue; + + // We only have two elements, the second one has to go right + if (l === 1) { + + // We put remaining item to the right + mu = distance(items[vantagePoint], items[indices[lo]]); + + mus[nodeIndex] = mu; + + // Right + C++; + rights[nodeIndex] = C; + nodes[C] = indices[lo]; + + continue; + } + + // Computing distance from vantage point to other points + for (i = lo; i < hi; i++) + distances[indices[i]] = distance(items[vantagePoint], items[indices[i]]); + + inplaceQuickSortIndices(distances, indices, lo, hi); + + // Finding median of distances + medianIndex = lo + (l / 2) - 1; + + // Need to interpolate? 
+ if (medianIndex === (medianIndex | 0)) { + mu = ( + distances[indices[medianIndex]] + + distances[indices[medianIndex + 1]] + ) / 2; + } + else { + mu = distances[indices[Math.ceil(medianIndex)]]; + } + + // Storing mu + mus[nodeIndex] = mu; + + mid = lowerBoundIndices(distances, indices, mu, lo, hi); + + // console.log('Vantage point', items[vantagePoint], vantagePoint); + // console.log('mu =', mu); + // console.log('lo =', lo); + // console.log('hi =', hi); + // console.log('mid =', mid); + + // console.log('need to split', Array.from(indices).slice(lo, hi).map(i => { + // return [distances[i], distance(items[vantagePoint], items[i]), items[i]]; + // })); + + // Right + if (hi - mid > 0) { + C++; + rights[nodeIndex] = C; + stack.push(C, mid, hi); + // console.log('Went right with ', Array.from(indices).slice(mid, hi).map(i => { + // return [distances[i], distance(items[vantagePoint], items[i]), items[i]]; + // })); + } + + // Left + if (mid - lo > 0) { + C++; + lefts[nodeIndex] = C; + stack.push(C, lo, mid); + // console.log('Went left with', Array.from(indices).slice(lo, mid).map(i => { + // return [distances[i], distance(items[vantagePoint], items[i]), items[i]]; + // })); + } + + // console.log(); + } + + return { + nodes: nodes, + lefts: lefts, + rights: rights, + mus: mus + }; +} + +/** + * VPTree. + * + * @constructor + * @param {function} distance - Distance function to use. + * @param {Iterable} items - Items to store. + */ +function VPTree(distance, items) { + if (typeof distance !== 'function') + throw new Error('mnemonist/VPTree.constructor: given `distance` must be a function.'); + + if (!items) + throw new Error('mnemonist/VPTree.constructor: you must provide items to the tree. 
A VPTree cannot be updated after its creation.'); + + // Properties + this.distance = distance; + this.heap = new Heap(comparator); + this.D = 0; + + var arrays = iterables.toArrayWithIndices(items); + this.items = arrays[0]; + var indices = arrays[1]; + + // Creating the binary tree + this.size = indices.length; + + var result = createBinaryTree(distance, this.items, indices); + + this.nodes = result.nodes; + this.lefts = result.lefts; + this.rights = result.rights; + this.mus = result.mus; +} + +/** + * Function used to retrieve the k nearest neighbors of the query. + * + * @param {number} k - Number of neighbors to retrieve. + * @param {any} query - The query. + * @return {array} + */ +VPTree.prototype.nearestNeighbors = function(k, query) { + var neighbors = this.heap, + stack = [0], + tau = Infinity, + nodeIndex, + itemIndex, + vantagePoint, + leftIndex, + rightIndex, + mu, + d; + + this.D = 0; + + while (stack.length) { + nodeIndex = stack.pop(); + itemIndex = this.nodes[nodeIndex]; + vantagePoint = this.items[itemIndex]; + + // Distance between query & the current vantage point + d = this.distance(vantagePoint, query); + this.D++; + + if (d < tau) { + neighbors.push({distance: d, item: vantagePoint}); + + // Trimming + if (neighbors.size > k) + neighbors.pop(); + + // Adjusting tau (only if we already have k items, else it stays Infinity) + if (neighbors.size >= k) + tau = neighbors.peek().distance; + } + + leftIndex = this.lefts[nodeIndex]; + rightIndex = this.rights[nodeIndex]; + + // We are a leaf + if (!leftIndex && !rightIndex) + continue; + + mu = this.mus[nodeIndex]; + + if (d < mu) { + if (leftIndex && d < mu + tau) + stack.push(leftIndex); + if (rightIndex && d >= mu - tau) // Might not be necessary to test d + stack.push(rightIndex); + } + else { + if (rightIndex && d >= mu - tau) + stack.push(rightIndex); + if (leftIndex && d < mu + tau) // Might not be necessary to test d + stack.push(leftIndex); + } + } + + var array = new Array(neighbors.size); 
+ + for (var i = neighbors.size - 1; i >= 0; i--) + array[i] = neighbors.pop(); + + return array; +}; + +/** + * Function used to retrieve every neighbors of query in the given radius. + * + * @param {number} radius - Radius. + * @param {any} query - The query. + * @return {array} + */ +VPTree.prototype.neighbors = function(radius, query) { + var neighbors = [], + stack = [0], + nodeIndex, + itemIndex, + vantagePoint, + leftIndex, + rightIndex, + mu, + d; + + this.D = 0; + + while (stack.length) { + nodeIndex = stack.pop(); + itemIndex = this.nodes[nodeIndex]; + vantagePoint = this.items[itemIndex]; + + // Distance between query & the current vantage point + d = this.distance(vantagePoint, query); + this.D++; + + if (d <= radius) + neighbors.push({distance: d, item: vantagePoint}); + + leftIndex = this.lefts[nodeIndex]; + rightIndex = this.rights[nodeIndex]; + + // We are a leaf + if (!leftIndex && !rightIndex) + continue; + + mu = this.mus[nodeIndex]; + + if (d < mu) { + if (leftIndex && d < mu + radius) + stack.push(leftIndex); + if (rightIndex && d >= mu - radius) // Might not be necessary to test d + stack.push(rightIndex); + } + else { + if (rightIndex && d >= mu - radius) + stack.push(rightIndex); + if (leftIndex && d < mu + radius) // Might not be necessary to test d + stack.push(leftIndex); + } + } + + return neighbors; +}; + +/** + * Convenience known methods. + */ +VPTree.prototype.inspect = function() { + var array = this.items.slice(); + + // Trick so that node displays the name of the constructor + Object.defineProperty(array, 'constructor', { + value: VPTree, + enumerable: false + }); + + return array; +}; + +if (typeof Symbol !== 'undefined') + VPTree.prototype[Symbol.for('nodejs.util.inspect.custom')] = VPTree.prototype.inspect; + +/** + * Static @.from function taking an arbitrary iterable & converting it into + * a tree. + * + * @param {Iterable} iterable - Target iterable. + * @param {function} distance - Distance function to use. 
+ * @return {VPTree} + */ +VPTree.from = function(iterable, distance) { + return new VPTree(distance, iterable); +}; + +/** + * Exporting. + */ +module.exports = VPTree; diff --git a/amplify/functions/deleteDocument/node_modules/obliterator/LICENSE.txt b/amplify/functions/deleteDocument/node_modules/obliterator/LICENSE.txt new file mode 100644 index 0000000..ca37c96 --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/obliterator/LICENSE.txt @@ -0,0 +1,21 @@ +The MIT License (MIT) + +Copyright (c) 2017 Guillaume Plique (Yomguithereal) + +Permission is hereby granted, free of charge, to any person obtaining a copy +of this software and associated documentation files (the "Software"), to deal +in the Software without restriction, including without limitation the rights +to use, copy, modify, merge, publish, distribute, sublicense, and/or sell +copies of the Software, and to permit persons to whom the Software is +furnished to do so, subject to the following conditions: + +The above copyright notice and this permission notice shall be included in +all copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, +OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN +THE SOFTWARE. 
diff --git a/amplify/functions/deleteDocument/node_modules/obliterator/README.md b/amplify/functions/deleteDocument/node_modules/obliterator/README.md new file mode 100644 index 0000000..f611e39 --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/obliterator/README.md @@ -0,0 +1,321 @@ +[![Build Status](https://travis-ci.org/Yomguithereal/obliterator.svg)](https://travis-ci.org/Yomguithereal/obliterator) + +# Obliterator + +Obliterator is a dead simple JavaScript/TypeScript library providing miscellaneous higher-order iterator functions such as combining two or more iterators into a single one. + +# Installation + +``` +npm install --save obliterator +``` + +Note `obliterator` comes along with its TypeScript declarations. + +# Usage + +## Summary + +*Classes* + +* [Iterator](#iterator) + +*Functions* + +* [chain](#chain) +* [combinations](#combinations) +* [consume](#consume) +* [filter](#filter) +* [forEach](#foreach) +* [map](#map) +* [match](#match) +* [permutations](#permutations) +* [powerSet](#powerSet) +* [split](#split) +* [take](#take) + +## Iterator + +A handy Iterator class with safeguards and usable with ES2015's `for ... of` loop constructs & spread operator. + +```js +import Iterator from 'obliterator/iterator'; +// Or +import {Iterator} from 'obliterator'; + +const iterator = new Iterator(function() { + // Define what the `next` function does +}); + +// Checking that the given value is an iterator (native or else) +Iterator.is(value); + +// Creating an empty iterator +const emptyIterator = Iterator.empty(); + +// Creating a simple iterator from a single value +const simpleIterator = Iterator.of(34); + +// Creating a simple iterator from multiple values +const multipleIterator = Iterator.of(1, 2, 3); +``` + +## chain + +Variadic function chaining all the given iterators. 
+ +```js +import chain from 'obliterator/chain'; +// Or +import {chain} from 'obliterator'; + +const set1 = new Set('a'); +const set2 = new Set('bc'); + +const chained = chain(set1.values(), set2.values()); + +chained.next(); +>>> {done: false, value: 'a'} +chained.next(); +>>> {done: false, value: 'b'} +``` + +## combinations + +Returns an iterator of combinations of the given array and of the given size. + +Note that for performance reasons, the yielded combination is always the same object. + +```js +import combinations from 'obliterator/combinations'; +// Or +import {combinations} from 'obliterator'; + +const iterator = combinations(['A', 'B', 'C', 'D'], 2); + +iterator.next().value; +>>> ['A', 'B'] +iterator.next().value; +>>> ['A', 'C'] +``` + +## consume + +Function consuming the given iterator fully or for n steps. + +```js +import consume from 'obliterator/consume'; +// Or +import {consume} from 'obliterator'; + +const set = new Set([1, 2, 3]); + +// Consuming the whole iterator +let iterator = set.values(); +consume(iterator); +iterator.next().done +>>> true + +// Consuming n steps +let iterator = set.values(); +consume(iterator, 2); +iterator.next().value +>>> 3 +``` + +## filter + +Function returning an iterator filtering another one's values using the given predicate. + +```js +import filter from 'obliterator/filter'; +// Or +import {filter} from 'obliterator'; + +const set = new Set([1, 2, 3, 4, 5]); + +const even = x => x % 2 === 0; + +const iterator = filter(even, set.values()); + +iterator.next().value +>>> 2 +iterator.next().value +>>> 4 +``` + +## forEach + +Function able to iterate over almost any JavaScript iterable value using a callback. + +Supported values range from arrays, typed arrays, sets, maps, objects, strings, arguments, iterators, arbitrary iterables etc. 
+ +```js +import forEach from 'obliterator/foreach'; +// Or +import {forEach} from 'obliterator'; + +const set = new Set(['apple', 'banana']); + +forEach(set.values(), (value, i) => { + console.log(i, value); +}); + +// Iterating over a string +forEach('abc', (char, i) => ...); + +// Iterating over a map +forEach(map, (value, key) => ...); +``` + +Optionally, one can use the `forEachWithNullKeys` function to iterate over mixed values but with the twist that iterables without proper keys (lists, sets etc.), will yield `null` instead of an index key. + +```js +import {forEachWithNullKeys} from 'obliterator/foreach'; + +const set = new Set(['apple', 'banana']); + +forEach(set, (value, key) => { + console.log(key, value); +}); +>>> null, 'apple' +>>> null, 'banana' +``` + +## map + +Function returning an iterator mapping another one's values using the given function. + +```js +import map from 'obliterator/map'; +// Or +import {map} from 'obliterator'; + +const set = new Set([1, 2, 3, 4, 5]); + +const triple = x => x * 3; + +const iterator = map(triple, set.values()); + +iterator.next().value +>>> 3 +iterator.next().value +>>> 6 +``` + +## match + +Function returning an iterator over the matches of a given regex applied to the target string. + +```js +import match from 'obliterator/match'; +// Or +import {match} from 'obliterator'; + +const iterator = match(/t/, 'test'); + +iterator.next().value.index +>>> 0 +iterator.next().value.index +>>> 3 +``` + +## permutations + +Returns an iterator of permutations of the given array and of the given size. + +Note that for performance reasons, the yielded permutation is always the same object. 
+ +```js +import permutations from 'obliterator/permutations'; +// Or +import {permutations} from 'obliterator'; + +let iterator = permutations([1, 2, 3]); + +iterator.next().value +>>> [1, 2, 3] +iterator.next().value +>>> [1, 3, 2] + +iterator = permutations(['A', 'B', 'C', 'D'], 2); + +iterator.next().value; +>>> ['A', 'B'] +iterator.next().value; +>>> ['A', 'C'] +``` + +## powerSet + +Returns an iterator of sets composing the power set of the given array. + +```js +import powerSet from 'obliterator/power-set'; +// Or +import {powerSet} from 'obliterator'; + +const iterator = powerSet(['A', 'B', 'C']); + +iterator.next().value; +>>> [] +iterator.next().value; +>>> ['A'] +``` + +## split + +Returns an iterator over the splits of the target string, according to the given RegExp pattern. + +```js +import split from 'obliterator/split'; +// Or +import {split} from 'obliterator'; + +const iterator = split(/;/g, 'hello;world;super'); + +iterator.next().value; +>>> 'hello' +iterator.next().value; +>>> 'world' +``` + +## take + +Function taking values from given iterator and returning them in an array. + +```js +import take from 'obliterator/take'; +// Or +import {take} from 'obliterator'; + +const set = new Set([1, 2, 3]); + +// To take n values from the iterator +take(set.values(), 2); +>>> [1, 2] + +// To convert the full iterator into an array +take(set.values()); +>>> [1, 2, 3] +``` + +# Contribution + +Contributions are obviously welcome. Please be sure to lint the code & add the relevant unit tests before submitting any PR. 
+ +``` +git clone git@github.com:Yomguithereal/obliterator.git +cd obliterator +npm install + +# To lint the code +npm run lint + +# To run the unit tests +npm test +``` + +# License + +[MIT](LICENSE.txt) diff --git a/amplify/functions/deleteDocument/node_modules/obliterator/chain.d.ts b/amplify/functions/deleteDocument/node_modules/obliterator/chain.d.ts new file mode 100644 index 0000000..298e592 --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/obliterator/chain.d.ts @@ -0,0 +1,3 @@ +import {default as ObliteratorIterator} from './iterator.js'; + +export default function chain(...iterators: Iterator[]): ObliteratorIterator; diff --git a/amplify/functions/deleteDocument/node_modules/obliterator/chain.js b/amplify/functions/deleteDocument/node_modules/obliterator/chain.js new file mode 100644 index 0000000..1e4e25d --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/obliterator/chain.js @@ -0,0 +1,39 @@ +/** + * Obliterator Chain Function + * =========================== + * + * Variadic function combining the given iterators. + */ +var Iterator = require('./iterator.js'); + +/** + * Chain. + * + * @param {...Iterator} iterators - Target iterators. 
+ * @return {Iterator} + */ +module.exports = function chain() { + var iterators = arguments, + current, + i = -1; + + return new Iterator(function iterate() { + if (!current) { + i++; + + if (i >= iterators.length) + return {done: true}; + + current = iterators[i]; + } + + var step = current.next(); + + if (step.done) { + current = null; + return iterate(); + } + + return step; + }); +}; diff --git a/amplify/functions/deleteDocument/node_modules/obliterator/combinations.d.ts b/amplify/functions/deleteDocument/node_modules/obliterator/combinations.d.ts new file mode 100644 index 0000000..206dea9 --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/obliterator/combinations.d.ts @@ -0,0 +1,3 @@ +import {default as ObliteratorIterator} from './iterator.js'; + +export default function combinations(array: Array, r: number): ObliteratorIterator>; diff --git a/amplify/functions/deleteDocument/node_modules/obliterator/combinations.js b/amplify/functions/deleteDocument/node_modules/obliterator/combinations.js new file mode 100644 index 0000000..d1cf456 --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/obliterator/combinations.js @@ -0,0 +1,76 @@ +/** + * Obliterator Combinations Function + * ================================== + * + * Iterator returning combinations of the given array. + */ +var Iterator = require('./iterator.js'); + +/** + * Helper mapping indices to items. + */ +function indicesToItems(target, items, indices, r) { + for (var i = 0; i < r; i++) + target[i] = items[indices[i]]; +} + +/** + * Combinations. + * + * @param {array} array - Target array. + * @param {number} r - Size of the subsequences. 
+ * @return {Iterator} + */ +module.exports = function combinations(array, r) { + if (!Array.isArray(array)) + throw new Error('obliterator/combinations: first argument should be an array.'); + + var n = array.length; + + if (typeof r !== 'number') + throw new Error('obliterator/combinations: second argument should be omitted or a number.'); + + if (r > n) + throw new Error('obliterator/combinations: the size of the subsequences should not exceed the length of the array.'); + + if (r === n) + return Iterator.of(array.slice()); + + var indices = new Array(r), + subsequence = new Array(r), + first = true, + i; + + for (i = 0; i < r; i++) + indices[i] = i; + + return new Iterator(function next() { + if (first) { + first = false; + + indicesToItems(subsequence, array, indices, r); + return {value: subsequence}; + } + + if (indices[r - 1]++ < n - 1) { + indicesToItems(subsequence, array, indices, r); + return {value: subsequence}; + } + + i = r - 2; + + while (i >= 0 && indices[i] >= (n - (r - i))) + --i; + + if (i < 0) + return {done: true}; + + indices[i]++; + + while (++i < r) + indices[i] = indices[i - 1] + 1; + + indicesToItems(subsequence, array, indices, r); + return {value: subsequence}; + }); +}; diff --git a/amplify/functions/deleteDocument/node_modules/obliterator/consume.d.ts b/amplify/functions/deleteDocument/node_modules/obliterator/consume.d.ts new file mode 100644 index 0000000..16812ee --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/obliterator/consume.d.ts @@ -0,0 +1 @@ +export default function consume(iterator: Iterator, steps?: number): void; diff --git a/amplify/functions/deleteDocument/node_modules/obliterator/consume.js b/amplify/functions/deleteDocument/node_modules/obliterator/consume.js new file mode 100644 index 0000000..455fea5 --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/obliterator/consume.js @@ -0,0 +1,32 @@ +/* eslint no-constant-condition: 0 */ +/** + * Obliterator Consume Function + * 
============================= + * + * Function consuming the given iterator for n or every steps. + */ + +/** + * Consume. + * + * @param {Iterator} iterator - Target iterator. + * @param {number} [steps] - Optional steps. + */ +module.exports = function consume(iterator, steps) { + var step, + l = arguments.length > 1 ? steps : Infinity, + i = 0; + + while (true) { + + if (i === l) + return; + + step = iterator.next(); + + if (step.done) + return; + + i++; + } +}; diff --git a/amplify/functions/deleteDocument/node_modules/obliterator/filter.d.ts b/amplify/functions/deleteDocument/node_modules/obliterator/filter.d.ts new file mode 100644 index 0000000..18b17f9 --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/obliterator/filter.d.ts @@ -0,0 +1,5 @@ +import {default as ObliteratorIterator} from './iterator.js'; + +type PredicateFunction = (item: T) => boolean; + +export default function filter(predicate: PredicateFunction, iterator: Iterator): ObliteratorIterator; diff --git a/amplify/functions/deleteDocument/node_modules/obliterator/filter.js b/amplify/functions/deleteDocument/node_modules/obliterator/filter.js new file mode 100644 index 0000000..25a519b --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/obliterator/filter.js @@ -0,0 +1,28 @@ +/** + * Obliterator Filter Function + * =========================== + * + * Function returning a iterator filtering the given iterator. + */ +var Iterator = require('./iterator.js'); + +/** + * Filter. + * + * @param {function} predicate - Predicate function. + * @param {Iterator} target - Target iterator. 
+ * @return {Iterator} + */ +module.exports = function filter(predicate, target) { + return new Iterator(function next() { + var step = target.next(); + + if (step.done) + return step; + + if (!predicate(step.value)) + return next(); + + return step; + }); +}; diff --git a/amplify/functions/deleteDocument/node_modules/obliterator/foreach.d.ts b/amplify/functions/deleteDocument/node_modules/obliterator/foreach.d.ts new file mode 100644 index 0000000..61fb9ea --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/obliterator/foreach.d.ts @@ -0,0 +1 @@ +export default function forEach(iterable: any, callback: (item: any, key: any) => void): void; diff --git a/amplify/functions/deleteDocument/node_modules/obliterator/foreach.js b/amplify/functions/deleteDocument/node_modules/obliterator/foreach.js new file mode 100644 index 0000000..ef90051 --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/obliterator/foreach.js @@ -0,0 +1,156 @@ +/** + * Obliterator ForEach Function + * ============================= + * + * Helper function used to easily iterate over mixed values. + */ + +/** + * Constants. + */ +var ARRAY_BUFFER_SUPPORT = typeof ArrayBuffer !== 'undefined', + SYMBOL_SUPPORT = typeof Symbol !== 'undefined'; + +/** + * Function able to iterate over almost any iterable JS value. + * + * @param {any} iterable - Iterable value. + * @param {function} callback - Callback function. 
+ */ +function forEach(iterable, callback) { + var iterator, k, i, l, s; + + if (!iterable) + throw new Error('obliterator/forEach: invalid iterable.'); + + if (typeof callback !== 'function') + throw new Error('obliterator/forEach: expecting a callback.'); + + // The target is an array or a string or function arguments + if ( + Array.isArray(iterable) || + (ARRAY_BUFFER_SUPPORT && ArrayBuffer.isView(iterable)) || + typeof iterable === 'string' || + iterable.toString() === '[object Arguments]' + ) { + for (i = 0, l = iterable.length; i < l; i++) + callback(iterable[i], i); + return; + } + + // The target has a #.forEach method + if (typeof iterable.forEach === 'function') { + iterable.forEach(callback); + return; + } + + // The target is iterable + if ( + SYMBOL_SUPPORT && + Symbol.iterator in iterable && + typeof iterable.next !== 'function' + ) { + iterable = iterable[Symbol.iterator](); + } + + // The target is an iterator + if (typeof iterable.next === 'function') { + iterator = iterable; + i = 0; + + while ((s = iterator.next(), s.done !== true)) { + callback(s.value, i); + i++; + } + + return; + } + + // The target is a plain object + for (k in iterable) { + if (iterable.hasOwnProperty(k)) { + callback(iterable[k], k); + } + } + + return; +} + +/** + * Same function as the above `forEach` but will yield `null` when the target + * does not have keys. + * + * @param {any} iterable - Iterable value. + * @param {function} callback - Callback function. 
+ */ +forEach.forEachWithNullKeys = function(iterable, callback) { + var iterator, k, i, l, s; + + if (!iterable) + throw new Error('obliterator/forEachWithNullKeys: invalid iterable.'); + + if (typeof callback !== 'function') + throw new Error('obliterator/forEachWithNullKeys: expecting a callback.'); + + // The target is an array or a string or function arguments + if ( + Array.isArray(iterable) || + (ARRAY_BUFFER_SUPPORT && ArrayBuffer.isView(iterable)) || + typeof iterable === 'string' || + iterable.toString() === '[object Arguments]' + ) { + for (i = 0, l = iterable.length; i < l; i++) + callback(iterable[i], null); + return; + } + + // The target is a Set + if (iterable instanceof Set) { + iterable.forEach(function(value) { + callback(value, null); + }); + return; + } + + // The target has a #.forEach method + if (typeof iterable.forEach === 'function') { + iterable.forEach(callback); + return; + } + + // The target is iterable + if ( + SYMBOL_SUPPORT && + Symbol.iterator in iterable && + typeof iterable.next !== 'function' + ) { + iterable = iterable[Symbol.iterator](); + } + + // The target is an iterator + if (typeof iterable.next === 'function') { + iterator = iterable; + i = 0; + + while ((s = iterator.next(), s.done !== true)) { + callback(s.value, null); + i++; + } + + return; + } + + // The target is a plain object + for (k in iterable) { + if (iterable.hasOwnProperty(k)) { + callback(iterable[k], k); + } + } + + return; +}; + +/** + * Exporting. 
+ */ +module.exports = forEach; diff --git a/amplify/functions/deleteDocument/node_modules/obliterator/index.d.ts b/amplify/functions/deleteDocument/node_modules/obliterator/index.d.ts new file mode 100644 index 0000000..9aa15bb --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/obliterator/index.d.ts @@ -0,0 +1,14 @@ +export {default as Iterator} from './iterator'; +export {default as chain} from './chain'; +export {default as combinations} from './combinations'; +export {default as consume} from './consume'; +export {default as filter} from './filter'; +export {default as forEach} from './foreach'; +export {default as map} from './map'; +export {default as match} from './match'; +export {default as permutations} from './permutations'; +export {default as powerSet} from './power-set'; +export {default as range} from './range'; +export {default as split} from './split'; +export {default as take} from './take'; +export {default as takeInto} from './take-into'; diff --git a/amplify/functions/deleteDocument/node_modules/obliterator/index.js b/amplify/functions/deleteDocument/node_modules/obliterator/index.js new file mode 100644 index 0000000..d84da62 --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/obliterator/index.js @@ -0,0 +1,22 @@ +/** + * Obliterator Library Endpoint + * ============================= + * + * Exporting the library's functions. 
+ */ +module.exports = { + Iterator: require('./iterator.js'), + chain: require('./chain.js'), + combinations: require('./combinations.js'), + consume: require('./consume.js'), + filter: require('./filter.js'), + forEach: require('./foreach.js'), + map: require('./map.js'), + match: require('./match.js'), + permutations: require('./permutations.js'), + powerSet: require('./power-set.js'), + range: require('./range.js'), + split: require('./split.js'), + take: require('./take.js'), + takeInto: require('./take-into.js') +}; diff --git a/amplify/functions/deleteDocument/node_modules/obliterator/iterator.d.ts b/amplify/functions/deleteDocument/node_modules/obliterator/iterator.d.ts new file mode 100644 index 0000000..a8ea6a8 --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/obliterator/iterator.d.ts @@ -0,0 +1,19 @@ +type NextFunction = () => IteratorResult; + +export default class Iterator implements IterableIterator { + + // Constructor + constructor(next: NextFunction); + + // Members + done: boolean; + + // Well-known methods + next(): IteratorResult; + [Symbol.iterator](): IterableIterator; + + // Static methods + static of(...args: T[]): Iterator; + static empty(): Iterator; + static is(value: any): boolean; +} diff --git a/amplify/functions/deleteDocument/node_modules/obliterator/iterator.js b/amplify/functions/deleteDocument/node_modules/obliterator/iterator.js new file mode 100644 index 0000000..67652ab --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/obliterator/iterator.js @@ -0,0 +1,104 @@ +/** + * Obliterator Iterator Class + * =========================== + * + * Simple class representing the library's iterators. + */ + +/** + * Iterator class. + * + * @constructor + * @param {function} next - Next function. + */ +function Iterator(next) { + + // Hiding the given function + Object.defineProperty(this, '_next', { + writable: false, + enumerable: false, + value: next + }); + + // Is the iterator complete? 
+ this.done = false; +} + +/** + * Next function. + * + * @return {object} + */ +// NOTE: maybe this should dropped for performance? +Iterator.prototype.next = function() { + if (this.done) + return {done: true}; + + var step = this._next(); + + if (step.done) + this.done = true; + + return step; +}; + +/** + * If symbols are supported, we add `next` to `Symbol.iterator`. + */ +if (typeof Symbol !== 'undefined') + Iterator.prototype[Symbol.iterator] = function() { + return this; + }; + +/** + * Returning an iterator of the given values. + * + * @param {any...} values - Values. + * @return {Iterator} + */ +Iterator.of = function() { + var args = arguments, + l = args.length, + i = 0; + + return new Iterator(function() { + if (i >= l) + return {done: true}; + + return {done: false, value: args[i++]}; + }); +}; + +/** + * Returning an empty iterator. + * + * @return {Iterator} + */ +Iterator.empty = function() { + var iterator = new Iterator(null); + iterator.done = true; + + return iterator; +}; + +/** + * Returning whether the given value is an iterator. + * + * @param {any} value - Value. + * @return {boolean} + */ +Iterator.is = function(value) { + if (value instanceof Iterator) + return true; + + return ( + typeof value === 'object' && + value !== null && + typeof value.next === 'function' + ); +}; + +/** + * Exporting. 
+ */ +module.exports = Iterator; diff --git a/amplify/functions/deleteDocument/node_modules/obliterator/map.d.ts b/amplify/functions/deleteDocument/node_modules/obliterator/map.d.ts new file mode 100644 index 0000000..389591b --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/obliterator/map.d.ts @@ -0,0 +1,5 @@ +import {default as ObliteratorIterator} from './iterator.js'; + +type MapFunction = (item: S) => T; + +export default function map(predicate: MapFunction, iterator: Iterator): ObliteratorIterator; diff --git a/amplify/functions/deleteDocument/node_modules/obliterator/map.js b/amplify/functions/deleteDocument/node_modules/obliterator/map.js new file mode 100644 index 0000000..fd6dd17 --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/obliterator/map.js @@ -0,0 +1,27 @@ +/** + * Obliterator Map Function + * =========================== + * + * Function returning a iterator mapping the given iterator's values. + */ +var Iterator = require('./iterator.js'); + +/** + * Map. + * + * @param {function} mapper - Map function. + * @param {Iterator} target - Target iterator. 
+ * @return {Iterator} + */ +module.exports = function map(mapper, target) { + return new Iterator(function next() { + var step = target.next(); + + if (step.done) + return step; + + return { + value: mapper(step.value) + }; + }); +}; diff --git a/amplify/functions/deleteDocument/node_modules/obliterator/match.d.ts b/amplify/functions/deleteDocument/node_modules/obliterator/match.d.ts new file mode 100644 index 0000000..9a42616 --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/obliterator/match.d.ts @@ -0,0 +1,3 @@ +import {default as ObliteratorIterator} from './iterator.js'; + +export default function match(pattern: RegExp, string: string): ObliteratorIterator; diff --git a/amplify/functions/deleteDocument/node_modules/obliterator/match.js b/amplify/functions/deleteDocument/node_modules/obliterator/match.js new file mode 100644 index 0000000..82edf41 --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/obliterator/match.js @@ -0,0 +1,42 @@ +/** + * Obliterator Match Function + * =========================== + * + * Function returning an iterator over the matches of the given regex on the + * target string. + */ +var Iterator = require('./iterator.js'); + +/** + * Match. + * + * @param {RegExp} pattern - Regular expression to use. + * @param {string} string - Target string. + * @return {Iterator} + */ +module.exports = function match(pattern, string) { + var executed = false; + + if (!(pattern instanceof RegExp)) + throw new Error('obliterator/match: invalid pattern. Expecting a regular expression.'); + + if (typeof string !== 'string') + throw new Error('obliterator/match: invalid target. 
Expecting a string.'); + + return new Iterator(function() { + if (executed && !pattern.global) { + pattern.lastIndex = 0; + return {done: true}; + } + + executed = true; + + var m = pattern.exec(string); + + if (m) + return {value: m}; + + pattern.lastIndex = 0; + return {done: true}; + }); +}; diff --git a/amplify/functions/deleteDocument/node_modules/obliterator/package.json b/amplify/functions/deleteDocument/node_modules/obliterator/package.json new file mode 100644 index 0000000..ddfaead --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/obliterator/package.json @@ -0,0 +1,45 @@ +{ + "name": "obliterator", + "version": "1.6.1", + "description": "Higher order iterator library for JavaScript.", + "main": "index.js", + "scripts": { + "lint": "eslint *.js", + "prepublish": "npm run lint && npm test", + "test": "mocha test.js && npm run test:types", + "test:types": "tsc --lib es2015,dom --noEmit --noImplicitAny --noImplicitReturns ./test-types.ts" + }, + "repository": { + "type": "git", + "url": "git+https://github.com/yomguithereal/obliterator.git" + }, + "keywords": [ + "iterator" + ], + "author": { + "name": "Guillaume Plique", + "url": "http://github.com/Yomguithereal" + }, + "license": "MIT", + "bugs": { + "url": "https://github.com/yomguithereal/obliterator/issues" + }, + "homepage": "https://github.com/yomguithereal/obliterator#readme", + "devDependencies": { + "@yomguithereal/eslint-config": "^4.0.0", + "eslint": "^6.8.0", + "mocha": "^7.0.0", + "typescript": "^3.7.5" + }, + "eslintConfig": { + "extends": "@yomguithereal/eslint-config", + "globals": { + "ArrayBuffer": true, + "Map": true, + "Set": true, + "Symbol": true, + "Uint8Array": true, + "Uint32Array": true + } + } +} diff --git a/amplify/functions/deleteDocument/node_modules/obliterator/permutations.d.ts b/amplify/functions/deleteDocument/node_modules/obliterator/permutations.d.ts new file mode 100644 index 0000000..d48dffd --- /dev/null +++ 
b/amplify/functions/deleteDocument/node_modules/obliterator/permutations.d.ts @@ -0,0 +1,3 @@ +import {default as ObliteratorIterator} from './iterator.js'; + +export default function permutations(array: Array, r: number): ObliteratorIterator>; diff --git a/amplify/functions/deleteDocument/node_modules/obliterator/permutations.js b/amplify/functions/deleteDocument/node_modules/obliterator/permutations.js new file mode 100644 index 0000000..4f4752f --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/obliterator/permutations.js @@ -0,0 +1,96 @@ +/** + * Obliterator Permutations Function + * ================================== + * + * Iterator returning permutations of the given array. + */ +var Iterator = require('./iterator.js'); + +/** + * Helper mapping indices to items. + */ +function indicesToItems(target, items, indices, r) { + for (var i = 0; i < r; i++) + target[i] = items[indices[i]]; +} + +/** + * Permutations. + * + * @param {array} array - Target array. + * @param {number} r - Size of the subsequences. 
+ * @return {Iterator} + */ +module.exports = function permutations(array, r) { + if (!Array.isArray(array)) + throw new Error('obliterator/permutations: first argument should be an array.'); + + var n = array.length; + + if (arguments.length < 2) + r = n; + + if (typeof r !== 'number') + throw new Error('obliterator/permutations: second argument should be omitted or a number.'); + + if (r > n) + throw new Error('obliterator/permutations: the size of the subsequences should not exceed the length of the array.'); + + var indices = new Uint32Array(n), + subsequence = new Array(r), + cycles = new Uint32Array(r), + first = true, + i; + + for (i = 0; i < n; i++) { + indices[i] = i; + + if (i < r) + cycles[i] = n - i; + } + + i = r; + + return new Iterator(function next() { + if (first) { + first = false; + indicesToItems(subsequence, array, indices, r); + return {value: subsequence}; + } + + var tmp, + j; + + i--; + + if (i < 0) + return {done: true}; + + cycles[i]--; + + if (cycles[i] === 0) { + + tmp = indices[i]; + + for (j = i; j < n - 1; j++) + indices[j] = indices[j + 1]; + + indices[n - 1] = tmp; + + cycles[i] = n - i; + return next(); + } + else { + j = cycles[i]; + tmp = indices[i]; + + indices[i] = indices[n - j]; + indices[n - j] = tmp; + + i = r; + + indicesToItems(subsequence, array, indices, r); + return {value: subsequence}; + } + }); +}; diff --git a/amplify/functions/deleteDocument/node_modules/obliterator/power-set.d.ts b/amplify/functions/deleteDocument/node_modules/obliterator/power-set.d.ts new file mode 100644 index 0000000..1f238d0 --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/obliterator/power-set.d.ts @@ -0,0 +1,3 @@ +import {default as ObliteratorIterator} from './iterator.js'; + +export default function powerSet(array: Array): ObliteratorIterator>; diff --git a/amplify/functions/deleteDocument/node_modules/obliterator/power-set.js b/amplify/functions/deleteDocument/node_modules/obliterator/power-set.js new file mode 100644 
index 0000000..a9d1850 --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/obliterator/power-set.js @@ -0,0 +1,28 @@ +/** + * Obliterator Power Set Function + * =============================== + * + * Iterator returning the power set of the given array. + */ +var Iterator = require('./iterator.js'), + combinations = require('./combinations.js'), + chain = require('./chain.js'); + +/** + * Power set. + * + * @param {array} array - Target array. + * @return {Iterator} + */ +module.exports = function powerSet(array) { + var n = array.length; + + var iterators = new Array(n + 1); + + iterators[0] = Iterator.of([]); + + for (var i = 1; i < n + 1; i++) + iterators[i] = combinations(array, i); + + return chain.apply(null, iterators); +}; diff --git a/amplify/functions/deleteDocument/node_modules/obliterator/range.d.ts b/amplify/functions/deleteDocument/node_modules/obliterator/range.d.ts new file mode 100644 index 0000000..498229e --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/obliterator/range.d.ts @@ -0,0 +1,5 @@ +import {default as ObliteratorIterator} from './iterator.js'; + +export default function range(end: number): ObliteratorIterator; +export default function range(start: number, end: number): ObliteratorIterator; +export default function range(start: number, end: number, step: number): ObliteratorIterator; diff --git a/amplify/functions/deleteDocument/node_modules/obliterator/range.js b/amplify/functions/deleteDocument/node_modules/obliterator/range.js new file mode 100644 index 0000000..eea7e4d --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/obliterator/range.js @@ -0,0 +1,45 @@ +/** + * Obliterator Range Function + * =========================== + * + * Function returning a range iterator. + */ +var Iterator = require('./iterator.js'); + +/** + * Range. + * + * @param {number} start - Start. + * @param {number} end - End. + * @param {number} step - Step. 
+ * @return {Iterator} + */ +module.exports = function range(start, end, step) { + if (arguments.length === 1) { + end = start; + start = 0; + } + + if (arguments.length < 3) + step = 1; + + var i = start; + + var iterator = new Iterator(function() { + if (i < end) { + var value = i; + + i += step; + + return {value: value}; + } + + return {done: true}; + }); + + iterator.start = start; + iterator.end = end; + iterator.step = step; + + return iterator; +}; diff --git a/amplify/functions/deleteDocument/node_modules/obliterator/split.d.ts b/amplify/functions/deleteDocument/node_modules/obliterator/split.d.ts new file mode 100644 index 0000000..e9124ab --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/obliterator/split.d.ts @@ -0,0 +1,3 @@ +import {default as ObliteratorIterator} from './iterator.js'; + +export default function split(pattern: RegExp, string: string): ObliteratorIterator; diff --git a/amplify/functions/deleteDocument/node_modules/obliterator/split.js b/amplify/functions/deleteDocument/node_modules/obliterator/split.js new file mode 100644 index 0000000..09abf83 --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/obliterator/split.js @@ -0,0 +1,68 @@ +/** + * Obliterator Split Function + * =========================== + * + * Function returning an iterator over the pieces of a regex split. + */ +var Iterator = require('./iterator.js'); + +/** + * Function used to make the given pattern global. + * + * @param {RegExp} pattern - Regular expression to make global. + * @return {RegExp} + */ +function makeGlobal(pattern) { + var flags = 'g'; + + if (pattern.multiline) flags += 'm'; + if (pattern.ignoreCase) flags += 'i'; + if (pattern.sticky) flags += 'y'; + if (pattern.unicode) flags += 'u'; + + return new RegExp(pattern.source, flags); +} + +/** + * Split. + * + * @param {RegExp} pattern - Regular expression to use. + * @param {string} string - Target string. 
+ * @return {Iterator} + */ +module.exports = function split(pattern, string) { + if (!(pattern instanceof RegExp)) + throw new Error('obliterator/split: invalid pattern. Expecting a regular expression.'); + + if (typeof string !== 'string') + throw new Error('obliterator/split: invalid target. Expecting a string.'); + + // NOTE: cloning the pattern has a performance cost but side effects for not + // doing so might be worse. + pattern = makeGlobal(pattern); + + var consumed = false, + current = 0; + + return new Iterator(function() { + if (consumed) + return {done: true}; + + var match = pattern.exec(string), + value, + length; + + if (match) { + length = match.index + match[0].length; + + value = string.slice(current, match.index); + current = length; + } + else { + consumed = true; + value = string.slice(current); + } + + return {value: value}; + }); +}; diff --git a/amplify/functions/deleteDocument/node_modules/obliterator/take-into.d.ts b/amplify/functions/deleteDocument/node_modules/obliterator/take-into.d.ts new file mode 100644 index 0000000..d2bf004 --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/obliterator/take-into.d.ts @@ -0,0 +1,5 @@ +import {default as ObliteratorIterator} from './iterator.js'; + +// Requires a resolution of https://github.com/microsoft/TypeScript/issues/1213 +// export default function takeInto, T>(ArrayClass: new (n: number) => C, iterator: Iterator, n: number): C; +export default function takeInto(ArrayClass: new (arrayLength: number) => T[], iterator: Iterator, n: number): T[]; diff --git a/amplify/functions/deleteDocument/node_modules/obliterator/take-into.js b/amplify/functions/deleteDocument/node_modules/obliterator/take-into.js new file mode 100644 index 0000000..6b814f8 --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/obliterator/take-into.js @@ -0,0 +1,40 @@ +/* eslint no-constant-condition: 0 */ +/** + * Obliterator Take Into Function + * =============================== + * + * Same as 
the take function but enables the user to select an array class + * in which to insert the retrieved values. + */ + +/** + * Take Into. + * + * @param {function} ArrayClass - Array class to use. + * @param {Iterator} iterator - Target iterator. + * @param {number} n - Number of items to take. + * @return {array} + */ +module.exports = function takeInto(ArrayClass, iterator, n) { + var array = new ArrayClass(n), + step, + i = 0; + + while (true) { + + if (i === n) + return array; + + step = iterator.next(); + + if (step.done) { + + if (i !== n) + return array.slice(0, i); + + return array; + } + + array[i++] = step.value; + } +}; diff --git a/amplify/functions/deleteDocument/node_modules/obliterator/take.d.ts b/amplify/functions/deleteDocument/node_modules/obliterator/take.d.ts new file mode 100644 index 0000000..c9b5026 --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/obliterator/take.d.ts @@ -0,0 +1,3 @@ +import {default as ObliteratorIterator} from './iterator.js'; + +export default function take(iterator: Iterator, n: number): Array; diff --git a/amplify/functions/deleteDocument/node_modules/obliterator/take.js b/amplify/functions/deleteDocument/node_modules/obliterator/take.js new file mode 100644 index 0000000..d7c5e96 --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/obliterator/take.js @@ -0,0 +1,40 @@ +/* eslint no-constant-condition: 0 */ +/** + * Obliterator Take Function + * ========================== + * + * Function taking n or every value of the given iterator and returns them + * into an array. + */ + +/** + * Take. + * + * @param {Iterator} iterator - Target iterator. + * @param {number} [n] - Optional number of items to take. + * @return {array} + */ +module.exports = function take(iterator, n) { + var l = arguments.length > 1 ? n : Infinity, + array = l !== Infinity ? 
new Array(l) : [], + step, + i = 0; + + while (true) { + + if (i === l) + return array; + + step = iterator.next(); + + if (step.done) { + + if (i !== n) + return array.slice(0, i); + + return array; + } + + array[i++] = step.value; + } +}; diff --git a/amplify/functions/deleteDocument/node_modules/strnum/.vscode/launch.json b/amplify/functions/deleteDocument/node_modules/strnum/.vscode/launch.json new file mode 100644 index 0000000..b87b349 --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/strnum/.vscode/launch.json @@ -0,0 +1,25 @@ +{ + "version": "0.2.0", + "configurations": [ + { + "type": "node", + "request": "launch", + "name": "Jasmine Tests", + "program": "${workspaceFolder}/node_modules/jasmine/bin/jasmine.js", + "args": [ + "${workspaceFolder}/spec/attr_spec.js" + ], + "internalConsoleOptions": "openOnSessionStart" + },{ + "type": "node", + "request": "launch", + "name": "Jasmine Tests current test file", + "program": "${workspaceFolder}/node_modules/jasmine/bin/jasmine.js", + "args": [ + "${file}" + ], + "internalConsoleOptions": "openOnSessionStart" + } + ] + +} \ No newline at end of file diff --git a/amplify/functions/deleteDocument/node_modules/strnum/CHANGELOG.md b/amplify/functions/deleteDocument/node_modules/strnum/CHANGELOG.md new file mode 100644 index 0000000..582e460 --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/strnum/CHANGELOG.md @@ -0,0 +1,22 @@ + +**1.1.2 / 2025-02-27** +- fix skiplike for 0 + +**1.1.1 / 2025-02-21** +- All recent fixes of version 2 + +**2.0.4 / 2025-02-20** +- remove console log + +**2.0.3 / 2025-02-20** +- fix for string which are falsly identified as e-notation + +**2.0.1 / 2025-02-20** +- fix: handle only zeros +- fix: return original string when NaN + +**2.0.0 / 2025-02-20** +- Migrating to ESM modules. 
No functional change + +**1.1.0 / 2025-02-20** +- fix (#9): support missing floating point and e notations \ No newline at end of file diff --git a/amplify/functions/deleteDocument/node_modules/strnum/LICENSE b/amplify/functions/deleteDocument/node_modules/strnum/LICENSE new file mode 100644 index 0000000..6450554 --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/strnum/LICENSE @@ -0,0 +1,21 @@ +MIT License + +Copyright (c) 2021 Natural Intelligence + +Permission is hereby granted, free of charge, to any person obtaining a copy +of this software and associated documentation files (the "Software"), to deal +in the Software without restriction, including without limitation the rights +to use, copy, modify, merge, publish, distribute, sublicense, and/or sell +copies of the Software, and to permit persons to whom the Software is +furnished to do so, subject to the following conditions: + +The above copyright notice and this permission notice shall be included in all +copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, +OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE +SOFTWARE. 
diff --git a/amplify/functions/deleteDocument/node_modules/strnum/README.md b/amplify/functions/deleteDocument/node_modules/strnum/README.md new file mode 100644 index 0000000..419e8ef --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/strnum/README.md @@ -0,0 +1,97 @@ +# strnum +Parse string into Number based on configuration + +## Users + + + + + +Many React Native projects and plugins + +## Usage + +```bash +npm install strnum +``` +```js +const toNumber = require("strnum"); + +toNumber(undefined) // undefined +toNumber(null)) //null +toNumber("")) // "" +toNumber("string"); //"string") +toNumber("12,12"); //"12,12") +toNumber("12 12"); //"12 12") +toNumber("12-12"); //"12-12") +toNumber("12.12.12"); //"12.12.12") +toNumber("0x2f"); //47) +toNumber("-0x2f"); //-47) +toNumber("0x2f", { hex : true}); //47) +toNumber("-0x2f", { hex : true}); //-47) +toNumber("0x2f", { hex : false}); //"0x2f") +toNumber("-0x2f", { hex : false}); //"-0x2f") +toNumber("06"); //6) +toNumber("06", { leadingZeros : true}); //6) +toNumber("06", { leadingZeros : false}); //"06") + +toNumber("006"); //6) +toNumber("006", { leadingZeros : true}); //6) +toNumber("006", { leadingZeros : false}); //"006") +toNumber("0.0"); //0) +toNumber("00.00"); //0) +toNumber("0.06"); //0.06) +toNumber("00.6"); //0.6) +toNumber(".006"); //0.006) +toNumber("6.0"); //6) +toNumber("06.0"); //6) + +toNumber("0.0", { leadingZeros : false}); //0) +toNumber("00.00", { leadingZeros : false}); //"00.00") +toNumber("0.06", { leadingZeros : false}); //0.06) +toNumber("00.6", { leadingZeros : false}); //"00.6") +toNumber(".006", { leadingZeros : false}); //0.006) +toNumber("6.0" , { leadingZeros : false}); //6) +toNumber("06.0" , { leadingZeros : false}); //"06.0") +toNumber("-06"); //-6) +toNumber("-06", { leadingZeros : true}); //-6) +toNumber("-06", { leadingZeros : false}); //"-06") + +toNumber("-0.0"); //-0) +toNumber("-00.00"); //-0) +toNumber("-0.06"); //-0.06) +toNumber("-00.6"); //-0.6) 
+toNumber("-.006"); //-0.006) +toNumber("-6.0"); //-6) +toNumber("-06.0"); //-6) + +toNumber("-0.0" , { leadingZeros : false}); //-0) +toNumber("-00.00", { leadingZeros : false}); //"-00.00") +toNumber("-0.06", { leadingZeros : false}); //-0.06) +toNumber("-00.6", { leadingZeros : false}); //"-00.6") +toNumber("-.006", {leadingZeros : false}); //-0.006) +toNumber("-6.0" , { leadingZeros : false}); //-6) +toNumber("-06.0" , { leadingZeros : false}); //"-06.0") +toNumber("420926189200190257681175017717") ; //4.209261892001902e+29) +toNumber("000000000000000000000000017717" , { leadingZeros : false}); //"000000000000000000000000017717") +toNumber("000000000000000000000000017717" , { leadingZeros : true}); //17717) +toNumber("01.0e2" , { leadingZeros : false}); //"01.0e2") +toNumber("-01.0e2" , { leadingZeros : false}); //"-01.0e2") +toNumber("01.0e2") ; //100) +toNumber("-01.0e2") ; //-100) +toNumber("1.0e2") ; //100) + +toNumber("-1.0e2") ; //-100) +toNumber("1.0e-2"); //0.01) + +toNumber("+1212121212"); // 1212121212 +toNumber("+1212121212", { skipLike: /\+[0-9]{10}/} )); //"+1212121212" +``` + +Supported Options +```js +hex: true, //when hexadecimal string should be parsed +leadingZeros: true, //when number with leading zeros like 08 should be parsed. 
0.0 is not impacted +eNotation: true, //when number with eNotation or number parsed in eNotation should be considered +skipLike: /regex/ //when string should not be parsed when it matches the specified regular expression +``` diff --git a/amplify/functions/deleteDocument/node_modules/strnum/package.json b/amplify/functions/deleteDocument/node_modules/strnum/package.json new file mode 100644 index 0000000..90a1b96 --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/strnum/package.json @@ -0,0 +1,30 @@ +{ + "name": "strnum", + "version": "1.1.2", + "description": "Parse String to Number based on configuration", + "main": "strnum.js", + "scripts": { + "test": "jasmine strnum.test.js" + }, + "keywords": [ + "string", + "number", + "parse", + "convert" + ], + "repository": { + "type": "git", + "url": "https://github.com/NaturalIntelligence/strnum" + }, + "author": "Amit Gupta (https://amitkumargupta.work/)", + "license": "MIT", + "funding": [ + { + "type": "github", + "url": "https://github.com/sponsors/NaturalIntelligence" + } + ], + "devDependencies": { + "jasmine": "^5.6.0" + } +} diff --git a/amplify/functions/deleteDocument/node_modules/strnum/strnum.js b/amplify/functions/deleteDocument/node_modules/strnum/strnum.js new file mode 100644 index 0000000..c3bd08e --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/strnum/strnum.js @@ -0,0 +1,111 @@ +const hexRegex = /^[-+]?0x[a-fA-F0-9]+$/; +const numRegex = /^([\-\+])?(0*)([0-9]*(\.[0-9]*)?)$/; +// const octRegex = /^0x[a-z0-9]+/; +// const binRegex = /0x[a-z0-9]+/; + + +const consider = { + hex : true, + // oct: false, + leadingZeros: true, + decimalPoint: "\.", + eNotation: true, + //skipLike: /regex/ +}; + +function toNumber(str, options = {}){ + options = Object.assign({}, consider, options ); + if(!str || typeof str !== "string" ) return str; + + let trimmedStr = str.trim(); + + if(options.skipLike !== undefined && options.skipLike.test(trimmedStr)) return str; + else if(str==="0") 
return 0; + else if (options.hex && hexRegex.test(trimmedStr)) { + return parse_int(trimmedStr, 16); + // }else if (options.oct && octRegex.test(str)) { + // return Number.parseInt(val, 8); + }else if (trimmedStr.search(/[eE]/)!== -1) { //eNotation + const notation = trimmedStr.match(/^([-\+])?(0*)([0-9]*(\.[0-9]*)?[eE][-\+]?[0-9]+)$/); + // +00.123 => [ , '+', '00', '.123', .. + if(notation){ + // console.log(notation) + if(options.leadingZeros){ //accept with leading zeros + trimmedStr = (notation[1] || "") + notation[3]; + }else{ + if(notation[2] === "0" && notation[3][0]=== "."){ //valid number + }else{ + return str; + } + } + return options.eNotation ? Number(trimmedStr) : str; + }else{ + return str; + } + // }else if (options.parseBin && binRegex.test(str)) { + // return Number.parseInt(val, 2); + }else{ + //separate negative sign, leading zeros, and rest number + const match = numRegex.exec(trimmedStr); + // +00.123 => [ , '+', '00', '.123', .. + if(match){ + const sign = match[1]; + const leadingZeros = match[2]; + let numTrimmedByZeros = trimZeros(match[3]); //complete num without leading zeros + //trim ending zeros for floating number + + if(!options.leadingZeros && leadingZeros.length > 0 && sign && trimmedStr[2] !== ".") return str; //-0123 + else if(!options.leadingZeros && leadingZeros.length > 0 && !sign && trimmedStr[1] !== ".") return str; //0123 + else if(options.leadingZeros && leadingZeros===str) return 0; //00 + + else{//no leading zeros or leading zeros are allowed + const num = Number(trimmedStr); + const numStr = "" + num; + + if(numStr.search(/[eE]/) !== -1){ //given number is long and parsed to eNotation + if(options.eNotation) return num; + else return str; + }else if(trimmedStr.indexOf(".") !== -1){ //floating number + if(numStr === "0" && (numTrimmedByZeros === "") ) return num; //0.0 + else if(numStr === numTrimmedByZeros) return num; //0.456. 
0.79000 + else if( sign && numStr === "-"+numTrimmedByZeros) return num; + else return str; + } + + if(leadingZeros){ + return (numTrimmedByZeros === numStr) || (sign+numTrimmedByZeros === numStr) ? num : str + }else { + return (trimmedStr === numStr) || (trimmedStr === sign+numStr) ? num : str + } + } + }else{ //non-numeric string + return str; + } + } +} + +/** + * + * @param {string} numStr without leading zeros + * @returns + */ +function trimZeros(numStr){ + if(numStr && numStr.indexOf(".") !== -1){//float + numStr = numStr.replace(/0+$/, ""); //remove ending zeros + if(numStr === ".") numStr = "0"; + else if(numStr[0] === ".") numStr = "0"+numStr; + else if(numStr[numStr.length-1] === ".") numStr = numStr.substr(0,numStr.length-1); + return numStr; + } + return numStr; +} + +function parse_int(numStr, base){ + //polyfill + if(parseInt) return parseInt(numStr, base); + else if(Number.parseInt) return Number.parseInt(numStr, base); + else if(window && window.parseInt) return window.parseInt(numStr, base); + else throw new Error("parseInt, Number.parseInt, window.parseInt are not supported") +} + +module.exports = toNumber; \ No newline at end of file diff --git a/amplify/functions/deleteDocument/node_modules/strnum/strnum.test.js b/amplify/functions/deleteDocument/node_modules/strnum/strnum.test.js new file mode 100644 index 0000000..c476614 --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/strnum/strnum.test.js @@ -0,0 +1,165 @@ +const toNumber = require("./strnum.js"); + +describe("Should convert all the valid numeric strings to number", () => { + it("should return undefined, null, empty string, or non-numeric as it is", () => { + expect(toNumber(undefined)).not.toBeDefined(); + expect(toNumber(null)).toEqual(null); + expect(toNumber("")).toEqual(""); + expect(toNumber("string")).toEqual("string"); + expect(toNumber("e89794659669cb7bb967db73a7ea6889c3891727")).toEqual("e89794659669cb7bb967db73a7ea6889c3891727"); + + }); + it("should not parse 
number with spaces or comma", () => { + expect(toNumber("12,12")).toEqual("12,12"); + expect(toNumber("12 12")).toEqual("12 12"); + expect(toNumber("12-12")).toEqual("12-12"); + expect(toNumber("12.12.12")).toEqual("12.12.12"); + }) + it("should consider + sign", () => { + expect(toNumber("+12")).toEqual(12); + expect(toNumber("+ 12")).toEqual("+ 12"); + expect(toNumber("12+12")).toEqual("12+12"); + expect(toNumber("1212+")).toEqual("1212+"); + }) + it("should parse hexadecimal values", () => { + expect(toNumber("0x2f")).toEqual(47); + expect(toNumber("-0x2f")).toEqual(-47); + expect(toNumber("0x2f", { hex : true})).toEqual(47); + expect(toNumber("-0x2f", { hex : true})).toEqual(-47); + expect(toNumber("0x2f", { hex : false})).toEqual("0x2f"); + expect(toNumber("-0x2f", { hex : false})).toEqual("-0x2f"); + }) + it("should not parse strings with 0x embedded", () => { + expect(toNumber("0xzz")).toEqual("0xzz"); + expect(toNumber("iweraf0x123qwerqwer")).toEqual("iweraf0x123qwerqwer"); + expect(toNumber("1230x55")).toEqual("1230x55"); + expect(toNumber("JVBERi0xLjMNCiXi48")).toEqual("JVBERi0xLjMNCiXi48"); + }) + it("leading zeros", () => { + expect(toNumber("0")).toEqual(0); + expect(toNumber("00")).toEqual(0); + expect(toNumber("00.0")).toEqual(0); + + expect(toNumber("0",{ leadingZeros : false})).toEqual(0); + expect(toNumber("00",{ leadingZeros : false})).toEqual("00"); + expect(toNumber("00.0",{ leadingZeros : false})).toEqual("00.0"); + + expect(toNumber("06")).toEqual(6); + expect(toNumber("06", { leadingZeros : true})).toEqual(6); + expect(toNumber("06", { leadingZeros : false})).toEqual("06"); + + expect(toNumber("006")).toEqual(6); + expect(toNumber("006", { leadingZeros : true})).toEqual(6); + expect(toNumber("006", { leadingZeros : false})).toEqual("006"); + + expect(toNumber("000000000000000000000000017717" , { leadingZeros : false})).toEqual("000000000000000000000000017717"); + expect(toNumber("000000000000000000000000017717" , { leadingZeros : 
true})).toEqual(17717); + expect(toNumber("020211201030005811824") ).toEqual("020211201030005811824"); + expect(toNumber("0420926189200190257681175017717") ).toEqual(4.209261892001902e+29); + }) + it("invalid floating number", () => { + expect(toNumber("20.21.030") ).toEqual("20.21.030"); + expect(toNumber("0.21.030") ).toEqual("0.21.030"); + expect(toNumber("0.21.") ).toEqual("0.21."); + }); + it("floating point and leading zeros", () => { + expect(toNumber("0.")).toEqual(0); + expect(toNumber("+0.")).toEqual(0); + expect(toNumber("-0.")).toEqual(-0); + expect(toNumber("1.") ).toEqual(1); + expect(toNumber("00.00")).toEqual(0); + expect(toNumber("0.06")).toEqual(0.06); + expect(toNumber("00.6")).toEqual(0.6); + expect(toNumber(".006")).toEqual(0.006); + expect(toNumber("6.0")).toEqual(6); + expect(toNumber("06.0")).toEqual(6); + + expect(toNumber("0.0", { leadingZeros : false})).toEqual(0); + expect(toNumber("00.00", { leadingZeros : false})).toEqual("00.00"); + expect(toNumber("0.06", { leadingZeros : false})).toEqual(0.06); + expect(toNumber("00.6", { leadingZeros : false})).toEqual("00.6"); + expect(toNumber(".006", { leadingZeros : false})).toEqual(0.006); + expect(toNumber("6.0" , { leadingZeros : false})).toEqual(6); + expect(toNumber("06.0" , { leadingZeros : false})).toEqual("06.0"); + }) + it("negative number leading zeros", () => { + expect(toNumber("+06")).toEqual(6); + expect(toNumber("-06")).toEqual(-6); + expect(toNumber("-06", { leadingZeros : true})).toEqual(-6); + expect(toNumber("-06", { leadingZeros : false})).toEqual("-06"); + + expect(toNumber("-0.0")).toEqual(-0); + expect(toNumber("-00.00")).toEqual(-0); + expect(toNumber("-0.06")).toEqual(-0.06); + expect(toNumber("-00.6")).toEqual(-0.6); + expect(toNumber("-.006")).toEqual(-0.006); + expect(toNumber("-6.0")).toEqual(-6); + expect(toNumber("-06.0")).toEqual(-6); + + expect(toNumber("-0.0" , { leadingZeros : false})).toEqual(-0); + expect(toNumber("-00.00", { leadingZeros : 
false})).toEqual("-00.00"); + expect(toNumber("-0.06", { leadingZeros : false})).toEqual(-0.06); + expect(toNumber("-00.6", { leadingZeros : false})).toEqual("-00.6"); + expect(toNumber("-.006", {leadingZeros : false})).toEqual(-0.006); + expect(toNumber("-6.0" , { leadingZeros : false})).toEqual(-6); + expect(toNumber("-06.0" , { leadingZeros : false})).toEqual("-06.0"); + }) + it("long number", () => { + expect(toNumber("020211201030005811824") ).toEqual("020211201030005811824"); + expect(toNumber("20211201030005811824") ).toEqual("20211201030005811824"); + expect(toNumber("20.211201030005811824") ).toEqual("20.211201030005811824"); + expect(toNumber("0.211201030005811824") ).toEqual("0.211201030005811824"); + }); + it("scientific notation", () => { + expect(toNumber("01.0e2" , { leadingZeros : false})).toEqual("01.0e2"); + expect(toNumber("-01.0e2" , { leadingZeros : false})).toEqual("-01.0e2"); + expect(toNumber("01.0e2") ).toEqual(100); + expect(toNumber("-01.0e2") ).toEqual(-100); + expect(toNumber("1.0e2") ).toEqual(100); + + expect(toNumber("-1.0e2") ).toEqual(-100); + expect(toNumber("1.0e-2")).toEqual(0.01); + + expect(toNumber("420926189200190257681175017717") ).toEqual(4.209261892001902e+29); + expect(toNumber("420926189200190257681175017717" , { eNotation: false} )).toEqual("420926189200190257681175017717"); + + expect(toNumber("1e-2")).toEqual(0.01); + expect(toNumber("1e+2")).toEqual(100); + expect(toNumber("1.e+2")).toEqual(100); + }); + + it("scientific notation with upper E", () => { + expect(toNumber("01.0E2" , { leadingZeros : false})).toEqual("01.0E2"); + expect(toNumber("-01.0E2" , { leadingZeros : false})).toEqual("-01.0E2"); + expect(toNumber("01.0E2") ).toEqual(100); + expect(toNumber("-01.0E2") ).toEqual(-100); + expect(toNumber("1.0E2") ).toEqual(100); + + expect(toNumber("-1.0E2") ).toEqual(-100); + expect(toNumber("1.0E-2")).toEqual(0.01); + }); + + it("should skip matching pattern", () => { + expect(toNumber("0", { skipLike: /.*/ 
})).toEqual("0"); + expect(toNumber("+12", { skipLike: /\+[0-9]{10}/} )).toEqual(12); + expect(toNumber("12+12", { skipLike: /\+[0-9]{10}/} )).toEqual("12+12"); + expect(toNumber("12+1212121212", { skipLike: /\+[0-9]{10}/} )).toEqual("12+1212121212"); + expect(toNumber("+1212121212") ).toEqual(1212121212); + expect(toNumber("+1212121212", { skipLike: /\+[0-9]{10}/} )).toEqual("+1212121212"); + }) + it("should not change string if not number", () => { + expect(toNumber("+12 12")).toEqual("+12 12"); + expect(toNumber(" +12 12 ")).toEqual(" +12 12 "); + }) + it("should ignore sorrounded spaces ", () => { + expect(toNumber(" +1212 ")).toEqual(1212); + }) + + it("negative numbers", () => { + expect(toNumber("+1212")).toEqual(1212); + expect(toNumber("+12.12")).toEqual(12.12); + expect(toNumber("-12.12")).toEqual(-12.12); + expect(toNumber("-012.12")).toEqual(-12.12); + expect(toNumber("-012.12")).toEqual(-12.12); + }) +}); diff --git a/amplify/functions/deleteDocument/node_modules/tslib/CopyrightNotice.txt b/amplify/functions/deleteDocument/node_modules/tslib/CopyrightNotice.txt new file mode 100644 index 0000000..0e42542 --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/tslib/CopyrightNotice.txt @@ -0,0 +1,15 @@ +/****************************************************************************** +Copyright (c) Microsoft Corporation. + +Permission to use, copy, modify, and/or distribute this software for any +purpose with or without fee is hereby granted. + +THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES WITH +REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF MERCHANTABILITY +AND FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR ANY SPECIAL, DIRECT, +INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES WHATSOEVER RESULTING FROM +LOSS OF USE, DATA OR PROFITS, WHETHER IN AN ACTION OF CONTRACT, NEGLIGENCE OR +OTHER TORTIOUS ACTION, ARISING OUT OF OR IN CONNECTION WITH THE USE OR +PERFORMANCE OF THIS SOFTWARE. 
+***************************************************************************** */ + diff --git a/amplify/functions/deleteDocument/node_modules/tslib/LICENSE.txt b/amplify/functions/deleteDocument/node_modules/tslib/LICENSE.txt new file mode 100644 index 0000000..bfe6430 --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/tslib/LICENSE.txt @@ -0,0 +1,12 @@ +Copyright (c) Microsoft Corporation. + +Permission to use, copy, modify, and/or distribute this software for any +purpose with or without fee is hereby granted. + +THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES WITH +REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF MERCHANTABILITY +AND FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR ANY SPECIAL, DIRECT, +INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES WHATSOEVER RESULTING FROM +LOSS OF USE, DATA OR PROFITS, WHETHER IN AN ACTION OF CONTRACT, NEGLIGENCE OR +OTHER TORTIOUS ACTION, ARISING OUT OF OR IN CONNECTION WITH THE USE OR +PERFORMANCE OF THIS SOFTWARE. \ No newline at end of file diff --git a/amplify/functions/deleteDocument/node_modules/tslib/README.md b/amplify/functions/deleteDocument/node_modules/tslib/README.md new file mode 100644 index 0000000..290cc61 --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/tslib/README.md @@ -0,0 +1,164 @@ +# tslib + +This is a runtime library for [TypeScript](https://www.typescriptlang.org/) that contains all of the TypeScript helper functions. + +This library is primarily used by the `--importHelpers` flag in TypeScript. 
+When using `--importHelpers`, a module that uses helper functions like `__extends` and `__assign` in the following emitted file: + +```ts +var __assign = (this && this.__assign) || Object.assign || function(t) { + for (var s, i = 1, n = arguments.length; i < n; i++) { + s = arguments[i]; + for (var p in s) if (Object.prototype.hasOwnProperty.call(s, p)) + t[p] = s[p]; + } + return t; +}; +exports.x = {}; +exports.y = __assign({}, exports.x); + +``` + +will instead be emitted as something like the following: + +```ts +var tslib_1 = require("tslib"); +exports.x = {}; +exports.y = tslib_1.__assign({}, exports.x); +``` + +Because this can avoid duplicate declarations of things like `__extends`, `__assign`, etc., this means delivering users smaller files on average, as well as less runtime overhead. +For optimized bundles with TypeScript, you should absolutely consider using `tslib` and `--importHelpers`. + +# Installing + +For the latest stable version, run: + +## npm + +```sh +# TypeScript 3.9.2 or later +npm install tslib + +# TypeScript 3.8.4 or earlier +npm install tslib@^1 + +# TypeScript 2.3.2 or earlier +npm install tslib@1.6.1 +``` + +## yarn + +```sh +# TypeScript 3.9.2 or later +yarn add tslib + +# TypeScript 3.8.4 or earlier +yarn add tslib@^1 + +# TypeScript 2.3.2 or earlier +yarn add tslib@1.6.1 +``` + +## bower + +```sh +# TypeScript 3.9.2 or later +bower install tslib + +# TypeScript 3.8.4 or earlier +bower install tslib@^1 + +# TypeScript 2.3.2 or earlier +bower install tslib@1.6.1 +``` + +## JSPM + +```sh +# TypeScript 3.9.2 or later +jspm install tslib + +# TypeScript 3.8.4 or earlier +jspm install tslib@^1 + +# TypeScript 2.3.2 or earlier +jspm install tslib@1.6.1 +``` + +# Usage + +Set the `importHelpers` compiler option on the command line: + +``` +tsc --importHelpers file.ts +``` + +or in your tsconfig.json: + +```json +{ + "compilerOptions": { + "importHelpers": true + } +} +``` + +#### For bower and JSPM users + +You will need to add a `paths` 
mapping for `tslib`, e.g. For Bower users: + +```json +{ + "compilerOptions": { + "module": "amd", + "importHelpers": true, + "baseUrl": "./", + "paths": { + "tslib" : ["bower_components/tslib/tslib.d.ts"] + } + } +} +``` + +For JSPM users: + +```json +{ + "compilerOptions": { + "module": "system", + "importHelpers": true, + "baseUrl": "./", + "paths": { + "tslib" : ["jspm_packages/npm/tslib@2.x.y/tslib.d.ts"] + } + } +} +``` + +## Deployment + +- Choose your new version number +- Set it in `package.json` and `bower.json` +- Create a tag: `git tag [version]` +- Push the tag: `git push --tags` +- Create a [release in GitHub](https://github.com/microsoft/tslib/releases) +- Run the [publish to npm](https://github.com/microsoft/tslib/actions?query=workflow%3A%22Publish+to+NPM%22) workflow + +Done. + +# Contribute + +There are many ways to [contribute](https://github.com/Microsoft/TypeScript/blob/master/CONTRIBUTING.md) to TypeScript. + +* [Submit bugs](https://github.com/Microsoft/TypeScript/issues) and help us verify fixes as they are checked in. +* Review the [source code changes](https://github.com/Microsoft/TypeScript/pulls). +* Engage with other TypeScript users and developers on [StackOverflow](http://stackoverflow.com/questions/tagged/typescript). +* Join the [#typescript](http://twitter.com/#!/search/realtime/%23typescript) discussion on Twitter. +* [Contribute bug fixes](https://github.com/Microsoft/TypeScript/blob/master/CONTRIBUTING.md). 
+ +# Documentation + +* [Quick tutorial](http://www.typescriptlang.org/Tutorial) +* [Programming handbook](http://www.typescriptlang.org/Handbook) +* [Homepage](http://www.typescriptlang.org/) diff --git a/amplify/functions/deleteDocument/node_modules/tslib/SECURITY.md b/amplify/functions/deleteDocument/node_modules/tslib/SECURITY.md new file mode 100644 index 0000000..869fdfe --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/tslib/SECURITY.md @@ -0,0 +1,41 @@ + + +## Security + +Microsoft takes the security of our software products and services seriously, which includes all source code repositories managed through our GitHub organizations, which include [Microsoft](https://github.com/Microsoft), [Azure](https://github.com/Azure), [DotNet](https://github.com/dotnet), [AspNet](https://github.com/aspnet), [Xamarin](https://github.com/xamarin), and [our GitHub organizations](https://opensource.microsoft.com/). + +If you believe you have found a security vulnerability in any Microsoft-owned repository that meets [Microsoft's definition of a security vulnerability](https://aka.ms/opensource/security/definition), please report it to us as described below. + +## Reporting Security Issues + +**Please do not report security vulnerabilities through public GitHub issues.** + +Instead, please report them to the Microsoft Security Response Center (MSRC) at [https://msrc.microsoft.com/create-report](https://aka.ms/opensource/security/create-report). + +If you prefer to submit without logging in, send email to [secure@microsoft.com](mailto:secure@microsoft.com). If possible, encrypt your message with our PGP key; please download it from the [Microsoft Security Response Center PGP Key page](https://aka.ms/opensource/security/pgpkey). + +You should receive a response within 24 hours. If for some reason you do not, please follow up via email to ensure we received your original message. 
Additional information can be found at [microsoft.com/msrc](https://aka.ms/opensource/security/msrc). + +Please include the requested information listed below (as much as you can provide) to help us better understand the nature and scope of the possible issue: + + * Type of issue (e.g. buffer overflow, SQL injection, cross-site scripting, etc.) + * Full paths of source file(s) related to the manifestation of the issue + * The location of the affected source code (tag/branch/commit or direct URL) + * Any special configuration required to reproduce the issue + * Step-by-step instructions to reproduce the issue + * Proof-of-concept or exploit code (if possible) + * Impact of the issue, including how an attacker might exploit the issue + +This information will help us triage your report more quickly. + +If you are reporting for a bug bounty, more complete reports can contribute to a higher bounty award. Please visit our [Microsoft Bug Bounty Program](https://aka.ms/opensource/security/bounty) page for more details about our active programs. + +## Preferred Languages + +We prefer all communications to be in English. + +## Policy + +Microsoft follows the principle of [Coordinated Vulnerability Disclosure](https://aka.ms/opensource/security/cvd). + + diff --git a/amplify/functions/deleteDocument/node_modules/tslib/modules/index.d.ts b/amplify/functions/deleteDocument/node_modules/tslib/modules/index.d.ts new file mode 100644 index 0000000..3244fab --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/tslib/modules/index.d.ts @@ -0,0 +1,38 @@ +// Note: named reexports are used instead of `export *` because +// TypeScript itself doesn't resolve the `export *` when checking +// if a particular helper exists. 
+export { + __extends, + __assign, + __rest, + __decorate, + __param, + __esDecorate, + __runInitializers, + __propKey, + __setFunctionName, + __metadata, + __awaiter, + __generator, + __exportStar, + __values, + __read, + __spread, + __spreadArrays, + __spreadArray, + __await, + __asyncGenerator, + __asyncDelegator, + __asyncValues, + __makeTemplateObject, + __importStar, + __importDefault, + __classPrivateFieldGet, + __classPrivateFieldSet, + __classPrivateFieldIn, + __createBinding, + __addDisposableResource, + __disposeResources, + __rewriteRelativeImportExtension, +} from '../tslib.js'; +export * as default from '../tslib.js'; diff --git a/amplify/functions/deleteDocument/node_modules/tslib/modules/index.js b/amplify/functions/deleteDocument/node_modules/tslib/modules/index.js new file mode 100644 index 0000000..c91f618 --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/tslib/modules/index.js @@ -0,0 +1,70 @@ +import tslib from '../tslib.js'; +const { + __extends, + __assign, + __rest, + __decorate, + __param, + __esDecorate, + __runInitializers, + __propKey, + __setFunctionName, + __metadata, + __awaiter, + __generator, + __exportStar, + __createBinding, + __values, + __read, + __spread, + __spreadArrays, + __spreadArray, + __await, + __asyncGenerator, + __asyncDelegator, + __asyncValues, + __makeTemplateObject, + __importStar, + __importDefault, + __classPrivateFieldGet, + __classPrivateFieldSet, + __classPrivateFieldIn, + __addDisposableResource, + __disposeResources, + __rewriteRelativeImportExtension, +} = tslib; +export { + __extends, + __assign, + __rest, + __decorate, + __param, + __esDecorate, + __runInitializers, + __propKey, + __setFunctionName, + __metadata, + __awaiter, + __generator, + __exportStar, + __createBinding, + __values, + __read, + __spread, + __spreadArrays, + __spreadArray, + __await, + __asyncGenerator, + __asyncDelegator, + __asyncValues, + __makeTemplateObject, + __importStar, + __importDefault, + 
__classPrivateFieldGet, + __classPrivateFieldSet, + __classPrivateFieldIn, + __addDisposableResource, + __disposeResources, + __rewriteRelativeImportExtension, +}; +export default tslib; diff --git a/amplify/functions/deleteDocument/node_modules/tslib/modules/package.json b/amplify/functions/deleteDocument/node_modules/tslib/modules/package.json new file mode 100644 index 0000000..aafa0e4 --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/tslib/modules/package.json @@ -0,0 +1,3 @@ +{ + "type": "module" +} \ No newline at end of file diff --git a/amplify/functions/deleteDocument/node_modules/tslib/package.json b/amplify/functions/deleteDocument/node_modules/tslib/package.json new file mode 100644 index 0000000..57d0578 --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/tslib/package.json @@ -0,0 +1,47 @@ +{ + "name": "tslib", + "author": "Microsoft Corp.", + "homepage": "https://www.typescriptlang.org/", + "version": "2.8.1", + "license": "0BSD", + "description": "Runtime library for TypeScript helper functions", + "keywords": [ + "TypeScript", + "Microsoft", + "compiler", + "language", + "javascript", + "tslib", + "runtime" + ], + "bugs": { + "url": "https://github.com/Microsoft/TypeScript/issues" + }, + "repository": { + "type": "git", + "url": "https://github.com/Microsoft/tslib.git" + }, + "main": "tslib.js", + "module": "tslib.es6.js", + "jsnext:main": "tslib.es6.js", + "typings": "tslib.d.ts", + "sideEffects": false, + "exports": { + ".": { + "module": { + "types": "./modules/index.d.ts", + "default": "./tslib.es6.mjs" + }, + "import": { + "node": "./modules/index.js", + "default": { + "types": "./modules/index.d.ts", + "default": "./tslib.es6.mjs" + } + }, + "default": "./tslib.js" + }, + "./*": "./*", + "./": "./" + } +} diff --git a/amplify/functions/deleteDocument/node_modules/tslib/tslib.d.ts b/amplify/functions/deleteDocument/node_modules/tslib/tslib.d.ts new file mode 100644 index 0000000..f23df55 --- /dev/null +++ 
b/amplify/functions/deleteDocument/node_modules/tslib/tslib.d.ts @@ -0,0 +1,460 @@ +/****************************************************************************** +Copyright (c) Microsoft Corporation. + +Permission to use, copy, modify, and/or distribute this software for any +purpose with or without fee is hereby granted. + +THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES WITH +REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF MERCHANTABILITY +AND FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR ANY SPECIAL, DIRECT, +INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES WHATSOEVER RESULTING FROM +LOSS OF USE, DATA OR PROFITS, WHETHER IN AN ACTION OF CONTRACT, NEGLIGENCE OR +OTHER TORTIOUS ACTION, ARISING OUT OF OR IN CONNECTION WITH THE USE OR +PERFORMANCE OF THIS SOFTWARE. +***************************************************************************** */ + +/** + * Used to shim class extends. + * + * @param d The derived class. + * @param b The base class. + */ +export declare function __extends(d: Function, b: Function): void; + +/** + * Copy the values of all of the enumerable own properties from one or more source objects to a + * target object. Returns the target object. + * + * @param t The target object to copy to. + * @param sources One or more source objects from which to copy properties + */ +export declare function __assign(t: any, ...sources: any[]): any; + +/** + * Performs a rest spread on an object. + * + * @param t The source value. + * @param propertyNames The property names excluded from the rest spread. + */ +export declare function __rest(t: any, propertyNames: (string | symbol)[]): any; + +/** + * Applies decorators to a target object + * + * @param decorators The set of decorators to apply. + * @param target The target object. + * @param key If specified, the own property to apply the decorators to. + * @param desc The property descriptor, defaults to fetching the descriptor from the target object. 
+ * @experimental + */ +export declare function __decorate(decorators: Function[], target: any, key?: string | symbol, desc?: any): any; + +/** + * Creates an observing function decorator from a parameter decorator. + * + * @param paramIndex The parameter index to apply the decorator to. + * @param decorator The parameter decorator to apply. Note that the return value is ignored. + * @experimental + */ +export declare function __param(paramIndex: number, decorator: Function): Function; + +/** + * Applies decorators to a class or class member, following the native ECMAScript decorator specification. + * @param ctor For non-field class members, the class constructor. Otherwise, `null`. + * @param descriptorIn The `PropertyDescriptor` to use when unable to look up the property from `ctor`. + * @param decorators The decorators to apply + * @param contextIn The `DecoratorContext` to clone for each decorator application. + * @param initializers An array of field initializer mutation functions into which new initializers are written. + * @param extraInitializers An array of extra initializer functions into which new initializers are written. + */ +export declare function __esDecorate(ctor: Function | null, descriptorIn: object | null, decorators: Function[], contextIn: object, initializers: Function[] | null, extraInitializers: Function[]): void; + +/** + * Runs field initializers or extra initializers generated by `__esDecorate`. + * @param thisArg The `this` argument to use. + * @param initializers The array of initializers to evaluate. + * @param value The initial value to pass to the initializers. + */ +export declare function __runInitializers(thisArg: unknown, initializers: Function[], value?: any): any; + +/** + * Converts a computed property name into a `string` or `symbol` value. + */ +export declare function __propKey(x: any): string | symbol; + +/** + * Assigns the name of a function derived from the left-hand side of an assignment. 
+ * @param f The function to rename. + * @param name The new name for the function. + * @param prefix A prefix (such as `"get"` or `"set"`) to insert before the name. + */ +export declare function __setFunctionName(f: Function, name: string | symbol, prefix?: string): Function; + +/** + * Creates a decorator that sets metadata. + * + * @param metadataKey The metadata key + * @param metadataValue The metadata value + * @experimental + */ +export declare function __metadata(metadataKey: any, metadataValue: any): Function; + +/** + * Converts a generator function into a pseudo-async function, by treating each `yield` as an `await`. + * + * @param thisArg The reference to use as the `this` value in the generator function + * @param _arguments The optional arguments array + * @param P The optional promise constructor argument, defaults to the `Promise` property of the global object. + * @param generator The generator function + */ +export declare function __awaiter(thisArg: any, _arguments: any, P: Function, generator: Function): any; + +/** + * Creates an Iterator object using the body as the implementation. + * + * @param thisArg The reference to use as the `this` value in the function + * @param body The generator state-machine based implementation. + * + * @see [./docs/generator.md] + */ +export declare function __generator(thisArg: any, body: Function): any; + +/** + * Creates bindings for all enumerable properties of `m` on `exports` + * + * @param m The source object + * @param o The `exports` object. + */ +export declare function __exportStar(m: any, o: any): void; + +/** + * Creates a value iterator from an `Iterable` or `ArrayLike` object. + * + * @param o The object. + * @throws {TypeError} If `o` is neither `Iterable`, nor an `ArrayLike`. + */ +export declare function __values(o: any): any; + +/** + * Reads values from an `Iterable` or `ArrayLike` object and returns the resulting array. + * + * @param o The object to read from. 
+ * @param n The maximum number of arguments to read, defaults to `Infinity`. + */ +export declare function __read(o: any, n?: number): any[]; + +/** + * Creates an array from iterable spread. + * + * @param args The Iterable objects to spread. + * @deprecated since TypeScript 4.2 - Use `__spreadArray` + */ +export declare function __spread(...args: any[][]): any[]; + +/** + * Creates an array from array spread. + * + * @param args The ArrayLikes to spread into the resulting array. + * @deprecated since TypeScript 4.2 - Use `__spreadArray` + */ +export declare function __spreadArrays(...args: any[][]): any[]; + +/** + * Spreads the `from` array into the `to` array. + * + * @param pack Replace empty elements with `undefined`. + */ +export declare function __spreadArray(to: any[], from: any[], pack?: boolean): any[]; + +/** + * Creates an object that signals to `__asyncGenerator` that it shouldn't be yielded, + * and instead should be awaited and the resulting value passed back to the generator. + * + * @param v The value to await. + */ +export declare function __await(v: any): any; + +/** + * Converts a generator function into an async generator function, by using `yield __await` + * in place of normal `await`. + * + * @param thisArg The reference to use as the `this` value in the generator function + * @param _arguments The optional arguments array + * @param generator The generator function + */ +export declare function __asyncGenerator(thisArg: any, _arguments: any, generator: Function): any; + +/** + * Used to wrap a potentially async iterator in such a way so that it wraps the result + * of calling iterator methods of `o` in `__await` instances, and then yields the awaited values. + * + * @param o The potentially async iterator. + * @returns A synchronous iterator yielding `__await` instances on every odd invocation + * and returning the awaited `IteratorResult` passed to `next` every even invocation. 
+ */ +export declare function __asyncDelegator(o: any): any; + +/** + * Creates a value async iterator from an `AsyncIterable`, `Iterable` or `ArrayLike` object. + * + * @param o The object. + * @throws {TypeError} If `o` is neither `AsyncIterable`, `Iterable`, nor an `ArrayLike`. + */ +export declare function __asyncValues(o: any): any; + +/** + * Creates a `TemplateStringsArray` frozen object from the `cooked` and `raw` arrays. + * + * @param cooked The cooked possibly-sparse array. + * @param raw The raw string content. + */ +export declare function __makeTemplateObject(cooked: string[], raw: string[]): TemplateStringsArray; + +/** + * Used to shim default and named imports in ECMAScript Modules transpiled to CommonJS. + * + * ```js + * import Default, { Named, Other } from "mod"; + * // or + * import { default as Default, Named, Other } from "mod"; + * ``` + * + * @param mod The CommonJS module exports object. + */ +export declare function __importStar(mod: T): T; + +/** + * Used to shim default imports in ECMAScript Modules transpiled to CommonJS. + * + * ```js + * import Default from "mod"; + * ``` + * + * @param mod The CommonJS module exports object. + */ +export declare function __importDefault(mod: T): T | { default: T }; + +/** + * Emulates reading a private instance field. + * + * @param receiver The instance from which to read the private field. + * @param state A WeakMap containing the private field value for an instance. + * @param kind Either `"f"` for a field, `"a"` for an accessor, or `"m"` for a method. + * + * @throws {TypeError} If `state` doesn't have an entry for `receiver`. + */ +export declare function __classPrivateFieldGet( + receiver: T, + state: { has(o: T): boolean, get(o: T): V | undefined }, + kind?: "f" +): V; + +/** + * Emulates reading a private static field. + * + * @param receiver The object from which to read the private static field. + * @param state The class constructor containing the definition of the static field. 
+ * @param kind Either `"f"` for a field, `"a"` for an accessor, or `"m"` for a method. + * @param f The descriptor that holds the static field value. + * + * @throws {TypeError} If `receiver` is not `state`. + */ +export declare function __classPrivateFieldGet unknown, V>( + receiver: T, + state: T, + kind: "f", + f: { value: V } +): V; + +/** + * Emulates evaluating a private instance "get" accessor. + * + * @param receiver The instance on which to evaluate the private "get" accessor. + * @param state A WeakSet used to verify an instance supports the private "get" accessor. + * @param kind Either `"f"` for a field, `"a"` for an accessor, or `"m"` for a method. + * @param f The "get" accessor function to evaluate. + * + * @throws {TypeError} If `state` doesn't have an entry for `receiver`. + */ +export declare function __classPrivateFieldGet( + receiver: T, + state: { has(o: T): boolean }, + kind: "a", + f: () => V +): V; + +/** + * Emulates evaluating a private static "get" accessor. + * + * @param receiver The object on which to evaluate the private static "get" accessor. + * @param state The class constructor containing the definition of the static "get" accessor. + * @param kind Either `"f"` for a field, `"a"` for an accessor, or `"m"` for a method. + * @param f The "get" accessor function to evaluate. + * + * @throws {TypeError} If `receiver` is not `state`. + */ +export declare function __classPrivateFieldGet unknown, V>( + receiver: T, + state: T, + kind: "a", + f: () => V +): V; + +/** + * Emulates reading a private instance method. + * + * @param receiver The instance from which to read a private method. + * @param state A WeakSet used to verify an instance supports the private method. + * @param kind Either `"f"` for a field, `"a"` for an accessor, or `"m"` for a method. + * @param f The function to return as the private instance method. + * + * @throws {TypeError} If `state` doesn't have an entry for `receiver`. 
+ */ +export declare function __classPrivateFieldGet unknown>( + receiver: T, + state: { has(o: T): boolean }, + kind: "m", + f: V +): V; + +/** + * Emulates reading a private static method. + * + * @param receiver The object from which to read the private static method. + * @param state The class constructor containing the definition of the static method. + * @param kind Either `"f"` for a field, `"a"` for an accessor, or `"m"` for a method. + * @param f The function to return as the private static method. + * + * @throws {TypeError} If `receiver` is not `state`. + */ +export declare function __classPrivateFieldGet unknown, V extends (...args: any[]) => unknown>( + receiver: T, + state: T, + kind: "m", + f: V +): V; + +/** + * Emulates writing to a private instance field. + * + * @param receiver The instance on which to set a private field value. + * @param state A WeakMap used to store the private field value for an instance. + * @param value The value to store in the private field. + * @param kind Either `"f"` for a field, `"a"` for an accessor, or `"m"` for a method. + * + * @throws {TypeError} If `state` doesn't have an entry for `receiver`. + */ +export declare function __classPrivateFieldSet( + receiver: T, + state: { has(o: T): boolean, set(o: T, value: V): unknown }, + value: V, + kind?: "f" +): V; + +/** + * Emulates writing to a private static field. + * + * @param receiver The object on which to set the private static field. + * @param state The class constructor containing the definition of the private static field. + * @param value The value to store in the private field. + * @param kind Either `"f"` for a field, `"a"` for an accessor, or `"m"` for a method. + * @param f The descriptor that holds the static field value. + * + * @throws {TypeError} If `receiver` is not `state`. 
+ */ +export declare function __classPrivateFieldSet unknown, V>( + receiver: T, + state: T, + value: V, + kind: "f", + f: { value: V } +): V; + +/** + * Emulates writing to a private instance "set" accessor. + * + * @param receiver The instance on which to evaluate the private instance "set" accessor. + * @param state A WeakSet used to verify an instance supports the private "set" accessor. + * @param value The value to store in the private accessor. + * @param kind Either `"f"` for a field, `"a"` for an accessor, or `"m"` for a method. + * @param f The "set" accessor function to evaluate. + * + * @throws {TypeError} If `state` doesn't have an entry for `receiver`. + */ +export declare function __classPrivateFieldSet( + receiver: T, + state: { has(o: T): boolean }, + value: V, + kind: "a", + f: (v: V) => void +): V; + +/** + * Emulates writing to a private static "set" accessor. + * + * @param receiver The object on which to evaluate the private static "set" accessor. + * @param state The class constructor containing the definition of the static "set" accessor. + * @param value The value to store in the private field. + * @param kind Either `"f"` for a field, `"a"` for an accessor, or `"m"` for a method. + * @param f The "set" accessor function to evaluate. + * + * @throws {TypeError} If `receiver` is not `state`. + */ +export declare function __classPrivateFieldSet unknown, V>( + receiver: T, + state: T, + value: V, + kind: "a", + f: (v: V) => void +): V; + +/** + * Checks for the existence of a private field/method/accessor. + * + * @param state The class constructor containing the static member, or the WeakMap or WeakSet associated with a private instance member. + * @param receiver The object for which to test the presence of the private member. 
+ */ +export declare function __classPrivateFieldIn( + state: (new (...args: any[]) => unknown) | { has(o: any): boolean }, + receiver: unknown, +): boolean; + +/** + * Creates a re-export binding on `object` with key `objectKey` that references `target[key]`. + * + * @param object The local `exports` object. + * @param target The object to re-export from. + * @param key The property key of `target` to re-export. + * @param objectKey The property key to re-export as. Defaults to `key`. + */ +export declare function __createBinding(object: object, target: object, key: PropertyKey, objectKey?: PropertyKey): void; + +/** + * Adds a disposable resource to a resource-tracking environment object. + * @param env A resource-tracking environment object. + * @param value Either a Disposable or AsyncDisposable object, `null`, or `undefined`. + * @param async When `true`, `AsyncDisposable` resources can be added. When `false`, `AsyncDisposable` resources cannot be added. + * @returns The {@link value} argument. + * + * @throws {TypeError} If {@link value} is not an object, or if either `Symbol.dispose` or `Symbol.asyncDispose` are not + * defined, or if {@link value} does not have an appropriate `Symbol.dispose` or `Symbol.asyncDispose` method. + */ +export declare function __addDisposableResource(env: { stack: { value?: unknown, dispose?: Function, async: boolean }[]; error: unknown; hasError: boolean; }, value: T, async: boolean): T; + +/** + * Disposes all resources in a resource-tracking environment object. + * @param env A resource-tracking environment object. + * @returns A {@link Promise} if any resources in the environment were marked as `async` when added; otherwise, `void`. + * + * @throws {SuppressedError} if an error thrown during disposal would have suppressed a prior error from disposal or the + * error recorded in the resource-tracking environment object. 
+ * @seealso {@link __addDisposableResource} + */ +export declare function __disposeResources(env: { stack: { value?: unknown, dispose?: Function, async: boolean }[]; error: unknown; hasError: boolean; }): any; + +/** + * Transforms a relative import specifier ending in a non-declaration TypeScript file extension to its JavaScript file extension counterpart. + * @param path The import specifier. + * @param preserveJsx Causes '*.tsx' to transform to '*.jsx' instead of '*.js'. Should be true when `--jsx` is set to `preserve`. + */ +export declare function __rewriteRelativeImportExtension(path: string, preserveJsx?: boolean): string; \ No newline at end of file diff --git a/amplify/functions/deleteDocument/node_modules/tslib/tslib.es6.html b/amplify/functions/deleteDocument/node_modules/tslib/tslib.es6.html new file mode 100644 index 0000000..b122e41 --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/tslib/tslib.es6.html @@ -0,0 +1 @@ + \ No newline at end of file diff --git a/amplify/functions/deleteDocument/node_modules/tslib/tslib.es6.js b/amplify/functions/deleteDocument/node_modules/tslib/tslib.es6.js new file mode 100644 index 0000000..6c1739b --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/tslib/tslib.es6.js @@ -0,0 +1,402 @@ +/****************************************************************************** +Copyright (c) Microsoft Corporation. + +Permission to use, copy, modify, and/or distribute this software for any +purpose with or without fee is hereby granted. + +THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES WITH +REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF MERCHANTABILITY +AND FITNESS. 
IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR ANY SPECIAL, DIRECT, +INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES WHATSOEVER RESULTING FROM +LOSS OF USE, DATA OR PROFITS, WHETHER IN AN ACTION OF CONTRACT, NEGLIGENCE OR +OTHER TORTIOUS ACTION, ARISING OUT OF OR IN CONNECTION WITH THE USE OR +PERFORMANCE OF THIS SOFTWARE. +***************************************************************************** */ +/* global Reflect, Promise, SuppressedError, Symbol, Iterator */ + +var extendStatics = function(d, b) { + extendStatics = Object.setPrototypeOf || + ({ __proto__: [] } instanceof Array && function (d, b) { d.__proto__ = b; }) || + function (d, b) { for (var p in b) if (Object.prototype.hasOwnProperty.call(b, p)) d[p] = b[p]; }; + return extendStatics(d, b); +}; + +export function __extends(d, b) { + if (typeof b !== "function" && b !== null) + throw new TypeError("Class extends value " + String(b) + " is not a constructor or null"); + extendStatics(d, b); + function __() { this.constructor = d; } + d.prototype = b === null ? Object.create(b) : (__.prototype = b.prototype, new __()); +} + +export var __assign = function() { + __assign = Object.assign || function __assign(t) { + for (var s, i = 1, n = arguments.length; i < n; i++) { + s = arguments[i]; + for (var p in s) if (Object.prototype.hasOwnProperty.call(s, p)) t[p] = s[p]; + } + return t; + } + return __assign.apply(this, arguments); +} + +export function __rest(s, e) { + var t = {}; + for (var p in s) if (Object.prototype.hasOwnProperty.call(s, p) && e.indexOf(p) < 0) + t[p] = s[p]; + if (s != null && typeof Object.getOwnPropertySymbols === "function") + for (var i = 0, p = Object.getOwnPropertySymbols(s); i < p.length; i++) { + if (e.indexOf(p[i]) < 0 && Object.prototype.propertyIsEnumerable.call(s, p[i])) + t[p[i]] = s[p[i]]; + } + return t; +} + +export function __decorate(decorators, target, key, desc) { + var c = arguments.length, r = c < 3 ? target : desc === null ? 
desc = Object.getOwnPropertyDescriptor(target, key) : desc, d; + if (typeof Reflect === "object" && typeof Reflect.decorate === "function") r = Reflect.decorate(decorators, target, key, desc); + else for (var i = decorators.length - 1; i >= 0; i--) if (d = decorators[i]) r = (c < 3 ? d(r) : c > 3 ? d(target, key, r) : d(target, key)) || r; + return c > 3 && r && Object.defineProperty(target, key, r), r; +} + +export function __param(paramIndex, decorator) { + return function (target, key) { decorator(target, key, paramIndex); } +} + +export function __esDecorate(ctor, descriptorIn, decorators, contextIn, initializers, extraInitializers) { + function accept(f) { if (f !== void 0 && typeof f !== "function") throw new TypeError("Function expected"); return f; } + var kind = contextIn.kind, key = kind === "getter" ? "get" : kind === "setter" ? "set" : "value"; + var target = !descriptorIn && ctor ? contextIn["static"] ? ctor : ctor.prototype : null; + var descriptor = descriptorIn || (target ? Object.getOwnPropertyDescriptor(target, contextIn.name) : {}); + var _, done = false; + for (var i = decorators.length - 1; i >= 0; i--) { + var context = {}; + for (var p in contextIn) context[p] = p === "access" ? {} : contextIn[p]; + for (var p in contextIn.access) context.access[p] = contextIn.access[p]; + context.addInitializer = function (f) { if (done) throw new TypeError("Cannot add initializers after decoration has completed"); extraInitializers.push(accept(f || null)); }; + var result = (0, decorators[i])(kind === "accessor" ? 
{ get: descriptor.get, set: descriptor.set } : descriptor[key], context); + if (kind === "accessor") { + if (result === void 0) continue; + if (result === null || typeof result !== "object") throw new TypeError("Object expected"); + if (_ = accept(result.get)) descriptor.get = _; + if (_ = accept(result.set)) descriptor.set = _; + if (_ = accept(result.init)) initializers.unshift(_); + } + else if (_ = accept(result)) { + if (kind === "field") initializers.unshift(_); + else descriptor[key] = _; + } + } + if (target) Object.defineProperty(target, contextIn.name, descriptor); + done = true; +}; + +export function __runInitializers(thisArg, initializers, value) { + var useValue = arguments.length > 2; + for (var i = 0; i < initializers.length; i++) { + value = useValue ? initializers[i].call(thisArg, value) : initializers[i].call(thisArg); + } + return useValue ? value : void 0; +}; + +export function __propKey(x) { + return typeof x === "symbol" ? x : "".concat(x); +}; + +export function __setFunctionName(f, name, prefix) { + if (typeof name === "symbol") name = name.description ? "[".concat(name.description, "]") : ""; + return Object.defineProperty(f, "name", { configurable: true, value: prefix ? "".concat(prefix, " ", name) : name }); +}; + +export function __metadata(metadataKey, metadataValue) { + if (typeof Reflect === "object" && typeof Reflect.metadata === "function") return Reflect.metadata(metadataKey, metadataValue); +} + +export function __awaiter(thisArg, _arguments, P, generator) { + function adopt(value) { return value instanceof P ? value : new P(function (resolve) { resolve(value); }); } + return new (P || (P = Promise))(function (resolve, reject) { + function fulfilled(value) { try { step(generator.next(value)); } catch (e) { reject(e); } } + function rejected(value) { try { step(generator["throw"](value)); } catch (e) { reject(e); } } + function step(result) { result.done ? 
resolve(result.value) : adopt(result.value).then(fulfilled, rejected); } + step((generator = generator.apply(thisArg, _arguments || [])).next()); + }); +} + +export function __generator(thisArg, body) { + var _ = { label: 0, sent: function() { if (t[0] & 1) throw t[1]; return t[1]; }, trys: [], ops: [] }, f, y, t, g = Object.create((typeof Iterator === "function" ? Iterator : Object).prototype); + return g.next = verb(0), g["throw"] = verb(1), g["return"] = verb(2), typeof Symbol === "function" && (g[Symbol.iterator] = function() { return this; }), g; + function verb(n) { return function (v) { return step([n, v]); }; } + function step(op) { + if (f) throw new TypeError("Generator is already executing."); + while (g && (g = 0, op[0] && (_ = 0)), _) try { + if (f = 1, y && (t = op[0] & 2 ? y["return"] : op[0] ? y["throw"] || ((t = y["return"]) && t.call(y), 0) : y.next) && !(t = t.call(y, op[1])).done) return t; + if (y = 0, t) op = [op[0] & 2, t.value]; + switch (op[0]) { + case 0: case 1: t = op; break; + case 4: _.label++; return { value: op[1], done: false }; + case 5: _.label++; y = op[1]; op = [0]; continue; + case 7: op = _.ops.pop(); _.trys.pop(); continue; + default: + if (!(t = _.trys, t = t.length > 0 && t[t.length - 1]) && (op[0] === 6 || op[0] === 2)) { _ = 0; continue; } + if (op[0] === 3 && (!t || (op[1] > t[0] && op[1] < t[3]))) { _.label = op[1]; break; } + if (op[0] === 6 && _.label < t[1]) { _.label = t[1]; t = op; break; } + if (t && _.label < t[2]) { _.label = t[2]; _.ops.push(op); break; } + if (t[2]) _.ops.pop(); + _.trys.pop(); continue; + } + op = body.call(thisArg, _); + } catch (e) { op = [6, e]; y = 0; } finally { f = t = 0; } + if (op[0] & 5) throw op[1]; return { value: op[0] ? op[1] : void 0, done: true }; + } +} + +export var __createBinding = Object.create ? (function(o, m, k, k2) { + if (k2 === undefined) k2 = k; + var desc = Object.getOwnPropertyDescriptor(m, k); + if (!desc || ("get" in desc ? 
!m.__esModule : desc.writable || desc.configurable)) { + desc = { enumerable: true, get: function() { return m[k]; } }; + } + Object.defineProperty(o, k2, desc); +}) : (function(o, m, k, k2) { + if (k2 === undefined) k2 = k; + o[k2] = m[k]; +}); + +export function __exportStar(m, o) { + for (var p in m) if (p !== "default" && !Object.prototype.hasOwnProperty.call(o, p)) __createBinding(o, m, p); +} + +export function __values(o) { + var s = typeof Symbol === "function" && Symbol.iterator, m = s && o[s], i = 0; + if (m) return m.call(o); + if (o && typeof o.length === "number") return { + next: function () { + if (o && i >= o.length) o = void 0; + return { value: o && o[i++], done: !o }; + } + }; + throw new TypeError(s ? "Object is not iterable." : "Symbol.iterator is not defined."); +} + +export function __read(o, n) { + var m = typeof Symbol === "function" && o[Symbol.iterator]; + if (!m) return o; + var i = m.call(o), r, ar = [], e; + try { + while ((n === void 0 || n-- > 0) && !(r = i.next()).done) ar.push(r.value); + } + catch (error) { e = { error: error }; } + finally { + try { + if (r && !r.done && (m = i["return"])) m.call(i); + } + finally { if (e) throw e.error; } + } + return ar; +} + +/** @deprecated */ +export function __spread() { + for (var ar = [], i = 0; i < arguments.length; i++) + ar = ar.concat(__read(arguments[i])); + return ar; +} + +/** @deprecated */ +export function __spreadArrays() { + for (var s = 0, i = 0, il = arguments.length; i < il; i++) s += arguments[i].length; + for (var r = Array(s), k = 0, i = 0; i < il; i++) + for (var a = arguments[i], j = 0, jl = a.length; j < jl; j++, k++) + r[k] = a[j]; + return r; +} + +export function __spreadArray(to, from, pack) { + if (pack || arguments.length === 2) for (var i = 0, l = from.length, ar; i < l; i++) { + if (ar || !(i in from)) { + if (!ar) ar = Array.prototype.slice.call(from, 0, i); + ar[i] = from[i]; + } + } + return to.concat(ar || Array.prototype.slice.call(from)); +} + +export 
function __await(v) { + return this instanceof __await ? (this.v = v, this) : new __await(v); +} + +export function __asyncGenerator(thisArg, _arguments, generator) { + if (!Symbol.asyncIterator) throw new TypeError("Symbol.asyncIterator is not defined."); + var g = generator.apply(thisArg, _arguments || []), i, q = []; + return i = Object.create((typeof AsyncIterator === "function" ? AsyncIterator : Object).prototype), verb("next"), verb("throw"), verb("return", awaitReturn), i[Symbol.asyncIterator] = function () { return this; }, i; + function awaitReturn(f) { return function (v) { return Promise.resolve(v).then(f, reject); }; } + function verb(n, f) { if (g[n]) { i[n] = function (v) { return new Promise(function (a, b) { q.push([n, v, a, b]) > 1 || resume(n, v); }); }; if (f) i[n] = f(i[n]); } } + function resume(n, v) { try { step(g[n](v)); } catch (e) { settle(q[0][3], e); } } + function step(r) { r.value instanceof __await ? Promise.resolve(r.value.v).then(fulfill, reject) : settle(q[0][2], r); } + function fulfill(value) { resume("next", value); } + function reject(value) { resume("throw", value); } + function settle(f, v) { if (f(v), q.shift(), q.length) resume(q[0][0], q[0][1]); } +} + +export function __asyncDelegator(o) { + var i, p; + return i = {}, verb("next"), verb("throw", function (e) { throw e; }), verb("return"), i[Symbol.iterator] = function () { return this; }, i; + function verb(n, f) { i[n] = o[n] ? function (v) { return (p = !p) ? { value: __await(o[n](v)), done: false } : f ? f(v) : v; } : f; } +} + +export function __asyncValues(o) { + if (!Symbol.asyncIterator) throw new TypeError("Symbol.asyncIterator is not defined."); + var m = o[Symbol.asyncIterator], i; + return m ? m.call(o) : (o = typeof __values === "function" ? 
__values(o) : o[Symbol.iterator](), i = {}, verb("next"), verb("throw"), verb("return"), i[Symbol.asyncIterator] = function () { return this; }, i); + function verb(n) { i[n] = o[n] && function (v) { return new Promise(function (resolve, reject) { v = o[n](v), settle(resolve, reject, v.done, v.value); }); }; } + function settle(resolve, reject, d, v) { Promise.resolve(v).then(function(v) { resolve({ value: v, done: d }); }, reject); } +} + +export function __makeTemplateObject(cooked, raw) { + if (Object.defineProperty) { Object.defineProperty(cooked, "raw", { value: raw }); } else { cooked.raw = raw; } + return cooked; +}; + +var __setModuleDefault = Object.create ? (function(o, v) { + Object.defineProperty(o, "default", { enumerable: true, value: v }); +}) : function(o, v) { + o["default"] = v; +}; + +var ownKeys = function(o) { + ownKeys = Object.getOwnPropertyNames || function (o) { + var ar = []; + for (var k in o) if (Object.prototype.hasOwnProperty.call(o, k)) ar[ar.length] = k; + return ar; + }; + return ownKeys(o); +}; + +export function __importStar(mod) { + if (mod && mod.__esModule) return mod; + var result = {}; + if (mod != null) for (var k = ownKeys(mod), i = 0; i < k.length; i++) if (k[i] !== "default") __createBinding(result, mod, k[i]); + __setModuleDefault(result, mod); + return result; +} + +export function __importDefault(mod) { + return (mod && mod.__esModule) ? mod : { default: mod }; +} + +export function __classPrivateFieldGet(receiver, state, kind, f) { + if (kind === "a" && !f) throw new TypeError("Private accessor was defined without a getter"); + if (typeof state === "function" ? receiver !== state || !f : !state.has(receiver)) throw new TypeError("Cannot read private member from an object whose class did not declare it"); + return kind === "m" ? f : kind === "a" ? f.call(receiver) : f ? 
f.value : state.get(receiver); +} + +export function __classPrivateFieldSet(receiver, state, value, kind, f) { + if (kind === "m") throw new TypeError("Private method is not writable"); + if (kind === "a" && !f) throw new TypeError("Private accessor was defined without a setter"); + if (typeof state === "function" ? receiver !== state || !f : !state.has(receiver)) throw new TypeError("Cannot write private member to an object whose class did not declare it"); + return (kind === "a" ? f.call(receiver, value) : f ? f.value = value : state.set(receiver, value)), value; +} + +export function __classPrivateFieldIn(state, receiver) { + if (receiver === null || (typeof receiver !== "object" && typeof receiver !== "function")) throw new TypeError("Cannot use 'in' operator on non-object"); + return typeof state === "function" ? receiver === state : state.has(receiver); +} + +export function __addDisposableResource(env, value, async) { + if (value !== null && value !== void 0) { + if (typeof value !== "object" && typeof value !== "function") throw new TypeError("Object expected."); + var dispose, inner; + if (async) { + if (!Symbol.asyncDispose) throw new TypeError("Symbol.asyncDispose is not defined."); + dispose = value[Symbol.asyncDispose]; + } + if (dispose === void 0) { + if (!Symbol.dispose) throw new TypeError("Symbol.dispose is not defined."); + dispose = value[Symbol.dispose]; + if (async) inner = dispose; + } + if (typeof dispose !== "function") throw new TypeError("Object not disposable."); + if (inner) dispose = function() { try { inner.call(this); } catch (e) { return Promise.reject(e); } }; + env.stack.push({ value: value, dispose: dispose, async: async }); + } + else if (async) { + env.stack.push({ async: true }); + } + return value; + +} + +var _SuppressedError = typeof SuppressedError === "function" ? 
SuppressedError : function (error, suppressed, message) { + var e = new Error(message); + return e.name = "SuppressedError", e.error = error, e.suppressed = suppressed, e; +}; + +export function __disposeResources(env) { + function fail(e) { + env.error = env.hasError ? new _SuppressedError(e, env.error, "An error was suppressed during disposal.") : e; + env.hasError = true; + } + var r, s = 0; + function next() { + while (r = env.stack.pop()) { + try { + if (!r.async && s === 1) return s = 0, env.stack.push(r), Promise.resolve().then(next); + if (r.dispose) { + var result = r.dispose.call(r.value); + if (r.async) return s |= 2, Promise.resolve(result).then(next, function(e) { fail(e); return next(); }); + } + else s |= 1; + } + catch (e) { + fail(e); + } + } + if (s === 1) return env.hasError ? Promise.reject(env.error) : Promise.resolve(); + if (env.hasError) throw env.error; + } + return next(); +} + +export function __rewriteRelativeImportExtension(path, preserveJsx) { + if (typeof path === "string" && /^\.\.?\//.test(path)) { + return path.replace(/\.(tsx)$|((?:\.d)?)((?:\.[^./]+?)?)\.([cm]?)ts$/i, function (m, tsx, d, ext, cm) { + return tsx ? preserveJsx ? ".jsx" : ".js" : d && (!ext || !cm) ? m : (d + ext + "." 
+ cm.toLowerCase() + "js"); + }); + } + return path; +} + +export default { + __extends: __extends, + __assign: __assign, + __rest: __rest, + __decorate: __decorate, + __param: __param, + __esDecorate: __esDecorate, + __runInitializers: __runInitializers, + __propKey: __propKey, + __setFunctionName: __setFunctionName, + __metadata: __metadata, + __awaiter: __awaiter, + __generator: __generator, + __createBinding: __createBinding, + __exportStar: __exportStar, + __values: __values, + __read: __read, + __spread: __spread, + __spreadArrays: __spreadArrays, + __spreadArray: __spreadArray, + __await: __await, + __asyncGenerator: __asyncGenerator, + __asyncDelegator: __asyncDelegator, + __asyncValues: __asyncValues, + __makeTemplateObject: __makeTemplateObject, + __importStar: __importStar, + __importDefault: __importDefault, + __classPrivateFieldGet: __classPrivateFieldGet, + __classPrivateFieldSet: __classPrivateFieldSet, + __classPrivateFieldIn: __classPrivateFieldIn, + __addDisposableResource: __addDisposableResource, + __disposeResources: __disposeResources, + __rewriteRelativeImportExtension: __rewriteRelativeImportExtension, +}; diff --git a/amplify/functions/deleteDocument/node_modules/tslib/tslib.es6.mjs b/amplify/functions/deleteDocument/node_modules/tslib/tslib.es6.mjs new file mode 100644 index 0000000..c17990a --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/tslib/tslib.es6.mjs @@ -0,0 +1,401 @@ +/****************************************************************************** +Copyright (c) Microsoft Corporation. + +Permission to use, copy, modify, and/or distribute this software for any +purpose with or without fee is hereby granted. + +THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES WITH +REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF MERCHANTABILITY +AND FITNESS. 
IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR ANY SPECIAL, DIRECT, +INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES WHATSOEVER RESULTING FROM +LOSS OF USE, DATA OR PROFITS, WHETHER IN AN ACTION OF CONTRACT, NEGLIGENCE OR +OTHER TORTIOUS ACTION, ARISING OUT OF OR IN CONNECTION WITH THE USE OR +PERFORMANCE OF THIS SOFTWARE. +***************************************************************************** */ +/* global Reflect, Promise, SuppressedError, Symbol, Iterator */ + +var extendStatics = function(d, b) { + extendStatics = Object.setPrototypeOf || + ({ __proto__: [] } instanceof Array && function (d, b) { d.__proto__ = b; }) || + function (d, b) { for (var p in b) if (Object.prototype.hasOwnProperty.call(b, p)) d[p] = b[p]; }; + return extendStatics(d, b); +}; + +export function __extends(d, b) { + if (typeof b !== "function" && b !== null) + throw new TypeError("Class extends value " + String(b) + " is not a constructor or null"); + extendStatics(d, b); + function __() { this.constructor = d; } + d.prototype = b === null ? Object.create(b) : (__.prototype = b.prototype, new __()); +} + +export var __assign = function() { + __assign = Object.assign || function __assign(t) { + for (var s, i = 1, n = arguments.length; i < n; i++) { + s = arguments[i]; + for (var p in s) if (Object.prototype.hasOwnProperty.call(s, p)) t[p] = s[p]; + } + return t; + } + return __assign.apply(this, arguments); +} + +export function __rest(s, e) { + var t = {}; + for (var p in s) if (Object.prototype.hasOwnProperty.call(s, p) && e.indexOf(p) < 0) + t[p] = s[p]; + if (s != null && typeof Object.getOwnPropertySymbols === "function") + for (var i = 0, p = Object.getOwnPropertySymbols(s); i < p.length; i++) { + if (e.indexOf(p[i]) < 0 && Object.prototype.propertyIsEnumerable.call(s, p[i])) + t[p[i]] = s[p[i]]; + } + return t; +} + +export function __decorate(decorators, target, key, desc) { + var c = arguments.length, r = c < 3 ? target : desc === null ? 
desc = Object.getOwnPropertyDescriptor(target, key) : desc, d; + if (typeof Reflect === "object" && typeof Reflect.decorate === "function") r = Reflect.decorate(decorators, target, key, desc); + else for (var i = decorators.length - 1; i >= 0; i--) if (d = decorators[i]) r = (c < 3 ? d(r) : c > 3 ? d(target, key, r) : d(target, key)) || r; + return c > 3 && r && Object.defineProperty(target, key, r), r; +} + +export function __param(paramIndex, decorator) { + return function (target, key) { decorator(target, key, paramIndex); } +} + +export function __esDecorate(ctor, descriptorIn, decorators, contextIn, initializers, extraInitializers) { + function accept(f) { if (f !== void 0 && typeof f !== "function") throw new TypeError("Function expected"); return f; } + var kind = contextIn.kind, key = kind === "getter" ? "get" : kind === "setter" ? "set" : "value"; + var target = !descriptorIn && ctor ? contextIn["static"] ? ctor : ctor.prototype : null; + var descriptor = descriptorIn || (target ? Object.getOwnPropertyDescriptor(target, contextIn.name) : {}); + var _, done = false; + for (var i = decorators.length - 1; i >= 0; i--) { + var context = {}; + for (var p in contextIn) context[p] = p === "access" ? {} : contextIn[p]; + for (var p in contextIn.access) context.access[p] = contextIn.access[p]; + context.addInitializer = function (f) { if (done) throw new TypeError("Cannot add initializers after decoration has completed"); extraInitializers.push(accept(f || null)); }; + var result = (0, decorators[i])(kind === "accessor" ? 
{ get: descriptor.get, set: descriptor.set } : descriptor[key], context); + if (kind === "accessor") { + if (result === void 0) continue; + if (result === null || typeof result !== "object") throw new TypeError("Object expected"); + if (_ = accept(result.get)) descriptor.get = _; + if (_ = accept(result.set)) descriptor.set = _; + if (_ = accept(result.init)) initializers.unshift(_); + } + else if (_ = accept(result)) { + if (kind === "field") initializers.unshift(_); + else descriptor[key] = _; + } + } + if (target) Object.defineProperty(target, contextIn.name, descriptor); + done = true; +}; + +export function __runInitializers(thisArg, initializers, value) { + var useValue = arguments.length > 2; + for (var i = 0; i < initializers.length; i++) { + value = useValue ? initializers[i].call(thisArg, value) : initializers[i].call(thisArg); + } + return useValue ? value : void 0; +}; + +export function __propKey(x) { + return typeof x === "symbol" ? x : "".concat(x); +}; + +export function __setFunctionName(f, name, prefix) { + if (typeof name === "symbol") name = name.description ? "[".concat(name.description, "]") : ""; + return Object.defineProperty(f, "name", { configurable: true, value: prefix ? "".concat(prefix, " ", name) : name }); +}; + +export function __metadata(metadataKey, metadataValue) { + if (typeof Reflect === "object" && typeof Reflect.metadata === "function") return Reflect.metadata(metadataKey, metadataValue); +} + +export function __awaiter(thisArg, _arguments, P, generator) { + function adopt(value) { return value instanceof P ? value : new P(function (resolve) { resolve(value); }); } + return new (P || (P = Promise))(function (resolve, reject) { + function fulfilled(value) { try { step(generator.next(value)); } catch (e) { reject(e); } } + function rejected(value) { try { step(generator["throw"](value)); } catch (e) { reject(e); } } + function step(result) { result.done ? 
resolve(result.value) : adopt(result.value).then(fulfilled, rejected); } + step((generator = generator.apply(thisArg, _arguments || [])).next()); + }); +} + +export function __generator(thisArg, body) { + var _ = { label: 0, sent: function() { if (t[0] & 1) throw t[1]; return t[1]; }, trys: [], ops: [] }, f, y, t, g = Object.create((typeof Iterator === "function" ? Iterator : Object).prototype); + return g.next = verb(0), g["throw"] = verb(1), g["return"] = verb(2), typeof Symbol === "function" && (g[Symbol.iterator] = function() { return this; }), g; + function verb(n) { return function (v) { return step([n, v]); }; } + function step(op) { + if (f) throw new TypeError("Generator is already executing."); + while (g && (g = 0, op[0] && (_ = 0)), _) try { + if (f = 1, y && (t = op[0] & 2 ? y["return"] : op[0] ? y["throw"] || ((t = y["return"]) && t.call(y), 0) : y.next) && !(t = t.call(y, op[1])).done) return t; + if (y = 0, t) op = [op[0] & 2, t.value]; + switch (op[0]) { + case 0: case 1: t = op; break; + case 4: _.label++; return { value: op[1], done: false }; + case 5: _.label++; y = op[1]; op = [0]; continue; + case 7: op = _.ops.pop(); _.trys.pop(); continue; + default: + if (!(t = _.trys, t = t.length > 0 && t[t.length - 1]) && (op[0] === 6 || op[0] === 2)) { _ = 0; continue; } + if (op[0] === 3 && (!t || (op[1] > t[0] && op[1] < t[3]))) { _.label = op[1]; break; } + if (op[0] === 6 && _.label < t[1]) { _.label = t[1]; t = op; break; } + if (t && _.label < t[2]) { _.label = t[2]; _.ops.push(op); break; } + if (t[2]) _.ops.pop(); + _.trys.pop(); continue; + } + op = body.call(thisArg, _); + } catch (e) { op = [6, e]; y = 0; } finally { f = t = 0; } + if (op[0] & 5) throw op[1]; return { value: op[0] ? op[1] : void 0, done: true }; + } +} + +export var __createBinding = Object.create ? (function(o, m, k, k2) { + if (k2 === undefined) k2 = k; + var desc = Object.getOwnPropertyDescriptor(m, k); + if (!desc || ("get" in desc ? 
!m.__esModule : desc.writable || desc.configurable)) { + desc = { enumerable: true, get: function() { return m[k]; } }; + } + Object.defineProperty(o, k2, desc); +}) : (function(o, m, k, k2) { + if (k2 === undefined) k2 = k; + o[k2] = m[k]; +}); + +export function __exportStar(m, o) { + for (var p in m) if (p !== "default" && !Object.prototype.hasOwnProperty.call(o, p)) __createBinding(o, m, p); +} + +export function __values(o) { + var s = typeof Symbol === "function" && Symbol.iterator, m = s && o[s], i = 0; + if (m) return m.call(o); + if (o && typeof o.length === "number") return { + next: function () { + if (o && i >= o.length) o = void 0; + return { value: o && o[i++], done: !o }; + } + }; + throw new TypeError(s ? "Object is not iterable." : "Symbol.iterator is not defined."); +} + +export function __read(o, n) { + var m = typeof Symbol === "function" && o[Symbol.iterator]; + if (!m) return o; + var i = m.call(o), r, ar = [], e; + try { + while ((n === void 0 || n-- > 0) && !(r = i.next()).done) ar.push(r.value); + } + catch (error) { e = { error: error }; } + finally { + try { + if (r && !r.done && (m = i["return"])) m.call(i); + } + finally { if (e) throw e.error; } + } + return ar; +} + +/** @deprecated */ +export function __spread() { + for (var ar = [], i = 0; i < arguments.length; i++) + ar = ar.concat(__read(arguments[i])); + return ar; +} + +/** @deprecated */ +export function __spreadArrays() { + for (var s = 0, i = 0, il = arguments.length; i < il; i++) s += arguments[i].length; + for (var r = Array(s), k = 0, i = 0; i < il; i++) + for (var a = arguments[i], j = 0, jl = a.length; j < jl; j++, k++) + r[k] = a[j]; + return r; +} + +export function __spreadArray(to, from, pack) { + if (pack || arguments.length === 2) for (var i = 0, l = from.length, ar; i < l; i++) { + if (ar || !(i in from)) { + if (!ar) ar = Array.prototype.slice.call(from, 0, i); + ar[i] = from[i]; + } + } + return to.concat(ar || Array.prototype.slice.call(from)); +} + +export 
function __await(v) { + return this instanceof __await ? (this.v = v, this) : new __await(v); +} + +export function __asyncGenerator(thisArg, _arguments, generator) { + if (!Symbol.asyncIterator) throw new TypeError("Symbol.asyncIterator is not defined."); + var g = generator.apply(thisArg, _arguments || []), i, q = []; + return i = Object.create((typeof AsyncIterator === "function" ? AsyncIterator : Object).prototype), verb("next"), verb("throw"), verb("return", awaitReturn), i[Symbol.asyncIterator] = function () { return this; }, i; + function awaitReturn(f) { return function (v) { return Promise.resolve(v).then(f, reject); }; } + function verb(n, f) { if (g[n]) { i[n] = function (v) { return new Promise(function (a, b) { q.push([n, v, a, b]) > 1 || resume(n, v); }); }; if (f) i[n] = f(i[n]); } } + function resume(n, v) { try { step(g[n](v)); } catch (e) { settle(q[0][3], e); } } + function step(r) { r.value instanceof __await ? Promise.resolve(r.value.v).then(fulfill, reject) : settle(q[0][2], r); } + function fulfill(value) { resume("next", value); } + function reject(value) { resume("throw", value); } + function settle(f, v) { if (f(v), q.shift(), q.length) resume(q[0][0], q[0][1]); } +} + +export function __asyncDelegator(o) { + var i, p; + return i = {}, verb("next"), verb("throw", function (e) { throw e; }), verb("return"), i[Symbol.iterator] = function () { return this; }, i; + function verb(n, f) { i[n] = o[n] ? function (v) { return (p = !p) ? { value: __await(o[n](v)), done: false } : f ? f(v) : v; } : f; } +} + +export function __asyncValues(o) { + if (!Symbol.asyncIterator) throw new TypeError("Symbol.asyncIterator is not defined."); + var m = o[Symbol.asyncIterator], i; + return m ? m.call(o) : (o = typeof __values === "function" ? 
__values(o) : o[Symbol.iterator](), i = {}, verb("next"), verb("throw"), verb("return"), i[Symbol.asyncIterator] = function () { return this; }, i); + function verb(n) { i[n] = o[n] && function (v) { return new Promise(function (resolve, reject) { v = o[n](v), settle(resolve, reject, v.done, v.value); }); }; } + function settle(resolve, reject, d, v) { Promise.resolve(v).then(function(v) { resolve({ value: v, done: d }); }, reject); } +} + +export function __makeTemplateObject(cooked, raw) { + if (Object.defineProperty) { Object.defineProperty(cooked, "raw", { value: raw }); } else { cooked.raw = raw; } + return cooked; +}; + +var __setModuleDefault = Object.create ? (function(o, v) { + Object.defineProperty(o, "default", { enumerable: true, value: v }); +}) : function(o, v) { + o["default"] = v; +}; + +var ownKeys = function(o) { + ownKeys = Object.getOwnPropertyNames || function (o) { + var ar = []; + for (var k in o) if (Object.prototype.hasOwnProperty.call(o, k)) ar[ar.length] = k; + return ar; + }; + return ownKeys(o); +}; + +export function __importStar(mod) { + if (mod && mod.__esModule) return mod; + var result = {}; + if (mod != null) for (var k = ownKeys(mod), i = 0; i < k.length; i++) if (k[i] !== "default") __createBinding(result, mod, k[i]); + __setModuleDefault(result, mod); + return result; +} + +export function __importDefault(mod) { + return (mod && mod.__esModule) ? mod : { default: mod }; +} + +export function __classPrivateFieldGet(receiver, state, kind, f) { + if (kind === "a" && !f) throw new TypeError("Private accessor was defined without a getter"); + if (typeof state === "function" ? receiver !== state || !f : !state.has(receiver)) throw new TypeError("Cannot read private member from an object whose class did not declare it"); + return kind === "m" ? f : kind === "a" ? f.call(receiver) : f ? 
f.value : state.get(receiver); +} + +export function __classPrivateFieldSet(receiver, state, value, kind, f) { + if (kind === "m") throw new TypeError("Private method is not writable"); + if (kind === "a" && !f) throw new TypeError("Private accessor was defined without a setter"); + if (typeof state === "function" ? receiver !== state || !f : !state.has(receiver)) throw new TypeError("Cannot write private member to an object whose class did not declare it"); + return (kind === "a" ? f.call(receiver, value) : f ? f.value = value : state.set(receiver, value)), value; +} + +export function __classPrivateFieldIn(state, receiver) { + if (receiver === null || (typeof receiver !== "object" && typeof receiver !== "function")) throw new TypeError("Cannot use 'in' operator on non-object"); + return typeof state === "function" ? receiver === state : state.has(receiver); +} + +export function __addDisposableResource(env, value, async) { + if (value !== null && value !== void 0) { + if (typeof value !== "object" && typeof value !== "function") throw new TypeError("Object expected."); + var dispose, inner; + if (async) { + if (!Symbol.asyncDispose) throw new TypeError("Symbol.asyncDispose is not defined."); + dispose = value[Symbol.asyncDispose]; + } + if (dispose === void 0) { + if (!Symbol.dispose) throw new TypeError("Symbol.dispose is not defined."); + dispose = value[Symbol.dispose]; + if (async) inner = dispose; + } + if (typeof dispose !== "function") throw new TypeError("Object not disposable."); + if (inner) dispose = function() { try { inner.call(this); } catch (e) { return Promise.reject(e); } }; + env.stack.push({ value: value, dispose: dispose, async: async }); + } + else if (async) { + env.stack.push({ async: true }); + } + return value; +} + +var _SuppressedError = typeof SuppressedError === "function" ? 
SuppressedError : function (error, suppressed, message) { + var e = new Error(message); + return e.name = "SuppressedError", e.error = error, e.suppressed = suppressed, e; +}; + +export function __disposeResources(env) { + function fail(e) { + env.error = env.hasError ? new _SuppressedError(e, env.error, "An error was suppressed during disposal.") : e; + env.hasError = true; + } + var r, s = 0; + function next() { + while (r = env.stack.pop()) { + try { + if (!r.async && s === 1) return s = 0, env.stack.push(r), Promise.resolve().then(next); + if (r.dispose) { + var result = r.dispose.call(r.value); + if (r.async) return s |= 2, Promise.resolve(result).then(next, function(e) { fail(e); return next(); }); + } + else s |= 1; + } + catch (e) { + fail(e); + } + } + if (s === 1) return env.hasError ? Promise.reject(env.error) : Promise.resolve(); + if (env.hasError) throw env.error; + } + return next(); +} + +export function __rewriteRelativeImportExtension(path, preserveJsx) { + if (typeof path === "string" && /^\.\.?\//.test(path)) { + return path.replace(/\.(tsx)$|((?:\.d)?)((?:\.[^./]+?)?)\.([cm]?)ts$/i, function (m, tsx, d, ext, cm) { + return tsx ? preserveJsx ? ".jsx" : ".js" : d && (!ext || !cm) ? m : (d + ext + "." 
+ cm.toLowerCase() + "js"); + }); + } + return path; +} + +export default { + __extends, + __assign, + __rest, + __decorate, + __param, + __esDecorate, + __runInitializers, + __propKey, + __setFunctionName, + __metadata, + __awaiter, + __generator, + __createBinding, + __exportStar, + __values, + __read, + __spread, + __spreadArrays, + __spreadArray, + __await, + __asyncGenerator, + __asyncDelegator, + __asyncValues, + __makeTemplateObject, + __importStar, + __importDefault, + __classPrivateFieldGet, + __classPrivateFieldSet, + __classPrivateFieldIn, + __addDisposableResource, + __disposeResources, + __rewriteRelativeImportExtension, +}; diff --git a/amplify/functions/deleteDocument/node_modules/tslib/tslib.html b/amplify/functions/deleteDocument/node_modules/tslib/tslib.html new file mode 100644 index 0000000..44c9ba5 --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/tslib/tslib.html @@ -0,0 +1 @@ + \ No newline at end of file diff --git a/amplify/functions/deleteDocument/node_modules/tslib/tslib.js b/amplify/functions/deleteDocument/node_modules/tslib/tslib.js new file mode 100644 index 0000000..5e12ace --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/tslib/tslib.js @@ -0,0 +1,484 @@ +/****************************************************************************** +Copyright (c) Microsoft Corporation. + +Permission to use, copy, modify, and/or distribute this software for any +purpose with or without fee is hereby granted. + +THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES WITH +REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF MERCHANTABILITY +AND FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR ANY SPECIAL, DIRECT, +INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES WHATSOEVER RESULTING FROM +LOSS OF USE, DATA OR PROFITS, WHETHER IN AN ACTION OF CONTRACT, NEGLIGENCE OR +OTHER TORTIOUS ACTION, ARISING OUT OF OR IN CONNECTION WITH THE USE OR +PERFORMANCE OF THIS SOFTWARE. 
+***************************************************************************** */ +/* global global, define, Symbol, Reflect, Promise, SuppressedError, Iterator */ +var __extends; +var __assign; +var __rest; +var __decorate; +var __param; +var __esDecorate; +var __runInitializers; +var __propKey; +var __setFunctionName; +var __metadata; +var __awaiter; +var __generator; +var __exportStar; +var __values; +var __read; +var __spread; +var __spreadArrays; +var __spreadArray; +var __await; +var __asyncGenerator; +var __asyncDelegator; +var __asyncValues; +var __makeTemplateObject; +var __importStar; +var __importDefault; +var __classPrivateFieldGet; +var __classPrivateFieldSet; +var __classPrivateFieldIn; +var __createBinding; +var __addDisposableResource; +var __disposeResources; +var __rewriteRelativeImportExtension; +(function (factory) { + var root = typeof global === "object" ? global : typeof self === "object" ? self : typeof this === "object" ? this : {}; + if (typeof define === "function" && define.amd) { + define("tslib", ["exports"], function (exports) { factory(createExporter(root, createExporter(exports))); }); + } + else if (typeof module === "object" && typeof module.exports === "object") { + factory(createExporter(root, createExporter(module.exports))); + } + else { + factory(createExporter(root)); + } + function createExporter(exports, previous) { + if (exports !== root) { + if (typeof Object.create === "function") { + Object.defineProperty(exports, "__esModule", { value: true }); + } + else { + exports.__esModule = true; + } + } + return function (id, v) { return exports[id] = previous ? 
previous(id, v) : v; }; + } +}) +(function (exporter) { + var extendStatics = Object.setPrototypeOf || + ({ __proto__: [] } instanceof Array && function (d, b) { d.__proto__ = b; }) || + function (d, b) { for (var p in b) if (Object.prototype.hasOwnProperty.call(b, p)) d[p] = b[p]; }; + + __extends = function (d, b) { + if (typeof b !== "function" && b !== null) + throw new TypeError("Class extends value " + String(b) + " is not a constructor or null"); + extendStatics(d, b); + function __() { this.constructor = d; } + d.prototype = b === null ? Object.create(b) : (__.prototype = b.prototype, new __()); + }; + + __assign = Object.assign || function (t) { + for (var s, i = 1, n = arguments.length; i < n; i++) { + s = arguments[i]; + for (var p in s) if (Object.prototype.hasOwnProperty.call(s, p)) t[p] = s[p]; + } + return t; + }; + + __rest = function (s, e) { + var t = {}; + for (var p in s) if (Object.prototype.hasOwnProperty.call(s, p) && e.indexOf(p) < 0) + t[p] = s[p]; + if (s != null && typeof Object.getOwnPropertySymbols === "function") + for (var i = 0, p = Object.getOwnPropertySymbols(s); i < p.length; i++) { + if (e.indexOf(p[i]) < 0 && Object.prototype.propertyIsEnumerable.call(s, p[i])) + t[p[i]] = s[p[i]]; + } + return t; + }; + + __decorate = function (decorators, target, key, desc) { + var c = arguments.length, r = c < 3 ? target : desc === null ? desc = Object.getOwnPropertyDescriptor(target, key) : desc, d; + if (typeof Reflect === "object" && typeof Reflect.decorate === "function") r = Reflect.decorate(decorators, target, key, desc); + else for (var i = decorators.length - 1; i >= 0; i--) if (d = decorators[i]) r = (c < 3 ? d(r) : c > 3 ? 
d(target, key, r) : d(target, key)) || r; + return c > 3 && r && Object.defineProperty(target, key, r), r; + }; + + __param = function (paramIndex, decorator) { + return function (target, key) { decorator(target, key, paramIndex); } + }; + + __esDecorate = function (ctor, descriptorIn, decorators, contextIn, initializers, extraInitializers) { + function accept(f) { if (f !== void 0 && typeof f !== "function") throw new TypeError("Function expected"); return f; } + var kind = contextIn.kind, key = kind === "getter" ? "get" : kind === "setter" ? "set" : "value"; + var target = !descriptorIn && ctor ? contextIn["static"] ? ctor : ctor.prototype : null; + var descriptor = descriptorIn || (target ? Object.getOwnPropertyDescriptor(target, contextIn.name) : {}); + var _, done = false; + for (var i = decorators.length - 1; i >= 0; i--) { + var context = {}; + for (var p in contextIn) context[p] = p === "access" ? {} : contextIn[p]; + for (var p in contextIn.access) context.access[p] = contextIn.access[p]; + context.addInitializer = function (f) { if (done) throw new TypeError("Cannot add initializers after decoration has completed"); extraInitializers.push(accept(f || null)); }; + var result = (0, decorators[i])(kind === "accessor" ? 
{ get: descriptor.get, set: descriptor.set } : descriptor[key], context); + if (kind === "accessor") { + if (result === void 0) continue; + if (result === null || typeof result !== "object") throw new TypeError("Object expected"); + if (_ = accept(result.get)) descriptor.get = _; + if (_ = accept(result.set)) descriptor.set = _; + if (_ = accept(result.init)) initializers.unshift(_); + } + else if (_ = accept(result)) { + if (kind === "field") initializers.unshift(_); + else descriptor[key] = _; + } + } + if (target) Object.defineProperty(target, contextIn.name, descriptor); + done = true; + }; + + __runInitializers = function (thisArg, initializers, value) { + var useValue = arguments.length > 2; + for (var i = 0; i < initializers.length; i++) { + value = useValue ? initializers[i].call(thisArg, value) : initializers[i].call(thisArg); + } + return useValue ? value : void 0; + }; + + __propKey = function (x) { + return typeof x === "symbol" ? x : "".concat(x); + }; + + __setFunctionName = function (f, name, prefix) { + if (typeof name === "symbol") name = name.description ? "[".concat(name.description, "]") : ""; + return Object.defineProperty(f, "name", { configurable: true, value: prefix ? "".concat(prefix, " ", name) : name }); + }; + + __metadata = function (metadataKey, metadataValue) { + if (typeof Reflect === "object" && typeof Reflect.metadata === "function") return Reflect.metadata(metadataKey, metadataValue); + }; + + __awaiter = function (thisArg, _arguments, P, generator) { + function adopt(value) { return value instanceof P ? value : new P(function (resolve) { resolve(value); }); } + return new (P || (P = Promise))(function (resolve, reject) { + function fulfilled(value) { try { step(generator.next(value)); } catch (e) { reject(e); } } + function rejected(value) { try { step(generator["throw"](value)); } catch (e) { reject(e); } } + function step(result) { result.done ? 
resolve(result.value) : adopt(result.value).then(fulfilled, rejected); } + step((generator = generator.apply(thisArg, _arguments || [])).next()); + }); + }; + + __generator = function (thisArg, body) { + var _ = { label: 0, sent: function() { if (t[0] & 1) throw t[1]; return t[1]; }, trys: [], ops: [] }, f, y, t, g = Object.create((typeof Iterator === "function" ? Iterator : Object).prototype); + return g.next = verb(0), g["throw"] = verb(1), g["return"] = verb(2), typeof Symbol === "function" && (g[Symbol.iterator] = function() { return this; }), g; + function verb(n) { return function (v) { return step([n, v]); }; } + function step(op) { + if (f) throw new TypeError("Generator is already executing."); + while (g && (g = 0, op[0] && (_ = 0)), _) try { + if (f = 1, y && (t = op[0] & 2 ? y["return"] : op[0] ? y["throw"] || ((t = y["return"]) && t.call(y), 0) : y.next) && !(t = t.call(y, op[1])).done) return t; + if (y = 0, t) op = [op[0] & 2, t.value]; + switch (op[0]) { + case 0: case 1: t = op; break; + case 4: _.label++; return { value: op[1], done: false }; + case 5: _.label++; y = op[1]; op = [0]; continue; + case 7: op = _.ops.pop(); _.trys.pop(); continue; + default: + if (!(t = _.trys, t = t.length > 0 && t[t.length - 1]) && (op[0] === 6 || op[0] === 2)) { _ = 0; continue; } + if (op[0] === 3 && (!t || (op[1] > t[0] && op[1] < t[3]))) { _.label = op[1]; break; } + if (op[0] === 6 && _.label < t[1]) { _.label = t[1]; t = op; break; } + if (t && _.label < t[2]) { _.label = t[2]; _.ops.push(op); break; } + if (t[2]) _.ops.pop(); + _.trys.pop(); continue; + } + op = body.call(thisArg, _); + } catch (e) { op = [6, e]; y = 0; } finally { f = t = 0; } + if (op[0] & 5) throw op[1]; return { value: op[0] ? op[1] : void 0, done: true }; + } + }; + + __exportStar = function(m, o) { + for (var p in m) if (p !== "default" && !Object.prototype.hasOwnProperty.call(o, p)) __createBinding(o, m, p); + }; + + __createBinding = Object.create ? 
(function(o, m, k, k2) { + if (k2 === undefined) k2 = k; + var desc = Object.getOwnPropertyDescriptor(m, k); + if (!desc || ("get" in desc ? !m.__esModule : desc.writable || desc.configurable)) { + desc = { enumerable: true, get: function() { return m[k]; } }; + } + Object.defineProperty(o, k2, desc); + }) : (function(o, m, k, k2) { + if (k2 === undefined) k2 = k; + o[k2] = m[k]; + }); + + __values = function (o) { + var s = typeof Symbol === "function" && Symbol.iterator, m = s && o[s], i = 0; + if (m) return m.call(o); + if (o && typeof o.length === "number") return { + next: function () { + if (o && i >= o.length) o = void 0; + return { value: o && o[i++], done: !o }; + } + }; + throw new TypeError(s ? "Object is not iterable." : "Symbol.iterator is not defined."); + }; + + __read = function (o, n) { + var m = typeof Symbol === "function" && o[Symbol.iterator]; + if (!m) return o; + var i = m.call(o), r, ar = [], e; + try { + while ((n === void 0 || n-- > 0) && !(r = i.next()).done) ar.push(r.value); + } + catch (error) { e = { error: error }; } + finally { + try { + if (r && !r.done && (m = i["return"])) m.call(i); + } + finally { if (e) throw e.error; } + } + return ar; + }; + + /** @deprecated */ + __spread = function () { + for (var ar = [], i = 0; i < arguments.length; i++) + ar = ar.concat(__read(arguments[i])); + return ar; + }; + + /** @deprecated */ + __spreadArrays = function () { + for (var s = 0, i = 0, il = arguments.length; i < il; i++) s += arguments[i].length; + for (var r = Array(s), k = 0, i = 0; i < il; i++) + for (var a = arguments[i], j = 0, jl = a.length; j < jl; j++, k++) + r[k] = a[j]; + return r; + }; + + __spreadArray = function (to, from, pack) { + if (pack || arguments.length === 2) for (var i = 0, l = from.length, ar; i < l; i++) { + if (ar || !(i in from)) { + if (!ar) ar = Array.prototype.slice.call(from, 0, i); + ar[i] = from[i]; + } + } + return to.concat(ar || Array.prototype.slice.call(from)); + }; + + __await = function (v) { 
+ return this instanceof __await ? (this.v = v, this) : new __await(v); + }; + + __asyncGenerator = function (thisArg, _arguments, generator) { + if (!Symbol.asyncIterator) throw new TypeError("Symbol.asyncIterator is not defined."); + var g = generator.apply(thisArg, _arguments || []), i, q = []; + return i = Object.create((typeof AsyncIterator === "function" ? AsyncIterator : Object).prototype), verb("next"), verb("throw"), verb("return", awaitReturn), i[Symbol.asyncIterator] = function () { return this; }, i; + function awaitReturn(f) { return function (v) { return Promise.resolve(v).then(f, reject); }; } + function verb(n, f) { if (g[n]) { i[n] = function (v) { return new Promise(function (a, b) { q.push([n, v, a, b]) > 1 || resume(n, v); }); }; if (f) i[n] = f(i[n]); } } + function resume(n, v) { try { step(g[n](v)); } catch (e) { settle(q[0][3], e); } } + function step(r) { r.value instanceof __await ? Promise.resolve(r.value.v).then(fulfill, reject) : settle(q[0][2], r); } + function fulfill(value) { resume("next", value); } + function reject(value) { resume("throw", value); } + function settle(f, v) { if (f(v), q.shift(), q.length) resume(q[0][0], q[0][1]); } + }; + + __asyncDelegator = function (o) { + var i, p; + return i = {}, verb("next"), verb("throw", function (e) { throw e; }), verb("return"), i[Symbol.iterator] = function () { return this; }, i; + function verb(n, f) { i[n] = o[n] ? function (v) { return (p = !p) ? { value: __await(o[n](v)), done: false } : f ? f(v) : v; } : f; } + }; + + __asyncValues = function (o) { + if (!Symbol.asyncIterator) throw new TypeError("Symbol.asyncIterator is not defined."); + var m = o[Symbol.asyncIterator], i; + return m ? m.call(o) : (o = typeof __values === "function" ? 
__values(o) : o[Symbol.iterator](), i = {}, verb("next"), verb("throw"), verb("return"), i[Symbol.asyncIterator] = function () { return this; }, i); + function verb(n) { i[n] = o[n] && function (v) { return new Promise(function (resolve, reject) { v = o[n](v), settle(resolve, reject, v.done, v.value); }); }; } + function settle(resolve, reject, d, v) { Promise.resolve(v).then(function(v) { resolve({ value: v, done: d }); }, reject); } + }; + + __makeTemplateObject = function (cooked, raw) { + if (Object.defineProperty) { Object.defineProperty(cooked, "raw", { value: raw }); } else { cooked.raw = raw; } + return cooked; + }; + + var __setModuleDefault = Object.create ? (function(o, v) { + Object.defineProperty(o, "default", { enumerable: true, value: v }); + }) : function(o, v) { + o["default"] = v; + }; + + var ownKeys = function(o) { + ownKeys = Object.getOwnPropertyNames || function (o) { + var ar = []; + for (var k in o) if (Object.prototype.hasOwnProperty.call(o, k)) ar[ar.length] = k; + return ar; + }; + return ownKeys(o); + }; + + __importStar = function (mod) { + if (mod && mod.__esModule) return mod; + var result = {}; + if (mod != null) for (var k = ownKeys(mod), i = 0; i < k.length; i++) if (k[i] !== "default") __createBinding(result, mod, k[i]); + __setModuleDefault(result, mod); + return result; + }; + + __importDefault = function (mod) { + return (mod && mod.__esModule) ? mod : { "default": mod }; + }; + + __classPrivateFieldGet = function (receiver, state, kind, f) { + if (kind === "a" && !f) throw new TypeError("Private accessor was defined without a getter"); + if (typeof state === "function" ? receiver !== state || !f : !state.has(receiver)) throw new TypeError("Cannot read private member from an object whose class did not declare it"); + return kind === "m" ? f : kind === "a" ? f.call(receiver) : f ? 
f.value : state.get(receiver); + }; + + __classPrivateFieldSet = function (receiver, state, value, kind, f) { + if (kind === "m") throw new TypeError("Private method is not writable"); + if (kind === "a" && !f) throw new TypeError("Private accessor was defined without a setter"); + if (typeof state === "function" ? receiver !== state || !f : !state.has(receiver)) throw new TypeError("Cannot write private member to an object whose class did not declare it"); + return (kind === "a" ? f.call(receiver, value) : f ? f.value = value : state.set(receiver, value)), value; + }; + + __classPrivateFieldIn = function (state, receiver) { + if (receiver === null || (typeof receiver !== "object" && typeof receiver !== "function")) throw new TypeError("Cannot use 'in' operator on non-object"); + return typeof state === "function" ? receiver === state : state.has(receiver); + }; + + __addDisposableResource = function (env, value, async) { + if (value !== null && value !== void 0) { + if (typeof value !== "object" && typeof value !== "function") throw new TypeError("Object expected."); + var dispose, inner; + if (async) { + if (!Symbol.asyncDispose) throw new TypeError("Symbol.asyncDispose is not defined."); + dispose = value[Symbol.asyncDispose]; + } + if (dispose === void 0) { + if (!Symbol.dispose) throw new TypeError("Symbol.dispose is not defined."); + dispose = value[Symbol.dispose]; + if (async) inner = dispose; + } + if (typeof dispose !== "function") throw new TypeError("Object not disposable."); + if (inner) dispose = function() { try { inner.call(this); } catch (e) { return Promise.reject(e); } }; + env.stack.push({ value: value, dispose: dispose, async: async }); + } + else if (async) { + env.stack.push({ async: true }); + } + return value; + }; + + var _SuppressedError = typeof SuppressedError === "function" ? 
SuppressedError : function (error, suppressed, message) { + var e = new Error(message); + return e.name = "SuppressedError", e.error = error, e.suppressed = suppressed, e; + }; + + __disposeResources = function (env) { + function fail(e) { + env.error = env.hasError ? new _SuppressedError(e, env.error, "An error was suppressed during disposal.") : e; + env.hasError = true; + } + var r, s = 0; + function next() { + while (r = env.stack.pop()) { + try { + if (!r.async && s === 1) return s = 0, env.stack.push(r), Promise.resolve().then(next); + if (r.dispose) { + var result = r.dispose.call(r.value); + if (r.async) return s |= 2, Promise.resolve(result).then(next, function(e) { fail(e); return next(); }); + } + else s |= 1; + } + catch (e) { + fail(e); + } + } + if (s === 1) return env.hasError ? Promise.reject(env.error) : Promise.resolve(); + if (env.hasError) throw env.error; + } + return next(); + }; + + __rewriteRelativeImportExtension = function (path, preserveJsx) { + if (typeof path === "string" && /^\.\.?\//.test(path)) { + return path.replace(/\.(tsx)$|((?:\.d)?)((?:\.[^./]+?)?)\.([cm]?)ts$/i, function (m, tsx, d, ext, cm) { + return tsx ? preserveJsx ? ".jsx" : ".js" : d && (!ext || !cm) ? m : (d + ext + "." 
+ cm.toLowerCase() + "js"); + }); + } + return path; + }; + + exporter("__extends", __extends); + exporter("__assign", __assign); + exporter("__rest", __rest); + exporter("__decorate", __decorate); + exporter("__param", __param); + exporter("__esDecorate", __esDecorate); + exporter("__runInitializers", __runInitializers); + exporter("__propKey", __propKey); + exporter("__setFunctionName", __setFunctionName); + exporter("__metadata", __metadata); + exporter("__awaiter", __awaiter); + exporter("__generator", __generator); + exporter("__exportStar", __exportStar); + exporter("__createBinding", __createBinding); + exporter("__values", __values); + exporter("__read", __read); + exporter("__spread", __spread); + exporter("__spreadArrays", __spreadArrays); + exporter("__spreadArray", __spreadArray); + exporter("__await", __await); + exporter("__asyncGenerator", __asyncGenerator); + exporter("__asyncDelegator", __asyncDelegator); + exporter("__asyncValues", __asyncValues); + exporter("__makeTemplateObject", __makeTemplateObject); + exporter("__importStar", __importStar); + exporter("__importDefault", __importDefault); + exporter("__classPrivateFieldGet", __classPrivateFieldGet); + exporter("__classPrivateFieldSet", __classPrivateFieldSet); + exporter("__classPrivateFieldIn", __classPrivateFieldIn); + exporter("__addDisposableResource", __addDisposableResource); + exporter("__disposeResources", __disposeResources); + exporter("__rewriteRelativeImportExtension", __rewriteRelativeImportExtension); +}); + +0 && (module.exports = { + __extends: __extends, + __assign: __assign, + __rest: __rest, + __decorate: __decorate, + __param: __param, + __esDecorate: __esDecorate, + __runInitializers: __runInitializers, + __propKey: __propKey, + __setFunctionName: __setFunctionName, + __metadata: __metadata, + __awaiter: __awaiter, + __generator: __generator, + __exportStar: __exportStar, + __createBinding: __createBinding, + __values: __values, + __read: __read, + __spread: __spread, + 
__spreadArrays: __spreadArrays, + __spreadArray: __spreadArray, + __await: __await, + __asyncGenerator: __asyncGenerator, + __asyncDelegator: __asyncDelegator, + __asyncValues: __asyncValues, + __makeTemplateObject: __makeTemplateObject, + __importStar: __importStar, + __importDefault: __importDefault, + __classPrivateFieldGet: __classPrivateFieldGet, + __classPrivateFieldSet: __classPrivateFieldSet, + __classPrivateFieldIn: __classPrivateFieldIn, + __addDisposableResource: __addDisposableResource, + __disposeResources: __disposeResources, + __rewriteRelativeImportExtension: __rewriteRelativeImportExtension, +}); diff --git a/amplify/functions/deleteDocument/node_modules/uuid/CHANGELOG.md b/amplify/functions/deleteDocument/node_modules/uuid/CHANGELOG.md new file mode 100644 index 0000000..0412ad8 --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/uuid/CHANGELOG.md @@ -0,0 +1,274 @@ +# Changelog + +All notable changes to this project will be documented in this file. See [standard-version](https://github.com/conventional-changelog/standard-version) for commit guidelines. + +## [9.0.1](https://github.com/uuidjs/uuid/compare/v9.0.0...v9.0.1) (2023-09-12) + +### build + +- Fix CI to work with Node.js 20.x + +## [9.0.0](https://github.com/uuidjs/uuid/compare/v8.3.2...v9.0.0) (2022-09-05) + +### ⚠ BREAKING CHANGES + +- Drop Node.js 10.x support. This library always aims at supporting one EOLed LTS release which by this time now is 12.x which has reached EOL 30 Apr 2022. + +- Remove the minified UMD build from the package. + + Minified code is hard to audit and since this is a widely used library it seems more appropriate nowadays to optimize for auditability than to ship a legacy module format that, at best, serves educational purposes nowadays. + + For production browser use cases, users should be using a bundler. 
For educational purposes, today's online sandboxes like replit.com offer convenient ways to load npm modules, so the use case for UMD through repos like UNPKG or jsDelivr has largely vanished. + +- Drop IE 11 and Safari 10 support. Drop support for browsers that don't correctly implement const/let and default arguments, and no longer transpile the browser build to ES2015. + + This also removes the fallback on msCrypto instead of the crypto API. + + Browser tests are run in the first supported version of each supported browser and in the latest (as of this commit) version available on Browserstack. + +### Features + +- optimize uuid.v1 by 1.3x uuid.v4 by 4.3x (430%) ([#597](https://github.com/uuidjs/uuid/issues/597)) ([3a033f6](https://github.com/uuidjs/uuid/commit/3a033f6bab6bb3780ece6d645b902548043280bc)) +- remove UMD build ([#645](https://github.com/uuidjs/uuid/issues/645)) ([e948a0f](https://github.com/uuidjs/uuid/commit/e948a0f22bf22f4619b27bd913885e478e20fe6f)), closes [#620](https://github.com/uuidjs/uuid/issues/620) +- use native crypto.randomUUID when available ([#600](https://github.com/uuidjs/uuid/issues/600)) ([c9e076c](https://github.com/uuidjs/uuid/commit/c9e076c852edad7e9a06baaa1d148cf4eda6c6c4)) + +### Bug Fixes + +- add Jest/jsdom compatibility ([#642](https://github.com/uuidjs/uuid/issues/642)) ([16f9c46](https://github.com/uuidjs/uuid/commit/16f9c469edf46f0786164cdf4dc980743984a6fd)) +- change default export to named function ([#545](https://github.com/uuidjs/uuid/issues/545)) ([c57bc5a](https://github.com/uuidjs/uuid/commit/c57bc5a9a0653273aa639cda9177ce52efabe42a)) +- handle error when parameter is not set in v3 and v5 ([#622](https://github.com/uuidjs/uuid/issues/622)) ([fcd7388](https://github.com/uuidjs/uuid/commit/fcd73881692d9fabb63872576ba28e30ff852091)) +- run npm audit fix ([#644](https://github.com/uuidjs/uuid/issues/644)) ([04686f5](https://github.com/uuidjs/uuid/commit/04686f54c5fed2cfffc1b619f4970c4bb8532353)) +- upgrading from 
uuid3 broken link ([#568](https://github.com/uuidjs/uuid/issues/568)) ([1c849da](https://github.com/uuidjs/uuid/commit/1c849da6e164259e72e18636726345b13a7eddd6)) + +### build + +- drop Node.js 8.x from babel transpile target ([#603](https://github.com/uuidjs/uuid/issues/603)) ([aa11485](https://github.com/uuidjs/uuid/commit/aa114858260402107ec8a1e1a825dea0a259bcb5)) +- drop support for legacy browsers (IE11, Safari 10) ([#604](https://github.com/uuidjs/uuid/issues/604)) ([0f433e5](https://github.com/uuidjs/uuid/commit/0f433e5ec444edacd53016de67db021102f36148)) + +- drop node 10.x to upgrade dev dependencies ([#653](https://github.com/uuidjs/uuid/issues/653)) ([28a5712](https://github.com/uuidjs/uuid/commit/28a571283f8abda6b9d85e689f95b7d3ee9e282e)), closes [#643](https://github.com/uuidjs/uuid/issues/643) + +### [8.3.2](https://github.com/uuidjs/uuid/compare/v8.3.1...v8.3.2) (2020-12-08) + +### Bug Fixes + +- lazy load getRandomValues ([#537](https://github.com/uuidjs/uuid/issues/537)) ([16c8f6d](https://github.com/uuidjs/uuid/commit/16c8f6df2f6b09b4d6235602d6a591188320a82e)), closes [#536](https://github.com/uuidjs/uuid/issues/536) + +### [8.3.1](https://github.com/uuidjs/uuid/compare/v8.3.0...v8.3.1) (2020-10-04) + +### Bug Fixes + +- support expo>=39.0.0 ([#515](https://github.com/uuidjs/uuid/issues/515)) ([c65a0f3](https://github.com/uuidjs/uuid/commit/c65a0f3fa73b901959d638d1e3591dfacdbed867)), closes [#375](https://github.com/uuidjs/uuid/issues/375) + +## [8.3.0](https://github.com/uuidjs/uuid/compare/v8.2.0...v8.3.0) (2020-07-27) + +### Features + +- add parse/stringify/validate/version/NIL APIs ([#479](https://github.com/uuidjs/uuid/issues/479)) ([0e6c10b](https://github.com/uuidjs/uuid/commit/0e6c10ba1bf9517796ff23c052fc0468eedfd5f4)), closes [#475](https://github.com/uuidjs/uuid/issues/475) [#478](https://github.com/uuidjs/uuid/issues/478) [#480](https://github.com/uuidjs/uuid/issues/480) [#481](https://github.com/uuidjs/uuid/issues/481) 
[#180](https://github.com/uuidjs/uuid/issues/180) + +## [8.2.0](https://github.com/uuidjs/uuid/compare/v8.1.0...v8.2.0) (2020-06-23) + +### Features + +- improve performance of v1 string representation ([#453](https://github.com/uuidjs/uuid/issues/453)) ([0ee0b67](https://github.com/uuidjs/uuid/commit/0ee0b67c37846529c66089880414d29f3ae132d5)) +- remove deprecated v4 string parameter ([#454](https://github.com/uuidjs/uuid/issues/454)) ([88ce3ca](https://github.com/uuidjs/uuid/commit/88ce3ca0ba046f60856de62c7ce03f7ba98ba46c)), closes [#437](https://github.com/uuidjs/uuid/issues/437) +- support jspm ([#473](https://github.com/uuidjs/uuid/issues/473)) ([e9f2587](https://github.com/uuidjs/uuid/commit/e9f2587a92575cac31bc1d4ae944e17c09756659)) + +### Bug Fixes + +- prepare package exports for webpack 5 ([#468](https://github.com/uuidjs/uuid/issues/468)) ([8d6e6a5](https://github.com/uuidjs/uuid/commit/8d6e6a5f8965ca9575eb4d92e99a43435f4a58a8)) + +## [8.1.0](https://github.com/uuidjs/uuid/compare/v8.0.0...v8.1.0) (2020-05-20) + +### Features + +- improve v4 performance by reusing random number array ([#435](https://github.com/uuidjs/uuid/issues/435)) ([bf4af0d](https://github.com/uuidjs/uuid/commit/bf4af0d711b4d2ed03d1f74fd12ad0baa87dc79d)) +- optimize V8 performance of bytesToUuid ([#434](https://github.com/uuidjs/uuid/issues/434)) ([e156415](https://github.com/uuidjs/uuid/commit/e156415448ec1af2351fa0b6660cfb22581971f2)) + +### Bug Fixes + +- export package.json required by react-native and bundlers ([#449](https://github.com/uuidjs/uuid/issues/449)) ([be1c8fe](https://github.com/uuidjs/uuid/commit/be1c8fe9a3206c358e0059b52fafd7213aa48a52)), closes [ai/nanoevents#44](https://github.com/ai/nanoevents/issues/44#issuecomment-602010343) [#444](https://github.com/uuidjs/uuid/issues/444) + +## [8.0.0](https://github.com/uuidjs/uuid/compare/v7.0.3...v8.0.0) (2020-04-29) + +### ⚠ BREAKING CHANGES + +- For native ECMAScript Module (ESM) usage in Node.js only named exports are 
exposed, there is no more default export. + + ```diff + -import uuid from 'uuid'; + -console.log(uuid.v4()); // -> 'cd6c3b08-0adc-4f4b-a6ef-36087a1c9869' + +import { v4 as uuidv4 } from 'uuid'; + +uuidv4(); // ⇨ '9b1deb4d-3b7d-4bad-9bdd-2b0d7b3dcb6d' + ``` + +- Deep requiring specific algorithms of this library like `require('uuid/v4')`, which has been deprecated in `uuid@7`, is no longer supported. + + Instead use the named exports that this module exports. + + For ECMAScript Modules (ESM): + + ```diff + -import uuidv4 from 'uuid/v4'; + +import { v4 as uuidv4 } from 'uuid'; + uuidv4(); + ``` + + For CommonJS: + + ```diff + -const uuidv4 = require('uuid/v4'); + +const { v4: uuidv4 } = require('uuid'); + uuidv4(); + ``` + +### Features + +- native Node.js ES Modules (wrapper approach) ([#423](https://github.com/uuidjs/uuid/issues/423)) ([2d9f590](https://github.com/uuidjs/uuid/commit/2d9f590ad9701d692625c07ed62f0a0f91227991)), closes [#245](https://github.com/uuidjs/uuid/issues/245) [#419](https://github.com/uuidjs/uuid/issues/419) [#342](https://github.com/uuidjs/uuid/issues/342) +- remove deep requires ([#426](https://github.com/uuidjs/uuid/issues/426)) ([daf72b8](https://github.com/uuidjs/uuid/commit/daf72b84ceb20272a81bb5fbddb05dd95922cbba)) + +### Bug Fixes + +- add CommonJS syntax example to README quickstart section ([#417](https://github.com/uuidjs/uuid/issues/417)) ([e0ec840](https://github.com/uuidjs/uuid/commit/e0ec8402c7ad44b7ef0453036c612f5db513fda0)) + +### [7.0.3](https://github.com/uuidjs/uuid/compare/v7.0.2...v7.0.3) (2020-03-31) + +### Bug Fixes + +- make deep require deprecation warning work in browsers ([#409](https://github.com/uuidjs/uuid/issues/409)) ([4b71107](https://github.com/uuidjs/uuid/commit/4b71107d8c0d2ef56861ede6403fc9dc35a1e6bf)), closes [#408](https://github.com/uuidjs/uuid/issues/408) + +### [7.0.2](https://github.com/uuidjs/uuid/compare/v7.0.1...v7.0.2) (2020-03-04) + +### Bug Fixes + +- make access to msCrypto consistent 
([#393](https://github.com/uuidjs/uuid/issues/393)) ([8bf2a20](https://github.com/uuidjs/uuid/commit/8bf2a20f3565df743da7215eebdbada9d2df118c)) +- simplify link in deprecation warning ([#391](https://github.com/uuidjs/uuid/issues/391)) ([bb2c8e4](https://github.com/uuidjs/uuid/commit/bb2c8e4e9f4c5f9c1eaaf3ea59710c633cd90cb7)) +- update links to match content in readme ([#386](https://github.com/uuidjs/uuid/issues/386)) ([44f2f86](https://github.com/uuidjs/uuid/commit/44f2f86e9d2bbf14ee5f0f00f72a3db1292666d4)) + +### [7.0.1](https://github.com/uuidjs/uuid/compare/v7.0.0...v7.0.1) (2020-02-25) + +### Bug Fixes + +- clean up esm builds for node and browser ([#383](https://github.com/uuidjs/uuid/issues/383)) ([59e6a49](https://github.com/uuidjs/uuid/commit/59e6a49e7ce7b3e8fb0f3ee52b9daae72af467dc)) +- provide browser versions independent from module system ([#380](https://github.com/uuidjs/uuid/issues/380)) ([4344a22](https://github.com/uuidjs/uuid/commit/4344a22e7aed33be8627eeaaf05360f256a21753)), closes [#378](https://github.com/uuidjs/uuid/issues/378) + +## [7.0.0](https://github.com/uuidjs/uuid/compare/v3.4.0...v7.0.0) (2020-02-24) + +### ⚠ BREAKING CHANGES + +- The default export, which used to be the v4() method but which was already discouraged in v3.x of this library, has been removed. +- Explicitly note that deep imports of the different uuid version functions are deprecated and no longer encouraged and that ECMAScript module named imports should be used instead. Emit a deprecation warning for people who deep-require the different algorithm variants. +- Remove builtin support for insecure random number generators in the browser. Users who want that will have to supply their own random number generator function. +- Remove support for generating v3 and v5 UUIDs in Node.js<4.x +- Convert code base to ECMAScript Modules (ESM) and release CommonJS build for node and ESM build for browser bundlers. 
+ +### Features + +- add UMD build to npm package ([#357](https://github.com/uuidjs/uuid/issues/357)) ([4e75adf](https://github.com/uuidjs/uuid/commit/4e75adf435196f28e3fbbe0185d654b5ded7ca2c)), closes [#345](https://github.com/uuidjs/uuid/issues/345) +- add various es module and CommonJS examples ([b238510](https://github.com/uuidjs/uuid/commit/b238510bf352463521f74bab175a3af9b7a42555)) +- ensure that docs are up-to-date in CI ([ee5e77d](https://github.com/uuidjs/uuid/commit/ee5e77db547474f5a8f23d6c857a6d399209986b)) +- hybrid CommonJS & ECMAScript modules build ([a3f078f](https://github.com/uuidjs/uuid/commit/a3f078faa0baff69ab41aed08e041f8f9c8993d0)) +- remove insecure fallback random number generator ([3a5842b](https://github.com/uuidjs/uuid/commit/3a5842b141a6e5de0ae338f391661e6b84b167c9)), closes [#173](https://github.com/uuidjs/uuid/issues/173) +- remove support for pre Node.js v4 Buffer API ([#356](https://github.com/uuidjs/uuid/issues/356)) ([b59b5c5](https://github.com/uuidjs/uuid/commit/b59b5c5ecad271c5453f1a156f011671f6d35627)) +- rename repository to github:uuidjs/uuid ([#351](https://github.com/uuidjs/uuid/issues/351)) ([c37a518](https://github.com/uuidjs/uuid/commit/c37a518e367ac4b6d0aa62dba1bc6ce9e85020f7)), closes [#338](https://github.com/uuidjs/uuid/issues/338) + +### Bug Fixes + +- add deep-require proxies for local testing and adjust tests ([#365](https://github.com/uuidjs/uuid/issues/365)) ([7fedc79](https://github.com/uuidjs/uuid/commit/7fedc79ac8fda4bfd1c566c7f05ef4ac13b2db48)) +- add note about removal of default export ([#372](https://github.com/uuidjs/uuid/issues/372)) ([12749b7](https://github.com/uuidjs/uuid/commit/12749b700eb49db8a9759fd306d8be05dbfbd58c)), closes [#370](https://github.com/uuidjs/uuid/issues/370) +- deprecated deep requiring of the different algorithm versions ([#361](https://github.com/uuidjs/uuid/issues/361)) ([c0bdf15](https://github.com/uuidjs/uuid/commit/c0bdf15e417639b1aeb0b247b2fb11f7a0a26b23)) + +## 
[3.4.0](https://github.com/uuidjs/uuid/compare/v3.3.3...v3.4.0) (2020-01-16) + +### Features + +- rename repository to github:uuidjs/uuid ([#351](https://github.com/uuidjs/uuid/issues/351)) ([e2d7314](https://github.com/uuidjs/uuid/commit/e2d7314)), closes [#338](https://github.com/uuidjs/uuid/issues/338) + +## [3.3.3](https://github.com/uuidjs/uuid/compare/v3.3.2...v3.3.3) (2019-08-19) + +### Bug Fixes + +- no longer run ci tests on node v4 +- upgrade dependencies + +## [3.3.2](https://github.com/uuidjs/uuid/compare/v3.3.1...v3.3.2) (2018-06-28) + +### Bug Fixes + +- typo ([305d877](https://github.com/uuidjs/uuid/commit/305d877)) + +## [3.3.1](https://github.com/uuidjs/uuid/compare/v3.3.0...v3.3.1) (2018-06-28) + +### Bug Fixes + +- fix [#284](https://github.com/uuidjs/uuid/issues/284) by setting function name in try-catch ([f2a60f2](https://github.com/uuidjs/uuid/commit/f2a60f2)) + +# [3.3.0](https://github.com/uuidjs/uuid/compare/v3.2.1...v3.3.0) (2018-06-22) + +### Bug Fixes + +- assignment to readonly property to allow running in strict mode ([#270](https://github.com/uuidjs/uuid/issues/270)) ([d062fdc](https://github.com/uuidjs/uuid/commit/d062fdc)) +- fix [#229](https://github.com/uuidjs/uuid/issues/229) ([c9684d4](https://github.com/uuidjs/uuid/commit/c9684d4)) +- Get correct version of IE11 crypto ([#274](https://github.com/uuidjs/uuid/issues/274)) ([153d331](https://github.com/uuidjs/uuid/commit/153d331)) +- mem issue when generating uuid ([#267](https://github.com/uuidjs/uuid/issues/267)) ([c47702c](https://github.com/uuidjs/uuid/commit/c47702c)) + +### Features + +- enforce Conventional Commit style commit messages ([#282](https://github.com/uuidjs/uuid/issues/282)) ([cc9a182](https://github.com/uuidjs/uuid/commit/cc9a182)) + +## [3.2.1](https://github.com/uuidjs/uuid/compare/v3.2.0...v3.2.1) (2018-01-16) + +### Bug Fixes + +- use msCrypto if available. 
Fixes [#241](https://github.com/uuidjs/uuid/issues/241) ([#247](https://github.com/uuidjs/uuid/issues/247)) ([1fef18b](https://github.com/uuidjs/uuid/commit/1fef18b)) + +# [3.2.0](https://github.com/uuidjs/uuid/compare/v3.1.0...v3.2.0) (2018-01-16) + +### Bug Fixes + +- remove mistakenly added typescript dependency, rollback version (standard-version will auto-increment) ([09fa824](https://github.com/uuidjs/uuid/commit/09fa824)) +- use msCrypto if available. Fixes [#241](https://github.com/uuidjs/uuid/issues/241) ([#247](https://github.com/uuidjs/uuid/issues/247)) ([1fef18b](https://github.com/uuidjs/uuid/commit/1fef18b)) + +### Features + +- Add v3 Support ([#217](https://github.com/uuidjs/uuid/issues/217)) ([d94f726](https://github.com/uuidjs/uuid/commit/d94f726)) + +# [3.1.0](https://github.com/uuidjs/uuid/compare/v3.1.0...v3.0.1) (2017-06-17) + +### Bug Fixes + +- (fix) Add .npmignore file to exclude test/ and other non-essential files from packing. (#183) +- Fix typo (#178) +- Simple typo fix (#165) + +### Features + +- v5 support in CLI (#197) +- V5 support (#188) + +# 3.0.1 (2016-11-28) + +- split uuid versions into separate files + +# 3.0.0 (2016-11-17) + +- remove .parse and .unparse + +# 2.0.0 + +- Removed uuid.BufferClass + +# 1.4.0 + +- Improved module context detection +- Removed public RNG functions + +# 1.3.2 + +- Improve tests and handling of v1() options (Issue #24) +- Expose RNG option to allow for perf testing with different generators + +# 1.3.0 + +- Support for version 1 ids, thanks to [@ctavan](https://github.com/ctavan)! 
+- Support for node.js crypto API +- De-emphasizing performance in favor of a) cryptographic quality PRNGs where available and b) more manageable code diff --git a/amplify/functions/deleteDocument/node_modules/uuid/CONTRIBUTING.md b/amplify/functions/deleteDocument/node_modules/uuid/CONTRIBUTING.md new file mode 100644 index 0000000..4a4503d --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/uuid/CONTRIBUTING.md @@ -0,0 +1,18 @@ +# Contributing + +Please feel free to file GitHub Issues or propose Pull Requests. We're always happy to discuss improvements to this library! + +## Testing + +```shell +npm test +``` + +## Releasing + +Releases are supposed to be done from master, version bumping is automated through [`standard-version`](https://github.com/conventional-changelog/standard-version): + +```shell +npm run release -- --dry-run # verify output manually +npm run release # follow the instructions from the output of this command +``` diff --git a/amplify/functions/deleteDocument/node_modules/uuid/LICENSE.md b/amplify/functions/deleteDocument/node_modules/uuid/LICENSE.md new file mode 100644 index 0000000..3934168 --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/uuid/LICENSE.md @@ -0,0 +1,9 @@ +The MIT License (MIT) + +Copyright (c) 2010-2020 Robert Kieffer and other contributors + +Permission is hereby granted, free of charge, to any person obtaining a copy of this software and associated documentation files (the "Software"), to deal in the Software without restriction, including without limitation the rights to use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of the Software, and to permit persons to whom the Software is furnished to do so, subject to the following conditions: + +The above copyright notice and this permission notice shall be included in all copies or substantial portions of the Software. 
+ +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. diff --git a/amplify/functions/deleteDocument/node_modules/uuid/README.md b/amplify/functions/deleteDocument/node_modules/uuid/README.md new file mode 100644 index 0000000..4f51e09 --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/uuid/README.md @@ -0,0 +1,466 @@ + + + +# uuid [![CI](https://github.com/uuidjs/uuid/workflows/CI/badge.svg)](https://github.com/uuidjs/uuid/actions?query=workflow%3ACI) [![Browser](https://github.com/uuidjs/uuid/workflows/Browser/badge.svg)](https://github.com/uuidjs/uuid/actions?query=workflow%3ABrowser) + +For the creation of [RFC4122](https://www.ietf.org/rfc/rfc4122.txt) UUIDs + +- **Complete** - Support for RFC4122 version 1, 3, 4, and 5 UUIDs +- **Cross-platform** - Support for ... + - CommonJS, [ECMAScript Modules](#ecmascript-modules) and [CDN builds](#cdn-builds) + - NodeJS 12+ ([LTS releases](https://github.com/nodejs/Release)) + - Chrome, Safari, Firefox, Edge browsers + - Webpack and rollup.js module bundlers + - [React Native / Expo](#react-native--expo) +- **Secure** - Cryptographically-strong random values +- **Small** - Zero-dependency, small footprint, plays nice with "tree shaking" packagers +- **CLI** - Includes the [`uuid` command line](#command-line) utility + +> **Note** Upgrading from `uuid@3`? Your code is probably okay, but check out [Upgrading From `uuid@3`](#upgrading-from-uuid3) for details. + +> **Note** Only interested in creating a version 4 UUID? 
You might be able to use [`crypto.randomUUID()`](https://developer.mozilla.org/en-US/docs/Web/API/Crypto/randomUUID), eliminating the need to install this library. + +## Quickstart + +To create a random UUID... + +**1. Install** + +```shell +npm install uuid +``` + +**2. Create a UUID** (ES6 module syntax) + +```javascript +import { v4 as uuidv4 } from 'uuid'; +uuidv4(); // ⇨ '9b1deb4d-3b7d-4bad-9bdd-2b0d7b3dcb6d' +``` + +... or using CommonJS syntax: + +```javascript +const { v4: uuidv4 } = require('uuid'); +uuidv4(); // ⇨ '1b9d6bcd-bbfd-4b2d-9b5d-ab8dfbbd4bed' +``` + +For timestamp UUIDs, namespace UUIDs, and other options read on ... + +## API Summary + +| | | | +| --- | --- | --- | +| [`uuid.NIL`](#uuidnil) | The nil UUID string (all zeros) | New in `uuid@8.3` | +| [`uuid.parse()`](#uuidparsestr) | Convert UUID string to array of bytes | New in `uuid@8.3` | +| [`uuid.stringify()`](#uuidstringifyarr-offset) | Convert array of bytes to UUID string | New in `uuid@8.3` | +| [`uuid.v1()`](#uuidv1options-buffer-offset) | Create a version 1 (timestamp) UUID | | +| [`uuid.v3()`](#uuidv3name-namespace-buffer-offset) | Create a version 3 (namespace w/ MD5) UUID | | +| [`uuid.v4()`](#uuidv4options-buffer-offset) | Create a version 4 (random) UUID | | +| [`uuid.v5()`](#uuidv5name-namespace-buffer-offset) | Create a version 5 (namespace w/ SHA-1) UUID | | +| [`uuid.validate()`](#uuidvalidatestr) | Test a string to see if it is a valid UUID | New in `uuid@8.3` | +| [`uuid.version()`](#uuidversionstr) | Detect RFC version of a UUID | New in `uuid@8.3` | + +## API + +### uuid.NIL + +The nil UUID string (all zeros). 
+ +Example: + +```javascript +import { NIL as NIL_UUID } from 'uuid'; + +NIL_UUID; // ⇨ '00000000-0000-0000-0000-000000000000' +``` + +### uuid.parse(str) + +Convert UUID string to array of bytes + +| | | +| --------- | ---------------------------------------- | +| `str` | A valid UUID `String` | +| _returns_ | `Uint8Array[16]` | +| _throws_ | `TypeError` if `str` is not a valid UUID | + +Note: Ordering of values in the byte arrays used by `parse()` and `stringify()` follows the left ↠ right order of hex-pairs in UUID strings. As shown in the example below. + +Example: + +```javascript +import { parse as uuidParse } from 'uuid'; + +// Parse a UUID +const bytes = uuidParse('6ec0bd7f-11c0-43da-975e-2a8ad9ebae0b'); + +// Convert to hex strings to show byte order (for documentation purposes) +[...bytes].map((v) => v.toString(16).padStart(2, '0')); // ⇨ + // [ + // '6e', 'c0', 'bd', '7f', + // '11', 'c0', '43', 'da', + // '97', '5e', '2a', '8a', + // 'd9', 'eb', 'ae', '0b' + // ] +``` + +### uuid.stringify(arr[, offset]) + +Convert array of bytes to UUID string + +| | | +| -------------- | ---------------------------------------------------------------------------- | +| `arr` | `Array`-like collection of 16 values (starting from `offset`) between 0-255. | +| [`offset` = 0] | `Number` Starting index in the Array | +| _returns_ | `String` | +| _throws_ | `TypeError` if a valid UUID string cannot be generated | + +Note: Ordering of values in the byte arrays used by `parse()` and `stringify()` follows the left ↠ right order of hex-pairs in UUID strings. As shown in the example below. 
+ +Example: + +```javascript +import { stringify as uuidStringify } from 'uuid'; + +const uuidBytes = [ + 0x6e, 0xc0, 0xbd, 0x7f, 0x11, 0xc0, 0x43, 0xda, 0x97, 0x5e, 0x2a, 0x8a, 0xd9, 0xeb, 0xae, 0x0b, +]; + +uuidStringify(uuidBytes); // ⇨ '6ec0bd7f-11c0-43da-975e-2a8ad9ebae0b' +``` + +### uuid.v1([options[, buffer[, offset]]]) + +Create an RFC version 1 (timestamp) UUID + +| | | +| --- | --- | +| [`options`] | `Object` with one or more of the following properties: | +| [`options.node` ] | RFC "node" field as an `Array[6]` of byte values (per 4.1.6) | +| [`options.clockseq`] | RFC "clock sequence" as a `Number` between 0 - 0x3fff | +| [`options.msecs`] | RFC "timestamp" field (`Number` of milliseconds, unix epoch) | +| [`options.nsecs`] | RFC "timestamp" field (`Number` of nanoseconds to add to `msecs`, should be 0-10,000) | +| [`options.random`] | `Array` of 16 random bytes (0-255) | +| [`options.rng`] | Alternative to `options.random`, a `Function` that returns an `Array` of 16 random bytes (0-255) | +| [`buffer`] | `Array \| Buffer` If specified, uuid will be written here in byte-form, starting at `offset` | +| [`offset` = 0] | `Number` Index to start writing UUID bytes in `buffer` | +| _returns_ | UUID `String` if no `buffer` is specified, otherwise returns `buffer` | +| _throws_ | `Error` if more than 10M UUIDs/sec are requested | + +Note: The default [node id](https://tools.ietf.org/html/rfc4122#section-4.1.6) (the last 12 digits in the UUID) is generated once, randomly, on process startup, and then remains unchanged for the duration of the process. + +Note: `options.random` and `options.rng` are only meaningful on the very first call to `v1()`, where they may be passed to initialize the internal `node` and `clockseq` fields. 
+ +Example: + +```javascript +import { v1 as uuidv1 } from 'uuid'; + +uuidv1(); // ⇨ '2c5ea4c0-4067-11e9-8bad-9b1deb4d3b7d' +``` + +Example using `options`: + +```javascript +import { v1 as uuidv1 } from 'uuid'; + +const v1options = { + node: [0x01, 0x23, 0x45, 0x67, 0x89, 0xab], + clockseq: 0x1234, + msecs: new Date('2011-11-01').getTime(), + nsecs: 5678, +}; +uuidv1(v1options); // ⇨ '710b962e-041c-11e1-9234-0123456789ab' +``` + +### uuid.v3(name, namespace[, buffer[, offset]]) + +Create an RFC version 3 (namespace w/ MD5) UUID + +API is identical to `v5()`, but uses "v3" instead. + +⚠️ Note: Per the RFC, "_If backward compatibility is not an issue, SHA-1 [Version 5] is preferred_." + +### uuid.v4([options[, buffer[, offset]]]) + +Create an RFC version 4 (random) UUID + +| | | +| --- | --- | +| [`options`] | `Object` with one or more of the following properties: | +| [`options.random`] | `Array` of 16 random bytes (0-255) | +| [`options.rng`] | Alternative to `options.random`, a `Function` that returns an `Array` of 16 random bytes (0-255) | +| [`buffer`] | `Array \| Buffer` If specified, uuid will be written here in byte-form, starting at `offset` | +| [`offset` = 0] | `Number` Index to start writing UUID bytes in `buffer` | +| _returns_ | UUID `String` if no `buffer` is specified, otherwise returns `buffer` | + +Example: + +```javascript +import { v4 as uuidv4 } from 'uuid'; + +uuidv4(); // ⇨ '1b9d6bcd-bbfd-4b2d-9b5d-ab8dfbbd4bed' +``` + +Example using predefined `random` values: + +```javascript +import { v4 as uuidv4 } from 'uuid'; + +const v4options = { + random: [ + 0x10, 0x91, 0x56, 0xbe, 0xc4, 0xfb, 0xc1, 0xea, 0x71, 0xb4, 0xef, 0xe1, 0x67, 0x1c, 0x58, 0x36, + ], +}; +uuidv4(v4options); // ⇨ '109156be-c4fb-41ea-b1b4-efe1671c5836' +``` + +### uuid.v5(name, namespace[, buffer[, offset]]) + +Create an RFC version 5 (namespace w/ SHA-1) UUID + +| | | +| --- | --- | +| `name` | `String \| Array` | +| `namespace` | `String \| Array[16]` Namespace UUID | +| 
[`buffer`] | `Array \| Buffer` If specified, uuid will be written here in byte-form, starting at `offset` | +| [`offset` = 0] | `Number` Index to start writing UUID bytes in `buffer` | +| _returns_ | UUID `String` if no `buffer` is specified, otherwise returns `buffer` | + +Note: The RFC `DNS` and `URL` namespaces are available as `v5.DNS` and `v5.URL`. + +Example with custom namespace: + +```javascript +import { v5 as uuidv5 } from 'uuid'; + +// Define a custom namespace. Readers, create your own using something like +// https://www.uuidgenerator.net/ +const MY_NAMESPACE = '1b671a64-40d5-491e-99b0-da01ff1f3341'; + +uuidv5('Hello, World!', MY_NAMESPACE); // ⇨ '630eb68f-e0fa-5ecc-887a-7c7a62614681' +``` + +Example with RFC `URL` namespace: + +```javascript +import { v5 as uuidv5 } from 'uuid'; + +uuidv5('https://www.w3.org/', uuidv5.URL); // ⇨ 'c106a26a-21bb-5538-8bf2-57095d1976c1' +``` + +### uuid.validate(str) + +Test a string to see if it is a valid UUID + +| | | +| --------- | --------------------------------------------------- | +| `str` | `String` to validate | +| _returns_ | `true` if string is a valid UUID, `false` otherwise | + +Example: + +```javascript +import { validate as uuidValidate } from 'uuid'; + +uuidValidate('not a UUID'); // ⇨ false +uuidValidate('6ec0bd7f-11c0-43da-975e-2a8ad9ebae0b'); // ⇨ true +``` + +Using `validate` and `version` together it is possible to do per-version validation, e.g. validate for only v4 UUIds. 
+ +```javascript +import { version as uuidVersion } from 'uuid'; +import { validate as uuidValidate } from 'uuid'; + +function uuidValidateV4(uuid) { + return uuidValidate(uuid) && uuidVersion(uuid) === 4; +} + +const v1Uuid = 'd9428888-122b-11e1-b85c-61cd3cbb3210'; +const v4Uuid = '109156be-c4fb-41ea-b1b4-efe1671c5836'; + +uuidValidateV4(v4Uuid); // ⇨ true +uuidValidateV4(v1Uuid); // ⇨ false +``` + +### uuid.version(str) + +Detect RFC version of a UUID + +| | | +| --------- | ---------------------------------------- | +| `str` | A valid UUID `String` | +| _returns_ | `Number` The RFC version of the UUID | +| _throws_ | `TypeError` if `str` is not a valid UUID | + +Example: + +```javascript +import { version as uuidVersion } from 'uuid'; + +uuidVersion('45637ec4-c85f-11ea-87d0-0242ac130003'); // ⇨ 1 +uuidVersion('6ec0bd7f-11c0-43da-975e-2a8ad9ebae0b'); // ⇨ 4 +``` + +## Command Line + +UUIDs can be generated from the command line using `uuid`. + +```shell +$ npx uuid +ddeb27fb-d9a0-4624-be4d-4615062daed4 +``` + +The default is to generate version 4 UUIDS, however the other versions are supported. Type `uuid --help` for details: + +```shell +$ npx uuid --help + +Usage: + uuid + uuid v1 + uuid v3 + uuid v4 + uuid v5 + uuid --help + +Note: may be "URL" or "DNS" to use the corresponding UUIDs +defined by RFC4122 +``` + +## ECMAScript Modules + +This library comes with [ECMAScript Modules](https://www.ecma-international.org/ecma-262/6.0/#sec-modules) (ESM) support for Node.js versions that support it ([example](./examples/node-esmodules/)) as well as bundlers like [rollup.js](https://rollupjs.org/guide/en/#tree-shaking) ([example](./examples/browser-rollup/)) and [webpack](https://webpack.js.org/guides/tree-shaking/) ([example](./examples/browser-webpack/)) (targeting both, Node.js and browser environments). 
+ +```javascript +import { v4 as uuidv4 } from 'uuid'; +uuidv4(); // ⇨ '1b9d6bcd-bbfd-4b2d-9b5d-ab8dfbbd4bed' +``` + +To run the examples you must first create a dist build of this library in the module root: + +```shell +npm run build +``` + +## CDN Builds + +### ECMAScript Modules + +To load this module directly into modern browsers that [support loading ECMAScript Modules](https://caniuse.com/#feat=es6-module) you can make use of [jspm](https://jspm.org/): + +```html + +``` + +### UMD + +As of `uuid@9` [UMD (Universal Module Definition)](https://github.com/umdjs/umd) builds are no longer shipped with this library. + +If you need a UMD build of this library, use a bundler like Webpack or Rollup. Alternatively, refer to the documentation of [`uuid@8.3.2`](https://github.com/uuidjs/uuid/blob/v8.3.2/README.md#umd) which was the last version that shipped UMD builds. + +## Known issues + +### Duplicate UUIDs (Googlebot) + +This module may generate duplicate UUIDs when run in clients with _deterministic_ random number generators, such as [Googlebot crawlers](https://developers.google.com/search/docs/advanced/crawling/overview-google-crawlers). This can cause problems for apps that expect client-generated UUIDs to always be unique. Developers should be prepared for this and have a strategy for dealing with possible collisions, such as: + +- Check for duplicate UUIDs, fail gracefully +- Disable write operations for Googlebot clients + +### "getRandomValues() not supported" + +This error occurs in environments where the standard [`crypto.getRandomValues()`](https://developer.mozilla.org/en-US/docs/Web/API/Crypto/getRandomValues) API is not supported. This issue can be resolved by adding an appropriate polyfill: + +### React Native / Expo + +1. Install [`react-native-get-random-values`](https://github.com/LinusU/react-native-get-random-values#readme) +1. Import it _before_ `uuid`. 
Since `uuid` might also appear as a transitive dependency of some other imports it's safest to just import `react-native-get-random-values` as the very first thing in your entry point: + +```javascript +import 'react-native-get-random-values'; +import { v4 as uuidv4 } from 'uuid'; +``` + +Note: If you are using Expo, you must be using at least `react-native-get-random-values@1.5.0` and `expo@39.0.0`. + +### Web Workers / Service Workers (Edge <= 18) + +[In Edge <= 18, Web Crypto is not supported in Web Workers or Service Workers](https://caniuse.com/#feat=cryptography) and we are not aware of a polyfill (let us know if you find one, please). + +### IE 11 (Internet Explorer) + +Support for IE11 and other legacy browsers has been dropped as of `uuid@9`. If you need to support legacy browsers, you can always transpile the uuid module source yourself (e.g. using [Babel](https://babeljs.io/)). + +## Upgrading From `uuid@7` + +### Only Named Exports Supported When Using with Node.js ESM + +`uuid@7` did not come with native ECMAScript Module (ESM) support for Node.js. Importing it in Node.js ESM consequently imported the CommonJS source with a default export. This library now comes with true Node.js ESM support and only provides named exports. + +Instead of doing: + +```javascript +import uuid from 'uuid'; +uuid.v4(); +``` + +you will now have to use the named exports: + +```javascript +import { v4 as uuidv4 } from 'uuid'; +uuidv4(); +``` + +### Deep Requires No Longer Supported + +Deep requires like `require('uuid/v4')` [which have been deprecated in `uuid@7`](#deep-requires-now-deprecated) are no longer supported. + +## Upgrading From `uuid@3` + +"_Wait... what happened to `uuid@4` thru `uuid@6`?!?_" + +In order to avoid confusion with RFC [version 4](#uuidv4options-buffer-offset) and [version 5](#uuidv5name-namespace-buffer-offset) UUIDs, and a possible [version 6](http://gh.peabody.io/uuidv6/), releases 4 thru 6 of this module have been skipped. 
+ +### Deep Requires Now Deprecated + +`uuid@3` encouraged the use of deep requires to minimize the bundle size of browser builds: + +```javascript +const uuidv4 = require('uuid/v4'); // <== NOW DEPRECATED! +uuidv4(); +``` + +As of `uuid@7` this library now provides ECMAScript modules builds, which allow packagers like Webpack and Rollup to do "tree-shaking" to remove dead code. Instead, use the `import` syntax: + +```javascript +import { v4 as uuidv4 } from 'uuid'; +uuidv4(); +``` + +... or for CommonJS: + +```javascript +const { v4: uuidv4 } = require('uuid'); +uuidv4(); +``` + +### Default Export Removed + +`uuid@3` was exporting the Version 4 UUID method as a default export: + +```javascript +const uuid = require('uuid'); // <== REMOVED! +``` + +This usage pattern was already discouraged in `uuid@3` and has been removed in `uuid@7`. + +--- + +Markdown generated from [README_js.md](README_js.md) by diff --git a/amplify/functions/deleteDocument/node_modules/uuid/dist/bin/uuid b/amplify/functions/deleteDocument/node_modules/uuid/dist/bin/uuid new file mode 100755 index 0000000..f38d2ee --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/uuid/dist/bin/uuid @@ -0,0 +1,2 @@ +#!/usr/bin/env node +require('../uuid-bin'); diff --git a/amplify/functions/deleteDocument/node_modules/uuid/dist/commonjs-browser/index.js b/amplify/functions/deleteDocument/node_modules/uuid/dist/commonjs-browser/index.js new file mode 100644 index 0000000..5586dd3 --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/uuid/dist/commonjs-browser/index.js @@ -0,0 +1,79 @@ +"use strict"; + +Object.defineProperty(exports, "__esModule", { + value: true +}); +Object.defineProperty(exports, "NIL", { + enumerable: true, + get: function get() { + return _nil.default; + } +}); +Object.defineProperty(exports, "parse", { + enumerable: true, + get: function get() { + return _parse.default; + } +}); +Object.defineProperty(exports, "stringify", { + enumerable: true, + get: function 
get() { + return _stringify.default; + } +}); +Object.defineProperty(exports, "v1", { + enumerable: true, + get: function get() { + return _v.default; + } +}); +Object.defineProperty(exports, "v3", { + enumerable: true, + get: function get() { + return _v2.default; + } +}); +Object.defineProperty(exports, "v4", { + enumerable: true, + get: function get() { + return _v3.default; + } +}); +Object.defineProperty(exports, "v5", { + enumerable: true, + get: function get() { + return _v4.default; + } +}); +Object.defineProperty(exports, "validate", { + enumerable: true, + get: function get() { + return _validate.default; + } +}); +Object.defineProperty(exports, "version", { + enumerable: true, + get: function get() { + return _version.default; + } +}); + +var _v = _interopRequireDefault(require("./v1.js")); + +var _v2 = _interopRequireDefault(require("./v3.js")); + +var _v3 = _interopRequireDefault(require("./v4.js")); + +var _v4 = _interopRequireDefault(require("./v5.js")); + +var _nil = _interopRequireDefault(require("./nil.js")); + +var _version = _interopRequireDefault(require("./version.js")); + +var _validate = _interopRequireDefault(require("./validate.js")); + +var _stringify = _interopRequireDefault(require("./stringify.js")); + +var _parse = _interopRequireDefault(require("./parse.js")); + +function _interopRequireDefault(obj) { return obj && obj.__esModule ? 
obj : { default: obj }; } \ No newline at end of file diff --git a/amplify/functions/deleteDocument/node_modules/uuid/dist/commonjs-browser/md5.js b/amplify/functions/deleteDocument/node_modules/uuid/dist/commonjs-browser/md5.js new file mode 100644 index 0000000..7a4582a --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/uuid/dist/commonjs-browser/md5.js @@ -0,0 +1,223 @@ +"use strict"; + +Object.defineProperty(exports, "__esModule", { + value: true +}); +exports.default = void 0; + +/* + * Browser-compatible JavaScript MD5 + * + * Modification of JavaScript MD5 + * https://github.com/blueimp/JavaScript-MD5 + * + * Copyright 2011, Sebastian Tschan + * https://blueimp.net + * + * Licensed under the MIT license: + * https://opensource.org/licenses/MIT + * + * Based on + * A JavaScript implementation of the RSA Data Security, Inc. MD5 Message + * Digest Algorithm, as defined in RFC 1321. + * Version 2.2 Copyright (C) Paul Johnston 1999 - 2009 + * Other contributors: Greg Holt, Andrew Kepert, Ydnar, Lostinet + * Distributed under the BSD License + * See http://pajhome.org.uk/crypt/md5 for more info. 
+ */ +function md5(bytes) { + if (typeof bytes === 'string') { + const msg = unescape(encodeURIComponent(bytes)); // UTF8 escape + + bytes = new Uint8Array(msg.length); + + for (let i = 0; i < msg.length; ++i) { + bytes[i] = msg.charCodeAt(i); + } + } + + return md5ToHexEncodedArray(wordsToMd5(bytesToWords(bytes), bytes.length * 8)); +} +/* + * Convert an array of little-endian words to an array of bytes + */ + + +function md5ToHexEncodedArray(input) { + const output = []; + const length32 = input.length * 32; + const hexTab = '0123456789abcdef'; + + for (let i = 0; i < length32; i += 8) { + const x = input[i >> 5] >>> i % 32 & 0xff; + const hex = parseInt(hexTab.charAt(x >>> 4 & 0x0f) + hexTab.charAt(x & 0x0f), 16); + output.push(hex); + } + + return output; +} +/** + * Calculate output length with padding and bit length + */ + + +function getOutputLength(inputLength8) { + return (inputLength8 + 64 >>> 9 << 4) + 14 + 1; +} +/* + * Calculate the MD5 of an array of little-endian words, and a bit length. 
+ */ + + +function wordsToMd5(x, len) { + /* append padding */ + x[len >> 5] |= 0x80 << len % 32; + x[getOutputLength(len) - 1] = len; + let a = 1732584193; + let b = -271733879; + let c = -1732584194; + let d = 271733878; + + for (let i = 0; i < x.length; i += 16) { + const olda = a; + const oldb = b; + const oldc = c; + const oldd = d; + a = md5ff(a, b, c, d, x[i], 7, -680876936); + d = md5ff(d, a, b, c, x[i + 1], 12, -389564586); + c = md5ff(c, d, a, b, x[i + 2], 17, 606105819); + b = md5ff(b, c, d, a, x[i + 3], 22, -1044525330); + a = md5ff(a, b, c, d, x[i + 4], 7, -176418897); + d = md5ff(d, a, b, c, x[i + 5], 12, 1200080426); + c = md5ff(c, d, a, b, x[i + 6], 17, -1473231341); + b = md5ff(b, c, d, a, x[i + 7], 22, -45705983); + a = md5ff(a, b, c, d, x[i + 8], 7, 1770035416); + d = md5ff(d, a, b, c, x[i + 9], 12, -1958414417); + c = md5ff(c, d, a, b, x[i + 10], 17, -42063); + b = md5ff(b, c, d, a, x[i + 11], 22, -1990404162); + a = md5ff(a, b, c, d, x[i + 12], 7, 1804603682); + d = md5ff(d, a, b, c, x[i + 13], 12, -40341101); + c = md5ff(c, d, a, b, x[i + 14], 17, -1502002290); + b = md5ff(b, c, d, a, x[i + 15], 22, 1236535329); + a = md5gg(a, b, c, d, x[i + 1], 5, -165796510); + d = md5gg(d, a, b, c, x[i + 6], 9, -1069501632); + c = md5gg(c, d, a, b, x[i + 11], 14, 643717713); + b = md5gg(b, c, d, a, x[i], 20, -373897302); + a = md5gg(a, b, c, d, x[i + 5], 5, -701558691); + d = md5gg(d, a, b, c, x[i + 10], 9, 38016083); + c = md5gg(c, d, a, b, x[i + 15], 14, -660478335); + b = md5gg(b, c, d, a, x[i + 4], 20, -405537848); + a = md5gg(a, b, c, d, x[i + 9], 5, 568446438); + d = md5gg(d, a, b, c, x[i + 14], 9, -1019803690); + c = md5gg(c, d, a, b, x[i + 3], 14, -187363961); + b = md5gg(b, c, d, a, x[i + 8], 20, 1163531501); + a = md5gg(a, b, c, d, x[i + 13], 5, -1444681467); + d = md5gg(d, a, b, c, x[i + 2], 9, -51403784); + c = md5gg(c, d, a, b, x[i + 7], 14, 1735328473); + b = md5gg(b, c, d, a, x[i + 12], 20, -1926607734); + a = md5hh(a, b, c, d, x[i + 5], 4, 
-378558); + d = md5hh(d, a, b, c, x[i + 8], 11, -2022574463); + c = md5hh(c, d, a, b, x[i + 11], 16, 1839030562); + b = md5hh(b, c, d, a, x[i + 14], 23, -35309556); + a = md5hh(a, b, c, d, x[i + 1], 4, -1530992060); + d = md5hh(d, a, b, c, x[i + 4], 11, 1272893353); + c = md5hh(c, d, a, b, x[i + 7], 16, -155497632); + b = md5hh(b, c, d, a, x[i + 10], 23, -1094730640); + a = md5hh(a, b, c, d, x[i + 13], 4, 681279174); + d = md5hh(d, a, b, c, x[i], 11, -358537222); + c = md5hh(c, d, a, b, x[i + 3], 16, -722521979); + b = md5hh(b, c, d, a, x[i + 6], 23, 76029189); + a = md5hh(a, b, c, d, x[i + 9], 4, -640364487); + d = md5hh(d, a, b, c, x[i + 12], 11, -421815835); + c = md5hh(c, d, a, b, x[i + 15], 16, 530742520); + b = md5hh(b, c, d, a, x[i + 2], 23, -995338651); + a = md5ii(a, b, c, d, x[i], 6, -198630844); + d = md5ii(d, a, b, c, x[i + 7], 10, 1126891415); + c = md5ii(c, d, a, b, x[i + 14], 15, -1416354905); + b = md5ii(b, c, d, a, x[i + 5], 21, -57434055); + a = md5ii(a, b, c, d, x[i + 12], 6, 1700485571); + d = md5ii(d, a, b, c, x[i + 3], 10, -1894986606); + c = md5ii(c, d, a, b, x[i + 10], 15, -1051523); + b = md5ii(b, c, d, a, x[i + 1], 21, -2054922799); + a = md5ii(a, b, c, d, x[i + 8], 6, 1873313359); + d = md5ii(d, a, b, c, x[i + 15], 10, -30611744); + c = md5ii(c, d, a, b, x[i + 6], 15, -1560198380); + b = md5ii(b, c, d, a, x[i + 13], 21, 1309151649); + a = md5ii(a, b, c, d, x[i + 4], 6, -145523070); + d = md5ii(d, a, b, c, x[i + 11], 10, -1120210379); + c = md5ii(c, d, a, b, x[i + 2], 15, 718787259); + b = md5ii(b, c, d, a, x[i + 9], 21, -343485551); + a = safeAdd(a, olda); + b = safeAdd(b, oldb); + c = safeAdd(c, oldc); + d = safeAdd(d, oldd); + } + + return [a, b, c, d]; +} +/* + * Convert an array bytes to an array of little-endian words + * Characters >255 have their high-byte silently ignored. 
+ */ + + +function bytesToWords(input) { + if (input.length === 0) { + return []; + } + + const length8 = input.length * 8; + const output = new Uint32Array(getOutputLength(length8)); + + for (let i = 0; i < length8; i += 8) { + output[i >> 5] |= (input[i / 8] & 0xff) << i % 32; + } + + return output; +} +/* + * Add integers, wrapping at 2^32. This uses 16-bit operations internally + * to work around bugs in some JS interpreters. + */ + + +function safeAdd(x, y) { + const lsw = (x & 0xffff) + (y & 0xffff); + const msw = (x >> 16) + (y >> 16) + (lsw >> 16); + return msw << 16 | lsw & 0xffff; +} +/* + * Bitwise rotate a 32-bit number to the left. + */ + + +function bitRotateLeft(num, cnt) { + return num << cnt | num >>> 32 - cnt; +} +/* + * These functions implement the four basic operations the algorithm uses. + */ + + +function md5cmn(q, a, b, x, s, t) { + return safeAdd(bitRotateLeft(safeAdd(safeAdd(a, q), safeAdd(x, t)), s), b); +} + +function md5ff(a, b, c, d, x, s, t) { + return md5cmn(b & c | ~b & d, a, b, x, s, t); +} + +function md5gg(a, b, c, d, x, s, t) { + return md5cmn(b & d | c & ~d, a, b, x, s, t); +} + +function md5hh(a, b, c, d, x, s, t) { + return md5cmn(b ^ c ^ d, a, b, x, s, t); +} + +function md5ii(a, b, c, d, x, s, t) { + return md5cmn(c ^ (b | ~d), a, b, x, s, t); +} + +var _default = md5; +exports.default = _default; \ No newline at end of file diff --git a/amplify/functions/deleteDocument/node_modules/uuid/dist/commonjs-browser/native.js b/amplify/functions/deleteDocument/node_modules/uuid/dist/commonjs-browser/native.js new file mode 100644 index 0000000..c2eea59 --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/uuid/dist/commonjs-browser/native.js @@ -0,0 +1,11 @@ +"use strict"; + +Object.defineProperty(exports, "__esModule", { + value: true +}); +exports.default = void 0; +const randomUUID = typeof crypto !== 'undefined' && crypto.randomUUID && crypto.randomUUID.bind(crypto); +var _default = { + randomUUID +}; 
+exports.default = _default; \ No newline at end of file diff --git a/amplify/functions/deleteDocument/node_modules/uuid/dist/commonjs-browser/nil.js b/amplify/functions/deleteDocument/node_modules/uuid/dist/commonjs-browser/nil.js new file mode 100644 index 0000000..7ade577 --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/uuid/dist/commonjs-browser/nil.js @@ -0,0 +1,8 @@ +"use strict"; + +Object.defineProperty(exports, "__esModule", { + value: true +}); +exports.default = void 0; +var _default = '00000000-0000-0000-0000-000000000000'; +exports.default = _default; \ No newline at end of file diff --git a/amplify/functions/deleteDocument/node_modules/uuid/dist/commonjs-browser/parse.js b/amplify/functions/deleteDocument/node_modules/uuid/dist/commonjs-browser/parse.js new file mode 100644 index 0000000..4c69fc3 --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/uuid/dist/commonjs-browser/parse.js @@ -0,0 +1,45 @@ +"use strict"; + +Object.defineProperty(exports, "__esModule", { + value: true +}); +exports.default = void 0; + +var _validate = _interopRequireDefault(require("./validate.js")); + +function _interopRequireDefault(obj) { return obj && obj.__esModule ? obj : { default: obj }; } + +function parse(uuid) { + if (!(0, _validate.default)(uuid)) { + throw TypeError('Invalid UUID'); + } + + let v; + const arr = new Uint8Array(16); // Parse ########-....-....-....-............ + + arr[0] = (v = parseInt(uuid.slice(0, 8), 16)) >>> 24; + arr[1] = v >>> 16 & 0xff; + arr[2] = v >>> 8 & 0xff; + arr[3] = v & 0xff; // Parse ........-####-....-....-............ + + arr[4] = (v = parseInt(uuid.slice(9, 13), 16)) >>> 8; + arr[5] = v & 0xff; // Parse ........-....-####-....-............ + + arr[6] = (v = parseInt(uuid.slice(14, 18), 16)) >>> 8; + arr[7] = v & 0xff; // Parse ........-....-....-####-............ 
+ + arr[8] = (v = parseInt(uuid.slice(19, 23), 16)) >>> 8; + arr[9] = v & 0xff; // Parse ........-....-....-....-############ + // (Use "/" to avoid 32-bit truncation when bit-shifting high-order bytes) + + arr[10] = (v = parseInt(uuid.slice(24, 36), 16)) / 0x10000000000 & 0xff; + arr[11] = v / 0x100000000 & 0xff; + arr[12] = v >>> 24 & 0xff; + arr[13] = v >>> 16 & 0xff; + arr[14] = v >>> 8 & 0xff; + arr[15] = v & 0xff; + return arr; +} + +var _default = parse; +exports.default = _default; \ No newline at end of file diff --git a/amplify/functions/deleteDocument/node_modules/uuid/dist/commonjs-browser/regex.js b/amplify/functions/deleteDocument/node_modules/uuid/dist/commonjs-browser/regex.js new file mode 100644 index 0000000..1ef91d6 --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/uuid/dist/commonjs-browser/regex.js @@ -0,0 +1,8 @@ +"use strict"; + +Object.defineProperty(exports, "__esModule", { + value: true +}); +exports.default = void 0; +var _default = /^(?:[0-9a-f]{8}-[0-9a-f]{4}-[1-5][0-9a-f]{3}-[89ab][0-9a-f]{3}-[0-9a-f]{12}|00000000-0000-0000-0000-000000000000)$/i; +exports.default = _default; \ No newline at end of file diff --git a/amplify/functions/deleteDocument/node_modules/uuid/dist/commonjs-browser/rng.js b/amplify/functions/deleteDocument/node_modules/uuid/dist/commonjs-browser/rng.js new file mode 100644 index 0000000..d067cdb --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/uuid/dist/commonjs-browser/rng.js @@ -0,0 +1,25 @@ +"use strict"; + +Object.defineProperty(exports, "__esModule", { + value: true +}); +exports.default = rng; +// Unique ID creation requires a high quality random # generator. In the browser we therefore +// require the crypto API and do not support built-in fallback to lower quality random number +// generators (like Math.random()). 
+let getRandomValues; +const rnds8 = new Uint8Array(16); + +function rng() { + // lazy load so that environments that need to polyfill have a chance to do so + if (!getRandomValues) { + // getRandomValues needs to be invoked in a context where "this" is a Crypto implementation. + getRandomValues = typeof crypto !== 'undefined' && crypto.getRandomValues && crypto.getRandomValues.bind(crypto); + + if (!getRandomValues) { + throw new Error('crypto.getRandomValues() not supported. See https://github.com/uuidjs/uuid#getrandomvalues-not-supported'); + } + } + + return getRandomValues(rnds8); +} \ No newline at end of file diff --git a/amplify/functions/deleteDocument/node_modules/uuid/dist/commonjs-browser/sha1.js b/amplify/functions/deleteDocument/node_modules/uuid/dist/commonjs-browser/sha1.js new file mode 100644 index 0000000..24cbced --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/uuid/dist/commonjs-browser/sha1.js @@ -0,0 +1,104 @@ +"use strict"; + +Object.defineProperty(exports, "__esModule", { + value: true +}); +exports.default = void 0; + +// Adapted from Chris Veness' SHA1 code at +// http://www.movable-type.co.uk/scripts/sha1.html +function f(s, x, y, z) { + switch (s) { + case 0: + return x & y ^ ~x & z; + + case 1: + return x ^ y ^ z; + + case 2: + return x & y ^ x & z ^ y & z; + + case 3: + return x ^ y ^ z; + } +} + +function ROTL(x, n) { + return x << n | x >>> 32 - n; +} + +function sha1(bytes) { + const K = [0x5a827999, 0x6ed9eba1, 0x8f1bbcdc, 0xca62c1d6]; + const H = [0x67452301, 0xefcdab89, 0x98badcfe, 0x10325476, 0xc3d2e1f0]; + + if (typeof bytes === 'string') { + const msg = unescape(encodeURIComponent(bytes)); // UTF8 escape + + bytes = []; + + for (let i = 0; i < msg.length; ++i) { + bytes.push(msg.charCodeAt(i)); + } + } else if (!Array.isArray(bytes)) { + // Convert Array-like to Array + bytes = Array.prototype.slice.call(bytes); + } + + bytes.push(0x80); + const l = bytes.length / 4 + 2; + const N = Math.ceil(l / 16); + const 
M = new Array(N); + + for (let i = 0; i < N; ++i) { + const arr = new Uint32Array(16); + + for (let j = 0; j < 16; ++j) { + arr[j] = bytes[i * 64 + j * 4] << 24 | bytes[i * 64 + j * 4 + 1] << 16 | bytes[i * 64 + j * 4 + 2] << 8 | bytes[i * 64 + j * 4 + 3]; + } + + M[i] = arr; + } + + M[N - 1][14] = (bytes.length - 1) * 8 / Math.pow(2, 32); + M[N - 1][14] = Math.floor(M[N - 1][14]); + M[N - 1][15] = (bytes.length - 1) * 8 & 0xffffffff; + + for (let i = 0; i < N; ++i) { + const W = new Uint32Array(80); + + for (let t = 0; t < 16; ++t) { + W[t] = M[i][t]; + } + + for (let t = 16; t < 80; ++t) { + W[t] = ROTL(W[t - 3] ^ W[t - 8] ^ W[t - 14] ^ W[t - 16], 1); + } + + let a = H[0]; + let b = H[1]; + let c = H[2]; + let d = H[3]; + let e = H[4]; + + for (let t = 0; t < 80; ++t) { + const s = Math.floor(t / 20); + const T = ROTL(a, 5) + f(s, b, c, d) + e + K[s] + W[t] >>> 0; + e = d; + d = c; + c = ROTL(b, 30) >>> 0; + b = a; + a = T; + } + + H[0] = H[0] + a >>> 0; + H[1] = H[1] + b >>> 0; + H[2] = H[2] + c >>> 0; + H[3] = H[3] + d >>> 0; + H[4] = H[4] + e >>> 0; + } + + return [H[0] >> 24 & 0xff, H[0] >> 16 & 0xff, H[0] >> 8 & 0xff, H[0] & 0xff, H[1] >> 24 & 0xff, H[1] >> 16 & 0xff, H[1] >> 8 & 0xff, H[1] & 0xff, H[2] >> 24 & 0xff, H[2] >> 16 & 0xff, H[2] >> 8 & 0xff, H[2] & 0xff, H[3] >> 24 & 0xff, H[3] >> 16 & 0xff, H[3] >> 8 & 0xff, H[3] & 0xff, H[4] >> 24 & 0xff, H[4] >> 16 & 0xff, H[4] >> 8 & 0xff, H[4] & 0xff]; +} + +var _default = sha1; +exports.default = _default; \ No newline at end of file diff --git a/amplify/functions/deleteDocument/node_modules/uuid/dist/commonjs-browser/stringify.js b/amplify/functions/deleteDocument/node_modules/uuid/dist/commonjs-browser/stringify.js new file mode 100644 index 0000000..390bf89 --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/uuid/dist/commonjs-browser/stringify.js @@ -0,0 +1,44 @@ +"use strict"; + +Object.defineProperty(exports, "__esModule", { + value: true +}); +exports.default = void 0; 
+exports.unsafeStringify = unsafeStringify; + +var _validate = _interopRequireDefault(require("./validate.js")); + +function _interopRequireDefault(obj) { return obj && obj.__esModule ? obj : { default: obj }; } + +/** + * Convert array of 16 byte values to UUID string format of the form: + * XXXXXXXX-XXXX-XXXX-XXXX-XXXXXXXXXXXX + */ +const byteToHex = []; + +for (let i = 0; i < 256; ++i) { + byteToHex.push((i + 0x100).toString(16).slice(1)); +} + +function unsafeStringify(arr, offset = 0) { + // Note: Be careful editing this code! It's been tuned for performance + // and works in ways you may not expect. See https://github.com/uuidjs/uuid/pull/434 + return byteToHex[arr[offset + 0]] + byteToHex[arr[offset + 1]] + byteToHex[arr[offset + 2]] + byteToHex[arr[offset + 3]] + '-' + byteToHex[arr[offset + 4]] + byteToHex[arr[offset + 5]] + '-' + byteToHex[arr[offset + 6]] + byteToHex[arr[offset + 7]] + '-' + byteToHex[arr[offset + 8]] + byteToHex[arr[offset + 9]] + '-' + byteToHex[arr[offset + 10]] + byteToHex[arr[offset + 11]] + byteToHex[arr[offset + 12]] + byteToHex[arr[offset + 13]] + byteToHex[arr[offset + 14]] + byteToHex[arr[offset + 15]]; +} + +function stringify(arr, offset = 0) { + const uuid = unsafeStringify(arr, offset); // Consistency check for valid UUID. 
If this throws, it's likely due to one + // of the following: + // - One or more input array values don't map to a hex octet (leading to + // "undefined" in the uuid) + // - Invalid input values for the RFC `version` or `variant` fields + + if (!(0, _validate.default)(uuid)) { + throw TypeError('Stringified UUID is invalid'); + } + + return uuid; +} + +var _default = stringify; +exports.default = _default; \ No newline at end of file diff --git a/amplify/functions/deleteDocument/node_modules/uuid/dist/commonjs-browser/v1.js b/amplify/functions/deleteDocument/node_modules/uuid/dist/commonjs-browser/v1.js new file mode 100644 index 0000000..125bc58 --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/uuid/dist/commonjs-browser/v1.js @@ -0,0 +1,107 @@ +"use strict"; + +Object.defineProperty(exports, "__esModule", { + value: true +}); +exports.default = void 0; + +var _rng = _interopRequireDefault(require("./rng.js")); + +var _stringify = require("./stringify.js"); + +function _interopRequireDefault(obj) { return obj && obj.__esModule ? obj : { default: obj }; } + +// **`v1()` - Generate time-based UUID** +// +// Inspired by https://github.com/LiosK/UUID.js +// and http://docs.python.org/library/uuid.html +let _nodeId; + +let _clockseq; // Previous uuid creation time + + +let _lastMSecs = 0; +let _lastNSecs = 0; // See https://github.com/uuidjs/uuid for API details + +function v1(options, buf, offset) { + let i = buf && offset || 0; + const b = buf || new Array(16); + options = options || {}; + let node = options.node || _nodeId; + let clockseq = options.clockseq !== undefined ? options.clockseq : _clockseq; // node and clockseq need to be initialized to random values if they're not + // specified. We do this lazily to minimize issues related to insufficient + // system entropy. 
See #189 + + if (node == null || clockseq == null) { + const seedBytes = options.random || (options.rng || _rng.default)(); + + if (node == null) { + // Per 4.5, create and 48-bit node id, (47 random bits + multicast bit = 1) + node = _nodeId = [seedBytes[0] | 0x01, seedBytes[1], seedBytes[2], seedBytes[3], seedBytes[4], seedBytes[5]]; + } + + if (clockseq == null) { + // Per 4.2.2, randomize (14 bit) clockseq + clockseq = _clockseq = (seedBytes[6] << 8 | seedBytes[7]) & 0x3fff; + } + } // UUID timestamps are 100 nano-second units since the Gregorian epoch, + // (1582-10-15 00:00). JSNumbers aren't precise enough for this, so + // time is handled internally as 'msecs' (integer milliseconds) and 'nsecs' + // (100-nanoseconds offset from msecs) since unix epoch, 1970-01-01 00:00. + + + let msecs = options.msecs !== undefined ? options.msecs : Date.now(); // Per 4.2.1.2, use count of uuid's generated during the current clock + // cycle to simulate higher resolution clock + + let nsecs = options.nsecs !== undefined ? 
options.nsecs : _lastNSecs + 1; // Time since last uuid creation (in msecs) + + const dt = msecs - _lastMSecs + (nsecs - _lastNSecs) / 10000; // Per 4.2.1.2, Bump clockseq on clock regression + + if (dt < 0 && options.clockseq === undefined) { + clockseq = clockseq + 1 & 0x3fff; + } // Reset nsecs if clock regresses (new clockseq) or we've moved onto a new + // time interval + + + if ((dt < 0 || msecs > _lastMSecs) && options.nsecs === undefined) { + nsecs = 0; + } // Per 4.2.1.2 Throw error if too many uuids are requested + + + if (nsecs >= 10000) { + throw new Error("uuid.v1(): Can't create more than 10M uuids/sec"); + } + + _lastMSecs = msecs; + _lastNSecs = nsecs; + _clockseq = clockseq; // Per 4.1.4 - Convert from unix epoch to Gregorian epoch + + msecs += 12219292800000; // `time_low` + + const tl = ((msecs & 0xfffffff) * 10000 + nsecs) % 0x100000000; + b[i++] = tl >>> 24 & 0xff; + b[i++] = tl >>> 16 & 0xff; + b[i++] = tl >>> 8 & 0xff; + b[i++] = tl & 0xff; // `time_mid` + + const tmh = msecs / 0x100000000 * 10000 & 0xfffffff; + b[i++] = tmh >>> 8 & 0xff; + b[i++] = tmh & 0xff; // `time_high_and_version` + + b[i++] = tmh >>> 24 & 0xf | 0x10; // include version + + b[i++] = tmh >>> 16 & 0xff; // `clock_seq_hi_and_reserved` (Per 4.2.2 - include variant) + + b[i++] = clockseq >>> 8 | 0x80; // `clock_seq_low` + + b[i++] = clockseq & 0xff; // `node` + + for (let n = 0; n < 6; ++n) { + b[i + n] = node[n]; + } + + return buf || (0, _stringify.unsafeStringify)(b); +} + +var _default = v1; +exports.default = _default; \ No newline at end of file diff --git a/amplify/functions/deleteDocument/node_modules/uuid/dist/commonjs-browser/v3.js b/amplify/functions/deleteDocument/node_modules/uuid/dist/commonjs-browser/v3.js new file mode 100644 index 0000000..6b47ff5 --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/uuid/dist/commonjs-browser/v3.js @@ -0,0 +1,16 @@ +"use strict"; + +Object.defineProperty(exports, "__esModule", { + value: true +}); 
+exports.default = void 0; + +var _v = _interopRequireDefault(require("./v35.js")); + +var _md = _interopRequireDefault(require("./md5.js")); + +function _interopRequireDefault(obj) { return obj && obj.__esModule ? obj : { default: obj }; } + +const v3 = (0, _v.default)('v3', 0x30, _md.default); +var _default = v3; +exports.default = _default; \ No newline at end of file diff --git a/amplify/functions/deleteDocument/node_modules/uuid/dist/commonjs-browser/v35.js b/amplify/functions/deleteDocument/node_modules/uuid/dist/commonjs-browser/v35.js new file mode 100644 index 0000000..7c522d9 --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/uuid/dist/commonjs-browser/v35.js @@ -0,0 +1,80 @@ +"use strict"; + +Object.defineProperty(exports, "__esModule", { + value: true +}); +exports.URL = exports.DNS = void 0; +exports.default = v35; + +var _stringify = require("./stringify.js"); + +var _parse = _interopRequireDefault(require("./parse.js")); + +function _interopRequireDefault(obj) { return obj && obj.__esModule ? obj : { default: obj }; } + +function stringToBytes(str) { + str = unescape(encodeURIComponent(str)); // UTF8 escape + + const bytes = []; + + for (let i = 0; i < str.length; ++i) { + bytes.push(str.charCodeAt(i)); + } + + return bytes; +} + +const DNS = '6ba7b810-9dad-11d1-80b4-00c04fd430c8'; +exports.DNS = DNS; +const URL = '6ba7b811-9dad-11d1-80b4-00c04fd430c8'; +exports.URL = URL; + +function v35(name, version, hashfunc) { + function generateUUID(value, namespace, buf, offset) { + var _namespace; + + if (typeof value === 'string') { + value = stringToBytes(value); + } + + if (typeof namespace === 'string') { + namespace = (0, _parse.default)(namespace); + } + + if (((_namespace = namespace) === null || _namespace === void 0 ? 
void 0 : _namespace.length) !== 16) { + throw TypeError('Namespace must be array-like (16 iterable integer values, 0-255)'); + } // Compute hash of namespace and value, Per 4.3 + // Future: Use spread syntax when supported on all platforms, e.g. `bytes = + // hashfunc([...namespace, ... value])` + + + let bytes = new Uint8Array(16 + value.length); + bytes.set(namespace); + bytes.set(value, namespace.length); + bytes = hashfunc(bytes); + bytes[6] = bytes[6] & 0x0f | version; + bytes[8] = bytes[8] & 0x3f | 0x80; + + if (buf) { + offset = offset || 0; + + for (let i = 0; i < 16; ++i) { + buf[offset + i] = bytes[i]; + } + + return buf; + } + + return (0, _stringify.unsafeStringify)(bytes); + } // Function#name is not settable on some platforms (#270) + + + try { + generateUUID.name = name; // eslint-disable-next-line no-empty + } catch (err) {} // For CommonJS default export support + + + generateUUID.DNS = DNS; + generateUUID.URL = URL; + return generateUUID; +} \ No newline at end of file diff --git a/amplify/functions/deleteDocument/node_modules/uuid/dist/commonjs-browser/v4.js b/amplify/functions/deleteDocument/node_modules/uuid/dist/commonjs-browser/v4.js new file mode 100644 index 0000000..959d698 --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/uuid/dist/commonjs-browser/v4.js @@ -0,0 +1,43 @@ +"use strict"; + +Object.defineProperty(exports, "__esModule", { + value: true +}); +exports.default = void 0; + +var _native = _interopRequireDefault(require("./native.js")); + +var _rng = _interopRequireDefault(require("./rng.js")); + +var _stringify = require("./stringify.js"); + +function _interopRequireDefault(obj) { return obj && obj.__esModule ? 
obj : { default: obj }; } + +function v4(options, buf, offset) { + if (_native.default.randomUUID && !buf && !options) { + return _native.default.randomUUID(); + } + + options = options || {}; + + const rnds = options.random || (options.rng || _rng.default)(); // Per 4.4, set bits for version and `clock_seq_hi_and_reserved` + + + rnds[6] = rnds[6] & 0x0f | 0x40; + rnds[8] = rnds[8] & 0x3f | 0x80; // Copy bytes to buffer, if provided + + if (buf) { + offset = offset || 0; + + for (let i = 0; i < 16; ++i) { + buf[offset + i] = rnds[i]; + } + + return buf; + } + + return (0, _stringify.unsafeStringify)(rnds); +} + +var _default = v4; +exports.default = _default; \ No newline at end of file diff --git a/amplify/functions/deleteDocument/node_modules/uuid/dist/commonjs-browser/v5.js b/amplify/functions/deleteDocument/node_modules/uuid/dist/commonjs-browser/v5.js new file mode 100644 index 0000000..99d615e --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/uuid/dist/commonjs-browser/v5.js @@ -0,0 +1,16 @@ +"use strict"; + +Object.defineProperty(exports, "__esModule", { + value: true +}); +exports.default = void 0; + +var _v = _interopRequireDefault(require("./v35.js")); + +var _sha = _interopRequireDefault(require("./sha1.js")); + +function _interopRequireDefault(obj) { return obj && obj.__esModule ? 
obj : { default: obj }; } + +const v5 = (0, _v.default)('v5', 0x50, _sha.default); +var _default = v5; +exports.default = _default; \ No newline at end of file diff --git a/amplify/functions/deleteDocument/node_modules/uuid/dist/commonjs-browser/validate.js b/amplify/functions/deleteDocument/node_modules/uuid/dist/commonjs-browser/validate.js new file mode 100644 index 0000000..fd05215 --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/uuid/dist/commonjs-browser/validate.js @@ -0,0 +1,17 @@ +"use strict"; + +Object.defineProperty(exports, "__esModule", { + value: true +}); +exports.default = void 0; + +var _regex = _interopRequireDefault(require("./regex.js")); + +function _interopRequireDefault(obj) { return obj && obj.__esModule ? obj : { default: obj }; } + +function validate(uuid) { + return typeof uuid === 'string' && _regex.default.test(uuid); +} + +var _default = validate; +exports.default = _default; \ No newline at end of file diff --git a/amplify/functions/deleteDocument/node_modules/uuid/dist/commonjs-browser/version.js b/amplify/functions/deleteDocument/node_modules/uuid/dist/commonjs-browser/version.js new file mode 100644 index 0000000..f63af01 --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/uuid/dist/commonjs-browser/version.js @@ -0,0 +1,21 @@ +"use strict"; + +Object.defineProperty(exports, "__esModule", { + value: true +}); +exports.default = void 0; + +var _validate = _interopRequireDefault(require("./validate.js")); + +function _interopRequireDefault(obj) { return obj && obj.__esModule ? 
obj : { default: obj }; } + +function version(uuid) { + if (!(0, _validate.default)(uuid)) { + throw TypeError('Invalid UUID'); + } + + return parseInt(uuid.slice(14, 15), 16); +} + +var _default = version; +exports.default = _default; \ No newline at end of file diff --git a/amplify/functions/deleteDocument/node_modules/uuid/dist/esm-browser/index.js b/amplify/functions/deleteDocument/node_modules/uuid/dist/esm-browser/index.js new file mode 100644 index 0000000..1db6f6d --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/uuid/dist/esm-browser/index.js @@ -0,0 +1,9 @@ +export { default as v1 } from './v1.js'; +export { default as v3 } from './v3.js'; +export { default as v4 } from './v4.js'; +export { default as v5 } from './v5.js'; +export { default as NIL } from './nil.js'; +export { default as version } from './version.js'; +export { default as validate } from './validate.js'; +export { default as stringify } from './stringify.js'; +export { default as parse } from './parse.js'; \ No newline at end of file diff --git a/amplify/functions/deleteDocument/node_modules/uuid/dist/esm-browser/md5.js b/amplify/functions/deleteDocument/node_modules/uuid/dist/esm-browser/md5.js new file mode 100644 index 0000000..f12212e --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/uuid/dist/esm-browser/md5.js @@ -0,0 +1,215 @@ +/* + * Browser-compatible JavaScript MD5 + * + * Modification of JavaScript MD5 + * https://github.com/blueimp/JavaScript-MD5 + * + * Copyright 2011, Sebastian Tschan + * https://blueimp.net + * + * Licensed under the MIT license: + * https://opensource.org/licenses/MIT + * + * Based on + * A JavaScript implementation of the RSA Data Security, Inc. MD5 Message + * Digest Algorithm, as defined in RFC 1321. + * Version 2.2 Copyright (C) Paul Johnston 1999 - 2009 + * Other contributors: Greg Holt, Andrew Kepert, Ydnar, Lostinet + * Distributed under the BSD License + * See http://pajhome.org.uk/crypt/md5 for more info. 
+ */ +function md5(bytes) { + if (typeof bytes === 'string') { + const msg = unescape(encodeURIComponent(bytes)); // UTF8 escape + + bytes = new Uint8Array(msg.length); + + for (let i = 0; i < msg.length; ++i) { + bytes[i] = msg.charCodeAt(i); + } + } + + return md5ToHexEncodedArray(wordsToMd5(bytesToWords(bytes), bytes.length * 8)); +} +/* + * Convert an array of little-endian words to an array of bytes + */ + + +function md5ToHexEncodedArray(input) { + const output = []; + const length32 = input.length * 32; + const hexTab = '0123456789abcdef'; + + for (let i = 0; i < length32; i += 8) { + const x = input[i >> 5] >>> i % 32 & 0xff; + const hex = parseInt(hexTab.charAt(x >>> 4 & 0x0f) + hexTab.charAt(x & 0x0f), 16); + output.push(hex); + } + + return output; +} +/** + * Calculate output length with padding and bit length + */ + + +function getOutputLength(inputLength8) { + return (inputLength8 + 64 >>> 9 << 4) + 14 + 1; +} +/* + * Calculate the MD5 of an array of little-endian words, and a bit length. 
+ */ + + +function wordsToMd5(x, len) { + /* append padding */ + x[len >> 5] |= 0x80 << len % 32; + x[getOutputLength(len) - 1] = len; + let a = 1732584193; + let b = -271733879; + let c = -1732584194; + let d = 271733878; + + for (let i = 0; i < x.length; i += 16) { + const olda = a; + const oldb = b; + const oldc = c; + const oldd = d; + a = md5ff(a, b, c, d, x[i], 7, -680876936); + d = md5ff(d, a, b, c, x[i + 1], 12, -389564586); + c = md5ff(c, d, a, b, x[i + 2], 17, 606105819); + b = md5ff(b, c, d, a, x[i + 3], 22, -1044525330); + a = md5ff(a, b, c, d, x[i + 4], 7, -176418897); + d = md5ff(d, a, b, c, x[i + 5], 12, 1200080426); + c = md5ff(c, d, a, b, x[i + 6], 17, -1473231341); + b = md5ff(b, c, d, a, x[i + 7], 22, -45705983); + a = md5ff(a, b, c, d, x[i + 8], 7, 1770035416); + d = md5ff(d, a, b, c, x[i + 9], 12, -1958414417); + c = md5ff(c, d, a, b, x[i + 10], 17, -42063); + b = md5ff(b, c, d, a, x[i + 11], 22, -1990404162); + a = md5ff(a, b, c, d, x[i + 12], 7, 1804603682); + d = md5ff(d, a, b, c, x[i + 13], 12, -40341101); + c = md5ff(c, d, a, b, x[i + 14], 17, -1502002290); + b = md5ff(b, c, d, a, x[i + 15], 22, 1236535329); + a = md5gg(a, b, c, d, x[i + 1], 5, -165796510); + d = md5gg(d, a, b, c, x[i + 6], 9, -1069501632); + c = md5gg(c, d, a, b, x[i + 11], 14, 643717713); + b = md5gg(b, c, d, a, x[i], 20, -373897302); + a = md5gg(a, b, c, d, x[i + 5], 5, -701558691); + d = md5gg(d, a, b, c, x[i + 10], 9, 38016083); + c = md5gg(c, d, a, b, x[i + 15], 14, -660478335); + b = md5gg(b, c, d, a, x[i + 4], 20, -405537848); + a = md5gg(a, b, c, d, x[i + 9], 5, 568446438); + d = md5gg(d, a, b, c, x[i + 14], 9, -1019803690); + c = md5gg(c, d, a, b, x[i + 3], 14, -187363961); + b = md5gg(b, c, d, a, x[i + 8], 20, 1163531501); + a = md5gg(a, b, c, d, x[i + 13], 5, -1444681467); + d = md5gg(d, a, b, c, x[i + 2], 9, -51403784); + c = md5gg(c, d, a, b, x[i + 7], 14, 1735328473); + b = md5gg(b, c, d, a, x[i + 12], 20, -1926607734); + a = md5hh(a, b, c, d, x[i + 5], 4, 
-378558); + d = md5hh(d, a, b, c, x[i + 8], 11, -2022574463); + c = md5hh(c, d, a, b, x[i + 11], 16, 1839030562); + b = md5hh(b, c, d, a, x[i + 14], 23, -35309556); + a = md5hh(a, b, c, d, x[i + 1], 4, -1530992060); + d = md5hh(d, a, b, c, x[i + 4], 11, 1272893353); + c = md5hh(c, d, a, b, x[i + 7], 16, -155497632); + b = md5hh(b, c, d, a, x[i + 10], 23, -1094730640); + a = md5hh(a, b, c, d, x[i + 13], 4, 681279174); + d = md5hh(d, a, b, c, x[i], 11, -358537222); + c = md5hh(c, d, a, b, x[i + 3], 16, -722521979); + b = md5hh(b, c, d, a, x[i + 6], 23, 76029189); + a = md5hh(a, b, c, d, x[i + 9], 4, -640364487); + d = md5hh(d, a, b, c, x[i + 12], 11, -421815835); + c = md5hh(c, d, a, b, x[i + 15], 16, 530742520); + b = md5hh(b, c, d, a, x[i + 2], 23, -995338651); + a = md5ii(a, b, c, d, x[i], 6, -198630844); + d = md5ii(d, a, b, c, x[i + 7], 10, 1126891415); + c = md5ii(c, d, a, b, x[i + 14], 15, -1416354905); + b = md5ii(b, c, d, a, x[i + 5], 21, -57434055); + a = md5ii(a, b, c, d, x[i + 12], 6, 1700485571); + d = md5ii(d, a, b, c, x[i + 3], 10, -1894986606); + c = md5ii(c, d, a, b, x[i + 10], 15, -1051523); + b = md5ii(b, c, d, a, x[i + 1], 21, -2054922799); + a = md5ii(a, b, c, d, x[i + 8], 6, 1873313359); + d = md5ii(d, a, b, c, x[i + 15], 10, -30611744); + c = md5ii(c, d, a, b, x[i + 6], 15, -1560198380); + b = md5ii(b, c, d, a, x[i + 13], 21, 1309151649); + a = md5ii(a, b, c, d, x[i + 4], 6, -145523070); + d = md5ii(d, a, b, c, x[i + 11], 10, -1120210379); + c = md5ii(c, d, a, b, x[i + 2], 15, 718787259); + b = md5ii(b, c, d, a, x[i + 9], 21, -343485551); + a = safeAdd(a, olda); + b = safeAdd(b, oldb); + c = safeAdd(c, oldc); + d = safeAdd(d, oldd); + } + + return [a, b, c, d]; +} +/* + * Convert an array bytes to an array of little-endian words + * Characters >255 have their high-byte silently ignored. 
+ */ + + +function bytesToWords(input) { + if (input.length === 0) { + return []; + } + + const length8 = input.length * 8; + const output = new Uint32Array(getOutputLength(length8)); + + for (let i = 0; i < length8; i += 8) { + output[i >> 5] |= (input[i / 8] & 0xff) << i % 32; + } + + return output; +} +/* + * Add integers, wrapping at 2^32. This uses 16-bit operations internally + * to work around bugs in some JS interpreters. + */ + + +function safeAdd(x, y) { + const lsw = (x & 0xffff) + (y & 0xffff); + const msw = (x >> 16) + (y >> 16) + (lsw >> 16); + return msw << 16 | lsw & 0xffff; +} +/* + * Bitwise rotate a 32-bit number to the left. + */ + + +function bitRotateLeft(num, cnt) { + return num << cnt | num >>> 32 - cnt; +} +/* + * These functions implement the four basic operations the algorithm uses. + */ + + +function md5cmn(q, a, b, x, s, t) { + return safeAdd(bitRotateLeft(safeAdd(safeAdd(a, q), safeAdd(x, t)), s), b); +} + +function md5ff(a, b, c, d, x, s, t) { + return md5cmn(b & c | ~b & d, a, b, x, s, t); +} + +function md5gg(a, b, c, d, x, s, t) { + return md5cmn(b & d | c & ~d, a, b, x, s, t); +} + +function md5hh(a, b, c, d, x, s, t) { + return md5cmn(b ^ c ^ d, a, b, x, s, t); +} + +function md5ii(a, b, c, d, x, s, t) { + return md5cmn(c ^ (b | ~d), a, b, x, s, t); +} + +export default md5; \ No newline at end of file diff --git a/amplify/functions/deleteDocument/node_modules/uuid/dist/esm-browser/native.js b/amplify/functions/deleteDocument/node_modules/uuid/dist/esm-browser/native.js new file mode 100644 index 0000000..b22292c --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/uuid/dist/esm-browser/native.js @@ -0,0 +1,4 @@ +const randomUUID = typeof crypto !== 'undefined' && crypto.randomUUID && crypto.randomUUID.bind(crypto); +export default { + randomUUID +}; \ No newline at end of file diff --git a/amplify/functions/deleteDocument/node_modules/uuid/dist/esm-browser/nil.js 
b/amplify/functions/deleteDocument/node_modules/uuid/dist/esm-browser/nil.js new file mode 100644 index 0000000..b36324c --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/uuid/dist/esm-browser/nil.js @@ -0,0 +1 @@ +export default '00000000-0000-0000-0000-000000000000'; \ No newline at end of file diff --git a/amplify/functions/deleteDocument/node_modules/uuid/dist/esm-browser/parse.js b/amplify/functions/deleteDocument/node_modules/uuid/dist/esm-browser/parse.js new file mode 100644 index 0000000..6421c5d --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/uuid/dist/esm-browser/parse.js @@ -0,0 +1,35 @@ +import validate from './validate.js'; + +function parse(uuid) { + if (!validate(uuid)) { + throw TypeError('Invalid UUID'); + } + + let v; + const arr = new Uint8Array(16); // Parse ########-....-....-....-............ + + arr[0] = (v = parseInt(uuid.slice(0, 8), 16)) >>> 24; + arr[1] = v >>> 16 & 0xff; + arr[2] = v >>> 8 & 0xff; + arr[3] = v & 0xff; // Parse ........-####-....-....-............ + + arr[4] = (v = parseInt(uuid.slice(9, 13), 16)) >>> 8; + arr[5] = v & 0xff; // Parse ........-....-####-....-............ + + arr[6] = (v = parseInt(uuid.slice(14, 18), 16)) >>> 8; + arr[7] = v & 0xff; // Parse ........-....-....-####-............ 
+ + arr[8] = (v = parseInt(uuid.slice(19, 23), 16)) >>> 8; + arr[9] = v & 0xff; // Parse ........-....-....-....-############ + // (Use "/" to avoid 32-bit truncation when bit-shifting high-order bytes) + + arr[10] = (v = parseInt(uuid.slice(24, 36), 16)) / 0x10000000000 & 0xff; + arr[11] = v / 0x100000000 & 0xff; + arr[12] = v >>> 24 & 0xff; + arr[13] = v >>> 16 & 0xff; + arr[14] = v >>> 8 & 0xff; + arr[15] = v & 0xff; + return arr; +} + +export default parse; \ No newline at end of file diff --git a/amplify/functions/deleteDocument/node_modules/uuid/dist/esm-browser/regex.js b/amplify/functions/deleteDocument/node_modules/uuid/dist/esm-browser/regex.js new file mode 100644 index 0000000..3da8673 --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/uuid/dist/esm-browser/regex.js @@ -0,0 +1 @@ +export default /^(?:[0-9a-f]{8}-[0-9a-f]{4}-[1-5][0-9a-f]{3}-[89ab][0-9a-f]{3}-[0-9a-f]{12}|00000000-0000-0000-0000-000000000000)$/i; \ No newline at end of file diff --git a/amplify/functions/deleteDocument/node_modules/uuid/dist/esm-browser/rng.js b/amplify/functions/deleteDocument/node_modules/uuid/dist/esm-browser/rng.js new file mode 100644 index 0000000..6e65234 --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/uuid/dist/esm-browser/rng.js @@ -0,0 +1,18 @@ +// Unique ID creation requires a high quality random # generator. In the browser we therefore +// require the crypto API and do not support built-in fallback to lower quality random number +// generators (like Math.random()). +let getRandomValues; +const rnds8 = new Uint8Array(16); +export default function rng() { + // lazy load so that environments that need to polyfill have a chance to do so + if (!getRandomValues) { + // getRandomValues needs to be invoked in a context where "this" is a Crypto implementation. 
+ getRandomValues = typeof crypto !== 'undefined' && crypto.getRandomValues && crypto.getRandomValues.bind(crypto); + + if (!getRandomValues) { + throw new Error('crypto.getRandomValues() not supported. See https://github.com/uuidjs/uuid#getrandomvalues-not-supported'); + } + } + + return getRandomValues(rnds8); +} \ No newline at end of file diff --git a/amplify/functions/deleteDocument/node_modules/uuid/dist/esm-browser/sha1.js b/amplify/functions/deleteDocument/node_modules/uuid/dist/esm-browser/sha1.js new file mode 100644 index 0000000..d3c2565 --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/uuid/dist/esm-browser/sha1.js @@ -0,0 +1,96 @@ +// Adapted from Chris Veness' SHA1 code at +// http://www.movable-type.co.uk/scripts/sha1.html +function f(s, x, y, z) { + switch (s) { + case 0: + return x & y ^ ~x & z; + + case 1: + return x ^ y ^ z; + + case 2: + return x & y ^ x & z ^ y & z; + + case 3: + return x ^ y ^ z; + } +} + +function ROTL(x, n) { + return x << n | x >>> 32 - n; +} + +function sha1(bytes) { + const K = [0x5a827999, 0x6ed9eba1, 0x8f1bbcdc, 0xca62c1d6]; + const H = [0x67452301, 0xefcdab89, 0x98badcfe, 0x10325476, 0xc3d2e1f0]; + + if (typeof bytes === 'string') { + const msg = unescape(encodeURIComponent(bytes)); // UTF8 escape + + bytes = []; + + for (let i = 0; i < msg.length; ++i) { + bytes.push(msg.charCodeAt(i)); + } + } else if (!Array.isArray(bytes)) { + // Convert Array-like to Array + bytes = Array.prototype.slice.call(bytes); + } + + bytes.push(0x80); + const l = bytes.length / 4 + 2; + const N = Math.ceil(l / 16); + const M = new Array(N); + + for (let i = 0; i < N; ++i) { + const arr = new Uint32Array(16); + + for (let j = 0; j < 16; ++j) { + arr[j] = bytes[i * 64 + j * 4] << 24 | bytes[i * 64 + j * 4 + 1] << 16 | bytes[i * 64 + j * 4 + 2] << 8 | bytes[i * 64 + j * 4 + 3]; + } + + M[i] = arr; + } + + M[N - 1][14] = (bytes.length - 1) * 8 / Math.pow(2, 32); + M[N - 1][14] = Math.floor(M[N - 1][14]); + M[N - 1][15] = 
(bytes.length - 1) * 8 & 0xffffffff; + + for (let i = 0; i < N; ++i) { + const W = new Uint32Array(80); + + for (let t = 0; t < 16; ++t) { + W[t] = M[i][t]; + } + + for (let t = 16; t < 80; ++t) { + W[t] = ROTL(W[t - 3] ^ W[t - 8] ^ W[t - 14] ^ W[t - 16], 1); + } + + let a = H[0]; + let b = H[1]; + let c = H[2]; + let d = H[3]; + let e = H[4]; + + for (let t = 0; t < 80; ++t) { + const s = Math.floor(t / 20); + const T = ROTL(a, 5) + f(s, b, c, d) + e + K[s] + W[t] >>> 0; + e = d; + d = c; + c = ROTL(b, 30) >>> 0; + b = a; + a = T; + } + + H[0] = H[0] + a >>> 0; + H[1] = H[1] + b >>> 0; + H[2] = H[2] + c >>> 0; + H[3] = H[3] + d >>> 0; + H[4] = H[4] + e >>> 0; + } + + return [H[0] >> 24 & 0xff, H[0] >> 16 & 0xff, H[0] >> 8 & 0xff, H[0] & 0xff, H[1] >> 24 & 0xff, H[1] >> 16 & 0xff, H[1] >> 8 & 0xff, H[1] & 0xff, H[2] >> 24 & 0xff, H[2] >> 16 & 0xff, H[2] >> 8 & 0xff, H[2] & 0xff, H[3] >> 24 & 0xff, H[3] >> 16 & 0xff, H[3] >> 8 & 0xff, H[3] & 0xff, H[4] >> 24 & 0xff, H[4] >> 16 & 0xff, H[4] >> 8 & 0xff, H[4] & 0xff]; +} + +export default sha1; \ No newline at end of file diff --git a/amplify/functions/deleteDocument/node_modules/uuid/dist/esm-browser/stringify.js b/amplify/functions/deleteDocument/node_modules/uuid/dist/esm-browser/stringify.js new file mode 100644 index 0000000..a6e4c88 --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/uuid/dist/esm-browser/stringify.js @@ -0,0 +1,33 @@ +import validate from './validate.js'; +/** + * Convert array of 16 byte values to UUID string format of the form: + * XXXXXXXX-XXXX-XXXX-XXXX-XXXXXXXXXXXX + */ + +const byteToHex = []; + +for (let i = 0; i < 256; ++i) { + byteToHex.push((i + 0x100).toString(16).slice(1)); +} + +export function unsafeStringify(arr, offset = 0) { + // Note: Be careful editing this code! It's been tuned for performance + // and works in ways you may not expect. 
See https://github.com/uuidjs/uuid/pull/434 + return byteToHex[arr[offset + 0]] + byteToHex[arr[offset + 1]] + byteToHex[arr[offset + 2]] + byteToHex[arr[offset + 3]] + '-' + byteToHex[arr[offset + 4]] + byteToHex[arr[offset + 5]] + '-' + byteToHex[arr[offset + 6]] + byteToHex[arr[offset + 7]] + '-' + byteToHex[arr[offset + 8]] + byteToHex[arr[offset + 9]] + '-' + byteToHex[arr[offset + 10]] + byteToHex[arr[offset + 11]] + byteToHex[arr[offset + 12]] + byteToHex[arr[offset + 13]] + byteToHex[arr[offset + 14]] + byteToHex[arr[offset + 15]]; +} + +function stringify(arr, offset = 0) { + const uuid = unsafeStringify(arr, offset); // Consistency check for valid UUID. If this throws, it's likely due to one + // of the following: + // - One or more input array values don't map to a hex octet (leading to + // "undefined" in the uuid) + // - Invalid input values for the RFC `version` or `variant` fields + + if (!validate(uuid)) { + throw TypeError('Stringified UUID is invalid'); + } + + return uuid; +} + +export default stringify; \ No newline at end of file diff --git a/amplify/functions/deleteDocument/node_modules/uuid/dist/esm-browser/v1.js b/amplify/functions/deleteDocument/node_modules/uuid/dist/esm-browser/v1.js new file mode 100644 index 0000000..382e5d7 --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/uuid/dist/esm-browser/v1.js @@ -0,0 +1,95 @@ +import rng from './rng.js'; +import { unsafeStringify } from './stringify.js'; // **`v1()` - Generate time-based UUID** +// +// Inspired by https://github.com/LiosK/UUID.js +// and http://docs.python.org/library/uuid.html + +let _nodeId; + +let _clockseq; // Previous uuid creation time + + +let _lastMSecs = 0; +let _lastNSecs = 0; // See https://github.com/uuidjs/uuid for API details + +function v1(options, buf, offset) { + let i = buf && offset || 0; + const b = buf || new Array(16); + options = options || {}; + let node = options.node || _nodeId; + let clockseq = options.clockseq !== undefined ? 
options.clockseq : _clockseq; // node and clockseq need to be initialized to random values if they're not + // specified. We do this lazily to minimize issues related to insufficient + // system entropy. See #189 + + if (node == null || clockseq == null) { + const seedBytes = options.random || (options.rng || rng)(); + + if (node == null) { + // Per 4.5, create and 48-bit node id, (47 random bits + multicast bit = 1) + node = _nodeId = [seedBytes[0] | 0x01, seedBytes[1], seedBytes[2], seedBytes[3], seedBytes[4], seedBytes[5]]; + } + + if (clockseq == null) { + // Per 4.2.2, randomize (14 bit) clockseq + clockseq = _clockseq = (seedBytes[6] << 8 | seedBytes[7]) & 0x3fff; + } + } // UUID timestamps are 100 nano-second units since the Gregorian epoch, + // (1582-10-15 00:00). JSNumbers aren't precise enough for this, so + // time is handled internally as 'msecs' (integer milliseconds) and 'nsecs' + // (100-nanoseconds offset from msecs) since unix epoch, 1970-01-01 00:00. + + + let msecs = options.msecs !== undefined ? options.msecs : Date.now(); // Per 4.2.1.2, use count of uuid's generated during the current clock + // cycle to simulate higher resolution clock + + let nsecs = options.nsecs !== undefined ? 
options.nsecs : _lastNSecs + 1; // Time since last uuid creation (in msecs) + + const dt = msecs - _lastMSecs + (nsecs - _lastNSecs) / 10000; // Per 4.2.1.2, Bump clockseq on clock regression + + if (dt < 0 && options.clockseq === undefined) { + clockseq = clockseq + 1 & 0x3fff; + } // Reset nsecs if clock regresses (new clockseq) or we've moved onto a new + // time interval + + + if ((dt < 0 || msecs > _lastMSecs) && options.nsecs === undefined) { + nsecs = 0; + } // Per 4.2.1.2 Throw error if too many uuids are requested + + + if (nsecs >= 10000) { + throw new Error("uuid.v1(): Can't create more than 10M uuids/sec"); + } + + _lastMSecs = msecs; + _lastNSecs = nsecs; + _clockseq = clockseq; // Per 4.1.4 - Convert from unix epoch to Gregorian epoch + + msecs += 12219292800000; // `time_low` + + const tl = ((msecs & 0xfffffff) * 10000 + nsecs) % 0x100000000; + b[i++] = tl >>> 24 & 0xff; + b[i++] = tl >>> 16 & 0xff; + b[i++] = tl >>> 8 & 0xff; + b[i++] = tl & 0xff; // `time_mid` + + const tmh = msecs / 0x100000000 * 10000 & 0xfffffff; + b[i++] = tmh >>> 8 & 0xff; + b[i++] = tmh & 0xff; // `time_high_and_version` + + b[i++] = tmh >>> 24 & 0xf | 0x10; // include version + + b[i++] = tmh >>> 16 & 0xff; // `clock_seq_hi_and_reserved` (Per 4.2.2 - include variant) + + b[i++] = clockseq >>> 8 | 0x80; // `clock_seq_low` + + b[i++] = clockseq & 0xff; // `node` + + for (let n = 0; n < 6; ++n) { + b[i + n] = node[n]; + } + + return buf || unsafeStringify(b); +} + +export default v1; \ No newline at end of file diff --git a/amplify/functions/deleteDocument/node_modules/uuid/dist/esm-browser/v3.js b/amplify/functions/deleteDocument/node_modules/uuid/dist/esm-browser/v3.js new file mode 100644 index 0000000..09063b8 --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/uuid/dist/esm-browser/v3.js @@ -0,0 +1,4 @@ +import v35 from './v35.js'; +import md5 from './md5.js'; +const v3 = v35('v3', 0x30, md5); +export default v3; \ No newline at end of file diff --git 
a/amplify/functions/deleteDocument/node_modules/uuid/dist/esm-browser/v35.js b/amplify/functions/deleteDocument/node_modules/uuid/dist/esm-browser/v35.js new file mode 100644 index 0000000..3355e1f --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/uuid/dist/esm-browser/v35.js @@ -0,0 +1,66 @@ +import { unsafeStringify } from './stringify.js'; +import parse from './parse.js'; + +function stringToBytes(str) { + str = unescape(encodeURIComponent(str)); // UTF8 escape + + const bytes = []; + + for (let i = 0; i < str.length; ++i) { + bytes.push(str.charCodeAt(i)); + } + + return bytes; +} + +export const DNS = '6ba7b810-9dad-11d1-80b4-00c04fd430c8'; +export const URL = '6ba7b811-9dad-11d1-80b4-00c04fd430c8'; +export default function v35(name, version, hashfunc) { + function generateUUID(value, namespace, buf, offset) { + var _namespace; + + if (typeof value === 'string') { + value = stringToBytes(value); + } + + if (typeof namespace === 'string') { + namespace = parse(namespace); + } + + if (((_namespace = namespace) === null || _namespace === void 0 ? void 0 : _namespace.length) !== 16) { + throw TypeError('Namespace must be array-like (16 iterable integer values, 0-255)'); + } // Compute hash of namespace and value, Per 4.3 + // Future: Use spread syntax when supported on all platforms, e.g. `bytes = + // hashfunc([...namespace, ... 
value])` + + + let bytes = new Uint8Array(16 + value.length); + bytes.set(namespace); + bytes.set(value, namespace.length); + bytes = hashfunc(bytes); + bytes[6] = bytes[6] & 0x0f | version; + bytes[8] = bytes[8] & 0x3f | 0x80; + + if (buf) { + offset = offset || 0; + + for (let i = 0; i < 16; ++i) { + buf[offset + i] = bytes[i]; + } + + return buf; + } + + return unsafeStringify(bytes); + } // Function#name is not settable on some platforms (#270) + + + try { + generateUUID.name = name; // eslint-disable-next-line no-empty + } catch (err) {} // For CommonJS default export support + + + generateUUID.DNS = DNS; + generateUUID.URL = URL; + return generateUUID; +} \ No newline at end of file diff --git a/amplify/functions/deleteDocument/node_modules/uuid/dist/esm-browser/v4.js b/amplify/functions/deleteDocument/node_modules/uuid/dist/esm-browser/v4.js new file mode 100644 index 0000000..95ea879 --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/uuid/dist/esm-browser/v4.js @@ -0,0 +1,29 @@ +import native from './native.js'; +import rng from './rng.js'; +import { unsafeStringify } from './stringify.js'; + +function v4(options, buf, offset) { + if (native.randomUUID && !buf && !options) { + return native.randomUUID(); + } + + options = options || {}; + const rnds = options.random || (options.rng || rng)(); // Per 4.4, set bits for version and `clock_seq_hi_and_reserved` + + rnds[6] = rnds[6] & 0x0f | 0x40; + rnds[8] = rnds[8] & 0x3f | 0x80; // Copy bytes to buffer, if provided + + if (buf) { + offset = offset || 0; + + for (let i = 0; i < 16; ++i) { + buf[offset + i] = rnds[i]; + } + + return buf; + } + + return unsafeStringify(rnds); +} + +export default v4; \ No newline at end of file diff --git a/amplify/functions/deleteDocument/node_modules/uuid/dist/esm-browser/v5.js b/amplify/functions/deleteDocument/node_modules/uuid/dist/esm-browser/v5.js new file mode 100644 index 0000000..e87fe31 --- /dev/null +++ 
b/amplify/functions/deleteDocument/node_modules/uuid/dist/esm-browser/v5.js @@ -0,0 +1,4 @@ +import v35 from './v35.js'; +import sha1 from './sha1.js'; +const v5 = v35('v5', 0x50, sha1); +export default v5; \ No newline at end of file diff --git a/amplify/functions/deleteDocument/node_modules/uuid/dist/esm-browser/validate.js b/amplify/functions/deleteDocument/node_modules/uuid/dist/esm-browser/validate.js new file mode 100644 index 0000000..f1cdc7a --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/uuid/dist/esm-browser/validate.js @@ -0,0 +1,7 @@ +import REGEX from './regex.js'; + +function validate(uuid) { + return typeof uuid === 'string' && REGEX.test(uuid); +} + +export default validate; \ No newline at end of file diff --git a/amplify/functions/deleteDocument/node_modules/uuid/dist/esm-browser/version.js b/amplify/functions/deleteDocument/node_modules/uuid/dist/esm-browser/version.js new file mode 100644 index 0000000..9363076 --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/uuid/dist/esm-browser/version.js @@ -0,0 +1,11 @@ +import validate from './validate.js'; + +function version(uuid) { + if (!validate(uuid)) { + throw TypeError('Invalid UUID'); + } + + return parseInt(uuid.slice(14, 15), 16); +} + +export default version; \ No newline at end of file diff --git a/amplify/functions/deleteDocument/node_modules/uuid/dist/esm-node/index.js b/amplify/functions/deleteDocument/node_modules/uuid/dist/esm-node/index.js new file mode 100644 index 0000000..1db6f6d --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/uuid/dist/esm-node/index.js @@ -0,0 +1,9 @@ +export { default as v1 } from './v1.js'; +export { default as v3 } from './v3.js'; +export { default as v4 } from './v4.js'; +export { default as v5 } from './v5.js'; +export { default as NIL } from './nil.js'; +export { default as version } from './version.js'; +export { default as validate } from './validate.js'; +export { default as stringify } from 
'./stringify.js'; +export { default as parse } from './parse.js'; \ No newline at end of file diff --git a/amplify/functions/deleteDocument/node_modules/uuid/dist/esm-node/md5.js b/amplify/functions/deleteDocument/node_modules/uuid/dist/esm-node/md5.js new file mode 100644 index 0000000..4d68b04 --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/uuid/dist/esm-node/md5.js @@ -0,0 +1,13 @@ +import crypto from 'crypto'; + +function md5(bytes) { + if (Array.isArray(bytes)) { + bytes = Buffer.from(bytes); + } else if (typeof bytes === 'string') { + bytes = Buffer.from(bytes, 'utf8'); + } + + return crypto.createHash('md5').update(bytes).digest(); +} + +export default md5; \ No newline at end of file diff --git a/amplify/functions/deleteDocument/node_modules/uuid/dist/esm-node/native.js b/amplify/functions/deleteDocument/node_modules/uuid/dist/esm-node/native.js new file mode 100644 index 0000000..f0d1992 --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/uuid/dist/esm-node/native.js @@ -0,0 +1,4 @@ +import crypto from 'crypto'; +export default { + randomUUID: crypto.randomUUID +}; \ No newline at end of file diff --git a/amplify/functions/deleteDocument/node_modules/uuid/dist/esm-node/nil.js b/amplify/functions/deleteDocument/node_modules/uuid/dist/esm-node/nil.js new file mode 100644 index 0000000..b36324c --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/uuid/dist/esm-node/nil.js @@ -0,0 +1 @@ +export default '00000000-0000-0000-0000-000000000000'; \ No newline at end of file diff --git a/amplify/functions/deleteDocument/node_modules/uuid/dist/esm-node/parse.js b/amplify/functions/deleteDocument/node_modules/uuid/dist/esm-node/parse.js new file mode 100644 index 0000000..6421c5d --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/uuid/dist/esm-node/parse.js @@ -0,0 +1,35 @@ +import validate from './validate.js'; + +function parse(uuid) { + if (!validate(uuid)) { + throw TypeError('Invalid UUID'); + } + + let 
v; + const arr = new Uint8Array(16); // Parse ########-....-....-....-............ + + arr[0] = (v = parseInt(uuid.slice(0, 8), 16)) >>> 24; + arr[1] = v >>> 16 & 0xff; + arr[2] = v >>> 8 & 0xff; + arr[3] = v & 0xff; // Parse ........-####-....-....-............ + + arr[4] = (v = parseInt(uuid.slice(9, 13), 16)) >>> 8; + arr[5] = v & 0xff; // Parse ........-....-####-....-............ + + arr[6] = (v = parseInt(uuid.slice(14, 18), 16)) >>> 8; + arr[7] = v & 0xff; // Parse ........-....-....-####-............ + + arr[8] = (v = parseInt(uuid.slice(19, 23), 16)) >>> 8; + arr[9] = v & 0xff; // Parse ........-....-....-....-############ + // (Use "/" to avoid 32-bit truncation when bit-shifting high-order bytes) + + arr[10] = (v = parseInt(uuid.slice(24, 36), 16)) / 0x10000000000 & 0xff; + arr[11] = v / 0x100000000 & 0xff; + arr[12] = v >>> 24 & 0xff; + arr[13] = v >>> 16 & 0xff; + arr[14] = v >>> 8 & 0xff; + arr[15] = v & 0xff; + return arr; +} + +export default parse; \ No newline at end of file diff --git a/amplify/functions/deleteDocument/node_modules/uuid/dist/esm-node/regex.js b/amplify/functions/deleteDocument/node_modules/uuid/dist/esm-node/regex.js new file mode 100644 index 0000000..3da8673 --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/uuid/dist/esm-node/regex.js @@ -0,0 +1 @@ +export default /^(?:[0-9a-f]{8}-[0-9a-f]{4}-[1-5][0-9a-f]{3}-[89ab][0-9a-f]{3}-[0-9a-f]{12}|00000000-0000-0000-0000-000000000000)$/i; \ No newline at end of file diff --git a/amplify/functions/deleteDocument/node_modules/uuid/dist/esm-node/rng.js b/amplify/functions/deleteDocument/node_modules/uuid/dist/esm-node/rng.js new file mode 100644 index 0000000..8006244 --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/uuid/dist/esm-node/rng.js @@ -0,0 +1,12 @@ +import crypto from 'crypto'; +const rnds8Pool = new Uint8Array(256); // # of random values to pre-allocate + +let poolPtr = rnds8Pool.length; +export default function rng() { + if (poolPtr > 
rnds8Pool.length - 16) { + crypto.randomFillSync(rnds8Pool); + poolPtr = 0; + } + + return rnds8Pool.slice(poolPtr, poolPtr += 16); +} \ No newline at end of file diff --git a/amplify/functions/deleteDocument/node_modules/uuid/dist/esm-node/sha1.js b/amplify/functions/deleteDocument/node_modules/uuid/dist/esm-node/sha1.js new file mode 100644 index 0000000..e23850b --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/uuid/dist/esm-node/sha1.js @@ -0,0 +1,13 @@ +import crypto from 'crypto'; + +function sha1(bytes) { + if (Array.isArray(bytes)) { + bytes = Buffer.from(bytes); + } else if (typeof bytes === 'string') { + bytes = Buffer.from(bytes, 'utf8'); + } + + return crypto.createHash('sha1').update(bytes).digest(); +} + +export default sha1; \ No newline at end of file diff --git a/amplify/functions/deleteDocument/node_modules/uuid/dist/esm-node/stringify.js b/amplify/functions/deleteDocument/node_modules/uuid/dist/esm-node/stringify.js new file mode 100644 index 0000000..a6e4c88 --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/uuid/dist/esm-node/stringify.js @@ -0,0 +1,33 @@ +import validate from './validate.js'; +/** + * Convert array of 16 byte values to UUID string format of the form: + * XXXXXXXX-XXXX-XXXX-XXXX-XXXXXXXXXXXX + */ + +const byteToHex = []; + +for (let i = 0; i < 256; ++i) { + byteToHex.push((i + 0x100).toString(16).slice(1)); +} + +export function unsafeStringify(arr, offset = 0) { + // Note: Be careful editing this code! It's been tuned for performance + // and works in ways you may not expect. 
See https://github.com/uuidjs/uuid/pull/434 + return byteToHex[arr[offset + 0]] + byteToHex[arr[offset + 1]] + byteToHex[arr[offset + 2]] + byteToHex[arr[offset + 3]] + '-' + byteToHex[arr[offset + 4]] + byteToHex[arr[offset + 5]] + '-' + byteToHex[arr[offset + 6]] + byteToHex[arr[offset + 7]] + '-' + byteToHex[arr[offset + 8]] + byteToHex[arr[offset + 9]] + '-' + byteToHex[arr[offset + 10]] + byteToHex[arr[offset + 11]] + byteToHex[arr[offset + 12]] + byteToHex[arr[offset + 13]] + byteToHex[arr[offset + 14]] + byteToHex[arr[offset + 15]]; +} + +function stringify(arr, offset = 0) { + const uuid = unsafeStringify(arr, offset); // Consistency check for valid UUID. If this throws, it's likely due to one + // of the following: + // - One or more input array values don't map to a hex octet (leading to + // "undefined" in the uuid) + // - Invalid input values for the RFC `version` or `variant` fields + + if (!validate(uuid)) { + throw TypeError('Stringified UUID is invalid'); + } + + return uuid; +} + +export default stringify; \ No newline at end of file diff --git a/amplify/functions/deleteDocument/node_modules/uuid/dist/esm-node/v1.js b/amplify/functions/deleteDocument/node_modules/uuid/dist/esm-node/v1.js new file mode 100644 index 0000000..382e5d7 --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/uuid/dist/esm-node/v1.js @@ -0,0 +1,95 @@ +import rng from './rng.js'; +import { unsafeStringify } from './stringify.js'; // **`v1()` - Generate time-based UUID** +// +// Inspired by https://github.com/LiosK/UUID.js +// and http://docs.python.org/library/uuid.html + +let _nodeId; + +let _clockseq; // Previous uuid creation time + + +let _lastMSecs = 0; +let _lastNSecs = 0; // See https://github.com/uuidjs/uuid for API details + +function v1(options, buf, offset) { + let i = buf && offset || 0; + const b = buf || new Array(16); + options = options || {}; + let node = options.node || _nodeId; + let clockseq = options.clockseq !== undefined ? 
options.clockseq : _clockseq; // node and clockseq need to be initialized to random values if they're not + // specified. We do this lazily to minimize issues related to insufficient + // system entropy. See #189 + + if (node == null || clockseq == null) { + const seedBytes = options.random || (options.rng || rng)(); + + if (node == null) { + // Per 4.5, create and 48-bit node id, (47 random bits + multicast bit = 1) + node = _nodeId = [seedBytes[0] | 0x01, seedBytes[1], seedBytes[2], seedBytes[3], seedBytes[4], seedBytes[5]]; + } + + if (clockseq == null) { + // Per 4.2.2, randomize (14 bit) clockseq + clockseq = _clockseq = (seedBytes[6] << 8 | seedBytes[7]) & 0x3fff; + } + } // UUID timestamps are 100 nano-second units since the Gregorian epoch, + // (1582-10-15 00:00). JSNumbers aren't precise enough for this, so + // time is handled internally as 'msecs' (integer milliseconds) and 'nsecs' + // (100-nanoseconds offset from msecs) since unix epoch, 1970-01-01 00:00. + + + let msecs = options.msecs !== undefined ? options.msecs : Date.now(); // Per 4.2.1.2, use count of uuid's generated during the current clock + // cycle to simulate higher resolution clock + + let nsecs = options.nsecs !== undefined ? 
options.nsecs : _lastNSecs + 1; // Time since last uuid creation (in msecs) + + const dt = msecs - _lastMSecs + (nsecs - _lastNSecs) / 10000; // Per 4.2.1.2, Bump clockseq on clock regression + + if (dt < 0 && options.clockseq === undefined) { + clockseq = clockseq + 1 & 0x3fff; + } // Reset nsecs if clock regresses (new clockseq) or we've moved onto a new + // time interval + + + if ((dt < 0 || msecs > _lastMSecs) && options.nsecs === undefined) { + nsecs = 0; + } // Per 4.2.1.2 Throw error if too many uuids are requested + + + if (nsecs >= 10000) { + throw new Error("uuid.v1(): Can't create more than 10M uuids/sec"); + } + + _lastMSecs = msecs; + _lastNSecs = nsecs; + _clockseq = clockseq; // Per 4.1.4 - Convert from unix epoch to Gregorian epoch + + msecs += 12219292800000; // `time_low` + + const tl = ((msecs & 0xfffffff) * 10000 + nsecs) % 0x100000000; + b[i++] = tl >>> 24 & 0xff; + b[i++] = tl >>> 16 & 0xff; + b[i++] = tl >>> 8 & 0xff; + b[i++] = tl & 0xff; // `time_mid` + + const tmh = msecs / 0x100000000 * 10000 & 0xfffffff; + b[i++] = tmh >>> 8 & 0xff; + b[i++] = tmh & 0xff; // `time_high_and_version` + + b[i++] = tmh >>> 24 & 0xf | 0x10; // include version + + b[i++] = tmh >>> 16 & 0xff; // `clock_seq_hi_and_reserved` (Per 4.2.2 - include variant) + + b[i++] = clockseq >>> 8 | 0x80; // `clock_seq_low` + + b[i++] = clockseq & 0xff; // `node` + + for (let n = 0; n < 6; ++n) { + b[i + n] = node[n]; + } + + return buf || unsafeStringify(b); +} + +export default v1; \ No newline at end of file diff --git a/amplify/functions/deleteDocument/node_modules/uuid/dist/esm-node/v3.js b/amplify/functions/deleteDocument/node_modules/uuid/dist/esm-node/v3.js new file mode 100644 index 0000000..09063b8 --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/uuid/dist/esm-node/v3.js @@ -0,0 +1,4 @@ +import v35 from './v35.js'; +import md5 from './md5.js'; +const v3 = v35('v3', 0x30, md5); +export default v3; \ No newline at end of file diff --git 
a/amplify/functions/deleteDocument/node_modules/uuid/dist/esm-node/v35.js b/amplify/functions/deleteDocument/node_modules/uuid/dist/esm-node/v35.js new file mode 100644 index 0000000..3355e1f --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/uuid/dist/esm-node/v35.js @@ -0,0 +1,66 @@ +import { unsafeStringify } from './stringify.js'; +import parse from './parse.js'; + +function stringToBytes(str) { + str = unescape(encodeURIComponent(str)); // UTF8 escape + + const bytes = []; + + for (let i = 0; i < str.length; ++i) { + bytes.push(str.charCodeAt(i)); + } + + return bytes; +} + +export const DNS = '6ba7b810-9dad-11d1-80b4-00c04fd430c8'; +export const URL = '6ba7b811-9dad-11d1-80b4-00c04fd430c8'; +export default function v35(name, version, hashfunc) { + function generateUUID(value, namespace, buf, offset) { + var _namespace; + + if (typeof value === 'string') { + value = stringToBytes(value); + } + + if (typeof namespace === 'string') { + namespace = parse(namespace); + } + + if (((_namespace = namespace) === null || _namespace === void 0 ? void 0 : _namespace.length) !== 16) { + throw TypeError('Namespace must be array-like (16 iterable integer values, 0-255)'); + } // Compute hash of namespace and value, Per 4.3 + // Future: Use spread syntax when supported on all platforms, e.g. `bytes = + // hashfunc([...namespace, ... 
value])` + + + let bytes = new Uint8Array(16 + value.length); + bytes.set(namespace); + bytes.set(value, namespace.length); + bytes = hashfunc(bytes); + bytes[6] = bytes[6] & 0x0f | version; + bytes[8] = bytes[8] & 0x3f | 0x80; + + if (buf) { + offset = offset || 0; + + for (let i = 0; i < 16; ++i) { + buf[offset + i] = bytes[i]; + } + + return buf; + } + + return unsafeStringify(bytes); + } // Function#name is not settable on some platforms (#270) + + + try { + generateUUID.name = name; // eslint-disable-next-line no-empty + } catch (err) {} // For CommonJS default export support + + + generateUUID.DNS = DNS; + generateUUID.URL = URL; + return generateUUID; +} \ No newline at end of file diff --git a/amplify/functions/deleteDocument/node_modules/uuid/dist/esm-node/v4.js b/amplify/functions/deleteDocument/node_modules/uuid/dist/esm-node/v4.js new file mode 100644 index 0000000..95ea879 --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/uuid/dist/esm-node/v4.js @@ -0,0 +1,29 @@ +import native from './native.js'; +import rng from './rng.js'; +import { unsafeStringify } from './stringify.js'; + +function v4(options, buf, offset) { + if (native.randomUUID && !buf && !options) { + return native.randomUUID(); + } + + options = options || {}; + const rnds = options.random || (options.rng || rng)(); // Per 4.4, set bits for version and `clock_seq_hi_and_reserved` + + rnds[6] = rnds[6] & 0x0f | 0x40; + rnds[8] = rnds[8] & 0x3f | 0x80; // Copy bytes to buffer, if provided + + if (buf) { + offset = offset || 0; + + for (let i = 0; i < 16; ++i) { + buf[offset + i] = rnds[i]; + } + + return buf; + } + + return unsafeStringify(rnds); +} + +export default v4; \ No newline at end of file diff --git a/amplify/functions/deleteDocument/node_modules/uuid/dist/esm-node/v5.js b/amplify/functions/deleteDocument/node_modules/uuid/dist/esm-node/v5.js new file mode 100644 index 0000000..e87fe31 --- /dev/null +++ 
b/amplify/functions/deleteDocument/node_modules/uuid/dist/esm-node/v5.js @@ -0,0 +1,4 @@ +import v35 from './v35.js'; +import sha1 from './sha1.js'; +const v5 = v35('v5', 0x50, sha1); +export default v5; \ No newline at end of file diff --git a/amplify/functions/deleteDocument/node_modules/uuid/dist/esm-node/validate.js b/amplify/functions/deleteDocument/node_modules/uuid/dist/esm-node/validate.js new file mode 100644 index 0000000..f1cdc7a --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/uuid/dist/esm-node/validate.js @@ -0,0 +1,7 @@ +import REGEX from './regex.js'; + +function validate(uuid) { + return typeof uuid === 'string' && REGEX.test(uuid); +} + +export default validate; \ No newline at end of file diff --git a/amplify/functions/deleteDocument/node_modules/uuid/dist/esm-node/version.js b/amplify/functions/deleteDocument/node_modules/uuid/dist/esm-node/version.js new file mode 100644 index 0000000..9363076 --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/uuid/dist/esm-node/version.js @@ -0,0 +1,11 @@ +import validate from './validate.js'; + +function version(uuid) { + if (!validate(uuid)) { + throw TypeError('Invalid UUID'); + } + + return parseInt(uuid.slice(14, 15), 16); +} + +export default version; \ No newline at end of file diff --git a/amplify/functions/deleteDocument/node_modules/uuid/dist/index.js b/amplify/functions/deleteDocument/node_modules/uuid/dist/index.js new file mode 100644 index 0000000..88d676a --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/uuid/dist/index.js @@ -0,0 +1,79 @@ +"use strict"; + +Object.defineProperty(exports, "__esModule", { + value: true +}); +Object.defineProperty(exports, "NIL", { + enumerable: true, + get: function () { + return _nil.default; + } +}); +Object.defineProperty(exports, "parse", { + enumerable: true, + get: function () { + return _parse.default; + } +}); +Object.defineProperty(exports, "stringify", { + enumerable: true, + get: function () { + return 
_stringify.default; + } +}); +Object.defineProperty(exports, "v1", { + enumerable: true, + get: function () { + return _v.default; + } +}); +Object.defineProperty(exports, "v3", { + enumerable: true, + get: function () { + return _v2.default; + } +}); +Object.defineProperty(exports, "v4", { + enumerable: true, + get: function () { + return _v3.default; + } +}); +Object.defineProperty(exports, "v5", { + enumerable: true, + get: function () { + return _v4.default; + } +}); +Object.defineProperty(exports, "validate", { + enumerable: true, + get: function () { + return _validate.default; + } +}); +Object.defineProperty(exports, "version", { + enumerable: true, + get: function () { + return _version.default; + } +}); + +var _v = _interopRequireDefault(require("./v1.js")); + +var _v2 = _interopRequireDefault(require("./v3.js")); + +var _v3 = _interopRequireDefault(require("./v4.js")); + +var _v4 = _interopRequireDefault(require("./v5.js")); + +var _nil = _interopRequireDefault(require("./nil.js")); + +var _version = _interopRequireDefault(require("./version.js")); + +var _validate = _interopRequireDefault(require("./validate.js")); + +var _stringify = _interopRequireDefault(require("./stringify.js")); + +var _parse = _interopRequireDefault(require("./parse.js")); + +function _interopRequireDefault(obj) { return obj && obj.__esModule ? 
obj : { default: obj }; } \ No newline at end of file diff --git a/amplify/functions/deleteDocument/node_modules/uuid/dist/md5-browser.js b/amplify/functions/deleteDocument/node_modules/uuid/dist/md5-browser.js new file mode 100644 index 0000000..7a4582a --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/uuid/dist/md5-browser.js @@ -0,0 +1,223 @@ +"use strict"; + +Object.defineProperty(exports, "__esModule", { + value: true +}); +exports.default = void 0; + +/* + * Browser-compatible JavaScript MD5 + * + * Modification of JavaScript MD5 + * https://github.com/blueimp/JavaScript-MD5 + * + * Copyright 2011, Sebastian Tschan + * https://blueimp.net + * + * Licensed under the MIT license: + * https://opensource.org/licenses/MIT + * + * Based on + * A JavaScript implementation of the RSA Data Security, Inc. MD5 Message + * Digest Algorithm, as defined in RFC 1321. + * Version 2.2 Copyright (C) Paul Johnston 1999 - 2009 + * Other contributors: Greg Holt, Andrew Kepert, Ydnar, Lostinet + * Distributed under the BSD License + * See http://pajhome.org.uk/crypt/md5 for more info. 
+ */ +function md5(bytes) { + if (typeof bytes === 'string') { + const msg = unescape(encodeURIComponent(bytes)); // UTF8 escape + + bytes = new Uint8Array(msg.length); + + for (let i = 0; i < msg.length; ++i) { + bytes[i] = msg.charCodeAt(i); + } + } + + return md5ToHexEncodedArray(wordsToMd5(bytesToWords(bytes), bytes.length * 8)); +} +/* + * Convert an array of little-endian words to an array of bytes + */ + + +function md5ToHexEncodedArray(input) { + const output = []; + const length32 = input.length * 32; + const hexTab = '0123456789abcdef'; + + for (let i = 0; i < length32; i += 8) { + const x = input[i >> 5] >>> i % 32 & 0xff; + const hex = parseInt(hexTab.charAt(x >>> 4 & 0x0f) + hexTab.charAt(x & 0x0f), 16); + output.push(hex); + } + + return output; +} +/** + * Calculate output length with padding and bit length + */ + + +function getOutputLength(inputLength8) { + return (inputLength8 + 64 >>> 9 << 4) + 14 + 1; +} +/* + * Calculate the MD5 of an array of little-endian words, and a bit length. 
+ */ + + +function wordsToMd5(x, len) { + /* append padding */ + x[len >> 5] |= 0x80 << len % 32; + x[getOutputLength(len) - 1] = len; + let a = 1732584193; + let b = -271733879; + let c = -1732584194; + let d = 271733878; + + for (let i = 0; i < x.length; i += 16) { + const olda = a; + const oldb = b; + const oldc = c; + const oldd = d; + a = md5ff(a, b, c, d, x[i], 7, -680876936); + d = md5ff(d, a, b, c, x[i + 1], 12, -389564586); + c = md5ff(c, d, a, b, x[i + 2], 17, 606105819); + b = md5ff(b, c, d, a, x[i + 3], 22, -1044525330); + a = md5ff(a, b, c, d, x[i + 4], 7, -176418897); + d = md5ff(d, a, b, c, x[i + 5], 12, 1200080426); + c = md5ff(c, d, a, b, x[i + 6], 17, -1473231341); + b = md5ff(b, c, d, a, x[i + 7], 22, -45705983); + a = md5ff(a, b, c, d, x[i + 8], 7, 1770035416); + d = md5ff(d, a, b, c, x[i + 9], 12, -1958414417); + c = md5ff(c, d, a, b, x[i + 10], 17, -42063); + b = md5ff(b, c, d, a, x[i + 11], 22, -1990404162); + a = md5ff(a, b, c, d, x[i + 12], 7, 1804603682); + d = md5ff(d, a, b, c, x[i + 13], 12, -40341101); + c = md5ff(c, d, a, b, x[i + 14], 17, -1502002290); + b = md5ff(b, c, d, a, x[i + 15], 22, 1236535329); + a = md5gg(a, b, c, d, x[i + 1], 5, -165796510); + d = md5gg(d, a, b, c, x[i + 6], 9, -1069501632); + c = md5gg(c, d, a, b, x[i + 11], 14, 643717713); + b = md5gg(b, c, d, a, x[i], 20, -373897302); + a = md5gg(a, b, c, d, x[i + 5], 5, -701558691); + d = md5gg(d, a, b, c, x[i + 10], 9, 38016083); + c = md5gg(c, d, a, b, x[i + 15], 14, -660478335); + b = md5gg(b, c, d, a, x[i + 4], 20, -405537848); + a = md5gg(a, b, c, d, x[i + 9], 5, 568446438); + d = md5gg(d, a, b, c, x[i + 14], 9, -1019803690); + c = md5gg(c, d, a, b, x[i + 3], 14, -187363961); + b = md5gg(b, c, d, a, x[i + 8], 20, 1163531501); + a = md5gg(a, b, c, d, x[i + 13], 5, -1444681467); + d = md5gg(d, a, b, c, x[i + 2], 9, -51403784); + c = md5gg(c, d, a, b, x[i + 7], 14, 1735328473); + b = md5gg(b, c, d, a, x[i + 12], 20, -1926607734); + a = md5hh(a, b, c, d, x[i + 5], 4, 
-378558); + d = md5hh(d, a, b, c, x[i + 8], 11, -2022574463); + c = md5hh(c, d, a, b, x[i + 11], 16, 1839030562); + b = md5hh(b, c, d, a, x[i + 14], 23, -35309556); + a = md5hh(a, b, c, d, x[i + 1], 4, -1530992060); + d = md5hh(d, a, b, c, x[i + 4], 11, 1272893353); + c = md5hh(c, d, a, b, x[i + 7], 16, -155497632); + b = md5hh(b, c, d, a, x[i + 10], 23, -1094730640); + a = md5hh(a, b, c, d, x[i + 13], 4, 681279174); + d = md5hh(d, a, b, c, x[i], 11, -358537222); + c = md5hh(c, d, a, b, x[i + 3], 16, -722521979); + b = md5hh(b, c, d, a, x[i + 6], 23, 76029189); + a = md5hh(a, b, c, d, x[i + 9], 4, -640364487); + d = md5hh(d, a, b, c, x[i + 12], 11, -421815835); + c = md5hh(c, d, a, b, x[i + 15], 16, 530742520); + b = md5hh(b, c, d, a, x[i + 2], 23, -995338651); + a = md5ii(a, b, c, d, x[i], 6, -198630844); + d = md5ii(d, a, b, c, x[i + 7], 10, 1126891415); + c = md5ii(c, d, a, b, x[i + 14], 15, -1416354905); + b = md5ii(b, c, d, a, x[i + 5], 21, -57434055); + a = md5ii(a, b, c, d, x[i + 12], 6, 1700485571); + d = md5ii(d, a, b, c, x[i + 3], 10, -1894986606); + c = md5ii(c, d, a, b, x[i + 10], 15, -1051523); + b = md5ii(b, c, d, a, x[i + 1], 21, -2054922799); + a = md5ii(a, b, c, d, x[i + 8], 6, 1873313359); + d = md5ii(d, a, b, c, x[i + 15], 10, -30611744); + c = md5ii(c, d, a, b, x[i + 6], 15, -1560198380); + b = md5ii(b, c, d, a, x[i + 13], 21, 1309151649); + a = md5ii(a, b, c, d, x[i + 4], 6, -145523070); + d = md5ii(d, a, b, c, x[i + 11], 10, -1120210379); + c = md5ii(c, d, a, b, x[i + 2], 15, 718787259); + b = md5ii(b, c, d, a, x[i + 9], 21, -343485551); + a = safeAdd(a, olda); + b = safeAdd(b, oldb); + c = safeAdd(c, oldc); + d = safeAdd(d, oldd); + } + + return [a, b, c, d]; +} +/* + * Convert an array bytes to an array of little-endian words + * Characters >255 have their high-byte silently ignored. 
+ */ + + +function bytesToWords(input) { + if (input.length === 0) { + return []; + } + + const length8 = input.length * 8; + const output = new Uint32Array(getOutputLength(length8)); + + for (let i = 0; i < length8; i += 8) { + output[i >> 5] |= (input[i / 8] & 0xff) << i % 32; + } + + return output; +} +/* + * Add integers, wrapping at 2^32. This uses 16-bit operations internally + * to work around bugs in some JS interpreters. + */ + + +function safeAdd(x, y) { + const lsw = (x & 0xffff) + (y & 0xffff); + const msw = (x >> 16) + (y >> 16) + (lsw >> 16); + return msw << 16 | lsw & 0xffff; +} +/* + * Bitwise rotate a 32-bit number to the left. + */ + + +function bitRotateLeft(num, cnt) { + return num << cnt | num >>> 32 - cnt; +} +/* + * These functions implement the four basic operations the algorithm uses. + */ + + +function md5cmn(q, a, b, x, s, t) { + return safeAdd(bitRotateLeft(safeAdd(safeAdd(a, q), safeAdd(x, t)), s), b); +} + +function md5ff(a, b, c, d, x, s, t) { + return md5cmn(b & c | ~b & d, a, b, x, s, t); +} + +function md5gg(a, b, c, d, x, s, t) { + return md5cmn(b & d | c & ~d, a, b, x, s, t); +} + +function md5hh(a, b, c, d, x, s, t) { + return md5cmn(b ^ c ^ d, a, b, x, s, t); +} + +function md5ii(a, b, c, d, x, s, t) { + return md5cmn(c ^ (b | ~d), a, b, x, s, t); +} + +var _default = md5; +exports.default = _default; \ No newline at end of file diff --git a/amplify/functions/deleteDocument/node_modules/uuid/dist/md5.js b/amplify/functions/deleteDocument/node_modules/uuid/dist/md5.js new file mode 100644 index 0000000..824d481 --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/uuid/dist/md5.js @@ -0,0 +1,23 @@ +"use strict"; + +Object.defineProperty(exports, "__esModule", { + value: true +}); +exports.default = void 0; + +var _crypto = _interopRequireDefault(require("crypto")); + +function _interopRequireDefault(obj) { return obj && obj.__esModule ? 
obj : { default: obj }; } + +function md5(bytes) { + if (Array.isArray(bytes)) { + bytes = Buffer.from(bytes); + } else if (typeof bytes === 'string') { + bytes = Buffer.from(bytes, 'utf8'); + } + + return _crypto.default.createHash('md5').update(bytes).digest(); +} + +var _default = md5; +exports.default = _default; \ No newline at end of file diff --git a/amplify/functions/deleteDocument/node_modules/uuid/dist/native-browser.js b/amplify/functions/deleteDocument/node_modules/uuid/dist/native-browser.js new file mode 100644 index 0000000..c2eea59 --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/uuid/dist/native-browser.js @@ -0,0 +1,11 @@ +"use strict"; + +Object.defineProperty(exports, "__esModule", { + value: true +}); +exports.default = void 0; +const randomUUID = typeof crypto !== 'undefined' && crypto.randomUUID && crypto.randomUUID.bind(crypto); +var _default = { + randomUUID +}; +exports.default = _default; \ No newline at end of file diff --git a/amplify/functions/deleteDocument/node_modules/uuid/dist/native.js b/amplify/functions/deleteDocument/node_modules/uuid/dist/native.js new file mode 100644 index 0000000..de80469 --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/uuid/dist/native.js @@ -0,0 +1,15 @@ +"use strict"; + +Object.defineProperty(exports, "__esModule", { + value: true +}); +exports.default = void 0; + +var _crypto = _interopRequireDefault(require("crypto")); + +function _interopRequireDefault(obj) { return obj && obj.__esModule ? 
obj : { default: obj }; } + +var _default = { + randomUUID: _crypto.default.randomUUID +}; +exports.default = _default; \ No newline at end of file diff --git a/amplify/functions/deleteDocument/node_modules/uuid/dist/nil.js b/amplify/functions/deleteDocument/node_modules/uuid/dist/nil.js new file mode 100644 index 0000000..7ade577 --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/uuid/dist/nil.js @@ -0,0 +1,8 @@ +"use strict"; + +Object.defineProperty(exports, "__esModule", { + value: true +}); +exports.default = void 0; +var _default = '00000000-0000-0000-0000-000000000000'; +exports.default = _default; \ No newline at end of file diff --git a/amplify/functions/deleteDocument/node_modules/uuid/dist/parse.js b/amplify/functions/deleteDocument/node_modules/uuid/dist/parse.js new file mode 100644 index 0000000..4c69fc3 --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/uuid/dist/parse.js @@ -0,0 +1,45 @@ +"use strict"; + +Object.defineProperty(exports, "__esModule", { + value: true +}); +exports.default = void 0; + +var _validate = _interopRequireDefault(require("./validate.js")); + +function _interopRequireDefault(obj) { return obj && obj.__esModule ? obj : { default: obj }; } + +function parse(uuid) { + if (!(0, _validate.default)(uuid)) { + throw TypeError('Invalid UUID'); + } + + let v; + const arr = new Uint8Array(16); // Parse ########-....-....-....-............ + + arr[0] = (v = parseInt(uuid.slice(0, 8), 16)) >>> 24; + arr[1] = v >>> 16 & 0xff; + arr[2] = v >>> 8 & 0xff; + arr[3] = v & 0xff; // Parse ........-####-....-....-............ + + arr[4] = (v = parseInt(uuid.slice(9, 13), 16)) >>> 8; + arr[5] = v & 0xff; // Parse ........-....-####-....-............ + + arr[6] = (v = parseInt(uuid.slice(14, 18), 16)) >>> 8; + arr[7] = v & 0xff; // Parse ........-....-....-####-............ 
+ + arr[8] = (v = parseInt(uuid.slice(19, 23), 16)) >>> 8; + arr[9] = v & 0xff; // Parse ........-....-....-....-############ + // (Use "/" to avoid 32-bit truncation when bit-shifting high-order bytes) + + arr[10] = (v = parseInt(uuid.slice(24, 36), 16)) / 0x10000000000 & 0xff; + arr[11] = v / 0x100000000 & 0xff; + arr[12] = v >>> 24 & 0xff; + arr[13] = v >>> 16 & 0xff; + arr[14] = v >>> 8 & 0xff; + arr[15] = v & 0xff; + return arr; +} + +var _default = parse; +exports.default = _default; \ No newline at end of file diff --git a/amplify/functions/deleteDocument/node_modules/uuid/dist/regex.js b/amplify/functions/deleteDocument/node_modules/uuid/dist/regex.js new file mode 100644 index 0000000..1ef91d6 --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/uuid/dist/regex.js @@ -0,0 +1,8 @@ +"use strict"; + +Object.defineProperty(exports, "__esModule", { + value: true +}); +exports.default = void 0; +var _default = /^(?:[0-9a-f]{8}-[0-9a-f]{4}-[1-5][0-9a-f]{3}-[89ab][0-9a-f]{3}-[0-9a-f]{12}|00000000-0000-0000-0000-000000000000)$/i; +exports.default = _default; \ No newline at end of file diff --git a/amplify/functions/deleteDocument/node_modules/uuid/dist/rng-browser.js b/amplify/functions/deleteDocument/node_modules/uuid/dist/rng-browser.js new file mode 100644 index 0000000..d067cdb --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/uuid/dist/rng-browser.js @@ -0,0 +1,25 @@ +"use strict"; + +Object.defineProperty(exports, "__esModule", { + value: true +}); +exports.default = rng; +// Unique ID creation requires a high quality random # generator. In the browser we therefore +// require the crypto API and do not support built-in fallback to lower quality random number +// generators (like Math.random()). 
+let getRandomValues; +const rnds8 = new Uint8Array(16); + +function rng() { + // lazy load so that environments that need to polyfill have a chance to do so + if (!getRandomValues) { + // getRandomValues needs to be invoked in a context where "this" is a Crypto implementation. + getRandomValues = typeof crypto !== 'undefined' && crypto.getRandomValues && crypto.getRandomValues.bind(crypto); + + if (!getRandomValues) { + throw new Error('crypto.getRandomValues() not supported. See https://github.com/uuidjs/uuid#getrandomvalues-not-supported'); + } + } + + return getRandomValues(rnds8); +} \ No newline at end of file diff --git a/amplify/functions/deleteDocument/node_modules/uuid/dist/rng.js b/amplify/functions/deleteDocument/node_modules/uuid/dist/rng.js new file mode 100644 index 0000000..3507f93 --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/uuid/dist/rng.js @@ -0,0 +1,24 @@ +"use strict"; + +Object.defineProperty(exports, "__esModule", { + value: true +}); +exports.default = rng; + +var _crypto = _interopRequireDefault(require("crypto")); + +function _interopRequireDefault(obj) { return obj && obj.__esModule ? 
obj : { default: obj }; } + +const rnds8Pool = new Uint8Array(256); // # of random values to pre-allocate + +let poolPtr = rnds8Pool.length; + +function rng() { + if (poolPtr > rnds8Pool.length - 16) { + _crypto.default.randomFillSync(rnds8Pool); + + poolPtr = 0; + } + + return rnds8Pool.slice(poolPtr, poolPtr += 16); +} \ No newline at end of file diff --git a/amplify/functions/deleteDocument/node_modules/uuid/dist/sha1-browser.js b/amplify/functions/deleteDocument/node_modules/uuid/dist/sha1-browser.js new file mode 100644 index 0000000..24cbced --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/uuid/dist/sha1-browser.js @@ -0,0 +1,104 @@ +"use strict"; + +Object.defineProperty(exports, "__esModule", { + value: true +}); +exports.default = void 0; + +// Adapted from Chris Veness' SHA1 code at +// http://www.movable-type.co.uk/scripts/sha1.html +function f(s, x, y, z) { + switch (s) { + case 0: + return x & y ^ ~x & z; + + case 1: + return x ^ y ^ z; + + case 2: + return x & y ^ x & z ^ y & z; + + case 3: + return x ^ y ^ z; + } +} + +function ROTL(x, n) { + return x << n | x >>> 32 - n; +} + +function sha1(bytes) { + const K = [0x5a827999, 0x6ed9eba1, 0x8f1bbcdc, 0xca62c1d6]; + const H = [0x67452301, 0xefcdab89, 0x98badcfe, 0x10325476, 0xc3d2e1f0]; + + if (typeof bytes === 'string') { + const msg = unescape(encodeURIComponent(bytes)); // UTF8 escape + + bytes = []; + + for (let i = 0; i < msg.length; ++i) { + bytes.push(msg.charCodeAt(i)); + } + } else if (!Array.isArray(bytes)) { + // Convert Array-like to Array + bytes = Array.prototype.slice.call(bytes); + } + + bytes.push(0x80); + const l = bytes.length / 4 + 2; + const N = Math.ceil(l / 16); + const M = new Array(N); + + for (let i = 0; i < N; ++i) { + const arr = new Uint32Array(16); + + for (let j = 0; j < 16; ++j) { + arr[j] = bytes[i * 64 + j * 4] << 24 | bytes[i * 64 + j * 4 + 1] << 16 | bytes[i * 64 + j * 4 + 2] << 8 | bytes[i * 64 + j * 4 + 3]; + } + + M[i] = arr; + } + + M[N - 1][14] = 
(bytes.length - 1) * 8 / Math.pow(2, 32); + M[N - 1][14] = Math.floor(M[N - 1][14]); + M[N - 1][15] = (bytes.length - 1) * 8 & 0xffffffff; + + for (let i = 0; i < N; ++i) { + const W = new Uint32Array(80); + + for (let t = 0; t < 16; ++t) { + W[t] = M[i][t]; + } + + for (let t = 16; t < 80; ++t) { + W[t] = ROTL(W[t - 3] ^ W[t - 8] ^ W[t - 14] ^ W[t - 16], 1); + } + + let a = H[0]; + let b = H[1]; + let c = H[2]; + let d = H[3]; + let e = H[4]; + + for (let t = 0; t < 80; ++t) { + const s = Math.floor(t / 20); + const T = ROTL(a, 5) + f(s, b, c, d) + e + K[s] + W[t] >>> 0; + e = d; + d = c; + c = ROTL(b, 30) >>> 0; + b = a; + a = T; + } + + H[0] = H[0] + a >>> 0; + H[1] = H[1] + b >>> 0; + H[2] = H[2] + c >>> 0; + H[3] = H[3] + d >>> 0; + H[4] = H[4] + e >>> 0; + } + + return [H[0] >> 24 & 0xff, H[0] >> 16 & 0xff, H[0] >> 8 & 0xff, H[0] & 0xff, H[1] >> 24 & 0xff, H[1] >> 16 & 0xff, H[1] >> 8 & 0xff, H[1] & 0xff, H[2] >> 24 & 0xff, H[2] >> 16 & 0xff, H[2] >> 8 & 0xff, H[2] & 0xff, H[3] >> 24 & 0xff, H[3] >> 16 & 0xff, H[3] >> 8 & 0xff, H[3] & 0xff, H[4] >> 24 & 0xff, H[4] >> 16 & 0xff, H[4] >> 8 & 0xff, H[4] & 0xff]; +} + +var _default = sha1; +exports.default = _default; \ No newline at end of file diff --git a/amplify/functions/deleteDocument/node_modules/uuid/dist/sha1.js b/amplify/functions/deleteDocument/node_modules/uuid/dist/sha1.js new file mode 100644 index 0000000..03bdd63 --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/uuid/dist/sha1.js @@ -0,0 +1,23 @@ +"use strict"; + +Object.defineProperty(exports, "__esModule", { + value: true +}); +exports.default = void 0; + +var _crypto = _interopRequireDefault(require("crypto")); + +function _interopRequireDefault(obj) { return obj && obj.__esModule ? 
obj : { default: obj }; } + +function sha1(bytes) { + if (Array.isArray(bytes)) { + bytes = Buffer.from(bytes); + } else if (typeof bytes === 'string') { + bytes = Buffer.from(bytes, 'utf8'); + } + + return _crypto.default.createHash('sha1').update(bytes).digest(); +} + +var _default = sha1; +exports.default = _default; \ No newline at end of file diff --git a/amplify/functions/deleteDocument/node_modules/uuid/dist/stringify.js b/amplify/functions/deleteDocument/node_modules/uuid/dist/stringify.js new file mode 100644 index 0000000..390bf89 --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/uuid/dist/stringify.js @@ -0,0 +1,44 @@ +"use strict"; + +Object.defineProperty(exports, "__esModule", { + value: true +}); +exports.default = void 0; +exports.unsafeStringify = unsafeStringify; + +var _validate = _interopRequireDefault(require("./validate.js")); + +function _interopRequireDefault(obj) { return obj && obj.__esModule ? obj : { default: obj }; } + +/** + * Convert array of 16 byte values to UUID string format of the form: + * XXXXXXXX-XXXX-XXXX-XXXX-XXXXXXXXXXXX + */ +const byteToHex = []; + +for (let i = 0; i < 256; ++i) { + byteToHex.push((i + 0x100).toString(16).slice(1)); +} + +function unsafeStringify(arr, offset = 0) { + // Note: Be careful editing this code! It's been tuned for performance + // and works in ways you may not expect. 
See https://github.com/uuidjs/uuid/pull/434 + return byteToHex[arr[offset + 0]] + byteToHex[arr[offset + 1]] + byteToHex[arr[offset + 2]] + byteToHex[arr[offset + 3]] + '-' + byteToHex[arr[offset + 4]] + byteToHex[arr[offset + 5]] + '-' + byteToHex[arr[offset + 6]] + byteToHex[arr[offset + 7]] + '-' + byteToHex[arr[offset + 8]] + byteToHex[arr[offset + 9]] + '-' + byteToHex[arr[offset + 10]] + byteToHex[arr[offset + 11]] + byteToHex[arr[offset + 12]] + byteToHex[arr[offset + 13]] + byteToHex[arr[offset + 14]] + byteToHex[arr[offset + 15]]; +} + +function stringify(arr, offset = 0) { + const uuid = unsafeStringify(arr, offset); // Consistency check for valid UUID. If this throws, it's likely due to one + // of the following: + // - One or more input array values don't map to a hex octet (leading to + // "undefined" in the uuid) + // - Invalid input values for the RFC `version` or `variant` fields + + if (!(0, _validate.default)(uuid)) { + throw TypeError('Stringified UUID is invalid'); + } + + return uuid; +} + +var _default = stringify; +exports.default = _default; \ No newline at end of file diff --git a/amplify/functions/deleteDocument/node_modules/uuid/dist/uuid-bin.js b/amplify/functions/deleteDocument/node_modules/uuid/dist/uuid-bin.js new file mode 100644 index 0000000..50a7a9f --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/uuid/dist/uuid-bin.js @@ -0,0 +1,85 @@ +"use strict"; + +var _assert = _interopRequireDefault(require("assert")); + +var _v = _interopRequireDefault(require("./v1.js")); + +var _v2 = _interopRequireDefault(require("./v3.js")); + +var _v3 = _interopRequireDefault(require("./v4.js")); + +var _v4 = _interopRequireDefault(require("./v5.js")); + +function _interopRequireDefault(obj) { return obj && obj.__esModule ? 
obj : { default: obj }; } + +function usage() { + console.log('Usage:'); + console.log(' uuid'); + console.log(' uuid v1'); + console.log(' uuid v3 '); + console.log(' uuid v4'); + console.log(' uuid v5 '); + console.log(' uuid --help'); + console.log('\nNote: may be "URL" or "DNS" to use the corresponding UUIDs defined by RFC4122'); +} + +const args = process.argv.slice(2); + +if (args.indexOf('--help') >= 0) { + usage(); + process.exit(0); +} + +const version = args.shift() || 'v4'; + +switch (version) { + case 'v1': + console.log((0, _v.default)()); + break; + + case 'v3': + { + const name = args.shift(); + let namespace = args.shift(); + (0, _assert.default)(name != null, 'v3 name not specified'); + (0, _assert.default)(namespace != null, 'v3 namespace not specified'); + + if (namespace === 'URL') { + namespace = _v2.default.URL; + } + + if (namespace === 'DNS') { + namespace = _v2.default.DNS; + } + + console.log((0, _v2.default)(name, namespace)); + break; + } + + case 'v4': + console.log((0, _v3.default)()); + break; + + case 'v5': + { + const name = args.shift(); + let namespace = args.shift(); + (0, _assert.default)(name != null, 'v5 name not specified'); + (0, _assert.default)(namespace != null, 'v5 namespace not specified'); + + if (namespace === 'URL') { + namespace = _v4.default.URL; + } + + if (namespace === 'DNS') { + namespace = _v4.default.DNS; + } + + console.log((0, _v4.default)(name, namespace)); + break; + } + + default: + usage(); + process.exit(1); +} \ No newline at end of file diff --git a/amplify/functions/deleteDocument/node_modules/uuid/dist/v1.js b/amplify/functions/deleteDocument/node_modules/uuid/dist/v1.js new file mode 100644 index 0000000..125bc58 --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/uuid/dist/v1.js @@ -0,0 +1,107 @@ +"use strict"; + +Object.defineProperty(exports, "__esModule", { + value: true +}); +exports.default = void 0; + +var _rng = _interopRequireDefault(require("./rng.js")); + +var _stringify = 
require("./stringify.js"); + +function _interopRequireDefault(obj) { return obj && obj.__esModule ? obj : { default: obj }; } + +// **`v1()` - Generate time-based UUID** +// +// Inspired by https://github.com/LiosK/UUID.js +// and http://docs.python.org/library/uuid.html +let _nodeId; + +let _clockseq; // Previous uuid creation time + + +let _lastMSecs = 0; +let _lastNSecs = 0; // See https://github.com/uuidjs/uuid for API details + +function v1(options, buf, offset) { + let i = buf && offset || 0; + const b = buf || new Array(16); + options = options || {}; + let node = options.node || _nodeId; + let clockseq = options.clockseq !== undefined ? options.clockseq : _clockseq; // node and clockseq need to be initialized to random values if they're not + // specified. We do this lazily to minimize issues related to insufficient + // system entropy. See #189 + + if (node == null || clockseq == null) { + const seedBytes = options.random || (options.rng || _rng.default)(); + + if (node == null) { + // Per 4.5, create and 48-bit node id, (47 random bits + multicast bit = 1) + node = _nodeId = [seedBytes[0] | 0x01, seedBytes[1], seedBytes[2], seedBytes[3], seedBytes[4], seedBytes[5]]; + } + + if (clockseq == null) { + // Per 4.2.2, randomize (14 bit) clockseq + clockseq = _clockseq = (seedBytes[6] << 8 | seedBytes[7]) & 0x3fff; + } + } // UUID timestamps are 100 nano-second units since the Gregorian epoch, + // (1582-10-15 00:00). JSNumbers aren't precise enough for this, so + // time is handled internally as 'msecs' (integer milliseconds) and 'nsecs' + // (100-nanoseconds offset from msecs) since unix epoch, 1970-01-01 00:00. + + + let msecs = options.msecs !== undefined ? options.msecs : Date.now(); // Per 4.2.1.2, use count of uuid's generated during the current clock + // cycle to simulate higher resolution clock + + let nsecs = options.nsecs !== undefined ? 
options.nsecs : _lastNSecs + 1; // Time since last uuid creation (in msecs) + + const dt = msecs - _lastMSecs + (nsecs - _lastNSecs) / 10000; // Per 4.2.1.2, Bump clockseq on clock regression + + if (dt < 0 && options.clockseq === undefined) { + clockseq = clockseq + 1 & 0x3fff; + } // Reset nsecs if clock regresses (new clockseq) or we've moved onto a new + // time interval + + + if ((dt < 0 || msecs > _lastMSecs) && options.nsecs === undefined) { + nsecs = 0; + } // Per 4.2.1.2 Throw error if too many uuids are requested + + + if (nsecs >= 10000) { + throw new Error("uuid.v1(): Can't create more than 10M uuids/sec"); + } + + _lastMSecs = msecs; + _lastNSecs = nsecs; + _clockseq = clockseq; // Per 4.1.4 - Convert from unix epoch to Gregorian epoch + + msecs += 12219292800000; // `time_low` + + const tl = ((msecs & 0xfffffff) * 10000 + nsecs) % 0x100000000; + b[i++] = tl >>> 24 & 0xff; + b[i++] = tl >>> 16 & 0xff; + b[i++] = tl >>> 8 & 0xff; + b[i++] = tl & 0xff; // `time_mid` + + const tmh = msecs / 0x100000000 * 10000 & 0xfffffff; + b[i++] = tmh >>> 8 & 0xff; + b[i++] = tmh & 0xff; // `time_high_and_version` + + b[i++] = tmh >>> 24 & 0xf | 0x10; // include version + + b[i++] = tmh >>> 16 & 0xff; // `clock_seq_hi_and_reserved` (Per 4.2.2 - include variant) + + b[i++] = clockseq >>> 8 | 0x80; // `clock_seq_low` + + b[i++] = clockseq & 0xff; // `node` + + for (let n = 0; n < 6; ++n) { + b[i + n] = node[n]; + } + + return buf || (0, _stringify.unsafeStringify)(b); +} + +var _default = v1; +exports.default = _default; \ No newline at end of file diff --git a/amplify/functions/deleteDocument/node_modules/uuid/dist/v3.js b/amplify/functions/deleteDocument/node_modules/uuid/dist/v3.js new file mode 100644 index 0000000..6b47ff5 --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/uuid/dist/v3.js @@ -0,0 +1,16 @@ +"use strict"; + +Object.defineProperty(exports, "__esModule", { + value: true +}); +exports.default = void 0; + +var _v = 
_interopRequireDefault(require("./v35.js")); + +var _md = _interopRequireDefault(require("./md5.js")); + +function _interopRequireDefault(obj) { return obj && obj.__esModule ? obj : { default: obj }; } + +const v3 = (0, _v.default)('v3', 0x30, _md.default); +var _default = v3; +exports.default = _default; \ No newline at end of file diff --git a/amplify/functions/deleteDocument/node_modules/uuid/dist/v35.js b/amplify/functions/deleteDocument/node_modules/uuid/dist/v35.js new file mode 100644 index 0000000..7c522d9 --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/uuid/dist/v35.js @@ -0,0 +1,80 @@ +"use strict"; + +Object.defineProperty(exports, "__esModule", { + value: true +}); +exports.URL = exports.DNS = void 0; +exports.default = v35; + +var _stringify = require("./stringify.js"); + +var _parse = _interopRequireDefault(require("./parse.js")); + +function _interopRequireDefault(obj) { return obj && obj.__esModule ? obj : { default: obj }; } + +function stringToBytes(str) { + str = unescape(encodeURIComponent(str)); // UTF8 escape + + const bytes = []; + + for (let i = 0; i < str.length; ++i) { + bytes.push(str.charCodeAt(i)); + } + + return bytes; +} + +const DNS = '6ba7b810-9dad-11d1-80b4-00c04fd430c8'; +exports.DNS = DNS; +const URL = '6ba7b811-9dad-11d1-80b4-00c04fd430c8'; +exports.URL = URL; + +function v35(name, version, hashfunc) { + function generateUUID(value, namespace, buf, offset) { + var _namespace; + + if (typeof value === 'string') { + value = stringToBytes(value); + } + + if (typeof namespace === 'string') { + namespace = (0, _parse.default)(namespace); + } + + if (((_namespace = namespace) === null || _namespace === void 0 ? void 0 : _namespace.length) !== 16) { + throw TypeError('Namespace must be array-like (16 iterable integer values, 0-255)'); + } // Compute hash of namespace and value, Per 4.3 + // Future: Use spread syntax when supported on all platforms, e.g. `bytes = + // hashfunc([...namespace, ... 
value])` + + + let bytes = new Uint8Array(16 + value.length); + bytes.set(namespace); + bytes.set(value, namespace.length); + bytes = hashfunc(bytes); + bytes[6] = bytes[6] & 0x0f | version; + bytes[8] = bytes[8] & 0x3f | 0x80; + + if (buf) { + offset = offset || 0; + + for (let i = 0; i < 16; ++i) { + buf[offset + i] = bytes[i]; + } + + return buf; + } + + return (0, _stringify.unsafeStringify)(bytes); + } // Function#name is not settable on some platforms (#270) + + + try { + generateUUID.name = name; // eslint-disable-next-line no-empty + } catch (err) {} // For CommonJS default export support + + + generateUUID.DNS = DNS; + generateUUID.URL = URL; + return generateUUID; +} \ No newline at end of file diff --git a/amplify/functions/deleteDocument/node_modules/uuid/dist/v4.js b/amplify/functions/deleteDocument/node_modules/uuid/dist/v4.js new file mode 100644 index 0000000..959d698 --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/uuid/dist/v4.js @@ -0,0 +1,43 @@ +"use strict"; + +Object.defineProperty(exports, "__esModule", { + value: true +}); +exports.default = void 0; + +var _native = _interopRequireDefault(require("./native.js")); + +var _rng = _interopRequireDefault(require("./rng.js")); + +var _stringify = require("./stringify.js"); + +function _interopRequireDefault(obj) { return obj && obj.__esModule ? 
obj : { default: obj }; } + +function v4(options, buf, offset) { + if (_native.default.randomUUID && !buf && !options) { + return _native.default.randomUUID(); + } + + options = options || {}; + + const rnds = options.random || (options.rng || _rng.default)(); // Per 4.4, set bits for version and `clock_seq_hi_and_reserved` + + + rnds[6] = rnds[6] & 0x0f | 0x40; + rnds[8] = rnds[8] & 0x3f | 0x80; // Copy bytes to buffer, if provided + + if (buf) { + offset = offset || 0; + + for (let i = 0; i < 16; ++i) { + buf[offset + i] = rnds[i]; + } + + return buf; + } + + return (0, _stringify.unsafeStringify)(rnds); +} + +var _default = v4; +exports.default = _default; \ No newline at end of file diff --git a/amplify/functions/deleteDocument/node_modules/uuid/dist/v5.js b/amplify/functions/deleteDocument/node_modules/uuid/dist/v5.js new file mode 100644 index 0000000..99d615e --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/uuid/dist/v5.js @@ -0,0 +1,16 @@ +"use strict"; + +Object.defineProperty(exports, "__esModule", { + value: true +}); +exports.default = void 0; + +var _v = _interopRequireDefault(require("./v35.js")); + +var _sha = _interopRequireDefault(require("./sha1.js")); + +function _interopRequireDefault(obj) { return obj && obj.__esModule ? obj : { default: obj }; } + +const v5 = (0, _v.default)('v5', 0x50, _sha.default); +var _default = v5; +exports.default = _default; \ No newline at end of file diff --git a/amplify/functions/deleteDocument/node_modules/uuid/dist/validate.js b/amplify/functions/deleteDocument/node_modules/uuid/dist/validate.js new file mode 100644 index 0000000..fd05215 --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/uuid/dist/validate.js @@ -0,0 +1,17 @@ +"use strict"; + +Object.defineProperty(exports, "__esModule", { + value: true +}); +exports.default = void 0; + +var _regex = _interopRequireDefault(require("./regex.js")); + +function _interopRequireDefault(obj) { return obj && obj.__esModule ? 
obj : { default: obj }; } + +function validate(uuid) { + return typeof uuid === 'string' && _regex.default.test(uuid); +} + +var _default = validate; +exports.default = _default; \ No newline at end of file diff --git a/amplify/functions/deleteDocument/node_modules/uuid/dist/version.js b/amplify/functions/deleteDocument/node_modules/uuid/dist/version.js new file mode 100644 index 0000000..f63af01 --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/uuid/dist/version.js @@ -0,0 +1,21 @@ +"use strict"; + +Object.defineProperty(exports, "__esModule", { + value: true +}); +exports.default = void 0; + +var _validate = _interopRequireDefault(require("./validate.js")); + +function _interopRequireDefault(obj) { return obj && obj.__esModule ? obj : { default: obj }; } + +function version(uuid) { + if (!(0, _validate.default)(uuid)) { + throw TypeError('Invalid UUID'); + } + + return parseInt(uuid.slice(14, 15), 16); +} + +var _default = version; +exports.default = _default; \ No newline at end of file diff --git a/amplify/functions/deleteDocument/node_modules/uuid/package.json b/amplify/functions/deleteDocument/node_modules/uuid/package.json new file mode 100644 index 0000000..6cc3361 --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/uuid/package.json @@ -0,0 +1,135 @@ +{ + "name": "uuid", + "version": "9.0.1", + "description": "RFC4122 (v1, v4, and v5) UUIDs", + "funding": [ + "https://github.com/sponsors/broofa", + "https://github.com/sponsors/ctavan" + ], + "commitlint": { + "extends": [ + "@commitlint/config-conventional" + ] + }, + "keywords": [ + "uuid", + "guid", + "rfc4122" + ], + "license": "MIT", + "bin": { + "uuid": "./dist/bin/uuid" + }, + "sideEffects": false, + "main": "./dist/index.js", + "exports": { + ".": { + "node": { + "module": "./dist/esm-node/index.js", + "require": "./dist/index.js", + "import": "./wrapper.mjs" + }, + "browser": { + "import": "./dist/esm-browser/index.js", + "require": "./dist/commonjs-browser/index.js" + 
}, + "default": "./dist/esm-browser/index.js" + }, + "./package.json": "./package.json" + }, + "module": "./dist/esm-node/index.js", + "browser": { + "./dist/md5.js": "./dist/md5-browser.js", + "./dist/native.js": "./dist/native-browser.js", + "./dist/rng.js": "./dist/rng-browser.js", + "./dist/sha1.js": "./dist/sha1-browser.js", + "./dist/esm-node/index.js": "./dist/esm-browser/index.js" + }, + "files": [ + "CHANGELOG.md", + "CONTRIBUTING.md", + "LICENSE.md", + "README.md", + "dist", + "wrapper.mjs" + ], + "devDependencies": { + "@babel/cli": "7.18.10", + "@babel/core": "7.18.10", + "@babel/eslint-parser": "7.18.9", + "@babel/preset-env": "7.18.10", + "@commitlint/cli": "17.0.3", + "@commitlint/config-conventional": "17.0.3", + "bundlewatch": "0.3.3", + "eslint": "8.21.0", + "eslint-config-prettier": "8.5.0", + "eslint-config-standard": "17.0.0", + "eslint-plugin-import": "2.26.0", + "eslint-plugin-node": "11.1.0", + "eslint-plugin-prettier": "4.2.1", + "eslint-plugin-promise": "6.0.0", + "husky": "8.0.1", + "jest": "28.1.3", + "lint-staged": "13.0.3", + "npm-run-all": "4.1.5", + "optional-dev-dependency": "2.0.1", + "prettier": "2.7.1", + "random-seed": "0.3.0", + "runmd": "1.3.9", + "standard-version": "9.5.0" + }, + "optionalDevDependencies": { + "@wdio/browserstack-service": "7.16.10", + "@wdio/cli": "7.16.10", + "@wdio/jasmine-framework": "7.16.6", + "@wdio/local-runner": "7.16.10", + "@wdio/spec-reporter": "7.16.9", + "@wdio/static-server-service": "7.16.6" + }, + "scripts": { + "examples:browser:webpack:build": "cd examples/browser-webpack && npm install && npm run build", + "examples:browser:rollup:build": "cd examples/browser-rollup && npm install && npm run build", + "examples:node:commonjs:test": "cd examples/node-commonjs && npm install && npm test", + "examples:node:esmodules:test": "cd examples/node-esmodules && npm install && npm test", + "examples:node:jest:test": "cd examples/node-jest && npm install && npm test", + "prepare": "cd $( git rev-parse 
--show-toplevel ) && husky install", + "lint": "npm run eslint:check && npm run prettier:check", + "eslint:check": "eslint src/ test/ examples/ *.js", + "eslint:fix": "eslint --fix src/ test/ examples/ *.js", + "pretest": "[ -n $CI ] || npm run build", + "test": "BABEL_ENV=commonjsNode node --throw-deprecation node_modules/.bin/jest test/unit/", + "pretest:browser": "optional-dev-dependency && npm run build && npm-run-all --parallel examples:browser:**", + "test:browser": "wdio run ./wdio.conf.js", + "pretest:node": "npm run build", + "test:node": "npm-run-all --parallel examples:node:**", + "test:pack": "./scripts/testpack.sh", + "pretest:benchmark": "npm run build", + "test:benchmark": "cd examples/benchmark && npm install && npm test", + "prettier:check": "prettier --check '**/*.{js,jsx,json,md}'", + "prettier:fix": "prettier --write '**/*.{js,jsx,json,md}'", + "bundlewatch": "npm run pretest:browser && bundlewatch --config bundlewatch.config.json", + "md": "runmd --watch --output=README.md README_js.md", + "docs": "( node --version | grep -q 'v18' ) && ( npm run build && npx runmd --output=README.md README_js.md )", + "docs:diff": "npm run docs && git diff --quiet README.md", + "build": "./scripts/build.sh", + "prepack": "npm run build", + "release": "standard-version --no-verify" + }, + "repository": { + "type": "git", + "url": "https://github.com/uuidjs/uuid.git" + }, + "lint-staged": { + "*.{js,jsx,json,md}": [ + "prettier --write" + ], + "*.{js,jsx}": [ + "eslint --fix" + ] + }, + "standard-version": { + "scripts": { + "postchangelog": "prettier --write CHANGELOG.md" + } + } +} diff --git a/amplify/functions/deleteDocument/node_modules/uuid/wrapper.mjs b/amplify/functions/deleteDocument/node_modules/uuid/wrapper.mjs new file mode 100644 index 0000000..c31e9ce --- /dev/null +++ b/amplify/functions/deleteDocument/node_modules/uuid/wrapper.mjs @@ -0,0 +1,10 @@ +import uuid from './dist/index.js'; +export const v1 = uuid.v1; +export const v3 = uuid.v3; +export 
const v4 = uuid.v4; +export const v5 = uuid.v5; +export const NIL = uuid.NIL; +export const version = uuid.version; +export const validate = uuid.validate; +export const stringify = uuid.stringify; +export const parse = uuid.parse; diff --git a/amplify/functions/deleteDocument/package-lock.json b/amplify/functions/deleteDocument/package-lock.json new file mode 100644 index 0000000..1f31bee --- /dev/null +++ b/amplify/functions/deleteDocument/package-lock.json @@ -0,0 +1,1349 @@ +{ + "name": "deletedocument", + "version": "1.0.0", + "lockfileVersion": 3, + "requires": true, + "packages": { + "": { + "name": "deletedocument", + "version": "1.0.0", + "license": "ISC", + "dependencies": { + "@aws-sdk/client-dynamodb": "^3.803.0", + "@aws-sdk/lib-dynamodb": "^3.803.0" + } + }, + "node_modules/@aws-crypto/sha256-browser": { + "version": "5.2.0", + "resolved": "https://registry.npmjs.org/@aws-crypto/sha256-browser/-/sha256-browser-5.2.0.tgz", + "integrity": "sha512-AXfN/lGotSQwu6HNcEsIASo7kWXZ5HYWvfOmSNKDsEqC4OashTp8alTmaz+F7TC2L083SFv5RdB+qU3Vs1kZqw==", + "license": "Apache-2.0", + "dependencies": { + "@aws-crypto/sha256-js": "^5.2.0", + "@aws-crypto/supports-web-crypto": "^5.2.0", + "@aws-crypto/util": "^5.2.0", + "@aws-sdk/types": "^3.222.0", + "@aws-sdk/util-locate-window": "^3.0.0", + "@smithy/util-utf8": "^2.0.0", + "tslib": "^2.6.2" + } + }, + "node_modules/@aws-crypto/sha256-browser/node_modules/@smithy/is-array-buffer": { + "version": "2.2.0", + "resolved": "https://registry.npmjs.org/@smithy/is-array-buffer/-/is-array-buffer-2.2.0.tgz", + "integrity": "sha512-GGP3O9QFD24uGeAXYUjwSTXARoqpZykHadOmA8G5vfJPK0/DC67qa//0qvqrJzL1xc8WQWX7/yc7fwudjPHPhA==", + "license": "Apache-2.0", + "dependencies": { + "tslib": "^2.6.2" + }, + "engines": { + "node": ">=14.0.0" + } + }, + "node_modules/@aws-crypto/sha256-browser/node_modules/@smithy/util-buffer-from": { + "version": "2.2.0", + "resolved": 
"https://registry.npmjs.org/@smithy/util-buffer-from/-/util-buffer-from-2.2.0.tgz", + "integrity": "sha512-IJdWBbTcMQ6DA0gdNhh/BwrLkDR+ADW5Kr1aZmd4k3DIF6ezMV4R2NIAmT08wQJ3yUK82thHWmC/TnK/wpMMIA==", + "license": "Apache-2.0", + "dependencies": { + "@smithy/is-array-buffer": "^2.2.0", + "tslib": "^2.6.2" + }, + "engines": { + "node": ">=14.0.0" + } + }, + "node_modules/@aws-crypto/sha256-browser/node_modules/@smithy/util-utf8": { + "version": "2.3.0", + "resolved": "https://registry.npmjs.org/@smithy/util-utf8/-/util-utf8-2.3.0.tgz", + "integrity": "sha512-R8Rdn8Hy72KKcebgLiv8jQcQkXoLMOGGv5uI1/k0l+snqkOzQ1R0ChUBCxWMlBsFMekWjq0wRudIweFs7sKT5A==", + "license": "Apache-2.0", + "dependencies": { + "@smithy/util-buffer-from": "^2.2.0", + "tslib": "^2.6.2" + }, + "engines": { + "node": ">=14.0.0" + } + }, + "node_modules/@aws-crypto/sha256-js": { + "version": "5.2.0", + "resolved": "https://registry.npmjs.org/@aws-crypto/sha256-js/-/sha256-js-5.2.0.tgz", + "integrity": "sha512-FFQQyu7edu4ufvIZ+OadFpHHOt+eSTBaYaki44c+akjg7qZg9oOQeLlk77F6tSYqjDAFClrHJk9tMf0HdVyOvA==", + "license": "Apache-2.0", + "dependencies": { + "@aws-crypto/util": "^5.2.0", + "@aws-sdk/types": "^3.222.0", + "tslib": "^2.6.2" + }, + "engines": { + "node": ">=16.0.0" + } + }, + "node_modules/@aws-crypto/supports-web-crypto": { + "version": "5.2.0", + "resolved": "https://registry.npmjs.org/@aws-crypto/supports-web-crypto/-/supports-web-crypto-5.2.0.tgz", + "integrity": "sha512-iAvUotm021kM33eCdNfwIN//F77/IADDSs58i+MDaOqFrVjZo9bAal0NK7HurRuWLLpF1iLX7gbWrjHjeo+YFg==", + "license": "Apache-2.0", + "dependencies": { + "tslib": "^2.6.2" + } + }, + "node_modules/@aws-crypto/util": { + "version": "5.2.0", + "resolved": "https://registry.npmjs.org/@aws-crypto/util/-/util-5.2.0.tgz", + "integrity": "sha512-4RkU9EsI6ZpBve5fseQlGNUWKMa1RLPQ1dnjnQoe07ldfIzcsGb5hC5W0Dm7u423KWzawlrpbjXBrXCEv9zazQ==", + "license": "Apache-2.0", + "dependencies": { + "@aws-sdk/types": "^3.222.0", + "@smithy/util-utf8": "^2.0.0", + 
"tslib": "^2.6.2" + } + }, + "node_modules/@aws-crypto/util/node_modules/@smithy/is-array-buffer": { + "version": "2.2.0", + "resolved": "https://registry.npmjs.org/@smithy/is-array-buffer/-/is-array-buffer-2.2.0.tgz", + "integrity": "sha512-GGP3O9QFD24uGeAXYUjwSTXARoqpZykHadOmA8G5vfJPK0/DC67qa//0qvqrJzL1xc8WQWX7/yc7fwudjPHPhA==", + "license": "Apache-2.0", + "dependencies": { + "tslib": "^2.6.2" + }, + "engines": { + "node": ">=14.0.0" + } + }, + "node_modules/@aws-crypto/util/node_modules/@smithy/util-buffer-from": { + "version": "2.2.0", + "resolved": "https://registry.npmjs.org/@smithy/util-buffer-from/-/util-buffer-from-2.2.0.tgz", + "integrity": "sha512-IJdWBbTcMQ6DA0gdNhh/BwrLkDR+ADW5Kr1aZmd4k3DIF6ezMV4R2NIAmT08wQJ3yUK82thHWmC/TnK/wpMMIA==", + "license": "Apache-2.0", + "dependencies": { + "@smithy/is-array-buffer": "^2.2.0", + "tslib": "^2.6.2" + }, + "engines": { + "node": ">=14.0.0" + } + }, + "node_modules/@aws-crypto/util/node_modules/@smithy/util-utf8": { + "version": "2.3.0", + "resolved": "https://registry.npmjs.org/@smithy/util-utf8/-/util-utf8-2.3.0.tgz", + "integrity": "sha512-R8Rdn8Hy72KKcebgLiv8jQcQkXoLMOGGv5uI1/k0l+snqkOzQ1R0ChUBCxWMlBsFMekWjq0wRudIweFs7sKT5A==", + "license": "Apache-2.0", + "dependencies": { + "@smithy/util-buffer-from": "^2.2.0", + "tslib": "^2.6.2" + }, + "engines": { + "node": ">=14.0.0" + } + }, + "node_modules/@aws-sdk/client-dynamodb": { + "version": "3.803.0", + "resolved": "https://registry.npmjs.org/@aws-sdk/client-dynamodb/-/client-dynamodb-3.803.0.tgz", + "integrity": "sha512-rJPidxfyTQHz/1Naq3FukSoIt40GwXfv3npVR15bCBFpqx9TXEt7GoIUbiqm+Ftx8sx9hqJ6XNhf80FIa243gw==", + "license": "Apache-2.0", + "dependencies": { + "@aws-crypto/sha256-browser": "5.2.0", + "@aws-crypto/sha256-js": "5.2.0", + "@aws-sdk/core": "3.799.0", + "@aws-sdk/credential-provider-node": "3.803.0", + "@aws-sdk/middleware-endpoint-discovery": "3.775.0", + "@aws-sdk/middleware-host-header": "3.775.0", + "@aws-sdk/middleware-logger": "3.775.0", + 
"@aws-sdk/middleware-recursion-detection": "3.775.0", + "@aws-sdk/middleware-user-agent": "3.799.0", + "@aws-sdk/region-config-resolver": "3.775.0", + "@aws-sdk/types": "3.775.0", + "@aws-sdk/util-endpoints": "3.787.0", + "@aws-sdk/util-user-agent-browser": "3.775.0", + "@aws-sdk/util-user-agent-node": "3.799.0", + "@smithy/config-resolver": "^4.1.0", + "@smithy/core": "^3.3.0", + "@smithy/fetch-http-handler": "^5.0.2", + "@smithy/hash-node": "^4.0.2", + "@smithy/invalid-dependency": "^4.0.2", + "@smithy/middleware-content-length": "^4.0.2", + "@smithy/middleware-endpoint": "^4.1.1", + "@smithy/middleware-retry": "^4.1.2", + "@smithy/middleware-serde": "^4.0.3", + "@smithy/middleware-stack": "^4.0.2", + "@smithy/node-config-provider": "^4.0.2", + "@smithy/node-http-handler": "^4.0.4", + "@smithy/protocol-http": "^5.1.0", + "@smithy/smithy-client": "^4.2.1", + "@smithy/types": "^4.2.0", + "@smithy/url-parser": "^4.0.2", + "@smithy/util-base64": "^4.0.0", + "@smithy/util-body-length-browser": "^4.0.0", + "@smithy/util-body-length-node": "^4.0.0", + "@smithy/util-defaults-mode-browser": "^4.0.9", + "@smithy/util-defaults-mode-node": "^4.0.9", + "@smithy/util-endpoints": "^3.0.2", + "@smithy/util-middleware": "^4.0.2", + "@smithy/util-retry": "^4.0.3", + "@smithy/util-utf8": "^4.0.0", + "@smithy/util-waiter": "^4.0.3", + "@types/uuid": "^9.0.1", + "tslib": "^2.6.2", + "uuid": "^9.0.1" + }, + "engines": { + "node": ">=18.0.0" + } + }, + "node_modules/@aws-sdk/client-sso": { + "version": "3.803.0", + "resolved": "https://registry.npmjs.org/@aws-sdk/client-sso/-/client-sso-3.803.0.tgz", + "integrity": "sha512-TT3BRD1yiL3IGXBKfq560vvEdyOJtJr8bp+R82dD6P0IoS8aFcNtF822BOJy7CqvxksOc3hQKLaPVzE82gE8Ow==", + "license": "Apache-2.0", + "dependencies": { + "@aws-crypto/sha256-browser": "5.2.0", + "@aws-crypto/sha256-js": "5.2.0", + "@aws-sdk/core": "3.799.0", + "@aws-sdk/middleware-host-header": "3.775.0", + "@aws-sdk/middleware-logger": "3.775.0", + 
"@aws-sdk/middleware-recursion-detection": "3.775.0", + "@aws-sdk/middleware-user-agent": "3.799.0", + "@aws-sdk/region-config-resolver": "3.775.0", + "@aws-sdk/types": "3.775.0", + "@aws-sdk/util-endpoints": "3.787.0", + "@aws-sdk/util-user-agent-browser": "3.775.0", + "@aws-sdk/util-user-agent-node": "3.799.0", + "@smithy/config-resolver": "^4.1.0", + "@smithy/core": "^3.3.0", + "@smithy/fetch-http-handler": "^5.0.2", + "@smithy/hash-node": "^4.0.2", + "@smithy/invalid-dependency": "^4.0.2", + "@smithy/middleware-content-length": "^4.0.2", + "@smithy/middleware-endpoint": "^4.1.1", + "@smithy/middleware-retry": "^4.1.2", + "@smithy/middleware-serde": "^4.0.3", + "@smithy/middleware-stack": "^4.0.2", + "@smithy/node-config-provider": "^4.0.2", + "@smithy/node-http-handler": "^4.0.4", + "@smithy/protocol-http": "^5.1.0", + "@smithy/smithy-client": "^4.2.1", + "@smithy/types": "^4.2.0", + "@smithy/url-parser": "^4.0.2", + "@smithy/util-base64": "^4.0.0", + "@smithy/util-body-length-browser": "^4.0.0", + "@smithy/util-body-length-node": "^4.0.0", + "@smithy/util-defaults-mode-browser": "^4.0.9", + "@smithy/util-defaults-mode-node": "^4.0.9", + "@smithy/util-endpoints": "^3.0.2", + "@smithy/util-middleware": "^4.0.2", + "@smithy/util-retry": "^4.0.3", + "@smithy/util-utf8": "^4.0.0", + "tslib": "^2.6.2" + }, + "engines": { + "node": ">=18.0.0" + } + }, + "node_modules/@aws-sdk/core": { + "version": "3.799.0", + "resolved": "https://registry.npmjs.org/@aws-sdk/core/-/core-3.799.0.tgz", + "integrity": "sha512-hkKF3Zpc6+H8GI1rlttYVRh9uEE77cqAzLmLpY3iu7sql8cZgPERRBfaFct8p1SaDyrksLNiboD1vKW58mbsYg==", + "license": "Apache-2.0", + "dependencies": { + "@aws-sdk/types": "3.775.0", + "@smithy/core": "^3.3.0", + "@smithy/node-config-provider": "^4.0.2", + "@smithy/property-provider": "^4.0.2", + "@smithy/protocol-http": "^5.1.0", + "@smithy/signature-v4": "^5.1.0", + "@smithy/smithy-client": "^4.2.1", + "@smithy/types": "^4.2.0", + "@smithy/util-middleware": "^4.0.2", + 
"fast-xml-parser": "4.4.1", + "tslib": "^2.6.2" + }, + "engines": { + "node": ">=18.0.0" + } + }, + "node_modules/@aws-sdk/credential-provider-env": { + "version": "3.799.0", + "resolved": "https://registry.npmjs.org/@aws-sdk/credential-provider-env/-/credential-provider-env-3.799.0.tgz", + "integrity": "sha512-vT/SSWtbUIOW/U21qgEySmmO44SFWIA7WeQPX1OrI8WJ5n7OEI23JWLHjLvHTkYmuZK6z1rPcv7HzRgmuGRibA==", + "license": "Apache-2.0", + "dependencies": { + "@aws-sdk/core": "3.799.0", + "@aws-sdk/types": "3.775.0", + "@smithy/property-provider": "^4.0.2", + "@smithy/types": "^4.2.0", + "tslib": "^2.6.2" + }, + "engines": { + "node": ">=18.0.0" + } + }, + "node_modules/@aws-sdk/credential-provider-http": { + "version": "3.799.0", + "resolved": "https://registry.npmjs.org/@aws-sdk/credential-provider-http/-/credential-provider-http-3.799.0.tgz", + "integrity": "sha512-2CjBpOWmhaPAExOgHnIB5nOkS5ef+mfRlJ1JC4nsnjAx0nrK4tk0XRE0LYz11P3+ue+a86cU8WTmBo+qjnGxPQ==", + "license": "Apache-2.0", + "dependencies": { + "@aws-sdk/core": "3.799.0", + "@aws-sdk/types": "3.775.0", + "@smithy/fetch-http-handler": "^5.0.2", + "@smithy/node-http-handler": "^4.0.4", + "@smithy/property-provider": "^4.0.2", + "@smithy/protocol-http": "^5.1.0", + "@smithy/smithy-client": "^4.2.1", + "@smithy/types": "^4.2.0", + "@smithy/util-stream": "^4.2.0", + "tslib": "^2.6.2" + }, + "engines": { + "node": ">=18.0.0" + } + }, + "node_modules/@aws-sdk/credential-provider-ini": { + "version": "3.803.0", + "resolved": "https://registry.npmjs.org/@aws-sdk/credential-provider-ini/-/credential-provider-ini-3.803.0.tgz", + "integrity": "sha512-XtbFftJex18GobpRWJxg5V7stVwvmV2gdBYW+zRM0YW6NZAR4NP/4vcc9ktM3++BWW5OF4Kvl7Nu7N4mAzRHmw==", + "license": "Apache-2.0", + "dependencies": { + "@aws-sdk/core": "3.799.0", + "@aws-sdk/credential-provider-env": "3.799.0", + "@aws-sdk/credential-provider-http": "3.799.0", + "@aws-sdk/credential-provider-process": "3.799.0", + "@aws-sdk/credential-provider-sso": "3.803.0", + 
"@aws-sdk/credential-provider-web-identity": "3.803.0", + "@aws-sdk/nested-clients": "3.803.0", + "@aws-sdk/types": "3.775.0", + "@smithy/credential-provider-imds": "^4.0.2", + "@smithy/property-provider": "^4.0.2", + "@smithy/shared-ini-file-loader": "^4.0.2", + "@smithy/types": "^4.2.0", + "tslib": "^2.6.2" + }, + "engines": { + "node": ">=18.0.0" + } + }, + "node_modules/@aws-sdk/credential-provider-node": { + "version": "3.803.0", + "resolved": "https://registry.npmjs.org/@aws-sdk/credential-provider-node/-/credential-provider-node-3.803.0.tgz", + "integrity": "sha512-lPdRYbjxwmv7gRqbaEe1Y1Yl5fD4c43AuK3P31eKjf1j41hZEQ0dg9a9KLk7i6ehEoVsxewnJrvbC2pVoYrCmQ==", + "license": "Apache-2.0", + "dependencies": { + "@aws-sdk/credential-provider-env": "3.799.0", + "@aws-sdk/credential-provider-http": "3.799.0", + "@aws-sdk/credential-provider-ini": "3.803.0", + "@aws-sdk/credential-provider-process": "3.799.0", + "@aws-sdk/credential-provider-sso": "3.803.0", + "@aws-sdk/credential-provider-web-identity": "3.803.0", + "@aws-sdk/types": "3.775.0", + "@smithy/credential-provider-imds": "^4.0.2", + "@smithy/property-provider": "^4.0.2", + "@smithy/shared-ini-file-loader": "^4.0.2", + "@smithy/types": "^4.2.0", + "tslib": "^2.6.2" + }, + "engines": { + "node": ">=18.0.0" + } + }, + "node_modules/@aws-sdk/credential-provider-process": { + "version": "3.799.0", + "resolved": "https://registry.npmjs.org/@aws-sdk/credential-provider-process/-/credential-provider-process-3.799.0.tgz", + "integrity": "sha512-g8jmNs2k98WNHMYcea1YKA+7ao2Ma4w0P42Dz4YpcI155pQHxHx25RwbOG+rsAKuo3bKwkW53HVE/ZTKhcWFgw==", + "license": "Apache-2.0", + "dependencies": { + "@aws-sdk/core": "3.799.0", + "@aws-sdk/types": "3.775.0", + "@smithy/property-provider": "^4.0.2", + "@smithy/shared-ini-file-loader": "^4.0.2", + "@smithy/types": "^4.2.0", + "tslib": "^2.6.2" + }, + "engines": { + "node": ">=18.0.0" + } + }, + "node_modules/@aws-sdk/credential-provider-sso": { + "version": "3.803.0", + "resolved": 
"https://registry.npmjs.org/@aws-sdk/credential-provider-sso/-/credential-provider-sso-3.803.0.tgz", + "integrity": "sha512-HEAcxSHrHxVekGnZqjFrkqdYAf4jFiZIMhuh0jqiqY6A4udEyXy1V623HVcTz/XXj6UBRnyD+zmOmlbzBvkfQg==", + "license": "Apache-2.0", + "dependencies": { + "@aws-sdk/client-sso": "3.803.0", + "@aws-sdk/core": "3.799.0", + "@aws-sdk/token-providers": "3.803.0", + "@aws-sdk/types": "3.775.0", + "@smithy/property-provider": "^4.0.2", + "@smithy/shared-ini-file-loader": "^4.0.2", + "@smithy/types": "^4.2.0", + "tslib": "^2.6.2" + }, + "engines": { + "node": ">=18.0.0" + } + }, + "node_modules/@aws-sdk/credential-provider-web-identity": { + "version": "3.803.0", + "resolved": "https://registry.npmjs.org/@aws-sdk/credential-provider-web-identity/-/credential-provider-web-identity-3.803.0.tgz", + "integrity": "sha512-oChnEpwI25OW4GPvhI1VnXM3IQEkDhESGFZd5JHzJDHyvSF2NU58V86jkJyaa4H4X25IbGaThuulNI5xCOngjw==", + "license": "Apache-2.0", + "dependencies": { + "@aws-sdk/core": "3.799.0", + "@aws-sdk/nested-clients": "3.803.0", + "@aws-sdk/types": "3.775.0", + "@smithy/property-provider": "^4.0.2", + "@smithy/types": "^4.2.0", + "tslib": "^2.6.2" + }, + "engines": { + "node": ">=18.0.0" + } + }, + "node_modules/@aws-sdk/endpoint-cache": { + "version": "3.723.0", + "resolved": "https://registry.npmjs.org/@aws-sdk/endpoint-cache/-/endpoint-cache-3.723.0.tgz", + "integrity": "sha512-2+a4WXRc+07uiPR+zJiPGKSOWaNJQNqitkks+6Hhm/haTLJqNVTgY2OWDh2PXvwMNpKB+AlGdhE65Oy6NzUgXg==", + "license": "Apache-2.0", + "dependencies": { + "mnemonist": "0.38.3", + "tslib": "^2.6.2" + }, + "engines": { + "node": ">=18.0.0" + } + }, + "node_modules/@aws-sdk/lib-dynamodb": { + "version": "3.803.0", + "resolved": "https://registry.npmjs.org/@aws-sdk/lib-dynamodb/-/lib-dynamodb-3.803.0.tgz", + "integrity": "sha512-J9oeaKnF0vfw1ixUc0Bu1GTcYwp/riiGCst/MSLPHeGqoFiYzyox/im1Pbuv2Ipx7/0QI7w5PxYdxOpwvqMCFg==", + "license": "Apache-2.0", + "dependencies": { + "@aws-sdk/core": "3.799.0", + 
"@aws-sdk/util-dynamodb": "3.803.0", + "@smithy/core": "^3.3.0", + "@smithy/smithy-client": "^4.2.1", + "@smithy/types": "^4.2.0", + "tslib": "^2.6.2" + }, + "engines": { + "node": ">=18.0.0" + }, + "peerDependencies": { + "@aws-sdk/client-dynamodb": "^3.803.0" + } + }, + "node_modules/@aws-sdk/middleware-endpoint-discovery": { + "version": "3.775.0", + "resolved": "https://registry.npmjs.org/@aws-sdk/middleware-endpoint-discovery/-/middleware-endpoint-discovery-3.775.0.tgz", + "integrity": "sha512-L0PmjSg7t+wovRo/Lin1kpei3e7wBhrENWb1Bbccu3PWUIfxolGeWplOmNhSlXjuQe9GXjf3z8kJRYOGBMFOvw==", + "license": "Apache-2.0", + "dependencies": { + "@aws-sdk/endpoint-cache": "3.723.0", + "@aws-sdk/types": "3.775.0", + "@smithy/node-config-provider": "^4.0.2", + "@smithy/protocol-http": "^5.1.0", + "@smithy/types": "^4.2.0", + "tslib": "^2.6.2" + }, + "engines": { + "node": ">=18.0.0" + } + }, + "node_modules/@aws-sdk/middleware-host-header": { + "version": "3.775.0", + "resolved": "https://registry.npmjs.org/@aws-sdk/middleware-host-header/-/middleware-host-header-3.775.0.tgz", + "integrity": "sha512-tkSegM0Z6WMXpLB8oPys/d+umYIocvO298mGvcMCncpRl77L9XkvSLJIFzaHes+o7djAgIduYw8wKIMStFss2w==", + "license": "Apache-2.0", + "dependencies": { + "@aws-sdk/types": "3.775.0", + "@smithy/protocol-http": "^5.1.0", + "@smithy/types": "^4.2.0", + "tslib": "^2.6.2" + }, + "engines": { + "node": ">=18.0.0" + } + }, + "node_modules/@aws-sdk/middleware-logger": { + "version": "3.775.0", + "resolved": "https://registry.npmjs.org/@aws-sdk/middleware-logger/-/middleware-logger-3.775.0.tgz", + "integrity": "sha512-FaxO1xom4MAoUJsldmR92nT1G6uZxTdNYOFYtdHfd6N2wcNaTuxgjIvqzg5y7QIH9kn58XX/dzf1iTjgqUStZw==", + "license": "Apache-2.0", + "dependencies": { + "@aws-sdk/types": "3.775.0", + "@smithy/types": "^4.2.0", + "tslib": "^2.6.2" + }, + "engines": { + "node": ">=18.0.0" + } + }, + "node_modules/@aws-sdk/middleware-recursion-detection": { + "version": "3.775.0", + "resolved": 
"https://registry.npmjs.org/@aws-sdk/middleware-recursion-detection/-/middleware-recursion-detection-3.775.0.tgz", + "integrity": "sha512-GLCzC8D0A0YDG5u3F5U03Vb9j5tcOEFhr8oc6PDk0k0vm5VwtZOE6LvK7hcCSoAB4HXyOUM0sQuXrbaAh9OwXA==", + "license": "Apache-2.0", + "dependencies": { + "@aws-sdk/types": "3.775.0", + "@smithy/protocol-http": "^5.1.0", + "@smithy/types": "^4.2.0", + "tslib": "^2.6.2" + }, + "engines": { + "node": ">=18.0.0" + } + }, + "node_modules/@aws-sdk/middleware-user-agent": { + "version": "3.799.0", + "resolved": "https://registry.npmjs.org/@aws-sdk/middleware-user-agent/-/middleware-user-agent-3.799.0.tgz", + "integrity": "sha512-TropQZanbOTxa+p+Nl4fWkzlRhgFwDfW+Wb6TR3jZN7IXHNlPpgGFpdrgvBExhW/RBhqr+94OsR8Ou58lp3hhA==", + "license": "Apache-2.0", + "dependencies": { + "@aws-sdk/core": "3.799.0", + "@aws-sdk/types": "3.775.0", + "@aws-sdk/util-endpoints": "3.787.0", + "@smithy/core": "^3.3.0", + "@smithy/protocol-http": "^5.1.0", + "@smithy/types": "^4.2.0", + "tslib": "^2.6.2" + }, + "engines": { + "node": ">=18.0.0" + } + }, + "node_modules/@aws-sdk/nested-clients": { + "version": "3.803.0", + "resolved": "https://registry.npmjs.org/@aws-sdk/nested-clients/-/nested-clients-3.803.0.tgz", + "integrity": "sha512-wiWiYaFQxK2u37G9IOXuWkHelEbU8ulLxdHpoPf0TSu/1boqLW7fcofuZATAvFcvigQx3oebwO8G4c/mmixTTw==", + "license": "Apache-2.0", + "dependencies": { + "@aws-crypto/sha256-browser": "5.2.0", + "@aws-crypto/sha256-js": "5.2.0", + "@aws-sdk/core": "3.799.0", + "@aws-sdk/middleware-host-header": "3.775.0", + "@aws-sdk/middleware-logger": "3.775.0", + "@aws-sdk/middleware-recursion-detection": "3.775.0", + "@aws-sdk/middleware-user-agent": "3.799.0", + "@aws-sdk/region-config-resolver": "3.775.0", + "@aws-sdk/types": "3.775.0", + "@aws-sdk/util-endpoints": "3.787.0", + "@aws-sdk/util-user-agent-browser": "3.775.0", + "@aws-sdk/util-user-agent-node": "3.799.0", + "@smithy/config-resolver": "^4.1.0", + "@smithy/core": "^3.3.0", + "@smithy/fetch-http-handler": 
"^5.0.2", + "@smithy/hash-node": "^4.0.2", + "@smithy/invalid-dependency": "^4.0.2", + "@smithy/middleware-content-length": "^4.0.2", + "@smithy/middleware-endpoint": "^4.1.1", + "@smithy/middleware-retry": "^4.1.2", + "@smithy/middleware-serde": "^4.0.3", + "@smithy/middleware-stack": "^4.0.2", + "@smithy/node-config-provider": "^4.0.2", + "@smithy/node-http-handler": "^4.0.4", + "@smithy/protocol-http": "^5.1.0", + "@smithy/smithy-client": "^4.2.1", + "@smithy/types": "^4.2.0", + "@smithy/url-parser": "^4.0.2", + "@smithy/util-base64": "^4.0.0", + "@smithy/util-body-length-browser": "^4.0.0", + "@smithy/util-body-length-node": "^4.0.0", + "@smithy/util-defaults-mode-browser": "^4.0.9", + "@smithy/util-defaults-mode-node": "^4.0.9", + "@smithy/util-endpoints": "^3.0.2", + "@smithy/util-middleware": "^4.0.2", + "@smithy/util-retry": "^4.0.3", + "@smithy/util-utf8": "^4.0.0", + "tslib": "^2.6.2" + }, + "engines": { + "node": ">=18.0.0" + } + }, + "node_modules/@aws-sdk/region-config-resolver": { + "version": "3.775.0", + "resolved": "https://registry.npmjs.org/@aws-sdk/region-config-resolver/-/region-config-resolver-3.775.0.tgz", + "integrity": "sha512-40iH3LJjrQS3LKUJAl7Wj0bln7RFPEvUYKFxtP8a+oKFDO0F65F52xZxIJbPn6sHkxWDAnZlGgdjZXM3p2g5wQ==", + "license": "Apache-2.0", + "dependencies": { + "@aws-sdk/types": "3.775.0", + "@smithy/node-config-provider": "^4.0.2", + "@smithy/types": "^4.2.0", + "@smithy/util-config-provider": "^4.0.0", + "@smithy/util-middleware": "^4.0.2", + "tslib": "^2.6.2" + }, + "engines": { + "node": ">=18.0.0" + } + }, + "node_modules/@aws-sdk/token-providers": { + "version": "3.803.0", + "resolved": "https://registry.npmjs.org/@aws-sdk/token-providers/-/token-providers-3.803.0.tgz", + "integrity": "sha512-lDbMgVjWWEPT7a6lLaAEPPljwOeLTjPX2sJ7MoDICpQotg4Yd8cQfX3nqScSyLAGSc7Rq/21UPnPoij/E0K3lg==", + "license": "Apache-2.0", + "dependencies": { + "@aws-sdk/nested-clients": "3.803.0", + "@aws-sdk/types": "3.775.0", + "@smithy/property-provider": 
"^4.0.2", + "@smithy/shared-ini-file-loader": "^4.0.2", + "@smithy/types": "^4.2.0", + "tslib": "^2.6.2" + }, + "engines": { + "node": ">=18.0.0" + } + }, + "node_modules/@aws-sdk/types": { + "version": "3.775.0", + "resolved": "https://registry.npmjs.org/@aws-sdk/types/-/types-3.775.0.tgz", + "integrity": "sha512-ZoGKwa4C9fC9Av6bdfqcW6Ix5ot05F/S4VxWR2nHuMv7hzfmAjTOcUiWT7UR4hM/U0whf84VhDtXN/DWAk52KA==", + "license": "Apache-2.0", + "dependencies": { + "@smithy/types": "^4.2.0", + "tslib": "^2.6.2" + }, + "engines": { + "node": ">=18.0.0" + } + }, + "node_modules/@aws-sdk/util-dynamodb": { + "version": "3.803.0", + "resolved": "https://registry.npmjs.org/@aws-sdk/util-dynamodb/-/util-dynamodb-3.803.0.tgz", + "integrity": "sha512-QiXvurnve8xIm41Zf/jNXwcYotDX3KZbHcsTaJ7ILhyFomqCjJ6bjLcCRdfndG600N5ua6AEK2XGw1luyBQxig==", + "license": "Apache-2.0", + "dependencies": { + "tslib": "^2.6.2" + }, + "engines": { + "node": ">=18.0.0" + }, + "peerDependencies": { + "@aws-sdk/client-dynamodb": "^3.803.0" + } + }, + "node_modules/@aws-sdk/util-endpoints": { + "version": "3.787.0", + "resolved": "https://registry.npmjs.org/@aws-sdk/util-endpoints/-/util-endpoints-3.787.0.tgz", + "integrity": "sha512-fd3zkiOkwnbdbN0Xp9TsP5SWrmv0SpT70YEdbb8wAj2DWQwiCmFszaSs+YCvhoCdmlR3Wl9Spu0pGpSAGKeYvQ==", + "license": "Apache-2.0", + "dependencies": { + "@aws-sdk/types": "3.775.0", + "@smithy/types": "^4.2.0", + "@smithy/util-endpoints": "^3.0.2", + "tslib": "^2.6.2" + }, + "engines": { + "node": ">=18.0.0" + } + }, + "node_modules/@aws-sdk/util-locate-window": { + "version": "3.723.0", + "resolved": "https://registry.npmjs.org/@aws-sdk/util-locate-window/-/util-locate-window-3.723.0.tgz", + "integrity": "sha512-Yf2CS10BqK688DRsrKI/EO6B8ff5J86NXe4C+VCysK7UOgN0l1zOTeTukZ3H8Q9tYYX3oaF1961o8vRkFm7Nmw==", + "license": "Apache-2.0", + "dependencies": { + "tslib": "^2.6.2" + }, + "engines": { + "node": ">=18.0.0" + } + }, + "node_modules/@aws-sdk/util-user-agent-browser": { + "version": "3.775.0", + 
"resolved": "https://registry.npmjs.org/@aws-sdk/util-user-agent-browser/-/util-user-agent-browser-3.775.0.tgz", + "integrity": "sha512-txw2wkiJmZKVdDbscK7VBK+u+TJnRtlUjRTLei+elZg2ADhpQxfVAQl436FUeIv6AhB/oRHW6/K/EAGXUSWi0A==", + "license": "Apache-2.0", + "dependencies": { + "@aws-sdk/types": "3.775.0", + "@smithy/types": "^4.2.0", + "bowser": "^2.11.0", + "tslib": "^2.6.2" + } + }, + "node_modules/@aws-sdk/util-user-agent-node": { + "version": "3.799.0", + "resolved": "https://registry.npmjs.org/@aws-sdk/util-user-agent-node/-/util-user-agent-node-3.799.0.tgz", + "integrity": "sha512-iXBk38RbIWPF5Nq9O4AnktORAzXovSVqWYClvS1qbE7ILsnTLJbagU9HlU25O2iV5COVh1qZkwuP5NHQ2yTEyw==", + "license": "Apache-2.0", + "dependencies": { + "@aws-sdk/middleware-user-agent": "3.799.0", + "@aws-sdk/types": "3.775.0", + "@smithy/node-config-provider": "^4.0.2", + "@smithy/types": "^4.2.0", + "tslib": "^2.6.2" + }, + "engines": { + "node": ">=18.0.0" + }, + "peerDependencies": { + "aws-crt": ">=1.0.0" + }, + "peerDependenciesMeta": { + "aws-crt": { + "optional": true + } + } + }, + "node_modules/@smithy/abort-controller": { + "version": "4.0.2", + "resolved": "https://registry.npmjs.org/@smithy/abort-controller/-/abort-controller-4.0.2.tgz", + "integrity": "sha512-Sl/78VDtgqKxN2+1qduaVE140XF+Xg+TafkncspwM4jFP/LHr76ZHmIY/y3V1M0mMLNk+Je6IGbzxy23RSToMw==", + "license": "Apache-2.0", + "dependencies": { + "@smithy/types": "^4.2.0", + "tslib": "^2.6.2" + }, + "engines": { + "node": ">=18.0.0" + } + }, + "node_modules/@smithy/config-resolver": { + "version": "4.1.0", + "resolved": "https://registry.npmjs.org/@smithy/config-resolver/-/config-resolver-4.1.0.tgz", + "integrity": "sha512-8smPlwhga22pwl23fM5ew4T9vfLUCeFXlcqNOCD5M5h8VmNPNUE9j6bQSuRXpDSV11L/E/SwEBQuW8hr6+nS1A==", + "license": "Apache-2.0", + "dependencies": { + "@smithy/node-config-provider": "^4.0.2", + "@smithy/types": "^4.2.0", + "@smithy/util-config-provider": "^4.0.0", + "@smithy/util-middleware": "^4.0.2", + "tslib": "^2.6.2" + 
}, + "engines": { + "node": ">=18.0.0" + } + }, + "node_modules/@smithy/core": { + "version": "3.3.1", + "resolved": "https://registry.npmjs.org/@smithy/core/-/core-3.3.1.tgz", + "integrity": "sha512-W7AppgQD3fP1aBmo8wWo0id5zeR2/aYRy067vZsDVaa6v/mdhkg6DxXwEVuSPjZl+ZnvWAQbUMCd5ckw38+tHQ==", + "license": "Apache-2.0", + "dependencies": { + "@smithy/middleware-serde": "^4.0.3", + "@smithy/protocol-http": "^5.1.0", + "@smithy/types": "^4.2.0", + "@smithy/util-body-length-browser": "^4.0.0", + "@smithy/util-middleware": "^4.0.2", + "@smithy/util-stream": "^4.2.0", + "@smithy/util-utf8": "^4.0.0", + "tslib": "^2.6.2" + }, + "engines": { + "node": ">=18.0.0" + } + }, + "node_modules/@smithy/credential-provider-imds": { + "version": "4.0.2", + "resolved": "https://registry.npmjs.org/@smithy/credential-provider-imds/-/credential-provider-imds-4.0.2.tgz", + "integrity": "sha512-32lVig6jCaWBHnY+OEQ6e6Vnt5vDHaLiydGrwYMW9tPqO688hPGTYRamYJ1EptxEC2rAwJrHWmPoKRBl4iTa8w==", + "license": "Apache-2.0", + "dependencies": { + "@smithy/node-config-provider": "^4.0.2", + "@smithy/property-provider": "^4.0.2", + "@smithy/types": "^4.2.0", + "@smithy/url-parser": "^4.0.2", + "tslib": "^2.6.2" + }, + "engines": { + "node": ">=18.0.0" + } + }, + "node_modules/@smithy/fetch-http-handler": { + "version": "5.0.2", + "resolved": "https://registry.npmjs.org/@smithy/fetch-http-handler/-/fetch-http-handler-5.0.2.tgz", + "integrity": "sha512-+9Dz8sakS9pe7f2cBocpJXdeVjMopUDLgZs1yWeu7h++WqSbjUYv/JAJwKwXw1HV6gq1jyWjxuyn24E2GhoEcQ==", + "license": "Apache-2.0", + "dependencies": { + "@smithy/protocol-http": "^5.1.0", + "@smithy/querystring-builder": "^4.0.2", + "@smithy/types": "^4.2.0", + "@smithy/util-base64": "^4.0.0", + "tslib": "^2.6.2" + }, + "engines": { + "node": ">=18.0.0" + } + }, + "node_modules/@smithy/hash-node": { + "version": "4.0.2", + "resolved": "https://registry.npmjs.org/@smithy/hash-node/-/hash-node-4.0.2.tgz", + "integrity": 
"sha512-VnTpYPnRUE7yVhWozFdlxcYknv9UN7CeOqSrMH+V877v4oqtVYuoqhIhtSjmGPvYrYnAkaM61sLMKHvxL138yg==", + "license": "Apache-2.0", + "dependencies": { + "@smithy/types": "^4.2.0", + "@smithy/util-buffer-from": "^4.0.0", + "@smithy/util-utf8": "^4.0.0", + "tslib": "^2.6.2" + }, + "engines": { + "node": ">=18.0.0" + } + }, + "node_modules/@smithy/invalid-dependency": { + "version": "4.0.2", + "resolved": "https://registry.npmjs.org/@smithy/invalid-dependency/-/invalid-dependency-4.0.2.tgz", + "integrity": "sha512-GatB4+2DTpgWPday+mnUkoumP54u/MDM/5u44KF9hIu8jF0uafZtQLcdfIKkIcUNuF/fBojpLEHZS/56JqPeXQ==", + "license": "Apache-2.0", + "dependencies": { + "@smithy/types": "^4.2.0", + "tslib": "^2.6.2" + }, + "engines": { + "node": ">=18.0.0" + } + }, + "node_modules/@smithy/is-array-buffer": { + "version": "4.0.0", + "resolved": "https://registry.npmjs.org/@smithy/is-array-buffer/-/is-array-buffer-4.0.0.tgz", + "integrity": "sha512-saYhF8ZZNoJDTvJBEWgeBccCg+yvp1CX+ed12yORU3NilJScfc6gfch2oVb4QgxZrGUx3/ZJlb+c/dJbyupxlw==", + "license": "Apache-2.0", + "dependencies": { + "tslib": "^2.6.2" + }, + "engines": { + "node": ">=18.0.0" + } + }, + "node_modules/@smithy/middleware-content-length": { + "version": "4.0.2", + "resolved": "https://registry.npmjs.org/@smithy/middleware-content-length/-/middleware-content-length-4.0.2.tgz", + "integrity": "sha512-hAfEXm1zU+ELvucxqQ7I8SszwQ4znWMbNv6PLMndN83JJN41EPuS93AIyh2N+gJ6x8QFhzSO6b7q2e6oClDI8A==", + "license": "Apache-2.0", + "dependencies": { + "@smithy/protocol-http": "^5.1.0", + "@smithy/types": "^4.2.0", + "tslib": "^2.6.2" + }, + "engines": { + "node": ">=18.0.0" + } + }, + "node_modules/@smithy/middleware-endpoint": { + "version": "4.1.2", + "resolved": "https://registry.npmjs.org/@smithy/middleware-endpoint/-/middleware-endpoint-4.1.2.tgz", + "integrity": "sha512-EqOy3xaEGQpsKxLlzYstDRJ8eY90CbyBP4cl+w7r45mE60S8YliyL9AgWsdWcyNiB95E2PMqHBEv67nNl1zLfg==", + "license": "Apache-2.0", + "dependencies": { + "@smithy/core": "^3.3.1", + 
"@smithy/middleware-serde": "^4.0.3", + "@smithy/node-config-provider": "^4.0.2", + "@smithy/shared-ini-file-loader": "^4.0.2", + "@smithy/types": "^4.2.0", + "@smithy/url-parser": "^4.0.2", + "@smithy/util-middleware": "^4.0.2", + "tslib": "^2.6.2" + }, + "engines": { + "node": ">=18.0.0" + } + }, + "node_modules/@smithy/middleware-retry": { + "version": "4.1.3", + "resolved": "https://registry.npmjs.org/@smithy/middleware-retry/-/middleware-retry-4.1.3.tgz", + "integrity": "sha512-AsJtI9KiFoEGAhcEKZyzzPfrszAQGcf4HSYKmenz0WGx/6YNvoPPv4OSGfZTCsDmgPHv4pXzxE+7QV7jcGWNKw==", + "license": "Apache-2.0", + "dependencies": { + "@smithy/node-config-provider": "^4.0.2", + "@smithy/protocol-http": "^5.1.0", + "@smithy/service-error-classification": "^4.0.3", + "@smithy/smithy-client": "^4.2.2", + "@smithy/types": "^4.2.0", + "@smithy/util-middleware": "^4.0.2", + "@smithy/util-retry": "^4.0.3", + "tslib": "^2.6.2", + "uuid": "^9.0.1" + }, + "engines": { + "node": ">=18.0.0" + } + }, + "node_modules/@smithy/middleware-serde": { + "version": "4.0.3", + "resolved": "https://registry.npmjs.org/@smithy/middleware-serde/-/middleware-serde-4.0.3.tgz", + "integrity": "sha512-rfgDVrgLEVMmMn0BI8O+8OVr6vXzjV7HZj57l0QxslhzbvVfikZbVfBVthjLHqib4BW44QhcIgJpvebHlRaC9A==", + "license": "Apache-2.0", + "dependencies": { + "@smithy/types": "^4.2.0", + "tslib": "^2.6.2" + }, + "engines": { + "node": ">=18.0.0" + } + }, + "node_modules/@smithy/middleware-stack": { + "version": "4.0.2", + "resolved": "https://registry.npmjs.org/@smithy/middleware-stack/-/middleware-stack-4.0.2.tgz", + "integrity": "sha512-eSPVcuJJGVYrFYu2hEq8g8WWdJav3sdrI4o2c6z/rjnYDd3xH9j9E7deZQCzFn4QvGPouLngH3dQ+QVTxv5bOQ==", + "license": "Apache-2.0", + "dependencies": { + "@smithy/types": "^4.2.0", + "tslib": "^2.6.2" + }, + "engines": { + "node": ">=18.0.0" + } + }, + "node_modules/@smithy/node-config-provider": { + "version": "4.0.2", + "resolved": 
"https://registry.npmjs.org/@smithy/node-config-provider/-/node-config-provider-4.0.2.tgz", + "integrity": "sha512-WgCkILRZfJwJ4Da92a6t3ozN/zcvYyJGUTmfGbgS/FkCcoCjl7G4FJaCDN1ySdvLvemnQeo25FdkyMSTSwulsw==", + "license": "Apache-2.0", + "dependencies": { + "@smithy/property-provider": "^4.0.2", + "@smithy/shared-ini-file-loader": "^4.0.2", + "@smithy/types": "^4.2.0", + "tslib": "^2.6.2" + }, + "engines": { + "node": ">=18.0.0" + } + }, + "node_modules/@smithy/node-http-handler": { + "version": "4.0.4", + "resolved": "https://registry.npmjs.org/@smithy/node-http-handler/-/node-http-handler-4.0.4.tgz", + "integrity": "sha512-/mdqabuAT3o/ihBGjL94PUbTSPSRJ0eeVTdgADzow0wRJ0rN4A27EOrtlK56MYiO1fDvlO3jVTCxQtQmK9dZ1g==", + "license": "Apache-2.0", + "dependencies": { + "@smithy/abort-controller": "^4.0.2", + "@smithy/protocol-http": "^5.1.0", + "@smithy/querystring-builder": "^4.0.2", + "@smithy/types": "^4.2.0", + "tslib": "^2.6.2" + }, + "engines": { + "node": ">=18.0.0" + } + }, + "node_modules/@smithy/property-provider": { + "version": "4.0.2", + "resolved": "https://registry.npmjs.org/@smithy/property-provider/-/property-provider-4.0.2.tgz", + "integrity": "sha512-wNRoQC1uISOuNc2s4hkOYwYllmiyrvVXWMtq+TysNRVQaHm4yoafYQyjN/goYZS+QbYlPIbb/QRjaUZMuzwQ7A==", + "license": "Apache-2.0", + "dependencies": { + "@smithy/types": "^4.2.0", + "tslib": "^2.6.2" + }, + "engines": { + "node": ">=18.0.0" + } + }, + "node_modules/@smithy/protocol-http": { + "version": "5.1.0", + "resolved": "https://registry.npmjs.org/@smithy/protocol-http/-/protocol-http-5.1.0.tgz", + "integrity": "sha512-KxAOL1nUNw2JTYrtviRRjEnykIDhxc84qMBzxvu1MUfQfHTuBlCG7PA6EdVwqpJjH7glw7FqQoFxUJSyBQgu7g==", + "license": "Apache-2.0", + "dependencies": { + "@smithy/types": "^4.2.0", + "tslib": "^2.6.2" + }, + "engines": { + "node": ">=18.0.0" + } + }, + "node_modules/@smithy/querystring-builder": { + "version": "4.0.2", + "resolved": 
"https://registry.npmjs.org/@smithy/querystring-builder/-/querystring-builder-4.0.2.tgz", + "integrity": "sha512-NTOs0FwHw1vimmQM4ebh+wFQvOwkEf/kQL6bSM1Lock+Bv4I89B3hGYoUEPkmvYPkDKyp5UdXJYu+PoTQ3T31Q==", + "license": "Apache-2.0", + "dependencies": { + "@smithy/types": "^4.2.0", + "@smithy/util-uri-escape": "^4.0.0", + "tslib": "^2.6.2" + }, + "engines": { + "node": ">=18.0.0" + } + }, + "node_modules/@smithy/querystring-parser": { + "version": "4.0.2", + "resolved": "https://registry.npmjs.org/@smithy/querystring-parser/-/querystring-parser-4.0.2.tgz", + "integrity": "sha512-v6w8wnmZcVXjfVLjxw8qF7OwESD9wnpjp0Dqry/Pod0/5vcEA3qxCr+BhbOHlxS8O+29eLpT3aagxXGwIoEk7Q==", + "license": "Apache-2.0", + "dependencies": { + "@smithy/types": "^4.2.0", + "tslib": "^2.6.2" + }, + "engines": { + "node": ">=18.0.0" + } + }, + "node_modules/@smithy/service-error-classification": { + "version": "4.0.3", + "resolved": "https://registry.npmjs.org/@smithy/service-error-classification/-/service-error-classification-4.0.3.tgz", + "integrity": "sha512-FTbcajmltovWMjj3tksDQdD23b2w6gH+A0DYA1Yz3iSpjDj8fmkwy62UnXcWMy4d5YoMoSyLFHMfkEVEzbiN8Q==", + "license": "Apache-2.0", + "dependencies": { + "@smithy/types": "^4.2.0" + }, + "engines": { + "node": ">=18.0.0" + } + }, + "node_modules/@smithy/shared-ini-file-loader": { + "version": "4.0.2", + "resolved": "https://registry.npmjs.org/@smithy/shared-ini-file-loader/-/shared-ini-file-loader-4.0.2.tgz", + "integrity": "sha512-J9/gTWBGVuFZ01oVA6vdb4DAjf1XbDhK6sLsu3OS9qmLrS6KB5ygpeHiM3miIbj1qgSJ96GYszXFWv6ErJ8QEw==", + "license": "Apache-2.0", + "dependencies": { + "@smithy/types": "^4.2.0", + "tslib": "^2.6.2" + }, + "engines": { + "node": ">=18.0.0" + } + }, + "node_modules/@smithy/signature-v4": { + "version": "5.1.0", + "resolved": "https://registry.npmjs.org/@smithy/signature-v4/-/signature-v4-5.1.0.tgz", + "integrity": "sha512-4t5WX60sL3zGJF/CtZsUQTs3UrZEDO2P7pEaElrekbLqkWPYkgqNW1oeiNYC6xXifBnT9dVBOnNQRvOE9riU9w==", + "license": "Apache-2.0", + 
"dependencies": { + "@smithy/is-array-buffer": "^4.0.0", + "@smithy/protocol-http": "^5.1.0", + "@smithy/types": "^4.2.0", + "@smithy/util-hex-encoding": "^4.0.0", + "@smithy/util-middleware": "^4.0.2", + "@smithy/util-uri-escape": "^4.0.0", + "@smithy/util-utf8": "^4.0.0", + "tslib": "^2.6.2" + }, + "engines": { + "node": ">=18.0.0" + } + }, + "node_modules/@smithy/smithy-client": { + "version": "4.2.2", + "resolved": "https://registry.npmjs.org/@smithy/smithy-client/-/smithy-client-4.2.2.tgz", + "integrity": "sha512-3AnHfsMdq9Wg7+3BeR1HuLWI9+DMA/SoHVpCWq6xSsa52ikNd6nlF/wFzdpHyGtVa+Aji6lMgvwOF4sGcVA7SA==", + "license": "Apache-2.0", + "dependencies": { + "@smithy/core": "^3.3.1", + "@smithy/middleware-endpoint": "^4.1.2", + "@smithy/middleware-stack": "^4.0.2", + "@smithy/protocol-http": "^5.1.0", + "@smithy/types": "^4.2.0", + "@smithy/util-stream": "^4.2.0", + "tslib": "^2.6.2" + }, + "engines": { + "node": ">=18.0.0" + } + }, + "node_modules/@smithy/types": { + "version": "4.2.0", + "resolved": "https://registry.npmjs.org/@smithy/types/-/types-4.2.0.tgz", + "integrity": "sha512-7eMk09zQKCO+E/ivsjQv+fDlOupcFUCSC/L2YUPgwhvowVGWbPQHjEFcmjt7QQ4ra5lyowS92SV53Zc6XD4+fg==", + "license": "Apache-2.0", + "dependencies": { + "tslib": "^2.6.2" + }, + "engines": { + "node": ">=18.0.0" + } + }, + "node_modules/@smithy/url-parser": { + "version": "4.0.2", + "resolved": "https://registry.npmjs.org/@smithy/url-parser/-/url-parser-4.0.2.tgz", + "integrity": "sha512-Bm8n3j2ScqnT+kJaClSVCMeiSenK6jVAzZCNewsYWuZtnBehEz4r2qP0riZySZVfzB+03XZHJeqfmJDkeeSLiQ==", + "license": "Apache-2.0", + "dependencies": { + "@smithy/querystring-parser": "^4.0.2", + "@smithy/types": "^4.2.0", + "tslib": "^2.6.2" + }, + "engines": { + "node": ">=18.0.0" + } + }, + "node_modules/@smithy/util-base64": { + "version": "4.0.0", + "resolved": "https://registry.npmjs.org/@smithy/util-base64/-/util-base64-4.0.0.tgz", + "integrity": 
"sha512-CvHfCmO2mchox9kjrtzoHkWHxjHZzaFojLc8quxXY7WAAMAg43nuxwv95tATVgQFNDwd4M9S1qFzj40Ul41Kmg==", + "license": "Apache-2.0", + "dependencies": { + "@smithy/util-buffer-from": "^4.0.0", + "@smithy/util-utf8": "^4.0.0", + "tslib": "^2.6.2" + }, + "engines": { + "node": ">=18.0.0" + } + }, + "node_modules/@smithy/util-body-length-browser": { + "version": "4.0.0", + "resolved": "https://registry.npmjs.org/@smithy/util-body-length-browser/-/util-body-length-browser-4.0.0.tgz", + "integrity": "sha512-sNi3DL0/k64/LO3A256M+m3CDdG6V7WKWHdAiBBMUN8S3hK3aMPhwnPik2A/a2ONN+9doY9UxaLfgqsIRg69QA==", + "license": "Apache-2.0", + "dependencies": { + "tslib": "^2.6.2" + }, + "engines": { + "node": ">=18.0.0" + } + }, + "node_modules/@smithy/util-body-length-node": { + "version": "4.0.0", + "resolved": "https://registry.npmjs.org/@smithy/util-body-length-node/-/util-body-length-node-4.0.0.tgz", + "integrity": "sha512-q0iDP3VsZzqJyje8xJWEJCNIu3lktUGVoSy1KB0UWym2CL1siV3artm+u1DFYTLejpsrdGyCSWBdGNjJzfDPjg==", + "license": "Apache-2.0", + "dependencies": { + "tslib": "^2.6.2" + }, + "engines": { + "node": ">=18.0.0" + } + }, + "node_modules/@smithy/util-buffer-from": { + "version": "4.0.0", + "resolved": "https://registry.npmjs.org/@smithy/util-buffer-from/-/util-buffer-from-4.0.0.tgz", + "integrity": "sha512-9TOQ7781sZvddgO8nxueKi3+yGvkY35kotA0Y6BWRajAv8jjmigQ1sBwz0UX47pQMYXJPahSKEKYFgt+rXdcug==", + "license": "Apache-2.0", + "dependencies": { + "@smithy/is-array-buffer": "^4.0.0", + "tslib": "^2.6.2" + }, + "engines": { + "node": ">=18.0.0" + } + }, + "node_modules/@smithy/util-config-provider": { + "version": "4.0.0", + "resolved": "https://registry.npmjs.org/@smithy/util-config-provider/-/util-config-provider-4.0.0.tgz", + "integrity": "sha512-L1RBVzLyfE8OXH+1hsJ8p+acNUSirQnWQ6/EgpchV88G6zGBTDPdXiiExei6Z1wR2RxYvxY/XLw6AMNCCt8H3w==", + "license": "Apache-2.0", + "dependencies": { + "tslib": "^2.6.2" + }, + "engines": { + "node": ">=18.0.0" + } + }, + 
"node_modules/@smithy/util-defaults-mode-browser": { + "version": "4.0.10", + "resolved": "https://registry.npmjs.org/@smithy/util-defaults-mode-browser/-/util-defaults-mode-browser-4.0.10.tgz", + "integrity": "sha512-2k6fgUNOZ1Rn0gEjvGPGrDEINLG8qSBHsN7xlkkbO+fnHJ36BQPDzhFfMmYSDS8AgzoygqQiDOQ+6Hp2vBTUdA==", + "license": "Apache-2.0", + "dependencies": { + "@smithy/property-provider": "^4.0.2", + "@smithy/smithy-client": "^4.2.2", + "@smithy/types": "^4.2.0", + "bowser": "^2.11.0", + "tslib": "^2.6.2" + }, + "engines": { + "node": ">=18.0.0" + } + }, + "node_modules/@smithy/util-defaults-mode-node": { + "version": "4.0.10", + "resolved": "https://registry.npmjs.org/@smithy/util-defaults-mode-node/-/util-defaults-mode-node-4.0.10.tgz", + "integrity": "sha512-2XR1WRglLVmoIFts7bODUTgBdVyvkfKNkydHrlsI5VxW9q3s1hnJCuY+f1OHzvj5ue23q4vydM2fjrMjf2HSdQ==", + "license": "Apache-2.0", + "dependencies": { + "@smithy/config-resolver": "^4.1.0", + "@smithy/credential-provider-imds": "^4.0.2", + "@smithy/node-config-provider": "^4.0.2", + "@smithy/property-provider": "^4.0.2", + "@smithy/smithy-client": "^4.2.2", + "@smithy/types": "^4.2.0", + "tslib": "^2.6.2" + }, + "engines": { + "node": ">=18.0.0" + } + }, + "node_modules/@smithy/util-endpoints": { + "version": "3.0.2", + "resolved": "https://registry.npmjs.org/@smithy/util-endpoints/-/util-endpoints-3.0.2.tgz", + "integrity": "sha512-6QSutU5ZyrpNbnd51zRTL7goojlcnuOB55+F9VBD+j8JpRY50IGamsjlycrmpn8PQkmJucFW8A0LSfXj7jjtLQ==", + "license": "Apache-2.0", + "dependencies": { + "@smithy/node-config-provider": "^4.0.2", + "@smithy/types": "^4.2.0", + "tslib": "^2.6.2" + }, + "engines": { + "node": ">=18.0.0" + } + }, + "node_modules/@smithy/util-hex-encoding": { + "version": "4.0.0", + "resolved": "https://registry.npmjs.org/@smithy/util-hex-encoding/-/util-hex-encoding-4.0.0.tgz", + "integrity": "sha512-Yk5mLhHtfIgW2W2WQZWSg5kuMZCVbvhFmC7rV4IO2QqnZdbEFPmQnCcGMAX2z/8Qj3B9hYYNjZOhWym+RwhePw==", + "license": "Apache-2.0", + 
"dependencies": { + "tslib": "^2.6.2" + }, + "engines": { + "node": ">=18.0.0" + } + }, + "node_modules/@smithy/util-middleware": { + "version": "4.0.2", + "resolved": "https://registry.npmjs.org/@smithy/util-middleware/-/util-middleware-4.0.2.tgz", + "integrity": "sha512-6GDamTGLuBQVAEuQ4yDQ+ti/YINf/MEmIegrEeg7DdB/sld8BX1lqt9RRuIcABOhAGTA50bRbPzErez7SlDtDQ==", + "license": "Apache-2.0", + "dependencies": { + "@smithy/types": "^4.2.0", + "tslib": "^2.6.2" + }, + "engines": { + "node": ">=18.0.0" + } + }, + "node_modules/@smithy/util-retry": { + "version": "4.0.3", + "resolved": "https://registry.npmjs.org/@smithy/util-retry/-/util-retry-4.0.3.tgz", + "integrity": "sha512-DPuYjZQDXmKr/sNvy9Spu8R/ESa2e22wXZzSAY6NkjOLj6spbIje/Aq8rT97iUMdDj0qHMRIe+bTxvlU74d9Ng==", + "license": "Apache-2.0", + "dependencies": { + "@smithy/service-error-classification": "^4.0.3", + "@smithy/types": "^4.2.0", + "tslib": "^2.6.2" + }, + "engines": { + "node": ">=18.0.0" + } + }, + "node_modules/@smithy/util-stream": { + "version": "4.2.0", + "resolved": "https://registry.npmjs.org/@smithy/util-stream/-/util-stream-4.2.0.tgz", + "integrity": "sha512-Vj1TtwWnuWqdgQI6YTUF5hQ/0jmFiOYsc51CSMgj7QfyO+RF4EnT2HNjoviNlOOmgzgvf3f5yno+EiC4vrnaWQ==", + "license": "Apache-2.0", + "dependencies": { + "@smithy/fetch-http-handler": "^5.0.2", + "@smithy/node-http-handler": "^4.0.4", + "@smithy/types": "^4.2.0", + "@smithy/util-base64": "^4.0.0", + "@smithy/util-buffer-from": "^4.0.0", + "@smithy/util-hex-encoding": "^4.0.0", + "@smithy/util-utf8": "^4.0.0", + "tslib": "^2.6.2" + }, + "engines": { + "node": ">=18.0.0" + } + }, + "node_modules/@smithy/util-uri-escape": { + "version": "4.0.0", + "resolved": "https://registry.npmjs.org/@smithy/util-uri-escape/-/util-uri-escape-4.0.0.tgz", + "integrity": "sha512-77yfbCbQMtgtTylO9itEAdpPXSog3ZxMe09AEhm0dU0NLTalV70ghDZFR+Nfi1C60jnJoh/Re4090/DuZh2Omg==", + "license": "Apache-2.0", + "dependencies": { + "tslib": "^2.6.2" + }, + "engines": { + "node": ">=18.0.0" + } 
+ }, + "node_modules/@smithy/util-utf8": { + "version": "4.0.0", + "resolved": "https://registry.npmjs.org/@smithy/util-utf8/-/util-utf8-4.0.0.tgz", + "integrity": "sha512-b+zebfKCfRdgNJDknHCob3O7FpeYQN6ZG6YLExMcasDHsCXlsXCEuiPZeLnJLpwa5dvPetGlnGCiMHuLwGvFow==", + "license": "Apache-2.0", + "dependencies": { + "@smithy/util-buffer-from": "^4.0.0", + "tslib": "^2.6.2" + }, + "engines": { + "node": ">=18.0.0" + } + }, + "node_modules/@smithy/util-waiter": { + "version": "4.0.3", + "resolved": "https://registry.npmjs.org/@smithy/util-waiter/-/util-waiter-4.0.3.tgz", + "integrity": "sha512-JtaY3FxmD+te+KSI2FJuEcfNC9T/DGGVf551babM7fAaXhjJUt7oSYurH1Devxd2+BOSUACCgt3buinx4UnmEA==", + "license": "Apache-2.0", + "dependencies": { + "@smithy/abort-controller": "^4.0.2", + "@smithy/types": "^4.2.0", + "tslib": "^2.6.2" + }, + "engines": { + "node": ">=18.0.0" + } + }, + "node_modules/@types/uuid": { + "version": "9.0.8", + "resolved": "https://registry.npmjs.org/@types/uuid/-/uuid-9.0.8.tgz", + "integrity": "sha512-jg+97EGIcY9AGHJJRaaPVgetKDsrTgbRjQ5Msgjh/DQKEFl0DtyRr/VCOyD1T2R1MNeWPK/u7JoGhlDZnKBAfA==", + "license": "MIT" + }, + "node_modules/bowser": { + "version": "2.11.0", + "resolved": "https://registry.npmjs.org/bowser/-/bowser-2.11.0.tgz", + "integrity": "sha512-AlcaJBi/pqqJBIQ8U9Mcpc9i8Aqxn88Skv5d+xBX006BY5u8N3mGLHa5Lgppa7L/HfwgwLgZ6NYs+Ag6uUmJRA==", + "license": "MIT" + }, + "node_modules/fast-xml-parser": { + "version": "4.4.1", + "resolved": "https://registry.npmjs.org/fast-xml-parser/-/fast-xml-parser-4.4.1.tgz", + "integrity": "sha512-xkjOecfnKGkSsOwtZ5Pz7Us/T6mrbPQrq0nh+aCO5V9nk5NLWmasAHumTKjiPJPWANe+kAZ84Jc8ooJkzZ88Sw==", + "funding": [ + { + "type": "github", + "url": "https://github.com/sponsors/NaturalIntelligence" + }, + { + "type": "paypal", + "url": "https://paypal.me/naturalintelligence" + } + ], + "license": "MIT", + "dependencies": { + "strnum": "^1.0.5" + }, + "bin": { + "fxparser": "src/cli/cli.js" + } + }, + "node_modules/mnemonist": { + "version": 
"0.38.3", + "resolved": "https://registry.npmjs.org/mnemonist/-/mnemonist-0.38.3.tgz", + "integrity": "sha512-2K9QYubXx/NAjv4VLq1d1Ly8pWNC5L3BrixtdkyTegXWJIqY+zLNDhhX/A+ZwWt70tB1S8H4BE8FLYEFyNoOBw==", + "license": "MIT", + "dependencies": { + "obliterator": "^1.6.1" + } + }, + "node_modules/obliterator": { + "version": "1.6.1", + "resolved": "https://registry.npmjs.org/obliterator/-/obliterator-1.6.1.tgz", + "integrity": "sha512-9WXswnqINnnhOG/5SLimUlzuU1hFJUc8zkwyD59Sd+dPOMf05PmnYG/d6Q7HZ+KmgkZJa1PxRso6QdM3sTNHig==", + "license": "MIT" + }, + "node_modules/strnum": { + "version": "1.1.2", + "resolved": "https://registry.npmjs.org/strnum/-/strnum-1.1.2.tgz", + "integrity": "sha512-vrN+B7DBIoTTZjnPNewwhx6cBA/H+IS7rfW68n7XxC1y7uoiGQBxaKzqucGUgavX15dJgiGztLJ8vxuEzwqBdA==", + "funding": [ + { + "type": "github", + "url": "https://github.com/sponsors/NaturalIntelligence" + } + ], + "license": "MIT" + }, + "node_modules/tslib": { + "version": "2.8.1", + "resolved": "https://registry.npmjs.org/tslib/-/tslib-2.8.1.tgz", + "integrity": "sha512-oJFu94HQb+KVduSUQL7wnpmqnfmLsOA/nAh6b6EH0wCEoK0/mPeXU6c3wKDV83MkOuHPRHtSXKKU99IBazS/2w==", + "license": "0BSD" + }, + "node_modules/uuid": { + "version": "9.0.1", + "resolved": "https://registry.npmjs.org/uuid/-/uuid-9.0.1.tgz", + "integrity": "sha512-b+1eJOlsR9K8HJpow9Ok3fiWOWSIcIzXodvv0rQjVoOVNpWMpxf1wZNpt4y9h10odCNrqnYp1OBzRktckBe3sA==", + "funding": [ + "https://github.com/sponsors/broofa", + "https://github.com/sponsors/ctavan" + ], + "license": "MIT", + "bin": { + "uuid": "dist/bin/uuid" + } + } + } +} diff --git a/amplify/functions/deleteDocument/package.json b/amplify/functions/deleteDocument/package.json index e69de29..2ee9c29 100644 --- a/amplify/functions/deleteDocument/package.json +++ b/amplify/functions/deleteDocument/package.json @@ -0,0 +1,16 @@ +{ + "name": "deletedocument", + "version": "1.0.0", + "main": "index.js", + "scripts": { + "test": "echo \"Error: no test specified\" && exit 1" + }, + "keywords": [], + 
"author": "", + "license": "ISC", + "description": "", + "dependencies": { + "@aws-sdk/client-dynamodb": "^3.803.0", + "@aws-sdk/lib-dynamodb": "^3.803.0" + } +} diff --git a/amplify/functions/downloadDocument/node_modules/.bin/fxparser b/amplify/functions/downloadDocument/node_modules/.bin/fxparser new file mode 120000 index 0000000..75327ed --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/.bin/fxparser @@ -0,0 +1 @@ +../fast-xml-parser/src/cli/cli.js \ No newline at end of file diff --git a/amplify/functions/downloadDocument/node_modules/.bin/uuid b/amplify/functions/downloadDocument/node_modules/.bin/uuid new file mode 120000 index 0000000..588f70e --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/.bin/uuid @@ -0,0 +1 @@ +../uuid/dist/bin/uuid \ No newline at end of file diff --git a/amplify/functions/downloadDocument/node_modules/.package-lock.json b/amplify/functions/downloadDocument/node_modules/.package-lock.json new file mode 100644 index 0000000..f8c4591 --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/.package-lock.json @@ -0,0 +1,1340 @@ +{ + "name": "downloaddocument", + "version": "1.0.0", + "lockfileVersion": 3, + "requires": true, + "packages": { + "node_modules/@aws-crypto/sha256-browser": { + "version": "5.2.0", + "resolved": "https://registry.npmjs.org/@aws-crypto/sha256-browser/-/sha256-browser-5.2.0.tgz", + "integrity": "sha512-AXfN/lGotSQwu6HNcEsIASo7kWXZ5HYWvfOmSNKDsEqC4OashTp8alTmaz+F7TC2L083SFv5RdB+qU3Vs1kZqw==", + "license": "Apache-2.0", + "dependencies": { + "@aws-crypto/sha256-js": "^5.2.0", + "@aws-crypto/supports-web-crypto": "^5.2.0", + "@aws-crypto/util": "^5.2.0", + "@aws-sdk/types": "^3.222.0", + "@aws-sdk/util-locate-window": "^3.0.0", + "@smithy/util-utf8": "^2.0.0", + "tslib": "^2.6.2" + } + }, + "node_modules/@aws-crypto/sha256-browser/node_modules/@smithy/is-array-buffer": { + "version": "2.2.0", + "resolved": 
"https://registry.npmjs.org/@smithy/is-array-buffer/-/is-array-buffer-2.2.0.tgz", + "integrity": "sha512-GGP3O9QFD24uGeAXYUjwSTXARoqpZykHadOmA8G5vfJPK0/DC67qa//0qvqrJzL1xc8WQWX7/yc7fwudjPHPhA==", + "license": "Apache-2.0", + "dependencies": { + "tslib": "^2.6.2" + }, + "engines": { + "node": ">=14.0.0" + } + }, + "node_modules/@aws-crypto/sha256-browser/node_modules/@smithy/util-buffer-from": { + "version": "2.2.0", + "resolved": "https://registry.npmjs.org/@smithy/util-buffer-from/-/util-buffer-from-2.2.0.tgz", + "integrity": "sha512-IJdWBbTcMQ6DA0gdNhh/BwrLkDR+ADW5Kr1aZmd4k3DIF6ezMV4R2NIAmT08wQJ3yUK82thHWmC/TnK/wpMMIA==", + "license": "Apache-2.0", + "dependencies": { + "@smithy/is-array-buffer": "^2.2.0", + "tslib": "^2.6.2" + }, + "engines": { + "node": ">=14.0.0" + } + }, + "node_modules/@aws-crypto/sha256-browser/node_modules/@smithy/util-utf8": { + "version": "2.3.0", + "resolved": "https://registry.npmjs.org/@smithy/util-utf8/-/util-utf8-2.3.0.tgz", + "integrity": "sha512-R8Rdn8Hy72KKcebgLiv8jQcQkXoLMOGGv5uI1/k0l+snqkOzQ1R0ChUBCxWMlBsFMekWjq0wRudIweFs7sKT5A==", + "license": "Apache-2.0", + "dependencies": { + "@smithy/util-buffer-from": "^2.2.0", + "tslib": "^2.6.2" + }, + "engines": { + "node": ">=14.0.0" + } + }, + "node_modules/@aws-crypto/sha256-js": { + "version": "5.2.0", + "resolved": "https://registry.npmjs.org/@aws-crypto/sha256-js/-/sha256-js-5.2.0.tgz", + "integrity": "sha512-FFQQyu7edu4ufvIZ+OadFpHHOt+eSTBaYaki44c+akjg7qZg9oOQeLlk77F6tSYqjDAFClrHJk9tMf0HdVyOvA==", + "license": "Apache-2.0", + "dependencies": { + "@aws-crypto/util": "^5.2.0", + "@aws-sdk/types": "^3.222.0", + "tslib": "^2.6.2" + }, + "engines": { + "node": ">=16.0.0" + } + }, + "node_modules/@aws-crypto/supports-web-crypto": { + "version": "5.2.0", + "resolved": "https://registry.npmjs.org/@aws-crypto/supports-web-crypto/-/supports-web-crypto-5.2.0.tgz", + "integrity": "sha512-iAvUotm021kM33eCdNfwIN//F77/IADDSs58i+MDaOqFrVjZo9bAal0NK7HurRuWLLpF1iLX7gbWrjHjeo+YFg==", + "license": 
"Apache-2.0", + "dependencies": { + "tslib": "^2.6.2" + } + }, + "node_modules/@aws-crypto/util": { + "version": "5.2.0", + "resolved": "https://registry.npmjs.org/@aws-crypto/util/-/util-5.2.0.tgz", + "integrity": "sha512-4RkU9EsI6ZpBve5fseQlGNUWKMa1RLPQ1dnjnQoe07ldfIzcsGb5hC5W0Dm7u423KWzawlrpbjXBrXCEv9zazQ==", + "license": "Apache-2.0", + "dependencies": { + "@aws-sdk/types": "^3.222.0", + "@smithy/util-utf8": "^2.0.0", + "tslib": "^2.6.2" + } + }, + "node_modules/@aws-crypto/util/node_modules/@smithy/is-array-buffer": { + "version": "2.2.0", + "resolved": "https://registry.npmjs.org/@smithy/is-array-buffer/-/is-array-buffer-2.2.0.tgz", + "integrity": "sha512-GGP3O9QFD24uGeAXYUjwSTXARoqpZykHadOmA8G5vfJPK0/DC67qa//0qvqrJzL1xc8WQWX7/yc7fwudjPHPhA==", + "license": "Apache-2.0", + "dependencies": { + "tslib": "^2.6.2" + }, + "engines": { + "node": ">=14.0.0" + } + }, + "node_modules/@aws-crypto/util/node_modules/@smithy/util-buffer-from": { + "version": "2.2.0", + "resolved": "https://registry.npmjs.org/@smithy/util-buffer-from/-/util-buffer-from-2.2.0.tgz", + "integrity": "sha512-IJdWBbTcMQ6DA0gdNhh/BwrLkDR+ADW5Kr1aZmd4k3DIF6ezMV4R2NIAmT08wQJ3yUK82thHWmC/TnK/wpMMIA==", + "license": "Apache-2.0", + "dependencies": { + "@smithy/is-array-buffer": "^2.2.0", + "tslib": "^2.6.2" + }, + "engines": { + "node": ">=14.0.0" + } + }, + "node_modules/@aws-crypto/util/node_modules/@smithy/util-utf8": { + "version": "2.3.0", + "resolved": "https://registry.npmjs.org/@smithy/util-utf8/-/util-utf8-2.3.0.tgz", + "integrity": "sha512-R8Rdn8Hy72KKcebgLiv8jQcQkXoLMOGGv5uI1/k0l+snqkOzQ1R0ChUBCxWMlBsFMekWjq0wRudIweFs7sKT5A==", + "license": "Apache-2.0", + "dependencies": { + "@smithy/util-buffer-from": "^2.2.0", + "tslib": "^2.6.2" + }, + "engines": { + "node": ">=14.0.0" + } + }, + "node_modules/@aws-sdk/client-dynamodb": { + "version": "3.803.0", + "resolved": "https://registry.npmjs.org/@aws-sdk/client-dynamodb/-/client-dynamodb-3.803.0.tgz", + "integrity": 
"sha512-rJPidxfyTQHz/1Naq3FukSoIt40GwXfv3npVR15bCBFpqx9TXEt7GoIUbiqm+Ftx8sx9hqJ6XNhf80FIa243gw==", + "license": "Apache-2.0", + "dependencies": { + "@aws-crypto/sha256-browser": "5.2.0", + "@aws-crypto/sha256-js": "5.2.0", + "@aws-sdk/core": "3.799.0", + "@aws-sdk/credential-provider-node": "3.803.0", + "@aws-sdk/middleware-endpoint-discovery": "3.775.0", + "@aws-sdk/middleware-host-header": "3.775.0", + "@aws-sdk/middleware-logger": "3.775.0", + "@aws-sdk/middleware-recursion-detection": "3.775.0", + "@aws-sdk/middleware-user-agent": "3.799.0", + "@aws-sdk/region-config-resolver": "3.775.0", + "@aws-sdk/types": "3.775.0", + "@aws-sdk/util-endpoints": "3.787.0", + "@aws-sdk/util-user-agent-browser": "3.775.0", + "@aws-sdk/util-user-agent-node": "3.799.0", + "@smithy/config-resolver": "^4.1.0", + "@smithy/core": "^3.3.0", + "@smithy/fetch-http-handler": "^5.0.2", + "@smithy/hash-node": "^4.0.2", + "@smithy/invalid-dependency": "^4.0.2", + "@smithy/middleware-content-length": "^4.0.2", + "@smithy/middleware-endpoint": "^4.1.1", + "@smithy/middleware-retry": "^4.1.2", + "@smithy/middleware-serde": "^4.0.3", + "@smithy/middleware-stack": "^4.0.2", + "@smithy/node-config-provider": "^4.0.2", + "@smithy/node-http-handler": "^4.0.4", + "@smithy/protocol-http": "^5.1.0", + "@smithy/smithy-client": "^4.2.1", + "@smithy/types": "^4.2.0", + "@smithy/url-parser": "^4.0.2", + "@smithy/util-base64": "^4.0.0", + "@smithy/util-body-length-browser": "^4.0.0", + "@smithy/util-body-length-node": "^4.0.0", + "@smithy/util-defaults-mode-browser": "^4.0.9", + "@smithy/util-defaults-mode-node": "^4.0.9", + "@smithy/util-endpoints": "^3.0.2", + "@smithy/util-middleware": "^4.0.2", + "@smithy/util-retry": "^4.0.3", + "@smithy/util-utf8": "^4.0.0", + "@smithy/util-waiter": "^4.0.3", + "@types/uuid": "^9.0.1", + "tslib": "^2.6.2", + "uuid": "^9.0.1" + }, + "engines": { + "node": ">=18.0.0" + } + }, + "node_modules/@aws-sdk/client-sso": { + "version": "3.803.0", + "resolved": 
"https://registry.npmjs.org/@aws-sdk/client-sso/-/client-sso-3.803.0.tgz", + "integrity": "sha512-TT3BRD1yiL3IGXBKfq560vvEdyOJtJr8bp+R82dD6P0IoS8aFcNtF822BOJy7CqvxksOc3hQKLaPVzE82gE8Ow==", + "license": "Apache-2.0", + "dependencies": { + "@aws-crypto/sha256-browser": "5.2.0", + "@aws-crypto/sha256-js": "5.2.0", + "@aws-sdk/core": "3.799.0", + "@aws-sdk/middleware-host-header": "3.775.0", + "@aws-sdk/middleware-logger": "3.775.0", + "@aws-sdk/middleware-recursion-detection": "3.775.0", + "@aws-sdk/middleware-user-agent": "3.799.0", + "@aws-sdk/region-config-resolver": "3.775.0", + "@aws-sdk/types": "3.775.0", + "@aws-sdk/util-endpoints": "3.787.0", + "@aws-sdk/util-user-agent-browser": "3.775.0", + "@aws-sdk/util-user-agent-node": "3.799.0", + "@smithy/config-resolver": "^4.1.0", + "@smithy/core": "^3.3.0", + "@smithy/fetch-http-handler": "^5.0.2", + "@smithy/hash-node": "^4.0.2", + "@smithy/invalid-dependency": "^4.0.2", + "@smithy/middleware-content-length": "^4.0.2", + "@smithy/middleware-endpoint": "^4.1.1", + "@smithy/middleware-retry": "^4.1.2", + "@smithy/middleware-serde": "^4.0.3", + "@smithy/middleware-stack": "^4.0.2", + "@smithy/node-config-provider": "^4.0.2", + "@smithy/node-http-handler": "^4.0.4", + "@smithy/protocol-http": "^5.1.0", + "@smithy/smithy-client": "^4.2.1", + "@smithy/types": "^4.2.0", + "@smithy/url-parser": "^4.0.2", + "@smithy/util-base64": "^4.0.0", + "@smithy/util-body-length-browser": "^4.0.0", + "@smithy/util-body-length-node": "^4.0.0", + "@smithy/util-defaults-mode-browser": "^4.0.9", + "@smithy/util-defaults-mode-node": "^4.0.9", + "@smithy/util-endpoints": "^3.0.2", + "@smithy/util-middleware": "^4.0.2", + "@smithy/util-retry": "^4.0.3", + "@smithy/util-utf8": "^4.0.0", + "tslib": "^2.6.2" + }, + "engines": { + "node": ">=18.0.0" + } + }, + "node_modules/@aws-sdk/core": { + "version": "3.799.0", + "resolved": "https://registry.npmjs.org/@aws-sdk/core/-/core-3.799.0.tgz", + "integrity": 
"sha512-hkKF3Zpc6+H8GI1rlttYVRh9uEE77cqAzLmLpY3iu7sql8cZgPERRBfaFct8p1SaDyrksLNiboD1vKW58mbsYg==", + "license": "Apache-2.0", + "dependencies": { + "@aws-sdk/types": "3.775.0", + "@smithy/core": "^3.3.0", + "@smithy/node-config-provider": "^4.0.2", + "@smithy/property-provider": "^4.0.2", + "@smithy/protocol-http": "^5.1.0", + "@smithy/signature-v4": "^5.1.0", + "@smithy/smithy-client": "^4.2.1", + "@smithy/types": "^4.2.0", + "@smithy/util-middleware": "^4.0.2", + "fast-xml-parser": "4.4.1", + "tslib": "^2.6.2" + }, + "engines": { + "node": ">=18.0.0" + } + }, + "node_modules/@aws-sdk/credential-provider-env": { + "version": "3.799.0", + "resolved": "https://registry.npmjs.org/@aws-sdk/credential-provider-env/-/credential-provider-env-3.799.0.tgz", + "integrity": "sha512-vT/SSWtbUIOW/U21qgEySmmO44SFWIA7WeQPX1OrI8WJ5n7OEI23JWLHjLvHTkYmuZK6z1rPcv7HzRgmuGRibA==", + "license": "Apache-2.0", + "dependencies": { + "@aws-sdk/core": "3.799.0", + "@aws-sdk/types": "3.775.0", + "@smithy/property-provider": "^4.0.2", + "@smithy/types": "^4.2.0", + "tslib": "^2.6.2" + }, + "engines": { + "node": ">=18.0.0" + } + }, + "node_modules/@aws-sdk/credential-provider-http": { + "version": "3.799.0", + "resolved": "https://registry.npmjs.org/@aws-sdk/credential-provider-http/-/credential-provider-http-3.799.0.tgz", + "integrity": "sha512-2CjBpOWmhaPAExOgHnIB5nOkS5ef+mfRlJ1JC4nsnjAx0nrK4tk0XRE0LYz11P3+ue+a86cU8WTmBo+qjnGxPQ==", + "license": "Apache-2.0", + "dependencies": { + "@aws-sdk/core": "3.799.0", + "@aws-sdk/types": "3.775.0", + "@smithy/fetch-http-handler": "^5.0.2", + "@smithy/node-http-handler": "^4.0.4", + "@smithy/property-provider": "^4.0.2", + "@smithy/protocol-http": "^5.1.0", + "@smithy/smithy-client": "^4.2.1", + "@smithy/types": "^4.2.0", + "@smithy/util-stream": "^4.2.0", + "tslib": "^2.6.2" + }, + "engines": { + "node": ">=18.0.0" + } + }, + "node_modules/@aws-sdk/credential-provider-ini": { + "version": "3.803.0", + "resolved": 
"https://registry.npmjs.org/@aws-sdk/credential-provider-ini/-/credential-provider-ini-3.803.0.tgz", + "integrity": "sha512-XtbFftJex18GobpRWJxg5V7stVwvmV2gdBYW+zRM0YW6NZAR4NP/4vcc9ktM3++BWW5OF4Kvl7Nu7N4mAzRHmw==", + "license": "Apache-2.0", + "dependencies": { + "@aws-sdk/core": "3.799.0", + "@aws-sdk/credential-provider-env": "3.799.0", + "@aws-sdk/credential-provider-http": "3.799.0", + "@aws-sdk/credential-provider-process": "3.799.0", + "@aws-sdk/credential-provider-sso": "3.803.0", + "@aws-sdk/credential-provider-web-identity": "3.803.0", + "@aws-sdk/nested-clients": "3.803.0", + "@aws-sdk/types": "3.775.0", + "@smithy/credential-provider-imds": "^4.0.2", + "@smithy/property-provider": "^4.0.2", + "@smithy/shared-ini-file-loader": "^4.0.2", + "@smithy/types": "^4.2.0", + "tslib": "^2.6.2" + }, + "engines": { + "node": ">=18.0.0" + } + }, + "node_modules/@aws-sdk/credential-provider-node": { + "version": "3.803.0", + "resolved": "https://registry.npmjs.org/@aws-sdk/credential-provider-node/-/credential-provider-node-3.803.0.tgz", + "integrity": "sha512-lPdRYbjxwmv7gRqbaEe1Y1Yl5fD4c43AuK3P31eKjf1j41hZEQ0dg9a9KLk7i6ehEoVsxewnJrvbC2pVoYrCmQ==", + "license": "Apache-2.0", + "dependencies": { + "@aws-sdk/credential-provider-env": "3.799.0", + "@aws-sdk/credential-provider-http": "3.799.0", + "@aws-sdk/credential-provider-ini": "3.803.0", + "@aws-sdk/credential-provider-process": "3.799.0", + "@aws-sdk/credential-provider-sso": "3.803.0", + "@aws-sdk/credential-provider-web-identity": "3.803.0", + "@aws-sdk/types": "3.775.0", + "@smithy/credential-provider-imds": "^4.0.2", + "@smithy/property-provider": "^4.0.2", + "@smithy/shared-ini-file-loader": "^4.0.2", + "@smithy/types": "^4.2.0", + "tslib": "^2.6.2" + }, + "engines": { + "node": ">=18.0.0" + } + }, + "node_modules/@aws-sdk/credential-provider-process": { + "version": "3.799.0", + "resolved": "https://registry.npmjs.org/@aws-sdk/credential-provider-process/-/credential-provider-process-3.799.0.tgz", + 
"integrity": "sha512-g8jmNs2k98WNHMYcea1YKA+7ao2Ma4w0P42Dz4YpcI155pQHxHx25RwbOG+rsAKuo3bKwkW53HVE/ZTKhcWFgw==", + "license": "Apache-2.0", + "dependencies": { + "@aws-sdk/core": "3.799.0", + "@aws-sdk/types": "3.775.0", + "@smithy/property-provider": "^4.0.2", + "@smithy/shared-ini-file-loader": "^4.0.2", + "@smithy/types": "^4.2.0", + "tslib": "^2.6.2" + }, + "engines": { + "node": ">=18.0.0" + } + }, + "node_modules/@aws-sdk/credential-provider-sso": { + "version": "3.803.0", + "resolved": "https://registry.npmjs.org/@aws-sdk/credential-provider-sso/-/credential-provider-sso-3.803.0.tgz", + "integrity": "sha512-HEAcxSHrHxVekGnZqjFrkqdYAf4jFiZIMhuh0jqiqY6A4udEyXy1V623HVcTz/XXj6UBRnyD+zmOmlbzBvkfQg==", + "license": "Apache-2.0", + "dependencies": { + "@aws-sdk/client-sso": "3.803.0", + "@aws-sdk/core": "3.799.0", + "@aws-sdk/token-providers": "3.803.0", + "@aws-sdk/types": "3.775.0", + "@smithy/property-provider": "^4.0.2", + "@smithy/shared-ini-file-loader": "^4.0.2", + "@smithy/types": "^4.2.0", + "tslib": "^2.6.2" + }, + "engines": { + "node": ">=18.0.0" + } + }, + "node_modules/@aws-sdk/credential-provider-web-identity": { + "version": "3.803.0", + "resolved": "https://registry.npmjs.org/@aws-sdk/credential-provider-web-identity/-/credential-provider-web-identity-3.803.0.tgz", + "integrity": "sha512-oChnEpwI25OW4GPvhI1VnXM3IQEkDhESGFZd5JHzJDHyvSF2NU58V86jkJyaa4H4X25IbGaThuulNI5xCOngjw==", + "license": "Apache-2.0", + "dependencies": { + "@aws-sdk/core": "3.799.0", + "@aws-sdk/nested-clients": "3.803.0", + "@aws-sdk/types": "3.775.0", + "@smithy/property-provider": "^4.0.2", + "@smithy/types": "^4.2.0", + "tslib": "^2.6.2" + }, + "engines": { + "node": ">=18.0.0" + } + }, + "node_modules/@aws-sdk/endpoint-cache": { + "version": "3.723.0", + "resolved": "https://registry.npmjs.org/@aws-sdk/endpoint-cache/-/endpoint-cache-3.723.0.tgz", + "integrity": "sha512-2+a4WXRc+07uiPR+zJiPGKSOWaNJQNqitkks+6Hhm/haTLJqNVTgY2OWDh2PXvwMNpKB+AlGdhE65Oy6NzUgXg==", + "license": 
"Apache-2.0", + "dependencies": { + "mnemonist": "0.38.3", + "tslib": "^2.6.2" + }, + "engines": { + "node": ">=18.0.0" + } + }, + "node_modules/@aws-sdk/lib-dynamodb": { + "version": "3.803.0", + "resolved": "https://registry.npmjs.org/@aws-sdk/lib-dynamodb/-/lib-dynamodb-3.803.0.tgz", + "integrity": "sha512-J9oeaKnF0vfw1ixUc0Bu1GTcYwp/riiGCst/MSLPHeGqoFiYzyox/im1Pbuv2Ipx7/0QI7w5PxYdxOpwvqMCFg==", + "license": "Apache-2.0", + "dependencies": { + "@aws-sdk/core": "3.799.0", + "@aws-sdk/util-dynamodb": "3.803.0", + "@smithy/core": "^3.3.0", + "@smithy/smithy-client": "^4.2.1", + "@smithy/types": "^4.2.0", + "tslib": "^2.6.2" + }, + "engines": { + "node": ">=18.0.0" + }, + "peerDependencies": { + "@aws-sdk/client-dynamodb": "^3.803.0" + } + }, + "node_modules/@aws-sdk/middleware-endpoint-discovery": { + "version": "3.775.0", + "resolved": "https://registry.npmjs.org/@aws-sdk/middleware-endpoint-discovery/-/middleware-endpoint-discovery-3.775.0.tgz", + "integrity": "sha512-L0PmjSg7t+wovRo/Lin1kpei3e7wBhrENWb1Bbccu3PWUIfxolGeWplOmNhSlXjuQe9GXjf3z8kJRYOGBMFOvw==", + "license": "Apache-2.0", + "dependencies": { + "@aws-sdk/endpoint-cache": "3.723.0", + "@aws-sdk/types": "3.775.0", + "@smithy/node-config-provider": "^4.0.2", + "@smithy/protocol-http": "^5.1.0", + "@smithy/types": "^4.2.0", + "tslib": "^2.6.2" + }, + "engines": { + "node": ">=18.0.0" + } + }, + "node_modules/@aws-sdk/middleware-host-header": { + "version": "3.775.0", + "resolved": "https://registry.npmjs.org/@aws-sdk/middleware-host-header/-/middleware-host-header-3.775.0.tgz", + "integrity": "sha512-tkSegM0Z6WMXpLB8oPys/d+umYIocvO298mGvcMCncpRl77L9XkvSLJIFzaHes+o7djAgIduYw8wKIMStFss2w==", + "license": "Apache-2.0", + "dependencies": { + "@aws-sdk/types": "3.775.0", + "@smithy/protocol-http": "^5.1.0", + "@smithy/types": "^4.2.0", + "tslib": "^2.6.2" + }, + "engines": { + "node": ">=18.0.0" + } + }, + "node_modules/@aws-sdk/middleware-logger": { + "version": "3.775.0", + "resolved": 
"https://registry.npmjs.org/@aws-sdk/middleware-logger/-/middleware-logger-3.775.0.tgz", + "integrity": "sha512-FaxO1xom4MAoUJsldmR92nT1G6uZxTdNYOFYtdHfd6N2wcNaTuxgjIvqzg5y7QIH9kn58XX/dzf1iTjgqUStZw==", + "license": "Apache-2.0", + "dependencies": { + "@aws-sdk/types": "3.775.0", + "@smithy/types": "^4.2.0", + "tslib": "^2.6.2" + }, + "engines": { + "node": ">=18.0.0" + } + }, + "node_modules/@aws-sdk/middleware-recursion-detection": { + "version": "3.775.0", + "resolved": "https://registry.npmjs.org/@aws-sdk/middleware-recursion-detection/-/middleware-recursion-detection-3.775.0.tgz", + "integrity": "sha512-GLCzC8D0A0YDG5u3F5U03Vb9j5tcOEFhr8oc6PDk0k0vm5VwtZOE6LvK7hcCSoAB4HXyOUM0sQuXrbaAh9OwXA==", + "license": "Apache-2.0", + "dependencies": { + "@aws-sdk/types": "3.775.0", + "@smithy/protocol-http": "^5.1.0", + "@smithy/types": "^4.2.0", + "tslib": "^2.6.2" + }, + "engines": { + "node": ">=18.0.0" + } + }, + "node_modules/@aws-sdk/middleware-user-agent": { + "version": "3.799.0", + "resolved": "https://registry.npmjs.org/@aws-sdk/middleware-user-agent/-/middleware-user-agent-3.799.0.tgz", + "integrity": "sha512-TropQZanbOTxa+p+Nl4fWkzlRhgFwDfW+Wb6TR3jZN7IXHNlPpgGFpdrgvBExhW/RBhqr+94OsR8Ou58lp3hhA==", + "license": "Apache-2.0", + "dependencies": { + "@aws-sdk/core": "3.799.0", + "@aws-sdk/types": "3.775.0", + "@aws-sdk/util-endpoints": "3.787.0", + "@smithy/core": "^3.3.0", + "@smithy/protocol-http": "^5.1.0", + "@smithy/types": "^4.2.0", + "tslib": "^2.6.2" + }, + "engines": { + "node": ">=18.0.0" + } + }, + "node_modules/@aws-sdk/nested-clients": { + "version": "3.803.0", + "resolved": "https://registry.npmjs.org/@aws-sdk/nested-clients/-/nested-clients-3.803.0.tgz", + "integrity": "sha512-wiWiYaFQxK2u37G9IOXuWkHelEbU8ulLxdHpoPf0TSu/1boqLW7fcofuZATAvFcvigQx3oebwO8G4c/mmixTTw==", + "license": "Apache-2.0", + "dependencies": { + "@aws-crypto/sha256-browser": "5.2.0", + "@aws-crypto/sha256-js": "5.2.0", + "@aws-sdk/core": "3.799.0", + 
"@aws-sdk/middleware-host-header": "3.775.0", + "@aws-sdk/middleware-logger": "3.775.0", + "@aws-sdk/middleware-recursion-detection": "3.775.0", + "@aws-sdk/middleware-user-agent": "3.799.0", + "@aws-sdk/region-config-resolver": "3.775.0", + "@aws-sdk/types": "3.775.0", + "@aws-sdk/util-endpoints": "3.787.0", + "@aws-sdk/util-user-agent-browser": "3.775.0", + "@aws-sdk/util-user-agent-node": "3.799.0", + "@smithy/config-resolver": "^4.1.0", + "@smithy/core": "^3.3.0", + "@smithy/fetch-http-handler": "^5.0.2", + "@smithy/hash-node": "^4.0.2", + "@smithy/invalid-dependency": "^4.0.2", + "@smithy/middleware-content-length": "^4.0.2", + "@smithy/middleware-endpoint": "^4.1.1", + "@smithy/middleware-retry": "^4.1.2", + "@smithy/middleware-serde": "^4.0.3", + "@smithy/middleware-stack": "^4.0.2", + "@smithy/node-config-provider": "^4.0.2", + "@smithy/node-http-handler": "^4.0.4", + "@smithy/protocol-http": "^5.1.0", + "@smithy/smithy-client": "^4.2.1", + "@smithy/types": "^4.2.0", + "@smithy/url-parser": "^4.0.2", + "@smithy/util-base64": "^4.0.0", + "@smithy/util-body-length-browser": "^4.0.0", + "@smithy/util-body-length-node": "^4.0.0", + "@smithy/util-defaults-mode-browser": "^4.0.9", + "@smithy/util-defaults-mode-node": "^4.0.9", + "@smithy/util-endpoints": "^3.0.2", + "@smithy/util-middleware": "^4.0.2", + "@smithy/util-retry": "^4.0.3", + "@smithy/util-utf8": "^4.0.0", + "tslib": "^2.6.2" + }, + "engines": { + "node": ">=18.0.0" + } + }, + "node_modules/@aws-sdk/region-config-resolver": { + "version": "3.775.0", + "resolved": "https://registry.npmjs.org/@aws-sdk/region-config-resolver/-/region-config-resolver-3.775.0.tgz", + "integrity": "sha512-40iH3LJjrQS3LKUJAl7Wj0bln7RFPEvUYKFxtP8a+oKFDO0F65F52xZxIJbPn6sHkxWDAnZlGgdjZXM3p2g5wQ==", + "license": "Apache-2.0", + "dependencies": { + "@aws-sdk/types": "3.775.0", + "@smithy/node-config-provider": "^4.0.2", + "@smithy/types": "^4.2.0", + "@smithy/util-config-provider": "^4.0.0", + "@smithy/util-middleware": "^4.0.2", 
+ "tslib": "^2.6.2" + }, + "engines": { + "node": ">=18.0.0" + } + }, + "node_modules/@aws-sdk/token-providers": { + "version": "3.803.0", + "resolved": "https://registry.npmjs.org/@aws-sdk/token-providers/-/token-providers-3.803.0.tgz", + "integrity": "sha512-lDbMgVjWWEPT7a6lLaAEPPljwOeLTjPX2sJ7MoDICpQotg4Yd8cQfX3nqScSyLAGSc7Rq/21UPnPoij/E0K3lg==", + "license": "Apache-2.0", + "dependencies": { + "@aws-sdk/nested-clients": "3.803.0", + "@aws-sdk/types": "3.775.0", + "@smithy/property-provider": "^4.0.2", + "@smithy/shared-ini-file-loader": "^4.0.2", + "@smithy/types": "^4.2.0", + "tslib": "^2.6.2" + }, + "engines": { + "node": ">=18.0.0" + } + }, + "node_modules/@aws-sdk/types": { + "version": "3.775.0", + "resolved": "https://registry.npmjs.org/@aws-sdk/types/-/types-3.775.0.tgz", + "integrity": "sha512-ZoGKwa4C9fC9Av6bdfqcW6Ix5ot05F/S4VxWR2nHuMv7hzfmAjTOcUiWT7UR4hM/U0whf84VhDtXN/DWAk52KA==", + "license": "Apache-2.0", + "dependencies": { + "@smithy/types": "^4.2.0", + "tslib": "^2.6.2" + }, + "engines": { + "node": ">=18.0.0" + } + }, + "node_modules/@aws-sdk/util-dynamodb": { + "version": "3.803.0", + "resolved": "https://registry.npmjs.org/@aws-sdk/util-dynamodb/-/util-dynamodb-3.803.0.tgz", + "integrity": "sha512-QiXvurnve8xIm41Zf/jNXwcYotDX3KZbHcsTaJ7ILhyFomqCjJ6bjLcCRdfndG600N5ua6AEK2XGw1luyBQxig==", + "license": "Apache-2.0", + "dependencies": { + "tslib": "^2.6.2" + }, + "engines": { + "node": ">=18.0.0" + }, + "peerDependencies": { + "@aws-sdk/client-dynamodb": "^3.803.0" + } + }, + "node_modules/@aws-sdk/util-endpoints": { + "version": "3.787.0", + "resolved": "https://registry.npmjs.org/@aws-sdk/util-endpoints/-/util-endpoints-3.787.0.tgz", + "integrity": "sha512-fd3zkiOkwnbdbN0Xp9TsP5SWrmv0SpT70YEdbb8wAj2DWQwiCmFszaSs+YCvhoCdmlR3Wl9Spu0pGpSAGKeYvQ==", + "license": "Apache-2.0", + "dependencies": { + "@aws-sdk/types": "3.775.0", + "@smithy/types": "^4.2.0", + "@smithy/util-endpoints": "^3.0.2", + "tslib": "^2.6.2" + }, + "engines": { + "node": 
">=18.0.0" + } + }, + "node_modules/@aws-sdk/util-locate-window": { + "version": "3.723.0", + "resolved": "https://registry.npmjs.org/@aws-sdk/util-locate-window/-/util-locate-window-3.723.0.tgz", + "integrity": "sha512-Yf2CS10BqK688DRsrKI/EO6B8ff5J86NXe4C+VCysK7UOgN0l1zOTeTukZ3H8Q9tYYX3oaF1961o8vRkFm7Nmw==", + "license": "Apache-2.0", + "dependencies": { + "tslib": "^2.6.2" + }, + "engines": { + "node": ">=18.0.0" + } + }, + "node_modules/@aws-sdk/util-user-agent-browser": { + "version": "3.775.0", + "resolved": "https://registry.npmjs.org/@aws-sdk/util-user-agent-browser/-/util-user-agent-browser-3.775.0.tgz", + "integrity": "sha512-txw2wkiJmZKVdDbscK7VBK+u+TJnRtlUjRTLei+elZg2ADhpQxfVAQl436FUeIv6AhB/oRHW6/K/EAGXUSWi0A==", + "license": "Apache-2.0", + "dependencies": { + "@aws-sdk/types": "3.775.0", + "@smithy/types": "^4.2.0", + "bowser": "^2.11.0", + "tslib": "^2.6.2" + } + }, + "node_modules/@aws-sdk/util-user-agent-node": { + "version": "3.799.0", + "resolved": "https://registry.npmjs.org/@aws-sdk/util-user-agent-node/-/util-user-agent-node-3.799.0.tgz", + "integrity": "sha512-iXBk38RbIWPF5Nq9O4AnktORAzXovSVqWYClvS1qbE7ILsnTLJbagU9HlU25O2iV5COVh1qZkwuP5NHQ2yTEyw==", + "license": "Apache-2.0", + "dependencies": { + "@aws-sdk/middleware-user-agent": "3.799.0", + "@aws-sdk/types": "3.775.0", + "@smithy/node-config-provider": "^4.0.2", + "@smithy/types": "^4.2.0", + "tslib": "^2.6.2" + }, + "engines": { + "node": ">=18.0.0" + }, + "peerDependencies": { + "aws-crt": ">=1.0.0" + }, + "peerDependenciesMeta": { + "aws-crt": { + "optional": true + } + } + }, + "node_modules/@smithy/abort-controller": { + "version": "4.0.2", + "resolved": "https://registry.npmjs.org/@smithy/abort-controller/-/abort-controller-4.0.2.tgz", + "integrity": "sha512-Sl/78VDtgqKxN2+1qduaVE140XF+Xg+TafkncspwM4jFP/LHr76ZHmIY/y3V1M0mMLNk+Je6IGbzxy23RSToMw==", + "license": "Apache-2.0", + "dependencies": { + "@smithy/types": "^4.2.0", + "tslib": "^2.6.2" + }, + "engines": { + "node": ">=18.0.0" + 
} + }, + "node_modules/@smithy/config-resolver": { + "version": "4.1.0", + "resolved": "https://registry.npmjs.org/@smithy/config-resolver/-/config-resolver-4.1.0.tgz", + "integrity": "sha512-8smPlwhga22pwl23fM5ew4T9vfLUCeFXlcqNOCD5M5h8VmNPNUE9j6bQSuRXpDSV11L/E/SwEBQuW8hr6+nS1A==", + "license": "Apache-2.0", + "dependencies": { + "@smithy/node-config-provider": "^4.0.2", + "@smithy/types": "^4.2.0", + "@smithy/util-config-provider": "^4.0.0", + "@smithy/util-middleware": "^4.0.2", + "tslib": "^2.6.2" + }, + "engines": { + "node": ">=18.0.0" + } + }, + "node_modules/@smithy/core": { + "version": "3.3.1", + "resolved": "https://registry.npmjs.org/@smithy/core/-/core-3.3.1.tgz", + "integrity": "sha512-W7AppgQD3fP1aBmo8wWo0id5zeR2/aYRy067vZsDVaa6v/mdhkg6DxXwEVuSPjZl+ZnvWAQbUMCd5ckw38+tHQ==", + "license": "Apache-2.0", + "dependencies": { + "@smithy/middleware-serde": "^4.0.3", + "@smithy/protocol-http": "^5.1.0", + "@smithy/types": "^4.2.0", + "@smithy/util-body-length-browser": "^4.0.0", + "@smithy/util-middleware": "^4.0.2", + "@smithy/util-stream": "^4.2.0", + "@smithy/util-utf8": "^4.0.0", + "tslib": "^2.6.2" + }, + "engines": { + "node": ">=18.0.0" + } + }, + "node_modules/@smithy/credential-provider-imds": { + "version": "4.0.2", + "resolved": "https://registry.npmjs.org/@smithy/credential-provider-imds/-/credential-provider-imds-4.0.2.tgz", + "integrity": "sha512-32lVig6jCaWBHnY+OEQ6e6Vnt5vDHaLiydGrwYMW9tPqO688hPGTYRamYJ1EptxEC2rAwJrHWmPoKRBl4iTa8w==", + "license": "Apache-2.0", + "dependencies": { + "@smithy/node-config-provider": "^4.0.2", + "@smithy/property-provider": "^4.0.2", + "@smithy/types": "^4.2.0", + "@smithy/url-parser": "^4.0.2", + "tslib": "^2.6.2" + }, + "engines": { + "node": ">=18.0.0" + } + }, + "node_modules/@smithy/fetch-http-handler": { + "version": "5.0.2", + "resolved": "https://registry.npmjs.org/@smithy/fetch-http-handler/-/fetch-http-handler-5.0.2.tgz", + "integrity": 
"sha512-+9Dz8sakS9pe7f2cBocpJXdeVjMopUDLgZs1yWeu7h++WqSbjUYv/JAJwKwXw1HV6gq1jyWjxuyn24E2GhoEcQ==", + "license": "Apache-2.0", + "dependencies": { + "@smithy/protocol-http": "^5.1.0", + "@smithy/querystring-builder": "^4.0.2", + "@smithy/types": "^4.2.0", + "@smithy/util-base64": "^4.0.0", + "tslib": "^2.6.2" + }, + "engines": { + "node": ">=18.0.0" + } + }, + "node_modules/@smithy/hash-node": { + "version": "4.0.2", + "resolved": "https://registry.npmjs.org/@smithy/hash-node/-/hash-node-4.0.2.tgz", + "integrity": "sha512-VnTpYPnRUE7yVhWozFdlxcYknv9UN7CeOqSrMH+V877v4oqtVYuoqhIhtSjmGPvYrYnAkaM61sLMKHvxL138yg==", + "license": "Apache-2.0", + "dependencies": { + "@smithy/types": "^4.2.0", + "@smithy/util-buffer-from": "^4.0.0", + "@smithy/util-utf8": "^4.0.0", + "tslib": "^2.6.2" + }, + "engines": { + "node": ">=18.0.0" + } + }, + "node_modules/@smithy/invalid-dependency": { + "version": "4.0.2", + "resolved": "https://registry.npmjs.org/@smithy/invalid-dependency/-/invalid-dependency-4.0.2.tgz", + "integrity": "sha512-GatB4+2DTpgWPday+mnUkoumP54u/MDM/5u44KF9hIu8jF0uafZtQLcdfIKkIcUNuF/fBojpLEHZS/56JqPeXQ==", + "license": "Apache-2.0", + "dependencies": { + "@smithy/types": "^4.2.0", + "tslib": "^2.6.2" + }, + "engines": { + "node": ">=18.0.0" + } + }, + "node_modules/@smithy/is-array-buffer": { + "version": "4.0.0", + "resolved": "https://registry.npmjs.org/@smithy/is-array-buffer/-/is-array-buffer-4.0.0.tgz", + "integrity": "sha512-saYhF8ZZNoJDTvJBEWgeBccCg+yvp1CX+ed12yORU3NilJScfc6gfch2oVb4QgxZrGUx3/ZJlb+c/dJbyupxlw==", + "license": "Apache-2.0", + "dependencies": { + "tslib": "^2.6.2" + }, + "engines": { + "node": ">=18.0.0" + } + }, + "node_modules/@smithy/middleware-content-length": { + "version": "4.0.2", + "resolved": "https://registry.npmjs.org/@smithy/middleware-content-length/-/middleware-content-length-4.0.2.tgz", + "integrity": "sha512-hAfEXm1zU+ELvucxqQ7I8SszwQ4znWMbNv6PLMndN83JJN41EPuS93AIyh2N+gJ6x8QFhzSO6b7q2e6oClDI8A==", + "license": "Apache-2.0", + 
"dependencies": { + "@smithy/protocol-http": "^5.1.0", + "@smithy/types": "^4.2.0", + "tslib": "^2.6.2" + }, + "engines": { + "node": ">=18.0.0" + } + }, + "node_modules/@smithy/middleware-endpoint": { + "version": "4.1.2", + "resolved": "https://registry.npmjs.org/@smithy/middleware-endpoint/-/middleware-endpoint-4.1.2.tgz", + "integrity": "sha512-EqOy3xaEGQpsKxLlzYstDRJ8eY90CbyBP4cl+w7r45mE60S8YliyL9AgWsdWcyNiB95E2PMqHBEv67nNl1zLfg==", + "license": "Apache-2.0", + "dependencies": { + "@smithy/core": "^3.3.1", + "@smithy/middleware-serde": "^4.0.3", + "@smithy/node-config-provider": "^4.0.2", + "@smithy/shared-ini-file-loader": "^4.0.2", + "@smithy/types": "^4.2.0", + "@smithy/url-parser": "^4.0.2", + "@smithy/util-middleware": "^4.0.2", + "tslib": "^2.6.2" + }, + "engines": { + "node": ">=18.0.0" + } + }, + "node_modules/@smithy/middleware-retry": { + "version": "4.1.3", + "resolved": "https://registry.npmjs.org/@smithy/middleware-retry/-/middleware-retry-4.1.3.tgz", + "integrity": "sha512-AsJtI9KiFoEGAhcEKZyzzPfrszAQGcf4HSYKmenz0WGx/6YNvoPPv4OSGfZTCsDmgPHv4pXzxE+7QV7jcGWNKw==", + "license": "Apache-2.0", + "dependencies": { + "@smithy/node-config-provider": "^4.0.2", + "@smithy/protocol-http": "^5.1.0", + "@smithy/service-error-classification": "^4.0.3", + "@smithy/smithy-client": "^4.2.2", + "@smithy/types": "^4.2.0", + "@smithy/util-middleware": "^4.0.2", + "@smithy/util-retry": "^4.0.3", + "tslib": "^2.6.2", + "uuid": "^9.0.1" + }, + "engines": { + "node": ">=18.0.0" + } + }, + "node_modules/@smithy/middleware-serde": { + "version": "4.0.3", + "resolved": "https://registry.npmjs.org/@smithy/middleware-serde/-/middleware-serde-4.0.3.tgz", + "integrity": "sha512-rfgDVrgLEVMmMn0BI8O+8OVr6vXzjV7HZj57l0QxslhzbvVfikZbVfBVthjLHqib4BW44QhcIgJpvebHlRaC9A==", + "license": "Apache-2.0", + "dependencies": { + "@smithy/types": "^4.2.0", + "tslib": "^2.6.2" + }, + "engines": { + "node": ">=18.0.0" + } + }, + "node_modules/@smithy/middleware-stack": { + "version": "4.0.2", 
+ "resolved": "https://registry.npmjs.org/@smithy/middleware-stack/-/middleware-stack-4.0.2.tgz", + "integrity": "sha512-eSPVcuJJGVYrFYu2hEq8g8WWdJav3sdrI4o2c6z/rjnYDd3xH9j9E7deZQCzFn4QvGPouLngH3dQ+QVTxv5bOQ==", + "license": "Apache-2.0", + "dependencies": { + "@smithy/types": "^4.2.0", + "tslib": "^2.6.2" + }, + "engines": { + "node": ">=18.0.0" + } + }, + "node_modules/@smithy/node-config-provider": { + "version": "4.0.2", + "resolved": "https://registry.npmjs.org/@smithy/node-config-provider/-/node-config-provider-4.0.2.tgz", + "integrity": "sha512-WgCkILRZfJwJ4Da92a6t3ozN/zcvYyJGUTmfGbgS/FkCcoCjl7G4FJaCDN1ySdvLvemnQeo25FdkyMSTSwulsw==", + "license": "Apache-2.0", + "dependencies": { + "@smithy/property-provider": "^4.0.2", + "@smithy/shared-ini-file-loader": "^4.0.2", + "@smithy/types": "^4.2.0", + "tslib": "^2.6.2" + }, + "engines": { + "node": ">=18.0.0" + } + }, + "node_modules/@smithy/node-http-handler": { + "version": "4.0.4", + "resolved": "https://registry.npmjs.org/@smithy/node-http-handler/-/node-http-handler-4.0.4.tgz", + "integrity": "sha512-/mdqabuAT3o/ihBGjL94PUbTSPSRJ0eeVTdgADzow0wRJ0rN4A27EOrtlK56MYiO1fDvlO3jVTCxQtQmK9dZ1g==", + "license": "Apache-2.0", + "dependencies": { + "@smithy/abort-controller": "^4.0.2", + "@smithy/protocol-http": "^5.1.0", + "@smithy/querystring-builder": "^4.0.2", + "@smithy/types": "^4.2.0", + "tslib": "^2.6.2" + }, + "engines": { + "node": ">=18.0.0" + } + }, + "node_modules/@smithy/property-provider": { + "version": "4.0.2", + "resolved": "https://registry.npmjs.org/@smithy/property-provider/-/property-provider-4.0.2.tgz", + "integrity": "sha512-wNRoQC1uISOuNc2s4hkOYwYllmiyrvVXWMtq+TysNRVQaHm4yoafYQyjN/goYZS+QbYlPIbb/QRjaUZMuzwQ7A==", + "license": "Apache-2.0", + "dependencies": { + "@smithy/types": "^4.2.0", + "tslib": "^2.6.2" + }, + "engines": { + "node": ">=18.0.0" + } + }, + "node_modules/@smithy/protocol-http": { + "version": "5.1.0", + "resolved": 
"https://registry.npmjs.org/@smithy/protocol-http/-/protocol-http-5.1.0.tgz", + "integrity": "sha512-KxAOL1nUNw2JTYrtviRRjEnykIDhxc84qMBzxvu1MUfQfHTuBlCG7PA6EdVwqpJjH7glw7FqQoFxUJSyBQgu7g==", + "license": "Apache-2.0", + "dependencies": { + "@smithy/types": "^4.2.0", + "tslib": "^2.6.2" + }, + "engines": { + "node": ">=18.0.0" + } + }, + "node_modules/@smithy/querystring-builder": { + "version": "4.0.2", + "resolved": "https://registry.npmjs.org/@smithy/querystring-builder/-/querystring-builder-4.0.2.tgz", + "integrity": "sha512-NTOs0FwHw1vimmQM4ebh+wFQvOwkEf/kQL6bSM1Lock+Bv4I89B3hGYoUEPkmvYPkDKyp5UdXJYu+PoTQ3T31Q==", + "license": "Apache-2.0", + "dependencies": { + "@smithy/types": "^4.2.0", + "@smithy/util-uri-escape": "^4.0.0", + "tslib": "^2.6.2" + }, + "engines": { + "node": ">=18.0.0" + } + }, + "node_modules/@smithy/querystring-parser": { + "version": "4.0.2", + "resolved": "https://registry.npmjs.org/@smithy/querystring-parser/-/querystring-parser-4.0.2.tgz", + "integrity": "sha512-v6w8wnmZcVXjfVLjxw8qF7OwESD9wnpjp0Dqry/Pod0/5vcEA3qxCr+BhbOHlxS8O+29eLpT3aagxXGwIoEk7Q==", + "license": "Apache-2.0", + "dependencies": { + "@smithy/types": "^4.2.0", + "tslib": "^2.6.2" + }, + "engines": { + "node": ">=18.0.0" + } + }, + "node_modules/@smithy/service-error-classification": { + "version": "4.0.3", + "resolved": "https://registry.npmjs.org/@smithy/service-error-classification/-/service-error-classification-4.0.3.tgz", + "integrity": "sha512-FTbcajmltovWMjj3tksDQdD23b2w6gH+A0DYA1Yz3iSpjDj8fmkwy62UnXcWMy4d5YoMoSyLFHMfkEVEzbiN8Q==", + "license": "Apache-2.0", + "dependencies": { + "@smithy/types": "^4.2.0" + }, + "engines": { + "node": ">=18.0.0" + } + }, + "node_modules/@smithy/shared-ini-file-loader": { + "version": "4.0.2", + "resolved": "https://registry.npmjs.org/@smithy/shared-ini-file-loader/-/shared-ini-file-loader-4.0.2.tgz", + "integrity": "sha512-J9/gTWBGVuFZ01oVA6vdb4DAjf1XbDhK6sLsu3OS9qmLrS6KB5ygpeHiM3miIbj1qgSJ96GYszXFWv6ErJ8QEw==", + "license": 
"Apache-2.0", + "dependencies": { + "@smithy/types": "^4.2.0", + "tslib": "^2.6.2" + }, + "engines": { + "node": ">=18.0.0" + } + }, + "node_modules/@smithy/signature-v4": { + "version": "5.1.0", + "resolved": "https://registry.npmjs.org/@smithy/signature-v4/-/signature-v4-5.1.0.tgz", + "integrity": "sha512-4t5WX60sL3zGJF/CtZsUQTs3UrZEDO2P7pEaElrekbLqkWPYkgqNW1oeiNYC6xXifBnT9dVBOnNQRvOE9riU9w==", + "license": "Apache-2.0", + "dependencies": { + "@smithy/is-array-buffer": "^4.0.0", + "@smithy/protocol-http": "^5.1.0", + "@smithy/types": "^4.2.0", + "@smithy/util-hex-encoding": "^4.0.0", + "@smithy/util-middleware": "^4.0.2", + "@smithy/util-uri-escape": "^4.0.0", + "@smithy/util-utf8": "^4.0.0", + "tslib": "^2.6.2" + }, + "engines": { + "node": ">=18.0.0" + } + }, + "node_modules/@smithy/smithy-client": { + "version": "4.2.2", + "resolved": "https://registry.npmjs.org/@smithy/smithy-client/-/smithy-client-4.2.2.tgz", + "integrity": "sha512-3AnHfsMdq9Wg7+3BeR1HuLWI9+DMA/SoHVpCWq6xSsa52ikNd6nlF/wFzdpHyGtVa+Aji6lMgvwOF4sGcVA7SA==", + "license": "Apache-2.0", + "dependencies": { + "@smithy/core": "^3.3.1", + "@smithy/middleware-endpoint": "^4.1.2", + "@smithy/middleware-stack": "^4.0.2", + "@smithy/protocol-http": "^5.1.0", + "@smithy/types": "^4.2.0", + "@smithy/util-stream": "^4.2.0", + "tslib": "^2.6.2" + }, + "engines": { + "node": ">=18.0.0" + } + }, + "node_modules/@smithy/types": { + "version": "4.2.0", + "resolved": "https://registry.npmjs.org/@smithy/types/-/types-4.2.0.tgz", + "integrity": "sha512-7eMk09zQKCO+E/ivsjQv+fDlOupcFUCSC/L2YUPgwhvowVGWbPQHjEFcmjt7QQ4ra5lyowS92SV53Zc6XD4+fg==", + "license": "Apache-2.0", + "dependencies": { + "tslib": "^2.6.2" + }, + "engines": { + "node": ">=18.0.0" + } + }, + "node_modules/@smithy/url-parser": { + "version": "4.0.2", + "resolved": "https://registry.npmjs.org/@smithy/url-parser/-/url-parser-4.0.2.tgz", + "integrity": "sha512-Bm8n3j2ScqnT+kJaClSVCMeiSenK6jVAzZCNewsYWuZtnBehEz4r2qP0riZySZVfzB+03XZHJeqfmJDkeeSLiQ==", + 
"license": "Apache-2.0", + "dependencies": { + "@smithy/querystring-parser": "^4.0.2", + "@smithy/types": "^4.2.0", + "tslib": "^2.6.2" + }, + "engines": { + "node": ">=18.0.0" + } + }, + "node_modules/@smithy/util-base64": { + "version": "4.0.0", + "resolved": "https://registry.npmjs.org/@smithy/util-base64/-/util-base64-4.0.0.tgz", + "integrity": "sha512-CvHfCmO2mchox9kjrtzoHkWHxjHZzaFojLc8quxXY7WAAMAg43nuxwv95tATVgQFNDwd4M9S1qFzj40Ul41Kmg==", + "license": "Apache-2.0", + "dependencies": { + "@smithy/util-buffer-from": "^4.0.0", + "@smithy/util-utf8": "^4.0.0", + "tslib": "^2.6.2" + }, + "engines": { + "node": ">=18.0.0" + } + }, + "node_modules/@smithy/util-body-length-browser": { + "version": "4.0.0", + "resolved": "https://registry.npmjs.org/@smithy/util-body-length-browser/-/util-body-length-browser-4.0.0.tgz", + "integrity": "sha512-sNi3DL0/k64/LO3A256M+m3CDdG6V7WKWHdAiBBMUN8S3hK3aMPhwnPik2A/a2ONN+9doY9UxaLfgqsIRg69QA==", + "license": "Apache-2.0", + "dependencies": { + "tslib": "^2.6.2" + }, + "engines": { + "node": ">=18.0.0" + } + }, + "node_modules/@smithy/util-body-length-node": { + "version": "4.0.0", + "resolved": "https://registry.npmjs.org/@smithy/util-body-length-node/-/util-body-length-node-4.0.0.tgz", + "integrity": "sha512-q0iDP3VsZzqJyje8xJWEJCNIu3lktUGVoSy1KB0UWym2CL1siV3artm+u1DFYTLejpsrdGyCSWBdGNjJzfDPjg==", + "license": "Apache-2.0", + "dependencies": { + "tslib": "^2.6.2" + }, + "engines": { + "node": ">=18.0.0" + } + }, + "node_modules/@smithy/util-buffer-from": { + "version": "4.0.0", + "resolved": "https://registry.npmjs.org/@smithy/util-buffer-from/-/util-buffer-from-4.0.0.tgz", + "integrity": "sha512-9TOQ7781sZvddgO8nxueKi3+yGvkY35kotA0Y6BWRajAv8jjmigQ1sBwz0UX47pQMYXJPahSKEKYFgt+rXdcug==", + "license": "Apache-2.0", + "dependencies": { + "@smithy/is-array-buffer": "^4.0.0", + "tslib": "^2.6.2" + }, + "engines": { + "node": ">=18.0.0" + } + }, + "node_modules/@smithy/util-config-provider": { + "version": "4.0.0", + "resolved": 
"https://registry.npmjs.org/@smithy/util-config-provider/-/util-config-provider-4.0.0.tgz", + "integrity": "sha512-L1RBVzLyfE8OXH+1hsJ8p+acNUSirQnWQ6/EgpchV88G6zGBTDPdXiiExei6Z1wR2RxYvxY/XLw6AMNCCt8H3w==", + "license": "Apache-2.0", + "dependencies": { + "tslib": "^2.6.2" + }, + "engines": { + "node": ">=18.0.0" + } + }, + "node_modules/@smithy/util-defaults-mode-browser": { + "version": "4.0.10", + "resolved": "https://registry.npmjs.org/@smithy/util-defaults-mode-browser/-/util-defaults-mode-browser-4.0.10.tgz", + "integrity": "sha512-2k6fgUNOZ1Rn0gEjvGPGrDEINLG8qSBHsN7xlkkbO+fnHJ36BQPDzhFfMmYSDS8AgzoygqQiDOQ+6Hp2vBTUdA==", + "license": "Apache-2.0", + "dependencies": { + "@smithy/property-provider": "^4.0.2", + "@smithy/smithy-client": "^4.2.2", + "@smithy/types": "^4.2.0", + "bowser": "^2.11.0", + "tslib": "^2.6.2" + }, + "engines": { + "node": ">=18.0.0" + } + }, + "node_modules/@smithy/util-defaults-mode-node": { + "version": "4.0.10", + "resolved": "https://registry.npmjs.org/@smithy/util-defaults-mode-node/-/util-defaults-mode-node-4.0.10.tgz", + "integrity": "sha512-2XR1WRglLVmoIFts7bODUTgBdVyvkfKNkydHrlsI5VxW9q3s1hnJCuY+f1OHzvj5ue23q4vydM2fjrMjf2HSdQ==", + "license": "Apache-2.0", + "dependencies": { + "@smithy/config-resolver": "^4.1.0", + "@smithy/credential-provider-imds": "^4.0.2", + "@smithy/node-config-provider": "^4.0.2", + "@smithy/property-provider": "^4.0.2", + "@smithy/smithy-client": "^4.2.2", + "@smithy/types": "^4.2.0", + "tslib": "^2.6.2" + }, + "engines": { + "node": ">=18.0.0" + } + }, + "node_modules/@smithy/util-endpoints": { + "version": "3.0.2", + "resolved": "https://registry.npmjs.org/@smithy/util-endpoints/-/util-endpoints-3.0.2.tgz", + "integrity": "sha512-6QSutU5ZyrpNbnd51zRTL7goojlcnuOB55+F9VBD+j8JpRY50IGamsjlycrmpn8PQkmJucFW8A0LSfXj7jjtLQ==", + "license": "Apache-2.0", + "dependencies": { + "@smithy/node-config-provider": "^4.0.2", + "@smithy/types": "^4.2.0", + "tslib": "^2.6.2" + }, + "engines": { + "node": ">=18.0.0" + } + 
}, + "node_modules/@smithy/util-hex-encoding": { + "version": "4.0.0", + "resolved": "https://registry.npmjs.org/@smithy/util-hex-encoding/-/util-hex-encoding-4.0.0.tgz", + "integrity": "sha512-Yk5mLhHtfIgW2W2WQZWSg5kuMZCVbvhFmC7rV4IO2QqnZdbEFPmQnCcGMAX2z/8Qj3B9hYYNjZOhWym+RwhePw==", + "license": "Apache-2.0", + "dependencies": { + "tslib": "^2.6.2" + }, + "engines": { + "node": ">=18.0.0" + } + }, + "node_modules/@smithy/util-middleware": { + "version": "4.0.2", + "resolved": "https://registry.npmjs.org/@smithy/util-middleware/-/util-middleware-4.0.2.tgz", + "integrity": "sha512-6GDamTGLuBQVAEuQ4yDQ+ti/YINf/MEmIegrEeg7DdB/sld8BX1lqt9RRuIcABOhAGTA50bRbPzErez7SlDtDQ==", + "license": "Apache-2.0", + "dependencies": { + "@smithy/types": "^4.2.0", + "tslib": "^2.6.2" + }, + "engines": { + "node": ">=18.0.0" + } + }, + "node_modules/@smithy/util-retry": { + "version": "4.0.3", + "resolved": "https://registry.npmjs.org/@smithy/util-retry/-/util-retry-4.0.3.tgz", + "integrity": "sha512-DPuYjZQDXmKr/sNvy9Spu8R/ESa2e22wXZzSAY6NkjOLj6spbIje/Aq8rT97iUMdDj0qHMRIe+bTxvlU74d9Ng==", + "license": "Apache-2.0", + "dependencies": { + "@smithy/service-error-classification": "^4.0.3", + "@smithy/types": "^4.2.0", + "tslib": "^2.6.2" + }, + "engines": { + "node": ">=18.0.0" + } + }, + "node_modules/@smithy/util-stream": { + "version": "4.2.0", + "resolved": "https://registry.npmjs.org/@smithy/util-stream/-/util-stream-4.2.0.tgz", + "integrity": "sha512-Vj1TtwWnuWqdgQI6YTUF5hQ/0jmFiOYsc51CSMgj7QfyO+RF4EnT2HNjoviNlOOmgzgvf3f5yno+EiC4vrnaWQ==", + "license": "Apache-2.0", + "dependencies": { + "@smithy/fetch-http-handler": "^5.0.2", + "@smithy/node-http-handler": "^4.0.4", + "@smithy/types": "^4.2.0", + "@smithy/util-base64": "^4.0.0", + "@smithy/util-buffer-from": "^4.0.0", + "@smithy/util-hex-encoding": "^4.0.0", + "@smithy/util-utf8": "^4.0.0", + "tslib": "^2.6.2" + }, + "engines": { + "node": ">=18.0.0" + } + }, + "node_modules/@smithy/util-uri-escape": { + "version": "4.0.0", + 
"resolved": "https://registry.npmjs.org/@smithy/util-uri-escape/-/util-uri-escape-4.0.0.tgz", + "integrity": "sha512-77yfbCbQMtgtTylO9itEAdpPXSog3ZxMe09AEhm0dU0NLTalV70ghDZFR+Nfi1C60jnJoh/Re4090/DuZh2Omg==", + "license": "Apache-2.0", + "dependencies": { + "tslib": "^2.6.2" + }, + "engines": { + "node": ">=18.0.0" + } + }, + "node_modules/@smithy/util-utf8": { + "version": "4.0.0", + "resolved": "https://registry.npmjs.org/@smithy/util-utf8/-/util-utf8-4.0.0.tgz", + "integrity": "sha512-b+zebfKCfRdgNJDknHCob3O7FpeYQN6ZG6YLExMcasDHsCXlsXCEuiPZeLnJLpwa5dvPetGlnGCiMHuLwGvFow==", + "license": "Apache-2.0", + "dependencies": { + "@smithy/util-buffer-from": "^4.0.0", + "tslib": "^2.6.2" + }, + "engines": { + "node": ">=18.0.0" + } + }, + "node_modules/@smithy/util-waiter": { + "version": "4.0.3", + "resolved": "https://registry.npmjs.org/@smithy/util-waiter/-/util-waiter-4.0.3.tgz", + "integrity": "sha512-JtaY3FxmD+te+KSI2FJuEcfNC9T/DGGVf551babM7fAaXhjJUt7oSYurH1Devxd2+BOSUACCgt3buinx4UnmEA==", + "license": "Apache-2.0", + "dependencies": { + "@smithy/abort-controller": "^4.0.2", + "@smithy/types": "^4.2.0", + "tslib": "^2.6.2" + }, + "engines": { + "node": ">=18.0.0" + } + }, + "node_modules/@types/uuid": { + "version": "9.0.8", + "resolved": "https://registry.npmjs.org/@types/uuid/-/uuid-9.0.8.tgz", + "integrity": "sha512-jg+97EGIcY9AGHJJRaaPVgetKDsrTgbRjQ5Msgjh/DQKEFl0DtyRr/VCOyD1T2R1MNeWPK/u7JoGhlDZnKBAfA==", + "license": "MIT" + }, + "node_modules/bowser": { + "version": "2.11.0", + "resolved": "https://registry.npmjs.org/bowser/-/bowser-2.11.0.tgz", + "integrity": "sha512-AlcaJBi/pqqJBIQ8U9Mcpc9i8Aqxn88Skv5d+xBX006BY5u8N3mGLHa5Lgppa7L/HfwgwLgZ6NYs+Ag6uUmJRA==", + "license": "MIT" + }, + "node_modules/fast-xml-parser": { + "version": "4.4.1", + "resolved": "https://registry.npmjs.org/fast-xml-parser/-/fast-xml-parser-4.4.1.tgz", + "integrity": "sha512-xkjOecfnKGkSsOwtZ5Pz7Us/T6mrbPQrq0nh+aCO5V9nk5NLWmasAHumTKjiPJPWANe+kAZ84Jc8ooJkzZ88Sw==", + "funding": [ + { + 
"type": "github", + "url": "https://github.com/sponsors/NaturalIntelligence" + }, + { + "type": "paypal", + "url": "https://paypal.me/naturalintelligence" + } + ], + "license": "MIT", + "dependencies": { + "strnum": "^1.0.5" + }, + "bin": { + "fxparser": "src/cli/cli.js" + } + }, + "node_modules/mnemonist": { + "version": "0.38.3", + "resolved": "https://registry.npmjs.org/mnemonist/-/mnemonist-0.38.3.tgz", + "integrity": "sha512-2K9QYubXx/NAjv4VLq1d1Ly8pWNC5L3BrixtdkyTegXWJIqY+zLNDhhX/A+ZwWt70tB1S8H4BE8FLYEFyNoOBw==", + "license": "MIT", + "dependencies": { + "obliterator": "^1.6.1" + } + }, + "node_modules/obliterator": { + "version": "1.6.1", + "resolved": "https://registry.npmjs.org/obliterator/-/obliterator-1.6.1.tgz", + "integrity": "sha512-9WXswnqINnnhOG/5SLimUlzuU1hFJUc8zkwyD59Sd+dPOMf05PmnYG/d6Q7HZ+KmgkZJa1PxRso6QdM3sTNHig==", + "license": "MIT" + }, + "node_modules/strnum": { + "version": "1.1.2", + "resolved": "https://registry.npmjs.org/strnum/-/strnum-1.1.2.tgz", + "integrity": "sha512-vrN+B7DBIoTTZjnPNewwhx6cBA/H+IS7rfW68n7XxC1y7uoiGQBxaKzqucGUgavX15dJgiGztLJ8vxuEzwqBdA==", + "funding": [ + { + "type": "github", + "url": "https://github.com/sponsors/NaturalIntelligence" + } + ], + "license": "MIT" + }, + "node_modules/tslib": { + "version": "2.8.1", + "resolved": "https://registry.npmjs.org/tslib/-/tslib-2.8.1.tgz", + "integrity": "sha512-oJFu94HQb+KVduSUQL7wnpmqnfmLsOA/nAh6b6EH0wCEoK0/mPeXU6c3wKDV83MkOuHPRHtSXKKU99IBazS/2w==", + "license": "0BSD" + }, + "node_modules/uuid": { + "version": "9.0.1", + "resolved": "https://registry.npmjs.org/uuid/-/uuid-9.0.1.tgz", + "integrity": "sha512-b+1eJOlsR9K8HJpow9Ok3fiWOWSIcIzXodvv0rQjVoOVNpWMpxf1wZNpt4y9h10odCNrqnYp1OBzRktckBe3sA==", + "funding": [ + "https://github.com/sponsors/broofa", + "https://github.com/sponsors/ctavan" + ], + "license": "MIT", + "bin": { + "uuid": "dist/bin/uuid" + } + } + } +} diff --git a/amplify/functions/downloadDocument/node_modules/@aws-crypto/sha256-browser/CHANGELOG.md 
b/amplify/functions/downloadDocument/node_modules/@aws-crypto/sha256-browser/CHANGELOG.md new file mode 100644 index 0000000..e6036f8 --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@aws-crypto/sha256-browser/CHANGELOG.md @@ -0,0 +1,118 @@ +# Change Log + +All notable changes to this project will be documented in this file. +See [Conventional Commits](https://conventionalcommits.org) for commit guidelines. + +# [5.2.0](https://github.com/aws/aws-sdk-js-crypto-helpers/compare/v5.1.0...v5.2.0) (2023-10-16) + +### Features + +- support ESM artifacts in all packages ([#752](https://github.com/aws/aws-sdk-js-crypto-helpers/issues/752)) ([e930ffb](https://github.com/aws/aws-sdk-js-crypto-helpers/commit/e930ffba5cfef66dd242049e7d514ced232c1e3b)) + +# [5.1.0](https://github.com/aws/aws-sdk-js-crypto-helpers/compare/v5.0.0...v5.1.0) (2023-09-22) + +### Bug Fixes + +- Update tsc to 2.x ([#735](https://github.com/aws/aws-sdk-js-crypto-helpers/issues/735)) ([782e0de](https://github.com/aws/aws-sdk-js-crypto-helpers/commit/782e0de9f5fef41f694130580a69d940894b6b8c)) + +### Features + +- Use @smithy/util-utf8 ([#730](https://github.com/aws/aws-sdk-js-crypto-helpers/issues/730)) ([00fb851](https://github.com/aws/aws-sdk-js-crypto-helpers/commit/00fb851ca3559d5a1f370f9256814de1210826b8)), closes [#699](https://github.com/aws/aws-sdk-js-crypto-helpers/issues/699) + +# [5.0.0](https://github.com/aws/aws-sdk-js-crypto-helpers/compare/v4.0.1...v5.0.0) (2023-07-13) + +- feat!: drop support for IE 11 (#629) ([6c49fb6](https://github.com/aws/aws-sdk-js-crypto-helpers/commit/6c49fb6c1b1f18bbff02dbd77a37a21bdb40c959)), closes [#629](https://github.com/aws/aws-sdk-js-crypto-helpers/issues/629) + +### BREAKING CHANGES + +- Remove support for IE11 + +Co-authored-by: texastony <5892063+texastony@users.noreply.github.com> + +# [4.0.0](https://github.com/aws/aws-sdk-js-crypto-helpers/compare/v3.0.0...v4.0.0) (2023-02-20) + +**Note:** Version bump only for package 
@aws-crypto/sha256-browser + +# [3.0.0](https://github.com/aws/aws-sdk-js-crypto-helpers/compare/v2.0.2...v3.0.0) (2023-01-12) + +### Bug Fixes + +- **docs:** sha256 packages, clarify hmac support ([#455](https://github.com/aws/aws-sdk-js-crypto-helpers/issues/455)) ([1be5043](https://github.com/aws/aws-sdk-js-crypto-helpers/commit/1be5043325991f3f5ccb52a8dd928f004b4d442e)) + +- feat!: replace Hash implementations with Checksum interface (#492) ([da43dc0](https://github.com/aws/aws-sdk-js-crypto-helpers/commit/da43dc0fdf669d9ebb5bfb1b1f7c79e46c4aaae1)), closes [#492](https://github.com/aws/aws-sdk-js-crypto-helpers/issues/492) + +### BREAKING CHANGES + +- All classes that implemented `Hash` now implement `Checksum`. + +## [2.0.2](https://github.com/aws/aws-sdk-js-crypto-helpers/compare/v2.0.1...v2.0.2) (2022-09-07) + +### Bug Fixes + +- **#337:** update @aws-sdk/types ([#373](https://github.com/aws/aws-sdk-js-crypto-helpers/issues/373)) ([b26a811](https://github.com/aws/aws-sdk-js-crypto-helpers/commit/b26a811a392f5209c7ec7e57251500d4d78f97ff)), closes [#337](https://github.com/aws/aws-sdk-js-crypto-helpers/issues/337) + +## [2.0.1](https://github.com/aws/aws-sdk-js-crypto-helpers/compare/v2.0.0...v2.0.1) (2021-12-09) + +**Note:** Version bump only for package @aws-crypto/sha256-browser + +# [2.0.0](https://github.com/aws/aws-sdk-js-crypto-helpers/compare/v1.2.2...v2.0.0) (2021-10-25) + +**Note:** Version bump only for package @aws-crypto/sha256-browser + +## [1.2.2](https://github.com/aws/aws-sdk-js-crypto-helpers/compare/v1.2.1...v1.2.2) (2021-10-12) + +**Note:** Version bump only for package @aws-crypto/sha256-browser + +## [1.2.1](https://github.com/aws/aws-sdk-js-crypto-helpers/compare/v1.2.0...v1.2.1) (2021-09-17) + +**Note:** Version bump only for package @aws-crypto/sha256-browser + +# [1.2.0](https://github.com/aws/aws-sdk-js-crypto-helpers/compare/v1.1.1...v1.2.0) (2021-09-17) + +### Features + +- add @aws-crypto/util 
([8f489cb](https://github.com/aws/aws-sdk-js-crypto-helpers/commit/8f489cbe4c0e134f826bac66f1bf5172597048b9)) + +## [1.1.1](https://github.com/aws/aws-sdk-js-crypto-helpers/compare/@aws-crypto/sha256-browser@1.1.0...@aws-crypto/sha256-browser@1.1.1) (2021-07-13) + +### Bug Fixes + +- **sha256-browser:** throw errors not string ([#194](https://github.com/aws/aws-sdk-js-crypto-helpers/issues/194)) ([7fa7ac4](https://github.com/aws/aws-sdk-js-crypto-helpers/commit/7fa7ac445ef7a04dfb1ff479e7114aba045b2b2c)) + +# [1.1.0](https://github.com/aws/aws-sdk-js-crypto-helpers/compare/@aws-crypto/sha256-browser@1.0.0...@aws-crypto/sha256-browser@1.1.0) (2021-01-13) + +### Bug Fixes + +- remove package lock ([6002a5a](https://github.com/aws/aws-sdk-js-crypto-helpers/commit/6002a5ab9218dc8798c19dc205d3eebd3bec5b43)) +- **aws-crypto:** export explicit dependencies on [@aws-types](https://github.com/aws-types) ([6a1873a](https://github.com/aws/aws-sdk-js-crypto-helpers/commit/6a1873a4dcc2aaa4a1338595703cfa7099f17b8c)) +- **deps-dev:** move @aws-sdk/types to devDependencies ([#188](https://github.com/aws/aws-sdk-js-crypto-helpers/issues/188)) ([08efdf4](https://github.com/aws/aws-sdk-js-crypto-helpers/commit/08efdf46dcc612d88c441e29945d787f253ee77d)) + +# [1.0.0](https://github.com/aws/aws-sdk-js-crypto-helpers/compare/@aws-crypto/sha256-browser@1.0.0-alpha.0...@aws-crypto/sha256-browser@1.0.0) (2020-10-22) + +**Note:** Version bump only for package @aws-crypto/sha256-browser + +# [1.0.0-alpha.0](https://github.com/aws/aws-sdk-js-crypto-helpers/compare/@aws-crypto/sha256-browser@0.1.0-preview.4...@aws-crypto/sha256-browser@1.0.0-alpha.0) (2020-02-07) + +**Note:** Version bump only for package @aws-crypto/sha256-browser + +# [0.1.0-preview.4](https://github.com/aws/aws-sdk-js-crypto-helpers/compare/@aws-crypto/sha256-browser@0.1.0-preview.2...@aws-crypto/sha256-browser@0.1.0-preview.4) (2020-01-16) + +### Bug Fixes + +- Changed package.json files to point to the right Git repo 
([#9](https://github.com/aws/aws-sdk-js-crypto-helpers/issues/9)) ([028245d](https://github.com/aws/aws-sdk-js-crypto-helpers/commit/028245d72e642ca98d82226afb300eb154503c4a)), closes [#8](https://github.com/aws/aws-sdk-js-crypto-helpers/issues/8) +- es2015.iterable required ([#10](https://github.com/aws/aws-sdk-js-crypto-helpers/issues/10)) ([6e08d83](https://github.com/aws/aws-sdk-js-crypto-helpers/commit/6e08d83c33667ad8cbeeaaa7cedf1bbe05f79ed8)) +- lerna version maintains package-lock ([#14](https://github.com/aws/aws-sdk-js-crypto-helpers/issues/14)) ([2ef29e1](https://github.com/aws/aws-sdk-js-crypto-helpers/commit/2ef29e13779703a5c9b32e93d18918fcb33b7272)), closes [#13](https://github.com/aws/aws-sdk-js-crypto-helpers/issues/13) + +# [0.1.0-preview.3](https://github.com/aws/aws-sdk-js-crypto-helpers/compare/@aws-crypto/sha256-browser@0.1.0-preview.2...@aws-crypto/sha256-browser@0.1.0-preview.3) (2019-11-15) + +### Bug Fixes + +- Changed package.json files to point to the right Git repo ([#9](https://github.com/aws/aws-sdk-js-crypto-helpers/issues/9)) ([028245d](https://github.com/aws/aws-sdk-js-crypto-helpers/commit/028245d72e642ca98d82226afb300eb154503c4a)), closes [#8](https://github.com/aws/aws-sdk-js-crypto-helpers/issues/8) +- es2015.iterable required ([#10](https://github.com/aws/aws-sdk-js-crypto-helpers/issues/10)) ([6e08d83](https://github.com/aws/aws-sdk-js-crypto-helpers/commit/6e08d83c33667ad8cbeeaaa7cedf1bbe05f79ed8)) +- lerna version maintains package-lock ([#14](https://github.com/aws/aws-sdk-js-crypto-helpers/issues/14)) ([2ef29e1](https://github.com/aws/aws-sdk-js-crypto-helpers/commit/2ef29e13779703a5c9b32e93d18918fcb33b7272)), closes [#13](https://github.com/aws/aws-sdk-js-crypto-helpers/issues/13) + +# [0.1.0-preview.2](https://github.com/aws/aws-javascript-crypto-helpers/compare/@aws-crypto/sha256-browser@0.1.0-preview.1...@aws-crypto/sha256-browser@0.1.0-preview.2) (2019-10-30) + +### Bug Fixes + +- remove /src/ from .npmignore (for 
sourcemaps) ([#5](https://github.com/aws/aws-javascript-crypto-helpers/issues/5)) ([ec52056](https://github.com/aws/aws-javascript-crypto-helpers/commit/ec52056)) diff --git a/amplify/functions/downloadDocument/node_modules/@aws-crypto/sha256-browser/LICENSE b/amplify/functions/downloadDocument/node_modules/@aws-crypto/sha256-browser/LICENSE new file mode 100644 index 0000000..d645695 --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@aws-crypto/sha256-browser/LICENSE @@ -0,0 +1,202 @@ + + Apache License + Version 2.0, January 2004 + http://www.apache.org/licenses/ + + TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION + + 1. Definitions. + + "License" shall mean the terms and conditions for use, reproduction, + and distribution as defined by Sections 1 through 9 of this document. + + "Licensor" shall mean the copyright owner or entity authorized by + the copyright owner that is granting the License. + + "Legal Entity" shall mean the union of the acting entity and all + other entities that control, are controlled by, or are under common + control with that entity. For the purposes of this definition, + "control" means (i) the power, direct or indirect, to cause the + direction or management of such entity, whether by contract or + otherwise, or (ii) ownership of fifty percent (50%) or more of the + outstanding shares, or (iii) beneficial ownership of such entity. + + "You" (or "Your") shall mean an individual or Legal Entity + exercising permissions granted by this License. + + "Source" form shall mean the preferred form for making modifications, + including but not limited to software source code, documentation + source, and configuration files. + + "Object" form shall mean any form resulting from mechanical + transformation or translation of a Source form, including but + not limited to compiled object code, generated documentation, + and conversions to other media types. 
+ + "Work" shall mean the work of authorship, whether in Source or + Object form, made available under the License, as indicated by a + copyright notice that is included in or attached to the work + (an example is provided in the Appendix below). + + "Derivative Works" shall mean any work, whether in Source or Object + form, that is based on (or derived from) the Work and for which the + editorial revisions, annotations, elaborations, or other modifications + represent, as a whole, an original work of authorship. For the purposes + of this License, Derivative Works shall not include works that remain + separable from, or merely link (or bind by name) to the interfaces of, + the Work and Derivative Works thereof. + + "Contribution" shall mean any work of authorship, including + the original version of the Work and any modifications or additions + to that Work or Derivative Works thereof, that is intentionally + submitted to Licensor for inclusion in the Work by the copyright owner + or by an individual or Legal Entity authorized to submit on behalf of + the copyright owner. For the purposes of this definition, "submitted" + means any form of electronic, verbal, or written communication sent + to the Licensor or its representatives, including but not limited to + communication on electronic mailing lists, source code control systems, + and issue tracking systems that are managed by, or on behalf of, the + Licensor for the purpose of discussing and improving the Work, but + excluding communication that is conspicuously marked or otherwise + designated in writing by the copyright owner as "Not a Contribution." + + "Contributor" shall mean Licensor and any individual or Legal Entity + on behalf of whom a Contribution has been received by Licensor and + subsequently incorporated within the Work. + + 2. Grant of Copyright License. 
Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + copyright license to reproduce, prepare Derivative Works of, + publicly display, publicly perform, sublicense, and distribute the + Work and such Derivative Works in Source or Object form. + + 3. Grant of Patent License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + (except as stated in this section) patent license to make, have made, + use, offer to sell, sell, import, and otherwise transfer the Work, + where such license applies only to those patent claims licensable + by such Contributor that are necessarily infringed by their + Contribution(s) alone or by combination of their Contribution(s) + with the Work to which such Contribution(s) was submitted. If You + institute patent litigation against any entity (including a + cross-claim or counterclaim in a lawsuit) alleging that the Work + or a Contribution incorporated within the Work constitutes direct + or contributory patent infringement, then any patent licenses + granted to You under this License for that Work shall terminate + as of the date such litigation is filed. + + 4. Redistribution. 
You may reproduce and distribute copies of the + Work or Derivative Works thereof in any medium, with or without + modifications, and in Source or Object form, provided that You + meet the following conditions: + + (a) You must give any other recipients of the Work or + Derivative Works a copy of this License; and + + (b) You must cause any modified files to carry prominent notices + stating that You changed the files; and + + (c) You must retain, in the Source form of any Derivative Works + that You distribute, all copyright, patent, trademark, and + attribution notices from the Source form of the Work, + excluding those notices that do not pertain to any part of + the Derivative Works; and + + (d) If the Work includes a "NOTICE" text file as part of its + distribution, then any Derivative Works that You distribute must + include a readable copy of the attribution notices contained + within such NOTICE file, excluding those notices that do not + pertain to any part of the Derivative Works, in at least one + of the following places: within a NOTICE text file distributed + as part of the Derivative Works; within the Source form or + documentation, if provided along with the Derivative Works; or, + within a display generated by the Derivative Works, if and + wherever such third-party notices normally appear. The contents + of the NOTICE file are for informational purposes only and + do not modify the License. You may add Your own attribution + notices within Derivative Works that You distribute, alongside + or as an addendum to the NOTICE text from the Work, provided + that such additional attribution notices cannot be construed + as modifying the License. 
+ + You may add Your own copyright statement to Your modifications and + may provide additional or different license terms and conditions + for use, reproduction, or distribution of Your modifications, or + for any such Derivative Works as a whole, provided Your use, + reproduction, and distribution of the Work otherwise complies with + the conditions stated in this License. + + 5. Submission of Contributions. Unless You explicitly state otherwise, + any Contribution intentionally submitted for inclusion in the Work + by You to the Licensor shall be under the terms and conditions of + this License, without any additional terms or conditions. + Notwithstanding the above, nothing herein shall supersede or modify + the terms of any separate license agreement you may have executed + with Licensor regarding such Contributions. + + 6. Trademarks. This License does not grant permission to use the trade + names, trademarks, service marks, or product names of the Licensor, + except as required for reasonable and customary use in describing the + origin of the Work and reproducing the content of the NOTICE file. + + 7. Disclaimer of Warranty. Unless required by applicable law or + agreed to in writing, Licensor provides the Work (and each + Contributor provides its Contributions) on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or + implied, including, without limitation, any warranties or conditions + of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A + PARTICULAR PURPOSE. You are solely responsible for determining the + appropriateness of using or redistributing the Work and assume any + risks associated with Your exercise of permissions under this License. + + 8. Limitation of Liability. 
In no event and under no legal theory, + whether in tort (including negligence), contract, or otherwise, + unless required by applicable law (such as deliberate and grossly + negligent acts) or agreed to in writing, shall any Contributor be + liable to You for damages, including any direct, indirect, special, + incidental, or consequential damages of any character arising as a + result of this License or out of the use or inability to use the + Work (including but not limited to damages for loss of goodwill, + work stoppage, computer failure or malfunction, or any and all + other commercial damages or losses), even if such Contributor + has been advised of the possibility of such damages. + + 9. Accepting Warranty or Additional Liability. While redistributing + the Work or Derivative Works thereof, You may choose to offer, + and charge a fee for, acceptance of support, warranty, indemnity, + or other liability obligations and/or rights consistent with this + License. However, in accepting such obligations, You may act only + on Your own behalf and on Your sole responsibility, not on behalf + of any other Contributor, and only if You agree to indemnify, + defend, and hold each Contributor harmless for any liability + incurred by, or claims asserted against, such Contributor by reason + of your accepting any such warranty or additional liability. + + END OF TERMS AND CONDITIONS + + APPENDIX: How to apply the Apache License to your work. + + To apply the Apache License to your work, attach the following + boilerplate notice, with the fields enclosed by brackets "[]" + replaced with your own identifying information. (Don't include + the brackets!) The text should be enclosed in the appropriate + comment syntax for the file format. We also recommend that a + file or class name and description of purpose be included on the + same "printed page" as the copyright notice for easier + identification within third-party archives. 
+ + Copyright [yyyy] [name of copyright owner] + + Licensed under the Apache License, Version 2.0 (the "License"); + you may not use this file except in compliance with the License. + You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + + Unless required by applicable law or agreed to in writing, software + distributed under the License is distributed on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + See the License for the specific language governing permissions and + limitations under the License. diff --git a/amplify/functions/downloadDocument/node_modules/@aws-crypto/sha256-browser/README.md b/amplify/functions/downloadDocument/node_modules/@aws-crypto/sha256-browser/README.md new file mode 100644 index 0000000..75bf105 --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@aws-crypto/sha256-browser/README.md @@ -0,0 +1,31 @@ +# @aws-crypto/sha256-browser + +SHA256 wrapper for browsers that prefers `window.crypto.subtle` but will +fall back to a pure JS implementation in @aws-crypto/sha256-js +to provide a consistent interface for SHA256. 
+ +## Usage + +- To hash "some data" +``` +import {Sha256} from '@aws-crypto/sha256-browser' + +const hash = new Sha256(); +hash.update('some data'); +const result = await hash.digest(); + +``` + +- To hmac "some data" with "a key" +``` +import {Sha256} from '@aws-crypto/sha256-browser' + +const hash = new Sha256('a key'); +hash.update('some data'); +const result = await hash.digest(); + +``` + +## Test + +`npm test` diff --git a/amplify/functions/downloadDocument/node_modules/@aws-crypto/sha256-browser/build/main/constants.d.ts b/amplify/functions/downloadDocument/node_modules/@aws-crypto/sha256-browser/build/main/constants.d.ts new file mode 100644 index 0000000..fe8def7 --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@aws-crypto/sha256-browser/build/main/constants.d.ts @@ -0,0 +1,10 @@ +export declare const SHA_256_HASH: { + name: "SHA-256"; +}; +export declare const SHA_256_HMAC_ALGO: { + name: "HMAC"; + hash: { + name: "SHA-256"; + }; +}; +export declare const EMPTY_DATA_SHA_256: Uint8Array; diff --git a/amplify/functions/downloadDocument/node_modules/@aws-crypto/sha256-browser/build/main/constants.js b/amplify/functions/downloadDocument/node_modules/@aws-crypto/sha256-browser/build/main/constants.js new file mode 100644 index 0000000..acb5c55 --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@aws-crypto/sha256-browser/build/main/constants.js @@ -0,0 +1,43 @@ +"use strict"; +Object.defineProperty(exports, "__esModule", { value: true }); +exports.EMPTY_DATA_SHA_256 = exports.SHA_256_HMAC_ALGO = exports.SHA_256_HASH = void 0; +exports.SHA_256_HASH = { name: "SHA-256" }; +exports.SHA_256_HMAC_ALGO = { + name: "HMAC", + hash: exports.SHA_256_HASH +}; +exports.EMPTY_DATA_SHA_256 = new Uint8Array([ + 227, + 176, + 196, + 66, + 152, + 252, + 28, + 20, + 154, + 251, + 244, + 200, + 153, + 111, + 185, + 36, + 39, + 174, + 65, + 228, + 100, + 155, + 147, + 76, + 164, + 149, + 153, + 27, + 120, + 82, + 184, + 85 +]); +//# 
sourceMappingURL=constants.js.map \ No newline at end of file diff --git a/amplify/functions/downloadDocument/node_modules/@aws-crypto/sha256-browser/build/main/constants.js.map b/amplify/functions/downloadDocument/node_modules/@aws-crypto/sha256-browser/build/main/constants.js.map new file mode 100644 index 0000000..217561a --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@aws-crypto/sha256-browser/build/main/constants.js.map @@ -0,0 +1 @@ +{"version":3,"file":"constants.js","sourceRoot":"","sources":["../../src/constants.ts"],"names":[],"mappings":";;;AAAa,QAAA,YAAY,GAAwB,EAAE,IAAI,EAAE,SAAS,EAAE,CAAC;AAExD,QAAA,iBAAiB,GAAgD;IAC5E,IAAI,EAAE,MAAM;IACZ,IAAI,EAAE,oBAAY;CACnB,CAAC;AAEW,QAAA,kBAAkB,GAAG,IAAI,UAAU,CAAC;IAC/C,GAAG;IACH,GAAG;IACH,GAAG;IACH,EAAE;IACF,GAAG;IACH,GAAG;IACH,EAAE;IACF,EAAE;IACF,GAAG;IACH,GAAG;IACH,GAAG;IACH,GAAG;IACH,GAAG;IACH,GAAG;IACH,GAAG;IACH,EAAE;IACF,EAAE;IACF,GAAG;IACH,EAAE;IACF,GAAG;IACH,GAAG;IACH,GAAG;IACH,GAAG;IACH,EAAE;IACF,GAAG;IACH,GAAG;IACH,GAAG;IACH,EAAE;IACF,GAAG;IACH,EAAE;IACF,GAAG;IACH,EAAE;CACH,CAAC,CAAC"} \ No newline at end of file diff --git a/amplify/functions/downloadDocument/node_modules/@aws-crypto/sha256-browser/build/main/crossPlatformSha256.d.ts b/amplify/functions/downloadDocument/node_modules/@aws-crypto/sha256-browser/build/main/crossPlatformSha256.d.ts new file mode 100644 index 0000000..055d3ef --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@aws-crypto/sha256-browser/build/main/crossPlatformSha256.d.ts @@ -0,0 +1,8 @@ +import { Checksum, SourceData } from "@aws-sdk/types"; +export declare class Sha256 implements Checksum { + private hash; + constructor(secret?: SourceData); + update(data: SourceData, encoding?: "utf8" | "ascii" | "latin1"): void; + digest(): Promise; + reset(): void; +} diff --git a/amplify/functions/downloadDocument/node_modules/@aws-crypto/sha256-browser/build/main/crossPlatformSha256.js 
b/amplify/functions/downloadDocument/node_modules/@aws-crypto/sha256-browser/build/main/crossPlatformSha256.js new file mode 100644 index 0000000..cde2a42 --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@aws-crypto/sha256-browser/build/main/crossPlatformSha256.js @@ -0,0 +1,30 @@ +"use strict"; +Object.defineProperty(exports, "__esModule", { value: true }); +exports.Sha256 = void 0; +var webCryptoSha256_1 = require("./webCryptoSha256"); +var sha256_js_1 = require("@aws-crypto/sha256-js"); +var supports_web_crypto_1 = require("@aws-crypto/supports-web-crypto"); +var util_locate_window_1 = require("@aws-sdk/util-locate-window"); +var util_1 = require("@aws-crypto/util"); +var Sha256 = /** @class */ (function () { + function Sha256(secret) { + if ((0, supports_web_crypto_1.supportsWebCrypto)((0, util_locate_window_1.locateWindow)())) { + this.hash = new webCryptoSha256_1.Sha256(secret); + } + else { + this.hash = new sha256_js_1.Sha256(secret); + } + } + Sha256.prototype.update = function (data, encoding) { + this.hash.update((0, util_1.convertToBuffer)(data)); + }; + Sha256.prototype.digest = function () { + return this.hash.digest(); + }; + Sha256.prototype.reset = function () { + this.hash.reset(); + }; + return Sha256; +}()); +exports.Sha256 = Sha256; +//# sourceMappingURL=crossPlatformSha256.js.map \ No newline at end of file diff --git a/amplify/functions/downloadDocument/node_modules/@aws-crypto/sha256-browser/build/main/crossPlatformSha256.js.map b/amplify/functions/downloadDocument/node_modules/@aws-crypto/sha256-browser/build/main/crossPlatformSha256.js.map new file mode 100644 index 0000000..9a177dc --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@aws-crypto/sha256-browser/build/main/crossPlatformSha256.js.map @@ -0,0 +1 @@ 
+{"version":3,"file":"crossPlatformSha256.js","sourceRoot":"","sources":["../../src/crossPlatformSha256.ts"],"names":[],"mappings":";;;AAAA,qDAA8D;AAC9D,mDAA2D;AAE3D,uEAAoE;AACpE,kEAA2D;AAC3D,yCAAmD;AAEnD;IAGE,gBAAY,MAAmB;QAC7B,IAAI,IAAA,uCAAiB,EAAC,IAAA,iCAAY,GAAE,CAAC,EAAE;YACrC,IAAI,CAAC,IAAI,GAAG,IAAI,wBAAe,CAAC,MAAM,CAAC,CAAC;SACzC;aAAM;YACL,IAAI,CAAC,IAAI,GAAG,IAAI,kBAAQ,CAAC,MAAM,CAAC,CAAC;SAClC;IACH,CAAC;IAED,uBAAM,GAAN,UAAO,IAAgB,EAAE,QAAsC;QAC7D,IAAI,CAAC,IAAI,CAAC,MAAM,CAAC,IAAA,sBAAe,EAAC,IAAI,CAAC,CAAC,CAAC;IAC1C,CAAC;IAED,uBAAM,GAAN;QACE,OAAO,IAAI,CAAC,IAAI,CAAC,MAAM,EAAE,CAAC;IAC5B,CAAC;IAED,sBAAK,GAAL;QACE,IAAI,CAAC,IAAI,CAAC,KAAK,EAAE,CAAC;IACpB,CAAC;IACH,aAAC;AAAD,CAAC,AAtBD,IAsBC;AAtBY,wBAAM"} \ No newline at end of file diff --git a/amplify/functions/downloadDocument/node_modules/@aws-crypto/sha256-browser/build/main/index.d.ts b/amplify/functions/downloadDocument/node_modules/@aws-crypto/sha256-browser/build/main/index.d.ts new file mode 100644 index 0000000..60ab397 --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@aws-crypto/sha256-browser/build/main/index.d.ts @@ -0,0 +1,2 @@ +export * from "./crossPlatformSha256"; +export { Sha256 as WebCryptoSha256 } from "./webCryptoSha256"; diff --git a/amplify/functions/downloadDocument/node_modules/@aws-crypto/sha256-browser/build/main/index.js b/amplify/functions/downloadDocument/node_modules/@aws-crypto/sha256-browser/build/main/index.js new file mode 100644 index 0000000..a270349 --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@aws-crypto/sha256-browser/build/main/index.js @@ -0,0 +1,8 @@ +"use strict"; +Object.defineProperty(exports, "__esModule", { value: true }); +exports.WebCryptoSha256 = void 0; +var tslib_1 = require("tslib"); +tslib_1.__exportStar(require("./crossPlatformSha256"), exports); +var webCryptoSha256_1 = require("./webCryptoSha256"); +Object.defineProperty(exports, "WebCryptoSha256", { enumerable: true, get: function () { return 
webCryptoSha256_1.Sha256; } }); +//# sourceMappingURL=index.js.map \ No newline at end of file diff --git a/amplify/functions/downloadDocument/node_modules/@aws-crypto/sha256-browser/build/main/index.js.map b/amplify/functions/downloadDocument/node_modules/@aws-crypto/sha256-browser/build/main/index.js.map new file mode 100644 index 0000000..64b19eb --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@aws-crypto/sha256-browser/build/main/index.js.map @@ -0,0 +1 @@ +{"version":3,"file":"index.js","sourceRoot":"","sources":["../../src/index.ts"],"names":[],"mappings":";;;;AAAA,gEAAsC;AACtC,qDAA8D;AAArD,kHAAA,MAAM,OAAmB"} \ No newline at end of file diff --git a/amplify/functions/downloadDocument/node_modules/@aws-crypto/sha256-browser/build/main/isEmptyData.d.ts b/amplify/functions/downloadDocument/node_modules/@aws-crypto/sha256-browser/build/main/isEmptyData.d.ts new file mode 100644 index 0000000..43ae4a7 --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@aws-crypto/sha256-browser/build/main/isEmptyData.d.ts @@ -0,0 +1,2 @@ +import { SourceData } from "@aws-sdk/types"; +export declare function isEmptyData(data: SourceData): boolean; diff --git a/amplify/functions/downloadDocument/node_modules/@aws-crypto/sha256-browser/build/main/isEmptyData.js b/amplify/functions/downloadDocument/node_modules/@aws-crypto/sha256-browser/build/main/isEmptyData.js new file mode 100644 index 0000000..fe91548 --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@aws-crypto/sha256-browser/build/main/isEmptyData.js @@ -0,0 +1,11 @@ +"use strict"; +Object.defineProperty(exports, "__esModule", { value: true }); +exports.isEmptyData = void 0; +function isEmptyData(data) { + if (typeof data === "string") { + return data.length === 0; + } + return data.byteLength === 0; +} +exports.isEmptyData = isEmptyData; +//# sourceMappingURL=isEmptyData.js.map \ No newline at end of file diff --git 
a/amplify/functions/downloadDocument/node_modules/@aws-crypto/sha256-browser/build/main/isEmptyData.js.map b/amplify/functions/downloadDocument/node_modules/@aws-crypto/sha256-browser/build/main/isEmptyData.js.map new file mode 100644 index 0000000..20ccfd6 --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@aws-crypto/sha256-browser/build/main/isEmptyData.js.map @@ -0,0 +1 @@ +{"version":3,"file":"isEmptyData.js","sourceRoot":"","sources":["../../src/isEmptyData.ts"],"names":[],"mappings":";;;AAEA,SAAgB,WAAW,CAAC,IAAgB;IAC1C,IAAI,OAAO,IAAI,KAAK,QAAQ,EAAE;QAC5B,OAAO,IAAI,CAAC,MAAM,KAAK,CAAC,CAAC;KAC1B;IAED,OAAO,IAAI,CAAC,UAAU,KAAK,CAAC,CAAC;AAC/B,CAAC;AAND,kCAMC"} \ No newline at end of file diff --git a/amplify/functions/downloadDocument/node_modules/@aws-crypto/sha256-browser/build/main/webCryptoSha256.d.ts b/amplify/functions/downloadDocument/node_modules/@aws-crypto/sha256-browser/build/main/webCryptoSha256.d.ts new file mode 100644 index 0000000..ec0e214 --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@aws-crypto/sha256-browser/build/main/webCryptoSha256.d.ts @@ -0,0 +1,10 @@ +import { Checksum, SourceData } from "@aws-sdk/types"; +export declare class Sha256 implements Checksum { + private readonly secret?; + private key; + private toHash; + constructor(secret?: SourceData); + update(data: SourceData): void; + digest(): Promise; + reset(): void; +} diff --git a/amplify/functions/downloadDocument/node_modules/@aws-crypto/sha256-browser/build/main/webCryptoSha256.js b/amplify/functions/downloadDocument/node_modules/@aws-crypto/sha256-browser/build/main/webCryptoSha256.js new file mode 100644 index 0000000..778fdd9 --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@aws-crypto/sha256-browser/build/main/webCryptoSha256.js @@ -0,0 +1,56 @@ +"use strict"; +Object.defineProperty(exports, "__esModule", { value: true }); +exports.Sha256 = void 0; +var util_1 = require("@aws-crypto/util"); +var constants_1 = 
require("./constants"); +var util_locate_window_1 = require("@aws-sdk/util-locate-window"); +var Sha256 = /** @class */ (function () { + function Sha256(secret) { + this.toHash = new Uint8Array(0); + this.secret = secret; + this.reset(); + } + Sha256.prototype.update = function (data) { + if ((0, util_1.isEmptyData)(data)) { + return; + } + var update = (0, util_1.convertToBuffer)(data); + var typedArray = new Uint8Array(this.toHash.byteLength + update.byteLength); + typedArray.set(this.toHash, 0); + typedArray.set(update, this.toHash.byteLength); + this.toHash = typedArray; + }; + Sha256.prototype.digest = function () { + var _this = this; + if (this.key) { + return this.key.then(function (key) { + return (0, util_locate_window_1.locateWindow)() + .crypto.subtle.sign(constants_1.SHA_256_HMAC_ALGO, key, _this.toHash) + .then(function (data) { return new Uint8Array(data); }); + }); + } + if ((0, util_1.isEmptyData)(this.toHash)) { + return Promise.resolve(constants_1.EMPTY_DATA_SHA_256); + } + return Promise.resolve() + .then(function () { + return (0, util_locate_window_1.locateWindow)().crypto.subtle.digest(constants_1.SHA_256_HASH, _this.toHash); + }) + .then(function (data) { return Promise.resolve(new Uint8Array(data)); }); + }; + Sha256.prototype.reset = function () { + var _this = this; + this.toHash = new Uint8Array(0); + if (this.secret && this.secret !== void 0) { + this.key = new Promise(function (resolve, reject) { + (0, util_locate_window_1.locateWindow)() + .crypto.subtle.importKey("raw", (0, util_1.convertToBuffer)(_this.secret), constants_1.SHA_256_HMAC_ALGO, false, ["sign"]) + .then(resolve, reject); + }); + this.key.catch(function () { }); + } + }; + return Sha256; +}()); +exports.Sha256 = Sha256; +//# sourceMappingURL=webCryptoSha256.js.map \ No newline at end of file diff --git a/amplify/functions/downloadDocument/node_modules/@aws-crypto/sha256-browser/build/main/webCryptoSha256.js.map 
b/amplify/functions/downloadDocument/node_modules/@aws-crypto/sha256-browser/build/main/webCryptoSha256.js.map new file mode 100644 index 0000000..7b55a07 --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@aws-crypto/sha256-browser/build/main/webCryptoSha256.js.map @@ -0,0 +1 @@ +{"version":3,"file":"webCryptoSha256.js","sourceRoot":"","sources":["../../src/webCryptoSha256.ts"],"names":[],"mappings":";;;AACA,yCAAgE;AAChE,yCAIqB;AACrB,kEAA2D;AAE3D;IAKE,gBAAY,MAAmB;QAFvB,WAAM,GAAe,IAAI,UAAU,CAAC,CAAC,CAAC,CAAC;QAG7C,IAAI,CAAC,MAAM,GAAG,MAAM,CAAC;QACrB,IAAI,CAAC,KAAK,EAAE,CAAC;IACf,CAAC;IAED,uBAAM,GAAN,UAAO,IAAgB;QACrB,IAAI,IAAA,kBAAW,EAAC,IAAI,CAAC,EAAE;YACrB,OAAO;SACR;QAED,IAAM,MAAM,GAAG,IAAA,sBAAe,EAAC,IAAI,CAAC,CAAC;QACrC,IAAM,UAAU,GAAG,IAAI,UAAU,CAC/B,IAAI,CAAC,MAAM,CAAC,UAAU,GAAG,MAAM,CAAC,UAAU,CAC3C,CAAC;QACF,UAAU,CAAC,GAAG,CAAC,IAAI,CAAC,MAAM,EAAE,CAAC,CAAC,CAAC;QAC/B,UAAU,CAAC,GAAG,CAAC,MAAM,EAAE,IAAI,CAAC,MAAM,CAAC,UAAU,CAAC,CAAC;QAC/C,IAAI,CAAC,MAAM,GAAG,UAAU,CAAC;IAC3B,CAAC;IAED,uBAAM,GAAN;QAAA,iBAkBC;QAjBC,IAAI,IAAI,CAAC,GAAG,EAAE;YACZ,OAAO,IAAI,CAAC,GAAG,CAAC,IAAI,CAAC,UAAC,GAAG;gBACvB,OAAA,IAAA,iCAAY,GAAE;qBACX,MAAM,CAAC,MAAM,CAAC,IAAI,CAAC,6BAAiB,EAAE,GAAG,EAAE,KAAI,CAAC,MAAM,CAAC;qBACvD,IAAI,CAAC,UAAC,IAAI,IAAK,OAAA,IAAI,UAAU,CAAC,IAAI,CAAC,EAApB,CAAoB,CAAC;YAFvC,CAEuC,CACxC,CAAC;SACH;QAED,IAAI,IAAA,kBAAW,EAAC,IAAI,CAAC,MAAM,CAAC,EAAE;YAC5B,OAAO,OAAO,CAAC,OAAO,CAAC,8BAAkB,CAAC,CAAC;SAC5C;QAED,OAAO,OAAO,CAAC,OAAO,EAAE;aACrB,IAAI,CAAC;YACJ,OAAA,IAAA,iCAAY,GAAE,CAAC,MAAM,CAAC,MAAM,CAAC,MAAM,CAAC,wBAAY,EAAE,KAAI,CAAC,MAAM,CAAC;QAA9D,CAA8D,CAC/D;aACA,IAAI,CAAC,UAAC,IAAI,IAAK,OAAA,OAAO,CAAC,OAAO,CAAC,IAAI,UAAU,CAAC,IAAI,CAAC,CAAC,EAArC,CAAqC,CAAC,CAAC;IAC3D,CAAC;IAED,sBAAK,GAAL;QAAA,iBAgBC;QAfC,IAAI,CAAC,MAAM,GAAG,IAAI,UAAU,CAAC,CAAC,CAAC,CAAC;QAChC,IAAI,IAAI,CAAC,MAAM,IAAI,IAAI,CAAC,MAAM,KAAK,KAAK,CAAC,EAAE;YACzC,IAAI,CAAC,GAAG,GAAG,IAAI,OAAO,CAAC,UAAC,OAAO,EAAE,MAAM;gBACrC,IAAA,iCAAY,GAAE;qBACT,MAAM,CAAC,MAAM,CAAC,SAAS,CACxB,KAAK,EACL,IAA
A,sBAAe,EAAC,KAAI,CAAC,MAAoB,CAAC,EAC1C,6BAAiB,EACjB,KAAK,EACL,CAAC,MAAM,CAAC,CACX;qBACI,IAAI,CAAC,OAAO,EAAE,MAAM,CAAC,CAAC;YAC7B,CAAC,CAAC,CAAC;YACH,IAAI,CAAC,GAAG,CAAC,KAAK,CAAC,cAAO,CAAC,CAAC,CAAC;SAC1B;IACH,CAAC;IACH,aAAC;AAAD,CAAC,AA7DD,IA6DC;AA7DY,wBAAM"} \ No newline at end of file diff --git a/amplify/functions/downloadDocument/node_modules/@aws-crypto/sha256-browser/build/module/constants.d.ts b/amplify/functions/downloadDocument/node_modules/@aws-crypto/sha256-browser/build/module/constants.d.ts new file mode 100644 index 0000000..fe8def7 --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@aws-crypto/sha256-browser/build/module/constants.d.ts @@ -0,0 +1,10 @@ +export declare const SHA_256_HASH: { + name: "SHA-256"; +}; +export declare const SHA_256_HMAC_ALGO: { + name: "HMAC"; + hash: { + name: "SHA-256"; + }; +}; +export declare const EMPTY_DATA_SHA_256: Uint8Array; diff --git a/amplify/functions/downloadDocument/node_modules/@aws-crypto/sha256-browser/build/module/constants.js b/amplify/functions/downloadDocument/node_modules/@aws-crypto/sha256-browser/build/module/constants.js new file mode 100644 index 0000000..7fb1613 --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@aws-crypto/sha256-browser/build/module/constants.js @@ -0,0 +1,40 @@ +export var SHA_256_HASH = { name: "SHA-256" }; +export var SHA_256_HMAC_ALGO = { + name: "HMAC", + hash: SHA_256_HASH +}; +export var EMPTY_DATA_SHA_256 = new Uint8Array([ + 227, + 176, + 196, + 66, + 152, + 252, + 28, + 20, + 154, + 251, + 244, + 200, + 153, + 111, + 185, + 36, + 39, + 174, + 65, + 228, + 100, + 155, + 147, + 76, + 164, + 149, + 153, + 27, + 120, + 82, + 184, + 85 +]); +//# sourceMappingURL=constants.js.map \ No newline at end of file diff --git a/amplify/functions/downloadDocument/node_modules/@aws-crypto/sha256-browser/build/module/constants.js.map b/amplify/functions/downloadDocument/node_modules/@aws-crypto/sha256-browser/build/module/constants.js.map new file 
mode 100644 index 0000000..09ed9a3 --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@aws-crypto/sha256-browser/build/module/constants.js.map @@ -0,0 +1 @@ +{"version":3,"file":"constants.js","sourceRoot":"","sources":["../../src/constants.ts"],"names":[],"mappings":"AAAA,MAAM,CAAC,IAAM,YAAY,GAAwB,EAAE,IAAI,EAAE,SAAS,EAAE,CAAC;AAErE,MAAM,CAAC,IAAM,iBAAiB,GAAgD;IAC5E,IAAI,EAAE,MAAM;IACZ,IAAI,EAAE,YAAY;CACnB,CAAC;AAEF,MAAM,CAAC,IAAM,kBAAkB,GAAG,IAAI,UAAU,CAAC;IAC/C,GAAG;IACH,GAAG;IACH,GAAG;IACH,EAAE;IACF,GAAG;IACH,GAAG;IACH,EAAE;IACF,EAAE;IACF,GAAG;IACH,GAAG;IACH,GAAG;IACH,GAAG;IACH,GAAG;IACH,GAAG;IACH,GAAG;IACH,EAAE;IACF,EAAE;IACF,GAAG;IACH,EAAE;IACF,GAAG;IACH,GAAG;IACH,GAAG;IACH,GAAG;IACH,EAAE;IACF,GAAG;IACH,GAAG;IACH,GAAG;IACH,EAAE;IACF,GAAG;IACH,EAAE;IACF,GAAG;IACH,EAAE;CACH,CAAC,CAAC"} \ No newline at end of file diff --git a/amplify/functions/downloadDocument/node_modules/@aws-crypto/sha256-browser/build/module/crossPlatformSha256.d.ts b/amplify/functions/downloadDocument/node_modules/@aws-crypto/sha256-browser/build/module/crossPlatformSha256.d.ts new file mode 100644 index 0000000..055d3ef --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@aws-crypto/sha256-browser/build/module/crossPlatformSha256.d.ts @@ -0,0 +1,8 @@ +import { Checksum, SourceData } from "@aws-sdk/types"; +export declare class Sha256 implements Checksum { + private hash; + constructor(secret?: SourceData); + update(data: SourceData, encoding?: "utf8" | "ascii" | "latin1"): void; + digest(): Promise; + reset(): void; +} diff --git a/amplify/functions/downloadDocument/node_modules/@aws-crypto/sha256-browser/build/module/crossPlatformSha256.js b/amplify/functions/downloadDocument/node_modules/@aws-crypto/sha256-browser/build/module/crossPlatformSha256.js new file mode 100644 index 0000000..5ae82ea --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@aws-crypto/sha256-browser/build/module/crossPlatformSha256.js @@ -0,0 +1,27 @@ +import { Sha256 
as WebCryptoSha256 } from "./webCryptoSha256"; +import { Sha256 as JsSha256 } from "@aws-crypto/sha256-js"; +import { supportsWebCrypto } from "@aws-crypto/supports-web-crypto"; +import { locateWindow } from "@aws-sdk/util-locate-window"; +import { convertToBuffer } from "@aws-crypto/util"; +var Sha256 = /** @class */ (function () { + function Sha256(secret) { + if (supportsWebCrypto(locateWindow())) { + this.hash = new WebCryptoSha256(secret); + } + else { + this.hash = new JsSha256(secret); + } + } + Sha256.prototype.update = function (data, encoding) { + this.hash.update(convertToBuffer(data)); + }; + Sha256.prototype.digest = function () { + return this.hash.digest(); + }; + Sha256.prototype.reset = function () { + this.hash.reset(); + }; + return Sha256; +}()); +export { Sha256 }; +//# sourceMappingURL=crossPlatformSha256.js.map \ No newline at end of file diff --git a/amplify/functions/downloadDocument/node_modules/@aws-crypto/sha256-browser/build/module/crossPlatformSha256.js.map b/amplify/functions/downloadDocument/node_modules/@aws-crypto/sha256-browser/build/module/crossPlatformSha256.js.map new file mode 100644 index 0000000..4a83c57 --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@aws-crypto/sha256-browser/build/module/crossPlatformSha256.js.map @@ -0,0 +1 @@ 
+{"version":3,"file":"crossPlatformSha256.js","sourceRoot":"","sources":["../../src/crossPlatformSha256.ts"],"names":[],"mappings":"AAAA,OAAO,EAAE,MAAM,IAAI,eAAe,EAAE,MAAM,mBAAmB,CAAC;AAC9D,OAAO,EAAE,MAAM,IAAI,QAAQ,EAAE,MAAM,uBAAuB,CAAC;AAE3D,OAAO,EAAE,iBAAiB,EAAE,MAAM,iCAAiC,CAAC;AACpE,OAAO,EAAE,YAAY,EAAE,MAAM,6BAA6B,CAAC;AAC3D,OAAO,EAAE,eAAe,EAAE,MAAM,kBAAkB,CAAC;AAEnD;IAGE,gBAAY,MAAmB;QAC7B,IAAI,iBAAiB,CAAC,YAAY,EAAE,CAAC,EAAE;YACrC,IAAI,CAAC,IAAI,GAAG,IAAI,eAAe,CAAC,MAAM,CAAC,CAAC;SACzC;aAAM;YACL,IAAI,CAAC,IAAI,GAAG,IAAI,QAAQ,CAAC,MAAM,CAAC,CAAC;SAClC;IACH,CAAC;IAED,uBAAM,GAAN,UAAO,IAAgB,EAAE,QAAsC;QAC7D,IAAI,CAAC,IAAI,CAAC,MAAM,CAAC,eAAe,CAAC,IAAI,CAAC,CAAC,CAAC;IAC1C,CAAC;IAED,uBAAM,GAAN;QACE,OAAO,IAAI,CAAC,IAAI,CAAC,MAAM,EAAE,CAAC;IAC5B,CAAC;IAED,sBAAK,GAAL;QACE,IAAI,CAAC,IAAI,CAAC,KAAK,EAAE,CAAC;IACpB,CAAC;IACH,aAAC;AAAD,CAAC,AAtBD,IAsBC"} \ No newline at end of file diff --git a/amplify/functions/downloadDocument/node_modules/@aws-crypto/sha256-browser/build/module/index.d.ts b/amplify/functions/downloadDocument/node_modules/@aws-crypto/sha256-browser/build/module/index.d.ts new file mode 100644 index 0000000..60ab397 --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@aws-crypto/sha256-browser/build/module/index.d.ts @@ -0,0 +1,2 @@ +export * from "./crossPlatformSha256"; +export { Sha256 as WebCryptoSha256 } from "./webCryptoSha256"; diff --git a/amplify/functions/downloadDocument/node_modules/@aws-crypto/sha256-browser/build/module/index.js b/amplify/functions/downloadDocument/node_modules/@aws-crypto/sha256-browser/build/module/index.js new file mode 100644 index 0000000..94ffb63 --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@aws-crypto/sha256-browser/build/module/index.js @@ -0,0 +1,3 @@ +export * from "./crossPlatformSha256"; +export { Sha256 as WebCryptoSha256 } from "./webCryptoSha256"; +//# sourceMappingURL=index.js.map \ No newline at end of file diff --git 
a/amplify/functions/downloadDocument/node_modules/@aws-crypto/sha256-browser/build/module/index.js.map b/amplify/functions/downloadDocument/node_modules/@aws-crypto/sha256-browser/build/module/index.js.map new file mode 100644 index 0000000..01d20bc --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@aws-crypto/sha256-browser/build/module/index.js.map @@ -0,0 +1 @@ +{"version":3,"file":"index.js","sourceRoot":"","sources":["../../src/index.ts"],"names":[],"mappings":"AAAA,cAAc,uBAAuB,CAAC;AACtC,OAAO,EAAE,MAAM,IAAI,eAAe,EAAE,MAAM,mBAAmB,CAAC"} \ No newline at end of file diff --git a/amplify/functions/downloadDocument/node_modules/@aws-crypto/sha256-browser/build/module/isEmptyData.d.ts b/amplify/functions/downloadDocument/node_modules/@aws-crypto/sha256-browser/build/module/isEmptyData.d.ts new file mode 100644 index 0000000..43ae4a7 --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@aws-crypto/sha256-browser/build/module/isEmptyData.d.ts @@ -0,0 +1,2 @@ +import { SourceData } from "@aws-sdk/types"; +export declare function isEmptyData(data: SourceData): boolean; diff --git a/amplify/functions/downloadDocument/node_modules/@aws-crypto/sha256-browser/build/module/isEmptyData.js b/amplify/functions/downloadDocument/node_modules/@aws-crypto/sha256-browser/build/module/isEmptyData.js new file mode 100644 index 0000000..4f31a61 --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@aws-crypto/sha256-browser/build/module/isEmptyData.js @@ -0,0 +1,7 @@ +export function isEmptyData(data) { + if (typeof data === "string") { + return data.length === 0; + } + return data.byteLength === 0; +} +//# sourceMappingURL=isEmptyData.js.map \ No newline at end of file diff --git a/amplify/functions/downloadDocument/node_modules/@aws-crypto/sha256-browser/build/module/isEmptyData.js.map b/amplify/functions/downloadDocument/node_modules/@aws-crypto/sha256-browser/build/module/isEmptyData.js.map new file mode 100644 index 0000000..776ce2b 
--- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@aws-crypto/sha256-browser/build/module/isEmptyData.js.map @@ -0,0 +1 @@ +{"version":3,"file":"isEmptyData.js","sourceRoot":"","sources":["../../src/isEmptyData.ts"],"names":[],"mappings":"AAEA,MAAM,UAAU,WAAW,CAAC,IAAgB;IAC1C,IAAI,OAAO,IAAI,KAAK,QAAQ,EAAE;QAC5B,OAAO,IAAI,CAAC,MAAM,KAAK,CAAC,CAAC;KAC1B;IAED,OAAO,IAAI,CAAC,UAAU,KAAK,CAAC,CAAC;AAC/B,CAAC"} \ No newline at end of file diff --git a/amplify/functions/downloadDocument/node_modules/@aws-crypto/sha256-browser/build/module/webCryptoSha256.d.ts b/amplify/functions/downloadDocument/node_modules/@aws-crypto/sha256-browser/build/module/webCryptoSha256.d.ts new file mode 100644 index 0000000..ec0e214 --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@aws-crypto/sha256-browser/build/module/webCryptoSha256.d.ts @@ -0,0 +1,10 @@ +import { Checksum, SourceData } from "@aws-sdk/types"; +export declare class Sha256 implements Checksum { + private readonly secret?; + private key; + private toHash; + constructor(secret?: SourceData); + update(data: SourceData): void; + digest(): Promise; + reset(): void; +} diff --git a/amplify/functions/downloadDocument/node_modules/@aws-crypto/sha256-browser/build/module/webCryptoSha256.js b/amplify/functions/downloadDocument/node_modules/@aws-crypto/sha256-browser/build/module/webCryptoSha256.js new file mode 100644 index 0000000..d12acd0 --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@aws-crypto/sha256-browser/build/module/webCryptoSha256.js @@ -0,0 +1,53 @@ +import { isEmptyData, convertToBuffer } from "@aws-crypto/util"; +import { EMPTY_DATA_SHA_256, SHA_256_HASH, SHA_256_HMAC_ALGO, } from "./constants"; +import { locateWindow } from "@aws-sdk/util-locate-window"; +var Sha256 = /** @class */ (function () { + function Sha256(secret) { + this.toHash = new Uint8Array(0); + this.secret = secret; + this.reset(); + } + Sha256.prototype.update = function (data) { + if 
(isEmptyData(data)) { + return; + } + var update = convertToBuffer(data); + var typedArray = new Uint8Array(this.toHash.byteLength + update.byteLength); + typedArray.set(this.toHash, 0); + typedArray.set(update, this.toHash.byteLength); + this.toHash = typedArray; + }; + Sha256.prototype.digest = function () { + var _this = this; + if (this.key) { + return this.key.then(function (key) { + return locateWindow() + .crypto.subtle.sign(SHA_256_HMAC_ALGO, key, _this.toHash) + .then(function (data) { return new Uint8Array(data); }); + }); + } + if (isEmptyData(this.toHash)) { + return Promise.resolve(EMPTY_DATA_SHA_256); + } + return Promise.resolve() + .then(function () { + return locateWindow().crypto.subtle.digest(SHA_256_HASH, _this.toHash); + }) + .then(function (data) { return Promise.resolve(new Uint8Array(data)); }); + }; + Sha256.prototype.reset = function () { + var _this = this; + this.toHash = new Uint8Array(0); + if (this.secret && this.secret !== void 0) { + this.key = new Promise(function (resolve, reject) { + locateWindow() + .crypto.subtle.importKey("raw", convertToBuffer(_this.secret), SHA_256_HMAC_ALGO, false, ["sign"]) + .then(resolve, reject); + }); + this.key.catch(function () { }); + } + }; + return Sha256; +}()); +export { Sha256 }; +//# sourceMappingURL=webCryptoSha256.js.map \ No newline at end of file diff --git a/amplify/functions/downloadDocument/node_modules/@aws-crypto/sha256-browser/build/module/webCryptoSha256.js.map b/amplify/functions/downloadDocument/node_modules/@aws-crypto/sha256-browser/build/module/webCryptoSha256.js.map new file mode 100644 index 0000000..c7b31c0 --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@aws-crypto/sha256-browser/build/module/webCryptoSha256.js.map @@ -0,0 +1 @@ 
+{"version":3,"file":"webCryptoSha256.js","sourceRoot":"","sources":["../../src/webCryptoSha256.ts"],"names":[],"mappings":"AACA,OAAO,EAAE,WAAW,EAAE,eAAe,EAAE,MAAM,kBAAkB,CAAC;AAChE,OAAO,EACL,kBAAkB,EAClB,YAAY,EACZ,iBAAiB,GAClB,MAAM,aAAa,CAAC;AACrB,OAAO,EAAE,YAAY,EAAE,MAAM,6BAA6B,CAAC;AAE3D;IAKE,gBAAY,MAAmB;QAFvB,WAAM,GAAe,IAAI,UAAU,CAAC,CAAC,CAAC,CAAC;QAG7C,IAAI,CAAC,MAAM,GAAG,MAAM,CAAC;QACrB,IAAI,CAAC,KAAK,EAAE,CAAC;IACf,CAAC;IAED,uBAAM,GAAN,UAAO,IAAgB;QACrB,IAAI,WAAW,CAAC,IAAI,CAAC,EAAE;YACrB,OAAO;SACR;QAED,IAAM,MAAM,GAAG,eAAe,CAAC,IAAI,CAAC,CAAC;QACrC,IAAM,UAAU,GAAG,IAAI,UAAU,CAC/B,IAAI,CAAC,MAAM,CAAC,UAAU,GAAG,MAAM,CAAC,UAAU,CAC3C,CAAC;QACF,UAAU,CAAC,GAAG,CAAC,IAAI,CAAC,MAAM,EAAE,CAAC,CAAC,CAAC;QAC/B,UAAU,CAAC,GAAG,CAAC,MAAM,EAAE,IAAI,CAAC,MAAM,CAAC,UAAU,CAAC,CAAC;QAC/C,IAAI,CAAC,MAAM,GAAG,UAAU,CAAC;IAC3B,CAAC;IAED,uBAAM,GAAN;QAAA,iBAkBC;QAjBC,IAAI,IAAI,CAAC,GAAG,EAAE;YACZ,OAAO,IAAI,CAAC,GAAG,CAAC,IAAI,CAAC,UAAC,GAAG;gBACvB,OAAA,YAAY,EAAE;qBACX,MAAM,CAAC,MAAM,CAAC,IAAI,CAAC,iBAAiB,EAAE,GAAG,EAAE,KAAI,CAAC,MAAM,CAAC;qBACvD,IAAI,CAAC,UAAC,IAAI,IAAK,OAAA,IAAI,UAAU,CAAC,IAAI,CAAC,EAApB,CAAoB,CAAC;YAFvC,CAEuC,CACxC,CAAC;SACH;QAED,IAAI,WAAW,CAAC,IAAI,CAAC,MAAM,CAAC,EAAE;YAC5B,OAAO,OAAO,CAAC,OAAO,CAAC,kBAAkB,CAAC,CAAC;SAC5C;QAED,OAAO,OAAO,CAAC,OAAO,EAAE;aACrB,IAAI,CAAC;YACJ,OAAA,YAAY,EAAE,CAAC,MAAM,CAAC,MAAM,CAAC,MAAM,CAAC,YAAY,EAAE,KAAI,CAAC,MAAM,CAAC;QAA9D,CAA8D,CAC/D;aACA,IAAI,CAAC,UAAC,IAAI,IAAK,OAAA,OAAO,CAAC,OAAO,CAAC,IAAI,UAAU,CAAC,IAAI,CAAC,CAAC,EAArC,CAAqC,CAAC,CAAC;IAC3D,CAAC;IAED,sBAAK,GAAL;QAAA,iBAgBC;QAfC,IAAI,CAAC,MAAM,GAAG,IAAI,UAAU,CAAC,CAAC,CAAC,CAAC;QAChC,IAAI,IAAI,CAAC,MAAM,IAAI,IAAI,CAAC,MAAM,KAAK,KAAK,CAAC,EAAE;YACzC,IAAI,CAAC,GAAG,GAAG,IAAI,OAAO,CAAC,UAAC,OAAO,EAAE,MAAM;gBACrC,YAAY,EAAE;qBACT,MAAM,CAAC,MAAM,CAAC,SAAS,CACxB,KAAK,EACL,eAAe,CAAC,KAAI,CAAC,MAAoB,CAAC,EAC1C,iBAAiB,EACjB,KAAK,EACL,CAAC,MAAM,CAAC,CACX;qBACI,IAAI,CAAC,OAAO,EAAE,MAAM,CAAC,CAAC;YAC7B,CAAC,CAAC,CAAC;YACH,IAAI,CAAC,GAAG,CAAC,KAAK,CAAC,cAAO,CAAC,CAAC,CAAC;SAC1B;IACH,CAAC;IACH
,aAAC;AAAD,CAAC,AA7DD,IA6DC"} \ No newline at end of file diff --git a/amplify/functions/downloadDocument/node_modules/@aws-crypto/sha256-browser/node_modules/@smithy/is-array-buffer/LICENSE b/amplify/functions/downloadDocument/node_modules/@aws-crypto/sha256-browser/node_modules/@smithy/is-array-buffer/LICENSE new file mode 100644 index 0000000..7b6491b --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@aws-crypto/sha256-browser/node_modules/@smithy/is-array-buffer/LICENSE @@ -0,0 +1,201 @@ +Apache License + Version 2.0, January 2004 + http://www.apache.org/licenses/ + + TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION + + 1. Definitions. + + "License" shall mean the terms and conditions for use, reproduction, + and distribution as defined by Sections 1 through 9 of this document. + + "Licensor" shall mean the copyright owner or entity authorized by + the copyright owner that is granting the License. + + "Legal Entity" shall mean the union of the acting entity and all + other entities that control, are controlled by, or are under common + control with that entity. For the purposes of this definition, + "control" means (i) the power, direct or indirect, to cause the + direction or management of such entity, whether by contract or + otherwise, or (ii) ownership of fifty percent (50%) or more of the + outstanding shares, or (iii) beneficial ownership of such entity. + + "You" (or "Your") shall mean an individual or Legal Entity + exercising permissions granted by this License. + + "Source" form shall mean the preferred form for making modifications, + including but not limited to software source code, documentation + source, and configuration files. + + "Object" form shall mean any form resulting from mechanical + transformation or translation of a Source form, including but + not limited to compiled object code, generated documentation, + and conversions to other media types. 
+ + "Work" shall mean the work of authorship, whether in Source or + Object form, made available under the License, as indicated by a + copyright notice that is included in or attached to the work + (an example is provided in the Appendix below). + + "Derivative Works" shall mean any work, whether in Source or Object + form, that is based on (or derived from) the Work and for which the + editorial revisions, annotations, elaborations, or other modifications + represent, as a whole, an original work of authorship. For the purposes + of this License, Derivative Works shall not include works that remain + separable from, or merely link (or bind by name) to the interfaces of, + the Work and Derivative Works thereof. + + "Contribution" shall mean any work of authorship, including + the original version of the Work and any modifications or additions + to that Work or Derivative Works thereof, that is intentionally + submitted to Licensor for inclusion in the Work by the copyright owner + or by an individual or Legal Entity authorized to submit on behalf of + the copyright owner. For the purposes of this definition, "submitted" + means any form of electronic, verbal, or written communication sent + to the Licensor or its representatives, including but not limited to + communication on electronic mailing lists, source code control systems, + and issue tracking systems that are managed by, or on behalf of, the + Licensor for the purpose of discussing and improving the Work, but + excluding communication that is conspicuously marked or otherwise + designated in writing by the copyright owner as "Not a Contribution." + + "Contributor" shall mean Licensor and any individual or Legal Entity + on behalf of whom a Contribution has been received by Licensor and + subsequently incorporated within the Work. + + 2. Grant of Copyright License. 
Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + copyright license to reproduce, prepare Derivative Works of, + publicly display, publicly perform, sublicense, and distribute the + Work and such Derivative Works in Source or Object form. + + 3. Grant of Patent License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + (except as stated in this section) patent license to make, have made, + use, offer to sell, sell, import, and otherwise transfer the Work, + where such license applies only to those patent claims licensable + by such Contributor that are necessarily infringed by their + Contribution(s) alone or by combination of their Contribution(s) + with the Work to which such Contribution(s) was submitted. If You + institute patent litigation against any entity (including a + cross-claim or counterclaim in a lawsuit) alleging that the Work + or a Contribution incorporated within the Work constitutes direct + or contributory patent infringement, then any patent licenses + granted to You under this License for that Work shall terminate + as of the date such litigation is filed. + + 4. Redistribution. 
You may reproduce and distribute copies of the + Work or Derivative Works thereof in any medium, with or without + modifications, and in Source or Object form, provided that You + meet the following conditions: + + (a) You must give any other recipients of the Work or + Derivative Works a copy of this License; and + + (b) You must cause any modified files to carry prominent notices + stating that You changed the files; and + + (c) You must retain, in the Source form of any Derivative Works + that You distribute, all copyright, patent, trademark, and + attribution notices from the Source form of the Work, + excluding those notices that do not pertain to any part of + the Derivative Works; and + + (d) If the Work includes a "NOTICE" text file as part of its + distribution, then any Derivative Works that You distribute must + include a readable copy of the attribution notices contained + within such NOTICE file, excluding those notices that do not + pertain to any part of the Derivative Works, in at least one + of the following places: within a NOTICE text file distributed + as part of the Derivative Works; within the Source form or + documentation, if provided along with the Derivative Works; or, + within a display generated by the Derivative Works, if and + wherever such third-party notices normally appear. The contents + of the NOTICE file are for informational purposes only and + do not modify the License. You may add Your own attribution + notices within Derivative Works that You distribute, alongside + or as an addendum to the NOTICE text from the Work, provided + that such additional attribution notices cannot be construed + as modifying the License. 
+ + You may add Your own copyright statement to Your modifications and + may provide additional or different license terms and conditions + for use, reproduction, or distribution of Your modifications, or + for any such Derivative Works as a whole, provided Your use, + reproduction, and distribution of the Work otherwise complies with + the conditions stated in this License. + + 5. Submission of Contributions. Unless You explicitly state otherwise, + any Contribution intentionally submitted for inclusion in the Work + by You to the Licensor shall be under the terms and conditions of + this License, without any additional terms or conditions. + Notwithstanding the above, nothing herein shall supersede or modify + the terms of any separate license agreement you may have executed + with Licensor regarding such Contributions. + + 6. Trademarks. This License does not grant permission to use the trade + names, trademarks, service marks, or product names of the Licensor, + except as required for reasonable and customary use in describing the + origin of the Work and reproducing the content of the NOTICE file. + + 7. Disclaimer of Warranty. Unless required by applicable law or + agreed to in writing, Licensor provides the Work (and each + Contributor provides its Contributions) on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or + implied, including, without limitation, any warranties or conditions + of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A + PARTICULAR PURPOSE. You are solely responsible for determining the + appropriateness of using or redistributing the Work and assume any + risks associated with Your exercise of permissions under this License. + + 8. Limitation of Liability. 
In no event and under no legal theory, + whether in tort (including negligence), contract, or otherwise, + unless required by applicable law (such as deliberate and grossly + negligent acts) or agreed to in writing, shall any Contributor be + liable to You for damages, including any direct, indirect, special, + incidental, or consequential damages of any character arising as a + result of this License or out of the use or inability to use the + Work (including but not limited to damages for loss of goodwill, + work stoppage, computer failure or malfunction, or any and all + other commercial damages or losses), even if such Contributor + has been advised of the possibility of such damages. + + 9. Accepting Warranty or Additional Liability. While redistributing + the Work or Derivative Works thereof, You may choose to offer, + and charge a fee for, acceptance of support, warranty, indemnity, + or other liability obligations and/or rights consistent with this + License. However, in accepting such obligations, You may act only + on Your own behalf and on Your sole responsibility, not on behalf + of any other Contributor, and only if You agree to indemnify, + defend, and hold each Contributor harmless for any liability + incurred by, or claims asserted against, such Contributor by reason + of your accepting any such warranty or additional liability. + + END OF TERMS AND CONDITIONS + + APPENDIX: How to apply the Apache License to your work. + + To apply the Apache License to your work, attach the following + boilerplate notice, with the fields enclosed by brackets "{}" + replaced with your own identifying information. (Don't include + the brackets!) The text should be enclosed in the appropriate + comment syntax for the file format. We also recommend that a + file or class name and description of purpose be included on the + same "printed page" as the copyright notice for easier + identification within third-party archives. + + Copyright 2018-2020 Amazon.com, Inc. 
or its affiliates. All Rights Reserved. + + Licensed under the Apache License, Version 2.0 (the "License"); + you may not use this file except in compliance with the License. + You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + + Unless required by applicable law or agreed to in writing, software + distributed under the License is distributed on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + See the License for the specific language governing permissions and + limitations under the License. \ No newline at end of file diff --git a/amplify/functions/downloadDocument/node_modules/@aws-crypto/sha256-browser/node_modules/@smithy/is-array-buffer/README.md b/amplify/functions/downloadDocument/node_modules/@aws-crypto/sha256-browser/node_modules/@smithy/is-array-buffer/README.md new file mode 100644 index 0000000..31853f2 --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@aws-crypto/sha256-browser/node_modules/@smithy/is-array-buffer/README.md @@ -0,0 +1,10 @@ +# @smithy/is-array-buffer + +[![NPM version](https://img.shields.io/npm/v/@smithy/is-array-buffer/latest.svg)](https://www.npmjs.com/package/@smithy/is-array-buffer) +[![NPM downloads](https://img.shields.io/npm/dm/@smithy/is-array-buffer.svg)](https://www.npmjs.com/package/@smithy/is-array-buffer) + +> An internal package + +## Usage + +You probably shouldn't, at least directly. 
diff --git a/amplify/functions/downloadDocument/node_modules/@aws-crypto/sha256-browser/node_modules/@smithy/is-array-buffer/dist-cjs/index.js b/amplify/functions/downloadDocument/node_modules/@aws-crypto/sha256-browser/node_modules/@smithy/is-array-buffer/dist-cjs/index.js new file mode 100644 index 0000000..5d792e7 --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@aws-crypto/sha256-browser/node_modules/@smithy/is-array-buffer/dist-cjs/index.js @@ -0,0 +1,32 @@ +var __defProp = Object.defineProperty; +var __getOwnPropDesc = Object.getOwnPropertyDescriptor; +var __getOwnPropNames = Object.getOwnPropertyNames; +var __hasOwnProp = Object.prototype.hasOwnProperty; +var __name = (target, value) => __defProp(target, "name", { value, configurable: true }); +var __export = (target, all) => { + for (var name in all) + __defProp(target, name, { get: all[name], enumerable: true }); +}; +var __copyProps = (to, from, except, desc) => { + if (from && typeof from === "object" || typeof from === "function") { + for (let key of __getOwnPropNames(from)) + if (!__hasOwnProp.call(to, key) && key !== except) + __defProp(to, key, { get: () => from[key], enumerable: !(desc = __getOwnPropDesc(from, key)) || desc.enumerable }); + } + return to; +}; +var __toCommonJS = (mod) => __copyProps(__defProp({}, "__esModule", { value: true }), mod); + +// src/index.ts +var src_exports = {}; +__export(src_exports, { + isArrayBuffer: () => isArrayBuffer +}); +module.exports = __toCommonJS(src_exports); +var isArrayBuffer = /* @__PURE__ */ __name((arg) => typeof ArrayBuffer === "function" && arg instanceof ArrayBuffer || Object.prototype.toString.call(arg) === "[object ArrayBuffer]", "isArrayBuffer"); +// Annotate the CommonJS export names for ESM import in node: + +0 && (module.exports = { + isArrayBuffer +}); + diff --git a/amplify/functions/downloadDocument/node_modules/@aws-crypto/sha256-browser/node_modules/@smithy/is-array-buffer/dist-es/index.js 
b/amplify/functions/downloadDocument/node_modules/@aws-crypto/sha256-browser/node_modules/@smithy/is-array-buffer/dist-es/index.js new file mode 100644 index 0000000..8096cca --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@aws-crypto/sha256-browser/node_modules/@smithy/is-array-buffer/dist-es/index.js @@ -0,0 +1,2 @@ +export const isArrayBuffer = (arg) => (typeof ArrayBuffer === "function" && arg instanceof ArrayBuffer) || + Object.prototype.toString.call(arg) === "[object ArrayBuffer]"; diff --git a/amplify/functions/downloadDocument/node_modules/@aws-crypto/sha256-browser/node_modules/@smithy/is-array-buffer/dist-types/index.d.ts b/amplify/functions/downloadDocument/node_modules/@aws-crypto/sha256-browser/node_modules/@smithy/is-array-buffer/dist-types/index.d.ts new file mode 100644 index 0000000..64f452e --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@aws-crypto/sha256-browser/node_modules/@smithy/is-array-buffer/dist-types/index.d.ts @@ -0,0 +1,4 @@ +/** + * @internal + */ +export declare const isArrayBuffer: (arg: any) => arg is ArrayBuffer; diff --git a/amplify/functions/downloadDocument/node_modules/@aws-crypto/sha256-browser/node_modules/@smithy/is-array-buffer/dist-types/ts3.4/index.d.ts b/amplify/functions/downloadDocument/node_modules/@aws-crypto/sha256-browser/node_modules/@smithy/is-array-buffer/dist-types/ts3.4/index.d.ts new file mode 100644 index 0000000..ca8fd6b --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@aws-crypto/sha256-browser/node_modules/@smithy/is-array-buffer/dist-types/ts3.4/index.d.ts @@ -0,0 +1,4 @@ +/** + * @internal + */ +export declare const isArrayBuffer: (arg: any) => arg is ArrayBuffer; diff --git a/amplify/functions/downloadDocument/node_modules/@aws-crypto/sha256-browser/node_modules/@smithy/is-array-buffer/package.json b/amplify/functions/downloadDocument/node_modules/@aws-crypto/sha256-browser/node_modules/@smithy/is-array-buffer/package.json new file mode 
100644 index 0000000..ed8affc --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@aws-crypto/sha256-browser/node_modules/@smithy/is-array-buffer/package.json @@ -0,0 +1,60 @@ +{ + "name": "@smithy/is-array-buffer", + "version": "2.2.0", + "description": "Provides a function for detecting if an argument is an ArrayBuffer", + "scripts": { + "build": "concurrently 'yarn:build:cjs' 'yarn:build:es' 'yarn:build:types && yarn build:types:downlevel'", + "build:cjs": "node ../../scripts/inline is-array-buffer", + "build:es": "yarn g:tsc -p tsconfig.es.json", + "build:types": "yarn g:tsc -p tsconfig.types.json", + "build:types:downlevel": "downlevel-dts dist-types dist-types/ts3.4", + "stage-release": "rimraf ./.release && yarn pack && mkdir ./.release && tar zxvf ./package.tgz --directory ./.release && rm ./package.tgz", + "clean": "rimraf ./dist-* && rimraf *.tsbuildinfo || exit 0", + "lint": "eslint -c ../../.eslintrc.js \"src/**/*.ts\"", + "format": "prettier --config ../../prettier.config.js --ignore-path ../.prettierignore --write \"**/*.{ts,md,json}\"", + "test": "yarn g:jest" + }, + "author": { + "name": "AWS SDK for JavaScript Team", + "url": "https://aws.amazon.com/javascript/" + }, + "license": "Apache-2.0", + "main": "./dist-cjs/index.js", + "module": "./dist-es/index.js", + "types": "./dist-types/index.d.ts", + "dependencies": { + "tslib": "^2.6.2" + }, + "engines": { + "node": ">=14.0.0" + }, + "typesVersions": { + "<4.0": { + "dist-types/*": [ + "dist-types/ts3.4/*" + ] + } + }, + "files": [ + "dist-*/**" + ], + "homepage": "https://github.com/awslabs/smithy-typescript/tree/main/packages/is-array-buffer", + "repository": { + "type": "git", + "url": "https://github.com/awslabs/smithy-typescript.git", + "directory": "packages/is-array-buffer" + }, + "devDependencies": { + "@tsconfig/recommended": "1.0.1", + "concurrently": "7.0.0", + "downlevel-dts": "0.10.1", + "rimraf": "3.0.2", + "typedoc": "0.23.23" + }, + "typedoc": { + "entryPoint": 
"src/index.ts" + }, + "publishConfig": { + "directory": ".release/package" + } +} \ No newline at end of file diff --git a/amplify/functions/downloadDocument/node_modules/@aws-crypto/sha256-browser/node_modules/@smithy/util-buffer-from/LICENSE b/amplify/functions/downloadDocument/node_modules/@aws-crypto/sha256-browser/node_modules/@smithy/util-buffer-from/LICENSE new file mode 100644 index 0000000..7b6491b --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@aws-crypto/sha256-browser/node_modules/@smithy/util-buffer-from/LICENSE @@ -0,0 +1,201 @@ +Apache License + Version 2.0, January 2004 + http://www.apache.org/licenses/ + + TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION + + 1. Definitions. + + "License" shall mean the terms and conditions for use, reproduction, + and distribution as defined by Sections 1 through 9 of this document. + + "Licensor" shall mean the copyright owner or entity authorized by + the copyright owner that is granting the License. + + "Legal Entity" shall mean the union of the acting entity and all + other entities that control, are controlled by, or are under common + control with that entity. For the purposes of this definition, + "control" means (i) the power, direct or indirect, to cause the + direction or management of such entity, whether by contract or + otherwise, or (ii) ownership of fifty percent (50%) or more of the + outstanding shares, or (iii) beneficial ownership of such entity. + + "You" (or "Your") shall mean an individual or Legal Entity + exercising permissions granted by this License. + + "Source" form shall mean the preferred form for making modifications, + including but not limited to software source code, documentation + source, and configuration files. + + "Object" form shall mean any form resulting from mechanical + transformation or translation of a Source form, including but + not limited to compiled object code, generated documentation, + and conversions to other media types. 
+ + "Work" shall mean the work of authorship, whether in Source or + Object form, made available under the License, as indicated by a + copyright notice that is included in or attached to the work + (an example is provided in the Appendix below). + + "Derivative Works" shall mean any work, whether in Source or Object + form, that is based on (or derived from) the Work and for which the + editorial revisions, annotations, elaborations, or other modifications + represent, as a whole, an original work of authorship. For the purposes + of this License, Derivative Works shall not include works that remain + separable from, or merely link (or bind by name) to the interfaces of, + the Work and Derivative Works thereof. + + "Contribution" shall mean any work of authorship, including + the original version of the Work and any modifications or additions + to that Work or Derivative Works thereof, that is intentionally + submitted to Licensor for inclusion in the Work by the copyright owner + or by an individual or Legal Entity authorized to submit on behalf of + the copyright owner. For the purposes of this definition, "submitted" + means any form of electronic, verbal, or written communication sent + to the Licensor or its representatives, including but not limited to + communication on electronic mailing lists, source code control systems, + and issue tracking systems that are managed by, or on behalf of, the + Licensor for the purpose of discussing and improving the Work, but + excluding communication that is conspicuously marked or otherwise + designated in writing by the copyright owner as "Not a Contribution." + + "Contributor" shall mean Licensor and any individual or Legal Entity + on behalf of whom a Contribution has been received by Licensor and + subsequently incorporated within the Work. + + 2. Grant of Copyright License. 
Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + copyright license to reproduce, prepare Derivative Works of, + publicly display, publicly perform, sublicense, and distribute the + Work and such Derivative Works in Source or Object form. + + 3. Grant of Patent License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + (except as stated in this section) patent license to make, have made, + use, offer to sell, sell, import, and otherwise transfer the Work, + where such license applies only to those patent claims licensable + by such Contributor that are necessarily infringed by their + Contribution(s) alone or by combination of their Contribution(s) + with the Work to which such Contribution(s) was submitted. If You + institute patent litigation against any entity (including a + cross-claim or counterclaim in a lawsuit) alleging that the Work + or a Contribution incorporated within the Work constitutes direct + or contributory patent infringement, then any patent licenses + granted to You under this License for that Work shall terminate + as of the date such litigation is filed. + + 4. Redistribution. 
You may reproduce and distribute copies of the + Work or Derivative Works thereof in any medium, with or without + modifications, and in Source or Object form, provided that You + meet the following conditions: + + (a) You must give any other recipients of the Work or + Derivative Works a copy of this License; and + + (b) You must cause any modified files to carry prominent notices + stating that You changed the files; and + + (c) You must retain, in the Source form of any Derivative Works + that You distribute, all copyright, patent, trademark, and + attribution notices from the Source form of the Work, + excluding those notices that do not pertain to any part of + the Derivative Works; and + + (d) If the Work includes a "NOTICE" text file as part of its + distribution, then any Derivative Works that You distribute must + include a readable copy of the attribution notices contained + within such NOTICE file, excluding those notices that do not + pertain to any part of the Derivative Works, in at least one + of the following places: within a NOTICE text file distributed + as part of the Derivative Works; within the Source form or + documentation, if provided along with the Derivative Works; or, + within a display generated by the Derivative Works, if and + wherever such third-party notices normally appear. The contents + of the NOTICE file are for informational purposes only and + do not modify the License. You may add Your own attribution + notices within Derivative Works that You distribute, alongside + or as an addendum to the NOTICE text from the Work, provided + that such additional attribution notices cannot be construed + as modifying the License. 
+ + You may add Your own copyright statement to Your modifications and + may provide additional or different license terms and conditions + for use, reproduction, or distribution of Your modifications, or + for any such Derivative Works as a whole, provided Your use, + reproduction, and distribution of the Work otherwise complies with + the conditions stated in this License. + + 5. Submission of Contributions. Unless You explicitly state otherwise, + any Contribution intentionally submitted for inclusion in the Work + by You to the Licensor shall be under the terms and conditions of + this License, without any additional terms or conditions. + Notwithstanding the above, nothing herein shall supersede or modify + the terms of any separate license agreement you may have executed + with Licensor regarding such Contributions. + + 6. Trademarks. This License does not grant permission to use the trade + names, trademarks, service marks, or product names of the Licensor, + except as required for reasonable and customary use in describing the + origin of the Work and reproducing the content of the NOTICE file. + + 7. Disclaimer of Warranty. Unless required by applicable law or + agreed to in writing, Licensor provides the Work (and each + Contributor provides its Contributions) on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or + implied, including, without limitation, any warranties or conditions + of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A + PARTICULAR PURPOSE. You are solely responsible for determining the + appropriateness of using or redistributing the Work and assume any + risks associated with Your exercise of permissions under this License. + + 8. Limitation of Liability. 
In no event and under no legal theory, + whether in tort (including negligence), contract, or otherwise, + unless required by applicable law (such as deliberate and grossly + negligent acts) or agreed to in writing, shall any Contributor be + liable to You for damages, including any direct, indirect, special, + incidental, or consequential damages of any character arising as a + result of this License or out of the use or inability to use the + Work (including but not limited to damages for loss of goodwill, + work stoppage, computer failure or malfunction, or any and all + other commercial damages or losses), even if such Contributor + has been advised of the possibility of such damages. + + 9. Accepting Warranty or Additional Liability. While redistributing + the Work or Derivative Works thereof, You may choose to offer, + and charge a fee for, acceptance of support, warranty, indemnity, + or other liability obligations and/or rights consistent with this + License. However, in accepting such obligations, You may act only + on Your own behalf and on Your sole responsibility, not on behalf + of any other Contributor, and only if You agree to indemnify, + defend, and hold each Contributor harmless for any liability + incurred by, or claims asserted against, such Contributor by reason + of your accepting any such warranty or additional liability. + + END OF TERMS AND CONDITIONS + + APPENDIX: How to apply the Apache License to your work. + + To apply the Apache License to your work, attach the following + boilerplate notice, with the fields enclosed by brackets "{}" + replaced with your own identifying information. (Don't include + the brackets!) The text should be enclosed in the appropriate + comment syntax for the file format. We also recommend that a + file or class name and description of purpose be included on the + same "printed page" as the copyright notice for easier + identification within third-party archives. + + Copyright 2018-2020 Amazon.com, Inc. 
or its affiliates. All Rights Reserved. + + Licensed under the Apache License, Version 2.0 (the "License"); + you may not use this file except in compliance with the License. + You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + + Unless required by applicable law or agreed to in writing, software + distributed under the License is distributed on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + See the License for the specific language governing permissions and + limitations under the License. \ No newline at end of file diff --git a/amplify/functions/downloadDocument/node_modules/@aws-crypto/sha256-browser/node_modules/@smithy/util-buffer-from/README.md b/amplify/functions/downloadDocument/node_modules/@aws-crypto/sha256-browser/node_modules/@smithy/util-buffer-from/README.md new file mode 100644 index 0000000..c896b04 --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@aws-crypto/sha256-browser/node_modules/@smithy/util-buffer-from/README.md @@ -0,0 +1,10 @@ +# @smithy/util-buffer-from + +[![NPM version](https://img.shields.io/npm/v/@smithy/util-buffer-from/latest.svg)](https://www.npmjs.com/package/@smithy/util-buffer-from) +[![NPM downloads](https://img.shields.io/npm/dm/@smithy/util-buffer-from.svg)](https://www.npmjs.com/package/@smithy/util-buffer-from) + +> An internal package + +## Usage + +You probably shouldn't, at least directly. 
diff --git a/amplify/functions/downloadDocument/node_modules/@aws-crypto/sha256-browser/node_modules/@smithy/util-buffer-from/dist-cjs/index.js b/amplify/functions/downloadDocument/node_modules/@aws-crypto/sha256-browser/node_modules/@smithy/util-buffer-from/dist-cjs/index.js new file mode 100644 index 0000000..c6738d9 --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@aws-crypto/sha256-browser/node_modules/@smithy/util-buffer-from/dist-cjs/index.js @@ -0,0 +1,47 @@ +var __defProp = Object.defineProperty; +var __getOwnPropDesc = Object.getOwnPropertyDescriptor; +var __getOwnPropNames = Object.getOwnPropertyNames; +var __hasOwnProp = Object.prototype.hasOwnProperty; +var __name = (target, value) => __defProp(target, "name", { value, configurable: true }); +var __export = (target, all) => { + for (var name in all) + __defProp(target, name, { get: all[name], enumerable: true }); +}; +var __copyProps = (to, from, except, desc) => { + if (from && typeof from === "object" || typeof from === "function") { + for (let key of __getOwnPropNames(from)) + if (!__hasOwnProp.call(to, key) && key !== except) + __defProp(to, key, { get: () => from[key], enumerable: !(desc = __getOwnPropDesc(from, key)) || desc.enumerable }); + } + return to; +}; +var __toCommonJS = (mod) => __copyProps(__defProp({}, "__esModule", { value: true }), mod); + +// src/index.ts +var src_exports = {}; +__export(src_exports, { + fromArrayBuffer: () => fromArrayBuffer, + fromString: () => fromString +}); +module.exports = __toCommonJS(src_exports); +var import_is_array_buffer = require("@smithy/is-array-buffer"); +var import_buffer = require("buffer"); +var fromArrayBuffer = /* @__PURE__ */ __name((input, offset = 0, length = input.byteLength - offset) => { + if (!(0, import_is_array_buffer.isArrayBuffer)(input)) { + throw new TypeError(`The "input" argument must be ArrayBuffer. 
Received type ${typeof input} (${input})`); + } + return import_buffer.Buffer.from(input, offset, length); +}, "fromArrayBuffer"); +var fromString = /* @__PURE__ */ __name((input, encoding) => { + if (typeof input !== "string") { + throw new TypeError(`The "input" argument must be of type string. Received type ${typeof input} (${input})`); + } + return encoding ? import_buffer.Buffer.from(input, encoding) : import_buffer.Buffer.from(input); +}, "fromString"); +// Annotate the CommonJS export names for ESM import in node: + +0 && (module.exports = { + fromArrayBuffer, + fromString +}); + diff --git a/amplify/functions/downloadDocument/node_modules/@aws-crypto/sha256-browser/node_modules/@smithy/util-buffer-from/dist-es/index.js b/amplify/functions/downloadDocument/node_modules/@aws-crypto/sha256-browser/node_modules/@smithy/util-buffer-from/dist-es/index.js new file mode 100644 index 0000000..718f831 --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@aws-crypto/sha256-browser/node_modules/@smithy/util-buffer-from/dist-es/index.js @@ -0,0 +1,14 @@ +import { isArrayBuffer } from "@smithy/is-array-buffer"; +import { Buffer } from "buffer"; +export const fromArrayBuffer = (input, offset = 0, length = input.byteLength - offset) => { + if (!isArrayBuffer(input)) { + throw new TypeError(`The "input" argument must be ArrayBuffer. Received type ${typeof input} (${input})`); + } + return Buffer.from(input, offset, length); +}; +export const fromString = (input, encoding) => { + if (typeof input !== "string") { + throw new TypeError(`The "input" argument must be of type string. Received type ${typeof input} (${input})`); + } + return encoding ? 
Buffer.from(input, encoding) : Buffer.from(input); +}; diff --git a/amplify/functions/downloadDocument/node_modules/@aws-crypto/sha256-browser/node_modules/@smithy/util-buffer-from/dist-types/index.d.ts b/amplify/functions/downloadDocument/node_modules/@aws-crypto/sha256-browser/node_modules/@smithy/util-buffer-from/dist-types/index.d.ts new file mode 100644 index 0000000..a523134 --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@aws-crypto/sha256-browser/node_modules/@smithy/util-buffer-from/dist-types/index.d.ts @@ -0,0 +1,13 @@ +import { Buffer } from "buffer"; +/** + * @internal + */ +export declare const fromArrayBuffer: (input: ArrayBuffer, offset?: number, length?: number) => Buffer; +/** + * @internal + */ +export type StringEncoding = "ascii" | "utf8" | "utf16le" | "ucs2" | "base64" | "latin1" | "binary" | "hex"; +/** + * @internal + */ +export declare const fromString: (input: string, encoding?: StringEncoding) => Buffer; diff --git a/amplify/functions/downloadDocument/node_modules/@aws-crypto/sha256-browser/node_modules/@smithy/util-buffer-from/dist-types/ts3.4/index.d.ts b/amplify/functions/downloadDocument/node_modules/@aws-crypto/sha256-browser/node_modules/@smithy/util-buffer-from/dist-types/ts3.4/index.d.ts new file mode 100644 index 0000000..f9173f7 --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@aws-crypto/sha256-browser/node_modules/@smithy/util-buffer-from/dist-types/ts3.4/index.d.ts @@ -0,0 +1,13 @@ +import { Buffer } from "buffer"; +/** + * @internal + */ +export declare const fromArrayBuffer: (input: ArrayBuffer, offset?: number, length?: number) => Buffer; +/** + * @internal + */ +export type StringEncoding = "ascii" | "utf8" | "utf16le" | "ucs2" | "base64" | "latin1" | "binary" | "hex"; +/** + * @internal + */ +export declare const fromString: (input: string, encoding?: StringEncoding) => Buffer; diff --git 
a/amplify/functions/downloadDocument/node_modules/@aws-crypto/sha256-browser/node_modules/@smithy/util-buffer-from/package.json b/amplify/functions/downloadDocument/node_modules/@aws-crypto/sha256-browser/node_modules/@smithy/util-buffer-from/package.json new file mode 100644 index 0000000..a12e51c --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@aws-crypto/sha256-browser/node_modules/@smithy/util-buffer-from/package.json @@ -0,0 +1,61 @@ +{ + "name": "@smithy/util-buffer-from", + "version": "2.2.0", + "scripts": { + "build": "concurrently 'yarn:build:cjs' 'yarn:build:es' 'yarn:build:types && yarn build:types:downlevel'", + "build:cjs": "node ../../scripts/inline util-buffer-from", + "build:es": "yarn g:tsc -p tsconfig.es.json", + "build:types": "yarn g:tsc -p tsconfig.types.json", + "build:types:downlevel": "downlevel-dts dist-types dist-types/ts3.4", + "stage-release": "rimraf ./.release && yarn pack && mkdir ./.release && tar zxvf ./package.tgz --directory ./.release && rm ./package.tgz", + "clean": "rimraf ./dist-* && rimraf *.tsbuildinfo || exit 0", + "lint": "eslint -c ../../.eslintrc.js \"src/**/*.ts\"", + "format": "prettier --config ../../prettier.config.js --ignore-path ../.prettierignore --write \"**/*.{ts,md,json}\"", + "test": "yarn g:jest" + }, + "author": { + "name": "AWS SDK for JavaScript Team", + "url": "https://aws.amazon.com/javascript/" + }, + "license": "Apache-2.0", + "dependencies": { + "@smithy/is-array-buffer": "^2.2.0", + "tslib": "^2.6.2" + }, + "devDependencies": { + "@tsconfig/recommended": "1.0.1", + "@types/node": "^14.14.31", + "concurrently": "7.0.0", + "downlevel-dts": "0.10.1", + "rimraf": "3.0.2", + "typedoc": "0.23.23" + }, + "main": "./dist-cjs/index.js", + "module": "./dist-es/index.js", + "types": "./dist-types/index.d.ts", + "engines": { + "node": ">=14.0.0" + }, + "typesVersions": { + "<4.0": { + "dist-types/*": [ + "dist-types/ts3.4/*" + ] + } + }, + "files": [ + "dist-*/**" + ], + "homepage": 
"https://github.com/awslabs/smithy-typescript/tree/main/packages/util-buffer-from", + "repository": { + "type": "git", + "url": "https://github.com/awslabs/smithy-typescript.git", + "directory": "packages/util-buffer-from" + }, + "typedoc": { + "entryPoint": "src/index.ts" + }, + "publishConfig": { + "directory": ".release/package" + } +} \ No newline at end of file diff --git a/amplify/functions/downloadDocument/node_modules/@aws-crypto/sha256-browser/node_modules/@smithy/util-utf8/LICENSE b/amplify/functions/downloadDocument/node_modules/@aws-crypto/sha256-browser/node_modules/@smithy/util-utf8/LICENSE new file mode 100644 index 0000000..7b6491b --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@aws-crypto/sha256-browser/node_modules/@smithy/util-utf8/LICENSE @@ -0,0 +1,201 @@ +Apache License + Version 2.0, January 2004 + http://www.apache.org/licenses/ + + TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION + + 1. Definitions. + + "License" shall mean the terms and conditions for use, reproduction, + and distribution as defined by Sections 1 through 9 of this document. + + "Licensor" shall mean the copyright owner or entity authorized by + the copyright owner that is granting the License. + + "Legal Entity" shall mean the union of the acting entity and all + other entities that control, are controlled by, or are under common + control with that entity. For the purposes of this definition, + "control" means (i) the power, direct or indirect, to cause the + direction or management of such entity, whether by contract or + otherwise, or (ii) ownership of fifty percent (50%) or more of the + outstanding shares, or (iii) beneficial ownership of such entity. + + "You" (or "Your") shall mean an individual or Legal Entity + exercising permissions granted by this License. + + "Source" form shall mean the preferred form for making modifications, + including but not limited to software source code, documentation + source, and configuration files. 
+ + "Object" form shall mean any form resulting from mechanical + transformation or translation of a Source form, including but + not limited to compiled object code, generated documentation, + and conversions to other media types. + + "Work" shall mean the work of authorship, whether in Source or + Object form, made available under the License, as indicated by a + copyright notice that is included in or attached to the work + (an example is provided in the Appendix below). + + "Derivative Works" shall mean any work, whether in Source or Object + form, that is based on (or derived from) the Work and for which the + editorial revisions, annotations, elaborations, or other modifications + represent, as a whole, an original work of authorship. For the purposes + of this License, Derivative Works shall not include works that remain + separable from, or merely link (or bind by name) to the interfaces of, + the Work and Derivative Works thereof. + + "Contribution" shall mean any work of authorship, including + the original version of the Work and any modifications or additions + to that Work or Derivative Works thereof, that is intentionally + submitted to Licensor for inclusion in the Work by the copyright owner + or by an individual or Legal Entity authorized to submit on behalf of + the copyright owner. For the purposes of this definition, "submitted" + means any form of electronic, verbal, or written communication sent + to the Licensor or its representatives, including but not limited to + communication on electronic mailing lists, source code control systems, + and issue tracking systems that are managed by, or on behalf of, the + Licensor for the purpose of discussing and improving the Work, but + excluding communication that is conspicuously marked or otherwise + designated in writing by the copyright owner as "Not a Contribution." 
+ + "Contributor" shall mean Licensor and any individual or Legal Entity + on behalf of whom a Contribution has been received by Licensor and + subsequently incorporated within the Work. + + 2. Grant of Copyright License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + copyright license to reproduce, prepare Derivative Works of, + publicly display, publicly perform, sublicense, and distribute the + Work and such Derivative Works in Source or Object form. + + 3. Grant of Patent License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + (except as stated in this section) patent license to make, have made, + use, offer to sell, sell, import, and otherwise transfer the Work, + where such license applies only to those patent claims licensable + by such Contributor that are necessarily infringed by their + Contribution(s) alone or by combination of their Contribution(s) + with the Work to which such Contribution(s) was submitted. If You + institute patent litigation against any entity (including a + cross-claim or counterclaim in a lawsuit) alleging that the Work + or a Contribution incorporated within the Work constitutes direct + or contributory patent infringement, then any patent licenses + granted to You under this License for that Work shall terminate + as of the date such litigation is filed. + + 4. Redistribution. 
You may reproduce and distribute copies of the + Work or Derivative Works thereof in any medium, with or without + modifications, and in Source or Object form, provided that You + meet the following conditions: + + (a) You must give any other recipients of the Work or + Derivative Works a copy of this License; and + + (b) You must cause any modified files to carry prominent notices + stating that You changed the files; and + + (c) You must retain, in the Source form of any Derivative Works + that You distribute, all copyright, patent, trademark, and + attribution notices from the Source form of the Work, + excluding those notices that do not pertain to any part of + the Derivative Works; and + + (d) If the Work includes a "NOTICE" text file as part of its + distribution, then any Derivative Works that You distribute must + include a readable copy of the attribution notices contained + within such NOTICE file, excluding those notices that do not + pertain to any part of the Derivative Works, in at least one + of the following places: within a NOTICE text file distributed + as part of the Derivative Works; within the Source form or + documentation, if provided along with the Derivative Works; or, + within a display generated by the Derivative Works, if and + wherever such third-party notices normally appear. The contents + of the NOTICE file are for informational purposes only and + do not modify the License. You may add Your own attribution + notices within Derivative Works that You distribute, alongside + or as an addendum to the NOTICE text from the Work, provided + that such additional attribution notices cannot be construed + as modifying the License. 
+ + You may add Your own copyright statement to Your modifications and + may provide additional or different license terms and conditions + for use, reproduction, or distribution of Your modifications, or + for any such Derivative Works as a whole, provided Your use, + reproduction, and distribution of the Work otherwise complies with + the conditions stated in this License. + + 5. Submission of Contributions. Unless You explicitly state otherwise, + any Contribution intentionally submitted for inclusion in the Work + by You to the Licensor shall be under the terms and conditions of + this License, without any additional terms or conditions. + Notwithstanding the above, nothing herein shall supersede or modify + the terms of any separate license agreement you may have executed + with Licensor regarding such Contributions. + + 6. Trademarks. This License does not grant permission to use the trade + names, trademarks, service marks, or product names of the Licensor, + except as required for reasonable and customary use in describing the + origin of the Work and reproducing the content of the NOTICE file. + + 7. Disclaimer of Warranty. Unless required by applicable law or + agreed to in writing, Licensor provides the Work (and each + Contributor provides its Contributions) on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or + implied, including, without limitation, any warranties or conditions + of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A + PARTICULAR PURPOSE. You are solely responsible for determining the + appropriateness of using or redistributing the Work and assume any + risks associated with Your exercise of permissions under this License. + + 8. Limitation of Liability. 
In no event and under no legal theory, + whether in tort (including negligence), contract, or otherwise, + unless required by applicable law (such as deliberate and grossly + negligent acts) or agreed to in writing, shall any Contributor be + liable to You for damages, including any direct, indirect, special, + incidental, or consequential damages of any character arising as a + result of this License or out of the use or inability to use the + Work (including but not limited to damages for loss of goodwill, + work stoppage, computer failure or malfunction, or any and all + other commercial damages or losses), even if such Contributor + has been advised of the possibility of such damages. + + 9. Accepting Warranty or Additional Liability. While redistributing + the Work or Derivative Works thereof, You may choose to offer, + and charge a fee for, acceptance of support, warranty, indemnity, + or other liability obligations and/or rights consistent with this + License. However, in accepting such obligations, You may act only + on Your own behalf and on Your sole responsibility, not on behalf + of any other Contributor, and only if You agree to indemnify, + defend, and hold each Contributor harmless for any liability + incurred by, or claims asserted against, such Contributor by reason + of your accepting any such warranty or additional liability. + + END OF TERMS AND CONDITIONS + + APPENDIX: How to apply the Apache License to your work. + + To apply the Apache License to your work, attach the following + boilerplate notice, with the fields enclosed by brackets "{}" + replaced with your own identifying information. (Don't include + the brackets!) The text should be enclosed in the appropriate + comment syntax for the file format. We also recommend that a + file or class name and description of purpose be included on the + same "printed page" as the copyright notice for easier + identification within third-party archives. + + Copyright 2018-2020 Amazon.com, Inc. 
or its affiliates. All Rights Reserved. + + Licensed under the Apache License, Version 2.0 (the "License"); + you may not use this file except in compliance with the License. + You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + + Unless required by applicable law or agreed to in writing, software + distributed under the License is distributed on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + See the License for the specific language governing permissions and + limitations under the License. \ No newline at end of file diff --git a/amplify/functions/downloadDocument/node_modules/@aws-crypto/sha256-browser/node_modules/@smithy/util-utf8/README.md b/amplify/functions/downloadDocument/node_modules/@aws-crypto/sha256-browser/node_modules/@smithy/util-utf8/README.md new file mode 100644 index 0000000..fc5db6d --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@aws-crypto/sha256-browser/node_modules/@smithy/util-utf8/README.md @@ -0,0 +1,4 @@ +# @smithy/util-utf8 + +[![NPM version](https://img.shields.io/npm/v/@smithy/util-utf8/latest.svg)](https://www.npmjs.com/package/@smithy/util-utf8) +[![NPM downloads](https://img.shields.io/npm/dm/@smithy/util-utf8.svg)](https://www.npmjs.com/package/@smithy/util-utf8) diff --git a/amplify/functions/downloadDocument/node_modules/@aws-crypto/sha256-browser/node_modules/@smithy/util-utf8/dist-cjs/fromUtf8.browser.js b/amplify/functions/downloadDocument/node_modules/@aws-crypto/sha256-browser/node_modules/@smithy/util-utf8/dist-cjs/fromUtf8.browser.js new file mode 100644 index 0000000..532e610 --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@aws-crypto/sha256-browser/node_modules/@smithy/util-utf8/dist-cjs/fromUtf8.browser.js @@ -0,0 +1 @@ +module.exports = require("./index.js"); \ No newline at end of file diff --git 
a/amplify/functions/downloadDocument/node_modules/@aws-crypto/sha256-browser/node_modules/@smithy/util-utf8/dist-cjs/fromUtf8.js b/amplify/functions/downloadDocument/node_modules/@aws-crypto/sha256-browser/node_modules/@smithy/util-utf8/dist-cjs/fromUtf8.js new file mode 100644 index 0000000..532e610 --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@aws-crypto/sha256-browser/node_modules/@smithy/util-utf8/dist-cjs/fromUtf8.js @@ -0,0 +1 @@ +module.exports = require("./index.js"); \ No newline at end of file diff --git a/amplify/functions/downloadDocument/node_modules/@aws-crypto/sha256-browser/node_modules/@smithy/util-utf8/dist-cjs/index.js b/amplify/functions/downloadDocument/node_modules/@aws-crypto/sha256-browser/node_modules/@smithy/util-utf8/dist-cjs/index.js new file mode 100644 index 0000000..0b22680 --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@aws-crypto/sha256-browser/node_modules/@smithy/util-utf8/dist-cjs/index.js @@ -0,0 +1,65 @@ +var __defProp = Object.defineProperty; +var __getOwnPropDesc = Object.getOwnPropertyDescriptor; +var __getOwnPropNames = Object.getOwnPropertyNames; +var __hasOwnProp = Object.prototype.hasOwnProperty; +var __name = (target, value) => __defProp(target, "name", { value, configurable: true }); +var __export = (target, all) => { + for (var name in all) + __defProp(target, name, { get: all[name], enumerable: true }); +}; +var __copyProps = (to, from, except, desc) => { + if (from && typeof from === "object" || typeof from === "function") { + for (let key of __getOwnPropNames(from)) + if (!__hasOwnProp.call(to, key) && key !== except) + __defProp(to, key, { get: () => from[key], enumerable: !(desc = __getOwnPropDesc(from, key)) || desc.enumerable }); + } + return to; +}; +var __toCommonJS = (mod) => __copyProps(__defProp({}, "__esModule", { value: true }), mod); + +// src/index.ts +var src_exports = {}; +__export(src_exports, { + fromUtf8: () => fromUtf8, + toUint8Array: () => 
toUint8Array, + toUtf8: () => toUtf8 +}); +module.exports = __toCommonJS(src_exports); + +// src/fromUtf8.ts +var import_util_buffer_from = require("@smithy/util-buffer-from"); +var fromUtf8 = /* @__PURE__ */ __name((input) => { + const buf = (0, import_util_buffer_from.fromString)(input, "utf8"); + return new Uint8Array(buf.buffer, buf.byteOffset, buf.byteLength / Uint8Array.BYTES_PER_ELEMENT); +}, "fromUtf8"); + +// src/toUint8Array.ts +var toUint8Array = /* @__PURE__ */ __name((data) => { + if (typeof data === "string") { + return fromUtf8(data); + } + if (ArrayBuffer.isView(data)) { + return new Uint8Array(data.buffer, data.byteOffset, data.byteLength / Uint8Array.BYTES_PER_ELEMENT); + } + return new Uint8Array(data); +}, "toUint8Array"); + +// src/toUtf8.ts + +var toUtf8 = /* @__PURE__ */ __name((input) => { + if (typeof input === "string") { + return input; + } + if (typeof input !== "object" || typeof input.byteOffset !== "number" || typeof input.byteLength !== "number") { + throw new Error("@smithy/util-utf8: toUtf8 encoder function only accepts string | Uint8Array."); + } + return (0, import_util_buffer_from.fromArrayBuffer)(input.buffer, input.byteOffset, input.byteLength).toString("utf8"); +}, "toUtf8"); +// Annotate the CommonJS export names for ESM import in node: + +0 && (module.exports = { + fromUtf8, + toUint8Array, + toUtf8 +}); + diff --git a/amplify/functions/downloadDocument/node_modules/@aws-crypto/sha256-browser/node_modules/@smithy/util-utf8/dist-cjs/toUint8Array.js b/amplify/functions/downloadDocument/node_modules/@aws-crypto/sha256-browser/node_modules/@smithy/util-utf8/dist-cjs/toUint8Array.js new file mode 100644 index 0000000..532e610 --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@aws-crypto/sha256-browser/node_modules/@smithy/util-utf8/dist-cjs/toUint8Array.js @@ -0,0 +1 @@ +module.exports = require("./index.js"); \ No newline at end of file diff --git 
a/amplify/functions/downloadDocument/node_modules/@aws-crypto/sha256-browser/node_modules/@smithy/util-utf8/dist-cjs/toUtf8.browser.js b/amplify/functions/downloadDocument/node_modules/@aws-crypto/sha256-browser/node_modules/@smithy/util-utf8/dist-cjs/toUtf8.browser.js new file mode 100644 index 0000000..532e610 --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@aws-crypto/sha256-browser/node_modules/@smithy/util-utf8/dist-cjs/toUtf8.browser.js @@ -0,0 +1 @@ +module.exports = require("./index.js"); \ No newline at end of file diff --git a/amplify/functions/downloadDocument/node_modules/@aws-crypto/sha256-browser/node_modules/@smithy/util-utf8/dist-cjs/toUtf8.js b/amplify/functions/downloadDocument/node_modules/@aws-crypto/sha256-browser/node_modules/@smithy/util-utf8/dist-cjs/toUtf8.js new file mode 100644 index 0000000..532e610 --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@aws-crypto/sha256-browser/node_modules/@smithy/util-utf8/dist-cjs/toUtf8.js @@ -0,0 +1 @@ +module.exports = require("./index.js"); \ No newline at end of file diff --git a/amplify/functions/downloadDocument/node_modules/@aws-crypto/sha256-browser/node_modules/@smithy/util-utf8/dist-es/fromUtf8.browser.js b/amplify/functions/downloadDocument/node_modules/@aws-crypto/sha256-browser/node_modules/@smithy/util-utf8/dist-es/fromUtf8.browser.js new file mode 100644 index 0000000..7344190 --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@aws-crypto/sha256-browser/node_modules/@smithy/util-utf8/dist-es/fromUtf8.browser.js @@ -0,0 +1 @@ +export const fromUtf8 = (input) => new TextEncoder().encode(input); diff --git a/amplify/functions/downloadDocument/node_modules/@aws-crypto/sha256-browser/node_modules/@smithy/util-utf8/dist-es/fromUtf8.js b/amplify/functions/downloadDocument/node_modules/@aws-crypto/sha256-browser/node_modules/@smithy/util-utf8/dist-es/fromUtf8.js new file mode 100644 index 0000000..6dc438b --- /dev/null +++ 
b/amplify/functions/downloadDocument/node_modules/@aws-crypto/sha256-browser/node_modules/@smithy/util-utf8/dist-es/fromUtf8.js @@ -0,0 +1,5 @@ +import { fromString } from "@smithy/util-buffer-from"; +export const fromUtf8 = (input) => { + const buf = fromString(input, "utf8"); + return new Uint8Array(buf.buffer, buf.byteOffset, buf.byteLength / Uint8Array.BYTES_PER_ELEMENT); +}; diff --git a/amplify/functions/downloadDocument/node_modules/@aws-crypto/sha256-browser/node_modules/@smithy/util-utf8/dist-es/index.js b/amplify/functions/downloadDocument/node_modules/@aws-crypto/sha256-browser/node_modules/@smithy/util-utf8/dist-es/index.js new file mode 100644 index 0000000..00ba465 --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@aws-crypto/sha256-browser/node_modules/@smithy/util-utf8/dist-es/index.js @@ -0,0 +1,3 @@ +export * from "./fromUtf8"; +export * from "./toUint8Array"; +export * from "./toUtf8"; diff --git a/amplify/functions/downloadDocument/node_modules/@aws-crypto/sha256-browser/node_modules/@smithy/util-utf8/dist-es/toUint8Array.js b/amplify/functions/downloadDocument/node_modules/@aws-crypto/sha256-browser/node_modules/@smithy/util-utf8/dist-es/toUint8Array.js new file mode 100644 index 0000000..2cd36f7 --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@aws-crypto/sha256-browser/node_modules/@smithy/util-utf8/dist-es/toUint8Array.js @@ -0,0 +1,10 @@ +import { fromUtf8 } from "./fromUtf8"; +export const toUint8Array = (data) => { + if (typeof data === "string") { + return fromUtf8(data); + } + if (ArrayBuffer.isView(data)) { + return new Uint8Array(data.buffer, data.byteOffset, data.byteLength / Uint8Array.BYTES_PER_ELEMENT); + } + return new Uint8Array(data); +}; diff --git a/amplify/functions/downloadDocument/node_modules/@aws-crypto/sha256-browser/node_modules/@smithy/util-utf8/dist-es/toUtf8.browser.js 
b/amplify/functions/downloadDocument/node_modules/@aws-crypto/sha256-browser/node_modules/@smithy/util-utf8/dist-es/toUtf8.browser.js new file mode 100644 index 0000000..c292127 --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@aws-crypto/sha256-browser/node_modules/@smithy/util-utf8/dist-es/toUtf8.browser.js @@ -0,0 +1,9 @@ +export const toUtf8 = (input) => { + if (typeof input === "string") { + return input; + } + if (typeof input !== "object" || typeof input.byteOffset !== "number" || typeof input.byteLength !== "number") { + throw new Error("@smithy/util-utf8: toUtf8 encoder function only accepts string | Uint8Array."); + } + return new TextDecoder("utf-8").decode(input); +}; diff --git a/amplify/functions/downloadDocument/node_modules/@aws-crypto/sha256-browser/node_modules/@smithy/util-utf8/dist-es/toUtf8.js b/amplify/functions/downloadDocument/node_modules/@aws-crypto/sha256-browser/node_modules/@smithy/util-utf8/dist-es/toUtf8.js new file mode 100644 index 0000000..7be8745 --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@aws-crypto/sha256-browser/node_modules/@smithy/util-utf8/dist-es/toUtf8.js @@ -0,0 +1,10 @@ +import { fromArrayBuffer } from "@smithy/util-buffer-from"; +export const toUtf8 = (input) => { + if (typeof input === "string") { + return input; + } + if (typeof input !== "object" || typeof input.byteOffset !== "number" || typeof input.byteLength !== "number") { + throw new Error("@smithy/util-utf8: toUtf8 encoder function only accepts string | Uint8Array."); + } + return fromArrayBuffer(input.buffer, input.byteOffset, input.byteLength).toString("utf8"); +}; diff --git a/amplify/functions/downloadDocument/node_modules/@aws-crypto/sha256-browser/node_modules/@smithy/util-utf8/dist-types/fromUtf8.browser.d.ts b/amplify/functions/downloadDocument/node_modules/@aws-crypto/sha256-browser/node_modules/@smithy/util-utf8/dist-types/fromUtf8.browser.d.ts new file mode 100644 index 0000000..dd91981 --- /dev/null +++ 
b/amplify/functions/downloadDocument/node_modules/@aws-crypto/sha256-browser/node_modules/@smithy/util-utf8/dist-types/fromUtf8.browser.d.ts @@ -0,0 +1 @@ +export declare const fromUtf8: (input: string) => Uint8Array; diff --git a/amplify/functions/downloadDocument/node_modules/@aws-crypto/sha256-browser/node_modules/@smithy/util-utf8/dist-types/fromUtf8.d.ts b/amplify/functions/downloadDocument/node_modules/@aws-crypto/sha256-browser/node_modules/@smithy/util-utf8/dist-types/fromUtf8.d.ts new file mode 100644 index 0000000..dd91981 --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@aws-crypto/sha256-browser/node_modules/@smithy/util-utf8/dist-types/fromUtf8.d.ts @@ -0,0 +1 @@ +export declare const fromUtf8: (input: string) => Uint8Array; diff --git a/amplify/functions/downloadDocument/node_modules/@aws-crypto/sha256-browser/node_modules/@smithy/util-utf8/dist-types/index.d.ts b/amplify/functions/downloadDocument/node_modules/@aws-crypto/sha256-browser/node_modules/@smithy/util-utf8/dist-types/index.d.ts new file mode 100644 index 0000000..00ba465 --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@aws-crypto/sha256-browser/node_modules/@smithy/util-utf8/dist-types/index.d.ts @@ -0,0 +1,3 @@ +export * from "./fromUtf8"; +export * from "./toUint8Array"; +export * from "./toUtf8"; diff --git a/amplify/functions/downloadDocument/node_modules/@aws-crypto/sha256-browser/node_modules/@smithy/util-utf8/dist-types/toUint8Array.d.ts b/amplify/functions/downloadDocument/node_modules/@aws-crypto/sha256-browser/node_modules/@smithy/util-utf8/dist-types/toUint8Array.d.ts new file mode 100644 index 0000000..11b6342 --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@aws-crypto/sha256-browser/node_modules/@smithy/util-utf8/dist-types/toUint8Array.d.ts @@ -0,0 +1 @@ +export declare const toUint8Array: (data: string | ArrayBuffer | ArrayBufferView) => Uint8Array; diff --git 
a/amplify/functions/downloadDocument/node_modules/@aws-crypto/sha256-browser/node_modules/@smithy/util-utf8/dist-types/toUtf8.browser.d.ts b/amplify/functions/downloadDocument/node_modules/@aws-crypto/sha256-browser/node_modules/@smithy/util-utf8/dist-types/toUtf8.browser.d.ts new file mode 100644 index 0000000..8494acd --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@aws-crypto/sha256-browser/node_modules/@smithy/util-utf8/dist-types/toUtf8.browser.d.ts @@ -0,0 +1,7 @@ +/** + * + * This does not convert non-utf8 strings to utf8, it only passes through strings if + * a string is received instead of a Uint8Array. + * + */ +export declare const toUtf8: (input: Uint8Array | string) => string; diff --git a/amplify/functions/downloadDocument/node_modules/@aws-crypto/sha256-browser/node_modules/@smithy/util-utf8/dist-types/toUtf8.d.ts b/amplify/functions/downloadDocument/node_modules/@aws-crypto/sha256-browser/node_modules/@smithy/util-utf8/dist-types/toUtf8.d.ts new file mode 100644 index 0000000..8494acd --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@aws-crypto/sha256-browser/node_modules/@smithy/util-utf8/dist-types/toUtf8.d.ts @@ -0,0 +1,7 @@ +/** + * + * This does not convert non-utf8 strings to utf8, it only passes through strings if + * a string is received instead of a Uint8Array. 
+ * + */ +export declare const toUtf8: (input: Uint8Array | string) => string; diff --git a/amplify/functions/downloadDocument/node_modules/@aws-crypto/sha256-browser/node_modules/@smithy/util-utf8/dist-types/ts3.4/fromUtf8.browser.d.ts b/amplify/functions/downloadDocument/node_modules/@aws-crypto/sha256-browser/node_modules/@smithy/util-utf8/dist-types/ts3.4/fromUtf8.browser.d.ts new file mode 100644 index 0000000..39f3d6d --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@aws-crypto/sha256-browser/node_modules/@smithy/util-utf8/dist-types/ts3.4/fromUtf8.browser.d.ts @@ -0,0 +1 @@ +export declare const fromUtf8: (input: string) => Uint8Array; diff --git a/amplify/functions/downloadDocument/node_modules/@aws-crypto/sha256-browser/node_modules/@smithy/util-utf8/dist-types/ts3.4/fromUtf8.d.ts b/amplify/functions/downloadDocument/node_modules/@aws-crypto/sha256-browser/node_modules/@smithy/util-utf8/dist-types/ts3.4/fromUtf8.d.ts new file mode 100644 index 0000000..39f3d6d --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@aws-crypto/sha256-browser/node_modules/@smithy/util-utf8/dist-types/ts3.4/fromUtf8.d.ts @@ -0,0 +1 @@ +export declare const fromUtf8: (input: string) => Uint8Array; diff --git a/amplify/functions/downloadDocument/node_modules/@aws-crypto/sha256-browser/node_modules/@smithy/util-utf8/dist-types/ts3.4/index.d.ts b/amplify/functions/downloadDocument/node_modules/@aws-crypto/sha256-browser/node_modules/@smithy/util-utf8/dist-types/ts3.4/index.d.ts new file mode 100644 index 0000000..ef9761d --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@aws-crypto/sha256-browser/node_modules/@smithy/util-utf8/dist-types/ts3.4/index.d.ts @@ -0,0 +1,3 @@ +export * from "./fromUtf8"; +export * from "./toUint8Array"; +export * from "./toUtf8"; diff --git a/amplify/functions/downloadDocument/node_modules/@aws-crypto/sha256-browser/node_modules/@smithy/util-utf8/dist-types/ts3.4/toUint8Array.d.ts 
b/amplify/functions/downloadDocument/node_modules/@aws-crypto/sha256-browser/node_modules/@smithy/util-utf8/dist-types/ts3.4/toUint8Array.d.ts new file mode 100644 index 0000000..562fe10 --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@aws-crypto/sha256-browser/node_modules/@smithy/util-utf8/dist-types/ts3.4/toUint8Array.d.ts @@ -0,0 +1 @@ +export declare const toUint8Array: (data: string | ArrayBuffer | ArrayBufferView) => Uint8Array; diff --git a/amplify/functions/downloadDocument/node_modules/@aws-crypto/sha256-browser/node_modules/@smithy/util-utf8/dist-types/ts3.4/toUtf8.browser.d.ts b/amplify/functions/downloadDocument/node_modules/@aws-crypto/sha256-browser/node_modules/@smithy/util-utf8/dist-types/ts3.4/toUtf8.browser.d.ts new file mode 100644 index 0000000..33511ad --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@aws-crypto/sha256-browser/node_modules/@smithy/util-utf8/dist-types/ts3.4/toUtf8.browser.d.ts @@ -0,0 +1,7 @@ +/** + * + * This does not convert non-utf8 strings to utf8, it only passes through strings if + * a string is received instead of a Uint8Array. + * + */ +export declare const toUtf8: (input: Uint8Array | string) => string; diff --git a/amplify/functions/downloadDocument/node_modules/@aws-crypto/sha256-browser/node_modules/@smithy/util-utf8/dist-types/ts3.4/toUtf8.d.ts b/amplify/functions/downloadDocument/node_modules/@aws-crypto/sha256-browser/node_modules/@smithy/util-utf8/dist-types/ts3.4/toUtf8.d.ts new file mode 100644 index 0000000..33511ad --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@aws-crypto/sha256-browser/node_modules/@smithy/util-utf8/dist-types/ts3.4/toUtf8.d.ts @@ -0,0 +1,7 @@ +/** + * + * This does not convert non-utf8 strings to utf8, it only passes through strings if + * a string is received instead of a Uint8Array. 
+ * + */ +export declare const toUtf8: (input: Uint8Array | string) => string; diff --git a/amplify/functions/downloadDocument/node_modules/@aws-crypto/sha256-browser/node_modules/@smithy/util-utf8/package.json b/amplify/functions/downloadDocument/node_modules/@aws-crypto/sha256-browser/node_modules/@smithy/util-utf8/package.json new file mode 100644 index 0000000..78bfb4d --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@aws-crypto/sha256-browser/node_modules/@smithy/util-utf8/package.json @@ -0,0 +1,66 @@ +{ + "name": "@smithy/util-utf8", + "version": "2.3.0", + "description": "A UTF-8 string <-> UInt8Array converter", + "main": "./dist-cjs/index.js", + "module": "./dist-es/index.js", + "scripts": { + "build": "concurrently 'yarn:build:cjs' 'yarn:build:es' 'yarn:build:types && yarn build:types:downlevel'", + "build:cjs": "node ../../scripts/inline util-utf8", + "build:es": "yarn g:tsc -p tsconfig.es.json", + "build:types": "yarn g:tsc -p tsconfig.types.json", + "build:types:downlevel": "downlevel-dts dist-types dist-types/ts3.4", + "stage-release": "rimraf ./.release && yarn pack && mkdir ./.release && tar zxvf ./package.tgz --directory ./.release && rm ./package.tgz", + "clean": "rimraf ./dist-* && rimraf *.tsbuildinfo || exit 0", + "lint": "eslint -c ../../.eslintrc.js \"src/**/*.ts\"", + "format": "prettier --config ../../prettier.config.js --ignore-path ../.prettierignore --write \"**/*.{ts,md,json}\"", + "test": "yarn g:jest" + }, + "author": { + "name": "AWS SDK for JavaScript Team", + "url": "https://aws.amazon.com/javascript/" + }, + "license": "Apache-2.0", + "dependencies": { + "@smithy/util-buffer-from": "^2.2.0", + "tslib": "^2.6.2" + }, + "devDependencies": { + "@tsconfig/recommended": "1.0.1", + "concurrently": "7.0.0", + "downlevel-dts": "0.10.1", + "rimraf": "3.0.2", + "typedoc": "0.23.23" + }, + "types": "./dist-types/index.d.ts", + "engines": { + "node": ">=14.0.0" + }, + "typesVersions": { + "<4.0": { + "dist-types/*": [ + 
"dist-types/ts3.4/*" + ] + } + }, + "files": [ + "dist-*/**" + ], + "browser": { + "./dist-es/fromUtf8": "./dist-es/fromUtf8.browser", + "./dist-es/toUtf8": "./dist-es/toUtf8.browser" + }, + "react-native": {}, + "homepage": "https://github.com/awslabs/smithy-typescript/tree/main/packages/util-utf8", + "repository": { + "type": "git", + "url": "https://github.com/awslabs/smithy-typescript.git", + "directory": "packages/util-utf8" + }, + "typedoc": { + "entryPoint": "src/index.ts" + }, + "publishConfig": { + "directory": ".release/package" + } +} \ No newline at end of file diff --git a/amplify/functions/downloadDocument/node_modules/@aws-crypto/sha256-browser/package.json b/amplify/functions/downloadDocument/node_modules/@aws-crypto/sha256-browser/package.json new file mode 100644 index 0000000..2688ecf --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@aws-crypto/sha256-browser/package.json @@ -0,0 +1,33 @@ +{ + "name": "@aws-crypto/sha256-browser", + "version": "5.2.0", + "scripts": { + "prepublishOnly": "tsc -p tsconfig.json && tsc -p tsconfig.module.json", + "pretest": "tsc -p tsconfig.test.json", + "test": "mocha --require ts-node/register test/**/*test.ts" + }, + "repository": { + "type": "git", + "url": "git@github.com:aws/aws-sdk-js-crypto-helpers.git" + }, + "author": { + "name": "AWS Crypto Tools Team", + "email": "aws-cryptools@amazon.com", + "url": "https://docs.aws.amazon.com/aws-crypto-tools/index.html?id=docs_gateway#lang/en_us" + }, + "homepage": "https://github.com/aws/aws-sdk-js-crypto-helpers/tree/master/packages/sha256-browser", + "license": "Apache-2.0", + "dependencies": { + "@aws-crypto/sha256-js": "^5.2.0", + "@aws-crypto/supports-web-crypto": "^5.2.0", + "@aws-crypto/util": "^5.2.0", + "@aws-sdk/types": "^3.222.0", + "@aws-sdk/util-locate-window": "^3.0.0", + "@smithy/util-utf8": "^2.0.0", + "tslib": "^2.6.2" + }, + "main": "./build/main/index.js", + "module": "./build/module/index.js", + "types": 
"./build/main/index.d.ts", + "gitHead": "c11b171b35ec5c093364f0e0d8dc4ab1af68e748" +} diff --git a/amplify/functions/downloadDocument/node_modules/@aws-crypto/sha256-browser/src/constants.ts b/amplify/functions/downloadDocument/node_modules/@aws-crypto/sha256-browser/src/constants.ts new file mode 100644 index 0000000..7f68e2a --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@aws-crypto/sha256-browser/src/constants.ts @@ -0,0 +1,41 @@ +export const SHA_256_HASH: { name: "SHA-256" } = { name: "SHA-256" }; + +export const SHA_256_HMAC_ALGO: { name: "HMAC"; hash: { name: "SHA-256" } } = { + name: "HMAC", + hash: SHA_256_HASH +}; + +export const EMPTY_DATA_SHA_256 = new Uint8Array([ + 227, + 176, + 196, + 66, + 152, + 252, + 28, + 20, + 154, + 251, + 244, + 200, + 153, + 111, + 185, + 36, + 39, + 174, + 65, + 228, + 100, + 155, + 147, + 76, + 164, + 149, + 153, + 27, + 120, + 82, + 184, + 85 +]); diff --git a/amplify/functions/downloadDocument/node_modules/@aws-crypto/sha256-browser/src/crossPlatformSha256.ts b/amplify/functions/downloadDocument/node_modules/@aws-crypto/sha256-browser/src/crossPlatformSha256.ts new file mode 100644 index 0000000..8cb9ff0 --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@aws-crypto/sha256-browser/src/crossPlatformSha256.ts @@ -0,0 +1,30 @@ +import { Sha256 as WebCryptoSha256 } from "./webCryptoSha256"; +import { Sha256 as JsSha256 } from "@aws-crypto/sha256-js"; +import { Checksum, SourceData } from "@aws-sdk/types"; +import { supportsWebCrypto } from "@aws-crypto/supports-web-crypto"; +import { locateWindow } from "@aws-sdk/util-locate-window"; +import { convertToBuffer } from "@aws-crypto/util"; + +export class Sha256 implements Checksum { + private hash: Checksum; + + constructor(secret?: SourceData) { + if (supportsWebCrypto(locateWindow())) { + this.hash = new WebCryptoSha256(secret); + } else { + this.hash = new JsSha256(secret); + } + } + + update(data: SourceData, encoding?: "utf8" | "ascii" 
| "latin1"): void { + this.hash.update(convertToBuffer(data)); + } + + digest(): Promise { + return this.hash.digest(); + } + + reset(): void { + this.hash.reset(); + } +} diff --git a/amplify/functions/downloadDocument/node_modules/@aws-crypto/sha256-browser/src/index.ts b/amplify/functions/downloadDocument/node_modules/@aws-crypto/sha256-browser/src/index.ts new file mode 100644 index 0000000..60ab397 --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@aws-crypto/sha256-browser/src/index.ts @@ -0,0 +1,2 @@ +export * from "./crossPlatformSha256"; +export { Sha256 as WebCryptoSha256 } from "./webCryptoSha256"; diff --git a/amplify/functions/downloadDocument/node_modules/@aws-crypto/sha256-browser/src/isEmptyData.ts b/amplify/functions/downloadDocument/node_modules/@aws-crypto/sha256-browser/src/isEmptyData.ts new file mode 100644 index 0000000..538971f --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@aws-crypto/sha256-browser/src/isEmptyData.ts @@ -0,0 +1,9 @@ +import { SourceData } from "@aws-sdk/types"; + +export function isEmptyData(data: SourceData): boolean { + if (typeof data === "string") { + return data.length === 0; + } + + return data.byteLength === 0; +} diff --git a/amplify/functions/downloadDocument/node_modules/@aws-crypto/sha256-browser/src/webCryptoSha256.ts b/amplify/functions/downloadDocument/node_modules/@aws-crypto/sha256-browser/src/webCryptoSha256.ts new file mode 100644 index 0000000..fe4db57 --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@aws-crypto/sha256-browser/src/webCryptoSha256.ts @@ -0,0 +1,71 @@ +import { Checksum, SourceData } from "@aws-sdk/types"; +import { isEmptyData, convertToBuffer } from "@aws-crypto/util"; +import { + EMPTY_DATA_SHA_256, + SHA_256_HASH, + SHA_256_HMAC_ALGO, +} from "./constants"; +import { locateWindow } from "@aws-sdk/util-locate-window"; + +export class Sha256 implements Checksum { + private readonly secret?: SourceData; + private key: Promise | 
undefined; + private toHash: Uint8Array = new Uint8Array(0); + + constructor(secret?: SourceData) { + this.secret = secret; + this.reset(); + } + + update(data: SourceData): void { + if (isEmptyData(data)) { + return; + } + + const update = convertToBuffer(data); + const typedArray = new Uint8Array( + this.toHash.byteLength + update.byteLength + ); + typedArray.set(this.toHash, 0); + typedArray.set(update, this.toHash.byteLength); + this.toHash = typedArray; + } + + digest(): Promise { + if (this.key) { + return this.key.then((key) => + locateWindow() + .crypto.subtle.sign(SHA_256_HMAC_ALGO, key, this.toHash) + .then((data) => new Uint8Array(data)) + ); + } + + if (isEmptyData(this.toHash)) { + return Promise.resolve(EMPTY_DATA_SHA_256); + } + + return Promise.resolve() + .then(() => + locateWindow().crypto.subtle.digest(SHA_256_HASH, this.toHash) + ) + .then((data) => Promise.resolve(new Uint8Array(data))); + } + + reset(): void { + this.toHash = new Uint8Array(0); + if (this.secret && this.secret !== void 0) { + this.key = new Promise((resolve, reject) => { + locateWindow() + .crypto.subtle.importKey( + "raw", + convertToBuffer(this.secret as SourceData), + SHA_256_HMAC_ALGO, + false, + ["sign"] + ) + .then(resolve, reject); + }); + this.key.catch(() => {}); + } + } +} diff --git a/amplify/functions/downloadDocument/node_modules/@aws-crypto/sha256-browser/tsconfig.json b/amplify/functions/downloadDocument/node_modules/@aws-crypto/sha256-browser/tsconfig.json new file mode 100644 index 0000000..fb9aa95 --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@aws-crypto/sha256-browser/tsconfig.json @@ -0,0 +1,10 @@ +{ + "extends": "../tsconfig.json", + "compilerOptions": { + "rootDir": "./src", + "outDir": "./build/main", + "lib": ["dom"], + }, + "include": ["src/**/*.ts"], + "exclude": ["node_modules/**"] +} diff --git a/amplify/functions/downloadDocument/node_modules/@aws-crypto/sha256-browser/tsconfig.module.json 
b/amplify/functions/downloadDocument/node_modules/@aws-crypto/sha256-browser/tsconfig.module.json new file mode 100644 index 0000000..7d0cfdd --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@aws-crypto/sha256-browser/tsconfig.module.json @@ -0,0 +1,7 @@ +{ + "extends": "./tsconfig", + "compilerOptions": { + "outDir": "build/module", + "module": "esnext", + } +} diff --git a/amplify/functions/downloadDocument/node_modules/@aws-crypto/sha256-js/CHANGELOG.md b/amplify/functions/downloadDocument/node_modules/@aws-crypto/sha256-js/CHANGELOG.md new file mode 100644 index 0000000..97c1f60 --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@aws-crypto/sha256-js/CHANGELOG.md @@ -0,0 +1,106 @@ +# Change Log + +All notable changes to this project will be documented in this file. +See [Conventional Commits](https://conventionalcommits.org) for commit guidelines. + +# [5.2.0](https://github.com/aws/aws-sdk-js-crypto-helpers/compare/v5.1.0...v5.2.0) (2023-10-16) + +### Features + +- support ESM artifacts in all packages ([#752](https://github.com/aws/aws-sdk-js-crypto-helpers/issues/752)) ([e930ffb](https://github.com/aws/aws-sdk-js-crypto-helpers/commit/e930ffba5cfef66dd242049e7d514ced232c1e3b)) + +# [5.1.0](https://github.com/aws/aws-sdk-js-crypto-helpers/compare/v5.0.0...v5.1.0) (2023-09-22) + +### Bug Fixes + +- Update tsc to 2.x ([#735](https://github.com/aws/aws-sdk-js-crypto-helpers/issues/735)) ([782e0de](https://github.com/aws/aws-sdk-js-crypto-helpers/commit/782e0de9f5fef41f694130580a69d940894b6b8c)) + +# [5.0.0](https://github.com/aws/aws-sdk-js-crypto-helpers/compare/v4.0.1...v5.0.0) (2023-07-13) + +**Note:** Version bump only for package @aws-crypto/sha256-js + +# [4.0.0](https://github.com/aws/aws-sdk-js-crypto-helpers/compare/v3.0.0...v4.0.0) (2023-02-20) + +**Note:** Version bump only for package @aws-crypto/sha256-js + +# [3.0.0](https://github.com/aws/aws-sdk-js-crypto-helpers/compare/v2.0.2...v3.0.0) (2023-01-12) + +### Bug 
Fixes + +- **docs:** sha256 packages, clarify hmac support ([#455](https://github.com/aws/aws-sdk-js-crypto-helpers/issues/455)) ([1be5043](https://github.com/aws/aws-sdk-js-crypto-helpers/commit/1be5043325991f3f5ccb52a8dd928f004b4d442e)) + +- feat!: replace Hash implementations with Checksum interface (#492) ([da43dc0](https://github.com/aws/aws-sdk-js-crypto-helpers/commit/da43dc0fdf669d9ebb5bfb1b1f7c79e46c4aaae1)), closes [#492](https://github.com/aws/aws-sdk-js-crypto-helpers/issues/492) + +### BREAKING CHANGES + +- All classes that implemented `Hash` now implement `Checksum`. + +## [2.0.2](https://github.com/aws/aws-sdk-js-crypto-helpers/compare/v2.0.1...v2.0.2) (2022-09-07) + +### Bug Fixes + +- **#337:** update @aws-sdk/types ([#373](https://github.com/aws/aws-sdk-js-crypto-helpers/issues/373)) ([b26a811](https://github.com/aws/aws-sdk-js-crypto-helpers/commit/b26a811a392f5209c7ec7e57251500d4d78f97ff)), closes [#337](https://github.com/aws/aws-sdk-js-crypto-helpers/issues/337) + +## [2.0.1](https://github.com/aws/aws-sdk-js-crypto-helpers/compare/v2.0.0...v2.0.1) (2021-12-09) + +**Note:** Version bump only for package @aws-crypto/sha256-js + +# [2.0.0](https://github.com/aws/aws-sdk-js-crypto-helpers/compare/v1.2.2...v2.0.0) (2021-10-25) + +**Note:** Version bump only for package @aws-crypto/sha256-js + +## [1.2.2](https://github.com/aws/aws-sdk-js-crypto-helpers/compare/v1.2.1...v1.2.2) (2021-10-12) + +**Note:** Version bump only for package @aws-crypto/sha256-js + +## [1.2.1](https://github.com/aws/aws-sdk-js-crypto-helpers/compare/v1.2.0...v1.2.1) (2021-09-17) + +**Note:** Version bump only for package @aws-crypto/sha256-js + +# [1.2.0](https://github.com/aws/aws-sdk-js-crypto-helpers/compare/v1.1.1...v1.2.0) (2021-09-17) + +### Features + +- add @aws-crypto/util ([8f489cb](https://github.com/aws/aws-sdk-js-crypto-helpers/commit/8f489cbe4c0e134f826bac66f1bf5172597048b9)) + +# 
[1.1.0](https://github.com/aws/aws-sdk-js-crypto-helpers/compare/@aws-crypto/sha256-js@1.0.0...@aws-crypto/sha256-js@1.1.0) (2021-01-13) + +### Bug Fixes + +- remove package lock ([6002a5a](https://github.com/aws/aws-sdk-js-crypto-helpers/commit/6002a5ab9218dc8798c19dc205d3eebd3bec5b43)) +- **aws-crypto:** export explicit dependencies on [@aws-types](https://github.com/aws-types) ([6a1873a](https://github.com/aws/aws-sdk-js-crypto-helpers/commit/6a1873a4dcc2aaa4a1338595703cfa7099f17b8c)) +- **deps-dev:** move @aws-sdk/types to devDependencies ([#188](https://github.com/aws/aws-sdk-js-crypto-helpers/issues/188)) ([08efdf4](https://github.com/aws/aws-sdk-js-crypto-helpers/commit/08efdf46dcc612d88c441e29945d787f253ee77d)) + +# [1.0.0](https://github.com/aws/aws-sdk-js-crypto-helpers/compare/@aws-crypto/sha256-js@1.0.0-alpha.0...@aws-crypto/sha256-js@1.0.0) (2020-10-22) + +**Note:** Version bump only for package @aws-crypto/sha256-js + +# [1.0.0-alpha.0](https://github.com/aws/aws-sdk-js-crypto-helpers/compare/@aws-crypto/sha256-js@0.1.0-preview.4...@aws-crypto/sha256-js@1.0.0-alpha.0) (2020-02-07) + +**Note:** Version bump only for package @aws-crypto/sha256-js + +# [0.1.0-preview.4](https://github.com/aws/aws-sdk-js-crypto-helpers/compare/@aws-crypto/sha256-js@0.1.0-preview.2...@aws-crypto/sha256-js@0.1.0-preview.4) (2020-01-16) + +### Bug Fixes + +- Changed package.json files to point to the right Git repo ([#9](https://github.com/aws/aws-sdk-js-crypto-helpers/issues/9)) ([028245d](https://github.com/aws/aws-sdk-js-crypto-helpers/commit/028245d72e642ca98d82226afb300eb154503c4a)), closes [#8](https://github.com/aws/aws-sdk-js-crypto-helpers/issues/8) +- es2015.iterable required ([#10](https://github.com/aws/aws-sdk-js-crypto-helpers/issues/10)) ([6e08d83](https://github.com/aws/aws-sdk-js-crypto-helpers/commit/6e08d83c33667ad8cbeeaaa7cedf1bbe05f79ed8)) +- lerna version maintains package-lock ([#14](https://github.com/aws/aws-sdk-js-crypto-helpers/issues/14)) 
([2ef29e1](https://github.com/aws/aws-sdk-js-crypto-helpers/commit/2ef29e13779703a5c9b32e93d18918fcb33b7272)), closes [#13](https://github.com/aws/aws-sdk-js-crypto-helpers/issues/13) + +# [0.1.0-preview.3](https://github.com/aws/aws-sdk-js-crypto-helpers/compare/@aws-crypto/sha256-js@0.1.0-preview.2...@aws-crypto/sha256-js@0.1.0-preview.3) (2019-11-15) + +### Bug Fixes + +- Changed package.json files to point to the right Git repo ([#9](https://github.com/aws/aws-sdk-js-crypto-helpers/issues/9)) ([028245d](https://github.com/aws/aws-sdk-js-crypto-helpers/commit/028245d72e642ca98d82226afb300eb154503c4a)), closes [#8](https://github.com/aws/aws-sdk-js-crypto-helpers/issues/8) +- es2015.iterable required ([#10](https://github.com/aws/aws-sdk-js-crypto-helpers/issues/10)) ([6e08d83](https://github.com/aws/aws-sdk-js-crypto-helpers/commit/6e08d83c33667ad8cbeeaaa7cedf1bbe05f79ed8)) +- lerna version maintains package-lock ([#14](https://github.com/aws/aws-sdk-js-crypto-helpers/issues/14)) ([2ef29e1](https://github.com/aws/aws-sdk-js-crypto-helpers/commit/2ef29e13779703a5c9b32e93d18918fcb33b7272)), closes [#13](https://github.com/aws/aws-sdk-js-crypto-helpers/issues/13) + +# [0.1.0-preview.2](https://github.com/aws/aws-javascript-crypto-helpers/compare/@aws-crypto/sha256-js@0.1.0-preview.1...@aws-crypto/sha256-js@0.1.0-preview.2) (2019-10-30) + +### Bug Fixes + +- remove /src/ from .npmignore (for sourcemaps) ([#5](https://github.com/aws/aws-javascript-crypto-helpers/issues/5)) ([ec52056](https://github.com/aws/aws-javascript-crypto-helpers/commit/ec52056)) + +### Features + +- **sha256-js:** expose synchronous digest ([#7](https://github.com/aws/aws-javascript-crypto-helpers/issues/7)) ([9edaef7](https://github.com/aws/aws-javascript-crypto-helpers/commit/9edaef7)), closes [#6](https://github.com/aws/aws-javascript-crypto-helpers/issues/6) diff --git a/amplify/functions/downloadDocument/node_modules/@aws-crypto/sha256-js/LICENSE 
b/amplify/functions/downloadDocument/node_modules/@aws-crypto/sha256-js/LICENSE new file mode 100644 index 0000000..ad410e1 --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@aws-crypto/sha256-js/LICENSE @@ -0,0 +1,201 @@ +Apache License + Version 2.0, January 2004 + http://www.apache.org/licenses/ + + TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION + + 1. Definitions. + + "License" shall mean the terms and conditions for use, reproduction, + and distribution as defined by Sections 1 through 9 of this document. + + "Licensor" shall mean the copyright owner or entity authorized by + the copyright owner that is granting the License. + + "Legal Entity" shall mean the union of the acting entity and all + other entities that control, are controlled by, or are under common + control with that entity. For the purposes of this definition, + "control" means (i) the power, direct or indirect, to cause the + direction or management of such entity, whether by contract or + otherwise, or (ii) ownership of fifty percent (50%) or more of the + outstanding shares, or (iii) beneficial ownership of such entity. + + "You" (or "Your") shall mean an individual or Legal Entity + exercising permissions granted by this License. + + "Source" form shall mean the preferred form for making modifications, + including but not limited to software source code, documentation + source, and configuration files. + + "Object" form shall mean any form resulting from mechanical + transformation or translation of a Source form, including but + not limited to compiled object code, generated documentation, + and conversions to other media types. + + "Work" shall mean the work of authorship, whether in Source or + Object form, made available under the License, as indicated by a + copyright notice that is included in or attached to the work + (an example is provided in the Appendix below). 
+ + "Derivative Works" shall mean any work, whether in Source or Object + form, that is based on (or derived from) the Work and for which the + editorial revisions, annotations, elaborations, or other modifications + represent, as a whole, an original work of authorship. For the purposes + of this License, Derivative Works shall not include works that remain + separable from, or merely link (or bind by name) to the interfaces of, + the Work and Derivative Works thereof. + + "Contribution" shall mean any work of authorship, including + the original version of the Work and any modifications or additions + to that Work or Derivative Works thereof, that is intentionally + submitted to Licensor for inclusion in the Work by the copyright owner + or by an individual or Legal Entity authorized to submit on behalf of + the copyright owner. For the purposes of this definition, "submitted" + means any form of electronic, verbal, or written communication sent + to the Licensor or its representatives, including but not limited to + communication on electronic mailing lists, source code control systems, + and issue tracking systems that are managed by, or on behalf of, the + Licensor for the purpose of discussing and improving the Work, but + excluding communication that is conspicuously marked or otherwise + designated in writing by the copyright owner as "Not a Contribution." + + "Contributor" shall mean Licensor and any individual or Legal Entity + on behalf of whom a Contribution has been received by Licensor and + subsequently incorporated within the Work. + + 2. Grant of Copyright License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + copyright license to reproduce, prepare Derivative Works of, + publicly display, publicly perform, sublicense, and distribute the + Work and such Derivative Works in Source or Object form. + + 3. 
Grant of Patent License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + (except as stated in this section) patent license to make, have made, + use, offer to sell, sell, import, and otherwise transfer the Work, + where such license applies only to those patent claims licensable + by such Contributor that are necessarily infringed by their + Contribution(s) alone or by combination of their Contribution(s) + with the Work to which such Contribution(s) was submitted. If You + institute patent litigation against any entity (including a + cross-claim or counterclaim in a lawsuit) alleging that the Work + or a Contribution incorporated within the Work constitutes direct + or contributory patent infringement, then any patent licenses + granted to You under this License for that Work shall terminate + as of the date such litigation is filed. + + 4. Redistribution. You may reproduce and distribute copies of the + Work or Derivative Works thereof in any medium, with or without + modifications, and in Source or Object form, provided that You + meet the following conditions: + + (a) You must give any other recipients of the Work or + Derivative Works a copy of this License; and + + (b) You must cause any modified files to carry prominent notices + stating that You changed the files; and + + (c) You must retain, in the Source form of any Derivative Works + that You distribute, all copyright, patent, trademark, and + attribution notices from the Source form of the Work, + excluding those notices that do not pertain to any part of + the Derivative Works; and + + (d) If the Work includes a "NOTICE" text file as part of its + distribution, then any Derivative Works that You distribute must + include a readable copy of the attribution notices contained + within such NOTICE file, excluding those notices that do not + pertain to any part of the Derivative 
Works, in at least one + of the following places: within a NOTICE text file distributed + as part of the Derivative Works; within the Source form or + documentation, if provided along with the Derivative Works; or, + within a display generated by the Derivative Works, if and + wherever such third-party notices normally appear. The contents + of the NOTICE file are for informational purposes only and + do not modify the License. You may add Your own attribution + notices within Derivative Works that You distribute, alongside + or as an addendum to the NOTICE text from the Work, provided + that such additional attribution notices cannot be construed + as modifying the License. + + You may add Your own copyright statement to Your modifications and + may provide additional or different license terms and conditions + for use, reproduction, or distribution of Your modifications, or + for any such Derivative Works as a whole, provided Your use, + reproduction, and distribution of the Work otherwise complies with + the conditions stated in this License. + + 5. Submission of Contributions. Unless You explicitly state otherwise, + any Contribution intentionally submitted for inclusion in the Work + by You to the Licensor shall be under the terms and conditions of + this License, without any additional terms or conditions. + Notwithstanding the above, nothing herein shall supersede or modify + the terms of any separate license agreement you may have executed + with Licensor regarding such Contributions. + + 6. Trademarks. This License does not grant permission to use the trade + names, trademarks, service marks, or product names of the Licensor, + except as required for reasonable and customary use in describing the + origin of the Work and reproducing the content of the NOTICE file. + + 7. Disclaimer of Warranty. 
Unless required by applicable law or + agreed to in writing, Licensor provides the Work (and each + Contributor provides its Contributions) on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or + implied, including, without limitation, any warranties or conditions + of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A + PARTICULAR PURPOSE. You are solely responsible for determining the + appropriateness of using or redistributing the Work and assume any + risks associated with Your exercise of permissions under this License. + + 8. Limitation of Liability. In no event and under no legal theory, + whether in tort (including negligence), contract, or otherwise, + unless required by applicable law (such as deliberate and grossly + negligent acts) or agreed to in writing, shall any Contributor be + liable to You for damages, including any direct, indirect, special, + incidental, or consequential damages of any character arising as a + result of this License or out of the use or inability to use the + Work (including but not limited to damages for loss of goodwill, + work stoppage, computer failure or malfunction, or any and all + other commercial damages or losses), even if such Contributor + has been advised of the possibility of such damages. + + 9. Accepting Warranty or Additional Liability. While redistributing + the Work or Derivative Works thereof, You may choose to offer, + and charge a fee for, acceptance of support, warranty, indemnity, + or other liability obligations and/or rights consistent with this + License. However, in accepting such obligations, You may act only + on Your own behalf and on Your sole responsibility, not on behalf + of any other Contributor, and only if You agree to indemnify, + defend, and hold each Contributor harmless for any liability + incurred by, or claims asserted against, such Contributor by reason + of your accepting any such warranty or additional liability. 
+ + END OF TERMS AND CONDITIONS + + APPENDIX: How to apply the Apache License to your work. + + To apply the Apache License to your work, attach the following + boilerplate notice, with the fields enclosed by brackets "{}" + replaced with your own identifying information. (Don't include + the brackets!) The text should be enclosed in the appropriate + comment syntax for the file format. We also recommend that a + file or class name and description of purpose be included on the + same "printed page" as the copyright notice for easier + identification within third-party archives. + + Copyright {yyyy} {name of copyright owner} + + Licensed under the Apache License, Version 2.0 (the "License"); + you may not use this file except in compliance with the License. + You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + + Unless required by applicable law or agreed to in writing, software + distributed under the License is distributed on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + See the License for the specific language governing permissions and + limitations under the License. \ No newline at end of file diff --git a/amplify/functions/downloadDocument/node_modules/@aws-crypto/sha256-js/README.md b/amplify/functions/downloadDocument/node_modules/@aws-crypto/sha256-js/README.md new file mode 100644 index 0000000..f769f5b --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@aws-crypto/sha256-js/README.md @@ -0,0 +1,29 @@ +# crypto-sha256-js + +A pure JS implementation SHA256. 
+ +## Usage + +- To hash "some data" +``` +import {Sha256} from '@aws-crypto/sha256-js'; + +const hash = new Sha256(); +hash.update('some data'); +const result = await hash.digest(); + +``` + +- To hmac "some data" with "a key" +``` +import {Sha256} from '@aws-crypto/sha256-js'; + +const hash = new Sha256('a key'); +hash.update('some data'); +const result = await hash.digest(); + +``` + +## Test + +`npm test` diff --git a/amplify/functions/downloadDocument/node_modules/@aws-crypto/sha256-js/build/main/RawSha256.d.ts b/amplify/functions/downloadDocument/node_modules/@aws-crypto/sha256-js/build/main/RawSha256.d.ts new file mode 100644 index 0000000..1f580b2 --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@aws-crypto/sha256-js/build/main/RawSha256.d.ts @@ -0,0 +1,17 @@ +/** + * @internal + */ +export declare class RawSha256 { + private state; + private temp; + private buffer; + private bufferLength; + private bytesHashed; + /** + * @internal + */ + finished: boolean; + update(data: Uint8Array): void; + digest(): Uint8Array; + private hashBuffer; +} diff --git a/amplify/functions/downloadDocument/node_modules/@aws-crypto/sha256-js/build/main/RawSha256.js b/amplify/functions/downloadDocument/node_modules/@aws-crypto/sha256-js/build/main/RawSha256.js new file mode 100644 index 0000000..68ceacc --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@aws-crypto/sha256-js/build/main/RawSha256.js @@ -0,0 +1,124 @@ +"use strict"; +Object.defineProperty(exports, "__esModule", { value: true }); +exports.RawSha256 = void 0; +var constants_1 = require("./constants"); +/** + * @internal + */ +var RawSha256 = /** @class */ (function () { + function RawSha256() { + this.state = Int32Array.from(constants_1.INIT); + this.temp = new Int32Array(64); + this.buffer = new Uint8Array(64); + this.bufferLength = 0; + this.bytesHashed = 0; + /** + * @internal + */ + this.finished = false; + } + RawSha256.prototype.update = function (data) { + if (this.finished) 
{ + throw new Error("Attempted to update an already finished hash."); + } + var position = 0; + var byteLength = data.byteLength; + this.bytesHashed += byteLength; + if (this.bytesHashed * 8 > constants_1.MAX_HASHABLE_LENGTH) { + throw new Error("Cannot hash more than 2^53 - 1 bits"); + } + while (byteLength > 0) { + this.buffer[this.bufferLength++] = data[position++]; + byteLength--; + if (this.bufferLength === constants_1.BLOCK_SIZE) { + this.hashBuffer(); + this.bufferLength = 0; + } + } + }; + RawSha256.prototype.digest = function () { + if (!this.finished) { + var bitsHashed = this.bytesHashed * 8; + var bufferView = new DataView(this.buffer.buffer, this.buffer.byteOffset, this.buffer.byteLength); + var undecoratedLength = this.bufferLength; + bufferView.setUint8(this.bufferLength++, 0x80); + // Ensure the final block has enough room for the hashed length + if (undecoratedLength % constants_1.BLOCK_SIZE >= constants_1.BLOCK_SIZE - 8) { + for (var i = this.bufferLength; i < constants_1.BLOCK_SIZE; i++) { + bufferView.setUint8(i, 0); + } + this.hashBuffer(); + this.bufferLength = 0; + } + for (var i = this.bufferLength; i < constants_1.BLOCK_SIZE - 8; i++) { + bufferView.setUint8(i, 0); + } + bufferView.setUint32(constants_1.BLOCK_SIZE - 8, Math.floor(bitsHashed / 0x100000000), true); + bufferView.setUint32(constants_1.BLOCK_SIZE - 4, bitsHashed); + this.hashBuffer(); + this.finished = true; + } + // The value in state is little-endian rather than big-endian, so flip + // each word into a new Uint8Array + var out = new Uint8Array(constants_1.DIGEST_LENGTH); + for (var i = 0; i < 8; i++) { + out[i * 4] = (this.state[i] >>> 24) & 0xff; + out[i * 4 + 1] = (this.state[i] >>> 16) & 0xff; + out[i * 4 + 2] = (this.state[i] >>> 8) & 0xff; + out[i * 4 + 3] = (this.state[i] >>> 0) & 0xff; + } + return out; + }; + RawSha256.prototype.hashBuffer = function () { + var _a = this, buffer = _a.buffer, state = _a.state; + var state0 = state[0], state1 = state[1], state2 = 
state[2], state3 = state[3], state4 = state[4], state5 = state[5], state6 = state[6], state7 = state[7]; + for (var i = 0; i < constants_1.BLOCK_SIZE; i++) { + if (i < 16) { + this.temp[i] = + ((buffer[i * 4] & 0xff) << 24) | + ((buffer[i * 4 + 1] & 0xff) << 16) | + ((buffer[i * 4 + 2] & 0xff) << 8) | + (buffer[i * 4 + 3] & 0xff); + } + else { + var u = this.temp[i - 2]; + var t1_1 = ((u >>> 17) | (u << 15)) ^ ((u >>> 19) | (u << 13)) ^ (u >>> 10); + u = this.temp[i - 15]; + var t2_1 = ((u >>> 7) | (u << 25)) ^ ((u >>> 18) | (u << 14)) ^ (u >>> 3); + this.temp[i] = + ((t1_1 + this.temp[i - 7]) | 0) + ((t2_1 + this.temp[i - 16]) | 0); + } + var t1 = ((((((state4 >>> 6) | (state4 << 26)) ^ + ((state4 >>> 11) | (state4 << 21)) ^ + ((state4 >>> 25) | (state4 << 7))) + + ((state4 & state5) ^ (~state4 & state6))) | + 0) + + ((state7 + ((constants_1.KEY[i] + this.temp[i]) | 0)) | 0)) | + 0; + var t2 = ((((state0 >>> 2) | (state0 << 30)) ^ + ((state0 >>> 13) | (state0 << 19)) ^ + ((state0 >>> 22) | (state0 << 10))) + + ((state0 & state1) ^ (state0 & state2) ^ (state1 & state2))) | + 0; + state7 = state6; + state6 = state5; + state5 = state4; + state4 = (state3 + t1) | 0; + state3 = state2; + state2 = state1; + state1 = state0; + state0 = (t1 + t2) | 0; + } + state[0] += state0; + state[1] += state1; + state[2] += state2; + state[3] += state3; + state[4] += state4; + state[5] += state5; + state[6] += state6; + state[7] += state7; + }; + return RawSha256; +}()); +exports.RawSha256 = RawSha256; +//# sourceMappingURL=RawSha256.js.map \ No newline at end of file diff --git a/amplify/functions/downloadDocument/node_modules/@aws-crypto/sha256-js/build/main/RawSha256.js.map b/amplify/functions/downloadDocument/node_modules/@aws-crypto/sha256-js/build/main/RawSha256.js.map new file mode 100644 index 0000000..81659f5 --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@aws-crypto/sha256-js/build/main/RawSha256.js.map @@ -0,0 +1 @@ 
+{"version":3,"file":"RawSha256.js","sourceRoot":"","sources":["../../src/RawSha256.ts"],"names":[],"mappings":";;;AAAA,yCAMqB;AAErB;;GAEG;AACH;IAAA;QACU,UAAK,GAAe,UAAU,CAAC,IAAI,CAAC,gBAAI,CAAC,CAAC;QAC1C,SAAI,GAAe,IAAI,UAAU,CAAC,EAAE,CAAC,CAAC;QACtC,WAAM,GAAe,IAAI,UAAU,CAAC,EAAE,CAAC,CAAC;QACxC,iBAAY,GAAW,CAAC,CAAC;QACzB,gBAAW,GAAW,CAAC,CAAC;QAEhC;;WAEG;QACH,aAAQ,GAAY,KAAK,CAAC;IA8I5B,CAAC;IA5IC,0BAAM,GAAN,UAAO,IAAgB;QACrB,IAAI,IAAI,CAAC,QAAQ,EAAE;YACjB,MAAM,IAAI,KAAK,CAAC,+CAA+C,CAAC,CAAC;SAClE;QAED,IAAI,QAAQ,GAAG,CAAC,CAAC;QACX,IAAA,UAAU,GAAK,IAAI,WAAT,CAAU;QAC1B,IAAI,CAAC,WAAW,IAAI,UAAU,CAAC;QAE/B,IAAI,IAAI,CAAC,WAAW,GAAG,CAAC,GAAG,+BAAmB,EAAE;YAC9C,MAAM,IAAI,KAAK,CAAC,qCAAqC,CAAC,CAAC;SACxD;QAED,OAAO,UAAU,GAAG,CAAC,EAAE;YACrB,IAAI,CAAC,MAAM,CAAC,IAAI,CAAC,YAAY,EAAE,CAAC,GAAG,IAAI,CAAC,QAAQ,EAAE,CAAC,CAAC;YACpD,UAAU,EAAE,CAAC;YAEb,IAAI,IAAI,CAAC,YAAY,KAAK,sBAAU,EAAE;gBACpC,IAAI,CAAC,UAAU,EAAE,CAAC;gBAClB,IAAI,CAAC,YAAY,GAAG,CAAC,CAAC;aACvB;SACF;IACH,CAAC;IAED,0BAAM,GAAN;QACE,IAAI,CAAC,IAAI,CAAC,QAAQ,EAAE;YAClB,IAAM,UAAU,GAAG,IAAI,CAAC,WAAW,GAAG,CAAC,CAAC;YACxC,IAAM,UAAU,GAAG,IAAI,QAAQ,CAC7B,IAAI,CAAC,MAAM,CAAC,MAAM,EAClB,IAAI,CAAC,MAAM,CAAC,UAAU,EACtB,IAAI,CAAC,MAAM,CAAC,UAAU,CACvB,CAAC;YAEF,IAAM,iBAAiB,GAAG,IAAI,CAAC,YAAY,CAAC;YAC5C,UAAU,CAAC,QAAQ,CAAC,IAAI,CAAC,YAAY,EAAE,EAAE,IAAI,CAAC,CAAC;YAE/C,+DAA+D;YAC/D,IAAI,iBAAiB,GAAG,sBAAU,IAAI,sBAAU,GAAG,CAAC,EAAE;gBACpD,KAAK,IAAI,CAAC,GAAG,IAAI,CAAC,YAAY,EAAE,CAAC,GAAG,sBAAU,EAAE,CAAC,EAAE,EAAE;oBACnD,UAAU,CAAC,QAAQ,CAAC,CAAC,EAAE,CAAC,CAAC,CAAC;iBAC3B;gBACD,IAAI,CAAC,UAAU,EAAE,CAAC;gBAClB,IAAI,CAAC,YAAY,GAAG,CAAC,CAAC;aACvB;YAED,KAAK,IAAI,CAAC,GAAG,IAAI,CAAC,YAAY,EAAE,CAAC,GAAG,sBAAU,GAAG,CAAC,EAAE,CAAC,EAAE,EAAE;gBACvD,UAAU,CAAC,QAAQ,CAAC,CAAC,EAAE,CAAC,CAAC,CAAC;aAC3B;YACD,UAAU,CAAC,SAAS,CAClB,sBAAU,GAAG,CAAC,EACd,IAAI,CAAC,KAAK,CAAC,UAAU,GAAG,WAAW,CAAC,EACpC,IAAI,CACL,CAAC;YACF,UAAU,CAAC,SAAS,CAAC,sBAAU,GAAG,CAAC,EAAE,UAAU,CAAC,CAAC;YAEjD,IAAI,CAAC,UAAU,EAAE,CAAC;YAElB,IAAI,CAAC,QAAQ,GAAG,IAAI,CAAC;SACtB;QAED,s
EAAsE;QACtE,kCAAkC;QAClC,IAAM,GAAG,GAAG,IAAI,UAAU,CAAC,yBAAa,CAAC,CAAC;QAC1C,KAAK,IAAI,CAAC,GAAG,CAAC,EAAE,CAAC,GAAG,CAAC,EAAE,CAAC,EAAE,EAAE;YAC1B,GAAG,CAAC,CAAC,GAAG,CAAC,CAAC,GAAG,CAAC,IAAI,CAAC,KAAK,CAAC,CAAC,CAAC,KAAK,EAAE,CAAC,GAAG,IAAI,CAAC;YAC3C,GAAG,CAAC,CAAC,GAAG,CAAC,GAAG,CAAC,CAAC,GAAG,CAAC,IAAI,CAAC,KAAK,CAAC,CAAC,CAAC,KAAK,EAAE,CAAC,GAAG,IAAI,CAAC;YAC/C,GAAG,CAAC,CAAC,GAAG,CAAC,GAAG,CAAC,CAAC,GAAG,CAAC,IAAI,CAAC,KAAK,CAAC,CAAC,CAAC,KAAK,CAAC,CAAC,GAAG,IAAI,CAAC;YAC9C,GAAG,CAAC,CAAC,GAAG,CAAC,GAAG,CAAC,CAAC,GAAG,CAAC,IAAI,CAAC,KAAK,CAAC,CAAC,CAAC,KAAK,CAAC,CAAC,GAAG,IAAI,CAAC;SAC/C;QAED,OAAO,GAAG,CAAC;IACb,CAAC;IAEO,8BAAU,GAAlB;QACQ,IAAA,KAAoB,IAAI,EAAtB,MAAM,YAAA,EAAE,KAAK,WAAS,CAAC;QAE/B,IAAI,MAAM,GAAG,KAAK,CAAC,CAAC,CAAC,EACnB,MAAM,GAAG,KAAK,CAAC,CAAC,CAAC,EACjB,MAAM,GAAG,KAAK,CAAC,CAAC,CAAC,EACjB,MAAM,GAAG,KAAK,CAAC,CAAC,CAAC,EACjB,MAAM,GAAG,KAAK,CAAC,CAAC,CAAC,EACjB,MAAM,GAAG,KAAK,CAAC,CAAC,CAAC,EACjB,MAAM,GAAG,KAAK,CAAC,CAAC,CAAC,EACjB,MAAM,GAAG,KAAK,CAAC,CAAC,CAAC,CAAC;QAEpB,KAAK,IAAI,CAAC,GAAG,CAAC,EAAE,CAAC,GAAG,sBAAU,EAAE,CAAC,EAAE,EAAE;YACnC,IAAI,CAAC,GAAG,EAAE,EAAE;gBACV,IAAI,CAAC,IAAI,CAAC,CAAC,CAAC;oBACV,CAAC,CAAC,MAAM,CAAC,CAAC,GAAG,CAAC,CAAC,GAAG,IAAI,CAAC,IAAI,EAAE,CAAC;wBAC9B,CAAC,CAAC,MAAM,CAAC,CAAC,GAAG,CAAC,GAAG,CAAC,CAAC,GAAG,IAAI,CAAC,IAAI,EAAE,CAAC;wBAClC,CAAC,CAAC,MAAM,CAAC,CAAC,GAAG,CAAC,GAAG,CAAC,CAAC,GAAG,IAAI,CAAC,IAAI,CAAC,CAAC;wBACjC,CAAC,MAAM,CAAC,CAAC,GAAG,CAAC,GAAG,CAAC,CAAC,GAAG,IAAI,CAAC,CAAC;aAC9B;iBAAM;gBACL,IAAI,CAAC,GAAG,IAAI,CAAC,IAAI,CAAC,CAAC,GAAG,CAAC,CAAC,CAAC;gBACzB,IAAM,IAAE,GACN,CAAC,CAAC,CAAC,KAAK,EAAE,CAAC,GAAG,CAAC,CAAC,IAAI,EAAE,CAAC,CAAC,GAAG,CAAC,CAAC,CAAC,KAAK,EAAE,CAAC,GAAG,CAAC,CAAC,IAAI,EAAE,CAAC,CAAC,GAAG,CAAC,CAAC,KAAK,EAAE,CAAC,CAAC;gBAEnE,CAAC,GAAG,IAAI,CAAC,IAAI,CAAC,CAAC,GAAG,EAAE,CAAC,CAAC;gBACtB,IAAM,IAAE,GACN,CAAC,CAAC,CAAC,KAAK,CAAC,CAAC,GAAG,CAAC,CAAC,IAAI,EAAE,CAAC,CAAC,GAAG,CAAC,CAAC,CAAC,KAAK,EAAE,CAAC,GAAG,CAAC,CAAC,IAAI,EAAE,CAAC,CAAC,GAAG,CAAC,CAAC,KAAK,CAAC,CAAC,CAAC;gBAEjE,IAAI
,CAAC,IAAI,CAAC,CAAC,CAAC;oBACV,CAAC,CAAC,IAAE,GAAG,IAAI,CAAC,IAAI,CAAC,CAAC,GAAG,CAAC,CAAC,CAAC,GAAG,CAAC,CAAC,GAAG,CAAC,CAAC,IAAE,GAAG,IAAI,CAAC,IAAI,CAAC,CAAC,GAAG,EAAE,CAAC,CAAC,GAAG,CAAC,CAAC,CAAC;aAClE;YAED,IAAM,EAAE,GACN,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,MAAM,KAAK,CAAC,CAAC,GAAG,CAAC,MAAM,IAAI,EAAE,CAAC,CAAC;gBACnC,CAAC,CAAC,MAAM,KAAK,EAAE,CAAC,GAAG,CAAC,MAAM,IAAI,EAAE,CAAC,CAAC;gBAClC,CAAC,CAAC,MAAM,KAAK,EAAE,CAAC,GAAG,CAAC,MAAM,IAAI,CAAC,CAAC,CAAC,CAAC;gBAClC,CAAC,CAAC,MAAM,GAAG,MAAM,CAAC,GAAG,CAAC,CAAC,MAAM,GAAG,MAAM,CAAC,CAAC,CAAC;gBACzC,CAAC,CAAC;gBACF,CAAC,CAAC,MAAM,GAAG,CAAC,CAAC,eAAG,CAAC,CAAC,CAAC,GAAG,IAAI,CAAC,IAAI,CAAC,CAAC,CAAC,CAAC,GAAG,CAAC,CAAC,CAAC,GAAG,CAAC,CAAC,CAAC;gBACjD,CAAC,CAAC;YAEJ,IAAM,EAAE,GACN,CAAC,CAAC,CAAC,CAAC,MAAM,KAAK,CAAC,CAAC,GAAG,CAAC,MAAM,IAAI,EAAE,CAAC,CAAC;gBACjC,CAAC,CAAC,MAAM,KAAK,EAAE,CAAC,GAAG,CAAC,MAAM,IAAI,EAAE,CAAC,CAAC;gBAClC,CAAC,CAAC,MAAM,KAAK,EAAE,CAAC,GAAG,CAAC,MAAM,IAAI,EAAE,CAAC,CAAC,CAAC;gBACnC,CAAC,CAAC,MAAM,GAAG,MAAM,CAAC,GAAG,CAAC,MAAM,GAAG,MAAM,CAAC,GAAG,CAAC,MAAM,GAAG,MAAM,CAAC,CAAC,CAAC;gBAC9D,CAAC,CAAC;YAEJ,MAAM,GAAG,MAAM,CAAC;YAChB,MAAM,GAAG,MAAM,CAAC;YAChB,MAAM,GAAG,MAAM,CAAC;YAChB,MAAM,GAAG,CAAC,MAAM,GAAG,EAAE,CAAC,GAAG,CAAC,CAAC;YAC3B,MAAM,GAAG,MAAM,CAAC;YAChB,MAAM,GAAG,MAAM,CAAC;YAChB,MAAM,GAAG,MAAM,CAAC;YAChB,MAAM,GAAG,CAAC,EAAE,GAAG,EAAE,CAAC,GAAG,CAAC,CAAC;SACxB;QAED,KAAK,CAAC,CAAC,CAAC,IAAI,MAAM,CAAC;QACnB,KAAK,CAAC,CAAC,CAAC,IAAI,MAAM,CAAC;QACnB,KAAK,CAAC,CAAC,CAAC,IAAI,MAAM,CAAC;QACnB,KAAK,CAAC,CAAC,CAAC,IAAI,MAAM,CAAC;QACnB,KAAK,CAAC,CAAC,CAAC,IAAI,MAAM,CAAC;QACnB,KAAK,CAAC,CAAC,CAAC,IAAI,MAAM,CAAC;QACnB,KAAK,CAAC,CAAC,CAAC,IAAI,MAAM,CAAC;QACnB,KAAK,CAAC,CAAC,CAAC,IAAI,MAAM,CAAC;IACrB,CAAC;IACH,gBAAC;AAAD,CAAC,AAxJD,IAwJC;AAxJY,8BAAS"} \ No newline at end of file diff --git a/amplify/functions/downloadDocument/node_modules/@aws-crypto/sha256-js/build/main/constants.d.ts b/amplify/functions/downloadDocument/node_modules/@aws-crypto/sha256-js/build/main/constants.d.ts new file mode 100644 
index 0000000..63bd764 --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@aws-crypto/sha256-js/build/main/constants.d.ts @@ -0,0 +1,20 @@ +/** + * @internal + */ +export declare const BLOCK_SIZE: number; +/** + * @internal + */ +export declare const DIGEST_LENGTH: number; +/** + * @internal + */ +export declare const KEY: Uint32Array; +/** + * @internal + */ +export declare const INIT: number[]; +/** + * @internal + */ +export declare const MAX_HASHABLE_LENGTH: number; diff --git a/amplify/functions/downloadDocument/node_modules/@aws-crypto/sha256-js/build/main/constants.js b/amplify/functions/downloadDocument/node_modules/@aws-crypto/sha256-js/build/main/constants.js new file mode 100644 index 0000000..c83aa09 --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@aws-crypto/sha256-js/build/main/constants.js @@ -0,0 +1,98 @@ +"use strict"; +Object.defineProperty(exports, "__esModule", { value: true }); +exports.MAX_HASHABLE_LENGTH = exports.INIT = exports.KEY = exports.DIGEST_LENGTH = exports.BLOCK_SIZE = void 0; +/** + * @internal + */ +exports.BLOCK_SIZE = 64; +/** + * @internal + */ +exports.DIGEST_LENGTH = 32; +/** + * @internal + */ +exports.KEY = new Uint32Array([ + 0x428a2f98, + 0x71374491, + 0xb5c0fbcf, + 0xe9b5dba5, + 0x3956c25b, + 0x59f111f1, + 0x923f82a4, + 0xab1c5ed5, + 0xd807aa98, + 0x12835b01, + 0x243185be, + 0x550c7dc3, + 0x72be5d74, + 0x80deb1fe, + 0x9bdc06a7, + 0xc19bf174, + 0xe49b69c1, + 0xefbe4786, + 0x0fc19dc6, + 0x240ca1cc, + 0x2de92c6f, + 0x4a7484aa, + 0x5cb0a9dc, + 0x76f988da, + 0x983e5152, + 0xa831c66d, + 0xb00327c8, + 0xbf597fc7, + 0xc6e00bf3, + 0xd5a79147, + 0x06ca6351, + 0x14292967, + 0x27b70a85, + 0x2e1b2138, + 0x4d2c6dfc, + 0x53380d13, + 0x650a7354, + 0x766a0abb, + 0x81c2c92e, + 0x92722c85, + 0xa2bfe8a1, + 0xa81a664b, + 0xc24b8b70, + 0xc76c51a3, + 0xd192e819, + 0xd6990624, + 0xf40e3585, + 0x106aa070, + 0x19a4c116, + 0x1e376c08, + 0x2748774c, + 0x34b0bcb5, + 0x391c0cb3, + 0x4ed8aa4a, + 0x5b9cca4f, + 
0x682e6ff3, + 0x748f82ee, + 0x78a5636f, + 0x84c87814, + 0x8cc70208, + 0x90befffa, + 0xa4506ceb, + 0xbef9a3f7, + 0xc67178f2 +]); +/** + * @internal + */ +exports.INIT = [ + 0x6a09e667, + 0xbb67ae85, + 0x3c6ef372, + 0xa54ff53a, + 0x510e527f, + 0x9b05688c, + 0x1f83d9ab, + 0x5be0cd19 +]; +/** + * @internal + */ +exports.MAX_HASHABLE_LENGTH = Math.pow(2, 53) - 1; +//# sourceMappingURL=constants.js.map \ No newline at end of file diff --git a/amplify/functions/downloadDocument/node_modules/@aws-crypto/sha256-js/build/main/constants.js.map b/amplify/functions/downloadDocument/node_modules/@aws-crypto/sha256-js/build/main/constants.js.map new file mode 100644 index 0000000..1132c12 --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@aws-crypto/sha256-js/build/main/constants.js.map @@ -0,0 +1 @@ +{"version":3,"file":"constants.js","sourceRoot":"","sources":["../../src/constants.ts"],"names":[],"mappings":";;;AAAA;;GAEG;AACU,QAAA,UAAU,GAAW,EAAE,CAAC;AAErC;;GAEG;AACU,QAAA,aAAa,GAAW,EAAE,CAAC;AAExC;;GAEG;AACU,QAAA,GAAG,GAAG,IAAI,WAAW,CAAC;IACjC,UAAU;IACV,UAAU;IACV,UAAU;IACV,UAAU;IACV,UAAU;IACV,UAAU;IACV,UAAU;IACV,UAAU;IACV,UAAU;IACV,UAAU;IACV,UAAU;IACV,UAAU;IACV,UAAU;IACV,UAAU;IACV,UAAU;IACV,UAAU;IACV,UAAU;IACV,UAAU;IACV,UAAU;IACV,UAAU;IACV,UAAU;IACV,UAAU;IACV,UAAU;IACV,UAAU;IACV,UAAU;IACV,UAAU;IACV,UAAU;IACV,UAAU;IACV,UAAU;IACV,UAAU;IACV,UAAU;IACV,UAAU;IACV,UAAU;IACV,UAAU;IACV,UAAU;IACV,UAAU;IACV,UAAU;IACV,UAAU;IACV,UAAU;IACV,UAAU;IACV,UAAU;IACV,UAAU;IACV,UAAU;IACV,UAAU;IACV,UAAU;IACV,UAAU;IACV,UAAU;IACV,UAAU;IACV,UAAU;IACV,UAAU;IACV,UAAU;IACV,UAAU;IACV,UAAU;IACV,UAAU;IACV,UAAU;IACV,UAAU;IACV,UAAU;IACV,UAAU;IACV,UAAU;IACV,UAAU;IACV,UAAU;IACV,UAAU;IACV,UAAU;IACV,UAAU;CACX,CAAC,CAAC;AAEH;;GAEG;AACU,QAAA,IAAI,GAAG;IAClB,UAAU;IACV,UAAU;IACV,UAAU;IACV,UAAU;IACV,UAAU;IACV,UAAU;IACV,UAAU;IACV,UAAU;CACX,CAAC;AAEF;;GAEG;AACU,QAAA,mBAAmB,GAAG,SAAA,CAAC,EAAI,EAAE,CAAA,GAAG,CAAC,CAAC"} \ No newline at end of file diff --git 
a/amplify/functions/downloadDocument/node_modules/@aws-crypto/sha256-js/build/main/index.d.ts b/amplify/functions/downloadDocument/node_modules/@aws-crypto/sha256-js/build/main/index.d.ts new file mode 100644 index 0000000..4554d8a --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@aws-crypto/sha256-js/build/main/index.d.ts @@ -0,0 +1 @@ +export * from "./jsSha256"; diff --git a/amplify/functions/downloadDocument/node_modules/@aws-crypto/sha256-js/build/main/index.js b/amplify/functions/downloadDocument/node_modules/@aws-crypto/sha256-js/build/main/index.js new file mode 100644 index 0000000..4329f10 --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@aws-crypto/sha256-js/build/main/index.js @@ -0,0 +1,5 @@ +"use strict"; +Object.defineProperty(exports, "__esModule", { value: true }); +var tslib_1 = require("tslib"); +tslib_1.__exportStar(require("./jsSha256"), exports); +//# sourceMappingURL=index.js.map \ No newline at end of file diff --git a/amplify/functions/downloadDocument/node_modules/@aws-crypto/sha256-js/build/main/index.js.map b/amplify/functions/downloadDocument/node_modules/@aws-crypto/sha256-js/build/main/index.js.map new file mode 100644 index 0000000..9f97d54 --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@aws-crypto/sha256-js/build/main/index.js.map @@ -0,0 +1 @@ +{"version":3,"file":"index.js","sourceRoot":"","sources":["../../src/index.ts"],"names":[],"mappings":";;;AAAA,qDAA2B"} \ No newline at end of file diff --git a/amplify/functions/downloadDocument/node_modules/@aws-crypto/sha256-js/build/main/jsSha256.d.ts b/amplify/functions/downloadDocument/node_modules/@aws-crypto/sha256-js/build/main/jsSha256.d.ts new file mode 100644 index 0000000..d813b25 --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@aws-crypto/sha256-js/build/main/jsSha256.d.ts @@ -0,0 +1,12 @@ +import { Checksum, SourceData } from "@aws-sdk/types"; +export declare class Sha256 implements Checksum { + 
private readonly secret?; + private hash; + private outer?; + private error; + constructor(secret?: SourceData); + update(toHash: SourceData): void; + digestSync(): Uint8Array; + digest(): Promise; + reset(): void; +} diff --git a/amplify/functions/downloadDocument/node_modules/@aws-crypto/sha256-js/build/main/jsSha256.js b/amplify/functions/downloadDocument/node_modules/@aws-crypto/sha256-js/build/main/jsSha256.js new file mode 100644 index 0000000..2a4f2f1 --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@aws-crypto/sha256-js/build/main/jsSha256.js @@ -0,0 +1,85 @@ +"use strict"; +Object.defineProperty(exports, "__esModule", { value: true }); +exports.Sha256 = void 0; +var tslib_1 = require("tslib"); +var constants_1 = require("./constants"); +var RawSha256_1 = require("./RawSha256"); +var util_1 = require("@aws-crypto/util"); +var Sha256 = /** @class */ (function () { + function Sha256(secret) { + this.secret = secret; + this.hash = new RawSha256_1.RawSha256(); + this.reset(); + } + Sha256.prototype.update = function (toHash) { + if ((0, util_1.isEmptyData)(toHash) || this.error) { + return; + } + try { + this.hash.update((0, util_1.convertToBuffer)(toHash)); + } + catch (e) { + this.error = e; + } + }; + /* This synchronous method keeps compatibility + * with the v2 aws-sdk. + */ + Sha256.prototype.digestSync = function () { + if (this.error) { + throw this.error; + } + if (this.outer) { + if (!this.outer.finished) { + this.outer.update(this.hash.digest()); + } + return this.outer.digest(); + } + return this.hash.digest(); + }; + /* The underlying digest method here is synchronous. + * To keep the same interface with the other hash functions + * the default is to expose this as an async method. + * However, it can sometimes be useful to have a sync method. 
+ */ + Sha256.prototype.digest = function () { + return tslib_1.__awaiter(this, void 0, void 0, function () { + return tslib_1.__generator(this, function (_a) { + return [2 /*return*/, this.digestSync()]; + }); + }); + }; + Sha256.prototype.reset = function () { + this.hash = new RawSha256_1.RawSha256(); + if (this.secret) { + this.outer = new RawSha256_1.RawSha256(); + var inner = bufferFromSecret(this.secret); + var outer = new Uint8Array(constants_1.BLOCK_SIZE); + outer.set(inner); + for (var i = 0; i < constants_1.BLOCK_SIZE; i++) { + inner[i] ^= 0x36; + outer[i] ^= 0x5c; + } + this.hash.update(inner); + this.outer.update(outer); + // overwrite the copied key in memory + for (var i = 0; i < inner.byteLength; i++) { + inner[i] = 0; + } + } + }; + return Sha256; +}()); +exports.Sha256 = Sha256; +function bufferFromSecret(secret) { + var input = (0, util_1.convertToBuffer)(secret); + if (input.byteLength > constants_1.BLOCK_SIZE) { + var bufferHash = new RawSha256_1.RawSha256(); + bufferHash.update(input); + input = bufferHash.digest(); + } + var buffer = new Uint8Array(constants_1.BLOCK_SIZE); + buffer.set(input); + return buffer; +} +//# sourceMappingURL=jsSha256.js.map \ No newline at end of file diff --git a/amplify/functions/downloadDocument/node_modules/@aws-crypto/sha256-js/build/main/jsSha256.js.map b/amplify/functions/downloadDocument/node_modules/@aws-crypto/sha256-js/build/main/jsSha256.js.map new file mode 100644 index 0000000..c34eb36 --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@aws-crypto/sha256-js/build/main/jsSha256.js.map @@ -0,0 +1 @@ 
+{"version":3,"file":"jsSha256.js","sourceRoot":"","sources":["../../src/jsSha256.ts"],"names":[],"mappings":";;;;AAAA,yCAAyC;AACzC,yCAAwC;AAExC,yCAAgE;AAEhE;IAME,gBAAY,MAAmB;QAC7B,IAAI,CAAC,MAAM,GAAG,MAAM,CAAC;QACrB,IAAI,CAAC,IAAI,GAAG,IAAI,qBAAS,EAAE,CAAC;QAC5B,IAAI,CAAC,KAAK,EAAE,CAAC;IACf,CAAC;IAED,uBAAM,GAAN,UAAO,MAAkB;QACvB,IAAI,IAAA,kBAAW,EAAC,MAAM,CAAC,IAAI,IAAI,CAAC,KAAK,EAAE;YACrC,OAAO;SACR;QAED,IAAI;YACF,IAAI,CAAC,IAAI,CAAC,MAAM,CAAC,IAAA,sBAAe,EAAC,MAAM,CAAC,CAAC,CAAC;SAC3C;QAAC,OAAO,CAAC,EAAE;YACV,IAAI,CAAC,KAAK,GAAG,CAAC,CAAC;SAChB;IACH,CAAC;IAED;;OAEG;IACH,2BAAU,GAAV;QACE,IAAI,IAAI,CAAC,KAAK,EAAE;YACd,MAAM,IAAI,CAAC,KAAK,CAAC;SAClB;QAED,IAAI,IAAI,CAAC,KAAK,EAAE;YACd,IAAI,CAAC,IAAI,CAAC,KAAK,CAAC,QAAQ,EAAE;gBACxB,IAAI,CAAC,KAAK,CAAC,MAAM,CAAC,IAAI,CAAC,IAAI,CAAC,MAAM,EAAE,CAAC,CAAC;aACvC;YAED,OAAO,IAAI,CAAC,KAAK,CAAC,MAAM,EAAE,CAAC;SAC5B;QAED,OAAO,IAAI,CAAC,IAAI,CAAC,MAAM,EAAE,CAAC;IAC5B,CAAC;IAED;;;;OAIG;IACG,uBAAM,GAAZ;;;gBACE,sBAAO,IAAI,CAAC,UAAU,EAAE,EAAC;;;KAC1B;IAED,sBAAK,GAAL;QACE,IAAI,CAAC,IAAI,GAAG,IAAI,qBAAS,EAAE,CAAC;QAC5B,IAAI,IAAI,CAAC,MAAM,EAAE;YACf,IAAI,CAAC,KAAK,GAAG,IAAI,qBAAS,EAAE,CAAC;YAC7B,IAAM,KAAK,GAAG,gBAAgB,CAAC,IAAI,CAAC,MAAM,CAAC,CAAC;YAC5C,IAAM,KAAK,GAAG,IAAI,UAAU,CAAC,sBAAU,CAAC,CAAC;YACzC,KAAK,CAAC,GAAG,CAAC,KAAK,CAAC,CAAC;YAEjB,KAAK,IAAI,CAAC,GAAG,CAAC,EAAE,CAAC,GAAG,sBAAU,EAAE,CAAC,EAAE,EAAE;gBACnC,KAAK,CAAC,CAAC,CAAC,IAAI,IAAI,CAAC;gBACjB,KAAK,CAAC,CAAC,CAAC,IAAI,IAAI,CAAC;aAClB;YAED,IAAI,CAAC,IAAI,CAAC,MAAM,CAAC,KAAK,CAAC,CAAC;YACxB,IAAI,CAAC,KAAK,CAAC,MAAM,CAAC,KAAK,CAAC,CAAC;YAEzB,qCAAqC;YACrC,KAAK,IAAI,CAAC,GAAG,CAAC,EAAE,CAAC,GAAG,KAAK,CAAC,UAAU,EAAE,CAAC,EAAE,EAAE;gBACzC,KAAK,CAAC,CAAC,CAAC,GAAG,CAAC,CAAC;aACd;SACF;IACH,CAAC;IACH,aAAC;AAAD,CAAC,AA1ED,IA0EC;AA1EY,wBAAM;AA4EnB,SAAS,gBAAgB,CAAC,MAAkB;IAC1C,IAAI,KAAK,GAAG,IAAA,sBAAe,EAAC,MAAM,CAAC,CAAC;IAEpC,IAAI,KAAK,CAAC,UAAU,GAAG,sBAAU,EAAE;QACjC,IAAM,UAAU,GAAG,IAAI,qBAAS,EAAE,CAAC;QACnC,UAAU,CAAC,MAAM,CAAC,KAAK,CAAC,CAAC;QACzB,KAAK,GAAG,UAAU,CAAC,MAAM,EAAE,CAAC;KAC
7B;IAED,IAAM,MAAM,GAAG,IAAI,UAAU,CAAC,sBAAU,CAAC,CAAC;IAC1C,MAAM,CAAC,GAAG,CAAC,KAAK,CAAC,CAAC;IAClB,OAAO,MAAM,CAAC;AAChB,CAAC"} \ No newline at end of file diff --git a/amplify/functions/downloadDocument/node_modules/@aws-crypto/sha256-js/build/main/knownHashes.fixture.d.ts b/amplify/functions/downloadDocument/node_modules/@aws-crypto/sha256-js/build/main/knownHashes.fixture.d.ts new file mode 100644 index 0000000..d880343 --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@aws-crypto/sha256-js/build/main/knownHashes.fixture.d.ts @@ -0,0 +1,5 @@ +export declare const hashTestVectors: Array<[Uint8Array, Uint8Array]>; +/** + * @see https://tools.ietf.org/html/rfc4231 + */ +export declare const hmacTestVectors: Array<[Uint8Array, Uint8Array, Uint8Array]>; diff --git a/amplify/functions/downloadDocument/node_modules/@aws-crypto/sha256-js/build/main/knownHashes.fixture.js b/amplify/functions/downloadDocument/node_modules/@aws-crypto/sha256-js/build/main/knownHashes.fixture.js new file mode 100644 index 0000000..3f0dd2f --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@aws-crypto/sha256-js/build/main/knownHashes.fixture.js @@ -0,0 +1,322 @@ +"use strict"; +Object.defineProperty(exports, "__esModule", { value: true }); +exports.hmacTestVectors = exports.hashTestVectors = void 0; +var util_hex_encoding_1 = require("@aws-sdk/util-hex-encoding"); +var millionChars = new Uint8Array(1000000); +for (var i = 0; i < 1000000; i++) { + millionChars[i] = 97; +} +exports.hashTestVectors = [ + [ + Uint8Array.from([97, 98, 99]), + (0, util_hex_encoding_1.fromHex)("ba7816bf8f01cfea414140de5dae2223b00361a396177a9cb410ff61f20015ad") + ], + [ + new Uint8Array(0), + (0, util_hex_encoding_1.fromHex)("e3b0c44298fc1c149afbf4c8996fb92427ae41e4649b934ca495991b7852b855") + ], + [ + (0, util_hex_encoding_1.fromHex)("61"), + (0, util_hex_encoding_1.fromHex)("ca978112ca1bbdcafac231b39a23dc4da786eff8147c4e72b9807785afee48bb") + ], + [ + (0, 
util_hex_encoding_1.fromHex)("6161"), + (0, util_hex_encoding_1.fromHex)("961b6dd3ede3cb8ecbaacbd68de040cd78eb2ed5889130cceb4c49268ea4d506") + ], + [ + (0, util_hex_encoding_1.fromHex)("616161"), + (0, util_hex_encoding_1.fromHex)("9834876dcfb05cb167a5c24953eba58c4ac89b1adf57f28f2f9d09af107ee8f0") + ], + [ + (0, util_hex_encoding_1.fromHex)("61616161"), + (0, util_hex_encoding_1.fromHex)("61be55a8e2f6b4e172338bddf184d6dbee29c98853e0a0485ecee7f27b9af0b4") + ], + [ + (0, util_hex_encoding_1.fromHex)("6161616161"), + (0, util_hex_encoding_1.fromHex)("ed968e840d10d2d313a870bc131a4e2c311d7ad09bdf32b3418147221f51a6e2") + ], + [ + (0, util_hex_encoding_1.fromHex)("616161616161"), + (0, util_hex_encoding_1.fromHex)("ed02457b5c41d964dbd2f2a609d63fe1bb7528dbe55e1abf5b52c249cd735797") + ], + [ + (0, util_hex_encoding_1.fromHex)("61616161616161"), + (0, util_hex_encoding_1.fromHex)("e46240714b5db3a23eee60479a623efba4d633d27fe4f03c904b9e219a7fbe60") + ], + [ + (0, util_hex_encoding_1.fromHex)("6161616161616161"), + (0, util_hex_encoding_1.fromHex)("1f3ce40415a2081fa3eee75fc39fff8e56c22270d1a978a7249b592dcebd20b4") + ], + [ + (0, util_hex_encoding_1.fromHex)("616161616161616161"), + (0, util_hex_encoding_1.fromHex)("f2aca93b80cae681221f0445fa4e2cae8a1f9f8fa1e1741d9639caad222f537d") + ], + [ + (0, util_hex_encoding_1.fromHex)("61616161616161616161"), + (0, util_hex_encoding_1.fromHex)("bf2cb58a68f684d95a3b78ef8f661c9a4e5b09e82cc8f9cc88cce90528caeb27") + ], + [ + (0, util_hex_encoding_1.fromHex)("6161616161616161616161"), + (0, util_hex_encoding_1.fromHex)("28cb017dfc99073aa1b47c1b30f413e3ce774c4991eb4158de50f9dbb36d8043") + ], + [ + (0, util_hex_encoding_1.fromHex)("616161616161616161616161"), + (0, util_hex_encoding_1.fromHex)("f24abc34b13fade76e805799f71187da6cd90b9cac373ae65ed57f143bd664e5") + ], + [ + (0, util_hex_encoding_1.fromHex)("61616161616161616161616161"), + (0, util_hex_encoding_1.fromHex)("a689d786e81340e45511dec6c7ab2d978434e5db123362450fe10cfac70d19d0") + ], + [ + 
(0, util_hex_encoding_1.fromHex)("6161616161616161616161616161"), + (0, util_hex_encoding_1.fromHex)("82cab7df0abfb9d95dca4e5937ce2968c798c726fea48c016bf9763221efda13") + ], + [ + (0, util_hex_encoding_1.fromHex)("616161616161616161616161616161"), + (0, util_hex_encoding_1.fromHex)("ef2df0b539c6c23de0f4cbe42648c301ae0e22e887340a4599fb4ef4e2678e48") + ], + [ + (0, util_hex_encoding_1.fromHex)("61616161616161616161616161616161"), + (0, util_hex_encoding_1.fromHex)("0c0beacef8877bbf2416eb00f2b5dc96354e26dd1df5517320459b1236860f8c") + ], + [ + (0, util_hex_encoding_1.fromHex)("6161616161616161616161616161616161"), + (0, util_hex_encoding_1.fromHex)("b860666ee2966dd8f903be44ee605c6e1366f926d9f17a8f49937d11624eb99d") + ], + [ + (0, util_hex_encoding_1.fromHex)("616161616161616161616161616161616161"), + (0, util_hex_encoding_1.fromHex)("c926defaaa3d13eda2fc63a553bb7fb7326bece6e7cb67ca5296e4727d89bab4") + ], + [ + (0, util_hex_encoding_1.fromHex)("61616161616161616161616161616161616161"), + (0, util_hex_encoding_1.fromHex)("a0b4aaab8a966e2193ba172d68162c4656860197f256b5f45f0203397ff3f99c") + ], + [ + (0, util_hex_encoding_1.fromHex)("6161616161616161616161616161616161616161"), + (0, util_hex_encoding_1.fromHex)("42492da06234ad0ac76f5d5debdb6d1ae027cffbe746a1c13b89bb8bc0139137") + ], + [ + (0, util_hex_encoding_1.fromHex)("616161616161616161616161616161616161616161"), + (0, util_hex_encoding_1.fromHex)("7df8e299c834de198e264c3e374bc58ecd9382252a705c183beb02f275571e3b") + ], + [ + (0, util_hex_encoding_1.fromHex)("61616161616161616161616161616161616161616161"), + (0, util_hex_encoding_1.fromHex)("ec7c494df6d2a7ea36668d656e6b8979e33641bfea378c15038af3964db057a3") + ], + [ + (0, util_hex_encoding_1.fromHex)("6161616161616161616161616161616161616161616161"), + (0, util_hex_encoding_1.fromHex)("897d3e95b65f26676081f8b9f3a98b6ee4424566303e8d4e7c7522ebae219eab") + ], + [ + (0, util_hex_encoding_1.fromHex)("616161616161616161616161616161616161616161616161"), + (0, 
util_hex_encoding_1.fromHex)("09f61f8d9cd65e6a0c258087c485b6293541364e42bd97b2d7936580c8aa3c54") + ], + [ + (0, util_hex_encoding_1.fromHex)("61616161616161616161616161616161616161616161616161"), + (0, util_hex_encoding_1.fromHex)("2f521e2a7d0bd812cbc035f4ed6806eb8d851793b04ba147e8f66b72f5d1f20f") + ], + [ + (0, util_hex_encoding_1.fromHex)("6161616161616161616161616161616161616161616161616161"), + (0, util_hex_encoding_1.fromHex)("9976d549a25115dab4e36d0c1fb8f31cb07da87dd83275977360eb7dc09e88de") + ], + [ + (0, util_hex_encoding_1.fromHex)("616161616161616161616161616161616161616161616161616161"), + (0, util_hex_encoding_1.fromHex)("cc0616e61cbd6e8e5e34e9fb2d320f37de915820206f5696c31f1fbd24aa16de") + ], + [ + (0, util_hex_encoding_1.fromHex)("61616161616161616161616161616161616161616161616161616161"), + (0, util_hex_encoding_1.fromHex)("9c547cb8115a44883b9f70ba68f75117cd55359c92611875e386f8af98c172ab") + ], + [ + (0, util_hex_encoding_1.fromHex)("6161616161616161616161616161616161616161616161616161616161"), + (0, util_hex_encoding_1.fromHex)("6913c9c7fd42fe23df8b6bcd4dbaf1c17748948d97f2980b432319c39eddcf6c") + ], + [ + (0, util_hex_encoding_1.fromHex)("616161616161616161616161616161616161616161616161616161616161"), + (0, util_hex_encoding_1.fromHex)("3a54fc0cbc0b0ef48b6507b7788096235d10292dd3ae24e22f5aa062d4f9864a") + ], + [ + (0, util_hex_encoding_1.fromHex)("61616161616161616161616161616161616161616161616161616161616161"), + (0, util_hex_encoding_1.fromHex)("61c60b487d1a921e0bcc9bf853dda0fb159b30bf57b2e2d2c753b00be15b5a09") + ], + [ + (0, util_hex_encoding_1.fromHex)("6161616161616161616161616161616161616161616161616161616161616161"), + (0, util_hex_encoding_1.fromHex)("3ba3f5f43b92602683c19aee62a20342b084dd5971ddd33808d81a328879a547") + ], + [ + (0, util_hex_encoding_1.fromHex)("616161616161616161616161616161616161616161616161616161616161616161"), + (0, util_hex_encoding_1.fromHex)("852785c805c77e71a22340a54e9d95933ed49121e7d2bf3c2d358854bc1359ea") + ], + [ + 
(0, util_hex_encoding_1.fromHex)("61616161616161616161616161616161616161616161616161616161616161616161"), + (0, util_hex_encoding_1.fromHex)("a27c896c4859204843166af66f0e902b9c3b3ed6d2fd13d435abc020065c526f") + ], + [ + (0, util_hex_encoding_1.fromHex)("6161616161616161616161616161616161616161616161616161616161616161616161"), + (0, util_hex_encoding_1.fromHex)("629362afc62c74497caed2272e30f8125ecd0965f8d8d7cfc4e260f7f8dd319d") + ], + [ + (0, util_hex_encoding_1.fromHex)("616161616161616161616161616161616161616161616161616161616161616161616161"), + (0, util_hex_encoding_1.fromHex)("22c1d24bcd03e9aee9832efccd6da613fc702793178e5f12c945c7b67ddda933") + ], + [ + (0, util_hex_encoding_1.fromHex)("61616161616161616161616161616161616161616161616161616161616161616161616161"), + (0, util_hex_encoding_1.fromHex)("21ec055b38ce759cd4d0f477e9bdec2c5b8199945db4439bae334a964df6246c") + ], + [ + (0, util_hex_encoding_1.fromHex)("6161616161616161616161616161616161616161616161616161616161616161616161616161"), + (0, util_hex_encoding_1.fromHex)("365a9c3e2c2af0a56e47a9dac51c2c5381bf8f41273bad3175e0e619126ad087") + ], + [ + (0, util_hex_encoding_1.fromHex)("616161616161616161616161616161616161616161616161616161616161616161616161616161"), + (0, util_hex_encoding_1.fromHex)("b4d5e56e929ba4cda349e9274e3603d0be246b82016bca20f363963c5f2d6845") + ], + [ + (0, util_hex_encoding_1.fromHex)("61616161616161616161616161616161616161616161616161616161616161616161616161616161"), + (0, util_hex_encoding_1.fromHex)("e33cdf9c7f7120b98e8c78408953e07f2ecd183006b5606df349b4c212acf43e") + ], + [ + (0, util_hex_encoding_1.fromHex)("6161616161616161616161616161616161616161616161616161616161616161616161616161616161"), + (0, util_hex_encoding_1.fromHex)("c0f8bd4dbc2b0c03107c1c37913f2a7501f521467f45dd0fef6958e9a4692719") + ], + [ + (0, util_hex_encoding_1.fromHex)("616161616161616161616161616161616161616161616161616161616161616161616161616161616161"), + (0, 
util_hex_encoding_1.fromHex)("7a538607fdaab9296995929f451565bbb8142e1844117322aafd2b3d76b01aff") + ], + [ + (0, util_hex_encoding_1.fromHex)("61616161616161616161616161616161616161616161616161616161616161616161616161616161616161"), + (0, util_hex_encoding_1.fromHex)("66d34fba71f8f450f7e45598853e53bfc23bbd129027cbb131a2f4ffd7878cd0") + ], + [ + (0, util_hex_encoding_1.fromHex)("6161616161616161616161616161616161616161616161616161616161616161616161616161616161616161"), + (0, util_hex_encoding_1.fromHex)("16849877c6c21ef0bfa68e4f6747300ddb171b170b9f00e189edc4c2fc4db93e") + ], + [ + (0, util_hex_encoding_1.fromHex)("616161616161616161616161616161616161616161616161616161616161616161616161616161616161616161"), + (0, util_hex_encoding_1.fromHex)("52789e3423b72beeb898456a4f49662e46b0cbb960784c5ef4b1399d327e7c27") + ], + [ + (0, util_hex_encoding_1.fromHex)("61616161616161616161616161616161616161616161616161616161616161616161616161616161616161616161"), + (0, util_hex_encoding_1.fromHex)("6643110c5628fff59edf76d82d5bf573bf800f16a4d65dfb1e5d6f1a46296d0b") + ], + [ + (0, util_hex_encoding_1.fromHex)("6161616161616161616161616161616161616161616161616161616161616161616161616161616161616161616161"), + (0, util_hex_encoding_1.fromHex)("11eaed932c6c6fddfc2efc394e609facf4abe814fc6180d03b14fce13a07d0e5") + ], + [ + (0, util_hex_encoding_1.fromHex)("616161616161616161616161616161616161616161616161616161616161616161616161616161616161616161616161"), + (0, util_hex_encoding_1.fromHex)("97daac0ee9998dfcad6c9c0970da5ca411c86233a944c25b47566f6a7bc1ddd5") + ], + [ + (0, util_hex_encoding_1.fromHex)("61616161616161616161616161616161616161616161616161616161616161616161616161616161616161616161616161"), + (0, util_hex_encoding_1.fromHex)("8f9bec6a62dd28ebd36d1227745592de6658b36974a3bb98a4c582f683ea6c42") + ], + [ + (0, util_hex_encoding_1.fromHex)("6161616161616161616161616161616161616161616161616161616161616161616161616161616161616161616161616161"), + (0, 
util_hex_encoding_1.fromHex)("160b4e433e384e05e537dc59b467f7cb2403f0214db15c5db58862a3f1156d2e") + ], + [ + (0, util_hex_encoding_1.fromHex)("616161616161616161616161616161616161616161616161616161616161616161616161616161616161616161616161616161"), + (0, util_hex_encoding_1.fromHex)("bfc5fe0e360152ca98c50fab4ed7e3078c17debc2917740d5000913b686ca129") + ], + [ + (0, util_hex_encoding_1.fromHex)("61616161616161616161616161616161616161616161616161616161616161616161616161616161616161616161616161616161"), + (0, util_hex_encoding_1.fromHex)("6c1b3dc7a706b9dc81352a6716b9c666c608d8626272c64b914ab05572fc6e84") + ], + [ + (0, util_hex_encoding_1.fromHex)("6161616161616161616161616161616161616161616161616161616161616161616161616161616161616161616161616161616161"), + (0, util_hex_encoding_1.fromHex)("abe346a7259fc90b4c27185419628e5e6af6466b1ae9b5446cac4bfc26cf05c4") + ], + [ + (0, util_hex_encoding_1.fromHex)("616161616161616161616161616161616161616161616161616161616161616161616161616161616161616161616161616161616161"), + (0, util_hex_encoding_1.fromHex)("a3f01b6939256127582ac8ae9fb47a382a244680806a3f613a118851c1ca1d47") + ], + [ + (0, util_hex_encoding_1.fromHex)("61616161616161616161616161616161616161616161616161616161616161616161616161616161616161616161616161616161616161"), + (0, util_hex_encoding_1.fromHex)("9f4390f8d30c2dd92ec9f095b65e2b9ae9b0a925a5258e241c9f1e910f734318") + ], + [ + (0, util_hex_encoding_1.fromHex)("6161616161616161616161616161616161616161616161616161616161616161616161616161616161616161616161616161616161616161"), + (0, util_hex_encoding_1.fromHex)("b35439a4ac6f0948b6d6f9e3c6af0f5f590ce20f1bde7090ef7970686ec6738a") + ], + [ + (0, util_hex_encoding_1.fromHex)("616161616161616161616161616161616161616161616161616161616161616161616161616161616161616161616161616161616161616161"), + (0, util_hex_encoding_1.fromHex)("f13b2d724659eb3bf47f2dd6af1accc87b81f09f59f2b75e5c0bed6589dfe8c6") + ], + [ + (0, 
util_hex_encoding_1.fromHex)("61616161616161616161616161616161616161616161616161616161616161616161616161616161616161616161616161616161616161616161"), + (0, util_hex_encoding_1.fromHex)("d5c039b748aa64665782974ec3dc3025c042edf54dcdc2b5de31385b094cb678") + ], + [ + (0, util_hex_encoding_1.fromHex)("6161616161616161616161616161616161616161616161616161616161616161616161616161616161616161616161616161616161616161616161"), + (0, util_hex_encoding_1.fromHex)("111bb261277afd65f0744b247cd3e47d386d71563d0ed995517807d5ebd4fba3") + ], + [ + (0, util_hex_encoding_1.fromHex)("616161616161616161616161616161616161616161616161616161616161616161616161616161616161616161616161616161616161616161616161"), + (0, util_hex_encoding_1.fromHex)("11ee391211c6256460b6ed375957fadd8061cafbb31daf967db875aebd5aaad4") + ], + [ + (0, util_hex_encoding_1.fromHex)("61616161616161616161616161616161616161616161616161616161616161616161616161616161616161616161616161616161616161616161616161"), + (0, util_hex_encoding_1.fromHex)("35d5fc17cfbbadd00f5e710ada39f194c5ad7c766ad67072245f1fad45f0f530") + ], + [ + (0, util_hex_encoding_1.fromHex)("6161616161616161616161616161616161616161616161616161616161616161616161616161616161616161616161616161616161616161616161616161"), + (0, util_hex_encoding_1.fromHex)("f506898cc7c2e092f9eb9fadae7ba50383f5b46a2a4fe5597dbb553a78981268") + ], + [ + (0, util_hex_encoding_1.fromHex)("616161616161616161616161616161616161616161616161616161616161616161616161616161616161616161616161616161616161616161616161616161"), + (0, util_hex_encoding_1.fromHex)("7d3e74a05d7db15bce4ad9ec0658ea98e3f06eeecf16b4c6fff2da457ddc2f34") + ], + [ + (0, util_hex_encoding_1.fromHex)("61616161616161616161616161616161616161616161616161616161616161616161616161616161616161616161616161616161616161616161616161616161"), + (0, util_hex_encoding_1.fromHex)("ffe054fe7ae0cb6dc65c3af9b61d5209f439851db43d0ba5997337df154668eb") + ], + [ + (0, 
util_hex_encoding_1.fromHex)("de188941a3375d3a8a061e67576e926dc71a7fa3f0cceb97452b4d3227965f9ea8cc75076d9fb9c5417aa5cb30fc22198b34982dbb629e"), + (0, util_hex_encoding_1.fromHex)("038051e9c324393bd1ca1978dd0952c2aa3742ca4f1bd5cd4611cea83892d382") + ], + [ + millionChars, + (0, util_hex_encoding_1.fromHex)("cdc76e5c9914fb9281a1c7e284d73e67f1809a48a497200e046d39ccc7112cd0") + ], + [ + (0, util_hex_encoding_1.fromHex)("aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa"), + (0, util_hex_encoding_1.fromHex)("45ad4b37c6e2fc0a2cfcc1b5da524132ec707615c2cae1dbbc43c97aa521db81") + ] +]; +/** + * @see https://tools.ietf.org/html/rfc4231 + */ +exports.hmacTestVectors = [ + [ + (0, util_hex_encoding_1.fromHex)("0b0b0b0b0b0b0b0b0b0b0b0b0b0b0b0b0b0b0b0b"), + (0, util_hex_encoding_1.fromHex)("4869205468657265"), + (0, util_hex_encoding_1.fromHex)("b0344c61d8db38535ca8afceaf0bf12b881dc200c9833da726e9376c2e32cff7") + ], + [ + (0, util_hex_encoding_1.fromHex)("4a656665"), + (0, util_hex_encoding_1.fromHex)("7768617420646f2079612077616e7420666f72206e6f7468696e673f"), + (0, util_hex_encoding_1.fromHex)("5bdcc146bf60754e6a042426089575c75a003f089d2739839dec58b964ec3843") + ], + [ + (0, util_hex_encoding_1.fromHex)("aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa"), + (0, util_hex_encoding_1.fromHex)("dddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddd"), + (0, util_hex_encoding_1.fromHex)("773ea91e36800e46854db8ebd09181a72959098b3ef8c122d9635514ced565fe") + ], + [ + (0, util_hex_encoding_1.fromHex)("0102030405060708090a0b0c0d0e0f10111213141516171819"), + (0, util_hex_encoding_1.fromHex)("cdcdcdcdcdcdcdcdcdcdcdcdcdcdcdcdcdcdcdcdcdcdcdcdcdcdcdcdcdcdcdcdcdcdcdcdcdcdcdcdcdcdcdcdcdcdcdcdcdcd"), + (0, 
util_hex_encoding_1.fromHex)("82558a389a443c0ea4cc819899f2083a85f0faa3e578f8077a2e3ff46729665b") + ], + [ + (0, util_hex_encoding_1.fromHex)("aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa"), + (0, util_hex_encoding_1.fromHex)("54657374205573696e67204c6172676572205468616e20426c6f636b2d53697a65204b6579202d2048617368204b6579204669727374"), + (0, util_hex_encoding_1.fromHex)("60e431591ee0b67f0d8a26aacbf5b77f8e0bc6213728c5140546040f0ee37f54") + ], + [ + (0, util_hex_encoding_1.fromHex)("aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa"), + (0, util_hex_encoding_1.fromHex)("5468697320697320612074657374207573696e672061206c6172676572207468616e20626c6f636b2d73697a65206b657920616e642061206c6172676572207468616e20626c6f636b2d73697a6520646174612e20546865206b6579206e6565647320746f20626520686173686564206265666f7265206265696e6720757365642062792074686520484d414320616c676f726974686d2e"), + (0, util_hex_encoding_1.fromHex)("9b09ffa71b942fcb27635fbcd5b0e944bfdc63644f0713938a7f51535c3a35e2") + ] +]; +//# sourceMappingURL=knownHashes.fixture.js.map \ No newline at end of file diff --git a/amplify/functions/downloadDocument/node_modules/@aws-crypto/sha256-js/build/main/knownHashes.fixture.js.map b/amplify/functions/downloadDocument/node_modules/@aws-crypto/sha256-js/build/main/knownHashes.fixture.js.map new file mode 100644 index 0000000..8ffc02e --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@aws-crypto/sha256-js/build/main/knownHashes.fixture.js.map @@ -0,0 +1 @@ 
+{"version":3,"file":"knownHashes.fixture.js","sourceRoot":"","sources":["../../src/knownHashes.fixture.ts"],"names":[],"mappings":";;;AAAA,gEAAqD;AAErD,IAAM,YAAY,GAAG,IAAI,UAAU,CAAC,OAAO,CAAC,CAAC;AAC7C,KAAK,IAAI,CAAC,GAAG,CAAC,EAAE,CAAC,GAAG,OAAO,EAAE,CAAC,EAAE,EAAE;IAChC,YAAY,CAAC,CAAC,CAAC,GAAG,EAAE,CAAC;CACtB;AAEY,QAAA,eAAe,GAAoC;IAC9D;QACE,UAAU,CAAC,IAAI,CAAC,CAAC,EAAE,EAAE,EAAE,EAAE,EAAE,CAAC,CAAC;QAC7B,IAAA,2BAAO,EAAC,kEAAkE,CAAC;KAC5E;IACD;QACE,IAAI,UAAU,CAAC,CAAC,CAAC;QACjB,IAAA,2BAAO,EAAC,kEAAkE,CAAC;KAC5E;IACD;QACE,IAAA,2BAAO,EAAC,IAAI,CAAC;QACb,IAAA,2BAAO,EAAC,kEAAkE,CAAC;KAC5E;IACD;QACE,IAAA,2BAAO,EAAC,MAAM,CAAC;QACf,IAAA,2BAAO,EAAC,kEAAkE,CAAC;KAC5E;IACD;QACE,IAAA,2BAAO,EAAC,QAAQ,CAAC;QACjB,IAAA,2BAAO,EAAC,kEAAkE,CAAC;KAC5E;IACD;QACE,IAAA,2BAAO,EAAC,UAAU,CAAC;QACnB,IAAA,2BAAO,EAAC,kEAAkE,CAAC;KAC5E;IACD;QACE,IAAA,2BAAO,EAAC,YAAY,CAAC;QACrB,IAAA,2BAAO,EAAC,kEAAkE,CAAC;KAC5E;IACD;QACE,IAAA,2BAAO,EAAC,cAAc,CAAC;QACvB,IAAA,2BAAO,EAAC,kEAAkE,CAAC;KAC5E;IACD;QACE,IAAA,2BAAO,EAAC,gBAAgB,CAAC;QACzB,IAAA,2BAAO,EAAC,kEAAkE,CAAC;KAC5E;IACD;QACE,IAAA,2BAAO,EAAC,kBAAkB,CAAC;QAC3B,IAAA,2BAAO,EAAC,kEAAkE,CAAC;KAC5E;IACD;QACE,IAAA,2BAAO,EAAC,oBAAoB,CAAC;QAC7B,IAAA,2BAAO,EAAC,kEAAkE,CAAC;KAC5E;IACD;QACE,IAAA,2BAAO,EAAC,sBAAsB,CAAC;QAC/B,IAAA,2BAAO,EAAC,kEAAkE,CAAC;KAC5E;IACD;QACE,IAAA,2BAAO,EAAC,wBAAwB,CAAC;QACjC,IAAA,2BAAO,EAAC,kEAAkE,CAAC;KAC5E;IACD;QACE,IAAA,2BAAO,EAAC,0BAA0B,CAAC;QACnC,IAAA,2BAAO,EAAC,kEAAkE,CAAC;KAC5E;IACD;QACE,IAAA,2BAAO,EAAC,4BAA4B,CAAC;QACrC,IAAA,2BAAO,EAAC,kEAAkE,CAAC;KAC5E;IACD;QACE,IAAA,2BAAO,EAAC,8BAA8B,CAAC;QACvC,IAAA,2BAAO,EAAC,kEAAkE,CAAC;KAC5E;IACD;QACE,IAAA,2BAAO,EAAC,gCAAgC,CAAC;QACzC,IAAA,2BAAO,EAAC,kEAAkE,CAAC;KAC5E;IACD;QACE,IAAA,2BAAO,EAAC,kCAAkC,CAAC;QAC3C,IAAA,2BAAO,EAAC,kEAAkE,CAAC;KAC5E;IACD;QACE,IAAA,2BAAO,EAAC,oCAAoC,CAAC;QAC7C,IAAA,2BAAO,EAAC,kEAAkE,CAAC;KAC5E;IACD;QACE,IAAA,2BAAO,EAAC,sCAAsC,CAAC;QAC/C,IAAA,2BAAO,EAAC,kEAAkE,CAAC;KAC5E;IACD;QACE,IAAA,2BAAO,EAAC,wCAAwC,CAAC;QACjD,IAAA,2BAAO,EAAC,kEAAkE,CAAC;KAC5E;IACD;QACE
,IAAA,2BAAO,EAAC,0CAA0C,CAAC;QACnD,IAAA,2BAAO,EAAC,kEAAkE,CAAC;KAC5E;IACD;QACE,IAAA,2BAAO,EAAC,4CAA4C,CAAC;QACrD,IAAA,2BAAO,EAAC,kEAAkE,CAAC;KAC5E;IACD;QACE,IAAA,2BAAO,EAAC,8CAA8C,CAAC;QACvD,IAAA,2BAAO,EAAC,kEAAkE,CAAC;KAC5E;IACD;QACE,IAAA,2BAAO,EAAC,gDAAgD,CAAC;QACzD,IAAA,2BAAO,EAAC,kEAAkE,CAAC;KAC5E;IACD;QACE,IAAA,2BAAO,EAAC,kDAAkD,CAAC;QAC3D,IAAA,2BAAO,EAAC,kEAAkE,CAAC;KAC5E;IACD;QACE,IAAA,2BAAO,EAAC,oDAAoD,CAAC;QAC7D,IAAA,2BAAO,EAAC,kEAAkE,CAAC;KAC5E;IACD;QACE,IAAA,2BAAO,EAAC,sDAAsD,CAAC;QAC/D,IAAA,2BAAO,EAAC,kEAAkE,CAAC;KAC5E;IACD;QACE,IAAA,2BAAO,EAAC,wDAAwD,CAAC;QACjE,IAAA,2BAAO,EAAC,kEAAkE,CAAC;KAC5E;IACD;QACE,IAAA,2BAAO,EAAC,0DAA0D,CAAC;QACnE,IAAA,2BAAO,EAAC,kEAAkE,CAAC;KAC5E;IACD;QACE,IAAA,2BAAO,EAAC,4DAA4D,CAAC;QACrE,IAAA,2BAAO,EAAC,kEAAkE,CAAC;KAC5E;IACD;QACE,IAAA,2BAAO,EAAC,8DAA8D,CAAC;QACvE,IAAA,2BAAO,EAAC,kEAAkE,CAAC;KAC5E;IACD;QACE,IAAA,2BAAO,EAAC,gEAAgE,CAAC;QACzE,IAAA,2BAAO,EAAC,kEAAkE,CAAC;KAC5E;IACD;QACE,IAAA,2BAAO,EAAC,kEAAkE,CAAC;QAC3E,IAAA,2BAAO,EAAC,kEAAkE,CAAC;KAC5E;IACD;QACE,IAAA,2BAAO,EACL,oEAAoE,CACrE;QACD,IAAA,2BAAO,EAAC,kEAAkE,CAAC;KAC5E;IACD;QACE,IAAA,2BAAO,EACL,sEAAsE,CACvE;QACD,IAAA,2BAAO,EAAC,kEAAkE,CAAC;KAC5E;IACD;QACE,IAAA,2BAAO,EACL,wEAAwE,CACzE;QACD,IAAA,2BAAO,EAAC,kEAAkE,CAAC;KAC5E;IACD;QACE,IAAA,2BAAO,EACL,0EAA0E,CAC3E;QACD,IAAA,2BAAO,EAAC,kEAAkE,CAAC;KAC5E;IACD;QACE,IAAA,2BAAO,EACL,4EAA4E,CAC7E;QACD,IAAA,2BAAO,EAAC,kEAAkE,CAAC;KAC5E;IACD;QACE,IAAA,2BAAO,EACL,8EAA8E,CAC/E;QACD,IAAA,2BAAO,EAAC,kEAAkE,CAAC;KAC5E;IACD;QACE,IAAA,2BAAO,EACL,gFAAgF,CACjF;QACD,IAAA,2BAAO,EAAC,kEAAkE,CAAC;KAC5E;IACD;QACE,IAAA,2BAAO,EACL,kFAAkF,CACnF;QACD,IAAA,2BAAO,EAAC,kEAAkE,CAAC;KAC5E;IACD;QACE,IAAA,2BAAO,EACL,oFAAoF,CACrF;QACD,IAAA,2BAAO,EAAC,kEAAkE,CAAC;KAC5E;IACD;QACE,IAAA,2BAAO,EACL,sFAAsF,CACvF;QACD,IAAA,2BAAO,EAAC,kEAAkE,CAAC;KAC5E;IACD;QACE,IAAA,2BAAO,EACL,wFAAwF,CACzF;QACD,IAAA,2BAAO,EAAC,kEAAkE,CAAC;KAC5E;IACD;QACE,IAAA,2BAAO,EACL,0FAA0F,CAC3F;QACD,IAAA,2BAAO,EAAC,kEAAkE,CAAC;KAC5E;IACD;QACE,IAAA,2BAAO,EACL,4FAA4F,CAC7F;QACD,IAAA,2BAAO,EAAC
,kEAAkE,CAAC;KAC5E;IACD;QACE,IAAA,2BAAO,EACL,8FAA8F,CAC/F;QACD,IAAA,2BAAO,EAAC,kEAAkE,CAAC;KAC5E;IACD;QACE,IAAA,2BAAO,EACL,gGAAgG,CACjG;QACD,IAAA,2BAAO,EAAC,kEAAkE,CAAC;KAC5E;IACD;QACE,IAAA,2BAAO,EACL,kGAAkG,CACnG;QACD,IAAA,2BAAO,EAAC,kEAAkE,CAAC;KAC5E;IACD;QACE,IAAA,2BAAO,EACL,oGAAoG,CACrG;QACD,IAAA,2BAAO,EAAC,kEAAkE,CAAC;KAC5E;IACD;QACE,IAAA,2BAAO,EACL,sGAAsG,CACvG;QACD,IAAA,2BAAO,EAAC,kEAAkE,CAAC;KAC5E;IACD;QACE,IAAA,2BAAO,EACL,wGAAwG,CACzG;QACD,IAAA,2BAAO,EAAC,kEAAkE,CAAC;KAC5E;IACD;QACE,IAAA,2BAAO,EACL,0GAA0G,CAC3G;QACD,IAAA,2BAAO,EAAC,kEAAkE,CAAC;KAC5E;IACD;QACE,IAAA,2BAAO,EACL,4GAA4G,CAC7G;QACD,IAAA,2BAAO,EAAC,kEAAkE,CAAC;KAC5E;IACD;QACE,IAAA,2BAAO,EACL,8GAA8G,CAC/G;QACD,IAAA,2BAAO,EAAC,kEAAkE,CAAC;KAC5E;IACD;QACE,IAAA,2BAAO,EACL,gHAAgH,CACjH;QACD,IAAA,2BAAO,EAAC,kEAAkE,CAAC;KAC5E;IACD;QACE,IAAA,2BAAO,EACL,kHAAkH,CACnH;QACD,IAAA,2BAAO,EAAC,kEAAkE,CAAC;KAC5E;IACD;QACE,IAAA,2BAAO,EACL,oHAAoH,CACrH;QACD,IAAA,2BAAO,EAAC,kEAAkE,CAAC;KAC5E;IACD;QACE,IAAA,2BAAO,EACL,sHAAsH,CACvH;QACD,IAAA,2BAAO,EAAC,kEAAkE,CAAC;KAC5E;IACD;QACE,IAAA,2BAAO,EACL,wHAAwH,CACzH;QACD,IAAA,2BAAO,EAAC,kEAAkE,CAAC;KAC5E;IACD;QACE,IAAA,2BAAO,EACL,0HAA0H,CAC3H;QACD,IAAA,2BAAO,EAAC,kEAAkE,CAAC;KAC5E;IACD;QACE,IAAA,2BAAO,EACL,4HAA4H,CAC7H;QACD,IAAA,2BAAO,EAAC,kEAAkE,CAAC;KAC5E;IACD;QACE,IAAA,2BAAO,EACL,8HAA8H,CAC/H;QACD,IAAA,2BAAO,EAAC,kEAAkE,CAAC;KAC5E;IACD;QACE,IAAA,2BAAO,EACL,gIAAgI,CACjI;QACD,IAAA,2BAAO,EAAC,kEAAkE,CAAC;KAC5E;IACD;QACE,IAAA,2BAAO,EACL,kIAAkI,CACnI;QACD,IAAA,2BAAO,EAAC,kEAAkE,CAAC;KAC5E;IACD;QACE,IAAA,2BAAO,EACL,gHAAgH,CACjH;QACD,IAAA,2BAAO,EAAC,kEAAkE,CAAC;KAC5E;IACD;QACE,YAAY;QACZ,IAAA,2BAAO,EAAC,kEAAkE,CAAC;KAC5E;IACD;QACE,IAAA,2BAAO,EACL,wQAAwQ,CACzQ;QACD,IAAA,2BAAO,EAAC,kEAAkE,CAAC;KAC5E;CACF,CAAC;AAEF;;GAEG;AACU,QAAA,eAAe,GAAgD;IAC1E;QACE,IAAA,2BAAO,EAAC,0CAA0C,CAAC;QACnD,IAAA,2BAAO,EAAC,kBAAkB,CAAC;QAC3B,IAAA,2BAAO,EAAC,kEAAkE,CAAC;KAC5E;IACD;QACE,IAAA,2BAAO,EAAC,UAAU,CAAC;QACnB,IAAA,2BAAO,EAAC,0DAA0D,CAAC;QACnE,IAAA,2BAAO,EAAC,kEAAkE,CAAC;KAC5E;IACD;QACE,IAAA,2BAAO,EAA
C,0CAA0C,CAAC;QACnD,IAAA,2BAAO,EACL,sGAAsG,CACvG;QACD,IAAA,2BAAO,EAAC,kEAAkE,CAAC;KAC5E;IACD;QACE,IAAA,2BAAO,EAAC,oDAAoD,CAAC;QAC7D,IAAA,2BAAO,EACL,sGAAsG,CACvG;QACD,IAAA,2BAAO,EAAC,kEAAkE,CAAC;KAC5E;IACD;QACE,IAAA,2BAAO,EACL,wQAAwQ,CACzQ;QACD,IAAA,2BAAO,EACL,8GAA8G,CAC/G;QACD,IAAA,2BAAO,EAAC,kEAAkE,CAAC;KAC5E;IACD;QACE,IAAA,2BAAO,EACL,wQAAwQ,CACzQ;QACD,IAAA,2BAAO,EACL,kTAAkT,CACnT;QACD,IAAA,2BAAO,EAAC,kEAAkE,CAAC;KAC5E;CACF,CAAC"} \ No newline at end of file diff --git a/amplify/functions/downloadDocument/node_modules/@aws-crypto/sha256-js/build/module/RawSha256.d.ts b/amplify/functions/downloadDocument/node_modules/@aws-crypto/sha256-js/build/module/RawSha256.d.ts new file mode 100644 index 0000000..1f580b2 --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@aws-crypto/sha256-js/build/module/RawSha256.d.ts @@ -0,0 +1,17 @@ +/** + * @internal + */ +export declare class RawSha256 { + private state; + private temp; + private buffer; + private bufferLength; + private bytesHashed; + /** + * @internal + */ + finished: boolean; + update(data: Uint8Array): void; + digest(): Uint8Array; + private hashBuffer; +} diff --git a/amplify/functions/downloadDocument/node_modules/@aws-crypto/sha256-js/build/module/RawSha256.js b/amplify/functions/downloadDocument/node_modules/@aws-crypto/sha256-js/build/module/RawSha256.js new file mode 100644 index 0000000..f799acd --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@aws-crypto/sha256-js/build/module/RawSha256.js @@ -0,0 +1,121 @@ +import { BLOCK_SIZE, DIGEST_LENGTH, INIT, KEY, MAX_HASHABLE_LENGTH } from "./constants"; +/** + * @internal + */ +var RawSha256 = /** @class */ (function () { + function RawSha256() { + this.state = Int32Array.from(INIT); + this.temp = new Int32Array(64); + this.buffer = new Uint8Array(64); + this.bufferLength = 0; + this.bytesHashed = 0; + /** + * @internal + */ + this.finished = false; + } + RawSha256.prototype.update = function (data) { + if (this.finished) { + throw 
new Error("Attempted to update an already finished hash."); + } + var position = 0; + var byteLength = data.byteLength; + this.bytesHashed += byteLength; + if (this.bytesHashed * 8 > MAX_HASHABLE_LENGTH) { + throw new Error("Cannot hash more than 2^53 - 1 bits"); + } + while (byteLength > 0) { + this.buffer[this.bufferLength++] = data[position++]; + byteLength--; + if (this.bufferLength === BLOCK_SIZE) { + this.hashBuffer(); + this.bufferLength = 0; + } + } + }; + RawSha256.prototype.digest = function () { + if (!this.finished) { + var bitsHashed = this.bytesHashed * 8; + var bufferView = new DataView(this.buffer.buffer, this.buffer.byteOffset, this.buffer.byteLength); + var undecoratedLength = this.bufferLength; + bufferView.setUint8(this.bufferLength++, 0x80); + // Ensure the final block has enough room for the hashed length + if (undecoratedLength % BLOCK_SIZE >= BLOCK_SIZE - 8) { + for (var i = this.bufferLength; i < BLOCK_SIZE; i++) { + bufferView.setUint8(i, 0); + } + this.hashBuffer(); + this.bufferLength = 0; + } + for (var i = this.bufferLength; i < BLOCK_SIZE - 8; i++) { + bufferView.setUint8(i, 0); + } + bufferView.setUint32(BLOCK_SIZE - 8, Math.floor(bitsHashed / 0x100000000), true); + bufferView.setUint32(BLOCK_SIZE - 4, bitsHashed); + this.hashBuffer(); + this.finished = true; + } + // The value in state is little-endian rather than big-endian, so flip + // each word into a new Uint8Array + var out = new Uint8Array(DIGEST_LENGTH); + for (var i = 0; i < 8; i++) { + out[i * 4] = (this.state[i] >>> 24) & 0xff; + out[i * 4 + 1] = (this.state[i] >>> 16) & 0xff; + out[i * 4 + 2] = (this.state[i] >>> 8) & 0xff; + out[i * 4 + 3] = (this.state[i] >>> 0) & 0xff; + } + return out; + }; + RawSha256.prototype.hashBuffer = function () { + var _a = this, buffer = _a.buffer, state = _a.state; + var state0 = state[0], state1 = state[1], state2 = state[2], state3 = state[3], state4 = state[4], state5 = state[5], state6 = state[6], state7 = state[7]; + for (var i = 0; i 
< BLOCK_SIZE; i++) { + if (i < 16) { + this.temp[i] = + ((buffer[i * 4] & 0xff) << 24) | + ((buffer[i * 4 + 1] & 0xff) << 16) | + ((buffer[i * 4 + 2] & 0xff) << 8) | + (buffer[i * 4 + 3] & 0xff); + } + else { + var u = this.temp[i - 2]; + var t1_1 = ((u >>> 17) | (u << 15)) ^ ((u >>> 19) | (u << 13)) ^ (u >>> 10); + u = this.temp[i - 15]; + var t2_1 = ((u >>> 7) | (u << 25)) ^ ((u >>> 18) | (u << 14)) ^ (u >>> 3); + this.temp[i] = + ((t1_1 + this.temp[i - 7]) | 0) + ((t2_1 + this.temp[i - 16]) | 0); + } + var t1 = ((((((state4 >>> 6) | (state4 << 26)) ^ + ((state4 >>> 11) | (state4 << 21)) ^ + ((state4 >>> 25) | (state4 << 7))) + + ((state4 & state5) ^ (~state4 & state6))) | + 0) + + ((state7 + ((KEY[i] + this.temp[i]) | 0)) | 0)) | + 0; + var t2 = ((((state0 >>> 2) | (state0 << 30)) ^ + ((state0 >>> 13) | (state0 << 19)) ^ + ((state0 >>> 22) | (state0 << 10))) + + ((state0 & state1) ^ (state0 & state2) ^ (state1 & state2))) | + 0; + state7 = state6; + state6 = state5; + state5 = state4; + state4 = (state3 + t1) | 0; + state3 = state2; + state2 = state1; + state1 = state0; + state0 = (t1 + t2) | 0; + } + state[0] += state0; + state[1] += state1; + state[2] += state2; + state[3] += state3; + state[4] += state4; + state[5] += state5; + state[6] += state6; + state[7] += state7; + }; + return RawSha256; +}()); +export { RawSha256 }; +//# sourceMappingURL=RawSha256.js.map \ No newline at end of file diff --git a/amplify/functions/downloadDocument/node_modules/@aws-crypto/sha256-js/build/module/RawSha256.js.map b/amplify/functions/downloadDocument/node_modules/@aws-crypto/sha256-js/build/module/RawSha256.js.map new file mode 100644 index 0000000..c4d50a9 --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@aws-crypto/sha256-js/build/module/RawSha256.js.map @@ -0,0 +1 @@ 
+{"version":3,"file":"RawSha256.js","sourceRoot":"","sources":["../../src/RawSha256.ts"],"names":[],"mappings":"AAAA,OAAO,EACL,UAAU,EACV,aAAa,EACb,IAAI,EACJ,GAAG,EACH,mBAAmB,EACpB,MAAM,aAAa,CAAC;AAErB;;GAEG;AACH;IAAA;QACU,UAAK,GAAe,UAAU,CAAC,IAAI,CAAC,IAAI,CAAC,CAAC;QAC1C,SAAI,GAAe,IAAI,UAAU,CAAC,EAAE,CAAC,CAAC;QACtC,WAAM,GAAe,IAAI,UAAU,CAAC,EAAE,CAAC,CAAC;QACxC,iBAAY,GAAW,CAAC,CAAC;QACzB,gBAAW,GAAW,CAAC,CAAC;QAEhC;;WAEG;QACH,aAAQ,GAAY,KAAK,CAAC;IA8I5B,CAAC;IA5IC,0BAAM,GAAN,UAAO,IAAgB;QACrB,IAAI,IAAI,CAAC,QAAQ,EAAE;YACjB,MAAM,IAAI,KAAK,CAAC,+CAA+C,CAAC,CAAC;SAClE;QAED,IAAI,QAAQ,GAAG,CAAC,CAAC;QACX,IAAA,UAAU,GAAK,IAAI,WAAT,CAAU;QAC1B,IAAI,CAAC,WAAW,IAAI,UAAU,CAAC;QAE/B,IAAI,IAAI,CAAC,WAAW,GAAG,CAAC,GAAG,mBAAmB,EAAE;YAC9C,MAAM,IAAI,KAAK,CAAC,qCAAqC,CAAC,CAAC;SACxD;QAED,OAAO,UAAU,GAAG,CAAC,EAAE;YACrB,IAAI,CAAC,MAAM,CAAC,IAAI,CAAC,YAAY,EAAE,CAAC,GAAG,IAAI,CAAC,QAAQ,EAAE,CAAC,CAAC;YACpD,UAAU,EAAE,CAAC;YAEb,IAAI,IAAI,CAAC,YAAY,KAAK,UAAU,EAAE;gBACpC,IAAI,CAAC,UAAU,EAAE,CAAC;gBAClB,IAAI,CAAC,YAAY,GAAG,CAAC,CAAC;aACvB;SACF;IACH,CAAC;IAED,0BAAM,GAAN;QACE,IAAI,CAAC,IAAI,CAAC,QAAQ,EAAE;YAClB,IAAM,UAAU,GAAG,IAAI,CAAC,WAAW,GAAG,CAAC,CAAC;YACxC,IAAM,UAAU,GAAG,IAAI,QAAQ,CAC7B,IAAI,CAAC,MAAM,CAAC,MAAM,EAClB,IAAI,CAAC,MAAM,CAAC,UAAU,EACtB,IAAI,CAAC,MAAM,CAAC,UAAU,CACvB,CAAC;YAEF,IAAM,iBAAiB,GAAG,IAAI,CAAC,YAAY,CAAC;YAC5C,UAAU,CAAC,QAAQ,CAAC,IAAI,CAAC,YAAY,EAAE,EAAE,IAAI,CAAC,CAAC;YAE/C,+DAA+D;YAC/D,IAAI,iBAAiB,GAAG,UAAU,IAAI,UAAU,GAAG,CAAC,EAAE;gBACpD,KAAK,IAAI,CAAC,GAAG,IAAI,CAAC,YAAY,EAAE,CAAC,GAAG,UAAU,EAAE,CAAC,EAAE,EAAE;oBACnD,UAAU,CAAC,QAAQ,CAAC,CAAC,EAAE,CAAC,CAAC,CAAC;iBAC3B;gBACD,IAAI,CAAC,UAAU,EAAE,CAAC;gBAClB,IAAI,CAAC,YAAY,GAAG,CAAC,CAAC;aACvB;YAED,KAAK,IAAI,CAAC,GAAG,IAAI,CAAC,YAAY,EAAE,CAAC,GAAG,UAAU,GAAG,CAAC,EAAE,CAAC,EAAE,EAAE;gBACvD,UAAU,CAAC,QAAQ,CAAC,CAAC,EAAE,CAAC,CAAC,CAAC;aAC3B;YACD,UAAU,CAAC,SAAS,CAClB,UAAU,GAAG,CAAC,EACd,IAAI,CAAC,KAAK,CAAC,UAAU,GAAG,WAAW,CAAC,EACpC,IAAI,CACL,CAAC;YACF,UAAU,CAAC,SAAS,CAAC,UAAU,GAAG,CAAC,EAAE,UAAU,CAAC,CAAC;YAEjD,IAAI,CAAC,UAA
U,EAAE,CAAC;YAElB,IAAI,CAAC,QAAQ,GAAG,IAAI,CAAC;SACtB;QAED,sEAAsE;QACtE,kCAAkC;QAClC,IAAM,GAAG,GAAG,IAAI,UAAU,CAAC,aAAa,CAAC,CAAC;QAC1C,KAAK,IAAI,CAAC,GAAG,CAAC,EAAE,CAAC,GAAG,CAAC,EAAE,CAAC,EAAE,EAAE;YAC1B,GAAG,CAAC,CAAC,GAAG,CAAC,CAAC,GAAG,CAAC,IAAI,CAAC,KAAK,CAAC,CAAC,CAAC,KAAK,EAAE,CAAC,GAAG,IAAI,CAAC;YAC3C,GAAG,CAAC,CAAC,GAAG,CAAC,GAAG,CAAC,CAAC,GAAG,CAAC,IAAI,CAAC,KAAK,CAAC,CAAC,CAAC,KAAK,EAAE,CAAC,GAAG,IAAI,CAAC;YAC/C,GAAG,CAAC,CAAC,GAAG,CAAC,GAAG,CAAC,CAAC,GAAG,CAAC,IAAI,CAAC,KAAK,CAAC,CAAC,CAAC,KAAK,CAAC,CAAC,GAAG,IAAI,CAAC;YAC9C,GAAG,CAAC,CAAC,GAAG,CAAC,GAAG,CAAC,CAAC,GAAG,CAAC,IAAI,CAAC,KAAK,CAAC,CAAC,CAAC,KAAK,CAAC,CAAC,GAAG,IAAI,CAAC;SAC/C;QAED,OAAO,GAAG,CAAC;IACb,CAAC;IAEO,8BAAU,GAAlB;QACQ,IAAA,KAAoB,IAAI,EAAtB,MAAM,YAAA,EAAE,KAAK,WAAS,CAAC;QAE/B,IAAI,MAAM,GAAG,KAAK,CAAC,CAAC,CAAC,EACnB,MAAM,GAAG,KAAK,CAAC,CAAC,CAAC,EACjB,MAAM,GAAG,KAAK,CAAC,CAAC,CAAC,EACjB,MAAM,GAAG,KAAK,CAAC,CAAC,CAAC,EACjB,MAAM,GAAG,KAAK,CAAC,CAAC,CAAC,EACjB,MAAM,GAAG,KAAK,CAAC,CAAC,CAAC,EACjB,MAAM,GAAG,KAAK,CAAC,CAAC,CAAC,EACjB,MAAM,GAAG,KAAK,CAAC,CAAC,CAAC,CAAC;QAEpB,KAAK,IAAI,CAAC,GAAG,CAAC,EAAE,CAAC,GAAG,UAAU,EAAE,CAAC,EAAE,EAAE;YACnC,IAAI,CAAC,GAAG,EAAE,EAAE;gBACV,IAAI,CAAC,IAAI,CAAC,CAAC,CAAC;oBACV,CAAC,CAAC,MAAM,CAAC,CAAC,GAAG,CAAC,CAAC,GAAG,IAAI,CAAC,IAAI,EAAE,CAAC;wBAC9B,CAAC,CAAC,MAAM,CAAC,CAAC,GAAG,CAAC,GAAG,CAAC,CAAC,GAAG,IAAI,CAAC,IAAI,EAAE,CAAC;wBAClC,CAAC,CAAC,MAAM,CAAC,CAAC,GAAG,CAAC,GAAG,CAAC,CAAC,GAAG,IAAI,CAAC,IAAI,CAAC,CAAC;wBACjC,CAAC,MAAM,CAAC,CAAC,GAAG,CAAC,GAAG,CAAC,CAAC,GAAG,IAAI,CAAC,CAAC;aAC9B;iBAAM;gBACL,IAAI,CAAC,GAAG,IAAI,CAAC,IAAI,CAAC,CAAC,GAAG,CAAC,CAAC,CAAC;gBACzB,IAAM,IAAE,GACN,CAAC,CAAC,CAAC,KAAK,EAAE,CAAC,GAAG,CAAC,CAAC,IAAI,EAAE,CAAC,CAAC,GAAG,CAAC,CAAC,CAAC,KAAK,EAAE,CAAC,GAAG,CAAC,CAAC,IAAI,EAAE,CAAC,CAAC,GAAG,CAAC,CAAC,KAAK,EAAE,CAAC,CAAC;gBAEnE,CAAC,GAAG,IAAI,CAAC,IAAI,CAAC,CAAC,GAAG,EAAE,CAAC,CAAC;gBACtB,IAAM,IAAE,GACN,CAAC,CAAC,CAAC,KAAK,CAAC,CAAC,GAAG,CAAC,CAAC,IAAI,EAAE,CAAC,CAAC,GAAG,CAAC,CAAC,CAAC,KAAK,EAAE,CAAC,GAAG,CAAC,CAAC,IAAI,EAA
E,CAAC,CAAC,GAAG,CAAC,CAAC,KAAK,CAAC,CAAC,CAAC;gBAEjE,IAAI,CAAC,IAAI,CAAC,CAAC,CAAC;oBACV,CAAC,CAAC,IAAE,GAAG,IAAI,CAAC,IAAI,CAAC,CAAC,GAAG,CAAC,CAAC,CAAC,GAAG,CAAC,CAAC,GAAG,CAAC,CAAC,IAAE,GAAG,IAAI,CAAC,IAAI,CAAC,CAAC,GAAG,EAAE,CAAC,CAAC,GAAG,CAAC,CAAC,CAAC;aAClE;YAED,IAAM,EAAE,GACN,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,MAAM,KAAK,CAAC,CAAC,GAAG,CAAC,MAAM,IAAI,EAAE,CAAC,CAAC;gBACnC,CAAC,CAAC,MAAM,KAAK,EAAE,CAAC,GAAG,CAAC,MAAM,IAAI,EAAE,CAAC,CAAC;gBAClC,CAAC,CAAC,MAAM,KAAK,EAAE,CAAC,GAAG,CAAC,MAAM,IAAI,CAAC,CAAC,CAAC,CAAC;gBAClC,CAAC,CAAC,MAAM,GAAG,MAAM,CAAC,GAAG,CAAC,CAAC,MAAM,GAAG,MAAM,CAAC,CAAC,CAAC;gBACzC,CAAC,CAAC;gBACF,CAAC,CAAC,MAAM,GAAG,CAAC,CAAC,GAAG,CAAC,CAAC,CAAC,GAAG,IAAI,CAAC,IAAI,CAAC,CAAC,CAAC,CAAC,GAAG,CAAC,CAAC,CAAC,GAAG,CAAC,CAAC,CAAC;gBACjD,CAAC,CAAC;YAEJ,IAAM,EAAE,GACN,CAAC,CAAC,CAAC,CAAC,MAAM,KAAK,CAAC,CAAC,GAAG,CAAC,MAAM,IAAI,EAAE,CAAC,CAAC;gBACjC,CAAC,CAAC,MAAM,KAAK,EAAE,CAAC,GAAG,CAAC,MAAM,IAAI,EAAE,CAAC,CAAC;gBAClC,CAAC,CAAC,MAAM,KAAK,EAAE,CAAC,GAAG,CAAC,MAAM,IAAI,EAAE,CAAC,CAAC,CAAC;gBACnC,CAAC,CAAC,MAAM,GAAG,MAAM,CAAC,GAAG,CAAC,MAAM,GAAG,MAAM,CAAC,GAAG,CAAC,MAAM,GAAG,MAAM,CAAC,CAAC,CAAC;gBAC9D,CAAC,CAAC;YAEJ,MAAM,GAAG,MAAM,CAAC;YAChB,MAAM,GAAG,MAAM,CAAC;YAChB,MAAM,GAAG,MAAM,CAAC;YAChB,MAAM,GAAG,CAAC,MAAM,GAAG,EAAE,CAAC,GAAG,CAAC,CAAC;YAC3B,MAAM,GAAG,MAAM,CAAC;YAChB,MAAM,GAAG,MAAM,CAAC;YAChB,MAAM,GAAG,MAAM,CAAC;YAChB,MAAM,GAAG,CAAC,EAAE,GAAG,EAAE,CAAC,GAAG,CAAC,CAAC;SACxB;QAED,KAAK,CAAC,CAAC,CAAC,IAAI,MAAM,CAAC;QACnB,KAAK,CAAC,CAAC,CAAC,IAAI,MAAM,CAAC;QACnB,KAAK,CAAC,CAAC,CAAC,IAAI,MAAM,CAAC;QACnB,KAAK,CAAC,CAAC,CAAC,IAAI,MAAM,CAAC;QACnB,KAAK,CAAC,CAAC,CAAC,IAAI,MAAM,CAAC;QACnB,KAAK,CAAC,CAAC,CAAC,IAAI,MAAM,CAAC;QACnB,KAAK,CAAC,CAAC,CAAC,IAAI,MAAM,CAAC;QACnB,KAAK,CAAC,CAAC,CAAC,IAAI,MAAM,CAAC;IACrB,CAAC;IACH,gBAAC;AAAD,CAAC,AAxJD,IAwJC"} \ No newline at end of file diff --git a/amplify/functions/downloadDocument/node_modules/@aws-crypto/sha256-js/build/module/constants.d.ts 
b/amplify/functions/downloadDocument/node_modules/@aws-crypto/sha256-js/build/module/constants.d.ts new file mode 100644 index 0000000..63bd764 --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@aws-crypto/sha256-js/build/module/constants.d.ts @@ -0,0 +1,20 @@ +/** + * @internal + */ +export declare const BLOCK_SIZE: number; +/** + * @internal + */ +export declare const DIGEST_LENGTH: number; +/** + * @internal + */ +export declare const KEY: Uint32Array; +/** + * @internal + */ +export declare const INIT: number[]; +/** + * @internal + */ +export declare const MAX_HASHABLE_LENGTH: number; diff --git a/amplify/functions/downloadDocument/node_modules/@aws-crypto/sha256-js/build/module/constants.js b/amplify/functions/downloadDocument/node_modules/@aws-crypto/sha256-js/build/module/constants.js new file mode 100644 index 0000000..68037b3 --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@aws-crypto/sha256-js/build/module/constants.js @@ -0,0 +1,95 @@ +/** + * @internal + */ +export var BLOCK_SIZE = 64; +/** + * @internal + */ +export var DIGEST_LENGTH = 32; +/** + * @internal + */ +export var KEY = new Uint32Array([ + 0x428a2f98, + 0x71374491, + 0xb5c0fbcf, + 0xe9b5dba5, + 0x3956c25b, + 0x59f111f1, + 0x923f82a4, + 0xab1c5ed5, + 0xd807aa98, + 0x12835b01, + 0x243185be, + 0x550c7dc3, + 0x72be5d74, + 0x80deb1fe, + 0x9bdc06a7, + 0xc19bf174, + 0xe49b69c1, + 0xefbe4786, + 0x0fc19dc6, + 0x240ca1cc, + 0x2de92c6f, + 0x4a7484aa, + 0x5cb0a9dc, + 0x76f988da, + 0x983e5152, + 0xa831c66d, + 0xb00327c8, + 0xbf597fc7, + 0xc6e00bf3, + 0xd5a79147, + 0x06ca6351, + 0x14292967, + 0x27b70a85, + 0x2e1b2138, + 0x4d2c6dfc, + 0x53380d13, + 0x650a7354, + 0x766a0abb, + 0x81c2c92e, + 0x92722c85, + 0xa2bfe8a1, + 0xa81a664b, + 0xc24b8b70, + 0xc76c51a3, + 0xd192e819, + 0xd6990624, + 0xf40e3585, + 0x106aa070, + 0x19a4c116, + 0x1e376c08, + 0x2748774c, + 0x34b0bcb5, + 0x391c0cb3, + 0x4ed8aa4a, + 0x5b9cca4f, + 0x682e6ff3, + 0x748f82ee, + 0x78a5636f, + 0x84c87814, + 
0x8cc70208, + 0x90befffa, + 0xa4506ceb, + 0xbef9a3f7, + 0xc67178f2 +]); +/** + * @internal + */ +export var INIT = [ + 0x6a09e667, + 0xbb67ae85, + 0x3c6ef372, + 0xa54ff53a, + 0x510e527f, + 0x9b05688c, + 0x1f83d9ab, + 0x5be0cd19 +]; +/** + * @internal + */ +export var MAX_HASHABLE_LENGTH = Math.pow(2, 53) - 1; +//# sourceMappingURL=constants.js.map \ No newline at end of file diff --git a/amplify/functions/downloadDocument/node_modules/@aws-crypto/sha256-js/build/module/constants.js.map b/amplify/functions/downloadDocument/node_modules/@aws-crypto/sha256-js/build/module/constants.js.map new file mode 100644 index 0000000..6c93089 --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@aws-crypto/sha256-js/build/module/constants.js.map @@ -0,0 +1 @@ +{"version":3,"file":"constants.js","sourceRoot":"","sources":["../../src/constants.ts"],"names":[],"mappings":"AAAA;;GAEG;AACH,MAAM,CAAC,IAAM,UAAU,GAAW,EAAE,CAAC;AAErC;;GAEG;AACH,MAAM,CAAC,IAAM,aAAa,GAAW,EAAE,CAAC;AAExC;;GAEG;AACH,MAAM,CAAC,IAAM,GAAG,GAAG,IAAI,WAAW,CAAC;IACjC,UAAU;IACV,UAAU;IACV,UAAU;IACV,UAAU;IACV,UAAU;IACV,UAAU;IACV,UAAU;IACV,UAAU;IACV,UAAU;IACV,UAAU;IACV,UAAU;IACV,UAAU;IACV,UAAU;IACV,UAAU;IACV,UAAU;IACV,UAAU;IACV,UAAU;IACV,UAAU;IACV,UAAU;IACV,UAAU;IACV,UAAU;IACV,UAAU;IACV,UAAU;IACV,UAAU;IACV,UAAU;IACV,UAAU;IACV,UAAU;IACV,UAAU;IACV,UAAU;IACV,UAAU;IACV,UAAU;IACV,UAAU;IACV,UAAU;IACV,UAAU;IACV,UAAU;IACV,UAAU;IACV,UAAU;IACV,UAAU;IACV,UAAU;IACV,UAAU;IACV,UAAU;IACV,UAAU;IACV,UAAU;IACV,UAAU;IACV,UAAU;IACV,UAAU;IACV,UAAU;IACV,UAAU;IACV,UAAU;IACV,UAAU;IACV,UAAU;IACV,UAAU;IACV,UAAU;IACV,UAAU;IACV,UAAU;IACV,UAAU;IACV,UAAU;IACV,UAAU;IACV,UAAU;IACV,UAAU;IACV,UAAU;IACV,UAAU;IACV,UAAU;IACV,UAAU;CACX,CAAC,CAAC;AAEH;;GAEG;AACH,MAAM,CAAC,IAAM,IAAI,GAAG;IAClB,UAAU;IACV,UAAU;IACV,UAAU;IACV,UAAU;IACV,UAAU;IACV,UAAU;IACV,UAAU;IACV,UAAU;CACX,CAAC;AAEF;;GAEG;AACH,MAAM,CAAC,IAAM,mBAAmB,GAAG,SAAA,CAAC,EAAI,EAAE,CAAA,GAAG,CAAC,CAAC"} \ No newline at end of file diff --git 
a/amplify/functions/downloadDocument/node_modules/@aws-crypto/sha256-js/build/module/index.d.ts b/amplify/functions/downloadDocument/node_modules/@aws-crypto/sha256-js/build/module/index.d.ts new file mode 100644 index 0000000..4554d8a --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@aws-crypto/sha256-js/build/module/index.d.ts @@ -0,0 +1 @@ +export * from "./jsSha256"; diff --git a/amplify/functions/downloadDocument/node_modules/@aws-crypto/sha256-js/build/module/index.js b/amplify/functions/downloadDocument/node_modules/@aws-crypto/sha256-js/build/module/index.js new file mode 100644 index 0000000..a8f73a0 --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@aws-crypto/sha256-js/build/module/index.js @@ -0,0 +1,2 @@ +export * from "./jsSha256"; +//# sourceMappingURL=index.js.map \ No newline at end of file diff --git a/amplify/functions/downloadDocument/node_modules/@aws-crypto/sha256-js/build/module/index.js.map b/amplify/functions/downloadDocument/node_modules/@aws-crypto/sha256-js/build/module/index.js.map new file mode 100644 index 0000000..030d795 --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@aws-crypto/sha256-js/build/module/index.js.map @@ -0,0 +1 @@ +{"version":3,"file":"index.js","sourceRoot":"","sources":["../../src/index.ts"],"names":[],"mappings":"AAAA,cAAc,YAAY,CAAC"} \ No newline at end of file diff --git a/amplify/functions/downloadDocument/node_modules/@aws-crypto/sha256-js/build/module/jsSha256.d.ts b/amplify/functions/downloadDocument/node_modules/@aws-crypto/sha256-js/build/module/jsSha256.d.ts new file mode 100644 index 0000000..d813b25 --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@aws-crypto/sha256-js/build/module/jsSha256.d.ts @@ -0,0 +1,12 @@ +import { Checksum, SourceData } from "@aws-sdk/types"; +export declare class Sha256 implements Checksum { + private readonly secret?; + private hash; + private outer?; + private error; + constructor(secret?: SourceData); 
+ update(toHash: SourceData): void; + digestSync(): Uint8Array; + digest(): Promise; + reset(): void; +} diff --git a/amplify/functions/downloadDocument/node_modules/@aws-crypto/sha256-js/build/module/jsSha256.js b/amplify/functions/downloadDocument/node_modules/@aws-crypto/sha256-js/build/module/jsSha256.js new file mode 100644 index 0000000..fa40899 --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@aws-crypto/sha256-js/build/module/jsSha256.js @@ -0,0 +1,82 @@ +import { __awaiter, __generator } from "tslib"; +import { BLOCK_SIZE } from "./constants"; +import { RawSha256 } from "./RawSha256"; +import { isEmptyData, convertToBuffer } from "@aws-crypto/util"; +var Sha256 = /** @class */ (function () { + function Sha256(secret) { + this.secret = secret; + this.hash = new RawSha256(); + this.reset(); + } + Sha256.prototype.update = function (toHash) { + if (isEmptyData(toHash) || this.error) { + return; + } + try { + this.hash.update(convertToBuffer(toHash)); + } + catch (e) { + this.error = e; + } + }; + /* This synchronous method keeps compatibility + * with the v2 aws-sdk. + */ + Sha256.prototype.digestSync = function () { + if (this.error) { + throw this.error; + } + if (this.outer) { + if (!this.outer.finished) { + this.outer.update(this.hash.digest()); + } + return this.outer.digest(); + } + return this.hash.digest(); + }; + /* The underlying digest method here is synchronous. + * To keep the same interface with the other hash functions + * the default is to expose this as an async method. + * However, it can sometimes be useful to have a sync method. 
+ */ + Sha256.prototype.digest = function () { + return __awaiter(this, void 0, void 0, function () { + return __generator(this, function (_a) { + return [2 /*return*/, this.digestSync()]; + }); + }); + }; + Sha256.prototype.reset = function () { + this.hash = new RawSha256(); + if (this.secret) { + this.outer = new RawSha256(); + var inner = bufferFromSecret(this.secret); + var outer = new Uint8Array(BLOCK_SIZE); + outer.set(inner); + for (var i = 0; i < BLOCK_SIZE; i++) { + inner[i] ^= 0x36; + outer[i] ^= 0x5c; + } + this.hash.update(inner); + this.outer.update(outer); + // overwrite the copied key in memory + for (var i = 0; i < inner.byteLength; i++) { + inner[i] = 0; + } + } + }; + return Sha256; +}()); +export { Sha256 }; +function bufferFromSecret(secret) { + var input = convertToBuffer(secret); + if (input.byteLength > BLOCK_SIZE) { + var bufferHash = new RawSha256(); + bufferHash.update(input); + input = bufferHash.digest(); + } + var buffer = new Uint8Array(BLOCK_SIZE); + buffer.set(input); + return buffer; +} +//# sourceMappingURL=jsSha256.js.map \ No newline at end of file diff --git a/amplify/functions/downloadDocument/node_modules/@aws-crypto/sha256-js/build/module/jsSha256.js.map b/amplify/functions/downloadDocument/node_modules/@aws-crypto/sha256-js/build/module/jsSha256.js.map new file mode 100644 index 0000000..94fa401 --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@aws-crypto/sha256-js/build/module/jsSha256.js.map @@ -0,0 +1 @@ 
+{"version":3,"file":"jsSha256.js","sourceRoot":"","sources":["../../src/jsSha256.ts"],"names":[],"mappings":";AAAA,OAAO,EAAE,UAAU,EAAE,MAAM,aAAa,CAAC;AACzC,OAAO,EAAE,SAAS,EAAE,MAAM,aAAa,CAAC;AAExC,OAAO,EAAE,WAAW,EAAE,eAAe,EAAE,MAAM,kBAAkB,CAAC;AAEhE;IAME,gBAAY,MAAmB;QAC7B,IAAI,CAAC,MAAM,GAAG,MAAM,CAAC;QACrB,IAAI,CAAC,IAAI,GAAG,IAAI,SAAS,EAAE,CAAC;QAC5B,IAAI,CAAC,KAAK,EAAE,CAAC;IACf,CAAC;IAED,uBAAM,GAAN,UAAO,MAAkB;QACvB,IAAI,WAAW,CAAC,MAAM,CAAC,IAAI,IAAI,CAAC,KAAK,EAAE;YACrC,OAAO;SACR;QAED,IAAI;YACF,IAAI,CAAC,IAAI,CAAC,MAAM,CAAC,eAAe,CAAC,MAAM,CAAC,CAAC,CAAC;SAC3C;QAAC,OAAO,CAAC,EAAE;YACV,IAAI,CAAC,KAAK,GAAG,CAAC,CAAC;SAChB;IACH,CAAC;IAED;;OAEG;IACH,2BAAU,GAAV;QACE,IAAI,IAAI,CAAC,KAAK,EAAE;YACd,MAAM,IAAI,CAAC,KAAK,CAAC;SAClB;QAED,IAAI,IAAI,CAAC,KAAK,EAAE;YACd,IAAI,CAAC,IAAI,CAAC,KAAK,CAAC,QAAQ,EAAE;gBACxB,IAAI,CAAC,KAAK,CAAC,MAAM,CAAC,IAAI,CAAC,IAAI,CAAC,MAAM,EAAE,CAAC,CAAC;aACvC;YAED,OAAO,IAAI,CAAC,KAAK,CAAC,MAAM,EAAE,CAAC;SAC5B;QAED,OAAO,IAAI,CAAC,IAAI,CAAC,MAAM,EAAE,CAAC;IAC5B,CAAC;IAED;;;;OAIG;IACG,uBAAM,GAAZ;;;gBACE,sBAAO,IAAI,CAAC,UAAU,EAAE,EAAC;;;KAC1B;IAED,sBAAK,GAAL;QACE,IAAI,CAAC,IAAI,GAAG,IAAI,SAAS,EAAE,CAAC;QAC5B,IAAI,IAAI,CAAC,MAAM,EAAE;YACf,IAAI,CAAC,KAAK,GAAG,IAAI,SAAS,EAAE,CAAC;YAC7B,IAAM,KAAK,GAAG,gBAAgB,CAAC,IAAI,CAAC,MAAM,CAAC,CAAC;YAC5C,IAAM,KAAK,GAAG,IAAI,UAAU,CAAC,UAAU,CAAC,CAAC;YACzC,KAAK,CAAC,GAAG,CAAC,KAAK,CAAC,CAAC;YAEjB,KAAK,IAAI,CAAC,GAAG,CAAC,EAAE,CAAC,GAAG,UAAU,EAAE,CAAC,EAAE,EAAE;gBACnC,KAAK,CAAC,CAAC,CAAC,IAAI,IAAI,CAAC;gBACjB,KAAK,CAAC,CAAC,CAAC,IAAI,IAAI,CAAC;aAClB;YAED,IAAI,CAAC,IAAI,CAAC,MAAM,CAAC,KAAK,CAAC,CAAC;YACxB,IAAI,CAAC,KAAK,CAAC,MAAM,CAAC,KAAK,CAAC,CAAC;YAEzB,qCAAqC;YACrC,KAAK,IAAI,CAAC,GAAG,CAAC,EAAE,CAAC,GAAG,KAAK,CAAC,UAAU,EAAE,CAAC,EAAE,EAAE;gBACzC,KAAK,CAAC,CAAC,CAAC,GAAG,CAAC,CAAC;aACd;SACF;IACH,CAAC;IACH,aAAC;AAAD,CAAC,AA1ED,IA0EC;;AAED,SAAS,gBAAgB,CAAC,MAAkB;IAC1C,IAAI,KAAK,GAAG,eAAe,CAAC,MAAM,CAAC,CAAC;IAEpC,IAAI,KAAK,CAAC,UAAU,GAAG,UAAU,EAAE;QACjC,IAAM,UAAU,GAAG,IAAI,SAAS,EAAE,CAAC;QACnC,UAAU,CAAC,MAAM,CAAC,KAAK
,CAAC,CAAC;QACzB,KAAK,GAAG,UAAU,CAAC,MAAM,EAAE,CAAC;KAC7B;IAED,IAAM,MAAM,GAAG,IAAI,UAAU,CAAC,UAAU,CAAC,CAAC;IAC1C,MAAM,CAAC,GAAG,CAAC,KAAK,CAAC,CAAC;IAClB,OAAO,MAAM,CAAC;AAChB,CAAC"} \ No newline at end of file diff --git a/amplify/functions/downloadDocument/node_modules/@aws-crypto/sha256-js/build/module/knownHashes.fixture.d.ts b/amplify/functions/downloadDocument/node_modules/@aws-crypto/sha256-js/build/module/knownHashes.fixture.d.ts new file mode 100644 index 0000000..d880343 --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@aws-crypto/sha256-js/build/module/knownHashes.fixture.d.ts @@ -0,0 +1,5 @@ +export declare const hashTestVectors: Array<[Uint8Array, Uint8Array]>; +/** + * @see https://tools.ietf.org/html/rfc4231 + */ +export declare const hmacTestVectors: Array<[Uint8Array, Uint8Array, Uint8Array]>; diff --git a/amplify/functions/downloadDocument/node_modules/@aws-crypto/sha256-js/build/module/knownHashes.fixture.js b/amplify/functions/downloadDocument/node_modules/@aws-crypto/sha256-js/build/module/knownHashes.fixture.js new file mode 100644 index 0000000..c2d2663 --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@aws-crypto/sha256-js/build/module/knownHashes.fixture.js @@ -0,0 +1,319 @@ +import { fromHex } from "@aws-sdk/util-hex-encoding"; +var millionChars = new Uint8Array(1000000); +for (var i = 0; i < 1000000; i++) { + millionChars[i] = 97; +} +export var hashTestVectors = [ + [ + Uint8Array.from([97, 98, 99]), + fromHex("ba7816bf8f01cfea414140de5dae2223b00361a396177a9cb410ff61f20015ad") + ], + [ + new Uint8Array(0), + fromHex("e3b0c44298fc1c149afbf4c8996fb92427ae41e4649b934ca495991b7852b855") + ], + [ + fromHex("61"), + fromHex("ca978112ca1bbdcafac231b39a23dc4da786eff8147c4e72b9807785afee48bb") + ], + [ + fromHex("6161"), + fromHex("961b6dd3ede3cb8ecbaacbd68de040cd78eb2ed5889130cceb4c49268ea4d506") + ], + [ + fromHex("616161"), + fromHex("9834876dcfb05cb167a5c24953eba58c4ac89b1adf57f28f2f9d09af107ee8f0") + ], + 
[ + fromHex("61616161"), + fromHex("61be55a8e2f6b4e172338bddf184d6dbee29c98853e0a0485ecee7f27b9af0b4") + ], + [ + fromHex("6161616161"), + fromHex("ed968e840d10d2d313a870bc131a4e2c311d7ad09bdf32b3418147221f51a6e2") + ], + [ + fromHex("616161616161"), + fromHex("ed02457b5c41d964dbd2f2a609d63fe1bb7528dbe55e1abf5b52c249cd735797") + ], + [ + fromHex("61616161616161"), + fromHex("e46240714b5db3a23eee60479a623efba4d633d27fe4f03c904b9e219a7fbe60") + ], + [ + fromHex("6161616161616161"), + fromHex("1f3ce40415a2081fa3eee75fc39fff8e56c22270d1a978a7249b592dcebd20b4") + ], + [ + fromHex("616161616161616161"), + fromHex("f2aca93b80cae681221f0445fa4e2cae8a1f9f8fa1e1741d9639caad222f537d") + ], + [ + fromHex("61616161616161616161"), + fromHex("bf2cb58a68f684d95a3b78ef8f661c9a4e5b09e82cc8f9cc88cce90528caeb27") + ], + [ + fromHex("6161616161616161616161"), + fromHex("28cb017dfc99073aa1b47c1b30f413e3ce774c4991eb4158de50f9dbb36d8043") + ], + [ + fromHex("616161616161616161616161"), + fromHex("f24abc34b13fade76e805799f71187da6cd90b9cac373ae65ed57f143bd664e5") + ], + [ + fromHex("61616161616161616161616161"), + fromHex("a689d786e81340e45511dec6c7ab2d978434e5db123362450fe10cfac70d19d0") + ], + [ + fromHex("6161616161616161616161616161"), + fromHex("82cab7df0abfb9d95dca4e5937ce2968c798c726fea48c016bf9763221efda13") + ], + [ + fromHex("616161616161616161616161616161"), + fromHex("ef2df0b539c6c23de0f4cbe42648c301ae0e22e887340a4599fb4ef4e2678e48") + ], + [ + fromHex("61616161616161616161616161616161"), + fromHex("0c0beacef8877bbf2416eb00f2b5dc96354e26dd1df5517320459b1236860f8c") + ], + [ + fromHex("6161616161616161616161616161616161"), + fromHex("b860666ee2966dd8f903be44ee605c6e1366f926d9f17a8f49937d11624eb99d") + ], + [ + fromHex("616161616161616161616161616161616161"), + fromHex("c926defaaa3d13eda2fc63a553bb7fb7326bece6e7cb67ca5296e4727d89bab4") + ], + [ + fromHex("61616161616161616161616161616161616161"), + fromHex("a0b4aaab8a966e2193ba172d68162c4656860197f256b5f45f0203397ff3f99c") + ], + 
[ + fromHex("6161616161616161616161616161616161616161"), + fromHex("42492da06234ad0ac76f5d5debdb6d1ae027cffbe746a1c13b89bb8bc0139137") + ], + [ + fromHex("616161616161616161616161616161616161616161"), + fromHex("7df8e299c834de198e264c3e374bc58ecd9382252a705c183beb02f275571e3b") + ], + [ + fromHex("61616161616161616161616161616161616161616161"), + fromHex("ec7c494df6d2a7ea36668d656e6b8979e33641bfea378c15038af3964db057a3") + ], + [ + fromHex("6161616161616161616161616161616161616161616161"), + fromHex("897d3e95b65f26676081f8b9f3a98b6ee4424566303e8d4e7c7522ebae219eab") + ], + [ + fromHex("616161616161616161616161616161616161616161616161"), + fromHex("09f61f8d9cd65e6a0c258087c485b6293541364e42bd97b2d7936580c8aa3c54") + ], + [ + fromHex("61616161616161616161616161616161616161616161616161"), + fromHex("2f521e2a7d0bd812cbc035f4ed6806eb8d851793b04ba147e8f66b72f5d1f20f") + ], + [ + fromHex("6161616161616161616161616161616161616161616161616161"), + fromHex("9976d549a25115dab4e36d0c1fb8f31cb07da87dd83275977360eb7dc09e88de") + ], + [ + fromHex("616161616161616161616161616161616161616161616161616161"), + fromHex("cc0616e61cbd6e8e5e34e9fb2d320f37de915820206f5696c31f1fbd24aa16de") + ], + [ + fromHex("61616161616161616161616161616161616161616161616161616161"), + fromHex("9c547cb8115a44883b9f70ba68f75117cd55359c92611875e386f8af98c172ab") + ], + [ + fromHex("6161616161616161616161616161616161616161616161616161616161"), + fromHex("6913c9c7fd42fe23df8b6bcd4dbaf1c17748948d97f2980b432319c39eddcf6c") + ], + [ + fromHex("616161616161616161616161616161616161616161616161616161616161"), + fromHex("3a54fc0cbc0b0ef48b6507b7788096235d10292dd3ae24e22f5aa062d4f9864a") + ], + [ + fromHex("61616161616161616161616161616161616161616161616161616161616161"), + fromHex("61c60b487d1a921e0bcc9bf853dda0fb159b30bf57b2e2d2c753b00be15b5a09") + ], + [ + fromHex("6161616161616161616161616161616161616161616161616161616161616161"), + fromHex("3ba3f5f43b92602683c19aee62a20342b084dd5971ddd33808d81a328879a547") + ], 
+ [ + fromHex("616161616161616161616161616161616161616161616161616161616161616161"), + fromHex("852785c805c77e71a22340a54e9d95933ed49121e7d2bf3c2d358854bc1359ea") + ], + [ + fromHex("61616161616161616161616161616161616161616161616161616161616161616161"), + fromHex("a27c896c4859204843166af66f0e902b9c3b3ed6d2fd13d435abc020065c526f") + ], + [ + fromHex("6161616161616161616161616161616161616161616161616161616161616161616161"), + fromHex("629362afc62c74497caed2272e30f8125ecd0965f8d8d7cfc4e260f7f8dd319d") + ], + [ + fromHex("616161616161616161616161616161616161616161616161616161616161616161616161"), + fromHex("22c1d24bcd03e9aee9832efccd6da613fc702793178e5f12c945c7b67ddda933") + ], + [ + fromHex("61616161616161616161616161616161616161616161616161616161616161616161616161"), + fromHex("21ec055b38ce759cd4d0f477e9bdec2c5b8199945db4439bae334a964df6246c") + ], + [ + fromHex("6161616161616161616161616161616161616161616161616161616161616161616161616161"), + fromHex("365a9c3e2c2af0a56e47a9dac51c2c5381bf8f41273bad3175e0e619126ad087") + ], + [ + fromHex("616161616161616161616161616161616161616161616161616161616161616161616161616161"), + fromHex("b4d5e56e929ba4cda349e9274e3603d0be246b82016bca20f363963c5f2d6845") + ], + [ + fromHex("61616161616161616161616161616161616161616161616161616161616161616161616161616161"), + fromHex("e33cdf9c7f7120b98e8c78408953e07f2ecd183006b5606df349b4c212acf43e") + ], + [ + fromHex("6161616161616161616161616161616161616161616161616161616161616161616161616161616161"), + fromHex("c0f8bd4dbc2b0c03107c1c37913f2a7501f521467f45dd0fef6958e9a4692719") + ], + [ + fromHex("616161616161616161616161616161616161616161616161616161616161616161616161616161616161"), + fromHex("7a538607fdaab9296995929f451565bbb8142e1844117322aafd2b3d76b01aff") + ], + [ + fromHex("61616161616161616161616161616161616161616161616161616161616161616161616161616161616161"), + fromHex("66d34fba71f8f450f7e45598853e53bfc23bbd129027cbb131a2f4ffd7878cd0") + ], + [ + 
fromHex("6161616161616161616161616161616161616161616161616161616161616161616161616161616161616161"), + fromHex("16849877c6c21ef0bfa68e4f6747300ddb171b170b9f00e189edc4c2fc4db93e") + ], + [ + fromHex("616161616161616161616161616161616161616161616161616161616161616161616161616161616161616161"), + fromHex("52789e3423b72beeb898456a4f49662e46b0cbb960784c5ef4b1399d327e7c27") + ], + [ + fromHex("61616161616161616161616161616161616161616161616161616161616161616161616161616161616161616161"), + fromHex("6643110c5628fff59edf76d82d5bf573bf800f16a4d65dfb1e5d6f1a46296d0b") + ], + [ + fromHex("6161616161616161616161616161616161616161616161616161616161616161616161616161616161616161616161"), + fromHex("11eaed932c6c6fddfc2efc394e609facf4abe814fc6180d03b14fce13a07d0e5") + ], + [ + fromHex("616161616161616161616161616161616161616161616161616161616161616161616161616161616161616161616161"), + fromHex("97daac0ee9998dfcad6c9c0970da5ca411c86233a944c25b47566f6a7bc1ddd5") + ], + [ + fromHex("61616161616161616161616161616161616161616161616161616161616161616161616161616161616161616161616161"), + fromHex("8f9bec6a62dd28ebd36d1227745592de6658b36974a3bb98a4c582f683ea6c42") + ], + [ + fromHex("6161616161616161616161616161616161616161616161616161616161616161616161616161616161616161616161616161"), + fromHex("160b4e433e384e05e537dc59b467f7cb2403f0214db15c5db58862a3f1156d2e") + ], + [ + fromHex("616161616161616161616161616161616161616161616161616161616161616161616161616161616161616161616161616161"), + fromHex("bfc5fe0e360152ca98c50fab4ed7e3078c17debc2917740d5000913b686ca129") + ], + [ + fromHex("61616161616161616161616161616161616161616161616161616161616161616161616161616161616161616161616161616161"), + fromHex("6c1b3dc7a706b9dc81352a6716b9c666c608d8626272c64b914ab05572fc6e84") + ], + [ + fromHex("6161616161616161616161616161616161616161616161616161616161616161616161616161616161616161616161616161616161"), + fromHex("abe346a7259fc90b4c27185419628e5e6af6466b1ae9b5446cac4bfc26cf05c4") + ], + [ + 
fromHex("616161616161616161616161616161616161616161616161616161616161616161616161616161616161616161616161616161616161"), + fromHex("a3f01b6939256127582ac8ae9fb47a382a244680806a3f613a118851c1ca1d47") + ], + [ + fromHex("61616161616161616161616161616161616161616161616161616161616161616161616161616161616161616161616161616161616161"), + fromHex("9f4390f8d30c2dd92ec9f095b65e2b9ae9b0a925a5258e241c9f1e910f734318") + ], + [ + fromHex("6161616161616161616161616161616161616161616161616161616161616161616161616161616161616161616161616161616161616161"), + fromHex("b35439a4ac6f0948b6d6f9e3c6af0f5f590ce20f1bde7090ef7970686ec6738a") + ], + [ + fromHex("616161616161616161616161616161616161616161616161616161616161616161616161616161616161616161616161616161616161616161"), + fromHex("f13b2d724659eb3bf47f2dd6af1accc87b81f09f59f2b75e5c0bed6589dfe8c6") + ], + [ + fromHex("61616161616161616161616161616161616161616161616161616161616161616161616161616161616161616161616161616161616161616161"), + fromHex("d5c039b748aa64665782974ec3dc3025c042edf54dcdc2b5de31385b094cb678") + ], + [ + fromHex("6161616161616161616161616161616161616161616161616161616161616161616161616161616161616161616161616161616161616161616161"), + fromHex("111bb261277afd65f0744b247cd3e47d386d71563d0ed995517807d5ebd4fba3") + ], + [ + fromHex("616161616161616161616161616161616161616161616161616161616161616161616161616161616161616161616161616161616161616161616161"), + fromHex("11ee391211c6256460b6ed375957fadd8061cafbb31daf967db875aebd5aaad4") + ], + [ + fromHex("61616161616161616161616161616161616161616161616161616161616161616161616161616161616161616161616161616161616161616161616161"), + fromHex("35d5fc17cfbbadd00f5e710ada39f194c5ad7c766ad67072245f1fad45f0f530") + ], + [ + fromHex("6161616161616161616161616161616161616161616161616161616161616161616161616161616161616161616161616161616161616161616161616161"), + fromHex("f506898cc7c2e092f9eb9fadae7ba50383f5b46a2a4fe5597dbb553a78981268") + ], + [ + 
fromHex("616161616161616161616161616161616161616161616161616161616161616161616161616161616161616161616161616161616161616161616161616161"), + fromHex("7d3e74a05d7db15bce4ad9ec0658ea98e3f06eeecf16b4c6fff2da457ddc2f34") + ], + [ + fromHex("61616161616161616161616161616161616161616161616161616161616161616161616161616161616161616161616161616161616161616161616161616161"), + fromHex("ffe054fe7ae0cb6dc65c3af9b61d5209f439851db43d0ba5997337df154668eb") + ], + [ + fromHex("de188941a3375d3a8a061e67576e926dc71a7fa3f0cceb97452b4d3227965f9ea8cc75076d9fb9c5417aa5cb30fc22198b34982dbb629e"), + fromHex("038051e9c324393bd1ca1978dd0952c2aa3742ca4f1bd5cd4611cea83892d382") + ], + [ + millionChars, + fromHex("cdc76e5c9914fb9281a1c7e284d73e67f1809a48a497200e046d39ccc7112cd0") + ], + [ + fromHex("aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa"), + fromHex("45ad4b37c6e2fc0a2cfcc1b5da524132ec707615c2cae1dbbc43c97aa521db81") + ] +]; +/** + * @see https://tools.ietf.org/html/rfc4231 + */ +export var hmacTestVectors = [ + [ + fromHex("0b0b0b0b0b0b0b0b0b0b0b0b0b0b0b0b0b0b0b0b"), + fromHex("4869205468657265"), + fromHex("b0344c61d8db38535ca8afceaf0bf12b881dc200c9833da726e9376c2e32cff7") + ], + [ + fromHex("4a656665"), + fromHex("7768617420646f2079612077616e7420666f72206e6f7468696e673f"), + fromHex("5bdcc146bf60754e6a042426089575c75a003f089d2739839dec58b964ec3843") + ], + [ + fromHex("aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa"), + fromHex("dddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddd"), + fromHex("773ea91e36800e46854db8ebd09181a72959098b3ef8c122d9635514ced565fe") + ], + [ + fromHex("0102030405060708090a0b0c0d0e0f10111213141516171819"), + 
fromHex("cdcdcdcdcdcdcdcdcdcdcdcdcdcdcdcdcdcdcdcdcdcdcdcdcdcdcdcdcdcdcdcdcdcdcdcdcdcdcdcdcdcdcdcdcdcdcdcdcdcd"), + fromHex("82558a389a443c0ea4cc819899f2083a85f0faa3e578f8077a2e3ff46729665b") + ], + [ + fromHex("aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa"), + fromHex("54657374205573696e67204c6172676572205468616e20426c6f636b2d53697a65204b6579202d2048617368204b6579204669727374"), + fromHex("60e431591ee0b67f0d8a26aacbf5b77f8e0bc6213728c5140546040f0ee37f54") + ], + [ + fromHex("aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa"), + fromHex("5468697320697320612074657374207573696e672061206c6172676572207468616e20626c6f636b2d73697a65206b657920616e642061206c6172676572207468616e20626c6f636b2d73697a6520646174612e20546865206b6579206e6565647320746f20626520686173686564206265666f7265206265696e6720757365642062792074686520484d414320616c676f726974686d2e"), + fromHex("9b09ffa71b942fcb27635fbcd5b0e944bfdc63644f0713938a7f51535c3a35e2") + ] +]; +//# sourceMappingURL=knownHashes.fixture.js.map \ No newline at end of file diff --git a/amplify/functions/downloadDocument/node_modules/@aws-crypto/sha256-js/build/module/knownHashes.fixture.js.map b/amplify/functions/downloadDocument/node_modules/@aws-crypto/sha256-js/build/module/knownHashes.fixture.js.map new file mode 100644 index 0000000..1232159 --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@aws-crypto/sha256-js/build/module/knownHashes.fixture.js.map @@ -0,0 +1 @@ 
+{"version":3,"file":"knownHashes.fixture.js","sourceRoot":"","sources":["../../src/knownHashes.fixture.ts"],"names":[],"mappings":"AAAA,OAAO,EAAE,OAAO,EAAE,MAAM,4BAA4B,CAAC;AAErD,IAAM,YAAY,GAAG,IAAI,UAAU,CAAC,OAAO,CAAC,CAAC;AAC7C,KAAK,IAAI,CAAC,GAAG,CAAC,EAAE,CAAC,GAAG,OAAO,EAAE,CAAC,EAAE,EAAE;IAChC,YAAY,CAAC,CAAC,CAAC,GAAG,EAAE,CAAC;CACtB;AAED,MAAM,CAAC,IAAM,eAAe,GAAoC;IAC9D;QACE,UAAU,CAAC,IAAI,CAAC,CAAC,EAAE,EAAE,EAAE,EAAE,EAAE,CAAC,CAAC;QAC7B,OAAO,CAAC,kEAAkE,CAAC;KAC5E;IACD;QACE,IAAI,UAAU,CAAC,CAAC,CAAC;QACjB,OAAO,CAAC,kEAAkE,CAAC;KAC5E;IACD;QACE,OAAO,CAAC,IAAI,CAAC;QACb,OAAO,CAAC,kEAAkE,CAAC;KAC5E;IACD;QACE,OAAO,CAAC,MAAM,CAAC;QACf,OAAO,CAAC,kEAAkE,CAAC;KAC5E;IACD;QACE,OAAO,CAAC,QAAQ,CAAC;QACjB,OAAO,CAAC,kEAAkE,CAAC;KAC5E;IACD;QACE,OAAO,CAAC,UAAU,CAAC;QACnB,OAAO,CAAC,kEAAkE,CAAC;KAC5E;IACD;QACE,OAAO,CAAC,YAAY,CAAC;QACrB,OAAO,CAAC,kEAAkE,CAAC;KAC5E;IACD;QACE,OAAO,CAAC,cAAc,CAAC;QACvB,OAAO,CAAC,kEAAkE,CAAC;KAC5E;IACD;QACE,OAAO,CAAC,gBAAgB,CAAC;QACzB,OAAO,CAAC,kEAAkE,CAAC;KAC5E;IACD;QACE,OAAO,CAAC,kBAAkB,CAAC;QAC3B,OAAO,CAAC,kEAAkE,CAAC;KAC5E;IACD;QACE,OAAO,CAAC,oBAAoB,CAAC;QAC7B,OAAO,CAAC,kEAAkE,CAAC;KAC5E;IACD;QACE,OAAO,CAAC,sBAAsB,CAAC;QAC/B,OAAO,CAAC,kEAAkE,CAAC;KAC5E;IACD;QACE,OAAO,CAAC,wBAAwB,CAAC;QACjC,OAAO,CAAC,kEAAkE,CAAC;KAC5E;IACD;QACE,OAAO,CAAC,0BAA0B,CAAC;QACnC,OAAO,CAAC,kEAAkE,CAAC;KAC5E;IACD;QACE,OAAO,CAAC,4BAA4B,CAAC;QACrC,OAAO,CAAC,kEAAkE,CAAC;KAC5E;IACD;QACE,OAAO,CAAC,8BAA8B,CAAC;QACvC,OAAO,CAAC,kEAAkE,CAAC;KAC5E;IACD;QACE,OAAO,CAAC,gCAAgC,CAAC;QACzC,OAAO,CAAC,kEAAkE,CAAC;KAC5E;IACD;QACE,OAAO,CAAC,kCAAkC,CAAC;QAC3C,OAAO,CAAC,kEAAkE,CAAC;KAC5E;IACD;QACE,OAAO,CAAC,oCAAoC,CAAC;QAC7C,OAAO,CAAC,kEAAkE,CAAC;KAC5E;IACD;QACE,OAAO,CAAC,sCAAsC,CAAC;QAC/C,OAAO,CAAC,kEAAkE,CAAC;KAC5E;IACD;QACE,OAAO,CAAC,wCAAwC,CAAC;QACjD,OAAO,CAAC,kEAAkE,CAAC;KAC5E;IACD;QACE,OAAO,CAAC,0CAA0C,CAAC;QACnD,OAAO,CAAC,kEAAkE,CAAC;KAC5E;IACD;QACE,OAAO,CAAC,4CAA4C,CAAC;QACrD,OAAO,CAAC,kEAAkE,CAAC;KAC5E;IACD;QACE,OAAO,CAAC,8CAA8C,CAAC;QACvD,OAAO,CAAC,kEAAkE,CAAC;KAC5E;IACD;QACE,OAAO
,CAAC,gDAAgD,CAAC;QACzD,OAAO,CAAC,kEAAkE,CAAC;KAC5E;IACD;QACE,OAAO,CAAC,kDAAkD,CAAC;QAC3D,OAAO,CAAC,kEAAkE,CAAC;KAC5E;IACD;QACE,OAAO,CAAC,oDAAoD,CAAC;QAC7D,OAAO,CAAC,kEAAkE,CAAC;KAC5E;IACD;QACE,OAAO,CAAC,sDAAsD,CAAC;QAC/D,OAAO,CAAC,kEAAkE,CAAC;KAC5E;IACD;QACE,OAAO,CAAC,wDAAwD,CAAC;QACjE,OAAO,CAAC,kEAAkE,CAAC;KAC5E;IACD;QACE,OAAO,CAAC,0DAA0D,CAAC;QACnE,OAAO,CAAC,kEAAkE,CAAC;KAC5E;IACD;QACE,OAAO,CAAC,4DAA4D,CAAC;QACrE,OAAO,CAAC,kEAAkE,CAAC;KAC5E;IACD;QACE,OAAO,CAAC,8DAA8D,CAAC;QACvE,OAAO,CAAC,kEAAkE,CAAC;KAC5E;IACD;QACE,OAAO,CAAC,gEAAgE,CAAC;QACzE,OAAO,CAAC,kEAAkE,CAAC;KAC5E;IACD;QACE,OAAO,CAAC,kEAAkE,CAAC;QAC3E,OAAO,CAAC,kEAAkE,CAAC;KAC5E;IACD;QACE,OAAO,CACL,oEAAoE,CACrE;QACD,OAAO,CAAC,kEAAkE,CAAC;KAC5E;IACD;QACE,OAAO,CACL,sEAAsE,CACvE;QACD,OAAO,CAAC,kEAAkE,CAAC;KAC5E;IACD;QACE,OAAO,CACL,wEAAwE,CACzE;QACD,OAAO,CAAC,kEAAkE,CAAC;KAC5E;IACD;QACE,OAAO,CACL,0EAA0E,CAC3E;QACD,OAAO,CAAC,kEAAkE,CAAC;KAC5E;IACD;QACE,OAAO,CACL,4EAA4E,CAC7E;QACD,OAAO,CAAC,kEAAkE,CAAC;KAC5E;IACD;QACE,OAAO,CACL,8EAA8E,CAC/E;QACD,OAAO,CAAC,kEAAkE,CAAC;KAC5E;IACD;QACE,OAAO,CACL,gFAAgF,CACjF;QACD,OAAO,CAAC,kEAAkE,CAAC;KAC5E;IACD;QACE,OAAO,CACL,kFAAkF,CACnF;QACD,OAAO,CAAC,kEAAkE,CAAC;KAC5E;IACD;QACE,OAAO,CACL,oFAAoF,CACrF;QACD,OAAO,CAAC,kEAAkE,CAAC;KAC5E;IACD;QACE,OAAO,CACL,sFAAsF,CACvF;QACD,OAAO,CAAC,kEAAkE,CAAC;KAC5E;IACD;QACE,OAAO,CACL,wFAAwF,CACzF;QACD,OAAO,CAAC,kEAAkE,CAAC;KAC5E;IACD;QACE,OAAO,CACL,0FAA0F,CAC3F;QACD,OAAO,CAAC,kEAAkE,CAAC;KAC5E;IACD;QACE,OAAO,CACL,4FAA4F,CAC7F;QACD,OAAO,CAAC,kEAAkE,CAAC;KAC5E;IACD;QACE,OAAO,CACL,8FAA8F,CAC/F;QACD,OAAO,CAAC,kEAAkE,CAAC;KAC5E;IACD;QACE,OAAO,CACL,gGAAgG,CACjG;QACD,OAAO,CAAC,kEAAkE,CAAC;KAC5E;IACD;QACE,OAAO,CACL,kGAAkG,CACnG;QACD,OAAO,CAAC,kEAAkE,CAAC;KAC5E;IACD;QACE,OAAO,CACL,oGAAoG,CACrG;QACD,OAAO,CAAC,kEAAkE,CAAC;KAC5E;IACD;QACE,OAAO,CACL,sGAAsG,CACvG;QACD,OAAO,CAAC,kEAAkE,CAAC;KAC5E;IACD;QACE,OAAO,CACL,wGAAwG,CACzG;QACD,OAAO,CAAC,kEAAkE,CAAC;KAC5E;IACD;QACE,OAAO,CACL,0GAA0G,CAC3G;QACD,OAAO,CAAC,kEAAkE,CAAC;KAC5E;IACD;QACE,OAAO,CACL,4GAA4G,CAC7G;Q
ACD,OAAO,CAAC,kEAAkE,CAAC;KAC5E;IACD;QACE,OAAO,CACL,8GAA8G,CAC/G;QACD,OAAO,CAAC,kEAAkE,CAAC;KAC5E;IACD;QACE,OAAO,CACL,gHAAgH,CACjH;QACD,OAAO,CAAC,kEAAkE,CAAC;KAC5E;IACD;QACE,OAAO,CACL,kHAAkH,CACnH;QACD,OAAO,CAAC,kEAAkE,CAAC;KAC5E;IACD;QACE,OAAO,CACL,oHAAoH,CACrH;QACD,OAAO,CAAC,kEAAkE,CAAC;KAC5E;IACD;QACE,OAAO,CACL,sHAAsH,CACvH;QACD,OAAO,CAAC,kEAAkE,CAAC;KAC5E;IACD;QACE,OAAO,CACL,wHAAwH,CACzH;QACD,OAAO,CAAC,kEAAkE,CAAC;KAC5E;IACD;QACE,OAAO,CACL,0HAA0H,CAC3H;QACD,OAAO,CAAC,kEAAkE,CAAC;KAC5E;IACD;QACE,OAAO,CACL,4HAA4H,CAC7H;QACD,OAAO,CAAC,kEAAkE,CAAC;KAC5E;IACD;QACE,OAAO,CACL,8HAA8H,CAC/H;QACD,OAAO,CAAC,kEAAkE,CAAC;KAC5E;IACD;QACE,OAAO,CACL,gIAAgI,CACjI;QACD,OAAO,CAAC,kEAAkE,CAAC;KAC5E;IACD;QACE,OAAO,CACL,kIAAkI,CACnI;QACD,OAAO,CAAC,kEAAkE,CAAC;KAC5E;IACD;QACE,OAAO,CACL,gHAAgH,CACjH;QACD,OAAO,CAAC,kEAAkE,CAAC;KAC5E;IACD;QACE,YAAY;QACZ,OAAO,CAAC,kEAAkE,CAAC;KAC5E;IACD;QACE,OAAO,CACL,wQAAwQ,CACzQ;QACD,OAAO,CAAC,kEAAkE,CAAC;KAC5E;CACF,CAAC;AAEF;;GAEG;AACH,MAAM,CAAC,IAAM,eAAe,GAAgD;IAC1E;QACE,OAAO,CAAC,0CAA0C,CAAC;QACnD,OAAO,CAAC,kBAAkB,CAAC;QAC3B,OAAO,CAAC,kEAAkE,CAAC;KAC5E;IACD;QACE,OAAO,CAAC,UAAU,CAAC;QACnB,OAAO,CAAC,0DAA0D,CAAC;QACnE,OAAO,CAAC,kEAAkE,CAAC;KAC5E;IACD;QACE,OAAO,CAAC,0CAA0C,CAAC;QACnD,OAAO,CACL,sGAAsG,CACvG;QACD,OAAO,CAAC,kEAAkE,CAAC;KAC5E;IACD;QACE,OAAO,CAAC,oDAAoD,CAAC;QAC7D,OAAO,CACL,sGAAsG,CACvG;QACD,OAAO,CAAC,kEAAkE,CAAC;KAC5E;IACD;QACE,OAAO,CACL,wQAAwQ,CACzQ;QACD,OAAO,CACL,8GAA8G,CAC/G;QACD,OAAO,CAAC,kEAAkE,CAAC;KAC5E;IACD;QACE,OAAO,CACL,wQAAwQ,CACzQ;QACD,OAAO,CACL,kTAAkT,CACnT;QACD,OAAO,CAAC,kEAAkE,CAAC;KAC5E;CACF,CAAC"} \ No newline at end of file diff --git a/amplify/functions/downloadDocument/node_modules/@aws-crypto/sha256-js/package.json b/amplify/functions/downloadDocument/node_modules/@aws-crypto/sha256-js/package.json new file mode 100644 index 0000000..e8ef52d --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@aws-crypto/sha256-js/package.json @@ -0,0 +1,32 @@ +{ + "name": "@aws-crypto/sha256-js", + "version": "5.2.0", + 
"scripts": { + "prepublishOnly": "tsc -p tsconfig.json && tsc -p tsconfig.module.json", + "pretest": "tsc -p tsconfig.test.json", + "test": "mocha --require ts-node/register test/**/*test.ts" + }, + "main": "./build/main/index.js", + "module": "./build/module/index.js", + "types": "./build/main/index.d.ts", + "repository": { + "type": "git", + "url": "git@github.com:aws/aws-sdk-js-crypto-helpers.git" + }, + "author": { + "name": "AWS Crypto Tools Team", + "email": "aws-cryptools@amazon.com", + "url": "https://docs.aws.amazon.com/aws-crypto-tools/index.html?id=docs_gateway#lang/en_us" + }, + "homepage": "https://github.com/aws/aws-sdk-js-crypto-helpers/tree/master/packages/sha256-js", + "license": "Apache-2.0", + "dependencies": { + "@aws-crypto/util": "^5.2.0", + "@aws-sdk/types": "^3.222.0", + "tslib": "^2.6.2" + }, + "engines": { + "node": ">=16.0.0" + }, + "gitHead": "c11b171b35ec5c093364f0e0d8dc4ab1af68e748" +} diff --git a/amplify/functions/downloadDocument/node_modules/@aws-crypto/sha256-js/src/RawSha256.ts b/amplify/functions/downloadDocument/node_modules/@aws-crypto/sha256-js/src/RawSha256.ts new file mode 100644 index 0000000..f4a385c --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@aws-crypto/sha256-js/src/RawSha256.ts @@ -0,0 +1,164 @@ +import { + BLOCK_SIZE, + DIGEST_LENGTH, + INIT, + KEY, + MAX_HASHABLE_LENGTH +} from "./constants"; + +/** + * @internal + */ +export class RawSha256 { + private state: Int32Array = Int32Array.from(INIT); + private temp: Int32Array = new Int32Array(64); + private buffer: Uint8Array = new Uint8Array(64); + private bufferLength: number = 0; + private bytesHashed: number = 0; + + /** + * @internal + */ + finished: boolean = false; + + update(data: Uint8Array): void { + if (this.finished) { + throw new Error("Attempted to update an already finished hash."); + } + + let position = 0; + let { byteLength } = data; + this.bytesHashed += byteLength; + + if (this.bytesHashed * 8 > MAX_HASHABLE_LENGTH) { + throw 
new Error("Cannot hash more than 2^53 - 1 bits"); + } + + while (byteLength > 0) { + this.buffer[this.bufferLength++] = data[position++]; + byteLength--; + + if (this.bufferLength === BLOCK_SIZE) { + this.hashBuffer(); + this.bufferLength = 0; + } + } + } + + digest(): Uint8Array { + if (!this.finished) { + const bitsHashed = this.bytesHashed * 8; + const bufferView = new DataView( + this.buffer.buffer, + this.buffer.byteOffset, + this.buffer.byteLength + ); + + const undecoratedLength = this.bufferLength; + bufferView.setUint8(this.bufferLength++, 0x80); + + // Ensure the final block has enough room for the hashed length + if (undecoratedLength % BLOCK_SIZE >= BLOCK_SIZE - 8) { + for (let i = this.bufferLength; i < BLOCK_SIZE; i++) { + bufferView.setUint8(i, 0); + } + this.hashBuffer(); + this.bufferLength = 0; + } + + for (let i = this.bufferLength; i < BLOCK_SIZE - 8; i++) { + bufferView.setUint8(i, 0); + } + bufferView.setUint32( + BLOCK_SIZE - 8, + Math.floor(bitsHashed / 0x100000000), + true + ); + bufferView.setUint32(BLOCK_SIZE - 4, bitsHashed); + + this.hashBuffer(); + + this.finished = true; + } + + // The value in state is little-endian rather than big-endian, so flip + // each word into a new Uint8Array + const out = new Uint8Array(DIGEST_LENGTH); + for (let i = 0; i < 8; i++) { + out[i * 4] = (this.state[i] >>> 24) & 0xff; + out[i * 4 + 1] = (this.state[i] >>> 16) & 0xff; + out[i * 4 + 2] = (this.state[i] >>> 8) & 0xff; + out[i * 4 + 3] = (this.state[i] >>> 0) & 0xff; + } + + return out; + } + + private hashBuffer(): void { + const { buffer, state } = this; + + let state0 = state[0], + state1 = state[1], + state2 = state[2], + state3 = state[3], + state4 = state[4], + state5 = state[5], + state6 = state[6], + state7 = state[7]; + + for (let i = 0; i < BLOCK_SIZE; i++) { + if (i < 16) { + this.temp[i] = + ((buffer[i * 4] & 0xff) << 24) | + ((buffer[i * 4 + 1] & 0xff) << 16) | + ((buffer[i * 4 + 2] & 0xff) << 8) | + (buffer[i * 4 + 3] & 0xff); + } else { 
+ let u = this.temp[i - 2]; + const t1 = + ((u >>> 17) | (u << 15)) ^ ((u >>> 19) | (u << 13)) ^ (u >>> 10); + + u = this.temp[i - 15]; + const t2 = + ((u >>> 7) | (u << 25)) ^ ((u >>> 18) | (u << 14)) ^ (u >>> 3); + + this.temp[i] = + ((t1 + this.temp[i - 7]) | 0) + ((t2 + this.temp[i - 16]) | 0); + } + + const t1 = + ((((((state4 >>> 6) | (state4 << 26)) ^ + ((state4 >>> 11) | (state4 << 21)) ^ + ((state4 >>> 25) | (state4 << 7))) + + ((state4 & state5) ^ (~state4 & state6))) | + 0) + + ((state7 + ((KEY[i] + this.temp[i]) | 0)) | 0)) | + 0; + + const t2 = + ((((state0 >>> 2) | (state0 << 30)) ^ + ((state0 >>> 13) | (state0 << 19)) ^ + ((state0 >>> 22) | (state0 << 10))) + + ((state0 & state1) ^ (state0 & state2) ^ (state1 & state2))) | + 0; + + state7 = state6; + state6 = state5; + state5 = state4; + state4 = (state3 + t1) | 0; + state3 = state2; + state2 = state1; + state1 = state0; + state0 = (t1 + t2) | 0; + } + + state[0] += state0; + state[1] += state1; + state[2] += state2; + state[3] += state3; + state[4] += state4; + state[5] += state5; + state[6] += state6; + state[7] += state7; + } +} diff --git a/amplify/functions/downloadDocument/node_modules/@aws-crypto/sha256-js/src/constants.ts b/amplify/functions/downloadDocument/node_modules/@aws-crypto/sha256-js/src/constants.ts new file mode 100644 index 0000000..8cede57 --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@aws-crypto/sha256-js/src/constants.ts @@ -0,0 +1,98 @@ +/** + * @internal + */ +export const BLOCK_SIZE: number = 64; + +/** + * @internal + */ +export const DIGEST_LENGTH: number = 32; + +/** + * @internal + */ +export const KEY = new Uint32Array([ + 0x428a2f98, + 0x71374491, + 0xb5c0fbcf, + 0xe9b5dba5, + 0x3956c25b, + 0x59f111f1, + 0x923f82a4, + 0xab1c5ed5, + 0xd807aa98, + 0x12835b01, + 0x243185be, + 0x550c7dc3, + 0x72be5d74, + 0x80deb1fe, + 0x9bdc06a7, + 0xc19bf174, + 0xe49b69c1, + 0xefbe4786, + 0x0fc19dc6, + 0x240ca1cc, + 0x2de92c6f, + 0x4a7484aa, + 0x5cb0a9dc, + 
0x76f988da, + 0x983e5152, + 0xa831c66d, + 0xb00327c8, + 0xbf597fc7, + 0xc6e00bf3, + 0xd5a79147, + 0x06ca6351, + 0x14292967, + 0x27b70a85, + 0x2e1b2138, + 0x4d2c6dfc, + 0x53380d13, + 0x650a7354, + 0x766a0abb, + 0x81c2c92e, + 0x92722c85, + 0xa2bfe8a1, + 0xa81a664b, + 0xc24b8b70, + 0xc76c51a3, + 0xd192e819, + 0xd6990624, + 0xf40e3585, + 0x106aa070, + 0x19a4c116, + 0x1e376c08, + 0x2748774c, + 0x34b0bcb5, + 0x391c0cb3, + 0x4ed8aa4a, + 0x5b9cca4f, + 0x682e6ff3, + 0x748f82ee, + 0x78a5636f, + 0x84c87814, + 0x8cc70208, + 0x90befffa, + 0xa4506ceb, + 0xbef9a3f7, + 0xc67178f2 +]); + +/** + * @internal + */ +export const INIT = [ + 0x6a09e667, + 0xbb67ae85, + 0x3c6ef372, + 0xa54ff53a, + 0x510e527f, + 0x9b05688c, + 0x1f83d9ab, + 0x5be0cd19 +]; + +/** + * @internal + */ +export const MAX_HASHABLE_LENGTH = 2 ** 53 - 1; diff --git a/amplify/functions/downloadDocument/node_modules/@aws-crypto/sha256-js/src/index.ts b/amplify/functions/downloadDocument/node_modules/@aws-crypto/sha256-js/src/index.ts new file mode 100644 index 0000000..4554d8a --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@aws-crypto/sha256-js/src/index.ts @@ -0,0 +1 @@ +export * from "./jsSha256"; diff --git a/amplify/functions/downloadDocument/node_modules/@aws-crypto/sha256-js/src/jsSha256.ts b/amplify/functions/downloadDocument/node_modules/@aws-crypto/sha256-js/src/jsSha256.ts new file mode 100644 index 0000000..f7bd993 --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@aws-crypto/sha256-js/src/jsSha256.ts @@ -0,0 +1,94 @@ +import { BLOCK_SIZE } from "./constants"; +import { RawSha256 } from "./RawSha256"; +import { Checksum, SourceData } from "@aws-sdk/types"; +import { isEmptyData, convertToBuffer } from "@aws-crypto/util"; + +export class Sha256 implements Checksum { + private readonly secret?: SourceData; + private hash: RawSha256; + private outer?: RawSha256; + private error: any; + + constructor(secret?: SourceData) { + this.secret = secret; + this.hash = new 
RawSha256(); + this.reset(); + } + + update(toHash: SourceData): void { + if (isEmptyData(toHash) || this.error) { + return; + } + + try { + this.hash.update(convertToBuffer(toHash)); + } catch (e) { + this.error = e; + } + } + + /* This synchronous method keeps compatibility + * with the v2 aws-sdk. + */ + digestSync(): Uint8Array { + if (this.error) { + throw this.error; + } + + if (this.outer) { + if (!this.outer.finished) { + this.outer.update(this.hash.digest()); + } + + return this.outer.digest(); + } + + return this.hash.digest(); + } + + /* The underlying digest method here is synchronous. + * To keep the same interface with the other hash functions + * the default is to expose this as an async method. + * However, it can sometimes be useful to have a sync method. + */ + async digest(): Promise { + return this.digestSync(); + } + + reset(): void { + this.hash = new RawSha256(); + if (this.secret) { + this.outer = new RawSha256(); + const inner = bufferFromSecret(this.secret); + const outer = new Uint8Array(BLOCK_SIZE); + outer.set(inner); + + for (let i = 0; i < BLOCK_SIZE; i++) { + inner[i] ^= 0x36; + outer[i] ^= 0x5c; + } + + this.hash.update(inner); + this.outer.update(outer); + + // overwrite the copied key in memory + for (let i = 0; i < inner.byteLength; i++) { + inner[i] = 0; + } + } + } +} + +function bufferFromSecret(secret: SourceData): Uint8Array { + let input = convertToBuffer(secret); + + if (input.byteLength > BLOCK_SIZE) { + const bufferHash = new RawSha256(); + bufferHash.update(input); + input = bufferHash.digest(); + } + + const buffer = new Uint8Array(BLOCK_SIZE); + buffer.set(input); + return buffer; +} diff --git a/amplify/functions/downloadDocument/node_modules/@aws-crypto/sha256-js/src/knownHashes.fixture.ts b/amplify/functions/downloadDocument/node_modules/@aws-crypto/sha256-js/src/knownHashes.fixture.ts new file mode 100644 index 0000000..c83dae2 --- /dev/null +++ 
b/amplify/functions/downloadDocument/node_modules/@aws-crypto/sha256-js/src/knownHashes.fixture.ts @@ -0,0 +1,401 @@ +import { fromHex } from "@aws-sdk/util-hex-encoding"; + +const millionChars = new Uint8Array(1000000); +for (let i = 0; i < 1000000; i++) { + millionChars[i] = 97; +} + +export const hashTestVectors: Array<[Uint8Array, Uint8Array]> = [ + [ + Uint8Array.from([97, 98, 99]), + fromHex("ba7816bf8f01cfea414140de5dae2223b00361a396177a9cb410ff61f20015ad") + ], + [ + new Uint8Array(0), + fromHex("e3b0c44298fc1c149afbf4c8996fb92427ae41e4649b934ca495991b7852b855") + ], + [ + fromHex("61"), + fromHex("ca978112ca1bbdcafac231b39a23dc4da786eff8147c4e72b9807785afee48bb") + ], + [ + fromHex("6161"), + fromHex("961b6dd3ede3cb8ecbaacbd68de040cd78eb2ed5889130cceb4c49268ea4d506") + ], + [ + fromHex("616161"), + fromHex("9834876dcfb05cb167a5c24953eba58c4ac89b1adf57f28f2f9d09af107ee8f0") + ], + [ + fromHex("61616161"), + fromHex("61be55a8e2f6b4e172338bddf184d6dbee29c98853e0a0485ecee7f27b9af0b4") + ], + [ + fromHex("6161616161"), + fromHex("ed968e840d10d2d313a870bc131a4e2c311d7ad09bdf32b3418147221f51a6e2") + ], + [ + fromHex("616161616161"), + fromHex("ed02457b5c41d964dbd2f2a609d63fe1bb7528dbe55e1abf5b52c249cd735797") + ], + [ + fromHex("61616161616161"), + fromHex("e46240714b5db3a23eee60479a623efba4d633d27fe4f03c904b9e219a7fbe60") + ], + [ + fromHex("6161616161616161"), + fromHex("1f3ce40415a2081fa3eee75fc39fff8e56c22270d1a978a7249b592dcebd20b4") + ], + [ + fromHex("616161616161616161"), + fromHex("f2aca93b80cae681221f0445fa4e2cae8a1f9f8fa1e1741d9639caad222f537d") + ], + [ + fromHex("61616161616161616161"), + fromHex("bf2cb58a68f684d95a3b78ef8f661c9a4e5b09e82cc8f9cc88cce90528caeb27") + ], + [ + fromHex("6161616161616161616161"), + fromHex("28cb017dfc99073aa1b47c1b30f413e3ce774c4991eb4158de50f9dbb36d8043") + ], + [ + fromHex("616161616161616161616161"), + fromHex("f24abc34b13fade76e805799f71187da6cd90b9cac373ae65ed57f143bd664e5") + ], + [ + 
fromHex("61616161616161616161616161"), + fromHex("a689d786e81340e45511dec6c7ab2d978434e5db123362450fe10cfac70d19d0") + ], + [ + fromHex("6161616161616161616161616161"), + fromHex("82cab7df0abfb9d95dca4e5937ce2968c798c726fea48c016bf9763221efda13") + ], + [ + fromHex("616161616161616161616161616161"), + fromHex("ef2df0b539c6c23de0f4cbe42648c301ae0e22e887340a4599fb4ef4e2678e48") + ], + [ + fromHex("61616161616161616161616161616161"), + fromHex("0c0beacef8877bbf2416eb00f2b5dc96354e26dd1df5517320459b1236860f8c") + ], + [ + fromHex("6161616161616161616161616161616161"), + fromHex("b860666ee2966dd8f903be44ee605c6e1366f926d9f17a8f49937d11624eb99d") + ], + [ + fromHex("616161616161616161616161616161616161"), + fromHex("c926defaaa3d13eda2fc63a553bb7fb7326bece6e7cb67ca5296e4727d89bab4") + ], + [ + fromHex("61616161616161616161616161616161616161"), + fromHex("a0b4aaab8a966e2193ba172d68162c4656860197f256b5f45f0203397ff3f99c") + ], + [ + fromHex("6161616161616161616161616161616161616161"), + fromHex("42492da06234ad0ac76f5d5debdb6d1ae027cffbe746a1c13b89bb8bc0139137") + ], + [ + fromHex("616161616161616161616161616161616161616161"), + fromHex("7df8e299c834de198e264c3e374bc58ecd9382252a705c183beb02f275571e3b") + ], + [ + fromHex("61616161616161616161616161616161616161616161"), + fromHex("ec7c494df6d2a7ea36668d656e6b8979e33641bfea378c15038af3964db057a3") + ], + [ + fromHex("6161616161616161616161616161616161616161616161"), + fromHex("897d3e95b65f26676081f8b9f3a98b6ee4424566303e8d4e7c7522ebae219eab") + ], + [ + fromHex("616161616161616161616161616161616161616161616161"), + fromHex("09f61f8d9cd65e6a0c258087c485b6293541364e42bd97b2d7936580c8aa3c54") + ], + [ + fromHex("61616161616161616161616161616161616161616161616161"), + fromHex("2f521e2a7d0bd812cbc035f4ed6806eb8d851793b04ba147e8f66b72f5d1f20f") + ], + [ + fromHex("6161616161616161616161616161616161616161616161616161"), + fromHex("9976d549a25115dab4e36d0c1fb8f31cb07da87dd83275977360eb7dc09e88de") + ], + [ + 
fromHex("616161616161616161616161616161616161616161616161616161"), + fromHex("cc0616e61cbd6e8e5e34e9fb2d320f37de915820206f5696c31f1fbd24aa16de") + ], + [ + fromHex("61616161616161616161616161616161616161616161616161616161"), + fromHex("9c547cb8115a44883b9f70ba68f75117cd55359c92611875e386f8af98c172ab") + ], + [ + fromHex("6161616161616161616161616161616161616161616161616161616161"), + fromHex("6913c9c7fd42fe23df8b6bcd4dbaf1c17748948d97f2980b432319c39eddcf6c") + ], + [ + fromHex("616161616161616161616161616161616161616161616161616161616161"), + fromHex("3a54fc0cbc0b0ef48b6507b7788096235d10292dd3ae24e22f5aa062d4f9864a") + ], + [ + fromHex("61616161616161616161616161616161616161616161616161616161616161"), + fromHex("61c60b487d1a921e0bcc9bf853dda0fb159b30bf57b2e2d2c753b00be15b5a09") + ], + [ + fromHex("6161616161616161616161616161616161616161616161616161616161616161"), + fromHex("3ba3f5f43b92602683c19aee62a20342b084dd5971ddd33808d81a328879a547") + ], + [ + fromHex( + "616161616161616161616161616161616161616161616161616161616161616161" + ), + fromHex("852785c805c77e71a22340a54e9d95933ed49121e7d2bf3c2d358854bc1359ea") + ], + [ + fromHex( + "61616161616161616161616161616161616161616161616161616161616161616161" + ), + fromHex("a27c896c4859204843166af66f0e902b9c3b3ed6d2fd13d435abc020065c526f") + ], + [ + fromHex( + "6161616161616161616161616161616161616161616161616161616161616161616161" + ), + fromHex("629362afc62c74497caed2272e30f8125ecd0965f8d8d7cfc4e260f7f8dd319d") + ], + [ + fromHex( + "616161616161616161616161616161616161616161616161616161616161616161616161" + ), + fromHex("22c1d24bcd03e9aee9832efccd6da613fc702793178e5f12c945c7b67ddda933") + ], + [ + fromHex( + "61616161616161616161616161616161616161616161616161616161616161616161616161" + ), + fromHex("21ec055b38ce759cd4d0f477e9bdec2c5b8199945db4439bae334a964df6246c") + ], + [ + fromHex( + "6161616161616161616161616161616161616161616161616161616161616161616161616161" + ), + 
fromHex("365a9c3e2c2af0a56e47a9dac51c2c5381bf8f41273bad3175e0e619126ad087") + ], + [ + fromHex( + "616161616161616161616161616161616161616161616161616161616161616161616161616161" + ), + fromHex("b4d5e56e929ba4cda349e9274e3603d0be246b82016bca20f363963c5f2d6845") + ], + [ + fromHex( + "61616161616161616161616161616161616161616161616161616161616161616161616161616161" + ), + fromHex("e33cdf9c7f7120b98e8c78408953e07f2ecd183006b5606df349b4c212acf43e") + ], + [ + fromHex( + "6161616161616161616161616161616161616161616161616161616161616161616161616161616161" + ), + fromHex("c0f8bd4dbc2b0c03107c1c37913f2a7501f521467f45dd0fef6958e9a4692719") + ], + [ + fromHex( + "616161616161616161616161616161616161616161616161616161616161616161616161616161616161" + ), + fromHex("7a538607fdaab9296995929f451565bbb8142e1844117322aafd2b3d76b01aff") + ], + [ + fromHex( + "61616161616161616161616161616161616161616161616161616161616161616161616161616161616161" + ), + fromHex("66d34fba71f8f450f7e45598853e53bfc23bbd129027cbb131a2f4ffd7878cd0") + ], + [ + fromHex( + "6161616161616161616161616161616161616161616161616161616161616161616161616161616161616161" + ), + fromHex("16849877c6c21ef0bfa68e4f6747300ddb171b170b9f00e189edc4c2fc4db93e") + ], + [ + fromHex( + "616161616161616161616161616161616161616161616161616161616161616161616161616161616161616161" + ), + fromHex("52789e3423b72beeb898456a4f49662e46b0cbb960784c5ef4b1399d327e7c27") + ], + [ + fromHex( + "61616161616161616161616161616161616161616161616161616161616161616161616161616161616161616161" + ), + fromHex("6643110c5628fff59edf76d82d5bf573bf800f16a4d65dfb1e5d6f1a46296d0b") + ], + [ + fromHex( + "6161616161616161616161616161616161616161616161616161616161616161616161616161616161616161616161" + ), + fromHex("11eaed932c6c6fddfc2efc394e609facf4abe814fc6180d03b14fce13a07d0e5") + ], + [ + fromHex( + "616161616161616161616161616161616161616161616161616161616161616161616161616161616161616161616161" + ), + 
fromHex("97daac0ee9998dfcad6c9c0970da5ca411c86233a944c25b47566f6a7bc1ddd5") + ], + [ + fromHex( + "61616161616161616161616161616161616161616161616161616161616161616161616161616161616161616161616161" + ), + fromHex("8f9bec6a62dd28ebd36d1227745592de6658b36974a3bb98a4c582f683ea6c42") + ], + [ + fromHex( + "6161616161616161616161616161616161616161616161616161616161616161616161616161616161616161616161616161" + ), + fromHex("160b4e433e384e05e537dc59b467f7cb2403f0214db15c5db58862a3f1156d2e") + ], + [ + fromHex( + "616161616161616161616161616161616161616161616161616161616161616161616161616161616161616161616161616161" + ), + fromHex("bfc5fe0e360152ca98c50fab4ed7e3078c17debc2917740d5000913b686ca129") + ], + [ + fromHex( + "61616161616161616161616161616161616161616161616161616161616161616161616161616161616161616161616161616161" + ), + fromHex("6c1b3dc7a706b9dc81352a6716b9c666c608d8626272c64b914ab05572fc6e84") + ], + [ + fromHex( + "6161616161616161616161616161616161616161616161616161616161616161616161616161616161616161616161616161616161" + ), + fromHex("abe346a7259fc90b4c27185419628e5e6af6466b1ae9b5446cac4bfc26cf05c4") + ], + [ + fromHex( + "616161616161616161616161616161616161616161616161616161616161616161616161616161616161616161616161616161616161" + ), + fromHex("a3f01b6939256127582ac8ae9fb47a382a244680806a3f613a118851c1ca1d47") + ], + [ + fromHex( + "61616161616161616161616161616161616161616161616161616161616161616161616161616161616161616161616161616161616161" + ), + fromHex("9f4390f8d30c2dd92ec9f095b65e2b9ae9b0a925a5258e241c9f1e910f734318") + ], + [ + fromHex( + "6161616161616161616161616161616161616161616161616161616161616161616161616161616161616161616161616161616161616161" + ), + fromHex("b35439a4ac6f0948b6d6f9e3c6af0f5f590ce20f1bde7090ef7970686ec6738a") + ], + [ + fromHex( + "616161616161616161616161616161616161616161616161616161616161616161616161616161616161616161616161616161616161616161" + ), + 
fromHex("f13b2d724659eb3bf47f2dd6af1accc87b81f09f59f2b75e5c0bed6589dfe8c6") + ], + [ + fromHex( + "61616161616161616161616161616161616161616161616161616161616161616161616161616161616161616161616161616161616161616161" + ), + fromHex("d5c039b748aa64665782974ec3dc3025c042edf54dcdc2b5de31385b094cb678") + ], + [ + fromHex( + "6161616161616161616161616161616161616161616161616161616161616161616161616161616161616161616161616161616161616161616161" + ), + fromHex("111bb261277afd65f0744b247cd3e47d386d71563d0ed995517807d5ebd4fba3") + ], + [ + fromHex( + "616161616161616161616161616161616161616161616161616161616161616161616161616161616161616161616161616161616161616161616161" + ), + fromHex("11ee391211c6256460b6ed375957fadd8061cafbb31daf967db875aebd5aaad4") + ], + [ + fromHex( + "61616161616161616161616161616161616161616161616161616161616161616161616161616161616161616161616161616161616161616161616161" + ), + fromHex("35d5fc17cfbbadd00f5e710ada39f194c5ad7c766ad67072245f1fad45f0f530") + ], + [ + fromHex( + "6161616161616161616161616161616161616161616161616161616161616161616161616161616161616161616161616161616161616161616161616161" + ), + fromHex("f506898cc7c2e092f9eb9fadae7ba50383f5b46a2a4fe5597dbb553a78981268") + ], + [ + fromHex( + "616161616161616161616161616161616161616161616161616161616161616161616161616161616161616161616161616161616161616161616161616161" + ), + fromHex("7d3e74a05d7db15bce4ad9ec0658ea98e3f06eeecf16b4c6fff2da457ddc2f34") + ], + [ + fromHex( + "61616161616161616161616161616161616161616161616161616161616161616161616161616161616161616161616161616161616161616161616161616161" + ), + fromHex("ffe054fe7ae0cb6dc65c3af9b61d5209f439851db43d0ba5997337df154668eb") + ], + [ + fromHex( + "de188941a3375d3a8a061e67576e926dc71a7fa3f0cceb97452b4d3227965f9ea8cc75076d9fb9c5417aa5cb30fc22198b34982dbb629e" + ), + fromHex("038051e9c324393bd1ca1978dd0952c2aa3742ca4f1bd5cd4611cea83892d382") + ], + [ + millionChars, + 
fromHex("cdc76e5c9914fb9281a1c7e284d73e67f1809a48a497200e046d39ccc7112cd0") + ], + [ + fromHex( + "aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa" + ), + fromHex("45ad4b37c6e2fc0a2cfcc1b5da524132ec707615c2cae1dbbc43c97aa521db81") + ] +]; + +/** + * @see https://tools.ietf.org/html/rfc4231 + */ +export const hmacTestVectors: Array<[Uint8Array, Uint8Array, Uint8Array]> = [ + [ + fromHex("0b0b0b0b0b0b0b0b0b0b0b0b0b0b0b0b0b0b0b0b"), + fromHex("4869205468657265"), + fromHex("b0344c61d8db38535ca8afceaf0bf12b881dc200c9833da726e9376c2e32cff7") + ], + [ + fromHex("4a656665"), + fromHex("7768617420646f2079612077616e7420666f72206e6f7468696e673f"), + fromHex("5bdcc146bf60754e6a042426089575c75a003f089d2739839dec58b964ec3843") + ], + [ + fromHex("aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa"), + fromHex( + "dddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddd" + ), + fromHex("773ea91e36800e46854db8ebd09181a72959098b3ef8c122d9635514ced565fe") + ], + [ + fromHex("0102030405060708090a0b0c0d0e0f10111213141516171819"), + fromHex( + "cdcdcdcdcdcdcdcdcdcdcdcdcdcdcdcdcdcdcdcdcdcdcdcdcdcdcdcdcdcdcdcdcdcdcdcdcdcdcdcdcdcdcdcdcdcdcdcdcdcd" + ), + fromHex("82558a389a443c0ea4cc819899f2083a85f0faa3e578f8077a2e3ff46729665b") + ], + [ + fromHex( + "aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa" + ), + fromHex( + "54657374205573696e67204c6172676572205468616e20426c6f636b2d53697a65204b6579202d2048617368204b6579204669727374" + ), + fromHex("60e431591ee0b67f0d8a26aacbf5b77f8e0bc6213728c5140546040f0ee37f54") + ], + [ + fromHex( + 
"aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa" + ), + fromHex( + "5468697320697320612074657374207573696e672061206c6172676572207468616e20626c6f636b2d73697a65206b657920616e642061206c6172676572207468616e20626c6f636b2d73697a6520646174612e20546865206b6579206e6565647320746f20626520686173686564206265666f7265206265696e6720757365642062792074686520484d414320616c676f726974686d2e" + ), + fromHex("9b09ffa71b942fcb27635fbcd5b0e944bfdc63644f0713938a7f51535c3a35e2") + ] +]; diff --git a/amplify/functions/downloadDocument/node_modules/@aws-crypto/sha256-js/tsconfig.json b/amplify/functions/downloadDocument/node_modules/@aws-crypto/sha256-js/tsconfig.json new file mode 100644 index 0000000..fb9aa95 --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@aws-crypto/sha256-js/tsconfig.json @@ -0,0 +1,10 @@ +{ + "extends": "../tsconfig.json", + "compilerOptions": { + "rootDir": "./src", + "outDir": "./build/main", + "lib": ["dom"], + }, + "include": ["src/**/*.ts"], + "exclude": ["node_modules/**"] +} diff --git a/amplify/functions/downloadDocument/node_modules/@aws-crypto/sha256-js/tsconfig.module.json b/amplify/functions/downloadDocument/node_modules/@aws-crypto/sha256-js/tsconfig.module.json new file mode 100644 index 0000000..7d0cfdd --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@aws-crypto/sha256-js/tsconfig.module.json @@ -0,0 +1,7 @@ +{ + "extends": "./tsconfig", + "compilerOptions": { + "outDir": "build/module", + "module": "esnext", + } +} diff --git a/amplify/functions/downloadDocument/node_modules/@aws-crypto/supports-web-crypto/CHANGELOG.md b/amplify/functions/downloadDocument/node_modules/@aws-crypto/supports-web-crypto/CHANGELOG.md new file mode 100644 index 0000000..13023ad --- /dev/null +++ 
b/amplify/functions/downloadDocument/node_modules/@aws-crypto/supports-web-crypto/CHANGELOG.md @@ -0,0 +1,66 @@ +# Change Log + +All notable changes to this project will be documented in this file. +See [Conventional Commits](https://conventionalcommits.org) for commit guidelines. + +# [5.2.0](https://github.com/aws/aws-sdk-js-crypto-helpers/compare/v5.1.0...v5.2.0) (2023-10-16) + +### Features + +- support ESM artifacts in all packages ([#752](https://github.com/aws/aws-sdk-js-crypto-helpers/issues/752)) ([e930ffb](https://github.com/aws/aws-sdk-js-crypto-helpers/commit/e930ffba5cfef66dd242049e7d514ced232c1e3b)) + +# [5.1.0](https://github.com/aws/aws-sdk-js-crypto-helpers/compare/v5.0.0...v5.1.0) (2023-09-22) + +### Bug Fixes + +- Update tsc to 2.x ([#735](https://github.com/aws/aws-sdk-js-crypto-helpers/issues/735)) ([782e0de](https://github.com/aws/aws-sdk-js-crypto-helpers/commit/782e0de9f5fef41f694130580a69d940894b6b8c)) + +# [5.0.0](https://github.com/aws/aws-sdk-js-crypto-helpers/compare/v4.0.1...v5.0.0) (2023-07-13) + +**Note:** Version bump only for package @aws-crypto/supports-web-crypto + +# [4.0.0](https://github.com/aws/aws-sdk-js-crypto-helpers/compare/v3.0.0...v4.0.0) (2023-02-20) + +**Note:** Version bump only for package @aws-crypto/supports-web-crypto + +# [3.0.0](https://github.com/aws/aws-sdk-js-crypto-helpers/compare/v2.0.2...v3.0.0) (2023-01-12) + +**Note:** Version bump only for package @aws-crypto/supports-web-crypto + +## [2.0.2](https://github.com/aws/aws-sdk-js-crypto-helpers/compare/v2.0.1...v2.0.2) (2022-09-07) + +**Note:** Version bump only for package @aws-crypto/supports-web-crypto + +# [2.0.0](https://github.com/aws/aws-sdk-js-crypto-helpers/compare/v1.2.2...v2.0.0) (2021-10-25) + +**Note:** Version bump only for package @aws-crypto/supports-web-crypto + +# [1.0.0](https://github.com/aws/aws-sdk-js-crypto-helpers/compare/@aws-crypto/supports-web-crypto@1.0.0-alpha.0...@aws-crypto/supports-web-crypto@1.0.0) (2020-10-22) + +### Bug 
Fixes + +- replace `sourceRoot` -> `rootDir` in tsconfig ([#169](https://github.com/aws/aws-sdk-js-crypto-helpers/issues/169)) ([d437167](https://github.com/aws/aws-sdk-js-crypto-helpers/commit/d437167b51d1c56a4fcc2bb8a446b74a7e3b7e06)) + +# [1.0.0-alpha.0](https://github.com/aws/aws-sdk-js-crypto-helpers/compare/@aws-crypto/supports-web-crypto@0.1.0-preview.4...@aws-crypto/supports-web-crypto@1.0.0-alpha.0) (2020-02-07) + +**Note:** Version bump only for package @aws-crypto/supports-web-crypto + +# [0.1.0-preview.4](https://github.com/aws/aws-sdk-js-crypto-helpers/compare/@aws-crypto/supports-web-crypto@0.1.0-preview.2...@aws-crypto/supports-web-crypto@0.1.0-preview.4) (2020-01-16) + +### Bug Fixes + +- Changed package.json files to point to the right Git repo ([#9](https://github.com/aws/aws-sdk-js-crypto-helpers/issues/9)) ([028245d](https://github.com/aws/aws-sdk-js-crypto-helpers/commit/028245d72e642ca98d82226afb300eb154503c4a)), closes [#8](https://github.com/aws/aws-sdk-js-crypto-helpers/issues/8) +- lerna version maintains package-lock ([#14](https://github.com/aws/aws-sdk-js-crypto-helpers/issues/14)) ([2ef29e1](https://github.com/aws/aws-sdk-js-crypto-helpers/commit/2ef29e13779703a5c9b32e93d18918fcb33b7272)), closes [#13](https://github.com/aws/aws-sdk-js-crypto-helpers/issues/13) + +# [0.1.0-preview.3](https://github.com/aws/aws-sdk-js-crypto-helpers/compare/@aws-crypto/supports-web-crypto@0.1.0-preview.2...@aws-crypto/supports-web-crypto@0.1.0-preview.3) (2019-11-15) + +### Bug Fixes + +- Changed package.json files to point to the right Git repo ([#9](https://github.com/aws/aws-sdk-js-crypto-helpers/issues/9)) ([028245d](https://github.com/aws/aws-sdk-js-crypto-helpers/commit/028245d72e642ca98d82226afb300eb154503c4a)), closes [#8](https://github.com/aws/aws-sdk-js-crypto-helpers/issues/8) +- lerna version maintains package-lock ([#14](https://github.com/aws/aws-sdk-js-crypto-helpers/issues/14)) 
([2ef29e1](https://github.com/aws/aws-sdk-js-crypto-helpers/commit/2ef29e13779703a5c9b32e93d18918fcb33b7272)), closes [#13](https://github.com/aws/aws-sdk-js-crypto-helpers/issues/13) + +# [0.1.0-preview.2](https://github.com/aws/aws-javascript-crypto-helpers/compare/@aws-crypto/supports-web-crypto@0.1.0-preview.1...@aws-crypto/supports-web-crypto@0.1.0-preview.2) (2019-10-30) + +### Bug Fixes + +- remove /src/ from .npmignore (for sourcemaps) ([#5](https://github.com/aws/aws-javascript-crypto-helpers/issues/5)) ([ec52056](https://github.com/aws/aws-javascript-crypto-helpers/commit/ec52056)) diff --git a/amplify/functions/downloadDocument/node_modules/@aws-crypto/supports-web-crypto/LICENSE b/amplify/functions/downloadDocument/node_modules/@aws-crypto/supports-web-crypto/LICENSE new file mode 100644 index 0000000..d645695 --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@aws-crypto/supports-web-crypto/LICENSE @@ -0,0 +1,202 @@ + + Apache License + Version 2.0, January 2004 + http://www.apache.org/licenses/ + + TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION + + 1. Definitions. + + "License" shall mean the terms and conditions for use, reproduction, + and distribution as defined by Sections 1 through 9 of this document. + + "Licensor" shall mean the copyright owner or entity authorized by + the copyright owner that is granting the License. + + "Legal Entity" shall mean the union of the acting entity and all + other entities that control, are controlled by, or are under common + control with that entity. For the purposes of this definition, + "control" means (i) the power, direct or indirect, to cause the + direction or management of such entity, whether by contract or + otherwise, or (ii) ownership of fifty percent (50%) or more of the + outstanding shares, or (iii) beneficial ownership of such entity. + + "You" (or "Your") shall mean an individual or Legal Entity + exercising permissions granted by this License. 
+ + "Source" form shall mean the preferred form for making modifications, + including but not limited to software source code, documentation + source, and configuration files. + + "Object" form shall mean any form resulting from mechanical + transformation or translation of a Source form, including but + not limited to compiled object code, generated documentation, + and conversions to other media types. + + "Work" shall mean the work of authorship, whether in Source or + Object form, made available under the License, as indicated by a + copyright notice that is included in or attached to the work + (an example is provided in the Appendix below). + + "Derivative Works" shall mean any work, whether in Source or Object + form, that is based on (or derived from) the Work and for which the + editorial revisions, annotations, elaborations, or other modifications + represent, as a whole, an original work of authorship. For the purposes + of this License, Derivative Works shall not include works that remain + separable from, or merely link (or bind by name) to the interfaces of, + the Work and Derivative Works thereof. + + "Contribution" shall mean any work of authorship, including + the original version of the Work and any modifications or additions + to that Work or Derivative Works thereof, that is intentionally + submitted to Licensor for inclusion in the Work by the copyright owner + or by an individual or Legal Entity authorized to submit on behalf of + the copyright owner. 
For the purposes of this definition, "submitted" + means any form of electronic, verbal, or written communication sent + to the Licensor or its representatives, including but not limited to + communication on electronic mailing lists, source code control systems, + and issue tracking systems that are managed by, or on behalf of, the + Licensor for the purpose of discussing and improving the Work, but + excluding communication that is conspicuously marked or otherwise + designated in writing by the copyright owner as "Not a Contribution." + + "Contributor" shall mean Licensor and any individual or Legal Entity + on behalf of whom a Contribution has been received by Licensor and + subsequently incorporated within the Work. + + 2. Grant of Copyright License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + copyright license to reproduce, prepare Derivative Works of, + publicly display, publicly perform, sublicense, and distribute the + Work and such Derivative Works in Source or Object form. + + 3. Grant of Patent License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + (except as stated in this section) patent license to make, have made, + use, offer to sell, sell, import, and otherwise transfer the Work, + where such license applies only to those patent claims licensable + by such Contributor that are necessarily infringed by their + Contribution(s) alone or by combination of their Contribution(s) + with the Work to which such Contribution(s) was submitted. 
If You + institute patent litigation against any entity (including a + cross-claim or counterclaim in a lawsuit) alleging that the Work + or a Contribution incorporated within the Work constitutes direct + or contributory patent infringement, then any patent licenses + granted to You under this License for that Work shall terminate + as of the date such litigation is filed. + + 4. Redistribution. You may reproduce and distribute copies of the + Work or Derivative Works thereof in any medium, with or without + modifications, and in Source or Object form, provided that You + meet the following conditions: + + (a) You must give any other recipients of the Work or + Derivative Works a copy of this License; and + + (b) You must cause any modified files to carry prominent notices + stating that You changed the files; and + + (c) You must retain, in the Source form of any Derivative Works + that You distribute, all copyright, patent, trademark, and + attribution notices from the Source form of the Work, + excluding those notices that do not pertain to any part of + the Derivative Works; and + + (d) If the Work includes a "NOTICE" text file as part of its + distribution, then any Derivative Works that You distribute must + include a readable copy of the attribution notices contained + within such NOTICE file, excluding those notices that do not + pertain to any part of the Derivative Works, in at least one + of the following places: within a NOTICE text file distributed + as part of the Derivative Works; within the Source form or + documentation, if provided along with the Derivative Works; or, + within a display generated by the Derivative Works, if and + wherever such third-party notices normally appear. The contents + of the NOTICE file are for informational purposes only and + do not modify the License. 
You may add Your own attribution + notices within Derivative Works that You distribute, alongside + or as an addendum to the NOTICE text from the Work, provided + that such additional attribution notices cannot be construed + as modifying the License. + + You may add Your own copyright statement to Your modifications and + may provide additional or different license terms and conditions + for use, reproduction, or distribution of Your modifications, or + for any such Derivative Works as a whole, provided Your use, + reproduction, and distribution of the Work otherwise complies with + the conditions stated in this License. + + 5. Submission of Contributions. Unless You explicitly state otherwise, + any Contribution intentionally submitted for inclusion in the Work + by You to the Licensor shall be under the terms and conditions of + this License, without any additional terms or conditions. + Notwithstanding the above, nothing herein shall supersede or modify + the terms of any separate license agreement you may have executed + with Licensor regarding such Contributions. + + 6. Trademarks. This License does not grant permission to use the trade + names, trademarks, service marks, or product names of the Licensor, + except as required for reasonable and customary use in describing the + origin of the Work and reproducing the content of the NOTICE file. + + 7. Disclaimer of Warranty. Unless required by applicable law or + agreed to in writing, Licensor provides the Work (and each + Contributor provides its Contributions) on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or + implied, including, without limitation, any warranties or conditions + of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A + PARTICULAR PURPOSE. You are solely responsible for determining the + appropriateness of using or redistributing the Work and assume any + risks associated with Your exercise of permissions under this License. + + 8. 
Limitation of Liability. In no event and under no legal theory, + whether in tort (including negligence), contract, or otherwise, + unless required by applicable law (such as deliberate and grossly + negligent acts) or agreed to in writing, shall any Contributor be + liable to You for damages, including any direct, indirect, special, + incidental, or consequential damages of any character arising as a + result of this License or out of the use or inability to use the + Work (including but not limited to damages for loss of goodwill, + work stoppage, computer failure or malfunction, or any and all + other commercial damages or losses), even if such Contributor + has been advised of the possibility of such damages. + + 9. Accepting Warranty or Additional Liability. While redistributing + the Work or Derivative Works thereof, You may choose to offer, + and charge a fee for, acceptance of support, warranty, indemnity, + or other liability obligations and/or rights consistent with this + License. However, in accepting such obligations, You may act only + on Your own behalf and on Your sole responsibility, not on behalf + of any other Contributor, and only if You agree to indemnify, + defend, and hold each Contributor harmless for any liability + incurred by, or claims asserted against, such Contributor by reason + of your accepting any such warranty or additional liability. + + END OF TERMS AND CONDITIONS + + APPENDIX: How to apply the Apache License to your work. + + To apply the Apache License to your work, attach the following + boilerplate notice, with the fields enclosed by brackets "[]" + replaced with your own identifying information. (Don't include + the brackets!) The text should be enclosed in the appropriate + comment syntax for the file format. We also recommend that a + file or class name and description of purpose be included on the + same "printed page" as the copyright notice for easier + identification within third-party archives. 
+ + Copyright [yyyy] [name of copyright owner] + + Licensed under the Apache License, Version 2.0 (the "License"); + you may not use this file except in compliance with the License. + You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + + Unless required by applicable law or agreed to in writing, software + distributed under the License is distributed on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + See the License for the specific language governing permissions and + limitations under the License. diff --git a/amplify/functions/downloadDocument/node_modules/@aws-crypto/supports-web-crypto/README.md b/amplify/functions/downloadDocument/node_modules/@aws-crypto/supports-web-crypto/README.md new file mode 100644 index 0000000..7891357 --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@aws-crypto/supports-web-crypto/README.md @@ -0,0 +1,32 @@ +# @aws-crypto/supports-web-crypto + +Functions to check web crypto support for browsers. + +## Usage + +``` +import {supportsWebCrypto} from '@aws-crypto/supports-web-crypto'; + +if (supportsWebCrypto(window)) { + // window.crypto.subtle.encrypt will exist +} + +``` + +## supportsWebCrypto + +Used to make sure `window.crypto.subtle` exists and implements crypto functions +as well as a cryptographic secure random source exists. + +## supportsSecureRandom + +Used to make sure that a cryptographic secure random source exists. +Does not check for `window.crypto.subtle`. 
+ +## supportsSubtleCrypto + +## supportsZeroByteGCM + +## Test + +`npm test` diff --git a/amplify/functions/downloadDocument/node_modules/@aws-crypto/supports-web-crypto/build/main/index.d.ts b/amplify/functions/downloadDocument/node_modules/@aws-crypto/supports-web-crypto/build/main/index.d.ts new file mode 100644 index 0000000..9725c9c --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@aws-crypto/supports-web-crypto/build/main/index.d.ts @@ -0,0 +1 @@ +export * from "./supportsWebCrypto"; diff --git a/amplify/functions/downloadDocument/node_modules/@aws-crypto/supports-web-crypto/build/main/index.js b/amplify/functions/downloadDocument/node_modules/@aws-crypto/supports-web-crypto/build/main/index.js new file mode 100644 index 0000000..cc4c93f --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@aws-crypto/supports-web-crypto/build/main/index.js @@ -0,0 +1,5 @@ +"use strict"; +Object.defineProperty(exports, "__esModule", { value: true }); +var tslib_1 = require("tslib"); +tslib_1.__exportStar(require("./supportsWebCrypto"), exports); +//# sourceMappingURL=index.js.map \ No newline at end of file diff --git a/amplify/functions/downloadDocument/node_modules/@aws-crypto/supports-web-crypto/build/main/index.js.map b/amplify/functions/downloadDocument/node_modules/@aws-crypto/supports-web-crypto/build/main/index.js.map new file mode 100644 index 0000000..df0dd2c --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@aws-crypto/supports-web-crypto/build/main/index.js.map @@ -0,0 +1 @@ +{"version":3,"file":"index.js","sourceRoot":"","sources":["../../src/index.ts"],"names":[],"mappings":";;;AAAA,8DAAoC"} \ No newline at end of file diff --git a/amplify/functions/downloadDocument/node_modules/@aws-crypto/supports-web-crypto/build/main/supportsWebCrypto.d.ts b/amplify/functions/downloadDocument/node_modules/@aws-crypto/supports-web-crypto/build/main/supportsWebCrypto.d.ts new file mode 100644 index 0000000..f2723dc --- 
/dev/null +++ b/amplify/functions/downloadDocument/node_modules/@aws-crypto/supports-web-crypto/build/main/supportsWebCrypto.d.ts @@ -0,0 +1,4 @@ +export declare function supportsWebCrypto(window: Window): boolean; +export declare function supportsSecureRandom(window: Window): boolean; +export declare function supportsSubtleCrypto(subtle: SubtleCrypto): boolean; +export declare function supportsZeroByteGCM(subtle: SubtleCrypto): Promise; diff --git a/amplify/functions/downloadDocument/node_modules/@aws-crypto/supports-web-crypto/build/main/supportsWebCrypto.js b/amplify/functions/downloadDocument/node_modules/@aws-crypto/supports-web-crypto/build/main/supportsWebCrypto.js new file mode 100644 index 0000000..378f31e --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@aws-crypto/supports-web-crypto/build/main/supportsWebCrypto.js @@ -0,0 +1,69 @@ +"use strict"; +Object.defineProperty(exports, "__esModule", { value: true }); +exports.supportsZeroByteGCM = exports.supportsSubtleCrypto = exports.supportsSecureRandom = exports.supportsWebCrypto = void 0; +var tslib_1 = require("tslib"); +var subtleCryptoMethods = [ + "decrypt", + "digest", + "encrypt", + "exportKey", + "generateKey", + "importKey", + "sign", + "verify" +]; +function supportsWebCrypto(window) { + if (supportsSecureRandom(window) && + typeof window.crypto.subtle === "object") { + var subtle = window.crypto.subtle; + return supportsSubtleCrypto(subtle); + } + return false; +} +exports.supportsWebCrypto = supportsWebCrypto; +function supportsSecureRandom(window) { + if (typeof window === "object" && typeof window.crypto === "object") { + var getRandomValues = window.crypto.getRandomValues; + return typeof getRandomValues === "function"; + } + return false; +} +exports.supportsSecureRandom = supportsSecureRandom; +function supportsSubtleCrypto(subtle) { + return (subtle && + subtleCryptoMethods.every(function (methodName) { return typeof subtle[methodName] === "function"; })); +} 
+exports.supportsSubtleCrypto = supportsSubtleCrypto; +function supportsZeroByteGCM(subtle) { + return tslib_1.__awaiter(this, void 0, void 0, function () { + var key, zeroByteAuthTag, _a; + return tslib_1.__generator(this, function (_b) { + switch (_b.label) { + case 0: + if (!supportsSubtleCrypto(subtle)) + return [2 /*return*/, false]; + _b.label = 1; + case 1: + _b.trys.push([1, 4, , 5]); + return [4 /*yield*/, subtle.generateKey({ name: "AES-GCM", length: 128 }, false, ["encrypt"])]; + case 2: + key = _b.sent(); + return [4 /*yield*/, subtle.encrypt({ + name: "AES-GCM", + iv: new Uint8Array(Array(12)), + additionalData: new Uint8Array(Array(16)), + tagLength: 128 + }, key, new Uint8Array(0))]; + case 3: + zeroByteAuthTag = _b.sent(); + return [2 /*return*/, zeroByteAuthTag.byteLength === 16]; + case 4: + _a = _b.sent(); + return [2 /*return*/, false]; + case 5: return [2 /*return*/]; + } + }); + }); +} +exports.supportsZeroByteGCM = supportsZeroByteGCM; +//# sourceMappingURL=supportsWebCrypto.js.map \ No newline at end of file diff --git a/amplify/functions/downloadDocument/node_modules/@aws-crypto/supports-web-crypto/build/main/supportsWebCrypto.js.map b/amplify/functions/downloadDocument/node_modules/@aws-crypto/supports-web-crypto/build/main/supportsWebCrypto.js.map new file mode 100644 index 0000000..1cc0ea3 --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@aws-crypto/supports-web-crypto/build/main/supportsWebCrypto.js.map @@ -0,0 +1 @@ 
+{"version":3,"file":"supportsWebCrypto.js","sourceRoot":"","sources":["../../src/supportsWebCrypto.ts"],"names":[],"mappings":";;;;AAUA,IAAM,mBAAmB,GAA8B;IACrD,SAAS;IACT,QAAQ;IACR,SAAS;IACT,WAAW;IACX,aAAa;IACb,WAAW;IACX,MAAM;IACN,QAAQ;CACT,CAAC;AAEF,SAAgB,iBAAiB,CAAC,MAAc;IAC9C,IACE,oBAAoB,CAAC,MAAM,CAAC;QAC5B,OAAO,MAAM,CAAC,MAAM,CAAC,MAAM,KAAK,QAAQ,EACxC;QACQ,IAAA,MAAM,GAAK,MAAM,CAAC,MAAM,OAAlB,CAAmB;QAEjC,OAAO,oBAAoB,CAAC,MAAM,CAAC,CAAC;KACrC;IAED,OAAO,KAAK,CAAC;AACf,CAAC;AAXD,8CAWC;AAED,SAAgB,oBAAoB,CAAC,MAAc;IACjD,IAAI,OAAO,MAAM,KAAK,QAAQ,IAAI,OAAO,MAAM,CAAC,MAAM,KAAK,QAAQ,EAAE;QAC3D,IAAA,eAAe,GAAK,MAAM,CAAC,MAAM,gBAAlB,CAAmB;QAE1C,OAAO,OAAO,eAAe,KAAK,UAAU,CAAC;KAC9C;IAED,OAAO,KAAK,CAAC;AACf,CAAC;AARD,oDAQC;AAED,SAAgB,oBAAoB,CAAC,MAAoB;IACvD,OAAO,CACL,MAAM;QACN,mBAAmB,CAAC,KAAK,CACvB,UAAA,UAAU,IAAI,OAAA,OAAO,MAAM,CAAC,UAAU,CAAC,KAAK,UAAU,EAAxC,CAAwC,CACvD,CACF,CAAC;AACJ,CAAC;AAPD,oDAOC;AAED,SAAsB,mBAAmB,CAAC,MAAoB;;;;;;oBAC5D,IAAI,CAAC,oBAAoB,CAAC,MAAM,CAAC;wBAAE,sBAAO,KAAK,EAAC;;;;oBAElC,qBAAM,MAAM,CAAC,WAAW,CAClC,EAAE,IAAI,EAAE,SAAS,EAAE,MAAM,EAAE,GAAG,EAAE,EAChC,KAAK,EACL,CAAC,SAAS,CAAC,CACZ,EAAA;;oBAJK,GAAG,GAAG,SAIX;oBACuB,qBAAM,MAAM,CAAC,OAAO,CAC1C;4BACE,IAAI,EAAE,SAAS;4BACf,EAAE,EAAE,IAAI,UAAU,CAAC,KAAK,CAAC,EAAE,CAAC,CAAC;4BAC7B,cAAc,EAAE,IAAI,UAAU,CAAC,KAAK,CAAC,EAAE,CAAC,CAAC;4BACzC,SAAS,EAAE,GAAG;yBACf,EACD,GAAG,EACH,IAAI,UAAU,CAAC,CAAC,CAAC,CAClB,EAAA;;oBATK,eAAe,GAAG,SASvB;oBACD,sBAAO,eAAe,CAAC,UAAU,KAAK,EAAE,EAAC;;;oBAEzC,sBAAO,KAAK,EAAC;;;;;CAEhB;AAtBD,kDAsBC"} \ No newline at end of file diff --git a/amplify/functions/downloadDocument/node_modules/@aws-crypto/supports-web-crypto/build/module/index.d.ts b/amplify/functions/downloadDocument/node_modules/@aws-crypto/supports-web-crypto/build/module/index.d.ts new file mode 100644 index 0000000..9725c9c --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@aws-crypto/supports-web-crypto/build/module/index.d.ts @@ -0,0 +1 @@ +export * from "./supportsWebCrypto"; diff --git 
a/amplify/functions/downloadDocument/node_modules/@aws-crypto/supports-web-crypto/build/module/index.js b/amplify/functions/downloadDocument/node_modules/@aws-crypto/supports-web-crypto/build/module/index.js new file mode 100644 index 0000000..f5527ea --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@aws-crypto/supports-web-crypto/build/module/index.js @@ -0,0 +1,2 @@ +export * from "./supportsWebCrypto"; +//# sourceMappingURL=index.js.map \ No newline at end of file diff --git a/amplify/functions/downloadDocument/node_modules/@aws-crypto/supports-web-crypto/build/module/index.js.map b/amplify/functions/downloadDocument/node_modules/@aws-crypto/supports-web-crypto/build/module/index.js.map new file mode 100644 index 0000000..b2df430 --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@aws-crypto/supports-web-crypto/build/module/index.js.map @@ -0,0 +1 @@ +{"version":3,"file":"index.js","sourceRoot":"","sources":["../../src/index.ts"],"names":[],"mappings":"AAAA,cAAc,qBAAqB,CAAC"} \ No newline at end of file diff --git a/amplify/functions/downloadDocument/node_modules/@aws-crypto/supports-web-crypto/build/module/supportsWebCrypto.d.ts b/amplify/functions/downloadDocument/node_modules/@aws-crypto/supports-web-crypto/build/module/supportsWebCrypto.d.ts new file mode 100644 index 0000000..f2723dc --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@aws-crypto/supports-web-crypto/build/module/supportsWebCrypto.d.ts @@ -0,0 +1,4 @@ +export declare function supportsWebCrypto(window: Window): boolean; +export declare function supportsSecureRandom(window: Window): boolean; +export declare function supportsSubtleCrypto(subtle: SubtleCrypto): boolean; +export declare function supportsZeroByteGCM(subtle: SubtleCrypto): Promise; diff --git a/amplify/functions/downloadDocument/node_modules/@aws-crypto/supports-web-crypto/build/module/supportsWebCrypto.js 
b/amplify/functions/downloadDocument/node_modules/@aws-crypto/supports-web-crypto/build/module/supportsWebCrypto.js new file mode 100644 index 0000000..70b46e6 --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@aws-crypto/supports-web-crypto/build/module/supportsWebCrypto.js @@ -0,0 +1,62 @@ +import { __awaiter, __generator } from "tslib"; +var subtleCryptoMethods = [ + "decrypt", + "digest", + "encrypt", + "exportKey", + "generateKey", + "importKey", + "sign", + "verify" +]; +export function supportsWebCrypto(window) { + if (supportsSecureRandom(window) && + typeof window.crypto.subtle === "object") { + var subtle = window.crypto.subtle; + return supportsSubtleCrypto(subtle); + } + return false; +} +export function supportsSecureRandom(window) { + if (typeof window === "object" && typeof window.crypto === "object") { + var getRandomValues = window.crypto.getRandomValues; + return typeof getRandomValues === "function"; + } + return false; +} +export function supportsSubtleCrypto(subtle) { + return (subtle && + subtleCryptoMethods.every(function (methodName) { return typeof subtle[methodName] === "function"; })); +} +export function supportsZeroByteGCM(subtle) { + return __awaiter(this, void 0, void 0, function () { + var key, zeroByteAuthTag, _a; + return __generator(this, function (_b) { + switch (_b.label) { + case 0: + if (!supportsSubtleCrypto(subtle)) + return [2 /*return*/, false]; + _b.label = 1; + case 1: + _b.trys.push([1, 4, , 5]); + return [4 /*yield*/, subtle.generateKey({ name: "AES-GCM", length: 128 }, false, ["encrypt"])]; + case 2: + key = _b.sent(); + return [4 /*yield*/, subtle.encrypt({ + name: "AES-GCM", + iv: new Uint8Array(Array(12)), + additionalData: new Uint8Array(Array(16)), + tagLength: 128 + }, key, new Uint8Array(0))]; + case 3: + zeroByteAuthTag = _b.sent(); + return [2 /*return*/, zeroByteAuthTag.byteLength === 16]; + case 4: + _a = _b.sent(); + return [2 /*return*/, false]; + case 5: return [2 /*return*/]; + } + }); 
+ }); +} +//# sourceMappingURL=supportsWebCrypto.js.map \ No newline at end of file diff --git a/amplify/functions/downloadDocument/node_modules/@aws-crypto/supports-web-crypto/build/module/supportsWebCrypto.js.map b/amplify/functions/downloadDocument/node_modules/@aws-crypto/supports-web-crypto/build/module/supportsWebCrypto.js.map new file mode 100644 index 0000000..967fc19 --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@aws-crypto/supports-web-crypto/build/module/supportsWebCrypto.js.map @@ -0,0 +1 @@ +{"version":3,"file":"supportsWebCrypto.js","sourceRoot":"","sources":["../../src/supportsWebCrypto.ts"],"names":[],"mappings":";AAUA,IAAM,mBAAmB,GAA8B;IACrD,SAAS;IACT,QAAQ;IACR,SAAS;IACT,WAAW;IACX,aAAa;IACb,WAAW;IACX,MAAM;IACN,QAAQ;CACT,CAAC;AAEF,MAAM,UAAU,iBAAiB,CAAC,MAAc;IAC9C,IACE,oBAAoB,CAAC,MAAM,CAAC;QAC5B,OAAO,MAAM,CAAC,MAAM,CAAC,MAAM,KAAK,QAAQ,EACxC;QACQ,IAAA,MAAM,GAAK,MAAM,CAAC,MAAM,OAAlB,CAAmB;QAEjC,OAAO,oBAAoB,CAAC,MAAM,CAAC,CAAC;KACrC;IAED,OAAO,KAAK,CAAC;AACf,CAAC;AAED,MAAM,UAAU,oBAAoB,CAAC,MAAc;IACjD,IAAI,OAAO,MAAM,KAAK,QAAQ,IAAI,OAAO,MAAM,CAAC,MAAM,KAAK,QAAQ,EAAE;QAC3D,IAAA,eAAe,GAAK,MAAM,CAAC,MAAM,gBAAlB,CAAmB;QAE1C,OAAO,OAAO,eAAe,KAAK,UAAU,CAAC;KAC9C;IAED,OAAO,KAAK,CAAC;AACf,CAAC;AAED,MAAM,UAAU,oBAAoB,CAAC,MAAoB;IACvD,OAAO,CACL,MAAM;QACN,mBAAmB,CAAC,KAAK,CACvB,UAAA,UAAU,IAAI,OAAA,OAAO,MAAM,CAAC,UAAU,CAAC,KAAK,UAAU,EAAxC,CAAwC,CACvD,CACF,CAAC;AACJ,CAAC;AAED,MAAM,UAAgB,mBAAmB,CAAC,MAAoB;;;;;;oBAC5D,IAAI,CAAC,oBAAoB,CAAC,MAAM,CAAC;wBAAE,sBAAO,KAAK,EAAC;;;;oBAElC,qBAAM,MAAM,CAAC,WAAW,CAClC,EAAE,IAAI,EAAE,SAAS,EAAE,MAAM,EAAE,GAAG,EAAE,EAChC,KAAK,EACL,CAAC,SAAS,CAAC,CACZ,EAAA;;oBAJK,GAAG,GAAG,SAIX;oBACuB,qBAAM,MAAM,CAAC,OAAO,CAC1C;4BACE,IAAI,EAAE,SAAS;4BACf,EAAE,EAAE,IAAI,UAAU,CAAC,KAAK,CAAC,EAAE,CAAC,CAAC;4BAC7B,cAAc,EAAE,IAAI,UAAU,CAAC,KAAK,CAAC,EAAE,CAAC,CAAC;4BACzC,SAAS,EAAE,GAAG;yBACf,EACD,GAAG,EACH,IAAI,UAAU,CAAC,CAAC,CAAC,CAClB,EAAA;;oBATK,eAAe,GAAG,SASvB;oBACD,sBAAO,eAAe,CAAC,UAAU,KAAK,EAAE,EAAC;;;oBAEzC,sBAAO,KAAK,EAAC;;;;;CAE
hB"} \ No newline at end of file diff --git a/amplify/functions/downloadDocument/node_modules/@aws-crypto/supports-web-crypto/package.json b/amplify/functions/downloadDocument/node_modules/@aws-crypto/supports-web-crypto/package.json new file mode 100644 index 0000000..a97bf01 --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@aws-crypto/supports-web-crypto/package.json @@ -0,0 +1,28 @@ +{ + "name": "@aws-crypto/supports-web-crypto", + "version": "5.2.0", + "description": "Provides functions for detecting if the host environment supports the WebCrypto API", + "scripts": { + "prepublishOnly": "tsc -p tsconfig.json && tsc -p tsconfig.module.json", + "pretest": "tsc -p tsconfig.test.json", + "test": "mocha --require ts-node/register test/**/*test.ts" + }, + "repository": { + "type": "git", + "url": "git@github.com:aws/aws-sdk-js-crypto-helpers.git" + }, + "author": { + "name": "AWS Crypto Tools Team", + "email": "aws-cryptools@amazon.com", + "url": "https://docs.aws.amazon.com/aws-crypto-tools/index.html?id=docs_gateway#lang/en_us" + }, + "homepage": "https://github.com/aws/aws-sdk-js-crypto-helpers/tree/master/packages/supports-web-crypto", + "license": "Apache-2.0", + "main": "./build/main/index.js", + "module": "./build/module/index.js", + "types": "./build/main/index.d.ts", + "dependencies": { + "tslib": "^2.6.2" + }, + "gitHead": "c11b171b35ec5c093364f0e0d8dc4ab1af68e748" +} diff --git a/amplify/functions/downloadDocument/node_modules/@aws-crypto/supports-web-crypto/src/index.ts b/amplify/functions/downloadDocument/node_modules/@aws-crypto/supports-web-crypto/src/index.ts new file mode 100644 index 0000000..9725c9c --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@aws-crypto/supports-web-crypto/src/index.ts @@ -0,0 +1 @@ +export * from "./supportsWebCrypto"; diff --git a/amplify/functions/downloadDocument/node_modules/@aws-crypto/supports-web-crypto/src/supportsWebCrypto.ts 
b/amplify/functions/downloadDocument/node_modules/@aws-crypto/supports-web-crypto/src/supportsWebCrypto.ts new file mode 100644 index 0000000..7eef629 --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@aws-crypto/supports-web-crypto/src/supportsWebCrypto.ts @@ -0,0 +1,76 @@ +type SubtleCryptoMethod = + | "decrypt" + | "digest" + | "encrypt" + | "exportKey" + | "generateKey" + | "importKey" + | "sign" + | "verify"; + +const subtleCryptoMethods: Array = [ + "decrypt", + "digest", + "encrypt", + "exportKey", + "generateKey", + "importKey", + "sign", + "verify" +]; + +export function supportsWebCrypto(window: Window): boolean { + if ( + supportsSecureRandom(window) && + typeof window.crypto.subtle === "object" + ) { + const { subtle } = window.crypto; + + return supportsSubtleCrypto(subtle); + } + + return false; +} + +export function supportsSecureRandom(window: Window): boolean { + if (typeof window === "object" && typeof window.crypto === "object") { + const { getRandomValues } = window.crypto; + + return typeof getRandomValues === "function"; + } + + return false; +} + +export function supportsSubtleCrypto(subtle: SubtleCrypto) { + return ( + subtle && + subtleCryptoMethods.every( + methodName => typeof subtle[methodName] === "function" + ) + ); +} + +export async function supportsZeroByteGCM(subtle: SubtleCrypto) { + if (!supportsSubtleCrypto(subtle)) return false; + try { + const key = await subtle.generateKey( + { name: "AES-GCM", length: 128 }, + false, + ["encrypt"] + ); + const zeroByteAuthTag = await subtle.encrypt( + { + name: "AES-GCM", + iv: new Uint8Array(Array(12)), + additionalData: new Uint8Array(Array(16)), + tagLength: 128 + }, + key, + new Uint8Array(0) + ); + return zeroByteAuthTag.byteLength === 16; + } catch { + return false; + } +} diff --git a/amplify/functions/downloadDocument/node_modules/@aws-crypto/supports-web-crypto/tsconfig.json 
b/amplify/functions/downloadDocument/node_modules/@aws-crypto/supports-web-crypto/tsconfig.json new file mode 100644 index 0000000..efca6de --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@aws-crypto/supports-web-crypto/tsconfig.json @@ -0,0 +1,10 @@ +{ + "extends": "../tsconfig.json", + "compilerOptions": { + "lib": ["dom"], + "rootDir": "./src", + "outDir": "./build/main", + }, + "include": ["src/**/*.ts"], + "exclude": ["node_modules/**"] +} diff --git a/amplify/functions/downloadDocument/node_modules/@aws-crypto/supports-web-crypto/tsconfig.module.json b/amplify/functions/downloadDocument/node_modules/@aws-crypto/supports-web-crypto/tsconfig.module.json new file mode 100644 index 0000000..7d0cfdd --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@aws-crypto/supports-web-crypto/tsconfig.module.json @@ -0,0 +1,7 @@ +{ + "extends": "./tsconfig", + "compilerOptions": { + "outDir": "build/module", + "module": "esnext", + } +} diff --git a/amplify/functions/downloadDocument/node_modules/@aws-crypto/util/CHANGELOG.md b/amplify/functions/downloadDocument/node_modules/@aws-crypto/util/CHANGELOG.md new file mode 100644 index 0000000..df2cecb --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@aws-crypto/util/CHANGELOG.md @@ -0,0 +1,71 @@ +# Change Log + +All notable changes to this project will be documented in this file. +See [Conventional Commits](https://conventionalcommits.org) for commit guidelines. 
+ +# [5.2.0](https://github.com/aws/aws-sdk-js-crypto-helpers/compare/v5.1.0...v5.2.0) (2023-10-16) + +### Features + +- support ESM artifacts in all packages ([#752](https://github.com/aws/aws-sdk-js-crypto-helpers/issues/752)) ([e930ffb](https://github.com/aws/aws-sdk-js-crypto-helpers/commit/e930ffba5cfef66dd242049e7d514ced232c1e3b)) + +# [5.1.0](https://github.com/aws/aws-sdk-js-crypto-helpers/compare/v5.0.0...v5.1.0) (2023-09-22) + +### Bug Fixes + +- Update tsc to 2.x ([#735](https://github.com/aws/aws-sdk-js-crypto-helpers/issues/735)) ([782e0de](https://github.com/aws/aws-sdk-js-crypto-helpers/commit/782e0de9f5fef41f694130580a69d940894b6b8c)) + +### Features + +- Use @smithy/util-utf8 ([#730](https://github.com/aws/aws-sdk-js-crypto-helpers/issues/730)) ([00fb851](https://github.com/aws/aws-sdk-js-crypto-helpers/commit/00fb851ca3559d5a1f370f9256814de1210826b8)), closes [#699](https://github.com/aws/aws-sdk-js-crypto-helpers/issues/699) + +# [5.0.0](https://github.com/aws/aws-sdk-js-crypto-helpers/compare/v4.0.1...v5.0.0) (2023-07-13) + +**Note:** Version bump only for package @aws-crypto/util + +# [4.0.0](https://github.com/aws/aws-sdk-js-crypto-helpers/compare/v3.0.0...v4.0.0) (2023-02-20) + +**Note:** Version bump only for package @aws-crypto/util + +# [3.0.0](https://github.com/aws/aws-sdk-js-crypto-helpers/compare/v2.0.2...v3.0.0) (2023-01-12) + +- feat!: replace Hash implementations with Checksum interface (#492) ([da43dc0](https://github.com/aws/aws-sdk-js-crypto-helpers/commit/da43dc0fdf669d9ebb5bfb1b1f7c79e46c4aaae1)), closes [#492](https://github.com/aws/aws-sdk-js-crypto-helpers/issues/492) + +### BREAKING CHANGES + +- All classes that implemented `Hash` now implement `Checksum`. 
+ +## [2.0.2](https://github.com/aws/aws-sdk-js-crypto-helpers/compare/v2.0.1...v2.0.2) (2022-09-07) + +### Bug Fixes + +- **#337:** update @aws-sdk/types ([#373](https://github.com/aws/aws-sdk-js-crypto-helpers/issues/373)) ([b26a811](https://github.com/aws/aws-sdk-js-crypto-helpers/commit/b26a811a392f5209c7ec7e57251500d4d78f97ff)), closes [#337](https://github.com/aws/aws-sdk-js-crypto-helpers/issues/337) +- **docs:** update README for packages/util ([#382](https://github.com/aws/aws-sdk-js-crypto-helpers/issues/382)) ([f3e650e](https://github.com/aws/aws-sdk-js-crypto-helpers/commit/f3e650e1b4792ffbea2e8a1a015fd55fb951a3a4)) + +## [2.0.1](https://github.com/aws/aws-sdk-js-crypto-helpers/compare/v2.0.0...v2.0.1) (2021-12-09) + +### Bug Fixes + +- **uint32ArrayFrom:** increment index & polyfill for Uint32Array ([#270](https://github.com/aws/aws-sdk-js-crypto-helpers/issues/270)) ([a70d603](https://github.com/aws/aws-sdk-js-crypto-helpers/commit/a70d603f3ba7600d3c1213f297d4160a4b3793bd)) + +# [2.0.0](https://github.com/aws/aws-sdk-js-crypto-helpers/compare/v1.2.2...v2.0.0) (2021-10-25) + +**Note:** Version bump only for package @aws-crypto/util + +## [1.2.2](https://github.com/aws/aws-sdk-js-crypto-helpers/compare/v1.2.1...v1.2.2) (2021-10-12) + +### Bug Fixes + +- **crc32c:** ie11 does not support Array.from ([#221](https://github.com/aws/aws-sdk-js-crypto-helpers/issues/221)) ([5f49547](https://github.com/aws/aws-sdk-js-crypto-helpers/commit/5f495472ab8988cf203e0f2a70a51f7e1fcd7e60)) + +## [1.2.1](https://github.com/aws/aws-sdk-js-crypto-helpers/compare/v1.2.0...v1.2.1) (2021-09-17) + +### Bug Fixes + +- better pollyfill check for Buffer ([#217](https://github.com/aws/aws-sdk-js-crypto-helpers/issues/217)) ([bc97da2](https://github.com/aws/aws-sdk-js-crypto-helpers/commit/bc97da29aaf473943e4407c9a29cc30f74f15723)) + +# [1.2.0](https://github.com/aws/aws-sdk-js-crypto-helpers/compare/v1.1.1...v1.2.0) (2021-09-17) + +### Features + +- add @aws-crypto/util 
([8f489cb](https://github.com/aws/aws-sdk-js-crypto-helpers/commit/8f489cbe4c0e134f826bac66f1bf5172597048b9)) diff --git a/amplify/functions/downloadDocument/node_modules/@aws-crypto/util/LICENSE b/amplify/functions/downloadDocument/node_modules/@aws-crypto/util/LICENSE new file mode 100644 index 0000000..980a15a --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@aws-crypto/util/LICENSE @@ -0,0 +1,201 @@ + Apache License + Version 2.0, January 2004 + http://www.apache.org/licenses/ + + TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION + + 1. Definitions. + + "License" shall mean the terms and conditions for use, reproduction, + and distribution as defined by Sections 1 through 9 of this document. + + "Licensor" shall mean the copyright owner or entity authorized by + the copyright owner that is granting the License. + + "Legal Entity" shall mean the union of the acting entity and all + other entities that control, are controlled by, or are under common + control with that entity. For the purposes of this definition, + "control" means (i) the power, direct or indirect, to cause the + direction or management of such entity, whether by contract or + otherwise, or (ii) ownership of fifty percent (50%) or more of the + outstanding shares, or (iii) beneficial ownership of such entity. + + "You" (or "Your") shall mean an individual or Legal Entity + exercising permissions granted by this License. + + "Source" form shall mean the preferred form for making modifications, + including but not limited to software source code, documentation + source, and configuration files. + + "Object" form shall mean any form resulting from mechanical + transformation or translation of a Source form, including but + not limited to compiled object code, generated documentation, + and conversions to other media types. 
+ + "Work" shall mean the work of authorship, whether in Source or + Object form, made available under the License, as indicated by a + copyright notice that is included in or attached to the work + (an example is provided in the Appendix below). + + "Derivative Works" shall mean any work, whether in Source or Object + form, that is based on (or derived from) the Work and for which the + editorial revisions, annotations, elaborations, or other modifications + represent, as a whole, an original work of authorship. For the purposes + of this License, Derivative Works shall not include works that remain + separable from, or merely link (or bind by name) to the interfaces of, + the Work and Derivative Works thereof. + + "Contribution" shall mean any work of authorship, including + the original version of the Work and any modifications or additions + to that Work or Derivative Works thereof, that is intentionally + submitted to Licensor for inclusion in the Work by the copyright owner + or by an individual or Legal Entity authorized to submit on behalf of + the copyright owner. For the purposes of this definition, "submitted" + means any form of electronic, verbal, or written communication sent + to the Licensor or its representatives, including but not limited to + communication on electronic mailing lists, source code control systems, + and issue tracking systems that are managed by, or on behalf of, the + Licensor for the purpose of discussing and improving the Work, but + excluding communication that is conspicuously marked or otherwise + designated in writing by the copyright owner as "Not a Contribution." + + "Contributor" shall mean Licensor and any individual or Legal Entity + on behalf of whom a Contribution has been received by Licensor and + subsequently incorporated within the Work. + + 2. Grant of Copyright License. 
Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + copyright license to reproduce, prepare Derivative Works of, + publicly display, publicly perform, sublicense, and distribute the + Work and such Derivative Works in Source or Object form. + + 3. Grant of Patent License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + (except as stated in this section) patent license to make, have made, + use, offer to sell, sell, import, and otherwise transfer the Work, + where such license applies only to those patent claims licensable + by such Contributor that are necessarily infringed by their + Contribution(s) alone or by combination of their Contribution(s) + with the Work to which such Contribution(s) was submitted. If You + institute patent litigation against any entity (including a + cross-claim or counterclaim in a lawsuit) alleging that the Work + or a Contribution incorporated within the Work constitutes direct + or contributory patent infringement, then any patent licenses + granted to You under this License for that Work shall terminate + as of the date such litigation is filed. + + 4. Redistribution. 
You may reproduce and distribute copies of the + Work or Derivative Works thereof in any medium, with or without + modifications, and in Source or Object form, provided that You + meet the following conditions: + + (a) You must give any other recipients of the Work or + Derivative Works a copy of this License; and + + (b) You must cause any modified files to carry prominent notices + stating that You changed the files; and + + (c) You must retain, in the Source form of any Derivative Works + that You distribute, all copyright, patent, trademark, and + attribution notices from the Source form of the Work, + excluding those notices that do not pertain to any part of + the Derivative Works; and + + (d) If the Work includes a "NOTICE" text file as part of its + distribution, then any Derivative Works that You distribute must + include a readable copy of the attribution notices contained + within such NOTICE file, excluding those notices that do not + pertain to any part of the Derivative Works, in at least one + of the following places: within a NOTICE text file distributed + as part of the Derivative Works; within the Source form or + documentation, if provided along with the Derivative Works; or, + within a display generated by the Derivative Works, if and + wherever such third-party notices normally appear. The contents + of the NOTICE file are for informational purposes only and + do not modify the License. You may add Your own attribution + notices within Derivative Works that You distribute, alongside + or as an addendum to the NOTICE text from the Work, provided + that such additional attribution notices cannot be construed + as modifying the License. 
+ + You may add Your own copyright statement to Your modifications and + may provide additional or different license terms and conditions + for use, reproduction, or distribution of Your modifications, or + for any such Derivative Works as a whole, provided Your use, + reproduction, and distribution of the Work otherwise complies with + the conditions stated in this License. + + 5. Submission of Contributions. Unless You explicitly state otherwise, + any Contribution intentionally submitted for inclusion in the Work + by You to the Licensor shall be under the terms and conditions of + this License, without any additional terms or conditions. + Notwithstanding the above, nothing herein shall supersede or modify + the terms of any separate license agreement you may have executed + with Licensor regarding such Contributions. + + 6. Trademarks. This License does not grant permission to use the trade + names, trademarks, service marks, or product names of the Licensor, + except as required for reasonable and customary use in describing the + origin of the Work and reproducing the content of the NOTICE file. + + 7. Disclaimer of Warranty. Unless required by applicable law or + agreed to in writing, Licensor provides the Work (and each + Contributor provides its Contributions) on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or + implied, including, without limitation, any warranties or conditions + of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A + PARTICULAR PURPOSE. You are solely responsible for determining the + appropriateness of using or redistributing the Work and assume any + risks associated with Your exercise of permissions under this License. + + 8. Limitation of Liability. 
In no event and under no legal theory, + whether in tort (including negligence), contract, or otherwise, + unless required by applicable law (such as deliberate and grossly + negligent acts) or agreed to in writing, shall any Contributor be + liable to You for damages, including any direct, indirect, special, + incidental, or consequential damages of any character arising as a + result of this License or out of the use or inability to use the + Work (including but not limited to damages for loss of goodwill, + work stoppage, computer failure or malfunction, or any and all + other commercial damages or losses), even if such Contributor + has been advised of the possibility of such damages. + + 9. Accepting Warranty or Additional Liability. While redistributing + the Work or Derivative Works thereof, You may choose to offer, + and charge a fee for, acceptance of support, warranty, indemnity, + or other liability obligations and/or rights consistent with this + License. However, in accepting such obligations, You may act only + on Your own behalf and on Your sole responsibility, not on behalf + of any other Contributor, and only if You agree to indemnify, + defend, and hold each Contributor harmless for any liability + incurred by, or claims asserted against, such Contributor by reason + of your accepting any such warranty or additional liability. + + END OF TERMS AND CONDITIONS + + APPENDIX: How to apply the Apache License to your work. + + To apply the Apache License to your work, attach the following + boilerplate notice, with the fields enclosed by brackets "{}" + replaced with your own identifying information. (Don't include + the brackets!) The text should be enclosed in the appropriate + comment syntax for the file format. We also recommend that a + file or class name and description of purpose be included on the + same "printed page" as the copyright notice for easier + identification within third-party archives. 
+ + Copyright {yyyy} {name of copyright owner} + + Licensed under the Apache License, Version 2.0 (the "License"); + you may not use this file except in compliance with the License. + You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + + Unless required by applicable law or agreed to in writing, software + distributed under the License is distributed on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + See the License for the specific language governing permissions and + limitations under the License. diff --git a/amplify/functions/downloadDocument/node_modules/@aws-crypto/util/README.md b/amplify/functions/downloadDocument/node_modules/@aws-crypto/util/README.md new file mode 100644 index 0000000..4c1c8aa --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@aws-crypto/util/README.md @@ -0,0 +1,16 @@ +# @aws-crypto/util + +Helper functions + +## Usage + +``` +import { convertToBuffer } from '@aws-crypto/util'; + +const data = "asdf"; +const utf8EncodedUint8Array = convertToBuffer(data); +``` + +## Test + +`npm test` diff --git a/amplify/functions/downloadDocument/node_modules/@aws-crypto/util/build/main/convertToBuffer.d.ts b/amplify/functions/downloadDocument/node_modules/@aws-crypto/util/build/main/convertToBuffer.d.ts new file mode 100644 index 0000000..697a5cd --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@aws-crypto/util/build/main/convertToBuffer.d.ts @@ -0,0 +1,2 @@ +import { SourceData } from "@aws-sdk/types"; +export declare function convertToBuffer(data: SourceData): Uint8Array; diff --git a/amplify/functions/downloadDocument/node_modules/@aws-crypto/util/build/main/convertToBuffer.js b/amplify/functions/downloadDocument/node_modules/@aws-crypto/util/build/main/convertToBuffer.js new file mode 100644 index 0000000..85bc8af --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@aws-crypto/util/build/main/convertToBuffer.js 
@@ -0,0 +1,24 @@ +"use strict"; +// Copyright Amazon.com Inc. or its affiliates. All Rights Reserved. +// SPDX-License-Identifier: Apache-2.0 +Object.defineProperty(exports, "__esModule", { value: true }); +exports.convertToBuffer = void 0; +var util_utf8_1 = require("@smithy/util-utf8"); +// Quick polyfill +var fromUtf8 = typeof Buffer !== "undefined" && Buffer.from + ? function (input) { return Buffer.from(input, "utf8"); } + : util_utf8_1.fromUtf8; +function convertToBuffer(data) { + // Already a Uint8, do nothing + if (data instanceof Uint8Array) + return data; + if (typeof data === "string") { + return fromUtf8(data); + } + if (ArrayBuffer.isView(data)) { + return new Uint8Array(data.buffer, data.byteOffset, data.byteLength / Uint8Array.BYTES_PER_ELEMENT); + } + return new Uint8Array(data); +} +exports.convertToBuffer = convertToBuffer; +//# sourceMappingURL=convertToBuffer.js.map \ No newline at end of file diff --git a/amplify/functions/downloadDocument/node_modules/@aws-crypto/util/build/main/convertToBuffer.js.map b/amplify/functions/downloadDocument/node_modules/@aws-crypto/util/build/main/convertToBuffer.js.map new file mode 100644 index 0000000..916d787 --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@aws-crypto/util/build/main/convertToBuffer.js.map @@ -0,0 +1 @@ 
+{"version":3,"file":"convertToBuffer.js","sourceRoot":"","sources":["../../src/convertToBuffer.ts"],"names":[],"mappings":";AAAA,oEAAoE;AACpE,sCAAsC;;;AAGtC,+CAAgE;AAEhE,iBAAiB;AACjB,IAAM,QAAQ,GACZ,OAAO,MAAM,KAAK,WAAW,IAAI,MAAM,CAAC,IAAI;IAC1C,CAAC,CAAC,UAAC,KAAa,IAAK,OAAA,MAAM,CAAC,IAAI,CAAC,KAAK,EAAE,MAAM,CAAC,EAA1B,CAA0B;IAC/C,CAAC,CAAC,oBAAe,CAAC;AAEtB,SAAgB,eAAe,CAAC,IAAgB;IAC9C,8BAA8B;IAC9B,IAAI,IAAI,YAAY,UAAU;QAAE,OAAO,IAAI,CAAC;IAE5C,IAAI,OAAO,IAAI,KAAK,QAAQ,EAAE;QAC5B,OAAO,QAAQ,CAAC,IAAI,CAAC,CAAC;KACvB;IAED,IAAI,WAAW,CAAC,MAAM,CAAC,IAAI,CAAC,EAAE;QAC5B,OAAO,IAAI,UAAU,CACnB,IAAI,CAAC,MAAM,EACX,IAAI,CAAC,UAAU,EACf,IAAI,CAAC,UAAU,GAAG,UAAU,CAAC,iBAAiB,CAC/C,CAAC;KACH;IAED,OAAO,IAAI,UAAU,CAAC,IAAI,CAAC,CAAC;AAC9B,CAAC;AAjBD,0CAiBC"} \ No newline at end of file diff --git a/amplify/functions/downloadDocument/node_modules/@aws-crypto/util/build/main/index.d.ts b/amplify/functions/downloadDocument/node_modules/@aws-crypto/util/build/main/index.d.ts new file mode 100644 index 0000000..783c73c --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@aws-crypto/util/build/main/index.d.ts @@ -0,0 +1,4 @@ +export { convertToBuffer } from "./convertToBuffer"; +export { isEmptyData } from "./isEmptyData"; +export { numToUint8 } from "./numToUint8"; +export { uint32ArrayFrom } from './uint32ArrayFrom'; diff --git a/amplify/functions/downloadDocument/node_modules/@aws-crypto/util/build/main/index.js b/amplify/functions/downloadDocument/node_modules/@aws-crypto/util/build/main/index.js new file mode 100644 index 0000000..94e1ca9 --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@aws-crypto/util/build/main/index.js @@ -0,0 +1,14 @@ +"use strict"; +// Copyright Amazon.com Inc. or its affiliates. All Rights Reserved. 
+// SPDX-License-Identifier: Apache-2.0 +Object.defineProperty(exports, "__esModule", { value: true }); +exports.uint32ArrayFrom = exports.numToUint8 = exports.isEmptyData = exports.convertToBuffer = void 0; +var convertToBuffer_1 = require("./convertToBuffer"); +Object.defineProperty(exports, "convertToBuffer", { enumerable: true, get: function () { return convertToBuffer_1.convertToBuffer; } }); +var isEmptyData_1 = require("./isEmptyData"); +Object.defineProperty(exports, "isEmptyData", { enumerable: true, get: function () { return isEmptyData_1.isEmptyData; } }); +var numToUint8_1 = require("./numToUint8"); +Object.defineProperty(exports, "numToUint8", { enumerable: true, get: function () { return numToUint8_1.numToUint8; } }); +var uint32ArrayFrom_1 = require("./uint32ArrayFrom"); +Object.defineProperty(exports, "uint32ArrayFrom", { enumerable: true, get: function () { return uint32ArrayFrom_1.uint32ArrayFrom; } }); +//# sourceMappingURL=index.js.map \ No newline at end of file diff --git a/amplify/functions/downloadDocument/node_modules/@aws-crypto/util/build/main/index.js.map b/amplify/functions/downloadDocument/node_modules/@aws-crypto/util/build/main/index.js.map new file mode 100644 index 0000000..a170172 --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@aws-crypto/util/build/main/index.js.map @@ -0,0 +1 @@ +{"version":3,"file":"index.js","sourceRoot":"","sources":["../../src/index.ts"],"names":[],"mappings":";AAAA,oEAAoE;AACpE,sCAAsC;;;AAEtC,qDAAoD;AAA3C,kHAAA,eAAe,OAAA;AACxB,6CAA4C;AAAnC,0GAAA,WAAW,OAAA;AACpB,2CAA0C;AAAjC,wGAAA,UAAU,OAAA;AACnB,qDAAkD;AAA1C,kHAAA,eAAe,OAAA"} \ No newline at end of file diff --git a/amplify/functions/downloadDocument/node_modules/@aws-crypto/util/build/main/isEmptyData.d.ts b/amplify/functions/downloadDocument/node_modules/@aws-crypto/util/build/main/isEmptyData.d.ts new file mode 100644 index 0000000..43ae4a7 --- /dev/null +++ 
b/amplify/functions/downloadDocument/node_modules/@aws-crypto/util/build/main/isEmptyData.d.ts @@ -0,0 +1,2 @@ +import { SourceData } from "@aws-sdk/types"; +export declare function isEmptyData(data: SourceData): boolean; diff --git a/amplify/functions/downloadDocument/node_modules/@aws-crypto/util/build/main/isEmptyData.js b/amplify/functions/downloadDocument/node_modules/@aws-crypto/util/build/main/isEmptyData.js new file mode 100644 index 0000000..6af1e89 --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@aws-crypto/util/build/main/isEmptyData.js @@ -0,0 +1,13 @@ +"use strict"; +// Copyright Amazon.com Inc. or its affiliates. All Rights Reserved. +// SPDX-License-Identifier: Apache-2.0 +Object.defineProperty(exports, "__esModule", { value: true }); +exports.isEmptyData = void 0; +function isEmptyData(data) { + if (typeof data === "string") { + return data.length === 0; + } + return data.byteLength === 0; +} +exports.isEmptyData = isEmptyData; +//# sourceMappingURL=isEmptyData.js.map \ No newline at end of file diff --git a/amplify/functions/downloadDocument/node_modules/@aws-crypto/util/build/main/isEmptyData.js.map b/amplify/functions/downloadDocument/node_modules/@aws-crypto/util/build/main/isEmptyData.js.map new file mode 100644 index 0000000..e1eaa02 --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@aws-crypto/util/build/main/isEmptyData.js.map @@ -0,0 +1 @@ +{"version":3,"file":"isEmptyData.js","sourceRoot":"","sources":["../../src/isEmptyData.ts"],"names":[],"mappings":";AAAA,oEAAoE;AACpE,sCAAsC;;;AAItC,SAAgB,WAAW,CAAC,IAAgB;IAC1C,IAAI,OAAO,IAAI,KAAK,QAAQ,EAAE;QAC5B,OAAO,IAAI,CAAC,MAAM,KAAK,CAAC,CAAC;KAC1B;IAED,OAAO,IAAI,CAAC,UAAU,KAAK,CAAC,CAAC;AAC/B,CAAC;AAND,kCAMC"} \ No newline at end of file diff --git a/amplify/functions/downloadDocument/node_modules/@aws-crypto/util/build/main/numToUint8.d.ts b/amplify/functions/downloadDocument/node_modules/@aws-crypto/util/build/main/numToUint8.d.ts new file mode 100644 index 
0000000..5b702e8 --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@aws-crypto/util/build/main/numToUint8.d.ts @@ -0,0 +1 @@ +export declare function numToUint8(num: number): Uint8Array; diff --git a/amplify/functions/downloadDocument/node_modules/@aws-crypto/util/build/main/numToUint8.js b/amplify/functions/downloadDocument/node_modules/@aws-crypto/util/build/main/numToUint8.js new file mode 100644 index 0000000..2f070e1 --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@aws-crypto/util/build/main/numToUint8.js @@ -0,0 +1,15 @@ +"use strict"; +// Copyright Amazon.com Inc. or its affiliates. All Rights Reserved. +// SPDX-License-Identifier: Apache-2.0 +Object.defineProperty(exports, "__esModule", { value: true }); +exports.numToUint8 = void 0; +function numToUint8(num) { + return new Uint8Array([ + (num & 0xff000000) >> 24, + (num & 0x00ff0000) >> 16, + (num & 0x0000ff00) >> 8, + num & 0x000000ff, + ]); +} +exports.numToUint8 = numToUint8; +//# sourceMappingURL=numToUint8.js.map \ No newline at end of file diff --git a/amplify/functions/downloadDocument/node_modules/@aws-crypto/util/build/main/numToUint8.js.map b/amplify/functions/downloadDocument/node_modules/@aws-crypto/util/build/main/numToUint8.js.map new file mode 100644 index 0000000..fea3aca --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@aws-crypto/util/build/main/numToUint8.js.map @@ -0,0 +1 @@ +{"version":3,"file":"numToUint8.js","sourceRoot":"","sources":["../../src/numToUint8.ts"],"names":[],"mappings":";AAAA,oEAAoE;AACpE,sCAAsC;;;AAEtC,SAAgB,UAAU,CAAC,GAAW;IACpC,OAAO,IAAI,UAAU,CAAC;QACpB,CAAC,GAAG,GAAG,UAAU,CAAC,IAAI,EAAE;QACxB,CAAC,GAAG,GAAG,UAAU,CAAC,IAAI,EAAE;QACxB,CAAC,GAAG,GAAG,UAAU,CAAC,IAAI,CAAC;QACvB,GAAG,GAAG,UAAU;KACjB,CAAC,CAAC;AACL,CAAC;AAPD,gCAOC"} \ No newline at end of file diff --git a/amplify/functions/downloadDocument/node_modules/@aws-crypto/util/build/main/uint32ArrayFrom.d.ts 
b/amplify/functions/downloadDocument/node_modules/@aws-crypto/util/build/main/uint32ArrayFrom.d.ts new file mode 100644 index 0000000..fea6607 --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@aws-crypto/util/build/main/uint32ArrayFrom.d.ts @@ -0,0 +1 @@ +export declare function uint32ArrayFrom(a_lookUpTable: Array): Uint32Array; diff --git a/amplify/functions/downloadDocument/node_modules/@aws-crypto/util/build/main/uint32ArrayFrom.js b/amplify/functions/downloadDocument/node_modules/@aws-crypto/util/build/main/uint32ArrayFrom.js new file mode 100644 index 0000000..226cdc3 --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@aws-crypto/util/build/main/uint32ArrayFrom.js @@ -0,0 +1,20 @@ +"use strict"; +// Copyright Amazon.com Inc. or its affiliates. All Rights Reserved. +// SPDX-License-Identifier: Apache-2.0 +Object.defineProperty(exports, "__esModule", { value: true }); +exports.uint32ArrayFrom = void 0; +// IE 11 does not support Array.from, so we do it manually +function uint32ArrayFrom(a_lookUpTable) { + if (!Uint32Array.from) { + var return_array = new Uint32Array(a_lookUpTable.length); + var a_index = 0; + while (a_index < a_lookUpTable.length) { + return_array[a_index] = a_lookUpTable[a_index]; + a_index += 1; + } + return return_array; + } + return Uint32Array.from(a_lookUpTable); +} +exports.uint32ArrayFrom = uint32ArrayFrom; +//# sourceMappingURL=uint32ArrayFrom.js.map \ No newline at end of file diff --git a/amplify/functions/downloadDocument/node_modules/@aws-crypto/util/build/main/uint32ArrayFrom.js.map b/amplify/functions/downloadDocument/node_modules/@aws-crypto/util/build/main/uint32ArrayFrom.js.map new file mode 100644 index 0000000..fe016e1 --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@aws-crypto/util/build/main/uint32ArrayFrom.js.map @@ -0,0 +1 @@ 
+{"version":3,"file":"uint32ArrayFrom.js","sourceRoot":"","sources":["../../src/uint32ArrayFrom.ts"],"names":[],"mappings":";AAAA,oEAAoE;AACpE,sCAAsC;;;AAEtC,0DAA0D;AAC1D,SAAgB,eAAe,CAAC,aAA4B;IAC1D,IAAI,CAAC,WAAW,CAAC,IAAI,EAAE;QACrB,IAAM,YAAY,GAAG,IAAI,WAAW,CAAC,aAAa,CAAC,MAAM,CAAC,CAAA;QAC1D,IAAI,OAAO,GAAG,CAAC,CAAA;QACf,OAAO,OAAO,GAAG,aAAa,CAAC,MAAM,EAAE;YACrC,YAAY,CAAC,OAAO,CAAC,GAAG,aAAa,CAAC,OAAO,CAAC,CAAA;YAC9C,OAAO,IAAI,CAAC,CAAA;SACb;QACD,OAAO,YAAY,CAAA;KACpB;IACD,OAAO,WAAW,CAAC,IAAI,CAAC,aAAa,CAAC,CAAA;AACxC,CAAC;AAXD,0CAWC"} \ No newline at end of file diff --git a/amplify/functions/downloadDocument/node_modules/@aws-crypto/util/build/module/convertToBuffer.d.ts b/amplify/functions/downloadDocument/node_modules/@aws-crypto/util/build/module/convertToBuffer.d.ts new file mode 100644 index 0000000..697a5cd --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@aws-crypto/util/build/module/convertToBuffer.d.ts @@ -0,0 +1,2 @@ +import { SourceData } from "@aws-sdk/types"; +export declare function convertToBuffer(data: SourceData): Uint8Array; diff --git a/amplify/functions/downloadDocument/node_modules/@aws-crypto/util/build/module/convertToBuffer.js b/amplify/functions/downloadDocument/node_modules/@aws-crypto/util/build/module/convertToBuffer.js new file mode 100644 index 0000000..c700d1e --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@aws-crypto/util/build/module/convertToBuffer.js @@ -0,0 +1,20 @@ +// Copyright Amazon.com Inc. or its affiliates. All Rights Reserved. +// SPDX-License-Identifier: Apache-2.0 +import { fromUtf8 as fromUtf8Browser } from "@smithy/util-utf8"; +// Quick polyfill +var fromUtf8 = typeof Buffer !== "undefined" && Buffer.from + ? 
function (input) { return Buffer.from(input, "utf8"); } + : fromUtf8Browser; +export function convertToBuffer(data) { + // Already a Uint8, do nothing + if (data instanceof Uint8Array) + return data; + if (typeof data === "string") { + return fromUtf8(data); + } + if (ArrayBuffer.isView(data)) { + return new Uint8Array(data.buffer, data.byteOffset, data.byteLength / Uint8Array.BYTES_PER_ELEMENT); + } + return new Uint8Array(data); +} +//# sourceMappingURL=convertToBuffer.js.map \ No newline at end of file diff --git a/amplify/functions/downloadDocument/node_modules/@aws-crypto/util/build/module/convertToBuffer.js.map b/amplify/functions/downloadDocument/node_modules/@aws-crypto/util/build/module/convertToBuffer.js.map new file mode 100644 index 0000000..92694a4 --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@aws-crypto/util/build/module/convertToBuffer.js.map @@ -0,0 +1 @@ +{"version":3,"file":"convertToBuffer.js","sourceRoot":"","sources":["../../src/convertToBuffer.ts"],"names":[],"mappings":"AAAA,oEAAoE;AACpE,sCAAsC;AAGtC,OAAO,EAAE,QAAQ,IAAI,eAAe,EAAE,MAAM,mBAAmB,CAAC;AAEhE,iBAAiB;AACjB,IAAM,QAAQ,GACZ,OAAO,MAAM,KAAK,WAAW,IAAI,MAAM,CAAC,IAAI;IAC1C,CAAC,CAAC,UAAC,KAAa,IAAK,OAAA,MAAM,CAAC,IAAI,CAAC,KAAK,EAAE,MAAM,CAAC,EAA1B,CAA0B;IAC/C,CAAC,CAAC,eAAe,CAAC;AAEtB,MAAM,UAAU,eAAe,CAAC,IAAgB;IAC9C,8BAA8B;IAC9B,IAAI,IAAI,YAAY,UAAU;QAAE,OAAO,IAAI,CAAC;IAE5C,IAAI,OAAO,IAAI,KAAK,QAAQ,EAAE;QAC5B,OAAO,QAAQ,CAAC,IAAI,CAAC,CAAC;KACvB;IAED,IAAI,WAAW,CAAC,MAAM,CAAC,IAAI,CAAC,EAAE;QAC5B,OAAO,IAAI,UAAU,CACnB,IAAI,CAAC,MAAM,EACX,IAAI,CAAC,UAAU,EACf,IAAI,CAAC,UAAU,GAAG,UAAU,CAAC,iBAAiB,CAC/C,CAAC;KACH;IAED,OAAO,IAAI,UAAU,CAAC,IAAI,CAAC,CAAC;AAC9B,CAAC"} \ No newline at end of file diff --git a/amplify/functions/downloadDocument/node_modules/@aws-crypto/util/build/module/index.d.ts b/amplify/functions/downloadDocument/node_modules/@aws-crypto/util/build/module/index.d.ts new file mode 100644 index 0000000..783c73c --- /dev/null +++ 
b/amplify/functions/downloadDocument/node_modules/@aws-crypto/util/build/module/index.d.ts @@ -0,0 +1,4 @@ +export { convertToBuffer } from "./convertToBuffer"; +export { isEmptyData } from "./isEmptyData"; +export { numToUint8 } from "./numToUint8"; +export { uint32ArrayFrom } from './uint32ArrayFrom'; diff --git a/amplify/functions/downloadDocument/node_modules/@aws-crypto/util/build/module/index.js b/amplify/functions/downloadDocument/node_modules/@aws-crypto/util/build/module/index.js new file mode 100644 index 0000000..077e8b6 --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@aws-crypto/util/build/module/index.js @@ -0,0 +1,7 @@ +// Copyright Amazon.com Inc. or its affiliates. All Rights Reserved. +// SPDX-License-Identifier: Apache-2.0 +export { convertToBuffer } from "./convertToBuffer"; +export { isEmptyData } from "./isEmptyData"; +export { numToUint8 } from "./numToUint8"; +export { uint32ArrayFrom } from './uint32ArrayFrom'; +//# sourceMappingURL=index.js.map \ No newline at end of file diff --git a/amplify/functions/downloadDocument/node_modules/@aws-crypto/util/build/module/index.js.map b/amplify/functions/downloadDocument/node_modules/@aws-crypto/util/build/module/index.js.map new file mode 100644 index 0000000..4ddb12d --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@aws-crypto/util/build/module/index.js.map @@ -0,0 +1 @@ +{"version":3,"file":"index.js","sourceRoot":"","sources":["../../src/index.ts"],"names":[],"mappings":"AAAA,oEAAoE;AACpE,sCAAsC;AAEtC,OAAO,EAAE,eAAe,EAAE,MAAM,mBAAmB,CAAC;AACpD,OAAO,EAAE,WAAW,EAAE,MAAM,eAAe,CAAC;AAC5C,OAAO,EAAE,UAAU,EAAE,MAAM,cAAc,CAAC;AAC1C,OAAO,EAAC,eAAe,EAAC,MAAM,mBAAmB,CAAC"} \ No newline at end of file diff --git a/amplify/functions/downloadDocument/node_modules/@aws-crypto/util/build/module/isEmptyData.d.ts b/amplify/functions/downloadDocument/node_modules/@aws-crypto/util/build/module/isEmptyData.d.ts new file mode 100644 index 0000000..43ae4a7 --- /dev/null +++ 
b/amplify/functions/downloadDocument/node_modules/@aws-crypto/util/build/module/isEmptyData.d.ts @@ -0,0 +1,2 @@ +import { SourceData } from "@aws-sdk/types"; +export declare function isEmptyData(data: SourceData): boolean; diff --git a/amplify/functions/downloadDocument/node_modules/@aws-crypto/util/build/module/isEmptyData.js b/amplify/functions/downloadDocument/node_modules/@aws-crypto/util/build/module/isEmptyData.js new file mode 100644 index 0000000..13841c7 --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@aws-crypto/util/build/module/isEmptyData.js @@ -0,0 +1,9 @@ +// Copyright Amazon.com Inc. or its affiliates. All Rights Reserved. +// SPDX-License-Identifier: Apache-2.0 +export function isEmptyData(data) { + if (typeof data === "string") { + return data.length === 0; + } + return data.byteLength === 0; +} +//# sourceMappingURL=isEmptyData.js.map \ No newline at end of file diff --git a/amplify/functions/downloadDocument/node_modules/@aws-crypto/util/build/module/isEmptyData.js.map b/amplify/functions/downloadDocument/node_modules/@aws-crypto/util/build/module/isEmptyData.js.map new file mode 100644 index 0000000..fe0fa02 --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@aws-crypto/util/build/module/isEmptyData.js.map @@ -0,0 +1 @@ +{"version":3,"file":"isEmptyData.js","sourceRoot":"","sources":["../../src/isEmptyData.ts"],"names":[],"mappings":"AAAA,oEAAoE;AACpE,sCAAsC;AAItC,MAAM,UAAU,WAAW,CAAC,IAAgB;IAC1C,IAAI,OAAO,IAAI,KAAK,QAAQ,EAAE;QAC5B,OAAO,IAAI,CAAC,MAAM,KAAK,CAAC,CAAC;KAC1B;IAED,OAAO,IAAI,CAAC,UAAU,KAAK,CAAC,CAAC;AAC/B,CAAC"} \ No newline at end of file diff --git a/amplify/functions/downloadDocument/node_modules/@aws-crypto/util/build/module/numToUint8.d.ts b/amplify/functions/downloadDocument/node_modules/@aws-crypto/util/build/module/numToUint8.d.ts new file mode 100644 index 0000000..5b702e8 --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@aws-crypto/util/build/module/numToUint8.d.ts @@ 
-0,0 +1 @@ +export declare function numToUint8(num: number): Uint8Array; diff --git a/amplify/functions/downloadDocument/node_modules/@aws-crypto/util/build/module/numToUint8.js b/amplify/functions/downloadDocument/node_modules/@aws-crypto/util/build/module/numToUint8.js new file mode 100644 index 0000000..0ca6e47 --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@aws-crypto/util/build/module/numToUint8.js @@ -0,0 +1,11 @@ +// Copyright Amazon.com Inc. or its affiliates. All Rights Reserved. +// SPDX-License-Identifier: Apache-2.0 +export function numToUint8(num) { + return new Uint8Array([ + (num & 0xff000000) >> 24, + (num & 0x00ff0000) >> 16, + (num & 0x0000ff00) >> 8, + num & 0x000000ff, + ]); +} +//# sourceMappingURL=numToUint8.js.map \ No newline at end of file diff --git a/amplify/functions/downloadDocument/node_modules/@aws-crypto/util/build/module/numToUint8.js.map b/amplify/functions/downloadDocument/node_modules/@aws-crypto/util/build/module/numToUint8.js.map new file mode 100644 index 0000000..ac53e33 --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@aws-crypto/util/build/module/numToUint8.js.map @@ -0,0 +1 @@ +{"version":3,"file":"numToUint8.js","sourceRoot":"","sources":["../../src/numToUint8.ts"],"names":[],"mappings":"AAAA,oEAAoE;AACpE,sCAAsC;AAEtC,MAAM,UAAU,UAAU,CAAC,GAAW;IACpC,OAAO,IAAI,UAAU,CAAC;QACpB,CAAC,GAAG,GAAG,UAAU,CAAC,IAAI,EAAE;QACxB,CAAC,GAAG,GAAG,UAAU,CAAC,IAAI,EAAE;QACxB,CAAC,GAAG,GAAG,UAAU,CAAC,IAAI,CAAC;QACvB,GAAG,GAAG,UAAU;KACjB,CAAC,CAAC;AACL,CAAC"} \ No newline at end of file diff --git a/amplify/functions/downloadDocument/node_modules/@aws-crypto/util/build/module/uint32ArrayFrom.d.ts b/amplify/functions/downloadDocument/node_modules/@aws-crypto/util/build/module/uint32ArrayFrom.d.ts new file mode 100644 index 0000000..fea6607 --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@aws-crypto/util/build/module/uint32ArrayFrom.d.ts @@ -0,0 +1 @@ +export declare function 
uint32ArrayFrom(a_lookUpTable: Array): Uint32Array; diff --git a/amplify/functions/downloadDocument/node_modules/@aws-crypto/util/build/module/uint32ArrayFrom.js b/amplify/functions/downloadDocument/node_modules/@aws-crypto/util/build/module/uint32ArrayFrom.js new file mode 100644 index 0000000..c69435e --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@aws-crypto/util/build/module/uint32ArrayFrom.js @@ -0,0 +1,16 @@ +// Copyright Amazon.com Inc. or its affiliates. All Rights Reserved. +// SPDX-License-Identifier: Apache-2.0 +// IE 11 does not support Array.from, so we do it manually +export function uint32ArrayFrom(a_lookUpTable) { + if (!Uint32Array.from) { + var return_array = new Uint32Array(a_lookUpTable.length); + var a_index = 0; + while (a_index < a_lookUpTable.length) { + return_array[a_index] = a_lookUpTable[a_index]; + a_index += 1; + } + return return_array; + } + return Uint32Array.from(a_lookUpTable); +} +//# sourceMappingURL=uint32ArrayFrom.js.map \ No newline at end of file diff --git a/amplify/functions/downloadDocument/node_modules/@aws-crypto/util/build/module/uint32ArrayFrom.js.map b/amplify/functions/downloadDocument/node_modules/@aws-crypto/util/build/module/uint32ArrayFrom.js.map new file mode 100644 index 0000000..7384b0a --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@aws-crypto/util/build/module/uint32ArrayFrom.js.map @@ -0,0 +1 @@ +{"version":3,"file":"uint32ArrayFrom.js","sourceRoot":"","sources":["../../src/uint32ArrayFrom.ts"],"names":[],"mappings":"AAAA,oEAAoE;AACpE,sCAAsC;AAEtC,0DAA0D;AAC1D,MAAM,UAAU,eAAe,CAAC,aAA4B;IAC1D,IAAI,CAAC,WAAW,CAAC,IAAI,EAAE;QACrB,IAAM,YAAY,GAAG,IAAI,WAAW,CAAC,aAAa,CAAC,MAAM,CAAC,CAAA;QAC1D,IAAI,OAAO,GAAG,CAAC,CAAA;QACf,OAAO,OAAO,GAAG,aAAa,CAAC,MAAM,EAAE;YACrC,YAAY,CAAC,OAAO,CAAC,GAAG,aAAa,CAAC,OAAO,CAAC,CAAA;YAC9C,OAAO,IAAI,CAAC,CAAA;SACb;QACD,OAAO,YAAY,CAAA;KACpB;IACD,OAAO,WAAW,CAAC,IAAI,CAAC,aAAa,CAAC,CAAA;AACxC,CAAC"} \ No newline at end of file diff --git 
a/amplify/functions/downloadDocument/node_modules/@aws-crypto/util/node_modules/@smithy/is-array-buffer/LICENSE b/amplify/functions/downloadDocument/node_modules/@aws-crypto/util/node_modules/@smithy/is-array-buffer/LICENSE new file mode 100644 index 0000000..7b6491b --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@aws-crypto/util/node_modules/@smithy/is-array-buffer/LICENSE @@ -0,0 +1,201 @@ +Apache License + Version 2.0, January 2004 + http://www.apache.org/licenses/ + + TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION + + 1. Definitions. + + "License" shall mean the terms and conditions for use, reproduction, + and distribution as defined by Sections 1 through 9 of this document. + + "Licensor" shall mean the copyright owner or entity authorized by + the copyright owner that is granting the License. + + "Legal Entity" shall mean the union of the acting entity and all + other entities that control, are controlled by, or are under common + control with that entity. For the purposes of this definition, + "control" means (i) the power, direct or indirect, to cause the + direction or management of such entity, whether by contract or + otherwise, or (ii) ownership of fifty percent (50%) or more of the + outstanding shares, or (iii) beneficial ownership of such entity. + + "You" (or "Your") shall mean an individual or Legal Entity + exercising permissions granted by this License. + + "Source" form shall mean the preferred form for making modifications, + including but not limited to software source code, documentation + source, and configuration files. + + "Object" form shall mean any form resulting from mechanical + transformation or translation of a Source form, including but + not limited to compiled object code, generated documentation, + and conversions to other media types. 
+ + "Work" shall mean the work of authorship, whether in Source or + Object form, made available under the License, as indicated by a + copyright notice that is included in or attached to the work + (an example is provided in the Appendix below). + + "Derivative Works" shall mean any work, whether in Source or Object + form, that is based on (or derived from) the Work and for which the + editorial revisions, annotations, elaborations, or other modifications + represent, as a whole, an original work of authorship. For the purposes + of this License, Derivative Works shall not include works that remain + separable from, or merely link (or bind by name) to the interfaces of, + the Work and Derivative Works thereof. + + "Contribution" shall mean any work of authorship, including + the original version of the Work and any modifications or additions + to that Work or Derivative Works thereof, that is intentionally + submitted to Licensor for inclusion in the Work by the copyright owner + or by an individual or Legal Entity authorized to submit on behalf of + the copyright owner. For the purposes of this definition, "submitted" + means any form of electronic, verbal, or written communication sent + to the Licensor or its representatives, including but not limited to + communication on electronic mailing lists, source code control systems, + and issue tracking systems that are managed by, or on behalf of, the + Licensor for the purpose of discussing and improving the Work, but + excluding communication that is conspicuously marked or otherwise + designated in writing by the copyright owner as "Not a Contribution." + + "Contributor" shall mean Licensor and any individual or Legal Entity + on behalf of whom a Contribution has been received by Licensor and + subsequently incorporated within the Work. + + 2. Grant of Copyright License. 
Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + copyright license to reproduce, prepare Derivative Works of, + publicly display, publicly perform, sublicense, and distribute the + Work and such Derivative Works in Source or Object form. + + 3. Grant of Patent License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + (except as stated in this section) patent license to make, have made, + use, offer to sell, sell, import, and otherwise transfer the Work, + where such license applies only to those patent claims licensable + by such Contributor that are necessarily infringed by their + Contribution(s) alone or by combination of their Contribution(s) + with the Work to which such Contribution(s) was submitted. If You + institute patent litigation against any entity (including a + cross-claim or counterclaim in a lawsuit) alleging that the Work + or a Contribution incorporated within the Work constitutes direct + or contributory patent infringement, then any patent licenses + granted to You under this License for that Work shall terminate + as of the date such litigation is filed. + + 4. Redistribution. 
You may reproduce and distribute copies of the + Work or Derivative Works thereof in any medium, with or without + modifications, and in Source or Object form, provided that You + meet the following conditions: + + (a) You must give any other recipients of the Work or + Derivative Works a copy of this License; and + + (b) You must cause any modified files to carry prominent notices + stating that You changed the files; and + + (c) You must retain, in the Source form of any Derivative Works + that You distribute, all copyright, patent, trademark, and + attribution notices from the Source form of the Work, + excluding those notices that do not pertain to any part of + the Derivative Works; and + + (d) If the Work includes a "NOTICE" text file as part of its + distribution, then any Derivative Works that You distribute must + include a readable copy of the attribution notices contained + within such NOTICE file, excluding those notices that do not + pertain to any part of the Derivative Works, in at least one + of the following places: within a NOTICE text file distributed + as part of the Derivative Works; within the Source form or + documentation, if provided along with the Derivative Works; or, + within a display generated by the Derivative Works, if and + wherever such third-party notices normally appear. The contents + of the NOTICE file are for informational purposes only and + do not modify the License. You may add Your own attribution + notices within Derivative Works that You distribute, alongside + or as an addendum to the NOTICE text from the Work, provided + that such additional attribution notices cannot be construed + as modifying the License. 
+ + You may add Your own copyright statement to Your modifications and + may provide additional or different license terms and conditions + for use, reproduction, or distribution of Your modifications, or + for any such Derivative Works as a whole, provided Your use, + reproduction, and distribution of the Work otherwise complies with + the conditions stated in this License. + + 5. Submission of Contributions. Unless You explicitly state otherwise, + any Contribution intentionally submitted for inclusion in the Work + by You to the Licensor shall be under the terms and conditions of + this License, without any additional terms or conditions. + Notwithstanding the above, nothing herein shall supersede or modify + the terms of any separate license agreement you may have executed + with Licensor regarding such Contributions. + + 6. Trademarks. This License does not grant permission to use the trade + names, trademarks, service marks, or product names of the Licensor, + except as required for reasonable and customary use in describing the + origin of the Work and reproducing the content of the NOTICE file. + + 7. Disclaimer of Warranty. Unless required by applicable law or + agreed to in writing, Licensor provides the Work (and each + Contributor provides its Contributions) on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or + implied, including, without limitation, any warranties or conditions + of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A + PARTICULAR PURPOSE. You are solely responsible for determining the + appropriateness of using or redistributing the Work and assume any + risks associated with Your exercise of permissions under this License. + + 8. Limitation of Liability. 
In no event and under no legal theory, + whether in tort (including negligence), contract, or otherwise, + unless required by applicable law (such as deliberate and grossly + negligent acts) or agreed to in writing, shall any Contributor be + liable to You for damages, including any direct, indirect, special, + incidental, or consequential damages of any character arising as a + result of this License or out of the use or inability to use the + Work (including but not limited to damages for loss of goodwill, + work stoppage, computer failure or malfunction, or any and all + other commercial damages or losses), even if such Contributor + has been advised of the possibility of such damages. + + 9. Accepting Warranty or Additional Liability. While redistributing + the Work or Derivative Works thereof, You may choose to offer, + and charge a fee for, acceptance of support, warranty, indemnity, + or other liability obligations and/or rights consistent with this + License. However, in accepting such obligations, You may act only + on Your own behalf and on Your sole responsibility, not on behalf + of any other Contributor, and only if You agree to indemnify, + defend, and hold each Contributor harmless for any liability + incurred by, or claims asserted against, such Contributor by reason + of your accepting any such warranty or additional liability. + + END OF TERMS AND CONDITIONS + + APPENDIX: How to apply the Apache License to your work. + + To apply the Apache License to your work, attach the following + boilerplate notice, with the fields enclosed by brackets "{}" + replaced with your own identifying information. (Don't include + the brackets!) The text should be enclosed in the appropriate + comment syntax for the file format. We also recommend that a + file or class name and description of purpose be included on the + same "printed page" as the copyright notice for easier + identification within third-party archives. + + Copyright 2018-2020 Amazon.com, Inc. 
or its affiliates. All Rights Reserved. + + Licensed under the Apache License, Version 2.0 (the "License"); + you may not use this file except in compliance with the License. + You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + + Unless required by applicable law or agreed to in writing, software + distributed under the License is distributed on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + See the License for the specific language governing permissions and + limitations under the License. \ No newline at end of file diff --git a/amplify/functions/downloadDocument/node_modules/@aws-crypto/util/node_modules/@smithy/is-array-buffer/README.md b/amplify/functions/downloadDocument/node_modules/@aws-crypto/util/node_modules/@smithy/is-array-buffer/README.md new file mode 100644 index 0000000..31853f2 --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@aws-crypto/util/node_modules/@smithy/is-array-buffer/README.md @@ -0,0 +1,10 @@ +# @smithy/is-array-buffer + +[![NPM version](https://img.shields.io/npm/v/@smithy/is-array-buffer/latest.svg)](https://www.npmjs.com/package/@smithy/is-array-buffer) +[![NPM downloads](https://img.shields.io/npm/dm/@smithy/is-array-buffer.svg)](https://www.npmjs.com/package/@smithy/is-array-buffer) + +> An internal package + +## Usage + +You probably shouldn't, at least directly. 
diff --git a/amplify/functions/downloadDocument/node_modules/@aws-crypto/util/node_modules/@smithy/is-array-buffer/dist-cjs/index.js b/amplify/functions/downloadDocument/node_modules/@aws-crypto/util/node_modules/@smithy/is-array-buffer/dist-cjs/index.js new file mode 100644 index 0000000..5d792e7 --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@aws-crypto/util/node_modules/@smithy/is-array-buffer/dist-cjs/index.js @@ -0,0 +1,32 @@ +var __defProp = Object.defineProperty; +var __getOwnPropDesc = Object.getOwnPropertyDescriptor; +var __getOwnPropNames = Object.getOwnPropertyNames; +var __hasOwnProp = Object.prototype.hasOwnProperty; +var __name = (target, value) => __defProp(target, "name", { value, configurable: true }); +var __export = (target, all) => { + for (var name in all) + __defProp(target, name, { get: all[name], enumerable: true }); +}; +var __copyProps = (to, from, except, desc) => { + if (from && typeof from === "object" || typeof from === "function") { + for (let key of __getOwnPropNames(from)) + if (!__hasOwnProp.call(to, key) && key !== except) + __defProp(to, key, { get: () => from[key], enumerable: !(desc = __getOwnPropDesc(from, key)) || desc.enumerable }); + } + return to; +}; +var __toCommonJS = (mod) => __copyProps(__defProp({}, "__esModule", { value: true }), mod); + +// src/index.ts +var src_exports = {}; +__export(src_exports, { + isArrayBuffer: () => isArrayBuffer +}); +module.exports = __toCommonJS(src_exports); +var isArrayBuffer = /* @__PURE__ */ __name((arg) => typeof ArrayBuffer === "function" && arg instanceof ArrayBuffer || Object.prototype.toString.call(arg) === "[object ArrayBuffer]", "isArrayBuffer"); +// Annotate the CommonJS export names for ESM import in node: + +0 && (module.exports = { + isArrayBuffer +}); + diff --git a/amplify/functions/downloadDocument/node_modules/@aws-crypto/util/node_modules/@smithy/is-array-buffer/dist-es/index.js 
b/amplify/functions/downloadDocument/node_modules/@aws-crypto/util/node_modules/@smithy/is-array-buffer/dist-es/index.js new file mode 100644 index 0000000..8096cca --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@aws-crypto/util/node_modules/@smithy/is-array-buffer/dist-es/index.js @@ -0,0 +1,2 @@ +export const isArrayBuffer = (arg) => (typeof ArrayBuffer === "function" && arg instanceof ArrayBuffer) || + Object.prototype.toString.call(arg) === "[object ArrayBuffer]"; diff --git a/amplify/functions/downloadDocument/node_modules/@aws-crypto/util/node_modules/@smithy/is-array-buffer/dist-types/index.d.ts b/amplify/functions/downloadDocument/node_modules/@aws-crypto/util/node_modules/@smithy/is-array-buffer/dist-types/index.d.ts new file mode 100644 index 0000000..64f452e --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@aws-crypto/util/node_modules/@smithy/is-array-buffer/dist-types/index.d.ts @@ -0,0 +1,4 @@ +/** + * @internal + */ +export declare const isArrayBuffer: (arg: any) => arg is ArrayBuffer; diff --git a/amplify/functions/downloadDocument/node_modules/@aws-crypto/util/node_modules/@smithy/is-array-buffer/dist-types/ts3.4/index.d.ts b/amplify/functions/downloadDocument/node_modules/@aws-crypto/util/node_modules/@smithy/is-array-buffer/dist-types/ts3.4/index.d.ts new file mode 100644 index 0000000..ca8fd6b --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@aws-crypto/util/node_modules/@smithy/is-array-buffer/dist-types/ts3.4/index.d.ts @@ -0,0 +1,4 @@ +/** + * @internal + */ +export declare const isArrayBuffer: (arg: any) => arg is ArrayBuffer; diff --git a/amplify/functions/downloadDocument/node_modules/@aws-crypto/util/node_modules/@smithy/is-array-buffer/package.json b/amplify/functions/downloadDocument/node_modules/@aws-crypto/util/node_modules/@smithy/is-array-buffer/package.json new file mode 100644 index 0000000..ed8affc --- /dev/null +++ 
b/amplify/functions/downloadDocument/node_modules/@aws-crypto/util/node_modules/@smithy/is-array-buffer/package.json @@ -0,0 +1,60 @@ +{ + "name": "@smithy/is-array-buffer", + "version": "2.2.0", + "description": "Provides a function for detecting if an argument is an ArrayBuffer", + "scripts": { + "build": "concurrently 'yarn:build:cjs' 'yarn:build:es' 'yarn:build:types && yarn build:types:downlevel'", + "build:cjs": "node ../../scripts/inline is-array-buffer", + "build:es": "yarn g:tsc -p tsconfig.es.json", + "build:types": "yarn g:tsc -p tsconfig.types.json", + "build:types:downlevel": "downlevel-dts dist-types dist-types/ts3.4", + "stage-release": "rimraf ./.release && yarn pack && mkdir ./.release && tar zxvf ./package.tgz --directory ./.release && rm ./package.tgz", + "clean": "rimraf ./dist-* && rimraf *.tsbuildinfo || exit 0", + "lint": "eslint -c ../../.eslintrc.js \"src/**/*.ts\"", + "format": "prettier --config ../../prettier.config.js --ignore-path ../.prettierignore --write \"**/*.{ts,md,json}\"", + "test": "yarn g:jest" + }, + "author": { + "name": "AWS SDK for JavaScript Team", + "url": "https://aws.amazon.com/javascript/" + }, + "license": "Apache-2.0", + "main": "./dist-cjs/index.js", + "module": "./dist-es/index.js", + "types": "./dist-types/index.d.ts", + "dependencies": { + "tslib": "^2.6.2" + }, + "engines": { + "node": ">=14.0.0" + }, + "typesVersions": { + "<4.0": { + "dist-types/*": [ + "dist-types/ts3.4/*" + ] + } + }, + "files": [ + "dist-*/**" + ], + "homepage": "https://github.com/awslabs/smithy-typescript/tree/main/packages/is-array-buffer", + "repository": { + "type": "git", + "url": "https://github.com/awslabs/smithy-typescript.git", + "directory": "packages/is-array-buffer" + }, + "devDependencies": { + "@tsconfig/recommended": "1.0.1", + "concurrently": "7.0.0", + "downlevel-dts": "0.10.1", + "rimraf": "3.0.2", + "typedoc": "0.23.23" + }, + "typedoc": { + "entryPoint": "src/index.ts" + }, + "publishConfig": { + "directory": 
".release/package" + } +} \ No newline at end of file diff --git a/amplify/functions/downloadDocument/node_modules/@aws-crypto/util/node_modules/@smithy/util-buffer-from/LICENSE b/amplify/functions/downloadDocument/node_modules/@aws-crypto/util/node_modules/@smithy/util-buffer-from/LICENSE new file mode 100644 index 0000000..7b6491b --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@aws-crypto/util/node_modules/@smithy/util-buffer-from/LICENSE @@ -0,0 +1,201 @@ +Apache License + Version 2.0, January 2004 + http://www.apache.org/licenses/ + + TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION + + 1. Definitions. + + "License" shall mean the terms and conditions for use, reproduction, + and distribution as defined by Sections 1 through 9 of this document. + + "Licensor" shall mean the copyright owner or entity authorized by + the copyright owner that is granting the License. + + "Legal Entity" shall mean the union of the acting entity and all + other entities that control, are controlled by, or are under common + control with that entity. For the purposes of this definition, + "control" means (i) the power, direct or indirect, to cause the + direction or management of such entity, whether by contract or + otherwise, or (ii) ownership of fifty percent (50%) or more of the + outstanding shares, or (iii) beneficial ownership of such entity. + + "You" (or "Your") shall mean an individual or Legal Entity + exercising permissions granted by this License. + + "Source" form shall mean the preferred form for making modifications, + including but not limited to software source code, documentation + source, and configuration files. + + "Object" form shall mean any form resulting from mechanical + transformation or translation of a Source form, including but + not limited to compiled object code, generated documentation, + and conversions to other media types. 
+ + "Work" shall mean the work of authorship, whether in Source or + Object form, made available under the License, as indicated by a + copyright notice that is included in or attached to the work + (an example is provided in the Appendix below). + + "Derivative Works" shall mean any work, whether in Source or Object + form, that is based on (or derived from) the Work and for which the + editorial revisions, annotations, elaborations, or other modifications + represent, as a whole, an original work of authorship. For the purposes + of this License, Derivative Works shall not include works that remain + separable from, or merely link (or bind by name) to the interfaces of, + the Work and Derivative Works thereof. + + "Contribution" shall mean any work of authorship, including + the original version of the Work and any modifications or additions + to that Work or Derivative Works thereof, that is intentionally + submitted to Licensor for inclusion in the Work by the copyright owner + or by an individual or Legal Entity authorized to submit on behalf of + the copyright owner. For the purposes of this definition, "submitted" + means any form of electronic, verbal, or written communication sent + to the Licensor or its representatives, including but not limited to + communication on electronic mailing lists, source code control systems, + and issue tracking systems that are managed by, or on behalf of, the + Licensor for the purpose of discussing and improving the Work, but + excluding communication that is conspicuously marked or otherwise + designated in writing by the copyright owner as "Not a Contribution." + + "Contributor" shall mean Licensor and any individual or Legal Entity + on behalf of whom a Contribution has been received by Licensor and + subsequently incorporated within the Work. + + 2. Grant of Copyright License. 
Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + copyright license to reproduce, prepare Derivative Works of, + publicly display, publicly perform, sublicense, and distribute the + Work and such Derivative Works in Source or Object form. + + 3. Grant of Patent License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + (except as stated in this section) patent license to make, have made, + use, offer to sell, sell, import, and otherwise transfer the Work, + where such license applies only to those patent claims licensable + by such Contributor that are necessarily infringed by their + Contribution(s) alone or by combination of their Contribution(s) + with the Work to which such Contribution(s) was submitted. If You + institute patent litigation against any entity (including a + cross-claim or counterclaim in a lawsuit) alleging that the Work + or a Contribution incorporated within the Work constitutes direct + or contributory patent infringement, then any patent licenses + granted to You under this License for that Work shall terminate + as of the date such litigation is filed. + + 4. Redistribution. 
You may reproduce and distribute copies of the + Work or Derivative Works thereof in any medium, with or without + modifications, and in Source or Object form, provided that You + meet the following conditions: + + (a) You must give any other recipients of the Work or + Derivative Works a copy of this License; and + + (b) You must cause any modified files to carry prominent notices + stating that You changed the files; and + + (c) You must retain, in the Source form of any Derivative Works + that You distribute, all copyright, patent, trademark, and + attribution notices from the Source form of the Work, + excluding those notices that do not pertain to any part of + the Derivative Works; and + + (d) If the Work includes a "NOTICE" text file as part of its + distribution, then any Derivative Works that You distribute must + include a readable copy of the attribution notices contained + within such NOTICE file, excluding those notices that do not + pertain to any part of the Derivative Works, in at least one + of the following places: within a NOTICE text file distributed + as part of the Derivative Works; within the Source form or + documentation, if provided along with the Derivative Works; or, + within a display generated by the Derivative Works, if and + wherever such third-party notices normally appear. The contents + of the NOTICE file are for informational purposes only and + do not modify the License. You may add Your own attribution + notices within Derivative Works that You distribute, alongside + or as an addendum to the NOTICE text from the Work, provided + that such additional attribution notices cannot be construed + as modifying the License. 
+ + You may add Your own copyright statement to Your modifications and + may provide additional or different license terms and conditions + for use, reproduction, or distribution of Your modifications, or + for any such Derivative Works as a whole, provided Your use, + reproduction, and distribution of the Work otherwise complies with + the conditions stated in this License. + + 5. Submission of Contributions. Unless You explicitly state otherwise, + any Contribution intentionally submitted for inclusion in the Work + by You to the Licensor shall be under the terms and conditions of + this License, without any additional terms or conditions. + Notwithstanding the above, nothing herein shall supersede or modify + the terms of any separate license agreement you may have executed + with Licensor regarding such Contributions. + + 6. Trademarks. This License does not grant permission to use the trade + names, trademarks, service marks, or product names of the Licensor, + except as required for reasonable and customary use in describing the + origin of the Work and reproducing the content of the NOTICE file. + + 7. Disclaimer of Warranty. Unless required by applicable law or + agreed to in writing, Licensor provides the Work (and each + Contributor provides its Contributions) on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or + implied, including, without limitation, any warranties or conditions + of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A + PARTICULAR PURPOSE. You are solely responsible for determining the + appropriateness of using or redistributing the Work and assume any + risks associated with Your exercise of permissions under this License. + + 8. Limitation of Liability. 
In no event and under no legal theory, + whether in tort (including negligence), contract, or otherwise, + unless required by applicable law (such as deliberate and grossly + negligent acts) or agreed to in writing, shall any Contributor be + liable to You for damages, including any direct, indirect, special, + incidental, or consequential damages of any character arising as a + result of this License or out of the use or inability to use the + Work (including but not limited to damages for loss of goodwill, + work stoppage, computer failure or malfunction, or any and all + other commercial damages or losses), even if such Contributor + has been advised of the possibility of such damages. + + 9. Accepting Warranty or Additional Liability. While redistributing + the Work or Derivative Works thereof, You may choose to offer, + and charge a fee for, acceptance of support, warranty, indemnity, + or other liability obligations and/or rights consistent with this + License. However, in accepting such obligations, You may act only + on Your own behalf and on Your sole responsibility, not on behalf + of any other Contributor, and only if You agree to indemnify, + defend, and hold each Contributor harmless for any liability + incurred by, or claims asserted against, such Contributor by reason + of your accepting any such warranty or additional liability. + + END OF TERMS AND CONDITIONS + + APPENDIX: How to apply the Apache License to your work. + + To apply the Apache License to your work, attach the following + boilerplate notice, with the fields enclosed by brackets "{}" + replaced with your own identifying information. (Don't include + the brackets!) The text should be enclosed in the appropriate + comment syntax for the file format. We also recommend that a + file or class name and description of purpose be included on the + same "printed page" as the copyright notice for easier + identification within third-party archives. + + Copyright 2018-2020 Amazon.com, Inc. 
or its affiliates. All Rights Reserved. + + Licensed under the Apache License, Version 2.0 (the "License"); + you may not use this file except in compliance with the License. + You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + + Unless required by applicable law or agreed to in writing, software + distributed under the License is distributed on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + See the License for the specific language governing permissions and + limitations under the License. \ No newline at end of file diff --git a/amplify/functions/downloadDocument/node_modules/@aws-crypto/util/node_modules/@smithy/util-buffer-from/README.md b/amplify/functions/downloadDocument/node_modules/@aws-crypto/util/node_modules/@smithy/util-buffer-from/README.md new file mode 100644 index 0000000..c896b04 --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@aws-crypto/util/node_modules/@smithy/util-buffer-from/README.md @@ -0,0 +1,10 @@ +# @smithy/util-buffer-from + +[![NPM version](https://img.shields.io/npm/v/@smithy/util-buffer-from/latest.svg)](https://www.npmjs.com/package/@smithy/util-buffer-from) +[![NPM downloads](https://img.shields.io/npm/dm/@smithy/util-buffer-from.svg)](https://www.npmjs.com/package/@smithy/util-buffer-from) + +> An internal package + +## Usage + +You probably shouldn't, at least directly. 
diff --git a/amplify/functions/downloadDocument/node_modules/@aws-crypto/util/node_modules/@smithy/util-buffer-from/dist-cjs/index.js b/amplify/functions/downloadDocument/node_modules/@aws-crypto/util/node_modules/@smithy/util-buffer-from/dist-cjs/index.js new file mode 100644 index 0000000..c6738d9 --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@aws-crypto/util/node_modules/@smithy/util-buffer-from/dist-cjs/index.js @@ -0,0 +1,47 @@ +var __defProp = Object.defineProperty; +var __getOwnPropDesc = Object.getOwnPropertyDescriptor; +var __getOwnPropNames = Object.getOwnPropertyNames; +var __hasOwnProp = Object.prototype.hasOwnProperty; +var __name = (target, value) => __defProp(target, "name", { value, configurable: true }); +var __export = (target, all) => { + for (var name in all) + __defProp(target, name, { get: all[name], enumerable: true }); +}; +var __copyProps = (to, from, except, desc) => { + if (from && typeof from === "object" || typeof from === "function") { + for (let key of __getOwnPropNames(from)) + if (!__hasOwnProp.call(to, key) && key !== except) + __defProp(to, key, { get: () => from[key], enumerable: !(desc = __getOwnPropDesc(from, key)) || desc.enumerable }); + } + return to; +}; +var __toCommonJS = (mod) => __copyProps(__defProp({}, "__esModule", { value: true }), mod); + +// src/index.ts +var src_exports = {}; +__export(src_exports, { + fromArrayBuffer: () => fromArrayBuffer, + fromString: () => fromString +}); +module.exports = __toCommonJS(src_exports); +var import_is_array_buffer = require("@smithy/is-array-buffer"); +var import_buffer = require("buffer"); +var fromArrayBuffer = /* @__PURE__ */ __name((input, offset = 0, length = input.byteLength - offset) => { + if (!(0, import_is_array_buffer.isArrayBuffer)(input)) { + throw new TypeError(`The "input" argument must be ArrayBuffer. 
Received type ${typeof input} (${input})`); + } + return import_buffer.Buffer.from(input, offset, length); +}, "fromArrayBuffer"); +var fromString = /* @__PURE__ */ __name((input, encoding) => { + if (typeof input !== "string") { + throw new TypeError(`The "input" argument must be of type string. Received type ${typeof input} (${input})`); + } + return encoding ? import_buffer.Buffer.from(input, encoding) : import_buffer.Buffer.from(input); +}, "fromString"); +// Annotate the CommonJS export names for ESM import in node: + +0 && (module.exports = { + fromArrayBuffer, + fromString +}); + diff --git a/amplify/functions/downloadDocument/node_modules/@aws-crypto/util/node_modules/@smithy/util-buffer-from/dist-es/index.js b/amplify/functions/downloadDocument/node_modules/@aws-crypto/util/node_modules/@smithy/util-buffer-from/dist-es/index.js new file mode 100644 index 0000000..718f831 --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@aws-crypto/util/node_modules/@smithy/util-buffer-from/dist-es/index.js @@ -0,0 +1,14 @@ +import { isArrayBuffer } from "@smithy/is-array-buffer"; +import { Buffer } from "buffer"; +export const fromArrayBuffer = (input, offset = 0, length = input.byteLength - offset) => { + if (!isArrayBuffer(input)) { + throw new TypeError(`The "input" argument must be ArrayBuffer. Received type ${typeof input} (${input})`); + } + return Buffer.from(input, offset, length); +}; +export const fromString = (input, encoding) => { + if (typeof input !== "string") { + throw new TypeError(`The "input" argument must be of type string. Received type ${typeof input} (${input})`); + } + return encoding ? 
Buffer.from(input, encoding) : Buffer.from(input); +}; diff --git a/amplify/functions/downloadDocument/node_modules/@aws-crypto/util/node_modules/@smithy/util-buffer-from/dist-types/index.d.ts b/amplify/functions/downloadDocument/node_modules/@aws-crypto/util/node_modules/@smithy/util-buffer-from/dist-types/index.d.ts new file mode 100644 index 0000000..a523134 --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@aws-crypto/util/node_modules/@smithy/util-buffer-from/dist-types/index.d.ts @@ -0,0 +1,13 @@ +import { Buffer } from "buffer"; +/** + * @internal + */ +export declare const fromArrayBuffer: (input: ArrayBuffer, offset?: number, length?: number) => Buffer; +/** + * @internal + */ +export type StringEncoding = "ascii" | "utf8" | "utf16le" | "ucs2" | "base64" | "latin1" | "binary" | "hex"; +/** + * @internal + */ +export declare const fromString: (input: string, encoding?: StringEncoding) => Buffer; diff --git a/amplify/functions/downloadDocument/node_modules/@aws-crypto/util/node_modules/@smithy/util-buffer-from/dist-types/ts3.4/index.d.ts b/amplify/functions/downloadDocument/node_modules/@aws-crypto/util/node_modules/@smithy/util-buffer-from/dist-types/ts3.4/index.d.ts new file mode 100644 index 0000000..f9173f7 --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@aws-crypto/util/node_modules/@smithy/util-buffer-from/dist-types/ts3.4/index.d.ts @@ -0,0 +1,13 @@ +import { Buffer } from "buffer"; +/** + * @internal + */ +export declare const fromArrayBuffer: (input: ArrayBuffer, offset?: number, length?: number) => Buffer; +/** + * @internal + */ +export type StringEncoding = "ascii" | "utf8" | "utf16le" | "ucs2" | "base64" | "latin1" | "binary" | "hex"; +/** + * @internal + */ +export declare const fromString: (input: string, encoding?: StringEncoding) => Buffer; diff --git a/amplify/functions/downloadDocument/node_modules/@aws-crypto/util/node_modules/@smithy/util-buffer-from/package.json 
b/amplify/functions/downloadDocument/node_modules/@aws-crypto/util/node_modules/@smithy/util-buffer-from/package.json new file mode 100644 index 0000000..a12e51c --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@aws-crypto/util/node_modules/@smithy/util-buffer-from/package.json @@ -0,0 +1,61 @@ +{ + "name": "@smithy/util-buffer-from", + "version": "2.2.0", + "scripts": { + "build": "concurrently 'yarn:build:cjs' 'yarn:build:es' 'yarn:build:types && yarn build:types:downlevel'", + "build:cjs": "node ../../scripts/inline util-buffer-from", + "build:es": "yarn g:tsc -p tsconfig.es.json", + "build:types": "yarn g:tsc -p tsconfig.types.json", + "build:types:downlevel": "downlevel-dts dist-types dist-types/ts3.4", + "stage-release": "rimraf ./.release && yarn pack && mkdir ./.release && tar zxvf ./package.tgz --directory ./.release && rm ./package.tgz", + "clean": "rimraf ./dist-* && rimraf *.tsbuildinfo || exit 0", + "lint": "eslint -c ../../.eslintrc.js \"src/**/*.ts\"", + "format": "prettier --config ../../prettier.config.js --ignore-path ../.prettierignore --write \"**/*.{ts,md,json}\"", + "test": "yarn g:jest" + }, + "author": { + "name": "AWS SDK for JavaScript Team", + "url": "https://aws.amazon.com/javascript/" + }, + "license": "Apache-2.0", + "dependencies": { + "@smithy/is-array-buffer": "^2.2.0", + "tslib": "^2.6.2" + }, + "devDependencies": { + "@tsconfig/recommended": "1.0.1", + "@types/node": "^14.14.31", + "concurrently": "7.0.0", + "downlevel-dts": "0.10.1", + "rimraf": "3.0.2", + "typedoc": "0.23.23" + }, + "main": "./dist-cjs/index.js", + "module": "./dist-es/index.js", + "types": "./dist-types/index.d.ts", + "engines": { + "node": ">=14.0.0" + }, + "typesVersions": { + "<4.0": { + "dist-types/*": [ + "dist-types/ts3.4/*" + ] + } + }, + "files": [ + "dist-*/**" + ], + "homepage": "https://github.com/awslabs/smithy-typescript/tree/main/packages/util-buffer-from", + "repository": { + "type": "git", + "url": 
"https://github.com/awslabs/smithy-typescript.git", + "directory": "packages/util-buffer-from" + }, + "typedoc": { + "entryPoint": "src/index.ts" + }, + "publishConfig": { + "directory": ".release/package" + } +} \ No newline at end of file diff --git a/amplify/functions/downloadDocument/node_modules/@aws-crypto/util/node_modules/@smithy/util-utf8/LICENSE b/amplify/functions/downloadDocument/node_modules/@aws-crypto/util/node_modules/@smithy/util-utf8/LICENSE new file mode 100644 index 0000000..7b6491b --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@aws-crypto/util/node_modules/@smithy/util-utf8/LICENSE @@ -0,0 +1,201 @@ +Apache License + Version 2.0, January 2004 + http://www.apache.org/licenses/ + + TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION + + 1. Definitions. + + "License" shall mean the terms and conditions for use, reproduction, + and distribution as defined by Sections 1 through 9 of this document. + + "Licensor" shall mean the copyright owner or entity authorized by + the copyright owner that is granting the License. + + "Legal Entity" shall mean the union of the acting entity and all + other entities that control, are controlled by, or are under common + control with that entity. For the purposes of this definition, + "control" means (i) the power, direct or indirect, to cause the + direction or management of such entity, whether by contract or + otherwise, or (ii) ownership of fifty percent (50%) or more of the + outstanding shares, or (iii) beneficial ownership of such entity. + + "You" (or "Your") shall mean an individual or Legal Entity + exercising permissions granted by this License. + + "Source" form shall mean the preferred form for making modifications, + including but not limited to software source code, documentation + source, and configuration files. 
+ + "Object" form shall mean any form resulting from mechanical + transformation or translation of a Source form, including but + not limited to compiled object code, generated documentation, + and conversions to other media types. + + "Work" shall mean the work of authorship, whether in Source or + Object form, made available under the License, as indicated by a + copyright notice that is included in or attached to the work + (an example is provided in the Appendix below). + + "Derivative Works" shall mean any work, whether in Source or Object + form, that is based on (or derived from) the Work and for which the + editorial revisions, annotations, elaborations, or other modifications + represent, as a whole, an original work of authorship. For the purposes + of this License, Derivative Works shall not include works that remain + separable from, or merely link (or bind by name) to the interfaces of, + the Work and Derivative Works thereof. + + "Contribution" shall mean any work of authorship, including + the original version of the Work and any modifications or additions + to that Work or Derivative Works thereof, that is intentionally + submitted to Licensor for inclusion in the Work by the copyright owner + or by an individual or Legal Entity authorized to submit on behalf of + the copyright owner. For the purposes of this definition, "submitted" + means any form of electronic, verbal, or written communication sent + to the Licensor or its representatives, including but not limited to + communication on electronic mailing lists, source code control systems, + and issue tracking systems that are managed by, or on behalf of, the + Licensor for the purpose of discussing and improving the Work, but + excluding communication that is conspicuously marked or otherwise + designated in writing by the copyright owner as "Not a Contribution." 
+ + "Contributor" shall mean Licensor and any individual or Legal Entity + on behalf of whom a Contribution has been received by Licensor and + subsequently incorporated within the Work. + + 2. Grant of Copyright License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + copyright license to reproduce, prepare Derivative Works of, + publicly display, publicly perform, sublicense, and distribute the + Work and such Derivative Works in Source or Object form. + + 3. Grant of Patent License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + (except as stated in this section) patent license to make, have made, + use, offer to sell, sell, import, and otherwise transfer the Work, + where such license applies only to those patent claims licensable + by such Contributor that are necessarily infringed by their + Contribution(s) alone or by combination of their Contribution(s) + with the Work to which such Contribution(s) was submitted. If You + institute patent litigation against any entity (including a + cross-claim or counterclaim in a lawsuit) alleging that the Work + or a Contribution incorporated within the Work constitutes direct + or contributory patent infringement, then any patent licenses + granted to You under this License for that Work shall terminate + as of the date such litigation is filed. + + 4. Redistribution. 
You may reproduce and distribute copies of the + Work or Derivative Works thereof in any medium, with or without + modifications, and in Source or Object form, provided that You + meet the following conditions: + + (a) You must give any other recipients of the Work or + Derivative Works a copy of this License; and + + (b) You must cause any modified files to carry prominent notices + stating that You changed the files; and + + (c) You must retain, in the Source form of any Derivative Works + that You distribute, all copyright, patent, trademark, and + attribution notices from the Source form of the Work, + excluding those notices that do not pertain to any part of + the Derivative Works; and + + (d) If the Work includes a "NOTICE" text file as part of its + distribution, then any Derivative Works that You distribute must + include a readable copy of the attribution notices contained + within such NOTICE file, excluding those notices that do not + pertain to any part of the Derivative Works, in at least one + of the following places: within a NOTICE text file distributed + as part of the Derivative Works; within the Source form or + documentation, if provided along with the Derivative Works; or, + within a display generated by the Derivative Works, if and + wherever such third-party notices normally appear. The contents + of the NOTICE file are for informational purposes only and + do not modify the License. You may add Your own attribution + notices within Derivative Works that You distribute, alongside + or as an addendum to the NOTICE text from the Work, provided + that such additional attribution notices cannot be construed + as modifying the License. 
+ + You may add Your own copyright statement to Your modifications and + may provide additional or different license terms and conditions + for use, reproduction, or distribution of Your modifications, or + for any such Derivative Works as a whole, provided Your use, + reproduction, and distribution of the Work otherwise complies with + the conditions stated in this License. + + 5. Submission of Contributions. Unless You explicitly state otherwise, + any Contribution intentionally submitted for inclusion in the Work + by You to the Licensor shall be under the terms and conditions of + this License, without any additional terms or conditions. + Notwithstanding the above, nothing herein shall supersede or modify + the terms of any separate license agreement you may have executed + with Licensor regarding such Contributions. + + 6. Trademarks. This License does not grant permission to use the trade + names, trademarks, service marks, or product names of the Licensor, + except as required for reasonable and customary use in describing the + origin of the Work and reproducing the content of the NOTICE file. + + 7. Disclaimer of Warranty. Unless required by applicable law or + agreed to in writing, Licensor provides the Work (and each + Contributor provides its Contributions) on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or + implied, including, without limitation, any warranties or conditions + of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A + PARTICULAR PURPOSE. You are solely responsible for determining the + appropriateness of using or redistributing the Work and assume any + risks associated with Your exercise of permissions under this License. + + 8. Limitation of Liability. 
In no event and under no legal theory, + whether in tort (including negligence), contract, or otherwise, + unless required by applicable law (such as deliberate and grossly + negligent acts) or agreed to in writing, shall any Contributor be + liable to You for damages, including any direct, indirect, special, + incidental, or consequential damages of any character arising as a + result of this License or out of the use or inability to use the + Work (including but not limited to damages for loss of goodwill, + work stoppage, computer failure or malfunction, or any and all + other commercial damages or losses), even if such Contributor + has been advised of the possibility of such damages. + + 9. Accepting Warranty or Additional Liability. While redistributing + the Work or Derivative Works thereof, You may choose to offer, + and charge a fee for, acceptance of support, warranty, indemnity, + or other liability obligations and/or rights consistent with this + License. However, in accepting such obligations, You may act only + on Your own behalf and on Your sole responsibility, not on behalf + of any other Contributor, and only if You agree to indemnify, + defend, and hold each Contributor harmless for any liability + incurred by, or claims asserted against, such Contributor by reason + of your accepting any such warranty or additional liability. + + END OF TERMS AND CONDITIONS + + APPENDIX: How to apply the Apache License to your work. + + To apply the Apache License to your work, attach the following + boilerplate notice, with the fields enclosed by brackets "{}" + replaced with your own identifying information. (Don't include + the brackets!) The text should be enclosed in the appropriate + comment syntax for the file format. We also recommend that a + file or class name and description of purpose be included on the + same "printed page" as the copyright notice for easier + identification within third-party archives. + + Copyright 2018-2020 Amazon.com, Inc. 
or its affiliates. All Rights Reserved. + + Licensed under the Apache License, Version 2.0 (the "License"); + you may not use this file except in compliance with the License. + You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + + Unless required by applicable law or agreed to in writing, software + distributed under the License is distributed on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + See the License for the specific language governing permissions and + limitations under the License. \ No newline at end of file diff --git a/amplify/functions/downloadDocument/node_modules/@aws-crypto/util/node_modules/@smithy/util-utf8/README.md b/amplify/functions/downloadDocument/node_modules/@aws-crypto/util/node_modules/@smithy/util-utf8/README.md new file mode 100644 index 0000000..fc5db6d --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@aws-crypto/util/node_modules/@smithy/util-utf8/README.md @@ -0,0 +1,4 @@ +# @smithy/util-utf8 + +[![NPM version](https://img.shields.io/npm/v/@smithy/util-utf8/latest.svg)](https://www.npmjs.com/package/@smithy/util-utf8) +[![NPM downloads](https://img.shields.io/npm/dm/@smithy/util-utf8.svg)](https://www.npmjs.com/package/@smithy/util-utf8) diff --git a/amplify/functions/downloadDocument/node_modules/@aws-crypto/util/node_modules/@smithy/util-utf8/dist-cjs/fromUtf8.browser.js b/amplify/functions/downloadDocument/node_modules/@aws-crypto/util/node_modules/@smithy/util-utf8/dist-cjs/fromUtf8.browser.js new file mode 100644 index 0000000..532e610 --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@aws-crypto/util/node_modules/@smithy/util-utf8/dist-cjs/fromUtf8.browser.js @@ -0,0 +1 @@ +module.exports = require("./index.js"); \ No newline at end of file diff --git a/amplify/functions/downloadDocument/node_modules/@aws-crypto/util/node_modules/@smithy/util-utf8/dist-cjs/fromUtf8.js 
b/amplify/functions/downloadDocument/node_modules/@aws-crypto/util/node_modules/@smithy/util-utf8/dist-cjs/fromUtf8.js new file mode 100644 index 0000000..532e610 --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@aws-crypto/util/node_modules/@smithy/util-utf8/dist-cjs/fromUtf8.js @@ -0,0 +1 @@ +module.exports = require("./index.js"); \ No newline at end of file diff --git a/amplify/functions/downloadDocument/node_modules/@aws-crypto/util/node_modules/@smithy/util-utf8/dist-cjs/index.js b/amplify/functions/downloadDocument/node_modules/@aws-crypto/util/node_modules/@smithy/util-utf8/dist-cjs/index.js new file mode 100644 index 0000000..0b22680 --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@aws-crypto/util/node_modules/@smithy/util-utf8/dist-cjs/index.js @@ -0,0 +1,65 @@ +var __defProp = Object.defineProperty; +var __getOwnPropDesc = Object.getOwnPropertyDescriptor; +var __getOwnPropNames = Object.getOwnPropertyNames; +var __hasOwnProp = Object.prototype.hasOwnProperty; +var __name = (target, value) => __defProp(target, "name", { value, configurable: true }); +var __export = (target, all) => { + for (var name in all) + __defProp(target, name, { get: all[name], enumerable: true }); +}; +var __copyProps = (to, from, except, desc) => { + if (from && typeof from === "object" || typeof from === "function") { + for (let key of __getOwnPropNames(from)) + if (!__hasOwnProp.call(to, key) && key !== except) + __defProp(to, key, { get: () => from[key], enumerable: !(desc = __getOwnPropDesc(from, key)) || desc.enumerable }); + } + return to; +}; +var __toCommonJS = (mod) => __copyProps(__defProp({}, "__esModule", { value: true }), mod); + +// src/index.ts +var src_exports = {}; +__export(src_exports, { + fromUtf8: () => fromUtf8, + toUint8Array: () => toUint8Array, + toUtf8: () => toUtf8 +}); +module.exports = __toCommonJS(src_exports); + +// src/fromUtf8.ts +var import_util_buffer_from = require("@smithy/util-buffer-from"); +var fromUtf8 
= /* @__PURE__ */ __name((input) => { + const buf = (0, import_util_buffer_from.fromString)(input, "utf8"); + return new Uint8Array(buf.buffer, buf.byteOffset, buf.byteLength / Uint8Array.BYTES_PER_ELEMENT); +}, "fromUtf8"); + +// src/toUint8Array.ts +var toUint8Array = /* @__PURE__ */ __name((data) => { + if (typeof data === "string") { + return fromUtf8(data); + } + if (ArrayBuffer.isView(data)) { + return new Uint8Array(data.buffer, data.byteOffset, data.byteLength / Uint8Array.BYTES_PER_ELEMENT); + } + return new Uint8Array(data); +}, "toUint8Array"); + +// src/toUtf8.ts + +var toUtf8 = /* @__PURE__ */ __name((input) => { + if (typeof input === "string") { + return input; + } + if (typeof input !== "object" || typeof input.byteOffset !== "number" || typeof input.byteLength !== "number") { + throw new Error("@smithy/util-utf8: toUtf8 encoder function only accepts string | Uint8Array."); + } + return (0, import_util_buffer_from.fromArrayBuffer)(input.buffer, input.byteOffset, input.byteLength).toString("utf8"); +}, "toUtf8"); +// Annotate the CommonJS export names for ESM import in node: + +0 && (module.exports = { + fromUtf8, + toUint8Array, + toUtf8 +}); + diff --git a/amplify/functions/downloadDocument/node_modules/@aws-crypto/util/node_modules/@smithy/util-utf8/dist-cjs/toUint8Array.js b/amplify/functions/downloadDocument/node_modules/@aws-crypto/util/node_modules/@smithy/util-utf8/dist-cjs/toUint8Array.js new file mode 100644 index 0000000..532e610 --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@aws-crypto/util/node_modules/@smithy/util-utf8/dist-cjs/toUint8Array.js @@ -0,0 +1 @@ +module.exports = require("./index.js"); \ No newline at end of file diff --git a/amplify/functions/downloadDocument/node_modules/@aws-crypto/util/node_modules/@smithy/util-utf8/dist-cjs/toUtf8.browser.js b/amplify/functions/downloadDocument/node_modules/@aws-crypto/util/node_modules/@smithy/util-utf8/dist-cjs/toUtf8.browser.js new file mode 100644 index 
0000000..532e610 --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@aws-crypto/util/node_modules/@smithy/util-utf8/dist-cjs/toUtf8.browser.js @@ -0,0 +1 @@ +module.exports = require("./index.js"); \ No newline at end of file diff --git a/amplify/functions/downloadDocument/node_modules/@aws-crypto/util/node_modules/@smithy/util-utf8/dist-cjs/toUtf8.js b/amplify/functions/downloadDocument/node_modules/@aws-crypto/util/node_modules/@smithy/util-utf8/dist-cjs/toUtf8.js new file mode 100644 index 0000000..532e610 --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@aws-crypto/util/node_modules/@smithy/util-utf8/dist-cjs/toUtf8.js @@ -0,0 +1 @@ +module.exports = require("./index.js"); \ No newline at end of file diff --git a/amplify/functions/downloadDocument/node_modules/@aws-crypto/util/node_modules/@smithy/util-utf8/dist-es/fromUtf8.browser.js b/amplify/functions/downloadDocument/node_modules/@aws-crypto/util/node_modules/@smithy/util-utf8/dist-es/fromUtf8.browser.js new file mode 100644 index 0000000..7344190 --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@aws-crypto/util/node_modules/@smithy/util-utf8/dist-es/fromUtf8.browser.js @@ -0,0 +1 @@ +export const fromUtf8 = (input) => new TextEncoder().encode(input); diff --git a/amplify/functions/downloadDocument/node_modules/@aws-crypto/util/node_modules/@smithy/util-utf8/dist-es/fromUtf8.js b/amplify/functions/downloadDocument/node_modules/@aws-crypto/util/node_modules/@smithy/util-utf8/dist-es/fromUtf8.js new file mode 100644 index 0000000..6dc438b --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@aws-crypto/util/node_modules/@smithy/util-utf8/dist-es/fromUtf8.js @@ -0,0 +1,5 @@ +import { fromString } from "@smithy/util-buffer-from"; +export const fromUtf8 = (input) => { + const buf = fromString(input, "utf8"); + return new Uint8Array(buf.buffer, buf.byteOffset, buf.byteLength / Uint8Array.BYTES_PER_ELEMENT); +}; diff --git 
a/amplify/functions/downloadDocument/node_modules/@aws-crypto/util/node_modules/@smithy/util-utf8/dist-es/index.js b/amplify/functions/downloadDocument/node_modules/@aws-crypto/util/node_modules/@smithy/util-utf8/dist-es/index.js new file mode 100644 index 0000000..00ba465 --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@aws-crypto/util/node_modules/@smithy/util-utf8/dist-es/index.js @@ -0,0 +1,3 @@ +export * from "./fromUtf8"; +export * from "./toUint8Array"; +export * from "./toUtf8"; diff --git a/amplify/functions/downloadDocument/node_modules/@aws-crypto/util/node_modules/@smithy/util-utf8/dist-es/toUint8Array.js b/amplify/functions/downloadDocument/node_modules/@aws-crypto/util/node_modules/@smithy/util-utf8/dist-es/toUint8Array.js new file mode 100644 index 0000000..2cd36f7 --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@aws-crypto/util/node_modules/@smithy/util-utf8/dist-es/toUint8Array.js @@ -0,0 +1,10 @@ +import { fromUtf8 } from "./fromUtf8"; +export const toUint8Array = (data) => { + if (typeof data === "string") { + return fromUtf8(data); + } + if (ArrayBuffer.isView(data)) { + return new Uint8Array(data.buffer, data.byteOffset, data.byteLength / Uint8Array.BYTES_PER_ELEMENT); + } + return new Uint8Array(data); +}; diff --git a/amplify/functions/downloadDocument/node_modules/@aws-crypto/util/node_modules/@smithy/util-utf8/dist-es/toUtf8.browser.js b/amplify/functions/downloadDocument/node_modules/@aws-crypto/util/node_modules/@smithy/util-utf8/dist-es/toUtf8.browser.js new file mode 100644 index 0000000..c292127 --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@aws-crypto/util/node_modules/@smithy/util-utf8/dist-es/toUtf8.browser.js @@ -0,0 +1,9 @@ +export const toUtf8 = (input) => { + if (typeof input === "string") { + return input; + } + if (typeof input !== "object" || typeof input.byteOffset !== "number" || typeof input.byteLength !== "number") { + throw new Error("@smithy/util-utf8: toUtf8 
encoder function only accepts string | Uint8Array."); + } + return new TextDecoder("utf-8").decode(input); +}; diff --git a/amplify/functions/downloadDocument/node_modules/@aws-crypto/util/node_modules/@smithy/util-utf8/dist-es/toUtf8.js b/amplify/functions/downloadDocument/node_modules/@aws-crypto/util/node_modules/@smithy/util-utf8/dist-es/toUtf8.js new file mode 100644 index 0000000..7be8745 --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@aws-crypto/util/node_modules/@smithy/util-utf8/dist-es/toUtf8.js @@ -0,0 +1,10 @@ +import { fromArrayBuffer } from "@smithy/util-buffer-from"; +export const toUtf8 = (input) => { + if (typeof input === "string") { + return input; + } + if (typeof input !== "object" || typeof input.byteOffset !== "number" || typeof input.byteLength !== "number") { + throw new Error("@smithy/util-utf8: toUtf8 encoder function only accepts string | Uint8Array."); + } + return fromArrayBuffer(input.buffer, input.byteOffset, input.byteLength).toString("utf8"); +}; diff --git a/amplify/functions/downloadDocument/node_modules/@aws-crypto/util/node_modules/@smithy/util-utf8/dist-types/fromUtf8.browser.d.ts b/amplify/functions/downloadDocument/node_modules/@aws-crypto/util/node_modules/@smithy/util-utf8/dist-types/fromUtf8.browser.d.ts new file mode 100644 index 0000000..dd91981 --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@aws-crypto/util/node_modules/@smithy/util-utf8/dist-types/fromUtf8.browser.d.ts @@ -0,0 +1 @@ +export declare const fromUtf8: (input: string) => Uint8Array; diff --git a/amplify/functions/downloadDocument/node_modules/@aws-crypto/util/node_modules/@smithy/util-utf8/dist-types/fromUtf8.d.ts b/amplify/functions/downloadDocument/node_modules/@aws-crypto/util/node_modules/@smithy/util-utf8/dist-types/fromUtf8.d.ts new file mode 100644 index 0000000..dd91981 --- /dev/null +++ 
b/amplify/functions/downloadDocument/node_modules/@aws-crypto/util/node_modules/@smithy/util-utf8/dist-types/fromUtf8.d.ts @@ -0,0 +1 @@ +export declare const fromUtf8: (input: string) => Uint8Array; diff --git a/amplify/functions/downloadDocument/node_modules/@aws-crypto/util/node_modules/@smithy/util-utf8/dist-types/index.d.ts b/amplify/functions/downloadDocument/node_modules/@aws-crypto/util/node_modules/@smithy/util-utf8/dist-types/index.d.ts new file mode 100644 index 0000000..00ba465 --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@aws-crypto/util/node_modules/@smithy/util-utf8/dist-types/index.d.ts @@ -0,0 +1,3 @@ +export * from "./fromUtf8"; +export * from "./toUint8Array"; +export * from "./toUtf8"; diff --git a/amplify/functions/downloadDocument/node_modules/@aws-crypto/util/node_modules/@smithy/util-utf8/dist-types/toUint8Array.d.ts b/amplify/functions/downloadDocument/node_modules/@aws-crypto/util/node_modules/@smithy/util-utf8/dist-types/toUint8Array.d.ts new file mode 100644 index 0000000..11b6342 --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@aws-crypto/util/node_modules/@smithy/util-utf8/dist-types/toUint8Array.d.ts @@ -0,0 +1 @@ +export declare const toUint8Array: (data: string | ArrayBuffer | ArrayBufferView) => Uint8Array; diff --git a/amplify/functions/downloadDocument/node_modules/@aws-crypto/util/node_modules/@smithy/util-utf8/dist-types/toUtf8.browser.d.ts b/amplify/functions/downloadDocument/node_modules/@aws-crypto/util/node_modules/@smithy/util-utf8/dist-types/toUtf8.browser.d.ts new file mode 100644 index 0000000..8494acd --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@aws-crypto/util/node_modules/@smithy/util-utf8/dist-types/toUtf8.browser.d.ts @@ -0,0 +1,7 @@ +/** + * + * This does not convert non-utf8 strings to utf8, it only passes through strings if + * a string is received instead of a Uint8Array. 
+ * + */ +export declare const toUtf8: (input: Uint8Array | string) => string; diff --git a/amplify/functions/downloadDocument/node_modules/@aws-crypto/util/node_modules/@smithy/util-utf8/dist-types/toUtf8.d.ts b/amplify/functions/downloadDocument/node_modules/@aws-crypto/util/node_modules/@smithy/util-utf8/dist-types/toUtf8.d.ts new file mode 100644 index 0000000..8494acd --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@aws-crypto/util/node_modules/@smithy/util-utf8/dist-types/toUtf8.d.ts @@ -0,0 +1,7 @@ +/** + * + * This does not convert non-utf8 strings to utf8, it only passes through strings if + * a string is received instead of a Uint8Array. + * + */ +export declare const toUtf8: (input: Uint8Array | string) => string; diff --git a/amplify/functions/downloadDocument/node_modules/@aws-crypto/util/node_modules/@smithy/util-utf8/dist-types/ts3.4/fromUtf8.browser.d.ts b/amplify/functions/downloadDocument/node_modules/@aws-crypto/util/node_modules/@smithy/util-utf8/dist-types/ts3.4/fromUtf8.browser.d.ts new file mode 100644 index 0000000..39f3d6d --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@aws-crypto/util/node_modules/@smithy/util-utf8/dist-types/ts3.4/fromUtf8.browser.d.ts @@ -0,0 +1 @@ +export declare const fromUtf8: (input: string) => Uint8Array; diff --git a/amplify/functions/downloadDocument/node_modules/@aws-crypto/util/node_modules/@smithy/util-utf8/dist-types/ts3.4/fromUtf8.d.ts b/amplify/functions/downloadDocument/node_modules/@aws-crypto/util/node_modules/@smithy/util-utf8/dist-types/ts3.4/fromUtf8.d.ts new file mode 100644 index 0000000..39f3d6d --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@aws-crypto/util/node_modules/@smithy/util-utf8/dist-types/ts3.4/fromUtf8.d.ts @@ -0,0 +1 @@ +export declare const fromUtf8: (input: string) => Uint8Array; diff --git a/amplify/functions/downloadDocument/node_modules/@aws-crypto/util/node_modules/@smithy/util-utf8/dist-types/ts3.4/index.d.ts 
b/amplify/functions/downloadDocument/node_modules/@aws-crypto/util/node_modules/@smithy/util-utf8/dist-types/ts3.4/index.d.ts new file mode 100644 index 0000000..ef9761d --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@aws-crypto/util/node_modules/@smithy/util-utf8/dist-types/ts3.4/index.d.ts @@ -0,0 +1,3 @@ +export * from "./fromUtf8"; +export * from "./toUint8Array"; +export * from "./toUtf8"; diff --git a/amplify/functions/downloadDocument/node_modules/@aws-crypto/util/node_modules/@smithy/util-utf8/dist-types/ts3.4/toUint8Array.d.ts b/amplify/functions/downloadDocument/node_modules/@aws-crypto/util/node_modules/@smithy/util-utf8/dist-types/ts3.4/toUint8Array.d.ts new file mode 100644 index 0000000..562fe10 --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@aws-crypto/util/node_modules/@smithy/util-utf8/dist-types/ts3.4/toUint8Array.d.ts @@ -0,0 +1 @@ +export declare const toUint8Array: (data: string | ArrayBuffer | ArrayBufferView) => Uint8Array; diff --git a/amplify/functions/downloadDocument/node_modules/@aws-crypto/util/node_modules/@smithy/util-utf8/dist-types/ts3.4/toUtf8.browser.d.ts b/amplify/functions/downloadDocument/node_modules/@aws-crypto/util/node_modules/@smithy/util-utf8/dist-types/ts3.4/toUtf8.browser.d.ts new file mode 100644 index 0000000..33511ad --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@aws-crypto/util/node_modules/@smithy/util-utf8/dist-types/ts3.4/toUtf8.browser.d.ts @@ -0,0 +1,7 @@ +/** + * + * This does not convert non-utf8 strings to utf8, it only passes through strings if + * a string is received instead of a Uint8Array. 
+ * + */ +export declare const toUtf8: (input: Uint8Array | string) => string; diff --git a/amplify/functions/downloadDocument/node_modules/@aws-crypto/util/node_modules/@smithy/util-utf8/dist-types/ts3.4/toUtf8.d.ts b/amplify/functions/downloadDocument/node_modules/@aws-crypto/util/node_modules/@smithy/util-utf8/dist-types/ts3.4/toUtf8.d.ts new file mode 100644 index 0000000..33511ad --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@aws-crypto/util/node_modules/@smithy/util-utf8/dist-types/ts3.4/toUtf8.d.ts @@ -0,0 +1,7 @@ +/** + * + * This does not convert non-utf8 strings to utf8, it only passes through strings if + * a string is received instead of a Uint8Array. + * + */ +export declare const toUtf8: (input: Uint8Array | string) => string; diff --git a/amplify/functions/downloadDocument/node_modules/@aws-crypto/util/node_modules/@smithy/util-utf8/package.json b/amplify/functions/downloadDocument/node_modules/@aws-crypto/util/node_modules/@smithy/util-utf8/package.json new file mode 100644 index 0000000..78bfb4d --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@aws-crypto/util/node_modules/@smithy/util-utf8/package.json @@ -0,0 +1,66 @@ +{ + "name": "@smithy/util-utf8", + "version": "2.3.0", + "description": "A UTF-8 string <-> UInt8Array converter", + "main": "./dist-cjs/index.js", + "module": "./dist-es/index.js", + "scripts": { + "build": "concurrently 'yarn:build:cjs' 'yarn:build:es' 'yarn:build:types && yarn build:types:downlevel'", + "build:cjs": "node ../../scripts/inline util-utf8", + "build:es": "yarn g:tsc -p tsconfig.es.json", + "build:types": "yarn g:tsc -p tsconfig.types.json", + "build:types:downlevel": "downlevel-dts dist-types dist-types/ts3.4", + "stage-release": "rimraf ./.release && yarn pack && mkdir ./.release && tar zxvf ./package.tgz --directory ./.release && rm ./package.tgz", + "clean": "rimraf ./dist-* && rimraf *.tsbuildinfo || exit 0", + "lint": "eslint -c ../../.eslintrc.js \"src/**/*.ts\"", + 
"format": "prettier --config ../../prettier.config.js --ignore-path ../.prettierignore --write \"**/*.{ts,md,json}\"", + "test": "yarn g:jest" + }, + "author": { + "name": "AWS SDK for JavaScript Team", + "url": "https://aws.amazon.com/javascript/" + }, + "license": "Apache-2.0", + "dependencies": { + "@smithy/util-buffer-from": "^2.2.0", + "tslib": "^2.6.2" + }, + "devDependencies": { + "@tsconfig/recommended": "1.0.1", + "concurrently": "7.0.0", + "downlevel-dts": "0.10.1", + "rimraf": "3.0.2", + "typedoc": "0.23.23" + }, + "types": "./dist-types/index.d.ts", + "engines": { + "node": ">=14.0.0" + }, + "typesVersions": { + "<4.0": { + "dist-types/*": [ + "dist-types/ts3.4/*" + ] + } + }, + "files": [ + "dist-*/**" + ], + "browser": { + "./dist-es/fromUtf8": "./dist-es/fromUtf8.browser", + "./dist-es/toUtf8": "./dist-es/toUtf8.browser" + }, + "react-native": {}, + "homepage": "https://github.com/awslabs/smithy-typescript/tree/main/packages/util-utf8", + "repository": { + "type": "git", + "url": "https://github.com/awslabs/smithy-typescript.git", + "directory": "packages/util-utf8" + }, + "typedoc": { + "entryPoint": "src/index.ts" + }, + "publishConfig": { + "directory": ".release/package" + } +} \ No newline at end of file diff --git a/amplify/functions/downloadDocument/node_modules/@aws-crypto/util/package.json b/amplify/functions/downloadDocument/node_modules/@aws-crypto/util/package.json new file mode 100644 index 0000000..431107a --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@aws-crypto/util/package.json @@ -0,0 +1,32 @@ +{ + "name": "@aws-crypto/util", + "version": "5.2.0", + "scripts": { + "prepublishOnly": "tsc -p tsconfig.json && tsc -p tsconfig.module.json", + "pretest": "tsc -p tsconfig.test.json", + "test": "mocha --require ts-node/register test/**/*test.ts" + }, + "main": "./build/main/index.js", + "module": "./build/module/index.js", + "types": "./build/main/index.d.ts", + "repository": { + "type": "git", + "url": 
"git@github.com:aws/aws-sdk-js-crypto-helpers.git" + }, + "author": { + "name": "AWS Crypto Tools Team", + "email": "aws-cryptools@amazon.com", + "url": "https://docs.aws.amazon.com/aws-crypto-tools/index.html?id=docs_gateway#lang/en_us" + }, + "homepage": "https://github.com/aws/aws-sdk-js-crypto-helpers/tree/master/packages/util", + "license": "Apache-2.0", + "dependencies": { + "@aws-sdk/types": "^3.222.0", + "@smithy/util-utf8": "^2.0.0", + "tslib": "^2.6.2" + }, + "publishConfig": { + "access": "public" + }, + "gitHead": "c11b171b35ec5c093364f0e0d8dc4ab1af68e748" +} diff --git a/amplify/functions/downloadDocument/node_modules/@aws-crypto/util/src/convertToBuffer.ts b/amplify/functions/downloadDocument/node_modules/@aws-crypto/util/src/convertToBuffer.ts new file mode 100644 index 0000000..f9f163e --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@aws-crypto/util/src/convertToBuffer.ts @@ -0,0 +1,30 @@ +// Copyright Amazon.com Inc. or its affiliates. All Rights Reserved. +// SPDX-License-Identifier: Apache-2.0 + +import { SourceData } from "@aws-sdk/types"; +import { fromUtf8 as fromUtf8Browser } from "@smithy/util-utf8"; + +// Quick polyfill +const fromUtf8 = + typeof Buffer !== "undefined" && Buffer.from + ? 
(input: string) => Buffer.from(input, "utf8") + : fromUtf8Browser; + +export function convertToBuffer(data: SourceData): Uint8Array { + // Already a Uint8, do nothing + if (data instanceof Uint8Array) return data; + + if (typeof data === "string") { + return fromUtf8(data); + } + + if (ArrayBuffer.isView(data)) { + return new Uint8Array( + data.buffer, + data.byteOffset, + data.byteLength / Uint8Array.BYTES_PER_ELEMENT + ); + } + + return new Uint8Array(data); +} diff --git a/amplify/functions/downloadDocument/node_modules/@aws-crypto/util/src/index.ts b/amplify/functions/downloadDocument/node_modules/@aws-crypto/util/src/index.ts new file mode 100644 index 0000000..2f6c62a --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@aws-crypto/util/src/index.ts @@ -0,0 +1,7 @@ +// Copyright Amazon.com Inc. or its affiliates. All Rights Reserved. +// SPDX-License-Identifier: Apache-2.0 + +export { convertToBuffer } from "./convertToBuffer"; +export { isEmptyData } from "./isEmptyData"; +export { numToUint8 } from "./numToUint8"; +export {uint32ArrayFrom} from './uint32ArrayFrom'; diff --git a/amplify/functions/downloadDocument/node_modules/@aws-crypto/util/src/isEmptyData.ts b/amplify/functions/downloadDocument/node_modules/@aws-crypto/util/src/isEmptyData.ts new file mode 100644 index 0000000..089764d --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@aws-crypto/util/src/isEmptyData.ts @@ -0,0 +1,12 @@ +// Copyright Amazon.com Inc. or its affiliates. All Rights Reserved. 
+// SPDX-License-Identifier: Apache-2.0 + +import { SourceData } from "@aws-sdk/types"; + +export function isEmptyData(data: SourceData): boolean { + if (typeof data === "string") { + return data.length === 0; + } + + return data.byteLength === 0; +} diff --git a/amplify/functions/downloadDocument/node_modules/@aws-crypto/util/src/numToUint8.ts b/amplify/functions/downloadDocument/node_modules/@aws-crypto/util/src/numToUint8.ts new file mode 100644 index 0000000..2f40ace --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@aws-crypto/util/src/numToUint8.ts @@ -0,0 +1,11 @@ +// Copyright Amazon.com Inc. or its affiliates. All Rights Reserved. +// SPDX-License-Identifier: Apache-2.0 + +export function numToUint8(num: number) { + return new Uint8Array([ + (num & 0xff000000) >> 24, + (num & 0x00ff0000) >> 16, + (num & 0x0000ff00) >> 8, + num & 0x000000ff, + ]); +} diff --git a/amplify/functions/downloadDocument/node_modules/@aws-crypto/util/src/uint32ArrayFrom.ts b/amplify/functions/downloadDocument/node_modules/@aws-crypto/util/src/uint32ArrayFrom.ts new file mode 100644 index 0000000..b9b6d88 --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@aws-crypto/util/src/uint32ArrayFrom.ts @@ -0,0 +1,16 @@ +// Copyright Amazon.com Inc. or its affiliates. All Rights Reserved. 
+// SPDX-License-Identifier: Apache-2.0 + +// IE 11 does not support Array.from, so we do it manually +export function uint32ArrayFrom(a_lookUpTable: Array): Uint32Array { + if (!Uint32Array.from) { + const return_array = new Uint32Array(a_lookUpTable.length) + let a_index = 0 + while (a_index < a_lookUpTable.length) { + return_array[a_index] = a_lookUpTable[a_index] + a_index += 1 + } + return return_array + } + return Uint32Array.from(a_lookUpTable) +} diff --git a/amplify/functions/downloadDocument/node_modules/@aws-crypto/util/tsconfig.json b/amplify/functions/downloadDocument/node_modules/@aws-crypto/util/tsconfig.json new file mode 100644 index 0000000..2b996d0 --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@aws-crypto/util/tsconfig.json @@ -0,0 +1,9 @@ +{ + "extends": "../tsconfig.json", + "compilerOptions": { + "rootDir": "./src", + "outDir": "./build/main", + }, + "include": ["src/**/*.ts"], + "exclude": ["node_modules/**"] +} diff --git a/amplify/functions/downloadDocument/node_modules/@aws-crypto/util/tsconfig.module.json b/amplify/functions/downloadDocument/node_modules/@aws-crypto/util/tsconfig.module.json new file mode 100644 index 0000000..7d0cfdd --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@aws-crypto/util/tsconfig.module.json @@ -0,0 +1,7 @@ +{ + "extends": "./tsconfig", + "compilerOptions": { + "outDir": "build/module", + "module": "esnext", + } +} diff --git a/amplify/functions/downloadDocument/node_modules/@aws-sdk/client-dynamodb/LICENSE b/amplify/functions/downloadDocument/node_modules/@aws-sdk/client-dynamodb/LICENSE new file mode 100644 index 0000000..dd65ae0 --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@aws-sdk/client-dynamodb/LICENSE @@ -0,0 +1,201 @@ + Apache License + Version 2.0, January 2004 + http://www.apache.org/licenses/ + + TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION + + 1. Definitions. 
+ + "License" shall mean the terms and conditions for use, reproduction, + and distribution as defined by Sections 1 through 9 of this document. + + "Licensor" shall mean the copyright owner or entity authorized by + the copyright owner that is granting the License. + + "Legal Entity" shall mean the union of the acting entity and all + other entities that control, are controlled by, or are under common + control with that entity. For the purposes of this definition, + "control" means (i) the power, direct or indirect, to cause the + direction or management of such entity, whether by contract or + otherwise, or (ii) ownership of fifty percent (50%) or more of the + outstanding shares, or (iii) beneficial ownership of such entity. + + "You" (or "Your") shall mean an individual or Legal Entity + exercising permissions granted by this License. + + "Source" form shall mean the preferred form for making modifications, + including but not limited to software source code, documentation + source, and configuration files. + + "Object" form shall mean any form resulting from mechanical + transformation or translation of a Source form, including but + not limited to compiled object code, generated documentation, + and conversions to other media types. + + "Work" shall mean the work of authorship, whether in Source or + Object form, made available under the License, as indicated by a + copyright notice that is included in or attached to the work + (an example is provided in the Appendix below). + + "Derivative Works" shall mean any work, whether in Source or Object + form, that is based on (or derived from) the Work and for which the + editorial revisions, annotations, elaborations, or other modifications + represent, as a whole, an original work of authorship. For the purposes + of this License, Derivative Works shall not include works that remain + separable from, or merely link (or bind by name) to the interfaces of, + the Work and Derivative Works thereof. 
+ + "Contribution" shall mean any work of authorship, including + the original version of the Work and any modifications or additions + to that Work or Derivative Works thereof, that is intentionally + submitted to Licensor for inclusion in the Work by the copyright owner + or by an individual or Legal Entity authorized to submit on behalf of + the copyright owner. For the purposes of this definition, "submitted" + means any form of electronic, verbal, or written communication sent + to the Licensor or its representatives, including but not limited to + communication on electronic mailing lists, source code control systems, + and issue tracking systems that are managed by, or on behalf of, the + Licensor for the purpose of discussing and improving the Work, but + excluding communication that is conspicuously marked or otherwise + designated in writing by the copyright owner as "Not a Contribution." + + "Contributor" shall mean Licensor and any individual or Legal Entity + on behalf of whom a Contribution has been received by Licensor and + subsequently incorporated within the Work. + + 2. Grant of Copyright License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + copyright license to reproduce, prepare Derivative Works of, + publicly display, publicly perform, sublicense, and distribute the + Work and such Derivative Works in Source or Object form. + + 3. Grant of Patent License. 
Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + (except as stated in this section) patent license to make, have made, + use, offer to sell, sell, import, and otherwise transfer the Work, + where such license applies only to those patent claims licensable + by such Contributor that are necessarily infringed by their + Contribution(s) alone or by combination of their Contribution(s) + with the Work to which such Contribution(s) was submitted. If You + institute patent litigation against any entity (including a + cross-claim or counterclaim in a lawsuit) alleging that the Work + or a Contribution incorporated within the Work constitutes direct + or contributory patent infringement, then any patent licenses + granted to You under this License for that Work shall terminate + as of the date such litigation is filed. + + 4. Redistribution. You may reproduce and distribute copies of the + Work or Derivative Works thereof in any medium, with or without + modifications, and in Source or Object form, provided that You + meet the following conditions: + + (a) You must give any other recipients of the Work or + Derivative Works a copy of this License; and + + (b) You must cause any modified files to carry prominent notices + stating that You changed the files; and + + (c) You must retain, in the Source form of any Derivative Works + that You distribute, all copyright, patent, trademark, and + attribution notices from the Source form of the Work, + excluding those notices that do not pertain to any part of + the Derivative Works; and + + (d) If the Work includes a "NOTICE" text file as part of its + distribution, then any Derivative Works that You distribute must + include a readable copy of the attribution notices contained + within such NOTICE file, excluding those notices that do not + pertain to any part of the Derivative Works, in at least one + of 
the following places: within a NOTICE text file distributed + as part of the Derivative Works; within the Source form or + documentation, if provided along with the Derivative Works; or, + within a display generated by the Derivative Works, if and + wherever such third-party notices normally appear. The contents + of the NOTICE file are for informational purposes only and + do not modify the License. You may add Your own attribution + notices within Derivative Works that You distribute, alongside + or as an addendum to the NOTICE text from the Work, provided + that such additional attribution notices cannot be construed + as modifying the License. + + You may add Your own copyright statement to Your modifications and + may provide additional or different license terms and conditions + for use, reproduction, or distribution of Your modifications, or + for any such Derivative Works as a whole, provided Your use, + reproduction, and distribution of the Work otherwise complies with + the conditions stated in this License. + + 5. Submission of Contributions. Unless You explicitly state otherwise, + any Contribution intentionally submitted for inclusion in the Work + by You to the Licensor shall be under the terms and conditions of + this License, without any additional terms or conditions. + Notwithstanding the above, nothing herein shall supersede or modify + the terms of any separate license agreement you may have executed + with Licensor regarding such Contributions. + + 6. Trademarks. This License does not grant permission to use the trade + names, trademarks, service marks, or product names of the Licensor, + except as required for reasonable and customary use in describing the + origin of the Work and reproducing the content of the NOTICE file. + + 7. Disclaimer of Warranty. 
Unless required by applicable law or + agreed to in writing, Licensor provides the Work (and each + Contributor provides its Contributions) on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or + implied, including, without limitation, any warranties or conditions + of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A + PARTICULAR PURPOSE. You are solely responsible for determining the + appropriateness of using or redistributing the Work and assume any + risks associated with Your exercise of permissions under this License. + + 8. Limitation of Liability. In no event and under no legal theory, + whether in tort (including negligence), contract, or otherwise, + unless required by applicable law (such as deliberate and grossly + negligent acts) or agreed to in writing, shall any Contributor be + liable to You for damages, including any direct, indirect, special, + incidental, or consequential damages of any character arising as a + result of this License or out of the use or inability to use the + Work (including but not limited to damages for loss of goodwill, + work stoppage, computer failure or malfunction, or any and all + other commercial damages or losses), even if such Contributor + has been advised of the possibility of such damages. + + 9. Accepting Warranty or Additional Liability. While redistributing + the Work or Derivative Works thereof, You may choose to offer, + and charge a fee for, acceptance of support, warranty, indemnity, + or other liability obligations and/or rights consistent with this + License. However, in accepting such obligations, You may act only + on Your own behalf and on Your sole responsibility, not on behalf + of any other Contributor, and only if You agree to indemnify, + defend, and hold each Contributor harmless for any liability + incurred by, or claims asserted against, such Contributor by reason + of your accepting any such warranty or additional liability. 
+ + END OF TERMS AND CONDITIONS + + APPENDIX: How to apply the Apache License to your work. + + To apply the Apache License to your work, attach the following + boilerplate notice, with the fields enclosed by brackets "{}" + replaced with your own identifying information. (Don't include + the brackets!) The text should be enclosed in the appropriate + comment syntax for the file format. We also recommend that a + file or class name and description of purpose be included on the + same "printed page" as the copyright notice for easier + identification within third-party archives. + + Copyright 2018-2020 Amazon.com, Inc. or its affiliates. All Rights Reserved. + + Licensed under the Apache License, Version 2.0 (the "License"); + you may not use this file except in compliance with the License. + You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + + Unless required by applicable law or agreed to in writing, software + distributed under the License is distributed on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + See the License for the specific language governing permissions and + limitations under the License. diff --git a/amplify/functions/downloadDocument/node_modules/@aws-sdk/client-dynamodb/README.md b/amplify/functions/downloadDocument/node_modules/@aws-sdk/client-dynamodb/README.md new file mode 100644 index 0000000..c1da3dc --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@aws-sdk/client-dynamodb/README.md @@ -0,0 +1,678 @@ + + +# @aws-sdk/client-dynamodb + +## Description + +AWS SDK for JavaScript DynamoDB Client for Node.js, Browser and React Native. + +Amazon DynamoDB + +

Amazon DynamoDB is a fully managed NoSQL database service that provides fast +and predictable performance with seamless scalability. DynamoDB lets you +offload the administrative burdens of operating and scaling a distributed database, so +that you don't have to worry about hardware provisioning, setup and configuration, +replication, software patching, or cluster scaling.

+

With DynamoDB, you can create database tables that can store and retrieve +any amount of data, and serve any level of request traffic. You can scale up or scale +down your tables' throughput capacity without downtime or performance degradation, and +use the Amazon Web Services Management Console to monitor resource utilization and performance +metrics.

+

DynamoDB automatically spreads the data and traffic for your tables over +a sufficient number of servers to handle your throughput and storage requirements, while +maintaining consistent and fast performance. All of your data is stored on solid state +disks (SSDs) and automatically replicated across multiple Availability Zones in an +Amazon Web Services Region, providing built-in high availability and data +durability.

+ +## Installing + +To install this package, simply type add or install @aws-sdk/client-dynamodb +using your favorite package manager: + +- `npm install @aws-sdk/client-dynamodb` +- `yarn add @aws-sdk/client-dynamodb` +- `pnpm add @aws-sdk/client-dynamodb` + +## Getting Started + +### Import + +The AWS SDK is modulized by clients and commands. +To send a request, you only need to import the `DynamoDBClient` and +the commands you need, for example `ListBackupsCommand`: + +```js +// ES5 example +const { DynamoDBClient, ListBackupsCommand } = require("@aws-sdk/client-dynamodb"); +``` + +```ts +// ES6+ example +import { DynamoDBClient, ListBackupsCommand } from "@aws-sdk/client-dynamodb"; +``` + +### Usage + +To send a request, you: + +- Initiate client with configuration (e.g. credentials, region). +- Initiate command with input parameters. +- Call `send` operation on client with command object as input. +- If you are using a custom http handler, you may call `destroy()` to close open connections. + +```js +// a client can be shared by different commands. +const client = new DynamoDBClient({ region: "REGION" }); + +const params = { + /** input parameters */ +}; +const command = new ListBackupsCommand(params); +``` + +#### Async/await + +We recommend using [await](https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Operators/await) +operator to wait for the promise returned by send operation as follows: + +```js +// async/await. +try { + const data = await client.send(command); + // process data. +} catch (error) { + // error handling. +} finally { + // finally. +} +``` + +Async-await is clean, concise, intuitive, easy to debug and has better error handling +as compared to using Promise chains or callbacks. + +#### Promises + +You can also use [Promise chaining](https://developer.mozilla.org/en-US/docs/Web/JavaScript/Guide/Using_promises#chaining) +to execute send operation. + +```js +client.send(command).then( + (data) => { + // process data. 
+ }, + (error) => { + // error handling. + } +); +``` + +Promises can also be called using `.catch()` and `.finally()` as follows: + +```js +client + .send(command) + .then((data) => { + // process data. + }) + .catch((error) => { + // error handling. + }) + .finally(() => { + // finally. + }); +``` + +#### Callbacks + +We do not recommend using callbacks because of [callback hell](http://callbackhell.com/), +but they are supported by the send operation. + +```js +// callbacks. +client.send(command, (err, data) => { + // process err and data. +}); +``` + +#### v2 compatible style + +The client can also send requests using v2 compatible style. +However, it results in a bigger bundle size and may be dropped in next major version. More details in the blog post +on [modular packages in AWS SDK for JavaScript](https://aws.amazon.com/blogs/developer/modular-packages-in-aws-sdk-for-javascript/) + +```ts +import * as AWS from "@aws-sdk/client-dynamodb"; +const client = new AWS.DynamoDB({ region: "REGION" }); + +// async/await. +try { + const data = await client.listBackups(params); + // process data. +} catch (error) { + // error handling. +} + +// Promises. +client + .listBackups(params) + .then((data) => { + // process data. + }) + .catch((error) => { + // error handling. + }); + +// callbacks. +client.listBackups(params, (err, data) => { + // process err and data. +}); +``` + +### Troubleshooting + +When the service returns an exception, the error will include the exception information, +as well as response metadata (e.g. request id). + +```js +try { + const data = await client.send(command); + // process data. +} catch (error) { + const { requestId, cfId, extendedRequestId } = error.$metadata; + console.log({ requestId, cfId, extendedRequestId }); + /** + * The keys within exceptions are also parsed. 
+ * You can access them by specifying exception names: + * if (error.name === 'SomeServiceException') { + * const value = error.specialKeyInException; + * } + */ +} +``` + +## Getting Help + +Please use these community resources for getting help. +We use the GitHub issues for tracking bugs and feature requests, but have limited bandwidth to address them. + +- Visit [Developer Guide](https://docs.aws.amazon.com/sdk-for-javascript/v3/developer-guide/welcome.html) + or [API Reference](https://docs.aws.amazon.com/AWSJavaScriptSDK/v3/latest/index.html). +- Check out the blog posts tagged with [`aws-sdk-js`](https://aws.amazon.com/blogs/developer/tag/aws-sdk-js/) + on AWS Developer Blog. +- Ask a question on [StackOverflow](https://stackoverflow.com/questions/tagged/aws-sdk-js) and tag it with `aws-sdk-js`. +- Join the AWS JavaScript community on [gitter](https://gitter.im/aws/aws-sdk-js-v3). +- If it turns out that you may have found a bug, please [open an issue](https://github.com/aws/aws-sdk-js-v3/issues/new/choose). + +To test your universal JavaScript code in Node.js, browser and react-native environments, +visit our [code samples repo](https://github.com/aws-samples/aws-sdk-js-tests). + +## Contributing + +This client code is generated automatically. Any modifications will be overwritten the next time the `@aws-sdk/client-dynamodb` package is updated. +To contribute to client you can check our [generate clients scripts](https://github.com/aws/aws-sdk-js-v3/tree/main/scripts/generate-clients). + +## License + +This SDK is distributed under the +[Apache License, Version 2.0](http://www.apache.org/licenses/LICENSE-2.0), +see LICENSE for more information. + +## Client Commands (Operations List) + +
+ +BatchExecuteStatement + + +[Command API Reference](https://docs.aws.amazon.com/AWSJavaScriptSDK/v3/latest/client/dynamodb/command/BatchExecuteStatementCommand/) / [Input](https://docs.aws.amazon.com/AWSJavaScriptSDK/v3/latest/Package/-aws-sdk-client-dynamodb/Interface/BatchExecuteStatementCommandInput/) / [Output](https://docs.aws.amazon.com/AWSJavaScriptSDK/v3/latest/Package/-aws-sdk-client-dynamodb/Interface/BatchExecuteStatementCommandOutput/) + +
+
+ +BatchGetItem + + +[Command API Reference](https://docs.aws.amazon.com/AWSJavaScriptSDK/v3/latest/client/dynamodb/command/BatchGetItemCommand/) / [Input](https://docs.aws.amazon.com/AWSJavaScriptSDK/v3/latest/Package/-aws-sdk-client-dynamodb/Interface/BatchGetItemCommandInput/) / [Output](https://docs.aws.amazon.com/AWSJavaScriptSDK/v3/latest/Package/-aws-sdk-client-dynamodb/Interface/BatchGetItemCommandOutput/) + +
+
+ +BatchWriteItem + + +[Command API Reference](https://docs.aws.amazon.com/AWSJavaScriptSDK/v3/latest/client/dynamodb/command/BatchWriteItemCommand/) / [Input](https://docs.aws.amazon.com/AWSJavaScriptSDK/v3/latest/Package/-aws-sdk-client-dynamodb/Interface/BatchWriteItemCommandInput/) / [Output](https://docs.aws.amazon.com/AWSJavaScriptSDK/v3/latest/Package/-aws-sdk-client-dynamodb/Interface/BatchWriteItemCommandOutput/) + +
+
+ +CreateBackup + + +[Command API Reference](https://docs.aws.amazon.com/AWSJavaScriptSDK/v3/latest/client/dynamodb/command/CreateBackupCommand/) / [Input](https://docs.aws.amazon.com/AWSJavaScriptSDK/v3/latest/Package/-aws-sdk-client-dynamodb/Interface/CreateBackupCommandInput/) / [Output](https://docs.aws.amazon.com/AWSJavaScriptSDK/v3/latest/Package/-aws-sdk-client-dynamodb/Interface/CreateBackupCommandOutput/) + +
+
+ +CreateGlobalTable + + +[Command API Reference](https://docs.aws.amazon.com/AWSJavaScriptSDK/v3/latest/client/dynamodb/command/CreateGlobalTableCommand/) / [Input](https://docs.aws.amazon.com/AWSJavaScriptSDK/v3/latest/Package/-aws-sdk-client-dynamodb/Interface/CreateGlobalTableCommandInput/) / [Output](https://docs.aws.amazon.com/AWSJavaScriptSDK/v3/latest/Package/-aws-sdk-client-dynamodb/Interface/CreateGlobalTableCommandOutput/) + +
+
+ +CreateTable + + +[Command API Reference](https://docs.aws.amazon.com/AWSJavaScriptSDK/v3/latest/client/dynamodb/command/CreateTableCommand/) / [Input](https://docs.aws.amazon.com/AWSJavaScriptSDK/v3/latest/Package/-aws-sdk-client-dynamodb/Interface/CreateTableCommandInput/) / [Output](https://docs.aws.amazon.com/AWSJavaScriptSDK/v3/latest/Package/-aws-sdk-client-dynamodb/Interface/CreateTableCommandOutput/) + +
+
+ +DeleteBackup + + +[Command API Reference](https://docs.aws.amazon.com/AWSJavaScriptSDK/v3/latest/client/dynamodb/command/DeleteBackupCommand/) / [Input](https://docs.aws.amazon.com/AWSJavaScriptSDK/v3/latest/Package/-aws-sdk-client-dynamodb/Interface/DeleteBackupCommandInput/) / [Output](https://docs.aws.amazon.com/AWSJavaScriptSDK/v3/latest/Package/-aws-sdk-client-dynamodb/Interface/DeleteBackupCommandOutput/) + +
+
+ +DeleteItem + + +[Command API Reference](https://docs.aws.amazon.com/AWSJavaScriptSDK/v3/latest/client/dynamodb/command/DeleteItemCommand/) / [Input](https://docs.aws.amazon.com/AWSJavaScriptSDK/v3/latest/Package/-aws-sdk-client-dynamodb/Interface/DeleteItemCommandInput/) / [Output](https://docs.aws.amazon.com/AWSJavaScriptSDK/v3/latest/Package/-aws-sdk-client-dynamodb/Interface/DeleteItemCommandOutput/) + +
+
+ +DeleteResourcePolicy + + +[Command API Reference](https://docs.aws.amazon.com/AWSJavaScriptSDK/v3/latest/client/dynamodb/command/DeleteResourcePolicyCommand/) / [Input](https://docs.aws.amazon.com/AWSJavaScriptSDK/v3/latest/Package/-aws-sdk-client-dynamodb/Interface/DeleteResourcePolicyCommandInput/) / [Output](https://docs.aws.amazon.com/AWSJavaScriptSDK/v3/latest/Package/-aws-sdk-client-dynamodb/Interface/DeleteResourcePolicyCommandOutput/) + +
+
+ +DeleteTable + + +[Command API Reference](https://docs.aws.amazon.com/AWSJavaScriptSDK/v3/latest/client/dynamodb/command/DeleteTableCommand/) / [Input](https://docs.aws.amazon.com/AWSJavaScriptSDK/v3/latest/Package/-aws-sdk-client-dynamodb/Interface/DeleteTableCommandInput/) / [Output](https://docs.aws.amazon.com/AWSJavaScriptSDK/v3/latest/Package/-aws-sdk-client-dynamodb/Interface/DeleteTableCommandOutput/) + +
+
+ +DescribeBackup + + +[Command API Reference](https://docs.aws.amazon.com/AWSJavaScriptSDK/v3/latest/client/dynamodb/command/DescribeBackupCommand/) / [Input](https://docs.aws.amazon.com/AWSJavaScriptSDK/v3/latest/Package/-aws-sdk-client-dynamodb/Interface/DescribeBackupCommandInput/) / [Output](https://docs.aws.amazon.com/AWSJavaScriptSDK/v3/latest/Package/-aws-sdk-client-dynamodb/Interface/DescribeBackupCommandOutput/) + +
+
+ +DescribeContinuousBackups + + +[Command API Reference](https://docs.aws.amazon.com/AWSJavaScriptSDK/v3/latest/client/dynamodb/command/DescribeContinuousBackupsCommand/) / [Input](https://docs.aws.amazon.com/AWSJavaScriptSDK/v3/latest/Package/-aws-sdk-client-dynamodb/Interface/DescribeContinuousBackupsCommandInput/) / [Output](https://docs.aws.amazon.com/AWSJavaScriptSDK/v3/latest/Package/-aws-sdk-client-dynamodb/Interface/DescribeContinuousBackupsCommandOutput/) + +
+
+ +DescribeContributorInsights + + +[Command API Reference](https://docs.aws.amazon.com/AWSJavaScriptSDK/v3/latest/client/dynamodb/command/DescribeContributorInsightsCommand/) / [Input](https://docs.aws.amazon.com/AWSJavaScriptSDK/v3/latest/Package/-aws-sdk-client-dynamodb/Interface/DescribeContributorInsightsCommandInput/) / [Output](https://docs.aws.amazon.com/AWSJavaScriptSDK/v3/latest/Package/-aws-sdk-client-dynamodb/Interface/DescribeContributorInsightsCommandOutput/) + +
+
+ +DescribeEndpoints + + +[Command API Reference](https://docs.aws.amazon.com/AWSJavaScriptSDK/v3/latest/client/dynamodb/command/DescribeEndpointsCommand/) / [Input](https://docs.aws.amazon.com/AWSJavaScriptSDK/v3/latest/Package/-aws-sdk-client-dynamodb/Interface/DescribeEndpointsCommandInput/) / [Output](https://docs.aws.amazon.com/AWSJavaScriptSDK/v3/latest/Package/-aws-sdk-client-dynamodb/Interface/DescribeEndpointsCommandOutput/) + +
+
+ +DescribeExport + + +[Command API Reference](https://docs.aws.amazon.com/AWSJavaScriptSDK/v3/latest/client/dynamodb/command/DescribeExportCommand/) / [Input](https://docs.aws.amazon.com/AWSJavaScriptSDK/v3/latest/Package/-aws-sdk-client-dynamodb/Interface/DescribeExportCommandInput/) / [Output](https://docs.aws.amazon.com/AWSJavaScriptSDK/v3/latest/Package/-aws-sdk-client-dynamodb/Interface/DescribeExportCommandOutput/) + +
+
+ +DescribeGlobalTable + + +[Command API Reference](https://docs.aws.amazon.com/AWSJavaScriptSDK/v3/latest/client/dynamodb/command/DescribeGlobalTableCommand/) / [Input](https://docs.aws.amazon.com/AWSJavaScriptSDK/v3/latest/Package/-aws-sdk-client-dynamodb/Interface/DescribeGlobalTableCommandInput/) / [Output](https://docs.aws.amazon.com/AWSJavaScriptSDK/v3/latest/Package/-aws-sdk-client-dynamodb/Interface/DescribeGlobalTableCommandOutput/) + +
+
+ +DescribeGlobalTableSettings + + +[Command API Reference](https://docs.aws.amazon.com/AWSJavaScriptSDK/v3/latest/client/dynamodb/command/DescribeGlobalTableSettingsCommand/) / [Input](https://docs.aws.amazon.com/AWSJavaScriptSDK/v3/latest/Package/-aws-sdk-client-dynamodb/Interface/DescribeGlobalTableSettingsCommandInput/) / [Output](https://docs.aws.amazon.com/AWSJavaScriptSDK/v3/latest/Package/-aws-sdk-client-dynamodb/Interface/DescribeGlobalTableSettingsCommandOutput/) + +
+
+ +DescribeImport + + +[Command API Reference](https://docs.aws.amazon.com/AWSJavaScriptSDK/v3/latest/client/dynamodb/command/DescribeImportCommand/) / [Input](https://docs.aws.amazon.com/AWSJavaScriptSDK/v3/latest/Package/-aws-sdk-client-dynamodb/Interface/DescribeImportCommandInput/) / [Output](https://docs.aws.amazon.com/AWSJavaScriptSDK/v3/latest/Package/-aws-sdk-client-dynamodb/Interface/DescribeImportCommandOutput/) + +
+
+ +DescribeKinesisStreamingDestination + + +[Command API Reference](https://docs.aws.amazon.com/AWSJavaScriptSDK/v3/latest/client/dynamodb/command/DescribeKinesisStreamingDestinationCommand/) / [Input](https://docs.aws.amazon.com/AWSJavaScriptSDK/v3/latest/Package/-aws-sdk-client-dynamodb/Interface/DescribeKinesisStreamingDestinationCommandInput/) / [Output](https://docs.aws.amazon.com/AWSJavaScriptSDK/v3/latest/Package/-aws-sdk-client-dynamodb/Interface/DescribeKinesisStreamingDestinationCommandOutput/) + +
+
+ +DescribeLimits + + +[Command API Reference](https://docs.aws.amazon.com/AWSJavaScriptSDK/v3/latest/client/dynamodb/command/DescribeLimitsCommand/) / [Input](https://docs.aws.amazon.com/AWSJavaScriptSDK/v3/latest/Package/-aws-sdk-client-dynamodb/Interface/DescribeLimitsCommandInput/) / [Output](https://docs.aws.amazon.com/AWSJavaScriptSDK/v3/latest/Package/-aws-sdk-client-dynamodb/Interface/DescribeLimitsCommandOutput/) + +
+
+ +DescribeTable + + +[Command API Reference](https://docs.aws.amazon.com/AWSJavaScriptSDK/v3/latest/client/dynamodb/command/DescribeTableCommand/) / [Input](https://docs.aws.amazon.com/AWSJavaScriptSDK/v3/latest/Package/-aws-sdk-client-dynamodb/Interface/DescribeTableCommandInput/) / [Output](https://docs.aws.amazon.com/AWSJavaScriptSDK/v3/latest/Package/-aws-sdk-client-dynamodb/Interface/DescribeTableCommandOutput/) + +
+
+ +DescribeTableReplicaAutoScaling + + +[Command API Reference](https://docs.aws.amazon.com/AWSJavaScriptSDK/v3/latest/client/dynamodb/command/DescribeTableReplicaAutoScalingCommand/) / [Input](https://docs.aws.amazon.com/AWSJavaScriptSDK/v3/latest/Package/-aws-sdk-client-dynamodb/Interface/DescribeTableReplicaAutoScalingCommandInput/) / [Output](https://docs.aws.amazon.com/AWSJavaScriptSDK/v3/latest/Package/-aws-sdk-client-dynamodb/Interface/DescribeTableReplicaAutoScalingCommandOutput/) + +
+
+ +DescribeTimeToLive + + +[Command API Reference](https://docs.aws.amazon.com/AWSJavaScriptSDK/v3/latest/client/dynamodb/command/DescribeTimeToLiveCommand/) / [Input](https://docs.aws.amazon.com/AWSJavaScriptSDK/v3/latest/Package/-aws-sdk-client-dynamodb/Interface/DescribeTimeToLiveCommandInput/) / [Output](https://docs.aws.amazon.com/AWSJavaScriptSDK/v3/latest/Package/-aws-sdk-client-dynamodb/Interface/DescribeTimeToLiveCommandOutput/) + +
+
+ +DisableKinesisStreamingDestination + + +[Command API Reference](https://docs.aws.amazon.com/AWSJavaScriptSDK/v3/latest/client/dynamodb/command/DisableKinesisStreamingDestinationCommand/) / [Input](https://docs.aws.amazon.com/AWSJavaScriptSDK/v3/latest/Package/-aws-sdk-client-dynamodb/Interface/DisableKinesisStreamingDestinationCommandInput/) / [Output](https://docs.aws.amazon.com/AWSJavaScriptSDK/v3/latest/Package/-aws-sdk-client-dynamodb/Interface/DisableKinesisStreamingDestinationCommandOutput/) + +
+
+ +EnableKinesisStreamingDestination + + +[Command API Reference](https://docs.aws.amazon.com/AWSJavaScriptSDK/v3/latest/client/dynamodb/command/EnableKinesisStreamingDestinationCommand/) / [Input](https://docs.aws.amazon.com/AWSJavaScriptSDK/v3/latest/Package/-aws-sdk-client-dynamodb/Interface/EnableKinesisStreamingDestinationCommandInput/) / [Output](https://docs.aws.amazon.com/AWSJavaScriptSDK/v3/latest/Package/-aws-sdk-client-dynamodb/Interface/EnableKinesisStreamingDestinationCommandOutput/) + +
+
+ +ExecuteStatement + + +[Command API Reference](https://docs.aws.amazon.com/AWSJavaScriptSDK/v3/latest/client/dynamodb/command/ExecuteStatementCommand/) / [Input](https://docs.aws.amazon.com/AWSJavaScriptSDK/v3/latest/Package/-aws-sdk-client-dynamodb/Interface/ExecuteStatementCommandInput/) / [Output](https://docs.aws.amazon.com/AWSJavaScriptSDK/v3/latest/Package/-aws-sdk-client-dynamodb/Interface/ExecuteStatementCommandOutput/) + +
+
+ +ExecuteTransaction + + +[Command API Reference](https://docs.aws.amazon.com/AWSJavaScriptSDK/v3/latest/client/dynamodb/command/ExecuteTransactionCommand/) / [Input](https://docs.aws.amazon.com/AWSJavaScriptSDK/v3/latest/Package/-aws-sdk-client-dynamodb/Interface/ExecuteTransactionCommandInput/) / [Output](https://docs.aws.amazon.com/AWSJavaScriptSDK/v3/latest/Package/-aws-sdk-client-dynamodb/Interface/ExecuteTransactionCommandOutput/) + +
+
+ +ExportTableToPointInTime + + +[Command API Reference](https://docs.aws.amazon.com/AWSJavaScriptSDK/v3/latest/client/dynamodb/command/ExportTableToPointInTimeCommand/) / [Input](https://docs.aws.amazon.com/AWSJavaScriptSDK/v3/latest/Package/-aws-sdk-client-dynamodb/Interface/ExportTableToPointInTimeCommandInput/) / [Output](https://docs.aws.amazon.com/AWSJavaScriptSDK/v3/latest/Package/-aws-sdk-client-dynamodb/Interface/ExportTableToPointInTimeCommandOutput/) + +
+
+ +GetItem + + +[Command API Reference](https://docs.aws.amazon.com/AWSJavaScriptSDK/v3/latest/client/dynamodb/command/GetItemCommand/) / [Input](https://docs.aws.amazon.com/AWSJavaScriptSDK/v3/latest/Package/-aws-sdk-client-dynamodb/Interface/GetItemCommandInput/) / [Output](https://docs.aws.amazon.com/AWSJavaScriptSDK/v3/latest/Package/-aws-sdk-client-dynamodb/Interface/GetItemCommandOutput/) + +
+
+ +GetResourcePolicy + + +[Command API Reference](https://docs.aws.amazon.com/AWSJavaScriptSDK/v3/latest/client/dynamodb/command/GetResourcePolicyCommand/) / [Input](https://docs.aws.amazon.com/AWSJavaScriptSDK/v3/latest/Package/-aws-sdk-client-dynamodb/Interface/GetResourcePolicyCommandInput/) / [Output](https://docs.aws.amazon.com/AWSJavaScriptSDK/v3/latest/Package/-aws-sdk-client-dynamodb/Interface/GetResourcePolicyCommandOutput/) + +
+
+ +ImportTable + + +[Command API Reference](https://docs.aws.amazon.com/AWSJavaScriptSDK/v3/latest/client/dynamodb/command/ImportTableCommand/) / [Input](https://docs.aws.amazon.com/AWSJavaScriptSDK/v3/latest/Package/-aws-sdk-client-dynamodb/Interface/ImportTableCommandInput/) / [Output](https://docs.aws.amazon.com/AWSJavaScriptSDK/v3/latest/Package/-aws-sdk-client-dynamodb/Interface/ImportTableCommandOutput/) + +
+
+ +ListBackups + + +[Command API Reference](https://docs.aws.amazon.com/AWSJavaScriptSDK/v3/latest/client/dynamodb/command/ListBackupsCommand/) / [Input](https://docs.aws.amazon.com/AWSJavaScriptSDK/v3/latest/Package/-aws-sdk-client-dynamodb/Interface/ListBackupsCommandInput/) / [Output](https://docs.aws.amazon.com/AWSJavaScriptSDK/v3/latest/Package/-aws-sdk-client-dynamodb/Interface/ListBackupsCommandOutput/) + +
+
+ +ListContributorInsights + + +[Command API Reference](https://docs.aws.amazon.com/AWSJavaScriptSDK/v3/latest/client/dynamodb/command/ListContributorInsightsCommand/) / [Input](https://docs.aws.amazon.com/AWSJavaScriptSDK/v3/latest/Package/-aws-sdk-client-dynamodb/Interface/ListContributorInsightsCommandInput/) / [Output](https://docs.aws.amazon.com/AWSJavaScriptSDK/v3/latest/Package/-aws-sdk-client-dynamodb/Interface/ListContributorInsightsCommandOutput/) + +
+
+ +ListExports + + +[Command API Reference](https://docs.aws.amazon.com/AWSJavaScriptSDK/v3/latest/client/dynamodb/command/ListExportsCommand/) / [Input](https://docs.aws.amazon.com/AWSJavaScriptSDK/v3/latest/Package/-aws-sdk-client-dynamodb/Interface/ListExportsCommandInput/) / [Output](https://docs.aws.amazon.com/AWSJavaScriptSDK/v3/latest/Package/-aws-sdk-client-dynamodb/Interface/ListExportsCommandOutput/) + +
+
+ +ListGlobalTables + + +[Command API Reference](https://docs.aws.amazon.com/AWSJavaScriptSDK/v3/latest/client/dynamodb/command/ListGlobalTablesCommand/) / [Input](https://docs.aws.amazon.com/AWSJavaScriptSDK/v3/latest/Package/-aws-sdk-client-dynamodb/Interface/ListGlobalTablesCommandInput/) / [Output](https://docs.aws.amazon.com/AWSJavaScriptSDK/v3/latest/Package/-aws-sdk-client-dynamodb/Interface/ListGlobalTablesCommandOutput/) + +
+
+ +ListImports + + +[Command API Reference](https://docs.aws.amazon.com/AWSJavaScriptSDK/v3/latest/client/dynamodb/command/ListImportsCommand/) / [Input](https://docs.aws.amazon.com/AWSJavaScriptSDK/v3/latest/Package/-aws-sdk-client-dynamodb/Interface/ListImportsCommandInput/) / [Output](https://docs.aws.amazon.com/AWSJavaScriptSDK/v3/latest/Package/-aws-sdk-client-dynamodb/Interface/ListImportsCommandOutput/) + +
+
+ +ListTables + + +[Command API Reference](https://docs.aws.amazon.com/AWSJavaScriptSDK/v3/latest/client/dynamodb/command/ListTablesCommand/) / [Input](https://docs.aws.amazon.com/AWSJavaScriptSDK/v3/latest/Package/-aws-sdk-client-dynamodb/Interface/ListTablesCommandInput/) / [Output](https://docs.aws.amazon.com/AWSJavaScriptSDK/v3/latest/Package/-aws-sdk-client-dynamodb/Interface/ListTablesCommandOutput/) + +
+
+ +ListTagsOfResource + + +[Command API Reference](https://docs.aws.amazon.com/AWSJavaScriptSDK/v3/latest/client/dynamodb/command/ListTagsOfResourceCommand/) / [Input](https://docs.aws.amazon.com/AWSJavaScriptSDK/v3/latest/Package/-aws-sdk-client-dynamodb/Interface/ListTagsOfResourceCommandInput/) / [Output](https://docs.aws.amazon.com/AWSJavaScriptSDK/v3/latest/Package/-aws-sdk-client-dynamodb/Interface/ListTagsOfResourceCommandOutput/) + +
+
+ +PutItem + + +[Command API Reference](https://docs.aws.amazon.com/AWSJavaScriptSDK/v3/latest/client/dynamodb/command/PutItemCommand/) / [Input](https://docs.aws.amazon.com/AWSJavaScriptSDK/v3/latest/Package/-aws-sdk-client-dynamodb/Interface/PutItemCommandInput/) / [Output](https://docs.aws.amazon.com/AWSJavaScriptSDK/v3/latest/Package/-aws-sdk-client-dynamodb/Interface/PutItemCommandOutput/) + +
+
+ +PutResourcePolicy + + +[Command API Reference](https://docs.aws.amazon.com/AWSJavaScriptSDK/v3/latest/client/dynamodb/command/PutResourcePolicyCommand/) / [Input](https://docs.aws.amazon.com/AWSJavaScriptSDK/v3/latest/Package/-aws-sdk-client-dynamodb/Interface/PutResourcePolicyCommandInput/) / [Output](https://docs.aws.amazon.com/AWSJavaScriptSDK/v3/latest/Package/-aws-sdk-client-dynamodb/Interface/PutResourcePolicyCommandOutput/) + +
+
+ +Query + + +[Command API Reference](https://docs.aws.amazon.com/AWSJavaScriptSDK/v3/latest/client/dynamodb/command/QueryCommand/) / [Input](https://docs.aws.amazon.com/AWSJavaScriptSDK/v3/latest/Package/-aws-sdk-client-dynamodb/Interface/QueryCommandInput/) / [Output](https://docs.aws.amazon.com/AWSJavaScriptSDK/v3/latest/Package/-aws-sdk-client-dynamodb/Interface/QueryCommandOutput/) + +
+
+ +RestoreTableFromBackup + + +[Command API Reference](https://docs.aws.amazon.com/AWSJavaScriptSDK/v3/latest/client/dynamodb/command/RestoreTableFromBackupCommand/) / [Input](https://docs.aws.amazon.com/AWSJavaScriptSDK/v3/latest/Package/-aws-sdk-client-dynamodb/Interface/RestoreTableFromBackupCommandInput/) / [Output](https://docs.aws.amazon.com/AWSJavaScriptSDK/v3/latest/Package/-aws-sdk-client-dynamodb/Interface/RestoreTableFromBackupCommandOutput/) + +
+
+ +RestoreTableToPointInTime + + +[Command API Reference](https://docs.aws.amazon.com/AWSJavaScriptSDK/v3/latest/client/dynamodb/command/RestoreTableToPointInTimeCommand/) / [Input](https://docs.aws.amazon.com/AWSJavaScriptSDK/v3/latest/Package/-aws-sdk-client-dynamodb/Interface/RestoreTableToPointInTimeCommandInput/) / [Output](https://docs.aws.amazon.com/AWSJavaScriptSDK/v3/latest/Package/-aws-sdk-client-dynamodb/Interface/RestoreTableToPointInTimeCommandOutput/) + +
+
+ +Scan + + +[Command API Reference](https://docs.aws.amazon.com/AWSJavaScriptSDK/v3/latest/client/dynamodb/command/ScanCommand/) / [Input](https://docs.aws.amazon.com/AWSJavaScriptSDK/v3/latest/Package/-aws-sdk-client-dynamodb/Interface/ScanCommandInput/) / [Output](https://docs.aws.amazon.com/AWSJavaScriptSDK/v3/latest/Package/-aws-sdk-client-dynamodb/Interface/ScanCommandOutput/) + +
+
+ +TagResource + + +[Command API Reference](https://docs.aws.amazon.com/AWSJavaScriptSDK/v3/latest/client/dynamodb/command/TagResourceCommand/) / [Input](https://docs.aws.amazon.com/AWSJavaScriptSDK/v3/latest/Package/-aws-sdk-client-dynamodb/Interface/TagResourceCommandInput/) / [Output](https://docs.aws.amazon.com/AWSJavaScriptSDK/v3/latest/Package/-aws-sdk-client-dynamodb/Interface/TagResourceCommandOutput/) + +
+
+ +TransactGetItems + + +[Command API Reference](https://docs.aws.amazon.com/AWSJavaScriptSDK/v3/latest/client/dynamodb/command/TransactGetItemsCommand/) / [Input](https://docs.aws.amazon.com/AWSJavaScriptSDK/v3/latest/Package/-aws-sdk-client-dynamodb/Interface/TransactGetItemsCommandInput/) / [Output](https://docs.aws.amazon.com/AWSJavaScriptSDK/v3/latest/Package/-aws-sdk-client-dynamodb/Interface/TransactGetItemsCommandOutput/) + +
+
+ +TransactWriteItems + + +[Command API Reference](https://docs.aws.amazon.com/AWSJavaScriptSDK/v3/latest/client/dynamodb/command/TransactWriteItemsCommand/) / [Input](https://docs.aws.amazon.com/AWSJavaScriptSDK/v3/latest/Package/-aws-sdk-client-dynamodb/Interface/TransactWriteItemsCommandInput/) / [Output](https://docs.aws.amazon.com/AWSJavaScriptSDK/v3/latest/Package/-aws-sdk-client-dynamodb/Interface/TransactWriteItemsCommandOutput/) + +
+
+ +UntagResource + + +[Command API Reference](https://docs.aws.amazon.com/AWSJavaScriptSDK/v3/latest/client/dynamodb/command/UntagResourceCommand/) / [Input](https://docs.aws.amazon.com/AWSJavaScriptSDK/v3/latest/Package/-aws-sdk-client-dynamodb/Interface/UntagResourceCommandInput/) / [Output](https://docs.aws.amazon.com/AWSJavaScriptSDK/v3/latest/Package/-aws-sdk-client-dynamodb/Interface/UntagResourceCommandOutput/) + +
+
+ +UpdateContinuousBackups + + +[Command API Reference](https://docs.aws.amazon.com/AWSJavaScriptSDK/v3/latest/client/dynamodb/command/UpdateContinuousBackupsCommand/) / [Input](https://docs.aws.amazon.com/AWSJavaScriptSDK/v3/latest/Package/-aws-sdk-client-dynamodb/Interface/UpdateContinuousBackupsCommandInput/) / [Output](https://docs.aws.amazon.com/AWSJavaScriptSDK/v3/latest/Package/-aws-sdk-client-dynamodb/Interface/UpdateContinuousBackupsCommandOutput/) + +
+
+ +UpdateContributorInsights + + +[Command API Reference](https://docs.aws.amazon.com/AWSJavaScriptSDK/v3/latest/client/dynamodb/command/UpdateContributorInsightsCommand/) / [Input](https://docs.aws.amazon.com/AWSJavaScriptSDK/v3/latest/Package/-aws-sdk-client-dynamodb/Interface/UpdateContributorInsightsCommandInput/) / [Output](https://docs.aws.amazon.com/AWSJavaScriptSDK/v3/latest/Package/-aws-sdk-client-dynamodb/Interface/UpdateContributorInsightsCommandOutput/) + +
+
+ +UpdateGlobalTable + + +[Command API Reference](https://docs.aws.amazon.com/AWSJavaScriptSDK/v3/latest/client/dynamodb/command/UpdateGlobalTableCommand/) / [Input](https://docs.aws.amazon.com/AWSJavaScriptSDK/v3/latest/Package/-aws-sdk-client-dynamodb/Interface/UpdateGlobalTableCommandInput/) / [Output](https://docs.aws.amazon.com/AWSJavaScriptSDK/v3/latest/Package/-aws-sdk-client-dynamodb/Interface/UpdateGlobalTableCommandOutput/) + +
+
+ +UpdateGlobalTableSettings + + +[Command API Reference](https://docs.aws.amazon.com/AWSJavaScriptSDK/v3/latest/client/dynamodb/command/UpdateGlobalTableSettingsCommand/) / [Input](https://docs.aws.amazon.com/AWSJavaScriptSDK/v3/latest/Package/-aws-sdk-client-dynamodb/Interface/UpdateGlobalTableSettingsCommandInput/) / [Output](https://docs.aws.amazon.com/AWSJavaScriptSDK/v3/latest/Package/-aws-sdk-client-dynamodb/Interface/UpdateGlobalTableSettingsCommandOutput/) + +
+
+ +UpdateItem + + +[Command API Reference](https://docs.aws.amazon.com/AWSJavaScriptSDK/v3/latest/client/dynamodb/command/UpdateItemCommand/) / [Input](https://docs.aws.amazon.com/AWSJavaScriptSDK/v3/latest/Package/-aws-sdk-client-dynamodb/Interface/UpdateItemCommandInput/) / [Output](https://docs.aws.amazon.com/AWSJavaScriptSDK/v3/latest/Package/-aws-sdk-client-dynamodb/Interface/UpdateItemCommandOutput/) + +
+
+ +UpdateKinesisStreamingDestination + + +[Command API Reference](https://docs.aws.amazon.com/AWSJavaScriptSDK/v3/latest/client/dynamodb/command/UpdateKinesisStreamingDestinationCommand/) / [Input](https://docs.aws.amazon.com/AWSJavaScriptSDK/v3/latest/Package/-aws-sdk-client-dynamodb/Interface/UpdateKinesisStreamingDestinationCommandInput/) / [Output](https://docs.aws.amazon.com/AWSJavaScriptSDK/v3/latest/Package/-aws-sdk-client-dynamodb/Interface/UpdateKinesisStreamingDestinationCommandOutput/) + +
+
+ +UpdateTable + + +[Command API Reference](https://docs.aws.amazon.com/AWSJavaScriptSDK/v3/latest/client/dynamodb/command/UpdateTableCommand/) / [Input](https://docs.aws.amazon.com/AWSJavaScriptSDK/v3/latest/Package/-aws-sdk-client-dynamodb/Interface/UpdateTableCommandInput/) / [Output](https://docs.aws.amazon.com/AWSJavaScriptSDK/v3/latest/Package/-aws-sdk-client-dynamodb/Interface/UpdateTableCommandOutput/) + +
+
+ +UpdateTableReplicaAutoScaling + + +[Command API Reference](https://docs.aws.amazon.com/AWSJavaScriptSDK/v3/latest/client/dynamodb/command/UpdateTableReplicaAutoScalingCommand/) / [Input](https://docs.aws.amazon.com/AWSJavaScriptSDK/v3/latest/Package/-aws-sdk-client-dynamodb/Interface/UpdateTableReplicaAutoScalingCommandInput/) / [Output](https://docs.aws.amazon.com/AWSJavaScriptSDK/v3/latest/Package/-aws-sdk-client-dynamodb/Interface/UpdateTableReplicaAutoScalingCommandOutput/) + +
+
+ +UpdateTimeToLive + + +[Command API Reference](https://docs.aws.amazon.com/AWSJavaScriptSDK/v3/latest/client/dynamodb/command/UpdateTimeToLiveCommand/) / [Input](https://docs.aws.amazon.com/AWSJavaScriptSDK/v3/latest/Package/-aws-sdk-client-dynamodb/Interface/UpdateTimeToLiveCommandInput/) / [Output](https://docs.aws.amazon.com/AWSJavaScriptSDK/v3/latest/Package/-aws-sdk-client-dynamodb/Interface/UpdateTimeToLiveCommandOutput/) + +
diff --git a/amplify/functions/downloadDocument/node_modules/@aws-sdk/client-dynamodb/dist-cjs/auth/httpAuthSchemeProvider.js b/amplify/functions/downloadDocument/node_modules/@aws-sdk/client-dynamodb/dist-cjs/auth/httpAuthSchemeProvider.js new file mode 100644 index 0000000..db59164 --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@aws-sdk/client-dynamodb/dist-cjs/auth/httpAuthSchemeProvider.js @@ -0,0 +1,47 @@ +"use strict"; +Object.defineProperty(exports, "__esModule", { value: true }); +exports.resolveHttpAuthSchemeConfig = exports.defaultDynamoDBHttpAuthSchemeProvider = exports.defaultDynamoDBHttpAuthSchemeParametersProvider = void 0; +const core_1 = require("@aws-sdk/core"); +const util_middleware_1 = require("@smithy/util-middleware"); +const defaultDynamoDBHttpAuthSchemeParametersProvider = async (config, context, input) => { + return { + operation: (0, util_middleware_1.getSmithyContext)(context).operation, + region: (await (0, util_middleware_1.normalizeProvider)(config.region)()) || + (() => { + throw new Error("expected `region` to be configured for `aws.auth#sigv4`"); + })(), + }; +}; +exports.defaultDynamoDBHttpAuthSchemeParametersProvider = defaultDynamoDBHttpAuthSchemeParametersProvider; +function createAwsAuthSigv4HttpAuthOption(authParameters) { + return { + schemeId: "aws.auth#sigv4", + signingProperties: { + name: "dynamodb", + region: authParameters.region, + }, + propertiesExtractor: (config, context) => ({ + signingProperties: { + config, + context, + }, + }), + }; +} +const defaultDynamoDBHttpAuthSchemeProvider = (authParameters) => { + const options = []; + switch (authParameters.operation) { + default: { + options.push(createAwsAuthSigv4HttpAuthOption(authParameters)); + } + } + return options; +}; +exports.defaultDynamoDBHttpAuthSchemeProvider = defaultDynamoDBHttpAuthSchemeProvider; +const resolveHttpAuthSchemeConfig = (config) => { + const config_0 = (0, core_1.resolveAwsSdkSigV4Config)(config); + return 
Object.assign(config_0, { + authSchemePreference: (0, util_middleware_1.normalizeProvider)(config.authSchemePreference ?? []), + }); +}; +exports.resolveHttpAuthSchemeConfig = resolveHttpAuthSchemeConfig; diff --git a/amplify/functions/downloadDocument/node_modules/@aws-sdk/client-dynamodb/dist-cjs/endpoint/endpointResolver.js b/amplify/functions/downloadDocument/node_modules/@aws-sdk/client-dynamodb/dist-cjs/endpoint/endpointResolver.js new file mode 100644 index 0000000..1df276c --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@aws-sdk/client-dynamodb/dist-cjs/endpoint/endpointResolver.js @@ -0,0 +1,27 @@ +"use strict"; +Object.defineProperty(exports, "__esModule", { value: true }); +exports.defaultEndpointResolver = void 0; +const util_endpoints_1 = require("@aws-sdk/util-endpoints"); +const util_endpoints_2 = require("@smithy/util-endpoints"); +const ruleset_1 = require("./ruleset"); +const cache = new util_endpoints_2.EndpointCache({ + size: 50, + params: [ + "AccountId", + "AccountIdEndpointMode", + "Endpoint", + "Region", + "ResourceArn", + "ResourceArnList", + "UseDualStack", + "UseFIPS", + ], +}); +const defaultEndpointResolver = (endpointParams, context = {}) => { + return cache.get(endpointParams, () => (0, util_endpoints_2.resolveEndpoint)(ruleset_1.ruleSet, { + endpointParams: endpointParams, + logger: context.logger, + })); +}; +exports.defaultEndpointResolver = defaultEndpointResolver; +util_endpoints_2.customEndpointFunctions.aws = util_endpoints_1.awsEndpointFunctions; diff --git a/amplify/functions/downloadDocument/node_modules/@aws-sdk/client-dynamodb/dist-cjs/endpoint/ruleset.js b/amplify/functions/downloadDocument/node_modules/@aws-sdk/client-dynamodb/dist-cjs/endpoint/ruleset.js new file mode 100644 index 0000000..0a9f993 --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@aws-sdk/client-dynamodb/dist-cjs/endpoint/ruleset.js @@ -0,0 +1,7 @@ +"use strict"; +Object.defineProperty(exports, "__esModule", { 
value: true }); +exports.ruleSet = void 0; +const S = "required", T = "type", U = "fn", V = "argv", W = "ref", X = "properties", Y = "headers"; +const a = false, b = "isSet", c = "error", d = "endpoint", e = "tree", f = "PartitionResult", g = "stringEquals", h = "dynamodb", i = "getAttr", j = "aws.parseArn", k = "ParsedArn", l = "isValidHostLabel", m = "FirstArn", n = { [S]: false, [T]: "String" }, o = { [S]: true, "default": false, [T]: "Boolean" }, p = { [U]: "booleanEquals", [V]: [{ [W]: "UseFIPS" }, true] }, q = { [U]: "booleanEquals", [V]: [{ [W]: "UseDualStack" }, true] }, r = {}, s = { [W]: "Region" }, t = { [U]: "booleanEquals", [V]: [{ [U]: i, [V]: [{ [W]: f }, "supportsFIPS"] }, true] }, u = { [U]: "booleanEquals", [V]: [{ [U]: i, [V]: [{ [W]: f }, "supportsDualStack"] }, true] }, v = { "conditions": [{ [U]: b, [V]: [{ [W]: "AccountIdEndpointMode" }] }, { [U]: g, [V]: [{ [W]: "AccountIdEndpointMode" }, "required"] }], "rules": [{ [c]: "Invalid Configuration: AccountIdEndpointMode is required and FIPS is enabled, but FIPS account endpoints are not supported", [T]: c }], [T]: e }, w = { [U]: b, [V]: [{ [W]: "AccountIdEndpointMode" }] }, x = { [c]: "Invalid Configuration: AccountIdEndpointMode is required and FIPS is enabled, but FIPS account endpoints are not supported", [T]: c }, y = { [U]: i, [V]: [{ [W]: f }, "name"] }, z = { [d]: { "url": "https://dynamodb.{Region}.{PartitionResult#dnsSuffix}", [X]: {}, [Y]: {} }, [T]: d }, A = { [U]: "not", [V]: [p] }, B = { [c]: "Invalid Configuration: AccountIdEndpointMode is required and DualStack is enabled, but DualStack account endpoints are not supported", [T]: c }, C = { [U]: "not", [V]: [{ [U]: g, [V]: [{ [W]: "AccountIdEndpointMode" }, "disabled"] }] }, D = { [U]: g, [V]: [y, "aws"] }, E = { [U]: "not", [V]: [q] }, F = { [U]: g, [V]: [{ [U]: i, [V]: [{ [W]: k }, "service"] }, h] }, G = { [U]: l, [V]: [{ [U]: i, [V]: [{ [W]: k }, "region"] }, false] }, H = { [U]: g, [V]: [{ [U]: i, [V]: [{ [W]: k }, "region"] 
}, "{Region}"] }, I = { [U]: l, [V]: [{ [U]: i, [V]: [{ [W]: k }, "accountId"] }, false] }, J = { "url": "https://{ParsedArn#accountId}.ddb.{Region}.{PartitionResult#dnsSuffix}", [X]: {}, [Y]: {} }, K = { [W]: "ResourceArnList" }, L = { [W]: "AccountId" }, M = [p], N = [q], O = [s], P = [w, { [U]: g, [V]: [{ [W]: "AccountIdEndpointMode" }, "required"] }], Q = [A], R = [{ [W]: "ResourceArn" }]; +const _data = { version: "1.0", parameters: { Region: n, UseDualStack: o, UseFIPS: o, Endpoint: n, AccountId: n, AccountIdEndpointMode: n, ResourceArn: n, ResourceArnList: { [S]: a, [T]: "stringArray" } }, rules: [{ conditions: [{ [U]: b, [V]: [{ [W]: "Endpoint" }] }], rules: [{ conditions: M, error: "Invalid Configuration: FIPS and custom endpoint are not supported", [T]: c }, { conditions: N, error: "Invalid Configuration: Dualstack and custom endpoint are not supported", [T]: c }, { endpoint: { url: "{Endpoint}", [X]: r, [Y]: r }, [T]: d }], [T]: e }, { conditions: [{ [U]: b, [V]: O }], rules: [{ conditions: [{ [U]: "aws.partition", [V]: O, assign: f }], rules: [{ conditions: [{ [U]: g, [V]: [s, "local"] }], rules: [{ conditions: M, error: "Invalid Configuration: FIPS and local endpoint are not supported", [T]: c }, { conditions: N, error: "Invalid Configuration: Dualstack and local endpoint are not supported", [T]: c }, { endpoint: { url: "http://localhost:8000", [X]: { authSchemes: [{ signingRegion: "us-east-1", name: "sigv4", signingName: h }] }, [Y]: r }, [T]: d }], [T]: e }, { conditions: [p, q], rules: [{ conditions: [t, u], rules: [v, { endpoint: { url: "https://dynamodb-fips.{Region}.{PartitionResult#dualStackDnsSuffix}", [X]: r, [Y]: r }, [T]: d }], [T]: e }, { error: "FIPS and DualStack are enabled, but this partition does not support one or both", [T]: c }], [T]: e }, { conditions: M, rules: [{ conditions: [t], rules: [{ conditions: [{ [U]: g, [V]: [y, "aws-us-gov"] }], rules: [v, z], [T]: e }, v, { endpoint: { url: 
"https://dynamodb-fips.{Region}.{PartitionResult#dnsSuffix}", [X]: r, [Y]: r }, [T]: d }], [T]: e }, { error: "FIPS is enabled but this partition does not support FIPS", [T]: c }], [T]: e }, { conditions: N, rules: [{ conditions: [u], rules: [{ conditions: P, rules: [{ conditions: Q, rules: [B], [T]: e }, x], [T]: e }, { endpoint: { url: "https://dynamodb.{Region}.{PartitionResult#dualStackDnsSuffix}", [X]: r, [Y]: r }, [T]: d }], [T]: e }, { error: "DualStack is enabled but this partition does not support DualStack", [T]: c }], [T]: e }, { conditions: [w, C, D, A, E, { [U]: b, [V]: R }, { [U]: j, [V]: R, assign: k }, F, G, H, I], endpoint: J, [T]: d }, { conditions: [w, C, D, A, E, { [U]: b, [V]: [K] }, { [U]: i, [V]: [K, "[0]"], assign: m }, { [U]: j, [V]: [{ [W]: m }], assign: k }, F, G, H, I], endpoint: J, [T]: d }, { conditions: [w, C, D, A, E, { [U]: b, [V]: [L] }], rules: [{ conditions: [{ [U]: l, [V]: [L, a] }], rules: [{ endpoint: { url: "https://{AccountId}.ddb.{Region}.{PartitionResult#dnsSuffix}", [X]: r, [Y]: r }, [T]: d }], [T]: e }, { error: "Credentials-sourced account ID parameter is invalid", [T]: c }], [T]: e }, { conditions: P, rules: [{ conditions: Q, rules: [{ conditions: [E], rules: [{ conditions: [D], rules: [{ error: "AccountIdEndpointMode is required but no AccountID was provided or able to be loaded", [T]: c }], [T]: e }, { error: "Invalid Configuration: AccountIdEndpointMode is required but account endpoints are not supported in this partition", [T]: c }], [T]: e }, B], [T]: e }, x], [T]: e }, z], [T]: e }], [T]: e }, { error: "Invalid Configuration: Missing Region", [T]: c }] }; +exports.ruleSet = _data; diff --git a/amplify/functions/downloadDocument/node_modules/@aws-sdk/client-dynamodb/dist-cjs/index.js b/amplify/functions/downloadDocument/node_modules/@aws-sdk/client-dynamodb/dist-cjs/index.js new file mode 100644 index 0000000..b29fd11 --- /dev/null +++ 
b/amplify/functions/downloadDocument/node_modules/@aws-sdk/client-dynamodb/dist-cjs/index.js @@ -0,0 +1,5561 @@ +"use strict"; +var __defProp = Object.defineProperty; +var __getOwnPropDesc = Object.getOwnPropertyDescriptor; +var __getOwnPropNames = Object.getOwnPropertyNames; +var __hasOwnProp = Object.prototype.hasOwnProperty; +var __name = (target, value) => __defProp(target, "name", { value, configurable: true }); +var __export = (target, all) => { + for (var name in all) + __defProp(target, name, { get: all[name], enumerable: true }); +}; +var __copyProps = (to, from, except, desc) => { + if (from && typeof from === "object" || typeof from === "function") { + for (let key of __getOwnPropNames(from)) + if (!__hasOwnProp.call(to, key) && key !== except) + __defProp(to, key, { get: () => from[key], enumerable: !(desc = __getOwnPropDesc(from, key)) || desc.enumerable }); + } + return to; +}; +var __toCommonJS = (mod) => __copyProps(__defProp({}, "__esModule", { value: true }), mod); + +// src/index.ts +var index_exports = {}; +__export(index_exports, { + ApproximateCreationDateTimePrecision: () => ApproximateCreationDateTimePrecision, + AttributeAction: () => AttributeAction, + AttributeValue: () => AttributeValue, + BackupInUseException: () => BackupInUseException, + BackupNotFoundException: () => BackupNotFoundException, + BackupStatus: () => BackupStatus, + BackupType: () => BackupType, + BackupTypeFilter: () => BackupTypeFilter, + BatchExecuteStatementCommand: () => BatchExecuteStatementCommand, + BatchGetItemCommand: () => BatchGetItemCommand, + BatchStatementErrorCodeEnum: () => BatchStatementErrorCodeEnum, + BatchWriteItemCommand: () => BatchWriteItemCommand, + BillingMode: () => BillingMode, + ComparisonOperator: () => ComparisonOperator, + ConditionalCheckFailedException: () => ConditionalCheckFailedException, + ConditionalOperator: () => ConditionalOperator, + ContinuousBackupsStatus: () => ContinuousBackupsStatus, + ContinuousBackupsUnavailableException: 
() => ContinuousBackupsUnavailableException, + ContributorInsightsAction: () => ContributorInsightsAction, + ContributorInsightsStatus: () => ContributorInsightsStatus, + CreateBackupCommand: () => CreateBackupCommand, + CreateGlobalTableCommand: () => CreateGlobalTableCommand, + CreateTableCommand: () => CreateTableCommand, + DeleteBackupCommand: () => DeleteBackupCommand, + DeleteItemCommand: () => DeleteItemCommand, + DeleteResourcePolicyCommand: () => DeleteResourcePolicyCommand, + DeleteTableCommand: () => DeleteTableCommand, + DescribeBackupCommand: () => DescribeBackupCommand, + DescribeContinuousBackupsCommand: () => DescribeContinuousBackupsCommand, + DescribeContributorInsightsCommand: () => DescribeContributorInsightsCommand, + DescribeEndpointsCommand: () => DescribeEndpointsCommand, + DescribeExportCommand: () => DescribeExportCommand, + DescribeGlobalTableCommand: () => DescribeGlobalTableCommand, + DescribeGlobalTableSettingsCommand: () => DescribeGlobalTableSettingsCommand, + DescribeImportCommand: () => DescribeImportCommand, + DescribeKinesisStreamingDestinationCommand: () => DescribeKinesisStreamingDestinationCommand, + DescribeLimitsCommand: () => DescribeLimitsCommand, + DescribeTableCommand: () => DescribeTableCommand, + DescribeTableReplicaAutoScalingCommand: () => DescribeTableReplicaAutoScalingCommand, + DescribeTimeToLiveCommand: () => DescribeTimeToLiveCommand, + DestinationStatus: () => DestinationStatus, + DisableKinesisStreamingDestinationCommand: () => DisableKinesisStreamingDestinationCommand, + DuplicateItemException: () => DuplicateItemException, + DynamoDB: () => DynamoDB, + DynamoDBClient: () => DynamoDBClient, + DynamoDBServiceException: () => DynamoDBServiceException, + EnableKinesisStreamingDestinationCommand: () => EnableKinesisStreamingDestinationCommand, + ExecuteStatementCommand: () => ExecuteStatementCommand, + ExecuteTransactionCommand: () => ExecuteTransactionCommand, + ExportConflictException: () => 
ExportConflictException, + ExportFormat: () => ExportFormat, + ExportNotFoundException: () => ExportNotFoundException, + ExportStatus: () => ExportStatus, + ExportTableToPointInTimeCommand: () => ExportTableToPointInTimeCommand, + ExportType: () => ExportType, + ExportViewType: () => ExportViewType, + GetItemCommand: () => GetItemCommand, + GetResourcePolicyCommand: () => GetResourcePolicyCommand, + GlobalTableAlreadyExistsException: () => GlobalTableAlreadyExistsException, + GlobalTableNotFoundException: () => GlobalTableNotFoundException, + GlobalTableStatus: () => GlobalTableStatus, + IdempotentParameterMismatchException: () => IdempotentParameterMismatchException, + ImportConflictException: () => ImportConflictException, + ImportNotFoundException: () => ImportNotFoundException, + ImportStatus: () => ImportStatus, + ImportTableCommand: () => ImportTableCommand, + IndexNotFoundException: () => IndexNotFoundException, + IndexStatus: () => IndexStatus, + InputCompressionType: () => InputCompressionType, + InputFormat: () => InputFormat, + InternalServerError: () => InternalServerError, + InvalidEndpointException: () => InvalidEndpointException, + InvalidExportTimeException: () => InvalidExportTimeException, + InvalidRestoreTimeException: () => InvalidRestoreTimeException, + ItemCollectionSizeLimitExceededException: () => ItemCollectionSizeLimitExceededException, + KeyType: () => KeyType, + LimitExceededException: () => LimitExceededException, + ListBackupsCommand: () => ListBackupsCommand, + ListContributorInsightsCommand: () => ListContributorInsightsCommand, + ListExportsCommand: () => ListExportsCommand, + ListGlobalTablesCommand: () => ListGlobalTablesCommand, + ListImportsCommand: () => ListImportsCommand, + ListTablesCommand: () => ListTablesCommand, + ListTagsOfResourceCommand: () => ListTagsOfResourceCommand, + MultiRegionConsistency: () => MultiRegionConsistency, + PointInTimeRecoveryStatus: () => PointInTimeRecoveryStatus, + 
PointInTimeRecoveryUnavailableException: () => PointInTimeRecoveryUnavailableException, + PolicyNotFoundException: () => PolicyNotFoundException, + ProjectionType: () => ProjectionType, + ProvisionedThroughputExceededException: () => ProvisionedThroughputExceededException, + PutItemCommand: () => PutItemCommand, + PutResourcePolicyCommand: () => PutResourcePolicyCommand, + QueryCommand: () => QueryCommand, + ReplicaAlreadyExistsException: () => ReplicaAlreadyExistsException, + ReplicaNotFoundException: () => ReplicaNotFoundException, + ReplicaStatus: () => ReplicaStatus, + ReplicatedWriteConflictException: () => ReplicatedWriteConflictException, + RequestLimitExceeded: () => RequestLimitExceeded, + ResourceInUseException: () => ResourceInUseException, + ResourceNotFoundException: () => ResourceNotFoundException, + RestoreTableFromBackupCommand: () => RestoreTableFromBackupCommand, + RestoreTableToPointInTimeCommand: () => RestoreTableToPointInTimeCommand, + ReturnConsumedCapacity: () => ReturnConsumedCapacity, + ReturnItemCollectionMetrics: () => ReturnItemCollectionMetrics, + ReturnValue: () => ReturnValue, + ReturnValuesOnConditionCheckFailure: () => ReturnValuesOnConditionCheckFailure, + S3SseAlgorithm: () => S3SseAlgorithm, + SSEStatus: () => SSEStatus, + SSEType: () => SSEType, + ScalarAttributeType: () => ScalarAttributeType, + ScanCommand: () => ScanCommand, + Select: () => Select, + StreamViewType: () => StreamViewType, + TableAlreadyExistsException: () => TableAlreadyExistsException, + TableClass: () => TableClass, + TableInUseException: () => TableInUseException, + TableNotFoundException: () => TableNotFoundException, + TableStatus: () => TableStatus, + TagResourceCommand: () => TagResourceCommand, + TimeToLiveStatus: () => TimeToLiveStatus, + TransactGetItemsCommand: () => TransactGetItemsCommand, + TransactWriteItemsCommand: () => TransactWriteItemsCommand, + TransactionCanceledException: () => TransactionCanceledException, + 
TransactionConflictException: () => TransactionConflictException, + TransactionInProgressException: () => TransactionInProgressException, + UntagResourceCommand: () => UntagResourceCommand, + UpdateContinuousBackupsCommand: () => UpdateContinuousBackupsCommand, + UpdateContributorInsightsCommand: () => UpdateContributorInsightsCommand, + UpdateGlobalTableCommand: () => UpdateGlobalTableCommand, + UpdateGlobalTableSettingsCommand: () => UpdateGlobalTableSettingsCommand, + UpdateItemCommand: () => UpdateItemCommand, + UpdateKinesisStreamingDestinationCommand: () => UpdateKinesisStreamingDestinationCommand, + UpdateTableCommand: () => UpdateTableCommand, + UpdateTableReplicaAutoScalingCommand: () => UpdateTableReplicaAutoScalingCommand, + UpdateTimeToLiveCommand: () => UpdateTimeToLiveCommand, + __Client: () => import_smithy_client.Client, + paginateListContributorInsights: () => paginateListContributorInsights, + paginateListExports: () => paginateListExports, + paginateListImports: () => paginateListImports, + paginateListTables: () => paginateListTables, + paginateQuery: () => paginateQuery, + paginateScan: () => paginateScan, + waitForTableExists: () => waitForTableExists, + waitForTableNotExists: () => waitForTableNotExists, + waitUntilTableExists: () => waitUntilTableExists, + waitUntilTableNotExists: () => waitUntilTableNotExists +}); +module.exports = __toCommonJS(index_exports); + +// src/DynamoDBClient.ts +var import_account_id_endpoint = require("@aws-sdk/core/account-id-endpoint"); +var import_middleware_endpoint_discovery = require("@aws-sdk/middleware-endpoint-discovery"); +var import_middleware_host_header = require("@aws-sdk/middleware-host-header"); +var import_middleware_logger = require("@aws-sdk/middleware-logger"); +var import_middleware_recursion_detection = require("@aws-sdk/middleware-recursion-detection"); +var import_middleware_user_agent = require("@aws-sdk/middleware-user-agent"); +var import_config_resolver = 
require("@smithy/config-resolver"); +var import_core2 = require("@smithy/core"); +var import_middleware_content_length = require("@smithy/middleware-content-length"); + +var import_middleware_retry = require("@smithy/middleware-retry"); + +var import_httpAuthSchemeProvider = require("./auth/httpAuthSchemeProvider"); + +// src/commands/DescribeEndpointsCommand.ts +var import_middleware_endpoint = require("@smithy/middleware-endpoint"); +var import_middleware_serde = require("@smithy/middleware-serde"); + + +// src/endpoint/EndpointParameters.ts +var resolveClientEndpointParameters = /* @__PURE__ */ __name((options) => { + return Object.assign(options, { + useDualstackEndpoint: options.useDualstackEndpoint ?? false, + useFipsEndpoint: options.useFipsEndpoint ?? false, + defaultSigningName: "dynamodb" + }); +}, "resolveClientEndpointParameters"); +var commonParams = { + UseFIPS: { type: "builtInParams", name: "useFipsEndpoint" }, + AccountId: { type: "builtInParams", name: "accountId" }, + Endpoint: { type: "builtInParams", name: "endpoint" }, + Region: { type: "builtInParams", name: "region" }, + UseDualStack: { type: "builtInParams", name: "useDualstackEndpoint" }, + AccountIdEndpointMode: { type: "builtInParams", name: "accountIdEndpointMode" } +}; + +// src/protocols/Aws_json1_0.ts +var import_core = require("@aws-sdk/core"); +var import_protocol_http = require("@smithy/protocol-http"); + +var import_uuid = require("uuid"); + +// src/models/DynamoDBServiceException.ts +var import_smithy_client = require("@smithy/smithy-client"); +var DynamoDBServiceException = class _DynamoDBServiceException extends import_smithy_client.ServiceException { + static { + __name(this, "DynamoDBServiceException"); + } + /** + * @internal + */ + constructor(options) { + super(options); + Object.setPrototypeOf(this, _DynamoDBServiceException.prototype); + } +}; + +// src/models/models_0.ts +var ApproximateCreationDateTimePrecision = { + MICROSECOND: "MICROSECOND", + MILLISECOND: 
"MILLISECOND" +}; +var AttributeAction = { + ADD: "ADD", + DELETE: "DELETE", + PUT: "PUT" +}; +var ScalarAttributeType = { + B: "B", + N: "N", + S: "S" +}; +var BackupStatus = { + AVAILABLE: "AVAILABLE", + CREATING: "CREATING", + DELETED: "DELETED" +}; +var BackupType = { + AWS_BACKUP: "AWS_BACKUP", + SYSTEM: "SYSTEM", + USER: "USER" +}; +var BillingMode = { + PAY_PER_REQUEST: "PAY_PER_REQUEST", + PROVISIONED: "PROVISIONED" +}; +var KeyType = { + HASH: "HASH", + RANGE: "RANGE" +}; +var ProjectionType = { + ALL: "ALL", + INCLUDE: "INCLUDE", + KEYS_ONLY: "KEYS_ONLY" +}; +var SSEType = { + AES256: "AES256", + KMS: "KMS" +}; +var SSEStatus = { + DISABLED: "DISABLED", + DISABLING: "DISABLING", + ENABLED: "ENABLED", + ENABLING: "ENABLING", + UPDATING: "UPDATING" +}; +var StreamViewType = { + KEYS_ONLY: "KEYS_ONLY", + NEW_AND_OLD_IMAGES: "NEW_AND_OLD_IMAGES", + NEW_IMAGE: "NEW_IMAGE", + OLD_IMAGE: "OLD_IMAGE" +}; +var TimeToLiveStatus = { + DISABLED: "DISABLED", + DISABLING: "DISABLING", + ENABLED: "ENABLED", + ENABLING: "ENABLING" +}; +var BackupInUseException = class _BackupInUseException extends DynamoDBServiceException { + static { + __name(this, "BackupInUseException"); + } + name = "BackupInUseException"; + $fault = "client"; + /** + * @internal + */ + constructor(opts) { + super({ + name: "BackupInUseException", + $fault: "client", + ...opts + }); + Object.setPrototypeOf(this, _BackupInUseException.prototype); + } +}; +var BackupNotFoundException = class _BackupNotFoundException extends DynamoDBServiceException { + static { + __name(this, "BackupNotFoundException"); + } + name = "BackupNotFoundException"; + $fault = "client"; + /** + * @internal + */ + constructor(opts) { + super({ + name: "BackupNotFoundException", + $fault: "client", + ...opts + }); + Object.setPrototypeOf(this, _BackupNotFoundException.prototype); + } +}; +var BackupTypeFilter = { + ALL: "ALL", + AWS_BACKUP: "AWS_BACKUP", + SYSTEM: "SYSTEM", + USER: "USER" +}; +var ReturnConsumedCapacity = { + 
INDEXES: "INDEXES", + NONE: "NONE", + TOTAL: "TOTAL" +}; +var ReturnValuesOnConditionCheckFailure = { + ALL_OLD: "ALL_OLD", + NONE: "NONE" +}; +var BatchStatementErrorCodeEnum = { + AccessDenied: "AccessDenied", + ConditionalCheckFailed: "ConditionalCheckFailed", + DuplicateItem: "DuplicateItem", + InternalServerError: "InternalServerError", + ItemCollectionSizeLimitExceeded: "ItemCollectionSizeLimitExceeded", + ProvisionedThroughputExceeded: "ProvisionedThroughputExceeded", + RequestLimitExceeded: "RequestLimitExceeded", + ResourceNotFound: "ResourceNotFound", + ThrottlingError: "ThrottlingError", + TransactionConflict: "TransactionConflict", + ValidationError: "ValidationError" +}; +var InternalServerError = class _InternalServerError extends DynamoDBServiceException { + static { + __name(this, "InternalServerError"); + } + name = "InternalServerError"; + $fault = "server"; + /** + * @internal + */ + constructor(opts) { + super({ + name: "InternalServerError", + $fault: "server", + ...opts + }); + Object.setPrototypeOf(this, _InternalServerError.prototype); + } +}; +var RequestLimitExceeded = class _RequestLimitExceeded extends DynamoDBServiceException { + static { + __name(this, "RequestLimitExceeded"); + } + name = "RequestLimitExceeded"; + $fault = "client"; + /** + * @internal + */ + constructor(opts) { + super({ + name: "RequestLimitExceeded", + $fault: "client", + ...opts + }); + Object.setPrototypeOf(this, _RequestLimitExceeded.prototype); + } +}; +var InvalidEndpointException = class _InvalidEndpointException extends DynamoDBServiceException { + static { + __name(this, "InvalidEndpointException"); + } + name = "InvalidEndpointException"; + $fault = "client"; + Message; + /** + * @internal + */ + constructor(opts) { + super({ + name: "InvalidEndpointException", + $fault: "client", + ...opts + }); + Object.setPrototypeOf(this, _InvalidEndpointException.prototype); + this.Message = opts.Message; + } +}; +var ProvisionedThroughputExceededException = class 
_ProvisionedThroughputExceededException extends DynamoDBServiceException { + static { + __name(this, "ProvisionedThroughputExceededException"); + } + name = "ProvisionedThroughputExceededException"; + $fault = "client"; + /** + * @internal + */ + constructor(opts) { + super({ + name: "ProvisionedThroughputExceededException", + $fault: "client", + ...opts + }); + Object.setPrototypeOf(this, _ProvisionedThroughputExceededException.prototype); + } +}; +var ResourceNotFoundException = class _ResourceNotFoundException extends DynamoDBServiceException { + static { + __name(this, "ResourceNotFoundException"); + } + name = "ResourceNotFoundException"; + $fault = "client"; + /** + * @internal + */ + constructor(opts) { + super({ + name: "ResourceNotFoundException", + $fault: "client", + ...opts + }); + Object.setPrototypeOf(this, _ResourceNotFoundException.prototype); + } +}; +var ReturnItemCollectionMetrics = { + NONE: "NONE", + SIZE: "SIZE" +}; +var ItemCollectionSizeLimitExceededException = class _ItemCollectionSizeLimitExceededException extends DynamoDBServiceException { + static { + __name(this, "ItemCollectionSizeLimitExceededException"); + } + name = "ItemCollectionSizeLimitExceededException"; + $fault = "client"; + /** + * @internal + */ + constructor(opts) { + super({ + name: "ItemCollectionSizeLimitExceededException", + $fault: "client", + ...opts + }); + Object.setPrototypeOf(this, _ItemCollectionSizeLimitExceededException.prototype); + } +}; +var ComparisonOperator = { + BEGINS_WITH: "BEGINS_WITH", + BETWEEN: "BETWEEN", + CONTAINS: "CONTAINS", + EQ: "EQ", + GE: "GE", + GT: "GT", + IN: "IN", + LE: "LE", + LT: "LT", + NE: "NE", + NOT_CONTAINS: "NOT_CONTAINS", + NOT_NULL: "NOT_NULL", + NULL: "NULL" +}; +var ConditionalOperator = { + AND: "AND", + OR: "OR" +}; +var ContinuousBackupsStatus = { + DISABLED: "DISABLED", + ENABLED: "ENABLED" +}; +var PointInTimeRecoveryStatus = { + DISABLED: "DISABLED", + ENABLED: "ENABLED" +}; +var ContinuousBackupsUnavailableException 
= class _ContinuousBackupsUnavailableException extends DynamoDBServiceException { + static { + __name(this, "ContinuousBackupsUnavailableException"); + } + name = "ContinuousBackupsUnavailableException"; + $fault = "client"; + /** + * @internal + */ + constructor(opts) { + super({ + name: "ContinuousBackupsUnavailableException", + $fault: "client", + ...opts + }); + Object.setPrototypeOf(this, _ContinuousBackupsUnavailableException.prototype); + } +}; +var ContributorInsightsAction = { + DISABLE: "DISABLE", + ENABLE: "ENABLE" +}; +var ContributorInsightsStatus = { + DISABLED: "DISABLED", + DISABLING: "DISABLING", + ENABLED: "ENABLED", + ENABLING: "ENABLING", + FAILED: "FAILED" +}; +var LimitExceededException = class _LimitExceededException extends DynamoDBServiceException { + static { + __name(this, "LimitExceededException"); + } + name = "LimitExceededException"; + $fault = "client"; + /** + * @internal + */ + constructor(opts) { + super({ + name: "LimitExceededException", + $fault: "client", + ...opts + }); + Object.setPrototypeOf(this, _LimitExceededException.prototype); + } +}; +var TableInUseException = class _TableInUseException extends DynamoDBServiceException { + static { + __name(this, "TableInUseException"); + } + name = "TableInUseException"; + $fault = "client"; + /** + * @internal + */ + constructor(opts) { + super({ + name: "TableInUseException", + $fault: "client", + ...opts + }); + Object.setPrototypeOf(this, _TableInUseException.prototype); + } +}; +var TableNotFoundException = class _TableNotFoundException extends DynamoDBServiceException { + static { + __name(this, "TableNotFoundException"); + } + name = "TableNotFoundException"; + $fault = "client"; + /** + * @internal + */ + constructor(opts) { + super({ + name: "TableNotFoundException", + $fault: "client", + ...opts + }); + Object.setPrototypeOf(this, _TableNotFoundException.prototype); + } +}; +var GlobalTableStatus = { + ACTIVE: "ACTIVE", + CREATING: "CREATING", + DELETING: "DELETING", + 
UPDATING: "UPDATING" +}; +var IndexStatus = { + ACTIVE: "ACTIVE", + CREATING: "CREATING", + DELETING: "DELETING", + UPDATING: "UPDATING" +}; +var ReplicaStatus = { + ACTIVE: "ACTIVE", + CREATING: "CREATING", + CREATION_FAILED: "CREATION_FAILED", + DELETING: "DELETING", + INACCESSIBLE_ENCRYPTION_CREDENTIALS: "INACCESSIBLE_ENCRYPTION_CREDENTIALS", + REGION_DISABLED: "REGION_DISABLED", + UPDATING: "UPDATING" +}; +var TableClass = { + STANDARD: "STANDARD", + STANDARD_INFREQUENT_ACCESS: "STANDARD_INFREQUENT_ACCESS" +}; +var TableStatus = { + ACTIVE: "ACTIVE", + ARCHIVED: "ARCHIVED", + ARCHIVING: "ARCHIVING", + CREATING: "CREATING", + DELETING: "DELETING", + INACCESSIBLE_ENCRYPTION_CREDENTIALS: "INACCESSIBLE_ENCRYPTION_CREDENTIALS", + UPDATING: "UPDATING" +}; +var GlobalTableAlreadyExistsException = class _GlobalTableAlreadyExistsException extends DynamoDBServiceException { + static { + __name(this, "GlobalTableAlreadyExistsException"); + } + name = "GlobalTableAlreadyExistsException"; + $fault = "client"; + /** + * @internal + */ + constructor(opts) { + super({ + name: "GlobalTableAlreadyExistsException", + $fault: "client", + ...opts + }); + Object.setPrototypeOf(this, _GlobalTableAlreadyExistsException.prototype); + } +}; +var MultiRegionConsistency = { + EVENTUAL: "EVENTUAL", + STRONG: "STRONG" +}; +var ResourceInUseException = class _ResourceInUseException extends DynamoDBServiceException { + static { + __name(this, "ResourceInUseException"); + } + name = "ResourceInUseException"; + $fault = "client"; + /** + * @internal + */ + constructor(opts) { + super({ + name: "ResourceInUseException", + $fault: "client", + ...opts + }); + Object.setPrototypeOf(this, _ResourceInUseException.prototype); + } +}; +var ReturnValue = { + ALL_NEW: "ALL_NEW", + ALL_OLD: "ALL_OLD", + NONE: "NONE", + UPDATED_NEW: "UPDATED_NEW", + UPDATED_OLD: "UPDATED_OLD" +}; +var ReplicatedWriteConflictException = class _ReplicatedWriteConflictException extends DynamoDBServiceException { + static { + 
__name(this, "ReplicatedWriteConflictException"); + } + name = "ReplicatedWriteConflictException"; + $fault = "client"; + /** + * @internal + */ + constructor(opts) { + super({ + name: "ReplicatedWriteConflictException", + $fault: "client", + ...opts + }); + Object.setPrototypeOf(this, _ReplicatedWriteConflictException.prototype); + } +}; +var TransactionConflictException = class _TransactionConflictException extends DynamoDBServiceException { + static { + __name(this, "TransactionConflictException"); + } + name = "TransactionConflictException"; + $fault = "client"; + /** + * @internal + */ + constructor(opts) { + super({ + name: "TransactionConflictException", + $fault: "client", + ...opts + }); + Object.setPrototypeOf(this, _TransactionConflictException.prototype); + } +}; +var PolicyNotFoundException = class _PolicyNotFoundException extends DynamoDBServiceException { + static { + __name(this, "PolicyNotFoundException"); + } + name = "PolicyNotFoundException"; + $fault = "client"; + /** + * @internal + */ + constructor(opts) { + super({ + name: "PolicyNotFoundException", + $fault: "client", + ...opts + }); + Object.setPrototypeOf(this, _PolicyNotFoundException.prototype); + } +}; +var ExportFormat = { + DYNAMODB_JSON: "DYNAMODB_JSON", + ION: "ION" +}; +var ExportStatus = { + COMPLETED: "COMPLETED", + FAILED: "FAILED", + IN_PROGRESS: "IN_PROGRESS" +}; +var ExportType = { + FULL_EXPORT: "FULL_EXPORT", + INCREMENTAL_EXPORT: "INCREMENTAL_EXPORT" +}; +var ExportViewType = { + NEW_AND_OLD_IMAGES: "NEW_AND_OLD_IMAGES", + NEW_IMAGE: "NEW_IMAGE" +}; +var S3SseAlgorithm = { + AES256: "AES256", + KMS: "KMS" +}; +var ExportNotFoundException = class _ExportNotFoundException extends DynamoDBServiceException { + static { + __name(this, "ExportNotFoundException"); + } + name = "ExportNotFoundException"; + $fault = "client"; + /** + * @internal + */ + constructor(opts) { + super({ + name: "ExportNotFoundException", + $fault: "client", + ...opts + }); + Object.setPrototypeOf(this, 
_ExportNotFoundException.prototype); + } +}; +var GlobalTableNotFoundException = class _GlobalTableNotFoundException extends DynamoDBServiceException { + static { + __name(this, "GlobalTableNotFoundException"); + } + name = "GlobalTableNotFoundException"; + $fault = "client"; + /** + * @internal + */ + constructor(opts) { + super({ + name: "GlobalTableNotFoundException", + $fault: "client", + ...opts + }); + Object.setPrototypeOf(this, _GlobalTableNotFoundException.prototype); + } +}; +var ImportStatus = { + CANCELLED: "CANCELLED", + CANCELLING: "CANCELLING", + COMPLETED: "COMPLETED", + FAILED: "FAILED", + IN_PROGRESS: "IN_PROGRESS" +}; +var InputCompressionType = { + GZIP: "GZIP", + NONE: "NONE", + ZSTD: "ZSTD" +}; +var InputFormat = { + CSV: "CSV", + DYNAMODB_JSON: "DYNAMODB_JSON", + ION: "ION" +}; +var ImportNotFoundException = class _ImportNotFoundException extends DynamoDBServiceException { + static { + __name(this, "ImportNotFoundException"); + } + name = "ImportNotFoundException"; + $fault = "client"; + /** + * @internal + */ + constructor(opts) { + super({ + name: "ImportNotFoundException", + $fault: "client", + ...opts + }); + Object.setPrototypeOf(this, _ImportNotFoundException.prototype); + } +}; +var DestinationStatus = { + ACTIVE: "ACTIVE", + DISABLED: "DISABLED", + DISABLING: "DISABLING", + ENABLE_FAILED: "ENABLE_FAILED", + ENABLING: "ENABLING", + UPDATING: "UPDATING" +}; +var DuplicateItemException = class _DuplicateItemException extends DynamoDBServiceException { + static { + __name(this, "DuplicateItemException"); + } + name = "DuplicateItemException"; + $fault = "client"; + /** + * @internal + */ + constructor(opts) { + super({ + name: "DuplicateItemException", + $fault: "client", + ...opts + }); + Object.setPrototypeOf(this, _DuplicateItemException.prototype); + } +}; +var IdempotentParameterMismatchException = class _IdempotentParameterMismatchException extends DynamoDBServiceException { + static { + __name(this, 
"IdempotentParameterMismatchException"); + } + name = "IdempotentParameterMismatchException"; + $fault = "client"; + Message; + /** + * @internal + */ + constructor(opts) { + super({ + name: "IdempotentParameterMismatchException", + $fault: "client", + ...opts + }); + Object.setPrototypeOf(this, _IdempotentParameterMismatchException.prototype); + this.Message = opts.Message; + } +}; +var TransactionInProgressException = class _TransactionInProgressException extends DynamoDBServiceException { + static { + __name(this, "TransactionInProgressException"); + } + name = "TransactionInProgressException"; + $fault = "client"; + Message; + /** + * @internal + */ + constructor(opts) { + super({ + name: "TransactionInProgressException", + $fault: "client", + ...opts + }); + Object.setPrototypeOf(this, _TransactionInProgressException.prototype); + this.Message = opts.Message; + } +}; +var ExportConflictException = class _ExportConflictException extends DynamoDBServiceException { + static { + __name(this, "ExportConflictException"); + } + name = "ExportConflictException"; + $fault = "client"; + /** + * @internal + */ + constructor(opts) { + super({ + name: "ExportConflictException", + $fault: "client", + ...opts + }); + Object.setPrototypeOf(this, _ExportConflictException.prototype); + } +}; +var InvalidExportTimeException = class _InvalidExportTimeException extends DynamoDBServiceException { + static { + __name(this, "InvalidExportTimeException"); + } + name = "InvalidExportTimeException"; + $fault = "client"; + /** + * @internal + */ + constructor(opts) { + super({ + name: "InvalidExportTimeException", + $fault: "client", + ...opts + }); + Object.setPrototypeOf(this, _InvalidExportTimeException.prototype); + } +}; +var PointInTimeRecoveryUnavailableException = class _PointInTimeRecoveryUnavailableException extends DynamoDBServiceException { + static { + __name(this, "PointInTimeRecoveryUnavailableException"); + } + name = "PointInTimeRecoveryUnavailableException"; + $fault = 
"client"; + /** + * @internal + */ + constructor(opts) { + super({ + name: "PointInTimeRecoveryUnavailableException", + $fault: "client", + ...opts + }); + Object.setPrototypeOf(this, _PointInTimeRecoveryUnavailableException.prototype); + } +}; +var ImportConflictException = class _ImportConflictException extends DynamoDBServiceException { + static { + __name(this, "ImportConflictException"); + } + name = "ImportConflictException"; + $fault = "client"; + /** + * @internal + */ + constructor(opts) { + super({ + name: "ImportConflictException", + $fault: "client", + ...opts + }); + Object.setPrototypeOf(this, _ImportConflictException.prototype); + } +}; +var Select = { + ALL_ATTRIBUTES: "ALL_ATTRIBUTES", + ALL_PROJECTED_ATTRIBUTES: "ALL_PROJECTED_ATTRIBUTES", + COUNT: "COUNT", + SPECIFIC_ATTRIBUTES: "SPECIFIC_ATTRIBUTES" +}; +var TableAlreadyExistsException = class _TableAlreadyExistsException extends DynamoDBServiceException { + static { + __name(this, "TableAlreadyExistsException"); + } + name = "TableAlreadyExistsException"; + $fault = "client"; + /** + * @internal + */ + constructor(opts) { + super({ + name: "TableAlreadyExistsException", + $fault: "client", + ...opts + }); + Object.setPrototypeOf(this, _TableAlreadyExistsException.prototype); + } +}; +var InvalidRestoreTimeException = class _InvalidRestoreTimeException extends DynamoDBServiceException { + static { + __name(this, "InvalidRestoreTimeException"); + } + name = "InvalidRestoreTimeException"; + $fault = "client"; + /** + * @internal + */ + constructor(opts) { + super({ + name: "InvalidRestoreTimeException", + $fault: "client", + ...opts + }); + Object.setPrototypeOf(this, _InvalidRestoreTimeException.prototype); + } +}; +var ReplicaAlreadyExistsException = class _ReplicaAlreadyExistsException extends DynamoDBServiceException { + static { + __name(this, "ReplicaAlreadyExistsException"); + } + name = "ReplicaAlreadyExistsException"; + $fault = "client"; + /** + * @internal + */ + constructor(opts) { + 
super({ + name: "ReplicaAlreadyExistsException", + $fault: "client", + ...opts + }); + Object.setPrototypeOf(this, _ReplicaAlreadyExistsException.prototype); + } +}; +var ReplicaNotFoundException = class _ReplicaNotFoundException extends DynamoDBServiceException { + static { + __name(this, "ReplicaNotFoundException"); + } + name = "ReplicaNotFoundException"; + $fault = "client"; + /** + * @internal + */ + constructor(opts) { + super({ + name: "ReplicaNotFoundException", + $fault: "client", + ...opts + }); + Object.setPrototypeOf(this, _ReplicaNotFoundException.prototype); + } +}; +var IndexNotFoundException = class _IndexNotFoundException extends DynamoDBServiceException { + static { + __name(this, "IndexNotFoundException"); + } + name = "IndexNotFoundException"; + $fault = "client"; + /** + * @internal + */ + constructor(opts) { + super({ + name: "IndexNotFoundException", + $fault: "client", + ...opts + }); + Object.setPrototypeOf(this, _IndexNotFoundException.prototype); + } +}; +var AttributeValue; +((AttributeValue2) => { + AttributeValue2.visit = /* @__PURE__ */ __name((value, visitor) => { + if (value.S !== void 0) return visitor.S(value.S); + if (value.N !== void 0) return visitor.N(value.N); + if (value.B !== void 0) return visitor.B(value.B); + if (value.SS !== void 0) return visitor.SS(value.SS); + if (value.NS !== void 0) return visitor.NS(value.NS); + if (value.BS !== void 0) return visitor.BS(value.BS); + if (value.M !== void 0) return visitor.M(value.M); + if (value.L !== void 0) return visitor.L(value.L); + if (value.NULL !== void 0) return visitor.NULL(value.NULL); + if (value.BOOL !== void 0) return visitor.BOOL(value.BOOL); + return visitor._(value.$unknown[0], value.$unknown[1]); + }, "visit"); +})(AttributeValue || (AttributeValue = {})); +var ConditionalCheckFailedException = class _ConditionalCheckFailedException extends DynamoDBServiceException { + static { + __name(this, "ConditionalCheckFailedException"); + } + name = 
"ConditionalCheckFailedException"; + $fault = "client"; + /** + *

Item which caused the ConditionalCheckFailedException.

+ * @public + */ + Item; + /** + * @internal + */ + constructor(opts) { + super({ + name: "ConditionalCheckFailedException", + $fault: "client", + ...opts + }); + Object.setPrototypeOf(this, _ConditionalCheckFailedException.prototype); + this.Item = opts.Item; + } +}; +var TransactionCanceledException = class _TransactionCanceledException extends DynamoDBServiceException { + static { + __name(this, "TransactionCanceledException"); + } + name = "TransactionCanceledException"; + $fault = "client"; + Message; + /** + *

A list of cancellation reasons.

+ * @public + */ + CancellationReasons; + /** + * @internal + */ + constructor(opts) { + super({ + name: "TransactionCanceledException", + $fault: "client", + ...opts + }); + Object.setPrototypeOf(this, _TransactionCanceledException.prototype); + this.Message = opts.Message; + this.CancellationReasons = opts.CancellationReasons; + } +}; + +// src/protocols/Aws_json1_0.ts +var se_BatchExecuteStatementCommand = /* @__PURE__ */ __name(async (input, context) => { + const headers = sharedHeaders("BatchExecuteStatement"); + let body; + body = JSON.stringify(se_BatchExecuteStatementInput(input, context)); + return buildHttpRpcRequest(context, headers, "/", void 0, body); +}, "se_BatchExecuteStatementCommand"); +var se_BatchGetItemCommand = /* @__PURE__ */ __name(async (input, context) => { + const headers = sharedHeaders("BatchGetItem"); + let body; + body = JSON.stringify(se_BatchGetItemInput(input, context)); + return buildHttpRpcRequest(context, headers, "/", void 0, body); +}, "se_BatchGetItemCommand"); +var se_BatchWriteItemCommand = /* @__PURE__ */ __name(async (input, context) => { + const headers = sharedHeaders("BatchWriteItem"); + let body; + body = JSON.stringify(se_BatchWriteItemInput(input, context)); + return buildHttpRpcRequest(context, headers, "/", void 0, body); +}, "se_BatchWriteItemCommand"); +var se_CreateBackupCommand = /* @__PURE__ */ __name(async (input, context) => { + const headers = sharedHeaders("CreateBackup"); + let body; + body = JSON.stringify((0, import_smithy_client._json)(input)); + return buildHttpRpcRequest(context, headers, "/", void 0, body); +}, "se_CreateBackupCommand"); +var se_CreateGlobalTableCommand = /* @__PURE__ */ __name(async (input, context) => { + const headers = sharedHeaders("CreateGlobalTable"); + let body; + body = JSON.stringify((0, import_smithy_client._json)(input)); + return buildHttpRpcRequest(context, headers, "/", void 0, body); +}, "se_CreateGlobalTableCommand"); +var se_CreateTableCommand = /* @__PURE__ */ 
__name(async (input, context) => { + const headers = sharedHeaders("CreateTable"); + let body; + body = JSON.stringify((0, import_smithy_client._json)(input)); + return buildHttpRpcRequest(context, headers, "/", void 0, body); +}, "se_CreateTableCommand"); +var se_DeleteBackupCommand = /* @__PURE__ */ __name(async (input, context) => { + const headers = sharedHeaders("DeleteBackup"); + let body; + body = JSON.stringify((0, import_smithy_client._json)(input)); + return buildHttpRpcRequest(context, headers, "/", void 0, body); +}, "se_DeleteBackupCommand"); +var se_DeleteItemCommand = /* @__PURE__ */ __name(async (input, context) => { + const headers = sharedHeaders("DeleteItem"); + let body; + body = JSON.stringify(se_DeleteItemInput(input, context)); + return buildHttpRpcRequest(context, headers, "/", void 0, body); +}, "se_DeleteItemCommand"); +var se_DeleteResourcePolicyCommand = /* @__PURE__ */ __name(async (input, context) => { + const headers = sharedHeaders("DeleteResourcePolicy"); + let body; + body = JSON.stringify((0, import_smithy_client._json)(input)); + return buildHttpRpcRequest(context, headers, "/", void 0, body); +}, "se_DeleteResourcePolicyCommand"); +var se_DeleteTableCommand = /* @__PURE__ */ __name(async (input, context) => { + const headers = sharedHeaders("DeleteTable"); + let body; + body = JSON.stringify((0, import_smithy_client._json)(input)); + return buildHttpRpcRequest(context, headers, "/", void 0, body); +}, "se_DeleteTableCommand"); +var se_DescribeBackupCommand = /* @__PURE__ */ __name(async (input, context) => { + const headers = sharedHeaders("DescribeBackup"); + let body; + body = JSON.stringify((0, import_smithy_client._json)(input)); + return buildHttpRpcRequest(context, headers, "/", void 0, body); +}, "se_DescribeBackupCommand"); +var se_DescribeContinuousBackupsCommand = /* @__PURE__ */ __name(async (input, context) => { + const headers = sharedHeaders("DescribeContinuousBackups"); + let body; + body = JSON.stringify((0, 
import_smithy_client._json)(input)); + return buildHttpRpcRequest(context, headers, "/", void 0, body); +}, "se_DescribeContinuousBackupsCommand"); +var se_DescribeContributorInsightsCommand = /* @__PURE__ */ __name(async (input, context) => { + const headers = sharedHeaders("DescribeContributorInsights"); + let body; + body = JSON.stringify((0, import_smithy_client._json)(input)); + return buildHttpRpcRequest(context, headers, "/", void 0, body); +}, "se_DescribeContributorInsightsCommand"); +var se_DescribeEndpointsCommand = /* @__PURE__ */ __name(async (input, context) => { + const headers = sharedHeaders("DescribeEndpoints"); + let body; + body = JSON.stringify((0, import_smithy_client._json)(input)); + return buildHttpRpcRequest(context, headers, "/", void 0, body); +}, "se_DescribeEndpointsCommand"); +var se_DescribeExportCommand = /* @__PURE__ */ __name(async (input, context) => { + const headers = sharedHeaders("DescribeExport"); + let body; + body = JSON.stringify((0, import_smithy_client._json)(input)); + return buildHttpRpcRequest(context, headers, "/", void 0, body); +}, "se_DescribeExportCommand"); +var se_DescribeGlobalTableCommand = /* @__PURE__ */ __name(async (input, context) => { + const headers = sharedHeaders("DescribeGlobalTable"); + let body; + body = JSON.stringify((0, import_smithy_client._json)(input)); + return buildHttpRpcRequest(context, headers, "/", void 0, body); +}, "se_DescribeGlobalTableCommand"); +var se_DescribeGlobalTableSettingsCommand = /* @__PURE__ */ __name(async (input, context) => { + const headers = sharedHeaders("DescribeGlobalTableSettings"); + let body; + body = JSON.stringify((0, import_smithy_client._json)(input)); + return buildHttpRpcRequest(context, headers, "/", void 0, body); +}, "se_DescribeGlobalTableSettingsCommand"); +var se_DescribeImportCommand = /* @__PURE__ */ __name(async (input, context) => { + const headers = sharedHeaders("DescribeImport"); + let body; + body = JSON.stringify((0, 
import_smithy_client._json)(input)); + return buildHttpRpcRequest(context, headers, "/", void 0, body); +}, "se_DescribeImportCommand"); +var se_DescribeKinesisStreamingDestinationCommand = /* @__PURE__ */ __name(async (input, context) => { + const headers = sharedHeaders("DescribeKinesisStreamingDestination"); + let body; + body = JSON.stringify((0, import_smithy_client._json)(input)); + return buildHttpRpcRequest(context, headers, "/", void 0, body); +}, "se_DescribeKinesisStreamingDestinationCommand"); +var se_DescribeLimitsCommand = /* @__PURE__ */ __name(async (input, context) => { + const headers = sharedHeaders("DescribeLimits"); + let body; + body = JSON.stringify((0, import_smithy_client._json)(input)); + return buildHttpRpcRequest(context, headers, "/", void 0, body); +}, "se_DescribeLimitsCommand"); +var se_DescribeTableCommand = /* @__PURE__ */ __name(async (input, context) => { + const headers = sharedHeaders("DescribeTable"); + let body; + body = JSON.stringify((0, import_smithy_client._json)(input)); + return buildHttpRpcRequest(context, headers, "/", void 0, body); +}, "se_DescribeTableCommand"); +var se_DescribeTableReplicaAutoScalingCommand = /* @__PURE__ */ __name(async (input, context) => { + const headers = sharedHeaders("DescribeTableReplicaAutoScaling"); + let body; + body = JSON.stringify((0, import_smithy_client._json)(input)); + return buildHttpRpcRequest(context, headers, "/", void 0, body); +}, "se_DescribeTableReplicaAutoScalingCommand"); +var se_DescribeTimeToLiveCommand = /* @__PURE__ */ __name(async (input, context) => { + const headers = sharedHeaders("DescribeTimeToLive"); + let body; + body = JSON.stringify((0, import_smithy_client._json)(input)); + return buildHttpRpcRequest(context, headers, "/", void 0, body); +}, "se_DescribeTimeToLiveCommand"); +var se_DisableKinesisStreamingDestinationCommand = /* @__PURE__ */ __name(async (input, context) => { + const headers = sharedHeaders("DisableKinesisStreamingDestination"); + let 
body; + body = JSON.stringify((0, import_smithy_client._json)(input)); + return buildHttpRpcRequest(context, headers, "/", void 0, body); +}, "se_DisableKinesisStreamingDestinationCommand"); +var se_EnableKinesisStreamingDestinationCommand = /* @__PURE__ */ __name(async (input, context) => { + const headers = sharedHeaders("EnableKinesisStreamingDestination"); + let body; + body = JSON.stringify((0, import_smithy_client._json)(input)); + return buildHttpRpcRequest(context, headers, "/", void 0, body); +}, "se_EnableKinesisStreamingDestinationCommand"); +var se_ExecuteStatementCommand = /* @__PURE__ */ __name(async (input, context) => { + const headers = sharedHeaders("ExecuteStatement"); + let body; + body = JSON.stringify(se_ExecuteStatementInput(input, context)); + return buildHttpRpcRequest(context, headers, "/", void 0, body); +}, "se_ExecuteStatementCommand"); +var se_ExecuteTransactionCommand = /* @__PURE__ */ __name(async (input, context) => { + const headers = sharedHeaders("ExecuteTransaction"); + let body; + body = JSON.stringify(se_ExecuteTransactionInput(input, context)); + return buildHttpRpcRequest(context, headers, "/", void 0, body); +}, "se_ExecuteTransactionCommand"); +var se_ExportTableToPointInTimeCommand = /* @__PURE__ */ __name(async (input, context) => { + const headers = sharedHeaders("ExportTableToPointInTime"); + let body; + body = JSON.stringify(se_ExportTableToPointInTimeInput(input, context)); + return buildHttpRpcRequest(context, headers, "/", void 0, body); +}, "se_ExportTableToPointInTimeCommand"); +var se_GetItemCommand = /* @__PURE__ */ __name(async (input, context) => { + const headers = sharedHeaders("GetItem"); + let body; + body = JSON.stringify(se_GetItemInput(input, context)); + return buildHttpRpcRequest(context, headers, "/", void 0, body); +}, "se_GetItemCommand"); +var se_GetResourcePolicyCommand = /* @__PURE__ */ __name(async (input, context) => { + const headers = sharedHeaders("GetResourcePolicy"); + let body; + body = 
JSON.stringify((0, import_smithy_client._json)(input)); + return buildHttpRpcRequest(context, headers, "/", void 0, body); +}, "se_GetResourcePolicyCommand"); +var se_ImportTableCommand = /* @__PURE__ */ __name(async (input, context) => { + const headers = sharedHeaders("ImportTable"); + let body; + body = JSON.stringify(se_ImportTableInput(input, context)); + return buildHttpRpcRequest(context, headers, "/", void 0, body); +}, "se_ImportTableCommand"); +var se_ListBackupsCommand = /* @__PURE__ */ __name(async (input, context) => { + const headers = sharedHeaders("ListBackups"); + let body; + body = JSON.stringify(se_ListBackupsInput(input, context)); + return buildHttpRpcRequest(context, headers, "/", void 0, body); +}, "se_ListBackupsCommand"); +var se_ListContributorInsightsCommand = /* @__PURE__ */ __name(async (input, context) => { + const headers = sharedHeaders("ListContributorInsights"); + let body; + body = JSON.stringify((0, import_smithy_client._json)(input)); + return buildHttpRpcRequest(context, headers, "/", void 0, body); +}, "se_ListContributorInsightsCommand"); +var se_ListExportsCommand = /* @__PURE__ */ __name(async (input, context) => { + const headers = sharedHeaders("ListExports"); + let body; + body = JSON.stringify((0, import_smithy_client._json)(input)); + return buildHttpRpcRequest(context, headers, "/", void 0, body); +}, "se_ListExportsCommand"); +var se_ListGlobalTablesCommand = /* @__PURE__ */ __name(async (input, context) => { + const headers = sharedHeaders("ListGlobalTables"); + let body; + body = JSON.stringify((0, import_smithy_client._json)(input)); + return buildHttpRpcRequest(context, headers, "/", void 0, body); +}, "se_ListGlobalTablesCommand"); +var se_ListImportsCommand = /* @__PURE__ */ __name(async (input, context) => { + const headers = sharedHeaders("ListImports"); + let body; + body = JSON.stringify((0, import_smithy_client._json)(input)); + return buildHttpRpcRequest(context, headers, "/", void 0, body); +}, 
"se_ListImportsCommand"); +var se_ListTablesCommand = /* @__PURE__ */ __name(async (input, context) => { + const headers = sharedHeaders("ListTables"); + let body; + body = JSON.stringify((0, import_smithy_client._json)(input)); + return buildHttpRpcRequest(context, headers, "/", void 0, body); +}, "se_ListTablesCommand"); +var se_ListTagsOfResourceCommand = /* @__PURE__ */ __name(async (input, context) => { + const headers = sharedHeaders("ListTagsOfResource"); + let body; + body = JSON.stringify((0, import_smithy_client._json)(input)); + return buildHttpRpcRequest(context, headers, "/", void 0, body); +}, "se_ListTagsOfResourceCommand"); +var se_PutItemCommand = /* @__PURE__ */ __name(async (input, context) => { + const headers = sharedHeaders("PutItem"); + let body; + body = JSON.stringify(se_PutItemInput(input, context)); + return buildHttpRpcRequest(context, headers, "/", void 0, body); +}, "se_PutItemCommand"); +var se_PutResourcePolicyCommand = /* @__PURE__ */ __name(async (input, context) => { + const headers = sharedHeaders("PutResourcePolicy"); + let body; + body = JSON.stringify((0, import_smithy_client._json)(input)); + return buildHttpRpcRequest(context, headers, "/", void 0, body); +}, "se_PutResourcePolicyCommand"); +var se_QueryCommand = /* @__PURE__ */ __name(async (input, context) => { + const headers = sharedHeaders("Query"); + let body; + body = JSON.stringify(se_QueryInput(input, context)); + return buildHttpRpcRequest(context, headers, "/", void 0, body); +}, "se_QueryCommand"); +var se_RestoreTableFromBackupCommand = /* @__PURE__ */ __name(async (input, context) => { + const headers = sharedHeaders("RestoreTableFromBackup"); + let body; + body = JSON.stringify((0, import_smithy_client._json)(input)); + return buildHttpRpcRequest(context, headers, "/", void 0, body); +}, "se_RestoreTableFromBackupCommand"); +var se_RestoreTableToPointInTimeCommand = /* @__PURE__ */ __name(async (input, context) => { + const headers = 
sharedHeaders("RestoreTableToPointInTime"); + let body; + body = JSON.stringify(se_RestoreTableToPointInTimeInput(input, context)); + return buildHttpRpcRequest(context, headers, "/", void 0, body); +}, "se_RestoreTableToPointInTimeCommand"); +var se_ScanCommand = /* @__PURE__ */ __name(async (input, context) => { + const headers = sharedHeaders("Scan"); + let body; + body = JSON.stringify(se_ScanInput(input, context)); + return buildHttpRpcRequest(context, headers, "/", void 0, body); +}, "se_ScanCommand"); +var se_TagResourceCommand = /* @__PURE__ */ __name(async (input, context) => { + const headers = sharedHeaders("TagResource"); + let body; + body = JSON.stringify((0, import_smithy_client._json)(input)); + return buildHttpRpcRequest(context, headers, "/", void 0, body); +}, "se_TagResourceCommand"); +var se_TransactGetItemsCommand = /* @__PURE__ */ __name(async (input, context) => { + const headers = sharedHeaders("TransactGetItems"); + let body; + body = JSON.stringify(se_TransactGetItemsInput(input, context)); + return buildHttpRpcRequest(context, headers, "/", void 0, body); +}, "se_TransactGetItemsCommand"); +var se_TransactWriteItemsCommand = /* @__PURE__ */ __name(async (input, context) => { + const headers = sharedHeaders("TransactWriteItems"); + let body; + body = JSON.stringify(se_TransactWriteItemsInput(input, context)); + return buildHttpRpcRequest(context, headers, "/", void 0, body); +}, "se_TransactWriteItemsCommand"); +var se_UntagResourceCommand = /* @__PURE__ */ __name(async (input, context) => { + const headers = sharedHeaders("UntagResource"); + let body; + body = JSON.stringify((0, import_smithy_client._json)(input)); + return buildHttpRpcRequest(context, headers, "/", void 0, body); +}, "se_UntagResourceCommand"); +var se_UpdateContinuousBackupsCommand = /* @__PURE__ */ __name(async (input, context) => { + const headers = sharedHeaders("UpdateContinuousBackups"); + let body; + body = JSON.stringify((0, import_smithy_client._json)(input)); 
+ return buildHttpRpcRequest(context, headers, "/", void 0, body); +}, "se_UpdateContinuousBackupsCommand"); +var se_UpdateContributorInsightsCommand = /* @__PURE__ */ __name(async (input, context) => { + const headers = sharedHeaders("UpdateContributorInsights"); + let body; + body = JSON.stringify((0, import_smithy_client._json)(input)); + return buildHttpRpcRequest(context, headers, "/", void 0, body); +}, "se_UpdateContributorInsightsCommand"); +var se_UpdateGlobalTableCommand = /* @__PURE__ */ __name(async (input, context) => { + const headers = sharedHeaders("UpdateGlobalTable"); + let body; + body = JSON.stringify((0, import_smithy_client._json)(input)); + return buildHttpRpcRequest(context, headers, "/", void 0, body); +}, "se_UpdateGlobalTableCommand"); +var se_UpdateGlobalTableSettingsCommand = /* @__PURE__ */ __name(async (input, context) => { + const headers = sharedHeaders("UpdateGlobalTableSettings"); + let body; + body = JSON.stringify(se_UpdateGlobalTableSettingsInput(input, context)); + return buildHttpRpcRequest(context, headers, "/", void 0, body); +}, "se_UpdateGlobalTableSettingsCommand"); +var se_UpdateItemCommand = /* @__PURE__ */ __name(async (input, context) => { + const headers = sharedHeaders("UpdateItem"); + let body; + body = JSON.stringify(se_UpdateItemInput(input, context)); + return buildHttpRpcRequest(context, headers, "/", void 0, body); +}, "se_UpdateItemCommand"); +var se_UpdateKinesisStreamingDestinationCommand = /* @__PURE__ */ __name(async (input, context) => { + const headers = sharedHeaders("UpdateKinesisStreamingDestination"); + let body; + body = JSON.stringify((0, import_smithy_client._json)(input)); + return buildHttpRpcRequest(context, headers, "/", void 0, body); +}, "se_UpdateKinesisStreamingDestinationCommand"); +var se_UpdateTableCommand = /* @__PURE__ */ __name(async (input, context) => { + const headers = sharedHeaders("UpdateTable"); + let body; + body = JSON.stringify((0, import_smithy_client._json)(input)); + 
return buildHttpRpcRequest(context, headers, "/", void 0, body); +}, "se_UpdateTableCommand"); +var se_UpdateTableReplicaAutoScalingCommand = /* @__PURE__ */ __name(async (input, context) => { + const headers = sharedHeaders("UpdateTableReplicaAutoScaling"); + let body; + body = JSON.stringify(se_UpdateTableReplicaAutoScalingInput(input, context)); + return buildHttpRpcRequest(context, headers, "/", void 0, body); +}, "se_UpdateTableReplicaAutoScalingCommand"); +var se_UpdateTimeToLiveCommand = /* @__PURE__ */ __name(async (input, context) => { + const headers = sharedHeaders("UpdateTimeToLive"); + let body; + body = JSON.stringify((0, import_smithy_client._json)(input)); + return buildHttpRpcRequest(context, headers, "/", void 0, body); +}, "se_UpdateTimeToLiveCommand"); +var de_BatchExecuteStatementCommand = /* @__PURE__ */ __name(async (output, context) => { + if (output.statusCode >= 300) { + return de_CommandError(output, context); + } + const data = await (0, import_core.parseJsonBody)(output.body, context); + let contents = {}; + contents = de_BatchExecuteStatementOutput(data, context); + const response = { + $metadata: deserializeMetadata(output), + ...contents + }; + return response; +}, "de_BatchExecuteStatementCommand"); +var de_BatchGetItemCommand = /* @__PURE__ */ __name(async (output, context) => { + if (output.statusCode >= 300) { + return de_CommandError(output, context); + } + const data = await (0, import_core.parseJsonBody)(output.body, context); + let contents = {}; + contents = de_BatchGetItemOutput(data, context); + const response = { + $metadata: deserializeMetadata(output), + ...contents + }; + return response; +}, "de_BatchGetItemCommand"); +var de_BatchWriteItemCommand = /* @__PURE__ */ __name(async (output, context) => { + if (output.statusCode >= 300) { + return de_CommandError(output, context); + } + const data = await (0, import_core.parseJsonBody)(output.body, context); + let contents = {}; + contents = de_BatchWriteItemOutput(data, 
context); + const response = { + $metadata: deserializeMetadata(output), + ...contents + }; + return response; +}, "de_BatchWriteItemCommand"); +var de_CreateBackupCommand = /* @__PURE__ */ __name(async (output, context) => { + if (output.statusCode >= 300) { + return de_CommandError(output, context); + } + const data = await (0, import_core.parseJsonBody)(output.body, context); + let contents = {}; + contents = de_CreateBackupOutput(data, context); + const response = { + $metadata: deserializeMetadata(output), + ...contents + }; + return response; +}, "de_CreateBackupCommand"); +var de_CreateGlobalTableCommand = /* @__PURE__ */ __name(async (output, context) => { + if (output.statusCode >= 300) { + return de_CommandError(output, context); + } + const data = await (0, import_core.parseJsonBody)(output.body, context); + let contents = {}; + contents = de_CreateGlobalTableOutput(data, context); + const response = { + $metadata: deserializeMetadata(output), + ...contents + }; + return response; +}, "de_CreateGlobalTableCommand"); +var de_CreateTableCommand = /* @__PURE__ */ __name(async (output, context) => { + if (output.statusCode >= 300) { + return de_CommandError(output, context); + } + const data = await (0, import_core.parseJsonBody)(output.body, context); + let contents = {}; + contents = de_CreateTableOutput(data, context); + const response = { + $metadata: deserializeMetadata(output), + ...contents + }; + return response; +}, "de_CreateTableCommand"); +var de_DeleteBackupCommand = /* @__PURE__ */ __name(async (output, context) => { + if (output.statusCode >= 300) { + return de_CommandError(output, context); + } + const data = await (0, import_core.parseJsonBody)(output.body, context); + let contents = {}; + contents = de_DeleteBackupOutput(data, context); + const response = { + $metadata: deserializeMetadata(output), + ...contents + }; + return response; +}, "de_DeleteBackupCommand"); +var de_DeleteItemCommand = /* @__PURE__ */ __name(async (output, context) 
=> { + if (output.statusCode >= 300) { + return de_CommandError(output, context); + } + const data = await (0, import_core.parseJsonBody)(output.body, context); + let contents = {}; + contents = de_DeleteItemOutput(data, context); + const response = { + $metadata: deserializeMetadata(output), + ...contents + }; + return response; +}, "de_DeleteItemCommand"); +var de_DeleteResourcePolicyCommand = /* @__PURE__ */ __name(async (output, context) => { + if (output.statusCode >= 300) { + return de_CommandError(output, context); + } + const data = await (0, import_core.parseJsonBody)(output.body, context); + let contents = {}; + contents = (0, import_smithy_client._json)(data); + const response = { + $metadata: deserializeMetadata(output), + ...contents + }; + return response; +}, "de_DeleteResourcePolicyCommand"); +var de_DeleteTableCommand = /* @__PURE__ */ __name(async (output, context) => { + if (output.statusCode >= 300) { + return de_CommandError(output, context); + } + const data = await (0, import_core.parseJsonBody)(output.body, context); + let contents = {}; + contents = de_DeleteTableOutput(data, context); + const response = { + $metadata: deserializeMetadata(output), + ...contents + }; + return response; +}, "de_DeleteTableCommand"); +var de_DescribeBackupCommand = /* @__PURE__ */ __name(async (output, context) => { + if (output.statusCode >= 300) { + return de_CommandError(output, context); + } + const data = await (0, import_core.parseJsonBody)(output.body, context); + let contents = {}; + contents = de_DescribeBackupOutput(data, context); + const response = { + $metadata: deserializeMetadata(output), + ...contents + }; + return response; +}, "de_DescribeBackupCommand"); +var de_DescribeContinuousBackupsCommand = /* @__PURE__ */ __name(async (output, context) => { + if (output.statusCode >= 300) { + return de_CommandError(output, context); + } + const data = await (0, import_core.parseJsonBody)(output.body, context); + let contents = {}; + contents = 
de_DescribeContinuousBackupsOutput(data, context); + const response = { + $metadata: deserializeMetadata(output), + ...contents + }; + return response; +}, "de_DescribeContinuousBackupsCommand"); +var de_DescribeContributorInsightsCommand = /* @__PURE__ */ __name(async (output, context) => { + if (output.statusCode >= 300) { + return de_CommandError(output, context); + } + const data = await (0, import_core.parseJsonBody)(output.body, context); + let contents = {}; + contents = de_DescribeContributorInsightsOutput(data, context); + const response = { + $metadata: deserializeMetadata(output), + ...contents + }; + return response; +}, "de_DescribeContributorInsightsCommand"); +var de_DescribeEndpointsCommand = /* @__PURE__ */ __name(async (output, context) => { + if (output.statusCode >= 300) { + return de_CommandError(output, context); + } + const data = await (0, import_core.parseJsonBody)(output.body, context); + let contents = {}; + contents = (0, import_smithy_client._json)(data); + const response = { + $metadata: deserializeMetadata(output), + ...contents + }; + return response; +}, "de_DescribeEndpointsCommand"); +var de_DescribeExportCommand = /* @__PURE__ */ __name(async (output, context) => { + if (output.statusCode >= 300) { + return de_CommandError(output, context); + } + const data = await (0, import_core.parseJsonBody)(output.body, context); + let contents = {}; + contents = de_DescribeExportOutput(data, context); + const response = { + $metadata: deserializeMetadata(output), + ...contents + }; + return response; +}, "de_DescribeExportCommand"); +var de_DescribeGlobalTableCommand = /* @__PURE__ */ __name(async (output, context) => { + if (output.statusCode >= 300) { + return de_CommandError(output, context); + } + const data = await (0, import_core.parseJsonBody)(output.body, context); + let contents = {}; + contents = de_DescribeGlobalTableOutput(data, context); + const response = { + $metadata: deserializeMetadata(output), + ...contents + }; + return 
response; +}, "de_DescribeGlobalTableCommand"); +var de_DescribeGlobalTableSettingsCommand = /* @__PURE__ */ __name(async (output, context) => { + if (output.statusCode >= 300) { + return de_CommandError(output, context); + } + const data = await (0, import_core.parseJsonBody)(output.body, context); + let contents = {}; + contents = de_DescribeGlobalTableSettingsOutput(data, context); + const response = { + $metadata: deserializeMetadata(output), + ...contents + }; + return response; +}, "de_DescribeGlobalTableSettingsCommand"); +var de_DescribeImportCommand = /* @__PURE__ */ __name(async (output, context) => { + if (output.statusCode >= 300) { + return de_CommandError(output, context); + } + const data = await (0, import_core.parseJsonBody)(output.body, context); + let contents = {}; + contents = de_DescribeImportOutput(data, context); + const response = { + $metadata: deserializeMetadata(output), + ...contents + }; + return response; +}, "de_DescribeImportCommand"); +var de_DescribeKinesisStreamingDestinationCommand = /* @__PURE__ */ __name(async (output, context) => { + if (output.statusCode >= 300) { + return de_CommandError(output, context); + } + const data = await (0, import_core.parseJsonBody)(output.body, context); + let contents = {}; + contents = (0, import_smithy_client._json)(data); + const response = { + $metadata: deserializeMetadata(output), + ...contents + }; + return response; +}, "de_DescribeKinesisStreamingDestinationCommand"); +var de_DescribeLimitsCommand = /* @__PURE__ */ __name(async (output, context) => { + if (output.statusCode >= 300) { + return de_CommandError(output, context); + } + const data = await (0, import_core.parseJsonBody)(output.body, context); + let contents = {}; + contents = (0, import_smithy_client._json)(data); + const response = { + $metadata: deserializeMetadata(output), + ...contents + }; + return response; +}, "de_DescribeLimitsCommand"); +var de_DescribeTableCommand = /* @__PURE__ */ __name(async (output, context) => 
{ + if (output.statusCode >= 300) { + return de_CommandError(output, context); + } + const data = await (0, import_core.parseJsonBody)(output.body, context); + let contents = {}; + contents = de_DescribeTableOutput(data, context); + const response = { + $metadata: deserializeMetadata(output), + ...contents + }; + return response; +}, "de_DescribeTableCommand"); +var de_DescribeTableReplicaAutoScalingCommand = /* @__PURE__ */ __name(async (output, context) => { + if (output.statusCode >= 300) { + return de_CommandError(output, context); + } + const data = await (0, import_core.parseJsonBody)(output.body, context); + let contents = {}; + contents = de_DescribeTableReplicaAutoScalingOutput(data, context); + const response = { + $metadata: deserializeMetadata(output), + ...contents + }; + return response; +}, "de_DescribeTableReplicaAutoScalingCommand"); +var de_DescribeTimeToLiveCommand = /* @__PURE__ */ __name(async (output, context) => { + if (output.statusCode >= 300) { + return de_CommandError(output, context); + } + const data = await (0, import_core.parseJsonBody)(output.body, context); + let contents = {}; + contents = (0, import_smithy_client._json)(data); + const response = { + $metadata: deserializeMetadata(output), + ...contents + }; + return response; +}, "de_DescribeTimeToLiveCommand"); +var de_DisableKinesisStreamingDestinationCommand = /* @__PURE__ */ __name(async (output, context) => { + if (output.statusCode >= 300) { + return de_CommandError(output, context); + } + const data = await (0, import_core.parseJsonBody)(output.body, context); + let contents = {}; + contents = (0, import_smithy_client._json)(data); + const response = { + $metadata: deserializeMetadata(output), + ...contents + }; + return response; +}, "de_DisableKinesisStreamingDestinationCommand"); +var de_EnableKinesisStreamingDestinationCommand = /* @__PURE__ */ __name(async (output, context) => { + if (output.statusCode >= 300) { + return de_CommandError(output, context); + } + const 
data = await (0, import_core.parseJsonBody)(output.body, context); + let contents = {}; + contents = (0, import_smithy_client._json)(data); + const response = { + $metadata: deserializeMetadata(output), + ...contents + }; + return response; +}, "de_EnableKinesisStreamingDestinationCommand"); +var de_ExecuteStatementCommand = /* @__PURE__ */ __name(async (output, context) => { + if (output.statusCode >= 300) { + return de_CommandError(output, context); + } + const data = await (0, import_core.parseJsonBody)(output.body, context); + let contents = {}; + contents = de_ExecuteStatementOutput(data, context); + const response = { + $metadata: deserializeMetadata(output), + ...contents + }; + return response; +}, "de_ExecuteStatementCommand"); +var de_ExecuteTransactionCommand = /* @__PURE__ */ __name(async (output, context) => { + if (output.statusCode >= 300) { + return de_CommandError(output, context); + } + const data = await (0, import_core.parseJsonBody)(output.body, context); + let contents = {}; + contents = de_ExecuteTransactionOutput(data, context); + const response = { + $metadata: deserializeMetadata(output), + ...contents + }; + return response; +}, "de_ExecuteTransactionCommand"); +var de_ExportTableToPointInTimeCommand = /* @__PURE__ */ __name(async (output, context) => { + if (output.statusCode >= 300) { + return de_CommandError(output, context); + } + const data = await (0, import_core.parseJsonBody)(output.body, context); + let contents = {}; + contents = de_ExportTableToPointInTimeOutput(data, context); + const response = { + $metadata: deserializeMetadata(output), + ...contents + }; + return response; +}, "de_ExportTableToPointInTimeCommand"); +var de_GetItemCommand = /* @__PURE__ */ __name(async (output, context) => { + if (output.statusCode >= 300) { + return de_CommandError(output, context); + } + const data = await (0, import_core.parseJsonBody)(output.body, context); + let contents = {}; + contents = de_GetItemOutput(data, context); + const 
response = { + $metadata: deserializeMetadata(output), + ...contents + }; + return response; +}, "de_GetItemCommand"); +var de_GetResourcePolicyCommand = /* @__PURE__ */ __name(async (output, context) => { + if (output.statusCode >= 300) { + return de_CommandError(output, context); + } + const data = await (0, import_core.parseJsonBody)(output.body, context); + let contents = {}; + contents = (0, import_smithy_client._json)(data); + const response = { + $metadata: deserializeMetadata(output), + ...contents + }; + return response; +}, "de_GetResourcePolicyCommand"); +var de_ImportTableCommand = /* @__PURE__ */ __name(async (output, context) => { + if (output.statusCode >= 300) { + return de_CommandError(output, context); + } + const data = await (0, import_core.parseJsonBody)(output.body, context); + let contents = {}; + contents = de_ImportTableOutput(data, context); + const response = { + $metadata: deserializeMetadata(output), + ...contents + }; + return response; +}, "de_ImportTableCommand"); +var de_ListBackupsCommand = /* @__PURE__ */ __name(async (output, context) => { + if (output.statusCode >= 300) { + return de_CommandError(output, context); + } + const data = await (0, import_core.parseJsonBody)(output.body, context); + let contents = {}; + contents = de_ListBackupsOutput(data, context); + const response = { + $metadata: deserializeMetadata(output), + ...contents + }; + return response; +}, "de_ListBackupsCommand"); +var de_ListContributorInsightsCommand = /* @__PURE__ */ __name(async (output, context) => { + if (output.statusCode >= 300) { + return de_CommandError(output, context); + } + const data = await (0, import_core.parseJsonBody)(output.body, context); + let contents = {}; + contents = (0, import_smithy_client._json)(data); + const response = { + $metadata: deserializeMetadata(output), + ...contents + }; + return response; +}, "de_ListContributorInsightsCommand"); +var de_ListExportsCommand = /* @__PURE__ */ __name(async (output, context) => { + 
if (output.statusCode >= 300) { + return de_CommandError(output, context); + } + const data = await (0, import_core.parseJsonBody)(output.body, context); + let contents = {}; + contents = (0, import_smithy_client._json)(data); + const response = { + $metadata: deserializeMetadata(output), + ...contents + }; + return response; +}, "de_ListExportsCommand"); +var de_ListGlobalTablesCommand = /* @__PURE__ */ __name(async (output, context) => { + if (output.statusCode >= 300) { + return de_CommandError(output, context); + } + const data = await (0, import_core.parseJsonBody)(output.body, context); + let contents = {}; + contents = (0, import_smithy_client._json)(data); + const response = { + $metadata: deserializeMetadata(output), + ...contents + }; + return response; +}, "de_ListGlobalTablesCommand"); +var de_ListImportsCommand = /* @__PURE__ */ __name(async (output, context) => { + if (output.statusCode >= 300) { + return de_CommandError(output, context); + } + const data = await (0, import_core.parseJsonBody)(output.body, context); + let contents = {}; + contents = de_ListImportsOutput(data, context); + const response = { + $metadata: deserializeMetadata(output), + ...contents + }; + return response; +}, "de_ListImportsCommand"); +var de_ListTablesCommand = /* @__PURE__ */ __name(async (output, context) => { + if (output.statusCode >= 300) { + return de_CommandError(output, context); + } + const data = await (0, import_core.parseJsonBody)(output.body, context); + let contents = {}; + contents = (0, import_smithy_client._json)(data); + const response = { + $metadata: deserializeMetadata(output), + ...contents + }; + return response; +}, "de_ListTablesCommand"); +var de_ListTagsOfResourceCommand = /* @__PURE__ */ __name(async (output, context) => { + if (output.statusCode >= 300) { + return de_CommandError(output, context); + } + const data = await (0, import_core.parseJsonBody)(output.body, context); + let contents = {}; + contents = (0, 
import_smithy_client._json)(data); + const response = { + $metadata: deserializeMetadata(output), + ...contents + }; + return response; +}, "de_ListTagsOfResourceCommand"); +var de_PutItemCommand = /* @__PURE__ */ __name(async (output, context) => { + if (output.statusCode >= 300) { + return de_CommandError(output, context); + } + const data = await (0, import_core.parseJsonBody)(output.body, context); + let contents = {}; + contents = de_PutItemOutput(data, context); + const response = { + $metadata: deserializeMetadata(output), + ...contents + }; + return response; +}, "de_PutItemCommand"); +var de_PutResourcePolicyCommand = /* @__PURE__ */ __name(async (output, context) => { + if (output.statusCode >= 300) { + return de_CommandError(output, context); + } + const data = await (0, import_core.parseJsonBody)(output.body, context); + let contents = {}; + contents = (0, import_smithy_client._json)(data); + const response = { + $metadata: deserializeMetadata(output), + ...contents + }; + return response; +}, "de_PutResourcePolicyCommand"); +var de_QueryCommand = /* @__PURE__ */ __name(async (output, context) => { + if (output.statusCode >= 300) { + return de_CommandError(output, context); + } + const data = await (0, import_core.parseJsonBody)(output.body, context); + let contents = {}; + contents = de_QueryOutput(data, context); + const response = { + $metadata: deserializeMetadata(output), + ...contents + }; + return response; +}, "de_QueryCommand"); +var de_RestoreTableFromBackupCommand = /* @__PURE__ */ __name(async (output, context) => { + if (output.statusCode >= 300) { + return de_CommandError(output, context); + } + const data = await (0, import_core.parseJsonBody)(output.body, context); + let contents = {}; + contents = de_RestoreTableFromBackupOutput(data, context); + const response = { + $metadata: deserializeMetadata(output), + ...contents + }; + return response; +}, "de_RestoreTableFromBackupCommand"); +var de_RestoreTableToPointInTimeCommand = /* 
@__PURE__ */ __name(async (output, context) => { + if (output.statusCode >= 300) { + return de_CommandError(output, context); + } + const data = await (0, import_core.parseJsonBody)(output.body, context); + let contents = {}; + contents = de_RestoreTableToPointInTimeOutput(data, context); + const response = { + $metadata: deserializeMetadata(output), + ...contents + }; + return response; +}, "de_RestoreTableToPointInTimeCommand"); +var de_ScanCommand = /* @__PURE__ */ __name(async (output, context) => { + if (output.statusCode >= 300) { + return de_CommandError(output, context); + } + const data = await (0, import_core.parseJsonBody)(output.body, context); + let contents = {}; + contents = de_ScanOutput(data, context); + const response = { + $metadata: deserializeMetadata(output), + ...contents + }; + return response; +}, "de_ScanCommand"); +var de_TagResourceCommand = /* @__PURE__ */ __name(async (output, context) => { + if (output.statusCode >= 300) { + return de_CommandError(output, context); + } + await (0, import_smithy_client.collectBody)(output.body, context); + const response = { + $metadata: deserializeMetadata(output) + }; + return response; +}, "de_TagResourceCommand"); +var de_TransactGetItemsCommand = /* @__PURE__ */ __name(async (output, context) => { + if (output.statusCode >= 300) { + return de_CommandError(output, context); + } + const data = await (0, import_core.parseJsonBody)(output.body, context); + let contents = {}; + contents = de_TransactGetItemsOutput(data, context); + const response = { + $metadata: deserializeMetadata(output), + ...contents + }; + return response; +}, "de_TransactGetItemsCommand"); +var de_TransactWriteItemsCommand = /* @__PURE__ */ __name(async (output, context) => { + if (output.statusCode >= 300) { + return de_CommandError(output, context); + } + const data = await (0, import_core.parseJsonBody)(output.body, context); + let contents = {}; + contents = de_TransactWriteItemsOutput(data, context); + const response = { + 
$metadata: deserializeMetadata(output), + ...contents + }; + return response; +}, "de_TransactWriteItemsCommand"); +var de_UntagResourceCommand = /* @__PURE__ */ __name(async (output, context) => { + if (output.statusCode >= 300) { + return de_CommandError(output, context); + } + await (0, import_smithy_client.collectBody)(output.body, context); + const response = { + $metadata: deserializeMetadata(output) + }; + return response; +}, "de_UntagResourceCommand"); +var de_UpdateContinuousBackupsCommand = /* @__PURE__ */ __name(async (output, context) => { + if (output.statusCode >= 300) { + return de_CommandError(output, context); + } + const data = await (0, import_core.parseJsonBody)(output.body, context); + let contents = {}; + contents = de_UpdateContinuousBackupsOutput(data, context); + const response = { + $metadata: deserializeMetadata(output), + ...contents + }; + return response; +}, "de_UpdateContinuousBackupsCommand"); +var de_UpdateContributorInsightsCommand = /* @__PURE__ */ __name(async (output, context) => { + if (output.statusCode >= 300) { + return de_CommandError(output, context); + } + const data = await (0, import_core.parseJsonBody)(output.body, context); + let contents = {}; + contents = (0, import_smithy_client._json)(data); + const response = { + $metadata: deserializeMetadata(output), + ...contents + }; + return response; +}, "de_UpdateContributorInsightsCommand"); +var de_UpdateGlobalTableCommand = /* @__PURE__ */ __name(async (output, context) => { + if (output.statusCode >= 300) { + return de_CommandError(output, context); + } + const data = await (0, import_core.parseJsonBody)(output.body, context); + let contents = {}; + contents = de_UpdateGlobalTableOutput(data, context); + const response = { + $metadata: deserializeMetadata(output), + ...contents + }; + return response; +}, "de_UpdateGlobalTableCommand"); +var de_UpdateGlobalTableSettingsCommand = /* @__PURE__ */ __name(async (output, context) => { + if (output.statusCode >= 300) { + 
return de_CommandError(output, context); + } + const data = await (0, import_core.parseJsonBody)(output.body, context); + let contents = {}; + contents = de_UpdateGlobalTableSettingsOutput(data, context); + const response = { + $metadata: deserializeMetadata(output), + ...contents + }; + return response; +}, "de_UpdateGlobalTableSettingsCommand"); +var de_UpdateItemCommand = /* @__PURE__ */ __name(async (output, context) => { + if (output.statusCode >= 300) { + return de_CommandError(output, context); + } + const data = await (0, import_core.parseJsonBody)(output.body, context); + let contents = {}; + contents = de_UpdateItemOutput(data, context); + const response = { + $metadata: deserializeMetadata(output), + ...contents + }; + return response; +}, "de_UpdateItemCommand"); +var de_UpdateKinesisStreamingDestinationCommand = /* @__PURE__ */ __name(async (output, context) => { + if (output.statusCode >= 300) { + return de_CommandError(output, context); + } + const data = await (0, import_core.parseJsonBody)(output.body, context); + let contents = {}; + contents = (0, import_smithy_client._json)(data); + const response = { + $metadata: deserializeMetadata(output), + ...contents + }; + return response; +}, "de_UpdateKinesisStreamingDestinationCommand"); +var de_UpdateTableCommand = /* @__PURE__ */ __name(async (output, context) => { + if (output.statusCode >= 300) { + return de_CommandError(output, context); + } + const data = await (0, import_core.parseJsonBody)(output.body, context); + let contents = {}; + contents = de_UpdateTableOutput(data, context); + const response = { + $metadata: deserializeMetadata(output), + ...contents + }; + return response; +}, "de_UpdateTableCommand"); +var de_UpdateTableReplicaAutoScalingCommand = /* @__PURE__ */ __name(async (output, context) => { + if (output.statusCode >= 300) { + return de_CommandError(output, context); + } + const data = await (0, import_core.parseJsonBody)(output.body, context); + let contents = {}; + contents = 
de_UpdateTableReplicaAutoScalingOutput(data, context); + const response = { + $metadata: deserializeMetadata(output), + ...contents + }; + return response; +}, "de_UpdateTableReplicaAutoScalingCommand"); +var de_UpdateTimeToLiveCommand = /* @__PURE__ */ __name(async (output, context) => { + if (output.statusCode >= 300) { + return de_CommandError(output, context); + } + const data = await (0, import_core.parseJsonBody)(output.body, context); + let contents = {}; + contents = (0, import_smithy_client._json)(data); + const response = { + $metadata: deserializeMetadata(output), + ...contents + }; + return response; +}, "de_UpdateTimeToLiveCommand"); +var de_CommandError = /* @__PURE__ */ __name(async (output, context) => { + const parsedOutput = { + ...output, + body: await (0, import_core.parseJsonErrorBody)(output.body, context) + }; + const errorCode = (0, import_core.loadRestJsonErrorCode)(output, parsedOutput.body); + switch (errorCode) { + case "InternalServerError": + case "com.amazonaws.dynamodb#InternalServerError": + throw await de_InternalServerErrorRes(parsedOutput, context); + case "RequestLimitExceeded": + case "com.amazonaws.dynamodb#RequestLimitExceeded": + throw await de_RequestLimitExceededRes(parsedOutput, context); + case "InvalidEndpointException": + case "com.amazonaws.dynamodb#InvalidEndpointException": + throw await de_InvalidEndpointExceptionRes(parsedOutput, context); + case "ProvisionedThroughputExceededException": + case "com.amazonaws.dynamodb#ProvisionedThroughputExceededException": + throw await de_ProvisionedThroughputExceededExceptionRes(parsedOutput, context); + case "ResourceNotFoundException": + case "com.amazonaws.dynamodb#ResourceNotFoundException": + throw await de_ResourceNotFoundExceptionRes(parsedOutput, context); + case "ItemCollectionSizeLimitExceededException": + case "com.amazonaws.dynamodb#ItemCollectionSizeLimitExceededException": + throw await de_ItemCollectionSizeLimitExceededExceptionRes(parsedOutput, context); + case 
"BackupInUseException": + case "com.amazonaws.dynamodb#BackupInUseException": + throw await de_BackupInUseExceptionRes(parsedOutput, context); + case "ContinuousBackupsUnavailableException": + case "com.amazonaws.dynamodb#ContinuousBackupsUnavailableException": + throw await de_ContinuousBackupsUnavailableExceptionRes(parsedOutput, context); + case "LimitExceededException": + case "com.amazonaws.dynamodb#LimitExceededException": + throw await de_LimitExceededExceptionRes(parsedOutput, context); + case "TableInUseException": + case "com.amazonaws.dynamodb#TableInUseException": + throw await de_TableInUseExceptionRes(parsedOutput, context); + case "TableNotFoundException": + case "com.amazonaws.dynamodb#TableNotFoundException": + throw await de_TableNotFoundExceptionRes(parsedOutput, context); + case "GlobalTableAlreadyExistsException": + case "com.amazonaws.dynamodb#GlobalTableAlreadyExistsException": + throw await de_GlobalTableAlreadyExistsExceptionRes(parsedOutput, context); + case "ResourceInUseException": + case "com.amazonaws.dynamodb#ResourceInUseException": + throw await de_ResourceInUseExceptionRes(parsedOutput, context); + case "BackupNotFoundException": + case "com.amazonaws.dynamodb#BackupNotFoundException": + throw await de_BackupNotFoundExceptionRes(parsedOutput, context); + case "ConditionalCheckFailedException": + case "com.amazonaws.dynamodb#ConditionalCheckFailedException": + throw await de_ConditionalCheckFailedExceptionRes(parsedOutput, context); + case "ReplicatedWriteConflictException": + case "com.amazonaws.dynamodb#ReplicatedWriteConflictException": + throw await de_ReplicatedWriteConflictExceptionRes(parsedOutput, context); + case "TransactionConflictException": + case "com.amazonaws.dynamodb#TransactionConflictException": + throw await de_TransactionConflictExceptionRes(parsedOutput, context); + case "PolicyNotFoundException": + case "com.amazonaws.dynamodb#PolicyNotFoundException": + throw await de_PolicyNotFoundExceptionRes(parsedOutput, 
context); + case "ExportNotFoundException": + case "com.amazonaws.dynamodb#ExportNotFoundException": + throw await de_ExportNotFoundExceptionRes(parsedOutput, context); + case "GlobalTableNotFoundException": + case "com.amazonaws.dynamodb#GlobalTableNotFoundException": + throw await de_GlobalTableNotFoundExceptionRes(parsedOutput, context); + case "ImportNotFoundException": + case "com.amazonaws.dynamodb#ImportNotFoundException": + throw await de_ImportNotFoundExceptionRes(parsedOutput, context); + case "DuplicateItemException": + case "com.amazonaws.dynamodb#DuplicateItemException": + throw await de_DuplicateItemExceptionRes(parsedOutput, context); + case "IdempotentParameterMismatchException": + case "com.amazonaws.dynamodb#IdempotentParameterMismatchException": + throw await de_IdempotentParameterMismatchExceptionRes(parsedOutput, context); + case "TransactionCanceledException": + case "com.amazonaws.dynamodb#TransactionCanceledException": + throw await de_TransactionCanceledExceptionRes(parsedOutput, context); + case "TransactionInProgressException": + case "com.amazonaws.dynamodb#TransactionInProgressException": + throw await de_TransactionInProgressExceptionRes(parsedOutput, context); + case "ExportConflictException": + case "com.amazonaws.dynamodb#ExportConflictException": + throw await de_ExportConflictExceptionRes(parsedOutput, context); + case "InvalidExportTimeException": + case "com.amazonaws.dynamodb#InvalidExportTimeException": + throw await de_InvalidExportTimeExceptionRes(parsedOutput, context); + case "PointInTimeRecoveryUnavailableException": + case "com.amazonaws.dynamodb#PointInTimeRecoveryUnavailableException": + throw await de_PointInTimeRecoveryUnavailableExceptionRes(parsedOutput, context); + case "ImportConflictException": + case "com.amazonaws.dynamodb#ImportConflictException": + throw await de_ImportConflictExceptionRes(parsedOutput, context); + case "TableAlreadyExistsException": + case 
"com.amazonaws.dynamodb#TableAlreadyExistsException": + throw await de_TableAlreadyExistsExceptionRes(parsedOutput, context); + case "InvalidRestoreTimeException": + case "com.amazonaws.dynamodb#InvalidRestoreTimeException": + throw await de_InvalidRestoreTimeExceptionRes(parsedOutput, context); + case "ReplicaAlreadyExistsException": + case "com.amazonaws.dynamodb#ReplicaAlreadyExistsException": + throw await de_ReplicaAlreadyExistsExceptionRes(parsedOutput, context); + case "ReplicaNotFoundException": + case "com.amazonaws.dynamodb#ReplicaNotFoundException": + throw await de_ReplicaNotFoundExceptionRes(parsedOutput, context); + case "IndexNotFoundException": + case "com.amazonaws.dynamodb#IndexNotFoundException": + throw await de_IndexNotFoundExceptionRes(parsedOutput, context); + default: + const parsedBody = parsedOutput.body; + return throwDefaultError({ + output, + parsedBody, + errorCode + }); + } +}, "de_CommandError"); +var de_BackupInUseExceptionRes = /* @__PURE__ */ __name(async (parsedOutput, context) => { + const body = parsedOutput.body; + const deserialized = (0, import_smithy_client._json)(body); + const exception = new BackupInUseException({ + $metadata: deserializeMetadata(parsedOutput), + ...deserialized + }); + return (0, import_smithy_client.decorateServiceException)(exception, body); +}, "de_BackupInUseExceptionRes"); +var de_BackupNotFoundExceptionRes = /* @__PURE__ */ __name(async (parsedOutput, context) => { + const body = parsedOutput.body; + const deserialized = (0, import_smithy_client._json)(body); + const exception = new BackupNotFoundException({ + $metadata: deserializeMetadata(parsedOutput), + ...deserialized + }); + return (0, import_smithy_client.decorateServiceException)(exception, body); +}, "de_BackupNotFoundExceptionRes"); +var de_ConditionalCheckFailedExceptionRes = /* @__PURE__ */ __name(async (parsedOutput, context) => { + const body = parsedOutput.body; + const deserialized = de_ConditionalCheckFailedException(body, 
context); + const exception = new ConditionalCheckFailedException({ + $metadata: deserializeMetadata(parsedOutput), + ...deserialized + }); + return (0, import_smithy_client.decorateServiceException)(exception, body); +}, "de_ConditionalCheckFailedExceptionRes"); +var de_ContinuousBackupsUnavailableExceptionRes = /* @__PURE__ */ __name(async (parsedOutput, context) => { + const body = parsedOutput.body; + const deserialized = (0, import_smithy_client._json)(body); + const exception = new ContinuousBackupsUnavailableException({ + $metadata: deserializeMetadata(parsedOutput), + ...deserialized + }); + return (0, import_smithy_client.decorateServiceException)(exception, body); +}, "de_ContinuousBackupsUnavailableExceptionRes"); +var de_DuplicateItemExceptionRes = /* @__PURE__ */ __name(async (parsedOutput, context) => { + const body = parsedOutput.body; + const deserialized = (0, import_smithy_client._json)(body); + const exception = new DuplicateItemException({ + $metadata: deserializeMetadata(parsedOutput), + ...deserialized + }); + return (0, import_smithy_client.decorateServiceException)(exception, body); +}, "de_DuplicateItemExceptionRes"); +var de_ExportConflictExceptionRes = /* @__PURE__ */ __name(async (parsedOutput, context) => { + const body = parsedOutput.body; + const deserialized = (0, import_smithy_client._json)(body); + const exception = new ExportConflictException({ + $metadata: deserializeMetadata(parsedOutput), + ...deserialized + }); + return (0, import_smithy_client.decorateServiceException)(exception, body); +}, "de_ExportConflictExceptionRes"); +var de_ExportNotFoundExceptionRes = /* @__PURE__ */ __name(async (parsedOutput, context) => { + const body = parsedOutput.body; + const deserialized = (0, import_smithy_client._json)(body); + const exception = new ExportNotFoundException({ + $metadata: deserializeMetadata(parsedOutput), + ...deserialized + }); + return (0, import_smithy_client.decorateServiceException)(exception, body); +}, 
"de_ExportNotFoundExceptionRes"); +var de_GlobalTableAlreadyExistsExceptionRes = /* @__PURE__ */ __name(async (parsedOutput, context) => { + const body = parsedOutput.body; + const deserialized = (0, import_smithy_client._json)(body); + const exception = new GlobalTableAlreadyExistsException({ + $metadata: deserializeMetadata(parsedOutput), + ...deserialized + }); + return (0, import_smithy_client.decorateServiceException)(exception, body); +}, "de_GlobalTableAlreadyExistsExceptionRes"); +var de_GlobalTableNotFoundExceptionRes = /* @__PURE__ */ __name(async (parsedOutput, context) => { + const body = parsedOutput.body; + const deserialized = (0, import_smithy_client._json)(body); + const exception = new GlobalTableNotFoundException({ + $metadata: deserializeMetadata(parsedOutput), + ...deserialized + }); + return (0, import_smithy_client.decorateServiceException)(exception, body); +}, "de_GlobalTableNotFoundExceptionRes"); +var de_IdempotentParameterMismatchExceptionRes = /* @__PURE__ */ __name(async (parsedOutput, context) => { + const body = parsedOutput.body; + const deserialized = (0, import_smithy_client._json)(body); + const exception = new IdempotentParameterMismatchException({ + $metadata: deserializeMetadata(parsedOutput), + ...deserialized + }); + return (0, import_smithy_client.decorateServiceException)(exception, body); +}, "de_IdempotentParameterMismatchExceptionRes"); +var de_ImportConflictExceptionRes = /* @__PURE__ */ __name(async (parsedOutput, context) => { + const body = parsedOutput.body; + const deserialized = (0, import_smithy_client._json)(body); + const exception = new ImportConflictException({ + $metadata: deserializeMetadata(parsedOutput), + ...deserialized + }); + return (0, import_smithy_client.decorateServiceException)(exception, body); +}, "de_ImportConflictExceptionRes"); +var de_ImportNotFoundExceptionRes = /* @__PURE__ */ __name(async (parsedOutput, context) => { + const body = parsedOutput.body; + const deserialized = (0, 
import_smithy_client._json)(body); + const exception = new ImportNotFoundException({ + $metadata: deserializeMetadata(parsedOutput), + ...deserialized + }); + return (0, import_smithy_client.decorateServiceException)(exception, body); +}, "de_ImportNotFoundExceptionRes"); +var de_IndexNotFoundExceptionRes = /* @__PURE__ */ __name(async (parsedOutput, context) => { + const body = parsedOutput.body; + const deserialized = (0, import_smithy_client._json)(body); + const exception = new IndexNotFoundException({ + $metadata: deserializeMetadata(parsedOutput), + ...deserialized + }); + return (0, import_smithy_client.decorateServiceException)(exception, body); +}, "de_IndexNotFoundExceptionRes"); +var de_InternalServerErrorRes = /* @__PURE__ */ __name(async (parsedOutput, context) => { + const body = parsedOutput.body; + const deserialized = (0, import_smithy_client._json)(body); + const exception = new InternalServerError({ + $metadata: deserializeMetadata(parsedOutput), + ...deserialized + }); + return (0, import_smithy_client.decorateServiceException)(exception, body); +}, "de_InternalServerErrorRes"); +var de_InvalidEndpointExceptionRes = /* @__PURE__ */ __name(async (parsedOutput, context) => { + const body = parsedOutput.body; + const deserialized = (0, import_smithy_client._json)(body); + const exception = new InvalidEndpointException({ + $metadata: deserializeMetadata(parsedOutput), + ...deserialized + }); + return (0, import_smithy_client.decorateServiceException)(exception, body); +}, "de_InvalidEndpointExceptionRes"); +var de_InvalidExportTimeExceptionRes = /* @__PURE__ */ __name(async (parsedOutput, context) => { + const body = parsedOutput.body; + const deserialized = (0, import_smithy_client._json)(body); + const exception = new InvalidExportTimeException({ + $metadata: deserializeMetadata(parsedOutput), + ...deserialized + }); + return (0, import_smithy_client.decorateServiceException)(exception, body); +}, "de_InvalidExportTimeExceptionRes"); +var 
de_InvalidRestoreTimeExceptionRes = /* @__PURE__ */ __name(async (parsedOutput, context) => { + const body = parsedOutput.body; + const deserialized = (0, import_smithy_client._json)(body); + const exception = new InvalidRestoreTimeException({ + $metadata: deserializeMetadata(parsedOutput), + ...deserialized + }); + return (0, import_smithy_client.decorateServiceException)(exception, body); +}, "de_InvalidRestoreTimeExceptionRes"); +var de_ItemCollectionSizeLimitExceededExceptionRes = /* @__PURE__ */ __name(async (parsedOutput, context) => { + const body = parsedOutput.body; + const deserialized = (0, import_smithy_client._json)(body); + const exception = new ItemCollectionSizeLimitExceededException({ + $metadata: deserializeMetadata(parsedOutput), + ...deserialized + }); + return (0, import_smithy_client.decorateServiceException)(exception, body); +}, "de_ItemCollectionSizeLimitExceededExceptionRes"); +var de_LimitExceededExceptionRes = /* @__PURE__ */ __name(async (parsedOutput, context) => { + const body = parsedOutput.body; + const deserialized = (0, import_smithy_client._json)(body); + const exception = new LimitExceededException({ + $metadata: deserializeMetadata(parsedOutput), + ...deserialized + }); + return (0, import_smithy_client.decorateServiceException)(exception, body); +}, "de_LimitExceededExceptionRes"); +var de_PointInTimeRecoveryUnavailableExceptionRes = /* @__PURE__ */ __name(async (parsedOutput, context) => { + const body = parsedOutput.body; + const deserialized = (0, import_smithy_client._json)(body); + const exception = new PointInTimeRecoveryUnavailableException({ + $metadata: deserializeMetadata(parsedOutput), + ...deserialized + }); + return (0, import_smithy_client.decorateServiceException)(exception, body); +}, "de_PointInTimeRecoveryUnavailableExceptionRes"); +var de_PolicyNotFoundExceptionRes = /* @__PURE__ */ __name(async (parsedOutput, context) => { + const body = parsedOutput.body; + const deserialized = (0, 
import_smithy_client._json)(body); + const exception = new PolicyNotFoundException({ + $metadata: deserializeMetadata(parsedOutput), + ...deserialized + }); + return (0, import_smithy_client.decorateServiceException)(exception, body); +}, "de_PolicyNotFoundExceptionRes"); +var de_ProvisionedThroughputExceededExceptionRes = /* @__PURE__ */ __name(async (parsedOutput, context) => { + const body = parsedOutput.body; + const deserialized = (0, import_smithy_client._json)(body); + const exception = new ProvisionedThroughputExceededException({ + $metadata: deserializeMetadata(parsedOutput), + ...deserialized + }); + return (0, import_smithy_client.decorateServiceException)(exception, body); +}, "de_ProvisionedThroughputExceededExceptionRes"); +var de_ReplicaAlreadyExistsExceptionRes = /* @__PURE__ */ __name(async (parsedOutput, context) => { + const body = parsedOutput.body; + const deserialized = (0, import_smithy_client._json)(body); + const exception = new ReplicaAlreadyExistsException({ + $metadata: deserializeMetadata(parsedOutput), + ...deserialized + }); + return (0, import_smithy_client.decorateServiceException)(exception, body); +}, "de_ReplicaAlreadyExistsExceptionRes"); +var de_ReplicaNotFoundExceptionRes = /* @__PURE__ */ __name(async (parsedOutput, context) => { + const body = parsedOutput.body; + const deserialized = (0, import_smithy_client._json)(body); + const exception = new ReplicaNotFoundException({ + $metadata: deserializeMetadata(parsedOutput), + ...deserialized + }); + return (0, import_smithy_client.decorateServiceException)(exception, body); +}, "de_ReplicaNotFoundExceptionRes"); +var de_ReplicatedWriteConflictExceptionRes = /* @__PURE__ */ __name(async (parsedOutput, context) => { + const body = parsedOutput.body; + const deserialized = (0, import_smithy_client._json)(body); + const exception = new ReplicatedWriteConflictException({ + $metadata: deserializeMetadata(parsedOutput), + ...deserialized + }); + return (0, 
import_smithy_client.decorateServiceException)(exception, body); +}, "de_ReplicatedWriteConflictExceptionRes"); +var de_RequestLimitExceededRes = /* @__PURE__ */ __name(async (parsedOutput, context) => { + const body = parsedOutput.body; + const deserialized = (0, import_smithy_client._json)(body); + const exception = new RequestLimitExceeded({ + $metadata: deserializeMetadata(parsedOutput), + ...deserialized + }); + return (0, import_smithy_client.decorateServiceException)(exception, body); +}, "de_RequestLimitExceededRes"); +var de_ResourceInUseExceptionRes = /* @__PURE__ */ __name(async (parsedOutput, context) => { + const body = parsedOutput.body; + const deserialized = (0, import_smithy_client._json)(body); + const exception = new ResourceInUseException({ + $metadata: deserializeMetadata(parsedOutput), + ...deserialized + }); + return (0, import_smithy_client.decorateServiceException)(exception, body); +}, "de_ResourceInUseExceptionRes"); +var de_ResourceNotFoundExceptionRes = /* @__PURE__ */ __name(async (parsedOutput, context) => { + const body = parsedOutput.body; + const deserialized = (0, import_smithy_client._json)(body); + const exception = new ResourceNotFoundException({ + $metadata: deserializeMetadata(parsedOutput), + ...deserialized + }); + return (0, import_smithy_client.decorateServiceException)(exception, body); +}, "de_ResourceNotFoundExceptionRes"); +var de_TableAlreadyExistsExceptionRes = /* @__PURE__ */ __name(async (parsedOutput, context) => { + const body = parsedOutput.body; + const deserialized = (0, import_smithy_client._json)(body); + const exception = new TableAlreadyExistsException({ + $metadata: deserializeMetadata(parsedOutput), + ...deserialized + }); + return (0, import_smithy_client.decorateServiceException)(exception, body); +}, "de_TableAlreadyExistsExceptionRes"); +var de_TableInUseExceptionRes = /* @__PURE__ */ __name(async (parsedOutput, context) => { + const body = parsedOutput.body; + const deserialized = (0, 
import_smithy_client._json)(body); + const exception = new TableInUseException({ + $metadata: deserializeMetadata(parsedOutput), + ...deserialized + }); + return (0, import_smithy_client.decorateServiceException)(exception, body); +}, "de_TableInUseExceptionRes"); +var de_TableNotFoundExceptionRes = /* @__PURE__ */ __name(async (parsedOutput, context) => { + const body = parsedOutput.body; + const deserialized = (0, import_smithy_client._json)(body); + const exception = new TableNotFoundException({ + $metadata: deserializeMetadata(parsedOutput), + ...deserialized + }); + return (0, import_smithy_client.decorateServiceException)(exception, body); +}, "de_TableNotFoundExceptionRes"); +var de_TransactionCanceledExceptionRes = /* @__PURE__ */ __name(async (parsedOutput, context) => { + const body = parsedOutput.body; + const deserialized = de_TransactionCanceledException(body, context); + const exception = new TransactionCanceledException({ + $metadata: deserializeMetadata(parsedOutput), + ...deserialized + }); + return (0, import_smithy_client.decorateServiceException)(exception, body); +}, "de_TransactionCanceledExceptionRes"); +var de_TransactionConflictExceptionRes = /* @__PURE__ */ __name(async (parsedOutput, context) => { + const body = parsedOutput.body; + const deserialized = (0, import_smithy_client._json)(body); + const exception = new TransactionConflictException({ + $metadata: deserializeMetadata(parsedOutput), + ...deserialized + }); + return (0, import_smithy_client.decorateServiceException)(exception, body); +}, "de_TransactionConflictExceptionRes"); +var de_TransactionInProgressExceptionRes = /* @__PURE__ */ __name(async (parsedOutput, context) => { + const body = parsedOutput.body; + const deserialized = (0, import_smithy_client._json)(body); + const exception = new TransactionInProgressException({ + $metadata: deserializeMetadata(parsedOutput), + ...deserialized + }); + return (0, import_smithy_client.decorateServiceException)(exception, body); +}, 
"de_TransactionInProgressExceptionRes"); +var se_AttributeUpdates = /* @__PURE__ */ __name((input, context) => { + return Object.entries(input).reduce((acc, [key, value]) => { + if (value === null) { + return acc; + } + acc[key] = se_AttributeValueUpdate(value, context); + return acc; + }, {}); +}, "se_AttributeUpdates"); +var se_AttributeValue = /* @__PURE__ */ __name((input, context) => { + return AttributeValue.visit(input, { + B: /* @__PURE__ */ __name((value) => ({ B: context.base64Encoder(value) }), "B"), + BOOL: /* @__PURE__ */ __name((value) => ({ BOOL: value }), "BOOL"), + BS: /* @__PURE__ */ __name((value) => ({ BS: se_BinarySetAttributeValue(value, context) }), "BS"), + L: /* @__PURE__ */ __name((value) => ({ L: se_ListAttributeValue(value, context) }), "L"), + M: /* @__PURE__ */ __name((value) => ({ M: se_MapAttributeValue(value, context) }), "M"), + N: /* @__PURE__ */ __name((value) => ({ N: value }), "N"), + NS: /* @__PURE__ */ __name((value) => ({ NS: (0, import_smithy_client._json)(value) }), "NS"), + NULL: /* @__PURE__ */ __name((value) => ({ NULL: value }), "NULL"), + S: /* @__PURE__ */ __name((value) => ({ S: value }), "S"), + SS: /* @__PURE__ */ __name((value) => ({ SS: (0, import_smithy_client._json)(value) }), "SS"), + _: /* @__PURE__ */ __name((name, value) => ({ [name]: value }), "_") + }); +}, "se_AttributeValue"); +var se_AttributeValueList = /* @__PURE__ */ __name((input, context) => { + return input.filter((e) => e != null).map((entry) => { + return se_AttributeValue(entry, context); + }); +}, "se_AttributeValueList"); +var se_AttributeValueUpdate = /* @__PURE__ */ __name((input, context) => { + return (0, import_smithy_client.take)(input, { + Action: [], + Value: /* @__PURE__ */ __name((_) => se_AttributeValue(_, context), "Value") + }); +}, "se_AttributeValueUpdate"); +var se_AutoScalingPolicyUpdate = /* @__PURE__ */ __name((input, context) => { + return (0, import_smithy_client.take)(input, { + PolicyName: [], + 
TargetTrackingScalingPolicyConfiguration: /* @__PURE__ */ __name((_) => se_AutoScalingTargetTrackingScalingPolicyConfigurationUpdate(_, context), "TargetTrackingScalingPolicyConfiguration") + }); +}, "se_AutoScalingPolicyUpdate"); +var se_AutoScalingSettingsUpdate = /* @__PURE__ */ __name((input, context) => { + return (0, import_smithy_client.take)(input, { + AutoScalingDisabled: [], + AutoScalingRoleArn: [], + MaximumUnits: [], + MinimumUnits: [], + ScalingPolicyUpdate: /* @__PURE__ */ __name((_) => se_AutoScalingPolicyUpdate(_, context), "ScalingPolicyUpdate") + }); +}, "se_AutoScalingSettingsUpdate"); +var se_AutoScalingTargetTrackingScalingPolicyConfigurationUpdate = /* @__PURE__ */ __name((input, context) => { + return (0, import_smithy_client.take)(input, { + DisableScaleIn: [], + ScaleInCooldown: [], + ScaleOutCooldown: [], + TargetValue: import_smithy_client.serializeFloat + }); +}, "se_AutoScalingTargetTrackingScalingPolicyConfigurationUpdate"); +var se_BatchExecuteStatementInput = /* @__PURE__ */ __name((input, context) => { + return (0, import_smithy_client.take)(input, { + ReturnConsumedCapacity: [], + Statements: /* @__PURE__ */ __name((_) => se_PartiQLBatchRequest(_, context), "Statements") + }); +}, "se_BatchExecuteStatementInput"); +var se_BatchGetItemInput = /* @__PURE__ */ __name((input, context) => { + return (0, import_smithy_client.take)(input, { + RequestItems: /* @__PURE__ */ __name((_) => se_BatchGetRequestMap(_, context), "RequestItems"), + ReturnConsumedCapacity: [] + }); +}, "se_BatchGetItemInput"); +var se_BatchGetRequestMap = /* @__PURE__ */ __name((input, context) => { + return Object.entries(input).reduce((acc, [key, value]) => { + if (value === null) { + return acc; + } + acc[key] = se_KeysAndAttributes(value, context); + return acc; + }, {}); +}, "se_BatchGetRequestMap"); +var se_BatchStatementRequest = /* @__PURE__ */ __name((input, context) => { + return (0, import_smithy_client.take)(input, { + ConsistentRead: [], + Parameters: 
/* @__PURE__ */ __name((_) => se_PreparedStatementParameters(_, context), "Parameters"), + ReturnValuesOnConditionCheckFailure: [], + Statement: [] + }); +}, "se_BatchStatementRequest"); +var se_BatchWriteItemInput = /* @__PURE__ */ __name((input, context) => { + return (0, import_smithy_client.take)(input, { + RequestItems: /* @__PURE__ */ __name((_) => se_BatchWriteItemRequestMap(_, context), "RequestItems"), + ReturnConsumedCapacity: [], + ReturnItemCollectionMetrics: [] + }); +}, "se_BatchWriteItemInput"); +var se_BatchWriteItemRequestMap = /* @__PURE__ */ __name((input, context) => { + return Object.entries(input).reduce((acc, [key, value]) => { + if (value === null) { + return acc; + } + acc[key] = se_WriteRequests(value, context); + return acc; + }, {}); +}, "se_BatchWriteItemRequestMap"); +var se_BinarySetAttributeValue = /* @__PURE__ */ __name((input, context) => { + return input.filter((e) => e != null).map((entry) => { + return context.base64Encoder(entry); + }); +}, "se_BinarySetAttributeValue"); +var se_Condition = /* @__PURE__ */ __name((input, context) => { + return (0, import_smithy_client.take)(input, { + AttributeValueList: /* @__PURE__ */ __name((_) => se_AttributeValueList(_, context), "AttributeValueList"), + ComparisonOperator: [] + }); +}, "se_Condition"); +var se_ConditionCheck = /* @__PURE__ */ __name((input, context) => { + return (0, import_smithy_client.take)(input, { + ConditionExpression: [], + ExpressionAttributeNames: import_smithy_client._json, + ExpressionAttributeValues: /* @__PURE__ */ __name((_) => se_ExpressionAttributeValueMap(_, context), "ExpressionAttributeValues"), + Key: /* @__PURE__ */ __name((_) => se_Key(_, context), "Key"), + ReturnValuesOnConditionCheckFailure: [], + TableName: [] + }); +}, "se_ConditionCheck"); +var se_Delete = /* @__PURE__ */ __name((input, context) => { + return (0, import_smithy_client.take)(input, { + ConditionExpression: [], + ExpressionAttributeNames: import_smithy_client._json, + 
ExpressionAttributeValues: /* @__PURE__ */ __name((_) => se_ExpressionAttributeValueMap(_, context), "ExpressionAttributeValues"), + Key: /* @__PURE__ */ __name((_) => se_Key(_, context), "Key"), + ReturnValuesOnConditionCheckFailure: [], + TableName: [] + }); +}, "se_Delete"); +var se_DeleteItemInput = /* @__PURE__ */ __name((input, context) => { + return (0, import_smithy_client.take)(input, { + ConditionExpression: [], + ConditionalOperator: [], + Expected: /* @__PURE__ */ __name((_) => se_ExpectedAttributeMap(_, context), "Expected"), + ExpressionAttributeNames: import_smithy_client._json, + ExpressionAttributeValues: /* @__PURE__ */ __name((_) => se_ExpressionAttributeValueMap(_, context), "ExpressionAttributeValues"), + Key: /* @__PURE__ */ __name((_) => se_Key(_, context), "Key"), + ReturnConsumedCapacity: [], + ReturnItemCollectionMetrics: [], + ReturnValues: [], + ReturnValuesOnConditionCheckFailure: [], + TableName: [] + }); +}, "se_DeleteItemInput"); +var se_DeleteRequest = /* @__PURE__ */ __name((input, context) => { + return (0, import_smithy_client.take)(input, { + Key: /* @__PURE__ */ __name((_) => se_Key(_, context), "Key") + }); +}, "se_DeleteRequest"); +var se_ExecuteStatementInput = /* @__PURE__ */ __name((input, context) => { + return (0, import_smithy_client.take)(input, { + ConsistentRead: [], + Limit: [], + NextToken: [], + Parameters: /* @__PURE__ */ __name((_) => se_PreparedStatementParameters(_, context), "Parameters"), + ReturnConsumedCapacity: [], + ReturnValuesOnConditionCheckFailure: [], + Statement: [] + }); +}, "se_ExecuteStatementInput"); +var se_ExecuteTransactionInput = /* @__PURE__ */ __name((input, context) => { + return (0, import_smithy_client.take)(input, { + ClientRequestToken: [true, (_) => _ ?? 
(0, import_uuid.v4)()], + ReturnConsumedCapacity: [], + TransactStatements: /* @__PURE__ */ __name((_) => se_ParameterizedStatements(_, context), "TransactStatements") + }); +}, "se_ExecuteTransactionInput"); +var se_ExpectedAttributeMap = /* @__PURE__ */ __name((input, context) => { + return Object.entries(input).reduce((acc, [key, value]) => { + if (value === null) { + return acc; + } + acc[key] = se_ExpectedAttributeValue(value, context); + return acc; + }, {}); +}, "se_ExpectedAttributeMap"); +var se_ExpectedAttributeValue = /* @__PURE__ */ __name((input, context) => { + return (0, import_smithy_client.take)(input, { + AttributeValueList: /* @__PURE__ */ __name((_) => se_AttributeValueList(_, context), "AttributeValueList"), + ComparisonOperator: [], + Exists: [], + Value: /* @__PURE__ */ __name((_) => se_AttributeValue(_, context), "Value") + }); +}, "se_ExpectedAttributeValue"); +var se_ExportTableToPointInTimeInput = /* @__PURE__ */ __name((input, context) => { + return (0, import_smithy_client.take)(input, { + ClientToken: [true, (_) => _ ?? 
(0, import_uuid.v4)()], + ExportFormat: [], + ExportTime: /* @__PURE__ */ __name((_) => _.getTime() / 1e3, "ExportTime"), + ExportType: [], + IncrementalExportSpecification: /* @__PURE__ */ __name((_) => se_IncrementalExportSpecification(_, context), "IncrementalExportSpecification"), + S3Bucket: [], + S3BucketOwner: [], + S3Prefix: [], + S3SseAlgorithm: [], + S3SseKmsKeyId: [], + TableArn: [] + }); +}, "se_ExportTableToPointInTimeInput"); +var se_ExpressionAttributeValueMap = /* @__PURE__ */ __name((input, context) => { + return Object.entries(input).reduce((acc, [key, value]) => { + if (value === null) { + return acc; + } + acc[key] = se_AttributeValue(value, context); + return acc; + }, {}); +}, "se_ExpressionAttributeValueMap"); +var se_FilterConditionMap = /* @__PURE__ */ __name((input, context) => { + return Object.entries(input).reduce((acc, [key, value]) => { + if (value === null) { + return acc; + } + acc[key] = se_Condition(value, context); + return acc; + }, {}); +}, "se_FilterConditionMap"); +var se_Get = /* @__PURE__ */ __name((input, context) => { + return (0, import_smithy_client.take)(input, { + ExpressionAttributeNames: import_smithy_client._json, + Key: /* @__PURE__ */ __name((_) => se_Key(_, context), "Key"), + ProjectionExpression: [], + TableName: [] + }); +}, "se_Get"); +var se_GetItemInput = /* @__PURE__ */ __name((input, context) => { + return (0, import_smithy_client.take)(input, { + AttributesToGet: import_smithy_client._json, + ConsistentRead: [], + ExpressionAttributeNames: import_smithy_client._json, + Key: /* @__PURE__ */ __name((_) => se_Key(_, context), "Key"), + ProjectionExpression: [], + ReturnConsumedCapacity: [], + TableName: [] + }); +}, "se_GetItemInput"); +var se_GlobalSecondaryIndexAutoScalingUpdate = /* @__PURE__ */ __name((input, context) => { + return (0, import_smithy_client.take)(input, { + IndexName: [], + ProvisionedWriteCapacityAutoScalingUpdate: /* @__PURE__ */ __name((_) => se_AutoScalingSettingsUpdate(_, context), 
"ProvisionedWriteCapacityAutoScalingUpdate") + }); +}, "se_GlobalSecondaryIndexAutoScalingUpdate"); +var se_GlobalSecondaryIndexAutoScalingUpdateList = /* @__PURE__ */ __name((input, context) => { + return input.filter((e) => e != null).map((entry) => { + return se_GlobalSecondaryIndexAutoScalingUpdate(entry, context); + }); +}, "se_GlobalSecondaryIndexAutoScalingUpdateList"); +var se_GlobalTableGlobalSecondaryIndexSettingsUpdate = /* @__PURE__ */ __name((input, context) => { + return (0, import_smithy_client.take)(input, { + IndexName: [], + ProvisionedWriteCapacityAutoScalingSettingsUpdate: /* @__PURE__ */ __name((_) => se_AutoScalingSettingsUpdate(_, context), "ProvisionedWriteCapacityAutoScalingSettingsUpdate"), + ProvisionedWriteCapacityUnits: [] + }); +}, "se_GlobalTableGlobalSecondaryIndexSettingsUpdate"); +var se_GlobalTableGlobalSecondaryIndexSettingsUpdateList = /* @__PURE__ */ __name((input, context) => { + return input.filter((e) => e != null).map((entry) => { + return se_GlobalTableGlobalSecondaryIndexSettingsUpdate(entry, context); + }); +}, "se_GlobalTableGlobalSecondaryIndexSettingsUpdateList"); +var se_ImportTableInput = /* @__PURE__ */ __name((input, context) => { + return (0, import_smithy_client.take)(input, { + ClientToken: [true, (_) => _ ?? 
(0, import_uuid.v4)()], + InputCompressionType: [], + InputFormat: [], + InputFormatOptions: import_smithy_client._json, + S3BucketSource: import_smithy_client._json, + TableCreationParameters: import_smithy_client._json + }); +}, "se_ImportTableInput"); +var se_IncrementalExportSpecification = /* @__PURE__ */ __name((input, context) => { + return (0, import_smithy_client.take)(input, { + ExportFromTime: /* @__PURE__ */ __name((_) => _.getTime() / 1e3, "ExportFromTime"), + ExportToTime: /* @__PURE__ */ __name((_) => _.getTime() / 1e3, "ExportToTime"), + ExportViewType: [] + }); +}, "se_IncrementalExportSpecification"); +var se_Key = /* @__PURE__ */ __name((input, context) => { + return Object.entries(input).reduce((acc, [key, value]) => { + if (value === null) { + return acc; + } + acc[key] = se_AttributeValue(value, context); + return acc; + }, {}); +}, "se_Key"); +var se_KeyConditions = /* @__PURE__ */ __name((input, context) => { + return Object.entries(input).reduce((acc, [key, value]) => { + if (value === null) { + return acc; + } + acc[key] = se_Condition(value, context); + return acc; + }, {}); +}, "se_KeyConditions"); +var se_KeyList = /* @__PURE__ */ __name((input, context) => { + return input.filter((e) => e != null).map((entry) => { + return se_Key(entry, context); + }); +}, "se_KeyList"); +var se_KeysAndAttributes = /* @__PURE__ */ __name((input, context) => { + return (0, import_smithy_client.take)(input, { + AttributesToGet: import_smithy_client._json, + ConsistentRead: [], + ExpressionAttributeNames: import_smithy_client._json, + Keys: /* @__PURE__ */ __name((_) => se_KeyList(_, context), "Keys"), + ProjectionExpression: [] + }); +}, "se_KeysAndAttributes"); +var se_ListAttributeValue = /* @__PURE__ */ __name((input, context) => { + return input.filter((e) => e != null).map((entry) => { + return se_AttributeValue(entry, context); + }); +}, "se_ListAttributeValue"); +var se_ListBackupsInput = /* @__PURE__ */ __name((input, context) => { + return (0, 
import_smithy_client.take)(input, { + BackupType: [], + ExclusiveStartBackupArn: [], + Limit: [], + TableName: [], + TimeRangeLowerBound: /* @__PURE__ */ __name((_) => _.getTime() / 1e3, "TimeRangeLowerBound"), + TimeRangeUpperBound: /* @__PURE__ */ __name((_) => _.getTime() / 1e3, "TimeRangeUpperBound") + }); +}, "se_ListBackupsInput"); +var se_MapAttributeValue = /* @__PURE__ */ __name((input, context) => { + return Object.entries(input).reduce((acc, [key, value]) => { + if (value === null) { + return acc; + } + acc[key] = se_AttributeValue(value, context); + return acc; + }, {}); +}, "se_MapAttributeValue"); +var se_ParameterizedStatement = /* @__PURE__ */ __name((input, context) => { + return (0, import_smithy_client.take)(input, { + Parameters: /* @__PURE__ */ __name((_) => se_PreparedStatementParameters(_, context), "Parameters"), + ReturnValuesOnConditionCheckFailure: [], + Statement: [] + }); +}, "se_ParameterizedStatement"); +var se_ParameterizedStatements = /* @__PURE__ */ __name((input, context) => { + return input.filter((e) => e != null).map((entry) => { + return se_ParameterizedStatement(entry, context); + }); +}, "se_ParameterizedStatements"); +var se_PartiQLBatchRequest = /* @__PURE__ */ __name((input, context) => { + return input.filter((e) => e != null).map((entry) => { + return se_BatchStatementRequest(entry, context); + }); +}, "se_PartiQLBatchRequest"); +var se_PreparedStatementParameters = /* @__PURE__ */ __name((input, context) => { + return input.filter((e) => e != null).map((entry) => { + return se_AttributeValue(entry, context); + }); +}, "se_PreparedStatementParameters"); +var se_Put = /* @__PURE__ */ __name((input, context) => { + return (0, import_smithy_client.take)(input, { + ConditionExpression: [], + ExpressionAttributeNames: import_smithy_client._json, + ExpressionAttributeValues: /* @__PURE__ */ __name((_) => se_ExpressionAttributeValueMap(_, context), "ExpressionAttributeValues"), + Item: /* @__PURE__ */ __name((_) => 
se_PutItemInputAttributeMap(_, context), "Item"), + ReturnValuesOnConditionCheckFailure: [], + TableName: [] + }); +}, "se_Put"); +var se_PutItemInput = /* @__PURE__ */ __name((input, context) => { + return (0, import_smithy_client.take)(input, { + ConditionExpression: [], + ConditionalOperator: [], + Expected: /* @__PURE__ */ __name((_) => se_ExpectedAttributeMap(_, context), "Expected"), + ExpressionAttributeNames: import_smithy_client._json, + ExpressionAttributeValues: /* @__PURE__ */ __name((_) => se_ExpressionAttributeValueMap(_, context), "ExpressionAttributeValues"), + Item: /* @__PURE__ */ __name((_) => se_PutItemInputAttributeMap(_, context), "Item"), + ReturnConsumedCapacity: [], + ReturnItemCollectionMetrics: [], + ReturnValues: [], + ReturnValuesOnConditionCheckFailure: [], + TableName: [] + }); +}, "se_PutItemInput"); +var se_PutItemInputAttributeMap = /* @__PURE__ */ __name((input, context) => { + return Object.entries(input).reduce((acc, [key, value]) => { + if (value === null) { + return acc; + } + acc[key] = se_AttributeValue(value, context); + return acc; + }, {}); +}, "se_PutItemInputAttributeMap"); +var se_PutRequest = /* @__PURE__ */ __name((input, context) => { + return (0, import_smithy_client.take)(input, { + Item: /* @__PURE__ */ __name((_) => se_PutItemInputAttributeMap(_, context), "Item") + }); +}, "se_PutRequest"); +var se_QueryInput = /* @__PURE__ */ __name((input, context) => { + return (0, import_smithy_client.take)(input, { + AttributesToGet: import_smithy_client._json, + ConditionalOperator: [], + ConsistentRead: [], + ExclusiveStartKey: /* @__PURE__ */ __name((_) => se_Key(_, context), "ExclusiveStartKey"), + ExpressionAttributeNames: import_smithy_client._json, + ExpressionAttributeValues: /* @__PURE__ */ __name((_) => se_ExpressionAttributeValueMap(_, context), "ExpressionAttributeValues"), + FilterExpression: [], + IndexName: [], + KeyConditionExpression: [], + KeyConditions: /* @__PURE__ */ __name((_) => se_KeyConditions(_, 
context), "KeyConditions"), + Limit: [], + ProjectionExpression: [], + QueryFilter: /* @__PURE__ */ __name((_) => se_FilterConditionMap(_, context), "QueryFilter"), + ReturnConsumedCapacity: [], + ScanIndexForward: [], + Select: [], + TableName: [] + }); +}, "se_QueryInput"); +var se_ReplicaAutoScalingUpdate = /* @__PURE__ */ __name((input, context) => { + return (0, import_smithy_client.take)(input, { + RegionName: [], + ReplicaGlobalSecondaryIndexUpdates: /* @__PURE__ */ __name((_) => se_ReplicaGlobalSecondaryIndexAutoScalingUpdateList(_, context), "ReplicaGlobalSecondaryIndexUpdates"), + ReplicaProvisionedReadCapacityAutoScalingUpdate: /* @__PURE__ */ __name((_) => se_AutoScalingSettingsUpdate(_, context), "ReplicaProvisionedReadCapacityAutoScalingUpdate") + }); +}, "se_ReplicaAutoScalingUpdate"); +var se_ReplicaAutoScalingUpdateList = /* @__PURE__ */ __name((input, context) => { + return input.filter((e) => e != null).map((entry) => { + return se_ReplicaAutoScalingUpdate(entry, context); + }); +}, "se_ReplicaAutoScalingUpdateList"); +var se_ReplicaGlobalSecondaryIndexAutoScalingUpdate = /* @__PURE__ */ __name((input, context) => { + return (0, import_smithy_client.take)(input, { + IndexName: [], + ProvisionedReadCapacityAutoScalingUpdate: /* @__PURE__ */ __name((_) => se_AutoScalingSettingsUpdate(_, context), "ProvisionedReadCapacityAutoScalingUpdate") + }); +}, "se_ReplicaGlobalSecondaryIndexAutoScalingUpdate"); +var se_ReplicaGlobalSecondaryIndexAutoScalingUpdateList = /* @__PURE__ */ __name((input, context) => { + return input.filter((e) => e != null).map((entry) => { + return se_ReplicaGlobalSecondaryIndexAutoScalingUpdate(entry, context); + }); +}, "se_ReplicaGlobalSecondaryIndexAutoScalingUpdateList"); +var se_ReplicaGlobalSecondaryIndexSettingsUpdate = /* @__PURE__ */ __name((input, context) => { + return (0, import_smithy_client.take)(input, { + IndexName: [], + ProvisionedReadCapacityAutoScalingSettingsUpdate: /* @__PURE__ */ __name((_) => 
se_AutoScalingSettingsUpdate(_, context), "ProvisionedReadCapacityAutoScalingSettingsUpdate"), + ProvisionedReadCapacityUnits: [] + }); +}, "se_ReplicaGlobalSecondaryIndexSettingsUpdate"); +var se_ReplicaGlobalSecondaryIndexSettingsUpdateList = /* @__PURE__ */ __name((input, context) => { + return input.filter((e) => e != null).map((entry) => { + return se_ReplicaGlobalSecondaryIndexSettingsUpdate(entry, context); + }); +}, "se_ReplicaGlobalSecondaryIndexSettingsUpdateList"); +var se_ReplicaSettingsUpdate = /* @__PURE__ */ __name((input, context) => { + return (0, import_smithy_client.take)(input, { + RegionName: [], + ReplicaGlobalSecondaryIndexSettingsUpdate: /* @__PURE__ */ __name((_) => se_ReplicaGlobalSecondaryIndexSettingsUpdateList(_, context), "ReplicaGlobalSecondaryIndexSettingsUpdate"), + ReplicaProvisionedReadCapacityAutoScalingSettingsUpdate: /* @__PURE__ */ __name((_) => se_AutoScalingSettingsUpdate(_, context), "ReplicaProvisionedReadCapacityAutoScalingSettingsUpdate"), + ReplicaProvisionedReadCapacityUnits: [], + ReplicaTableClass: [] + }); +}, "se_ReplicaSettingsUpdate"); +var se_ReplicaSettingsUpdateList = /* @__PURE__ */ __name((input, context) => { + return input.filter((e) => e != null).map((entry) => { + return se_ReplicaSettingsUpdate(entry, context); + }); +}, "se_ReplicaSettingsUpdateList"); +var se_RestoreTableToPointInTimeInput = /* @__PURE__ */ __name((input, context) => { + return (0, import_smithy_client.take)(input, { + BillingModeOverride: [], + GlobalSecondaryIndexOverride: import_smithy_client._json, + LocalSecondaryIndexOverride: import_smithy_client._json, + OnDemandThroughputOverride: import_smithy_client._json, + ProvisionedThroughputOverride: import_smithy_client._json, + RestoreDateTime: /* @__PURE__ */ __name((_) => _.getTime() / 1e3, "RestoreDateTime"), + SSESpecificationOverride: import_smithy_client._json, + SourceTableArn: [], + SourceTableName: [], + TargetTableName: [], + UseLatestRestorableTime: [] + }); +}, 
"se_RestoreTableToPointInTimeInput"); +var se_ScanInput = /* @__PURE__ */ __name((input, context) => { + return (0, import_smithy_client.take)(input, { + AttributesToGet: import_smithy_client._json, + ConditionalOperator: [], + ConsistentRead: [], + ExclusiveStartKey: /* @__PURE__ */ __name((_) => se_Key(_, context), "ExclusiveStartKey"), + ExpressionAttributeNames: import_smithy_client._json, + ExpressionAttributeValues: /* @__PURE__ */ __name((_) => se_ExpressionAttributeValueMap(_, context), "ExpressionAttributeValues"), + FilterExpression: [], + IndexName: [], + Limit: [], + ProjectionExpression: [], + ReturnConsumedCapacity: [], + ScanFilter: /* @__PURE__ */ __name((_) => se_FilterConditionMap(_, context), "ScanFilter"), + Segment: [], + Select: [], + TableName: [], + TotalSegments: [] + }); +}, "se_ScanInput"); +var se_TransactGetItem = /* @__PURE__ */ __name((input, context) => { + return (0, import_smithy_client.take)(input, { + Get: /* @__PURE__ */ __name((_) => se_Get(_, context), "Get") + }); +}, "se_TransactGetItem"); +var se_TransactGetItemList = /* @__PURE__ */ __name((input, context) => { + return input.filter((e) => e != null).map((entry) => { + return se_TransactGetItem(entry, context); + }); +}, "se_TransactGetItemList"); +var se_TransactGetItemsInput = /* @__PURE__ */ __name((input, context) => { + return (0, import_smithy_client.take)(input, { + ReturnConsumedCapacity: [], + TransactItems: /* @__PURE__ */ __name((_) => se_TransactGetItemList(_, context), "TransactItems") + }); +}, "se_TransactGetItemsInput"); +var se_TransactWriteItem = /* @__PURE__ */ __name((input, context) => { + return (0, import_smithy_client.take)(input, { + ConditionCheck: /* @__PURE__ */ __name((_) => se_ConditionCheck(_, context), "ConditionCheck"), + Delete: /* @__PURE__ */ __name((_) => se_Delete(_, context), "Delete"), + Put: /* @__PURE__ */ __name((_) => se_Put(_, context), "Put"), + Update: /* @__PURE__ */ __name((_) => se_Update(_, context), "Update") + }); +}, 
"se_TransactWriteItem"); +var se_TransactWriteItemList = /* @__PURE__ */ __name((input, context) => { + return input.filter((e) => e != null).map((entry) => { + return se_TransactWriteItem(entry, context); + }); +}, "se_TransactWriteItemList"); +var se_TransactWriteItemsInput = /* @__PURE__ */ __name((input, context) => { + return (0, import_smithy_client.take)(input, { + ClientRequestToken: [true, (_) => _ ?? (0, import_uuid.v4)()], + ReturnConsumedCapacity: [], + ReturnItemCollectionMetrics: [], + TransactItems: /* @__PURE__ */ __name((_) => se_TransactWriteItemList(_, context), "TransactItems") + }); +}, "se_TransactWriteItemsInput"); +var se_Update = /* @__PURE__ */ __name((input, context) => { + return (0, import_smithy_client.take)(input, { + ConditionExpression: [], + ExpressionAttributeNames: import_smithy_client._json, + ExpressionAttributeValues: /* @__PURE__ */ __name((_) => se_ExpressionAttributeValueMap(_, context), "ExpressionAttributeValues"), + Key: /* @__PURE__ */ __name((_) => se_Key(_, context), "Key"), + ReturnValuesOnConditionCheckFailure: [], + TableName: [], + UpdateExpression: [] + }); +}, "se_Update"); +var se_UpdateGlobalTableSettingsInput = /* @__PURE__ */ __name((input, context) => { + return (0, import_smithy_client.take)(input, { + GlobalTableBillingMode: [], + GlobalTableGlobalSecondaryIndexSettingsUpdate: /* @__PURE__ */ __name((_) => se_GlobalTableGlobalSecondaryIndexSettingsUpdateList(_, context), "GlobalTableGlobalSecondaryIndexSettingsUpdate"), + GlobalTableName: [], + GlobalTableProvisionedWriteCapacityAutoScalingSettingsUpdate: /* @__PURE__ */ __name((_) => se_AutoScalingSettingsUpdate(_, context), "GlobalTableProvisionedWriteCapacityAutoScalingSettingsUpdate"), + GlobalTableProvisionedWriteCapacityUnits: [], + ReplicaSettingsUpdate: /* @__PURE__ */ __name((_) => se_ReplicaSettingsUpdateList(_, context), "ReplicaSettingsUpdate") + }); +}, "se_UpdateGlobalTableSettingsInput"); +var se_UpdateItemInput = /* @__PURE__ */ 
__name((input, context) => { + return (0, import_smithy_client.take)(input, { + AttributeUpdates: /* @__PURE__ */ __name((_) => se_AttributeUpdates(_, context), "AttributeUpdates"), + ConditionExpression: [], + ConditionalOperator: [], + Expected: /* @__PURE__ */ __name((_) => se_ExpectedAttributeMap(_, context), "Expected"), + ExpressionAttributeNames: import_smithy_client._json, + ExpressionAttributeValues: /* @__PURE__ */ __name((_) => se_ExpressionAttributeValueMap(_, context), "ExpressionAttributeValues"), + Key: /* @__PURE__ */ __name((_) => se_Key(_, context), "Key"), + ReturnConsumedCapacity: [], + ReturnItemCollectionMetrics: [], + ReturnValues: [], + ReturnValuesOnConditionCheckFailure: [], + TableName: [], + UpdateExpression: [] + }); +}, "se_UpdateItemInput"); +var se_UpdateTableReplicaAutoScalingInput = /* @__PURE__ */ __name((input, context) => { + return (0, import_smithy_client.take)(input, { + GlobalSecondaryIndexUpdates: /* @__PURE__ */ __name((_) => se_GlobalSecondaryIndexAutoScalingUpdateList(_, context), "GlobalSecondaryIndexUpdates"), + ProvisionedWriteCapacityAutoScalingUpdate: /* @__PURE__ */ __name((_) => se_AutoScalingSettingsUpdate(_, context), "ProvisionedWriteCapacityAutoScalingUpdate"), + ReplicaUpdates: /* @__PURE__ */ __name((_) => se_ReplicaAutoScalingUpdateList(_, context), "ReplicaUpdates"), + TableName: [] + }); +}, "se_UpdateTableReplicaAutoScalingInput"); +var se_WriteRequest = /* @__PURE__ */ __name((input, context) => { + return (0, import_smithy_client.take)(input, { + DeleteRequest: /* @__PURE__ */ __name((_) => se_DeleteRequest(_, context), "DeleteRequest"), + PutRequest: /* @__PURE__ */ __name((_) => se_PutRequest(_, context), "PutRequest") + }); +}, "se_WriteRequest"); +var se_WriteRequests = /* @__PURE__ */ __name((input, context) => { + return input.filter((e) => e != null).map((entry) => { + return se_WriteRequest(entry, context); + }); +}, "se_WriteRequests"); +var de_ArchivalSummary = /* @__PURE__ */ __name((output, 
context) => { + return (0, import_smithy_client.take)(output, { + ArchivalBackupArn: import_smithy_client.expectString, + ArchivalDateTime: /* @__PURE__ */ __name((_) => (0, import_smithy_client.expectNonNull)((0, import_smithy_client.parseEpochTimestamp)((0, import_smithy_client.expectNumber)(_))), "ArchivalDateTime"), + ArchivalReason: import_smithy_client.expectString + }); +}, "de_ArchivalSummary"); +var de_AttributeMap = /* @__PURE__ */ __name((output, context) => { + return Object.entries(output).reduce((acc, [key, value]) => { + if (value === null) { + return acc; + } + acc[key] = de_AttributeValue((0, import_core.awsExpectUnion)(value), context); + return acc; + }, {}); +}, "de_AttributeMap"); +var de_AttributeValue = /* @__PURE__ */ __name((output, context) => { + if (output.B != null) { + return { + B: context.base64Decoder(output.B) + }; + } + if ((0, import_smithy_client.expectBoolean)(output.BOOL) !== void 0) { + return { BOOL: (0, import_smithy_client.expectBoolean)(output.BOOL) }; + } + if (output.BS != null) { + return { + BS: de_BinarySetAttributeValue(output.BS, context) + }; + } + if (output.L != null) { + return { + L: de_ListAttributeValue(output.L, context) + }; + } + if (output.M != null) { + return { + M: de_MapAttributeValue(output.M, context) + }; + } + if ((0, import_smithy_client.expectString)(output.N) !== void 0) { + return { N: (0, import_smithy_client.expectString)(output.N) }; + } + if (output.NS != null) { + return { + NS: (0, import_smithy_client._json)(output.NS) + }; + } + if ((0, import_smithy_client.expectBoolean)(output.NULL) !== void 0) { + return { NULL: (0, import_smithy_client.expectBoolean)(output.NULL) }; + } + if ((0, import_smithy_client.expectString)(output.S) !== void 0) { + return { S: (0, import_smithy_client.expectString)(output.S) }; + } + if (output.SS != null) { + return { + SS: (0, import_smithy_client._json)(output.SS) + }; + } + return { $unknown: Object.entries(output)[0] }; +}, "de_AttributeValue"); +var 
de_AutoScalingPolicyDescription = /* @__PURE__ */ __name((output, context) => { + return (0, import_smithy_client.take)(output, { + PolicyName: import_smithy_client.expectString, + TargetTrackingScalingPolicyConfiguration: /* @__PURE__ */ __name((_) => de_AutoScalingTargetTrackingScalingPolicyConfigurationDescription(_, context), "TargetTrackingScalingPolicyConfiguration") + }); +}, "de_AutoScalingPolicyDescription"); +var de_AutoScalingPolicyDescriptionList = /* @__PURE__ */ __name((output, context) => { + const retVal = (output || []).filter((e) => e != null).map((entry) => { + return de_AutoScalingPolicyDescription(entry, context); + }); + return retVal; +}, "de_AutoScalingPolicyDescriptionList"); +var de_AutoScalingSettingsDescription = /* @__PURE__ */ __name((output, context) => { + return (0, import_smithy_client.take)(output, { + AutoScalingDisabled: import_smithy_client.expectBoolean, + AutoScalingRoleArn: import_smithy_client.expectString, + MaximumUnits: import_smithy_client.expectLong, + MinimumUnits: import_smithy_client.expectLong, + ScalingPolicies: /* @__PURE__ */ __name((_) => de_AutoScalingPolicyDescriptionList(_, context), "ScalingPolicies") + }); +}, "de_AutoScalingSettingsDescription"); +var de_AutoScalingTargetTrackingScalingPolicyConfigurationDescription = /* @__PURE__ */ __name((output, context) => { + return (0, import_smithy_client.take)(output, { + DisableScaleIn: import_smithy_client.expectBoolean, + ScaleInCooldown: import_smithy_client.expectInt32, + ScaleOutCooldown: import_smithy_client.expectInt32, + TargetValue: import_smithy_client.limitedParseDouble + }); +}, "de_AutoScalingTargetTrackingScalingPolicyConfigurationDescription"); +var de_BackupDescription = /* @__PURE__ */ __name((output, context) => { + return (0, import_smithy_client.take)(output, { + BackupDetails: /* @__PURE__ */ __name((_) => de_BackupDetails(_, context), "BackupDetails"), + SourceTableDetails: /* @__PURE__ */ __name((_) => de_SourceTableDetails(_, context), 
"SourceTableDetails"), + SourceTableFeatureDetails: /* @__PURE__ */ __name((_) => de_SourceTableFeatureDetails(_, context), "SourceTableFeatureDetails") + }); +}, "de_BackupDescription"); +var de_BackupDetails = /* @__PURE__ */ __name((output, context) => { + return (0, import_smithy_client.take)(output, { + BackupArn: import_smithy_client.expectString, + BackupCreationDateTime: /* @__PURE__ */ __name((_) => (0, import_smithy_client.expectNonNull)((0, import_smithy_client.parseEpochTimestamp)((0, import_smithy_client.expectNumber)(_))), "BackupCreationDateTime"), + BackupExpiryDateTime: /* @__PURE__ */ __name((_) => (0, import_smithy_client.expectNonNull)((0, import_smithy_client.parseEpochTimestamp)((0, import_smithy_client.expectNumber)(_))), "BackupExpiryDateTime"), + BackupName: import_smithy_client.expectString, + BackupSizeBytes: import_smithy_client.expectLong, + BackupStatus: import_smithy_client.expectString, + BackupType: import_smithy_client.expectString + }); +}, "de_BackupDetails"); +var de_BackupSummaries = /* @__PURE__ */ __name((output, context) => { + const retVal = (output || []).filter((e) => e != null).map((entry) => { + return de_BackupSummary(entry, context); + }); + return retVal; +}, "de_BackupSummaries"); +var de_BackupSummary = /* @__PURE__ */ __name((output, context) => { + return (0, import_smithy_client.take)(output, { + BackupArn: import_smithy_client.expectString, + BackupCreationDateTime: /* @__PURE__ */ __name((_) => (0, import_smithy_client.expectNonNull)((0, import_smithy_client.parseEpochTimestamp)((0, import_smithy_client.expectNumber)(_))), "BackupCreationDateTime"), + BackupExpiryDateTime: /* @__PURE__ */ __name((_) => (0, import_smithy_client.expectNonNull)((0, import_smithy_client.parseEpochTimestamp)((0, import_smithy_client.expectNumber)(_))), "BackupExpiryDateTime"), + BackupName: import_smithy_client.expectString, + BackupSizeBytes: import_smithy_client.expectLong, + BackupStatus: import_smithy_client.expectString, + 
BackupType: import_smithy_client.expectString, + TableArn: import_smithy_client.expectString, + TableId: import_smithy_client.expectString, + TableName: import_smithy_client.expectString + }); +}, "de_BackupSummary"); +var de_BatchExecuteStatementOutput = /* @__PURE__ */ __name((output, context) => { + return (0, import_smithy_client.take)(output, { + ConsumedCapacity: /* @__PURE__ */ __name((_) => de_ConsumedCapacityMultiple(_, context), "ConsumedCapacity"), + Responses: /* @__PURE__ */ __name((_) => de_PartiQLBatchResponse(_, context), "Responses") + }); +}, "de_BatchExecuteStatementOutput"); +var de_BatchGetItemOutput = /* @__PURE__ */ __name((output, context) => { + return (0, import_smithy_client.take)(output, { + ConsumedCapacity: /* @__PURE__ */ __name((_) => de_ConsumedCapacityMultiple(_, context), "ConsumedCapacity"), + Responses: /* @__PURE__ */ __name((_) => de_BatchGetResponseMap(_, context), "Responses"), + UnprocessedKeys: /* @__PURE__ */ __name((_) => de_BatchGetRequestMap(_, context), "UnprocessedKeys") + }); +}, "de_BatchGetItemOutput"); +var de_BatchGetRequestMap = /* @__PURE__ */ __name((output, context) => { + return Object.entries(output).reduce((acc, [key, value]) => { + if (value === null) { + return acc; + } + acc[key] = de_KeysAndAttributes(value, context); + return acc; + }, {}); +}, "de_BatchGetRequestMap"); +var de_BatchGetResponseMap = /* @__PURE__ */ __name((output, context) => { + return Object.entries(output).reduce( + (acc, [key, value]) => { + if (value === null) { + return acc; + } + acc[key] = de_ItemList(value, context); + return acc; + }, + {} + ); +}, "de_BatchGetResponseMap"); +var de_BatchStatementError = /* @__PURE__ */ __name((output, context) => { + return (0, import_smithy_client.take)(output, { + Code: import_smithy_client.expectString, + Item: /* @__PURE__ */ __name((_) => de_AttributeMap(_, context), "Item"), + Message: import_smithy_client.expectString + }); +}, "de_BatchStatementError"); +var 
de_BatchStatementResponse = /* @__PURE__ */ __name((output, context) => { + return (0, import_smithy_client.take)(output, { + Error: /* @__PURE__ */ __name((_) => de_BatchStatementError(_, context), "Error"), + Item: /* @__PURE__ */ __name((_) => de_AttributeMap(_, context), "Item"), + TableName: import_smithy_client.expectString + }); +}, "de_BatchStatementResponse"); +var de_BatchWriteItemOutput = /* @__PURE__ */ __name((output, context) => { + return (0, import_smithy_client.take)(output, { + ConsumedCapacity: /* @__PURE__ */ __name((_) => de_ConsumedCapacityMultiple(_, context), "ConsumedCapacity"), + ItemCollectionMetrics: /* @__PURE__ */ __name((_) => de_ItemCollectionMetricsPerTable(_, context), "ItemCollectionMetrics"), + UnprocessedItems: /* @__PURE__ */ __name((_) => de_BatchWriteItemRequestMap(_, context), "UnprocessedItems") + }); +}, "de_BatchWriteItemOutput"); +var de_BatchWriteItemRequestMap = /* @__PURE__ */ __name((output, context) => { + return Object.entries(output).reduce((acc, [key, value]) => { + if (value === null) { + return acc; + } + acc[key] = de_WriteRequests(value, context); + return acc; + }, {}); +}, "de_BatchWriteItemRequestMap"); +var de_BillingModeSummary = /* @__PURE__ */ __name((output, context) => { + return (0, import_smithy_client.take)(output, { + BillingMode: import_smithy_client.expectString, + LastUpdateToPayPerRequestDateTime: /* @__PURE__ */ __name((_) => (0, import_smithy_client.expectNonNull)((0, import_smithy_client.parseEpochTimestamp)((0, import_smithy_client.expectNumber)(_))), "LastUpdateToPayPerRequestDateTime") + }); +}, "de_BillingModeSummary"); +var de_BinarySetAttributeValue = /* @__PURE__ */ __name((output, context) => { + const retVal = (output || []).filter((e) => e != null).map((entry) => { + return context.base64Decoder(entry); + }); + return retVal; +}, "de_BinarySetAttributeValue"); +var de_CancellationReason = /* @__PURE__ */ __name((output, context) => { + return (0, 
import_smithy_client.take)(output, { + Code: import_smithy_client.expectString, + Item: /* @__PURE__ */ __name((_) => de_AttributeMap(_, context), "Item"), + Message: import_smithy_client.expectString + }); +}, "de_CancellationReason"); +var de_CancellationReasonList = /* @__PURE__ */ __name((output, context) => { + const retVal = (output || []).filter((e) => e != null).map((entry) => { + return de_CancellationReason(entry, context); + }); + return retVal; +}, "de_CancellationReasonList"); +var de_Capacity = /* @__PURE__ */ __name((output, context) => { + return (0, import_smithy_client.take)(output, { + CapacityUnits: import_smithy_client.limitedParseDouble, + ReadCapacityUnits: import_smithy_client.limitedParseDouble, + WriteCapacityUnits: import_smithy_client.limitedParseDouble + }); +}, "de_Capacity"); +var de_ConditionalCheckFailedException = /* @__PURE__ */ __name((output, context) => { + return (0, import_smithy_client.take)(output, { + Item: /* @__PURE__ */ __name((_) => de_AttributeMap(_, context), "Item"), + message: import_smithy_client.expectString + }); +}, "de_ConditionalCheckFailedException"); +var de_ConsumedCapacity = /* @__PURE__ */ __name((output, context) => { + return (0, import_smithy_client.take)(output, { + CapacityUnits: import_smithy_client.limitedParseDouble, + GlobalSecondaryIndexes: /* @__PURE__ */ __name((_) => de_SecondaryIndexesCapacityMap(_, context), "GlobalSecondaryIndexes"), + LocalSecondaryIndexes: /* @__PURE__ */ __name((_) => de_SecondaryIndexesCapacityMap(_, context), "LocalSecondaryIndexes"), + ReadCapacityUnits: import_smithy_client.limitedParseDouble, + Table: /* @__PURE__ */ __name((_) => de_Capacity(_, context), "Table"), + TableName: import_smithy_client.expectString, + WriteCapacityUnits: import_smithy_client.limitedParseDouble + }); +}, "de_ConsumedCapacity"); +var de_ConsumedCapacityMultiple = /* @__PURE__ */ __name((output, context) => { + const retVal = (output || []).filter((e) => e != null).map((entry) => { + 
return de_ConsumedCapacity(entry, context); + }); + return retVal; +}, "de_ConsumedCapacityMultiple"); +var de_ContinuousBackupsDescription = /* @__PURE__ */ __name((output, context) => { + return (0, import_smithy_client.take)(output, { + ContinuousBackupsStatus: import_smithy_client.expectString, + PointInTimeRecoveryDescription: /* @__PURE__ */ __name((_) => de_PointInTimeRecoveryDescription(_, context), "PointInTimeRecoveryDescription") + }); +}, "de_ContinuousBackupsDescription"); +var de_CreateBackupOutput = /* @__PURE__ */ __name((output, context) => { + return (0, import_smithy_client.take)(output, { + BackupDetails: /* @__PURE__ */ __name((_) => de_BackupDetails(_, context), "BackupDetails") + }); +}, "de_CreateBackupOutput"); +var de_CreateGlobalTableOutput = /* @__PURE__ */ __name((output, context) => { + return (0, import_smithy_client.take)(output, { + GlobalTableDescription: /* @__PURE__ */ __name((_) => de_GlobalTableDescription(_, context), "GlobalTableDescription") + }); +}, "de_CreateGlobalTableOutput"); +var de_CreateTableOutput = /* @__PURE__ */ __name((output, context) => { + return (0, import_smithy_client.take)(output, { + TableDescription: /* @__PURE__ */ __name((_) => de_TableDescription(_, context), "TableDescription") + }); +}, "de_CreateTableOutput"); +var de_DeleteBackupOutput = /* @__PURE__ */ __name((output, context) => { + return (0, import_smithy_client.take)(output, { + BackupDescription: /* @__PURE__ */ __name((_) => de_BackupDescription(_, context), "BackupDescription") + }); +}, "de_DeleteBackupOutput"); +var de_DeleteItemOutput = /* @__PURE__ */ __name((output, context) => { + return (0, import_smithy_client.take)(output, { + Attributes: /* @__PURE__ */ __name((_) => de_AttributeMap(_, context), "Attributes"), + ConsumedCapacity: /* @__PURE__ */ __name((_) => de_ConsumedCapacity(_, context), "ConsumedCapacity"), + ItemCollectionMetrics: /* @__PURE__ */ __name((_) => de_ItemCollectionMetrics(_, context), "ItemCollectionMetrics") 
+ }); +}, "de_DeleteItemOutput"); +var de_DeleteRequest = /* @__PURE__ */ __name((output, context) => { + return (0, import_smithy_client.take)(output, { + Key: /* @__PURE__ */ __name((_) => de_Key(_, context), "Key") + }); +}, "de_DeleteRequest"); +var de_DeleteTableOutput = /* @__PURE__ */ __name((output, context) => { + return (0, import_smithy_client.take)(output, { + TableDescription: /* @__PURE__ */ __name((_) => de_TableDescription(_, context), "TableDescription") + }); +}, "de_DeleteTableOutput"); +var de_DescribeBackupOutput = /* @__PURE__ */ __name((output, context) => { + return (0, import_smithy_client.take)(output, { + BackupDescription: /* @__PURE__ */ __name((_) => de_BackupDescription(_, context), "BackupDescription") + }); +}, "de_DescribeBackupOutput"); +var de_DescribeContinuousBackupsOutput = /* @__PURE__ */ __name((output, context) => { + return (0, import_smithy_client.take)(output, { + ContinuousBackupsDescription: /* @__PURE__ */ __name((_) => de_ContinuousBackupsDescription(_, context), "ContinuousBackupsDescription") + }); +}, "de_DescribeContinuousBackupsOutput"); +var de_DescribeContributorInsightsOutput = /* @__PURE__ */ __name((output, context) => { + return (0, import_smithy_client.take)(output, { + ContributorInsightsRuleList: import_smithy_client._json, + ContributorInsightsStatus: import_smithy_client.expectString, + FailureException: import_smithy_client._json, + IndexName: import_smithy_client.expectString, + LastUpdateDateTime: /* @__PURE__ */ __name((_) => (0, import_smithy_client.expectNonNull)((0, import_smithy_client.parseEpochTimestamp)((0, import_smithy_client.expectNumber)(_))), "LastUpdateDateTime"), + TableName: import_smithy_client.expectString + }); +}, "de_DescribeContributorInsightsOutput"); +var de_DescribeExportOutput = /* @__PURE__ */ __name((output, context) => { + return (0, import_smithy_client.take)(output, { + ExportDescription: /* @__PURE__ */ __name((_) => de_ExportDescription(_, context), 
"ExportDescription") + }); +}, "de_DescribeExportOutput"); +var de_DescribeGlobalTableOutput = /* @__PURE__ */ __name((output, context) => { + return (0, import_smithy_client.take)(output, { + GlobalTableDescription: /* @__PURE__ */ __name((_) => de_GlobalTableDescription(_, context), "GlobalTableDescription") + }); +}, "de_DescribeGlobalTableOutput"); +var de_DescribeGlobalTableSettingsOutput = /* @__PURE__ */ __name((output, context) => { + return (0, import_smithy_client.take)(output, { + GlobalTableName: import_smithy_client.expectString, + ReplicaSettings: /* @__PURE__ */ __name((_) => de_ReplicaSettingsDescriptionList(_, context), "ReplicaSettings") + }); +}, "de_DescribeGlobalTableSettingsOutput"); +var de_DescribeImportOutput = /* @__PURE__ */ __name((output, context) => { + return (0, import_smithy_client.take)(output, { + ImportTableDescription: /* @__PURE__ */ __name((_) => de_ImportTableDescription(_, context), "ImportTableDescription") + }); +}, "de_DescribeImportOutput"); +var de_DescribeTableOutput = /* @__PURE__ */ __name((output, context) => { + return (0, import_smithy_client.take)(output, { + Table: /* @__PURE__ */ __name((_) => de_TableDescription(_, context), "Table") + }); +}, "de_DescribeTableOutput"); +var de_DescribeTableReplicaAutoScalingOutput = /* @__PURE__ */ __name((output, context) => { + return (0, import_smithy_client.take)(output, { + TableAutoScalingDescription: /* @__PURE__ */ __name((_) => de_TableAutoScalingDescription(_, context), "TableAutoScalingDescription") + }); +}, "de_DescribeTableReplicaAutoScalingOutput"); +var de_ExecuteStatementOutput = /* @__PURE__ */ __name((output, context) => { + return (0, import_smithy_client.take)(output, { + ConsumedCapacity: /* @__PURE__ */ __name((_) => de_ConsumedCapacity(_, context), "ConsumedCapacity"), + Items: /* @__PURE__ */ __name((_) => de_ItemList(_, context), "Items"), + LastEvaluatedKey: /* @__PURE__ */ __name((_) => de_Key(_, context), "LastEvaluatedKey"), + NextToken: 
import_smithy_client.expectString + }); +}, "de_ExecuteStatementOutput"); +var de_ExecuteTransactionOutput = /* @__PURE__ */ __name((output, context) => { + return (0, import_smithy_client.take)(output, { + ConsumedCapacity: /* @__PURE__ */ __name((_) => de_ConsumedCapacityMultiple(_, context), "ConsumedCapacity"), + Responses: /* @__PURE__ */ __name((_) => de_ItemResponseList(_, context), "Responses") + }); +}, "de_ExecuteTransactionOutput"); +var de_ExportDescription = /* @__PURE__ */ __name((output, context) => { + return (0, import_smithy_client.take)(output, { + BilledSizeBytes: import_smithy_client.expectLong, + ClientToken: import_smithy_client.expectString, + EndTime: /* @__PURE__ */ __name((_) => (0, import_smithy_client.expectNonNull)((0, import_smithy_client.parseEpochTimestamp)((0, import_smithy_client.expectNumber)(_))), "EndTime"), + ExportArn: import_smithy_client.expectString, + ExportFormat: import_smithy_client.expectString, + ExportManifest: import_smithy_client.expectString, + ExportStatus: import_smithy_client.expectString, + ExportTime: /* @__PURE__ */ __name((_) => (0, import_smithy_client.expectNonNull)((0, import_smithy_client.parseEpochTimestamp)((0, import_smithy_client.expectNumber)(_))), "ExportTime"), + ExportType: import_smithy_client.expectString, + FailureCode: import_smithy_client.expectString, + FailureMessage: import_smithy_client.expectString, + IncrementalExportSpecification: /* @__PURE__ */ __name((_) => de_IncrementalExportSpecification(_, context), "IncrementalExportSpecification"), + ItemCount: import_smithy_client.expectLong, + S3Bucket: import_smithy_client.expectString, + S3BucketOwner: import_smithy_client.expectString, + S3Prefix: import_smithy_client.expectString, + S3SseAlgorithm: import_smithy_client.expectString, + S3SseKmsKeyId: import_smithy_client.expectString, + StartTime: /* @__PURE__ */ __name((_) => (0, import_smithy_client.expectNonNull)((0, import_smithy_client.parseEpochTimestamp)((0, 
import_smithy_client.expectNumber)(_))), "StartTime"), + TableArn: import_smithy_client.expectString, + TableId: import_smithy_client.expectString + }); +}, "de_ExportDescription"); +var de_ExportTableToPointInTimeOutput = /* @__PURE__ */ __name((output, context) => { + return (0, import_smithy_client.take)(output, { + ExportDescription: /* @__PURE__ */ __name((_) => de_ExportDescription(_, context), "ExportDescription") + }); +}, "de_ExportTableToPointInTimeOutput"); +var de_GetItemOutput = /* @__PURE__ */ __name((output, context) => { + return (0, import_smithy_client.take)(output, { + ConsumedCapacity: /* @__PURE__ */ __name((_) => de_ConsumedCapacity(_, context), "ConsumedCapacity"), + Item: /* @__PURE__ */ __name((_) => de_AttributeMap(_, context), "Item") + }); +}, "de_GetItemOutput"); +var de_GlobalSecondaryIndexDescription = /* @__PURE__ */ __name((output, context) => { + return (0, import_smithy_client.take)(output, { + Backfilling: import_smithy_client.expectBoolean, + IndexArn: import_smithy_client.expectString, + IndexName: import_smithy_client.expectString, + IndexSizeBytes: import_smithy_client.expectLong, + IndexStatus: import_smithy_client.expectString, + ItemCount: import_smithy_client.expectLong, + KeySchema: import_smithy_client._json, + OnDemandThroughput: import_smithy_client._json, + Projection: import_smithy_client._json, + ProvisionedThroughput: /* @__PURE__ */ __name((_) => de_ProvisionedThroughputDescription(_, context), "ProvisionedThroughput"), + WarmThroughput: import_smithy_client._json + }); +}, "de_GlobalSecondaryIndexDescription"); +var de_GlobalSecondaryIndexDescriptionList = /* @__PURE__ */ __name((output, context) => { + const retVal = (output || []).filter((e) => e != null).map((entry) => { + return de_GlobalSecondaryIndexDescription(entry, context); + }); + return retVal; +}, "de_GlobalSecondaryIndexDescriptionList"); +var de_GlobalTableDescription = /* @__PURE__ */ __name((output, context) => { + return (0, 
import_smithy_client.take)(output, { + CreationDateTime: /* @__PURE__ */ __name((_) => (0, import_smithy_client.expectNonNull)((0, import_smithy_client.parseEpochTimestamp)((0, import_smithy_client.expectNumber)(_))), "CreationDateTime"), + GlobalTableArn: import_smithy_client.expectString, + GlobalTableName: import_smithy_client.expectString, + GlobalTableStatus: import_smithy_client.expectString, + ReplicationGroup: /* @__PURE__ */ __name((_) => de_ReplicaDescriptionList(_, context), "ReplicationGroup") + }); +}, "de_GlobalTableDescription"); +var de_ImportSummary = /* @__PURE__ */ __name((output, context) => { + return (0, import_smithy_client.take)(output, { + CloudWatchLogGroupArn: import_smithy_client.expectString, + EndTime: /* @__PURE__ */ __name((_) => (0, import_smithy_client.expectNonNull)((0, import_smithy_client.parseEpochTimestamp)((0, import_smithy_client.expectNumber)(_))), "EndTime"), + ImportArn: import_smithy_client.expectString, + ImportStatus: import_smithy_client.expectString, + InputFormat: import_smithy_client.expectString, + S3BucketSource: import_smithy_client._json, + StartTime: /* @__PURE__ */ __name((_) => (0, import_smithy_client.expectNonNull)((0, import_smithy_client.parseEpochTimestamp)((0, import_smithy_client.expectNumber)(_))), "StartTime"), + TableArn: import_smithy_client.expectString + }); +}, "de_ImportSummary"); +var de_ImportSummaryList = /* @__PURE__ */ __name((output, context) => { + const retVal = (output || []).filter((e) => e != null).map((entry) => { + return de_ImportSummary(entry, context); + }); + return retVal; +}, "de_ImportSummaryList"); +var de_ImportTableDescription = /* @__PURE__ */ __name((output, context) => { + return (0, import_smithy_client.take)(output, { + ClientToken: import_smithy_client.expectString, + CloudWatchLogGroupArn: import_smithy_client.expectString, + EndTime: /* @__PURE__ */ __name((_) => (0, import_smithy_client.expectNonNull)((0, import_smithy_client.parseEpochTimestamp)((0, 
import_smithy_client.expectNumber)(_))), "EndTime"), + ErrorCount: import_smithy_client.expectLong, + FailureCode: import_smithy_client.expectString, + FailureMessage: import_smithy_client.expectString, + ImportArn: import_smithy_client.expectString, + ImportStatus: import_smithy_client.expectString, + ImportedItemCount: import_smithy_client.expectLong, + InputCompressionType: import_smithy_client.expectString, + InputFormat: import_smithy_client.expectString, + InputFormatOptions: import_smithy_client._json, + ProcessedItemCount: import_smithy_client.expectLong, + ProcessedSizeBytes: import_smithy_client.expectLong, + S3BucketSource: import_smithy_client._json, + StartTime: /* @__PURE__ */ __name((_) => (0, import_smithy_client.expectNonNull)((0, import_smithy_client.parseEpochTimestamp)((0, import_smithy_client.expectNumber)(_))), "StartTime"), + TableArn: import_smithy_client.expectString, + TableCreationParameters: import_smithy_client._json, + TableId: import_smithy_client.expectString + }); +}, "de_ImportTableDescription"); +var de_ImportTableOutput = /* @__PURE__ */ __name((output, context) => { + return (0, import_smithy_client.take)(output, { + ImportTableDescription: /* @__PURE__ */ __name((_) => de_ImportTableDescription(_, context), "ImportTableDescription") + }); +}, "de_ImportTableOutput"); +var de_IncrementalExportSpecification = /* @__PURE__ */ __name((output, context) => { + return (0, import_smithy_client.take)(output, { + ExportFromTime: /* @__PURE__ */ __name((_) => (0, import_smithy_client.expectNonNull)((0, import_smithy_client.parseEpochTimestamp)((0, import_smithy_client.expectNumber)(_))), "ExportFromTime"), + ExportToTime: /* @__PURE__ */ __name((_) => (0, import_smithy_client.expectNonNull)((0, import_smithy_client.parseEpochTimestamp)((0, import_smithy_client.expectNumber)(_))), "ExportToTime"), + ExportViewType: import_smithy_client.expectString + }); +}, "de_IncrementalExportSpecification"); +var de_ItemCollectionKeyAttributeMap = /* 
@__PURE__ */ __name((output, context) => { + return Object.entries(output).reduce((acc, [key, value]) => { + if (value === null) { + return acc; + } + acc[key] = de_AttributeValue((0, import_core.awsExpectUnion)(value), context); + return acc; + }, {}); +}, "de_ItemCollectionKeyAttributeMap"); +var de_ItemCollectionMetrics = /* @__PURE__ */ __name((output, context) => { + return (0, import_smithy_client.take)(output, { + ItemCollectionKey: /* @__PURE__ */ __name((_) => de_ItemCollectionKeyAttributeMap(_, context), "ItemCollectionKey"), + SizeEstimateRangeGB: /* @__PURE__ */ __name((_) => de_ItemCollectionSizeEstimateRange(_, context), "SizeEstimateRangeGB") + }); +}, "de_ItemCollectionMetrics"); +var de_ItemCollectionMetricsMultiple = /* @__PURE__ */ __name((output, context) => { + const retVal = (output || []).filter((e) => e != null).map((entry) => { + return de_ItemCollectionMetrics(entry, context); + }); + return retVal; +}, "de_ItemCollectionMetricsMultiple"); +var de_ItemCollectionMetricsPerTable = /* @__PURE__ */ __name((output, context) => { + return Object.entries(output).reduce((acc, [key, value]) => { + if (value === null) { + return acc; + } + acc[key] = de_ItemCollectionMetricsMultiple(value, context); + return acc; + }, {}); +}, "de_ItemCollectionMetricsPerTable"); +var de_ItemCollectionSizeEstimateRange = /* @__PURE__ */ __name((output, context) => { + const retVal = (output || []).filter((e) => e != null).map((entry) => { + return (0, import_smithy_client.limitedParseDouble)(entry); + }); + return retVal; +}, "de_ItemCollectionSizeEstimateRange"); +var de_ItemList = /* @__PURE__ */ __name((output, context) => { + const retVal = (output || []).filter((e) => e != null).map((entry) => { + return de_AttributeMap(entry, context); + }); + return retVal; +}, "de_ItemList"); +var de_ItemResponse = /* @__PURE__ */ __name((output, context) => { + return (0, import_smithy_client.take)(output, { + Item: /* @__PURE__ */ __name((_) => de_AttributeMap(_, context), 
"Item") + }); +}, "de_ItemResponse"); +var de_ItemResponseList = /* @__PURE__ */ __name((output, context) => { + const retVal = (output || []).filter((e) => e != null).map((entry) => { + return de_ItemResponse(entry, context); + }); + return retVal; +}, "de_ItemResponseList"); +var de_Key = /* @__PURE__ */ __name((output, context) => { + return Object.entries(output).reduce((acc, [key, value]) => { + if (value === null) { + return acc; + } + acc[key] = de_AttributeValue((0, import_core.awsExpectUnion)(value), context); + return acc; + }, {}); +}, "de_Key"); +var de_KeyList = /* @__PURE__ */ __name((output, context) => { + const retVal = (output || []).filter((e) => e != null).map((entry) => { + return de_Key(entry, context); + }); + return retVal; +}, "de_KeyList"); +var de_KeysAndAttributes = /* @__PURE__ */ __name((output, context) => { + return (0, import_smithy_client.take)(output, { + AttributesToGet: import_smithy_client._json, + ConsistentRead: import_smithy_client.expectBoolean, + ExpressionAttributeNames: import_smithy_client._json, + Keys: /* @__PURE__ */ __name((_) => de_KeyList(_, context), "Keys"), + ProjectionExpression: import_smithy_client.expectString + }); +}, "de_KeysAndAttributes"); +var de_ListAttributeValue = /* @__PURE__ */ __name((output, context) => { + const retVal = (output || []).filter((e) => e != null).map((entry) => { + return de_AttributeValue((0, import_core.awsExpectUnion)(entry), context); + }); + return retVal; +}, "de_ListAttributeValue"); +var de_ListBackupsOutput = /* @__PURE__ */ __name((output, context) => { + return (0, import_smithy_client.take)(output, { + BackupSummaries: /* @__PURE__ */ __name((_) => de_BackupSummaries(_, context), "BackupSummaries"), + LastEvaluatedBackupArn: import_smithy_client.expectString + }); +}, "de_ListBackupsOutput"); +var de_ListImportsOutput = /* @__PURE__ */ __name((output, context) => { + return (0, import_smithy_client.take)(output, { + ImportSummaryList: /* @__PURE__ */ __name((_) => 
de_ImportSummaryList(_, context), "ImportSummaryList"), + NextToken: import_smithy_client.expectString + }); +}, "de_ListImportsOutput"); +var de_MapAttributeValue = /* @__PURE__ */ __name((output, context) => { + return Object.entries(output).reduce((acc, [key, value]) => { + if (value === null) { + return acc; + } + acc[key] = de_AttributeValue((0, import_core.awsExpectUnion)(value), context); + return acc; + }, {}); +}, "de_MapAttributeValue"); +var de_PartiQLBatchResponse = /* @__PURE__ */ __name((output, context) => { + const retVal = (output || []).filter((e) => e != null).map((entry) => { + return de_BatchStatementResponse(entry, context); + }); + return retVal; +}, "de_PartiQLBatchResponse"); +var de_PointInTimeRecoveryDescription = /* @__PURE__ */ __name((output, context) => { + return (0, import_smithy_client.take)(output, { + EarliestRestorableDateTime: /* @__PURE__ */ __name((_) => (0, import_smithy_client.expectNonNull)((0, import_smithy_client.parseEpochTimestamp)((0, import_smithy_client.expectNumber)(_))), "EarliestRestorableDateTime"), + LatestRestorableDateTime: /* @__PURE__ */ __name((_) => (0, import_smithy_client.expectNonNull)((0, import_smithy_client.parseEpochTimestamp)((0, import_smithy_client.expectNumber)(_))), "LatestRestorableDateTime"), + PointInTimeRecoveryStatus: import_smithy_client.expectString, + RecoveryPeriodInDays: import_smithy_client.expectInt32 + }); +}, "de_PointInTimeRecoveryDescription"); +var de_ProvisionedThroughputDescription = /* @__PURE__ */ __name((output, context) => { + return (0, import_smithy_client.take)(output, { + LastDecreaseDateTime: /* @__PURE__ */ __name((_) => (0, import_smithy_client.expectNonNull)((0, import_smithy_client.parseEpochTimestamp)((0, import_smithy_client.expectNumber)(_))), "LastDecreaseDateTime"), + LastIncreaseDateTime: /* @__PURE__ */ __name((_) => (0, import_smithy_client.expectNonNull)((0, import_smithy_client.parseEpochTimestamp)((0, import_smithy_client.expectNumber)(_))), 
"LastIncreaseDateTime"), + NumberOfDecreasesToday: import_smithy_client.expectLong, + ReadCapacityUnits: import_smithy_client.expectLong, + WriteCapacityUnits: import_smithy_client.expectLong + }); +}, "de_ProvisionedThroughputDescription"); +var de_PutItemInputAttributeMap = /* @__PURE__ */ __name((output, context) => { + return Object.entries(output).reduce((acc, [key, value]) => { + if (value === null) { + return acc; + } + acc[key] = de_AttributeValue((0, import_core.awsExpectUnion)(value), context); + return acc; + }, {}); +}, "de_PutItemInputAttributeMap"); +var de_PutItemOutput = /* @__PURE__ */ __name((output, context) => { + return (0, import_smithy_client.take)(output, { + Attributes: /* @__PURE__ */ __name((_) => de_AttributeMap(_, context), "Attributes"), + ConsumedCapacity: /* @__PURE__ */ __name((_) => de_ConsumedCapacity(_, context), "ConsumedCapacity"), + ItemCollectionMetrics: /* @__PURE__ */ __name((_) => de_ItemCollectionMetrics(_, context), "ItemCollectionMetrics") + }); +}, "de_PutItemOutput"); +var de_PutRequest = /* @__PURE__ */ __name((output, context) => { + return (0, import_smithy_client.take)(output, { + Item: /* @__PURE__ */ __name((_) => de_PutItemInputAttributeMap(_, context), "Item") + }); +}, "de_PutRequest"); +var de_QueryOutput = /* @__PURE__ */ __name((output, context) => { + return (0, import_smithy_client.take)(output, { + ConsumedCapacity: /* @__PURE__ */ __name((_) => de_ConsumedCapacity(_, context), "ConsumedCapacity"), + Count: import_smithy_client.expectInt32, + Items: /* @__PURE__ */ __name((_) => de_ItemList(_, context), "Items"), + LastEvaluatedKey: /* @__PURE__ */ __name((_) => de_Key(_, context), "LastEvaluatedKey"), + ScannedCount: import_smithy_client.expectInt32 + }); +}, "de_QueryOutput"); +var de_ReplicaAutoScalingDescription = /* @__PURE__ */ __name((output, context) => { + return (0, import_smithy_client.take)(output, { + GlobalSecondaryIndexes: /* @__PURE__ */ __name((_) => 
de_ReplicaGlobalSecondaryIndexAutoScalingDescriptionList(_, context), "GlobalSecondaryIndexes"), + RegionName: import_smithy_client.expectString, + ReplicaProvisionedReadCapacityAutoScalingSettings: /* @__PURE__ */ __name((_) => de_AutoScalingSettingsDescription(_, context), "ReplicaProvisionedReadCapacityAutoScalingSettings"), + ReplicaProvisionedWriteCapacityAutoScalingSettings: /* @__PURE__ */ __name((_) => de_AutoScalingSettingsDescription(_, context), "ReplicaProvisionedWriteCapacityAutoScalingSettings"), + ReplicaStatus: import_smithy_client.expectString + }); +}, "de_ReplicaAutoScalingDescription"); +var de_ReplicaAutoScalingDescriptionList = /* @__PURE__ */ __name((output, context) => { + const retVal = (output || []).filter((e) => e != null).map((entry) => { + return de_ReplicaAutoScalingDescription(entry, context); + }); + return retVal; +}, "de_ReplicaAutoScalingDescriptionList"); +var de_ReplicaDescription = /* @__PURE__ */ __name((output, context) => { + return (0, import_smithy_client.take)(output, { + GlobalSecondaryIndexes: import_smithy_client._json, + KMSMasterKeyId: import_smithy_client.expectString, + OnDemandThroughputOverride: import_smithy_client._json, + ProvisionedThroughputOverride: import_smithy_client._json, + RegionName: import_smithy_client.expectString, + ReplicaInaccessibleDateTime: /* @__PURE__ */ __name((_) => (0, import_smithy_client.expectNonNull)((0, import_smithy_client.parseEpochTimestamp)((0, import_smithy_client.expectNumber)(_))), "ReplicaInaccessibleDateTime"), + ReplicaStatus: import_smithy_client.expectString, + ReplicaStatusDescription: import_smithy_client.expectString, + ReplicaStatusPercentProgress: import_smithy_client.expectString, + ReplicaTableClassSummary: /* @__PURE__ */ __name((_) => de_TableClassSummary(_, context), "ReplicaTableClassSummary"), + WarmThroughput: import_smithy_client._json + }); +}, "de_ReplicaDescription"); +var de_ReplicaDescriptionList = /* @__PURE__ */ __name((output, context) => { + const 
retVal = (output || []).filter((e) => e != null).map((entry) => { + return de_ReplicaDescription(entry, context); + }); + return retVal; +}, "de_ReplicaDescriptionList"); +var de_ReplicaGlobalSecondaryIndexAutoScalingDescription = /* @__PURE__ */ __name((output, context) => { + return (0, import_smithy_client.take)(output, { + IndexName: import_smithy_client.expectString, + IndexStatus: import_smithy_client.expectString, + ProvisionedReadCapacityAutoScalingSettings: /* @__PURE__ */ __name((_) => de_AutoScalingSettingsDescription(_, context), "ProvisionedReadCapacityAutoScalingSettings"), + ProvisionedWriteCapacityAutoScalingSettings: /* @__PURE__ */ __name((_) => de_AutoScalingSettingsDescription(_, context), "ProvisionedWriteCapacityAutoScalingSettings") + }); +}, "de_ReplicaGlobalSecondaryIndexAutoScalingDescription"); +var de_ReplicaGlobalSecondaryIndexAutoScalingDescriptionList = /* @__PURE__ */ __name((output, context) => { + const retVal = (output || []).filter((e) => e != null).map((entry) => { + return de_ReplicaGlobalSecondaryIndexAutoScalingDescription(entry, context); + }); + return retVal; +}, "de_ReplicaGlobalSecondaryIndexAutoScalingDescriptionList"); +var de_ReplicaGlobalSecondaryIndexSettingsDescription = /* @__PURE__ */ __name((output, context) => { + return (0, import_smithy_client.take)(output, { + IndexName: import_smithy_client.expectString, + IndexStatus: import_smithy_client.expectString, + ProvisionedReadCapacityAutoScalingSettings: /* @__PURE__ */ __name((_) => de_AutoScalingSettingsDescription(_, context), "ProvisionedReadCapacityAutoScalingSettings"), + ProvisionedReadCapacityUnits: import_smithy_client.expectLong, + ProvisionedWriteCapacityAutoScalingSettings: /* @__PURE__ */ __name((_) => de_AutoScalingSettingsDescription(_, context), "ProvisionedWriteCapacityAutoScalingSettings"), + ProvisionedWriteCapacityUnits: import_smithy_client.expectLong + }); +}, "de_ReplicaGlobalSecondaryIndexSettingsDescription"); +var 
de_ReplicaGlobalSecondaryIndexSettingsDescriptionList = /* @__PURE__ */ __name((output, context) => { + const retVal = (output || []).filter((e) => e != null).map((entry) => { + return de_ReplicaGlobalSecondaryIndexSettingsDescription(entry, context); + }); + return retVal; +}, "de_ReplicaGlobalSecondaryIndexSettingsDescriptionList"); +var de_ReplicaSettingsDescription = /* @__PURE__ */ __name((output, context) => { + return (0, import_smithy_client.take)(output, { + RegionName: import_smithy_client.expectString, + ReplicaBillingModeSummary: /* @__PURE__ */ __name((_) => de_BillingModeSummary(_, context), "ReplicaBillingModeSummary"), + ReplicaGlobalSecondaryIndexSettings: /* @__PURE__ */ __name((_) => de_ReplicaGlobalSecondaryIndexSettingsDescriptionList(_, context), "ReplicaGlobalSecondaryIndexSettings"), + ReplicaProvisionedReadCapacityAutoScalingSettings: /* @__PURE__ */ __name((_) => de_AutoScalingSettingsDescription(_, context), "ReplicaProvisionedReadCapacityAutoScalingSettings"), + ReplicaProvisionedReadCapacityUnits: import_smithy_client.expectLong, + ReplicaProvisionedWriteCapacityAutoScalingSettings: /* @__PURE__ */ __name((_) => de_AutoScalingSettingsDescription(_, context), "ReplicaProvisionedWriteCapacityAutoScalingSettings"), + ReplicaProvisionedWriteCapacityUnits: import_smithy_client.expectLong, + ReplicaStatus: import_smithy_client.expectString, + ReplicaTableClassSummary: /* @__PURE__ */ __name((_) => de_TableClassSummary(_, context), "ReplicaTableClassSummary") + }); +}, "de_ReplicaSettingsDescription"); +var de_ReplicaSettingsDescriptionList = /* @__PURE__ */ __name((output, context) => { + const retVal = (output || []).filter((e) => e != null).map((entry) => { + return de_ReplicaSettingsDescription(entry, context); + }); + return retVal; +}, "de_ReplicaSettingsDescriptionList"); +var de_RestoreSummary = /* @__PURE__ */ __name((output, context) => { + return (0, import_smithy_client.take)(output, { + RestoreDateTime: /* @__PURE__ */ __name((_) 
=> (0, import_smithy_client.expectNonNull)((0, import_smithy_client.parseEpochTimestamp)((0, import_smithy_client.expectNumber)(_))), "RestoreDateTime"), + RestoreInProgress: import_smithy_client.expectBoolean, + SourceBackupArn: import_smithy_client.expectString, + SourceTableArn: import_smithy_client.expectString + }); +}, "de_RestoreSummary"); +var de_RestoreTableFromBackupOutput = /* @__PURE__ */ __name((output, context) => { + return (0, import_smithy_client.take)(output, { + TableDescription: /* @__PURE__ */ __name((_) => de_TableDescription(_, context), "TableDescription") + }); +}, "de_RestoreTableFromBackupOutput"); +var de_RestoreTableToPointInTimeOutput = /* @__PURE__ */ __name((output, context) => { + return (0, import_smithy_client.take)(output, { + TableDescription: /* @__PURE__ */ __name((_) => de_TableDescription(_, context), "TableDescription") + }); +}, "de_RestoreTableToPointInTimeOutput"); +var de_ScanOutput = /* @__PURE__ */ __name((output, context) => { + return (0, import_smithy_client.take)(output, { + ConsumedCapacity: /* @__PURE__ */ __name((_) => de_ConsumedCapacity(_, context), "ConsumedCapacity"), + Count: import_smithy_client.expectInt32, + Items: /* @__PURE__ */ __name((_) => de_ItemList(_, context), "Items"), + LastEvaluatedKey: /* @__PURE__ */ __name((_) => de_Key(_, context), "LastEvaluatedKey"), + ScannedCount: import_smithy_client.expectInt32 + }); +}, "de_ScanOutput"); +var de_SecondaryIndexesCapacityMap = /* @__PURE__ */ __name((output, context) => { + return Object.entries(output).reduce((acc, [key, value]) => { + if (value === null) { + return acc; + } + acc[key] = de_Capacity(value, context); + return acc; + }, {}); +}, "de_SecondaryIndexesCapacityMap"); +var de_SourceTableDetails = /* @__PURE__ */ __name((output, context) => { + return (0, import_smithy_client.take)(output, { + BillingMode: import_smithy_client.expectString, + ItemCount: import_smithy_client.expectLong, + KeySchema: import_smithy_client._json, + 
OnDemandThroughput: import_smithy_client._json, + ProvisionedThroughput: import_smithy_client._json, + TableArn: import_smithy_client.expectString, + TableCreationDateTime: /* @__PURE__ */ __name((_) => (0, import_smithy_client.expectNonNull)((0, import_smithy_client.parseEpochTimestamp)((0, import_smithy_client.expectNumber)(_))), "TableCreationDateTime"), + TableId: import_smithy_client.expectString, + TableName: import_smithy_client.expectString, + TableSizeBytes: import_smithy_client.expectLong + }); +}, "de_SourceTableDetails"); +var de_SourceTableFeatureDetails = /* @__PURE__ */ __name((output, context) => { + return (0, import_smithy_client.take)(output, { + GlobalSecondaryIndexes: import_smithy_client._json, + LocalSecondaryIndexes: import_smithy_client._json, + SSEDescription: /* @__PURE__ */ __name((_) => de_SSEDescription(_, context), "SSEDescription"), + StreamDescription: import_smithy_client._json, + TimeToLiveDescription: import_smithy_client._json + }); +}, "de_SourceTableFeatureDetails"); +var de_SSEDescription = /* @__PURE__ */ __name((output, context) => { + return (0, import_smithy_client.take)(output, { + InaccessibleEncryptionDateTime: /* @__PURE__ */ __name((_) => (0, import_smithy_client.expectNonNull)((0, import_smithy_client.parseEpochTimestamp)((0, import_smithy_client.expectNumber)(_))), "InaccessibleEncryptionDateTime"), + KMSMasterKeyArn: import_smithy_client.expectString, + SSEType: import_smithy_client.expectString, + Status: import_smithy_client.expectString + }); +}, "de_SSEDescription"); +var de_TableAutoScalingDescription = /* @__PURE__ */ __name((output, context) => { + return (0, import_smithy_client.take)(output, { + Replicas: /* @__PURE__ */ __name((_) => de_ReplicaAutoScalingDescriptionList(_, context), "Replicas"), + TableName: import_smithy_client.expectString, + TableStatus: import_smithy_client.expectString + }); +}, "de_TableAutoScalingDescription"); +var de_TableClassSummary = /* @__PURE__ */ __name((output, context) 
=> { + return (0, import_smithy_client.take)(output, { + LastUpdateDateTime: /* @__PURE__ */ __name((_) => (0, import_smithy_client.expectNonNull)((0, import_smithy_client.parseEpochTimestamp)((0, import_smithy_client.expectNumber)(_))), "LastUpdateDateTime"), + TableClass: import_smithy_client.expectString + }); +}, "de_TableClassSummary"); +var de_TableDescription = /* @__PURE__ */ __name((output, context) => { + return (0, import_smithy_client.take)(output, { + ArchivalSummary: /* @__PURE__ */ __name((_) => de_ArchivalSummary(_, context), "ArchivalSummary"), + AttributeDefinitions: import_smithy_client._json, + BillingModeSummary: /* @__PURE__ */ __name((_) => de_BillingModeSummary(_, context), "BillingModeSummary"), + CreationDateTime: /* @__PURE__ */ __name((_) => (0, import_smithy_client.expectNonNull)((0, import_smithy_client.parseEpochTimestamp)((0, import_smithy_client.expectNumber)(_))), "CreationDateTime"), + DeletionProtectionEnabled: import_smithy_client.expectBoolean, + GlobalSecondaryIndexes: /* @__PURE__ */ __name((_) => de_GlobalSecondaryIndexDescriptionList(_, context), "GlobalSecondaryIndexes"), + GlobalTableVersion: import_smithy_client.expectString, + ItemCount: import_smithy_client.expectLong, + KeySchema: import_smithy_client._json, + LatestStreamArn: import_smithy_client.expectString, + LatestStreamLabel: import_smithy_client.expectString, + LocalSecondaryIndexes: import_smithy_client._json, + MultiRegionConsistency: import_smithy_client.expectString, + OnDemandThroughput: import_smithy_client._json, + ProvisionedThroughput: /* @__PURE__ */ __name((_) => de_ProvisionedThroughputDescription(_, context), "ProvisionedThroughput"), + Replicas: /* @__PURE__ */ __name((_) => de_ReplicaDescriptionList(_, context), "Replicas"), + RestoreSummary: /* @__PURE__ */ __name((_) => de_RestoreSummary(_, context), "RestoreSummary"), + SSEDescription: /* @__PURE__ */ __name((_) => de_SSEDescription(_, context), "SSEDescription"), + StreamSpecification: 
import_smithy_client._json, + TableArn: import_smithy_client.expectString, + TableClassSummary: /* @__PURE__ */ __name((_) => de_TableClassSummary(_, context), "TableClassSummary"), + TableId: import_smithy_client.expectString, + TableName: import_smithy_client.expectString, + TableSizeBytes: import_smithy_client.expectLong, + TableStatus: import_smithy_client.expectString, + WarmThroughput: import_smithy_client._json + }); +}, "de_TableDescription"); +var de_TransactGetItemsOutput = /* @__PURE__ */ __name((output, context) => { + return (0, import_smithy_client.take)(output, { + ConsumedCapacity: /* @__PURE__ */ __name((_) => de_ConsumedCapacityMultiple(_, context), "ConsumedCapacity"), + Responses: /* @__PURE__ */ __name((_) => de_ItemResponseList(_, context), "Responses") + }); +}, "de_TransactGetItemsOutput"); +var de_TransactionCanceledException = /* @__PURE__ */ __name((output, context) => { + return (0, import_smithy_client.take)(output, { + CancellationReasons: /* @__PURE__ */ __name((_) => de_CancellationReasonList(_, context), "CancellationReasons"), + Message: import_smithy_client.expectString + }); +}, "de_TransactionCanceledException"); +var de_TransactWriteItemsOutput = /* @__PURE__ */ __name((output, context) => { + return (0, import_smithy_client.take)(output, { + ConsumedCapacity: /* @__PURE__ */ __name((_) => de_ConsumedCapacityMultiple(_, context), "ConsumedCapacity"), + ItemCollectionMetrics: /* @__PURE__ */ __name((_) => de_ItemCollectionMetricsPerTable(_, context), "ItemCollectionMetrics") + }); +}, "de_TransactWriteItemsOutput"); +var de_UpdateContinuousBackupsOutput = /* @__PURE__ */ __name((output, context) => { + return (0, import_smithy_client.take)(output, { + ContinuousBackupsDescription: /* @__PURE__ */ __name((_) => de_ContinuousBackupsDescription(_, context), "ContinuousBackupsDescription") + }); +}, "de_UpdateContinuousBackupsOutput"); +var de_UpdateGlobalTableOutput = /* @__PURE__ */ __name((output, context) => { + return (0, 
import_smithy_client.take)(output, { + GlobalTableDescription: /* @__PURE__ */ __name((_) => de_GlobalTableDescription(_, context), "GlobalTableDescription") + }); +}, "de_UpdateGlobalTableOutput"); +var de_UpdateGlobalTableSettingsOutput = /* @__PURE__ */ __name((output, context) => { + return (0, import_smithy_client.take)(output, { + GlobalTableName: import_smithy_client.expectString, + ReplicaSettings: /* @__PURE__ */ __name((_) => de_ReplicaSettingsDescriptionList(_, context), "ReplicaSettings") + }); +}, "de_UpdateGlobalTableSettingsOutput"); +var de_UpdateItemOutput = /* @__PURE__ */ __name((output, context) => { + return (0, import_smithy_client.take)(output, { + Attributes: /* @__PURE__ */ __name((_) => de_AttributeMap(_, context), "Attributes"), + ConsumedCapacity: /* @__PURE__ */ __name((_) => de_ConsumedCapacity(_, context), "ConsumedCapacity"), + ItemCollectionMetrics: /* @__PURE__ */ __name((_) => de_ItemCollectionMetrics(_, context), "ItemCollectionMetrics") + }); +}, "de_UpdateItemOutput"); +var de_UpdateTableOutput = /* @__PURE__ */ __name((output, context) => { + return (0, import_smithy_client.take)(output, { + TableDescription: /* @__PURE__ */ __name((_) => de_TableDescription(_, context), "TableDescription") + }); +}, "de_UpdateTableOutput"); +var de_UpdateTableReplicaAutoScalingOutput = /* @__PURE__ */ __name((output, context) => { + return (0, import_smithy_client.take)(output, { + TableAutoScalingDescription: /* @__PURE__ */ __name((_) => de_TableAutoScalingDescription(_, context), "TableAutoScalingDescription") + }); +}, "de_UpdateTableReplicaAutoScalingOutput"); +var de_WriteRequest = /* @__PURE__ */ __name((output, context) => { + return (0, import_smithy_client.take)(output, { + DeleteRequest: /* @__PURE__ */ __name((_) => de_DeleteRequest(_, context), "DeleteRequest"), + PutRequest: /* @__PURE__ */ __name((_) => de_PutRequest(_, context), "PutRequest") + }); +}, "de_WriteRequest"); +var de_WriteRequests = /* @__PURE__ */ __name((output, 
context) => { + const retVal = (output || []).filter((e) => e != null).map((entry) => { + return de_WriteRequest(entry, context); + }); + return retVal; +}, "de_WriteRequests"); +var deserializeMetadata = /* @__PURE__ */ __name((output) => ({ + httpStatusCode: output.statusCode, + requestId: output.headers["x-amzn-requestid"] ?? output.headers["x-amzn-request-id"] ?? output.headers["x-amz-request-id"], + extendedRequestId: output.headers["x-amz-id-2"], + cfId: output.headers["x-amz-cf-id"] +}), "deserializeMetadata"); +var throwDefaultError = (0, import_smithy_client.withBaseException)(DynamoDBServiceException); +var buildHttpRpcRequest = /* @__PURE__ */ __name(async (context, headers, path, resolvedHostname, body) => { + const { hostname, protocol = "https", port, path: basePath } = await context.endpoint(); + const contents = { + protocol, + hostname, + port, + method: "POST", + path: basePath.endsWith("/") ? basePath.slice(0, -1) + path : basePath + path, + headers + }; + if (resolvedHostname !== void 0) { + contents.hostname = resolvedHostname; + } + if (body !== void 0) { + contents.body = body; + } + return new import_protocol_http.HttpRequest(contents); +}, "buildHttpRpcRequest"); +function sharedHeaders(operation) { + return { + "content-type": "application/x-amz-json-1.0", + "x-amz-target": `DynamoDB_20120810.${operation}` + }; +} +__name(sharedHeaders, "sharedHeaders"); + +// src/commands/DescribeEndpointsCommand.ts +var DescribeEndpointsCommand = class extends import_smithy_client.Command.classBuilder().ep(commonParams).m(function(Command, cs, config, o) { + return [ + (0, import_middleware_serde.getSerdePlugin)(config, this.serialize, this.deserialize), + (0, import_middleware_endpoint.getEndpointPlugin)(config, Command.getEndpointParameterInstructions()) + ]; +}).s("DynamoDB_20120810", "DescribeEndpoints", {}).n("DynamoDBClient", "DescribeEndpointsCommand").f(void 0, void 0).ser(se_DescribeEndpointsCommand).de(de_DescribeEndpointsCommand).build() { + 
static { + __name(this, "DescribeEndpointsCommand"); + } +}; + +// src/DynamoDBClient.ts +var import_runtimeConfig = require("././runtimeConfig"); + +// src/runtimeExtensions.ts +var import_region_config_resolver = require("@aws-sdk/region-config-resolver"); + + + +// src/auth/httpAuthExtensionConfiguration.ts +var getHttpAuthExtensionConfiguration = /* @__PURE__ */ __name((runtimeConfig) => { + const _httpAuthSchemes = runtimeConfig.httpAuthSchemes; + let _httpAuthSchemeProvider = runtimeConfig.httpAuthSchemeProvider; + let _credentials = runtimeConfig.credentials; + return { + setHttpAuthScheme(httpAuthScheme) { + const index = _httpAuthSchemes.findIndex((scheme) => scheme.schemeId === httpAuthScheme.schemeId); + if (index === -1) { + _httpAuthSchemes.push(httpAuthScheme); + } else { + _httpAuthSchemes.splice(index, 1, httpAuthScheme); + } + }, + httpAuthSchemes() { + return _httpAuthSchemes; + }, + setHttpAuthSchemeProvider(httpAuthSchemeProvider) { + _httpAuthSchemeProvider = httpAuthSchemeProvider; + }, + httpAuthSchemeProvider() { + return _httpAuthSchemeProvider; + }, + setCredentials(credentials) { + _credentials = credentials; + }, + credentials() { + return _credentials; + } + }; +}, "getHttpAuthExtensionConfiguration"); +var resolveHttpAuthRuntimeConfig = /* @__PURE__ */ __name((config) => { + return { + httpAuthSchemes: config.httpAuthSchemes(), + httpAuthSchemeProvider: config.httpAuthSchemeProvider(), + credentials: config.credentials() + }; +}, "resolveHttpAuthRuntimeConfig"); + +// src/runtimeExtensions.ts +var resolveRuntimeExtensions = /* @__PURE__ */ __name((runtimeConfig, extensions) => { + const extensionConfiguration = Object.assign( + (0, import_region_config_resolver.getAwsRegionExtensionConfiguration)(runtimeConfig), + (0, import_smithy_client.getDefaultExtensionConfiguration)(runtimeConfig), + (0, import_protocol_http.getHttpHandlerExtensionConfiguration)(runtimeConfig), + getHttpAuthExtensionConfiguration(runtimeConfig) + ); + 
extensions.forEach((extension) => extension.configure(extensionConfiguration)); + return Object.assign( + runtimeConfig, + (0, import_region_config_resolver.resolveAwsRegionExtensionConfiguration)(extensionConfiguration), + (0, import_smithy_client.resolveDefaultRuntimeConfig)(extensionConfiguration), + (0, import_protocol_http.resolveHttpHandlerRuntimeConfig)(extensionConfiguration), + resolveHttpAuthRuntimeConfig(extensionConfiguration) + ); +}, "resolveRuntimeExtensions"); + +// src/DynamoDBClient.ts +var DynamoDBClient = class extends import_smithy_client.Client { + static { + __name(this, "DynamoDBClient"); + } + /** + * The resolved configuration of DynamoDBClient class. This is resolved and normalized from the {@link DynamoDBClientConfig | constructor configuration interface}. + */ + config; + constructor(...[configuration]) { + const _config_0 = (0, import_runtimeConfig.getRuntimeConfig)(configuration || {}); + super(_config_0); + this.initConfig = _config_0; + const _config_1 = resolveClientEndpointParameters(_config_0); + const _config_2 = (0, import_account_id_endpoint.resolveAccountIdEndpointModeConfig)(_config_1); + const _config_3 = (0, import_middleware_user_agent.resolveUserAgentConfig)(_config_2); + const _config_4 = (0, import_middleware_retry.resolveRetryConfig)(_config_3); + const _config_5 = (0, import_config_resolver.resolveRegionConfig)(_config_4); + const _config_6 = (0, import_middleware_host_header.resolveHostHeaderConfig)(_config_5); + const _config_7 = (0, import_middleware_endpoint.resolveEndpointConfig)(_config_6); + const _config_8 = (0, import_httpAuthSchemeProvider.resolveHttpAuthSchemeConfig)(_config_7); + const _config_9 = (0, import_middleware_endpoint_discovery.resolveEndpointDiscoveryConfig)(_config_8, { + endpointDiscoveryCommandCtor: DescribeEndpointsCommand + }); + const _config_10 = resolveRuntimeExtensions(_config_9, configuration?.extensions || []); + this.config = _config_10; + this.middlewareStack.use((0, 
import_middleware_user_agent.getUserAgentPlugin)(this.config)); + this.middlewareStack.use((0, import_middleware_retry.getRetryPlugin)(this.config)); + this.middlewareStack.use((0, import_middleware_content_length.getContentLengthPlugin)(this.config)); + this.middlewareStack.use((0, import_middleware_host_header.getHostHeaderPlugin)(this.config)); + this.middlewareStack.use((0, import_middleware_logger.getLoggerPlugin)(this.config)); + this.middlewareStack.use((0, import_middleware_recursion_detection.getRecursionDetectionPlugin)(this.config)); + this.middlewareStack.use( + (0, import_core2.getHttpAuthSchemeEndpointRuleSetPlugin)(this.config, { + httpAuthSchemeParametersProvider: import_httpAuthSchemeProvider.defaultDynamoDBHttpAuthSchemeParametersProvider, + identityProviderConfigProvider: /* @__PURE__ */ __name(async (config) => new import_core2.DefaultIdentityProviderConfig({ + "aws.auth#sigv4": config.credentials + }), "identityProviderConfigProvider") + }) + ); + this.middlewareStack.use((0, import_core2.getHttpSigningPlugin)(this.config)); + } + /** + * Destroy underlying resources, like sockets. It's usually not necessary to do this. + * However in Node.js, it's best to explicitly shut down the client's agent when it is no longer needed. + * Otherwise, sockets might stay open for quite a long time before the server terminates them. 
+ */ + destroy() { + super.destroy(); + } +}; + +// src/DynamoDB.ts + + +// src/commands/BatchExecuteStatementCommand.ts + + + +var BatchExecuteStatementCommand = class extends import_smithy_client.Command.classBuilder().ep(commonParams).m(function(Command, cs, config, o) { + return [ + (0, import_middleware_serde.getSerdePlugin)(config, this.serialize, this.deserialize), + (0, import_middleware_endpoint.getEndpointPlugin)(config, Command.getEndpointParameterInstructions()) + ]; +}).s("DynamoDB_20120810", "BatchExecuteStatement", {}).n("DynamoDBClient", "BatchExecuteStatementCommand").f(void 0, void 0).ser(se_BatchExecuteStatementCommand).de(de_BatchExecuteStatementCommand).build() { + static { + __name(this, "BatchExecuteStatementCommand"); + } +}; + +// src/commands/BatchGetItemCommand.ts + + + +var BatchGetItemCommand = class extends import_smithy_client.Command.classBuilder().ep({ + ...commonParams, + ResourceArnList: { type: "operationContextParams", get: /* @__PURE__ */ __name((input) => Object.keys(input?.RequestItems ?? {}), "get") } +}).m(function(Command, cs, config, o) { + return [ + (0, import_middleware_serde.getSerdePlugin)(config, this.serialize, this.deserialize), + (0, import_middleware_endpoint.getEndpointPlugin)(config, Command.getEndpointParameterInstructions()) + ]; +}).s("DynamoDB_20120810", "BatchGetItem", {}).n("DynamoDBClient", "BatchGetItemCommand").f(void 0, void 0).ser(se_BatchGetItemCommand).de(de_BatchGetItemCommand).build() { + static { + __name(this, "BatchGetItemCommand"); + } +}; + +// src/commands/BatchWriteItemCommand.ts + + + +var BatchWriteItemCommand = class extends import_smithy_client.Command.classBuilder().ep({ + ...commonParams, + ResourceArnList: { type: "operationContextParams", get: /* @__PURE__ */ __name((input) => Object.keys(input?.RequestItems ?? 
{}), "get") } +}).m(function(Command, cs, config, o) { + return [ + (0, import_middleware_serde.getSerdePlugin)(config, this.serialize, this.deserialize), + (0, import_middleware_endpoint.getEndpointPlugin)(config, Command.getEndpointParameterInstructions()) + ]; +}).s("DynamoDB_20120810", "BatchWriteItem", {}).n("DynamoDBClient", "BatchWriteItemCommand").f(void 0, void 0).ser(se_BatchWriteItemCommand).de(de_BatchWriteItemCommand).build() { + static { + __name(this, "BatchWriteItemCommand"); + } +}; + +// src/commands/CreateBackupCommand.ts + + + +var CreateBackupCommand = class extends import_smithy_client.Command.classBuilder().ep({ + ...commonParams, + ResourceArn: { type: "contextParams", name: "TableName" } +}).m(function(Command, cs, config, o) { + return [ + (0, import_middleware_serde.getSerdePlugin)(config, this.serialize, this.deserialize), + (0, import_middleware_endpoint.getEndpointPlugin)(config, Command.getEndpointParameterInstructions()) + ]; +}).s("DynamoDB_20120810", "CreateBackup", {}).n("DynamoDBClient", "CreateBackupCommand").f(void 0, void 0).ser(se_CreateBackupCommand).de(de_CreateBackupCommand).build() { + static { + __name(this, "CreateBackupCommand"); + } +}; + +// src/commands/CreateGlobalTableCommand.ts + + + +var CreateGlobalTableCommand = class extends import_smithy_client.Command.classBuilder().ep({ + ...commonParams, + ResourceArn: { type: "contextParams", name: "GlobalTableName" } +}).m(function(Command, cs, config, o) { + return [ + (0, import_middleware_serde.getSerdePlugin)(config, this.serialize, this.deserialize), + (0, import_middleware_endpoint.getEndpointPlugin)(config, Command.getEndpointParameterInstructions()) + ]; +}).s("DynamoDB_20120810", "CreateGlobalTable", {}).n("DynamoDBClient", "CreateGlobalTableCommand").f(void 0, void 0).ser(se_CreateGlobalTableCommand).de(de_CreateGlobalTableCommand).build() { + static { + __name(this, "CreateGlobalTableCommand"); + } +}; + +// src/commands/CreateTableCommand.ts + + + +var 
CreateTableCommand = class extends import_smithy_client.Command.classBuilder().ep({ + ...commonParams, + ResourceArn: { type: "contextParams", name: "TableName" } +}).m(function(Command, cs, config, o) { + return [ + (0, import_middleware_serde.getSerdePlugin)(config, this.serialize, this.deserialize), + (0, import_middleware_endpoint.getEndpointPlugin)(config, Command.getEndpointParameterInstructions()) + ]; +}).s("DynamoDB_20120810", "CreateTable", {}).n("DynamoDBClient", "CreateTableCommand").f(void 0, void 0).ser(se_CreateTableCommand).de(de_CreateTableCommand).build() { + static { + __name(this, "CreateTableCommand"); + } +}; + +// src/commands/DeleteBackupCommand.ts + + + +var DeleteBackupCommand = class extends import_smithy_client.Command.classBuilder().ep({ + ...commonParams, + ResourceArn: { type: "contextParams", name: "BackupArn" } +}).m(function(Command, cs, config, o) { + return [ + (0, import_middleware_serde.getSerdePlugin)(config, this.serialize, this.deserialize), + (0, import_middleware_endpoint.getEndpointPlugin)(config, Command.getEndpointParameterInstructions()) + ]; +}).s("DynamoDB_20120810", "DeleteBackup", {}).n("DynamoDBClient", "DeleteBackupCommand").f(void 0, void 0).ser(se_DeleteBackupCommand).de(de_DeleteBackupCommand).build() { + static { + __name(this, "DeleteBackupCommand"); + } +}; + +// src/commands/DeleteItemCommand.ts + + + +var DeleteItemCommand = class extends import_smithy_client.Command.classBuilder().ep({ + ...commonParams, + ResourceArn: { type: "contextParams", name: "TableName" } +}).m(function(Command, cs, config, o) { + return [ + (0, import_middleware_serde.getSerdePlugin)(config, this.serialize, this.deserialize), + (0, import_middleware_endpoint.getEndpointPlugin)(config, Command.getEndpointParameterInstructions()) + ]; +}).s("DynamoDB_20120810", "DeleteItem", {}).n("DynamoDBClient", "DeleteItemCommand").f(void 0, void 0).ser(se_DeleteItemCommand).de(de_DeleteItemCommand).build() { + static { + __name(this, 
"DeleteItemCommand"); + } +}; + +// src/commands/DeleteResourcePolicyCommand.ts + + + +var DeleteResourcePolicyCommand = class extends import_smithy_client.Command.classBuilder().ep({ + ...commonParams, + ResourceArn: { type: "contextParams", name: "ResourceArn" } +}).m(function(Command, cs, config, o) { + return [ + (0, import_middleware_serde.getSerdePlugin)(config, this.serialize, this.deserialize), + (0, import_middleware_endpoint.getEndpointPlugin)(config, Command.getEndpointParameterInstructions()) + ]; +}).s("DynamoDB_20120810", "DeleteResourcePolicy", {}).n("DynamoDBClient", "DeleteResourcePolicyCommand").f(void 0, void 0).ser(se_DeleteResourcePolicyCommand).de(de_DeleteResourcePolicyCommand).build() { + static { + __name(this, "DeleteResourcePolicyCommand"); + } +}; + +// src/commands/DeleteTableCommand.ts + + + +var DeleteTableCommand = class extends import_smithy_client.Command.classBuilder().ep({ + ...commonParams, + ResourceArn: { type: "contextParams", name: "TableName" } +}).m(function(Command, cs, config, o) { + return [ + (0, import_middleware_serde.getSerdePlugin)(config, this.serialize, this.deserialize), + (0, import_middleware_endpoint.getEndpointPlugin)(config, Command.getEndpointParameterInstructions()) + ]; +}).s("DynamoDB_20120810", "DeleteTable", {}).n("DynamoDBClient", "DeleteTableCommand").f(void 0, void 0).ser(se_DeleteTableCommand).de(de_DeleteTableCommand).build() { + static { + __name(this, "DeleteTableCommand"); + } +}; + +// src/commands/DescribeBackupCommand.ts + + + +var DescribeBackupCommand = class extends import_smithy_client.Command.classBuilder().ep({ + ...commonParams, + ResourceArn: { type: "contextParams", name: "BackupArn" } +}).m(function(Command, cs, config, o) { + return [ + (0, import_middleware_serde.getSerdePlugin)(config, this.serialize, this.deserialize), + (0, import_middleware_endpoint.getEndpointPlugin)(config, Command.getEndpointParameterInstructions()) + ]; +}).s("DynamoDB_20120810", "DescribeBackup", 
{}).n("DynamoDBClient", "DescribeBackupCommand").f(void 0, void 0).ser(se_DescribeBackupCommand).de(de_DescribeBackupCommand).build() { + static { + __name(this, "DescribeBackupCommand"); + } +}; + +// src/commands/DescribeContinuousBackupsCommand.ts + + + +var DescribeContinuousBackupsCommand = class extends import_smithy_client.Command.classBuilder().ep({ + ...commonParams, + ResourceArn: { type: "contextParams", name: "TableName" } +}).m(function(Command, cs, config, o) { + return [ + (0, import_middleware_serde.getSerdePlugin)(config, this.serialize, this.deserialize), + (0, import_middleware_endpoint.getEndpointPlugin)(config, Command.getEndpointParameterInstructions()) + ]; +}).s("DynamoDB_20120810", "DescribeContinuousBackups", {}).n("DynamoDBClient", "DescribeContinuousBackupsCommand").f(void 0, void 0).ser(se_DescribeContinuousBackupsCommand).de(de_DescribeContinuousBackupsCommand).build() { + static { + __name(this, "DescribeContinuousBackupsCommand"); + } +}; + +// src/commands/DescribeContributorInsightsCommand.ts + + + +var DescribeContributorInsightsCommand = class extends import_smithy_client.Command.classBuilder().ep({ + ...commonParams, + ResourceArn: { type: "contextParams", name: "TableName" } +}).m(function(Command, cs, config, o) { + return [ + (0, import_middleware_serde.getSerdePlugin)(config, this.serialize, this.deserialize), + (0, import_middleware_endpoint.getEndpointPlugin)(config, Command.getEndpointParameterInstructions()) + ]; +}).s("DynamoDB_20120810", "DescribeContributorInsights", {}).n("DynamoDBClient", "DescribeContributorInsightsCommand").f(void 0, void 0).ser(se_DescribeContributorInsightsCommand).de(de_DescribeContributorInsightsCommand).build() { + static { + __name(this, "DescribeContributorInsightsCommand"); + } +}; + +// src/commands/DescribeExportCommand.ts + + + +var DescribeExportCommand = class extends import_smithy_client.Command.classBuilder().ep({ + ...commonParams, + ResourceArn: { type: "contextParams", name: 
"ExportArn" } +}).m(function(Command, cs, config, o) { + return [ + (0, import_middleware_serde.getSerdePlugin)(config, this.serialize, this.deserialize), + (0, import_middleware_endpoint.getEndpointPlugin)(config, Command.getEndpointParameterInstructions()) + ]; +}).s("DynamoDB_20120810", "DescribeExport", {}).n("DynamoDBClient", "DescribeExportCommand").f(void 0, void 0).ser(se_DescribeExportCommand).de(de_DescribeExportCommand).build() { + static { + __name(this, "DescribeExportCommand"); + } +}; + +// src/commands/DescribeGlobalTableCommand.ts + + + +var DescribeGlobalTableCommand = class extends import_smithy_client.Command.classBuilder().ep({ + ...commonParams, + ResourceArn: { type: "contextParams", name: "GlobalTableName" } +}).m(function(Command, cs, config, o) { + return [ + (0, import_middleware_serde.getSerdePlugin)(config, this.serialize, this.deserialize), + (0, import_middleware_endpoint.getEndpointPlugin)(config, Command.getEndpointParameterInstructions()) + ]; +}).s("DynamoDB_20120810", "DescribeGlobalTable", {}).n("DynamoDBClient", "DescribeGlobalTableCommand").f(void 0, void 0).ser(se_DescribeGlobalTableCommand).de(de_DescribeGlobalTableCommand).build() { + static { + __name(this, "DescribeGlobalTableCommand"); + } +}; + +// src/commands/DescribeGlobalTableSettingsCommand.ts + + + +var DescribeGlobalTableSettingsCommand = class extends import_smithy_client.Command.classBuilder().ep({ + ...commonParams, + ResourceArn: { type: "contextParams", name: "GlobalTableName" } +}).m(function(Command, cs, config, o) { + return [ + (0, import_middleware_serde.getSerdePlugin)(config, this.serialize, this.deserialize), + (0, import_middleware_endpoint.getEndpointPlugin)(config, Command.getEndpointParameterInstructions()) + ]; +}).s("DynamoDB_20120810", "DescribeGlobalTableSettings", {}).n("DynamoDBClient", "DescribeGlobalTableSettingsCommand").f(void 0, void 0).ser(se_DescribeGlobalTableSettingsCommand).de(de_DescribeGlobalTableSettingsCommand).build() { + 
static { + __name(this, "DescribeGlobalTableSettingsCommand"); + } +}; + +// src/commands/DescribeImportCommand.ts + + + +var DescribeImportCommand = class extends import_smithy_client.Command.classBuilder().ep({ + ...commonParams, + ResourceArn: { type: "contextParams", name: "ImportArn" } +}).m(function(Command, cs, config, o) { + return [ + (0, import_middleware_serde.getSerdePlugin)(config, this.serialize, this.deserialize), + (0, import_middleware_endpoint.getEndpointPlugin)(config, Command.getEndpointParameterInstructions()) + ]; +}).s("DynamoDB_20120810", "DescribeImport", {}).n("DynamoDBClient", "DescribeImportCommand").f(void 0, void 0).ser(se_DescribeImportCommand).de(de_DescribeImportCommand).build() { + static { + __name(this, "DescribeImportCommand"); + } +}; + +// src/commands/DescribeKinesisStreamingDestinationCommand.ts + + + +var DescribeKinesisStreamingDestinationCommand = class extends import_smithy_client.Command.classBuilder().ep({ + ...commonParams, + ResourceArn: { type: "contextParams", name: "TableName" } +}).m(function(Command, cs, config, o) { + return [ + (0, import_middleware_serde.getSerdePlugin)(config, this.serialize, this.deserialize), + (0, import_middleware_endpoint.getEndpointPlugin)(config, Command.getEndpointParameterInstructions()) + ]; +}).s("DynamoDB_20120810", "DescribeKinesisStreamingDestination", {}).n("DynamoDBClient", "DescribeKinesisStreamingDestinationCommand").f(void 0, void 0).ser(se_DescribeKinesisStreamingDestinationCommand).de(de_DescribeKinesisStreamingDestinationCommand).build() { + static { + __name(this, "DescribeKinesisStreamingDestinationCommand"); + } +}; + +// src/commands/DescribeLimitsCommand.ts + + + +var DescribeLimitsCommand = class extends import_smithy_client.Command.classBuilder().ep(commonParams).m(function(Command, cs, config, o) { + return [ + (0, import_middleware_serde.getSerdePlugin)(config, this.serialize, this.deserialize), + (0, import_middleware_endpoint.getEndpointPlugin)(config, 
Command.getEndpointParameterInstructions()) + ]; +}).s("DynamoDB_20120810", "DescribeLimits", {}).n("DynamoDBClient", "DescribeLimitsCommand").f(void 0, void 0).ser(se_DescribeLimitsCommand).de(de_DescribeLimitsCommand).build() { + static { + __name(this, "DescribeLimitsCommand"); + } +}; + +// src/commands/DescribeTableCommand.ts + + + +var DescribeTableCommand = class extends import_smithy_client.Command.classBuilder().ep({ + ...commonParams, + ResourceArn: { type: "contextParams", name: "TableName" } +}).m(function(Command, cs, config, o) { + return [ + (0, import_middleware_serde.getSerdePlugin)(config, this.serialize, this.deserialize), + (0, import_middleware_endpoint.getEndpointPlugin)(config, Command.getEndpointParameterInstructions()) + ]; +}).s("DynamoDB_20120810", "DescribeTable", {}).n("DynamoDBClient", "DescribeTableCommand").f(void 0, void 0).ser(se_DescribeTableCommand).de(de_DescribeTableCommand).build() { + static { + __name(this, "DescribeTableCommand"); + } +}; + +// src/commands/DescribeTableReplicaAutoScalingCommand.ts + + + +var DescribeTableReplicaAutoScalingCommand = class extends import_smithy_client.Command.classBuilder().ep({ + ...commonParams, + ResourceArn: { type: "contextParams", name: "TableName" } +}).m(function(Command, cs, config, o) { + return [ + (0, import_middleware_serde.getSerdePlugin)(config, this.serialize, this.deserialize), + (0, import_middleware_endpoint.getEndpointPlugin)(config, Command.getEndpointParameterInstructions()) + ]; +}).s("DynamoDB_20120810", "DescribeTableReplicaAutoScaling", {}).n("DynamoDBClient", "DescribeTableReplicaAutoScalingCommand").f(void 0, void 0).ser(se_DescribeTableReplicaAutoScalingCommand).de(de_DescribeTableReplicaAutoScalingCommand).build() { + static { + __name(this, "DescribeTableReplicaAutoScalingCommand"); + } +}; + +// src/commands/DescribeTimeToLiveCommand.ts + + + +var DescribeTimeToLiveCommand = class extends import_smithy_client.Command.classBuilder().ep({ + ...commonParams, + 
ResourceArn: { type: "contextParams", name: "TableName" } +}).m(function(Command, cs, config, o) { + return [ + (0, import_middleware_serde.getSerdePlugin)(config, this.serialize, this.deserialize), + (0, import_middleware_endpoint.getEndpointPlugin)(config, Command.getEndpointParameterInstructions()) + ]; +}).s("DynamoDB_20120810", "DescribeTimeToLive", {}).n("DynamoDBClient", "DescribeTimeToLiveCommand").f(void 0, void 0).ser(se_DescribeTimeToLiveCommand).de(de_DescribeTimeToLiveCommand).build() { + static { + __name(this, "DescribeTimeToLiveCommand"); + } +}; + +// src/commands/DisableKinesisStreamingDestinationCommand.ts + + + +var DisableKinesisStreamingDestinationCommand = class extends import_smithy_client.Command.classBuilder().ep({ + ...commonParams, + ResourceArn: { type: "contextParams", name: "TableName" } +}).m(function(Command, cs, config, o) { + return [ + (0, import_middleware_serde.getSerdePlugin)(config, this.serialize, this.deserialize), + (0, import_middleware_endpoint.getEndpointPlugin)(config, Command.getEndpointParameterInstructions()) + ]; +}).s("DynamoDB_20120810", "DisableKinesisStreamingDestination", {}).n("DynamoDBClient", "DisableKinesisStreamingDestinationCommand").f(void 0, void 0).ser(se_DisableKinesisStreamingDestinationCommand).de(de_DisableKinesisStreamingDestinationCommand).build() { + static { + __name(this, "DisableKinesisStreamingDestinationCommand"); + } +}; + +// src/commands/EnableKinesisStreamingDestinationCommand.ts + + + +var EnableKinesisStreamingDestinationCommand = class extends import_smithy_client.Command.classBuilder().ep({ + ...commonParams, + ResourceArn: { type: "contextParams", name: "TableName" } +}).m(function(Command, cs, config, o) { + return [ + (0, import_middleware_serde.getSerdePlugin)(config, this.serialize, this.deserialize), + (0, import_middleware_endpoint.getEndpointPlugin)(config, Command.getEndpointParameterInstructions()) + ]; +}).s("DynamoDB_20120810", "EnableKinesisStreamingDestination", 
{}).n("DynamoDBClient", "EnableKinesisStreamingDestinationCommand").f(void 0, void 0).ser(se_EnableKinesisStreamingDestinationCommand).de(de_EnableKinesisStreamingDestinationCommand).build() { + static { + __name(this, "EnableKinesisStreamingDestinationCommand"); + } +}; + +// src/commands/ExecuteStatementCommand.ts + + + +var ExecuteStatementCommand = class extends import_smithy_client.Command.classBuilder().ep(commonParams).m(function(Command, cs, config, o) { + return [ + (0, import_middleware_serde.getSerdePlugin)(config, this.serialize, this.deserialize), + (0, import_middleware_endpoint.getEndpointPlugin)(config, Command.getEndpointParameterInstructions()) + ]; +}).s("DynamoDB_20120810", "ExecuteStatement", {}).n("DynamoDBClient", "ExecuteStatementCommand").f(void 0, void 0).ser(se_ExecuteStatementCommand).de(de_ExecuteStatementCommand).build() { + static { + __name(this, "ExecuteStatementCommand"); + } +}; + +// src/commands/ExecuteTransactionCommand.ts + + + +var ExecuteTransactionCommand = class extends import_smithy_client.Command.classBuilder().ep(commonParams).m(function(Command, cs, config, o) { + return [ + (0, import_middleware_serde.getSerdePlugin)(config, this.serialize, this.deserialize), + (0, import_middleware_endpoint.getEndpointPlugin)(config, Command.getEndpointParameterInstructions()) + ]; +}).s("DynamoDB_20120810", "ExecuteTransaction", {}).n("DynamoDBClient", "ExecuteTransactionCommand").f(void 0, void 0).ser(se_ExecuteTransactionCommand).de(de_ExecuteTransactionCommand).build() { + static { + __name(this, "ExecuteTransactionCommand"); + } +}; + +// src/commands/ExportTableToPointInTimeCommand.ts + + + +var ExportTableToPointInTimeCommand = class extends import_smithy_client.Command.classBuilder().ep({ + ...commonParams, + ResourceArn: { type: "contextParams", name: "TableArn" } +}).m(function(Command, cs, config, o) { + return [ + (0, import_middleware_serde.getSerdePlugin)(config, this.serialize, this.deserialize), + (0, 
import_middleware_endpoint.getEndpointPlugin)(config, Command.getEndpointParameterInstructions()) + ]; +}).s("DynamoDB_20120810", "ExportTableToPointInTime", {}).n("DynamoDBClient", "ExportTableToPointInTimeCommand").f(void 0, void 0).ser(se_ExportTableToPointInTimeCommand).de(de_ExportTableToPointInTimeCommand).build() { + static { + __name(this, "ExportTableToPointInTimeCommand"); + } +}; + +// src/commands/GetItemCommand.ts + + + +var GetItemCommand = class extends import_smithy_client.Command.classBuilder().ep({ + ...commonParams, + ResourceArn: { type: "contextParams", name: "TableName" } +}).m(function(Command, cs, config, o) { + return [ + (0, import_middleware_serde.getSerdePlugin)(config, this.serialize, this.deserialize), + (0, import_middleware_endpoint.getEndpointPlugin)(config, Command.getEndpointParameterInstructions()) + ]; +}).s("DynamoDB_20120810", "GetItem", {}).n("DynamoDBClient", "GetItemCommand").f(void 0, void 0).ser(se_GetItemCommand).de(de_GetItemCommand).build() { + static { + __name(this, "GetItemCommand"); + } +}; + +// src/commands/GetResourcePolicyCommand.ts + + + +var GetResourcePolicyCommand = class extends import_smithy_client.Command.classBuilder().ep({ + ...commonParams, + ResourceArn: { type: "contextParams", name: "ResourceArn" } +}).m(function(Command, cs, config, o) { + return [ + (0, import_middleware_serde.getSerdePlugin)(config, this.serialize, this.deserialize), + (0, import_middleware_endpoint.getEndpointPlugin)(config, Command.getEndpointParameterInstructions()) + ]; +}).s("DynamoDB_20120810", "GetResourcePolicy", {}).n("DynamoDBClient", "GetResourcePolicyCommand").f(void 0, void 0).ser(se_GetResourcePolicyCommand).de(de_GetResourcePolicyCommand).build() { + static { + __name(this, "GetResourcePolicyCommand"); + } +}; + +// src/commands/ImportTableCommand.ts + + + +var ImportTableCommand = class extends import_smithy_client.Command.classBuilder().ep({ + ...commonParams, + ResourceArn: { type: "operationContextParams", 
get: /* @__PURE__ */ __name((input) => input?.TableCreationParameters?.TableName, "get") } +}).m(function(Command, cs, config, o) { + return [ + (0, import_middleware_serde.getSerdePlugin)(config, this.serialize, this.deserialize), + (0, import_middleware_endpoint.getEndpointPlugin)(config, Command.getEndpointParameterInstructions()) + ]; +}).s("DynamoDB_20120810", "ImportTable", {}).n("DynamoDBClient", "ImportTableCommand").f(void 0, void 0).ser(se_ImportTableCommand).de(de_ImportTableCommand).build() { + static { + __name(this, "ImportTableCommand"); + } +}; + +// src/commands/ListBackupsCommand.ts + + + +var ListBackupsCommand = class extends import_smithy_client.Command.classBuilder().ep({ + ...commonParams, + ResourceArn: { type: "contextParams", name: "TableName" } +}).m(function(Command, cs, config, o) { + return [ + (0, import_middleware_serde.getSerdePlugin)(config, this.serialize, this.deserialize), + (0, import_middleware_endpoint.getEndpointPlugin)(config, Command.getEndpointParameterInstructions()) + ]; +}).s("DynamoDB_20120810", "ListBackups", {}).n("DynamoDBClient", "ListBackupsCommand").f(void 0, void 0).ser(se_ListBackupsCommand).de(de_ListBackupsCommand).build() { + static { + __name(this, "ListBackupsCommand"); + } +}; + +// src/commands/ListContributorInsightsCommand.ts + + + +var ListContributorInsightsCommand = class extends import_smithy_client.Command.classBuilder().ep({ + ...commonParams, + ResourceArn: { type: "contextParams", name: "TableName" } +}).m(function(Command, cs, config, o) { + return [ + (0, import_middleware_serde.getSerdePlugin)(config, this.serialize, this.deserialize), + (0, import_middleware_endpoint.getEndpointPlugin)(config, Command.getEndpointParameterInstructions()) + ]; +}).s("DynamoDB_20120810", "ListContributorInsights", {}).n("DynamoDBClient", "ListContributorInsightsCommand").f(void 0, void 0).ser(se_ListContributorInsightsCommand).de(de_ListContributorInsightsCommand).build() { + static { + __name(this, 
"ListContributorInsightsCommand"); + } +}; + +// src/commands/ListExportsCommand.ts + + + +var ListExportsCommand = class extends import_smithy_client.Command.classBuilder().ep({ + ...commonParams, + ResourceArn: { type: "contextParams", name: "TableArn" } +}).m(function(Command, cs, config, o) { + return [ + (0, import_middleware_serde.getSerdePlugin)(config, this.serialize, this.deserialize), + (0, import_middleware_endpoint.getEndpointPlugin)(config, Command.getEndpointParameterInstructions()) + ]; +}).s("DynamoDB_20120810", "ListExports", {}).n("DynamoDBClient", "ListExportsCommand").f(void 0, void 0).ser(se_ListExportsCommand).de(de_ListExportsCommand).build() { + static { + __name(this, "ListExportsCommand"); + } +}; + +// src/commands/ListGlobalTablesCommand.ts + + + +var ListGlobalTablesCommand = class extends import_smithy_client.Command.classBuilder().ep(commonParams).m(function(Command, cs, config, o) { + return [ + (0, import_middleware_serde.getSerdePlugin)(config, this.serialize, this.deserialize), + (0, import_middleware_endpoint.getEndpointPlugin)(config, Command.getEndpointParameterInstructions()) + ]; +}).s("DynamoDB_20120810", "ListGlobalTables", {}).n("DynamoDBClient", "ListGlobalTablesCommand").f(void 0, void 0).ser(se_ListGlobalTablesCommand).de(de_ListGlobalTablesCommand).build() { + static { + __name(this, "ListGlobalTablesCommand"); + } +}; + +// src/commands/ListImportsCommand.ts + + + +var ListImportsCommand = class extends import_smithy_client.Command.classBuilder().ep({ + ...commonParams, + ResourceArn: { type: "contextParams", name: "TableArn" } +}).m(function(Command, cs, config, o) { + return [ + (0, import_middleware_serde.getSerdePlugin)(config, this.serialize, this.deserialize), + (0, import_middleware_endpoint.getEndpointPlugin)(config, Command.getEndpointParameterInstructions()) + ]; +}).s("DynamoDB_20120810", "ListImports", {}).n("DynamoDBClient", "ListImportsCommand").f(void 0, void 
0).ser(se_ListImportsCommand).de(de_ListImportsCommand).build() { + static { + __name(this, "ListImportsCommand"); + } +}; + +// src/commands/ListTablesCommand.ts + + + +var ListTablesCommand = class extends import_smithy_client.Command.classBuilder().ep(commonParams).m(function(Command, cs, config, o) { + return [ + (0, import_middleware_serde.getSerdePlugin)(config, this.serialize, this.deserialize), + (0, import_middleware_endpoint.getEndpointPlugin)(config, Command.getEndpointParameterInstructions()) + ]; +}).s("DynamoDB_20120810", "ListTables", {}).n("DynamoDBClient", "ListTablesCommand").f(void 0, void 0).ser(se_ListTablesCommand).de(de_ListTablesCommand).build() { + static { + __name(this, "ListTablesCommand"); + } +}; + +// src/commands/ListTagsOfResourceCommand.ts + + + +var ListTagsOfResourceCommand = class extends import_smithy_client.Command.classBuilder().ep({ + ...commonParams, + ResourceArn: { type: "contextParams", name: "ResourceArn" } +}).m(function(Command, cs, config, o) { + return [ + (0, import_middleware_serde.getSerdePlugin)(config, this.serialize, this.deserialize), + (0, import_middleware_endpoint.getEndpointPlugin)(config, Command.getEndpointParameterInstructions()) + ]; +}).s("DynamoDB_20120810", "ListTagsOfResource", {}).n("DynamoDBClient", "ListTagsOfResourceCommand").f(void 0, void 0).ser(se_ListTagsOfResourceCommand).de(de_ListTagsOfResourceCommand).build() { + static { + __name(this, "ListTagsOfResourceCommand"); + } +}; + +// src/commands/PutItemCommand.ts + + + +var PutItemCommand = class extends import_smithy_client.Command.classBuilder().ep({ + ...commonParams, + ResourceArn: { type: "contextParams", name: "TableName" } +}).m(function(Command, cs, config, o) { + return [ + (0, import_middleware_serde.getSerdePlugin)(config, this.serialize, this.deserialize), + (0, import_middleware_endpoint.getEndpointPlugin)(config, Command.getEndpointParameterInstructions()) + ]; +}).s("DynamoDB_20120810", "PutItem", {}).n("DynamoDBClient", 
"PutItemCommand").f(void 0, void 0).ser(se_PutItemCommand).de(de_PutItemCommand).build() { + static { + __name(this, "PutItemCommand"); + } +}; + +// src/commands/PutResourcePolicyCommand.ts + + + +var PutResourcePolicyCommand = class extends import_smithy_client.Command.classBuilder().ep({ + ...commonParams, + ResourceArn: { type: "contextParams", name: "ResourceArn" } +}).m(function(Command, cs, config, o) { + return [ + (0, import_middleware_serde.getSerdePlugin)(config, this.serialize, this.deserialize), + (0, import_middleware_endpoint.getEndpointPlugin)(config, Command.getEndpointParameterInstructions()) + ]; +}).s("DynamoDB_20120810", "PutResourcePolicy", {}).n("DynamoDBClient", "PutResourcePolicyCommand").f(void 0, void 0).ser(se_PutResourcePolicyCommand).de(de_PutResourcePolicyCommand).build() { + static { + __name(this, "PutResourcePolicyCommand"); + } +}; + +// src/commands/QueryCommand.ts + + + +var QueryCommand = class extends import_smithy_client.Command.classBuilder().ep({ + ...commonParams, + ResourceArn: { type: "contextParams", name: "TableName" } +}).m(function(Command, cs, config, o) { + return [ + (0, import_middleware_serde.getSerdePlugin)(config, this.serialize, this.deserialize), + (0, import_middleware_endpoint.getEndpointPlugin)(config, Command.getEndpointParameterInstructions()) + ]; +}).s("DynamoDB_20120810", "Query", {}).n("DynamoDBClient", "QueryCommand").f(void 0, void 0).ser(se_QueryCommand).de(de_QueryCommand).build() { + static { + __name(this, "QueryCommand"); + } +}; + +// src/commands/RestoreTableFromBackupCommand.ts + + + +var RestoreTableFromBackupCommand = class extends import_smithy_client.Command.classBuilder().ep({ + ...commonParams, + ResourceArn: { type: "contextParams", name: "TargetTableName" } +}).m(function(Command, cs, config, o) { + return [ + (0, import_middleware_serde.getSerdePlugin)(config, this.serialize, this.deserialize), + (0, import_middleware_endpoint.getEndpointPlugin)(config, 
Command.getEndpointParameterInstructions()) + ]; +}).s("DynamoDB_20120810", "RestoreTableFromBackup", {}).n("DynamoDBClient", "RestoreTableFromBackupCommand").f(void 0, void 0).ser(se_RestoreTableFromBackupCommand).de(de_RestoreTableFromBackupCommand).build() { + static { + __name(this, "RestoreTableFromBackupCommand"); + } +}; + +// src/commands/RestoreTableToPointInTimeCommand.ts + + + +var RestoreTableToPointInTimeCommand = class extends import_smithy_client.Command.classBuilder().ep({ + ...commonParams, + ResourceArn: { type: "contextParams", name: "TargetTableName" } +}).m(function(Command, cs, config, o) { + return [ + (0, import_middleware_serde.getSerdePlugin)(config, this.serialize, this.deserialize), + (0, import_middleware_endpoint.getEndpointPlugin)(config, Command.getEndpointParameterInstructions()) + ]; +}).s("DynamoDB_20120810", "RestoreTableToPointInTime", {}).n("DynamoDBClient", "RestoreTableToPointInTimeCommand").f(void 0, void 0).ser(se_RestoreTableToPointInTimeCommand).de(de_RestoreTableToPointInTimeCommand).build() { + static { + __name(this, "RestoreTableToPointInTimeCommand"); + } +}; + +// src/commands/ScanCommand.ts + + + +var ScanCommand = class extends import_smithy_client.Command.classBuilder().ep({ + ...commonParams, + ResourceArn: { type: "contextParams", name: "TableName" } +}).m(function(Command, cs, config, o) { + return [ + (0, import_middleware_serde.getSerdePlugin)(config, this.serialize, this.deserialize), + (0, import_middleware_endpoint.getEndpointPlugin)(config, Command.getEndpointParameterInstructions()) + ]; +}).s("DynamoDB_20120810", "Scan", {}).n("DynamoDBClient", "ScanCommand").f(void 0, void 0).ser(se_ScanCommand).de(de_ScanCommand).build() { + static { + __name(this, "ScanCommand"); + } +}; + +// src/commands/TagResourceCommand.ts + + + +var TagResourceCommand = class extends import_smithy_client.Command.classBuilder().ep({ + ...commonParams, + ResourceArn: { type: "contextParams", name: "ResourceArn" } 
+}).m(function(Command, cs, config, o) { + return [ + (0, import_middleware_serde.getSerdePlugin)(config, this.serialize, this.deserialize), + (0, import_middleware_endpoint.getEndpointPlugin)(config, Command.getEndpointParameterInstructions()) + ]; +}).s("DynamoDB_20120810", "TagResource", {}).n("DynamoDBClient", "TagResourceCommand").f(void 0, void 0).ser(se_TagResourceCommand).de(de_TagResourceCommand).build() { + static { + __name(this, "TagResourceCommand"); + } +}; + +// src/commands/TransactGetItemsCommand.ts + + + +var TransactGetItemsCommand = class extends import_smithy_client.Command.classBuilder().ep({ + ...commonParams, + ResourceArnList: { + type: "operationContextParams", + get: /* @__PURE__ */ __name((input) => input?.TransactItems?.map((obj) => obj?.Get?.TableName), "get") + } +}).m(function(Command, cs, config, o) { + return [ + (0, import_middleware_serde.getSerdePlugin)(config, this.serialize, this.deserialize), + (0, import_middleware_endpoint.getEndpointPlugin)(config, Command.getEndpointParameterInstructions()) + ]; +}).s("DynamoDB_20120810", "TransactGetItems", {}).n("DynamoDBClient", "TransactGetItemsCommand").f(void 0, void 0).ser(se_TransactGetItemsCommand).de(de_TransactGetItemsCommand).build() { + static { + __name(this, "TransactGetItemsCommand"); + } +}; + +// src/commands/TransactWriteItemsCommand.ts + + + +var TransactWriteItemsCommand = class extends import_smithy_client.Command.classBuilder().ep({ + ...commonParams, + ResourceArnList: { + type: "operationContextParams", + get: /* @__PURE__ */ __name((input) => input?.TransactItems?.map( + (obj) => [obj?.ConditionCheck?.TableName, obj?.Put?.TableName, obj?.Delete?.TableName, obj?.Update?.TableName].filter( + (i) => i + ) + ).flat(), "get") + } +}).m(function(Command, cs, config, o) { + return [ + (0, import_middleware_serde.getSerdePlugin)(config, this.serialize, this.deserialize), + (0, import_middleware_endpoint.getEndpointPlugin)(config, 
Command.getEndpointParameterInstructions()) + ]; +}).s("DynamoDB_20120810", "TransactWriteItems", {}).n("DynamoDBClient", "TransactWriteItemsCommand").f(void 0, void 0).ser(se_TransactWriteItemsCommand).de(de_TransactWriteItemsCommand).build() { + static { + __name(this, "TransactWriteItemsCommand"); + } +}; + +// src/commands/UntagResourceCommand.ts + + + +var UntagResourceCommand = class extends import_smithy_client.Command.classBuilder().ep({ + ...commonParams, + ResourceArn: { type: "contextParams", name: "ResourceArn" } +}).m(function(Command, cs, config, o) { + return [ + (0, import_middleware_serde.getSerdePlugin)(config, this.serialize, this.deserialize), + (0, import_middleware_endpoint.getEndpointPlugin)(config, Command.getEndpointParameterInstructions()) + ]; +}).s("DynamoDB_20120810", "UntagResource", {}).n("DynamoDBClient", "UntagResourceCommand").f(void 0, void 0).ser(se_UntagResourceCommand).de(de_UntagResourceCommand).build() { + static { + __name(this, "UntagResourceCommand"); + } +}; + +// src/commands/UpdateContinuousBackupsCommand.ts + + + +var UpdateContinuousBackupsCommand = class extends import_smithy_client.Command.classBuilder().ep({ + ...commonParams, + ResourceArn: { type: "contextParams", name: "TableName" } +}).m(function(Command, cs, config, o) { + return [ + (0, import_middleware_serde.getSerdePlugin)(config, this.serialize, this.deserialize), + (0, import_middleware_endpoint.getEndpointPlugin)(config, Command.getEndpointParameterInstructions()) + ]; +}).s("DynamoDB_20120810", "UpdateContinuousBackups", {}).n("DynamoDBClient", "UpdateContinuousBackupsCommand").f(void 0, void 0).ser(se_UpdateContinuousBackupsCommand).de(de_UpdateContinuousBackupsCommand).build() { + static { + __name(this, "UpdateContinuousBackupsCommand"); + } +}; + +// src/commands/UpdateContributorInsightsCommand.ts + + + +var UpdateContributorInsightsCommand = class extends import_smithy_client.Command.classBuilder().ep({ + ...commonParams, + ResourceArn: { type: 
"contextParams", name: "TableName" } +}).m(function(Command, cs, config, o) { + return [ + (0, import_middleware_serde.getSerdePlugin)(config, this.serialize, this.deserialize), + (0, import_middleware_endpoint.getEndpointPlugin)(config, Command.getEndpointParameterInstructions()) + ]; +}).s("DynamoDB_20120810", "UpdateContributorInsights", {}).n("DynamoDBClient", "UpdateContributorInsightsCommand").f(void 0, void 0).ser(se_UpdateContributorInsightsCommand).de(de_UpdateContributorInsightsCommand).build() { + static { + __name(this, "UpdateContributorInsightsCommand"); + } +}; + +// src/commands/UpdateGlobalTableCommand.ts + + + +var UpdateGlobalTableCommand = class extends import_smithy_client.Command.classBuilder().ep({ + ...commonParams, + ResourceArn: { type: "contextParams", name: "GlobalTableName" } +}).m(function(Command, cs, config, o) { + return [ + (0, import_middleware_serde.getSerdePlugin)(config, this.serialize, this.deserialize), + (0, import_middleware_endpoint.getEndpointPlugin)(config, Command.getEndpointParameterInstructions()) + ]; +}).s("DynamoDB_20120810", "UpdateGlobalTable", {}).n("DynamoDBClient", "UpdateGlobalTableCommand").f(void 0, void 0).ser(se_UpdateGlobalTableCommand).de(de_UpdateGlobalTableCommand).build() { + static { + __name(this, "UpdateGlobalTableCommand"); + } +}; + +// src/commands/UpdateGlobalTableSettingsCommand.ts + + + +var UpdateGlobalTableSettingsCommand = class extends import_smithy_client.Command.classBuilder().ep({ + ...commonParams, + ResourceArn: { type: "contextParams", name: "GlobalTableName" } +}).m(function(Command, cs, config, o) { + return [ + (0, import_middleware_serde.getSerdePlugin)(config, this.serialize, this.deserialize), + (0, import_middleware_endpoint.getEndpointPlugin)(config, Command.getEndpointParameterInstructions()) + ]; +}).s("DynamoDB_20120810", "UpdateGlobalTableSettings", {}).n("DynamoDBClient", "UpdateGlobalTableSettingsCommand").f(void 0, void 
0).ser(se_UpdateGlobalTableSettingsCommand).de(de_UpdateGlobalTableSettingsCommand).build() { + static { + __name(this, "UpdateGlobalTableSettingsCommand"); + } +}; + +// src/commands/UpdateItemCommand.ts + + + +var UpdateItemCommand = class extends import_smithy_client.Command.classBuilder().ep({ + ...commonParams, + ResourceArn: { type: "contextParams", name: "TableName" } +}).m(function(Command, cs, config, o) { + return [ + (0, import_middleware_serde.getSerdePlugin)(config, this.serialize, this.deserialize), + (0, import_middleware_endpoint.getEndpointPlugin)(config, Command.getEndpointParameterInstructions()) + ]; +}).s("DynamoDB_20120810", "UpdateItem", {}).n("DynamoDBClient", "UpdateItemCommand").f(void 0, void 0).ser(se_UpdateItemCommand).de(de_UpdateItemCommand).build() { + static { + __name(this, "UpdateItemCommand"); + } +}; + +// src/commands/UpdateKinesisStreamingDestinationCommand.ts + + + +var UpdateKinesisStreamingDestinationCommand = class extends import_smithy_client.Command.classBuilder().ep({ + ...commonParams, + ResourceArn: { type: "contextParams", name: "TableName" } +}).m(function(Command, cs, config, o) { + return [ + (0, import_middleware_serde.getSerdePlugin)(config, this.serialize, this.deserialize), + (0, import_middleware_endpoint.getEndpointPlugin)(config, Command.getEndpointParameterInstructions()) + ]; +}).s("DynamoDB_20120810", "UpdateKinesisStreamingDestination", {}).n("DynamoDBClient", "UpdateKinesisStreamingDestinationCommand").f(void 0, void 0).ser(se_UpdateKinesisStreamingDestinationCommand).de(de_UpdateKinesisStreamingDestinationCommand).build() { + static { + __name(this, "UpdateKinesisStreamingDestinationCommand"); + } +}; + +// src/commands/UpdateTableCommand.ts + + + +var UpdateTableCommand = class extends import_smithy_client.Command.classBuilder().ep({ + ...commonParams, + ResourceArn: { type: "contextParams", name: "TableName" } +}).m(function(Command, cs, config, o) { + return [ + (0, 
import_middleware_serde.getSerdePlugin)(config, this.serialize, this.deserialize), + (0, import_middleware_endpoint.getEndpointPlugin)(config, Command.getEndpointParameterInstructions()) + ]; +}).s("DynamoDB_20120810", "UpdateTable", {}).n("DynamoDBClient", "UpdateTableCommand").f(void 0, void 0).ser(se_UpdateTableCommand).de(de_UpdateTableCommand).build() { + static { + __name(this, "UpdateTableCommand"); + } +}; + +// src/commands/UpdateTableReplicaAutoScalingCommand.ts + + + +var UpdateTableReplicaAutoScalingCommand = class extends import_smithy_client.Command.classBuilder().ep({ + ...commonParams, + ResourceArn: { type: "contextParams", name: "TableName" } +}).m(function(Command, cs, config, o) { + return [ + (0, import_middleware_serde.getSerdePlugin)(config, this.serialize, this.deserialize), + (0, import_middleware_endpoint.getEndpointPlugin)(config, Command.getEndpointParameterInstructions()) + ]; +}).s("DynamoDB_20120810", "UpdateTableReplicaAutoScaling", {}).n("DynamoDBClient", "UpdateTableReplicaAutoScalingCommand").f(void 0, void 0).ser(se_UpdateTableReplicaAutoScalingCommand).de(de_UpdateTableReplicaAutoScalingCommand).build() { + static { + __name(this, "UpdateTableReplicaAutoScalingCommand"); + } +}; + +// src/commands/UpdateTimeToLiveCommand.ts + + + +var UpdateTimeToLiveCommand = class extends import_smithy_client.Command.classBuilder().ep({ + ...commonParams, + ResourceArn: { type: "contextParams", name: "TableName" } +}).m(function(Command, cs, config, o) { + return [ + (0, import_middleware_serde.getSerdePlugin)(config, this.serialize, this.deserialize), + (0, import_middleware_endpoint.getEndpointPlugin)(config, Command.getEndpointParameterInstructions()) + ]; +}).s("DynamoDB_20120810", "UpdateTimeToLive", {}).n("DynamoDBClient", "UpdateTimeToLiveCommand").f(void 0, void 0).ser(se_UpdateTimeToLiveCommand).de(de_UpdateTimeToLiveCommand).build() { + static { + __name(this, "UpdateTimeToLiveCommand"); + } +}; + +// src/DynamoDB.ts +var commands = 
{ + BatchExecuteStatementCommand, + BatchGetItemCommand, + BatchWriteItemCommand, + CreateBackupCommand, + CreateGlobalTableCommand, + CreateTableCommand, + DeleteBackupCommand, + DeleteItemCommand, + DeleteResourcePolicyCommand, + DeleteTableCommand, + DescribeBackupCommand, + DescribeContinuousBackupsCommand, + DescribeContributorInsightsCommand, + DescribeEndpointsCommand, + DescribeExportCommand, + DescribeGlobalTableCommand, + DescribeGlobalTableSettingsCommand, + DescribeImportCommand, + DescribeKinesisStreamingDestinationCommand, + DescribeLimitsCommand, + DescribeTableCommand, + DescribeTableReplicaAutoScalingCommand, + DescribeTimeToLiveCommand, + DisableKinesisStreamingDestinationCommand, + EnableKinesisStreamingDestinationCommand, + ExecuteStatementCommand, + ExecuteTransactionCommand, + ExportTableToPointInTimeCommand, + GetItemCommand, + GetResourcePolicyCommand, + ImportTableCommand, + ListBackupsCommand, + ListContributorInsightsCommand, + ListExportsCommand, + ListGlobalTablesCommand, + ListImportsCommand, + ListTablesCommand, + ListTagsOfResourceCommand, + PutItemCommand, + PutResourcePolicyCommand, + QueryCommand, + RestoreTableFromBackupCommand, + RestoreTableToPointInTimeCommand, + ScanCommand, + TagResourceCommand, + TransactGetItemsCommand, + TransactWriteItemsCommand, + UntagResourceCommand, + UpdateContinuousBackupsCommand, + UpdateContributorInsightsCommand, + UpdateGlobalTableCommand, + UpdateGlobalTableSettingsCommand, + UpdateItemCommand, + UpdateKinesisStreamingDestinationCommand, + UpdateTableCommand, + UpdateTableReplicaAutoScalingCommand, + UpdateTimeToLiveCommand +}; +var DynamoDB = class extends DynamoDBClient { + static { + __name(this, "DynamoDB"); + } +}; +(0, import_smithy_client.createAggregatedClient)(commands, DynamoDB); + +// src/pagination/ListContributorInsightsPaginator.ts +var import_core3 = require("@smithy/core"); +var paginateListContributorInsights = (0, import_core3.createPaginator)(DynamoDBClient, 
ListContributorInsightsCommand, "NextToken", "NextToken", "MaxResults"); + +// src/pagination/ListExportsPaginator.ts +var import_core4 = require("@smithy/core"); +var paginateListExports = (0, import_core4.createPaginator)(DynamoDBClient, ListExportsCommand, "NextToken", "NextToken", "MaxResults"); + +// src/pagination/ListImportsPaginator.ts +var import_core5 = require("@smithy/core"); +var paginateListImports = (0, import_core5.createPaginator)(DynamoDBClient, ListImportsCommand, "NextToken", "NextToken", "PageSize"); + +// src/pagination/ListTablesPaginator.ts +var import_core6 = require("@smithy/core"); +var paginateListTables = (0, import_core6.createPaginator)(DynamoDBClient, ListTablesCommand, "ExclusiveStartTableName", "LastEvaluatedTableName", "Limit"); + +// src/pagination/QueryPaginator.ts +var import_core7 = require("@smithy/core"); +var paginateQuery = (0, import_core7.createPaginator)(DynamoDBClient, QueryCommand, "ExclusiveStartKey", "LastEvaluatedKey", "Limit"); + +// src/pagination/ScanPaginator.ts +var import_core8 = require("@smithy/core"); +var paginateScan = (0, import_core8.createPaginator)(DynamoDBClient, ScanCommand, "ExclusiveStartKey", "LastEvaluatedKey", "Limit"); + +// src/waiters/waitForTableExists.ts +var import_util_waiter = require("@smithy/util-waiter"); +var checkState = /* @__PURE__ */ __name(async (client, input) => { + let reason; + try { + const result = await client.send(new DescribeTableCommand(input)); + reason = result; + try { + const returnComparator = /* @__PURE__ */ __name(() => { + return result.Table.TableStatus; + }, "returnComparator"); + if (returnComparator() === "ACTIVE") { + return { state: import_util_waiter.WaiterState.SUCCESS, reason }; + } + } catch (e) { + } + } catch (exception) { + reason = exception; + if (exception.name && exception.name == "ResourceNotFoundException") { + return { state: import_util_waiter.WaiterState.RETRY, reason }; + } + } + return { state: import_util_waiter.WaiterState.RETRY, 
reason }; +}, "checkState"); +var waitForTableExists = /* @__PURE__ */ __name(async (params, input) => { + const serviceDefaults = { minDelay: 20, maxDelay: 120 }; + return (0, import_util_waiter.createWaiter)({ ...serviceDefaults, ...params }, input, checkState); +}, "waitForTableExists"); +var waitUntilTableExists = /* @__PURE__ */ __name(async (params, input) => { + const serviceDefaults = { minDelay: 20, maxDelay: 120 }; + const result = await (0, import_util_waiter.createWaiter)({ ...serviceDefaults, ...params }, input, checkState); + return (0, import_util_waiter.checkExceptions)(result); +}, "waitUntilTableExists"); + +// src/waiters/waitForTableNotExists.ts + +var checkState2 = /* @__PURE__ */ __name(async (client, input) => { + let reason; + try { + const result = await client.send(new DescribeTableCommand(input)); + reason = result; + } catch (exception) { + reason = exception; + if (exception.name && exception.name == "ResourceNotFoundException") { + return { state: import_util_waiter.WaiterState.SUCCESS, reason }; + } + } + return { state: import_util_waiter.WaiterState.RETRY, reason }; +}, "checkState"); +var waitForTableNotExists = /* @__PURE__ */ __name(async (params, input) => { + const serviceDefaults = { minDelay: 20, maxDelay: 120 }; + return (0, import_util_waiter.createWaiter)({ ...serviceDefaults, ...params }, input, checkState2); +}, "waitForTableNotExists"); +var waitUntilTableNotExists = /* @__PURE__ */ __name(async (params, input) => { + const serviceDefaults = { minDelay: 20, maxDelay: 120 }; + const result = await (0, import_util_waiter.createWaiter)({ ...serviceDefaults, ...params }, input, checkState2); + return (0, import_util_waiter.checkExceptions)(result); +}, "waitUntilTableNotExists"); +// Annotate the CommonJS export names for ESM import in node: + +0 && (module.exports = { + DynamoDBServiceException, + __Client, + DynamoDBClient, + DynamoDB, + $Command, + BatchExecuteStatementCommand, + BatchGetItemCommand, + 
BatchWriteItemCommand, + CreateBackupCommand, + CreateGlobalTableCommand, + CreateTableCommand, + DeleteBackupCommand, + DeleteItemCommand, + DeleteResourcePolicyCommand, + DeleteTableCommand, + DescribeBackupCommand, + DescribeContinuousBackupsCommand, + DescribeContributorInsightsCommand, + DescribeEndpointsCommand, + DescribeExportCommand, + DescribeGlobalTableCommand, + DescribeGlobalTableSettingsCommand, + DescribeImportCommand, + DescribeKinesisStreamingDestinationCommand, + DescribeLimitsCommand, + DescribeTableCommand, + DescribeTableReplicaAutoScalingCommand, + DescribeTimeToLiveCommand, + DisableKinesisStreamingDestinationCommand, + EnableKinesisStreamingDestinationCommand, + ExecuteStatementCommand, + ExecuteTransactionCommand, + ExportTableToPointInTimeCommand, + GetItemCommand, + GetResourcePolicyCommand, + ImportTableCommand, + ListBackupsCommand, + ListContributorInsightsCommand, + ListExportsCommand, + ListGlobalTablesCommand, + ListImportsCommand, + ListTablesCommand, + ListTagsOfResourceCommand, + PutItemCommand, + PutResourcePolicyCommand, + QueryCommand, + RestoreTableFromBackupCommand, + RestoreTableToPointInTimeCommand, + ScanCommand, + TagResourceCommand, + TransactGetItemsCommand, + TransactWriteItemsCommand, + UntagResourceCommand, + UpdateContinuousBackupsCommand, + UpdateContributorInsightsCommand, + UpdateGlobalTableCommand, + UpdateGlobalTableSettingsCommand, + UpdateItemCommand, + UpdateKinesisStreamingDestinationCommand, + UpdateTableCommand, + UpdateTableReplicaAutoScalingCommand, + UpdateTimeToLiveCommand, + paginateListContributorInsights, + paginateListExports, + paginateListImports, + paginateListTables, + paginateQuery, + paginateScan, + waitForTableExists, + waitUntilTableExists, + waitForTableNotExists, + waitUntilTableNotExists, + ApproximateCreationDateTimePrecision, + AttributeAction, + ScalarAttributeType, + BackupStatus, + BackupType, + BillingMode, + KeyType, + ProjectionType, + SSEType, + SSEStatus, + StreamViewType, + 
TimeToLiveStatus, + BackupInUseException, + BackupNotFoundException, + BackupTypeFilter, + ReturnConsumedCapacity, + ReturnValuesOnConditionCheckFailure, + BatchStatementErrorCodeEnum, + InternalServerError, + RequestLimitExceeded, + InvalidEndpointException, + ProvisionedThroughputExceededException, + ResourceNotFoundException, + ReturnItemCollectionMetrics, + ItemCollectionSizeLimitExceededException, + ComparisonOperator, + ConditionalOperator, + ContinuousBackupsStatus, + PointInTimeRecoveryStatus, + ContinuousBackupsUnavailableException, + ContributorInsightsAction, + ContributorInsightsStatus, + LimitExceededException, + TableInUseException, + TableNotFoundException, + GlobalTableStatus, + IndexStatus, + ReplicaStatus, + TableClass, + TableStatus, + GlobalTableAlreadyExistsException, + MultiRegionConsistency, + ResourceInUseException, + ReturnValue, + ReplicatedWriteConflictException, + TransactionConflictException, + PolicyNotFoundException, + ExportFormat, + ExportStatus, + ExportType, + ExportViewType, + S3SseAlgorithm, + ExportNotFoundException, + GlobalTableNotFoundException, + ImportStatus, + InputCompressionType, + InputFormat, + ImportNotFoundException, + DestinationStatus, + DuplicateItemException, + IdempotentParameterMismatchException, + TransactionInProgressException, + ExportConflictException, + InvalidExportTimeException, + PointInTimeRecoveryUnavailableException, + ImportConflictException, + Select, + TableAlreadyExistsException, + InvalidRestoreTimeException, + ReplicaAlreadyExistsException, + ReplicaNotFoundException, + IndexNotFoundException, + AttributeValue, + ConditionalCheckFailedException, + TransactionCanceledException +}); + diff --git a/amplify/functions/downloadDocument/node_modules/@aws-sdk/client-dynamodb/dist-cjs/runtimeConfig.browser.js b/amplify/functions/downloadDocument/node_modules/@aws-sdk/client-dynamodb/dist-cjs/runtimeConfig.browser.js new file mode 100644 index 0000000..be381dc --- /dev/null +++ 
b/amplify/functions/downloadDocument/node_modules/@aws-sdk/client-dynamodb/dist-cjs/runtimeConfig.browser.js @@ -0,0 +1,42 @@ +"use strict"; +Object.defineProperty(exports, "__esModule", { value: true }); +exports.getRuntimeConfig = void 0; +const tslib_1 = require("tslib"); +const package_json_1 = tslib_1.__importDefault(require("../package.json")); +const sha256_browser_1 = require("@aws-crypto/sha256-browser"); +const account_id_endpoint_1 = require("@aws-sdk/core/account-id-endpoint"); +const util_user_agent_browser_1 = require("@aws-sdk/util-user-agent-browser"); +const config_resolver_1 = require("@smithy/config-resolver"); +const fetch_http_handler_1 = require("@smithy/fetch-http-handler"); +const invalid_dependency_1 = require("@smithy/invalid-dependency"); +const util_body_length_browser_1 = require("@smithy/util-body-length-browser"); +const util_retry_1 = require("@smithy/util-retry"); +const runtimeConfig_shared_1 = require("./runtimeConfig.shared"); +const smithy_client_1 = require("@smithy/smithy-client"); +const util_defaults_mode_browser_1 = require("@smithy/util-defaults-mode-browser"); +const getRuntimeConfig = (config) => { + const defaultsMode = (0, util_defaults_mode_browser_1.resolveDefaultsModeConfig)(config); + const defaultConfigProvider = () => defaultsMode().then(smithy_client_1.loadConfigsForDefaultMode); + const clientSharedValues = (0, runtimeConfig_shared_1.getRuntimeConfig)(config); + return { + ...clientSharedValues, + ...config, + runtime: "browser", + defaultsMode, + accountIdEndpointMode: config?.accountIdEndpointMode ?? (() => Promise.resolve(account_id_endpoint_1.DEFAULT_ACCOUNT_ID_ENDPOINT_MODE)), + bodyLengthChecker: config?.bodyLengthChecker ?? util_body_length_browser_1.calculateBodyLength, + credentialDefaultProvider: config?.credentialDefaultProvider ?? ((_) => () => Promise.reject(new Error("Credential is missing"))), + defaultUserAgentProvider: config?.defaultUserAgentProvider ?? 
+ (0, util_user_agent_browser_1.createDefaultUserAgentProvider)({ serviceId: clientSharedValues.serviceId, clientVersion: package_json_1.default.version }), + endpointDiscoveryEnabledProvider: config?.endpointDiscoveryEnabledProvider ?? (() => Promise.resolve(undefined)), + maxAttempts: config?.maxAttempts ?? util_retry_1.DEFAULT_MAX_ATTEMPTS, + region: config?.region ?? (0, invalid_dependency_1.invalidProvider)("Region is missing"), + requestHandler: fetch_http_handler_1.FetchHttpHandler.create(config?.requestHandler ?? defaultConfigProvider), + retryMode: config?.retryMode ?? (async () => (await defaultConfigProvider()).retryMode || util_retry_1.DEFAULT_RETRY_MODE), + sha256: config?.sha256 ?? sha256_browser_1.Sha256, + streamCollector: config?.streamCollector ?? fetch_http_handler_1.streamCollector, + useDualstackEndpoint: config?.useDualstackEndpoint ?? (() => Promise.resolve(config_resolver_1.DEFAULT_USE_DUALSTACK_ENDPOINT)), + useFipsEndpoint: config?.useFipsEndpoint ?? (() => Promise.resolve(config_resolver_1.DEFAULT_USE_FIPS_ENDPOINT)), + }; +}; +exports.getRuntimeConfig = getRuntimeConfig; diff --git a/amplify/functions/downloadDocument/node_modules/@aws-sdk/client-dynamodb/dist-cjs/runtimeConfig.js b/amplify/functions/downloadDocument/node_modules/@aws-sdk/client-dynamodb/dist-cjs/runtimeConfig.js new file mode 100644 index 0000000..a07d8b8 --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@aws-sdk/client-dynamodb/dist-cjs/runtimeConfig.js @@ -0,0 +1,57 @@ +"use strict"; +Object.defineProperty(exports, "__esModule", { value: true }); +exports.getRuntimeConfig = void 0; +const tslib_1 = require("tslib"); +const package_json_1 = tslib_1.__importDefault(require("../package.json")); +const core_1 = require("@aws-sdk/core"); +const account_id_endpoint_1 = require("@aws-sdk/core/account-id-endpoint"); +const credential_provider_node_1 = require("@aws-sdk/credential-provider-node"); +const middleware_endpoint_discovery_1 = 
require("@aws-sdk/middleware-endpoint-discovery"); +const util_user_agent_node_1 = require("@aws-sdk/util-user-agent-node"); +const config_resolver_1 = require("@smithy/config-resolver"); +const hash_node_1 = require("@smithy/hash-node"); +const middleware_retry_1 = require("@smithy/middleware-retry"); +const node_config_provider_1 = require("@smithy/node-config-provider"); +const node_http_handler_1 = require("@smithy/node-http-handler"); +const util_body_length_node_1 = require("@smithy/util-body-length-node"); +const util_retry_1 = require("@smithy/util-retry"); +const runtimeConfig_shared_1 = require("./runtimeConfig.shared"); +const smithy_client_1 = require("@smithy/smithy-client"); +const util_defaults_mode_node_1 = require("@smithy/util-defaults-mode-node"); +const smithy_client_2 = require("@smithy/smithy-client"); +const getRuntimeConfig = (config) => { + (0, smithy_client_2.emitWarningIfUnsupportedVersion)(process.version); + const defaultsMode = (0, util_defaults_mode_node_1.resolveDefaultsModeConfig)(config); + const defaultConfigProvider = () => defaultsMode().then(smithy_client_1.loadConfigsForDefaultMode); + const clientSharedValues = (0, runtimeConfig_shared_1.getRuntimeConfig)(config); + (0, core_1.emitWarningIfUnsupportedVersion)(process.version); + const profileConfig = { profile: config?.profile }; + return { + ...clientSharedValues, + ...config, + runtime: "node", + defaultsMode, + accountIdEndpointMode: config?.accountIdEndpointMode ?? (0, node_config_provider_1.loadConfig)(account_id_endpoint_1.NODE_ACCOUNT_ID_ENDPOINT_MODE_CONFIG_OPTIONS, profileConfig), + authSchemePreference: config?.authSchemePreference ?? (0, node_config_provider_1.loadConfig)(core_1.NODE_AUTH_SCHEME_PREFERENCE_OPTIONS, profileConfig), + bodyLengthChecker: config?.bodyLengthChecker ?? util_body_length_node_1.calculateBodyLength, + credentialDefaultProvider: config?.credentialDefaultProvider ?? 
credential_provider_node_1.defaultProvider, + defaultUserAgentProvider: config?.defaultUserAgentProvider ?? + (0, util_user_agent_node_1.createDefaultUserAgentProvider)({ serviceId: clientSharedValues.serviceId, clientVersion: package_json_1.default.version }), + endpointDiscoveryEnabledProvider: config?.endpointDiscoveryEnabledProvider ?? (0, node_config_provider_1.loadConfig)(middleware_endpoint_discovery_1.NODE_ENDPOINT_DISCOVERY_CONFIG_OPTIONS, profileConfig), + maxAttempts: config?.maxAttempts ?? (0, node_config_provider_1.loadConfig)(middleware_retry_1.NODE_MAX_ATTEMPT_CONFIG_OPTIONS, config), + region: config?.region ?? + (0, node_config_provider_1.loadConfig)(config_resolver_1.NODE_REGION_CONFIG_OPTIONS, { ...config_resolver_1.NODE_REGION_CONFIG_FILE_OPTIONS, ...profileConfig }), + requestHandler: node_http_handler_1.NodeHttpHandler.create(config?.requestHandler ?? defaultConfigProvider), + retryMode: config?.retryMode ?? + (0, node_config_provider_1.loadConfig)({ + ...middleware_retry_1.NODE_RETRY_MODE_CONFIG_OPTIONS, + default: async () => (await defaultConfigProvider()).retryMode || util_retry_1.DEFAULT_RETRY_MODE, + }, config), + sha256: config?.sha256 ?? hash_node_1.Hash.bind(null, "sha256"), + streamCollector: config?.streamCollector ?? node_http_handler_1.streamCollector, + useDualstackEndpoint: config?.useDualstackEndpoint ?? (0, node_config_provider_1.loadConfig)(config_resolver_1.NODE_USE_DUALSTACK_ENDPOINT_CONFIG_OPTIONS, profileConfig), + useFipsEndpoint: config?.useFipsEndpoint ?? (0, node_config_provider_1.loadConfig)(config_resolver_1.NODE_USE_FIPS_ENDPOINT_CONFIG_OPTIONS, profileConfig), + userAgentAppId: config?.userAgentAppId ?? 
(0, node_config_provider_1.loadConfig)(util_user_agent_node_1.NODE_APP_ID_CONFIG_OPTIONS, profileConfig), + }; +}; +exports.getRuntimeConfig = getRuntimeConfig; diff --git a/amplify/functions/downloadDocument/node_modules/@aws-sdk/client-dynamodb/dist-cjs/runtimeConfig.native.js b/amplify/functions/downloadDocument/node_modules/@aws-sdk/client-dynamodb/dist-cjs/runtimeConfig.native.js new file mode 100644 index 0000000..34c5f8e --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@aws-sdk/client-dynamodb/dist-cjs/runtimeConfig.native.js @@ -0,0 +1,15 @@ +"use strict"; +Object.defineProperty(exports, "__esModule", { value: true }); +exports.getRuntimeConfig = void 0; +const sha256_js_1 = require("@aws-crypto/sha256-js"); +const runtimeConfig_browser_1 = require("./runtimeConfig.browser"); +const getRuntimeConfig = (config) => { + const browserDefaults = (0, runtimeConfig_browser_1.getRuntimeConfig)(config); + return { + ...browserDefaults, + ...config, + runtime: "react-native", + sha256: config?.sha256 ?? 
sha256_js_1.Sha256, + }; +}; +exports.getRuntimeConfig = getRuntimeConfig; diff --git a/amplify/functions/downloadDocument/node_modules/@aws-sdk/client-dynamodb/dist-cjs/runtimeConfig.shared.js b/amplify/functions/downloadDocument/node_modules/@aws-sdk/client-dynamodb/dist-cjs/runtimeConfig.shared.js new file mode 100644 index 0000000..817ba14 --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@aws-sdk/client-dynamodb/dist-cjs/runtimeConfig.shared.js @@ -0,0 +1,34 @@ +"use strict"; +Object.defineProperty(exports, "__esModule", { value: true }); +exports.getRuntimeConfig = void 0; +const core_1 = require("@aws-sdk/core"); +const smithy_client_1 = require("@smithy/smithy-client"); +const url_parser_1 = require("@smithy/url-parser"); +const util_base64_1 = require("@smithy/util-base64"); +const util_utf8_1 = require("@smithy/util-utf8"); +const httpAuthSchemeProvider_1 = require("./auth/httpAuthSchemeProvider"); +const endpointResolver_1 = require("./endpoint/endpointResolver"); +const getRuntimeConfig = (config) => { + return { + apiVersion: "2012-08-10", + base64Decoder: config?.base64Decoder ?? util_base64_1.fromBase64, + base64Encoder: config?.base64Encoder ?? util_base64_1.toBase64, + disableHostPrefix: config?.disableHostPrefix ?? false, + endpointProvider: config?.endpointProvider ?? endpointResolver_1.defaultEndpointResolver, + extensions: config?.extensions ?? [], + httpAuthSchemeProvider: config?.httpAuthSchemeProvider ?? httpAuthSchemeProvider_1.defaultDynamoDBHttpAuthSchemeProvider, + httpAuthSchemes: config?.httpAuthSchemes ?? [ + { + schemeId: "aws.auth#sigv4", + identityProvider: (ipc) => ipc.getIdentityProvider("aws.auth#sigv4"), + signer: new core_1.AwsSdkSigV4Signer(), + }, + ], + logger: config?.logger ?? new smithy_client_1.NoOpLogger(), + serviceId: config?.serviceId ?? "DynamoDB", + urlParser: config?.urlParser ?? url_parser_1.parseUrl, + utf8Decoder: config?.utf8Decoder ?? 
util_utf8_1.fromUtf8, + utf8Encoder: config?.utf8Encoder ?? util_utf8_1.toUtf8, + }; +}; +exports.getRuntimeConfig = getRuntimeConfig; diff --git a/amplify/functions/downloadDocument/node_modules/@aws-sdk/client-dynamodb/dist-es/DynamoDB.js b/amplify/functions/downloadDocument/node_modules/@aws-sdk/client-dynamodb/dist-es/DynamoDB.js new file mode 100644 index 0000000..e7f892c --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@aws-sdk/client-dynamodb/dist-es/DynamoDB.js @@ -0,0 +1,121 @@ +import { createAggregatedClient } from "@smithy/smithy-client"; +import { BatchExecuteStatementCommand, } from "./commands/BatchExecuteStatementCommand"; +import { BatchGetItemCommand, } from "./commands/BatchGetItemCommand"; +import { BatchWriteItemCommand, } from "./commands/BatchWriteItemCommand"; +import { CreateBackupCommand, } from "./commands/CreateBackupCommand"; +import { CreateGlobalTableCommand, } from "./commands/CreateGlobalTableCommand"; +import { CreateTableCommand } from "./commands/CreateTableCommand"; +import { DeleteBackupCommand, } from "./commands/DeleteBackupCommand"; +import { DeleteItemCommand } from "./commands/DeleteItemCommand"; +import { DeleteResourcePolicyCommand, } from "./commands/DeleteResourcePolicyCommand"; +import { DeleteTableCommand } from "./commands/DeleteTableCommand"; +import { DescribeBackupCommand, } from "./commands/DescribeBackupCommand"; +import { DescribeContinuousBackupsCommand, } from "./commands/DescribeContinuousBackupsCommand"; +import { DescribeContributorInsightsCommand, } from "./commands/DescribeContributorInsightsCommand"; +import { DescribeEndpointsCommand, } from "./commands/DescribeEndpointsCommand"; +import { DescribeExportCommand, } from "./commands/DescribeExportCommand"; +import { DescribeGlobalTableCommand, } from "./commands/DescribeGlobalTableCommand"; +import { DescribeGlobalTableSettingsCommand, } from "./commands/DescribeGlobalTableSettingsCommand"; +import { DescribeImportCommand, } from 
"./commands/DescribeImportCommand"; +import { DescribeKinesisStreamingDestinationCommand, } from "./commands/DescribeKinesisStreamingDestinationCommand"; +import { DescribeLimitsCommand, } from "./commands/DescribeLimitsCommand"; +import { DescribeTableCommand, } from "./commands/DescribeTableCommand"; +import { DescribeTableReplicaAutoScalingCommand, } from "./commands/DescribeTableReplicaAutoScalingCommand"; +import { DescribeTimeToLiveCommand, } from "./commands/DescribeTimeToLiveCommand"; +import { DisableKinesisStreamingDestinationCommand, } from "./commands/DisableKinesisStreamingDestinationCommand"; +import { EnableKinesisStreamingDestinationCommand, } from "./commands/EnableKinesisStreamingDestinationCommand"; +import { ExecuteStatementCommand, } from "./commands/ExecuteStatementCommand"; +import { ExecuteTransactionCommand, } from "./commands/ExecuteTransactionCommand"; +import { ExportTableToPointInTimeCommand, } from "./commands/ExportTableToPointInTimeCommand"; +import { GetItemCommand } from "./commands/GetItemCommand"; +import { GetResourcePolicyCommand, } from "./commands/GetResourcePolicyCommand"; +import { ImportTableCommand } from "./commands/ImportTableCommand"; +import { ListBackupsCommand } from "./commands/ListBackupsCommand"; +import { ListContributorInsightsCommand, } from "./commands/ListContributorInsightsCommand"; +import { ListExportsCommand } from "./commands/ListExportsCommand"; +import { ListGlobalTablesCommand, } from "./commands/ListGlobalTablesCommand"; +import { ListImportsCommand } from "./commands/ListImportsCommand"; +import { ListTablesCommand } from "./commands/ListTablesCommand"; +import { ListTagsOfResourceCommand, } from "./commands/ListTagsOfResourceCommand"; +import { PutItemCommand } from "./commands/PutItemCommand"; +import { PutResourcePolicyCommand, } from "./commands/PutResourcePolicyCommand"; +import { QueryCommand } from "./commands/QueryCommand"; +import { RestoreTableFromBackupCommand, } from 
"./commands/RestoreTableFromBackupCommand"; +import { RestoreTableToPointInTimeCommand, } from "./commands/RestoreTableToPointInTimeCommand"; +import { ScanCommand } from "./commands/ScanCommand"; +import { TagResourceCommand } from "./commands/TagResourceCommand"; +import { TransactGetItemsCommand, } from "./commands/TransactGetItemsCommand"; +import { TransactWriteItemsCommand, } from "./commands/TransactWriteItemsCommand"; +import { UntagResourceCommand, } from "./commands/UntagResourceCommand"; +import { UpdateContinuousBackupsCommand, } from "./commands/UpdateContinuousBackupsCommand"; +import { UpdateContributorInsightsCommand, } from "./commands/UpdateContributorInsightsCommand"; +import { UpdateGlobalTableCommand, } from "./commands/UpdateGlobalTableCommand"; +import { UpdateGlobalTableSettingsCommand, } from "./commands/UpdateGlobalTableSettingsCommand"; +import { UpdateItemCommand } from "./commands/UpdateItemCommand"; +import { UpdateKinesisStreamingDestinationCommand, } from "./commands/UpdateKinesisStreamingDestinationCommand"; +import { UpdateTableCommand } from "./commands/UpdateTableCommand"; +import { UpdateTableReplicaAutoScalingCommand, } from "./commands/UpdateTableReplicaAutoScalingCommand"; +import { UpdateTimeToLiveCommand, } from "./commands/UpdateTimeToLiveCommand"; +import { DynamoDBClient } from "./DynamoDBClient"; +const commands = { + BatchExecuteStatementCommand, + BatchGetItemCommand, + BatchWriteItemCommand, + CreateBackupCommand, + CreateGlobalTableCommand, + CreateTableCommand, + DeleteBackupCommand, + DeleteItemCommand, + DeleteResourcePolicyCommand, + DeleteTableCommand, + DescribeBackupCommand, + DescribeContinuousBackupsCommand, + DescribeContributorInsightsCommand, + DescribeEndpointsCommand, + DescribeExportCommand, + DescribeGlobalTableCommand, + DescribeGlobalTableSettingsCommand, + DescribeImportCommand, + DescribeKinesisStreamingDestinationCommand, + DescribeLimitsCommand, + DescribeTableCommand, + 
DescribeTableReplicaAutoScalingCommand, + DescribeTimeToLiveCommand, + DisableKinesisStreamingDestinationCommand, + EnableKinesisStreamingDestinationCommand, + ExecuteStatementCommand, + ExecuteTransactionCommand, + ExportTableToPointInTimeCommand, + GetItemCommand, + GetResourcePolicyCommand, + ImportTableCommand, + ListBackupsCommand, + ListContributorInsightsCommand, + ListExportsCommand, + ListGlobalTablesCommand, + ListImportsCommand, + ListTablesCommand, + ListTagsOfResourceCommand, + PutItemCommand, + PutResourcePolicyCommand, + QueryCommand, + RestoreTableFromBackupCommand, + RestoreTableToPointInTimeCommand, + ScanCommand, + TagResourceCommand, + TransactGetItemsCommand, + TransactWriteItemsCommand, + UntagResourceCommand, + UpdateContinuousBackupsCommand, + UpdateContributorInsightsCommand, + UpdateGlobalTableCommand, + UpdateGlobalTableSettingsCommand, + UpdateItemCommand, + UpdateKinesisStreamingDestinationCommand, + UpdateTableCommand, + UpdateTableReplicaAutoScalingCommand, + UpdateTimeToLiveCommand, +}; +export class DynamoDB extends DynamoDBClient { +} +createAggregatedClient(commands, DynamoDB); diff --git a/amplify/functions/downloadDocument/node_modules/@aws-sdk/client-dynamodb/dist-es/DynamoDBClient.js b/amplify/functions/downloadDocument/node_modules/@aws-sdk/client-dynamodb/dist-es/DynamoDBClient.js new file mode 100644 index 0000000..b77d943 --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@aws-sdk/client-dynamodb/dist-es/DynamoDBClient.js @@ -0,0 +1,55 @@ +import { resolveAccountIdEndpointModeConfig, } from "@aws-sdk/core/account-id-endpoint"; +import { resolveEndpointDiscoveryConfig, } from "@aws-sdk/middleware-endpoint-discovery"; +import { getHostHeaderPlugin, resolveHostHeaderConfig, } from "@aws-sdk/middleware-host-header"; +import { getLoggerPlugin } from "@aws-sdk/middleware-logger"; +import { getRecursionDetectionPlugin } from "@aws-sdk/middleware-recursion-detection"; +import { getUserAgentPlugin, 
resolveUserAgentConfig, } from "@aws-sdk/middleware-user-agent"; +import { resolveRegionConfig } from "@smithy/config-resolver"; +import { DefaultIdentityProviderConfig, getHttpAuthSchemeEndpointRuleSetPlugin, getHttpSigningPlugin, } from "@smithy/core"; +import { getContentLengthPlugin } from "@smithy/middleware-content-length"; +import { resolveEndpointConfig } from "@smithy/middleware-endpoint"; +import { getRetryPlugin, resolveRetryConfig } from "@smithy/middleware-retry"; +import { Client as __Client, } from "@smithy/smithy-client"; +import { defaultDynamoDBHttpAuthSchemeParametersProvider, resolveHttpAuthSchemeConfig, } from "./auth/httpAuthSchemeProvider"; +import { DescribeEndpointsCommand, } from "./commands/DescribeEndpointsCommand"; +import { resolveClientEndpointParameters, } from "./endpoint/EndpointParameters"; +import { getRuntimeConfig as __getRuntimeConfig } from "./runtimeConfig"; +import { resolveRuntimeExtensions } from "./runtimeExtensions"; +export { __Client }; +export class DynamoDBClient extends __Client { + config; + constructor(...[configuration]) { + const _config_0 = __getRuntimeConfig(configuration || {}); + super(_config_0); + this.initConfig = _config_0; + const _config_1 = resolveClientEndpointParameters(_config_0); + const _config_2 = resolveAccountIdEndpointModeConfig(_config_1); + const _config_3 = resolveUserAgentConfig(_config_2); + const _config_4 = resolveRetryConfig(_config_3); + const _config_5 = resolveRegionConfig(_config_4); + const _config_6 = resolveHostHeaderConfig(_config_5); + const _config_7 = resolveEndpointConfig(_config_6); + const _config_8 = resolveHttpAuthSchemeConfig(_config_7); + const _config_9 = resolveEndpointDiscoveryConfig(_config_8, { + endpointDiscoveryCommandCtor: DescribeEndpointsCommand, + }); + const _config_10 = resolveRuntimeExtensions(_config_9, configuration?.extensions || []); + this.config = _config_10; + this.middlewareStack.use(getUserAgentPlugin(this.config)); + 
this.middlewareStack.use(getRetryPlugin(this.config)); + this.middlewareStack.use(getContentLengthPlugin(this.config)); + this.middlewareStack.use(getHostHeaderPlugin(this.config)); + this.middlewareStack.use(getLoggerPlugin(this.config)); + this.middlewareStack.use(getRecursionDetectionPlugin(this.config)); + this.middlewareStack.use(getHttpAuthSchemeEndpointRuleSetPlugin(this.config, { + httpAuthSchemeParametersProvider: defaultDynamoDBHttpAuthSchemeParametersProvider, + identityProviderConfigProvider: async (config) => new DefaultIdentityProviderConfig({ + "aws.auth#sigv4": config.credentials, + }), + })); + this.middlewareStack.use(getHttpSigningPlugin(this.config)); + } + destroy() { + super.destroy(); + } +} diff --git a/amplify/functions/downloadDocument/node_modules/@aws-sdk/client-dynamodb/dist-es/auth/httpAuthExtensionConfiguration.js b/amplify/functions/downloadDocument/node_modules/@aws-sdk/client-dynamodb/dist-es/auth/httpAuthExtensionConfiguration.js new file mode 100644 index 0000000..2ba1d48 --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@aws-sdk/client-dynamodb/dist-es/auth/httpAuthExtensionConfiguration.js @@ -0,0 +1,38 @@ +export const getHttpAuthExtensionConfiguration = (runtimeConfig) => { + const _httpAuthSchemes = runtimeConfig.httpAuthSchemes; + let _httpAuthSchemeProvider = runtimeConfig.httpAuthSchemeProvider; + let _credentials = runtimeConfig.credentials; + return { + setHttpAuthScheme(httpAuthScheme) { + const index = _httpAuthSchemes.findIndex((scheme) => scheme.schemeId === httpAuthScheme.schemeId); + if (index === -1) { + _httpAuthSchemes.push(httpAuthScheme); + } + else { + _httpAuthSchemes.splice(index, 1, httpAuthScheme); + } + }, + httpAuthSchemes() { + return _httpAuthSchemes; + }, + setHttpAuthSchemeProvider(httpAuthSchemeProvider) { + _httpAuthSchemeProvider = httpAuthSchemeProvider; + }, + httpAuthSchemeProvider() { + return _httpAuthSchemeProvider; + }, + setCredentials(credentials) { + _credentials = 
credentials; + }, + credentials() { + return _credentials; + }, + }; +}; +export const resolveHttpAuthRuntimeConfig = (config) => { + return { + httpAuthSchemes: config.httpAuthSchemes(), + httpAuthSchemeProvider: config.httpAuthSchemeProvider(), + credentials: config.credentials(), + }; +}; diff --git a/amplify/functions/downloadDocument/node_modules/@aws-sdk/client-dynamodb/dist-es/auth/httpAuthSchemeProvider.js b/amplify/functions/downloadDocument/node_modules/@aws-sdk/client-dynamodb/dist-es/auth/httpAuthSchemeProvider.js new file mode 100644 index 0000000..6a9e23e --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@aws-sdk/client-dynamodb/dist-es/auth/httpAuthSchemeProvider.js @@ -0,0 +1,41 @@ +import { resolveAwsSdkSigV4Config, } from "@aws-sdk/core"; +import { getSmithyContext, normalizeProvider } from "@smithy/util-middleware"; +export const defaultDynamoDBHttpAuthSchemeParametersProvider = async (config, context, input) => { + return { + operation: getSmithyContext(context).operation, + region: (await normalizeProvider(config.region)()) || + (() => { + throw new Error("expected `region` to be configured for `aws.auth#sigv4`"); + })(), + }; +}; +function createAwsAuthSigv4HttpAuthOption(authParameters) { + return { + schemeId: "aws.auth#sigv4", + signingProperties: { + name: "dynamodb", + region: authParameters.region, + }, + propertiesExtractor: (config, context) => ({ + signingProperties: { + config, + context, + }, + }), + }; +} +export const defaultDynamoDBHttpAuthSchemeProvider = (authParameters) => { + const options = []; + switch (authParameters.operation) { + default: { + options.push(createAwsAuthSigv4HttpAuthOption(authParameters)); + } + } + return options; +}; +export const resolveHttpAuthSchemeConfig = (config) => { + const config_0 = resolveAwsSdkSigV4Config(config); + return Object.assign(config_0, { + authSchemePreference: normalizeProvider(config.authSchemePreference ?? 
[]), + }); +}; diff --git a/amplify/functions/downloadDocument/node_modules/@aws-sdk/client-dynamodb/dist-es/commands/BatchExecuteStatementCommand.js b/amplify/functions/downloadDocument/node_modules/@aws-sdk/client-dynamodb/dist-es/commands/BatchExecuteStatementCommand.js new file mode 100644 index 0000000..4cbd251 --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@aws-sdk/client-dynamodb/dist-es/commands/BatchExecuteStatementCommand.js @@ -0,0 +1,22 @@ +import { getEndpointPlugin } from "@smithy/middleware-endpoint"; +import { getSerdePlugin } from "@smithy/middleware-serde"; +import { Command as $Command } from "@smithy/smithy-client"; +import { commonParams } from "../endpoint/EndpointParameters"; +import { de_BatchExecuteStatementCommand, se_BatchExecuteStatementCommand } from "../protocols/Aws_json1_0"; +export { $Command }; +export class BatchExecuteStatementCommand extends $Command + .classBuilder() + .ep(commonParams) + .m(function (Command, cs, config, o) { + return [ + getSerdePlugin(config, this.serialize, this.deserialize), + getEndpointPlugin(config, Command.getEndpointParameterInstructions()), + ]; +}) + .s("DynamoDB_20120810", "BatchExecuteStatement", {}) + .n("DynamoDBClient", "BatchExecuteStatementCommand") + .f(void 0, void 0) + .ser(se_BatchExecuteStatementCommand) + .de(de_BatchExecuteStatementCommand) + .build() { +} diff --git a/amplify/functions/downloadDocument/node_modules/@aws-sdk/client-dynamodb/dist-es/commands/BatchGetItemCommand.js b/amplify/functions/downloadDocument/node_modules/@aws-sdk/client-dynamodb/dist-es/commands/BatchGetItemCommand.js new file mode 100644 index 0000000..88dbf81 --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@aws-sdk/client-dynamodb/dist-es/commands/BatchGetItemCommand.js @@ -0,0 +1,25 @@ +import { getEndpointPlugin } from "@smithy/middleware-endpoint"; +import { getSerdePlugin } from "@smithy/middleware-serde"; +import { Command as $Command } from 
"@smithy/smithy-client"; +import { commonParams } from "../endpoint/EndpointParameters"; +import { de_BatchGetItemCommand, se_BatchGetItemCommand } from "../protocols/Aws_json1_0"; +export { $Command }; +export class BatchGetItemCommand extends $Command + .classBuilder() + .ep({ + ...commonParams, + ResourceArnList: { type: "operationContextParams", get: (input) => Object.keys(input?.RequestItems ?? {}) }, +}) + .m(function (Command, cs, config, o) { + return [ + getSerdePlugin(config, this.serialize, this.deserialize), + getEndpointPlugin(config, Command.getEndpointParameterInstructions()), + ]; +}) + .s("DynamoDB_20120810", "BatchGetItem", {}) + .n("DynamoDBClient", "BatchGetItemCommand") + .f(void 0, void 0) + .ser(se_BatchGetItemCommand) + .de(de_BatchGetItemCommand) + .build() { +} diff --git a/amplify/functions/downloadDocument/node_modules/@aws-sdk/client-dynamodb/dist-es/commands/BatchWriteItemCommand.js b/amplify/functions/downloadDocument/node_modules/@aws-sdk/client-dynamodb/dist-es/commands/BatchWriteItemCommand.js new file mode 100644 index 0000000..c27df1a --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@aws-sdk/client-dynamodb/dist-es/commands/BatchWriteItemCommand.js @@ -0,0 +1,25 @@ +import { getEndpointPlugin } from "@smithy/middleware-endpoint"; +import { getSerdePlugin } from "@smithy/middleware-serde"; +import { Command as $Command } from "@smithy/smithy-client"; +import { commonParams } from "../endpoint/EndpointParameters"; +import { de_BatchWriteItemCommand, se_BatchWriteItemCommand } from "../protocols/Aws_json1_0"; +export { $Command }; +export class BatchWriteItemCommand extends $Command + .classBuilder() + .ep({ + ...commonParams, + ResourceArnList: { type: "operationContextParams", get: (input) => Object.keys(input?.RequestItems ?? 
{}) }, +}) + .m(function (Command, cs, config, o) { + return [ + getSerdePlugin(config, this.serialize, this.deserialize), + getEndpointPlugin(config, Command.getEndpointParameterInstructions()), + ]; +}) + .s("DynamoDB_20120810", "BatchWriteItem", {}) + .n("DynamoDBClient", "BatchWriteItemCommand") + .f(void 0, void 0) + .ser(se_BatchWriteItemCommand) + .de(de_BatchWriteItemCommand) + .build() { +} diff --git a/amplify/functions/downloadDocument/node_modules/@aws-sdk/client-dynamodb/dist-es/commands/CreateBackupCommand.js b/amplify/functions/downloadDocument/node_modules/@aws-sdk/client-dynamodb/dist-es/commands/CreateBackupCommand.js new file mode 100644 index 0000000..c932b5c --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@aws-sdk/client-dynamodb/dist-es/commands/CreateBackupCommand.js @@ -0,0 +1,25 @@ +import { getEndpointPlugin } from "@smithy/middleware-endpoint"; +import { getSerdePlugin } from "@smithy/middleware-serde"; +import { Command as $Command } from "@smithy/smithy-client"; +import { commonParams } from "../endpoint/EndpointParameters"; +import { de_CreateBackupCommand, se_CreateBackupCommand } from "../protocols/Aws_json1_0"; +export { $Command }; +export class CreateBackupCommand extends $Command + .classBuilder() + .ep({ + ...commonParams, + ResourceArn: { type: "contextParams", name: "TableName" }, +}) + .m(function (Command, cs, config, o) { + return [ + getSerdePlugin(config, this.serialize, this.deserialize), + getEndpointPlugin(config, Command.getEndpointParameterInstructions()), + ]; +}) + .s("DynamoDB_20120810", "CreateBackup", {}) + .n("DynamoDBClient", "CreateBackupCommand") + .f(void 0, void 0) + .ser(se_CreateBackupCommand) + .de(de_CreateBackupCommand) + .build() { +} diff --git a/amplify/functions/downloadDocument/node_modules/@aws-sdk/client-dynamodb/dist-es/commands/CreateGlobalTableCommand.js b/amplify/functions/downloadDocument/node_modules/@aws-sdk/client-dynamodb/dist-es/commands/CreateGlobalTableCommand.js 
new file mode 100644 index 0000000..49b8b7a --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@aws-sdk/client-dynamodb/dist-es/commands/CreateGlobalTableCommand.js @@ -0,0 +1,25 @@ +import { getEndpointPlugin } from "@smithy/middleware-endpoint"; +import { getSerdePlugin } from "@smithy/middleware-serde"; +import { Command as $Command } from "@smithy/smithy-client"; +import { commonParams } from "../endpoint/EndpointParameters"; +import { de_CreateGlobalTableCommand, se_CreateGlobalTableCommand } from "../protocols/Aws_json1_0"; +export { $Command }; +export class CreateGlobalTableCommand extends $Command + .classBuilder() + .ep({ + ...commonParams, + ResourceArn: { type: "contextParams", name: "GlobalTableName" }, +}) + .m(function (Command, cs, config, o) { + return [ + getSerdePlugin(config, this.serialize, this.deserialize), + getEndpointPlugin(config, Command.getEndpointParameterInstructions()), + ]; +}) + .s("DynamoDB_20120810", "CreateGlobalTable", {}) + .n("DynamoDBClient", "CreateGlobalTableCommand") + .f(void 0, void 0) + .ser(se_CreateGlobalTableCommand) + .de(de_CreateGlobalTableCommand) + .build() { +} diff --git a/amplify/functions/downloadDocument/node_modules/@aws-sdk/client-dynamodb/dist-es/commands/CreateTableCommand.js b/amplify/functions/downloadDocument/node_modules/@aws-sdk/client-dynamodb/dist-es/commands/CreateTableCommand.js new file mode 100644 index 0000000..89f3586 --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@aws-sdk/client-dynamodb/dist-es/commands/CreateTableCommand.js @@ -0,0 +1,25 @@ +import { getEndpointPlugin } from "@smithy/middleware-endpoint"; +import { getSerdePlugin } from "@smithy/middleware-serde"; +import { Command as $Command } from "@smithy/smithy-client"; +import { commonParams } from "../endpoint/EndpointParameters"; +import { de_CreateTableCommand, se_CreateTableCommand } from "../protocols/Aws_json1_0"; +export { $Command }; +export class CreateTableCommand extends $Command + 
.classBuilder() + .ep({ + ...commonParams, + ResourceArn: { type: "contextParams", name: "TableName" }, +}) + .m(function (Command, cs, config, o) { + return [ + getSerdePlugin(config, this.serialize, this.deserialize), + getEndpointPlugin(config, Command.getEndpointParameterInstructions()), + ]; +}) + .s("DynamoDB_20120810", "CreateTable", {}) + .n("DynamoDBClient", "CreateTableCommand") + .f(void 0, void 0) + .ser(se_CreateTableCommand) + .de(de_CreateTableCommand) + .build() { +} diff --git a/amplify/functions/downloadDocument/node_modules/@aws-sdk/client-dynamodb/dist-es/commands/DeleteBackupCommand.js b/amplify/functions/downloadDocument/node_modules/@aws-sdk/client-dynamodb/dist-es/commands/DeleteBackupCommand.js new file mode 100644 index 0000000..d420225 --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@aws-sdk/client-dynamodb/dist-es/commands/DeleteBackupCommand.js @@ -0,0 +1,25 @@ +import { getEndpointPlugin } from "@smithy/middleware-endpoint"; +import { getSerdePlugin } from "@smithy/middleware-serde"; +import { Command as $Command } from "@smithy/smithy-client"; +import { commonParams } from "../endpoint/EndpointParameters"; +import { de_DeleteBackupCommand, se_DeleteBackupCommand } from "../protocols/Aws_json1_0"; +export { $Command }; +export class DeleteBackupCommand extends $Command + .classBuilder() + .ep({ + ...commonParams, + ResourceArn: { type: "contextParams", name: "BackupArn" }, +}) + .m(function (Command, cs, config, o) { + return [ + getSerdePlugin(config, this.serialize, this.deserialize), + getEndpointPlugin(config, Command.getEndpointParameterInstructions()), + ]; +}) + .s("DynamoDB_20120810", "DeleteBackup", {}) + .n("DynamoDBClient", "DeleteBackupCommand") + .f(void 0, void 0) + .ser(se_DeleteBackupCommand) + .de(de_DeleteBackupCommand) + .build() { +} diff --git a/amplify/functions/downloadDocument/node_modules/@aws-sdk/client-dynamodb/dist-es/commands/DeleteItemCommand.js 
b/amplify/functions/downloadDocument/node_modules/@aws-sdk/client-dynamodb/dist-es/commands/DeleteItemCommand.js new file mode 100644 index 0000000..0550355 --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@aws-sdk/client-dynamodb/dist-es/commands/DeleteItemCommand.js @@ -0,0 +1,25 @@ +import { getEndpointPlugin } from "@smithy/middleware-endpoint"; +import { getSerdePlugin } from "@smithy/middleware-serde"; +import { Command as $Command } from "@smithy/smithy-client"; +import { commonParams } from "../endpoint/EndpointParameters"; +import { de_DeleteItemCommand, se_DeleteItemCommand } from "../protocols/Aws_json1_0"; +export { $Command }; +export class DeleteItemCommand extends $Command + .classBuilder() + .ep({ + ...commonParams, + ResourceArn: { type: "contextParams", name: "TableName" }, +}) + .m(function (Command, cs, config, o) { + return [ + getSerdePlugin(config, this.serialize, this.deserialize), + getEndpointPlugin(config, Command.getEndpointParameterInstructions()), + ]; +}) + .s("DynamoDB_20120810", "DeleteItem", {}) + .n("DynamoDBClient", "DeleteItemCommand") + .f(void 0, void 0) + .ser(se_DeleteItemCommand) + .de(de_DeleteItemCommand) + .build() { +} diff --git a/amplify/functions/downloadDocument/node_modules/@aws-sdk/client-dynamodb/dist-es/commands/DeleteResourcePolicyCommand.js b/amplify/functions/downloadDocument/node_modules/@aws-sdk/client-dynamodb/dist-es/commands/DeleteResourcePolicyCommand.js new file mode 100644 index 0000000..045379c --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@aws-sdk/client-dynamodb/dist-es/commands/DeleteResourcePolicyCommand.js @@ -0,0 +1,25 @@ +import { getEndpointPlugin } from "@smithy/middleware-endpoint"; +import { getSerdePlugin } from "@smithy/middleware-serde"; +import { Command as $Command } from "@smithy/smithy-client"; +import { commonParams } from "../endpoint/EndpointParameters"; +import { de_DeleteResourcePolicyCommand, se_DeleteResourcePolicyCommand } from 
"../protocols/Aws_json1_0"; +export { $Command }; +export class DeleteResourcePolicyCommand extends $Command + .classBuilder() + .ep({ + ...commonParams, + ResourceArn: { type: "contextParams", name: "ResourceArn" }, +}) + .m(function (Command, cs, config, o) { + return [ + getSerdePlugin(config, this.serialize, this.deserialize), + getEndpointPlugin(config, Command.getEndpointParameterInstructions()), + ]; +}) + .s("DynamoDB_20120810", "DeleteResourcePolicy", {}) + .n("DynamoDBClient", "DeleteResourcePolicyCommand") + .f(void 0, void 0) + .ser(se_DeleteResourcePolicyCommand) + .de(de_DeleteResourcePolicyCommand) + .build() { +} diff --git a/amplify/functions/downloadDocument/node_modules/@aws-sdk/client-dynamodb/dist-es/commands/DeleteTableCommand.js b/amplify/functions/downloadDocument/node_modules/@aws-sdk/client-dynamodb/dist-es/commands/DeleteTableCommand.js new file mode 100644 index 0000000..b7792b3 --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@aws-sdk/client-dynamodb/dist-es/commands/DeleteTableCommand.js @@ -0,0 +1,25 @@ +import { getEndpointPlugin } from "@smithy/middleware-endpoint"; +import { getSerdePlugin } from "@smithy/middleware-serde"; +import { Command as $Command } from "@smithy/smithy-client"; +import { commonParams } from "../endpoint/EndpointParameters"; +import { de_DeleteTableCommand, se_DeleteTableCommand } from "../protocols/Aws_json1_0"; +export { $Command }; +export class DeleteTableCommand extends $Command + .classBuilder() + .ep({ + ...commonParams, + ResourceArn: { type: "contextParams", name: "TableName" }, +}) + .m(function (Command, cs, config, o) { + return [ + getSerdePlugin(config, this.serialize, this.deserialize), + getEndpointPlugin(config, Command.getEndpointParameterInstructions()), + ]; +}) + .s("DynamoDB_20120810", "DeleteTable", {}) + .n("DynamoDBClient", "DeleteTableCommand") + .f(void 0, void 0) + .ser(se_DeleteTableCommand) + .de(de_DeleteTableCommand) + .build() { +} diff --git 
a/amplify/functions/downloadDocument/node_modules/@aws-sdk/client-dynamodb/dist-es/commands/DescribeBackupCommand.js b/amplify/functions/downloadDocument/node_modules/@aws-sdk/client-dynamodb/dist-es/commands/DescribeBackupCommand.js new file mode 100644 index 0000000..70345e3 --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@aws-sdk/client-dynamodb/dist-es/commands/DescribeBackupCommand.js @@ -0,0 +1,25 @@ +import { getEndpointPlugin } from "@smithy/middleware-endpoint"; +import { getSerdePlugin } from "@smithy/middleware-serde"; +import { Command as $Command } from "@smithy/smithy-client"; +import { commonParams } from "../endpoint/EndpointParameters"; +import { de_DescribeBackupCommand, se_DescribeBackupCommand } from "../protocols/Aws_json1_0"; +export { $Command }; +export class DescribeBackupCommand extends $Command + .classBuilder() + .ep({ + ...commonParams, + ResourceArn: { type: "contextParams", name: "BackupArn" }, +}) + .m(function (Command, cs, config, o) { + return [ + getSerdePlugin(config, this.serialize, this.deserialize), + getEndpointPlugin(config, Command.getEndpointParameterInstructions()), + ]; +}) + .s("DynamoDB_20120810", "DescribeBackup", {}) + .n("DynamoDBClient", "DescribeBackupCommand") + .f(void 0, void 0) + .ser(se_DescribeBackupCommand) + .de(de_DescribeBackupCommand) + .build() { +} diff --git a/amplify/functions/downloadDocument/node_modules/@aws-sdk/client-dynamodb/dist-es/commands/DescribeContinuousBackupsCommand.js b/amplify/functions/downloadDocument/node_modules/@aws-sdk/client-dynamodb/dist-es/commands/DescribeContinuousBackupsCommand.js new file mode 100644 index 0000000..1104b60 --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@aws-sdk/client-dynamodb/dist-es/commands/DescribeContinuousBackupsCommand.js @@ -0,0 +1,25 @@ +import { getEndpointPlugin } from "@smithy/middleware-endpoint"; +import { getSerdePlugin } from "@smithy/middleware-serde"; +import { Command as $Command } from 
"@smithy/smithy-client"; +import { commonParams } from "../endpoint/EndpointParameters"; +import { de_DescribeContinuousBackupsCommand, se_DescribeContinuousBackupsCommand } from "../protocols/Aws_json1_0"; +export { $Command }; +export class DescribeContinuousBackupsCommand extends $Command + .classBuilder() + .ep({ + ...commonParams, + ResourceArn: { type: "contextParams", name: "TableName" }, +}) + .m(function (Command, cs, config, o) { + return [ + getSerdePlugin(config, this.serialize, this.deserialize), + getEndpointPlugin(config, Command.getEndpointParameterInstructions()), + ]; +}) + .s("DynamoDB_20120810", "DescribeContinuousBackups", {}) + .n("DynamoDBClient", "DescribeContinuousBackupsCommand") + .f(void 0, void 0) + .ser(se_DescribeContinuousBackupsCommand) + .de(de_DescribeContinuousBackupsCommand) + .build() { +} diff --git a/amplify/functions/downloadDocument/node_modules/@aws-sdk/client-dynamodb/dist-es/commands/DescribeContributorInsightsCommand.js b/amplify/functions/downloadDocument/node_modules/@aws-sdk/client-dynamodb/dist-es/commands/DescribeContributorInsightsCommand.js new file mode 100644 index 0000000..18a44c9 --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@aws-sdk/client-dynamodb/dist-es/commands/DescribeContributorInsightsCommand.js @@ -0,0 +1,25 @@ +import { getEndpointPlugin } from "@smithy/middleware-endpoint"; +import { getSerdePlugin } from "@smithy/middleware-serde"; +import { Command as $Command } from "@smithy/smithy-client"; +import { commonParams } from "../endpoint/EndpointParameters"; +import { de_DescribeContributorInsightsCommand, se_DescribeContributorInsightsCommand } from "../protocols/Aws_json1_0"; +export { $Command }; +export class DescribeContributorInsightsCommand extends $Command + .classBuilder() + .ep({ + ...commonParams, + ResourceArn: { type: "contextParams", name: "TableName" }, +}) + .m(function (Command, cs, config, o) { + return [ + getSerdePlugin(config, this.serialize, 
this.deserialize), + getEndpointPlugin(config, Command.getEndpointParameterInstructions()), + ]; +}) + .s("DynamoDB_20120810", "DescribeContributorInsights", {}) + .n("DynamoDBClient", "DescribeContributorInsightsCommand") + .f(void 0, void 0) + .ser(se_DescribeContributorInsightsCommand) + .de(de_DescribeContributorInsightsCommand) + .build() { +} diff --git a/amplify/functions/downloadDocument/node_modules/@aws-sdk/client-dynamodb/dist-es/commands/DescribeEndpointsCommand.js b/amplify/functions/downloadDocument/node_modules/@aws-sdk/client-dynamodb/dist-es/commands/DescribeEndpointsCommand.js new file mode 100644 index 0000000..7fa3d01 --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@aws-sdk/client-dynamodb/dist-es/commands/DescribeEndpointsCommand.js @@ -0,0 +1,22 @@ +import { getEndpointPlugin } from "@smithy/middleware-endpoint"; +import { getSerdePlugin } from "@smithy/middleware-serde"; +import { Command as $Command } from "@smithy/smithy-client"; +import { commonParams } from "../endpoint/EndpointParameters"; +import { de_DescribeEndpointsCommand, se_DescribeEndpointsCommand } from "../protocols/Aws_json1_0"; +export { $Command }; +export class DescribeEndpointsCommand extends $Command + .classBuilder() + .ep(commonParams) + .m(function (Command, cs, config, o) { + return [ + getSerdePlugin(config, this.serialize, this.deserialize), + getEndpointPlugin(config, Command.getEndpointParameterInstructions()), + ]; +}) + .s("DynamoDB_20120810", "DescribeEndpoints", {}) + .n("DynamoDBClient", "DescribeEndpointsCommand") + .f(void 0, void 0) + .ser(se_DescribeEndpointsCommand) + .de(de_DescribeEndpointsCommand) + .build() { +} diff --git a/amplify/functions/downloadDocument/node_modules/@aws-sdk/client-dynamodb/dist-es/commands/DescribeExportCommand.js b/amplify/functions/downloadDocument/node_modules/@aws-sdk/client-dynamodb/dist-es/commands/DescribeExportCommand.js new file mode 100644 index 0000000..18f9c9a --- /dev/null +++ 
b/amplify/functions/downloadDocument/node_modules/@aws-sdk/client-dynamodb/dist-es/commands/DescribeExportCommand.js @@ -0,0 +1,25 @@ +import { getEndpointPlugin } from "@smithy/middleware-endpoint"; +import { getSerdePlugin } from "@smithy/middleware-serde"; +import { Command as $Command } from "@smithy/smithy-client"; +import { commonParams } from "../endpoint/EndpointParameters"; +import { de_DescribeExportCommand, se_DescribeExportCommand } from "../protocols/Aws_json1_0"; +export { $Command }; +export class DescribeExportCommand extends $Command + .classBuilder() + .ep({ + ...commonParams, + ResourceArn: { type: "contextParams", name: "ExportArn" }, +}) + .m(function (Command, cs, config, o) { + return [ + getSerdePlugin(config, this.serialize, this.deserialize), + getEndpointPlugin(config, Command.getEndpointParameterInstructions()), + ]; +}) + .s("DynamoDB_20120810", "DescribeExport", {}) + .n("DynamoDBClient", "DescribeExportCommand") + .f(void 0, void 0) + .ser(se_DescribeExportCommand) + .de(de_DescribeExportCommand) + .build() { +} diff --git a/amplify/functions/downloadDocument/node_modules/@aws-sdk/client-dynamodb/dist-es/commands/DescribeGlobalTableCommand.js b/amplify/functions/downloadDocument/node_modules/@aws-sdk/client-dynamodb/dist-es/commands/DescribeGlobalTableCommand.js new file mode 100644 index 0000000..87acf97 --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@aws-sdk/client-dynamodb/dist-es/commands/DescribeGlobalTableCommand.js @@ -0,0 +1,25 @@ +import { getEndpointPlugin } from "@smithy/middleware-endpoint"; +import { getSerdePlugin } from "@smithy/middleware-serde"; +import { Command as $Command } from "@smithy/smithy-client"; +import { commonParams } from "../endpoint/EndpointParameters"; +import { de_DescribeGlobalTableCommand, se_DescribeGlobalTableCommand } from "../protocols/Aws_json1_0"; +export { $Command }; +export class DescribeGlobalTableCommand extends $Command + .classBuilder() + .ep({ + ...commonParams, + 
ResourceArn: { type: "contextParams", name: "GlobalTableName" }, +}) + .m(function (Command, cs, config, o) { + return [ + getSerdePlugin(config, this.serialize, this.deserialize), + getEndpointPlugin(config, Command.getEndpointParameterInstructions()), + ]; +}) + .s("DynamoDB_20120810", "DescribeGlobalTable", {}) + .n("DynamoDBClient", "DescribeGlobalTableCommand") + .f(void 0, void 0) + .ser(se_DescribeGlobalTableCommand) + .de(de_DescribeGlobalTableCommand) + .build() { +} diff --git a/amplify/functions/downloadDocument/node_modules/@aws-sdk/client-dynamodb/dist-es/commands/DescribeGlobalTableSettingsCommand.js b/amplify/functions/downloadDocument/node_modules/@aws-sdk/client-dynamodb/dist-es/commands/DescribeGlobalTableSettingsCommand.js new file mode 100644 index 0000000..e6497a5 --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@aws-sdk/client-dynamodb/dist-es/commands/DescribeGlobalTableSettingsCommand.js @@ -0,0 +1,25 @@ +import { getEndpointPlugin } from "@smithy/middleware-endpoint"; +import { getSerdePlugin } from "@smithy/middleware-serde"; +import { Command as $Command } from "@smithy/smithy-client"; +import { commonParams } from "../endpoint/EndpointParameters"; +import { de_DescribeGlobalTableSettingsCommand, se_DescribeGlobalTableSettingsCommand } from "../protocols/Aws_json1_0"; +export { $Command }; +export class DescribeGlobalTableSettingsCommand extends $Command + .classBuilder() + .ep({ + ...commonParams, + ResourceArn: { type: "contextParams", name: "GlobalTableName" }, +}) + .m(function (Command, cs, config, o) { + return [ + getSerdePlugin(config, this.serialize, this.deserialize), + getEndpointPlugin(config, Command.getEndpointParameterInstructions()), + ]; +}) + .s("DynamoDB_20120810", "DescribeGlobalTableSettings", {}) + .n("DynamoDBClient", "DescribeGlobalTableSettingsCommand") + .f(void 0, void 0) + .ser(se_DescribeGlobalTableSettingsCommand) + .de(de_DescribeGlobalTableSettingsCommand) + .build() { +} diff --git 
a/amplify/functions/downloadDocument/node_modules/@aws-sdk/client-dynamodb/dist-es/commands/DescribeImportCommand.js b/amplify/functions/downloadDocument/node_modules/@aws-sdk/client-dynamodb/dist-es/commands/DescribeImportCommand.js new file mode 100644 index 0000000..2c147ca --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@aws-sdk/client-dynamodb/dist-es/commands/DescribeImportCommand.js @@ -0,0 +1,25 @@ +import { getEndpointPlugin } from "@smithy/middleware-endpoint"; +import { getSerdePlugin } from "@smithy/middleware-serde"; +import { Command as $Command } from "@smithy/smithy-client"; +import { commonParams } from "../endpoint/EndpointParameters"; +import { de_DescribeImportCommand, se_DescribeImportCommand } from "../protocols/Aws_json1_0"; +export { $Command }; +export class DescribeImportCommand extends $Command + .classBuilder() + .ep({ + ...commonParams, + ResourceArn: { type: "contextParams", name: "ImportArn" }, +}) + .m(function (Command, cs, config, o) { + return [ + getSerdePlugin(config, this.serialize, this.deserialize), + getEndpointPlugin(config, Command.getEndpointParameterInstructions()), + ]; +}) + .s("DynamoDB_20120810", "DescribeImport", {}) + .n("DynamoDBClient", "DescribeImportCommand") + .f(void 0, void 0) + .ser(se_DescribeImportCommand) + .de(de_DescribeImportCommand) + .build() { +} diff --git a/amplify/functions/downloadDocument/node_modules/@aws-sdk/client-dynamodb/dist-es/commands/DescribeKinesisStreamingDestinationCommand.js b/amplify/functions/downloadDocument/node_modules/@aws-sdk/client-dynamodb/dist-es/commands/DescribeKinesisStreamingDestinationCommand.js new file mode 100644 index 0000000..b2011ae --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@aws-sdk/client-dynamodb/dist-es/commands/DescribeKinesisStreamingDestinationCommand.js @@ -0,0 +1,25 @@ +import { getEndpointPlugin } from "@smithy/middleware-endpoint"; +import { getSerdePlugin } from "@smithy/middleware-serde"; +import { 
Command as $Command } from "@smithy/smithy-client"; +import { commonParams } from "../endpoint/EndpointParameters"; +import { de_DescribeKinesisStreamingDestinationCommand, se_DescribeKinesisStreamingDestinationCommand, } from "../protocols/Aws_json1_0"; +export { $Command }; +export class DescribeKinesisStreamingDestinationCommand extends $Command + .classBuilder() + .ep({ + ...commonParams, + ResourceArn: { type: "contextParams", name: "TableName" }, +}) + .m(function (Command, cs, config, o) { + return [ + getSerdePlugin(config, this.serialize, this.deserialize), + getEndpointPlugin(config, Command.getEndpointParameterInstructions()), + ]; +}) + .s("DynamoDB_20120810", "DescribeKinesisStreamingDestination", {}) + .n("DynamoDBClient", "DescribeKinesisStreamingDestinationCommand") + .f(void 0, void 0) + .ser(se_DescribeKinesisStreamingDestinationCommand) + .de(de_DescribeKinesisStreamingDestinationCommand) + .build() { +} diff --git a/amplify/functions/downloadDocument/node_modules/@aws-sdk/client-dynamodb/dist-es/commands/DescribeLimitsCommand.js b/amplify/functions/downloadDocument/node_modules/@aws-sdk/client-dynamodb/dist-es/commands/DescribeLimitsCommand.js new file mode 100644 index 0000000..38a0fcc --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@aws-sdk/client-dynamodb/dist-es/commands/DescribeLimitsCommand.js @@ -0,0 +1,22 @@ +import { getEndpointPlugin } from "@smithy/middleware-endpoint"; +import { getSerdePlugin } from "@smithy/middleware-serde"; +import { Command as $Command } from "@smithy/smithy-client"; +import { commonParams } from "../endpoint/EndpointParameters"; +import { de_DescribeLimitsCommand, se_DescribeLimitsCommand } from "../protocols/Aws_json1_0"; +export { $Command }; +export class DescribeLimitsCommand extends $Command + .classBuilder() + .ep(commonParams) + .m(function (Command, cs, config, o) { + return [ + getSerdePlugin(config, this.serialize, this.deserialize), + getEndpointPlugin(config, 
Command.getEndpointParameterInstructions()), + ]; +}) + .s("DynamoDB_20120810", "DescribeLimits", {}) + .n("DynamoDBClient", "DescribeLimitsCommand") + .f(void 0, void 0) + .ser(se_DescribeLimitsCommand) + .de(de_DescribeLimitsCommand) + .build() { +} diff --git a/amplify/functions/downloadDocument/node_modules/@aws-sdk/client-dynamodb/dist-es/commands/DescribeTableCommand.js b/amplify/functions/downloadDocument/node_modules/@aws-sdk/client-dynamodb/dist-es/commands/DescribeTableCommand.js new file mode 100644 index 0000000..b2a3ddf --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@aws-sdk/client-dynamodb/dist-es/commands/DescribeTableCommand.js @@ -0,0 +1,25 @@ +import { getEndpointPlugin } from "@smithy/middleware-endpoint"; +import { getSerdePlugin } from "@smithy/middleware-serde"; +import { Command as $Command } from "@smithy/smithy-client"; +import { commonParams } from "../endpoint/EndpointParameters"; +import { de_DescribeTableCommand, se_DescribeTableCommand } from "../protocols/Aws_json1_0"; +export { $Command }; +export class DescribeTableCommand extends $Command + .classBuilder() + .ep({ + ...commonParams, + ResourceArn: { type: "contextParams", name: "TableName" }, +}) + .m(function (Command, cs, config, o) { + return [ + getSerdePlugin(config, this.serialize, this.deserialize), + getEndpointPlugin(config, Command.getEndpointParameterInstructions()), + ]; +}) + .s("DynamoDB_20120810", "DescribeTable", {}) + .n("DynamoDBClient", "DescribeTableCommand") + .f(void 0, void 0) + .ser(se_DescribeTableCommand) + .de(de_DescribeTableCommand) + .build() { +} diff --git a/amplify/functions/downloadDocument/node_modules/@aws-sdk/client-dynamodb/dist-es/commands/DescribeTableReplicaAutoScalingCommand.js b/amplify/functions/downloadDocument/node_modules/@aws-sdk/client-dynamodb/dist-es/commands/DescribeTableReplicaAutoScalingCommand.js new file mode 100644 index 0000000..127d173 --- /dev/null +++ 
b/amplify/functions/downloadDocument/node_modules/@aws-sdk/client-dynamodb/dist-es/commands/DescribeTableReplicaAutoScalingCommand.js @@ -0,0 +1,25 @@ +import { getEndpointPlugin } from "@smithy/middleware-endpoint"; +import { getSerdePlugin } from "@smithy/middleware-serde"; +import { Command as $Command } from "@smithy/smithy-client"; +import { commonParams } from "../endpoint/EndpointParameters"; +import { de_DescribeTableReplicaAutoScalingCommand, se_DescribeTableReplicaAutoScalingCommand, } from "../protocols/Aws_json1_0"; +export { $Command }; +export class DescribeTableReplicaAutoScalingCommand extends $Command + .classBuilder() + .ep({ + ...commonParams, + ResourceArn: { type: "contextParams", name: "TableName" }, +}) + .m(function (Command, cs, config, o) { + return [ + getSerdePlugin(config, this.serialize, this.deserialize), + getEndpointPlugin(config, Command.getEndpointParameterInstructions()), + ]; +}) + .s("DynamoDB_20120810", "DescribeTableReplicaAutoScaling", {}) + .n("DynamoDBClient", "DescribeTableReplicaAutoScalingCommand") + .f(void 0, void 0) + .ser(se_DescribeTableReplicaAutoScalingCommand) + .de(de_DescribeTableReplicaAutoScalingCommand) + .build() { +} diff --git a/amplify/functions/downloadDocument/node_modules/@aws-sdk/client-dynamodb/dist-es/commands/DescribeTimeToLiveCommand.js b/amplify/functions/downloadDocument/node_modules/@aws-sdk/client-dynamodb/dist-es/commands/DescribeTimeToLiveCommand.js new file mode 100644 index 0000000..0ae052c --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@aws-sdk/client-dynamodb/dist-es/commands/DescribeTimeToLiveCommand.js @@ -0,0 +1,25 @@ +import { getEndpointPlugin } from "@smithy/middleware-endpoint"; +import { getSerdePlugin } from "@smithy/middleware-serde"; +import { Command as $Command } from "@smithy/smithy-client"; +import { commonParams } from "../endpoint/EndpointParameters"; +import { de_DescribeTimeToLiveCommand, se_DescribeTimeToLiveCommand } from 
"../protocols/Aws_json1_0"; +export { $Command }; +export class DescribeTimeToLiveCommand extends $Command + .classBuilder() + .ep({ + ...commonParams, + ResourceArn: { type: "contextParams", name: "TableName" }, +}) + .m(function (Command, cs, config, o) { + return [ + getSerdePlugin(config, this.serialize, this.deserialize), + getEndpointPlugin(config, Command.getEndpointParameterInstructions()), + ]; +}) + .s("DynamoDB_20120810", "DescribeTimeToLive", {}) + .n("DynamoDBClient", "DescribeTimeToLiveCommand") + .f(void 0, void 0) + .ser(se_DescribeTimeToLiveCommand) + .de(de_DescribeTimeToLiveCommand) + .build() { +} diff --git a/amplify/functions/downloadDocument/node_modules/@aws-sdk/client-dynamodb/dist-es/commands/DisableKinesisStreamingDestinationCommand.js b/amplify/functions/downloadDocument/node_modules/@aws-sdk/client-dynamodb/dist-es/commands/DisableKinesisStreamingDestinationCommand.js new file mode 100644 index 0000000..59ac9ad --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@aws-sdk/client-dynamodb/dist-es/commands/DisableKinesisStreamingDestinationCommand.js @@ -0,0 +1,25 @@ +import { getEndpointPlugin } from "@smithy/middleware-endpoint"; +import { getSerdePlugin } from "@smithy/middleware-serde"; +import { Command as $Command } from "@smithy/smithy-client"; +import { commonParams } from "../endpoint/EndpointParameters"; +import { de_DisableKinesisStreamingDestinationCommand, se_DisableKinesisStreamingDestinationCommand, } from "../protocols/Aws_json1_0"; +export { $Command }; +export class DisableKinesisStreamingDestinationCommand extends $Command + .classBuilder() + .ep({ + ...commonParams, + ResourceArn: { type: "contextParams", name: "TableName" }, +}) + .m(function (Command, cs, config, o) { + return [ + getSerdePlugin(config, this.serialize, this.deserialize), + getEndpointPlugin(config, Command.getEndpointParameterInstructions()), + ]; +}) + .s("DynamoDB_20120810", "DisableKinesisStreamingDestination", {}) + 
.n("DynamoDBClient", "DisableKinesisStreamingDestinationCommand") + .f(void 0, void 0) + .ser(se_DisableKinesisStreamingDestinationCommand) + .de(de_DisableKinesisStreamingDestinationCommand) + .build() { +} diff --git a/amplify/functions/downloadDocument/node_modules/@aws-sdk/client-dynamodb/dist-es/commands/EnableKinesisStreamingDestinationCommand.js b/amplify/functions/downloadDocument/node_modules/@aws-sdk/client-dynamodb/dist-es/commands/EnableKinesisStreamingDestinationCommand.js new file mode 100644 index 0000000..e96a5f5 --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@aws-sdk/client-dynamodb/dist-es/commands/EnableKinesisStreamingDestinationCommand.js @@ -0,0 +1,25 @@ +import { getEndpointPlugin } from "@smithy/middleware-endpoint"; +import { getSerdePlugin } from "@smithy/middleware-serde"; +import { Command as $Command } from "@smithy/smithy-client"; +import { commonParams } from "../endpoint/EndpointParameters"; +import { de_EnableKinesisStreamingDestinationCommand, se_EnableKinesisStreamingDestinationCommand, } from "../protocols/Aws_json1_0"; +export { $Command }; +export class EnableKinesisStreamingDestinationCommand extends $Command + .classBuilder() + .ep({ + ...commonParams, + ResourceArn: { type: "contextParams", name: "TableName" }, +}) + .m(function (Command, cs, config, o) { + return [ + getSerdePlugin(config, this.serialize, this.deserialize), + getEndpointPlugin(config, Command.getEndpointParameterInstructions()), + ]; +}) + .s("DynamoDB_20120810", "EnableKinesisStreamingDestination", {}) + .n("DynamoDBClient", "EnableKinesisStreamingDestinationCommand") + .f(void 0, void 0) + .ser(se_EnableKinesisStreamingDestinationCommand) + .de(de_EnableKinesisStreamingDestinationCommand) + .build() { +} diff --git a/amplify/functions/downloadDocument/node_modules/@aws-sdk/client-dynamodb/dist-es/commands/ExecuteStatementCommand.js 
b/amplify/functions/downloadDocument/node_modules/@aws-sdk/client-dynamodb/dist-es/commands/ExecuteStatementCommand.js new file mode 100644 index 0000000..8402c48 --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@aws-sdk/client-dynamodb/dist-es/commands/ExecuteStatementCommand.js @@ -0,0 +1,22 @@ +import { getEndpointPlugin } from "@smithy/middleware-endpoint"; +import { getSerdePlugin } from "@smithy/middleware-serde"; +import { Command as $Command } from "@smithy/smithy-client"; +import { commonParams } from "../endpoint/EndpointParameters"; +import { de_ExecuteStatementCommand, se_ExecuteStatementCommand } from "../protocols/Aws_json1_0"; +export { $Command }; +export class ExecuteStatementCommand extends $Command + .classBuilder() + .ep(commonParams) + .m(function (Command, cs, config, o) { + return [ + getSerdePlugin(config, this.serialize, this.deserialize), + getEndpointPlugin(config, Command.getEndpointParameterInstructions()), + ]; +}) + .s("DynamoDB_20120810", "ExecuteStatement", {}) + .n("DynamoDBClient", "ExecuteStatementCommand") + .f(void 0, void 0) + .ser(se_ExecuteStatementCommand) + .de(de_ExecuteStatementCommand) + .build() { +} diff --git a/amplify/functions/downloadDocument/node_modules/@aws-sdk/client-dynamodb/dist-es/commands/ExecuteTransactionCommand.js b/amplify/functions/downloadDocument/node_modules/@aws-sdk/client-dynamodb/dist-es/commands/ExecuteTransactionCommand.js new file mode 100644 index 0000000..2298f6f --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@aws-sdk/client-dynamodb/dist-es/commands/ExecuteTransactionCommand.js @@ -0,0 +1,22 @@ +import { getEndpointPlugin } from "@smithy/middleware-endpoint"; +import { getSerdePlugin } from "@smithy/middleware-serde"; +import { Command as $Command } from "@smithy/smithy-client"; +import { commonParams } from "../endpoint/EndpointParameters"; +import { de_ExecuteTransactionCommand, se_ExecuteTransactionCommand } from "../protocols/Aws_json1_0"; 
+export { $Command }; +export class ExecuteTransactionCommand extends $Command + .classBuilder() + .ep(commonParams) + .m(function (Command, cs, config, o) { + return [ + getSerdePlugin(config, this.serialize, this.deserialize), + getEndpointPlugin(config, Command.getEndpointParameterInstructions()), + ]; +}) + .s("DynamoDB_20120810", "ExecuteTransaction", {}) + .n("DynamoDBClient", "ExecuteTransactionCommand") + .f(void 0, void 0) + .ser(se_ExecuteTransactionCommand) + .de(de_ExecuteTransactionCommand) + .build() { +} diff --git a/amplify/functions/downloadDocument/node_modules/@aws-sdk/client-dynamodb/dist-es/commands/ExportTableToPointInTimeCommand.js b/amplify/functions/downloadDocument/node_modules/@aws-sdk/client-dynamodb/dist-es/commands/ExportTableToPointInTimeCommand.js new file mode 100644 index 0000000..7cd72fc --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@aws-sdk/client-dynamodb/dist-es/commands/ExportTableToPointInTimeCommand.js @@ -0,0 +1,25 @@ +import { getEndpointPlugin } from "@smithy/middleware-endpoint"; +import { getSerdePlugin } from "@smithy/middleware-serde"; +import { Command as $Command } from "@smithy/smithy-client"; +import { commonParams } from "../endpoint/EndpointParameters"; +import { de_ExportTableToPointInTimeCommand, se_ExportTableToPointInTimeCommand } from "../protocols/Aws_json1_0"; +export { $Command }; +export class ExportTableToPointInTimeCommand extends $Command + .classBuilder() + .ep({ + ...commonParams, + ResourceArn: { type: "contextParams", name: "TableArn" }, +}) + .m(function (Command, cs, config, o) { + return [ + getSerdePlugin(config, this.serialize, this.deserialize), + getEndpointPlugin(config, Command.getEndpointParameterInstructions()), + ]; +}) + .s("DynamoDB_20120810", "ExportTableToPointInTime", {}) + .n("DynamoDBClient", "ExportTableToPointInTimeCommand") + .f(void 0, void 0) + .ser(se_ExportTableToPointInTimeCommand) + .de(de_ExportTableToPointInTimeCommand) + .build() { +} diff 
--git a/amplify/functions/downloadDocument/node_modules/@aws-sdk/client-dynamodb/dist-es/commands/GetItemCommand.js b/amplify/functions/downloadDocument/node_modules/@aws-sdk/client-dynamodb/dist-es/commands/GetItemCommand.js new file mode 100644 index 0000000..9b8e996 --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@aws-sdk/client-dynamodb/dist-es/commands/GetItemCommand.js @@ -0,0 +1,25 @@ +import { getEndpointPlugin } from "@smithy/middleware-endpoint"; +import { getSerdePlugin } from "@smithy/middleware-serde"; +import { Command as $Command } from "@smithy/smithy-client"; +import { commonParams } from "../endpoint/EndpointParameters"; +import { de_GetItemCommand, se_GetItemCommand } from "../protocols/Aws_json1_0"; +export { $Command }; +export class GetItemCommand extends $Command + .classBuilder() + .ep({ + ...commonParams, + ResourceArn: { type: "contextParams", name: "TableName" }, +}) + .m(function (Command, cs, config, o) { + return [ + getSerdePlugin(config, this.serialize, this.deserialize), + getEndpointPlugin(config, Command.getEndpointParameterInstructions()), + ]; +}) + .s("DynamoDB_20120810", "GetItem", {}) + .n("DynamoDBClient", "GetItemCommand") + .f(void 0, void 0) + .ser(se_GetItemCommand) + .de(de_GetItemCommand) + .build() { +} diff --git a/amplify/functions/downloadDocument/node_modules/@aws-sdk/client-dynamodb/dist-es/commands/GetResourcePolicyCommand.js b/amplify/functions/downloadDocument/node_modules/@aws-sdk/client-dynamodb/dist-es/commands/GetResourcePolicyCommand.js new file mode 100644 index 0000000..9b67404 --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@aws-sdk/client-dynamodb/dist-es/commands/GetResourcePolicyCommand.js @@ -0,0 +1,25 @@ +import { getEndpointPlugin } from "@smithy/middleware-endpoint"; +import { getSerdePlugin } from "@smithy/middleware-serde"; +import { Command as $Command } from "@smithy/smithy-client"; +import { commonParams } from "../endpoint/EndpointParameters"; 
+import { de_GetResourcePolicyCommand, se_GetResourcePolicyCommand } from "../protocols/Aws_json1_0"; +export { $Command }; +export class GetResourcePolicyCommand extends $Command + .classBuilder() + .ep({ + ...commonParams, + ResourceArn: { type: "contextParams", name: "ResourceArn" }, +}) + .m(function (Command, cs, config, o) { + return [ + getSerdePlugin(config, this.serialize, this.deserialize), + getEndpointPlugin(config, Command.getEndpointParameterInstructions()), + ]; +}) + .s("DynamoDB_20120810", "GetResourcePolicy", {}) + .n("DynamoDBClient", "GetResourcePolicyCommand") + .f(void 0, void 0) + .ser(se_GetResourcePolicyCommand) + .de(de_GetResourcePolicyCommand) + .build() { +} diff --git a/amplify/functions/downloadDocument/node_modules/@aws-sdk/client-dynamodb/dist-es/commands/ImportTableCommand.js b/amplify/functions/downloadDocument/node_modules/@aws-sdk/client-dynamodb/dist-es/commands/ImportTableCommand.js new file mode 100644 index 0000000..2f7a05d --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@aws-sdk/client-dynamodb/dist-es/commands/ImportTableCommand.js @@ -0,0 +1,25 @@ +import { getEndpointPlugin } from "@smithy/middleware-endpoint"; +import { getSerdePlugin } from "@smithy/middleware-serde"; +import { Command as $Command } from "@smithy/smithy-client"; +import { commonParams } from "../endpoint/EndpointParameters"; +import { de_ImportTableCommand, se_ImportTableCommand } from "../protocols/Aws_json1_0"; +export { $Command }; +export class ImportTableCommand extends $Command + .classBuilder() + .ep({ + ...commonParams, + ResourceArn: { type: "operationContextParams", get: (input) => input?.TableCreationParameters?.TableName }, +}) + .m(function (Command, cs, config, o) { + return [ + getSerdePlugin(config, this.serialize, this.deserialize), + getEndpointPlugin(config, Command.getEndpointParameterInstructions()), + ]; +}) + .s("DynamoDB_20120810", "ImportTable", {}) + .n("DynamoDBClient", "ImportTableCommand") + .f(void 0, 
void 0) + .ser(se_ImportTableCommand) + .de(de_ImportTableCommand) + .build() { +} diff --git a/amplify/functions/downloadDocument/node_modules/@aws-sdk/client-dynamodb/dist-es/commands/ListBackupsCommand.js b/amplify/functions/downloadDocument/node_modules/@aws-sdk/client-dynamodb/dist-es/commands/ListBackupsCommand.js new file mode 100644 index 0000000..8e378a6 --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@aws-sdk/client-dynamodb/dist-es/commands/ListBackupsCommand.js @@ -0,0 +1,25 @@ +import { getEndpointPlugin } from "@smithy/middleware-endpoint"; +import { getSerdePlugin } from "@smithy/middleware-serde"; +import { Command as $Command } from "@smithy/smithy-client"; +import { commonParams } from "../endpoint/EndpointParameters"; +import { de_ListBackupsCommand, se_ListBackupsCommand } from "../protocols/Aws_json1_0"; +export { $Command }; +export class ListBackupsCommand extends $Command + .classBuilder() + .ep({ + ...commonParams, + ResourceArn: { type: "contextParams", name: "TableName" }, +}) + .m(function (Command, cs, config, o) { + return [ + getSerdePlugin(config, this.serialize, this.deserialize), + getEndpointPlugin(config, Command.getEndpointParameterInstructions()), + ]; +}) + .s("DynamoDB_20120810", "ListBackups", {}) + .n("DynamoDBClient", "ListBackupsCommand") + .f(void 0, void 0) + .ser(se_ListBackupsCommand) + .de(de_ListBackupsCommand) + .build() { +} diff --git a/amplify/functions/downloadDocument/node_modules/@aws-sdk/client-dynamodb/dist-es/commands/ListContributorInsightsCommand.js b/amplify/functions/downloadDocument/node_modules/@aws-sdk/client-dynamodb/dist-es/commands/ListContributorInsightsCommand.js new file mode 100644 index 0000000..09e5506 --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@aws-sdk/client-dynamodb/dist-es/commands/ListContributorInsightsCommand.js @@ -0,0 +1,25 @@ +import { getEndpointPlugin } from "@smithy/middleware-endpoint"; +import { getSerdePlugin } from 
"@smithy/middleware-serde"; +import { Command as $Command } from "@smithy/smithy-client"; +import { commonParams } from "../endpoint/EndpointParameters"; +import { de_ListContributorInsightsCommand, se_ListContributorInsightsCommand } from "../protocols/Aws_json1_0"; +export { $Command }; +export class ListContributorInsightsCommand extends $Command + .classBuilder() + .ep({ + ...commonParams, + ResourceArn: { type: "contextParams", name: "TableName" }, +}) + .m(function (Command, cs, config, o) { + return [ + getSerdePlugin(config, this.serialize, this.deserialize), + getEndpointPlugin(config, Command.getEndpointParameterInstructions()), + ]; +}) + .s("DynamoDB_20120810", "ListContributorInsights", {}) + .n("DynamoDBClient", "ListContributorInsightsCommand") + .f(void 0, void 0) + .ser(se_ListContributorInsightsCommand) + .de(de_ListContributorInsightsCommand) + .build() { +} diff --git a/amplify/functions/downloadDocument/node_modules/@aws-sdk/client-dynamodb/dist-es/commands/ListExportsCommand.js b/amplify/functions/downloadDocument/node_modules/@aws-sdk/client-dynamodb/dist-es/commands/ListExportsCommand.js new file mode 100644 index 0000000..52e6bf0 --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@aws-sdk/client-dynamodb/dist-es/commands/ListExportsCommand.js @@ -0,0 +1,25 @@ +import { getEndpointPlugin } from "@smithy/middleware-endpoint"; +import { getSerdePlugin } from "@smithy/middleware-serde"; +import { Command as $Command } from "@smithy/smithy-client"; +import { commonParams } from "../endpoint/EndpointParameters"; +import { de_ListExportsCommand, se_ListExportsCommand } from "../protocols/Aws_json1_0"; +export { $Command }; +export class ListExportsCommand extends $Command + .classBuilder() + .ep({ + ...commonParams, + ResourceArn: { type: "contextParams", name: "TableArn" }, +}) + .m(function (Command, cs, config, o) { + return [ + getSerdePlugin(config, this.serialize, this.deserialize), + getEndpointPlugin(config, 
Command.getEndpointParameterInstructions()), + ]; +}) + .s("DynamoDB_20120810", "ListExports", {}) + .n("DynamoDBClient", "ListExportsCommand") + .f(void 0, void 0) + .ser(se_ListExportsCommand) + .de(de_ListExportsCommand) + .build() { +} diff --git a/amplify/functions/downloadDocument/node_modules/@aws-sdk/client-dynamodb/dist-es/commands/ListGlobalTablesCommand.js b/amplify/functions/downloadDocument/node_modules/@aws-sdk/client-dynamodb/dist-es/commands/ListGlobalTablesCommand.js new file mode 100644 index 0000000..01def1c --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@aws-sdk/client-dynamodb/dist-es/commands/ListGlobalTablesCommand.js @@ -0,0 +1,22 @@ +import { getEndpointPlugin } from "@smithy/middleware-endpoint"; +import { getSerdePlugin } from "@smithy/middleware-serde"; +import { Command as $Command } from "@smithy/smithy-client"; +import { commonParams } from "../endpoint/EndpointParameters"; +import { de_ListGlobalTablesCommand, se_ListGlobalTablesCommand } from "../protocols/Aws_json1_0"; +export { $Command }; +export class ListGlobalTablesCommand extends $Command + .classBuilder() + .ep(commonParams) + .m(function (Command, cs, config, o) { + return [ + getSerdePlugin(config, this.serialize, this.deserialize), + getEndpointPlugin(config, Command.getEndpointParameterInstructions()), + ]; +}) + .s("DynamoDB_20120810", "ListGlobalTables", {}) + .n("DynamoDBClient", "ListGlobalTablesCommand") + .f(void 0, void 0) + .ser(se_ListGlobalTablesCommand) + .de(de_ListGlobalTablesCommand) + .build() { +} diff --git a/amplify/functions/downloadDocument/node_modules/@aws-sdk/client-dynamodb/dist-es/commands/ListImportsCommand.js b/amplify/functions/downloadDocument/node_modules/@aws-sdk/client-dynamodb/dist-es/commands/ListImportsCommand.js new file mode 100644 index 0000000..89b1f8b --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@aws-sdk/client-dynamodb/dist-es/commands/ListImportsCommand.js @@ -0,0 +1,25 @@ +import { 
getEndpointPlugin } from "@smithy/middleware-endpoint"; +import { getSerdePlugin } from "@smithy/middleware-serde"; +import { Command as $Command } from "@smithy/smithy-client"; +import { commonParams } from "../endpoint/EndpointParameters"; +import { de_ListImportsCommand, se_ListImportsCommand } from "../protocols/Aws_json1_0"; +export { $Command }; +export class ListImportsCommand extends $Command + .classBuilder() + .ep({ + ...commonParams, + ResourceArn: { type: "contextParams", name: "TableArn" }, +}) + .m(function (Command, cs, config, o) { + return [ + getSerdePlugin(config, this.serialize, this.deserialize), + getEndpointPlugin(config, Command.getEndpointParameterInstructions()), + ]; +}) + .s("DynamoDB_20120810", "ListImports", {}) + .n("DynamoDBClient", "ListImportsCommand") + .f(void 0, void 0) + .ser(se_ListImportsCommand) + .de(de_ListImportsCommand) + .build() { +} diff --git a/amplify/functions/downloadDocument/node_modules/@aws-sdk/client-dynamodb/dist-es/commands/ListTablesCommand.js b/amplify/functions/downloadDocument/node_modules/@aws-sdk/client-dynamodb/dist-es/commands/ListTablesCommand.js new file mode 100644 index 0000000..6b299d1 --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@aws-sdk/client-dynamodb/dist-es/commands/ListTablesCommand.js @@ -0,0 +1,22 @@ +import { getEndpointPlugin } from "@smithy/middleware-endpoint"; +import { getSerdePlugin } from "@smithy/middleware-serde"; +import { Command as $Command } from "@smithy/smithy-client"; +import { commonParams } from "../endpoint/EndpointParameters"; +import { de_ListTablesCommand, se_ListTablesCommand } from "../protocols/Aws_json1_0"; +export { $Command }; +export class ListTablesCommand extends $Command + .classBuilder() + .ep(commonParams) + .m(function (Command, cs, config, o) { + return [ + getSerdePlugin(config, this.serialize, this.deserialize), + getEndpointPlugin(config, Command.getEndpointParameterInstructions()), + ]; +}) + .s("DynamoDB_20120810", 
"ListTables", {}) + .n("DynamoDBClient", "ListTablesCommand") + .f(void 0, void 0) + .ser(se_ListTablesCommand) + .de(de_ListTablesCommand) + .build() { +} diff --git a/amplify/functions/downloadDocument/node_modules/@aws-sdk/client-dynamodb/dist-es/commands/ListTagsOfResourceCommand.js b/amplify/functions/downloadDocument/node_modules/@aws-sdk/client-dynamodb/dist-es/commands/ListTagsOfResourceCommand.js new file mode 100644 index 0000000..1e0fb57 --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@aws-sdk/client-dynamodb/dist-es/commands/ListTagsOfResourceCommand.js @@ -0,0 +1,25 @@ +import { getEndpointPlugin } from "@smithy/middleware-endpoint"; +import { getSerdePlugin } from "@smithy/middleware-serde"; +import { Command as $Command } from "@smithy/smithy-client"; +import { commonParams } from "../endpoint/EndpointParameters"; +import { de_ListTagsOfResourceCommand, se_ListTagsOfResourceCommand } from "../protocols/Aws_json1_0"; +export { $Command }; +export class ListTagsOfResourceCommand extends $Command + .classBuilder() + .ep({ + ...commonParams, + ResourceArn: { type: "contextParams", name: "ResourceArn" }, +}) + .m(function (Command, cs, config, o) { + return [ + getSerdePlugin(config, this.serialize, this.deserialize), + getEndpointPlugin(config, Command.getEndpointParameterInstructions()), + ]; +}) + .s("DynamoDB_20120810", "ListTagsOfResource", {}) + .n("DynamoDBClient", "ListTagsOfResourceCommand") + .f(void 0, void 0) + .ser(se_ListTagsOfResourceCommand) + .de(de_ListTagsOfResourceCommand) + .build() { +} diff --git a/amplify/functions/downloadDocument/node_modules/@aws-sdk/client-dynamodb/dist-es/commands/PutItemCommand.js b/amplify/functions/downloadDocument/node_modules/@aws-sdk/client-dynamodb/dist-es/commands/PutItemCommand.js new file mode 100644 index 0000000..7d4bef4 --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@aws-sdk/client-dynamodb/dist-es/commands/PutItemCommand.js @@ -0,0 +1,25 @@ +import { 
getEndpointPlugin } from "@smithy/middleware-endpoint"; +import { getSerdePlugin } from "@smithy/middleware-serde"; +import { Command as $Command } from "@smithy/smithy-client"; +import { commonParams } from "../endpoint/EndpointParameters"; +import { de_PutItemCommand, se_PutItemCommand } from "../protocols/Aws_json1_0"; +export { $Command }; +export class PutItemCommand extends $Command + .classBuilder() + .ep({ + ...commonParams, + ResourceArn: { type: "contextParams", name: "TableName" }, +}) + .m(function (Command, cs, config, o) { + return [ + getSerdePlugin(config, this.serialize, this.deserialize), + getEndpointPlugin(config, Command.getEndpointParameterInstructions()), + ]; +}) + .s("DynamoDB_20120810", "PutItem", {}) + .n("DynamoDBClient", "PutItemCommand") + .f(void 0, void 0) + .ser(se_PutItemCommand) + .de(de_PutItemCommand) + .build() { +} diff --git a/amplify/functions/downloadDocument/node_modules/@aws-sdk/client-dynamodb/dist-es/commands/PutResourcePolicyCommand.js b/amplify/functions/downloadDocument/node_modules/@aws-sdk/client-dynamodb/dist-es/commands/PutResourcePolicyCommand.js new file mode 100644 index 0000000..d683507 --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@aws-sdk/client-dynamodb/dist-es/commands/PutResourcePolicyCommand.js @@ -0,0 +1,25 @@ +import { getEndpointPlugin } from "@smithy/middleware-endpoint"; +import { getSerdePlugin } from "@smithy/middleware-serde"; +import { Command as $Command } from "@smithy/smithy-client"; +import { commonParams } from "../endpoint/EndpointParameters"; +import { de_PutResourcePolicyCommand, se_PutResourcePolicyCommand } from "../protocols/Aws_json1_0"; +export { $Command }; +export class PutResourcePolicyCommand extends $Command + .classBuilder() + .ep({ + ...commonParams, + ResourceArn: { type: "contextParams", name: "ResourceArn" }, +}) + .m(function (Command, cs, config, o) { + return [ + getSerdePlugin(config, this.serialize, this.deserialize), + getEndpointPlugin(config, 
Command.getEndpointParameterInstructions()), + ]; +}) + .s("DynamoDB_20120810", "PutResourcePolicy", {}) + .n("DynamoDBClient", "PutResourcePolicyCommand") + .f(void 0, void 0) + .ser(se_PutResourcePolicyCommand) + .de(de_PutResourcePolicyCommand) + .build() { +} diff --git a/amplify/functions/downloadDocument/node_modules/@aws-sdk/client-dynamodb/dist-es/commands/QueryCommand.js b/amplify/functions/downloadDocument/node_modules/@aws-sdk/client-dynamodb/dist-es/commands/QueryCommand.js new file mode 100644 index 0000000..9ee6441 --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@aws-sdk/client-dynamodb/dist-es/commands/QueryCommand.js @@ -0,0 +1,25 @@ +import { getEndpointPlugin } from "@smithy/middleware-endpoint"; +import { getSerdePlugin } from "@smithy/middleware-serde"; +import { Command as $Command } from "@smithy/smithy-client"; +import { commonParams } from "../endpoint/EndpointParameters"; +import { de_QueryCommand, se_QueryCommand } from "../protocols/Aws_json1_0"; +export { $Command }; +export class QueryCommand extends $Command + .classBuilder() + .ep({ + ...commonParams, + ResourceArn: { type: "contextParams", name: "TableName" }, +}) + .m(function (Command, cs, config, o) { + return [ + getSerdePlugin(config, this.serialize, this.deserialize), + getEndpointPlugin(config, Command.getEndpointParameterInstructions()), + ]; +}) + .s("DynamoDB_20120810", "Query", {}) + .n("DynamoDBClient", "QueryCommand") + .f(void 0, void 0) + .ser(se_QueryCommand) + .de(de_QueryCommand) + .build() { +} diff --git a/amplify/functions/downloadDocument/node_modules/@aws-sdk/client-dynamodb/dist-es/commands/RestoreTableFromBackupCommand.js b/amplify/functions/downloadDocument/node_modules/@aws-sdk/client-dynamodb/dist-es/commands/RestoreTableFromBackupCommand.js new file mode 100644 index 0000000..7dd9ba0 --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@aws-sdk/client-dynamodb/dist-es/commands/RestoreTableFromBackupCommand.js @@ -0,0 
+1,25 @@ +import { getEndpointPlugin } from "@smithy/middleware-endpoint"; +import { getSerdePlugin } from "@smithy/middleware-serde"; +import { Command as $Command } from "@smithy/smithy-client"; +import { commonParams } from "../endpoint/EndpointParameters"; +import { de_RestoreTableFromBackupCommand, se_RestoreTableFromBackupCommand } from "../protocols/Aws_json1_0"; +export { $Command }; +export class RestoreTableFromBackupCommand extends $Command + .classBuilder() + .ep({ + ...commonParams, + ResourceArn: { type: "contextParams", name: "TargetTableName" }, +}) + .m(function (Command, cs, config, o) { + return [ + getSerdePlugin(config, this.serialize, this.deserialize), + getEndpointPlugin(config, Command.getEndpointParameterInstructions()), + ]; +}) + .s("DynamoDB_20120810", "RestoreTableFromBackup", {}) + .n("DynamoDBClient", "RestoreTableFromBackupCommand") + .f(void 0, void 0) + .ser(se_RestoreTableFromBackupCommand) + .de(de_RestoreTableFromBackupCommand) + .build() { +} diff --git a/amplify/functions/downloadDocument/node_modules/@aws-sdk/client-dynamodb/dist-es/commands/RestoreTableToPointInTimeCommand.js b/amplify/functions/downloadDocument/node_modules/@aws-sdk/client-dynamodb/dist-es/commands/RestoreTableToPointInTimeCommand.js new file mode 100644 index 0000000..98bbb12 --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@aws-sdk/client-dynamodb/dist-es/commands/RestoreTableToPointInTimeCommand.js @@ -0,0 +1,25 @@ +import { getEndpointPlugin } from "@smithy/middleware-endpoint"; +import { getSerdePlugin } from "@smithy/middleware-serde"; +import { Command as $Command } from "@smithy/smithy-client"; +import { commonParams } from "../endpoint/EndpointParameters"; +import { de_RestoreTableToPointInTimeCommand, se_RestoreTableToPointInTimeCommand } from "../protocols/Aws_json1_0"; +export { $Command }; +export class RestoreTableToPointInTimeCommand extends $Command + .classBuilder() + .ep({ + ...commonParams, + ResourceArn: { type: 
"contextParams", name: "TargetTableName" }, +}) + .m(function (Command, cs, config, o) { + return [ + getSerdePlugin(config, this.serialize, this.deserialize), + getEndpointPlugin(config, Command.getEndpointParameterInstructions()), + ]; +}) + .s("DynamoDB_20120810", "RestoreTableToPointInTime", {}) + .n("DynamoDBClient", "RestoreTableToPointInTimeCommand") + .f(void 0, void 0) + .ser(se_RestoreTableToPointInTimeCommand) + .de(de_RestoreTableToPointInTimeCommand) + .build() { +} diff --git a/amplify/functions/downloadDocument/node_modules/@aws-sdk/client-dynamodb/dist-es/commands/ScanCommand.js b/amplify/functions/downloadDocument/node_modules/@aws-sdk/client-dynamodb/dist-es/commands/ScanCommand.js new file mode 100644 index 0000000..66285c2 --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@aws-sdk/client-dynamodb/dist-es/commands/ScanCommand.js @@ -0,0 +1,25 @@ +import { getEndpointPlugin } from "@smithy/middleware-endpoint"; +import { getSerdePlugin } from "@smithy/middleware-serde"; +import { Command as $Command } from "@smithy/smithy-client"; +import { commonParams } from "../endpoint/EndpointParameters"; +import { de_ScanCommand, se_ScanCommand } from "../protocols/Aws_json1_0"; +export { $Command }; +export class ScanCommand extends $Command + .classBuilder() + .ep({ + ...commonParams, + ResourceArn: { type: "contextParams", name: "TableName" }, +}) + .m(function (Command, cs, config, o) { + return [ + getSerdePlugin(config, this.serialize, this.deserialize), + getEndpointPlugin(config, Command.getEndpointParameterInstructions()), + ]; +}) + .s("DynamoDB_20120810", "Scan", {}) + .n("DynamoDBClient", "ScanCommand") + .f(void 0, void 0) + .ser(se_ScanCommand) + .de(de_ScanCommand) + .build() { +} diff --git a/amplify/functions/downloadDocument/node_modules/@aws-sdk/client-dynamodb/dist-es/commands/TagResourceCommand.js b/amplify/functions/downloadDocument/node_modules/@aws-sdk/client-dynamodb/dist-es/commands/TagResourceCommand.js new file 
mode 100644 index 0000000..244c7c2 --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@aws-sdk/client-dynamodb/dist-es/commands/TagResourceCommand.js @@ -0,0 +1,25 @@ +import { getEndpointPlugin } from "@smithy/middleware-endpoint"; +import { getSerdePlugin } from "@smithy/middleware-serde"; +import { Command as $Command } from "@smithy/smithy-client"; +import { commonParams } from "../endpoint/EndpointParameters"; +import { de_TagResourceCommand, se_TagResourceCommand } from "../protocols/Aws_json1_0"; +export { $Command }; +export class TagResourceCommand extends $Command + .classBuilder() + .ep({ + ...commonParams, + ResourceArn: { type: "contextParams", name: "ResourceArn" }, +}) + .m(function (Command, cs, config, o) { + return [ + getSerdePlugin(config, this.serialize, this.deserialize), + getEndpointPlugin(config, Command.getEndpointParameterInstructions()), + ]; +}) + .s("DynamoDB_20120810", "TagResource", {}) + .n("DynamoDBClient", "TagResourceCommand") + .f(void 0, void 0) + .ser(se_TagResourceCommand) + .de(de_TagResourceCommand) + .build() { +} diff --git a/amplify/functions/downloadDocument/node_modules/@aws-sdk/client-dynamodb/dist-es/commands/TransactGetItemsCommand.js b/amplify/functions/downloadDocument/node_modules/@aws-sdk/client-dynamodb/dist-es/commands/TransactGetItemsCommand.js new file mode 100644 index 0000000..82b3738 --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@aws-sdk/client-dynamodb/dist-es/commands/TransactGetItemsCommand.js @@ -0,0 +1,28 @@ +import { getEndpointPlugin } from "@smithy/middleware-endpoint"; +import { getSerdePlugin } from "@smithy/middleware-serde"; +import { Command as $Command } from "@smithy/smithy-client"; +import { commonParams } from "../endpoint/EndpointParameters"; +import { de_TransactGetItemsCommand, se_TransactGetItemsCommand } from "../protocols/Aws_json1_0"; +export { $Command }; +export class TransactGetItemsCommand extends $Command + .classBuilder() + .ep({ + 
...commonParams, + ResourceArnList: { + type: "operationContextParams", + get: (input) => input?.TransactItems?.map((obj) => obj?.Get?.TableName), + }, +}) + .m(function (Command, cs, config, o) { + return [ + getSerdePlugin(config, this.serialize, this.deserialize), + getEndpointPlugin(config, Command.getEndpointParameterInstructions()), + ]; +}) + .s("DynamoDB_20120810", "TransactGetItems", {}) + .n("DynamoDBClient", "TransactGetItemsCommand") + .f(void 0, void 0) + .ser(se_TransactGetItemsCommand) + .de(de_TransactGetItemsCommand) + .build() { +} diff --git a/amplify/functions/downloadDocument/node_modules/@aws-sdk/client-dynamodb/dist-es/commands/TransactWriteItemsCommand.js b/amplify/functions/downloadDocument/node_modules/@aws-sdk/client-dynamodb/dist-es/commands/TransactWriteItemsCommand.js new file mode 100644 index 0000000..86d749b --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@aws-sdk/client-dynamodb/dist-es/commands/TransactWriteItemsCommand.js @@ -0,0 +1,28 @@ +import { getEndpointPlugin } from "@smithy/middleware-endpoint"; +import { getSerdePlugin } from "@smithy/middleware-serde"; +import { Command as $Command } from "@smithy/smithy-client"; +import { commonParams } from "../endpoint/EndpointParameters"; +import { de_TransactWriteItemsCommand, se_TransactWriteItemsCommand } from "../protocols/Aws_json1_0"; +export { $Command }; +export class TransactWriteItemsCommand extends $Command + .classBuilder() + .ep({ + ...commonParams, + ResourceArnList: { + type: "operationContextParams", + get: (input) => input?.TransactItems?.map((obj) => [obj?.ConditionCheck?.TableName, obj?.Put?.TableName, obj?.Delete?.TableName, obj?.Update?.TableName].filter((i) => i)).flat(), + }, +}) + .m(function (Command, cs, config, o) { + return [ + getSerdePlugin(config, this.serialize, this.deserialize), + getEndpointPlugin(config, Command.getEndpointParameterInstructions()), + ]; +}) + .s("DynamoDB_20120810", "TransactWriteItems", {}) + 
.n("DynamoDBClient", "TransactWriteItemsCommand") + .f(void 0, void 0) + .ser(se_TransactWriteItemsCommand) + .de(de_TransactWriteItemsCommand) + .build() { +} diff --git a/amplify/functions/downloadDocument/node_modules/@aws-sdk/client-dynamodb/dist-es/commands/UntagResourceCommand.js b/amplify/functions/downloadDocument/node_modules/@aws-sdk/client-dynamodb/dist-es/commands/UntagResourceCommand.js new file mode 100644 index 0000000..849acf1 --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@aws-sdk/client-dynamodb/dist-es/commands/UntagResourceCommand.js @@ -0,0 +1,25 @@ +import { getEndpointPlugin } from "@smithy/middleware-endpoint"; +import { getSerdePlugin } from "@smithy/middleware-serde"; +import { Command as $Command } from "@smithy/smithy-client"; +import { commonParams } from "../endpoint/EndpointParameters"; +import { de_UntagResourceCommand, se_UntagResourceCommand } from "../protocols/Aws_json1_0"; +export { $Command }; +export class UntagResourceCommand extends $Command + .classBuilder() + .ep({ + ...commonParams, + ResourceArn: { type: "contextParams", name: "ResourceArn" }, +}) + .m(function (Command, cs, config, o) { + return [ + getSerdePlugin(config, this.serialize, this.deserialize), + getEndpointPlugin(config, Command.getEndpointParameterInstructions()), + ]; +}) + .s("DynamoDB_20120810", "UntagResource", {}) + .n("DynamoDBClient", "UntagResourceCommand") + .f(void 0, void 0) + .ser(se_UntagResourceCommand) + .de(de_UntagResourceCommand) + .build() { +} diff --git a/amplify/functions/downloadDocument/node_modules/@aws-sdk/client-dynamodb/dist-es/commands/UpdateContinuousBackupsCommand.js b/amplify/functions/downloadDocument/node_modules/@aws-sdk/client-dynamodb/dist-es/commands/UpdateContinuousBackupsCommand.js new file mode 100644 index 0000000..753b715 --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@aws-sdk/client-dynamodb/dist-es/commands/UpdateContinuousBackupsCommand.js @@ -0,0 +1,25 @@ +import { 
getEndpointPlugin } from "@smithy/middleware-endpoint"; +import { getSerdePlugin } from "@smithy/middleware-serde"; +import { Command as $Command } from "@smithy/smithy-client"; +import { commonParams } from "../endpoint/EndpointParameters"; +import { de_UpdateContinuousBackupsCommand, se_UpdateContinuousBackupsCommand } from "../protocols/Aws_json1_0"; +export { $Command }; +export class UpdateContinuousBackupsCommand extends $Command + .classBuilder() + .ep({ + ...commonParams, + ResourceArn: { type: "contextParams", name: "TableName" }, +}) + .m(function (Command, cs, config, o) { + return [ + getSerdePlugin(config, this.serialize, this.deserialize), + getEndpointPlugin(config, Command.getEndpointParameterInstructions()), + ]; +}) + .s("DynamoDB_20120810", "UpdateContinuousBackups", {}) + .n("DynamoDBClient", "UpdateContinuousBackupsCommand") + .f(void 0, void 0) + .ser(se_UpdateContinuousBackupsCommand) + .de(de_UpdateContinuousBackupsCommand) + .build() { +} diff --git a/amplify/functions/downloadDocument/node_modules/@aws-sdk/client-dynamodb/dist-es/commands/UpdateContributorInsightsCommand.js b/amplify/functions/downloadDocument/node_modules/@aws-sdk/client-dynamodb/dist-es/commands/UpdateContributorInsightsCommand.js new file mode 100644 index 0000000..8d43ddc --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@aws-sdk/client-dynamodb/dist-es/commands/UpdateContributorInsightsCommand.js @@ -0,0 +1,25 @@ +import { getEndpointPlugin } from "@smithy/middleware-endpoint"; +import { getSerdePlugin } from "@smithy/middleware-serde"; +import { Command as $Command } from "@smithy/smithy-client"; +import { commonParams } from "../endpoint/EndpointParameters"; +import { de_UpdateContributorInsightsCommand, se_UpdateContributorInsightsCommand } from "../protocols/Aws_json1_0"; +export { $Command }; +export class UpdateContributorInsightsCommand extends $Command + .classBuilder() + .ep({ + ...commonParams, + ResourceArn: { type: "contextParams", name: 
"TableName" }, +}) + .m(function (Command, cs, config, o) { + return [ + getSerdePlugin(config, this.serialize, this.deserialize), + getEndpointPlugin(config, Command.getEndpointParameterInstructions()), + ]; +}) + .s("DynamoDB_20120810", "UpdateContributorInsights", {}) + .n("DynamoDBClient", "UpdateContributorInsightsCommand") + .f(void 0, void 0) + .ser(se_UpdateContributorInsightsCommand) + .de(de_UpdateContributorInsightsCommand) + .build() { +} diff --git a/amplify/functions/downloadDocument/node_modules/@aws-sdk/client-dynamodb/dist-es/commands/UpdateGlobalTableCommand.js b/amplify/functions/downloadDocument/node_modules/@aws-sdk/client-dynamodb/dist-es/commands/UpdateGlobalTableCommand.js new file mode 100644 index 0000000..c613742 --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@aws-sdk/client-dynamodb/dist-es/commands/UpdateGlobalTableCommand.js @@ -0,0 +1,25 @@ +import { getEndpointPlugin } from "@smithy/middleware-endpoint"; +import { getSerdePlugin } from "@smithy/middleware-serde"; +import { Command as $Command } from "@smithy/smithy-client"; +import { commonParams } from "../endpoint/EndpointParameters"; +import { de_UpdateGlobalTableCommand, se_UpdateGlobalTableCommand } from "../protocols/Aws_json1_0"; +export { $Command }; +export class UpdateGlobalTableCommand extends $Command + .classBuilder() + .ep({ + ...commonParams, + ResourceArn: { type: "contextParams", name: "GlobalTableName" }, +}) + .m(function (Command, cs, config, o) { + return [ + getSerdePlugin(config, this.serialize, this.deserialize), + getEndpointPlugin(config, Command.getEndpointParameterInstructions()), + ]; +}) + .s("DynamoDB_20120810", "UpdateGlobalTable", {}) + .n("DynamoDBClient", "UpdateGlobalTableCommand") + .f(void 0, void 0) + .ser(se_UpdateGlobalTableCommand) + .de(de_UpdateGlobalTableCommand) + .build() { +} diff --git a/amplify/functions/downloadDocument/node_modules/@aws-sdk/client-dynamodb/dist-es/commands/UpdateGlobalTableSettingsCommand.js 
b/amplify/functions/downloadDocument/node_modules/@aws-sdk/client-dynamodb/dist-es/commands/UpdateGlobalTableSettingsCommand.js new file mode 100644 index 0000000..bab8a85 --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@aws-sdk/client-dynamodb/dist-es/commands/UpdateGlobalTableSettingsCommand.js @@ -0,0 +1,25 @@ +import { getEndpointPlugin } from "@smithy/middleware-endpoint"; +import { getSerdePlugin } from "@smithy/middleware-serde"; +import { Command as $Command } from "@smithy/smithy-client"; +import { commonParams } from "../endpoint/EndpointParameters"; +import { de_UpdateGlobalTableSettingsCommand, se_UpdateGlobalTableSettingsCommand } from "../protocols/Aws_json1_0"; +export { $Command }; +export class UpdateGlobalTableSettingsCommand extends $Command + .classBuilder() + .ep({ + ...commonParams, + ResourceArn: { type: "contextParams", name: "GlobalTableName" }, +}) + .m(function (Command, cs, config, o) { + return [ + getSerdePlugin(config, this.serialize, this.deserialize), + getEndpointPlugin(config, Command.getEndpointParameterInstructions()), + ]; +}) + .s("DynamoDB_20120810", "UpdateGlobalTableSettings", {}) + .n("DynamoDBClient", "UpdateGlobalTableSettingsCommand") + .f(void 0, void 0) + .ser(se_UpdateGlobalTableSettingsCommand) + .de(de_UpdateGlobalTableSettingsCommand) + .build() { +} diff --git a/amplify/functions/downloadDocument/node_modules/@aws-sdk/client-dynamodb/dist-es/commands/UpdateItemCommand.js b/amplify/functions/downloadDocument/node_modules/@aws-sdk/client-dynamodb/dist-es/commands/UpdateItemCommand.js new file mode 100644 index 0000000..1182c0d --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@aws-sdk/client-dynamodb/dist-es/commands/UpdateItemCommand.js @@ -0,0 +1,25 @@ +import { getEndpointPlugin } from "@smithy/middleware-endpoint"; +import { getSerdePlugin } from "@smithy/middleware-serde"; +import { Command as $Command } from "@smithy/smithy-client"; +import { commonParams } from 
"../endpoint/EndpointParameters"; +import { de_UpdateItemCommand, se_UpdateItemCommand } from "../protocols/Aws_json1_0"; +export { $Command }; +export class UpdateItemCommand extends $Command + .classBuilder() + .ep({ + ...commonParams, + ResourceArn: { type: "contextParams", name: "TableName" }, +}) + .m(function (Command, cs, config, o) { + return [ + getSerdePlugin(config, this.serialize, this.deserialize), + getEndpointPlugin(config, Command.getEndpointParameterInstructions()), + ]; +}) + .s("DynamoDB_20120810", "UpdateItem", {}) + .n("DynamoDBClient", "UpdateItemCommand") + .f(void 0, void 0) + .ser(se_UpdateItemCommand) + .de(de_UpdateItemCommand) + .build() { +} diff --git a/amplify/functions/downloadDocument/node_modules/@aws-sdk/client-dynamodb/dist-es/commands/UpdateKinesisStreamingDestinationCommand.js b/amplify/functions/downloadDocument/node_modules/@aws-sdk/client-dynamodb/dist-es/commands/UpdateKinesisStreamingDestinationCommand.js new file mode 100644 index 0000000..5f44195 --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@aws-sdk/client-dynamodb/dist-es/commands/UpdateKinesisStreamingDestinationCommand.js @@ -0,0 +1,25 @@ +import { getEndpointPlugin } from "@smithy/middleware-endpoint"; +import { getSerdePlugin } from "@smithy/middleware-serde"; +import { Command as $Command } from "@smithy/smithy-client"; +import { commonParams } from "../endpoint/EndpointParameters"; +import { de_UpdateKinesisStreamingDestinationCommand, se_UpdateKinesisStreamingDestinationCommand, } from "../protocols/Aws_json1_0"; +export { $Command }; +export class UpdateKinesisStreamingDestinationCommand extends $Command + .classBuilder() + .ep({ + ...commonParams, + ResourceArn: { type: "contextParams", name: "TableName" }, +}) + .m(function (Command, cs, config, o) { + return [ + getSerdePlugin(config, this.serialize, this.deserialize), + getEndpointPlugin(config, Command.getEndpointParameterInstructions()), + ]; +}) + .s("DynamoDB_20120810", 
"UpdateKinesisStreamingDestination", {}) + .n("DynamoDBClient", "UpdateKinesisStreamingDestinationCommand") + .f(void 0, void 0) + .ser(se_UpdateKinesisStreamingDestinationCommand) + .de(de_UpdateKinesisStreamingDestinationCommand) + .build() { +} diff --git a/amplify/functions/downloadDocument/node_modules/@aws-sdk/client-dynamodb/dist-es/commands/UpdateTableCommand.js b/amplify/functions/downloadDocument/node_modules/@aws-sdk/client-dynamodb/dist-es/commands/UpdateTableCommand.js new file mode 100644 index 0000000..845f3e6 --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@aws-sdk/client-dynamodb/dist-es/commands/UpdateTableCommand.js @@ -0,0 +1,25 @@ +import { getEndpointPlugin } from "@smithy/middleware-endpoint"; +import { getSerdePlugin } from "@smithy/middleware-serde"; +import { Command as $Command } from "@smithy/smithy-client"; +import { commonParams } from "../endpoint/EndpointParameters"; +import { de_UpdateTableCommand, se_UpdateTableCommand } from "../protocols/Aws_json1_0"; +export { $Command }; +export class UpdateTableCommand extends $Command + .classBuilder() + .ep({ + ...commonParams, + ResourceArn: { type: "contextParams", name: "TableName" }, +}) + .m(function (Command, cs, config, o) { + return [ + getSerdePlugin(config, this.serialize, this.deserialize), + getEndpointPlugin(config, Command.getEndpointParameterInstructions()), + ]; +}) + .s("DynamoDB_20120810", "UpdateTable", {}) + .n("DynamoDBClient", "UpdateTableCommand") + .f(void 0, void 0) + .ser(se_UpdateTableCommand) + .de(de_UpdateTableCommand) + .build() { +} diff --git a/amplify/functions/downloadDocument/node_modules/@aws-sdk/client-dynamodb/dist-es/commands/UpdateTableReplicaAutoScalingCommand.js b/amplify/functions/downloadDocument/node_modules/@aws-sdk/client-dynamodb/dist-es/commands/UpdateTableReplicaAutoScalingCommand.js new file mode 100644 index 0000000..99fb7f8 --- /dev/null +++ 
b/amplify/functions/downloadDocument/node_modules/@aws-sdk/client-dynamodb/dist-es/commands/UpdateTableReplicaAutoScalingCommand.js @@ -0,0 +1,25 @@ +import { getEndpointPlugin } from "@smithy/middleware-endpoint"; +import { getSerdePlugin } from "@smithy/middleware-serde"; +import { Command as $Command } from "@smithy/smithy-client"; +import { commonParams } from "../endpoint/EndpointParameters"; +import { de_UpdateTableReplicaAutoScalingCommand, se_UpdateTableReplicaAutoScalingCommand, } from "../protocols/Aws_json1_0"; +export { $Command }; +export class UpdateTableReplicaAutoScalingCommand extends $Command + .classBuilder() + .ep({ + ...commonParams, + ResourceArn: { type: "contextParams", name: "TableName" }, +}) + .m(function (Command, cs, config, o) { + return [ + getSerdePlugin(config, this.serialize, this.deserialize), + getEndpointPlugin(config, Command.getEndpointParameterInstructions()), + ]; +}) + .s("DynamoDB_20120810", "UpdateTableReplicaAutoScaling", {}) + .n("DynamoDBClient", "UpdateTableReplicaAutoScalingCommand") + .f(void 0, void 0) + .ser(se_UpdateTableReplicaAutoScalingCommand) + .de(de_UpdateTableReplicaAutoScalingCommand) + .build() { +} diff --git a/amplify/functions/downloadDocument/node_modules/@aws-sdk/client-dynamodb/dist-es/commands/UpdateTimeToLiveCommand.js b/amplify/functions/downloadDocument/node_modules/@aws-sdk/client-dynamodb/dist-es/commands/UpdateTimeToLiveCommand.js new file mode 100644 index 0000000..97200fb --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@aws-sdk/client-dynamodb/dist-es/commands/UpdateTimeToLiveCommand.js @@ -0,0 +1,25 @@ +import { getEndpointPlugin } from "@smithy/middleware-endpoint"; +import { getSerdePlugin } from "@smithy/middleware-serde"; +import { Command as $Command } from "@smithy/smithy-client"; +import { commonParams } from "../endpoint/EndpointParameters"; +import { de_UpdateTimeToLiveCommand, se_UpdateTimeToLiveCommand } from "../protocols/Aws_json1_0"; +export { $Command 
}; +export class UpdateTimeToLiveCommand extends $Command + .classBuilder() + .ep({ + ...commonParams, + ResourceArn: { type: "contextParams", name: "TableName" }, +}) + .m(function (Command, cs, config, o) { + return [ + getSerdePlugin(config, this.serialize, this.deserialize), + getEndpointPlugin(config, Command.getEndpointParameterInstructions()), + ]; +}) + .s("DynamoDB_20120810", "UpdateTimeToLive", {}) + .n("DynamoDBClient", "UpdateTimeToLiveCommand") + .f(void 0, void 0) + .ser(se_UpdateTimeToLiveCommand) + .de(de_UpdateTimeToLiveCommand) + .build() { +} diff --git a/amplify/functions/downloadDocument/node_modules/@aws-sdk/client-dynamodb/dist-es/commands/index.js b/amplify/functions/downloadDocument/node_modules/@aws-sdk/client-dynamodb/dist-es/commands/index.js new file mode 100644 index 0000000..a5053a4 --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@aws-sdk/client-dynamodb/dist-es/commands/index.js @@ -0,0 +1,57 @@ +export * from "./BatchExecuteStatementCommand"; +export * from "./BatchGetItemCommand"; +export * from "./BatchWriteItemCommand"; +export * from "./CreateBackupCommand"; +export * from "./CreateGlobalTableCommand"; +export * from "./CreateTableCommand"; +export * from "./DeleteBackupCommand"; +export * from "./DeleteItemCommand"; +export * from "./DeleteResourcePolicyCommand"; +export * from "./DeleteTableCommand"; +export * from "./DescribeBackupCommand"; +export * from "./DescribeContinuousBackupsCommand"; +export * from "./DescribeContributorInsightsCommand"; +export * from "./DescribeEndpointsCommand"; +export * from "./DescribeExportCommand"; +export * from "./DescribeGlobalTableCommand"; +export * from "./DescribeGlobalTableSettingsCommand"; +export * from "./DescribeImportCommand"; +export * from "./DescribeKinesisStreamingDestinationCommand"; +export * from "./DescribeLimitsCommand"; +export * from "./DescribeTableCommand"; +export * from "./DescribeTableReplicaAutoScalingCommand"; +export * from 
"./DescribeTimeToLiveCommand"; +export * from "./DisableKinesisStreamingDestinationCommand"; +export * from "./EnableKinesisStreamingDestinationCommand"; +export * from "./ExecuteStatementCommand"; +export * from "./ExecuteTransactionCommand"; +export * from "./ExportTableToPointInTimeCommand"; +export * from "./GetItemCommand"; +export * from "./GetResourcePolicyCommand"; +export * from "./ImportTableCommand"; +export * from "./ListBackupsCommand"; +export * from "./ListContributorInsightsCommand"; +export * from "./ListExportsCommand"; +export * from "./ListGlobalTablesCommand"; +export * from "./ListImportsCommand"; +export * from "./ListTablesCommand"; +export * from "./ListTagsOfResourceCommand"; +export * from "./PutItemCommand"; +export * from "./PutResourcePolicyCommand"; +export * from "./QueryCommand"; +export * from "./RestoreTableFromBackupCommand"; +export * from "./RestoreTableToPointInTimeCommand"; +export * from "./ScanCommand"; +export * from "./TagResourceCommand"; +export * from "./TransactGetItemsCommand"; +export * from "./TransactWriteItemsCommand"; +export * from "./UntagResourceCommand"; +export * from "./UpdateContinuousBackupsCommand"; +export * from "./UpdateContributorInsightsCommand"; +export * from "./UpdateGlobalTableCommand"; +export * from "./UpdateGlobalTableSettingsCommand"; +export * from "./UpdateItemCommand"; +export * from "./UpdateKinesisStreamingDestinationCommand"; +export * from "./UpdateTableCommand"; +export * from "./UpdateTableReplicaAutoScalingCommand"; +export * from "./UpdateTimeToLiveCommand"; diff --git a/amplify/functions/downloadDocument/node_modules/@aws-sdk/client-dynamodb/dist-es/endpoint/EndpointParameters.js b/amplify/functions/downloadDocument/node_modules/@aws-sdk/client-dynamodb/dist-es/endpoint/EndpointParameters.js new file mode 100644 index 0000000..e5cae7d --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@aws-sdk/client-dynamodb/dist-es/endpoint/EndpointParameters.js @@ -0,0 +1,15 
@@ +export const resolveClientEndpointParameters = (options) => { + return Object.assign(options, { + useDualstackEndpoint: options.useDualstackEndpoint ?? false, + useFipsEndpoint: options.useFipsEndpoint ?? false, + defaultSigningName: "dynamodb", + }); +}; +export const commonParams = { + UseFIPS: { type: "builtInParams", name: "useFipsEndpoint" }, + AccountId: { type: "builtInParams", name: "accountId" }, + Endpoint: { type: "builtInParams", name: "endpoint" }, + Region: { type: "builtInParams", name: "region" }, + UseDualStack: { type: "builtInParams", name: "useDualstackEndpoint" }, + AccountIdEndpointMode: { type: "builtInParams", name: "accountIdEndpointMode" }, +}; diff --git a/amplify/functions/downloadDocument/node_modules/@aws-sdk/client-dynamodb/dist-es/endpoint/endpointResolver.js b/amplify/functions/downloadDocument/node_modules/@aws-sdk/client-dynamodb/dist-es/endpoint/endpointResolver.js new file mode 100644 index 0000000..7fbe485 --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@aws-sdk/client-dynamodb/dist-es/endpoint/endpointResolver.js @@ -0,0 +1,23 @@ +import { awsEndpointFunctions } from "@aws-sdk/util-endpoints"; +import { customEndpointFunctions, EndpointCache, resolveEndpoint } from "@smithy/util-endpoints"; +import { ruleSet } from "./ruleset"; +const cache = new EndpointCache({ + size: 50, + params: [ + "AccountId", + "AccountIdEndpointMode", + "Endpoint", + "Region", + "ResourceArn", + "ResourceArnList", + "UseDualStack", + "UseFIPS", + ], +}); +export const defaultEndpointResolver = (endpointParams, context = {}) => { + return cache.get(endpointParams, () => resolveEndpoint(ruleSet, { + endpointParams: endpointParams, + logger: context.logger, + })); +}; +customEndpointFunctions.aws = awsEndpointFunctions; diff --git a/amplify/functions/downloadDocument/node_modules/@aws-sdk/client-dynamodb/dist-es/endpoint/ruleset.js 
b/amplify/functions/downloadDocument/node_modules/@aws-sdk/client-dynamodb/dist-es/endpoint/ruleset.js new file mode 100644 index 0000000..86e26fd --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@aws-sdk/client-dynamodb/dist-es/endpoint/ruleset.js @@ -0,0 +1,4 @@ +const S = "required", T = "type", U = "fn", V = "argv", W = "ref", X = "properties", Y = "headers"; +const a = false, b = "isSet", c = "error", d = "endpoint", e = "tree", f = "PartitionResult", g = "stringEquals", h = "dynamodb", i = "getAttr", j = "aws.parseArn", k = "ParsedArn", l = "isValidHostLabel", m = "FirstArn", n = { [S]: false, [T]: "String" }, o = { [S]: true, "default": false, [T]: "Boolean" }, p = { [U]: "booleanEquals", [V]: [{ [W]: "UseFIPS" }, true] }, q = { [U]: "booleanEquals", [V]: [{ [W]: "UseDualStack" }, true] }, r = {}, s = { [W]: "Region" }, t = { [U]: "booleanEquals", [V]: [{ [U]: i, [V]: [{ [W]: f }, "supportsFIPS"] }, true] }, u = { [U]: "booleanEquals", [V]: [{ [U]: i, [V]: [{ [W]: f }, "supportsDualStack"] }, true] }, v = { "conditions": [{ [U]: b, [V]: [{ [W]: "AccountIdEndpointMode" }] }, { [U]: g, [V]: [{ [W]: "AccountIdEndpointMode" }, "required"] }], "rules": [{ [c]: "Invalid Configuration: AccountIdEndpointMode is required and FIPS is enabled, but FIPS account endpoints are not supported", [T]: c }], [T]: e }, w = { [U]: b, [V]: [{ [W]: "AccountIdEndpointMode" }] }, x = { [c]: "Invalid Configuration: AccountIdEndpointMode is required and FIPS is enabled, but FIPS account endpoints are not supported", [T]: c }, y = { [U]: i, [V]: [{ [W]: f }, "name"] }, z = { [d]: { "url": "https://dynamodb.{Region}.{PartitionResult#dnsSuffix}", [X]: {}, [Y]: {} }, [T]: d }, A = { [U]: "not", [V]: [p] }, B = { [c]: "Invalid Configuration: AccountIdEndpointMode is required and DualStack is enabled, but DualStack account endpoints are not supported", [T]: c }, C = { [U]: "not", [V]: [{ [U]: g, [V]: [{ [W]: "AccountIdEndpointMode" }, "disabled"] }] }, D = { [U]: g, [V]: 
[y, "aws"] }, E = { [U]: "not", [V]: [q] }, F = { [U]: g, [V]: [{ [U]: i, [V]: [{ [W]: k }, "service"] }, h] }, G = { [U]: l, [V]: [{ [U]: i, [V]: [{ [W]: k }, "region"] }, false] }, H = { [U]: g, [V]: [{ [U]: i, [V]: [{ [W]: k }, "region"] }, "{Region}"] }, I = { [U]: l, [V]: [{ [U]: i, [V]: [{ [W]: k }, "accountId"] }, false] }, J = { "url": "https://{ParsedArn#accountId}.ddb.{Region}.{PartitionResult#dnsSuffix}", [X]: {}, [Y]: {} }, K = { [W]: "ResourceArnList" }, L = { [W]: "AccountId" }, M = [p], N = [q], O = [s], P = [w, { [U]: g, [V]: [{ [W]: "AccountIdEndpointMode" }, "required"] }], Q = [A], R = [{ [W]: "ResourceArn" }]; +const _data = { version: "1.0", parameters: { Region: n, UseDualStack: o, UseFIPS: o, Endpoint: n, AccountId: n, AccountIdEndpointMode: n, ResourceArn: n, ResourceArnList: { [S]: a, [T]: "stringArray" } }, rules: [{ conditions: [{ [U]: b, [V]: [{ [W]: "Endpoint" }] }], rules: [{ conditions: M, error: "Invalid Configuration: FIPS and custom endpoint are not supported", [T]: c }, { conditions: N, error: "Invalid Configuration: Dualstack and custom endpoint are not supported", [T]: c }, { endpoint: { url: "{Endpoint}", [X]: r, [Y]: r }, [T]: d }], [T]: e }, { conditions: [{ [U]: b, [V]: O }], rules: [{ conditions: [{ [U]: "aws.partition", [V]: O, assign: f }], rules: [{ conditions: [{ [U]: g, [V]: [s, "local"] }], rules: [{ conditions: M, error: "Invalid Configuration: FIPS and local endpoint are not supported", [T]: c }, { conditions: N, error: "Invalid Configuration: Dualstack and local endpoint are not supported", [T]: c }, { endpoint: { url: "http://localhost:8000", [X]: { authSchemes: [{ signingRegion: "us-east-1", name: "sigv4", signingName: h }] }, [Y]: r }, [T]: d }], [T]: e }, { conditions: [p, q], rules: [{ conditions: [t, u], rules: [v, { endpoint: { url: "https://dynamodb-fips.{Region}.{PartitionResult#dualStackDnsSuffix}", [X]: r, [Y]: r }, [T]: d }], [T]: e }, { error: "FIPS and DualStack are enabled, but this partition does 
not support one or both", [T]: c }], [T]: e }, { conditions: M, rules: [{ conditions: [t], rules: [{ conditions: [{ [U]: g, [V]: [y, "aws-us-gov"] }], rules: [v, z], [T]: e }, v, { endpoint: { url: "https://dynamodb-fips.{Region}.{PartitionResult#dnsSuffix}", [X]: r, [Y]: r }, [T]: d }], [T]: e }, { error: "FIPS is enabled but this partition does not support FIPS", [T]: c }], [T]: e }, { conditions: N, rules: [{ conditions: [u], rules: [{ conditions: P, rules: [{ conditions: Q, rules: [B], [T]: e }, x], [T]: e }, { endpoint: { url: "https://dynamodb.{Region}.{PartitionResult#dualStackDnsSuffix}", [X]: r, [Y]: r }, [T]: d }], [T]: e }, { error: "DualStack is enabled but this partition does not support DualStack", [T]: c }], [T]: e }, { conditions: [w, C, D, A, E, { [U]: b, [V]: R }, { [U]: j, [V]: R, assign: k }, F, G, H, I], endpoint: J, [T]: d }, { conditions: [w, C, D, A, E, { [U]: b, [V]: [K] }, { [U]: i, [V]: [K, "[0]"], assign: m }, { [U]: j, [V]: [{ [W]: m }], assign: k }, F, G, H, I], endpoint: J, [T]: d }, { conditions: [w, C, D, A, E, { [U]: b, [V]: [L] }], rules: [{ conditions: [{ [U]: l, [V]: [L, a] }], rules: [{ endpoint: { url: "https://{AccountId}.ddb.{Region}.{PartitionResult#dnsSuffix}", [X]: r, [Y]: r }, [T]: d }], [T]: e }, { error: "Credentials-sourced account ID parameter is invalid", [T]: c }], [T]: e }, { conditions: P, rules: [{ conditions: Q, rules: [{ conditions: [E], rules: [{ conditions: [D], rules: [{ error: "AccountIdEndpointMode is required but no AccountID was provided or able to be loaded", [T]: c }], [T]: e }, { error: "Invalid Configuration: AccountIdEndpointMode is required but account endpoints are not supported in this partition", [T]: c }], [T]: e }, B], [T]: e }, x], [T]: e }, z], [T]: e }], [T]: e }, { error: "Invalid Configuration: Missing Region", [T]: c }] }; +export const ruleSet = _data; diff --git a/amplify/functions/downloadDocument/node_modules/@aws-sdk/client-dynamodb/dist-es/extensionConfiguration.js 
b/amplify/functions/downloadDocument/node_modules/@aws-sdk/client-dynamodb/dist-es/extensionConfiguration.js new file mode 100644 index 0000000..cb0ff5c --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@aws-sdk/client-dynamodb/dist-es/extensionConfiguration.js @@ -0,0 +1 @@ +export {}; diff --git a/amplify/functions/downloadDocument/node_modules/@aws-sdk/client-dynamodb/dist-es/index.js b/amplify/functions/downloadDocument/node_modules/@aws-sdk/client-dynamodb/dist-es/index.js new file mode 100644 index 0000000..8bd6d9e --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@aws-sdk/client-dynamodb/dist-es/index.js @@ -0,0 +1,7 @@ +export * from "./DynamoDBClient"; +export * from "./DynamoDB"; +export * from "./commands"; +export * from "./pagination"; +export * from "./waiters"; +export * from "./models"; +export { DynamoDBServiceException } from "./models/DynamoDBServiceException"; diff --git a/amplify/functions/downloadDocument/node_modules/@aws-sdk/client-dynamodb/dist-es/models/DynamoDBServiceException.js b/amplify/functions/downloadDocument/node_modules/@aws-sdk/client-dynamodb/dist-es/models/DynamoDBServiceException.js new file mode 100644 index 0000000..2712903 --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@aws-sdk/client-dynamodb/dist-es/models/DynamoDBServiceException.js @@ -0,0 +1,8 @@ +import { ServiceException as __ServiceException, } from "@smithy/smithy-client"; +export { __ServiceException }; +export class DynamoDBServiceException extends __ServiceException { + constructor(options) { + super(options); + Object.setPrototypeOf(this, DynamoDBServiceException.prototype); + } +} diff --git a/amplify/functions/downloadDocument/node_modules/@aws-sdk/client-dynamodb/dist-es/models/index.js b/amplify/functions/downloadDocument/node_modules/@aws-sdk/client-dynamodb/dist-es/models/index.js new file mode 100644 index 0000000..09c5d6e --- /dev/null +++ 
b/amplify/functions/downloadDocument/node_modules/@aws-sdk/client-dynamodb/dist-es/models/index.js @@ -0,0 +1 @@ +export * from "./models_0"; diff --git a/amplify/functions/downloadDocument/node_modules/@aws-sdk/client-dynamodb/dist-es/models/models_0.js b/amplify/functions/downloadDocument/node_modules/@aws-sdk/client-dynamodb/dist-es/models/models_0.js new file mode 100644 index 0000000..2b4b364 --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@aws-sdk/client-dynamodb/dist-es/models/models_0.js @@ -0,0 +1,674 @@ +import { DynamoDBServiceException as __BaseException } from "./DynamoDBServiceException"; +export const ApproximateCreationDateTimePrecision = { + MICROSECOND: "MICROSECOND", + MILLISECOND: "MILLISECOND", +}; +export const AttributeAction = { + ADD: "ADD", + DELETE: "DELETE", + PUT: "PUT", +}; +export const ScalarAttributeType = { + B: "B", + N: "N", + S: "S", +}; +export const BackupStatus = { + AVAILABLE: "AVAILABLE", + CREATING: "CREATING", + DELETED: "DELETED", +}; +export const BackupType = { + AWS_BACKUP: "AWS_BACKUP", + SYSTEM: "SYSTEM", + USER: "USER", +}; +export const BillingMode = { + PAY_PER_REQUEST: "PAY_PER_REQUEST", + PROVISIONED: "PROVISIONED", +}; +export const KeyType = { + HASH: "HASH", + RANGE: "RANGE", +}; +export const ProjectionType = { + ALL: "ALL", + INCLUDE: "INCLUDE", + KEYS_ONLY: "KEYS_ONLY", +}; +export const SSEType = { + AES256: "AES256", + KMS: "KMS", +}; +export const SSEStatus = { + DISABLED: "DISABLED", + DISABLING: "DISABLING", + ENABLED: "ENABLED", + ENABLING: "ENABLING", + UPDATING: "UPDATING", +}; +export const StreamViewType = { + KEYS_ONLY: "KEYS_ONLY", + NEW_AND_OLD_IMAGES: "NEW_AND_OLD_IMAGES", + NEW_IMAGE: "NEW_IMAGE", + OLD_IMAGE: "OLD_IMAGE", +}; +export const TimeToLiveStatus = { + DISABLED: "DISABLED", + DISABLING: "DISABLING", + ENABLED: "ENABLED", + ENABLING: "ENABLING", +}; +export class BackupInUseException extends __BaseException { + name = "BackupInUseException"; + $fault = 
"client"; + constructor(opts) { + super({ + name: "BackupInUseException", + $fault: "client", + ...opts, + }); + Object.setPrototypeOf(this, BackupInUseException.prototype); + } +} +export class BackupNotFoundException extends __BaseException { + name = "BackupNotFoundException"; + $fault = "client"; + constructor(opts) { + super({ + name: "BackupNotFoundException", + $fault: "client", + ...opts, + }); + Object.setPrototypeOf(this, BackupNotFoundException.prototype); + } +} +export const BackupTypeFilter = { + ALL: "ALL", + AWS_BACKUP: "AWS_BACKUP", + SYSTEM: "SYSTEM", + USER: "USER", +}; +export const ReturnConsumedCapacity = { + INDEXES: "INDEXES", + NONE: "NONE", + TOTAL: "TOTAL", +}; +export const ReturnValuesOnConditionCheckFailure = { + ALL_OLD: "ALL_OLD", + NONE: "NONE", +}; +export const BatchStatementErrorCodeEnum = { + AccessDenied: "AccessDenied", + ConditionalCheckFailed: "ConditionalCheckFailed", + DuplicateItem: "DuplicateItem", + InternalServerError: "InternalServerError", + ItemCollectionSizeLimitExceeded: "ItemCollectionSizeLimitExceeded", + ProvisionedThroughputExceeded: "ProvisionedThroughputExceeded", + RequestLimitExceeded: "RequestLimitExceeded", + ResourceNotFound: "ResourceNotFound", + ThrottlingError: "ThrottlingError", + TransactionConflict: "TransactionConflict", + ValidationError: "ValidationError", +}; +export class InternalServerError extends __BaseException { + name = "InternalServerError"; + $fault = "server"; + constructor(opts) { + super({ + name: "InternalServerError", + $fault: "server", + ...opts, + }); + Object.setPrototypeOf(this, InternalServerError.prototype); + } +} +export class RequestLimitExceeded extends __BaseException { + name = "RequestLimitExceeded"; + $fault = "client"; + constructor(opts) { + super({ + name: "RequestLimitExceeded", + $fault: "client", + ...opts, + }); + Object.setPrototypeOf(this, RequestLimitExceeded.prototype); + } +} +export class InvalidEndpointException extends __BaseException { + name = 
"InvalidEndpointException"; + $fault = "client"; + Message; + constructor(opts) { + super({ + name: "InvalidEndpointException", + $fault: "client", + ...opts, + }); + Object.setPrototypeOf(this, InvalidEndpointException.prototype); + this.Message = opts.Message; + } +} +export class ProvisionedThroughputExceededException extends __BaseException { + name = "ProvisionedThroughputExceededException"; + $fault = "client"; + constructor(opts) { + super({ + name: "ProvisionedThroughputExceededException", + $fault: "client", + ...opts, + }); + Object.setPrototypeOf(this, ProvisionedThroughputExceededException.prototype); + } +} +export class ResourceNotFoundException extends __BaseException { + name = "ResourceNotFoundException"; + $fault = "client"; + constructor(opts) { + super({ + name: "ResourceNotFoundException", + $fault: "client", + ...opts, + }); + Object.setPrototypeOf(this, ResourceNotFoundException.prototype); + } +} +export const ReturnItemCollectionMetrics = { + NONE: "NONE", + SIZE: "SIZE", +}; +export class ItemCollectionSizeLimitExceededException extends __BaseException { + name = "ItemCollectionSizeLimitExceededException"; + $fault = "client"; + constructor(opts) { + super({ + name: "ItemCollectionSizeLimitExceededException", + $fault: "client", + ...opts, + }); + Object.setPrototypeOf(this, ItemCollectionSizeLimitExceededException.prototype); + } +} +export const ComparisonOperator = { + BEGINS_WITH: "BEGINS_WITH", + BETWEEN: "BETWEEN", + CONTAINS: "CONTAINS", + EQ: "EQ", + GE: "GE", + GT: "GT", + IN: "IN", + LE: "LE", + LT: "LT", + NE: "NE", + NOT_CONTAINS: "NOT_CONTAINS", + NOT_NULL: "NOT_NULL", + NULL: "NULL", +}; +export const ConditionalOperator = { + AND: "AND", + OR: "OR", +}; +export const ContinuousBackupsStatus = { + DISABLED: "DISABLED", + ENABLED: "ENABLED", +}; +export const PointInTimeRecoveryStatus = { + DISABLED: "DISABLED", + ENABLED: "ENABLED", +}; +export class ContinuousBackupsUnavailableException extends __BaseException { + name = 
"ContinuousBackupsUnavailableException"; + $fault = "client"; + constructor(opts) { + super({ + name: "ContinuousBackupsUnavailableException", + $fault: "client", + ...opts, + }); + Object.setPrototypeOf(this, ContinuousBackupsUnavailableException.prototype); + } +} +export const ContributorInsightsAction = { + DISABLE: "DISABLE", + ENABLE: "ENABLE", +}; +export const ContributorInsightsStatus = { + DISABLED: "DISABLED", + DISABLING: "DISABLING", + ENABLED: "ENABLED", + ENABLING: "ENABLING", + FAILED: "FAILED", +}; +export class LimitExceededException extends __BaseException { + name = "LimitExceededException"; + $fault = "client"; + constructor(opts) { + super({ + name: "LimitExceededException", + $fault: "client", + ...opts, + }); + Object.setPrototypeOf(this, LimitExceededException.prototype); + } +} +export class TableInUseException extends __BaseException { + name = "TableInUseException"; + $fault = "client"; + constructor(opts) { + super({ + name: "TableInUseException", + $fault: "client", + ...opts, + }); + Object.setPrototypeOf(this, TableInUseException.prototype); + } +} +export class TableNotFoundException extends __BaseException { + name = "TableNotFoundException"; + $fault = "client"; + constructor(opts) { + super({ + name: "TableNotFoundException", + $fault: "client", + ...opts, + }); + Object.setPrototypeOf(this, TableNotFoundException.prototype); + } +} +export const GlobalTableStatus = { + ACTIVE: "ACTIVE", + CREATING: "CREATING", + DELETING: "DELETING", + UPDATING: "UPDATING", +}; +export const IndexStatus = { + ACTIVE: "ACTIVE", + CREATING: "CREATING", + DELETING: "DELETING", + UPDATING: "UPDATING", +}; +export const ReplicaStatus = { + ACTIVE: "ACTIVE", + CREATING: "CREATING", + CREATION_FAILED: "CREATION_FAILED", + DELETING: "DELETING", + INACCESSIBLE_ENCRYPTION_CREDENTIALS: "INACCESSIBLE_ENCRYPTION_CREDENTIALS", + REGION_DISABLED: "REGION_DISABLED", + UPDATING: "UPDATING", +}; +export const TableClass = { + STANDARD: "STANDARD", + 
STANDARD_INFREQUENT_ACCESS: "STANDARD_INFREQUENT_ACCESS", +}; +export const TableStatus = { + ACTIVE: "ACTIVE", + ARCHIVED: "ARCHIVED", + ARCHIVING: "ARCHIVING", + CREATING: "CREATING", + DELETING: "DELETING", + INACCESSIBLE_ENCRYPTION_CREDENTIALS: "INACCESSIBLE_ENCRYPTION_CREDENTIALS", + UPDATING: "UPDATING", +}; +export class GlobalTableAlreadyExistsException extends __BaseException { + name = "GlobalTableAlreadyExistsException"; + $fault = "client"; + constructor(opts) { + super({ + name: "GlobalTableAlreadyExistsException", + $fault: "client", + ...opts, + }); + Object.setPrototypeOf(this, GlobalTableAlreadyExistsException.prototype); + } +} +export const MultiRegionConsistency = { + EVENTUAL: "EVENTUAL", + STRONG: "STRONG", +}; +export class ResourceInUseException extends __BaseException { + name = "ResourceInUseException"; + $fault = "client"; + constructor(opts) { + super({ + name: "ResourceInUseException", + $fault: "client", + ...opts, + }); + Object.setPrototypeOf(this, ResourceInUseException.prototype); + } +} +export const ReturnValue = { + ALL_NEW: "ALL_NEW", + ALL_OLD: "ALL_OLD", + NONE: "NONE", + UPDATED_NEW: "UPDATED_NEW", + UPDATED_OLD: "UPDATED_OLD", +}; +export class ReplicatedWriteConflictException extends __BaseException { + name = "ReplicatedWriteConflictException"; + $fault = "client"; + constructor(opts) { + super({ + name: "ReplicatedWriteConflictException", + $fault: "client", + ...opts, + }); + Object.setPrototypeOf(this, ReplicatedWriteConflictException.prototype); + } +} +export class TransactionConflictException extends __BaseException { + name = "TransactionConflictException"; + $fault = "client"; + constructor(opts) { + super({ + name: "TransactionConflictException", + $fault: "client", + ...opts, + }); + Object.setPrototypeOf(this, TransactionConflictException.prototype); + } +} +export class PolicyNotFoundException extends __BaseException { + name = "PolicyNotFoundException"; + $fault = "client"; + constructor(opts) { + super({ + 
name: "PolicyNotFoundException", + $fault: "client", + ...opts, + }); + Object.setPrototypeOf(this, PolicyNotFoundException.prototype); + } +} +export const ExportFormat = { + DYNAMODB_JSON: "DYNAMODB_JSON", + ION: "ION", +}; +export const ExportStatus = { + COMPLETED: "COMPLETED", + FAILED: "FAILED", + IN_PROGRESS: "IN_PROGRESS", +}; +export const ExportType = { + FULL_EXPORT: "FULL_EXPORT", + INCREMENTAL_EXPORT: "INCREMENTAL_EXPORT", +}; +export const ExportViewType = { + NEW_AND_OLD_IMAGES: "NEW_AND_OLD_IMAGES", + NEW_IMAGE: "NEW_IMAGE", +}; +export const S3SseAlgorithm = { + AES256: "AES256", + KMS: "KMS", +}; +export class ExportNotFoundException extends __BaseException { + name = "ExportNotFoundException"; + $fault = "client"; + constructor(opts) { + super({ + name: "ExportNotFoundException", + $fault: "client", + ...opts, + }); + Object.setPrototypeOf(this, ExportNotFoundException.prototype); + } +} +export class GlobalTableNotFoundException extends __BaseException { + name = "GlobalTableNotFoundException"; + $fault = "client"; + constructor(opts) { + super({ + name: "GlobalTableNotFoundException", + $fault: "client", + ...opts, + }); + Object.setPrototypeOf(this, GlobalTableNotFoundException.prototype); + } +} +export const ImportStatus = { + CANCELLED: "CANCELLED", + CANCELLING: "CANCELLING", + COMPLETED: "COMPLETED", + FAILED: "FAILED", + IN_PROGRESS: "IN_PROGRESS", +}; +export const InputCompressionType = { + GZIP: "GZIP", + NONE: "NONE", + ZSTD: "ZSTD", +}; +export const InputFormat = { + CSV: "CSV", + DYNAMODB_JSON: "DYNAMODB_JSON", + ION: "ION", +}; +export class ImportNotFoundException extends __BaseException { + name = "ImportNotFoundException"; + $fault = "client"; + constructor(opts) { + super({ + name: "ImportNotFoundException", + $fault: "client", + ...opts, + }); + Object.setPrototypeOf(this, ImportNotFoundException.prototype); + } +} +export const DestinationStatus = { + ACTIVE: "ACTIVE", + DISABLED: "DISABLED", + DISABLING: "DISABLING", + 
ENABLE_FAILED: "ENABLE_FAILED", + ENABLING: "ENABLING", + UPDATING: "UPDATING", +}; +export class DuplicateItemException extends __BaseException { + name = "DuplicateItemException"; + $fault = "client"; + constructor(opts) { + super({ + name: "DuplicateItemException", + $fault: "client", + ...opts, + }); + Object.setPrototypeOf(this, DuplicateItemException.prototype); + } +} +export class IdempotentParameterMismatchException extends __BaseException { + name = "IdempotentParameterMismatchException"; + $fault = "client"; + Message; + constructor(opts) { + super({ + name: "IdempotentParameterMismatchException", + $fault: "client", + ...opts, + }); + Object.setPrototypeOf(this, IdempotentParameterMismatchException.prototype); + this.Message = opts.Message; + } +} +export class TransactionInProgressException extends __BaseException { + name = "TransactionInProgressException"; + $fault = "client"; + Message; + constructor(opts) { + super({ + name: "TransactionInProgressException", + $fault: "client", + ...opts, + }); + Object.setPrototypeOf(this, TransactionInProgressException.prototype); + this.Message = opts.Message; + } +} +export class ExportConflictException extends __BaseException { + name = "ExportConflictException"; + $fault = "client"; + constructor(opts) { + super({ + name: "ExportConflictException", + $fault: "client", + ...opts, + }); + Object.setPrototypeOf(this, ExportConflictException.prototype); + } +} +export class InvalidExportTimeException extends __BaseException { + name = "InvalidExportTimeException"; + $fault = "client"; + constructor(opts) { + super({ + name: "InvalidExportTimeException", + $fault: "client", + ...opts, + }); + Object.setPrototypeOf(this, InvalidExportTimeException.prototype); + } +} +export class PointInTimeRecoveryUnavailableException extends __BaseException { + name = "PointInTimeRecoveryUnavailableException"; + $fault = "client"; + constructor(opts) { + super({ + name: "PointInTimeRecoveryUnavailableException", + $fault: 
"client", + ...opts, + }); + Object.setPrototypeOf(this, PointInTimeRecoveryUnavailableException.prototype); + } +} +export class ImportConflictException extends __BaseException { + name = "ImportConflictException"; + $fault = "client"; + constructor(opts) { + super({ + name: "ImportConflictException", + $fault: "client", + ...opts, + }); + Object.setPrototypeOf(this, ImportConflictException.prototype); + } +} +export const Select = { + ALL_ATTRIBUTES: "ALL_ATTRIBUTES", + ALL_PROJECTED_ATTRIBUTES: "ALL_PROJECTED_ATTRIBUTES", + COUNT: "COUNT", + SPECIFIC_ATTRIBUTES: "SPECIFIC_ATTRIBUTES", +}; +export class TableAlreadyExistsException extends __BaseException { + name = "TableAlreadyExistsException"; + $fault = "client"; + constructor(opts) { + super({ + name: "TableAlreadyExistsException", + $fault: "client", + ...opts, + }); + Object.setPrototypeOf(this, TableAlreadyExistsException.prototype); + } +} +export class InvalidRestoreTimeException extends __BaseException { + name = "InvalidRestoreTimeException"; + $fault = "client"; + constructor(opts) { + super({ + name: "InvalidRestoreTimeException", + $fault: "client", + ...opts, + }); + Object.setPrototypeOf(this, InvalidRestoreTimeException.prototype); + } +} +export class ReplicaAlreadyExistsException extends __BaseException { + name = "ReplicaAlreadyExistsException"; + $fault = "client"; + constructor(opts) { + super({ + name: "ReplicaAlreadyExistsException", + $fault: "client", + ...opts, + }); + Object.setPrototypeOf(this, ReplicaAlreadyExistsException.prototype); + } +} +export class ReplicaNotFoundException extends __BaseException { + name = "ReplicaNotFoundException"; + $fault = "client"; + constructor(opts) { + super({ + name: "ReplicaNotFoundException", + $fault: "client", + ...opts, + }); + Object.setPrototypeOf(this, ReplicaNotFoundException.prototype); + } +} +export class IndexNotFoundException extends __BaseException { + name = "IndexNotFoundException"; + $fault = "client"; + constructor(opts) { + 
super({ + name: "IndexNotFoundException", + $fault: "client", + ...opts, + }); + Object.setPrototypeOf(this, IndexNotFoundException.prototype); + } +} +export var AttributeValue; +(function (AttributeValue) { + AttributeValue.visit = (value, visitor) => { + if (value.S !== undefined) + return visitor.S(value.S); + if (value.N !== undefined) + return visitor.N(value.N); + if (value.B !== undefined) + return visitor.B(value.B); + if (value.SS !== undefined) + return visitor.SS(value.SS); + if (value.NS !== undefined) + return visitor.NS(value.NS); + if (value.BS !== undefined) + return visitor.BS(value.BS); + if (value.M !== undefined) + return visitor.M(value.M); + if (value.L !== undefined) + return visitor.L(value.L); + if (value.NULL !== undefined) + return visitor.NULL(value.NULL); + if (value.BOOL !== undefined) + return visitor.BOOL(value.BOOL); + return visitor._(value.$unknown[0], value.$unknown[1]); + }; +})(AttributeValue || (AttributeValue = {})); +export class ConditionalCheckFailedException extends __BaseException { + name = "ConditionalCheckFailedException"; + $fault = "client"; + Item; + constructor(opts) { + super({ + name: "ConditionalCheckFailedException", + $fault: "client", + ...opts, + }); + Object.setPrototypeOf(this, ConditionalCheckFailedException.prototype); + this.Item = opts.Item; + } +} +export class TransactionCanceledException extends __BaseException { + name = "TransactionCanceledException"; + $fault = "client"; + Message; + CancellationReasons; + constructor(opts) { + super({ + name: "TransactionCanceledException", + $fault: "client", + ...opts, + }); + Object.setPrototypeOf(this, TransactionCanceledException.prototype); + this.Message = opts.Message; + this.CancellationReasons = opts.CancellationReasons; + } +} diff --git a/amplify/functions/downloadDocument/node_modules/@aws-sdk/client-dynamodb/dist-es/pagination/Interfaces.js b/amplify/functions/downloadDocument/node_modules/@aws-sdk/client-dynamodb/dist-es/pagination/Interfaces.js 
new file mode 100644 index 0000000..cb0ff5c --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@aws-sdk/client-dynamodb/dist-es/pagination/Interfaces.js @@ -0,0 +1 @@ +export {}; diff --git a/amplify/functions/downloadDocument/node_modules/@aws-sdk/client-dynamodb/dist-es/pagination/ListContributorInsightsPaginator.js b/amplify/functions/downloadDocument/node_modules/@aws-sdk/client-dynamodb/dist-es/pagination/ListContributorInsightsPaginator.js new file mode 100644 index 0000000..23bb95c --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@aws-sdk/client-dynamodb/dist-es/pagination/ListContributorInsightsPaginator.js @@ -0,0 +1,4 @@ +import { createPaginator } from "@smithy/core"; +import { ListContributorInsightsCommand, } from "../commands/ListContributorInsightsCommand"; +import { DynamoDBClient } from "../DynamoDBClient"; +export const paginateListContributorInsights = createPaginator(DynamoDBClient, ListContributorInsightsCommand, "NextToken", "NextToken", "MaxResults"); diff --git a/amplify/functions/downloadDocument/node_modules/@aws-sdk/client-dynamodb/dist-es/pagination/ListExportsPaginator.js b/amplify/functions/downloadDocument/node_modules/@aws-sdk/client-dynamodb/dist-es/pagination/ListExportsPaginator.js new file mode 100644 index 0000000..e252e7f --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@aws-sdk/client-dynamodb/dist-es/pagination/ListExportsPaginator.js @@ -0,0 +1,4 @@ +import { createPaginator } from "@smithy/core"; +import { ListExportsCommand } from "../commands/ListExportsCommand"; +import { DynamoDBClient } from "../DynamoDBClient"; +export const paginateListExports = createPaginator(DynamoDBClient, ListExportsCommand, "NextToken", "NextToken", "MaxResults"); diff --git a/amplify/functions/downloadDocument/node_modules/@aws-sdk/client-dynamodb/dist-es/pagination/ListImportsPaginator.js 
b/amplify/functions/downloadDocument/node_modules/@aws-sdk/client-dynamodb/dist-es/pagination/ListImportsPaginator.js new file mode 100644 index 0000000..c3fe323 --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@aws-sdk/client-dynamodb/dist-es/pagination/ListImportsPaginator.js @@ -0,0 +1,4 @@ +import { createPaginator } from "@smithy/core"; +import { ListImportsCommand } from "../commands/ListImportsCommand"; +import { DynamoDBClient } from "../DynamoDBClient"; +export const paginateListImports = createPaginator(DynamoDBClient, ListImportsCommand, "NextToken", "NextToken", "PageSize"); diff --git a/amplify/functions/downloadDocument/node_modules/@aws-sdk/client-dynamodb/dist-es/pagination/ListTablesPaginator.js b/amplify/functions/downloadDocument/node_modules/@aws-sdk/client-dynamodb/dist-es/pagination/ListTablesPaginator.js new file mode 100644 index 0000000..979f3f6 --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@aws-sdk/client-dynamodb/dist-es/pagination/ListTablesPaginator.js @@ -0,0 +1,4 @@ +import { createPaginator } from "@smithy/core"; +import { ListTablesCommand } from "../commands/ListTablesCommand"; +import { DynamoDBClient } from "../DynamoDBClient"; +export const paginateListTables = createPaginator(DynamoDBClient, ListTablesCommand, "ExclusiveStartTableName", "LastEvaluatedTableName", "Limit"); diff --git a/amplify/functions/downloadDocument/node_modules/@aws-sdk/client-dynamodb/dist-es/pagination/QueryPaginator.js b/amplify/functions/downloadDocument/node_modules/@aws-sdk/client-dynamodb/dist-es/pagination/QueryPaginator.js new file mode 100644 index 0000000..4fcc17d --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@aws-sdk/client-dynamodb/dist-es/pagination/QueryPaginator.js @@ -0,0 +1,4 @@ +import { createPaginator } from "@smithy/core"; +import { QueryCommand } from "../commands/QueryCommand"; +import { DynamoDBClient } from "../DynamoDBClient"; +export const paginateQuery = 
createPaginator(DynamoDBClient, QueryCommand, "ExclusiveStartKey", "LastEvaluatedKey", "Limit"); diff --git a/amplify/functions/downloadDocument/node_modules/@aws-sdk/client-dynamodb/dist-es/pagination/ScanPaginator.js b/amplify/functions/downloadDocument/node_modules/@aws-sdk/client-dynamodb/dist-es/pagination/ScanPaginator.js new file mode 100644 index 0000000..b95b746 --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@aws-sdk/client-dynamodb/dist-es/pagination/ScanPaginator.js @@ -0,0 +1,4 @@ +import { createPaginator } from "@smithy/core"; +import { ScanCommand } from "../commands/ScanCommand"; +import { DynamoDBClient } from "../DynamoDBClient"; +export const paginateScan = createPaginator(DynamoDBClient, ScanCommand, "ExclusiveStartKey", "LastEvaluatedKey", "Limit"); diff --git a/amplify/functions/downloadDocument/node_modules/@aws-sdk/client-dynamodb/dist-es/pagination/index.js b/amplify/functions/downloadDocument/node_modules/@aws-sdk/client-dynamodb/dist-es/pagination/index.js new file mode 100644 index 0000000..a6dfcd0 --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@aws-sdk/client-dynamodb/dist-es/pagination/index.js @@ -0,0 +1,7 @@ +export * from "./Interfaces"; +export * from "./ListContributorInsightsPaginator"; +export * from "./ListExportsPaginator"; +export * from "./ListImportsPaginator"; +export * from "./ListTablesPaginator"; +export * from "./QueryPaginator"; +export * from "./ScanPaginator"; diff --git a/amplify/functions/downloadDocument/node_modules/@aws-sdk/client-dynamodb/dist-es/protocols/Aws_json1_0.js b/amplify/functions/downloadDocument/node_modules/@aws-sdk/client-dynamodb/dist-es/protocols/Aws_json1_0.js new file mode 100644 index 0000000..d6c7135 --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@aws-sdk/client-dynamodb/dist-es/protocols/Aws_json1_0.js @@ -0,0 +1,3094 @@ +import { awsExpectUnion as __expectUnion, loadRestJsonErrorCode, parseJsonBody as parseBody, 
parseJsonErrorBody as parseErrorBody, } from "@aws-sdk/core"; +import { HttpRequest as __HttpRequest } from "@smithy/protocol-http"; +import { _json, collectBody, decorateServiceException as __decorateServiceException, expectBoolean as __expectBoolean, expectInt32 as __expectInt32, expectLong as __expectLong, expectNonNull as __expectNonNull, expectNumber as __expectNumber, expectString as __expectString, limitedParseDouble as __limitedParseDouble, parseEpochTimestamp as __parseEpochTimestamp, serializeFloat as __serializeFloat, take, withBaseException, } from "@smithy/smithy-client"; +import { v4 as generateIdempotencyToken } from "uuid"; +import { DynamoDBServiceException as __BaseException } from "../models/DynamoDBServiceException"; +import { AttributeValue, BackupInUseException, BackupNotFoundException, ConditionalCheckFailedException, ContinuousBackupsUnavailableException, DuplicateItemException, ExportConflictException, ExportNotFoundException, GlobalTableAlreadyExistsException, GlobalTableNotFoundException, IdempotentParameterMismatchException, ImportConflictException, ImportNotFoundException, IndexNotFoundException, InternalServerError, InvalidEndpointException, InvalidExportTimeException, InvalidRestoreTimeException, ItemCollectionSizeLimitExceededException, LimitExceededException, PointInTimeRecoveryUnavailableException, PolicyNotFoundException, ProvisionedThroughputExceededException, ReplicaAlreadyExistsException, ReplicaNotFoundException, ReplicatedWriteConflictException, RequestLimitExceeded, ResourceInUseException, ResourceNotFoundException, TableAlreadyExistsException, TableInUseException, TableNotFoundException, TransactionCanceledException, TransactionConflictException, TransactionInProgressException, } from "../models/models_0"; +export const se_BatchExecuteStatementCommand = async (input, context) => { + const headers = sharedHeaders("BatchExecuteStatement"); + let body; + body = JSON.stringify(se_BatchExecuteStatementInput(input, context)); + 
return buildHttpRpcRequest(context, headers, "/", undefined, body); +}; +export const se_BatchGetItemCommand = async (input, context) => { + const headers = sharedHeaders("BatchGetItem"); + let body; + body = JSON.stringify(se_BatchGetItemInput(input, context)); + return buildHttpRpcRequest(context, headers, "/", undefined, body); +}; +export const se_BatchWriteItemCommand = async (input, context) => { + const headers = sharedHeaders("BatchWriteItem"); + let body; + body = JSON.stringify(se_BatchWriteItemInput(input, context)); + return buildHttpRpcRequest(context, headers, "/", undefined, body); +}; +export const se_CreateBackupCommand = async (input, context) => { + const headers = sharedHeaders("CreateBackup"); + let body; + body = JSON.stringify(_json(input)); + return buildHttpRpcRequest(context, headers, "/", undefined, body); +}; +export const se_CreateGlobalTableCommand = async (input, context) => { + const headers = sharedHeaders("CreateGlobalTable"); + let body; + body = JSON.stringify(_json(input)); + return buildHttpRpcRequest(context, headers, "/", undefined, body); +}; +export const se_CreateTableCommand = async (input, context) => { + const headers = sharedHeaders("CreateTable"); + let body; + body = JSON.stringify(_json(input)); + return buildHttpRpcRequest(context, headers, "/", undefined, body); +}; +export const se_DeleteBackupCommand = async (input, context) => { + const headers = sharedHeaders("DeleteBackup"); + let body; + body = JSON.stringify(_json(input)); + return buildHttpRpcRequest(context, headers, "/", undefined, body); +}; +export const se_DeleteItemCommand = async (input, context) => { + const headers = sharedHeaders("DeleteItem"); + let body; + body = JSON.stringify(se_DeleteItemInput(input, context)); + return buildHttpRpcRequest(context, headers, "/", undefined, body); +}; +export const se_DeleteResourcePolicyCommand = async (input, context) => { + const headers = sharedHeaders("DeleteResourcePolicy"); + let body; + body = 
JSON.stringify(_json(input)); + return buildHttpRpcRequest(context, headers, "/", undefined, body); +}; +export const se_DeleteTableCommand = async (input, context) => { + const headers = sharedHeaders("DeleteTable"); + let body; + body = JSON.stringify(_json(input)); + return buildHttpRpcRequest(context, headers, "/", undefined, body); +}; +export const se_DescribeBackupCommand = async (input, context) => { + const headers = sharedHeaders("DescribeBackup"); + let body; + body = JSON.stringify(_json(input)); + return buildHttpRpcRequest(context, headers, "/", undefined, body); +}; +export const se_DescribeContinuousBackupsCommand = async (input, context) => { + const headers = sharedHeaders("DescribeContinuousBackups"); + let body; + body = JSON.stringify(_json(input)); + return buildHttpRpcRequest(context, headers, "/", undefined, body); +}; +export const se_DescribeContributorInsightsCommand = async (input, context) => { + const headers = sharedHeaders("DescribeContributorInsights"); + let body; + body = JSON.stringify(_json(input)); + return buildHttpRpcRequest(context, headers, "/", undefined, body); +}; +export const se_DescribeEndpointsCommand = async (input, context) => { + const headers = sharedHeaders("DescribeEndpoints"); + let body; + body = JSON.stringify(_json(input)); + return buildHttpRpcRequest(context, headers, "/", undefined, body); +}; +export const se_DescribeExportCommand = async (input, context) => { + const headers = sharedHeaders("DescribeExport"); + let body; + body = JSON.stringify(_json(input)); + return buildHttpRpcRequest(context, headers, "/", undefined, body); +}; +export const se_DescribeGlobalTableCommand = async (input, context) => { + const headers = sharedHeaders("DescribeGlobalTable"); + let body; + body = JSON.stringify(_json(input)); + return buildHttpRpcRequest(context, headers, "/", undefined, body); +}; +export const se_DescribeGlobalTableSettingsCommand = async (input, context) => { + const headers = 
sharedHeaders("DescribeGlobalTableSettings"); + let body; + body = JSON.stringify(_json(input)); + return buildHttpRpcRequest(context, headers, "/", undefined, body); +}; +export const se_DescribeImportCommand = async (input, context) => { + const headers = sharedHeaders("DescribeImport"); + let body; + body = JSON.stringify(_json(input)); + return buildHttpRpcRequest(context, headers, "/", undefined, body); +}; +export const se_DescribeKinesisStreamingDestinationCommand = async (input, context) => { + const headers = sharedHeaders("DescribeKinesisStreamingDestination"); + let body; + body = JSON.stringify(_json(input)); + return buildHttpRpcRequest(context, headers, "/", undefined, body); +}; +export const se_DescribeLimitsCommand = async (input, context) => { + const headers = sharedHeaders("DescribeLimits"); + let body; + body = JSON.stringify(_json(input)); + return buildHttpRpcRequest(context, headers, "/", undefined, body); +}; +export const se_DescribeTableCommand = async (input, context) => { + const headers = sharedHeaders("DescribeTable"); + let body; + body = JSON.stringify(_json(input)); + return buildHttpRpcRequest(context, headers, "/", undefined, body); +}; +export const se_DescribeTableReplicaAutoScalingCommand = async (input, context) => { + const headers = sharedHeaders("DescribeTableReplicaAutoScaling"); + let body; + body = JSON.stringify(_json(input)); + return buildHttpRpcRequest(context, headers, "/", undefined, body); +}; +export const se_DescribeTimeToLiveCommand = async (input, context) => { + const headers = sharedHeaders("DescribeTimeToLive"); + let body; + body = JSON.stringify(_json(input)); + return buildHttpRpcRequest(context, headers, "/", undefined, body); +}; +export const se_DisableKinesisStreamingDestinationCommand = async (input, context) => { + const headers = sharedHeaders("DisableKinesisStreamingDestination"); + let body; + body = JSON.stringify(_json(input)); + return buildHttpRpcRequest(context, headers, "/", undefined, 
body); +}; +export const se_EnableKinesisStreamingDestinationCommand = async (input, context) => { + const headers = sharedHeaders("EnableKinesisStreamingDestination"); + let body; + body = JSON.stringify(_json(input)); + return buildHttpRpcRequest(context, headers, "/", undefined, body); +}; +export const se_ExecuteStatementCommand = async (input, context) => { + const headers = sharedHeaders("ExecuteStatement"); + let body; + body = JSON.stringify(se_ExecuteStatementInput(input, context)); + return buildHttpRpcRequest(context, headers, "/", undefined, body); +}; +export const se_ExecuteTransactionCommand = async (input, context) => { + const headers = sharedHeaders("ExecuteTransaction"); + let body; + body = JSON.stringify(se_ExecuteTransactionInput(input, context)); + return buildHttpRpcRequest(context, headers, "/", undefined, body); +}; +export const se_ExportTableToPointInTimeCommand = async (input, context) => { + const headers = sharedHeaders("ExportTableToPointInTime"); + let body; + body = JSON.stringify(se_ExportTableToPointInTimeInput(input, context)); + return buildHttpRpcRequest(context, headers, "/", undefined, body); +}; +export const se_GetItemCommand = async (input, context) => { + const headers = sharedHeaders("GetItem"); + let body; + body = JSON.stringify(se_GetItemInput(input, context)); + return buildHttpRpcRequest(context, headers, "/", undefined, body); +}; +export const se_GetResourcePolicyCommand = async (input, context) => { + const headers = sharedHeaders("GetResourcePolicy"); + let body; + body = JSON.stringify(_json(input)); + return buildHttpRpcRequest(context, headers, "/", undefined, body); +}; +export const se_ImportTableCommand = async (input, context) => { + const headers = sharedHeaders("ImportTable"); + let body; + body = JSON.stringify(se_ImportTableInput(input, context)); + return buildHttpRpcRequest(context, headers, "/", undefined, body); +}; +export const se_ListBackupsCommand = async (input, context) => { + const headers 
= sharedHeaders("ListBackups"); + let body; + body = JSON.stringify(se_ListBackupsInput(input, context)); + return buildHttpRpcRequest(context, headers, "/", undefined, body); +}; +export const se_ListContributorInsightsCommand = async (input, context) => { + const headers = sharedHeaders("ListContributorInsights"); + let body; + body = JSON.stringify(_json(input)); + return buildHttpRpcRequest(context, headers, "/", undefined, body); +}; +export const se_ListExportsCommand = async (input, context) => { + const headers = sharedHeaders("ListExports"); + let body; + body = JSON.stringify(_json(input)); + return buildHttpRpcRequest(context, headers, "/", undefined, body); +}; +export const se_ListGlobalTablesCommand = async (input, context) => { + const headers = sharedHeaders("ListGlobalTables"); + let body; + body = JSON.stringify(_json(input)); + return buildHttpRpcRequest(context, headers, "/", undefined, body); +}; +export const se_ListImportsCommand = async (input, context) => { + const headers = sharedHeaders("ListImports"); + let body; + body = JSON.stringify(_json(input)); + return buildHttpRpcRequest(context, headers, "/", undefined, body); +}; +export const se_ListTablesCommand = async (input, context) => { + const headers = sharedHeaders("ListTables"); + let body; + body = JSON.stringify(_json(input)); + return buildHttpRpcRequest(context, headers, "/", undefined, body); +}; +export const se_ListTagsOfResourceCommand = async (input, context) => { + const headers = sharedHeaders("ListTagsOfResource"); + let body; + body = JSON.stringify(_json(input)); + return buildHttpRpcRequest(context, headers, "/", undefined, body); +}; +export const se_PutItemCommand = async (input, context) => { + const headers = sharedHeaders("PutItem"); + let body; + body = JSON.stringify(se_PutItemInput(input, context)); + return buildHttpRpcRequest(context, headers, "/", undefined, body); +}; +export const se_PutResourcePolicyCommand = async (input, context) => { + const headers = 
sharedHeaders("PutResourcePolicy"); + let body; + body = JSON.stringify(_json(input)); + return buildHttpRpcRequest(context, headers, "/", undefined, body); +}; +export const se_QueryCommand = async (input, context) => { + const headers = sharedHeaders("Query"); + let body; + body = JSON.stringify(se_QueryInput(input, context)); + return buildHttpRpcRequest(context, headers, "/", undefined, body); +}; +export const se_RestoreTableFromBackupCommand = async (input, context) => { + const headers = sharedHeaders("RestoreTableFromBackup"); + let body; + body = JSON.stringify(_json(input)); + return buildHttpRpcRequest(context, headers, "/", undefined, body); +}; +export const se_RestoreTableToPointInTimeCommand = async (input, context) => { + const headers = sharedHeaders("RestoreTableToPointInTime"); + let body; + body = JSON.stringify(se_RestoreTableToPointInTimeInput(input, context)); + return buildHttpRpcRequest(context, headers, "/", undefined, body); +}; +export const se_ScanCommand = async (input, context) => { + const headers = sharedHeaders("Scan"); + let body; + body = JSON.stringify(se_ScanInput(input, context)); + return buildHttpRpcRequest(context, headers, "/", undefined, body); +}; +export const se_TagResourceCommand = async (input, context) => { + const headers = sharedHeaders("TagResource"); + let body; + body = JSON.stringify(_json(input)); + return buildHttpRpcRequest(context, headers, "/", undefined, body); +}; +export const se_TransactGetItemsCommand = async (input, context) => { + const headers = sharedHeaders("TransactGetItems"); + let body; + body = JSON.stringify(se_TransactGetItemsInput(input, context)); + return buildHttpRpcRequest(context, headers, "/", undefined, body); +}; +export const se_TransactWriteItemsCommand = async (input, context) => { + const headers = sharedHeaders("TransactWriteItems"); + let body; + body = JSON.stringify(se_TransactWriteItemsInput(input, context)); + return buildHttpRpcRequest(context, headers, "/", undefined, 
body); +}; +export const se_UntagResourceCommand = async (input, context) => { + const headers = sharedHeaders("UntagResource"); + let body; + body = JSON.stringify(_json(input)); + return buildHttpRpcRequest(context, headers, "/", undefined, body); +}; +export const se_UpdateContinuousBackupsCommand = async (input, context) => { + const headers = sharedHeaders("UpdateContinuousBackups"); + let body; + body = JSON.stringify(_json(input)); + return buildHttpRpcRequest(context, headers, "/", undefined, body); +}; +export const se_UpdateContributorInsightsCommand = async (input, context) => { + const headers = sharedHeaders("UpdateContributorInsights"); + let body; + body = JSON.stringify(_json(input)); + return buildHttpRpcRequest(context, headers, "/", undefined, body); +}; +export const se_UpdateGlobalTableCommand = async (input, context) => { + const headers = sharedHeaders("UpdateGlobalTable"); + let body; + body = JSON.stringify(_json(input)); + return buildHttpRpcRequest(context, headers, "/", undefined, body); +}; +export const se_UpdateGlobalTableSettingsCommand = async (input, context) => { + const headers = sharedHeaders("UpdateGlobalTableSettings"); + let body; + body = JSON.stringify(se_UpdateGlobalTableSettingsInput(input, context)); + return buildHttpRpcRequest(context, headers, "/", undefined, body); +}; +export const se_UpdateItemCommand = async (input, context) => { + const headers = sharedHeaders("UpdateItem"); + let body; + body = JSON.stringify(se_UpdateItemInput(input, context)); + return buildHttpRpcRequest(context, headers, "/", undefined, body); +}; +export const se_UpdateKinesisStreamingDestinationCommand = async (input, context) => { + const headers = sharedHeaders("UpdateKinesisStreamingDestination"); + let body; + body = JSON.stringify(_json(input)); + return buildHttpRpcRequest(context, headers, "/", undefined, body); +}; +export const se_UpdateTableCommand = async (input, context) => { + const headers = sharedHeaders("UpdateTable"); + 
let body; + body = JSON.stringify(_json(input)); + return buildHttpRpcRequest(context, headers, "/", undefined, body); +}; +export const se_UpdateTableReplicaAutoScalingCommand = async (input, context) => { + const headers = sharedHeaders("UpdateTableReplicaAutoScaling"); + let body; + body = JSON.stringify(se_UpdateTableReplicaAutoScalingInput(input, context)); + return buildHttpRpcRequest(context, headers, "/", undefined, body); +}; +export const se_UpdateTimeToLiveCommand = async (input, context) => { + const headers = sharedHeaders("UpdateTimeToLive"); + let body; + body = JSON.stringify(_json(input)); + return buildHttpRpcRequest(context, headers, "/", undefined, body); +}; +export const de_BatchExecuteStatementCommand = async (output, context) => { + if (output.statusCode >= 300) { + return de_CommandError(output, context); + } + const data = await parseBody(output.body, context); + let contents = {}; + contents = de_BatchExecuteStatementOutput(data, context); + const response = { + $metadata: deserializeMetadata(output), + ...contents, + }; + return response; +}; +export const de_BatchGetItemCommand = async (output, context) => { + if (output.statusCode >= 300) { + return de_CommandError(output, context); + } + const data = await parseBody(output.body, context); + let contents = {}; + contents = de_BatchGetItemOutput(data, context); + const response = { + $metadata: deserializeMetadata(output), + ...contents, + }; + return response; +}; +export const de_BatchWriteItemCommand = async (output, context) => { + if (output.statusCode >= 300) { + return de_CommandError(output, context); + } + const data = await parseBody(output.body, context); + let contents = {}; + contents = de_BatchWriteItemOutput(data, context); + const response = { + $metadata: deserializeMetadata(output), + ...contents, + }; + return response; +}; +export const de_CreateBackupCommand = async (output, context) => { + if (output.statusCode >= 300) { + return de_CommandError(output, context); + 
} + const data = await parseBody(output.body, context); + let contents = {}; + contents = de_CreateBackupOutput(data, context); + const response = { + $metadata: deserializeMetadata(output), + ...contents, + }; + return response; +}; +export const de_CreateGlobalTableCommand = async (output, context) => { + if (output.statusCode >= 300) { + return de_CommandError(output, context); + } + const data = await parseBody(output.body, context); + let contents = {}; + contents = de_CreateGlobalTableOutput(data, context); + const response = { + $metadata: deserializeMetadata(output), + ...contents, + }; + return response; +}; +export const de_CreateTableCommand = async (output, context) => { + if (output.statusCode >= 300) { + return de_CommandError(output, context); + } + const data = await parseBody(output.body, context); + let contents = {}; + contents = de_CreateTableOutput(data, context); + const response = { + $metadata: deserializeMetadata(output), + ...contents, + }; + return response; +}; +export const de_DeleteBackupCommand = async (output, context) => { + if (output.statusCode >= 300) { + return de_CommandError(output, context); + } + const data = await parseBody(output.body, context); + let contents = {}; + contents = de_DeleteBackupOutput(data, context); + const response = { + $metadata: deserializeMetadata(output), + ...contents, + }; + return response; +}; +export const de_DeleteItemCommand = async (output, context) => { + if (output.statusCode >= 300) { + return de_CommandError(output, context); + } + const data = await parseBody(output.body, context); + let contents = {}; + contents = de_DeleteItemOutput(data, context); + const response = { + $metadata: deserializeMetadata(output), + ...contents, + }; + return response; +}; +export const de_DeleteResourcePolicyCommand = async (output, context) => { + if (output.statusCode >= 300) { + return de_CommandError(output, context); + } + const data = await parseBody(output.body, context); + let contents = {}; + 
contents = _json(data); + const response = { + $metadata: deserializeMetadata(output), + ...contents, + }; + return response; +}; +export const de_DeleteTableCommand = async (output, context) => { + if (output.statusCode >= 300) { + return de_CommandError(output, context); + } + const data = await parseBody(output.body, context); + let contents = {}; + contents = de_DeleteTableOutput(data, context); + const response = { + $metadata: deserializeMetadata(output), + ...contents, + }; + return response; +}; +export const de_DescribeBackupCommand = async (output, context) => { + if (output.statusCode >= 300) { + return de_CommandError(output, context); + } + const data = await parseBody(output.body, context); + let contents = {}; + contents = de_DescribeBackupOutput(data, context); + const response = { + $metadata: deserializeMetadata(output), + ...contents, + }; + return response; +}; +export const de_DescribeContinuousBackupsCommand = async (output, context) => { + if (output.statusCode >= 300) { + return de_CommandError(output, context); + } + const data = await parseBody(output.body, context); + let contents = {}; + contents = de_DescribeContinuousBackupsOutput(data, context); + const response = { + $metadata: deserializeMetadata(output), + ...contents, + }; + return response; +}; +export const de_DescribeContributorInsightsCommand = async (output, context) => { + if (output.statusCode >= 300) { + return de_CommandError(output, context); + } + const data = await parseBody(output.body, context); + let contents = {}; + contents = de_DescribeContributorInsightsOutput(data, context); + const response = { + $metadata: deserializeMetadata(output), + ...contents, + }; + return response; +}; +export const de_DescribeEndpointsCommand = async (output, context) => { + if (output.statusCode >= 300) { + return de_CommandError(output, context); + } + const data = await parseBody(output.body, context); + let contents = {}; + contents = _json(data); + const response = { + 
$metadata: deserializeMetadata(output), + ...contents, + }; + return response; +}; +export const de_DescribeExportCommand = async (output, context) => { + if (output.statusCode >= 300) { + return de_CommandError(output, context); + } + const data = await parseBody(output.body, context); + let contents = {}; + contents = de_DescribeExportOutput(data, context); + const response = { + $metadata: deserializeMetadata(output), + ...contents, + }; + return response; +}; +export const de_DescribeGlobalTableCommand = async (output, context) => { + if (output.statusCode >= 300) { + return de_CommandError(output, context); + } + const data = await parseBody(output.body, context); + let contents = {}; + contents = de_DescribeGlobalTableOutput(data, context); + const response = { + $metadata: deserializeMetadata(output), + ...contents, + }; + return response; +}; +export const de_DescribeGlobalTableSettingsCommand = async (output, context) => { + if (output.statusCode >= 300) { + return de_CommandError(output, context); + } + const data = await parseBody(output.body, context); + let contents = {}; + contents = de_DescribeGlobalTableSettingsOutput(data, context); + const response = { + $metadata: deserializeMetadata(output), + ...contents, + }; + return response; +}; +export const de_DescribeImportCommand = async (output, context) => { + if (output.statusCode >= 300) { + return de_CommandError(output, context); + } + const data = await parseBody(output.body, context); + let contents = {}; + contents = de_DescribeImportOutput(data, context); + const response = { + $metadata: deserializeMetadata(output), + ...contents, + }; + return response; +}; +export const de_DescribeKinesisStreamingDestinationCommand = async (output, context) => { + if (output.statusCode >= 300) { + return de_CommandError(output, context); + } + const data = await parseBody(output.body, context); + let contents = {}; + contents = _json(data); + const response = { + $metadata: deserializeMetadata(output), + 
...contents, + }; + return response; +}; +export const de_DescribeLimitsCommand = async (output, context) => { + if (output.statusCode >= 300) { + return de_CommandError(output, context); + } + const data = await parseBody(output.body, context); + let contents = {}; + contents = _json(data); + const response = { + $metadata: deserializeMetadata(output), + ...contents, + }; + return response; +}; +export const de_DescribeTableCommand = async (output, context) => { + if (output.statusCode >= 300) { + return de_CommandError(output, context); + } + const data = await parseBody(output.body, context); + let contents = {}; + contents = de_DescribeTableOutput(data, context); + const response = { + $metadata: deserializeMetadata(output), + ...contents, + }; + return response; +}; +export const de_DescribeTableReplicaAutoScalingCommand = async (output, context) => { + if (output.statusCode >= 300) { + return de_CommandError(output, context); + } + const data = await parseBody(output.body, context); + let contents = {}; + contents = de_DescribeTableReplicaAutoScalingOutput(data, context); + const response = { + $metadata: deserializeMetadata(output), + ...contents, + }; + return response; +}; +export const de_DescribeTimeToLiveCommand = async (output, context) => { + if (output.statusCode >= 300) { + return de_CommandError(output, context); + } + const data = await parseBody(output.body, context); + let contents = {}; + contents = _json(data); + const response = { + $metadata: deserializeMetadata(output), + ...contents, + }; + return response; +}; +export const de_DisableKinesisStreamingDestinationCommand = async (output, context) => { + if (output.statusCode >= 300) { + return de_CommandError(output, context); + } + const data = await parseBody(output.body, context); + let contents = {}; + contents = _json(data); + const response = { + $metadata: deserializeMetadata(output), + ...contents, + }; + return response; +}; +export const de_EnableKinesisStreamingDestinationCommand 
= async (output, context) => { + if (output.statusCode >= 300) { + return de_CommandError(output, context); + } + const data = await parseBody(output.body, context); + let contents = {}; + contents = _json(data); + const response = { + $metadata: deserializeMetadata(output), + ...contents, + }; + return response; +}; +export const de_ExecuteStatementCommand = async (output, context) => { + if (output.statusCode >= 300) { + return de_CommandError(output, context); + } + const data = await parseBody(output.body, context); + let contents = {}; + contents = de_ExecuteStatementOutput(data, context); + const response = { + $metadata: deserializeMetadata(output), + ...contents, + }; + return response; +}; +export const de_ExecuteTransactionCommand = async (output, context) => { + if (output.statusCode >= 300) { + return de_CommandError(output, context); + } + const data = await parseBody(output.body, context); + let contents = {}; + contents = de_ExecuteTransactionOutput(data, context); + const response = { + $metadata: deserializeMetadata(output), + ...contents, + }; + return response; +}; +export const de_ExportTableToPointInTimeCommand = async (output, context) => { + if (output.statusCode >= 300) { + return de_CommandError(output, context); + } + const data = await parseBody(output.body, context); + let contents = {}; + contents = de_ExportTableToPointInTimeOutput(data, context); + const response = { + $metadata: deserializeMetadata(output), + ...contents, + }; + return response; +}; +export const de_GetItemCommand = async (output, context) => { + if (output.statusCode >= 300) { + return de_CommandError(output, context); + } + const data = await parseBody(output.body, context); + let contents = {}; + contents = de_GetItemOutput(data, context); + const response = { + $metadata: deserializeMetadata(output), + ...contents, + }; + return response; +}; +export const de_GetResourcePolicyCommand = async (output, context) => { + if (output.statusCode >= 300) { + return 
de_CommandError(output, context); + } + const data = await parseBody(output.body, context); + let contents = {}; + contents = _json(data); + const response = { + $metadata: deserializeMetadata(output), + ...contents, + }; + return response; +}; +export const de_ImportTableCommand = async (output, context) => { + if (output.statusCode >= 300) { + return de_CommandError(output, context); + } + const data = await parseBody(output.body, context); + let contents = {}; + contents = de_ImportTableOutput(data, context); + const response = { + $metadata: deserializeMetadata(output), + ...contents, + }; + return response; +}; +export const de_ListBackupsCommand = async (output, context) => { + if (output.statusCode >= 300) { + return de_CommandError(output, context); + } + const data = await parseBody(output.body, context); + let contents = {}; + contents = de_ListBackupsOutput(data, context); + const response = { + $metadata: deserializeMetadata(output), + ...contents, + }; + return response; +}; +export const de_ListContributorInsightsCommand = async (output, context) => { + if (output.statusCode >= 300) { + return de_CommandError(output, context); + } + const data = await parseBody(output.body, context); + let contents = {}; + contents = _json(data); + const response = { + $metadata: deserializeMetadata(output), + ...contents, + }; + return response; +}; +export const de_ListExportsCommand = async (output, context) => { + if (output.statusCode >= 300) { + return de_CommandError(output, context); + } + const data = await parseBody(output.body, context); + let contents = {}; + contents = _json(data); + const response = { + $metadata: deserializeMetadata(output), + ...contents, + }; + return response; +}; +export const de_ListGlobalTablesCommand = async (output, context) => { + if (output.statusCode >= 300) { + return de_CommandError(output, context); + } + const data = await parseBody(output.body, context); + let contents = {}; + contents = _json(data); + const response = { 
+ $metadata: deserializeMetadata(output), + ...contents, + }; + return response; +}; +export const de_ListImportsCommand = async (output, context) => { + if (output.statusCode >= 300) { + return de_CommandError(output, context); + } + const data = await parseBody(output.body, context); + let contents = {}; + contents = de_ListImportsOutput(data, context); + const response = { + $metadata: deserializeMetadata(output), + ...contents, + }; + return response; +}; +export const de_ListTablesCommand = async (output, context) => { + if (output.statusCode >= 300) { + return de_CommandError(output, context); + } + const data = await parseBody(output.body, context); + let contents = {}; + contents = _json(data); + const response = { + $metadata: deserializeMetadata(output), + ...contents, + }; + return response; +}; +export const de_ListTagsOfResourceCommand = async (output, context) => { + if (output.statusCode >= 300) { + return de_CommandError(output, context); + } + const data = await parseBody(output.body, context); + let contents = {}; + contents = _json(data); + const response = { + $metadata: deserializeMetadata(output), + ...contents, + }; + return response; +}; +export const de_PutItemCommand = async (output, context) => { + if (output.statusCode >= 300) { + return de_CommandError(output, context); + } + const data = await parseBody(output.body, context); + let contents = {}; + contents = de_PutItemOutput(data, context); + const response = { + $metadata: deserializeMetadata(output), + ...contents, + }; + return response; +}; +export const de_PutResourcePolicyCommand = async (output, context) => { + if (output.statusCode >= 300) { + return de_CommandError(output, context); + } + const data = await parseBody(output.body, context); + let contents = {}; + contents = _json(data); + const response = { + $metadata: deserializeMetadata(output), + ...contents, + }; + return response; +}; +export const de_QueryCommand = async (output, context) => { + if (output.statusCode >= 
300) { + return de_CommandError(output, context); + } + const data = await parseBody(output.body, context); + let contents = {}; + contents = de_QueryOutput(data, context); + const response = { + $metadata: deserializeMetadata(output), + ...contents, + }; + return response; +}; +export const de_RestoreTableFromBackupCommand = async (output, context) => { + if (output.statusCode >= 300) { + return de_CommandError(output, context); + } + const data = await parseBody(output.body, context); + let contents = {}; + contents = de_RestoreTableFromBackupOutput(data, context); + const response = { + $metadata: deserializeMetadata(output), + ...contents, + }; + return response; +}; +export const de_RestoreTableToPointInTimeCommand = async (output, context) => { + if (output.statusCode >= 300) { + return de_CommandError(output, context); + } + const data = await parseBody(output.body, context); + let contents = {}; + contents = de_RestoreTableToPointInTimeOutput(data, context); + const response = { + $metadata: deserializeMetadata(output), + ...contents, + }; + return response; +}; +export const de_ScanCommand = async (output, context) => { + if (output.statusCode >= 300) { + return de_CommandError(output, context); + } + const data = await parseBody(output.body, context); + let contents = {}; + contents = de_ScanOutput(data, context); + const response = { + $metadata: deserializeMetadata(output), + ...contents, + }; + return response; +}; +export const de_TagResourceCommand = async (output, context) => { + if (output.statusCode >= 300) { + return de_CommandError(output, context); + } + await collectBody(output.body, context); + const response = { + $metadata: deserializeMetadata(output), + }; + return response; +}; +export const de_TransactGetItemsCommand = async (output, context) => { + if (output.statusCode >= 300) { + return de_CommandError(output, context); + } + const data = await parseBody(output.body, context); + let contents = {}; + contents = 
de_TransactGetItemsOutput(data, context); + const response = { + $metadata: deserializeMetadata(output), + ...contents, + }; + return response; +}; +export const de_TransactWriteItemsCommand = async (output, context) => { + if (output.statusCode >= 300) { + return de_CommandError(output, context); + } + const data = await parseBody(output.body, context); + let contents = {}; + contents = de_TransactWriteItemsOutput(data, context); + const response = { + $metadata: deserializeMetadata(output), + ...contents, + }; + return response; +}; +export const de_UntagResourceCommand = async (output, context) => { + if (output.statusCode >= 300) { + return de_CommandError(output, context); + } + await collectBody(output.body, context); + const response = { + $metadata: deserializeMetadata(output), + }; + return response; +}; +export const de_UpdateContinuousBackupsCommand = async (output, context) => { + if (output.statusCode >= 300) { + return de_CommandError(output, context); + } + const data = await parseBody(output.body, context); + let contents = {}; + contents = de_UpdateContinuousBackupsOutput(data, context); + const response = { + $metadata: deserializeMetadata(output), + ...contents, + }; + return response; +}; +export const de_UpdateContributorInsightsCommand = async (output, context) => { + if (output.statusCode >= 300) { + return de_CommandError(output, context); + } + const data = await parseBody(output.body, context); + let contents = {}; + contents = _json(data); + const response = { + $metadata: deserializeMetadata(output), + ...contents, + }; + return response; +}; +export const de_UpdateGlobalTableCommand = async (output, context) => { + if (output.statusCode >= 300) { + return de_CommandError(output, context); + } + const data = await parseBody(output.body, context); + let contents = {}; + contents = de_UpdateGlobalTableOutput(data, context); + const response = { + $metadata: deserializeMetadata(output), + ...contents, + }; + return response; +}; +export 
const de_UpdateGlobalTableSettingsCommand = async (output, context) => { + if (output.statusCode >= 300) { + return de_CommandError(output, context); + } + const data = await parseBody(output.body, context); + let contents = {}; + contents = de_UpdateGlobalTableSettingsOutput(data, context); + const response = { + $metadata: deserializeMetadata(output), + ...contents, + }; + return response; +}; +export const de_UpdateItemCommand = async (output, context) => { + if (output.statusCode >= 300) { + return de_CommandError(output, context); + } + const data = await parseBody(output.body, context); + let contents = {}; + contents = de_UpdateItemOutput(data, context); + const response = { + $metadata: deserializeMetadata(output), + ...contents, + }; + return response; +}; +export const de_UpdateKinesisStreamingDestinationCommand = async (output, context) => { + if (output.statusCode >= 300) { + return de_CommandError(output, context); + } + const data = await parseBody(output.body, context); + let contents = {}; + contents = _json(data); + const response = { + $metadata: deserializeMetadata(output), + ...contents, + }; + return response; +}; +export const de_UpdateTableCommand = async (output, context) => { + if (output.statusCode >= 300) { + return de_CommandError(output, context); + } + const data = await parseBody(output.body, context); + let contents = {}; + contents = de_UpdateTableOutput(data, context); + const response = { + $metadata: deserializeMetadata(output), + ...contents, + }; + return response; +}; +export const de_UpdateTableReplicaAutoScalingCommand = async (output, context) => { + if (output.statusCode >= 300) { + return de_CommandError(output, context); + } + const data = await parseBody(output.body, context); + let contents = {}; + contents = de_UpdateTableReplicaAutoScalingOutput(data, context); + const response = { + $metadata: deserializeMetadata(output), + ...contents, + }; + return response; +}; +export const de_UpdateTimeToLiveCommand = async 
(output, context) => { + if (output.statusCode >= 300) { + return de_CommandError(output, context); + } + const data = await parseBody(output.body, context); + let contents = {}; + contents = _json(data); + const response = { + $metadata: deserializeMetadata(output), + ...contents, + }; + return response; +}; +const de_CommandError = async (output, context) => { + const parsedOutput = { + ...output, + body: await parseErrorBody(output.body, context), + }; + const errorCode = loadRestJsonErrorCode(output, parsedOutput.body); + switch (errorCode) { + case "InternalServerError": + case "com.amazonaws.dynamodb#InternalServerError": + throw await de_InternalServerErrorRes(parsedOutput, context); + case "RequestLimitExceeded": + case "com.amazonaws.dynamodb#RequestLimitExceeded": + throw await de_RequestLimitExceededRes(parsedOutput, context); + case "InvalidEndpointException": + case "com.amazonaws.dynamodb#InvalidEndpointException": + throw await de_InvalidEndpointExceptionRes(parsedOutput, context); + case "ProvisionedThroughputExceededException": + case "com.amazonaws.dynamodb#ProvisionedThroughputExceededException": + throw await de_ProvisionedThroughputExceededExceptionRes(parsedOutput, context); + case "ResourceNotFoundException": + case "com.amazonaws.dynamodb#ResourceNotFoundException": + throw await de_ResourceNotFoundExceptionRes(parsedOutput, context); + case "ItemCollectionSizeLimitExceededException": + case "com.amazonaws.dynamodb#ItemCollectionSizeLimitExceededException": + throw await de_ItemCollectionSizeLimitExceededExceptionRes(parsedOutput, context); + case "BackupInUseException": + case "com.amazonaws.dynamodb#BackupInUseException": + throw await de_BackupInUseExceptionRes(parsedOutput, context); + case "ContinuousBackupsUnavailableException": + case "com.amazonaws.dynamodb#ContinuousBackupsUnavailableException": + throw await de_ContinuousBackupsUnavailableExceptionRes(parsedOutput, context); + case "LimitExceededException": + case 
"com.amazonaws.dynamodb#LimitExceededException": + throw await de_LimitExceededExceptionRes(parsedOutput, context); + case "TableInUseException": + case "com.amazonaws.dynamodb#TableInUseException": + throw await de_TableInUseExceptionRes(parsedOutput, context); + case "TableNotFoundException": + case "com.amazonaws.dynamodb#TableNotFoundException": + throw await de_TableNotFoundExceptionRes(parsedOutput, context); + case "GlobalTableAlreadyExistsException": + case "com.amazonaws.dynamodb#GlobalTableAlreadyExistsException": + throw await de_GlobalTableAlreadyExistsExceptionRes(parsedOutput, context); + case "ResourceInUseException": + case "com.amazonaws.dynamodb#ResourceInUseException": + throw await de_ResourceInUseExceptionRes(parsedOutput, context); + case "BackupNotFoundException": + case "com.amazonaws.dynamodb#BackupNotFoundException": + throw await de_BackupNotFoundExceptionRes(parsedOutput, context); + case "ConditionalCheckFailedException": + case "com.amazonaws.dynamodb#ConditionalCheckFailedException": + throw await de_ConditionalCheckFailedExceptionRes(parsedOutput, context); + case "ReplicatedWriteConflictException": + case "com.amazonaws.dynamodb#ReplicatedWriteConflictException": + throw await de_ReplicatedWriteConflictExceptionRes(parsedOutput, context); + case "TransactionConflictException": + case "com.amazonaws.dynamodb#TransactionConflictException": + throw await de_TransactionConflictExceptionRes(parsedOutput, context); + case "PolicyNotFoundException": + case "com.amazonaws.dynamodb#PolicyNotFoundException": + throw await de_PolicyNotFoundExceptionRes(parsedOutput, context); + case "ExportNotFoundException": + case "com.amazonaws.dynamodb#ExportNotFoundException": + throw await de_ExportNotFoundExceptionRes(parsedOutput, context); + case "GlobalTableNotFoundException": + case "com.amazonaws.dynamodb#GlobalTableNotFoundException": + throw await de_GlobalTableNotFoundExceptionRes(parsedOutput, context); + case "ImportNotFoundException": + case 
"com.amazonaws.dynamodb#ImportNotFoundException": + throw await de_ImportNotFoundExceptionRes(parsedOutput, context); + case "DuplicateItemException": + case "com.amazonaws.dynamodb#DuplicateItemException": + throw await de_DuplicateItemExceptionRes(parsedOutput, context); + case "IdempotentParameterMismatchException": + case "com.amazonaws.dynamodb#IdempotentParameterMismatchException": + throw await de_IdempotentParameterMismatchExceptionRes(parsedOutput, context); + case "TransactionCanceledException": + case "com.amazonaws.dynamodb#TransactionCanceledException": + throw await de_TransactionCanceledExceptionRes(parsedOutput, context); + case "TransactionInProgressException": + case "com.amazonaws.dynamodb#TransactionInProgressException": + throw await de_TransactionInProgressExceptionRes(parsedOutput, context); + case "ExportConflictException": + case "com.amazonaws.dynamodb#ExportConflictException": + throw await de_ExportConflictExceptionRes(parsedOutput, context); + case "InvalidExportTimeException": + case "com.amazonaws.dynamodb#InvalidExportTimeException": + throw await de_InvalidExportTimeExceptionRes(parsedOutput, context); + case "PointInTimeRecoveryUnavailableException": + case "com.amazonaws.dynamodb#PointInTimeRecoveryUnavailableException": + throw await de_PointInTimeRecoveryUnavailableExceptionRes(parsedOutput, context); + case "ImportConflictException": + case "com.amazonaws.dynamodb#ImportConflictException": + throw await de_ImportConflictExceptionRes(parsedOutput, context); + case "TableAlreadyExistsException": + case "com.amazonaws.dynamodb#TableAlreadyExistsException": + throw await de_TableAlreadyExistsExceptionRes(parsedOutput, context); + case "InvalidRestoreTimeException": + case "com.amazonaws.dynamodb#InvalidRestoreTimeException": + throw await de_InvalidRestoreTimeExceptionRes(parsedOutput, context); + case "ReplicaAlreadyExistsException": + case "com.amazonaws.dynamodb#ReplicaAlreadyExistsException": + throw await 
de_ReplicaAlreadyExistsExceptionRes(parsedOutput, context); + case "ReplicaNotFoundException": + case "com.amazonaws.dynamodb#ReplicaNotFoundException": + throw await de_ReplicaNotFoundExceptionRes(parsedOutput, context); + case "IndexNotFoundException": + case "com.amazonaws.dynamodb#IndexNotFoundException": + throw await de_IndexNotFoundExceptionRes(parsedOutput, context); + default: + const parsedBody = parsedOutput.body; + return throwDefaultError({ + output, + parsedBody, + errorCode, + }); + } +}; +const de_BackupInUseExceptionRes = async (parsedOutput, context) => { + const body = parsedOutput.body; + const deserialized = _json(body); + const exception = new BackupInUseException({ + $metadata: deserializeMetadata(parsedOutput), + ...deserialized, + }); + return __decorateServiceException(exception, body); +}; +const de_BackupNotFoundExceptionRes = async (parsedOutput, context) => { + const body = parsedOutput.body; + const deserialized = _json(body); + const exception = new BackupNotFoundException({ + $metadata: deserializeMetadata(parsedOutput), + ...deserialized, + }); + return __decorateServiceException(exception, body); +}; +const de_ConditionalCheckFailedExceptionRes = async (parsedOutput, context) => { + const body = parsedOutput.body; + const deserialized = de_ConditionalCheckFailedException(body, context); + const exception = new ConditionalCheckFailedException({ + $metadata: deserializeMetadata(parsedOutput), + ...deserialized, + }); + return __decorateServiceException(exception, body); +}; +const de_ContinuousBackupsUnavailableExceptionRes = async (parsedOutput, context) => { + const body = parsedOutput.body; + const deserialized = _json(body); + const exception = new ContinuousBackupsUnavailableException({ + $metadata: deserializeMetadata(parsedOutput), + ...deserialized, + }); + return __decorateServiceException(exception, body); +}; +const de_DuplicateItemExceptionRes = async (parsedOutput, context) => { + const body = parsedOutput.body; + const 
deserialized = _json(body); + const exception = new DuplicateItemException({ + $metadata: deserializeMetadata(parsedOutput), + ...deserialized, + }); + return __decorateServiceException(exception, body); +}; +const de_ExportConflictExceptionRes = async (parsedOutput, context) => { + const body = parsedOutput.body; + const deserialized = _json(body); + const exception = new ExportConflictException({ + $metadata: deserializeMetadata(parsedOutput), + ...deserialized, + }); + return __decorateServiceException(exception, body); +}; +const de_ExportNotFoundExceptionRes = async (parsedOutput, context) => { + const body = parsedOutput.body; + const deserialized = _json(body); + const exception = new ExportNotFoundException({ + $metadata: deserializeMetadata(parsedOutput), + ...deserialized, + }); + return __decorateServiceException(exception, body); +}; +const de_GlobalTableAlreadyExistsExceptionRes = async (parsedOutput, context) => { + const body = parsedOutput.body; + const deserialized = _json(body); + const exception = new GlobalTableAlreadyExistsException({ + $metadata: deserializeMetadata(parsedOutput), + ...deserialized, + }); + return __decorateServiceException(exception, body); +}; +const de_GlobalTableNotFoundExceptionRes = async (parsedOutput, context) => { + const body = parsedOutput.body; + const deserialized = _json(body); + const exception = new GlobalTableNotFoundException({ + $metadata: deserializeMetadata(parsedOutput), + ...deserialized, + }); + return __decorateServiceException(exception, body); +}; +const de_IdempotentParameterMismatchExceptionRes = async (parsedOutput, context) => { + const body = parsedOutput.body; + const deserialized = _json(body); + const exception = new IdempotentParameterMismatchException({ + $metadata: deserializeMetadata(parsedOutput), + ...deserialized, + }); + return __decorateServiceException(exception, body); +}; +const de_ImportConflictExceptionRes = async (parsedOutput, context) => { + const body = parsedOutput.body; + 
const deserialized = _json(body); + const exception = new ImportConflictException({ + $metadata: deserializeMetadata(parsedOutput), + ...deserialized, + }); + return __decorateServiceException(exception, body); +}; +const de_ImportNotFoundExceptionRes = async (parsedOutput, context) => { + const body = parsedOutput.body; + const deserialized = _json(body); + const exception = new ImportNotFoundException({ + $metadata: deserializeMetadata(parsedOutput), + ...deserialized, + }); + return __decorateServiceException(exception, body); +}; +const de_IndexNotFoundExceptionRes = async (parsedOutput, context) => { + const body = parsedOutput.body; + const deserialized = _json(body); + const exception = new IndexNotFoundException({ + $metadata: deserializeMetadata(parsedOutput), + ...deserialized, + }); + return __decorateServiceException(exception, body); +}; +const de_InternalServerErrorRes = async (parsedOutput, context) => { + const body = parsedOutput.body; + const deserialized = _json(body); + const exception = new InternalServerError({ + $metadata: deserializeMetadata(parsedOutput), + ...deserialized, + }); + return __decorateServiceException(exception, body); +}; +const de_InvalidEndpointExceptionRes = async (parsedOutput, context) => { + const body = parsedOutput.body; + const deserialized = _json(body); + const exception = new InvalidEndpointException({ + $metadata: deserializeMetadata(parsedOutput), + ...deserialized, + }); + return __decorateServiceException(exception, body); +}; +const de_InvalidExportTimeExceptionRes = async (parsedOutput, context) => { + const body = parsedOutput.body; + const deserialized = _json(body); + const exception = new InvalidExportTimeException({ + $metadata: deserializeMetadata(parsedOutput), + ...deserialized, + }); + return __decorateServiceException(exception, body); +}; +const de_InvalidRestoreTimeExceptionRes = async (parsedOutput, context) => { + const body = parsedOutput.body; + const deserialized = _json(body); + const 
exception = new InvalidRestoreTimeException({ + $metadata: deserializeMetadata(parsedOutput), + ...deserialized, + }); + return __decorateServiceException(exception, body); +}; +const de_ItemCollectionSizeLimitExceededExceptionRes = async (parsedOutput, context) => { + const body = parsedOutput.body; + const deserialized = _json(body); + const exception = new ItemCollectionSizeLimitExceededException({ + $metadata: deserializeMetadata(parsedOutput), + ...deserialized, + }); + return __decorateServiceException(exception, body); +}; +const de_LimitExceededExceptionRes = async (parsedOutput, context) => { + const body = parsedOutput.body; + const deserialized = _json(body); + const exception = new LimitExceededException({ + $metadata: deserializeMetadata(parsedOutput), + ...deserialized, + }); + return __decorateServiceException(exception, body); +}; +const de_PointInTimeRecoveryUnavailableExceptionRes = async (parsedOutput, context) => { + const body = parsedOutput.body; + const deserialized = _json(body); + const exception = new PointInTimeRecoveryUnavailableException({ + $metadata: deserializeMetadata(parsedOutput), + ...deserialized, + }); + return __decorateServiceException(exception, body); +}; +const de_PolicyNotFoundExceptionRes = async (parsedOutput, context) => { + const body = parsedOutput.body; + const deserialized = _json(body); + const exception = new PolicyNotFoundException({ + $metadata: deserializeMetadata(parsedOutput), + ...deserialized, + }); + return __decorateServiceException(exception, body); +}; +const de_ProvisionedThroughputExceededExceptionRes = async (parsedOutput, context) => { + const body = parsedOutput.body; + const deserialized = _json(body); + const exception = new ProvisionedThroughputExceededException({ + $metadata: deserializeMetadata(parsedOutput), + ...deserialized, + }); + return __decorateServiceException(exception, body); +}; +const de_ReplicaAlreadyExistsExceptionRes = async (parsedOutput, context) => { + const body = 
parsedOutput.body; + const deserialized = _json(body); + const exception = new ReplicaAlreadyExistsException({ + $metadata: deserializeMetadata(parsedOutput), + ...deserialized, + }); + return __decorateServiceException(exception, body); +}; +const de_ReplicaNotFoundExceptionRes = async (parsedOutput, context) => { + const body = parsedOutput.body; + const deserialized = _json(body); + const exception = new ReplicaNotFoundException({ + $metadata: deserializeMetadata(parsedOutput), + ...deserialized, + }); + return __decorateServiceException(exception, body); +}; +const de_ReplicatedWriteConflictExceptionRes = async (parsedOutput, context) => { + const body = parsedOutput.body; + const deserialized = _json(body); + const exception = new ReplicatedWriteConflictException({ + $metadata: deserializeMetadata(parsedOutput), + ...deserialized, + }); + return __decorateServiceException(exception, body); +}; +const de_RequestLimitExceededRes = async (parsedOutput, context) => { + const body = parsedOutput.body; + const deserialized = _json(body); + const exception = new RequestLimitExceeded({ + $metadata: deserializeMetadata(parsedOutput), + ...deserialized, + }); + return __decorateServiceException(exception, body); +}; +const de_ResourceInUseExceptionRes = async (parsedOutput, context) => { + const body = parsedOutput.body; + const deserialized = _json(body); + const exception = new ResourceInUseException({ + $metadata: deserializeMetadata(parsedOutput), + ...deserialized, + }); + return __decorateServiceException(exception, body); +}; +const de_ResourceNotFoundExceptionRes = async (parsedOutput, context) => { + const body = parsedOutput.body; + const deserialized = _json(body); + const exception = new ResourceNotFoundException({ + $metadata: deserializeMetadata(parsedOutput), + ...deserialized, + }); + return __decorateServiceException(exception, body); +}; +const de_TableAlreadyExistsExceptionRes = async (parsedOutput, context) => { + const body = parsedOutput.body; + 
const deserialized = _json(body); + const exception = new TableAlreadyExistsException({ + $metadata: deserializeMetadata(parsedOutput), + ...deserialized, + }); + return __decorateServiceException(exception, body); +}; +const de_TableInUseExceptionRes = async (parsedOutput, context) => { + const body = parsedOutput.body; + const deserialized = _json(body); + const exception = new TableInUseException({ + $metadata: deserializeMetadata(parsedOutput), + ...deserialized, + }); + return __decorateServiceException(exception, body); +}; +const de_TableNotFoundExceptionRes = async (parsedOutput, context) => { + const body = parsedOutput.body; + const deserialized = _json(body); + const exception = new TableNotFoundException({ + $metadata: deserializeMetadata(parsedOutput), + ...deserialized, + }); + return __decorateServiceException(exception, body); +}; +const de_TransactionCanceledExceptionRes = async (parsedOutput, context) => { + const body = parsedOutput.body; + const deserialized = de_TransactionCanceledException(body, context); + const exception = new TransactionCanceledException({ + $metadata: deserializeMetadata(parsedOutput), + ...deserialized, + }); + return __decorateServiceException(exception, body); +}; +const de_TransactionConflictExceptionRes = async (parsedOutput, context) => { + const body = parsedOutput.body; + const deserialized = _json(body); + const exception = new TransactionConflictException({ + $metadata: deserializeMetadata(parsedOutput), + ...deserialized, + }); + return __decorateServiceException(exception, body); +}; +const de_TransactionInProgressExceptionRes = async (parsedOutput, context) => { + const body = parsedOutput.body; + const deserialized = _json(body); + const exception = new TransactionInProgressException({ + $metadata: deserializeMetadata(parsedOutput), + ...deserialized, + }); + return __decorateServiceException(exception, body); +}; +const se_AttributeUpdates = (input, context) => { + return Object.entries(input).reduce((acc, 
[key, value]) => { + if (value === null) { + return acc; + } + acc[key] = se_AttributeValueUpdate(value, context); + return acc; + }, {}); +}; +const se_AttributeValue = (input, context) => { + return AttributeValue.visit(input, { + B: (value) => ({ B: context.base64Encoder(value) }), + BOOL: (value) => ({ BOOL: value }), + BS: (value) => ({ BS: se_BinarySetAttributeValue(value, context) }), + L: (value) => ({ L: se_ListAttributeValue(value, context) }), + M: (value) => ({ M: se_MapAttributeValue(value, context) }), + N: (value) => ({ N: value }), + NS: (value) => ({ NS: _json(value) }), + NULL: (value) => ({ NULL: value }), + S: (value) => ({ S: value }), + SS: (value) => ({ SS: _json(value) }), + _: (name, value) => ({ [name]: value }), + }); +}; +const se_AttributeValueList = (input, context) => { + return input + .filter((e) => e != null) + .map((entry) => { + return se_AttributeValue(entry, context); + }); +}; +const se_AttributeValueUpdate = (input, context) => { + return take(input, { + Action: [], + Value: (_) => se_AttributeValue(_, context), + }); +}; +const se_AutoScalingPolicyUpdate = (input, context) => { + return take(input, { + PolicyName: [], + TargetTrackingScalingPolicyConfiguration: (_) => se_AutoScalingTargetTrackingScalingPolicyConfigurationUpdate(_, context), + }); +}; +const se_AutoScalingSettingsUpdate = (input, context) => { + return take(input, { + AutoScalingDisabled: [], + AutoScalingRoleArn: [], + MaximumUnits: [], + MinimumUnits: [], + ScalingPolicyUpdate: (_) => se_AutoScalingPolicyUpdate(_, context), + }); +}; +const se_AutoScalingTargetTrackingScalingPolicyConfigurationUpdate = (input, context) => { + return take(input, { + DisableScaleIn: [], + ScaleInCooldown: [], + ScaleOutCooldown: [], + TargetValue: __serializeFloat, + }); +}; +const se_BatchExecuteStatementInput = (input, context) => { + return take(input, { + ReturnConsumedCapacity: [], + Statements: (_) => se_PartiQLBatchRequest(_, context), + }); +}; +const 
se_BatchGetItemInput = (input, context) => { + return take(input, { + RequestItems: (_) => se_BatchGetRequestMap(_, context), + ReturnConsumedCapacity: [], + }); +}; +const se_BatchGetRequestMap = (input, context) => { + return Object.entries(input).reduce((acc, [key, value]) => { + if (value === null) { + return acc; + } + acc[key] = se_KeysAndAttributes(value, context); + return acc; + }, {}); +}; +const se_BatchStatementRequest = (input, context) => { + return take(input, { + ConsistentRead: [], + Parameters: (_) => se_PreparedStatementParameters(_, context), + ReturnValuesOnConditionCheckFailure: [], + Statement: [], + }); +}; +const se_BatchWriteItemInput = (input, context) => { + return take(input, { + RequestItems: (_) => se_BatchWriteItemRequestMap(_, context), + ReturnConsumedCapacity: [], + ReturnItemCollectionMetrics: [], + }); +}; +const se_BatchWriteItemRequestMap = (input, context) => { + return Object.entries(input).reduce((acc, [key, value]) => { + if (value === null) { + return acc; + } + acc[key] = se_WriteRequests(value, context); + return acc; + }, {}); +}; +const se_BinarySetAttributeValue = (input, context) => { + return input + .filter((e) => e != null) + .map((entry) => { + return context.base64Encoder(entry); + }); +}; +const se_Condition = (input, context) => { + return take(input, { + AttributeValueList: (_) => se_AttributeValueList(_, context), + ComparisonOperator: [], + }); +}; +const se_ConditionCheck = (input, context) => { + return take(input, { + ConditionExpression: [], + ExpressionAttributeNames: _json, + ExpressionAttributeValues: (_) => se_ExpressionAttributeValueMap(_, context), + Key: (_) => se_Key(_, context), + ReturnValuesOnConditionCheckFailure: [], + TableName: [], + }); +}; +const se_Delete = (input, context) => { + return take(input, { + ConditionExpression: [], + ExpressionAttributeNames: _json, + ExpressionAttributeValues: (_) => se_ExpressionAttributeValueMap(_, context), + Key: (_) => se_Key(_, context), + 
ReturnValuesOnConditionCheckFailure: [], + TableName: [], + }); +}; +const se_DeleteItemInput = (input, context) => { + return take(input, { + ConditionExpression: [], + ConditionalOperator: [], + Expected: (_) => se_ExpectedAttributeMap(_, context), + ExpressionAttributeNames: _json, + ExpressionAttributeValues: (_) => se_ExpressionAttributeValueMap(_, context), + Key: (_) => se_Key(_, context), + ReturnConsumedCapacity: [], + ReturnItemCollectionMetrics: [], + ReturnValues: [], + ReturnValuesOnConditionCheckFailure: [], + TableName: [], + }); +}; +const se_DeleteRequest = (input, context) => { + return take(input, { + Key: (_) => se_Key(_, context), + }); +}; +const se_ExecuteStatementInput = (input, context) => { + return take(input, { + ConsistentRead: [], + Limit: [], + NextToken: [], + Parameters: (_) => se_PreparedStatementParameters(_, context), + ReturnConsumedCapacity: [], + ReturnValuesOnConditionCheckFailure: [], + Statement: [], + }); +}; +const se_ExecuteTransactionInput = (input, context) => { + return take(input, { + ClientRequestToken: [true, (_) => _ ?? generateIdempotencyToken()], + ReturnConsumedCapacity: [], + TransactStatements: (_) => se_ParameterizedStatements(_, context), + }); +}; +const se_ExpectedAttributeMap = (input, context) => { + return Object.entries(input).reduce((acc, [key, value]) => { + if (value === null) { + return acc; + } + acc[key] = se_ExpectedAttributeValue(value, context); + return acc; + }, {}); +}; +const se_ExpectedAttributeValue = (input, context) => { + return take(input, { + AttributeValueList: (_) => se_AttributeValueList(_, context), + ComparisonOperator: [], + Exists: [], + Value: (_) => se_AttributeValue(_, context), + }); +}; +const se_ExportTableToPointInTimeInput = (input, context) => { + return take(input, { + ClientToken: [true, (_) => _ ?? 
generateIdempotencyToken()], + ExportFormat: [], + ExportTime: (_) => _.getTime() / 1000, + ExportType: [], + IncrementalExportSpecification: (_) => se_IncrementalExportSpecification(_, context), + S3Bucket: [], + S3BucketOwner: [], + S3Prefix: [], + S3SseAlgorithm: [], + S3SseKmsKeyId: [], + TableArn: [], + }); +}; +const se_ExpressionAttributeValueMap = (input, context) => { + return Object.entries(input).reduce((acc, [key, value]) => { + if (value === null) { + return acc; + } + acc[key] = se_AttributeValue(value, context); + return acc; + }, {}); +}; +const se_FilterConditionMap = (input, context) => { + return Object.entries(input).reduce((acc, [key, value]) => { + if (value === null) { + return acc; + } + acc[key] = se_Condition(value, context); + return acc; + }, {}); +}; +const se_Get = (input, context) => { + return take(input, { + ExpressionAttributeNames: _json, + Key: (_) => se_Key(_, context), + ProjectionExpression: [], + TableName: [], + }); +}; +const se_GetItemInput = (input, context) => { + return take(input, { + AttributesToGet: _json, + ConsistentRead: [], + ExpressionAttributeNames: _json, + Key: (_) => se_Key(_, context), + ProjectionExpression: [], + ReturnConsumedCapacity: [], + TableName: [], + }); +}; +const se_GlobalSecondaryIndexAutoScalingUpdate = (input, context) => { + return take(input, { + IndexName: [], + ProvisionedWriteCapacityAutoScalingUpdate: (_) => se_AutoScalingSettingsUpdate(_, context), + }); +}; +const se_GlobalSecondaryIndexAutoScalingUpdateList = (input, context) => { + return input + .filter((e) => e != null) + .map((entry) => { + return se_GlobalSecondaryIndexAutoScalingUpdate(entry, context); + }); +}; +const se_GlobalTableGlobalSecondaryIndexSettingsUpdate = (input, context) => { + return take(input, { + IndexName: [], + ProvisionedWriteCapacityAutoScalingSettingsUpdate: (_) => se_AutoScalingSettingsUpdate(_, context), + ProvisionedWriteCapacityUnits: [], + }); +}; +const 
se_GlobalTableGlobalSecondaryIndexSettingsUpdateList = (input, context) => { + return input + .filter((e) => e != null) + .map((entry) => { + return se_GlobalTableGlobalSecondaryIndexSettingsUpdate(entry, context); + }); +}; +const se_ImportTableInput = (input, context) => { + return take(input, { + ClientToken: [true, (_) => _ ?? generateIdempotencyToken()], + InputCompressionType: [], + InputFormat: [], + InputFormatOptions: _json, + S3BucketSource: _json, + TableCreationParameters: _json, + }); +}; +const se_IncrementalExportSpecification = (input, context) => { + return take(input, { + ExportFromTime: (_) => _.getTime() / 1000, + ExportToTime: (_) => _.getTime() / 1000, + ExportViewType: [], + }); +}; +const se_Key = (input, context) => { + return Object.entries(input).reduce((acc, [key, value]) => { + if (value === null) { + return acc; + } + acc[key] = se_AttributeValue(value, context); + return acc; + }, {}); +}; +const se_KeyConditions = (input, context) => { + return Object.entries(input).reduce((acc, [key, value]) => { + if (value === null) { + return acc; + } + acc[key] = se_Condition(value, context); + return acc; + }, {}); +}; +const se_KeyList = (input, context) => { + return input + .filter((e) => e != null) + .map((entry) => { + return se_Key(entry, context); + }); +}; +const se_KeysAndAttributes = (input, context) => { + return take(input, { + AttributesToGet: _json, + ConsistentRead: [], + ExpressionAttributeNames: _json, + Keys: (_) => se_KeyList(_, context), + ProjectionExpression: [], + }); +}; +const se_ListAttributeValue = (input, context) => { + return input + .filter((e) => e != null) + .map((entry) => { + return se_AttributeValue(entry, context); + }); +}; +const se_ListBackupsInput = (input, context) => { + return take(input, { + BackupType: [], + ExclusiveStartBackupArn: [], + Limit: [], + TableName: [], + TimeRangeLowerBound: (_) => _.getTime() / 1000, + TimeRangeUpperBound: (_) => _.getTime() / 1000, + }); +}; +const 
se_MapAttributeValue = (input, context) => { + return Object.entries(input).reduce((acc, [key, value]) => { + if (value === null) { + return acc; + } + acc[key] = se_AttributeValue(value, context); + return acc; + }, {}); +}; +const se_ParameterizedStatement = (input, context) => { + return take(input, { + Parameters: (_) => se_PreparedStatementParameters(_, context), + ReturnValuesOnConditionCheckFailure: [], + Statement: [], + }); +}; +const se_ParameterizedStatements = (input, context) => { + return input + .filter((e) => e != null) + .map((entry) => { + return se_ParameterizedStatement(entry, context); + }); +}; +const se_PartiQLBatchRequest = (input, context) => { + return input + .filter((e) => e != null) + .map((entry) => { + return se_BatchStatementRequest(entry, context); + }); +}; +const se_PreparedStatementParameters = (input, context) => { + return input + .filter((e) => e != null) + .map((entry) => { + return se_AttributeValue(entry, context); + }); +}; +const se_Put = (input, context) => { + return take(input, { + ConditionExpression: [], + ExpressionAttributeNames: _json, + ExpressionAttributeValues: (_) => se_ExpressionAttributeValueMap(_, context), + Item: (_) => se_PutItemInputAttributeMap(_, context), + ReturnValuesOnConditionCheckFailure: [], + TableName: [], + }); +}; +const se_PutItemInput = (input, context) => { + return take(input, { + ConditionExpression: [], + ConditionalOperator: [], + Expected: (_) => se_ExpectedAttributeMap(_, context), + ExpressionAttributeNames: _json, + ExpressionAttributeValues: (_) => se_ExpressionAttributeValueMap(_, context), + Item: (_) => se_PutItemInputAttributeMap(_, context), + ReturnConsumedCapacity: [], + ReturnItemCollectionMetrics: [], + ReturnValues: [], + ReturnValuesOnConditionCheckFailure: [], + TableName: [], + }); +}; +const se_PutItemInputAttributeMap = (input, context) => { + return Object.entries(input).reduce((acc, [key, value]) => { + if (value === null) { + return acc; + } + acc[key] = 
se_AttributeValue(value, context); + return acc; + }, {}); +}; +const se_PutRequest = (input, context) => { + return take(input, { + Item: (_) => se_PutItemInputAttributeMap(_, context), + }); +}; +const se_QueryInput = (input, context) => { + return take(input, { + AttributesToGet: _json, + ConditionalOperator: [], + ConsistentRead: [], + ExclusiveStartKey: (_) => se_Key(_, context), + ExpressionAttributeNames: _json, + ExpressionAttributeValues: (_) => se_ExpressionAttributeValueMap(_, context), + FilterExpression: [], + IndexName: [], + KeyConditionExpression: [], + KeyConditions: (_) => se_KeyConditions(_, context), + Limit: [], + ProjectionExpression: [], + QueryFilter: (_) => se_FilterConditionMap(_, context), + ReturnConsumedCapacity: [], + ScanIndexForward: [], + Select: [], + TableName: [], + }); +}; +const se_ReplicaAutoScalingUpdate = (input, context) => { + return take(input, { + RegionName: [], + ReplicaGlobalSecondaryIndexUpdates: (_) => se_ReplicaGlobalSecondaryIndexAutoScalingUpdateList(_, context), + ReplicaProvisionedReadCapacityAutoScalingUpdate: (_) => se_AutoScalingSettingsUpdate(_, context), + }); +}; +const se_ReplicaAutoScalingUpdateList = (input, context) => { + return input + .filter((e) => e != null) + .map((entry) => { + return se_ReplicaAutoScalingUpdate(entry, context); + }); +}; +const se_ReplicaGlobalSecondaryIndexAutoScalingUpdate = (input, context) => { + return take(input, { + IndexName: [], + ProvisionedReadCapacityAutoScalingUpdate: (_) => se_AutoScalingSettingsUpdate(_, context), + }); +}; +const se_ReplicaGlobalSecondaryIndexAutoScalingUpdateList = (input, context) => { + return input + .filter((e) => e != null) + .map((entry) => { + return se_ReplicaGlobalSecondaryIndexAutoScalingUpdate(entry, context); + }); +}; +const se_ReplicaGlobalSecondaryIndexSettingsUpdate = (input, context) => { + return take(input, { + IndexName: [], + ProvisionedReadCapacityAutoScalingSettingsUpdate: (_) => se_AutoScalingSettingsUpdate(_, context), 
+ ProvisionedReadCapacityUnits: [], + }); +}; +const se_ReplicaGlobalSecondaryIndexSettingsUpdateList = (input, context) => { + return input + .filter((e) => e != null) + .map((entry) => { + return se_ReplicaGlobalSecondaryIndexSettingsUpdate(entry, context); + }); +}; +const se_ReplicaSettingsUpdate = (input, context) => { + return take(input, { + RegionName: [], + ReplicaGlobalSecondaryIndexSettingsUpdate: (_) => se_ReplicaGlobalSecondaryIndexSettingsUpdateList(_, context), + ReplicaProvisionedReadCapacityAutoScalingSettingsUpdate: (_) => se_AutoScalingSettingsUpdate(_, context), + ReplicaProvisionedReadCapacityUnits: [], + ReplicaTableClass: [], + }); +}; +const se_ReplicaSettingsUpdateList = (input, context) => { + return input + .filter((e) => e != null) + .map((entry) => { + return se_ReplicaSettingsUpdate(entry, context); + }); +}; +const se_RestoreTableToPointInTimeInput = (input, context) => { + return take(input, { + BillingModeOverride: [], + GlobalSecondaryIndexOverride: _json, + LocalSecondaryIndexOverride: _json, + OnDemandThroughputOverride: _json, + ProvisionedThroughputOverride: _json, + RestoreDateTime: (_) => _.getTime() / 1000, + SSESpecificationOverride: _json, + SourceTableArn: [], + SourceTableName: [], + TargetTableName: [], + UseLatestRestorableTime: [], + }); +}; +const se_ScanInput = (input, context) => { + return take(input, { + AttributesToGet: _json, + ConditionalOperator: [], + ConsistentRead: [], + ExclusiveStartKey: (_) => se_Key(_, context), + ExpressionAttributeNames: _json, + ExpressionAttributeValues: (_) => se_ExpressionAttributeValueMap(_, context), + FilterExpression: [], + IndexName: [], + Limit: [], + ProjectionExpression: [], + ReturnConsumedCapacity: [], + ScanFilter: (_) => se_FilterConditionMap(_, context), + Segment: [], + Select: [], + TableName: [], + TotalSegments: [], + }); +}; +const se_TransactGetItem = (input, context) => { + return take(input, { + Get: (_) => se_Get(_, context), + }); +}; +const 
se_TransactGetItemList = (input, context) => { + return input + .filter((e) => e != null) + .map((entry) => { + return se_TransactGetItem(entry, context); + }); +}; +const se_TransactGetItemsInput = (input, context) => { + return take(input, { + ReturnConsumedCapacity: [], + TransactItems: (_) => se_TransactGetItemList(_, context), + }); +}; +const se_TransactWriteItem = (input, context) => { + return take(input, { + ConditionCheck: (_) => se_ConditionCheck(_, context), + Delete: (_) => se_Delete(_, context), + Put: (_) => se_Put(_, context), + Update: (_) => se_Update(_, context), + }); +}; +const se_TransactWriteItemList = (input, context) => { + return input + .filter((e) => e != null) + .map((entry) => { + return se_TransactWriteItem(entry, context); + }); +}; +const se_TransactWriteItemsInput = (input, context) => { + return take(input, { + ClientRequestToken: [true, (_) => _ ?? generateIdempotencyToken()], + ReturnConsumedCapacity: [], + ReturnItemCollectionMetrics: [], + TransactItems: (_) => se_TransactWriteItemList(_, context), + }); +}; +const se_Update = (input, context) => { + return take(input, { + ConditionExpression: [], + ExpressionAttributeNames: _json, + ExpressionAttributeValues: (_) => se_ExpressionAttributeValueMap(_, context), + Key: (_) => se_Key(_, context), + ReturnValuesOnConditionCheckFailure: [], + TableName: [], + UpdateExpression: [], + }); +}; +const se_UpdateGlobalTableSettingsInput = (input, context) => { + return take(input, { + GlobalTableBillingMode: [], + GlobalTableGlobalSecondaryIndexSettingsUpdate: (_) => se_GlobalTableGlobalSecondaryIndexSettingsUpdateList(_, context), + GlobalTableName: [], + GlobalTableProvisionedWriteCapacityAutoScalingSettingsUpdate: (_) => se_AutoScalingSettingsUpdate(_, context), + GlobalTableProvisionedWriteCapacityUnits: [], + ReplicaSettingsUpdate: (_) => se_ReplicaSettingsUpdateList(_, context), + }); +}; +const se_UpdateItemInput = (input, context) => { + return take(input, { + AttributeUpdates: 
(_) => se_AttributeUpdates(_, context), + ConditionExpression: [], + ConditionalOperator: [], + Expected: (_) => se_ExpectedAttributeMap(_, context), + ExpressionAttributeNames: _json, + ExpressionAttributeValues: (_) => se_ExpressionAttributeValueMap(_, context), + Key: (_) => se_Key(_, context), + ReturnConsumedCapacity: [], + ReturnItemCollectionMetrics: [], + ReturnValues: [], + ReturnValuesOnConditionCheckFailure: [], + TableName: [], + UpdateExpression: [], + }); +}; +const se_UpdateTableReplicaAutoScalingInput = (input, context) => { + return take(input, { + GlobalSecondaryIndexUpdates: (_) => se_GlobalSecondaryIndexAutoScalingUpdateList(_, context), + ProvisionedWriteCapacityAutoScalingUpdate: (_) => se_AutoScalingSettingsUpdate(_, context), + ReplicaUpdates: (_) => se_ReplicaAutoScalingUpdateList(_, context), + TableName: [], + }); +}; +const se_WriteRequest = (input, context) => { + return take(input, { + DeleteRequest: (_) => se_DeleteRequest(_, context), + PutRequest: (_) => se_PutRequest(_, context), + }); +}; +const se_WriteRequests = (input, context) => { + return input + .filter((e) => e != null) + .map((entry) => { + return se_WriteRequest(entry, context); + }); +}; +const de_ArchivalSummary = (output, context) => { + return take(output, { + ArchivalBackupArn: __expectString, + ArchivalDateTime: (_) => __expectNonNull(__parseEpochTimestamp(__expectNumber(_))), + ArchivalReason: __expectString, + }); +}; +const de_AttributeMap = (output, context) => { + return Object.entries(output).reduce((acc, [key, value]) => { + if (value === null) { + return acc; + } + acc[key] = de_AttributeValue(__expectUnion(value), context); + return acc; + }, {}); +}; +const de_AttributeValue = (output, context) => { + if (output.B != null) { + return { + B: context.base64Decoder(output.B), + }; + } + if (__expectBoolean(output.BOOL) !== undefined) { + return { BOOL: __expectBoolean(output.BOOL) }; + } + if (output.BS != null) { + return { + BS: 
de_BinarySetAttributeValue(output.BS, context), + }; + } + if (output.L != null) { + return { + L: de_ListAttributeValue(output.L, context), + }; + } + if (output.M != null) { + return { + M: de_MapAttributeValue(output.M, context), + }; + } + if (__expectString(output.N) !== undefined) { + return { N: __expectString(output.N) }; + } + if (output.NS != null) { + return { + NS: _json(output.NS), + }; + } + if (__expectBoolean(output.NULL) !== undefined) { + return { NULL: __expectBoolean(output.NULL) }; + } + if (__expectString(output.S) !== undefined) { + return { S: __expectString(output.S) }; + } + if (output.SS != null) { + return { + SS: _json(output.SS), + }; + } + return { $unknown: Object.entries(output)[0] }; +}; +const de_AutoScalingPolicyDescription = (output, context) => { + return take(output, { + PolicyName: __expectString, + TargetTrackingScalingPolicyConfiguration: (_) => de_AutoScalingTargetTrackingScalingPolicyConfigurationDescription(_, context), + }); +}; +const de_AutoScalingPolicyDescriptionList = (output, context) => { + const retVal = (output || []) + .filter((e) => e != null) + .map((entry) => { + return de_AutoScalingPolicyDescription(entry, context); + }); + return retVal; +}; +const de_AutoScalingSettingsDescription = (output, context) => { + return take(output, { + AutoScalingDisabled: __expectBoolean, + AutoScalingRoleArn: __expectString, + MaximumUnits: __expectLong, + MinimumUnits: __expectLong, + ScalingPolicies: (_) => de_AutoScalingPolicyDescriptionList(_, context), + }); +}; +const de_AutoScalingTargetTrackingScalingPolicyConfigurationDescription = (output, context) => { + return take(output, { + DisableScaleIn: __expectBoolean, + ScaleInCooldown: __expectInt32, + ScaleOutCooldown: __expectInt32, + TargetValue: __limitedParseDouble, + }); +}; +const de_BackupDescription = (output, context) => { + return take(output, { + BackupDetails: (_) => de_BackupDetails(_, context), + SourceTableDetails: (_) => de_SourceTableDetails(_, 
context), + SourceTableFeatureDetails: (_) => de_SourceTableFeatureDetails(_, context), + }); +}; +const de_BackupDetails = (output, context) => { + return take(output, { + BackupArn: __expectString, + BackupCreationDateTime: (_) => __expectNonNull(__parseEpochTimestamp(__expectNumber(_))), + BackupExpiryDateTime: (_) => __expectNonNull(__parseEpochTimestamp(__expectNumber(_))), + BackupName: __expectString, + BackupSizeBytes: __expectLong, + BackupStatus: __expectString, + BackupType: __expectString, + }); +}; +const de_BackupSummaries = (output, context) => { + const retVal = (output || []) + .filter((e) => e != null) + .map((entry) => { + return de_BackupSummary(entry, context); + }); + return retVal; +}; +const de_BackupSummary = (output, context) => { + return take(output, { + BackupArn: __expectString, + BackupCreationDateTime: (_) => __expectNonNull(__parseEpochTimestamp(__expectNumber(_))), + BackupExpiryDateTime: (_) => __expectNonNull(__parseEpochTimestamp(__expectNumber(_))), + BackupName: __expectString, + BackupSizeBytes: __expectLong, + BackupStatus: __expectString, + BackupType: __expectString, + TableArn: __expectString, + TableId: __expectString, + TableName: __expectString, + }); +}; +const de_BatchExecuteStatementOutput = (output, context) => { + return take(output, { + ConsumedCapacity: (_) => de_ConsumedCapacityMultiple(_, context), + Responses: (_) => de_PartiQLBatchResponse(_, context), + }); +}; +const de_BatchGetItemOutput = (output, context) => { + return take(output, { + ConsumedCapacity: (_) => de_ConsumedCapacityMultiple(_, context), + Responses: (_) => de_BatchGetResponseMap(_, context), + UnprocessedKeys: (_) => de_BatchGetRequestMap(_, context), + }); +}; +const de_BatchGetRequestMap = (output, context) => { + return Object.entries(output).reduce((acc, [key, value]) => { + if (value === null) { + return acc; + } + acc[key] = de_KeysAndAttributes(value, context); + return acc; + }, {}); +}; +const de_BatchGetResponseMap = (output, 
context) => { + return Object.entries(output).reduce((acc, [key, value]) => { + if (value === null) { + return acc; + } + acc[key] = de_ItemList(value, context); + return acc; + }, {}); +}; +const de_BatchStatementError = (output, context) => { + return take(output, { + Code: __expectString, + Item: (_) => de_AttributeMap(_, context), + Message: __expectString, + }); +}; +const de_BatchStatementResponse = (output, context) => { + return take(output, { + Error: (_) => de_BatchStatementError(_, context), + Item: (_) => de_AttributeMap(_, context), + TableName: __expectString, + }); +}; +const de_BatchWriteItemOutput = (output, context) => { + return take(output, { + ConsumedCapacity: (_) => de_ConsumedCapacityMultiple(_, context), + ItemCollectionMetrics: (_) => de_ItemCollectionMetricsPerTable(_, context), + UnprocessedItems: (_) => de_BatchWriteItemRequestMap(_, context), + }); +}; +const de_BatchWriteItemRequestMap = (output, context) => { + return Object.entries(output).reduce((acc, [key, value]) => { + if (value === null) { + return acc; + } + acc[key] = de_WriteRequests(value, context); + return acc; + }, {}); +}; +const de_BillingModeSummary = (output, context) => { + return take(output, { + BillingMode: __expectString, + LastUpdateToPayPerRequestDateTime: (_) => __expectNonNull(__parseEpochTimestamp(__expectNumber(_))), + }); +}; +const de_BinarySetAttributeValue = (output, context) => { + const retVal = (output || []) + .filter((e) => e != null) + .map((entry) => { + return context.base64Decoder(entry); + }); + return retVal; +}; +const de_CancellationReason = (output, context) => { + return take(output, { + Code: __expectString, + Item: (_) => de_AttributeMap(_, context), + Message: __expectString, + }); +}; +const de_CancellationReasonList = (output, context) => { + const retVal = (output || []) + .filter((e) => e != null) + .map((entry) => { + return de_CancellationReason(entry, context); + }); + return retVal; +}; +const de_Capacity = (output, context) 
=> { + return take(output, { + CapacityUnits: __limitedParseDouble, + ReadCapacityUnits: __limitedParseDouble, + WriteCapacityUnits: __limitedParseDouble, + }); +}; +const de_ConditionalCheckFailedException = (output, context) => { + return take(output, { + Item: (_) => de_AttributeMap(_, context), + message: __expectString, + }); +}; +const de_ConsumedCapacity = (output, context) => { + return take(output, { + CapacityUnits: __limitedParseDouble, + GlobalSecondaryIndexes: (_) => de_SecondaryIndexesCapacityMap(_, context), + LocalSecondaryIndexes: (_) => de_SecondaryIndexesCapacityMap(_, context), + ReadCapacityUnits: __limitedParseDouble, + Table: (_) => de_Capacity(_, context), + TableName: __expectString, + WriteCapacityUnits: __limitedParseDouble, + }); +}; +const de_ConsumedCapacityMultiple = (output, context) => { + const retVal = (output || []) + .filter((e) => e != null) + .map((entry) => { + return de_ConsumedCapacity(entry, context); + }); + return retVal; +}; +const de_ContinuousBackupsDescription = (output, context) => { + return take(output, { + ContinuousBackupsStatus: __expectString, + PointInTimeRecoveryDescription: (_) => de_PointInTimeRecoveryDescription(_, context), + }); +}; +const de_CreateBackupOutput = (output, context) => { + return take(output, { + BackupDetails: (_) => de_BackupDetails(_, context), + }); +}; +const de_CreateGlobalTableOutput = (output, context) => { + return take(output, { + GlobalTableDescription: (_) => de_GlobalTableDescription(_, context), + }); +}; +const de_CreateTableOutput = (output, context) => { + return take(output, { + TableDescription: (_) => de_TableDescription(_, context), + }); +}; +const de_DeleteBackupOutput = (output, context) => { + return take(output, { + BackupDescription: (_) => de_BackupDescription(_, context), + }); +}; +const de_DeleteItemOutput = (output, context) => { + return take(output, { + Attributes: (_) => de_AttributeMap(_, context), + ConsumedCapacity: (_) => de_ConsumedCapacity(_, 
context), + ItemCollectionMetrics: (_) => de_ItemCollectionMetrics(_, context), + }); +}; +const de_DeleteRequest = (output, context) => { + return take(output, { + Key: (_) => de_Key(_, context), + }); +}; +const de_DeleteTableOutput = (output, context) => { + return take(output, { + TableDescription: (_) => de_TableDescription(_, context), + }); +}; +const de_DescribeBackupOutput = (output, context) => { + return take(output, { + BackupDescription: (_) => de_BackupDescription(_, context), + }); +}; +const de_DescribeContinuousBackupsOutput = (output, context) => { + return take(output, { + ContinuousBackupsDescription: (_) => de_ContinuousBackupsDescription(_, context), + }); +}; +const de_DescribeContributorInsightsOutput = (output, context) => { + return take(output, { + ContributorInsightsRuleList: _json, + ContributorInsightsStatus: __expectString, + FailureException: _json, + IndexName: __expectString, + LastUpdateDateTime: (_) => __expectNonNull(__parseEpochTimestamp(__expectNumber(_))), + TableName: __expectString, + }); +}; +const de_DescribeExportOutput = (output, context) => { + return take(output, { + ExportDescription: (_) => de_ExportDescription(_, context), + }); +}; +const de_DescribeGlobalTableOutput = (output, context) => { + return take(output, { + GlobalTableDescription: (_) => de_GlobalTableDescription(_, context), + }); +}; +const de_DescribeGlobalTableSettingsOutput = (output, context) => { + return take(output, { + GlobalTableName: __expectString, + ReplicaSettings: (_) => de_ReplicaSettingsDescriptionList(_, context), + }); +}; +const de_DescribeImportOutput = (output, context) => { + return take(output, { + ImportTableDescription: (_) => de_ImportTableDescription(_, context), + }); +}; +const de_DescribeTableOutput = (output, context) => { + return take(output, { + Table: (_) => de_TableDescription(_, context), + }); +}; +const de_DescribeTableReplicaAutoScalingOutput = (output, context) => { + return take(output, { + 
TableAutoScalingDescription: (_) => de_TableAutoScalingDescription(_, context), + }); +}; +const de_ExecuteStatementOutput = (output, context) => { + return take(output, { + ConsumedCapacity: (_) => de_ConsumedCapacity(_, context), + Items: (_) => de_ItemList(_, context), + LastEvaluatedKey: (_) => de_Key(_, context), + NextToken: __expectString, + }); +}; +const de_ExecuteTransactionOutput = (output, context) => { + return take(output, { + ConsumedCapacity: (_) => de_ConsumedCapacityMultiple(_, context), + Responses: (_) => de_ItemResponseList(_, context), + }); +}; +const de_ExportDescription = (output, context) => { + return take(output, { + BilledSizeBytes: __expectLong, + ClientToken: __expectString, + EndTime: (_) => __expectNonNull(__parseEpochTimestamp(__expectNumber(_))), + ExportArn: __expectString, + ExportFormat: __expectString, + ExportManifest: __expectString, + ExportStatus: __expectString, + ExportTime: (_) => __expectNonNull(__parseEpochTimestamp(__expectNumber(_))), + ExportType: __expectString, + FailureCode: __expectString, + FailureMessage: __expectString, + IncrementalExportSpecification: (_) => de_IncrementalExportSpecification(_, context), + ItemCount: __expectLong, + S3Bucket: __expectString, + S3BucketOwner: __expectString, + S3Prefix: __expectString, + S3SseAlgorithm: __expectString, + S3SseKmsKeyId: __expectString, + StartTime: (_) => __expectNonNull(__parseEpochTimestamp(__expectNumber(_))), + TableArn: __expectString, + TableId: __expectString, + }); +}; +const de_ExportTableToPointInTimeOutput = (output, context) => { + return take(output, { + ExportDescription: (_) => de_ExportDescription(_, context), + }); +}; +const de_GetItemOutput = (output, context) => { + return take(output, { + ConsumedCapacity: (_) => de_ConsumedCapacity(_, context), + Item: (_) => de_AttributeMap(_, context), + }); +}; +const de_GlobalSecondaryIndexDescription = (output, context) => { + return take(output, { + Backfilling: __expectBoolean, + IndexArn: 
__expectString, + IndexName: __expectString, + IndexSizeBytes: __expectLong, + IndexStatus: __expectString, + ItemCount: __expectLong, + KeySchema: _json, + OnDemandThroughput: _json, + Projection: _json, + ProvisionedThroughput: (_) => de_ProvisionedThroughputDescription(_, context), + WarmThroughput: _json, + }); +}; +const de_GlobalSecondaryIndexDescriptionList = (output, context) => { + const retVal = (output || []) + .filter((e) => e != null) + .map((entry) => { + return de_GlobalSecondaryIndexDescription(entry, context); + }); + return retVal; +}; +const de_GlobalTableDescription = (output, context) => { + return take(output, { + CreationDateTime: (_) => __expectNonNull(__parseEpochTimestamp(__expectNumber(_))), + GlobalTableArn: __expectString, + GlobalTableName: __expectString, + GlobalTableStatus: __expectString, + ReplicationGroup: (_) => de_ReplicaDescriptionList(_, context), + }); +}; +const de_ImportSummary = (output, context) => { + return take(output, { + CloudWatchLogGroupArn: __expectString, + EndTime: (_) => __expectNonNull(__parseEpochTimestamp(__expectNumber(_))), + ImportArn: __expectString, + ImportStatus: __expectString, + InputFormat: __expectString, + S3BucketSource: _json, + StartTime: (_) => __expectNonNull(__parseEpochTimestamp(__expectNumber(_))), + TableArn: __expectString, + }); +}; +const de_ImportSummaryList = (output, context) => { + const retVal = (output || []) + .filter((e) => e != null) + .map((entry) => { + return de_ImportSummary(entry, context); + }); + return retVal; +}; +const de_ImportTableDescription = (output, context) => { + return take(output, { + ClientToken: __expectString, + CloudWatchLogGroupArn: __expectString, + EndTime: (_) => __expectNonNull(__parseEpochTimestamp(__expectNumber(_))), + ErrorCount: __expectLong, + FailureCode: __expectString, + FailureMessage: __expectString, + ImportArn: __expectString, + ImportStatus: __expectString, + ImportedItemCount: __expectLong, + InputCompressionType: __expectString, + 
InputFormat: __expectString, + InputFormatOptions: _json, + ProcessedItemCount: __expectLong, + ProcessedSizeBytes: __expectLong, + S3BucketSource: _json, + StartTime: (_) => __expectNonNull(__parseEpochTimestamp(__expectNumber(_))), + TableArn: __expectString, + TableCreationParameters: _json, + TableId: __expectString, + }); +}; +const de_ImportTableOutput = (output, context) => { + return take(output, { + ImportTableDescription: (_) => de_ImportTableDescription(_, context), + }); +}; +const de_IncrementalExportSpecification = (output, context) => { + return take(output, { + ExportFromTime: (_) => __expectNonNull(__parseEpochTimestamp(__expectNumber(_))), + ExportToTime: (_) => __expectNonNull(__parseEpochTimestamp(__expectNumber(_))), + ExportViewType: __expectString, + }); +}; +const de_ItemCollectionKeyAttributeMap = (output, context) => { + return Object.entries(output).reduce((acc, [key, value]) => { + if (value === null) { + return acc; + } + acc[key] = de_AttributeValue(__expectUnion(value), context); + return acc; + }, {}); +}; +const de_ItemCollectionMetrics = (output, context) => { + return take(output, { + ItemCollectionKey: (_) => de_ItemCollectionKeyAttributeMap(_, context), + SizeEstimateRangeGB: (_) => de_ItemCollectionSizeEstimateRange(_, context), + }); +}; +const de_ItemCollectionMetricsMultiple = (output, context) => { + const retVal = (output || []) + .filter((e) => e != null) + .map((entry) => { + return de_ItemCollectionMetrics(entry, context); + }); + return retVal; +}; +const de_ItemCollectionMetricsPerTable = (output, context) => { + return Object.entries(output).reduce((acc, [key, value]) => { + if (value === null) { + return acc; + } + acc[key] = de_ItemCollectionMetricsMultiple(value, context); + return acc; + }, {}); +}; +const de_ItemCollectionSizeEstimateRange = (output, context) => { + const retVal = (output || []) + .filter((e) => e != null) + .map((entry) => { + return __limitedParseDouble(entry); + }); + return retVal; +}; 
+const de_ItemList = (output, context) => { + const retVal = (output || []) + .filter((e) => e != null) + .map((entry) => { + return de_AttributeMap(entry, context); + }); + return retVal; +}; +const de_ItemResponse = (output, context) => { + return take(output, { + Item: (_) => de_AttributeMap(_, context), + }); +}; +const de_ItemResponseList = (output, context) => { + const retVal = (output || []) + .filter((e) => e != null) + .map((entry) => { + return de_ItemResponse(entry, context); + }); + return retVal; +}; +const de_Key = (output, context) => { + return Object.entries(output).reduce((acc, [key, value]) => { + if (value === null) { + return acc; + } + acc[key] = de_AttributeValue(__expectUnion(value), context); + return acc; + }, {}); +}; +const de_KeyList = (output, context) => { + const retVal = (output || []) + .filter((e) => e != null) + .map((entry) => { + return de_Key(entry, context); + }); + return retVal; +}; +const de_KeysAndAttributes = (output, context) => { + return take(output, { + AttributesToGet: _json, + ConsistentRead: __expectBoolean, + ExpressionAttributeNames: _json, + Keys: (_) => de_KeyList(_, context), + ProjectionExpression: __expectString, + }); +}; +const de_ListAttributeValue = (output, context) => { + const retVal = (output || []) + .filter((e) => e != null) + .map((entry) => { + return de_AttributeValue(__expectUnion(entry), context); + }); + return retVal; +}; +const de_ListBackupsOutput = (output, context) => { + return take(output, { + BackupSummaries: (_) => de_BackupSummaries(_, context), + LastEvaluatedBackupArn: __expectString, + }); +}; +const de_ListImportsOutput = (output, context) => { + return take(output, { + ImportSummaryList: (_) => de_ImportSummaryList(_, context), + NextToken: __expectString, + }); +}; +const de_MapAttributeValue = (output, context) => { + return Object.entries(output).reduce((acc, [key, value]) => { + if (value === null) { + return acc; + } + acc[key] = de_AttributeValue(__expectUnion(value), 
context); + return acc; + }, {}); +}; +const de_PartiQLBatchResponse = (output, context) => { + const retVal = (output || []) + .filter((e) => e != null) + .map((entry) => { + return de_BatchStatementResponse(entry, context); + }); + return retVal; +}; +const de_PointInTimeRecoveryDescription = (output, context) => { + return take(output, { + EarliestRestorableDateTime: (_) => __expectNonNull(__parseEpochTimestamp(__expectNumber(_))), + LatestRestorableDateTime: (_) => __expectNonNull(__parseEpochTimestamp(__expectNumber(_))), + PointInTimeRecoveryStatus: __expectString, + RecoveryPeriodInDays: __expectInt32, + }); +}; +const de_ProvisionedThroughputDescription = (output, context) => { + return take(output, { + LastDecreaseDateTime: (_) => __expectNonNull(__parseEpochTimestamp(__expectNumber(_))), + LastIncreaseDateTime: (_) => __expectNonNull(__parseEpochTimestamp(__expectNumber(_))), + NumberOfDecreasesToday: __expectLong, + ReadCapacityUnits: __expectLong, + WriteCapacityUnits: __expectLong, + }); +}; +const de_PutItemInputAttributeMap = (output, context) => { + return Object.entries(output).reduce((acc, [key, value]) => { + if (value === null) { + return acc; + } + acc[key] = de_AttributeValue(__expectUnion(value), context); + return acc; + }, {}); +}; +const de_PutItemOutput = (output, context) => { + return take(output, { + Attributes: (_) => de_AttributeMap(_, context), + ConsumedCapacity: (_) => de_ConsumedCapacity(_, context), + ItemCollectionMetrics: (_) => de_ItemCollectionMetrics(_, context), + }); +}; +const de_PutRequest = (output, context) => { + return take(output, { + Item: (_) => de_PutItemInputAttributeMap(_, context), + }); +}; +const de_QueryOutput = (output, context) => { + return take(output, { + ConsumedCapacity: (_) => de_ConsumedCapacity(_, context), + Count: __expectInt32, + Items: (_) => de_ItemList(_, context), + LastEvaluatedKey: (_) => de_Key(_, context), + ScannedCount: __expectInt32, + }); +}; +const de_ReplicaAutoScalingDescription 
= (output, context) => { + return take(output, { + GlobalSecondaryIndexes: (_) => de_ReplicaGlobalSecondaryIndexAutoScalingDescriptionList(_, context), + RegionName: __expectString, + ReplicaProvisionedReadCapacityAutoScalingSettings: (_) => de_AutoScalingSettingsDescription(_, context), + ReplicaProvisionedWriteCapacityAutoScalingSettings: (_) => de_AutoScalingSettingsDescription(_, context), + ReplicaStatus: __expectString, + }); +}; +const de_ReplicaAutoScalingDescriptionList = (output, context) => { + const retVal = (output || []) + .filter((e) => e != null) + .map((entry) => { + return de_ReplicaAutoScalingDescription(entry, context); + }); + return retVal; +}; +const de_ReplicaDescription = (output, context) => { + return take(output, { + GlobalSecondaryIndexes: _json, + KMSMasterKeyId: __expectString, + OnDemandThroughputOverride: _json, + ProvisionedThroughputOverride: _json, + RegionName: __expectString, + ReplicaInaccessibleDateTime: (_) => __expectNonNull(__parseEpochTimestamp(__expectNumber(_))), + ReplicaStatus: __expectString, + ReplicaStatusDescription: __expectString, + ReplicaStatusPercentProgress: __expectString, + ReplicaTableClassSummary: (_) => de_TableClassSummary(_, context), + WarmThroughput: _json, + }); +}; +const de_ReplicaDescriptionList = (output, context) => { + const retVal = (output || []) + .filter((e) => e != null) + .map((entry) => { + return de_ReplicaDescription(entry, context); + }); + return retVal; +}; +const de_ReplicaGlobalSecondaryIndexAutoScalingDescription = (output, context) => { + return take(output, { + IndexName: __expectString, + IndexStatus: __expectString, + ProvisionedReadCapacityAutoScalingSettings: (_) => de_AutoScalingSettingsDescription(_, context), + ProvisionedWriteCapacityAutoScalingSettings: (_) => de_AutoScalingSettingsDescription(_, context), + }); +}; +const de_ReplicaGlobalSecondaryIndexAutoScalingDescriptionList = (output, context) => { + const retVal = (output || []) + .filter((e) => e != null) + 
.map((entry) => { + return de_ReplicaGlobalSecondaryIndexAutoScalingDescription(entry, context); + }); + return retVal; +}; +const de_ReplicaGlobalSecondaryIndexSettingsDescription = (output, context) => { + return take(output, { + IndexName: __expectString, + IndexStatus: __expectString, + ProvisionedReadCapacityAutoScalingSettings: (_) => de_AutoScalingSettingsDescription(_, context), + ProvisionedReadCapacityUnits: __expectLong, + ProvisionedWriteCapacityAutoScalingSettings: (_) => de_AutoScalingSettingsDescription(_, context), + ProvisionedWriteCapacityUnits: __expectLong, + }); +}; +const de_ReplicaGlobalSecondaryIndexSettingsDescriptionList = (output, context) => { + const retVal = (output || []) + .filter((e) => e != null) + .map((entry) => { + return de_ReplicaGlobalSecondaryIndexSettingsDescription(entry, context); + }); + return retVal; +}; +const de_ReplicaSettingsDescription = (output, context) => { + return take(output, { + RegionName: __expectString, + ReplicaBillingModeSummary: (_) => de_BillingModeSummary(_, context), + ReplicaGlobalSecondaryIndexSettings: (_) => de_ReplicaGlobalSecondaryIndexSettingsDescriptionList(_, context), + ReplicaProvisionedReadCapacityAutoScalingSettings: (_) => de_AutoScalingSettingsDescription(_, context), + ReplicaProvisionedReadCapacityUnits: __expectLong, + ReplicaProvisionedWriteCapacityAutoScalingSettings: (_) => de_AutoScalingSettingsDescription(_, context), + ReplicaProvisionedWriteCapacityUnits: __expectLong, + ReplicaStatus: __expectString, + ReplicaTableClassSummary: (_) => de_TableClassSummary(_, context), + }); +}; +const de_ReplicaSettingsDescriptionList = (output, context) => { + const retVal = (output || []) + .filter((e) => e != null) + .map((entry) => { + return de_ReplicaSettingsDescription(entry, context); + }); + return retVal; +}; +const de_RestoreSummary = (output, context) => { + return take(output, { + RestoreDateTime: (_) => __expectNonNull(__parseEpochTimestamp(__expectNumber(_))), + 
RestoreInProgress: __expectBoolean, + SourceBackupArn: __expectString, + SourceTableArn: __expectString, + }); +}; +const de_RestoreTableFromBackupOutput = (output, context) => { + return take(output, { + TableDescription: (_) => de_TableDescription(_, context), + }); +}; +const de_RestoreTableToPointInTimeOutput = (output, context) => { + return take(output, { + TableDescription: (_) => de_TableDescription(_, context), + }); +}; +const de_ScanOutput = (output, context) => { + return take(output, { + ConsumedCapacity: (_) => de_ConsumedCapacity(_, context), + Count: __expectInt32, + Items: (_) => de_ItemList(_, context), + LastEvaluatedKey: (_) => de_Key(_, context), + ScannedCount: __expectInt32, + }); +}; +const de_SecondaryIndexesCapacityMap = (output, context) => { + return Object.entries(output).reduce((acc, [key, value]) => { + if (value === null) { + return acc; + } + acc[key] = de_Capacity(value, context); + return acc; + }, {}); +}; +const de_SourceTableDetails = (output, context) => { + return take(output, { + BillingMode: __expectString, + ItemCount: __expectLong, + KeySchema: _json, + OnDemandThroughput: _json, + ProvisionedThroughput: _json, + TableArn: __expectString, + TableCreationDateTime: (_) => __expectNonNull(__parseEpochTimestamp(__expectNumber(_))), + TableId: __expectString, + TableName: __expectString, + TableSizeBytes: __expectLong, + }); +}; +const de_SourceTableFeatureDetails = (output, context) => { + return take(output, { + GlobalSecondaryIndexes: _json, + LocalSecondaryIndexes: _json, + SSEDescription: (_) => de_SSEDescription(_, context), + StreamDescription: _json, + TimeToLiveDescription: _json, + }); +}; +const de_SSEDescription = (output, context) => { + return take(output, { + InaccessibleEncryptionDateTime: (_) => __expectNonNull(__parseEpochTimestamp(__expectNumber(_))), + KMSMasterKeyArn: __expectString, + SSEType: __expectString, + Status: __expectString, + }); +}; +const de_TableAutoScalingDescription = (output, context) => 
{ + return take(output, { + Replicas: (_) => de_ReplicaAutoScalingDescriptionList(_, context), + TableName: __expectString, + TableStatus: __expectString, + }); +}; +const de_TableClassSummary = (output, context) => { + return take(output, { + LastUpdateDateTime: (_) => __expectNonNull(__parseEpochTimestamp(__expectNumber(_))), + TableClass: __expectString, + }); +}; +const de_TableDescription = (output, context) => { + return take(output, { + ArchivalSummary: (_) => de_ArchivalSummary(_, context), + AttributeDefinitions: _json, + BillingModeSummary: (_) => de_BillingModeSummary(_, context), + CreationDateTime: (_) => __expectNonNull(__parseEpochTimestamp(__expectNumber(_))), + DeletionProtectionEnabled: __expectBoolean, + GlobalSecondaryIndexes: (_) => de_GlobalSecondaryIndexDescriptionList(_, context), + GlobalTableVersion: __expectString, + ItemCount: __expectLong, + KeySchema: _json, + LatestStreamArn: __expectString, + LatestStreamLabel: __expectString, + LocalSecondaryIndexes: _json, + MultiRegionConsistency: __expectString, + OnDemandThroughput: _json, + ProvisionedThroughput: (_) => de_ProvisionedThroughputDescription(_, context), + Replicas: (_) => de_ReplicaDescriptionList(_, context), + RestoreSummary: (_) => de_RestoreSummary(_, context), + SSEDescription: (_) => de_SSEDescription(_, context), + StreamSpecification: _json, + TableArn: __expectString, + TableClassSummary: (_) => de_TableClassSummary(_, context), + TableId: __expectString, + TableName: __expectString, + TableSizeBytes: __expectLong, + TableStatus: __expectString, + WarmThroughput: _json, + }); +}; +const de_TransactGetItemsOutput = (output, context) => { + return take(output, { + ConsumedCapacity: (_) => de_ConsumedCapacityMultiple(_, context), + Responses: (_) => de_ItemResponseList(_, context), + }); +}; +const de_TransactionCanceledException = (output, context) => { + return take(output, { + CancellationReasons: (_) => de_CancellationReasonList(_, context), + Message: __expectString, + 
}); +}; +const de_TransactWriteItemsOutput = (output, context) => { + return take(output, { + ConsumedCapacity: (_) => de_ConsumedCapacityMultiple(_, context), + ItemCollectionMetrics: (_) => de_ItemCollectionMetricsPerTable(_, context), + }); +}; +const de_UpdateContinuousBackupsOutput = (output, context) => { + return take(output, { + ContinuousBackupsDescription: (_) => de_ContinuousBackupsDescription(_, context), + }); +}; +const de_UpdateGlobalTableOutput = (output, context) => { + return take(output, { + GlobalTableDescription: (_) => de_GlobalTableDescription(_, context), + }); +}; +const de_UpdateGlobalTableSettingsOutput = (output, context) => { + return take(output, { + GlobalTableName: __expectString, + ReplicaSettings: (_) => de_ReplicaSettingsDescriptionList(_, context), + }); +}; +const de_UpdateItemOutput = (output, context) => { + return take(output, { + Attributes: (_) => de_AttributeMap(_, context), + ConsumedCapacity: (_) => de_ConsumedCapacity(_, context), + ItemCollectionMetrics: (_) => de_ItemCollectionMetrics(_, context), + }); +}; +const de_UpdateTableOutput = (output, context) => { + return take(output, { + TableDescription: (_) => de_TableDescription(_, context), + }); +}; +const de_UpdateTableReplicaAutoScalingOutput = (output, context) => { + return take(output, { + TableAutoScalingDescription: (_) => de_TableAutoScalingDescription(_, context), + }); +}; +const de_WriteRequest = (output, context) => { + return take(output, { + DeleteRequest: (_) => de_DeleteRequest(_, context), + PutRequest: (_) => de_PutRequest(_, context), + }); +}; +const de_WriteRequests = (output, context) => { + const retVal = (output || []) + .filter((e) => e != null) + .map((entry) => { + return de_WriteRequest(entry, context); + }); + return retVal; +}; +const deserializeMetadata = (output) => ({ + httpStatusCode: output.statusCode, + requestId: output.headers["x-amzn-requestid"] ?? output.headers["x-amzn-request-id"] ?? 
output.headers["x-amz-request-id"], + extendedRequestId: output.headers["x-amz-id-2"], + cfId: output.headers["x-amz-cf-id"], +}); +const collectBodyString = (streamBody, context) => collectBody(streamBody, context).then((body) => context.utf8Encoder(body)); +const throwDefaultError = withBaseException(__BaseException); +const buildHttpRpcRequest = async (context, headers, path, resolvedHostname, body) => { + const { hostname, protocol = "https", port, path: basePath } = await context.endpoint(); + const contents = { + protocol, + hostname, + port, + method: "POST", + path: basePath.endsWith("/") ? basePath.slice(0, -1) + path : basePath + path, + headers, + }; + if (resolvedHostname !== undefined) { + contents.hostname = resolvedHostname; + } + if (body !== undefined) { + contents.body = body; + } + return new __HttpRequest(contents); +}; +function sharedHeaders(operation) { + return { + "content-type": "application/x-amz-json-1.0", + "x-amz-target": `DynamoDB_20120810.${operation}`, + }; +} diff --git a/amplify/functions/downloadDocument/node_modules/@aws-sdk/client-dynamodb/dist-es/runtimeConfig.browser.js b/amplify/functions/downloadDocument/node_modules/@aws-sdk/client-dynamodb/dist-es/runtimeConfig.browser.js new file mode 100644 index 0000000..a29a02d --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@aws-sdk/client-dynamodb/dist-es/runtimeConfig.browser.js @@ -0,0 +1,37 @@ +import packageInfo from "../package.json"; +import { Sha256 } from "@aws-crypto/sha256-browser"; +import { DEFAULT_ACCOUNT_ID_ENDPOINT_MODE } from "@aws-sdk/core/account-id-endpoint"; +import { createDefaultUserAgentProvider } from "@aws-sdk/util-user-agent-browser"; +import { DEFAULT_USE_DUALSTACK_ENDPOINT, DEFAULT_USE_FIPS_ENDPOINT } from "@smithy/config-resolver"; +import { FetchHttpHandler as RequestHandler, streamCollector } from "@smithy/fetch-http-handler"; +import { invalidProvider } from "@smithy/invalid-dependency"; +import { calculateBodyLength } from 
"@smithy/util-body-length-browser"; +import { DEFAULT_MAX_ATTEMPTS, DEFAULT_RETRY_MODE } from "@smithy/util-retry"; +import { getRuntimeConfig as getSharedRuntimeConfig } from "./runtimeConfig.shared"; +import { loadConfigsForDefaultMode } from "@smithy/smithy-client"; +import { resolveDefaultsModeConfig } from "@smithy/util-defaults-mode-browser"; +export const getRuntimeConfig = (config) => { + const defaultsMode = resolveDefaultsModeConfig(config); + const defaultConfigProvider = () => defaultsMode().then(loadConfigsForDefaultMode); + const clientSharedValues = getSharedRuntimeConfig(config); + return { + ...clientSharedValues, + ...config, + runtime: "browser", + defaultsMode, + accountIdEndpointMode: config?.accountIdEndpointMode ?? (() => Promise.resolve(DEFAULT_ACCOUNT_ID_ENDPOINT_MODE)), + bodyLengthChecker: config?.bodyLengthChecker ?? calculateBodyLength, + credentialDefaultProvider: config?.credentialDefaultProvider ?? ((_) => () => Promise.reject(new Error("Credential is missing"))), + defaultUserAgentProvider: config?.defaultUserAgentProvider ?? + createDefaultUserAgentProvider({ serviceId: clientSharedValues.serviceId, clientVersion: packageInfo.version }), + endpointDiscoveryEnabledProvider: config?.endpointDiscoveryEnabledProvider ?? (() => Promise.resolve(undefined)), + maxAttempts: config?.maxAttempts ?? DEFAULT_MAX_ATTEMPTS, + region: config?.region ?? invalidProvider("Region is missing"), + requestHandler: RequestHandler.create(config?.requestHandler ?? defaultConfigProvider), + retryMode: config?.retryMode ?? (async () => (await defaultConfigProvider()).retryMode || DEFAULT_RETRY_MODE), + sha256: config?.sha256 ?? Sha256, + streamCollector: config?.streamCollector ?? streamCollector, + useDualstackEndpoint: config?.useDualstackEndpoint ?? (() => Promise.resolve(DEFAULT_USE_DUALSTACK_ENDPOINT)), + useFipsEndpoint: config?.useFipsEndpoint ?? 
(() => Promise.resolve(DEFAULT_USE_FIPS_ENDPOINT)), + }; +}; diff --git a/amplify/functions/downloadDocument/node_modules/@aws-sdk/client-dynamodb/dist-es/runtimeConfig.js b/amplify/functions/downloadDocument/node_modules/@aws-sdk/client-dynamodb/dist-es/runtimeConfig.js new file mode 100644 index 0000000..69898d1 --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@aws-sdk/client-dynamodb/dist-es/runtimeConfig.js @@ -0,0 +1,52 @@ +import packageInfo from "../package.json"; +import { NODE_AUTH_SCHEME_PREFERENCE_OPTIONS, emitWarningIfUnsupportedVersion as awsCheckVersion } from "@aws-sdk/core"; +import { NODE_ACCOUNT_ID_ENDPOINT_MODE_CONFIG_OPTIONS } from "@aws-sdk/core/account-id-endpoint"; +import { defaultProvider as credentialDefaultProvider } from "@aws-sdk/credential-provider-node"; +import { NODE_ENDPOINT_DISCOVERY_CONFIG_OPTIONS } from "@aws-sdk/middleware-endpoint-discovery"; +import { NODE_APP_ID_CONFIG_OPTIONS, createDefaultUserAgentProvider } from "@aws-sdk/util-user-agent-node"; +import { NODE_REGION_CONFIG_FILE_OPTIONS, NODE_REGION_CONFIG_OPTIONS, NODE_USE_DUALSTACK_ENDPOINT_CONFIG_OPTIONS, NODE_USE_FIPS_ENDPOINT_CONFIG_OPTIONS, } from "@smithy/config-resolver"; +import { Hash } from "@smithy/hash-node"; +import { NODE_MAX_ATTEMPT_CONFIG_OPTIONS, NODE_RETRY_MODE_CONFIG_OPTIONS } from "@smithy/middleware-retry"; +import { loadConfig as loadNodeConfig } from "@smithy/node-config-provider"; +import { NodeHttpHandler as RequestHandler, streamCollector } from "@smithy/node-http-handler"; +import { calculateBodyLength } from "@smithy/util-body-length-node"; +import { DEFAULT_RETRY_MODE } from "@smithy/util-retry"; +import { getRuntimeConfig as getSharedRuntimeConfig } from "./runtimeConfig.shared"; +import { loadConfigsForDefaultMode } from "@smithy/smithy-client"; +import { resolveDefaultsModeConfig } from "@smithy/util-defaults-mode-node"; +import { emitWarningIfUnsupportedVersion } from "@smithy/smithy-client"; +export const 
getRuntimeConfig = (config) => { + emitWarningIfUnsupportedVersion(process.version); + const defaultsMode = resolveDefaultsModeConfig(config); + const defaultConfigProvider = () => defaultsMode().then(loadConfigsForDefaultMode); + const clientSharedValues = getSharedRuntimeConfig(config); + awsCheckVersion(process.version); + const profileConfig = { profile: config?.profile }; + return { + ...clientSharedValues, + ...config, + runtime: "node", + defaultsMode, + accountIdEndpointMode: config?.accountIdEndpointMode ?? loadNodeConfig(NODE_ACCOUNT_ID_ENDPOINT_MODE_CONFIG_OPTIONS, profileConfig), + authSchemePreference: config?.authSchemePreference ?? loadNodeConfig(NODE_AUTH_SCHEME_PREFERENCE_OPTIONS, profileConfig), + bodyLengthChecker: config?.bodyLengthChecker ?? calculateBodyLength, + credentialDefaultProvider: config?.credentialDefaultProvider ?? credentialDefaultProvider, + defaultUserAgentProvider: config?.defaultUserAgentProvider ?? + createDefaultUserAgentProvider({ serviceId: clientSharedValues.serviceId, clientVersion: packageInfo.version }), + endpointDiscoveryEnabledProvider: config?.endpointDiscoveryEnabledProvider ?? loadNodeConfig(NODE_ENDPOINT_DISCOVERY_CONFIG_OPTIONS, profileConfig), + maxAttempts: config?.maxAttempts ?? loadNodeConfig(NODE_MAX_ATTEMPT_CONFIG_OPTIONS, config), + region: config?.region ?? + loadNodeConfig(NODE_REGION_CONFIG_OPTIONS, { ...NODE_REGION_CONFIG_FILE_OPTIONS, ...profileConfig }), + requestHandler: RequestHandler.create(config?.requestHandler ?? defaultConfigProvider), + retryMode: config?.retryMode ?? + loadNodeConfig({ + ...NODE_RETRY_MODE_CONFIG_OPTIONS, + default: async () => (await defaultConfigProvider()).retryMode || DEFAULT_RETRY_MODE, + }, config), + sha256: config?.sha256 ?? Hash.bind(null, "sha256"), + streamCollector: config?.streamCollector ?? streamCollector, + useDualstackEndpoint: config?.useDualstackEndpoint ?? 
loadNodeConfig(NODE_USE_DUALSTACK_ENDPOINT_CONFIG_OPTIONS, profileConfig), + useFipsEndpoint: config?.useFipsEndpoint ?? loadNodeConfig(NODE_USE_FIPS_ENDPOINT_CONFIG_OPTIONS, profileConfig), + userAgentAppId: config?.userAgentAppId ?? loadNodeConfig(NODE_APP_ID_CONFIG_OPTIONS, profileConfig), + }; +}; diff --git a/amplify/functions/downloadDocument/node_modules/@aws-sdk/client-dynamodb/dist-es/runtimeConfig.native.js b/amplify/functions/downloadDocument/node_modules/@aws-sdk/client-dynamodb/dist-es/runtimeConfig.native.js new file mode 100644 index 0000000..0b54695 --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@aws-sdk/client-dynamodb/dist-es/runtimeConfig.native.js @@ -0,0 +1,11 @@ +import { Sha256 } from "@aws-crypto/sha256-js"; +import { getRuntimeConfig as getBrowserRuntimeConfig } from "./runtimeConfig.browser"; +export const getRuntimeConfig = (config) => { + const browserDefaults = getBrowserRuntimeConfig(config); + return { + ...browserDefaults, + ...config, + runtime: "react-native", + sha256: config?.sha256 ?? 
Sha256, + }; +}; diff --git a/amplify/functions/downloadDocument/node_modules/@aws-sdk/client-dynamodb/dist-es/runtimeConfig.shared.js b/amplify/functions/downloadDocument/node_modules/@aws-sdk/client-dynamodb/dist-es/runtimeConfig.shared.js new file mode 100644 index 0000000..ee4ffa7 --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@aws-sdk/client-dynamodb/dist-es/runtimeConfig.shared.js @@ -0,0 +1,30 @@ +import { AwsSdkSigV4Signer } from "@aws-sdk/core"; +import { NoOpLogger } from "@smithy/smithy-client"; +import { parseUrl } from "@smithy/url-parser"; +import { fromBase64, toBase64 } from "@smithy/util-base64"; +import { fromUtf8, toUtf8 } from "@smithy/util-utf8"; +import { defaultDynamoDBHttpAuthSchemeProvider } from "./auth/httpAuthSchemeProvider"; +import { defaultEndpointResolver } from "./endpoint/endpointResolver"; +export const getRuntimeConfig = (config) => { + return { + apiVersion: "2012-08-10", + base64Decoder: config?.base64Decoder ?? fromBase64, + base64Encoder: config?.base64Encoder ?? toBase64, + disableHostPrefix: config?.disableHostPrefix ?? false, + endpointProvider: config?.endpointProvider ?? defaultEndpointResolver, + extensions: config?.extensions ?? [], + httpAuthSchemeProvider: config?.httpAuthSchemeProvider ?? defaultDynamoDBHttpAuthSchemeProvider, + httpAuthSchemes: config?.httpAuthSchemes ?? [ + { + schemeId: "aws.auth#sigv4", + identityProvider: (ipc) => ipc.getIdentityProvider("aws.auth#sigv4"), + signer: new AwsSdkSigV4Signer(), + }, + ], + logger: config?.logger ?? new NoOpLogger(), + serviceId: config?.serviceId ?? "DynamoDB", + urlParser: config?.urlParser ?? parseUrl, + utf8Decoder: config?.utf8Decoder ?? fromUtf8, + utf8Encoder: config?.utf8Encoder ?? 
toUtf8, + }; +}; diff --git a/amplify/functions/downloadDocument/node_modules/@aws-sdk/client-dynamodb/dist-es/runtimeExtensions.js b/amplify/functions/downloadDocument/node_modules/@aws-sdk/client-dynamodb/dist-es/runtimeExtensions.js new file mode 100644 index 0000000..5b29695 --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@aws-sdk/client-dynamodb/dist-es/runtimeExtensions.js @@ -0,0 +1,9 @@ +import { getAwsRegionExtensionConfiguration, resolveAwsRegionExtensionConfiguration, } from "@aws-sdk/region-config-resolver"; +import { getHttpHandlerExtensionConfiguration, resolveHttpHandlerRuntimeConfig } from "@smithy/protocol-http"; +import { getDefaultExtensionConfiguration, resolveDefaultRuntimeConfig } from "@smithy/smithy-client"; +import { getHttpAuthExtensionConfiguration, resolveHttpAuthRuntimeConfig } from "./auth/httpAuthExtensionConfiguration"; +export const resolveRuntimeExtensions = (runtimeConfig, extensions) => { + const extensionConfiguration = Object.assign(getAwsRegionExtensionConfiguration(runtimeConfig), getDefaultExtensionConfiguration(runtimeConfig), getHttpHandlerExtensionConfiguration(runtimeConfig), getHttpAuthExtensionConfiguration(runtimeConfig)); + extensions.forEach((extension) => extension.configure(extensionConfiguration)); + return Object.assign(runtimeConfig, resolveAwsRegionExtensionConfiguration(extensionConfiguration), resolveDefaultRuntimeConfig(extensionConfiguration), resolveHttpHandlerRuntimeConfig(extensionConfiguration), resolveHttpAuthRuntimeConfig(extensionConfiguration)); +}; diff --git a/amplify/functions/downloadDocument/node_modules/@aws-sdk/client-dynamodb/dist-es/waiters/index.js b/amplify/functions/downloadDocument/node_modules/@aws-sdk/client-dynamodb/dist-es/waiters/index.js new file mode 100644 index 0000000..a70fabd --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@aws-sdk/client-dynamodb/dist-es/waiters/index.js @@ -0,0 +1,2 @@ +export * from "./waitForTableExists"; +export * 
from "./waitForTableNotExists"; diff --git a/amplify/functions/downloadDocument/node_modules/@aws-sdk/client-dynamodb/dist-es/waiters/waitForTableExists.js b/amplify/functions/downloadDocument/node_modules/@aws-sdk/client-dynamodb/dist-es/waiters/waitForTableExists.js new file mode 100644 index 0000000..c6faadd --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@aws-sdk/client-dynamodb/dist-es/waiters/waitForTableExists.js @@ -0,0 +1,34 @@ +import { checkExceptions, createWaiter, WaiterState } from "@smithy/util-waiter"; +import { DescribeTableCommand } from "../commands/DescribeTableCommand"; +const checkState = async (client, input) => { + let reason; + try { + const result = await client.send(new DescribeTableCommand(input)); + reason = result; + try { + const returnComparator = () => { + return result.Table.TableStatus; + }; + if (returnComparator() === "ACTIVE") { + return { state: WaiterState.SUCCESS, reason }; + } + } + catch (e) { } + } + catch (exception) { + reason = exception; + if (exception.name && exception.name == "ResourceNotFoundException") { + return { state: WaiterState.RETRY, reason }; + } + } + return { state: WaiterState.RETRY, reason }; +}; +export const waitForTableExists = async (params, input) => { + const serviceDefaults = { minDelay: 20, maxDelay: 120 }; + return createWaiter({ ...serviceDefaults, ...params }, input, checkState); +}; +export const waitUntilTableExists = async (params, input) => { + const serviceDefaults = { minDelay: 20, maxDelay: 120 }; + const result = await createWaiter({ ...serviceDefaults, ...params }, input, checkState); + return checkExceptions(result); +}; diff --git a/amplify/functions/downloadDocument/node_modules/@aws-sdk/client-dynamodb/dist-es/waiters/waitForTableNotExists.js b/amplify/functions/downloadDocument/node_modules/@aws-sdk/client-dynamodb/dist-es/waiters/waitForTableNotExists.js new file mode 100644 index 0000000..b691c03 --- /dev/null +++ 
b/amplify/functions/downloadDocument/node_modules/@aws-sdk/client-dynamodb/dist-es/waiters/waitForTableNotExists.js @@ -0,0 +1,25 @@ +import { checkExceptions, createWaiter, WaiterState } from "@smithy/util-waiter"; +import { DescribeTableCommand } from "../commands/DescribeTableCommand"; +const checkState = async (client, input) => { + let reason; + try { + const result = await client.send(new DescribeTableCommand(input)); + reason = result; + } + catch (exception) { + reason = exception; + if (exception.name && exception.name == "ResourceNotFoundException") { + return { state: WaiterState.SUCCESS, reason }; + } + } + return { state: WaiterState.RETRY, reason }; +}; +export const waitForTableNotExists = async (params, input) => { + const serviceDefaults = { minDelay: 20, maxDelay: 120 }; + return createWaiter({ ...serviceDefaults, ...params }, input, checkState); +}; +export const waitUntilTableNotExists = async (params, input) => { + const serviceDefaults = { minDelay: 20, maxDelay: 120 }; + const result = await createWaiter({ ...serviceDefaults, ...params }, input, checkState); + return checkExceptions(result); +}; diff --git a/amplify/functions/downloadDocument/node_modules/@aws-sdk/client-dynamodb/dist-types/DynamoDB.d.ts b/amplify/functions/downloadDocument/node_modules/@aws-sdk/client-dynamodb/dist-types/DynamoDB.d.ts new file mode 100644 index 0000000..7ad45f0 --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@aws-sdk/client-dynamodb/dist-types/DynamoDB.d.ts @@ -0,0 +1,433 @@ +import { HttpHandlerOptions as __HttpHandlerOptions } from "@smithy/types"; +import { BatchExecuteStatementCommandInput, BatchExecuteStatementCommandOutput } from "./commands/BatchExecuteStatementCommand"; +import { BatchGetItemCommandInput, BatchGetItemCommandOutput } from "./commands/BatchGetItemCommand"; +import { BatchWriteItemCommandInput, BatchWriteItemCommandOutput } from "./commands/BatchWriteItemCommand"; +import { CreateBackupCommandInput, 
CreateBackupCommandOutput } from "./commands/CreateBackupCommand"; +import { CreateGlobalTableCommandInput, CreateGlobalTableCommandOutput } from "./commands/CreateGlobalTableCommand"; +import { CreateTableCommandInput, CreateTableCommandOutput } from "./commands/CreateTableCommand"; +import { DeleteBackupCommandInput, DeleteBackupCommandOutput } from "./commands/DeleteBackupCommand"; +import { DeleteItemCommandInput, DeleteItemCommandOutput } from "./commands/DeleteItemCommand"; +import { DeleteResourcePolicyCommandInput, DeleteResourcePolicyCommandOutput } from "./commands/DeleteResourcePolicyCommand"; +import { DeleteTableCommandInput, DeleteTableCommandOutput } from "./commands/DeleteTableCommand"; +import { DescribeBackupCommandInput, DescribeBackupCommandOutput } from "./commands/DescribeBackupCommand"; +import { DescribeContinuousBackupsCommandInput, DescribeContinuousBackupsCommandOutput } from "./commands/DescribeContinuousBackupsCommand"; +import { DescribeContributorInsightsCommandInput, DescribeContributorInsightsCommandOutput } from "./commands/DescribeContributorInsightsCommand"; +import { DescribeEndpointsCommandInput, DescribeEndpointsCommandOutput } from "./commands/DescribeEndpointsCommand"; +import { DescribeExportCommandInput, DescribeExportCommandOutput } from "./commands/DescribeExportCommand"; +import { DescribeGlobalTableCommandInput, DescribeGlobalTableCommandOutput } from "./commands/DescribeGlobalTableCommand"; +import { DescribeGlobalTableSettingsCommandInput, DescribeGlobalTableSettingsCommandOutput } from "./commands/DescribeGlobalTableSettingsCommand"; +import { DescribeImportCommandInput, DescribeImportCommandOutput } from "./commands/DescribeImportCommand"; +import { DescribeKinesisStreamingDestinationCommandInput, DescribeKinesisStreamingDestinationCommandOutput } from "./commands/DescribeKinesisStreamingDestinationCommand"; +import { DescribeLimitsCommandInput, DescribeLimitsCommandOutput } from "./commands/DescribeLimitsCommand"; 
+import { DescribeTableCommandInput, DescribeTableCommandOutput } from "./commands/DescribeTableCommand"; +import { DescribeTableReplicaAutoScalingCommandInput, DescribeTableReplicaAutoScalingCommandOutput } from "./commands/DescribeTableReplicaAutoScalingCommand"; +import { DescribeTimeToLiveCommandInput, DescribeTimeToLiveCommandOutput } from "./commands/DescribeTimeToLiveCommand"; +import { DisableKinesisStreamingDestinationCommandInput, DisableKinesisStreamingDestinationCommandOutput } from "./commands/DisableKinesisStreamingDestinationCommand"; +import { EnableKinesisStreamingDestinationCommandInput, EnableKinesisStreamingDestinationCommandOutput } from "./commands/EnableKinesisStreamingDestinationCommand"; +import { ExecuteStatementCommandInput, ExecuteStatementCommandOutput } from "./commands/ExecuteStatementCommand"; +import { ExecuteTransactionCommandInput, ExecuteTransactionCommandOutput } from "./commands/ExecuteTransactionCommand"; +import { ExportTableToPointInTimeCommandInput, ExportTableToPointInTimeCommandOutput } from "./commands/ExportTableToPointInTimeCommand"; +import { GetItemCommandInput, GetItemCommandOutput } from "./commands/GetItemCommand"; +import { GetResourcePolicyCommandInput, GetResourcePolicyCommandOutput } from "./commands/GetResourcePolicyCommand"; +import { ImportTableCommandInput, ImportTableCommandOutput } from "./commands/ImportTableCommand"; +import { ListBackupsCommandInput, ListBackupsCommandOutput } from "./commands/ListBackupsCommand"; +import { ListContributorInsightsCommandInput, ListContributorInsightsCommandOutput } from "./commands/ListContributorInsightsCommand"; +import { ListExportsCommandInput, ListExportsCommandOutput } from "./commands/ListExportsCommand"; +import { ListGlobalTablesCommandInput, ListGlobalTablesCommandOutput } from "./commands/ListGlobalTablesCommand"; +import { ListImportsCommandInput, ListImportsCommandOutput } from "./commands/ListImportsCommand"; +import { ListTablesCommandInput, 
ListTablesCommandOutput } from "./commands/ListTablesCommand"; +import { ListTagsOfResourceCommandInput, ListTagsOfResourceCommandOutput } from "./commands/ListTagsOfResourceCommand"; +import { PutItemCommandInput, PutItemCommandOutput } from "./commands/PutItemCommand"; +import { PutResourcePolicyCommandInput, PutResourcePolicyCommandOutput } from "./commands/PutResourcePolicyCommand"; +import { QueryCommandInput, QueryCommandOutput } from "./commands/QueryCommand"; +import { RestoreTableFromBackupCommandInput, RestoreTableFromBackupCommandOutput } from "./commands/RestoreTableFromBackupCommand"; +import { RestoreTableToPointInTimeCommandInput, RestoreTableToPointInTimeCommandOutput } from "./commands/RestoreTableToPointInTimeCommand"; +import { ScanCommandInput, ScanCommandOutput } from "./commands/ScanCommand"; +import { TagResourceCommandInput, TagResourceCommandOutput } from "./commands/TagResourceCommand"; +import { TransactGetItemsCommandInput, TransactGetItemsCommandOutput } from "./commands/TransactGetItemsCommand"; +import { TransactWriteItemsCommandInput, TransactWriteItemsCommandOutput } from "./commands/TransactWriteItemsCommand"; +import { UntagResourceCommandInput, UntagResourceCommandOutput } from "./commands/UntagResourceCommand"; +import { UpdateContinuousBackupsCommandInput, UpdateContinuousBackupsCommandOutput } from "./commands/UpdateContinuousBackupsCommand"; +import { UpdateContributorInsightsCommandInput, UpdateContributorInsightsCommandOutput } from "./commands/UpdateContributorInsightsCommand"; +import { UpdateGlobalTableCommandInput, UpdateGlobalTableCommandOutput } from "./commands/UpdateGlobalTableCommand"; +import { UpdateGlobalTableSettingsCommandInput, UpdateGlobalTableSettingsCommandOutput } from "./commands/UpdateGlobalTableSettingsCommand"; +import { UpdateItemCommandInput, UpdateItemCommandOutput } from "./commands/UpdateItemCommand"; +import { UpdateKinesisStreamingDestinationCommandInput, 
UpdateKinesisStreamingDestinationCommandOutput } from "./commands/UpdateKinesisStreamingDestinationCommand"; +import { UpdateTableCommandInput, UpdateTableCommandOutput } from "./commands/UpdateTableCommand"; +import { UpdateTableReplicaAutoScalingCommandInput, UpdateTableReplicaAutoScalingCommandOutput } from "./commands/UpdateTableReplicaAutoScalingCommand"; +import { UpdateTimeToLiveCommandInput, UpdateTimeToLiveCommandOutput } from "./commands/UpdateTimeToLiveCommand"; +import { DynamoDBClient } from "./DynamoDBClient"; +export interface DynamoDB { + /** + * @see {@link BatchExecuteStatementCommand} + */ + batchExecuteStatement(args: BatchExecuteStatementCommandInput, options?: __HttpHandlerOptions): Promise; + batchExecuteStatement(args: BatchExecuteStatementCommandInput, cb: (err: any, data?: BatchExecuteStatementCommandOutput) => void): void; + batchExecuteStatement(args: BatchExecuteStatementCommandInput, options: __HttpHandlerOptions, cb: (err: any, data?: BatchExecuteStatementCommandOutput) => void): void; + /** + * @see {@link BatchGetItemCommand} + */ + batchGetItem(args: BatchGetItemCommandInput, options?: __HttpHandlerOptions): Promise; + batchGetItem(args: BatchGetItemCommandInput, cb: (err: any, data?: BatchGetItemCommandOutput) => void): void; + batchGetItem(args: BatchGetItemCommandInput, options: __HttpHandlerOptions, cb: (err: any, data?: BatchGetItemCommandOutput) => void): void; + /** + * @see {@link BatchWriteItemCommand} + */ + batchWriteItem(args: BatchWriteItemCommandInput, options?: __HttpHandlerOptions): Promise; + batchWriteItem(args: BatchWriteItemCommandInput, cb: (err: any, data?: BatchWriteItemCommandOutput) => void): void; + batchWriteItem(args: BatchWriteItemCommandInput, options: __HttpHandlerOptions, cb: (err: any, data?: BatchWriteItemCommandOutput) => void): void; + /** + * @see {@link CreateBackupCommand} + */ + createBackup(args: CreateBackupCommandInput, options?: __HttpHandlerOptions): Promise; + createBackup(args: 
CreateBackupCommandInput, cb: (err: any, data?: CreateBackupCommandOutput) => void): void; + createBackup(args: CreateBackupCommandInput, options: __HttpHandlerOptions, cb: (err: any, data?: CreateBackupCommandOutput) => void): void; + /** + * @see {@link CreateGlobalTableCommand} + */ + createGlobalTable(args: CreateGlobalTableCommandInput, options?: __HttpHandlerOptions): Promise; + createGlobalTable(args: CreateGlobalTableCommandInput, cb: (err: any, data?: CreateGlobalTableCommandOutput) => void): void; + createGlobalTable(args: CreateGlobalTableCommandInput, options: __HttpHandlerOptions, cb: (err: any, data?: CreateGlobalTableCommandOutput) => void): void; + /** + * @see {@link CreateTableCommand} + */ + createTable(args: CreateTableCommandInput, options?: __HttpHandlerOptions): Promise; + createTable(args: CreateTableCommandInput, cb: (err: any, data?: CreateTableCommandOutput) => void): void; + createTable(args: CreateTableCommandInput, options: __HttpHandlerOptions, cb: (err: any, data?: CreateTableCommandOutput) => void): void; + /** + * @see {@link DeleteBackupCommand} + */ + deleteBackup(args: DeleteBackupCommandInput, options?: __HttpHandlerOptions): Promise; + deleteBackup(args: DeleteBackupCommandInput, cb: (err: any, data?: DeleteBackupCommandOutput) => void): void; + deleteBackup(args: DeleteBackupCommandInput, options: __HttpHandlerOptions, cb: (err: any, data?: DeleteBackupCommandOutput) => void): void; + /** + * @see {@link DeleteItemCommand} + */ + deleteItem(args: DeleteItemCommandInput, options?: __HttpHandlerOptions): Promise; + deleteItem(args: DeleteItemCommandInput, cb: (err: any, data?: DeleteItemCommandOutput) => void): void; + deleteItem(args: DeleteItemCommandInput, options: __HttpHandlerOptions, cb: (err: any, data?: DeleteItemCommandOutput) => void): void; + /** + * @see {@link DeleteResourcePolicyCommand} + */ + deleteResourcePolicy(args: DeleteResourcePolicyCommandInput, options?: __HttpHandlerOptions): Promise; + 
deleteResourcePolicy(args: DeleteResourcePolicyCommandInput, cb: (err: any, data?: DeleteResourcePolicyCommandOutput) => void): void; + deleteResourcePolicy(args: DeleteResourcePolicyCommandInput, options: __HttpHandlerOptions, cb: (err: any, data?: DeleteResourcePolicyCommandOutput) => void): void; + /** + * @see {@link DeleteTableCommand} + */ + deleteTable(args: DeleteTableCommandInput, options?: __HttpHandlerOptions): Promise; + deleteTable(args: DeleteTableCommandInput, cb: (err: any, data?: DeleteTableCommandOutput) => void): void; + deleteTable(args: DeleteTableCommandInput, options: __HttpHandlerOptions, cb: (err: any, data?: DeleteTableCommandOutput) => void): void; + /** + * @see {@link DescribeBackupCommand} + */ + describeBackup(args: DescribeBackupCommandInput, options?: __HttpHandlerOptions): Promise; + describeBackup(args: DescribeBackupCommandInput, cb: (err: any, data?: DescribeBackupCommandOutput) => void): void; + describeBackup(args: DescribeBackupCommandInput, options: __HttpHandlerOptions, cb: (err: any, data?: DescribeBackupCommandOutput) => void): void; + /** + * @see {@link DescribeContinuousBackupsCommand} + */ + describeContinuousBackups(args: DescribeContinuousBackupsCommandInput, options?: __HttpHandlerOptions): Promise; + describeContinuousBackups(args: DescribeContinuousBackupsCommandInput, cb: (err: any, data?: DescribeContinuousBackupsCommandOutput) => void): void; + describeContinuousBackups(args: DescribeContinuousBackupsCommandInput, options: __HttpHandlerOptions, cb: (err: any, data?: DescribeContinuousBackupsCommandOutput) => void): void; + /** + * @see {@link DescribeContributorInsightsCommand} + */ + describeContributorInsights(args: DescribeContributorInsightsCommandInput, options?: __HttpHandlerOptions): Promise; + describeContributorInsights(args: DescribeContributorInsightsCommandInput, cb: (err: any, data?: DescribeContributorInsightsCommandOutput) => void): void; + describeContributorInsights(args: 
DescribeContributorInsightsCommandInput, options: __HttpHandlerOptions, cb: (err: any, data?: DescribeContributorInsightsCommandOutput) => void): void; + /** + * @see {@link DescribeEndpointsCommand} + */ + describeEndpoints(): Promise; + describeEndpoints(args: DescribeEndpointsCommandInput, options?: __HttpHandlerOptions): Promise; + describeEndpoints(args: DescribeEndpointsCommandInput, cb: (err: any, data?: DescribeEndpointsCommandOutput) => void): void; + describeEndpoints(args: DescribeEndpointsCommandInput, options: __HttpHandlerOptions, cb: (err: any, data?: DescribeEndpointsCommandOutput) => void): void; + /** + * @see {@link DescribeExportCommand} + */ + describeExport(args: DescribeExportCommandInput, options?: __HttpHandlerOptions): Promise; + describeExport(args: DescribeExportCommandInput, cb: (err: any, data?: DescribeExportCommandOutput) => void): void; + describeExport(args: DescribeExportCommandInput, options: __HttpHandlerOptions, cb: (err: any, data?: DescribeExportCommandOutput) => void): void; + /** + * @see {@link DescribeGlobalTableCommand} + */ + describeGlobalTable(args: DescribeGlobalTableCommandInput, options?: __HttpHandlerOptions): Promise; + describeGlobalTable(args: DescribeGlobalTableCommandInput, cb: (err: any, data?: DescribeGlobalTableCommandOutput) => void): void; + describeGlobalTable(args: DescribeGlobalTableCommandInput, options: __HttpHandlerOptions, cb: (err: any, data?: DescribeGlobalTableCommandOutput) => void): void; + /** + * @see {@link DescribeGlobalTableSettingsCommand} + */ + describeGlobalTableSettings(args: DescribeGlobalTableSettingsCommandInput, options?: __HttpHandlerOptions): Promise; + describeGlobalTableSettings(args: DescribeGlobalTableSettingsCommandInput, cb: (err: any, data?: DescribeGlobalTableSettingsCommandOutput) => void): void; + describeGlobalTableSettings(args: DescribeGlobalTableSettingsCommandInput, options: __HttpHandlerOptions, cb: (err: any, data?: DescribeGlobalTableSettingsCommandOutput) => 
void): void; + /** + * @see {@link DescribeImportCommand} + */ + describeImport(args: DescribeImportCommandInput, options?: __HttpHandlerOptions): Promise; + describeImport(args: DescribeImportCommandInput, cb: (err: any, data?: DescribeImportCommandOutput) => void): void; + describeImport(args: DescribeImportCommandInput, options: __HttpHandlerOptions, cb: (err: any, data?: DescribeImportCommandOutput) => void): void; + /** + * @see {@link DescribeKinesisStreamingDestinationCommand} + */ + describeKinesisStreamingDestination(args: DescribeKinesisStreamingDestinationCommandInput, options?: __HttpHandlerOptions): Promise; + describeKinesisStreamingDestination(args: DescribeKinesisStreamingDestinationCommandInput, cb: (err: any, data?: DescribeKinesisStreamingDestinationCommandOutput) => void): void; + describeKinesisStreamingDestination(args: DescribeKinesisStreamingDestinationCommandInput, options: __HttpHandlerOptions, cb: (err: any, data?: DescribeKinesisStreamingDestinationCommandOutput) => void): void; + /** + * @see {@link DescribeLimitsCommand} + */ + describeLimits(): Promise; + describeLimits(args: DescribeLimitsCommandInput, options?: __HttpHandlerOptions): Promise; + describeLimits(args: DescribeLimitsCommandInput, cb: (err: any, data?: DescribeLimitsCommandOutput) => void): void; + describeLimits(args: DescribeLimitsCommandInput, options: __HttpHandlerOptions, cb: (err: any, data?: DescribeLimitsCommandOutput) => void): void; + /** + * @see {@link DescribeTableCommand} + */ + describeTable(args: DescribeTableCommandInput, options?: __HttpHandlerOptions): Promise; + describeTable(args: DescribeTableCommandInput, cb: (err: any, data?: DescribeTableCommandOutput) => void): void; + describeTable(args: DescribeTableCommandInput, options: __HttpHandlerOptions, cb: (err: any, data?: DescribeTableCommandOutput) => void): void; + /** + * @see {@link DescribeTableReplicaAutoScalingCommand} + */ + describeTableReplicaAutoScaling(args: 
DescribeTableReplicaAutoScalingCommandInput, options?: __HttpHandlerOptions): Promise; + describeTableReplicaAutoScaling(args: DescribeTableReplicaAutoScalingCommandInput, cb: (err: any, data?: DescribeTableReplicaAutoScalingCommandOutput) => void): void; + describeTableReplicaAutoScaling(args: DescribeTableReplicaAutoScalingCommandInput, options: __HttpHandlerOptions, cb: (err: any, data?: DescribeTableReplicaAutoScalingCommandOutput) => void): void; + /** + * @see {@link DescribeTimeToLiveCommand} + */ + describeTimeToLive(args: DescribeTimeToLiveCommandInput, options?: __HttpHandlerOptions): Promise; + describeTimeToLive(args: DescribeTimeToLiveCommandInput, cb: (err: any, data?: DescribeTimeToLiveCommandOutput) => void): void; + describeTimeToLive(args: DescribeTimeToLiveCommandInput, options: __HttpHandlerOptions, cb: (err: any, data?: DescribeTimeToLiveCommandOutput) => void): void; + /** + * @see {@link DisableKinesisStreamingDestinationCommand} + */ + disableKinesisStreamingDestination(args: DisableKinesisStreamingDestinationCommandInput, options?: __HttpHandlerOptions): Promise; + disableKinesisStreamingDestination(args: DisableKinesisStreamingDestinationCommandInput, cb: (err: any, data?: DisableKinesisStreamingDestinationCommandOutput) => void): void; + disableKinesisStreamingDestination(args: DisableKinesisStreamingDestinationCommandInput, options: __HttpHandlerOptions, cb: (err: any, data?: DisableKinesisStreamingDestinationCommandOutput) => void): void; + /** + * @see {@link EnableKinesisStreamingDestinationCommand} + */ + enableKinesisStreamingDestination(args: EnableKinesisStreamingDestinationCommandInput, options?: __HttpHandlerOptions): Promise; + enableKinesisStreamingDestination(args: EnableKinesisStreamingDestinationCommandInput, cb: (err: any, data?: EnableKinesisStreamingDestinationCommandOutput) => void): void; + enableKinesisStreamingDestination(args: EnableKinesisStreamingDestinationCommandInput, options: __HttpHandlerOptions, cb: (err: 
any, data?: EnableKinesisStreamingDestinationCommandOutput) => void): void; + /** + * @see {@link ExecuteStatementCommand} + */ + executeStatement(args: ExecuteStatementCommandInput, options?: __HttpHandlerOptions): Promise; + executeStatement(args: ExecuteStatementCommandInput, cb: (err: any, data?: ExecuteStatementCommandOutput) => void): void; + executeStatement(args: ExecuteStatementCommandInput, options: __HttpHandlerOptions, cb: (err: any, data?: ExecuteStatementCommandOutput) => void): void; + /** + * @see {@link ExecuteTransactionCommand} + */ + executeTransaction(args: ExecuteTransactionCommandInput, options?: __HttpHandlerOptions): Promise; + executeTransaction(args: ExecuteTransactionCommandInput, cb: (err: any, data?: ExecuteTransactionCommandOutput) => void): void; + executeTransaction(args: ExecuteTransactionCommandInput, options: __HttpHandlerOptions, cb: (err: any, data?: ExecuteTransactionCommandOutput) => void): void; + /** + * @see {@link ExportTableToPointInTimeCommand} + */ + exportTableToPointInTime(args: ExportTableToPointInTimeCommandInput, options?: __HttpHandlerOptions): Promise; + exportTableToPointInTime(args: ExportTableToPointInTimeCommandInput, cb: (err: any, data?: ExportTableToPointInTimeCommandOutput) => void): void; + exportTableToPointInTime(args: ExportTableToPointInTimeCommandInput, options: __HttpHandlerOptions, cb: (err: any, data?: ExportTableToPointInTimeCommandOutput) => void): void; + /** + * @see {@link GetItemCommand} + */ + getItem(args: GetItemCommandInput, options?: __HttpHandlerOptions): Promise; + getItem(args: GetItemCommandInput, cb: (err: any, data?: GetItemCommandOutput) => void): void; + getItem(args: GetItemCommandInput, options: __HttpHandlerOptions, cb: (err: any, data?: GetItemCommandOutput) => void): void; + /** + * @see {@link GetResourcePolicyCommand} + */ + getResourcePolicy(args: GetResourcePolicyCommandInput, options?: __HttpHandlerOptions): Promise; + getResourcePolicy(args: 
GetResourcePolicyCommandInput, cb: (err: any, data?: GetResourcePolicyCommandOutput) => void): void; + getResourcePolicy(args: GetResourcePolicyCommandInput, options: __HttpHandlerOptions, cb: (err: any, data?: GetResourcePolicyCommandOutput) => void): void; + /** + * @see {@link ImportTableCommand} + */ + importTable(args: ImportTableCommandInput, options?: __HttpHandlerOptions): Promise; + importTable(args: ImportTableCommandInput, cb: (err: any, data?: ImportTableCommandOutput) => void): void; + importTable(args: ImportTableCommandInput, options: __HttpHandlerOptions, cb: (err: any, data?: ImportTableCommandOutput) => void): void; + /** + * @see {@link ListBackupsCommand} + */ + listBackups(): Promise; + listBackups(args: ListBackupsCommandInput, options?: __HttpHandlerOptions): Promise; + listBackups(args: ListBackupsCommandInput, cb: (err: any, data?: ListBackupsCommandOutput) => void): void; + listBackups(args: ListBackupsCommandInput, options: __HttpHandlerOptions, cb: (err: any, data?: ListBackupsCommandOutput) => void): void; + /** + * @see {@link ListContributorInsightsCommand} + */ + listContributorInsights(): Promise; + listContributorInsights(args: ListContributorInsightsCommandInput, options?: __HttpHandlerOptions): Promise; + listContributorInsights(args: ListContributorInsightsCommandInput, cb: (err: any, data?: ListContributorInsightsCommandOutput) => void): void; + listContributorInsights(args: ListContributorInsightsCommandInput, options: __HttpHandlerOptions, cb: (err: any, data?: ListContributorInsightsCommandOutput) => void): void; + /** + * @see {@link ListExportsCommand} + */ + listExports(): Promise; + listExports(args: ListExportsCommandInput, options?: __HttpHandlerOptions): Promise; + listExports(args: ListExportsCommandInput, cb: (err: any, data?: ListExportsCommandOutput) => void): void; + listExports(args: ListExportsCommandInput, options: __HttpHandlerOptions, cb: (err: any, data?: ListExportsCommandOutput) => void): void; + /** + * 
@see {@link ListGlobalTablesCommand} + */ + listGlobalTables(): Promise; + listGlobalTables(args: ListGlobalTablesCommandInput, options?: __HttpHandlerOptions): Promise; + listGlobalTables(args: ListGlobalTablesCommandInput, cb: (err: any, data?: ListGlobalTablesCommandOutput) => void): void; + listGlobalTables(args: ListGlobalTablesCommandInput, options: __HttpHandlerOptions, cb: (err: any, data?: ListGlobalTablesCommandOutput) => void): void; + /** + * @see {@link ListImportsCommand} + */ + listImports(): Promise; + listImports(args: ListImportsCommandInput, options?: __HttpHandlerOptions): Promise; + listImports(args: ListImportsCommandInput, cb: (err: any, data?: ListImportsCommandOutput) => void): void; + listImports(args: ListImportsCommandInput, options: __HttpHandlerOptions, cb: (err: any, data?: ListImportsCommandOutput) => void): void; + /** + * @see {@link ListTablesCommand} + */ + listTables(): Promise; + listTables(args: ListTablesCommandInput, options?: __HttpHandlerOptions): Promise; + listTables(args: ListTablesCommandInput, cb: (err: any, data?: ListTablesCommandOutput) => void): void; + listTables(args: ListTablesCommandInput, options: __HttpHandlerOptions, cb: (err: any, data?: ListTablesCommandOutput) => void): void; + /** + * @see {@link ListTagsOfResourceCommand} + */ + listTagsOfResource(args: ListTagsOfResourceCommandInput, options?: __HttpHandlerOptions): Promise; + listTagsOfResource(args: ListTagsOfResourceCommandInput, cb: (err: any, data?: ListTagsOfResourceCommandOutput) => void): void; + listTagsOfResource(args: ListTagsOfResourceCommandInput, options: __HttpHandlerOptions, cb: (err: any, data?: ListTagsOfResourceCommandOutput) => void): void; + /** + * @see {@link PutItemCommand} + */ + putItem(args: PutItemCommandInput, options?: __HttpHandlerOptions): Promise; + putItem(args: PutItemCommandInput, cb: (err: any, data?: PutItemCommandOutput) => void): void; + putItem(args: PutItemCommandInput, options: __HttpHandlerOptions, cb: (err: 
any, data?: PutItemCommandOutput) => void): void; + /** + * @see {@link PutResourcePolicyCommand} + */ + putResourcePolicy(args: PutResourcePolicyCommandInput, options?: __HttpHandlerOptions): Promise; + putResourcePolicy(args: PutResourcePolicyCommandInput, cb: (err: any, data?: PutResourcePolicyCommandOutput) => void): void; + putResourcePolicy(args: PutResourcePolicyCommandInput, options: __HttpHandlerOptions, cb: (err: any, data?: PutResourcePolicyCommandOutput) => void): void; + /** + * @see {@link QueryCommand} + */ + query(args: QueryCommandInput, options?: __HttpHandlerOptions): Promise; + query(args: QueryCommandInput, cb: (err: any, data?: QueryCommandOutput) => void): void; + query(args: QueryCommandInput, options: __HttpHandlerOptions, cb: (err: any, data?: QueryCommandOutput) => void): void; + /** + * @see {@link RestoreTableFromBackupCommand} + */ + restoreTableFromBackup(args: RestoreTableFromBackupCommandInput, options?: __HttpHandlerOptions): Promise; + restoreTableFromBackup(args: RestoreTableFromBackupCommandInput, cb: (err: any, data?: RestoreTableFromBackupCommandOutput) => void): void; + restoreTableFromBackup(args: RestoreTableFromBackupCommandInput, options: __HttpHandlerOptions, cb: (err: any, data?: RestoreTableFromBackupCommandOutput) => void): void; + /** + * @see {@link RestoreTableToPointInTimeCommand} + */ + restoreTableToPointInTime(args: RestoreTableToPointInTimeCommandInput, options?: __HttpHandlerOptions): Promise; + restoreTableToPointInTime(args: RestoreTableToPointInTimeCommandInput, cb: (err: any, data?: RestoreTableToPointInTimeCommandOutput) => void): void; + restoreTableToPointInTime(args: RestoreTableToPointInTimeCommandInput, options: __HttpHandlerOptions, cb: (err: any, data?: RestoreTableToPointInTimeCommandOutput) => void): void; + /** + * @see {@link ScanCommand} + */ + scan(args: ScanCommandInput, options?: __HttpHandlerOptions): Promise; + scan(args: ScanCommandInput, cb: (err: any, data?: ScanCommandOutput) => 
void): void; + scan(args: ScanCommandInput, options: __HttpHandlerOptions, cb: (err: any, data?: ScanCommandOutput) => void): void; + /** + * @see {@link TagResourceCommand} + */ + tagResource(args: TagResourceCommandInput, options?: __HttpHandlerOptions): Promise; + tagResource(args: TagResourceCommandInput, cb: (err: any, data?: TagResourceCommandOutput) => void): void; + tagResource(args: TagResourceCommandInput, options: __HttpHandlerOptions, cb: (err: any, data?: TagResourceCommandOutput) => void): void; + /** + * @see {@link TransactGetItemsCommand} + */ + transactGetItems(args: TransactGetItemsCommandInput, options?: __HttpHandlerOptions): Promise; + transactGetItems(args: TransactGetItemsCommandInput, cb: (err: any, data?: TransactGetItemsCommandOutput) => void): void; + transactGetItems(args: TransactGetItemsCommandInput, options: __HttpHandlerOptions, cb: (err: any, data?: TransactGetItemsCommandOutput) => void): void; + /** + * @see {@link TransactWriteItemsCommand} + */ + transactWriteItems(args: TransactWriteItemsCommandInput, options?: __HttpHandlerOptions): Promise; + transactWriteItems(args: TransactWriteItemsCommandInput, cb: (err: any, data?: TransactWriteItemsCommandOutput) => void): void; + transactWriteItems(args: TransactWriteItemsCommandInput, options: __HttpHandlerOptions, cb: (err: any, data?: TransactWriteItemsCommandOutput) => void): void; + /** + * @see {@link UntagResourceCommand} + */ + untagResource(args: UntagResourceCommandInput, options?: __HttpHandlerOptions): Promise; + untagResource(args: UntagResourceCommandInput, cb: (err: any, data?: UntagResourceCommandOutput) => void): void; + untagResource(args: UntagResourceCommandInput, options: __HttpHandlerOptions, cb: (err: any, data?: UntagResourceCommandOutput) => void): void; + /** + * @see {@link UpdateContinuousBackupsCommand} + */ + updateContinuousBackups(args: UpdateContinuousBackupsCommandInput, options?: __HttpHandlerOptions): Promise; + updateContinuousBackups(args: 
UpdateContinuousBackupsCommandInput, cb: (err: any, data?: UpdateContinuousBackupsCommandOutput) => void): void; + updateContinuousBackups(args: UpdateContinuousBackupsCommandInput, options: __HttpHandlerOptions, cb: (err: any, data?: UpdateContinuousBackupsCommandOutput) => void): void; + /** + * @see {@link UpdateContributorInsightsCommand} + */ + updateContributorInsights(args: UpdateContributorInsightsCommandInput, options?: __HttpHandlerOptions): Promise; + updateContributorInsights(args: UpdateContributorInsightsCommandInput, cb: (err: any, data?: UpdateContributorInsightsCommandOutput) => void): void; + updateContributorInsights(args: UpdateContributorInsightsCommandInput, options: __HttpHandlerOptions, cb: (err: any, data?: UpdateContributorInsightsCommandOutput) => void): void; + /** + * @see {@link UpdateGlobalTableCommand} + */ + updateGlobalTable(args: UpdateGlobalTableCommandInput, options?: __HttpHandlerOptions): Promise; + updateGlobalTable(args: UpdateGlobalTableCommandInput, cb: (err: any, data?: UpdateGlobalTableCommandOutput) => void): void; + updateGlobalTable(args: UpdateGlobalTableCommandInput, options: __HttpHandlerOptions, cb: (err: any, data?: UpdateGlobalTableCommandOutput) => void): void; + /** + * @see {@link UpdateGlobalTableSettingsCommand} + */ + updateGlobalTableSettings(args: UpdateGlobalTableSettingsCommandInput, options?: __HttpHandlerOptions): Promise; + updateGlobalTableSettings(args: UpdateGlobalTableSettingsCommandInput, cb: (err: any, data?: UpdateGlobalTableSettingsCommandOutput) => void): void; + updateGlobalTableSettings(args: UpdateGlobalTableSettingsCommandInput, options: __HttpHandlerOptions, cb: (err: any, data?: UpdateGlobalTableSettingsCommandOutput) => void): void; + /** + * @see {@link UpdateItemCommand} + */ + updateItem(args: UpdateItemCommandInput, options?: __HttpHandlerOptions): Promise; + updateItem(args: UpdateItemCommandInput, cb: (err: any, data?: UpdateItemCommandOutput) => void): void; + updateItem(args: 
UpdateItemCommandInput, options: __HttpHandlerOptions, cb: (err: any, data?: UpdateItemCommandOutput) => void): void; + /** + * @see {@link UpdateKinesisStreamingDestinationCommand} + */ + updateKinesisStreamingDestination(args: UpdateKinesisStreamingDestinationCommandInput, options?: __HttpHandlerOptions): Promise; + updateKinesisStreamingDestination(args: UpdateKinesisStreamingDestinationCommandInput, cb: (err: any, data?: UpdateKinesisStreamingDestinationCommandOutput) => void): void; + updateKinesisStreamingDestination(args: UpdateKinesisStreamingDestinationCommandInput, options: __HttpHandlerOptions, cb: (err: any, data?: UpdateKinesisStreamingDestinationCommandOutput) => void): void; + /** + * @see {@link UpdateTableCommand} + */ + updateTable(args: UpdateTableCommandInput, options?: __HttpHandlerOptions): Promise; + updateTable(args: UpdateTableCommandInput, cb: (err: any, data?: UpdateTableCommandOutput) => void): void; + updateTable(args: UpdateTableCommandInput, options: __HttpHandlerOptions, cb: (err: any, data?: UpdateTableCommandOutput) => void): void; + /** + * @see {@link UpdateTableReplicaAutoScalingCommand} + */ + updateTableReplicaAutoScaling(args: UpdateTableReplicaAutoScalingCommandInput, options?: __HttpHandlerOptions): Promise; + updateTableReplicaAutoScaling(args: UpdateTableReplicaAutoScalingCommandInput, cb: (err: any, data?: UpdateTableReplicaAutoScalingCommandOutput) => void): void; + updateTableReplicaAutoScaling(args: UpdateTableReplicaAutoScalingCommandInput, options: __HttpHandlerOptions, cb: (err: any, data?: UpdateTableReplicaAutoScalingCommandOutput) => void): void; + /** + * @see {@link UpdateTimeToLiveCommand} + */ + updateTimeToLive(args: UpdateTimeToLiveCommandInput, options?: __HttpHandlerOptions): Promise; + updateTimeToLive(args: UpdateTimeToLiveCommandInput, cb: (err: any, data?: UpdateTimeToLiveCommandOutput) => void): void; + updateTimeToLive(args: UpdateTimeToLiveCommandInput, options: __HttpHandlerOptions, cb: (err: 
any, data?: UpdateTimeToLiveCommandOutput) => void): void; +} +/** + * Amazon DynamoDB + *

Amazon DynamoDB is a fully managed NoSQL database service that provides fast + * and predictable performance with seamless scalability. DynamoDB lets you + * offload the administrative burdens of operating and scaling a distributed database, so + * that you don't have to worry about hardware provisioning, setup and configuration, + * replication, software patching, or cluster scaling.

+ *

With DynamoDB, you can create database tables that can store and retrieve + * any amount of data, and serve any level of request traffic. You can scale up or scale + * down your tables' throughput capacity without downtime or performance degradation, and + * use the Amazon Web Services Management Console to monitor resource utilization and performance + * metrics.

+ *

DynamoDB automatically spreads the data and traffic for your tables over + * a sufficient number of servers to handle your throughput and storage requirements, while + * maintaining consistent and fast performance. All of your data is stored on solid state + * disks (SSDs) and automatically replicated across multiple Availability Zones in an + * Amazon Web Services Region, providing built-in high availability and data + * durability.

+ * @public + */ +export declare class DynamoDB extends DynamoDBClient implements DynamoDB { +} diff --git a/amplify/functions/downloadDocument/node_modules/@aws-sdk/client-dynamodb/dist-types/DynamoDBClient.d.ts b/amplify/functions/downloadDocument/node_modules/@aws-sdk/client-dynamodb/dist-types/DynamoDBClient.d.ts new file mode 100644 index 0000000..c508d62 --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@aws-sdk/client-dynamodb/dist-types/DynamoDBClient.d.ts @@ -0,0 +1,272 @@ +import { AccountIdEndpointMode, AccountIdEndpointModeInputConfig, AccountIdEndpointModeResolvedConfig } from "@aws-sdk/core/account-id-endpoint"; +import { EndpointDiscoveryInputConfig, EndpointDiscoveryResolvedConfig } from "@aws-sdk/middleware-endpoint-discovery"; +import { HostHeaderInputConfig, HostHeaderResolvedConfig } from "@aws-sdk/middleware-host-header"; +import { UserAgentInputConfig, UserAgentResolvedConfig } from "@aws-sdk/middleware-user-agent"; +import { RegionInputConfig, RegionResolvedConfig } from "@smithy/config-resolver"; +import { EndpointInputConfig, EndpointResolvedConfig } from "@smithy/middleware-endpoint"; +import { RetryInputConfig, RetryResolvedConfig } from "@smithy/middleware-retry"; +import { HttpHandlerUserInput as __HttpHandlerUserInput } from "@smithy/protocol-http"; +import { Client as __Client, DefaultsMode as __DefaultsMode, SmithyConfiguration as __SmithyConfiguration, SmithyResolvedConfiguration as __SmithyResolvedConfiguration } from "@smithy/smithy-client"; +import { AwsCredentialIdentityProvider, BodyLengthCalculator as __BodyLengthCalculator, CheckOptionalClientConfig as __CheckOptionalClientConfig, ChecksumConstructor as __ChecksumConstructor, Decoder as __Decoder, Encoder as __Encoder, HashConstructor as __HashConstructor, HttpHandlerOptions as __HttpHandlerOptions, Logger as __Logger, Provider as __Provider, Provider, StreamCollector as __StreamCollector, UrlParser as __UrlParser, UserAgent as __UserAgent } from 
"@smithy/types"; +import { HttpAuthSchemeInputConfig, HttpAuthSchemeResolvedConfig } from "./auth/httpAuthSchemeProvider"; +import { BatchExecuteStatementCommandInput, BatchExecuteStatementCommandOutput } from "./commands/BatchExecuteStatementCommand"; +import { BatchGetItemCommandInput, BatchGetItemCommandOutput } from "./commands/BatchGetItemCommand"; +import { BatchWriteItemCommandInput, BatchWriteItemCommandOutput } from "./commands/BatchWriteItemCommand"; +import { CreateBackupCommandInput, CreateBackupCommandOutput } from "./commands/CreateBackupCommand"; +import { CreateGlobalTableCommandInput, CreateGlobalTableCommandOutput } from "./commands/CreateGlobalTableCommand"; +import { CreateTableCommandInput, CreateTableCommandOutput } from "./commands/CreateTableCommand"; +import { DeleteBackupCommandInput, DeleteBackupCommandOutput } from "./commands/DeleteBackupCommand"; +import { DeleteItemCommandInput, DeleteItemCommandOutput } from "./commands/DeleteItemCommand"; +import { DeleteResourcePolicyCommandInput, DeleteResourcePolicyCommandOutput } from "./commands/DeleteResourcePolicyCommand"; +import { DeleteTableCommandInput, DeleteTableCommandOutput } from "./commands/DeleteTableCommand"; +import { DescribeBackupCommandInput, DescribeBackupCommandOutput } from "./commands/DescribeBackupCommand"; +import { DescribeContinuousBackupsCommandInput, DescribeContinuousBackupsCommandOutput } from "./commands/DescribeContinuousBackupsCommand"; +import { DescribeContributorInsightsCommandInput, DescribeContributorInsightsCommandOutput } from "./commands/DescribeContributorInsightsCommand"; +import { DescribeEndpointsCommandInput, DescribeEndpointsCommandOutput } from "./commands/DescribeEndpointsCommand"; +import { DescribeExportCommandInput, DescribeExportCommandOutput } from "./commands/DescribeExportCommand"; +import { DescribeGlobalTableCommandInput, DescribeGlobalTableCommandOutput } from "./commands/DescribeGlobalTableCommand"; +import { 
DescribeGlobalTableSettingsCommandInput, DescribeGlobalTableSettingsCommandOutput } from "./commands/DescribeGlobalTableSettingsCommand"; +import { DescribeImportCommandInput, DescribeImportCommandOutput } from "./commands/DescribeImportCommand"; +import { DescribeKinesisStreamingDestinationCommandInput, DescribeKinesisStreamingDestinationCommandOutput } from "./commands/DescribeKinesisStreamingDestinationCommand"; +import { DescribeLimitsCommandInput, DescribeLimitsCommandOutput } from "./commands/DescribeLimitsCommand"; +import { DescribeTableCommandInput, DescribeTableCommandOutput } from "./commands/DescribeTableCommand"; +import { DescribeTableReplicaAutoScalingCommandInput, DescribeTableReplicaAutoScalingCommandOutput } from "./commands/DescribeTableReplicaAutoScalingCommand"; +import { DescribeTimeToLiveCommandInput, DescribeTimeToLiveCommandOutput } from "./commands/DescribeTimeToLiveCommand"; +import { DisableKinesisStreamingDestinationCommandInput, DisableKinesisStreamingDestinationCommandOutput } from "./commands/DisableKinesisStreamingDestinationCommand"; +import { EnableKinesisStreamingDestinationCommandInput, EnableKinesisStreamingDestinationCommandOutput } from "./commands/EnableKinesisStreamingDestinationCommand"; +import { ExecuteStatementCommandInput, ExecuteStatementCommandOutput } from "./commands/ExecuteStatementCommand"; +import { ExecuteTransactionCommandInput, ExecuteTransactionCommandOutput } from "./commands/ExecuteTransactionCommand"; +import { ExportTableToPointInTimeCommandInput, ExportTableToPointInTimeCommandOutput } from "./commands/ExportTableToPointInTimeCommand"; +import { GetItemCommandInput, GetItemCommandOutput } from "./commands/GetItemCommand"; +import { GetResourcePolicyCommandInput, GetResourcePolicyCommandOutput } from "./commands/GetResourcePolicyCommand"; +import { ImportTableCommandInput, ImportTableCommandOutput } from "./commands/ImportTableCommand"; +import { ListBackupsCommandInput, ListBackupsCommandOutput } from 
"./commands/ListBackupsCommand"; +import { ListContributorInsightsCommandInput, ListContributorInsightsCommandOutput } from "./commands/ListContributorInsightsCommand"; +import { ListExportsCommandInput, ListExportsCommandOutput } from "./commands/ListExportsCommand"; +import { ListGlobalTablesCommandInput, ListGlobalTablesCommandOutput } from "./commands/ListGlobalTablesCommand"; +import { ListImportsCommandInput, ListImportsCommandOutput } from "./commands/ListImportsCommand"; +import { ListTablesCommandInput, ListTablesCommandOutput } from "./commands/ListTablesCommand"; +import { ListTagsOfResourceCommandInput, ListTagsOfResourceCommandOutput } from "./commands/ListTagsOfResourceCommand"; +import { PutItemCommandInput, PutItemCommandOutput } from "./commands/PutItemCommand"; +import { PutResourcePolicyCommandInput, PutResourcePolicyCommandOutput } from "./commands/PutResourcePolicyCommand"; +import { QueryCommandInput, QueryCommandOutput } from "./commands/QueryCommand"; +import { RestoreTableFromBackupCommandInput, RestoreTableFromBackupCommandOutput } from "./commands/RestoreTableFromBackupCommand"; +import { RestoreTableToPointInTimeCommandInput, RestoreTableToPointInTimeCommandOutput } from "./commands/RestoreTableToPointInTimeCommand"; +import { ScanCommandInput, ScanCommandOutput } from "./commands/ScanCommand"; +import { TagResourceCommandInput, TagResourceCommandOutput } from "./commands/TagResourceCommand"; +import { TransactGetItemsCommandInput, TransactGetItemsCommandOutput } from "./commands/TransactGetItemsCommand"; +import { TransactWriteItemsCommandInput, TransactWriteItemsCommandOutput } from "./commands/TransactWriteItemsCommand"; +import { UntagResourceCommandInput, UntagResourceCommandOutput } from "./commands/UntagResourceCommand"; +import { UpdateContinuousBackupsCommandInput, UpdateContinuousBackupsCommandOutput } from "./commands/UpdateContinuousBackupsCommand"; +import { UpdateContributorInsightsCommandInput, 
UpdateContributorInsightsCommandOutput } from "./commands/UpdateContributorInsightsCommand"; +import { UpdateGlobalTableCommandInput, UpdateGlobalTableCommandOutput } from "./commands/UpdateGlobalTableCommand"; +import { UpdateGlobalTableSettingsCommandInput, UpdateGlobalTableSettingsCommandOutput } from "./commands/UpdateGlobalTableSettingsCommand"; +import { UpdateItemCommandInput, UpdateItemCommandOutput } from "./commands/UpdateItemCommand"; +import { UpdateKinesisStreamingDestinationCommandInput, UpdateKinesisStreamingDestinationCommandOutput } from "./commands/UpdateKinesisStreamingDestinationCommand"; +import { UpdateTableCommandInput, UpdateTableCommandOutput } from "./commands/UpdateTableCommand"; +import { UpdateTableReplicaAutoScalingCommandInput, UpdateTableReplicaAutoScalingCommandOutput } from "./commands/UpdateTableReplicaAutoScalingCommand"; +import { UpdateTimeToLiveCommandInput, UpdateTimeToLiveCommandOutput } from "./commands/UpdateTimeToLiveCommand"; +import { ClientInputEndpointParameters, ClientResolvedEndpointParameters, EndpointParameters } from "./endpoint/EndpointParameters"; +import { RuntimeExtension, RuntimeExtensionsConfig } from "./runtimeExtensions"; +export { __Client }; +/** + * @public + */ +export type ServiceInputTypes = BatchExecuteStatementCommandInput | BatchGetItemCommandInput | BatchWriteItemCommandInput | CreateBackupCommandInput | CreateGlobalTableCommandInput | CreateTableCommandInput | DeleteBackupCommandInput | DeleteItemCommandInput | DeleteResourcePolicyCommandInput | DeleteTableCommandInput | DescribeBackupCommandInput | DescribeContinuousBackupsCommandInput | DescribeContributorInsightsCommandInput | DescribeEndpointsCommandInput | DescribeExportCommandInput | DescribeGlobalTableCommandInput | DescribeGlobalTableSettingsCommandInput | DescribeImportCommandInput | DescribeKinesisStreamingDestinationCommandInput | DescribeLimitsCommandInput | DescribeTableCommandInput | DescribeTableReplicaAutoScalingCommandInput | 
DescribeTimeToLiveCommandInput | DisableKinesisStreamingDestinationCommandInput | EnableKinesisStreamingDestinationCommandInput | ExecuteStatementCommandInput | ExecuteTransactionCommandInput | ExportTableToPointInTimeCommandInput | GetItemCommandInput | GetResourcePolicyCommandInput | ImportTableCommandInput | ListBackupsCommandInput | ListContributorInsightsCommandInput | ListExportsCommandInput | ListGlobalTablesCommandInput | ListImportsCommandInput | ListTablesCommandInput | ListTagsOfResourceCommandInput | PutItemCommandInput | PutResourcePolicyCommandInput | QueryCommandInput | RestoreTableFromBackupCommandInput | RestoreTableToPointInTimeCommandInput | ScanCommandInput | TagResourceCommandInput | TransactGetItemsCommandInput | TransactWriteItemsCommandInput | UntagResourceCommandInput | UpdateContinuousBackupsCommandInput | UpdateContributorInsightsCommandInput | UpdateGlobalTableCommandInput | UpdateGlobalTableSettingsCommandInput | UpdateItemCommandInput | UpdateKinesisStreamingDestinationCommandInput | UpdateTableCommandInput | UpdateTableReplicaAutoScalingCommandInput | UpdateTimeToLiveCommandInput; +/** + * @public + */ +export type ServiceOutputTypes = BatchExecuteStatementCommandOutput | BatchGetItemCommandOutput | BatchWriteItemCommandOutput | CreateBackupCommandOutput | CreateGlobalTableCommandOutput | CreateTableCommandOutput | DeleteBackupCommandOutput | DeleteItemCommandOutput | DeleteResourcePolicyCommandOutput | DeleteTableCommandOutput | DescribeBackupCommandOutput | DescribeContinuousBackupsCommandOutput | DescribeContributorInsightsCommandOutput | DescribeEndpointsCommandOutput | DescribeExportCommandOutput | DescribeGlobalTableCommandOutput | DescribeGlobalTableSettingsCommandOutput | DescribeImportCommandOutput | DescribeKinesisStreamingDestinationCommandOutput | DescribeLimitsCommandOutput | DescribeTableCommandOutput | DescribeTableReplicaAutoScalingCommandOutput | DescribeTimeToLiveCommandOutput | 
DisableKinesisStreamingDestinationCommandOutput | EnableKinesisStreamingDestinationCommandOutput | ExecuteStatementCommandOutput | ExecuteTransactionCommandOutput | ExportTableToPointInTimeCommandOutput | GetItemCommandOutput | GetResourcePolicyCommandOutput | ImportTableCommandOutput | ListBackupsCommandOutput | ListContributorInsightsCommandOutput | ListExportsCommandOutput | ListGlobalTablesCommandOutput | ListImportsCommandOutput | ListTablesCommandOutput | ListTagsOfResourceCommandOutput | PutItemCommandOutput | PutResourcePolicyCommandOutput | QueryCommandOutput | RestoreTableFromBackupCommandOutput | RestoreTableToPointInTimeCommandOutput | ScanCommandOutput | TagResourceCommandOutput | TransactGetItemsCommandOutput | TransactWriteItemsCommandOutput | UntagResourceCommandOutput | UpdateContinuousBackupsCommandOutput | UpdateContributorInsightsCommandOutput | UpdateGlobalTableCommandOutput | UpdateGlobalTableSettingsCommandOutput | UpdateItemCommandOutput | UpdateKinesisStreamingDestinationCommandOutput | UpdateTableCommandOutput | UpdateTableReplicaAutoScalingCommandOutput | UpdateTimeToLiveCommandOutput; +/** + * @public + */ +export interface ClientDefaults extends Partial<__SmithyConfiguration<__HttpHandlerOptions>> { + /** + * The HTTP handler to use or its constructor options. Fetch in browser and Https in Nodejs. + */ + requestHandler?: __HttpHandlerUserInput; + /** + * A constructor for a class implementing the {@link @smithy/types#ChecksumConstructor} interface + * that computes the SHA-256 HMAC or checksum of a string or binary buffer. + * @internal + */ + sha256?: __ChecksumConstructor | __HashConstructor; + /** + * The function that will be used to convert strings into HTTP endpoints. + * @internal + */ + urlParser?: __UrlParser; + /** + * A function that can calculate the length of a request body. + * @internal + */ + bodyLengthChecker?: __BodyLengthCalculator; + /** + * A function that converts a stream into an array of bytes. 
+ * @internal + */ + streamCollector?: __StreamCollector; + /** + * The function that will be used to convert a base64-encoded string to a byte array. + * @internal + */ + base64Decoder?: __Decoder; + /** + * The function that will be used to convert binary data to a base64-encoded string. + * @internal + */ + base64Encoder?: __Encoder; + /** + * The function that will be used to convert a UTF8-encoded string to a byte array. + * @internal + */ + utf8Decoder?: __Decoder; + /** + * The function that will be used to convert binary data to a UTF-8 encoded string. + * @internal + */ + utf8Encoder?: __Encoder; + /** + * The runtime environment. + * @internal + */ + runtime?: string; + /** + * Disable dynamically changing the endpoint of the client based on the hostPrefix + * trait of an operation. + */ + disableHostPrefix?: boolean; + /** + * Unique service identifier. + * @internal + */ + serviceId?: string; + /** + * Enables IPv6/IPv4 dualstack endpoint. + */ + useDualstackEndpoint?: boolean | __Provider; + /** + * Enables FIPS compatible endpoints. + */ + useFipsEndpoint?: boolean | __Provider; + /** + * The AWS region to which this client will send requests + */ + region?: string | __Provider; + /** + * Setting a client profile is similar to setting a value for the + * AWS_PROFILE environment variable. Setting a profile on a client + * in code only affects the single client instance, unlike AWS_PROFILE. + * + * When set, and only for environments where an AWS configuration + * file exists, fields configurable by this file will be retrieved + * from the specified profile within that file. + * Conflicting code configuration and environment variables will + * still have higher priority. + * + * For client credential resolution that involves checking the AWS + * configuration file, the client's profile (this value) will be + * used unless a different profile is set in the credential + * provider options. 
+ * + */ + profile?: string; + /** + * Defines if the AWS AccountId will be used for endpoint routing. + */ + accountIdEndpointMode?: AccountIdEndpointMode | __Provider; + /** + * The provider populating default tracking information to be sent with `user-agent`, `x-amz-user-agent` header + * @internal + */ + defaultUserAgentProvider?: Provider<__UserAgent>; + /** + * Default credentials provider; Not available in browser runtime. + * @deprecated + * @internal + */ + credentialDefaultProvider?: (input: any) => AwsCredentialIdentityProvider; + /** + * Value for how many times a request will be made at most in case of retry. + */ + maxAttempts?: number | __Provider; + /** + * Specifies which retry algorithm to use. + * @see https://docs.aws.amazon.com/AWSJavaScriptSDK/v3/latest/Package/-smithy-util-retry/Enum/RETRY_MODES/ + * + */ + retryMode?: string | __Provider; + /** + * Optional logger for logging debug/info/warn/error. + */ + logger?: __Logger; + /** + * Optional extensions + */ + extensions?: RuntimeExtension[]; + /** + * The {@link @smithy/smithy-client#DefaultsMode} that will be used to determine how certain default configuration options are resolved in the SDK. + */ + defaultsMode?: __DefaultsMode | __Provider<__DefaultsMode>; + /** + * The provider which populates default for endpointDiscoveryEnabled configuration, if it's + * not passed during client creation. + * @internal + */ + endpointDiscoveryEnabledProvider?: __Provider; +} +/** + * @public + */ +export type DynamoDBClientConfigType = Partial<__SmithyConfiguration<__HttpHandlerOptions>> & ClientDefaults & AccountIdEndpointModeInputConfig & UserAgentInputConfig & RetryInputConfig & RegionInputConfig & HostHeaderInputConfig & EndpointInputConfig & HttpAuthSchemeInputConfig & EndpointDiscoveryInputConfig & ClientInputEndpointParameters; +/** + * @public + * + * The configuration interface of DynamoDBClient class constructor that set the region, credentials and other options. 
+ */ +export interface DynamoDBClientConfig extends DynamoDBClientConfigType { +} +/** + * @public + */ +export type DynamoDBClientResolvedConfigType = __SmithyResolvedConfiguration<__HttpHandlerOptions> & Required & RuntimeExtensionsConfig & AccountIdEndpointModeResolvedConfig & UserAgentResolvedConfig & RetryResolvedConfig & RegionResolvedConfig & HostHeaderResolvedConfig & EndpointResolvedConfig & HttpAuthSchemeResolvedConfig & EndpointDiscoveryResolvedConfig & ClientResolvedEndpointParameters; +/** + * @public + * + * The resolved configuration interface of DynamoDBClient class. This is resolved and normalized from the {@link DynamoDBClientConfig | constructor configuration interface}. + */ +export interface DynamoDBClientResolvedConfig extends DynamoDBClientResolvedConfigType { +} +/** + * Amazon DynamoDB + *

Amazon DynamoDB is a fully managed NoSQL database service that provides fast + * and predictable performance with seamless scalability. DynamoDB lets you + * offload the administrative burdens of operating and scaling a distributed database, so + * that you don't have to worry about hardware provisioning, setup and configuration, + * replication, software patching, or cluster scaling.

+ *

With DynamoDB, you can create database tables that can store and retrieve + * any amount of data, and serve any level of request traffic. You can scale up or scale + * down your tables' throughput capacity without downtime or performance degradation, and + * use the Amazon Web Services Management Console to monitor resource utilization and performance + * metrics.

+ *

DynamoDB automatically spreads the data and traffic for your tables over + * a sufficient number of servers to handle your throughput and storage requirements, while + * maintaining consistent and fast performance. All of your data is stored on solid state + * disks (SSDs) and automatically replicated across multiple Availability Zones in an + * Amazon Web Services Region, providing built-in high availability and data + * durability.

+ * @public + */ +export declare class DynamoDBClient extends __Client<__HttpHandlerOptions, ServiceInputTypes, ServiceOutputTypes, DynamoDBClientResolvedConfig> { + /** + * The resolved configuration of DynamoDBClient class. This is resolved and normalized from the {@link DynamoDBClientConfig | constructor configuration interface}. + */ + readonly config: DynamoDBClientResolvedConfig; + constructor(...[configuration]: __CheckOptionalClientConfig); + /** + * Destroy underlying resources, like sockets. It's usually not necessary to do this. + * However in Node.js, it's best to explicitly shut down the client's agent when it is no longer needed. + * Otherwise, sockets might stay open for quite a long time before the server terminates them. + */ + destroy(): void; +} diff --git a/amplify/functions/downloadDocument/node_modules/@aws-sdk/client-dynamodb/dist-types/auth/httpAuthExtensionConfiguration.d.ts b/amplify/functions/downloadDocument/node_modules/@aws-sdk/client-dynamodb/dist-types/auth/httpAuthExtensionConfiguration.d.ts new file mode 100644 index 0000000..b2b8f76 --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@aws-sdk/client-dynamodb/dist-types/auth/httpAuthExtensionConfiguration.d.ts @@ -0,0 +1,29 @@ +import { AwsCredentialIdentity, AwsCredentialIdentityProvider, HttpAuthScheme } from "@smithy/types"; +import { DynamoDBHttpAuthSchemeProvider } from "./httpAuthSchemeProvider"; +/** + * @internal + */ +export interface HttpAuthExtensionConfiguration { + setHttpAuthScheme(httpAuthScheme: HttpAuthScheme): void; + httpAuthSchemes(): HttpAuthScheme[]; + setHttpAuthSchemeProvider(httpAuthSchemeProvider: DynamoDBHttpAuthSchemeProvider): void; + httpAuthSchemeProvider(): DynamoDBHttpAuthSchemeProvider; + setCredentials(credentials: AwsCredentialIdentity | AwsCredentialIdentityProvider): void; + credentials(): AwsCredentialIdentity | AwsCredentialIdentityProvider | undefined; +} +/** + * @internal + */ +export type HttpAuthRuntimeConfig = Partial<{ 
+ httpAuthSchemes: HttpAuthScheme[]; + httpAuthSchemeProvider: DynamoDBHttpAuthSchemeProvider; + credentials: AwsCredentialIdentity | AwsCredentialIdentityProvider; +}>; +/** + * @internal + */ +export declare const getHttpAuthExtensionConfiguration: (runtimeConfig: HttpAuthRuntimeConfig) => HttpAuthExtensionConfiguration; +/** + * @internal + */ +export declare const resolveHttpAuthRuntimeConfig: (config: HttpAuthExtensionConfiguration) => HttpAuthRuntimeConfig; diff --git a/amplify/functions/downloadDocument/node_modules/@aws-sdk/client-dynamodb/dist-types/auth/httpAuthSchemeProvider.d.ts b/amplify/functions/downloadDocument/node_modules/@aws-sdk/client-dynamodb/dist-types/auth/httpAuthSchemeProvider.d.ts new file mode 100644 index 0000000..3760ff0 --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@aws-sdk/client-dynamodb/dist-types/auth/httpAuthSchemeProvider.d.ts @@ -0,0 +1,75 @@ +import { AwsSdkSigV4AuthInputConfig, AwsSdkSigV4AuthResolvedConfig, AwsSdkSigV4PreviouslyResolved } from "@aws-sdk/core"; +import { HandlerExecutionContext, HttpAuthScheme, HttpAuthSchemeParameters, HttpAuthSchemeParametersProvider, HttpAuthSchemeProvider, Provider } from "@smithy/types"; +import { DynamoDBClientResolvedConfig } from "../DynamoDBClient"; +/** + * @internal + */ +export interface DynamoDBHttpAuthSchemeParameters extends HttpAuthSchemeParameters { + region?: string; +} +/** + * @internal + */ +export interface DynamoDBHttpAuthSchemeParametersProvider extends HttpAuthSchemeParametersProvider { +} +/** + * @internal + */ +export declare const defaultDynamoDBHttpAuthSchemeParametersProvider: (config: DynamoDBClientResolvedConfig, context: HandlerExecutionContext, input: object) => Promise; +/** + * @internal + */ +export interface DynamoDBHttpAuthSchemeProvider extends HttpAuthSchemeProvider { +} +/** + * @internal + */ +export declare const defaultDynamoDBHttpAuthSchemeProvider: DynamoDBHttpAuthSchemeProvider; +/** + * @internal + */ +export interface 
HttpAuthSchemeInputConfig extends AwsSdkSigV4AuthInputConfig { + /** + * A comma-separated list of case-sensitive auth scheme names. + * An auth scheme name is a fully qualified auth scheme ID with the namespace prefix trimmed. + * For example, the auth scheme with ID aws.auth#sigv4 is named sigv4. + * @public + */ + authSchemePreference?: string[] | Provider; + /** + * Configuration of HttpAuthSchemes for a client which provides default identity providers and signers per auth scheme. + * @internal + */ + httpAuthSchemes?: HttpAuthScheme[]; + /** + * Configuration of an HttpAuthSchemeProvider for a client which resolves which HttpAuthScheme to use. + * @internal + */ + httpAuthSchemeProvider?: DynamoDBHttpAuthSchemeProvider; +} +/** + * @internal + */ +export interface HttpAuthSchemeResolvedConfig extends AwsSdkSigV4AuthResolvedConfig { + /** + * A comma-separated list of case-sensitive auth scheme names. + * An auth scheme name is a fully qualified auth scheme ID with the namespace prefix trimmed. + * For example, the auth scheme with ID aws.auth#sigv4 is named sigv4. + * @public + */ + readonly authSchemePreference: Provider; + /** + * Configuration of HttpAuthSchemes for a client which provides default identity providers and signers per auth scheme. + * @internal + */ + readonly httpAuthSchemes: HttpAuthScheme[]; + /** + * Configuration of an HttpAuthSchemeProvider for a client which resolves which HttpAuthScheme to use. 
+ * @internal + */ + readonly httpAuthSchemeProvider: DynamoDBHttpAuthSchemeProvider; +} +/** + * @internal + */ +export declare const resolveHttpAuthSchemeConfig: (config: T & HttpAuthSchemeInputConfig & AwsSdkSigV4PreviouslyResolved) => T & HttpAuthSchemeResolvedConfig; diff --git a/amplify/functions/downloadDocument/node_modules/@aws-sdk/client-dynamodb/dist-types/commands/BatchExecuteStatementCommand.d.ts b/amplify/functions/downloadDocument/node_modules/@aws-sdk/client-dynamodb/dist-types/commands/BatchExecuteStatementCommand.d.ts new file mode 100644 index 0000000..0d8793c --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@aws-sdk/client-dynamodb/dist-types/commands/BatchExecuteStatementCommand.d.ts @@ -0,0 +1,227 @@ +import { Command as $Command } from "@smithy/smithy-client"; +import { MetadataBearer as __MetadataBearer } from "@smithy/types"; +import { DynamoDBClientResolvedConfig, ServiceInputTypes, ServiceOutputTypes } from "../DynamoDBClient"; +import { BatchExecuteStatementInput, BatchExecuteStatementOutput } from "../models/models_0"; +/** + * @public + */ +export type { __MetadataBearer }; +export { $Command }; +/** + * @public + * + * The input for {@link BatchExecuteStatementCommand}. + */ +export interface BatchExecuteStatementCommandInput extends BatchExecuteStatementInput { +} +/** + * @public + * + * The output of {@link BatchExecuteStatementCommand}. + */ +export interface BatchExecuteStatementCommandOutput extends BatchExecuteStatementOutput, __MetadataBearer { +} +declare const BatchExecuteStatementCommand_base: { + new (input: BatchExecuteStatementCommandInput): import("@smithy/smithy-client").CommandImpl; + new (__0_0: BatchExecuteStatementCommandInput): import("@smithy/smithy-client").CommandImpl; + getEndpointParameterInstructions(): import("@smithy/middleware-endpoint").EndpointParameterInstructions; +}; +/** + *

This operation allows you to perform batch reads or writes on data stored in DynamoDB, + * using PartiQL. Each read statement in a BatchExecuteStatement must specify + * an equality condition on all key attributes. This enforces that each SELECT + * statement in a batch returns at most a single item. For more information, see Running batch operations with PartiQL for DynamoDB .

+ * + *

The entire batch must consist of either read statements or write statements, you + * cannot mix both in one batch.

+ *
+ * + *

A HTTP 200 response does not mean that all statements in the BatchExecuteStatement + * succeeded. Error details for individual statements can be found under the Error field of the BatchStatementResponse for each + * statement.

+ *
+ * @example + * Use a bare-bones client and the command you need to make an API call. + * ```javascript + * import { DynamoDBClient, BatchExecuteStatementCommand } from "@aws-sdk/client-dynamodb"; // ES Modules import + * // const { DynamoDBClient, BatchExecuteStatementCommand } = require("@aws-sdk/client-dynamodb"); // CommonJS import + * const client = new DynamoDBClient(config); + * const input = { // BatchExecuteStatementInput + * Statements: [ // PartiQLBatchRequest // required + * { // BatchStatementRequest + * Statement: "STRING_VALUE", // required + * Parameters: [ // PreparedStatementParameters + * { // AttributeValue Union: only one key present + * S: "STRING_VALUE", + * N: "STRING_VALUE", + * B: new Uint8Array(), // e.g. Buffer.from("") or new TextEncoder().encode("") + * SS: [ // StringSetAttributeValue + * "STRING_VALUE", + * ], + * NS: [ // NumberSetAttributeValue + * "STRING_VALUE", + * ], + * BS: [ // BinarySetAttributeValue + * new Uint8Array(), // e.g. Buffer.from("") or new TextEncoder().encode("") + * ], + * M: { // MapAttributeValue + * "": {// Union: only one key present + * S: "STRING_VALUE", + * N: "STRING_VALUE", + * B: new Uint8Array(), // e.g. Buffer.from("") or new TextEncoder().encode("") + * SS: [ + * "STRING_VALUE", + * ], + * NS: [ + * "STRING_VALUE", + * ], + * BS: [ + * new Uint8Array(), // e.g. 
Buffer.from("") or new TextEncoder().encode("") + * ], + * M: { + * "": "", + * }, + * L: [ // ListAttributeValue + * "", + * ], + * NULL: true || false, + * BOOL: true || false, + * }, + * }, + * L: [ + * "", + * ], + * NULL: true || false, + * BOOL: true || false, + * }, + * ], + * ConsistentRead: true || false, + * ReturnValuesOnConditionCheckFailure: "ALL_OLD" || "NONE", + * }, + * ], + * ReturnConsumedCapacity: "INDEXES" || "TOTAL" || "NONE", + * }; + * const command = new BatchExecuteStatementCommand(input); + * const response = await client.send(command); + * // { // BatchExecuteStatementOutput + * // Responses: [ // PartiQLBatchResponse + * // { // BatchStatementResponse + * // Error: { // BatchStatementError + * // Code: "ConditionalCheckFailed" || "ItemCollectionSizeLimitExceeded" || "RequestLimitExceeded" || "ValidationError" || "ProvisionedThroughputExceeded" || "TransactionConflict" || "ThrottlingError" || "InternalServerError" || "ResourceNotFound" || "AccessDenied" || "DuplicateItem", + * // Message: "STRING_VALUE", + * // Item: { // AttributeMap + * // "": { // AttributeValue Union: only one key present + * // S: "STRING_VALUE", + * // N: "STRING_VALUE", + * // B: new Uint8Array(), + * // SS: [ // StringSetAttributeValue + * // "STRING_VALUE", + * // ], + * // NS: [ // NumberSetAttributeValue + * // "STRING_VALUE", + * // ], + * // BS: [ // BinarySetAttributeValue + * // new Uint8Array(), + * // ], + * // M: { // MapAttributeValue + * // "": {// Union: only one key present + * // S: "STRING_VALUE", + * // N: "STRING_VALUE", + * // B: new Uint8Array(), + * // SS: [ + * // "STRING_VALUE", + * // ], + * // NS: [ + * // "STRING_VALUE", + * // ], + * // BS: [ + * // new Uint8Array(), + * // ], + * // M: { + * // "": "", + * // }, + * // L: [ // ListAttributeValue + * // "", + * // ], + * // NULL: true || false, + * // BOOL: true || false, + * // }, + * // }, + * // L: [ + * // "", + * // ], + * // NULL: true || false, + * // BOOL: true || false, + * // 
}, + * // }, + * // }, + * // TableName: "STRING_VALUE", + * // Item: { + * // "": "", + * // }, + * // }, + * // ], + * // ConsumedCapacity: [ // ConsumedCapacityMultiple + * // { // ConsumedCapacity + * // TableName: "STRING_VALUE", + * // CapacityUnits: Number("double"), + * // ReadCapacityUnits: Number("double"), + * // WriteCapacityUnits: Number("double"), + * // Table: { // Capacity + * // ReadCapacityUnits: Number("double"), + * // WriteCapacityUnits: Number("double"), + * // CapacityUnits: Number("double"), + * // }, + * // LocalSecondaryIndexes: { // SecondaryIndexesCapacityMap + * // "": { + * // ReadCapacityUnits: Number("double"), + * // WriteCapacityUnits: Number("double"), + * // CapacityUnits: Number("double"), + * // }, + * // }, + * // GlobalSecondaryIndexes: { + * // "": { + * // ReadCapacityUnits: Number("double"), + * // WriteCapacityUnits: Number("double"), + * // CapacityUnits: Number("double"), + * // }, + * // }, + * // }, + * // ], + * // }; + * + * ``` + * + * @param BatchExecuteStatementCommandInput - {@link BatchExecuteStatementCommandInput} + * @returns {@link BatchExecuteStatementCommandOutput} + * @see {@link BatchExecuteStatementCommandInput} for command's `input` shape. + * @see {@link BatchExecuteStatementCommandOutput} for command's `response` shape. + * @see {@link DynamoDBClientResolvedConfig | config} for DynamoDBClient's `config` shape. + * + * @throws {@link InternalServerError} (server fault) + *

An error occurred on the server side.

+ * + * @throws {@link RequestLimitExceeded} (client fault) + *

Throughput exceeds the current throughput quota for your account. Please contact + * Amazon Web ServicesSupport to request a + * quota increase.

+ * + * @throws {@link DynamoDBServiceException} + *

Base exception class for all service exceptions from DynamoDB service.

+ * + * + * @public + */ +export declare class BatchExecuteStatementCommand extends BatchExecuteStatementCommand_base { + /** @internal type navigation helper, not in runtime. */ + protected static __types: { + api: { + input: BatchExecuteStatementInput; + output: BatchExecuteStatementOutput; + }; + sdk: { + input: BatchExecuteStatementCommandInput; + output: BatchExecuteStatementCommandOutput; + }; + }; +} diff --git a/amplify/functions/downloadDocument/node_modules/@aws-sdk/client-dynamodb/dist-types/commands/BatchGetItemCommand.d.ts b/amplify/functions/downloadDocument/node_modules/@aws-sdk/client-dynamodb/dist-types/commands/BatchGetItemCommand.d.ts new file mode 100644 index 0000000..aee4fd5 --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@aws-sdk/client-dynamodb/dist-types/commands/BatchGetItemCommand.d.ts @@ -0,0 +1,357 @@ +import { Command as $Command } from "@smithy/smithy-client"; +import { MetadataBearer as __MetadataBearer } from "@smithy/types"; +import { DynamoDBClientResolvedConfig, ServiceInputTypes, ServiceOutputTypes } from "../DynamoDBClient"; +import { BatchGetItemInput, BatchGetItemOutput } from "../models/models_0"; +/** + * @public + */ +export type { __MetadataBearer }; +export { $Command }; +/** + * @public + * + * The input for {@link BatchGetItemCommand}. + */ +export interface BatchGetItemCommandInput extends BatchGetItemInput { +} +/** + * @public + * + * The output of {@link BatchGetItemCommand}. + */ +export interface BatchGetItemCommandOutput extends BatchGetItemOutput, __MetadataBearer { +} +declare const BatchGetItemCommand_base: { + new (input: BatchGetItemCommandInput): import("@smithy/smithy-client").CommandImpl; + new (__0_0: BatchGetItemCommandInput): import("@smithy/smithy-client").CommandImpl; + getEndpointParameterInstructions(): import("@smithy/middleware-endpoint").EndpointParameterInstructions; +}; +/** + *

The BatchGetItem operation returns the attributes of one or more items + * from one or more tables. You identify requested items by primary key.

+ *

A single operation can retrieve up to 16 MB of data, which can contain as many as 100 + * items. BatchGetItem returns a partial result if the response size limit is + * exceeded, the table's provisioned throughput is exceeded, more than 1MB per partition is + * requested, or an internal processing failure occurs. If a partial result is returned, + * the operation returns a value for UnprocessedKeys. You can use this value + * to retry the operation starting with the next item to get.

+ * + *

If you request more than 100 items, BatchGetItem returns a + * ValidationException with the message "Too many items requested for + * the BatchGetItem call."

+ *
+ *

For example, if you ask to retrieve 100 items, but each individual item is 300 KB in + * size, the system returns 52 items (so as not to exceed the 16 MB limit). It also returns + * an appropriate UnprocessedKeys value so you can get the next page of + * results. If desired, your application can include its own logic to assemble the pages of + * results into one dataset.

+ *

If none of the items can be processed due to insufficient + * provisioned throughput on all of the tables in the request, then + * BatchGetItem returns a + * ProvisionedThroughputExceededException. If at least + * one of the items is successfully processed, then + * BatchGetItem completes successfully, while returning the keys of the + * unread items in UnprocessedKeys.

+ * + *

If DynamoDB returns any unprocessed items, you should retry the batch operation on + * those items. However, we strongly recommend that you use an exponential + * backoff algorithm. If you retry the batch operation immediately, the + * underlying read or write requests can still fail due to throttling on the individual + * tables. If you delay the batch operation using exponential backoff, the individual + * requests in the batch are much more likely to succeed.

+ *

For more information, see Batch Operations and Error Handling in the Amazon DynamoDB + * Developer Guide.

+ *
+ *

By default, BatchGetItem performs eventually consistent reads on every + * table in the request. If you want strongly consistent reads instead, you can set + * ConsistentRead to true for any or all tables.

+ *

In order to minimize response latency, BatchGetItem may retrieve items in + * parallel.

+ *

When designing your application, keep in mind that DynamoDB does not return items in + * any particular order. To help parse the response by item, include the primary key values + * for the items in your request in the ProjectionExpression parameter.

+ *

If a requested item does not exist, it is not returned in the result. Requests for + * nonexistent items consume the minimum read capacity units according to the type of read. + * For more information, see Working with Tables in the Amazon DynamoDB Developer + * Guide.

+ * + *

+ * BatchGetItem will result in a ValidationException if the + * same key is specified multiple times.

+ *
+ * @example + * Use a bare-bones client and the command you need to make an API call. + * ```javascript + * import { DynamoDBClient, BatchGetItemCommand } from "@aws-sdk/client-dynamodb"; // ES Modules import + * // const { DynamoDBClient, BatchGetItemCommand } = require("@aws-sdk/client-dynamodb"); // CommonJS import + * const client = new DynamoDBClient(config); + * const input = { // BatchGetItemInput + * RequestItems: { // BatchGetRequestMap // required + * "": { // KeysAndAttributes + * Keys: [ // KeyList // required + * { // Key + * "": { // AttributeValue Union: only one key present + * S: "STRING_VALUE", + * N: "STRING_VALUE", + * B: new Uint8Array(), // e.g. Buffer.from("") or new TextEncoder().encode("") + * SS: [ // StringSetAttributeValue + * "STRING_VALUE", + * ], + * NS: [ // NumberSetAttributeValue + * "STRING_VALUE", + * ], + * BS: [ // BinarySetAttributeValue + * new Uint8Array(), // e.g. Buffer.from("") or new TextEncoder().encode("") + * ], + * M: { // MapAttributeValue + * "": {// Union: only one key present + * S: "STRING_VALUE", + * N: "STRING_VALUE", + * B: new Uint8Array(), // e.g. Buffer.from("") or new TextEncoder().encode("") + * SS: [ + * "STRING_VALUE", + * ], + * NS: [ + * "STRING_VALUE", + * ], + * BS: [ + * new Uint8Array(), // e.g. 
Buffer.from("") or new TextEncoder().encode("") + * ], + * M: { + * "": "", + * }, + * L: [ // ListAttributeValue + * "", + * ], + * NULL: true || false, + * BOOL: true || false, + * }, + * }, + * L: [ + * "", + * ], + * NULL: true || false, + * BOOL: true || false, + * }, + * }, + * ], + * AttributesToGet: [ // AttributeNameList + * "STRING_VALUE", + * ], + * ConsistentRead: true || false, + * ProjectionExpression: "STRING_VALUE", + * ExpressionAttributeNames: { // ExpressionAttributeNameMap + * "": "STRING_VALUE", + * }, + * }, + * }, + * ReturnConsumedCapacity: "INDEXES" || "TOTAL" || "NONE", + * }; + * const command = new BatchGetItemCommand(input); + * const response = await client.send(command); + * // { // BatchGetItemOutput + * // Responses: { // BatchGetResponseMap + * // "": [ // ItemList + * // { // AttributeMap + * // "": { // AttributeValue Union: only one key present + * // S: "STRING_VALUE", + * // N: "STRING_VALUE", + * // B: new Uint8Array(), + * // SS: [ // StringSetAttributeValue + * // "STRING_VALUE", + * // ], + * // NS: [ // NumberSetAttributeValue + * // "STRING_VALUE", + * // ], + * // BS: [ // BinarySetAttributeValue + * // new Uint8Array(), + * // ], + * // M: { // MapAttributeValue + * // "": {// Union: only one key present + * // S: "STRING_VALUE", + * // N: "STRING_VALUE", + * // B: new Uint8Array(), + * // SS: [ + * // "STRING_VALUE", + * // ], + * // NS: [ + * // "STRING_VALUE", + * // ], + * // BS: [ + * // new Uint8Array(), + * // ], + * // M: { + * // "": "", + * // }, + * // L: [ // ListAttributeValue + * // "", + * // ], + * // NULL: true || false, + * // BOOL: true || false, + * // }, + * // }, + * // L: [ + * // "", + * // ], + * // NULL: true || false, + * // BOOL: true || false, + * // }, + * // }, + * // ], + * // }, + * // UnprocessedKeys: { // BatchGetRequestMap + * // "": { // KeysAndAttributes + * // Keys: [ // KeyList // required + * // { // Key + * // "": "", + * // }, + * // ], + * // AttributesToGet: [ // 
AttributeNameList + * // "STRING_VALUE", + * // ], + * // ConsistentRead: true || false, + * // ProjectionExpression: "STRING_VALUE", + * // ExpressionAttributeNames: { // ExpressionAttributeNameMap + * // "": "STRING_VALUE", + * // }, + * // }, + * // }, + * // ConsumedCapacity: [ // ConsumedCapacityMultiple + * // { // ConsumedCapacity + * // TableName: "STRING_VALUE", + * // CapacityUnits: Number("double"), + * // ReadCapacityUnits: Number("double"), + * // WriteCapacityUnits: Number("double"), + * // Table: { // Capacity + * // ReadCapacityUnits: Number("double"), + * // WriteCapacityUnits: Number("double"), + * // CapacityUnits: Number("double"), + * // }, + * // LocalSecondaryIndexes: { // SecondaryIndexesCapacityMap + * // "": { + * // ReadCapacityUnits: Number("double"), + * // WriteCapacityUnits: Number("double"), + * // CapacityUnits: Number("double"), + * // }, + * // }, + * // GlobalSecondaryIndexes: { + * // "": { + * // ReadCapacityUnits: Number("double"), + * // WriteCapacityUnits: Number("double"), + * // CapacityUnits: Number("double"), + * // }, + * // }, + * // }, + * // ], + * // }; + * + * ``` + * + * @param BatchGetItemCommandInput - {@link BatchGetItemCommandInput} + * @returns {@link BatchGetItemCommandOutput} + * @see {@link BatchGetItemCommandInput} for command's `input` shape. + * @see {@link BatchGetItemCommandOutput} for command's `response` shape. + * @see {@link DynamoDBClientResolvedConfig | config} for DynamoDBClient's `config` shape. + * + * @throws {@link InternalServerError} (server fault) + *

An error occurred on the server side.

+ * + * @throws {@link InvalidEndpointException} (client fault) + * + * @throws {@link ProvisionedThroughputExceededException} (client fault) + *

Your request rate is too high. The Amazon Web Services SDKs for DynamoDB + * automatically retry requests that receive this exception. Your request is eventually + * successful, unless your retry queue is too large to finish. Reduce the frequency of + * requests and use exponential backoff. For more information, go to Error Retries and Exponential Backoff in the Amazon DynamoDB Developer Guide.

+ * + * @throws {@link RequestLimitExceeded} (client fault) + *

Throughput exceeds the current throughput quota for your account. Please contact + * Amazon Web ServicesSupport to request a + * quota increase.

+ * + * @throws {@link ResourceNotFoundException} (client fault) + *

The operation tried to access a nonexistent table or index. The resource might not + * be specified correctly, or its status might not be ACTIVE.

+ * + * @throws {@link DynamoDBServiceException} + *

Base exception class for all service exceptions from DynamoDB service.

+ * + * + * @example To retrieve multiple items from a table + * ```javascript + * // This example reads multiple items from the Music table using a batch of three GetItem requests. Only the AlbumTitle attribute is returned. + * const input = { + * RequestItems: { + * Music: { + * Keys: [ + * { + * Artist: { + * S: "No One You Know" + * }, + * SongTitle: { + * S: "Call Me Today" + * } + * }, + * { + * Artist: { + * S: "Acme Band" + * }, + * SongTitle: { + * S: "Happy Day" + * } + * }, + * { + * Artist: { + * S: "No One You Know" + * }, + * SongTitle: { + * S: "Scared of My Shadow" + * } + * } + * ], + * ProjectionExpression: "AlbumTitle" + * } + * } + * }; + * const command = new BatchGetItemCommand(input); + * const response = await client.send(command); + * /* response is + * { + * Responses: { + * Music: [ + * { + * AlbumTitle: { + * S: "Somewhat Famous" + * } + * }, + * { + * AlbumTitle: { + * S: "Blue Sky Blues" + * } + * }, + * { + * AlbumTitle: { + * S: "Louder Than Ever" + * } + * } + * ] + * } + * } + * *\/ + * ``` + * + * @public + */ +export declare class BatchGetItemCommand extends BatchGetItemCommand_base { + /** @internal type navigation helper, not in runtime. 
*/ + protected static __types: { + api: { + input: BatchGetItemInput; + output: BatchGetItemOutput; + }; + sdk: { + input: BatchGetItemCommandInput; + output: BatchGetItemCommandOutput; + }; + }; +} diff --git a/amplify/functions/downloadDocument/node_modules/@aws-sdk/client-dynamodb/dist-types/commands/BatchWriteItemCommand.d.ts b/amplify/functions/downloadDocument/node_modules/@aws-sdk/client-dynamodb/dist-types/commands/BatchWriteItemCommand.d.ts new file mode 100644 index 0000000..13bed0c --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@aws-sdk/client-dynamodb/dist-types/commands/BatchWriteItemCommand.d.ts @@ -0,0 +1,401 @@ +import { Command as $Command } from "@smithy/smithy-client"; +import { MetadataBearer as __MetadataBearer } from "@smithy/types"; +import { DynamoDBClientResolvedConfig, ServiceInputTypes, ServiceOutputTypes } from "../DynamoDBClient"; +import { BatchWriteItemInput, BatchWriteItemOutput } from "../models/models_0"; +/** + * @public + */ +export type { __MetadataBearer }; +export { $Command }; +/** + * @public + * + * The input for {@link BatchWriteItemCommand}. + */ +export interface BatchWriteItemCommandInput extends BatchWriteItemInput { +} +/** + * @public + * + * The output of {@link BatchWriteItemCommand}. + */ +export interface BatchWriteItemCommandOutput extends BatchWriteItemOutput, __MetadataBearer { +} +declare const BatchWriteItemCommand_base: { + new (input: BatchWriteItemCommandInput): import("@smithy/smithy-client").CommandImpl; + new (__0_0: BatchWriteItemCommandInput): import("@smithy/smithy-client").CommandImpl; + getEndpointParameterInstructions(): import("@smithy/middleware-endpoint").EndpointParameterInstructions; +}; +/** + *

The BatchWriteItem operation puts or deletes multiple items in one or + * more tables. A single call to BatchWriteItem can transmit up to 16MB of + * data over the network, consisting of up to 25 item put or delete operations. While + * individual items can be up to 400 KB once stored, it's important to note that an item's + * representation might be greater than 400KB while being sent in DynamoDB's JSON format + * for the API call. For more details on this distinction, see Naming Rules and Data Types.

+ * + *

+ * BatchWriteItem cannot update items. If you perform a + * BatchWriteItem operation on an existing item, that item's values + * will be overwritten by the operation and it will appear like it was updated. To + * update items, we recommend you use the UpdateItem action.

+ *
+ *

The individual PutItem and DeleteItem operations specified + * in BatchWriteItem are atomic; however BatchWriteItem as a + * whole is not. If any requested operations fail because the table's provisioned + * throughput is exceeded or an internal processing failure occurs, the failed operations + * are returned in the UnprocessedItems response parameter. You can + * investigate and optionally resend the requests. Typically, you would call + * BatchWriteItem in a loop. Each iteration would check for unprocessed + * items and submit a new BatchWriteItem request with those unprocessed items + * until all items have been processed.

+ *

For tables and indexes with provisioned capacity, if none of the items can be + * processed due to insufficient provisioned throughput on all of the tables in the + * request, then BatchWriteItem returns a + * ProvisionedThroughputExceededException. For all tables and indexes, if + * none of the items can be processed due to other throttling scenarios (such as exceeding + * partition level limits), then BatchWriteItem returns a + * ThrottlingException.

+ * + *

If DynamoDB returns any unprocessed items, you should retry the batch operation on + * those items. However, we strongly recommend that you use an exponential + * backoff algorithm. If you retry the batch operation immediately, the + * underlying read or write requests can still fail due to throttling on the individual + * tables. If you delay the batch operation using exponential backoff, the individual + * requests in the batch are much more likely to succeed.

+ *

For more information, see Batch Operations and Error Handling in the Amazon DynamoDB + * Developer Guide.

+ *
+ *

With BatchWriteItem, you can efficiently write or delete large amounts of + * data, such as from Amazon EMR, or copy data from another database into DynamoDB. In + * order to improve performance with these large-scale operations, + * BatchWriteItem does not behave in the same way as individual + * PutItem and DeleteItem calls would. For example, you + * cannot specify conditions on individual put and delete requests, and + * BatchWriteItem does not return deleted items in the response.

+ *

If you use a programming language that supports concurrency, you can use threads to + * write items in parallel. Your application must include the necessary logic to manage the + * threads. With languages that don't support threading, you must update or delete the + * specified items one at a time. In both situations, BatchWriteItem performs + * the specified put and delete operations in parallel, giving you the power of the thread + * pool approach without having to introduce complexity into your application.

+ *

Parallel processing reduces latency, but each specified put and delete request + * consumes the same number of write capacity units whether it is processed in parallel or + * not. Delete operations on nonexistent items consume one write capacity unit.

+ *

If one or more of the following is true, DynamoDB rejects the entire batch write + * operation:

+ *
    + *
  • + *

    One or more tables specified in the BatchWriteItem request does + * not exist.

    + *
  • + *
  • + *

    Primary key attributes specified on an item in the request do not match those + * in the corresponding table's primary key schema.

    + *
  • + *
  • + *

    You try to perform multiple operations on the same item in the same + * BatchWriteItem request. For example, you cannot put and delete + * the same item in the same BatchWriteItem request.

    + *
  • + *
  • + *

    Your request contains at least two items with identical hash and range keys + * (which essentially is two put operations).

    + *
  • + *
  • + *

    There are more than 25 requests in the batch.

    + *
  • + *
  • + *

    Any individual item in a batch exceeds 400 KB.

    + *
  • + *
  • + *

    The total request size exceeds 16 MB.

    + *
  • + *
  • + *

    Any individual items with keys exceeding the key length limits. For a + * partition key, the limit is 2048 bytes and for a sort key, the limit is 1024 + * bytes.

    + *
  • + *
+ * @example + * Use a bare-bones client and the command you need to make an API call. + * ```javascript + * import { DynamoDBClient, BatchWriteItemCommand } from "@aws-sdk/client-dynamodb"; // ES Modules import + * // const { DynamoDBClient, BatchWriteItemCommand } = require("@aws-sdk/client-dynamodb"); // CommonJS import + * const client = new DynamoDBClient(config); + * const input = { // BatchWriteItemInput + * RequestItems: { // BatchWriteItemRequestMap // required + * "": [ // WriteRequests + * { // WriteRequest + * PutRequest: { // PutRequest + * Item: { // PutItemInputAttributeMap // required + * "": { // AttributeValue Union: only one key present + * S: "STRING_VALUE", + * N: "STRING_VALUE", + * B: new Uint8Array(), // e.g. Buffer.from("") or new TextEncoder().encode("") + * SS: [ // StringSetAttributeValue + * "STRING_VALUE", + * ], + * NS: [ // NumberSetAttributeValue + * "STRING_VALUE", + * ], + * BS: [ // BinarySetAttributeValue + * new Uint8Array(), // e.g. Buffer.from("") or new TextEncoder().encode("") + * ], + * M: { // MapAttributeValue + * "": {// Union: only one key present + * S: "STRING_VALUE", + * N: "STRING_VALUE", + * B: new Uint8Array(), // e.g. Buffer.from("") or new TextEncoder().encode("") + * SS: [ + * "STRING_VALUE", + * ], + * NS: [ + * "STRING_VALUE", + * ], + * BS: [ + * new Uint8Array(), // e.g. 
Buffer.from("") or new TextEncoder().encode("") + * ], + * M: { + * "": "", + * }, + * L: [ // ListAttributeValue + * "", + * ], + * NULL: true || false, + * BOOL: true || false, + * }, + * }, + * L: [ + * "", + * ], + * NULL: true || false, + * BOOL: true || false, + * }, + * }, + * }, + * DeleteRequest: { // DeleteRequest + * Key: { // Key // required + * "": "", + * }, + * }, + * }, + * ], + * }, + * ReturnConsumedCapacity: "INDEXES" || "TOTAL" || "NONE", + * ReturnItemCollectionMetrics: "SIZE" || "NONE", + * }; + * const command = new BatchWriteItemCommand(input); + * const response = await client.send(command); + * // { // BatchWriteItemOutput + * // UnprocessedItems: { // BatchWriteItemRequestMap + * // "": [ // WriteRequests + * // { // WriteRequest + * // PutRequest: { // PutRequest + * // Item: { // PutItemInputAttributeMap // required + * // "": { // AttributeValue Union: only one key present + * // S: "STRING_VALUE", + * // N: "STRING_VALUE", + * // B: new Uint8Array(), + * // SS: [ // StringSetAttributeValue + * // "STRING_VALUE", + * // ], + * // NS: [ // NumberSetAttributeValue + * // "STRING_VALUE", + * // ], + * // BS: [ // BinarySetAttributeValue + * // new Uint8Array(), + * // ], + * // M: { // MapAttributeValue + * // "": {// Union: only one key present + * // S: "STRING_VALUE", + * // N: "STRING_VALUE", + * // B: new Uint8Array(), + * // SS: [ + * // "STRING_VALUE", + * // ], + * // NS: [ + * // "STRING_VALUE", + * // ], + * // BS: [ + * // new Uint8Array(), + * // ], + * // M: { + * // "": "", + * // }, + * // L: [ // ListAttributeValue + * // "", + * // ], + * // NULL: true || false, + * // BOOL: true || false, + * // }, + * // }, + * // L: [ + * // "", + * // ], + * // NULL: true || false, + * // BOOL: true || false, + * // }, + * // }, + * // }, + * // DeleteRequest: { // DeleteRequest + * // Key: { // Key // required + * // "": "", + * // }, + * // }, + * // }, + * // ], + * // }, + * // ItemCollectionMetrics: { // 
ItemCollectionMetricsPerTable + * // "": [ // ItemCollectionMetricsMultiple + * // { // ItemCollectionMetrics + * // ItemCollectionKey: { // ItemCollectionKeyAttributeMap + * // "": "", + * // }, + * // SizeEstimateRangeGB: [ // ItemCollectionSizeEstimateRange + * // Number("double"), + * // ], + * // }, + * // ], + * // }, + * // ConsumedCapacity: [ // ConsumedCapacityMultiple + * // { // ConsumedCapacity + * // TableName: "STRING_VALUE", + * // CapacityUnits: Number("double"), + * // ReadCapacityUnits: Number("double"), + * // WriteCapacityUnits: Number("double"), + * // Table: { // Capacity + * // ReadCapacityUnits: Number("double"), + * // WriteCapacityUnits: Number("double"), + * // CapacityUnits: Number("double"), + * // }, + * // LocalSecondaryIndexes: { // SecondaryIndexesCapacityMap + * // "": { + * // ReadCapacityUnits: Number("double"), + * // WriteCapacityUnits: Number("double"), + * // CapacityUnits: Number("double"), + * // }, + * // }, + * // GlobalSecondaryIndexes: { + * // "": { + * // ReadCapacityUnits: Number("double"), + * // WriteCapacityUnits: Number("double"), + * // CapacityUnits: Number("double"), + * // }, + * // }, + * // }, + * // ], + * // }; + * + * ``` + * + * @param BatchWriteItemCommandInput - {@link BatchWriteItemCommandInput} + * @returns {@link BatchWriteItemCommandOutput} + * @see {@link BatchWriteItemCommandInput} for command's `input` shape. + * @see {@link BatchWriteItemCommandOutput} for command's `response` shape. + * @see {@link DynamoDBClientResolvedConfig | config} for DynamoDBClient's `config` shape. + * + * @throws {@link InternalServerError} (server fault) + *

An error occurred on the server side.

+ * + * @throws {@link InvalidEndpointException} (client fault) + * + * @throws {@link ItemCollectionSizeLimitExceededException} (client fault) + *

An item collection is too large. This exception is only returned for tables that + * have one or more local secondary indexes.

+ * + * @throws {@link ProvisionedThroughputExceededException} (client fault) + *

Your request rate is too high. The Amazon Web Services SDKs for DynamoDB + * automatically retry requests that receive this exception. Your request is eventually + * successful, unless your retry queue is too large to finish. Reduce the frequency of + * requests and use exponential backoff. For more information, go to Error Retries and Exponential Backoff in the Amazon DynamoDB Developer Guide.

+ * + * @throws {@link RequestLimitExceeded} (client fault) + *

Throughput exceeds the current throughput quota for your account. Please contact + * Amazon Web ServicesSupport to request a + * quota increase.

+ * + * @throws {@link ResourceNotFoundException} (client fault) + *

The operation tried to access a nonexistent table or index. The resource might not + * be specified correctly, or its status might not be ACTIVE.

+ * + * @throws {@link DynamoDBServiceException} + *

Base exception class for all service exceptions from DynamoDB service.

+ * + * + * @example To add multiple items to a table + * ```javascript + * // This example adds three new items to the Music table using a batch of three PutItem requests. + * const input = { + * RequestItems: { + * Music: [ + * { + * PutRequest: { + * Item: { + * AlbumTitle: { + * S: "Somewhat Famous" + * }, + * Artist: { + * S: "No One You Know" + * }, + * SongTitle: { + * S: "Call Me Today" + * } + * } + * } + * }, + * { + * PutRequest: { + * Item: { + * AlbumTitle: { + * S: "Songs About Life" + * }, + * Artist: { + * S: "Acme Band" + * }, + * SongTitle: { + * S: "Happy Day" + * } + * } + * } + * }, + * { + * PutRequest: { + * Item: { + * AlbumTitle: { + * S: "Blue Sky Blues" + * }, + * Artist: { + * S: "No One You Know" + * }, + * SongTitle: { + * S: "Scared of My Shadow" + * } + * } + * } + * } + * ] + * } + * }; + * const command = new BatchWriteItemCommand(input); + * const response = await client.send(command); + * /* response is + * { /* empty *\/ } + * *\/ + * ``` + * + * @public + */ +export declare class BatchWriteItemCommand extends BatchWriteItemCommand_base { + /** @internal type navigation helper, not in runtime. 
*/ + protected static __types: { + api: { + input: BatchWriteItemInput; + output: BatchWriteItemOutput; + }; + sdk: { + input: BatchWriteItemCommandInput; + output: BatchWriteItemCommandOutput; + }; + }; +} diff --git a/amplify/functions/downloadDocument/node_modules/@aws-sdk/client-dynamodb/dist-types/commands/CreateBackupCommand.d.ts b/amplify/functions/downloadDocument/node_modules/@aws-sdk/client-dynamodb/dist-types/commands/CreateBackupCommand.d.ts new file mode 100644 index 0000000..32ec75a --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@aws-sdk/client-dynamodb/dist-types/commands/CreateBackupCommand.d.ts @@ -0,0 +1,146 @@ +import { Command as $Command } from "@smithy/smithy-client"; +import { MetadataBearer as __MetadataBearer } from "@smithy/types"; +import { DynamoDBClientResolvedConfig, ServiceInputTypes, ServiceOutputTypes } from "../DynamoDBClient"; +import { CreateBackupInput, CreateBackupOutput } from "../models/models_0"; +/** + * @public + */ +export type { __MetadataBearer }; +export { $Command }; +/** + * @public + * + * The input for {@link CreateBackupCommand}. + */ +export interface CreateBackupCommandInput extends CreateBackupInput { +} +/** + * @public + * + * The output of {@link CreateBackupCommand}. + */ +export interface CreateBackupCommandOutput extends CreateBackupOutput, __MetadataBearer { +} +declare const CreateBackupCommand_base: { + new (input: CreateBackupCommandInput): import("@smithy/smithy-client").CommandImpl; + new (__0_0: CreateBackupCommandInput): import("@smithy/smithy-client").CommandImpl; + getEndpointParameterInstructions(): import("@smithy/middleware-endpoint").EndpointParameterInstructions; +}; +/** + *

Creates a backup for an existing table.

+ *

Each time you create an on-demand backup, the entire table data is backed up. There + * is no limit to the number of on-demand backups that can be taken.

+ *

When you create an on-demand backup, a time marker of the request is cataloged, and + * the backup is created asynchronously, by applying all changes until the time of the + * request to the last full table snapshot. Backup requests are processed instantaneously + * and become available for restore within minutes.

+ *

You can call CreateBackup at a maximum rate of 50 times per + * second.

+ *

All backups in DynamoDB work without consuming any provisioned throughput on the + * table.

+ *

If you submit a backup request on 2018-12-14 at 14:25:00, the backup is guaranteed to + * contain all data committed to the table up to 14:24:00, and data committed after + * 14:26:00 will not be. The backup might contain data modifications made between 14:24:00 + * and 14:26:00. On-demand backup does not support causal consistency.

+ *

Along with data, the following are also included on the backups:

+ *
    + *
  • + *

    Global secondary indexes (GSIs)

    + *
  • + *
  • + *

    Local secondary indexes (LSIs)

    + *
  • + *
  • + *

    Streams

    + *
  • + *
  • + *

    Provisioned read and write capacity

    + *
  • + *
+ * @example + * Use a bare-bones client and the command you need to make an API call. + * ```javascript + * import { DynamoDBClient, CreateBackupCommand } from "@aws-sdk/client-dynamodb"; // ES Modules import + * // const { DynamoDBClient, CreateBackupCommand } = require("@aws-sdk/client-dynamodb"); // CommonJS import + * const client = new DynamoDBClient(config); + * const input = { // CreateBackupInput + * TableName: "STRING_VALUE", // required + * BackupName: "STRING_VALUE", // required + * }; + * const command = new CreateBackupCommand(input); + * const response = await client.send(command); + * // { // CreateBackupOutput + * // BackupDetails: { // BackupDetails + * // BackupArn: "STRING_VALUE", // required + * // BackupName: "STRING_VALUE", // required + * // BackupSizeBytes: Number("long"), + * // BackupStatus: "CREATING" || "DELETED" || "AVAILABLE", // required + * // BackupType: "USER" || "SYSTEM" || "AWS_BACKUP", // required + * // BackupCreationDateTime: new Date("TIMESTAMP"), // required + * // BackupExpiryDateTime: new Date("TIMESTAMP"), + * // }, + * // }; + * + * ``` + * + * @param CreateBackupCommandInput - {@link CreateBackupCommandInput} + * @returns {@link CreateBackupCommandOutput} + * @see {@link CreateBackupCommandInput} for command's `input` shape. + * @see {@link CreateBackupCommandOutput} for command's `response` shape. + * @see {@link DynamoDBClientResolvedConfig | config} for DynamoDBClient's `config` shape. + * + * @throws {@link BackupInUseException} (client fault) + *

There is another ongoing conflicting backup control plane operation on the table. + * The backup is either being created, deleted or restored to a table.

+ * + * @throws {@link ContinuousBackupsUnavailableException} (client fault) + *

Backups have not yet been enabled for this table.

+ * + * @throws {@link InternalServerError} (server fault) + *

An error occurred on the server side.

+ * + * @throws {@link InvalidEndpointException} (client fault) + * + * @throws {@link LimitExceededException} (client fault) + *

There is no limit to the number of daily on-demand backups that can be taken.

+ *

For most purposes, up to 500 simultaneous table operations are allowed per account. These operations + * include CreateTable, UpdateTable, + * DeleteTable,UpdateTimeToLive, + * RestoreTableFromBackup, and RestoreTableToPointInTime.

+ *

When you are creating a table with one or more secondary + * indexes, you can have up to 250 such requests running at a time. However, if the table or + * index specifications are complex, then DynamoDB might temporarily reduce the number + * of concurrent operations.

+ *

When importing into DynamoDB, up to 50 simultaneous import table operations are allowed per account.

+ *

There is a soft account quota of 2,500 tables.

+ *

GetRecords was called with a value of more than 1000 for the limit request parameter.

+ *

More than 2 processes are reading from the same streams shard at the same time. Exceeding + * this limit may result in request throttling.

+ * + * @throws {@link TableInUseException} (client fault) + *

A target table with the specified name is either being created or deleted. + *

+ * + * @throws {@link TableNotFoundException} (client fault) + *

A source table with the name TableName does not currently exist within + * the subscriber's account or the subscriber is operating in the wrong Amazon Web Services Region.

+ * + * @throws {@link DynamoDBServiceException} + *

Base exception class for all service exceptions from DynamoDB service.

+ * + * + * @public + */ +export declare class CreateBackupCommand extends CreateBackupCommand_base { + /** @internal type navigation helper, not in runtime. */ + protected static __types: { + api: { + input: CreateBackupInput; + output: CreateBackupOutput; + }; + sdk: { + input: CreateBackupCommandInput; + output: CreateBackupCommandOutput; + }; + }; +} diff --git a/amplify/functions/downloadDocument/node_modules/@aws-sdk/client-dynamodb/dist-types/commands/CreateGlobalTableCommand.d.ts b/amplify/functions/downloadDocument/node_modules/@aws-sdk/client-dynamodb/dist-types/commands/CreateGlobalTableCommand.d.ts new file mode 100644 index 0000000..551fcd3 --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@aws-sdk/client-dynamodb/dist-types/commands/CreateGlobalTableCommand.d.ts @@ -0,0 +1,205 @@ +import { Command as $Command } from "@smithy/smithy-client"; +import { MetadataBearer as __MetadataBearer } from "@smithy/types"; +import { DynamoDBClientResolvedConfig, ServiceInputTypes, ServiceOutputTypes } from "../DynamoDBClient"; +import { CreateGlobalTableInput, CreateGlobalTableOutput } from "../models/models_0"; +/** + * @public + */ +export type { __MetadataBearer }; +export { $Command }; +/** + * @public + * + * The input for {@link CreateGlobalTableCommand}. + */ +export interface CreateGlobalTableCommandInput extends CreateGlobalTableInput { +} +/** + * @public + * + * The output of {@link CreateGlobalTableCommand}. + */ +export interface CreateGlobalTableCommandOutput extends CreateGlobalTableOutput, __MetadataBearer { +} +declare const CreateGlobalTableCommand_base: { + new (input: CreateGlobalTableCommandInput): import("@smithy/smithy-client").CommandImpl; + new (__0_0: CreateGlobalTableCommandInput): import("@smithy/smithy-client").CommandImpl; + getEndpointParameterInstructions(): import("@smithy/middleware-endpoint").EndpointParameterInstructions; +}; +/** + *

Creates a global table from an existing table. A global table creates a replication + * relationship between two or more DynamoDB tables with the same table name in the + * provided Regions.

+ * + *

This documentation is for version 2017.11.29 (Legacy) of global tables, which should be avoided for new global tables. Customers should use Global Tables version 2019.11.21 (Current) when possible, because it provides greater flexibility, higher efficiency, and consumes less write capacity than 2017.11.29 (Legacy).

+ *

To determine which version you're using, see Determining the global table version you are using. To update existing global tables from version 2017.11.29 (Legacy) to version 2019.11.21 (Current), see Upgrading global tables.

+ *
+ *

If you want to add a new replica table to a global table, each of the following + * conditions must be true:

+ *
    + *
  • + *

    The table must have the same primary key as all of the other replicas.

    + *
  • + *
  • + *

    The table must have the same name as all of the other replicas.

    + *
  • + *
  • + *

    The table must have DynamoDB Streams enabled, with the stream containing both + * the new and the old images of the item.

    + *
  • + *
  • + *

    None of the replica tables in the global table can contain any data.

    + *
  • + *
+ *

If global secondary indexes are specified, then the following conditions must also be + * met:

+ *
    + *
  • + *

    The global secondary indexes must have the same name.

    + *
  • + *
  • + *

    The global secondary indexes must have the same hash key and sort key (if + * present).

    + *
  • + *
+ *

If local secondary indexes are specified, then the following conditions must also be + * met:

+ *
    + *
  • + *

    The local secondary indexes must have the same name.

    + *
  • + *
  • + *

    The local secondary indexes must have the same hash key and sort key (if + * present).

    + *
  • + *
+ * + *

Write capacity settings should be set consistently across your replica tables and + * secondary indexes. DynamoDB strongly recommends enabling auto scaling to manage the + * write capacity settings for all of your global tables replicas and indexes.

+ *

If you prefer to manage write capacity settings manually, you should provision + * equal replicated write capacity units to your replica tables. You should also + * provision equal replicated write capacity units to matching secondary indexes across + * your global table.

+ *
+ * @example + * Use a bare-bones client and the command you need to make an API call. + * ```javascript + * import { DynamoDBClient, CreateGlobalTableCommand } from "@aws-sdk/client-dynamodb"; // ES Modules import + * // const { DynamoDBClient, CreateGlobalTableCommand } = require("@aws-sdk/client-dynamodb"); // CommonJS import + * const client = new DynamoDBClient(config); + * const input = { // CreateGlobalTableInput + * GlobalTableName: "STRING_VALUE", // required + * ReplicationGroup: [ // ReplicaList // required + * { // Replica + * RegionName: "STRING_VALUE", + * }, + * ], + * }; + * const command = new CreateGlobalTableCommand(input); + * const response = await client.send(command); + * // { // CreateGlobalTableOutput + * // GlobalTableDescription: { // GlobalTableDescription + * // ReplicationGroup: [ // ReplicaDescriptionList + * // { // ReplicaDescription + * // RegionName: "STRING_VALUE", + * // ReplicaStatus: "CREATING" || "CREATION_FAILED" || "UPDATING" || "DELETING" || "ACTIVE" || "REGION_DISABLED" || "INACCESSIBLE_ENCRYPTION_CREDENTIALS", + * // ReplicaStatusDescription: "STRING_VALUE", + * // ReplicaStatusPercentProgress: "STRING_VALUE", + * // KMSMasterKeyId: "STRING_VALUE", + * // ProvisionedThroughputOverride: { // ProvisionedThroughputOverride + * // ReadCapacityUnits: Number("long"), + * // }, + * // OnDemandThroughputOverride: { // OnDemandThroughputOverride + * // MaxReadRequestUnits: Number("long"), + * // }, + * // WarmThroughput: { // TableWarmThroughputDescription + * // ReadUnitsPerSecond: Number("long"), + * // WriteUnitsPerSecond: Number("long"), + * // Status: "CREATING" || "UPDATING" || "DELETING" || "ACTIVE" || "INACCESSIBLE_ENCRYPTION_CREDENTIALS" || "ARCHIVING" || "ARCHIVED", + * // }, + * // GlobalSecondaryIndexes: [ // ReplicaGlobalSecondaryIndexDescriptionList + * // { // ReplicaGlobalSecondaryIndexDescription + * // IndexName: "STRING_VALUE", + * // ProvisionedThroughputOverride: { + * // ReadCapacityUnits: Number("long"), + 
* // }, + * // OnDemandThroughputOverride: { + * // MaxReadRequestUnits: Number("long"), + * // }, + * // WarmThroughput: { // GlobalSecondaryIndexWarmThroughputDescription + * // ReadUnitsPerSecond: Number("long"), + * // WriteUnitsPerSecond: Number("long"), + * // Status: "CREATING" || "UPDATING" || "DELETING" || "ACTIVE", + * // }, + * // }, + * // ], + * // ReplicaInaccessibleDateTime: new Date("TIMESTAMP"), + * // ReplicaTableClassSummary: { // TableClassSummary + * // TableClass: "STANDARD" || "STANDARD_INFREQUENT_ACCESS", + * // LastUpdateDateTime: new Date("TIMESTAMP"), + * // }, + * // }, + * // ], + * // GlobalTableArn: "STRING_VALUE", + * // CreationDateTime: new Date("TIMESTAMP"), + * // GlobalTableStatus: "CREATING" || "ACTIVE" || "DELETING" || "UPDATING", + * // GlobalTableName: "STRING_VALUE", + * // }, + * // }; + * + * ``` + * + * @param CreateGlobalTableCommandInput - {@link CreateGlobalTableCommandInput} + * @returns {@link CreateGlobalTableCommandOutput} + * @see {@link CreateGlobalTableCommandInput} for command's `input` shape. + * @see {@link CreateGlobalTableCommandOutput} for command's `response` shape. + * @see {@link DynamoDBClientResolvedConfig | config} for DynamoDBClient's `config` shape. + * + * @throws {@link GlobalTableAlreadyExistsException} (client fault) + *

The specified global table already exists.

+ * + * @throws {@link InternalServerError} (server fault) + *

An error occurred on the server side.

+ * + * @throws {@link InvalidEndpointException} (client fault) + * + * @throws {@link LimitExceededException} (client fault) + *

There is no limit to the number of daily on-demand backups that can be taken.

+ *

For most purposes, up to 500 simultaneous table operations are allowed per account. These operations + * include CreateTable, UpdateTable, + * DeleteTable,UpdateTimeToLive, + * RestoreTableFromBackup, and RestoreTableToPointInTime.

+ *

When you are creating a table with one or more secondary + * indexes, you can have up to 250 such requests running at a time. However, if the table or + * index specifications are complex, then DynamoDB might temporarily reduce the number + * of concurrent operations.

+ *

When importing into DynamoDB, up to 50 simultaneous import table operations are allowed per account.

+ *

There is a soft account quota of 2,500 tables.

+ *

GetRecords was called with a value of more than 1000 for the limit request parameter.

+ *

More than 2 processes are reading from the same streams shard at the same time. Exceeding + * this limit may result in request throttling.

+ * + * @throws {@link TableNotFoundException} (client fault) + *

A source table with the name TableName does not currently exist within + * the subscriber's account or the subscriber is operating in the wrong Amazon Web Services Region.

+ * + * @throws {@link DynamoDBServiceException} + *

Base exception class for all service exceptions from DynamoDB service.

+ * + * + * @public + */ +export declare class CreateGlobalTableCommand extends CreateGlobalTableCommand_base { + /** @internal type navigation helper, not in runtime. */ + protected static __types: { + api: { + input: CreateGlobalTableInput; + output: CreateGlobalTableOutput; + }; + sdk: { + input: CreateGlobalTableCommandInput; + output: CreateGlobalTableCommandOutput; + }; + }; +} diff --git a/amplify/functions/downloadDocument/node_modules/@aws-sdk/client-dynamodb/dist-types/commands/CreateTableCommand.d.ts b/amplify/functions/downloadDocument/node_modules/@aws-sdk/client-dynamodb/dist-types/commands/CreateTableCommand.d.ts new file mode 100644 index 0000000..0c21eec --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@aws-sdk/client-dynamodb/dist-types/commands/CreateTableCommand.d.ts @@ -0,0 +1,378 @@ +import { Command as $Command } from "@smithy/smithy-client"; +import { MetadataBearer as __MetadataBearer } from "@smithy/types"; +import { DynamoDBClientResolvedConfig, ServiceInputTypes, ServiceOutputTypes } from "../DynamoDBClient"; +import { CreateTableInput, CreateTableOutput } from "../models/models_0"; +/** + * @public + */ +export type { __MetadataBearer }; +export { $Command }; +/** + * @public + * + * The input for {@link CreateTableCommand}. + */ +export interface CreateTableCommandInput extends CreateTableInput { +} +/** + * @public + * + * The output of {@link CreateTableCommand}. + */ +export interface CreateTableCommandOutput extends CreateTableOutput, __MetadataBearer { +} +declare const CreateTableCommand_base: { + new (input: CreateTableCommandInput): import("@smithy/smithy-client").CommandImpl; + new (__0_0: CreateTableCommandInput): import("@smithy/smithy-client").CommandImpl; + getEndpointParameterInstructions(): import("@smithy/middleware-endpoint").EndpointParameterInstructions; +}; +/** + *

The CreateTable operation adds a new table to your account. In an Amazon Web Services account, table names must be unique within each Region. That is, you can + * have two tables with same name if you create the tables in different Regions.

+ *

+ * CreateTable is an asynchronous operation. Upon receiving a + * CreateTable request, DynamoDB immediately returns a response with a + * TableStatus of CREATING. After the table is created, + * DynamoDB sets the TableStatus to ACTIVE. You can perform read + * and write operations only on an ACTIVE table.

+ *

You can optionally define secondary indexes on the new table, as part of the + * CreateTable operation. If you want to create multiple tables with + * secondary indexes on them, you must create the tables sequentially. Only one table with + * secondary indexes can be in the CREATING state at any given time.

+ *

You can use the DescribeTable action to check the table status.

+ * @example + * Use a bare-bones client and the command you need to make an API call. + * ```javascript + * import { DynamoDBClient, CreateTableCommand } from "@aws-sdk/client-dynamodb"; // ES Modules import + * // const { DynamoDBClient, CreateTableCommand } = require("@aws-sdk/client-dynamodb"); // CommonJS import + * const client = new DynamoDBClient(config); + * const input = { // CreateTableInput + * AttributeDefinitions: [ // AttributeDefinitions // required + * { // AttributeDefinition + * AttributeName: "STRING_VALUE", // required + * AttributeType: "S" || "N" || "B", // required + * }, + * ], + * TableName: "STRING_VALUE", // required + * KeySchema: [ // KeySchema // required + * { // KeySchemaElement + * AttributeName: "STRING_VALUE", // required + * KeyType: "HASH" || "RANGE", // required + * }, + * ], + * LocalSecondaryIndexes: [ // LocalSecondaryIndexList + * { // LocalSecondaryIndex + * IndexName: "STRING_VALUE", // required + * KeySchema: [ // required + * { + * AttributeName: "STRING_VALUE", // required + * KeyType: "HASH" || "RANGE", // required + * }, + * ], + * Projection: { // Projection + * ProjectionType: "ALL" || "KEYS_ONLY" || "INCLUDE", + * NonKeyAttributes: [ // NonKeyAttributeNameList + * "STRING_VALUE", + * ], + * }, + * }, + * ], + * GlobalSecondaryIndexes: [ // GlobalSecondaryIndexList + * { // GlobalSecondaryIndex + * IndexName: "STRING_VALUE", // required + * KeySchema: [ // required + * { + * AttributeName: "STRING_VALUE", // required + * KeyType: "HASH" || "RANGE", // required + * }, + * ], + * Projection: { + * ProjectionType: "ALL" || "KEYS_ONLY" || "INCLUDE", + * NonKeyAttributes: [ + * "STRING_VALUE", + * ], + * }, + * ProvisionedThroughput: { // ProvisionedThroughput + * ReadCapacityUnits: Number("long"), // required + * WriteCapacityUnits: Number("long"), // required + * }, + * OnDemandThroughput: { // OnDemandThroughput + * MaxReadRequestUnits: Number("long"), + * MaxWriteRequestUnits: Number("long"), + * }, + * 
WarmThroughput: { // WarmThroughput + * ReadUnitsPerSecond: Number("long"), + * WriteUnitsPerSecond: Number("long"), + * }, + * }, + * ], + * BillingMode: "PROVISIONED" || "PAY_PER_REQUEST", + * ProvisionedThroughput: { + * ReadCapacityUnits: Number("long"), // required + * WriteCapacityUnits: Number("long"), // required + * }, + * StreamSpecification: { // StreamSpecification + * StreamEnabled: true || false, // required + * StreamViewType: "NEW_IMAGE" || "OLD_IMAGE" || "NEW_AND_OLD_IMAGES" || "KEYS_ONLY", + * }, + * SSESpecification: { // SSESpecification + * Enabled: true || false, + * SSEType: "AES256" || "KMS", + * KMSMasterKeyId: "STRING_VALUE", + * }, + * Tags: [ // TagList + * { // Tag + * Key: "STRING_VALUE", // required + * Value: "STRING_VALUE", // required + * }, + * ], + * TableClass: "STANDARD" || "STANDARD_INFREQUENT_ACCESS", + * DeletionProtectionEnabled: true || false, + * WarmThroughput: { + * ReadUnitsPerSecond: Number("long"), + * WriteUnitsPerSecond: Number("long"), + * }, + * ResourcePolicy: "STRING_VALUE", + * OnDemandThroughput: { + * MaxReadRequestUnits: Number("long"), + * MaxWriteRequestUnits: Number("long"), + * }, + * }; + * const command = new CreateTableCommand(input); + * const response = await client.send(command); + * // { // CreateTableOutput + * // TableDescription: { // TableDescription + * // AttributeDefinitions: [ // AttributeDefinitions + * // { // AttributeDefinition + * // AttributeName: "STRING_VALUE", // required + * // AttributeType: "S" || "N" || "B", // required + * // }, + * // ], + * // TableName: "STRING_VALUE", + * // KeySchema: [ // KeySchema + * // { // KeySchemaElement + * // AttributeName: "STRING_VALUE", // required + * // KeyType: "HASH" || "RANGE", // required + * // }, + * // ], + * // TableStatus: "CREATING" || "UPDATING" || "DELETING" || "ACTIVE" || "INACCESSIBLE_ENCRYPTION_CREDENTIALS" || "ARCHIVING" || "ARCHIVED", + * // CreationDateTime: new Date("TIMESTAMP"), + * // ProvisionedThroughput: { // 
ProvisionedThroughputDescription + * // LastIncreaseDateTime: new Date("TIMESTAMP"), + * // LastDecreaseDateTime: new Date("TIMESTAMP"), + * // NumberOfDecreasesToday: Number("long"), + * // ReadCapacityUnits: Number("long"), + * // WriteCapacityUnits: Number("long"), + * // }, + * // TableSizeBytes: Number("long"), + * // ItemCount: Number("long"), + * // TableArn: "STRING_VALUE", + * // TableId: "STRING_VALUE", + * // BillingModeSummary: { // BillingModeSummary + * // BillingMode: "PROVISIONED" || "PAY_PER_REQUEST", + * // LastUpdateToPayPerRequestDateTime: new Date("TIMESTAMP"), + * // }, + * // LocalSecondaryIndexes: [ // LocalSecondaryIndexDescriptionList + * // { // LocalSecondaryIndexDescription + * // IndexName: "STRING_VALUE", + * // KeySchema: [ + * // { + * // AttributeName: "STRING_VALUE", // required + * // KeyType: "HASH" || "RANGE", // required + * // }, + * // ], + * // Projection: { // Projection + * // ProjectionType: "ALL" || "KEYS_ONLY" || "INCLUDE", + * // NonKeyAttributes: [ // NonKeyAttributeNameList + * // "STRING_VALUE", + * // ], + * // }, + * // IndexSizeBytes: Number("long"), + * // ItemCount: Number("long"), + * // IndexArn: "STRING_VALUE", + * // }, + * // ], + * // GlobalSecondaryIndexes: [ // GlobalSecondaryIndexDescriptionList + * // { // GlobalSecondaryIndexDescription + * // IndexName: "STRING_VALUE", + * // KeySchema: [ + * // { + * // AttributeName: "STRING_VALUE", // required + * // KeyType: "HASH" || "RANGE", // required + * // }, + * // ], + * // Projection: { + * // ProjectionType: "ALL" || "KEYS_ONLY" || "INCLUDE", + * // NonKeyAttributes: [ + * // "STRING_VALUE", + * // ], + * // }, + * // IndexStatus: "CREATING" || "UPDATING" || "DELETING" || "ACTIVE", + * // Backfilling: true || false, + * // ProvisionedThroughput: { + * // LastIncreaseDateTime: new Date("TIMESTAMP"), + * // LastDecreaseDateTime: new Date("TIMESTAMP"), + * // NumberOfDecreasesToday: Number("long"), + * // ReadCapacityUnits: Number("long"), + * // 
WriteCapacityUnits: Number("long"), + * // }, + * // IndexSizeBytes: Number("long"), + * // ItemCount: Number("long"), + * // IndexArn: "STRING_VALUE", + * // OnDemandThroughput: { // OnDemandThroughput + * // MaxReadRequestUnits: Number("long"), + * // MaxWriteRequestUnits: Number("long"), + * // }, + * // WarmThroughput: { // GlobalSecondaryIndexWarmThroughputDescription + * // ReadUnitsPerSecond: Number("long"), + * // WriteUnitsPerSecond: Number("long"), + * // Status: "CREATING" || "UPDATING" || "DELETING" || "ACTIVE", + * // }, + * // }, + * // ], + * // StreamSpecification: { // StreamSpecification + * // StreamEnabled: true || false, // required + * // StreamViewType: "NEW_IMAGE" || "OLD_IMAGE" || "NEW_AND_OLD_IMAGES" || "KEYS_ONLY", + * // }, + * // LatestStreamLabel: "STRING_VALUE", + * // LatestStreamArn: "STRING_VALUE", + * // GlobalTableVersion: "STRING_VALUE", + * // Replicas: [ // ReplicaDescriptionList + * // { // ReplicaDescription + * // RegionName: "STRING_VALUE", + * // ReplicaStatus: "CREATING" || "CREATION_FAILED" || "UPDATING" || "DELETING" || "ACTIVE" || "REGION_DISABLED" || "INACCESSIBLE_ENCRYPTION_CREDENTIALS", + * // ReplicaStatusDescription: "STRING_VALUE", + * // ReplicaStatusPercentProgress: "STRING_VALUE", + * // KMSMasterKeyId: "STRING_VALUE", + * // ProvisionedThroughputOverride: { // ProvisionedThroughputOverride + * // ReadCapacityUnits: Number("long"), + * // }, + * // OnDemandThroughputOverride: { // OnDemandThroughputOverride + * // MaxReadRequestUnits: Number("long"), + * // }, + * // WarmThroughput: { // TableWarmThroughputDescription + * // ReadUnitsPerSecond: Number("long"), + * // WriteUnitsPerSecond: Number("long"), + * // Status: "CREATING" || "UPDATING" || "DELETING" || "ACTIVE" || "INACCESSIBLE_ENCRYPTION_CREDENTIALS" || "ARCHIVING" || "ARCHIVED", + * // }, + * // GlobalSecondaryIndexes: [ // ReplicaGlobalSecondaryIndexDescriptionList + * // { // ReplicaGlobalSecondaryIndexDescription + * // IndexName: "STRING_VALUE", 
+ * // ProvisionedThroughputOverride: { + * // ReadCapacityUnits: Number("long"), + * // }, + * // OnDemandThroughputOverride: { + * // MaxReadRequestUnits: Number("long"), + * // }, + * // WarmThroughput: { + * // ReadUnitsPerSecond: Number("long"), + * // WriteUnitsPerSecond: Number("long"), + * // Status: "CREATING" || "UPDATING" || "DELETING" || "ACTIVE", + * // }, + * // }, + * // ], + * // ReplicaInaccessibleDateTime: new Date("TIMESTAMP"), + * // ReplicaTableClassSummary: { // TableClassSummary + * // TableClass: "STANDARD" || "STANDARD_INFREQUENT_ACCESS", + * // LastUpdateDateTime: new Date("TIMESTAMP"), + * // }, + * // }, + * // ], + * // RestoreSummary: { // RestoreSummary + * // SourceBackupArn: "STRING_VALUE", + * // SourceTableArn: "STRING_VALUE", + * // RestoreDateTime: new Date("TIMESTAMP"), // required + * // RestoreInProgress: true || false, // required + * // }, + * // SSEDescription: { // SSEDescription + * // Status: "ENABLING" || "ENABLED" || "DISABLING" || "DISABLED" || "UPDATING", + * // SSEType: "AES256" || "KMS", + * // KMSMasterKeyArn: "STRING_VALUE", + * // InaccessibleEncryptionDateTime: new Date("TIMESTAMP"), + * // }, + * // ArchivalSummary: { // ArchivalSummary + * // ArchivalDateTime: new Date("TIMESTAMP"), + * // ArchivalReason: "STRING_VALUE", + * // ArchivalBackupArn: "STRING_VALUE", + * // }, + * // TableClassSummary: { + * // TableClass: "STANDARD" || "STANDARD_INFREQUENT_ACCESS", + * // LastUpdateDateTime: new Date("TIMESTAMP"), + * // }, + * // DeletionProtectionEnabled: true || false, + * // OnDemandThroughput: { + * // MaxReadRequestUnits: Number("long"), + * // MaxWriteRequestUnits: Number("long"), + * // }, + * // WarmThroughput: { + * // ReadUnitsPerSecond: Number("long"), + * // WriteUnitsPerSecond: Number("long"), + * // Status: "CREATING" || "UPDATING" || "DELETING" || "ACTIVE" || "INACCESSIBLE_ENCRYPTION_CREDENTIALS" || "ARCHIVING" || "ARCHIVED", + * // }, + * // MultiRegionConsistency: "EVENTUAL" || "STRONG", + * // 
}, + * // }; + * + * ``` + * + * @param CreateTableCommandInput - {@link CreateTableCommandInput} + * @returns {@link CreateTableCommandOutput} + * @see {@link CreateTableCommandInput} for command's `input` shape. + * @see {@link CreateTableCommandOutput} for command's `response` shape. + * @see {@link DynamoDBClientResolvedConfig | config} for DynamoDBClient's `config` shape. + * + * @throws {@link InternalServerError} (server fault) + *

An error occurred on the server side.

+ * + * @throws {@link InvalidEndpointException} (client fault) + * + * @throws {@link LimitExceededException} (client fault) + *

There is no limit to the number of daily on-demand backups that can be taken.

+ *

For most purposes, up to 500 simultaneous table operations are allowed per account. These operations + * include CreateTable, UpdateTable, + * DeleteTable,UpdateTimeToLive, + * RestoreTableFromBackup, and RestoreTableToPointInTime.

+ *

When you are creating a table with one or more secondary + * indexes, you can have up to 250 such requests running at a time. However, if the table or + * index specifications are complex, then DynamoDB might temporarily reduce the number + * of concurrent operations.

+ *

When importing into DynamoDB, up to 50 simultaneous import table operations are allowed per account.

+ *

There is a soft account quota of 2,500 tables.

+ *

GetRecords was called with a value of more than 1000 for the limit request parameter.

+ *

More than 2 processes are reading from the same streams shard at the same time. Exceeding + * this limit may result in request throttling.

+ * + * @throws {@link ResourceInUseException} (client fault) + *

The operation conflicts with the resource's availability. For example:

+ *
    + *
  • + *

    You attempted to recreate an existing table.

    + *
  • + *
  • + *

    You tried to delete a table currently in the CREATING state.

    + *
  • + *
  • + *

    You tried to update a resource that was already being updated.

    + *
  • + *
+ *

When appropriate, wait for the ongoing update to complete and attempt the request again.

+ * + * @throws {@link DynamoDBServiceException} + *

Base exception class for all service exceptions from DynamoDB service.

+ * + * + * @public + */ +export declare class CreateTableCommand extends CreateTableCommand_base { + /** @internal type navigation helper, not in runtime. */ + protected static __types: { + api: { + input: CreateTableInput; + output: CreateTableOutput; + }; + sdk: { + input: CreateTableCommandInput; + output: CreateTableCommandOutput; + }; + }; +} diff --git a/amplify/functions/downloadDocument/node_modules/@aws-sdk/client-dynamodb/dist-types/commands/DeleteBackupCommand.d.ts b/amplify/functions/downloadDocument/node_modules/@aws-sdk/client-dynamodb/dist-types/commands/DeleteBackupCommand.d.ts new file mode 100644 index 0000000..d3da508 --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@aws-sdk/client-dynamodb/dist-types/commands/DeleteBackupCommand.d.ts @@ -0,0 +1,193 @@ +import { Command as $Command } from "@smithy/smithy-client"; +import { MetadataBearer as __MetadataBearer } from "@smithy/types"; +import { DynamoDBClientResolvedConfig, ServiceInputTypes, ServiceOutputTypes } from "../DynamoDBClient"; +import { DeleteBackupInput, DeleteBackupOutput } from "../models/models_0"; +/** + * @public + */ +export type { __MetadataBearer }; +export { $Command }; +/** + * @public + * + * The input for {@link DeleteBackupCommand}. + */ +export interface DeleteBackupCommandInput extends DeleteBackupInput { +} +/** + * @public + * + * The output of {@link DeleteBackupCommand}. + */ +export interface DeleteBackupCommandOutput extends DeleteBackupOutput, __MetadataBearer { +} +declare const DeleteBackupCommand_base: { + new (input: DeleteBackupCommandInput): import("@smithy/smithy-client").CommandImpl; + new (__0_0: DeleteBackupCommandInput): import("@smithy/smithy-client").CommandImpl; + getEndpointParameterInstructions(): import("@smithy/middleware-endpoint").EndpointParameterInstructions; +}; +/** + *

Deletes an existing backup of a table.

+ *

You can call DeleteBackup at a maximum rate of 10 times per + * second.

+ * @example + * Use a bare-bones client and the command you need to make an API call. + * ```javascript + * import { DynamoDBClient, DeleteBackupCommand } from "@aws-sdk/client-dynamodb"; // ES Modules import + * // const { DynamoDBClient, DeleteBackupCommand } = require("@aws-sdk/client-dynamodb"); // CommonJS import + * const client = new DynamoDBClient(config); + * const input = { // DeleteBackupInput + * BackupArn: "STRING_VALUE", // required + * }; + * const command = new DeleteBackupCommand(input); + * const response = await client.send(command); + * // { // DeleteBackupOutput + * // BackupDescription: { // BackupDescription + * // BackupDetails: { // BackupDetails + * // BackupArn: "STRING_VALUE", // required + * // BackupName: "STRING_VALUE", // required + * // BackupSizeBytes: Number("long"), + * // BackupStatus: "CREATING" || "DELETED" || "AVAILABLE", // required + * // BackupType: "USER" || "SYSTEM" || "AWS_BACKUP", // required + * // BackupCreationDateTime: new Date("TIMESTAMP"), // required + * // BackupExpiryDateTime: new Date("TIMESTAMP"), + * // }, + * // SourceTableDetails: { // SourceTableDetails + * // TableName: "STRING_VALUE", // required + * // TableId: "STRING_VALUE", // required + * // TableArn: "STRING_VALUE", + * // TableSizeBytes: Number("long"), + * // KeySchema: [ // KeySchema // required + * // { // KeySchemaElement + * // AttributeName: "STRING_VALUE", // required + * // KeyType: "HASH" || "RANGE", // required + * // }, + * // ], + * // TableCreationDateTime: new Date("TIMESTAMP"), // required + * // ProvisionedThroughput: { // ProvisionedThroughput + * // ReadCapacityUnits: Number("long"), // required + * // WriteCapacityUnits: Number("long"), // required + * // }, + * // OnDemandThroughput: { // OnDemandThroughput + * // MaxReadRequestUnits: Number("long"), + * // MaxWriteRequestUnits: Number("long"), + * // }, + * // ItemCount: Number("long"), + * // BillingMode: "PROVISIONED" || "PAY_PER_REQUEST", + * // }, + * // 
SourceTableFeatureDetails: { // SourceTableFeatureDetails + * // LocalSecondaryIndexes: [ // LocalSecondaryIndexes + * // { // LocalSecondaryIndexInfo + * // IndexName: "STRING_VALUE", + * // KeySchema: [ + * // { + * // AttributeName: "STRING_VALUE", // required + * // KeyType: "HASH" || "RANGE", // required + * // }, + * // ], + * // Projection: { // Projection + * // ProjectionType: "ALL" || "KEYS_ONLY" || "INCLUDE", + * // NonKeyAttributes: [ // NonKeyAttributeNameList + * // "STRING_VALUE", + * // ], + * // }, + * // }, + * // ], + * // GlobalSecondaryIndexes: [ // GlobalSecondaryIndexes + * // { // GlobalSecondaryIndexInfo + * // IndexName: "STRING_VALUE", + * // KeySchema: [ + * // { + * // AttributeName: "STRING_VALUE", // required + * // KeyType: "HASH" || "RANGE", // required + * // }, + * // ], + * // Projection: { + * // ProjectionType: "ALL" || "KEYS_ONLY" || "INCLUDE", + * // NonKeyAttributes: [ + * // "STRING_VALUE", + * // ], + * // }, + * // ProvisionedThroughput: { + * // ReadCapacityUnits: Number("long"), // required + * // WriteCapacityUnits: Number("long"), // required + * // }, + * // OnDemandThroughput: { + * // MaxReadRequestUnits: Number("long"), + * // MaxWriteRequestUnits: Number("long"), + * // }, + * // }, + * // ], + * // StreamDescription: { // StreamSpecification + * // StreamEnabled: true || false, // required + * // StreamViewType: "NEW_IMAGE" || "OLD_IMAGE" || "NEW_AND_OLD_IMAGES" || "KEYS_ONLY", + * // }, + * // TimeToLiveDescription: { // TimeToLiveDescription + * // TimeToLiveStatus: "ENABLING" || "DISABLING" || "ENABLED" || "DISABLED", + * // AttributeName: "STRING_VALUE", + * // }, + * // SSEDescription: { // SSEDescription + * // Status: "ENABLING" || "ENABLED" || "DISABLING" || "DISABLED" || "UPDATING", + * // SSEType: "AES256" || "KMS", + * // KMSMasterKeyArn: "STRING_VALUE", + * // InaccessibleEncryptionDateTime: new Date("TIMESTAMP"), + * // }, + * // }, + * // }, + * // }; + * + * ``` + * + * @param 
DeleteBackupCommandInput - {@link DeleteBackupCommandInput} + * @returns {@link DeleteBackupCommandOutput} + * @see {@link DeleteBackupCommandInput} for command's `input` shape. + * @see {@link DeleteBackupCommandOutput} for command's `response` shape. + * @see {@link DynamoDBClientResolvedConfig | config} for DynamoDBClient's `config` shape. + * + * @throws {@link BackupInUseException} (client fault) + *

There is another ongoing conflicting backup control plane operation on the table. + * The backup is either being created, deleted or restored to a table.

+ * + * @throws {@link BackupNotFoundException} (client fault) + *

Backup not found for the given BackupARN.

+ * + * @throws {@link InternalServerError} (server fault) + *

An error occurred on the server side.

+ * + * @throws {@link InvalidEndpointException} (client fault) + * + * @throws {@link LimitExceededException} (client fault) + *

There is no limit to the number of daily on-demand backups that can be taken.

+ *

For most purposes, up to 500 simultaneous table operations are allowed per account. These operations + * include CreateTable, UpdateTable, + * DeleteTable,UpdateTimeToLive, + * RestoreTableFromBackup, and RestoreTableToPointInTime.

+ *

When you are creating a table with one or more secondary + * indexes, you can have up to 250 such requests running at a time. However, if the table or + * index specifications are complex, then DynamoDB might temporarily reduce the number + * of concurrent operations.

+ *

When importing into DynamoDB, up to 50 simultaneous import table operations are allowed per account.

+ *

There is a soft account quota of 2,500 tables.

+ *

GetRecords was called with a value of more than 1000 for the limit request parameter.

+ *

More than 2 processes are reading from the same streams shard at the same time. Exceeding + * this limit may result in request throttling.

+ * + * @throws {@link DynamoDBServiceException} + *

Base exception class for all service exceptions from DynamoDB service.

+ * + * + * @public + */ +export declare class DeleteBackupCommand extends DeleteBackupCommand_base { + /** @internal type navigation helper, not in runtime. */ + protected static __types: { + api: { + input: DeleteBackupInput; + output: DeleteBackupOutput; + }; + sdk: { + input: DeleteBackupCommandInput; + output: DeleteBackupCommandOutput; + }; + }; +} diff --git a/amplify/functions/downloadDocument/node_modules/@aws-sdk/client-dynamodb/dist-types/commands/DeleteItemCommand.d.ts b/amplify/functions/downloadDocument/node_modules/@aws-sdk/client-dynamodb/dist-types/commands/DeleteItemCommand.d.ts new file mode 100644 index 0000000..619ecf2 --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@aws-sdk/client-dynamodb/dist-types/commands/DeleteItemCommand.d.ts @@ -0,0 +1,286 @@ +import { Command as $Command } from "@smithy/smithy-client"; +import { MetadataBearer as __MetadataBearer } from "@smithy/types"; +import { DynamoDBClientResolvedConfig, ServiceInputTypes, ServiceOutputTypes } from "../DynamoDBClient"; +import { DeleteItemInput, DeleteItemOutput } from "../models/models_0"; +/** + * @public + */ +export type { __MetadataBearer }; +export { $Command }; +/** + * @public + * + * The input for {@link DeleteItemCommand}. + */ +export interface DeleteItemCommandInput extends DeleteItemInput { +} +/** + * @public + * + * The output of {@link DeleteItemCommand}. + */ +export interface DeleteItemCommandOutput extends DeleteItemOutput, __MetadataBearer { +} +declare const DeleteItemCommand_base: { + new (input: DeleteItemCommandInput): import("@smithy/smithy-client").CommandImpl; + new (__0_0: DeleteItemCommandInput): import("@smithy/smithy-client").CommandImpl; + getEndpointParameterInstructions(): import("@smithy/middleware-endpoint").EndpointParameterInstructions; +}; +/** + *

Deletes a single item in a table by primary key. You can perform a conditional delete + * operation that deletes the item if it exists, or if it has an expected attribute + * value.

+ *

In addition to deleting an item, you can also return the item's attribute values in + * the same operation, using the ReturnValues parameter.

+ *

Unless you specify conditions, the DeleteItem is an idempotent operation; + * running it multiple times on the same item or attribute does not + * result in an error response.

+ *

Conditional deletes are useful for deleting items only if specific conditions are met. + * If those conditions are met, DynamoDB performs the delete. Otherwise, the item is not + * deleted.

+ * @example + * Use a bare-bones client and the command you need to make an API call. + * ```javascript + * import { DynamoDBClient, DeleteItemCommand } from "@aws-sdk/client-dynamodb"; // ES Modules import + * // const { DynamoDBClient, DeleteItemCommand } = require("@aws-sdk/client-dynamodb"); // CommonJS import + * const client = new DynamoDBClient(config); + * const input = { // DeleteItemInput + * TableName: "STRING_VALUE", // required + * Key: { // Key // required + * "": { // AttributeValue Union: only one key present + * S: "STRING_VALUE", + * N: "STRING_VALUE", + * B: new Uint8Array(), // e.g. Buffer.from("") or new TextEncoder().encode("") + * SS: [ // StringSetAttributeValue + * "STRING_VALUE", + * ], + * NS: [ // NumberSetAttributeValue + * "STRING_VALUE", + * ], + * BS: [ // BinarySetAttributeValue + * new Uint8Array(), // e.g. Buffer.from("") or new TextEncoder().encode("") + * ], + * M: { // MapAttributeValue + * "": {// Union: only one key present + * S: "STRING_VALUE", + * N: "STRING_VALUE", + * B: new Uint8Array(), // e.g. Buffer.from("") or new TextEncoder().encode("") + * SS: [ + * "STRING_VALUE", + * ], + * NS: [ + * "STRING_VALUE", + * ], + * BS: [ + * new Uint8Array(), // e.g. 
Buffer.from("") or new TextEncoder().encode("") + * ], + * M: { + * "": "", + * }, + * L: [ // ListAttributeValue + * "", + * ], + * NULL: true || false, + * BOOL: true || false, + * }, + * }, + * L: [ + * "", + * ], + * NULL: true || false, + * BOOL: true || false, + * }, + * }, + * Expected: { // ExpectedAttributeMap + * "": { // ExpectedAttributeValue + * Value: "", + * Exists: true || false, + * ComparisonOperator: "EQ" || "NE" || "IN" || "LE" || "LT" || "GE" || "GT" || "BETWEEN" || "NOT_NULL" || "NULL" || "CONTAINS" || "NOT_CONTAINS" || "BEGINS_WITH", + * AttributeValueList: [ // AttributeValueList + * "", + * ], + * }, + * }, + * ConditionalOperator: "AND" || "OR", + * ReturnValues: "NONE" || "ALL_OLD" || "UPDATED_OLD" || "ALL_NEW" || "UPDATED_NEW", + * ReturnConsumedCapacity: "INDEXES" || "TOTAL" || "NONE", + * ReturnItemCollectionMetrics: "SIZE" || "NONE", + * ConditionExpression: "STRING_VALUE", + * ExpressionAttributeNames: { // ExpressionAttributeNameMap + * "": "STRING_VALUE", + * }, + * ExpressionAttributeValues: { // ExpressionAttributeValueMap + * "": "", + * }, + * ReturnValuesOnConditionCheckFailure: "ALL_OLD" || "NONE", + * }; + * const command = new DeleteItemCommand(input); + * const response = await client.send(command); + * // { // DeleteItemOutput + * // Attributes: { // AttributeMap + * // "": { // AttributeValue Union: only one key present + * // S: "STRING_VALUE", + * // N: "STRING_VALUE", + * // B: new Uint8Array(), + * // SS: [ // StringSetAttributeValue + * // "STRING_VALUE", + * // ], + * // NS: [ // NumberSetAttributeValue + * // "STRING_VALUE", + * // ], + * // BS: [ // BinarySetAttributeValue + * // new Uint8Array(), + * // ], + * // M: { // MapAttributeValue + * // "": {// Union: only one key present + * // S: "STRING_VALUE", + * // N: "STRING_VALUE", + * // B: new Uint8Array(), + * // SS: [ + * // "STRING_VALUE", + * // ], + * // NS: [ + * // "STRING_VALUE", + * // ], + * // BS: [ + * // new Uint8Array(), + * // ], + * // M: { + * 
// "": "", + * // }, + * // L: [ // ListAttributeValue + * // "", + * // ], + * // NULL: true || false, + * // BOOL: true || false, + * // }, + * // }, + * // L: [ + * // "", + * // ], + * // NULL: true || false, + * // BOOL: true || false, + * // }, + * // }, + * // ConsumedCapacity: { // ConsumedCapacity + * // TableName: "STRING_VALUE", + * // CapacityUnits: Number("double"), + * // ReadCapacityUnits: Number("double"), + * // WriteCapacityUnits: Number("double"), + * // Table: { // Capacity + * // ReadCapacityUnits: Number("double"), + * // WriteCapacityUnits: Number("double"), + * // CapacityUnits: Number("double"), + * // }, + * // LocalSecondaryIndexes: { // SecondaryIndexesCapacityMap + * // "": { + * // ReadCapacityUnits: Number("double"), + * // WriteCapacityUnits: Number("double"), + * // CapacityUnits: Number("double"), + * // }, + * // }, + * // GlobalSecondaryIndexes: { + * // "": { + * // ReadCapacityUnits: Number("double"), + * // WriteCapacityUnits: Number("double"), + * // CapacityUnits: Number("double"), + * // }, + * // }, + * // }, + * // ItemCollectionMetrics: { // ItemCollectionMetrics + * // ItemCollectionKey: { // ItemCollectionKeyAttributeMap + * // "": "", + * // }, + * // SizeEstimateRangeGB: [ // ItemCollectionSizeEstimateRange + * // Number("double"), + * // ], + * // }, + * // }; + * + * ``` + * + * @param DeleteItemCommandInput - {@link DeleteItemCommandInput} + * @returns {@link DeleteItemCommandOutput} + * @see {@link DeleteItemCommandInput} for command's `input` shape. + * @see {@link DeleteItemCommandOutput} for command's `response` shape. + * @see {@link DynamoDBClientResolvedConfig | config} for DynamoDBClient's `config` shape. + * + * @throws {@link ConditionalCheckFailedException} (client fault) + *

A condition specified in the operation failed to be evaluated.

+ * + * @throws {@link InternalServerError} (server fault) + *

An error occurred on the server side.

+ * + * @throws {@link InvalidEndpointException} (client fault) + * + * @throws {@link ItemCollectionSizeLimitExceededException} (client fault) + *

An item collection is too large. This exception is only returned for tables that + * have one or more local secondary indexes.

+ * + * @throws {@link ProvisionedThroughputExceededException} (client fault) + *

Your request rate is too high. The Amazon Web Services SDKs for DynamoDB + * automatically retry requests that receive this exception. Your request is eventually + * successful, unless your retry queue is too large to finish. Reduce the frequency of + * requests and use exponential backoff. For more information, go to Error Retries and Exponential Backoff in the Amazon DynamoDB Developer Guide.

+ * + * @throws {@link ReplicatedWriteConflictException} (client fault) + *

The request was rejected because one or more items in the request are being modified by a request in another Region.

+ * + * @throws {@link RequestLimitExceeded} (client fault) + *

Throughput exceeds the current throughput quota for your account. Please contact + * Amazon Web ServicesSupport to request a + * quota increase.

+ * + * @throws {@link ResourceNotFoundException} (client fault) + *

The operation tried to access a nonexistent table or index. The resource might not + * be specified correctly, or its status might not be ACTIVE.

+ * + * @throws {@link TransactionConflictException} (client fault) + *

Operation was rejected because there is an ongoing transaction for the + * item.

+ * + * @throws {@link DynamoDBServiceException} + *

Base exception class for all service exceptions from DynamoDB service.

+ * + * + * @example To delete an item + * ```javascript + * // This example deletes an item from the Music table. + * const input = { + * Key: { + * Artist: { + * S: "No One You Know" + * }, + * SongTitle: { + * S: "Scared of My Shadow" + * } + * }, + * TableName: "Music" + * }; + * const command = new DeleteItemCommand(input); + * const response = await client.send(command); + * /* response is + * { + * ConsumedCapacity: { + * CapacityUnits: 1, + * TableName: "Music" + * } + * } + * *\/ + * ``` + * + * @public + */ +export declare class DeleteItemCommand extends DeleteItemCommand_base { + /** @internal type navigation helper, not in runtime. */ + protected static __types: { + api: { + input: DeleteItemInput; + output: DeleteItemOutput; + }; + sdk: { + input: DeleteItemCommandInput; + output: DeleteItemCommandOutput; + }; + }; +} diff --git a/amplify/functions/downloadDocument/node_modules/@aws-sdk/client-dynamodb/dist-types/commands/DeleteResourcePolicyCommand.d.ts b/amplify/functions/downloadDocument/node_modules/@aws-sdk/client-dynamodb/dist-types/commands/DeleteResourcePolicyCommand.d.ts new file mode 100644 index 0000000..da92cf9 --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@aws-sdk/client-dynamodb/dist-types/commands/DeleteResourcePolicyCommand.d.ts @@ -0,0 +1,138 @@ +import { Command as $Command } from "@smithy/smithy-client"; +import { MetadataBearer as __MetadataBearer } from "@smithy/types"; +import { DynamoDBClientResolvedConfig, ServiceInputTypes, ServiceOutputTypes } from "../DynamoDBClient"; +import { DeleteResourcePolicyInput, DeleteResourcePolicyOutput } from "../models/models_0"; +/** + * @public + */ +export type { __MetadataBearer }; +export { $Command }; +/** + * @public + * + * The input for {@link DeleteResourcePolicyCommand}. + */ +export interface DeleteResourcePolicyCommandInput extends DeleteResourcePolicyInput { +} +/** + * @public + * + * The output of {@link DeleteResourcePolicyCommand}. 
+ */ +export interface DeleteResourcePolicyCommandOutput extends DeleteResourcePolicyOutput, __MetadataBearer { +} +declare const DeleteResourcePolicyCommand_base: { + new (input: DeleteResourcePolicyCommandInput): import("@smithy/smithy-client").CommandImpl; + new (__0_0: DeleteResourcePolicyCommandInput): import("@smithy/smithy-client").CommandImpl; + getEndpointParameterInstructions(): import("@smithy/middleware-endpoint").EndpointParameterInstructions; +}; +/** + *

Deletes the resource-based policy attached to the resource, which can be a table or + * stream.

+ *

+ * DeleteResourcePolicy is an idempotent operation; running it multiple + * times on the same resource doesn't result in an error response, + * unless you specify an ExpectedRevisionId, which will then return a + * PolicyNotFoundException.

+ * + *

To make sure that you don't inadvertently lock yourself out of your own resources, + * the root principal in your Amazon Web Services account can perform + * DeleteResourcePolicy requests, even if your resource-based policy + * explicitly denies the root principal's access.

+ *
+ * + *

+ * DeleteResourcePolicy is an asynchronous operation. If you issue a + * GetResourcePolicy request immediately after running the + * DeleteResourcePolicy request, DynamoDB might still return + * the deleted policy. This is because the policy for your resource might not have been + * deleted yet. Wait for a few seconds, and then try the GetResourcePolicy + * request again.

+ *
+ * @example + * Use a bare-bones client and the command you need to make an API call. + * ```javascript + * import { DynamoDBClient, DeleteResourcePolicyCommand } from "@aws-sdk/client-dynamodb"; // ES Modules import + * // const { DynamoDBClient, DeleteResourcePolicyCommand } = require("@aws-sdk/client-dynamodb"); // CommonJS import + * const client = new DynamoDBClient(config); + * const input = { // DeleteResourcePolicyInput + * ResourceArn: "STRING_VALUE", // required + * ExpectedRevisionId: "STRING_VALUE", + * }; + * const command = new DeleteResourcePolicyCommand(input); + * const response = await client.send(command); + * // { // DeleteResourcePolicyOutput + * // RevisionId: "STRING_VALUE", + * // }; + * + * ``` + * + * @param DeleteResourcePolicyCommandInput - {@link DeleteResourcePolicyCommandInput} + * @returns {@link DeleteResourcePolicyCommandOutput} + * @see {@link DeleteResourcePolicyCommandInput} for command's `input` shape. + * @see {@link DeleteResourcePolicyCommandOutput} for command's `response` shape. + * @see {@link DynamoDBClientResolvedConfig | config} for DynamoDBClient's `config` shape. + * + * @throws {@link InternalServerError} (server fault) + *

An error occurred on the server side.

+ * + * @throws {@link InvalidEndpointException} (client fault) + * + * @throws {@link LimitExceededException} (client fault) + *

There is no limit to the number of daily on-demand backups that can be taken.

+ *

For most purposes, up to 500 simultaneous table operations are allowed per account. These operations + * include CreateTable, UpdateTable, + * DeleteTable,UpdateTimeToLive, + * RestoreTableFromBackup, and RestoreTableToPointInTime.

+ *

When you are creating a table with one or more secondary + * indexes, you can have up to 250 such requests running at a time. However, if the table or + * index specifications are complex, then DynamoDB might temporarily reduce the number + * of concurrent operations.

+ *

When importing into DynamoDB, up to 50 simultaneous import table operations are allowed per account.

+ *

There is a soft account quota of 2,500 tables.

+ *

GetRecords was called with a value of more than 1000 for the limit request parameter.

+ *

More than 2 processes are reading from the same streams shard at the same time. Exceeding + * this limit may result in request throttling.

+ * + * @throws {@link PolicyNotFoundException} (client fault) + *

The operation tried to access a nonexistent resource-based policy.

+ *

If you specified an ExpectedRevisionId, it's possible that a policy is present for the resource but its revision ID didn't match the expected value.

+ * + * @throws {@link ResourceInUseException} (client fault) + *

The operation conflicts with the resource's availability. For example:

+ *
    + *
  • + *

    You attempted to recreate an existing table.

    + *
  • + *
  • + *

    You tried to delete a table currently in the CREATING state.

    + *
  • + *
  • + *

    You tried to update a resource that was already being updated.

    + *
  • + *
+ *

When appropriate, wait for the ongoing update to complete and attempt the request again.

+ * + * @throws {@link ResourceNotFoundException} (client fault) + *

The operation tried to access a nonexistent table or index. The resource might not + * be specified correctly, or its status might not be ACTIVE.

+ * + * @throws {@link DynamoDBServiceException} + *

Base exception class for all service exceptions from DynamoDB service.

+ * + * + * @public + */ +export declare class DeleteResourcePolicyCommand extends DeleteResourcePolicyCommand_base { + /** @internal type navigation helper, not in runtime. */ + protected static __types: { + api: { + input: DeleteResourcePolicyInput; + output: DeleteResourcePolicyOutput; + }; + sdk: { + input: DeleteResourcePolicyCommandInput; + output: DeleteResourcePolicyCommandOutput; + }; + }; +} diff --git a/amplify/functions/downloadDocument/node_modules/@aws-sdk/client-dynamodb/dist-types/commands/DeleteTableCommand.d.ts b/amplify/functions/downloadDocument/node_modules/@aws-sdk/client-dynamodb/dist-types/commands/DeleteTableCommand.d.ts new file mode 100644 index 0000000..f756cd0 --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@aws-sdk/client-dynamodb/dist-types/commands/DeleteTableCommand.d.ts @@ -0,0 +1,328 @@ +import { Command as $Command } from "@smithy/smithy-client"; +import { MetadataBearer as __MetadataBearer } from "@smithy/types"; +import { DynamoDBClientResolvedConfig, ServiceInputTypes, ServiceOutputTypes } from "../DynamoDBClient"; +import { DeleteTableInput, DeleteTableOutput } from "../models/models_0"; +/** + * @public + */ +export type { __MetadataBearer }; +export { $Command }; +/** + * @public + * + * The input for {@link DeleteTableCommand}. + */ +export interface DeleteTableCommandInput extends DeleteTableInput { +} +/** + * @public + * + * The output of {@link DeleteTableCommand}. + */ +export interface DeleteTableCommandOutput extends DeleteTableOutput, __MetadataBearer { +} +declare const DeleteTableCommand_base: { + new (input: DeleteTableCommandInput): import("@smithy/smithy-client").CommandImpl; + new (__0_0: DeleteTableCommandInput): import("@smithy/smithy-client").CommandImpl; + getEndpointParameterInstructions(): import("@smithy/middleware-endpoint").EndpointParameterInstructions; +}; +/** + *

The DeleteTable operation deletes a table and all of its items. After a + * DeleteTable request, the specified table is in the + * DELETING state until DynamoDB completes the deletion. If the table is + * in the ACTIVE state, you can delete it. If a table is in + * CREATING or UPDATING states, then DynamoDB returns a + * ResourceInUseException. If the specified table does not exist, DynamoDB + * returns a ResourceNotFoundException. If table is already in the + * DELETING state, no error is returned.

+ * + *

For global tables, this operation only applies to + * global tables using Version 2019.11.21 (Current version).

+ *
+ * + *

DynamoDB might continue to accept data read and write operations, such as + * GetItem and PutItem, on a table in the + * DELETING state until the table deletion is complete. For the full + * list of table states, see TableStatus.

+ *
+ *

When you delete a table, any indexes on that table are also deleted.

+ *

If you have DynamoDB Streams enabled on the table, then the corresponding stream on + * that table goes into the DISABLED state, and the stream is automatically + * deleted after 24 hours.

+ *

Use the DescribeTable action to check the status of the table.

+ * @example + * Use a bare-bones client and the command you need to make an API call. + * ```javascript + * import { DynamoDBClient, DeleteTableCommand } from "@aws-sdk/client-dynamodb"; // ES Modules import + * // const { DynamoDBClient, DeleteTableCommand } = require("@aws-sdk/client-dynamodb"); // CommonJS import + * const client = new DynamoDBClient(config); + * const input = { // DeleteTableInput + * TableName: "STRING_VALUE", // required + * }; + * const command = new DeleteTableCommand(input); + * const response = await client.send(command); + * // { // DeleteTableOutput + * // TableDescription: { // TableDescription + * // AttributeDefinitions: [ // AttributeDefinitions + * // { // AttributeDefinition + * // AttributeName: "STRING_VALUE", // required + * // AttributeType: "S" || "N" || "B", // required + * // }, + * // ], + * // TableName: "STRING_VALUE", + * // KeySchema: [ // KeySchema + * // { // KeySchemaElement + * // AttributeName: "STRING_VALUE", // required + * // KeyType: "HASH" || "RANGE", // required + * // }, + * // ], + * // TableStatus: "CREATING" || "UPDATING" || "DELETING" || "ACTIVE" || "INACCESSIBLE_ENCRYPTION_CREDENTIALS" || "ARCHIVING" || "ARCHIVED", + * // CreationDateTime: new Date("TIMESTAMP"), + * // ProvisionedThroughput: { // ProvisionedThroughputDescription + * // LastIncreaseDateTime: new Date("TIMESTAMP"), + * // LastDecreaseDateTime: new Date("TIMESTAMP"), + * // NumberOfDecreasesToday: Number("long"), + * // ReadCapacityUnits: Number("long"), + * // WriteCapacityUnits: Number("long"), + * // }, + * // TableSizeBytes: Number("long"), + * // ItemCount: Number("long"), + * // TableArn: "STRING_VALUE", + * // TableId: "STRING_VALUE", + * // BillingModeSummary: { // BillingModeSummary + * // BillingMode: "PROVISIONED" || "PAY_PER_REQUEST", + * // LastUpdateToPayPerRequestDateTime: new Date("TIMESTAMP"), + * // }, + * // LocalSecondaryIndexes: [ // LocalSecondaryIndexDescriptionList + * // { // LocalSecondaryIndexDescription + * // 
IndexName: "STRING_VALUE", + * // KeySchema: [ + * // { + * // AttributeName: "STRING_VALUE", // required + * // KeyType: "HASH" || "RANGE", // required + * // }, + * // ], + * // Projection: { // Projection + * // ProjectionType: "ALL" || "KEYS_ONLY" || "INCLUDE", + * // NonKeyAttributes: [ // NonKeyAttributeNameList + * // "STRING_VALUE", + * // ], + * // }, + * // IndexSizeBytes: Number("long"), + * // ItemCount: Number("long"), + * // IndexArn: "STRING_VALUE", + * // }, + * // ], + * // GlobalSecondaryIndexes: [ // GlobalSecondaryIndexDescriptionList + * // { // GlobalSecondaryIndexDescription + * // IndexName: "STRING_VALUE", + * // KeySchema: [ + * // { + * // AttributeName: "STRING_VALUE", // required + * // KeyType: "HASH" || "RANGE", // required + * // }, + * // ], + * // Projection: { + * // ProjectionType: "ALL" || "KEYS_ONLY" || "INCLUDE", + * // NonKeyAttributes: [ + * // "STRING_VALUE", + * // ], + * // }, + * // IndexStatus: "CREATING" || "UPDATING" || "DELETING" || "ACTIVE", + * // Backfilling: true || false, + * // ProvisionedThroughput: { + * // LastIncreaseDateTime: new Date("TIMESTAMP"), + * // LastDecreaseDateTime: new Date("TIMESTAMP"), + * // NumberOfDecreasesToday: Number("long"), + * // ReadCapacityUnits: Number("long"), + * // WriteCapacityUnits: Number("long"), + * // }, + * // IndexSizeBytes: Number("long"), + * // ItemCount: Number("long"), + * // IndexArn: "STRING_VALUE", + * // OnDemandThroughput: { // OnDemandThroughput + * // MaxReadRequestUnits: Number("long"), + * // MaxWriteRequestUnits: Number("long"), + * // }, + * // WarmThroughput: { // GlobalSecondaryIndexWarmThroughputDescription + * // ReadUnitsPerSecond: Number("long"), + * // WriteUnitsPerSecond: Number("long"), + * // Status: "CREATING" || "UPDATING" || "DELETING" || "ACTIVE", + * // }, + * // }, + * // ], + * // StreamSpecification: { // StreamSpecification + * // StreamEnabled: true || false, // required + * // StreamViewType: "NEW_IMAGE" || "OLD_IMAGE" || 
"NEW_AND_OLD_IMAGES" || "KEYS_ONLY", + * // }, + * // LatestStreamLabel: "STRING_VALUE", + * // LatestStreamArn: "STRING_VALUE", + * // GlobalTableVersion: "STRING_VALUE", + * // Replicas: [ // ReplicaDescriptionList + * // { // ReplicaDescription + * // RegionName: "STRING_VALUE", + * // ReplicaStatus: "CREATING" || "CREATION_FAILED" || "UPDATING" || "DELETING" || "ACTIVE" || "REGION_DISABLED" || "INACCESSIBLE_ENCRYPTION_CREDENTIALS", + * // ReplicaStatusDescription: "STRING_VALUE", + * // ReplicaStatusPercentProgress: "STRING_VALUE", + * // KMSMasterKeyId: "STRING_VALUE", + * // ProvisionedThroughputOverride: { // ProvisionedThroughputOverride + * // ReadCapacityUnits: Number("long"), + * // }, + * // OnDemandThroughputOverride: { // OnDemandThroughputOverride + * // MaxReadRequestUnits: Number("long"), + * // }, + * // WarmThroughput: { // TableWarmThroughputDescription + * // ReadUnitsPerSecond: Number("long"), + * // WriteUnitsPerSecond: Number("long"), + * // Status: "CREATING" || "UPDATING" || "DELETING" || "ACTIVE" || "INACCESSIBLE_ENCRYPTION_CREDENTIALS" || "ARCHIVING" || "ARCHIVED", + * // }, + * // GlobalSecondaryIndexes: [ // ReplicaGlobalSecondaryIndexDescriptionList + * // { // ReplicaGlobalSecondaryIndexDescription + * // IndexName: "STRING_VALUE", + * // ProvisionedThroughputOverride: { + * // ReadCapacityUnits: Number("long"), + * // }, + * // OnDemandThroughputOverride: { + * // MaxReadRequestUnits: Number("long"), + * // }, + * // WarmThroughput: { + * // ReadUnitsPerSecond: Number("long"), + * // WriteUnitsPerSecond: Number("long"), + * // Status: "CREATING" || "UPDATING" || "DELETING" || "ACTIVE", + * // }, + * // }, + * // ], + * // ReplicaInaccessibleDateTime: new Date("TIMESTAMP"), + * // ReplicaTableClassSummary: { // TableClassSummary + * // TableClass: "STANDARD" || "STANDARD_INFREQUENT_ACCESS", + * // LastUpdateDateTime: new Date("TIMESTAMP"), + * // }, + * // }, + * // ], + * // RestoreSummary: { // RestoreSummary + * // 
SourceBackupArn: "STRING_VALUE", + * // SourceTableArn: "STRING_VALUE", + * // RestoreDateTime: new Date("TIMESTAMP"), // required + * // RestoreInProgress: true || false, // required + * // }, + * // SSEDescription: { // SSEDescription + * // Status: "ENABLING" || "ENABLED" || "DISABLING" || "DISABLED" || "UPDATING", + * // SSEType: "AES256" || "KMS", + * // KMSMasterKeyArn: "STRING_VALUE", + * // InaccessibleEncryptionDateTime: new Date("TIMESTAMP"), + * // }, + * // ArchivalSummary: { // ArchivalSummary + * // ArchivalDateTime: new Date("TIMESTAMP"), + * // ArchivalReason: "STRING_VALUE", + * // ArchivalBackupArn: "STRING_VALUE", + * // }, + * // TableClassSummary: { + * // TableClass: "STANDARD" || "STANDARD_INFREQUENT_ACCESS", + * // LastUpdateDateTime: new Date("TIMESTAMP"), + * // }, + * // DeletionProtectionEnabled: true || false, + * // OnDemandThroughput: { + * // MaxReadRequestUnits: Number("long"), + * // MaxWriteRequestUnits: Number("long"), + * // }, + * // WarmThroughput: { + * // ReadUnitsPerSecond: Number("long"), + * // WriteUnitsPerSecond: Number("long"), + * // Status: "CREATING" || "UPDATING" || "DELETING" || "ACTIVE" || "INACCESSIBLE_ENCRYPTION_CREDENTIALS" || "ARCHIVING" || "ARCHIVED", + * // }, + * // MultiRegionConsistency: "EVENTUAL" || "STRONG", + * // }, + * // }; + * + * ``` + * + * @param DeleteTableCommandInput - {@link DeleteTableCommandInput} + * @returns {@link DeleteTableCommandOutput} + * @see {@link DeleteTableCommandInput} for command's `input` shape. + * @see {@link DeleteTableCommandOutput} for command's `response` shape. + * @see {@link DynamoDBClientResolvedConfig | config} for DynamoDBClient's `config` shape. + * + * @throws {@link InternalServerError} (server fault) + *

An error occurred on the server side.

+ * + * @throws {@link InvalidEndpointException} (client fault) + * + * @throws {@link LimitExceededException} (client fault) + *

There is no limit to the number of daily on-demand backups that can be taken.

+ *

For most purposes, up to 500 simultaneous table operations are allowed per account. These operations + * include CreateTable, UpdateTable, + * DeleteTable,UpdateTimeToLive, + * RestoreTableFromBackup, and RestoreTableToPointInTime.

+ *

When you are creating a table with one or more secondary + * indexes, you can have up to 250 such requests running at a time. However, if the table or + * index specifications are complex, then DynamoDB might temporarily reduce the number + * of concurrent operations.

+ *

When importing into DynamoDB, up to 50 simultaneous import table operations are allowed per account.

+ *

There is a soft account quota of 2,500 tables.

+ *

GetRecords was called with a value of more than 1000 for the limit request parameter.

+ *

More than 2 processes are reading from the same streams shard at the same time. Exceeding + * this limit may result in request throttling.

+ * + * @throws {@link ResourceInUseException} (client fault) + *

The operation conflicts with the resource's availability. For example:

+ *
    + *
  • + *

    You attempted to recreate an existing table.

    + *
  • + *
  • + *

    You tried to delete a table currently in the CREATING state.

    + *
  • + *
  • + *

    You tried to update a resource that was already being updated.

    + *
  • + *
+ *

When appropriate, wait for the ongoing update to complete and attempt the request again.

+ * + * @throws {@link ResourceNotFoundException} (client fault) + *

The operation tried to access a nonexistent table or index. The resource might not + * be specified correctly, or its status might not be ACTIVE.

+ * + * @throws {@link DynamoDBServiceException} + *

Base exception class for all service exceptions from DynamoDB service.

+ * + * + * @example To delete a table + * ```javascript + * // This example deletes the Music table. + * const input = { + * TableName: "Music" + * }; + * const command = new DeleteTableCommand(input); + * const response = await client.send(command); + * /* response is + * { + * TableDescription: { + * ItemCount: 0, + * ProvisionedThroughput: { + * NumberOfDecreasesToday: 1, + * ReadCapacityUnits: 5, + * WriteCapacityUnits: 5 + * }, + * TableName: "Music", + * TableSizeBytes: 0, + * TableStatus: "DELETING" + * } + * } + * *\/ + * ``` + * + * @public + */ +export declare class DeleteTableCommand extends DeleteTableCommand_base { + /** @internal type navigation helper, not in runtime. */ + protected static __types: { + api: { + input: DeleteTableInput; + output: DeleteTableOutput; + }; + sdk: { + input: DeleteTableCommandInput; + output: DeleteTableCommandOutput; + }; + }; +} diff --git a/amplify/functions/downloadDocument/node_modules/@aws-sdk/client-dynamodb/dist-types/commands/DescribeBackupCommand.d.ts b/amplify/functions/downloadDocument/node_modules/@aws-sdk/client-dynamodb/dist-types/commands/DescribeBackupCommand.d.ts new file mode 100644 index 0000000..8568846 --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@aws-sdk/client-dynamodb/dist-types/commands/DescribeBackupCommand.d.ts @@ -0,0 +1,173 @@ +import { Command as $Command } from "@smithy/smithy-client"; +import { MetadataBearer as __MetadataBearer } from "@smithy/types"; +import { DynamoDBClientResolvedConfig, ServiceInputTypes, ServiceOutputTypes } from "../DynamoDBClient"; +import { DescribeBackupInput, DescribeBackupOutput } from "../models/models_0"; +/** + * @public + */ +export type { __MetadataBearer }; +export { $Command }; +/** + * @public + * + * The input for {@link DescribeBackupCommand}. + */ +export interface DescribeBackupCommandInput extends DescribeBackupInput { +} +/** + * @public + * + * The output of {@link DescribeBackupCommand}. 
+ */ +export interface DescribeBackupCommandOutput extends DescribeBackupOutput, __MetadataBearer { +} +declare const DescribeBackupCommand_base: { + new (input: DescribeBackupCommandInput): import("@smithy/smithy-client").CommandImpl; + new (__0_0: DescribeBackupCommandInput): import("@smithy/smithy-client").CommandImpl; + getEndpointParameterInstructions(): import("@smithy/middleware-endpoint").EndpointParameterInstructions; +}; +/** + *

Describes an existing backup of a table.

+ *

You can call DescribeBackup at a maximum rate of 10 times per + * second.

+ * @example + * Use a bare-bones client and the command you need to make an API call. + * ```javascript + * import { DynamoDBClient, DescribeBackupCommand } from "@aws-sdk/client-dynamodb"; // ES Modules import + * // const { DynamoDBClient, DescribeBackupCommand } = require("@aws-sdk/client-dynamodb"); // CommonJS import + * const client = new DynamoDBClient(config); + * const input = { // DescribeBackupInput + * BackupArn: "STRING_VALUE", // required + * }; + * const command = new DescribeBackupCommand(input); + * const response = await client.send(command); + * // { // DescribeBackupOutput + * // BackupDescription: { // BackupDescription + * // BackupDetails: { // BackupDetails + * // BackupArn: "STRING_VALUE", // required + * // BackupName: "STRING_VALUE", // required + * // BackupSizeBytes: Number("long"), + * // BackupStatus: "CREATING" || "DELETED" || "AVAILABLE", // required + * // BackupType: "USER" || "SYSTEM" || "AWS_BACKUP", // required + * // BackupCreationDateTime: new Date("TIMESTAMP"), // required + * // BackupExpiryDateTime: new Date("TIMESTAMP"), + * // }, + * // SourceTableDetails: { // SourceTableDetails + * // TableName: "STRING_VALUE", // required + * // TableId: "STRING_VALUE", // required + * // TableArn: "STRING_VALUE", + * // TableSizeBytes: Number("long"), + * // KeySchema: [ // KeySchema // required + * // { // KeySchemaElement + * // AttributeName: "STRING_VALUE", // required + * // KeyType: "HASH" || "RANGE", // required + * // }, + * // ], + * // TableCreationDateTime: new Date("TIMESTAMP"), // required + * // ProvisionedThroughput: { // ProvisionedThroughput + * // ReadCapacityUnits: Number("long"), // required + * // WriteCapacityUnits: Number("long"), // required + * // }, + * // OnDemandThroughput: { // OnDemandThroughput + * // MaxReadRequestUnits: Number("long"), + * // MaxWriteRequestUnits: Number("long"), + * // }, + * // ItemCount: Number("long"), + * // BillingMode: "PROVISIONED" || "PAY_PER_REQUEST", + * // }, + * // 
SourceTableFeatureDetails: { // SourceTableFeatureDetails + * // LocalSecondaryIndexes: [ // LocalSecondaryIndexes + * // { // LocalSecondaryIndexInfo + * // IndexName: "STRING_VALUE", + * // KeySchema: [ + * // { + * // AttributeName: "STRING_VALUE", // required + * // KeyType: "HASH" || "RANGE", // required + * // }, + * // ], + * // Projection: { // Projection + * // ProjectionType: "ALL" || "KEYS_ONLY" || "INCLUDE", + * // NonKeyAttributes: [ // NonKeyAttributeNameList + * // "STRING_VALUE", + * // ], + * // }, + * // }, + * // ], + * // GlobalSecondaryIndexes: [ // GlobalSecondaryIndexes + * // { // GlobalSecondaryIndexInfo + * // IndexName: "STRING_VALUE", + * // KeySchema: [ + * // { + * // AttributeName: "STRING_VALUE", // required + * // KeyType: "HASH" || "RANGE", // required + * // }, + * // ], + * // Projection: { + * // ProjectionType: "ALL" || "KEYS_ONLY" || "INCLUDE", + * // NonKeyAttributes: [ + * // "STRING_VALUE", + * // ], + * // }, + * // ProvisionedThroughput: { + * // ReadCapacityUnits: Number("long"), // required + * // WriteCapacityUnits: Number("long"), // required + * // }, + * // OnDemandThroughput: { + * // MaxReadRequestUnits: Number("long"), + * // MaxWriteRequestUnits: Number("long"), + * // }, + * // }, + * // ], + * // StreamDescription: { // StreamSpecification + * // StreamEnabled: true || false, // required + * // StreamViewType: "NEW_IMAGE" || "OLD_IMAGE" || "NEW_AND_OLD_IMAGES" || "KEYS_ONLY", + * // }, + * // TimeToLiveDescription: { // TimeToLiveDescription + * // TimeToLiveStatus: "ENABLING" || "DISABLING" || "ENABLED" || "DISABLED", + * // AttributeName: "STRING_VALUE", + * // }, + * // SSEDescription: { // SSEDescription + * // Status: "ENABLING" || "ENABLED" || "DISABLING" || "DISABLED" || "UPDATING", + * // SSEType: "AES256" || "KMS", + * // KMSMasterKeyArn: "STRING_VALUE", + * // InaccessibleEncryptionDateTime: new Date("TIMESTAMP"), + * // }, + * // }, + * // }, + * // }; + * + * ``` + * + * @param 
DescribeBackupCommandInput - {@link DescribeBackupCommandInput} + * @returns {@link DescribeBackupCommandOutput} + * @see {@link DescribeBackupCommandInput} for command's `input` shape. + * @see {@link DescribeBackupCommandOutput} for command's `response` shape. + * @see {@link DynamoDBClientResolvedConfig | config} for DynamoDBClient's `config` shape. + * + * @throws {@link BackupNotFoundException} (client fault) + *

Backup not found for the given BackupARN.

+ * + * @throws {@link InternalServerError} (server fault) + *

An error occurred on the server side.

+ * + * @throws {@link InvalidEndpointException} (client fault) + * + * @throws {@link DynamoDBServiceException} + *

Base exception class for all service exceptions from DynamoDB service.

+ * + * + * @public + */ +export declare class DescribeBackupCommand extends DescribeBackupCommand_base { + /** @internal type navigation helper, not in runtime. */ + protected static __types: { + api: { + input: DescribeBackupInput; + output: DescribeBackupOutput; + }; + sdk: { + input: DescribeBackupCommandInput; + output: DescribeBackupCommandOutput; + }; + }; +} diff --git a/amplify/functions/downloadDocument/node_modules/@aws-sdk/client-dynamodb/dist-types/commands/DescribeContinuousBackupsCommand.d.ts b/amplify/functions/downloadDocument/node_modules/@aws-sdk/client-dynamodb/dist-types/commands/DescribeContinuousBackupsCommand.d.ts new file mode 100644 index 0000000..301ba60 --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@aws-sdk/client-dynamodb/dist-types/commands/DescribeContinuousBackupsCommand.d.ts @@ -0,0 +1,101 @@ +import { Command as $Command } from "@smithy/smithy-client"; +import { MetadataBearer as __MetadataBearer } from "@smithy/types"; +import { DynamoDBClientResolvedConfig, ServiceInputTypes, ServiceOutputTypes } from "../DynamoDBClient"; +import { DescribeContinuousBackupsInput, DescribeContinuousBackupsOutput } from "../models/models_0"; +/** + * @public + */ +export type { __MetadataBearer }; +export { $Command }; +/** + * @public + * + * The input for {@link DescribeContinuousBackupsCommand}. + */ +export interface DescribeContinuousBackupsCommandInput extends DescribeContinuousBackupsInput { +} +/** + * @public + * + * The output of {@link DescribeContinuousBackupsCommand}. 
+ */ +export interface DescribeContinuousBackupsCommandOutput extends DescribeContinuousBackupsOutput, __MetadataBearer { +} +declare const DescribeContinuousBackupsCommand_base: { + new (input: DescribeContinuousBackupsCommandInput): import("@smithy/smithy-client").CommandImpl; + new (__0_0: DescribeContinuousBackupsCommandInput): import("@smithy/smithy-client").CommandImpl; + getEndpointParameterInstructions(): import("@smithy/middleware-endpoint").EndpointParameterInstructions; +}; +/** + *

Checks the status of continuous backups and point in time recovery on the specified + * table. Continuous backups are ENABLED on all tables at table creation. If + * point in time recovery is enabled, PointInTimeRecoveryStatus will be set to + * ENABLED.

+ *

After continuous backups and point in time recovery are enabled, you can restore to + * any point in time within EarliestRestorableDateTime and + * LatestRestorableDateTime.

+ *

+ * LatestRestorableDateTime is typically 5 minutes before the current time. + * You can restore your table to any point in time in the last 35 days. You can set the + * recovery period to any value between 1 and 35 days.

+ *

You can call DescribeContinuousBackups at a maximum rate of 10 times per + * second.

+ * @example + * Use a bare-bones client and the command you need to make an API call. + * ```javascript + * import { DynamoDBClient, DescribeContinuousBackupsCommand } from "@aws-sdk/client-dynamodb"; // ES Modules import + * // const { DynamoDBClient, DescribeContinuousBackupsCommand } = require("@aws-sdk/client-dynamodb"); // CommonJS import + * const client = new DynamoDBClient(config); + * const input = { // DescribeContinuousBackupsInput + * TableName: "STRING_VALUE", // required + * }; + * const command = new DescribeContinuousBackupsCommand(input); + * const response = await client.send(command); + * // { // DescribeContinuousBackupsOutput + * // ContinuousBackupsDescription: { // ContinuousBackupsDescription + * // ContinuousBackupsStatus: "ENABLED" || "DISABLED", // required + * // PointInTimeRecoveryDescription: { // PointInTimeRecoveryDescription + * // PointInTimeRecoveryStatus: "ENABLED" || "DISABLED", + * // RecoveryPeriodInDays: Number("int"), + * // EarliestRestorableDateTime: new Date("TIMESTAMP"), + * // LatestRestorableDateTime: new Date("TIMESTAMP"), + * // }, + * // }, + * // }; + * + * ``` + * + * @param DescribeContinuousBackupsCommandInput - {@link DescribeContinuousBackupsCommandInput} + * @returns {@link DescribeContinuousBackupsCommandOutput} + * @see {@link DescribeContinuousBackupsCommandInput} for command's `input` shape. + * @see {@link DescribeContinuousBackupsCommandOutput} for command's `response` shape. + * @see {@link DynamoDBClientResolvedConfig | config} for DynamoDBClient's `config` shape. + * + * @throws {@link InternalServerError} (server fault) + *

An error occurred on the server side.

+ * + * @throws {@link InvalidEndpointException} (client fault) + * + * @throws {@link TableNotFoundException} (client fault) + *

A source table with the name TableName does not currently exist within + * the subscriber's account or the subscriber is operating in the wrong Amazon Web Services Region.

+ * + * @throws {@link DynamoDBServiceException} + *

Base exception class for all service exceptions from DynamoDB service.

+ * + * + * @public + */ +export declare class DescribeContinuousBackupsCommand extends DescribeContinuousBackupsCommand_base { + /** @internal type navigation helper, not in runtime. */ + protected static __types: { + api: { + input: DescribeContinuousBackupsInput; + output: DescribeContinuousBackupsOutput; + }; + sdk: { + input: DescribeContinuousBackupsCommandInput; + output: DescribeContinuousBackupsCommandOutput; + }; + }; +} diff --git a/amplify/functions/downloadDocument/node_modules/@aws-sdk/client-dynamodb/dist-types/commands/DescribeContributorInsightsCommand.d.ts b/amplify/functions/downloadDocument/node_modules/@aws-sdk/client-dynamodb/dist-types/commands/DescribeContributorInsightsCommand.d.ts new file mode 100644 index 0000000..99eb5e0 --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@aws-sdk/client-dynamodb/dist-types/commands/DescribeContributorInsightsCommand.d.ts @@ -0,0 +1,91 @@ +import { Command as $Command } from "@smithy/smithy-client"; +import { MetadataBearer as __MetadataBearer } from "@smithy/types"; +import { DynamoDBClientResolvedConfig, ServiceInputTypes, ServiceOutputTypes } from "../DynamoDBClient"; +import { DescribeContributorInsightsInput, DescribeContributorInsightsOutput } from "../models/models_0"; +/** + * @public + */ +export type { __MetadataBearer }; +export { $Command }; +/** + * @public + * + * The input for {@link DescribeContributorInsightsCommand}. + */ +export interface DescribeContributorInsightsCommandInput extends DescribeContributorInsightsInput { +} +/** + * @public + * + * The output of {@link DescribeContributorInsightsCommand}. 
+ */ +export interface DescribeContributorInsightsCommandOutput extends DescribeContributorInsightsOutput, __MetadataBearer { +} +declare const DescribeContributorInsightsCommand_base: { + new (input: DescribeContributorInsightsCommandInput): import("@smithy/smithy-client").CommandImpl; + new (__0_0: DescribeContributorInsightsCommandInput): import("@smithy/smithy-client").CommandImpl; + getEndpointParameterInstructions(): import("@smithy/middleware-endpoint").EndpointParameterInstructions; +}; +/** + *

Returns information about contributor insights for a given table or global secondary + * index.

+ * @example + * Use a bare-bones client and the command you need to make an API call. + * ```javascript + * import { DynamoDBClient, DescribeContributorInsightsCommand } from "@aws-sdk/client-dynamodb"; // ES Modules import + * // const { DynamoDBClient, DescribeContributorInsightsCommand } = require("@aws-sdk/client-dynamodb"); // CommonJS import + * const client = new DynamoDBClient(config); + * const input = { // DescribeContributorInsightsInput + * TableName: "STRING_VALUE", // required + * IndexName: "STRING_VALUE", + * }; + * const command = new DescribeContributorInsightsCommand(input); + * const response = await client.send(command); + * // { // DescribeContributorInsightsOutput + * // TableName: "STRING_VALUE", + * // IndexName: "STRING_VALUE", + * // ContributorInsightsRuleList: [ // ContributorInsightsRuleList + * // "STRING_VALUE", + * // ], + * // ContributorInsightsStatus: "ENABLING" || "ENABLED" || "DISABLING" || "DISABLED" || "FAILED", + * // LastUpdateDateTime: new Date("TIMESTAMP"), + * // FailureException: { // FailureException + * // ExceptionName: "STRING_VALUE", + * // ExceptionDescription: "STRING_VALUE", + * // }, + * // }; + * + * ``` + * + * @param DescribeContributorInsightsCommandInput - {@link DescribeContributorInsightsCommandInput} + * @returns {@link DescribeContributorInsightsCommandOutput} + * @see {@link DescribeContributorInsightsCommandInput} for command's `input` shape. + * @see {@link DescribeContributorInsightsCommandOutput} for command's `response` shape. + * @see {@link DynamoDBClientResolvedConfig | config} for DynamoDBClient's `config` shape. + * + * @throws {@link InternalServerError} (server fault) + *

An error occurred on the server side.

+ * + * @throws {@link ResourceNotFoundException} (client fault) + *

The operation tried to access a nonexistent table or index. The resource might not + * be specified correctly, or its status might not be ACTIVE.

+ * + * @throws {@link DynamoDBServiceException} + *

Base exception class for all service exceptions from DynamoDB service.

+ * + * + * @public + */ +export declare class DescribeContributorInsightsCommand extends DescribeContributorInsightsCommand_base { + /** @internal type navigation helper, not in runtime. */ + protected static __types: { + api: { + input: DescribeContributorInsightsInput; + output: DescribeContributorInsightsOutput; + }; + sdk: { + input: DescribeContributorInsightsCommandInput; + output: DescribeContributorInsightsCommandOutput; + }; + }; +} diff --git a/amplify/functions/downloadDocument/node_modules/@aws-sdk/client-dynamodb/dist-types/commands/DescribeEndpointsCommand.d.ts b/amplify/functions/downloadDocument/node_modules/@aws-sdk/client-dynamodb/dist-types/commands/DescribeEndpointsCommand.d.ts new file mode 100644 index 0000000..57fe82d --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@aws-sdk/client-dynamodb/dist-types/commands/DescribeEndpointsCommand.d.ts @@ -0,0 +1,76 @@ +import { Command as $Command } from "@smithy/smithy-client"; +import { MetadataBearer as __MetadataBearer } from "@smithy/types"; +import { DynamoDBClientResolvedConfig, ServiceInputTypes, ServiceOutputTypes } from "../DynamoDBClient"; +import { DescribeEndpointsRequest, DescribeEndpointsResponse } from "../models/models_0"; +/** + * @public + */ +export type { __MetadataBearer }; +export { $Command }; +/** + * @public + * + * The input for {@link DescribeEndpointsCommand}. + */ +export interface DescribeEndpointsCommandInput extends DescribeEndpointsRequest { +} +/** + * @public + * + * The output of {@link DescribeEndpointsCommand}. 
+ */ +export interface DescribeEndpointsCommandOutput extends DescribeEndpointsResponse, __MetadataBearer { +} +declare const DescribeEndpointsCommand_base: { + new (input: DescribeEndpointsCommandInput): import("@smithy/smithy-client").CommandImpl; + new (...[input]: [] | [DescribeEndpointsCommandInput]): import("@smithy/smithy-client").CommandImpl; + getEndpointParameterInstructions(): import("@smithy/middleware-endpoint").EndpointParameterInstructions; +}; +/** + *

Returns the regional endpoint information. For more information on policy permissions, + * please see Internetwork traffic privacy.

+ * @example + * Use a bare-bones client and the command you need to make an API call. + * ```javascript + * import { DynamoDBClient, DescribeEndpointsCommand } from "@aws-sdk/client-dynamodb"; // ES Modules import + * // const { DynamoDBClient, DescribeEndpointsCommand } = require("@aws-sdk/client-dynamodb"); // CommonJS import + * const client = new DynamoDBClient(config); + * const input = {}; + * const command = new DescribeEndpointsCommand(input); + * const response = await client.send(command); + * // { // DescribeEndpointsResponse + * // Endpoints: [ // Endpoints // required + * // { // Endpoint + * // Address: "STRING_VALUE", // required + * // CachePeriodInMinutes: Number("long"), // required + * // }, + * // ], + * // }; + * + * ``` + * + * @param DescribeEndpointsCommandInput - {@link DescribeEndpointsCommandInput} + * @returns {@link DescribeEndpointsCommandOutput} + * @see {@link DescribeEndpointsCommandInput} for command's `input` shape. + * @see {@link DescribeEndpointsCommandOutput} for command's `response` shape. + * @see {@link DynamoDBClientResolvedConfig | config} for DynamoDBClient's `config` shape. + * + * @throws {@link DynamoDBServiceException} + *

Base exception class for all service exceptions from DynamoDB service.

+ * + * + * @public + */ +export declare class DescribeEndpointsCommand extends DescribeEndpointsCommand_base { + /** @internal type navigation helper, not in runtime. */ + protected static __types: { + api: { + input: {}; + output: DescribeEndpointsResponse; + }; + sdk: { + input: DescribeEndpointsCommandInput; + output: DescribeEndpointsCommandOutput; + }; + }; +} diff --git a/amplify/functions/downloadDocument/node_modules/@aws-sdk/client-dynamodb/dist-types/commands/DescribeExportCommand.d.ts b/amplify/functions/downloadDocument/node_modules/@aws-sdk/client-dynamodb/dist-types/commands/DescribeExportCommand.d.ts new file mode 100644 index 0000000..400d9ab --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@aws-sdk/client-dynamodb/dist-types/commands/DescribeExportCommand.d.ts @@ -0,0 +1,120 @@ +import { Command as $Command } from "@smithy/smithy-client"; +import { MetadataBearer as __MetadataBearer } from "@smithy/types"; +import { DynamoDBClientResolvedConfig, ServiceInputTypes, ServiceOutputTypes } from "../DynamoDBClient"; +import { DescribeExportInput, DescribeExportOutput } from "../models/models_0"; +/** + * @public + */ +export type { __MetadataBearer }; +export { $Command }; +/** + * @public + * + * The input for {@link DescribeExportCommand}. + */ +export interface DescribeExportCommandInput extends DescribeExportInput { +} +/** + * @public + * + * The output of {@link DescribeExportCommand}. + */ +export interface DescribeExportCommandOutput extends DescribeExportOutput, __MetadataBearer { +} +declare const DescribeExportCommand_base: { + new (input: DescribeExportCommandInput): import("@smithy/smithy-client").CommandImpl; + new (__0_0: DescribeExportCommandInput): import("@smithy/smithy-client").CommandImpl; + getEndpointParameterInstructions(): import("@smithy/middleware-endpoint").EndpointParameterInstructions; +}; +/** + *

Describes an existing table export.

+ * @example + * Use a bare-bones client and the command you need to make an API call. + * ```javascript + * import { DynamoDBClient, DescribeExportCommand } from "@aws-sdk/client-dynamodb"; // ES Modules import + * // const { DynamoDBClient, DescribeExportCommand } = require("@aws-sdk/client-dynamodb"); // CommonJS import + * const client = new DynamoDBClient(config); + * const input = { // DescribeExportInput + * ExportArn: "STRING_VALUE", // required + * }; + * const command = new DescribeExportCommand(input); + * const response = await client.send(command); + * // { // DescribeExportOutput + * // ExportDescription: { // ExportDescription + * // ExportArn: "STRING_VALUE", + * // ExportStatus: "IN_PROGRESS" || "COMPLETED" || "FAILED", + * // StartTime: new Date("TIMESTAMP"), + * // EndTime: new Date("TIMESTAMP"), + * // ExportManifest: "STRING_VALUE", + * // TableArn: "STRING_VALUE", + * // TableId: "STRING_VALUE", + * // ExportTime: new Date("TIMESTAMP"), + * // ClientToken: "STRING_VALUE", + * // S3Bucket: "STRING_VALUE", + * // S3BucketOwner: "STRING_VALUE", + * // S3Prefix: "STRING_VALUE", + * // S3SseAlgorithm: "AES256" || "KMS", + * // S3SseKmsKeyId: "STRING_VALUE", + * // FailureCode: "STRING_VALUE", + * // FailureMessage: "STRING_VALUE", + * // ExportFormat: "DYNAMODB_JSON" || "ION", + * // BilledSizeBytes: Number("long"), + * // ItemCount: Number("long"), + * // ExportType: "FULL_EXPORT" || "INCREMENTAL_EXPORT", + * // IncrementalExportSpecification: { // IncrementalExportSpecification + * // ExportFromTime: new Date("TIMESTAMP"), + * // ExportToTime: new Date("TIMESTAMP"), + * // ExportViewType: "NEW_IMAGE" || "NEW_AND_OLD_IMAGES", + * // }, + * // }, + * // }; + * + * ``` + * + * @param DescribeExportCommandInput - {@link DescribeExportCommandInput} + * @returns {@link DescribeExportCommandOutput} + * @see {@link DescribeExportCommandInput} for command's `input` shape. + * @see {@link DescribeExportCommandOutput} for command's `response` shape. 
+ * @see {@link DynamoDBClientResolvedConfig | config} for DynamoDBClient's `config` shape. + * + * @throws {@link ExportNotFoundException} (client fault) + *

The specified export was not found.

+ * + * @throws {@link InternalServerError} (server fault) + *

An error occurred on the server side.

+ * + * @throws {@link LimitExceededException} (client fault) + *

There is no limit to the number of daily on-demand backups that can be taken.

+ *

For most purposes, up to 500 simultaneous table operations are allowed per account. These operations + * include CreateTable, UpdateTable, + * DeleteTable,UpdateTimeToLive, + * RestoreTableFromBackup, and RestoreTableToPointInTime.

+ *

When you are creating a table with one or more secondary + * indexes, you can have up to 250 such requests running at a time. However, if the table or + * index specifications are complex, then DynamoDB might temporarily reduce the number + * of concurrent operations.

+ *

When importing into DynamoDB, up to 50 simultaneous import table operations are allowed per account.

+ *

There is a soft account quota of 2,500 tables.

+ *

GetRecords was called with a value of more than 1000 for the limit request parameter.

+ *

More than 2 processes are reading from the same streams shard at the same time. Exceeding + * this limit may result in request throttling.

+ * + * @throws {@link DynamoDBServiceException} + *

Base exception class for all service exceptions from DynamoDB service.

+ * + * + * @public + */ +export declare class DescribeExportCommand extends DescribeExportCommand_base { + /** @internal type navigation helper, not in runtime. */ + protected static __types: { + api: { + input: DescribeExportInput; + output: DescribeExportOutput; + }; + sdk: { + input: DescribeExportCommandInput; + output: DescribeExportCommandOutput; + }; + }; +} diff --git a/amplify/functions/downloadDocument/node_modules/@aws-sdk/client-dynamodb/dist-types/commands/DescribeGlobalTableCommand.d.ts b/amplify/functions/downloadDocument/node_modules/@aws-sdk/client-dynamodb/dist-types/commands/DescribeGlobalTableCommand.d.ts new file mode 100644 index 0000000..79c9f59 --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@aws-sdk/client-dynamodb/dist-types/commands/DescribeGlobalTableCommand.d.ts @@ -0,0 +1,130 @@ +import { Command as $Command } from "@smithy/smithy-client"; +import { MetadataBearer as __MetadataBearer } from "@smithy/types"; +import { DynamoDBClientResolvedConfig, ServiceInputTypes, ServiceOutputTypes } from "../DynamoDBClient"; +import { DescribeGlobalTableInput, DescribeGlobalTableOutput } from "../models/models_0"; +/** + * @public + */ +export type { __MetadataBearer }; +export { $Command }; +/** + * @public + * + * The input for {@link DescribeGlobalTableCommand}. + */ +export interface DescribeGlobalTableCommandInput extends DescribeGlobalTableInput { +} +/** + * @public + * + * The output of {@link DescribeGlobalTableCommand}. + */ +export interface DescribeGlobalTableCommandOutput extends DescribeGlobalTableOutput, __MetadataBearer { +} +declare const DescribeGlobalTableCommand_base: { + new (input: DescribeGlobalTableCommandInput): import("@smithy/smithy-client").CommandImpl; + new (__0_0: DescribeGlobalTableCommandInput): import("@smithy/smithy-client").CommandImpl; + getEndpointParameterInstructions(): import("@smithy/middleware-endpoint").EndpointParameterInstructions; +}; +/** + *

Returns information about the specified global table.

+ * + *

This documentation is for version 2017.11.29 (Legacy) of global tables, which should be avoided for new global tables. Customers should use Global Tables version 2019.11.21 (Current) when possible, because it provides greater flexibility, higher efficiency, and consumes less write capacity than 2017.11.29 (Legacy).

+ *

To determine which version you're using, see Determining the global table version you are using. To update existing global tables from version 2017.11.29 (Legacy) to version 2019.11.21 (Current), see Upgrading global tables.

+ *
+ * @example + * Use a bare-bones client and the command you need to make an API call. + * ```javascript + * import { DynamoDBClient, DescribeGlobalTableCommand } from "@aws-sdk/client-dynamodb"; // ES Modules import + * // const { DynamoDBClient, DescribeGlobalTableCommand } = require("@aws-sdk/client-dynamodb"); // CommonJS import + * const client = new DynamoDBClient(config); + * const input = { // DescribeGlobalTableInput + * GlobalTableName: "STRING_VALUE", // required + * }; + * const command = new DescribeGlobalTableCommand(input); + * const response = await client.send(command); + * // { // DescribeGlobalTableOutput + * // GlobalTableDescription: { // GlobalTableDescription + * // ReplicationGroup: [ // ReplicaDescriptionList + * // { // ReplicaDescription + * // RegionName: "STRING_VALUE", + * // ReplicaStatus: "CREATING" || "CREATION_FAILED" || "UPDATING" || "DELETING" || "ACTIVE" || "REGION_DISABLED" || "INACCESSIBLE_ENCRYPTION_CREDENTIALS", + * // ReplicaStatusDescription: "STRING_VALUE", + * // ReplicaStatusPercentProgress: "STRING_VALUE", + * // KMSMasterKeyId: "STRING_VALUE", + * // ProvisionedThroughputOverride: { // ProvisionedThroughputOverride + * // ReadCapacityUnits: Number("long"), + * // }, + * // OnDemandThroughputOverride: { // OnDemandThroughputOverride + * // MaxReadRequestUnits: Number("long"), + * // }, + * // WarmThroughput: { // TableWarmThroughputDescription + * // ReadUnitsPerSecond: Number("long"), + * // WriteUnitsPerSecond: Number("long"), + * // Status: "CREATING" || "UPDATING" || "DELETING" || "ACTIVE" || "INACCESSIBLE_ENCRYPTION_CREDENTIALS" || "ARCHIVING" || "ARCHIVED", + * // }, + * // GlobalSecondaryIndexes: [ // ReplicaGlobalSecondaryIndexDescriptionList + * // { // ReplicaGlobalSecondaryIndexDescription + * // IndexName: "STRING_VALUE", + * // ProvisionedThroughputOverride: { + * // ReadCapacityUnits: Number("long"), + * // }, + * // OnDemandThroughputOverride: { + * // MaxReadRequestUnits: Number("long"), + * // }, + * 
// WarmThroughput: { // GlobalSecondaryIndexWarmThroughputDescription + * // ReadUnitsPerSecond: Number("long"), + * // WriteUnitsPerSecond: Number("long"), + * // Status: "CREATING" || "UPDATING" || "DELETING" || "ACTIVE", + * // }, + * // }, + * // ], + * // ReplicaInaccessibleDateTime: new Date("TIMESTAMP"), + * // ReplicaTableClassSummary: { // TableClassSummary + * // TableClass: "STANDARD" || "STANDARD_INFREQUENT_ACCESS", + * // LastUpdateDateTime: new Date("TIMESTAMP"), + * // }, + * // }, + * // ], + * // GlobalTableArn: "STRING_VALUE", + * // CreationDateTime: new Date("TIMESTAMP"), + * // GlobalTableStatus: "CREATING" || "ACTIVE" || "DELETING" || "UPDATING", + * // GlobalTableName: "STRING_VALUE", + * // }, + * // }; + * + * ``` + * + * @param DescribeGlobalTableCommandInput - {@link DescribeGlobalTableCommandInput} + * @returns {@link DescribeGlobalTableCommandOutput} + * @see {@link DescribeGlobalTableCommandInput} for command's `input` shape. + * @see {@link DescribeGlobalTableCommandOutput} for command's `response` shape. + * @see {@link DynamoDBClientResolvedConfig | config} for DynamoDBClient's `config` shape. + * + * @throws {@link GlobalTableNotFoundException} (client fault) + *

The specified global table does not exist.

+ * + * @throws {@link InternalServerError} (server fault) + *

An error occurred on the server side.

+ * + * @throws {@link InvalidEndpointException} (client fault) + * + * @throws {@link DynamoDBServiceException} + *

Base exception class for all service exceptions from DynamoDB service.

+ * + * + * @public + */ +export declare class DescribeGlobalTableCommand extends DescribeGlobalTableCommand_base { + /** @internal type navigation helper, not in runtime. */ + protected static __types: { + api: { + input: DescribeGlobalTableInput; + output: DescribeGlobalTableOutput; + }; + sdk: { + input: DescribeGlobalTableCommandInput; + output: DescribeGlobalTableCommandOutput; + }; + }; +} diff --git a/amplify/functions/downloadDocument/node_modules/@aws-sdk/client-dynamodb/dist-types/commands/DescribeGlobalTableSettingsCommand.d.ts b/amplify/functions/downloadDocument/node_modules/@aws-sdk/client-dynamodb/dist-types/commands/DescribeGlobalTableSettingsCommand.d.ts new file mode 100644 index 0000000..aadabee --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@aws-sdk/client-dynamodb/dist-types/commands/DescribeGlobalTableSettingsCommand.d.ts @@ -0,0 +1,176 @@ +import { Command as $Command } from "@smithy/smithy-client"; +import { MetadataBearer as __MetadataBearer } from "@smithy/types"; +import { DynamoDBClientResolvedConfig, ServiceInputTypes, ServiceOutputTypes } from "../DynamoDBClient"; +import { DescribeGlobalTableSettingsInput, DescribeGlobalTableSettingsOutput } from "../models/models_0"; +/** + * @public + */ +export type { __MetadataBearer }; +export { $Command }; +/** + * @public + * + * The input for {@link DescribeGlobalTableSettingsCommand}. + */ +export interface DescribeGlobalTableSettingsCommandInput extends DescribeGlobalTableSettingsInput { +} +/** + * @public + * + * The output of {@link DescribeGlobalTableSettingsCommand}. 
+ */ +export interface DescribeGlobalTableSettingsCommandOutput extends DescribeGlobalTableSettingsOutput, __MetadataBearer { +} +declare const DescribeGlobalTableSettingsCommand_base: { + new (input: DescribeGlobalTableSettingsCommandInput): import("@smithy/smithy-client").CommandImpl; + new (__0_0: DescribeGlobalTableSettingsCommandInput): import("@smithy/smithy-client").CommandImpl; + getEndpointParameterInstructions(): import("@smithy/middleware-endpoint").EndpointParameterInstructions; +}; +/** + *

Describes Region-specific settings for a global table.

+ * + *

This documentation is for version 2017.11.29 (Legacy) of global tables, which should be avoided for new global tables. Customers should use Global Tables version 2019.11.21 (Current) when possible, because it provides greater flexibility, higher efficiency, and consumes less write capacity than 2017.11.29 (Legacy).

+ *

To determine which version you're using, see Determining the global table version you are using. To update existing global tables from version 2017.11.29 (Legacy) to version 2019.11.21 (Current), see Upgrading global tables.

+ *
+ * @example + * Use a bare-bones client and the command you need to make an API call. + * ```javascript + * import { DynamoDBClient, DescribeGlobalTableSettingsCommand } from "@aws-sdk/client-dynamodb"; // ES Modules import + * // const { DynamoDBClient, DescribeGlobalTableSettingsCommand } = require("@aws-sdk/client-dynamodb"); // CommonJS import + * const client = new DynamoDBClient(config); + * const input = { // DescribeGlobalTableSettingsInput + * GlobalTableName: "STRING_VALUE", // required + * }; + * const command = new DescribeGlobalTableSettingsCommand(input); + * const response = await client.send(command); + * // { // DescribeGlobalTableSettingsOutput + * // GlobalTableName: "STRING_VALUE", + * // ReplicaSettings: [ // ReplicaSettingsDescriptionList + * // { // ReplicaSettingsDescription + * // RegionName: "STRING_VALUE", // required + * // ReplicaStatus: "CREATING" || "CREATION_FAILED" || "UPDATING" || "DELETING" || "ACTIVE" || "REGION_DISABLED" || "INACCESSIBLE_ENCRYPTION_CREDENTIALS", + * // ReplicaBillingModeSummary: { // BillingModeSummary + * // BillingMode: "PROVISIONED" || "PAY_PER_REQUEST", + * // LastUpdateToPayPerRequestDateTime: new Date("TIMESTAMP"), + * // }, + * // ReplicaProvisionedReadCapacityUnits: Number("long"), + * // ReplicaProvisionedReadCapacityAutoScalingSettings: { // AutoScalingSettingsDescription + * // MinimumUnits: Number("long"), + * // MaximumUnits: Number("long"), + * // AutoScalingDisabled: true || false, + * // AutoScalingRoleArn: "STRING_VALUE", + * // ScalingPolicies: [ // AutoScalingPolicyDescriptionList + * // { // AutoScalingPolicyDescription + * // PolicyName: "STRING_VALUE", + * // TargetTrackingScalingPolicyConfiguration: { // AutoScalingTargetTrackingScalingPolicyConfigurationDescription + * // DisableScaleIn: true || false, + * // ScaleInCooldown: Number("int"), + * // ScaleOutCooldown: Number("int"), + * // TargetValue: Number("double"), // required + * // }, + * // }, + * // ], + * // }, + * // 
ReplicaProvisionedWriteCapacityUnits: Number("long"), + * // ReplicaProvisionedWriteCapacityAutoScalingSettings: { + * // MinimumUnits: Number("long"), + * // MaximumUnits: Number("long"), + * // AutoScalingDisabled: true || false, + * // AutoScalingRoleArn: "STRING_VALUE", + * // ScalingPolicies: [ + * // { + * // PolicyName: "STRING_VALUE", + * // TargetTrackingScalingPolicyConfiguration: { + * // DisableScaleIn: true || false, + * // ScaleInCooldown: Number("int"), + * // ScaleOutCooldown: Number("int"), + * // TargetValue: Number("double"), // required + * // }, + * // }, + * // ], + * // }, + * // ReplicaGlobalSecondaryIndexSettings: [ // ReplicaGlobalSecondaryIndexSettingsDescriptionList + * // { // ReplicaGlobalSecondaryIndexSettingsDescription + * // IndexName: "STRING_VALUE", // required + * // IndexStatus: "CREATING" || "UPDATING" || "DELETING" || "ACTIVE", + * // ProvisionedReadCapacityUnits: Number("long"), + * // ProvisionedReadCapacityAutoScalingSettings: { + * // MinimumUnits: Number("long"), + * // MaximumUnits: Number("long"), + * // AutoScalingDisabled: true || false, + * // AutoScalingRoleArn: "STRING_VALUE", + * // ScalingPolicies: [ + * // { + * // PolicyName: "STRING_VALUE", + * // TargetTrackingScalingPolicyConfiguration: { + * // DisableScaleIn: true || false, + * // ScaleInCooldown: Number("int"), + * // ScaleOutCooldown: Number("int"), + * // TargetValue: Number("double"), // required + * // }, + * // }, + * // ], + * // }, + * // ProvisionedWriteCapacityUnits: Number("long"), + * // ProvisionedWriteCapacityAutoScalingSettings: { + * // MinimumUnits: Number("long"), + * // MaximumUnits: Number("long"), + * // AutoScalingDisabled: true || false, + * // AutoScalingRoleArn: "STRING_VALUE", + * // ScalingPolicies: [ + * // { + * // PolicyName: "STRING_VALUE", + * // TargetTrackingScalingPolicyConfiguration: { + * // DisableScaleIn: true || false, + * // ScaleInCooldown: Number("int"), + * // ScaleOutCooldown: Number("int"), + * // TargetValue: 
Number("double"), // required + * // }, + * // }, + * // ], + * // }, + * // }, + * // ], + * // ReplicaTableClassSummary: { // TableClassSummary + * // TableClass: "STANDARD" || "STANDARD_INFREQUENT_ACCESS", + * // LastUpdateDateTime: new Date("TIMESTAMP"), + * // }, + * // }, + * // ], + * // }; + * + * ``` + * + * @param DescribeGlobalTableSettingsCommandInput - {@link DescribeGlobalTableSettingsCommandInput} + * @returns {@link DescribeGlobalTableSettingsCommandOutput} + * @see {@link DescribeGlobalTableSettingsCommandInput} for command's `input` shape. + * @see {@link DescribeGlobalTableSettingsCommandOutput} for command's `response` shape. + * @see {@link DynamoDBClientResolvedConfig | config} for DynamoDBClient's `config` shape. + * + * @throws {@link GlobalTableNotFoundException} (client fault) + *

The specified global table does not exist.

+ * + * @throws {@link InternalServerError} (server fault) + *

An error occurred on the server side.

+ * + * @throws {@link InvalidEndpointException} (client fault) + * + * @throws {@link DynamoDBServiceException} + *

Base exception class for all service exceptions from DynamoDB service.

+ * + * + * @public + */ +export declare class DescribeGlobalTableSettingsCommand extends DescribeGlobalTableSettingsCommand_base { + /** @internal type navigation helper, not in runtime. */ + protected static __types: { + api: { + input: DescribeGlobalTableSettingsInput; + output: DescribeGlobalTableSettingsOutput; + }; + sdk: { + input: DescribeGlobalTableSettingsCommandInput; + output: DescribeGlobalTableSettingsCommandOutput; + }; + }; +} diff --git a/amplify/functions/downloadDocument/node_modules/@aws-sdk/client-dynamodb/dist-types/commands/DescribeImportCommand.d.ts b/amplify/functions/downloadDocument/node_modules/@aws-sdk/client-dynamodb/dist-types/commands/DescribeImportCommand.d.ts new file mode 100644 index 0000000..f440258 --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@aws-sdk/client-dynamodb/dist-types/commands/DescribeImportCommand.d.ts @@ -0,0 +1,165 @@ +import { Command as $Command } from "@smithy/smithy-client"; +import { MetadataBearer as __MetadataBearer } from "@smithy/types"; +import { DynamoDBClientResolvedConfig, ServiceInputTypes, ServiceOutputTypes } from "../DynamoDBClient"; +import { DescribeImportInput, DescribeImportOutput } from "../models/models_0"; +/** + * @public + */ +export type { __MetadataBearer }; +export { $Command }; +/** + * @public + * + * The input for {@link DescribeImportCommand}. + */ +export interface DescribeImportCommandInput extends DescribeImportInput { +} +/** + * @public + * + * The output of {@link DescribeImportCommand}. + */ +export interface DescribeImportCommandOutput extends DescribeImportOutput, __MetadataBearer { +} +declare const DescribeImportCommand_base: { + new (input: DescribeImportCommandInput): import("@smithy/smithy-client").CommandImpl; + new (__0_0: DescribeImportCommandInput): import("@smithy/smithy-client").CommandImpl; + getEndpointParameterInstructions(): import("@smithy/middleware-endpoint").EndpointParameterInstructions; +}; +/** + *

Represents the properties of the import.

+ * @example + * Use a bare-bones client and the command you need to make an API call. + * ```javascript + * import { DynamoDBClient, DescribeImportCommand } from "@aws-sdk/client-dynamodb"; // ES Modules import + * // const { DynamoDBClient, DescribeImportCommand } = require("@aws-sdk/client-dynamodb"); // CommonJS import + * const client = new DynamoDBClient(config); + * const input = { // DescribeImportInput + * ImportArn: "STRING_VALUE", // required + * }; + * const command = new DescribeImportCommand(input); + * const response = await client.send(command); + * // { // DescribeImportOutput + * // ImportTableDescription: { // ImportTableDescription + * // ImportArn: "STRING_VALUE", + * // ImportStatus: "IN_PROGRESS" || "COMPLETED" || "CANCELLING" || "CANCELLED" || "FAILED", + * // TableArn: "STRING_VALUE", + * // TableId: "STRING_VALUE", + * // ClientToken: "STRING_VALUE", + * // S3BucketSource: { // S3BucketSource + * // S3BucketOwner: "STRING_VALUE", + * // S3Bucket: "STRING_VALUE", // required + * // S3KeyPrefix: "STRING_VALUE", + * // }, + * // ErrorCount: Number("long"), + * // CloudWatchLogGroupArn: "STRING_VALUE", + * // InputFormat: "DYNAMODB_JSON" || "ION" || "CSV", + * // InputFormatOptions: { // InputFormatOptions + * // Csv: { // CsvOptions + * // Delimiter: "STRING_VALUE", + * // HeaderList: [ // CsvHeaderList + * // "STRING_VALUE", + * // ], + * // }, + * // }, + * // InputCompressionType: "GZIP" || "ZSTD" || "NONE", + * // TableCreationParameters: { // TableCreationParameters + * // TableName: "STRING_VALUE", // required + * // AttributeDefinitions: [ // AttributeDefinitions // required + * // { // AttributeDefinition + * // AttributeName: "STRING_VALUE", // required + * // AttributeType: "S" || "N" || "B", // required + * // }, + * // ], + * // KeySchema: [ // KeySchema // required + * // { // KeySchemaElement + * // AttributeName: "STRING_VALUE", // required + * // KeyType: "HASH" || "RANGE", // required + * // }, + * // ], + * // BillingMode: 
"PROVISIONED" || "PAY_PER_REQUEST", + * // ProvisionedThroughput: { // ProvisionedThroughput + * // ReadCapacityUnits: Number("long"), // required + * // WriteCapacityUnits: Number("long"), // required + * // }, + * // OnDemandThroughput: { // OnDemandThroughput + * // MaxReadRequestUnits: Number("long"), + * // MaxWriteRequestUnits: Number("long"), + * // }, + * // SSESpecification: { // SSESpecification + * // Enabled: true || false, + * // SSEType: "AES256" || "KMS", + * // KMSMasterKeyId: "STRING_VALUE", + * // }, + * // GlobalSecondaryIndexes: [ // GlobalSecondaryIndexList + * // { // GlobalSecondaryIndex + * // IndexName: "STRING_VALUE", // required + * // KeySchema: [ // required + * // { + * // AttributeName: "STRING_VALUE", // required + * // KeyType: "HASH" || "RANGE", // required + * // }, + * // ], + * // Projection: { // Projection + * // ProjectionType: "ALL" || "KEYS_ONLY" || "INCLUDE", + * // NonKeyAttributes: [ // NonKeyAttributeNameList + * // "STRING_VALUE", + * // ], + * // }, + * // ProvisionedThroughput: { + * // ReadCapacityUnits: Number("long"), // required + * // WriteCapacityUnits: Number("long"), // required + * // }, + * // OnDemandThroughput: { + * // MaxReadRequestUnits: Number("long"), + * // MaxWriteRequestUnits: Number("long"), + * // }, + * // WarmThroughput: { // WarmThroughput + * // ReadUnitsPerSecond: Number("long"), + * // WriteUnitsPerSecond: Number("long"), + * // }, + * // }, + * // ], + * // }, + * // StartTime: new Date("TIMESTAMP"), + * // EndTime: new Date("TIMESTAMP"), + * // ProcessedSizeBytes: Number("long"), + * // ProcessedItemCount: Number("long"), + * // ImportedItemCount: Number("long"), + * // FailureCode: "STRING_VALUE", + * // FailureMessage: "STRING_VALUE", + * // }, + * // }; + * + * ``` + * + * @param DescribeImportCommandInput - {@link DescribeImportCommandInput} + * @returns {@link DescribeImportCommandOutput} + * @see {@link DescribeImportCommandInput} for command's `input` shape. 
+ * @see {@link DescribeImportCommandOutput} for command's `response` shape. + * @see {@link DynamoDBClientResolvedConfig | config} for DynamoDBClient's `config` shape. + * + * @throws {@link ImportNotFoundException} (client fault) + *

+ * The specified import was not found. + *

+ * + * @throws {@link DynamoDBServiceException} + *

Base exception class for all service exceptions from DynamoDB service.

+ * + * + * @public + */ +export declare class DescribeImportCommand extends DescribeImportCommand_base { + /** @internal type navigation helper, not in runtime. */ + protected static __types: { + api: { + input: DescribeImportInput; + output: DescribeImportOutput; + }; + sdk: { + input: DescribeImportCommandInput; + output: DescribeImportCommandOutput; + }; + }; +} diff --git a/amplify/functions/downloadDocument/node_modules/@aws-sdk/client-dynamodb/dist-types/commands/DescribeKinesisStreamingDestinationCommand.d.ts b/amplify/functions/downloadDocument/node_modules/@aws-sdk/client-dynamodb/dist-types/commands/DescribeKinesisStreamingDestinationCommand.d.ts new file mode 100644 index 0000000..5af13ee --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@aws-sdk/client-dynamodb/dist-types/commands/DescribeKinesisStreamingDestinationCommand.d.ts @@ -0,0 +1,89 @@ +import { Command as $Command } from "@smithy/smithy-client"; +import { MetadataBearer as __MetadataBearer } from "@smithy/types"; +import { DynamoDBClientResolvedConfig, ServiceInputTypes, ServiceOutputTypes } from "../DynamoDBClient"; +import { DescribeKinesisStreamingDestinationInput, DescribeKinesisStreamingDestinationOutput } from "../models/models_0"; +/** + * @public + */ +export type { __MetadataBearer }; +export { $Command }; +/** + * @public + * + * The input for {@link DescribeKinesisStreamingDestinationCommand}. + */ +export interface DescribeKinesisStreamingDestinationCommandInput extends DescribeKinesisStreamingDestinationInput { +} +/** + * @public + * + * The output of {@link DescribeKinesisStreamingDestinationCommand}. 
+ */ +export interface DescribeKinesisStreamingDestinationCommandOutput extends DescribeKinesisStreamingDestinationOutput, __MetadataBearer { +} +declare const DescribeKinesisStreamingDestinationCommand_base: { + new (input: DescribeKinesisStreamingDestinationCommandInput): import("@smithy/smithy-client").CommandImpl; + new (__0_0: DescribeKinesisStreamingDestinationCommandInput): import("@smithy/smithy-client").CommandImpl; + getEndpointParameterInstructions(): import("@smithy/middleware-endpoint").EndpointParameterInstructions; +}; +/** + *

Returns information about the status of Kinesis streaming.

+ * @example + * Use a bare-bones client and the command you need to make an API call. + * ```javascript + * import { DynamoDBClient, DescribeKinesisStreamingDestinationCommand } from "@aws-sdk/client-dynamodb"; // ES Modules import + * // const { DynamoDBClient, DescribeKinesisStreamingDestinationCommand } = require("@aws-sdk/client-dynamodb"); // CommonJS import + * const client = new DynamoDBClient(config); + * const input = { // DescribeKinesisStreamingDestinationInput + * TableName: "STRING_VALUE", // required + * }; + * const command = new DescribeKinesisStreamingDestinationCommand(input); + * const response = await client.send(command); + * // { // DescribeKinesisStreamingDestinationOutput + * // TableName: "STRING_VALUE", + * // KinesisDataStreamDestinations: [ // KinesisDataStreamDestinations + * // { // KinesisDataStreamDestination + * // StreamArn: "STRING_VALUE", + * // DestinationStatus: "ENABLING" || "ACTIVE" || "DISABLING" || "DISABLED" || "ENABLE_FAILED" || "UPDATING", + * // DestinationStatusDescription: "STRING_VALUE", + * // ApproximateCreationDateTimePrecision: "MILLISECOND" || "MICROSECOND", + * // }, + * // ], + * // }; + * + * ``` + * + * @param DescribeKinesisStreamingDestinationCommandInput - {@link DescribeKinesisStreamingDestinationCommandInput} + * @returns {@link DescribeKinesisStreamingDestinationCommandOutput} + * @see {@link DescribeKinesisStreamingDestinationCommandInput} for command's `input` shape. + * @see {@link DescribeKinesisStreamingDestinationCommandOutput} for command's `response` shape. + * @see {@link DynamoDBClientResolvedConfig | config} for DynamoDBClient's `config` shape. + * + * @throws {@link InternalServerError} (server fault) + *

An error occurred on the server side.

+ * + * @throws {@link InvalidEndpointException} (client fault) + * + * @throws {@link ResourceNotFoundException} (client fault) + *

The operation tried to access a nonexistent table or index. The resource might not + * be specified correctly, or its status might not be ACTIVE.

+ * + * @throws {@link DynamoDBServiceException} + *

Base exception class for all service exceptions from DynamoDB service.

+ * + * + * @public + */ +export declare class DescribeKinesisStreamingDestinationCommand extends DescribeKinesisStreamingDestinationCommand_base { + /** @internal type navigation helper, not in runtime. */ + protected static __types: { + api: { + input: DescribeKinesisStreamingDestinationInput; + output: DescribeKinesisStreamingDestinationOutput; + }; + sdk: { + input: DescribeKinesisStreamingDestinationCommandInput; + output: DescribeKinesisStreamingDestinationCommandOutput; + }; + }; +} diff --git a/amplify/functions/downloadDocument/node_modules/@aws-sdk/client-dynamodb/dist-types/commands/DescribeLimitsCommand.d.ts b/amplify/functions/downloadDocument/node_modules/@aws-sdk/client-dynamodb/dist-types/commands/DescribeLimitsCommand.d.ts new file mode 100644 index 0000000..bdf6316 --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@aws-sdk/client-dynamodb/dist-types/commands/DescribeLimitsCommand.d.ts @@ -0,0 +1,163 @@ +import { Command as $Command } from "@smithy/smithy-client"; +import { MetadataBearer as __MetadataBearer } from "@smithy/types"; +import { DynamoDBClientResolvedConfig, ServiceInputTypes, ServiceOutputTypes } from "../DynamoDBClient"; +import { DescribeLimitsInput, DescribeLimitsOutput } from "../models/models_0"; +/** + * @public + */ +export type { __MetadataBearer }; +export { $Command }; +/** + * @public + * + * The input for {@link DescribeLimitsCommand}. + */ +export interface DescribeLimitsCommandInput extends DescribeLimitsInput { +} +/** + * @public + * + * The output of {@link DescribeLimitsCommand}. 
+ */ +export interface DescribeLimitsCommandOutput extends DescribeLimitsOutput, __MetadataBearer { +} +declare const DescribeLimitsCommand_base: { + new (input: DescribeLimitsCommandInput): import("@smithy/smithy-client").CommandImpl; + new (...[input]: [] | [DescribeLimitsCommandInput]): import("@smithy/smithy-client").CommandImpl; + getEndpointParameterInstructions(): import("@smithy/middleware-endpoint").EndpointParameterInstructions; +}; +/** + *

Returns the current provisioned-capacity quotas for your Amazon Web Services account in + * a Region, both for the Region as a whole and for any one DynamoDB table that you create + * there.

+ *

When you establish an Amazon Web Services account, the account has initial quotas on + * the maximum read capacity units and write capacity units that you can provision across + * all of your DynamoDB tables in a given Region. Also, there are per-table + * quotas that apply when you create a table there. For more information, see Service, + * Account, and Table Quotas page in the Amazon DynamoDB + * Developer Guide.

+ *

Although you can increase these quotas by filing a case at Amazon Web Services Support Center, obtaining the + * increase is not instantaneous. The DescribeLimits action lets you write + * code to compare the capacity you are currently using to those quotas imposed by your + * account so that you have enough time to apply for an increase before you hit a + * quota.

+ *

For example, you could use one of the Amazon Web Services SDKs to do the + * following:

+ *
    + *
  1. + *

    Call DescribeLimits for a particular Region to obtain your + * current account quotas on provisioned capacity there.

    + *
  2. + *
  3. + *

    Create a variable to hold the aggregate read capacity units provisioned for + * all your tables in that Region, and one to hold the aggregate write capacity + * units. Zero them both.

    + *
  4. + *
  5. + *

    Call ListTables to obtain a list of all your DynamoDB + * tables.

    + *
  6. + *
  7. + *

    For each table name listed by ListTables, do the + * following:

    + *
      + *
    • + *

      Call DescribeTable with the table name.

      + *
    • + *
    • + *

      Use the data returned by DescribeTable to add the read + * capacity units and write capacity units provisioned for the table itself + * to your variables.

      + *
    • + *
    • + *

      If the table has one or more global secondary indexes (GSIs), loop + * over these GSIs and add their provisioned capacity values to your + * variables as well.

      + *
    • + *
    + *
  8. + *
  9. + *

    Report the account quotas for that Region returned by + * DescribeLimits, along with the total current provisioned + * capacity levels you have calculated.

    + *
  10. + *
+ *

This will let you see whether you are getting close to your account-level + * quotas.

+ *

The per-table quotas apply only when you are creating a new table. They restrict the + * sum of the provisioned capacity of the new table itself and all its global secondary + * indexes.

+ *

For existing tables and their GSIs, DynamoDB doesn't let you increase provisioned + * capacity extremely rapidly, but the only quota that applies is that the aggregate + * provisioned capacity over all your tables and GSIs cannot exceed either of the + * per-account quotas.

+ * + *

+ * DescribeLimits should only be called periodically. You can expect + * throttling errors if you call it more than once in a minute.

+ *
+ *

The DescribeLimits Request element has no content.

+ * @example + * Use a bare-bones client and the command you need to make an API call. + * ```javascript + * import { DynamoDBClient, DescribeLimitsCommand } from "@aws-sdk/client-dynamodb"; // ES Modules import + * // const { DynamoDBClient, DescribeLimitsCommand } = require("@aws-sdk/client-dynamodb"); // CommonJS import + * const client = new DynamoDBClient(config); + * const input = {}; + * const command = new DescribeLimitsCommand(input); + * const response = await client.send(command); + * // { // DescribeLimitsOutput + * // AccountMaxReadCapacityUnits: Number("long"), + * // AccountMaxWriteCapacityUnits: Number("long"), + * // TableMaxReadCapacityUnits: Number("long"), + * // TableMaxWriteCapacityUnits: Number("long"), + * // }; + * + * ``` + * + * @param DescribeLimitsCommandInput - {@link DescribeLimitsCommandInput} + * @returns {@link DescribeLimitsCommandOutput} + * @see {@link DescribeLimitsCommandInput} for command's `input` shape. + * @see {@link DescribeLimitsCommandOutput} for command's `response` shape. + * @see {@link DynamoDBClientResolvedConfig | config} for DynamoDBClient's `config` shape. + * + * @throws {@link InternalServerError} (server fault) + *

An error occurred on the server side.

+ * + * @throws {@link InvalidEndpointException} (client fault) + * + * @throws {@link DynamoDBServiceException} + *

Base exception class for all service exceptions from DynamoDB service.

+ * + * + * @example To determine capacity limits per table and account, in the current AWS region + * ```javascript + * // The following example returns the maximum read and write capacity units per table, and for the AWS account, in the current AWS region. + * const input = { /* empty *\/ }; + * const command = new DescribeLimitsCommand(input); + * const response = await client.send(command); + * /* response is + * { + * AccountMaxReadCapacityUnits: 20000, + * AccountMaxWriteCapacityUnits: 20000, + * TableMaxReadCapacityUnits: 10000, + * TableMaxWriteCapacityUnits: 10000 + * } + * *\/ + * ``` + * + * @public + */ +export declare class DescribeLimitsCommand extends DescribeLimitsCommand_base { + /** @internal type navigation helper, not in runtime. */ + protected static __types: { + api: { + input: {}; + output: DescribeLimitsOutput; + }; + sdk: { + input: DescribeLimitsCommandInput; + output: DescribeLimitsCommandOutput; + }; + }; +} diff --git a/amplify/functions/downloadDocument/node_modules/@aws-sdk/client-dynamodb/dist-types/commands/DescribeTableCommand.d.ts b/amplify/functions/downloadDocument/node_modules/@aws-sdk/client-dynamodb/dist-types/commands/DescribeTableCommand.d.ts new file mode 100644 index 0000000..b74ef35 --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@aws-sdk/client-dynamodb/dist-types/commands/DescribeTableCommand.d.ts @@ -0,0 +1,263 @@ +import { Command as $Command } from "@smithy/smithy-client"; +import { MetadataBearer as __MetadataBearer } from "@smithy/types"; +import { DynamoDBClientResolvedConfig, ServiceInputTypes, ServiceOutputTypes } from "../DynamoDBClient"; +import { DescribeTableInput, DescribeTableOutput } from "../models/models_0"; +/** + * @public + */ +export type { __MetadataBearer }; +export { $Command }; +/** + * @public + * + * The input for {@link DescribeTableCommand}. 
+ */ +export interface DescribeTableCommandInput extends DescribeTableInput { +} +/** + * @public + * + * The output of {@link DescribeTableCommand}. + */ +export interface DescribeTableCommandOutput extends DescribeTableOutput, __MetadataBearer { +} +declare const DescribeTableCommand_base: { + new (input: DescribeTableCommandInput): import("@smithy/smithy-client").CommandImpl; + new (__0_0: DescribeTableCommandInput): import("@smithy/smithy-client").CommandImpl; + getEndpointParameterInstructions(): import("@smithy/middleware-endpoint").EndpointParameterInstructions; +}; +/** + *

Returns information about the table, including the current status of the table, when + * it was created, the primary key schema, and any indexes on the table.

+ * + *

For global tables, this operation only applies to global tables using Version + * 2019.11.21 (Current version).

+ *
+ * + *

If you issue a DescribeTable request immediately after a + * CreateTable request, DynamoDB might return a + * ResourceNotFoundException. This is because + * DescribeTable uses an eventually consistent query, and the metadata + * for your table might not be available at that moment. Wait for a few seconds, and + * then try the DescribeTable request again.

+ *
+ * @example + * Use a bare-bones client and the command you need to make an API call. + * ```javascript + * import { DynamoDBClient, DescribeTableCommand } from "@aws-sdk/client-dynamodb"; // ES Modules import + * // const { DynamoDBClient, DescribeTableCommand } = require("@aws-sdk/client-dynamodb"); // CommonJS import + * const client = new DynamoDBClient(config); + * const input = { // DescribeTableInput + * TableName: "STRING_VALUE", // required + * }; + * const command = new DescribeTableCommand(input); + * const response = await client.send(command); + * // { // DescribeTableOutput + * // Table: { // TableDescription + * // AttributeDefinitions: [ // AttributeDefinitions + * // { // AttributeDefinition + * // AttributeName: "STRING_VALUE", // required + * // AttributeType: "S" || "N" || "B", // required + * // }, + * // ], + * // TableName: "STRING_VALUE", + * // KeySchema: [ // KeySchema + * // { // KeySchemaElement + * // AttributeName: "STRING_VALUE", // required + * // KeyType: "HASH" || "RANGE", // required + * // }, + * // ], + * // TableStatus: "CREATING" || "UPDATING" || "DELETING" || "ACTIVE" || "INACCESSIBLE_ENCRYPTION_CREDENTIALS" || "ARCHIVING" || "ARCHIVED", + * // CreationDateTime: new Date("TIMESTAMP"), + * // ProvisionedThroughput: { // ProvisionedThroughputDescription + * // LastIncreaseDateTime: new Date("TIMESTAMP"), + * // LastDecreaseDateTime: new Date("TIMESTAMP"), + * // NumberOfDecreasesToday: Number("long"), + * // ReadCapacityUnits: Number("long"), + * // WriteCapacityUnits: Number("long"), + * // }, + * // TableSizeBytes: Number("long"), + * // ItemCount: Number("long"), + * // TableArn: "STRING_VALUE", + * // TableId: "STRING_VALUE", + * // BillingModeSummary: { // BillingModeSummary + * // BillingMode: "PROVISIONED" || "PAY_PER_REQUEST", + * // LastUpdateToPayPerRequestDateTime: new Date("TIMESTAMP"), + * // }, + * // LocalSecondaryIndexes: [ // LocalSecondaryIndexDescriptionList + * // { // LocalSecondaryIndexDescription + * // 
IndexName: "STRING_VALUE", + * // KeySchema: [ + * // { + * // AttributeName: "STRING_VALUE", // required + * // KeyType: "HASH" || "RANGE", // required + * // }, + * // ], + * // Projection: { // Projection + * // ProjectionType: "ALL" || "KEYS_ONLY" || "INCLUDE", + * // NonKeyAttributes: [ // NonKeyAttributeNameList + * // "STRING_VALUE", + * // ], + * // }, + * // IndexSizeBytes: Number("long"), + * // ItemCount: Number("long"), + * // IndexArn: "STRING_VALUE", + * // }, + * // ], + * // GlobalSecondaryIndexes: [ // GlobalSecondaryIndexDescriptionList + * // { // GlobalSecondaryIndexDescription + * // IndexName: "STRING_VALUE", + * // KeySchema: [ + * // { + * // AttributeName: "STRING_VALUE", // required + * // KeyType: "HASH" || "RANGE", // required + * // }, + * // ], + * // Projection: { + * // ProjectionType: "ALL" || "KEYS_ONLY" || "INCLUDE", + * // NonKeyAttributes: [ + * // "STRING_VALUE", + * // ], + * // }, + * // IndexStatus: "CREATING" || "UPDATING" || "DELETING" || "ACTIVE", + * // Backfilling: true || false, + * // ProvisionedThroughput: { + * // LastIncreaseDateTime: new Date("TIMESTAMP"), + * // LastDecreaseDateTime: new Date("TIMESTAMP"), + * // NumberOfDecreasesToday: Number("long"), + * // ReadCapacityUnits: Number("long"), + * // WriteCapacityUnits: Number("long"), + * // }, + * // IndexSizeBytes: Number("long"), + * // ItemCount: Number("long"), + * // IndexArn: "STRING_VALUE", + * // OnDemandThroughput: { // OnDemandThroughput + * // MaxReadRequestUnits: Number("long"), + * // MaxWriteRequestUnits: Number("long"), + * // }, + * // WarmThroughput: { // GlobalSecondaryIndexWarmThroughputDescription + * // ReadUnitsPerSecond: Number("long"), + * // WriteUnitsPerSecond: Number("long"), + * // Status: "CREATING" || "UPDATING" || "DELETING" || "ACTIVE", + * // }, + * // }, + * // ], + * // StreamSpecification: { // StreamSpecification + * // StreamEnabled: true || false, // required + * // StreamViewType: "NEW_IMAGE" || "OLD_IMAGE" || 
"NEW_AND_OLD_IMAGES" || "KEYS_ONLY", + * // }, + * // LatestStreamLabel: "STRING_VALUE", + * // LatestStreamArn: "STRING_VALUE", + * // GlobalTableVersion: "STRING_VALUE", + * // Replicas: [ // ReplicaDescriptionList + * // { // ReplicaDescription + * // RegionName: "STRING_VALUE", + * // ReplicaStatus: "CREATING" || "CREATION_FAILED" || "UPDATING" || "DELETING" || "ACTIVE" || "REGION_DISABLED" || "INACCESSIBLE_ENCRYPTION_CREDENTIALS", + * // ReplicaStatusDescription: "STRING_VALUE", + * // ReplicaStatusPercentProgress: "STRING_VALUE", + * // KMSMasterKeyId: "STRING_VALUE", + * // ProvisionedThroughputOverride: { // ProvisionedThroughputOverride + * // ReadCapacityUnits: Number("long"), + * // }, + * // OnDemandThroughputOverride: { // OnDemandThroughputOverride + * // MaxReadRequestUnits: Number("long"), + * // }, + * // WarmThroughput: { // TableWarmThroughputDescription + * // ReadUnitsPerSecond: Number("long"), + * // WriteUnitsPerSecond: Number("long"), + * // Status: "CREATING" || "UPDATING" || "DELETING" || "ACTIVE" || "INACCESSIBLE_ENCRYPTION_CREDENTIALS" || "ARCHIVING" || "ARCHIVED", + * // }, + * // GlobalSecondaryIndexes: [ // ReplicaGlobalSecondaryIndexDescriptionList + * // { // ReplicaGlobalSecondaryIndexDescription + * // IndexName: "STRING_VALUE", + * // ProvisionedThroughputOverride: { + * // ReadCapacityUnits: Number("long"), + * // }, + * // OnDemandThroughputOverride: { + * // MaxReadRequestUnits: Number("long"), + * // }, + * // WarmThroughput: { + * // ReadUnitsPerSecond: Number("long"), + * // WriteUnitsPerSecond: Number("long"), + * // Status: "CREATING" || "UPDATING" || "DELETING" || "ACTIVE", + * // }, + * // }, + * // ], + * // ReplicaInaccessibleDateTime: new Date("TIMESTAMP"), + * // ReplicaTableClassSummary: { // TableClassSummary + * // TableClass: "STANDARD" || "STANDARD_INFREQUENT_ACCESS", + * // LastUpdateDateTime: new Date("TIMESTAMP"), + * // }, + * // }, + * // ], + * // RestoreSummary: { // RestoreSummary + * // 
SourceBackupArn: "STRING_VALUE", + * // SourceTableArn: "STRING_VALUE", + * // RestoreDateTime: new Date("TIMESTAMP"), // required + * // RestoreInProgress: true || false, // required + * // }, + * // SSEDescription: { // SSEDescription + * // Status: "ENABLING" || "ENABLED" || "DISABLING" || "DISABLED" || "UPDATING", + * // SSEType: "AES256" || "KMS", + * // KMSMasterKeyArn: "STRING_VALUE", + * // InaccessibleEncryptionDateTime: new Date("TIMESTAMP"), + * // }, + * // ArchivalSummary: { // ArchivalSummary + * // ArchivalDateTime: new Date("TIMESTAMP"), + * // ArchivalReason: "STRING_VALUE", + * // ArchivalBackupArn: "STRING_VALUE", + * // }, + * // TableClassSummary: { + * // TableClass: "STANDARD" || "STANDARD_INFREQUENT_ACCESS", + * // LastUpdateDateTime: new Date("TIMESTAMP"), + * // }, + * // DeletionProtectionEnabled: true || false, + * // OnDemandThroughput: { + * // MaxReadRequestUnits: Number("long"), + * // MaxWriteRequestUnits: Number("long"), + * // }, + * // WarmThroughput: { + * // ReadUnitsPerSecond: Number("long"), + * // WriteUnitsPerSecond: Number("long"), + * // Status: "CREATING" || "UPDATING" || "DELETING" || "ACTIVE" || "INACCESSIBLE_ENCRYPTION_CREDENTIALS" || "ARCHIVING" || "ARCHIVED", + * // }, + * // MultiRegionConsistency: "EVENTUAL" || "STRONG", + * // }, + * // }; + * + * ``` + * + * @param DescribeTableCommandInput - {@link DescribeTableCommandInput} + * @returns {@link DescribeTableCommandOutput} + * @see {@link DescribeTableCommandInput} for command's `input` shape. + * @see {@link DescribeTableCommandOutput} for command's `response` shape. + * @see {@link DynamoDBClientResolvedConfig | config} for DynamoDBClient's `config` shape. + * + * @throws {@link InternalServerError} (server fault) + *

An error occurred on the server side.

+ * + * @throws {@link InvalidEndpointException} (client fault) + * + * @throws {@link ResourceNotFoundException} (client fault) + *

The operation tried to access a nonexistent table or index. The resource might not + * be specified correctly, or its status might not be ACTIVE.

+ * + * @throws {@link DynamoDBServiceException} + *

Base exception class for all service exceptions from DynamoDB service.

+ * + * + * @public + */ +export declare class DescribeTableCommand extends DescribeTableCommand_base { + /** @internal type navigation helper, not in runtime. */ + protected static __types: { + api: { + input: DescribeTableInput; + output: DescribeTableOutput; + }; + sdk: { + input: DescribeTableCommandInput; + output: DescribeTableCommandOutput; + }; + }; +} diff --git a/amplify/functions/downloadDocument/node_modules/@aws-sdk/client-dynamodb/dist-types/commands/DescribeTableReplicaAutoScalingCommand.d.ts b/amplify/functions/downloadDocument/node_modules/@aws-sdk/client-dynamodb/dist-types/commands/DescribeTableReplicaAutoScalingCommand.d.ts new file mode 100644 index 0000000..6875231 --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@aws-sdk/client-dynamodb/dist-types/commands/DescribeTableReplicaAutoScalingCommand.d.ts @@ -0,0 +1,166 @@ +import { Command as $Command } from "@smithy/smithy-client"; +import { MetadataBearer as __MetadataBearer } from "@smithy/types"; +import { DynamoDBClientResolvedConfig, ServiceInputTypes, ServiceOutputTypes } from "../DynamoDBClient"; +import { DescribeTableReplicaAutoScalingInput, DescribeTableReplicaAutoScalingOutput } from "../models/models_0"; +/** + * @public + */ +export type { __MetadataBearer }; +export { $Command }; +/** + * @public + * + * The input for {@link DescribeTableReplicaAutoScalingCommand}. + */ +export interface DescribeTableReplicaAutoScalingCommandInput extends DescribeTableReplicaAutoScalingInput { +} +/** + * @public + * + * The output of {@link DescribeTableReplicaAutoScalingCommand}. 
+ */ +export interface DescribeTableReplicaAutoScalingCommandOutput extends DescribeTableReplicaAutoScalingOutput, __MetadataBearer { +} +declare const DescribeTableReplicaAutoScalingCommand_base: { + new (input: DescribeTableReplicaAutoScalingCommandInput): import("@smithy/smithy-client").CommandImpl; + new (__0_0: DescribeTableReplicaAutoScalingCommandInput): import("@smithy/smithy-client").CommandImpl; + getEndpointParameterInstructions(): import("@smithy/middleware-endpoint").EndpointParameterInstructions; +}; +/** + *

Describes auto scaling settings across replicas of the global table at once.

+ * + *

For global tables, this operation only applies to global tables using Version + * 2019.11.21 (Current version).

+ *
+ * @example + * Use a bare-bones client and the command you need to make an API call. + * ```javascript + * import { DynamoDBClient, DescribeTableReplicaAutoScalingCommand } from "@aws-sdk/client-dynamodb"; // ES Modules import + * // const { DynamoDBClient, DescribeTableReplicaAutoScalingCommand } = require("@aws-sdk/client-dynamodb"); // CommonJS import + * const client = new DynamoDBClient(config); + * const input = { // DescribeTableReplicaAutoScalingInput + * TableName: "STRING_VALUE", // required + * }; + * const command = new DescribeTableReplicaAutoScalingCommand(input); + * const response = await client.send(command); + * // { // DescribeTableReplicaAutoScalingOutput + * // TableAutoScalingDescription: { // TableAutoScalingDescription + * // TableName: "STRING_VALUE", + * // TableStatus: "CREATING" || "UPDATING" || "DELETING" || "ACTIVE" || "INACCESSIBLE_ENCRYPTION_CREDENTIALS" || "ARCHIVING" || "ARCHIVED", + * // Replicas: [ // ReplicaAutoScalingDescriptionList + * // { // ReplicaAutoScalingDescription + * // RegionName: "STRING_VALUE", + * // GlobalSecondaryIndexes: [ // ReplicaGlobalSecondaryIndexAutoScalingDescriptionList + * // { // ReplicaGlobalSecondaryIndexAutoScalingDescription + * // IndexName: "STRING_VALUE", + * // IndexStatus: "CREATING" || "UPDATING" || "DELETING" || "ACTIVE", + * // ProvisionedReadCapacityAutoScalingSettings: { // AutoScalingSettingsDescription + * // MinimumUnits: Number("long"), + * // MaximumUnits: Number("long"), + * // AutoScalingDisabled: true || false, + * // AutoScalingRoleArn: "STRING_VALUE", + * // ScalingPolicies: [ // AutoScalingPolicyDescriptionList + * // { // AutoScalingPolicyDescription + * // PolicyName: "STRING_VALUE", + * // TargetTrackingScalingPolicyConfiguration: { // AutoScalingTargetTrackingScalingPolicyConfigurationDescription + * // DisableScaleIn: true || false, + * // ScaleInCooldown: Number("int"), + * // ScaleOutCooldown: Number("int"), + * // TargetValue: Number("double"), // required + * // 
}, + * // }, + * // ], + * // }, + * // ProvisionedWriteCapacityAutoScalingSettings: { + * // MinimumUnits: Number("long"), + * // MaximumUnits: Number("long"), + * // AutoScalingDisabled: true || false, + * // AutoScalingRoleArn: "STRING_VALUE", + * // ScalingPolicies: [ + * // { + * // PolicyName: "STRING_VALUE", + * // TargetTrackingScalingPolicyConfiguration: { + * // DisableScaleIn: true || false, + * // ScaleInCooldown: Number("int"), + * // ScaleOutCooldown: Number("int"), + * // TargetValue: Number("double"), // required + * // }, + * // }, + * // ], + * // }, + * // }, + * // ], + * // ReplicaProvisionedReadCapacityAutoScalingSettings: { + * // MinimumUnits: Number("long"), + * // MaximumUnits: Number("long"), + * // AutoScalingDisabled: true || false, + * // AutoScalingRoleArn: "STRING_VALUE", + * // ScalingPolicies: [ + * // { + * // PolicyName: "STRING_VALUE", + * // TargetTrackingScalingPolicyConfiguration: { + * // DisableScaleIn: true || false, + * // ScaleInCooldown: Number("int"), + * // ScaleOutCooldown: Number("int"), + * // TargetValue: Number("double"), // required + * // }, + * // }, + * // ], + * // }, + * // ReplicaProvisionedWriteCapacityAutoScalingSettings: { + * // MinimumUnits: Number("long"), + * // MaximumUnits: Number("long"), + * // AutoScalingDisabled: true || false, + * // AutoScalingRoleArn: "STRING_VALUE", + * // ScalingPolicies: [ + * // { + * // PolicyName: "STRING_VALUE", + * // TargetTrackingScalingPolicyConfiguration: { + * // DisableScaleIn: true || false, + * // ScaleInCooldown: Number("int"), + * // ScaleOutCooldown: Number("int"), + * // TargetValue: Number("double"), // required + * // }, + * // }, + * // ], + * // }, + * // ReplicaStatus: "CREATING" || "CREATION_FAILED" || "UPDATING" || "DELETING" || "ACTIVE" || "REGION_DISABLED" || "INACCESSIBLE_ENCRYPTION_CREDENTIALS", + * // }, + * // ], + * // }, + * // }; + * + * ``` + * + * @param DescribeTableReplicaAutoScalingCommandInput - {@link 
DescribeTableReplicaAutoScalingCommandInput} + * @returns {@link DescribeTableReplicaAutoScalingCommandOutput} + * @see {@link DescribeTableReplicaAutoScalingCommandInput} for command's `input` shape. + * @see {@link DescribeTableReplicaAutoScalingCommandOutput} for command's `response` shape. + * @see {@link DynamoDBClientResolvedConfig | config} for DynamoDBClient's `config` shape. + * + * @throws {@link InternalServerError} (server fault) + *

An error occurred on the server side.

+ * + * @throws {@link ResourceNotFoundException} (client fault) + *

The operation tried to access a nonexistent table or index. The resource might not + * be specified correctly, or its status might not be ACTIVE.

+ * + * @throws {@link DynamoDBServiceException} + *

Base exception class for all service exceptions from DynamoDB service.

+ * + * + * @public + */ +export declare class DescribeTableReplicaAutoScalingCommand extends DescribeTableReplicaAutoScalingCommand_base { + /** @internal type navigation helper, not in runtime. */ + protected static __types: { + api: { + input: DescribeTableReplicaAutoScalingInput; + output: DescribeTableReplicaAutoScalingOutput; + }; + sdk: { + input: DescribeTableReplicaAutoScalingCommandInput; + output: DescribeTableReplicaAutoScalingCommandOutput; + }; + }; +} diff --git a/amplify/functions/downloadDocument/node_modules/@aws-sdk/client-dynamodb/dist-types/commands/DescribeTimeToLiveCommand.d.ts b/amplify/functions/downloadDocument/node_modules/@aws-sdk/client-dynamodb/dist-types/commands/DescribeTimeToLiveCommand.d.ts new file mode 100644 index 0000000..f5f3419 --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@aws-sdk/client-dynamodb/dist-types/commands/DescribeTimeToLiveCommand.d.ts @@ -0,0 +1,84 @@ +import { Command as $Command } from "@smithy/smithy-client"; +import { MetadataBearer as __MetadataBearer } from "@smithy/types"; +import { DynamoDBClientResolvedConfig, ServiceInputTypes, ServiceOutputTypes } from "../DynamoDBClient"; +import { DescribeTimeToLiveInput, DescribeTimeToLiveOutput } from "../models/models_0"; +/** + * @public + */ +export type { __MetadataBearer }; +export { $Command }; +/** + * @public + * + * The input for {@link DescribeTimeToLiveCommand}. + */ +export interface DescribeTimeToLiveCommandInput extends DescribeTimeToLiveInput { +} +/** + * @public + * + * The output of {@link DescribeTimeToLiveCommand}. 
+ */ +export interface DescribeTimeToLiveCommandOutput extends DescribeTimeToLiveOutput, __MetadataBearer { +} +declare const DescribeTimeToLiveCommand_base: { + new (input: DescribeTimeToLiveCommandInput): import("@smithy/smithy-client").CommandImpl; + new (__0_0: DescribeTimeToLiveCommandInput): import("@smithy/smithy-client").CommandImpl; + getEndpointParameterInstructions(): import("@smithy/middleware-endpoint").EndpointParameterInstructions; +}; +/** + *

Gives a description of the Time to Live (TTL) status on the specified table.

+ * @example + * Use a bare-bones client and the command you need to make an API call. + * ```javascript + * import { DynamoDBClient, DescribeTimeToLiveCommand } from "@aws-sdk/client-dynamodb"; // ES Modules import + * // const { DynamoDBClient, DescribeTimeToLiveCommand } = require("@aws-sdk/client-dynamodb"); // CommonJS import + * const client = new DynamoDBClient(config); + * const input = { // DescribeTimeToLiveInput + * TableName: "STRING_VALUE", // required + * }; + * const command = new DescribeTimeToLiveCommand(input); + * const response = await client.send(command); + * // { // DescribeTimeToLiveOutput + * // TimeToLiveDescription: { // TimeToLiveDescription + * // TimeToLiveStatus: "ENABLING" || "DISABLING" || "ENABLED" || "DISABLED", + * // AttributeName: "STRING_VALUE", + * // }, + * // }; + * + * ``` + * + * @param DescribeTimeToLiveCommandInput - {@link DescribeTimeToLiveCommandInput} + * @returns {@link DescribeTimeToLiveCommandOutput} + * @see {@link DescribeTimeToLiveCommandInput} for command's `input` shape. + * @see {@link DescribeTimeToLiveCommandOutput} for command's `response` shape. + * @see {@link DynamoDBClientResolvedConfig | config} for DynamoDBClient's `config` shape. + * + * @throws {@link InternalServerError} (server fault) + *

An error occurred on the server side.

+ * + * @throws {@link InvalidEndpointException} (client fault) + * + * @throws {@link ResourceNotFoundException} (client fault) + *

The operation tried to access a nonexistent table or index. The resource might not + * be specified correctly, or its status might not be ACTIVE.

+ * + * @throws {@link DynamoDBServiceException} + *

Base exception class for all service exceptions from DynamoDB service.

+ * + * + * @public + */ +export declare class DescribeTimeToLiveCommand extends DescribeTimeToLiveCommand_base { + /** @internal type navigation helper, not in runtime. */ + protected static __types: { + api: { + input: DescribeTimeToLiveInput; + output: DescribeTimeToLiveOutput; + }; + sdk: { + input: DescribeTimeToLiveCommandInput; + output: DescribeTimeToLiveCommandOutput; + }; + }; +} diff --git a/amplify/functions/downloadDocument/node_modules/@aws-sdk/client-dynamodb/dist-types/commands/DisableKinesisStreamingDestinationCommand.d.ts b/amplify/functions/downloadDocument/node_modules/@aws-sdk/client-dynamodb/dist-types/commands/DisableKinesisStreamingDestinationCommand.d.ts new file mode 100644 index 0000000..e52268c --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@aws-sdk/client-dynamodb/dist-types/commands/DisableKinesisStreamingDestinationCommand.d.ts @@ -0,0 +1,122 @@ +import { Command as $Command } from "@smithy/smithy-client"; +import { MetadataBearer as __MetadataBearer } from "@smithy/types"; +import { DynamoDBClientResolvedConfig, ServiceInputTypes, ServiceOutputTypes } from "../DynamoDBClient"; +import { KinesisStreamingDestinationInput, KinesisStreamingDestinationOutput } from "../models/models_0"; +/** + * @public + */ +export type { __MetadataBearer }; +export { $Command }; +/** + * @public + * + * The input for {@link DisableKinesisStreamingDestinationCommand}. + */ +export interface DisableKinesisStreamingDestinationCommandInput extends KinesisStreamingDestinationInput { +} +/** + * @public + * + * The output of {@link DisableKinesisStreamingDestinationCommand}. 
+ */ +export interface DisableKinesisStreamingDestinationCommandOutput extends KinesisStreamingDestinationOutput, __MetadataBearer { +} +declare const DisableKinesisStreamingDestinationCommand_base: { + new (input: DisableKinesisStreamingDestinationCommandInput): import("@smithy/smithy-client").CommandImpl; + new (__0_0: DisableKinesisStreamingDestinationCommandInput): import("@smithy/smithy-client").CommandImpl; + getEndpointParameterInstructions(): import("@smithy/middleware-endpoint").EndpointParameterInstructions; +}; +/** + *

Stops replication from the DynamoDB table to the Kinesis data stream. This + * is done without deleting either of the resources.

+ * @example + * Use a bare-bones client and the command you need to make an API call. + * ```javascript + * import { DynamoDBClient, DisableKinesisStreamingDestinationCommand } from "@aws-sdk/client-dynamodb"; // ES Modules import + * // const { DynamoDBClient, DisableKinesisStreamingDestinationCommand } = require("@aws-sdk/client-dynamodb"); // CommonJS import + * const client = new DynamoDBClient(config); + * const input = { // KinesisStreamingDestinationInput + * TableName: "STRING_VALUE", // required + * StreamArn: "STRING_VALUE", // required + * EnableKinesisStreamingConfiguration: { // EnableKinesisStreamingConfiguration + * ApproximateCreationDateTimePrecision: "MILLISECOND" || "MICROSECOND", + * }, + * }; + * const command = new DisableKinesisStreamingDestinationCommand(input); + * const response = await client.send(command); + * // { // KinesisStreamingDestinationOutput + * // TableName: "STRING_VALUE", + * // StreamArn: "STRING_VALUE", + * // DestinationStatus: "ENABLING" || "ACTIVE" || "DISABLING" || "DISABLED" || "ENABLE_FAILED" || "UPDATING", + * // EnableKinesisStreamingConfiguration: { // EnableKinesisStreamingConfiguration + * // ApproximateCreationDateTimePrecision: "MILLISECOND" || "MICROSECOND", + * // }, + * // }; + * + * ``` + * + * @param DisableKinesisStreamingDestinationCommandInput - {@link DisableKinesisStreamingDestinationCommandInput} + * @returns {@link DisableKinesisStreamingDestinationCommandOutput} + * @see {@link DisableKinesisStreamingDestinationCommandInput} for command's `input` shape. + * @see {@link DisableKinesisStreamingDestinationCommandOutput} for command's `response` shape. + * @see {@link DynamoDBClientResolvedConfig | config} for DynamoDBClient's `config` shape. + * + * @throws {@link InternalServerError} (server fault) + *

An error occurred on the server side.

+ * + * @throws {@link InvalidEndpointException} (client fault) + * + * @throws {@link LimitExceededException} (client fault) + *

There is no limit to the number of daily on-demand backups that can be taken.

+ *

For most purposes, up to 500 simultaneous table operations are allowed per account. These operations + * include CreateTable, UpdateTable, + * DeleteTable,UpdateTimeToLive, + * RestoreTableFromBackup, and RestoreTableToPointInTime.

+ *

When you are creating a table with one or more secondary + * indexes, you can have up to 250 such requests running at a time. However, if the table or + * index specifications are complex, then DynamoDB might temporarily reduce the number + * of concurrent operations.

+ *

When importing into DynamoDB, up to 50 simultaneous import table operations are allowed per account.

+ *

There is a soft account quota of 2,500 tables.

+ *

GetRecords was called with a value of more than 1000 for the limit request parameter.

+ *

More than 2 processes are reading from the same streams shard at the same time. Exceeding + * this limit may result in request throttling.

+ * + * @throws {@link ResourceInUseException} (client fault) + *

The operation conflicts with the resource's availability. For example:

+ *
    + *
  • + *

    You attempted to recreate an existing table.

    + *
  • + *
  • + *

    You tried to delete a table currently in the CREATING state.

    + *
  • + *
  • + *

    You tried to update a resource that was already being updated.

    + *
  • + *
+ *

When appropriate, wait for the ongoing update to complete and attempt the request again.

+ * + * @throws {@link ResourceNotFoundException} (client fault) + *

The operation tried to access a nonexistent table or index. The resource might not + * be specified correctly, or its status might not be ACTIVE.

+ * + * @throws {@link DynamoDBServiceException} + *

Base exception class for all service exceptions from DynamoDB service.

+ * + * + * @public + */ +export declare class DisableKinesisStreamingDestinationCommand extends DisableKinesisStreamingDestinationCommand_base { + /** @internal type navigation helper, not in runtime. */ + protected static __types: { + api: { + input: KinesisStreamingDestinationInput; + output: KinesisStreamingDestinationOutput; + }; + sdk: { + input: DisableKinesisStreamingDestinationCommandInput; + output: DisableKinesisStreamingDestinationCommandOutput; + }; + }; +} diff --git a/amplify/functions/downloadDocument/node_modules/@aws-sdk/client-dynamodb/dist-types/commands/EnableKinesisStreamingDestinationCommand.d.ts b/amplify/functions/downloadDocument/node_modules/@aws-sdk/client-dynamodb/dist-types/commands/EnableKinesisStreamingDestinationCommand.d.ts new file mode 100644 index 0000000..ff8985a --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@aws-sdk/client-dynamodb/dist-types/commands/EnableKinesisStreamingDestinationCommand.d.ts @@ -0,0 +1,124 @@ +import { Command as $Command } from "@smithy/smithy-client"; +import { MetadataBearer as __MetadataBearer } from "@smithy/types"; +import { DynamoDBClientResolvedConfig, ServiceInputTypes, ServiceOutputTypes } from "../DynamoDBClient"; +import { KinesisStreamingDestinationInput, KinesisStreamingDestinationOutput } from "../models/models_0"; +/** + * @public + */ +export type { __MetadataBearer }; +export { $Command }; +/** + * @public + * + * The input for {@link EnableKinesisStreamingDestinationCommand}. + */ +export interface EnableKinesisStreamingDestinationCommandInput extends KinesisStreamingDestinationInput { +} +/** + * @public + * + * The output of {@link EnableKinesisStreamingDestinationCommand}. 
+ */ +export interface EnableKinesisStreamingDestinationCommandOutput extends KinesisStreamingDestinationOutput, __MetadataBearer { +} +declare const EnableKinesisStreamingDestinationCommand_base: { + new (input: EnableKinesisStreamingDestinationCommandInput): import("@smithy/smithy-client").CommandImpl; + new (__0_0: EnableKinesisStreamingDestinationCommandInput): import("@smithy/smithy-client").CommandImpl; + getEndpointParameterInstructions(): import("@smithy/middleware-endpoint").EndpointParameterInstructions; +}; +/** + *

Starts table data replication to the specified Kinesis data stream at a timestamp + * chosen during the enable workflow. If this operation doesn't return results immediately, + * use DescribeKinesisStreamingDestination to check if streaming to the Kinesis data stream + * is ACTIVE.

+ * @example + * Use a bare-bones client and the command you need to make an API call. + * ```javascript + * import { DynamoDBClient, EnableKinesisStreamingDestinationCommand } from "@aws-sdk/client-dynamodb"; // ES Modules import + * // const { DynamoDBClient, EnableKinesisStreamingDestinationCommand } = require("@aws-sdk/client-dynamodb"); // CommonJS import + * const client = new DynamoDBClient(config); + * const input = { // KinesisStreamingDestinationInput + * TableName: "STRING_VALUE", // required + * StreamArn: "STRING_VALUE", // required + * EnableKinesisStreamingConfiguration: { // EnableKinesisStreamingConfiguration + * ApproximateCreationDateTimePrecision: "MILLISECOND" || "MICROSECOND", + * }, + * }; + * const command = new EnableKinesisStreamingDestinationCommand(input); + * const response = await client.send(command); + * // { // KinesisStreamingDestinationOutput + * // TableName: "STRING_VALUE", + * // StreamArn: "STRING_VALUE", + * // DestinationStatus: "ENABLING" || "ACTIVE" || "DISABLING" || "DISABLED" || "ENABLE_FAILED" || "UPDATING", + * // EnableKinesisStreamingConfiguration: { // EnableKinesisStreamingConfiguration + * // ApproximateCreationDateTimePrecision: "MILLISECOND" || "MICROSECOND", + * // }, + * // }; + * + * ``` + * + * @param EnableKinesisStreamingDestinationCommandInput - {@link EnableKinesisStreamingDestinationCommandInput} + * @returns {@link EnableKinesisStreamingDestinationCommandOutput} + * @see {@link EnableKinesisStreamingDestinationCommandInput} for command's `input` shape. + * @see {@link EnableKinesisStreamingDestinationCommandOutput} for command's `response` shape. + * @see {@link DynamoDBClientResolvedConfig | config} for DynamoDBClient's `config` shape. + * + * @throws {@link InternalServerError} (server fault) + *

An error occurred on the server side.

+ * + * @throws {@link InvalidEndpointException} (client fault) + * + * @throws {@link LimitExceededException} (client fault) + *

There is no limit to the number of daily on-demand backups that can be taken.

+ *

For most purposes, up to 500 simultaneous table operations are allowed per account. These operations + * include CreateTable, UpdateTable, + * DeleteTable,UpdateTimeToLive, + * RestoreTableFromBackup, and RestoreTableToPointInTime.

+ *

When you are creating a table with one or more secondary + * indexes, you can have up to 250 such requests running at a time. However, if the table or + * index specifications are complex, then DynamoDB might temporarily reduce the number + * of concurrent operations.

+ *

When importing into DynamoDB, up to 50 simultaneous import table operations are allowed per account.

+ *

There is a soft account quota of 2,500 tables.

+ *

GetRecords was called with a value of more than 1000 for the limit request parameter.

+ *

More than 2 processes are reading from the same streams shard at the same time. Exceeding + * this limit may result in request throttling.

+ * + * @throws {@link ResourceInUseException} (client fault) + *

The operation conflicts with the resource's availability. For example:

+ *
    + *
  • + *

    You attempted to recreate an existing table.

    + *
  • + *
  • + *

    You tried to delete a table currently in the CREATING state.

    + *
  • + *
  • + *

    You tried to update a resource that was already being updated.

    + *
  • + *
+ *

When appropriate, wait for the ongoing update to complete and attempt the request again.

+ * + * @throws {@link ResourceNotFoundException} (client fault) + *

The operation tried to access a nonexistent table or index. The resource might not + * be specified correctly, or its status might not be ACTIVE.

+ * + * @throws {@link DynamoDBServiceException} + *

Base exception class for all service exceptions from DynamoDB service.

+ * + * + * @public + */ +export declare class EnableKinesisStreamingDestinationCommand extends EnableKinesisStreamingDestinationCommand_base { + /** @internal type navigation helper, not in runtime. */ + protected static __types: { + api: { + input: KinesisStreamingDestinationInput; + output: KinesisStreamingDestinationOutput; + }; + sdk: { + input: EnableKinesisStreamingDestinationCommandInput; + output: EnableKinesisStreamingDestinationCommandOutput; + }; + }; +} diff --git a/amplify/functions/downloadDocument/node_modules/@aws-sdk/client-dynamodb/dist-types/commands/ExecuteStatementCommand.d.ts b/amplify/functions/downloadDocument/node_modules/@aws-sdk/client-dynamodb/dist-types/commands/ExecuteStatementCommand.d.ts new file mode 100644 index 0000000..999b557 --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@aws-sdk/client-dynamodb/dist-types/commands/ExecuteStatementCommand.d.ts @@ -0,0 +1,242 @@ +import { Command as $Command } from "@smithy/smithy-client"; +import { MetadataBearer as __MetadataBearer } from "@smithy/types"; +import { DynamoDBClientResolvedConfig, ServiceInputTypes, ServiceOutputTypes } from "../DynamoDBClient"; +import { ExecuteStatementInput, ExecuteStatementOutput } from "../models/models_0"; +/** + * @public + */ +export type { __MetadataBearer }; +export { $Command }; +/** + * @public + * + * The input for {@link ExecuteStatementCommand}. + */ +export interface ExecuteStatementCommandInput extends ExecuteStatementInput { +} +/** + * @public + * + * The output of {@link ExecuteStatementCommand}. 
+ */ +export interface ExecuteStatementCommandOutput extends ExecuteStatementOutput, __MetadataBearer { +} +declare const ExecuteStatementCommand_base: { + new (input: ExecuteStatementCommandInput): import("@smithy/smithy-client").CommandImpl; + new (__0_0: ExecuteStatementCommandInput): import("@smithy/smithy-client").CommandImpl; + getEndpointParameterInstructions(): import("@smithy/middleware-endpoint").EndpointParameterInstructions; +}; +/** + *

This operation allows you to perform reads and singleton writes on data stored in + * DynamoDB, using PartiQL.

+ *

For PartiQL reads (SELECT statement), if the total number of processed + * items exceeds the maximum dataset size limit of 1 MB, the read stops and results are + * returned to the user as a LastEvaluatedKey value to continue the read in a + * subsequent operation. If the filter criteria in WHERE clause does not match + * any data, the read will return an empty result set.

+ *

A single SELECT statement response can return up to the maximum number of + * items (if using the Limit parameter) or a maximum of 1 MB of data (and then apply any + * filtering to the results using WHERE clause). If + * LastEvaluatedKey is present in the response, you need to paginate the + * result set. If NextToken is present, you need to paginate the result set + * and include NextToken.

+ * @example + * Use a bare-bones client and the command you need to make an API call. + * ```javascript + * import { DynamoDBClient, ExecuteStatementCommand } from "@aws-sdk/client-dynamodb"; // ES Modules import + * // const { DynamoDBClient, ExecuteStatementCommand } = require("@aws-sdk/client-dynamodb"); // CommonJS import + * const client = new DynamoDBClient(config); + * const input = { // ExecuteStatementInput + * Statement: "STRING_VALUE", // required + * Parameters: [ // PreparedStatementParameters + * { // AttributeValue Union: only one key present + * S: "STRING_VALUE", + * N: "STRING_VALUE", + * B: new Uint8Array(), // e.g. Buffer.from("") or new TextEncoder().encode("") + * SS: [ // StringSetAttributeValue + * "STRING_VALUE", + * ], + * NS: [ // NumberSetAttributeValue + * "STRING_VALUE", + * ], + * BS: [ // BinarySetAttributeValue + * new Uint8Array(), // e.g. Buffer.from("") or new TextEncoder().encode("") + * ], + * M: { // MapAttributeValue + * "": {// Union: only one key present + * S: "STRING_VALUE", + * N: "STRING_VALUE", + * B: new Uint8Array(), // e.g. Buffer.from("") or new TextEncoder().encode("") + * SS: [ + * "STRING_VALUE", + * ], + * NS: [ + * "STRING_VALUE", + * ], + * BS: [ + * new Uint8Array(), // e.g. 
Buffer.from("") or new TextEncoder().encode("") + * ], + * M: { + * "": "", + * }, + * L: [ // ListAttributeValue + * "", + * ], + * NULL: true || false, + * BOOL: true || false, + * }, + * }, + * L: [ + * "", + * ], + * NULL: true || false, + * BOOL: true || false, + * }, + * ], + * ConsistentRead: true || false, + * NextToken: "STRING_VALUE", + * ReturnConsumedCapacity: "INDEXES" || "TOTAL" || "NONE", + * Limit: Number("int"), + * ReturnValuesOnConditionCheckFailure: "ALL_OLD" || "NONE", + * }; + * const command = new ExecuteStatementCommand(input); + * const response = await client.send(command); + * // { // ExecuteStatementOutput + * // Items: [ // ItemList + * // { // AttributeMap + * // "": { // AttributeValue Union: only one key present + * // S: "STRING_VALUE", + * // N: "STRING_VALUE", + * // B: new Uint8Array(), + * // SS: [ // StringSetAttributeValue + * // "STRING_VALUE", + * // ], + * // NS: [ // NumberSetAttributeValue + * // "STRING_VALUE", + * // ], + * // BS: [ // BinarySetAttributeValue + * // new Uint8Array(), + * // ], + * // M: { // MapAttributeValue + * // "": {// Union: only one key present + * // S: "STRING_VALUE", + * // N: "STRING_VALUE", + * // B: new Uint8Array(), + * // SS: [ + * // "STRING_VALUE", + * // ], + * // NS: [ + * // "STRING_VALUE", + * // ], + * // BS: [ + * // new Uint8Array(), + * // ], + * // M: { + * // "": "", + * // }, + * // L: [ // ListAttributeValue + * // "", + * // ], + * // NULL: true || false, + * // BOOL: true || false, + * // }, + * // }, + * // L: [ + * // "", + * // ], + * // NULL: true || false, + * // BOOL: true || false, + * // }, + * // }, + * // ], + * // NextToken: "STRING_VALUE", + * // ConsumedCapacity: { // ConsumedCapacity + * // TableName: "STRING_VALUE", + * // CapacityUnits: Number("double"), + * // ReadCapacityUnits: Number("double"), + * // WriteCapacityUnits: Number("double"), + * // Table: { // Capacity + * // ReadCapacityUnits: Number("double"), + * // WriteCapacityUnits: Number("double"), 
+ * // CapacityUnits: Number("double"), + * // }, + * // LocalSecondaryIndexes: { // SecondaryIndexesCapacityMap + * // "": { + * // ReadCapacityUnits: Number("double"), + * // WriteCapacityUnits: Number("double"), + * // CapacityUnits: Number("double"), + * // }, + * // }, + * // GlobalSecondaryIndexes: { + * // "": { + * // ReadCapacityUnits: Number("double"), + * // WriteCapacityUnits: Number("double"), + * // CapacityUnits: Number("double"), + * // }, + * // }, + * // }, + * // LastEvaluatedKey: { // Key + * // "": "", + * // }, + * // }; + * + * ``` + * + * @param ExecuteStatementCommandInput - {@link ExecuteStatementCommandInput} + * @returns {@link ExecuteStatementCommandOutput} + * @see {@link ExecuteStatementCommandInput} for command's `input` shape. + * @see {@link ExecuteStatementCommandOutput} for command's `response` shape. + * @see {@link DynamoDBClientResolvedConfig | config} for DynamoDBClient's `config` shape. + * + * @throws {@link ConditionalCheckFailedException} (client fault) + *

A condition specified in the operation failed to be evaluated.

+ * + * @throws {@link DuplicateItemException} (client fault) + *

There was an attempt to insert an item with the same primary key as an item that + * already exists in the DynamoDB table.

+ * + * @throws {@link InternalServerError} (server fault) + *

An error occurred on the server side.

+ * + * @throws {@link ItemCollectionSizeLimitExceededException} (client fault) + *

An item collection is too large. This exception is only returned for tables that + * have one or more local secondary indexes.

+ * + * @throws {@link ProvisionedThroughputExceededException} (client fault) + *

Your request rate is too high. The Amazon Web Services SDKs for DynamoDB + * automatically retry requests that receive this exception. Your request is eventually + * successful, unless your retry queue is too large to finish. Reduce the frequency of + * requests and use exponential backoff. For more information, go to Error Retries and Exponential Backoff in the Amazon DynamoDB Developer Guide.

+ * + * @throws {@link RequestLimitExceeded} (client fault) + *

Throughput exceeds the current throughput quota for your account. Please contact + * Amazon Web ServicesSupport to request a + * quota increase.

+ * + * @throws {@link ResourceNotFoundException} (client fault) + *

The operation tried to access a nonexistent table or index. The resource might not + * be specified correctly, or its status might not be ACTIVE.

+ * + * @throws {@link TransactionConflictException} (client fault) + *

Operation was rejected because there is an ongoing transaction for the + * item.

+ * + * @throws {@link DynamoDBServiceException} + *

Base exception class for all service exceptions from DynamoDB service.

+ * + * + * @public + */ +export declare class ExecuteStatementCommand extends ExecuteStatementCommand_base { + /** @internal type navigation helper, not in runtime. */ + protected static __types: { + api: { + input: ExecuteStatementInput; + output: ExecuteStatementOutput; + }; + sdk: { + input: ExecuteStatementCommandInput; + output: ExecuteStatementCommandOutput; + }; + }; +} diff --git a/amplify/functions/downloadDocument/node_modules/@aws-sdk/client-dynamodb/dist-types/commands/ExecuteTransactionCommand.d.ts b/amplify/functions/downloadDocument/node_modules/@aws-sdk/client-dynamodb/dist-types/commands/ExecuteTransactionCommand.d.ts new file mode 100644 index 0000000..6e1a94b --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@aws-sdk/client-dynamodb/dist-types/commands/ExecuteTransactionCommand.d.ts @@ -0,0 +1,533 @@ +import { Command as $Command } from "@smithy/smithy-client"; +import { MetadataBearer as __MetadataBearer } from "@smithy/types"; +import { DynamoDBClientResolvedConfig, ServiceInputTypes, ServiceOutputTypes } from "../DynamoDBClient"; +import { ExecuteTransactionInput, ExecuteTransactionOutput } from "../models/models_0"; +/** + * @public + */ +export type { __MetadataBearer }; +export { $Command }; +/** + * @public + * + * The input for {@link ExecuteTransactionCommand}. + */ +export interface ExecuteTransactionCommandInput extends ExecuteTransactionInput { +} +/** + * @public + * + * The output of {@link ExecuteTransactionCommand}. + */ +export interface ExecuteTransactionCommandOutput extends ExecuteTransactionOutput, __MetadataBearer { +} +declare const ExecuteTransactionCommand_base: { + new (input: ExecuteTransactionCommandInput): import("@smithy/smithy-client").CommandImpl; + new (__0_0: ExecuteTransactionCommandInput): import("@smithy/smithy-client").CommandImpl; + getEndpointParameterInstructions(): import("@smithy/middleware-endpoint").EndpointParameterInstructions; +}; +/** + *

This operation allows you to perform transactional reads or writes on data stored in + * DynamoDB, using PartiQL.

+ * + *

The entire transaction must consist of either read statements or write statements, + * you cannot mix both in one transaction. The EXISTS function is an exception and can + * be used to check the condition of specific attributes of the item in a similar + * manner to ConditionCheck in the TransactWriteItems API.

+ *
+ * @example + * Use a bare-bones client and the command you need to make an API call. + * ```javascript + * import { DynamoDBClient, ExecuteTransactionCommand } from "@aws-sdk/client-dynamodb"; // ES Modules import + * // const { DynamoDBClient, ExecuteTransactionCommand } = require("@aws-sdk/client-dynamodb"); // CommonJS import + * const client = new DynamoDBClient(config); + * const input = { // ExecuteTransactionInput + * TransactStatements: [ // ParameterizedStatements // required + * { // ParameterizedStatement + * Statement: "STRING_VALUE", // required + * Parameters: [ // PreparedStatementParameters + * { // AttributeValue Union: only one key present + * S: "STRING_VALUE", + * N: "STRING_VALUE", + * B: new Uint8Array(), // e.g. Buffer.from("") or new TextEncoder().encode("") + * SS: [ // StringSetAttributeValue + * "STRING_VALUE", + * ], + * NS: [ // NumberSetAttributeValue + * "STRING_VALUE", + * ], + * BS: [ // BinarySetAttributeValue + * new Uint8Array(), // e.g. Buffer.from("") or new TextEncoder().encode("") + * ], + * M: { // MapAttributeValue + * "": {// Union: only one key present + * S: "STRING_VALUE", + * N: "STRING_VALUE", + * B: new Uint8Array(), // e.g. Buffer.from("") or new TextEncoder().encode("") + * SS: [ + * "STRING_VALUE", + * ], + * NS: [ + * "STRING_VALUE", + * ], + * BS: [ + * new Uint8Array(), // e.g. 
Buffer.from("") or new TextEncoder().encode("") + * ], + * M: { + * "": "", + * }, + * L: [ // ListAttributeValue + * "", + * ], + * NULL: true || false, + * BOOL: true || false, + * }, + * }, + * L: [ + * "", + * ], + * NULL: true || false, + * BOOL: true || false, + * }, + * ], + * ReturnValuesOnConditionCheckFailure: "ALL_OLD" || "NONE", + * }, + * ], + * ClientRequestToken: "STRING_VALUE", + * ReturnConsumedCapacity: "INDEXES" || "TOTAL" || "NONE", + * }; + * const command = new ExecuteTransactionCommand(input); + * const response = await client.send(command); + * // { // ExecuteTransactionOutput + * // Responses: [ // ItemResponseList + * // { // ItemResponse + * // Item: { // AttributeMap + * // "": { // AttributeValue Union: only one key present + * // S: "STRING_VALUE", + * // N: "STRING_VALUE", + * // B: new Uint8Array(), + * // SS: [ // StringSetAttributeValue + * // "STRING_VALUE", + * // ], + * // NS: [ // NumberSetAttributeValue + * // "STRING_VALUE", + * // ], + * // BS: [ // BinarySetAttributeValue + * // new Uint8Array(), + * // ], + * // M: { // MapAttributeValue + * // "": {// Union: only one key present + * // S: "STRING_VALUE", + * // N: "STRING_VALUE", + * // B: new Uint8Array(), + * // SS: [ + * // "STRING_VALUE", + * // ], + * // NS: [ + * // "STRING_VALUE", + * // ], + * // BS: [ + * // new Uint8Array(), + * // ], + * // M: { + * // "": "", + * // }, + * // L: [ // ListAttributeValue + * // "", + * // ], + * // NULL: true || false, + * // BOOL: true || false, + * // }, + * // }, + * // L: [ + * // "", + * // ], + * // NULL: true || false, + * // BOOL: true || false, + * // }, + * // }, + * // }, + * // ], + * // ConsumedCapacity: [ // ConsumedCapacityMultiple + * // { // ConsumedCapacity + * // TableName: "STRING_VALUE", + * // CapacityUnits: Number("double"), + * // ReadCapacityUnits: Number("double"), + * // WriteCapacityUnits: Number("double"), + * // Table: { // Capacity + * // ReadCapacityUnits: Number("double"), + * // 
WriteCapacityUnits: Number("double"), + * // CapacityUnits: Number("double"), + * // }, + * // LocalSecondaryIndexes: { // SecondaryIndexesCapacityMap + * // "": { + * // ReadCapacityUnits: Number("double"), + * // WriteCapacityUnits: Number("double"), + * // CapacityUnits: Number("double"), + * // }, + * // }, + * // GlobalSecondaryIndexes: { + * // "": { + * // ReadCapacityUnits: Number("double"), + * // WriteCapacityUnits: Number("double"), + * // CapacityUnits: Number("double"), + * // }, + * // }, + * // }, + * // ], + * // }; + * + * ``` + * + * @param ExecuteTransactionCommandInput - {@link ExecuteTransactionCommandInput} + * @returns {@link ExecuteTransactionCommandOutput} + * @see {@link ExecuteTransactionCommandInput} for command's `input` shape. + * @see {@link ExecuteTransactionCommandOutput} for command's `response` shape. + * @see {@link DynamoDBClientResolvedConfig | config} for DynamoDBClient's `config` shape. + * + * @throws {@link IdempotentParameterMismatchException} (client fault) + *

DynamoDB rejected the request because you retried a request with a + * different payload but with an idempotent token that was already used.

+ * + * @throws {@link InternalServerError} (server fault) + *

An error occurred on the server side.

+ * + * @throws {@link ProvisionedThroughputExceededException} (client fault) + *

Your request rate is too high. The Amazon Web Services SDKs for DynamoDB + * automatically retry requests that receive this exception. Your request is eventually + * successful, unless your retry queue is too large to finish. Reduce the frequency of + * requests and use exponential backoff. For more information, go to Error Retries and Exponential Backoff in the Amazon DynamoDB Developer Guide.

+ * + * @throws {@link RequestLimitExceeded} (client fault) + *

Throughput exceeds the current throughput quota for your account. Please contact + * Amazon Web ServicesSupport to request a + * quota increase.

+ * + * @throws {@link ResourceNotFoundException} (client fault) + *

The operation tried to access a nonexistent table or index. The resource might not + * be specified correctly, or its status might not be ACTIVE.

+ * + * @throws {@link TransactionCanceledException} (client fault) + *

The entire transaction request was canceled.

+ *

DynamoDB cancels a TransactWriteItems request under the following + * circumstances:

+ *
    + *
  • + *

    A condition in one of the condition expressions is not met.

    + *
  • + *
  • + *

    A table in the TransactWriteItems request is in a different + * account or region.

    + *
  • + *
  • + *

    More than one action in the TransactWriteItems operation + * targets the same item.

    + *
  • + *
  • + *

    There is insufficient provisioned capacity for the transaction to be + * completed.

    + *
  • + *
  • + *

    An item size becomes too large (larger than 400 KB), or a local secondary + * index (LSI) becomes too large, or a similar validation error occurs because of + * changes made by the transaction.

    + *
  • + *
  • + *

    There is a user error, such as an invalid data format.

    + *
  • + *
  • + *

    + * There is an ongoing TransactWriteItems operation that conflicts with a concurrent + * TransactWriteItems request. In this case the TransactWriteItems operation + * fails with a TransactionCanceledException. + *

    + *
  • + *
+ *

DynamoDB cancels a TransactGetItems request under the + * following circumstances:

+ *
    + *
  • + *

    There is an ongoing TransactGetItems operation that conflicts + * with a concurrent PutItem, UpdateItem, + * DeleteItem or TransactWriteItems request. In this + * case the TransactGetItems operation fails with a + * TransactionCanceledException.

    + *
  • + *
  • + *

    A table in the TransactGetItems request is in a different + * account or region.

    + *
  • + *
  • + *

    There is insufficient provisioned capacity for the transaction to be + * completed.

    + *
  • + *
  • + *

    There is a user error, such as an invalid data format.

    + *
  • + *
+ * + *

If using Java, DynamoDB lists the cancellation reasons on the + * CancellationReasons property. This property is not set for other + * languages. Transaction cancellation reasons are ordered in the order of requested + * items, if an item has no error it will have None code and + * Null message.

+ *
+ *

Cancellation reason codes and possible error messages:

+ *
    + *
  • + *

    No Errors:

    + *
      + *
    • + *

      Code: None + *

      + *
    • + *
    • + *

      Message: null + *

      + *
    • + *
    + *
  • + *
  • + *

    Conditional Check Failed:

    + *
      + *
    • + *

      Code: ConditionalCheckFailed + *

      + *
    • + *
    • + *

      Message: The conditional request failed.

      + *
    • + *
    + *
  • + *
  • + *

    Item Collection Size Limit Exceeded:

    + *
      + *
    • + *

      Code: ItemCollectionSizeLimitExceeded + *

      + *
    • + *
    • + *

      Message: Collection size exceeded.

      + *
    • + *
    + *
  • + *
  • + *

    Transaction Conflict:

    + *
      + *
    • + *

      Code: TransactionConflict + *

      + *
    • + *
    • + *

      Message: Transaction is ongoing for the item.

      + *
    • + *
    + *
  • + *
  • + *

    Provisioned Throughput Exceeded:

    + *
      + *
    • + *

      Code: ProvisionedThroughputExceeded + *

      + *
    • + *
    • + *

      Messages:

      + *
        + *
      • + *

        The level of configured provisioned throughput for the + * table was exceeded. Consider increasing your provisioning level + * with the UpdateTable API.

        + * + *

        This Message is received when provisioned throughput is + * exceeded is on a provisioned DynamoDB + * table.

        + *
        + *
      • + *
      • + *

        The level of configured provisioned throughput for one or + * more global secondary indexes of the table was exceeded. + * Consider increasing your provisioning level for the + * under-provisioned global secondary indexes with the UpdateTable + * API.

        + * + *

        This message is returned when provisioned throughput is + * exceeded is on a provisioned GSI.

        + *
        + *
      • + *
      + *
    • + *
    + *
  • + *
  • + *

    Throttling Error:

    + *
      + *
    • + *

      Code: ThrottlingError + *

      + *
    • + *
    • + *

      Messages:

      + *
        + *
      • + *

        Throughput exceeds the current capacity of your table or + * index. DynamoDB is automatically scaling your table or + * index so please try again shortly. If exceptions persist, check + * if you have a hot key: + * https://docs.aws.amazon.com/amazondynamodb/latest/developerguide/bp-partition-key-design.html.

        + * + *

        This message is returned when writes get throttled on an + * On-Demand table as DynamoDB is automatically + * scaling the table.

        + *
        + *
      • + *
      • + *

        Throughput exceeds the current capacity for one or more + * global secondary indexes. DynamoDB is automatically + * scaling your index so please try again shortly.

        + * + *

        This message is returned when writes get throttled on + * an On-Demand GSI as DynamoDB is automatically + * scaling the GSI.

        + *
        + *
      • + *
      + *
    • + *
    + *
  • + *
  • + *

    Validation Error:

    + *
      + *
    • + *

      Code: ValidationError + *

      + *
    • + *
    • + *

      Messages:

      + *
        + *
      • + *

        One or more parameter values were invalid.

        + *
      • + *
      • + *

        The update expression attempted to update the secondary + * index key beyond allowed size limits.

        + *
      • + *
      • + *

        The update expression attempted to update the secondary + * index key to unsupported type.

        + *
      • + *
      • + *

        An operand in the update expression has an incorrect data + * type.

        + *
      • + *
      • + *

        Item size to update has exceeded the maximum allowed + * size.

        + *
      • + *
      • + *

        Number overflow. Attempting to store a number with + * magnitude larger than supported range.

        + *
      • + *
      • + *

        Type mismatch for attribute to update.

        + *
      • + *
      • + *

        Nesting Levels have exceeded supported limits.

        + *
      • + *
      • + *

        The document path provided in the update expression is + * invalid for update.

        + *
      • + *
      • + *

        The provided expression refers to an attribute that does + * not exist in the item.

        + *
      • + *
      + *
    • + *
    + *
  • + *
+ * + * @throws {@link TransactionInProgressException} (client fault) + *

The transaction with the given request token is already in progress.

+ *

+ * Recommended Settings + *

+ * + *

+ * This is a general recommendation for handling the TransactionInProgressException. These settings help + * ensure that the client retries will trigger completion of the ongoing TransactWriteItems request. + *

+ *
+ *
    + *
  • + *

    + * Set clientExecutionTimeout to a value that allows at least one retry to be processed after 5 + * seconds have elapsed since the first attempt for the TransactWriteItems operation. + *

    + *
  • + *
  • + *

    + * Set socketTimeout to a value a little lower than the requestTimeout setting. + *

    + *
  • + *
  • + *

    + * requestTimeout should be set based on the time taken for the individual retries of a single + * HTTP request for your use case, but setting it to 1 second or higher should work well to reduce chances of + * retries and TransactionInProgressException errors. + *

    + *
  • + *
  • + *

    + * Use exponential backoff when retrying and tune backoff if needed. + *

    + *
  • + *
+ *

+ * Assuming default retry policy, + * example timeout settings based on the guidelines above are as follows: + *

+ *

Example timeline:

+ *
    + *
  • + *

    0-1000 first attempt

    + *
  • + *
  • + *

    1000-1500 first sleep/delay (default retry policy uses 500 ms as base delay for 4xx errors)

    + *
  • + *
  • + *

    1500-2500 second attempt

    + *
  • + *
  • + *

    2500-3500 second sleep/delay (500 * 2, exponential backoff)

    + *
  • + *
  • + *

    3500-4500 third attempt

    + *
  • + *
  • + *

    4500-6500 third sleep/delay (500 * 2^2)

    + *
  • + *
  • + *

    6500-7500 fourth attempt (this can trigger inline recovery since 5 seconds have elapsed since the first attempt reached TC)

    + *
  • + *
+ * + * @throws {@link DynamoDBServiceException} + *

Base exception class for all service exceptions from DynamoDB service.

+ * + * + * @public + */ +export declare class ExecuteTransactionCommand extends ExecuteTransactionCommand_base { + /** @internal type navigation helper, not in runtime. */ + protected static __types: { + api: { + input: ExecuteTransactionInput; + output: ExecuteTransactionOutput; + }; + sdk: { + input: ExecuteTransactionCommandInput; + output: ExecuteTransactionCommandOutput; + }; + }; +} diff --git a/amplify/functions/downloadDocument/node_modules/@aws-sdk/client-dynamodb/dist-types/commands/ExportTableToPointInTimeCommand.d.ts b/amplify/functions/downloadDocument/node_modules/@aws-sdk/client-dynamodb/dist-types/commands/ExportTableToPointInTimeCommand.d.ts new file mode 100644 index 0000000..672cebb --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@aws-sdk/client-dynamodb/dist-types/commands/ExportTableToPointInTimeCommand.d.ts @@ -0,0 +1,147 @@ +import { Command as $Command } from "@smithy/smithy-client"; +import { MetadataBearer as __MetadataBearer } from "@smithy/types"; +import { DynamoDBClientResolvedConfig, ServiceInputTypes, ServiceOutputTypes } from "../DynamoDBClient"; +import { ExportTableToPointInTimeInput, ExportTableToPointInTimeOutput } from "../models/models_0"; +/** + * @public + */ +export type { __MetadataBearer }; +export { $Command }; +/** + * @public + * + * The input for {@link ExportTableToPointInTimeCommand}. + */ +export interface ExportTableToPointInTimeCommandInput extends ExportTableToPointInTimeInput { +} +/** + * @public + * + * The output of {@link ExportTableToPointInTimeCommand}. 
+ */ +export interface ExportTableToPointInTimeCommandOutput extends ExportTableToPointInTimeOutput, __MetadataBearer { +} +declare const ExportTableToPointInTimeCommand_base: { + new (input: ExportTableToPointInTimeCommandInput): import("@smithy/smithy-client").CommandImpl; + new (__0_0: ExportTableToPointInTimeCommandInput): import("@smithy/smithy-client").CommandImpl; + getEndpointParameterInstructions(): import("@smithy/middleware-endpoint").EndpointParameterInstructions; +}; +/** + *

Exports table data to an S3 bucket. The table must have point in time recovery + * enabled, and you can export data from any time within the point in time recovery + * window.

+ * @example + * Use a bare-bones client and the command you need to make an API call. + * ```javascript + * import { DynamoDBClient, ExportTableToPointInTimeCommand } from "@aws-sdk/client-dynamodb"; // ES Modules import + * // const { DynamoDBClient, ExportTableToPointInTimeCommand } = require("@aws-sdk/client-dynamodb"); // CommonJS import + * const client = new DynamoDBClient(config); + * const input = { // ExportTableToPointInTimeInput + * TableArn: "STRING_VALUE", // required + * ExportTime: new Date("TIMESTAMP"), + * ClientToken: "STRING_VALUE", + * S3Bucket: "STRING_VALUE", // required + * S3BucketOwner: "STRING_VALUE", + * S3Prefix: "STRING_VALUE", + * S3SseAlgorithm: "AES256" || "KMS", + * S3SseKmsKeyId: "STRING_VALUE", + * ExportFormat: "DYNAMODB_JSON" || "ION", + * ExportType: "FULL_EXPORT" || "INCREMENTAL_EXPORT", + * IncrementalExportSpecification: { // IncrementalExportSpecification + * ExportFromTime: new Date("TIMESTAMP"), + * ExportToTime: new Date("TIMESTAMP"), + * ExportViewType: "NEW_IMAGE" || "NEW_AND_OLD_IMAGES", + * }, + * }; + * const command = new ExportTableToPointInTimeCommand(input); + * const response = await client.send(command); + * // { // ExportTableToPointInTimeOutput + * // ExportDescription: { // ExportDescription + * // ExportArn: "STRING_VALUE", + * // ExportStatus: "IN_PROGRESS" || "COMPLETED" || "FAILED", + * // StartTime: new Date("TIMESTAMP"), + * // EndTime: new Date("TIMESTAMP"), + * // ExportManifest: "STRING_VALUE", + * // TableArn: "STRING_VALUE", + * // TableId: "STRING_VALUE", + * // ExportTime: new Date("TIMESTAMP"), + * // ClientToken: "STRING_VALUE", + * // S3Bucket: "STRING_VALUE", + * // S3BucketOwner: "STRING_VALUE", + * // S3Prefix: "STRING_VALUE", + * // S3SseAlgorithm: "AES256" || "KMS", + * // S3SseKmsKeyId: "STRING_VALUE", + * // FailureCode: "STRING_VALUE", + * // FailureMessage: "STRING_VALUE", + * // ExportFormat: "DYNAMODB_JSON" || "ION", + * // BilledSizeBytes: Number("long"), + * // ItemCount: 
Number("long"), + * // ExportType: "FULL_EXPORT" || "INCREMENTAL_EXPORT", + * // IncrementalExportSpecification: { // IncrementalExportSpecification + * // ExportFromTime: new Date("TIMESTAMP"), + * // ExportToTime: new Date("TIMESTAMP"), + * // ExportViewType: "NEW_IMAGE" || "NEW_AND_OLD_IMAGES", + * // }, + * // }, + * // }; + * + * ``` + * + * @param ExportTableToPointInTimeCommandInput - {@link ExportTableToPointInTimeCommandInput} + * @returns {@link ExportTableToPointInTimeCommandOutput} + * @see {@link ExportTableToPointInTimeCommandInput} for command's `input` shape. + * @see {@link ExportTableToPointInTimeCommandOutput} for command's `response` shape. + * @see {@link DynamoDBClientResolvedConfig | config} for DynamoDBClient's `config` shape. + * + * @throws {@link ExportConflictException} (client fault) + *

There was a conflict when writing to the specified S3 bucket.

+ * + * @throws {@link InternalServerError} (server fault) + *

An error occurred on the server side.

+ * + * @throws {@link InvalidExportTimeException} (client fault) + *

The specified ExportTime is outside of the point in time recovery + * window.

+ * + * @throws {@link LimitExceededException} (client fault) + *

There is no limit to the number of daily on-demand backups that can be taken.

+ *

For most purposes, up to 500 simultaneous table operations are allowed per account. These operations + * include CreateTable, UpdateTable, + * DeleteTable,UpdateTimeToLive, + * RestoreTableFromBackup, and RestoreTableToPointInTime.

+ *

When you are creating a table with one or more secondary + * indexes, you can have up to 250 such requests running at a time. However, if the table or + * index specifications are complex, then DynamoDB might temporarily reduce the number + * of concurrent operations.

+ *

When importing into DynamoDB, up to 50 simultaneous import table operations are allowed per account.

+ *

There is a soft account quota of 2,500 tables.

+ *

GetRecords was called with a value of more than 1000 for the limit request parameter.

+ *

More than 2 processes are reading from the same streams shard at the same time. Exceeding + * this limit may result in request throttling.

+ * + * @throws {@link PointInTimeRecoveryUnavailableException} (client fault) + *

Point in time recovery has not yet been enabled for this source table.

+ * + * @throws {@link TableNotFoundException} (client fault) + *

A source table with the name TableName does not currently exist within + * the subscriber's account or the subscriber is operating in the wrong Amazon Web Services Region.

+ * + * @throws {@link DynamoDBServiceException} + *

Base exception class for all service exceptions from DynamoDB service.

+ * + * + * @public + */ +export declare class ExportTableToPointInTimeCommand extends ExportTableToPointInTimeCommand_base { + /** @internal type navigation helper, not in runtime. */ + protected static __types: { + api: { + input: ExportTableToPointInTimeInput; + output: ExportTableToPointInTimeOutput; + }; + sdk: { + input: ExportTableToPointInTimeCommandInput; + output: ExportTableToPointInTimeCommandOutput; + }; + }; +} diff --git a/amplify/functions/downloadDocument/node_modules/@aws-sdk/client-dynamodb/dist-types/commands/GetItemCommand.d.ts b/amplify/functions/downloadDocument/node_modules/@aws-sdk/client-dynamodb/dist-types/commands/GetItemCommand.d.ts new file mode 100644 index 0000000..b5e2dfa --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@aws-sdk/client-dynamodb/dist-types/commands/GetItemCommand.d.ts @@ -0,0 +1,255 @@ +import { Command as $Command } from "@smithy/smithy-client"; +import { MetadataBearer as __MetadataBearer } from "@smithy/types"; +import { DynamoDBClientResolvedConfig, ServiceInputTypes, ServiceOutputTypes } from "../DynamoDBClient"; +import { GetItemInput, GetItemOutput } from "../models/models_0"; +/** + * @public + */ +export type { __MetadataBearer }; +export { $Command }; +/** + * @public + * + * The input for {@link GetItemCommand}. + */ +export interface GetItemCommandInput extends GetItemInput { +} +/** + * @public + * + * The output of {@link GetItemCommand}. + */ +export interface GetItemCommandOutput extends GetItemOutput, __MetadataBearer { +} +declare const GetItemCommand_base: { + new (input: GetItemCommandInput): import("@smithy/smithy-client").CommandImpl; + new (__0_0: GetItemCommandInput): import("@smithy/smithy-client").CommandImpl; + getEndpointParameterInstructions(): import("@smithy/middleware-endpoint").EndpointParameterInstructions; +}; +/** + *

The GetItem operation returns a set of attributes for the item with the + * given primary key. If there is no matching item, GetItem does not return + * any data and there will be no Item element in the response.

+ *

+ * GetItem provides an eventually consistent read by default. If your + * application requires a strongly consistent read, set ConsistentRead to + * true. Although a strongly consistent read might take more time than an + * eventually consistent read, it always returns the last updated value.

+ * @example + * Use a bare-bones client and the command you need to make an API call. + * ```javascript + * import { DynamoDBClient, GetItemCommand } from "@aws-sdk/client-dynamodb"; // ES Modules import + * // const { DynamoDBClient, GetItemCommand } = require("@aws-sdk/client-dynamodb"); // CommonJS import + * const client = new DynamoDBClient(config); + * const input = { // GetItemInput + * TableName: "STRING_VALUE", // required + * Key: { // Key // required + * "": { // AttributeValue Union: only one key present + * S: "STRING_VALUE", + * N: "STRING_VALUE", + * B: new Uint8Array(), // e.g. Buffer.from("") or new TextEncoder().encode("") + * SS: [ // StringSetAttributeValue + * "STRING_VALUE", + * ], + * NS: [ // NumberSetAttributeValue + * "STRING_VALUE", + * ], + * BS: [ // BinarySetAttributeValue + * new Uint8Array(), // e.g. Buffer.from("") or new TextEncoder().encode("") + * ], + * M: { // MapAttributeValue + * "": {// Union: only one key present + * S: "STRING_VALUE", + * N: "STRING_VALUE", + * B: new Uint8Array(), // e.g. Buffer.from("") or new TextEncoder().encode("") + * SS: [ + * "STRING_VALUE", + * ], + * NS: [ + * "STRING_VALUE", + * ], + * BS: [ + * new Uint8Array(), // e.g. 
Buffer.from("") or new TextEncoder().encode("") + * ], + * M: { + * "": "", + * }, + * L: [ // ListAttributeValue + * "", + * ], + * NULL: true || false, + * BOOL: true || false, + * }, + * }, + * L: [ + * "", + * ], + * NULL: true || false, + * BOOL: true || false, + * }, + * }, + * AttributesToGet: [ // AttributeNameList + * "STRING_VALUE", + * ], + * ConsistentRead: true || false, + * ReturnConsumedCapacity: "INDEXES" || "TOTAL" || "NONE", + * ProjectionExpression: "STRING_VALUE", + * ExpressionAttributeNames: { // ExpressionAttributeNameMap + * "": "STRING_VALUE", + * }, + * }; + * const command = new GetItemCommand(input); + * const response = await client.send(command); + * // { // GetItemOutput + * // Item: { // AttributeMap + * // "": { // AttributeValue Union: only one key present + * // S: "STRING_VALUE", + * // N: "STRING_VALUE", + * // B: new Uint8Array(), + * // SS: [ // StringSetAttributeValue + * // "STRING_VALUE", + * // ], + * // NS: [ // NumberSetAttributeValue + * // "STRING_VALUE", + * // ], + * // BS: [ // BinarySetAttributeValue + * // new Uint8Array(), + * // ], + * // M: { // MapAttributeValue + * // "": {// Union: only one key present + * // S: "STRING_VALUE", + * // N: "STRING_VALUE", + * // B: new Uint8Array(), + * // SS: [ + * // "STRING_VALUE", + * // ], + * // NS: [ + * // "STRING_VALUE", + * // ], + * // BS: [ + * // new Uint8Array(), + * // ], + * // M: { + * // "": "", + * // }, + * // L: [ // ListAttributeValue + * // "", + * // ], + * // NULL: true || false, + * // BOOL: true || false, + * // }, + * // }, + * // L: [ + * // "", + * // ], + * // NULL: true || false, + * // BOOL: true || false, + * // }, + * // }, + * // ConsumedCapacity: { // ConsumedCapacity + * // TableName: "STRING_VALUE", + * // CapacityUnits: Number("double"), + * // ReadCapacityUnits: Number("double"), + * // WriteCapacityUnits: Number("double"), + * // Table: { // Capacity + * // ReadCapacityUnits: Number("double"), + * // WriteCapacityUnits: 
Number("double"), + * // CapacityUnits: Number("double"), + * // }, + * // LocalSecondaryIndexes: { // SecondaryIndexesCapacityMap + * // "": { + * // ReadCapacityUnits: Number("double"), + * // WriteCapacityUnits: Number("double"), + * // CapacityUnits: Number("double"), + * // }, + * // }, + * // GlobalSecondaryIndexes: { + * // "": { + * // ReadCapacityUnits: Number("double"), + * // WriteCapacityUnits: Number("double"), + * // CapacityUnits: Number("double"), + * // }, + * // }, + * // }, + * // }; + * + * ``` + * + * @param GetItemCommandInput - {@link GetItemCommandInput} + * @returns {@link GetItemCommandOutput} + * @see {@link GetItemCommandInput} for command's `input` shape. + * @see {@link GetItemCommandOutput} for command's `response` shape. + * @see {@link DynamoDBClientResolvedConfig | config} for DynamoDBClient's `config` shape. + * + * @throws {@link InternalServerError} (server fault) + *

An error occurred on the server side.

+ * + * @throws {@link InvalidEndpointException} (client fault) + * + * @throws {@link ProvisionedThroughputExceededException} (client fault) + *

Your request rate is too high. The Amazon Web Services SDKs for DynamoDB + * automatically retry requests that receive this exception. Your request is eventually + * successful, unless your retry queue is too large to finish. Reduce the frequency of + * requests and use exponential backoff. For more information, go to Error Retries and Exponential Backoff in the Amazon DynamoDB Developer Guide.

+ * + * @throws {@link RequestLimitExceeded} (client fault) + *

Throughput exceeds the current throughput quota for your account. Please contact + * Amazon Web ServicesSupport to request a + * quota increase.

+ * + * @throws {@link ResourceNotFoundException} (client fault) + *

The operation tried to access a nonexistent table or index. The resource might not + * be specified correctly, or its status might not be ACTIVE.

+ * + * @throws {@link DynamoDBServiceException} + *

Base exception class for all service exceptions from DynamoDB service.

+ * + * + * @example To read an item from a table + * ```javascript + * // This example retrieves an item from the Music table. The table has a partition key and a sort key (Artist and SongTitle), so you must specify both of these attributes. + * const input = { + * Key: { + * Artist: { + * S: "Acme Band" + * }, + * SongTitle: { + * S: "Happy Day" + * } + * }, + * TableName: "Music" + * }; + * const command = new GetItemCommand(input); + * const response = await client.send(command); + * /* response is + * { + * Item: { + * AlbumTitle: { + * S: "Songs About Life" + * }, + * Artist: { + * S: "Acme Band" + * }, + * SongTitle: { + * S: "Happy Day" + * } + * } + * } + * *\/ + * ``` + * + * @public + */ +export declare class GetItemCommand extends GetItemCommand_base { + /** @internal type navigation helper, not in runtime. */ + protected static __types: { + api: { + input: GetItemInput; + output: GetItemOutput; + }; + sdk: { + input: GetItemCommandInput; + output: GetItemCommandOutput; + }; + }; +} diff --git a/amplify/functions/downloadDocument/node_modules/@aws-sdk/client-dynamodb/dist-types/commands/GetResourcePolicyCommand.d.ts b/amplify/functions/downloadDocument/node_modules/@aws-sdk/client-dynamodb/dist-types/commands/GetResourcePolicyCommand.d.ts new file mode 100644 index 0000000..9544c1a --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@aws-sdk/client-dynamodb/dist-types/commands/GetResourcePolicyCommand.d.ts @@ -0,0 +1,121 @@ +import { Command as $Command } from "@smithy/smithy-client"; +import { MetadataBearer as __MetadataBearer } from "@smithy/types"; +import { DynamoDBClientResolvedConfig, ServiceInputTypes, ServiceOutputTypes } from "../DynamoDBClient"; +import { GetResourcePolicyInput, GetResourcePolicyOutput } from "../models/models_0"; +/** + * @public + */ +export type { __MetadataBearer }; +export { $Command }; +/** + * @public + * + * The input for {@link GetResourcePolicyCommand}. 
+ */ +export interface GetResourcePolicyCommandInput extends GetResourcePolicyInput { +} +/** + * @public + * + * The output of {@link GetResourcePolicyCommand}. + */ +export interface GetResourcePolicyCommandOutput extends GetResourcePolicyOutput, __MetadataBearer { +} +declare const GetResourcePolicyCommand_base: { + new (input: GetResourcePolicyCommandInput): import("@smithy/smithy-client").CommandImpl; + new (__0_0: GetResourcePolicyCommandInput): import("@smithy/smithy-client").CommandImpl; + getEndpointParameterInstructions(): import("@smithy/middleware-endpoint").EndpointParameterInstructions; +}; +/** + *

Returns the resource-based policy document attached to the resource, which can be a + * table or stream, in JSON format.

+ *

+ * GetResourcePolicy follows an + * eventually consistent + * model. The following list + * describes the outcomes when you issue the GetResourcePolicy request + * immediately after issuing another request:

+ *
    + *
  • + *

    If you issue a GetResourcePolicy request immediately after a + * PutResourcePolicy request, DynamoDB might return a + * PolicyNotFoundException.

    + *
  • + *
  • + *

    If you issue a GetResourcePolicyrequest immediately after a + * DeleteResourcePolicy request, DynamoDB might return + * the policy that was present before the deletion request.

    + *
  • + *
  • + *

    If you issue a GetResourcePolicy request immediately after a + * CreateTable request, which includes a resource-based policy, + * DynamoDB might return a ResourceNotFoundException or + * a PolicyNotFoundException.

    + *
  • + *
+ *

Because GetResourcePolicy uses an eventually + * consistent query, the metadata for your policy or table might not be + * available at that moment. Wait for a few seconds, and then retry the + * GetResourcePolicy request.

+ *

After a GetResourcePolicy request returns a policy created using the + * PutResourcePolicy request, the policy will be applied in the + * authorization of requests to the resource. Because this process is eventually + * consistent, it will take some time to apply the policy to all requests to a resource. + * Policies that you attach while creating a table using the CreateTable + * request will always be applied to all requests for that table.

+ * @example + * Use a bare-bones client and the command you need to make an API call. + * ```javascript + * import { DynamoDBClient, GetResourcePolicyCommand } from "@aws-sdk/client-dynamodb"; // ES Modules import + * // const { DynamoDBClient, GetResourcePolicyCommand } = require("@aws-sdk/client-dynamodb"); // CommonJS import + * const client = new DynamoDBClient(config); + * const input = { // GetResourcePolicyInput + * ResourceArn: "STRING_VALUE", // required + * }; + * const command = new GetResourcePolicyCommand(input); + * const response = await client.send(command); + * // { // GetResourcePolicyOutput + * // Policy: "STRING_VALUE", + * // RevisionId: "STRING_VALUE", + * // }; + * + * ``` + * + * @param GetResourcePolicyCommandInput - {@link GetResourcePolicyCommandInput} + * @returns {@link GetResourcePolicyCommandOutput} + * @see {@link GetResourcePolicyCommandInput} for command's `input` shape. + * @see {@link GetResourcePolicyCommandOutput} for command's `response` shape. + * @see {@link DynamoDBClientResolvedConfig | config} for DynamoDBClient's `config` shape. + * + * @throws {@link InternalServerError} (server fault) + *

An error occurred on the server side.

+ * + * @throws {@link InvalidEndpointException} (client fault) + * + * @throws {@link PolicyNotFoundException} (client fault) + *

The operation tried to access a nonexistent resource-based policy.

+ *

If you specified an ExpectedRevisionId, it's possible that a policy is present for the resource but its revision ID didn't match the expected value.

+ * + * @throws {@link ResourceNotFoundException} (client fault) + *

The operation tried to access a nonexistent table or index. The resource might not + * be specified correctly, or its status might not be ACTIVE.

+ * + * @throws {@link DynamoDBServiceException} + *

Base exception class for all service exceptions from DynamoDB service.

+ * + * + * @public + */ +export declare class GetResourcePolicyCommand extends GetResourcePolicyCommand_base { + /** @internal type navigation helper, not in runtime. */ + protected static __types: { + api: { + input: GetResourcePolicyInput; + output: GetResourcePolicyOutput; + }; + sdk: { + input: GetResourcePolicyCommandInput; + output: GetResourcePolicyCommandOutput; + }; + }; +} diff --git a/amplify/functions/downloadDocument/node_modules/@aws-sdk/client-dynamodb/dist-types/commands/ImportTableCommand.d.ts b/amplify/functions/downloadDocument/node_modules/@aws-sdk/client-dynamodb/dist-types/commands/ImportTableCommand.d.ts new file mode 100644 index 0000000..48a37a1 --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@aws-sdk/client-dynamodb/dist-types/commands/ImportTableCommand.d.ts @@ -0,0 +1,271 @@ +import { Command as $Command } from "@smithy/smithy-client"; +import { MetadataBearer as __MetadataBearer } from "@smithy/types"; +import { DynamoDBClientResolvedConfig, ServiceInputTypes, ServiceOutputTypes } from "../DynamoDBClient"; +import { ImportTableInput, ImportTableOutput } from "../models/models_0"; +/** + * @public + */ +export type { __MetadataBearer }; +export { $Command }; +/** + * @public + * + * The input for {@link ImportTableCommand}. + */ +export interface ImportTableCommandInput extends ImportTableInput { +} +/** + * @public + * + * The output of {@link ImportTableCommand}. + */ +export interface ImportTableCommandOutput extends ImportTableOutput, __MetadataBearer { +} +declare const ImportTableCommand_base: { + new (input: ImportTableCommandInput): import("@smithy/smithy-client").CommandImpl; + new (__0_0: ImportTableCommandInput): import("@smithy/smithy-client").CommandImpl; + getEndpointParameterInstructions(): import("@smithy/middleware-endpoint").EndpointParameterInstructions; +}; +/** + *

Imports table data from an S3 bucket.

+ * @example + * Use a bare-bones client and the command you need to make an API call. + * ```javascript + * import { DynamoDBClient, ImportTableCommand } from "@aws-sdk/client-dynamodb"; // ES Modules import + * // const { DynamoDBClient, ImportTableCommand } = require("@aws-sdk/client-dynamodb"); // CommonJS import + * const client = new DynamoDBClient(config); + * const input = { // ImportTableInput + * ClientToken: "STRING_VALUE", + * S3BucketSource: { // S3BucketSource + * S3BucketOwner: "STRING_VALUE", + * S3Bucket: "STRING_VALUE", // required + * S3KeyPrefix: "STRING_VALUE", + * }, + * InputFormat: "DYNAMODB_JSON" || "ION" || "CSV", // required + * InputFormatOptions: { // InputFormatOptions + * Csv: { // CsvOptions + * Delimiter: "STRING_VALUE", + * HeaderList: [ // CsvHeaderList + * "STRING_VALUE", + * ], + * }, + * }, + * InputCompressionType: "GZIP" || "ZSTD" || "NONE", + * TableCreationParameters: { // TableCreationParameters + * TableName: "STRING_VALUE", // required + * AttributeDefinitions: [ // AttributeDefinitions // required + * { // AttributeDefinition + * AttributeName: "STRING_VALUE", // required + * AttributeType: "S" || "N" || "B", // required + * }, + * ], + * KeySchema: [ // KeySchema // required + * { // KeySchemaElement + * AttributeName: "STRING_VALUE", // required + * KeyType: "HASH" || "RANGE", // required + * }, + * ], + * BillingMode: "PROVISIONED" || "PAY_PER_REQUEST", + * ProvisionedThroughput: { // ProvisionedThroughput + * ReadCapacityUnits: Number("long"), // required + * WriteCapacityUnits: Number("long"), // required + * }, + * OnDemandThroughput: { // OnDemandThroughput + * MaxReadRequestUnits: Number("long"), + * MaxWriteRequestUnits: Number("long"), + * }, + * SSESpecification: { // SSESpecification + * Enabled: true || false, + * SSEType: "AES256" || "KMS", + * KMSMasterKeyId: "STRING_VALUE", + * }, + * GlobalSecondaryIndexes: [ // GlobalSecondaryIndexList + * { // GlobalSecondaryIndex + * IndexName: "STRING_VALUE", // 
required + * KeySchema: [ // required + * { + * AttributeName: "STRING_VALUE", // required + * KeyType: "HASH" || "RANGE", // required + * }, + * ], + * Projection: { // Projection + * ProjectionType: "ALL" || "KEYS_ONLY" || "INCLUDE", + * NonKeyAttributes: [ // NonKeyAttributeNameList + * "STRING_VALUE", + * ], + * }, + * ProvisionedThroughput: { + * ReadCapacityUnits: Number("long"), // required + * WriteCapacityUnits: Number("long"), // required + * }, + * OnDemandThroughput: { + * MaxReadRequestUnits: Number("long"), + * MaxWriteRequestUnits: Number("long"), + * }, + * WarmThroughput: { // WarmThroughput + * ReadUnitsPerSecond: Number("long"), + * WriteUnitsPerSecond: Number("long"), + * }, + * }, + * ], + * }, + * }; + * const command = new ImportTableCommand(input); + * const response = await client.send(command); + * // { // ImportTableOutput + * // ImportTableDescription: { // ImportTableDescription + * // ImportArn: "STRING_VALUE", + * // ImportStatus: "IN_PROGRESS" || "COMPLETED" || "CANCELLING" || "CANCELLED" || "FAILED", + * // TableArn: "STRING_VALUE", + * // TableId: "STRING_VALUE", + * // ClientToken: "STRING_VALUE", + * // S3BucketSource: { // S3BucketSource + * // S3BucketOwner: "STRING_VALUE", + * // S3Bucket: "STRING_VALUE", // required + * // S3KeyPrefix: "STRING_VALUE", + * // }, + * // ErrorCount: Number("long"), + * // CloudWatchLogGroupArn: "STRING_VALUE", + * // InputFormat: "DYNAMODB_JSON" || "ION" || "CSV", + * // InputFormatOptions: { // InputFormatOptions + * // Csv: { // CsvOptions + * // Delimiter: "STRING_VALUE", + * // HeaderList: [ // CsvHeaderList + * // "STRING_VALUE", + * // ], + * // }, + * // }, + * // InputCompressionType: "GZIP" || "ZSTD" || "NONE", + * // TableCreationParameters: { // TableCreationParameters + * // TableName: "STRING_VALUE", // required + * // AttributeDefinitions: [ // AttributeDefinitions // required + * // { // AttributeDefinition + * // AttributeName: "STRING_VALUE", // required + * // AttributeType: 
"S" || "N" || "B", // required + * // }, + * // ], + * // KeySchema: [ // KeySchema // required + * // { // KeySchemaElement + * // AttributeName: "STRING_VALUE", // required + * // KeyType: "HASH" || "RANGE", // required + * // }, + * // ], + * // BillingMode: "PROVISIONED" || "PAY_PER_REQUEST", + * // ProvisionedThroughput: { // ProvisionedThroughput + * // ReadCapacityUnits: Number("long"), // required + * // WriteCapacityUnits: Number("long"), // required + * // }, + * // OnDemandThroughput: { // OnDemandThroughput + * // MaxReadRequestUnits: Number("long"), + * // MaxWriteRequestUnits: Number("long"), + * // }, + * // SSESpecification: { // SSESpecification + * // Enabled: true || false, + * // SSEType: "AES256" || "KMS", + * // KMSMasterKeyId: "STRING_VALUE", + * // }, + * // GlobalSecondaryIndexes: [ // GlobalSecondaryIndexList + * // { // GlobalSecondaryIndex + * // IndexName: "STRING_VALUE", // required + * // KeySchema: [ // required + * // { + * // AttributeName: "STRING_VALUE", // required + * // KeyType: "HASH" || "RANGE", // required + * // }, + * // ], + * // Projection: { // Projection + * // ProjectionType: "ALL" || "KEYS_ONLY" || "INCLUDE", + * // NonKeyAttributes: [ // NonKeyAttributeNameList + * // "STRING_VALUE", + * // ], + * // }, + * // ProvisionedThroughput: { + * // ReadCapacityUnits: Number("long"), // required + * // WriteCapacityUnits: Number("long"), // required + * // }, + * // OnDemandThroughput: { + * // MaxReadRequestUnits: Number("long"), + * // MaxWriteRequestUnits: Number("long"), + * // }, + * // WarmThroughput: { // WarmThroughput + * // ReadUnitsPerSecond: Number("long"), + * // WriteUnitsPerSecond: Number("long"), + * // }, + * // }, + * // ], + * // }, + * // StartTime: new Date("TIMESTAMP"), + * // EndTime: new Date("TIMESTAMP"), + * // ProcessedSizeBytes: Number("long"), + * // ProcessedItemCount: Number("long"), + * // ImportedItemCount: Number("long"), + * // FailureCode: "STRING_VALUE", + * // FailureMessage: 
"STRING_VALUE", + * // }, + * // }; + * + * ``` + * + * @param ImportTableCommandInput - {@link ImportTableCommandInput} + * @returns {@link ImportTableCommandOutput} + * @see {@link ImportTableCommandInput} for command's `input` shape. + * @see {@link ImportTableCommandOutput} for command's `response` shape. + * @see {@link DynamoDBClientResolvedConfig | config} for DynamoDBClient's `config` shape. + * + * @throws {@link ImportConflictException} (client fault) + *

+ * There was a conflict when importing from the specified S3 source. + * This can occur when the current import conflicts with a previous import request + * that had the same client token. + *

+ * + * @throws {@link LimitExceededException} (client fault) + *

There is no limit to the number of daily on-demand backups that can be taken.

+ *

For most purposes, up to 500 simultaneous table operations are allowed per account. These operations + * include CreateTable, UpdateTable, + * DeleteTable,UpdateTimeToLive, + * RestoreTableFromBackup, and RestoreTableToPointInTime.

+ *

When you are creating a table with one or more secondary + * indexes, you can have up to 250 such requests running at a time. However, if the table or + * index specifications are complex, then DynamoDB might temporarily reduce the number + * of concurrent operations.

+ *

When importing into DynamoDB, up to 50 simultaneous import table operations are allowed per account.

+ *

There is a soft account quota of 2,500 tables.

+ *

GetRecords was called with a value of more than 1000 for the limit request parameter.

+ *

More than 2 processes are reading from the same streams shard at the same time. Exceeding + * this limit may result in request throttling.

+ * + * @throws {@link ResourceInUseException} (client fault) + *

The operation conflicts with the resource's availability. For example:

+ *
    + *
  • + *

    You attempted to recreate an existing table.

    + *
  • + *
  • + *

    You tried to delete a table currently in the CREATING state.

    + *
  • + *
  • + *

    You tried to update a resource that was already being updated.

    + *
  • + *
+ *

When appropriate, wait for the ongoing update to complete and attempt the request again.

+ * + * @throws {@link DynamoDBServiceException} + *

Base exception class for all service exceptions from DynamoDB service.

+ * + * + * @public + */ +export declare class ImportTableCommand extends ImportTableCommand_base { + /** @internal type navigation helper, not in runtime. */ + protected static __types: { + api: { + input: ImportTableInput; + output: ImportTableOutput; + }; + sdk: { + input: ImportTableCommandInput; + output: ImportTableCommandOutput; + }; + }; +} diff --git a/amplify/functions/downloadDocument/node_modules/@aws-sdk/client-dynamodb/dist-types/commands/ListBackupsCommand.d.ts b/amplify/functions/downloadDocument/node_modules/@aws-sdk/client-dynamodb/dist-types/commands/ListBackupsCommand.d.ts new file mode 100644 index 0000000..50c70da --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@aws-sdk/client-dynamodb/dist-types/commands/ListBackupsCommand.d.ts @@ -0,0 +1,107 @@ +import { Command as $Command } from "@smithy/smithy-client"; +import { MetadataBearer as __MetadataBearer } from "@smithy/types"; +import { DynamoDBClientResolvedConfig, ServiceInputTypes, ServiceOutputTypes } from "../DynamoDBClient"; +import { ListBackupsInput, ListBackupsOutput } from "../models/models_0"; +/** + * @public + */ +export type { __MetadataBearer }; +export { $Command }; +/** + * @public + * + * The input for {@link ListBackupsCommand}. + */ +export interface ListBackupsCommandInput extends ListBackupsInput { +} +/** + * @public + * + * The output of {@link ListBackupsCommand}. + */ +export interface ListBackupsCommandOutput extends ListBackupsOutput, __MetadataBearer { +} +declare const ListBackupsCommand_base: { + new (input: ListBackupsCommandInput): import("@smithy/smithy-client").CommandImpl; + new (...[input]: [] | [ListBackupsCommandInput]): import("@smithy/smithy-client").CommandImpl; + getEndpointParameterInstructions(): import("@smithy/middleware-endpoint").EndpointParameterInstructions; +}; +/** + *

List DynamoDB backups that are associated with an Amazon Web Services account and + * weren't made with Amazon Web Services Backup. To list these backups for a given table, + * specify TableName. ListBackups returns a paginated list of + * results with at most 1 MB worth of items in a page. You can also specify a maximum + * number of entries to be returned in a page.

+ *

In the request, start time is inclusive, but end time is exclusive. Note that these + * boundaries are for the time at which the original backup was requested.

+ *

You can call ListBackups a maximum of five times per second.

+ *

If you want to retrieve the complete list of backups made with Amazon Web Services + * Backup, use the Amazon Web Services Backup + * list API. + *

+ * @example + * Use a bare-bones client and the command you need to make an API call. + * ```javascript + * import { DynamoDBClient, ListBackupsCommand } from "@aws-sdk/client-dynamodb"; // ES Modules import + * // const { DynamoDBClient, ListBackupsCommand } = require("@aws-sdk/client-dynamodb"); // CommonJS import + * const client = new DynamoDBClient(config); + * const input = { // ListBackupsInput + * TableName: "STRING_VALUE", + * Limit: Number("int"), + * TimeRangeLowerBound: new Date("TIMESTAMP"), + * TimeRangeUpperBound: new Date("TIMESTAMP"), + * ExclusiveStartBackupArn: "STRING_VALUE", + * BackupType: "USER" || "SYSTEM" || "AWS_BACKUP" || "ALL", + * }; + * const command = new ListBackupsCommand(input); + * const response = await client.send(command); + * // { // ListBackupsOutput + * // BackupSummaries: [ // BackupSummaries + * // { // BackupSummary + * // TableName: "STRING_VALUE", + * // TableId: "STRING_VALUE", + * // TableArn: "STRING_VALUE", + * // BackupArn: "STRING_VALUE", + * // BackupName: "STRING_VALUE", + * // BackupCreationDateTime: new Date("TIMESTAMP"), + * // BackupExpiryDateTime: new Date("TIMESTAMP"), + * // BackupStatus: "CREATING" || "DELETED" || "AVAILABLE", + * // BackupType: "USER" || "SYSTEM" || "AWS_BACKUP", + * // BackupSizeBytes: Number("long"), + * // }, + * // ], + * // LastEvaluatedBackupArn: "STRING_VALUE", + * // }; + * + * ``` + * + * @param ListBackupsCommandInput - {@link ListBackupsCommandInput} + * @returns {@link ListBackupsCommandOutput} + * @see {@link ListBackupsCommandInput} for command's `input` shape. + * @see {@link ListBackupsCommandOutput} for command's `response` shape. + * @see {@link DynamoDBClientResolvedConfig | config} for DynamoDBClient's `config` shape. + * + * @throws {@link InternalServerError} (server fault) + *

An error occurred on the server side.

+ * + * @throws {@link InvalidEndpointException} (client fault) + * + * @throws {@link DynamoDBServiceException} + *

Base exception class for all service exceptions from DynamoDB service.

+ * + * + * @public + */ +export declare class ListBackupsCommand extends ListBackupsCommand_base { + /** @internal type navigation helper, not in runtime. */ + protected static __types: { + api: { + input: ListBackupsInput; + output: ListBackupsOutput; + }; + sdk: { + input: ListBackupsCommandInput; + output: ListBackupsCommandOutput; + }; + }; +} diff --git a/amplify/functions/downloadDocument/node_modules/@aws-sdk/client-dynamodb/dist-types/commands/ListContributorInsightsCommand.d.ts b/amplify/functions/downloadDocument/node_modules/@aws-sdk/client-dynamodb/dist-types/commands/ListContributorInsightsCommand.d.ts new file mode 100644 index 0000000..9d508c7 --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@aws-sdk/client-dynamodb/dist-types/commands/ListContributorInsightsCommand.d.ts @@ -0,0 +1,89 @@ +import { Command as $Command } from "@smithy/smithy-client"; +import { MetadataBearer as __MetadataBearer } from "@smithy/types"; +import { DynamoDBClientResolvedConfig, ServiceInputTypes, ServiceOutputTypes } from "../DynamoDBClient"; +import { ListContributorInsightsInput, ListContributorInsightsOutput } from "../models/models_0"; +/** + * @public + */ +export type { __MetadataBearer }; +export { $Command }; +/** + * @public + * + * The input for {@link ListContributorInsightsCommand}. + */ +export interface ListContributorInsightsCommandInput extends ListContributorInsightsInput { +} +/** + * @public + * + * The output of {@link ListContributorInsightsCommand}. 
+ */ +export interface ListContributorInsightsCommandOutput extends ListContributorInsightsOutput, __MetadataBearer { +} +declare const ListContributorInsightsCommand_base: { + new (input: ListContributorInsightsCommandInput): import("@smithy/smithy-client").CommandImpl; + new (...[input]: [] | [ListContributorInsightsCommandInput]): import("@smithy/smithy-client").CommandImpl; + getEndpointParameterInstructions(): import("@smithy/middleware-endpoint").EndpointParameterInstructions; +}; +/** + *

Returns a list of ContributorInsightsSummary for a table and all its global secondary + * indexes.

+ * @example + * Use a bare-bones client and the command you need to make an API call. + * ```javascript + * import { DynamoDBClient, ListContributorInsightsCommand } from "@aws-sdk/client-dynamodb"; // ES Modules import + * // const { DynamoDBClient, ListContributorInsightsCommand } = require("@aws-sdk/client-dynamodb"); // CommonJS import + * const client = new DynamoDBClient(config); + * const input = { // ListContributorInsightsInput + * TableName: "STRING_VALUE", + * NextToken: "STRING_VALUE", + * MaxResults: Number("int"), + * }; + * const command = new ListContributorInsightsCommand(input); + * const response = await client.send(command); + * // { // ListContributorInsightsOutput + * // ContributorInsightsSummaries: [ // ContributorInsightsSummaries + * // { // ContributorInsightsSummary + * // TableName: "STRING_VALUE", + * // IndexName: "STRING_VALUE", + * // ContributorInsightsStatus: "ENABLING" || "ENABLED" || "DISABLING" || "DISABLED" || "FAILED", + * // }, + * // ], + * // NextToken: "STRING_VALUE", + * // }; + * + * ``` + * + * @param ListContributorInsightsCommandInput - {@link ListContributorInsightsCommandInput} + * @returns {@link ListContributorInsightsCommandOutput} + * @see {@link ListContributorInsightsCommandInput} for command's `input` shape. + * @see {@link ListContributorInsightsCommandOutput} for command's `response` shape. + * @see {@link DynamoDBClientResolvedConfig | config} for DynamoDBClient's `config` shape. + * + * @throws {@link InternalServerError} (server fault) + *

An error occurred on the server side.

+ * + * @throws {@link ResourceNotFoundException} (client fault) + *

The operation tried to access a nonexistent table or index. The resource might not + * be specified correctly, or its status might not be ACTIVE.

+ * + * @throws {@link DynamoDBServiceException} + *

Base exception class for all service exceptions from DynamoDB service.

+ * + * + * @public + */ +export declare class ListContributorInsightsCommand extends ListContributorInsightsCommand_base { + /** @internal type navigation helper, not in runtime. */ + protected static __types: { + api: { + input: ListContributorInsightsInput; + output: ListContributorInsightsOutput; + }; + sdk: { + input: ListContributorInsightsCommandInput; + output: ListContributorInsightsCommandOutput; + }; + }; +} diff --git a/amplify/functions/downloadDocument/node_modules/@aws-sdk/client-dynamodb/dist-types/commands/ListExportsCommand.d.ts b/amplify/functions/downloadDocument/node_modules/@aws-sdk/client-dynamodb/dist-types/commands/ListExportsCommand.d.ts new file mode 100644 index 0000000..83f5476 --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@aws-sdk/client-dynamodb/dist-types/commands/ListExportsCommand.d.ts @@ -0,0 +1,100 @@ +import { Command as $Command } from "@smithy/smithy-client"; +import { MetadataBearer as __MetadataBearer } from "@smithy/types"; +import { DynamoDBClientResolvedConfig, ServiceInputTypes, ServiceOutputTypes } from "../DynamoDBClient"; +import { ListExportsInput, ListExportsOutput } from "../models/models_0"; +/** + * @public + */ +export type { __MetadataBearer }; +export { $Command }; +/** + * @public + * + * The input for {@link ListExportsCommand}. + */ +export interface ListExportsCommandInput extends ListExportsInput { +} +/** + * @public + * + * The output of {@link ListExportsCommand}. + */ +export interface ListExportsCommandOutput extends ListExportsOutput, __MetadataBearer { +} +declare const ListExportsCommand_base: { + new (input: ListExportsCommandInput): import("@smithy/smithy-client").CommandImpl; + new (...[input]: [] | [ListExportsCommandInput]): import("@smithy/smithy-client").CommandImpl; + getEndpointParameterInstructions(): import("@smithy/middleware-endpoint").EndpointParameterInstructions; +}; +/** + *

Lists completed exports within the past 90 days.

+ * @example + * Use a bare-bones client and the command you need to make an API call. + * ```javascript + * import { DynamoDBClient, ListExportsCommand } from "@aws-sdk/client-dynamodb"; // ES Modules import + * // const { DynamoDBClient, ListExportsCommand } = require("@aws-sdk/client-dynamodb"); // CommonJS import + * const client = new DynamoDBClient(config); + * const input = { // ListExportsInput + * TableArn: "STRING_VALUE", + * MaxResults: Number("int"), + * NextToken: "STRING_VALUE", + * }; + * const command = new ListExportsCommand(input); + * const response = await client.send(command); + * // { // ListExportsOutput + * // ExportSummaries: [ // ExportSummaries + * // { // ExportSummary + * // ExportArn: "STRING_VALUE", + * // ExportStatus: "IN_PROGRESS" || "COMPLETED" || "FAILED", + * // ExportType: "FULL_EXPORT" || "INCREMENTAL_EXPORT", + * // }, + * // ], + * // NextToken: "STRING_VALUE", + * // }; + * + * ``` + * + * @param ListExportsCommandInput - {@link ListExportsCommandInput} + * @returns {@link ListExportsCommandOutput} + * @see {@link ListExportsCommandInput} for command's `input` shape. + * @see {@link ListExportsCommandOutput} for command's `response` shape. + * @see {@link DynamoDBClientResolvedConfig | config} for DynamoDBClient's `config` shape. + * + * @throws {@link InternalServerError} (server fault) + *

An error occurred on the server side.

+ * + * @throws {@link LimitExceededException} (client fault) + *

There is no limit to the number of daily on-demand backups that can be taken.

+ *

For most purposes, up to 500 simultaneous table operations are allowed per account. These operations + * include CreateTable, UpdateTable, + * DeleteTable,UpdateTimeToLive, + * RestoreTableFromBackup, and RestoreTableToPointInTime.

+ *

When you are creating a table with one or more secondary + * indexes, you can have up to 250 such requests running at a time. However, if the table or + * index specifications are complex, then DynamoDB might temporarily reduce the number + * of concurrent operations.

+ *

When importing into DynamoDB, up to 50 simultaneous import table operations are allowed per account.

+ *

There is a soft account quota of 2,500 tables.

+ *

GetRecords was called with a value of more than 1000 for the limit request parameter.

+ *

More than 2 processes are reading from the same streams shard at the same time. Exceeding + * this limit may result in request throttling.

+ * + * @throws {@link DynamoDBServiceException} + *

Base exception class for all service exceptions from DynamoDB service.

+ * + * + * @public + */ +export declare class ListExportsCommand extends ListExportsCommand_base { + /** @internal type navigation helper, not in runtime. */ + protected static __types: { + api: { + input: ListExportsInput; + output: ListExportsOutput; + }; + sdk: { + input: ListExportsCommandInput; + output: ListExportsCommandOutput; + }; + }; +} diff --git a/amplify/functions/downloadDocument/node_modules/@aws-sdk/client-dynamodb/dist-types/commands/ListGlobalTablesCommand.d.ts b/amplify/functions/downloadDocument/node_modules/@aws-sdk/client-dynamodb/dist-types/commands/ListGlobalTablesCommand.d.ts new file mode 100644 index 0000000..530e8e4 --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@aws-sdk/client-dynamodb/dist-types/commands/ListGlobalTablesCommand.d.ts @@ -0,0 +1,93 @@ +import { Command as $Command } from "@smithy/smithy-client"; +import { MetadataBearer as __MetadataBearer } from "@smithy/types"; +import { DynamoDBClientResolvedConfig, ServiceInputTypes, ServiceOutputTypes } from "../DynamoDBClient"; +import { ListGlobalTablesInput, ListGlobalTablesOutput } from "../models/models_0"; +/** + * @public + */ +export type { __MetadataBearer }; +export { $Command }; +/** + * @public + * + * The input for {@link ListGlobalTablesCommand}. + */ +export interface ListGlobalTablesCommandInput extends ListGlobalTablesInput { +} +/** + * @public + * + * The output of {@link ListGlobalTablesCommand}. + */ +export interface ListGlobalTablesCommandOutput extends ListGlobalTablesOutput, __MetadataBearer { +} +declare const ListGlobalTablesCommand_base: { + new (input: ListGlobalTablesCommandInput): import("@smithy/smithy-client").CommandImpl; + new (...[input]: [] | [ListGlobalTablesCommandInput]): import("@smithy/smithy-client").CommandImpl; + getEndpointParameterInstructions(): import("@smithy/middleware-endpoint").EndpointParameterInstructions; +}; +/** + *

Lists all global tables that have a replica in the specified Region.

+ * + *

This documentation is for version 2017.11.29 (Legacy) of global tables, which should be avoided for new global tables. Customers should use Global Tables version 2019.11.21 (Current) when possible, because it provides greater flexibility, higher efficiency, and consumes less write capacity than 2017.11.29 (Legacy).

+ *

To determine which version you're using, see Determining the global table version you are using. To update existing global tables from version 2017.11.29 (Legacy) to version 2019.11.21 (Current), see Upgrading global tables.

+ *
+ * @example + * Use a bare-bones client and the command you need to make an API call. + * ```javascript + * import { DynamoDBClient, ListGlobalTablesCommand } from "@aws-sdk/client-dynamodb"; // ES Modules import + * // const { DynamoDBClient, ListGlobalTablesCommand } = require("@aws-sdk/client-dynamodb"); // CommonJS import + * const client = new DynamoDBClient(config); + * const input = { // ListGlobalTablesInput + * ExclusiveStartGlobalTableName: "STRING_VALUE", + * Limit: Number("int"), + * RegionName: "STRING_VALUE", + * }; + * const command = new ListGlobalTablesCommand(input); + * const response = await client.send(command); + * // { // ListGlobalTablesOutput + * // GlobalTables: [ // GlobalTableList + * // { // GlobalTable + * // GlobalTableName: "STRING_VALUE", + * // ReplicationGroup: [ // ReplicaList + * // { // Replica + * // RegionName: "STRING_VALUE", + * // }, + * // ], + * // }, + * // ], + * // LastEvaluatedGlobalTableName: "STRING_VALUE", + * // }; + * + * ``` + * + * @param ListGlobalTablesCommandInput - {@link ListGlobalTablesCommandInput} + * @returns {@link ListGlobalTablesCommandOutput} + * @see {@link ListGlobalTablesCommandInput} for command's `input` shape. + * @see {@link ListGlobalTablesCommandOutput} for command's `response` shape. + * @see {@link DynamoDBClientResolvedConfig | config} for DynamoDBClient's `config` shape. + * + * @throws {@link InternalServerError} (server fault) + *

An error occurred on the server side.

+ * + * @throws {@link InvalidEndpointException} (client fault) + * + * @throws {@link DynamoDBServiceException} + *

Base exception class for all service exceptions from DynamoDB service.

+ * + * + * @public + */ +export declare class ListGlobalTablesCommand extends ListGlobalTablesCommand_base { + /** @internal type navigation helper, not in runtime. */ + protected static __types: { + api: { + input: ListGlobalTablesInput; + output: ListGlobalTablesOutput; + }; + sdk: { + input: ListGlobalTablesCommandInput; + output: ListGlobalTablesCommandOutput; + }; + }; +} diff --git a/amplify/functions/downloadDocument/node_modules/@aws-sdk/client-dynamodb/dist-types/commands/ListImportsCommand.d.ts b/amplify/functions/downloadDocument/node_modules/@aws-sdk/client-dynamodb/dist-types/commands/ListImportsCommand.d.ts new file mode 100644 index 0000000..be76088 --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@aws-sdk/client-dynamodb/dist-types/commands/ListImportsCommand.d.ts @@ -0,0 +1,106 @@ +import { Command as $Command } from "@smithy/smithy-client"; +import { MetadataBearer as __MetadataBearer } from "@smithy/types"; +import { DynamoDBClientResolvedConfig, ServiceInputTypes, ServiceOutputTypes } from "../DynamoDBClient"; +import { ListImportsInput, ListImportsOutput } from "../models/models_0"; +/** + * @public + */ +export type { __MetadataBearer }; +export { $Command }; +/** + * @public + * + * The input for {@link ListImportsCommand}. + */ +export interface ListImportsCommandInput extends ListImportsInput { +} +/** + * @public + * + * The output of {@link ListImportsCommand}. + */ +export interface ListImportsCommandOutput extends ListImportsOutput, __MetadataBearer { +} +declare const ListImportsCommand_base: { + new (input: ListImportsCommandInput): import("@smithy/smithy-client").CommandImpl; + new (...[input]: [] | [ListImportsCommandInput]): import("@smithy/smithy-client").CommandImpl; + getEndpointParameterInstructions(): import("@smithy/middleware-endpoint").EndpointParameterInstructions; +}; +/** + *

Lists completed imports within the past 90 days.

+ * @example + * Use a bare-bones client and the command you need to make an API call. + * ```javascript + * import { DynamoDBClient, ListImportsCommand } from "@aws-sdk/client-dynamodb"; // ES Modules import + * // const { DynamoDBClient, ListImportsCommand } = require("@aws-sdk/client-dynamodb"); // CommonJS import + * const client = new DynamoDBClient(config); + * const input = { // ListImportsInput + * TableArn: "STRING_VALUE", + * PageSize: Number("int"), + * NextToken: "STRING_VALUE", + * }; + * const command = new ListImportsCommand(input); + * const response = await client.send(command); + * // { // ListImportsOutput + * // ImportSummaryList: [ // ImportSummaryList + * // { // ImportSummary + * // ImportArn: "STRING_VALUE", + * // ImportStatus: "IN_PROGRESS" || "COMPLETED" || "CANCELLING" || "CANCELLED" || "FAILED", + * // TableArn: "STRING_VALUE", + * // S3BucketSource: { // S3BucketSource + * // S3BucketOwner: "STRING_VALUE", + * // S3Bucket: "STRING_VALUE", // required + * // S3KeyPrefix: "STRING_VALUE", + * // }, + * // CloudWatchLogGroupArn: "STRING_VALUE", + * // InputFormat: "DYNAMODB_JSON" || "ION" || "CSV", + * // StartTime: new Date("TIMESTAMP"), + * // EndTime: new Date("TIMESTAMP"), + * // }, + * // ], + * // NextToken: "STRING_VALUE", + * // }; + * + * ``` + * + * @param ListImportsCommandInput - {@link ListImportsCommandInput} + * @returns {@link ListImportsCommandOutput} + * @see {@link ListImportsCommandInput} for command's `input` shape. + * @see {@link ListImportsCommandOutput} for command's `response` shape. + * @see {@link DynamoDBClientResolvedConfig | config} for DynamoDBClient's `config` shape. + * + * @throws {@link LimitExceededException} (client fault) + *

There is no limit to the number of daily on-demand backups that can be taken.

+ *

For most purposes, up to 500 simultaneous table operations are allowed per account. These operations + * include CreateTable, UpdateTable, + * DeleteTable,UpdateTimeToLive, + * RestoreTableFromBackup, and RestoreTableToPointInTime.

+ *

When you are creating a table with one or more secondary + * indexes, you can have up to 250 such requests running at a time. However, if the table or + * index specifications are complex, then DynamoDB might temporarily reduce the number + * of concurrent operations.

+ *

When importing into DynamoDB, up to 50 simultaneous import table operations are allowed per account.

+ *

There is a soft account quota of 2,500 tables.

+ *

GetRecords was called with a value of more than 1000 for the limit request parameter.

+ *

More than 2 processes are reading from the same streams shard at the same time. Exceeding + * this limit may result in request throttling.

+ * + * @throws {@link DynamoDBServiceException} + *

Base exception class for all service exceptions from DynamoDB service.

+ * + * + * @public + */ +export declare class ListImportsCommand extends ListImportsCommand_base { + /** @internal type navigation helper, not in runtime. */ + protected static __types: { + api: { + input: ListImportsInput; + output: ListImportsOutput; + }; + sdk: { + input: ListImportsCommandInput; + output: ListImportsCommandOutput; + }; + }; +} diff --git a/amplify/functions/downloadDocument/node_modules/@aws-sdk/client-dynamodb/dist-types/commands/ListTablesCommand.d.ts b/amplify/functions/downloadDocument/node_modules/@aws-sdk/client-dynamodb/dist-types/commands/ListTablesCommand.d.ts new file mode 100644 index 0000000..394c20f --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@aws-sdk/client-dynamodb/dist-types/commands/ListTablesCommand.d.ts @@ -0,0 +1,101 @@ +import { Command as $Command } from "@smithy/smithy-client"; +import { MetadataBearer as __MetadataBearer } from "@smithy/types"; +import { DynamoDBClientResolvedConfig, ServiceInputTypes, ServiceOutputTypes } from "../DynamoDBClient"; +import { ListTablesInput, ListTablesOutput } from "../models/models_0"; +/** + * @public + */ +export type { __MetadataBearer }; +export { $Command }; +/** + * @public + * + * The input for {@link ListTablesCommand}. + */ +export interface ListTablesCommandInput extends ListTablesInput { +} +/** + * @public + * + * The output of {@link ListTablesCommand}. + */ +export interface ListTablesCommandOutput extends ListTablesOutput, __MetadataBearer { +} +declare const ListTablesCommand_base: { + new (input: ListTablesCommandInput): import("@smithy/smithy-client").CommandImpl; + new (...[input]: [] | [ListTablesCommandInput]): import("@smithy/smithy-client").CommandImpl; + getEndpointParameterInstructions(): import("@smithy/middleware-endpoint").EndpointParameterInstructions; +}; +/** + *

Returns an array of table names associated with the current account and endpoint. The + * output from ListTables is paginated, with each page returning a maximum of + * 100 table names.

+ * @example + * Use a bare-bones client and the command you need to make an API call. + * ```javascript + * import { DynamoDBClient, ListTablesCommand } from "@aws-sdk/client-dynamodb"; // ES Modules import + * // const { DynamoDBClient, ListTablesCommand } = require("@aws-sdk/client-dynamodb"); // CommonJS import + * const client = new DynamoDBClient(config); + * const input = { // ListTablesInput + * ExclusiveStartTableName: "STRING_VALUE", + * Limit: Number("int"), + * }; + * const command = new ListTablesCommand(input); + * const response = await client.send(command); + * // { // ListTablesOutput + * // TableNames: [ // TableNameList + * // "STRING_VALUE", + * // ], + * // LastEvaluatedTableName: "STRING_VALUE", + * // }; + * + * ``` + * + * @param ListTablesCommandInput - {@link ListTablesCommandInput} + * @returns {@link ListTablesCommandOutput} + * @see {@link ListTablesCommandInput} for command's `input` shape. + * @see {@link ListTablesCommandOutput} for command's `response` shape. + * @see {@link DynamoDBClientResolvedConfig | config} for DynamoDBClient's `config` shape. + * + * @throws {@link InternalServerError} (server fault) + *

An error occurred on the server side.

+ * + * @throws {@link InvalidEndpointException} (client fault) + * + * @throws {@link DynamoDBServiceException} + *

Base exception class for all service exceptions from DynamoDB service.

+ * + * + * @example To list tables + * ```javascript + * // This example lists all of the tables associated with the current AWS account and endpoint. + * const input = { /* empty *\/ }; + * const command = new ListTablesCommand(input); + * const response = await client.send(command); + * /* response is + * { + * TableNames: [ + * "Forum", + * "ProductCatalog", + * "Reply", + * "Thread" + * ] + * } + * *\/ + * ``` + * + * @public + */ +export declare class ListTablesCommand extends ListTablesCommand_base { + /** @internal type navigation helper, not in runtime. */ + protected static __types: { + api: { + input: ListTablesInput; + output: ListTablesOutput; + }; + sdk: { + input: ListTablesCommandInput; + output: ListTablesCommandOutput; + }; + }; +} diff --git a/amplify/functions/downloadDocument/node_modules/@aws-sdk/client-dynamodb/dist-types/commands/ListTagsOfResourceCommand.d.ts b/amplify/functions/downloadDocument/node_modules/@aws-sdk/client-dynamodb/dist-types/commands/ListTagsOfResourceCommand.d.ts new file mode 100644 index 0000000..c2230f4 --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@aws-sdk/client-dynamodb/dist-types/commands/ListTagsOfResourceCommand.d.ts @@ -0,0 +1,91 @@ +import { Command as $Command } from "@smithy/smithy-client"; +import { MetadataBearer as __MetadataBearer } from "@smithy/types"; +import { DynamoDBClientResolvedConfig, ServiceInputTypes, ServiceOutputTypes } from "../DynamoDBClient"; +import { ListTagsOfResourceInput, ListTagsOfResourceOutput } from "../models/models_0"; +/** + * @public + */ +export type { __MetadataBearer }; +export { $Command }; +/** + * @public + * + * The input for {@link ListTagsOfResourceCommand}. + */ +export interface ListTagsOfResourceCommandInput extends ListTagsOfResourceInput { +} +/** + * @public + * + * The output of {@link ListTagsOfResourceCommand}. 
+ */ +export interface ListTagsOfResourceCommandOutput extends ListTagsOfResourceOutput, __MetadataBearer { +} +declare const ListTagsOfResourceCommand_base: { + new (input: ListTagsOfResourceCommandInput): import("@smithy/smithy-client").CommandImpl; + new (__0_0: ListTagsOfResourceCommandInput): import("@smithy/smithy-client").CommandImpl; + getEndpointParameterInstructions(): import("@smithy/middleware-endpoint").EndpointParameterInstructions; +}; +/** + *

List all tags on an Amazon DynamoDB resource. You can call ListTagsOfResource up to 10 + * times per second, per account.

+ *

For an overview on tagging DynamoDB resources, see Tagging for DynamoDB + * in the Amazon DynamoDB Developer Guide.

+ * @example + * Use a bare-bones client and the command you need to make an API call. + * ```javascript + * import { DynamoDBClient, ListTagsOfResourceCommand } from "@aws-sdk/client-dynamodb"; // ES Modules import + * // const { DynamoDBClient, ListTagsOfResourceCommand } = require("@aws-sdk/client-dynamodb"); // CommonJS import + * const client = new DynamoDBClient(config); + * const input = { // ListTagsOfResourceInput + * ResourceArn: "STRING_VALUE", // required + * NextToken: "STRING_VALUE", + * }; + * const command = new ListTagsOfResourceCommand(input); + * const response = await client.send(command); + * // { // ListTagsOfResourceOutput + * // Tags: [ // TagList + * // { // Tag + * // Key: "STRING_VALUE", // required + * // Value: "STRING_VALUE", // required + * // }, + * // ], + * // NextToken: "STRING_VALUE", + * // }; + * + * ``` + * + * @param ListTagsOfResourceCommandInput - {@link ListTagsOfResourceCommandInput} + * @returns {@link ListTagsOfResourceCommandOutput} + * @see {@link ListTagsOfResourceCommandInput} for command's `input` shape. + * @see {@link ListTagsOfResourceCommandOutput} for command's `response` shape. + * @see {@link DynamoDBClientResolvedConfig | config} for DynamoDBClient's `config` shape. + * + * @throws {@link InternalServerError} (server fault) + *

An error occurred on the server side.

+ * + * @throws {@link InvalidEndpointException} (client fault) + * + * @throws {@link ResourceNotFoundException} (client fault) + *

The operation tried to access a nonexistent table or index. The resource might not + * be specified correctly, or its status might not be ACTIVE.

+ * + * @throws {@link DynamoDBServiceException} + *

Base exception class for all service exceptions from DynamoDB service.

+ * + * + * @public + */ +export declare class ListTagsOfResourceCommand extends ListTagsOfResourceCommand_base { + /** @internal type navigation helper, not in runtime. */ + protected static __types: { + api: { + input: ListTagsOfResourceInput; + output: ListTagsOfResourceOutput; + }; + sdk: { + input: ListTagsOfResourceCommandInput; + output: ListTagsOfResourceCommandOutput; + }; + }; +} diff --git a/amplify/functions/downloadDocument/node_modules/@aws-sdk/client-dynamodb/dist-types/commands/PutItemCommand.d.ts b/amplify/functions/downloadDocument/node_modules/@aws-sdk/client-dynamodb/dist-types/commands/PutItemCommand.d.ts new file mode 100644 index 0000000..f9e32df --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@aws-sdk/client-dynamodb/dist-types/commands/PutItemCommand.d.ts @@ -0,0 +1,300 @@ +import { Command as $Command } from "@smithy/smithy-client"; +import { MetadataBearer as __MetadataBearer } from "@smithy/types"; +import { DynamoDBClientResolvedConfig, ServiceInputTypes, ServiceOutputTypes } from "../DynamoDBClient"; +import { PutItemInput, PutItemOutput } from "../models/models_0"; +/** + * @public + */ +export type { __MetadataBearer }; +export { $Command }; +/** + * @public + * + * The input for {@link PutItemCommand}. + */ +export interface PutItemCommandInput extends PutItemInput { +} +/** + * @public + * + * The output of {@link PutItemCommand}. + */ +export interface PutItemCommandOutput extends PutItemOutput, __MetadataBearer { +} +declare const PutItemCommand_base: { + new (input: PutItemCommandInput): import("@smithy/smithy-client").CommandImpl; + new (__0_0: PutItemCommandInput): import("@smithy/smithy-client").CommandImpl; + getEndpointParameterInstructions(): import("@smithy/middleware-endpoint").EndpointParameterInstructions; +}; +/** + *

Creates a new item, or replaces an old item with a new item. If an item that has the + * same primary key as the new item already exists in the specified table, the new item + * completely replaces the existing item. You can perform a conditional put operation (add + * a new item if one with the specified primary key doesn't exist), or replace an existing + * item if it has certain attribute values. You can return the item's attribute values in + * the same operation, using the ReturnValues parameter.

+ *

When you add an item, the primary key attributes are the only required attributes.

+ *

Empty String and Binary attribute values are allowed. Attribute values of type String + * and Binary must have a length greater than zero if the attribute is used as a key + * attribute for a table or index. Set type attributes cannot be empty.

+ *

Invalid Requests with empty values will be rejected with a + * ValidationException exception.

+ * + *

To prevent a new item from replacing an existing item, use a conditional + * expression that contains the attribute_not_exists function with the + * name of the attribute being used as the partition key for the table. Since every + * record must contain that attribute, the attribute_not_exists function + * will only succeed if no matching item exists.

+ *
+ *

For more information about PutItem, see Working with + * Items in the Amazon DynamoDB Developer Guide.

+ * @example + * Use a bare-bones client and the command you need to make an API call. + * ```javascript + * import { DynamoDBClient, PutItemCommand } from "@aws-sdk/client-dynamodb"; // ES Modules import + * // const { DynamoDBClient, PutItemCommand } = require("@aws-sdk/client-dynamodb"); // CommonJS import + * const client = new DynamoDBClient(config); + * const input = { // PutItemInput + * TableName: "STRING_VALUE", // required + * Item: { // PutItemInputAttributeMap // required + * "": { // AttributeValue Union: only one key present + * S: "STRING_VALUE", + * N: "STRING_VALUE", + * B: new Uint8Array(), // e.g. Buffer.from("") or new TextEncoder().encode("") + * SS: [ // StringSetAttributeValue + * "STRING_VALUE", + * ], + * NS: [ // NumberSetAttributeValue + * "STRING_VALUE", + * ], + * BS: [ // BinarySetAttributeValue + * new Uint8Array(), // e.g. Buffer.from("") or new TextEncoder().encode("") + * ], + * M: { // MapAttributeValue + * "": {// Union: only one key present + * S: "STRING_VALUE", + * N: "STRING_VALUE", + * B: new Uint8Array(), // e.g. Buffer.from("") or new TextEncoder().encode("") + * SS: [ + * "STRING_VALUE", + * ], + * NS: [ + * "STRING_VALUE", + * ], + * BS: [ + * new Uint8Array(), // e.g. 
Buffer.from("") or new TextEncoder().encode("") + * ], + * M: { + * "": "", + * }, + * L: [ // ListAttributeValue + * "", + * ], + * NULL: true || false, + * BOOL: true || false, + * }, + * }, + * L: [ + * "", + * ], + * NULL: true || false, + * BOOL: true || false, + * }, + * }, + * Expected: { // ExpectedAttributeMap + * "": { // ExpectedAttributeValue + * Value: "", + * Exists: true || false, + * ComparisonOperator: "EQ" || "NE" || "IN" || "LE" || "LT" || "GE" || "GT" || "BETWEEN" || "NOT_NULL" || "NULL" || "CONTAINS" || "NOT_CONTAINS" || "BEGINS_WITH", + * AttributeValueList: [ // AttributeValueList + * "", + * ], + * }, + * }, + * ReturnValues: "NONE" || "ALL_OLD" || "UPDATED_OLD" || "ALL_NEW" || "UPDATED_NEW", + * ReturnConsumedCapacity: "INDEXES" || "TOTAL" || "NONE", + * ReturnItemCollectionMetrics: "SIZE" || "NONE", + * ConditionalOperator: "AND" || "OR", + * ConditionExpression: "STRING_VALUE", + * ExpressionAttributeNames: { // ExpressionAttributeNameMap + * "": "STRING_VALUE", + * }, + * ExpressionAttributeValues: { // ExpressionAttributeValueMap + * "": "", + * }, + * ReturnValuesOnConditionCheckFailure: "ALL_OLD" || "NONE", + * }; + * const command = new PutItemCommand(input); + * const response = await client.send(command); + * // { // PutItemOutput + * // Attributes: { // AttributeMap + * // "": { // AttributeValue Union: only one key present + * // S: "STRING_VALUE", + * // N: "STRING_VALUE", + * // B: new Uint8Array(), + * // SS: [ // StringSetAttributeValue + * // "STRING_VALUE", + * // ], + * // NS: [ // NumberSetAttributeValue + * // "STRING_VALUE", + * // ], + * // BS: [ // BinarySetAttributeValue + * // new Uint8Array(), + * // ], + * // M: { // MapAttributeValue + * // "": {// Union: only one key present + * // S: "STRING_VALUE", + * // N: "STRING_VALUE", + * // B: new Uint8Array(), + * // SS: [ + * // "STRING_VALUE", + * // ], + * // NS: [ + * // "STRING_VALUE", + * // ], + * // BS: [ + * // new Uint8Array(), + * // ], + * // M: { + * // 
"": "", + * // }, + * // L: [ // ListAttributeValue + * // "", + * // ], + * // NULL: true || false, + * // BOOL: true || false, + * // }, + * // }, + * // L: [ + * // "", + * // ], + * // NULL: true || false, + * // BOOL: true || false, + * // }, + * // }, + * // ConsumedCapacity: { // ConsumedCapacity + * // TableName: "STRING_VALUE", + * // CapacityUnits: Number("double"), + * // ReadCapacityUnits: Number("double"), + * // WriteCapacityUnits: Number("double"), + * // Table: { // Capacity + * // ReadCapacityUnits: Number("double"), + * // WriteCapacityUnits: Number("double"), + * // CapacityUnits: Number("double"), + * // }, + * // LocalSecondaryIndexes: { // SecondaryIndexesCapacityMap + * // "": { + * // ReadCapacityUnits: Number("double"), + * // WriteCapacityUnits: Number("double"), + * // CapacityUnits: Number("double"), + * // }, + * // }, + * // GlobalSecondaryIndexes: { + * // "": { + * // ReadCapacityUnits: Number("double"), + * // WriteCapacityUnits: Number("double"), + * // CapacityUnits: Number("double"), + * // }, + * // }, + * // }, + * // ItemCollectionMetrics: { // ItemCollectionMetrics + * // ItemCollectionKey: { // ItemCollectionKeyAttributeMap + * // "": "", + * // }, + * // SizeEstimateRangeGB: [ // ItemCollectionSizeEstimateRange + * // Number("double"), + * // ], + * // }, + * // }; + * + * ``` + * + * @param PutItemCommandInput - {@link PutItemCommandInput} + * @returns {@link PutItemCommandOutput} + * @see {@link PutItemCommandInput} for command's `input` shape. + * @see {@link PutItemCommandOutput} for command's `response` shape. + * @see {@link DynamoDBClientResolvedConfig | config} for DynamoDBClient's `config` shape. + * + * @throws {@link ConditionalCheckFailedException} (client fault) + *

A condition specified in the operation failed to be evaluated.

+ * + * @throws {@link InternalServerError} (server fault) + *

An error occurred on the server side.

+ * + * @throws {@link InvalidEndpointException} (client fault) + * + * @throws {@link ItemCollectionSizeLimitExceededException} (client fault) + *

An item collection is too large. This exception is only returned for tables that + * have one or more local secondary indexes.

+ * + * @throws {@link ProvisionedThroughputExceededException} (client fault) + *

Your request rate is too high. The Amazon Web Services SDKs for DynamoDB + * automatically retry requests that receive this exception. Your request is eventually + * successful, unless your retry queue is too large to finish. Reduce the frequency of + * requests and use exponential backoff. For more information, go to Error Retries and Exponential Backoff in the Amazon DynamoDB Developer Guide.

+ * + * @throws {@link ReplicatedWriteConflictException} (client fault) + *

The request was rejected because one or more items in the request are being modified by a request in another Region.

+ * + * @throws {@link RequestLimitExceeded} (client fault) + *

Throughput exceeds the current throughput quota for your account. Please contact + * Amazon Web ServicesSupport to request a + * quota increase.

+ * + * @throws {@link ResourceNotFoundException} (client fault) + *

The operation tried to access a nonexistent table or index. The resource might not + * be specified correctly, or its status might not be ACTIVE.

+ * + * @throws {@link TransactionConflictException} (client fault) + *

Operation was rejected because there is an ongoing transaction for the + * item.

+ * + * @throws {@link DynamoDBServiceException} + *

Base exception class for all service exceptions from DynamoDB service.

+ * + * + * @example To add an item to a table + * ```javascript + * // This example adds a new item to the Music table. + * const input = { + * Item: { + * AlbumTitle: { + * S: "Somewhat Famous" + * }, + * Artist: { + * S: "No One You Know" + * }, + * SongTitle: { + * S: "Call Me Today" + * } + * }, + * ReturnConsumedCapacity: "TOTAL", + * TableName: "Music" + * }; + * const command = new PutItemCommand(input); + * const response = await client.send(command); + * /* response is + * { + * ConsumedCapacity: { + * CapacityUnits: 1, + * TableName: "Music" + * } + * } + * *\/ + * ``` + * + * @public + */ +export declare class PutItemCommand extends PutItemCommand_base { + /** @internal type navigation helper, not in runtime. */ + protected static __types: { + api: { + input: PutItemInput; + output: PutItemOutput; + }; + sdk: { + input: PutItemCommandInput; + output: PutItemCommandOutput; + }; + }; +} diff --git a/amplify/functions/downloadDocument/node_modules/@aws-sdk/client-dynamodb/dist-types/commands/PutResourcePolicyCommand.d.ts b/amplify/functions/downloadDocument/node_modules/@aws-sdk/client-dynamodb/dist-types/commands/PutResourcePolicyCommand.d.ts new file mode 100644 index 0000000..e222271 --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@aws-sdk/client-dynamodb/dist-types/commands/PutResourcePolicyCommand.d.ts @@ -0,0 +1,140 @@ +import { Command as $Command } from "@smithy/smithy-client"; +import { MetadataBearer as __MetadataBearer } from "@smithy/types"; +import { DynamoDBClientResolvedConfig, ServiceInputTypes, ServiceOutputTypes } from "../DynamoDBClient"; +import { PutResourcePolicyInput, PutResourcePolicyOutput } from "../models/models_0"; +/** + * @public + */ +export type { __MetadataBearer }; +export { $Command }; +/** + * @public + * + * The input for {@link PutResourcePolicyCommand}. 
+ */ +export interface PutResourcePolicyCommandInput extends PutResourcePolicyInput { +} +/** + * @public + * + * The output of {@link PutResourcePolicyCommand}. + */ +export interface PutResourcePolicyCommandOutput extends PutResourcePolicyOutput, __MetadataBearer { +} +declare const PutResourcePolicyCommand_base: { + new (input: PutResourcePolicyCommandInput): import("@smithy/smithy-client").CommandImpl; + new (__0_0: PutResourcePolicyCommandInput): import("@smithy/smithy-client").CommandImpl; + getEndpointParameterInstructions(): import("@smithy/middleware-endpoint").EndpointParameterInstructions; +}; +/** + *

Attaches a resource-based policy document to the resource, which can be a table or + * stream. When you attach a resource-based policy using this API, the policy application + * is + * eventually consistent + * .

+ *

+ * PutResourcePolicy is an idempotent operation; running it multiple times + * on the same resource using the same policy document will return the same revision ID. If + * you specify an ExpectedRevisionId that doesn't match the current policy's + * RevisionId, the PolicyNotFoundException will be + * returned.

+ * + *

+ * PutResourcePolicy is an asynchronous operation. If you issue a + * GetResourcePolicy request immediately after a + * PutResourcePolicy request, DynamoDB might return your + * previous policy, if there was one, or return the + * PolicyNotFoundException. This is because + * GetResourcePolicy uses an eventually consistent query, and the + * metadata for your policy or table might not be available at that moment. Wait for a + * few seconds, and then try the GetResourcePolicy request again.

+ *
+ * @example + * Use a bare-bones client and the command you need to make an API call. + * ```javascript + * import { DynamoDBClient, PutResourcePolicyCommand } from "@aws-sdk/client-dynamodb"; // ES Modules import + * // const { DynamoDBClient, PutResourcePolicyCommand } = require("@aws-sdk/client-dynamodb"); // CommonJS import + * const client = new DynamoDBClient(config); + * const input = { // PutResourcePolicyInput + * ResourceArn: "STRING_VALUE", // required + * Policy: "STRING_VALUE", // required + * ExpectedRevisionId: "STRING_VALUE", + * ConfirmRemoveSelfResourceAccess: true || false, + * }; + * const command = new PutResourcePolicyCommand(input); + * const response = await client.send(command); + * // { // PutResourcePolicyOutput + * // RevisionId: "STRING_VALUE", + * // }; + * + * ``` + * + * @param PutResourcePolicyCommandInput - {@link PutResourcePolicyCommandInput} + * @returns {@link PutResourcePolicyCommandOutput} + * @see {@link PutResourcePolicyCommandInput} for command's `input` shape. + * @see {@link PutResourcePolicyCommandOutput} for command's `response` shape. + * @see {@link DynamoDBClientResolvedConfig | config} for DynamoDBClient's `config` shape. + * + * @throws {@link InternalServerError} (server fault) + *

An error occurred on the server side.

+ * + * @throws {@link InvalidEndpointException} (client fault) + * + * @throws {@link LimitExceededException} (client fault) + *

There is no limit to the number of daily on-demand backups that can be taken.

+ *

For most purposes, up to 500 simultaneous table operations are allowed per account. These operations + * include CreateTable, UpdateTable, + * DeleteTable,UpdateTimeToLive, + * RestoreTableFromBackup, and RestoreTableToPointInTime.

+ *

When you are creating a table with one or more secondary + * indexes, you can have up to 250 such requests running at a time. However, if the table or + * index specifications are complex, then DynamoDB might temporarily reduce the number + * of concurrent operations.

+ *

When importing into DynamoDB, up to 50 simultaneous import table operations are allowed per account.

+ *

There is a soft account quota of 2,500 tables.

+ *

GetRecords was called with a value of more than 1000 for the limit request parameter.

+ *

More than 2 processes are reading from the same streams shard at the same time. Exceeding + * this limit may result in request throttling.

+ * + * @throws {@link PolicyNotFoundException} (client fault) + *

The operation tried to access a nonexistent resource-based policy.

+ *

If you specified an ExpectedRevisionId, it's possible that a policy is present for the resource but its revision ID didn't match the expected value.

+ * + * @throws {@link ResourceInUseException} (client fault) + *

The operation conflicts with the resource's availability. For example:

+ *
    + *
  • + *

    You attempted to recreate an existing table.

    + *
  • + *
  • + *

    You tried to delete a table currently in the CREATING state.

    + *
  • + *
  • + *

    You tried to update a resource that was already being updated.

    + *
  • + *
+ *

When appropriate, wait for the ongoing update to complete and attempt the request again.

+ * + * @throws {@link ResourceNotFoundException} (client fault) + *

The operation tried to access a nonexistent table or index. The resource might not + * be specified correctly, or its status might not be ACTIVE.

+ * + * @throws {@link DynamoDBServiceException} + *

Base exception class for all service exceptions from DynamoDB service.

+ * + * + * @public + */ +export declare class PutResourcePolicyCommand extends PutResourcePolicyCommand_base { + /** @internal type navigation helper, not in runtime. */ + protected static __types: { + api: { + input: PutResourcePolicyInput; + output: PutResourcePolicyOutput; + }; + sdk: { + input: PutResourcePolicyCommandInput; + output: PutResourcePolicyCommandOutput; + }; + }; +} diff --git a/amplify/functions/downloadDocument/node_modules/@aws-sdk/client-dynamodb/dist-types/commands/QueryCommand.d.ts b/amplify/functions/downloadDocument/node_modules/@aws-sdk/client-dynamodb/dist-types/commands/QueryCommand.d.ts new file mode 100644 index 0000000..bac7977 --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@aws-sdk/client-dynamodb/dist-types/commands/QueryCommand.d.ts @@ -0,0 +1,329 @@ +import { Command as $Command } from "@smithy/smithy-client"; +import { MetadataBearer as __MetadataBearer } from "@smithy/types"; +import { DynamoDBClientResolvedConfig, ServiceInputTypes, ServiceOutputTypes } from "../DynamoDBClient"; +import { QueryInput, QueryOutput } from "../models/models_0"; +/** + * @public + */ +export type { __MetadataBearer }; +export { $Command }; +/** + * @public + * + * The input for {@link QueryCommand}. + */ +export interface QueryCommandInput extends QueryInput { +} +/** + * @public + * + * The output of {@link QueryCommand}. + */ +export interface QueryCommandOutput extends QueryOutput, __MetadataBearer { +} +declare const QueryCommand_base: { + new (input: QueryCommandInput): import("@smithy/smithy-client").CommandImpl; + new (__0_0: QueryCommandInput): import("@smithy/smithy-client").CommandImpl; + getEndpointParameterInstructions(): import("@smithy/middleware-endpoint").EndpointParameterInstructions; +}; +/** + *

You must provide the name of the partition key attribute and a single value for that + * attribute. Query returns all items with that partition key value. + * Optionally, you can provide a sort key attribute and use a comparison operator to refine + * the search results.

+ *

Use the KeyConditionExpression parameter to provide a specific value for + * the partition key. The Query operation will return all of the items from + * the table or index with that partition key value. You can optionally narrow the scope of + * the Query operation by specifying a sort key value and a comparison + * operator in KeyConditionExpression. To further refine the + * Query results, you can optionally provide a + * FilterExpression. A FilterExpression determines which + * items within the results should be returned to you. All of the other results are + * discarded.

+ *

A Query operation always returns a result set. If no matching items are + * found, the result set will be empty. Queries that do not return results consume the + * minimum number of read capacity units for that type of read operation.

+ * + *

DynamoDB calculates the number of read capacity units consumed based on item + * size, not on the amount of data that is returned to an application. The number of + * capacity units consumed will be the same whether you request all of the attributes + * (the default behavior) or just some of them (using a projection expression). The + * number will also be the same whether or not you use a FilterExpression. + *

+ *
+ *

+ * Query results are always sorted by the sort key value. If the data type of + * the sort key is Number, the results are returned in numeric order; otherwise, the + * results are returned in order of UTF-8 bytes. By default, the sort order is ascending. + * To reverse the order, set the ScanIndexForward parameter to false.

+ *

A single Query operation will read up to the maximum number of items set + * (if using the Limit parameter) or a maximum of 1 MB of data and then apply + * any filtering to the results using FilterExpression. If + * LastEvaluatedKey is present in the response, you will need to paginate + * the result set. For more information, see Paginating + * the Results in the Amazon DynamoDB Developer Guide.

+ *

+ * FilterExpression is applied after a Query finishes, but before + * the results are returned. A FilterExpression cannot contain partition key + * or sort key attributes. You need to specify those attributes in the + * KeyConditionExpression.

+ * + *

A Query operation can return an empty result set and a + * LastEvaluatedKey if all the items read for the page of results are + * filtered out.

+ *
+ *

You can query a table, a local secondary index, or a global secondary index. For a + * query on a table or on a local secondary index, you can set the + * ConsistentRead parameter to true and obtain a strongly + * consistent result. Global secondary indexes support eventually consistent reads only, so + * do not specify ConsistentRead when querying a global secondary + * index.

+ * @example + * Use a bare-bones client and the command you need to make an API call. + * ```javascript + * import { DynamoDBClient, QueryCommand } from "@aws-sdk/client-dynamodb"; // ES Modules import + * // const { DynamoDBClient, QueryCommand } = require("@aws-sdk/client-dynamodb"); // CommonJS import + * const client = new DynamoDBClient(config); + * const input = { // QueryInput + * TableName: "STRING_VALUE", // required + * IndexName: "STRING_VALUE", + * Select: "ALL_ATTRIBUTES" || "ALL_PROJECTED_ATTRIBUTES" || "SPECIFIC_ATTRIBUTES" || "COUNT", + * AttributesToGet: [ // AttributeNameList + * "STRING_VALUE", + * ], + * Limit: Number("int"), + * ConsistentRead: true || false, + * KeyConditions: { // KeyConditions + * "": { // Condition + * AttributeValueList: [ // AttributeValueList + * { // AttributeValue Union: only one key present + * S: "STRING_VALUE", + * N: "STRING_VALUE", + * B: new Uint8Array(), // e.g. Buffer.from("") or new TextEncoder().encode("") + * SS: [ // StringSetAttributeValue + * "STRING_VALUE", + * ], + * NS: [ // NumberSetAttributeValue + * "STRING_VALUE", + * ], + * BS: [ // BinarySetAttributeValue + * new Uint8Array(), // e.g. Buffer.from("") or new TextEncoder().encode("") + * ], + * M: { // MapAttributeValue + * "": {// Union: only one key present + * S: "STRING_VALUE", + * N: "STRING_VALUE", + * B: new Uint8Array(), // e.g. Buffer.from("") or new TextEncoder().encode("") + * SS: [ + * "STRING_VALUE", + * ], + * NS: [ + * "STRING_VALUE", + * ], + * BS: [ + * new Uint8Array(), // e.g. 
Buffer.from("") or new TextEncoder().encode("") + * ], + * M: { + * "": "", + * }, + * L: [ // ListAttributeValue + * "", + * ], + * NULL: true || false, + * BOOL: true || false, + * }, + * }, + * L: [ + * "", + * ], + * NULL: true || false, + * BOOL: true || false, + * }, + * ], + * ComparisonOperator: "EQ" || "NE" || "IN" || "LE" || "LT" || "GE" || "GT" || "BETWEEN" || "NOT_NULL" || "NULL" || "CONTAINS" || "NOT_CONTAINS" || "BEGINS_WITH", // required + * }, + * }, + * QueryFilter: { // FilterConditionMap + * "": { + * AttributeValueList: [ + * "", + * ], + * ComparisonOperator: "EQ" || "NE" || "IN" || "LE" || "LT" || "GE" || "GT" || "BETWEEN" || "NOT_NULL" || "NULL" || "CONTAINS" || "NOT_CONTAINS" || "BEGINS_WITH", // required + * }, + * }, + * ConditionalOperator: "AND" || "OR", + * ScanIndexForward: true || false, + * ExclusiveStartKey: { // Key + * "": "", + * }, + * ReturnConsumedCapacity: "INDEXES" || "TOTAL" || "NONE", + * ProjectionExpression: "STRING_VALUE", + * FilterExpression: "STRING_VALUE", + * KeyConditionExpression: "STRING_VALUE", + * ExpressionAttributeNames: { // ExpressionAttributeNameMap + * "": "STRING_VALUE", + * }, + * ExpressionAttributeValues: { // ExpressionAttributeValueMap + * "": "", + * }, + * }; + * const command = new QueryCommand(input); + * const response = await client.send(command); + * // { // QueryOutput + * // Items: [ // ItemList + * // { // AttributeMap + * // "": { // AttributeValue Union: only one key present + * // S: "STRING_VALUE", + * // N: "STRING_VALUE", + * // B: new Uint8Array(), + * // SS: [ // StringSetAttributeValue + * // "STRING_VALUE", + * // ], + * // NS: [ // NumberSetAttributeValue + * // "STRING_VALUE", + * // ], + * // BS: [ // BinarySetAttributeValue + * // new Uint8Array(), + * // ], + * // M: { // MapAttributeValue + * // "": {// Union: only one key present + * // S: "STRING_VALUE", + * // N: "STRING_VALUE", + * // B: new Uint8Array(), + * // SS: [ + * // "STRING_VALUE", + * // ], + * // NS: [ + * 
// "STRING_VALUE", + * // ], + * // BS: [ + * // new Uint8Array(), + * // ], + * // M: { + * // "": "", + * // }, + * // L: [ // ListAttributeValue + * // "", + * // ], + * // NULL: true || false, + * // BOOL: true || false, + * // }, + * // }, + * // L: [ + * // "", + * // ], + * // NULL: true || false, + * // BOOL: true || false, + * // }, + * // }, + * // ], + * // Count: Number("int"), + * // ScannedCount: Number("int"), + * // LastEvaluatedKey: { // Key + * // "": "", + * // }, + * // ConsumedCapacity: { // ConsumedCapacity + * // TableName: "STRING_VALUE", + * // CapacityUnits: Number("double"), + * // ReadCapacityUnits: Number("double"), + * // WriteCapacityUnits: Number("double"), + * // Table: { // Capacity + * // ReadCapacityUnits: Number("double"), + * // WriteCapacityUnits: Number("double"), + * // CapacityUnits: Number("double"), + * // }, + * // LocalSecondaryIndexes: { // SecondaryIndexesCapacityMap + * // "": { + * // ReadCapacityUnits: Number("double"), + * // WriteCapacityUnits: Number("double"), + * // CapacityUnits: Number("double"), + * // }, + * // }, + * // GlobalSecondaryIndexes: { + * // "": { + * // ReadCapacityUnits: Number("double"), + * // WriteCapacityUnits: Number("double"), + * // CapacityUnits: Number("double"), + * // }, + * // }, + * // }, + * // }; + * + * ``` + * + * @param QueryCommandInput - {@link QueryCommandInput} + * @returns {@link QueryCommandOutput} + * @see {@link QueryCommandInput} for command's `input` shape. + * @see {@link QueryCommandOutput} for command's `response` shape. + * @see {@link DynamoDBClientResolvedConfig | config} for DynamoDBClient's `config` shape. + * + * @throws {@link InternalServerError} (server fault) + *

An error occurred on the server side.

+ * + * @throws {@link InvalidEndpointException} (client fault) + * + * @throws {@link ProvisionedThroughputExceededException} (client fault) + *

Your request rate is too high. The Amazon Web Services SDKs for DynamoDB + * automatically retry requests that receive this exception. Your request is eventually + * successful, unless your retry queue is too large to finish. Reduce the frequency of + * requests and use exponential backoff. For more information, go to Error Retries and Exponential Backoff in the Amazon DynamoDB Developer Guide.

+ * + * @throws {@link RequestLimitExceeded} (client fault) + *

Throughput exceeds the current throughput quota for your account. Please contact + * Amazon Web ServicesSupport to request a + * quota increase.

+ * + * @throws {@link ResourceNotFoundException} (client fault) + *

The operation tried to access a nonexistent table or index. The resource might not + * be specified correctly, or its status might not be ACTIVE.

+ * + * @throws {@link DynamoDBServiceException} + *

Base exception class for all service exceptions from DynamoDB service.

+ * + * + * @example To query an item + * ```javascript + * // This example queries items in the Music table. The table has a partition key and sort key (Artist and SongTitle), but this query only specifies the partition key value. It returns song titles by the artist named "No One You Know". + * const input = { + * ExpressionAttributeValues: { + * :v1: { + * S: "No One You Know" + * } + * }, + * KeyConditionExpression: "Artist = :v1", + * ProjectionExpression: "SongTitle", + * TableName: "Music" + * }; + * const command = new QueryCommand(input); + * const response = await client.send(command); + * /* response is + * { + * ConsumedCapacity: { /* empty *\/ }, + * Count: 2, + * Items: [ + * { + * SongTitle: { + * S: "Call Me Today" + * } + * } + * ], + * ScannedCount: 2 + * } + * *\/ + * ``` + * + * @public + */ +export declare class QueryCommand extends QueryCommand_base { + /** @internal type navigation helper, not in runtime. */ + protected static __types: { + api: { + input: QueryInput; + output: QueryOutput; + }; + sdk: { + input: QueryCommandInput; + output: QueryCommandOutput; + }; + }; +} diff --git a/amplify/functions/downloadDocument/node_modules/@aws-sdk/client-dynamodb/dist-types/commands/RestoreTableFromBackupCommand.d.ts b/amplify/functions/downloadDocument/node_modules/@aws-sdk/client-dynamodb/dist-types/commands/RestoreTableFromBackupCommand.d.ts new file mode 100644 index 0000000..495b072 --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@aws-sdk/client-dynamodb/dist-types/commands/RestoreTableFromBackupCommand.d.ts @@ -0,0 +1,361 @@ +import { Command as $Command } from "@smithy/smithy-client"; +import { MetadataBearer as __MetadataBearer } from "@smithy/types"; +import { DynamoDBClientResolvedConfig, ServiceInputTypes, ServiceOutputTypes } from "../DynamoDBClient"; +import { RestoreTableFromBackupInput, RestoreTableFromBackupOutput } from "../models/models_0"; +/** + * @public + */ +export type { __MetadataBearer }; +export { 
$Command }; +/** + * @public + * + * The input for {@link RestoreTableFromBackupCommand}. + */ +export interface RestoreTableFromBackupCommandInput extends RestoreTableFromBackupInput { +} +/** + * @public + * + * The output of {@link RestoreTableFromBackupCommand}. + */ +export interface RestoreTableFromBackupCommandOutput extends RestoreTableFromBackupOutput, __MetadataBearer { +} +declare const RestoreTableFromBackupCommand_base: { + new (input: RestoreTableFromBackupCommandInput): import("@smithy/smithy-client").CommandImpl; + new (__0_0: RestoreTableFromBackupCommandInput): import("@smithy/smithy-client").CommandImpl; + getEndpointParameterInstructions(): import("@smithy/middleware-endpoint").EndpointParameterInstructions; +}; +/** + *

Creates a new table from an existing backup. Any number of users can execute up to 50 + * concurrent restores (any type of restore) in a given account.

+ *

You can call RestoreTableFromBackup at a maximum rate of 10 times per + * second.

+ *

You must manually set up the following on the restored table:

+ *
    + *
  • + *

    Auto scaling policies

    + *
  • + *
  • + *

    IAM policies

    + *
  • + *
  • + *

    Amazon CloudWatch metrics and alarms

    + *
  • + *
  • + *

    Tags

    + *
  • + *
  • + *

    Stream settings

    + *
  • + *
  • + *

    Time to Live (TTL) settings

    + *
  • + *
+ * @example + * Use a bare-bones client and the command you need to make an API call. + * ```javascript + * import { DynamoDBClient, RestoreTableFromBackupCommand } from "@aws-sdk/client-dynamodb"; // ES Modules import + * // const { DynamoDBClient, RestoreTableFromBackupCommand } = require("@aws-sdk/client-dynamodb"); // CommonJS import + * const client = new DynamoDBClient(config); + * const input = { // RestoreTableFromBackupInput + * TargetTableName: "STRING_VALUE", // required + * BackupArn: "STRING_VALUE", // required + * BillingModeOverride: "PROVISIONED" || "PAY_PER_REQUEST", + * GlobalSecondaryIndexOverride: [ // GlobalSecondaryIndexList + * { // GlobalSecondaryIndex + * IndexName: "STRING_VALUE", // required + * KeySchema: [ // KeySchema // required + * { // KeySchemaElement + * AttributeName: "STRING_VALUE", // required + * KeyType: "HASH" || "RANGE", // required + * }, + * ], + * Projection: { // Projection + * ProjectionType: "ALL" || "KEYS_ONLY" || "INCLUDE", + * NonKeyAttributes: [ // NonKeyAttributeNameList + * "STRING_VALUE", + * ], + * }, + * ProvisionedThroughput: { // ProvisionedThroughput + * ReadCapacityUnits: Number("long"), // required + * WriteCapacityUnits: Number("long"), // required + * }, + * OnDemandThroughput: { // OnDemandThroughput + * MaxReadRequestUnits: Number("long"), + * MaxWriteRequestUnits: Number("long"), + * }, + * WarmThroughput: { // WarmThroughput + * ReadUnitsPerSecond: Number("long"), + * WriteUnitsPerSecond: Number("long"), + * }, + * }, + * ], + * LocalSecondaryIndexOverride: [ // LocalSecondaryIndexList + * { // LocalSecondaryIndex + * IndexName: "STRING_VALUE", // required + * KeySchema: [ // required + * { + * AttributeName: "STRING_VALUE", // required + * KeyType: "HASH" || "RANGE", // required + * }, + * ], + * Projection: { + * ProjectionType: "ALL" || "KEYS_ONLY" || "INCLUDE", + * NonKeyAttributes: [ + * "STRING_VALUE", + * ], + * }, + * }, + * ], + * ProvisionedThroughputOverride: { + * ReadCapacityUnits: 
Number("long"), // required + * WriteCapacityUnits: Number("long"), // required + * }, + * OnDemandThroughputOverride: { + * MaxReadRequestUnits: Number("long"), + * MaxWriteRequestUnits: Number("long"), + * }, + * SSESpecificationOverride: { // SSESpecification + * Enabled: true || false, + * SSEType: "AES256" || "KMS", + * KMSMasterKeyId: "STRING_VALUE", + * }, + * }; + * const command = new RestoreTableFromBackupCommand(input); + * const response = await client.send(command); + * // { // RestoreTableFromBackupOutput + * // TableDescription: { // TableDescription + * // AttributeDefinitions: [ // AttributeDefinitions + * // { // AttributeDefinition + * // AttributeName: "STRING_VALUE", // required + * // AttributeType: "S" || "N" || "B", // required + * // }, + * // ], + * // TableName: "STRING_VALUE", + * // KeySchema: [ // KeySchema + * // { // KeySchemaElement + * // AttributeName: "STRING_VALUE", // required + * // KeyType: "HASH" || "RANGE", // required + * // }, + * // ], + * // TableStatus: "CREATING" || "UPDATING" || "DELETING" || "ACTIVE" || "INACCESSIBLE_ENCRYPTION_CREDENTIALS" || "ARCHIVING" || "ARCHIVED", + * // CreationDateTime: new Date("TIMESTAMP"), + * // ProvisionedThroughput: { // ProvisionedThroughputDescription + * // LastIncreaseDateTime: new Date("TIMESTAMP"), + * // LastDecreaseDateTime: new Date("TIMESTAMP"), + * // NumberOfDecreasesToday: Number("long"), + * // ReadCapacityUnits: Number("long"), + * // WriteCapacityUnits: Number("long"), + * // }, + * // TableSizeBytes: Number("long"), + * // ItemCount: Number("long"), + * // TableArn: "STRING_VALUE", + * // TableId: "STRING_VALUE", + * // BillingModeSummary: { // BillingModeSummary + * // BillingMode: "PROVISIONED" || "PAY_PER_REQUEST", + * // LastUpdateToPayPerRequestDateTime: new Date("TIMESTAMP"), + * // }, + * // LocalSecondaryIndexes: [ // LocalSecondaryIndexDescriptionList + * // { // LocalSecondaryIndexDescription + * // IndexName: "STRING_VALUE", + * // KeySchema: [ + * // { + * 
// AttributeName: "STRING_VALUE", // required + * // KeyType: "HASH" || "RANGE", // required + * // }, + * // ], + * // Projection: { // Projection + * // ProjectionType: "ALL" || "KEYS_ONLY" || "INCLUDE", + * // NonKeyAttributes: [ // NonKeyAttributeNameList + * // "STRING_VALUE", + * // ], + * // }, + * // IndexSizeBytes: Number("long"), + * // ItemCount: Number("long"), + * // IndexArn: "STRING_VALUE", + * // }, + * // ], + * // GlobalSecondaryIndexes: [ // GlobalSecondaryIndexDescriptionList + * // { // GlobalSecondaryIndexDescription + * // IndexName: "STRING_VALUE", + * // KeySchema: [ + * // { + * // AttributeName: "STRING_VALUE", // required + * // KeyType: "HASH" || "RANGE", // required + * // }, + * // ], + * // Projection: { + * // ProjectionType: "ALL" || "KEYS_ONLY" || "INCLUDE", + * // NonKeyAttributes: [ + * // "STRING_VALUE", + * // ], + * // }, + * // IndexStatus: "CREATING" || "UPDATING" || "DELETING" || "ACTIVE", + * // Backfilling: true || false, + * // ProvisionedThroughput: { + * // LastIncreaseDateTime: new Date("TIMESTAMP"), + * // LastDecreaseDateTime: new Date("TIMESTAMP"), + * // NumberOfDecreasesToday: Number("long"), + * // ReadCapacityUnits: Number("long"), + * // WriteCapacityUnits: Number("long"), + * // }, + * // IndexSizeBytes: Number("long"), + * // ItemCount: Number("long"), + * // IndexArn: "STRING_VALUE", + * // OnDemandThroughput: { // OnDemandThroughput + * // MaxReadRequestUnits: Number("long"), + * // MaxWriteRequestUnits: Number("long"), + * // }, + * // WarmThroughput: { // GlobalSecondaryIndexWarmThroughputDescription + * // ReadUnitsPerSecond: Number("long"), + * // WriteUnitsPerSecond: Number("long"), + * // Status: "CREATING" || "UPDATING" || "DELETING" || "ACTIVE", + * // }, + * // }, + * // ], + * // StreamSpecification: { // StreamSpecification + * // StreamEnabled: true || false, // required + * // StreamViewType: "NEW_IMAGE" || "OLD_IMAGE" || "NEW_AND_OLD_IMAGES" || "KEYS_ONLY", + * // }, + * // 
LatestStreamLabel: "STRING_VALUE", + * // LatestStreamArn: "STRING_VALUE", + * // GlobalTableVersion: "STRING_VALUE", + * // Replicas: [ // ReplicaDescriptionList + * // { // ReplicaDescription + * // RegionName: "STRING_VALUE", + * // ReplicaStatus: "CREATING" || "CREATION_FAILED" || "UPDATING" || "DELETING" || "ACTIVE" || "REGION_DISABLED" || "INACCESSIBLE_ENCRYPTION_CREDENTIALS", + * // ReplicaStatusDescription: "STRING_VALUE", + * // ReplicaStatusPercentProgress: "STRING_VALUE", + * // KMSMasterKeyId: "STRING_VALUE", + * // ProvisionedThroughputOverride: { // ProvisionedThroughputOverride + * // ReadCapacityUnits: Number("long"), + * // }, + * // OnDemandThroughputOverride: { // OnDemandThroughputOverride + * // MaxReadRequestUnits: Number("long"), + * // }, + * // WarmThroughput: { // TableWarmThroughputDescription + * // ReadUnitsPerSecond: Number("long"), + * // WriteUnitsPerSecond: Number("long"), + * // Status: "CREATING" || "UPDATING" || "DELETING" || "ACTIVE" || "INACCESSIBLE_ENCRYPTION_CREDENTIALS" || "ARCHIVING" || "ARCHIVED", + * // }, + * // GlobalSecondaryIndexes: [ // ReplicaGlobalSecondaryIndexDescriptionList + * // { // ReplicaGlobalSecondaryIndexDescription + * // IndexName: "STRING_VALUE", + * // ProvisionedThroughputOverride: { + * // ReadCapacityUnits: Number("long"), + * // }, + * // OnDemandThroughputOverride: { + * // MaxReadRequestUnits: Number("long"), + * // }, + * // WarmThroughput: { + * // ReadUnitsPerSecond: Number("long"), + * // WriteUnitsPerSecond: Number("long"), + * // Status: "CREATING" || "UPDATING" || "DELETING" || "ACTIVE", + * // }, + * // }, + * // ], + * // ReplicaInaccessibleDateTime: new Date("TIMESTAMP"), + * // ReplicaTableClassSummary: { // TableClassSummary + * // TableClass: "STANDARD" || "STANDARD_INFREQUENT_ACCESS", + * // LastUpdateDateTime: new Date("TIMESTAMP"), + * // }, + * // }, + * // ], + * // RestoreSummary: { // RestoreSummary + * // SourceBackupArn: "STRING_VALUE", + * // SourceTableArn: 
"STRING_VALUE", + * // RestoreDateTime: new Date("TIMESTAMP"), // required + * // RestoreInProgress: true || false, // required + * // }, + * // SSEDescription: { // SSEDescription + * // Status: "ENABLING" || "ENABLED" || "DISABLING" || "DISABLED" || "UPDATING", + * // SSEType: "AES256" || "KMS", + * // KMSMasterKeyArn: "STRING_VALUE", + * // InaccessibleEncryptionDateTime: new Date("TIMESTAMP"), + * // }, + * // ArchivalSummary: { // ArchivalSummary + * // ArchivalDateTime: new Date("TIMESTAMP"), + * // ArchivalReason: "STRING_VALUE", + * // ArchivalBackupArn: "STRING_VALUE", + * // }, + * // TableClassSummary: { + * // TableClass: "STANDARD" || "STANDARD_INFREQUENT_ACCESS", + * // LastUpdateDateTime: new Date("TIMESTAMP"), + * // }, + * // DeletionProtectionEnabled: true || false, + * // OnDemandThroughput: { + * // MaxReadRequestUnits: Number("long"), + * // MaxWriteRequestUnits: Number("long"), + * // }, + * // WarmThroughput: { + * // ReadUnitsPerSecond: Number("long"), + * // WriteUnitsPerSecond: Number("long"), + * // Status: "CREATING" || "UPDATING" || "DELETING" || "ACTIVE" || "INACCESSIBLE_ENCRYPTION_CREDENTIALS" || "ARCHIVING" || "ARCHIVED", + * // }, + * // MultiRegionConsistency: "EVENTUAL" || "STRONG", + * // }, + * // }; + * + * ``` + * + * @param RestoreTableFromBackupCommandInput - {@link RestoreTableFromBackupCommandInput} + * @returns {@link RestoreTableFromBackupCommandOutput} + * @see {@link RestoreTableFromBackupCommandInput} for command's `input` shape. + * @see {@link RestoreTableFromBackupCommandOutput} for command's `response` shape. + * @see {@link DynamoDBClientResolvedConfig | config} for DynamoDBClient's `config` shape. + * + * @throws {@link BackupInUseException} (client fault) + *

There is another ongoing conflicting backup control plane operation on the table. + * The backup is either being created, deleted or restored to a table.

+ * + * @throws {@link BackupNotFoundException} (client fault) + *

Backup not found for the given BackupARN.

+ * + * @throws {@link InternalServerError} (server fault) + *

An error occurred on the server side.

+ * + * @throws {@link InvalidEndpointException} (client fault) + * + * @throws {@link LimitExceededException} (client fault) + *

There is no limit to the number of daily on-demand backups that can be taken.

+ *

For most purposes, up to 500 simultaneous table operations are allowed per account. These operations + * include CreateTable, UpdateTable, + * DeleteTable,UpdateTimeToLive, + * RestoreTableFromBackup, and RestoreTableToPointInTime.

+ *

When you are creating a table with one or more secondary + * indexes, you can have up to 250 such requests running at a time. However, if the table or + * index specifications are complex, then DynamoDB might temporarily reduce the number + * of concurrent operations.

+ *

When importing into DynamoDB, up to 50 simultaneous import table operations are allowed per account.

+ *

There is a soft account quota of 2,500 tables.

+ *

GetRecords was called with a value of more than 1000 for the limit request parameter.

+ *

More than 2 processes are reading from the same streams shard at the same time. Exceeding + * this limit may result in request throttling.

+ * + * @throws {@link TableAlreadyExistsException} (client fault) + *

A target table with the specified name already exists.

+ * + * @throws {@link TableInUseException} (client fault) + *

A target table with the specified name is either being created or deleted. + *

+ * + * @throws {@link DynamoDBServiceException} + *

Base exception class for all service exceptions from DynamoDB service.

+ * + * + * @public + */ +export declare class RestoreTableFromBackupCommand extends RestoreTableFromBackupCommand_base { + /** @internal type navigation helper, not in runtime. */ + protected static __types: { + api: { + input: RestoreTableFromBackupInput; + output: RestoreTableFromBackupOutput; + }; + sdk: { + input: RestoreTableFromBackupCommandInput; + output: RestoreTableFromBackupCommandOutput; + }; + }; +} diff --git a/amplify/functions/downloadDocument/node_modules/@aws-sdk/client-dynamodb/dist-types/commands/RestoreTableToPointInTimeCommand.d.ts b/amplify/functions/downloadDocument/node_modules/@aws-sdk/client-dynamodb/dist-types/commands/RestoreTableToPointInTimeCommand.d.ts new file mode 100644 index 0000000..156e857 --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@aws-sdk/client-dynamodb/dist-types/commands/RestoreTableToPointInTimeCommand.d.ts @@ -0,0 +1,394 @@ +import { Command as $Command } from "@smithy/smithy-client"; +import { MetadataBearer as __MetadataBearer } from "@smithy/types"; +import { DynamoDBClientResolvedConfig, ServiceInputTypes, ServiceOutputTypes } from "../DynamoDBClient"; +import { RestoreTableToPointInTimeInput, RestoreTableToPointInTimeOutput } from "../models/models_0"; +/** + * @public + */ +export type { __MetadataBearer }; +export { $Command }; +/** + * @public + * + * The input for {@link RestoreTableToPointInTimeCommand}. + */ +export interface RestoreTableToPointInTimeCommandInput extends RestoreTableToPointInTimeInput { +} +/** + * @public + * + * The output of {@link RestoreTableToPointInTimeCommand}. 
+ */ +export interface RestoreTableToPointInTimeCommandOutput extends RestoreTableToPointInTimeOutput, __MetadataBearer { +} +declare const RestoreTableToPointInTimeCommand_base: { + new (input: RestoreTableToPointInTimeCommandInput): import("@smithy/smithy-client").CommandImpl; + new (__0_0: RestoreTableToPointInTimeCommandInput): import("@smithy/smithy-client").CommandImpl; + getEndpointParameterInstructions(): import("@smithy/middleware-endpoint").EndpointParameterInstructions; +}; +/** + *

Restores the specified table to the specified point in time within + * EarliestRestorableDateTime and LatestRestorableDateTime. + * You can restore your table to any point in time in the last 35 days. You can set the + * recovery period to any value between 1 and 35 days. Any number of users can execute up + * to 50 concurrent restores (any type of restore) in a given account.

+ *

When you restore using point in time recovery, DynamoDB restores your table data to + * the state based on the selected date and time (day:hour:minute:second) to a new table.

+ *

Along with data, the following are also included on the new restored table using point + * in time recovery:

+ *
    + *
  • + *

    Global secondary indexes (GSIs)

    + *
  • + *
  • + *

    Local secondary indexes (LSIs)

    + *
  • + *
  • + *

    Provisioned read and write capacity

    + *
  • + *
  • + *

    Encryption settings

    + * + *

    All these settings come from the current settings of the source table at + * the time of restore.

    + *
    + *
  • + *
+ *

You must manually set up the following on the restored table:

+ *
    + *
  • + *

    Auto scaling policies

    + *
  • + *
  • + *

    IAM policies

    + *
  • + *
  • + *

    Amazon CloudWatch metrics and alarms

    + *
  • + *
  • + *

    Tags

    + *
  • + *
  • + *

    Stream settings

    + *
  • + *
  • + *

    Time to Live (TTL) settings

    + *
  • + *
  • + *

    Point in time recovery settings

    + *
  • + *
+ * @example + * Use a bare-bones client and the command you need to make an API call. + * ```javascript + * import { DynamoDBClient, RestoreTableToPointInTimeCommand } from "@aws-sdk/client-dynamodb"; // ES Modules import + * // const { DynamoDBClient, RestoreTableToPointInTimeCommand } = require("@aws-sdk/client-dynamodb"); // CommonJS import + * const client = new DynamoDBClient(config); + * const input = { // RestoreTableToPointInTimeInput + * SourceTableArn: "STRING_VALUE", + * SourceTableName: "STRING_VALUE", + * TargetTableName: "STRING_VALUE", // required + * UseLatestRestorableTime: true || false, + * RestoreDateTime: new Date("TIMESTAMP"), + * BillingModeOverride: "PROVISIONED" || "PAY_PER_REQUEST", + * GlobalSecondaryIndexOverride: [ // GlobalSecondaryIndexList + * { // GlobalSecondaryIndex + * IndexName: "STRING_VALUE", // required + * KeySchema: [ // KeySchema // required + * { // KeySchemaElement + * AttributeName: "STRING_VALUE", // required + * KeyType: "HASH" || "RANGE", // required + * }, + * ], + * Projection: { // Projection + * ProjectionType: "ALL" || "KEYS_ONLY" || "INCLUDE", + * NonKeyAttributes: [ // NonKeyAttributeNameList + * "STRING_VALUE", + * ], + * }, + * ProvisionedThroughput: { // ProvisionedThroughput + * ReadCapacityUnits: Number("long"), // required + * WriteCapacityUnits: Number("long"), // required + * }, + * OnDemandThroughput: { // OnDemandThroughput + * MaxReadRequestUnits: Number("long"), + * MaxWriteRequestUnits: Number("long"), + * }, + * WarmThroughput: { // WarmThroughput + * ReadUnitsPerSecond: Number("long"), + * WriteUnitsPerSecond: Number("long"), + * }, + * }, + * ], + * LocalSecondaryIndexOverride: [ // LocalSecondaryIndexList + * { // LocalSecondaryIndex + * IndexName: "STRING_VALUE", // required + * KeySchema: [ // required + * { + * AttributeName: "STRING_VALUE", // required + * KeyType: "HASH" || "RANGE", // required + * }, + * ], + * Projection: { + * ProjectionType: "ALL" || "KEYS_ONLY" || "INCLUDE", + * 
NonKeyAttributes: [ + * "STRING_VALUE", + * ], + * }, + * }, + * ], + * ProvisionedThroughputOverride: { + * ReadCapacityUnits: Number("long"), // required + * WriteCapacityUnits: Number("long"), // required + * }, + * OnDemandThroughputOverride: { + * MaxReadRequestUnits: Number("long"), + * MaxWriteRequestUnits: Number("long"), + * }, + * SSESpecificationOverride: { // SSESpecification + * Enabled: true || false, + * SSEType: "AES256" || "KMS", + * KMSMasterKeyId: "STRING_VALUE", + * }, + * }; + * const command = new RestoreTableToPointInTimeCommand(input); + * const response = await client.send(command); + * // { // RestoreTableToPointInTimeOutput + * // TableDescription: { // TableDescription + * // AttributeDefinitions: [ // AttributeDefinitions + * // { // AttributeDefinition + * // AttributeName: "STRING_VALUE", // required + * // AttributeType: "S" || "N" || "B", // required + * // }, + * // ], + * // TableName: "STRING_VALUE", + * // KeySchema: [ // KeySchema + * // { // KeySchemaElement + * // AttributeName: "STRING_VALUE", // required + * // KeyType: "HASH" || "RANGE", // required + * // }, + * // ], + * // TableStatus: "CREATING" || "UPDATING" || "DELETING" || "ACTIVE" || "INACCESSIBLE_ENCRYPTION_CREDENTIALS" || "ARCHIVING" || "ARCHIVED", + * // CreationDateTime: new Date("TIMESTAMP"), + * // ProvisionedThroughput: { // ProvisionedThroughputDescription + * // LastIncreaseDateTime: new Date("TIMESTAMP"), + * // LastDecreaseDateTime: new Date("TIMESTAMP"), + * // NumberOfDecreasesToday: Number("long"), + * // ReadCapacityUnits: Number("long"), + * // WriteCapacityUnits: Number("long"), + * // }, + * // TableSizeBytes: Number("long"), + * // ItemCount: Number("long"), + * // TableArn: "STRING_VALUE", + * // TableId: "STRING_VALUE", + * // BillingModeSummary: { // BillingModeSummary + * // BillingMode: "PROVISIONED" || "PAY_PER_REQUEST", + * // LastUpdateToPayPerRequestDateTime: new Date("TIMESTAMP"), + * // }, + * // LocalSecondaryIndexes: [ // 
LocalSecondaryIndexDescriptionList + * // { // LocalSecondaryIndexDescription + * // IndexName: "STRING_VALUE", + * // KeySchema: [ + * // { + * // AttributeName: "STRING_VALUE", // required + * // KeyType: "HASH" || "RANGE", // required + * // }, + * // ], + * // Projection: { // Projection + * // ProjectionType: "ALL" || "KEYS_ONLY" || "INCLUDE", + * // NonKeyAttributes: [ // NonKeyAttributeNameList + * // "STRING_VALUE", + * // ], + * // }, + * // IndexSizeBytes: Number("long"), + * // ItemCount: Number("long"), + * // IndexArn: "STRING_VALUE", + * // }, + * // ], + * // GlobalSecondaryIndexes: [ // GlobalSecondaryIndexDescriptionList + * // { // GlobalSecondaryIndexDescription + * // IndexName: "STRING_VALUE", + * // KeySchema: [ + * // { + * // AttributeName: "STRING_VALUE", // required + * // KeyType: "HASH" || "RANGE", // required + * // }, + * // ], + * // Projection: { + * // ProjectionType: "ALL" || "KEYS_ONLY" || "INCLUDE", + * // NonKeyAttributes: [ + * // "STRING_VALUE", + * // ], + * // }, + * // IndexStatus: "CREATING" || "UPDATING" || "DELETING" || "ACTIVE", + * // Backfilling: true || false, + * // ProvisionedThroughput: { + * // LastIncreaseDateTime: new Date("TIMESTAMP"), + * // LastDecreaseDateTime: new Date("TIMESTAMP"), + * // NumberOfDecreasesToday: Number("long"), + * // ReadCapacityUnits: Number("long"), + * // WriteCapacityUnits: Number("long"), + * // }, + * // IndexSizeBytes: Number("long"), + * // ItemCount: Number("long"), + * // IndexArn: "STRING_VALUE", + * // OnDemandThroughput: { // OnDemandThroughput + * // MaxReadRequestUnits: Number("long"), + * // MaxWriteRequestUnits: Number("long"), + * // }, + * // WarmThroughput: { // GlobalSecondaryIndexWarmThroughputDescription + * // ReadUnitsPerSecond: Number("long"), + * // WriteUnitsPerSecond: Number("long"), + * // Status: "CREATING" || "UPDATING" || "DELETING" || "ACTIVE", + * // }, + * // }, + * // ], + * // StreamSpecification: { // StreamSpecification + * // StreamEnabled: true 
|| false, // required + * // StreamViewType: "NEW_IMAGE" || "OLD_IMAGE" || "NEW_AND_OLD_IMAGES" || "KEYS_ONLY", + * // }, + * // LatestStreamLabel: "STRING_VALUE", + * // LatestStreamArn: "STRING_VALUE", + * // GlobalTableVersion: "STRING_VALUE", + * // Replicas: [ // ReplicaDescriptionList + * // { // ReplicaDescription + * // RegionName: "STRING_VALUE", + * // ReplicaStatus: "CREATING" || "CREATION_FAILED" || "UPDATING" || "DELETING" || "ACTIVE" || "REGION_DISABLED" || "INACCESSIBLE_ENCRYPTION_CREDENTIALS", + * // ReplicaStatusDescription: "STRING_VALUE", + * // ReplicaStatusPercentProgress: "STRING_VALUE", + * // KMSMasterKeyId: "STRING_VALUE", + * // ProvisionedThroughputOverride: { // ProvisionedThroughputOverride + * // ReadCapacityUnits: Number("long"), + * // }, + * // OnDemandThroughputOverride: { // OnDemandThroughputOverride + * // MaxReadRequestUnits: Number("long"), + * // }, + * // WarmThroughput: { // TableWarmThroughputDescription + * // ReadUnitsPerSecond: Number("long"), + * // WriteUnitsPerSecond: Number("long"), + * // Status: "CREATING" || "UPDATING" || "DELETING" || "ACTIVE" || "INACCESSIBLE_ENCRYPTION_CREDENTIALS" || "ARCHIVING" || "ARCHIVED", + * // }, + * // GlobalSecondaryIndexes: [ // ReplicaGlobalSecondaryIndexDescriptionList + * // { // ReplicaGlobalSecondaryIndexDescription + * // IndexName: "STRING_VALUE", + * // ProvisionedThroughputOverride: { + * // ReadCapacityUnits: Number("long"), + * // }, + * // OnDemandThroughputOverride: { + * // MaxReadRequestUnits: Number("long"), + * // }, + * // WarmThroughput: { + * // ReadUnitsPerSecond: Number("long"), + * // WriteUnitsPerSecond: Number("long"), + * // Status: "CREATING" || "UPDATING" || "DELETING" || "ACTIVE", + * // }, + * // }, + * // ], + * // ReplicaInaccessibleDateTime: new Date("TIMESTAMP"), + * // ReplicaTableClassSummary: { // TableClassSummary + * // TableClass: "STANDARD" || "STANDARD_INFREQUENT_ACCESS", + * // LastUpdateDateTime: new Date("TIMESTAMP"), + * // }, + * // }, 
+ * // ], + * // RestoreSummary: { // RestoreSummary + * // SourceBackupArn: "STRING_VALUE", + * // SourceTableArn: "STRING_VALUE", + * // RestoreDateTime: new Date("TIMESTAMP"), // required + * // RestoreInProgress: true || false, // required + * // }, + * // SSEDescription: { // SSEDescription + * // Status: "ENABLING" || "ENABLED" || "DISABLING" || "DISABLED" || "UPDATING", + * // SSEType: "AES256" || "KMS", + * // KMSMasterKeyArn: "STRING_VALUE", + * // InaccessibleEncryptionDateTime: new Date("TIMESTAMP"), + * // }, + * // ArchivalSummary: { // ArchivalSummary + * // ArchivalDateTime: new Date("TIMESTAMP"), + * // ArchivalReason: "STRING_VALUE", + * // ArchivalBackupArn: "STRING_VALUE", + * // }, + * // TableClassSummary: { + * // TableClass: "STANDARD" || "STANDARD_INFREQUENT_ACCESS", + * // LastUpdateDateTime: new Date("TIMESTAMP"), + * // }, + * // DeletionProtectionEnabled: true || false, + * // OnDemandThroughput: { + * // MaxReadRequestUnits: Number("long"), + * // MaxWriteRequestUnits: Number("long"), + * // }, + * // WarmThroughput: { + * // ReadUnitsPerSecond: Number("long"), + * // WriteUnitsPerSecond: Number("long"), + * // Status: "CREATING" || "UPDATING" || "DELETING" || "ACTIVE" || "INACCESSIBLE_ENCRYPTION_CREDENTIALS" || "ARCHIVING" || "ARCHIVED", + * // }, + * // MultiRegionConsistency: "EVENTUAL" || "STRONG", + * // }, + * // }; + * + * ``` + * + * @param RestoreTableToPointInTimeCommandInput - {@link RestoreTableToPointInTimeCommandInput} + * @returns {@link RestoreTableToPointInTimeCommandOutput} + * @see {@link RestoreTableToPointInTimeCommandInput} for command's `input` shape. + * @see {@link RestoreTableToPointInTimeCommandOutput} for command's `response` shape. + * @see {@link DynamoDBClientResolvedConfig | config} for DynamoDBClient's `config` shape. + * + * @throws {@link InternalServerError} (server fault) + *

An error occurred on the server side.

+ * + * @throws {@link InvalidEndpointException} (client fault) + * + * @throws {@link InvalidRestoreTimeException} (client fault) + *

An invalid restore time was specified. RestoreDateTime must be between + * EarliestRestorableDateTime and LatestRestorableDateTime.

+ * + * @throws {@link LimitExceededException} (client fault) + *

There is no limit to the number of daily on-demand backups that can be taken.

+ *

For most purposes, up to 500 simultaneous table operations are allowed per account. These operations + * include CreateTable, UpdateTable, + * DeleteTable,UpdateTimeToLive, + * RestoreTableFromBackup, and RestoreTableToPointInTime.

+ *

When you are creating a table with one or more secondary + * indexes, you can have up to 250 such requests running at a time. However, if the table or + * index specifications are complex, then DynamoDB might temporarily reduce the number + * of concurrent operations.

+ *

When importing into DynamoDB, up to 50 simultaneous import table operations are allowed per account.

+ *

There is a soft account quota of 2,500 tables.

+ *

GetRecords was called with a value of more than 1000 for the limit request parameter.

+ *

More than 2 processes are reading from the same streams shard at the same time. Exceeding + * this limit may result in request throttling.

+ * + * @throws {@link PointInTimeRecoveryUnavailableException} (client fault) + *

Point in time recovery has not yet been enabled for this source table.

+ * + * @throws {@link TableAlreadyExistsException} (client fault) + *

A target table with the specified name already exists.

+ * + * @throws {@link TableInUseException} (client fault) + *

A target table with the specified name is either being created or deleted. + *

+ * + * @throws {@link TableNotFoundException} (client fault) + *

A source table with the name TableName does not currently exist within + * the subscriber's account or the subscriber is operating in the wrong Amazon Web Services Region.

+ * + * @throws {@link DynamoDBServiceException} + *

Base exception class for all service exceptions from DynamoDB service.

+ * + * + * @public + */ +export declare class RestoreTableToPointInTimeCommand extends RestoreTableToPointInTimeCommand_base { + /** @internal type navigation helper, not in runtime. */ + protected static __types: { + api: { + input: RestoreTableToPointInTimeInput; + output: RestoreTableToPointInTimeOutput; + }; + sdk: { + input: RestoreTableToPointInTimeCommandInput; + output: RestoreTableToPointInTimeCommandOutput; + }; + }; +} diff --git a/amplify/functions/downloadDocument/node_modules/@aws-sdk/client-dynamodb/dist-types/commands/ScanCommand.d.ts b/amplify/functions/downloadDocument/node_modules/@aws-sdk/client-dynamodb/dist-types/commands/ScanCommand.d.ts new file mode 100644 index 0000000..64261d8 --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@aws-sdk/client-dynamodb/dist-types/commands/ScanCommand.d.ts @@ -0,0 +1,328 @@ +import { Command as $Command } from "@smithy/smithy-client"; +import { MetadataBearer as __MetadataBearer } from "@smithy/types"; +import { DynamoDBClientResolvedConfig, ServiceInputTypes, ServiceOutputTypes } from "../DynamoDBClient"; +import { ScanInput, ScanOutput } from "../models/models_0"; +/** + * @public + */ +export type { __MetadataBearer }; +export { $Command }; +/** + * @public + * + * The input for {@link ScanCommand}. + */ +export interface ScanCommandInput extends ScanInput { +} +/** + * @public + * + * The output of {@link ScanCommand}. + */ +export interface ScanCommandOutput extends ScanOutput, __MetadataBearer { +} +declare const ScanCommand_base: { + new (input: ScanCommandInput): import("@smithy/smithy-client").CommandImpl; + new (__0_0: ScanCommandInput): import("@smithy/smithy-client").CommandImpl; + getEndpointParameterInstructions(): import("@smithy/middleware-endpoint").EndpointParameterInstructions; +}; +/** + *

The Scan operation returns one or more items and item attributes by + * accessing every item in a table or a secondary index. To have DynamoDB return fewer + * items, you can provide a FilterExpression operation.

+ *

If the total size of scanned items exceeds the maximum dataset size limit of 1 MB, the + * scan completes and results are returned to the user. The LastEvaluatedKey + * value is also returned and the requestor can use the LastEvaluatedKey to + * continue the scan in a subsequent operation. Each scan response also includes number of + * items that were scanned (ScannedCount) as part of the request. If using a + * FilterExpression, a scan result can result in no items meeting the + * criteria and the Count will result in zero. If you did not use a + * FilterExpression in the scan request, then Count is the + * same as ScannedCount.

+ * + *

+ * Count and ScannedCount only return the count of items + * specific to a single scan request and, unless the table is less than 1MB, do not + * represent the total number of items in the table.

+ *
+ *

A single Scan operation first reads up to the maximum number of items set + * (if using the Limit parameter) or a maximum of 1 MB of data and then + * applies any filtering to the results if a FilterExpression is provided. If + * LastEvaluatedKey is present in the response, pagination is required to + * complete the full table scan. For more information, see Paginating the + * Results in the Amazon DynamoDB Developer Guide.

+ *

+ * Scan operations proceed sequentially; however, for faster performance on + * a large table or secondary index, applications can request a parallel Scan + * operation by providing the Segment and TotalSegments + * parameters. For more information, see Parallel + * Scan in the Amazon DynamoDB Developer Guide.

+ *

By default, a Scan uses eventually consistent reads when accessing the + * items in a table. Therefore, the results from an eventually consistent Scan + * may not include the latest item changes at the time the scan iterates through each item + * in the table. If you require a strongly consistent read of each item as the scan + * iterates through the items in the table, you can set the ConsistentRead + * parameter to true. Strong consistency only relates to the consistency of the read at the + * item level.

+ * + *

DynamoDB does not provide snapshot isolation for a scan operation when the + * ConsistentRead parameter is set to true. Thus, a DynamoDB scan + * operation does not guarantee that all reads in a scan see a consistent snapshot of + * the table when the scan operation was requested.

+ *
+ * @example + * Use a bare-bones client and the command you need to make an API call. + * ```javascript + * import { DynamoDBClient, ScanCommand } from "@aws-sdk/client-dynamodb"; // ES Modules import + * // const { DynamoDBClient, ScanCommand } = require("@aws-sdk/client-dynamodb"); // CommonJS import + * const client = new DynamoDBClient(config); + * const input = { // ScanInput + * TableName: "STRING_VALUE", // required + * IndexName: "STRING_VALUE", + * AttributesToGet: [ // AttributeNameList + * "STRING_VALUE", + * ], + * Limit: Number("int"), + * Select: "ALL_ATTRIBUTES" || "ALL_PROJECTED_ATTRIBUTES" || "SPECIFIC_ATTRIBUTES" || "COUNT", + * ScanFilter: { // FilterConditionMap + * "": { // Condition + * AttributeValueList: [ // AttributeValueList + * { // AttributeValue Union: only one key present + * S: "STRING_VALUE", + * N: "STRING_VALUE", + * B: new Uint8Array(), // e.g. Buffer.from("") or new TextEncoder().encode("") + * SS: [ // StringSetAttributeValue + * "STRING_VALUE", + * ], + * NS: [ // NumberSetAttributeValue + * "STRING_VALUE", + * ], + * BS: [ // BinarySetAttributeValue + * new Uint8Array(), // e.g. Buffer.from("") or new TextEncoder().encode("") + * ], + * M: { // MapAttributeValue + * "": {// Union: only one key present + * S: "STRING_VALUE", + * N: "STRING_VALUE", + * B: new Uint8Array(), // e.g. Buffer.from("") or new TextEncoder().encode("") + * SS: [ + * "STRING_VALUE", + * ], + * NS: [ + * "STRING_VALUE", + * ], + * BS: [ + * new Uint8Array(), // e.g. 
Buffer.from("") or new TextEncoder().encode("") + * ], + * M: { + * "": "", + * }, + * L: [ // ListAttributeValue + * "", + * ], + * NULL: true || false, + * BOOL: true || false, + * }, + * }, + * L: [ + * "", + * ], + * NULL: true || false, + * BOOL: true || false, + * }, + * ], + * ComparisonOperator: "EQ" || "NE" || "IN" || "LE" || "LT" || "GE" || "GT" || "BETWEEN" || "NOT_NULL" || "NULL" || "CONTAINS" || "NOT_CONTAINS" || "BEGINS_WITH", // required + * }, + * }, + * ConditionalOperator: "AND" || "OR", + * ExclusiveStartKey: { // Key + * "": "", + * }, + * ReturnConsumedCapacity: "INDEXES" || "TOTAL" || "NONE", + * TotalSegments: Number("int"), + * Segment: Number("int"), + * ProjectionExpression: "STRING_VALUE", + * FilterExpression: "STRING_VALUE", + * ExpressionAttributeNames: { // ExpressionAttributeNameMap + * "": "STRING_VALUE", + * }, + * ExpressionAttributeValues: { // ExpressionAttributeValueMap + * "": "", + * }, + * ConsistentRead: true || false, + * }; + * const command = new ScanCommand(input); + * const response = await client.send(command); + * // { // ScanOutput + * // Items: [ // ItemList + * // { // AttributeMap + * // "": { // AttributeValue Union: only one key present + * // S: "STRING_VALUE", + * // N: "STRING_VALUE", + * // B: new Uint8Array(), + * // SS: [ // StringSetAttributeValue + * // "STRING_VALUE", + * // ], + * // NS: [ // NumberSetAttributeValue + * // "STRING_VALUE", + * // ], + * // BS: [ // BinarySetAttributeValue + * // new Uint8Array(), + * // ], + * // M: { // MapAttributeValue + * // "": {// Union: only one key present + * // S: "STRING_VALUE", + * // N: "STRING_VALUE", + * // B: new Uint8Array(), + * // SS: [ + * // "STRING_VALUE", + * // ], + * // NS: [ + * // "STRING_VALUE", + * // ], + * // BS: [ + * // new Uint8Array(), + * // ], + * // M: { + * // "": "", + * // }, + * // L: [ // ListAttributeValue + * // "", + * // ], + * // NULL: true || false, + * // BOOL: true || false, + * // }, + * // }, + * // L: [ + * // "", + 
* // ], + * // NULL: true || false, + * // BOOL: true || false, + * // }, + * // }, + * // ], + * // Count: Number("int"), + * // ScannedCount: Number("int"), + * // LastEvaluatedKey: { // Key + * // "": "", + * // }, + * // ConsumedCapacity: { // ConsumedCapacity + * // TableName: "STRING_VALUE", + * // CapacityUnits: Number("double"), + * // ReadCapacityUnits: Number("double"), + * // WriteCapacityUnits: Number("double"), + * // Table: { // Capacity + * // ReadCapacityUnits: Number("double"), + * // WriteCapacityUnits: Number("double"), + * // CapacityUnits: Number("double"), + * // }, + * // LocalSecondaryIndexes: { // SecondaryIndexesCapacityMap + * // "": { + * // ReadCapacityUnits: Number("double"), + * // WriteCapacityUnits: Number("double"), + * // CapacityUnits: Number("double"), + * // }, + * // }, + * // GlobalSecondaryIndexes: { + * // "": { + * // ReadCapacityUnits: Number("double"), + * // WriteCapacityUnits: Number("double"), + * // CapacityUnits: Number("double"), + * // }, + * // }, + * // }, + * // }; + * + * ``` + * + * @param ScanCommandInput - {@link ScanCommandInput} + * @returns {@link ScanCommandOutput} + * @see {@link ScanCommandInput} for command's `input` shape. + * @see {@link ScanCommandOutput} for command's `response` shape. + * @see {@link DynamoDBClientResolvedConfig | config} for DynamoDBClient's `config` shape. + * + * @throws {@link InternalServerError} (server fault) + *

An error occurred on the server side.

+ * + * @throws {@link InvalidEndpointException} (client fault) + * + * @throws {@link ProvisionedThroughputExceededException} (client fault) + *

Your request rate is too high. The Amazon Web Services SDKs for DynamoDB + * automatically retry requests that receive this exception. Your request is eventually + * successful, unless your retry queue is too large to finish. Reduce the frequency of + * requests and use exponential backoff. For more information, go to Error Retries and Exponential Backoff in the Amazon DynamoDB Developer Guide.

+ * + * @throws {@link RequestLimitExceeded} (client fault) + *

Throughput exceeds the current throughput quota for your account. Please contact + * Amazon Web ServicesSupport to request a + * quota increase.

+ * + * @throws {@link ResourceNotFoundException} (client fault) + *

The operation tried to access a nonexistent table or index. The resource might not + * be specified correctly, or its status might not be ACTIVE.

+ * + * @throws {@link DynamoDBServiceException} + *

Base exception class for all service exceptions from DynamoDB service.

+ * + * + * @example To scan a table + * ```javascript + * // This example scans the entire Music table, and then narrows the results to songs by the artist "No One You Know". For each item, only the album title and song title are returned. + * const input = { + * ExpressionAttributeNames: { + * #AT: "AlbumTitle", + * #ST: "SongTitle" + * }, + * ExpressionAttributeValues: { + * :a: { + * S: "No One You Know" + * } + * }, + * FilterExpression: "Artist = :a", + * ProjectionExpression: "#ST, #AT", + * TableName: "Music" + * }; + * const command = new ScanCommand(input); + * const response = await client.send(command); + * /* response is + * { + * ConsumedCapacity: { /* empty *\/ }, + * Count: 2, + * Items: [ + * { + * AlbumTitle: { + * S: "Somewhat Famous" + * }, + * SongTitle: { + * S: "Call Me Today" + * } + * }, + * { + * AlbumTitle: { + * S: "Blue Sky Blues" + * }, + * SongTitle: { + * S: "Scared of My Shadow" + * } + * } + * ], + * ScannedCount: 3 + * } + * *\/ + * ``` + * + * @public + */ +export declare class ScanCommand extends ScanCommand_base { + /** @internal type navigation helper, not in runtime. 
*/ + protected static __types: { + api: { + input: ScanInput; + output: ScanOutput; + }; + sdk: { + input: ScanCommandInput; + output: ScanCommandOutput; + }; + }; +} diff --git a/amplify/functions/downloadDocument/node_modules/@aws-sdk/client-dynamodb/dist-types/commands/TagResourceCommand.d.ts b/amplify/functions/downloadDocument/node_modules/@aws-sdk/client-dynamodb/dist-types/commands/TagResourceCommand.d.ts new file mode 100644 index 0000000..8475f32 --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@aws-sdk/client-dynamodb/dist-types/commands/TagResourceCommand.d.ts @@ -0,0 +1,139 @@ +import { Command as $Command } from "@smithy/smithy-client"; +import { MetadataBearer as __MetadataBearer } from "@smithy/types"; +import { DynamoDBClientResolvedConfig, ServiceInputTypes, ServiceOutputTypes } from "../DynamoDBClient"; +import { TagResourceInput } from "../models/models_0"; +/** + * @public + */ +export type { __MetadataBearer }; +export { $Command }; +/** + * @public + * + * The input for {@link TagResourceCommand}. + */ +export interface TagResourceCommandInput extends TagResourceInput { +} +/** + * @public + * + * The output of {@link TagResourceCommand}. + */ +export interface TagResourceCommandOutput extends __MetadataBearer { +} +declare const TagResourceCommand_base: { + new (input: TagResourceCommandInput): import("@smithy/smithy-client").CommandImpl; + new (__0_0: TagResourceCommandInput): import("@smithy/smithy-client").CommandImpl; + getEndpointParameterInstructions(): import("@smithy/middleware-endpoint").EndpointParameterInstructions; +}; +/** + *

Associate a set of tags with an Amazon DynamoDB resource. You can then activate these + * user-defined tags so that they appear on the Billing and Cost Management console for + * cost allocation tracking. You can call TagResource up to five times per second, per + * account.

+ *
    + *
  • + *

    + * TagResource is an asynchronous operation. If you issue a ListTagsOfResource request immediately after a + * TagResource request, DynamoDB might return your + * previous tag set, if there was one, or an empty tag set. This is because + * ListTagsOfResource uses an eventually consistent query, and the + * metadata for your tags or table might not be available at that moment. Wait for + * a few seconds, and then try the ListTagsOfResource request + * again.

    + *
  • + *
  • + *

    The application or removal of tags using TagResource and + * UntagResource APIs is eventually consistent. + * ListTagsOfResource API will only reflect the changes after a + * few seconds.

    + *
  • + *
+ *

For an overview on tagging DynamoDB resources, see Tagging for DynamoDB + * in the Amazon DynamoDB Developer Guide.

+ * @example + * Use a bare-bones client and the command you need to make an API call. + * ```javascript + * import { DynamoDBClient, TagResourceCommand } from "@aws-sdk/client-dynamodb"; // ES Modules import + * // const { DynamoDBClient, TagResourceCommand } = require("@aws-sdk/client-dynamodb"); // CommonJS import + * const client = new DynamoDBClient(config); + * const input = { // TagResourceInput + * ResourceArn: "STRING_VALUE", // required + * Tags: [ // TagList // required + * { // Tag + * Key: "STRING_VALUE", // required + * Value: "STRING_VALUE", // required + * }, + * ], + * }; + * const command = new TagResourceCommand(input); + * const response = await client.send(command); + * // {}; + * + * ``` + * + * @param TagResourceCommandInput - {@link TagResourceCommandInput} + * @returns {@link TagResourceCommandOutput} + * @see {@link TagResourceCommandInput} for command's `input` shape. + * @see {@link TagResourceCommandOutput} for command's `response` shape. + * @see {@link DynamoDBClientResolvedConfig | config} for DynamoDBClient's `config` shape. + * + * @throws {@link InternalServerError} (server fault) + *

An error occurred on the server side.

+ * + * @throws {@link InvalidEndpointException} (client fault) + * + * @throws {@link LimitExceededException} (client fault) + *

There is no limit to the number of daily on-demand backups that can be taken.

+ *

For most purposes, up to 500 simultaneous table operations are allowed per account. These operations + * include CreateTable, UpdateTable, + * DeleteTable,UpdateTimeToLive, + * RestoreTableFromBackup, and RestoreTableToPointInTime.

+ *

When you are creating a table with one or more secondary + * indexes, you can have up to 250 such requests running at a time. However, if the table or + * index specifications are complex, then DynamoDB might temporarily reduce the number + * of concurrent operations.

+ *

When importing into DynamoDB, up to 50 simultaneous import table operations are allowed per account.

+ *

There is a soft account quota of 2,500 tables.

+ *

GetRecords was called with a value of more than 1000 for the limit request parameter.

+ *

More than 2 processes are reading from the same streams shard at the same time. Exceeding + * this limit may result in request throttling.

+ * + * @throws {@link ResourceInUseException} (client fault) + *

The operation conflicts with the resource's availability. For example:

+ *
    + *
  • + *

    You attempted to recreate an existing table.

    + *
  • + *
  • + *

    You tried to delete a table currently in the CREATING state.

    + *
  • + *
  • + *

    You tried to update a resource that was already being updated.

    + *
  • + *
+ *

When appropriate, wait for the ongoing update to complete and attempt the request again.

+ * + * @throws {@link ResourceNotFoundException} (client fault) + *

The operation tried to access a nonexistent table or index. The resource might not + * be specified correctly, or its status might not be ACTIVE.

+ * + * @throws {@link DynamoDBServiceException} + *

Base exception class for all service exceptions from DynamoDB service.

+ * + * + * @public + */ +export declare class TagResourceCommand extends TagResourceCommand_base { + /** @internal type navigation helper, not in runtime. */ + protected static __types: { + api: { + input: TagResourceInput; + output: {}; + }; + sdk: { + input: TagResourceCommandInput; + output: TagResourceCommandOutput; + }; + }; +} diff --git a/amplify/functions/downloadDocument/node_modules/@aws-sdk/client-dynamodb/dist-types/commands/TransactGetItemsCommand.d.ts b/amplify/functions/downloadDocument/node_modules/@aws-sdk/client-dynamodb/dist-types/commands/TransactGetItemsCommand.d.ts new file mode 100644 index 0000000..a9d9997 --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@aws-sdk/client-dynamodb/dist-types/commands/TransactGetItemsCommand.d.ts @@ -0,0 +1,489 @@ +import { Command as $Command } from "@smithy/smithy-client"; +import { MetadataBearer as __MetadataBearer } from "@smithy/types"; +import { DynamoDBClientResolvedConfig, ServiceInputTypes, ServiceOutputTypes } from "../DynamoDBClient"; +import { TransactGetItemsInput, TransactGetItemsOutput } from "../models/models_0"; +/** + * @public + */ +export type { __MetadataBearer }; +export { $Command }; +/** + * @public + * + * The input for {@link TransactGetItemsCommand}. + */ +export interface TransactGetItemsCommandInput extends TransactGetItemsInput { +} +/** + * @public + * + * The output of {@link TransactGetItemsCommand}. + */ +export interface TransactGetItemsCommandOutput extends TransactGetItemsOutput, __MetadataBearer { +} +declare const TransactGetItemsCommand_base: { + new (input: TransactGetItemsCommandInput): import("@smithy/smithy-client").CommandImpl; + new (__0_0: TransactGetItemsCommandInput): import("@smithy/smithy-client").CommandImpl; + getEndpointParameterInstructions(): import("@smithy/middleware-endpoint").EndpointParameterInstructions; +}; +/** + *

+ * TransactGetItems is a synchronous operation that atomically retrieves + * multiple items from one or more tables (but not from indexes) in a single account and + * Region. A TransactGetItems call can contain up to 100 + * TransactGetItem objects, each of which contains a Get + * structure that specifies an item to retrieve from a table in the account and Region. A + * call to TransactGetItems cannot retrieve items from tables in more than one + * Amazon Web Services account or Region. The aggregate size of the items in the + * transaction cannot exceed 4 MB.

+ *

DynamoDB rejects the entire TransactGetItems request if any of + * the following is true:

+ *
    + *
  • + *

    A conflicting operation is in the process of updating an item to be + * read.

    + *
  • + *
  • + *

    There is insufficient provisioned capacity for the transaction to be + * completed.

    + *
  • + *
  • + *

    There is a user error, such as an invalid data format.

    + *
  • + *
  • + *

    The aggregate size of the items in the transaction exceeded 4 MB.

    + *
  • + *
+ * @example + * Use a bare-bones client and the command you need to make an API call. + * ```javascript + * import { DynamoDBClient, TransactGetItemsCommand } from "@aws-sdk/client-dynamodb"; // ES Modules import + * // const { DynamoDBClient, TransactGetItemsCommand } = require("@aws-sdk/client-dynamodb"); // CommonJS import + * const client = new DynamoDBClient(config); + * const input = { // TransactGetItemsInput + * TransactItems: [ // TransactGetItemList // required + * { // TransactGetItem + * Get: { // Get + * Key: { // Key // required + * "": { // AttributeValue Union: only one key present + * S: "STRING_VALUE", + * N: "STRING_VALUE", + * B: new Uint8Array(), // e.g. Buffer.from("") or new TextEncoder().encode("") + * SS: [ // StringSetAttributeValue + * "STRING_VALUE", + * ], + * NS: [ // NumberSetAttributeValue + * "STRING_VALUE", + * ], + * BS: [ // BinarySetAttributeValue + * new Uint8Array(), // e.g. Buffer.from("") or new TextEncoder().encode("") + * ], + * M: { // MapAttributeValue + * "": {// Union: only one key present + * S: "STRING_VALUE", + * N: "STRING_VALUE", + * B: new Uint8Array(), // e.g. Buffer.from("") or new TextEncoder().encode("") + * SS: [ + * "STRING_VALUE", + * ], + * NS: [ + * "STRING_VALUE", + * ], + * BS: [ + * new Uint8Array(), // e.g. 
Buffer.from("") or new TextEncoder().encode("") + * ], + * M: { + * "": "", + * }, + * L: [ // ListAttributeValue + * "", + * ], + * NULL: true || false, + * BOOL: true || false, + * }, + * }, + * L: [ + * "", + * ], + * NULL: true || false, + * BOOL: true || false, + * }, + * }, + * TableName: "STRING_VALUE", // required + * ProjectionExpression: "STRING_VALUE", + * ExpressionAttributeNames: { // ExpressionAttributeNameMap + * "": "STRING_VALUE", + * }, + * }, + * }, + * ], + * ReturnConsumedCapacity: "INDEXES" || "TOTAL" || "NONE", + * }; + * const command = new TransactGetItemsCommand(input); + * const response = await client.send(command); + * // { // TransactGetItemsOutput + * // ConsumedCapacity: [ // ConsumedCapacityMultiple + * // { // ConsumedCapacity + * // TableName: "STRING_VALUE", + * // CapacityUnits: Number("double"), + * // ReadCapacityUnits: Number("double"), + * // WriteCapacityUnits: Number("double"), + * // Table: { // Capacity + * // ReadCapacityUnits: Number("double"), + * // WriteCapacityUnits: Number("double"), + * // CapacityUnits: Number("double"), + * // }, + * // LocalSecondaryIndexes: { // SecondaryIndexesCapacityMap + * // "": { + * // ReadCapacityUnits: Number("double"), + * // WriteCapacityUnits: Number("double"), + * // CapacityUnits: Number("double"), + * // }, + * // }, + * // GlobalSecondaryIndexes: { + * // "": { + * // ReadCapacityUnits: Number("double"), + * // WriteCapacityUnits: Number("double"), + * // CapacityUnits: Number("double"), + * // }, + * // }, + * // }, + * // ], + * // Responses: [ // ItemResponseList + * // { // ItemResponse + * // Item: { // AttributeMap + * // "": { // AttributeValue Union: only one key present + * // S: "STRING_VALUE", + * // N: "STRING_VALUE", + * // B: new Uint8Array(), + * // SS: [ // StringSetAttributeValue + * // "STRING_VALUE", + * // ], + * // NS: [ // NumberSetAttributeValue + * // "STRING_VALUE", + * // ], + * // BS: [ // BinarySetAttributeValue + * // new Uint8Array(), + * // ], + 
* // M: { // MapAttributeValue + * // "": {// Union: only one key present + * // S: "STRING_VALUE", + * // N: "STRING_VALUE", + * // B: new Uint8Array(), + * // SS: [ + * // "STRING_VALUE", + * // ], + * // NS: [ + * // "STRING_VALUE", + * // ], + * // BS: [ + * // new Uint8Array(), + * // ], + * // M: { + * // "": "", + * // }, + * // L: [ // ListAttributeValue + * // "", + * // ], + * // NULL: true || false, + * // BOOL: true || false, + * // }, + * // }, + * // L: [ + * // "", + * // ], + * // NULL: true || false, + * // BOOL: true || false, + * // }, + * // }, + * // }, + * // ], + * // }; + * + * ``` + * + * @param TransactGetItemsCommandInput - {@link TransactGetItemsCommandInput} + * @returns {@link TransactGetItemsCommandOutput} + * @see {@link TransactGetItemsCommandInput} for command's `input` shape. + * @see {@link TransactGetItemsCommandOutput} for command's `response` shape. + * @see {@link DynamoDBClientResolvedConfig | config} for DynamoDBClient's `config` shape. + * + * @throws {@link InternalServerError} (server fault) + *

An error occurred on the server side.

+ * + * @throws {@link InvalidEndpointException} (client fault) + * + * @throws {@link ProvisionedThroughputExceededException} (client fault) + *

Your request rate is too high. The Amazon Web Services SDKs for DynamoDB + * automatically retry requests that receive this exception. Your request is eventually + * successful, unless your retry queue is too large to finish. Reduce the frequency of + * requests and use exponential backoff. For more information, go to Error Retries and Exponential Backoff in the Amazon DynamoDB Developer Guide.

+ * + * @throws {@link RequestLimitExceeded} (client fault) + *

Throughput exceeds the current throughput quota for your account. Please contact + * Amazon Web ServicesSupport to request a + * quota increase.

+ * + * @throws {@link ResourceNotFoundException} (client fault) + *

The operation tried to access a nonexistent table or index. The resource might not + * be specified correctly, or its status might not be ACTIVE.

+ * + * @throws {@link TransactionCanceledException} (client fault) + *

The entire transaction request was canceled.

+ *

DynamoDB cancels a TransactWriteItems request under the following + * circumstances:

+ *
    + *
  • + *

    A condition in one of the condition expressions is not met.

    + *
  • + *
  • + *

    A table in the TransactWriteItems request is in a different + * account or region.

    + *
  • + *
  • + *

    More than one action in the TransactWriteItems operation + * targets the same item.

    + *
  • + *
  • + *

    There is insufficient provisioned capacity for the transaction to be + * completed.

    + *
  • + *
  • + *

    An item size becomes too large (larger than 400 KB), or a local secondary + * index (LSI) becomes too large, or a similar validation error occurs because of + * changes made by the transaction.

    + *
  • + *
  • + *

    There is a user error, such as an invalid data format.

    + *
  • + *
  • + *

    + * There is an ongoing TransactWriteItems operation that conflicts with a concurrent + * TransactWriteItems request. In this case the TransactWriteItems operation + * fails with a TransactionCanceledException. + *

    + *
  • + *
+ *

DynamoDB cancels a TransactGetItems request under the + * following circumstances:

+ *
    + *
  • + *

    There is an ongoing TransactGetItems operation that conflicts + * with a concurrent PutItem, UpdateItem, + * DeleteItem or TransactWriteItems request. In this + * case the TransactGetItems operation fails with a + * TransactionCanceledException.

    + *
  • + *
  • + *

    A table in the TransactGetItems request is in a different + * account or region.

    + *
  • + *
  • + *

    There is insufficient provisioned capacity for the transaction to be + * completed.

    + *
  • + *
  • + *

    There is a user error, such as an invalid data format.

    + *
  • + *
+ * + *

If using Java, DynamoDB lists the cancellation reasons on the + * CancellationReasons property. This property is not set for other + * languages. Transaction cancellation reasons are ordered in the order of requested + * items, if an item has no error it will have None code and + * Null message.

+ *
+ *

Cancellation reason codes and possible error messages:

+ *
    + *
  • + *

    No Errors:

    + *
      + *
    • + *

      Code: None + *

      + *
    • + *
    • + *

      Message: null + *

      + *
    • + *
    + *
  • + *
  • + *

    Conditional Check Failed:

    + *
      + *
    • + *

      Code: ConditionalCheckFailed + *

      + *
    • + *
    • + *

      Message: The conditional request failed.

      + *
    • + *
    + *
  • + *
  • + *

    Item Collection Size Limit Exceeded:

    + *
      + *
    • + *

      Code: ItemCollectionSizeLimitExceeded + *

      + *
    • + *
    • + *

      Message: Collection size exceeded.

      + *
    • + *
    + *
  • + *
  • + *

    Transaction Conflict:

    + *
      + *
    • + *

      Code: TransactionConflict + *

      + *
    • + *
    • + *

      Message: Transaction is ongoing for the item.

      + *
    • + *
    + *
  • + *
  • + *

    Provisioned Throughput Exceeded:

    + *
      + *
    • + *

      Code: ProvisionedThroughputExceeded + *

      + *
    • + *
    • + *

      Messages:

      + *
        + *
      • + *

        The level of configured provisioned throughput for the + * table was exceeded. Consider increasing your provisioning level + * with the UpdateTable API.

        + * + *

        This Message is received when provisioned throughput is + * exceeded is on a provisioned DynamoDB + * table.

        + *
        + *
      • + *
      • + *

        The level of configured provisioned throughput for one or + * more global secondary indexes of the table was exceeded. + * Consider increasing your provisioning level for the + * under-provisioned global secondary indexes with the UpdateTable + * API.

        + * + *

        This message is returned when provisioned throughput is + * exceeded is on a provisioned GSI.

        + *
        + *
      • + *
      + *
    • + *
    + *
  • + *
  • + *

    Throttling Error:

    + *
      + *
    • + *

      Code: ThrottlingError + *

      + *
    • + *
    • + *

      Messages:

      + *
        + *
      • + *

        Throughput exceeds the current capacity of your table or + * index. DynamoDB is automatically scaling your table or + * index so please try again shortly. If exceptions persist, check + * if you have a hot key: + * https://docs.aws.amazon.com/amazondynamodb/latest/developerguide/bp-partition-key-design.html.

        + * + *

        This message is returned when writes get throttled on an + * On-Demand table as DynamoDB is automatically + * scaling the table.

        + *
        + *
      • + *
      • + *

        Throughput exceeds the current capacity for one or more + * global secondary indexes. DynamoDB is automatically + * scaling your index so please try again shortly.

        + * + *

        This message is returned when writes get throttled on + * an On-Demand GSI as DynamoDB is automatically + * scaling the GSI.

        + *
        + *
      • + *
      + *
    • + *
    + *
  • + *
  • + *

    Validation Error:

    + *
      + *
    • + *

      Code: ValidationError + *

      + *
    • + *
    • + *

      Messages:

      + *
        + *
      • + *

        One or more parameter values were invalid.

        + *
      • + *
      • + *

        The update expression attempted to update the secondary + * index key beyond allowed size limits.

        + *
      • + *
      • + *

        The update expression attempted to update the secondary + * index key to unsupported type.

        + *
      • + *
      • + *

        An operand in the update expression has an incorrect data + * type.

        + *
      • + *
      • + *

        Item size to update has exceeded the maximum allowed + * size.

        + *
      • + *
      • + *

        Number overflow. Attempting to store a number with + * magnitude larger than supported range.

        + *
      • + *
      • + *

        Type mismatch for attribute to update.

        + *
      • + *
      • + *

        Nesting Levels have exceeded supported limits.

        + *
      • + *
      • + *

        The document path provided in the update expression is + * invalid for update.

        + *
      • + *
      • + *

        The provided expression refers to an attribute that does + * not exist in the item.

        + *
      • + *
      + *
    • + *
    + *
  • + *
+ * + * @throws {@link DynamoDBServiceException} + *

Base exception class for all service exceptions from DynamoDB service.

+ * + * + * @public + */ +export declare class TransactGetItemsCommand extends TransactGetItemsCommand_base { + /** @internal type navigation helper, not in runtime. */ + protected static __types: { + api: { + input: TransactGetItemsInput; + output: TransactGetItemsOutput; + }; + sdk: { + input: TransactGetItemsCommandInput; + output: TransactGetItemsCommandOutput; + }; + }; +} diff --git a/amplify/functions/downloadDocument/node_modules/@aws-sdk/client-dynamodb/dist-types/commands/TransactWriteItemsCommand.d.ts b/amplify/functions/downloadDocument/node_modules/@aws-sdk/client-dynamodb/dist-types/commands/TransactWriteItemsCommand.d.ts new file mode 100644 index 0000000..644f975 --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@aws-sdk/client-dynamodb/dist-types/commands/TransactWriteItemsCommand.d.ts @@ -0,0 +1,658 @@ +import { Command as $Command } from "@smithy/smithy-client"; +import { MetadataBearer as __MetadataBearer } from "@smithy/types"; +import { DynamoDBClientResolvedConfig, ServiceInputTypes, ServiceOutputTypes } from "../DynamoDBClient"; +import { TransactWriteItemsInput, TransactWriteItemsOutput } from "../models/models_0"; +/** + * @public + */ +export type { __MetadataBearer }; +export { $Command }; +/** + * @public + * + * The input for {@link TransactWriteItemsCommand}. + */ +export interface TransactWriteItemsCommandInput extends TransactWriteItemsInput { +} +/** + * @public + * + * The output of {@link TransactWriteItemsCommand}. + */ +export interface TransactWriteItemsCommandOutput extends TransactWriteItemsOutput, __MetadataBearer { +} +declare const TransactWriteItemsCommand_base: { + new (input: TransactWriteItemsCommandInput): import("@smithy/smithy-client").CommandImpl; + new (__0_0: TransactWriteItemsCommandInput): import("@smithy/smithy-client").CommandImpl; + getEndpointParameterInstructions(): import("@smithy/middleware-endpoint").EndpointParameterInstructions; +}; +/** + *

+ * TransactWriteItems is a synchronous write operation that groups up to 100 + * action requests. These actions can target items in different tables, but not in + * different Amazon Web Services accounts or Regions, and no two actions can target the same + * item. For example, you cannot both ConditionCheck and Update + * the same item. The aggregate size of the items in the transaction cannot exceed 4 + * MB.

+ *

The actions are completed atomically so that either all of them succeed, or all of + * them fail. They are defined by the following objects:

+ *
    + *
  • + *

    + * Put  —   Initiates a PutItem + * operation to write a new item. This structure specifies the primary key of the + * item to be written, the name of the table to write it in, an optional condition + * expression that must be satisfied for the write to succeed, a list of the item's + * attributes, and a field indicating whether to retrieve the item's attributes if + * the condition is not met.

    + *
  • + *
  • + *

    + * Update  —   Initiates an UpdateItem + * operation to update an existing item. This structure specifies the primary key + * of the item to be updated, the name of the table where it resides, an optional + * condition expression that must be satisfied for the update to succeed, an + * expression that defines one or more attributes to be updated, and a field + * indicating whether to retrieve the item's attributes if the condition is not + * met.

    + *
  • + *
  • + *

    + * Delete  —   Initiates a DeleteItem + * operation to delete an existing item. This structure specifies the primary key + * of the item to be deleted, the name of the table where it resides, an optional + * condition expression that must be satisfied for the deletion to succeed, and a + * field indicating whether to retrieve the item's attributes if the condition is + * not met.

    + *
  • + *
  • + *

    + * ConditionCheck  —   Applies a condition to an item + * that is not being modified by the transaction. This structure specifies the + * primary key of the item to be checked, the name of the table where it resides, a + * condition expression that must be satisfied for the transaction to succeed, and + * a field indicating whether to retrieve the item's attributes if the condition is + * not met.

    + *
  • + *
+ *

DynamoDB rejects the entire TransactWriteItems request if any of the + * following is true:

+ *
    + *
  • + *

    A condition in one of the condition expressions is not met.

    + *
  • + *
  • + *

    An ongoing operation is in the process of updating the same item.

    + *
  • + *
  • + *

    There is insufficient provisioned capacity for the transaction to be + * completed.

    + *
  • + *
  • + *

    An item size becomes too large (bigger than 400 KB), a local secondary index + * (LSI) becomes too large, or a similar validation error occurs because of changes + * made by the transaction.

    + *
  • + *
  • + *

    The aggregate size of the items in the transaction exceeds 4 MB.

    + *
  • + *
  • + *

    There is a user error, such as an invalid data format.

    + *
  • + *
+ * @example + * Use a bare-bones client and the command you need to make an API call. + * ```javascript + * import { DynamoDBClient, TransactWriteItemsCommand } from "@aws-sdk/client-dynamodb"; // ES Modules import + * // const { DynamoDBClient, TransactWriteItemsCommand } = require("@aws-sdk/client-dynamodb"); // CommonJS import + * const client = new DynamoDBClient(config); + * const input = { // TransactWriteItemsInput + * TransactItems: [ // TransactWriteItemList // required + * { // TransactWriteItem + * ConditionCheck: { // ConditionCheck + * Key: { // Key // required + * "": { // AttributeValue Union: only one key present + * S: "STRING_VALUE", + * N: "STRING_VALUE", + * B: new Uint8Array(), // e.g. Buffer.from("") or new TextEncoder().encode("") + * SS: [ // StringSetAttributeValue + * "STRING_VALUE", + * ], + * NS: [ // NumberSetAttributeValue + * "STRING_VALUE", + * ], + * BS: [ // BinarySetAttributeValue + * new Uint8Array(), // e.g. Buffer.from("") or new TextEncoder().encode("") + * ], + * M: { // MapAttributeValue + * "": {// Union: only one key present + * S: "STRING_VALUE", + * N: "STRING_VALUE", + * B: new Uint8Array(), // e.g. Buffer.from("") or new TextEncoder().encode("") + * SS: [ + * "STRING_VALUE", + * ], + * NS: [ + * "STRING_VALUE", + * ], + * BS: [ + * new Uint8Array(), // e.g. 
Buffer.from("") or new TextEncoder().encode("") + * ], + * M: { + * "": "", + * }, + * L: [ // ListAttributeValue + * "", + * ], + * NULL: true || false, + * BOOL: true || false, + * }, + * }, + * L: [ + * "", + * ], + * NULL: true || false, + * BOOL: true || false, + * }, + * }, + * TableName: "STRING_VALUE", // required + * ConditionExpression: "STRING_VALUE", // required + * ExpressionAttributeNames: { // ExpressionAttributeNameMap + * "": "STRING_VALUE", + * }, + * ExpressionAttributeValues: { // ExpressionAttributeValueMap + * "": "", + * }, + * ReturnValuesOnConditionCheckFailure: "ALL_OLD" || "NONE", + * }, + * Put: { // Put + * Item: { // PutItemInputAttributeMap // required + * "": "", + * }, + * TableName: "STRING_VALUE", // required + * ConditionExpression: "STRING_VALUE", + * ExpressionAttributeNames: { + * "": "STRING_VALUE", + * }, + * ExpressionAttributeValues: { + * "": "", + * }, + * ReturnValuesOnConditionCheckFailure: "ALL_OLD" || "NONE", + * }, + * Delete: { // Delete + * Key: { // required + * "": "", + * }, + * TableName: "STRING_VALUE", // required + * ConditionExpression: "STRING_VALUE", + * ExpressionAttributeNames: { + * "": "STRING_VALUE", + * }, + * ExpressionAttributeValues: { + * "": "", + * }, + * ReturnValuesOnConditionCheckFailure: "ALL_OLD" || "NONE", + * }, + * Update: { // Update + * Key: { // required + * "": "", + * }, + * UpdateExpression: "STRING_VALUE", // required + * TableName: "STRING_VALUE", // required + * ConditionExpression: "STRING_VALUE", + * ExpressionAttributeNames: { + * "": "STRING_VALUE", + * }, + * ExpressionAttributeValues: { + * "": "", + * }, + * ReturnValuesOnConditionCheckFailure: "ALL_OLD" || "NONE", + * }, + * }, + * ], + * ReturnConsumedCapacity: "INDEXES" || "TOTAL" || "NONE", + * ReturnItemCollectionMetrics: "SIZE" || "NONE", + * ClientRequestToken: "STRING_VALUE", + * }; + * const command = new TransactWriteItemsCommand(input); + * const response = await client.send(command); + * // { // 
TransactWriteItemsOutput + * // ConsumedCapacity: [ // ConsumedCapacityMultiple + * // { // ConsumedCapacity + * // TableName: "STRING_VALUE", + * // CapacityUnits: Number("double"), + * // ReadCapacityUnits: Number("double"), + * // WriteCapacityUnits: Number("double"), + * // Table: { // Capacity + * // ReadCapacityUnits: Number("double"), + * // WriteCapacityUnits: Number("double"), + * // CapacityUnits: Number("double"), + * // }, + * // LocalSecondaryIndexes: { // SecondaryIndexesCapacityMap + * // "": { + * // ReadCapacityUnits: Number("double"), + * // WriteCapacityUnits: Number("double"), + * // CapacityUnits: Number("double"), + * // }, + * // }, + * // GlobalSecondaryIndexes: { + * // "": { + * // ReadCapacityUnits: Number("double"), + * // WriteCapacityUnits: Number("double"), + * // CapacityUnits: Number("double"), + * // }, + * // }, + * // }, + * // ], + * // ItemCollectionMetrics: { // ItemCollectionMetricsPerTable + * // "": [ // ItemCollectionMetricsMultiple + * // { // ItemCollectionMetrics + * // ItemCollectionKey: { // ItemCollectionKeyAttributeMap + * // "": { // AttributeValue Union: only one key present + * // S: "STRING_VALUE", + * // N: "STRING_VALUE", + * // B: new Uint8Array(), + * // SS: [ // StringSetAttributeValue + * // "STRING_VALUE", + * // ], + * // NS: [ // NumberSetAttributeValue + * // "STRING_VALUE", + * // ], + * // BS: [ // BinarySetAttributeValue + * // new Uint8Array(), + * // ], + * // M: { // MapAttributeValue + * // "": {// Union: only one key present + * // S: "STRING_VALUE", + * // N: "STRING_VALUE", + * // B: new Uint8Array(), + * // SS: [ + * // "STRING_VALUE", + * // ], + * // NS: [ + * // "STRING_VALUE", + * // ], + * // BS: [ + * // new Uint8Array(), + * // ], + * // M: { + * // "": "", + * // }, + * // L: [ // ListAttributeValue + * // "", + * // ], + * // NULL: true || false, + * // BOOL: true || false, + * // }, + * // }, + * // L: [ + * // "", + * // ], + * // NULL: true || false, + * // BOOL: true || false, + 
* // }, + * // }, + * // SizeEstimateRangeGB: [ // ItemCollectionSizeEstimateRange + * // Number("double"), + * // ], + * // }, + * // ], + * // }, + * // }; + * + * ``` + * + * @param TransactWriteItemsCommandInput - {@link TransactWriteItemsCommandInput} + * @returns {@link TransactWriteItemsCommandOutput} + * @see {@link TransactWriteItemsCommandInput} for command's `input` shape. + * @see {@link TransactWriteItemsCommandOutput} for command's `response` shape. + * @see {@link DynamoDBClientResolvedConfig | config} for DynamoDBClient's `config` shape. + * + * @throws {@link IdempotentParameterMismatchException} (client fault) + *

DynamoDB rejected the request because you retried a request with a + * different payload but with an idempotent token that was already used.

+ * + * @throws {@link InternalServerError} (server fault) + *

An error occurred on the server side.

+ * + * @throws {@link InvalidEndpointException} (client fault) + * + * @throws {@link ProvisionedThroughputExceededException} (client fault) + *

Your request rate is too high. The Amazon Web Services SDKs for DynamoDB + * automatically retry requests that receive this exception. Your request is eventually + * successful, unless your retry queue is too large to finish. Reduce the frequency of + * requests and use exponential backoff. For more information, go to Error Retries and Exponential Backoff in the Amazon DynamoDB Developer Guide.

+ * + * @throws {@link RequestLimitExceeded} (client fault) + *

Throughput exceeds the current throughput quota for your account. Please contact + * Amazon Web ServicesSupport to request a + * quota increase.

+ * + * @throws {@link ResourceNotFoundException} (client fault) + *

The operation tried to access a nonexistent table or index. The resource might not + * be specified correctly, or its status might not be ACTIVE.

+ * + * @throws {@link TransactionCanceledException} (client fault) + *

The entire transaction request was canceled.

+ *

DynamoDB cancels a TransactWriteItems request under the following + * circumstances:

+ *
    + *
  • + *

    A condition in one of the condition expressions is not met.

    + *
  • + *
  • + *

    A table in the TransactWriteItems request is in a different + * account or region.

    + *
  • + *
  • + *

    More than one action in the TransactWriteItems operation + * targets the same item.

    + *
  • + *
  • + *

    There is insufficient provisioned capacity for the transaction to be + * completed.

    + *
  • + *
  • + *

    An item size becomes too large (larger than 400 KB), or a local secondary + * index (LSI) becomes too large, or a similar validation error occurs because of + * changes made by the transaction.

    + *
  • + *
  • + *

    There is a user error, such as an invalid data format.

    + *
  • + *
  • + *

    + * There is an ongoing TransactWriteItems operation that conflicts with a concurrent + * TransactWriteItems request. In this case the TransactWriteItems operation + * fails with a TransactionCanceledException. + *

    + *
  • + *
+ *

DynamoDB cancels a TransactGetItems request under the + * following circumstances:

+ *
    + *
  • + *

    There is an ongoing TransactGetItems operation that conflicts + * with a concurrent PutItem, UpdateItem, + * DeleteItem or TransactWriteItems request. In this + * case the TransactGetItems operation fails with a + * TransactionCanceledException.

    + *
  • + *
  • + *

    A table in the TransactGetItems request is in a different + * account or region.

    + *
  • + *
  • + *

    There is insufficient provisioned capacity for the transaction to be + * completed.

    + *
  • + *
  • + *

    There is a user error, such as an invalid data format.

    + *
  • + *
+ * + *

If using Java, DynamoDB lists the cancellation reasons on the + * CancellationReasons property. This property is not set for other + * languages. Transaction cancellation reasons are ordered in the order of requested + * items, if an item has no error it will have None code and + * Null message.

+ *
+ *

Cancellation reason codes and possible error messages:

+ *
    + *
  • + *

    No Errors:

    + *
      + *
    • + *

      Code: None + *

      + *
    • + *
    • + *

      Message: null + *

      + *
    • + *
    + *
  • + *
  • + *

    Conditional Check Failed:

    + *
      + *
    • + *

      Code: ConditionalCheckFailed + *

      + *
    • + *
    • + *

      Message: The conditional request failed.

      + *
    • + *
    + *
  • + *
  • + *

    Item Collection Size Limit Exceeded:

    + *
      + *
    • + *

      Code: ItemCollectionSizeLimitExceeded + *

      + *
    • + *
    • + *

      Message: Collection size exceeded.

      + *
    • + *
    + *
  • + *
  • + *

    Transaction Conflict:

    + *
      + *
    • + *

      Code: TransactionConflict + *

      + *
    • + *
    • + *

      Message: Transaction is ongoing for the item.

      + *
    • + *
    + *
  • + *
  • + *

    Provisioned Throughput Exceeded:

    + *
      + *
    • + *

      Code: ProvisionedThroughputExceeded + *

      + *
    • + *
    • + *

      Messages:

      + *
        + *
      • + *

        The level of configured provisioned throughput for the + * table was exceeded. Consider increasing your provisioning level + * with the UpdateTable API.

        + * + *

        This Message is received when provisioned throughput is + * exceeded is on a provisioned DynamoDB + * table.

        + *
        + *
      • + *
      • + *

        The level of configured provisioned throughput for one or + * more global secondary indexes of the table was exceeded. + * Consider increasing your provisioning level for the + * under-provisioned global secondary indexes with the UpdateTable + * API.

        + * + *

        This message is returned when provisioned throughput is + * exceeded is on a provisioned GSI.

        + *
        + *
      • + *
      + *
    • + *
    + *
  • + *
  • + *

    Throttling Error:

    + *
      + *
    • + *

      Code: ThrottlingError + *

      + *
    • + *
    • + *

      Messages:

      + *
        + *
      • + *

        Throughput exceeds the current capacity of your table or + * index. DynamoDB is automatically scaling your table or + * index so please try again shortly. If exceptions persist, check + * if you have a hot key: + * https://docs.aws.amazon.com/amazondynamodb/latest/developerguide/bp-partition-key-design.html.

        + * + *

        This message is returned when writes get throttled on an + * On-Demand table as DynamoDB is automatically + * scaling the table.

        + *
        + *
      • + *
      • + *

        Throughput exceeds the current capacity for one or more + * global secondary indexes. DynamoDB is automatically + * scaling your index so please try again shortly.

        + * + *

        This message is returned when writes get throttled on + * an On-Demand GSI as DynamoDB is automatically + * scaling the GSI.

        + *
        + *
      • + *
      + *
    • + *
    + *
  • + *
  • + *

    Validation Error:

    + *
      + *
    • + *

      Code: ValidationError + *

      + *
    • + *
    • + *

      Messages:

      + *
        + *
      • + *

        One or more parameter values were invalid.

        + *
      • + *
      • + *

        The update expression attempted to update the secondary + * index key beyond allowed size limits.

        + *
      • + *
      • + *

        The update expression attempted to update the secondary + * index key to unsupported type.

        + *
      • + *
      • + *

        An operand in the update expression has an incorrect data + * type.

        + *
      • + *
      • + *

        Item size to update has exceeded the maximum allowed + * size.

        + *
      • + *
      • + *

        Number overflow. Attempting to store a number with + * magnitude larger than supported range.

        + *
      • + *
      • + *

        Type mismatch for attribute to update.

        + *
      • + *
      • + *

        Nesting Levels have exceeded supported limits.

        + *
      • + *
      • + *

        The document path provided in the update expression is + * invalid for update.

        + *
      • + *
      • + *

        The provided expression refers to an attribute that does + * not exist in the item.

        + *
      • + *
      + *
    • + *
    + *
  • + *
+ * + * @throws {@link TransactionInProgressException} (client fault) + *

The transaction with the given request token is already in progress.

+ *

+ * Recommended Settings + *

+ * + *

+ * This is a general recommendation for handling the TransactionInProgressException. These settings help + * ensure that the client retries will trigger completion of the ongoing TransactWriteItems request. + *

+ *
+ *
    + *
  • + *

    + * Set clientExecutionTimeout to a value that allows at least one retry to be processed after 5 + * seconds have elapsed since the first attempt for the TransactWriteItems operation. + *

    + *
  • + *
  • + *

    + * Set socketTimeout to a value a little lower than the requestTimeout setting. + *

    + *
  • + *
  • + *

    + * requestTimeout should be set based on the time taken for the individual retries of a single + * HTTP request for your use case, but setting it to 1 second or higher should work well to reduce chances of + * retries and TransactionInProgressException errors. + *

    + *
  • + *
  • + *

    + * Use exponential backoff when retrying and tune backoff if needed. + *

    + *
  • + *
+ *

+ * Assuming default retry policy, + * example timeout settings based on the guidelines above are as follows: + *

+ *

Example timeline:

+ *
    + *
  • + *

    0-1000 first attempt

    + *
  • + *
  • + *

    1000-1500 first sleep/delay (default retry policy uses 500 ms as base delay for 4xx errors)

    + *
  • + *
  • + *

    1500-2500 second attempt

    + *
  • + *
  • + *

    2500-3500 second sleep/delay (500 * 2, exponential backoff)

    + *
  • + *
  • + *

    3500-4500 third attempt

    + *
  • + *
  • + *

    4500-6500 third sleep/delay (500 * 2^2)

    + *
  • + *
  • + *

    6500-7500 fourth attempt (this can trigger inline recovery since 5 seconds have elapsed since the first attempt reached TC)

    + *
  • + *
+ * + * @throws {@link DynamoDBServiceException} + *

Base exception class for all service exceptions from DynamoDB service.

+ * + * + * @public + */ +export declare class TransactWriteItemsCommand extends TransactWriteItemsCommand_base { + /** @internal type navigation helper, not in runtime. */ + protected static __types: { + api: { + input: TransactWriteItemsInput; + output: TransactWriteItemsOutput; + }; + sdk: { + input: TransactWriteItemsCommandInput; + output: TransactWriteItemsCommandOutput; + }; + }; +} diff --git a/amplify/functions/downloadDocument/node_modules/@aws-sdk/client-dynamodb/dist-types/commands/UntagResourceCommand.d.ts b/amplify/functions/downloadDocument/node_modules/@aws-sdk/client-dynamodb/dist-types/commands/UntagResourceCommand.d.ts new file mode 100644 index 0000000..70f702d --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@aws-sdk/client-dynamodb/dist-types/commands/UntagResourceCommand.d.ts @@ -0,0 +1,134 @@ +import { Command as $Command } from "@smithy/smithy-client"; +import { MetadataBearer as __MetadataBearer } from "@smithy/types"; +import { DynamoDBClientResolvedConfig, ServiceInputTypes, ServiceOutputTypes } from "../DynamoDBClient"; +import { UntagResourceInput } from "../models/models_0"; +/** + * @public + */ +export type { __MetadataBearer }; +export { $Command }; +/** + * @public + * + * The input for {@link UntagResourceCommand}. + */ +export interface UntagResourceCommandInput extends UntagResourceInput { +} +/** + * @public + * + * The output of {@link UntagResourceCommand}. + */ +export interface UntagResourceCommandOutput extends __MetadataBearer { +} +declare const UntagResourceCommand_base: { + new (input: UntagResourceCommandInput): import("@smithy/smithy-client").CommandImpl; + new (__0_0: UntagResourceCommandInput): import("@smithy/smithy-client").CommandImpl; + getEndpointParameterInstructions(): import("@smithy/middleware-endpoint").EndpointParameterInstructions; +}; +/** + *

Removes the association of tags from an Amazon DynamoDB resource. You can call + * UntagResource up to five times per second, per account.

+ *
    + *
  • + *

    + * UntagResource is an asynchronous operation. If you issue a ListTagsOfResource request immediately after an + * UntagResource request, DynamoDB might return your + * previous tag set, if there was one, or an empty tag set. This is because + * ListTagsOfResource uses an eventually consistent query, and the + * metadata for your tags or table might not be available at that moment. Wait for + * a few seconds, and then try the ListTagsOfResource request + * again.

    + *
  • + *
  • + *

    The application or removal of tags using TagResource and + * UntagResource APIs is eventually consistent. + * ListTagsOfResource API will only reflect the changes after a + * few seconds.

    + *
  • + *
+ *

For an overview on tagging DynamoDB resources, see Tagging for DynamoDB + * in the Amazon DynamoDB Developer Guide.

+ * @example + * Use a bare-bones client and the command you need to make an API call. + * ```javascript + * import { DynamoDBClient, UntagResourceCommand } from "@aws-sdk/client-dynamodb"; // ES Modules import + * // const { DynamoDBClient, UntagResourceCommand } = require("@aws-sdk/client-dynamodb"); // CommonJS import + * const client = new DynamoDBClient(config); + * const input = { // UntagResourceInput + * ResourceArn: "STRING_VALUE", // required + * TagKeys: [ // TagKeyList // required + * "STRING_VALUE", + * ], + * }; + * const command = new UntagResourceCommand(input); + * const response = await client.send(command); + * // {}; + * + * ``` + * + * @param UntagResourceCommandInput - {@link UntagResourceCommandInput} + * @returns {@link UntagResourceCommandOutput} + * @see {@link UntagResourceCommandInput} for command's `input` shape. + * @see {@link UntagResourceCommandOutput} for command's `response` shape. + * @see {@link DynamoDBClientResolvedConfig | config} for DynamoDBClient's `config` shape. + * + * @throws {@link InternalServerError} (server fault) + *

An error occurred on the server side.

+ * + * @throws {@link InvalidEndpointException} (client fault) + * + * @throws {@link LimitExceededException} (client fault) + *

There is no limit to the number of daily on-demand backups that can be taken.

+ *

For most purposes, up to 500 simultaneous table operations are allowed per account. These operations + * include CreateTable, UpdateTable, + * DeleteTable,UpdateTimeToLive, + * RestoreTableFromBackup, and RestoreTableToPointInTime.

+ *

When you are creating a table with one or more secondary + * indexes, you can have up to 250 such requests running at a time. However, if the table or + * index specifications are complex, then DynamoDB might temporarily reduce the number + * of concurrent operations.

+ *

When importing into DynamoDB, up to 50 simultaneous import table operations are allowed per account.

+ *

There is a soft account quota of 2,500 tables.

+ *

GetRecords was called with a value of more than 1000 for the limit request parameter.

+ *

More than 2 processes are reading from the same streams shard at the same time. Exceeding + * this limit may result in request throttling.

+ * + * @throws {@link ResourceInUseException} (client fault) + *

The operation conflicts with the resource's availability. For example:

+ *
    + *
  • + *

    You attempted to recreate an existing table.

    + *
  • + *
  • + *

    You tried to delete a table currently in the CREATING state.

    + *
  • + *
  • + *

    You tried to update a resource that was already being updated.

    + *
  • + *
+ *

When appropriate, wait for the ongoing update to complete and attempt the request again.

+ * + * @throws {@link ResourceNotFoundException} (client fault) + *

The operation tried to access a nonexistent table or index. The resource might not + * be specified correctly, or its status might not be ACTIVE.

+ * + * @throws {@link DynamoDBServiceException} + *

Base exception class for all service exceptions from DynamoDB service.

+ * + * + * @public + */ +export declare class UntagResourceCommand extends UntagResourceCommand_base { + /** @internal type navigation helper, not in runtime. */ + protected static __types: { + api: { + input: UntagResourceInput; + output: {}; + }; + sdk: { + input: UntagResourceCommandInput; + output: UntagResourceCommandOutput; + }; + }; +} diff --git a/amplify/functions/downloadDocument/node_modules/@aws-sdk/client-dynamodb/dist-types/commands/UpdateContinuousBackupsCommand.d.ts b/amplify/functions/downloadDocument/node_modules/@aws-sdk/client-dynamodb/dist-types/commands/UpdateContinuousBackupsCommand.d.ts new file mode 100644 index 0000000..c15d7f7 --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@aws-sdk/client-dynamodb/dist-types/commands/UpdateContinuousBackupsCommand.d.ts @@ -0,0 +1,108 @@ +import { Command as $Command } from "@smithy/smithy-client"; +import { MetadataBearer as __MetadataBearer } from "@smithy/types"; +import { DynamoDBClientResolvedConfig, ServiceInputTypes, ServiceOutputTypes } from "../DynamoDBClient"; +import { UpdateContinuousBackupsInput, UpdateContinuousBackupsOutput } from "../models/models_0"; +/** + * @public + */ +export type { __MetadataBearer }; +export { $Command }; +/** + * @public + * + * The input for {@link UpdateContinuousBackupsCommand}. + */ +export interface UpdateContinuousBackupsCommandInput extends UpdateContinuousBackupsInput { +} +/** + * @public + * + * The output of {@link UpdateContinuousBackupsCommand}. + */ +export interface UpdateContinuousBackupsCommandOutput extends UpdateContinuousBackupsOutput, __MetadataBearer { +} +declare const UpdateContinuousBackupsCommand_base: { + new (input: UpdateContinuousBackupsCommandInput): import("@smithy/smithy-client").CommandImpl; + new (__0_0: UpdateContinuousBackupsCommandInput): import("@smithy/smithy-client").CommandImpl; + getEndpointParameterInstructions(): import("@smithy/middleware-endpoint").EndpointParameterInstructions; +}; +/** + *

+ * UpdateContinuousBackups enables or disables point in time recovery for + * the specified table. A successful UpdateContinuousBackups call returns the + * current ContinuousBackupsDescription. Continuous backups are + * ENABLED on all tables at table creation. If point in time recovery is + * enabled, PointInTimeRecoveryStatus will be set to ENABLED.

+ *

Once continuous backups and point in time recovery are enabled, you can restore to + * any point in time within EarliestRestorableDateTime and + * LatestRestorableDateTime.

+ *

+ * LatestRestorableDateTime is typically 5 minutes before the current time. + * You can restore your table to any point in time in the last 35 days. You can set the + * RecoveryPeriodInDays to any value between 1 and 35 days.

+ * @example + * Use a bare-bones client and the command you need to make an API call. + * ```javascript + * import { DynamoDBClient, UpdateContinuousBackupsCommand } from "@aws-sdk/client-dynamodb"; // ES Modules import + * // const { DynamoDBClient, UpdateContinuousBackupsCommand } = require("@aws-sdk/client-dynamodb"); // CommonJS import + * const client = new DynamoDBClient(config); + * const input = { // UpdateContinuousBackupsInput + * TableName: "STRING_VALUE", // required + * PointInTimeRecoverySpecification: { // PointInTimeRecoverySpecification + * PointInTimeRecoveryEnabled: true || false, // required + * RecoveryPeriodInDays: Number("int"), + * }, + * }; + * const command = new UpdateContinuousBackupsCommand(input); + * const response = await client.send(command); + * // { // UpdateContinuousBackupsOutput + * // ContinuousBackupsDescription: { // ContinuousBackupsDescription + * // ContinuousBackupsStatus: "ENABLED" || "DISABLED", // required + * // PointInTimeRecoveryDescription: { // PointInTimeRecoveryDescription + * // PointInTimeRecoveryStatus: "ENABLED" || "DISABLED", + * // RecoveryPeriodInDays: Number("int"), + * // EarliestRestorableDateTime: new Date("TIMESTAMP"), + * // LatestRestorableDateTime: new Date("TIMESTAMP"), + * // }, + * // }, + * // }; + * + * ``` + * + * @param UpdateContinuousBackupsCommandInput - {@link UpdateContinuousBackupsCommandInput} + * @returns {@link UpdateContinuousBackupsCommandOutput} + * @see {@link UpdateContinuousBackupsCommandInput} for command's `input` shape. + * @see {@link UpdateContinuousBackupsCommandOutput} for command's `response` shape. + * @see {@link DynamoDBClientResolvedConfig | config} for DynamoDBClient's `config` shape. + * + * @throws {@link ContinuousBackupsUnavailableException} (client fault) + *

Backups have not yet been enabled for this table.

+ * + * @throws {@link InternalServerError} (server fault) + *

An error occurred on the server side.

+ * + * @throws {@link InvalidEndpointException} (client fault) + * + * @throws {@link TableNotFoundException} (client fault) + *

A source table with the name TableName does not currently exist within + * the subscriber's account or the subscriber is operating in the wrong Amazon Web Services Region.

+ * + * @throws {@link DynamoDBServiceException} + *

Base exception class for all service exceptions from DynamoDB service.

+ * + * + * @public + */ +export declare class UpdateContinuousBackupsCommand extends UpdateContinuousBackupsCommand_base { + /** @internal type navigation helper, not in runtime. */ + protected static __types: { + api: { + input: UpdateContinuousBackupsInput; + output: UpdateContinuousBackupsOutput; + }; + sdk: { + input: UpdateContinuousBackupsCommandInput; + output: UpdateContinuousBackupsCommandOutput; + }; + }; +} diff --git a/amplify/functions/downloadDocument/node_modules/@aws-sdk/client-dynamodb/dist-types/commands/UpdateContributorInsightsCommand.d.ts b/amplify/functions/downloadDocument/node_modules/@aws-sdk/client-dynamodb/dist-types/commands/UpdateContributorInsightsCommand.d.ts new file mode 100644 index 0000000..61c9981 --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@aws-sdk/client-dynamodb/dist-types/commands/UpdateContributorInsightsCommand.d.ts @@ -0,0 +1,89 @@ +import { Command as $Command } from "@smithy/smithy-client"; +import { MetadataBearer as __MetadataBearer } from "@smithy/types"; +import { DynamoDBClientResolvedConfig, ServiceInputTypes, ServiceOutputTypes } from "../DynamoDBClient"; +import { UpdateContributorInsightsInput, UpdateContributorInsightsOutput } from "../models/models_0"; +/** + * @public + */ +export type { __MetadataBearer }; +export { $Command }; +/** + * @public + * + * The input for {@link UpdateContributorInsightsCommand}. + */ +export interface UpdateContributorInsightsCommandInput extends UpdateContributorInsightsInput { +} +/** + * @public + * + * The output of {@link UpdateContributorInsightsCommand}. 
+ */ +export interface UpdateContributorInsightsCommandOutput extends UpdateContributorInsightsOutput, __MetadataBearer { +} +declare const UpdateContributorInsightsCommand_base: { + new (input: UpdateContributorInsightsCommandInput): import("@smithy/smithy-client").CommandImpl; + new (__0_0: UpdateContributorInsightsCommandInput): import("@smithy/smithy-client").CommandImpl; + getEndpointParameterInstructions(): import("@smithy/middleware-endpoint").EndpointParameterInstructions; +}; +/** + *

Updates the status for contributor insights for a specific table or index. CloudWatch + * Contributor Insights for DynamoDB graphs display the partition key and (if applicable) + * sort key of frequently accessed items and frequently throttled items in plaintext. If + * you require the use of Amazon Web Services Key Management Service (KMS) to encrypt this + * table’s partition key and sort key data with an Amazon Web Services managed key or + * customer managed key, you should not enable CloudWatch Contributor Insights for DynamoDB + * for this table.

+ * @example + * Use a bare-bones client and the command you need to make an API call. + * ```javascript + * import { DynamoDBClient, UpdateContributorInsightsCommand } from "@aws-sdk/client-dynamodb"; // ES Modules import + * // const { DynamoDBClient, UpdateContributorInsightsCommand } = require("@aws-sdk/client-dynamodb"); // CommonJS import + * const client = new DynamoDBClient(config); + * const input = { // UpdateContributorInsightsInput + * TableName: "STRING_VALUE", // required + * IndexName: "STRING_VALUE", + * ContributorInsightsAction: "ENABLE" || "DISABLE", // required + * }; + * const command = new UpdateContributorInsightsCommand(input); + * const response = await client.send(command); + * // { // UpdateContributorInsightsOutput + * // TableName: "STRING_VALUE", + * // IndexName: "STRING_VALUE", + * // ContributorInsightsStatus: "ENABLING" || "ENABLED" || "DISABLING" || "DISABLED" || "FAILED", + * // }; + * + * ``` + * + * @param UpdateContributorInsightsCommandInput - {@link UpdateContributorInsightsCommandInput} + * @returns {@link UpdateContributorInsightsCommandOutput} + * @see {@link UpdateContributorInsightsCommandInput} for command's `input` shape. + * @see {@link UpdateContributorInsightsCommandOutput} for command's `response` shape. + * @see {@link DynamoDBClientResolvedConfig | config} for DynamoDBClient's `config` shape. + * + * @throws {@link InternalServerError} (server fault) + *

An error occurred on the server side.

+ * + * @throws {@link ResourceNotFoundException} (client fault) + *

The operation tried to access a nonexistent table or index. The resource might not + * be specified correctly, or its status might not be ACTIVE.

+ * + * @throws {@link DynamoDBServiceException} + *

Base exception class for all service exceptions from DynamoDB service.

+ * + * + * @public + */ +export declare class UpdateContributorInsightsCommand extends UpdateContributorInsightsCommand_base { + /** @internal type navigation helper, not in runtime. */ + protected static __types: { + api: { + input: UpdateContributorInsightsInput; + output: UpdateContributorInsightsOutput; + }; + sdk: { + input: UpdateContributorInsightsCommandInput; + output: UpdateContributorInsightsCommandOutput; + }; + }; +} diff --git a/amplify/functions/downloadDocument/node_modules/@aws-sdk/client-dynamodb/dist-types/commands/UpdateGlobalTableCommand.d.ts b/amplify/functions/downloadDocument/node_modules/@aws-sdk/client-dynamodb/dist-types/commands/UpdateGlobalTableCommand.d.ts new file mode 100644 index 0000000..ff0b588 --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@aws-sdk/client-dynamodb/dist-types/commands/UpdateGlobalTableCommand.d.ts @@ -0,0 +1,176 @@ +import { Command as $Command } from "@smithy/smithy-client"; +import { MetadataBearer as __MetadataBearer } from "@smithy/types"; +import { DynamoDBClientResolvedConfig, ServiceInputTypes, ServiceOutputTypes } from "../DynamoDBClient"; +import { UpdateGlobalTableInput, UpdateGlobalTableOutput } from "../models/models_0"; +/** + * @public + */ +export type { __MetadataBearer }; +export { $Command }; +/** + * @public + * + * The input for {@link UpdateGlobalTableCommand}. + */ +export interface UpdateGlobalTableCommandInput extends UpdateGlobalTableInput { +} +/** + * @public + * + * The output of {@link UpdateGlobalTableCommand}. + */ +export interface UpdateGlobalTableCommandOutput extends UpdateGlobalTableOutput, __MetadataBearer { +} +declare const UpdateGlobalTableCommand_base: { + new (input: UpdateGlobalTableCommandInput): import("@smithy/smithy-client").CommandImpl; + new (__0_0: UpdateGlobalTableCommandInput): import("@smithy/smithy-client").CommandImpl; + getEndpointParameterInstructions(): import("@smithy/middleware-endpoint").EndpointParameterInstructions; +}; +/** + *

Adds or removes replicas in the specified global table. The global table must already + * exist to be able to use this operation. Any replica to be added must be empty, have the + * same name as the global table, have the same key schema, have DynamoDB Streams enabled, + * and have the same provisioned and maximum write capacity units.

+ * + *

This documentation is for version 2017.11.29 (Legacy) of global tables, which should be avoided for new global tables. Customers should use Global Tables version 2019.11.21 (Current) when possible, because it provides greater flexibility, higher efficiency, and consumes less write capacity than 2017.11.29 (Legacy).

+ *

To determine which version you're using, see Determining the global table version you are using. To update existing global tables from version 2017.11.29 (Legacy) to version 2019.11.21 (Current), see Upgrading global tables.

+ *
+ * + *

For global tables, this operation only applies to global tables using Version + * 2019.11.21 (Current version). If you are using global tables Version + * 2019.11.21 you can use UpdateTable instead.

+ *

Although you can use UpdateGlobalTable to add replicas and remove + * replicas in a single request, for simplicity we recommend that you issue separate + * requests for adding or removing replicas.

+ *
+ *

If global secondary indexes are specified, then the following conditions must also be + * met:

+ *
    + *
  • + *

    The global secondary indexes must have the same name.

    + *
  • + *
  • + *

    The global secondary indexes must have the same hash key and sort key (if + * present).

    + *
  • + *
  • + *

    The global secondary indexes must have the same provisioned and maximum write + * capacity units.

    + *
  • + *
+ * @example + * Use a bare-bones client and the command you need to make an API call. + * ```javascript + * import { DynamoDBClient, UpdateGlobalTableCommand } from "@aws-sdk/client-dynamodb"; // ES Modules import + * // const { DynamoDBClient, UpdateGlobalTableCommand } = require("@aws-sdk/client-dynamodb"); // CommonJS import + * const client = new DynamoDBClient(config); + * const input = { // UpdateGlobalTableInput + * GlobalTableName: "STRING_VALUE", // required + * ReplicaUpdates: [ // ReplicaUpdateList // required + * { // ReplicaUpdate + * Create: { // CreateReplicaAction + * RegionName: "STRING_VALUE", // required + * }, + * Delete: { // DeleteReplicaAction + * RegionName: "STRING_VALUE", // required + * }, + * }, + * ], + * }; + * const command = new UpdateGlobalTableCommand(input); + * const response = await client.send(command); + * // { // UpdateGlobalTableOutput + * // GlobalTableDescription: { // GlobalTableDescription + * // ReplicationGroup: [ // ReplicaDescriptionList + * // { // ReplicaDescription + * // RegionName: "STRING_VALUE", + * // ReplicaStatus: "CREATING" || "CREATION_FAILED" || "UPDATING" || "DELETING" || "ACTIVE" || "REGION_DISABLED" || "INACCESSIBLE_ENCRYPTION_CREDENTIALS", + * // ReplicaStatusDescription: "STRING_VALUE", + * // ReplicaStatusPercentProgress: "STRING_VALUE", + * // KMSMasterKeyId: "STRING_VALUE", + * // ProvisionedThroughputOverride: { // ProvisionedThroughputOverride + * // ReadCapacityUnits: Number("long"), + * // }, + * // OnDemandThroughputOverride: { // OnDemandThroughputOverride + * // MaxReadRequestUnits: Number("long"), + * // }, + * // WarmThroughput: { // TableWarmThroughputDescription + * // ReadUnitsPerSecond: Number("long"), + * // WriteUnitsPerSecond: Number("long"), + * // Status: "CREATING" || "UPDATING" || "DELETING" || "ACTIVE" || "INACCESSIBLE_ENCRYPTION_CREDENTIALS" || "ARCHIVING" || "ARCHIVED", + * // }, + * // GlobalSecondaryIndexes: [ // ReplicaGlobalSecondaryIndexDescriptionList + * // { // 
ReplicaGlobalSecondaryIndexDescription + * // IndexName: "STRING_VALUE", + * // ProvisionedThroughputOverride: { + * // ReadCapacityUnits: Number("long"), + * // }, + * // OnDemandThroughputOverride: { + * // MaxReadRequestUnits: Number("long"), + * // }, + * // WarmThroughput: { // GlobalSecondaryIndexWarmThroughputDescription + * // ReadUnitsPerSecond: Number("long"), + * // WriteUnitsPerSecond: Number("long"), + * // Status: "CREATING" || "UPDATING" || "DELETING" || "ACTIVE", + * // }, + * // }, + * // ], + * // ReplicaInaccessibleDateTime: new Date("TIMESTAMP"), + * // ReplicaTableClassSummary: { // TableClassSummary + * // TableClass: "STANDARD" || "STANDARD_INFREQUENT_ACCESS", + * // LastUpdateDateTime: new Date("TIMESTAMP"), + * // }, + * // }, + * // ], + * // GlobalTableArn: "STRING_VALUE", + * // CreationDateTime: new Date("TIMESTAMP"), + * // GlobalTableStatus: "CREATING" || "ACTIVE" || "DELETING" || "UPDATING", + * // GlobalTableName: "STRING_VALUE", + * // }, + * // }; + * + * ``` + * + * @param UpdateGlobalTableCommandInput - {@link UpdateGlobalTableCommandInput} + * @returns {@link UpdateGlobalTableCommandOutput} + * @see {@link UpdateGlobalTableCommandInput} for command's `input` shape. + * @see {@link UpdateGlobalTableCommandOutput} for command's `response` shape. + * @see {@link DynamoDBClientResolvedConfig | config} for DynamoDBClient's `config` shape. + * + * @throws {@link GlobalTableNotFoundException} (client fault) + *

The specified global table does not exist.

+ * + * @throws {@link InternalServerError} (server fault) + *

An error occurred on the server side.

+ * + * @throws {@link InvalidEndpointException} (client fault) + * + * @throws {@link ReplicaAlreadyExistsException} (client fault) + *

The specified replica is already part of the global table.

+ * + * @throws {@link ReplicaNotFoundException} (client fault) + *

The specified replica is no longer part of the global table.

+ * + * @throws {@link TableNotFoundException} (client fault) + *

A source table with the name TableName does not currently exist within + * the subscriber's account or the subscriber is operating in the wrong Amazon Web Services Region.

+ * + * @throws {@link DynamoDBServiceException} + *

Base exception class for all service exceptions from DynamoDB service.

+ * + * + * @public + */ +export declare class UpdateGlobalTableCommand extends UpdateGlobalTableCommand_base { + /** @internal type navigation helper, not in runtime. */ + protected static __types: { + api: { + input: UpdateGlobalTableInput; + output: UpdateGlobalTableOutput; + }; + sdk: { + input: UpdateGlobalTableCommandInput; + output: UpdateGlobalTableCommandOutput; + }; + }; +} diff --git a/amplify/functions/downloadDocument/node_modules/@aws-sdk/client-dynamodb/dist-types/commands/UpdateGlobalTableSettingsCommand.d.ts b/amplify/functions/downloadDocument/node_modules/@aws-sdk/client-dynamodb/dist-types/commands/UpdateGlobalTableSettingsCommand.d.ts new file mode 100644 index 0000000..bf002ac --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@aws-sdk/client-dynamodb/dist-types/commands/UpdateGlobalTableSettingsCommand.d.ts @@ -0,0 +1,280 @@ +import { Command as $Command } from "@smithy/smithy-client"; +import { MetadataBearer as __MetadataBearer } from "@smithy/types"; +import { DynamoDBClientResolvedConfig, ServiceInputTypes, ServiceOutputTypes } from "../DynamoDBClient"; +import { UpdateGlobalTableSettingsInput, UpdateGlobalTableSettingsOutput } from "../models/models_0"; +/** + * @public + */ +export type { __MetadataBearer }; +export { $Command }; +/** + * @public + * + * The input for {@link UpdateGlobalTableSettingsCommand}. + */ +export interface UpdateGlobalTableSettingsCommandInput extends UpdateGlobalTableSettingsInput { +} +/** + * @public + * + * The output of {@link UpdateGlobalTableSettingsCommand}. 
+ */ +export interface UpdateGlobalTableSettingsCommandOutput extends UpdateGlobalTableSettingsOutput, __MetadataBearer { +} +declare const UpdateGlobalTableSettingsCommand_base: { + new (input: UpdateGlobalTableSettingsCommandInput): import("@smithy/smithy-client").CommandImpl; + new (__0_0: UpdateGlobalTableSettingsCommandInput): import("@smithy/smithy-client").CommandImpl; + getEndpointParameterInstructions(): import("@smithy/middleware-endpoint").EndpointParameterInstructions; +}; +/** + *

Updates settings for a global table.

+ * + *

This documentation is for version 2017.11.29 (Legacy) of global tables, which should be avoided for new global tables. Customers should use Global Tables version 2019.11.21 (Current) when possible, because it provides greater flexibility, higher efficiency, and consumes less write capacity than 2017.11.29 (Legacy).

+ *

To determine which version you're using, see Determining the global table version you are using. To update existing global tables from version 2017.11.29 (Legacy) to version 2019.11.21 (Current), see Upgrading global tables.

+ *
+ * @example + * Use a bare-bones client and the command you need to make an API call. + * ```javascript + * import { DynamoDBClient, UpdateGlobalTableSettingsCommand } from "@aws-sdk/client-dynamodb"; // ES Modules import + * // const { DynamoDBClient, UpdateGlobalTableSettingsCommand } = require("@aws-sdk/client-dynamodb"); // CommonJS import + * const client = new DynamoDBClient(config); + * const input = { // UpdateGlobalTableSettingsInput + * GlobalTableName: "STRING_VALUE", // required + * GlobalTableBillingMode: "PROVISIONED" || "PAY_PER_REQUEST", + * GlobalTableProvisionedWriteCapacityUnits: Number("long"), + * GlobalTableProvisionedWriteCapacityAutoScalingSettingsUpdate: { // AutoScalingSettingsUpdate + * MinimumUnits: Number("long"), + * MaximumUnits: Number("long"), + * AutoScalingDisabled: true || false, + * AutoScalingRoleArn: "STRING_VALUE", + * ScalingPolicyUpdate: { // AutoScalingPolicyUpdate + * PolicyName: "STRING_VALUE", + * TargetTrackingScalingPolicyConfiguration: { // AutoScalingTargetTrackingScalingPolicyConfigurationUpdate + * DisableScaleIn: true || false, + * ScaleInCooldown: Number("int"), + * ScaleOutCooldown: Number("int"), + * TargetValue: Number("double"), // required + * }, + * }, + * }, + * GlobalTableGlobalSecondaryIndexSettingsUpdate: [ // GlobalTableGlobalSecondaryIndexSettingsUpdateList + * { // GlobalTableGlobalSecondaryIndexSettingsUpdate + * IndexName: "STRING_VALUE", // required + * ProvisionedWriteCapacityUnits: Number("long"), + * ProvisionedWriteCapacityAutoScalingSettingsUpdate: { + * MinimumUnits: Number("long"), + * MaximumUnits: Number("long"), + * AutoScalingDisabled: true || false, + * AutoScalingRoleArn: "STRING_VALUE", + * ScalingPolicyUpdate: { + * PolicyName: "STRING_VALUE", + * TargetTrackingScalingPolicyConfiguration: { + * DisableScaleIn: true || false, + * ScaleInCooldown: Number("int"), + * ScaleOutCooldown: Number("int"), + * TargetValue: Number("double"), // required + * }, + * }, + * }, + * }, + * ], + * 
ReplicaSettingsUpdate: [ // ReplicaSettingsUpdateList + * { // ReplicaSettingsUpdate + * RegionName: "STRING_VALUE", // required + * ReplicaProvisionedReadCapacityUnits: Number("long"), + * ReplicaProvisionedReadCapacityAutoScalingSettingsUpdate: { + * MinimumUnits: Number("long"), + * MaximumUnits: Number("long"), + * AutoScalingDisabled: true || false, + * AutoScalingRoleArn: "STRING_VALUE", + * ScalingPolicyUpdate: { + * PolicyName: "STRING_VALUE", + * TargetTrackingScalingPolicyConfiguration: { + * DisableScaleIn: true || false, + * ScaleInCooldown: Number("int"), + * ScaleOutCooldown: Number("int"), + * TargetValue: Number("double"), // required + * }, + * }, + * }, + * ReplicaGlobalSecondaryIndexSettingsUpdate: [ // ReplicaGlobalSecondaryIndexSettingsUpdateList + * { // ReplicaGlobalSecondaryIndexSettingsUpdate + * IndexName: "STRING_VALUE", // required + * ProvisionedReadCapacityUnits: Number("long"), + * ProvisionedReadCapacityAutoScalingSettingsUpdate: "", + * }, + * ], + * ReplicaTableClass: "STANDARD" || "STANDARD_INFREQUENT_ACCESS", + * }, + * ], + * }; + * const command = new UpdateGlobalTableSettingsCommand(input); + * const response = await client.send(command); + * // { // UpdateGlobalTableSettingsOutput + * // GlobalTableName: "STRING_VALUE", + * // ReplicaSettings: [ // ReplicaSettingsDescriptionList + * // { // ReplicaSettingsDescription + * // RegionName: "STRING_VALUE", // required + * // ReplicaStatus: "CREATING" || "CREATION_FAILED" || "UPDATING" || "DELETING" || "ACTIVE" || "REGION_DISABLED" || "INACCESSIBLE_ENCRYPTION_CREDENTIALS", + * // ReplicaBillingModeSummary: { // BillingModeSummary + * // BillingMode: "PROVISIONED" || "PAY_PER_REQUEST", + * // LastUpdateToPayPerRequestDateTime: new Date("TIMESTAMP"), + * // }, + * // ReplicaProvisionedReadCapacityUnits: Number("long"), + * // ReplicaProvisionedReadCapacityAutoScalingSettings: { // AutoScalingSettingsDescription + * // MinimumUnits: Number("long"), + * // MaximumUnits: Number("long"), 
+ * // AutoScalingDisabled: true || false, + * // AutoScalingRoleArn: "STRING_VALUE", + * // ScalingPolicies: [ // AutoScalingPolicyDescriptionList + * // { // AutoScalingPolicyDescription + * // PolicyName: "STRING_VALUE", + * // TargetTrackingScalingPolicyConfiguration: { // AutoScalingTargetTrackingScalingPolicyConfigurationDescription + * // DisableScaleIn: true || false, + * // ScaleInCooldown: Number("int"), + * // ScaleOutCooldown: Number("int"), + * // TargetValue: Number("double"), // required + * // }, + * // }, + * // ], + * // }, + * // ReplicaProvisionedWriteCapacityUnits: Number("long"), + * // ReplicaProvisionedWriteCapacityAutoScalingSettings: { + * // MinimumUnits: Number("long"), + * // MaximumUnits: Number("long"), + * // AutoScalingDisabled: true || false, + * // AutoScalingRoleArn: "STRING_VALUE", + * // ScalingPolicies: [ + * // { + * // PolicyName: "STRING_VALUE", + * // TargetTrackingScalingPolicyConfiguration: { + * // DisableScaleIn: true || false, + * // ScaleInCooldown: Number("int"), + * // ScaleOutCooldown: Number("int"), + * // TargetValue: Number("double"), // required + * // }, + * // }, + * // ], + * // }, + * // ReplicaGlobalSecondaryIndexSettings: [ // ReplicaGlobalSecondaryIndexSettingsDescriptionList + * // { // ReplicaGlobalSecondaryIndexSettingsDescription + * // IndexName: "STRING_VALUE", // required + * // IndexStatus: "CREATING" || "UPDATING" || "DELETING" || "ACTIVE", + * // ProvisionedReadCapacityUnits: Number("long"), + * // ProvisionedReadCapacityAutoScalingSettings: { + * // MinimumUnits: Number("long"), + * // MaximumUnits: Number("long"), + * // AutoScalingDisabled: true || false, + * // AutoScalingRoleArn: "STRING_VALUE", + * // ScalingPolicies: [ + * // { + * // PolicyName: "STRING_VALUE", + * // TargetTrackingScalingPolicyConfiguration: { + * // DisableScaleIn: true || false, + * // ScaleInCooldown: Number("int"), + * // ScaleOutCooldown: Number("int"), + * // TargetValue: Number("double"), // required + * // }, 
+ * // }, + * // ], + * // }, + * // ProvisionedWriteCapacityUnits: Number("long"), + * // ProvisionedWriteCapacityAutoScalingSettings: { + * // MinimumUnits: Number("long"), + * // MaximumUnits: Number("long"), + * // AutoScalingDisabled: true || false, + * // AutoScalingRoleArn: "STRING_VALUE", + * // ScalingPolicies: [ + * // { + * // PolicyName: "STRING_VALUE", + * // TargetTrackingScalingPolicyConfiguration: { + * // DisableScaleIn: true || false, + * // ScaleInCooldown: Number("int"), + * // ScaleOutCooldown: Number("int"), + * // TargetValue: Number("double"), // required + * // }, + * // }, + * // ], + * // }, + * // }, + * // ], + * // ReplicaTableClassSummary: { // TableClassSummary + * // TableClass: "STANDARD" || "STANDARD_INFREQUENT_ACCESS", + * // LastUpdateDateTime: new Date("TIMESTAMP"), + * // }, + * // }, + * // ], + * // }; + * + * ``` + * + * @param UpdateGlobalTableSettingsCommandInput - {@link UpdateGlobalTableSettingsCommandInput} + * @returns {@link UpdateGlobalTableSettingsCommandOutput} + * @see {@link UpdateGlobalTableSettingsCommandInput} for command's `input` shape. + * @see {@link UpdateGlobalTableSettingsCommandOutput} for command's `response` shape. + * @see {@link DynamoDBClientResolvedConfig | config} for DynamoDBClient's `config` shape. + * + * @throws {@link GlobalTableNotFoundException} (client fault) + *

The specified global table does not exist.

+ * + * @throws {@link IndexNotFoundException} (client fault) + *

The operation tried to access a nonexistent index.

+ * + * @throws {@link InternalServerError} (server fault) + *

An error occurred on the server side.

+ * + * @throws {@link InvalidEndpointException} (client fault) + * + * @throws {@link LimitExceededException} (client fault) + *

There is no limit to the number of daily on-demand backups that can be taken.

+ *

For most purposes, up to 500 simultaneous table operations are allowed per account. These operations + * include CreateTable, UpdateTable, + * DeleteTable,UpdateTimeToLive, + * RestoreTableFromBackup, and RestoreTableToPointInTime.

+ *

When you are creating a table with one or more secondary + * indexes, you can have up to 250 such requests running at a time. However, if the table or + * index specifications are complex, then DynamoDB might temporarily reduce the number + * of concurrent operations.

+ *

When importing into DynamoDB, up to 50 simultaneous import table operations are allowed per account.

+ *

There is a soft account quota of 2,500 tables.

+ *

GetRecords was called with a value of more than 1000 for the limit request parameter.

+ *

More than 2 processes are reading from the same streams shard at the same time. Exceeding + * this limit may result in request throttling.

+ * + * @throws {@link ReplicaNotFoundException} (client fault) + *

The specified replica is no longer part of the global table.

+ * + * @throws {@link ResourceInUseException} (client fault) + *

The operation conflicts with the resource's availability. For example:

+ *
    + *
  • + *

    You attempted to recreate an existing table.

    + *
  • + *
  • + *

    You tried to delete a table currently in the CREATING state.

    + *
  • + *
  • + *

    You tried to update a resource that was already being updated.

    + *
  • + *
+ *

When appropriate, wait for the ongoing update to complete and attempt the request again.

+ * + * @throws {@link DynamoDBServiceException} + *

Base exception class for all service exceptions from DynamoDB service.

+ * + * + * @public + */ +export declare class UpdateGlobalTableSettingsCommand extends UpdateGlobalTableSettingsCommand_base { + /** @internal type navigation helper, not in runtime. */ + protected static __types: { + api: { + input: UpdateGlobalTableSettingsInput; + output: UpdateGlobalTableSettingsOutput; + }; + sdk: { + input: UpdateGlobalTableSettingsCommandInput; + output: UpdateGlobalTableSettingsCommandOutput; + }; + }; +} diff --git a/amplify/functions/downloadDocument/node_modules/@aws-sdk/client-dynamodb/dist-types/commands/UpdateItemCommand.d.ts b/amplify/functions/downloadDocument/node_modules/@aws-sdk/client-dynamodb/dist-types/commands/UpdateItemCommand.d.ts new file mode 100644 index 0000000..0fc6013 --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@aws-sdk/client-dynamodb/dist-types/commands/UpdateItemCommand.d.ts @@ -0,0 +1,313 @@ +import { Command as $Command } from "@smithy/smithy-client"; +import { MetadataBearer as __MetadataBearer } from "@smithy/types"; +import { DynamoDBClientResolvedConfig, ServiceInputTypes, ServiceOutputTypes } from "../DynamoDBClient"; +import { UpdateItemInput, UpdateItemOutput } from "../models/models_0"; +/** + * @public + */ +export type { __MetadataBearer }; +export { $Command }; +/** + * @public + * + * The input for {@link UpdateItemCommand}. + */ +export interface UpdateItemCommandInput extends UpdateItemInput { +} +/** + * @public + * + * The output of {@link UpdateItemCommand}. + */ +export interface UpdateItemCommandOutput extends UpdateItemOutput, __MetadataBearer { +} +declare const UpdateItemCommand_base: { + new (input: UpdateItemCommandInput): import("@smithy/smithy-client").CommandImpl; + new (__0_0: UpdateItemCommandInput): import("@smithy/smithy-client").CommandImpl; + getEndpointParameterInstructions(): import("@smithy/middleware-endpoint").EndpointParameterInstructions; +}; +/** + *

Edits an existing item's attributes, or adds a new item to the table if it does not + * already exist. You can put, delete, or add attribute values. You can also perform a + * conditional update on an existing item (insert a new attribute name-value pair if it + * doesn't exist, or replace an existing name-value pair if it has certain expected + * attribute values).

+ *

You can also return the item's attribute values in the same UpdateItem + * operation using the ReturnValues parameter.

+ * @example + * Use a bare-bones client and the command you need to make an API call. + * ```javascript + * import { DynamoDBClient, UpdateItemCommand } from "@aws-sdk/client-dynamodb"; // ES Modules import + * // const { DynamoDBClient, UpdateItemCommand } = require("@aws-sdk/client-dynamodb"); // CommonJS import + * const client = new DynamoDBClient(config); + * const input = { // UpdateItemInput + * TableName: "STRING_VALUE", // required + * Key: { // Key // required + * "": { // AttributeValue Union: only one key present + * S: "STRING_VALUE", + * N: "STRING_VALUE", + * B: new Uint8Array(), // e.g. Buffer.from("") or new TextEncoder().encode("") + * SS: [ // StringSetAttributeValue + * "STRING_VALUE", + * ], + * NS: [ // NumberSetAttributeValue + * "STRING_VALUE", + * ], + * BS: [ // BinarySetAttributeValue + * new Uint8Array(), // e.g. Buffer.from("") or new TextEncoder().encode("") + * ], + * M: { // MapAttributeValue + * "": {// Union: only one key present + * S: "STRING_VALUE", + * N: "STRING_VALUE", + * B: new Uint8Array(), // e.g. Buffer.from("") or new TextEncoder().encode("") + * SS: [ + * "STRING_VALUE", + * ], + * NS: [ + * "STRING_VALUE", + * ], + * BS: [ + * new Uint8Array(), // e.g. 
Buffer.from("") or new TextEncoder().encode("") + * ], + * M: { + * "": "", + * }, + * L: [ // ListAttributeValue + * "", + * ], + * NULL: true || false, + * BOOL: true || false, + * }, + * }, + * L: [ + * "", + * ], + * NULL: true || false, + * BOOL: true || false, + * }, + * }, + * AttributeUpdates: { // AttributeUpdates + * "": { // AttributeValueUpdate + * Value: "", + * Action: "ADD" || "PUT" || "DELETE", + * }, + * }, + * Expected: { // ExpectedAttributeMap + * "": { // ExpectedAttributeValue + * Value: "", + * Exists: true || false, + * ComparisonOperator: "EQ" || "NE" || "IN" || "LE" || "LT" || "GE" || "GT" || "BETWEEN" || "NOT_NULL" || "NULL" || "CONTAINS" || "NOT_CONTAINS" || "BEGINS_WITH", + * AttributeValueList: [ // AttributeValueList + * "", + * ], + * }, + * }, + * ConditionalOperator: "AND" || "OR", + * ReturnValues: "NONE" || "ALL_OLD" || "UPDATED_OLD" || "ALL_NEW" || "UPDATED_NEW", + * ReturnConsumedCapacity: "INDEXES" || "TOTAL" || "NONE", + * ReturnItemCollectionMetrics: "SIZE" || "NONE", + * UpdateExpression: "STRING_VALUE", + * ConditionExpression: "STRING_VALUE", + * ExpressionAttributeNames: { // ExpressionAttributeNameMap + * "": "STRING_VALUE", + * }, + * ExpressionAttributeValues: { // ExpressionAttributeValueMap + * "": "", + * }, + * ReturnValuesOnConditionCheckFailure: "ALL_OLD" || "NONE", + * }; + * const command = new UpdateItemCommand(input); + * const response = await client.send(command); + * // { // UpdateItemOutput + * // Attributes: { // AttributeMap + * // "": { // AttributeValue Union: only one key present + * // S: "STRING_VALUE", + * // N: "STRING_VALUE", + * // B: new Uint8Array(), + * // SS: [ // StringSetAttributeValue + * // "STRING_VALUE", + * // ], + * // NS: [ // NumberSetAttributeValue + * // "STRING_VALUE", + * // ], + * // BS: [ // BinarySetAttributeValue + * // new Uint8Array(), + * // ], + * // M: { // MapAttributeValue + * // "": {// Union: only one key present + * // S: "STRING_VALUE", + * // N: 
"STRING_VALUE", + * // B: new Uint8Array(), + * // SS: [ + * // "STRING_VALUE", + * // ], + * // NS: [ + * // "STRING_VALUE", + * // ], + * // BS: [ + * // new Uint8Array(), + * // ], + * // M: { + * // "": "", + * // }, + * // L: [ // ListAttributeValue + * // "", + * // ], + * // NULL: true || false, + * // BOOL: true || false, + * // }, + * // }, + * // L: [ + * // "", + * // ], + * // NULL: true || false, + * // BOOL: true || false, + * // }, + * // }, + * // ConsumedCapacity: { // ConsumedCapacity + * // TableName: "STRING_VALUE", + * // CapacityUnits: Number("double"), + * // ReadCapacityUnits: Number("double"), + * // WriteCapacityUnits: Number("double"), + * // Table: { // Capacity + * // ReadCapacityUnits: Number("double"), + * // WriteCapacityUnits: Number("double"), + * // CapacityUnits: Number("double"), + * // }, + * // LocalSecondaryIndexes: { // SecondaryIndexesCapacityMap + * // "": { + * // ReadCapacityUnits: Number("double"), + * // WriteCapacityUnits: Number("double"), + * // CapacityUnits: Number("double"), + * // }, + * // }, + * // GlobalSecondaryIndexes: { + * // "": { + * // ReadCapacityUnits: Number("double"), + * // WriteCapacityUnits: Number("double"), + * // CapacityUnits: Number("double"), + * // }, + * // }, + * // }, + * // ItemCollectionMetrics: { // ItemCollectionMetrics + * // ItemCollectionKey: { // ItemCollectionKeyAttributeMap + * // "": "", + * // }, + * // SizeEstimateRangeGB: [ // ItemCollectionSizeEstimateRange + * // Number("double"), + * // ], + * // }, + * // }; + * + * ``` + * + * @param UpdateItemCommandInput - {@link UpdateItemCommandInput} + * @returns {@link UpdateItemCommandOutput} + * @see {@link UpdateItemCommandInput} for command's `input` shape. + * @see {@link UpdateItemCommandOutput} for command's `response` shape. + * @see {@link DynamoDBClientResolvedConfig | config} for DynamoDBClient's `config` shape. + * + * @throws {@link ConditionalCheckFailedException} (client fault) + *

A condition specified in the operation failed to be evaluated.

+ * + * @throws {@link InternalServerError} (server fault) + *

An error occurred on the server side.

+ * + * @throws {@link InvalidEndpointException} (client fault) + * + * @throws {@link ItemCollectionSizeLimitExceededException} (client fault) + *

An item collection is too large. This exception is only returned for tables that + * have one or more local secondary indexes.

+ * + * @throws {@link ProvisionedThroughputExceededException} (client fault) + *

Your request rate is too high. The Amazon Web Services SDKs for DynamoDB + * automatically retry requests that receive this exception. Your request is eventually + * successful, unless your retry queue is too large to finish. Reduce the frequency of + * requests and use exponential backoff. For more information, go to Error Retries and Exponential Backoff in the Amazon DynamoDB Developer Guide.

+ * + * @throws {@link ReplicatedWriteConflictException} (client fault) + *

The request was rejected because one or more items in the request are being modified by a request in another Region.

+ * + * @throws {@link RequestLimitExceeded} (client fault) + *

Throughput exceeds the current throughput quota for your account. Please contact + * Amazon Web ServicesSupport to request a + * quota increase.

+ * + * @throws {@link ResourceNotFoundException} (client fault) + *

The operation tried to access a nonexistent table or index. The resource might not + * be specified correctly, or its status might not be ACTIVE.

+ * + * @throws {@link TransactionConflictException} (client fault) + *

Operation was rejected because there is an ongoing transaction for the + * item.

+ * + * @throws {@link DynamoDBServiceException} + *

Base exception class for all service exceptions from DynamoDB service.

+ * + * + * @example To update an item in a table + * ```javascript + * // This example updates an item in the Music table. It adds a new attribute (Year) and modifies the AlbumTitle attribute. All of the attributes in the item, as they appear after the update, are returned in the response. + * const input = { + * ExpressionAttributeNames: { + * #AT: "AlbumTitle", + * #Y: "Year" + * }, + * ExpressionAttributeValues: { + * :t: { + * S: "Louder Than Ever" + * }, + * :y: { + * N: "2015" + * } + * }, + * Key: { + * Artist: { + * S: "Acme Band" + * }, + * SongTitle: { + * S: "Happy Day" + * } + * }, + * ReturnValues: "ALL_NEW", + * TableName: "Music", + * UpdateExpression: "SET #Y = :y, #AT = :t" + * }; + * const command = new UpdateItemCommand(input); + * const response = await client.send(command); + * /* response is + * { + * Attributes: { + * AlbumTitle: { + * S: "Louder Than Ever" + * }, + * Artist: { + * S: "Acme Band" + * }, + * SongTitle: { + * S: "Happy Day" + * }, + * Year: { + * N: "2015" + * } + * } + * } + * *\/ + * ``` + * + * @public + */ +export declare class UpdateItemCommand extends UpdateItemCommand_base { + /** @internal type navigation helper, not in runtime. 
*/ + protected static __types: { + api: { + input: UpdateItemInput; + output: UpdateItemOutput; + }; + sdk: { + input: UpdateItemCommandInput; + output: UpdateItemCommandOutput; + }; + }; +} diff --git a/amplify/functions/downloadDocument/node_modules/@aws-sdk/client-dynamodb/dist-types/commands/UpdateKinesisStreamingDestinationCommand.d.ts b/amplify/functions/downloadDocument/node_modules/@aws-sdk/client-dynamodb/dist-types/commands/UpdateKinesisStreamingDestinationCommand.d.ts new file mode 100644 index 0000000..ec8c635 --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@aws-sdk/client-dynamodb/dist-types/commands/UpdateKinesisStreamingDestinationCommand.d.ts @@ -0,0 +1,121 @@ +import { Command as $Command } from "@smithy/smithy-client"; +import { MetadataBearer as __MetadataBearer } from "@smithy/types"; +import { DynamoDBClientResolvedConfig, ServiceInputTypes, ServiceOutputTypes } from "../DynamoDBClient"; +import { UpdateKinesisStreamingDestinationInput, UpdateKinesisStreamingDestinationOutput } from "../models/models_0"; +/** + * @public + */ +export type { __MetadataBearer }; +export { $Command }; +/** + * @public + * + * The input for {@link UpdateKinesisStreamingDestinationCommand}. + */ +export interface UpdateKinesisStreamingDestinationCommandInput extends UpdateKinesisStreamingDestinationInput { +} +/** + * @public + * + * The output of {@link UpdateKinesisStreamingDestinationCommand}. + */ +export interface UpdateKinesisStreamingDestinationCommandOutput extends UpdateKinesisStreamingDestinationOutput, __MetadataBearer { +} +declare const UpdateKinesisStreamingDestinationCommand_base: { + new (input: UpdateKinesisStreamingDestinationCommandInput): import("@smithy/smithy-client").CommandImpl; + new (__0_0: UpdateKinesisStreamingDestinationCommandInput): import("@smithy/smithy-client").CommandImpl; + getEndpointParameterInstructions(): import("@smithy/middleware-endpoint").EndpointParameterInstructions; +}; +/** + *

The command to update the Kinesis stream destination.

+ * @example + * Use a bare-bones client and the command you need to make an API call. + * ```javascript + * import { DynamoDBClient, UpdateKinesisStreamingDestinationCommand } from "@aws-sdk/client-dynamodb"; // ES Modules import + * // const { DynamoDBClient, UpdateKinesisStreamingDestinationCommand } = require("@aws-sdk/client-dynamodb"); // CommonJS import + * const client = new DynamoDBClient(config); + * const input = { // UpdateKinesisStreamingDestinationInput + * TableName: "STRING_VALUE", // required + * StreamArn: "STRING_VALUE", // required + * UpdateKinesisStreamingConfiguration: { // UpdateKinesisStreamingConfiguration + * ApproximateCreationDateTimePrecision: "MILLISECOND" || "MICROSECOND", + * }, + * }; + * const command = new UpdateKinesisStreamingDestinationCommand(input); + * const response = await client.send(command); + * // { // UpdateKinesisStreamingDestinationOutput + * // TableName: "STRING_VALUE", + * // StreamArn: "STRING_VALUE", + * // DestinationStatus: "ENABLING" || "ACTIVE" || "DISABLING" || "DISABLED" || "ENABLE_FAILED" || "UPDATING", + * // UpdateKinesisStreamingConfiguration: { // UpdateKinesisStreamingConfiguration + * // ApproximateCreationDateTimePrecision: "MILLISECOND" || "MICROSECOND", + * // }, + * // }; + * + * ``` + * + * @param UpdateKinesisStreamingDestinationCommandInput - {@link UpdateKinesisStreamingDestinationCommandInput} + * @returns {@link UpdateKinesisStreamingDestinationCommandOutput} + * @see {@link UpdateKinesisStreamingDestinationCommandInput} for command's `input` shape. + * @see {@link UpdateKinesisStreamingDestinationCommandOutput} for command's `response` shape. + * @see {@link DynamoDBClientResolvedConfig | config} for DynamoDBClient's `config` shape. + * + * @throws {@link InternalServerError} (server fault) + *

An error occurred on the server side.

+ * + * @throws {@link InvalidEndpointException} (client fault) + * + * @throws {@link LimitExceededException} (client fault) + *

There is no limit to the number of daily on-demand backups that can be taken.

+ *

For most purposes, up to 500 simultaneous table operations are allowed per account. These operations + * include CreateTable, UpdateTable, + * DeleteTable,UpdateTimeToLive, + * RestoreTableFromBackup, and RestoreTableToPointInTime.

+ *

When you are creating a table with one or more secondary + * indexes, you can have up to 250 such requests running at a time. However, if the table or + * index specifications are complex, then DynamoDB might temporarily reduce the number + * of concurrent operations.

+ *

When importing into DynamoDB, up to 50 simultaneous import table operations are allowed per account.

+ *

There is a soft account quota of 2,500 tables.

+ *

GetRecords was called with a value of more than 1000 for the limit request parameter.

+ *

More than 2 processes are reading from the same streams shard at the same time. Exceeding + * this limit may result in request throttling.

+ * + * @throws {@link ResourceInUseException} (client fault) + *

The operation conflicts with the resource's availability. For example:

+ *
    + *
  • + *

    You attempted to recreate an existing table.

    + *
  • + *
  • + *

    You tried to delete a table currently in the CREATING state.

    + *
  • + *
  • + *

    You tried to update a resource that was already being updated.

    + *
  • + *
+ *

When appropriate, wait for the ongoing update to complete and attempt the request again.

+ * + * @throws {@link ResourceNotFoundException} (client fault) + *

The operation tried to access a nonexistent table or index. The resource might not + * be specified correctly, or its status might not be ACTIVE.

+ * + * @throws {@link DynamoDBServiceException} + *

Base exception class for all service exceptions from DynamoDB service.

+ * + * + * @public + */ +export declare class UpdateKinesisStreamingDestinationCommand extends UpdateKinesisStreamingDestinationCommand_base { + /** @internal type navigation helper, not in runtime. */ + protected static __types: { + api: { + input: UpdateKinesisStreamingDestinationInput; + output: UpdateKinesisStreamingDestinationOutput; + }; + sdk: { + input: UpdateKinesisStreamingDestinationCommandInput; + output: UpdateKinesisStreamingDestinationCommandOutput; + }; + }; +} diff --git a/amplify/functions/downloadDocument/node_modules/@aws-sdk/client-dynamodb/dist-types/commands/UpdateTableCommand.d.ts b/amplify/functions/downloadDocument/node_modules/@aws-sdk/client-dynamodb/dist-types/commands/UpdateTableCommand.d.ts new file mode 100644 index 0000000..cf64a60 --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@aws-sdk/client-dynamodb/dist-types/commands/UpdateTableCommand.d.ts @@ -0,0 +1,437 @@ +import { Command as $Command } from "@smithy/smithy-client"; +import { MetadataBearer as __MetadataBearer } from "@smithy/types"; +import { DynamoDBClientResolvedConfig, ServiceInputTypes, ServiceOutputTypes } from "../DynamoDBClient"; +import { UpdateTableInput, UpdateTableOutput } from "../models/models_0"; +/** + * @public + */ +export type { __MetadataBearer }; +export { $Command }; +/** + * @public + * + * The input for {@link UpdateTableCommand}. + */ +export interface UpdateTableCommandInput extends UpdateTableInput { +} +/** + * @public + * + * The output of {@link UpdateTableCommand}. + */ +export interface UpdateTableCommandOutput extends UpdateTableOutput, __MetadataBearer { +} +declare const UpdateTableCommand_base: { + new (input: UpdateTableCommandInput): import("@smithy/smithy-client").CommandImpl; + new (__0_0: UpdateTableCommandInput): import("@smithy/smithy-client").CommandImpl; + getEndpointParameterInstructions(): import("@smithy/middleware-endpoint").EndpointParameterInstructions; +}; +/** + *

Modifies the provisioned throughput settings, global secondary indexes, or DynamoDB + * Streams settings for a given table.

+ * + *

For global tables, this operation only applies to global tables using Version + * 2019.11.21 (Current version).

+ *
+ *

You can only perform one of the following operations at once:

+ *
    + *
  • + *

    Modify the provisioned throughput settings of the table.

    + *
  • + *
  • + *

    Remove a global secondary index from the table.

    + *
  • + *
  • + *

    Create a new global secondary index on the table. After the index begins + * backfilling, you can use UpdateTable to perform other + * operations.

    + *
  • + *
+ *

+ * UpdateTable is an asynchronous operation; while it's executing, the table + * status changes from ACTIVE to UPDATING. While it's + * UPDATING, you can't issue another UpdateTable request. + * When the table returns to the ACTIVE state, the UpdateTable + * operation is complete.

+ * @example + * Use a bare-bones client and the command you need to make an API call. + * ```javascript + * import { DynamoDBClient, UpdateTableCommand } from "@aws-sdk/client-dynamodb"; // ES Modules import + * // const { DynamoDBClient, UpdateTableCommand } = require("@aws-sdk/client-dynamodb"); // CommonJS import + * const client = new DynamoDBClient(config); + * const input = { // UpdateTableInput + * AttributeDefinitions: [ // AttributeDefinitions + * { // AttributeDefinition + * AttributeName: "STRING_VALUE", // required + * AttributeType: "S" || "N" || "B", // required + * }, + * ], + * TableName: "STRING_VALUE", // required + * BillingMode: "PROVISIONED" || "PAY_PER_REQUEST", + * ProvisionedThroughput: { // ProvisionedThroughput + * ReadCapacityUnits: Number("long"), // required + * WriteCapacityUnits: Number("long"), // required + * }, + * GlobalSecondaryIndexUpdates: [ // GlobalSecondaryIndexUpdateList + * { // GlobalSecondaryIndexUpdate + * Update: { // UpdateGlobalSecondaryIndexAction + * IndexName: "STRING_VALUE", // required + * ProvisionedThroughput: { + * ReadCapacityUnits: Number("long"), // required + * WriteCapacityUnits: Number("long"), // required + * }, + * OnDemandThroughput: { // OnDemandThroughput + * MaxReadRequestUnits: Number("long"), + * MaxWriteRequestUnits: Number("long"), + * }, + * WarmThroughput: { // WarmThroughput + * ReadUnitsPerSecond: Number("long"), + * WriteUnitsPerSecond: Number("long"), + * }, + * }, + * Create: { // CreateGlobalSecondaryIndexAction + * IndexName: "STRING_VALUE", // required + * KeySchema: [ // KeySchema // required + * { // KeySchemaElement + * AttributeName: "STRING_VALUE", // required + * KeyType: "HASH" || "RANGE", // required + * }, + * ], + * Projection: { // Projection + * ProjectionType: "ALL" || "KEYS_ONLY" || "INCLUDE", + * NonKeyAttributes: [ // NonKeyAttributeNameList + * "STRING_VALUE", + * ], + * }, + * ProvisionedThroughput: { + * ReadCapacityUnits: Number("long"), // required + * 
WriteCapacityUnits: Number("long"), // required + * }, + * OnDemandThroughput: { + * MaxReadRequestUnits: Number("long"), + * MaxWriteRequestUnits: Number("long"), + * }, + * WarmThroughput: { + * ReadUnitsPerSecond: Number("long"), + * WriteUnitsPerSecond: Number("long"), + * }, + * }, + * Delete: { // DeleteGlobalSecondaryIndexAction + * IndexName: "STRING_VALUE", // required + * }, + * }, + * ], + * StreamSpecification: { // StreamSpecification + * StreamEnabled: true || false, // required + * StreamViewType: "NEW_IMAGE" || "OLD_IMAGE" || "NEW_AND_OLD_IMAGES" || "KEYS_ONLY", + * }, + * SSESpecification: { // SSESpecification + * Enabled: true || false, + * SSEType: "AES256" || "KMS", + * KMSMasterKeyId: "STRING_VALUE", + * }, + * ReplicaUpdates: [ // ReplicationGroupUpdateList + * { // ReplicationGroupUpdate + * Create: { // CreateReplicationGroupMemberAction + * RegionName: "STRING_VALUE", // required + * KMSMasterKeyId: "STRING_VALUE", + * ProvisionedThroughputOverride: { // ProvisionedThroughputOverride + * ReadCapacityUnits: Number("long"), + * }, + * OnDemandThroughputOverride: { // OnDemandThroughputOverride + * MaxReadRequestUnits: Number("long"), + * }, + * GlobalSecondaryIndexes: [ // ReplicaGlobalSecondaryIndexList + * { // ReplicaGlobalSecondaryIndex + * IndexName: "STRING_VALUE", // required + * ProvisionedThroughputOverride: { + * ReadCapacityUnits: Number("long"), + * }, + * OnDemandThroughputOverride: { + * MaxReadRequestUnits: Number("long"), + * }, + * }, + * ], + * TableClassOverride: "STANDARD" || "STANDARD_INFREQUENT_ACCESS", + * }, + * Update: { // UpdateReplicationGroupMemberAction + * RegionName: "STRING_VALUE", // required + * KMSMasterKeyId: "STRING_VALUE", + * ProvisionedThroughputOverride: { + * ReadCapacityUnits: Number("long"), + * }, + * OnDemandThroughputOverride: { + * MaxReadRequestUnits: Number("long"), + * }, + * GlobalSecondaryIndexes: [ + * { + * IndexName: "STRING_VALUE", // required + * ProvisionedThroughputOverride: { + * 
ReadCapacityUnits: Number("long"), + * }, + * OnDemandThroughputOverride: { + * MaxReadRequestUnits: Number("long"), + * }, + * }, + * ], + * TableClassOverride: "STANDARD" || "STANDARD_INFREQUENT_ACCESS", + * }, + * Delete: { // DeleteReplicationGroupMemberAction + * RegionName: "STRING_VALUE", // required + * }, + * }, + * ], + * TableClass: "STANDARD" || "STANDARD_INFREQUENT_ACCESS", + * DeletionProtectionEnabled: true || false, + * MultiRegionConsistency: "EVENTUAL" || "STRONG", + * OnDemandThroughput: { + * MaxReadRequestUnits: Number("long"), + * MaxWriteRequestUnits: Number("long"), + * }, + * WarmThroughput: { + * ReadUnitsPerSecond: Number("long"), + * WriteUnitsPerSecond: Number("long"), + * }, + * }; + * const command = new UpdateTableCommand(input); + * const response = await client.send(command); + * // { // UpdateTableOutput + * // TableDescription: { // TableDescription + * // AttributeDefinitions: [ // AttributeDefinitions + * // { // AttributeDefinition + * // AttributeName: "STRING_VALUE", // required + * // AttributeType: "S" || "N" || "B", // required + * // }, + * // ], + * // TableName: "STRING_VALUE", + * // KeySchema: [ // KeySchema + * // { // KeySchemaElement + * // AttributeName: "STRING_VALUE", // required + * // KeyType: "HASH" || "RANGE", // required + * // }, + * // ], + * // TableStatus: "CREATING" || "UPDATING" || "DELETING" || "ACTIVE" || "INACCESSIBLE_ENCRYPTION_CREDENTIALS" || "ARCHIVING" || "ARCHIVED", + * // CreationDateTime: new Date("TIMESTAMP"), + * // ProvisionedThroughput: { // ProvisionedThroughputDescription + * // LastIncreaseDateTime: new Date("TIMESTAMP"), + * // LastDecreaseDateTime: new Date("TIMESTAMP"), + * // NumberOfDecreasesToday: Number("long"), + * // ReadCapacityUnits: Number("long"), + * // WriteCapacityUnits: Number("long"), + * // }, + * // TableSizeBytes: Number("long"), + * // ItemCount: Number("long"), + * // TableArn: "STRING_VALUE", + * // TableId: "STRING_VALUE", + * // BillingModeSummary: { // 
BillingModeSummary + * // BillingMode: "PROVISIONED" || "PAY_PER_REQUEST", + * // LastUpdateToPayPerRequestDateTime: new Date("TIMESTAMP"), + * // }, + * // LocalSecondaryIndexes: [ // LocalSecondaryIndexDescriptionList + * // { // LocalSecondaryIndexDescription + * // IndexName: "STRING_VALUE", + * // KeySchema: [ + * // { + * // AttributeName: "STRING_VALUE", // required + * // KeyType: "HASH" || "RANGE", // required + * // }, + * // ], + * // Projection: { // Projection + * // ProjectionType: "ALL" || "KEYS_ONLY" || "INCLUDE", + * // NonKeyAttributes: [ // NonKeyAttributeNameList + * // "STRING_VALUE", + * // ], + * // }, + * // IndexSizeBytes: Number("long"), + * // ItemCount: Number("long"), + * // IndexArn: "STRING_VALUE", + * // }, + * // ], + * // GlobalSecondaryIndexes: [ // GlobalSecondaryIndexDescriptionList + * // { // GlobalSecondaryIndexDescription + * // IndexName: "STRING_VALUE", + * // KeySchema: [ + * // { + * // AttributeName: "STRING_VALUE", // required + * // KeyType: "HASH" || "RANGE", // required + * // }, + * // ], + * // Projection: { + * // ProjectionType: "ALL" || "KEYS_ONLY" || "INCLUDE", + * // NonKeyAttributes: [ + * // "STRING_VALUE", + * // ], + * // }, + * // IndexStatus: "CREATING" || "UPDATING" || "DELETING" || "ACTIVE", + * // Backfilling: true || false, + * // ProvisionedThroughput: { + * // LastIncreaseDateTime: new Date("TIMESTAMP"), + * // LastDecreaseDateTime: new Date("TIMESTAMP"), + * // NumberOfDecreasesToday: Number("long"), + * // ReadCapacityUnits: Number("long"), + * // WriteCapacityUnits: Number("long"), + * // }, + * // IndexSizeBytes: Number("long"), + * // ItemCount: Number("long"), + * // IndexArn: "STRING_VALUE", + * // OnDemandThroughput: { // OnDemandThroughput + * // MaxReadRequestUnits: Number("long"), + * // MaxWriteRequestUnits: Number("long"), + * // }, + * // WarmThroughput: { // GlobalSecondaryIndexWarmThroughputDescription + * // ReadUnitsPerSecond: Number("long"), + * // WriteUnitsPerSecond: 
Number("long"), + * // Status: "CREATING" || "UPDATING" || "DELETING" || "ACTIVE", + * // }, + * // }, + * // ], + * // StreamSpecification: { // StreamSpecification + * // StreamEnabled: true || false, // required + * // StreamViewType: "NEW_IMAGE" || "OLD_IMAGE" || "NEW_AND_OLD_IMAGES" || "KEYS_ONLY", + * // }, + * // LatestStreamLabel: "STRING_VALUE", + * // LatestStreamArn: "STRING_VALUE", + * // GlobalTableVersion: "STRING_VALUE", + * // Replicas: [ // ReplicaDescriptionList + * // { // ReplicaDescription + * // RegionName: "STRING_VALUE", + * // ReplicaStatus: "CREATING" || "CREATION_FAILED" || "UPDATING" || "DELETING" || "ACTIVE" || "REGION_DISABLED" || "INACCESSIBLE_ENCRYPTION_CREDENTIALS", + * // ReplicaStatusDescription: "STRING_VALUE", + * // ReplicaStatusPercentProgress: "STRING_VALUE", + * // KMSMasterKeyId: "STRING_VALUE", + * // ProvisionedThroughputOverride: { // ProvisionedThroughputOverride + * // ReadCapacityUnits: Number("long"), + * // }, + * // OnDemandThroughputOverride: { // OnDemandThroughputOverride + * // MaxReadRequestUnits: Number("long"), + * // }, + * // WarmThroughput: { // TableWarmThroughputDescription + * // ReadUnitsPerSecond: Number("long"), + * // WriteUnitsPerSecond: Number("long"), + * // Status: "CREATING" || "UPDATING" || "DELETING" || "ACTIVE" || "INACCESSIBLE_ENCRYPTION_CREDENTIALS" || "ARCHIVING" || "ARCHIVED", + * // }, + * // GlobalSecondaryIndexes: [ // ReplicaGlobalSecondaryIndexDescriptionList + * // { // ReplicaGlobalSecondaryIndexDescription + * // IndexName: "STRING_VALUE", + * // ProvisionedThroughputOverride: { + * // ReadCapacityUnits: Number("long"), + * // }, + * // OnDemandThroughputOverride: { + * // MaxReadRequestUnits: Number("long"), + * // }, + * // WarmThroughput: { + * // ReadUnitsPerSecond: Number("long"), + * // WriteUnitsPerSecond: Number("long"), + * // Status: "CREATING" || "UPDATING" || "DELETING" || "ACTIVE", + * // }, + * // }, + * // ], + * // ReplicaInaccessibleDateTime: new 
Date("TIMESTAMP"), + * // ReplicaTableClassSummary: { // TableClassSummary + * // TableClass: "STANDARD" || "STANDARD_INFREQUENT_ACCESS", + * // LastUpdateDateTime: new Date("TIMESTAMP"), + * // }, + * // }, + * // ], + * // RestoreSummary: { // RestoreSummary + * // SourceBackupArn: "STRING_VALUE", + * // SourceTableArn: "STRING_VALUE", + * // RestoreDateTime: new Date("TIMESTAMP"), // required + * // RestoreInProgress: true || false, // required + * // }, + * // SSEDescription: { // SSEDescription + * // Status: "ENABLING" || "ENABLED" || "DISABLING" || "DISABLED" || "UPDATING", + * // SSEType: "AES256" || "KMS", + * // KMSMasterKeyArn: "STRING_VALUE", + * // InaccessibleEncryptionDateTime: new Date("TIMESTAMP"), + * // }, + * // ArchivalSummary: { // ArchivalSummary + * // ArchivalDateTime: new Date("TIMESTAMP"), + * // ArchivalReason: "STRING_VALUE", + * // ArchivalBackupArn: "STRING_VALUE", + * // }, + * // TableClassSummary: { + * // TableClass: "STANDARD" || "STANDARD_INFREQUENT_ACCESS", + * // LastUpdateDateTime: new Date("TIMESTAMP"), + * // }, + * // DeletionProtectionEnabled: true || false, + * // OnDemandThroughput: { + * // MaxReadRequestUnits: Number("long"), + * // MaxWriteRequestUnits: Number("long"), + * // }, + * // WarmThroughput: { + * // ReadUnitsPerSecond: Number("long"), + * // WriteUnitsPerSecond: Number("long"), + * // Status: "CREATING" || "UPDATING" || "DELETING" || "ACTIVE" || "INACCESSIBLE_ENCRYPTION_CREDENTIALS" || "ARCHIVING" || "ARCHIVED", + * // }, + * // MultiRegionConsistency: "EVENTUAL" || "STRONG", + * // }, + * // }; + * + * ``` + * + * @param UpdateTableCommandInput - {@link UpdateTableCommandInput} + * @returns {@link UpdateTableCommandOutput} + * @see {@link UpdateTableCommandInput} for command's `input` shape. + * @see {@link UpdateTableCommandOutput} for command's `response` shape. + * @see {@link DynamoDBClientResolvedConfig | config} for DynamoDBClient's `config` shape. 
+ * + * @throws {@link InternalServerError} (server fault) + *

An error occurred on the server side.

+ * + * @throws {@link InvalidEndpointException} (client fault) + * + * @throws {@link LimitExceededException} (client fault) + *

There is no limit to the number of daily on-demand backups that can be taken.

+ *

For most purposes, up to 500 simultaneous table operations are allowed per account. These operations + * include CreateTable, UpdateTable, + * DeleteTable,UpdateTimeToLive, + * RestoreTableFromBackup, and RestoreTableToPointInTime.

+ *

When you are creating a table with one or more secondary + * indexes, you can have up to 250 such requests running at a time. However, if the table or + * index specifications are complex, then DynamoDB might temporarily reduce the number + * of concurrent operations.

+ *

When importing into DynamoDB, up to 50 simultaneous import table operations are allowed per account.

+ *

There is a soft account quota of 2,500 tables.

+ *

GetRecords was called with a value of more than 1000 for the limit request parameter.

+ *

More than 2 processes are reading from the same streams shard at the same time. Exceeding + * this limit may result in request throttling.

+ * + * @throws {@link ResourceInUseException} (client fault) + *

The operation conflicts with the resource's availability. For example:

+ *
    + *
  • + *

    You attempted to recreate an existing table.

    + *
  • + *
  • + *

    You tried to delete a table currently in the CREATING state.

    + *
  • + *
  • + *

    You tried to update a resource that was already being updated.

    + *
  • + *
+ *

When appropriate, wait for the ongoing update to complete and attempt the request again.

+ * + * @throws {@link ResourceNotFoundException} (client fault) + *

The operation tried to access a nonexistent table or index. The resource might not + * be specified correctly, or its status might not be ACTIVE.

+ * + * @throws {@link DynamoDBServiceException} + *

Base exception class for all service exceptions from DynamoDB service.

+ * + * + * @public + */ +export declare class UpdateTableCommand extends UpdateTableCommand_base { + /** @internal type navigation helper, not in runtime. */ + protected static __types: { + api: { + input: UpdateTableInput; + output: UpdateTableOutput; + }; + sdk: { + input: UpdateTableCommandInput; + output: UpdateTableCommandOutput; + }; + }; +} diff --git a/amplify/functions/downloadDocument/node_modules/@aws-sdk/client-dynamodb/dist-types/commands/UpdateTableReplicaAutoScalingCommand.d.ts b/amplify/functions/downloadDocument/node_modules/@aws-sdk/client-dynamodb/dist-types/commands/UpdateTableReplicaAutoScalingCommand.d.ts new file mode 100644 index 0000000..e6f341c --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@aws-sdk/client-dynamodb/dist-types/commands/UpdateTableReplicaAutoScalingCommand.d.ts @@ -0,0 +1,244 @@ +import { Command as $Command } from "@smithy/smithy-client"; +import { MetadataBearer as __MetadataBearer } from "@smithy/types"; +import { DynamoDBClientResolvedConfig, ServiceInputTypes, ServiceOutputTypes } from "../DynamoDBClient"; +import { UpdateTableReplicaAutoScalingInput, UpdateTableReplicaAutoScalingOutput } from "../models/models_0"; +/** + * @public + */ +export type { __MetadataBearer }; +export { $Command }; +/** + * @public + * + * The input for {@link UpdateTableReplicaAutoScalingCommand}. + */ +export interface UpdateTableReplicaAutoScalingCommandInput extends UpdateTableReplicaAutoScalingInput { +} +/** + * @public + * + * The output of {@link UpdateTableReplicaAutoScalingCommand}. 
+ */ +export interface UpdateTableReplicaAutoScalingCommandOutput extends UpdateTableReplicaAutoScalingOutput, __MetadataBearer { +} +declare const UpdateTableReplicaAutoScalingCommand_base: { + new (input: UpdateTableReplicaAutoScalingCommandInput): import("@smithy/smithy-client").CommandImpl; + new (__0_0: UpdateTableReplicaAutoScalingCommandInput): import("@smithy/smithy-client").CommandImpl; + getEndpointParameterInstructions(): import("@smithy/middleware-endpoint").EndpointParameterInstructions; +}; +/** + *

Updates auto scaling settings on your global tables at once.

+ * + *

For global tables, this operation only applies to global tables using Version + * 2019.11.21 (Current version).

+ *
+ * @example + * Use a bare-bones client and the command you need to make an API call. + * ```javascript + * import { DynamoDBClient, UpdateTableReplicaAutoScalingCommand } from "@aws-sdk/client-dynamodb"; // ES Modules import + * // const { DynamoDBClient, UpdateTableReplicaAutoScalingCommand } = require("@aws-sdk/client-dynamodb"); // CommonJS import + * const client = new DynamoDBClient(config); + * const input = { // UpdateTableReplicaAutoScalingInput + * GlobalSecondaryIndexUpdates: [ // GlobalSecondaryIndexAutoScalingUpdateList + * { // GlobalSecondaryIndexAutoScalingUpdate + * IndexName: "STRING_VALUE", + * ProvisionedWriteCapacityAutoScalingUpdate: { // AutoScalingSettingsUpdate + * MinimumUnits: Number("long"), + * MaximumUnits: Number("long"), + * AutoScalingDisabled: true || false, + * AutoScalingRoleArn: "STRING_VALUE", + * ScalingPolicyUpdate: { // AutoScalingPolicyUpdate + * PolicyName: "STRING_VALUE", + * TargetTrackingScalingPolicyConfiguration: { // AutoScalingTargetTrackingScalingPolicyConfigurationUpdate + * DisableScaleIn: true || false, + * ScaleInCooldown: Number("int"), + * ScaleOutCooldown: Number("int"), + * TargetValue: Number("double"), // required + * }, + * }, + * }, + * }, + * ], + * TableName: "STRING_VALUE", // required + * ProvisionedWriteCapacityAutoScalingUpdate: { + * MinimumUnits: Number("long"), + * MaximumUnits: Number("long"), + * AutoScalingDisabled: true || false, + * AutoScalingRoleArn: "STRING_VALUE", + * ScalingPolicyUpdate: { + * PolicyName: "STRING_VALUE", + * TargetTrackingScalingPolicyConfiguration: { + * DisableScaleIn: true || false, + * ScaleInCooldown: Number("int"), + * ScaleOutCooldown: Number("int"), + * TargetValue: Number("double"), // required + * }, + * }, + * }, + * ReplicaUpdates: [ // ReplicaAutoScalingUpdateList + * { // ReplicaAutoScalingUpdate + * RegionName: "STRING_VALUE", // required + * ReplicaGlobalSecondaryIndexUpdates: [ // ReplicaGlobalSecondaryIndexAutoScalingUpdateList + * { // 
ReplicaGlobalSecondaryIndexAutoScalingUpdate + * IndexName: "STRING_VALUE", + * ProvisionedReadCapacityAutoScalingUpdate: "", + * }, + * ], + * ReplicaProvisionedReadCapacityAutoScalingUpdate: "", + * }, + * ], + * }; + * const command = new UpdateTableReplicaAutoScalingCommand(input); + * const response = await client.send(command); + * // { // UpdateTableReplicaAutoScalingOutput + * // TableAutoScalingDescription: { // TableAutoScalingDescription + * // TableName: "STRING_VALUE", + * // TableStatus: "CREATING" || "UPDATING" || "DELETING" || "ACTIVE" || "INACCESSIBLE_ENCRYPTION_CREDENTIALS" || "ARCHIVING" || "ARCHIVED", + * // Replicas: [ // ReplicaAutoScalingDescriptionList + * // { // ReplicaAutoScalingDescription + * // RegionName: "STRING_VALUE", + * // GlobalSecondaryIndexes: [ // ReplicaGlobalSecondaryIndexAutoScalingDescriptionList + * // { // ReplicaGlobalSecondaryIndexAutoScalingDescription + * // IndexName: "STRING_VALUE", + * // IndexStatus: "CREATING" || "UPDATING" || "DELETING" || "ACTIVE", + * // ProvisionedReadCapacityAutoScalingSettings: { // AutoScalingSettingsDescription + * // MinimumUnits: Number("long"), + * // MaximumUnits: Number("long"), + * // AutoScalingDisabled: true || false, + * // AutoScalingRoleArn: "STRING_VALUE", + * // ScalingPolicies: [ // AutoScalingPolicyDescriptionList + * // { // AutoScalingPolicyDescription + * // PolicyName: "STRING_VALUE", + * // TargetTrackingScalingPolicyConfiguration: { // AutoScalingTargetTrackingScalingPolicyConfigurationDescription + * // DisableScaleIn: true || false, + * // ScaleInCooldown: Number("int"), + * // ScaleOutCooldown: Number("int"), + * // TargetValue: Number("double"), // required + * // }, + * // }, + * // ], + * // }, + * // ProvisionedWriteCapacityAutoScalingSettings: { + * // MinimumUnits: Number("long"), + * // MaximumUnits: Number("long"), + * // AutoScalingDisabled: true || false, + * // AutoScalingRoleArn: "STRING_VALUE", + * // ScalingPolicies: [ + * // { + * // PolicyName: 
"STRING_VALUE", + * // TargetTrackingScalingPolicyConfiguration: { + * // DisableScaleIn: true || false, + * // ScaleInCooldown: Number("int"), + * // ScaleOutCooldown: Number("int"), + * // TargetValue: Number("double"), // required + * // }, + * // }, + * // ], + * // }, + * // }, + * // ], + * // ReplicaProvisionedReadCapacityAutoScalingSettings: { + * // MinimumUnits: Number("long"), + * // MaximumUnits: Number("long"), + * // AutoScalingDisabled: true || false, + * // AutoScalingRoleArn: "STRING_VALUE", + * // ScalingPolicies: [ + * // { + * // PolicyName: "STRING_VALUE", + * // TargetTrackingScalingPolicyConfiguration: { + * // DisableScaleIn: true || false, + * // ScaleInCooldown: Number("int"), + * // ScaleOutCooldown: Number("int"), + * // TargetValue: Number("double"), // required + * // }, + * // }, + * // ], + * // }, + * // ReplicaProvisionedWriteCapacityAutoScalingSettings: { + * // MinimumUnits: Number("long"), + * // MaximumUnits: Number("long"), + * // AutoScalingDisabled: true || false, + * // AutoScalingRoleArn: "STRING_VALUE", + * // ScalingPolicies: [ + * // { + * // PolicyName: "STRING_VALUE", + * // TargetTrackingScalingPolicyConfiguration: { + * // DisableScaleIn: true || false, + * // ScaleInCooldown: Number("int"), + * // ScaleOutCooldown: Number("int"), + * // TargetValue: Number("double"), // required + * // }, + * // }, + * // ], + * // }, + * // ReplicaStatus: "CREATING" || "CREATION_FAILED" || "UPDATING" || "DELETING" || "ACTIVE" || "REGION_DISABLED" || "INACCESSIBLE_ENCRYPTION_CREDENTIALS", + * // }, + * // ], + * // }, + * // }; + * + * ``` + * + * @param UpdateTableReplicaAutoScalingCommandInput - {@link UpdateTableReplicaAutoScalingCommandInput} + * @returns {@link UpdateTableReplicaAutoScalingCommandOutput} + * @see {@link UpdateTableReplicaAutoScalingCommandInput} for command's `input` shape. + * @see {@link UpdateTableReplicaAutoScalingCommandOutput} for command's `response` shape. 
+ * @see {@link DynamoDBClientResolvedConfig | config} for DynamoDBClient's `config` shape. + * + * @throws {@link InternalServerError} (server fault) + *

An error occurred on the server side.

+ * + * @throws {@link LimitExceededException} (client fault) + *

There is no limit to the number of daily on-demand backups that can be taken.

+ *

For most purposes, up to 500 simultaneous table operations are allowed per account. These operations + * include CreateTable, UpdateTable, + * DeleteTable,UpdateTimeToLive, + * RestoreTableFromBackup, and RestoreTableToPointInTime.

+ *

When you are creating a table with one or more secondary + * indexes, you can have up to 250 such requests running at a time. However, if the table or + * index specifications are complex, then DynamoDB might temporarily reduce the number + * of concurrent operations.

+ *

When importing into DynamoDB, up to 50 simultaneous import table operations are allowed per account.

+ *

There is a soft account quota of 2,500 tables.

+ *

GetRecords was called with a value of more than 1000 for the limit request parameter.

+ *

More than 2 processes are reading from the same streams shard at the same time. Exceeding + * this limit may result in request throttling.

+ * + * @throws {@link ResourceInUseException} (client fault) + *

The operation conflicts with the resource's availability. For example:

+ *
    + *
  • + *

    You attempted to recreate an existing table.

    + *
  • + *
  • + *

    You tried to delete a table currently in the CREATING state.

    + *
  • + *
  • + *

    You tried to update a resource that was already being updated.

    + *
  • + *
+ *

When appropriate, wait for the ongoing update to complete and attempt the request again.

+ * + * @throws {@link ResourceNotFoundException} (client fault) + *

The operation tried to access a nonexistent table or index. The resource might not + * be specified correctly, or its status might not be ACTIVE.

+ * + * @throws {@link DynamoDBServiceException} + *

Base exception class for all service exceptions from DynamoDB service.

+ * + * + * @public + */ +export declare class UpdateTableReplicaAutoScalingCommand extends UpdateTableReplicaAutoScalingCommand_base { + /** @internal type navigation helper, not in runtime. */ + protected static __types: { + api: { + input: UpdateTableReplicaAutoScalingInput; + output: UpdateTableReplicaAutoScalingOutput; + }; + sdk: { + input: UpdateTableReplicaAutoScalingCommandInput; + output: UpdateTableReplicaAutoScalingCommandOutput; + }; + }; +} diff --git a/amplify/functions/downloadDocument/node_modules/@aws-sdk/client-dynamodb/dist-types/commands/UpdateTimeToLiveCommand.d.ts b/amplify/functions/downloadDocument/node_modules/@aws-sdk/client-dynamodb/dist-types/commands/UpdateTimeToLiveCommand.d.ts new file mode 100644 index 0000000..b36e07e --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@aws-sdk/client-dynamodb/dist-types/commands/UpdateTimeToLiveCommand.d.ts @@ -0,0 +1,143 @@ +import { Command as $Command } from "@smithy/smithy-client"; +import { MetadataBearer as __MetadataBearer } from "@smithy/types"; +import { DynamoDBClientResolvedConfig, ServiceInputTypes, ServiceOutputTypes } from "../DynamoDBClient"; +import { UpdateTimeToLiveInput, UpdateTimeToLiveOutput } from "../models/models_0"; +/** + * @public + */ +export type { __MetadataBearer }; +export { $Command }; +/** + * @public + * + * The input for {@link UpdateTimeToLiveCommand}. + */ +export interface UpdateTimeToLiveCommandInput extends UpdateTimeToLiveInput { +} +/** + * @public + * + * The output of {@link UpdateTimeToLiveCommand}. 
+ */ +export interface UpdateTimeToLiveCommandOutput extends UpdateTimeToLiveOutput, __MetadataBearer { +} +declare const UpdateTimeToLiveCommand_base: { + new (input: UpdateTimeToLiveCommandInput): import("@smithy/smithy-client").CommandImpl; + new (__0_0: UpdateTimeToLiveCommandInput): import("@smithy/smithy-client").CommandImpl; + getEndpointParameterInstructions(): import("@smithy/middleware-endpoint").EndpointParameterInstructions; +}; +/** + *

The UpdateTimeToLive method enables or disables Time to Live (TTL) for + * the specified table. A successful UpdateTimeToLive call returns the current + * TimeToLiveSpecification. It can take up to one hour for the change to + * fully process. Any additional UpdateTimeToLive calls for the same table + * during this one hour duration result in a ValidationException.

+ *

TTL compares the current time in epoch time format to the time stored in the TTL + * attribute of an item. If the epoch time value stored in the attribute is less than the + * current time, the item is marked as expired and subsequently deleted.

+ * + *

The epoch time format is the number of seconds elapsed since 12:00:00 AM January + * 1, 1970 UTC.

+ *
+ *

DynamoDB deletes expired items on a best-effort basis to ensure availability of + * throughput for other data operations.

+ * + *

DynamoDB typically deletes expired items within two days of expiration. The exact + * duration within which an item gets deleted after expiration is specific to the + * nature of the workload. Items that have expired and not been deleted will still show + * up in reads, queries, and scans.

+ *
+ *

As items are deleted, they are removed from any local secondary index and global + * secondary index immediately in the same eventually consistent way as a standard delete + * operation.

+ *

For more information, see Time To Live in the + * Amazon DynamoDB Developer Guide.

+ * @example + * Use a bare-bones client and the command you need to make an API call. + * ```javascript + * import { DynamoDBClient, UpdateTimeToLiveCommand } from "@aws-sdk/client-dynamodb"; // ES Modules import + * // const { DynamoDBClient, UpdateTimeToLiveCommand } = require("@aws-sdk/client-dynamodb"); // CommonJS import + * const client = new DynamoDBClient(config); + * const input = { // UpdateTimeToLiveInput + * TableName: "STRING_VALUE", // required + * TimeToLiveSpecification: { // TimeToLiveSpecification + * Enabled: true || false, // required + * AttributeName: "STRING_VALUE", // required + * }, + * }; + * const command = new UpdateTimeToLiveCommand(input); + * const response = await client.send(command); + * // { // UpdateTimeToLiveOutput + * // TimeToLiveSpecification: { // TimeToLiveSpecification + * // Enabled: true || false, // required + * // AttributeName: "STRING_VALUE", // required + * // }, + * // }; + * + * ``` + * + * @param UpdateTimeToLiveCommandInput - {@link UpdateTimeToLiveCommandInput} + * @returns {@link UpdateTimeToLiveCommandOutput} + * @see {@link UpdateTimeToLiveCommandInput} for command's `input` shape. + * @see {@link UpdateTimeToLiveCommandOutput} for command's `response` shape. + * @see {@link DynamoDBClientResolvedConfig | config} for DynamoDBClient's `config` shape. + * + * @throws {@link InternalServerError} (server fault) + *

An error occurred on the server side.

+ * + * @throws {@link InvalidEndpointException} (client fault) + * + * @throws {@link LimitExceededException} (client fault) + *

There is no limit to the number of daily on-demand backups that can be taken.

+ *

For most purposes, up to 500 simultaneous table operations are allowed per account. These operations + * include CreateTable, UpdateTable, + * DeleteTable,UpdateTimeToLive, + * RestoreTableFromBackup, and RestoreTableToPointInTime.

+ *

When you are creating a table with one or more secondary + * indexes, you can have up to 250 such requests running at a time. However, if the table or + * index specifications are complex, then DynamoDB might temporarily reduce the number + * of concurrent operations.

+ *

When importing into DynamoDB, up to 50 simultaneous import table operations are allowed per account.

+ *

There is a soft account quota of 2,500 tables.

+ *

GetRecords was called with a value of more than 1000 for the limit request parameter.

+ *

More than 2 processes are reading from the same streams shard at the same time. Exceeding + * this limit may result in request throttling.

+ * + * @throws {@link ResourceInUseException} (client fault) + *

The operation conflicts with the resource's availability. For example:

+ *
    + *
  • + *

    You attempted to recreate an existing table.

    + *
  • + *
  • + *

    You tried to delete a table currently in the CREATING state.

    + *
  • + *
  • + *

    You tried to update a resource that was already being updated.

    + *
  • + *
+ *

When appropriate, wait for the ongoing update to complete and attempt the request again.

+ * + * @throws {@link ResourceNotFoundException} (client fault) + *

The operation tried to access a nonexistent table or index. The resource might not + * be specified correctly, or its status might not be ACTIVE.

+ * + * @throws {@link DynamoDBServiceException} + *

Base exception class for all service exceptions from DynamoDB service.

+ * + * + * @public + */ +export declare class UpdateTimeToLiveCommand extends UpdateTimeToLiveCommand_base { + /** @internal type navigation helper, not in runtime. */ + protected static __types: { + api: { + input: UpdateTimeToLiveInput; + output: UpdateTimeToLiveOutput; + }; + sdk: { + input: UpdateTimeToLiveCommandInput; + output: UpdateTimeToLiveCommandOutput; + }; + }; +} diff --git a/amplify/functions/downloadDocument/node_modules/@aws-sdk/client-dynamodb/dist-types/commands/index.d.ts b/amplify/functions/downloadDocument/node_modules/@aws-sdk/client-dynamodb/dist-types/commands/index.d.ts new file mode 100644 index 0000000..a5053a4 --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@aws-sdk/client-dynamodb/dist-types/commands/index.d.ts @@ -0,0 +1,57 @@ +export * from "./BatchExecuteStatementCommand"; +export * from "./BatchGetItemCommand"; +export * from "./BatchWriteItemCommand"; +export * from "./CreateBackupCommand"; +export * from "./CreateGlobalTableCommand"; +export * from "./CreateTableCommand"; +export * from "./DeleteBackupCommand"; +export * from "./DeleteItemCommand"; +export * from "./DeleteResourcePolicyCommand"; +export * from "./DeleteTableCommand"; +export * from "./DescribeBackupCommand"; +export * from "./DescribeContinuousBackupsCommand"; +export * from "./DescribeContributorInsightsCommand"; +export * from "./DescribeEndpointsCommand"; +export * from "./DescribeExportCommand"; +export * from "./DescribeGlobalTableCommand"; +export * from "./DescribeGlobalTableSettingsCommand"; +export * from "./DescribeImportCommand"; +export * from "./DescribeKinesisStreamingDestinationCommand"; +export * from "./DescribeLimitsCommand"; +export * from "./DescribeTableCommand"; +export * from "./DescribeTableReplicaAutoScalingCommand"; +export * from "./DescribeTimeToLiveCommand"; +export * from "./DisableKinesisStreamingDestinationCommand"; +export * from "./EnableKinesisStreamingDestinationCommand"; +export * from 
"./ExecuteStatementCommand"; +export * from "./ExecuteTransactionCommand"; +export * from "./ExportTableToPointInTimeCommand"; +export * from "./GetItemCommand"; +export * from "./GetResourcePolicyCommand"; +export * from "./ImportTableCommand"; +export * from "./ListBackupsCommand"; +export * from "./ListContributorInsightsCommand"; +export * from "./ListExportsCommand"; +export * from "./ListGlobalTablesCommand"; +export * from "./ListImportsCommand"; +export * from "./ListTablesCommand"; +export * from "./ListTagsOfResourceCommand"; +export * from "./PutItemCommand"; +export * from "./PutResourcePolicyCommand"; +export * from "./QueryCommand"; +export * from "./RestoreTableFromBackupCommand"; +export * from "./RestoreTableToPointInTimeCommand"; +export * from "./ScanCommand"; +export * from "./TagResourceCommand"; +export * from "./TransactGetItemsCommand"; +export * from "./TransactWriteItemsCommand"; +export * from "./UntagResourceCommand"; +export * from "./UpdateContinuousBackupsCommand"; +export * from "./UpdateContributorInsightsCommand"; +export * from "./UpdateGlobalTableCommand"; +export * from "./UpdateGlobalTableSettingsCommand"; +export * from "./UpdateItemCommand"; +export * from "./UpdateKinesisStreamingDestinationCommand"; +export * from "./UpdateTableCommand"; +export * from "./UpdateTableReplicaAutoScalingCommand"; +export * from "./UpdateTimeToLiveCommand"; diff --git a/amplify/functions/downloadDocument/node_modules/@aws-sdk/client-dynamodb/dist-types/endpoint/EndpointParameters.d.ts b/amplify/functions/downloadDocument/node_modules/@aws-sdk/client-dynamodb/dist-types/endpoint/EndpointParameters.d.ts new file mode 100644 index 0000000..057fd52 --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@aws-sdk/client-dynamodb/dist-types/endpoint/EndpointParameters.d.ts @@ -0,0 +1,54 @@ +import { Endpoint, EndpointParameters as __EndpointParameters, EndpointV2, Provider } from "@smithy/types"; +/** + * @public + */ +export interface 
ClientInputEndpointParameters { + region?: string | Provider; + useDualstackEndpoint?: boolean | Provider; + useFipsEndpoint?: boolean | Provider; + endpoint?: string | Provider | Endpoint | Provider | EndpointV2 | Provider; + accountId?: string | Provider; + accountIdEndpointMode?: string | Provider; +} +export type ClientResolvedEndpointParameters = ClientInputEndpointParameters & { + defaultSigningName: string; +}; +export declare const resolveClientEndpointParameters: (options: T & ClientInputEndpointParameters) => T & ClientInputEndpointParameters & { + defaultSigningName: string; +}; +export declare const commonParams: { + readonly UseFIPS: { + readonly type: "builtInParams"; + readonly name: "useFipsEndpoint"; + }; + readonly AccountId: { + readonly type: "builtInParams"; + readonly name: "accountId"; + }; + readonly Endpoint: { + readonly type: "builtInParams"; + readonly name: "endpoint"; + }; + readonly Region: { + readonly type: "builtInParams"; + readonly name: "region"; + }; + readonly UseDualStack: { + readonly type: "builtInParams"; + readonly name: "useDualstackEndpoint"; + }; + readonly AccountIdEndpointMode: { + readonly type: "builtInParams"; + readonly name: "accountIdEndpointMode"; + }; +}; +export interface EndpointParameters extends __EndpointParameters { + Region?: string; + UseDualStack?: boolean; + UseFIPS?: boolean; + Endpoint?: string; + AccountId?: string; + AccountIdEndpointMode?: string; + ResourceArn?: string; + ResourceArnList?: string[]; +} diff --git a/amplify/functions/downloadDocument/node_modules/@aws-sdk/client-dynamodb/dist-types/endpoint/endpointResolver.d.ts b/amplify/functions/downloadDocument/node_modules/@aws-sdk/client-dynamodb/dist-types/endpoint/endpointResolver.d.ts new file mode 100644 index 0000000..70a8eae --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@aws-sdk/client-dynamodb/dist-types/endpoint/endpointResolver.d.ts @@ -0,0 +1,5 @@ +import { EndpointV2, Logger } from "@smithy/types"; +import 
{ EndpointParameters } from "./EndpointParameters"; +export declare const defaultEndpointResolver: (endpointParams: EndpointParameters, context?: { + logger?: Logger; +}) => EndpointV2; diff --git a/amplify/functions/downloadDocument/node_modules/@aws-sdk/client-dynamodb/dist-types/endpoint/ruleset.d.ts b/amplify/functions/downloadDocument/node_modules/@aws-sdk/client-dynamodb/dist-types/endpoint/ruleset.d.ts new file mode 100644 index 0000000..4b23899 --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@aws-sdk/client-dynamodb/dist-types/endpoint/ruleset.d.ts @@ -0,0 +1,2 @@ +import { RuleSetObject } from "@smithy/types"; +export declare const ruleSet: RuleSetObject; diff --git a/amplify/functions/downloadDocument/node_modules/@aws-sdk/client-dynamodb/dist-types/extensionConfiguration.d.ts b/amplify/functions/downloadDocument/node_modules/@aws-sdk/client-dynamodb/dist-types/extensionConfiguration.d.ts new file mode 100644 index 0000000..3aa1e50 --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@aws-sdk/client-dynamodb/dist-types/extensionConfiguration.d.ts @@ -0,0 +1,9 @@ +import { AwsRegionExtensionConfiguration } from "@aws-sdk/types"; +import { HttpHandlerExtensionConfiguration } from "@smithy/protocol-http"; +import { DefaultExtensionConfiguration } from "@smithy/types"; +import { HttpAuthExtensionConfiguration } from "./auth/httpAuthExtensionConfiguration"; +/** + * @internal + */ +export interface DynamoDBExtensionConfiguration extends HttpHandlerExtensionConfiguration, DefaultExtensionConfiguration, AwsRegionExtensionConfiguration, HttpAuthExtensionConfiguration { +} diff --git a/amplify/functions/downloadDocument/node_modules/@aws-sdk/client-dynamodb/dist-types/index.d.ts b/amplify/functions/downloadDocument/node_modules/@aws-sdk/client-dynamodb/dist-types/index.d.ts new file mode 100644 index 0000000..bb6be8f --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@aws-sdk/client-dynamodb/dist-types/index.d.ts 
@@ -0,0 +1,31 @@ +/** + * Amazon DynamoDB + *

Amazon DynamoDB is a fully managed NoSQL database service that provides fast + * and predictable performance with seamless scalability. DynamoDB lets you + * offload the administrative burdens of operating and scaling a distributed database, so + * that you don't have to worry about hardware provisioning, setup and configuration, + * replication, software patching, or cluster scaling.

+ *

With DynamoDB, you can create database tables that can store and retrieve + * any amount of data, and serve any level of request traffic. You can scale up or scale + * down your tables' throughput capacity without downtime or performance degradation, and + * use the Amazon Web Services Management Console to monitor resource utilization and performance + * metrics.

+ *

DynamoDB automatically spreads the data and traffic for your tables over + * a sufficient number of servers to handle your throughput and storage requirements, while + * maintaining consistent and fast performance. All of your data is stored on solid state + * disks (SSDs) and automatically replicated across multiple Availability Zones in an + * Amazon Web Services Region, providing built-in high availability and data + * durability.

+ * + * @packageDocumentation + */ +export * from "./DynamoDBClient"; +export * from "./DynamoDB"; +export { ClientInputEndpointParameters } from "./endpoint/EndpointParameters"; +export type { RuntimeExtension } from "./runtimeExtensions"; +export type { DynamoDBExtensionConfiguration } from "./extensionConfiguration"; +export * from "./commands"; +export * from "./pagination"; +export * from "./waiters"; +export * from "./models"; +export { DynamoDBServiceException } from "./models/DynamoDBServiceException"; diff --git a/amplify/functions/downloadDocument/node_modules/@aws-sdk/client-dynamodb/dist-types/models/DynamoDBServiceException.d.ts b/amplify/functions/downloadDocument/node_modules/@aws-sdk/client-dynamodb/dist-types/models/DynamoDBServiceException.d.ts new file mode 100644 index 0000000..f8ff019 --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@aws-sdk/client-dynamodb/dist-types/models/DynamoDBServiceException.d.ts @@ -0,0 +1,14 @@ +import { ServiceException as __ServiceException, ServiceExceptionOptions as __ServiceExceptionOptions } from "@smithy/smithy-client"; +export type { __ServiceExceptionOptions }; +export { __ServiceException }; +/** + * @public + * + * Base exception class for all service exceptions from DynamoDB service. 
+ */ +export declare class DynamoDBServiceException extends __ServiceException { + /** + * @internal + */ + constructor(options: __ServiceExceptionOptions); +} diff --git a/amplify/functions/downloadDocument/node_modules/@aws-sdk/client-dynamodb/dist-types/models/index.d.ts b/amplify/functions/downloadDocument/node_modules/@aws-sdk/client-dynamodb/dist-types/models/index.d.ts new file mode 100644 index 0000000..09c5d6e --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@aws-sdk/client-dynamodb/dist-types/models/index.d.ts @@ -0,0 +1 @@ +export * from "./models_0"; diff --git a/amplify/functions/downloadDocument/node_modules/@aws-sdk/client-dynamodb/dist-types/models/models_0.d.ts b/amplify/functions/downloadDocument/node_modules/@aws-sdk/client-dynamodb/dist-types/models/models_0.d.ts new file mode 100644 index 0000000..9821a22 --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@aws-sdk/client-dynamodb/dist-types/models/models_0.d.ts @@ -0,0 +1,11039 @@ +import { ExceptionOptionType as __ExceptionOptionType } from "@smithy/smithy-client"; +import { DynamoDBServiceException as __BaseException } from "./DynamoDBServiceException"; +/** + * @public + * @enum + */ +export declare const ApproximateCreationDateTimePrecision: { + readonly MICROSECOND: "MICROSECOND"; + readonly MILLISECOND: "MILLISECOND"; +}; +/** + * @public + */ +export type ApproximateCreationDateTimePrecision = (typeof ApproximateCreationDateTimePrecision)[keyof typeof ApproximateCreationDateTimePrecision]; +/** + *

Contains details of a table archival operation.

+ * @public + */ +export interface ArchivalSummary { + /** + *

The date and time when table archival was initiated by DynamoDB, in UNIX epoch time + * format.

+ * @public + */ + ArchivalDateTime?: Date | undefined; + /** + *

The reason DynamoDB archived the table. Currently, the only possible value is:

+ *
    + *
  • + *

    + * INACCESSIBLE_ENCRYPTION_CREDENTIALS - The table was archived due + * to the table's KMS key being inaccessible for more than seven + * days. An On-Demand backup was created at the archival time.

    + *
  • + *
+ * @public + */ + ArchivalReason?: string | undefined; + /** + *

The Amazon Resource Name (ARN) of the backup the table was archived to, when + * applicable in the archival reason. If you wish to restore this backup to the same table + * name, you will need to delete the original table.

+ * @public + */ + ArchivalBackupArn?: string | undefined; +} +/** + * @public + * @enum + */ +export declare const AttributeAction: { + readonly ADD: "ADD"; + readonly DELETE: "DELETE"; + readonly PUT: "PUT"; +}; +/** + * @public + */ +export type AttributeAction = (typeof AttributeAction)[keyof typeof AttributeAction]; +/** + * @public + * @enum + */ +export declare const ScalarAttributeType: { + readonly B: "B"; + readonly N: "N"; + readonly S: "S"; +}; +/** + * @public + */ +export type ScalarAttributeType = (typeof ScalarAttributeType)[keyof typeof ScalarAttributeType]; +/** + *

Represents an attribute for describing the schema for the table and indexes.

+ * @public + */ +export interface AttributeDefinition { + /** + *

A name for the attribute.

+ * @public + */ + AttributeName: string | undefined; + /** + *

The data type for the attribute, where:

+ *
    + *
  • + *

    + * S - the attribute is of type String

    + *
  • + *
  • + *

    + * N - the attribute is of type Number

    + *
  • + *
  • + *

    + * B - the attribute is of type Binary

    + *
  • + *
+ * @public + */ + AttributeType: ScalarAttributeType | undefined; +} +/** + *

Represents the properties of a target tracking scaling policy.

+ * @public + */ +export interface AutoScalingTargetTrackingScalingPolicyConfigurationDescription { + /** + *

Indicates whether scale in by the target tracking policy is disabled. If the value is + * true, scale in is disabled and the target tracking policy won't remove capacity from the + * scalable resource. Otherwise, scale in is enabled and the target tracking policy can + * remove capacity from the scalable resource. The default value is false.

+ * @public + */ + DisableScaleIn?: boolean | undefined; + /** + *

The amount of time, in seconds, after a scale in activity completes before another + * scale in activity can start. The cooldown period is used to block subsequent scale in + * requests until it has expired. You should scale in conservatively to protect your + * application's availability. However, if another alarm triggers a scale out policy during + * the cooldown period after a scale-in, application auto scaling scales out your scalable + * target immediately.

+ * @public + */ + ScaleInCooldown?: number | undefined; + /** + *

The amount of time, in seconds, after a scale out activity completes before another + * scale out activity can start. While the cooldown period is in effect, the capacity that + * has been added by the previous scale out event that initiated the cooldown is calculated + * as part of the desired capacity for the next scale out. You should continuously (but not + * excessively) scale out.

+ * @public + */ + ScaleOutCooldown?: number | undefined; + /** + *

The target value for the metric. The range is 8.515920e-109 to 1.174271e+108 (Base 10) + * or 2e-360 to 2e360 (Base 2).

+ * @public + */ + TargetValue: number | undefined; +} +/** + *

Represents the properties of the scaling policy.

+ * @public + */ +export interface AutoScalingPolicyDescription { + /** + *

The name of the scaling policy.

+ * @public + */ + PolicyName?: string | undefined; + /** + *

Represents a target tracking scaling policy configuration.

+ * @public + */ + TargetTrackingScalingPolicyConfiguration?: AutoScalingTargetTrackingScalingPolicyConfigurationDescription | undefined; +} +/** + *

Represents the settings of a target tracking scaling policy that will be + * modified.

+ * @public + */ +export interface AutoScalingTargetTrackingScalingPolicyConfigurationUpdate { + /** + *

Indicates whether scale in by the target tracking policy is disabled. If the value is + * true, scale in is disabled and the target tracking policy won't remove capacity from the + * scalable resource. Otherwise, scale in is enabled and the target tracking policy can + * remove capacity from the scalable resource. The default value is false.

+ * @public + */ + DisableScaleIn?: boolean | undefined; + /** + *

The amount of time, in seconds, after a scale in activity completes before another + * scale in activity can start. The cooldown period is used to block subsequent scale in + * requests until it has expired. You should scale in conservatively to protect your + * application's availability. However, if another alarm triggers a scale out policy during + * the cooldown period after a scale-in, application auto scaling scales out your scalable + * target immediately.

+ * @public + */ + ScaleInCooldown?: number | undefined; + /** + *

The amount of time, in seconds, after a scale out activity completes before another + * scale out activity can start. While the cooldown period is in effect, the capacity that + * has been added by the previous scale out event that initiated the cooldown is calculated + * as part of the desired capacity for the next scale out. You should continuously (but not + * excessively) scale out.

+ * @public + */ + ScaleOutCooldown?: number | undefined; + /** + *

The target value for the metric. The range is 8.515920e-109 to 1.174271e+108 (Base 10) + * or 2e-360 to 2e360 (Base 2).

+ * @public + */ + TargetValue: number | undefined; +} +/** + *

Represents the auto scaling policy to be modified.

+ * @public + */ +export interface AutoScalingPolicyUpdate { + /** + *

The name of the scaling policy.

+ * @public + */ + PolicyName?: string | undefined; + /** + *

Represents a target tracking scaling policy configuration.

+ * @public + */ + TargetTrackingScalingPolicyConfiguration: AutoScalingTargetTrackingScalingPolicyConfigurationUpdate | undefined; +} +/** + *

Represents the auto scaling settings for a global table or global secondary + * index.

+ * @public + */ +export interface AutoScalingSettingsDescription { + /** + *

The minimum capacity units that a global table or global secondary index should be + * scaled down to.

+ * @public + */ + MinimumUnits?: number | undefined; + /** + *

The maximum capacity units that a global table or global secondary index should be + * scaled up to.

+ * @public + */ + MaximumUnits?: number | undefined; + /** + *

Disabled auto scaling for this global table or global secondary index.

+ * @public + */ + AutoScalingDisabled?: boolean | undefined; + /** + *

Role ARN used for configuring the auto scaling policy.

+ * @public + */ + AutoScalingRoleArn?: string | undefined; + /** + *

Information about the scaling policies.

+ * @public + */ + ScalingPolicies?: AutoScalingPolicyDescription[] | undefined; +} +/** + *

Represents the auto scaling settings to be modified for a global table or global + * secondary index.

+ * @public + */ +export interface AutoScalingSettingsUpdate { + /** + *

The minimum capacity units that a global table or global secondary index should be + * scaled down to.

+ * @public + */ + MinimumUnits?: number | undefined; + /** + *

The maximum capacity units that a global table or global secondary index should be + * scaled up to.

+ * @public + */ + MaximumUnits?: number | undefined; + /** + *

Disabled auto scaling for this global table or global secondary index.

+ * @public + */ + AutoScalingDisabled?: boolean | undefined; + /** + *

Role ARN used for configuring auto scaling policy.

+ * @public + */ + AutoScalingRoleArn?: string | undefined; + /** + *

The scaling policy to apply for scaling target global table or global secondary index + * capacity units.

+ * @public + */ + ScalingPolicyUpdate?: AutoScalingPolicyUpdate | undefined; +} +/** + * @public + * @enum + */ +export declare const BackupStatus: { + readonly AVAILABLE: "AVAILABLE"; + readonly CREATING: "CREATING"; + readonly DELETED: "DELETED"; +}; +/** + * @public + */ +export type BackupStatus = (typeof BackupStatus)[keyof typeof BackupStatus]; +/** + * @public + * @enum + */ +export declare const BackupType: { + readonly AWS_BACKUP: "AWS_BACKUP"; + readonly SYSTEM: "SYSTEM"; + readonly USER: "USER"; +}; +/** + * @public + */ +export type BackupType = (typeof BackupType)[keyof typeof BackupType]; +/** + *

Contains the details of the backup created for the table.

+ * @public + */ +export interface BackupDetails { + /** + *

ARN associated with the backup.

+ * @public + */ + BackupArn: string | undefined; + /** + *

Name of the requested backup.

+ * @public + */ + BackupName: string | undefined; + /** + *

Size of the backup in bytes. DynamoDB updates this value approximately every six + * hours. Recent changes might not be reflected in this value.

+ * @public + */ + BackupSizeBytes?: number | undefined; + /** + *

Backup can be in one of the following states: CREATING, ACTIVE, DELETED.

+ * @public + */ + BackupStatus: BackupStatus | undefined; + /** + *

BackupType:

+ *
    + *
  • + *

    + * USER - You create and manage these using the on-demand backup + * feature.

    + *
  • + *
  • + *

    + * SYSTEM - If you delete a table with point-in-time recovery enabled, + * a SYSTEM backup is automatically created and is retained for 35 + * days (at no additional cost). System backups allow you to restore the deleted + * table to the state it was in just before the point of deletion.

    + *
  • + *
  • + *

    + * AWS_BACKUP - On-demand backup created by you from Backup service.

    + *
  • + *
+ * @public + */ + BackupType: BackupType | undefined; + /** + *

Time at which the backup was created. This is the request time of the backup.

+ * @public + */ + BackupCreationDateTime: Date | undefined; + /** + *

Time at which the automatic on-demand backup created by DynamoDB will + * expire. This SYSTEM on-demand backup expires automatically 35 days after + * its creation.

+ * @public + */ + BackupExpiryDateTime?: Date | undefined; +} +/** + * @public + * @enum + */ +export declare const BillingMode: { + readonly PAY_PER_REQUEST: "PAY_PER_REQUEST"; + readonly PROVISIONED: "PROVISIONED"; +}; +/** + * @public + */ +export type BillingMode = (typeof BillingMode)[keyof typeof BillingMode]; +/** + * @public + * @enum + */ +export declare const KeyType: { + readonly HASH: "HASH"; + readonly RANGE: "RANGE"; +}; +/** + * @public + */ +export type KeyType = (typeof KeyType)[keyof typeof KeyType]; +/** + *

Represents a single element of a key schema. A key schema + * specifies the attributes that make up the primary key of a table, or the key attributes + * of an index.

+ *

A KeySchemaElement represents exactly one attribute of the primary key. + * For example, a simple primary key would be represented by one + * KeySchemaElement (for the partition key). A composite primary key would + * require one KeySchemaElement for the partition key, and another + * KeySchemaElement for the sort key.

+ *

A KeySchemaElement must be a scalar, top-level attribute (not a nested + * attribute). The data type must be one of String, Number, or Binary. The attribute cannot + * be nested within a List or a Map.

+ * @public + */ +export interface KeySchemaElement { + /** + *

The name of a key attribute.

+ * @public + */ + AttributeName: string | undefined; + /** + *

The role that this key attribute will assume:

+ *
    + *
  • + *

    + * HASH - partition key

    + *
  • + *
  • + *

    + * RANGE - sort key

    + *
  • + *
+ * + *

The partition key of an item is also known as its hash + * attribute. The term "hash attribute" derives from DynamoDB's usage of an internal hash function to evenly distribute data items across + * partitions, based on their partition key values.

+ *

The sort key of an item is also known as its range attribute. + * The term "range attribute" derives from the way DynamoDB stores items with + * the same partition key physically close together, in sorted order by the sort key + * value.

+ *
+ * @public + */ + KeyType: KeyType | undefined; +} +/** + *

Sets the maximum number of read and write units for the specified on-demand table. If + * you use this parameter, you must specify MaxReadRequestUnits, + * MaxWriteRequestUnits, or both.

+ * @public + */ +export interface OnDemandThroughput { + /** + *

Maximum number of read request units for the specified table.

+ *

To specify a maximum OnDemandThroughput on your table, set the value of + * MaxReadRequestUnits as greater than or equal to 1. To remove the + * maximum OnDemandThroughput that is currently set on your table, set the + * value of MaxReadRequestUnits to -1.

+ * @public + */ + MaxReadRequestUnits?: number | undefined; + /** + *

Maximum number of write request units for the specified table.

+ *

To specify a maximum OnDemandThroughput on your table, set the value of + * MaxWriteRequestUnits as greater than or equal to 1. To remove the + * maximum OnDemandThroughput that is currently set on your table, set the + * value of MaxWriteRequestUnits to -1.

+ * @public + */ + MaxWriteRequestUnits?: number | undefined; +} +/** + *

Represents the provisioned throughput settings for the specified global secondary + * index. You must use ProvisionedThroughput or + * OnDemandThroughput based on your table’s capacity mode.

+ *

For current minimum and maximum provisioned throughput values, see Service, + * Account, and Table Quotas in the Amazon DynamoDB Developer + * Guide.

+ * @public + */ +export interface ProvisionedThroughput { + /** + *

The maximum number of strongly consistent reads consumed per second before DynamoDB + * returns a ThrottlingException. For more information, see Specifying + * Read and Write Requirements in the Amazon DynamoDB Developer + * Guide.

+ *

If read/write capacity mode is PAY_PER_REQUEST the value is set to + * 0.

+ * @public + */ + ReadCapacityUnits: number | undefined; + /** + *

The maximum number of writes consumed per second before DynamoDB returns a + * ThrottlingException. For more information, see Specifying + * Read and Write Requirements in the Amazon DynamoDB Developer + * Guide.

+ *

If read/write capacity mode is PAY_PER_REQUEST the value is set to + * 0.

+ * @public + */ + WriteCapacityUnits: number | undefined; +} +/** + *

Contains the details of the table when the backup was created.

+ * @public + */ +export interface SourceTableDetails { + /** + *

The name of the table for which the backup was created.

+ * @public + */ + TableName: string | undefined; + /** + *

Unique identifier for the table for which the backup was created.

+ * @public + */ + TableId: string | undefined; + /** + *

ARN of the table for which backup was created.

+ * @public + */ + TableArn?: string | undefined; + /** + *

Size of the table in bytes. Note that this is an approximate value.

+ * @public + */ + TableSizeBytes?: number | undefined; + /** + *

Schema of the table.

+ * @public + */ + KeySchema: KeySchemaElement[] | undefined; + /** + *

Time when the source table was created.

+ * @public + */ + TableCreationDateTime: Date | undefined; + /** + *

Read IOPs and Write IOPS on the table when the backup was created.

+ * @public + */ + ProvisionedThroughput: ProvisionedThroughput | undefined; + /** + *

Sets the maximum number of read and write units for the specified on-demand table. If + * you use this parameter, you must specify MaxReadRequestUnits, + * MaxWriteRequestUnits, or both.

+ * @public + */ + OnDemandThroughput?: OnDemandThroughput | undefined; + /** + *

Number of items in the table. Note that this is an approximate value.

+ * @public + */ + ItemCount?: number | undefined; + /** + *

Controls how you are charged for read and write throughput and how you manage + * capacity. This setting can be changed later.

+ *
    + *
  • + *

    + * PROVISIONED - Sets the read/write capacity mode to + * PROVISIONED. We recommend using PROVISIONED for + * predictable workloads.

    + *
  • + *
  • + *

    + * PAY_PER_REQUEST - Sets the read/write capacity mode to + * PAY_PER_REQUEST. We recommend using + * PAY_PER_REQUEST for unpredictable workloads.

    + *
  • + *
+ * @public + */ + BillingMode?: BillingMode | undefined; +} +/** + * @public + * @enum + */ +export declare const ProjectionType: { + readonly ALL: "ALL"; + readonly INCLUDE: "INCLUDE"; + readonly KEYS_ONLY: "KEYS_ONLY"; +}; +/** + * @public + */ +export type ProjectionType = (typeof ProjectionType)[keyof typeof ProjectionType]; +/** + *

Represents attributes that are copied (projected) from the table into an index. These + * are in addition to the primary key attributes and index key attributes, which are + * automatically projected.

+ * @public + */ +export interface Projection { + /** + *

The set of attributes that are projected into the index:

+ *
    + *
  • + *

    + * KEYS_ONLY - Only the index and primary keys are projected into the + * index.

    + *
  • + *
  • + *

    + * INCLUDE - In addition to the attributes described in + * KEYS_ONLY, the secondary index will include other non-key + * attributes that you specify.

    + *
  • + *
  • + *

    + * ALL - All of the table attributes are projected into the + * index.

    + *
  • + *
+ *

When using the DynamoDB console, ALL is selected by default.

+ * @public + */ + ProjectionType?: ProjectionType | undefined; + /** + *

Represents the non-key attribute names which will be projected into the index.

+ *

For global and local secondary indexes, the total count of NonKeyAttributes summed + * across all of the secondary indexes, must not exceed 100. If you project the same + * attribute into two different indexes, this counts as two distinct attributes when + * determining the total. This limit only applies when you specify the ProjectionType of + * INCLUDE. You still can specify the ProjectionType of ALL to + * project all attributes from the source table, even if the table has more than 100 + * attributes.

+ * @public + */ + NonKeyAttributes?: string[] | undefined; +} +/** + *

Represents the properties of a global secondary index for the table when the backup + * was created.

+ * @public + */ +export interface GlobalSecondaryIndexInfo { + /** + *

The name of the global secondary index.

+ * @public + */ + IndexName?: string | undefined; + /** + *

The complete key schema for a global secondary index, which consists of one or more + * pairs of attribute names and key types:

+ *
    + *
  • + *

    + * HASH - partition key

    + *
  • + *
  • + *

    + * RANGE - sort key

    + *
  • + *
+ * + *

The partition key of an item is also known as its hash + * attribute. The term "hash attribute" derives from DynamoDB's usage of an internal hash function to evenly distribute data items across + * partitions, based on their partition key values.

+ *

The sort key of an item is also known as its range attribute. + * The term "range attribute" derives from the way DynamoDB stores items with + * the same partition key physically close together, in sorted order by the sort key + * value.

+ *
+ * @public + */ + KeySchema?: KeySchemaElement[] | undefined; + /** + *

Represents attributes that are copied (projected) from the table into the global + * secondary index. These are in addition to the primary key attributes and index key + * attributes, which are automatically projected.

+ * @public + */ + Projection?: Projection | undefined; + /** + *

Represents the provisioned throughput settings for the specified global secondary + * index.

+ * @public + */ + ProvisionedThroughput?: ProvisionedThroughput | undefined; + /** + *

Sets the maximum number of read and write units for the specified on-demand table. If + * you use this parameter, you must specify MaxReadRequestUnits, + * MaxWriteRequestUnits, or both.

+ * @public + */ + OnDemandThroughput?: OnDemandThroughput | undefined; +} +/** + *

Represents the properties of a local secondary index for the table when the backup was + * created.

+ * @public + */ +export interface LocalSecondaryIndexInfo { + /** + *

Represents the name of the local secondary index.

+ * @public + */ + IndexName?: string | undefined; + /** + *

The complete key schema for a local secondary index, which consists of one or more + * pairs of attribute names and key types:

+ *
    + *
  • + *

    + * HASH - partition key

    + *
  • + *
  • + *

    + * RANGE - sort key

    + *
  • + *
+ * + *

The partition key of an item is also known as its hash + * attribute. The term "hash attribute" derives from DynamoDB's usage of + * an internal hash function to evenly distribute data items across partitions, based + * on their partition key values.

+ *

The sort key of an item is also known as its range attribute. + * The term "range attribute" derives from the way DynamoDB stores items with the same + * partition key physically close together, in sorted order by the sort key + * value.

+ *
+ * @public + */ + KeySchema?: KeySchemaElement[] | undefined; + /** + *

Represents attributes that are copied (projected) from the table into the global + * secondary index. These are in addition to the primary key attributes and index key + * attributes, which are automatically projected.

+ * @public + */ + Projection?: Projection | undefined; +} +/** + * @public + * @enum + */ +export declare const SSEType: { + readonly AES256: "AES256"; + readonly KMS: "KMS"; +}; +/** + * @public + */ +export type SSEType = (typeof SSEType)[keyof typeof SSEType]; +/** + * @public + * @enum + */ +export declare const SSEStatus: { + readonly DISABLED: "DISABLED"; + readonly DISABLING: "DISABLING"; + readonly ENABLED: "ENABLED"; + readonly ENABLING: "ENABLING"; + readonly UPDATING: "UPDATING"; +}; +/** + * @public + */ +export type SSEStatus = (typeof SSEStatus)[keyof typeof SSEStatus]; +/** + *

The description of the server-side encryption status on the specified table.

+ * @public + */ +export interface SSEDescription { + /** + *

Represents the current state of server-side encryption. The only supported values + * are:

+ *
    + *
  • + *

    + * ENABLED - Server-side encryption is enabled.

    + *
  • + *
  • + *

    + * UPDATING - Server-side encryption is being updated.

    + *
  • + *
+ * @public + */ + Status?: SSEStatus | undefined; + /** + *

Server-side encryption type. The only supported value is:

+ *
    + *
  • + *

    + * KMS - Server-side encryption that uses Key Management Service. The + * key is stored in your account and is managed by KMS (KMS charges apply).

    + *
  • + *
+ * @public + */ + SSEType?: SSEType | undefined; + /** + *

The KMS key ARN used for the KMS encryption.

+ * @public + */ + KMSMasterKeyArn?: string | undefined; + /** + *

Indicates the time, in UNIX epoch date format, when DynamoDB detected that + * the table's KMS key was inaccessible. This attribute will automatically + * be cleared when DynamoDB detects that the table's KMS key is accessible + * again. DynamoDB will initiate the table archival process when table's KMS key remains inaccessible for more than seven days from this date.

+ * @public + */ + InaccessibleEncryptionDateTime?: Date | undefined; +} +/** + * @public + * @enum + */ +export declare const StreamViewType: { + readonly KEYS_ONLY: "KEYS_ONLY"; + readonly NEW_AND_OLD_IMAGES: "NEW_AND_OLD_IMAGES"; + readonly NEW_IMAGE: "NEW_IMAGE"; + readonly OLD_IMAGE: "OLD_IMAGE"; +}; +/** + * @public + */ +export type StreamViewType = (typeof StreamViewType)[keyof typeof StreamViewType]; +/** + *

Represents the DynamoDB Streams configuration for a table in DynamoDB.

+ * @public + */ +export interface StreamSpecification { + /** + *

Indicates whether DynamoDB Streams is enabled (true) or disabled (false) on the + * table.

+ * @public + */ + StreamEnabled: boolean | undefined; + /** + *

When an item in the table is modified, StreamViewType determines what + * information is written to the stream for this table. Valid values for + * StreamViewType are:

+ *
    + *
  • + *

    + * KEYS_ONLY - Only the key attributes of the modified item are + * written to the stream.

    + *
  • + *
  • + *

    + * NEW_IMAGE - The entire item, as it appears after it was modified, + * is written to the stream.

    + *
  • + *
  • + *

    + * OLD_IMAGE - The entire item, as it appeared before it was modified, + * is written to the stream.

    + *
  • + *
  • + *

    + * NEW_AND_OLD_IMAGES - Both the new and the old item images of the + * item are written to the stream.

    + *
  • + *
+ * @public + */ + StreamViewType?: StreamViewType | undefined; +} +/** + * @public + * @enum + */ +export declare const TimeToLiveStatus: { + readonly DISABLED: "DISABLED"; + readonly DISABLING: "DISABLING"; + readonly ENABLED: "ENABLED"; + readonly ENABLING: "ENABLING"; +}; +/** + * @public + */ +export type TimeToLiveStatus = (typeof TimeToLiveStatus)[keyof typeof TimeToLiveStatus]; +/** + *

The description of the Time to Live (TTL) status on the specified table.

+ * @public + */ +export interface TimeToLiveDescription { + /** + *

The TTL status for the table.

+ * @public + */ + TimeToLiveStatus?: TimeToLiveStatus | undefined; + /** + *

The name of the TTL attribute for items in the table.

+ * @public + */ + AttributeName?: string | undefined; +} +/** + *

Contains the details of the features enabled on the table when the backup was created. + * For example, LSIs, GSIs, streams, TTL.

+ * @public + */ +export interface SourceTableFeatureDetails { + /** + *

Represents the LSI properties for the table when the backup was created. It includes + * the IndexName, KeySchema and Projection for the LSIs on the table at the time of backup. + *

+ * @public + */ + LocalSecondaryIndexes?: LocalSecondaryIndexInfo[] | undefined; + /** + *

Represents the GSI properties for the table when the backup was created. It includes + * the IndexName, KeySchema, Projection, and ProvisionedThroughput for the GSIs on the + * table at the time of backup.

+ * @public + */ + GlobalSecondaryIndexes?: GlobalSecondaryIndexInfo[] | undefined; + /** + *

Stream settings on the table when the backup was created.

+ * @public + */ + StreamDescription?: StreamSpecification | undefined; + /** + *

Time to Live settings on the table when the backup was created.

+ * @public + */ + TimeToLiveDescription?: TimeToLiveDescription | undefined; + /** + *

The description of the server-side encryption status on the table when the backup was + * created.

+ * @public + */ + SSEDescription?: SSEDescription | undefined; +} +/** + *

Contains the description of the backup created for the table.

+ * @public + */ +export interface BackupDescription { + /** + *

Contains the details of the backup created for the table.

+ * @public + */ + BackupDetails?: BackupDetails | undefined; + /** + *

Contains the details of the table when the backup was created.

+ * @public + */ + SourceTableDetails?: SourceTableDetails | undefined; + /** + *

Contains the details of the features enabled on the table when the backup was created. + * For example, LSIs, GSIs, streams, TTL.

+ * @public + */ + SourceTableFeatureDetails?: SourceTableFeatureDetails | undefined; +} +/** + *

There is another ongoing conflicting backup control plane operation on the table. + * The backup is either being created, deleted or restored to a table.

+ * @public + */ +export declare class BackupInUseException extends __BaseException { + readonly name: "BackupInUseException"; + readonly $fault: "client"; + /** + * @internal + */ + constructor(opts: __ExceptionOptionType); +} +/** + *

Backup not found for the given BackupARN.

+ * @public + */ +export declare class BackupNotFoundException extends __BaseException { + readonly name: "BackupNotFoundException"; + readonly $fault: "client"; + /** + * @internal + */ + constructor(opts: __ExceptionOptionType); +} +/** + *

Contains details for the backup.

+ * @public + */ +export interface BackupSummary { + /** + *

Name of the table.

+ * @public + */ + TableName?: string | undefined; + /** + *

Unique identifier for the table.

+ * @public + */ + TableId?: string | undefined; + /** + *

ARN associated with the table.

+ * @public + */ + TableArn?: string | undefined; + /** + *

ARN associated with the backup.

+ * @public + */ + BackupArn?: string | undefined; + /** + *

Name of the specified backup.

+ * @public + */ + BackupName?: string | undefined; + /** + *

Time at which the backup was created.

+ * @public + */ + BackupCreationDateTime?: Date | undefined; + /** + *

Time at which the automatic on-demand backup created by DynamoDB will + * expire. This SYSTEM on-demand backup expires automatically 35 days after + * its creation.

+ * @public + */ + BackupExpiryDateTime?: Date | undefined; + /** + *

Backup can be in one of the following states: CREATING, ACTIVE, DELETED.

+ * @public + */ + BackupStatus?: BackupStatus | undefined; + /** + *

BackupType:

+ *
    + *
  • + *

    + * USER - You create and manage these using the on-demand backup + * feature.

    + *
  • + *
  • + *

    + * SYSTEM - If you delete a table with point-in-time recovery enabled, + * a SYSTEM backup is automatically created and is retained for 35 + * days (at no additional cost). System backups allow you to restore the deleted + * table to the state it was in just before the point of deletion.

    + *
  • + *
  • + *

    + * AWS_BACKUP - On-demand backup created by you from Backup service.

    + *
  • + *
+ * @public + */ + BackupType?: BackupType | undefined; + /** + *

Size of the backup in bytes.

+ * @public + */ + BackupSizeBytes?: number | undefined; +} +/** + * @public + * @enum + */ +export declare const BackupTypeFilter: { + readonly ALL: "ALL"; + readonly AWS_BACKUP: "AWS_BACKUP"; + readonly SYSTEM: "SYSTEM"; + readonly USER: "USER"; +}; +/** + * @public + */ +export type BackupTypeFilter = (typeof BackupTypeFilter)[keyof typeof BackupTypeFilter]; +/** + * @public + * @enum + */ +export declare const ReturnConsumedCapacity: { + readonly INDEXES: "INDEXES"; + readonly NONE: "NONE"; + readonly TOTAL: "TOTAL"; +}; +/** + * @public + */ +export type ReturnConsumedCapacity = (typeof ReturnConsumedCapacity)[keyof typeof ReturnConsumedCapacity]; +/** + * @public + * @enum + */ +export declare const ReturnValuesOnConditionCheckFailure: { + readonly ALL_OLD: "ALL_OLD"; + readonly NONE: "NONE"; +}; +/** + * @public + */ +export type ReturnValuesOnConditionCheckFailure = (typeof ReturnValuesOnConditionCheckFailure)[keyof typeof ReturnValuesOnConditionCheckFailure]; +/** + *

Represents the amount of provisioned throughput capacity consumed on a table or an + * index.

+ * @public + */ +export interface Capacity { + /** + *

The total number of read capacity units consumed on a table or an index.

+ * @public + */ + ReadCapacityUnits?: number | undefined; + /** + *

The total number of write capacity units consumed on a table or an index.

+ * @public + */ + WriteCapacityUnits?: number | undefined; + /** + *

The total number of capacity units consumed on a table or an index.

+ * @public + */ + CapacityUnits?: number | undefined; +} +/** + *

The capacity units consumed by an operation. The data returned includes the total + * provisioned throughput consumed, along with statistics for the table and any indexes + * involved in the operation. ConsumedCapacity is only returned if the request + * asked for it. For more information, see Provisioned capacity mode in the Amazon DynamoDB Developer + * Guide.

+ * @public + */ +export interface ConsumedCapacity { + /** + *

The name of the table that was affected by the operation. If you had specified the + * Amazon Resource Name (ARN) of a table in the input, you'll see the table ARN in the response.

+ * @public + */ + TableName?: string | undefined; + /** + *

The total number of capacity units consumed by the operation.

+ * @public + */ + CapacityUnits?: number | undefined; + /** + *

The total number of read capacity units consumed by the operation.

+ * @public + */ + ReadCapacityUnits?: number | undefined; + /** + *

The total number of write capacity units consumed by the operation.

+ * @public + */ + WriteCapacityUnits?: number | undefined; + /** + *

The amount of throughput consumed on the table affected by the operation.

+ * @public + */ + Table?: Capacity | undefined; + /** + *

The amount of throughput consumed on each local index affected by the + * operation.

+ * @public + */ + LocalSecondaryIndexes?: Record | undefined; + /** + *

The amount of throughput consumed on each global index affected by the + * operation.

+ * @public + */ + GlobalSecondaryIndexes?: Record | undefined; +} +/** + * @public + * @enum + */ +export declare const BatchStatementErrorCodeEnum: { + readonly AccessDenied: "AccessDenied"; + readonly ConditionalCheckFailed: "ConditionalCheckFailed"; + readonly DuplicateItem: "DuplicateItem"; + readonly InternalServerError: "InternalServerError"; + readonly ItemCollectionSizeLimitExceeded: "ItemCollectionSizeLimitExceeded"; + readonly ProvisionedThroughputExceeded: "ProvisionedThroughputExceeded"; + readonly RequestLimitExceeded: "RequestLimitExceeded"; + readonly ResourceNotFound: "ResourceNotFound"; + readonly ThrottlingError: "ThrottlingError"; + readonly TransactionConflict: "TransactionConflict"; + readonly ValidationError: "ValidationError"; +}; +/** + * @public + */ +export type BatchStatementErrorCodeEnum = (typeof BatchStatementErrorCodeEnum)[keyof typeof BatchStatementErrorCodeEnum]; +/** + *

An error occurred on the server side.

+ * @public + */ +export declare class InternalServerError extends __BaseException { + readonly name: "InternalServerError"; + readonly $fault: "server"; + /** + * @internal + */ + constructor(opts: __ExceptionOptionType); +} +/** + *

Throughput exceeds the current throughput quota for your account. Please contact + * Amazon Web ServicesSupport to request a + * quota increase.

+ * @public + */ +export declare class RequestLimitExceeded extends __BaseException { + readonly name: "RequestLimitExceeded"; + readonly $fault: "client"; + /** + * @internal + */ + constructor(opts: __ExceptionOptionType); +} +/** + * @public + */ +export declare class InvalidEndpointException extends __BaseException { + readonly name: "InvalidEndpointException"; + readonly $fault: "client"; + Message?: string | undefined; + /** + * @internal + */ + constructor(opts: __ExceptionOptionType); +} +/** + *

Your request rate is too high. The Amazon Web Services SDKs for DynamoDB + * automatically retry requests that receive this exception. Your request is eventually + * successful, unless your retry queue is too large to finish. Reduce the frequency of + * requests and use exponential backoff. For more information, go to Error Retries and Exponential Backoff in the Amazon DynamoDB Developer Guide.

+ * @public + */ +export declare class ProvisionedThroughputExceededException extends __BaseException { + readonly name: "ProvisionedThroughputExceededException"; + readonly $fault: "client"; + /** + * @internal + */ + constructor(opts: __ExceptionOptionType); +} +/** + *

The operation tried to access a nonexistent table or index. The resource might not + * be specified correctly, or its status might not be ACTIVE.

+ * @public + */ +export declare class ResourceNotFoundException extends __BaseException { + readonly name: "ResourceNotFoundException"; + readonly $fault: "client"; + /** + * @internal + */ + constructor(opts: __ExceptionOptionType); +} +/** + * @public + * @enum + */ +export declare const ReturnItemCollectionMetrics: { + readonly NONE: "NONE"; + readonly SIZE: "SIZE"; +}; +/** + * @public + */ +export type ReturnItemCollectionMetrics = (typeof ReturnItemCollectionMetrics)[keyof typeof ReturnItemCollectionMetrics]; +/** + *

An item collection is too large. This exception is only returned for tables that + * have one or more local secondary indexes.

+ * @public + */ +export declare class ItemCollectionSizeLimitExceededException extends __BaseException { + readonly name: "ItemCollectionSizeLimitExceededException"; + readonly $fault: "client"; + /** + * @internal + */ + constructor(opts: __ExceptionOptionType); +} +/** + *

Contains the details for the read/write capacity mode. This page talks about + * PROVISIONED and PAY_PER_REQUEST billing modes. For more + * information about these modes, see Read/write capacity mode.

+ * + *

You may need to switch to on-demand mode at least once in order to return a + * BillingModeSummary response.

+ *
+ * @public + */ +export interface BillingModeSummary { + /** + *

Controls how you are charged for read and write throughput and how you manage + * capacity. This setting can be changed later.

+ *
    + *
  • + *

    + * PROVISIONED - Sets the read/write capacity mode to + * PROVISIONED. We recommend using PROVISIONED for + * predictable workloads.

    + *
  • + *
  • + *

    + * PAY_PER_REQUEST - Sets the read/write capacity mode to + * PAY_PER_REQUEST. We recommend using + * PAY_PER_REQUEST for unpredictable workloads.

    + *
  • + *
+ * @public + */ + BillingMode?: BillingMode | undefined; + /** + *

Represents the time when PAY_PER_REQUEST was last set as the read/write + * capacity mode.

+ * @public + */ + LastUpdateToPayPerRequestDateTime?: Date | undefined; +} +/** + * @public + * @enum + */ +export declare const ComparisonOperator: { + readonly BEGINS_WITH: "BEGINS_WITH"; + readonly BETWEEN: "BETWEEN"; + readonly CONTAINS: "CONTAINS"; + readonly EQ: "EQ"; + readonly GE: "GE"; + readonly GT: "GT"; + readonly IN: "IN"; + readonly LE: "LE"; + readonly LT: "LT"; + readonly NE: "NE"; + readonly NOT_CONTAINS: "NOT_CONTAINS"; + readonly NOT_NULL: "NOT_NULL"; + readonly NULL: "NULL"; +}; +/** + * @public + */ +export type ComparisonOperator = (typeof ComparisonOperator)[keyof typeof ComparisonOperator]; +/** + * @public + * @enum + */ +export declare const ConditionalOperator: { + readonly AND: "AND"; + readonly OR: "OR"; +}; +/** + * @public + */ +export type ConditionalOperator = (typeof ConditionalOperator)[keyof typeof ConditionalOperator]; +/** + * @public + * @enum + */ +export declare const ContinuousBackupsStatus: { + readonly DISABLED: "DISABLED"; + readonly ENABLED: "ENABLED"; +}; +/** + * @public + */ +export type ContinuousBackupsStatus = (typeof ContinuousBackupsStatus)[keyof typeof ContinuousBackupsStatus]; +/** + * @public + * @enum + */ +export declare const PointInTimeRecoveryStatus: { + readonly DISABLED: "DISABLED"; + readonly ENABLED: "ENABLED"; +}; +/** + * @public + */ +export type PointInTimeRecoveryStatus = (typeof PointInTimeRecoveryStatus)[keyof typeof PointInTimeRecoveryStatus]; +/** + *

The description of the point in time settings applied to the table.

+ * @public + */ +export interface PointInTimeRecoveryDescription { + /** + *

The current state of point in time recovery:

+ *
    + *
  • + *

    + * ENABLED - Point in time recovery is enabled.

    + *
  • + *
  • + *

    + * DISABLED - Point in time recovery is disabled.

    + *
  • + *
+ * @public + */ + PointInTimeRecoveryStatus?: PointInTimeRecoveryStatus | undefined; + /** + *

The number of preceding days for which continuous backups are taken and maintained. + * Your table data is only recoverable to any point-in-time from within the configured + * recovery period. This parameter is optional.

+ * @public + */ + RecoveryPeriodInDays?: number | undefined; + /** + *

Specifies the earliest point in time you can restore your table to. You can restore + * your table to any point in time during the last 35 days.

+ * @public + */ + EarliestRestorableDateTime?: Date | undefined; + /** + *

+ * LatestRestorableDateTime is typically 5 minutes before the current time. + *

+ * @public + */ + LatestRestorableDateTime?: Date | undefined; +} +/** + *

Represents the continuous backups and point in time recovery settings on the + * table.

+ * @public + */ +export interface ContinuousBackupsDescription { + /** + *

+ * ContinuousBackupsStatus can be one of the following states: ENABLED, + * DISABLED

+ * @public + */ + ContinuousBackupsStatus: ContinuousBackupsStatus | undefined; + /** + *

The description of the point in time recovery settings applied to the table.

+ * @public + */ + PointInTimeRecoveryDescription?: PointInTimeRecoveryDescription | undefined; +} +/** + *

Backups have not yet been enabled for this table.

+ * @public + */ +export declare class ContinuousBackupsUnavailableException extends __BaseException { + readonly name: "ContinuousBackupsUnavailableException"; + readonly $fault: "client"; + /** + * @internal + */ + constructor(opts: __ExceptionOptionType); +} +/** + * @public + * @enum + */ +export declare const ContributorInsightsAction: { + readonly DISABLE: "DISABLE"; + readonly ENABLE: "ENABLE"; +}; +/** + * @public + */ +export type ContributorInsightsAction = (typeof ContributorInsightsAction)[keyof typeof ContributorInsightsAction]; +/** + * @public + * @enum + */ +export declare const ContributorInsightsStatus: { + readonly DISABLED: "DISABLED"; + readonly DISABLING: "DISABLING"; + readonly ENABLED: "ENABLED"; + readonly ENABLING: "ENABLING"; + readonly FAILED: "FAILED"; +}; +/** + * @public + */ +export type ContributorInsightsStatus = (typeof ContributorInsightsStatus)[keyof typeof ContributorInsightsStatus]; +/** + *

Represents a Contributor Insights summary entry.

+ * @public + */ +export interface ContributorInsightsSummary { + /** + *

Name of the table associated with the summary.

+ * @public + */ + TableName?: string | undefined; + /** + *

Name of the index associated with the summary, if any.

+ * @public + */ + IndexName?: string | undefined; + /** + *

Describes the current status for contributor insights for the given table and index, + * if applicable.

+ * @public + */ + ContributorInsightsStatus?: ContributorInsightsStatus | undefined; +} +/** + * @public + */ +export interface CreateBackupInput { + /** + *

The name of the table. You can also provide the Amazon Resource Name (ARN) of the table in this + * parameter.

+ * @public + */ + TableName: string | undefined; + /** + *

Specified name for the backup.

+ * @public + */ + BackupName: string | undefined; +} +/** + * @public + */ +export interface CreateBackupOutput { + /** + *

Contains the details of the backup created for the table.

+ * @public + */ + BackupDetails?: BackupDetails | undefined; +} +/** + *

There is no limit to the number of daily on-demand backups that can be taken.

+ *

For most purposes, up to 500 simultaneous table operations are allowed per account. These operations + * include CreateTable, UpdateTable, + * DeleteTable,UpdateTimeToLive, + * RestoreTableFromBackup, and RestoreTableToPointInTime.

+ *

When you are creating a table with one or more secondary + * indexes, you can have up to 250 such requests running at a time. However, if the table or + * index specifications are complex, then DynamoDB might temporarily reduce the number + * of concurrent operations.

+ *

When importing into DynamoDB, up to 50 simultaneous import table operations are allowed per account.

+ *

There is a soft account quota of 2,500 tables.

+ *

GetRecords was called with a value of more than 1000 for the limit request parameter.

+ *

More than 2 processes are reading from the same streams shard at the same time. Exceeding + * this limit may result in request throttling.

+ * @public + */ +export declare class LimitExceededException extends __BaseException { + readonly name: "LimitExceededException"; + readonly $fault: "client"; + /** + * @internal + */ + constructor(opts: __ExceptionOptionType); +} +/** + *

A target table with the specified name is either being created or deleted. + *

+ * @public + */ +export declare class TableInUseException extends __BaseException { + readonly name: "TableInUseException"; + readonly $fault: "client"; + /** + * @internal + */ + constructor(opts: __ExceptionOptionType); +} +/** + *

A source table with the name TableName does not currently exist within + * the subscriber's account or the subscriber is operating in the wrong Amazon Web Services Region.

+ * @public + */ +export declare class TableNotFoundException extends __BaseException { + readonly name: "TableNotFoundException"; + readonly $fault: "client"; + /** + * @internal + */ + constructor(opts: __ExceptionOptionType); +} +/** + *

Provides visibility into the number of read and write operations your table or + * secondary index can instantaneously support. The settings can be modified using the + * UpdateTable operation to meet the throughput requirements of an + * upcoming peak event.

+ * @public + */ +export interface WarmThroughput { + /** + *

Represents the number of read operations your base table can instantaneously + * support.

+ * @public + */ + ReadUnitsPerSecond?: number | undefined; + /** + *

Represents the number of write operations your base table can instantaneously + * support.

+ * @public + */ + WriteUnitsPerSecond?: number | undefined; +} +/** + *

Represents a new global secondary index to be added to an existing table.

+ * @public + */ +export interface CreateGlobalSecondaryIndexAction { + /** + *

The name of the global secondary index to be created.

+ * @public + */ + IndexName: string | undefined; + /** + *

The key schema for the global secondary index.

+ * @public + */ + KeySchema: KeySchemaElement[] | undefined; + /** + *

Represents attributes that are copied (projected) from the table into an index. These + * are in addition to the primary key attributes and index key attributes, which are + * automatically projected.

+ * @public + */ + Projection: Projection | undefined; + /** + *

Represents the provisioned throughput settings for the specified global secondary + * index.

+ *

For current minimum and maximum provisioned throughput values, see Service, + * Account, and Table Quotas in the Amazon DynamoDB Developer + * Guide.

+ * @public + */ + ProvisionedThroughput?: ProvisionedThroughput | undefined; + /** + *

The maximum number of read and write units for the global secondary index being + * created. If you use this parameter, you must specify MaxReadRequestUnits, + * MaxWriteRequestUnits, or both. You must use either + * OnDemand Throughput or ProvisionedThroughput based on your table's + * capacity mode.

+ * @public + */ + OnDemandThroughput?: OnDemandThroughput | undefined; + /** + *

Represents the warm throughput value (in read units per second and write units per + * second) when creating a secondary index.

+ * @public + */ + WarmThroughput?: WarmThroughput | undefined; +} +/** + *

Represents the properties of a replica.

+ * @public + */ +export interface Replica { + /** + *

The Region where the replica needs to be created.

+ * @public + */ + RegionName?: string | undefined; +} +/** + * @public + */ +export interface CreateGlobalTableInput { + /** + *

The global table name.

+ * @public + */ + GlobalTableName: string | undefined; + /** + *

The Regions where the global table needs to be created.

+ * @public + */ + ReplicationGroup: Replica[] | undefined; +} +/** + * @public + * @enum + */ +export declare const GlobalTableStatus: { + readonly ACTIVE: "ACTIVE"; + readonly CREATING: "CREATING"; + readonly DELETING: "DELETING"; + readonly UPDATING: "UPDATING"; +}; +/** + * @public + */ +export type GlobalTableStatus = (typeof GlobalTableStatus)[keyof typeof GlobalTableStatus]; +/** + *

Overrides the on-demand throughput settings for this replica table. If you don't + * specify a value for this parameter, it uses the source table's on-demand throughput + * settings.

+ * @public + */ +export interface OnDemandThroughputOverride { + /** + *

Maximum number of read request units for the specified replica table.

+ * @public + */ + MaxReadRequestUnits?: number | undefined; +} +/** + *

Replica-specific provisioned throughput settings. If not specified, uses the source + * table's provisioned throughput settings.

+ * @public + */ +export interface ProvisionedThroughputOverride { + /** + *

Replica-specific read capacity units. If not specified, uses the source table's read + * capacity settings.

+ * @public + */ + ReadCapacityUnits?: number | undefined; +} +/** + * @public + * @enum + */ +export declare const IndexStatus: { + readonly ACTIVE: "ACTIVE"; + readonly CREATING: "CREATING"; + readonly DELETING: "DELETING"; + readonly UPDATING: "UPDATING"; +}; +/** + * @public + */ +export type IndexStatus = (typeof IndexStatus)[keyof typeof IndexStatus]; +/** + *

The description of the warm throughput value on a global secondary index.

+ * @public + */ +export interface GlobalSecondaryIndexWarmThroughputDescription { + /** + *

Represents warm throughput read units per second value for a global secondary + * index.

+ * @public + */ + ReadUnitsPerSecond?: number | undefined; + /** + *

Represents warm throughput write units per second value for a global secondary + * index.

+ * @public + */ + WriteUnitsPerSecond?: number | undefined; + /** + *

Represents the warm throughput status being created or updated on a global secondary + * index. The status can only be UPDATING or ACTIVE.

+ * @public + */ + Status?: IndexStatus | undefined; +} +/** + *

Represents the properties of a replica global secondary index.

+ * @public + */ +export interface ReplicaGlobalSecondaryIndexDescription { + /** + *

The name of the global secondary index.

+ * @public + */ + IndexName?: string | undefined; + /** + *

If not described, uses the source table GSI's read capacity settings.

+ * @public + */ + ProvisionedThroughputOverride?: ProvisionedThroughputOverride | undefined; + /** + *

Overrides the maximum on-demand throughput for the specified global secondary index in + * the specified replica table.

+ * @public + */ + OnDemandThroughputOverride?: OnDemandThroughputOverride | undefined; + /** + *

Represents the warm throughput of the global secondary index for this replica.

+ * @public + */ + WarmThroughput?: GlobalSecondaryIndexWarmThroughputDescription | undefined; +} +/** + * @public + * @enum + */ +export declare const ReplicaStatus: { + readonly ACTIVE: "ACTIVE"; + readonly CREATING: "CREATING"; + readonly CREATION_FAILED: "CREATION_FAILED"; + readonly DELETING: "DELETING"; + readonly INACCESSIBLE_ENCRYPTION_CREDENTIALS: "INACCESSIBLE_ENCRYPTION_CREDENTIALS"; + readonly REGION_DISABLED: "REGION_DISABLED"; + readonly UPDATING: "UPDATING"; +}; +/** + * @public + */ +export type ReplicaStatus = (typeof ReplicaStatus)[keyof typeof ReplicaStatus]; +/** + * @public + * @enum + */ +export declare const TableClass: { + readonly STANDARD: "STANDARD"; + readonly STANDARD_INFREQUENT_ACCESS: "STANDARD_INFREQUENT_ACCESS"; +}; +/** + * @public + */ +export type TableClass = (typeof TableClass)[keyof typeof TableClass]; +/** + *

Contains details of the table class.

+ * @public + */ +export interface TableClassSummary { + /** + *

The table class of the specified table. Valid values are STANDARD and + * STANDARD_INFREQUENT_ACCESS.

+ * @public + */ + TableClass?: TableClass | undefined; + /** + *

The date and time at which the table class was last updated.

+ * @public + */ + LastUpdateDateTime?: Date | undefined; +} +/** + * @public + * @enum + */ +export declare const TableStatus: { + readonly ACTIVE: "ACTIVE"; + readonly ARCHIVED: "ARCHIVED"; + readonly ARCHIVING: "ARCHIVING"; + readonly CREATING: "CREATING"; + readonly DELETING: "DELETING"; + readonly INACCESSIBLE_ENCRYPTION_CREDENTIALS: "INACCESSIBLE_ENCRYPTION_CREDENTIALS"; + readonly UPDATING: "UPDATING"; +}; +/** + * @public + */ +export type TableStatus = (typeof TableStatus)[keyof typeof TableStatus]; +/** + *

Represents the warm throughput value (in read units per second and write units per second) + * of the table. Warm throughput is applicable for DynamoDB Standard-IA tables and specifies + * the minimum provisioned capacity maintained for immediate data access.

+ * @public + */ +export interface TableWarmThroughputDescription { + /** + *

Represents the base table's warm throughput value in read units per second.

+ * @public + */ + ReadUnitsPerSecond?: number | undefined; + /** + *

Represents the base table's warm throughput value in write units per second.

+ * @public + */ + WriteUnitsPerSecond?: number | undefined; + /** + *

Represents warm throughput value of the base table.

+ * @public + */ + Status?: TableStatus | undefined; +} +/** + *

Contains the details of the replica.

+ * @public + */ +export interface ReplicaDescription { + /** + *

The name of the Region.

+ * @public + */ + RegionName?: string | undefined; + /** + *

The current state of the replica:

+ *
    + *
  • + *

    + * CREATING - The replica is being created.

    + *
  • + *
  • + *

    + * UPDATING - The replica is being updated.

    + *
  • + *
  • + *

    + * DELETING - The replica is being deleted.

    + *
  • + *
  • + *

    + * ACTIVE - The replica is ready for use.

    + *
  • + *
  • + *

    + * REGION_DISABLED - The replica is inaccessible because the Amazon Web Services Region has been disabled.

    + * + *

    If the Amazon Web Services Region remains inaccessible for more than 20 + * hours, DynamoDB will remove this replica from the replication + * group. The replica will not be deleted and replication will stop from and to + * this region.

    + *
    + *
  • + *
  • + *

    + * INACCESSIBLE_ENCRYPTION_CREDENTIALS - The KMS key + * used to encrypt the table is inaccessible.

    + * + *

    If the KMS key remains inaccessible for more than 20 hours, + * DynamoDB will remove this replica from the replication group. + * The replica will not be deleted and replication will stop from and to this + * region.

    + *
    + *
  • + *
+ * @public + */ + ReplicaStatus?: ReplicaStatus | undefined; + /** + *

Detailed information about the replica status.

+ * @public + */ + ReplicaStatusDescription?: string | undefined; + /** + *

Specifies the progress of a Create, Update, or Delete action on the replica as a + * percentage.

+ * @public + */ + ReplicaStatusPercentProgress?: string | undefined; + /** + *

The KMS key of the replica that will be used for KMS + * encryption.

+ * @public + */ + KMSMasterKeyId?: string | undefined; + /** + *

Replica-specific provisioned throughput. If not described, uses the source table's + * provisioned throughput settings.

+ * @public + */ + ProvisionedThroughputOverride?: ProvisionedThroughputOverride | undefined; + /** + *

Overrides the maximum on-demand throughput settings for the specified replica + * table.

+ * @public + */ + OnDemandThroughputOverride?: OnDemandThroughputOverride | undefined; + /** + *

Represents the warm throughput value for this replica.

+ * @public + */ + WarmThroughput?: TableWarmThroughputDescription | undefined; + /** + *

Replica-specific global secondary index settings.

+ * @public + */ + GlobalSecondaryIndexes?: ReplicaGlobalSecondaryIndexDescription[] | undefined; + /** + *

The time at which the replica was first detected as inaccessible. To determine cause + * of inaccessibility check the ReplicaStatus property.

+ * @public + */ + ReplicaInaccessibleDateTime?: Date | undefined; + /** + *

Contains details of the table class.

+ * @public + */ + ReplicaTableClassSummary?: TableClassSummary | undefined; +} +/** + *

Contains details about the global table.

+ * @public + */ +export interface GlobalTableDescription { + /** + *

The Regions where the global table has replicas.

+ * @public + */ + ReplicationGroup?: ReplicaDescription[] | undefined; + /** + *

The unique identifier of the global table.

+ * @public + */ + GlobalTableArn?: string | undefined; + /** + *

The creation time of the global table.

+ * @public + */ + CreationDateTime?: Date | undefined; + /** + *

The current state of the global table:

+ *
    + *
  • + *

    + * CREATING - The global table is being created.

    + *
  • + *
  • + *

    + * UPDATING - The global table is being updated.

    + *
  • + *
  • + *

    + * DELETING - The global table is being deleted.

    + *
  • + *
  • + *

    + * ACTIVE - The global table is ready for use.

    + *
  • + *
+ * @public + */ + GlobalTableStatus?: GlobalTableStatus | undefined; + /** + *

The global table name.

+ * @public + */ + GlobalTableName?: string | undefined; +} +/** + * @public + */ +export interface CreateGlobalTableOutput { + /** + *

Contains the details of the global table.

+ * @public + */ + GlobalTableDescription?: GlobalTableDescription | undefined; +} +/** + *

The specified global table already exists.

+ * @public + */ +export declare class GlobalTableAlreadyExistsException extends __BaseException { + readonly name: "GlobalTableAlreadyExistsException"; + readonly $fault: "client"; + /** + * @internal + */ + constructor(opts: __ExceptionOptionType); +} +/** + *

Represents a replica to be added.

+ * @public + */ +export interface CreateReplicaAction { + /** + *

The Region of the replica to be added.

+ * @public + */ + RegionName: string | undefined; +} +/** + *

Represents the properties of a replica global secondary index.

+ * @public + */ +export interface ReplicaGlobalSecondaryIndex { + /** + *

The name of the global secondary index.

+ * @public + */ + IndexName: string | undefined; + /** + *

Replica table GSI-specific provisioned throughput. If not specified, uses the source + * table GSI's read capacity settings.

+ * @public + */ + ProvisionedThroughputOverride?: ProvisionedThroughputOverride | undefined; + /** + *

Overrides the maximum on-demand throughput settings for the specified global secondary + * index in the specified replica table.

+ * @public + */ + OnDemandThroughputOverride?: OnDemandThroughputOverride | undefined; +} +/** + *

Represents a replica to be created.

+ * @public + */ +export interface CreateReplicationGroupMemberAction { + /** + *

The Region where the new replica will be created.

+ * @public + */ + RegionName: string | undefined; + /** + *

The KMS key that should be used for KMS encryption in + * the new replica. To specify a key, use its key ID, Amazon Resource Name (ARN), alias + * name, or alias ARN. Note that you should only provide this parameter if the key is + * different from the default DynamoDB KMS key + * alias/aws/dynamodb.

+ * @public + */ + KMSMasterKeyId?: string | undefined; + /** + *

Replica-specific provisioned throughput. If not specified, uses the source table's + * provisioned throughput settings.

+ * @public + */ + ProvisionedThroughputOverride?: ProvisionedThroughputOverride | undefined; + /** + *

The maximum on-demand throughput settings for the specified replica table being + * created. You can only modify MaxReadRequestUnits, because you can't modify + * MaxWriteRequestUnits for individual replica tables.

+ * @public + */ + OnDemandThroughputOverride?: OnDemandThroughputOverride | undefined; + /** + *

Replica-specific global secondary index settings.

+ * @public + */ + GlobalSecondaryIndexes?: ReplicaGlobalSecondaryIndex[] | undefined; + /** + *

Replica-specific table class. If not specified, uses the source table's table + * class.

+ * @public + */ + TableClassOverride?: TableClass | undefined; +} +/** + *

Represents the properties of a global secondary index.

+ * @public + */ +export interface GlobalSecondaryIndex { + /** + *

The name of the global secondary index. The name must be unique among all other + * indexes on this table.

+ * @public + */ + IndexName: string | undefined; + /** + *

The complete key schema for a global secondary index, which consists of one or more + * pairs of attribute names and key types:

+ *
    + *
  • + *

    + * HASH - partition key

    + *
  • + *
  • + *

    + * RANGE - sort key

    + *
  • + *
+ * + *

The partition key of an item is also known as its hash + * attribute. The term "hash attribute" derives from DynamoDB's usage of + * an internal hash function to evenly distribute data items across partitions, based + * on their partition key values.

+ *

The sort key of an item is also known as its range attribute. + * The term "range attribute" derives from the way DynamoDB stores items with the same + * partition key physically close together, in sorted order by the sort key + * value.

+ *
+ * @public + */ + KeySchema: KeySchemaElement[] | undefined; + /** + *

Represents attributes that are copied (projected) from the table into the global + * secondary index. These are in addition to the primary key attributes and index key + * attributes, which are automatically projected.

+ * @public + */ + Projection: Projection | undefined; + /** + *

Represents the provisioned throughput settings for the specified global secondary + * index. You must use either + * OnDemandThroughput or ProvisionedThroughput based + * on your table's capacity mode.

+ *

For current minimum and maximum provisioned throughput values, see Service, + * Account, and Table Quotas in the Amazon DynamoDB Developer + * Guide.

+ * @public + */ + ProvisionedThroughput?: ProvisionedThroughput | undefined; + /** + *

The maximum number of read and write units for the specified global secondary index. + * If you use this parameter, you must specify MaxReadRequestUnits, + * MaxWriteRequestUnits, or both. You must use either + * OnDemandThroughput or ProvisionedThroughput based + * on your table's capacity mode.

+ * @public + */ + OnDemandThroughput?: OnDemandThroughput | undefined; + /** + *

Represents the warm throughput value (in read units per second and write units per + * second) for the specified secondary index. If you use this parameter, you must specify + * ReadUnitsPerSecond, WriteUnitsPerSecond, or both.

+ * @public + */ + WarmThroughput?: WarmThroughput | undefined; +} +/** + *

Represents the properties of a local secondary index.

+ * @public + */ +export interface LocalSecondaryIndex { + /** + *

The name of the local secondary index. The name must be unique among all other indexes + * on this table.

+ * @public + */ + IndexName: string | undefined; + /** + *

The complete key schema for the local secondary index, consisting of one or more pairs + * of attribute names and key types:

+ *
    + *
  • + *

    + * HASH - partition key

    + *
  • + *
  • + *

    + * RANGE - sort key

    + *
  • + *
+ * + *

The partition key of an item is also known as its hash + * attribute. The term "hash attribute" derives from DynamoDB's usage of + * an internal hash function to evenly distribute data items across partitions, based + * on their partition key values.

+ *

The sort key of an item is also known as its range attribute. + * The term "range attribute" derives from the way DynamoDB stores items with the same + * partition key physically close together, in sorted order by the sort key + * value.

+ *
+ * @public + */ + KeySchema: KeySchemaElement[] | undefined; + /** + *

Represents attributes that are copied (projected) from the table into the local + * secondary index. These are in addition to the primary key attributes and index key + * attributes, which are automatically projected.

+ * @public + */ + Projection: Projection | undefined; +} +/** + *

Represents the settings used to enable server-side encryption.

+ * @public + */ +export interface SSESpecification { + /** + *

Indicates whether server-side encryption is done using an Amazon Web Services managed + * key or an Amazon Web Services owned key. If enabled (true), server-side encryption type + * is set to KMS and an Amazon Web Services managed key is used (KMS charges apply). If disabled (false) or not specified, server-side + * encryption is set to Amazon Web Services owned key.

+ * @public + */ + Enabled?: boolean | undefined; + /** + *

Server-side encryption type. The only supported value is:

+ *
    + *
  • + *

    + * KMS - Server-side encryption that uses Key Management Service. The + * key is stored in your account and is managed by KMS (KMS charges apply).

    + *
  • + *
+ * @public + */ + SSEType?: SSEType | undefined; + /** + *

The KMS key that should be used for the KMS encryption. + * To specify a key, use its key ID, Amazon Resource Name (ARN), alias name, or alias ARN. + * Note that you should only provide this parameter if the key is different from the + * default DynamoDB key alias/aws/dynamodb.

+ * @public + */ + KMSMasterKeyId?: string | undefined; +} +/** + *

Describes a tag. A tag is a key-value pair. You can add up to 50 tags to a single + * DynamoDB table.

+ *

Amazon Web Services-assigned tag names and values are automatically assigned the + * aws: prefix, which the user cannot assign. Amazon Web Services-assigned + * tag names do not count towards the tag limit of 50. User-assigned tag names have the + * prefix user: in the Cost Allocation Report. You cannot backdate the + * application of a tag.

+ *

For an overview on tagging DynamoDB resources, see Tagging + * for DynamoDB in the Amazon DynamoDB Developer + * Guide.

+ * @public + */ +export interface Tag { + /** + *

The key of the tag. Tag keys are case sensitive. Each DynamoDB table can + * only have up to one tag with the same key. If you try to add an existing tag (same key), + * the existing tag value will be updated to the new value.

+ * @public + */ + Key: string | undefined; + /** + *

The value of the tag. Tag values are case-sensitive and can be null.

+ * @public + */ + Value: string | undefined; +} +/** + *

Represents the input of a CreateTable operation.

+ * @public + */ +export interface CreateTableInput { + /** + *

An array of attributes that describe the key schema for the table and indexes.

+ * @public + */ + AttributeDefinitions: AttributeDefinition[] | undefined; + /** + *

The name of the table to create. You can also provide the Amazon Resource Name (ARN) of the table in + * this parameter.

+ * @public + */ + TableName: string | undefined; + /** + *

Specifies the attributes that make up the primary key for a table or an index. The + * attributes in KeySchema must also be defined in the + * AttributeDefinitions array. For more information, see Data + * Model in the Amazon DynamoDB Developer Guide.

+ *

Each KeySchemaElement in the array is composed of:

+ *
    + *
  • + *

    + * AttributeName - The name of this key attribute.

    + *
  • + *
  • + *

    + * KeyType - The role that the key attribute will assume:

    + *
      + *
    • + *

      + * HASH - partition key

      + *
    • + *
    • + *

      + * RANGE - sort key

      + *
    • + *
    + *
  • + *
+ * + *

The partition key of an item is also known as its hash + * attribute. The term "hash attribute" derives from the DynamoDB usage + * of an internal hash function to evenly distribute data items across partitions, + * based on their partition key values.

+ *

The sort key of an item is also known as its range attribute. + * The term "range attribute" derives from the way DynamoDB stores items with the same + * partition key physically close together, in sorted order by the sort key + * value.

+ *
+ *

For a simple primary key (partition key), you must provide exactly one element with a + * KeyType of HASH.

+ *

For a composite primary key (partition key and sort key), you must provide exactly two + * elements, in this order: The first element must have a KeyType of + * HASH, and the second element must have a KeyType of + * RANGE.

+ *

For more information, see Working with Tables in the Amazon DynamoDB Developer + * Guide.

+ * @public + */ + KeySchema: KeySchemaElement[] | undefined; + /** + *

One or more local secondary indexes (the maximum is 5) to be created on the table. + * Each index is scoped to a given partition key value. There is a 10 GB size limit per + * partition key value; otherwise, the size of a local secondary index is + * unconstrained.

+ *

Each local secondary index in the array includes the following:

+ *
    + *
  • + *

    + * IndexName - The name of the local secondary index. Must be unique + * only for this table.

    + *

    + *
  • + *
  • + *

    + * KeySchema - Specifies the key schema for the local secondary index. + * The key schema must begin with the same partition key as the table.

    + *
  • + *
  • + *

    + * Projection - Specifies attributes that are copied (projected) from + * the table into the index. These are in addition to the primary key attributes + * and index key attributes, which are automatically projected. Each attribute + * specification is composed of:

    + *
      + *
    • + *

      + * ProjectionType - One of the following:

      + *
        + *
      • + *

        + * KEYS_ONLY - Only the index and primary keys are + * projected into the index.

        + *
      • + *
      • + *

        + * INCLUDE - Only the specified table attributes are + * projected into the index. The list of projected attributes is in + * NonKeyAttributes.

        + *
      • + *
      • + *

        + * ALL - All of the table attributes are projected + * into the index.

        + *
      • + *
      + *
    • + *
    • + *

      + * NonKeyAttributes - A list of one or more non-key attribute + * names that are projected into the secondary index. The total count of + * attributes provided in NonKeyAttributes, summed across all + * of the secondary indexes, must not exceed 100. If you project the same + * attribute into two different indexes, this counts as two distinct + * attributes when determining the total. This limit only applies when you + * specify the ProjectionType of INCLUDE. You still can specify the + * ProjectionType of ALL to project all attributes from the + * source table, even if the table has more than 100 attributes.

      + *
    • + *
    + *
  • + *
+ * @public + */ + LocalSecondaryIndexes?: LocalSecondaryIndex[] | undefined; + /** + *

One or more global secondary indexes (the maximum is 20) to be created on the table. + * Each global secondary index in the array includes the following:

+ *
    + *
  • + *

    + * IndexName - The name of the global secondary index. Must be unique + * only for this table.

    + *

    + *
  • + *
  • + *

    + * KeySchema - Specifies the key schema for the global secondary + * index.

    + *
  • + *
  • + *

    + * Projection - Specifies attributes that are copied (projected) from + * the table into the index. These are in addition to the primary key attributes + * and index key attributes, which are automatically projected. Each attribute + * specification is composed of:

    + *
      + *
    • + *

      + * ProjectionType - One of the following:

      + *
        + *
      • + *

        + * KEYS_ONLY - Only the index and primary keys are + * projected into the index.

        + *
      • + *
      • + *

        + * INCLUDE - Only the specified table attributes are + * projected into the index. The list of projected attributes is in + * NonKeyAttributes.

        + *
      • + *
      • + *

        + * ALL - All of the table attributes are projected + * into the index.

        + *
      • + *
      + *
    • + *
    • + *

      + * NonKeyAttributes - A list of one or more non-key attribute + * names that are projected into the secondary index. The total count of + * attributes provided in NonKeyAttributes, summed across all + * of the secondary indexes, must not exceed 100. If you project the same + * attribute into two different indexes, this counts as two distinct + * attributes when determining the total. This limit only applies when you + * specify the ProjectionType of INCLUDE. You still can + * specify the ProjectionType of ALL to project all attributes + * from the source table, even if the table has more than 100 attributes.

      + *
    • + *
    + *
  • + *
  • + *

    + * ProvisionedThroughput - The provisioned throughput settings for the + * global secondary index, consisting of read and write capacity units.

    + *
  • + *
+ * @public + */ + GlobalSecondaryIndexes?: GlobalSecondaryIndex[] | undefined; + /** + *

Controls how you are charged for read and write throughput and how you manage + * capacity. This setting can be changed later.

+ *
    + *
  • + *

    + * PAY_PER_REQUEST - We recommend using PAY_PER_REQUEST + * for most DynamoDB workloads. PAY_PER_REQUEST sets the billing mode + * to On-demand capacity mode.

    + *
  • + *
  • + *

    + * PROVISIONED - We recommend using PROVISIONED for + * steady workloads with predictable growth where capacity requirements can be + * reliably forecasted. PROVISIONED sets the billing mode to Provisioned capacity mode.

    + *
  • + *
+ * @public + */ + BillingMode?: BillingMode | undefined; + /** + *

Represents the provisioned throughput settings for a specified table or index. The + * settings can be modified using the UpdateTable operation.

+ *

If you set BillingMode as PROVISIONED, you must specify this property. + * If you set BillingMode as PAY_PER_REQUEST, you cannot specify this + * property.

+ *

For current minimum and maximum provisioned throughput values, see Service, + * Account, and Table Quotas in the Amazon DynamoDB Developer + * Guide.

+ * @public + */ + ProvisionedThroughput?: ProvisionedThroughput | undefined; + /** + *

The settings for DynamoDB Streams on the table. These settings consist of:

+ *
    + *
  • + *

    + * StreamEnabled - Indicates whether DynamoDB Streams is to be enabled + * (true) or disabled (false).

    + *
  • + *
  • + *

    + * StreamViewType - When an item in the table is modified, + * StreamViewType determines what information is written to the + * table's stream. Valid values for StreamViewType are:

    + *
      + *
    • + *

      + * KEYS_ONLY - Only the key attributes of the modified item + * are written to the stream.

      + *
    • + *
    • + *

      + * NEW_IMAGE - The entire item, as it appears after it was + * modified, is written to the stream.

      + *
    • + *
    • + *

      + * OLD_IMAGE - The entire item, as it appeared before it was + * modified, is written to the stream.

      + *
    • + *
    • + *

      + * NEW_AND_OLD_IMAGES - Both the new and the old item images + * of the item are written to the stream.

      + *
    • + *
    + *
  • + *
+ * @public + */ + StreamSpecification?: StreamSpecification | undefined; + /** + *

Represents the settings used to enable server-side encryption.

+ * @public + */ + SSESpecification?: SSESpecification | undefined; + /** + *

A list of key-value pairs to label the table. For more information, see Tagging + * for DynamoDB.

+ * @public + */ + Tags?: Tag[] | undefined; + /** + *

The table class of the new table. Valid values are STANDARD and + * STANDARD_INFREQUENT_ACCESS.

+ * @public + */ + TableClass?: TableClass | undefined; + /** + *

Indicates whether deletion protection is to be enabled (true) or disabled (false) on + * the table.

+ * @public + */ + DeletionProtectionEnabled?: boolean | undefined; + /** + *

Represents the warm throughput (in read units per second and write units per second) + * for creating a table.

+ * @public + */ + WarmThroughput?: WarmThroughput | undefined; + /** + *

An Amazon Web Services resource-based policy document in JSON format that will be + * attached to the table.

+ *

When you attach a resource-based policy while creating a table, the policy application + * is strongly consistent.

+ *

The maximum size supported for a resource-based policy document is 20 KB. DynamoDB counts whitespaces when calculating the size of a policy against this + * limit. For a full list of all considerations that apply for resource-based policies, see + * Resource-based + * policy considerations.

+ * + *

You need to specify the CreateTable and + * PutResourcePolicy + * IAM actions for authorizing a user to create a table with a + * resource-based policy.

+ *
+ * @public + */ + ResourcePolicy?: string | undefined; + /** + *

Sets the maximum number of read and write units for the specified table in on-demand + * capacity mode. If you use this parameter, you must specify + * MaxReadRequestUnits, MaxWriteRequestUnits, or both.

+ * @public + */ + OnDemandThroughput?: OnDemandThroughput | undefined; +} +/** + *

Represents the provisioned throughput settings for the table, consisting of read and + * write capacity units, along with data about increases and decreases.

+ * @public + */ +export interface ProvisionedThroughputDescription { + /** + *

The date and time of the last provisioned throughput increase for this table.

+ * @public + */ + LastIncreaseDateTime?: Date | undefined; + /** + *

The date and time of the last provisioned throughput decrease for this table.

+ * @public + */ + LastDecreaseDateTime?: Date | undefined; + /** + *

The number of provisioned throughput decreases for this table during this UTC calendar + * day. For current maximums on provisioned throughput decreases, see Service, + * Account, and Table Quotas in the Amazon DynamoDB Developer + * Guide.

+ * @public + */ + NumberOfDecreasesToday?: number | undefined; + /** + *

The maximum number of strongly consistent reads consumed per second before DynamoDB + * returns a ThrottlingException. Eventually consistent reads require less + * effort than strongly consistent reads, so a setting of 50 ReadCapacityUnits + * per second provides 100 eventually consistent ReadCapacityUnits per + * second.

+ * @public + */ + ReadCapacityUnits?: number | undefined; + /** + *

The maximum number of writes consumed per second before DynamoDB returns a + * ThrottlingException.

+ * @public + */ + WriteCapacityUnits?: number | undefined; +} +/** + *

Represents the properties of a global secondary index.

+ * @public + */ +export interface GlobalSecondaryIndexDescription { + /** + *

The name of the global secondary index.

+ * @public + */ + IndexName?: string | undefined; + /** + *

The complete key schema for a global secondary index, which consists of one or more + * pairs of attribute names and key types:

+ *
    + *
  • + *

    + * HASH - partition key

    + *
  • + *
  • + *

    + * RANGE - sort key

    + *
  • + *
+ * + *

The partition key of an item is also known as its hash + * attribute. The term "hash attribute" derives from DynamoDB's usage of an internal hash function to evenly distribute data items across + * partitions, based on their partition key values.

+ *

The sort key of an item is also known as its range attribute. + * The term "range attribute" derives from the way DynamoDB stores items with + * the same partition key physically close together, in sorted order by the sort key + * value.

+ *
+ * @public + */ + KeySchema?: KeySchemaElement[] | undefined; + /** + *

Represents attributes that are copied (projected) from the table into the global + * secondary index. These are in addition to the primary key attributes and index key + * attributes, which are automatically projected.

+ * @public + */ + Projection?: Projection | undefined; + /** + *

The current state of the global secondary index:

+ *
    + *
  • + *

    + * CREATING - The index is being created.

    + *
  • + *
  • + *

    + * UPDATING - The index is being updated.

    + *
  • + *
  • + *

    + * DELETING - The index is being deleted.

    + *
  • + *
  • + *

    + * ACTIVE - The index is ready for use.

    + *
  • + *
+ * @public + */ + IndexStatus?: IndexStatus | undefined; + /** + *

Indicates whether the index is currently backfilling. Backfilling + * is the process of reading items from the table and determining whether they can be added + * to the index. (Not all items will qualify: For example, a partition key cannot have any + * duplicate values.) If an item can be added to the index, DynamoDB will do so. After all + * items have been processed, the backfilling operation is complete and + * Backfilling is false.

+ *

You can delete an index that is being created during the Backfilling + * phase when IndexStatus is set to CREATING and Backfilling is + * true. You can't delete the index that is being created when IndexStatus is + * set to CREATING and Backfilling is false.

+ * + *

For indexes that were created during a CreateTable operation, the + * Backfilling attribute does not appear in the + * DescribeTable output.

+ *
+ * @public + */ + Backfilling?: boolean | undefined; + /** + *

Represents the provisioned throughput settings for the specified global secondary + * index.

+ *

For current minimum and maximum provisioned throughput values, see Service, + * Account, and Table Quotas in the Amazon DynamoDB Developer + * Guide.

+ * @public + */ + ProvisionedThroughput?: ProvisionedThroughputDescription | undefined; + /** + *

The total size of the specified index, in bytes. DynamoDB updates this value + * approximately every six hours. Recent changes might not be reflected in this + * value.

+ * @public + */ + IndexSizeBytes?: number | undefined; + /** + *

The number of items in the specified index. DynamoDB updates this value approximately + * every six hours. Recent changes might not be reflected in this value.

+ * @public + */ + ItemCount?: number | undefined; + /** + *

The Amazon Resource Name (ARN) that uniquely identifies the index.

+ * @public + */ + IndexArn?: string | undefined; + /** + *

The maximum number of read and write units for the specified global secondary index. + * If you use this parameter, you must specify MaxReadRequestUnits, + * MaxWriteRequestUnits, or both.

+ * @public + */ + OnDemandThroughput?: OnDemandThroughput | undefined; + /** + *

Represents the warm throughput value (in read units per second and write units per + * second) for the specified secondary index.

+ * @public + */ + WarmThroughput?: GlobalSecondaryIndexWarmThroughputDescription | undefined; +} +/** + *

Represents the properties of a local secondary index.

+ * @public + */ +export interface LocalSecondaryIndexDescription { + /** + *

Represents the name of the local secondary index.

+ * @public + */ + IndexName?: string | undefined; + /** + *

The complete key schema for the local secondary index, consisting of one or more pairs + * of attribute names and key types:

+ *
    + *
  • + *

    + * HASH - partition key

    + *
  • + *
  • + *

    + * RANGE - sort key

    + *
  • + *
+ * + *

The partition key of an item is also known as its hash + * attribute. The term "hash attribute" derives from DynamoDB's usage of + * an internal hash function to evenly distribute data items across partitions, based + * on their partition key values.

+ *

The sort key of an item is also known as its range attribute. + * The term "range attribute" derives from the way DynamoDB stores items with the same + * partition key physically close together, in sorted order by the sort key + * value.

+ *
+ * @public + */ + KeySchema?: KeySchemaElement[] | undefined; + /** + *

Represents attributes that are copied (projected) from the table into the global + * secondary index. These are in addition to the primary key attributes and index key + * attributes, which are automatically projected.

+ * @public + */ + Projection?: Projection | undefined; + /** + *

The total size of the specified index, in bytes. DynamoDB updates this value + * approximately every six hours. Recent changes might not be reflected in this + * value.

+ * @public + */ + IndexSizeBytes?: number | undefined; + /** + *

The number of items in the specified index. DynamoDB updates this value + * approximately every six hours. Recent changes might not be reflected in this + * value.

+ * @public + */ + ItemCount?: number | undefined; + /** + *

The Amazon Resource Name (ARN) that uniquely identifies the index.

+ * @public + */ + IndexArn?: string | undefined; +} +/** + * @public + * @enum + */ +export declare const MultiRegionConsistency: { + readonly EVENTUAL: "EVENTUAL"; + readonly STRONG: "STRONG"; +}; +/** + * @public + */ +export type MultiRegionConsistency = (typeof MultiRegionConsistency)[keyof typeof MultiRegionConsistency]; +/** + *

Contains details for the restore.

+ * @public + */ +export interface RestoreSummary { + /** + *

The Amazon Resource Name (ARN) of the backup from which the table was restored.

+ * @public + */ + SourceBackupArn?: string | undefined; + /** + *

The ARN of the source table of the backup that is being restored.

+ * @public + */ + SourceTableArn?: string | undefined; + /** + *

Point in time or source backup time.

+ * @public + */ + RestoreDateTime: Date | undefined; + /** + *

Indicates if a restore is in progress or not.

+ * @public + */ + RestoreInProgress: boolean | undefined; +} +/** + *

Represents the properties of a table.

+ * @public + */ +export interface TableDescription { + /** + *

An array of AttributeDefinition objects. Each of these objects describes + * one attribute in the table and index key schema.

+ *

Each AttributeDefinition object in this array is composed of:

+ *
    + *
  • + *

    + * AttributeName - The name of the attribute.

    + *
  • + *
  • + *

    + * AttributeType - The data type for the attribute.

    + *
  • + *
+ * @public + */ + AttributeDefinitions?: AttributeDefinition[] | undefined; + /** + *

The name of the table.

+ * @public + */ + TableName?: string | undefined; + /** + *

The primary key structure for the table. Each KeySchemaElement consists + * of:

+ *
    + *
  • + *

    + * AttributeName - The name of the attribute.

    + *
  • + *
  • + *

    + * KeyType - The role of the attribute:

    + *
      + *
    • + *

      + * HASH - partition key

      + *
    • + *
    • + *

      + * RANGE - sort key

      + *
    • + *
    + * + *

    The partition key of an item is also known as its hash + * attribute. The term "hash attribute" derives from DynamoDB's + * usage of an internal hash function to evenly distribute data items across + * partitions, based on their partition key values.

    + *

    The sort key of an item is also known as its range + * attribute. The term "range attribute" derives from the way + * DynamoDB stores items with the same partition key physically close together, + * in sorted order by the sort key value.

    + *
    + *
  • + *
+ *

For more information about primary keys, see Primary Key in the Amazon DynamoDB Developer + * Guide.

+ * @public + */ + KeySchema?: KeySchemaElement[] | undefined; + /** + *

The current state of the table:

+ *
    + *
  • + *

    + * CREATING - The table is being created.

    + *
  • + *
  • + *

    + * UPDATING - The table/index configuration is being updated. The + * table/index remains available for data operations when + * UPDATING.

    + *
  • + *
  • + *

    + * DELETING - The table is being deleted.

    + *
  • + *
  • + *

    + * ACTIVE - The table is ready for use.

    + *
  • + *
  • + *

    + * INACCESSIBLE_ENCRYPTION_CREDENTIALS - The KMS key + * used to encrypt the table in inaccessible. Table operations may fail due to + * failure to use the KMS key. DynamoDB will initiate the + * table archival process when a table's KMS key remains + * inaccessible for more than seven days.

    + *
  • + *
  • + *

    + * ARCHIVING - The table is being archived. Operations are not allowed + * until archival is complete.

    + *
  • + *
  • + *

    + * ARCHIVED - The table has been archived. See the ArchivalReason for + * more information.

    + *
  • + *
+ * @public + */ + TableStatus?: TableStatus | undefined; + /** + *

The date and time when the table was created, in UNIX epoch time format.

+ * @public + */ + CreationDateTime?: Date | undefined; + /** + *

The provisioned throughput settings for the table, consisting of read and write + * capacity units, along with data about increases and decreases.

+ * @public + */ + ProvisionedThroughput?: ProvisionedThroughputDescription | undefined; + /** + *

The total size of the specified table, in bytes. DynamoDB updates this value + * approximately every six hours. Recent changes might not be reflected in this + * value.

+ * @public + */ + TableSizeBytes?: number | undefined; + /** + *

The number of items in the specified table. DynamoDB updates this value approximately + * every six hours. Recent changes might not be reflected in this value.

+ * @public + */ + ItemCount?: number | undefined; + /** + *

The Amazon Resource Name (ARN) that uniquely identifies the table.

+ * @public + */ + TableArn?: string | undefined; + /** + *

Unique identifier for the table for which the backup was created.

+ * @public + */ + TableId?: string | undefined; + /** + *

Contains the details for the read/write capacity mode.

+ * @public + */ + BillingModeSummary?: BillingModeSummary | undefined; + /** + *

Represents one or more local secondary indexes on the table. Each index is scoped to a + * given partition key value. Tables with one or more local secondary indexes are subject + * to an item collection size limit, where the amount of data within a given item + * collection cannot exceed 10 GB. Each element is composed of:

+ *
    + *
  • + *

    + * IndexName - The name of the local secondary index.

    + *
  • + *
  • + *

    + * KeySchema - Specifies the complete index key schema. The attribute + * names in the key schema must be between 1 and 255 characters (inclusive). The + * key schema must begin with the same partition key as the table.

    + *
  • + *
  • + *

    + * Projection - Specifies attributes that are copied (projected) from + * the table into the index. These are in addition to the primary key attributes + * and index key attributes, which are automatically projected. Each attribute + * specification is composed of:

    + *
      + *
    • + *

      + * ProjectionType - One of the following:

      + *
        + *
      • + *

        + * KEYS_ONLY - Only the index and primary keys are + * projected into the index.

        + *
      • + *
      • + *

        + * INCLUDE - Only the specified table attributes are + * projected into the index. The list of projected attributes is in + * NonKeyAttributes.

        + *
      • + *
      • + *

        + * ALL - All of the table attributes are projected + * into the index.

        + *
      • + *
      + *
    • + *
    • + *

      + * NonKeyAttributes - A list of one or more non-key attribute + * names that are projected into the secondary index. The total count of + * attributes provided in NonKeyAttributes, summed across all + * of the secondary indexes, must not exceed 100. If you project the same + * attribute into two different indexes, this counts as two distinct + * attributes when determining the total. This limit only applies when you + * specify the ProjectionType of INCLUDE. You still can + * specify the ProjectionType of ALL to project all attributes + * from the source table, even if the table has more than 100 attributes.

      + *
    • + *
    + *
  • + *
  • + *

    + * IndexSizeBytes - Represents the total size of the index, in bytes. + * DynamoDB updates this value approximately every six hours. Recent changes might + * not be reflected in this value.

    + *
  • + *
  • + *

    + * ItemCount - Represents the number of items in the index. DynamoDB + * updates this value approximately every six hours. Recent changes might not be + * reflected in this value.

    + *
  • + *
+ *

If the table is in the DELETING state, no information about indexes will + * be returned.

+ * @public + */ + LocalSecondaryIndexes?: LocalSecondaryIndexDescription[] | undefined; + /** + *

The global secondary indexes, if any, on the table. Each index is scoped to a given + * partition key value. Each element is composed of:

+ *
    + *
  • + *

    + * Backfilling - If true, then the index is currently in the + * backfilling phase. Backfilling occurs only when a new global secondary index is + * added to the table. It is the process by which DynamoDB populates the new index + * with data from the table. (This attribute does not appear for indexes that were + * created during a CreateTable operation.)

    + *

    You can delete an index that is being created during the + * Backfilling phase when IndexStatus is set to + * CREATING and Backfilling is true. You can't delete the index that + * is being created when IndexStatus is set to CREATING and + * Backfilling is false. (This attribute does not appear for + * indexes that were created during a CreateTable operation.)

    + *
  • + *
  • + *

    + * IndexName - The name of the global secondary index.

    + *
  • + *
  • + *

    + * IndexSizeBytes - The total size of the global secondary index, in + * bytes. DynamoDB updates this value approximately every six hours. Recent changes + * might not be reflected in this value.

    + *
  • + *
  • + *

    + * IndexStatus - The current status of the global secondary + * index:

    + *
      + *
    • + *

      + * CREATING - The index is being created.

      + *
    • + *
    • + *

      + * UPDATING - The index is being updated.

      + *
    • + *
    • + *

      + * DELETING - The index is being deleted.

      + *
    • + *
    • + *

      + * ACTIVE - The index is ready for use.

      + *
    • + *
    + *
  • + *
  • + *

    + * ItemCount - The number of items in the global secondary index. + * DynamoDB updates this value approximately every six hours. Recent changes might + * not be reflected in this value.

    + *
  • + *
  • + *

    + * KeySchema - Specifies the complete index key schema. The attribute + * names in the key schema must be between 1 and 255 characters (inclusive). The + * key schema must begin with the same partition key as the table.

    + *
  • + *
  • + *

    + * Projection - Specifies attributes that are copied (projected) from + * the table into the index. These are in addition to the primary key attributes + * and index key attributes, which are automatically projected. Each attribute + * specification is composed of:

    + *
      + *
    • + *

      + * ProjectionType - One of the following:

      + *
        + *
      • + *

        + * KEYS_ONLY - Only the index and primary keys are + * projected into the index.

        + *
      • + *
      • + *

        + * INCLUDE - In addition to the attributes described + * in KEYS_ONLY, the secondary index will include + * other non-key attributes that you specify.

        + *
      • + *
      • + *

        + * ALL - All of the table attributes are projected + * into the index.

        + *
      • + *
      + *
    • + *
    • + *

      + * NonKeyAttributes - A list of one or more non-key attribute + * names that are projected into the secondary index. The total count of + * attributes provided in NonKeyAttributes, summed across all + * of the secondary indexes, must not exceed 100. If you project the same + * attribute into two different indexes, this counts as two distinct + * attributes when determining the total. This limit only applies when you + * specify the ProjectionType of INCLUDE. You still can + * specify the ProjectionType of ALL to project all attributes + * from the source table, even if the table has more than 100 attributes.

      + *
    • + *
    + *
  • + *
  • + *

    + * ProvisionedThroughput - The provisioned throughput settings for the + * global secondary index, consisting of read and write capacity units, along with + * data about increases and decreases.

    + *
  • + *
+ *

If the table is in the DELETING state, no information about indexes will + * be returned.

+ * @public + */ + GlobalSecondaryIndexes?: GlobalSecondaryIndexDescription[] | undefined; + /** + *

The current DynamoDB Streams configuration for the table.

+ * @public + */ + StreamSpecification?: StreamSpecification | undefined; + /** + *

A timestamp, in ISO 8601 format, for this stream.

+ *

Note that LatestStreamLabel is not a unique identifier for the stream, + * because it is possible that a stream from another table might have the same timestamp. + * However, the combination of the following three elements is guaranteed to be + * unique:

+ *
    + *
  • + *

    Amazon Web Services customer ID

    + *
  • + *
  • + *

    Table name

    + *
  • + *
  • + *

    + * StreamLabel + *

    + *
  • + *
+ * @public + */ + LatestStreamLabel?: string | undefined; + /** + *

The Amazon Resource Name (ARN) that uniquely identifies the latest stream for this + * table.

+ * @public + */ + LatestStreamArn?: string | undefined; + /** + *

Represents the version of global tables + * in use, if the table is replicated across Amazon Web Services Regions.

+ * @public + */ + GlobalTableVersion?: string | undefined; + /** + *

Represents replicas of the table.

+ * @public + */ + Replicas?: ReplicaDescription[] | undefined; + /** + *

Contains details for the restore.

+ * @public + */ + RestoreSummary?: RestoreSummary | undefined; + /** + *

The description of the server-side encryption status on the specified table.

+ * @public + */ + SSEDescription?: SSEDescription | undefined; + /** + *

Contains information about the table archive.

+ * @public + */ + ArchivalSummary?: ArchivalSummary | undefined; + /** + *

Contains details of the table class.

+ * @public + */ + TableClassSummary?: TableClassSummary | undefined; + /** + *

Indicates whether deletion protection is enabled (true) or disabled (false) on the + * table.

+ * @public + */ + DeletionProtectionEnabled?: boolean | undefined; + /** + *

The maximum number of read and write units for the specified on-demand table. If you + * use this parameter, you must specify MaxReadRequestUnits, + * MaxWriteRequestUnits, or both.

+ * @public + */ + OnDemandThroughput?: OnDemandThroughput | undefined; + /** + *

Describes the warm throughput value of the base table.

+ * @public + */ + WarmThroughput?: TableWarmThroughputDescription | undefined; + /** + *

Indicates one of the following consistency modes for a global table:

+ *
    + *
  • + *

    + * EVENTUAL: Indicates that the global table is configured for multi-Region eventual consistency.

    + *
  • + *
  • + *

    + * STRONG: Indicates that the global table is configured for multi-Region strong consistency (preview).

    + * + *

    Multi-Region strong consistency (MRSC) is a new DynamoDB global tables capability currently available in preview mode. For more information, see Global tables multi-Region strong consistency.

    + *
    + *
  • + *
+ *

If you don't specify this field, the global table consistency mode defaults to EVENTUAL.

+ * @public + */ + MultiRegionConsistency?: MultiRegionConsistency | undefined; +} +/** + *

Represents the output of a CreateTable operation.

+ * @public + */ +export interface CreateTableOutput { + /** + *

Represents the properties of the table.

+ * @public + */ + TableDescription?: TableDescription | undefined; +} +/** + *

The operation conflicts with the resource's availability. For example:

+ *
    + *
  • + *

    You attempted to recreate an existing table.

    + *
  • + *
  • + *

    You tried to delete a table currently in the CREATING state.

    + *
  • + *
  • + *

    You tried to update a resource that was already being updated.

    + *
  • + *
+ *

When appropriate, wait for the ongoing update to complete and attempt the request again.

+ * @public + */ +export declare class ResourceInUseException extends __BaseException { + readonly name: "ResourceInUseException"; + readonly $fault: "client"; + /** + * @internal + */ + constructor(opts: __ExceptionOptionType); +} +/** + *

Processing options for the CSV file being imported.

+ * @public + */ +export interface CsvOptions { + /** + *

The delimiter used for separating items in the CSV file being imported.

+ * @public + */ + Delimiter?: string | undefined; + /** + *

List of the headers used to specify a common header for all source CSV files being + * imported. If this field is specified then the first line of each CSV file is treated as + * data instead of the header. If this field is not specified the the first line of each + * CSV file is treated as the header.

+ * @public + */ + HeaderList?: string[] | undefined; +} +/** + * @public + */ +export interface DeleteBackupInput { + /** + *

The ARN associated with the backup.

+ * @public + */ + BackupArn: string | undefined; +} +/** + * @public + */ +export interface DeleteBackupOutput { + /** + *

Contains the description of the backup created for the table.

+ * @public + */ + BackupDescription?: BackupDescription | undefined; +} +/** + *

Represents a global secondary index to be deleted from an existing table.

+ * @public + */ +export interface DeleteGlobalSecondaryIndexAction { + /** + *

The name of the global secondary index to be deleted.

+ * @public + */ + IndexName: string | undefined; +} +/** + * @public + * @enum + */ +export declare const ReturnValue: { + readonly ALL_NEW: "ALL_NEW"; + readonly ALL_OLD: "ALL_OLD"; + readonly NONE: "NONE"; + readonly UPDATED_NEW: "UPDATED_NEW"; + readonly UPDATED_OLD: "UPDATED_OLD"; +}; +/** + * @public + */ +export type ReturnValue = (typeof ReturnValue)[keyof typeof ReturnValue]; +/** + *

The request was rejected because one or more items in the request are being modified by a request in another Region.

+ * @public + */ +export declare class ReplicatedWriteConflictException extends __BaseException { + readonly name: "ReplicatedWriteConflictException"; + readonly $fault: "client"; + /** + * @internal + */ + constructor(opts: __ExceptionOptionType); +} +/** + *

Operation was rejected because there is an ongoing transaction for the + * item.

+ * @public + */ +export declare class TransactionConflictException extends __BaseException { + readonly name: "TransactionConflictException"; + readonly $fault: "client"; + /** + * @internal + */ + constructor(opts: __ExceptionOptionType); +} +/** + *

Represents a replica to be removed.

+ * @public + */ +export interface DeleteReplicaAction { + /** + *

The Region of the replica to be removed.

+ * @public + */ + RegionName: string | undefined; +} +/** + *

Represents a replica to be deleted.

+ * @public + */ +export interface DeleteReplicationGroupMemberAction { + /** + *

The Region where the replica exists.

+ * @public + */ + RegionName: string | undefined; +} +/** + * @public + */ +export interface DeleteResourcePolicyInput { + /** + *

The Amazon Resource Name (ARN) of the DynamoDB resource from which the policy will be + * removed. The resources you can specify include tables and streams. If you remove the + * policy of a table, it will also remove the permissions for the table's indexes defined + * in that policy document. This is because index permissions are defined in the table's + * policy.

+ * @public + */ + ResourceArn: string | undefined; + /** + *

A string value that you can use to conditionally delete your policy. When you provide + * an expected revision ID, if the revision ID of the existing policy on the resource + * doesn't match or if there's no policy attached to the resource, the request will fail + * and return a PolicyNotFoundException.

+ * @public + */ + ExpectedRevisionId?: string | undefined; +} +/** + * @public + */ +export interface DeleteResourcePolicyOutput { + /** + *

A unique string that represents the revision ID of the policy. If you're comparing revision IDs, make sure to always use string comparison logic.

+ *

This value will be empty if you make a request against a resource without a + * policy.

+ * @public + */ + RevisionId?: string | undefined; +} +/** + *

The operation tried to access a nonexistent resource-based policy.

+ *

If you specified an ExpectedRevisionId, it's possible that a policy is present for the resource but its revision ID didn't match the expected value.

+ * @public + */ +export declare class PolicyNotFoundException extends __BaseException { + readonly name: "PolicyNotFoundException"; + readonly $fault: "client"; + /** + * @internal + */ + constructor(opts: __ExceptionOptionType); +} +/** + *

Represents the input of a DeleteTable operation.

+ * @public + */ +export interface DeleteTableInput { + /** + *

The name of the table to delete. You can also provide the Amazon Resource Name (ARN) of the table in + * this parameter.

+ * @public + */ + TableName: string | undefined; +} +/** + *

Represents the output of a DeleteTable operation.

+ * @public + */ +export interface DeleteTableOutput { + /** + *

Represents the properties of a table.

+ * @public + */ + TableDescription?: TableDescription | undefined; +} +/** + * @public + */ +export interface DescribeBackupInput { + /** + *

The Amazon Resource Name (ARN) associated with the backup.

+ * @public + */ + BackupArn: string | undefined; +} +/** + * @public + */ +export interface DescribeBackupOutput { + /** + *

Contains the description of the backup created for the table.

+ * @public + */ + BackupDescription?: BackupDescription | undefined; +} +/** + * @public + */ +export interface DescribeContinuousBackupsInput { + /** + *

Name of the table for which the customer wants to check the continuous backups and + * point in time recovery settings.

+ *

You can also provide the Amazon Resource Name (ARN) of the table in this parameter.

+ * @public + */ + TableName: string | undefined; +} +/** + * @public + */ +export interface DescribeContinuousBackupsOutput { + /** + *

Represents the continuous backups and point in time recovery settings on the + * table.

+ * @public + */ + ContinuousBackupsDescription?: ContinuousBackupsDescription | undefined; +} +/** + * @public + */ +export interface DescribeContributorInsightsInput { + /** + *

The name of the table to describe. You can also provide the Amazon Resource Name (ARN) of the table in + * this parameter.

+ * @public + */ + TableName: string | undefined; + /** + *

The name of the global secondary index to describe, if applicable.

+ * @public + */ + IndexName?: string | undefined; +} +/** + *

Represents a failure a contributor insights operation.

+ * @public + */ +export interface FailureException { + /** + *

Exception name.

+ * @public + */ + ExceptionName?: string | undefined; + /** + *

Description of the failure.

+ * @public + */ + ExceptionDescription?: string | undefined; +} +/** + * @public + */ +export interface DescribeContributorInsightsOutput { + /** + *

The name of the table being described.

+ * @public + */ + TableName?: string | undefined; + /** + *

The name of the global secondary index being described.

+ * @public + */ + IndexName?: string | undefined; + /** + *

List of names of the associated contributor insights rules.

+ * @public + */ + ContributorInsightsRuleList?: string[] | undefined; + /** + *

Current status of contributor insights.

+ * @public + */ + ContributorInsightsStatus?: ContributorInsightsStatus | undefined; + /** + *

Timestamp of the last time the status was changed.

+ * @public + */ + LastUpdateDateTime?: Date | undefined; + /** + *

Returns information about the last failure that was encountered.

+ *

The most common exceptions for a FAILED status are:

+ *
    + *
  • + *

    LimitExceededException - Per-account Amazon CloudWatch Contributor Insights + * rule limit reached. Please disable Contributor Insights for other tables/indexes + * OR disable Contributor Insights rules before retrying.

    + *
  • + *
  • + *

    AccessDeniedException - Amazon CloudWatch Contributor Insights rules cannot be + * modified due to insufficient permissions.

    + *
  • + *
  • + *

    AccessDeniedException - Failed to create service-linked role for Contributor + * Insights due to insufficient permissions.

    + *
  • + *
  • + *

    InternalServerError - Failed to create Amazon CloudWatch Contributor Insights + * rules. Please retry request.

    + *
  • + *
+ * @public + */ + FailureException?: FailureException | undefined; +} +/** + * @public + */ +export interface DescribeEndpointsRequest { +} +/** + *

An endpoint information details.

+ * @public + */ +export interface Endpoint { + /** + *

IP address of the endpoint.

+ * @public + */ + Address: string | undefined; + /** + *

Endpoint cache time to live (TTL) value.

+ * @public + */ + CachePeriodInMinutes: number | undefined; +} +/** + * @public + */ +export interface DescribeEndpointsResponse { + /** + *

List of endpoints.

+ * @public + */ + Endpoints: Endpoint[] | undefined; +} +/** + * @public + */ +export interface DescribeExportInput { + /** + *

The Amazon Resource Name (ARN) associated with the export.

+ * @public + */ + ExportArn: string | undefined; +} +/** + * @public + * @enum + */ +export declare const ExportFormat: { + readonly DYNAMODB_JSON: "DYNAMODB_JSON"; + readonly ION: "ION"; +}; +/** + * @public + */ +export type ExportFormat = (typeof ExportFormat)[keyof typeof ExportFormat]; +/** + * @public + * @enum + */ +export declare const ExportStatus: { + readonly COMPLETED: "COMPLETED"; + readonly FAILED: "FAILED"; + readonly IN_PROGRESS: "IN_PROGRESS"; +}; +/** + * @public + */ +export type ExportStatus = (typeof ExportStatus)[keyof typeof ExportStatus]; +/** + * @public + * @enum + */ +export declare const ExportType: { + readonly FULL_EXPORT: "FULL_EXPORT"; + readonly INCREMENTAL_EXPORT: "INCREMENTAL_EXPORT"; +}; +/** + * @public + */ +export type ExportType = (typeof ExportType)[keyof typeof ExportType]; +/** + * @public + * @enum + */ +export declare const ExportViewType: { + readonly NEW_AND_OLD_IMAGES: "NEW_AND_OLD_IMAGES"; + readonly NEW_IMAGE: "NEW_IMAGE"; +}; +/** + * @public + */ +export type ExportViewType = (typeof ExportViewType)[keyof typeof ExportViewType]; +/** + *

Optional object containing the parameters specific to an incremental export.

+ * @public + */ +export interface IncrementalExportSpecification { + /** + *

Time in the past which provides the inclusive start range for the export table's data, + * counted in seconds from the start of the Unix epoch. The incremental export will reflect + * the table's state including and after this point in time.

+ * @public + */ + ExportFromTime?: Date | undefined; + /** + *

Time in the past which provides the exclusive end range for the export table's data, + * counted in seconds from the start of the Unix epoch. The incremental export will reflect + * the table's state just prior to this point in time. If this is not provided, the latest + * time with data available will be used.

+ * @public + */ + ExportToTime?: Date | undefined; + /** + *

The view type that was chosen for the export. Valid values are + * NEW_AND_OLD_IMAGES and NEW_IMAGES. The default value is + * NEW_AND_OLD_IMAGES.

+ * @public + */ + ExportViewType?: ExportViewType | undefined; +} +/** + * @public + * @enum + */ +export declare const S3SseAlgorithm: { + readonly AES256: "AES256"; + readonly KMS: "KMS"; +}; +/** + * @public + */ +export type S3SseAlgorithm = (typeof S3SseAlgorithm)[keyof typeof S3SseAlgorithm]; +/** + *

Represents the properties of the exported table.

+ * @public + */ +export interface ExportDescription { + /** + *

The Amazon Resource Name (ARN) of the table export.

+ * @public + */ + ExportArn?: string | undefined; + /** + *

Export can be in one of the following states: IN_PROGRESS, COMPLETED, or + * FAILED.

+ * @public + */ + ExportStatus?: ExportStatus | undefined; + /** + *

The time at which the export task began.

+ * @public + */ + StartTime?: Date | undefined; + /** + *

The time at which the export task completed.

+ * @public + */ + EndTime?: Date | undefined; + /** + *

The name of the manifest file for the export task.

+ * @public + */ + ExportManifest?: string | undefined; + /** + *

The Amazon Resource Name (ARN) of the table that was exported.

+ * @public + */ + TableArn?: string | undefined; + /** + *

Unique ID of the table that was exported.

+ * @public + */ + TableId?: string | undefined; + /** + *

Point in time from which table data was exported.

+ * @public + */ + ExportTime?: Date | undefined; + /** + *

The client token that was provided for the export task. A client token makes calls to + * ExportTableToPointInTimeInput idempotent, meaning that multiple + * identical calls have the same effect as one single call.

+ * @public + */ + ClientToken?: string | undefined; + /** + *

The name of the Amazon S3 bucket containing the export.

+ * @public + */ + S3Bucket?: string | undefined; + /** + *

The ID of the Amazon Web Services account that owns the bucket containing the + * export.

+ * @public + */ + S3BucketOwner?: string | undefined; + /** + *

The Amazon S3 bucket prefix used as the file name and path of the exported + * snapshot.

+ * @public + */ + S3Prefix?: string | undefined; + /** + *

Type of encryption used on the bucket where export data is stored. Valid values for + * S3SseAlgorithm are:

+ *
    + *
  • + *

    + * AES256 - server-side encryption with Amazon S3 managed + * keys

    + *
  • + *
  • + *

    + * KMS - server-side encryption with KMS managed + * keys

    + *
  • + *
+ * @public + */ + S3SseAlgorithm?: S3SseAlgorithm | undefined; + /** + *

The ID of the KMS managed key used to encrypt the S3 bucket where + * export data is stored (if applicable).

+ * @public + */ + S3SseKmsKeyId?: string | undefined; + /** + *

Status code for the result of the failed export.

+ * @public + */ + FailureCode?: string | undefined; + /** + *

Export failure reason description.

+ * @public + */ + FailureMessage?: string | undefined; + /** + *

The format of the exported data. Valid values for ExportFormat are + * DYNAMODB_JSON or ION.

+ * @public + */ + ExportFormat?: ExportFormat | undefined; + /** + *

The billable size of the table export.

+ * @public + */ + BilledSizeBytes?: number | undefined; + /** + *

The number of items exported.

+ * @public + */ + ItemCount?: number | undefined; + /** + *

The type of export that was performed. Valid values are FULL_EXPORT or + * INCREMENTAL_EXPORT.

+ * @public + */ + ExportType?: ExportType | undefined; + /** + *

Optional object containing the parameters specific to an incremental export.

+ * @public + */ + IncrementalExportSpecification?: IncrementalExportSpecification | undefined; +} +/** + * @public + */ +export interface DescribeExportOutput { + /** + *

Represents the properties of the export.

+ * @public + */ + ExportDescription?: ExportDescription | undefined; +} +/** + *

The specified export was not found.

+ * @public + */ +export declare class ExportNotFoundException extends __BaseException { + readonly name: "ExportNotFoundException"; + readonly $fault: "client"; + /** + * @internal + */ + constructor(opts: __ExceptionOptionType); +} +/** + * @public + */ +export interface DescribeGlobalTableInput { + /** + *

The name of the global table.

+ * @public + */ + GlobalTableName: string | undefined; +} +/** + * @public + */ +export interface DescribeGlobalTableOutput { + /** + *

Contains the details of the global table.

+ * @public + */ + GlobalTableDescription?: GlobalTableDescription | undefined; +} +/** + *

The specified global table does not exist.

+ * @public + */ +export declare class GlobalTableNotFoundException extends __BaseException { + readonly name: "GlobalTableNotFoundException"; + readonly $fault: "client"; + /** + * @internal + */ + constructor(opts: __ExceptionOptionType); +} +/** + * @public + */ +export interface DescribeGlobalTableSettingsInput { + /** + *

The name of the global table to describe.

+ * @public + */ + GlobalTableName: string | undefined; +} +/** + *

Represents the properties of a global secondary index.

+ * @public + */ +export interface ReplicaGlobalSecondaryIndexSettingsDescription { + /** + *

The name of the global secondary index. The name must be unique among all other + * indexes on this table.

+ * @public + */ + IndexName: string | undefined; + /** + *

The current status of the global secondary index:

+ *
    + *
  • + *

    + * CREATING - The global secondary index is being created.

    + *
  • + *
  • + *

    + * UPDATING - The global secondary index is being updated.

    + *
  • + *
  • + *

    + * DELETING - The global secondary index is being deleted.

    + *
  • + *
  • + *

    + * ACTIVE - The global secondary index is ready for use.

    + *
  • + *
+ * @public + */ + IndexStatus?: IndexStatus | undefined; + /** + *

The maximum number of strongly consistent reads consumed per second before DynamoDB + * returns a ThrottlingException.

+ * @public + */ + ProvisionedReadCapacityUnits?: number | undefined; + /** + *

Auto scaling settings for a global secondary index replica's read capacity + * units.

+ * @public + */ + ProvisionedReadCapacityAutoScalingSettings?: AutoScalingSettingsDescription | undefined; + /** + *

The maximum number of writes consumed per second before DynamoDB returns a + * ThrottlingException.

+ * @public + */ + ProvisionedWriteCapacityUnits?: number | undefined; + /** + *

Auto scaling settings for a global secondary index replica's write capacity + * units.

+ * @public + */ + ProvisionedWriteCapacityAutoScalingSettings?: AutoScalingSettingsDescription | undefined; +} +/** + *

Represents the properties of a replica.

+ * @public + */ +export interface ReplicaSettingsDescription { + /** + *

The Region name of the replica.

+ * @public + */ + RegionName: string | undefined; + /** + *

The current state of the Region:

+ *
    + *
  • + *

    + * CREATING - The Region is being created.

    + *
  • + *
  • + *

    + * UPDATING - The Region is being updated.

    + *
  • + *
  • + *

    + * DELETING - The Region is being deleted.

    + *
  • + *
  • + *

    + * ACTIVE - The Region is ready for use.

    + *
  • + *
+ * @public + */ + ReplicaStatus?: ReplicaStatus | undefined; + /** + *

The read/write capacity mode of the replica.

+ * @public + */ + ReplicaBillingModeSummary?: BillingModeSummary | undefined; + /** + *

The maximum number of strongly consistent reads consumed per second before DynamoDB + * returns a ThrottlingException. For more information, see Specifying Read and Write Requirements in the Amazon DynamoDB + * Developer Guide.

+ * @public + */ + ReplicaProvisionedReadCapacityUnits?: number | undefined; + /** + *

Auto scaling settings for a global table replica's read capacity units.

+ * @public + */ + ReplicaProvisionedReadCapacityAutoScalingSettings?: AutoScalingSettingsDescription | undefined; + /** + *

The maximum number of writes consumed per second before DynamoDB returns a + * ThrottlingException. For more information, see Specifying Read and Write Requirements in the Amazon DynamoDB + * Developer Guide.

+ * @public + */ + ReplicaProvisionedWriteCapacityUnits?: number | undefined; + /** + *

Auto scaling settings for a global table replica's write capacity units.

+ * @public + */ + ReplicaProvisionedWriteCapacityAutoScalingSettings?: AutoScalingSettingsDescription | undefined; + /** + *

Replica global secondary index settings for the global table.

+ * @public + */ + ReplicaGlobalSecondaryIndexSettings?: ReplicaGlobalSecondaryIndexSettingsDescription[] | undefined; + /** + *

Contains details of the table class.

+ * @public + */ + ReplicaTableClassSummary?: TableClassSummary | undefined; +} +/** + * @public + */ +export interface DescribeGlobalTableSettingsOutput { + /** + *

The name of the global table.

+ * @public + */ + GlobalTableName?: string | undefined; + /** + *

The Region-specific settings for the global table.

+ * @public + */ + ReplicaSettings?: ReplicaSettingsDescription[] | undefined; +} +/** + * @public + */ +export interface DescribeImportInput { + /** + *

The Amazon Resource Name (ARN) associated with the table you're importing to.

+ * @public + */ + ImportArn: string | undefined; +} +/** + * @public + * @enum + */ +export declare const ImportStatus: { + readonly CANCELLED: "CANCELLED"; + readonly CANCELLING: "CANCELLING"; + readonly COMPLETED: "COMPLETED"; + readonly FAILED: "FAILED"; + readonly IN_PROGRESS: "IN_PROGRESS"; +}; +/** + * @public + */ +export type ImportStatus = (typeof ImportStatus)[keyof typeof ImportStatus]; +/** + * @public + * @enum + */ +export declare const InputCompressionType: { + readonly GZIP: "GZIP"; + readonly NONE: "NONE"; + readonly ZSTD: "ZSTD"; +}; +/** + * @public + */ +export type InputCompressionType = (typeof InputCompressionType)[keyof typeof InputCompressionType]; +/** + * @public + * @enum + */ +export declare const InputFormat: { + readonly CSV: "CSV"; + readonly DYNAMODB_JSON: "DYNAMODB_JSON"; + readonly ION: "ION"; +}; +/** + * @public + */ +export type InputFormat = (typeof InputFormat)[keyof typeof InputFormat]; +/** + *

The format options for the data that was imported into the target table. There is one + * value, CsvOption.

+ * @public + */ +export interface InputFormatOptions { + /** + *

The options for imported source files in CSV format. The values are Delimiter and + * HeaderList.

+ * @public + */ + Csv?: CsvOptions | undefined; +} +/** + *

The S3 bucket that is being imported from.

+ * @public + */ +export interface S3BucketSource { + /** + *

The account number of the S3 bucket that is being imported from. If the bucket is + * owned by the requester this is optional.

+ * @public + */ + S3BucketOwner?: string | undefined; + /** + *

The S3 bucket that is being imported from.

+ * @public + */ + S3Bucket: string | undefined; + /** + *

The key prefix shared by all S3 Objects that are being imported.

+ * @public + */ + S3KeyPrefix?: string | undefined; +} +/** + *

The parameters for the table created as part of the import operation.

+ * @public + */ +export interface TableCreationParameters { + /** + *

The name of the table created as part of the import operation.

+ * @public + */ + TableName: string | undefined; + /** + *

The attributes of the table created as part of the import operation.

+ * @public + */ + AttributeDefinitions: AttributeDefinition[] | undefined; + /** + *

The primary key and option sort key of the table created as part of the import + * operation.

+ * @public + */ + KeySchema: KeySchemaElement[] | undefined; + /** + *

The billing mode for provisioning the table created as part of the import operation. + *

+ * @public + */ + BillingMode?: BillingMode | undefined; + /** + *

Represents the provisioned throughput settings for the specified global secondary + * index. You must use ProvisionedThroughput or + * OnDemandThroughput based on your table’s capacity mode.

+ *

For current minimum and maximum provisioned throughput values, see Service, + * Account, and Table Quotas in the Amazon DynamoDB Developer + * Guide.

+ * @public + */ + ProvisionedThroughput?: ProvisionedThroughput | undefined; + /** + *

Sets the maximum number of read and write units for the specified on-demand table. If + * you use this parameter, you must specify MaxReadRequestUnits, + * MaxWriteRequestUnits, or both.

+ * @public + */ + OnDemandThroughput?: OnDemandThroughput | undefined; + /** + *

Represents the settings used to enable server-side encryption.

+ * @public + */ + SSESpecification?: SSESpecification | undefined; + /** + *

The Global Secondary Indexes (GSI) of the table to be created as part of the import + * operation.

+ * @public + */ + GlobalSecondaryIndexes?: GlobalSecondaryIndex[] | undefined; +} +/** + *

Represents the properties of the table being imported into. + *

+ * @public + */ +export interface ImportTableDescription { + /** + *

The Amazon Resource Number (ARN) corresponding to the import request. + *

+ * @public + */ + ImportArn?: string | undefined; + /** + *

The status of the import.

+ * @public + */ + ImportStatus?: ImportStatus | undefined; + /** + *

The Amazon Resource Number (ARN) of the table being imported into. + *

+ * @public + */ + TableArn?: string | undefined; + /** + *

The table id corresponding to the table created by import table process. + *

+ * @public + */ + TableId?: string | undefined; + /** + *

The client token that was provided for the import task. Reusing the client token on + * retry makes a call to ImportTable idempotent.

+ * @public + */ + ClientToken?: string | undefined; + /** + *

Values for the S3 bucket the source file is imported from. Includes bucket name + * (required), key prefix (optional) and bucket account owner ID (optional).

+ * @public + */ + S3BucketSource?: S3BucketSource | undefined; + /** + *

The number of errors occurred on importing the source file into the target table. + *

+ * @public + */ + ErrorCount?: number | undefined; + /** + *

The Amazon Resource Number (ARN) of the Cloudwatch Log Group associated with the + * target table.

+ * @public + */ + CloudWatchLogGroupArn?: string | undefined; + /** + *

The format of the source data going into the target table. + *

+ * @public + */ + InputFormat?: InputFormat | undefined; + /** + *

The format options for the data that was imported into the target table. There is one + * value, CsvOption.

+ * @public + */ + InputFormatOptions?: InputFormatOptions | undefined; + /** + *

The compression options for the data that has been imported into the target table. + * The values are NONE, GZIP, or ZSTD.

+ * @public + */ + InputCompressionType?: InputCompressionType | undefined; + /** + *

The parameters for the new table that is being imported into.

+ * @public + */ + TableCreationParameters?: TableCreationParameters | undefined; + /** + *

The time when this import task started.

+ * @public + */ + StartTime?: Date | undefined; + /** + *

The time at which the creation of the table associated with this import task + * completed.

+ * @public + */ + EndTime?: Date | undefined; + /** + *

The total size of data processed from the source file, in Bytes.

+ * @public + */ + ProcessedSizeBytes?: number | undefined; + /** + *

The total number of items processed from the source file.

+ * @public + */ + ProcessedItemCount?: number | undefined; + /** + *

The number of items successfully imported into the new table.

+ * @public + */ + ImportedItemCount?: number | undefined; + /** + *

The error code corresponding to the failure that the import job ran into during + * execution.

+ * @public + */ + FailureCode?: string | undefined; + /** + *

The error message corresponding to the failure that the import job ran into during + * execution.

+ * @public + */ + FailureMessage?: string | undefined; +} +/** + * @public + */ +export interface DescribeImportOutput { + /** + *

Represents the properties of the table created for the import, and parameters of the + * import. The import parameters include import status, how many items were processed, and + * how many errors were encountered.

+ * @public + */ + ImportTableDescription: ImportTableDescription | undefined; +} +/** + *

+ * The specified import was not found. + *

+ * @public + */ +export declare class ImportNotFoundException extends __BaseException { + readonly name: "ImportNotFoundException"; + readonly $fault: "client"; + /** + * @internal + */ + constructor(opts: __ExceptionOptionType); +} +/** + * @public + */ +export interface DescribeKinesisStreamingDestinationInput { + /** + *

The name of the table being described. You can also provide the Amazon Resource Name (ARN) of the table + * in this parameter.

+ * @public + */ + TableName: string | undefined; +} +/** + * @public + * @enum + */ +export declare const DestinationStatus: { + readonly ACTIVE: "ACTIVE"; + readonly DISABLED: "DISABLED"; + readonly DISABLING: "DISABLING"; + readonly ENABLE_FAILED: "ENABLE_FAILED"; + readonly ENABLING: "ENABLING"; + readonly UPDATING: "UPDATING"; +}; +/** + * @public + */ +export type DestinationStatus = (typeof DestinationStatus)[keyof typeof DestinationStatus]; +/** + *

Describes a Kinesis data stream destination.

+ * @public + */ +export interface KinesisDataStreamDestination { + /** + *

The ARN for a specific Kinesis data stream.

+ * @public + */ + StreamArn?: string | undefined; + /** + *

The current status of replication.

+ * @public + */ + DestinationStatus?: DestinationStatus | undefined; + /** + *

The human-readable string that corresponds to the replica status.

+ * @public + */ + DestinationStatusDescription?: string | undefined; + /** + *

The precision of the Kinesis data stream timestamp. The values are either + * MILLISECOND or MICROSECOND.

+ * @public + */ + ApproximateCreationDateTimePrecision?: ApproximateCreationDateTimePrecision | undefined; +} +/** + * @public + */ +export interface DescribeKinesisStreamingDestinationOutput { + /** + *

The name of the table being described.

+ * @public + */ + TableName?: string | undefined; + /** + *

The list of replica structures for the table being described.

+ * @public + */ + KinesisDataStreamDestinations?: KinesisDataStreamDestination[] | undefined; +} +/** + *

Represents the input of a DescribeLimits operation. Has no + * content.

+ * @public + */ +export interface DescribeLimitsInput { +} +/** + *

Represents the output of a DescribeLimits operation.

+ * @public + */ +export interface DescribeLimitsOutput { + /** + *

The maximum total read capacity units that your account allows you to provision across + * all of your tables in this Region.

+ * @public + */ + AccountMaxReadCapacityUnits?: number | undefined; + /** + *

The maximum total write capacity units that your account allows you to provision + * across all of your tables in this Region.

+ * @public + */ + AccountMaxWriteCapacityUnits?: number | undefined; + /** + *

The maximum read capacity units that your account allows you to provision for a new + * table that you are creating in this Region, including the read capacity units + * provisioned for its global secondary indexes (GSIs).

+ * @public + */ + TableMaxReadCapacityUnits?: number | undefined; + /** + *

The maximum write capacity units that your account allows you to provision for a new + * table that you are creating in this Region, including the write capacity units + * provisioned for its global secondary indexes (GSIs).

+ * @public + */ + TableMaxWriteCapacityUnits?: number | undefined; +} +/** + *

Represents the input of a DescribeTable operation.

+ * @public + */ +export interface DescribeTableInput { + /** + *

The name of the table to describe. You can also provide the Amazon Resource Name (ARN) of the table in + * this parameter.

+ * @public + */ + TableName: string | undefined; +} +/** + *

Represents the output of a DescribeTable operation.

+ * @public + */ +export interface DescribeTableOutput { + /** + *

The properties of the table.

+ * @public + */ + Table?: TableDescription | undefined; +} +/** + * @public + */ +export interface DescribeTableReplicaAutoScalingInput { + /** + *

The name of the table. You can also provide the Amazon Resource Name (ARN) of the table in this + * parameter.

+ * @public + */ + TableName: string | undefined; +} +/** + *

Represents the auto scaling configuration for a replica global secondary index.

+ * @public + */ +export interface ReplicaGlobalSecondaryIndexAutoScalingDescription { + /** + *

The name of the global secondary index.

+ * @public + */ + IndexName?: string | undefined; + /** + *

The current state of the replica global secondary index:

+ *
    + *
  • + *

    + * CREATING - The index is being created.

    + *
  • + *
  • + *

    + * UPDATING - The table/index configuration is being updated. The + * table/index remains available for data operations when + * UPDATING + *

    + *
  • + *
  • + *

    + * DELETING - The index is being deleted.

    + *
  • + *
  • + *

    + * ACTIVE - The index is ready for use.

    + *
  • + *
+ * @public + */ + IndexStatus?: IndexStatus | undefined; + /** + *

Represents the auto scaling settings for a global table or global secondary + * index.

+ * @public + */ + ProvisionedReadCapacityAutoScalingSettings?: AutoScalingSettingsDescription | undefined; + /** + *

Represents the auto scaling settings for a global table or global secondary + * index.

+ * @public + */ + ProvisionedWriteCapacityAutoScalingSettings?: AutoScalingSettingsDescription | undefined; +} +/** + *

Represents the auto scaling settings of the replica.

+ * @public + */ +export interface ReplicaAutoScalingDescription { + /** + *

The Region where the replica exists.

+ * @public + */ + RegionName?: string | undefined; + /** + *

Replica-specific global secondary index auto scaling settings.

+ * @public + */ + GlobalSecondaryIndexes?: ReplicaGlobalSecondaryIndexAutoScalingDescription[] | undefined; + /** + *

Represents the auto scaling settings for a global table or global secondary + * index.

+ * @public + */ + ReplicaProvisionedReadCapacityAutoScalingSettings?: AutoScalingSettingsDescription | undefined; + /** + *

Represents the auto scaling settings for a global table or global secondary + * index.

+ * @public + */ + ReplicaProvisionedWriteCapacityAutoScalingSettings?: AutoScalingSettingsDescription | undefined; + /** + *

The current state of the replica:

+ *
    + *
  • + *

    + * CREATING - The replica is being created.

    + *
  • + *
  • + *

    + * UPDATING - The replica is being updated.

    + *
  • + *
  • + *

    + * DELETING - The replica is being deleted.

    + *
  • + *
  • + *

    + * ACTIVE - The replica is ready for use.

    + *
  • + *
+ * @public + */ + ReplicaStatus?: ReplicaStatus | undefined; +} +/** + *

Represents the auto scaling configuration for a global table.

+ * @public + */ +export interface TableAutoScalingDescription { + /** + *

The name of the table.

+ * @public + */ + TableName?: string | undefined; + /** + *

The current state of the table:

+ *
    + *
  • + *

    + * CREATING - The table is being created.

    + *
  • + *
  • + *

    + * UPDATING - The table is being updated.

    + *
  • + *
  • + *

    + * DELETING - The table is being deleted.

    + *
  • + *
  • + *

    + * ACTIVE - The table is ready for use.

    + *
  • + *
+ * @public + */ + TableStatus?: TableStatus | undefined; + /** + *

Represents replicas of the global table.

+ * @public + */ + Replicas?: ReplicaAutoScalingDescription[] | undefined; +} +/** + * @public + */ +export interface DescribeTableReplicaAutoScalingOutput { + /** + *

Represents the auto scaling properties of the table.

+ * @public + */ + TableAutoScalingDescription?: TableAutoScalingDescription | undefined; +} +/** + * @public + */ +export interface DescribeTimeToLiveInput { + /** + *

The name of the table to be described. You can also provide the Amazon Resource Name (ARN) of the table + * in this parameter.

+ * @public + */ + TableName: string | undefined; +} +/** + * @public + */ +export interface DescribeTimeToLiveOutput { + /** + *

+ * @public + */ + TimeToLiveDescription?: TimeToLiveDescription | undefined; +} +/** + *

Enables setting the configuration for Kinesis Streaming.

+ * @public + */ +export interface EnableKinesisStreamingConfiguration { + /** + *

Toggle for the precision of Kinesis data stream timestamp. The values are either + * MILLISECOND or MICROSECOND.

+ * @public + */ + ApproximateCreationDateTimePrecision?: ApproximateCreationDateTimePrecision | undefined; +} +/** + * @public + */ +export interface KinesisStreamingDestinationInput { + /** + *

The name of the DynamoDB table. You can also provide the Amazon Resource Name (ARN) of the + * table in this parameter.

+ * @public + */ + TableName: string | undefined; + /** + *

The ARN for a Kinesis data stream.

+ * @public + */ + StreamArn: string | undefined; + /** + *

The source for the Kinesis streaming information that is being enabled.

+ * @public + */ + EnableKinesisStreamingConfiguration?: EnableKinesisStreamingConfiguration | undefined; +} +/** + * @public + */ +export interface KinesisStreamingDestinationOutput { + /** + *

The name of the table being modified.

+ * @public + */ + TableName?: string | undefined; + /** + *

The ARN for the specific Kinesis data stream.

+ * @public + */ + StreamArn?: string | undefined; + /** + *

The current status of the replication.

+ * @public + */ + DestinationStatus?: DestinationStatus | undefined; + /** + *

The destination for the Kinesis streaming information that is being enabled.

+ * @public + */ + EnableKinesisStreamingConfiguration?: EnableKinesisStreamingConfiguration | undefined; +} +/** + *

There was an attempt to insert an item with the same primary key as an item that + * already exists in the DynamoDB table.

+ * @public + */ +export declare class DuplicateItemException extends __BaseException { + readonly name: "DuplicateItemException"; + readonly $fault: "client"; + /** + * @internal + */ + constructor(opts: __ExceptionOptionType); +} +/** + *

DynamoDB rejected the request because you retried a request with a + * different payload but with an idempotent token that was already used.

+ * @public + */ +export declare class IdempotentParameterMismatchException extends __BaseException { + readonly name: "IdempotentParameterMismatchException"; + readonly $fault: "client"; + Message?: string | undefined; + /** + * @internal + */ + constructor(opts: __ExceptionOptionType); +} +/** + *

The transaction with the given request token is already in progress.

+ *

+ * Recommended Settings + *

+ * + *

+ * This is a general recommendation for handling the TransactionInProgressException. These settings help + * ensure that the client retries will trigger completion of the ongoing TransactWriteItems request. + *

+ *
+ *
    + *
  • + *

    + * Set clientExecutionTimeout to a value that allows at least one retry to be processed after 5 + * seconds have elapsed since the first attempt for the TransactWriteItems operation. + *

    + *
  • + *
  • + *

    + * Set socketTimeout to a value a little lower than the requestTimeout setting. + *

    + *
  • + *
  • + *

    + * requestTimeout should be set based on the time taken for the individual retries of a single + * HTTP request for your use case, but setting it to 1 second or higher should work well to reduce chances of + * retries and TransactionInProgressException errors. + *

    + *
  • + *
  • + *

    + * Use exponential backoff when retrying and tune backoff if needed. + *

    + *
  • + *
+ *

+ * Assuming default retry policy, + * example timeout settings based on the guidelines above are as follows: + *

+ *

Example timeline:

+ *
    + *
  • + *

    0-1000 first attempt

    + *
  • + *
  • + *

    1000-1500 first sleep/delay (default retry policy uses 500 ms as base delay for 4xx errors)

    + *
  • + *
  • + *

    1500-2500 second attempt

    + *
  • + *
  • + *

    2500-3500 second sleep/delay (500 * 2, exponential backoff)

    + *
  • + *
  • + *

    3500-4500 third attempt

    + *
  • + *
  • + *

    4500-6500 third sleep/delay (500 * 2^2)

    + *
  • + *
  • + *

    6500-7500 fourth attempt (this can trigger inline recovery since 5 seconds have elapsed since the first attempt reached TC)

    + *
  • + *
+ * @public + */ +export declare class TransactionInProgressException extends __BaseException { + readonly name: "TransactionInProgressException"; + readonly $fault: "client"; + Message?: string | undefined; + /** + * @internal + */ + constructor(opts: __ExceptionOptionType); +} +/** + *

There was a conflict when writing to the specified S3 bucket.

+ * @public + */ +export declare class ExportConflictException extends __BaseException { + readonly name: "ExportConflictException"; + readonly $fault: "client"; + /** + * @internal + */ + constructor(opts: __ExceptionOptionType); +} +/** + * @public + */ +export interface ExportTableToPointInTimeInput { + /** + *

The Amazon Resource Name (ARN) associated with the table to export.

+ * @public + */ + TableArn: string | undefined; + /** + *

Time in the past from which to export table data, counted in seconds from the start of + * the Unix epoch. The table export will be a snapshot of the table's state at this point + * in time.

+ * @public + */ + ExportTime?: Date | undefined; + /** + *

Providing a ClientToken makes the call to + * ExportTableToPointInTimeInput idempotent, meaning that multiple + * identical calls have the same effect as one single call.

+ *

A client token is valid for 8 hours after the first request that uses it is completed. + * After 8 hours, any request with the same client token is treated as a new request. Do + * not resubmit the same request with the same client token for more than 8 hours, or the + * result might not be idempotent.

+ *

If you submit a request with the same client token but a change in other parameters + * within the 8-hour idempotency window, DynamoDB returns an + * ImportConflictException.

+ * @public + */ + ClientToken?: string | undefined; + /** + *

The name of the Amazon S3 bucket to export the snapshot to.

+ * @public + */ + S3Bucket: string | undefined; + /** + *

The ID of the Amazon Web Services account that owns the bucket the export will be + * stored in.

+ * + *

S3BucketOwner is a required parameter when exporting to a S3 bucket in another + * account.

+ *
+ * @public + */ + S3BucketOwner?: string | undefined; + /** + *

The Amazon S3 bucket prefix to use as the file name and path of the exported + * snapshot.

+ * @public + */ + S3Prefix?: string | undefined; + /** + *

Type of encryption used on the bucket where export data will be stored. Valid values + * for S3SseAlgorithm are:

+ *
    + *
  • + *

    + * AES256 - server-side encryption with Amazon S3 managed + * keys

    + *
  • + *
  • + *

    + * KMS - server-side encryption with KMS managed + * keys

    + *
  • + *
+ * @public + */ + S3SseAlgorithm?: S3SseAlgorithm | undefined; + /** + *

The ID of the KMS managed key used to encrypt the S3 bucket where + * export data will be stored (if applicable).

+ * @public + */ + S3SseKmsKeyId?: string | undefined; + /** + *

The format for the exported data. Valid values for ExportFormat are + * DYNAMODB_JSON or ION.

+ * @public + */ + ExportFormat?: ExportFormat | undefined; + /** + *

Choice of whether to execute as a full export or incremental export. Valid values are + * FULL_EXPORT or INCREMENTAL_EXPORT. The default value is FULL_EXPORT. If + * INCREMENTAL_EXPORT is provided, the IncrementalExportSpecification must also be + * used.

+ * @public + */ + ExportType?: ExportType | undefined; + /** + *

Optional object containing the parameters specific to an incremental export.

+ * @public + */ + IncrementalExportSpecification?: IncrementalExportSpecification | undefined; +} +/** + * @public + */ +export interface ExportTableToPointInTimeOutput { + /** + *

Contains a description of the table export.

+ * @public + */ + ExportDescription?: ExportDescription | undefined; +} +/** + *

The specified ExportTime is outside of the point in time recovery + * window.

+ * @public + */ +export declare class InvalidExportTimeException extends __BaseException { + readonly name: "InvalidExportTimeException"; + readonly $fault: "client"; + /** + * @internal + */ + constructor(opts: __ExceptionOptionType); +} +/** + *

Point in time recovery has not yet been enabled for this source table.

+ * @public + */ +export declare class PointInTimeRecoveryUnavailableException extends __BaseException { + readonly name: "PointInTimeRecoveryUnavailableException"; + readonly $fault: "client"; + /** + * @internal + */ + constructor(opts: __ExceptionOptionType); +} +/** + * @public + */ +export interface GetResourcePolicyInput { + /** + *

The Amazon Resource Name (ARN) of the DynamoDB resource to which the policy is attached. The + * resources you can specify include tables and streams.

+ * @public + */ + ResourceArn: string | undefined; +} +/** + * @public + */ +export interface GetResourcePolicyOutput { + /** + *

The resource-based policy document attached to the resource, which can be a table or + * stream, in JSON format.

+ * @public + */ + Policy?: string | undefined; + /** + *

A unique string that represents the revision ID of the policy. If you're comparing revision IDs, make sure to always use string comparison logic.

+ * @public + */ + RevisionId?: string | undefined; +} +/** + *

+ * There was a conflict when importing from the specified S3 source. + * This can occur when the current import conflicts with a previous import request + * that had the same client token. + *

+ * @public + */ +export declare class ImportConflictException extends __BaseException { + readonly name: "ImportConflictException"; + readonly $fault: "client"; + /** + * @internal + */ + constructor(opts: __ExceptionOptionType); +} +/** + * @public + */ +export interface ImportTableInput { + /** + *

Providing a ClientToken makes the call to ImportTableInput + * idempotent, meaning that multiple identical calls have the same effect as one single + * call.

+ *

A client token is valid for 8 hours after the first request that uses it is completed. + * After 8 hours, any request with the same client token is treated as a new request. Do + * not resubmit the same request with the same client token for more than 8 hours, or the + * result might not be idempotent.

+ *

If you submit a request with the same client token but a change in other parameters + * within the 8-hour idempotency window, DynamoDB returns an + * IdempotentParameterMismatch exception.

+ * @public + */ + ClientToken?: string | undefined; + /** + *

The S3 bucket that provides the source for the import.

+ * @public + */ + S3BucketSource: S3BucketSource | undefined; + /** + *

The format of the source data. Valid values for ImportFormat are + * CSV, DYNAMODB_JSON or ION.

+ * @public + */ + InputFormat: InputFormat | undefined; + /** + *

Additional properties that specify how the input is formatted,

+ * @public + */ + InputFormatOptions?: InputFormatOptions | undefined; + /** + *

Type of compression to be used on the input coming from the imported table.

+ * @public + */ + InputCompressionType?: InputCompressionType | undefined; + /** + *

Parameters for the table to import the data into.

+ * @public + */ + TableCreationParameters: TableCreationParameters | undefined; +} +/** + * @public + */ +export interface ImportTableOutput { + /** + *

Represents the properties of the table created for the import, and parameters of the + * import. The import parameters include import status, how many items were processed, and + * how many errors were encountered.

+ * @public + */ + ImportTableDescription: ImportTableDescription | undefined; +} +/** + * @public + */ +export interface ListBackupsInput { + /** + *

Lists the backups from the table specified in TableName. You can also + * provide the Amazon Resource Name (ARN) of the table in this parameter.

+ * @public + */ + TableName?: string | undefined; + /** + *

Maximum number of backups to return at once.

+ * @public + */ + Limit?: number | undefined; + /** + *

Only backups created after this time are listed. TimeRangeLowerBound is + * inclusive.

+ * @public + */ + TimeRangeLowerBound?: Date | undefined; + /** + *

Only backups created before this time are listed. TimeRangeUpperBound is + * exclusive.

+ * @public + */ + TimeRangeUpperBound?: Date | undefined; + /** + *

+ * LastEvaluatedBackupArn is the Amazon Resource Name (ARN) of the backup last + * evaluated when the current page of results was returned, inclusive of the current page + * of results. This value may be specified as the ExclusiveStartBackupArn of a + * new ListBackups operation in order to fetch the next page of results. + *

+ * @public + */ + ExclusiveStartBackupArn?: string | undefined; + /** + *

The backups from the table specified by BackupType are listed.

+ *

Where BackupType can be:

+ *
    + *
  • + *

    + * USER - On-demand backup created by you. (The default setting if no + * other backup types are specified.)

    + *
  • + *
  • + *

    + * SYSTEM - On-demand backup automatically created by DynamoDB.

    + *
  • + *
  • + *

    + * ALL - All types of on-demand backups (USER and SYSTEM).

    + *
  • + *
+ * @public + */ + BackupType?: BackupTypeFilter | undefined; +} +/** + * @public + */ +export interface ListBackupsOutput { + /** + *

List of BackupSummary objects.

+ * @public + */ + BackupSummaries?: BackupSummary[] | undefined; + /** + *

The ARN of the backup last evaluated when the current page of results was returned, + * inclusive of the current page of results. This value may be specified as the + * ExclusiveStartBackupArn of a new ListBackups operation in + * order to fetch the next page of results.

+ *

If LastEvaluatedBackupArn is empty, then the last page of results has + * been processed and there are no more results to be retrieved.

+ *

If LastEvaluatedBackupArn is not empty, this may or may not indicate + * that there is more data to be returned. All results are guaranteed to have been returned + * if and only if no value for LastEvaluatedBackupArn is returned.

+ * @public + */ + LastEvaluatedBackupArn?: string | undefined; +} +/** + * @public + */ +export interface ListContributorInsightsInput { + /** + *

The name of the table. You can also provide the Amazon Resource Name (ARN) of the table in this + * parameter.

+ * @public + */ + TableName?: string | undefined; + /** + *

A token to for the desired page, if there is one.

+ * @public + */ + NextToken?: string | undefined; + /** + *

Maximum number of results to return per page.

+ * @public + */ + MaxResults?: number | undefined; +} +/** + * @public + */ +export interface ListContributorInsightsOutput { + /** + *

A list of ContributorInsightsSummary.

+ * @public + */ + ContributorInsightsSummaries?: ContributorInsightsSummary[] | undefined; + /** + *

A token to go to the next page if there is one.

+ * @public + */ + NextToken?: string | undefined; +} +/** + * @public + */ +export interface ListExportsInput { + /** + *

The Amazon Resource Name (ARN) associated with the exported table.

+ * @public + */ + TableArn?: string | undefined; + /** + *

Maximum number of results to return per page.

+ * @public + */ + MaxResults?: number | undefined; + /** + *

An optional string that, if supplied, must be copied from the output of a previous + * call to ListExports. When provided in this manner, the API fetches the next + * page of results.

+ * @public + */ + NextToken?: string | undefined; +} +/** + *

Summary information about an export task.

+ * @public + */ +export interface ExportSummary { + /** + *

The Amazon Resource Name (ARN) of the export.

+ * @public + */ + ExportArn?: string | undefined; + /** + *

Export can be in one of the following states: IN_PROGRESS, COMPLETED, or + * FAILED.

+ * @public + */ + ExportStatus?: ExportStatus | undefined; + /** + *

The type of export that was performed. Valid values are FULL_EXPORT or + * INCREMENTAL_EXPORT.

+ * @public + */ + ExportType?: ExportType | undefined; +} +/** + * @public + */ +export interface ListExportsOutput { + /** + *

A list of ExportSummary objects.

+ * @public + */ + ExportSummaries?: ExportSummary[] | undefined; + /** + *

If this value is returned, there are additional results to be displayed. To retrieve + * them, call ListExports again, with NextToken set to this + * value.

+ * @public + */ + NextToken?: string | undefined; +} +/** + * @public + */ +export interface ListGlobalTablesInput { + /** + *

The first global table name that this operation will evaluate.

+ * @public + */ + ExclusiveStartGlobalTableName?: string | undefined; + /** + *

The maximum number of table names to return, if the parameter is not specified + * DynamoDB defaults to 100.

+ *

If the number of global tables DynamoDB finds reaches this limit, it stops the + * operation and returns the table names collected up to that point, with a table name in + * the LastEvaluatedGlobalTableName to apply in a subsequent operation to the + * ExclusiveStartGlobalTableName parameter.

+ * @public + */ + Limit?: number | undefined; + /** + *

Lists the global tables in a specific Region.

+ * @public + */ + RegionName?: string | undefined; +} +/** + *

Represents the properties of a global table.

+ * @public + */ +export interface GlobalTable { + /** + *

The global table name.

+ * @public + */ + GlobalTableName?: string | undefined; + /** + *

The Regions where the global table has replicas.

+ * @public + */ + ReplicationGroup?: Replica[] | undefined; +} +/** + * @public + */ +export interface ListGlobalTablesOutput { + /** + *

List of global table names.

+ * @public + */ + GlobalTables?: GlobalTable[] | undefined; + /** + *

Last evaluated global table name.

+ * @public + */ + LastEvaluatedGlobalTableName?: string | undefined; +} +/** + * @public + */ +export interface ListImportsInput { + /** + *

The Amazon Resource Name (ARN) associated with the table that was imported to. + *

+ * @public + */ + TableArn?: string | undefined; + /** + *

The number of ImportSummary objects returned in a single page.

+ * @public + */ + PageSize?: number | undefined; + /** + *

An optional string that, if supplied, must be copied from the output of a previous + * call to ListImports. When provided in this manner, the API fetches the next + * page of results.

+ * @public + */ + NextToken?: string | undefined; +} +/** + *

Summary information about the source file for the import. + *

+ * @public + */ +export interface ImportSummary { + /** + *

The Amazon Resource Number (ARN) corresponding to the import request.

+ * @public + */ + ImportArn?: string | undefined; + /** + *

The status of the import operation.

+ * @public + */ + ImportStatus?: ImportStatus | undefined; + /** + *

The Amazon Resource Number (ARN) of the table being imported into.

+ * @public + */ + TableArn?: string | undefined; + /** + *

The path and S3 bucket of the source file that is being imported. This includes the + * S3Bucket (required), S3KeyPrefix (optional) and S3BucketOwner (optional if the bucket is + * owned by the requester).

+ * @public + */ + S3BucketSource?: S3BucketSource | undefined; + /** + *

The Amazon Resource Number (ARN) of the Cloudwatch Log Group associated with this + * import task.

+ * @public + */ + CloudWatchLogGroupArn?: string | undefined; + /** + *

The format of the source data. Valid values are CSV, + * DYNAMODB_JSON or ION.

+ * @public + */ + InputFormat?: InputFormat | undefined; + /** + *

The time at which this import task began.

+ * @public + */ + StartTime?: Date | undefined; + /** + *

The time at which this import task ended. (Does this include the successful complete + * creation of the table it was imported to?)

+ * @public + */ + EndTime?: Date | undefined; +} +/** + * @public + */ +export interface ListImportsOutput { + /** + *

A list of ImportSummary objects.

+ * @public + */ + ImportSummaryList?: ImportSummary[] | undefined; + /** + *

If this value is returned, there are additional results to be displayed. To retrieve + * them, call ListImports again, with NextToken set to this + * value.

+ * @public + */ + NextToken?: string | undefined; +} +/** + *

Represents the input of a ListTables operation.

+ * @public + */ +export interface ListTablesInput { + /** + *

The first table name that this operation will evaluate. Use the value that was + * returned for LastEvaluatedTableName in a previous operation, so that you + * can obtain the next page of results.

+ * @public + */ + ExclusiveStartTableName?: string | undefined; + /** + *

A maximum number of table names to return. If this parameter is not specified, the + * limit is 100.

+ * @public + */ + Limit?: number | undefined; +} +/** + *

Represents the output of a ListTables operation.

+ * @public + */ +export interface ListTablesOutput { + /** + *

The names of the tables associated with the current account at the current endpoint. + * The maximum size of this array is 100.

+ *

If LastEvaluatedTableName also appears in the output, you can use this + * value as the ExclusiveStartTableName parameter in a subsequent + * ListTables request and obtain the next page of results.

+ * @public + */ + TableNames?: string[] | undefined; + /** + *

The name of the last table in the current page of results. Use this value as the + * ExclusiveStartTableName in a new request to obtain the next page of + * results, until all the table names are returned.

+ *

If you do not receive a LastEvaluatedTableName value in the response, + * this means that there are no more table names to be retrieved.

+ * @public + */ + LastEvaluatedTableName?: string | undefined; +} +/** + * @public + */ +export interface ListTagsOfResourceInput { + /** + *

The Amazon DynamoDB resource with tags to be listed. This value is an Amazon Resource + * Name (ARN).

+ * @public + */ + ResourceArn: string | undefined; + /** + *

An optional string that, if supplied, must be copied from the output of a previous + * call to ListTagOfResource. When provided in this manner, this API fetches the next page + * of results.

+ * @public + */ + NextToken?: string | undefined; +} +/** + * @public + */ +export interface ListTagsOfResourceOutput { + /** + *

The tags currently associated with the Amazon DynamoDB resource.

+ * @public + */ + Tags?: Tag[] | undefined; + /** + *

If this value is returned, there are additional results to be displayed. To retrieve + * them, call ListTagsOfResource again, with NextToken set to this value.

+ * @public + */ + NextToken?: string | undefined; +} +/** + * @public + */ +export interface PutResourcePolicyInput { + /** + *

The Amazon Resource Name (ARN) of the DynamoDB resource to which the policy will be attached. + * The resources you can specify include tables and streams.

+ *

You can control index permissions using the base table's policy. To specify the same permission level for your table and its indexes, you can provide both the table and index Amazon Resource Name (ARN)s in the Resource field of a given Statement in your policy document. Alternatively, to specify different permissions for your table, indexes, or both, you can define multiple Statement fields in your policy document.

+ * @public + */ + ResourceArn: string | undefined; + /** + *

An Amazon Web Services resource-based policy document in JSON format.

+ *
    + *
  • + *

    The maximum size supported for a resource-based policy document is 20 KB. + * DynamoDB counts whitespaces when calculating the size of a policy + * against this limit.

    + *
  • + *
  • + *

    Within a resource-based policy, if the action for a DynamoDB + * service-linked role (SLR) to replicate data for a global table is denied, adding + * or deleting a replica will fail with an error.

    + *
  • + *
+ *

For a full list of all considerations that apply while attaching a resource-based + * policy, see Resource-based + * policy considerations.

+ * @public + */ + Policy: string | undefined; + /** + *

A string value that you can use to conditionally update your policy. You can provide + * the revision ID of your existing policy to make mutating requests against that + * policy.

+ * + *

When you provide an expected revision ID, if the revision ID of the existing + * policy on the resource doesn't match or if there's no policy attached to the + * resource, your request will be rejected with a + * PolicyNotFoundException.

+ *
+ *

To conditionally attach a policy when no policy exists for the resource, specify + * NO_POLICY for the revision ID.

+ * @public + */ + ExpectedRevisionId?: string | undefined; + /** + *

Set this parameter to true to confirm that you want to remove your + * permissions to change the policy of this resource in the future.

+ * @public + */ + ConfirmRemoveSelfResourceAccess?: boolean | undefined; +} +/** + * @public + */ +export interface PutResourcePolicyOutput { + /** + *

A unique string that represents the revision ID of the policy. If you're comparing revision IDs, make sure to always use string comparison logic.

+ * @public + */ + RevisionId?: string | undefined; +} +/** + * @public + * @enum + */ +export declare const Select: { + readonly ALL_ATTRIBUTES: "ALL_ATTRIBUTES"; + readonly ALL_PROJECTED_ATTRIBUTES: "ALL_PROJECTED_ATTRIBUTES"; + readonly COUNT: "COUNT"; + readonly SPECIFIC_ATTRIBUTES: "SPECIFIC_ATTRIBUTES"; +}; +/** + * @public + */ +export type Select = (typeof Select)[keyof typeof Select]; +/** + * @public + */ +export interface RestoreTableFromBackupInput { + /** + *

The name of the new table to which the backup must be restored.

+ * @public + */ + TargetTableName: string | undefined; + /** + *

The Amazon Resource Name (ARN) associated with the backup.

+ * @public + */ + BackupArn: string | undefined; + /** + *

The billing mode of the restored table.

+ * @public + */ + BillingModeOverride?: BillingMode | undefined; + /** + *

List of global secondary indexes for the restored table. The indexes provided should + * match existing secondary indexes. You can choose to exclude some or all of the indexes + * at the time of restore.

+ * @public + */ + GlobalSecondaryIndexOverride?: GlobalSecondaryIndex[] | undefined; + /** + *

List of local secondary indexes for the restored table. The indexes provided should + * match existing secondary indexes. You can choose to exclude some or all of the indexes + * at the time of restore.

+ * @public + */ + LocalSecondaryIndexOverride?: LocalSecondaryIndex[] | undefined; + /** + *

Provisioned throughput settings for the restored table.

+ * @public + */ + ProvisionedThroughputOverride?: ProvisionedThroughput | undefined; + /** + *

Sets the maximum number of read and write units for the specified on-demand table. If + * you use this parameter, you must specify MaxReadRequestUnits, + * MaxWriteRequestUnits, or both.

+ * @public + */ + OnDemandThroughputOverride?: OnDemandThroughput | undefined; + /** + *

The new server-side encryption settings for the restored table.

+ * @public + */ + SSESpecificationOverride?: SSESpecification | undefined; +} +/** + * @public + */ +export interface RestoreTableFromBackupOutput { + /** + *

The description of the table created from an existing backup.

+ * @public + */ + TableDescription?: TableDescription | undefined; +} +/** + *

A target table with the specified name already exists.

+ * @public + */ +export declare class TableAlreadyExistsException extends __BaseException { + readonly name: "TableAlreadyExistsException"; + readonly $fault: "client"; + /** + * @internal + */ + constructor(opts: __ExceptionOptionType); +} +/** + *

An invalid restore time was specified. RestoreDateTime must be between + * EarliestRestorableDateTime and LatestRestorableDateTime.

+ * @public + */ +export declare class InvalidRestoreTimeException extends __BaseException { + readonly name: "InvalidRestoreTimeException"; + readonly $fault: "client"; + /** + * @internal + */ + constructor(opts: __ExceptionOptionType); +} +/** + * @public + */ +export interface RestoreTableToPointInTimeInput { + /** + *

The DynamoDB table that will be restored. This value is an Amazon Resource Name + * (ARN).

+ * @public + */ + SourceTableArn?: string | undefined; + /** + *

Name of the source table that is being restored.

+ * @public + */ + SourceTableName?: string | undefined; + /** + *

The name of the new table to which it must be restored to.

+ * @public + */ + TargetTableName: string | undefined; + /** + *

Restore the table to the latest possible time. LatestRestorableDateTime + * is typically 5 minutes before the current time.

+ * @public + */ + UseLatestRestorableTime?: boolean | undefined; + /** + *

Time in the past to restore the table to.

+ * @public + */ + RestoreDateTime?: Date | undefined; + /** + *

The billing mode of the restored table.

+ * @public + */ + BillingModeOverride?: BillingMode | undefined; + /** + *

List of global secondary indexes for the restored table. The indexes provided should + * match existing secondary indexes. You can choose to exclude some or all of the indexes + * at the time of restore.

+ * @public + */ + GlobalSecondaryIndexOverride?: GlobalSecondaryIndex[] | undefined; + /** + *

List of local secondary indexes for the restored table. The indexes provided should + * match existing secondary indexes. You can choose to exclude some or all of the indexes + * at the time of restore.

+ * @public + */ + LocalSecondaryIndexOverride?: LocalSecondaryIndex[] | undefined; + /** + *

Provisioned throughput settings for the restored table.

+ * @public + */ + ProvisionedThroughputOverride?: ProvisionedThroughput | undefined; + /** + *

Sets the maximum number of read and write units for the specified on-demand table. If + * you use this parameter, you must specify MaxReadRequestUnits, + * MaxWriteRequestUnits, or both.

+ * @public + */ + OnDemandThroughputOverride?: OnDemandThroughput | undefined; + /** + *

The new server-side encryption settings for the restored table.

+ * @public + */ + SSESpecificationOverride?: SSESpecification | undefined; +} +/** + * @public + */ +export interface RestoreTableToPointInTimeOutput { + /** + *

Represents the properties of a table.

+ * @public + */ + TableDescription?: TableDescription | undefined; +} +/** + * @public + */ +export interface TagResourceInput { + /** + *

Identifies the Amazon DynamoDB resource to which tags should be added. This value is + * an Amazon Resource Name (ARN).

+ * @public + */ + ResourceArn: string | undefined; + /** + *

The tags to be assigned to the Amazon DynamoDB resource.

+ * @public + */ + Tags: Tag[] | undefined; +} +/** + * @public + */ +export interface UntagResourceInput { + /** + *

The DynamoDB resource that the tags will be removed from. This value is an Amazon + * Resource Name (ARN).

+ * @public + */ + ResourceArn: string | undefined; + /** + *

A list of tag keys. Existing tags of the resource whose keys are members of this list + * will be removed from the DynamoDB resource.

+ * @public + */ + TagKeys: string[] | undefined; +} +/** + *

Represents the settings used to enable point in time recovery.

+ * @public + */ +export interface PointInTimeRecoverySpecification { + /** + *

Indicates whether point in time recovery is enabled (true) or disabled (false) on the + * table.

+ * @public + */ + PointInTimeRecoveryEnabled: boolean | undefined; + /** + *

The number of preceding days for which continuous backups are taken and maintained. + * Your table data is only recoverable to any point-in-time from within the configured + * recovery period. This parameter is optional. If no value is provided, the value will + * default to 35.

+ * @public + */ + RecoveryPeriodInDays?: number | undefined; +} +/** + * @public + */ +export interface UpdateContinuousBackupsInput { + /** + *

The name of the table. You can also provide the Amazon Resource Name (ARN) of the table in this + * parameter.

+ * @public + */ + TableName: string | undefined; + /** + *

Represents the settings used to enable point in time recovery.

+ * @public + */ + PointInTimeRecoverySpecification: PointInTimeRecoverySpecification | undefined; +} +/** + * @public + */ +export interface UpdateContinuousBackupsOutput { + /** + *

Represents the continuous backups and point in time recovery settings on the + * table.

+ * @public + */ + ContinuousBackupsDescription?: ContinuousBackupsDescription | undefined; +} +/** + * @public + */ +export interface UpdateContributorInsightsInput { + /** + *

The name of the table. You can also provide the Amazon Resource Name (ARN) of the table in this + * parameter.

+ * @public + */ + TableName: string | undefined; + /** + *

The global secondary index name, if applicable.

+ * @public + */ + IndexName?: string | undefined; + /** + *

Represents the contributor insights action.

+ * @public + */ + ContributorInsightsAction: ContributorInsightsAction | undefined; +} +/** + * @public + */ +export interface UpdateContributorInsightsOutput { + /** + *

The name of the table.

+ * @public + */ + TableName?: string | undefined; + /** + *

The name of the global secondary index, if applicable.

+ * @public + */ + IndexName?: string | undefined; + /** + *

The status of contributor insights

+ * @public + */ + ContributorInsightsStatus?: ContributorInsightsStatus | undefined; +} +/** + *

The specified replica is already part of the global table.

+ * @public + */ +export declare class ReplicaAlreadyExistsException extends __BaseException { + readonly name: "ReplicaAlreadyExistsException"; + readonly $fault: "client"; + /** + * @internal + */ + constructor(opts: __ExceptionOptionType); +} +/** + *

The specified replica is no longer part of the global table.

+ * @public + */ +export declare class ReplicaNotFoundException extends __BaseException { + readonly name: "ReplicaNotFoundException"; + readonly $fault: "client"; + /** + * @internal + */ + constructor(opts: __ExceptionOptionType); +} +/** + *

Represents one of the following:

+ *
    + *
  • + *

    A new replica to be added to an existing global table.

    + *
  • + *
  • + *

    New parameters for an existing replica.

    + *
  • + *
  • + *

    An existing replica to be removed from an existing global table.

    + *
  • + *
+ * @public + */ +export interface ReplicaUpdate { + /** + *

The parameters required for creating a replica on an existing global table.

+ * @public + */ + Create?: CreateReplicaAction | undefined; + /** + *

The name of the existing replica to be removed.

+ * @public + */ + Delete?: DeleteReplicaAction | undefined; +} +/** + * @public + */ +export interface UpdateGlobalTableInput { + /** + *

The global table name.

+ * @public + */ + GlobalTableName: string | undefined; + /** + *

A list of Regions that should be added or removed from the global table.

+ * @public + */ + ReplicaUpdates: ReplicaUpdate[] | undefined; +} +/** + * @public + */ +export interface UpdateGlobalTableOutput { + /** + *

Contains the details of the global table.

+ * @public + */ + GlobalTableDescription?: GlobalTableDescription | undefined; +} +/** + *

The operation tried to access a nonexistent index.

+ * @public + */ +export declare class IndexNotFoundException extends __BaseException { + readonly name: "IndexNotFoundException"; + readonly $fault: "client"; + /** + * @internal + */ + constructor(opts: __ExceptionOptionType); +} +/** + *

Represents the settings of a global secondary index for a global table that will be + * modified.

+ * @public + */ +export interface GlobalTableGlobalSecondaryIndexSettingsUpdate { + /** + *

The name of the global secondary index. The name must be unique among all other + * indexes on this table.

+ * @public + */ + IndexName: string | undefined; + /** + *

The maximum number of writes consumed per second before DynamoDB returns a + * ThrottlingException. + *

+ * @public + */ + ProvisionedWriteCapacityUnits?: number | undefined; + /** + *

Auto scaling settings for managing a global secondary index's write capacity + * units.

+ * @public + */ + ProvisionedWriteCapacityAutoScalingSettingsUpdate?: AutoScalingSettingsUpdate | undefined; +} +/** + *

Represents the settings of a global secondary index for a global table that will be + * modified.

+ * @public + */ +export interface ReplicaGlobalSecondaryIndexSettingsUpdate { + /** + *

The name of the global secondary index. The name must be unique among all other + * indexes on this table.

+ * @public + */ + IndexName: string | undefined; + /** + *

The maximum number of strongly consistent reads consumed per second before DynamoDB + * returns a ThrottlingException.

+ * @public + */ + ProvisionedReadCapacityUnits?: number | undefined; + /** + *

Auto scaling settings for managing a global secondary index replica's read capacity + * units.

+ * @public + */ + ProvisionedReadCapacityAutoScalingSettingsUpdate?: AutoScalingSettingsUpdate | undefined; +} +/** + *

Represents the settings for a global table in a Region that will be modified.

+ * @public + */ +export interface ReplicaSettingsUpdate { + /** + *

The Region of the replica to be added.

+ * @public + */ + RegionName: string | undefined; + /** + *

The maximum number of strongly consistent reads consumed per second before DynamoDB + * returns a ThrottlingException. For more information, see Specifying Read and Write Requirements in the Amazon DynamoDB + * Developer Guide.

+ * @public + */ + ReplicaProvisionedReadCapacityUnits?: number | undefined; + /** + *

Auto scaling settings for managing a global table replica's read capacity + * units.

+ * @public + */ + ReplicaProvisionedReadCapacityAutoScalingSettingsUpdate?: AutoScalingSettingsUpdate | undefined; + /** + *

Represents the settings of a global secondary index for a global table that will be + * modified.

+ * @public + */ + ReplicaGlobalSecondaryIndexSettingsUpdate?: ReplicaGlobalSecondaryIndexSettingsUpdate[] | undefined; + /** + *

Replica-specific table class. If not specified, uses the source table's table + * class.

+ * @public + */ + ReplicaTableClass?: TableClass | undefined; +} +/** + * @public + */ +export interface UpdateGlobalTableSettingsInput { + /** + *

The name of the global table

+ * @public + */ + GlobalTableName: string | undefined; + /** + *

The billing mode of the global table. If GlobalTableBillingMode is not + * specified, the global table defaults to PROVISIONED capacity billing + * mode.

+ *
    + *
  • + *

    + * PROVISIONED - We recommend using PROVISIONED for + * predictable workloads. PROVISIONED sets the billing mode to Provisioned capacity mode.

    + *
  • + *
  • + *

    + * PAY_PER_REQUEST - We recommend using PAY_PER_REQUEST + * for unpredictable workloads. PAY_PER_REQUEST sets the billing mode + * to On-demand capacity mode.

    + *
  • + *
+ * @public + */ + GlobalTableBillingMode?: BillingMode | undefined; + /** + *

The maximum number of writes consumed per second before DynamoDB returns a + * ThrottlingException. + *

+ * @public + */ + GlobalTableProvisionedWriteCapacityUnits?: number | undefined; + /** + *

Auto scaling settings for managing provisioned write capacity for the global + * table.

+ * @public + */ + GlobalTableProvisionedWriteCapacityAutoScalingSettingsUpdate?: AutoScalingSettingsUpdate | undefined; + /** + *

Represents the settings of a global secondary index for a global table that will be + * modified.

+ * @public + */ + GlobalTableGlobalSecondaryIndexSettingsUpdate?: GlobalTableGlobalSecondaryIndexSettingsUpdate[] | undefined; + /** + *

Represents the settings for a global table in a Region that will be modified.

+ * @public + */ + ReplicaSettingsUpdate?: ReplicaSettingsUpdate[] | undefined; +} +/** + * @public + */ +export interface UpdateGlobalTableSettingsOutput { + /** + *

The name of the global table.

+ * @public + */ + GlobalTableName?: string | undefined; + /** + *

The Region-specific settings for the global table.

+ * @public + */ + ReplicaSettings?: ReplicaSettingsDescription[] | undefined; +} +/** + *

Enables updating the configuration for Kinesis Streaming.

+ * @public + */ +export interface UpdateKinesisStreamingConfiguration { + /** + *

Enables updating the precision of Kinesis data stream timestamp.

+ * @public + */ + ApproximateCreationDateTimePrecision?: ApproximateCreationDateTimePrecision | undefined; +} +/** + * @public + */ +export interface UpdateKinesisStreamingDestinationInput { + /** + *

The table name for the Kinesis streaming destination input. You can also provide the + * ARN of the table in this parameter.

+ * @public + */ + TableName: string | undefined; + /** + *

The Amazon Resource Name (ARN) for the Kinesis stream input.

+ * @public + */ + StreamArn: string | undefined; + /** + *

The command to update the Kinesis stream configuration.

+ * @public + */ + UpdateKinesisStreamingConfiguration?: UpdateKinesisStreamingConfiguration | undefined; +} +/** + * @public + */ +export interface UpdateKinesisStreamingDestinationOutput { + /** + *

The table name for the Kinesis streaming destination output.

+ * @public + */ + TableName?: string | undefined; + /** + *

The ARN for the Kinesis stream input.

+ * @public + */ + StreamArn?: string | undefined; + /** + *

The status of the attempt to update the Kinesis streaming destination output.

+ * @public + */ + DestinationStatus?: DestinationStatus | undefined; + /** + *

The command to update the Kinesis streaming destination configuration.

+ * @public + */ + UpdateKinesisStreamingConfiguration?: UpdateKinesisStreamingConfiguration | undefined; +} +/** + *

Represents the new provisioned throughput settings to be applied to a global secondary + * index.

+ * @public + */ +export interface UpdateGlobalSecondaryIndexAction { + /** + *

The name of the global secondary index to be updated.

+ * @public + */ + IndexName: string | undefined; + /** + *

Represents the provisioned throughput settings for the specified global secondary + * index.

+ *

For current minimum and maximum provisioned throughput values, see Service, + * Account, and Table Quotas in the Amazon DynamoDB Developer + * Guide.

+ * @public + */ + ProvisionedThroughput?: ProvisionedThroughput | undefined; + /** + *

Updates the maximum number of read and write units for the specified global secondary + * index. If you use this parameter, you must specify MaxReadRequestUnits, + * MaxWriteRequestUnits, or both.

+ * @public + */ + OnDemandThroughput?: OnDemandThroughput | undefined; + /** + *

Represents the warm throughput value of the new provisioned throughput settings to be + * applied to a global secondary index.

+ * @public + */ + WarmThroughput?: WarmThroughput | undefined; +} +/** + *

Represents one of the following:

+ *
    + *
  • + *

    A new global secondary index to be added to an existing table.

    + *
  • + *
  • + *

    New provisioned throughput parameters for an existing global secondary + * index.

    + *
  • + *
  • + *

    An existing global secondary index to be removed from an existing + * table.

    + *
  • + *
+ * @public + */ +export interface GlobalSecondaryIndexUpdate { + /** + *

The name of an existing global secondary index, along with new provisioned throughput + * settings to be applied to that index.

+ * @public + */ + Update?: UpdateGlobalSecondaryIndexAction | undefined; + /** + *

The parameters required for creating a global secondary index on an existing + * table:

+ *
    + *
  • + *

    + * IndexName + *

    + *
  • + *
  • + *

    + * KeySchema + *

    + *
  • + *
  • + *

    + * AttributeDefinitions + *

    + *
  • + *
  • + *

    + * Projection + *

    + *
  • + *
  • + *

    + * ProvisionedThroughput + *

    + *
  • + *
+ * @public + */ + Create?: CreateGlobalSecondaryIndexAction | undefined; + /** + *

The name of an existing global secondary index to be removed.

+ * @public + */ + Delete?: DeleteGlobalSecondaryIndexAction | undefined; +} +/** + *

Represents a replica to be modified.

+ * @public + */ +export interface UpdateReplicationGroupMemberAction { + /** + *

The Region where the replica exists.

+ * @public + */ + RegionName: string | undefined; + /** + *

The KMS key of the replica that should be used for KMS + * encryption. To specify a key, use its key ID, Amazon Resource Name (ARN), alias name, or + * alias ARN. Note that you should only provide this parameter if the key is different from + * the default DynamoDB KMS key alias/aws/dynamodb.

+ * @public + */ + KMSMasterKeyId?: string | undefined; + /** + *

Replica-specific provisioned throughput. If not specified, uses the source table's + * provisioned throughput settings.

+ * @public + */ + ProvisionedThroughputOverride?: ProvisionedThroughputOverride | undefined; + /** + *

Overrides the maximum on-demand throughput for the replica table.

+ * @public + */ + OnDemandThroughputOverride?: OnDemandThroughputOverride | undefined; + /** + *

Replica-specific global secondary index settings.

+ * @public + */ + GlobalSecondaryIndexes?: ReplicaGlobalSecondaryIndex[] | undefined; + /** + *

Replica-specific table class. If not specified, uses the source table's table + * class.

+ * @public + */ + TableClassOverride?: TableClass | undefined; +} +/** + *

Represents one of the following:

+ *
    + *
  • + *

    A new replica to be added to an existing regional table or global table. This + * request invokes the CreateTableReplica action in the destination + * Region.

    + *
  • + *
  • + *

    New parameters for an existing replica. This request invokes the + * UpdateTable action in the destination Region.

    + *
  • + *
  • + *

    An existing replica to be deleted. The request invokes the + * DeleteTableReplica action in the destination Region, deleting + * the replica and all if its items in the destination Region.

    + *
  • + *
+ * + *

When you manually remove a table or global table replica, you do not automatically + * remove any associated scalable targets, scaling policies, or CloudWatch + * alarms.

+ *
+ * @public + */ +export interface ReplicationGroupUpdate { + /** + *

The parameters required for creating a replica for the table.

+ * @public + */ + Create?: CreateReplicationGroupMemberAction | undefined; + /** + *

The parameters required for updating a replica for the table.

+ * @public + */ + Update?: UpdateReplicationGroupMemberAction | undefined; + /** + *

The parameters required for deleting a replica for the table.

+ * @public + */ + Delete?: DeleteReplicationGroupMemberAction | undefined; +} +/** + *

Represents the input of an UpdateTable operation.

+ * @public + */ +export interface UpdateTableInput { + /** + *

An array of attributes that describe the key schema for the table and indexes. If you + * are adding a new global secondary index to the table, AttributeDefinitions + * must include the key element(s) of the new index.

+ * @public + */ + AttributeDefinitions?: AttributeDefinition[] | undefined; + /** + *

The name of the table to be updated. You can also provide the Amazon Resource Name (ARN) of the table + * in this parameter.

+ * @public + */ + TableName: string | undefined; + /** + *

Controls how you are charged for read and write throughput and how you manage + * capacity. When switching from pay-per-request to provisioned capacity, initial + * provisioned capacity values must be set. The initial provisioned capacity values are + * estimated based on the consumed read and write capacity of your table and global + * secondary indexes over the past 30 minutes.

+ *
    + *
  • + *

    + * PAY_PER_REQUEST - We recommend using PAY_PER_REQUEST + * for most DynamoDB workloads. PAY_PER_REQUEST sets the billing mode + * to On-demand capacity mode.

    + *
  • + *
  • + *

    + * PROVISIONED - We recommend using PROVISIONED for + * steady workloads with predictable growth where capacity requirements can be + * reliably forecasted. PROVISIONED sets the billing mode to Provisioned capacity mode.

    + *
  • + *
+ * @public + */ + BillingMode?: BillingMode | undefined; + /** + *

The new provisioned throughput settings for the specified table or index.

+ * @public + */ + ProvisionedThroughput?: ProvisionedThroughput | undefined; + /** + *

An array of one or more global secondary indexes for the table. For each index in the + * array, you can request one action:

+ *
    + *
  • + *

    + * Create - add a new global secondary index to the table.

    + *
  • + *
  • + *

    + * Update - modify the provisioned throughput settings of an existing + * global secondary index.

    + *
  • + *
  • + *

    + * Delete - remove a global secondary index from the table.

    + *
  • + *
+ *

You can create or delete only one global secondary index per UpdateTable + * operation.

+ *

For more information, see Managing Global + * Secondary Indexes in the Amazon DynamoDB Developer + * Guide.

+ * @public + */ + GlobalSecondaryIndexUpdates?: GlobalSecondaryIndexUpdate[] | undefined; + /** + *

Represents the DynamoDB Streams configuration for the table.

+ * + *

You receive a ValidationException if you try to enable a stream on a + * table that already has a stream, or if you try to disable a stream on a table that + * doesn't have a stream.

+ *
+ * @public + */ + StreamSpecification?: StreamSpecification | undefined; + /** + *

The new server-side encryption settings for the specified table.

+ * @public + */ + SSESpecification?: SSESpecification | undefined; + /** + *

A list of replica update actions (create, delete, or update) for the table.

+ * + *

For global tables, this property only applies to global tables using Version + * 2019.11.21 (Current version).

+ *
+ * @public + */ + ReplicaUpdates?: ReplicationGroupUpdate[] | undefined; + /** + *

The table class of the table to be updated. Valid values are STANDARD and + * STANDARD_INFREQUENT_ACCESS.

+ * @public + */ + TableClass?: TableClass | undefined; + /** + *

Indicates whether deletion protection is to be enabled (true) or disabled (false) on + * the table.

+ * @public + */ + DeletionProtectionEnabled?: boolean | undefined; + /** + *

Specifies the consistency mode for a new global table. This parameter is only valid + * when you create a global table by specifying one or more Create actions in the ReplicaUpdates action list.

+ *

You can specify one of the following consistency modes:

+ *
    + *
  • + *

    + * EVENTUAL: Configures a new global table for multi-Region eventual + * consistency. This is the default consistency mode for global tables.

    + *
  • + *
  • + *

    + * STRONG: Configures a new global table for multi-Region strong + * consistency (preview).

    + * + *

    Multi-Region strong consistency (MRSC) is a new DynamoDB global + * tables capability currently available in preview mode. For more information, + * see Global tables multi-Region strong consistency.

    + *
    + *
  • + *
+ *

If you don't specify this parameter, the global table consistency mode defaults to + * EVENTUAL.

+ * @public + */ + MultiRegionConsistency?: MultiRegionConsistency | undefined; + /** + *

Updates the maximum number of read and write units for the specified table in + * on-demand capacity mode. If you use this parameter, you must specify + * MaxReadRequestUnits, MaxWriteRequestUnits, or both.

+ * @public + */ + OnDemandThroughput?: OnDemandThroughput | undefined; + /** + *

Represents the warm throughput (in read units per second and write units per second) + * for updating a table.

+ * @public + */ + WarmThroughput?: WarmThroughput | undefined; +} +/** + *

Represents the output of an UpdateTable operation.

+ * @public + */ +export interface UpdateTableOutput { + /** + *

Represents the properties of the table.

+ * @public + */ + TableDescription?: TableDescription | undefined; +} +/** + *

Represents the auto scaling settings of a global secondary index for a global table + * that will be modified.

+ * @public + */ +export interface GlobalSecondaryIndexAutoScalingUpdate { + /** + *

The name of the global secondary index.

+ * @public + */ + IndexName?: string | undefined; + /** + *

Represents the auto scaling settings to be modified for a global table or global + * secondary index.

+ * @public + */ + ProvisionedWriteCapacityAutoScalingUpdate?: AutoScalingSettingsUpdate | undefined; +} +/** + *

Represents the auto scaling settings of a global secondary index for a replica that + * will be modified.

+ * @public + */ +export interface ReplicaGlobalSecondaryIndexAutoScalingUpdate { + /** + *

The name of the global secondary index.

+ * @public + */ + IndexName?: string | undefined; + /** + *

Represents the auto scaling settings to be modified for a global table or global + * secondary index.

+ * @public + */ + ProvisionedReadCapacityAutoScalingUpdate?: AutoScalingSettingsUpdate | undefined; +} +/** + *

Represents the auto scaling settings of a replica that will be modified.

+ * @public + */ +export interface ReplicaAutoScalingUpdate { + /** + *

The Region where the replica exists.

+ * @public + */ + RegionName: string | undefined; + /** + *

Represents the auto scaling settings of global secondary indexes that will be + * modified.

+ * @public + */ + ReplicaGlobalSecondaryIndexUpdates?: ReplicaGlobalSecondaryIndexAutoScalingUpdate[] | undefined; + /** + *

Represents the auto scaling settings to be modified for a global table or global + * secondary index.

+ * @public + */ + ReplicaProvisionedReadCapacityAutoScalingUpdate?: AutoScalingSettingsUpdate | undefined; +} +/** + * @public + */ +export interface UpdateTableReplicaAutoScalingInput { + /** + *

Represents the auto scaling settings of the global secondary indexes of the replica to + * be updated.

+ * @public + */ + GlobalSecondaryIndexUpdates?: GlobalSecondaryIndexAutoScalingUpdate[] | undefined; + /** + *

The name of the global table to be updated. You can also provide the Amazon Resource Name (ARN) of the + * table in this parameter.

+ * @public + */ + TableName: string | undefined; + /** + *

Represents the auto scaling settings to be modified for a global table or global + * secondary index.

+ * @public + */ + ProvisionedWriteCapacityAutoScalingUpdate?: AutoScalingSettingsUpdate | undefined; + /** + *

Represents the auto scaling settings of replicas of the table that will be + * modified.

+ * @public + */ + ReplicaUpdates?: ReplicaAutoScalingUpdate[] | undefined; +} +/** + * @public + */ +export interface UpdateTableReplicaAutoScalingOutput { + /** + *

Returns information about the auto scaling settings of a table with replicas.

+ * @public + */ + TableAutoScalingDescription?: TableAutoScalingDescription | undefined; +} +/** + *

Represents the settings used to enable or disable Time to Live (TTL) for the specified + * table.

+ * @public + */ +export interface TimeToLiveSpecification { + /** + *

Indicates whether TTL is to be enabled (true) or disabled (false) on the table.

+ * @public + */ + Enabled: boolean | undefined; + /** + *

The name of the TTL attribute used to store the expiration time for items in the + * table.

+ * @public + */ + AttributeName: string | undefined; +} +/** + *

Represents the input of an UpdateTimeToLive operation.

+ * @public + */ +export interface UpdateTimeToLiveInput { + /** + *

The name of the table to be configured. You can also provide the Amazon Resource Name (ARN) of the + * table in this parameter.

+ * @public + */ + TableName: string | undefined; + /** + *

Represents the settings used to enable or disable Time to Live for the specified + * table.

+ * @public + */ + TimeToLiveSpecification: TimeToLiveSpecification | undefined; +} +/** + * @public + */ +export interface UpdateTimeToLiveOutput { + /** + *

Represents the output of an UpdateTimeToLive operation.

+ * @public + */ + TimeToLiveSpecification?: TimeToLiveSpecification | undefined; +} +/** + *

Represents the data for an attribute.

+ *

Each attribute value is described as a name-value pair. The name is the data type, and + * the value is the data itself.

+ *

For more information, see Data Types in the Amazon DynamoDB Developer + * Guide.

+ * @public + */ +export type AttributeValue = AttributeValue.BMember | AttributeValue.BOOLMember | AttributeValue.BSMember | AttributeValue.LMember | AttributeValue.MMember | AttributeValue.NMember | AttributeValue.NSMember | AttributeValue.NULLMember | AttributeValue.SMember | AttributeValue.SSMember | AttributeValue.$UnknownMember; +/** + * @public + */ +export declare namespace AttributeValue { + /** + *

An attribute of type String. For example:

+ *

+ * "S": "Hello" + *

+ * @public + */ + interface SMember { + S: string; + N?: never; + B?: never; + SS?: never; + NS?: never; + BS?: never; + M?: never; + L?: never; + NULL?: never; + BOOL?: never; + $unknown?: never; + } + /** + *

An attribute of type Number. For example:

+ *

+ * "N": "123.45" + *

+ *

Numbers are sent across the network to DynamoDB as strings, to maximize compatibility + * across languages and libraries. However, DynamoDB treats them as number type attributes + * for mathematical operations.

+ * @public + */ + interface NMember { + S?: never; + N: string; + B?: never; + SS?: never; + NS?: never; + BS?: never; + M?: never; + L?: never; + NULL?: never; + BOOL?: never; + $unknown?: never; + } + /** + *

An attribute of type Binary. For example:

+ *

+ * "B": "dGhpcyB0ZXh0IGlzIGJhc2U2NC1lbmNvZGVk" + *

+ * @public + */ + interface BMember { + S?: never; + N?: never; + B: Uint8Array; + SS?: never; + NS?: never; + BS?: never; + M?: never; + L?: never; + NULL?: never; + BOOL?: never; + $unknown?: never; + } + /** + *

An attribute of type String Set. For example:

+ *

+ * "SS": ["Giraffe", "Hippo" ,"Zebra"] + *

+ * @public + */ + interface SSMember { + S?: never; + N?: never; + B?: never; + SS: string[]; + NS?: never; + BS?: never; + M?: never; + L?: never; + NULL?: never; + BOOL?: never; + $unknown?: never; + } + /** + *

An attribute of type Number Set. For example:

+ *

+ * "NS": ["42.2", "-19", "7.5", "3.14"] + *

+ *

Numbers are sent across the network to DynamoDB as strings, to maximize compatibility + * across languages and libraries. However, DynamoDB treats them as number type attributes + * for mathematical operations.

+ * @public + */ + interface NSMember { + S?: never; + N?: never; + B?: never; + SS?: never; + NS: string[]; + BS?: never; + M?: never; + L?: never; + NULL?: never; + BOOL?: never; + $unknown?: never; + } + /** + *

An attribute of type Binary Set. For example:

+ *

+ * "BS": ["U3Vubnk=", "UmFpbnk=", "U25vd3k="] + *

+ * @public + */ + interface BSMember { + S?: never; + N?: never; + B?: never; + SS?: never; + NS?: never; + BS: Uint8Array[]; + M?: never; + L?: never; + NULL?: never; + BOOL?: never; + $unknown?: never; + } + /** + *

An attribute of type Map. For example:

+ *

+ * "M": \{"Name": \{"S": "Joe"\}, "Age": \{"N": "35"\}\} + *

+ * @public + */ + interface MMember { + S?: never; + N?: never; + B?: never; + SS?: never; + NS?: never; + BS?: never; + M: Record; + L?: never; + NULL?: never; + BOOL?: never; + $unknown?: never; + } + /** + *

An attribute of type List. For example:

+ *

+ * "L": [ \{"S": "Cookies"\} , \{"S": "Coffee"\}, \{"N": "3.14159"\}] + *

+ * @public + */ + interface LMember { + S?: never; + N?: never; + B?: never; + SS?: never; + NS?: never; + BS?: never; + M?: never; + L: AttributeValue[]; + NULL?: never; + BOOL?: never; + $unknown?: never; + } + /** + *

An attribute of type Null. For example:

+ *

+ * "NULL": true + *

+ * @public + */ + interface NULLMember { + S?: never; + N?: never; + B?: never; + SS?: never; + NS?: never; + BS?: never; + M?: never; + L?: never; + NULL: boolean; + BOOL?: never; + $unknown?: never; + } + /** + *

An attribute of type Boolean. For example:

+ *

+ * "BOOL": true + *

+ * @public + */ + interface BOOLMember { + S?: never; + N?: never; + B?: never; + SS?: never; + NS?: never; + BS?: never; + M?: never; + L?: never; + NULL?: never; + BOOL: boolean; + $unknown?: never; + } + /** + * @public + */ + interface $UnknownMember { + S?: never; + N?: never; + B?: never; + SS?: never; + NS?: never; + BS?: never; + M?: never; + L?: never; + NULL?: never; + BOOL?: never; + $unknown: [string, any]; + } + interface Visitor { + S: (value: string) => T; + N: (value: string) => T; + B: (value: Uint8Array) => T; + SS: (value: string[]) => T; + NS: (value: string[]) => T; + BS: (value: Uint8Array[]) => T; + M: (value: Record) => T; + L: (value: AttributeValue[]) => T; + NULL: (value: boolean) => T; + BOOL: (value: boolean) => T; + _: (name: string, value: any) => T; + } + const visit: (value: AttributeValue, visitor: Visitor) => T; +} +/** + *

For the UpdateItem operation, represents the attributes to be modified, + * the action to perform on each, and the new value for each.

+ * + *

You cannot use UpdateItem to update any primary key attributes. + * Instead, you will need to delete the item, and then use PutItem to + * create a new item with new attributes.

+ *
+ *

Attribute values cannot be null; string and binary type attributes must have lengths + * greater than zero; and set type attributes must not be empty. Requests with empty values + * will be rejected with a ValidationException exception.

+ * @public + */ +export interface AttributeValueUpdate { + /** + *

Represents the data for an attribute.

+ *

Each attribute value is described as a name-value pair. The name is the data type, and + * the value is the data itself.

+ *

For more information, see Data Types in the Amazon DynamoDB Developer Guide. + *

+ * @public + */ + Value?: AttributeValue | undefined; + /** + *

Specifies how to perform the update. Valid values are PUT (default), + * DELETE, and ADD. The behavior depends on whether the + * specified primary key already exists in the table.

+ *

+ * If an item with the specified Key is found in + * the table: + *

+ *
    + *
  • + *

    + * PUT - Adds the specified attribute to the item. If the attribute + * already exists, it is replaced by the new value.

    + *
  • + *
  • + *

    + * DELETE - If no value is specified, the attribute and its value are + * removed from the item. The data type of the specified value must match the + * existing value's data type.

    + *

    If a set of values is specified, then those values are + * subtracted from the old set. For example, if the attribute value was the set + * [a,b,c] and the DELETE action specified + * [a,c], then the final attribute value would be + * [b]. Specifying an empty set is an error.

    + *
  • + *
  • + *

    + * ADD - If the attribute does not already exist, then the attribute + * and its values are added to the item. If the attribute does exist, then the + * behavior of ADD depends on the data type of the attribute:

    + *
      + *
    • + *

      If the existing attribute is a number, and if Value is + * also a number, then the Value is mathematically added to + * the existing attribute. If Value is a negative number, then + * it is subtracted from the existing attribute.

      + * + *

      If you use ADD to increment or decrement a number + * value for an item that doesn't exist before the update, DynamoDB + * uses 0 as the initial value.

      + *

      In addition, if you use ADD to update an existing + * item, and intend to increment or decrement an attribute value which + * does not yet exist, DynamoDB uses 0 as the initial + * value. For example, suppose that the item you want to update does + * not yet have an attribute named itemcount, but + * you decide to ADD the number 3 to this + * attribute anyway, even though it currently does not exist. DynamoDB + * will create the itemcount attribute, set its + * initial value to 0, and finally add 3 to + * it. The result will be a new itemcount + * attribute in the item, with a value of 3.

      + *
      + *
    • + *
    • + *

      If the existing data type is a set, and if the Value is + * also a set, then the Value is added to the existing set. + * (This is a set operation, not mathematical + * addition.) For example, if the attribute value was the set + * [1,2], and the ADD action specified + * [3], then the final attribute value would be + * [1,2,3]. An error occurs if an Add action is specified + * for a set attribute and the attribute type specified does not match the + * existing set type.

      + *

      Both sets must have the same primitive data type. For example, if the + * existing data type is a set of strings, the Value must also + * be a set of strings. The same holds true for number sets and binary + * sets.

      + *
    • + *
    + *

    This action is only valid for an existing attribute whose data type is number + * or is a set. Do not use ADD for any other data types.

    + *
  • + *
+ *

+ * If no item with the specified Key is + * found: + *

+ *
    + *
  • + *

    + * PUT - DynamoDB creates a new item with the specified primary key, + * and then adds the attribute.

    + *
  • + *
  • + *

    + * DELETE - Nothing happens; there is no attribute to delete.

    + *
  • + *
  • + *

    + * ADD - DynamoDB creates a new item with the supplied primary key and + * number (or set) for the attribute value. The only data types allowed are number, + * number set, string set or binary set.

    + *
  • + *
+ * @public + */ + Action?: AttributeAction | undefined; +} +/** + *

An error associated with a statement in a PartiQL batch that was run.

+ * @public + */ +export interface BatchStatementError { + /** + *

The error code associated with the failed PartiQL batch statement.

+ * @public + */ + Code?: BatchStatementErrorCodeEnum | undefined; + /** + *

The error message associated with the PartiQL batch response.

+ * @public + */ + Message?: string | undefined; + /** + *

The item which caused the condition check to fail. This will be set if + * ReturnValuesOnConditionCheckFailure is specified as ALL_OLD.

+ * @public + */ + Item?: Record | undefined; +} +/** + *

A PartiQL batch statement request.

+ * @public + */ +export interface BatchStatementRequest { + /** + *

A valid PartiQL statement.

+ * @public + */ + Statement: string | undefined; + /** + *

The parameters associated with a PartiQL statement in the batch request.

+ * @public + */ + Parameters?: AttributeValue[] | undefined; + /** + *

The read consistency of the PartiQL batch request.

+ * @public + */ + ConsistentRead?: boolean | undefined; + /** + *

An optional parameter that returns the item attributes for a PartiQL batch request + * operation that failed a condition check.

+ *

There is no additional cost associated with requesting a return value aside from the + * small network and processing overhead of receiving a larger response. No read capacity + * units are consumed.

+ * @public + */ + ReturnValuesOnConditionCheckFailure?: ReturnValuesOnConditionCheckFailure | undefined; +} +/** + *

An ordered list of errors for each item in the request which caused the transaction to + * get cancelled. The values of the list are ordered according to the ordering of the + * TransactWriteItems request parameter. If no error occurred for the + * associated item an error with a Null code and Null message will be present.

+ * @public + */ +export interface CancellationReason { + /** + *

Item in the request which caused the transaction to get cancelled.

+ * @public + */ + Item?: Record | undefined; + /** + *

Status code for the result of the cancelled transaction.

+ * @public + */ + Code?: string | undefined; + /** + *

Cancellation reason message description.

+ * @public + */ + Message?: string | undefined; +} +/** + *

Represents the selection criteria for a Query or Scan + * operation:

+ *
    + *
  • + *

    For a Query operation, Condition is used for + * specifying the KeyConditions to use when querying a table or an + * index. For KeyConditions, only the following comparison operators + * are supported:

    + *

    + * EQ | LE | LT | GE | GT | BEGINS_WITH | BETWEEN + *

    + *

    + * Condition is also used in a QueryFilter, which + * evaluates the query results and returns only the desired values.

    + *
  • + *
  • + *

    For a Scan operation, Condition is used in a + * ScanFilter, which evaluates the scan results and returns only + * the desired values.

    + *
  • + *
+ * @public + */ +export interface Condition { + /** + *

One or more values to evaluate against the supplied attribute. The number of values in + * the list depends on the ComparisonOperator being used.

+ *

For type Number, value comparisons are numeric.

+ *

String value comparisons for greater than, equals, or less than are based on ASCII + * character code values. For example, a is greater than A, and + * a is greater than B. For a list of code values, see http://en.wikipedia.org/wiki/ASCII#ASCII_printable_characters.

+ *

For Binary, DynamoDB treats each byte of the binary data as unsigned when it + * compares binary values.

+ * @public + */ + AttributeValueList?: AttributeValue[] | undefined; + /** + *

A comparator for evaluating attributes. For example, equals, greater than, less than, + * etc.

+ *

The following comparison operators are available:

+ *

+ * EQ | NE | LE | LT | GE | GT | NOT_NULL | NULL | CONTAINS | NOT_CONTAINS | + * BEGINS_WITH | IN | BETWEEN + *

+ *

The following are descriptions of each comparison operator.

+ *
    + *
  • + *

    + * EQ : Equal. EQ is supported for all data types, + * including lists and maps.

    + *

    + * AttributeValueList can contain only one AttributeValue + * element of type String, Number, Binary, String Set, Number Set, or Binary Set. + * If an item contains an AttributeValue element of a different type + * than the one provided in the request, the value does not match. For example, + * \{"S":"6"\} does not equal \{"N":"6"\}. Also, + * \{"N":"6"\} does not equal \{"NS":["6", "2", + * "1"]\}.

    + *

    + *
  • + *
  • + *

    + * NE : Not equal. NE is supported for all data types, + * including lists and maps.

    + *

    + * AttributeValueList can contain only one AttributeValue + * of type String, Number, Binary, String Set, Number Set, or Binary Set. If an + * item contains an AttributeValue of a different type than the one + * provided in the request, the value does not match. For example, + * \{"S":"6"\} does not equal \{"N":"6"\}. Also, + * \{"N":"6"\} does not equal \{"NS":["6", "2", + * "1"]\}.

    + *

    + *
  • + *
  • + *

    + * LE : Less than or equal.

    + *

    + * AttributeValueList can contain only one AttributeValue + * element of type String, Number, or Binary (not a set type). If an item contains + * an AttributeValue element of a different type than the one provided + * in the request, the value does not match. For example, \{"S":"6"\} + * does not equal \{"N":"6"\}. Also, \{"N":"6"\} does not + * compare to \{"NS":["6", "2", "1"]\}.

    + *

    + *
  • + *
  • + *

    + * LT : Less than.

    + *

    + * AttributeValueList can contain only one AttributeValue + * of type String, Number, or Binary (not a set type). If an item contains an + * AttributeValue element of a different type than the one + * provided in the request, the value does not match. For example, + * \{"S":"6"\} does not equal \{"N":"6"\}. Also, + * \{"N":"6"\} does not compare to \{"NS":["6", "2", + * "1"]\}.

    + *

    + *
  • + *
  • + *

    + * GE : Greater than or equal.

    + *

    + * AttributeValueList can contain only one AttributeValue + * element of type String, Number, or Binary (not a set type). If an item contains + * an AttributeValue element of a different type than the one provided + * in the request, the value does not match. For example, \{"S":"6"\} + * does not equal \{"N":"6"\}. Also, \{"N":"6"\} does not + * compare to \{"NS":["6", "2", "1"]\}.

    + *

    + *
  • + *
  • + *

    + * GT : Greater than.

    + *

    + * AttributeValueList can contain only one AttributeValue + * element of type String, Number, or Binary (not a set type). If an item contains + * an AttributeValue element of a different type than the one provided + * in the request, the value does not match. For example, \{"S":"6"\} + * does not equal \{"N":"6"\}. Also, \{"N":"6"\} does not + * compare to \{"NS":["6", "2", "1"]\}.

    + *

    + *
  • + *
  • + *

    + * NOT_NULL : The attribute exists. NOT_NULL is supported + * for all data types, including lists and maps.

    + * + *

    This operator tests for the existence of an attribute, not its data type. + * If the data type of attribute "a" is null, and you evaluate it + * using NOT_NULL, the result is a Boolean true. This + * result is because the attribute "a" exists; its data type is + * not relevant to the NOT_NULL comparison operator.

    + *
    + *
  • + *
  • + *

    + * NULL : The attribute does not exist. NULL is supported + * for all data types, including lists and maps.

    + * + *

    This operator tests for the nonexistence of an attribute, not its data + * type. If the data type of attribute "a" is null, and you + * evaluate it using NULL, the result is a Boolean + * false. This is because the attribute "a" + * exists; its data type is not relevant to the NULL comparison + * operator.

    + *
    + *
  • + *
  • + *

    + * CONTAINS : Checks for a subsequence, or value in a set.

    + *

    + * AttributeValueList can contain only one AttributeValue + * element of type String, Number, or Binary (not a set type). If the target + * attribute of the comparison is of type String, then the operator checks for a + * substring match. If the target attribute of the comparison is of type Binary, + * then the operator looks for a subsequence of the target that matches the input. + * If the target attribute of the comparison is a set ("SS", + * "NS", or "BS"), then the operator evaluates to + * true if it finds an exact match with any member of the set.

    + *

    CONTAINS is supported for lists: When evaluating "a CONTAINS b", + * "a" can be a list; however, "b" cannot be a set, a + * map, or a list.

    + *
  • + *
  • + *

    + * NOT_CONTAINS : Checks for absence of a subsequence, or absence of a + * value in a set.

    + *

    + * AttributeValueList can contain only one AttributeValue + * element of type String, Number, or Binary (not a set type). If the target + * attribute of the comparison is a String, then the operator checks for the + * absence of a substring match. If the target attribute of the comparison is + * Binary, then the operator checks for the absence of a subsequence of the target + * that matches the input. If the target attribute of the comparison is a set + * ("SS", "NS", or "BS"), then the + * operator evaluates to true if it does not find an exact + * match with any member of the set.

    + *

    NOT_CONTAINS is supported for lists: When evaluating "a NOT CONTAINS + * b", "a" can be a list; however, "b" cannot + * be a set, a map, or a list.

    + *
  • + *
  • + *

    + * BEGINS_WITH : Checks for a prefix.

    + *

    + * AttributeValueList can contain only one AttributeValue + * of type String or Binary (not a Number or a set type). The target attribute of + * the comparison must be of type String or Binary (not a Number or a set + * type).

    + *

    + *
  • + *
  • + *

    + * IN : Checks for matching elements in a list.

    + *

    + * AttributeValueList can contain one or more + * AttributeValue elements of type String, Number, or Binary. + * These attributes are compared against an existing attribute of an item. If any + * elements of the input are equal to the item attribute, the expression evaluates + * to true.

    + *
  • + *
  • + *

    + * BETWEEN : Greater than or equal to the first value, and less than + * or equal to the second value.

    + *

    + * AttributeValueList must contain two AttributeValue + * elements of the same type, either String, Number, or Binary (not a set type). A + * target attribute matches if the target value is greater than, or equal to, the + * first element and less than, or equal to, the second element. If an item + * contains an AttributeValue element of a different type than the one + * provided in the request, the value does not match. For example, + * \{"S":"6"\} does not compare to \{"N":"6"\}. Also, + * \{"N":"6"\} does not compare to \{"NS":["6", "2", + * "1"]\} + *

    + *
  • + *
+ *

For usage examples of AttributeValueList and + * ComparisonOperator, see Legacy + * Conditional Parameters in the Amazon DynamoDB Developer + * Guide.

+ * @public + */ + ComparisonOperator: ComparisonOperator | undefined; +} +/** + *

A condition specified in the operation failed to be evaluated.

+ * @public + */ +export declare class ConditionalCheckFailedException extends __BaseException { + readonly name: "ConditionalCheckFailedException"; + readonly $fault: "client"; + /** + *

Item which caused the ConditionalCheckFailedException.

+ * @public + */ + Item?: Record | undefined; + /** + * @internal + */ + constructor(opts: __ExceptionOptionType); +} +/** + *

Represents a request to perform a DeleteItem operation on an item.

+ * @public + */ +export interface DeleteRequest { + /** + *

A map of attribute name to attribute values, representing the primary key of the item + * to delete. All of the table's primary key attributes must be specified, and their data + * types must match those of the table's key schema.

+ * @public + */ + Key: Record | undefined; +} +/** + * @public + */ +export interface ExecuteStatementInput { + /** + *

The PartiQL statement representing the operation to run.

+ * @public + */ + Statement: string | undefined; + /** + *

The parameters for the PartiQL statement, if any.

+ * @public + */ + Parameters?: AttributeValue[] | undefined; + /** + *

The consistency of a read operation. If set to true, then a strongly + * consistent read is used; otherwise, an eventually consistent read is used.

+ * @public + */ + ConsistentRead?: boolean | undefined; + /** + *

Set this value to get remaining results, if NextToken was returned in the + * statement response.

+ * @public + */ + NextToken?: string | undefined; + /** + *

Determines the level of detail about either provisioned or on-demand throughput + * consumption that is returned in the response:

+ *
    + *
  • + *

    + * INDEXES - The response includes the aggregate + * ConsumedCapacity for the operation, together with + * ConsumedCapacity for each table and secondary index that was + * accessed.

    + *

    Note that some operations, such as GetItem and + * BatchGetItem, do not access any indexes at all. In these cases, + * specifying INDEXES will only return ConsumedCapacity + * information for table(s).

    + *
  • + *
  • + *

    + * TOTAL - The response includes only the aggregate + * ConsumedCapacity for the operation.

    + *
  • + *
  • + *

    + * NONE - No ConsumedCapacity details are included in the + * response.

    + *
  • + *
+ * @public + */ + ReturnConsumedCapacity?: ReturnConsumedCapacity | undefined; + /** + *

The maximum number of items to evaluate (not necessarily the number of matching + * items). If DynamoDB processes the number of items up to the limit while processing the + * results, it stops the operation and returns the matching values up to that point, along + * with a key in LastEvaluatedKey to apply in a subsequent operation so you + * can pick up where you left off. Also, if the processed dataset size exceeds 1 MB before + * DynamoDB reaches this limit, it stops the operation and returns the matching values up + * to the limit, and a key in LastEvaluatedKey to apply in a subsequent + * operation to continue the operation.

+ * @public + */ + Limit?: number | undefined; + /** + *

An optional parameter that returns the item attributes for an + * ExecuteStatement operation that failed a condition check.

+ *

There is no additional cost associated with requesting a return value aside from the + * small network and processing overhead of receiving a larger response. No read capacity + * units are consumed.

+ * @public + */ + ReturnValuesOnConditionCheckFailure?: ReturnValuesOnConditionCheckFailure | undefined; +} +/** + *

Specifies an item and related attribute values to retrieve in a + * TransactGetItem object.

+ * @public + */ +export interface Get { + /** + *

A map of attribute names to AttributeValue objects that specifies the + * primary key of the item to retrieve.

+ * @public + */ + Key: Record | undefined; + /** + *

The name of the table from which to retrieve the specified item. You can also provide + * the Amazon Resource Name (ARN) of the table in this parameter.

+ * @public + */ + TableName: string | undefined; + /** + *

A string that identifies one or more attributes of the specified item to retrieve from + * the table. The attributes in the expression must be separated by commas. If no attribute + * names are specified, then all attributes of the specified item are returned. If any of + * the requested attributes are not found, they do not appear in the result.

+ * @public + */ + ProjectionExpression?: string | undefined; + /** + *

One or more substitution tokens for attribute names in the ProjectionExpression + * parameter.

+ * @public + */ + ExpressionAttributeNames?: Record | undefined; +} +/** + *

Represents the input of a GetItem operation.

+ * @public + */ +export interface GetItemInput { + /** + *

The name of the table containing the requested item. You can also provide the + * Amazon Resource Name (ARN) of the table in this parameter.

+ * @public + */ + TableName: string | undefined; + /** + *

A map of attribute names to AttributeValue objects, representing the + * primary key of the item to retrieve.

+ *

For the primary key, you must provide all of the attributes. For example, with a + * simple primary key, you only need to provide a value for the partition key. For a + * composite primary key, you must provide values for both the partition key and the sort + * key.

+ * @public + */ + Key: Record | undefined; + /** + *

This is a legacy parameter. Use ProjectionExpression instead. For more + * information, see AttributesToGet in the Amazon DynamoDB Developer + * Guide.

+ * @public + */ + AttributesToGet?: string[] | undefined; + /** + *

Determines the read consistency model: If set to true, then the operation + * uses strongly consistent reads; otherwise, the operation uses eventually consistent + * reads.

+ * @public + */ + ConsistentRead?: boolean | undefined; + /** + *

Determines the level of detail about either provisioned or on-demand throughput + * consumption that is returned in the response:

+ *
    + *
  • + *

    + * INDEXES - The response includes the aggregate + * ConsumedCapacity for the operation, together with + * ConsumedCapacity for each table and secondary index that was + * accessed.

    + *

    Note that some operations, such as GetItem and + * BatchGetItem, do not access any indexes at all. In these cases, + * specifying INDEXES will only return ConsumedCapacity + * information for table(s).

    + *
  • + *
  • + *

    + * TOTAL - The response includes only the aggregate + * ConsumedCapacity for the operation.

    + *
  • + *
  • + *

    + * NONE - No ConsumedCapacity details are included in the + * response.

    + *
  • + *
+ * @public + */ + ReturnConsumedCapacity?: ReturnConsumedCapacity | undefined; + /** + *

A string that identifies one or more attributes to retrieve from the table. These + * attributes can include scalars, sets, or elements of a JSON document. The attributes in + * the expression must be separated by commas.

+ *

If no attribute names are specified, then all attributes are returned. If any of the + * requested attributes are not found, they do not appear in the result.

+ *

For more information, see Specifying Item Attributes in the Amazon DynamoDB Developer + * Guide.

+ * @public + */ + ProjectionExpression?: string | undefined; + /** + *

One or more substitution tokens for attribute names in an expression. The following + * are some use cases for using ExpressionAttributeNames:

+ *
    + *
  • + *

    To access an attribute whose name conflicts with a DynamoDB reserved + * word.

    + *
  • + *
  • + *

    To create a placeholder for repeating occurrences of an attribute name in an + * expression.

    + *
  • + *
  • + *

    To prevent special characters in an attribute name from being misinterpreted + * in an expression.

    + *
  • + *
+ *

Use the # character in an expression to dereference + * an attribute name. For example, consider the following attribute name:

+ *
    + *
  • + *

    + * Percentile + *

    + *
  • + *
+ *

The name of this attribute conflicts with a reserved word, so it cannot be used + * directly in an expression. (For the complete list of reserved words, see Reserved Words in the Amazon DynamoDB Developer + * Guide). To work around this, you could specify the following for + * ExpressionAttributeNames:

+ *
    + *
  • + *

    + * \{"#P":"Percentile"\} + *

    + *
  • + *
+ *

You could then use this substitution in an expression, as in this example:

+ *
    + *
  • + *

    + * #P = :val + *

    + *
  • + *
+ * + *

Tokens that begin with the : character are + * expression attribute values, which are placeholders for the + * actual value at runtime.

+ *
+ *

For more information on expression attribute names, see Specifying Item Attributes in the Amazon DynamoDB Developer + * Guide.

+ * @public + */ + ExpressionAttributeNames?: Record | undefined; +} +/** + *

Represents the output of a GetItem operation.

+ * @public + */ +export interface GetItemOutput { + /** + *

A map of attribute names to AttributeValue objects, as specified by + * ProjectionExpression.

+ * @public + */ + Item?: Record | undefined; + /** + *

The capacity units consumed by the GetItem operation. The data returned + * includes the total provisioned throughput consumed, along with statistics for the table + * and any indexes involved in the operation. ConsumedCapacity is only + * returned if the ReturnConsumedCapacity parameter was specified. For more + * information, see Capacity unit consumption for read operations in the Amazon + * DynamoDB Developer Guide.

+ * @public + */ + ConsumedCapacity?: ConsumedCapacity | undefined; +} +/** + *

Information about item collections, if any, that were affected by the operation. + * ItemCollectionMetrics is only returned if the request asked for it. If + * the table does not have any local secondary indexes, this information is not returned in + * the response.

+ * @public + */ +export interface ItemCollectionMetrics { + /** + *

The partition key value of the item collection. This value is the same as the + * partition key value of the item.

+ * @public + */ + ItemCollectionKey?: Record | undefined; + /** + *

An estimate of item collection size, in gigabytes. This value is a two-element array + * containing a lower bound and an upper bound for the estimate. The estimate includes the + * size of all the items in the table, plus the size of all attributes projected into all + * of the local secondary indexes on that table. Use this estimate to measure whether a + * local secondary index is approaching its size limit.

+ *

The estimate is subject to change over time; therefore, do not rely on the precision + * or accuracy of the estimate.

+ * @public + */ + SizeEstimateRangeGB?: number[] | undefined; +} +/** + *

Details for the requested item.

+ * @public + */ +export interface ItemResponse { + /** + *

Map of attribute data consisting of the data type and attribute value.

+ * @public + */ + Item?: Record | undefined; +} +/** + *

Represents a PartiQL statement that uses parameters.

+ * @public + */ +export interface ParameterizedStatement { + /** + *

A PartiQL statement that uses parameters.

+ * @public + */ + Statement: string | undefined; + /** + *

The parameter values.

+ * @public + */ + Parameters?: AttributeValue[] | undefined; + /** + *

An optional parameter that returns the item attributes for a PartiQL + * ParameterizedStatement operation that failed a condition check.

+ *

There is no additional cost associated with requesting a return value aside from the + * small network and processing overhead of receiving a larger response. No read capacity + * units are consumed.

+ * @public + */ + ReturnValuesOnConditionCheckFailure?: ReturnValuesOnConditionCheckFailure | undefined; +} +/** + *

Represents a request to perform a PutItem operation on an item.

+ * @public + */ +export interface PutRequest { + /** + *

A map of attribute name to attribute values, representing the primary key of an item + * to be processed by PutItem. All of the table's primary key attributes must + * be specified, and their data types must match those of the table's key schema. If any + * attributes are present in the item that are part of an index key schema for the table, + * their types must match the index key schema.

+ * @public + */ + Item: Record | undefined; +} +/** + *

Represents a set of primary keys and, for each key, the attributes to retrieve from + * the table.

+ *

For each primary key, you must provide all of the key attributes. + * For example, with a simple primary key, you only need to provide the partition key. For + * a composite primary key, you must provide both the partition key + * and the sort key.

+ * @public + */ +export interface KeysAndAttributes { + /** + *

The primary key attribute values that define the items and the attributes associated + * with the items.

+ * @public + */ + Keys: Record[] | undefined; + /** + *

This is a legacy parameter. Use ProjectionExpression instead. For more + * information, see Legacy + * Conditional Parameters in the Amazon DynamoDB Developer + * Guide.

+ * @public + */ + AttributesToGet?: string[] | undefined; + /** + *

The consistency of a read operation. If set to true, then a strongly + * consistent read is used; otherwise, an eventually consistent read is used.

+ * @public + */ + ConsistentRead?: boolean | undefined; + /** + *

A string that identifies one or more attributes to retrieve from the table. These + * attributes can include scalars, sets, or elements of a JSON document. The attributes in + * the ProjectionExpression must be separated by commas.

+ *

If no attribute names are specified, then all attributes will be returned. If any of + * the requested attributes are not found, they will not appear in the result.

+ *

For more information, see Accessing Item Attributes in the Amazon DynamoDB Developer + * Guide.

+ * @public + */ + ProjectionExpression?: string | undefined; + /** + *

One or more substitution tokens for attribute names in an expression. The following + * are some use cases for using ExpressionAttributeNames:

+ *
    + *
  • + *

    To access an attribute whose name conflicts with a DynamoDB reserved + * word.

    + *
  • + *
  • + *

    To create a placeholder for repeating occurrences of an attribute name in an + * expression.

    + *
  • + *
  • + *

    To prevent special characters in an attribute name from being misinterpreted + * in an expression.

    + *
  • + *
+ *

Use the # character in an expression to dereference + * an attribute name. For example, consider the following attribute name:

+ *
    + *
  • + *

    + * Percentile + *

    + *
  • + *
+ *

The name of this attribute conflicts with a reserved word, so it cannot be used + * directly in an expression. (For the complete list of reserved words, see Reserved Words in the Amazon DynamoDB Developer + * Guide). To work around this, you could specify the following for + * ExpressionAttributeNames:

+ *
    + *
  • + *

    + * \{"#P":"Percentile"\} + *

    + *
  • + *
+ *

You could then use this substitution in an expression, as in this example:

+ *
    + *
  • + *

    + * #P = :val + *

    + *
  • + *
+ * + *

Tokens that begin with the : character are + * expression attribute values, which are placeholders for the + * actual value at runtime.

+ *
+ *

For more information on expression attribute names, see Accessing Item Attributes in the Amazon DynamoDB Developer + * Guide.

+ * @public + */ + ExpressionAttributeNames?: Record | undefined; +} +/** + *

Specifies an item to be retrieved as part of the transaction.

+ * @public + */ +export interface TransactGetItem { + /** + *

Contains the primary key that identifies the item to get, together with the name of + * the table that contains the item, and optionally the specific attributes of the item to + * retrieve.

+ * @public + */ + Get: Get | undefined; +} +/** + * @public + */ +export interface BatchExecuteStatementInput { + /** + *

The list of PartiQL statements representing the batch to run.

+ * @public + */ + Statements: BatchStatementRequest[] | undefined; + /** + *

Determines the level of detail about either provisioned or on-demand throughput + * consumption that is returned in the response:

+ *
    + *
  • + *

    + * INDEXES - The response includes the aggregate + * ConsumedCapacity for the operation, together with + * ConsumedCapacity for each table and secondary index that was + * accessed.

    + *

    Note that some operations, such as GetItem and + * BatchGetItem, do not access any indexes at all. In these cases, + * specifying INDEXES will only return ConsumedCapacity + * information for table(s).

    + *
  • + *
  • + *

    + * TOTAL - The response includes only the aggregate + * ConsumedCapacity for the operation.

    + *
  • + *
  • + *

    + * NONE - No ConsumedCapacity details are included in the + * response.

    + *
  • + *
+ * @public + */ + ReturnConsumedCapacity?: ReturnConsumedCapacity | undefined; +} +/** + * @public + */ +export interface ExecuteTransactionInput { + /** + *

The list of PartiQL statements representing the transaction to run.

+ * @public + */ + TransactStatements: ParameterizedStatement[] | undefined; + /** + *

Set this value to get remaining results, if NextToken was returned in the + * statement response.

+ * @public + */ + ClientRequestToken?: string | undefined; + /** + *

Determines the level of detail about either provisioned or on-demand throughput + * consumption that is returned in the response. For more information, see TransactGetItems and TransactWriteItems.

+ * @public + */ + ReturnConsumedCapacity?: ReturnConsumedCapacity | undefined; +} +/** + * @public + */ +export interface ExecuteTransactionOutput { + /** + *

The response to a PartiQL transaction.

+ * @public + */ + Responses?: ItemResponse[] | undefined; + /** + *

The capacity units consumed by the entire operation. The values of the list are + * ordered according to the ordering of the statements.

+ * @public + */ + ConsumedCapacity?: ConsumedCapacity[] | undefined; +} +/** + * @public + */ +export interface TransactGetItemsOutput { + /** + *

If the ReturnConsumedCapacity value was TOTAL, this + * is an array of ConsumedCapacity objects, one for each table addressed by + * TransactGetItem objects in the TransactItems + * parameter. These ConsumedCapacity objects report the read-capacity units + * consumed by the TransactGetItems call in that table.

+ * @public + */ + ConsumedCapacity?: ConsumedCapacity[] | undefined; + /** + *

An ordered array of up to 100 ItemResponse objects, each of which + * corresponds to the TransactGetItem object in the same position in the + * TransactItems array. Each ItemResponse object + * contains a Map of the name-value pairs that are the projected attributes of the + * requested item.

+ *

If a requested item could not be retrieved, the corresponding + * ItemResponse object is Null, or if the requested item has no projected + * attributes, the corresponding ItemResponse object is an empty Map.

+ * @public + */ + Responses?: ItemResponse[] | undefined; +} +/** + *

The entire transaction request was canceled.

+ *

DynamoDB cancels a TransactWriteItems request under the following + * circumstances:

+ *
    + *
  • + *

    A condition in one of the condition expressions is not met.

    + *
  • + *
  • + *

    A table in the TransactWriteItems request is in a different + * account or region.

    + *
  • + *
  • + *

    More than one action in the TransactWriteItems operation + * targets the same item.

    + *
  • + *
  • + *

    There is insufficient provisioned capacity for the transaction to be + * completed.

    + *
  • + *
  • + *

    An item size becomes too large (larger than 400 KB), or a local secondary + * index (LSI) becomes too large, or a similar validation error occurs because of + * changes made by the transaction.

    + *
  • + *
  • + *

    There is a user error, such as an invalid data format.

    + *
  • + *
  • + *

    + * There is an ongoing TransactWriteItems operation that conflicts with a concurrent + * TransactWriteItems request. In this case the TransactWriteItems operation + * fails with a TransactionCanceledException. + *

    + *
  • + *
+ *

DynamoDB cancels a TransactGetItems request under the + * following circumstances:

+ *
    + *
  • + *

    There is an ongoing TransactGetItems operation that conflicts + * with a concurrent PutItem, UpdateItem, + * DeleteItem or TransactWriteItems request. In this + * case the TransactGetItems operation fails with a + * TransactionCanceledException.

    + *
  • + *
  • + *

    A table in the TransactGetItems request is in a different + * account or region.

    + *
  • + *
  • + *

    There is insufficient provisioned capacity for the transaction to be + * completed.

    + *
  • + *
  • + *

    There is a user error, such as an invalid data format.

    + *
  • + *
+ * + *

If using Java, DynamoDB lists the cancellation reasons on the + * CancellationReasons property. This property is not set for other + * languages. Transaction cancellation reasons are ordered in the order of requested + * items, if an item has no error it will have None code and + * Null message.

+ *
+ *

Cancellation reason codes and possible error messages:

+ *
    + *
  • + *

    No Errors:

    + *
      + *
    • + *

      Code: None + *

      + *
    • + *
    • + *

      Message: null + *

      + *
    • + *
    + *
  • + *
  • + *

    Conditional Check Failed:

    + *
      + *
    • + *

      Code: ConditionalCheckFailed + *

      + *
    • + *
    • + *

      Message: The conditional request failed.

      + *
    • + *
    + *
  • + *
  • + *

    Item Collection Size Limit Exceeded:

    + *
      + *
    • + *

      Code: ItemCollectionSizeLimitExceeded + *

      + *
    • + *
    • + *

      Message: Collection size exceeded.

      + *
    • + *
    + *
  • + *
  • + *

    Transaction Conflict:

    + *
      + *
    • + *

      Code: TransactionConflict + *

      + *
    • + *
    • + *

      Message: Transaction is ongoing for the item.

      + *
    • + *
    + *
  • + *
  • + *

    Provisioned Throughput Exceeded:

    + *
      + *
    • + *

      Code: ProvisionedThroughputExceeded + *

      + *
    • + *
    • + *

      Messages:

      + *
        + *
      • + *

        The level of configured provisioned throughput for the + * table was exceeded. Consider increasing your provisioning level + * with the UpdateTable API.

        + * + *

        This Message is received when provisioned throughput is + * exceeded is on a provisioned DynamoDB + * table.

        + *
        + *
      • + *
      • + *

        The level of configured provisioned throughput for one or + * more global secondary indexes of the table was exceeded. + * Consider increasing your provisioning level for the + * under-provisioned global secondary indexes with the UpdateTable + * API.

        + * + *

        This message is returned when provisioned throughput is + * exceeded is on a provisioned GSI.

        + *
        + *
      • + *
      + *
    • + *
    + *
  • + *
  • + *

    Throttling Error:

    + *
      + *
    • + *

      Code: ThrottlingError + *

      + *
    • + *
    • + *

      Messages:

      + *
        + *
      • + *

        Throughput exceeds the current capacity of your table or + * index. DynamoDB is automatically scaling your table or + * index so please try again shortly. If exceptions persist, check + * if you have a hot key: + * https://docs.aws.amazon.com/amazondynamodb/latest/developerguide/bp-partition-key-design.html.

        + * + *

        This message is returned when writes get throttled on an + * On-Demand table as DynamoDB is automatically + * scaling the table.

        + *
        + *
      • + *
      • + *

        Throughput exceeds the current capacity for one or more + * global secondary indexes. DynamoDB is automatically + * scaling your index so please try again shortly.

        + * + *

        This message is returned when writes get throttled on + * an On-Demand GSI as DynamoDB is automatically + * scaling the GSI.

        + *
        + *
      • + *
      + *
    • + *
    + *
  • + *
  • + *

    Validation Error:

    + *
      + *
    • + *

      Code: ValidationError + *

      + *
    • + *
    • + *

      Messages:

      + *
        + *
      • + *

        One or more parameter values were invalid.

        + *
      • + *
      • + *

        The update expression attempted to update the secondary + * index key beyond allowed size limits.

        + *
      • + *
      • + *

        The update expression attempted to update the secondary + * index key to unsupported type.

        + *
      • + *
      • + *

        An operand in the update expression has an incorrect data + * type.

        + *
      • + *
      • + *

        Item size to update has exceeded the maximum allowed + * size.

        + *
      • + *
      • + *

        Number overflow. Attempting to store a number with + * magnitude larger than supported range.

        + *
      • + *
      • + *

        Type mismatch for attribute to update.

        + *
      • + *
      • + *

        Nesting Levels have exceeded supported limits.

        + *
      • + *
      • + *

        The document path provided in the update expression is + * invalid for update.

        + *
      • + *
      • + *

        The provided expression refers to an attribute that does + * not exist in the item.

        + *
      • + *
      + *
    • + *
    + *
  • + *
+ * @public + */ +export declare class TransactionCanceledException extends __BaseException { + readonly name: "TransactionCanceledException"; + readonly $fault: "client"; + Message?: string | undefined; + /** + *

A list of cancellation reasons.

+ * @public + */ + CancellationReasons?: CancellationReason[] | undefined; + /** + * @internal + */ + constructor(opts: __ExceptionOptionType); +} +/** + *

Represents the input of a BatchGetItem operation.

+ * @public + */ +export interface BatchGetItemInput { + /** + *

A map of one or more table names or table ARNs and, for each table, a map that + * describes one or more items to retrieve from that table. Each table name or ARN can be + * used only once per BatchGetItem request.

+ *

Each element in the map of items to retrieve consists of the following:

+ *
    + *
  • + *

    + * ConsistentRead - If true, a strongly consistent read + * is used; if false (the default), an eventually consistent read is + * used.

    + *
  • + *
  • + *

    + * ExpressionAttributeNames - One or more substitution tokens for + * attribute names in the ProjectionExpression parameter. The + * following are some use cases for using + * ExpressionAttributeNames:

    + *
      + *
    • + *

      To access an attribute whose name conflicts with a DynamoDB reserved + * word.

      + *
    • + *
    • + *

      To create a placeholder for repeating occurrences of an attribute name + * in an expression.

      + *
    • + *
    • + *

      To prevent special characters in an attribute name from being + * misinterpreted in an expression.

      + *
    • + *
    + *

    Use the # character in an expression to + * dereference an attribute name. For example, consider the following attribute + * name:

    + *
      + *
    • + *

      + * Percentile + *

      + *
    • + *
    + *

    The name of this attribute conflicts with a reserved word, so it cannot be + * used directly in an expression. (For the complete list of reserved words, see + * Reserved + * Words in the Amazon DynamoDB Developer Guide). + * To work around this, you could specify the following for + * ExpressionAttributeNames:

    + *
      + *
    • + *

      + * \{"#P":"Percentile"\} + *

      + *
    • + *
    + *

    You could then use this substitution in an expression, as in this + * example:

    + *
      + *
    • + *

      + * #P = :val + *

      + *
    • + *
    + * + *

    Tokens that begin with the : character + * are expression attribute values, which are placeholders + * for the actual value at runtime.

    + *
    + *

    For more information about expression attribute names, see Accessing Item Attributes in the Amazon DynamoDB + * Developer Guide.

    + *
  • + *
  • + *

    + * Keys - An array of primary key attribute values that define + * specific items in the table. For each primary key, you must provide + * all of the key attributes. For example, with a simple + * primary key, you only need to provide the partition key value. For a composite + * key, you must provide both the partition key value and the + * sort key value.

    + *
  • + *
  • + *

    + * ProjectionExpression - A string that identifies one or more + * attributes to retrieve from the table. These attributes can include scalars, + * sets, or elements of a JSON document. The attributes in the expression must be + * separated by commas.

    + *

    If no attribute names are specified, then all attributes are returned. If any + * of the requested attributes are not found, they do not appear in the + * result.

    + *

    For more information, see Accessing Item Attributes in the Amazon DynamoDB + * Developer Guide.

    + *
  • + *
  • + *

    + * AttributesToGet - This is a legacy parameter. Use + * ProjectionExpression instead. For more information, see AttributesToGet in the Amazon DynamoDB Developer + * Guide.

    + *
  • + *
+ * @public + */ + RequestItems: Record | undefined; + /** + *

Determines the level of detail about either provisioned or on-demand throughput + * consumption that is returned in the response:

+ *
    + *
  • + *

    + * INDEXES - The response includes the aggregate + * ConsumedCapacity for the operation, together with + * ConsumedCapacity for each table and secondary index that was + * accessed.

    + *

    Note that some operations, such as GetItem and + * BatchGetItem, do not access any indexes at all. In these cases, + * specifying INDEXES will only return ConsumedCapacity + * information for table(s).

    + *
  • + *
  • + *

    + * TOTAL - The response includes only the aggregate + * ConsumedCapacity for the operation.

    + *
  • + *
  • + *

    + * NONE - No ConsumedCapacity details are included in the + * response.

    + *
  • + *
+ * @public + */ + ReturnConsumedCapacity?: ReturnConsumedCapacity | undefined; +} +/** + *

Represents a condition to be compared with an attribute value. This condition can be + * used with DeleteItem, PutItem, or UpdateItem + * operations; if the comparison evaluates to true, the operation succeeds; if not, the + * operation fails. You can use ExpectedAttributeValue in one of two different + * ways:

+ *
    + *
  • + *

    Use AttributeValueList to specify one or more values to compare + * against an attribute. Use ComparisonOperator to specify how you + * want to perform the comparison. If the comparison evaluates to true, then the + * conditional operation succeeds.

    + *
  • + *
  • + *

    Use Value to specify a value that DynamoDB will compare against + * an attribute. If the values match, then ExpectedAttributeValue + * evaluates to true and the conditional operation succeeds. Optionally, you can + * also set Exists to false, indicating that you do + * not expect to find the attribute value in the table. In this + * case, the conditional operation succeeds only if the comparison evaluates to + * false.

    + *
  • + *
+ *

+ * Value and Exists are incompatible with + * AttributeValueList and ComparisonOperator. Note that if + * you use both sets of parameters at once, DynamoDB will return a + * ValidationException exception.

+ * @public + */ +export interface ExpectedAttributeValue { + /** + *

Represents the data for the expected attribute.

+ *

Each attribute value is described as a name-value pair. The name is the data type, and + * the value is the data itself.

+ *

For more information, see Data Types in the Amazon DynamoDB Developer + * Guide.

+ * @public + */ + Value?: AttributeValue | undefined; + /** + *

Causes DynamoDB to evaluate the value before attempting a conditional + * operation:

+ *
    + *
  • + *

    If Exists is true, DynamoDB will check to + * see if that attribute value already exists in the table. If it is found, then + * the operation succeeds. If it is not found, the operation fails with a + * ConditionCheckFailedException.

    + *
  • + *
  • + *

    If Exists is false, DynamoDB assumes that + * the attribute value does not exist in the table. If in fact the value does not + * exist, then the assumption is valid and the operation succeeds. If the value is + * found, despite the assumption that it does not exist, the operation fails with a + * ConditionCheckFailedException.

    + *
  • + *
+ *

The default setting for Exists is true. If you supply a + * Value all by itself, DynamoDB assumes the attribute exists: + * You don't have to set Exists to true, because it is + * implied.

+ *

DynamoDB returns a ValidationException if:

+ *
    + *
  • + *

    + * Exists is true but there is no Value to + * check. (You expect a value to exist, but don't specify what that value + * is.)

    + *
  • + *
  • + *

    + * Exists is false but you also provide a + * Value. (You cannot expect an attribute to have a value, while + * also expecting it not to exist.)

    + *
  • + *
+ * @public + */ + Exists?: boolean | undefined; + /** + *

A comparator for evaluating attributes in the AttributeValueList. For + * example, equals, greater than, less than, etc.

+ *

The following comparison operators are available:

+ *

+ * EQ | NE | LE | LT | GE | GT | NOT_NULL | NULL | CONTAINS | NOT_CONTAINS | + * BEGINS_WITH | IN | BETWEEN + *

+ *

The following are descriptions of each comparison operator.

+ *
    + *
  • + *

    + * EQ : Equal. EQ is supported for all data types, + * including lists and maps.

    + *

    + * AttributeValueList can contain only one AttributeValue + * element of type String, Number, Binary, String Set, Number Set, or Binary Set. + * If an item contains an AttributeValue element of a different type + * than the one provided in the request, the value does not match. For example, + * \{"S":"6"\} does not equal \{"N":"6"\}. Also, + * \{"N":"6"\} does not equal \{"NS":["6", "2", + * "1"]\}.

    + *

    + *
  • + *
  • + *

    + * NE : Not equal. NE is supported for all data types, + * including lists and maps.

    + *

    + * AttributeValueList can contain only one AttributeValue + * of type String, Number, Binary, String Set, Number Set, or Binary Set. If an + * item contains an AttributeValue of a different type than the one + * provided in the request, the value does not match. For example, + * \{"S":"6"\} does not equal \{"N":"6"\}. Also, + * \{"N":"6"\} does not equal \{"NS":["6", "2", + * "1"]\}.

    + *

    + *
  • + *
  • + *

    + * LE : Less than or equal.

    + *

    + * AttributeValueList can contain only one AttributeValue + * element of type String, Number, or Binary (not a set type). If an item contains + * an AttributeValue element of a different type than the one provided + * in the request, the value does not match. For example, \{"S":"6"\} + * does not equal \{"N":"6"\}. Also, \{"N":"6"\} does not + * compare to \{"NS":["6", "2", "1"]\}.

    + *

    + *
  • + *
  • + *

    + * LT : Less than.

    + *

    + * AttributeValueList can contain only one AttributeValue + * of type String, Number, or Binary (not a set type). If an item contains an + * AttributeValue element of a different type than the one + * provided in the request, the value does not match. For example, + * \{"S":"6"\} does not equal \{"N":"6"\}. Also, + * \{"N":"6"\} does not compare to \{"NS":["6", "2", + * "1"]\}.

    + *

    + *
  • + *
  • + *

    + * GE : Greater than or equal.

    + *

    + * AttributeValueList can contain only one AttributeValue + * element of type String, Number, or Binary (not a set type). If an item contains + * an AttributeValue element of a different type than the one provided + * in the request, the value does not match. For example, \{"S":"6"\} + * does not equal \{"N":"6"\}. Also, \{"N":"6"\} does not + * compare to \{"NS":["6", "2", "1"]\}.

    + *

    + *
  • + *
  • + *

    + * GT : Greater than.

    + *

    + * AttributeValueList can contain only one AttributeValue + * element of type String, Number, or Binary (not a set type). If an item contains + * an AttributeValue element of a different type than the one provided + * in the request, the value does not match. For example, \{"S":"6"\} + * does not equal \{"N":"6"\}. Also, \{"N":"6"\} does not + * compare to \{"NS":["6", "2", "1"]\}.

    + *

    + *
  • + *
  • + *

    + * NOT_NULL : The attribute exists. NOT_NULL is supported + * for all data types, including lists and maps.

    + * + *

    This operator tests for the existence of an attribute, not its data type. + * If the data type of attribute "a" is null, and you evaluate it + * using NOT_NULL, the result is a Boolean true. This + * result is because the attribute "a" exists; its data type is + * not relevant to the NOT_NULL comparison operator.

    + *
    + *
  • + *
  • + *

    + * NULL : The attribute does not exist. NULL is supported + * for all data types, including lists and maps.

    + * + *

    This operator tests for the nonexistence of an attribute, not its data + * type. If the data type of attribute "a" is null, and you + * evaluate it using NULL, the result is a Boolean + * false. This is because the attribute "a" + * exists; its data type is not relevant to the NULL comparison + * operator.

    + *
    + *
  • + *
  • + *

    + * CONTAINS : Checks for a subsequence, or value in a set.

    + *

    + * AttributeValueList can contain only one AttributeValue + * element of type String, Number, or Binary (not a set type). If the target + * attribute of the comparison is of type String, then the operator checks for a + * substring match. If the target attribute of the comparison is of type Binary, + * then the operator looks for a subsequence of the target that matches the input. + * If the target attribute of the comparison is a set ("SS", + * "NS", or "BS"), then the operator evaluates to + * true if it finds an exact match with any member of the set.

    + *

    CONTAINS is supported for lists: When evaluating "a CONTAINS b", + * "a" can be a list; however, "b" cannot be a set, a + * map, or a list.

    + *
  • + *
  • + *

    + * NOT_CONTAINS : Checks for absence of a subsequence, or absence of a + * value in a set.

    + *

    + * AttributeValueList can contain only one AttributeValue + * element of type String, Number, or Binary (not a set type). If the target + * attribute of the comparison is a String, then the operator checks for the + * absence of a substring match. If the target attribute of the comparison is + * Binary, then the operator checks for the absence of a subsequence of the target + * that matches the input. If the target attribute of the comparison is a set + * ("SS", "NS", or "BS"), then the + * operator evaluates to true if it does not find an exact + * match with any member of the set.

    + *

    NOT_CONTAINS is supported for lists: When evaluating "a NOT CONTAINS + * b", "a" can be a list; however, "b" cannot + * be a set, a map, or a list.

    + *
  • + *
  • + *

    + * BEGINS_WITH : Checks for a prefix.

    + *

    + * AttributeValueList can contain only one AttributeValue + * of type String or Binary (not a Number or a set type). The target attribute of + * the comparison must be of type String or Binary (not a Number or a set + * type).

    + *

    + *
  • + *
  • + *

    + * IN : Checks for matching elements in a list.

    + *

    + * AttributeValueList can contain one or more + * AttributeValue elements of type String, Number, or Binary. + * These attributes are compared against an existing attribute of an item. If any + * elements of the input are equal to the item attribute, the expression evaluates + * to true.

    + *
  • + *
  • + *

    + * BETWEEN : Greater than or equal to the first value, and less than + * or equal to the second value.

    + *

    + * AttributeValueList must contain two AttributeValue + * elements of the same type, either String, Number, or Binary (not a set type). A + * target attribute matches if the target value is greater than, or equal to, the + * first element and less than, or equal to, the second element. If an item + * contains an AttributeValue element of a different type than the one + * provided in the request, the value does not match. For example, + * \{"S":"6"\} does not compare to \{"N":"6"\}. Also, + * \{"N":"6"\} does not compare to \{"NS":["6", "2", + * "1"]\} + *

    + *
  • + *
+ * @public + */ + ComparisonOperator?: ComparisonOperator | undefined; + /** + *

One or more values to evaluate against the supplied attribute. The number of values in + * the list depends on the ComparisonOperator being used.

+ *

For type Number, value comparisons are numeric.

+ *

String value comparisons for greater than, equals, or less than are based on ASCII + * character code values. For example, a is greater than A, and + * a is greater than B. For a list of code values, see http://en.wikipedia.org/wiki/ASCII#ASCII_printable_characters.

+ *

For Binary, DynamoDB treats each byte of the binary data as unsigned when it + * compares binary values.

+ *

For information on specifying data types in JSON, see JSON Data Format + * in the Amazon DynamoDB Developer Guide.

+ * @public + */ + AttributeValueList?: AttributeValue[] | undefined; +} +/** + * @public + */ +export interface TransactGetItemsInput { + /** + *

An ordered array of up to 100 TransactGetItem objects, each of which + * contains a Get structure.

+ * @public + */ + TransactItems: TransactGetItem[] | undefined; + /** + *

A value of TOTAL causes consumed capacity information to be returned, and + * a value of NONE prevents that information from being returned. No other + * value is valid.

+ * @public + */ + ReturnConsumedCapacity?: ReturnConsumedCapacity | undefined; +} +/** + * @public + */ +export interface TransactWriteItemsOutput { + /** + *

The capacity units consumed by the entire TransactWriteItems operation. + * The values of the list are ordered according to the ordering of the + * TransactItems request parameter.

+ * @public + */ + ConsumedCapacity?: ConsumedCapacity[] | undefined; + /** + *

A list of tables that were processed by TransactWriteItems and, for each + * table, information about any item collections that were affected by individual + * UpdateItem, PutItem, or DeleteItem + * operations.

+ * @public + */ + ItemCollectionMetrics?: Record | undefined; +} +/** + *

Represents a request to perform a check that an item exists or to check the condition + * of specific attributes of the item.

+ * @public + */ +export interface ConditionCheck { + /** + *

The primary key of the item to be checked. Each element consists of an attribute name + * and a value for that attribute.

+ * @public + */ + Key: Record | undefined; + /** + *

Name of the table for the check item request. You can also provide the Amazon Resource Name (ARN) of + * the table in this parameter.

+ * @public + */ + TableName: string | undefined; + /** + *

A condition that must be satisfied in order for a conditional update to succeed. For + * more information, see Condition expressions in the Amazon DynamoDB Developer + * Guide.

+ * @public + */ + ConditionExpression: string | undefined; + /** + *

One or more substitution tokens for attribute names in an expression. For more + * information, see Expression attribute names in the Amazon DynamoDB Developer + * Guide.

+ * @public + */ + ExpressionAttributeNames?: Record | undefined; + /** + *

One or more values that can be substituted in an expression. For more information, see + * Condition expressions in the Amazon DynamoDB Developer + * Guide.

+ * @public + */ + ExpressionAttributeValues?: Record | undefined; + /** + *

Use ReturnValuesOnConditionCheckFailure to get the item attributes if the + * ConditionCheck condition fails. For + * ReturnValuesOnConditionCheckFailure, the valid values are: NONE and + * ALL_OLD.

+ * @public + */ + ReturnValuesOnConditionCheckFailure?: ReturnValuesOnConditionCheckFailure | undefined; +} +/** + *

Represents a request to perform a DeleteItem operation.

+ * @public + */ +export interface Delete { + /** + *

The primary key of the item to be deleted. Each element consists of an attribute name + * and a value for that attribute.

+ * @public + */ + Key: Record | undefined; + /** + *

Name of the table in which the item to be deleted resides. You can also provide the + * Amazon Resource Name (ARN) of the table in this parameter.

+ * @public + */ + TableName: string | undefined; + /** + *

A condition that must be satisfied in order for a conditional delete to + * succeed.

+ * @public + */ + ConditionExpression?: string | undefined; + /** + *

One or more substitution tokens for attribute names in an expression.

+ * @public + */ + ExpressionAttributeNames?: Record | undefined; + /** + *

One or more values that can be substituted in an expression.

+ * @public + */ + ExpressionAttributeValues?: Record | undefined; + /** + *

Use ReturnValuesOnConditionCheckFailure to get the item attributes if the + * Delete condition fails. For + * ReturnValuesOnConditionCheckFailure, the valid values are: NONE and + * ALL_OLD.

+ * @public + */ + ReturnValuesOnConditionCheckFailure?: ReturnValuesOnConditionCheckFailure | undefined; +} +/** + *

Represents a request to perform a PutItem operation.

+ * @public + */ +export interface Put { + /** + *

A map of attribute name to attribute values, representing the primary key of the item + * to be written by PutItem. All of the table's primary key attributes must be + * specified, and their data types must match those of the table's key schema. If any + * attributes are present in the item that are part of an index key schema for the table, + * their types must match the index key schema.

+ * @public + */ + Item: Record | undefined; + /** + *

Name of the table in which to write the item. You can also provide the Amazon Resource Name (ARN) of + * the table in this parameter.

+ * @public + */ + TableName: string | undefined; + /** + *

A condition that must be satisfied in order for a conditional update to + * succeed.

+ * @public + */ + ConditionExpression?: string | undefined; + /** + *

One or more substitution tokens for attribute names in an expression.

+ * @public + */ + ExpressionAttributeNames?: Record | undefined; + /** + *

One or more values that can be substituted in an expression.

+ * @public + */ + ExpressionAttributeValues?: Record | undefined; + /** + *

Use ReturnValuesOnConditionCheckFailure to get the item attributes if the + * Put condition fails. For + * ReturnValuesOnConditionCheckFailure, the valid values are: NONE and + * ALL_OLD.

+ * @public + */ + ReturnValuesOnConditionCheckFailure?: ReturnValuesOnConditionCheckFailure | undefined; +} +/** + *

Represents a request to perform an UpdateItem operation.

+ * @public + */ +export interface Update { + /** + *

The primary key of the item to be updated. Each element consists of an attribute name + * and a value for that attribute.

+ * @public + */ + Key: Record | undefined; + /** + *

An expression that defines one or more attributes to be updated, the action to be + * performed on them, and new value(s) for them.

+ * @public + */ + UpdateExpression: string | undefined; + /** + *

Name of the table for the UpdateItem request. You can also provide the + * Amazon Resource Name (ARN) of the table in this parameter.

+ * @public + */ + TableName: string | undefined; + /** + *

A condition that must be satisfied in order for a conditional update to + * succeed.

+ * @public + */ + ConditionExpression?: string | undefined; + /** + *

One or more substitution tokens for attribute names in an expression.

+ * @public + */ + ExpressionAttributeNames?: Record | undefined; + /** + *

One or more values that can be substituted in an expression.

+ * @public + */ + ExpressionAttributeValues?: Record | undefined; + /** + *

Use ReturnValuesOnConditionCheckFailure to get the item attributes if the + * Update condition fails. For + * ReturnValuesOnConditionCheckFailure, the valid values are: NONE and + * ALL_OLD.

+ * @public + */ + ReturnValuesOnConditionCheckFailure?: ReturnValuesOnConditionCheckFailure | undefined; +} +/** + *

A PartiQL batch statement response..

+ * @public + */ +export interface BatchStatementResponse { + /** + *

The error associated with a failed PartiQL batch statement.

+ * @public + */ + Error?: BatchStatementError | undefined; + /** + *

The table name associated with a failed PartiQL batch statement.

+ * @public + */ + TableName?: string | undefined; + /** + *

A DynamoDB item associated with a BatchStatementResponse

+ * @public + */ + Item?: Record | undefined; +} +/** + *

Represents the output of a DeleteItem operation.

+ * @public + */ +export interface DeleteItemOutput { + /** + *

A map of attribute names to AttributeValue objects, representing the item + * as it appeared before the DeleteItem operation. This map appears in the + * response only if ReturnValues was specified as ALL_OLD in the + * request.

+ * @public + */ + Attributes?: Record | undefined; + /** + *

The capacity units consumed by the DeleteItem operation. The data + * returned includes the total provisioned throughput consumed, along with statistics for + * the table and any indexes involved in the operation. ConsumedCapacity is + * only returned if the ReturnConsumedCapacity parameter was specified. For + * more information, see Provisioned capacity mode in the Amazon DynamoDB Developer + * Guide.

+ * @public + */ + ConsumedCapacity?: ConsumedCapacity | undefined; + /** + *

Information about item collections, if any, that were affected by the + * DeleteItem operation. ItemCollectionMetrics is only + * returned if the ReturnItemCollectionMetrics parameter was specified. If the + * table does not have any local secondary indexes, this information is not returned in the + * response.

+ *

Each ItemCollectionMetrics element consists of:

+ *
    + *
  • + *

    + * ItemCollectionKey - The partition key value of the item collection. + * This is the same as the partition key value of the item itself.

    + *
  • + *
  • + *

    + * SizeEstimateRangeGB - An estimate of item collection size, in + * gigabytes. This value is a two-element array containing a lower bound and an + * upper bound for the estimate. The estimate includes the size of all the items in + * the table, plus the size of all attributes projected into all of the local + * secondary indexes on that table. Use this estimate to measure whether a local + * secondary index is approaching its size limit.

    + *

    The estimate is subject to change over time; therefore, do not rely on the + * precision or accuracy of the estimate.

    + *
  • + *
+ * @public + */ + ItemCollectionMetrics?: ItemCollectionMetrics | undefined; +} +/** + * @public + */ +export interface ExecuteStatementOutput { + /** + *

If a read operation was used, this property will contain the result of the read + * operation; a map of attribute names and their values. For the write operations this + * value will be empty.

+ * @public + */ + Items?: Record[] | undefined; + /** + *

If the response of a read request exceeds the response payload limit DynamoDB will set + * this value in the response. If set, you can use that this value in the subsequent + * request to get the remaining results.

+ * @public + */ + NextToken?: string | undefined; + /** + *

The capacity units consumed by an operation. The data returned includes the total + * provisioned throughput consumed, along with statistics for the table and any indexes + * involved in the operation. ConsumedCapacity is only returned if the request + * asked for it. For more information, see Provisioned capacity mode in the Amazon DynamoDB Developer + * Guide.

+ * @public + */ + ConsumedCapacity?: ConsumedCapacity | undefined; + /** + *

The primary key of the item where the operation stopped, inclusive of the previous + * result set. Use this value to start a new operation, excluding this value in the new + * request. If LastEvaluatedKey is empty, then the "last page" of results has + * been processed and there is no more data to be retrieved. If + * LastEvaluatedKey is not empty, it does not necessarily mean that there + * is more data in the result set. The only way to know when you have reached the end of + * the result set is when LastEvaluatedKey is empty.

+ * @public + */ + LastEvaluatedKey?: Record | undefined; +} +/** + *

Represents the output of a PutItem operation.

+ * @public + */ +export interface PutItemOutput { + /** + *

The attribute values as they appeared before the PutItem operation, but + * only if ReturnValues is specified as ALL_OLD in the request. + * Each element consists of an attribute name and an attribute value.

+ * @public + */ + Attributes?: Record | undefined; + /** + *

The capacity units consumed by the PutItem operation. The data returned + * includes the total provisioned throughput consumed, along with statistics for the table + * and any indexes involved in the operation. ConsumedCapacity is only + * returned if the ReturnConsumedCapacity parameter was specified. For more + * information, see Capacity unity consumption for write operations in the Amazon + * DynamoDB Developer Guide.

+ * @public + */ + ConsumedCapacity?: ConsumedCapacity | undefined; + /** + *

Information about item collections, if any, that were affected by the + * PutItem operation. ItemCollectionMetrics is only returned + * if the ReturnItemCollectionMetrics parameter was specified. If the table + * does not have any local secondary indexes, this information is not returned in the + * response.

+ *

Each ItemCollectionMetrics element consists of:

+ *
    + *
  • + *

    + * ItemCollectionKey - The partition key value of the item collection. + * This is the same as the partition key value of the item itself.

    + *
  • + *
  • + *

    + * SizeEstimateRangeGB - An estimate of item collection size, in + * gigabytes. This value is a two-element array containing a lower bound and an + * upper bound for the estimate. The estimate includes the size of all the items in + * the table, plus the size of all attributes projected into all of the local + * secondary indexes on that table. Use this estimate to measure whether a local + * secondary index is approaching its size limit.

    + *

    The estimate is subject to change over time; therefore, do not rely on the + * precision or accuracy of the estimate.

    + *
  • + *
+ * @public + */ + ItemCollectionMetrics?: ItemCollectionMetrics | undefined; +} +/** + *

Represents the output of a Query operation.

+ * @public + */ +export interface QueryOutput { + /** + *

An array of item attributes that match the query criteria. Each element in this array + * consists of an attribute name and the value for that attribute.

+ * @public + */ + Items?: Record[] | undefined; + /** + *

The number of items in the response.

+ *

If you used a QueryFilter in the request, then Count is the + * number of items returned after the filter was applied, and ScannedCount is + * the number of matching items before the filter was applied.

+ *

If you did not use a filter in the request, then Count and + * ScannedCount are the same.

+ * @public + */ + Count?: number | undefined; + /** + *

The number of items evaluated, before any QueryFilter is applied. A high + * ScannedCount value with few, or no, Count results + * indicates an inefficient Query operation. For more information, see Count and ScannedCount in the Amazon DynamoDB Developer + * Guide.

+ *

If you did not use a filter in the request, then ScannedCount is the same + * as Count.

+ * @public + */ + ScannedCount?: number | undefined; + /** + *

The primary key of the item where the operation stopped, inclusive of the previous + * result set. Use this value to start a new operation, excluding this value in the new + * request.

+ *

If LastEvaluatedKey is empty, then the "last page" of results has been + * processed and there is no more data to be retrieved.

+ *

If LastEvaluatedKey is not empty, it does not necessarily mean that there + * is more data in the result set. The only way to know when you have reached the end of + * the result set is when LastEvaluatedKey is empty.

+ * @public + */ + LastEvaluatedKey?: Record | undefined; + /** + *

The capacity units consumed by the Query operation. The data returned + * includes the total provisioned throughput consumed, along with statistics for the table + * and any indexes involved in the operation. ConsumedCapacity is only + * returned if the ReturnConsumedCapacity parameter was specified. For more + * information, see Capacity unit consumption for read operations in the Amazon + * DynamoDB Developer Guide.

+ * @public + */ + ConsumedCapacity?: ConsumedCapacity | undefined; +} +/** + *

Represents the output of a Scan operation.

+ * @public + */ +export interface ScanOutput { + /** + *

An array of item attributes that match the scan criteria. Each element in this array + * consists of an attribute name and the value for that attribute.

+ * @public + */ + Items?: Record[] | undefined; + /** + *

The number of items in the response.

+ *

If you set ScanFilter in the request, then Count is the + * number of items returned after the filter was applied, and ScannedCount is + * the number of matching items before the filter was applied.

+ *

If you did not use a filter in the request, then Count is the same as + * ScannedCount.

+ * @public + */ + Count?: number | undefined; + /** + *

The number of items evaluated, before any ScanFilter is applied. A high + * ScannedCount value with few, or no, Count results + * indicates an inefficient Scan operation. For more information, see Count and + * ScannedCount in the Amazon DynamoDB Developer + * Guide.

+ *

If you did not use a filter in the request, then ScannedCount is the same + * as Count.

+ * @public + */ + ScannedCount?: number | undefined; + /** + *

The primary key of the item where the operation stopped, inclusive of the previous + * result set. Use this value to start a new operation, excluding this value in the new + * request.

+ *

If LastEvaluatedKey is empty, then the "last page" of results has been + * processed and there is no more data to be retrieved.

+ *

If LastEvaluatedKey is not empty, it does not necessarily mean that there + * is more data in the result set. The only way to know when you have reached the end of + * the result set is when LastEvaluatedKey is empty.

+ * @public + */ + LastEvaluatedKey?: Record | undefined; + /** + *

The capacity units consumed by the Scan operation. The data returned + * includes the total provisioned throughput consumed, along with statistics for the table + * and any indexes involved in the operation. ConsumedCapacity is only + * returned if the ReturnConsumedCapacity parameter was specified. For more + * information, see Capacity unit consumption for read operations in the Amazon + * DynamoDB Developer Guide.

+ * @public + */ + ConsumedCapacity?: ConsumedCapacity | undefined; +} +/** + *

Represents the output of an UpdateItem operation.

+ * @public + */ +export interface UpdateItemOutput { + /** + *

A map of attribute values as they appear before or after the UpdateItem + * operation, as determined by the ReturnValues parameter.

+ *

The Attributes map is only present if the update was successful and + * ReturnValues was specified as something other than NONE in + * the request. Each element represents one attribute.

+ * @public + */ + Attributes?: Record | undefined; + /** + *

The capacity units consumed by the UpdateItem operation. The data + * returned includes the total provisioned throughput consumed, along with statistics for + * the table and any indexes involved in the operation. ConsumedCapacity is + * only returned if the ReturnConsumedCapacity parameter was specified. For + * more information, see Capacity unity consumption for write operations in the Amazon + * DynamoDB Developer Guide.

+ * @public + */ + ConsumedCapacity?: ConsumedCapacity | undefined; + /** + *

Information about item collections, if any, that were affected by the + * UpdateItem operation. ItemCollectionMetrics is only + * returned if the ReturnItemCollectionMetrics parameter was specified. If the + * table does not have any local secondary indexes, this information is not returned in the + * response.

+ *

Each ItemCollectionMetrics element consists of:

+ *
    + *
  • + *

    + * ItemCollectionKey - The partition key value of the item collection. + * This is the same as the partition key value of the item itself.

    + *
  • + *
  • + *

    + * SizeEstimateRangeGB - An estimate of item collection size, in + * gigabytes. This value is a two-element array containing a lower bound and an + * upper bound for the estimate. The estimate includes the size of all the items in + * the table, plus the size of all attributes projected into all of the local + * secondary indexes on that table. Use this estimate to measure whether a local + * secondary index is approaching its size limit.

    + *

    The estimate is subject to change over time; therefore, do not rely on the + * precision or accuracy of the estimate.

    + *
  • + *
+ * @public + */ + ItemCollectionMetrics?: ItemCollectionMetrics | undefined; +} +/** + *

Represents an operation to perform - either DeleteItem or + * PutItem. You can only request one of these operations, not both, in a + * single WriteRequest. If you do need to perform both of these operations, + * you need to provide two separate WriteRequest objects.

+ * @public + */ +export interface WriteRequest { + /** + *

A request to perform a PutItem operation.

+ * @public + */ + PutRequest?: PutRequest | undefined; + /** + *

A request to perform a DeleteItem operation.

+ * @public + */ + DeleteRequest?: DeleteRequest | undefined; +} +/** + * @public + */ +export interface BatchExecuteStatementOutput { + /** + *

The response to each PartiQL statement in the batch. The values of the list are + * ordered according to the ordering of the request statements.

+ * @public + */ + Responses?: BatchStatementResponse[] | undefined; + /** + *

The capacity units consumed by the entire operation. The values of the list are + * ordered according to the ordering of the statements.

+ * @public + */ + ConsumedCapacity?: ConsumedCapacity[] | undefined; +} +/** + *

Represents the output of a BatchGetItem operation.

+ * @public + */ +export interface BatchGetItemOutput { + /** + *

A map of table name or table ARN to a list of items. Each object in + * Responses consists of a table name or ARN, along with a map of + * attribute data consisting of the data type and attribute value.

+ * @public + */ + Responses?: Record[]> | undefined; + /** + *

A map of tables and their respective keys that were not processed with the current + * response. The UnprocessedKeys value is in the same form as + * RequestItems, so the value can be provided directly to a subsequent + * BatchGetItem operation. For more information, see + * RequestItems in the Request Parameters section.

+ *

Each element consists of:

+ *
    + *
  • + *

    + * Keys - An array of primary key attribute values that define + * specific items in the table.

    + *
  • + *
  • + *

    + * ProjectionExpression - One or more attributes to be retrieved from + * the table or index. By default, all attributes are returned. If a requested + * attribute is not found, it does not appear in the result.

    + *
  • + *
  • + *

    + * ConsistentRead - The consistency of a read operation. If set to + * true, then a strongly consistent read is used; otherwise, an + * eventually consistent read is used.

    + *
  • + *
+ *

If there are no unprocessed keys remaining, the response contains an empty + * UnprocessedKeys map.

+ * @public + */ + UnprocessedKeys?: Record | undefined; + /** + *

The read capacity units consumed by the entire BatchGetItem + * operation.

+ *

Each element consists of:

+ *
    + *
  • + *

    + * TableName - The table that consumed the provisioned + * throughput.

    + *
  • + *
  • + *

    + * CapacityUnits - The total number of capacity units consumed.

    + *
  • + *
+ * @public + */ + ConsumedCapacity?: ConsumedCapacity[] | undefined; +} +/** + *

Represents the input of a Scan operation.

+ * @public + */ +export interface ScanInput { + /** + *

The name of the table containing the requested items or if you provide + * IndexName, the name of the table to which that index belongs.

+ *

You can also provide the Amazon Resource Name (ARN) of the table in this parameter.

+ * @public + */ + TableName: string | undefined; + /** + *

The name of a secondary index to scan. This index can be any local secondary index or + * global secondary index. Note that if you use the IndexName parameter, you + * must also provide TableName.

+ * @public + */ + IndexName?: string | undefined; + /** + *

This is a legacy parameter. Use ProjectionExpression instead. For more + * information, see AttributesToGet in the Amazon DynamoDB Developer + * Guide.

+ * @public + */ + AttributesToGet?: string[] | undefined; + /** + *

The maximum number of items to evaluate (not necessarily the number of matching + * items). If DynamoDB processes the number of items up to the limit while processing the + * results, it stops the operation and returns the matching values up to that point, and a + * key in LastEvaluatedKey to apply in a subsequent operation, so that you can + * pick up where you left off. Also, if the processed dataset size exceeds 1 MB before + * DynamoDB reaches this limit, it stops the operation and returns the matching values up + * to the limit, and a key in LastEvaluatedKey to apply in a subsequent + * operation to continue the operation. For more information, see Working with Queries in the Amazon DynamoDB Developer + * Guide.

+ * @public + */ + Limit?: number | undefined; + /** + *

The attributes to be returned in the result. You can retrieve all item attributes, + * specific item attributes, the count of matching items, or in the case of an index, some + * or all of the attributes projected into the index.

+ *
    + *
  • + *

    + * ALL_ATTRIBUTES - Returns all of the item attributes from the + * specified table or index. If you query a local secondary index, then for each + * matching item in the index, DynamoDB fetches the entire item from the parent + * table. If the index is configured to project all item attributes, then all of + * the data can be obtained from the local secondary index, and no fetching is + * required.

    + *
  • + *
  • + *

    + * ALL_PROJECTED_ATTRIBUTES - Allowed only when querying an index. + * Retrieves all attributes that have been projected into the index. If the index + * is configured to project all attributes, this return value is equivalent to + * specifying ALL_ATTRIBUTES.

    + *
  • + *
  • + *

    + * COUNT - Returns the number of matching items, rather than the + * matching items themselves. Note that this uses the same quantity of read + * capacity units as getting the items, and is subject to the same item size + * calculations.

    + *
  • + *
  • + *

    + * SPECIFIC_ATTRIBUTES - Returns only the attributes listed in + * ProjectionExpression. This return value is equivalent to + * specifying ProjectionExpression without specifying any value for + * Select.

    + *

    If you query or scan a local secondary index and request only attributes that + * are projected into that index, the operation reads only the index and not the + * table. If any of the requested attributes are not projected into the local + * secondary index, DynamoDB fetches each of these attributes from the parent + * table. This extra fetching incurs additional throughput cost and latency.

    + *

    If you query or scan a global secondary index, you can only request attributes + * that are projected into the index. Global secondary index queries cannot fetch + * attributes from the parent table.

    + *
  • + *
+ *

If neither Select nor ProjectionExpression are specified, + * DynamoDB defaults to ALL_ATTRIBUTES when accessing a table, and + * ALL_PROJECTED_ATTRIBUTES when accessing an index. You cannot use both + * Select and ProjectionExpression together in a single + * request, unless the value for Select is SPECIFIC_ATTRIBUTES. + * (This usage is equivalent to specifying ProjectionExpression without any + * value for Select.)

+ * + *

If you use the ProjectionExpression parameter, then the value for + * Select can only be SPECIFIC_ATTRIBUTES. Any other + * value for Select will return an error.

+ *
+ * @public + */ + Select?: Select | undefined; + /** + *

This is a legacy parameter. Use FilterExpression instead. For more + * information, see ScanFilter in the Amazon DynamoDB Developer + * Guide.

+ * @public + */ + ScanFilter?: Record | undefined; + /** + *

This is a legacy parameter. Use FilterExpression instead. For more + * information, see ConditionalOperator in the Amazon DynamoDB Developer + * Guide.

+ * @public + */ + ConditionalOperator?: ConditionalOperator | undefined; + /** + *

The primary key of the first item that this operation will evaluate. Use the value + * that was returned for LastEvaluatedKey in the previous operation.

+ *

The data type for ExclusiveStartKey must be String, Number or Binary. No + * set data types are allowed.

+ *

In a parallel scan, a Scan request that includes + * ExclusiveStartKey must specify the same segment whose previous + * Scan returned the corresponding value of + * LastEvaluatedKey.

+ * @public + */ + ExclusiveStartKey?: Record | undefined; + /** + *

Determines the level of detail about either provisioned or on-demand throughput + * consumption that is returned in the response:

+ *
    + *
  • + *

    + * INDEXES - The response includes the aggregate + * ConsumedCapacity for the operation, together with + * ConsumedCapacity for each table and secondary index that was + * accessed.

    + *

    Note that some operations, such as GetItem and + * BatchGetItem, do not access any indexes at all. In these cases, + * specifying INDEXES will only return ConsumedCapacity + * information for table(s).

    + *
  • + *
  • + *

    + * TOTAL - The response includes only the aggregate + * ConsumedCapacity for the operation.

    + *
  • + *
  • + *

    + * NONE - No ConsumedCapacity details are included in the + * response.

    + *
  • + *
+ * @public + */ + ReturnConsumedCapacity?: ReturnConsumedCapacity | undefined; + /** + *

For a parallel Scan request, TotalSegments represents the + * total number of segments into which the Scan operation will be divided. The + * value of TotalSegments corresponds to the number of application workers + * that will perform the parallel scan. For example, if you want to use four application + * threads to scan a table or an index, specify a TotalSegments value of + * 4.

+ *

The value for TotalSegments must be greater than or equal to 1, and less + * than or equal to 1000000. If you specify a TotalSegments value of 1, the + * Scan operation will be sequential rather than parallel.

+ *

If you specify TotalSegments, you must also specify + * Segment.

+ * @public + */ + TotalSegments?: number | undefined; + /** + *

For a parallel Scan request, Segment identifies an + * individual segment to be scanned by an application worker.

+ *

Segment IDs are zero-based, so the first segment is always 0. For example, if you want + * to use four application threads to scan a table or an index, then the first thread + * specifies a Segment value of 0, the second thread specifies 1, and so + * on.

+ *

The value of LastEvaluatedKey returned from a parallel Scan + * request must be used as ExclusiveStartKey with the same segment ID in a + * subsequent Scan operation.

+ *

The value for Segment must be greater than or equal to 0, and less than + * the value provided for TotalSegments.

+ *

If you provide Segment, you must also provide + * TotalSegments.

+ * @public + */ + Segment?: number | undefined; + /** + *

A string that identifies one or more attributes to retrieve from the specified table + * or index. These attributes can include scalars, sets, or elements of a JSON document. + * The attributes in the expression must be separated by commas.

+ *

If no attribute names are specified, then all attributes will be returned. If any of + * the requested attributes are not found, they will not appear in the result.

+ *

For more information, see Specifying Item Attributes in the Amazon DynamoDB Developer + * Guide.

+ * @public + */ + ProjectionExpression?: string | undefined; + /** + *

A string that contains conditions that DynamoDB applies after the Scan + * operation, but before the data is returned to you. Items that do not satisfy the + * FilterExpression criteria are not returned.

+ * + *

A FilterExpression is applied after the items have already been read; + * the process of filtering does not consume any additional read capacity units.

+ *
+ *

For more information, see Filter + * Expressions in the Amazon DynamoDB Developer + * Guide.

+ * @public + */ + FilterExpression?: string | undefined; + /** + *

One or more substitution tokens for attribute names in an expression. The following + * are some use cases for using ExpressionAttributeNames:

+ *
    + *
  • + *

    To access an attribute whose name conflicts with a DynamoDB reserved + * word.

    + *
  • + *
  • + *

    To create a placeholder for repeating occurrences of an attribute name in an + * expression.

    + *
  • + *
  • + *

    To prevent special characters in an attribute name from being misinterpreted + * in an expression.

    + *
  • + *
+ *

Use the # character in an expression to dereference + * an attribute name. For example, consider the following attribute name:

+ *
    + *
  • + *

    + * Percentile + *

    + *
  • + *
+ *

The name of this attribute conflicts with a reserved word, so it cannot be used + * directly in an expression. (For the complete list of reserved words, see Reserved Words in the Amazon DynamoDB Developer + * Guide). To work around this, you could specify the following for + * ExpressionAttributeNames:

+ *
    + *
  • + *

    + * \{"#P":"Percentile"\} + *

    + *
  • + *
+ *

You could then use this substitution in an expression, as in this example:

+ *
    + *
  • + *

    + * #P = :val + *

    + *
  • + *
+ * + *

Tokens that begin with the : character are + * expression attribute values, which are placeholders for the + * actual value at runtime.

+ *
+ *

For more information on expression attribute names, see Specifying Item Attributes in the Amazon DynamoDB Developer + * Guide.

+ * @public + */ + ExpressionAttributeNames?: Record | undefined; + /** + *

One or more values that can be substituted in an expression.

+ *

Use the : (colon) character in an expression to + * dereference an attribute value. For example, suppose that you wanted to check whether + * the value of the ProductStatus attribute was one of the following:

+ *

+ * Available | Backordered | Discontinued + *

+ *

You would first need to specify ExpressionAttributeValues as + * follows:

+ *

+ * \{ ":avail":\{"S":"Available"\}, ":back":\{"S":"Backordered"\}, + * ":disc":\{"S":"Discontinued"\} \} + *

+ *

You could then use these values in an expression, such as this:

+ *

+ * ProductStatus IN (:avail, :back, :disc) + *

+ *

For more information on expression attribute values, see Condition Expressions in the Amazon DynamoDB Developer + * Guide.

+ * @public + */ + ExpressionAttributeValues?: Record | undefined; + /** + *

A Boolean value that determines the read consistency model during the scan:

+ *
    + *
  • + *

    If ConsistentRead is false, then the data returned + * from Scan might not contain the results from other recently + * completed write operations (PutItem, UpdateItem, or + * DeleteItem).

    + *
  • + *
  • + *

    If ConsistentRead is true, then all of the write + * operations that completed before the Scan began are guaranteed to + * be contained in the Scan response.

    + *
  • + *
+ *

The default setting for ConsistentRead is false.

+ *

The ConsistentRead parameter is not supported on global secondary + * indexes. If you scan a global secondary index with ConsistentRead set to + * true, you will receive a ValidationException.

+ * @public + */ + ConsistentRead?: boolean | undefined; +} +/** + *

Represents the input of a BatchWriteItem operation.

+ * @public + */ +export interface BatchWriteItemInput { + /** + *

A map of one or more table names or table ARNs and, for each table, a list of + * operations to be performed (DeleteRequest or PutRequest). Each + * element in the map consists of the following:

+ *
    + *
  • + *

    + * DeleteRequest - Perform a DeleteItem operation on the + * specified item. The item to be deleted is identified by a Key + * subelement:

    + *
      + *
    • + *

      + * Key - A map of primary key attribute values that uniquely + * identify the item. Each entry in this map consists of an attribute name + * and an attribute value. For each primary key, you must provide + * all of the key attributes. For example, with a + * simple primary key, you only need to provide a value for the partition + * key. For a composite primary key, you must provide values for + * both the partition key and the sort key.

      + *
    • + *
    + *
  • + *
  • + *

    + * PutRequest - Perform a PutItem operation on the + * specified item. The item to be put is identified by an Item + * subelement:

    + *
      + *
    • + *

      + * Item - A map of attributes and their values. Each entry in + * this map consists of an attribute name and an attribute value. Attribute + * values must not be null; string and binary type attributes must have + * lengths greater than zero; and set type attributes must not be empty. + * Requests that contain empty values are rejected with a + * ValidationException exception.

      + *

      If you specify any attributes that are part of an index key, then the + * data types for those attributes must match those of the schema in the + * table's attribute definition.

      + *
    • + *
    + *
  • + *
+ * @public + */ + RequestItems: Record | undefined; + /** + *

Determines the level of detail about either provisioned or on-demand throughput + * consumption that is returned in the response:

+ *
    + *
  • + *

    + * INDEXES - The response includes the aggregate + * ConsumedCapacity for the operation, together with + * ConsumedCapacity for each table and secondary index that was + * accessed.

    + *

    Note that some operations, such as GetItem and + * BatchGetItem, do not access any indexes at all. In these cases, + * specifying INDEXES will only return ConsumedCapacity + * information for table(s).

    + *
  • + *
  • + *

    + * TOTAL - The response includes only the aggregate + * ConsumedCapacity for the operation.

    + *
  • + *
  • + *

    + * NONE - No ConsumedCapacity details are included in the + * response.

    + *
  • + *
+ * @public + */ + ReturnConsumedCapacity?: ReturnConsumedCapacity | undefined; + /** + *

Determines whether item collection metrics are returned. If set to SIZE, + * the response includes statistics about item collections, if any, that were modified + * during the operation are returned in the response. If set to NONE (the + * default), no statistics are returned.

+ * @public + */ + ReturnItemCollectionMetrics?: ReturnItemCollectionMetrics | undefined; +} +/** + *

Represents the input of a DeleteItem operation.

+ * @public + */ +export interface DeleteItemInput { + /** + *

The name of the table from which to delete the item. You can also provide the + * Amazon Resource Name (ARN) of the table in this parameter.

+ * @public + */ + TableName: string | undefined; + /** + *

A map of attribute names to AttributeValue objects, representing the + * primary key of the item to delete.

+ *

For the primary key, you must provide all of the key attributes. For example, with a + * simple primary key, you only need to provide a value for the partition key. For a + * composite primary key, you must provide values for both the partition key and the sort + * key.

+ * @public + */ + Key: Record | undefined; + /** + *

This is a legacy parameter. Use ConditionExpression instead. For more + * information, see Expected in the Amazon DynamoDB Developer + * Guide.

+ * @public + */ + Expected?: Record | undefined; + /** + *

This is a legacy parameter. Use ConditionExpression instead. For more + * information, see ConditionalOperator in the Amazon DynamoDB Developer + * Guide.

+ * @public + */ + ConditionalOperator?: ConditionalOperator | undefined; + /** + *

Use ReturnValues if you want to get the item attributes as they appeared + * before they were deleted. For DeleteItem, the valid values are:

+ *
    + *
  • + *

    + * NONE - If ReturnValues is not specified, or if its + * value is NONE, then nothing is returned. (This setting is the + * default for ReturnValues.)

    + *
  • + *
  • + *

    + * ALL_OLD - The content of the old item is returned.

    + *
  • + *
+ *

There is no additional cost associated with requesting a return value aside from the + * small network and processing overhead of receiving a larger response. No read capacity + * units are consumed.

+ * + *

The ReturnValues parameter is used by several DynamoDB operations; + * however, DeleteItem does not recognize any values other than + * NONE or ALL_OLD.

+ *
+ * @public + */ + ReturnValues?: ReturnValue | undefined; + /** + *

Determines the level of detail about either provisioned or on-demand throughput + * consumption that is returned in the response:

+ *
    + *
  • + *

    + * INDEXES - The response includes the aggregate + * ConsumedCapacity for the operation, together with + * ConsumedCapacity for each table and secondary index that was + * accessed.

    + *

    Note that some operations, such as GetItem and + * BatchGetItem, do not access any indexes at all. In these cases, + * specifying INDEXES will only return ConsumedCapacity + * information for table(s).

    + *
  • + *
  • + *

    + * TOTAL - The response includes only the aggregate + * ConsumedCapacity for the operation.

    + *
  • + *
  • + *

    + * NONE - No ConsumedCapacity details are included in the + * response.

    + *
  • + *
+ * @public + */ + ReturnConsumedCapacity?: ReturnConsumedCapacity | undefined; + /** + *

Determines whether item collection metrics are returned. If set to SIZE, + * the response includes statistics about item collections, if any, that were modified + * during the operation are returned in the response. If set to NONE (the + * default), no statistics are returned.

+ * @public + */ + ReturnItemCollectionMetrics?: ReturnItemCollectionMetrics | undefined; + /** + *

A condition that must be satisfied in order for a conditional DeleteItem + * to succeed.

+ *

An expression can contain any of the following:

+ *
    + *
  • + *

    Functions: attribute_exists | attribute_not_exists | attribute_type | + * contains | begins_with | size + *

    + *

    These function names are case-sensitive.

    + *
  • + *
  • + *

    Comparison operators: = | <> | + * < | > | <= | >= | + * BETWEEN | IN + *

    + *
  • + *
  • + *

    Logical operators: AND | OR | NOT + *

    + *
  • + *
+ *

For more information about condition expressions, see Condition Expressions in the Amazon DynamoDB Developer + * Guide.

+ * @public + */ + ConditionExpression?: string | undefined; + /** + *

One or more substitution tokens for attribute names in an expression. The following + * are some use cases for using ExpressionAttributeNames:

+ *
    + *
  • + *

    To access an attribute whose name conflicts with a DynamoDB reserved + * word.

    + *
  • + *
  • + *

    To create a placeholder for repeating occurrences of an attribute name in an + * expression.

    + *
  • + *
  • + *

    To prevent special characters in an attribute name from being misinterpreted + * in an expression.

    + *
  • + *
+ *

Use the # character in an expression to dereference + * an attribute name. For example, consider the following attribute name:

+ *
    + *
  • + *

    + * Percentile + *

    + *
  • + *
+ *

The name of this attribute conflicts with a reserved word, so it cannot be used + * directly in an expression. (For the complete list of reserved words, see Reserved Words in the Amazon DynamoDB Developer + * Guide). To work around this, you could specify the following for + * ExpressionAttributeNames:

+ *
    + *
  • + *

    + * \{"#P":"Percentile"\} + *

    + *
  • + *
+ *

You could then use this substitution in an expression, as in this example:

+ *
    + *
  • + *

    + * #P = :val + *

    + *
  • + *
+ * + *

Tokens that begin with the : character are + * expression attribute values, which are placeholders for the + * actual value at runtime.

+ *
+ *

For more information on expression attribute names, see Specifying Item Attributes in the Amazon DynamoDB Developer + * Guide.

+ * @public + */ + ExpressionAttributeNames?: Record | undefined; + /** + *

One or more values that can be substituted in an expression.

+ *

Use the : (colon) character in an expression to + * dereference an attribute value. For example, suppose that you wanted to check whether + * the value of the ProductStatus attribute was one of the following:

+ *

+ * Available | Backordered | Discontinued + *

+ *

You would first need to specify ExpressionAttributeValues as + * follows:

+ *

+ * \{ ":avail":\{"S":"Available"\}, ":back":\{"S":"Backordered"\}, + * ":disc":\{"S":"Discontinued"\} \} + *

+ *

You could then use these values in an expression, such as this:

+ *

+ * ProductStatus IN (:avail, :back, :disc) + *

+ *

For more information on expression attribute values, see Condition Expressions in the Amazon DynamoDB Developer + * Guide.

+ * @public + */ + ExpressionAttributeValues?: Record | undefined; + /** + *

An optional parameter that returns the item attributes for a DeleteItem + * operation that failed a condition check.

+ *

There is no additional cost associated with requesting a return value aside from the + * small network and processing overhead of receiving a larger response. No read capacity + * units are consumed.

+ * @public + */ + ReturnValuesOnConditionCheckFailure?: ReturnValuesOnConditionCheckFailure | undefined; +} +/** + *

Represents the input of a PutItem operation.

+ * @public + */ +export interface PutItemInput { + /** + *

The name of the table to contain the item. You can also provide the Amazon Resource Name (ARN) of the + * table in this parameter.

+ * @public + */ + TableName: string | undefined; + /** + *

A map of attribute name/value pairs, one for each attribute. Only the primary key + * attributes are required; you can optionally provide other attribute name-value pairs for + * the item.

+ *

You must provide all of the attributes for the primary key. For example, with a simple + * primary key, you only need to provide a value for the partition key. For a composite + * primary key, you must provide both values for both the partition key and the sort + * key.

+ *

If you specify any attributes that are part of an index key, then the data types for + * those attributes must match those of the schema in the table's attribute + * definition.

+ *

Empty String and Binary attribute values are allowed. Attribute values of type String + * and Binary must have a length greater than zero if the attribute is used as a key + * attribute for a table or index.

+ *

For more information about primary keys, see Primary Key in the Amazon DynamoDB Developer + * Guide.

+ *

Each element in the Item map is an AttributeValue + * object.

+ * @public + */ + Item: Record | undefined; + /** + *

This is a legacy parameter. Use ConditionExpression instead. For more + * information, see Expected in the Amazon DynamoDB Developer + * Guide.

+ * @public + */ + Expected?: Record | undefined; + /** + *

Use ReturnValues if you want to get the item attributes as they appeared + * before they were updated with the PutItem request. For + * PutItem, the valid values are:

+ *
    + *
  • + *

    + * NONE - If ReturnValues is not specified, or if its + * value is NONE, then nothing is returned. (This setting is the + * default for ReturnValues.)

    + *
  • + *
  • + *

    + * ALL_OLD - If PutItem overwrote an attribute name-value + * pair, then the content of the old item is returned.

    + *
  • + *
+ *

The values returned are strongly consistent.

+ *

There is no additional cost associated with requesting a return value aside from the + * small network and processing overhead of receiving a larger response. No read capacity + * units are consumed.

+ * + *

The ReturnValues parameter is used by several DynamoDB operations; + * however, PutItem does not recognize any values other than + * NONE or ALL_OLD.

+ *
+ * @public + */ + ReturnValues?: ReturnValue | undefined; + /** + *

Determines the level of detail about either provisioned or on-demand throughput + * consumption that is returned in the response:

+ *
    + *
  • + *

    + * INDEXES - The response includes the aggregate + * ConsumedCapacity for the operation, together with + * ConsumedCapacity for each table and secondary index that was + * accessed.

    + *

    Note that some operations, such as GetItem and + * BatchGetItem, do not access any indexes at all. In these cases, + * specifying INDEXES will only return ConsumedCapacity + * information for table(s).

    + *
  • + *
  • + *

    + * TOTAL - The response includes only the aggregate + * ConsumedCapacity for the operation.

    + *
  • + *
  • + *

    + * NONE - No ConsumedCapacity details are included in the + * response.

    + *
  • + *
+ * @public + */ + ReturnConsumedCapacity?: ReturnConsumedCapacity | undefined; + /** + *

Determines whether item collection metrics are returned. If set to SIZE, + * the response includes statistics about item collections, if any, that were modified + * during the operation are returned in the response. If set to NONE (the + * default), no statistics are returned.

+ * @public + */ + ReturnItemCollectionMetrics?: ReturnItemCollectionMetrics | undefined; + /** + *

This is a legacy parameter. Use ConditionExpression instead. For more + * information, see ConditionalOperator in the Amazon DynamoDB Developer + * Guide.

+ * @public + */ + ConditionalOperator?: ConditionalOperator | undefined; + /** + *

A condition that must be satisfied in order for a conditional PutItem + * operation to succeed.

+ *

An expression can contain any of the following:

+ *
    + *
  • + *

    Functions: attribute_exists | attribute_not_exists | attribute_type | + * contains | begins_with | size + *

    + *

    These function names are case-sensitive.

    + *
  • + *
  • + *

    Comparison operators: = | <> | + * < | > | <= | >= | + * BETWEEN | IN + *

    + *
  • + *
  • + *

    Logical operators: AND | OR | NOT + *

    + *
  • + *
+ *

For more information on condition expressions, see Condition Expressions in the Amazon DynamoDB Developer + * Guide.

+ * @public + */ + ConditionExpression?: string | undefined; + /** + *

One or more substitution tokens for attribute names in an expression. The following + * are some use cases for using ExpressionAttributeNames:

+ *
    + *
  • + *

    To access an attribute whose name conflicts with a DynamoDB reserved + * word.

    + *
  • + *
  • + *

    To create a placeholder for repeating occurrences of an attribute name in an + * expression.

    + *
  • + *
  • + *

    To prevent special characters in an attribute name from being misinterpreted + * in an expression.

    + *
  • + *
+ *

Use the # character in an expression to dereference + * an attribute name. For example, consider the following attribute name:

+ *
    + *
  • + *

    + * Percentile + *

    + *
  • + *
+ *

The name of this attribute conflicts with a reserved word, so it cannot be used + * directly in an expression. (For the complete list of reserved words, see Reserved Words in the Amazon DynamoDB Developer + * Guide). To work around this, you could specify the following for + * ExpressionAttributeNames:

+ *
    + *
  • + *

    + * \{"#P":"Percentile"\} + *

    + *
  • + *
+ *

You could then use this substitution in an expression, as in this example:

+ *
    + *
  • + *

    + * #P = :val + *

    + *
  • + *
+ * + *

Tokens that begin with the : character are + * expression attribute values, which are placeholders for the + * actual value at runtime.

+ *
+ *

For more information on expression attribute names, see Specifying Item Attributes in the Amazon DynamoDB Developer + * Guide.

+ * @public + */ + ExpressionAttributeNames?: Record | undefined; + /** + *

One or more values that can be substituted in an expression.

+ *

Use the : (colon) character in an expression to + * dereference an attribute value. For example, suppose that you wanted to check whether + * the value of the ProductStatus attribute was one of the following:

+ *

+ * Available | Backordered | Discontinued + *

+ *

You would first need to specify ExpressionAttributeValues as + * follows:

+ *

+ * \{ ":avail":\{"S":"Available"\}, ":back":\{"S":"Backordered"\}, + * ":disc":\{"S":"Discontinued"\} \} + *

+ *

You could then use these values in an expression, such as this:

+ *

+ * ProductStatus IN (:avail, :back, :disc) + *

+ *

For more information on expression attribute values, see Condition Expressions in the Amazon DynamoDB Developer + * Guide.

+ * @public + */ + ExpressionAttributeValues?: Record | undefined; + /** + *

An optional parameter that returns the item attributes for a PutItem + * operation that failed a condition check.

+ *

There is no additional cost associated with requesting a return value aside from the + * small network and processing overhead of receiving a larger response. No read capacity + * units are consumed.

+ * @public + */ + ReturnValuesOnConditionCheckFailure?: ReturnValuesOnConditionCheckFailure | undefined; +} +/** + *

Represents the input of a Query operation.

+ * @public + */ +export interface QueryInput { + /** + *

The name of the table containing the requested items. You can also provide the + * Amazon Resource Name (ARN) of the table in this parameter.

+ * @public + */ + TableName: string | undefined; + /** + *

The name of an index to query. This index can be any local secondary index or global + * secondary index on the table. Note that if you use the IndexName parameter, + * you must also provide TableName. + *

+ * @public + */ + IndexName?: string | undefined; + /** + *

The attributes to be returned in the result. You can retrieve all item attributes, + * specific item attributes, the count of matching items, or in the case of an index, some + * or all of the attributes projected into the index.

+ *
    + *
  • + *

    + * ALL_ATTRIBUTES - Returns all of the item attributes from the + * specified table or index. If you query a local secondary index, then for each + * matching item in the index, DynamoDB fetches the entire item from the parent + * table. If the index is configured to project all item attributes, then all of + * the data can be obtained from the local secondary index, and no fetching is + * required.

    + *
  • + *
  • + *

    + * ALL_PROJECTED_ATTRIBUTES - Allowed only when querying an index. + * Retrieves all attributes that have been projected into the index. If the index + * is configured to project all attributes, this return value is equivalent to + * specifying ALL_ATTRIBUTES.

    + *
  • + *
  • + *

    + * COUNT - Returns the number of matching items, rather than the + * matching items themselves. Note that this uses the same quantity of read + * capacity units as getting the items, and is subject to the same item size + * calculations.

    + *
  • + *
  • + *

    + * SPECIFIC_ATTRIBUTES - Returns only the attributes listed in + * ProjectionExpression. This return value is equivalent to + * specifying ProjectionExpression without specifying any value for + * Select.

    + *

    If you query or scan a local secondary index and request only attributes that + * are projected into that index, the operation will read only the index and not + * the table. If any of the requested attributes are not projected into the local + * secondary index, DynamoDB fetches each of these attributes from the parent + * table. This extra fetching incurs additional throughput cost and latency.

    + *

    If you query or scan a global secondary index, you can only request attributes + * that are projected into the index. Global secondary index queries cannot fetch + * attributes from the parent table.

    + *
  • + *
+ *

If neither Select nor ProjectionExpression are specified, + * DynamoDB defaults to ALL_ATTRIBUTES when accessing a table, and + * ALL_PROJECTED_ATTRIBUTES when accessing an index. You cannot use both + * Select and ProjectionExpression together in a single + * request, unless the value for Select is SPECIFIC_ATTRIBUTES. + * (This usage is equivalent to specifying ProjectionExpression without any + * value for Select.)

+ * + *

If you use the ProjectionExpression parameter, then the value for + * Select can only be SPECIFIC_ATTRIBUTES. Any other + * value for Select will return an error.

+ *
+ * @public + */ + Select?: Select | undefined; + /** + *

This is a legacy parameter. Use ProjectionExpression instead. For more + * information, see AttributesToGet in the Amazon DynamoDB Developer + * Guide.

+ * @public + */ + AttributesToGet?: string[] | undefined; + /** + *

The maximum number of items to evaluate (not necessarily the number of matching + * items). If DynamoDB processes the number of items up to the limit while processing the + * results, it stops the operation and returns the matching values up to that point, and a + * key in LastEvaluatedKey to apply in a subsequent operation, so that you can + * pick up where you left off. Also, if the processed dataset size exceeds 1 MB before + * DynamoDB reaches this limit, it stops the operation and returns the matching values up + * to the limit, and a key in LastEvaluatedKey to apply in a subsequent + * operation to continue the operation. For more information, see Query and Scan in the Amazon DynamoDB Developer + * Guide.

+ * @public + */ + Limit?: number | undefined; + /** + *

Determines the read consistency model: If set to true, then the operation + * uses strongly consistent reads; otherwise, the operation uses eventually consistent + * reads.

+ *

Strongly consistent reads are not supported on global secondary indexes. If you query + * a global secondary index with ConsistentRead set to true, you + * will receive a ValidationException.

+ * @public + */ + ConsistentRead?: boolean | undefined; + /** + *

This is a legacy parameter. Use KeyConditionExpression instead. For more + * information, see KeyConditions in the Amazon DynamoDB Developer + * Guide.

+ * @public + */ + KeyConditions?: Record | undefined; + /** + *

This is a legacy parameter. Use FilterExpression instead. For more + * information, see QueryFilter in the Amazon DynamoDB Developer + * Guide.

+ * @public + */ + QueryFilter?: Record | undefined; + /** + *

This is a legacy parameter. Use FilterExpression instead. For more + * information, see ConditionalOperator in the Amazon DynamoDB Developer + * Guide.

+ * @public + */ + ConditionalOperator?: ConditionalOperator | undefined; + /** + *

Specifies the order for index traversal: If true (default), the traversal + * is performed in ascending order; if false, the traversal is performed in + * descending order.

+ *

Items with the same partition key value are stored in sorted order by sort key. If the + * sort key data type is Number, the results are stored in numeric order. For type String, + * the results are stored in order of UTF-8 bytes. For type Binary, DynamoDB treats each + * byte of the binary data as unsigned.

+ *

If ScanIndexForward is true, DynamoDB returns the results in + * the order in which they are stored (by sort key value). This is the default behavior. If + * ScanIndexForward is false, DynamoDB reads the results in + * reverse order by sort key value, and then returns the results to the client.

+ * @public + */ + ScanIndexForward?: boolean | undefined; + /** + *

The primary key of the first item that this operation will evaluate. Use the value + * that was returned for LastEvaluatedKey in the previous operation.

+ *

The data type for ExclusiveStartKey must be String, Number, or Binary. No + * set data types are allowed.

+ * @public + */ + ExclusiveStartKey?: Record | undefined; + /** + *

Determines the level of detail about either provisioned or on-demand throughput + * consumption that is returned in the response:

+ *
    + *
  • + *

    + * INDEXES - The response includes the aggregate + * ConsumedCapacity for the operation, together with + * ConsumedCapacity for each table and secondary index that was + * accessed.

    + *

    Note that some operations, such as GetItem and + * BatchGetItem, do not access any indexes at all. In these cases, + * specifying INDEXES will only return ConsumedCapacity + * information for table(s).

    + *
  • + *
  • + *

    + * TOTAL - The response includes only the aggregate + * ConsumedCapacity for the operation.

    + *
  • + *
  • + *

    + * NONE - No ConsumedCapacity details are included in the + * response.

    + *
  • + *
+ * @public + */ + ReturnConsumedCapacity?: ReturnConsumedCapacity | undefined; + /** + *

A string that identifies one or more attributes to retrieve from the table. These + * attributes can include scalars, sets, or elements of a JSON document. The attributes in + * the expression must be separated by commas.

+ *

If no attribute names are specified, then all attributes will be returned. If any of + * the requested attributes are not found, they will not appear in the result.

+ *

For more information, see Accessing Item Attributes in the Amazon DynamoDB Developer + * Guide.

+ * @public + */ + ProjectionExpression?: string | undefined; + /** + *

A string that contains conditions that DynamoDB applies after the Query + * operation, but before the data is returned to you. Items that do not satisfy the + * FilterExpression criteria are not returned.

+ *

A FilterExpression does not allow key attributes. You cannot define a + * filter expression based on a partition key or a sort key.

+ * + *

A FilterExpression is applied after the items have already been read; + * the process of filtering does not consume any additional read capacity units.

+ *
+ *

For more information, see Filter + * Expressions in the Amazon DynamoDB Developer + * Guide.

+ * @public + */ + FilterExpression?: string | undefined; + /** + *

The condition that specifies the key values for items to be retrieved by the + * Query action.

+ *

The condition must perform an equality test on a single partition key value.

+ *

The condition can optionally perform one of several comparison tests on a single sort + * key value. This allows Query to retrieve one item with a given partition + * key value and sort key value, or several items that have the same partition key value + * but different sort key values.

+ *

The partition key equality test is required, and must be specified in the following + * format:

+ *

+ * partitionKeyName + * = + * :partitionkeyval + *

+ *

If you also want to provide a condition for the sort key, it must be combined using + * AND with the condition for the sort key. Following is an example, using + * the = comparison operator for the sort key:

+ *

+ * partitionKeyName + * = + * :partitionkeyval + * AND + * sortKeyName + * = + * :sortkeyval + *

+ *

Valid comparisons for the sort key condition are as follows:

+ *
    + *
  • + *

    + * sortKeyName + * = + * :sortkeyval - true if the sort key value is equal to + * :sortkeyval.

    + *
  • + *
  • + *

    + * sortKeyName + * < + * :sortkeyval - true if the sort key value is less than + * :sortkeyval.

    + *
  • + *
  • + *

    + * sortKeyName + * <= + * :sortkeyval - true if the sort key value is less than or equal to + * :sortkeyval.

    + *
  • + *
  • + *

    + * sortKeyName + * > + * :sortkeyval - true if the sort key value is greater than + * :sortkeyval.

    + *
  • + *
  • + *

    + * sortKeyName + * >= + * :sortkeyval - true if the sort key value is greater than or equal + * to :sortkeyval.

    + *
  • + *
  • + *

    + * sortKeyName + * BETWEEN + * :sortkeyval1 + * AND + * :sortkeyval2 - true if the sort key value is greater than or equal + * to :sortkeyval1, and less than or equal to + * :sortkeyval2.

    + *
  • + *
  • + *

    + * begins_with ( + * sortKeyName, :sortkeyval + * ) - true if the sort key value begins with a particular operand. + * (You cannot use this function with a sort key that is of type Number.) Note that + * the function name begins_with is case-sensitive.

    + *
  • + *
+ *

Use the ExpressionAttributeValues parameter to replace tokens such as + * :partitionval and :sortval with actual values at + * runtime.

+ *

You can optionally use the ExpressionAttributeNames parameter to replace + * the names of the partition key and sort key with placeholder tokens. This option might + * be necessary if an attribute name conflicts with a DynamoDB reserved word. For example, + * the following KeyConditionExpression parameter causes an error because + * Size is a reserved word:

+ *
    + *
  • + *

    + * Size = :myval + *

    + *
  • + *
+ *

To work around this, define a placeholder (such a #S) to represent the + * attribute name Size. KeyConditionExpression then is as + * follows:

+ *
    + *
  • + *

    + * #S = :myval + *

    + *
  • + *
+ *

For a list of reserved words, see Reserved Words + * in the Amazon DynamoDB Developer Guide.

+ *

For more information on ExpressionAttributeNames and + * ExpressionAttributeValues, see Using + * Placeholders for Attribute Names and Values in the Amazon DynamoDB + * Developer Guide.

+ * @public + */ + KeyConditionExpression?: string | undefined; + /** + *

One or more substitution tokens for attribute names in an expression. The following + * are some use cases for using ExpressionAttributeNames:

+ *
    + *
  • + *

    To access an attribute whose name conflicts with a DynamoDB reserved + * word.

    + *
  • + *
  • + *

    To create a placeholder for repeating occurrences of an attribute name in an + * expression.

    + *
  • + *
  • + *

    To prevent special characters in an attribute name from being misinterpreted + * in an expression.

    + *
  • + *
+ *

Use the # character in an expression to dereference + * an attribute name. For example, consider the following attribute name:

+ *
    + *
  • + *

    + * Percentile + *

    + *
  • + *
+ *

The name of this attribute conflicts with a reserved word, so it cannot be used + * directly in an expression. (For the complete list of reserved words, see Reserved Words in the Amazon DynamoDB Developer + * Guide). To work around this, you could specify the following for + * ExpressionAttributeNames:

+ *
    + *
  • + *

    + * \{"#P":"Percentile"\} + *

    + *
  • + *
+ *

You could then use this substitution in an expression, as in this example:

+ *
    + *
  • + *

    + * #P = :val + *

    + *
  • + *
+ * + *

Tokens that begin with the : character are + * expression attribute values, which are placeholders for the + * actual value at runtime.

+ *
+ *

For more information on expression attribute names, see Specifying Item Attributes in the Amazon DynamoDB Developer + * Guide.

+ * @public + */ + ExpressionAttributeNames?: Record | undefined; + /** + *

One or more values that can be substituted in an expression.

+ *

Use the : (colon) character in an expression to + * dereference an attribute value. For example, suppose that you wanted to check whether + * the value of the ProductStatus attribute was one of the following:

+ *

+ * Available | Backordered | Discontinued + *

+ *

You would first need to specify ExpressionAttributeValues as + * follows:

+ *

+ * \{ ":avail":\{"S":"Available"\}, ":back":\{"S":"Backordered"\}, + * ":disc":\{"S":"Discontinued"\} \} + *

+ *

You could then use these values in an expression, such as this:

+ *

+ * ProductStatus IN (:avail, :back, :disc) + *

+ *

For more information on expression attribute values, see Specifying Conditions in the Amazon DynamoDB Developer + * Guide.

+ * @public + */ + ExpressionAttributeValues?: Record | undefined; +} +/** + *

Represents the output of a BatchWriteItem operation.

+ * @public + */ +export interface BatchWriteItemOutput { + /** + *

A map of tables and requests against those tables that were not processed. The + * UnprocessedItems value is in the same form as + * RequestItems, so you can provide this value directly to a subsequent + * BatchWriteItem operation. For more information, see + * RequestItems in the Request Parameters section.

+ *

Each UnprocessedItems entry consists of a table name or table ARN + * and, for that table, a list of operations to perform (DeleteRequest or + * PutRequest).

+ *
    + *
  • + *

    + * DeleteRequest - Perform a DeleteItem operation on the + * specified item. The item to be deleted is identified by a Key + * subelement:

    + *
      + *
    • + *

      + * Key - A map of primary key attribute values that uniquely + * identify the item. Each entry in this map consists of an attribute name + * and an attribute value.

      + *
    • + *
    + *
  • + *
  • + *

    + * PutRequest - Perform a PutItem operation on the + * specified item. The item to be put is identified by an Item + * subelement:

    + *
      + *
    • + *

      + * Item - A map of attributes and their values. Each entry in + * this map consists of an attribute name and an attribute value. Attribute + * values must not be null; string and binary type attributes must have + * lengths greater than zero; and set type attributes must not be empty. + * Requests that contain empty values will be rejected with a + * ValidationException exception.

      + *

      If you specify any attributes that are part of an index key, then the + * data types for those attributes must match those of the schema in the + * table's attribute definition.

      + *
    • + *
    + *
  • + *
+ *

If there are no unprocessed items remaining, the response contains an empty + * UnprocessedItems map.

+ * @public + */ + UnprocessedItems?: Record | undefined; + /** + *

A list of tables that were processed by BatchWriteItem and, for each + * table, information about any item collections that were affected by individual + * DeleteItem or PutItem operations.

+ *

Each entry consists of the following subelements:

+ *
    + *
  • + *

    + * ItemCollectionKey - The partition key value of the item collection. + * This is the same as the partition key value of the item.

    + *
  • + *
  • + *

    + * SizeEstimateRangeGB - An estimate of item collection size, + * expressed in GB. This is a two-element array containing a lower bound and an + * upper bound for the estimate. The estimate includes the size of all the items in + * the table, plus the size of all attributes projected into all of the local + * secondary indexes on the table. Use this estimate to measure whether a local + * secondary index is approaching its size limit.

    + *

    The estimate is subject to change over time; therefore, do not rely on the + * precision or accuracy of the estimate.

    + *
  • + *
+ * @public + */ + ItemCollectionMetrics?: Record | undefined; + /** + *

The capacity units consumed by the entire BatchWriteItem + * operation.

+ *

Each element consists of:

+ *
    + *
  • + *

    + * TableName - The table that consumed the provisioned + * throughput.

    + *
  • + *
  • + *

    + * CapacityUnits - The total number of capacity units consumed.

    + *
  • + *
+ * @public + */ + ConsumedCapacity?: ConsumedCapacity[] | undefined; +} +/** + *

Represents the input of an UpdateItem operation.

+ * @public + */ +export interface UpdateItemInput { + /** + *

The name of the table containing the item to update. You can also provide the + * Amazon Resource Name (ARN) of the table in this parameter.

+ * @public + */ + TableName: string | undefined; + /** + *

The primary key of the item to be updated. Each element consists of an attribute name + * and a value for that attribute.

+ *

For the primary key, you must provide all of the attributes. For example, with a + * simple primary key, you only need to provide a value for the partition key. For a + * composite primary key, you must provide values for both the partition key and the sort + * key.

+ * @public + */ + Key: Record | undefined; + /** + *

This is a legacy parameter. Use UpdateExpression instead. For more + * information, see AttributeUpdates in the Amazon DynamoDB Developer + * Guide.

+ * @public + */ + AttributeUpdates?: Record | undefined; + /** + *

This is a legacy parameter. Use ConditionExpression instead. For more + * information, see Expected in the Amazon DynamoDB Developer + * Guide.

+ * @public + */ + Expected?: Record | undefined; + /** + *

This is a legacy parameter. Use ConditionExpression instead. For more + * information, see ConditionalOperator in the Amazon DynamoDB Developer + * Guide.

+ * @public + */ + ConditionalOperator?: ConditionalOperator | undefined; + /** + *

Use ReturnValues if you want to get the item attributes as they appear + * before or after they are successfully updated. For UpdateItem, the valid + * values are:

+ *
    + *
  • + *

    + * NONE - If ReturnValues is not specified, or if its + * value is NONE, then nothing is returned. (This setting is the + * default for ReturnValues.)

    + *
  • + *
  • + *

    + * ALL_OLD - Returns all of the attributes of the item, as they + * appeared before the UpdateItem operation.

    + *
  • + *
  • + *

    + * UPDATED_OLD - Returns only the updated attributes, as they appeared + * before the UpdateItem operation.

    + *
  • + *
  • + *

    + * ALL_NEW - Returns all of the attributes of the item, as they appear + * after the UpdateItem operation.

    + *
  • + *
  • + *

    + * UPDATED_NEW - Returns only the updated attributes, as they appear + * after the UpdateItem operation.

    + *
  • + *
+ *

There is no additional cost associated with requesting a return value aside from the + * small network and processing overhead of receiving a larger response. No read capacity + * units are consumed.

+ *

The values returned are strongly consistent.

+ * @public + */ + ReturnValues?: ReturnValue | undefined; + /** + *

Determines the level of detail about either provisioned or on-demand throughput + * consumption that is returned in the response:

+ *
    + *
  • + *

    + * INDEXES - The response includes the aggregate + * ConsumedCapacity for the operation, together with + * ConsumedCapacity for each table and secondary index that was + * accessed.

    + *

    Note that some operations, such as GetItem and + * BatchGetItem, do not access any indexes at all. In these cases, + * specifying INDEXES will only return ConsumedCapacity + * information for table(s).

    + *
  • + *
  • + *

    + * TOTAL - The response includes only the aggregate + * ConsumedCapacity for the operation.

    + *
  • + *
  • + *

    + * NONE - No ConsumedCapacity details are included in the + * response.

    + *
  • + *
+ * @public + */ + ReturnConsumedCapacity?: ReturnConsumedCapacity | undefined; + /** + *

Determines whether item collection metrics are returned. If set to SIZE, + * the response includes statistics about item collections, if any, that were modified + * during the operation are returned in the response. If set to NONE (the + * default), no statistics are returned.

+ * @public + */ + ReturnItemCollectionMetrics?: ReturnItemCollectionMetrics | undefined; + /** + *

An expression that defines one or more attributes to be updated, the action to be + * performed on them, and new values for them.

+ *

The following action values are available for UpdateExpression.

+ *
    + *
  • + *

    + * SET - Adds one or more attributes and values to an item. If any of + * these attributes already exist, they are replaced by the new values. You can + * also use SET to add or subtract from an attribute that is of type + * Number. For example: SET myNum = myNum + :val + *

    + *

    + * SET supports the following functions:

    + *
      + *
    • + *

      + * if_not_exists (path, operand) - if the item does not + * contain an attribute at the specified path, then + * if_not_exists evaluates to operand; otherwise, it + * evaluates to path. You can use this function to avoid overwriting an + * attribute that may already be present in the item.

      + *
    • + *
    • + *

      + * list_append (operand, operand) - evaluates to a list with a + * new element added to it. You can append the new element to the start or + * the end of the list by reversing the order of the operands.

      + *
    • + *
    + *

    These function names are case-sensitive.

    + *
  • + *
  • + *

    + * REMOVE - Removes one or more attributes from an item.

    + *
  • + *
  • + *

    + * ADD - Adds the specified value to the item, if the attribute does + * not already exist. If the attribute does exist, then the behavior of + * ADD depends on the data type of the attribute:

    + *
      + *
    • + *

      If the existing attribute is a number, and if Value is + * also a number, then Value is mathematically added to the + * existing attribute. If Value is a negative number, then it + * is subtracted from the existing attribute.

      + * + *

      If you use ADD to increment or decrement a number + * value for an item that doesn't exist before the update, DynamoDB + * uses 0 as the initial value.

      + *

      Similarly, if you use ADD for an existing item to + * increment or decrement an attribute value that doesn't exist before + * the update, DynamoDB uses 0 as the initial value. For + * example, suppose that the item you want to update doesn't have an + * attribute named itemcount, but you decide to + * ADD the number 3 to this attribute + * anyway. DynamoDB will create the itemcount attribute, + * set its initial value to 0, and finally add + * 3 to it. The result will be a new + * itemcount attribute in the item, with a value of + * 3.

      + *
      + *
    • + *
    • + *

      If the existing data type is a set and if Value is also a + * set, then Value is added to the existing set. For example, + * if the attribute value is the set [1,2], and the + * ADD action specified [3], then the final + * attribute value is [1,2,3]. An error occurs if an + * ADD action is specified for a set attribute and the + * attribute type specified does not match the existing set type.

      + *

      Both sets must have the same primitive data type. For example, if the + * existing data type is a set of strings, the Value must also + * be a set of strings.

      + *
    • + *
    + * + *

    The ADD action only supports Number and set data types. In + * addition, ADD can only be used on top-level attributes, not + * nested attributes.

    + *
    + *
  • + *
  • + *

    + * DELETE - Deletes an element from a set.

    + *

    If a set of values is specified, then those values are subtracted from the old + * set. For example, if the attribute value was the set [a,b,c] and + * the DELETE action specifies [a,c], then the final + * attribute value is [b]. Specifying an empty set is an error.

    + * + *

    The DELETE action only supports set data types. In addition, + * DELETE can only be used on top-level attributes, not nested + * attributes.

    + *
    + *
  • + *
+ *

You can have many actions in a single expression, such as the following: SET + * a=:value1, b=:value2 DELETE :value3, :value4, :value5 + *

+ *

For more information on update expressions, see Modifying + * Items and Attributes in the Amazon DynamoDB Developer + * Guide.

+ * @public + */ + UpdateExpression?: string | undefined; + /** + *

A condition that must be satisfied in order for a conditional update to + * succeed.

+ *

An expression can contain any of the following:

+ *
    + *
  • + *

    Functions: attribute_exists | attribute_not_exists | attribute_type | + * contains | begins_with | size + *

    + *

    These function names are case-sensitive.

    + *
  • + *
  • + *

    Comparison operators: = | <> | + * < | > | <= | >= | + * BETWEEN | IN + *

    + *
  • + *
  • + *

    Logical operators: AND | OR | NOT + *

    + *
  • + *
+ *

For more information about condition expressions, see Specifying Conditions in the Amazon DynamoDB Developer + * Guide.

+ * @public + */ + ConditionExpression?: string | undefined; + /** + *

One or more substitution tokens for attribute names in an expression. The following + * are some use cases for using ExpressionAttributeNames:

+ *
    + *
  • + *

    To access an attribute whose name conflicts with a DynamoDB reserved + * word.

    + *
  • + *
  • + *

    To create a placeholder for repeating occurrences of an attribute name in an + * expression.

    + *
  • + *
  • + *

    To prevent special characters in an attribute name from being misinterpreted + * in an expression.

    + *
  • + *
+ *

Use the # character in an expression to dereference + * an attribute name. For example, consider the following attribute name:

+ *
    + *
  • + *

    + * Percentile + *

    + *
  • + *
+ *

The name of this attribute conflicts with a reserved word, so it cannot be used + * directly in an expression. (For the complete list of reserved words, see Reserved Words in the Amazon DynamoDB Developer + * Guide.) To work around this, you could specify the following for + * ExpressionAttributeNames:

+ *
    + *
  • + *

    + * \{"#P":"Percentile"\} + *

    + *
  • + *
+ *

You could then use this substitution in an expression, as in this example:

+ *
    + *
  • + *

    + * #P = :val + *

    + *
  • + *
+ * + *

Tokens that begin with the : character are + * expression attribute values, which are placeholders for the + * actual value at runtime.

+ *
+ *

For more information about expression attribute names, see Specifying Item Attributes in the Amazon DynamoDB Developer + * Guide.

+ * @public + */ + ExpressionAttributeNames?: Record | undefined; + /** + *

One or more values that can be substituted in an expression.

+ *

Use the : (colon) character in an expression to + * dereference an attribute value. For example, suppose that you wanted to check whether + * the value of the ProductStatus attribute was one of the following:

+ *

+ * Available | Backordered | Discontinued + *

+ *

You would first need to specify ExpressionAttributeValues as + * follows:

+ *

+ * \{ ":avail":\{"S":"Available"\}, ":back":\{"S":"Backordered"\}, + * ":disc":\{"S":"Discontinued"\} \} + *

+ *

You could then use these values in an expression, such as this:

+ *

+ * ProductStatus IN (:avail, :back, :disc) + *

+ *

For more information on expression attribute values, see Condition Expressions in the Amazon DynamoDB Developer + * Guide.

+ * @public + */ + ExpressionAttributeValues?: Record | undefined; + /** + *

An optional parameter that returns the item attributes for an UpdateItem + * operation that failed a condition check.

+ *

There is no additional cost associated with requesting a return value aside from the + * small network and processing overhead of receiving a larger response. No read capacity + * units are consumed.

+ * @public + */ + ReturnValuesOnConditionCheckFailure?: ReturnValuesOnConditionCheckFailure | undefined; +} +/** + *

A list of requests that can perform update, put, delete, or check operations on + * multiple items in one or more tables atomically.

+ * @public + */ +export interface TransactWriteItem { + /** + *

A request to perform a check item operation.

+ * @public + */ + ConditionCheck?: ConditionCheck | undefined; + /** + *

A request to perform a PutItem operation.

+ * @public + */ + Put?: Put | undefined; + /** + *

A request to perform a DeleteItem operation.

+ * @public + */ + Delete?: Delete | undefined; + /** + *

A request to perform an UpdateItem operation.

+ * @public + */ + Update?: Update | undefined; +} +/** + * @public + */ +export interface TransactWriteItemsInput { + /** + *

An ordered array of up to 100 TransactWriteItem objects, each of which + * contains a ConditionCheck, Put, Update, or + * Delete object. These can operate on items in different tables, but the + * tables must reside in the same Amazon Web Services account and Region, and no two of them + * can operate on the same item.

+ * @public + */ + TransactItems: TransactWriteItem[] | undefined; + /** + *

Determines the level of detail about either provisioned or on-demand throughput + * consumption that is returned in the response:

+ *
    + *
  • + *

    + * INDEXES - The response includes the aggregate + * ConsumedCapacity for the operation, together with + * ConsumedCapacity for each table and secondary index that was + * accessed.

    + *

    Note that some operations, such as GetItem and + * BatchGetItem, do not access any indexes at all. In these cases, + * specifying INDEXES will only return ConsumedCapacity + * information for table(s).

    + *
  • + *
  • + *

    + * TOTAL - The response includes only the aggregate + * ConsumedCapacity for the operation.

    + *
  • + *
  • + *

    + * NONE - No ConsumedCapacity details are included in the + * response.

    + *
  • + *
+ * @public + */ + ReturnConsumedCapacity?: ReturnConsumedCapacity | undefined; + /** + *

Determines whether item collection metrics are returned. If set to SIZE, + * the response includes statistics about item collections (if any), that were modified + * during the operation and are returned in the response. If set to NONE (the + * default), no statistics are returned.

+ * @public + */ + ReturnItemCollectionMetrics?: ReturnItemCollectionMetrics | undefined; + /** + *

Providing a ClientRequestToken makes the call to + * TransactWriteItems idempotent, meaning that multiple identical calls + * have the same effect as one single call.

+ *

Although multiple identical calls using the same client request token produce the same + * result on the server (no side effects), the responses to the calls might not be the + * same. If the ReturnConsumedCapacity parameter is set, then the initial + * TransactWriteItems call returns the amount of write capacity units + * consumed in making the changes. Subsequent TransactWriteItems calls with + * the same client token return the number of read capacity units consumed in reading the + * item.

+ *

A client request token is valid for 10 minutes after the first request that uses it is + * completed. After 10 minutes, any request with the same client token is treated as a new + * request. Do not resubmit the same request with the same client token for more than 10 + * minutes, or the result might not be idempotent.

+ *

If you submit a request with the same client token but a change in other parameters + * within the 10-minute idempotency window, DynamoDB returns an + * IdempotentParameterMismatch exception.

+ * @public + */ + ClientRequestToken?: string | undefined; +} diff --git a/amplify/functions/downloadDocument/node_modules/@aws-sdk/client-dynamodb/dist-types/pagination/Interfaces.d.ts b/amplify/functions/downloadDocument/node_modules/@aws-sdk/client-dynamodb/dist-types/pagination/Interfaces.d.ts new file mode 100644 index 0000000..b27919e --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@aws-sdk/client-dynamodb/dist-types/pagination/Interfaces.d.ts @@ -0,0 +1,8 @@ +import { PaginationConfiguration } from "@smithy/types"; +import { DynamoDBClient } from "../DynamoDBClient"; +/** + * @public + */ +export interface DynamoDBPaginationConfiguration extends PaginationConfiguration { + client: DynamoDBClient; +} diff --git a/amplify/functions/downloadDocument/node_modules/@aws-sdk/client-dynamodb/dist-types/pagination/ListContributorInsightsPaginator.d.ts b/amplify/functions/downloadDocument/node_modules/@aws-sdk/client-dynamodb/dist-types/pagination/ListContributorInsightsPaginator.d.ts new file mode 100644 index 0000000..2ca65b1 --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@aws-sdk/client-dynamodb/dist-types/pagination/ListContributorInsightsPaginator.d.ts @@ -0,0 +1,7 @@ +import { Paginator } from "@smithy/types"; +import { ListContributorInsightsCommandInput, ListContributorInsightsCommandOutput } from "../commands/ListContributorInsightsCommand"; +import { DynamoDBPaginationConfiguration } from "./Interfaces"; +/** + * @public + */ +export declare const paginateListContributorInsights: (config: DynamoDBPaginationConfiguration, input: ListContributorInsightsCommandInput, ...rest: any[]) => Paginator; diff --git a/amplify/functions/downloadDocument/node_modules/@aws-sdk/client-dynamodb/dist-types/pagination/ListExportsPaginator.d.ts b/amplify/functions/downloadDocument/node_modules/@aws-sdk/client-dynamodb/dist-types/pagination/ListExportsPaginator.d.ts new file mode 100644 index 0000000..304892a --- /dev/null +++ 
b/amplify/functions/downloadDocument/node_modules/@aws-sdk/client-dynamodb/dist-types/pagination/ListExportsPaginator.d.ts @@ -0,0 +1,7 @@ +import { Paginator } from "@smithy/types"; +import { ListExportsCommandInput, ListExportsCommandOutput } from "../commands/ListExportsCommand"; +import { DynamoDBPaginationConfiguration } from "./Interfaces"; +/** + * @public + */ +export declare const paginateListExports: (config: DynamoDBPaginationConfiguration, input: ListExportsCommandInput, ...rest: any[]) => Paginator; diff --git a/amplify/functions/downloadDocument/node_modules/@aws-sdk/client-dynamodb/dist-types/pagination/ListImportsPaginator.d.ts b/amplify/functions/downloadDocument/node_modules/@aws-sdk/client-dynamodb/dist-types/pagination/ListImportsPaginator.d.ts new file mode 100644 index 0000000..0a2639c --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@aws-sdk/client-dynamodb/dist-types/pagination/ListImportsPaginator.d.ts @@ -0,0 +1,7 @@ +import { Paginator } from "@smithy/types"; +import { ListImportsCommandInput, ListImportsCommandOutput } from "../commands/ListImportsCommand"; +import { DynamoDBPaginationConfiguration } from "./Interfaces"; +/** + * @public + */ +export declare const paginateListImports: (config: DynamoDBPaginationConfiguration, input: ListImportsCommandInput, ...rest: any[]) => Paginator; diff --git a/amplify/functions/downloadDocument/node_modules/@aws-sdk/client-dynamodb/dist-types/pagination/ListTablesPaginator.d.ts b/amplify/functions/downloadDocument/node_modules/@aws-sdk/client-dynamodb/dist-types/pagination/ListTablesPaginator.d.ts new file mode 100644 index 0000000..38cff29 --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@aws-sdk/client-dynamodb/dist-types/pagination/ListTablesPaginator.d.ts @@ -0,0 +1,7 @@ +import { Paginator } from "@smithy/types"; +import { ListTablesCommandInput, ListTablesCommandOutput } from "../commands/ListTablesCommand"; +import { DynamoDBPaginationConfiguration } 
from "./Interfaces"; +/** + * @public + */ +export declare const paginateListTables: (config: DynamoDBPaginationConfiguration, input: ListTablesCommandInput, ...rest: any[]) => Paginator; diff --git a/amplify/functions/downloadDocument/node_modules/@aws-sdk/client-dynamodb/dist-types/pagination/QueryPaginator.d.ts b/amplify/functions/downloadDocument/node_modules/@aws-sdk/client-dynamodb/dist-types/pagination/QueryPaginator.d.ts new file mode 100644 index 0000000..d6e9c31 --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@aws-sdk/client-dynamodb/dist-types/pagination/QueryPaginator.d.ts @@ -0,0 +1,7 @@ +import { Paginator } from "@smithy/types"; +import { QueryCommandInput, QueryCommandOutput } from "../commands/QueryCommand"; +import { DynamoDBPaginationConfiguration } from "./Interfaces"; +/** + * @public + */ +export declare const paginateQuery: (config: DynamoDBPaginationConfiguration, input: QueryCommandInput, ...rest: any[]) => Paginator; diff --git a/amplify/functions/downloadDocument/node_modules/@aws-sdk/client-dynamodb/dist-types/pagination/ScanPaginator.d.ts b/amplify/functions/downloadDocument/node_modules/@aws-sdk/client-dynamodb/dist-types/pagination/ScanPaginator.d.ts new file mode 100644 index 0000000..4902f31 --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@aws-sdk/client-dynamodb/dist-types/pagination/ScanPaginator.d.ts @@ -0,0 +1,7 @@ +import { Paginator } from "@smithy/types"; +import { ScanCommandInput, ScanCommandOutput } from "../commands/ScanCommand"; +import { DynamoDBPaginationConfiguration } from "./Interfaces"; +/** + * @public + */ +export declare const paginateScan: (config: DynamoDBPaginationConfiguration, input: ScanCommandInput, ...rest: any[]) => Paginator; diff --git a/amplify/functions/downloadDocument/node_modules/@aws-sdk/client-dynamodb/dist-types/pagination/index.d.ts b/amplify/functions/downloadDocument/node_modules/@aws-sdk/client-dynamodb/dist-types/pagination/index.d.ts new file mode 
100644 index 0000000..a6dfcd0 --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@aws-sdk/client-dynamodb/dist-types/pagination/index.d.ts @@ -0,0 +1,7 @@ +export * from "./Interfaces"; +export * from "./ListContributorInsightsPaginator"; +export * from "./ListExportsPaginator"; +export * from "./ListImportsPaginator"; +export * from "./ListTablesPaginator"; +export * from "./QueryPaginator"; +export * from "./ScanPaginator"; diff --git a/amplify/functions/downloadDocument/node_modules/@aws-sdk/client-dynamodb/dist-types/protocols/Aws_json1_0.d.ts b/amplify/functions/downloadDocument/node_modules/@aws-sdk/client-dynamodb/dist-types/protocols/Aws_json1_0.d.ts new file mode 100644 index 0000000..963dc98 --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@aws-sdk/client-dynamodb/dist-types/protocols/Aws_json1_0.d.ts @@ -0,0 +1,515 @@ +import { HttpRequest as __HttpRequest, HttpResponse as __HttpResponse } from "@smithy/protocol-http"; +import { SerdeContext as __SerdeContext } from "@smithy/types"; +import { BatchExecuteStatementCommandInput, BatchExecuteStatementCommandOutput } from "../commands/BatchExecuteStatementCommand"; +import { BatchGetItemCommandInput, BatchGetItemCommandOutput } from "../commands/BatchGetItemCommand"; +import { BatchWriteItemCommandInput, BatchWriteItemCommandOutput } from "../commands/BatchWriteItemCommand"; +import { CreateBackupCommandInput, CreateBackupCommandOutput } from "../commands/CreateBackupCommand"; +import { CreateGlobalTableCommandInput, CreateGlobalTableCommandOutput } from "../commands/CreateGlobalTableCommand"; +import { CreateTableCommandInput, CreateTableCommandOutput } from "../commands/CreateTableCommand"; +import { DeleteBackupCommandInput, DeleteBackupCommandOutput } from "../commands/DeleteBackupCommand"; +import { DeleteItemCommandInput, DeleteItemCommandOutput } from "../commands/DeleteItemCommand"; +import { DeleteResourcePolicyCommandInput, DeleteResourcePolicyCommandOutput } 
from "../commands/DeleteResourcePolicyCommand"; +import { DeleteTableCommandInput, DeleteTableCommandOutput } from "../commands/DeleteTableCommand"; +import { DescribeBackupCommandInput, DescribeBackupCommandOutput } from "../commands/DescribeBackupCommand"; +import { DescribeContinuousBackupsCommandInput, DescribeContinuousBackupsCommandOutput } from "../commands/DescribeContinuousBackupsCommand"; +import { DescribeContributorInsightsCommandInput, DescribeContributorInsightsCommandOutput } from "../commands/DescribeContributorInsightsCommand"; +import { DescribeEndpointsCommandInput, DescribeEndpointsCommandOutput } from "../commands/DescribeEndpointsCommand"; +import { DescribeExportCommandInput, DescribeExportCommandOutput } from "../commands/DescribeExportCommand"; +import { DescribeGlobalTableCommandInput, DescribeGlobalTableCommandOutput } from "../commands/DescribeGlobalTableCommand"; +import { DescribeGlobalTableSettingsCommandInput, DescribeGlobalTableSettingsCommandOutput } from "../commands/DescribeGlobalTableSettingsCommand"; +import { DescribeImportCommandInput, DescribeImportCommandOutput } from "../commands/DescribeImportCommand"; +import { DescribeKinesisStreamingDestinationCommandInput, DescribeKinesisStreamingDestinationCommandOutput } from "../commands/DescribeKinesisStreamingDestinationCommand"; +import { DescribeLimitsCommandInput, DescribeLimitsCommandOutput } from "../commands/DescribeLimitsCommand"; +import { DescribeTableCommandInput, DescribeTableCommandOutput } from "../commands/DescribeTableCommand"; +import { DescribeTableReplicaAutoScalingCommandInput, DescribeTableReplicaAutoScalingCommandOutput } from "../commands/DescribeTableReplicaAutoScalingCommand"; +import { DescribeTimeToLiveCommandInput, DescribeTimeToLiveCommandOutput } from "../commands/DescribeTimeToLiveCommand"; +import { DisableKinesisStreamingDestinationCommandInput, DisableKinesisStreamingDestinationCommandOutput } from 
"../commands/DisableKinesisStreamingDestinationCommand"; +import { EnableKinesisStreamingDestinationCommandInput, EnableKinesisStreamingDestinationCommandOutput } from "../commands/EnableKinesisStreamingDestinationCommand"; +import { ExecuteStatementCommandInput, ExecuteStatementCommandOutput } from "../commands/ExecuteStatementCommand"; +import { ExecuteTransactionCommandInput, ExecuteTransactionCommandOutput } from "../commands/ExecuteTransactionCommand"; +import { ExportTableToPointInTimeCommandInput, ExportTableToPointInTimeCommandOutput } from "../commands/ExportTableToPointInTimeCommand"; +import { GetItemCommandInput, GetItemCommandOutput } from "../commands/GetItemCommand"; +import { GetResourcePolicyCommandInput, GetResourcePolicyCommandOutput } from "../commands/GetResourcePolicyCommand"; +import { ImportTableCommandInput, ImportTableCommandOutput } from "../commands/ImportTableCommand"; +import { ListBackupsCommandInput, ListBackupsCommandOutput } from "../commands/ListBackupsCommand"; +import { ListContributorInsightsCommandInput, ListContributorInsightsCommandOutput } from "../commands/ListContributorInsightsCommand"; +import { ListExportsCommandInput, ListExportsCommandOutput } from "../commands/ListExportsCommand"; +import { ListGlobalTablesCommandInput, ListGlobalTablesCommandOutput } from "../commands/ListGlobalTablesCommand"; +import { ListImportsCommandInput, ListImportsCommandOutput } from "../commands/ListImportsCommand"; +import { ListTablesCommandInput, ListTablesCommandOutput } from "../commands/ListTablesCommand"; +import { ListTagsOfResourceCommandInput, ListTagsOfResourceCommandOutput } from "../commands/ListTagsOfResourceCommand"; +import { PutItemCommandInput, PutItemCommandOutput } from "../commands/PutItemCommand"; +import { PutResourcePolicyCommandInput, PutResourcePolicyCommandOutput } from "../commands/PutResourcePolicyCommand"; +import { QueryCommandInput, QueryCommandOutput } from "../commands/QueryCommand"; +import { 
RestoreTableFromBackupCommandInput, RestoreTableFromBackupCommandOutput } from "../commands/RestoreTableFromBackupCommand"; +import { RestoreTableToPointInTimeCommandInput, RestoreTableToPointInTimeCommandOutput } from "../commands/RestoreTableToPointInTimeCommand"; +import { ScanCommandInput, ScanCommandOutput } from "../commands/ScanCommand"; +import { TagResourceCommandInput, TagResourceCommandOutput } from "../commands/TagResourceCommand"; +import { TransactGetItemsCommandInput, TransactGetItemsCommandOutput } from "../commands/TransactGetItemsCommand"; +import { TransactWriteItemsCommandInput, TransactWriteItemsCommandOutput } from "../commands/TransactWriteItemsCommand"; +import { UntagResourceCommandInput, UntagResourceCommandOutput } from "../commands/UntagResourceCommand"; +import { UpdateContinuousBackupsCommandInput, UpdateContinuousBackupsCommandOutput } from "../commands/UpdateContinuousBackupsCommand"; +import { UpdateContributorInsightsCommandInput, UpdateContributorInsightsCommandOutput } from "../commands/UpdateContributorInsightsCommand"; +import { UpdateGlobalTableCommandInput, UpdateGlobalTableCommandOutput } from "../commands/UpdateGlobalTableCommand"; +import { UpdateGlobalTableSettingsCommandInput, UpdateGlobalTableSettingsCommandOutput } from "../commands/UpdateGlobalTableSettingsCommand"; +import { UpdateItemCommandInput, UpdateItemCommandOutput } from "../commands/UpdateItemCommand"; +import { UpdateKinesisStreamingDestinationCommandInput, UpdateKinesisStreamingDestinationCommandOutput } from "../commands/UpdateKinesisStreamingDestinationCommand"; +import { UpdateTableCommandInput, UpdateTableCommandOutput } from "../commands/UpdateTableCommand"; +import { UpdateTableReplicaAutoScalingCommandInput, UpdateTableReplicaAutoScalingCommandOutput } from "../commands/UpdateTableReplicaAutoScalingCommand"; +import { UpdateTimeToLiveCommandInput, UpdateTimeToLiveCommandOutput } from "../commands/UpdateTimeToLiveCommand"; +/** + * 
serializeAws_json1_0BatchExecuteStatementCommand + */ +export declare const se_BatchExecuteStatementCommand: (input: BatchExecuteStatementCommandInput, context: __SerdeContext) => Promise<__HttpRequest>; +/** + * serializeAws_json1_0BatchGetItemCommand + */ +export declare const se_BatchGetItemCommand: (input: BatchGetItemCommandInput, context: __SerdeContext) => Promise<__HttpRequest>; +/** + * serializeAws_json1_0BatchWriteItemCommand + */ +export declare const se_BatchWriteItemCommand: (input: BatchWriteItemCommandInput, context: __SerdeContext) => Promise<__HttpRequest>; +/** + * serializeAws_json1_0CreateBackupCommand + */ +export declare const se_CreateBackupCommand: (input: CreateBackupCommandInput, context: __SerdeContext) => Promise<__HttpRequest>; +/** + * serializeAws_json1_0CreateGlobalTableCommand + */ +export declare const se_CreateGlobalTableCommand: (input: CreateGlobalTableCommandInput, context: __SerdeContext) => Promise<__HttpRequest>; +/** + * serializeAws_json1_0CreateTableCommand + */ +export declare const se_CreateTableCommand: (input: CreateTableCommandInput, context: __SerdeContext) => Promise<__HttpRequest>; +/** + * serializeAws_json1_0DeleteBackupCommand + */ +export declare const se_DeleteBackupCommand: (input: DeleteBackupCommandInput, context: __SerdeContext) => Promise<__HttpRequest>; +/** + * serializeAws_json1_0DeleteItemCommand + */ +export declare const se_DeleteItemCommand: (input: DeleteItemCommandInput, context: __SerdeContext) => Promise<__HttpRequest>; +/** + * serializeAws_json1_0DeleteResourcePolicyCommand + */ +export declare const se_DeleteResourcePolicyCommand: (input: DeleteResourcePolicyCommandInput, context: __SerdeContext) => Promise<__HttpRequest>; +/** + * serializeAws_json1_0DeleteTableCommand + */ +export declare const se_DeleteTableCommand: (input: DeleteTableCommandInput, context: __SerdeContext) => Promise<__HttpRequest>; +/** + * serializeAws_json1_0DescribeBackupCommand + */ +export declare const 
se_DescribeBackupCommand: (input: DescribeBackupCommandInput, context: __SerdeContext) => Promise<__HttpRequest>; +/** + * serializeAws_json1_0DescribeContinuousBackupsCommand + */ +export declare const se_DescribeContinuousBackupsCommand: (input: DescribeContinuousBackupsCommandInput, context: __SerdeContext) => Promise<__HttpRequest>; +/** + * serializeAws_json1_0DescribeContributorInsightsCommand + */ +export declare const se_DescribeContributorInsightsCommand: (input: DescribeContributorInsightsCommandInput, context: __SerdeContext) => Promise<__HttpRequest>; +/** + * serializeAws_json1_0DescribeEndpointsCommand + */ +export declare const se_DescribeEndpointsCommand: (input: DescribeEndpointsCommandInput, context: __SerdeContext) => Promise<__HttpRequest>; +/** + * serializeAws_json1_0DescribeExportCommand + */ +export declare const se_DescribeExportCommand: (input: DescribeExportCommandInput, context: __SerdeContext) => Promise<__HttpRequest>; +/** + * serializeAws_json1_0DescribeGlobalTableCommand + */ +export declare const se_DescribeGlobalTableCommand: (input: DescribeGlobalTableCommandInput, context: __SerdeContext) => Promise<__HttpRequest>; +/** + * serializeAws_json1_0DescribeGlobalTableSettingsCommand + */ +export declare const se_DescribeGlobalTableSettingsCommand: (input: DescribeGlobalTableSettingsCommandInput, context: __SerdeContext) => Promise<__HttpRequest>; +/** + * serializeAws_json1_0DescribeImportCommand + */ +export declare const se_DescribeImportCommand: (input: DescribeImportCommandInput, context: __SerdeContext) => Promise<__HttpRequest>; +/** + * serializeAws_json1_0DescribeKinesisStreamingDestinationCommand + */ +export declare const se_DescribeKinesisStreamingDestinationCommand: (input: DescribeKinesisStreamingDestinationCommandInput, context: __SerdeContext) => Promise<__HttpRequest>; +/** + * serializeAws_json1_0DescribeLimitsCommand + */ +export declare const se_DescribeLimitsCommand: (input: DescribeLimitsCommandInput, context: 
__SerdeContext) => Promise<__HttpRequest>; +/** + * serializeAws_json1_0DescribeTableCommand + */ +export declare const se_DescribeTableCommand: (input: DescribeTableCommandInput, context: __SerdeContext) => Promise<__HttpRequest>; +/** + * serializeAws_json1_0DescribeTableReplicaAutoScalingCommand + */ +export declare const se_DescribeTableReplicaAutoScalingCommand: (input: DescribeTableReplicaAutoScalingCommandInput, context: __SerdeContext) => Promise<__HttpRequest>; +/** + * serializeAws_json1_0DescribeTimeToLiveCommand + */ +export declare const se_DescribeTimeToLiveCommand: (input: DescribeTimeToLiveCommandInput, context: __SerdeContext) => Promise<__HttpRequest>; +/** + * serializeAws_json1_0DisableKinesisStreamingDestinationCommand + */ +export declare const se_DisableKinesisStreamingDestinationCommand: (input: DisableKinesisStreamingDestinationCommandInput, context: __SerdeContext) => Promise<__HttpRequest>; +/** + * serializeAws_json1_0EnableKinesisStreamingDestinationCommand + */ +export declare const se_EnableKinesisStreamingDestinationCommand: (input: EnableKinesisStreamingDestinationCommandInput, context: __SerdeContext) => Promise<__HttpRequest>; +/** + * serializeAws_json1_0ExecuteStatementCommand + */ +export declare const se_ExecuteStatementCommand: (input: ExecuteStatementCommandInput, context: __SerdeContext) => Promise<__HttpRequest>; +/** + * serializeAws_json1_0ExecuteTransactionCommand + */ +export declare const se_ExecuteTransactionCommand: (input: ExecuteTransactionCommandInput, context: __SerdeContext) => Promise<__HttpRequest>; +/** + * serializeAws_json1_0ExportTableToPointInTimeCommand + */ +export declare const se_ExportTableToPointInTimeCommand: (input: ExportTableToPointInTimeCommandInput, context: __SerdeContext) => Promise<__HttpRequest>; +/** + * serializeAws_json1_0GetItemCommand + */ +export declare const se_GetItemCommand: (input: GetItemCommandInput, context: __SerdeContext) => Promise<__HttpRequest>; +/** + * 
serializeAws_json1_0GetResourcePolicyCommand + */ +export declare const se_GetResourcePolicyCommand: (input: GetResourcePolicyCommandInput, context: __SerdeContext) => Promise<__HttpRequest>; +/** + * serializeAws_json1_0ImportTableCommand + */ +export declare const se_ImportTableCommand: (input: ImportTableCommandInput, context: __SerdeContext) => Promise<__HttpRequest>; +/** + * serializeAws_json1_0ListBackupsCommand + */ +export declare const se_ListBackupsCommand: (input: ListBackupsCommandInput, context: __SerdeContext) => Promise<__HttpRequest>; +/** + * serializeAws_json1_0ListContributorInsightsCommand + */ +export declare const se_ListContributorInsightsCommand: (input: ListContributorInsightsCommandInput, context: __SerdeContext) => Promise<__HttpRequest>; +/** + * serializeAws_json1_0ListExportsCommand + */ +export declare const se_ListExportsCommand: (input: ListExportsCommandInput, context: __SerdeContext) => Promise<__HttpRequest>; +/** + * serializeAws_json1_0ListGlobalTablesCommand + */ +export declare const se_ListGlobalTablesCommand: (input: ListGlobalTablesCommandInput, context: __SerdeContext) => Promise<__HttpRequest>; +/** + * serializeAws_json1_0ListImportsCommand + */ +export declare const se_ListImportsCommand: (input: ListImportsCommandInput, context: __SerdeContext) => Promise<__HttpRequest>; +/** + * serializeAws_json1_0ListTablesCommand + */ +export declare const se_ListTablesCommand: (input: ListTablesCommandInput, context: __SerdeContext) => Promise<__HttpRequest>; +/** + * serializeAws_json1_0ListTagsOfResourceCommand + */ +export declare const se_ListTagsOfResourceCommand: (input: ListTagsOfResourceCommandInput, context: __SerdeContext) => Promise<__HttpRequest>; +/** + * serializeAws_json1_0PutItemCommand + */ +export declare const se_PutItemCommand: (input: PutItemCommandInput, context: __SerdeContext) => Promise<__HttpRequest>; +/** + * serializeAws_json1_0PutResourcePolicyCommand + */ +export declare const 
se_PutResourcePolicyCommand: (input: PutResourcePolicyCommandInput, context: __SerdeContext) => Promise<__HttpRequest>; +/** + * serializeAws_json1_0QueryCommand + */ +export declare const se_QueryCommand: (input: QueryCommandInput, context: __SerdeContext) => Promise<__HttpRequest>; +/** + * serializeAws_json1_0RestoreTableFromBackupCommand + */ +export declare const se_RestoreTableFromBackupCommand: (input: RestoreTableFromBackupCommandInput, context: __SerdeContext) => Promise<__HttpRequest>; +/** + * serializeAws_json1_0RestoreTableToPointInTimeCommand + */ +export declare const se_RestoreTableToPointInTimeCommand: (input: RestoreTableToPointInTimeCommandInput, context: __SerdeContext) => Promise<__HttpRequest>; +/** + * serializeAws_json1_0ScanCommand + */ +export declare const se_ScanCommand: (input: ScanCommandInput, context: __SerdeContext) => Promise<__HttpRequest>; +/** + * serializeAws_json1_0TagResourceCommand + */ +export declare const se_TagResourceCommand: (input: TagResourceCommandInput, context: __SerdeContext) => Promise<__HttpRequest>; +/** + * serializeAws_json1_0TransactGetItemsCommand + */ +export declare const se_TransactGetItemsCommand: (input: TransactGetItemsCommandInput, context: __SerdeContext) => Promise<__HttpRequest>; +/** + * serializeAws_json1_0TransactWriteItemsCommand + */ +export declare const se_TransactWriteItemsCommand: (input: TransactWriteItemsCommandInput, context: __SerdeContext) => Promise<__HttpRequest>; +/** + * serializeAws_json1_0UntagResourceCommand + */ +export declare const se_UntagResourceCommand: (input: UntagResourceCommandInput, context: __SerdeContext) => Promise<__HttpRequest>; +/** + * serializeAws_json1_0UpdateContinuousBackupsCommand + */ +export declare const se_UpdateContinuousBackupsCommand: (input: UpdateContinuousBackupsCommandInput, context: __SerdeContext) => Promise<__HttpRequest>; +/** + * serializeAws_json1_0UpdateContributorInsightsCommand + */ +export declare const 
se_UpdateContributorInsightsCommand: (input: UpdateContributorInsightsCommandInput, context: __SerdeContext) => Promise<__HttpRequest>; +/** + * serializeAws_json1_0UpdateGlobalTableCommand + */ +export declare const se_UpdateGlobalTableCommand: (input: UpdateGlobalTableCommandInput, context: __SerdeContext) => Promise<__HttpRequest>; +/** + * serializeAws_json1_0UpdateGlobalTableSettingsCommand + */ +export declare const se_UpdateGlobalTableSettingsCommand: (input: UpdateGlobalTableSettingsCommandInput, context: __SerdeContext) => Promise<__HttpRequest>; +/** + * serializeAws_json1_0UpdateItemCommand + */ +export declare const se_UpdateItemCommand: (input: UpdateItemCommandInput, context: __SerdeContext) => Promise<__HttpRequest>; +/** + * serializeAws_json1_0UpdateKinesisStreamingDestinationCommand + */ +export declare const se_UpdateKinesisStreamingDestinationCommand: (input: UpdateKinesisStreamingDestinationCommandInput, context: __SerdeContext) => Promise<__HttpRequest>; +/** + * serializeAws_json1_0UpdateTableCommand + */ +export declare const se_UpdateTableCommand: (input: UpdateTableCommandInput, context: __SerdeContext) => Promise<__HttpRequest>; +/** + * serializeAws_json1_0UpdateTableReplicaAutoScalingCommand + */ +export declare const se_UpdateTableReplicaAutoScalingCommand: (input: UpdateTableReplicaAutoScalingCommandInput, context: __SerdeContext) => Promise<__HttpRequest>; +/** + * serializeAws_json1_0UpdateTimeToLiveCommand + */ +export declare const se_UpdateTimeToLiveCommand: (input: UpdateTimeToLiveCommandInput, context: __SerdeContext) => Promise<__HttpRequest>; +/** + * deserializeAws_json1_0BatchExecuteStatementCommand + */ +export declare const de_BatchExecuteStatementCommand: (output: __HttpResponse, context: __SerdeContext) => Promise; +/** + * deserializeAws_json1_0BatchGetItemCommand + */ +export declare const de_BatchGetItemCommand: (output: __HttpResponse, context: __SerdeContext) => Promise; +/** + * 
deserializeAws_json1_0BatchWriteItemCommand + */ +export declare const de_BatchWriteItemCommand: (output: __HttpResponse, context: __SerdeContext) => Promise; +/** + * deserializeAws_json1_0CreateBackupCommand + */ +export declare const de_CreateBackupCommand: (output: __HttpResponse, context: __SerdeContext) => Promise; +/** + * deserializeAws_json1_0CreateGlobalTableCommand + */ +export declare const de_CreateGlobalTableCommand: (output: __HttpResponse, context: __SerdeContext) => Promise; +/** + * deserializeAws_json1_0CreateTableCommand + */ +export declare const de_CreateTableCommand: (output: __HttpResponse, context: __SerdeContext) => Promise; +/** + * deserializeAws_json1_0DeleteBackupCommand + */ +export declare const de_DeleteBackupCommand: (output: __HttpResponse, context: __SerdeContext) => Promise; +/** + * deserializeAws_json1_0DeleteItemCommand + */ +export declare const de_DeleteItemCommand: (output: __HttpResponse, context: __SerdeContext) => Promise; +/** + * deserializeAws_json1_0DeleteResourcePolicyCommand + */ +export declare const de_DeleteResourcePolicyCommand: (output: __HttpResponse, context: __SerdeContext) => Promise; +/** + * deserializeAws_json1_0DeleteTableCommand + */ +export declare const de_DeleteTableCommand: (output: __HttpResponse, context: __SerdeContext) => Promise; +/** + * deserializeAws_json1_0DescribeBackupCommand + */ +export declare const de_DescribeBackupCommand: (output: __HttpResponse, context: __SerdeContext) => Promise; +/** + * deserializeAws_json1_0DescribeContinuousBackupsCommand + */ +export declare const de_DescribeContinuousBackupsCommand: (output: __HttpResponse, context: __SerdeContext) => Promise; +/** + * deserializeAws_json1_0DescribeContributorInsightsCommand + */ +export declare const de_DescribeContributorInsightsCommand: (output: __HttpResponse, context: __SerdeContext) => Promise; +/** + * deserializeAws_json1_0DescribeEndpointsCommand + */ +export declare const de_DescribeEndpointsCommand: (output: 
__HttpResponse, context: __SerdeContext) => Promise; +/** + * deserializeAws_json1_0DescribeExportCommand + */ +export declare const de_DescribeExportCommand: (output: __HttpResponse, context: __SerdeContext) => Promise; +/** + * deserializeAws_json1_0DescribeGlobalTableCommand + */ +export declare const de_DescribeGlobalTableCommand: (output: __HttpResponse, context: __SerdeContext) => Promise; +/** + * deserializeAws_json1_0DescribeGlobalTableSettingsCommand + */ +export declare const de_DescribeGlobalTableSettingsCommand: (output: __HttpResponse, context: __SerdeContext) => Promise; +/** + * deserializeAws_json1_0DescribeImportCommand + */ +export declare const de_DescribeImportCommand: (output: __HttpResponse, context: __SerdeContext) => Promise; +/** + * deserializeAws_json1_0DescribeKinesisStreamingDestinationCommand + */ +export declare const de_DescribeKinesisStreamingDestinationCommand: (output: __HttpResponse, context: __SerdeContext) => Promise; +/** + * deserializeAws_json1_0DescribeLimitsCommand + */ +export declare const de_DescribeLimitsCommand: (output: __HttpResponse, context: __SerdeContext) => Promise; +/** + * deserializeAws_json1_0DescribeTableCommand + */ +export declare const de_DescribeTableCommand: (output: __HttpResponse, context: __SerdeContext) => Promise; +/** + * deserializeAws_json1_0DescribeTableReplicaAutoScalingCommand + */ +export declare const de_DescribeTableReplicaAutoScalingCommand: (output: __HttpResponse, context: __SerdeContext) => Promise; +/** + * deserializeAws_json1_0DescribeTimeToLiveCommand + */ +export declare const de_DescribeTimeToLiveCommand: (output: __HttpResponse, context: __SerdeContext) => Promise; +/** + * deserializeAws_json1_0DisableKinesisStreamingDestinationCommand + */ +export declare const de_DisableKinesisStreamingDestinationCommand: (output: __HttpResponse, context: __SerdeContext) => Promise; +/** + * deserializeAws_json1_0EnableKinesisStreamingDestinationCommand + */ +export declare const 
de_EnableKinesisStreamingDestinationCommand: (output: __HttpResponse, context: __SerdeContext) => Promise; +/** + * deserializeAws_json1_0ExecuteStatementCommand + */ +export declare const de_ExecuteStatementCommand: (output: __HttpResponse, context: __SerdeContext) => Promise; +/** + * deserializeAws_json1_0ExecuteTransactionCommand + */ +export declare const de_ExecuteTransactionCommand: (output: __HttpResponse, context: __SerdeContext) => Promise; +/** + * deserializeAws_json1_0ExportTableToPointInTimeCommand + */ +export declare const de_ExportTableToPointInTimeCommand: (output: __HttpResponse, context: __SerdeContext) => Promise; +/** + * deserializeAws_json1_0GetItemCommand + */ +export declare const de_GetItemCommand: (output: __HttpResponse, context: __SerdeContext) => Promise; +/** + * deserializeAws_json1_0GetResourcePolicyCommand + */ +export declare const de_GetResourcePolicyCommand: (output: __HttpResponse, context: __SerdeContext) => Promise; +/** + * deserializeAws_json1_0ImportTableCommand + */ +export declare const de_ImportTableCommand: (output: __HttpResponse, context: __SerdeContext) => Promise; +/** + * deserializeAws_json1_0ListBackupsCommand + */ +export declare const de_ListBackupsCommand: (output: __HttpResponse, context: __SerdeContext) => Promise; +/** + * deserializeAws_json1_0ListContributorInsightsCommand + */ +export declare const de_ListContributorInsightsCommand: (output: __HttpResponse, context: __SerdeContext) => Promise; +/** + * deserializeAws_json1_0ListExportsCommand + */ +export declare const de_ListExportsCommand: (output: __HttpResponse, context: __SerdeContext) => Promise; +/** + * deserializeAws_json1_0ListGlobalTablesCommand + */ +export declare const de_ListGlobalTablesCommand: (output: __HttpResponse, context: __SerdeContext) => Promise; +/** + * deserializeAws_json1_0ListImportsCommand + */ +export declare const de_ListImportsCommand: (output: __HttpResponse, context: __SerdeContext) => Promise; +/** + * 
deserializeAws_json1_0ListTablesCommand + */ +export declare const de_ListTablesCommand: (output: __HttpResponse, context: __SerdeContext) => Promise; +/** + * deserializeAws_json1_0ListTagsOfResourceCommand + */ +export declare const de_ListTagsOfResourceCommand: (output: __HttpResponse, context: __SerdeContext) => Promise; +/** + * deserializeAws_json1_0PutItemCommand + */ +export declare const de_PutItemCommand: (output: __HttpResponse, context: __SerdeContext) => Promise; +/** + * deserializeAws_json1_0PutResourcePolicyCommand + */ +export declare const de_PutResourcePolicyCommand: (output: __HttpResponse, context: __SerdeContext) => Promise; +/** + * deserializeAws_json1_0QueryCommand + */ +export declare const de_QueryCommand: (output: __HttpResponse, context: __SerdeContext) => Promise; +/** + * deserializeAws_json1_0RestoreTableFromBackupCommand + */ +export declare const de_RestoreTableFromBackupCommand: (output: __HttpResponse, context: __SerdeContext) => Promise; +/** + * deserializeAws_json1_0RestoreTableToPointInTimeCommand + */ +export declare const de_RestoreTableToPointInTimeCommand: (output: __HttpResponse, context: __SerdeContext) => Promise; +/** + * deserializeAws_json1_0ScanCommand + */ +export declare const de_ScanCommand: (output: __HttpResponse, context: __SerdeContext) => Promise; +/** + * deserializeAws_json1_0TagResourceCommand + */ +export declare const de_TagResourceCommand: (output: __HttpResponse, context: __SerdeContext) => Promise; +/** + * deserializeAws_json1_0TransactGetItemsCommand + */ +export declare const de_TransactGetItemsCommand: (output: __HttpResponse, context: __SerdeContext) => Promise; +/** + * deserializeAws_json1_0TransactWriteItemsCommand + */ +export declare const de_TransactWriteItemsCommand: (output: __HttpResponse, context: __SerdeContext) => Promise; +/** + * deserializeAws_json1_0UntagResourceCommand + */ +export declare const de_UntagResourceCommand: (output: __HttpResponse, context: __SerdeContext) => 
Promise; +/** + * deserializeAws_json1_0UpdateContinuousBackupsCommand + */ +export declare const de_UpdateContinuousBackupsCommand: (output: __HttpResponse, context: __SerdeContext) => Promise; +/** + * deserializeAws_json1_0UpdateContributorInsightsCommand + */ +export declare const de_UpdateContributorInsightsCommand: (output: __HttpResponse, context: __SerdeContext) => Promise; +/** + * deserializeAws_json1_0UpdateGlobalTableCommand + */ +export declare const de_UpdateGlobalTableCommand: (output: __HttpResponse, context: __SerdeContext) => Promise; +/** + * deserializeAws_json1_0UpdateGlobalTableSettingsCommand + */ +export declare const de_UpdateGlobalTableSettingsCommand: (output: __HttpResponse, context: __SerdeContext) => Promise; +/** + * deserializeAws_json1_0UpdateItemCommand + */ +export declare const de_UpdateItemCommand: (output: __HttpResponse, context: __SerdeContext) => Promise; +/** + * deserializeAws_json1_0UpdateKinesisStreamingDestinationCommand + */ +export declare const de_UpdateKinesisStreamingDestinationCommand: (output: __HttpResponse, context: __SerdeContext) => Promise; +/** + * deserializeAws_json1_0UpdateTableCommand + */ +export declare const de_UpdateTableCommand: (output: __HttpResponse, context: __SerdeContext) => Promise; +/** + * deserializeAws_json1_0UpdateTableReplicaAutoScalingCommand + */ +export declare const de_UpdateTableReplicaAutoScalingCommand: (output: __HttpResponse, context: __SerdeContext) => Promise; +/** + * deserializeAws_json1_0UpdateTimeToLiveCommand + */ +export declare const de_UpdateTimeToLiveCommand: (output: __HttpResponse, context: __SerdeContext) => Promise; diff --git a/amplify/functions/downloadDocument/node_modules/@aws-sdk/client-dynamodb/dist-types/runtimeConfig.browser.d.ts b/amplify/functions/downloadDocument/node_modules/@aws-sdk/client-dynamodb/dist-types/runtimeConfig.browser.d.ts new file mode 100644 index 0000000..e8b4a74 --- /dev/null +++ 
b/amplify/functions/downloadDocument/node_modules/@aws-sdk/client-dynamodb/dist-types/runtimeConfig.browser.d.ts @@ -0,0 +1,55 @@ +import { FetchHttpHandler as RequestHandler } from "@smithy/fetch-http-handler"; +import { DynamoDBClientConfig } from "./DynamoDBClient"; +/** + * @internal + */ +export declare const getRuntimeConfig: (config: DynamoDBClientConfig) => { + runtime: string; + defaultsMode: import("@smithy/types").Provider; + accountIdEndpointMode: "disabled" | "preferred" | "required" | (() => Promise); + bodyLengthChecker: import("@smithy/types").BodyLengthCalculator; + credentialDefaultProvider: ((input: any) => import("@smithy/types").AwsCredentialIdentityProvider) | ((_: unknown) => () => Promise); + defaultUserAgentProvider: (config?: import("@aws-sdk/util-user-agent-browser").PreviouslyResolved | undefined) => Promise; + endpointDiscoveryEnabledProvider: import("@smithy/types").Provider; + maxAttempts: number | import("@smithy/types").Provider; + region: string | import("@smithy/types").Provider; + requestHandler: import("@smithy/protocol-http").HttpHandler | RequestHandler; + retryMode: string | import("@smithy/types").Provider; + sha256: import("@smithy/types").HashConstructor; + streamCollector: import("@smithy/types").StreamCollector; + useDualstackEndpoint: boolean | import("@smithy/types").Provider; + useFipsEndpoint: boolean | import("@smithy/types").Provider; + apiVersion: string; + cacheMiddleware?: boolean | undefined; + urlParser: import("@smithy/types").UrlParser; + base64Decoder: import("@smithy/types").Decoder; + base64Encoder: (_input: string | Uint8Array) => string; + utf8Decoder: import("@smithy/types").Decoder; + utf8Encoder: (input: string | Uint8Array) => string; + disableHostPrefix: boolean; + serviceId: string; + profile?: string | undefined; + logger: import("@smithy/types").Logger; + extensions: import("./runtimeExtensions").RuntimeExtension[]; + customUserAgent?: string | import("@smithy/types").UserAgent | undefined; + 
userAgentAppId?: string | import("@smithy/types").Provider | undefined; + retryStrategy?: import("@smithy/types").RetryStrategy | import("@smithy/types").RetryStrategyV2 | undefined; + endpoint?: ((string | import("@smithy/types").Endpoint | import("@smithy/types").Provider | import("@smithy/types").EndpointV2 | import("@smithy/types").Provider) & (string | import("@smithy/types").Provider | import("@smithy/types").Endpoint | import("@smithy/types").Provider | import("@smithy/types").EndpointV2 | import("@smithy/types").Provider)) | undefined; + endpointProvider: (endpointParams: import("./endpoint/EndpointParameters").EndpointParameters, context?: { + logger?: import("@smithy/types").Logger | undefined; + }) => import("@smithy/types").EndpointV2; + tls?: boolean | undefined; + serviceConfiguredEndpoint?: undefined; + authSchemePreference?: string[] | import("@smithy/types").Provider | undefined; + httpAuthSchemes: import("@smithy/types").HttpAuthScheme[]; + httpAuthSchemeProvider: import("./auth/httpAuthSchemeProvider").DynamoDBHttpAuthSchemeProvider; + credentials?: import("@smithy/types").AwsCredentialIdentity | import("@smithy/types").AwsCredentialIdentityProvider | undefined; + signer?: import("@smithy/types").RequestSigner | ((authScheme?: import("@smithy/types").AuthScheme | undefined) => Promise) | undefined; + signingEscapePath?: boolean | undefined; + systemClockOffset?: number | undefined; + signingRegion?: string | undefined; + signerConstructor?: (new (options: import("@smithy/signature-v4").SignatureV4Init & import("@smithy/signature-v4").SignatureV4CryptoInit) => import("@smithy/types").RequestSigner) | undefined; + endpointCacheSize?: number | undefined; + endpointDiscoveryEnabled?: boolean | undefined; + accountId?: string | import("@smithy/types").Provider | undefined; +}; diff --git a/amplify/functions/downloadDocument/node_modules/@aws-sdk/client-dynamodb/dist-types/runtimeConfig.d.ts 
b/amplify/functions/downloadDocument/node_modules/@aws-sdk/client-dynamodb/dist-types/runtimeConfig.d.ts new file mode 100644 index 0000000..01479fa --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@aws-sdk/client-dynamodb/dist-types/runtimeConfig.d.ts @@ -0,0 +1,55 @@ +import { NodeHttpHandler as RequestHandler } from "@smithy/node-http-handler"; +import { DynamoDBClientConfig } from "./DynamoDBClient"; +/** + * @internal + */ +export declare const getRuntimeConfig: (config: DynamoDBClientConfig) => { + runtime: string; + defaultsMode: import("@smithy/types").Provider; + accountIdEndpointMode: "disabled" | "preferred" | "required" | import("@smithy/types").Provider; + authSchemePreference: string[] | import("@smithy/types").Provider; + bodyLengthChecker: import("@smithy/types").BodyLengthCalculator; + credentialDefaultProvider: ((input: any) => import("@smithy/types").AwsCredentialIdentityProvider) | ((init?: import("@aws-sdk/credential-provider-node").DefaultProviderInit | undefined) => import("@smithy/types").MemoizedProvider); + defaultUserAgentProvider: (config?: import("@aws-sdk/util-user-agent-node").PreviouslyResolved | undefined) => Promise; + endpointDiscoveryEnabledProvider: import("@smithy/types").Provider; + maxAttempts: number | import("@smithy/types").Provider; + region: string | import("@smithy/types").Provider; + requestHandler: RequestHandler | import("@smithy/protocol-http").HttpHandler; + retryMode: string | import("@smithy/types").Provider; + sha256: import("@smithy/types").HashConstructor; + streamCollector: import("@smithy/types").StreamCollector; + useDualstackEndpoint: boolean | import("@smithy/types").Provider; + useFipsEndpoint: boolean | import("@smithy/types").Provider; + userAgentAppId: string | import("@smithy/types").Provider; + apiVersion: string; + cacheMiddleware?: boolean | undefined; + urlParser: import("@smithy/types").UrlParser; + base64Decoder: import("@smithy/types").Decoder; + base64Encoder: (_input: 
string | Uint8Array) => string; + utf8Decoder: import("@smithy/types").Decoder; + utf8Encoder: (input: string | Uint8Array) => string; + disableHostPrefix: boolean; + serviceId: string; + profile?: string | undefined; + logger: import("@smithy/types").Logger; + extensions: import("./runtimeExtensions").RuntimeExtension[]; + customUserAgent?: string | import("@smithy/types").UserAgent | undefined; + retryStrategy?: import("@smithy/types").RetryStrategy | import("@smithy/types").RetryStrategyV2 | undefined; + endpoint?: ((string | import("@smithy/types").Endpoint | import("@smithy/types").Provider | import("@smithy/types").EndpointV2 | import("@smithy/types").Provider) & (string | import("@smithy/types").Provider | import("@smithy/types").Endpoint | import("@smithy/types").Provider | import("@smithy/types").EndpointV2 | import("@smithy/types").Provider)) | undefined; + endpointProvider: (endpointParams: import("./endpoint/EndpointParameters").EndpointParameters, context?: { + logger?: import("@smithy/types").Logger | undefined; + }) => import("@smithy/types").EndpointV2; + tls?: boolean | undefined; + serviceConfiguredEndpoint?: undefined; + httpAuthSchemes: import("@smithy/types").HttpAuthScheme[]; + httpAuthSchemeProvider: import("./auth/httpAuthSchemeProvider").DynamoDBHttpAuthSchemeProvider; + credentials?: import("@smithy/types").AwsCredentialIdentity | import("@smithy/types").AwsCredentialIdentityProvider | undefined; + signer?: import("@smithy/types").RequestSigner | ((authScheme?: import("@smithy/types").AuthScheme | undefined) => Promise) | undefined; + signingEscapePath?: boolean | undefined; + systemClockOffset?: number | undefined; + signingRegion?: string | undefined; + signerConstructor?: (new (options: import("@smithy/signature-v4").SignatureV4Init & import("@smithy/signature-v4").SignatureV4CryptoInit) => import("@smithy/types").RequestSigner) | undefined; + endpointCacheSize?: number | undefined; + endpointDiscoveryEnabled?: boolean | undefined; + 
accountId?: string | import("@smithy/types").Provider | undefined; +}; diff --git a/amplify/functions/downloadDocument/node_modules/@aws-sdk/client-dynamodb/dist-types/runtimeConfig.native.d.ts b/amplify/functions/downloadDocument/node_modules/@aws-sdk/client-dynamodb/dist-types/runtimeConfig.native.d.ts new file mode 100644 index 0000000..0288659 --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@aws-sdk/client-dynamodb/dist-types/runtimeConfig.native.d.ts @@ -0,0 +1,54 @@ +import { DynamoDBClientConfig } from "./DynamoDBClient"; +/** + * @internal + */ +export declare const getRuntimeConfig: (config: DynamoDBClientConfig) => { + runtime: string; + sha256: import("@smithy/types").HashConstructor; + requestHandler: import("@smithy/types").NodeHttpHandlerOptions | import("@smithy/types").FetchHttpHandlerOptions | Record | import("@smithy/protocol-http").HttpHandler | import("@smithy/fetch-http-handler").FetchHttpHandler; + apiVersion: string; + cacheMiddleware?: boolean | undefined; + urlParser: import("@smithy/types").UrlParser; + bodyLengthChecker: import("@smithy/types").BodyLengthCalculator; + streamCollector: import("@smithy/types").StreamCollector; + base64Decoder: import("@smithy/types").Decoder; + base64Encoder: (_input: string | Uint8Array) => string; + utf8Decoder: import("@smithy/types").Decoder; + utf8Encoder: (input: string | Uint8Array) => string; + disableHostPrefix: boolean; + serviceId: string; + useDualstackEndpoint: boolean | import("@smithy/types").Provider; + useFipsEndpoint: boolean | import("@smithy/types").Provider; + region: string | import("@smithy/types").Provider; + profile?: string | undefined; + accountIdEndpointMode: "disabled" | "preferred" | "required" | (() => Promise); + defaultUserAgentProvider: (config?: import("@aws-sdk/util-user-agent-browser").PreviouslyResolved | undefined) => Promise; + credentialDefaultProvider: ((input: any) => import("@smithy/types").AwsCredentialIdentityProvider) | ((_: unknown) => () 
=> Promise); + maxAttempts: number | import("@smithy/types").Provider; + retryMode: string | import("@smithy/types").Provider; + logger: import("@smithy/types").Logger; + extensions: import("./runtimeExtensions").RuntimeExtension[]; + defaultsMode: import("@smithy/smithy-client").DefaultsMode | import("@smithy/types").Provider; + endpointDiscoveryEnabledProvider: import("@smithy/types").Provider; + customUserAgent?: string | import("@smithy/types").UserAgent | undefined; + userAgentAppId?: string | import("@smithy/types").Provider | undefined; + retryStrategy?: import("@smithy/types").RetryStrategy | import("@smithy/types").RetryStrategyV2 | undefined; + endpoint?: ((string | import("@smithy/types").Endpoint | import("@smithy/types").Provider | import("@smithy/types").EndpointV2 | import("@smithy/types").Provider) & (string | import("@smithy/types").Provider | import("@smithy/types").Endpoint | import("@smithy/types").Provider | import("@smithy/types").EndpointV2 | import("@smithy/types").Provider)) | undefined; + endpointProvider: (endpointParams: import("./endpoint/EndpointParameters").EndpointParameters, context?: { + logger?: import("@smithy/types").Logger | undefined; + }) => import("@smithy/types").EndpointV2; + tls?: boolean | undefined; + serviceConfiguredEndpoint?: undefined; + authSchemePreference?: string[] | import("@smithy/types").Provider | undefined; + httpAuthSchemes: import("@smithy/types").HttpAuthScheme[]; + httpAuthSchemeProvider: import("./auth/httpAuthSchemeProvider").DynamoDBHttpAuthSchemeProvider; + credentials?: import("@smithy/types").AwsCredentialIdentity | import("@smithy/types").AwsCredentialIdentityProvider | undefined; + signer?: import("@smithy/types").RequestSigner | ((authScheme?: import("@smithy/types").AuthScheme | undefined) => Promise) | undefined; + signingEscapePath?: boolean | undefined; + systemClockOffset?: number | undefined; + signingRegion?: string | undefined; + signerConstructor?: (new (options: 
import("@smithy/signature-v4").SignatureV4Init & import("@smithy/signature-v4").SignatureV4CryptoInit) => import("@smithy/types").RequestSigner) | undefined; + endpointCacheSize?: number | undefined; + endpointDiscoveryEnabled?: boolean | undefined; + accountId?: string | import("@smithy/types").Provider | undefined; +}; diff --git a/amplify/functions/downloadDocument/node_modules/@aws-sdk/client-dynamodb/dist-types/runtimeConfig.shared.d.ts b/amplify/functions/downloadDocument/node_modules/@aws-sdk/client-dynamodb/dist-types/runtimeConfig.shared.d.ts new file mode 100644 index 0000000..36f4e1e --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@aws-sdk/client-dynamodb/dist-types/runtimeConfig.shared.d.ts @@ -0,0 +1,21 @@ +import { DynamoDBClientConfig } from "./DynamoDBClient"; +/** + * @internal + */ +export declare const getRuntimeConfig: (config: DynamoDBClientConfig) => { + apiVersion: string; + base64Decoder: import("@smithy/types").Decoder; + base64Encoder: (_input: string | Uint8Array) => string; + disableHostPrefix: boolean; + endpointProvider: (endpointParams: import("./endpoint/EndpointParameters").EndpointParameters, context?: { + logger?: import("@smithy/types").Logger | undefined; + }) => import("@smithy/types").EndpointV2; + extensions: import("./runtimeExtensions").RuntimeExtension[]; + httpAuthSchemeProvider: import("./auth/httpAuthSchemeProvider").DynamoDBHttpAuthSchemeProvider; + httpAuthSchemes: import("@smithy/types").HttpAuthScheme[]; + logger: import("@smithy/types").Logger; + serviceId: string; + urlParser: import("@smithy/types").UrlParser; + utf8Decoder: import("@smithy/types").Decoder; + utf8Encoder: (input: string | Uint8Array) => string; +}; diff --git a/amplify/functions/downloadDocument/node_modules/@aws-sdk/client-dynamodb/dist-types/runtimeExtensions.d.ts b/amplify/functions/downloadDocument/node_modules/@aws-sdk/client-dynamodb/dist-types/runtimeExtensions.d.ts new file mode 100644 index 0000000..ac1a4bc --- 
/dev/null +++ b/amplify/functions/downloadDocument/node_modules/@aws-sdk/client-dynamodb/dist-types/runtimeExtensions.d.ts @@ -0,0 +1,17 @@ +import { DynamoDBExtensionConfiguration } from "./extensionConfiguration"; +/** + * @public + */ +export interface RuntimeExtension { + configure(extensionConfiguration: DynamoDBExtensionConfiguration): void; +} +/** + * @public + */ +export interface RuntimeExtensionsConfig { + extensions: RuntimeExtension[]; +} +/** + * @internal + */ +export declare const resolveRuntimeExtensions: (runtimeConfig: any, extensions: RuntimeExtension[]) => any; diff --git a/amplify/functions/downloadDocument/node_modules/@aws-sdk/client-dynamodb/dist-types/ts3.4/DynamoDB.d.ts b/amplify/functions/downloadDocument/node_modules/@aws-sdk/client-dynamodb/dist-types/ts3.4/DynamoDB.d.ts new file mode 100644 index 0000000..cf606cb --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@aws-sdk/client-dynamodb/dist-types/ts3.4/DynamoDB.d.ts @@ -0,0 +1,1000 @@ +import { HttpHandlerOptions as __HttpHandlerOptions } from "@smithy/types"; +import { + BatchExecuteStatementCommandInput, + BatchExecuteStatementCommandOutput, +} from "./commands/BatchExecuteStatementCommand"; +import { + BatchGetItemCommandInput, + BatchGetItemCommandOutput, +} from "./commands/BatchGetItemCommand"; +import { + BatchWriteItemCommandInput, + BatchWriteItemCommandOutput, +} from "./commands/BatchWriteItemCommand"; +import { + CreateBackupCommandInput, + CreateBackupCommandOutput, +} from "./commands/CreateBackupCommand"; +import { + CreateGlobalTableCommandInput, + CreateGlobalTableCommandOutput, +} from "./commands/CreateGlobalTableCommand"; +import { + CreateTableCommandInput, + CreateTableCommandOutput, +} from "./commands/CreateTableCommand"; +import { + DeleteBackupCommandInput, + DeleteBackupCommandOutput, +} from "./commands/DeleteBackupCommand"; +import { + DeleteItemCommandInput, + DeleteItemCommandOutput, +} from "./commands/DeleteItemCommand"; +import { + 
DeleteResourcePolicyCommandInput, + DeleteResourcePolicyCommandOutput, +} from "./commands/DeleteResourcePolicyCommand"; +import { + DeleteTableCommandInput, + DeleteTableCommandOutput, +} from "./commands/DeleteTableCommand"; +import { + DescribeBackupCommandInput, + DescribeBackupCommandOutput, +} from "./commands/DescribeBackupCommand"; +import { + DescribeContinuousBackupsCommandInput, + DescribeContinuousBackupsCommandOutput, +} from "./commands/DescribeContinuousBackupsCommand"; +import { + DescribeContributorInsightsCommandInput, + DescribeContributorInsightsCommandOutput, +} from "./commands/DescribeContributorInsightsCommand"; +import { + DescribeEndpointsCommandInput, + DescribeEndpointsCommandOutput, +} from "./commands/DescribeEndpointsCommand"; +import { + DescribeExportCommandInput, + DescribeExportCommandOutput, +} from "./commands/DescribeExportCommand"; +import { + DescribeGlobalTableCommandInput, + DescribeGlobalTableCommandOutput, +} from "./commands/DescribeGlobalTableCommand"; +import { + DescribeGlobalTableSettingsCommandInput, + DescribeGlobalTableSettingsCommandOutput, +} from "./commands/DescribeGlobalTableSettingsCommand"; +import { + DescribeImportCommandInput, + DescribeImportCommandOutput, +} from "./commands/DescribeImportCommand"; +import { + DescribeKinesisStreamingDestinationCommandInput, + DescribeKinesisStreamingDestinationCommandOutput, +} from "./commands/DescribeKinesisStreamingDestinationCommand"; +import { + DescribeLimitsCommandInput, + DescribeLimitsCommandOutput, +} from "./commands/DescribeLimitsCommand"; +import { + DescribeTableCommandInput, + DescribeTableCommandOutput, +} from "./commands/DescribeTableCommand"; +import { + DescribeTableReplicaAutoScalingCommandInput, + DescribeTableReplicaAutoScalingCommandOutput, +} from "./commands/DescribeTableReplicaAutoScalingCommand"; +import { + DescribeTimeToLiveCommandInput, + DescribeTimeToLiveCommandOutput, +} from "./commands/DescribeTimeToLiveCommand"; +import { + 
DisableKinesisStreamingDestinationCommandInput, + DisableKinesisStreamingDestinationCommandOutput, +} from "./commands/DisableKinesisStreamingDestinationCommand"; +import { + EnableKinesisStreamingDestinationCommandInput, + EnableKinesisStreamingDestinationCommandOutput, +} from "./commands/EnableKinesisStreamingDestinationCommand"; +import { + ExecuteStatementCommandInput, + ExecuteStatementCommandOutput, +} from "./commands/ExecuteStatementCommand"; +import { + ExecuteTransactionCommandInput, + ExecuteTransactionCommandOutput, +} from "./commands/ExecuteTransactionCommand"; +import { + ExportTableToPointInTimeCommandInput, + ExportTableToPointInTimeCommandOutput, +} from "./commands/ExportTableToPointInTimeCommand"; +import { + GetItemCommandInput, + GetItemCommandOutput, +} from "./commands/GetItemCommand"; +import { + GetResourcePolicyCommandInput, + GetResourcePolicyCommandOutput, +} from "./commands/GetResourcePolicyCommand"; +import { + ImportTableCommandInput, + ImportTableCommandOutput, +} from "./commands/ImportTableCommand"; +import { + ListBackupsCommandInput, + ListBackupsCommandOutput, +} from "./commands/ListBackupsCommand"; +import { + ListContributorInsightsCommandInput, + ListContributorInsightsCommandOutput, +} from "./commands/ListContributorInsightsCommand"; +import { + ListExportsCommandInput, + ListExportsCommandOutput, +} from "./commands/ListExportsCommand"; +import { + ListGlobalTablesCommandInput, + ListGlobalTablesCommandOutput, +} from "./commands/ListGlobalTablesCommand"; +import { + ListImportsCommandInput, + ListImportsCommandOutput, +} from "./commands/ListImportsCommand"; +import { + ListTablesCommandInput, + ListTablesCommandOutput, +} from "./commands/ListTablesCommand"; +import { + ListTagsOfResourceCommandInput, + ListTagsOfResourceCommandOutput, +} from "./commands/ListTagsOfResourceCommand"; +import { + PutItemCommandInput, + PutItemCommandOutput, +} from "./commands/PutItemCommand"; +import { + PutResourcePolicyCommandInput, 
+ PutResourcePolicyCommandOutput, +} from "./commands/PutResourcePolicyCommand"; +import { QueryCommandInput, QueryCommandOutput } from "./commands/QueryCommand"; +import { + RestoreTableFromBackupCommandInput, + RestoreTableFromBackupCommandOutput, +} from "./commands/RestoreTableFromBackupCommand"; +import { + RestoreTableToPointInTimeCommandInput, + RestoreTableToPointInTimeCommandOutput, +} from "./commands/RestoreTableToPointInTimeCommand"; +import { ScanCommandInput, ScanCommandOutput } from "./commands/ScanCommand"; +import { + TagResourceCommandInput, + TagResourceCommandOutput, +} from "./commands/TagResourceCommand"; +import { + TransactGetItemsCommandInput, + TransactGetItemsCommandOutput, +} from "./commands/TransactGetItemsCommand"; +import { + TransactWriteItemsCommandInput, + TransactWriteItemsCommandOutput, +} from "./commands/TransactWriteItemsCommand"; +import { + UntagResourceCommandInput, + UntagResourceCommandOutput, +} from "./commands/UntagResourceCommand"; +import { + UpdateContinuousBackupsCommandInput, + UpdateContinuousBackupsCommandOutput, +} from "./commands/UpdateContinuousBackupsCommand"; +import { + UpdateContributorInsightsCommandInput, + UpdateContributorInsightsCommandOutput, +} from "./commands/UpdateContributorInsightsCommand"; +import { + UpdateGlobalTableCommandInput, + UpdateGlobalTableCommandOutput, +} from "./commands/UpdateGlobalTableCommand"; +import { + UpdateGlobalTableSettingsCommandInput, + UpdateGlobalTableSettingsCommandOutput, +} from "./commands/UpdateGlobalTableSettingsCommand"; +import { + UpdateItemCommandInput, + UpdateItemCommandOutput, +} from "./commands/UpdateItemCommand"; +import { + UpdateKinesisStreamingDestinationCommandInput, + UpdateKinesisStreamingDestinationCommandOutput, +} from "./commands/UpdateKinesisStreamingDestinationCommand"; +import { + UpdateTableCommandInput, + UpdateTableCommandOutput, +} from "./commands/UpdateTableCommand"; +import { + UpdateTableReplicaAutoScalingCommandInput, + 
UpdateTableReplicaAutoScalingCommandOutput, +} from "./commands/UpdateTableReplicaAutoScalingCommand"; +import { + UpdateTimeToLiveCommandInput, + UpdateTimeToLiveCommandOutput, +} from "./commands/UpdateTimeToLiveCommand"; +import { DynamoDBClient } from "./DynamoDBClient"; +export interface DynamoDB { + batchExecuteStatement( + args: BatchExecuteStatementCommandInput, + options?: __HttpHandlerOptions + ): Promise; + batchExecuteStatement( + args: BatchExecuteStatementCommandInput, + cb: (err: any, data?: BatchExecuteStatementCommandOutput) => void + ): void; + batchExecuteStatement( + args: BatchExecuteStatementCommandInput, + options: __HttpHandlerOptions, + cb: (err: any, data?: BatchExecuteStatementCommandOutput) => void + ): void; + batchGetItem( + args: BatchGetItemCommandInput, + options?: __HttpHandlerOptions + ): Promise; + batchGetItem( + args: BatchGetItemCommandInput, + cb: (err: any, data?: BatchGetItemCommandOutput) => void + ): void; + batchGetItem( + args: BatchGetItemCommandInput, + options: __HttpHandlerOptions, + cb: (err: any, data?: BatchGetItemCommandOutput) => void + ): void; + batchWriteItem( + args: BatchWriteItemCommandInput, + options?: __HttpHandlerOptions + ): Promise; + batchWriteItem( + args: BatchWriteItemCommandInput, + cb: (err: any, data?: BatchWriteItemCommandOutput) => void + ): void; + batchWriteItem( + args: BatchWriteItemCommandInput, + options: __HttpHandlerOptions, + cb: (err: any, data?: BatchWriteItemCommandOutput) => void + ): void; + createBackup( + args: CreateBackupCommandInput, + options?: __HttpHandlerOptions + ): Promise; + createBackup( + args: CreateBackupCommandInput, + cb: (err: any, data?: CreateBackupCommandOutput) => void + ): void; + createBackup( + args: CreateBackupCommandInput, + options: __HttpHandlerOptions, + cb: (err: any, data?: CreateBackupCommandOutput) => void + ): void; + createGlobalTable( + args: CreateGlobalTableCommandInput, + options?: __HttpHandlerOptions + ): Promise; + 
createGlobalTable( + args: CreateGlobalTableCommandInput, + cb: (err: any, data?: CreateGlobalTableCommandOutput) => void + ): void; + createGlobalTable( + args: CreateGlobalTableCommandInput, + options: __HttpHandlerOptions, + cb: (err: any, data?: CreateGlobalTableCommandOutput) => void + ): void; + createTable( + args: CreateTableCommandInput, + options?: __HttpHandlerOptions + ): Promise; + createTable( + args: CreateTableCommandInput, + cb: (err: any, data?: CreateTableCommandOutput) => void + ): void; + createTable( + args: CreateTableCommandInput, + options: __HttpHandlerOptions, + cb: (err: any, data?: CreateTableCommandOutput) => void + ): void; + deleteBackup( + args: DeleteBackupCommandInput, + options?: __HttpHandlerOptions + ): Promise; + deleteBackup( + args: DeleteBackupCommandInput, + cb: (err: any, data?: DeleteBackupCommandOutput) => void + ): void; + deleteBackup( + args: DeleteBackupCommandInput, + options: __HttpHandlerOptions, + cb: (err: any, data?: DeleteBackupCommandOutput) => void + ): void; + deleteItem( + args: DeleteItemCommandInput, + options?: __HttpHandlerOptions + ): Promise; + deleteItem( + args: DeleteItemCommandInput, + cb: (err: any, data?: DeleteItemCommandOutput) => void + ): void; + deleteItem( + args: DeleteItemCommandInput, + options: __HttpHandlerOptions, + cb: (err: any, data?: DeleteItemCommandOutput) => void + ): void; + deleteResourcePolicy( + args: DeleteResourcePolicyCommandInput, + options?: __HttpHandlerOptions + ): Promise; + deleteResourcePolicy( + args: DeleteResourcePolicyCommandInput, + cb: (err: any, data?: DeleteResourcePolicyCommandOutput) => void + ): void; + deleteResourcePolicy( + args: DeleteResourcePolicyCommandInput, + options: __HttpHandlerOptions, + cb: (err: any, data?: DeleteResourcePolicyCommandOutput) => void + ): void; + deleteTable( + args: DeleteTableCommandInput, + options?: __HttpHandlerOptions + ): Promise; + deleteTable( + args: DeleteTableCommandInput, + cb: (err: any, data?: 
DeleteTableCommandOutput) => void + ): void; + deleteTable( + args: DeleteTableCommandInput, + options: __HttpHandlerOptions, + cb: (err: any, data?: DeleteTableCommandOutput) => void + ): void; + describeBackup( + args: DescribeBackupCommandInput, + options?: __HttpHandlerOptions + ): Promise; + describeBackup( + args: DescribeBackupCommandInput, + cb: (err: any, data?: DescribeBackupCommandOutput) => void + ): void; + describeBackup( + args: DescribeBackupCommandInput, + options: __HttpHandlerOptions, + cb: (err: any, data?: DescribeBackupCommandOutput) => void + ): void; + describeContinuousBackups( + args: DescribeContinuousBackupsCommandInput, + options?: __HttpHandlerOptions + ): Promise; + describeContinuousBackups( + args: DescribeContinuousBackupsCommandInput, + cb: (err: any, data?: DescribeContinuousBackupsCommandOutput) => void + ): void; + describeContinuousBackups( + args: DescribeContinuousBackupsCommandInput, + options: __HttpHandlerOptions, + cb: (err: any, data?: DescribeContinuousBackupsCommandOutput) => void + ): void; + describeContributorInsights( + args: DescribeContributorInsightsCommandInput, + options?: __HttpHandlerOptions + ): Promise; + describeContributorInsights( + args: DescribeContributorInsightsCommandInput, + cb: (err: any, data?: DescribeContributorInsightsCommandOutput) => void + ): void; + describeContributorInsights( + args: DescribeContributorInsightsCommandInput, + options: __HttpHandlerOptions, + cb: (err: any, data?: DescribeContributorInsightsCommandOutput) => void + ): void; + describeEndpoints(): Promise; + describeEndpoints( + args: DescribeEndpointsCommandInput, + options?: __HttpHandlerOptions + ): Promise; + describeEndpoints( + args: DescribeEndpointsCommandInput, + cb: (err: any, data?: DescribeEndpointsCommandOutput) => void + ): void; + describeEndpoints( + args: DescribeEndpointsCommandInput, + options: __HttpHandlerOptions, + cb: (err: any, data?: DescribeEndpointsCommandOutput) => void + ): void; + 
describeExport( + args: DescribeExportCommandInput, + options?: __HttpHandlerOptions + ): Promise; + describeExport( + args: DescribeExportCommandInput, + cb: (err: any, data?: DescribeExportCommandOutput) => void + ): void; + describeExport( + args: DescribeExportCommandInput, + options: __HttpHandlerOptions, + cb: (err: any, data?: DescribeExportCommandOutput) => void + ): void; + describeGlobalTable( + args: DescribeGlobalTableCommandInput, + options?: __HttpHandlerOptions + ): Promise; + describeGlobalTable( + args: DescribeGlobalTableCommandInput, + cb: (err: any, data?: DescribeGlobalTableCommandOutput) => void + ): void; + describeGlobalTable( + args: DescribeGlobalTableCommandInput, + options: __HttpHandlerOptions, + cb: (err: any, data?: DescribeGlobalTableCommandOutput) => void + ): void; + describeGlobalTableSettings( + args: DescribeGlobalTableSettingsCommandInput, + options?: __HttpHandlerOptions + ): Promise; + describeGlobalTableSettings( + args: DescribeGlobalTableSettingsCommandInput, + cb: (err: any, data?: DescribeGlobalTableSettingsCommandOutput) => void + ): void; + describeGlobalTableSettings( + args: DescribeGlobalTableSettingsCommandInput, + options: __HttpHandlerOptions, + cb: (err: any, data?: DescribeGlobalTableSettingsCommandOutput) => void + ): void; + describeImport( + args: DescribeImportCommandInput, + options?: __HttpHandlerOptions + ): Promise; + describeImport( + args: DescribeImportCommandInput, + cb: (err: any, data?: DescribeImportCommandOutput) => void + ): void; + describeImport( + args: DescribeImportCommandInput, + options: __HttpHandlerOptions, + cb: (err: any, data?: DescribeImportCommandOutput) => void + ): void; + describeKinesisStreamingDestination( + args: DescribeKinesisStreamingDestinationCommandInput, + options?: __HttpHandlerOptions + ): Promise; + describeKinesisStreamingDestination( + args: DescribeKinesisStreamingDestinationCommandInput, + cb: ( + err: any, + data?: 
DescribeKinesisStreamingDestinationCommandOutput + ) => void + ): void; + describeKinesisStreamingDestination( + args: DescribeKinesisStreamingDestinationCommandInput, + options: __HttpHandlerOptions, + cb: ( + err: any, + data?: DescribeKinesisStreamingDestinationCommandOutput + ) => void + ): void; + describeLimits(): Promise; + describeLimits( + args: DescribeLimitsCommandInput, + options?: __HttpHandlerOptions + ): Promise; + describeLimits( + args: DescribeLimitsCommandInput, + cb: (err: any, data?: DescribeLimitsCommandOutput) => void + ): void; + describeLimits( + args: DescribeLimitsCommandInput, + options: __HttpHandlerOptions, + cb: (err: any, data?: DescribeLimitsCommandOutput) => void + ): void; + describeTable( + args: DescribeTableCommandInput, + options?: __HttpHandlerOptions + ): Promise; + describeTable( + args: DescribeTableCommandInput, + cb: (err: any, data?: DescribeTableCommandOutput) => void + ): void; + describeTable( + args: DescribeTableCommandInput, + options: __HttpHandlerOptions, + cb: (err: any, data?: DescribeTableCommandOutput) => void + ): void; + describeTableReplicaAutoScaling( + args: DescribeTableReplicaAutoScalingCommandInput, + options?: __HttpHandlerOptions + ): Promise; + describeTableReplicaAutoScaling( + args: DescribeTableReplicaAutoScalingCommandInput, + cb: (err: any, data?: DescribeTableReplicaAutoScalingCommandOutput) => void + ): void; + describeTableReplicaAutoScaling( + args: DescribeTableReplicaAutoScalingCommandInput, + options: __HttpHandlerOptions, + cb: (err: any, data?: DescribeTableReplicaAutoScalingCommandOutput) => void + ): void; + describeTimeToLive( + args: DescribeTimeToLiveCommandInput, + options?: __HttpHandlerOptions + ): Promise; + describeTimeToLive( + args: DescribeTimeToLiveCommandInput, + cb: (err: any, data?: DescribeTimeToLiveCommandOutput) => void + ): void; + describeTimeToLive( + args: DescribeTimeToLiveCommandInput, + options: __HttpHandlerOptions, + cb: (err: any, data?: 
DescribeTimeToLiveCommandOutput) => void + ): void; + disableKinesisStreamingDestination( + args: DisableKinesisStreamingDestinationCommandInput, + options?: __HttpHandlerOptions + ): Promise; + disableKinesisStreamingDestination( + args: DisableKinesisStreamingDestinationCommandInput, + cb: ( + err: any, + data?: DisableKinesisStreamingDestinationCommandOutput + ) => void + ): void; + disableKinesisStreamingDestination( + args: DisableKinesisStreamingDestinationCommandInput, + options: __HttpHandlerOptions, + cb: ( + err: any, + data?: DisableKinesisStreamingDestinationCommandOutput + ) => void + ): void; + enableKinesisStreamingDestination( + args: EnableKinesisStreamingDestinationCommandInput, + options?: __HttpHandlerOptions + ): Promise; + enableKinesisStreamingDestination( + args: EnableKinesisStreamingDestinationCommandInput, + cb: ( + err: any, + data?: EnableKinesisStreamingDestinationCommandOutput + ) => void + ): void; + enableKinesisStreamingDestination( + args: EnableKinesisStreamingDestinationCommandInput, + options: __HttpHandlerOptions, + cb: ( + err: any, + data?: EnableKinesisStreamingDestinationCommandOutput + ) => void + ): void; + executeStatement( + args: ExecuteStatementCommandInput, + options?: __HttpHandlerOptions + ): Promise; + executeStatement( + args: ExecuteStatementCommandInput, + cb: (err: any, data?: ExecuteStatementCommandOutput) => void + ): void; + executeStatement( + args: ExecuteStatementCommandInput, + options: __HttpHandlerOptions, + cb: (err: any, data?: ExecuteStatementCommandOutput) => void + ): void; + executeTransaction( + args: ExecuteTransactionCommandInput, + options?: __HttpHandlerOptions + ): Promise; + executeTransaction( + args: ExecuteTransactionCommandInput, + cb: (err: any, data?: ExecuteTransactionCommandOutput) => void + ): void; + executeTransaction( + args: ExecuteTransactionCommandInput, + options: __HttpHandlerOptions, + cb: (err: any, data?: ExecuteTransactionCommandOutput) => void + ): void; + 
exportTableToPointInTime( + args: ExportTableToPointInTimeCommandInput, + options?: __HttpHandlerOptions + ): Promise; + exportTableToPointInTime( + args: ExportTableToPointInTimeCommandInput, + cb: (err: any, data?: ExportTableToPointInTimeCommandOutput) => void + ): void; + exportTableToPointInTime( + args: ExportTableToPointInTimeCommandInput, + options: __HttpHandlerOptions, + cb: (err: any, data?: ExportTableToPointInTimeCommandOutput) => void + ): void; + getItem( + args: GetItemCommandInput, + options?: __HttpHandlerOptions + ): Promise; + getItem( + args: GetItemCommandInput, + cb: (err: any, data?: GetItemCommandOutput) => void + ): void; + getItem( + args: GetItemCommandInput, + options: __HttpHandlerOptions, + cb: (err: any, data?: GetItemCommandOutput) => void + ): void; + getResourcePolicy( + args: GetResourcePolicyCommandInput, + options?: __HttpHandlerOptions + ): Promise; + getResourcePolicy( + args: GetResourcePolicyCommandInput, + cb: (err: any, data?: GetResourcePolicyCommandOutput) => void + ): void; + getResourcePolicy( + args: GetResourcePolicyCommandInput, + options: __HttpHandlerOptions, + cb: (err: any, data?: GetResourcePolicyCommandOutput) => void + ): void; + importTable( + args: ImportTableCommandInput, + options?: __HttpHandlerOptions + ): Promise; + importTable( + args: ImportTableCommandInput, + cb: (err: any, data?: ImportTableCommandOutput) => void + ): void; + importTable( + args: ImportTableCommandInput, + options: __HttpHandlerOptions, + cb: (err: any, data?: ImportTableCommandOutput) => void + ): void; + listBackups(): Promise; + listBackups( + args: ListBackupsCommandInput, + options?: __HttpHandlerOptions + ): Promise; + listBackups( + args: ListBackupsCommandInput, + cb: (err: any, data?: ListBackupsCommandOutput) => void + ): void; + listBackups( + args: ListBackupsCommandInput, + options: __HttpHandlerOptions, + cb: (err: any, data?: ListBackupsCommandOutput) => void + ): void; + listContributorInsights(): Promise; + 
listContributorInsights( + args: ListContributorInsightsCommandInput, + options?: __HttpHandlerOptions + ): Promise; + listContributorInsights( + args: ListContributorInsightsCommandInput, + cb: (err: any, data?: ListContributorInsightsCommandOutput) => void + ): void; + listContributorInsights( + args: ListContributorInsightsCommandInput, + options: __HttpHandlerOptions, + cb: (err: any, data?: ListContributorInsightsCommandOutput) => void + ): void; + listExports(): Promise; + listExports( + args: ListExportsCommandInput, + options?: __HttpHandlerOptions + ): Promise; + listExports( + args: ListExportsCommandInput, + cb: (err: any, data?: ListExportsCommandOutput) => void + ): void; + listExports( + args: ListExportsCommandInput, + options: __HttpHandlerOptions, + cb: (err: any, data?: ListExportsCommandOutput) => void + ): void; + listGlobalTables(): Promise; + listGlobalTables( + args: ListGlobalTablesCommandInput, + options?: __HttpHandlerOptions + ): Promise; + listGlobalTables( + args: ListGlobalTablesCommandInput, + cb: (err: any, data?: ListGlobalTablesCommandOutput) => void + ): void; + listGlobalTables( + args: ListGlobalTablesCommandInput, + options: __HttpHandlerOptions, + cb: (err: any, data?: ListGlobalTablesCommandOutput) => void + ): void; + listImports(): Promise; + listImports( + args: ListImportsCommandInput, + options?: __HttpHandlerOptions + ): Promise; + listImports( + args: ListImportsCommandInput, + cb: (err: any, data?: ListImportsCommandOutput) => void + ): void; + listImports( + args: ListImportsCommandInput, + options: __HttpHandlerOptions, + cb: (err: any, data?: ListImportsCommandOutput) => void + ): void; + listTables(): Promise; + listTables( + args: ListTablesCommandInput, + options?: __HttpHandlerOptions + ): Promise; + listTables( + args: ListTablesCommandInput, + cb: (err: any, data?: ListTablesCommandOutput) => void + ): void; + listTables( + args: ListTablesCommandInput, + options: __HttpHandlerOptions, + cb: (err: any, data?: 
ListTablesCommandOutput) => void + ): void; + listTagsOfResource( + args: ListTagsOfResourceCommandInput, + options?: __HttpHandlerOptions + ): Promise; + listTagsOfResource( + args: ListTagsOfResourceCommandInput, + cb: (err: any, data?: ListTagsOfResourceCommandOutput) => void + ): void; + listTagsOfResource( + args: ListTagsOfResourceCommandInput, + options: __HttpHandlerOptions, + cb: (err: any, data?: ListTagsOfResourceCommandOutput) => void + ): void; + putItem( + args: PutItemCommandInput, + options?: __HttpHandlerOptions + ): Promise; + putItem( + args: PutItemCommandInput, + cb: (err: any, data?: PutItemCommandOutput) => void + ): void; + putItem( + args: PutItemCommandInput, + options: __HttpHandlerOptions, + cb: (err: any, data?: PutItemCommandOutput) => void + ): void; + putResourcePolicy( + args: PutResourcePolicyCommandInput, + options?: __HttpHandlerOptions + ): Promise; + putResourcePolicy( + args: PutResourcePolicyCommandInput, + cb: (err: any, data?: PutResourcePolicyCommandOutput) => void + ): void; + putResourcePolicy( + args: PutResourcePolicyCommandInput, + options: __HttpHandlerOptions, + cb: (err: any, data?: PutResourcePolicyCommandOutput) => void + ): void; + query( + args: QueryCommandInput, + options?: __HttpHandlerOptions + ): Promise; + query( + args: QueryCommandInput, + cb: (err: any, data?: QueryCommandOutput) => void + ): void; + query( + args: QueryCommandInput, + options: __HttpHandlerOptions, + cb: (err: any, data?: QueryCommandOutput) => void + ): void; + restoreTableFromBackup( + args: RestoreTableFromBackupCommandInput, + options?: __HttpHandlerOptions + ): Promise; + restoreTableFromBackup( + args: RestoreTableFromBackupCommandInput, + cb: (err: any, data?: RestoreTableFromBackupCommandOutput) => void + ): void; + restoreTableFromBackup( + args: RestoreTableFromBackupCommandInput, + options: __HttpHandlerOptions, + cb: (err: any, data?: RestoreTableFromBackupCommandOutput) => void + ): void; + restoreTableToPointInTime( + 
args: RestoreTableToPointInTimeCommandInput, + options?: __HttpHandlerOptions + ): Promise; + restoreTableToPointInTime( + args: RestoreTableToPointInTimeCommandInput, + cb: (err: any, data?: RestoreTableToPointInTimeCommandOutput) => void + ): void; + restoreTableToPointInTime( + args: RestoreTableToPointInTimeCommandInput, + options: __HttpHandlerOptions, + cb: (err: any, data?: RestoreTableToPointInTimeCommandOutput) => void + ): void; + scan( + args: ScanCommandInput, + options?: __HttpHandlerOptions + ): Promise; + scan( + args: ScanCommandInput, + cb: (err: any, data?: ScanCommandOutput) => void + ): void; + scan( + args: ScanCommandInput, + options: __HttpHandlerOptions, + cb: (err: any, data?: ScanCommandOutput) => void + ): void; + tagResource( + args: TagResourceCommandInput, + options?: __HttpHandlerOptions + ): Promise; + tagResource( + args: TagResourceCommandInput, + cb: (err: any, data?: TagResourceCommandOutput) => void + ): void; + tagResource( + args: TagResourceCommandInput, + options: __HttpHandlerOptions, + cb: (err: any, data?: TagResourceCommandOutput) => void + ): void; + transactGetItems( + args: TransactGetItemsCommandInput, + options?: __HttpHandlerOptions + ): Promise; + transactGetItems( + args: TransactGetItemsCommandInput, + cb: (err: any, data?: TransactGetItemsCommandOutput) => void + ): void; + transactGetItems( + args: TransactGetItemsCommandInput, + options: __HttpHandlerOptions, + cb: (err: any, data?: TransactGetItemsCommandOutput) => void + ): void; + transactWriteItems( + args: TransactWriteItemsCommandInput, + options?: __HttpHandlerOptions + ): Promise; + transactWriteItems( + args: TransactWriteItemsCommandInput, + cb: (err: any, data?: TransactWriteItemsCommandOutput) => void + ): void; + transactWriteItems( + args: TransactWriteItemsCommandInput, + options: __HttpHandlerOptions, + cb: (err: any, data?: TransactWriteItemsCommandOutput) => void + ): void; + untagResource( + args: UntagResourceCommandInput, + options?: 
__HttpHandlerOptions + ): Promise; + untagResource( + args: UntagResourceCommandInput, + cb: (err: any, data?: UntagResourceCommandOutput) => void + ): void; + untagResource( + args: UntagResourceCommandInput, + options: __HttpHandlerOptions, + cb: (err: any, data?: UntagResourceCommandOutput) => void + ): void; + updateContinuousBackups( + args: UpdateContinuousBackupsCommandInput, + options?: __HttpHandlerOptions + ): Promise; + updateContinuousBackups( + args: UpdateContinuousBackupsCommandInput, + cb: (err: any, data?: UpdateContinuousBackupsCommandOutput) => void + ): void; + updateContinuousBackups( + args: UpdateContinuousBackupsCommandInput, + options: __HttpHandlerOptions, + cb: (err: any, data?: UpdateContinuousBackupsCommandOutput) => void + ): void; + updateContributorInsights( + args: UpdateContributorInsightsCommandInput, + options?: __HttpHandlerOptions + ): Promise; + updateContributorInsights( + args: UpdateContributorInsightsCommandInput, + cb: (err: any, data?: UpdateContributorInsightsCommandOutput) => void + ): void; + updateContributorInsights( + args: UpdateContributorInsightsCommandInput, + options: __HttpHandlerOptions, + cb: (err: any, data?: UpdateContributorInsightsCommandOutput) => void + ): void; + updateGlobalTable( + args: UpdateGlobalTableCommandInput, + options?: __HttpHandlerOptions + ): Promise; + updateGlobalTable( + args: UpdateGlobalTableCommandInput, + cb: (err: any, data?: UpdateGlobalTableCommandOutput) => void + ): void; + updateGlobalTable( + args: UpdateGlobalTableCommandInput, + options: __HttpHandlerOptions, + cb: (err: any, data?: UpdateGlobalTableCommandOutput) => void + ): void; + updateGlobalTableSettings( + args: UpdateGlobalTableSettingsCommandInput, + options?: __HttpHandlerOptions + ): Promise; + updateGlobalTableSettings( + args: UpdateGlobalTableSettingsCommandInput, + cb: (err: any, data?: UpdateGlobalTableSettingsCommandOutput) => void + ): void; + updateGlobalTableSettings( + args: 
UpdateGlobalTableSettingsCommandInput, + options: __HttpHandlerOptions, + cb: (err: any, data?: UpdateGlobalTableSettingsCommandOutput) => void + ): void; + updateItem( + args: UpdateItemCommandInput, + options?: __HttpHandlerOptions + ): Promise; + updateItem( + args: UpdateItemCommandInput, + cb: (err: any, data?: UpdateItemCommandOutput) => void + ): void; + updateItem( + args: UpdateItemCommandInput, + options: __HttpHandlerOptions, + cb: (err: any, data?: UpdateItemCommandOutput) => void + ): void; + updateKinesisStreamingDestination( + args: UpdateKinesisStreamingDestinationCommandInput, + options?: __HttpHandlerOptions + ): Promise; + updateKinesisStreamingDestination( + args: UpdateKinesisStreamingDestinationCommandInput, + cb: ( + err: any, + data?: UpdateKinesisStreamingDestinationCommandOutput + ) => void + ): void; + updateKinesisStreamingDestination( + args: UpdateKinesisStreamingDestinationCommandInput, + options: __HttpHandlerOptions, + cb: ( + err: any, + data?: UpdateKinesisStreamingDestinationCommandOutput + ) => void + ): void; + updateTable( + args: UpdateTableCommandInput, + options?: __HttpHandlerOptions + ): Promise; + updateTable( + args: UpdateTableCommandInput, + cb: (err: any, data?: UpdateTableCommandOutput) => void + ): void; + updateTable( + args: UpdateTableCommandInput, + options: __HttpHandlerOptions, + cb: (err: any, data?: UpdateTableCommandOutput) => void + ): void; + updateTableReplicaAutoScaling( + args: UpdateTableReplicaAutoScalingCommandInput, + options?: __HttpHandlerOptions + ): Promise; + updateTableReplicaAutoScaling( + args: UpdateTableReplicaAutoScalingCommandInput, + cb: (err: any, data?: UpdateTableReplicaAutoScalingCommandOutput) => void + ): void; + updateTableReplicaAutoScaling( + args: UpdateTableReplicaAutoScalingCommandInput, + options: __HttpHandlerOptions, + cb: (err: any, data?: UpdateTableReplicaAutoScalingCommandOutput) => void + ): void; + updateTimeToLive( + args: UpdateTimeToLiveCommandInput, + 
options?: __HttpHandlerOptions + ): Promise; + updateTimeToLive( + args: UpdateTimeToLiveCommandInput, + cb: (err: any, data?: UpdateTimeToLiveCommandOutput) => void + ): void; + updateTimeToLive( + args: UpdateTimeToLiveCommandInput, + options: __HttpHandlerOptions, + cb: (err: any, data?: UpdateTimeToLiveCommandOutput) => void + ): void; +} +export declare class DynamoDB extends DynamoDBClient implements DynamoDB {} diff --git a/amplify/functions/downloadDocument/node_modules/@aws-sdk/client-dynamodb/dist-types/ts3.4/DynamoDBClient.d.ts b/amplify/functions/downloadDocument/node_modules/@aws-sdk/client-dynamodb/dist-types/ts3.4/DynamoDBClient.d.ts new file mode 100644 index 0000000..87aca7c --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@aws-sdk/client-dynamodb/dist-types/ts3.4/DynamoDBClient.d.ts @@ -0,0 +1,472 @@ +import { + AccountIdEndpointMode, + AccountIdEndpointModeInputConfig, + AccountIdEndpointModeResolvedConfig, +} from "@aws-sdk/core/account-id-endpoint"; +import { + EndpointDiscoveryInputConfig, + EndpointDiscoveryResolvedConfig, +} from "@aws-sdk/middleware-endpoint-discovery"; +import { + HostHeaderInputConfig, + HostHeaderResolvedConfig, +} from "@aws-sdk/middleware-host-header"; +import { + UserAgentInputConfig, + UserAgentResolvedConfig, +} from "@aws-sdk/middleware-user-agent"; +import { + RegionInputConfig, + RegionResolvedConfig, +} from "@smithy/config-resolver"; +import { + EndpointInputConfig, + EndpointResolvedConfig, +} from "@smithy/middleware-endpoint"; +import { + RetryInputConfig, + RetryResolvedConfig, +} from "@smithy/middleware-retry"; +import { HttpHandlerUserInput as __HttpHandlerUserInput } from "@smithy/protocol-http"; +import { + Client as __Client, + DefaultsMode as __DefaultsMode, + SmithyConfiguration as __SmithyConfiguration, + SmithyResolvedConfiguration as __SmithyResolvedConfiguration, +} from "@smithy/smithy-client"; +import { + AwsCredentialIdentityProvider, + BodyLengthCalculator as 
__BodyLengthCalculator, + CheckOptionalClientConfig as __CheckOptionalClientConfig, + ChecksumConstructor as __ChecksumConstructor, + Decoder as __Decoder, + Encoder as __Encoder, + HashConstructor as __HashConstructor, + HttpHandlerOptions as __HttpHandlerOptions, + Logger as __Logger, + Provider as __Provider, + Provider, + StreamCollector as __StreamCollector, + UrlParser as __UrlParser, + UserAgent as __UserAgent, +} from "@smithy/types"; +import { + HttpAuthSchemeInputConfig, + HttpAuthSchemeResolvedConfig, +} from "./auth/httpAuthSchemeProvider"; +import { + BatchExecuteStatementCommandInput, + BatchExecuteStatementCommandOutput, +} from "./commands/BatchExecuteStatementCommand"; +import { + BatchGetItemCommandInput, + BatchGetItemCommandOutput, +} from "./commands/BatchGetItemCommand"; +import { + BatchWriteItemCommandInput, + BatchWriteItemCommandOutput, +} from "./commands/BatchWriteItemCommand"; +import { + CreateBackupCommandInput, + CreateBackupCommandOutput, +} from "./commands/CreateBackupCommand"; +import { + CreateGlobalTableCommandInput, + CreateGlobalTableCommandOutput, +} from "./commands/CreateGlobalTableCommand"; +import { + CreateTableCommandInput, + CreateTableCommandOutput, +} from "./commands/CreateTableCommand"; +import { + DeleteBackupCommandInput, + DeleteBackupCommandOutput, +} from "./commands/DeleteBackupCommand"; +import { + DeleteItemCommandInput, + DeleteItemCommandOutput, +} from "./commands/DeleteItemCommand"; +import { + DeleteResourcePolicyCommandInput, + DeleteResourcePolicyCommandOutput, +} from "./commands/DeleteResourcePolicyCommand"; +import { + DeleteTableCommandInput, + DeleteTableCommandOutput, +} from "./commands/DeleteTableCommand"; +import { + DescribeBackupCommandInput, + DescribeBackupCommandOutput, +} from "./commands/DescribeBackupCommand"; +import { + DescribeContinuousBackupsCommandInput, + DescribeContinuousBackupsCommandOutput, +} from "./commands/DescribeContinuousBackupsCommand"; +import { + 
DescribeContributorInsightsCommandInput, + DescribeContributorInsightsCommandOutput, +} from "./commands/DescribeContributorInsightsCommand"; +import { + DescribeEndpointsCommandInput, + DescribeEndpointsCommandOutput, +} from "./commands/DescribeEndpointsCommand"; +import { + DescribeExportCommandInput, + DescribeExportCommandOutput, +} from "./commands/DescribeExportCommand"; +import { + DescribeGlobalTableCommandInput, + DescribeGlobalTableCommandOutput, +} from "./commands/DescribeGlobalTableCommand"; +import { + DescribeGlobalTableSettingsCommandInput, + DescribeGlobalTableSettingsCommandOutput, +} from "./commands/DescribeGlobalTableSettingsCommand"; +import { + DescribeImportCommandInput, + DescribeImportCommandOutput, +} from "./commands/DescribeImportCommand"; +import { + DescribeKinesisStreamingDestinationCommandInput, + DescribeKinesisStreamingDestinationCommandOutput, +} from "./commands/DescribeKinesisStreamingDestinationCommand"; +import { + DescribeLimitsCommandInput, + DescribeLimitsCommandOutput, +} from "./commands/DescribeLimitsCommand"; +import { + DescribeTableCommandInput, + DescribeTableCommandOutput, +} from "./commands/DescribeTableCommand"; +import { + DescribeTableReplicaAutoScalingCommandInput, + DescribeTableReplicaAutoScalingCommandOutput, +} from "./commands/DescribeTableReplicaAutoScalingCommand"; +import { + DescribeTimeToLiveCommandInput, + DescribeTimeToLiveCommandOutput, +} from "./commands/DescribeTimeToLiveCommand"; +import { + DisableKinesisStreamingDestinationCommandInput, + DisableKinesisStreamingDestinationCommandOutput, +} from "./commands/DisableKinesisStreamingDestinationCommand"; +import { + EnableKinesisStreamingDestinationCommandInput, + EnableKinesisStreamingDestinationCommandOutput, +} from "./commands/EnableKinesisStreamingDestinationCommand"; +import { + ExecuteStatementCommandInput, + ExecuteStatementCommandOutput, +} from "./commands/ExecuteStatementCommand"; +import { + ExecuteTransactionCommandInput, + 
ExecuteTransactionCommandOutput, +} from "./commands/ExecuteTransactionCommand"; +import { + ExportTableToPointInTimeCommandInput, + ExportTableToPointInTimeCommandOutput, +} from "./commands/ExportTableToPointInTimeCommand"; +import { + GetItemCommandInput, + GetItemCommandOutput, +} from "./commands/GetItemCommand"; +import { + GetResourcePolicyCommandInput, + GetResourcePolicyCommandOutput, +} from "./commands/GetResourcePolicyCommand"; +import { + ImportTableCommandInput, + ImportTableCommandOutput, +} from "./commands/ImportTableCommand"; +import { + ListBackupsCommandInput, + ListBackupsCommandOutput, +} from "./commands/ListBackupsCommand"; +import { + ListContributorInsightsCommandInput, + ListContributorInsightsCommandOutput, +} from "./commands/ListContributorInsightsCommand"; +import { + ListExportsCommandInput, + ListExportsCommandOutput, +} from "./commands/ListExportsCommand"; +import { + ListGlobalTablesCommandInput, + ListGlobalTablesCommandOutput, +} from "./commands/ListGlobalTablesCommand"; +import { + ListImportsCommandInput, + ListImportsCommandOutput, +} from "./commands/ListImportsCommand"; +import { + ListTablesCommandInput, + ListTablesCommandOutput, +} from "./commands/ListTablesCommand"; +import { + ListTagsOfResourceCommandInput, + ListTagsOfResourceCommandOutput, +} from "./commands/ListTagsOfResourceCommand"; +import { + PutItemCommandInput, + PutItemCommandOutput, +} from "./commands/PutItemCommand"; +import { + PutResourcePolicyCommandInput, + PutResourcePolicyCommandOutput, +} from "./commands/PutResourcePolicyCommand"; +import { QueryCommandInput, QueryCommandOutput } from "./commands/QueryCommand"; +import { + RestoreTableFromBackupCommandInput, + RestoreTableFromBackupCommandOutput, +} from "./commands/RestoreTableFromBackupCommand"; +import { + RestoreTableToPointInTimeCommandInput, + RestoreTableToPointInTimeCommandOutput, +} from "./commands/RestoreTableToPointInTimeCommand"; +import { ScanCommandInput, ScanCommandOutput } 
from "./commands/ScanCommand"; +import { + TagResourceCommandInput, + TagResourceCommandOutput, +} from "./commands/TagResourceCommand"; +import { + TransactGetItemsCommandInput, + TransactGetItemsCommandOutput, +} from "./commands/TransactGetItemsCommand"; +import { + TransactWriteItemsCommandInput, + TransactWriteItemsCommandOutput, +} from "./commands/TransactWriteItemsCommand"; +import { + UntagResourceCommandInput, + UntagResourceCommandOutput, +} from "./commands/UntagResourceCommand"; +import { + UpdateContinuousBackupsCommandInput, + UpdateContinuousBackupsCommandOutput, +} from "./commands/UpdateContinuousBackupsCommand"; +import { + UpdateContributorInsightsCommandInput, + UpdateContributorInsightsCommandOutput, +} from "./commands/UpdateContributorInsightsCommand"; +import { + UpdateGlobalTableCommandInput, + UpdateGlobalTableCommandOutput, +} from "./commands/UpdateGlobalTableCommand"; +import { + UpdateGlobalTableSettingsCommandInput, + UpdateGlobalTableSettingsCommandOutput, +} from "./commands/UpdateGlobalTableSettingsCommand"; +import { + UpdateItemCommandInput, + UpdateItemCommandOutput, +} from "./commands/UpdateItemCommand"; +import { + UpdateKinesisStreamingDestinationCommandInput, + UpdateKinesisStreamingDestinationCommandOutput, +} from "./commands/UpdateKinesisStreamingDestinationCommand"; +import { + UpdateTableCommandInput, + UpdateTableCommandOutput, +} from "./commands/UpdateTableCommand"; +import { + UpdateTableReplicaAutoScalingCommandInput, + UpdateTableReplicaAutoScalingCommandOutput, +} from "./commands/UpdateTableReplicaAutoScalingCommand"; +import { + UpdateTimeToLiveCommandInput, + UpdateTimeToLiveCommandOutput, +} from "./commands/UpdateTimeToLiveCommand"; +import { + ClientInputEndpointParameters, + ClientResolvedEndpointParameters, + EndpointParameters, +} from "./endpoint/EndpointParameters"; +import { RuntimeExtension, RuntimeExtensionsConfig } from "./runtimeExtensions"; +export { __Client }; +export type ServiceInputTypes = 
+ | BatchExecuteStatementCommandInput + | BatchGetItemCommandInput + | BatchWriteItemCommandInput + | CreateBackupCommandInput + | CreateGlobalTableCommandInput + | CreateTableCommandInput + | DeleteBackupCommandInput + | DeleteItemCommandInput + | DeleteResourcePolicyCommandInput + | DeleteTableCommandInput + | DescribeBackupCommandInput + | DescribeContinuousBackupsCommandInput + | DescribeContributorInsightsCommandInput + | DescribeEndpointsCommandInput + | DescribeExportCommandInput + | DescribeGlobalTableCommandInput + | DescribeGlobalTableSettingsCommandInput + | DescribeImportCommandInput + | DescribeKinesisStreamingDestinationCommandInput + | DescribeLimitsCommandInput + | DescribeTableCommandInput + | DescribeTableReplicaAutoScalingCommandInput + | DescribeTimeToLiveCommandInput + | DisableKinesisStreamingDestinationCommandInput + | EnableKinesisStreamingDestinationCommandInput + | ExecuteStatementCommandInput + | ExecuteTransactionCommandInput + | ExportTableToPointInTimeCommandInput + | GetItemCommandInput + | GetResourcePolicyCommandInput + | ImportTableCommandInput + | ListBackupsCommandInput + | ListContributorInsightsCommandInput + | ListExportsCommandInput + | ListGlobalTablesCommandInput + | ListImportsCommandInput + | ListTablesCommandInput + | ListTagsOfResourceCommandInput + | PutItemCommandInput + | PutResourcePolicyCommandInput + | QueryCommandInput + | RestoreTableFromBackupCommandInput + | RestoreTableToPointInTimeCommandInput + | ScanCommandInput + | TagResourceCommandInput + | TransactGetItemsCommandInput + | TransactWriteItemsCommandInput + | UntagResourceCommandInput + | UpdateContinuousBackupsCommandInput + | UpdateContributorInsightsCommandInput + | UpdateGlobalTableCommandInput + | UpdateGlobalTableSettingsCommandInput + | UpdateItemCommandInput + | UpdateKinesisStreamingDestinationCommandInput + | UpdateTableCommandInput + | UpdateTableReplicaAutoScalingCommandInput + | UpdateTimeToLiveCommandInput; +export type ServiceOutputTypes = 
+ | BatchExecuteStatementCommandOutput + | BatchGetItemCommandOutput + | BatchWriteItemCommandOutput + | CreateBackupCommandOutput + | CreateGlobalTableCommandOutput + | CreateTableCommandOutput + | DeleteBackupCommandOutput + | DeleteItemCommandOutput + | DeleteResourcePolicyCommandOutput + | DeleteTableCommandOutput + | DescribeBackupCommandOutput + | DescribeContinuousBackupsCommandOutput + | DescribeContributorInsightsCommandOutput + | DescribeEndpointsCommandOutput + | DescribeExportCommandOutput + | DescribeGlobalTableCommandOutput + | DescribeGlobalTableSettingsCommandOutput + | DescribeImportCommandOutput + | DescribeKinesisStreamingDestinationCommandOutput + | DescribeLimitsCommandOutput + | DescribeTableCommandOutput + | DescribeTableReplicaAutoScalingCommandOutput + | DescribeTimeToLiveCommandOutput + | DisableKinesisStreamingDestinationCommandOutput + | EnableKinesisStreamingDestinationCommandOutput + | ExecuteStatementCommandOutput + | ExecuteTransactionCommandOutput + | ExportTableToPointInTimeCommandOutput + | GetItemCommandOutput + | GetResourcePolicyCommandOutput + | ImportTableCommandOutput + | ListBackupsCommandOutput + | ListContributorInsightsCommandOutput + | ListExportsCommandOutput + | ListGlobalTablesCommandOutput + | ListImportsCommandOutput + | ListTablesCommandOutput + | ListTagsOfResourceCommandOutput + | PutItemCommandOutput + | PutResourcePolicyCommandOutput + | QueryCommandOutput + | RestoreTableFromBackupCommandOutput + | RestoreTableToPointInTimeCommandOutput + | ScanCommandOutput + | TagResourceCommandOutput + | TransactGetItemsCommandOutput + | TransactWriteItemsCommandOutput + | UntagResourceCommandOutput + | UpdateContinuousBackupsCommandOutput + | UpdateContributorInsightsCommandOutput + | UpdateGlobalTableCommandOutput + | UpdateGlobalTableSettingsCommandOutput + | UpdateItemCommandOutput + | UpdateKinesisStreamingDestinationCommandOutput + | UpdateTableCommandOutput + | UpdateTableReplicaAutoScalingCommandOutput + | 
UpdateTimeToLiveCommandOutput; +export interface ClientDefaults + extends Partial<__SmithyConfiguration<__HttpHandlerOptions>> { + requestHandler?: __HttpHandlerUserInput; + sha256?: __ChecksumConstructor | __HashConstructor; + urlParser?: __UrlParser; + bodyLengthChecker?: __BodyLengthCalculator; + streamCollector?: __StreamCollector; + base64Decoder?: __Decoder; + base64Encoder?: __Encoder; + utf8Decoder?: __Decoder; + utf8Encoder?: __Encoder; + runtime?: string; + disableHostPrefix?: boolean; + serviceId?: string; + useDualstackEndpoint?: boolean | __Provider; + useFipsEndpoint?: boolean | __Provider; + region?: string | __Provider; + profile?: string; + accountIdEndpointMode?: + | AccountIdEndpointMode + | __Provider; + defaultUserAgentProvider?: Provider<__UserAgent>; + credentialDefaultProvider?: (input: any) => AwsCredentialIdentityProvider; + maxAttempts?: number | __Provider; + retryMode?: string | __Provider; + logger?: __Logger; + extensions?: RuntimeExtension[]; + defaultsMode?: __DefaultsMode | __Provider<__DefaultsMode>; + endpointDiscoveryEnabledProvider?: __Provider; +} +export type DynamoDBClientConfigType = Partial< + __SmithyConfiguration<__HttpHandlerOptions> +> & + ClientDefaults & + AccountIdEndpointModeInputConfig & + UserAgentInputConfig & + RetryInputConfig & + RegionInputConfig & + HostHeaderInputConfig & + EndpointInputConfig & + HttpAuthSchemeInputConfig & + EndpointDiscoveryInputConfig & + ClientInputEndpointParameters; +export interface DynamoDBClientConfig extends DynamoDBClientConfigType {} +export type DynamoDBClientResolvedConfigType = + __SmithyResolvedConfiguration<__HttpHandlerOptions> & + Required & + RuntimeExtensionsConfig & + AccountIdEndpointModeResolvedConfig & + UserAgentResolvedConfig & + RetryResolvedConfig & + RegionResolvedConfig & + HostHeaderResolvedConfig & + EndpointResolvedConfig & + HttpAuthSchemeResolvedConfig & + EndpointDiscoveryResolvedConfig & + ClientResolvedEndpointParameters; +export interface 
DynamoDBClientResolvedConfig + extends DynamoDBClientResolvedConfigType {} +export declare class DynamoDBClient extends __Client< + __HttpHandlerOptions, + ServiceInputTypes, + ServiceOutputTypes, + DynamoDBClientResolvedConfig +> { + readonly config: DynamoDBClientResolvedConfig; + constructor( + ...[configuration]: __CheckOptionalClientConfig + ); + destroy(): void; +} diff --git a/amplify/functions/downloadDocument/node_modules/@aws-sdk/client-dynamodb/dist-types/ts3.4/auth/httpAuthExtensionConfiguration.d.ts b/amplify/functions/downloadDocument/node_modules/@aws-sdk/client-dynamodb/dist-types/ts3.4/auth/httpAuthExtensionConfiguration.d.ts new file mode 100644 index 0000000..236dccc --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@aws-sdk/client-dynamodb/dist-types/ts3.4/auth/httpAuthExtensionConfiguration.d.ts @@ -0,0 +1,32 @@ +import { + AwsCredentialIdentity, + AwsCredentialIdentityProvider, + HttpAuthScheme, +} from "@smithy/types"; +import { DynamoDBHttpAuthSchemeProvider } from "./httpAuthSchemeProvider"; +export interface HttpAuthExtensionConfiguration { + setHttpAuthScheme(httpAuthScheme: HttpAuthScheme): void; + httpAuthSchemes(): HttpAuthScheme[]; + setHttpAuthSchemeProvider( + httpAuthSchemeProvider: DynamoDBHttpAuthSchemeProvider + ): void; + httpAuthSchemeProvider(): DynamoDBHttpAuthSchemeProvider; + setCredentials( + credentials: AwsCredentialIdentity | AwsCredentialIdentityProvider + ): void; + credentials(): + | AwsCredentialIdentity + | AwsCredentialIdentityProvider + | undefined; +} +export type HttpAuthRuntimeConfig = Partial<{ + httpAuthSchemes: HttpAuthScheme[]; + httpAuthSchemeProvider: DynamoDBHttpAuthSchemeProvider; + credentials: AwsCredentialIdentity | AwsCredentialIdentityProvider; +}>; +export declare const getHttpAuthExtensionConfiguration: ( + runtimeConfig: HttpAuthRuntimeConfig +) => HttpAuthExtensionConfiguration; +export declare const resolveHttpAuthRuntimeConfig: ( + config: HttpAuthExtensionConfiguration +) 
=> HttpAuthRuntimeConfig; diff --git a/amplify/functions/downloadDocument/node_modules/@aws-sdk/client-dynamodb/dist-types/ts3.4/auth/httpAuthSchemeProvider.d.ts b/amplify/functions/downloadDocument/node_modules/@aws-sdk/client-dynamodb/dist-types/ts3.4/auth/httpAuthSchemeProvider.d.ts new file mode 100644 index 0000000..299733c --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@aws-sdk/client-dynamodb/dist-types/ts3.4/auth/httpAuthSchemeProvider.d.ts @@ -0,0 +1,47 @@ +import { + AwsSdkSigV4AuthInputConfig, + AwsSdkSigV4AuthResolvedConfig, + AwsSdkSigV4PreviouslyResolved, +} from "@aws-sdk/core"; +import { + HandlerExecutionContext, + HttpAuthScheme, + HttpAuthSchemeParameters, + HttpAuthSchemeParametersProvider, + HttpAuthSchemeProvider, + Provider, +} from "@smithy/types"; +import { DynamoDBClientResolvedConfig } from "../DynamoDBClient"; +export interface DynamoDBHttpAuthSchemeParameters + extends HttpAuthSchemeParameters { + region?: string; +} +export interface DynamoDBHttpAuthSchemeParametersProvider + extends HttpAuthSchemeParametersProvider< + DynamoDBClientResolvedConfig, + HandlerExecutionContext, + DynamoDBHttpAuthSchemeParameters, + object + > {} +export declare const defaultDynamoDBHttpAuthSchemeParametersProvider: ( + config: DynamoDBClientResolvedConfig, + context: HandlerExecutionContext, + input: object +) => Promise; +export interface DynamoDBHttpAuthSchemeProvider + extends HttpAuthSchemeProvider {} +export declare const defaultDynamoDBHttpAuthSchemeProvider: DynamoDBHttpAuthSchemeProvider; +export interface HttpAuthSchemeInputConfig extends AwsSdkSigV4AuthInputConfig { + authSchemePreference?: string[] | Provider; + httpAuthSchemes?: HttpAuthScheme[]; + httpAuthSchemeProvider?: DynamoDBHttpAuthSchemeProvider; +} +export interface HttpAuthSchemeResolvedConfig + extends AwsSdkSigV4AuthResolvedConfig { + readonly authSchemePreference: Provider; + readonly httpAuthSchemes: HttpAuthScheme[]; + readonly httpAuthSchemeProvider: 
DynamoDBHttpAuthSchemeProvider; +} +export declare const resolveHttpAuthSchemeConfig: ( + config: T & HttpAuthSchemeInputConfig & AwsSdkSigV4PreviouslyResolved +) => T & HttpAuthSchemeResolvedConfig; diff --git a/amplify/functions/downloadDocument/node_modules/@aws-sdk/client-dynamodb/dist-types/ts3.4/commands/BatchExecuteStatementCommand.d.ts b/amplify/functions/downloadDocument/node_modules/@aws-sdk/client-dynamodb/dist-types/ts3.4/commands/BatchExecuteStatementCommand.d.ts new file mode 100644 index 0000000..f7c8eb9 --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@aws-sdk/client-dynamodb/dist-types/ts3.4/commands/BatchExecuteStatementCommand.d.ts @@ -0,0 +1,51 @@ +import { Command as $Command } from "@smithy/smithy-client"; +import { MetadataBearer as __MetadataBearer } from "@smithy/types"; +import { + DynamoDBClientResolvedConfig, + ServiceInputTypes, + ServiceOutputTypes, +} from "../DynamoDBClient"; +import { + BatchExecuteStatementInput, + BatchExecuteStatementOutput, +} from "../models/models_0"; +export { __MetadataBearer }; +export { $Command }; +export interface BatchExecuteStatementCommandInput + extends BatchExecuteStatementInput {} +export interface BatchExecuteStatementCommandOutput + extends BatchExecuteStatementOutput, + __MetadataBearer {} +declare const BatchExecuteStatementCommand_base: { + new ( + input: BatchExecuteStatementCommandInput + ): import("@smithy/smithy-client").CommandImpl< + BatchExecuteStatementCommandInput, + BatchExecuteStatementCommandOutput, + DynamoDBClientResolvedConfig, + ServiceInputTypes, + ServiceOutputTypes + >; + new ( + __0_0: BatchExecuteStatementCommandInput + ): import("@smithy/smithy-client").CommandImpl< + BatchExecuteStatementCommandInput, + BatchExecuteStatementCommandOutput, + DynamoDBClientResolvedConfig, + ServiceInputTypes, + ServiceOutputTypes + >; + getEndpointParameterInstructions(): import("@smithy/middleware-endpoint").EndpointParameterInstructions; +}; +export declare class 
BatchExecuteStatementCommand extends BatchExecuteStatementCommand_base { + protected static __types: { + api: { + input: BatchExecuteStatementInput; + output: BatchExecuteStatementOutput; + }; + sdk: { + input: BatchExecuteStatementCommandInput; + output: BatchExecuteStatementCommandOutput; + }; + }; +} diff --git a/amplify/functions/downloadDocument/node_modules/@aws-sdk/client-dynamodb/dist-types/ts3.4/commands/BatchGetItemCommand.d.ts b/amplify/functions/downloadDocument/node_modules/@aws-sdk/client-dynamodb/dist-types/ts3.4/commands/BatchGetItemCommand.d.ts new file mode 100644 index 0000000..7e11a34 --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@aws-sdk/client-dynamodb/dist-types/ts3.4/commands/BatchGetItemCommand.d.ts @@ -0,0 +1,47 @@ +import { Command as $Command } from "@smithy/smithy-client"; +import { MetadataBearer as __MetadataBearer } from "@smithy/types"; +import { + DynamoDBClientResolvedConfig, + ServiceInputTypes, + ServiceOutputTypes, +} from "../DynamoDBClient"; +import { BatchGetItemInput, BatchGetItemOutput } from "../models/models_0"; +export { __MetadataBearer }; +export { $Command }; +export interface BatchGetItemCommandInput extends BatchGetItemInput {} +export interface BatchGetItemCommandOutput + extends BatchGetItemOutput, + __MetadataBearer {} +declare const BatchGetItemCommand_base: { + new ( + input: BatchGetItemCommandInput + ): import("@smithy/smithy-client").CommandImpl< + BatchGetItemCommandInput, + BatchGetItemCommandOutput, + DynamoDBClientResolvedConfig, + ServiceInputTypes, + ServiceOutputTypes + >; + new ( + __0_0: BatchGetItemCommandInput + ): import("@smithy/smithy-client").CommandImpl< + BatchGetItemCommandInput, + BatchGetItemCommandOutput, + DynamoDBClientResolvedConfig, + ServiceInputTypes, + ServiceOutputTypes + >; + getEndpointParameterInstructions(): import("@smithy/middleware-endpoint").EndpointParameterInstructions; +}; +export declare class BatchGetItemCommand extends BatchGetItemCommand_base 
{ + protected static __types: { + api: { + input: BatchGetItemInput; + output: BatchGetItemOutput; + }; + sdk: { + input: BatchGetItemCommandInput; + output: BatchGetItemCommandOutput; + }; + }; +} diff --git a/amplify/functions/downloadDocument/node_modules/@aws-sdk/client-dynamodb/dist-types/ts3.4/commands/BatchWriteItemCommand.d.ts b/amplify/functions/downloadDocument/node_modules/@aws-sdk/client-dynamodb/dist-types/ts3.4/commands/BatchWriteItemCommand.d.ts new file mode 100644 index 0000000..0542d2f --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@aws-sdk/client-dynamodb/dist-types/ts3.4/commands/BatchWriteItemCommand.d.ts @@ -0,0 +1,47 @@ +import { Command as $Command } from "@smithy/smithy-client"; +import { MetadataBearer as __MetadataBearer } from "@smithy/types"; +import { + DynamoDBClientResolvedConfig, + ServiceInputTypes, + ServiceOutputTypes, +} from "../DynamoDBClient"; +import { BatchWriteItemInput, BatchWriteItemOutput } from "../models/models_0"; +export { __MetadataBearer }; +export { $Command }; +export interface BatchWriteItemCommandInput extends BatchWriteItemInput {} +export interface BatchWriteItemCommandOutput + extends BatchWriteItemOutput, + __MetadataBearer {} +declare const BatchWriteItemCommand_base: { + new ( + input: BatchWriteItemCommandInput + ): import("@smithy/smithy-client").CommandImpl< + BatchWriteItemCommandInput, + BatchWriteItemCommandOutput, + DynamoDBClientResolvedConfig, + ServiceInputTypes, + ServiceOutputTypes + >; + new ( + __0_0: BatchWriteItemCommandInput + ): import("@smithy/smithy-client").CommandImpl< + BatchWriteItemCommandInput, + BatchWriteItemCommandOutput, + DynamoDBClientResolvedConfig, + ServiceInputTypes, + ServiceOutputTypes + >; + getEndpointParameterInstructions(): import("@smithy/middleware-endpoint").EndpointParameterInstructions; +}; +export declare class BatchWriteItemCommand extends BatchWriteItemCommand_base { + protected static __types: { + api: { + input: BatchWriteItemInput; 
+ output: BatchWriteItemOutput; + }; + sdk: { + input: BatchWriteItemCommandInput; + output: BatchWriteItemCommandOutput; + }; + }; +} diff --git a/amplify/functions/downloadDocument/node_modules/@aws-sdk/client-dynamodb/dist-types/ts3.4/commands/CreateBackupCommand.d.ts b/amplify/functions/downloadDocument/node_modules/@aws-sdk/client-dynamodb/dist-types/ts3.4/commands/CreateBackupCommand.d.ts new file mode 100644 index 0000000..6692e00 --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@aws-sdk/client-dynamodb/dist-types/ts3.4/commands/CreateBackupCommand.d.ts @@ -0,0 +1,47 @@ +import { Command as $Command } from "@smithy/smithy-client"; +import { MetadataBearer as __MetadataBearer } from "@smithy/types"; +import { + DynamoDBClientResolvedConfig, + ServiceInputTypes, + ServiceOutputTypes, +} from "../DynamoDBClient"; +import { CreateBackupInput, CreateBackupOutput } from "../models/models_0"; +export { __MetadataBearer }; +export { $Command }; +export interface CreateBackupCommandInput extends CreateBackupInput {} +export interface CreateBackupCommandOutput + extends CreateBackupOutput, + __MetadataBearer {} +declare const CreateBackupCommand_base: { + new ( + input: CreateBackupCommandInput + ): import("@smithy/smithy-client").CommandImpl< + CreateBackupCommandInput, + CreateBackupCommandOutput, + DynamoDBClientResolvedConfig, + ServiceInputTypes, + ServiceOutputTypes + >; + new ( + __0_0: CreateBackupCommandInput + ): import("@smithy/smithy-client").CommandImpl< + CreateBackupCommandInput, + CreateBackupCommandOutput, + DynamoDBClientResolvedConfig, + ServiceInputTypes, + ServiceOutputTypes + >; + getEndpointParameterInstructions(): import("@smithy/middleware-endpoint").EndpointParameterInstructions; +}; +export declare class CreateBackupCommand extends CreateBackupCommand_base { + protected static __types: { + api: { + input: CreateBackupInput; + output: CreateBackupOutput; + }; + sdk: { + input: CreateBackupCommandInput; + output: 
CreateBackupCommandOutput; + }; + }; +} diff --git a/amplify/functions/downloadDocument/node_modules/@aws-sdk/client-dynamodb/dist-types/ts3.4/commands/CreateGlobalTableCommand.d.ts b/amplify/functions/downloadDocument/node_modules/@aws-sdk/client-dynamodb/dist-types/ts3.4/commands/CreateGlobalTableCommand.d.ts new file mode 100644 index 0000000..65564e3 --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@aws-sdk/client-dynamodb/dist-types/ts3.4/commands/CreateGlobalTableCommand.d.ts @@ -0,0 +1,50 @@ +import { Command as $Command } from "@smithy/smithy-client"; +import { MetadataBearer as __MetadataBearer } from "@smithy/types"; +import { + DynamoDBClientResolvedConfig, + ServiceInputTypes, + ServiceOutputTypes, +} from "../DynamoDBClient"; +import { + CreateGlobalTableInput, + CreateGlobalTableOutput, +} from "../models/models_0"; +export { __MetadataBearer }; +export { $Command }; +export interface CreateGlobalTableCommandInput extends CreateGlobalTableInput {} +export interface CreateGlobalTableCommandOutput + extends CreateGlobalTableOutput, + __MetadataBearer {} +declare const CreateGlobalTableCommand_base: { + new ( + input: CreateGlobalTableCommandInput + ): import("@smithy/smithy-client").CommandImpl< + CreateGlobalTableCommandInput, + CreateGlobalTableCommandOutput, + DynamoDBClientResolvedConfig, + ServiceInputTypes, + ServiceOutputTypes + >; + new ( + __0_0: CreateGlobalTableCommandInput + ): import("@smithy/smithy-client").CommandImpl< + CreateGlobalTableCommandInput, + CreateGlobalTableCommandOutput, + DynamoDBClientResolvedConfig, + ServiceInputTypes, + ServiceOutputTypes + >; + getEndpointParameterInstructions(): import("@smithy/middleware-endpoint").EndpointParameterInstructions; +}; +export declare class CreateGlobalTableCommand extends CreateGlobalTableCommand_base { + protected static __types: { + api: { + input: CreateGlobalTableInput; + output: CreateGlobalTableOutput; + }; + sdk: { + input: CreateGlobalTableCommandInput; + 
output: CreateGlobalTableCommandOutput; + }; + }; +} diff --git a/amplify/functions/downloadDocument/node_modules/@aws-sdk/client-dynamodb/dist-types/ts3.4/commands/CreateTableCommand.d.ts b/amplify/functions/downloadDocument/node_modules/@aws-sdk/client-dynamodb/dist-types/ts3.4/commands/CreateTableCommand.d.ts new file mode 100644 index 0000000..5761cdb --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@aws-sdk/client-dynamodb/dist-types/ts3.4/commands/CreateTableCommand.d.ts @@ -0,0 +1,47 @@ +import { Command as $Command } from "@smithy/smithy-client"; +import { MetadataBearer as __MetadataBearer } from "@smithy/types"; +import { + DynamoDBClientResolvedConfig, + ServiceInputTypes, + ServiceOutputTypes, +} from "../DynamoDBClient"; +import { CreateTableInput, CreateTableOutput } from "../models/models_0"; +export { __MetadataBearer }; +export { $Command }; +export interface CreateTableCommandInput extends CreateTableInput {} +export interface CreateTableCommandOutput + extends CreateTableOutput, + __MetadataBearer {} +declare const CreateTableCommand_base: { + new ( + input: CreateTableCommandInput + ): import("@smithy/smithy-client").CommandImpl< + CreateTableCommandInput, + CreateTableCommandOutput, + DynamoDBClientResolvedConfig, + ServiceInputTypes, + ServiceOutputTypes + >; + new ( + __0_0: CreateTableCommandInput + ): import("@smithy/smithy-client").CommandImpl< + CreateTableCommandInput, + CreateTableCommandOutput, + DynamoDBClientResolvedConfig, + ServiceInputTypes, + ServiceOutputTypes + >; + getEndpointParameterInstructions(): import("@smithy/middleware-endpoint").EndpointParameterInstructions; +}; +export declare class CreateTableCommand extends CreateTableCommand_base { + protected static __types: { + api: { + input: CreateTableInput; + output: CreateTableOutput; + }; + sdk: { + input: CreateTableCommandInput; + output: CreateTableCommandOutput; + }; + }; +} diff --git 
a/amplify/functions/downloadDocument/node_modules/@aws-sdk/client-dynamodb/dist-types/ts3.4/commands/DeleteBackupCommand.d.ts b/amplify/functions/downloadDocument/node_modules/@aws-sdk/client-dynamodb/dist-types/ts3.4/commands/DeleteBackupCommand.d.ts new file mode 100644 index 0000000..0b19c93 --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@aws-sdk/client-dynamodb/dist-types/ts3.4/commands/DeleteBackupCommand.d.ts @@ -0,0 +1,47 @@ +import { Command as $Command } from "@smithy/smithy-client"; +import { MetadataBearer as __MetadataBearer } from "@smithy/types"; +import { + DynamoDBClientResolvedConfig, + ServiceInputTypes, + ServiceOutputTypes, +} from "../DynamoDBClient"; +import { DeleteBackupInput, DeleteBackupOutput } from "../models/models_0"; +export { __MetadataBearer }; +export { $Command }; +export interface DeleteBackupCommandInput extends DeleteBackupInput {} +export interface DeleteBackupCommandOutput + extends DeleteBackupOutput, + __MetadataBearer {} +declare const DeleteBackupCommand_base: { + new ( + input: DeleteBackupCommandInput + ): import("@smithy/smithy-client").CommandImpl< + DeleteBackupCommandInput, + DeleteBackupCommandOutput, + DynamoDBClientResolvedConfig, + ServiceInputTypes, + ServiceOutputTypes + >; + new ( + __0_0: DeleteBackupCommandInput + ): import("@smithy/smithy-client").CommandImpl< + DeleteBackupCommandInput, + DeleteBackupCommandOutput, + DynamoDBClientResolvedConfig, + ServiceInputTypes, + ServiceOutputTypes + >; + getEndpointParameterInstructions(): import("@smithy/middleware-endpoint").EndpointParameterInstructions; +}; +export declare class DeleteBackupCommand extends DeleteBackupCommand_base { + protected static __types: { + api: { + input: DeleteBackupInput; + output: DeleteBackupOutput; + }; + sdk: { + input: DeleteBackupCommandInput; + output: DeleteBackupCommandOutput; + }; + }; +} diff --git 
a/amplify/functions/downloadDocument/node_modules/@aws-sdk/client-dynamodb/dist-types/ts3.4/commands/DeleteItemCommand.d.ts b/amplify/functions/downloadDocument/node_modules/@aws-sdk/client-dynamodb/dist-types/ts3.4/commands/DeleteItemCommand.d.ts new file mode 100644 index 0000000..de7976c --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@aws-sdk/client-dynamodb/dist-types/ts3.4/commands/DeleteItemCommand.d.ts @@ -0,0 +1,47 @@ +import { Command as $Command } from "@smithy/smithy-client"; +import { MetadataBearer as __MetadataBearer } from "@smithy/types"; +import { + DynamoDBClientResolvedConfig, + ServiceInputTypes, + ServiceOutputTypes, +} from "../DynamoDBClient"; +import { DeleteItemInput, DeleteItemOutput } from "../models/models_0"; +export { __MetadataBearer }; +export { $Command }; +export interface DeleteItemCommandInput extends DeleteItemInput {} +export interface DeleteItemCommandOutput + extends DeleteItemOutput, + __MetadataBearer {} +declare const DeleteItemCommand_base: { + new ( + input: DeleteItemCommandInput + ): import("@smithy/smithy-client").CommandImpl< + DeleteItemCommandInput, + DeleteItemCommandOutput, + DynamoDBClientResolvedConfig, + ServiceInputTypes, + ServiceOutputTypes + >; + new ( + __0_0: DeleteItemCommandInput + ): import("@smithy/smithy-client").CommandImpl< + DeleteItemCommandInput, + DeleteItemCommandOutput, + DynamoDBClientResolvedConfig, + ServiceInputTypes, + ServiceOutputTypes + >; + getEndpointParameterInstructions(): import("@smithy/middleware-endpoint").EndpointParameterInstructions; +}; +export declare class DeleteItemCommand extends DeleteItemCommand_base { + protected static __types: { + api: { + input: DeleteItemInput; + output: DeleteItemOutput; + }; + sdk: { + input: DeleteItemCommandInput; + output: DeleteItemCommandOutput; + }; + }; +} diff --git a/amplify/functions/downloadDocument/node_modules/@aws-sdk/client-dynamodb/dist-types/ts3.4/commands/DeleteResourcePolicyCommand.d.ts 
b/amplify/functions/downloadDocument/node_modules/@aws-sdk/client-dynamodb/dist-types/ts3.4/commands/DeleteResourcePolicyCommand.d.ts new file mode 100644 index 0000000..4aad4b7 --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@aws-sdk/client-dynamodb/dist-types/ts3.4/commands/DeleteResourcePolicyCommand.d.ts @@ -0,0 +1,51 @@ +import { Command as $Command } from "@smithy/smithy-client"; +import { MetadataBearer as __MetadataBearer } from "@smithy/types"; +import { + DynamoDBClientResolvedConfig, + ServiceInputTypes, + ServiceOutputTypes, +} from "../DynamoDBClient"; +import { + DeleteResourcePolicyInput, + DeleteResourcePolicyOutput, +} from "../models/models_0"; +export { __MetadataBearer }; +export { $Command }; +export interface DeleteResourcePolicyCommandInput + extends DeleteResourcePolicyInput {} +export interface DeleteResourcePolicyCommandOutput + extends DeleteResourcePolicyOutput, + __MetadataBearer {} +declare const DeleteResourcePolicyCommand_base: { + new ( + input: DeleteResourcePolicyCommandInput + ): import("@smithy/smithy-client").CommandImpl< + DeleteResourcePolicyCommandInput, + DeleteResourcePolicyCommandOutput, + DynamoDBClientResolvedConfig, + ServiceInputTypes, + ServiceOutputTypes + >; + new ( + __0_0: DeleteResourcePolicyCommandInput + ): import("@smithy/smithy-client").CommandImpl< + DeleteResourcePolicyCommandInput, + DeleteResourcePolicyCommandOutput, + DynamoDBClientResolvedConfig, + ServiceInputTypes, + ServiceOutputTypes + >; + getEndpointParameterInstructions(): import("@smithy/middleware-endpoint").EndpointParameterInstructions; +}; +export declare class DeleteResourcePolicyCommand extends DeleteResourcePolicyCommand_base { + protected static __types: { + api: { + input: DeleteResourcePolicyInput; + output: DeleteResourcePolicyOutput; + }; + sdk: { + input: DeleteResourcePolicyCommandInput; + output: DeleteResourcePolicyCommandOutput; + }; + }; +} diff --git 
a/amplify/functions/downloadDocument/node_modules/@aws-sdk/client-dynamodb/dist-types/ts3.4/commands/DeleteTableCommand.d.ts b/amplify/functions/downloadDocument/node_modules/@aws-sdk/client-dynamodb/dist-types/ts3.4/commands/DeleteTableCommand.d.ts new file mode 100644 index 0000000..5dc9c2b --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@aws-sdk/client-dynamodb/dist-types/ts3.4/commands/DeleteTableCommand.d.ts @@ -0,0 +1,47 @@ +import { Command as $Command } from "@smithy/smithy-client"; +import { MetadataBearer as __MetadataBearer } from "@smithy/types"; +import { + DynamoDBClientResolvedConfig, + ServiceInputTypes, + ServiceOutputTypes, +} from "../DynamoDBClient"; +import { DeleteTableInput, DeleteTableOutput } from "../models/models_0"; +export { __MetadataBearer }; +export { $Command }; +export interface DeleteTableCommandInput extends DeleteTableInput {} +export interface DeleteTableCommandOutput + extends DeleteTableOutput, + __MetadataBearer {} +declare const DeleteTableCommand_base: { + new ( + input: DeleteTableCommandInput + ): import("@smithy/smithy-client").CommandImpl< + DeleteTableCommandInput, + DeleteTableCommandOutput, + DynamoDBClientResolvedConfig, + ServiceInputTypes, + ServiceOutputTypes + >; + new ( + __0_0: DeleteTableCommandInput + ): import("@smithy/smithy-client").CommandImpl< + DeleteTableCommandInput, + DeleteTableCommandOutput, + DynamoDBClientResolvedConfig, + ServiceInputTypes, + ServiceOutputTypes + >; + getEndpointParameterInstructions(): import("@smithy/middleware-endpoint").EndpointParameterInstructions; +}; +export declare class DeleteTableCommand extends DeleteTableCommand_base { + protected static __types: { + api: { + input: DeleteTableInput; + output: DeleteTableOutput; + }; + sdk: { + input: DeleteTableCommandInput; + output: DeleteTableCommandOutput; + }; + }; +} diff --git a/amplify/functions/downloadDocument/node_modules/@aws-sdk/client-dynamodb/dist-types/ts3.4/commands/DescribeBackupCommand.d.ts 
b/amplify/functions/downloadDocument/node_modules/@aws-sdk/client-dynamodb/dist-types/ts3.4/commands/DescribeBackupCommand.d.ts new file mode 100644 index 0000000..e8a3f6f --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@aws-sdk/client-dynamodb/dist-types/ts3.4/commands/DescribeBackupCommand.d.ts @@ -0,0 +1,47 @@ +import { Command as $Command } from "@smithy/smithy-client"; +import { MetadataBearer as __MetadataBearer } from "@smithy/types"; +import { + DynamoDBClientResolvedConfig, + ServiceInputTypes, + ServiceOutputTypes, +} from "../DynamoDBClient"; +import { DescribeBackupInput, DescribeBackupOutput } from "../models/models_0"; +export { __MetadataBearer }; +export { $Command }; +export interface DescribeBackupCommandInput extends DescribeBackupInput {} +export interface DescribeBackupCommandOutput + extends DescribeBackupOutput, + __MetadataBearer {} +declare const DescribeBackupCommand_base: { + new ( + input: DescribeBackupCommandInput + ): import("@smithy/smithy-client").CommandImpl< + DescribeBackupCommandInput, + DescribeBackupCommandOutput, + DynamoDBClientResolvedConfig, + ServiceInputTypes, + ServiceOutputTypes + >; + new ( + __0_0: DescribeBackupCommandInput + ): import("@smithy/smithy-client").CommandImpl< + DescribeBackupCommandInput, + DescribeBackupCommandOutput, + DynamoDBClientResolvedConfig, + ServiceInputTypes, + ServiceOutputTypes + >; + getEndpointParameterInstructions(): import("@smithy/middleware-endpoint").EndpointParameterInstructions; +}; +export declare class DescribeBackupCommand extends DescribeBackupCommand_base { + protected static __types: { + api: { + input: DescribeBackupInput; + output: DescribeBackupOutput; + }; + sdk: { + input: DescribeBackupCommandInput; + output: DescribeBackupCommandOutput; + }; + }; +} diff --git a/amplify/functions/downloadDocument/node_modules/@aws-sdk/client-dynamodb/dist-types/ts3.4/commands/DescribeContinuousBackupsCommand.d.ts 
b/amplify/functions/downloadDocument/node_modules/@aws-sdk/client-dynamodb/dist-types/ts3.4/commands/DescribeContinuousBackupsCommand.d.ts new file mode 100644 index 0000000..4bcc737 --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@aws-sdk/client-dynamodb/dist-types/ts3.4/commands/DescribeContinuousBackupsCommand.d.ts @@ -0,0 +1,51 @@ +import { Command as $Command } from "@smithy/smithy-client"; +import { MetadataBearer as __MetadataBearer } from "@smithy/types"; +import { + DynamoDBClientResolvedConfig, + ServiceInputTypes, + ServiceOutputTypes, +} from "../DynamoDBClient"; +import { + DescribeContinuousBackupsInput, + DescribeContinuousBackupsOutput, +} from "../models/models_0"; +export { __MetadataBearer }; +export { $Command }; +export interface DescribeContinuousBackupsCommandInput + extends DescribeContinuousBackupsInput {} +export interface DescribeContinuousBackupsCommandOutput + extends DescribeContinuousBackupsOutput, + __MetadataBearer {} +declare const DescribeContinuousBackupsCommand_base: { + new ( + input: DescribeContinuousBackupsCommandInput + ): import("@smithy/smithy-client").CommandImpl< + DescribeContinuousBackupsCommandInput, + DescribeContinuousBackupsCommandOutput, + DynamoDBClientResolvedConfig, + ServiceInputTypes, + ServiceOutputTypes + >; + new ( + __0_0: DescribeContinuousBackupsCommandInput + ): import("@smithy/smithy-client").CommandImpl< + DescribeContinuousBackupsCommandInput, + DescribeContinuousBackupsCommandOutput, + DynamoDBClientResolvedConfig, + ServiceInputTypes, + ServiceOutputTypes + >; + getEndpointParameterInstructions(): import("@smithy/middleware-endpoint").EndpointParameterInstructions; +}; +export declare class DescribeContinuousBackupsCommand extends DescribeContinuousBackupsCommand_base { + protected static __types: { + api: { + input: DescribeContinuousBackupsInput; + output: DescribeContinuousBackupsOutput; + }; + sdk: { + input: DescribeContinuousBackupsCommandInput; + output: 
DescribeContinuousBackupsCommandOutput; + }; + }; +} diff --git a/amplify/functions/downloadDocument/node_modules/@aws-sdk/client-dynamodb/dist-types/ts3.4/commands/DescribeContributorInsightsCommand.d.ts b/amplify/functions/downloadDocument/node_modules/@aws-sdk/client-dynamodb/dist-types/ts3.4/commands/DescribeContributorInsightsCommand.d.ts new file mode 100644 index 0000000..09a11d9 --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@aws-sdk/client-dynamodb/dist-types/ts3.4/commands/DescribeContributorInsightsCommand.d.ts @@ -0,0 +1,51 @@ +import { Command as $Command } from "@smithy/smithy-client"; +import { MetadataBearer as __MetadataBearer } from "@smithy/types"; +import { + DynamoDBClientResolvedConfig, + ServiceInputTypes, + ServiceOutputTypes, +} from "../DynamoDBClient"; +import { + DescribeContributorInsightsInput, + DescribeContributorInsightsOutput, +} from "../models/models_0"; +export { __MetadataBearer }; +export { $Command }; +export interface DescribeContributorInsightsCommandInput + extends DescribeContributorInsightsInput {} +export interface DescribeContributorInsightsCommandOutput + extends DescribeContributorInsightsOutput, + __MetadataBearer {} +declare const DescribeContributorInsightsCommand_base: { + new ( + input: DescribeContributorInsightsCommandInput + ): import("@smithy/smithy-client").CommandImpl< + DescribeContributorInsightsCommandInput, + DescribeContributorInsightsCommandOutput, + DynamoDBClientResolvedConfig, + ServiceInputTypes, + ServiceOutputTypes + >; + new ( + __0_0: DescribeContributorInsightsCommandInput + ): import("@smithy/smithy-client").CommandImpl< + DescribeContributorInsightsCommandInput, + DescribeContributorInsightsCommandOutput, + DynamoDBClientResolvedConfig, + ServiceInputTypes, + ServiceOutputTypes + >; + getEndpointParameterInstructions(): import("@smithy/middleware-endpoint").EndpointParameterInstructions; +}; +export declare class DescribeContributorInsightsCommand extends 
DescribeContributorInsightsCommand_base { + protected static __types: { + api: { + input: DescribeContributorInsightsInput; + output: DescribeContributorInsightsOutput; + }; + sdk: { + input: DescribeContributorInsightsCommandInput; + output: DescribeContributorInsightsCommandOutput; + }; + }; +} diff --git a/amplify/functions/downloadDocument/node_modules/@aws-sdk/client-dynamodb/dist-types/ts3.4/commands/DescribeEndpointsCommand.d.ts b/amplify/functions/downloadDocument/node_modules/@aws-sdk/client-dynamodb/dist-types/ts3.4/commands/DescribeEndpointsCommand.d.ts new file mode 100644 index 0000000..2f88f7e --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@aws-sdk/client-dynamodb/dist-types/ts3.4/commands/DescribeEndpointsCommand.d.ts @@ -0,0 +1,51 @@ +import { Command as $Command } from "@smithy/smithy-client"; +import { MetadataBearer as __MetadataBearer } from "@smithy/types"; +import { + DynamoDBClientResolvedConfig, + ServiceInputTypes, + ServiceOutputTypes, +} from "../DynamoDBClient"; +import { + DescribeEndpointsRequest, + DescribeEndpointsResponse, +} from "../models/models_0"; +export { __MetadataBearer }; +export { $Command }; +export interface DescribeEndpointsCommandInput + extends DescribeEndpointsRequest {} +export interface DescribeEndpointsCommandOutput + extends DescribeEndpointsResponse, + __MetadataBearer {} +declare const DescribeEndpointsCommand_base: { + new ( + input: DescribeEndpointsCommandInput + ): import("@smithy/smithy-client").CommandImpl< + DescribeEndpointsCommandInput, + DescribeEndpointsCommandOutput, + DynamoDBClientResolvedConfig, + ServiceInputTypes, + ServiceOutputTypes + >; + new ( + ...[input]: [] | [DescribeEndpointsCommandInput] + ): import("@smithy/smithy-client").CommandImpl< + DescribeEndpointsCommandInput, + DescribeEndpointsCommandOutput, + DynamoDBClientResolvedConfig, + ServiceInputTypes, + ServiceOutputTypes + >; + getEndpointParameterInstructions(): 
import("@smithy/middleware-endpoint").EndpointParameterInstructions; +}; +export declare class DescribeEndpointsCommand extends DescribeEndpointsCommand_base { + protected static __types: { + api: { + input: {}; + output: DescribeEndpointsResponse; + }; + sdk: { + input: DescribeEndpointsCommandInput; + output: DescribeEndpointsCommandOutput; + }; + }; +} diff --git a/amplify/functions/downloadDocument/node_modules/@aws-sdk/client-dynamodb/dist-types/ts3.4/commands/DescribeExportCommand.d.ts b/amplify/functions/downloadDocument/node_modules/@aws-sdk/client-dynamodb/dist-types/ts3.4/commands/DescribeExportCommand.d.ts new file mode 100644 index 0000000..81e570f --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@aws-sdk/client-dynamodb/dist-types/ts3.4/commands/DescribeExportCommand.d.ts @@ -0,0 +1,47 @@ +import { Command as $Command } from "@smithy/smithy-client"; +import { MetadataBearer as __MetadataBearer } from "@smithy/types"; +import { + DynamoDBClientResolvedConfig, + ServiceInputTypes, + ServiceOutputTypes, +} from "../DynamoDBClient"; +import { DescribeExportInput, DescribeExportOutput } from "../models/models_0"; +export { __MetadataBearer }; +export { $Command }; +export interface DescribeExportCommandInput extends DescribeExportInput {} +export interface DescribeExportCommandOutput + extends DescribeExportOutput, + __MetadataBearer {} +declare const DescribeExportCommand_base: { + new ( + input: DescribeExportCommandInput + ): import("@smithy/smithy-client").CommandImpl< + DescribeExportCommandInput, + DescribeExportCommandOutput, + DynamoDBClientResolvedConfig, + ServiceInputTypes, + ServiceOutputTypes + >; + new ( + __0_0: DescribeExportCommandInput + ): import("@smithy/smithy-client").CommandImpl< + DescribeExportCommandInput, + DescribeExportCommandOutput, + DynamoDBClientResolvedConfig, + ServiceInputTypes, + ServiceOutputTypes + >; + getEndpointParameterInstructions(): 
import("@smithy/middleware-endpoint").EndpointParameterInstructions; +}; +export declare class DescribeExportCommand extends DescribeExportCommand_base { + protected static __types: { + api: { + input: DescribeExportInput; + output: DescribeExportOutput; + }; + sdk: { + input: DescribeExportCommandInput; + output: DescribeExportCommandOutput; + }; + }; +} diff --git a/amplify/functions/downloadDocument/node_modules/@aws-sdk/client-dynamodb/dist-types/ts3.4/commands/DescribeGlobalTableCommand.d.ts b/amplify/functions/downloadDocument/node_modules/@aws-sdk/client-dynamodb/dist-types/ts3.4/commands/DescribeGlobalTableCommand.d.ts new file mode 100644 index 0000000..55ef067 --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@aws-sdk/client-dynamodb/dist-types/ts3.4/commands/DescribeGlobalTableCommand.d.ts @@ -0,0 +1,51 @@ +import { Command as $Command } from "@smithy/smithy-client"; +import { MetadataBearer as __MetadataBearer } from "@smithy/types"; +import { + DynamoDBClientResolvedConfig, + ServiceInputTypes, + ServiceOutputTypes, +} from "../DynamoDBClient"; +import { + DescribeGlobalTableInput, + DescribeGlobalTableOutput, +} from "../models/models_0"; +export { __MetadataBearer }; +export { $Command }; +export interface DescribeGlobalTableCommandInput + extends DescribeGlobalTableInput {} +export interface DescribeGlobalTableCommandOutput + extends DescribeGlobalTableOutput, + __MetadataBearer {} +declare const DescribeGlobalTableCommand_base: { + new ( + input: DescribeGlobalTableCommandInput + ): import("@smithy/smithy-client").CommandImpl< + DescribeGlobalTableCommandInput, + DescribeGlobalTableCommandOutput, + DynamoDBClientResolvedConfig, + ServiceInputTypes, + ServiceOutputTypes + >; + new ( + __0_0: DescribeGlobalTableCommandInput + ): import("@smithy/smithy-client").CommandImpl< + DescribeGlobalTableCommandInput, + DescribeGlobalTableCommandOutput, + DynamoDBClientResolvedConfig, + ServiceInputTypes, + ServiceOutputTypes + >; + 
getEndpointParameterInstructions(): import("@smithy/middleware-endpoint").EndpointParameterInstructions; +}; +export declare class DescribeGlobalTableCommand extends DescribeGlobalTableCommand_base { + protected static __types: { + api: { + input: DescribeGlobalTableInput; + output: DescribeGlobalTableOutput; + }; + sdk: { + input: DescribeGlobalTableCommandInput; + output: DescribeGlobalTableCommandOutput; + }; + }; +} diff --git a/amplify/functions/downloadDocument/node_modules/@aws-sdk/client-dynamodb/dist-types/ts3.4/commands/DescribeGlobalTableSettingsCommand.d.ts b/amplify/functions/downloadDocument/node_modules/@aws-sdk/client-dynamodb/dist-types/ts3.4/commands/DescribeGlobalTableSettingsCommand.d.ts new file mode 100644 index 0000000..7cf5373 --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@aws-sdk/client-dynamodb/dist-types/ts3.4/commands/DescribeGlobalTableSettingsCommand.d.ts @@ -0,0 +1,51 @@ +import { Command as $Command } from "@smithy/smithy-client"; +import { MetadataBearer as __MetadataBearer } from "@smithy/types"; +import { + DynamoDBClientResolvedConfig, + ServiceInputTypes, + ServiceOutputTypes, +} from "../DynamoDBClient"; +import { + DescribeGlobalTableSettingsInput, + DescribeGlobalTableSettingsOutput, +} from "../models/models_0"; +export { __MetadataBearer }; +export { $Command }; +export interface DescribeGlobalTableSettingsCommandInput + extends DescribeGlobalTableSettingsInput {} +export interface DescribeGlobalTableSettingsCommandOutput + extends DescribeGlobalTableSettingsOutput, + __MetadataBearer {} +declare const DescribeGlobalTableSettingsCommand_base: { + new ( + input: DescribeGlobalTableSettingsCommandInput + ): import("@smithy/smithy-client").CommandImpl< + DescribeGlobalTableSettingsCommandInput, + DescribeGlobalTableSettingsCommandOutput, + DynamoDBClientResolvedConfig, + ServiceInputTypes, + ServiceOutputTypes + >; + new ( + __0_0: DescribeGlobalTableSettingsCommandInput + ): 
import("@smithy/smithy-client").CommandImpl< + DescribeGlobalTableSettingsCommandInput, + DescribeGlobalTableSettingsCommandOutput, + DynamoDBClientResolvedConfig, + ServiceInputTypes, + ServiceOutputTypes + >; + getEndpointParameterInstructions(): import("@smithy/middleware-endpoint").EndpointParameterInstructions; +}; +export declare class DescribeGlobalTableSettingsCommand extends DescribeGlobalTableSettingsCommand_base { + protected static __types: { + api: { + input: DescribeGlobalTableSettingsInput; + output: DescribeGlobalTableSettingsOutput; + }; + sdk: { + input: DescribeGlobalTableSettingsCommandInput; + output: DescribeGlobalTableSettingsCommandOutput; + }; + }; +} diff --git a/amplify/functions/downloadDocument/node_modules/@aws-sdk/client-dynamodb/dist-types/ts3.4/commands/DescribeImportCommand.d.ts b/amplify/functions/downloadDocument/node_modules/@aws-sdk/client-dynamodb/dist-types/ts3.4/commands/DescribeImportCommand.d.ts new file mode 100644 index 0000000..eeaa9b9 --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@aws-sdk/client-dynamodb/dist-types/ts3.4/commands/DescribeImportCommand.d.ts @@ -0,0 +1,47 @@ +import { Command as $Command } from "@smithy/smithy-client"; +import { MetadataBearer as __MetadataBearer } from "@smithy/types"; +import { + DynamoDBClientResolvedConfig, + ServiceInputTypes, + ServiceOutputTypes, +} from "../DynamoDBClient"; +import { DescribeImportInput, DescribeImportOutput } from "../models/models_0"; +export { __MetadataBearer }; +export { $Command }; +export interface DescribeImportCommandInput extends DescribeImportInput {} +export interface DescribeImportCommandOutput + extends DescribeImportOutput, + __MetadataBearer {} +declare const DescribeImportCommand_base: { + new ( + input: DescribeImportCommandInput + ): import("@smithy/smithy-client").CommandImpl< + DescribeImportCommandInput, + DescribeImportCommandOutput, + DynamoDBClientResolvedConfig, + ServiceInputTypes, + ServiceOutputTypes + >; + new ( 
+ __0_0: DescribeImportCommandInput + ): import("@smithy/smithy-client").CommandImpl< + DescribeImportCommandInput, + DescribeImportCommandOutput, + DynamoDBClientResolvedConfig, + ServiceInputTypes, + ServiceOutputTypes + >; + getEndpointParameterInstructions(): import("@smithy/middleware-endpoint").EndpointParameterInstructions; +}; +export declare class DescribeImportCommand extends DescribeImportCommand_base { + protected static __types: { + api: { + input: DescribeImportInput; + output: DescribeImportOutput; + }; + sdk: { + input: DescribeImportCommandInput; + output: DescribeImportCommandOutput; + }; + }; +} diff --git a/amplify/functions/downloadDocument/node_modules/@aws-sdk/client-dynamodb/dist-types/ts3.4/commands/DescribeKinesisStreamingDestinationCommand.d.ts b/amplify/functions/downloadDocument/node_modules/@aws-sdk/client-dynamodb/dist-types/ts3.4/commands/DescribeKinesisStreamingDestinationCommand.d.ts new file mode 100644 index 0000000..9801ad1 --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@aws-sdk/client-dynamodb/dist-types/ts3.4/commands/DescribeKinesisStreamingDestinationCommand.d.ts @@ -0,0 +1,51 @@ +import { Command as $Command } from "@smithy/smithy-client"; +import { MetadataBearer as __MetadataBearer } from "@smithy/types"; +import { + DynamoDBClientResolvedConfig, + ServiceInputTypes, + ServiceOutputTypes, +} from "../DynamoDBClient"; +import { + DescribeKinesisStreamingDestinationInput, + DescribeKinesisStreamingDestinationOutput, +} from "../models/models_0"; +export { __MetadataBearer }; +export { $Command }; +export interface DescribeKinesisStreamingDestinationCommandInput + extends DescribeKinesisStreamingDestinationInput {} +export interface DescribeKinesisStreamingDestinationCommandOutput + extends DescribeKinesisStreamingDestinationOutput, + __MetadataBearer {} +declare const DescribeKinesisStreamingDestinationCommand_base: { + new ( + input: DescribeKinesisStreamingDestinationCommandInput + ): 
import("@smithy/smithy-client").CommandImpl< + DescribeKinesisStreamingDestinationCommandInput, + DescribeKinesisStreamingDestinationCommandOutput, + DynamoDBClientResolvedConfig, + ServiceInputTypes, + ServiceOutputTypes + >; + new ( + __0_0: DescribeKinesisStreamingDestinationCommandInput + ): import("@smithy/smithy-client").CommandImpl< + DescribeKinesisStreamingDestinationCommandInput, + DescribeKinesisStreamingDestinationCommandOutput, + DynamoDBClientResolvedConfig, + ServiceInputTypes, + ServiceOutputTypes + >; + getEndpointParameterInstructions(): import("@smithy/middleware-endpoint").EndpointParameterInstructions; +}; +export declare class DescribeKinesisStreamingDestinationCommand extends DescribeKinesisStreamingDestinationCommand_base { + protected static __types: { + api: { + input: DescribeKinesisStreamingDestinationInput; + output: DescribeKinesisStreamingDestinationOutput; + }; + sdk: { + input: DescribeKinesisStreamingDestinationCommandInput; + output: DescribeKinesisStreamingDestinationCommandOutput; + }; + }; +} diff --git a/amplify/functions/downloadDocument/node_modules/@aws-sdk/client-dynamodb/dist-types/ts3.4/commands/DescribeLimitsCommand.d.ts b/amplify/functions/downloadDocument/node_modules/@aws-sdk/client-dynamodb/dist-types/ts3.4/commands/DescribeLimitsCommand.d.ts new file mode 100644 index 0000000..52ce46b --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@aws-sdk/client-dynamodb/dist-types/ts3.4/commands/DescribeLimitsCommand.d.ts @@ -0,0 +1,47 @@ +import { Command as $Command } from "@smithy/smithy-client"; +import { MetadataBearer as __MetadataBearer } from "@smithy/types"; +import { + DynamoDBClientResolvedConfig, + ServiceInputTypes, + ServiceOutputTypes, +} from "../DynamoDBClient"; +import { DescribeLimitsInput, DescribeLimitsOutput } from "../models/models_0"; +export { __MetadataBearer }; +export { $Command }; +export interface DescribeLimitsCommandInput extends DescribeLimitsInput {} +export interface 
DescribeLimitsCommandOutput + extends DescribeLimitsOutput, + __MetadataBearer {} +declare const DescribeLimitsCommand_base: { + new ( + input: DescribeLimitsCommandInput + ): import("@smithy/smithy-client").CommandImpl< + DescribeLimitsCommandInput, + DescribeLimitsCommandOutput, + DynamoDBClientResolvedConfig, + ServiceInputTypes, + ServiceOutputTypes + >; + new ( + ...[input]: [] | [DescribeLimitsCommandInput] + ): import("@smithy/smithy-client").CommandImpl< + DescribeLimitsCommandInput, + DescribeLimitsCommandOutput, + DynamoDBClientResolvedConfig, + ServiceInputTypes, + ServiceOutputTypes + >; + getEndpointParameterInstructions(): import("@smithy/middleware-endpoint").EndpointParameterInstructions; +}; +export declare class DescribeLimitsCommand extends DescribeLimitsCommand_base { + protected static __types: { + api: { + input: {}; + output: DescribeLimitsOutput; + }; + sdk: { + input: DescribeLimitsCommandInput; + output: DescribeLimitsCommandOutput; + }; + }; +} diff --git a/amplify/functions/downloadDocument/node_modules/@aws-sdk/client-dynamodb/dist-types/ts3.4/commands/DescribeTableCommand.d.ts b/amplify/functions/downloadDocument/node_modules/@aws-sdk/client-dynamodb/dist-types/ts3.4/commands/DescribeTableCommand.d.ts new file mode 100644 index 0000000..d38362c --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@aws-sdk/client-dynamodb/dist-types/ts3.4/commands/DescribeTableCommand.d.ts @@ -0,0 +1,47 @@ +import { Command as $Command } from "@smithy/smithy-client"; +import { MetadataBearer as __MetadataBearer } from "@smithy/types"; +import { + DynamoDBClientResolvedConfig, + ServiceInputTypes, + ServiceOutputTypes, +} from "../DynamoDBClient"; +import { DescribeTableInput, DescribeTableOutput } from "../models/models_0"; +export { __MetadataBearer }; +export { $Command }; +export interface DescribeTableCommandInput extends DescribeTableInput {} +export interface DescribeTableCommandOutput + extends DescribeTableOutput, + 
__MetadataBearer {} +declare const DescribeTableCommand_base: { + new ( + input: DescribeTableCommandInput + ): import("@smithy/smithy-client").CommandImpl< + DescribeTableCommandInput, + DescribeTableCommandOutput, + DynamoDBClientResolvedConfig, + ServiceInputTypes, + ServiceOutputTypes + >; + new ( + __0_0: DescribeTableCommandInput + ): import("@smithy/smithy-client").CommandImpl< + DescribeTableCommandInput, + DescribeTableCommandOutput, + DynamoDBClientResolvedConfig, + ServiceInputTypes, + ServiceOutputTypes + >; + getEndpointParameterInstructions(): import("@smithy/middleware-endpoint").EndpointParameterInstructions; +}; +export declare class DescribeTableCommand extends DescribeTableCommand_base { + protected static __types: { + api: { + input: DescribeTableInput; + output: DescribeTableOutput; + }; + sdk: { + input: DescribeTableCommandInput; + output: DescribeTableCommandOutput; + }; + }; +} diff --git a/amplify/functions/downloadDocument/node_modules/@aws-sdk/client-dynamodb/dist-types/ts3.4/commands/DescribeTableReplicaAutoScalingCommand.d.ts b/amplify/functions/downloadDocument/node_modules/@aws-sdk/client-dynamodb/dist-types/ts3.4/commands/DescribeTableReplicaAutoScalingCommand.d.ts new file mode 100644 index 0000000..07328ed --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@aws-sdk/client-dynamodb/dist-types/ts3.4/commands/DescribeTableReplicaAutoScalingCommand.d.ts @@ -0,0 +1,51 @@ +import { Command as $Command } from "@smithy/smithy-client"; +import { MetadataBearer as __MetadataBearer } from "@smithy/types"; +import { + DynamoDBClientResolvedConfig, + ServiceInputTypes, + ServiceOutputTypes, +} from "../DynamoDBClient"; +import { + DescribeTableReplicaAutoScalingInput, + DescribeTableReplicaAutoScalingOutput, +} from "../models/models_0"; +export { __MetadataBearer }; +export { $Command }; +export interface DescribeTableReplicaAutoScalingCommandInput + extends DescribeTableReplicaAutoScalingInput {} +export interface 
DescribeTableReplicaAutoScalingCommandOutput + extends DescribeTableReplicaAutoScalingOutput, + __MetadataBearer {} +declare const DescribeTableReplicaAutoScalingCommand_base: { + new ( + input: DescribeTableReplicaAutoScalingCommandInput + ): import("@smithy/smithy-client").CommandImpl< + DescribeTableReplicaAutoScalingCommandInput, + DescribeTableReplicaAutoScalingCommandOutput, + DynamoDBClientResolvedConfig, + ServiceInputTypes, + ServiceOutputTypes + >; + new ( + __0_0: DescribeTableReplicaAutoScalingCommandInput + ): import("@smithy/smithy-client").CommandImpl< + DescribeTableReplicaAutoScalingCommandInput, + DescribeTableReplicaAutoScalingCommandOutput, + DynamoDBClientResolvedConfig, + ServiceInputTypes, + ServiceOutputTypes + >; + getEndpointParameterInstructions(): import("@smithy/middleware-endpoint").EndpointParameterInstructions; +}; +export declare class DescribeTableReplicaAutoScalingCommand extends DescribeTableReplicaAutoScalingCommand_base { + protected static __types: { + api: { + input: DescribeTableReplicaAutoScalingInput; + output: DescribeTableReplicaAutoScalingOutput; + }; + sdk: { + input: DescribeTableReplicaAutoScalingCommandInput; + output: DescribeTableReplicaAutoScalingCommandOutput; + }; + }; +} diff --git a/amplify/functions/downloadDocument/node_modules/@aws-sdk/client-dynamodb/dist-types/ts3.4/commands/DescribeTimeToLiveCommand.d.ts b/amplify/functions/downloadDocument/node_modules/@aws-sdk/client-dynamodb/dist-types/ts3.4/commands/DescribeTimeToLiveCommand.d.ts new file mode 100644 index 0000000..7f8588f --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@aws-sdk/client-dynamodb/dist-types/ts3.4/commands/DescribeTimeToLiveCommand.d.ts @@ -0,0 +1,51 @@ +import { Command as $Command } from "@smithy/smithy-client"; +import { MetadataBearer as __MetadataBearer } from "@smithy/types"; +import { + DynamoDBClientResolvedConfig, + ServiceInputTypes, + ServiceOutputTypes, +} from "../DynamoDBClient"; +import { + 
DescribeTimeToLiveInput, + DescribeTimeToLiveOutput, +} from "../models/models_0"; +export { __MetadataBearer }; +export { $Command }; +export interface DescribeTimeToLiveCommandInput + extends DescribeTimeToLiveInput {} +export interface DescribeTimeToLiveCommandOutput + extends DescribeTimeToLiveOutput, + __MetadataBearer {} +declare const DescribeTimeToLiveCommand_base: { + new ( + input: DescribeTimeToLiveCommandInput + ): import("@smithy/smithy-client").CommandImpl< + DescribeTimeToLiveCommandInput, + DescribeTimeToLiveCommandOutput, + DynamoDBClientResolvedConfig, + ServiceInputTypes, + ServiceOutputTypes + >; + new ( + __0_0: DescribeTimeToLiveCommandInput + ): import("@smithy/smithy-client").CommandImpl< + DescribeTimeToLiveCommandInput, + DescribeTimeToLiveCommandOutput, + DynamoDBClientResolvedConfig, + ServiceInputTypes, + ServiceOutputTypes + >; + getEndpointParameterInstructions(): import("@smithy/middleware-endpoint").EndpointParameterInstructions; +}; +export declare class DescribeTimeToLiveCommand extends DescribeTimeToLiveCommand_base { + protected static __types: { + api: { + input: DescribeTimeToLiveInput; + output: DescribeTimeToLiveOutput; + }; + sdk: { + input: DescribeTimeToLiveCommandInput; + output: DescribeTimeToLiveCommandOutput; + }; + }; +} diff --git a/amplify/functions/downloadDocument/node_modules/@aws-sdk/client-dynamodb/dist-types/ts3.4/commands/DisableKinesisStreamingDestinationCommand.d.ts b/amplify/functions/downloadDocument/node_modules/@aws-sdk/client-dynamodb/dist-types/ts3.4/commands/DisableKinesisStreamingDestinationCommand.d.ts new file mode 100644 index 0000000..22257f0 --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@aws-sdk/client-dynamodb/dist-types/ts3.4/commands/DisableKinesisStreamingDestinationCommand.d.ts @@ -0,0 +1,51 @@ +import { Command as $Command } from "@smithy/smithy-client"; +import { MetadataBearer as __MetadataBearer } from "@smithy/types"; +import { + DynamoDBClientResolvedConfig, + 
ServiceInputTypes, + ServiceOutputTypes, +} from "../DynamoDBClient"; +import { + KinesisStreamingDestinationInput, + KinesisStreamingDestinationOutput, +} from "../models/models_0"; +export { __MetadataBearer }; +export { $Command }; +export interface DisableKinesisStreamingDestinationCommandInput + extends KinesisStreamingDestinationInput {} +export interface DisableKinesisStreamingDestinationCommandOutput + extends KinesisStreamingDestinationOutput, + __MetadataBearer {} +declare const DisableKinesisStreamingDestinationCommand_base: { + new ( + input: DisableKinesisStreamingDestinationCommandInput + ): import("@smithy/smithy-client").CommandImpl< + DisableKinesisStreamingDestinationCommandInput, + DisableKinesisStreamingDestinationCommandOutput, + DynamoDBClientResolvedConfig, + ServiceInputTypes, + ServiceOutputTypes + >; + new ( + __0_0: DisableKinesisStreamingDestinationCommandInput + ): import("@smithy/smithy-client").CommandImpl< + DisableKinesisStreamingDestinationCommandInput, + DisableKinesisStreamingDestinationCommandOutput, + DynamoDBClientResolvedConfig, + ServiceInputTypes, + ServiceOutputTypes + >; + getEndpointParameterInstructions(): import("@smithy/middleware-endpoint").EndpointParameterInstructions; +}; +export declare class DisableKinesisStreamingDestinationCommand extends DisableKinesisStreamingDestinationCommand_base { + protected static __types: { + api: { + input: KinesisStreamingDestinationInput; + output: KinesisStreamingDestinationOutput; + }; + sdk: { + input: DisableKinesisStreamingDestinationCommandInput; + output: DisableKinesisStreamingDestinationCommandOutput; + }; + }; +} diff --git a/amplify/functions/downloadDocument/node_modules/@aws-sdk/client-dynamodb/dist-types/ts3.4/commands/EnableKinesisStreamingDestinationCommand.d.ts b/amplify/functions/downloadDocument/node_modules/@aws-sdk/client-dynamodb/dist-types/ts3.4/commands/EnableKinesisStreamingDestinationCommand.d.ts new file mode 100644 index 0000000..5d40389 --- /dev/null 
+++ b/amplify/functions/downloadDocument/node_modules/@aws-sdk/client-dynamodb/dist-types/ts3.4/commands/EnableKinesisStreamingDestinationCommand.d.ts @@ -0,0 +1,51 @@ +import { Command as $Command } from "@smithy/smithy-client"; +import { MetadataBearer as __MetadataBearer } from "@smithy/types"; +import { + DynamoDBClientResolvedConfig, + ServiceInputTypes, + ServiceOutputTypes, +} from "../DynamoDBClient"; +import { + KinesisStreamingDestinationInput, + KinesisStreamingDestinationOutput, +} from "../models/models_0"; +export { __MetadataBearer }; +export { $Command }; +export interface EnableKinesisStreamingDestinationCommandInput + extends KinesisStreamingDestinationInput {} +export interface EnableKinesisStreamingDestinationCommandOutput + extends KinesisStreamingDestinationOutput, + __MetadataBearer {} +declare const EnableKinesisStreamingDestinationCommand_base: { + new ( + input: EnableKinesisStreamingDestinationCommandInput + ): import("@smithy/smithy-client").CommandImpl< + EnableKinesisStreamingDestinationCommandInput, + EnableKinesisStreamingDestinationCommandOutput, + DynamoDBClientResolvedConfig, + ServiceInputTypes, + ServiceOutputTypes + >; + new ( + __0_0: EnableKinesisStreamingDestinationCommandInput + ): import("@smithy/smithy-client").CommandImpl< + EnableKinesisStreamingDestinationCommandInput, + EnableKinesisStreamingDestinationCommandOutput, + DynamoDBClientResolvedConfig, + ServiceInputTypes, + ServiceOutputTypes + >; + getEndpointParameterInstructions(): import("@smithy/middleware-endpoint").EndpointParameterInstructions; +}; +export declare class EnableKinesisStreamingDestinationCommand extends EnableKinesisStreamingDestinationCommand_base { + protected static __types: { + api: { + input: KinesisStreamingDestinationInput; + output: KinesisStreamingDestinationOutput; + }; + sdk: { + input: EnableKinesisStreamingDestinationCommandInput; + output: EnableKinesisStreamingDestinationCommandOutput; + }; + }; +} diff --git 
a/amplify/functions/downloadDocument/node_modules/@aws-sdk/client-dynamodb/dist-types/ts3.4/commands/ExecuteStatementCommand.d.ts b/amplify/functions/downloadDocument/node_modules/@aws-sdk/client-dynamodb/dist-types/ts3.4/commands/ExecuteStatementCommand.d.ts new file mode 100644 index 0000000..5b73eee --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@aws-sdk/client-dynamodb/dist-types/ts3.4/commands/ExecuteStatementCommand.d.ts @@ -0,0 +1,50 @@ +import { Command as $Command } from "@smithy/smithy-client"; +import { MetadataBearer as __MetadataBearer } from "@smithy/types"; +import { + DynamoDBClientResolvedConfig, + ServiceInputTypes, + ServiceOutputTypes, +} from "../DynamoDBClient"; +import { + ExecuteStatementInput, + ExecuteStatementOutput, +} from "../models/models_0"; +export { __MetadataBearer }; +export { $Command }; +export interface ExecuteStatementCommandInput extends ExecuteStatementInput {} +export interface ExecuteStatementCommandOutput + extends ExecuteStatementOutput, + __MetadataBearer {} +declare const ExecuteStatementCommand_base: { + new ( + input: ExecuteStatementCommandInput + ): import("@smithy/smithy-client").CommandImpl< + ExecuteStatementCommandInput, + ExecuteStatementCommandOutput, + DynamoDBClientResolvedConfig, + ServiceInputTypes, + ServiceOutputTypes + >; + new ( + __0_0: ExecuteStatementCommandInput + ): import("@smithy/smithy-client").CommandImpl< + ExecuteStatementCommandInput, + ExecuteStatementCommandOutput, + DynamoDBClientResolvedConfig, + ServiceInputTypes, + ServiceOutputTypes + >; + getEndpointParameterInstructions(): import("@smithy/middleware-endpoint").EndpointParameterInstructions; +}; +export declare class ExecuteStatementCommand extends ExecuteStatementCommand_base { + protected static __types: { + api: { + input: ExecuteStatementInput; + output: ExecuteStatementOutput; + }; + sdk: { + input: ExecuteStatementCommandInput; + output: ExecuteStatementCommandOutput; + }; + }; +} diff --git 
a/amplify/functions/downloadDocument/node_modules/@aws-sdk/client-dynamodb/dist-types/ts3.4/commands/ExecuteTransactionCommand.d.ts b/amplify/functions/downloadDocument/node_modules/@aws-sdk/client-dynamodb/dist-types/ts3.4/commands/ExecuteTransactionCommand.d.ts new file mode 100644 index 0000000..2b94d7c --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@aws-sdk/client-dynamodb/dist-types/ts3.4/commands/ExecuteTransactionCommand.d.ts @@ -0,0 +1,51 @@ +import { Command as $Command } from "@smithy/smithy-client"; +import { MetadataBearer as __MetadataBearer } from "@smithy/types"; +import { + DynamoDBClientResolvedConfig, + ServiceInputTypes, + ServiceOutputTypes, +} from "../DynamoDBClient"; +import { + ExecuteTransactionInput, + ExecuteTransactionOutput, +} from "../models/models_0"; +export { __MetadataBearer }; +export { $Command }; +export interface ExecuteTransactionCommandInput + extends ExecuteTransactionInput {} +export interface ExecuteTransactionCommandOutput + extends ExecuteTransactionOutput, + __MetadataBearer {} +declare const ExecuteTransactionCommand_base: { + new ( + input: ExecuteTransactionCommandInput + ): import("@smithy/smithy-client").CommandImpl< + ExecuteTransactionCommandInput, + ExecuteTransactionCommandOutput, + DynamoDBClientResolvedConfig, + ServiceInputTypes, + ServiceOutputTypes + >; + new ( + __0_0: ExecuteTransactionCommandInput + ): import("@smithy/smithy-client").CommandImpl< + ExecuteTransactionCommandInput, + ExecuteTransactionCommandOutput, + DynamoDBClientResolvedConfig, + ServiceInputTypes, + ServiceOutputTypes + >; + getEndpointParameterInstructions(): import("@smithy/middleware-endpoint").EndpointParameterInstructions; +}; +export declare class ExecuteTransactionCommand extends ExecuteTransactionCommand_base { + protected static __types: { + api: { + input: ExecuteTransactionInput; + output: ExecuteTransactionOutput; + }; + sdk: { + input: ExecuteTransactionCommandInput; + output: 
ExecuteTransactionCommandOutput; + }; + }; +} diff --git a/amplify/functions/downloadDocument/node_modules/@aws-sdk/client-dynamodb/dist-types/ts3.4/commands/ExportTableToPointInTimeCommand.d.ts b/amplify/functions/downloadDocument/node_modules/@aws-sdk/client-dynamodb/dist-types/ts3.4/commands/ExportTableToPointInTimeCommand.d.ts new file mode 100644 index 0000000..f65fac9 --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@aws-sdk/client-dynamodb/dist-types/ts3.4/commands/ExportTableToPointInTimeCommand.d.ts @@ -0,0 +1,51 @@ +import { Command as $Command } from "@smithy/smithy-client"; +import { MetadataBearer as __MetadataBearer } from "@smithy/types"; +import { + DynamoDBClientResolvedConfig, + ServiceInputTypes, + ServiceOutputTypes, +} from "../DynamoDBClient"; +import { + ExportTableToPointInTimeInput, + ExportTableToPointInTimeOutput, +} from "../models/models_0"; +export { __MetadataBearer }; +export { $Command }; +export interface ExportTableToPointInTimeCommandInput + extends ExportTableToPointInTimeInput {} +export interface ExportTableToPointInTimeCommandOutput + extends ExportTableToPointInTimeOutput, + __MetadataBearer {} +declare const ExportTableToPointInTimeCommand_base: { + new ( + input: ExportTableToPointInTimeCommandInput + ): import("@smithy/smithy-client").CommandImpl< + ExportTableToPointInTimeCommandInput, + ExportTableToPointInTimeCommandOutput, + DynamoDBClientResolvedConfig, + ServiceInputTypes, + ServiceOutputTypes + >; + new ( + __0_0: ExportTableToPointInTimeCommandInput + ): import("@smithy/smithy-client").CommandImpl< + ExportTableToPointInTimeCommandInput, + ExportTableToPointInTimeCommandOutput, + DynamoDBClientResolvedConfig, + ServiceInputTypes, + ServiceOutputTypes + >; + getEndpointParameterInstructions(): import("@smithy/middleware-endpoint").EndpointParameterInstructions; +}; +export declare class ExportTableToPointInTimeCommand extends ExportTableToPointInTimeCommand_base { + protected static __types: { + 
api: { + input: ExportTableToPointInTimeInput; + output: ExportTableToPointInTimeOutput; + }; + sdk: { + input: ExportTableToPointInTimeCommandInput; + output: ExportTableToPointInTimeCommandOutput; + }; + }; +} diff --git a/amplify/functions/downloadDocument/node_modules/@aws-sdk/client-dynamodb/dist-types/ts3.4/commands/GetItemCommand.d.ts b/amplify/functions/downloadDocument/node_modules/@aws-sdk/client-dynamodb/dist-types/ts3.4/commands/GetItemCommand.d.ts new file mode 100644 index 0000000..0e7ffb9 --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@aws-sdk/client-dynamodb/dist-types/ts3.4/commands/GetItemCommand.d.ts @@ -0,0 +1,41 @@ +import { Command as $Command } from "@smithy/smithy-client"; +import { MetadataBearer as __MetadataBearer } from "@smithy/types"; +import { + DynamoDBClientResolvedConfig, + ServiceInputTypes, + ServiceOutputTypes, +} from "../DynamoDBClient"; +import { GetItemInput, GetItemOutput } from "../models/models_0"; +export { __MetadataBearer }; +export { $Command }; +export interface GetItemCommandInput extends GetItemInput {} +export interface GetItemCommandOutput extends GetItemOutput, __MetadataBearer {} +declare const GetItemCommand_base: { + new (input: GetItemCommandInput): import("@smithy/smithy-client").CommandImpl< + GetItemCommandInput, + GetItemCommandOutput, + DynamoDBClientResolvedConfig, + ServiceInputTypes, + ServiceOutputTypes + >; + new (__0_0: GetItemCommandInput): import("@smithy/smithy-client").CommandImpl< + GetItemCommandInput, + GetItemCommandOutput, + DynamoDBClientResolvedConfig, + ServiceInputTypes, + ServiceOutputTypes + >; + getEndpointParameterInstructions(): import("@smithy/middleware-endpoint").EndpointParameterInstructions; +}; +export declare class GetItemCommand extends GetItemCommand_base { + protected static __types: { + api: { + input: GetItemInput; + output: GetItemOutput; + }; + sdk: { + input: GetItemCommandInput; + output: GetItemCommandOutput; + }; + }; +} diff --git 
a/amplify/functions/downloadDocument/node_modules/@aws-sdk/client-dynamodb/dist-types/ts3.4/commands/GetResourcePolicyCommand.d.ts b/amplify/functions/downloadDocument/node_modules/@aws-sdk/client-dynamodb/dist-types/ts3.4/commands/GetResourcePolicyCommand.d.ts new file mode 100644 index 0000000..3691e80 --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@aws-sdk/client-dynamodb/dist-types/ts3.4/commands/GetResourcePolicyCommand.d.ts @@ -0,0 +1,50 @@ +import { Command as $Command } from "@smithy/smithy-client"; +import { MetadataBearer as __MetadataBearer } from "@smithy/types"; +import { + DynamoDBClientResolvedConfig, + ServiceInputTypes, + ServiceOutputTypes, +} from "../DynamoDBClient"; +import { + GetResourcePolicyInput, + GetResourcePolicyOutput, +} from "../models/models_0"; +export { __MetadataBearer }; +export { $Command }; +export interface GetResourcePolicyCommandInput extends GetResourcePolicyInput {} +export interface GetResourcePolicyCommandOutput + extends GetResourcePolicyOutput, + __MetadataBearer {} +declare const GetResourcePolicyCommand_base: { + new ( + input: GetResourcePolicyCommandInput + ): import("@smithy/smithy-client").CommandImpl< + GetResourcePolicyCommandInput, + GetResourcePolicyCommandOutput, + DynamoDBClientResolvedConfig, + ServiceInputTypes, + ServiceOutputTypes + >; + new ( + __0_0: GetResourcePolicyCommandInput + ): import("@smithy/smithy-client").CommandImpl< + GetResourcePolicyCommandInput, + GetResourcePolicyCommandOutput, + DynamoDBClientResolvedConfig, + ServiceInputTypes, + ServiceOutputTypes + >; + getEndpointParameterInstructions(): import("@smithy/middleware-endpoint").EndpointParameterInstructions; +}; +export declare class GetResourcePolicyCommand extends GetResourcePolicyCommand_base { + protected static __types: { + api: { + input: GetResourcePolicyInput; + output: GetResourcePolicyOutput; + }; + sdk: { + input: GetResourcePolicyCommandInput; + output: GetResourcePolicyCommandOutput; + }; + }; +} 
diff --git a/amplify/functions/downloadDocument/node_modules/@aws-sdk/client-dynamodb/dist-types/ts3.4/commands/ImportTableCommand.d.ts b/amplify/functions/downloadDocument/node_modules/@aws-sdk/client-dynamodb/dist-types/ts3.4/commands/ImportTableCommand.d.ts new file mode 100644 index 0000000..57c8b04 --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@aws-sdk/client-dynamodb/dist-types/ts3.4/commands/ImportTableCommand.d.ts @@ -0,0 +1,47 @@ +import { Command as $Command } from "@smithy/smithy-client"; +import { MetadataBearer as __MetadataBearer } from "@smithy/types"; +import { + DynamoDBClientResolvedConfig, + ServiceInputTypes, + ServiceOutputTypes, +} from "../DynamoDBClient"; +import { ImportTableInput, ImportTableOutput } from "../models/models_0"; +export { __MetadataBearer }; +export { $Command }; +export interface ImportTableCommandInput extends ImportTableInput {} +export interface ImportTableCommandOutput + extends ImportTableOutput, + __MetadataBearer {} +declare const ImportTableCommand_base: { + new ( + input: ImportTableCommandInput + ): import("@smithy/smithy-client").CommandImpl< + ImportTableCommandInput, + ImportTableCommandOutput, + DynamoDBClientResolvedConfig, + ServiceInputTypes, + ServiceOutputTypes + >; + new ( + __0_0: ImportTableCommandInput + ): import("@smithy/smithy-client").CommandImpl< + ImportTableCommandInput, + ImportTableCommandOutput, + DynamoDBClientResolvedConfig, + ServiceInputTypes, + ServiceOutputTypes + >; + getEndpointParameterInstructions(): import("@smithy/middleware-endpoint").EndpointParameterInstructions; +}; +export declare class ImportTableCommand extends ImportTableCommand_base { + protected static __types: { + api: { + input: ImportTableInput; + output: ImportTableOutput; + }; + sdk: { + input: ImportTableCommandInput; + output: ImportTableCommandOutput; + }; + }; +} diff --git 
a/amplify/functions/downloadDocument/node_modules/@aws-sdk/client-dynamodb/dist-types/ts3.4/commands/ListBackupsCommand.d.ts b/amplify/functions/downloadDocument/node_modules/@aws-sdk/client-dynamodb/dist-types/ts3.4/commands/ListBackupsCommand.d.ts new file mode 100644 index 0000000..2b00a39 --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@aws-sdk/client-dynamodb/dist-types/ts3.4/commands/ListBackupsCommand.d.ts @@ -0,0 +1,47 @@ +import { Command as $Command } from "@smithy/smithy-client"; +import { MetadataBearer as __MetadataBearer } from "@smithy/types"; +import { + DynamoDBClientResolvedConfig, + ServiceInputTypes, + ServiceOutputTypes, +} from "../DynamoDBClient"; +import { ListBackupsInput, ListBackupsOutput } from "../models/models_0"; +export { __MetadataBearer }; +export { $Command }; +export interface ListBackupsCommandInput extends ListBackupsInput {} +export interface ListBackupsCommandOutput + extends ListBackupsOutput, + __MetadataBearer {} +declare const ListBackupsCommand_base: { + new ( + input: ListBackupsCommandInput + ): import("@smithy/smithy-client").CommandImpl< + ListBackupsCommandInput, + ListBackupsCommandOutput, + DynamoDBClientResolvedConfig, + ServiceInputTypes, + ServiceOutputTypes + >; + new ( + ...[input]: [] | [ListBackupsCommandInput] + ): import("@smithy/smithy-client").CommandImpl< + ListBackupsCommandInput, + ListBackupsCommandOutput, + DynamoDBClientResolvedConfig, + ServiceInputTypes, + ServiceOutputTypes + >; + getEndpointParameterInstructions(): import("@smithy/middleware-endpoint").EndpointParameterInstructions; +}; +export declare class ListBackupsCommand extends ListBackupsCommand_base { + protected static __types: { + api: { + input: ListBackupsInput; + output: ListBackupsOutput; + }; + sdk: { + input: ListBackupsCommandInput; + output: ListBackupsCommandOutput; + }; + }; +} diff --git 
a/amplify/functions/downloadDocument/node_modules/@aws-sdk/client-dynamodb/dist-types/ts3.4/commands/ListContributorInsightsCommand.d.ts b/amplify/functions/downloadDocument/node_modules/@aws-sdk/client-dynamodb/dist-types/ts3.4/commands/ListContributorInsightsCommand.d.ts new file mode 100644 index 0000000..455495e --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@aws-sdk/client-dynamodb/dist-types/ts3.4/commands/ListContributorInsightsCommand.d.ts @@ -0,0 +1,51 @@ +import { Command as $Command } from "@smithy/smithy-client"; +import { MetadataBearer as __MetadataBearer } from "@smithy/types"; +import { + DynamoDBClientResolvedConfig, + ServiceInputTypes, + ServiceOutputTypes, +} from "../DynamoDBClient"; +import { + ListContributorInsightsInput, + ListContributorInsightsOutput, +} from "../models/models_0"; +export { __MetadataBearer }; +export { $Command }; +export interface ListContributorInsightsCommandInput + extends ListContributorInsightsInput {} +export interface ListContributorInsightsCommandOutput + extends ListContributorInsightsOutput, + __MetadataBearer {} +declare const ListContributorInsightsCommand_base: { + new ( + input: ListContributorInsightsCommandInput + ): import("@smithy/smithy-client").CommandImpl< + ListContributorInsightsCommandInput, + ListContributorInsightsCommandOutput, + DynamoDBClientResolvedConfig, + ServiceInputTypes, + ServiceOutputTypes + >; + new ( + ...[input]: [] | [ListContributorInsightsCommandInput] + ): import("@smithy/smithy-client").CommandImpl< + ListContributorInsightsCommandInput, + ListContributorInsightsCommandOutput, + DynamoDBClientResolvedConfig, + ServiceInputTypes, + ServiceOutputTypes + >; + getEndpointParameterInstructions(): import("@smithy/middleware-endpoint").EndpointParameterInstructions; +}; +export declare class ListContributorInsightsCommand extends ListContributorInsightsCommand_base { + protected static __types: { + api: { + input: ListContributorInsightsInput; + output: 
ListContributorInsightsOutput; + }; + sdk: { + input: ListContributorInsightsCommandInput; + output: ListContributorInsightsCommandOutput; + }; + }; +} diff --git a/amplify/functions/downloadDocument/node_modules/@aws-sdk/client-dynamodb/dist-types/ts3.4/commands/ListExportsCommand.d.ts b/amplify/functions/downloadDocument/node_modules/@aws-sdk/client-dynamodb/dist-types/ts3.4/commands/ListExportsCommand.d.ts new file mode 100644 index 0000000..b968746 --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@aws-sdk/client-dynamodb/dist-types/ts3.4/commands/ListExportsCommand.d.ts @@ -0,0 +1,47 @@ +import { Command as $Command } from "@smithy/smithy-client"; +import { MetadataBearer as __MetadataBearer } from "@smithy/types"; +import { + DynamoDBClientResolvedConfig, + ServiceInputTypes, + ServiceOutputTypes, +} from "../DynamoDBClient"; +import { ListExportsInput, ListExportsOutput } from "../models/models_0"; +export { __MetadataBearer }; +export { $Command }; +export interface ListExportsCommandInput extends ListExportsInput {} +export interface ListExportsCommandOutput + extends ListExportsOutput, + __MetadataBearer {} +declare const ListExportsCommand_base: { + new ( + input: ListExportsCommandInput + ): import("@smithy/smithy-client").CommandImpl< + ListExportsCommandInput, + ListExportsCommandOutput, + DynamoDBClientResolvedConfig, + ServiceInputTypes, + ServiceOutputTypes + >; + new ( + ...[input]: [] | [ListExportsCommandInput] + ): import("@smithy/smithy-client").CommandImpl< + ListExportsCommandInput, + ListExportsCommandOutput, + DynamoDBClientResolvedConfig, + ServiceInputTypes, + ServiceOutputTypes + >; + getEndpointParameterInstructions(): import("@smithy/middleware-endpoint").EndpointParameterInstructions; +}; +export declare class ListExportsCommand extends ListExportsCommand_base { + protected static __types: { + api: { + input: ListExportsInput; + output: ListExportsOutput; + }; + sdk: { + input: ListExportsCommandInput; + output: 
ListExportsCommandOutput; + }; + }; +} diff --git a/amplify/functions/downloadDocument/node_modules/@aws-sdk/client-dynamodb/dist-types/ts3.4/commands/ListGlobalTablesCommand.d.ts b/amplify/functions/downloadDocument/node_modules/@aws-sdk/client-dynamodb/dist-types/ts3.4/commands/ListGlobalTablesCommand.d.ts new file mode 100644 index 0000000..3428e57 --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@aws-sdk/client-dynamodb/dist-types/ts3.4/commands/ListGlobalTablesCommand.d.ts @@ -0,0 +1,50 @@ +import { Command as $Command } from "@smithy/smithy-client"; +import { MetadataBearer as __MetadataBearer } from "@smithy/types"; +import { + DynamoDBClientResolvedConfig, + ServiceInputTypes, + ServiceOutputTypes, +} from "../DynamoDBClient"; +import { + ListGlobalTablesInput, + ListGlobalTablesOutput, +} from "../models/models_0"; +export { __MetadataBearer }; +export { $Command }; +export interface ListGlobalTablesCommandInput extends ListGlobalTablesInput {} +export interface ListGlobalTablesCommandOutput + extends ListGlobalTablesOutput, + __MetadataBearer {} +declare const ListGlobalTablesCommand_base: { + new ( + input: ListGlobalTablesCommandInput + ): import("@smithy/smithy-client").CommandImpl< + ListGlobalTablesCommandInput, + ListGlobalTablesCommandOutput, + DynamoDBClientResolvedConfig, + ServiceInputTypes, + ServiceOutputTypes + >; + new ( + ...[input]: [] | [ListGlobalTablesCommandInput] + ): import("@smithy/smithy-client").CommandImpl< + ListGlobalTablesCommandInput, + ListGlobalTablesCommandOutput, + DynamoDBClientResolvedConfig, + ServiceInputTypes, + ServiceOutputTypes + >; + getEndpointParameterInstructions(): import("@smithy/middleware-endpoint").EndpointParameterInstructions; +}; +export declare class ListGlobalTablesCommand extends ListGlobalTablesCommand_base { + protected static __types: { + api: { + input: ListGlobalTablesInput; + output: ListGlobalTablesOutput; + }; + sdk: { + input: ListGlobalTablesCommandInput; + output: 
ListGlobalTablesCommandOutput; + }; + }; +} diff --git a/amplify/functions/downloadDocument/node_modules/@aws-sdk/client-dynamodb/dist-types/ts3.4/commands/ListImportsCommand.d.ts b/amplify/functions/downloadDocument/node_modules/@aws-sdk/client-dynamodb/dist-types/ts3.4/commands/ListImportsCommand.d.ts new file mode 100644 index 0000000..07bfebf --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@aws-sdk/client-dynamodb/dist-types/ts3.4/commands/ListImportsCommand.d.ts @@ -0,0 +1,47 @@ +import { Command as $Command } from "@smithy/smithy-client"; +import { MetadataBearer as __MetadataBearer } from "@smithy/types"; +import { + DynamoDBClientResolvedConfig, + ServiceInputTypes, + ServiceOutputTypes, +} from "../DynamoDBClient"; +import { ListImportsInput, ListImportsOutput } from "../models/models_0"; +export { __MetadataBearer }; +export { $Command }; +export interface ListImportsCommandInput extends ListImportsInput {} +export interface ListImportsCommandOutput + extends ListImportsOutput, + __MetadataBearer {} +declare const ListImportsCommand_base: { + new ( + input: ListImportsCommandInput + ): import("@smithy/smithy-client").CommandImpl< + ListImportsCommandInput, + ListImportsCommandOutput, + DynamoDBClientResolvedConfig, + ServiceInputTypes, + ServiceOutputTypes + >; + new ( + ...[input]: [] | [ListImportsCommandInput] + ): import("@smithy/smithy-client").CommandImpl< + ListImportsCommandInput, + ListImportsCommandOutput, + DynamoDBClientResolvedConfig, + ServiceInputTypes, + ServiceOutputTypes + >; + getEndpointParameterInstructions(): import("@smithy/middleware-endpoint").EndpointParameterInstructions; +}; +export declare class ListImportsCommand extends ListImportsCommand_base { + protected static __types: { + api: { + input: ListImportsInput; + output: ListImportsOutput; + }; + sdk: { + input: ListImportsCommandInput; + output: ListImportsCommandOutput; + }; + }; +} diff --git 
a/amplify/functions/downloadDocument/node_modules/@aws-sdk/client-dynamodb/dist-types/ts3.4/commands/ListTablesCommand.d.ts b/amplify/functions/downloadDocument/node_modules/@aws-sdk/client-dynamodb/dist-types/ts3.4/commands/ListTablesCommand.d.ts new file mode 100644 index 0000000..497f02f --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@aws-sdk/client-dynamodb/dist-types/ts3.4/commands/ListTablesCommand.d.ts @@ -0,0 +1,47 @@ +import { Command as $Command } from "@smithy/smithy-client"; +import { MetadataBearer as __MetadataBearer } from "@smithy/types"; +import { + DynamoDBClientResolvedConfig, + ServiceInputTypes, + ServiceOutputTypes, +} from "../DynamoDBClient"; +import { ListTablesInput, ListTablesOutput } from "../models/models_0"; +export { __MetadataBearer }; +export { $Command }; +export interface ListTablesCommandInput extends ListTablesInput {} +export interface ListTablesCommandOutput + extends ListTablesOutput, + __MetadataBearer {} +declare const ListTablesCommand_base: { + new ( + input: ListTablesCommandInput + ): import("@smithy/smithy-client").CommandImpl< + ListTablesCommandInput, + ListTablesCommandOutput, + DynamoDBClientResolvedConfig, + ServiceInputTypes, + ServiceOutputTypes + >; + new ( + ...[input]: [] | [ListTablesCommandInput] + ): import("@smithy/smithy-client").CommandImpl< + ListTablesCommandInput, + ListTablesCommandOutput, + DynamoDBClientResolvedConfig, + ServiceInputTypes, + ServiceOutputTypes + >; + getEndpointParameterInstructions(): import("@smithy/middleware-endpoint").EndpointParameterInstructions; +}; +export declare class ListTablesCommand extends ListTablesCommand_base { + protected static __types: { + api: { + input: ListTablesInput; + output: ListTablesOutput; + }; + sdk: { + input: ListTablesCommandInput; + output: ListTablesCommandOutput; + }; + }; +} diff --git a/amplify/functions/downloadDocument/node_modules/@aws-sdk/client-dynamodb/dist-types/ts3.4/commands/ListTagsOfResourceCommand.d.ts 
b/amplify/functions/downloadDocument/node_modules/@aws-sdk/client-dynamodb/dist-types/ts3.4/commands/ListTagsOfResourceCommand.d.ts new file mode 100644 index 0000000..8bffe40 --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@aws-sdk/client-dynamodb/dist-types/ts3.4/commands/ListTagsOfResourceCommand.d.ts @@ -0,0 +1,51 @@ +import { Command as $Command } from "@smithy/smithy-client"; +import { MetadataBearer as __MetadataBearer } from "@smithy/types"; +import { + DynamoDBClientResolvedConfig, + ServiceInputTypes, + ServiceOutputTypes, +} from "../DynamoDBClient"; +import { + ListTagsOfResourceInput, + ListTagsOfResourceOutput, +} from "../models/models_0"; +export { __MetadataBearer }; +export { $Command }; +export interface ListTagsOfResourceCommandInput + extends ListTagsOfResourceInput {} +export interface ListTagsOfResourceCommandOutput + extends ListTagsOfResourceOutput, + __MetadataBearer {} +declare const ListTagsOfResourceCommand_base: { + new ( + input: ListTagsOfResourceCommandInput + ): import("@smithy/smithy-client").CommandImpl< + ListTagsOfResourceCommandInput, + ListTagsOfResourceCommandOutput, + DynamoDBClientResolvedConfig, + ServiceInputTypes, + ServiceOutputTypes + >; + new ( + __0_0: ListTagsOfResourceCommandInput + ): import("@smithy/smithy-client").CommandImpl< + ListTagsOfResourceCommandInput, + ListTagsOfResourceCommandOutput, + DynamoDBClientResolvedConfig, + ServiceInputTypes, + ServiceOutputTypes + >; + getEndpointParameterInstructions(): import("@smithy/middleware-endpoint").EndpointParameterInstructions; +}; +export declare class ListTagsOfResourceCommand extends ListTagsOfResourceCommand_base { + protected static __types: { + api: { + input: ListTagsOfResourceInput; + output: ListTagsOfResourceOutput; + }; + sdk: { + input: ListTagsOfResourceCommandInput; + output: ListTagsOfResourceCommandOutput; + }; + }; +} diff --git 
a/amplify/functions/downloadDocument/node_modules/@aws-sdk/client-dynamodb/dist-types/ts3.4/commands/PutItemCommand.d.ts b/amplify/functions/downloadDocument/node_modules/@aws-sdk/client-dynamodb/dist-types/ts3.4/commands/PutItemCommand.d.ts new file mode 100644 index 0000000..7ae0d3e --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@aws-sdk/client-dynamodb/dist-types/ts3.4/commands/PutItemCommand.d.ts @@ -0,0 +1,41 @@ +import { Command as $Command } from "@smithy/smithy-client"; +import { MetadataBearer as __MetadataBearer } from "@smithy/types"; +import { + DynamoDBClientResolvedConfig, + ServiceInputTypes, + ServiceOutputTypes, +} from "../DynamoDBClient"; +import { PutItemInput, PutItemOutput } from "../models/models_0"; +export { __MetadataBearer }; +export { $Command }; +export interface PutItemCommandInput extends PutItemInput {} +export interface PutItemCommandOutput extends PutItemOutput, __MetadataBearer {} +declare const PutItemCommand_base: { + new (input: PutItemCommandInput): import("@smithy/smithy-client").CommandImpl< + PutItemCommandInput, + PutItemCommandOutput, + DynamoDBClientResolvedConfig, + ServiceInputTypes, + ServiceOutputTypes + >; + new (__0_0: PutItemCommandInput): import("@smithy/smithy-client").CommandImpl< + PutItemCommandInput, + PutItemCommandOutput, + DynamoDBClientResolvedConfig, + ServiceInputTypes, + ServiceOutputTypes + >; + getEndpointParameterInstructions(): import("@smithy/middleware-endpoint").EndpointParameterInstructions; +}; +export declare class PutItemCommand extends PutItemCommand_base { + protected static __types: { + api: { + input: PutItemInput; + output: PutItemOutput; + }; + sdk: { + input: PutItemCommandInput; + output: PutItemCommandOutput; + }; + }; +} diff --git a/amplify/functions/downloadDocument/node_modules/@aws-sdk/client-dynamodb/dist-types/ts3.4/commands/PutResourcePolicyCommand.d.ts 
b/amplify/functions/downloadDocument/node_modules/@aws-sdk/client-dynamodb/dist-types/ts3.4/commands/PutResourcePolicyCommand.d.ts new file mode 100644 index 0000000..2c83af7 --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@aws-sdk/client-dynamodb/dist-types/ts3.4/commands/PutResourcePolicyCommand.d.ts @@ -0,0 +1,50 @@ +import { Command as $Command } from "@smithy/smithy-client"; +import { MetadataBearer as __MetadataBearer } from "@smithy/types"; +import { + DynamoDBClientResolvedConfig, + ServiceInputTypes, + ServiceOutputTypes, +} from "../DynamoDBClient"; +import { + PutResourcePolicyInput, + PutResourcePolicyOutput, +} from "../models/models_0"; +export { __MetadataBearer }; +export { $Command }; +export interface PutResourcePolicyCommandInput extends PutResourcePolicyInput {} +export interface PutResourcePolicyCommandOutput + extends PutResourcePolicyOutput, + __MetadataBearer {} +declare const PutResourcePolicyCommand_base: { + new ( + input: PutResourcePolicyCommandInput + ): import("@smithy/smithy-client").CommandImpl< + PutResourcePolicyCommandInput, + PutResourcePolicyCommandOutput, + DynamoDBClientResolvedConfig, + ServiceInputTypes, + ServiceOutputTypes + >; + new ( + __0_0: PutResourcePolicyCommandInput + ): import("@smithy/smithy-client").CommandImpl< + PutResourcePolicyCommandInput, + PutResourcePolicyCommandOutput, + DynamoDBClientResolvedConfig, + ServiceInputTypes, + ServiceOutputTypes + >; + getEndpointParameterInstructions(): import("@smithy/middleware-endpoint").EndpointParameterInstructions; +}; +export declare class PutResourcePolicyCommand extends PutResourcePolicyCommand_base { + protected static __types: { + api: { + input: PutResourcePolicyInput; + output: PutResourcePolicyOutput; + }; + sdk: { + input: PutResourcePolicyCommandInput; + output: PutResourcePolicyCommandOutput; + }; + }; +} diff --git a/amplify/functions/downloadDocument/node_modules/@aws-sdk/client-dynamodb/dist-types/ts3.4/commands/QueryCommand.d.ts 
b/amplify/functions/downloadDocument/node_modules/@aws-sdk/client-dynamodb/dist-types/ts3.4/commands/QueryCommand.d.ts new file mode 100644 index 0000000..125753a --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@aws-sdk/client-dynamodb/dist-types/ts3.4/commands/QueryCommand.d.ts @@ -0,0 +1,41 @@ +import { Command as $Command } from "@smithy/smithy-client"; +import { MetadataBearer as __MetadataBearer } from "@smithy/types"; +import { + DynamoDBClientResolvedConfig, + ServiceInputTypes, + ServiceOutputTypes, +} from "../DynamoDBClient"; +import { QueryInput, QueryOutput } from "../models/models_0"; +export { __MetadataBearer }; +export { $Command }; +export interface QueryCommandInput extends QueryInput {} +export interface QueryCommandOutput extends QueryOutput, __MetadataBearer {} +declare const QueryCommand_base: { + new (input: QueryCommandInput): import("@smithy/smithy-client").CommandImpl< + QueryCommandInput, + QueryCommandOutput, + DynamoDBClientResolvedConfig, + ServiceInputTypes, + ServiceOutputTypes + >; + new (__0_0: QueryCommandInput): import("@smithy/smithy-client").CommandImpl< + QueryCommandInput, + QueryCommandOutput, + DynamoDBClientResolvedConfig, + ServiceInputTypes, + ServiceOutputTypes + >; + getEndpointParameterInstructions(): import("@smithy/middleware-endpoint").EndpointParameterInstructions; +}; +export declare class QueryCommand extends QueryCommand_base { + protected static __types: { + api: { + input: QueryInput; + output: QueryOutput; + }; + sdk: { + input: QueryCommandInput; + output: QueryCommandOutput; + }; + }; +} diff --git a/amplify/functions/downloadDocument/node_modules/@aws-sdk/client-dynamodb/dist-types/ts3.4/commands/RestoreTableFromBackupCommand.d.ts b/amplify/functions/downloadDocument/node_modules/@aws-sdk/client-dynamodb/dist-types/ts3.4/commands/RestoreTableFromBackupCommand.d.ts new file mode 100644 index 0000000..954e61b --- /dev/null +++ 
b/amplify/functions/downloadDocument/node_modules/@aws-sdk/client-dynamodb/dist-types/ts3.4/commands/RestoreTableFromBackupCommand.d.ts @@ -0,0 +1,51 @@ +import { Command as $Command } from "@smithy/smithy-client"; +import { MetadataBearer as __MetadataBearer } from "@smithy/types"; +import { + DynamoDBClientResolvedConfig, + ServiceInputTypes, + ServiceOutputTypes, +} from "../DynamoDBClient"; +import { + RestoreTableFromBackupInput, + RestoreTableFromBackupOutput, +} from "../models/models_0"; +export { __MetadataBearer }; +export { $Command }; +export interface RestoreTableFromBackupCommandInput + extends RestoreTableFromBackupInput {} +export interface RestoreTableFromBackupCommandOutput + extends RestoreTableFromBackupOutput, + __MetadataBearer {} +declare const RestoreTableFromBackupCommand_base: { + new ( + input: RestoreTableFromBackupCommandInput + ): import("@smithy/smithy-client").CommandImpl< + RestoreTableFromBackupCommandInput, + RestoreTableFromBackupCommandOutput, + DynamoDBClientResolvedConfig, + ServiceInputTypes, + ServiceOutputTypes + >; + new ( + __0_0: RestoreTableFromBackupCommandInput + ): import("@smithy/smithy-client").CommandImpl< + RestoreTableFromBackupCommandInput, + RestoreTableFromBackupCommandOutput, + DynamoDBClientResolvedConfig, + ServiceInputTypes, + ServiceOutputTypes + >; + getEndpointParameterInstructions(): import("@smithy/middleware-endpoint").EndpointParameterInstructions; +}; +export declare class RestoreTableFromBackupCommand extends RestoreTableFromBackupCommand_base { + protected static __types: { + api: { + input: RestoreTableFromBackupInput; + output: RestoreTableFromBackupOutput; + }; + sdk: { + input: RestoreTableFromBackupCommandInput; + output: RestoreTableFromBackupCommandOutput; + }; + }; +} diff --git a/amplify/functions/downloadDocument/node_modules/@aws-sdk/client-dynamodb/dist-types/ts3.4/commands/RestoreTableToPointInTimeCommand.d.ts 
b/amplify/functions/downloadDocument/node_modules/@aws-sdk/client-dynamodb/dist-types/ts3.4/commands/RestoreTableToPointInTimeCommand.d.ts new file mode 100644 index 0000000..8b243f5 --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@aws-sdk/client-dynamodb/dist-types/ts3.4/commands/RestoreTableToPointInTimeCommand.d.ts @@ -0,0 +1,51 @@ +import { Command as $Command } from "@smithy/smithy-client"; +import { MetadataBearer as __MetadataBearer } from "@smithy/types"; +import { + DynamoDBClientResolvedConfig, + ServiceInputTypes, + ServiceOutputTypes, +} from "../DynamoDBClient"; +import { + RestoreTableToPointInTimeInput, + RestoreTableToPointInTimeOutput, +} from "../models/models_0"; +export { __MetadataBearer }; +export { $Command }; +export interface RestoreTableToPointInTimeCommandInput + extends RestoreTableToPointInTimeInput {} +export interface RestoreTableToPointInTimeCommandOutput + extends RestoreTableToPointInTimeOutput, + __MetadataBearer {} +declare const RestoreTableToPointInTimeCommand_base: { + new ( + input: RestoreTableToPointInTimeCommandInput + ): import("@smithy/smithy-client").CommandImpl< + RestoreTableToPointInTimeCommandInput, + RestoreTableToPointInTimeCommandOutput, + DynamoDBClientResolvedConfig, + ServiceInputTypes, + ServiceOutputTypes + >; + new ( + __0_0: RestoreTableToPointInTimeCommandInput + ): import("@smithy/smithy-client").CommandImpl< + RestoreTableToPointInTimeCommandInput, + RestoreTableToPointInTimeCommandOutput, + DynamoDBClientResolvedConfig, + ServiceInputTypes, + ServiceOutputTypes + >; + getEndpointParameterInstructions(): import("@smithy/middleware-endpoint").EndpointParameterInstructions; +}; +export declare class RestoreTableToPointInTimeCommand extends RestoreTableToPointInTimeCommand_base { + protected static __types: { + api: { + input: RestoreTableToPointInTimeInput; + output: RestoreTableToPointInTimeOutput; + }; + sdk: { + input: RestoreTableToPointInTimeCommandInput; + output: 
RestoreTableToPointInTimeCommandOutput; + }; + }; +} diff --git a/amplify/functions/downloadDocument/node_modules/@aws-sdk/client-dynamodb/dist-types/ts3.4/commands/ScanCommand.d.ts b/amplify/functions/downloadDocument/node_modules/@aws-sdk/client-dynamodb/dist-types/ts3.4/commands/ScanCommand.d.ts new file mode 100644 index 0000000..fe66b35 --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@aws-sdk/client-dynamodb/dist-types/ts3.4/commands/ScanCommand.d.ts @@ -0,0 +1,41 @@ +import { Command as $Command } from "@smithy/smithy-client"; +import { MetadataBearer as __MetadataBearer } from "@smithy/types"; +import { + DynamoDBClientResolvedConfig, + ServiceInputTypes, + ServiceOutputTypes, +} from "../DynamoDBClient"; +import { ScanInput, ScanOutput } from "../models/models_0"; +export { __MetadataBearer }; +export { $Command }; +export interface ScanCommandInput extends ScanInput {} +export interface ScanCommandOutput extends ScanOutput, __MetadataBearer {} +declare const ScanCommand_base: { + new (input: ScanCommandInput): import("@smithy/smithy-client").CommandImpl< + ScanCommandInput, + ScanCommandOutput, + DynamoDBClientResolvedConfig, + ServiceInputTypes, + ServiceOutputTypes + >; + new (__0_0: ScanCommandInput): import("@smithy/smithy-client").CommandImpl< + ScanCommandInput, + ScanCommandOutput, + DynamoDBClientResolvedConfig, + ServiceInputTypes, + ServiceOutputTypes + >; + getEndpointParameterInstructions(): import("@smithy/middleware-endpoint").EndpointParameterInstructions; +}; +export declare class ScanCommand extends ScanCommand_base { + protected static __types: { + api: { + input: ScanInput; + output: ScanOutput; + }; + sdk: { + input: ScanCommandInput; + output: ScanCommandOutput; + }; + }; +} diff --git a/amplify/functions/downloadDocument/node_modules/@aws-sdk/client-dynamodb/dist-types/ts3.4/commands/TagResourceCommand.d.ts 
b/amplify/functions/downloadDocument/node_modules/@aws-sdk/client-dynamodb/dist-types/ts3.4/commands/TagResourceCommand.d.ts new file mode 100644 index 0000000..2791246 --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@aws-sdk/client-dynamodb/dist-types/ts3.4/commands/TagResourceCommand.d.ts @@ -0,0 +1,45 @@ +import { Command as $Command } from "@smithy/smithy-client"; +import { MetadataBearer as __MetadataBearer } from "@smithy/types"; +import { + DynamoDBClientResolvedConfig, + ServiceInputTypes, + ServiceOutputTypes, +} from "../DynamoDBClient"; +import { TagResourceInput } from "../models/models_0"; +export { __MetadataBearer }; +export { $Command }; +export interface TagResourceCommandInput extends TagResourceInput {} +export interface TagResourceCommandOutput extends __MetadataBearer {} +declare const TagResourceCommand_base: { + new ( + input: TagResourceCommandInput + ): import("@smithy/smithy-client").CommandImpl< + TagResourceCommandInput, + TagResourceCommandOutput, + DynamoDBClientResolvedConfig, + ServiceInputTypes, + ServiceOutputTypes + >; + new ( + __0_0: TagResourceCommandInput + ): import("@smithy/smithy-client").CommandImpl< + TagResourceCommandInput, + TagResourceCommandOutput, + DynamoDBClientResolvedConfig, + ServiceInputTypes, + ServiceOutputTypes + >; + getEndpointParameterInstructions(): import("@smithy/middleware-endpoint").EndpointParameterInstructions; +}; +export declare class TagResourceCommand extends TagResourceCommand_base { + protected static __types: { + api: { + input: TagResourceInput; + output: {}; + }; + sdk: { + input: TagResourceCommandInput; + output: TagResourceCommandOutput; + }; + }; +} diff --git a/amplify/functions/downloadDocument/node_modules/@aws-sdk/client-dynamodb/dist-types/ts3.4/commands/TransactGetItemsCommand.d.ts b/amplify/functions/downloadDocument/node_modules/@aws-sdk/client-dynamodb/dist-types/ts3.4/commands/TransactGetItemsCommand.d.ts new file mode 100644 index 0000000..0068549 --- 
/dev/null +++ b/amplify/functions/downloadDocument/node_modules/@aws-sdk/client-dynamodb/dist-types/ts3.4/commands/TransactGetItemsCommand.d.ts @@ -0,0 +1,50 @@ +import { Command as $Command } from "@smithy/smithy-client"; +import { MetadataBearer as __MetadataBearer } from "@smithy/types"; +import { + DynamoDBClientResolvedConfig, + ServiceInputTypes, + ServiceOutputTypes, +} from "../DynamoDBClient"; +import { + TransactGetItemsInput, + TransactGetItemsOutput, +} from "../models/models_0"; +export { __MetadataBearer }; +export { $Command }; +export interface TransactGetItemsCommandInput extends TransactGetItemsInput {} +export interface TransactGetItemsCommandOutput + extends TransactGetItemsOutput, + __MetadataBearer {} +declare const TransactGetItemsCommand_base: { + new ( + input: TransactGetItemsCommandInput + ): import("@smithy/smithy-client").CommandImpl< + TransactGetItemsCommandInput, + TransactGetItemsCommandOutput, + DynamoDBClientResolvedConfig, + ServiceInputTypes, + ServiceOutputTypes + >; + new ( + __0_0: TransactGetItemsCommandInput + ): import("@smithy/smithy-client").CommandImpl< + TransactGetItemsCommandInput, + TransactGetItemsCommandOutput, + DynamoDBClientResolvedConfig, + ServiceInputTypes, + ServiceOutputTypes + >; + getEndpointParameterInstructions(): import("@smithy/middleware-endpoint").EndpointParameterInstructions; +}; +export declare class TransactGetItemsCommand extends TransactGetItemsCommand_base { + protected static __types: { + api: { + input: TransactGetItemsInput; + output: TransactGetItemsOutput; + }; + sdk: { + input: TransactGetItemsCommandInput; + output: TransactGetItemsCommandOutput; + }; + }; +} diff --git a/amplify/functions/downloadDocument/node_modules/@aws-sdk/client-dynamodb/dist-types/ts3.4/commands/TransactWriteItemsCommand.d.ts b/amplify/functions/downloadDocument/node_modules/@aws-sdk/client-dynamodb/dist-types/ts3.4/commands/TransactWriteItemsCommand.d.ts new file mode 100644 index 0000000..f945bc5 --- 
/dev/null +++ b/amplify/functions/downloadDocument/node_modules/@aws-sdk/client-dynamodb/dist-types/ts3.4/commands/TransactWriteItemsCommand.d.ts @@ -0,0 +1,51 @@ +import { Command as $Command } from "@smithy/smithy-client"; +import { MetadataBearer as __MetadataBearer } from "@smithy/types"; +import { + DynamoDBClientResolvedConfig, + ServiceInputTypes, + ServiceOutputTypes, +} from "../DynamoDBClient"; +import { + TransactWriteItemsInput, + TransactWriteItemsOutput, +} from "../models/models_0"; +export { __MetadataBearer }; +export { $Command }; +export interface TransactWriteItemsCommandInput + extends TransactWriteItemsInput {} +export interface TransactWriteItemsCommandOutput + extends TransactWriteItemsOutput, + __MetadataBearer {} +declare const TransactWriteItemsCommand_base: { + new ( + input: TransactWriteItemsCommandInput + ): import("@smithy/smithy-client").CommandImpl< + TransactWriteItemsCommandInput, + TransactWriteItemsCommandOutput, + DynamoDBClientResolvedConfig, + ServiceInputTypes, + ServiceOutputTypes + >; + new ( + __0_0: TransactWriteItemsCommandInput + ): import("@smithy/smithy-client").CommandImpl< + TransactWriteItemsCommandInput, + TransactWriteItemsCommandOutput, + DynamoDBClientResolvedConfig, + ServiceInputTypes, + ServiceOutputTypes + >; + getEndpointParameterInstructions(): import("@smithy/middleware-endpoint").EndpointParameterInstructions; +}; +export declare class TransactWriteItemsCommand extends TransactWriteItemsCommand_base { + protected static __types: { + api: { + input: TransactWriteItemsInput; + output: TransactWriteItemsOutput; + }; + sdk: { + input: TransactWriteItemsCommandInput; + output: TransactWriteItemsCommandOutput; + }; + }; +} diff --git a/amplify/functions/downloadDocument/node_modules/@aws-sdk/client-dynamodb/dist-types/ts3.4/commands/UntagResourceCommand.d.ts b/amplify/functions/downloadDocument/node_modules/@aws-sdk/client-dynamodb/dist-types/ts3.4/commands/UntagResourceCommand.d.ts new file mode 100644 
index 0000000..7744ef6 --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@aws-sdk/client-dynamodb/dist-types/ts3.4/commands/UntagResourceCommand.d.ts @@ -0,0 +1,45 @@ +import { Command as $Command } from "@smithy/smithy-client"; +import { MetadataBearer as __MetadataBearer } from "@smithy/types"; +import { + DynamoDBClientResolvedConfig, + ServiceInputTypes, + ServiceOutputTypes, +} from "../DynamoDBClient"; +import { UntagResourceInput } from "../models/models_0"; +export { __MetadataBearer }; +export { $Command }; +export interface UntagResourceCommandInput extends UntagResourceInput {} +export interface UntagResourceCommandOutput extends __MetadataBearer {} +declare const UntagResourceCommand_base: { + new ( + input: UntagResourceCommandInput + ): import("@smithy/smithy-client").CommandImpl< + UntagResourceCommandInput, + UntagResourceCommandOutput, + DynamoDBClientResolvedConfig, + ServiceInputTypes, + ServiceOutputTypes + >; + new ( + __0_0: UntagResourceCommandInput + ): import("@smithy/smithy-client").CommandImpl< + UntagResourceCommandInput, + UntagResourceCommandOutput, + DynamoDBClientResolvedConfig, + ServiceInputTypes, + ServiceOutputTypes + >; + getEndpointParameterInstructions(): import("@smithy/middleware-endpoint").EndpointParameterInstructions; +}; +export declare class UntagResourceCommand extends UntagResourceCommand_base { + protected static __types: { + api: { + input: UntagResourceInput; + output: {}; + }; + sdk: { + input: UntagResourceCommandInput; + output: UntagResourceCommandOutput; + }; + }; +} diff --git a/amplify/functions/downloadDocument/node_modules/@aws-sdk/client-dynamodb/dist-types/ts3.4/commands/UpdateContinuousBackupsCommand.d.ts b/amplify/functions/downloadDocument/node_modules/@aws-sdk/client-dynamodb/dist-types/ts3.4/commands/UpdateContinuousBackupsCommand.d.ts new file mode 100644 index 0000000..d771ccd --- /dev/null +++ 
b/amplify/functions/downloadDocument/node_modules/@aws-sdk/client-dynamodb/dist-types/ts3.4/commands/UpdateContinuousBackupsCommand.d.ts @@ -0,0 +1,51 @@ +import { Command as $Command } from "@smithy/smithy-client"; +import { MetadataBearer as __MetadataBearer } from "@smithy/types"; +import { + DynamoDBClientResolvedConfig, + ServiceInputTypes, + ServiceOutputTypes, +} from "../DynamoDBClient"; +import { + UpdateContinuousBackupsInput, + UpdateContinuousBackupsOutput, +} from "../models/models_0"; +export { __MetadataBearer }; +export { $Command }; +export interface UpdateContinuousBackupsCommandInput + extends UpdateContinuousBackupsInput {} +export interface UpdateContinuousBackupsCommandOutput + extends UpdateContinuousBackupsOutput, + __MetadataBearer {} +declare const UpdateContinuousBackupsCommand_base: { + new ( + input: UpdateContinuousBackupsCommandInput + ): import("@smithy/smithy-client").CommandImpl< + UpdateContinuousBackupsCommandInput, + UpdateContinuousBackupsCommandOutput, + DynamoDBClientResolvedConfig, + ServiceInputTypes, + ServiceOutputTypes + >; + new ( + __0_0: UpdateContinuousBackupsCommandInput + ): import("@smithy/smithy-client").CommandImpl< + UpdateContinuousBackupsCommandInput, + UpdateContinuousBackupsCommandOutput, + DynamoDBClientResolvedConfig, + ServiceInputTypes, + ServiceOutputTypes + >; + getEndpointParameterInstructions(): import("@smithy/middleware-endpoint").EndpointParameterInstructions; +}; +export declare class UpdateContinuousBackupsCommand extends UpdateContinuousBackupsCommand_base { + protected static __types: { + api: { + input: UpdateContinuousBackupsInput; + output: UpdateContinuousBackupsOutput; + }; + sdk: { + input: UpdateContinuousBackupsCommandInput; + output: UpdateContinuousBackupsCommandOutput; + }; + }; +} diff --git a/amplify/functions/downloadDocument/node_modules/@aws-sdk/client-dynamodb/dist-types/ts3.4/commands/UpdateContributorInsightsCommand.d.ts 
b/amplify/functions/downloadDocument/node_modules/@aws-sdk/client-dynamodb/dist-types/ts3.4/commands/UpdateContributorInsightsCommand.d.ts new file mode 100644 index 0000000..07ce57b --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@aws-sdk/client-dynamodb/dist-types/ts3.4/commands/UpdateContributorInsightsCommand.d.ts @@ -0,0 +1,51 @@ +import { Command as $Command } from "@smithy/smithy-client"; +import { MetadataBearer as __MetadataBearer } from "@smithy/types"; +import { + DynamoDBClientResolvedConfig, + ServiceInputTypes, + ServiceOutputTypes, +} from "../DynamoDBClient"; +import { + UpdateContributorInsightsInput, + UpdateContributorInsightsOutput, +} from "../models/models_0"; +export { __MetadataBearer }; +export { $Command }; +export interface UpdateContributorInsightsCommandInput + extends UpdateContributorInsightsInput {} +export interface UpdateContributorInsightsCommandOutput + extends UpdateContributorInsightsOutput, + __MetadataBearer {} +declare const UpdateContributorInsightsCommand_base: { + new ( + input: UpdateContributorInsightsCommandInput + ): import("@smithy/smithy-client").CommandImpl< + UpdateContributorInsightsCommandInput, + UpdateContributorInsightsCommandOutput, + DynamoDBClientResolvedConfig, + ServiceInputTypes, + ServiceOutputTypes + >; + new ( + __0_0: UpdateContributorInsightsCommandInput + ): import("@smithy/smithy-client").CommandImpl< + UpdateContributorInsightsCommandInput, + UpdateContributorInsightsCommandOutput, + DynamoDBClientResolvedConfig, + ServiceInputTypes, + ServiceOutputTypes + >; + getEndpointParameterInstructions(): import("@smithy/middleware-endpoint").EndpointParameterInstructions; +}; +export declare class UpdateContributorInsightsCommand extends UpdateContributorInsightsCommand_base { + protected static __types: { + api: { + input: UpdateContributorInsightsInput; + output: UpdateContributorInsightsOutput; + }; + sdk: { + input: UpdateContributorInsightsCommandInput; + output: 
UpdateContributorInsightsCommandOutput; + }; + }; +} diff --git a/amplify/functions/downloadDocument/node_modules/@aws-sdk/client-dynamodb/dist-types/ts3.4/commands/UpdateGlobalTableCommand.d.ts b/amplify/functions/downloadDocument/node_modules/@aws-sdk/client-dynamodb/dist-types/ts3.4/commands/UpdateGlobalTableCommand.d.ts new file mode 100644 index 0000000..415ecd8 --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@aws-sdk/client-dynamodb/dist-types/ts3.4/commands/UpdateGlobalTableCommand.d.ts @@ -0,0 +1,50 @@ +import { Command as $Command } from "@smithy/smithy-client"; +import { MetadataBearer as __MetadataBearer } from "@smithy/types"; +import { + DynamoDBClientResolvedConfig, + ServiceInputTypes, + ServiceOutputTypes, +} from "../DynamoDBClient"; +import { + UpdateGlobalTableInput, + UpdateGlobalTableOutput, +} from "../models/models_0"; +export { __MetadataBearer }; +export { $Command }; +export interface UpdateGlobalTableCommandInput extends UpdateGlobalTableInput {} +export interface UpdateGlobalTableCommandOutput + extends UpdateGlobalTableOutput, + __MetadataBearer {} +declare const UpdateGlobalTableCommand_base: { + new ( + input: UpdateGlobalTableCommandInput + ): import("@smithy/smithy-client").CommandImpl< + UpdateGlobalTableCommandInput, + UpdateGlobalTableCommandOutput, + DynamoDBClientResolvedConfig, + ServiceInputTypes, + ServiceOutputTypes + >; + new ( + __0_0: UpdateGlobalTableCommandInput + ): import("@smithy/smithy-client").CommandImpl< + UpdateGlobalTableCommandInput, + UpdateGlobalTableCommandOutput, + DynamoDBClientResolvedConfig, + ServiceInputTypes, + ServiceOutputTypes + >; + getEndpointParameterInstructions(): import("@smithy/middleware-endpoint").EndpointParameterInstructions; +}; +export declare class UpdateGlobalTableCommand extends UpdateGlobalTableCommand_base { + protected static __types: { + api: { + input: UpdateGlobalTableInput; + output: UpdateGlobalTableOutput; + }; + sdk: { + input: 
UpdateGlobalTableCommandInput; + output: UpdateGlobalTableCommandOutput; + }; + }; +} diff --git a/amplify/functions/downloadDocument/node_modules/@aws-sdk/client-dynamodb/dist-types/ts3.4/commands/UpdateGlobalTableSettingsCommand.d.ts b/amplify/functions/downloadDocument/node_modules/@aws-sdk/client-dynamodb/dist-types/ts3.4/commands/UpdateGlobalTableSettingsCommand.d.ts new file mode 100644 index 0000000..653ae6c --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@aws-sdk/client-dynamodb/dist-types/ts3.4/commands/UpdateGlobalTableSettingsCommand.d.ts @@ -0,0 +1,51 @@ +import { Command as $Command } from "@smithy/smithy-client"; +import { MetadataBearer as __MetadataBearer } from "@smithy/types"; +import { + DynamoDBClientResolvedConfig, + ServiceInputTypes, + ServiceOutputTypes, +} from "../DynamoDBClient"; +import { + UpdateGlobalTableSettingsInput, + UpdateGlobalTableSettingsOutput, +} from "../models/models_0"; +export { __MetadataBearer }; +export { $Command }; +export interface UpdateGlobalTableSettingsCommandInput + extends UpdateGlobalTableSettingsInput {} +export interface UpdateGlobalTableSettingsCommandOutput + extends UpdateGlobalTableSettingsOutput, + __MetadataBearer {} +declare const UpdateGlobalTableSettingsCommand_base: { + new ( + input: UpdateGlobalTableSettingsCommandInput + ): import("@smithy/smithy-client").CommandImpl< + UpdateGlobalTableSettingsCommandInput, + UpdateGlobalTableSettingsCommandOutput, + DynamoDBClientResolvedConfig, + ServiceInputTypes, + ServiceOutputTypes + >; + new ( + __0_0: UpdateGlobalTableSettingsCommandInput + ): import("@smithy/smithy-client").CommandImpl< + UpdateGlobalTableSettingsCommandInput, + UpdateGlobalTableSettingsCommandOutput, + DynamoDBClientResolvedConfig, + ServiceInputTypes, + ServiceOutputTypes + >; + getEndpointParameterInstructions(): import("@smithy/middleware-endpoint").EndpointParameterInstructions; +}; +export declare class UpdateGlobalTableSettingsCommand extends 
UpdateGlobalTableSettingsCommand_base { + protected static __types: { + api: { + input: UpdateGlobalTableSettingsInput; + output: UpdateGlobalTableSettingsOutput; + }; + sdk: { + input: UpdateGlobalTableSettingsCommandInput; + output: UpdateGlobalTableSettingsCommandOutput; + }; + }; +} diff --git a/amplify/functions/downloadDocument/node_modules/@aws-sdk/client-dynamodb/dist-types/ts3.4/commands/UpdateItemCommand.d.ts b/amplify/functions/downloadDocument/node_modules/@aws-sdk/client-dynamodb/dist-types/ts3.4/commands/UpdateItemCommand.d.ts new file mode 100644 index 0000000..b302067 --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@aws-sdk/client-dynamodb/dist-types/ts3.4/commands/UpdateItemCommand.d.ts @@ -0,0 +1,47 @@ +import { Command as $Command } from "@smithy/smithy-client"; +import { MetadataBearer as __MetadataBearer } from "@smithy/types"; +import { + DynamoDBClientResolvedConfig, + ServiceInputTypes, + ServiceOutputTypes, +} from "../DynamoDBClient"; +import { UpdateItemInput, UpdateItemOutput } from "../models/models_0"; +export { __MetadataBearer }; +export { $Command }; +export interface UpdateItemCommandInput extends UpdateItemInput {} +export interface UpdateItemCommandOutput + extends UpdateItemOutput, + __MetadataBearer {} +declare const UpdateItemCommand_base: { + new ( + input: UpdateItemCommandInput + ): import("@smithy/smithy-client").CommandImpl< + UpdateItemCommandInput, + UpdateItemCommandOutput, + DynamoDBClientResolvedConfig, + ServiceInputTypes, + ServiceOutputTypes + >; + new ( + __0_0: UpdateItemCommandInput + ): import("@smithy/smithy-client").CommandImpl< + UpdateItemCommandInput, + UpdateItemCommandOutput, + DynamoDBClientResolvedConfig, + ServiceInputTypes, + ServiceOutputTypes + >; + getEndpointParameterInstructions(): import("@smithy/middleware-endpoint").EndpointParameterInstructions; +}; +export declare class UpdateItemCommand extends UpdateItemCommand_base { + protected static __types: { + api: { + input: 
UpdateItemInput; + output: UpdateItemOutput; + }; + sdk: { + input: UpdateItemCommandInput; + output: UpdateItemCommandOutput; + }; + }; +} diff --git a/amplify/functions/downloadDocument/node_modules/@aws-sdk/client-dynamodb/dist-types/ts3.4/commands/UpdateKinesisStreamingDestinationCommand.d.ts b/amplify/functions/downloadDocument/node_modules/@aws-sdk/client-dynamodb/dist-types/ts3.4/commands/UpdateKinesisStreamingDestinationCommand.d.ts new file mode 100644 index 0000000..a36aa7d --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@aws-sdk/client-dynamodb/dist-types/ts3.4/commands/UpdateKinesisStreamingDestinationCommand.d.ts @@ -0,0 +1,51 @@ +import { Command as $Command } from "@smithy/smithy-client"; +import { MetadataBearer as __MetadataBearer } from "@smithy/types"; +import { + DynamoDBClientResolvedConfig, + ServiceInputTypes, + ServiceOutputTypes, +} from "../DynamoDBClient"; +import { + UpdateKinesisStreamingDestinationInput, + UpdateKinesisStreamingDestinationOutput, +} from "../models/models_0"; +export { __MetadataBearer }; +export { $Command }; +export interface UpdateKinesisStreamingDestinationCommandInput + extends UpdateKinesisStreamingDestinationInput {} +export interface UpdateKinesisStreamingDestinationCommandOutput + extends UpdateKinesisStreamingDestinationOutput, + __MetadataBearer {} +declare const UpdateKinesisStreamingDestinationCommand_base: { + new ( + input: UpdateKinesisStreamingDestinationCommandInput + ): import("@smithy/smithy-client").CommandImpl< + UpdateKinesisStreamingDestinationCommandInput, + UpdateKinesisStreamingDestinationCommandOutput, + DynamoDBClientResolvedConfig, + ServiceInputTypes, + ServiceOutputTypes + >; + new ( + __0_0: UpdateKinesisStreamingDestinationCommandInput + ): import("@smithy/smithy-client").CommandImpl< + UpdateKinesisStreamingDestinationCommandInput, + UpdateKinesisStreamingDestinationCommandOutput, + DynamoDBClientResolvedConfig, + ServiceInputTypes, + ServiceOutputTypes + >; + 
getEndpointParameterInstructions(): import("@smithy/middleware-endpoint").EndpointParameterInstructions; +}; +export declare class UpdateKinesisStreamingDestinationCommand extends UpdateKinesisStreamingDestinationCommand_base { + protected static __types: { + api: { + input: UpdateKinesisStreamingDestinationInput; + output: UpdateKinesisStreamingDestinationOutput; + }; + sdk: { + input: UpdateKinesisStreamingDestinationCommandInput; + output: UpdateKinesisStreamingDestinationCommandOutput; + }; + }; +} diff --git a/amplify/functions/downloadDocument/node_modules/@aws-sdk/client-dynamodb/dist-types/ts3.4/commands/UpdateTableCommand.d.ts b/amplify/functions/downloadDocument/node_modules/@aws-sdk/client-dynamodb/dist-types/ts3.4/commands/UpdateTableCommand.d.ts new file mode 100644 index 0000000..59afb16 --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@aws-sdk/client-dynamodb/dist-types/ts3.4/commands/UpdateTableCommand.d.ts @@ -0,0 +1,47 @@ +import { Command as $Command } from "@smithy/smithy-client"; +import { MetadataBearer as __MetadataBearer } from "@smithy/types"; +import { + DynamoDBClientResolvedConfig, + ServiceInputTypes, + ServiceOutputTypes, +} from "../DynamoDBClient"; +import { UpdateTableInput, UpdateTableOutput } from "../models/models_0"; +export { __MetadataBearer }; +export { $Command }; +export interface UpdateTableCommandInput extends UpdateTableInput {} +export interface UpdateTableCommandOutput + extends UpdateTableOutput, + __MetadataBearer {} +declare const UpdateTableCommand_base: { + new ( + input: UpdateTableCommandInput + ): import("@smithy/smithy-client").CommandImpl< + UpdateTableCommandInput, + UpdateTableCommandOutput, + DynamoDBClientResolvedConfig, + ServiceInputTypes, + ServiceOutputTypes + >; + new ( + __0_0: UpdateTableCommandInput + ): import("@smithy/smithy-client").CommandImpl< + UpdateTableCommandInput, + UpdateTableCommandOutput, + DynamoDBClientResolvedConfig, + ServiceInputTypes, + ServiceOutputTypes + 
>; + getEndpointParameterInstructions(): import("@smithy/middleware-endpoint").EndpointParameterInstructions; +}; +export declare class UpdateTableCommand extends UpdateTableCommand_base { + protected static __types: { + api: { + input: UpdateTableInput; + output: UpdateTableOutput; + }; + sdk: { + input: UpdateTableCommandInput; + output: UpdateTableCommandOutput; + }; + }; +} diff --git a/amplify/functions/downloadDocument/node_modules/@aws-sdk/client-dynamodb/dist-types/ts3.4/commands/UpdateTableReplicaAutoScalingCommand.d.ts b/amplify/functions/downloadDocument/node_modules/@aws-sdk/client-dynamodb/dist-types/ts3.4/commands/UpdateTableReplicaAutoScalingCommand.d.ts new file mode 100644 index 0000000..9f2925f --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@aws-sdk/client-dynamodb/dist-types/ts3.4/commands/UpdateTableReplicaAutoScalingCommand.d.ts @@ -0,0 +1,51 @@ +import { Command as $Command } from "@smithy/smithy-client"; +import { MetadataBearer as __MetadataBearer } from "@smithy/types"; +import { + DynamoDBClientResolvedConfig, + ServiceInputTypes, + ServiceOutputTypes, +} from "../DynamoDBClient"; +import { + UpdateTableReplicaAutoScalingInput, + UpdateTableReplicaAutoScalingOutput, +} from "../models/models_0"; +export { __MetadataBearer }; +export { $Command }; +export interface UpdateTableReplicaAutoScalingCommandInput + extends UpdateTableReplicaAutoScalingInput {} +export interface UpdateTableReplicaAutoScalingCommandOutput + extends UpdateTableReplicaAutoScalingOutput, + __MetadataBearer {} +declare const UpdateTableReplicaAutoScalingCommand_base: { + new ( + input: UpdateTableReplicaAutoScalingCommandInput + ): import("@smithy/smithy-client").CommandImpl< + UpdateTableReplicaAutoScalingCommandInput, + UpdateTableReplicaAutoScalingCommandOutput, + DynamoDBClientResolvedConfig, + ServiceInputTypes, + ServiceOutputTypes + >; + new ( + __0_0: UpdateTableReplicaAutoScalingCommandInput + ): import("@smithy/smithy-client").CommandImpl< 
+ UpdateTableReplicaAutoScalingCommandInput, + UpdateTableReplicaAutoScalingCommandOutput, + DynamoDBClientResolvedConfig, + ServiceInputTypes, + ServiceOutputTypes + >; + getEndpointParameterInstructions(): import("@smithy/middleware-endpoint").EndpointParameterInstructions; +}; +export declare class UpdateTableReplicaAutoScalingCommand extends UpdateTableReplicaAutoScalingCommand_base { + protected static __types: { + api: { + input: UpdateTableReplicaAutoScalingInput; + output: UpdateTableReplicaAutoScalingOutput; + }; + sdk: { + input: UpdateTableReplicaAutoScalingCommandInput; + output: UpdateTableReplicaAutoScalingCommandOutput; + }; + }; +} diff --git a/amplify/functions/downloadDocument/node_modules/@aws-sdk/client-dynamodb/dist-types/ts3.4/commands/UpdateTimeToLiveCommand.d.ts b/amplify/functions/downloadDocument/node_modules/@aws-sdk/client-dynamodb/dist-types/ts3.4/commands/UpdateTimeToLiveCommand.d.ts new file mode 100644 index 0000000..a3f7b3b --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@aws-sdk/client-dynamodb/dist-types/ts3.4/commands/UpdateTimeToLiveCommand.d.ts @@ -0,0 +1,50 @@ +import { Command as $Command } from "@smithy/smithy-client"; +import { MetadataBearer as __MetadataBearer } from "@smithy/types"; +import { + DynamoDBClientResolvedConfig, + ServiceInputTypes, + ServiceOutputTypes, +} from "../DynamoDBClient"; +import { + UpdateTimeToLiveInput, + UpdateTimeToLiveOutput, +} from "../models/models_0"; +export { __MetadataBearer }; +export { $Command }; +export interface UpdateTimeToLiveCommandInput extends UpdateTimeToLiveInput {} +export interface UpdateTimeToLiveCommandOutput + extends UpdateTimeToLiveOutput, + __MetadataBearer {} +declare const UpdateTimeToLiveCommand_base: { + new ( + input: UpdateTimeToLiveCommandInput + ): import("@smithy/smithy-client").CommandImpl< + UpdateTimeToLiveCommandInput, + UpdateTimeToLiveCommandOutput, + DynamoDBClientResolvedConfig, + ServiceInputTypes, + ServiceOutputTypes + >; + 
new ( + __0_0: UpdateTimeToLiveCommandInput + ): import("@smithy/smithy-client").CommandImpl< + UpdateTimeToLiveCommandInput, + UpdateTimeToLiveCommandOutput, + DynamoDBClientResolvedConfig, + ServiceInputTypes, + ServiceOutputTypes + >; + getEndpointParameterInstructions(): import("@smithy/middleware-endpoint").EndpointParameterInstructions; +}; +export declare class UpdateTimeToLiveCommand extends UpdateTimeToLiveCommand_base { + protected static __types: { + api: { + input: UpdateTimeToLiveInput; + output: UpdateTimeToLiveOutput; + }; + sdk: { + input: UpdateTimeToLiveCommandInput; + output: UpdateTimeToLiveCommandOutput; + }; + }; +} diff --git a/amplify/functions/downloadDocument/node_modules/@aws-sdk/client-dynamodb/dist-types/ts3.4/commands/index.d.ts b/amplify/functions/downloadDocument/node_modules/@aws-sdk/client-dynamodb/dist-types/ts3.4/commands/index.d.ts new file mode 100644 index 0000000..a5053a4 --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@aws-sdk/client-dynamodb/dist-types/ts3.4/commands/index.d.ts @@ -0,0 +1,57 @@ +export * from "./BatchExecuteStatementCommand"; +export * from "./BatchGetItemCommand"; +export * from "./BatchWriteItemCommand"; +export * from "./CreateBackupCommand"; +export * from "./CreateGlobalTableCommand"; +export * from "./CreateTableCommand"; +export * from "./DeleteBackupCommand"; +export * from "./DeleteItemCommand"; +export * from "./DeleteResourcePolicyCommand"; +export * from "./DeleteTableCommand"; +export * from "./DescribeBackupCommand"; +export * from "./DescribeContinuousBackupsCommand"; +export * from "./DescribeContributorInsightsCommand"; +export * from "./DescribeEndpointsCommand"; +export * from "./DescribeExportCommand"; +export * from "./DescribeGlobalTableCommand"; +export * from "./DescribeGlobalTableSettingsCommand"; +export * from "./DescribeImportCommand"; +export * from "./DescribeKinesisStreamingDestinationCommand"; +export * from "./DescribeLimitsCommand"; +export * from 
"./DescribeTableCommand"; +export * from "./DescribeTableReplicaAutoScalingCommand"; +export * from "./DescribeTimeToLiveCommand"; +export * from "./DisableKinesisStreamingDestinationCommand"; +export * from "./EnableKinesisStreamingDestinationCommand"; +export * from "./ExecuteStatementCommand"; +export * from "./ExecuteTransactionCommand"; +export * from "./ExportTableToPointInTimeCommand"; +export * from "./GetItemCommand"; +export * from "./GetResourcePolicyCommand"; +export * from "./ImportTableCommand"; +export * from "./ListBackupsCommand"; +export * from "./ListContributorInsightsCommand"; +export * from "./ListExportsCommand"; +export * from "./ListGlobalTablesCommand"; +export * from "./ListImportsCommand"; +export * from "./ListTablesCommand"; +export * from "./ListTagsOfResourceCommand"; +export * from "./PutItemCommand"; +export * from "./PutResourcePolicyCommand"; +export * from "./QueryCommand"; +export * from "./RestoreTableFromBackupCommand"; +export * from "./RestoreTableToPointInTimeCommand"; +export * from "./ScanCommand"; +export * from "./TagResourceCommand"; +export * from "./TransactGetItemsCommand"; +export * from "./TransactWriteItemsCommand"; +export * from "./UntagResourceCommand"; +export * from "./UpdateContinuousBackupsCommand"; +export * from "./UpdateContributorInsightsCommand"; +export * from "./UpdateGlobalTableCommand"; +export * from "./UpdateGlobalTableSettingsCommand"; +export * from "./UpdateItemCommand"; +export * from "./UpdateKinesisStreamingDestinationCommand"; +export * from "./UpdateTableCommand"; +export * from "./UpdateTableReplicaAutoScalingCommand"; +export * from "./UpdateTimeToLiveCommand"; diff --git a/amplify/functions/downloadDocument/node_modules/@aws-sdk/client-dynamodb/dist-types/ts3.4/endpoint/EndpointParameters.d.ts b/amplify/functions/downloadDocument/node_modules/@aws-sdk/client-dynamodb/dist-types/ts3.4/endpoint/EndpointParameters.d.ts new file mode 100644 index 0000000..bef37c3 --- /dev/null +++ 
b/amplify/functions/downloadDocument/node_modules/@aws-sdk/client-dynamodb/dist-types/ts3.4/endpoint/EndpointParameters.d.ts @@ -0,0 +1,65 @@ +import { + Endpoint, + EndpointParameters as __EndpointParameters, + EndpointV2, + Provider, +} from "@smithy/types"; +export interface ClientInputEndpointParameters { + region?: string | Provider; + useDualstackEndpoint?: boolean | Provider; + useFipsEndpoint?: boolean | Provider; + endpoint?: + | string + | Provider + | Endpoint + | Provider + | EndpointV2 + | Provider; + accountId?: string | Provider; + accountIdEndpointMode?: string | Provider; +} +export type ClientResolvedEndpointParameters = ClientInputEndpointParameters & { + defaultSigningName: string; +}; +export declare const resolveClientEndpointParameters: ( + options: T & ClientInputEndpointParameters +) => T & + ClientInputEndpointParameters & { + defaultSigningName: string; + }; +export declare const commonParams: { + readonly UseFIPS: { + readonly type: "builtInParams"; + readonly name: "useFipsEndpoint"; + }; + readonly AccountId: { + readonly type: "builtInParams"; + readonly name: "accountId"; + }; + readonly Endpoint: { + readonly type: "builtInParams"; + readonly name: "endpoint"; + }; + readonly Region: { + readonly type: "builtInParams"; + readonly name: "region"; + }; + readonly UseDualStack: { + readonly type: "builtInParams"; + readonly name: "useDualstackEndpoint"; + }; + readonly AccountIdEndpointMode: { + readonly type: "builtInParams"; + readonly name: "accountIdEndpointMode"; + }; +}; +export interface EndpointParameters extends __EndpointParameters { + Region?: string; + UseDualStack?: boolean; + UseFIPS?: boolean; + Endpoint?: string; + AccountId?: string; + AccountIdEndpointMode?: string; + ResourceArn?: string; + ResourceArnList?: string[]; +} diff --git a/amplify/functions/downloadDocument/node_modules/@aws-sdk/client-dynamodb/dist-types/ts3.4/endpoint/endpointResolver.d.ts 
b/amplify/functions/downloadDocument/node_modules/@aws-sdk/client-dynamodb/dist-types/ts3.4/endpoint/endpointResolver.d.ts new file mode 100644 index 0000000..5909925 --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@aws-sdk/client-dynamodb/dist-types/ts3.4/endpoint/endpointResolver.d.ts @@ -0,0 +1,8 @@ +import { EndpointV2, Logger } from "@smithy/types"; +import { EndpointParameters } from "./EndpointParameters"; +export declare const defaultEndpointResolver: ( + endpointParams: EndpointParameters, + context?: { + logger?: Logger; + } +) => EndpointV2; diff --git a/amplify/functions/downloadDocument/node_modules/@aws-sdk/client-dynamodb/dist-types/ts3.4/endpoint/ruleset.d.ts b/amplify/functions/downloadDocument/node_modules/@aws-sdk/client-dynamodb/dist-types/ts3.4/endpoint/ruleset.d.ts new file mode 100644 index 0000000..4b23899 --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@aws-sdk/client-dynamodb/dist-types/ts3.4/endpoint/ruleset.d.ts @@ -0,0 +1,2 @@ +import { RuleSetObject } from "@smithy/types"; +export declare const ruleSet: RuleSetObject; diff --git a/amplify/functions/downloadDocument/node_modules/@aws-sdk/client-dynamodb/dist-types/ts3.4/extensionConfiguration.d.ts b/amplify/functions/downloadDocument/node_modules/@aws-sdk/client-dynamodb/dist-types/ts3.4/extensionConfiguration.d.ts new file mode 100644 index 0000000..7db993d --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@aws-sdk/client-dynamodb/dist-types/ts3.4/extensionConfiguration.d.ts @@ -0,0 +1,9 @@ +import { AwsRegionExtensionConfiguration } from "@aws-sdk/types"; +import { HttpHandlerExtensionConfiguration } from "@smithy/protocol-http"; +import { DefaultExtensionConfiguration } from "@smithy/types"; +import { HttpAuthExtensionConfiguration } from "./auth/httpAuthExtensionConfiguration"; +export interface DynamoDBExtensionConfiguration + extends HttpHandlerExtensionConfiguration, + DefaultExtensionConfiguration, + 
AwsRegionExtensionConfiguration, + HttpAuthExtensionConfiguration {} diff --git a/amplify/functions/downloadDocument/node_modules/@aws-sdk/client-dynamodb/dist-types/ts3.4/index.d.ts b/amplify/functions/downloadDocument/node_modules/@aws-sdk/client-dynamodb/dist-types/ts3.4/index.d.ts new file mode 100644 index 0000000..58fb2ff --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@aws-sdk/client-dynamodb/dist-types/ts3.4/index.d.ts @@ -0,0 +1,10 @@ +export * from "./DynamoDBClient"; +export * from "./DynamoDB"; +export { ClientInputEndpointParameters } from "./endpoint/EndpointParameters"; +export { RuntimeExtension } from "./runtimeExtensions"; +export { DynamoDBExtensionConfiguration } from "./extensionConfiguration"; +export * from "./commands"; +export * from "./pagination"; +export * from "./waiters"; +export * from "./models"; +export { DynamoDBServiceException } from "./models/DynamoDBServiceException"; diff --git a/amplify/functions/downloadDocument/node_modules/@aws-sdk/client-dynamodb/dist-types/ts3.4/models/DynamoDBServiceException.d.ts b/amplify/functions/downloadDocument/node_modules/@aws-sdk/client-dynamodb/dist-types/ts3.4/models/DynamoDBServiceException.d.ts new file mode 100644 index 0000000..e5bd2c6 --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@aws-sdk/client-dynamodb/dist-types/ts3.4/models/DynamoDBServiceException.d.ts @@ -0,0 +1,9 @@ +import { + ServiceException as __ServiceException, + ServiceExceptionOptions as __ServiceExceptionOptions, +} from "@smithy/smithy-client"; +export { __ServiceExceptionOptions }; +export { __ServiceException }; +export declare class DynamoDBServiceException extends __ServiceException { + constructor(options: __ServiceExceptionOptions); +} diff --git a/amplify/functions/downloadDocument/node_modules/@aws-sdk/client-dynamodb/dist-types/ts3.4/models/index.d.ts b/amplify/functions/downloadDocument/node_modules/@aws-sdk/client-dynamodb/dist-types/ts3.4/models/index.d.ts new file 
mode 100644 index 0000000..09c5d6e --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@aws-sdk/client-dynamodb/dist-types/ts3.4/models/index.d.ts @@ -0,0 +1 @@ +export * from "./models_0"; diff --git a/amplify/functions/downloadDocument/node_modules/@aws-sdk/client-dynamodb/dist-types/ts3.4/models/models_0.d.ts b/amplify/functions/downloadDocument/node_modules/@aws-sdk/client-dynamodb/dist-types/ts3.4/models/models_0.d.ts new file mode 100644 index 0000000..3acf86a --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@aws-sdk/client-dynamodb/dist-types/ts3.4/models/models_0.d.ts @@ -0,0 +1,2036 @@ +import { ExceptionOptionType as __ExceptionOptionType } from "@smithy/smithy-client"; +import { DynamoDBServiceException as __BaseException } from "./DynamoDBServiceException"; +export declare const ApproximateCreationDateTimePrecision: { + readonly MICROSECOND: "MICROSECOND"; + readonly MILLISECOND: "MILLISECOND"; +}; +export type ApproximateCreationDateTimePrecision = + (typeof ApproximateCreationDateTimePrecision)[keyof typeof ApproximateCreationDateTimePrecision]; +export interface ArchivalSummary { + ArchivalDateTime?: Date | undefined; + ArchivalReason?: string | undefined; + ArchivalBackupArn?: string | undefined; +} +export declare const AttributeAction: { + readonly ADD: "ADD"; + readonly DELETE: "DELETE"; + readonly PUT: "PUT"; +}; +export type AttributeAction = + (typeof AttributeAction)[keyof typeof AttributeAction]; +export declare const ScalarAttributeType: { + readonly B: "B"; + readonly N: "N"; + readonly S: "S"; +}; +export type ScalarAttributeType = + (typeof ScalarAttributeType)[keyof typeof ScalarAttributeType]; +export interface AttributeDefinition { + AttributeName: string | undefined; + AttributeType: ScalarAttributeType | undefined; +} +export interface AutoScalingTargetTrackingScalingPolicyConfigurationDescription { + DisableScaleIn?: boolean | undefined; + ScaleInCooldown?: number | undefined; + ScaleOutCooldown?: 
number | undefined; + TargetValue: number | undefined; +} +export interface AutoScalingPolicyDescription { + PolicyName?: string | undefined; + TargetTrackingScalingPolicyConfiguration?: + | AutoScalingTargetTrackingScalingPolicyConfigurationDescription + | undefined; +} +export interface AutoScalingTargetTrackingScalingPolicyConfigurationUpdate { + DisableScaleIn?: boolean | undefined; + ScaleInCooldown?: number | undefined; + ScaleOutCooldown?: number | undefined; + TargetValue: number | undefined; +} +export interface AutoScalingPolicyUpdate { + PolicyName?: string | undefined; + TargetTrackingScalingPolicyConfiguration: + | AutoScalingTargetTrackingScalingPolicyConfigurationUpdate + | undefined; +} +export interface AutoScalingSettingsDescription { + MinimumUnits?: number | undefined; + MaximumUnits?: number | undefined; + AutoScalingDisabled?: boolean | undefined; + AutoScalingRoleArn?: string | undefined; + ScalingPolicies?: AutoScalingPolicyDescription[] | undefined; +} +export interface AutoScalingSettingsUpdate { + MinimumUnits?: number | undefined; + MaximumUnits?: number | undefined; + AutoScalingDisabled?: boolean | undefined; + AutoScalingRoleArn?: string | undefined; + ScalingPolicyUpdate?: AutoScalingPolicyUpdate | undefined; +} +export declare const BackupStatus: { + readonly AVAILABLE: "AVAILABLE"; + readonly CREATING: "CREATING"; + readonly DELETED: "DELETED"; +}; +export type BackupStatus = (typeof BackupStatus)[keyof typeof BackupStatus]; +export declare const BackupType: { + readonly AWS_BACKUP: "AWS_BACKUP"; + readonly SYSTEM: "SYSTEM"; + readonly USER: "USER"; +}; +export type BackupType = (typeof BackupType)[keyof typeof BackupType]; +export interface BackupDetails { + BackupArn: string | undefined; + BackupName: string | undefined; + BackupSizeBytes?: number | undefined; + BackupStatus: BackupStatus | undefined; + BackupType: BackupType | undefined; + BackupCreationDateTime: Date | undefined; + BackupExpiryDateTime?: Date | undefined; +} 
+export declare const BillingMode: { + readonly PAY_PER_REQUEST: "PAY_PER_REQUEST"; + readonly PROVISIONED: "PROVISIONED"; +}; +export type BillingMode = (typeof BillingMode)[keyof typeof BillingMode]; +export declare const KeyType: { + readonly HASH: "HASH"; + readonly RANGE: "RANGE"; +}; +export type KeyType = (typeof KeyType)[keyof typeof KeyType]; +export interface KeySchemaElement { + AttributeName: string | undefined; + KeyType: KeyType | undefined; +} +export interface OnDemandThroughput { + MaxReadRequestUnits?: number | undefined; + MaxWriteRequestUnits?: number | undefined; +} +export interface ProvisionedThroughput { + ReadCapacityUnits: number | undefined; + WriteCapacityUnits: number | undefined; +} +export interface SourceTableDetails { + TableName: string | undefined; + TableId: string | undefined; + TableArn?: string | undefined; + TableSizeBytes?: number | undefined; + KeySchema: KeySchemaElement[] | undefined; + TableCreationDateTime: Date | undefined; + ProvisionedThroughput: ProvisionedThroughput | undefined; + OnDemandThroughput?: OnDemandThroughput | undefined; + ItemCount?: number | undefined; + BillingMode?: BillingMode | undefined; +} +export declare const ProjectionType: { + readonly ALL: "ALL"; + readonly INCLUDE: "INCLUDE"; + readonly KEYS_ONLY: "KEYS_ONLY"; +}; +export type ProjectionType = + (typeof ProjectionType)[keyof typeof ProjectionType]; +export interface Projection { + ProjectionType?: ProjectionType | undefined; + NonKeyAttributes?: string[] | undefined; +} +export interface GlobalSecondaryIndexInfo { + IndexName?: string | undefined; + KeySchema?: KeySchemaElement[] | undefined; + Projection?: Projection | undefined; + ProvisionedThroughput?: ProvisionedThroughput | undefined; + OnDemandThroughput?: OnDemandThroughput | undefined; +} +export interface LocalSecondaryIndexInfo { + IndexName?: string | undefined; + KeySchema?: KeySchemaElement[] | undefined; + Projection?: Projection | undefined; +} +export declare const 
SSEType: { + readonly AES256: "AES256"; + readonly KMS: "KMS"; +}; +export type SSEType = (typeof SSEType)[keyof typeof SSEType]; +export declare const SSEStatus: { + readonly DISABLED: "DISABLED"; + readonly DISABLING: "DISABLING"; + readonly ENABLED: "ENABLED"; + readonly ENABLING: "ENABLING"; + readonly UPDATING: "UPDATING"; +}; +export type SSEStatus = (typeof SSEStatus)[keyof typeof SSEStatus]; +export interface SSEDescription { + Status?: SSEStatus | undefined; + SSEType?: SSEType | undefined; + KMSMasterKeyArn?: string | undefined; + InaccessibleEncryptionDateTime?: Date | undefined; +} +export declare const StreamViewType: { + readonly KEYS_ONLY: "KEYS_ONLY"; + readonly NEW_AND_OLD_IMAGES: "NEW_AND_OLD_IMAGES"; + readonly NEW_IMAGE: "NEW_IMAGE"; + readonly OLD_IMAGE: "OLD_IMAGE"; +}; +export type StreamViewType = + (typeof StreamViewType)[keyof typeof StreamViewType]; +export interface StreamSpecification { + StreamEnabled: boolean | undefined; + StreamViewType?: StreamViewType | undefined; +} +export declare const TimeToLiveStatus: { + readonly DISABLED: "DISABLED"; + readonly DISABLING: "DISABLING"; + readonly ENABLED: "ENABLED"; + readonly ENABLING: "ENABLING"; +}; +export type TimeToLiveStatus = + (typeof TimeToLiveStatus)[keyof typeof TimeToLiveStatus]; +export interface TimeToLiveDescription { + TimeToLiveStatus?: TimeToLiveStatus | undefined; + AttributeName?: string | undefined; +} +export interface SourceTableFeatureDetails { + LocalSecondaryIndexes?: LocalSecondaryIndexInfo[] | undefined; + GlobalSecondaryIndexes?: GlobalSecondaryIndexInfo[] | undefined; + StreamDescription?: StreamSpecification | undefined; + TimeToLiveDescription?: TimeToLiveDescription | undefined; + SSEDescription?: SSEDescription | undefined; +} +export interface BackupDescription { + BackupDetails?: BackupDetails | undefined; + SourceTableDetails?: SourceTableDetails | undefined; + SourceTableFeatureDetails?: SourceTableFeatureDetails | undefined; +} +export declare class 
BackupInUseException extends __BaseException { + readonly name: "BackupInUseException"; + readonly $fault: "client"; + constructor( + opts: __ExceptionOptionType + ); +} +export declare class BackupNotFoundException extends __BaseException { + readonly name: "BackupNotFoundException"; + readonly $fault: "client"; + constructor( + opts: __ExceptionOptionType + ); +} +export interface BackupSummary { + TableName?: string | undefined; + TableId?: string | undefined; + TableArn?: string | undefined; + BackupArn?: string | undefined; + BackupName?: string | undefined; + BackupCreationDateTime?: Date | undefined; + BackupExpiryDateTime?: Date | undefined; + BackupStatus?: BackupStatus | undefined; + BackupType?: BackupType | undefined; + BackupSizeBytes?: number | undefined; +} +export declare const BackupTypeFilter: { + readonly ALL: "ALL"; + readonly AWS_BACKUP: "AWS_BACKUP"; + readonly SYSTEM: "SYSTEM"; + readonly USER: "USER"; +}; +export type BackupTypeFilter = + (typeof BackupTypeFilter)[keyof typeof BackupTypeFilter]; +export declare const ReturnConsumedCapacity: { + readonly INDEXES: "INDEXES"; + readonly NONE: "NONE"; + readonly TOTAL: "TOTAL"; +}; +export type ReturnConsumedCapacity = + (typeof ReturnConsumedCapacity)[keyof typeof ReturnConsumedCapacity]; +export declare const ReturnValuesOnConditionCheckFailure: { + readonly ALL_OLD: "ALL_OLD"; + readonly NONE: "NONE"; +}; +export type ReturnValuesOnConditionCheckFailure = + (typeof ReturnValuesOnConditionCheckFailure)[keyof typeof ReturnValuesOnConditionCheckFailure]; +export interface Capacity { + ReadCapacityUnits?: number | undefined; + WriteCapacityUnits?: number | undefined; + CapacityUnits?: number | undefined; +} +export interface ConsumedCapacity { + TableName?: string | undefined; + CapacityUnits?: number | undefined; + ReadCapacityUnits?: number | undefined; + WriteCapacityUnits?: number | undefined; + Table?: Capacity | undefined; + LocalSecondaryIndexes?: Record | undefined; + 
GlobalSecondaryIndexes?: Record | undefined; +} +export declare const BatchStatementErrorCodeEnum: { + readonly AccessDenied: "AccessDenied"; + readonly ConditionalCheckFailed: "ConditionalCheckFailed"; + readonly DuplicateItem: "DuplicateItem"; + readonly InternalServerError: "InternalServerError"; + readonly ItemCollectionSizeLimitExceeded: "ItemCollectionSizeLimitExceeded"; + readonly ProvisionedThroughputExceeded: "ProvisionedThroughputExceeded"; + readonly RequestLimitExceeded: "RequestLimitExceeded"; + readonly ResourceNotFound: "ResourceNotFound"; + readonly ThrottlingError: "ThrottlingError"; + readonly TransactionConflict: "TransactionConflict"; + readonly ValidationError: "ValidationError"; +}; +export type BatchStatementErrorCodeEnum = + (typeof BatchStatementErrorCodeEnum)[keyof typeof BatchStatementErrorCodeEnum]; +export declare class InternalServerError extends __BaseException { + readonly name: "InternalServerError"; + readonly $fault: "server"; + constructor( + opts: __ExceptionOptionType + ); +} +export declare class RequestLimitExceeded extends __BaseException { + readonly name: "RequestLimitExceeded"; + readonly $fault: "client"; + constructor( + opts: __ExceptionOptionType + ); +} +export declare class InvalidEndpointException extends __BaseException { + readonly name: "InvalidEndpointException"; + readonly $fault: "client"; + Message?: string | undefined; + constructor( + opts: __ExceptionOptionType + ); +} +export declare class ProvisionedThroughputExceededException extends __BaseException { + readonly name: "ProvisionedThroughputExceededException"; + readonly $fault: "client"; + constructor( + opts: __ExceptionOptionType< + ProvisionedThroughputExceededException, + __BaseException + > + ); +} +export declare class ResourceNotFoundException extends __BaseException { + readonly name: "ResourceNotFoundException"; + readonly $fault: "client"; + constructor( + opts: __ExceptionOptionType + ); +} +export declare const ReturnItemCollectionMetrics: 
{ + readonly NONE: "NONE"; + readonly SIZE: "SIZE"; +}; +export type ReturnItemCollectionMetrics = + (typeof ReturnItemCollectionMetrics)[keyof typeof ReturnItemCollectionMetrics]; +export declare class ItemCollectionSizeLimitExceededException extends __BaseException { + readonly name: "ItemCollectionSizeLimitExceededException"; + readonly $fault: "client"; + constructor( + opts: __ExceptionOptionType< + ItemCollectionSizeLimitExceededException, + __BaseException + > + ); +} +export interface BillingModeSummary { + BillingMode?: BillingMode | undefined; + LastUpdateToPayPerRequestDateTime?: Date | undefined; +} +export declare const ComparisonOperator: { + readonly BEGINS_WITH: "BEGINS_WITH"; + readonly BETWEEN: "BETWEEN"; + readonly CONTAINS: "CONTAINS"; + readonly EQ: "EQ"; + readonly GE: "GE"; + readonly GT: "GT"; + readonly IN: "IN"; + readonly LE: "LE"; + readonly LT: "LT"; + readonly NE: "NE"; + readonly NOT_CONTAINS: "NOT_CONTAINS"; + readonly NOT_NULL: "NOT_NULL"; + readonly NULL: "NULL"; +}; +export type ComparisonOperator = + (typeof ComparisonOperator)[keyof typeof ComparisonOperator]; +export declare const ConditionalOperator: { + readonly AND: "AND"; + readonly OR: "OR"; +}; +export type ConditionalOperator = + (typeof ConditionalOperator)[keyof typeof ConditionalOperator]; +export declare const ContinuousBackupsStatus: { + readonly DISABLED: "DISABLED"; + readonly ENABLED: "ENABLED"; +}; +export type ContinuousBackupsStatus = + (typeof ContinuousBackupsStatus)[keyof typeof ContinuousBackupsStatus]; +export declare const PointInTimeRecoveryStatus: { + readonly DISABLED: "DISABLED"; + readonly ENABLED: "ENABLED"; +}; +export type PointInTimeRecoveryStatus = + (typeof PointInTimeRecoveryStatus)[keyof typeof PointInTimeRecoveryStatus]; +export interface PointInTimeRecoveryDescription { + PointInTimeRecoveryStatus?: PointInTimeRecoveryStatus | undefined; + RecoveryPeriodInDays?: number | undefined; + EarliestRestorableDateTime?: Date | undefined; + 
LatestRestorableDateTime?: Date | undefined; +} +export interface ContinuousBackupsDescription { + ContinuousBackupsStatus: ContinuousBackupsStatus | undefined; + PointInTimeRecoveryDescription?: PointInTimeRecoveryDescription | undefined; +} +export declare class ContinuousBackupsUnavailableException extends __BaseException { + readonly name: "ContinuousBackupsUnavailableException"; + readonly $fault: "client"; + constructor( + opts: __ExceptionOptionType< + ContinuousBackupsUnavailableException, + __BaseException + > + ); +} +export declare const ContributorInsightsAction: { + readonly DISABLE: "DISABLE"; + readonly ENABLE: "ENABLE"; +}; +export type ContributorInsightsAction = + (typeof ContributorInsightsAction)[keyof typeof ContributorInsightsAction]; +export declare const ContributorInsightsStatus: { + readonly DISABLED: "DISABLED"; + readonly DISABLING: "DISABLING"; + readonly ENABLED: "ENABLED"; + readonly ENABLING: "ENABLING"; + readonly FAILED: "FAILED"; +}; +export type ContributorInsightsStatus = + (typeof ContributorInsightsStatus)[keyof typeof ContributorInsightsStatus]; +export interface ContributorInsightsSummary { + TableName?: string | undefined; + IndexName?: string | undefined; + ContributorInsightsStatus?: ContributorInsightsStatus | undefined; +} +export interface CreateBackupInput { + TableName: string | undefined; + BackupName: string | undefined; +} +export interface CreateBackupOutput { + BackupDetails?: BackupDetails | undefined; +} +export declare class LimitExceededException extends __BaseException { + readonly name: "LimitExceededException"; + readonly $fault: "client"; + constructor( + opts: __ExceptionOptionType + ); +} +export declare class TableInUseException extends __BaseException { + readonly name: "TableInUseException"; + readonly $fault: "client"; + constructor( + opts: __ExceptionOptionType + ); +} +export declare class TableNotFoundException extends __BaseException { + readonly name: "TableNotFoundException"; + readonly 
$fault: "client"; + constructor( + opts: __ExceptionOptionType + ); +} +export interface WarmThroughput { + ReadUnitsPerSecond?: number | undefined; + WriteUnitsPerSecond?: number | undefined; +} +export interface CreateGlobalSecondaryIndexAction { + IndexName: string | undefined; + KeySchema: KeySchemaElement[] | undefined; + Projection: Projection | undefined; + ProvisionedThroughput?: ProvisionedThroughput | undefined; + OnDemandThroughput?: OnDemandThroughput | undefined; + WarmThroughput?: WarmThroughput | undefined; +} +export interface Replica { + RegionName?: string | undefined; +} +export interface CreateGlobalTableInput { + GlobalTableName: string | undefined; + ReplicationGroup: Replica[] | undefined; +} +export declare const GlobalTableStatus: { + readonly ACTIVE: "ACTIVE"; + readonly CREATING: "CREATING"; + readonly DELETING: "DELETING"; + readonly UPDATING: "UPDATING"; +}; +export type GlobalTableStatus = + (typeof GlobalTableStatus)[keyof typeof GlobalTableStatus]; +export interface OnDemandThroughputOverride { + MaxReadRequestUnits?: number | undefined; +} +export interface ProvisionedThroughputOverride { + ReadCapacityUnits?: number | undefined; +} +export declare const IndexStatus: { + readonly ACTIVE: "ACTIVE"; + readonly CREATING: "CREATING"; + readonly DELETING: "DELETING"; + readonly UPDATING: "UPDATING"; +}; +export type IndexStatus = (typeof IndexStatus)[keyof typeof IndexStatus]; +export interface GlobalSecondaryIndexWarmThroughputDescription { + ReadUnitsPerSecond?: number | undefined; + WriteUnitsPerSecond?: number | undefined; + Status?: IndexStatus | undefined; +} +export interface ReplicaGlobalSecondaryIndexDescription { + IndexName?: string | undefined; + ProvisionedThroughputOverride?: ProvisionedThroughputOverride | undefined; + OnDemandThroughputOverride?: OnDemandThroughputOverride | undefined; + WarmThroughput?: GlobalSecondaryIndexWarmThroughputDescription | undefined; +} +export declare const ReplicaStatus: { + readonly ACTIVE: 
"ACTIVE"; + readonly CREATING: "CREATING"; + readonly CREATION_FAILED: "CREATION_FAILED"; + readonly DELETING: "DELETING"; + readonly INACCESSIBLE_ENCRYPTION_CREDENTIALS: "INACCESSIBLE_ENCRYPTION_CREDENTIALS"; + readonly REGION_DISABLED: "REGION_DISABLED"; + readonly UPDATING: "UPDATING"; +}; +export type ReplicaStatus = (typeof ReplicaStatus)[keyof typeof ReplicaStatus]; +export declare const TableClass: { + readonly STANDARD: "STANDARD"; + readonly STANDARD_INFREQUENT_ACCESS: "STANDARD_INFREQUENT_ACCESS"; +}; +export type TableClass = (typeof TableClass)[keyof typeof TableClass]; +export interface TableClassSummary { + TableClass?: TableClass | undefined; + LastUpdateDateTime?: Date | undefined; +} +export declare const TableStatus: { + readonly ACTIVE: "ACTIVE"; + readonly ARCHIVED: "ARCHIVED"; + readonly ARCHIVING: "ARCHIVING"; + readonly CREATING: "CREATING"; + readonly DELETING: "DELETING"; + readonly INACCESSIBLE_ENCRYPTION_CREDENTIALS: "INACCESSIBLE_ENCRYPTION_CREDENTIALS"; + readonly UPDATING: "UPDATING"; +}; +export type TableStatus = (typeof TableStatus)[keyof typeof TableStatus]; +export interface TableWarmThroughputDescription { + ReadUnitsPerSecond?: number | undefined; + WriteUnitsPerSecond?: number | undefined; + Status?: TableStatus | undefined; +} +export interface ReplicaDescription { + RegionName?: string | undefined; + ReplicaStatus?: ReplicaStatus | undefined; + ReplicaStatusDescription?: string | undefined; + ReplicaStatusPercentProgress?: string | undefined; + KMSMasterKeyId?: string | undefined; + ProvisionedThroughputOverride?: ProvisionedThroughputOverride | undefined; + OnDemandThroughputOverride?: OnDemandThroughputOverride | undefined; + WarmThroughput?: TableWarmThroughputDescription | undefined; + GlobalSecondaryIndexes?: ReplicaGlobalSecondaryIndexDescription[] | undefined; + ReplicaInaccessibleDateTime?: Date | undefined; + ReplicaTableClassSummary?: TableClassSummary | undefined; +} +export interface GlobalTableDescription { + 
ReplicationGroup?: ReplicaDescription[] | undefined; + GlobalTableArn?: string | undefined; + CreationDateTime?: Date | undefined; + GlobalTableStatus?: GlobalTableStatus | undefined; + GlobalTableName?: string | undefined; +} +export interface CreateGlobalTableOutput { + GlobalTableDescription?: GlobalTableDescription | undefined; +} +export declare class GlobalTableAlreadyExistsException extends __BaseException { + readonly name: "GlobalTableAlreadyExistsException"; + readonly $fault: "client"; + constructor( + opts: __ExceptionOptionType< + GlobalTableAlreadyExistsException, + __BaseException + > + ); +} +export interface CreateReplicaAction { + RegionName: string | undefined; +} +export interface ReplicaGlobalSecondaryIndex { + IndexName: string | undefined; + ProvisionedThroughputOverride?: ProvisionedThroughputOverride | undefined; + OnDemandThroughputOverride?: OnDemandThroughputOverride | undefined; +} +export interface CreateReplicationGroupMemberAction { + RegionName: string | undefined; + KMSMasterKeyId?: string | undefined; + ProvisionedThroughputOverride?: ProvisionedThroughputOverride | undefined; + OnDemandThroughputOverride?: OnDemandThroughputOverride | undefined; + GlobalSecondaryIndexes?: ReplicaGlobalSecondaryIndex[] | undefined; + TableClassOverride?: TableClass | undefined; +} +export interface GlobalSecondaryIndex { + IndexName: string | undefined; + KeySchema: KeySchemaElement[] | undefined; + Projection: Projection | undefined; + ProvisionedThroughput?: ProvisionedThroughput | undefined; + OnDemandThroughput?: OnDemandThroughput | undefined; + WarmThroughput?: WarmThroughput | undefined; +} +export interface LocalSecondaryIndex { + IndexName: string | undefined; + KeySchema: KeySchemaElement[] | undefined; + Projection: Projection | undefined; +} +export interface SSESpecification { + Enabled?: boolean | undefined; + SSEType?: SSEType | undefined; + KMSMasterKeyId?: string | undefined; +} +export interface Tag { + Key: string | undefined; + 
Value: string | undefined; +} +export interface CreateTableInput { + AttributeDefinitions: AttributeDefinition[] | undefined; + TableName: string | undefined; + KeySchema: KeySchemaElement[] | undefined; + LocalSecondaryIndexes?: LocalSecondaryIndex[] | undefined; + GlobalSecondaryIndexes?: GlobalSecondaryIndex[] | undefined; + BillingMode?: BillingMode | undefined; + ProvisionedThroughput?: ProvisionedThroughput | undefined; + StreamSpecification?: StreamSpecification | undefined; + SSESpecification?: SSESpecification | undefined; + Tags?: Tag[] | undefined; + TableClass?: TableClass | undefined; + DeletionProtectionEnabled?: boolean | undefined; + WarmThroughput?: WarmThroughput | undefined; + ResourcePolicy?: string | undefined; + OnDemandThroughput?: OnDemandThroughput | undefined; +} +export interface ProvisionedThroughputDescription { + LastIncreaseDateTime?: Date | undefined; + LastDecreaseDateTime?: Date | undefined; + NumberOfDecreasesToday?: number | undefined; + ReadCapacityUnits?: number | undefined; + WriteCapacityUnits?: number | undefined; +} +export interface GlobalSecondaryIndexDescription { + IndexName?: string | undefined; + KeySchema?: KeySchemaElement[] | undefined; + Projection?: Projection | undefined; + IndexStatus?: IndexStatus | undefined; + Backfilling?: boolean | undefined; + ProvisionedThroughput?: ProvisionedThroughputDescription | undefined; + IndexSizeBytes?: number | undefined; + ItemCount?: number | undefined; + IndexArn?: string | undefined; + OnDemandThroughput?: OnDemandThroughput | undefined; + WarmThroughput?: GlobalSecondaryIndexWarmThroughputDescription | undefined; +} +export interface LocalSecondaryIndexDescription { + IndexName?: string | undefined; + KeySchema?: KeySchemaElement[] | undefined; + Projection?: Projection | undefined; + IndexSizeBytes?: number | undefined; + ItemCount?: number | undefined; + IndexArn?: string | undefined; +} +export declare const MultiRegionConsistency: { + readonly EVENTUAL: "EVENTUAL"; + 
readonly STRONG: "STRONG"; +}; +export type MultiRegionConsistency = + (typeof MultiRegionConsistency)[keyof typeof MultiRegionConsistency]; +export interface RestoreSummary { + SourceBackupArn?: string | undefined; + SourceTableArn?: string | undefined; + RestoreDateTime: Date | undefined; + RestoreInProgress: boolean | undefined; +} +export interface TableDescription { + AttributeDefinitions?: AttributeDefinition[] | undefined; + TableName?: string | undefined; + KeySchema?: KeySchemaElement[] | undefined; + TableStatus?: TableStatus | undefined; + CreationDateTime?: Date | undefined; + ProvisionedThroughput?: ProvisionedThroughputDescription | undefined; + TableSizeBytes?: number | undefined; + ItemCount?: number | undefined; + TableArn?: string | undefined; + TableId?: string | undefined; + BillingModeSummary?: BillingModeSummary | undefined; + LocalSecondaryIndexes?: LocalSecondaryIndexDescription[] | undefined; + GlobalSecondaryIndexes?: GlobalSecondaryIndexDescription[] | undefined; + StreamSpecification?: StreamSpecification | undefined; + LatestStreamLabel?: string | undefined; + LatestStreamArn?: string | undefined; + GlobalTableVersion?: string | undefined; + Replicas?: ReplicaDescription[] | undefined; + RestoreSummary?: RestoreSummary | undefined; + SSEDescription?: SSEDescription | undefined; + ArchivalSummary?: ArchivalSummary | undefined; + TableClassSummary?: TableClassSummary | undefined; + DeletionProtectionEnabled?: boolean | undefined; + OnDemandThroughput?: OnDemandThroughput | undefined; + WarmThroughput?: TableWarmThroughputDescription | undefined; + MultiRegionConsistency?: MultiRegionConsistency | undefined; +} +export interface CreateTableOutput { + TableDescription?: TableDescription | undefined; +} +export declare class ResourceInUseException extends __BaseException { + readonly name: "ResourceInUseException"; + readonly $fault: "client"; + constructor( + opts: __ExceptionOptionType + ); +} +export interface CsvOptions { + Delimiter?: 
string | undefined; + HeaderList?: string[] | undefined; +} +export interface DeleteBackupInput { + BackupArn: string | undefined; +} +export interface DeleteBackupOutput { + BackupDescription?: BackupDescription | undefined; +} +export interface DeleteGlobalSecondaryIndexAction { + IndexName: string | undefined; +} +export declare const ReturnValue: { + readonly ALL_NEW: "ALL_NEW"; + readonly ALL_OLD: "ALL_OLD"; + readonly NONE: "NONE"; + readonly UPDATED_NEW: "UPDATED_NEW"; + readonly UPDATED_OLD: "UPDATED_OLD"; +}; +export type ReturnValue = (typeof ReturnValue)[keyof typeof ReturnValue]; +export declare class ReplicatedWriteConflictException extends __BaseException { + readonly name: "ReplicatedWriteConflictException"; + readonly $fault: "client"; + constructor( + opts: __ExceptionOptionType< + ReplicatedWriteConflictException, + __BaseException + > + ); +} +export declare class TransactionConflictException extends __BaseException { + readonly name: "TransactionConflictException"; + readonly $fault: "client"; + constructor( + opts: __ExceptionOptionType + ); +} +export interface DeleteReplicaAction { + RegionName: string | undefined; +} +export interface DeleteReplicationGroupMemberAction { + RegionName: string | undefined; +} +export interface DeleteResourcePolicyInput { + ResourceArn: string | undefined; + ExpectedRevisionId?: string | undefined; +} +export interface DeleteResourcePolicyOutput { + RevisionId?: string | undefined; +} +export declare class PolicyNotFoundException extends __BaseException { + readonly name: "PolicyNotFoundException"; + readonly $fault: "client"; + constructor( + opts: __ExceptionOptionType + ); +} +export interface DeleteTableInput { + TableName: string | undefined; +} +export interface DeleteTableOutput { + TableDescription?: TableDescription | undefined; +} +export interface DescribeBackupInput { + BackupArn: string | undefined; +} +export interface DescribeBackupOutput { + BackupDescription?: BackupDescription | undefined; +} 
+export interface DescribeContinuousBackupsInput { + TableName: string | undefined; +} +export interface DescribeContinuousBackupsOutput { + ContinuousBackupsDescription?: ContinuousBackupsDescription | undefined; +} +export interface DescribeContributorInsightsInput { + TableName: string | undefined; + IndexName?: string | undefined; +} +export interface FailureException { + ExceptionName?: string | undefined; + ExceptionDescription?: string | undefined; +} +export interface DescribeContributorInsightsOutput { + TableName?: string | undefined; + IndexName?: string | undefined; + ContributorInsightsRuleList?: string[] | undefined; + ContributorInsightsStatus?: ContributorInsightsStatus | undefined; + LastUpdateDateTime?: Date | undefined; + FailureException?: FailureException | undefined; +} +export interface DescribeEndpointsRequest {} +export interface Endpoint { + Address: string | undefined; + CachePeriodInMinutes: number | undefined; +} +export interface DescribeEndpointsResponse { + Endpoints: Endpoint[] | undefined; +} +export interface DescribeExportInput { + ExportArn: string | undefined; +} +export declare const ExportFormat: { + readonly DYNAMODB_JSON: "DYNAMODB_JSON"; + readonly ION: "ION"; +}; +export type ExportFormat = (typeof ExportFormat)[keyof typeof ExportFormat]; +export declare const ExportStatus: { + readonly COMPLETED: "COMPLETED"; + readonly FAILED: "FAILED"; + readonly IN_PROGRESS: "IN_PROGRESS"; +}; +export type ExportStatus = (typeof ExportStatus)[keyof typeof ExportStatus]; +export declare const ExportType: { + readonly FULL_EXPORT: "FULL_EXPORT"; + readonly INCREMENTAL_EXPORT: "INCREMENTAL_EXPORT"; +}; +export type ExportType = (typeof ExportType)[keyof typeof ExportType]; +export declare const ExportViewType: { + readonly NEW_AND_OLD_IMAGES: "NEW_AND_OLD_IMAGES"; + readonly NEW_IMAGE: "NEW_IMAGE"; +}; +export type ExportViewType = + (typeof ExportViewType)[keyof typeof ExportViewType]; +export interface IncrementalExportSpecification { 
+ ExportFromTime?: Date | undefined; + ExportToTime?: Date | undefined; + ExportViewType?: ExportViewType | undefined; +} +export declare const S3SseAlgorithm: { + readonly AES256: "AES256"; + readonly KMS: "KMS"; +}; +export type S3SseAlgorithm = + (typeof S3SseAlgorithm)[keyof typeof S3SseAlgorithm]; +export interface ExportDescription { + ExportArn?: string | undefined; + ExportStatus?: ExportStatus | undefined; + StartTime?: Date | undefined; + EndTime?: Date | undefined; + ExportManifest?: string | undefined; + TableArn?: string | undefined; + TableId?: string | undefined; + ExportTime?: Date | undefined; + ClientToken?: string | undefined; + S3Bucket?: string | undefined; + S3BucketOwner?: string | undefined; + S3Prefix?: string | undefined; + S3SseAlgorithm?: S3SseAlgorithm | undefined; + S3SseKmsKeyId?: string | undefined; + FailureCode?: string | undefined; + FailureMessage?: string | undefined; + ExportFormat?: ExportFormat | undefined; + BilledSizeBytes?: number | undefined; + ItemCount?: number | undefined; + ExportType?: ExportType | undefined; + IncrementalExportSpecification?: IncrementalExportSpecification | undefined; +} +export interface DescribeExportOutput { + ExportDescription?: ExportDescription | undefined; +} +export declare class ExportNotFoundException extends __BaseException { + readonly name: "ExportNotFoundException"; + readonly $fault: "client"; + constructor( + opts: __ExceptionOptionType + ); +} +export interface DescribeGlobalTableInput { + GlobalTableName: string | undefined; +} +export interface DescribeGlobalTableOutput { + GlobalTableDescription?: GlobalTableDescription | undefined; +} +export declare class GlobalTableNotFoundException extends __BaseException { + readonly name: "GlobalTableNotFoundException"; + readonly $fault: "client"; + constructor( + opts: __ExceptionOptionType + ); +} +export interface DescribeGlobalTableSettingsInput { + GlobalTableName: string | undefined; +} +export interface 
ReplicaGlobalSecondaryIndexSettingsDescription { + IndexName: string | undefined; + IndexStatus?: IndexStatus | undefined; + ProvisionedReadCapacityUnits?: number | undefined; + ProvisionedReadCapacityAutoScalingSettings?: + | AutoScalingSettingsDescription + | undefined; + ProvisionedWriteCapacityUnits?: number | undefined; + ProvisionedWriteCapacityAutoScalingSettings?: + | AutoScalingSettingsDescription + | undefined; +} +export interface ReplicaSettingsDescription { + RegionName: string | undefined; + ReplicaStatus?: ReplicaStatus | undefined; + ReplicaBillingModeSummary?: BillingModeSummary | undefined; + ReplicaProvisionedReadCapacityUnits?: number | undefined; + ReplicaProvisionedReadCapacityAutoScalingSettings?: + | AutoScalingSettingsDescription + | undefined; + ReplicaProvisionedWriteCapacityUnits?: number | undefined; + ReplicaProvisionedWriteCapacityAutoScalingSettings?: + | AutoScalingSettingsDescription + | undefined; + ReplicaGlobalSecondaryIndexSettings?: + | ReplicaGlobalSecondaryIndexSettingsDescription[] + | undefined; + ReplicaTableClassSummary?: TableClassSummary | undefined; +} +export interface DescribeGlobalTableSettingsOutput { + GlobalTableName?: string | undefined; + ReplicaSettings?: ReplicaSettingsDescription[] | undefined; +} +export interface DescribeImportInput { + ImportArn: string | undefined; +} +export declare const ImportStatus: { + readonly CANCELLED: "CANCELLED"; + readonly CANCELLING: "CANCELLING"; + readonly COMPLETED: "COMPLETED"; + readonly FAILED: "FAILED"; + readonly IN_PROGRESS: "IN_PROGRESS"; +}; +export type ImportStatus = (typeof ImportStatus)[keyof typeof ImportStatus]; +export declare const InputCompressionType: { + readonly GZIP: "GZIP"; + readonly NONE: "NONE"; + readonly ZSTD: "ZSTD"; +}; +export type InputCompressionType = + (typeof InputCompressionType)[keyof typeof InputCompressionType]; +export declare const InputFormat: { + readonly CSV: "CSV"; + readonly DYNAMODB_JSON: "DYNAMODB_JSON"; + readonly ION: 
"ION"; +}; +export type InputFormat = (typeof InputFormat)[keyof typeof InputFormat]; +export interface InputFormatOptions { + Csv?: CsvOptions | undefined; +} +export interface S3BucketSource { + S3BucketOwner?: string | undefined; + S3Bucket: string | undefined; + S3KeyPrefix?: string | undefined; +} +export interface TableCreationParameters { + TableName: string | undefined; + AttributeDefinitions: AttributeDefinition[] | undefined; + KeySchema: KeySchemaElement[] | undefined; + BillingMode?: BillingMode | undefined; + ProvisionedThroughput?: ProvisionedThroughput | undefined; + OnDemandThroughput?: OnDemandThroughput | undefined; + SSESpecification?: SSESpecification | undefined; + GlobalSecondaryIndexes?: GlobalSecondaryIndex[] | undefined; +} +export interface ImportTableDescription { + ImportArn?: string | undefined; + ImportStatus?: ImportStatus | undefined; + TableArn?: string | undefined; + TableId?: string | undefined; + ClientToken?: string | undefined; + S3BucketSource?: S3BucketSource | undefined; + ErrorCount?: number | undefined; + CloudWatchLogGroupArn?: string | undefined; + InputFormat?: InputFormat | undefined; + InputFormatOptions?: InputFormatOptions | undefined; + InputCompressionType?: InputCompressionType | undefined; + TableCreationParameters?: TableCreationParameters | undefined; + StartTime?: Date | undefined; + EndTime?: Date | undefined; + ProcessedSizeBytes?: number | undefined; + ProcessedItemCount?: number | undefined; + ImportedItemCount?: number | undefined; + FailureCode?: string | undefined; + FailureMessage?: string | undefined; +} +export interface DescribeImportOutput { + ImportTableDescription: ImportTableDescription | undefined; +} +export declare class ImportNotFoundException extends __BaseException { + readonly name: "ImportNotFoundException"; + readonly $fault: "client"; + constructor( + opts: __ExceptionOptionType + ); +} +export interface DescribeKinesisStreamingDestinationInput { + TableName: string | undefined; +} 
+export declare const DestinationStatus: { + readonly ACTIVE: "ACTIVE"; + readonly DISABLED: "DISABLED"; + readonly DISABLING: "DISABLING"; + readonly ENABLE_FAILED: "ENABLE_FAILED"; + readonly ENABLING: "ENABLING"; + readonly UPDATING: "UPDATING"; +}; +export type DestinationStatus = + (typeof DestinationStatus)[keyof typeof DestinationStatus]; +export interface KinesisDataStreamDestination { + StreamArn?: string | undefined; + DestinationStatus?: DestinationStatus | undefined; + DestinationStatusDescription?: string | undefined; + ApproximateCreationDateTimePrecision?: + | ApproximateCreationDateTimePrecision + | undefined; +} +export interface DescribeKinesisStreamingDestinationOutput { + TableName?: string | undefined; + KinesisDataStreamDestinations?: KinesisDataStreamDestination[] | undefined; +} +export interface DescribeLimitsInput {} +export interface DescribeLimitsOutput { + AccountMaxReadCapacityUnits?: number | undefined; + AccountMaxWriteCapacityUnits?: number | undefined; + TableMaxReadCapacityUnits?: number | undefined; + TableMaxWriteCapacityUnits?: number | undefined; +} +export interface DescribeTableInput { + TableName: string | undefined; +} +export interface DescribeTableOutput { + Table?: TableDescription | undefined; +} +export interface DescribeTableReplicaAutoScalingInput { + TableName: string | undefined; +} +export interface ReplicaGlobalSecondaryIndexAutoScalingDescription { + IndexName?: string | undefined; + IndexStatus?: IndexStatus | undefined; + ProvisionedReadCapacityAutoScalingSettings?: + | AutoScalingSettingsDescription + | undefined; + ProvisionedWriteCapacityAutoScalingSettings?: + | AutoScalingSettingsDescription + | undefined; +} +export interface ReplicaAutoScalingDescription { + RegionName?: string | undefined; + GlobalSecondaryIndexes?: + | ReplicaGlobalSecondaryIndexAutoScalingDescription[] + | undefined; + ReplicaProvisionedReadCapacityAutoScalingSettings?: + | AutoScalingSettingsDescription + | undefined; + 
ReplicaProvisionedWriteCapacityAutoScalingSettings?: + | AutoScalingSettingsDescription + | undefined; + ReplicaStatus?: ReplicaStatus | undefined; +} +export interface TableAutoScalingDescription { + TableName?: string | undefined; + TableStatus?: TableStatus | undefined; + Replicas?: ReplicaAutoScalingDescription[] | undefined; +} +export interface DescribeTableReplicaAutoScalingOutput { + TableAutoScalingDescription?: TableAutoScalingDescription | undefined; +} +export interface DescribeTimeToLiveInput { + TableName: string | undefined; +} +export interface DescribeTimeToLiveOutput { + TimeToLiveDescription?: TimeToLiveDescription | undefined; +} +export interface EnableKinesisStreamingConfiguration { + ApproximateCreationDateTimePrecision?: + | ApproximateCreationDateTimePrecision + | undefined; +} +export interface KinesisStreamingDestinationInput { + TableName: string | undefined; + StreamArn: string | undefined; + EnableKinesisStreamingConfiguration?: + | EnableKinesisStreamingConfiguration + | undefined; +} +export interface KinesisStreamingDestinationOutput { + TableName?: string | undefined; + StreamArn?: string | undefined; + DestinationStatus?: DestinationStatus | undefined; + EnableKinesisStreamingConfiguration?: + | EnableKinesisStreamingConfiguration + | undefined; +} +export declare class DuplicateItemException extends __BaseException { + readonly name: "DuplicateItemException"; + readonly $fault: "client"; + constructor( + opts: __ExceptionOptionType + ); +} +export declare class IdempotentParameterMismatchException extends __BaseException { + readonly name: "IdempotentParameterMismatchException"; + readonly $fault: "client"; + Message?: string | undefined; + constructor( + opts: __ExceptionOptionType< + IdempotentParameterMismatchException, + __BaseException + > + ); +} +export declare class TransactionInProgressException extends __BaseException { + readonly name: "TransactionInProgressException"; + readonly $fault: "client"; + Message?: string | 
undefined; + constructor( + opts: __ExceptionOptionType + ); +} +export declare class ExportConflictException extends __BaseException { + readonly name: "ExportConflictException"; + readonly $fault: "client"; + constructor( + opts: __ExceptionOptionType + ); +} +export interface ExportTableToPointInTimeInput { + TableArn: string | undefined; + ExportTime?: Date | undefined; + ClientToken?: string | undefined; + S3Bucket: string | undefined; + S3BucketOwner?: string | undefined; + S3Prefix?: string | undefined; + S3SseAlgorithm?: S3SseAlgorithm | undefined; + S3SseKmsKeyId?: string | undefined; + ExportFormat?: ExportFormat | undefined; + ExportType?: ExportType | undefined; + IncrementalExportSpecification?: IncrementalExportSpecification | undefined; +} +export interface ExportTableToPointInTimeOutput { + ExportDescription?: ExportDescription | undefined; +} +export declare class InvalidExportTimeException extends __BaseException { + readonly name: "InvalidExportTimeException"; + readonly $fault: "client"; + constructor( + opts: __ExceptionOptionType + ); +} +export declare class PointInTimeRecoveryUnavailableException extends __BaseException { + readonly name: "PointInTimeRecoveryUnavailableException"; + readonly $fault: "client"; + constructor( + opts: __ExceptionOptionType< + PointInTimeRecoveryUnavailableException, + __BaseException + > + ); +} +export interface GetResourcePolicyInput { + ResourceArn: string | undefined; +} +export interface GetResourcePolicyOutput { + Policy?: string | undefined; + RevisionId?: string | undefined; +} +export declare class ImportConflictException extends __BaseException { + readonly name: "ImportConflictException"; + readonly $fault: "client"; + constructor( + opts: __ExceptionOptionType + ); +} +export interface ImportTableInput { + ClientToken?: string | undefined; + S3BucketSource: S3BucketSource | undefined; + InputFormat: InputFormat | undefined; + InputFormatOptions?: InputFormatOptions | undefined; + 
InputCompressionType?: InputCompressionType | undefined; + TableCreationParameters: TableCreationParameters | undefined; +} +export interface ImportTableOutput { + ImportTableDescription: ImportTableDescription | undefined; +} +export interface ListBackupsInput { + TableName?: string | undefined; + Limit?: number | undefined; + TimeRangeLowerBound?: Date | undefined; + TimeRangeUpperBound?: Date | undefined; + ExclusiveStartBackupArn?: string | undefined; + BackupType?: BackupTypeFilter | undefined; +} +export interface ListBackupsOutput { + BackupSummaries?: BackupSummary[] | undefined; + LastEvaluatedBackupArn?: string | undefined; +} +export interface ListContributorInsightsInput { + TableName?: string | undefined; + NextToken?: string | undefined; + MaxResults?: number | undefined; +} +export interface ListContributorInsightsOutput { + ContributorInsightsSummaries?: ContributorInsightsSummary[] | undefined; + NextToken?: string | undefined; +} +export interface ListExportsInput { + TableArn?: string | undefined; + MaxResults?: number | undefined; + NextToken?: string | undefined; +} +export interface ExportSummary { + ExportArn?: string | undefined; + ExportStatus?: ExportStatus | undefined; + ExportType?: ExportType | undefined; +} +export interface ListExportsOutput { + ExportSummaries?: ExportSummary[] | undefined; + NextToken?: string | undefined; +} +export interface ListGlobalTablesInput { + ExclusiveStartGlobalTableName?: string | undefined; + Limit?: number | undefined; + RegionName?: string | undefined; +} +export interface GlobalTable { + GlobalTableName?: string | undefined; + ReplicationGroup?: Replica[] | undefined; +} +export interface ListGlobalTablesOutput { + GlobalTables?: GlobalTable[] | undefined; + LastEvaluatedGlobalTableName?: string | undefined; +} +export interface ListImportsInput { + TableArn?: string | undefined; + PageSize?: number | undefined; + NextToken?: string | undefined; +} +export interface ImportSummary { + ImportArn?: 
string | undefined; + ImportStatus?: ImportStatus | undefined; + TableArn?: string | undefined; + S3BucketSource?: S3BucketSource | undefined; + CloudWatchLogGroupArn?: string | undefined; + InputFormat?: InputFormat | undefined; + StartTime?: Date | undefined; + EndTime?: Date | undefined; +} +export interface ListImportsOutput { + ImportSummaryList?: ImportSummary[] | undefined; + NextToken?: string | undefined; +} +export interface ListTablesInput { + ExclusiveStartTableName?: string | undefined; + Limit?: number | undefined; +} +export interface ListTablesOutput { + TableNames?: string[] | undefined; + LastEvaluatedTableName?: string | undefined; +} +export interface ListTagsOfResourceInput { + ResourceArn: string | undefined; + NextToken?: string | undefined; +} +export interface ListTagsOfResourceOutput { + Tags?: Tag[] | undefined; + NextToken?: string | undefined; +} +export interface PutResourcePolicyInput { + ResourceArn: string | undefined; + Policy: string | undefined; + ExpectedRevisionId?: string | undefined; + ConfirmRemoveSelfResourceAccess?: boolean | undefined; +} +export interface PutResourcePolicyOutput { + RevisionId?: string | undefined; +} +export declare const Select: { + readonly ALL_ATTRIBUTES: "ALL_ATTRIBUTES"; + readonly ALL_PROJECTED_ATTRIBUTES: "ALL_PROJECTED_ATTRIBUTES"; + readonly COUNT: "COUNT"; + readonly SPECIFIC_ATTRIBUTES: "SPECIFIC_ATTRIBUTES"; +}; +export type Select = (typeof Select)[keyof typeof Select]; +export interface RestoreTableFromBackupInput { + TargetTableName: string | undefined; + BackupArn: string | undefined; + BillingModeOverride?: BillingMode | undefined; + GlobalSecondaryIndexOverride?: GlobalSecondaryIndex[] | undefined; + LocalSecondaryIndexOverride?: LocalSecondaryIndex[] | undefined; + ProvisionedThroughputOverride?: ProvisionedThroughput | undefined; + OnDemandThroughputOverride?: OnDemandThroughput | undefined; + SSESpecificationOverride?: SSESpecification | undefined; +} +export interface 
RestoreTableFromBackupOutput { + TableDescription?: TableDescription | undefined; +} +export declare class TableAlreadyExistsException extends __BaseException { + readonly name: "TableAlreadyExistsException"; + readonly $fault: "client"; + constructor( + opts: __ExceptionOptionType + ); +} +export declare class InvalidRestoreTimeException extends __BaseException { + readonly name: "InvalidRestoreTimeException"; + readonly $fault: "client"; + constructor( + opts: __ExceptionOptionType + ); +} +export interface RestoreTableToPointInTimeInput { + SourceTableArn?: string | undefined; + SourceTableName?: string | undefined; + TargetTableName: string | undefined; + UseLatestRestorableTime?: boolean | undefined; + RestoreDateTime?: Date | undefined; + BillingModeOverride?: BillingMode | undefined; + GlobalSecondaryIndexOverride?: GlobalSecondaryIndex[] | undefined; + LocalSecondaryIndexOverride?: LocalSecondaryIndex[] | undefined; + ProvisionedThroughputOverride?: ProvisionedThroughput | undefined; + OnDemandThroughputOverride?: OnDemandThroughput | undefined; + SSESpecificationOverride?: SSESpecification | undefined; +} +export interface RestoreTableToPointInTimeOutput { + TableDescription?: TableDescription | undefined; +} +export interface TagResourceInput { + ResourceArn: string | undefined; + Tags: Tag[] | undefined; +} +export interface UntagResourceInput { + ResourceArn: string | undefined; + TagKeys: string[] | undefined; +} +export interface PointInTimeRecoverySpecification { + PointInTimeRecoveryEnabled: boolean | undefined; + RecoveryPeriodInDays?: number | undefined; +} +export interface UpdateContinuousBackupsInput { + TableName: string | undefined; + PointInTimeRecoverySpecification: + | PointInTimeRecoverySpecification + | undefined; +} +export interface UpdateContinuousBackupsOutput { + ContinuousBackupsDescription?: ContinuousBackupsDescription | undefined; +} +export interface UpdateContributorInsightsInput { + TableName: string | undefined; + 
IndexName?: string | undefined; + ContributorInsightsAction: ContributorInsightsAction | undefined; +} +export interface UpdateContributorInsightsOutput { + TableName?: string | undefined; + IndexName?: string | undefined; + ContributorInsightsStatus?: ContributorInsightsStatus | undefined; +} +export declare class ReplicaAlreadyExistsException extends __BaseException { + readonly name: "ReplicaAlreadyExistsException"; + readonly $fault: "client"; + constructor( + opts: __ExceptionOptionType + ); +} +export declare class ReplicaNotFoundException extends __BaseException { + readonly name: "ReplicaNotFoundException"; + readonly $fault: "client"; + constructor( + opts: __ExceptionOptionType + ); +} +export interface ReplicaUpdate { + Create?: CreateReplicaAction | undefined; + Delete?: DeleteReplicaAction | undefined; +} +export interface UpdateGlobalTableInput { + GlobalTableName: string | undefined; + ReplicaUpdates: ReplicaUpdate[] | undefined; +} +export interface UpdateGlobalTableOutput { + GlobalTableDescription?: GlobalTableDescription | undefined; +} +export declare class IndexNotFoundException extends __BaseException { + readonly name: "IndexNotFoundException"; + readonly $fault: "client"; + constructor( + opts: __ExceptionOptionType + ); +} +export interface GlobalTableGlobalSecondaryIndexSettingsUpdate { + IndexName: string | undefined; + ProvisionedWriteCapacityUnits?: number | undefined; + ProvisionedWriteCapacityAutoScalingSettingsUpdate?: + | AutoScalingSettingsUpdate + | undefined; +} +export interface ReplicaGlobalSecondaryIndexSettingsUpdate { + IndexName: string | undefined; + ProvisionedReadCapacityUnits?: number | undefined; + ProvisionedReadCapacityAutoScalingSettingsUpdate?: + | AutoScalingSettingsUpdate + | undefined; +} +export interface ReplicaSettingsUpdate { + RegionName: string | undefined; + ReplicaProvisionedReadCapacityUnits?: number | undefined; + ReplicaProvisionedReadCapacityAutoScalingSettingsUpdate?: + | AutoScalingSettingsUpdate + 
| undefined; + ReplicaGlobalSecondaryIndexSettingsUpdate?: + | ReplicaGlobalSecondaryIndexSettingsUpdate[] + | undefined; + ReplicaTableClass?: TableClass | undefined; +} +export interface UpdateGlobalTableSettingsInput { + GlobalTableName: string | undefined; + GlobalTableBillingMode?: BillingMode | undefined; + GlobalTableProvisionedWriteCapacityUnits?: number | undefined; + GlobalTableProvisionedWriteCapacityAutoScalingSettingsUpdate?: + | AutoScalingSettingsUpdate + | undefined; + GlobalTableGlobalSecondaryIndexSettingsUpdate?: + | GlobalTableGlobalSecondaryIndexSettingsUpdate[] + | undefined; + ReplicaSettingsUpdate?: ReplicaSettingsUpdate[] | undefined; +} +export interface UpdateGlobalTableSettingsOutput { + GlobalTableName?: string | undefined; + ReplicaSettings?: ReplicaSettingsDescription[] | undefined; +} +export interface UpdateKinesisStreamingConfiguration { + ApproximateCreationDateTimePrecision?: + | ApproximateCreationDateTimePrecision + | undefined; +} +export interface UpdateKinesisStreamingDestinationInput { + TableName: string | undefined; + StreamArn: string | undefined; + UpdateKinesisStreamingConfiguration?: + | UpdateKinesisStreamingConfiguration + | undefined; +} +export interface UpdateKinesisStreamingDestinationOutput { + TableName?: string | undefined; + StreamArn?: string | undefined; + DestinationStatus?: DestinationStatus | undefined; + UpdateKinesisStreamingConfiguration?: + | UpdateKinesisStreamingConfiguration + | undefined; +} +export interface UpdateGlobalSecondaryIndexAction { + IndexName: string | undefined; + ProvisionedThroughput?: ProvisionedThroughput | undefined; + OnDemandThroughput?: OnDemandThroughput | undefined; + WarmThroughput?: WarmThroughput | undefined; +} +export interface GlobalSecondaryIndexUpdate { + Update?: UpdateGlobalSecondaryIndexAction | undefined; + Create?: CreateGlobalSecondaryIndexAction | undefined; + Delete?: DeleteGlobalSecondaryIndexAction | undefined; +} +export interface 
UpdateReplicationGroupMemberAction { + RegionName: string | undefined; + KMSMasterKeyId?: string | undefined; + ProvisionedThroughputOverride?: ProvisionedThroughputOverride | undefined; + OnDemandThroughputOverride?: OnDemandThroughputOverride | undefined; + GlobalSecondaryIndexes?: ReplicaGlobalSecondaryIndex[] | undefined; + TableClassOverride?: TableClass | undefined; +} +export interface ReplicationGroupUpdate { + Create?: CreateReplicationGroupMemberAction | undefined; + Update?: UpdateReplicationGroupMemberAction | undefined; + Delete?: DeleteReplicationGroupMemberAction | undefined; +} +export interface UpdateTableInput { + AttributeDefinitions?: AttributeDefinition[] | undefined; + TableName: string | undefined; + BillingMode?: BillingMode | undefined; + ProvisionedThroughput?: ProvisionedThroughput | undefined; + GlobalSecondaryIndexUpdates?: GlobalSecondaryIndexUpdate[] | undefined; + StreamSpecification?: StreamSpecification | undefined; + SSESpecification?: SSESpecification | undefined; + ReplicaUpdates?: ReplicationGroupUpdate[] | undefined; + TableClass?: TableClass | undefined; + DeletionProtectionEnabled?: boolean | undefined; + MultiRegionConsistency?: MultiRegionConsistency | undefined; + OnDemandThroughput?: OnDemandThroughput | undefined; + WarmThroughput?: WarmThroughput | undefined; +} +export interface UpdateTableOutput { + TableDescription?: TableDescription | undefined; +} +export interface GlobalSecondaryIndexAutoScalingUpdate { + IndexName?: string | undefined; + ProvisionedWriteCapacityAutoScalingUpdate?: + | AutoScalingSettingsUpdate + | undefined; +} +export interface ReplicaGlobalSecondaryIndexAutoScalingUpdate { + IndexName?: string | undefined; + ProvisionedReadCapacityAutoScalingUpdate?: + | AutoScalingSettingsUpdate + | undefined; +} +export interface ReplicaAutoScalingUpdate { + RegionName: string | undefined; + ReplicaGlobalSecondaryIndexUpdates?: + | ReplicaGlobalSecondaryIndexAutoScalingUpdate[] + | undefined; + 
ReplicaProvisionedReadCapacityAutoScalingUpdate?: + | AutoScalingSettingsUpdate + | undefined; +} +export interface UpdateTableReplicaAutoScalingInput { + GlobalSecondaryIndexUpdates?: + | GlobalSecondaryIndexAutoScalingUpdate[] + | undefined; + TableName: string | undefined; + ProvisionedWriteCapacityAutoScalingUpdate?: + | AutoScalingSettingsUpdate + | undefined; + ReplicaUpdates?: ReplicaAutoScalingUpdate[] | undefined; +} +export interface UpdateTableReplicaAutoScalingOutput { + TableAutoScalingDescription?: TableAutoScalingDescription | undefined; +} +export interface TimeToLiveSpecification { + Enabled: boolean | undefined; + AttributeName: string | undefined; +} +export interface UpdateTimeToLiveInput { + TableName: string | undefined; + TimeToLiveSpecification: TimeToLiveSpecification | undefined; +} +export interface UpdateTimeToLiveOutput { + TimeToLiveSpecification?: TimeToLiveSpecification | undefined; +} +export type AttributeValue = + | AttributeValue.BMember + | AttributeValue.BOOLMember + | AttributeValue.BSMember + | AttributeValue.LMember + | AttributeValue.MMember + | AttributeValue.NMember + | AttributeValue.NSMember + | AttributeValue.NULLMember + | AttributeValue.SMember + | AttributeValue.SSMember + | AttributeValue.$UnknownMember; +export declare namespace AttributeValue { + interface SMember { + S: string; + N?: never; + B?: never; + SS?: never; + NS?: never; + BS?: never; + M?: never; + L?: never; + NULL?: never; + BOOL?: never; + $unknown?: never; + } + interface NMember { + S?: never; + N: string; + B?: never; + SS?: never; + NS?: never; + BS?: never; + M?: never; + L?: never; + NULL?: never; + BOOL?: never; + $unknown?: never; + } + interface BMember { + S?: never; + N?: never; + B: Uint8Array; + SS?: never; + NS?: never; + BS?: never; + M?: never; + L?: never; + NULL?: never; + BOOL?: never; + $unknown?: never; + } + interface SSMember { + S?: never; + N?: never; + B?: never; + SS: string[]; + NS?: never; + BS?: never; + M?: never; + 
L?: never; + NULL?: never; + BOOL?: never; + $unknown?: never; + } + interface NSMember { + S?: never; + N?: never; + B?: never; + SS?: never; + NS: string[]; + BS?: never; + M?: never; + L?: never; + NULL?: never; + BOOL?: never; + $unknown?: never; + } + interface BSMember { + S?: never; + N?: never; + B?: never; + SS?: never; + NS?: never; + BS: Uint8Array[]; + M?: never; + L?: never; + NULL?: never; + BOOL?: never; + $unknown?: never; + } + interface MMember { + S?: never; + N?: never; + B?: never; + SS?: never; + NS?: never; + BS?: never; + M: Record; + L?: never; + NULL?: never; + BOOL?: never; + $unknown?: never; + } + interface LMember { + S?: never; + N?: never; + B?: never; + SS?: never; + NS?: never; + BS?: never; + M?: never; + L: AttributeValue[]; + NULL?: never; + BOOL?: never; + $unknown?: never; + } + interface NULLMember { + S?: never; + N?: never; + B?: never; + SS?: never; + NS?: never; + BS?: never; + M?: never; + L?: never; + NULL: boolean; + BOOL?: never; + $unknown?: never; + } + interface BOOLMember { + S?: never; + N?: never; + B?: never; + SS?: never; + NS?: never; + BS?: never; + M?: never; + L?: never; + NULL?: never; + BOOL: boolean; + $unknown?: never; + } + interface $UnknownMember { + S?: never; + N?: never; + B?: never; + SS?: never; + NS?: never; + BS?: never; + M?: never; + L?: never; + NULL?: never; + BOOL?: never; + $unknown: [string, any]; + } + interface Visitor { + S: (value: string) => T; + N: (value: string) => T; + B: (value: Uint8Array) => T; + SS: (value: string[]) => T; + NS: (value: string[]) => T; + BS: (value: Uint8Array[]) => T; + M: (value: Record) => T; + L: (value: AttributeValue[]) => T; + NULL: (value: boolean) => T; + BOOL: (value: boolean) => T; + _: (name: string, value: any) => T; + } + const visit: (value: AttributeValue, visitor: Visitor) => T; +} +export interface AttributeValueUpdate { + Value?: AttributeValue | undefined; + Action?: AttributeAction | undefined; +} +export interface BatchStatementError 
{ + Code?: BatchStatementErrorCodeEnum | undefined; + Message?: string | undefined; + Item?: Record | undefined; +} +export interface BatchStatementRequest { + Statement: string | undefined; + Parameters?: AttributeValue[] | undefined; + ConsistentRead?: boolean | undefined; + ReturnValuesOnConditionCheckFailure?: + | ReturnValuesOnConditionCheckFailure + | undefined; +} +export interface CancellationReason { + Item?: Record | undefined; + Code?: string | undefined; + Message?: string | undefined; +} +export interface Condition { + AttributeValueList?: AttributeValue[] | undefined; + ComparisonOperator: ComparisonOperator | undefined; +} +export declare class ConditionalCheckFailedException extends __BaseException { + readonly name: "ConditionalCheckFailedException"; + readonly $fault: "client"; + Item?: Record | undefined; + constructor( + opts: __ExceptionOptionType< + ConditionalCheckFailedException, + __BaseException + > + ); +} +export interface DeleteRequest { + Key: Record | undefined; +} +export interface ExecuteStatementInput { + Statement: string | undefined; + Parameters?: AttributeValue[] | undefined; + ConsistentRead?: boolean | undefined; + NextToken?: string | undefined; + ReturnConsumedCapacity?: ReturnConsumedCapacity | undefined; + Limit?: number | undefined; + ReturnValuesOnConditionCheckFailure?: + | ReturnValuesOnConditionCheckFailure + | undefined; +} +export interface Get { + Key: Record | undefined; + TableName: string | undefined; + ProjectionExpression?: string | undefined; + ExpressionAttributeNames?: Record | undefined; +} +export interface GetItemInput { + TableName: string | undefined; + Key: Record | undefined; + AttributesToGet?: string[] | undefined; + ConsistentRead?: boolean | undefined; + ReturnConsumedCapacity?: ReturnConsumedCapacity | undefined; + ProjectionExpression?: string | undefined; + ExpressionAttributeNames?: Record | undefined; +} +export interface GetItemOutput { + Item?: Record | undefined; + ConsumedCapacity?: 
ConsumedCapacity | undefined; +} +export interface ItemCollectionMetrics { + ItemCollectionKey?: Record | undefined; + SizeEstimateRangeGB?: number[] | undefined; +} +export interface ItemResponse { + Item?: Record | undefined; +} +export interface ParameterizedStatement { + Statement: string | undefined; + Parameters?: AttributeValue[] | undefined; + ReturnValuesOnConditionCheckFailure?: + | ReturnValuesOnConditionCheckFailure + | undefined; +} +export interface PutRequest { + Item: Record | undefined; +} +export interface KeysAndAttributes { + Keys: Record[] | undefined; + AttributesToGet?: string[] | undefined; + ConsistentRead?: boolean | undefined; + ProjectionExpression?: string | undefined; + ExpressionAttributeNames?: Record | undefined; +} +export interface TransactGetItem { + Get: Get | undefined; +} +export interface BatchExecuteStatementInput { + Statements: BatchStatementRequest[] | undefined; + ReturnConsumedCapacity?: ReturnConsumedCapacity | undefined; +} +export interface ExecuteTransactionInput { + TransactStatements: ParameterizedStatement[] | undefined; + ClientRequestToken?: string | undefined; + ReturnConsumedCapacity?: ReturnConsumedCapacity | undefined; +} +export interface ExecuteTransactionOutput { + Responses?: ItemResponse[] | undefined; + ConsumedCapacity?: ConsumedCapacity[] | undefined; +} +export interface TransactGetItemsOutput { + ConsumedCapacity?: ConsumedCapacity[] | undefined; + Responses?: ItemResponse[] | undefined; +} +export declare class TransactionCanceledException extends __BaseException { + readonly name: "TransactionCanceledException"; + readonly $fault: "client"; + Message?: string | undefined; + CancellationReasons?: CancellationReason[] | undefined; + constructor( + opts: __ExceptionOptionType + ); +} +export interface BatchGetItemInput { + RequestItems: Record | undefined; + ReturnConsumedCapacity?: ReturnConsumedCapacity | undefined; +} +export interface ExpectedAttributeValue { + Value?: AttributeValue | 
undefined; + Exists?: boolean | undefined; + ComparisonOperator?: ComparisonOperator | undefined; + AttributeValueList?: AttributeValue[] | undefined; +} +export interface TransactGetItemsInput { + TransactItems: TransactGetItem[] | undefined; + ReturnConsumedCapacity?: ReturnConsumedCapacity | undefined; +} +export interface TransactWriteItemsOutput { + ConsumedCapacity?: ConsumedCapacity[] | undefined; + ItemCollectionMetrics?: Record | undefined; +} +export interface ConditionCheck { + Key: Record | undefined; + TableName: string | undefined; + ConditionExpression: string | undefined; + ExpressionAttributeNames?: Record | undefined; + ExpressionAttributeValues?: Record | undefined; + ReturnValuesOnConditionCheckFailure?: + | ReturnValuesOnConditionCheckFailure + | undefined; +} +export interface Delete { + Key: Record | undefined; + TableName: string | undefined; + ConditionExpression?: string | undefined; + ExpressionAttributeNames?: Record | undefined; + ExpressionAttributeValues?: Record | undefined; + ReturnValuesOnConditionCheckFailure?: + | ReturnValuesOnConditionCheckFailure + | undefined; +} +export interface Put { + Item: Record | undefined; + TableName: string | undefined; + ConditionExpression?: string | undefined; + ExpressionAttributeNames?: Record | undefined; + ExpressionAttributeValues?: Record | undefined; + ReturnValuesOnConditionCheckFailure?: + | ReturnValuesOnConditionCheckFailure + | undefined; +} +export interface Update { + Key: Record | undefined; + UpdateExpression: string | undefined; + TableName: string | undefined; + ConditionExpression?: string | undefined; + ExpressionAttributeNames?: Record | undefined; + ExpressionAttributeValues?: Record | undefined; + ReturnValuesOnConditionCheckFailure?: + | ReturnValuesOnConditionCheckFailure + | undefined; +} +export interface BatchStatementResponse { + Error?: BatchStatementError | undefined; + TableName?: string | undefined; + Item?: Record | undefined; +} +export interface 
DeleteItemOutput { + Attributes?: Record | undefined; + ConsumedCapacity?: ConsumedCapacity | undefined; + ItemCollectionMetrics?: ItemCollectionMetrics | undefined; +} +export interface ExecuteStatementOutput { + Items?: Record[] | undefined; + NextToken?: string | undefined; + ConsumedCapacity?: ConsumedCapacity | undefined; + LastEvaluatedKey?: Record | undefined; +} +export interface PutItemOutput { + Attributes?: Record | undefined; + ConsumedCapacity?: ConsumedCapacity | undefined; + ItemCollectionMetrics?: ItemCollectionMetrics | undefined; +} +export interface QueryOutput { + Items?: Record[] | undefined; + Count?: number | undefined; + ScannedCount?: number | undefined; + LastEvaluatedKey?: Record | undefined; + ConsumedCapacity?: ConsumedCapacity | undefined; +} +export interface ScanOutput { + Items?: Record[] | undefined; + Count?: number | undefined; + ScannedCount?: number | undefined; + LastEvaluatedKey?: Record | undefined; + ConsumedCapacity?: ConsumedCapacity | undefined; +} +export interface UpdateItemOutput { + Attributes?: Record | undefined; + ConsumedCapacity?: ConsumedCapacity | undefined; + ItemCollectionMetrics?: ItemCollectionMetrics | undefined; +} +export interface WriteRequest { + PutRequest?: PutRequest | undefined; + DeleteRequest?: DeleteRequest | undefined; +} +export interface BatchExecuteStatementOutput { + Responses?: BatchStatementResponse[] | undefined; + ConsumedCapacity?: ConsumedCapacity[] | undefined; +} +export interface BatchGetItemOutput { + Responses?: Record[]> | undefined; + UnprocessedKeys?: Record | undefined; + ConsumedCapacity?: ConsumedCapacity[] | undefined; +} +export interface ScanInput { + TableName: string | undefined; + IndexName?: string | undefined; + AttributesToGet?: string[] | undefined; + Limit?: number | undefined; + Select?: Select | undefined; + ScanFilter?: Record | undefined; + ConditionalOperator?: ConditionalOperator | undefined; + ExclusiveStartKey?: Record | undefined; + 
ReturnConsumedCapacity?: ReturnConsumedCapacity | undefined; + TotalSegments?: number | undefined; + Segment?: number | undefined; + ProjectionExpression?: string | undefined; + FilterExpression?: string | undefined; + ExpressionAttributeNames?: Record | undefined; + ExpressionAttributeValues?: Record | undefined; + ConsistentRead?: boolean | undefined; +} +export interface BatchWriteItemInput { + RequestItems: Record | undefined; + ReturnConsumedCapacity?: ReturnConsumedCapacity | undefined; + ReturnItemCollectionMetrics?: ReturnItemCollectionMetrics | undefined; +} +export interface DeleteItemInput { + TableName: string | undefined; + Key: Record | undefined; + Expected?: Record | undefined; + ConditionalOperator?: ConditionalOperator | undefined; + ReturnValues?: ReturnValue | undefined; + ReturnConsumedCapacity?: ReturnConsumedCapacity | undefined; + ReturnItemCollectionMetrics?: ReturnItemCollectionMetrics | undefined; + ConditionExpression?: string | undefined; + ExpressionAttributeNames?: Record | undefined; + ExpressionAttributeValues?: Record | undefined; + ReturnValuesOnConditionCheckFailure?: + | ReturnValuesOnConditionCheckFailure + | undefined; +} +export interface PutItemInput { + TableName: string | undefined; + Item: Record | undefined; + Expected?: Record | undefined; + ReturnValues?: ReturnValue | undefined; + ReturnConsumedCapacity?: ReturnConsumedCapacity | undefined; + ReturnItemCollectionMetrics?: ReturnItemCollectionMetrics | undefined; + ConditionalOperator?: ConditionalOperator | undefined; + ConditionExpression?: string | undefined; + ExpressionAttributeNames?: Record | undefined; + ExpressionAttributeValues?: Record | undefined; + ReturnValuesOnConditionCheckFailure?: + | ReturnValuesOnConditionCheckFailure + | undefined; +} +export interface QueryInput { + TableName: string | undefined; + IndexName?: string | undefined; + Select?: Select | undefined; + AttributesToGet?: string[] | undefined; + Limit?: number | undefined; + 
ConsistentRead?: boolean | undefined; + KeyConditions?: Record | undefined; + QueryFilter?: Record | undefined; + ConditionalOperator?: ConditionalOperator | undefined; + ScanIndexForward?: boolean | undefined; + ExclusiveStartKey?: Record | undefined; + ReturnConsumedCapacity?: ReturnConsumedCapacity | undefined; + ProjectionExpression?: string | undefined; + FilterExpression?: string | undefined; + KeyConditionExpression?: string | undefined; + ExpressionAttributeNames?: Record | undefined; + ExpressionAttributeValues?: Record | undefined; +} +export interface BatchWriteItemOutput { + UnprocessedItems?: Record | undefined; + ItemCollectionMetrics?: Record | undefined; + ConsumedCapacity?: ConsumedCapacity[] | undefined; +} +export interface UpdateItemInput { + TableName: string | undefined; + Key: Record | undefined; + AttributeUpdates?: Record | undefined; + Expected?: Record | undefined; + ConditionalOperator?: ConditionalOperator | undefined; + ReturnValues?: ReturnValue | undefined; + ReturnConsumedCapacity?: ReturnConsumedCapacity | undefined; + ReturnItemCollectionMetrics?: ReturnItemCollectionMetrics | undefined; + UpdateExpression?: string | undefined; + ConditionExpression?: string | undefined; + ExpressionAttributeNames?: Record | undefined; + ExpressionAttributeValues?: Record | undefined; + ReturnValuesOnConditionCheckFailure?: + | ReturnValuesOnConditionCheckFailure + | undefined; +} +export interface TransactWriteItem { + ConditionCheck?: ConditionCheck | undefined; + Put?: Put | undefined; + Delete?: Delete | undefined; + Update?: Update | undefined; +} +export interface TransactWriteItemsInput { + TransactItems: TransactWriteItem[] | undefined; + ReturnConsumedCapacity?: ReturnConsumedCapacity | undefined; + ReturnItemCollectionMetrics?: ReturnItemCollectionMetrics | undefined; + ClientRequestToken?: string | undefined; +} diff --git 
a/amplify/functions/downloadDocument/node_modules/@aws-sdk/client-dynamodb/dist-types/ts3.4/pagination/Interfaces.d.ts b/amplify/functions/downloadDocument/node_modules/@aws-sdk/client-dynamodb/dist-types/ts3.4/pagination/Interfaces.d.ts new file mode 100644 index 0000000..109e7f1 --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@aws-sdk/client-dynamodb/dist-types/ts3.4/pagination/Interfaces.d.ts @@ -0,0 +1,6 @@ +import { PaginationConfiguration } from "@smithy/types"; +import { DynamoDBClient } from "../DynamoDBClient"; +export interface DynamoDBPaginationConfiguration + extends PaginationConfiguration { + client: DynamoDBClient; +} diff --git a/amplify/functions/downloadDocument/node_modules/@aws-sdk/client-dynamodb/dist-types/ts3.4/pagination/ListContributorInsightsPaginator.d.ts b/amplify/functions/downloadDocument/node_modules/@aws-sdk/client-dynamodb/dist-types/ts3.4/pagination/ListContributorInsightsPaginator.d.ts new file mode 100644 index 0000000..f4961a0 --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@aws-sdk/client-dynamodb/dist-types/ts3.4/pagination/ListContributorInsightsPaginator.d.ts @@ -0,0 +1,11 @@ +import { Paginator } from "@smithy/types"; +import { + ListContributorInsightsCommandInput, + ListContributorInsightsCommandOutput, +} from "../commands/ListContributorInsightsCommand"; +import { DynamoDBPaginationConfiguration } from "./Interfaces"; +export declare const paginateListContributorInsights: ( + config: DynamoDBPaginationConfiguration, + input: ListContributorInsightsCommandInput, + ...rest: any[] +) => Paginator; diff --git a/amplify/functions/downloadDocument/node_modules/@aws-sdk/client-dynamodb/dist-types/ts3.4/pagination/ListExportsPaginator.d.ts b/amplify/functions/downloadDocument/node_modules/@aws-sdk/client-dynamodb/dist-types/ts3.4/pagination/ListExportsPaginator.d.ts new file mode 100644 index 0000000..29a8603 --- /dev/null +++ 
b/amplify/functions/downloadDocument/node_modules/@aws-sdk/client-dynamodb/dist-types/ts3.4/pagination/ListExportsPaginator.d.ts @@ -0,0 +1,11 @@ +import { Paginator } from "@smithy/types"; +import { + ListExportsCommandInput, + ListExportsCommandOutput, +} from "../commands/ListExportsCommand"; +import { DynamoDBPaginationConfiguration } from "./Interfaces"; +export declare const paginateListExports: ( + config: DynamoDBPaginationConfiguration, + input: ListExportsCommandInput, + ...rest: any[] +) => Paginator; diff --git a/amplify/functions/downloadDocument/node_modules/@aws-sdk/client-dynamodb/dist-types/ts3.4/pagination/ListImportsPaginator.d.ts b/amplify/functions/downloadDocument/node_modules/@aws-sdk/client-dynamodb/dist-types/ts3.4/pagination/ListImportsPaginator.d.ts new file mode 100644 index 0000000..f8903e8 --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@aws-sdk/client-dynamodb/dist-types/ts3.4/pagination/ListImportsPaginator.d.ts @@ -0,0 +1,11 @@ +import { Paginator } from "@smithy/types"; +import { + ListImportsCommandInput, + ListImportsCommandOutput, +} from "../commands/ListImportsCommand"; +import { DynamoDBPaginationConfiguration } from "./Interfaces"; +export declare const paginateListImports: ( + config: DynamoDBPaginationConfiguration, + input: ListImportsCommandInput, + ...rest: any[] +) => Paginator; diff --git a/amplify/functions/downloadDocument/node_modules/@aws-sdk/client-dynamodb/dist-types/ts3.4/pagination/ListTablesPaginator.d.ts b/amplify/functions/downloadDocument/node_modules/@aws-sdk/client-dynamodb/dist-types/ts3.4/pagination/ListTablesPaginator.d.ts new file mode 100644 index 0000000..6246d6e --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@aws-sdk/client-dynamodb/dist-types/ts3.4/pagination/ListTablesPaginator.d.ts @@ -0,0 +1,11 @@ +import { Paginator } from "@smithy/types"; +import { + ListTablesCommandInput, + ListTablesCommandOutput, +} from "../commands/ListTablesCommand"; +import { 
DynamoDBPaginationConfiguration } from "./Interfaces"; +export declare const paginateListTables: ( + config: DynamoDBPaginationConfiguration, + input: ListTablesCommandInput, + ...rest: any[] +) => Paginator; diff --git a/amplify/functions/downloadDocument/node_modules/@aws-sdk/client-dynamodb/dist-types/ts3.4/pagination/QueryPaginator.d.ts b/amplify/functions/downloadDocument/node_modules/@aws-sdk/client-dynamodb/dist-types/ts3.4/pagination/QueryPaginator.d.ts new file mode 100644 index 0000000..2ff4976 --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@aws-sdk/client-dynamodb/dist-types/ts3.4/pagination/QueryPaginator.d.ts @@ -0,0 +1,11 @@ +import { Paginator } from "@smithy/types"; +import { + QueryCommandInput, + QueryCommandOutput, +} from "../commands/QueryCommand"; +import { DynamoDBPaginationConfiguration } from "./Interfaces"; +export declare const paginateQuery: ( + config: DynamoDBPaginationConfiguration, + input: QueryCommandInput, + ...rest: any[] +) => Paginator; diff --git a/amplify/functions/downloadDocument/node_modules/@aws-sdk/client-dynamodb/dist-types/ts3.4/pagination/ScanPaginator.d.ts b/amplify/functions/downloadDocument/node_modules/@aws-sdk/client-dynamodb/dist-types/ts3.4/pagination/ScanPaginator.d.ts new file mode 100644 index 0000000..a84dfd1 --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@aws-sdk/client-dynamodb/dist-types/ts3.4/pagination/ScanPaginator.d.ts @@ -0,0 +1,8 @@ +import { Paginator } from "@smithy/types"; +import { ScanCommandInput, ScanCommandOutput } from "../commands/ScanCommand"; +import { DynamoDBPaginationConfiguration } from "./Interfaces"; +export declare const paginateScan: ( + config: DynamoDBPaginationConfiguration, + input: ScanCommandInput, + ...rest: any[] +) => Paginator; diff --git a/amplify/functions/downloadDocument/node_modules/@aws-sdk/client-dynamodb/dist-types/ts3.4/pagination/index.d.ts 
b/amplify/functions/downloadDocument/node_modules/@aws-sdk/client-dynamodb/dist-types/ts3.4/pagination/index.d.ts new file mode 100644 index 0000000..a6dfcd0 --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@aws-sdk/client-dynamodb/dist-types/ts3.4/pagination/index.d.ts @@ -0,0 +1,7 @@ +export * from "./Interfaces"; +export * from "./ListContributorInsightsPaginator"; +export * from "./ListExportsPaginator"; +export * from "./ListImportsPaginator"; +export * from "./ListTablesPaginator"; +export * from "./QueryPaginator"; +export * from "./ScanPaginator"; diff --git a/amplify/functions/downloadDocument/node_modules/@aws-sdk/client-dynamodb/dist-types/ts3.4/protocols/Aws_json1_0.d.ts b/amplify/functions/downloadDocument/node_modules/@aws-sdk/client-dynamodb/dist-types/ts3.4/protocols/Aws_json1_0.d.ts new file mode 100644 index 0000000..83e200c --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@aws-sdk/client-dynamodb/dist-types/ts3.4/protocols/Aws_json1_0.d.ts @@ -0,0 +1,686 @@ +import { + HttpRequest as __HttpRequest, + HttpResponse as __HttpResponse, +} from "@smithy/protocol-http"; +import { SerdeContext as __SerdeContext } from "@smithy/types"; +import { + BatchExecuteStatementCommandInput, + BatchExecuteStatementCommandOutput, +} from "../commands/BatchExecuteStatementCommand"; +import { + BatchGetItemCommandInput, + BatchGetItemCommandOutput, +} from "../commands/BatchGetItemCommand"; +import { + BatchWriteItemCommandInput, + BatchWriteItemCommandOutput, +} from "../commands/BatchWriteItemCommand"; +import { + CreateBackupCommandInput, + CreateBackupCommandOutput, +} from "../commands/CreateBackupCommand"; +import { + CreateGlobalTableCommandInput, + CreateGlobalTableCommandOutput, +} from "../commands/CreateGlobalTableCommand"; +import { + CreateTableCommandInput, + CreateTableCommandOutput, +} from "../commands/CreateTableCommand"; +import { + DeleteBackupCommandInput, + DeleteBackupCommandOutput, +} from 
"../commands/DeleteBackupCommand"; +import { + DeleteItemCommandInput, + DeleteItemCommandOutput, +} from "../commands/DeleteItemCommand"; +import { + DeleteResourcePolicyCommandInput, + DeleteResourcePolicyCommandOutput, +} from "../commands/DeleteResourcePolicyCommand"; +import { + DeleteTableCommandInput, + DeleteTableCommandOutput, +} from "../commands/DeleteTableCommand"; +import { + DescribeBackupCommandInput, + DescribeBackupCommandOutput, +} from "../commands/DescribeBackupCommand"; +import { + DescribeContinuousBackupsCommandInput, + DescribeContinuousBackupsCommandOutput, +} from "../commands/DescribeContinuousBackupsCommand"; +import { + DescribeContributorInsightsCommandInput, + DescribeContributorInsightsCommandOutput, +} from "../commands/DescribeContributorInsightsCommand"; +import { + DescribeEndpointsCommandInput, + DescribeEndpointsCommandOutput, +} from "../commands/DescribeEndpointsCommand"; +import { + DescribeExportCommandInput, + DescribeExportCommandOutput, +} from "../commands/DescribeExportCommand"; +import { + DescribeGlobalTableCommandInput, + DescribeGlobalTableCommandOutput, +} from "../commands/DescribeGlobalTableCommand"; +import { + DescribeGlobalTableSettingsCommandInput, + DescribeGlobalTableSettingsCommandOutput, +} from "../commands/DescribeGlobalTableSettingsCommand"; +import { + DescribeImportCommandInput, + DescribeImportCommandOutput, +} from "../commands/DescribeImportCommand"; +import { + DescribeKinesisStreamingDestinationCommandInput, + DescribeKinesisStreamingDestinationCommandOutput, +} from "../commands/DescribeKinesisStreamingDestinationCommand"; +import { + DescribeLimitsCommandInput, + DescribeLimitsCommandOutput, +} from "../commands/DescribeLimitsCommand"; +import { + DescribeTableCommandInput, + DescribeTableCommandOutput, +} from "../commands/DescribeTableCommand"; +import { + DescribeTableReplicaAutoScalingCommandInput, + DescribeTableReplicaAutoScalingCommandOutput, +} from 
"../commands/DescribeTableReplicaAutoScalingCommand"; +import { + DescribeTimeToLiveCommandInput, + DescribeTimeToLiveCommandOutput, +} from "../commands/DescribeTimeToLiveCommand"; +import { + DisableKinesisStreamingDestinationCommandInput, + DisableKinesisStreamingDestinationCommandOutput, +} from "../commands/DisableKinesisStreamingDestinationCommand"; +import { + EnableKinesisStreamingDestinationCommandInput, + EnableKinesisStreamingDestinationCommandOutput, +} from "../commands/EnableKinesisStreamingDestinationCommand"; +import { + ExecuteStatementCommandInput, + ExecuteStatementCommandOutput, +} from "../commands/ExecuteStatementCommand"; +import { + ExecuteTransactionCommandInput, + ExecuteTransactionCommandOutput, +} from "../commands/ExecuteTransactionCommand"; +import { + ExportTableToPointInTimeCommandInput, + ExportTableToPointInTimeCommandOutput, +} from "../commands/ExportTableToPointInTimeCommand"; +import { + GetItemCommandInput, + GetItemCommandOutput, +} from "../commands/GetItemCommand"; +import { + GetResourcePolicyCommandInput, + GetResourcePolicyCommandOutput, +} from "../commands/GetResourcePolicyCommand"; +import { + ImportTableCommandInput, + ImportTableCommandOutput, +} from "../commands/ImportTableCommand"; +import { + ListBackupsCommandInput, + ListBackupsCommandOutput, +} from "../commands/ListBackupsCommand"; +import { + ListContributorInsightsCommandInput, + ListContributorInsightsCommandOutput, +} from "../commands/ListContributorInsightsCommand"; +import { + ListExportsCommandInput, + ListExportsCommandOutput, +} from "../commands/ListExportsCommand"; +import { + ListGlobalTablesCommandInput, + ListGlobalTablesCommandOutput, +} from "../commands/ListGlobalTablesCommand"; +import { + ListImportsCommandInput, + ListImportsCommandOutput, +} from "../commands/ListImportsCommand"; +import { + ListTablesCommandInput, + ListTablesCommandOutput, +} from "../commands/ListTablesCommand"; +import { + ListTagsOfResourceCommandInput, + 
ListTagsOfResourceCommandOutput, +} from "../commands/ListTagsOfResourceCommand"; +import { + PutItemCommandInput, + PutItemCommandOutput, +} from "../commands/PutItemCommand"; +import { + PutResourcePolicyCommandInput, + PutResourcePolicyCommandOutput, +} from "../commands/PutResourcePolicyCommand"; +import { + QueryCommandInput, + QueryCommandOutput, +} from "../commands/QueryCommand"; +import { + RestoreTableFromBackupCommandInput, + RestoreTableFromBackupCommandOutput, +} from "../commands/RestoreTableFromBackupCommand"; +import { + RestoreTableToPointInTimeCommandInput, + RestoreTableToPointInTimeCommandOutput, +} from "../commands/RestoreTableToPointInTimeCommand"; +import { ScanCommandInput, ScanCommandOutput } from "../commands/ScanCommand"; +import { + TagResourceCommandInput, + TagResourceCommandOutput, +} from "../commands/TagResourceCommand"; +import { + TransactGetItemsCommandInput, + TransactGetItemsCommandOutput, +} from "../commands/TransactGetItemsCommand"; +import { + TransactWriteItemsCommandInput, + TransactWriteItemsCommandOutput, +} from "../commands/TransactWriteItemsCommand"; +import { + UntagResourceCommandInput, + UntagResourceCommandOutput, +} from "../commands/UntagResourceCommand"; +import { + UpdateContinuousBackupsCommandInput, + UpdateContinuousBackupsCommandOutput, +} from "../commands/UpdateContinuousBackupsCommand"; +import { + UpdateContributorInsightsCommandInput, + UpdateContributorInsightsCommandOutput, +} from "../commands/UpdateContributorInsightsCommand"; +import { + UpdateGlobalTableCommandInput, + UpdateGlobalTableCommandOutput, +} from "../commands/UpdateGlobalTableCommand"; +import { + UpdateGlobalTableSettingsCommandInput, + UpdateGlobalTableSettingsCommandOutput, +} from "../commands/UpdateGlobalTableSettingsCommand"; +import { + UpdateItemCommandInput, + UpdateItemCommandOutput, +} from "../commands/UpdateItemCommand"; +import { + UpdateKinesisStreamingDestinationCommandInput, + 
UpdateKinesisStreamingDestinationCommandOutput, +} from "../commands/UpdateKinesisStreamingDestinationCommand"; +import { + UpdateTableCommandInput, + UpdateTableCommandOutput, +} from "../commands/UpdateTableCommand"; +import { + UpdateTableReplicaAutoScalingCommandInput, + UpdateTableReplicaAutoScalingCommandOutput, +} from "../commands/UpdateTableReplicaAutoScalingCommand"; +import { + UpdateTimeToLiveCommandInput, + UpdateTimeToLiveCommandOutput, +} from "../commands/UpdateTimeToLiveCommand"; +export declare const se_BatchExecuteStatementCommand: ( + input: BatchExecuteStatementCommandInput, + context: __SerdeContext +) => Promise<__HttpRequest>; +export declare const se_BatchGetItemCommand: ( + input: BatchGetItemCommandInput, + context: __SerdeContext +) => Promise<__HttpRequest>; +export declare const se_BatchWriteItemCommand: ( + input: BatchWriteItemCommandInput, + context: __SerdeContext +) => Promise<__HttpRequest>; +export declare const se_CreateBackupCommand: ( + input: CreateBackupCommandInput, + context: __SerdeContext +) => Promise<__HttpRequest>; +export declare const se_CreateGlobalTableCommand: ( + input: CreateGlobalTableCommandInput, + context: __SerdeContext +) => Promise<__HttpRequest>; +export declare const se_CreateTableCommand: ( + input: CreateTableCommandInput, + context: __SerdeContext +) => Promise<__HttpRequest>; +export declare const se_DeleteBackupCommand: ( + input: DeleteBackupCommandInput, + context: __SerdeContext +) => Promise<__HttpRequest>; +export declare const se_DeleteItemCommand: ( + input: DeleteItemCommandInput, + context: __SerdeContext +) => Promise<__HttpRequest>; +export declare const se_DeleteResourcePolicyCommand: ( + input: DeleteResourcePolicyCommandInput, + context: __SerdeContext +) => Promise<__HttpRequest>; +export declare const se_DeleteTableCommand: ( + input: DeleteTableCommandInput, + context: __SerdeContext +) => Promise<__HttpRequest>; +export declare const se_DescribeBackupCommand: ( + input: 
DescribeBackupCommandInput, + context: __SerdeContext +) => Promise<__HttpRequest>; +export declare const se_DescribeContinuousBackupsCommand: ( + input: DescribeContinuousBackupsCommandInput, + context: __SerdeContext +) => Promise<__HttpRequest>; +export declare const se_DescribeContributorInsightsCommand: ( + input: DescribeContributorInsightsCommandInput, + context: __SerdeContext +) => Promise<__HttpRequest>; +export declare const se_DescribeEndpointsCommand: ( + input: DescribeEndpointsCommandInput, + context: __SerdeContext +) => Promise<__HttpRequest>; +export declare const se_DescribeExportCommand: ( + input: DescribeExportCommandInput, + context: __SerdeContext +) => Promise<__HttpRequest>; +export declare const se_DescribeGlobalTableCommand: ( + input: DescribeGlobalTableCommandInput, + context: __SerdeContext +) => Promise<__HttpRequest>; +export declare const se_DescribeGlobalTableSettingsCommand: ( + input: DescribeGlobalTableSettingsCommandInput, + context: __SerdeContext +) => Promise<__HttpRequest>; +export declare const se_DescribeImportCommand: ( + input: DescribeImportCommandInput, + context: __SerdeContext +) => Promise<__HttpRequest>; +export declare const se_DescribeKinesisStreamingDestinationCommand: ( + input: DescribeKinesisStreamingDestinationCommandInput, + context: __SerdeContext +) => Promise<__HttpRequest>; +export declare const se_DescribeLimitsCommand: ( + input: DescribeLimitsCommandInput, + context: __SerdeContext +) => Promise<__HttpRequest>; +export declare const se_DescribeTableCommand: ( + input: DescribeTableCommandInput, + context: __SerdeContext +) => Promise<__HttpRequest>; +export declare const se_DescribeTableReplicaAutoScalingCommand: ( + input: DescribeTableReplicaAutoScalingCommandInput, + context: __SerdeContext +) => Promise<__HttpRequest>; +export declare const se_DescribeTimeToLiveCommand: ( + input: DescribeTimeToLiveCommandInput, + context: __SerdeContext +) => Promise<__HttpRequest>; +export declare const 
se_DisableKinesisStreamingDestinationCommand: ( + input: DisableKinesisStreamingDestinationCommandInput, + context: __SerdeContext +) => Promise<__HttpRequest>; +export declare const se_EnableKinesisStreamingDestinationCommand: ( + input: EnableKinesisStreamingDestinationCommandInput, + context: __SerdeContext +) => Promise<__HttpRequest>; +export declare const se_ExecuteStatementCommand: ( + input: ExecuteStatementCommandInput, + context: __SerdeContext +) => Promise<__HttpRequest>; +export declare const se_ExecuteTransactionCommand: ( + input: ExecuteTransactionCommandInput, + context: __SerdeContext +) => Promise<__HttpRequest>; +export declare const se_ExportTableToPointInTimeCommand: ( + input: ExportTableToPointInTimeCommandInput, + context: __SerdeContext +) => Promise<__HttpRequest>; +export declare const se_GetItemCommand: ( + input: GetItemCommandInput, + context: __SerdeContext +) => Promise<__HttpRequest>; +export declare const se_GetResourcePolicyCommand: ( + input: GetResourcePolicyCommandInput, + context: __SerdeContext +) => Promise<__HttpRequest>; +export declare const se_ImportTableCommand: ( + input: ImportTableCommandInput, + context: __SerdeContext +) => Promise<__HttpRequest>; +export declare const se_ListBackupsCommand: ( + input: ListBackupsCommandInput, + context: __SerdeContext +) => Promise<__HttpRequest>; +export declare const se_ListContributorInsightsCommand: ( + input: ListContributorInsightsCommandInput, + context: __SerdeContext +) => Promise<__HttpRequest>; +export declare const se_ListExportsCommand: ( + input: ListExportsCommandInput, + context: __SerdeContext +) => Promise<__HttpRequest>; +export declare const se_ListGlobalTablesCommand: ( + input: ListGlobalTablesCommandInput, + context: __SerdeContext +) => Promise<__HttpRequest>; +export declare const se_ListImportsCommand: ( + input: ListImportsCommandInput, + context: __SerdeContext +) => Promise<__HttpRequest>; +export declare const se_ListTablesCommand: ( + input: 
ListTablesCommandInput, + context: __SerdeContext +) => Promise<__HttpRequest>; +export declare const se_ListTagsOfResourceCommand: ( + input: ListTagsOfResourceCommandInput, + context: __SerdeContext +) => Promise<__HttpRequest>; +export declare const se_PutItemCommand: ( + input: PutItemCommandInput, + context: __SerdeContext +) => Promise<__HttpRequest>; +export declare const se_PutResourcePolicyCommand: ( + input: PutResourcePolicyCommandInput, + context: __SerdeContext +) => Promise<__HttpRequest>; +export declare const se_QueryCommand: ( + input: QueryCommandInput, + context: __SerdeContext +) => Promise<__HttpRequest>; +export declare const se_RestoreTableFromBackupCommand: ( + input: RestoreTableFromBackupCommandInput, + context: __SerdeContext +) => Promise<__HttpRequest>; +export declare const se_RestoreTableToPointInTimeCommand: ( + input: RestoreTableToPointInTimeCommandInput, + context: __SerdeContext +) => Promise<__HttpRequest>; +export declare const se_ScanCommand: ( + input: ScanCommandInput, + context: __SerdeContext +) => Promise<__HttpRequest>; +export declare const se_TagResourceCommand: ( + input: TagResourceCommandInput, + context: __SerdeContext +) => Promise<__HttpRequest>; +export declare const se_TransactGetItemsCommand: ( + input: TransactGetItemsCommandInput, + context: __SerdeContext +) => Promise<__HttpRequest>; +export declare const se_TransactWriteItemsCommand: ( + input: TransactWriteItemsCommandInput, + context: __SerdeContext +) => Promise<__HttpRequest>; +export declare const se_UntagResourceCommand: ( + input: UntagResourceCommandInput, + context: __SerdeContext +) => Promise<__HttpRequest>; +export declare const se_UpdateContinuousBackupsCommand: ( + input: UpdateContinuousBackupsCommandInput, + context: __SerdeContext +) => Promise<__HttpRequest>; +export declare const se_UpdateContributorInsightsCommand: ( + input: UpdateContributorInsightsCommandInput, + context: __SerdeContext +) => Promise<__HttpRequest>; +export declare 
const se_UpdateGlobalTableCommand: ( + input: UpdateGlobalTableCommandInput, + context: __SerdeContext +) => Promise<__HttpRequest>; +export declare const se_UpdateGlobalTableSettingsCommand: ( + input: UpdateGlobalTableSettingsCommandInput, + context: __SerdeContext +) => Promise<__HttpRequest>; +export declare const se_UpdateItemCommand: ( + input: UpdateItemCommandInput, + context: __SerdeContext +) => Promise<__HttpRequest>; +export declare const se_UpdateKinesisStreamingDestinationCommand: ( + input: UpdateKinesisStreamingDestinationCommandInput, + context: __SerdeContext +) => Promise<__HttpRequest>; +export declare const se_UpdateTableCommand: ( + input: UpdateTableCommandInput, + context: __SerdeContext +) => Promise<__HttpRequest>; +export declare const se_UpdateTableReplicaAutoScalingCommand: ( + input: UpdateTableReplicaAutoScalingCommandInput, + context: __SerdeContext +) => Promise<__HttpRequest>; +export declare const se_UpdateTimeToLiveCommand: ( + input: UpdateTimeToLiveCommandInput, + context: __SerdeContext +) => Promise<__HttpRequest>; +export declare const de_BatchExecuteStatementCommand: ( + output: __HttpResponse, + context: __SerdeContext +) => Promise; +export declare const de_BatchGetItemCommand: ( + output: __HttpResponse, + context: __SerdeContext +) => Promise; +export declare const de_BatchWriteItemCommand: ( + output: __HttpResponse, + context: __SerdeContext +) => Promise; +export declare const de_CreateBackupCommand: ( + output: __HttpResponse, + context: __SerdeContext +) => Promise; +export declare const de_CreateGlobalTableCommand: ( + output: __HttpResponse, + context: __SerdeContext +) => Promise; +export declare const de_CreateTableCommand: ( + output: __HttpResponse, + context: __SerdeContext +) => Promise; +export declare const de_DeleteBackupCommand: ( + output: __HttpResponse, + context: __SerdeContext +) => Promise; +export declare const de_DeleteItemCommand: ( + output: __HttpResponse, + context: __SerdeContext +) => 
Promise; +export declare const de_DeleteResourcePolicyCommand: ( + output: __HttpResponse, + context: __SerdeContext +) => Promise; +export declare const de_DeleteTableCommand: ( + output: __HttpResponse, + context: __SerdeContext +) => Promise; +export declare const de_DescribeBackupCommand: ( + output: __HttpResponse, + context: __SerdeContext +) => Promise; +export declare const de_DescribeContinuousBackupsCommand: ( + output: __HttpResponse, + context: __SerdeContext +) => Promise; +export declare const de_DescribeContributorInsightsCommand: ( + output: __HttpResponse, + context: __SerdeContext +) => Promise; +export declare const de_DescribeEndpointsCommand: ( + output: __HttpResponse, + context: __SerdeContext +) => Promise; +export declare const de_DescribeExportCommand: ( + output: __HttpResponse, + context: __SerdeContext +) => Promise; +export declare const de_DescribeGlobalTableCommand: ( + output: __HttpResponse, + context: __SerdeContext +) => Promise; +export declare const de_DescribeGlobalTableSettingsCommand: ( + output: __HttpResponse, + context: __SerdeContext +) => Promise; +export declare const de_DescribeImportCommand: ( + output: __HttpResponse, + context: __SerdeContext +) => Promise; +export declare const de_DescribeKinesisStreamingDestinationCommand: ( + output: __HttpResponse, + context: __SerdeContext +) => Promise; +export declare const de_DescribeLimitsCommand: ( + output: __HttpResponse, + context: __SerdeContext +) => Promise; +export declare const de_DescribeTableCommand: ( + output: __HttpResponse, + context: __SerdeContext +) => Promise; +export declare const de_DescribeTableReplicaAutoScalingCommand: ( + output: __HttpResponse, + context: __SerdeContext +) => Promise; +export declare const de_DescribeTimeToLiveCommand: ( + output: __HttpResponse, + context: __SerdeContext +) => Promise; +export declare const de_DisableKinesisStreamingDestinationCommand: ( + output: __HttpResponse, + context: __SerdeContext +) => Promise; +export 
declare const de_EnableKinesisStreamingDestinationCommand: ( + output: __HttpResponse, + context: __SerdeContext +) => Promise; +export declare const de_ExecuteStatementCommand: ( + output: __HttpResponse, + context: __SerdeContext +) => Promise; +export declare const de_ExecuteTransactionCommand: ( + output: __HttpResponse, + context: __SerdeContext +) => Promise; +export declare const de_ExportTableToPointInTimeCommand: ( + output: __HttpResponse, + context: __SerdeContext +) => Promise; +export declare const de_GetItemCommand: ( + output: __HttpResponse, + context: __SerdeContext +) => Promise; +export declare const de_GetResourcePolicyCommand: ( + output: __HttpResponse, + context: __SerdeContext +) => Promise; +export declare const de_ImportTableCommand: ( + output: __HttpResponse, + context: __SerdeContext +) => Promise; +export declare const de_ListBackupsCommand: ( + output: __HttpResponse, + context: __SerdeContext +) => Promise; +export declare const de_ListContributorInsightsCommand: ( + output: __HttpResponse, + context: __SerdeContext +) => Promise; +export declare const de_ListExportsCommand: ( + output: __HttpResponse, + context: __SerdeContext +) => Promise; +export declare const de_ListGlobalTablesCommand: ( + output: __HttpResponse, + context: __SerdeContext +) => Promise; +export declare const de_ListImportsCommand: ( + output: __HttpResponse, + context: __SerdeContext +) => Promise; +export declare const de_ListTablesCommand: ( + output: __HttpResponse, + context: __SerdeContext +) => Promise; +export declare const de_ListTagsOfResourceCommand: ( + output: __HttpResponse, + context: __SerdeContext +) => Promise; +export declare const de_PutItemCommand: ( + output: __HttpResponse, + context: __SerdeContext +) => Promise; +export declare const de_PutResourcePolicyCommand: ( + output: __HttpResponse, + context: __SerdeContext +) => Promise; +export declare const de_QueryCommand: ( + output: __HttpResponse, + context: __SerdeContext +) => Promise; 
+export declare const de_RestoreTableFromBackupCommand: ( + output: __HttpResponse, + context: __SerdeContext +) => Promise; +export declare const de_RestoreTableToPointInTimeCommand: ( + output: __HttpResponse, + context: __SerdeContext +) => Promise; +export declare const de_ScanCommand: ( + output: __HttpResponse, + context: __SerdeContext +) => Promise; +export declare const de_TagResourceCommand: ( + output: __HttpResponse, + context: __SerdeContext +) => Promise; +export declare const de_TransactGetItemsCommand: ( + output: __HttpResponse, + context: __SerdeContext +) => Promise; +export declare const de_TransactWriteItemsCommand: ( + output: __HttpResponse, + context: __SerdeContext +) => Promise; +export declare const de_UntagResourceCommand: ( + output: __HttpResponse, + context: __SerdeContext +) => Promise; +export declare const de_UpdateContinuousBackupsCommand: ( + output: __HttpResponse, + context: __SerdeContext +) => Promise; +export declare const de_UpdateContributorInsightsCommand: ( + output: __HttpResponse, + context: __SerdeContext +) => Promise; +export declare const de_UpdateGlobalTableCommand: ( + output: __HttpResponse, + context: __SerdeContext +) => Promise; +export declare const de_UpdateGlobalTableSettingsCommand: ( + output: __HttpResponse, + context: __SerdeContext +) => Promise; +export declare const de_UpdateItemCommand: ( + output: __HttpResponse, + context: __SerdeContext +) => Promise; +export declare const de_UpdateKinesisStreamingDestinationCommand: ( + output: __HttpResponse, + context: __SerdeContext +) => Promise; +export declare const de_UpdateTableCommand: ( + output: __HttpResponse, + context: __SerdeContext +) => Promise; +export declare const de_UpdateTableReplicaAutoScalingCommand: ( + output: __HttpResponse, + context: __SerdeContext +) => Promise; +export declare const de_UpdateTimeToLiveCommand: ( + output: __HttpResponse, + context: __SerdeContext +) => Promise; diff --git 
a/amplify/functions/downloadDocument/node_modules/@aws-sdk/client-dynamodb/dist-types/ts3.4/runtimeConfig.browser.d.ts b/amplify/functions/downloadDocument/node_modules/@aws-sdk/client-dynamodb/dist-types/ts3.4/runtimeConfig.browser.d.ts new file mode 100644 index 0000000..80f326c --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@aws-sdk/client-dynamodb/dist-types/ts3.4/runtimeConfig.browser.d.ts @@ -0,0 +1,111 @@ +import { FetchHttpHandler as RequestHandler } from "@smithy/fetch-http-handler"; +import { DynamoDBClientConfig } from "./DynamoDBClient"; +export declare const getRuntimeConfig: (config: DynamoDBClientConfig) => { + runtime: string; + defaultsMode: import("@smithy/types").Provider< + import("@smithy/smithy-client").ResolvedDefaultsMode + >; + accountIdEndpointMode: + | "disabled" + | "preferred" + | "required" + | (() => Promise); + bodyLengthChecker: import("@smithy/types").BodyLengthCalculator; + credentialDefaultProvider: + | ((input: any) => import("@smithy/types").AwsCredentialIdentityProvider) + | (( + _: unknown + ) => () => Promise); + defaultUserAgentProvider: ( + config?: + | import("@aws-sdk/util-user-agent-browser").PreviouslyResolved + | undefined + ) => Promise; + endpointDiscoveryEnabledProvider: import("@smithy/types").Provider< + boolean | undefined + >; + maxAttempts: number | import("@smithy/types").Provider; + region: string | import("@smithy/types").Provider; + requestHandler: + | import("@smithy/protocol-http").HttpHandler + | RequestHandler; + retryMode: string | import("@smithy/types").Provider; + sha256: import("@smithy/types").HashConstructor; + streamCollector: import("@smithy/types").StreamCollector; + useDualstackEndpoint: boolean | import("@smithy/types").Provider; + useFipsEndpoint: boolean | import("@smithy/types").Provider; + apiVersion: string; + cacheMiddleware?: boolean | undefined; + urlParser: import("@smithy/types").UrlParser; + base64Decoder: import("@smithy/types").Decoder; + base64Encoder: 
(_input: string | Uint8Array) => string; + utf8Decoder: import("@smithy/types").Decoder; + utf8Encoder: (input: string | Uint8Array) => string; + disableHostPrefix: boolean; + serviceId: string; + profile?: string | undefined; + logger: import("@smithy/types").Logger; + extensions: import("./runtimeExtensions").RuntimeExtension[]; + customUserAgent?: string | import("@smithy/types").UserAgent | undefined; + userAgentAppId?: + | string + | import("@smithy/types").Provider + | undefined; + retryStrategy?: + | import("@smithy/types").RetryStrategy + | import("@smithy/types").RetryStrategyV2 + | undefined; + endpoint?: + | (( + | string + | import("@smithy/types").Endpoint + | import("@smithy/types").Provider + | import("@smithy/types").EndpointV2 + | import("@smithy/types").Provider + ) & + ( + | string + | import("@smithy/types").Provider + | import("@smithy/types").Endpoint + | import("@smithy/types").Provider + | import("@smithy/types").EndpointV2 + | import("@smithy/types").Provider + )) + | undefined; + endpointProvider: ( + endpointParams: import("./endpoint/EndpointParameters").EndpointParameters, + context?: { + logger?: import("@smithy/types").Logger | undefined; + } + ) => import("@smithy/types").EndpointV2; + tls?: boolean | undefined; + serviceConfiguredEndpoint?: undefined; + authSchemePreference?: + | string[] + | import("@smithy/types").Provider + | undefined; + httpAuthSchemes: import("@smithy/types").HttpAuthScheme[]; + httpAuthSchemeProvider: import("./auth/httpAuthSchemeProvider").DynamoDBHttpAuthSchemeProvider; + credentials?: + | import("@smithy/types").AwsCredentialIdentity + | import("@smithy/types").AwsCredentialIdentityProvider + | undefined; + signer?: + | import("@smithy/types").RequestSigner + | (( + authScheme?: import("@smithy/types").AuthScheme | undefined + ) => Promise) + | undefined; + signingEscapePath?: boolean | undefined; + systemClockOffset?: number | undefined; + signingRegion?: string | undefined; + signerConstructor?: + | (new 
( + options: import("@smithy/signature-v4").SignatureV4Init & + import("@smithy/signature-v4").SignatureV4CryptoInit + ) => import("@smithy/types").RequestSigner) + | undefined; + endpointCacheSize?: number | undefined; + endpointDiscoveryEnabled?: boolean | undefined; + accountId?: string | import("@smithy/types").Provider | undefined; +}; diff --git a/amplify/functions/downloadDocument/node_modules/@aws-sdk/client-dynamodb/dist-types/ts3.4/runtimeConfig.d.ts b/amplify/functions/downloadDocument/node_modules/@aws-sdk/client-dynamodb/dist-types/ts3.4/runtimeConfig.d.ts new file mode 100644 index 0000000..32d5489 --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@aws-sdk/client-dynamodb/dist-types/ts3.4/runtimeConfig.d.ts @@ -0,0 +1,111 @@ +import { NodeHttpHandler as RequestHandler } from "@smithy/node-http-handler"; +import { DynamoDBClientConfig } from "./DynamoDBClient"; +export declare const getRuntimeConfig: (config: DynamoDBClientConfig) => { + runtime: string; + defaultsMode: import("@smithy/types").Provider< + import("@smithy/smithy-client").ResolvedDefaultsMode + >; + accountIdEndpointMode: + | "disabled" + | "preferred" + | "required" + | import("@smithy/types").Provider< + import("@aws-sdk/core/account-id-endpoint").AccountIdEndpointMode + >; + authSchemePreference: string[] | import("@smithy/types").Provider; + bodyLengthChecker: import("@smithy/types").BodyLengthCalculator; + credentialDefaultProvider: + | ((input: any) => import("@smithy/types").AwsCredentialIdentityProvider) + | (( + init?: + | import("@aws-sdk/credential-provider-node").DefaultProviderInit + | undefined + ) => import("@smithy/types").MemoizedProvider< + import("@smithy/types").AwsCredentialIdentity + >); + defaultUserAgentProvider: ( + config?: + | import("@aws-sdk/util-user-agent-node").PreviouslyResolved + | undefined + ) => Promise; + endpointDiscoveryEnabledProvider: import("@smithy/types").Provider< + boolean | undefined + >; + maxAttempts: number | 
import("@smithy/types").Provider; + region: string | import("@smithy/types").Provider; + requestHandler: + | RequestHandler + | import("@smithy/protocol-http").HttpHandler; + retryMode: string | import("@smithy/types").Provider; + sha256: import("@smithy/types").HashConstructor; + streamCollector: import("@smithy/types").StreamCollector; + useDualstackEndpoint: boolean | import("@smithy/types").Provider; + useFipsEndpoint: boolean | import("@smithy/types").Provider; + userAgentAppId: string | import("@smithy/types").Provider; + apiVersion: string; + cacheMiddleware?: boolean | undefined; + urlParser: import("@smithy/types").UrlParser; + base64Decoder: import("@smithy/types").Decoder; + base64Encoder: (_input: string | Uint8Array) => string; + utf8Decoder: import("@smithy/types").Decoder; + utf8Encoder: (input: string | Uint8Array) => string; + disableHostPrefix: boolean; + serviceId: string; + profile?: string | undefined; + logger: import("@smithy/types").Logger; + extensions: import("./runtimeExtensions").RuntimeExtension[]; + customUserAgent?: string | import("@smithy/types").UserAgent | undefined; + retryStrategy?: + | import("@smithy/types").RetryStrategy + | import("@smithy/types").RetryStrategyV2 + | undefined; + endpoint?: + | (( + | string + | import("@smithy/types").Endpoint + | import("@smithy/types").Provider + | import("@smithy/types").EndpointV2 + | import("@smithy/types").Provider + ) & + ( + | string + | import("@smithy/types").Provider + | import("@smithy/types").Endpoint + | import("@smithy/types").Provider + | import("@smithy/types").EndpointV2 + | import("@smithy/types").Provider + )) + | undefined; + endpointProvider: ( + endpointParams: import("./endpoint/EndpointParameters").EndpointParameters, + context?: { + logger?: import("@smithy/types").Logger | undefined; + } + ) => import("@smithy/types").EndpointV2; + tls?: boolean | undefined; + serviceConfiguredEndpoint?: undefined; + httpAuthSchemes: import("@smithy/types").HttpAuthScheme[]; + 
httpAuthSchemeProvider: import("./auth/httpAuthSchemeProvider").DynamoDBHttpAuthSchemeProvider; + credentials?: + | import("@smithy/types").AwsCredentialIdentity + | import("@smithy/types").AwsCredentialIdentityProvider + | undefined; + signer?: + | import("@smithy/types").RequestSigner + | (( + authScheme?: import("@smithy/types").AuthScheme | undefined + ) => Promise) + | undefined; + signingEscapePath?: boolean | undefined; + systemClockOffset?: number | undefined; + signingRegion?: string | undefined; + signerConstructor?: + | (new ( + options: import("@smithy/signature-v4").SignatureV4Init & + import("@smithy/signature-v4").SignatureV4CryptoInit + ) => import("@smithy/types").RequestSigner) + | undefined; + endpointCacheSize?: number | undefined; + endpointDiscoveryEnabled?: boolean | undefined; + accountId?: string | import("@smithy/types").Provider | undefined; +}; diff --git a/amplify/functions/downloadDocument/node_modules/@aws-sdk/client-dynamodb/dist-types/ts3.4/runtimeConfig.native.d.ts b/amplify/functions/downloadDocument/node_modules/@aws-sdk/client-dynamodb/dist-types/ts3.4/runtimeConfig.native.d.ts new file mode 100644 index 0000000..f3583eb --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@aws-sdk/client-dynamodb/dist-types/ts3.4/runtimeConfig.native.d.ts @@ -0,0 +1,115 @@ +import { DynamoDBClientConfig } from "./DynamoDBClient"; +export declare const getRuntimeConfig: (config: DynamoDBClientConfig) => { + runtime: string; + sha256: import("@smithy/types").HashConstructor; + requestHandler: + | import("@smithy/types").NodeHttpHandlerOptions + | import("@smithy/types").FetchHttpHandlerOptions + | Record + | import("@smithy/protocol-http").HttpHandler + | import("@smithy/fetch-http-handler").FetchHttpHandler; + apiVersion: string; + cacheMiddleware?: boolean | undefined; + urlParser: import("@smithy/types").UrlParser; + bodyLengthChecker: import("@smithy/types").BodyLengthCalculator; + streamCollector: 
import("@smithy/types").StreamCollector; + base64Decoder: import("@smithy/types").Decoder; + base64Encoder: (_input: string | Uint8Array) => string; + utf8Decoder: import("@smithy/types").Decoder; + utf8Encoder: (input: string | Uint8Array) => string; + disableHostPrefix: boolean; + serviceId: string; + useDualstackEndpoint: boolean | import("@smithy/types").Provider; + useFipsEndpoint: boolean | import("@smithy/types").Provider; + region: string | import("@smithy/types").Provider; + profile?: string | undefined; + accountIdEndpointMode: + | "disabled" + | "preferred" + | "required" + | (() => Promise); + defaultUserAgentProvider: ( + config?: + | import("@aws-sdk/util-user-agent-browser").PreviouslyResolved + | undefined + ) => Promise; + credentialDefaultProvider: + | ((input: any) => import("@smithy/types").AwsCredentialIdentityProvider) + | (( + _: unknown + ) => () => Promise); + maxAttempts: number | import("@smithy/types").Provider; + retryMode: string | import("@smithy/types").Provider; + logger: import("@smithy/types").Logger; + extensions: import("./runtimeExtensions").RuntimeExtension[]; + defaultsMode: + | import("@smithy/smithy-client").DefaultsMode + | import("@smithy/types").Provider< + import("@smithy/smithy-client").DefaultsMode + >; + endpointDiscoveryEnabledProvider: import("@smithy/types").Provider< + boolean | undefined + >; + customUserAgent?: string | import("@smithy/types").UserAgent | undefined; + userAgentAppId?: + | string + | import("@smithy/types").Provider + | undefined; + retryStrategy?: + | import("@smithy/types").RetryStrategy + | import("@smithy/types").RetryStrategyV2 + | undefined; + endpoint?: + | (( + | string + | import("@smithy/types").Endpoint + | import("@smithy/types").Provider + | import("@smithy/types").EndpointV2 + | import("@smithy/types").Provider + ) & + ( + | string + | import("@smithy/types").Provider + | import("@smithy/types").Endpoint + | import("@smithy/types").Provider + | import("@smithy/types").EndpointV2 + 
| import("@smithy/types").Provider + )) + | undefined; + endpointProvider: ( + endpointParams: import("./endpoint/EndpointParameters").EndpointParameters, + context?: { + logger?: import("@smithy/types").Logger | undefined; + } + ) => import("@smithy/types").EndpointV2; + tls?: boolean | undefined; + serviceConfiguredEndpoint?: undefined; + authSchemePreference?: + | string[] + | import("@smithy/types").Provider + | undefined; + httpAuthSchemes: import("@smithy/types").HttpAuthScheme[]; + httpAuthSchemeProvider: import("./auth/httpAuthSchemeProvider").DynamoDBHttpAuthSchemeProvider; + credentials?: + | import("@smithy/types").AwsCredentialIdentity + | import("@smithy/types").AwsCredentialIdentityProvider + | undefined; + signer?: + | import("@smithy/types").RequestSigner + | (( + authScheme?: import("@smithy/types").AuthScheme | undefined + ) => Promise) + | undefined; + signingEscapePath?: boolean | undefined; + systemClockOffset?: number | undefined; + signingRegion?: string | undefined; + signerConstructor?: + | (new ( + options: import("@smithy/signature-v4").SignatureV4Init & + import("@smithy/signature-v4").SignatureV4CryptoInit + ) => import("@smithy/types").RequestSigner) + | undefined; + endpointCacheSize?: number | undefined; + endpointDiscoveryEnabled?: boolean | undefined; + accountId?: string | import("@smithy/types").Provider | undefined; +}; diff --git a/amplify/functions/downloadDocument/node_modules/@aws-sdk/client-dynamodb/dist-types/ts3.4/runtimeConfig.shared.d.ts b/amplify/functions/downloadDocument/node_modules/@aws-sdk/client-dynamodb/dist-types/ts3.4/runtimeConfig.shared.d.ts new file mode 100644 index 0000000..5d480e8 --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@aws-sdk/client-dynamodb/dist-types/ts3.4/runtimeConfig.shared.d.ts @@ -0,0 +1,21 @@ +import { DynamoDBClientConfig } from "./DynamoDBClient"; +export declare const getRuntimeConfig: (config: DynamoDBClientConfig) => { + apiVersion: string; + base64Decoder: 
import("@smithy/types").Decoder; + base64Encoder: (_input: string | Uint8Array) => string; + disableHostPrefix: boolean; + endpointProvider: ( + endpointParams: import("./endpoint/EndpointParameters").EndpointParameters, + context?: { + logger?: import("@smithy/types").Logger | undefined; + } + ) => import("@smithy/types").EndpointV2; + extensions: import("./runtimeExtensions").RuntimeExtension[]; + httpAuthSchemeProvider: import("./auth/httpAuthSchemeProvider").DynamoDBHttpAuthSchemeProvider; + httpAuthSchemes: import("@smithy/types").HttpAuthScheme[]; + logger: import("@smithy/types").Logger; + serviceId: string; + urlParser: import("@smithy/types").UrlParser; + utf8Decoder: import("@smithy/types").Decoder; + utf8Encoder: (input: string | Uint8Array) => string; +}; diff --git a/amplify/functions/downloadDocument/node_modules/@aws-sdk/client-dynamodb/dist-types/ts3.4/runtimeExtensions.d.ts b/amplify/functions/downloadDocument/node_modules/@aws-sdk/client-dynamodb/dist-types/ts3.4/runtimeExtensions.d.ts new file mode 100644 index 0000000..632b45b --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@aws-sdk/client-dynamodb/dist-types/ts3.4/runtimeExtensions.d.ts @@ -0,0 +1,11 @@ +import { DynamoDBExtensionConfiguration } from "./extensionConfiguration"; +export interface RuntimeExtension { + configure(extensionConfiguration: DynamoDBExtensionConfiguration): void; +} +export interface RuntimeExtensionsConfig { + extensions: RuntimeExtension[]; +} +export declare const resolveRuntimeExtensions: ( + runtimeConfig: any, + extensions: RuntimeExtension[] +) => any; diff --git a/amplify/functions/downloadDocument/node_modules/@aws-sdk/client-dynamodb/dist-types/ts3.4/waiters/index.d.ts b/amplify/functions/downloadDocument/node_modules/@aws-sdk/client-dynamodb/dist-types/ts3.4/waiters/index.d.ts new file mode 100644 index 0000000..a70fabd --- /dev/null +++ 
b/amplify/functions/downloadDocument/node_modules/@aws-sdk/client-dynamodb/dist-types/ts3.4/waiters/index.d.ts @@ -0,0 +1,2 @@ +export * from "./waitForTableExists"; +export * from "./waitForTableNotExists"; diff --git a/amplify/functions/downloadDocument/node_modules/@aws-sdk/client-dynamodb/dist-types/ts3.4/waiters/waitForTableExists.d.ts b/amplify/functions/downloadDocument/node_modules/@aws-sdk/client-dynamodb/dist-types/ts3.4/waiters/waitForTableExists.d.ts new file mode 100644 index 0000000..5dba224 --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@aws-sdk/client-dynamodb/dist-types/ts3.4/waiters/waitForTableExists.d.ts @@ -0,0 +1,11 @@ +import { WaiterConfiguration, WaiterResult } from "@smithy/util-waiter"; +import { DescribeTableCommandInput } from "../commands/DescribeTableCommand"; +import { DynamoDBClient } from "../DynamoDBClient"; +export declare const waitForTableExists: ( + params: WaiterConfiguration, + input: DescribeTableCommandInput +) => Promise; +export declare const waitUntilTableExists: ( + params: WaiterConfiguration, + input: DescribeTableCommandInput +) => Promise; diff --git a/amplify/functions/downloadDocument/node_modules/@aws-sdk/client-dynamodb/dist-types/ts3.4/waiters/waitForTableNotExists.d.ts b/amplify/functions/downloadDocument/node_modules/@aws-sdk/client-dynamodb/dist-types/ts3.4/waiters/waitForTableNotExists.d.ts new file mode 100644 index 0000000..1df1b34 --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@aws-sdk/client-dynamodb/dist-types/ts3.4/waiters/waitForTableNotExists.d.ts @@ -0,0 +1,11 @@ +import { WaiterConfiguration, WaiterResult } from "@smithy/util-waiter"; +import { DescribeTableCommandInput } from "../commands/DescribeTableCommand"; +import { DynamoDBClient } from "../DynamoDBClient"; +export declare const waitForTableNotExists: ( + params: WaiterConfiguration, + input: DescribeTableCommandInput +) => Promise; +export declare const waitUntilTableNotExists: ( + params: 
WaiterConfiguration, + input: DescribeTableCommandInput +) => Promise; diff --git a/amplify/functions/downloadDocument/node_modules/@aws-sdk/client-dynamodb/dist-types/waiters/index.d.ts b/amplify/functions/downloadDocument/node_modules/@aws-sdk/client-dynamodb/dist-types/waiters/index.d.ts new file mode 100644 index 0000000..a70fabd --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@aws-sdk/client-dynamodb/dist-types/waiters/index.d.ts @@ -0,0 +1,2 @@ +export * from "./waitForTableExists"; +export * from "./waitForTableNotExists"; diff --git a/amplify/functions/downloadDocument/node_modules/@aws-sdk/client-dynamodb/dist-types/waiters/waitForTableExists.d.ts b/amplify/functions/downloadDocument/node_modules/@aws-sdk/client-dynamodb/dist-types/waiters/waitForTableExists.d.ts new file mode 100644 index 0000000..a8ae496 --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@aws-sdk/client-dynamodb/dist-types/waiters/waitForTableExists.d.ts @@ -0,0 +1,14 @@ +import { WaiterConfiguration, WaiterResult } from "@smithy/util-waiter"; +import { DescribeTableCommandInput } from "../commands/DescribeTableCommand"; +import { DynamoDBClient } from "../DynamoDBClient"; +/** + * + * @deprecated Use waitUntilTableExists instead. waitForTableExists does not throw error in non-success cases. + */ +export declare const waitForTableExists: (params: WaiterConfiguration, input: DescribeTableCommandInput) => Promise; +/** + * + * @param params - Waiter configuration options. + * @param input - The input to DescribeTableCommand for polling. 
+ */ +export declare const waitUntilTableExists: (params: WaiterConfiguration, input: DescribeTableCommandInput) => Promise; diff --git a/amplify/functions/downloadDocument/node_modules/@aws-sdk/client-dynamodb/dist-types/waiters/waitForTableNotExists.d.ts b/amplify/functions/downloadDocument/node_modules/@aws-sdk/client-dynamodb/dist-types/waiters/waitForTableNotExists.d.ts new file mode 100644 index 0000000..df87d8a --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@aws-sdk/client-dynamodb/dist-types/waiters/waitForTableNotExists.d.ts @@ -0,0 +1,14 @@ +import { WaiterConfiguration, WaiterResult } from "@smithy/util-waiter"; +import { DescribeTableCommandInput } from "../commands/DescribeTableCommand"; +import { DynamoDBClient } from "../DynamoDBClient"; +/** + * + * @deprecated Use waitUntilTableNotExists instead. waitForTableNotExists does not throw error in non-success cases. + */ +export declare const waitForTableNotExists: (params: WaiterConfiguration, input: DescribeTableCommandInput) => Promise; +/** + * + * @param params - Waiter configuration options. + * @param input - The input to DescribeTableCommand for polling. 
+ */ +export declare const waitUntilTableNotExists: (params: WaiterConfiguration, input: DescribeTableCommandInput) => Promise; diff --git a/amplify/functions/downloadDocument/node_modules/@aws-sdk/client-dynamodb/package.json b/amplify/functions/downloadDocument/node_modules/@aws-sdk/client-dynamodb/package.json new file mode 100644 index 0000000..9377573 --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@aws-sdk/client-dynamodb/package.json @@ -0,0 +1,103 @@ +{ + "name": "@aws-sdk/client-dynamodb", + "description": "AWS SDK for JavaScript Dynamodb Client for Node.js, Browser and React Native", + "version": "3.803.0", + "scripts": { + "build": "concurrently 'yarn:build:cjs' 'yarn:build:es' 'yarn:build:types'", + "build:cjs": "node ../../scripts/compilation/inline client-dynamodb", + "build:es": "tsc -p tsconfig.es.json", + "build:include:deps": "lerna run --scope $npm_package_name --include-dependencies build", + "build:types": "tsc -p tsconfig.types.json", + "build:types:downlevel": "downlevel-dts dist-types dist-types/ts3.4", + "clean": "rimraf ./dist-* && rimraf *.tsbuildinfo", + "extract:docs": "api-extractor run --local", + "generate:client": "node ../../scripts/generate-clients/single-service --solo dynamodb" + }, + "main": "./dist-cjs/index.js", + "types": "./dist-types/index.d.ts", + "module": "./dist-es/index.js", + "sideEffects": false, + "dependencies": { + "@aws-crypto/sha256-browser": "5.2.0", + "@aws-crypto/sha256-js": "5.2.0", + "@aws-sdk/core": "3.799.0", + "@aws-sdk/credential-provider-node": "3.803.0", + "@aws-sdk/middleware-endpoint-discovery": "3.775.0", + "@aws-sdk/middleware-host-header": "3.775.0", + "@aws-sdk/middleware-logger": "3.775.0", + "@aws-sdk/middleware-recursion-detection": "3.775.0", + "@aws-sdk/middleware-user-agent": "3.799.0", + "@aws-sdk/region-config-resolver": "3.775.0", + "@aws-sdk/types": "3.775.0", + "@aws-sdk/util-endpoints": "3.787.0", + "@aws-sdk/util-user-agent-browser": "3.775.0", + 
"@aws-sdk/util-user-agent-node": "3.799.0", + "@smithy/config-resolver": "^4.1.0", + "@smithy/core": "^3.3.0", + "@smithy/fetch-http-handler": "^5.0.2", + "@smithy/hash-node": "^4.0.2", + "@smithy/invalid-dependency": "^4.0.2", + "@smithy/middleware-content-length": "^4.0.2", + "@smithy/middleware-endpoint": "^4.1.1", + "@smithy/middleware-retry": "^4.1.2", + "@smithy/middleware-serde": "^4.0.3", + "@smithy/middleware-stack": "^4.0.2", + "@smithy/node-config-provider": "^4.0.2", + "@smithy/node-http-handler": "^4.0.4", + "@smithy/protocol-http": "^5.1.0", + "@smithy/smithy-client": "^4.2.1", + "@smithy/types": "^4.2.0", + "@smithy/url-parser": "^4.0.2", + "@smithy/util-base64": "^4.0.0", + "@smithy/util-body-length-browser": "^4.0.0", + "@smithy/util-body-length-node": "^4.0.0", + "@smithy/util-defaults-mode-browser": "^4.0.9", + "@smithy/util-defaults-mode-node": "^4.0.9", + "@smithy/util-endpoints": "^3.0.2", + "@smithy/util-middleware": "^4.0.2", + "@smithy/util-retry": "^4.0.3", + "@smithy/util-utf8": "^4.0.0", + "@smithy/util-waiter": "^4.0.3", + "@types/uuid": "^9.0.1", + "tslib": "^2.6.2", + "uuid": "^9.0.1" + }, + "devDependencies": { + "@tsconfig/node18": "18.2.4", + "@types/node": "^18.19.69", + "concurrently": "7.0.0", + "downlevel-dts": "0.10.1", + "rimraf": "3.0.2", + "typescript": "~5.2.2" + }, + "engines": { + "node": ">=18.0.0" + }, + "typesVersions": { + "<4.0": { + "dist-types/*": [ + "dist-types/ts3.4/*" + ] + } + }, + "files": [ + "dist-*/**" + ], + "author": { + "name": "AWS SDK for JavaScript Team", + "url": "https://aws.amazon.com/javascript/" + }, + "license": "Apache-2.0", + "browser": { + "./dist-es/runtimeConfig": "./dist-es/runtimeConfig.browser" + }, + "react-native": { + "./dist-es/runtimeConfig": "./dist-es/runtimeConfig.native" + }, + "homepage": "https://github.com/aws/aws-sdk-js-v3/tree/main/clients/client-dynamodb", + "repository": { + "type": "git", + "url": "https://github.com/aws/aws-sdk-js-v3.git", + "directory": 
"clients/client-dynamodb" + } +} diff --git a/amplify/functions/downloadDocument/node_modules/@aws-sdk/client-sso/LICENSE b/amplify/functions/downloadDocument/node_modules/@aws-sdk/client-sso/LICENSE new file mode 100644 index 0000000..dd65ae0 --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@aws-sdk/client-sso/LICENSE @@ -0,0 +1,201 @@ + Apache License + Version 2.0, January 2004 + http://www.apache.org/licenses/ + + TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION + + 1. Definitions. + + "License" shall mean the terms and conditions for use, reproduction, + and distribution as defined by Sections 1 through 9 of this document. + + "Licensor" shall mean the copyright owner or entity authorized by + the copyright owner that is granting the License. + + "Legal Entity" shall mean the union of the acting entity and all + other entities that control, are controlled by, or are under common + control with that entity. For the purposes of this definition, + "control" means (i) the power, direct or indirect, to cause the + direction or management of such entity, whether by contract or + otherwise, or (ii) ownership of fifty percent (50%) or more of the + outstanding shares, or (iii) beneficial ownership of such entity. + + "You" (or "Your") shall mean an individual or Legal Entity + exercising permissions granted by this License. + + "Source" form shall mean the preferred form for making modifications, + including but not limited to software source code, documentation + source, and configuration files. + + "Object" form shall mean any form resulting from mechanical + transformation or translation of a Source form, including but + not limited to compiled object code, generated documentation, + and conversions to other media types. 
+ + "Work" shall mean the work of authorship, whether in Source or + Object form, made available under the License, as indicated by a + copyright notice that is included in or attached to the work + (an example is provided in the Appendix below). + + "Derivative Works" shall mean any work, whether in Source or Object + form, that is based on (or derived from) the Work and for which the + editorial revisions, annotations, elaborations, or other modifications + represent, as a whole, an original work of authorship. For the purposes + of this License, Derivative Works shall not include works that remain + separable from, or merely link (or bind by name) to the interfaces of, + the Work and Derivative Works thereof. + + "Contribution" shall mean any work of authorship, including + the original version of the Work and any modifications or additions + to that Work or Derivative Works thereof, that is intentionally + submitted to Licensor for inclusion in the Work by the copyright owner + or by an individual or Legal Entity authorized to submit on behalf of + the copyright owner. For the purposes of this definition, "submitted" + means any form of electronic, verbal, or written communication sent + to the Licensor or its representatives, including but not limited to + communication on electronic mailing lists, source code control systems, + and issue tracking systems that are managed by, or on behalf of, the + Licensor for the purpose of discussing and improving the Work, but + excluding communication that is conspicuously marked or otherwise + designated in writing by the copyright owner as "Not a Contribution." + + "Contributor" shall mean Licensor and any individual or Legal Entity + on behalf of whom a Contribution has been received by Licensor and + subsequently incorporated within the Work. + + 2. Grant of Copyright License. 
Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + copyright license to reproduce, prepare Derivative Works of, + publicly display, publicly perform, sublicense, and distribute the + Work and such Derivative Works in Source or Object form. + + 3. Grant of Patent License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + (except as stated in this section) patent license to make, have made, + use, offer to sell, sell, import, and otherwise transfer the Work, + where such license applies only to those patent claims licensable + by such Contributor that are necessarily infringed by their + Contribution(s) alone or by combination of their Contribution(s) + with the Work to which such Contribution(s) was submitted. If You + institute patent litigation against any entity (including a + cross-claim or counterclaim in a lawsuit) alleging that the Work + or a Contribution incorporated within the Work constitutes direct + or contributory patent infringement, then any patent licenses + granted to You under this License for that Work shall terminate + as of the date such litigation is filed. + + 4. Redistribution. 
You may reproduce and distribute copies of the + Work or Derivative Works thereof in any medium, with or without + modifications, and in Source or Object form, provided that You + meet the following conditions: + + (a) You must give any other recipients of the Work or + Derivative Works a copy of this License; and + + (b) You must cause any modified files to carry prominent notices + stating that You changed the files; and + + (c) You must retain, in the Source form of any Derivative Works + that You distribute, all copyright, patent, trademark, and + attribution notices from the Source form of the Work, + excluding those notices that do not pertain to any part of + the Derivative Works; and + + (d) If the Work includes a "NOTICE" text file as part of its + distribution, then any Derivative Works that You distribute must + include a readable copy of the attribution notices contained + within such NOTICE file, excluding those notices that do not + pertain to any part of the Derivative Works, in at least one + of the following places: within a NOTICE text file distributed + as part of the Derivative Works; within the Source form or + documentation, if provided along with the Derivative Works; or, + within a display generated by the Derivative Works, if and + wherever such third-party notices normally appear. The contents + of the NOTICE file are for informational purposes only and + do not modify the License. You may add Your own attribution + notices within Derivative Works that You distribute, alongside + or as an addendum to the NOTICE text from the Work, provided + that such additional attribution notices cannot be construed + as modifying the License. 
+ + You may add Your own copyright statement to Your modifications and + may provide additional or different license terms and conditions + for use, reproduction, or distribution of Your modifications, or + for any such Derivative Works as a whole, provided Your use, + reproduction, and distribution of the Work otherwise complies with + the conditions stated in this License. + + 5. Submission of Contributions. Unless You explicitly state otherwise, + any Contribution intentionally submitted for inclusion in the Work + by You to the Licensor shall be under the terms and conditions of + this License, without any additional terms or conditions. + Notwithstanding the above, nothing herein shall supersede or modify + the terms of any separate license agreement you may have executed + with Licensor regarding such Contributions. + + 6. Trademarks. This License does not grant permission to use the trade + names, trademarks, service marks, or product names of the Licensor, + except as required for reasonable and customary use in describing the + origin of the Work and reproducing the content of the NOTICE file. + + 7. Disclaimer of Warranty. Unless required by applicable law or + agreed to in writing, Licensor provides the Work (and each + Contributor provides its Contributions) on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or + implied, including, without limitation, any warranties or conditions + of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A + PARTICULAR PURPOSE. You are solely responsible for determining the + appropriateness of using or redistributing the Work and assume any + risks associated with Your exercise of permissions under this License. + + 8. Limitation of Liability. 
In no event and under no legal theory, + whether in tort (including negligence), contract, or otherwise, + unless required by applicable law (such as deliberate and grossly + negligent acts) or agreed to in writing, shall any Contributor be + liable to You for damages, including any direct, indirect, special, + incidental, or consequential damages of any character arising as a + result of this License or out of the use or inability to use the + Work (including but not limited to damages for loss of goodwill, + work stoppage, computer failure or malfunction, or any and all + other commercial damages or losses), even if such Contributor + has been advised of the possibility of such damages. + + 9. Accepting Warranty or Additional Liability. While redistributing + the Work or Derivative Works thereof, You may choose to offer, + and charge a fee for, acceptance of support, warranty, indemnity, + or other liability obligations and/or rights consistent with this + License. However, in accepting such obligations, You may act only + on Your own behalf and on Your sole responsibility, not on behalf + of any other Contributor, and only if You agree to indemnify, + defend, and hold each Contributor harmless for any liability + incurred by, or claims asserted against, such Contributor by reason + of your accepting any such warranty or additional liability. + + END OF TERMS AND CONDITIONS + + APPENDIX: How to apply the Apache License to your work. + + To apply the Apache License to your work, attach the following + boilerplate notice, with the fields enclosed by brackets "{}" + replaced with your own identifying information. (Don't include + the brackets!) The text should be enclosed in the appropriate + comment syntax for the file format. We also recommend that a + file or class name and description of purpose be included on the + same "printed page" as the copyright notice for easier + identification within third-party archives. + + Copyright 2018-2020 Amazon.com, Inc. 
or its affiliates. All Rights Reserved. + + Licensed under the Apache License, Version 2.0 (the "License"); + you may not use this file except in compliance with the License. + You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + + Unless required by applicable law or agreed to in writing, software + distributed under the License is distributed on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + See the License for the specific language governing permissions and + limitations under the License. diff --git a/amplify/functions/downloadDocument/node_modules/@aws-sdk/client-sso/README.md b/amplify/functions/downloadDocument/node_modules/@aws-sdk/client-sso/README.md new file mode 100644 index 0000000..09d5fe3 --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@aws-sdk/client-sso/README.md @@ -0,0 +1,252 @@ + + +# @aws-sdk/client-sso + +## Description + +AWS SDK for JavaScript SSO Client for Node.js, Browser and React Native. + +

AWS IAM Identity Center (successor to AWS Single Sign-On) Portal is a web service that makes it easy for you to assign user access to +IAM Identity Center resources such as the AWS access portal. Users can get AWS account applications and roles +assigned to them and get federated into the application.

+ +

Although AWS Single Sign-On was renamed, the sso and +identitystore API namespaces will continue to retain their original name for +backward compatibility purposes. For more information, see IAM Identity Center rename.

+
+

This reference guide describes the IAM Identity Center Portal operations that you can call +programatically and includes detailed information on data types and errors.

+ +

AWS provides SDKs that consist of libraries and sample code for various programming +languages and platforms, such as Java, Ruby, .Net, iOS, or Android. The SDKs provide a +convenient way to create programmatic access to IAM Identity Center and other AWS services. For more +information about the AWS SDKs, including how to download and install them, see Tools for Amazon Web Services.

+
+ +## Installing + +To install this package, simply type add or install @aws-sdk/client-sso +using your favorite package manager: + +- `npm install @aws-sdk/client-sso` +- `yarn add @aws-sdk/client-sso` +- `pnpm add @aws-sdk/client-sso` + +## Getting Started + +### Import + +The AWS SDK is modulized by clients and commands. +To send a request, you only need to import the `SSOClient` and +the commands you need, for example `ListAccountsCommand`: + +```js +// ES5 example +const { SSOClient, ListAccountsCommand } = require("@aws-sdk/client-sso"); +``` + +```ts +// ES6+ example +import { SSOClient, ListAccountsCommand } from "@aws-sdk/client-sso"; +``` + +### Usage + +To send a request, you: + +- Initiate client with configuration (e.g. credentials, region). +- Initiate command with input parameters. +- Call `send` operation on client with command object as input. +- If you are using a custom http handler, you may call `destroy()` to close open connections. + +```js +// a client can be shared by different commands. +const client = new SSOClient({ region: "REGION" }); + +const params = { + /** input parameters */ +}; +const command = new ListAccountsCommand(params); +``` + +#### Async/await + +We recommend using [await](https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Operators/await) +operator to wait for the promise returned by send operation as follows: + +```js +// async/await. +try { + const data = await client.send(command); + // process data. +} catch (error) { + // error handling. +} finally { + // finally. +} +``` + +Async-await is clean, concise, intuitive, easy to debug and has better error handling +as compared to using Promise chains or callbacks. + +#### Promises + +You can also use [Promise chaining](https://developer.mozilla.org/en-US/docs/Web/JavaScript/Guide/Using_promises#chaining) +to execute send operation. + +```js +client.send(command).then( + (data) => { + // process data. + }, + (error) => { + // error handling. 
+ } +); +``` + +Promises can also be called using `.catch()` and `.finally()` as follows: + +```js +client + .send(command) + .then((data) => { + // process data. + }) + .catch((error) => { + // error handling. + }) + .finally(() => { + // finally. + }); +``` + +#### Callbacks + +We do not recommend using callbacks because of [callback hell](http://callbackhell.com/), +but they are supported by the send operation. + +```js +// callbacks. +client.send(command, (err, data) => { + // process err and data. +}); +``` + +#### v2 compatible style + +The client can also send requests using v2 compatible style. +However, it results in a bigger bundle size and may be dropped in next major version. More details in the blog post +on [modular packages in AWS SDK for JavaScript](https://aws.amazon.com/blogs/developer/modular-packages-in-aws-sdk-for-javascript/) + +```ts +import * as AWS from "@aws-sdk/client-sso"; +const client = new AWS.SSO({ region: "REGION" }); + +// async/await. +try { + const data = await client.listAccounts(params); + // process data. +} catch (error) { + // error handling. +} + +// Promises. +client + .listAccounts(params) + .then((data) => { + // process data. + }) + .catch((error) => { + // error handling. + }); + +// callbacks. +client.listAccounts(params, (err, data) => { + // process err and data. +}); +``` + +### Troubleshooting + +When the service returns an exception, the error will include the exception information, +as well as response metadata (e.g. request id). + +```js +try { + const data = await client.send(command); + // process data. +} catch (error) { + const { requestId, cfId, extendedRequestId } = error.$metadata; + console.log({ requestId, cfId, extendedRequestId }); + /** + * The keys within exceptions are also parsed. 
+ * You can access them by specifying exception names: + * if (error.name === 'SomeServiceException') { + * const value = error.specialKeyInException; + * } + */ +} +``` + +## Getting Help + +Please use these community resources for getting help. +We use the GitHub issues for tracking bugs and feature requests, but have limited bandwidth to address them. + +- Visit [Developer Guide](https://docs.aws.amazon.com/sdk-for-javascript/v3/developer-guide/welcome.html) + or [API Reference](https://docs.aws.amazon.com/AWSJavaScriptSDK/v3/latest/index.html). +- Check out the blog posts tagged with [`aws-sdk-js`](https://aws.amazon.com/blogs/developer/tag/aws-sdk-js/) + on AWS Developer Blog. +- Ask a question on [StackOverflow](https://stackoverflow.com/questions/tagged/aws-sdk-js) and tag it with `aws-sdk-js`. +- Join the AWS JavaScript community on [gitter](https://gitter.im/aws/aws-sdk-js-v3). +- If it turns out that you may have found a bug, please [open an issue](https://github.com/aws/aws-sdk-js-v3/issues/new/choose). + +To test your universal JavaScript code in Node.js, browser and react-native environments, +visit our [code samples repo](https://github.com/aws-samples/aws-sdk-js-tests). + +## Contributing + +This client code is generated automatically. Any modifications will be overwritten the next time the `@aws-sdk/client-sso` package is updated. +To contribute to client you can check our [generate clients scripts](https://github.com/aws/aws-sdk-js-v3/tree/main/scripts/generate-clients). + +## License + +This SDK is distributed under the +[Apache License, Version 2.0](http://www.apache.org/licenses/LICENSE-2.0), +see LICENSE for more information. + +## Client Commands (Operations List) + +
+ +GetRoleCredentials + + +[Command API Reference](https://docs.aws.amazon.com/AWSJavaScriptSDK/v3/latest/client/sso/command/GetRoleCredentialsCommand/) / [Input](https://docs.aws.amazon.com/AWSJavaScriptSDK/v3/latest/Package/-aws-sdk-client-sso/Interface/GetRoleCredentialsCommandInput/) / [Output](https://docs.aws.amazon.com/AWSJavaScriptSDK/v3/latest/Package/-aws-sdk-client-sso/Interface/GetRoleCredentialsCommandOutput/) + +
+
+ +ListAccountRoles + + +[Command API Reference](https://docs.aws.amazon.com/AWSJavaScriptSDK/v3/latest/client/sso/command/ListAccountRolesCommand/) / [Input](https://docs.aws.amazon.com/AWSJavaScriptSDK/v3/latest/Package/-aws-sdk-client-sso/Interface/ListAccountRolesCommandInput/) / [Output](https://docs.aws.amazon.com/AWSJavaScriptSDK/v3/latest/Package/-aws-sdk-client-sso/Interface/ListAccountRolesCommandOutput/) + +
+
+ +ListAccounts + + +[Command API Reference](https://docs.aws.amazon.com/AWSJavaScriptSDK/v3/latest/client/sso/command/ListAccountsCommand/) / [Input](https://docs.aws.amazon.com/AWSJavaScriptSDK/v3/latest/Package/-aws-sdk-client-sso/Interface/ListAccountsCommandInput/) / [Output](https://docs.aws.amazon.com/AWSJavaScriptSDK/v3/latest/Package/-aws-sdk-client-sso/Interface/ListAccountsCommandOutput/) + +
+
+ +Logout + + +[Command API Reference](https://docs.aws.amazon.com/AWSJavaScriptSDK/v3/latest/client/sso/command/LogoutCommand/) / [Input](https://docs.aws.amazon.com/AWSJavaScriptSDK/v3/latest/Package/-aws-sdk-client-sso/Interface/LogoutCommandInput/) / [Output](https://docs.aws.amazon.com/AWSJavaScriptSDK/v3/latest/Package/-aws-sdk-client-sso/Interface/LogoutCommandOutput/) + +
diff --git a/amplify/functions/downloadDocument/node_modules/@aws-sdk/client-sso/dist-cjs/auth/httpAuthSchemeProvider.js b/amplify/functions/downloadDocument/node_modules/@aws-sdk/client-sso/dist-cjs/auth/httpAuthSchemeProvider.js new file mode 100644 index 0000000..2c256ee --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@aws-sdk/client-sso/dist-cjs/auth/httpAuthSchemeProvider.js @@ -0,0 +1,68 @@ +"use strict"; +Object.defineProperty(exports, "__esModule", { value: true }); +exports.resolveHttpAuthSchemeConfig = exports.defaultSSOHttpAuthSchemeProvider = exports.defaultSSOHttpAuthSchemeParametersProvider = void 0; +const core_1 = require("@aws-sdk/core"); +const util_middleware_1 = require("@smithy/util-middleware"); +const defaultSSOHttpAuthSchemeParametersProvider = async (config, context, input) => { + return { + operation: (0, util_middleware_1.getSmithyContext)(context).operation, + region: (await (0, util_middleware_1.normalizeProvider)(config.region)()) || + (() => { + throw new Error("expected `region` to be configured for `aws.auth#sigv4`"); + })(), + }; +}; +exports.defaultSSOHttpAuthSchemeParametersProvider = defaultSSOHttpAuthSchemeParametersProvider; +function createAwsAuthSigv4HttpAuthOption(authParameters) { + return { + schemeId: "aws.auth#sigv4", + signingProperties: { + name: "awsssoportal", + region: authParameters.region, + }, + propertiesExtractor: (config, context) => ({ + signingProperties: { + config, + context, + }, + }), + }; +} +function createSmithyApiNoAuthHttpAuthOption(authParameters) { + return { + schemeId: "smithy.api#noAuth", + }; +} +const defaultSSOHttpAuthSchemeProvider = (authParameters) => { + const options = []; + switch (authParameters.operation) { + case "GetRoleCredentials": { + options.push(createSmithyApiNoAuthHttpAuthOption(authParameters)); + break; + } + case "ListAccountRoles": { + options.push(createSmithyApiNoAuthHttpAuthOption(authParameters)); + break; + } + case "ListAccounts": { + 
options.push(createSmithyApiNoAuthHttpAuthOption(authParameters)); + break; + } + case "Logout": { + options.push(createSmithyApiNoAuthHttpAuthOption(authParameters)); + break; + } + default: { + options.push(createAwsAuthSigv4HttpAuthOption(authParameters)); + } + } + return options; +}; +exports.defaultSSOHttpAuthSchemeProvider = defaultSSOHttpAuthSchemeProvider; +const resolveHttpAuthSchemeConfig = (config) => { + const config_0 = (0, core_1.resolveAwsSdkSigV4Config)(config); + return Object.assign(config_0, { + authSchemePreference: (0, util_middleware_1.normalizeProvider)(config.authSchemePreference ?? []), + }); +}; +exports.resolveHttpAuthSchemeConfig = resolveHttpAuthSchemeConfig; diff --git a/amplify/functions/downloadDocument/node_modules/@aws-sdk/client-sso/dist-cjs/endpoint/endpointResolver.js b/amplify/functions/downloadDocument/node_modules/@aws-sdk/client-sso/dist-cjs/endpoint/endpointResolver.js new file mode 100644 index 0000000..7258a35 --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@aws-sdk/client-sso/dist-cjs/endpoint/endpointResolver.js @@ -0,0 +1,18 @@ +"use strict"; +Object.defineProperty(exports, "__esModule", { value: true }); +exports.defaultEndpointResolver = void 0; +const util_endpoints_1 = require("@aws-sdk/util-endpoints"); +const util_endpoints_2 = require("@smithy/util-endpoints"); +const ruleset_1 = require("./ruleset"); +const cache = new util_endpoints_2.EndpointCache({ + size: 50, + params: ["Endpoint", "Region", "UseDualStack", "UseFIPS"], +}); +const defaultEndpointResolver = (endpointParams, context = {}) => { + return cache.get(endpointParams, () => (0, util_endpoints_2.resolveEndpoint)(ruleset_1.ruleSet, { + endpointParams: endpointParams, + logger: context.logger, + })); +}; +exports.defaultEndpointResolver = defaultEndpointResolver; +util_endpoints_2.customEndpointFunctions.aws = util_endpoints_1.awsEndpointFunctions; diff --git 
a/amplify/functions/downloadDocument/node_modules/@aws-sdk/client-sso/dist-cjs/endpoint/ruleset.js b/amplify/functions/downloadDocument/node_modules/@aws-sdk/client-sso/dist-cjs/endpoint/ruleset.js new file mode 100644 index 0000000..4321ed9 --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@aws-sdk/client-sso/dist-cjs/endpoint/ruleset.js @@ -0,0 +1,7 @@ +"use strict"; +Object.defineProperty(exports, "__esModule", { value: true }); +exports.ruleSet = void 0; +const u = "required", v = "fn", w = "argv", x = "ref"; +const a = true, b = "isSet", c = "booleanEquals", d = "error", e = "endpoint", f = "tree", g = "PartitionResult", h = "getAttr", i = { [u]: false, "type": "String" }, j = { [u]: true, "default": false, "type": "Boolean" }, k = { [x]: "Endpoint" }, l = { [v]: c, [w]: [{ [x]: "UseFIPS" }, true] }, m = { [v]: c, [w]: [{ [x]: "UseDualStack" }, true] }, n = {}, o = { [v]: h, [w]: [{ [x]: g }, "supportsFIPS"] }, p = { [x]: g }, q = { [v]: c, [w]: [true, { [v]: h, [w]: [p, "supportsDualStack"] }] }, r = [l], s = [m], t = [{ [x]: "Region" }]; +const _data = { version: "1.0", parameters: { Region: i, UseDualStack: j, UseFIPS: j, Endpoint: i }, rules: [{ conditions: [{ [v]: b, [w]: [k] }], rules: [{ conditions: r, error: "Invalid Configuration: FIPS and custom endpoint are not supported", type: d }, { conditions: s, error: "Invalid Configuration: Dualstack and custom endpoint are not supported", type: d }, { endpoint: { url: k, properties: n, headers: n }, type: e }], type: f }, { conditions: [{ [v]: b, [w]: t }], rules: [{ conditions: [{ [v]: "aws.partition", [w]: t, assign: g }], rules: [{ conditions: [l, m], rules: [{ conditions: [{ [v]: c, [w]: [a, o] }, q], rules: [{ endpoint: { url: "https://portal.sso-fips.{Region}.{PartitionResult#dualStackDnsSuffix}", properties: n, headers: n }, type: e }], type: f }, { error: "FIPS and DualStack are enabled, but this partition does not support one or both", type: d }], type: f }, { conditions: r, rules: 
[{ conditions: [{ [v]: c, [w]: [o, a] }], rules: [{ conditions: [{ [v]: "stringEquals", [w]: [{ [v]: h, [w]: [p, "name"] }, "aws-us-gov"] }], endpoint: { url: "https://portal.sso.{Region}.amazonaws.com", properties: n, headers: n }, type: e }, { endpoint: { url: "https://portal.sso-fips.{Region}.{PartitionResult#dnsSuffix}", properties: n, headers: n }, type: e }], type: f }, { error: "FIPS is enabled but this partition does not support FIPS", type: d }], type: f }, { conditions: s, rules: [{ conditions: [q], rules: [{ endpoint: { url: "https://portal.sso.{Region}.{PartitionResult#dualStackDnsSuffix}", properties: n, headers: n }, type: e }], type: f }, { error: "DualStack is enabled but this partition does not support DualStack", type: d }], type: f }, { endpoint: { url: "https://portal.sso.{Region}.{PartitionResult#dnsSuffix}", properties: n, headers: n }, type: e }], type: f }], type: f }, { error: "Invalid Configuration: Missing Region", type: d }] }; +exports.ruleSet = _data; diff --git a/amplify/functions/downloadDocument/node_modules/@aws-sdk/client-sso/dist-cjs/index.js b/amplify/functions/downloadDocument/node_modules/@aws-sdk/client-sso/dist-cjs/index.js new file mode 100644 index 0000000..8383b07 --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@aws-sdk/client-sso/dist-cjs/index.js @@ -0,0 +1,625 @@ +"use strict"; +var __defProp = Object.defineProperty; +var __getOwnPropDesc = Object.getOwnPropertyDescriptor; +var __getOwnPropNames = Object.getOwnPropertyNames; +var __hasOwnProp = Object.prototype.hasOwnProperty; +var __name = (target, value) => __defProp(target, "name", { value, configurable: true }); +var __export = (target, all) => { + for (var name in all) + __defProp(target, name, { get: all[name], enumerable: true }); +}; +var __copyProps = (to, from, except, desc) => { + if (from && typeof from === "object" || typeof from === "function") { + for (let key of __getOwnPropNames(from)) + if (!__hasOwnProp.call(to, key) && key !== 
except) + __defProp(to, key, { get: () => from[key], enumerable: !(desc = __getOwnPropDesc(from, key)) || desc.enumerable }); + } + return to; +}; +var __toCommonJS = (mod) => __copyProps(__defProp({}, "__esModule", { value: true }), mod); + +// src/index.ts +var index_exports = {}; +__export(index_exports, { + GetRoleCredentialsCommand: () => GetRoleCredentialsCommand, + GetRoleCredentialsRequestFilterSensitiveLog: () => GetRoleCredentialsRequestFilterSensitiveLog, + GetRoleCredentialsResponseFilterSensitiveLog: () => GetRoleCredentialsResponseFilterSensitiveLog, + InvalidRequestException: () => InvalidRequestException, + ListAccountRolesCommand: () => ListAccountRolesCommand, + ListAccountRolesRequestFilterSensitiveLog: () => ListAccountRolesRequestFilterSensitiveLog, + ListAccountsCommand: () => ListAccountsCommand, + ListAccountsRequestFilterSensitiveLog: () => ListAccountsRequestFilterSensitiveLog, + LogoutCommand: () => LogoutCommand, + LogoutRequestFilterSensitiveLog: () => LogoutRequestFilterSensitiveLog, + ResourceNotFoundException: () => ResourceNotFoundException, + RoleCredentialsFilterSensitiveLog: () => RoleCredentialsFilterSensitiveLog, + SSO: () => SSO, + SSOClient: () => SSOClient, + SSOServiceException: () => SSOServiceException, + TooManyRequestsException: () => TooManyRequestsException, + UnauthorizedException: () => UnauthorizedException, + __Client: () => import_smithy_client.Client, + paginateListAccountRoles: () => paginateListAccountRoles, + paginateListAccounts: () => paginateListAccounts +}); +module.exports = __toCommonJS(index_exports); + +// src/SSOClient.ts +var import_middleware_host_header = require("@aws-sdk/middleware-host-header"); +var import_middleware_logger = require("@aws-sdk/middleware-logger"); +var import_middleware_recursion_detection = require("@aws-sdk/middleware-recursion-detection"); +var import_middleware_user_agent = require("@aws-sdk/middleware-user-agent"); +var import_config_resolver = 
require("@smithy/config-resolver"); +var import_core = require("@smithy/core"); +var import_middleware_content_length = require("@smithy/middleware-content-length"); +var import_middleware_endpoint = require("@smithy/middleware-endpoint"); +var import_middleware_retry = require("@smithy/middleware-retry"); + +var import_httpAuthSchemeProvider = require("./auth/httpAuthSchemeProvider"); + +// src/endpoint/EndpointParameters.ts +var resolveClientEndpointParameters = /* @__PURE__ */ __name((options) => { + return Object.assign(options, { + useDualstackEndpoint: options.useDualstackEndpoint ?? false, + useFipsEndpoint: options.useFipsEndpoint ?? false, + defaultSigningName: "awsssoportal" + }); +}, "resolveClientEndpointParameters"); +var commonParams = { + UseFIPS: { type: "builtInParams", name: "useFipsEndpoint" }, + Endpoint: { type: "builtInParams", name: "endpoint" }, + Region: { type: "builtInParams", name: "region" }, + UseDualStack: { type: "builtInParams", name: "useDualstackEndpoint" } +}; + +// src/SSOClient.ts +var import_runtimeConfig = require("././runtimeConfig"); + +// src/runtimeExtensions.ts +var import_region_config_resolver = require("@aws-sdk/region-config-resolver"); +var import_protocol_http = require("@smithy/protocol-http"); +var import_smithy_client = require("@smithy/smithy-client"); + +// src/auth/httpAuthExtensionConfiguration.ts +var getHttpAuthExtensionConfiguration = /* @__PURE__ */ __name((runtimeConfig) => { + const _httpAuthSchemes = runtimeConfig.httpAuthSchemes; + let _httpAuthSchemeProvider = runtimeConfig.httpAuthSchemeProvider; + let _credentials = runtimeConfig.credentials; + return { + setHttpAuthScheme(httpAuthScheme) { + const index = _httpAuthSchemes.findIndex((scheme) => scheme.schemeId === httpAuthScheme.schemeId); + if (index === -1) { + _httpAuthSchemes.push(httpAuthScheme); + } else { + _httpAuthSchemes.splice(index, 1, httpAuthScheme); + } + }, + httpAuthSchemes() { + return _httpAuthSchemes; + }, + 
setHttpAuthSchemeProvider(httpAuthSchemeProvider) { + _httpAuthSchemeProvider = httpAuthSchemeProvider; + }, + httpAuthSchemeProvider() { + return _httpAuthSchemeProvider; + }, + setCredentials(credentials) { + _credentials = credentials; + }, + credentials() { + return _credentials; + } + }; +}, "getHttpAuthExtensionConfiguration"); +var resolveHttpAuthRuntimeConfig = /* @__PURE__ */ __name((config) => { + return { + httpAuthSchemes: config.httpAuthSchemes(), + httpAuthSchemeProvider: config.httpAuthSchemeProvider(), + credentials: config.credentials() + }; +}, "resolveHttpAuthRuntimeConfig"); + +// src/runtimeExtensions.ts +var resolveRuntimeExtensions = /* @__PURE__ */ __name((runtimeConfig, extensions) => { + const extensionConfiguration = Object.assign( + (0, import_region_config_resolver.getAwsRegionExtensionConfiguration)(runtimeConfig), + (0, import_smithy_client.getDefaultExtensionConfiguration)(runtimeConfig), + (0, import_protocol_http.getHttpHandlerExtensionConfiguration)(runtimeConfig), + getHttpAuthExtensionConfiguration(runtimeConfig) + ); + extensions.forEach((extension) => extension.configure(extensionConfiguration)); + return Object.assign( + runtimeConfig, + (0, import_region_config_resolver.resolveAwsRegionExtensionConfiguration)(extensionConfiguration), + (0, import_smithy_client.resolveDefaultRuntimeConfig)(extensionConfiguration), + (0, import_protocol_http.resolveHttpHandlerRuntimeConfig)(extensionConfiguration), + resolveHttpAuthRuntimeConfig(extensionConfiguration) + ); +}, "resolveRuntimeExtensions"); + +// src/SSOClient.ts +var SSOClient = class extends import_smithy_client.Client { + static { + __name(this, "SSOClient"); + } + /** + * The resolved configuration of SSOClient class. This is resolved and normalized from the {@link SSOClientConfig | constructor configuration interface}. 
+ */ + config; + constructor(...[configuration]) { + const _config_0 = (0, import_runtimeConfig.getRuntimeConfig)(configuration || {}); + super(_config_0); + this.initConfig = _config_0; + const _config_1 = resolveClientEndpointParameters(_config_0); + const _config_2 = (0, import_middleware_user_agent.resolveUserAgentConfig)(_config_1); + const _config_3 = (0, import_middleware_retry.resolveRetryConfig)(_config_2); + const _config_4 = (0, import_config_resolver.resolveRegionConfig)(_config_3); + const _config_5 = (0, import_middleware_host_header.resolveHostHeaderConfig)(_config_4); + const _config_6 = (0, import_middleware_endpoint.resolveEndpointConfig)(_config_5); + const _config_7 = (0, import_httpAuthSchemeProvider.resolveHttpAuthSchemeConfig)(_config_6); + const _config_8 = resolveRuntimeExtensions(_config_7, configuration?.extensions || []); + this.config = _config_8; + this.middlewareStack.use((0, import_middleware_user_agent.getUserAgentPlugin)(this.config)); + this.middlewareStack.use((0, import_middleware_retry.getRetryPlugin)(this.config)); + this.middlewareStack.use((0, import_middleware_content_length.getContentLengthPlugin)(this.config)); + this.middlewareStack.use((0, import_middleware_host_header.getHostHeaderPlugin)(this.config)); + this.middlewareStack.use((0, import_middleware_logger.getLoggerPlugin)(this.config)); + this.middlewareStack.use((0, import_middleware_recursion_detection.getRecursionDetectionPlugin)(this.config)); + this.middlewareStack.use( + (0, import_core.getHttpAuthSchemeEndpointRuleSetPlugin)(this.config, { + httpAuthSchemeParametersProvider: import_httpAuthSchemeProvider.defaultSSOHttpAuthSchemeParametersProvider, + identityProviderConfigProvider: /* @__PURE__ */ __name(async (config) => new import_core.DefaultIdentityProviderConfig({ + "aws.auth#sigv4": config.credentials + }), "identityProviderConfigProvider") + }) + ); + this.middlewareStack.use((0, import_core.getHttpSigningPlugin)(this.config)); + } + /** + * Destroy 
underlying resources, like sockets. It's usually not necessary to do this. + * However in Node.js, it's best to explicitly shut down the client's agent when it is no longer needed. + * Otherwise, sockets might stay open for quite a long time before the server terminates them. + */ + destroy() { + super.destroy(); + } +}; + +// src/SSO.ts + + +// src/commands/GetRoleCredentialsCommand.ts + +var import_middleware_serde = require("@smithy/middleware-serde"); + + +// src/models/models_0.ts + + +// src/models/SSOServiceException.ts + +var SSOServiceException = class _SSOServiceException extends import_smithy_client.ServiceException { + static { + __name(this, "SSOServiceException"); + } + /** + * @internal + */ + constructor(options) { + super(options); + Object.setPrototypeOf(this, _SSOServiceException.prototype); + } +}; + +// src/models/models_0.ts +var InvalidRequestException = class _InvalidRequestException extends SSOServiceException { + static { + __name(this, "InvalidRequestException"); + } + name = "InvalidRequestException"; + $fault = "client"; + /** + * @internal + */ + constructor(opts) { + super({ + name: "InvalidRequestException", + $fault: "client", + ...opts + }); + Object.setPrototypeOf(this, _InvalidRequestException.prototype); + } +}; +var ResourceNotFoundException = class _ResourceNotFoundException extends SSOServiceException { + static { + __name(this, "ResourceNotFoundException"); + } + name = "ResourceNotFoundException"; + $fault = "client"; + /** + * @internal + */ + constructor(opts) { + super({ + name: "ResourceNotFoundException", + $fault: "client", + ...opts + }); + Object.setPrototypeOf(this, _ResourceNotFoundException.prototype); + } +}; +var TooManyRequestsException = class _TooManyRequestsException extends SSOServiceException { + static { + __name(this, "TooManyRequestsException"); + } + name = "TooManyRequestsException"; + $fault = "client"; + /** + * @internal + */ + constructor(opts) { + super({ + name: "TooManyRequestsException", + 
$fault: "client", + ...opts + }); + Object.setPrototypeOf(this, _TooManyRequestsException.prototype); + } +}; +var UnauthorizedException = class _UnauthorizedException extends SSOServiceException { + static { + __name(this, "UnauthorizedException"); + } + name = "UnauthorizedException"; + $fault = "client"; + /** + * @internal + */ + constructor(opts) { + super({ + name: "UnauthorizedException", + $fault: "client", + ...opts + }); + Object.setPrototypeOf(this, _UnauthorizedException.prototype); + } +}; +var GetRoleCredentialsRequestFilterSensitiveLog = /* @__PURE__ */ __name((obj) => ({ + ...obj, + ...obj.accessToken && { accessToken: import_smithy_client.SENSITIVE_STRING } +}), "GetRoleCredentialsRequestFilterSensitiveLog"); +var RoleCredentialsFilterSensitiveLog = /* @__PURE__ */ __name((obj) => ({ + ...obj, + ...obj.secretAccessKey && { secretAccessKey: import_smithy_client.SENSITIVE_STRING }, + ...obj.sessionToken && { sessionToken: import_smithy_client.SENSITIVE_STRING } +}), "RoleCredentialsFilterSensitiveLog"); +var GetRoleCredentialsResponseFilterSensitiveLog = /* @__PURE__ */ __name((obj) => ({ + ...obj, + ...obj.roleCredentials && { roleCredentials: RoleCredentialsFilterSensitiveLog(obj.roleCredentials) } +}), "GetRoleCredentialsResponseFilterSensitiveLog"); +var ListAccountRolesRequestFilterSensitiveLog = /* @__PURE__ */ __name((obj) => ({ + ...obj, + ...obj.accessToken && { accessToken: import_smithy_client.SENSITIVE_STRING } +}), "ListAccountRolesRequestFilterSensitiveLog"); +var ListAccountsRequestFilterSensitiveLog = /* @__PURE__ */ __name((obj) => ({ + ...obj, + ...obj.accessToken && { accessToken: import_smithy_client.SENSITIVE_STRING } +}), "ListAccountsRequestFilterSensitiveLog"); +var LogoutRequestFilterSensitiveLog = /* @__PURE__ */ __name((obj) => ({ + ...obj, + ...obj.accessToken && { accessToken: import_smithy_client.SENSITIVE_STRING } +}), "LogoutRequestFilterSensitiveLog"); + +// src/protocols/Aws_restJson1.ts +var import_core2 = 
require("@aws-sdk/core"); + + +var se_GetRoleCredentialsCommand = /* @__PURE__ */ __name(async (input, context) => { + const b = (0, import_core.requestBuilder)(input, context); + const headers = (0, import_smithy_client.map)({}, import_smithy_client.isSerializableHeaderValue, { + [_xasbt]: input[_aT] + }); + b.bp("/federation/credentials"); + const query = (0, import_smithy_client.map)({ + [_rn]: [, (0, import_smithy_client.expectNonNull)(input[_rN], `roleName`)], + [_ai]: [, (0, import_smithy_client.expectNonNull)(input[_aI], `accountId`)] + }); + let body; + b.m("GET").h(headers).q(query).b(body); + return b.build(); +}, "se_GetRoleCredentialsCommand"); +var se_ListAccountRolesCommand = /* @__PURE__ */ __name(async (input, context) => { + const b = (0, import_core.requestBuilder)(input, context); + const headers = (0, import_smithy_client.map)({}, import_smithy_client.isSerializableHeaderValue, { + [_xasbt]: input[_aT] + }); + b.bp("/assignment/roles"); + const query = (0, import_smithy_client.map)({ + [_nt]: [, input[_nT]], + [_mr]: [() => input.maxResults !== void 0, () => input[_mR].toString()], + [_ai]: [, (0, import_smithy_client.expectNonNull)(input[_aI], `accountId`)] + }); + let body; + b.m("GET").h(headers).q(query).b(body); + return b.build(); +}, "se_ListAccountRolesCommand"); +var se_ListAccountsCommand = /* @__PURE__ */ __name(async (input, context) => { + const b = (0, import_core.requestBuilder)(input, context); + const headers = (0, import_smithy_client.map)({}, import_smithy_client.isSerializableHeaderValue, { + [_xasbt]: input[_aT] + }); + b.bp("/assignment/accounts"); + const query = (0, import_smithy_client.map)({ + [_nt]: [, input[_nT]], + [_mr]: [() => input.maxResults !== void 0, () => input[_mR].toString()] + }); + let body; + b.m("GET").h(headers).q(query).b(body); + return b.build(); +}, "se_ListAccountsCommand"); +var se_LogoutCommand = /* @__PURE__ */ __name(async (input, context) => { + const b = (0, 
import_core.requestBuilder)(input, context); + const headers = (0, import_smithy_client.map)({}, import_smithy_client.isSerializableHeaderValue, { + [_xasbt]: input[_aT] + }); + b.bp("/logout"); + let body; + b.m("POST").h(headers).b(body); + return b.build(); +}, "se_LogoutCommand"); +var de_GetRoleCredentialsCommand = /* @__PURE__ */ __name(async (output, context) => { + if (output.statusCode !== 200 && output.statusCode >= 300) { + return de_CommandError(output, context); + } + const contents = (0, import_smithy_client.map)({ + $metadata: deserializeMetadata(output) + }); + const data = (0, import_smithy_client.expectNonNull)((0, import_smithy_client.expectObject)(await (0, import_core2.parseJsonBody)(output.body, context)), "body"); + const doc = (0, import_smithy_client.take)(data, { + roleCredentials: import_smithy_client._json + }); + Object.assign(contents, doc); + return contents; +}, "de_GetRoleCredentialsCommand"); +var de_ListAccountRolesCommand = /* @__PURE__ */ __name(async (output, context) => { + if (output.statusCode !== 200 && output.statusCode >= 300) { + return de_CommandError(output, context); + } + const contents = (0, import_smithy_client.map)({ + $metadata: deserializeMetadata(output) + }); + const data = (0, import_smithy_client.expectNonNull)((0, import_smithy_client.expectObject)(await (0, import_core2.parseJsonBody)(output.body, context)), "body"); + const doc = (0, import_smithy_client.take)(data, { + nextToken: import_smithy_client.expectString, + roleList: import_smithy_client._json + }); + Object.assign(contents, doc); + return contents; +}, "de_ListAccountRolesCommand"); +var de_ListAccountsCommand = /* @__PURE__ */ __name(async (output, context) => { + if (output.statusCode !== 200 && output.statusCode >= 300) { + return de_CommandError(output, context); + } + const contents = (0, import_smithy_client.map)({ + $metadata: deserializeMetadata(output) + }); + const data = (0, import_smithy_client.expectNonNull)((0, 
import_smithy_client.expectObject)(await (0, import_core2.parseJsonBody)(output.body, context)), "body"); + const doc = (0, import_smithy_client.take)(data, { + accountList: import_smithy_client._json, + nextToken: import_smithy_client.expectString + }); + Object.assign(contents, doc); + return contents; +}, "de_ListAccountsCommand"); +var de_LogoutCommand = /* @__PURE__ */ __name(async (output, context) => { + if (output.statusCode !== 200 && output.statusCode >= 300) { + return de_CommandError(output, context); + } + const contents = (0, import_smithy_client.map)({ + $metadata: deserializeMetadata(output) + }); + await (0, import_smithy_client.collectBody)(output.body, context); + return contents; +}, "de_LogoutCommand"); +var de_CommandError = /* @__PURE__ */ __name(async (output, context) => { + const parsedOutput = { + ...output, + body: await (0, import_core2.parseJsonErrorBody)(output.body, context) + }; + const errorCode = (0, import_core2.loadRestJsonErrorCode)(output, parsedOutput.body); + switch (errorCode) { + case "InvalidRequestException": + case "com.amazonaws.sso#InvalidRequestException": + throw await de_InvalidRequestExceptionRes(parsedOutput, context); + case "ResourceNotFoundException": + case "com.amazonaws.sso#ResourceNotFoundException": + throw await de_ResourceNotFoundExceptionRes(parsedOutput, context); + case "TooManyRequestsException": + case "com.amazonaws.sso#TooManyRequestsException": + throw await de_TooManyRequestsExceptionRes(parsedOutput, context); + case "UnauthorizedException": + case "com.amazonaws.sso#UnauthorizedException": + throw await de_UnauthorizedExceptionRes(parsedOutput, context); + default: + const parsedBody = parsedOutput.body; + return throwDefaultError({ + output, + parsedBody, + errorCode + }); + } +}, "de_CommandError"); +var throwDefaultError = (0, import_smithy_client.withBaseException)(SSOServiceException); +var de_InvalidRequestExceptionRes = /* @__PURE__ */ __name(async (parsedOutput, context) => { + const 
contents = (0, import_smithy_client.map)({}); + const data = parsedOutput.body; + const doc = (0, import_smithy_client.take)(data, { + message: import_smithy_client.expectString + }); + Object.assign(contents, doc); + const exception = new InvalidRequestException({ + $metadata: deserializeMetadata(parsedOutput), + ...contents + }); + return (0, import_smithy_client.decorateServiceException)(exception, parsedOutput.body); +}, "de_InvalidRequestExceptionRes"); +var de_ResourceNotFoundExceptionRes = /* @__PURE__ */ __name(async (parsedOutput, context) => { + const contents = (0, import_smithy_client.map)({}); + const data = parsedOutput.body; + const doc = (0, import_smithy_client.take)(data, { + message: import_smithy_client.expectString + }); + Object.assign(contents, doc); + const exception = new ResourceNotFoundException({ + $metadata: deserializeMetadata(parsedOutput), + ...contents + }); + return (0, import_smithy_client.decorateServiceException)(exception, parsedOutput.body); +}, "de_ResourceNotFoundExceptionRes"); +var de_TooManyRequestsExceptionRes = /* @__PURE__ */ __name(async (parsedOutput, context) => { + const contents = (0, import_smithy_client.map)({}); + const data = parsedOutput.body; + const doc = (0, import_smithy_client.take)(data, { + message: import_smithy_client.expectString + }); + Object.assign(contents, doc); + const exception = new TooManyRequestsException({ + $metadata: deserializeMetadata(parsedOutput), + ...contents + }); + return (0, import_smithy_client.decorateServiceException)(exception, parsedOutput.body); +}, "de_TooManyRequestsExceptionRes"); +var de_UnauthorizedExceptionRes = /* @__PURE__ */ __name(async (parsedOutput, context) => { + const contents = (0, import_smithy_client.map)({}); + const data = parsedOutput.body; + const doc = (0, import_smithy_client.take)(data, { + message: import_smithy_client.expectString + }); + Object.assign(contents, doc); + const exception = new UnauthorizedException({ + $metadata: 
deserializeMetadata(parsedOutput), + ...contents + }); + return (0, import_smithy_client.decorateServiceException)(exception, parsedOutput.body); +}, "de_UnauthorizedExceptionRes"); +var deserializeMetadata = /* @__PURE__ */ __name((output) => ({ + httpStatusCode: output.statusCode, + requestId: output.headers["x-amzn-requestid"] ?? output.headers["x-amzn-request-id"] ?? output.headers["x-amz-request-id"], + extendedRequestId: output.headers["x-amz-id-2"], + cfId: output.headers["x-amz-cf-id"] +}), "deserializeMetadata"); +var _aI = "accountId"; +var _aT = "accessToken"; +var _ai = "account_id"; +var _mR = "maxResults"; +var _mr = "max_result"; +var _nT = "nextToken"; +var _nt = "next_token"; +var _rN = "roleName"; +var _rn = "role_name"; +var _xasbt = "x-amz-sso_bearer_token"; + +// src/commands/GetRoleCredentialsCommand.ts +var GetRoleCredentialsCommand = class extends import_smithy_client.Command.classBuilder().ep(commonParams).m(function(Command, cs, config, o) { + return [ + (0, import_middleware_serde.getSerdePlugin)(config, this.serialize, this.deserialize), + (0, import_middleware_endpoint.getEndpointPlugin)(config, Command.getEndpointParameterInstructions()) + ]; +}).s("SWBPortalService", "GetRoleCredentials", {}).n("SSOClient", "GetRoleCredentialsCommand").f(GetRoleCredentialsRequestFilterSensitiveLog, GetRoleCredentialsResponseFilterSensitiveLog).ser(se_GetRoleCredentialsCommand).de(de_GetRoleCredentialsCommand).build() { + static { + __name(this, "GetRoleCredentialsCommand"); + } +}; + +// src/commands/ListAccountRolesCommand.ts + + + +var ListAccountRolesCommand = class extends import_smithy_client.Command.classBuilder().ep(commonParams).m(function(Command, cs, config, o) { + return [ + (0, import_middleware_serde.getSerdePlugin)(config, this.serialize, this.deserialize), + (0, import_middleware_endpoint.getEndpointPlugin)(config, Command.getEndpointParameterInstructions()) + ]; +}).s("SWBPortalService", "ListAccountRoles", {}).n("SSOClient", 
"ListAccountRolesCommand").f(ListAccountRolesRequestFilterSensitiveLog, void 0).ser(se_ListAccountRolesCommand).de(de_ListAccountRolesCommand).build() { + static { + __name(this, "ListAccountRolesCommand"); + } +}; + +// src/commands/ListAccountsCommand.ts + + + +var ListAccountsCommand = class extends import_smithy_client.Command.classBuilder().ep(commonParams).m(function(Command, cs, config, o) { + return [ + (0, import_middleware_serde.getSerdePlugin)(config, this.serialize, this.deserialize), + (0, import_middleware_endpoint.getEndpointPlugin)(config, Command.getEndpointParameterInstructions()) + ]; +}).s("SWBPortalService", "ListAccounts", {}).n("SSOClient", "ListAccountsCommand").f(ListAccountsRequestFilterSensitiveLog, void 0).ser(se_ListAccountsCommand).de(de_ListAccountsCommand).build() { + static { + __name(this, "ListAccountsCommand"); + } +}; + +// src/commands/LogoutCommand.ts + + + +var LogoutCommand = class extends import_smithy_client.Command.classBuilder().ep(commonParams).m(function(Command, cs, config, o) { + return [ + (0, import_middleware_serde.getSerdePlugin)(config, this.serialize, this.deserialize), + (0, import_middleware_endpoint.getEndpointPlugin)(config, Command.getEndpointParameterInstructions()) + ]; +}).s("SWBPortalService", "Logout", {}).n("SSOClient", "LogoutCommand").f(LogoutRequestFilterSensitiveLog, void 0).ser(se_LogoutCommand).de(de_LogoutCommand).build() { + static { + __name(this, "LogoutCommand"); + } +}; + +// src/SSO.ts +var commands = { + GetRoleCredentialsCommand, + ListAccountRolesCommand, + ListAccountsCommand, + LogoutCommand +}; +var SSO = class extends SSOClient { + static { + __name(this, "SSO"); + } +}; +(0, import_smithy_client.createAggregatedClient)(commands, SSO); + +// src/pagination/ListAccountRolesPaginator.ts + +var paginateListAccountRoles = (0, import_core.createPaginator)(SSOClient, ListAccountRolesCommand, "nextToken", "nextToken", "maxResults"); + +// src/pagination/ListAccountsPaginator.ts + +var 
paginateListAccounts = (0, import_core.createPaginator)(SSOClient, ListAccountsCommand, "nextToken", "nextToken", "maxResults"); +// Annotate the CommonJS export names for ESM import in node: + +0 && (module.exports = { + SSOServiceException, + __Client, + SSOClient, + SSO, + $Command, + GetRoleCredentialsCommand, + ListAccountRolesCommand, + ListAccountsCommand, + LogoutCommand, + paginateListAccountRoles, + paginateListAccounts, + InvalidRequestException, + ResourceNotFoundException, + TooManyRequestsException, + UnauthorizedException, + GetRoleCredentialsRequestFilterSensitiveLog, + RoleCredentialsFilterSensitiveLog, + GetRoleCredentialsResponseFilterSensitiveLog, + ListAccountRolesRequestFilterSensitiveLog, + ListAccountsRequestFilterSensitiveLog, + LogoutRequestFilterSensitiveLog +}); + diff --git a/amplify/functions/downloadDocument/node_modules/@aws-sdk/client-sso/dist-cjs/runtimeConfig.browser.js b/amplify/functions/downloadDocument/node_modules/@aws-sdk/client-sso/dist-cjs/runtimeConfig.browser.js new file mode 100644 index 0000000..3b40936 --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@aws-sdk/client-sso/dist-cjs/runtimeConfig.browser.js @@ -0,0 +1,38 @@ +"use strict"; +Object.defineProperty(exports, "__esModule", { value: true }); +exports.getRuntimeConfig = void 0; +const tslib_1 = require("tslib"); +const package_json_1 = tslib_1.__importDefault(require("../package.json")); +const sha256_browser_1 = require("@aws-crypto/sha256-browser"); +const util_user_agent_browser_1 = require("@aws-sdk/util-user-agent-browser"); +const config_resolver_1 = require("@smithy/config-resolver"); +const fetch_http_handler_1 = require("@smithy/fetch-http-handler"); +const invalid_dependency_1 = require("@smithy/invalid-dependency"); +const util_body_length_browser_1 = require("@smithy/util-body-length-browser"); +const util_retry_1 = require("@smithy/util-retry"); +const runtimeConfig_shared_1 = require("./runtimeConfig.shared"); +const 
smithy_client_1 = require("@smithy/smithy-client"); +const util_defaults_mode_browser_1 = require("@smithy/util-defaults-mode-browser"); +const getRuntimeConfig = (config) => { + const defaultsMode = (0, util_defaults_mode_browser_1.resolveDefaultsModeConfig)(config); + const defaultConfigProvider = () => defaultsMode().then(smithy_client_1.loadConfigsForDefaultMode); + const clientSharedValues = (0, runtimeConfig_shared_1.getRuntimeConfig)(config); + return { + ...clientSharedValues, + ...config, + runtime: "browser", + defaultsMode, + bodyLengthChecker: config?.bodyLengthChecker ?? util_body_length_browser_1.calculateBodyLength, + defaultUserAgentProvider: config?.defaultUserAgentProvider ?? + (0, util_user_agent_browser_1.createDefaultUserAgentProvider)({ serviceId: clientSharedValues.serviceId, clientVersion: package_json_1.default.version }), + maxAttempts: config?.maxAttempts ?? util_retry_1.DEFAULT_MAX_ATTEMPTS, + region: config?.region ?? (0, invalid_dependency_1.invalidProvider)("Region is missing"), + requestHandler: fetch_http_handler_1.FetchHttpHandler.create(config?.requestHandler ?? defaultConfigProvider), + retryMode: config?.retryMode ?? (async () => (await defaultConfigProvider()).retryMode || util_retry_1.DEFAULT_RETRY_MODE), + sha256: config?.sha256 ?? sha256_browser_1.Sha256, + streamCollector: config?.streamCollector ?? fetch_http_handler_1.streamCollector, + useDualstackEndpoint: config?.useDualstackEndpoint ?? (() => Promise.resolve(config_resolver_1.DEFAULT_USE_DUALSTACK_ENDPOINT)), + useFipsEndpoint: config?.useFipsEndpoint ?? 
(() => Promise.resolve(config_resolver_1.DEFAULT_USE_FIPS_ENDPOINT)), + }; +}; +exports.getRuntimeConfig = getRuntimeConfig; diff --git a/amplify/functions/downloadDocument/node_modules/@aws-sdk/client-sso/dist-cjs/runtimeConfig.js b/amplify/functions/downloadDocument/node_modules/@aws-sdk/client-sso/dist-cjs/runtimeConfig.js new file mode 100644 index 0000000..befc739 --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@aws-sdk/client-sso/dist-cjs/runtimeConfig.js @@ -0,0 +1,51 @@ +"use strict"; +Object.defineProperty(exports, "__esModule", { value: true }); +exports.getRuntimeConfig = void 0; +const tslib_1 = require("tslib"); +const package_json_1 = tslib_1.__importDefault(require("../package.json")); +const core_1 = require("@aws-sdk/core"); +const util_user_agent_node_1 = require("@aws-sdk/util-user-agent-node"); +const config_resolver_1 = require("@smithy/config-resolver"); +const hash_node_1 = require("@smithy/hash-node"); +const middleware_retry_1 = require("@smithy/middleware-retry"); +const node_config_provider_1 = require("@smithy/node-config-provider"); +const node_http_handler_1 = require("@smithy/node-http-handler"); +const util_body_length_node_1 = require("@smithy/util-body-length-node"); +const util_retry_1 = require("@smithy/util-retry"); +const runtimeConfig_shared_1 = require("./runtimeConfig.shared"); +const smithy_client_1 = require("@smithy/smithy-client"); +const util_defaults_mode_node_1 = require("@smithy/util-defaults-mode-node"); +const smithy_client_2 = require("@smithy/smithy-client"); +const getRuntimeConfig = (config) => { + (0, smithy_client_2.emitWarningIfUnsupportedVersion)(process.version); + const defaultsMode = (0, util_defaults_mode_node_1.resolveDefaultsModeConfig)(config); + const defaultConfigProvider = () => defaultsMode().then(smithy_client_1.loadConfigsForDefaultMode); + const clientSharedValues = (0, runtimeConfig_shared_1.getRuntimeConfig)(config); + (0, 
core_1.emitWarningIfUnsupportedVersion)(process.version); + const profileConfig = { profile: config?.profile }; + return { + ...clientSharedValues, + ...config, + runtime: "node", + defaultsMode, + authSchemePreference: config?.authSchemePreference ?? (0, node_config_provider_1.loadConfig)(core_1.NODE_AUTH_SCHEME_PREFERENCE_OPTIONS, profileConfig), + bodyLengthChecker: config?.bodyLengthChecker ?? util_body_length_node_1.calculateBodyLength, + defaultUserAgentProvider: config?.defaultUserAgentProvider ?? + (0, util_user_agent_node_1.createDefaultUserAgentProvider)({ serviceId: clientSharedValues.serviceId, clientVersion: package_json_1.default.version }), + maxAttempts: config?.maxAttempts ?? (0, node_config_provider_1.loadConfig)(middleware_retry_1.NODE_MAX_ATTEMPT_CONFIG_OPTIONS, config), + region: config?.region ?? + (0, node_config_provider_1.loadConfig)(config_resolver_1.NODE_REGION_CONFIG_OPTIONS, { ...config_resolver_1.NODE_REGION_CONFIG_FILE_OPTIONS, ...profileConfig }), + requestHandler: node_http_handler_1.NodeHttpHandler.create(config?.requestHandler ?? defaultConfigProvider), + retryMode: config?.retryMode ?? + (0, node_config_provider_1.loadConfig)({ + ...middleware_retry_1.NODE_RETRY_MODE_CONFIG_OPTIONS, + default: async () => (await defaultConfigProvider()).retryMode || util_retry_1.DEFAULT_RETRY_MODE, + }, config), + sha256: config?.sha256 ?? hash_node_1.Hash.bind(null, "sha256"), + streamCollector: config?.streamCollector ?? node_http_handler_1.streamCollector, + useDualstackEndpoint: config?.useDualstackEndpoint ?? (0, node_config_provider_1.loadConfig)(config_resolver_1.NODE_USE_DUALSTACK_ENDPOINT_CONFIG_OPTIONS, profileConfig), + useFipsEndpoint: config?.useFipsEndpoint ?? (0, node_config_provider_1.loadConfig)(config_resolver_1.NODE_USE_FIPS_ENDPOINT_CONFIG_OPTIONS, profileConfig), + userAgentAppId: config?.userAgentAppId ?? 
(0, node_config_provider_1.loadConfig)(util_user_agent_node_1.NODE_APP_ID_CONFIG_OPTIONS, profileConfig), + }; +}; +exports.getRuntimeConfig = getRuntimeConfig; diff --git a/amplify/functions/downloadDocument/node_modules/@aws-sdk/client-sso/dist-cjs/runtimeConfig.native.js b/amplify/functions/downloadDocument/node_modules/@aws-sdk/client-sso/dist-cjs/runtimeConfig.native.js new file mode 100644 index 0000000..34c5f8e --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@aws-sdk/client-sso/dist-cjs/runtimeConfig.native.js @@ -0,0 +1,15 @@ +"use strict"; +Object.defineProperty(exports, "__esModule", { value: true }); +exports.getRuntimeConfig = void 0; +const sha256_js_1 = require("@aws-crypto/sha256-js"); +const runtimeConfig_browser_1 = require("./runtimeConfig.browser"); +const getRuntimeConfig = (config) => { + const browserDefaults = (0, runtimeConfig_browser_1.getRuntimeConfig)(config); + return { + ...browserDefaults, + ...config, + runtime: "react-native", + sha256: config?.sha256 ?? 
sha256_js_1.Sha256, + }; +}; +exports.getRuntimeConfig = getRuntimeConfig; diff --git a/amplify/functions/downloadDocument/node_modules/@aws-sdk/client-sso/dist-cjs/runtimeConfig.shared.js b/amplify/functions/downloadDocument/node_modules/@aws-sdk/client-sso/dist-cjs/runtimeConfig.shared.js new file mode 100644 index 0000000..24a378c --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@aws-sdk/client-sso/dist-cjs/runtimeConfig.shared.js @@ -0,0 +1,40 @@ +"use strict"; +Object.defineProperty(exports, "__esModule", { value: true }); +exports.getRuntimeConfig = void 0; +const core_1 = require("@aws-sdk/core"); +const core_2 = require("@smithy/core"); +const smithy_client_1 = require("@smithy/smithy-client"); +const url_parser_1 = require("@smithy/url-parser"); +const util_base64_1 = require("@smithy/util-base64"); +const util_utf8_1 = require("@smithy/util-utf8"); +const httpAuthSchemeProvider_1 = require("./auth/httpAuthSchemeProvider"); +const endpointResolver_1 = require("./endpoint/endpointResolver"); +const getRuntimeConfig = (config) => { + return { + apiVersion: "2019-06-10", + base64Decoder: config?.base64Decoder ?? util_base64_1.fromBase64, + base64Encoder: config?.base64Encoder ?? util_base64_1.toBase64, + disableHostPrefix: config?.disableHostPrefix ?? false, + endpointProvider: config?.endpointProvider ?? endpointResolver_1.defaultEndpointResolver, + extensions: config?.extensions ?? [], + httpAuthSchemeProvider: config?.httpAuthSchemeProvider ?? httpAuthSchemeProvider_1.defaultSSOHttpAuthSchemeProvider, + httpAuthSchemes: config?.httpAuthSchemes ?? [ + { + schemeId: "aws.auth#sigv4", + identityProvider: (ipc) => ipc.getIdentityProvider("aws.auth#sigv4"), + signer: new core_1.AwsSdkSigV4Signer(), + }, + { + schemeId: "smithy.api#noAuth", + identityProvider: (ipc) => ipc.getIdentityProvider("smithy.api#noAuth") || (async () => ({})), + signer: new core_2.NoAuthSigner(), + }, + ], + logger: config?.logger ?? 
new smithy_client_1.NoOpLogger(), + serviceId: config?.serviceId ?? "SSO", + urlParser: config?.urlParser ?? url_parser_1.parseUrl, + utf8Decoder: config?.utf8Decoder ?? util_utf8_1.fromUtf8, + utf8Encoder: config?.utf8Encoder ?? util_utf8_1.toUtf8, + }; +}; +exports.getRuntimeConfig = getRuntimeConfig; diff --git a/amplify/functions/downloadDocument/node_modules/@aws-sdk/client-sso/dist-es/SSO.js b/amplify/functions/downloadDocument/node_modules/@aws-sdk/client-sso/dist-es/SSO.js new file mode 100644 index 0000000..04d3169 --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@aws-sdk/client-sso/dist-es/SSO.js @@ -0,0 +1,15 @@ +import { createAggregatedClient } from "@smithy/smithy-client"; +import { GetRoleCredentialsCommand, } from "./commands/GetRoleCredentialsCommand"; +import { ListAccountRolesCommand, } from "./commands/ListAccountRolesCommand"; +import { ListAccountsCommand, } from "./commands/ListAccountsCommand"; +import { LogoutCommand } from "./commands/LogoutCommand"; +import { SSOClient } from "./SSOClient"; +const commands = { + GetRoleCredentialsCommand, + ListAccountRolesCommand, + ListAccountsCommand, + LogoutCommand, +}; +export class SSO extends SSOClient { +} +createAggregatedClient(commands, SSO); diff --git a/amplify/functions/downloadDocument/node_modules/@aws-sdk/client-sso/dist-es/SSOClient.js b/amplify/functions/downloadDocument/node_modules/@aws-sdk/client-sso/dist-es/SSOClient.js new file mode 100644 index 0000000..890a848 --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@aws-sdk/client-sso/dist-es/SSOClient.js @@ -0,0 +1,48 @@ +import { getHostHeaderPlugin, resolveHostHeaderConfig, } from "@aws-sdk/middleware-host-header"; +import { getLoggerPlugin } from "@aws-sdk/middleware-logger"; +import { getRecursionDetectionPlugin } from "@aws-sdk/middleware-recursion-detection"; +import { getUserAgentPlugin, resolveUserAgentConfig, } from "@aws-sdk/middleware-user-agent"; +import { resolveRegionConfig } from 
"@smithy/config-resolver"; +import { DefaultIdentityProviderConfig, getHttpAuthSchemeEndpointRuleSetPlugin, getHttpSigningPlugin, } from "@smithy/core"; +import { getContentLengthPlugin } from "@smithy/middleware-content-length"; +import { resolveEndpointConfig } from "@smithy/middleware-endpoint"; +import { getRetryPlugin, resolveRetryConfig } from "@smithy/middleware-retry"; +import { Client as __Client, } from "@smithy/smithy-client"; +import { defaultSSOHttpAuthSchemeParametersProvider, resolveHttpAuthSchemeConfig, } from "./auth/httpAuthSchemeProvider"; +import { resolveClientEndpointParameters, } from "./endpoint/EndpointParameters"; +import { getRuntimeConfig as __getRuntimeConfig } from "./runtimeConfig"; +import { resolveRuntimeExtensions } from "./runtimeExtensions"; +export { __Client }; +export class SSOClient extends __Client { + config; + constructor(...[configuration]) { + const _config_0 = __getRuntimeConfig(configuration || {}); + super(_config_0); + this.initConfig = _config_0; + const _config_1 = resolveClientEndpointParameters(_config_0); + const _config_2 = resolveUserAgentConfig(_config_1); + const _config_3 = resolveRetryConfig(_config_2); + const _config_4 = resolveRegionConfig(_config_3); + const _config_5 = resolveHostHeaderConfig(_config_4); + const _config_6 = resolveEndpointConfig(_config_5); + const _config_7 = resolveHttpAuthSchemeConfig(_config_6); + const _config_8 = resolveRuntimeExtensions(_config_7, configuration?.extensions || []); + this.config = _config_8; + this.middlewareStack.use(getUserAgentPlugin(this.config)); + this.middlewareStack.use(getRetryPlugin(this.config)); + this.middlewareStack.use(getContentLengthPlugin(this.config)); + this.middlewareStack.use(getHostHeaderPlugin(this.config)); + this.middlewareStack.use(getLoggerPlugin(this.config)); + this.middlewareStack.use(getRecursionDetectionPlugin(this.config)); + this.middlewareStack.use(getHttpAuthSchemeEndpointRuleSetPlugin(this.config, { + 
httpAuthSchemeParametersProvider: defaultSSOHttpAuthSchemeParametersProvider, + identityProviderConfigProvider: async (config) => new DefaultIdentityProviderConfig({ + "aws.auth#sigv4": config.credentials, + }), + })); + this.middlewareStack.use(getHttpSigningPlugin(this.config)); + } + destroy() { + super.destroy(); + } +} diff --git a/amplify/functions/downloadDocument/node_modules/@aws-sdk/client-sso/dist-es/auth/httpAuthExtensionConfiguration.js b/amplify/functions/downloadDocument/node_modules/@aws-sdk/client-sso/dist-es/auth/httpAuthExtensionConfiguration.js new file mode 100644 index 0000000..2ba1d48 --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@aws-sdk/client-sso/dist-es/auth/httpAuthExtensionConfiguration.js @@ -0,0 +1,38 @@ +export const getHttpAuthExtensionConfiguration = (runtimeConfig) => { + const _httpAuthSchemes = runtimeConfig.httpAuthSchemes; + let _httpAuthSchemeProvider = runtimeConfig.httpAuthSchemeProvider; + let _credentials = runtimeConfig.credentials; + return { + setHttpAuthScheme(httpAuthScheme) { + const index = _httpAuthSchemes.findIndex((scheme) => scheme.schemeId === httpAuthScheme.schemeId); + if (index === -1) { + _httpAuthSchemes.push(httpAuthScheme); + } + else { + _httpAuthSchemes.splice(index, 1, httpAuthScheme); + } + }, + httpAuthSchemes() { + return _httpAuthSchemes; + }, + setHttpAuthSchemeProvider(httpAuthSchemeProvider) { + _httpAuthSchemeProvider = httpAuthSchemeProvider; + }, + httpAuthSchemeProvider() { + return _httpAuthSchemeProvider; + }, + setCredentials(credentials) { + _credentials = credentials; + }, + credentials() { + return _credentials; + }, + }; +}; +export const resolveHttpAuthRuntimeConfig = (config) => { + return { + httpAuthSchemes: config.httpAuthSchemes(), + httpAuthSchemeProvider: config.httpAuthSchemeProvider(), + credentials: config.credentials(), + }; +}; diff --git a/amplify/functions/downloadDocument/node_modules/@aws-sdk/client-sso/dist-es/auth/httpAuthSchemeProvider.js 
b/amplify/functions/downloadDocument/node_modules/@aws-sdk/client-sso/dist-es/auth/httpAuthSchemeProvider.js new file mode 100644 index 0000000..f7ff90f --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@aws-sdk/client-sso/dist-es/auth/httpAuthSchemeProvider.js @@ -0,0 +1,62 @@ +import { resolveAwsSdkSigV4Config, } from "@aws-sdk/core"; +import { getSmithyContext, normalizeProvider } from "@smithy/util-middleware"; +export const defaultSSOHttpAuthSchemeParametersProvider = async (config, context, input) => { + return { + operation: getSmithyContext(context).operation, + region: (await normalizeProvider(config.region)()) || + (() => { + throw new Error("expected `region` to be configured for `aws.auth#sigv4`"); + })(), + }; +}; +function createAwsAuthSigv4HttpAuthOption(authParameters) { + return { + schemeId: "aws.auth#sigv4", + signingProperties: { + name: "awsssoportal", + region: authParameters.region, + }, + propertiesExtractor: (config, context) => ({ + signingProperties: { + config, + context, + }, + }), + }; +} +function createSmithyApiNoAuthHttpAuthOption(authParameters) { + return { + schemeId: "smithy.api#noAuth", + }; +} +export const defaultSSOHttpAuthSchemeProvider = (authParameters) => { + const options = []; + switch (authParameters.operation) { + case "GetRoleCredentials": { + options.push(createSmithyApiNoAuthHttpAuthOption(authParameters)); + break; + } + case "ListAccountRoles": { + options.push(createSmithyApiNoAuthHttpAuthOption(authParameters)); + break; + } + case "ListAccounts": { + options.push(createSmithyApiNoAuthHttpAuthOption(authParameters)); + break; + } + case "Logout": { + options.push(createSmithyApiNoAuthHttpAuthOption(authParameters)); + break; + } + default: { + options.push(createAwsAuthSigv4HttpAuthOption(authParameters)); + } + } + return options; +}; +export const resolveHttpAuthSchemeConfig = (config) => { + const config_0 = resolveAwsSdkSigV4Config(config); + return Object.assign(config_0, { + 
authSchemePreference: normalizeProvider(config.authSchemePreference ?? []), + }); +}; diff --git a/amplify/functions/downloadDocument/node_modules/@aws-sdk/client-sso/dist-es/commands/GetRoleCredentialsCommand.js b/amplify/functions/downloadDocument/node_modules/@aws-sdk/client-sso/dist-es/commands/GetRoleCredentialsCommand.js new file mode 100644 index 0000000..aa4c2e3 --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@aws-sdk/client-sso/dist-es/commands/GetRoleCredentialsCommand.js @@ -0,0 +1,23 @@ +import { getEndpointPlugin } from "@smithy/middleware-endpoint"; +import { getSerdePlugin } from "@smithy/middleware-serde"; +import { Command as $Command } from "@smithy/smithy-client"; +import { commonParams } from "../endpoint/EndpointParameters"; +import { GetRoleCredentialsRequestFilterSensitiveLog, GetRoleCredentialsResponseFilterSensitiveLog, } from "../models/models_0"; +import { de_GetRoleCredentialsCommand, se_GetRoleCredentialsCommand } from "../protocols/Aws_restJson1"; +export { $Command }; +export class GetRoleCredentialsCommand extends $Command + .classBuilder() + .ep(commonParams) + .m(function (Command, cs, config, o) { + return [ + getSerdePlugin(config, this.serialize, this.deserialize), + getEndpointPlugin(config, Command.getEndpointParameterInstructions()), + ]; +}) + .s("SWBPortalService", "GetRoleCredentials", {}) + .n("SSOClient", "GetRoleCredentialsCommand") + .f(GetRoleCredentialsRequestFilterSensitiveLog, GetRoleCredentialsResponseFilterSensitiveLog) + .ser(se_GetRoleCredentialsCommand) + .de(de_GetRoleCredentialsCommand) + .build() { +} diff --git a/amplify/functions/downloadDocument/node_modules/@aws-sdk/client-sso/dist-es/commands/ListAccountRolesCommand.js b/amplify/functions/downloadDocument/node_modules/@aws-sdk/client-sso/dist-es/commands/ListAccountRolesCommand.js new file mode 100644 index 0000000..d5bcc14 --- /dev/null +++ 
b/amplify/functions/downloadDocument/node_modules/@aws-sdk/client-sso/dist-es/commands/ListAccountRolesCommand.js @@ -0,0 +1,23 @@ +import { getEndpointPlugin } from "@smithy/middleware-endpoint"; +import { getSerdePlugin } from "@smithy/middleware-serde"; +import { Command as $Command } from "@smithy/smithy-client"; +import { commonParams } from "../endpoint/EndpointParameters"; +import { ListAccountRolesRequestFilterSensitiveLog, } from "../models/models_0"; +import { de_ListAccountRolesCommand, se_ListAccountRolesCommand } from "../protocols/Aws_restJson1"; +export { $Command }; +export class ListAccountRolesCommand extends $Command + .classBuilder() + .ep(commonParams) + .m(function (Command, cs, config, o) { + return [ + getSerdePlugin(config, this.serialize, this.deserialize), + getEndpointPlugin(config, Command.getEndpointParameterInstructions()), + ]; +}) + .s("SWBPortalService", "ListAccountRoles", {}) + .n("SSOClient", "ListAccountRolesCommand") + .f(ListAccountRolesRequestFilterSensitiveLog, void 0) + .ser(se_ListAccountRolesCommand) + .de(de_ListAccountRolesCommand) + .build() { +} diff --git a/amplify/functions/downloadDocument/node_modules/@aws-sdk/client-sso/dist-es/commands/ListAccountsCommand.js b/amplify/functions/downloadDocument/node_modules/@aws-sdk/client-sso/dist-es/commands/ListAccountsCommand.js new file mode 100644 index 0000000..d4ab8ba --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@aws-sdk/client-sso/dist-es/commands/ListAccountsCommand.js @@ -0,0 +1,23 @@ +import { getEndpointPlugin } from "@smithy/middleware-endpoint"; +import { getSerdePlugin } from "@smithy/middleware-serde"; +import { Command as $Command } from "@smithy/smithy-client"; +import { commonParams } from "../endpoint/EndpointParameters"; +import { ListAccountsRequestFilterSensitiveLog } from "../models/models_0"; +import { de_ListAccountsCommand, se_ListAccountsCommand } from "../protocols/Aws_restJson1"; +export { $Command }; +export class 
ListAccountsCommand extends $Command + .classBuilder() + .ep(commonParams) + .m(function (Command, cs, config, o) { + return [ + getSerdePlugin(config, this.serialize, this.deserialize), + getEndpointPlugin(config, Command.getEndpointParameterInstructions()), + ]; +}) + .s("SWBPortalService", "ListAccounts", {}) + .n("SSOClient", "ListAccountsCommand") + .f(ListAccountsRequestFilterSensitiveLog, void 0) + .ser(se_ListAccountsCommand) + .de(de_ListAccountsCommand) + .build() { +} diff --git a/amplify/functions/downloadDocument/node_modules/@aws-sdk/client-sso/dist-es/commands/LogoutCommand.js b/amplify/functions/downloadDocument/node_modules/@aws-sdk/client-sso/dist-es/commands/LogoutCommand.js new file mode 100644 index 0000000..29a37ed --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@aws-sdk/client-sso/dist-es/commands/LogoutCommand.js @@ -0,0 +1,23 @@ +import { getEndpointPlugin } from "@smithy/middleware-endpoint"; +import { getSerdePlugin } from "@smithy/middleware-serde"; +import { Command as $Command } from "@smithy/smithy-client"; +import { commonParams } from "../endpoint/EndpointParameters"; +import { LogoutRequestFilterSensitiveLog } from "../models/models_0"; +import { de_LogoutCommand, se_LogoutCommand } from "../protocols/Aws_restJson1"; +export { $Command }; +export class LogoutCommand extends $Command + .classBuilder() + .ep(commonParams) + .m(function (Command, cs, config, o) { + return [ + getSerdePlugin(config, this.serialize, this.deserialize), + getEndpointPlugin(config, Command.getEndpointParameterInstructions()), + ]; +}) + .s("SWBPortalService", "Logout", {}) + .n("SSOClient", "LogoutCommand") + .f(LogoutRequestFilterSensitiveLog, void 0) + .ser(se_LogoutCommand) + .de(de_LogoutCommand) + .build() { +} diff --git a/amplify/functions/downloadDocument/node_modules/@aws-sdk/client-sso/dist-es/commands/index.js b/amplify/functions/downloadDocument/node_modules/@aws-sdk/client-sso/dist-es/commands/index.js new file mode 100644 
index 0000000..0ab890d --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@aws-sdk/client-sso/dist-es/commands/index.js @@ -0,0 +1,4 @@ +export * from "./GetRoleCredentialsCommand"; +export * from "./ListAccountRolesCommand"; +export * from "./ListAccountsCommand"; +export * from "./LogoutCommand"; diff --git a/amplify/functions/downloadDocument/node_modules/@aws-sdk/client-sso/dist-es/endpoint/EndpointParameters.js b/amplify/functions/downloadDocument/node_modules/@aws-sdk/client-sso/dist-es/endpoint/EndpointParameters.js new file mode 100644 index 0000000..77e34f8 --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@aws-sdk/client-sso/dist-es/endpoint/EndpointParameters.js @@ -0,0 +1,13 @@ +export const resolveClientEndpointParameters = (options) => { + return Object.assign(options, { + useDualstackEndpoint: options.useDualstackEndpoint ?? false, + useFipsEndpoint: options.useFipsEndpoint ?? false, + defaultSigningName: "awsssoportal", + }); +}; +export const commonParams = { + UseFIPS: { type: "builtInParams", name: "useFipsEndpoint" }, + Endpoint: { type: "builtInParams", name: "endpoint" }, + Region: { type: "builtInParams", name: "region" }, + UseDualStack: { type: "builtInParams", name: "useDualstackEndpoint" }, +}; diff --git a/amplify/functions/downloadDocument/node_modules/@aws-sdk/client-sso/dist-es/endpoint/endpointResolver.js b/amplify/functions/downloadDocument/node_modules/@aws-sdk/client-sso/dist-es/endpoint/endpointResolver.js new file mode 100644 index 0000000..0ac15bc --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@aws-sdk/client-sso/dist-es/endpoint/endpointResolver.js @@ -0,0 +1,14 @@ +import { awsEndpointFunctions } from "@aws-sdk/util-endpoints"; +import { customEndpointFunctions, EndpointCache, resolveEndpoint } from "@smithy/util-endpoints"; +import { ruleSet } from "./ruleset"; +const cache = new EndpointCache({ + size: 50, + params: ["Endpoint", "Region", "UseDualStack", "UseFIPS"], 
+}); +export const defaultEndpointResolver = (endpointParams, context = {}) => { + return cache.get(endpointParams, () => resolveEndpoint(ruleSet, { + endpointParams: endpointParams, + logger: context.logger, + })); +}; +customEndpointFunctions.aws = awsEndpointFunctions; diff --git a/amplify/functions/downloadDocument/node_modules/@aws-sdk/client-sso/dist-es/endpoint/ruleset.js b/amplify/functions/downloadDocument/node_modules/@aws-sdk/client-sso/dist-es/endpoint/ruleset.js new file mode 100644 index 0000000..c48673d --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@aws-sdk/client-sso/dist-es/endpoint/ruleset.js @@ -0,0 +1,4 @@ +const u = "required", v = "fn", w = "argv", x = "ref"; +const a = true, b = "isSet", c = "booleanEquals", d = "error", e = "endpoint", f = "tree", g = "PartitionResult", h = "getAttr", i = { [u]: false, "type": "String" }, j = { [u]: true, "default": false, "type": "Boolean" }, k = { [x]: "Endpoint" }, l = { [v]: c, [w]: [{ [x]: "UseFIPS" }, true] }, m = { [v]: c, [w]: [{ [x]: "UseDualStack" }, true] }, n = {}, o = { [v]: h, [w]: [{ [x]: g }, "supportsFIPS"] }, p = { [x]: g }, q = { [v]: c, [w]: [true, { [v]: h, [w]: [p, "supportsDualStack"] }] }, r = [l], s = [m], t = [{ [x]: "Region" }]; +const _data = { version: "1.0", parameters: { Region: i, UseDualStack: j, UseFIPS: j, Endpoint: i }, rules: [{ conditions: [{ [v]: b, [w]: [k] }], rules: [{ conditions: r, error: "Invalid Configuration: FIPS and custom endpoint are not supported", type: d }, { conditions: s, error: "Invalid Configuration: Dualstack and custom endpoint are not supported", type: d }, { endpoint: { url: k, properties: n, headers: n }, type: e }], type: f }, { conditions: [{ [v]: b, [w]: t }], rules: [{ conditions: [{ [v]: "aws.partition", [w]: t, assign: g }], rules: [{ conditions: [l, m], rules: [{ conditions: [{ [v]: c, [w]: [a, o] }, q], rules: [{ endpoint: { url: "https://portal.sso-fips.{Region}.{PartitionResult#dualStackDnsSuffix}", properties: n, 
headers: n }, type: e }], type: f }, { error: "FIPS and DualStack are enabled, but this partition does not support one or both", type: d }], type: f }, { conditions: r, rules: [{ conditions: [{ [v]: c, [w]: [o, a] }], rules: [{ conditions: [{ [v]: "stringEquals", [w]: [{ [v]: h, [w]: [p, "name"] }, "aws-us-gov"] }], endpoint: { url: "https://portal.sso.{Region}.amazonaws.com", properties: n, headers: n }, type: e }, { endpoint: { url: "https://portal.sso-fips.{Region}.{PartitionResult#dnsSuffix}", properties: n, headers: n }, type: e }], type: f }, { error: "FIPS is enabled but this partition does not support FIPS", type: d }], type: f }, { conditions: s, rules: [{ conditions: [q], rules: [{ endpoint: { url: "https://portal.sso.{Region}.{PartitionResult#dualStackDnsSuffix}", properties: n, headers: n }, type: e }], type: f }, { error: "DualStack is enabled but this partition does not support DualStack", type: d }], type: f }, { endpoint: { url: "https://portal.sso.{Region}.{PartitionResult#dnsSuffix}", properties: n, headers: n }, type: e }], type: f }], type: f }, { error: "Invalid Configuration: Missing Region", type: d }] }; +export const ruleSet = _data; diff --git a/amplify/functions/downloadDocument/node_modules/@aws-sdk/client-sso/dist-es/extensionConfiguration.js b/amplify/functions/downloadDocument/node_modules/@aws-sdk/client-sso/dist-es/extensionConfiguration.js new file mode 100644 index 0000000..cb0ff5c --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@aws-sdk/client-sso/dist-es/extensionConfiguration.js @@ -0,0 +1 @@ +export {}; diff --git a/amplify/functions/downloadDocument/node_modules/@aws-sdk/client-sso/dist-es/index.js b/amplify/functions/downloadDocument/node_modules/@aws-sdk/client-sso/dist-es/index.js new file mode 100644 index 0000000..b297556 --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@aws-sdk/client-sso/dist-es/index.js @@ -0,0 +1,6 @@ +export * from "./SSOClient"; +export * from "./SSO"; +export 
* from "./commands"; +export * from "./pagination"; +export * from "./models"; +export { SSOServiceException } from "./models/SSOServiceException"; diff --git a/amplify/functions/downloadDocument/node_modules/@aws-sdk/client-sso/dist-es/models/SSOServiceException.js b/amplify/functions/downloadDocument/node_modules/@aws-sdk/client-sso/dist-es/models/SSOServiceException.js new file mode 100644 index 0000000..fa5d8fb --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@aws-sdk/client-sso/dist-es/models/SSOServiceException.js @@ -0,0 +1,8 @@ +import { ServiceException as __ServiceException, } from "@smithy/smithy-client"; +export { __ServiceException }; +export class SSOServiceException extends __ServiceException { + constructor(options) { + super(options); + Object.setPrototypeOf(this, SSOServiceException.prototype); + } +} diff --git a/amplify/functions/downloadDocument/node_modules/@aws-sdk/client-sso/dist-es/models/index.js b/amplify/functions/downloadDocument/node_modules/@aws-sdk/client-sso/dist-es/models/index.js new file mode 100644 index 0000000..09c5d6e --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@aws-sdk/client-sso/dist-es/models/index.js @@ -0,0 +1 @@ +export * from "./models_0"; diff --git a/amplify/functions/downloadDocument/node_modules/@aws-sdk/client-sso/dist-es/models/models_0.js b/amplify/functions/downloadDocument/node_modules/@aws-sdk/client-sso/dist-es/models/models_0.js new file mode 100644 index 0000000..56ec16d --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@aws-sdk/client-sso/dist-es/models/models_0.js @@ -0,0 +1,75 @@ +import { SENSITIVE_STRING } from "@smithy/smithy-client"; +import { SSOServiceException as __BaseException } from "./SSOServiceException"; +export class InvalidRequestException extends __BaseException { + name = "InvalidRequestException"; + $fault = "client"; + constructor(opts) { + super({ + name: "InvalidRequestException", + $fault: "client", + ...opts, + }); + 
Object.setPrototypeOf(this, InvalidRequestException.prototype); + } +} +export class ResourceNotFoundException extends __BaseException { + name = "ResourceNotFoundException"; + $fault = "client"; + constructor(opts) { + super({ + name: "ResourceNotFoundException", + $fault: "client", + ...opts, + }); + Object.setPrototypeOf(this, ResourceNotFoundException.prototype); + } +} +export class TooManyRequestsException extends __BaseException { + name = "TooManyRequestsException"; + $fault = "client"; + constructor(opts) { + super({ + name: "TooManyRequestsException", + $fault: "client", + ...opts, + }); + Object.setPrototypeOf(this, TooManyRequestsException.prototype); + } +} +export class UnauthorizedException extends __BaseException { + name = "UnauthorizedException"; + $fault = "client"; + constructor(opts) { + super({ + name: "UnauthorizedException", + $fault: "client", + ...opts, + }); + Object.setPrototypeOf(this, UnauthorizedException.prototype); + } +} +export const GetRoleCredentialsRequestFilterSensitiveLog = (obj) => ({ + ...obj, + ...(obj.accessToken && { accessToken: SENSITIVE_STRING }), +}); +export const RoleCredentialsFilterSensitiveLog = (obj) => ({ + ...obj, + ...(obj.secretAccessKey && { secretAccessKey: SENSITIVE_STRING }), + ...(obj.sessionToken && { sessionToken: SENSITIVE_STRING }), +}); +export const GetRoleCredentialsResponseFilterSensitiveLog = (obj) => ({ + ...obj, + ...(obj.roleCredentials && { roleCredentials: RoleCredentialsFilterSensitiveLog(obj.roleCredentials) }), +}); +export const ListAccountRolesRequestFilterSensitiveLog = (obj) => ({ + ...obj, + ...(obj.accessToken && { accessToken: SENSITIVE_STRING }), +}); +export const ListAccountsRequestFilterSensitiveLog = (obj) => ({ + ...obj, + ...(obj.accessToken && { accessToken: SENSITIVE_STRING }), +}); +export const LogoutRequestFilterSensitiveLog = (obj) => ({ + ...obj, + ...(obj.accessToken && { accessToken: SENSITIVE_STRING }), +}); diff --git 
a/amplify/functions/downloadDocument/node_modules/@aws-sdk/client-sso/dist-es/pagination/Interfaces.js b/amplify/functions/downloadDocument/node_modules/@aws-sdk/client-sso/dist-es/pagination/Interfaces.js new file mode 100644 index 0000000..cb0ff5c --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@aws-sdk/client-sso/dist-es/pagination/Interfaces.js @@ -0,0 +1 @@ +export {}; diff --git a/amplify/functions/downloadDocument/node_modules/@aws-sdk/client-sso/dist-es/pagination/ListAccountRolesPaginator.js b/amplify/functions/downloadDocument/node_modules/@aws-sdk/client-sso/dist-es/pagination/ListAccountRolesPaginator.js new file mode 100644 index 0000000..b18c3a8 --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@aws-sdk/client-sso/dist-es/pagination/ListAccountRolesPaginator.js @@ -0,0 +1,4 @@ +import { createPaginator } from "@smithy/core"; +import { ListAccountRolesCommand, } from "../commands/ListAccountRolesCommand"; +import { SSOClient } from "../SSOClient"; +export const paginateListAccountRoles = createPaginator(SSOClient, ListAccountRolesCommand, "nextToken", "nextToken", "maxResults"); diff --git a/amplify/functions/downloadDocument/node_modules/@aws-sdk/client-sso/dist-es/pagination/ListAccountsPaginator.js b/amplify/functions/downloadDocument/node_modules/@aws-sdk/client-sso/dist-es/pagination/ListAccountsPaginator.js new file mode 100644 index 0000000..342c663 --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@aws-sdk/client-sso/dist-es/pagination/ListAccountsPaginator.js @@ -0,0 +1,4 @@ +import { createPaginator } from "@smithy/core"; +import { ListAccountsCommand, } from "../commands/ListAccountsCommand"; +import { SSOClient } from "../SSOClient"; +export const paginateListAccounts = createPaginator(SSOClient, ListAccountsCommand, "nextToken", "nextToken", "maxResults"); diff --git a/amplify/functions/downloadDocument/node_modules/@aws-sdk/client-sso/dist-es/pagination/index.js 
b/amplify/functions/downloadDocument/node_modules/@aws-sdk/client-sso/dist-es/pagination/index.js new file mode 100644 index 0000000..1e7866f --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@aws-sdk/client-sso/dist-es/pagination/index.js @@ -0,0 +1,3 @@ +export * from "./Interfaces"; +export * from "./ListAccountRolesPaginator"; +export * from "./ListAccountsPaginator"; diff --git a/amplify/functions/downloadDocument/node_modules/@aws-sdk/client-sso/dist-es/protocols/Aws_restJson1.js b/amplify/functions/downloadDocument/node_modules/@aws-sdk/client-sso/dist-es/protocols/Aws_restJson1.js new file mode 100644 index 0000000..11b1892 --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@aws-sdk/client-sso/dist-es/protocols/Aws_restJson1.js @@ -0,0 +1,210 @@ +import { loadRestJsonErrorCode, parseJsonBody as parseBody, parseJsonErrorBody as parseErrorBody } from "@aws-sdk/core"; +import { requestBuilder as rb } from "@smithy/core"; +import { _json, collectBody, decorateServiceException as __decorateServiceException, expectNonNull as __expectNonNull, expectObject as __expectObject, expectString as __expectString, isSerializableHeaderValue, map, take, withBaseException, } from "@smithy/smithy-client"; +import { InvalidRequestException, ResourceNotFoundException, TooManyRequestsException, UnauthorizedException, } from "../models/models_0"; +import { SSOServiceException as __BaseException } from "../models/SSOServiceException"; +export const se_GetRoleCredentialsCommand = async (input, context) => { + const b = rb(input, context); + const headers = map({}, isSerializableHeaderValue, { + [_xasbt]: input[_aT], + }); + b.bp("/federation/credentials"); + const query = map({ + [_rn]: [, __expectNonNull(input[_rN], `roleName`)], + [_ai]: [, __expectNonNull(input[_aI], `accountId`)], + }); + let body; + b.m("GET").h(headers).q(query).b(body); + return b.build(); +}; +export const se_ListAccountRolesCommand = async (input, context) => { + const b = 
rb(input, context); + const headers = map({}, isSerializableHeaderValue, { + [_xasbt]: input[_aT], + }); + b.bp("/assignment/roles"); + const query = map({ + [_nt]: [, input[_nT]], + [_mr]: [() => input.maxResults !== void 0, () => input[_mR].toString()], + [_ai]: [, __expectNonNull(input[_aI], `accountId`)], + }); + let body; + b.m("GET").h(headers).q(query).b(body); + return b.build(); +}; +export const se_ListAccountsCommand = async (input, context) => { + const b = rb(input, context); + const headers = map({}, isSerializableHeaderValue, { + [_xasbt]: input[_aT], + }); + b.bp("/assignment/accounts"); + const query = map({ + [_nt]: [, input[_nT]], + [_mr]: [() => input.maxResults !== void 0, () => input[_mR].toString()], + }); + let body; + b.m("GET").h(headers).q(query).b(body); + return b.build(); +}; +export const se_LogoutCommand = async (input, context) => { + const b = rb(input, context); + const headers = map({}, isSerializableHeaderValue, { + [_xasbt]: input[_aT], + }); + b.bp("/logout"); + let body; + b.m("POST").h(headers).b(body); + return b.build(); +}; +export const de_GetRoleCredentialsCommand = async (output, context) => { + if (output.statusCode !== 200 && output.statusCode >= 300) { + return de_CommandError(output, context); + } + const contents = map({ + $metadata: deserializeMetadata(output), + }); + const data = __expectNonNull(__expectObject(await parseBody(output.body, context)), "body"); + const doc = take(data, { + roleCredentials: _json, + }); + Object.assign(contents, doc); + return contents; +}; +export const de_ListAccountRolesCommand = async (output, context) => { + if (output.statusCode !== 200 && output.statusCode >= 300) { + return de_CommandError(output, context); + } + const contents = map({ + $metadata: deserializeMetadata(output), + }); + const data = __expectNonNull(__expectObject(await parseBody(output.body, context)), "body"); + const doc = take(data, { + nextToken: __expectString, + roleList: _json, + }); + 
Object.assign(contents, doc); + return contents; +}; +export const de_ListAccountsCommand = async (output, context) => { + if (output.statusCode !== 200 && output.statusCode >= 300) { + return de_CommandError(output, context); + } + const contents = map({ + $metadata: deserializeMetadata(output), + }); + const data = __expectNonNull(__expectObject(await parseBody(output.body, context)), "body"); + const doc = take(data, { + accountList: _json, + nextToken: __expectString, + }); + Object.assign(contents, doc); + return contents; +}; +export const de_LogoutCommand = async (output, context) => { + if (output.statusCode !== 200 && output.statusCode >= 300) { + return de_CommandError(output, context); + } + const contents = map({ + $metadata: deserializeMetadata(output), + }); + await collectBody(output.body, context); + return contents; +}; +const de_CommandError = async (output, context) => { + const parsedOutput = { + ...output, + body: await parseErrorBody(output.body, context), + }; + const errorCode = loadRestJsonErrorCode(output, parsedOutput.body); + switch (errorCode) { + case "InvalidRequestException": + case "com.amazonaws.sso#InvalidRequestException": + throw await de_InvalidRequestExceptionRes(parsedOutput, context); + case "ResourceNotFoundException": + case "com.amazonaws.sso#ResourceNotFoundException": + throw await de_ResourceNotFoundExceptionRes(parsedOutput, context); + case "TooManyRequestsException": + case "com.amazonaws.sso#TooManyRequestsException": + throw await de_TooManyRequestsExceptionRes(parsedOutput, context); + case "UnauthorizedException": + case "com.amazonaws.sso#UnauthorizedException": + throw await de_UnauthorizedExceptionRes(parsedOutput, context); + default: + const parsedBody = parsedOutput.body; + return throwDefaultError({ + output, + parsedBody, + errorCode, + }); + } +}; +const throwDefaultError = withBaseException(__BaseException); +const de_InvalidRequestExceptionRes = async (parsedOutput, context) => { + const contents = 
map({}); + const data = parsedOutput.body; + const doc = take(data, { + message: __expectString, + }); + Object.assign(contents, doc); + const exception = new InvalidRequestException({ + $metadata: deserializeMetadata(parsedOutput), + ...contents, + }); + return __decorateServiceException(exception, parsedOutput.body); +}; +const de_ResourceNotFoundExceptionRes = async (parsedOutput, context) => { + const contents = map({}); + const data = parsedOutput.body; + const doc = take(data, { + message: __expectString, + }); + Object.assign(contents, doc); + const exception = new ResourceNotFoundException({ + $metadata: deserializeMetadata(parsedOutput), + ...contents, + }); + return __decorateServiceException(exception, parsedOutput.body); +}; +const de_TooManyRequestsExceptionRes = async (parsedOutput, context) => { + const contents = map({}); + const data = parsedOutput.body; + const doc = take(data, { + message: __expectString, + }); + Object.assign(contents, doc); + const exception = new TooManyRequestsException({ + $metadata: deserializeMetadata(parsedOutput), + ...contents, + }); + return __decorateServiceException(exception, parsedOutput.body); +}; +const de_UnauthorizedExceptionRes = async (parsedOutput, context) => { + const contents = map({}); + const data = parsedOutput.body; + const doc = take(data, { + message: __expectString, + }); + Object.assign(contents, doc); + const exception = new UnauthorizedException({ + $metadata: deserializeMetadata(parsedOutput), + ...contents, + }); + return __decorateServiceException(exception, parsedOutput.body); +}; +const deserializeMetadata = (output) => ({ + httpStatusCode: output.statusCode, + requestId: output.headers["x-amzn-requestid"] ?? output.headers["x-amzn-request-id"] ?? 
output.headers["x-amz-request-id"], + extendedRequestId: output.headers["x-amz-id-2"], + cfId: output.headers["x-amz-cf-id"], +}); +const collectBodyString = (streamBody, context) => collectBody(streamBody, context).then((body) => context.utf8Encoder(body)); +const _aI = "accountId"; +const _aT = "accessToken"; +const _ai = "account_id"; +const _mR = "maxResults"; +const _mr = "max_result"; +const _nT = "nextToken"; +const _nt = "next_token"; +const _rN = "roleName"; +const _rn = "role_name"; +const _xasbt = "x-amz-sso_bearer_token"; diff --git a/amplify/functions/downloadDocument/node_modules/@aws-sdk/client-sso/dist-es/runtimeConfig.browser.js b/amplify/functions/downloadDocument/node_modules/@aws-sdk/client-sso/dist-es/runtimeConfig.browser.js new file mode 100644 index 0000000..7c8fe85 --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@aws-sdk/client-sso/dist-es/runtimeConfig.browser.js @@ -0,0 +1,33 @@ +import packageInfo from "../package.json"; +import { Sha256 } from "@aws-crypto/sha256-browser"; +import { createDefaultUserAgentProvider } from "@aws-sdk/util-user-agent-browser"; +import { DEFAULT_USE_DUALSTACK_ENDPOINT, DEFAULT_USE_FIPS_ENDPOINT } from "@smithy/config-resolver"; +import { FetchHttpHandler as RequestHandler, streamCollector } from "@smithy/fetch-http-handler"; +import { invalidProvider } from "@smithy/invalid-dependency"; +import { calculateBodyLength } from "@smithy/util-body-length-browser"; +import { DEFAULT_MAX_ATTEMPTS, DEFAULT_RETRY_MODE } from "@smithy/util-retry"; +import { getRuntimeConfig as getSharedRuntimeConfig } from "./runtimeConfig.shared"; +import { loadConfigsForDefaultMode } from "@smithy/smithy-client"; +import { resolveDefaultsModeConfig } from "@smithy/util-defaults-mode-browser"; +export const getRuntimeConfig = (config) => { + const defaultsMode = resolveDefaultsModeConfig(config); + const defaultConfigProvider = () => defaultsMode().then(loadConfigsForDefaultMode); + const clientSharedValues = 
getSharedRuntimeConfig(config); + return { + ...clientSharedValues, + ...config, + runtime: "browser", + defaultsMode, + bodyLengthChecker: config?.bodyLengthChecker ?? calculateBodyLength, + defaultUserAgentProvider: config?.defaultUserAgentProvider ?? + createDefaultUserAgentProvider({ serviceId: clientSharedValues.serviceId, clientVersion: packageInfo.version }), + maxAttempts: config?.maxAttempts ?? DEFAULT_MAX_ATTEMPTS, + region: config?.region ?? invalidProvider("Region is missing"), + requestHandler: RequestHandler.create(config?.requestHandler ?? defaultConfigProvider), + retryMode: config?.retryMode ?? (async () => (await defaultConfigProvider()).retryMode || DEFAULT_RETRY_MODE), + sha256: config?.sha256 ?? Sha256, + streamCollector: config?.streamCollector ?? streamCollector, + useDualstackEndpoint: config?.useDualstackEndpoint ?? (() => Promise.resolve(DEFAULT_USE_DUALSTACK_ENDPOINT)), + useFipsEndpoint: config?.useFipsEndpoint ?? (() => Promise.resolve(DEFAULT_USE_FIPS_ENDPOINT)), + }; +}; diff --git a/amplify/functions/downloadDocument/node_modules/@aws-sdk/client-sso/dist-es/runtimeConfig.js b/amplify/functions/downloadDocument/node_modules/@aws-sdk/client-sso/dist-es/runtimeConfig.js new file mode 100644 index 0000000..d8440b7 --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@aws-sdk/client-sso/dist-es/runtimeConfig.js @@ -0,0 +1,46 @@ +import packageInfo from "../package.json"; +import { NODE_AUTH_SCHEME_PREFERENCE_OPTIONS, emitWarningIfUnsupportedVersion as awsCheckVersion } from "@aws-sdk/core"; +import { NODE_APP_ID_CONFIG_OPTIONS, createDefaultUserAgentProvider } from "@aws-sdk/util-user-agent-node"; +import { NODE_REGION_CONFIG_FILE_OPTIONS, NODE_REGION_CONFIG_OPTIONS, NODE_USE_DUALSTACK_ENDPOINT_CONFIG_OPTIONS, NODE_USE_FIPS_ENDPOINT_CONFIG_OPTIONS, } from "@smithy/config-resolver"; +import { Hash } from "@smithy/hash-node"; +import { NODE_MAX_ATTEMPT_CONFIG_OPTIONS, NODE_RETRY_MODE_CONFIG_OPTIONS } from 
"@smithy/middleware-retry"; +import { loadConfig as loadNodeConfig } from "@smithy/node-config-provider"; +import { NodeHttpHandler as RequestHandler, streamCollector } from "@smithy/node-http-handler"; +import { calculateBodyLength } from "@smithy/util-body-length-node"; +import { DEFAULT_RETRY_MODE } from "@smithy/util-retry"; +import { getRuntimeConfig as getSharedRuntimeConfig } from "./runtimeConfig.shared"; +import { loadConfigsForDefaultMode } from "@smithy/smithy-client"; +import { resolveDefaultsModeConfig } from "@smithy/util-defaults-mode-node"; +import { emitWarningIfUnsupportedVersion } from "@smithy/smithy-client"; +export const getRuntimeConfig = (config) => { + emitWarningIfUnsupportedVersion(process.version); + const defaultsMode = resolveDefaultsModeConfig(config); + const defaultConfigProvider = () => defaultsMode().then(loadConfigsForDefaultMode); + const clientSharedValues = getSharedRuntimeConfig(config); + awsCheckVersion(process.version); + const profileConfig = { profile: config?.profile }; + return { + ...clientSharedValues, + ...config, + runtime: "node", + defaultsMode, + authSchemePreference: config?.authSchemePreference ?? loadNodeConfig(NODE_AUTH_SCHEME_PREFERENCE_OPTIONS, profileConfig), + bodyLengthChecker: config?.bodyLengthChecker ?? calculateBodyLength, + defaultUserAgentProvider: config?.defaultUserAgentProvider ?? + createDefaultUserAgentProvider({ serviceId: clientSharedValues.serviceId, clientVersion: packageInfo.version }), + maxAttempts: config?.maxAttempts ?? loadNodeConfig(NODE_MAX_ATTEMPT_CONFIG_OPTIONS, config), + region: config?.region ?? + loadNodeConfig(NODE_REGION_CONFIG_OPTIONS, { ...NODE_REGION_CONFIG_FILE_OPTIONS, ...profileConfig }), + requestHandler: RequestHandler.create(config?.requestHandler ?? defaultConfigProvider), + retryMode: config?.retryMode ?? 
+ loadNodeConfig({ + ...NODE_RETRY_MODE_CONFIG_OPTIONS, + default: async () => (await defaultConfigProvider()).retryMode || DEFAULT_RETRY_MODE, + }, config), + sha256: config?.sha256 ?? Hash.bind(null, "sha256"), + streamCollector: config?.streamCollector ?? streamCollector, + useDualstackEndpoint: config?.useDualstackEndpoint ?? loadNodeConfig(NODE_USE_DUALSTACK_ENDPOINT_CONFIG_OPTIONS, profileConfig), + useFipsEndpoint: config?.useFipsEndpoint ?? loadNodeConfig(NODE_USE_FIPS_ENDPOINT_CONFIG_OPTIONS, profileConfig), + userAgentAppId: config?.userAgentAppId ?? loadNodeConfig(NODE_APP_ID_CONFIG_OPTIONS, profileConfig), + }; +}; diff --git a/amplify/functions/downloadDocument/node_modules/@aws-sdk/client-sso/dist-es/runtimeConfig.native.js b/amplify/functions/downloadDocument/node_modules/@aws-sdk/client-sso/dist-es/runtimeConfig.native.js new file mode 100644 index 0000000..0b54695 --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@aws-sdk/client-sso/dist-es/runtimeConfig.native.js @@ -0,0 +1,11 @@ +import { Sha256 } from "@aws-crypto/sha256-js"; +import { getRuntimeConfig as getBrowserRuntimeConfig } from "./runtimeConfig.browser"; +export const getRuntimeConfig = (config) => { + const browserDefaults = getBrowserRuntimeConfig(config); + return { + ...browserDefaults, + ...config, + runtime: "react-native", + sha256: config?.sha256 ?? 
Sha256, + }; +}; diff --git a/amplify/functions/downloadDocument/node_modules/@aws-sdk/client-sso/dist-es/runtimeConfig.shared.js b/amplify/functions/downloadDocument/node_modules/@aws-sdk/client-sso/dist-es/runtimeConfig.shared.js new file mode 100644 index 0000000..3dfac58 --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@aws-sdk/client-sso/dist-es/runtimeConfig.shared.js @@ -0,0 +1,36 @@ +import { AwsSdkSigV4Signer } from "@aws-sdk/core"; +import { NoAuthSigner } from "@smithy/core"; +import { NoOpLogger } from "@smithy/smithy-client"; +import { parseUrl } from "@smithy/url-parser"; +import { fromBase64, toBase64 } from "@smithy/util-base64"; +import { fromUtf8, toUtf8 } from "@smithy/util-utf8"; +import { defaultSSOHttpAuthSchemeProvider } from "./auth/httpAuthSchemeProvider"; +import { defaultEndpointResolver } from "./endpoint/endpointResolver"; +export const getRuntimeConfig = (config) => { + return { + apiVersion: "2019-06-10", + base64Decoder: config?.base64Decoder ?? fromBase64, + base64Encoder: config?.base64Encoder ?? toBase64, + disableHostPrefix: config?.disableHostPrefix ?? false, + endpointProvider: config?.endpointProvider ?? defaultEndpointResolver, + extensions: config?.extensions ?? [], + httpAuthSchemeProvider: config?.httpAuthSchemeProvider ?? defaultSSOHttpAuthSchemeProvider, + httpAuthSchemes: config?.httpAuthSchemes ?? [ + { + schemeId: "aws.auth#sigv4", + identityProvider: (ipc) => ipc.getIdentityProvider("aws.auth#sigv4"), + signer: new AwsSdkSigV4Signer(), + }, + { + schemeId: "smithy.api#noAuth", + identityProvider: (ipc) => ipc.getIdentityProvider("smithy.api#noAuth") || (async () => ({})), + signer: new NoAuthSigner(), + }, + ], + logger: config?.logger ?? new NoOpLogger(), + serviceId: config?.serviceId ?? "SSO", + urlParser: config?.urlParser ?? parseUrl, + utf8Decoder: config?.utf8Decoder ?? fromUtf8, + utf8Encoder: config?.utf8Encoder ?? 
toUtf8, + }; +}; diff --git a/amplify/functions/downloadDocument/node_modules/@aws-sdk/client-sso/dist-es/runtimeExtensions.js b/amplify/functions/downloadDocument/node_modules/@aws-sdk/client-sso/dist-es/runtimeExtensions.js new file mode 100644 index 0000000..5b29695 --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@aws-sdk/client-sso/dist-es/runtimeExtensions.js @@ -0,0 +1,9 @@ +import { getAwsRegionExtensionConfiguration, resolveAwsRegionExtensionConfiguration, } from "@aws-sdk/region-config-resolver"; +import { getHttpHandlerExtensionConfiguration, resolveHttpHandlerRuntimeConfig } from "@smithy/protocol-http"; +import { getDefaultExtensionConfiguration, resolveDefaultRuntimeConfig } from "@smithy/smithy-client"; +import { getHttpAuthExtensionConfiguration, resolveHttpAuthRuntimeConfig } from "./auth/httpAuthExtensionConfiguration"; +export const resolveRuntimeExtensions = (runtimeConfig, extensions) => { + const extensionConfiguration = Object.assign(getAwsRegionExtensionConfiguration(runtimeConfig), getDefaultExtensionConfiguration(runtimeConfig), getHttpHandlerExtensionConfiguration(runtimeConfig), getHttpAuthExtensionConfiguration(runtimeConfig)); + extensions.forEach((extension) => extension.configure(extensionConfiguration)); + return Object.assign(runtimeConfig, resolveAwsRegionExtensionConfiguration(extensionConfiguration), resolveDefaultRuntimeConfig(extensionConfiguration), resolveHttpHandlerRuntimeConfig(extensionConfiguration), resolveHttpAuthRuntimeConfig(extensionConfiguration)); +}; diff --git a/amplify/functions/downloadDocument/node_modules/@aws-sdk/client-sso/dist-types/SSO.d.ts b/amplify/functions/downloadDocument/node_modules/@aws-sdk/client-sso/dist-types/SSO.d.ts new file mode 100644 index 0000000..8500e0c --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@aws-sdk/client-sso/dist-types/SSO.d.ts @@ -0,0 +1,53 @@ +import { HttpHandlerOptions as __HttpHandlerOptions } from "@smithy/types"; +import { 
GetRoleCredentialsCommandInput, GetRoleCredentialsCommandOutput } from "./commands/GetRoleCredentialsCommand"; +import { ListAccountRolesCommandInput, ListAccountRolesCommandOutput } from "./commands/ListAccountRolesCommand"; +import { ListAccountsCommandInput, ListAccountsCommandOutput } from "./commands/ListAccountsCommand"; +import { LogoutCommandInput, LogoutCommandOutput } from "./commands/LogoutCommand"; +import { SSOClient } from "./SSOClient"; +export interface SSO { + /** + * @see {@link GetRoleCredentialsCommand} + */ + getRoleCredentials(args: GetRoleCredentialsCommandInput, options?: __HttpHandlerOptions): Promise; + getRoleCredentials(args: GetRoleCredentialsCommandInput, cb: (err: any, data?: GetRoleCredentialsCommandOutput) => void): void; + getRoleCredentials(args: GetRoleCredentialsCommandInput, options: __HttpHandlerOptions, cb: (err: any, data?: GetRoleCredentialsCommandOutput) => void): void; + /** + * @see {@link ListAccountRolesCommand} + */ + listAccountRoles(args: ListAccountRolesCommandInput, options?: __HttpHandlerOptions): Promise; + listAccountRoles(args: ListAccountRolesCommandInput, cb: (err: any, data?: ListAccountRolesCommandOutput) => void): void; + listAccountRoles(args: ListAccountRolesCommandInput, options: __HttpHandlerOptions, cb: (err: any, data?: ListAccountRolesCommandOutput) => void): void; + /** + * @see {@link ListAccountsCommand} + */ + listAccounts(args: ListAccountsCommandInput, options?: __HttpHandlerOptions): Promise; + listAccounts(args: ListAccountsCommandInput, cb: (err: any, data?: ListAccountsCommandOutput) => void): void; + listAccounts(args: ListAccountsCommandInput, options: __HttpHandlerOptions, cb: (err: any, data?: ListAccountsCommandOutput) => void): void; + /** + * @see {@link LogoutCommand} + */ + logout(args: LogoutCommandInput, options?: __HttpHandlerOptions): Promise; + logout(args: LogoutCommandInput, cb: (err: any, data?: LogoutCommandOutput) => void): void; + logout(args: LogoutCommandInput, 
options: __HttpHandlerOptions, cb: (err: any, data?: LogoutCommandOutput) => void): void; +} +/** + *

AWS IAM Identity Center (successor to AWS Single Sign-On) Portal is a web service that makes it easy for you to assign user access to + * IAM Identity Center resources such as the AWS access portal. Users can get AWS account applications and roles + * assigned to them and get federated into the application.

+ * + *

Although AWS Single Sign-On was renamed, the sso and + * identitystore API namespaces will continue to retain their original name for + * backward compatibility purposes. For more information, see IAM Identity Center rename.

+ *
+ *

This reference guide describes the IAM Identity Center Portal operations that you can call + * programatically and includes detailed information on data types and errors.

+ * + *

AWS provides SDKs that consist of libraries and sample code for various programming + * languages and platforms, such as Java, Ruby, .Net, iOS, or Android. The SDKs provide a + * convenient way to create programmatic access to IAM Identity Center and other AWS services. For more + * information about the AWS SDKs, including how to download and install them, see Tools for Amazon Web Services.

+ *
+ * @public + */ +export declare class SSO extends SSOClient implements SSO { +} diff --git a/amplify/functions/downloadDocument/node_modules/@aws-sdk/client-sso/dist-types/SSOClient.d.ts b/amplify/functions/downloadDocument/node_modules/@aws-sdk/client-sso/dist-types/SSOClient.d.ts new file mode 100644 index 0000000..acfb2fd --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@aws-sdk/client-sso/dist-types/SSOClient.d.ts @@ -0,0 +1,200 @@ +import { HostHeaderInputConfig, HostHeaderResolvedConfig } from "@aws-sdk/middleware-host-header"; +import { UserAgentInputConfig, UserAgentResolvedConfig } from "@aws-sdk/middleware-user-agent"; +import { RegionInputConfig, RegionResolvedConfig } from "@smithy/config-resolver"; +import { EndpointInputConfig, EndpointResolvedConfig } from "@smithy/middleware-endpoint"; +import { RetryInputConfig, RetryResolvedConfig } from "@smithy/middleware-retry"; +import { HttpHandlerUserInput as __HttpHandlerUserInput } from "@smithy/protocol-http"; +import { Client as __Client, DefaultsMode as __DefaultsMode, SmithyConfiguration as __SmithyConfiguration, SmithyResolvedConfiguration as __SmithyResolvedConfiguration } from "@smithy/smithy-client"; +import { BodyLengthCalculator as __BodyLengthCalculator, CheckOptionalClientConfig as __CheckOptionalClientConfig, ChecksumConstructor as __ChecksumConstructor, Decoder as __Decoder, Encoder as __Encoder, HashConstructor as __HashConstructor, HttpHandlerOptions as __HttpHandlerOptions, Logger as __Logger, Provider as __Provider, Provider, StreamCollector as __StreamCollector, UrlParser as __UrlParser, UserAgent as __UserAgent } from "@smithy/types"; +import { HttpAuthSchemeInputConfig, HttpAuthSchemeResolvedConfig } from "./auth/httpAuthSchemeProvider"; +import { GetRoleCredentialsCommandInput, GetRoleCredentialsCommandOutput } from "./commands/GetRoleCredentialsCommand"; +import { ListAccountRolesCommandInput, ListAccountRolesCommandOutput } from 
"./commands/ListAccountRolesCommand"; +import { ListAccountsCommandInput, ListAccountsCommandOutput } from "./commands/ListAccountsCommand"; +import { LogoutCommandInput, LogoutCommandOutput } from "./commands/LogoutCommand"; +import { ClientInputEndpointParameters, ClientResolvedEndpointParameters, EndpointParameters } from "./endpoint/EndpointParameters"; +import { RuntimeExtension, RuntimeExtensionsConfig } from "./runtimeExtensions"; +export { __Client }; +/** + * @public + */ +export type ServiceInputTypes = GetRoleCredentialsCommandInput | ListAccountRolesCommandInput | ListAccountsCommandInput | LogoutCommandInput; +/** + * @public + */ +export type ServiceOutputTypes = GetRoleCredentialsCommandOutput | ListAccountRolesCommandOutput | ListAccountsCommandOutput | LogoutCommandOutput; +/** + * @public + */ +export interface ClientDefaults extends Partial<__SmithyConfiguration<__HttpHandlerOptions>> { + /** + * The HTTP handler to use or its constructor options. Fetch in browser and Https in Nodejs. + */ + requestHandler?: __HttpHandlerUserInput; + /** + * A constructor for a class implementing the {@link @smithy/types#ChecksumConstructor} interface + * that computes the SHA-256 HMAC or checksum of a string or binary buffer. + * @internal + */ + sha256?: __ChecksumConstructor | __HashConstructor; + /** + * The function that will be used to convert strings into HTTP endpoints. + * @internal + */ + urlParser?: __UrlParser; + /** + * A function that can calculate the length of a request body. + * @internal + */ + bodyLengthChecker?: __BodyLengthCalculator; + /** + * A function that converts a stream into an array of bytes. + * @internal + */ + streamCollector?: __StreamCollector; + /** + * The function that will be used to convert a base64-encoded string to a byte array. + * @internal + */ + base64Decoder?: __Decoder; + /** + * The function that will be used to convert binary data to a base64-encoded string. 
+ * @internal + */ + base64Encoder?: __Encoder; + /** + * The function that will be used to convert a UTF8-encoded string to a byte array. + * @internal + */ + utf8Decoder?: __Decoder; + /** + * The function that will be used to convert binary data to a UTF-8 encoded string. + * @internal + */ + utf8Encoder?: __Encoder; + /** + * The runtime environment. + * @internal + */ + runtime?: string; + /** + * Disable dynamically changing the endpoint of the client based on the hostPrefix + * trait of an operation. + */ + disableHostPrefix?: boolean; + /** + * Unique service identifier. + * @internal + */ + serviceId?: string; + /** + * Enables IPv6/IPv4 dualstack endpoint. + */ + useDualstackEndpoint?: boolean | __Provider; + /** + * Enables FIPS compatible endpoints. + */ + useFipsEndpoint?: boolean | __Provider; + /** + * The AWS region to which this client will send requests + */ + region?: string | __Provider; + /** + * Setting a client profile is similar to setting a value for the + * AWS_PROFILE environment variable. Setting a profile on a client + * in code only affects the single client instance, unlike AWS_PROFILE. + * + * When set, and only for environments where an AWS configuration + * file exists, fields configurable by this file will be retrieved + * from the specified profile within that file. + * Conflicting code configuration and environment variables will + * still have higher priority. + * + * For client credential resolution that involves checking the AWS + * configuration file, the client's profile (this value) will be + * used unless a different profile is set in the credential + * provider options. + * + */ + profile?: string; + /** + * The provider populating default tracking information to be sent with `user-agent`, `x-amz-user-agent` header + * @internal + */ + defaultUserAgentProvider?: Provider<__UserAgent>; + /** + * Value for how many times a request will be made at most in case of retry. 
+ */ + maxAttempts?: number | __Provider; + /** + * Specifies which retry algorithm to use. + * @see https://docs.aws.amazon.com/AWSJavaScriptSDK/v3/latest/Package/-smithy-util-retry/Enum/RETRY_MODES/ + * + */ + retryMode?: string | __Provider; + /** + * Optional logger for logging debug/info/warn/error. + */ + logger?: __Logger; + /** + * Optional extensions + */ + extensions?: RuntimeExtension[]; + /** + * The {@link @smithy/smithy-client#DefaultsMode} that will be used to determine how certain default configuration options are resolved in the SDK. + */ + defaultsMode?: __DefaultsMode | __Provider<__DefaultsMode>; +} +/** + * @public + */ +export type SSOClientConfigType = Partial<__SmithyConfiguration<__HttpHandlerOptions>> & ClientDefaults & UserAgentInputConfig & RetryInputConfig & RegionInputConfig & HostHeaderInputConfig & EndpointInputConfig & HttpAuthSchemeInputConfig & ClientInputEndpointParameters; +/** + * @public + * + * The configuration interface of SSOClient class constructor that set the region, credentials and other options. + */ +export interface SSOClientConfig extends SSOClientConfigType { +} +/** + * @public + */ +export type SSOClientResolvedConfigType = __SmithyResolvedConfiguration<__HttpHandlerOptions> & Required & RuntimeExtensionsConfig & UserAgentResolvedConfig & RetryResolvedConfig & RegionResolvedConfig & HostHeaderResolvedConfig & EndpointResolvedConfig & HttpAuthSchemeResolvedConfig & ClientResolvedEndpointParameters; +/** + * @public + * + * The resolved configuration interface of SSOClient class. This is resolved and normalized from the {@link SSOClientConfig | constructor configuration interface}. + */ +export interface SSOClientResolvedConfig extends SSOClientResolvedConfigType { +} +/** + *

AWS IAM Identity Center (successor to AWS Single Sign-On) Portal is a web service that makes it easy for you to assign user access to + * IAM Identity Center resources such as the AWS access portal. Users can get AWS account applications and roles + * assigned to them and get federated into the application.

+ * + *

Although AWS Single Sign-On was renamed, the sso and + * identitystore API namespaces will continue to retain their original name for + * backward compatibility purposes. For more information, see IAM Identity Center rename.

+ *
+ *

This reference guide describes the IAM Identity Center Portal operations that you can call + * programatically and includes detailed information on data types and errors.

+ * + *

AWS provides SDKs that consist of libraries and sample code for various programming + * languages and platforms, such as Java, Ruby, .Net, iOS, or Android. The SDKs provide a + * convenient way to create programmatic access to IAM Identity Center and other AWS services. For more + * information about the AWS SDKs, including how to download and install them, see Tools for Amazon Web Services.

+ *
+ * @public + */ +export declare class SSOClient extends __Client<__HttpHandlerOptions, ServiceInputTypes, ServiceOutputTypes, SSOClientResolvedConfig> { + /** + * The resolved configuration of SSOClient class. This is resolved and normalized from the {@link SSOClientConfig | constructor configuration interface}. + */ + readonly config: SSOClientResolvedConfig; + constructor(...[configuration]: __CheckOptionalClientConfig); + /** + * Destroy underlying resources, like sockets. It's usually not necessary to do this. + * However in Node.js, it's best to explicitly shut down the client's agent when it is no longer needed. + * Otherwise, sockets might stay open for quite a long time before the server terminates them. + */ + destroy(): void; +} diff --git a/amplify/functions/downloadDocument/node_modules/@aws-sdk/client-sso/dist-types/auth/httpAuthExtensionConfiguration.d.ts b/amplify/functions/downloadDocument/node_modules/@aws-sdk/client-sso/dist-types/auth/httpAuthExtensionConfiguration.d.ts new file mode 100644 index 0000000..7e7ff4c --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@aws-sdk/client-sso/dist-types/auth/httpAuthExtensionConfiguration.d.ts @@ -0,0 +1,29 @@ +import { AwsCredentialIdentity, AwsCredentialIdentityProvider, HttpAuthScheme } from "@smithy/types"; +import { SSOHttpAuthSchemeProvider } from "./httpAuthSchemeProvider"; +/** + * @internal + */ +export interface HttpAuthExtensionConfiguration { + setHttpAuthScheme(httpAuthScheme: HttpAuthScheme): void; + httpAuthSchemes(): HttpAuthScheme[]; + setHttpAuthSchemeProvider(httpAuthSchemeProvider: SSOHttpAuthSchemeProvider): void; + httpAuthSchemeProvider(): SSOHttpAuthSchemeProvider; + setCredentials(credentials: AwsCredentialIdentity | AwsCredentialIdentityProvider): void; + credentials(): AwsCredentialIdentity | AwsCredentialIdentityProvider | undefined; +} +/** + * @internal + */ +export type HttpAuthRuntimeConfig = Partial<{ + httpAuthSchemes: HttpAuthScheme[]; + 
httpAuthSchemeProvider: SSOHttpAuthSchemeProvider; + credentials: AwsCredentialIdentity | AwsCredentialIdentityProvider; +}>; +/** + * @internal + */ +export declare const getHttpAuthExtensionConfiguration: (runtimeConfig: HttpAuthRuntimeConfig) => HttpAuthExtensionConfiguration; +/** + * @internal + */ +export declare const resolveHttpAuthRuntimeConfig: (config: HttpAuthExtensionConfiguration) => HttpAuthRuntimeConfig; diff --git a/amplify/functions/downloadDocument/node_modules/@aws-sdk/client-sso/dist-types/auth/httpAuthSchemeProvider.d.ts b/amplify/functions/downloadDocument/node_modules/@aws-sdk/client-sso/dist-types/auth/httpAuthSchemeProvider.d.ts new file mode 100644 index 0000000..bf3aad6 --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@aws-sdk/client-sso/dist-types/auth/httpAuthSchemeProvider.d.ts @@ -0,0 +1,75 @@ +import { AwsSdkSigV4AuthInputConfig, AwsSdkSigV4AuthResolvedConfig, AwsSdkSigV4PreviouslyResolved } from "@aws-sdk/core"; +import { HandlerExecutionContext, HttpAuthScheme, HttpAuthSchemeParameters, HttpAuthSchemeParametersProvider, HttpAuthSchemeProvider, Provider } from "@smithy/types"; +import { SSOClientResolvedConfig } from "../SSOClient"; +/** + * @internal + */ +export interface SSOHttpAuthSchemeParameters extends HttpAuthSchemeParameters { + region?: string; +} +/** + * @internal + */ +export interface SSOHttpAuthSchemeParametersProvider extends HttpAuthSchemeParametersProvider { +} +/** + * @internal + */ +export declare const defaultSSOHttpAuthSchemeParametersProvider: (config: SSOClientResolvedConfig, context: HandlerExecutionContext, input: object) => Promise; +/** + * @internal + */ +export interface SSOHttpAuthSchemeProvider extends HttpAuthSchemeProvider { +} +/** + * @internal + */ +export declare const defaultSSOHttpAuthSchemeProvider: SSOHttpAuthSchemeProvider; +/** + * @internal + */ +export interface HttpAuthSchemeInputConfig extends AwsSdkSigV4AuthInputConfig { + /** + * A comma-separated list of 
case-sensitive auth scheme names. + * An auth scheme name is a fully qualified auth scheme ID with the namespace prefix trimmed. + * For example, the auth scheme with ID aws.auth#sigv4 is named sigv4. + * @public + */ + authSchemePreference?: string[] | Provider; + /** + * Configuration of HttpAuthSchemes for a client which provides default identity providers and signers per auth scheme. + * @internal + */ + httpAuthSchemes?: HttpAuthScheme[]; + /** + * Configuration of an HttpAuthSchemeProvider for a client which resolves which HttpAuthScheme to use. + * @internal + */ + httpAuthSchemeProvider?: SSOHttpAuthSchemeProvider; +} +/** + * @internal + */ +export interface HttpAuthSchemeResolvedConfig extends AwsSdkSigV4AuthResolvedConfig { + /** + * A comma-separated list of case-sensitive auth scheme names. + * An auth scheme name is a fully qualified auth scheme ID with the namespace prefix trimmed. + * For example, the auth scheme with ID aws.auth#sigv4 is named sigv4. + * @public + */ + readonly authSchemePreference: Provider; + /** + * Configuration of HttpAuthSchemes for a client which provides default identity providers and signers per auth scheme. + * @internal + */ + readonly httpAuthSchemes: HttpAuthScheme[]; + /** + * Configuration of an HttpAuthSchemeProvider for a client which resolves which HttpAuthScheme to use. 
+ * @internal + */ + readonly httpAuthSchemeProvider: SSOHttpAuthSchemeProvider; +} +/** + * @internal + */ +export declare const resolveHttpAuthSchemeConfig: (config: T & HttpAuthSchemeInputConfig & AwsSdkSigV4PreviouslyResolved) => T & HttpAuthSchemeResolvedConfig; diff --git a/amplify/functions/downloadDocument/node_modules/@aws-sdk/client-sso/dist-types/commands/GetRoleCredentialsCommand.d.ts b/amplify/functions/downloadDocument/node_modules/@aws-sdk/client-sso/dist-types/commands/GetRoleCredentialsCommand.d.ts new file mode 100644 index 0000000..f306bd5 --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@aws-sdk/client-sso/dist-types/commands/GetRoleCredentialsCommand.d.ts @@ -0,0 +1,95 @@ +import { Command as $Command } from "@smithy/smithy-client"; +import { MetadataBearer as __MetadataBearer } from "@smithy/types"; +import { GetRoleCredentialsRequest, GetRoleCredentialsResponse } from "../models/models_0"; +import { ServiceInputTypes, ServiceOutputTypes, SSOClientResolvedConfig } from "../SSOClient"; +/** + * @public + */ +export type { __MetadataBearer }; +export { $Command }; +/** + * @public + * + * The input for {@link GetRoleCredentialsCommand}. + */ +export interface GetRoleCredentialsCommandInput extends GetRoleCredentialsRequest { +} +/** + * @public + * + * The output of {@link GetRoleCredentialsCommand}. + */ +export interface GetRoleCredentialsCommandOutput extends GetRoleCredentialsResponse, __MetadataBearer { +} +declare const GetRoleCredentialsCommand_base: { + new (input: GetRoleCredentialsCommandInput): import("@smithy/smithy-client").CommandImpl; + new (__0_0: GetRoleCredentialsCommandInput): import("@smithy/smithy-client").CommandImpl; + getEndpointParameterInstructions(): import("@smithy/middleware-endpoint").EndpointParameterInstructions; +}; +/** + *

Returns the STS short-term credentials for a given role name that is assigned to the + * user.

+ * @example + * Use a bare-bones client and the command you need to make an API call. + * ```javascript + * import { SSOClient, GetRoleCredentialsCommand } from "@aws-sdk/client-sso"; // ES Modules import + * // const { SSOClient, GetRoleCredentialsCommand } = require("@aws-sdk/client-sso"); // CommonJS import + * const client = new SSOClient(config); + * const input = { // GetRoleCredentialsRequest + * roleName: "STRING_VALUE", // required + * accountId: "STRING_VALUE", // required + * accessToken: "STRING_VALUE", // required + * }; + * const command = new GetRoleCredentialsCommand(input); + * const response = await client.send(command); + * // { // GetRoleCredentialsResponse + * // roleCredentials: { // RoleCredentials + * // accessKeyId: "STRING_VALUE", + * // secretAccessKey: "STRING_VALUE", + * // sessionToken: "STRING_VALUE", + * // expiration: Number("long"), + * // }, + * // }; + * + * ``` + * + * @param GetRoleCredentialsCommandInput - {@link GetRoleCredentialsCommandInput} + * @returns {@link GetRoleCredentialsCommandOutput} + * @see {@link GetRoleCredentialsCommandInput} for command's `input` shape. + * @see {@link GetRoleCredentialsCommandOutput} for command's `response` shape. + * @see {@link SSOClientResolvedConfig | config} for SSOClient's `config` shape. + * + * @throws {@link InvalidRequestException} (client fault) + *

Indicates that a problem occurred with the input to the request. For example, a required + * parameter might be missing or out of range.

+ * + * @throws {@link ResourceNotFoundException} (client fault) + *

The specified resource doesn't exist.

+ * + * @throws {@link TooManyRequestsException} (client fault) + *

Indicates that the request is being made too frequently and is more than what the server + * can handle.

+ * + * @throws {@link UnauthorizedException} (client fault) + *

Indicates that the request is not authorized. This can happen due to an invalid access + * token in the request.

+ * + * @throws {@link SSOServiceException} + *

Base exception class for all service exceptions from SSO service.

+ * + * + * @public + */ +export declare class GetRoleCredentialsCommand extends GetRoleCredentialsCommand_base { + /** @internal type navigation helper, not in runtime. */ + protected static __types: { + api: { + input: GetRoleCredentialsRequest; + output: GetRoleCredentialsResponse; + }; + sdk: { + input: GetRoleCredentialsCommandInput; + output: GetRoleCredentialsCommandOutput; + }; + }; +} diff --git a/amplify/functions/downloadDocument/node_modules/@aws-sdk/client-sso/dist-types/commands/ListAccountRolesCommand.d.ts b/amplify/functions/downloadDocument/node_modules/@aws-sdk/client-sso/dist-types/commands/ListAccountRolesCommand.d.ts new file mode 100644 index 0000000..8ce6a04 --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@aws-sdk/client-sso/dist-types/commands/ListAccountRolesCommand.d.ts @@ -0,0 +1,96 @@ +import { Command as $Command } from "@smithy/smithy-client"; +import { MetadataBearer as __MetadataBearer } from "@smithy/types"; +import { ListAccountRolesRequest, ListAccountRolesResponse } from "../models/models_0"; +import { ServiceInputTypes, ServiceOutputTypes, SSOClientResolvedConfig } from "../SSOClient"; +/** + * @public + */ +export type { __MetadataBearer }; +export { $Command }; +/** + * @public + * + * The input for {@link ListAccountRolesCommand}. + */ +export interface ListAccountRolesCommandInput extends ListAccountRolesRequest { +} +/** + * @public + * + * The output of {@link ListAccountRolesCommand}. + */ +export interface ListAccountRolesCommandOutput extends ListAccountRolesResponse, __MetadataBearer { +} +declare const ListAccountRolesCommand_base: { + new (input: ListAccountRolesCommandInput): import("@smithy/smithy-client").CommandImpl; + new (__0_0: ListAccountRolesCommandInput): import("@smithy/smithy-client").CommandImpl; + getEndpointParameterInstructions(): import("@smithy/middleware-endpoint").EndpointParameterInstructions; +}; +/** + *

Lists all roles that are assigned to the user for a given AWS account.

+ * @example + * Use a bare-bones client and the command you need to make an API call. + * ```javascript + * import { SSOClient, ListAccountRolesCommand } from "@aws-sdk/client-sso"; // ES Modules import + * // const { SSOClient, ListAccountRolesCommand } = require("@aws-sdk/client-sso"); // CommonJS import + * const client = new SSOClient(config); + * const input = { // ListAccountRolesRequest + * nextToken: "STRING_VALUE", + * maxResults: Number("int"), + * accessToken: "STRING_VALUE", // required + * accountId: "STRING_VALUE", // required + * }; + * const command = new ListAccountRolesCommand(input); + * const response = await client.send(command); + * // { // ListAccountRolesResponse + * // nextToken: "STRING_VALUE", + * // roleList: [ // RoleListType + * // { // RoleInfo + * // roleName: "STRING_VALUE", + * // accountId: "STRING_VALUE", + * // }, + * // ], + * // }; + * + * ``` + * + * @param ListAccountRolesCommandInput - {@link ListAccountRolesCommandInput} + * @returns {@link ListAccountRolesCommandOutput} + * @see {@link ListAccountRolesCommandInput} for command's `input` shape. + * @see {@link ListAccountRolesCommandOutput} for command's `response` shape. + * @see {@link SSOClientResolvedConfig | config} for SSOClient's `config` shape. + * + * @throws {@link InvalidRequestException} (client fault) + *

Indicates that a problem occurred with the input to the request. For example, a required + * parameter might be missing or out of range.

+ * + * @throws {@link ResourceNotFoundException} (client fault) + *

The specified resource doesn't exist.

+ * + * @throws {@link TooManyRequestsException} (client fault) + *

Indicates that the request is being made too frequently and is more than what the server + * can handle.

+ * + * @throws {@link UnauthorizedException} (client fault) + *

Indicates that the request is not authorized. This can happen due to an invalid access + * token in the request.

+ * + * @throws {@link SSOServiceException} + *

Base exception class for all service exceptions from SSO service.

+ * + * + * @public + */ +export declare class ListAccountRolesCommand extends ListAccountRolesCommand_base { + /** @internal type navigation helper, not in runtime. */ + protected static __types: { + api: { + input: ListAccountRolesRequest; + output: ListAccountRolesResponse; + }; + sdk: { + input: ListAccountRolesCommandInput; + output: ListAccountRolesCommandOutput; + }; + }; +} diff --git a/amplify/functions/downloadDocument/node_modules/@aws-sdk/client-sso/dist-types/commands/ListAccountsCommand.d.ts b/amplify/functions/downloadDocument/node_modules/@aws-sdk/client-sso/dist-types/commands/ListAccountsCommand.d.ts new file mode 100644 index 0000000..cffc47e --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@aws-sdk/client-sso/dist-types/commands/ListAccountsCommand.d.ts @@ -0,0 +1,98 @@ +import { Command as $Command } from "@smithy/smithy-client"; +import { MetadataBearer as __MetadataBearer } from "@smithy/types"; +import { ListAccountsRequest, ListAccountsResponse } from "../models/models_0"; +import { ServiceInputTypes, ServiceOutputTypes, SSOClientResolvedConfig } from "../SSOClient"; +/** + * @public + */ +export type { __MetadataBearer }; +export { $Command }; +/** + * @public + * + * The input for {@link ListAccountsCommand}. + */ +export interface ListAccountsCommandInput extends ListAccountsRequest { +} +/** + * @public + * + * The output of {@link ListAccountsCommand}. + */ +export interface ListAccountsCommandOutput extends ListAccountsResponse, __MetadataBearer { +} +declare const ListAccountsCommand_base: { + new (input: ListAccountsCommandInput): import("@smithy/smithy-client").CommandImpl; + new (__0_0: ListAccountsCommandInput): import("@smithy/smithy-client").CommandImpl; + getEndpointParameterInstructions(): import("@smithy/middleware-endpoint").EndpointParameterInstructions; +}; +/** + *

Lists all AWS accounts assigned to the user. These AWS accounts are assigned by the + * administrator of the account. For more information, see Assign User Access in the IAM Identity Center User Guide. This operation + * returns a paginated response.

+ * @example + * Use a bare-bones client and the command you need to make an API call. + * ```javascript + * import { SSOClient, ListAccountsCommand } from "@aws-sdk/client-sso"; // ES Modules import + * // const { SSOClient, ListAccountsCommand } = require("@aws-sdk/client-sso"); // CommonJS import + * const client = new SSOClient(config); + * const input = { // ListAccountsRequest + * nextToken: "STRING_VALUE", + * maxResults: Number("int"), + * accessToken: "STRING_VALUE", // required + * }; + * const command = new ListAccountsCommand(input); + * const response = await client.send(command); + * // { // ListAccountsResponse + * // nextToken: "STRING_VALUE", + * // accountList: [ // AccountListType + * // { // AccountInfo + * // accountId: "STRING_VALUE", + * // accountName: "STRING_VALUE", + * // emailAddress: "STRING_VALUE", + * // }, + * // ], + * // }; + * + * ``` + * + * @param ListAccountsCommandInput - {@link ListAccountsCommandInput} + * @returns {@link ListAccountsCommandOutput} + * @see {@link ListAccountsCommandInput} for command's `input` shape. + * @see {@link ListAccountsCommandOutput} for command's `response` shape. + * @see {@link SSOClientResolvedConfig | config} for SSOClient's `config` shape. + * + * @throws {@link InvalidRequestException} (client fault) + *

Indicates that a problem occurred with the input to the request. For example, a required + * parameter might be missing or out of range.

+ * + * @throws {@link ResourceNotFoundException} (client fault) + *

The specified resource doesn't exist.

+ * + * @throws {@link TooManyRequestsException} (client fault) + *

Indicates that the request is being made too frequently and is more than what the server + * can handle.

+ * + * @throws {@link UnauthorizedException} (client fault) + *

Indicates that the request is not authorized. This can happen due to an invalid access + * token in the request.

+ * + * @throws {@link SSOServiceException} + *

Base exception class for all service exceptions from SSO service.

+ * + * + * @public + */ +export declare class ListAccountsCommand extends ListAccountsCommand_base { + /** @internal type navigation helper, not in runtime. */ + protected static __types: { + api: { + input: ListAccountsRequest; + output: ListAccountsResponse; + }; + sdk: { + input: ListAccountsCommandInput; + output: ListAccountsCommandOutput; + }; + }; +} diff --git a/amplify/functions/downloadDocument/node_modules/@aws-sdk/client-sso/dist-types/commands/LogoutCommand.d.ts b/amplify/functions/downloadDocument/node_modules/@aws-sdk/client-sso/dist-types/commands/LogoutCommand.d.ts new file mode 100644 index 0000000..e85fe33 --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@aws-sdk/client-sso/dist-types/commands/LogoutCommand.d.ts @@ -0,0 +1,95 @@ +import { Command as $Command } from "@smithy/smithy-client"; +import { MetadataBearer as __MetadataBearer } from "@smithy/types"; +import { LogoutRequest } from "../models/models_0"; +import { ServiceInputTypes, ServiceOutputTypes, SSOClientResolvedConfig } from "../SSOClient"; +/** + * @public + */ +export type { __MetadataBearer }; +export { $Command }; +/** + * @public + * + * The input for {@link LogoutCommand}. + */ +export interface LogoutCommandInput extends LogoutRequest { +} +/** + * @public + * + * The output of {@link LogoutCommand}. + */ +export interface LogoutCommandOutput extends __MetadataBearer { +} +declare const LogoutCommand_base: { + new (input: LogoutCommandInput): import("@smithy/smithy-client").CommandImpl; + new (__0_0: LogoutCommandInput): import("@smithy/smithy-client").CommandImpl; + getEndpointParameterInstructions(): import("@smithy/middleware-endpoint").EndpointParameterInstructions; +}; +/** + *

Removes the locally stored SSO tokens from the client-side cache and sends an API call to + * the IAM Identity Center service to invalidate the corresponding server-side IAM Identity Center sign in + * session.

+ * + *

If a user uses IAM Identity Center to access the AWS CLI, the user’s IAM Identity Center sign in session is + * used to obtain an IAM session, as specified in the corresponding IAM Identity Center permission set. + * More specifically, IAM Identity Center assumes an IAM role in the target account on behalf of the user, + * and the corresponding temporary AWS credentials are returned to the client.

+ *

After user logout, any existing IAM role sessions that were created by using IAM Identity Center + * permission sets continue based on the duration configured in the permission set. + * For more information, see User + * authentications in the IAM Identity Center User + * Guide.

+ *
+ * @example + * Use a bare-bones client and the command you need to make an API call. + * ```javascript + * import { SSOClient, LogoutCommand } from "@aws-sdk/client-sso"; // ES Modules import + * // const { SSOClient, LogoutCommand } = require("@aws-sdk/client-sso"); // CommonJS import + * const client = new SSOClient(config); + * const input = { // LogoutRequest + * accessToken: "STRING_VALUE", // required + * }; + * const command = new LogoutCommand(input); + * const response = await client.send(command); + * // {}; + * + * ``` + * + * @param LogoutCommandInput - {@link LogoutCommandInput} + * @returns {@link LogoutCommandOutput} + * @see {@link LogoutCommandInput} for command's `input` shape. + * @see {@link LogoutCommandOutput} for command's `response` shape. + * @see {@link SSOClientResolvedConfig | config} for SSOClient's `config` shape. + * + * @throws {@link InvalidRequestException} (client fault) + *

Indicates that a problem occurred with the input to the request. For example, a required + * parameter might be missing or out of range.

+ * + * @throws {@link TooManyRequestsException} (client fault) + *

Indicates that the request is being made too frequently and is more than what the server + * can handle.

+ * + * @throws {@link UnauthorizedException} (client fault) + *

Indicates that the request is not authorized. This can happen due to an invalid access + * token in the request.

+ * + * @throws {@link SSOServiceException} + *

Base exception class for all service exceptions from SSO service.

+ * + * + * @public + */ +export declare class LogoutCommand extends LogoutCommand_base { + /** @internal type navigation helper, not in runtime. */ + protected static __types: { + api: { + input: LogoutRequest; + output: {}; + }; + sdk: { + input: LogoutCommandInput; + output: LogoutCommandOutput; + }; + }; +} diff --git a/amplify/functions/downloadDocument/node_modules/@aws-sdk/client-sso/dist-types/commands/index.d.ts b/amplify/functions/downloadDocument/node_modules/@aws-sdk/client-sso/dist-types/commands/index.d.ts new file mode 100644 index 0000000..0ab890d --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@aws-sdk/client-sso/dist-types/commands/index.d.ts @@ -0,0 +1,4 @@ +export * from "./GetRoleCredentialsCommand"; +export * from "./ListAccountRolesCommand"; +export * from "./ListAccountsCommand"; +export * from "./LogoutCommand"; diff --git a/amplify/functions/downloadDocument/node_modules/@aws-sdk/client-sso/dist-types/endpoint/EndpointParameters.d.ts b/amplify/functions/downloadDocument/node_modules/@aws-sdk/client-sso/dist-types/endpoint/EndpointParameters.d.ts new file mode 100644 index 0000000..23f42e3 --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@aws-sdk/client-sso/dist-types/endpoint/EndpointParameters.d.ts @@ -0,0 +1,40 @@ +import { Endpoint, EndpointParameters as __EndpointParameters, EndpointV2, Provider } from "@smithy/types"; +/** + * @public + */ +export interface ClientInputEndpointParameters { + region?: string | Provider; + useDualstackEndpoint?: boolean | Provider; + useFipsEndpoint?: boolean | Provider; + endpoint?: string | Provider | Endpoint | Provider | EndpointV2 | Provider; +} +export type ClientResolvedEndpointParameters = ClientInputEndpointParameters & { + defaultSigningName: string; +}; +export declare const resolveClientEndpointParameters: (options: T & ClientInputEndpointParameters) => T & ClientInputEndpointParameters & { + defaultSigningName: string; +}; +export declare const 
commonParams: { + readonly UseFIPS: { + readonly type: "builtInParams"; + readonly name: "useFipsEndpoint"; + }; + readonly Endpoint: { + readonly type: "builtInParams"; + readonly name: "endpoint"; + }; + readonly Region: { + readonly type: "builtInParams"; + readonly name: "region"; + }; + readonly UseDualStack: { + readonly type: "builtInParams"; + readonly name: "useDualstackEndpoint"; + }; +}; +export interface EndpointParameters extends __EndpointParameters { + Region?: string; + UseDualStack?: boolean; + UseFIPS?: boolean; + Endpoint?: string; +} diff --git a/amplify/functions/downloadDocument/node_modules/@aws-sdk/client-sso/dist-types/endpoint/endpointResolver.d.ts b/amplify/functions/downloadDocument/node_modules/@aws-sdk/client-sso/dist-types/endpoint/endpointResolver.d.ts new file mode 100644 index 0000000..70a8eae --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@aws-sdk/client-sso/dist-types/endpoint/endpointResolver.d.ts @@ -0,0 +1,5 @@ +import { EndpointV2, Logger } from "@smithy/types"; +import { EndpointParameters } from "./EndpointParameters"; +export declare const defaultEndpointResolver: (endpointParams: EndpointParameters, context?: { + logger?: Logger; +}) => EndpointV2; diff --git a/amplify/functions/downloadDocument/node_modules/@aws-sdk/client-sso/dist-types/endpoint/ruleset.d.ts b/amplify/functions/downloadDocument/node_modules/@aws-sdk/client-sso/dist-types/endpoint/ruleset.d.ts new file mode 100644 index 0000000..4b23899 --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@aws-sdk/client-sso/dist-types/endpoint/ruleset.d.ts @@ -0,0 +1,2 @@ +import { RuleSetObject } from "@smithy/types"; +export declare const ruleSet: RuleSetObject; diff --git a/amplify/functions/downloadDocument/node_modules/@aws-sdk/client-sso/dist-types/extensionConfiguration.d.ts b/amplify/functions/downloadDocument/node_modules/@aws-sdk/client-sso/dist-types/extensionConfiguration.d.ts new file mode 100644 index 0000000..0f76dd3 
--- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@aws-sdk/client-sso/dist-types/extensionConfiguration.d.ts @@ -0,0 +1,9 @@ +import { AwsRegionExtensionConfiguration } from "@aws-sdk/types"; +import { HttpHandlerExtensionConfiguration } from "@smithy/protocol-http"; +import { DefaultExtensionConfiguration } from "@smithy/types"; +import { HttpAuthExtensionConfiguration } from "./auth/httpAuthExtensionConfiguration"; +/** + * @internal + */ +export interface SSOExtensionConfiguration extends HttpHandlerExtensionConfiguration, DefaultExtensionConfiguration, AwsRegionExtensionConfiguration, HttpAuthExtensionConfiguration { +} diff --git a/amplify/functions/downloadDocument/node_modules/@aws-sdk/client-sso/dist-types/index.d.ts b/amplify/functions/downloadDocument/node_modules/@aws-sdk/client-sso/dist-types/index.d.ts new file mode 100644 index 0000000..3b3bcea --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@aws-sdk/client-sso/dist-types/index.d.ts @@ -0,0 +1,29 @@ +/** + *

AWS IAM Identity Center (successor to AWS Single Sign-On) Portal is a web service that makes it easy for you to assign user access to + * IAM Identity Center resources such as the AWS access portal. Users can get AWS account applications and roles + * assigned to them and get federated into the application.

+ * + *

Although AWS Single Sign-On was renamed, the sso and + * identitystore API namespaces will continue to retain their original name for + * backward compatibility purposes. For more information, see IAM Identity Center rename.

+ *
+ *

This reference guide describes the IAM Identity Center Portal operations that you can call + * programatically and includes detailed information on data types and errors.

+ * + *

AWS provides SDKs that consist of libraries and sample code for various programming + * languages and platforms, such as Java, Ruby, .Net, iOS, or Android. The SDKs provide a + * convenient way to create programmatic access to IAM Identity Center and other AWS services. For more + * information about the AWS SDKs, including how to download and install them, see Tools for Amazon Web Services.

+ *
+ * + * @packageDocumentation + */ +export * from "./SSOClient"; +export * from "./SSO"; +export { ClientInputEndpointParameters } from "./endpoint/EndpointParameters"; +export type { RuntimeExtension } from "./runtimeExtensions"; +export type { SSOExtensionConfiguration } from "./extensionConfiguration"; +export * from "./commands"; +export * from "./pagination"; +export * from "./models"; +export { SSOServiceException } from "./models/SSOServiceException"; diff --git a/amplify/functions/downloadDocument/node_modules/@aws-sdk/client-sso/dist-types/models/SSOServiceException.d.ts b/amplify/functions/downloadDocument/node_modules/@aws-sdk/client-sso/dist-types/models/SSOServiceException.d.ts new file mode 100644 index 0000000..9172f1a --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@aws-sdk/client-sso/dist-types/models/SSOServiceException.d.ts @@ -0,0 +1,14 @@ +import { ServiceException as __ServiceException, ServiceExceptionOptions as __ServiceExceptionOptions } from "@smithy/smithy-client"; +export type { __ServiceExceptionOptions }; +export { __ServiceException }; +/** + * @public + * + * Base exception class for all service exceptions from SSO service. 
+ */ +export declare class SSOServiceException extends __ServiceException { + /** + * @internal + */ + constructor(options: __ServiceExceptionOptions); +} diff --git a/amplify/functions/downloadDocument/node_modules/@aws-sdk/client-sso/dist-types/models/index.d.ts b/amplify/functions/downloadDocument/node_modules/@aws-sdk/client-sso/dist-types/models/index.d.ts new file mode 100644 index 0000000..09c5d6e --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@aws-sdk/client-sso/dist-types/models/index.d.ts @@ -0,0 +1 @@ +export * from "./models_0"; diff --git a/amplify/functions/downloadDocument/node_modules/@aws-sdk/client-sso/dist-types/models/models_0.d.ts b/amplify/functions/downloadDocument/node_modules/@aws-sdk/client-sso/dist-types/models/models_0.d.ts new file mode 100644 index 0000000..0d40fa7 --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@aws-sdk/client-sso/dist-types/models/models_0.d.ts @@ -0,0 +1,266 @@ +import { ExceptionOptionType as __ExceptionOptionType } from "@smithy/smithy-client"; +import { SSOServiceException as __BaseException } from "./SSOServiceException"; +/** + *

Provides information about your AWS account.

+ * @public + */ +export interface AccountInfo { + /** + *

The identifier of the AWS account that is assigned to the user.

+ * @public + */ + accountId?: string | undefined; + /** + *

The display name of the AWS account that is assigned to the user.

+ * @public + */ + accountName?: string | undefined; + /** + *

The email address of the AWS account that is assigned to the user.

+ * @public + */ + emailAddress?: string | undefined; +} +/** + * @public + */ +export interface GetRoleCredentialsRequest { + /** + *

The friendly name of the role that is assigned to the user.

+ * @public + */ + roleName: string | undefined; + /** + *

The identifier for the AWS account that is assigned to the user.

+ * @public + */ + accountId: string | undefined; + /** + *

The token issued by the CreateToken API call. For more information, see + * CreateToken in the IAM Identity Center OIDC API Reference Guide.

+ * @public + */ + accessToken: string | undefined; +} +/** + *

Provides information about the role credentials that are assigned to the user.

+ * @public + */ +export interface RoleCredentials { + /** + *

The identifier used for the temporary security credentials. For more information, see + * Using Temporary Security Credentials to Request Access to AWS Resources in the + * AWS IAM User Guide.

+ * @public + */ + accessKeyId?: string | undefined; + /** + *

The key that is used to sign the request. For more information, see Using Temporary Security Credentials to Request Access to AWS Resources in the + * AWS IAM User Guide.

+ * @public + */ + secretAccessKey?: string | undefined; + /** + *

The token used for temporary credentials. For more information, see Using Temporary Security Credentials to Request Access to AWS Resources in the + * AWS IAM User Guide.

+ * @public + */ + sessionToken?: string | undefined; + /** + *

The date on which temporary security credentials expire.

+ * @public + */ + expiration?: number | undefined; +} +/** + * @public + */ +export interface GetRoleCredentialsResponse { + /** + *

The credentials for the role that is assigned to the user.

+ * @public + */ + roleCredentials?: RoleCredentials | undefined; +} +/** + *

Indicates that a problem occurred with the input to the request. For example, a required + * parameter might be missing or out of range.

+ * @public + */ +export declare class InvalidRequestException extends __BaseException { + readonly name: "InvalidRequestException"; + readonly $fault: "client"; + /** + * @internal + */ + constructor(opts: __ExceptionOptionType); +} +/** + *

The specified resource doesn't exist.

+ * @public + */ +export declare class ResourceNotFoundException extends __BaseException { + readonly name: "ResourceNotFoundException"; + readonly $fault: "client"; + /** + * @internal + */ + constructor(opts: __ExceptionOptionType); +} +/** + *

Indicates that the request is being made too frequently and is more than what the server + * can handle.

+ * @public + */ +export declare class TooManyRequestsException extends __BaseException { + readonly name: "TooManyRequestsException"; + readonly $fault: "client"; + /** + * @internal + */ + constructor(opts: __ExceptionOptionType); +} +/** + *

Indicates that the request is not authorized. This can happen due to an invalid access + * token in the request.

+ * @public + */ +export declare class UnauthorizedException extends __BaseException { + readonly name: "UnauthorizedException"; + readonly $fault: "client"; + /** + * @internal + */ + constructor(opts: __ExceptionOptionType); +} +/** + * @public + */ +export interface ListAccountRolesRequest { + /** + *

The page token from the previous response output when you request subsequent pages.

+ * @public + */ + nextToken?: string | undefined; + /** + *

The number of items that clients can request per page.

+ * @public + */ + maxResults?: number | undefined; + /** + *

The token issued by the CreateToken API call. For more information, see + * CreateToken in the IAM Identity Center OIDC API Reference Guide.

+ * @public + */ + accessToken: string | undefined; + /** + *

The identifier for the AWS account that is assigned to the user.

+ * @public + */ + accountId: string | undefined; +} +/** + *

Provides information about the role that is assigned to the user.

+ * @public + */ +export interface RoleInfo { + /** + *

The friendly name of the role that is assigned to the user.

+ * @public + */ + roleName?: string | undefined; + /** + *

The identifier of the AWS account assigned to the user.

+ * @public + */ + accountId?: string | undefined; +} +/** + * @public + */ +export interface ListAccountRolesResponse { + /** + *

The page token client that is used to retrieve the list of accounts.

+ * @public + */ + nextToken?: string | undefined; + /** + *

A paginated response with the list of roles and the next token if more results are + * available.

+ * @public + */ + roleList?: RoleInfo[] | undefined; +} +/** + * @public + */ +export interface ListAccountsRequest { + /** + *

(Optional) When requesting subsequent pages, this is the page token from the previous + * response output.

+ * @public + */ + nextToken?: string | undefined; + /** + *

This is the number of items clients can request per page.

+ * @public + */ + maxResults?: number | undefined; + /** + *

The token issued by the CreateToken API call. For more information, see + * CreateToken in the IAM Identity Center OIDC API Reference Guide.

+ * @public + */ + accessToken: string | undefined; +} +/** + * @public + */ +export interface ListAccountsResponse { + /** + *

The page token client that is used to retrieve the list of accounts.

+ * @public + */ + nextToken?: string | undefined; + /** + *

A paginated response with the list of account information and the next token if more + * results are available.

+ * @public + */ + accountList?: AccountInfo[] | undefined; +} +/** + * @public + */ +export interface LogoutRequest { + /** + *

The token issued by the CreateToken API call. For more information, see + * CreateToken in the IAM Identity Center OIDC API Reference Guide.

+ * @public + */ + accessToken: string | undefined; +} +/** + * @internal + */ +export declare const GetRoleCredentialsRequestFilterSensitiveLog: (obj: GetRoleCredentialsRequest) => any; +/** + * @internal + */ +export declare const RoleCredentialsFilterSensitiveLog: (obj: RoleCredentials) => any; +/** + * @internal + */ +export declare const GetRoleCredentialsResponseFilterSensitiveLog: (obj: GetRoleCredentialsResponse) => any; +/** + * @internal + */ +export declare const ListAccountRolesRequestFilterSensitiveLog: (obj: ListAccountRolesRequest) => any; +/** + * @internal + */ +export declare const ListAccountsRequestFilterSensitiveLog: (obj: ListAccountsRequest) => any; +/** + * @internal + */ +export declare const LogoutRequestFilterSensitiveLog: (obj: LogoutRequest) => any; diff --git a/amplify/functions/downloadDocument/node_modules/@aws-sdk/client-sso/dist-types/pagination/Interfaces.d.ts b/amplify/functions/downloadDocument/node_modules/@aws-sdk/client-sso/dist-types/pagination/Interfaces.d.ts new file mode 100644 index 0000000..81addca --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@aws-sdk/client-sso/dist-types/pagination/Interfaces.d.ts @@ -0,0 +1,8 @@ +import { PaginationConfiguration } from "@smithy/types"; +import { SSOClient } from "../SSOClient"; +/** + * @public + */ +export interface SSOPaginationConfiguration extends PaginationConfiguration { + client: SSOClient; +} diff --git a/amplify/functions/downloadDocument/node_modules/@aws-sdk/client-sso/dist-types/pagination/ListAccountRolesPaginator.d.ts b/amplify/functions/downloadDocument/node_modules/@aws-sdk/client-sso/dist-types/pagination/ListAccountRolesPaginator.d.ts new file mode 100644 index 0000000..fa309d4 --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@aws-sdk/client-sso/dist-types/pagination/ListAccountRolesPaginator.d.ts @@ -0,0 +1,7 @@ +import { Paginator } from "@smithy/types"; +import { ListAccountRolesCommandInput, ListAccountRolesCommandOutput 
} from "../commands/ListAccountRolesCommand"; +import { SSOPaginationConfiguration } from "./Interfaces"; +/** + * @public + */ +export declare const paginateListAccountRoles: (config: SSOPaginationConfiguration, input: ListAccountRolesCommandInput, ...rest: any[]) => Paginator; diff --git a/amplify/functions/downloadDocument/node_modules/@aws-sdk/client-sso/dist-types/pagination/ListAccountsPaginator.d.ts b/amplify/functions/downloadDocument/node_modules/@aws-sdk/client-sso/dist-types/pagination/ListAccountsPaginator.d.ts new file mode 100644 index 0000000..21c2559 --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@aws-sdk/client-sso/dist-types/pagination/ListAccountsPaginator.d.ts @@ -0,0 +1,7 @@ +import { Paginator } from "@smithy/types"; +import { ListAccountsCommandInput, ListAccountsCommandOutput } from "../commands/ListAccountsCommand"; +import { SSOPaginationConfiguration } from "./Interfaces"; +/** + * @public + */ +export declare const paginateListAccounts: (config: SSOPaginationConfiguration, input: ListAccountsCommandInput, ...rest: any[]) => Paginator; diff --git a/amplify/functions/downloadDocument/node_modules/@aws-sdk/client-sso/dist-types/pagination/index.d.ts b/amplify/functions/downloadDocument/node_modules/@aws-sdk/client-sso/dist-types/pagination/index.d.ts new file mode 100644 index 0000000..1e7866f --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@aws-sdk/client-sso/dist-types/pagination/index.d.ts @@ -0,0 +1,3 @@ +export * from "./Interfaces"; +export * from "./ListAccountRolesPaginator"; +export * from "./ListAccountsPaginator"; diff --git a/amplify/functions/downloadDocument/node_modules/@aws-sdk/client-sso/dist-types/protocols/Aws_restJson1.d.ts b/amplify/functions/downloadDocument/node_modules/@aws-sdk/client-sso/dist-types/protocols/Aws_restJson1.d.ts new file mode 100644 index 0000000..02d97aa --- /dev/null +++ 
b/amplify/functions/downloadDocument/node_modules/@aws-sdk/client-sso/dist-types/protocols/Aws_restJson1.d.ts @@ -0,0 +1,38 @@ +import { HttpRequest as __HttpRequest, HttpResponse as __HttpResponse } from "@smithy/protocol-http"; +import { SerdeContext as __SerdeContext } from "@smithy/types"; +import { GetRoleCredentialsCommandInput, GetRoleCredentialsCommandOutput } from "../commands/GetRoleCredentialsCommand"; +import { ListAccountRolesCommandInput, ListAccountRolesCommandOutput } from "../commands/ListAccountRolesCommand"; +import { ListAccountsCommandInput, ListAccountsCommandOutput } from "../commands/ListAccountsCommand"; +import { LogoutCommandInput, LogoutCommandOutput } from "../commands/LogoutCommand"; +/** + * serializeAws_restJson1GetRoleCredentialsCommand + */ +export declare const se_GetRoleCredentialsCommand: (input: GetRoleCredentialsCommandInput, context: __SerdeContext) => Promise<__HttpRequest>; +/** + * serializeAws_restJson1ListAccountRolesCommand + */ +export declare const se_ListAccountRolesCommand: (input: ListAccountRolesCommandInput, context: __SerdeContext) => Promise<__HttpRequest>; +/** + * serializeAws_restJson1ListAccountsCommand + */ +export declare const se_ListAccountsCommand: (input: ListAccountsCommandInput, context: __SerdeContext) => Promise<__HttpRequest>; +/** + * serializeAws_restJson1LogoutCommand + */ +export declare const se_LogoutCommand: (input: LogoutCommandInput, context: __SerdeContext) => Promise<__HttpRequest>; +/** + * deserializeAws_restJson1GetRoleCredentialsCommand + */ +export declare const de_GetRoleCredentialsCommand: (output: __HttpResponse, context: __SerdeContext) => Promise; +/** + * deserializeAws_restJson1ListAccountRolesCommand + */ +export declare const de_ListAccountRolesCommand: (output: __HttpResponse, context: __SerdeContext) => Promise; +/** + * deserializeAws_restJson1ListAccountsCommand + */ +export declare const de_ListAccountsCommand: (output: __HttpResponse, context: __SerdeContext) => 
Promise; +/** + * deserializeAws_restJson1LogoutCommand + */ +export declare const de_LogoutCommand: (output: __HttpResponse, context: __SerdeContext) => Promise; diff --git a/amplify/functions/downloadDocument/node_modules/@aws-sdk/client-sso/dist-types/runtimeConfig.browser.d.ts b/amplify/functions/downloadDocument/node_modules/@aws-sdk/client-sso/dist-types/runtimeConfig.browser.d.ts new file mode 100644 index 0000000..c593515 --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@aws-sdk/client-sso/dist-types/runtimeConfig.browser.d.ts @@ -0,0 +1,57 @@ +import { FetchHttpHandler as RequestHandler } from "@smithy/fetch-http-handler"; +import { SSOClientConfig } from "./SSOClient"; +/** + * @internal + */ +export declare const getRuntimeConfig: (config: SSOClientConfig) => { + runtime: string; + defaultsMode: import("@smithy/types").Provider; + bodyLengthChecker: import("@smithy/types").BodyLengthCalculator; + defaultUserAgentProvider: (config?: import("@aws-sdk/util-user-agent-browser").PreviouslyResolved | undefined) => Promise; + maxAttempts: number | import("@smithy/types").Provider; + region: string | import("@smithy/types").Provider; + requestHandler: import("@smithy/protocol-http").HttpHandler | RequestHandler; + retryMode: string | import("@smithy/types").Provider; + sha256: import("@smithy/types").HashConstructor; + streamCollector: import("@smithy/types").StreamCollector; + useDualstackEndpoint: boolean | import("@smithy/types").Provider; + useFipsEndpoint: boolean | import("@smithy/types").Provider; + apiVersion: string; + cacheMiddleware?: boolean | undefined; + urlParser: import("@smithy/types").UrlParser; + base64Decoder: import("@smithy/types").Decoder; + base64Encoder: (_input: string | Uint8Array) => string; + utf8Decoder: import("@smithy/types").Decoder; + utf8Encoder: (input: string | Uint8Array) => string; + disableHostPrefix: boolean; + serviceId: string; + profile?: string | undefined; + logger: import("@smithy/types").Logger; 
+ extensions: import("./runtimeExtensions").RuntimeExtension[]; + customUserAgent?: string | import("@smithy/types").UserAgent | undefined; + userAgentAppId?: string | import("@smithy/types").Provider | undefined; + retryStrategy?: import("@smithy/types").RetryStrategy | import("@smithy/types").RetryStrategyV2 | undefined; + endpoint?: ((string | import("@smithy/types").Endpoint | import("@smithy/types").Provider | import("@smithy/types").EndpointV2 | import("@smithy/types").Provider) & (string | import("@smithy/types").Provider | import("@smithy/types").Endpoint | import("@smithy/types").Provider | import("@smithy/types").EndpointV2 | import("@smithy/types").Provider)) | undefined; + endpointProvider: (endpointParams: import("./endpoint/EndpointParameters").EndpointParameters, context?: { + logger?: import("@smithy/types").Logger | undefined; + }) => import("@smithy/types").EndpointV2; + tls?: boolean | undefined; + serviceConfiguredEndpoint?: undefined; + authSchemePreference?: string[] | import("@smithy/types").Provider | undefined; + httpAuthSchemes: import("@smithy/types").HttpAuthScheme[] | ({ + schemeId: string; + identityProvider: (ipc: import("@smithy/types").IdentityProviderConfig) => import("@smithy/types").IdentityProvider | undefined; + signer: import("@aws-sdk/core").AwsSdkSigV4Signer; + } | { + schemeId: string; + identityProvider: (ipc: import("@smithy/types").IdentityProviderConfig) => import("@smithy/types").IdentityProvider | (() => Promise<{}>); + signer: import("@smithy/core").NoAuthSigner; + })[]; + httpAuthSchemeProvider: import("./auth/httpAuthSchemeProvider").SSOHttpAuthSchemeProvider; + credentials?: import("@smithy/types").AwsCredentialIdentity | import("@smithy/types").AwsCredentialIdentityProvider | undefined; + signer?: import("@smithy/types").RequestSigner | ((authScheme?: import("@smithy/types").AuthScheme | undefined) => Promise) | undefined; + signingEscapePath?: boolean | undefined; + systemClockOffset?: number | undefined; + 
signingRegion?: string | undefined; + signerConstructor?: (new (options: import("@smithy/signature-v4").SignatureV4Init & import("@smithy/signature-v4").SignatureV4CryptoInit) => import("@smithy/types").RequestSigner) | undefined; +}; diff --git a/amplify/functions/downloadDocument/node_modules/@aws-sdk/client-sso/dist-types/runtimeConfig.d.ts b/amplify/functions/downloadDocument/node_modules/@aws-sdk/client-sso/dist-types/runtimeConfig.d.ts new file mode 100644 index 0000000..4194fd5 --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@aws-sdk/client-sso/dist-types/runtimeConfig.d.ts @@ -0,0 +1,57 @@ +import { NodeHttpHandler as RequestHandler } from "@smithy/node-http-handler"; +import { SSOClientConfig } from "./SSOClient"; +/** + * @internal + */ +export declare const getRuntimeConfig: (config: SSOClientConfig) => { + runtime: string; + defaultsMode: import("@smithy/types").Provider; + authSchemePreference: string[] | import("@smithy/types").Provider; + bodyLengthChecker: import("@smithy/types").BodyLengthCalculator; + defaultUserAgentProvider: (config?: import("@aws-sdk/util-user-agent-node").PreviouslyResolved | undefined) => Promise; + maxAttempts: number | import("@smithy/types").Provider; + region: string | import("@smithy/types").Provider; + requestHandler: RequestHandler | import("@smithy/protocol-http").HttpHandler; + retryMode: string | import("@smithy/types").Provider; + sha256: import("@smithy/types").HashConstructor; + streamCollector: import("@smithy/types").StreamCollector; + useDualstackEndpoint: boolean | import("@smithy/types").Provider; + useFipsEndpoint: boolean | import("@smithy/types").Provider; + userAgentAppId: string | import("@smithy/types").Provider; + apiVersion: string; + cacheMiddleware?: boolean | undefined; + urlParser: import("@smithy/types").UrlParser; + base64Decoder: import("@smithy/types").Decoder; + base64Encoder: (_input: string | Uint8Array) => string; + utf8Decoder: import("@smithy/types").Decoder; + 
utf8Encoder: (input: string | Uint8Array) => string; + disableHostPrefix: boolean; + serviceId: string; + profile?: string | undefined; + logger: import("@smithy/types").Logger; + extensions: import("./runtimeExtensions").RuntimeExtension[]; + customUserAgent?: string | import("@smithy/types").UserAgent | undefined; + retryStrategy?: import("@smithy/types").RetryStrategy | import("@smithy/types").RetryStrategyV2 | undefined; + endpoint?: ((string | import("@smithy/types").Endpoint | import("@smithy/types").Provider | import("@smithy/types").EndpointV2 | import("@smithy/types").Provider) & (string | import("@smithy/types").Provider | import("@smithy/types").Endpoint | import("@smithy/types").Provider | import("@smithy/types").EndpointV2 | import("@smithy/types").Provider)) | undefined; + endpointProvider: (endpointParams: import("./endpoint/EndpointParameters").EndpointParameters, context?: { + logger?: import("@smithy/types").Logger | undefined; + }) => import("@smithy/types").EndpointV2; + tls?: boolean | undefined; + serviceConfiguredEndpoint?: undefined; + httpAuthSchemes: import("@smithy/types").HttpAuthScheme[] | ({ + schemeId: string; + identityProvider: (ipc: import("@smithy/types").IdentityProviderConfig) => import("@smithy/types").IdentityProvider | undefined; + signer: import("@aws-sdk/core").AwsSdkSigV4Signer; + } | { + schemeId: string; + identityProvider: (ipc: import("@smithy/types").IdentityProviderConfig) => import("@smithy/types").IdentityProvider | (() => Promise<{}>); + signer: import("@smithy/core").NoAuthSigner; + })[]; + httpAuthSchemeProvider: import("./auth/httpAuthSchemeProvider").SSOHttpAuthSchemeProvider; + credentials?: import("@smithy/types").AwsCredentialIdentity | import("@smithy/types").AwsCredentialIdentityProvider | undefined; + signer?: import("@smithy/types").RequestSigner | ((authScheme?: import("@smithy/types").AuthScheme | undefined) => Promise) | undefined; + signingEscapePath?: boolean | undefined; + systemClockOffset?: 
number | undefined; + signingRegion?: string | undefined; + signerConstructor?: (new (options: import("@smithy/signature-v4").SignatureV4Init & import("@smithy/signature-v4").SignatureV4CryptoInit) => import("@smithy/types").RequestSigner) | undefined; +}; diff --git a/amplify/functions/downloadDocument/node_modules/@aws-sdk/client-sso/dist-types/runtimeConfig.native.d.ts b/amplify/functions/downloadDocument/node_modules/@aws-sdk/client-sso/dist-types/runtimeConfig.native.d.ts new file mode 100644 index 0000000..38c1d33 --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@aws-sdk/client-sso/dist-types/runtimeConfig.native.d.ts @@ -0,0 +1,56 @@ +import { SSOClientConfig } from "./SSOClient"; +/** + * @internal + */ +export declare const getRuntimeConfig: (config: SSOClientConfig) => { + runtime: string; + sha256: import("@smithy/types").HashConstructor; + requestHandler: import("@smithy/types").NodeHttpHandlerOptions | import("@smithy/types").FetchHttpHandlerOptions | Record | import("@smithy/protocol-http").HttpHandler | import("@smithy/fetch-http-handler").FetchHttpHandler; + apiVersion: string; + cacheMiddleware?: boolean | undefined; + urlParser: import("@smithy/types").UrlParser; + bodyLengthChecker: import("@smithy/types").BodyLengthCalculator; + streamCollector: import("@smithy/types").StreamCollector; + base64Decoder: import("@smithy/types").Decoder; + base64Encoder: (_input: string | Uint8Array) => string; + utf8Decoder: import("@smithy/types").Decoder; + utf8Encoder: (input: string | Uint8Array) => string; + disableHostPrefix: boolean; + serviceId: string; + useDualstackEndpoint: boolean | import("@smithy/types").Provider; + useFipsEndpoint: boolean | import("@smithy/types").Provider; + region: string | import("@smithy/types").Provider; + profile?: string | undefined; + defaultUserAgentProvider: (config?: import("@aws-sdk/util-user-agent-browser").PreviouslyResolved | undefined) => Promise; + maxAttempts: number | 
import("@smithy/types").Provider; + retryMode: string | import("@smithy/types").Provider; + logger: import("@smithy/types").Logger; + extensions: import("./runtimeExtensions").RuntimeExtension[]; + defaultsMode: import("@smithy/smithy-client").DefaultsMode | import("@smithy/types").Provider; + customUserAgent?: string | import("@smithy/types").UserAgent | undefined; + userAgentAppId?: string | import("@smithy/types").Provider | undefined; + retryStrategy?: import("@smithy/types").RetryStrategy | import("@smithy/types").RetryStrategyV2 | undefined; + endpoint?: ((string | import("@smithy/types").Endpoint | import("@smithy/types").Provider | import("@smithy/types").EndpointV2 | import("@smithy/types").Provider) & (string | import("@smithy/types").Provider | import("@smithy/types").Endpoint | import("@smithy/types").Provider | import("@smithy/types").EndpointV2 | import("@smithy/types").Provider)) | undefined; + endpointProvider: (endpointParams: import("./endpoint/EndpointParameters").EndpointParameters, context?: { + logger?: import("@smithy/types").Logger | undefined; + }) => import("@smithy/types").EndpointV2; + tls?: boolean | undefined; + serviceConfiguredEndpoint?: undefined; + authSchemePreference?: string[] | import("@smithy/types").Provider | undefined; + httpAuthSchemes: import("@smithy/types").HttpAuthScheme[] | ({ + schemeId: string; + identityProvider: (ipc: import("@smithy/types").IdentityProviderConfig) => import("@smithy/types").IdentityProvider | undefined; + signer: import("@aws-sdk/core").AwsSdkSigV4Signer; + } | { + schemeId: string; + identityProvider: (ipc: import("@smithy/types").IdentityProviderConfig) => import("@smithy/types").IdentityProvider | (() => Promise<{}>); + signer: import("@smithy/core").NoAuthSigner; + })[]; + httpAuthSchemeProvider: import("./auth/httpAuthSchemeProvider").SSOHttpAuthSchemeProvider; + credentials?: import("@smithy/types").AwsCredentialIdentity | import("@smithy/types").AwsCredentialIdentityProvider | undefined; + 
signer?: import("@smithy/types").RequestSigner | ((authScheme?: import("@smithy/types").AuthScheme | undefined) => Promise) | undefined; + signingEscapePath?: boolean | undefined; + systemClockOffset?: number | undefined; + signingRegion?: string | undefined; + signerConstructor?: (new (options: import("@smithy/signature-v4").SignatureV4Init & import("@smithy/signature-v4").SignatureV4CryptoInit) => import("@smithy/types").RequestSigner) | undefined; +}; diff --git a/amplify/functions/downloadDocument/node_modules/@aws-sdk/client-sso/dist-types/runtimeConfig.shared.d.ts b/amplify/functions/downloadDocument/node_modules/@aws-sdk/client-sso/dist-types/runtimeConfig.shared.d.ts new file mode 100644 index 0000000..20ab682 --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@aws-sdk/client-sso/dist-types/runtimeConfig.shared.d.ts @@ -0,0 +1,32 @@ +import { AwsSdkSigV4Signer } from "@aws-sdk/core"; +import { NoAuthSigner } from "@smithy/core"; +import { IdentityProviderConfig } from "@smithy/types"; +import { SSOClientConfig } from "./SSOClient"; +/** + * @internal + */ +export declare const getRuntimeConfig: (config: SSOClientConfig) => { + apiVersion: string; + base64Decoder: import("@smithy/types").Decoder; + base64Encoder: (_input: string | Uint8Array) => string; + disableHostPrefix: boolean; + endpointProvider: (endpointParams: import("./endpoint/EndpointParameters").EndpointParameters, context?: { + logger?: import("@smithy/types").Logger | undefined; + }) => import("@smithy/types").EndpointV2; + extensions: import("./runtimeExtensions").RuntimeExtension[]; + httpAuthSchemeProvider: import("./auth/httpAuthSchemeProvider").SSOHttpAuthSchemeProvider; + httpAuthSchemes: import("@smithy/types").HttpAuthScheme[] | ({ + schemeId: string; + identityProvider: (ipc: IdentityProviderConfig) => import("@smithy/types").IdentityProvider | undefined; + signer: AwsSdkSigV4Signer; + } | { + schemeId: string; + identityProvider: (ipc: IdentityProviderConfig) => 
import("@smithy/types").IdentityProvider | (() => Promise<{}>); + signer: NoAuthSigner; + })[]; + logger: import("@smithy/types").Logger; + serviceId: string; + urlParser: import("@smithy/types").UrlParser; + utf8Decoder: import("@smithy/types").Decoder; + utf8Encoder: (input: string | Uint8Array) => string; +}; diff --git a/amplify/functions/downloadDocument/node_modules/@aws-sdk/client-sso/dist-types/runtimeExtensions.d.ts b/amplify/functions/downloadDocument/node_modules/@aws-sdk/client-sso/dist-types/runtimeExtensions.d.ts new file mode 100644 index 0000000..a0f078c --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@aws-sdk/client-sso/dist-types/runtimeExtensions.d.ts @@ -0,0 +1,17 @@ +import { SSOExtensionConfiguration } from "./extensionConfiguration"; +/** + * @public + */ +export interface RuntimeExtension { + configure(extensionConfiguration: SSOExtensionConfiguration): void; +} +/** + * @public + */ +export interface RuntimeExtensionsConfig { + extensions: RuntimeExtension[]; +} +/** + * @internal + */ +export declare const resolveRuntimeExtensions: (runtimeConfig: any, extensions: RuntimeExtension[]) => any; diff --git a/amplify/functions/downloadDocument/node_modules/@aws-sdk/client-sso/dist-types/ts3.4/SSO.d.ts b/amplify/functions/downloadDocument/node_modules/@aws-sdk/client-sso/dist-types/ts3.4/SSO.d.ts new file mode 100644 index 0000000..9a242fc --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@aws-sdk/client-sso/dist-types/ts3.4/SSO.d.ts @@ -0,0 +1,73 @@ +import { HttpHandlerOptions as __HttpHandlerOptions } from "@smithy/types"; +import { + GetRoleCredentialsCommandInput, + GetRoleCredentialsCommandOutput, +} from "./commands/GetRoleCredentialsCommand"; +import { + ListAccountRolesCommandInput, + ListAccountRolesCommandOutput, +} from "./commands/ListAccountRolesCommand"; +import { + ListAccountsCommandInput, + ListAccountsCommandOutput, +} from "./commands/ListAccountsCommand"; +import { + LogoutCommandInput, 
+ LogoutCommandOutput, +} from "./commands/LogoutCommand"; +import { SSOClient } from "./SSOClient"; +export interface SSO { + getRoleCredentials( + args: GetRoleCredentialsCommandInput, + options?: __HttpHandlerOptions + ): Promise; + getRoleCredentials( + args: GetRoleCredentialsCommandInput, + cb: (err: any, data?: GetRoleCredentialsCommandOutput) => void + ): void; + getRoleCredentials( + args: GetRoleCredentialsCommandInput, + options: __HttpHandlerOptions, + cb: (err: any, data?: GetRoleCredentialsCommandOutput) => void + ): void; + listAccountRoles( + args: ListAccountRolesCommandInput, + options?: __HttpHandlerOptions + ): Promise; + listAccountRoles( + args: ListAccountRolesCommandInput, + cb: (err: any, data?: ListAccountRolesCommandOutput) => void + ): void; + listAccountRoles( + args: ListAccountRolesCommandInput, + options: __HttpHandlerOptions, + cb: (err: any, data?: ListAccountRolesCommandOutput) => void + ): void; + listAccounts( + args: ListAccountsCommandInput, + options?: __HttpHandlerOptions + ): Promise; + listAccounts( + args: ListAccountsCommandInput, + cb: (err: any, data?: ListAccountsCommandOutput) => void + ): void; + listAccounts( + args: ListAccountsCommandInput, + options: __HttpHandlerOptions, + cb: (err: any, data?: ListAccountsCommandOutput) => void + ): void; + logout( + args: LogoutCommandInput, + options?: __HttpHandlerOptions + ): Promise; + logout( + args: LogoutCommandInput, + cb: (err: any, data?: LogoutCommandOutput) => void + ): void; + logout( + args: LogoutCommandInput, + options: __HttpHandlerOptions, + cb: (err: any, data?: LogoutCommandOutput) => void + ): void; +} +export declare class SSO extends SSOClient implements SSO {} diff --git a/amplify/functions/downloadDocument/node_modules/@aws-sdk/client-sso/dist-types/ts3.4/SSOClient.d.ts b/amplify/functions/downloadDocument/node_modules/@aws-sdk/client-sso/dist-types/ts3.4/SSOClient.d.ts new file mode 100644 index 0000000..efd5a5f --- /dev/null +++ 
b/amplify/functions/downloadDocument/node_modules/@aws-sdk/client-sso/dist-types/ts3.4/SSOClient.d.ts @@ -0,0 +1,138 @@ +import { + HostHeaderInputConfig, + HostHeaderResolvedConfig, +} from "@aws-sdk/middleware-host-header"; +import { + UserAgentInputConfig, + UserAgentResolvedConfig, +} from "@aws-sdk/middleware-user-agent"; +import { + RegionInputConfig, + RegionResolvedConfig, +} from "@smithy/config-resolver"; +import { + EndpointInputConfig, + EndpointResolvedConfig, +} from "@smithy/middleware-endpoint"; +import { + RetryInputConfig, + RetryResolvedConfig, +} from "@smithy/middleware-retry"; +import { HttpHandlerUserInput as __HttpHandlerUserInput } from "@smithy/protocol-http"; +import { + Client as __Client, + DefaultsMode as __DefaultsMode, + SmithyConfiguration as __SmithyConfiguration, + SmithyResolvedConfiguration as __SmithyResolvedConfiguration, +} from "@smithy/smithy-client"; +import { + BodyLengthCalculator as __BodyLengthCalculator, + CheckOptionalClientConfig as __CheckOptionalClientConfig, + ChecksumConstructor as __ChecksumConstructor, + Decoder as __Decoder, + Encoder as __Encoder, + HashConstructor as __HashConstructor, + HttpHandlerOptions as __HttpHandlerOptions, + Logger as __Logger, + Provider as __Provider, + Provider, + StreamCollector as __StreamCollector, + UrlParser as __UrlParser, + UserAgent as __UserAgent, +} from "@smithy/types"; +import { + HttpAuthSchemeInputConfig, + HttpAuthSchemeResolvedConfig, +} from "./auth/httpAuthSchemeProvider"; +import { + GetRoleCredentialsCommandInput, + GetRoleCredentialsCommandOutput, +} from "./commands/GetRoleCredentialsCommand"; +import { + ListAccountRolesCommandInput, + ListAccountRolesCommandOutput, +} from "./commands/ListAccountRolesCommand"; +import { + ListAccountsCommandInput, + ListAccountsCommandOutput, +} from "./commands/ListAccountsCommand"; +import { + LogoutCommandInput, + LogoutCommandOutput, +} from "./commands/LogoutCommand"; +import { + ClientInputEndpointParameters, + 
ClientResolvedEndpointParameters, + EndpointParameters, +} from "./endpoint/EndpointParameters"; +import { RuntimeExtension, RuntimeExtensionsConfig } from "./runtimeExtensions"; +export { __Client }; +export type ServiceInputTypes = + | GetRoleCredentialsCommandInput + | ListAccountRolesCommandInput + | ListAccountsCommandInput + | LogoutCommandInput; +export type ServiceOutputTypes = + | GetRoleCredentialsCommandOutput + | ListAccountRolesCommandOutput + | ListAccountsCommandOutput + | LogoutCommandOutput; +export interface ClientDefaults + extends Partial<__SmithyConfiguration<__HttpHandlerOptions>> { + requestHandler?: __HttpHandlerUserInput; + sha256?: __ChecksumConstructor | __HashConstructor; + urlParser?: __UrlParser; + bodyLengthChecker?: __BodyLengthCalculator; + streamCollector?: __StreamCollector; + base64Decoder?: __Decoder; + base64Encoder?: __Encoder; + utf8Decoder?: __Decoder; + utf8Encoder?: __Encoder; + runtime?: string; + disableHostPrefix?: boolean; + serviceId?: string; + useDualstackEndpoint?: boolean | __Provider; + useFipsEndpoint?: boolean | __Provider; + region?: string | __Provider; + profile?: string; + defaultUserAgentProvider?: Provider<__UserAgent>; + maxAttempts?: number | __Provider; + retryMode?: string | __Provider; + logger?: __Logger; + extensions?: RuntimeExtension[]; + defaultsMode?: __DefaultsMode | __Provider<__DefaultsMode>; +} +export type SSOClientConfigType = Partial< + __SmithyConfiguration<__HttpHandlerOptions> +> & + ClientDefaults & + UserAgentInputConfig & + RetryInputConfig & + RegionInputConfig & + HostHeaderInputConfig & + EndpointInputConfig & + HttpAuthSchemeInputConfig & + ClientInputEndpointParameters; +export interface SSOClientConfig extends SSOClientConfigType {} +export type SSOClientResolvedConfigType = + __SmithyResolvedConfiguration<__HttpHandlerOptions> & + Required & + RuntimeExtensionsConfig & + UserAgentResolvedConfig & + RetryResolvedConfig & + RegionResolvedConfig & + HostHeaderResolvedConfig & + 
EndpointResolvedConfig & + HttpAuthSchemeResolvedConfig & + ClientResolvedEndpointParameters; +export interface SSOClientResolvedConfig extends SSOClientResolvedConfigType {} +export declare class SSOClient extends __Client< + __HttpHandlerOptions, + ServiceInputTypes, + ServiceOutputTypes, + SSOClientResolvedConfig +> { + readonly config: SSOClientResolvedConfig; + constructor(...[configuration]: __CheckOptionalClientConfig); + destroy(): void; +} diff --git a/amplify/functions/downloadDocument/node_modules/@aws-sdk/client-sso/dist-types/ts3.4/auth/httpAuthExtensionConfiguration.d.ts b/amplify/functions/downloadDocument/node_modules/@aws-sdk/client-sso/dist-types/ts3.4/auth/httpAuthExtensionConfiguration.d.ts new file mode 100644 index 0000000..29f38b3 --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@aws-sdk/client-sso/dist-types/ts3.4/auth/httpAuthExtensionConfiguration.d.ts @@ -0,0 +1,32 @@ +import { + AwsCredentialIdentity, + AwsCredentialIdentityProvider, + HttpAuthScheme, +} from "@smithy/types"; +import { SSOHttpAuthSchemeProvider } from "./httpAuthSchemeProvider"; +export interface HttpAuthExtensionConfiguration { + setHttpAuthScheme(httpAuthScheme: HttpAuthScheme): void; + httpAuthSchemes(): HttpAuthScheme[]; + setHttpAuthSchemeProvider( + httpAuthSchemeProvider: SSOHttpAuthSchemeProvider + ): void; + httpAuthSchemeProvider(): SSOHttpAuthSchemeProvider; + setCredentials( + credentials: AwsCredentialIdentity | AwsCredentialIdentityProvider + ): void; + credentials(): + | AwsCredentialIdentity + | AwsCredentialIdentityProvider + | undefined; +} +export type HttpAuthRuntimeConfig = Partial<{ + httpAuthSchemes: HttpAuthScheme[]; + httpAuthSchemeProvider: SSOHttpAuthSchemeProvider; + credentials: AwsCredentialIdentity | AwsCredentialIdentityProvider; +}>; +export declare const getHttpAuthExtensionConfiguration: ( + runtimeConfig: HttpAuthRuntimeConfig +) => HttpAuthExtensionConfiguration; +export declare const resolveHttpAuthRuntimeConfig: ( 
+ config: HttpAuthExtensionConfiguration +) => HttpAuthRuntimeConfig; diff --git a/amplify/functions/downloadDocument/node_modules/@aws-sdk/client-sso/dist-types/ts3.4/auth/httpAuthSchemeProvider.d.ts b/amplify/functions/downloadDocument/node_modules/@aws-sdk/client-sso/dist-types/ts3.4/auth/httpAuthSchemeProvider.d.ts new file mode 100644 index 0000000..864f755 --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@aws-sdk/client-sso/dist-types/ts3.4/auth/httpAuthSchemeProvider.d.ts @@ -0,0 +1,46 @@ +import { + AwsSdkSigV4AuthInputConfig, + AwsSdkSigV4AuthResolvedConfig, + AwsSdkSigV4PreviouslyResolved, +} from "@aws-sdk/core"; +import { + HandlerExecutionContext, + HttpAuthScheme, + HttpAuthSchemeParameters, + HttpAuthSchemeParametersProvider, + HttpAuthSchemeProvider, + Provider, +} from "@smithy/types"; +import { SSOClientResolvedConfig } from "../SSOClient"; +export interface SSOHttpAuthSchemeParameters extends HttpAuthSchemeParameters { + region?: string; +} +export interface SSOHttpAuthSchemeParametersProvider + extends HttpAuthSchemeParametersProvider< + SSOClientResolvedConfig, + HandlerExecutionContext, + SSOHttpAuthSchemeParameters, + object + > {} +export declare const defaultSSOHttpAuthSchemeParametersProvider: ( + config: SSOClientResolvedConfig, + context: HandlerExecutionContext, + input: object +) => Promise; +export interface SSOHttpAuthSchemeProvider + extends HttpAuthSchemeProvider {} +export declare const defaultSSOHttpAuthSchemeProvider: SSOHttpAuthSchemeProvider; +export interface HttpAuthSchemeInputConfig extends AwsSdkSigV4AuthInputConfig { + authSchemePreference?: string[] | Provider; + httpAuthSchemes?: HttpAuthScheme[]; + httpAuthSchemeProvider?: SSOHttpAuthSchemeProvider; +} +export interface HttpAuthSchemeResolvedConfig + extends AwsSdkSigV4AuthResolvedConfig { + readonly authSchemePreference: Provider; + readonly httpAuthSchemes: HttpAuthScheme[]; + readonly httpAuthSchemeProvider: SSOHttpAuthSchemeProvider; +} +export 
declare const resolveHttpAuthSchemeConfig: ( + config: T & HttpAuthSchemeInputConfig & AwsSdkSigV4PreviouslyResolved +) => T & HttpAuthSchemeResolvedConfig; diff --git a/amplify/functions/downloadDocument/node_modules/@aws-sdk/client-sso/dist-types/ts3.4/commands/GetRoleCredentialsCommand.d.ts b/amplify/functions/downloadDocument/node_modules/@aws-sdk/client-sso/dist-types/ts3.4/commands/GetRoleCredentialsCommand.d.ts new file mode 100644 index 0000000..7c1b358 --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@aws-sdk/client-sso/dist-types/ts3.4/commands/GetRoleCredentialsCommand.d.ts @@ -0,0 +1,51 @@ +import { Command as $Command } from "@smithy/smithy-client"; +import { MetadataBearer as __MetadataBearer } from "@smithy/types"; +import { + GetRoleCredentialsRequest, + GetRoleCredentialsResponse, +} from "../models/models_0"; +import { + ServiceInputTypes, + ServiceOutputTypes, + SSOClientResolvedConfig, +} from "../SSOClient"; +export { __MetadataBearer }; +export { $Command }; +export interface GetRoleCredentialsCommandInput + extends GetRoleCredentialsRequest {} +export interface GetRoleCredentialsCommandOutput + extends GetRoleCredentialsResponse, + __MetadataBearer {} +declare const GetRoleCredentialsCommand_base: { + new ( + input: GetRoleCredentialsCommandInput + ): import("@smithy/smithy-client").CommandImpl< + GetRoleCredentialsCommandInput, + GetRoleCredentialsCommandOutput, + SSOClientResolvedConfig, + ServiceInputTypes, + ServiceOutputTypes + >; + new ( + __0_0: GetRoleCredentialsCommandInput + ): import("@smithy/smithy-client").CommandImpl< + GetRoleCredentialsCommandInput, + GetRoleCredentialsCommandOutput, + SSOClientResolvedConfig, + ServiceInputTypes, + ServiceOutputTypes + >; + getEndpointParameterInstructions(): import("@smithy/middleware-endpoint").EndpointParameterInstructions; +}; +export declare class GetRoleCredentialsCommand extends GetRoleCredentialsCommand_base { + protected static __types: { + api: { + input: 
GetRoleCredentialsRequest; + output: GetRoleCredentialsResponse; + }; + sdk: { + input: GetRoleCredentialsCommandInput; + output: GetRoleCredentialsCommandOutput; + }; + }; +} diff --git a/amplify/functions/downloadDocument/node_modules/@aws-sdk/client-sso/dist-types/ts3.4/commands/ListAccountRolesCommand.d.ts b/amplify/functions/downloadDocument/node_modules/@aws-sdk/client-sso/dist-types/ts3.4/commands/ListAccountRolesCommand.d.ts new file mode 100644 index 0000000..3b898a9 --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@aws-sdk/client-sso/dist-types/ts3.4/commands/ListAccountRolesCommand.d.ts @@ -0,0 +1,50 @@ +import { Command as $Command } from "@smithy/smithy-client"; +import { MetadataBearer as __MetadataBearer } from "@smithy/types"; +import { + ListAccountRolesRequest, + ListAccountRolesResponse, +} from "../models/models_0"; +import { + ServiceInputTypes, + ServiceOutputTypes, + SSOClientResolvedConfig, +} from "../SSOClient"; +export { __MetadataBearer }; +export { $Command }; +export interface ListAccountRolesCommandInput extends ListAccountRolesRequest {} +export interface ListAccountRolesCommandOutput + extends ListAccountRolesResponse, + __MetadataBearer {} +declare const ListAccountRolesCommand_base: { + new ( + input: ListAccountRolesCommandInput + ): import("@smithy/smithy-client").CommandImpl< + ListAccountRolesCommandInput, + ListAccountRolesCommandOutput, + SSOClientResolvedConfig, + ServiceInputTypes, + ServiceOutputTypes + >; + new ( + __0_0: ListAccountRolesCommandInput + ): import("@smithy/smithy-client").CommandImpl< + ListAccountRolesCommandInput, + ListAccountRolesCommandOutput, + SSOClientResolvedConfig, + ServiceInputTypes, + ServiceOutputTypes + >; + getEndpointParameterInstructions(): import("@smithy/middleware-endpoint").EndpointParameterInstructions; +}; +export declare class ListAccountRolesCommand extends ListAccountRolesCommand_base { + protected static __types: { + api: { + input: ListAccountRolesRequest; + 
output: ListAccountRolesResponse; + }; + sdk: { + input: ListAccountRolesCommandInput; + output: ListAccountRolesCommandOutput; + }; + }; +} diff --git a/amplify/functions/downloadDocument/node_modules/@aws-sdk/client-sso/dist-types/ts3.4/commands/ListAccountsCommand.d.ts b/amplify/functions/downloadDocument/node_modules/@aws-sdk/client-sso/dist-types/ts3.4/commands/ListAccountsCommand.d.ts new file mode 100644 index 0000000..3a00cc1 --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@aws-sdk/client-sso/dist-types/ts3.4/commands/ListAccountsCommand.d.ts @@ -0,0 +1,47 @@ +import { Command as $Command } from "@smithy/smithy-client"; +import { MetadataBearer as __MetadataBearer } from "@smithy/types"; +import { ListAccountsRequest, ListAccountsResponse } from "../models/models_0"; +import { + ServiceInputTypes, + ServiceOutputTypes, + SSOClientResolvedConfig, +} from "../SSOClient"; +export { __MetadataBearer }; +export { $Command }; +export interface ListAccountsCommandInput extends ListAccountsRequest {} +export interface ListAccountsCommandOutput + extends ListAccountsResponse, + __MetadataBearer {} +declare const ListAccountsCommand_base: { + new ( + input: ListAccountsCommandInput + ): import("@smithy/smithy-client").CommandImpl< + ListAccountsCommandInput, + ListAccountsCommandOutput, + SSOClientResolvedConfig, + ServiceInputTypes, + ServiceOutputTypes + >; + new ( + __0_0: ListAccountsCommandInput + ): import("@smithy/smithy-client").CommandImpl< + ListAccountsCommandInput, + ListAccountsCommandOutput, + SSOClientResolvedConfig, + ServiceInputTypes, + ServiceOutputTypes + >; + getEndpointParameterInstructions(): import("@smithy/middleware-endpoint").EndpointParameterInstructions; +}; +export declare class ListAccountsCommand extends ListAccountsCommand_base { + protected static __types: { + api: { + input: ListAccountsRequest; + output: ListAccountsResponse; + }; + sdk: { + input: ListAccountsCommandInput; + output: ListAccountsCommandOutput; + 
}; + }; +} diff --git a/amplify/functions/downloadDocument/node_modules/@aws-sdk/client-sso/dist-types/ts3.4/commands/LogoutCommand.d.ts b/amplify/functions/downloadDocument/node_modules/@aws-sdk/client-sso/dist-types/ts3.4/commands/LogoutCommand.d.ts new file mode 100644 index 0000000..2599250 --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@aws-sdk/client-sso/dist-types/ts3.4/commands/LogoutCommand.d.ts @@ -0,0 +1,41 @@ +import { Command as $Command } from "@smithy/smithy-client"; +import { MetadataBearer as __MetadataBearer } from "@smithy/types"; +import { LogoutRequest } from "../models/models_0"; +import { + ServiceInputTypes, + ServiceOutputTypes, + SSOClientResolvedConfig, +} from "../SSOClient"; +export { __MetadataBearer }; +export { $Command }; +export interface LogoutCommandInput extends LogoutRequest {} +export interface LogoutCommandOutput extends __MetadataBearer {} +declare const LogoutCommand_base: { + new (input: LogoutCommandInput): import("@smithy/smithy-client").CommandImpl< + LogoutCommandInput, + LogoutCommandOutput, + SSOClientResolvedConfig, + ServiceInputTypes, + ServiceOutputTypes + >; + new (__0_0: LogoutCommandInput): import("@smithy/smithy-client").CommandImpl< + LogoutCommandInput, + LogoutCommandOutput, + SSOClientResolvedConfig, + ServiceInputTypes, + ServiceOutputTypes + >; + getEndpointParameterInstructions(): import("@smithy/middleware-endpoint").EndpointParameterInstructions; +}; +export declare class LogoutCommand extends LogoutCommand_base { + protected static __types: { + api: { + input: LogoutRequest; + output: {}; + }; + sdk: { + input: LogoutCommandInput; + output: LogoutCommandOutput; + }; + }; +} diff --git a/amplify/functions/downloadDocument/node_modules/@aws-sdk/client-sso/dist-types/ts3.4/commands/index.d.ts b/amplify/functions/downloadDocument/node_modules/@aws-sdk/client-sso/dist-types/ts3.4/commands/index.d.ts new file mode 100644 index 0000000..0ab890d --- /dev/null +++ 
b/amplify/functions/downloadDocument/node_modules/@aws-sdk/client-sso/dist-types/ts3.4/commands/index.d.ts @@ -0,0 +1,4 @@ +export * from "./GetRoleCredentialsCommand"; +export * from "./ListAccountRolesCommand"; +export * from "./ListAccountsCommand"; +export * from "./LogoutCommand"; diff --git a/amplify/functions/downloadDocument/node_modules/@aws-sdk/client-sso/dist-types/ts3.4/endpoint/EndpointParameters.d.ts b/amplify/functions/downloadDocument/node_modules/@aws-sdk/client-sso/dist-types/ts3.4/endpoint/EndpointParameters.d.ts new file mode 100644 index 0000000..7f24540 --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@aws-sdk/client-sso/dist-types/ts3.4/endpoint/EndpointParameters.d.ts @@ -0,0 +1,51 @@ +import { + Endpoint, + EndpointParameters as __EndpointParameters, + EndpointV2, + Provider, +} from "@smithy/types"; +export interface ClientInputEndpointParameters { + region?: string | Provider; + useDualstackEndpoint?: boolean | Provider; + useFipsEndpoint?: boolean | Provider; + endpoint?: + | string + | Provider + | Endpoint + | Provider + | EndpointV2 + | Provider; +} +export type ClientResolvedEndpointParameters = ClientInputEndpointParameters & { + defaultSigningName: string; +}; +export declare const resolveClientEndpointParameters: ( + options: T & ClientInputEndpointParameters +) => T & + ClientInputEndpointParameters & { + defaultSigningName: string; + }; +export declare const commonParams: { + readonly UseFIPS: { + readonly type: "builtInParams"; + readonly name: "useFipsEndpoint"; + }; + readonly Endpoint: { + readonly type: "builtInParams"; + readonly name: "endpoint"; + }; + readonly Region: { + readonly type: "builtInParams"; + readonly name: "region"; + }; + readonly UseDualStack: { + readonly type: "builtInParams"; + readonly name: "useDualstackEndpoint"; + }; +}; +export interface EndpointParameters extends __EndpointParameters { + Region?: string; + UseDualStack?: boolean; + UseFIPS?: boolean; + Endpoint?: string; +} 
diff --git a/amplify/functions/downloadDocument/node_modules/@aws-sdk/client-sso/dist-types/ts3.4/endpoint/endpointResolver.d.ts b/amplify/functions/downloadDocument/node_modules/@aws-sdk/client-sso/dist-types/ts3.4/endpoint/endpointResolver.d.ts new file mode 100644 index 0000000..5909925 --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@aws-sdk/client-sso/dist-types/ts3.4/endpoint/endpointResolver.d.ts @@ -0,0 +1,8 @@ +import { EndpointV2, Logger } from "@smithy/types"; +import { EndpointParameters } from "./EndpointParameters"; +export declare const defaultEndpointResolver: ( + endpointParams: EndpointParameters, + context?: { + logger?: Logger; + } +) => EndpointV2; diff --git a/amplify/functions/downloadDocument/node_modules/@aws-sdk/client-sso/dist-types/ts3.4/endpoint/ruleset.d.ts b/amplify/functions/downloadDocument/node_modules/@aws-sdk/client-sso/dist-types/ts3.4/endpoint/ruleset.d.ts new file mode 100644 index 0000000..4b23899 --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@aws-sdk/client-sso/dist-types/ts3.4/endpoint/ruleset.d.ts @@ -0,0 +1,2 @@ +import { RuleSetObject } from "@smithy/types"; +export declare const ruleSet: RuleSetObject; diff --git a/amplify/functions/downloadDocument/node_modules/@aws-sdk/client-sso/dist-types/ts3.4/extensionConfiguration.d.ts b/amplify/functions/downloadDocument/node_modules/@aws-sdk/client-sso/dist-types/ts3.4/extensionConfiguration.d.ts new file mode 100644 index 0000000..c1b43ff --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@aws-sdk/client-sso/dist-types/ts3.4/extensionConfiguration.d.ts @@ -0,0 +1,9 @@ +import { AwsRegionExtensionConfiguration } from "@aws-sdk/types"; +import { HttpHandlerExtensionConfiguration } from "@smithy/protocol-http"; +import { DefaultExtensionConfiguration } from "@smithy/types"; +import { HttpAuthExtensionConfiguration } from "./auth/httpAuthExtensionConfiguration"; +export interface SSOExtensionConfiguration + extends 
HttpHandlerExtensionConfiguration, + DefaultExtensionConfiguration, + AwsRegionExtensionConfiguration, + HttpAuthExtensionConfiguration {} diff --git a/amplify/functions/downloadDocument/node_modules/@aws-sdk/client-sso/dist-types/ts3.4/index.d.ts b/amplify/functions/downloadDocument/node_modules/@aws-sdk/client-sso/dist-types/ts3.4/index.d.ts new file mode 100644 index 0000000..891aed3 --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@aws-sdk/client-sso/dist-types/ts3.4/index.d.ts @@ -0,0 +1,9 @@ +export * from "./SSOClient"; +export * from "./SSO"; +export { ClientInputEndpointParameters } from "./endpoint/EndpointParameters"; +export { RuntimeExtension } from "./runtimeExtensions"; +export { SSOExtensionConfiguration } from "./extensionConfiguration"; +export * from "./commands"; +export * from "./pagination"; +export * from "./models"; +export { SSOServiceException } from "./models/SSOServiceException"; diff --git a/amplify/functions/downloadDocument/node_modules/@aws-sdk/client-sso/dist-types/ts3.4/models/SSOServiceException.d.ts b/amplify/functions/downloadDocument/node_modules/@aws-sdk/client-sso/dist-types/ts3.4/models/SSOServiceException.d.ts new file mode 100644 index 0000000..1ad045d --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@aws-sdk/client-sso/dist-types/ts3.4/models/SSOServiceException.d.ts @@ -0,0 +1,9 @@ +import { + ServiceException as __ServiceException, + ServiceExceptionOptions as __ServiceExceptionOptions, +} from "@smithy/smithy-client"; +export { __ServiceExceptionOptions }; +export { __ServiceException }; +export declare class SSOServiceException extends __ServiceException { + constructor(options: __ServiceExceptionOptions); +} diff --git a/amplify/functions/downloadDocument/node_modules/@aws-sdk/client-sso/dist-types/ts3.4/models/index.d.ts b/amplify/functions/downloadDocument/node_modules/@aws-sdk/client-sso/dist-types/ts3.4/models/index.d.ts new file mode 100644 index 0000000..09c5d6e --- 
/dev/null +++ b/amplify/functions/downloadDocument/node_modules/@aws-sdk/client-sso/dist-types/ts3.4/models/index.d.ts @@ -0,0 +1 @@ +export * from "./models_0"; diff --git a/amplify/functions/downloadDocument/node_modules/@aws-sdk/client-sso/dist-types/ts3.4/models/models_0.d.ts b/amplify/functions/downloadDocument/node_modules/@aws-sdk/client-sso/dist-types/ts3.4/models/models_0.d.ts new file mode 100644 index 0000000..4bbe08c --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@aws-sdk/client-sso/dist-types/ts3.4/models/models_0.d.ts @@ -0,0 +1,93 @@ +import { ExceptionOptionType as __ExceptionOptionType } from "@smithy/smithy-client"; +import { SSOServiceException as __BaseException } from "./SSOServiceException"; +export interface AccountInfo { + accountId?: string | undefined; + accountName?: string | undefined; + emailAddress?: string | undefined; +} +export interface GetRoleCredentialsRequest { + roleName: string | undefined; + accountId: string | undefined; + accessToken: string | undefined; +} +export interface RoleCredentials { + accessKeyId?: string | undefined; + secretAccessKey?: string | undefined; + sessionToken?: string | undefined; + expiration?: number | undefined; +} +export interface GetRoleCredentialsResponse { + roleCredentials?: RoleCredentials | undefined; +} +export declare class InvalidRequestException extends __BaseException { + readonly name: "InvalidRequestException"; + readonly $fault: "client"; + constructor( + opts: __ExceptionOptionType + ); +} +export declare class ResourceNotFoundException extends __BaseException { + readonly name: "ResourceNotFoundException"; + readonly $fault: "client"; + constructor( + opts: __ExceptionOptionType + ); +} +export declare class TooManyRequestsException extends __BaseException { + readonly name: "TooManyRequestsException"; + readonly $fault: "client"; + constructor( + opts: __ExceptionOptionType + ); +} +export declare class UnauthorizedException extends __BaseException { + 
readonly name: "UnauthorizedException"; + readonly $fault: "client"; + constructor( + opts: __ExceptionOptionType + ); +} +export interface ListAccountRolesRequest { + nextToken?: string | undefined; + maxResults?: number | undefined; + accessToken: string | undefined; + accountId: string | undefined; +} +export interface RoleInfo { + roleName?: string | undefined; + accountId?: string | undefined; +} +export interface ListAccountRolesResponse { + nextToken?: string | undefined; + roleList?: RoleInfo[] | undefined; +} +export interface ListAccountsRequest { + nextToken?: string | undefined; + maxResults?: number | undefined; + accessToken: string | undefined; +} +export interface ListAccountsResponse { + nextToken?: string | undefined; + accountList?: AccountInfo[] | undefined; +} +export interface LogoutRequest { + accessToken: string | undefined; +} +export declare const GetRoleCredentialsRequestFilterSensitiveLog: ( + obj: GetRoleCredentialsRequest +) => any; +export declare const RoleCredentialsFilterSensitiveLog: ( + obj: RoleCredentials +) => any; +export declare const GetRoleCredentialsResponseFilterSensitiveLog: ( + obj: GetRoleCredentialsResponse +) => any; +export declare const ListAccountRolesRequestFilterSensitiveLog: ( + obj: ListAccountRolesRequest +) => any; +export declare const ListAccountsRequestFilterSensitiveLog: ( + obj: ListAccountsRequest +) => any; +export declare const LogoutRequestFilterSensitiveLog: ( + obj: LogoutRequest +) => any; diff --git a/amplify/functions/downloadDocument/node_modules/@aws-sdk/client-sso/dist-types/ts3.4/pagination/Interfaces.d.ts b/amplify/functions/downloadDocument/node_modules/@aws-sdk/client-sso/dist-types/ts3.4/pagination/Interfaces.d.ts new file mode 100644 index 0000000..2970898 --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@aws-sdk/client-sso/dist-types/ts3.4/pagination/Interfaces.d.ts @@ -0,0 +1,5 @@ +import { PaginationConfiguration } from "@smithy/types"; +import { SSOClient } from 
"../SSOClient"; +export interface SSOPaginationConfiguration extends PaginationConfiguration { + client: SSOClient; +} diff --git a/amplify/functions/downloadDocument/node_modules/@aws-sdk/client-sso/dist-types/ts3.4/pagination/ListAccountRolesPaginator.d.ts b/amplify/functions/downloadDocument/node_modules/@aws-sdk/client-sso/dist-types/ts3.4/pagination/ListAccountRolesPaginator.d.ts new file mode 100644 index 0000000..174f32b --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@aws-sdk/client-sso/dist-types/ts3.4/pagination/ListAccountRolesPaginator.d.ts @@ -0,0 +1,11 @@ +import { Paginator } from "@smithy/types"; +import { + ListAccountRolesCommandInput, + ListAccountRolesCommandOutput, +} from "../commands/ListAccountRolesCommand"; +import { SSOPaginationConfiguration } from "./Interfaces"; +export declare const paginateListAccountRoles: ( + config: SSOPaginationConfiguration, + input: ListAccountRolesCommandInput, + ...rest: any[] +) => Paginator; diff --git a/amplify/functions/downloadDocument/node_modules/@aws-sdk/client-sso/dist-types/ts3.4/pagination/ListAccountsPaginator.d.ts b/amplify/functions/downloadDocument/node_modules/@aws-sdk/client-sso/dist-types/ts3.4/pagination/ListAccountsPaginator.d.ts new file mode 100644 index 0000000..bb5e66d --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@aws-sdk/client-sso/dist-types/ts3.4/pagination/ListAccountsPaginator.d.ts @@ -0,0 +1,11 @@ +import { Paginator } from "@smithy/types"; +import { + ListAccountsCommandInput, + ListAccountsCommandOutput, +} from "../commands/ListAccountsCommand"; +import { SSOPaginationConfiguration } from "./Interfaces"; +export declare const paginateListAccounts: ( + config: SSOPaginationConfiguration, + input: ListAccountsCommandInput, + ...rest: any[] +) => Paginator; diff --git a/amplify/functions/downloadDocument/node_modules/@aws-sdk/client-sso/dist-types/ts3.4/pagination/index.d.ts 
b/amplify/functions/downloadDocument/node_modules/@aws-sdk/client-sso/dist-types/ts3.4/pagination/index.d.ts new file mode 100644 index 0000000..1e7866f --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@aws-sdk/client-sso/dist-types/ts3.4/pagination/index.d.ts @@ -0,0 +1,3 @@ +export * from "./Interfaces"; +export * from "./ListAccountRolesPaginator"; +export * from "./ListAccountsPaginator"; diff --git a/amplify/functions/downloadDocument/node_modules/@aws-sdk/client-sso/dist-types/ts3.4/protocols/Aws_restJson1.d.ts b/amplify/functions/downloadDocument/node_modules/@aws-sdk/client-sso/dist-types/ts3.4/protocols/Aws_restJson1.d.ts new file mode 100644 index 0000000..74eebdc --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@aws-sdk/client-sso/dist-types/ts3.4/protocols/Aws_restJson1.d.ts @@ -0,0 +1,53 @@ +import { + HttpRequest as __HttpRequest, + HttpResponse as __HttpResponse, +} from "@smithy/protocol-http"; +import { SerdeContext as __SerdeContext } from "@smithy/types"; +import { + GetRoleCredentialsCommandInput, + GetRoleCredentialsCommandOutput, +} from "../commands/GetRoleCredentialsCommand"; +import { + ListAccountRolesCommandInput, + ListAccountRolesCommandOutput, +} from "../commands/ListAccountRolesCommand"; +import { + ListAccountsCommandInput, + ListAccountsCommandOutput, +} from "../commands/ListAccountsCommand"; +import { + LogoutCommandInput, + LogoutCommandOutput, +} from "../commands/LogoutCommand"; +export declare const se_GetRoleCredentialsCommand: ( + input: GetRoleCredentialsCommandInput, + context: __SerdeContext +) => Promise<__HttpRequest>; +export declare const se_ListAccountRolesCommand: ( + input: ListAccountRolesCommandInput, + context: __SerdeContext +) => Promise<__HttpRequest>; +export declare const se_ListAccountsCommand: ( + input: ListAccountsCommandInput, + context: __SerdeContext +) => Promise<__HttpRequest>; +export declare const se_LogoutCommand: ( + input: LogoutCommandInput, + context: 
__SerdeContext +) => Promise<__HttpRequest>; +export declare const de_GetRoleCredentialsCommand: ( + output: __HttpResponse, + context: __SerdeContext +) => Promise; +export declare const de_ListAccountRolesCommand: ( + output: __HttpResponse, + context: __SerdeContext +) => Promise; +export declare const de_ListAccountsCommand: ( + output: __HttpResponse, + context: __SerdeContext +) => Promise; +export declare const de_LogoutCommand: ( + output: __HttpResponse, + context: __SerdeContext +) => Promise; diff --git a/amplify/functions/downloadDocument/node_modules/@aws-sdk/client-sso/dist-types/ts3.4/runtimeConfig.browser.d.ts b/amplify/functions/downloadDocument/node_modules/@aws-sdk/client-sso/dist-types/ts3.4/runtimeConfig.browser.d.ts new file mode 100644 index 0000000..4042bcf --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@aws-sdk/client-sso/dist-types/ts3.4/runtimeConfig.browser.d.ts @@ -0,0 +1,120 @@ +import { FetchHttpHandler as RequestHandler } from "@smithy/fetch-http-handler"; +import { SSOClientConfig } from "./SSOClient"; +export declare const getRuntimeConfig: (config: SSOClientConfig) => { + runtime: string; + defaultsMode: import("@smithy/types").Provider< + import("@smithy/smithy-client").ResolvedDefaultsMode + >; + bodyLengthChecker: import("@smithy/types").BodyLengthCalculator; + defaultUserAgentProvider: ( + config?: + | import("@aws-sdk/util-user-agent-browser").PreviouslyResolved + | undefined + ) => Promise; + maxAttempts: number | import("@smithy/types").Provider; + region: string | import("@smithy/types").Provider; + requestHandler: + | import("@smithy/protocol-http").HttpHandler + | RequestHandler; + retryMode: string | import("@smithy/types").Provider; + sha256: import("@smithy/types").HashConstructor; + streamCollector: import("@smithy/types").StreamCollector; + useDualstackEndpoint: boolean | import("@smithy/types").Provider; + useFipsEndpoint: boolean | import("@smithy/types").Provider; + apiVersion: string; + 
cacheMiddleware?: boolean | undefined; + urlParser: import("@smithy/types").UrlParser; + base64Decoder: import("@smithy/types").Decoder; + base64Encoder: (_input: string | Uint8Array) => string; + utf8Decoder: import("@smithy/types").Decoder; + utf8Encoder: (input: string | Uint8Array) => string; + disableHostPrefix: boolean; + serviceId: string; + profile?: string | undefined; + logger: import("@smithy/types").Logger; + extensions: import("./runtimeExtensions").RuntimeExtension[]; + customUserAgent?: string | import("@smithy/types").UserAgent | undefined; + userAgentAppId?: + | string + | import("@smithy/types").Provider + | undefined; + retryStrategy?: + | import("@smithy/types").RetryStrategy + | import("@smithy/types").RetryStrategyV2 + | undefined; + endpoint?: + | (( + | string + | import("@smithy/types").Endpoint + | import("@smithy/types").Provider + | import("@smithy/types").EndpointV2 + | import("@smithy/types").Provider + ) & + ( + | string + | import("@smithy/types").Provider + | import("@smithy/types").Endpoint + | import("@smithy/types").Provider + | import("@smithy/types").EndpointV2 + | import("@smithy/types").Provider + )) + | undefined; + endpointProvider: ( + endpointParams: import("./endpoint/EndpointParameters").EndpointParameters, + context?: { + logger?: import("@smithy/types").Logger | undefined; + } + ) => import("@smithy/types").EndpointV2; + tls?: boolean | undefined; + serviceConfiguredEndpoint?: undefined; + authSchemePreference?: + | string[] + | import("@smithy/types").Provider + | undefined; + httpAuthSchemes: + | import("@smithy/types").HttpAuthScheme[] + | ( + | { + schemeId: string; + identityProvider: ( + ipc: import("@smithy/types").IdentityProviderConfig + ) => + | import("@smithy/types").IdentityProvider< + import("@smithy/types").Identity + > + | undefined; + signer: import("@aws-sdk/core").AwsSdkSigV4Signer; + } + | { + schemeId: string; + identityProvider: ( + ipc: import("@smithy/types").IdentityProviderConfig + ) => + | 
import("@smithy/types").IdentityProvider< + import("@smithy/types").Identity + > + | (() => Promise<{}>); + signer: import("@smithy/core").NoAuthSigner; + } + )[]; + httpAuthSchemeProvider: import("./auth/httpAuthSchemeProvider").SSOHttpAuthSchemeProvider; + credentials?: + | import("@smithy/types").AwsCredentialIdentity + | import("@smithy/types").AwsCredentialIdentityProvider + | undefined; + signer?: + | import("@smithy/types").RequestSigner + | (( + authScheme?: import("@smithy/types").AuthScheme | undefined + ) => Promise) + | undefined; + signingEscapePath?: boolean | undefined; + systemClockOffset?: number | undefined; + signingRegion?: string | undefined; + signerConstructor?: + | (new ( + options: import("@smithy/signature-v4").SignatureV4Init & + import("@smithy/signature-v4").SignatureV4CryptoInit + ) => import("@smithy/types").RequestSigner) + | undefined; +}; diff --git a/amplify/functions/downloadDocument/node_modules/@aws-sdk/client-sso/dist-types/ts3.4/runtimeConfig.d.ts b/amplify/functions/downloadDocument/node_modules/@aws-sdk/client-sso/dist-types/ts3.4/runtimeConfig.d.ts new file mode 100644 index 0000000..7152445 --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@aws-sdk/client-sso/dist-types/ts3.4/runtimeConfig.d.ts @@ -0,0 +1,114 @@ +import { NodeHttpHandler as RequestHandler } from "@smithy/node-http-handler"; +import { SSOClientConfig } from "./SSOClient"; +export declare const getRuntimeConfig: (config: SSOClientConfig) => { + runtime: string; + defaultsMode: import("@smithy/types").Provider< + import("@smithy/smithy-client").ResolvedDefaultsMode + >; + authSchemePreference: string[] | import("@smithy/types").Provider; + bodyLengthChecker: import("@smithy/types").BodyLengthCalculator; + defaultUserAgentProvider: ( + config?: + | import("@aws-sdk/util-user-agent-node").PreviouslyResolved + | undefined + ) => Promise; + maxAttempts: number | import("@smithy/types").Provider; + region: string | 
import("@smithy/types").Provider; + requestHandler: + | RequestHandler + | import("@smithy/protocol-http").HttpHandler; + retryMode: string | import("@smithy/types").Provider; + sha256: import("@smithy/types").HashConstructor; + streamCollector: import("@smithy/types").StreamCollector; + useDualstackEndpoint: boolean | import("@smithy/types").Provider; + useFipsEndpoint: boolean | import("@smithy/types").Provider; + userAgentAppId: string | import("@smithy/types").Provider; + apiVersion: string; + cacheMiddleware?: boolean | undefined; + urlParser: import("@smithy/types").UrlParser; + base64Decoder: import("@smithy/types").Decoder; + base64Encoder: (_input: string | Uint8Array) => string; + utf8Decoder: import("@smithy/types").Decoder; + utf8Encoder: (input: string | Uint8Array) => string; + disableHostPrefix: boolean; + serviceId: string; + profile?: string | undefined; + logger: import("@smithy/types").Logger; + extensions: import("./runtimeExtensions").RuntimeExtension[]; + customUserAgent?: string | import("@smithy/types").UserAgent | undefined; + retryStrategy?: + | import("@smithy/types").RetryStrategy + | import("@smithy/types").RetryStrategyV2 + | undefined; + endpoint?: + | (( + | string + | import("@smithy/types").Endpoint + | import("@smithy/types").Provider + | import("@smithy/types").EndpointV2 + | import("@smithy/types").Provider + ) & + ( + | string + | import("@smithy/types").Provider + | import("@smithy/types").Endpoint + | import("@smithy/types").Provider + | import("@smithy/types").EndpointV2 + | import("@smithy/types").Provider + )) + | undefined; + endpointProvider: ( + endpointParams: import("./endpoint/EndpointParameters").EndpointParameters, + context?: { + logger?: import("@smithy/types").Logger | undefined; + } + ) => import("@smithy/types").EndpointV2; + tls?: boolean | undefined; + serviceConfiguredEndpoint?: undefined; + httpAuthSchemes: + | import("@smithy/types").HttpAuthScheme[] + | ( + | { + schemeId: string; + identityProvider: ( + 
ipc: import("@smithy/types").IdentityProviderConfig + ) => + | import("@smithy/types").IdentityProvider< + import("@smithy/types").Identity + > + | undefined; + signer: import("@aws-sdk/core").AwsSdkSigV4Signer; + } + | { + schemeId: string; + identityProvider: ( + ipc: import("@smithy/types").IdentityProviderConfig + ) => + | import("@smithy/types").IdentityProvider< + import("@smithy/types").Identity + > + | (() => Promise<{}>); + signer: import("@smithy/core").NoAuthSigner; + } + )[]; + httpAuthSchemeProvider: import("./auth/httpAuthSchemeProvider").SSOHttpAuthSchemeProvider; + credentials?: + | import("@smithy/types").AwsCredentialIdentity + | import("@smithy/types").AwsCredentialIdentityProvider + | undefined; + signer?: + | import("@smithy/types").RequestSigner + | (( + authScheme?: import("@smithy/types").AuthScheme | undefined + ) => Promise) + | undefined; + signingEscapePath?: boolean | undefined; + systemClockOffset?: number | undefined; + signingRegion?: string | undefined; + signerConstructor?: + | (new ( + options: import("@smithy/signature-v4").SignatureV4Init & + import("@smithy/signature-v4").SignatureV4CryptoInit + ) => import("@smithy/types").RequestSigner) + | undefined; +}; diff --git a/amplify/functions/downloadDocument/node_modules/@aws-sdk/client-sso/dist-types/ts3.4/runtimeConfig.native.d.ts b/amplify/functions/downloadDocument/node_modules/@aws-sdk/client-sso/dist-types/ts3.4/runtimeConfig.native.d.ts new file mode 100644 index 0000000..3dc6c95 --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@aws-sdk/client-sso/dist-types/ts3.4/runtimeConfig.native.d.ts @@ -0,0 +1,124 @@ +import { SSOClientConfig } from "./SSOClient"; +export declare const getRuntimeConfig: (config: SSOClientConfig) => { + runtime: string; + sha256: import("@smithy/types").HashConstructor; + requestHandler: + | import("@smithy/types").NodeHttpHandlerOptions + | import("@smithy/types").FetchHttpHandlerOptions + | Record + | 
import("@smithy/protocol-http").HttpHandler + | import("@smithy/fetch-http-handler").FetchHttpHandler; + apiVersion: string; + cacheMiddleware?: boolean | undefined; + urlParser: import("@smithy/types").UrlParser; + bodyLengthChecker: import("@smithy/types").BodyLengthCalculator; + streamCollector: import("@smithy/types").StreamCollector; + base64Decoder: import("@smithy/types").Decoder; + base64Encoder: (_input: string | Uint8Array) => string; + utf8Decoder: import("@smithy/types").Decoder; + utf8Encoder: (input: string | Uint8Array) => string; + disableHostPrefix: boolean; + serviceId: string; + useDualstackEndpoint: boolean | import("@smithy/types").Provider; + useFipsEndpoint: boolean | import("@smithy/types").Provider; + region: string | import("@smithy/types").Provider; + profile?: string | undefined; + defaultUserAgentProvider: ( + config?: + | import("@aws-sdk/util-user-agent-browser").PreviouslyResolved + | undefined + ) => Promise; + maxAttempts: number | import("@smithy/types").Provider; + retryMode: string | import("@smithy/types").Provider; + logger: import("@smithy/types").Logger; + extensions: import("./runtimeExtensions").RuntimeExtension[]; + defaultsMode: + | import("@smithy/smithy-client").DefaultsMode + | import("@smithy/types").Provider< + import("@smithy/smithy-client").DefaultsMode + >; + customUserAgent?: string | import("@smithy/types").UserAgent | undefined; + userAgentAppId?: + | string + | import("@smithy/types").Provider + | undefined; + retryStrategy?: + | import("@smithy/types").RetryStrategy + | import("@smithy/types").RetryStrategyV2 + | undefined; + endpoint?: + | (( + | string + | import("@smithy/types").Endpoint + | import("@smithy/types").Provider + | import("@smithy/types").EndpointV2 + | import("@smithy/types").Provider + ) & + ( + | string + | import("@smithy/types").Provider + | import("@smithy/types").Endpoint + | import("@smithy/types").Provider + | import("@smithy/types").EndpointV2 + | import("@smithy/types").Provider + 
)) + | undefined; + endpointProvider: ( + endpointParams: import("./endpoint/EndpointParameters").EndpointParameters, + context?: { + logger?: import("@smithy/types").Logger | undefined; + } + ) => import("@smithy/types").EndpointV2; + tls?: boolean | undefined; + serviceConfiguredEndpoint?: undefined; + authSchemePreference?: + | string[] + | import("@smithy/types").Provider + | undefined; + httpAuthSchemes: + | import("@smithy/types").HttpAuthScheme[] + | ( + | { + schemeId: string; + identityProvider: ( + ipc: import("@smithy/types").IdentityProviderConfig + ) => + | import("@smithy/types").IdentityProvider< + import("@smithy/types").Identity + > + | undefined; + signer: import("@aws-sdk/core").AwsSdkSigV4Signer; + } + | { + schemeId: string; + identityProvider: ( + ipc: import("@smithy/types").IdentityProviderConfig + ) => + | import("@smithy/types").IdentityProvider< + import("@smithy/types").Identity + > + | (() => Promise<{}>); + signer: import("@smithy/core").NoAuthSigner; + } + )[]; + httpAuthSchemeProvider: import("./auth/httpAuthSchemeProvider").SSOHttpAuthSchemeProvider; + credentials?: + | import("@smithy/types").AwsCredentialIdentity + | import("@smithy/types").AwsCredentialIdentityProvider + | undefined; + signer?: + | import("@smithy/types").RequestSigner + | (( + authScheme?: import("@smithy/types").AuthScheme | undefined + ) => Promise) + | undefined; + signingEscapePath?: boolean | undefined; + systemClockOffset?: number | undefined; + signingRegion?: string | undefined; + signerConstructor?: + | (new ( + options: import("@smithy/signature-v4").SignatureV4Init & + import("@smithy/signature-v4").SignatureV4CryptoInit + ) => import("@smithy/types").RequestSigner) + | undefined; +}; diff --git a/amplify/functions/downloadDocument/node_modules/@aws-sdk/client-sso/dist-types/ts3.4/runtimeConfig.shared.d.ts b/amplify/functions/downloadDocument/node_modules/@aws-sdk/client-sso/dist-types/ts3.4/runtimeConfig.shared.d.ts new file mode 100644 index 
0000000..00b2942 --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@aws-sdk/client-sso/dist-types/ts3.4/runtimeConfig.shared.d.ts @@ -0,0 +1,49 @@ +import { AwsSdkSigV4Signer } from "@aws-sdk/core"; +import { NoAuthSigner } from "@smithy/core"; +import { IdentityProviderConfig } from "@smithy/types"; +import { SSOClientConfig } from "./SSOClient"; +export declare const getRuntimeConfig: (config: SSOClientConfig) => { + apiVersion: string; + base64Decoder: import("@smithy/types").Decoder; + base64Encoder: (_input: string | Uint8Array) => string; + disableHostPrefix: boolean; + endpointProvider: ( + endpointParams: import("./endpoint/EndpointParameters").EndpointParameters, + context?: { + logger?: import("@smithy/types").Logger | undefined; + } + ) => import("@smithy/types").EndpointV2; + extensions: import("./runtimeExtensions").RuntimeExtension[]; + httpAuthSchemeProvider: import("./auth/httpAuthSchemeProvider").SSOHttpAuthSchemeProvider; + httpAuthSchemes: + | import("@smithy/types").HttpAuthScheme[] + | ( + | { + schemeId: string; + identityProvider: ( + ipc: IdentityProviderConfig + ) => + | import("@smithy/types").IdentityProvider< + import("@smithy/types").Identity + > + | undefined; + signer: AwsSdkSigV4Signer; + } + | { + schemeId: string; + identityProvider: ( + ipc: IdentityProviderConfig + ) => + | import("@smithy/types").IdentityProvider< + import("@smithy/types").Identity + > + | (() => Promise<{}>); + signer: NoAuthSigner; + } + )[]; + logger: import("@smithy/types").Logger; + serviceId: string; + urlParser: import("@smithy/types").UrlParser; + utf8Decoder: import("@smithy/types").Decoder; + utf8Encoder: (input: string | Uint8Array) => string; +}; diff --git a/amplify/functions/downloadDocument/node_modules/@aws-sdk/client-sso/dist-types/ts3.4/runtimeExtensions.d.ts b/amplify/functions/downloadDocument/node_modules/@aws-sdk/client-sso/dist-types/ts3.4/runtimeExtensions.d.ts new file mode 100644 index 0000000..fbec1e5 --- /dev/null 
+++ b/amplify/functions/downloadDocument/node_modules/@aws-sdk/client-sso/dist-types/ts3.4/runtimeExtensions.d.ts @@ -0,0 +1,11 @@ +import { SSOExtensionConfiguration } from "./extensionConfiguration"; +export interface RuntimeExtension { + configure(extensionConfiguration: SSOExtensionConfiguration): void; +} +export interface RuntimeExtensionsConfig { + extensions: RuntimeExtension[]; +} +export declare const resolveRuntimeExtensions: ( + runtimeConfig: any, + extensions: RuntimeExtension[] +) => any; diff --git a/amplify/functions/downloadDocument/node_modules/@aws-sdk/client-sso/package.json b/amplify/functions/downloadDocument/node_modules/@aws-sdk/client-sso/package.json new file mode 100644 index 0000000..971fcd1 --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@aws-sdk/client-sso/package.json @@ -0,0 +1,98 @@ +{ + "name": "@aws-sdk/client-sso", + "description": "AWS SDK for JavaScript Sso Client for Node.js, Browser and React Native", + "version": "3.803.0", + "scripts": { + "build": "concurrently 'yarn:build:cjs' 'yarn:build:es' 'yarn:build:types'", + "build:cjs": "node ../../scripts/compilation/inline client-sso", + "build:es": "tsc -p tsconfig.es.json", + "build:include:deps": "lerna run --scope $npm_package_name --include-dependencies build", + "build:types": "tsc -p tsconfig.types.json", + "build:types:downlevel": "downlevel-dts dist-types dist-types/ts3.4", + "clean": "rimraf ./dist-* && rimraf *.tsbuildinfo", + "extract:docs": "api-extractor run --local", + "generate:client": "node ../../scripts/generate-clients/single-service --solo sso" + }, + "main": "./dist-cjs/index.js", + "types": "./dist-types/index.d.ts", + "module": "./dist-es/index.js", + "sideEffects": false, + "dependencies": { + "@aws-crypto/sha256-browser": "5.2.0", + "@aws-crypto/sha256-js": "5.2.0", + "@aws-sdk/core": "3.799.0", + "@aws-sdk/middleware-host-header": "3.775.0", + "@aws-sdk/middleware-logger": "3.775.0", + "@aws-sdk/middleware-recursion-detection": 
"3.775.0", + "@aws-sdk/middleware-user-agent": "3.799.0", + "@aws-sdk/region-config-resolver": "3.775.0", + "@aws-sdk/types": "3.775.0", + "@aws-sdk/util-endpoints": "3.787.0", + "@aws-sdk/util-user-agent-browser": "3.775.0", + "@aws-sdk/util-user-agent-node": "3.799.0", + "@smithy/config-resolver": "^4.1.0", + "@smithy/core": "^3.3.0", + "@smithy/fetch-http-handler": "^5.0.2", + "@smithy/hash-node": "^4.0.2", + "@smithy/invalid-dependency": "^4.0.2", + "@smithy/middleware-content-length": "^4.0.2", + "@smithy/middleware-endpoint": "^4.1.1", + "@smithy/middleware-retry": "^4.1.2", + "@smithy/middleware-serde": "^4.0.3", + "@smithy/middleware-stack": "^4.0.2", + "@smithy/node-config-provider": "^4.0.2", + "@smithy/node-http-handler": "^4.0.4", + "@smithy/protocol-http": "^5.1.0", + "@smithy/smithy-client": "^4.2.1", + "@smithy/types": "^4.2.0", + "@smithy/url-parser": "^4.0.2", + "@smithy/util-base64": "^4.0.0", + "@smithy/util-body-length-browser": "^4.0.0", + "@smithy/util-body-length-node": "^4.0.0", + "@smithy/util-defaults-mode-browser": "^4.0.9", + "@smithy/util-defaults-mode-node": "^4.0.9", + "@smithy/util-endpoints": "^3.0.2", + "@smithy/util-middleware": "^4.0.2", + "@smithy/util-retry": "^4.0.3", + "@smithy/util-utf8": "^4.0.0", + "tslib": "^2.6.2" + }, + "devDependencies": { + "@tsconfig/node18": "18.2.4", + "@types/node": "^18.19.69", + "concurrently": "7.0.0", + "downlevel-dts": "0.10.1", + "rimraf": "3.0.2", + "typescript": "~5.2.2" + }, + "engines": { + "node": ">=18.0.0" + }, + "typesVersions": { + "<4.0": { + "dist-types/*": [ + "dist-types/ts3.4/*" + ] + } + }, + "files": [ + "dist-*/**" + ], + "author": { + "name": "AWS SDK for JavaScript Team", + "url": "https://aws.amazon.com/javascript/" + }, + "license": "Apache-2.0", + "browser": { + "./dist-es/runtimeConfig": "./dist-es/runtimeConfig.browser" + }, + "react-native": { + "./dist-es/runtimeConfig": "./dist-es/runtimeConfig.native" + }, + "homepage": 
"https://github.com/aws/aws-sdk-js-v3/tree/main/clients/client-sso", + "repository": { + "type": "git", + "url": "https://github.com/aws/aws-sdk-js-v3.git", + "directory": "clients/client-sso" + } +} diff --git a/amplify/functions/downloadDocument/node_modules/@aws-sdk/core/README.md b/amplify/functions/downloadDocument/node_modules/@aws-sdk/core/README.md new file mode 100644 index 0000000..6056468 --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@aws-sdk/core/README.md @@ -0,0 +1,39 @@ +# `@aws-sdk/core` + +This package provides common or core functionality to the AWS SDK for JavaScript (v3). + +You do not need to explicitly install this package, since it will be transitively installed by AWS SDK clients. + +## `@aws-sdk/core` submodules + +Core submodules are organized for distribution via the `package.json` `exports` field. + +`exports` is supported by default by the latest Node.js, webpack, and esbuild. For react-native, it can be +enabled via instructions found at [reactnative.dev/blog](https://reactnative.dev/blog/2023/06/21/package-exports-support). + +Think of `@aws-sdk/core` as a mono-package within the monorepo. +It preserves the benefits of modularization, for example to optimize Node.js initialization speed, +while making it easier to have a consistent version of core dependencies, reducing package sprawl when +installing an SDK client. + +### Guide for submodules + +- Each `index.ts` file corresponding to the pattern `./src/submodules//index.ts` will be + published as a separate `dist-cjs` bundled submodule index using the `Inliner.js` build script. +- create a folder as `./src/submodules/` including an `index.ts` file and a `README.md` file. + - The linter will throw an error on missing submodule metadata in `package.json` and the various `tsconfig.json` files, but it will automatically fix them if possible. 
+- a submodule is equivalent to a standalone `@aws-sdk/` package in that importing it in Node.js will resolve a separate bundle. +- submodules may not relatively import files from other submodules. Instead, directly use the `@scope/pkg/submodule` name as the import. + - The linter will check for this and throw an error. +- To the extent possible, correctly declaring submodule metadata is validated by the linter in `@aws-sdk/core`. + The linter runs during `yarn build` and also as `yarn lint`. + +### When should I create an `@aws-sdk/core/submodule` vs. `@aws-sdk/new-package`? + +Keep in mind that the core package is installed by all AWS SDK clients. + +If the component functionality is upstream of multiple clients, it is +a good candidate for a core submodule. For example, XML serialization. + +If the component's functionality is downstream of a client, for example S3 pre-signing, +it should be a standalone package with potentially a peer or runtime dependency on an AWS SDK client. diff --git a/amplify/functions/downloadDocument/node_modules/@aws-sdk/core/account-id-endpoint.d.ts b/amplify/functions/downloadDocument/node_modules/@aws-sdk/core/account-id-endpoint.d.ts new file mode 100644 index 0000000..60f14d1 --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@aws-sdk/core/account-id-endpoint.d.ts @@ -0,0 +1,7 @@ +/** + * Do not edit: + * This is a compatibility redirect for contexts that do not understand package.json exports field. 
+ */ +declare module "@aws-sdk/core/account-id-endpoint" { + export * from "@aws-sdk/core/dist-types/submodules/account-id-endpoint/index.d"; +} diff --git a/amplify/functions/downloadDocument/node_modules/@aws-sdk/core/account-id-endpoint.js b/amplify/functions/downloadDocument/node_modules/@aws-sdk/core/account-id-endpoint.js new file mode 100644 index 0000000..b2550f7 --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@aws-sdk/core/account-id-endpoint.js @@ -0,0 +1,6 @@ + +/** + * Do not edit: + * This is a compatibility redirect for contexts that do not understand package.json exports field. + */ +module.exports = require("./dist-cjs/submodules/account-id-endpoint/index.js"); diff --git a/amplify/functions/downloadDocument/node_modules/@aws-sdk/core/client.d.ts b/amplify/functions/downloadDocument/node_modules/@aws-sdk/core/client.d.ts new file mode 100644 index 0000000..ce995ae --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@aws-sdk/core/client.d.ts @@ -0,0 +1,7 @@ +/** + * Do not edit: + * This is a compatibility redirect for contexts that do not understand package.json exports field. + */ +declare module "@aws-sdk/core/client" { + export * from "@aws-sdk/core/dist-types/submodules/client/index.d"; +} diff --git a/amplify/functions/downloadDocument/node_modules/@aws-sdk/core/client.js b/amplify/functions/downloadDocument/node_modules/@aws-sdk/core/client.js new file mode 100644 index 0000000..e3a644b --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@aws-sdk/core/client.js @@ -0,0 +1,5 @@ +/** + * Do not edit: + * This is a compatibility redirect for contexts that do not understand package.json exports field. 
+ */ +module.exports = require("./dist-cjs/submodules/client/index.js"); diff --git a/amplify/functions/downloadDocument/node_modules/@aws-sdk/core/dist-cjs/index.js b/amplify/functions/downloadDocument/node_modules/@aws-sdk/core/dist-cjs/index.js new file mode 100644 index 0000000..cddde6a --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@aws-sdk/core/dist-cjs/index.js @@ -0,0 +1,6 @@ +"use strict"; +Object.defineProperty(exports, "__esModule", { value: true }); +const tslib_1 = require("tslib"); +tslib_1.__exportStar(require("./submodules/client/index"), exports); +tslib_1.__exportStar(require("./submodules/httpAuthSchemes/index"), exports); +tslib_1.__exportStar(require("./submodules/protocols/index"), exports); diff --git a/amplify/functions/downloadDocument/node_modules/@aws-sdk/core/dist-cjs/submodules/account-id-endpoint/index.js b/amplify/functions/downloadDocument/node_modules/@aws-sdk/core/dist-cjs/submodules/account-id-endpoint/index.js new file mode 100644 index 0000000..c277b79 --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@aws-sdk/core/dist-cjs/submodules/account-id-endpoint/index.js @@ -0,0 +1,95 @@ +"use strict"; +var __defProp = Object.defineProperty; +var __getOwnPropDesc = Object.getOwnPropertyDescriptor; +var __getOwnPropNames = Object.getOwnPropertyNames; +var __hasOwnProp = Object.prototype.hasOwnProperty; +var __name = (target, value) => __defProp(target, "name", { value, configurable: true }); +var __export = (target, all) => { + for (var name in all) + __defProp(target, name, { get: all[name], enumerable: true }); +}; +var __copyProps = (to, from, except, desc) => { + if (from && typeof from === "object" || typeof from === "function") { + for (let key of __getOwnPropNames(from)) + if (!__hasOwnProp.call(to, key) && key !== except) + __defProp(to, key, { get: () => from[key], enumerable: !(desc = __getOwnPropDesc(from, key)) || desc.enumerable }); + } + return to; +}; +var __toCommonJS = (mod) => 
__copyProps(__defProp({}, "__esModule", { value: true }), mod); + +// src/submodules/account-id-endpoint/index.ts +var index_exports = {}; +__export(index_exports, { + ACCOUNT_ID_ENDPOINT_MODE_VALUES: () => ACCOUNT_ID_ENDPOINT_MODE_VALUES, + CONFIG_ACCOUNT_ID_ENDPOINT_MODE: () => CONFIG_ACCOUNT_ID_ENDPOINT_MODE, + DEFAULT_ACCOUNT_ID_ENDPOINT_MODE: () => DEFAULT_ACCOUNT_ID_ENDPOINT_MODE, + ENV_ACCOUNT_ID_ENDPOINT_MODE: () => ENV_ACCOUNT_ID_ENDPOINT_MODE, + NODE_ACCOUNT_ID_ENDPOINT_MODE_CONFIG_OPTIONS: () => NODE_ACCOUNT_ID_ENDPOINT_MODE_CONFIG_OPTIONS, + resolveAccountIdEndpointModeConfig: () => resolveAccountIdEndpointModeConfig, + validateAccountIdEndpointMode: () => validateAccountIdEndpointMode +}); +module.exports = __toCommonJS(index_exports); + +// src/submodules/account-id-endpoint/AccountIdEndpointModeConfigResolver.ts +var import_util_middleware = require("@smithy/util-middleware"); + +// src/submodules/account-id-endpoint/AccountIdEndpointModeConstants.ts +var DEFAULT_ACCOUNT_ID_ENDPOINT_MODE = "preferred"; +var ACCOUNT_ID_ENDPOINT_MODE_VALUES = ["disabled", "preferred", "required"]; +function validateAccountIdEndpointMode(value) { + return ACCOUNT_ID_ENDPOINT_MODE_VALUES.includes(value); +} +__name(validateAccountIdEndpointMode, "validateAccountIdEndpointMode"); + +// src/submodules/account-id-endpoint/AccountIdEndpointModeConfigResolver.ts +var resolveAccountIdEndpointModeConfig = /* @__PURE__ */ __name((input) => { + const { accountIdEndpointMode } = input; + const accountIdEndpointModeProvider = (0, import_util_middleware.normalizeProvider)(accountIdEndpointMode ?? DEFAULT_ACCOUNT_ID_ENDPOINT_MODE); + return Object.assign(input, { + accountIdEndpointMode: /* @__PURE__ */ __name(async () => { + const accIdMode = await accountIdEndpointModeProvider(); + if (!validateAccountIdEndpointMode(accIdMode)) { + throw new Error( + `Invalid value for accountIdEndpointMode: ${accIdMode}. 
Valid values are: "required", "preferred", "disabled".` + ); + } + return accIdMode; + }, "accountIdEndpointMode") + }); +}, "resolveAccountIdEndpointModeConfig"); + +// src/submodules/account-id-endpoint/NodeAccountIdEndpointModeConfigOptions.ts +var err = "Invalid AccountIdEndpointMode value"; +var _throw = /* @__PURE__ */ __name((message) => { + throw new Error(message); +}, "_throw"); +var ENV_ACCOUNT_ID_ENDPOINT_MODE = "AWS_ACCOUNT_ID_ENDPOINT_MODE"; +var CONFIG_ACCOUNT_ID_ENDPOINT_MODE = "account_id_endpoint_mode"; +var NODE_ACCOUNT_ID_ENDPOINT_MODE_CONFIG_OPTIONS = { + environmentVariableSelector: /* @__PURE__ */ __name((env) => { + const value = env[ENV_ACCOUNT_ID_ENDPOINT_MODE]; + if (value && !validateAccountIdEndpointMode(value)) { + _throw(err); + } + return value; + }, "environmentVariableSelector"), + configFileSelector: /* @__PURE__ */ __name((profile) => { + const value = profile[CONFIG_ACCOUNT_ID_ENDPOINT_MODE]; + if (value && !validateAccountIdEndpointMode(value)) { + _throw(err); + } + return value; + }, "configFileSelector"), + default: DEFAULT_ACCOUNT_ID_ENDPOINT_MODE +}; +// Annotate the CommonJS export names for ESM import in node: +0 && (module.exports = { + ACCOUNT_ID_ENDPOINT_MODE_VALUES, + CONFIG_ACCOUNT_ID_ENDPOINT_MODE, + DEFAULT_ACCOUNT_ID_ENDPOINT_MODE, + ENV_ACCOUNT_ID_ENDPOINT_MODE, + NODE_ACCOUNT_ID_ENDPOINT_MODE_CONFIG_OPTIONS, + resolveAccountIdEndpointModeConfig, + validateAccountIdEndpointMode +}); diff --git a/amplify/functions/downloadDocument/node_modules/@aws-sdk/core/dist-cjs/submodules/client/index.js b/amplify/functions/downloadDocument/node_modules/@aws-sdk/core/dist-cjs/submodules/client/index.js new file mode 100644 index 0000000..ebd6c61 --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@aws-sdk/core/dist-cjs/submodules/client/index.js @@ -0,0 +1,78 @@ +"use strict"; +var __defProp = Object.defineProperty; +var __getOwnPropDesc = Object.getOwnPropertyDescriptor; +var __getOwnPropNames = 
Object.getOwnPropertyNames; +var __hasOwnProp = Object.prototype.hasOwnProperty; +var __name = (target, value) => __defProp(target, "name", { value, configurable: true }); +var __export = (target, all) => { + for (var name in all) + __defProp(target, name, { get: all[name], enumerable: true }); +}; +var __copyProps = (to, from, except, desc) => { + if (from && typeof from === "object" || typeof from === "function") { + for (let key of __getOwnPropNames(from)) + if (!__hasOwnProp.call(to, key) && key !== except) + __defProp(to, key, { get: () => from[key], enumerable: !(desc = __getOwnPropDesc(from, key)) || desc.enumerable }); + } + return to; +}; +var __toCommonJS = (mod) => __copyProps(__defProp({}, "__esModule", { value: true }), mod); + +// src/submodules/client/index.ts +var index_exports = {}; +__export(index_exports, { + emitWarningIfUnsupportedVersion: () => emitWarningIfUnsupportedVersion, + setCredentialFeature: () => setCredentialFeature, + setFeature: () => setFeature, + state: () => state +}); +module.exports = __toCommonJS(index_exports); + +// src/submodules/client/emitWarningIfUnsupportedVersion.ts +var state = { + warningEmitted: false +}; +var emitWarningIfUnsupportedVersion = /* @__PURE__ */ __name((version) => { + if (version && !state.warningEmitted && parseInt(version.substring(1, version.indexOf("."))) < 18) { + state.warningEmitted = true; + process.emitWarning( + `NodeDeprecationWarning: The AWS SDK for JavaScript (v3) will +no longer support Node.js 16.x on January 6, 2025. + +To continue receiving updates to AWS services, bug fixes, and security +updates please upgrade to a supported Node.js LTS version. 
+ +More information can be found at: https://a.co/74kJMmI` + ); + } +}, "emitWarningIfUnsupportedVersion"); + +// src/submodules/client/setCredentialFeature.ts +function setCredentialFeature(credentials, feature, value) { + if (!credentials.$source) { + credentials.$source = {}; + } + credentials.$source[feature] = value; + return credentials; +} +__name(setCredentialFeature, "setCredentialFeature"); + +// src/submodules/client/setFeature.ts +function setFeature(context, feature, value) { + if (!context.__aws_sdk_context) { + context.__aws_sdk_context = { + features: {} + }; + } else if (!context.__aws_sdk_context.features) { + context.__aws_sdk_context.features = {}; + } + context.__aws_sdk_context.features[feature] = value; +} +__name(setFeature, "setFeature"); +// Annotate the CommonJS export names for ESM import in node: +0 && (module.exports = { + emitWarningIfUnsupportedVersion, + setCredentialFeature, + setFeature, + state +}); diff --git a/amplify/functions/downloadDocument/node_modules/@aws-sdk/core/dist-cjs/submodules/httpAuthSchemes/index.js b/amplify/functions/downloadDocument/node_modules/@aws-sdk/core/dist-cjs/submodules/httpAuthSchemes/index.js new file mode 100644 index 0000000..82db91e --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@aws-sdk/core/dist-cjs/submodules/httpAuthSchemes/index.js @@ -0,0 +1,382 @@ +"use strict"; +var __defProp = Object.defineProperty; +var __getOwnPropDesc = Object.getOwnPropertyDescriptor; +var __getOwnPropNames = Object.getOwnPropertyNames; +var __hasOwnProp = Object.prototype.hasOwnProperty; +var __name = (target, value) => __defProp(target, "name", { value, configurable: true }); +var __export = (target, all) => { + for (var name in all) + __defProp(target, name, { get: all[name], enumerable: true }); +}; +var __copyProps = (to, from, except, desc) => { + if (from && typeof from === "object" || typeof from === "function") { + for (let key of __getOwnPropNames(from)) + if (!__hasOwnProp.call(to, 
key) && key !== except) + __defProp(to, key, { get: () => from[key], enumerable: !(desc = __getOwnPropDesc(from, key)) || desc.enumerable }); + } + return to; +}; +var __toCommonJS = (mod) => __copyProps(__defProp({}, "__esModule", { value: true }), mod); + +// src/submodules/httpAuthSchemes/index.ts +var index_exports = {}; +__export(index_exports, { + AWSSDKSigV4Signer: () => AWSSDKSigV4Signer, + AwsSdkSigV4ASigner: () => AwsSdkSigV4ASigner, + AwsSdkSigV4Signer: () => AwsSdkSigV4Signer, + NODE_AUTH_SCHEME_PREFERENCE_OPTIONS: () => NODE_AUTH_SCHEME_PREFERENCE_OPTIONS, + NODE_SIGV4A_CONFIG_OPTIONS: () => NODE_SIGV4A_CONFIG_OPTIONS, + resolveAWSSDKSigV4Config: () => resolveAWSSDKSigV4Config, + resolveAwsSdkSigV4AConfig: () => resolveAwsSdkSigV4AConfig, + resolveAwsSdkSigV4Config: () => resolveAwsSdkSigV4Config, + validateSigningProperties: () => validateSigningProperties +}); +module.exports = __toCommonJS(index_exports); + +// src/submodules/httpAuthSchemes/aws_sdk/AwsSdkSigV4Signer.ts +var import_protocol_http2 = require("@smithy/protocol-http"); + +// src/submodules/httpAuthSchemes/utils/getDateHeader.ts +var import_protocol_http = require("@smithy/protocol-http"); +var getDateHeader = /* @__PURE__ */ __name((response) => import_protocol_http.HttpResponse.isInstance(response) ? response.headers?.date ?? 
response.headers?.Date : void 0, "getDateHeader"); + +// src/submodules/httpAuthSchemes/utils/getSkewCorrectedDate.ts +var getSkewCorrectedDate = /* @__PURE__ */ __name((systemClockOffset) => new Date(Date.now() + systemClockOffset), "getSkewCorrectedDate"); + +// src/submodules/httpAuthSchemes/utils/isClockSkewed.ts +var isClockSkewed = /* @__PURE__ */ __name((clockTime, systemClockOffset) => Math.abs(getSkewCorrectedDate(systemClockOffset).getTime() - clockTime) >= 3e5, "isClockSkewed"); + +// src/submodules/httpAuthSchemes/utils/getUpdatedSystemClockOffset.ts +var getUpdatedSystemClockOffset = /* @__PURE__ */ __name((clockTime, currentSystemClockOffset) => { + const clockTimeInMs = Date.parse(clockTime); + if (isClockSkewed(clockTimeInMs, currentSystemClockOffset)) { + return clockTimeInMs - Date.now(); + } + return currentSystemClockOffset; +}, "getUpdatedSystemClockOffset"); + +// src/submodules/httpAuthSchemes/aws_sdk/AwsSdkSigV4Signer.ts +var throwSigningPropertyError = /* @__PURE__ */ __name((name, property) => { + if (!property) { + throw new Error(`Property \`${name}\` is not resolved for AWS SDK SigV4Auth`); + } + return property; +}, "throwSigningPropertyError"); +var validateSigningProperties = /* @__PURE__ */ __name(async (signingProperties) => { + const context = throwSigningPropertyError( + "context", + signingProperties.context + ); + const config = throwSigningPropertyError("config", signingProperties.config); + const authScheme = context.endpointV2?.properties?.authSchemes?.[0]; + const signerFunction = throwSigningPropertyError( + "signer", + config.signer + ); + const signer = await signerFunction(authScheme); + const signingRegion = signingProperties?.signingRegion; + const signingRegionSet = signingProperties?.signingRegionSet; + const signingName = signingProperties?.signingName; + return { + config, + signer, + signingRegion, + signingRegionSet, + signingName + }; +}, "validateSigningProperties"); +var AwsSdkSigV4Signer = class { + static { 
+ __name(this, "AwsSdkSigV4Signer"); + } + async sign(httpRequest, identity, signingProperties) { + if (!import_protocol_http2.HttpRequest.isInstance(httpRequest)) { + throw new Error("The request is not an instance of `HttpRequest` and cannot be signed"); + } + const validatedProps = await validateSigningProperties(signingProperties); + const { config, signer } = validatedProps; + let { signingRegion, signingName } = validatedProps; + const handlerExecutionContext = signingProperties.context; + if (handlerExecutionContext?.authSchemes?.length ?? 0 > 1) { + const [first, second] = handlerExecutionContext.authSchemes; + if (first?.name === "sigv4a" && second?.name === "sigv4") { + signingRegion = second?.signingRegion ?? signingRegion; + signingName = second?.signingName ?? signingName; + } + } + const signedRequest = await signer.sign(httpRequest, { + signingDate: getSkewCorrectedDate(config.systemClockOffset), + signingRegion, + signingService: signingName + }); + return signedRequest; + } + errorHandler(signingProperties) { + return (error) => { + const serverTime = error.ServerTime ?? 
getDateHeader(error.$response); + if (serverTime) { + const config = throwSigningPropertyError("config", signingProperties.config); + const initialSystemClockOffset = config.systemClockOffset; + config.systemClockOffset = getUpdatedSystemClockOffset(serverTime, config.systemClockOffset); + const clockSkewCorrected = config.systemClockOffset !== initialSystemClockOffset; + if (clockSkewCorrected && error.$metadata) { + error.$metadata.clockSkewCorrected = true; + } + } + throw error; + }; + } + successHandler(httpResponse, signingProperties) { + const dateHeader = getDateHeader(httpResponse); + if (dateHeader) { + const config = throwSigningPropertyError("config", signingProperties.config); + config.systemClockOffset = getUpdatedSystemClockOffset(dateHeader, config.systemClockOffset); + } + } +}; +var AWSSDKSigV4Signer = AwsSdkSigV4Signer; + +// src/submodules/httpAuthSchemes/aws_sdk/AwsSdkSigV4ASigner.ts +var import_protocol_http3 = require("@smithy/protocol-http"); +var AwsSdkSigV4ASigner = class extends AwsSdkSigV4Signer { + static { + __name(this, "AwsSdkSigV4ASigner"); + } + async sign(httpRequest, identity, signingProperties) { + if (!import_protocol_http3.HttpRequest.isInstance(httpRequest)) { + throw new Error("The request is not an instance of `HttpRequest` and cannot be signed"); + } + const { config, signer, signingRegion, signingRegionSet, signingName } = await validateSigningProperties( + signingProperties + ); + const configResolvedSigningRegionSet = await config.sigv4aSigningRegionSet?.(); + const multiRegionOverride = (configResolvedSigningRegionSet ?? signingRegionSet ?? 
[signingRegion]).join(","); + const signedRequest = await signer.sign(httpRequest, { + signingDate: getSkewCorrectedDate(config.systemClockOffset), + signingRegion: multiRegionOverride, + signingService: signingName + }); + return signedRequest; + } +}; + +// src/submodules/httpAuthSchemes/utils/getArrayForCommaSeparatedString.ts +var getArrayForCommaSeparatedString = /* @__PURE__ */ __name((str) => typeof str === "string" && str.length > 0 ? str.split(",").map((item) => item.trim()) : [], "getArrayForCommaSeparatedString"); + +// src/submodules/httpAuthSchemes/aws_sdk/NODE_AUTH_SCHEME_PREFERENCE_OPTIONS.ts +var NODE_AUTH_SCHEME_PREFERENCE_ENV_KEY = "AWS_AUTH_SCHEME_PREFERENCE"; +var NODE_AUTH_SCHEME_PREFERENCE_CONFIG_KEY = "auth_scheme_preference"; +var NODE_AUTH_SCHEME_PREFERENCE_OPTIONS = { + /** + * Retrieves auth scheme preference from environment variables + * @param env - Node process environment object + * @returns Array of auth scheme strings if preference is set, undefined otherwise + */ + environmentVariableSelector: /* @__PURE__ */ __name((env) => { + if (!(NODE_AUTH_SCHEME_PREFERENCE_ENV_KEY in env)) return void 0; + return getArrayForCommaSeparatedString(env[NODE_AUTH_SCHEME_PREFERENCE_ENV_KEY]); + }, "environmentVariableSelector"), + /** + * Retrieves auth scheme preference from config file + * @param profile - Config profile object + * @returns Array of auth scheme strings if preference is set, undefined otherwise + */ + configFileSelector: /* @__PURE__ */ __name((profile) => { + if (!(NODE_AUTH_SCHEME_PREFERENCE_CONFIG_KEY in profile)) return void 0; + return getArrayForCommaSeparatedString(profile[NODE_AUTH_SCHEME_PREFERENCE_CONFIG_KEY]); + }, "configFileSelector"), + /** + * Default auth scheme preference if not specified in environment or config + */ + default: [] +}; + +// src/submodules/httpAuthSchemes/aws_sdk/resolveAwsSdkSigV4AConfig.ts +var import_core = require("@smithy/core"); +var import_property_provider = 
require("@smithy/property-provider"); +var resolveAwsSdkSigV4AConfig = /* @__PURE__ */ __name((config) => { + config.sigv4aSigningRegionSet = (0, import_core.normalizeProvider)(config.sigv4aSigningRegionSet); + return config; +}, "resolveAwsSdkSigV4AConfig"); +var NODE_SIGV4A_CONFIG_OPTIONS = { + environmentVariableSelector(env) { + if (env.AWS_SIGV4A_SIGNING_REGION_SET) { + return env.AWS_SIGV4A_SIGNING_REGION_SET.split(",").map((_) => _.trim()); + } + throw new import_property_provider.ProviderError("AWS_SIGV4A_SIGNING_REGION_SET not set in env.", { + tryNextLink: true + }); + }, + configFileSelector(profile) { + if (profile.sigv4a_signing_region_set) { + return (profile.sigv4a_signing_region_set ?? "").split(",").map((_) => _.trim()); + } + throw new import_property_provider.ProviderError("sigv4a_signing_region_set not set in profile.", { + tryNextLink: true + }); + }, + default: void 0 +}; + +// src/submodules/httpAuthSchemes/aws_sdk/resolveAwsSdkSigV4Config.ts +var import_client = require("@aws-sdk/core/client"); +var import_core2 = require("@smithy/core"); +var import_signature_v4 = require("@smithy/signature-v4"); +var resolveAwsSdkSigV4Config = /* @__PURE__ */ __name((config) => { + let inputCredentials = config.credentials; + let isUserSupplied = !!config.credentials; + let resolvedCredentials = void 0; + Object.defineProperty(config, "credentials", { + set(credentials) { + if (credentials && credentials !== inputCredentials && credentials !== resolvedCredentials) { + isUserSupplied = true; + } + inputCredentials = credentials; + const memoizedProvider = normalizeCredentialProvider(config, { + credentials: inputCredentials, + credentialDefaultProvider: config.credentialDefaultProvider + }); + const boundProvider = bindCallerConfig(config, memoizedProvider); + if (isUserSupplied && !boundProvider.attributed) { + resolvedCredentials = /* @__PURE__ */ __name(async (options) => boundProvider(options).then( + (creds) => (0, 
import_client.setCredentialFeature)(creds, "CREDENTIALS_CODE", "e") + ), "resolvedCredentials"); + resolvedCredentials.memoized = boundProvider.memoized; + resolvedCredentials.configBound = boundProvider.configBound; + resolvedCredentials.attributed = true; + } else { + resolvedCredentials = boundProvider; + } + }, + get() { + return resolvedCredentials; + }, + enumerable: true, + configurable: true + }); + config.credentials = inputCredentials; + const { + // Default for signingEscapePath + signingEscapePath = true, + // Default for systemClockOffset + systemClockOffset = config.systemClockOffset || 0, + // No default for sha256 since it is platform dependent + sha256 + } = config; + let signer; + if (config.signer) { + signer = (0, import_core2.normalizeProvider)(config.signer); + } else if (config.regionInfoProvider) { + signer = /* @__PURE__ */ __name(() => (0, import_core2.normalizeProvider)(config.region)().then( + async (region) => [ + await config.regionInfoProvider(region, { + useFipsEndpoint: await config.useFipsEndpoint(), + useDualstackEndpoint: await config.useDualstackEndpoint() + }) || {}, + region + ] + ).then(([regionInfo, region]) => { + const { signingRegion, signingService } = regionInfo; + config.signingRegion = config.signingRegion || signingRegion || region; + config.signingName = config.signingName || signingService || config.serviceId; + const params = { + ...config, + credentials: config.credentials, + region: config.signingRegion, + service: config.signingName, + sha256, + uriEscapePath: signingEscapePath + }; + const SignerCtor = config.signerConstructor || import_signature_v4.SignatureV4; + return new SignerCtor(params); + }), "signer"); + } else { + signer = /* @__PURE__ */ __name(async (authScheme) => { + authScheme = Object.assign( + {}, + { + name: "sigv4", + signingName: config.signingName || config.defaultSigningName, + signingRegion: await (0, import_core2.normalizeProvider)(config.region)(), + properties: {} + }, + authScheme + 
); + const signingRegion = authScheme.signingRegion; + const signingService = authScheme.signingName; + config.signingRegion = config.signingRegion || signingRegion; + config.signingName = config.signingName || signingService || config.serviceId; + const params = { + ...config, + credentials: config.credentials, + region: config.signingRegion, + service: config.signingName, + sha256, + uriEscapePath: signingEscapePath + }; + const SignerCtor = config.signerConstructor || import_signature_v4.SignatureV4; + return new SignerCtor(params); + }, "signer"); + } + const resolvedConfig = Object.assign(config, { + systemClockOffset, + signingEscapePath, + signer + }); + return resolvedConfig; +}, "resolveAwsSdkSigV4Config"); +var resolveAWSSDKSigV4Config = resolveAwsSdkSigV4Config; +function normalizeCredentialProvider(config, { + credentials, + credentialDefaultProvider +}) { + let credentialsProvider; + if (credentials) { + if (!credentials?.memoized) { + credentialsProvider = (0, import_core2.memoizeIdentityProvider)(credentials, import_core2.isIdentityExpired, import_core2.doesIdentityRequireRefresh); + } else { + credentialsProvider = credentials; + } + } else { + if (credentialDefaultProvider) { + credentialsProvider = (0, import_core2.normalizeProvider)( + credentialDefaultProvider( + Object.assign({}, config, { + parentClientConfig: config + }) + ) + ); + } else { + credentialsProvider = /* @__PURE__ */ __name(async () => { + throw new Error( + "@aws-sdk/core::resolveAwsSdkSigV4Config - `credentials` not provided and no credentialDefaultProvider was configured." 
+ ); + }, "credentialsProvider"); + } + } + credentialsProvider.memoized = true; + return credentialsProvider; +} +__name(normalizeCredentialProvider, "normalizeCredentialProvider"); +function bindCallerConfig(config, credentialsProvider) { + if (credentialsProvider.configBound) { + return credentialsProvider; + } + const fn = /* @__PURE__ */ __name(async (options) => credentialsProvider({ ...options, callerClientConfig: config }), "fn"); + fn.memoized = credentialsProvider.memoized; + fn.configBound = true; + return fn; +} +__name(bindCallerConfig, "bindCallerConfig"); +// Annotate the CommonJS export names for ESM import in node: +0 && (module.exports = { + AWSSDKSigV4Signer, + AwsSdkSigV4ASigner, + AwsSdkSigV4Signer, + NODE_AUTH_SCHEME_PREFERENCE_OPTIONS, + NODE_SIGV4A_CONFIG_OPTIONS, + resolveAWSSDKSigV4Config, + resolveAwsSdkSigV4AConfig, + resolveAwsSdkSigV4Config, + validateSigningProperties +}); diff --git a/amplify/functions/downloadDocument/node_modules/@aws-sdk/core/dist-cjs/submodules/protocols/index.js b/amplify/functions/downloadDocument/node_modules/@aws-sdk/core/dist-cjs/submodules/protocols/index.js new file mode 100644 index 0000000..d84c65b --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@aws-sdk/core/dist-cjs/submodules/protocols/index.js @@ -0,0 +1,227 @@ +"use strict"; +var __defProp = Object.defineProperty; +var __getOwnPropDesc = Object.getOwnPropertyDescriptor; +var __getOwnPropNames = Object.getOwnPropertyNames; +var __hasOwnProp = Object.prototype.hasOwnProperty; +var __name = (target, value) => __defProp(target, "name", { value, configurable: true }); +var __export = (target, all) => { + for (var name in all) + __defProp(target, name, { get: all[name], enumerable: true }); +}; +var __copyProps = (to, from, except, desc) => { + if (from && typeof from === "object" || typeof from === "function") { + for (let key of __getOwnPropNames(from)) + if (!__hasOwnProp.call(to, key) && key !== except) + __defProp(to, key, { get: 
() => from[key], enumerable: !(desc = __getOwnPropDesc(from, key)) || desc.enumerable }); + } + return to; +}; +var __toCommonJS = (mod) => __copyProps(__defProp({}, "__esModule", { value: true }), mod); + +// src/submodules/protocols/index.ts +var index_exports = {}; +__export(index_exports, { + _toBool: () => _toBool, + _toNum: () => _toNum, + _toStr: () => _toStr, + awsExpectUnion: () => awsExpectUnion, + loadRestJsonErrorCode: () => loadRestJsonErrorCode, + loadRestXmlErrorCode: () => loadRestXmlErrorCode, + parseJsonBody: () => parseJsonBody, + parseJsonErrorBody: () => parseJsonErrorBody, + parseXmlBody: () => parseXmlBody, + parseXmlErrorBody: () => parseXmlErrorBody +}); +module.exports = __toCommonJS(index_exports); + +// src/submodules/protocols/coercing-serializers.ts +var _toStr = /* @__PURE__ */ __name((val) => { + if (val == null) { + return val; + } + if (typeof val === "number" || typeof val === "bigint") { + const warning = new Error(`Received number ${val} where a string was expected.`); + warning.name = "Warning"; + console.warn(warning); + return String(val); + } + if (typeof val === "boolean") { + const warning = new Error(`Received boolean ${val} where a string was expected.`); + warning.name = "Warning"; + console.warn(warning); + return String(val); + } + return val; +}, "_toStr"); +var _toBool = /* @__PURE__ */ __name((val) => { + if (val == null) { + return val; + } + if (typeof val === "number") { + } + if (typeof val === "string") { + const lowercase = val.toLowerCase(); + if (val !== "" && lowercase !== "false" && lowercase !== "true") { + const warning = new Error(`Received string "${val}" where a boolean was expected.`); + warning.name = "Warning"; + console.warn(warning); + } + return val !== "" && lowercase !== "false"; + } + return val; +}, "_toBool"); +var _toNum = /* @__PURE__ */ __name((val) => { + if (val == null) { + return val; + } + if (typeof val === "boolean") { + } + if (typeof val === "string") { + const num = 
Number(val); + if (num.toString() !== val) { + const warning = new Error(`Received string "${val}" where a number was expected.`); + warning.name = "Warning"; + console.warn(warning); + return val; + } + return num; + } + return val; +}, "_toNum"); + +// src/submodules/protocols/json/awsExpectUnion.ts +var import_smithy_client = require("@smithy/smithy-client"); +var awsExpectUnion = /* @__PURE__ */ __name((value) => { + if (value == null) { + return void 0; + } + if (typeof value === "object" && "__type" in value) { + delete value.__type; + } + return (0, import_smithy_client.expectUnion)(value); +}, "awsExpectUnion"); + +// src/submodules/protocols/common.ts +var import_smithy_client2 = require("@smithy/smithy-client"); +var collectBodyString = /* @__PURE__ */ __name((streamBody, context) => (0, import_smithy_client2.collectBody)(streamBody, context).then((body) => context.utf8Encoder(body)), "collectBodyString"); + +// src/submodules/protocols/json/parseJsonBody.ts +var parseJsonBody = /* @__PURE__ */ __name((streamBody, context) => collectBodyString(streamBody, context).then((encoded) => { + if (encoded.length) { + try { + return JSON.parse(encoded); + } catch (e) { + if (e?.name === "SyntaxError") { + Object.defineProperty(e, "$responseBodyText", { + value: encoded + }); + } + throw e; + } + } + return {}; +}), "parseJsonBody"); +var parseJsonErrorBody = /* @__PURE__ */ __name(async (errorBody, context) => { + const value = await parseJsonBody(errorBody, context); + value.message = value.message ?? 
value.Message; + return value; +}, "parseJsonErrorBody"); +var loadRestJsonErrorCode = /* @__PURE__ */ __name((output, data) => { + const findKey = /* @__PURE__ */ __name((object, key) => Object.keys(object).find((k) => k.toLowerCase() === key.toLowerCase()), "findKey"); + const sanitizeErrorCode = /* @__PURE__ */ __name((rawValue) => { + let cleanValue = rawValue; + if (typeof cleanValue === "number") { + cleanValue = cleanValue.toString(); + } + if (cleanValue.indexOf(",") >= 0) { + cleanValue = cleanValue.split(",")[0]; + } + if (cleanValue.indexOf(":") >= 0) { + cleanValue = cleanValue.split(":")[0]; + } + if (cleanValue.indexOf("#") >= 0) { + cleanValue = cleanValue.split("#")[1]; + } + return cleanValue; + }, "sanitizeErrorCode"); + const headerKey = findKey(output.headers, "x-amzn-errortype"); + if (headerKey !== void 0) { + return sanitizeErrorCode(output.headers[headerKey]); + } + if (data.code !== void 0) { + return sanitizeErrorCode(data.code); + } + if (data["__type"] !== void 0) { + return sanitizeErrorCode(data["__type"]); + } +}, "loadRestJsonErrorCode"); + +// src/submodules/protocols/xml/parseXmlBody.ts +var import_smithy_client3 = require("@smithy/smithy-client"); +var import_fast_xml_parser = require("fast-xml-parser"); +var parseXmlBody = /* @__PURE__ */ __name((streamBody, context) => collectBodyString(streamBody, context).then((encoded) => { + if (encoded.length) { + const parser = new import_fast_xml_parser.XMLParser({ + attributeNamePrefix: "", + htmlEntities: true, + ignoreAttributes: false, + ignoreDeclaration: true, + parseTagValue: false, + trimValues: false, + tagValueProcessor: /* @__PURE__ */ __name((_, val) => val.trim() === "" && val.includes("\n") ? 
"" : void 0, "tagValueProcessor") + }); + parser.addEntity("#xD", "\r"); + parser.addEntity("#10", "\n"); + let parsedObj; + try { + parsedObj = parser.parse(encoded, true); + } catch (e) { + if (e && typeof e === "object") { + Object.defineProperty(e, "$responseBodyText", { + value: encoded + }); + } + throw e; + } + const textNodeName = "#text"; + const key = Object.keys(parsedObj)[0]; + const parsedObjToReturn = parsedObj[key]; + if (parsedObjToReturn[textNodeName]) { + parsedObjToReturn[key] = parsedObjToReturn[textNodeName]; + delete parsedObjToReturn[textNodeName]; + } + return (0, import_smithy_client3.getValueFromTextNode)(parsedObjToReturn); + } + return {}; +}), "parseXmlBody"); +var parseXmlErrorBody = /* @__PURE__ */ __name(async (errorBody, context) => { + const value = await parseXmlBody(errorBody, context); + if (value.Error) { + value.Error.message = value.Error.message ?? value.Error.Message; + } + return value; +}, "parseXmlErrorBody"); +var loadRestXmlErrorCode = /* @__PURE__ */ __name((output, data) => { + if (data?.Error?.Code !== void 0) { + return data.Error.Code; + } + if (data?.Code !== void 0) { + return data.Code; + } + if (output.statusCode == 404) { + return "NotFound"; + } +}, "loadRestXmlErrorCode"); +// Annotate the CommonJS export names for ESM import in node: +0 && (module.exports = { + _toBool, + _toNum, + _toStr, + awsExpectUnion, + loadRestJsonErrorCode, + loadRestXmlErrorCode, + parseJsonBody, + parseJsonErrorBody, + parseXmlBody, + parseXmlErrorBody +}); diff --git a/amplify/functions/downloadDocument/node_modules/@aws-sdk/core/dist-es/index.js b/amplify/functions/downloadDocument/node_modules/@aws-sdk/core/dist-es/index.js new file mode 100644 index 0000000..239de7a --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@aws-sdk/core/dist-es/index.js @@ -0,0 +1,3 @@ +export * from "./submodules/client/index"; +export * from "./submodules/httpAuthSchemes/index"; +export * from "./submodules/protocols/index"; diff 
--git a/amplify/functions/downloadDocument/node_modules/@aws-sdk/core/dist-es/submodules/account-id-endpoint/AccountIdEndpointModeConfigResolver.js b/amplify/functions/downloadDocument/node_modules/@aws-sdk/core/dist-es/submodules/account-id-endpoint/AccountIdEndpointModeConfigResolver.js new file mode 100644 index 0000000..cc0c55a --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@aws-sdk/core/dist-es/submodules/account-id-endpoint/AccountIdEndpointModeConfigResolver.js @@ -0,0 +1,15 @@ +import { normalizeProvider } from "@smithy/util-middleware"; +import { DEFAULT_ACCOUNT_ID_ENDPOINT_MODE, validateAccountIdEndpointMode, } from "./AccountIdEndpointModeConstants"; +export const resolveAccountIdEndpointModeConfig = (input) => { + const { accountIdEndpointMode } = input; + const accountIdEndpointModeProvider = normalizeProvider(accountIdEndpointMode ?? DEFAULT_ACCOUNT_ID_ENDPOINT_MODE); + return Object.assign(input, { + accountIdEndpointMode: async () => { + const accIdMode = await accountIdEndpointModeProvider(); + if (!validateAccountIdEndpointMode(accIdMode)) { + throw new Error(`Invalid value for accountIdEndpointMode: ${accIdMode}. 
Valid values are: "required", "preferred", "disabled".`); + } + return accIdMode; + }, + }); +}; diff --git a/amplify/functions/downloadDocument/node_modules/@aws-sdk/core/dist-es/submodules/account-id-endpoint/AccountIdEndpointModeConstants.js b/amplify/functions/downloadDocument/node_modules/@aws-sdk/core/dist-es/submodules/account-id-endpoint/AccountIdEndpointModeConstants.js new file mode 100644 index 0000000..e7a2ca0 --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@aws-sdk/core/dist-es/submodules/account-id-endpoint/AccountIdEndpointModeConstants.js @@ -0,0 +1,5 @@ +export const DEFAULT_ACCOUNT_ID_ENDPOINT_MODE = "preferred"; +export const ACCOUNT_ID_ENDPOINT_MODE_VALUES = ["disabled", "preferred", "required"]; +export function validateAccountIdEndpointMode(value) { + return ACCOUNT_ID_ENDPOINT_MODE_VALUES.includes(value); +} diff --git a/amplify/functions/downloadDocument/node_modules/@aws-sdk/core/dist-es/submodules/account-id-endpoint/NodeAccountIdEndpointModeConfigOptions.js b/amplify/functions/downloadDocument/node_modules/@aws-sdk/core/dist-es/submodules/account-id-endpoint/NodeAccountIdEndpointModeConfigOptions.js new file mode 100644 index 0000000..54832d5 --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@aws-sdk/core/dist-es/submodules/account-id-endpoint/NodeAccountIdEndpointModeConfigOptions.js @@ -0,0 +1,24 @@ +import { DEFAULT_ACCOUNT_ID_ENDPOINT_MODE, validateAccountIdEndpointMode, } from "./AccountIdEndpointModeConstants"; +const err = "Invalid AccountIdEndpointMode value"; +const _throw = (message) => { + throw new Error(message); +}; +export const ENV_ACCOUNT_ID_ENDPOINT_MODE = "AWS_ACCOUNT_ID_ENDPOINT_MODE"; +export const CONFIG_ACCOUNT_ID_ENDPOINT_MODE = "account_id_endpoint_mode"; +export const NODE_ACCOUNT_ID_ENDPOINT_MODE_CONFIG_OPTIONS = { + environmentVariableSelector: (env) => { + const value = env[ENV_ACCOUNT_ID_ENDPOINT_MODE]; + if (value && !validateAccountIdEndpointMode(value)) { + 
_throw(err); + } + return value; + }, + configFileSelector: (profile) => { + const value = profile[CONFIG_ACCOUNT_ID_ENDPOINT_MODE]; + if (value && !validateAccountIdEndpointMode(value)) { + _throw(err); + } + return value; + }, + default: DEFAULT_ACCOUNT_ID_ENDPOINT_MODE, +}; diff --git a/amplify/functions/downloadDocument/node_modules/@aws-sdk/core/dist-es/submodules/account-id-endpoint/index.js b/amplify/functions/downloadDocument/node_modules/@aws-sdk/core/dist-es/submodules/account-id-endpoint/index.js new file mode 100644 index 0000000..52af11d --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@aws-sdk/core/dist-es/submodules/account-id-endpoint/index.js @@ -0,0 +1,3 @@ +export * from "./AccountIdEndpointModeConfigResolver"; +export * from "./AccountIdEndpointModeConstants"; +export * from "./NodeAccountIdEndpointModeConfigOptions"; diff --git a/amplify/functions/downloadDocument/node_modules/@aws-sdk/core/dist-es/submodules/client/emitWarningIfUnsupportedVersion.js b/amplify/functions/downloadDocument/node_modules/@aws-sdk/core/dist-es/submodules/client/emitWarningIfUnsupportedVersion.js new file mode 100644 index 0000000..d1dab1d --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@aws-sdk/core/dist-es/submodules/client/emitWarningIfUnsupportedVersion.js @@ -0,0 +1,15 @@ +export const state = { + warningEmitted: false, +}; +export const emitWarningIfUnsupportedVersion = (version) => { + if (version && !state.warningEmitted && parseInt(version.substring(1, version.indexOf("."))) < 18) { + state.warningEmitted = true; + process.emitWarning(`NodeDeprecationWarning: The AWS SDK for JavaScript (v3) will +no longer support Node.js 16.x on January 6, 2025. + +To continue receiving updates to AWS services, bug fixes, and security +updates please upgrade to a supported Node.js LTS version. 
+ +More information can be found at: https://a.co/74kJMmI`); + } +}; diff --git a/amplify/functions/downloadDocument/node_modules/@aws-sdk/core/dist-es/submodules/client/index.js b/amplify/functions/downloadDocument/node_modules/@aws-sdk/core/dist-es/submodules/client/index.js new file mode 100644 index 0000000..1a2cc9d --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@aws-sdk/core/dist-es/submodules/client/index.js @@ -0,0 +1,3 @@ +export * from "./emitWarningIfUnsupportedVersion"; +export * from "./setCredentialFeature"; +export * from "./setFeature"; diff --git a/amplify/functions/downloadDocument/node_modules/@aws-sdk/core/dist-es/submodules/client/setCredentialFeature.js b/amplify/functions/downloadDocument/node_modules/@aws-sdk/core/dist-es/submodules/client/setCredentialFeature.js new file mode 100644 index 0000000..a489c40 --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@aws-sdk/core/dist-es/submodules/client/setCredentialFeature.js @@ -0,0 +1,7 @@ +export function setCredentialFeature(credentials, feature, value) { + if (!credentials.$source) { + credentials.$source = {}; + } + credentials.$source[feature] = value; + return credentials; +} diff --git a/amplify/functions/downloadDocument/node_modules/@aws-sdk/core/dist-es/submodules/client/setFeature.js b/amplify/functions/downloadDocument/node_modules/@aws-sdk/core/dist-es/submodules/client/setFeature.js new file mode 100644 index 0000000..2d8804b --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@aws-sdk/core/dist-es/submodules/client/setFeature.js @@ -0,0 +1,11 @@ +export function setFeature(context, feature, value) { + if (!context.__aws_sdk_context) { + context.__aws_sdk_context = { + features: {}, + }; + } + else if (!context.__aws_sdk_context.features) { + context.__aws_sdk_context.features = {}; + } + context.__aws_sdk_context.features[feature] = value; +} diff --git 
a/amplify/functions/downloadDocument/node_modules/@aws-sdk/core/dist-es/submodules/httpAuthSchemes/aws_sdk/AwsSdkSigV4ASigner.js b/amplify/functions/downloadDocument/node_modules/@aws-sdk/core/dist-es/submodules/httpAuthSchemes/aws_sdk/AwsSdkSigV4ASigner.js new file mode 100644 index 0000000..548fefb --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@aws-sdk/core/dist-es/submodules/httpAuthSchemes/aws_sdk/AwsSdkSigV4ASigner.js @@ -0,0 +1,20 @@ +import { HttpRequest } from "@smithy/protocol-http"; +import { getSkewCorrectedDate } from "../utils"; +import { AwsSdkSigV4Signer, validateSigningProperties } from "./AwsSdkSigV4Signer"; +export class AwsSdkSigV4ASigner extends AwsSdkSigV4Signer { + async sign(httpRequest, identity, signingProperties) { + if (!HttpRequest.isInstance(httpRequest)) { + throw new Error("The request is not an instance of `HttpRequest` and cannot be signed"); + } + const { config, signer, signingRegion, signingRegionSet, signingName } = await validateSigningProperties(signingProperties); + const configResolvedSigningRegionSet = await config.sigv4aSigningRegionSet?.(); + const multiRegionOverride = (configResolvedSigningRegionSet ?? + signingRegionSet ?? 
[signingRegion]).join(","); + const signedRequest = await signer.sign(httpRequest, { + signingDate: getSkewCorrectedDate(config.systemClockOffset), + signingRegion: multiRegionOverride, + signingService: signingName, + }); + return signedRequest; + } +} diff --git a/amplify/functions/downloadDocument/node_modules/@aws-sdk/core/dist-es/submodules/httpAuthSchemes/aws_sdk/AwsSdkSigV4Signer.js b/amplify/functions/downloadDocument/node_modules/@aws-sdk/core/dist-es/submodules/httpAuthSchemes/aws_sdk/AwsSdkSigV4Signer.js new file mode 100644 index 0000000..ee236cd --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@aws-sdk/core/dist-es/submodules/httpAuthSchemes/aws_sdk/AwsSdkSigV4Signer.js @@ -0,0 +1,72 @@ +import { HttpRequest } from "@smithy/protocol-http"; +import { getDateHeader, getSkewCorrectedDate, getUpdatedSystemClockOffset } from "../utils"; +const throwSigningPropertyError = (name, property) => { + if (!property) { + throw new Error(`Property \`${name}\` is not resolved for AWS SDK SigV4Auth`); + } + return property; +}; +export const validateSigningProperties = async (signingProperties) => { + const context = throwSigningPropertyError("context", signingProperties.context); + const config = throwSigningPropertyError("config", signingProperties.config); + const authScheme = context.endpointV2?.properties?.authSchemes?.[0]; + const signerFunction = throwSigningPropertyError("signer", config.signer); + const signer = await signerFunction(authScheme); + const signingRegion = signingProperties?.signingRegion; + const signingRegionSet = signingProperties?.signingRegionSet; + const signingName = signingProperties?.signingName; + return { + config, + signer, + signingRegion, + signingRegionSet, + signingName, + }; +}; +export class AwsSdkSigV4Signer { + async sign(httpRequest, identity, signingProperties) { + if (!HttpRequest.isInstance(httpRequest)) { + throw new Error("The request is not an instance of `HttpRequest` and cannot be signed"); + } + 
const validatedProps = await validateSigningProperties(signingProperties); + const { config, signer } = validatedProps; + let { signingRegion, signingName } = validatedProps; + const handlerExecutionContext = signingProperties.context; + if (handlerExecutionContext?.authSchemes?.length ?? 0 > 1) { + const [first, second] = handlerExecutionContext.authSchemes; + if (first?.name === "sigv4a" && second?.name === "sigv4") { + signingRegion = second?.signingRegion ?? signingRegion; + signingName = second?.signingName ?? signingName; + } + } + const signedRequest = await signer.sign(httpRequest, { + signingDate: getSkewCorrectedDate(config.systemClockOffset), + signingRegion: signingRegion, + signingService: signingName, + }); + return signedRequest; + } + errorHandler(signingProperties) { + return (error) => { + const serverTime = error.ServerTime ?? getDateHeader(error.$response); + if (serverTime) { + const config = throwSigningPropertyError("config", signingProperties.config); + const initialSystemClockOffset = config.systemClockOffset; + config.systemClockOffset = getUpdatedSystemClockOffset(serverTime, config.systemClockOffset); + const clockSkewCorrected = config.systemClockOffset !== initialSystemClockOffset; + if (clockSkewCorrected && error.$metadata) { + error.$metadata.clockSkewCorrected = true; + } + } + throw error; + }; + } + successHandler(httpResponse, signingProperties) { + const dateHeader = getDateHeader(httpResponse); + if (dateHeader) { + const config = throwSigningPropertyError("config", signingProperties.config); + config.systemClockOffset = getUpdatedSystemClockOffset(dateHeader, config.systemClockOffset); + } + } +} +export const AWSSDKSigV4Signer = AwsSdkSigV4Signer; diff --git a/amplify/functions/downloadDocument/node_modules/@aws-sdk/core/dist-es/submodules/httpAuthSchemes/aws_sdk/NODE_AUTH_SCHEME_PREFERENCE_OPTIONS.js 
b/amplify/functions/downloadDocument/node_modules/@aws-sdk/core/dist-es/submodules/httpAuthSchemes/aws_sdk/NODE_AUTH_SCHEME_PREFERENCE_OPTIONS.js new file mode 100644 index 0000000..17e3d2e --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@aws-sdk/core/dist-es/submodules/httpAuthSchemes/aws_sdk/NODE_AUTH_SCHEME_PREFERENCE_OPTIONS.js @@ -0,0 +1,16 @@ +import { getArrayForCommaSeparatedString } from "../utils/getArrayForCommaSeparatedString"; +const NODE_AUTH_SCHEME_PREFERENCE_ENV_KEY = "AWS_AUTH_SCHEME_PREFERENCE"; +const NODE_AUTH_SCHEME_PREFERENCE_CONFIG_KEY = "auth_scheme_preference"; +export const NODE_AUTH_SCHEME_PREFERENCE_OPTIONS = { + environmentVariableSelector: (env) => { + if (!(NODE_AUTH_SCHEME_PREFERENCE_ENV_KEY in env)) + return undefined; + return getArrayForCommaSeparatedString(env[NODE_AUTH_SCHEME_PREFERENCE_ENV_KEY]); + }, + configFileSelector: (profile) => { + if (!(NODE_AUTH_SCHEME_PREFERENCE_CONFIG_KEY in profile)) + return undefined; + return getArrayForCommaSeparatedString(profile[NODE_AUTH_SCHEME_PREFERENCE_CONFIG_KEY]); + }, + default: [], +}; diff --git a/amplify/functions/downloadDocument/node_modules/@aws-sdk/core/dist-es/submodules/httpAuthSchemes/aws_sdk/index.js b/amplify/functions/downloadDocument/node_modules/@aws-sdk/core/dist-es/submodules/httpAuthSchemes/aws_sdk/index.js new file mode 100644 index 0000000..4071225 --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@aws-sdk/core/dist-es/submodules/httpAuthSchemes/aws_sdk/index.js @@ -0,0 +1,5 @@ +export { AwsSdkSigV4Signer, AWSSDKSigV4Signer, validateSigningProperties } from "./AwsSdkSigV4Signer"; +export { AwsSdkSigV4ASigner } from "./AwsSdkSigV4ASigner"; +export * from "./NODE_AUTH_SCHEME_PREFERENCE_OPTIONS"; +export * from "./resolveAwsSdkSigV4AConfig"; +export * from "./resolveAwsSdkSigV4Config"; diff --git a/amplify/functions/downloadDocument/node_modules/@aws-sdk/core/dist-es/submodules/httpAuthSchemes/aws_sdk/resolveAwsSdkSigV4AConfig.js 
b/amplify/functions/downloadDocument/node_modules/@aws-sdk/core/dist-es/submodules/httpAuthSchemes/aws_sdk/resolveAwsSdkSigV4AConfig.js new file mode 100644 index 0000000..0e62ef0 --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@aws-sdk/core/dist-es/submodules/httpAuthSchemes/aws_sdk/resolveAwsSdkSigV4AConfig.js @@ -0,0 +1,25 @@ +import { normalizeProvider } from "@smithy/core"; +import { ProviderError } from "@smithy/property-provider"; +export const resolveAwsSdkSigV4AConfig = (config) => { + config.sigv4aSigningRegionSet = normalizeProvider(config.sigv4aSigningRegionSet); + return config; +}; +export const NODE_SIGV4A_CONFIG_OPTIONS = { + environmentVariableSelector(env) { + if (env.AWS_SIGV4A_SIGNING_REGION_SET) { + return env.AWS_SIGV4A_SIGNING_REGION_SET.split(",").map((_) => _.trim()); + } + throw new ProviderError("AWS_SIGV4A_SIGNING_REGION_SET not set in env.", { + tryNextLink: true, + }); + }, + configFileSelector(profile) { + if (profile.sigv4a_signing_region_set) { + return (profile.sigv4a_signing_region_set ?? 
"").split(",").map((_) => _.trim()); + } + throw new ProviderError("sigv4a_signing_region_set not set in profile.", { + tryNextLink: true, + }); + }, + default: undefined, +}; diff --git a/amplify/functions/downloadDocument/node_modules/@aws-sdk/core/dist-es/submodules/httpAuthSchemes/aws_sdk/resolveAwsSdkSigV4Config.js b/amplify/functions/downloadDocument/node_modules/@aws-sdk/core/dist-es/submodules/httpAuthSchemes/aws_sdk/resolveAwsSdkSigV4Config.js new file mode 100644 index 0000000..6da968d --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@aws-sdk/core/dist-es/submodules/httpAuthSchemes/aws_sdk/resolveAwsSdkSigV4Config.js @@ -0,0 +1,131 @@ +import { setCredentialFeature } from "@aws-sdk/core/client"; +import { doesIdentityRequireRefresh, isIdentityExpired, memoizeIdentityProvider, normalizeProvider, } from "@smithy/core"; +import { SignatureV4 } from "@smithy/signature-v4"; +export const resolveAwsSdkSigV4Config = (config) => { + let inputCredentials = config.credentials; + let isUserSupplied = !!config.credentials; + let resolvedCredentials = undefined; + Object.defineProperty(config, "credentials", { + set(credentials) { + if (credentials && credentials !== inputCredentials && credentials !== resolvedCredentials) { + isUserSupplied = true; + } + inputCredentials = credentials; + const memoizedProvider = normalizeCredentialProvider(config, { + credentials: inputCredentials, + credentialDefaultProvider: config.credentialDefaultProvider, + }); + const boundProvider = bindCallerConfig(config, memoizedProvider); + if (isUserSupplied && !boundProvider.attributed) { + resolvedCredentials = async (options) => boundProvider(options).then((creds) => setCredentialFeature(creds, "CREDENTIALS_CODE", "e")); + resolvedCredentials.memoized = boundProvider.memoized; + resolvedCredentials.configBound = boundProvider.configBound; + resolvedCredentials.attributed = true; + } + else { + resolvedCredentials = boundProvider; + } + }, + get() { + return 
resolvedCredentials; + }, + enumerable: true, + configurable: true, + }); + config.credentials = inputCredentials; + const { signingEscapePath = true, systemClockOffset = config.systemClockOffset || 0, sha256, } = config; + let signer; + if (config.signer) { + signer = normalizeProvider(config.signer); + } + else if (config.regionInfoProvider) { + signer = () => normalizeProvider(config.region)() + .then(async (region) => [ + (await config.regionInfoProvider(region, { + useFipsEndpoint: await config.useFipsEndpoint(), + useDualstackEndpoint: await config.useDualstackEndpoint(), + })) || {}, + region, + ]) + .then(([regionInfo, region]) => { + const { signingRegion, signingService } = regionInfo; + config.signingRegion = config.signingRegion || signingRegion || region; + config.signingName = config.signingName || signingService || config.serviceId; + const params = { + ...config, + credentials: config.credentials, + region: config.signingRegion, + service: config.signingName, + sha256, + uriEscapePath: signingEscapePath, + }; + const SignerCtor = config.signerConstructor || SignatureV4; + return new SignerCtor(params); + }); + } + else { + signer = async (authScheme) => { + authScheme = Object.assign({}, { + name: "sigv4", + signingName: config.signingName || config.defaultSigningName, + signingRegion: await normalizeProvider(config.region)(), + properties: {}, + }, authScheme); + const signingRegion = authScheme.signingRegion; + const signingService = authScheme.signingName; + config.signingRegion = config.signingRegion || signingRegion; + config.signingName = config.signingName || signingService || config.serviceId; + const params = { + ...config, + credentials: config.credentials, + region: config.signingRegion, + service: config.signingName, + sha256, + uriEscapePath: signingEscapePath, + }; + const SignerCtor = config.signerConstructor || SignatureV4; + return new SignerCtor(params); + }; + } + const resolvedConfig = Object.assign(config, { + systemClockOffset, 
+ signingEscapePath, + signer, + }); + return resolvedConfig; +}; +export const resolveAWSSDKSigV4Config = resolveAwsSdkSigV4Config; +function normalizeCredentialProvider(config, { credentials, credentialDefaultProvider, }) { + let credentialsProvider; + if (credentials) { + if (!credentials?.memoized) { + credentialsProvider = memoizeIdentityProvider(credentials, isIdentityExpired, doesIdentityRequireRefresh); + } + else { + credentialsProvider = credentials; + } + } + else { + if (credentialDefaultProvider) { + credentialsProvider = normalizeProvider(credentialDefaultProvider(Object.assign({}, config, { + parentClientConfig: config, + }))); + } + else { + credentialsProvider = async () => { + throw new Error("@aws-sdk/core::resolveAwsSdkSigV4Config - `credentials` not provided and no credentialDefaultProvider was configured."); + }; + } + } + credentialsProvider.memoized = true; + return credentialsProvider; +} +function bindCallerConfig(config, credentialsProvider) { + if (credentialsProvider.configBound) { + return credentialsProvider; + } + const fn = async (options) => credentialsProvider({ ...options, callerClientConfig: config }); + fn.memoized = credentialsProvider.memoized; + fn.configBound = true; + return fn; +} diff --git a/amplify/functions/downloadDocument/node_modules/@aws-sdk/core/dist-es/submodules/httpAuthSchemes/index.js b/amplify/functions/downloadDocument/node_modules/@aws-sdk/core/dist-es/submodules/httpAuthSchemes/index.js new file mode 100644 index 0000000..29d0c3b --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@aws-sdk/core/dist-es/submodules/httpAuthSchemes/index.js @@ -0,0 +1 @@ +export * from "./aws_sdk"; diff --git a/amplify/functions/downloadDocument/node_modules/@aws-sdk/core/dist-es/submodules/httpAuthSchemes/utils/getArrayForCommaSeparatedString.js b/amplify/functions/downloadDocument/node_modules/@aws-sdk/core/dist-es/submodules/httpAuthSchemes/utils/getArrayForCommaSeparatedString.js new file mode 100644 
index 0000000..aa60799 --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@aws-sdk/core/dist-es/submodules/httpAuthSchemes/utils/getArrayForCommaSeparatedString.js @@ -0,0 +1 @@ +export const getArrayForCommaSeparatedString = (str) => typeof str === "string" && str.length > 0 ? str.split(",").map((item) => item.trim()) : []; diff --git a/amplify/functions/downloadDocument/node_modules/@aws-sdk/core/dist-es/submodules/httpAuthSchemes/utils/getDateHeader.js b/amplify/functions/downloadDocument/node_modules/@aws-sdk/core/dist-es/submodules/httpAuthSchemes/utils/getDateHeader.js new file mode 100644 index 0000000..449c182 --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@aws-sdk/core/dist-es/submodules/httpAuthSchemes/utils/getDateHeader.js @@ -0,0 +1,2 @@ +import { HttpResponse } from "@smithy/protocol-http"; +export const getDateHeader = (response) => HttpResponse.isInstance(response) ? response.headers?.date ?? response.headers?.Date : undefined; diff --git a/amplify/functions/downloadDocument/node_modules/@aws-sdk/core/dist-es/submodules/httpAuthSchemes/utils/getSkewCorrectedDate.js b/amplify/functions/downloadDocument/node_modules/@aws-sdk/core/dist-es/submodules/httpAuthSchemes/utils/getSkewCorrectedDate.js new file mode 100644 index 0000000..6ee8036 --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@aws-sdk/core/dist-es/submodules/httpAuthSchemes/utils/getSkewCorrectedDate.js @@ -0,0 +1 @@ +export const getSkewCorrectedDate = (systemClockOffset) => new Date(Date.now() + systemClockOffset); diff --git a/amplify/functions/downloadDocument/node_modules/@aws-sdk/core/dist-es/submodules/httpAuthSchemes/utils/getUpdatedSystemClockOffset.js b/amplify/functions/downloadDocument/node_modules/@aws-sdk/core/dist-es/submodules/httpAuthSchemes/utils/getUpdatedSystemClockOffset.js new file mode 100644 index 0000000..859c41a --- /dev/null +++ 
b/amplify/functions/downloadDocument/node_modules/@aws-sdk/core/dist-es/submodules/httpAuthSchemes/utils/getUpdatedSystemClockOffset.js @@ -0,0 +1,8 @@ +import { isClockSkewed } from "./isClockSkewed"; +export const getUpdatedSystemClockOffset = (clockTime, currentSystemClockOffset) => { + const clockTimeInMs = Date.parse(clockTime); + if (isClockSkewed(clockTimeInMs, currentSystemClockOffset)) { + return clockTimeInMs - Date.now(); + } + return currentSystemClockOffset; +}; diff --git a/amplify/functions/downloadDocument/node_modules/@aws-sdk/core/dist-es/submodules/httpAuthSchemes/utils/index.js b/amplify/functions/downloadDocument/node_modules/@aws-sdk/core/dist-es/submodules/httpAuthSchemes/utils/index.js new file mode 100644 index 0000000..07c2195 --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@aws-sdk/core/dist-es/submodules/httpAuthSchemes/utils/index.js @@ -0,0 +1,3 @@ +export * from "./getDateHeader"; +export * from "./getSkewCorrectedDate"; +export * from "./getUpdatedSystemClockOffset"; diff --git a/amplify/functions/downloadDocument/node_modules/@aws-sdk/core/dist-es/submodules/httpAuthSchemes/utils/isClockSkewed.js b/amplify/functions/downloadDocument/node_modules/@aws-sdk/core/dist-es/submodules/httpAuthSchemes/utils/isClockSkewed.js new file mode 100644 index 0000000..086d7a8 --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@aws-sdk/core/dist-es/submodules/httpAuthSchemes/utils/isClockSkewed.js @@ -0,0 +1,2 @@ +import { getSkewCorrectedDate } from "./getSkewCorrectedDate"; +export const isClockSkewed = (clockTime, systemClockOffset) => Math.abs(getSkewCorrectedDate(systemClockOffset).getTime() - clockTime) >= 300000; diff --git a/amplify/functions/downloadDocument/node_modules/@aws-sdk/core/dist-es/submodules/protocols/coercing-serializers.js b/amplify/functions/downloadDocument/node_modules/@aws-sdk/core/dist-es/submodules/protocols/coercing-serializers.js new file mode 100644 index 0000000..fce893b --- 
/dev/null +++ b/amplify/functions/downloadDocument/node_modules/@aws-sdk/core/dist-es/submodules/protocols/coercing-serializers.js @@ -0,0 +1,53 @@ +export const _toStr = (val) => { + if (val == null) { + return val; + } + if (typeof val === "number" || typeof val === "bigint") { + const warning = new Error(`Received number ${val} where a string was expected.`); + warning.name = "Warning"; + console.warn(warning); + return String(val); + } + if (typeof val === "boolean") { + const warning = new Error(`Received boolean ${val} where a string was expected.`); + warning.name = "Warning"; + console.warn(warning); + return String(val); + } + return val; +}; +export const _toBool = (val) => { + if (val == null) { + return val; + } + if (typeof val === "number") { + } + if (typeof val === "string") { + const lowercase = val.toLowerCase(); + if (val !== "" && lowercase !== "false" && lowercase !== "true") { + const warning = new Error(`Received string "${val}" where a boolean was expected.`); + warning.name = "Warning"; + console.warn(warning); + } + return val !== "" && lowercase !== "false"; + } + return val; +}; +export const _toNum = (val) => { + if (val == null) { + return val; + } + if (typeof val === "boolean") { + } + if (typeof val === "string") { + const num = Number(val); + if (num.toString() !== val) { + const warning = new Error(`Received string "${val}" where a number was expected.`); + warning.name = "Warning"; + console.warn(warning); + return val; + } + return num; + } + return val; +}; diff --git a/amplify/functions/downloadDocument/node_modules/@aws-sdk/core/dist-es/submodules/protocols/common.js b/amplify/functions/downloadDocument/node_modules/@aws-sdk/core/dist-es/submodules/protocols/common.js new file mode 100644 index 0000000..4348b08 --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@aws-sdk/core/dist-es/submodules/protocols/common.js @@ -0,0 +1,2 @@ +import { collectBody } from "@smithy/smithy-client"; +export const 
collectBodyString = (streamBody, context) => collectBody(streamBody, context).then((body) => context.utf8Encoder(body)); diff --git a/amplify/functions/downloadDocument/node_modules/@aws-sdk/core/dist-es/submodules/protocols/index.js b/amplify/functions/downloadDocument/node_modules/@aws-sdk/core/dist-es/submodules/protocols/index.js new file mode 100644 index 0000000..09a6ac2 --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@aws-sdk/core/dist-es/submodules/protocols/index.js @@ -0,0 +1,4 @@ +export * from "./coercing-serializers"; +export * from "./json/awsExpectUnion"; +export * from "./json/parseJsonBody"; +export * from "./xml/parseXmlBody"; diff --git a/amplify/functions/downloadDocument/node_modules/@aws-sdk/core/dist-es/submodules/protocols/json/awsExpectUnion.js b/amplify/functions/downloadDocument/node_modules/@aws-sdk/core/dist-es/submodules/protocols/json/awsExpectUnion.js new file mode 100644 index 0000000..1c6cc32 --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@aws-sdk/core/dist-es/submodules/protocols/json/awsExpectUnion.js @@ -0,0 +1,10 @@ +import { expectUnion } from "@smithy/smithy-client"; +export const awsExpectUnion = (value) => { + if (value == null) { + return undefined; + } + if (typeof value === "object" && "__type" in value) { + delete value.__type; + } + return expectUnion(value); +}; diff --git a/amplify/functions/downloadDocument/node_modules/@aws-sdk/core/dist-es/submodules/protocols/json/parseJsonBody.js b/amplify/functions/downloadDocument/node_modules/@aws-sdk/core/dist-es/submodules/protocols/json/parseJsonBody.js new file mode 100644 index 0000000..d9c1564 --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@aws-sdk/core/dist-es/submodules/protocols/json/parseJsonBody.js @@ -0,0 +1,51 @@ +import { collectBodyString } from "../common"; +export const parseJsonBody = (streamBody, context) => collectBodyString(streamBody, context).then((encoded) => { + if (encoded.length) { + try 
{ + return JSON.parse(encoded); + } + catch (e) { + if (e?.name === "SyntaxError") { + Object.defineProperty(e, "$responseBodyText", { + value: encoded, + }); + } + throw e; + } + } + return {}; +}); +export const parseJsonErrorBody = async (errorBody, context) => { + const value = await parseJsonBody(errorBody, context); + value.message = value.message ?? value.Message; + return value; +}; +export const loadRestJsonErrorCode = (output, data) => { + const findKey = (object, key) => Object.keys(object).find((k) => k.toLowerCase() === key.toLowerCase()); + const sanitizeErrorCode = (rawValue) => { + let cleanValue = rawValue; + if (typeof cleanValue === "number") { + cleanValue = cleanValue.toString(); + } + if (cleanValue.indexOf(",") >= 0) { + cleanValue = cleanValue.split(",")[0]; + } + if (cleanValue.indexOf(":") >= 0) { + cleanValue = cleanValue.split(":")[0]; + } + if (cleanValue.indexOf("#") >= 0) { + cleanValue = cleanValue.split("#")[1]; + } + return cleanValue; + }; + const headerKey = findKey(output.headers, "x-amzn-errortype"); + if (headerKey !== undefined) { + return sanitizeErrorCode(output.headers[headerKey]); + } + if (data.code !== undefined) { + return sanitizeErrorCode(data.code); + } + if (data["__type"] !== undefined) { + return sanitizeErrorCode(data["__type"]); + } +}; diff --git a/amplify/functions/downloadDocument/node_modules/@aws-sdk/core/dist-es/submodules/protocols/xml/parseXmlBody.js b/amplify/functions/downloadDocument/node_modules/@aws-sdk/core/dist-es/submodules/protocols/xml/parseXmlBody.js new file mode 100644 index 0000000..556a967 --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@aws-sdk/core/dist-es/submodules/protocols/xml/parseXmlBody.js @@ -0,0 +1,57 @@ +import { getValueFromTextNode } from "@smithy/smithy-client"; +import { XMLParser } from "fast-xml-parser"; +import { collectBodyString } from "../common"; +export const parseXmlBody = (streamBody, context) => collectBodyString(streamBody, 
context).then((encoded) => { + if (encoded.length) { + const parser = new XMLParser({ + attributeNamePrefix: "", + htmlEntities: true, + ignoreAttributes: false, + ignoreDeclaration: true, + parseTagValue: false, + trimValues: false, + tagValueProcessor: (_, val) => (val.trim() === "" && val.includes("\n") ? "" : undefined), + }); + parser.addEntity("#xD", "\r"); + parser.addEntity("#10", "\n"); + let parsedObj; + try { + parsedObj = parser.parse(encoded, true); + } + catch (e) { + if (e && typeof e === "object") { + Object.defineProperty(e, "$responseBodyText", { + value: encoded, + }); + } + throw e; + } + const textNodeName = "#text"; + const key = Object.keys(parsedObj)[0]; + const parsedObjToReturn = parsedObj[key]; + if (parsedObjToReturn[textNodeName]) { + parsedObjToReturn[key] = parsedObjToReturn[textNodeName]; + delete parsedObjToReturn[textNodeName]; + } + return getValueFromTextNode(parsedObjToReturn); + } + return {}; +}); +export const parseXmlErrorBody = async (errorBody, context) => { + const value = await parseXmlBody(errorBody, context); + if (value.Error) { + value.Error.message = value.Error.message ?? 
value.Error.Message; + } + return value; +}; +export const loadRestXmlErrorCode = (output, data) => { + if (data?.Error?.Code !== undefined) { + return data.Error.Code; + } + if (data?.Code !== undefined) { + return data.Code; + } + if (output.statusCode == 404) { + return "NotFound"; + } +}; diff --git a/amplify/functions/downloadDocument/node_modules/@aws-sdk/core/dist-types/api-extractor-type-index.d.ts b/amplify/functions/downloadDocument/node_modules/@aws-sdk/core/dist-types/api-extractor-type-index.d.ts new file mode 100644 index 0000000..e83f927 --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@aws-sdk/core/dist-types/api-extractor-type-index.d.ts @@ -0,0 +1,5 @@ +export * from "./index"; +export * from "./submodules/account-id-endpoint/index"; +export * from "./submodules/client/index"; +export * from "./submodules/httpAuthSchemes/index"; +export * from "./submodules/protocols/index"; diff --git a/amplify/functions/downloadDocument/node_modules/@aws-sdk/core/dist-types/index.d.ts b/amplify/functions/downloadDocument/node_modules/@aws-sdk/core/dist-types/index.d.ts new file mode 100644 index 0000000..5d51cdb --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@aws-sdk/core/dist-types/index.d.ts @@ -0,0 +1,22 @@ +/** + * Submodules annotated with "Legacy" are from prior to the submodule system. + * They are exported from the package's root index to preserve backwards compatibility. + * + * New development should go in a proper submodule and not be exported from the root index. + */ +/** + * Legacy submodule. + */ +export * from "./submodules/client/index"; +/** + * Legacy submodule. + */ +export * from "./submodules/httpAuthSchemes/index"; +/** + * Legacy submodule. + */ +export * from "./submodules/protocols/index"; +/** + * Warning: do not export any additional submodules from the root of this package. See readme.md for + * guide on developing submodules. 
+ */ diff --git a/amplify/functions/downloadDocument/node_modules/@aws-sdk/core/dist-types/submodules/account-id-endpoint/AccountIdEndpointModeConfigResolver.d.ts b/amplify/functions/downloadDocument/node_modules/@aws-sdk/core/dist-types/submodules/account-id-endpoint/AccountIdEndpointModeConfigResolver.d.ts new file mode 100644 index 0000000..bf612a2 --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@aws-sdk/core/dist-types/submodules/account-id-endpoint/AccountIdEndpointModeConfigResolver.d.ts @@ -0,0 +1,27 @@ +import { Provider } from "@smithy/types"; +import { AccountIdEndpointMode } from "./AccountIdEndpointModeConstants"; +/** + * @public + */ +export interface AccountIdEndpointModeInputConfig { + /** + * The account ID endpoint mode to use. + */ + accountIdEndpointMode?: AccountIdEndpointMode | Provider; +} +/** + * @internal + */ +interface PreviouslyResolved { +} +/** + * @internal + */ +export interface AccountIdEndpointModeResolvedConfig { + accountIdEndpointMode: Provider; +} +/** + * @internal + */ +export declare const resolveAccountIdEndpointModeConfig: (input: T & AccountIdEndpointModeInputConfig & PreviouslyResolved) => T & AccountIdEndpointModeResolvedConfig; +export {}; diff --git a/amplify/functions/downloadDocument/node_modules/@aws-sdk/core/dist-types/submodules/account-id-endpoint/AccountIdEndpointModeConstants.d.ts b/amplify/functions/downloadDocument/node_modules/@aws-sdk/core/dist-types/submodules/account-id-endpoint/AccountIdEndpointModeConstants.d.ts new file mode 100644 index 0000000..640a747 --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@aws-sdk/core/dist-types/submodules/account-id-endpoint/AccountIdEndpointModeConstants.d.ts @@ -0,0 +1,16 @@ +/** + * @public + */ +export type AccountIdEndpointMode = "disabled" | "preferred" | "required"; +/** + * @internal + */ +export declare const DEFAULT_ACCOUNT_ID_ENDPOINT_MODE = "preferred"; +/** + * @internal + */ +export declare const 
ACCOUNT_ID_ENDPOINT_MODE_VALUES: AccountIdEndpointMode[]; +/** + * @internal + */ +export declare function validateAccountIdEndpointMode(value: any): value is AccountIdEndpointMode; diff --git a/amplify/functions/downloadDocument/node_modules/@aws-sdk/core/dist-types/submodules/account-id-endpoint/NodeAccountIdEndpointModeConfigOptions.d.ts b/amplify/functions/downloadDocument/node_modules/@aws-sdk/core/dist-types/submodules/account-id-endpoint/NodeAccountIdEndpointModeConfigOptions.d.ts new file mode 100644 index 0000000..96b8059 --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@aws-sdk/core/dist-types/submodules/account-id-endpoint/NodeAccountIdEndpointModeConfigOptions.d.ts @@ -0,0 +1,14 @@ +import { LoadedConfigSelectors } from "@smithy/node-config-provider"; +import { AccountIdEndpointMode } from "./AccountIdEndpointModeConstants"; +/** + * @internal + */ +export declare const ENV_ACCOUNT_ID_ENDPOINT_MODE = "AWS_ACCOUNT_ID_ENDPOINT_MODE"; +/** + * @internal + */ +export declare const CONFIG_ACCOUNT_ID_ENDPOINT_MODE = "account_id_endpoint_mode"; +/** + * @internal + */ +export declare const NODE_ACCOUNT_ID_ENDPOINT_MODE_CONFIG_OPTIONS: LoadedConfigSelectors; diff --git a/amplify/functions/downloadDocument/node_modules/@aws-sdk/core/dist-types/submodules/account-id-endpoint/index.d.ts b/amplify/functions/downloadDocument/node_modules/@aws-sdk/core/dist-types/submodules/account-id-endpoint/index.d.ts new file mode 100644 index 0000000..52af11d --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@aws-sdk/core/dist-types/submodules/account-id-endpoint/index.d.ts @@ -0,0 +1,3 @@ +export * from "./AccountIdEndpointModeConfigResolver"; +export * from "./AccountIdEndpointModeConstants"; +export * from "./NodeAccountIdEndpointModeConfigOptions"; diff --git a/amplify/functions/downloadDocument/node_modules/@aws-sdk/core/dist-types/submodules/client/emitWarningIfUnsupportedVersion.d.ts 
b/amplify/functions/downloadDocument/node_modules/@aws-sdk/core/dist-types/submodules/client/emitWarningIfUnsupportedVersion.d.ts new file mode 100644 index 0000000..d97bc8c --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@aws-sdk/core/dist-types/submodules/client/emitWarningIfUnsupportedVersion.d.ts @@ -0,0 +1,12 @@ +export declare const state: { + warningEmitted: boolean; +}; +/** + * @internal + * + * Emits warning if the provided Node.js version string is + * pending deprecation by AWS SDK JSv3. + * + * @param version - The Node.js version string. + */ +export declare const emitWarningIfUnsupportedVersion: (version: string) => void; diff --git a/amplify/functions/downloadDocument/node_modules/@aws-sdk/core/dist-types/submodules/client/index.d.ts b/amplify/functions/downloadDocument/node_modules/@aws-sdk/core/dist-types/submodules/client/index.d.ts new file mode 100644 index 0000000..1a2cc9d --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@aws-sdk/core/dist-types/submodules/client/index.d.ts @@ -0,0 +1,3 @@ +export * from "./emitWarningIfUnsupportedVersion"; +export * from "./setCredentialFeature"; +export * from "./setFeature"; diff --git a/amplify/functions/downloadDocument/node_modules/@aws-sdk/core/dist-types/submodules/client/setCredentialFeature.d.ts b/amplify/functions/downloadDocument/node_modules/@aws-sdk/core/dist-types/submodules/client/setCredentialFeature.d.ts new file mode 100644 index 0000000..b3b4a68 --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@aws-sdk/core/dist-types/submodules/client/setCredentialFeature.d.ts @@ -0,0 +1,7 @@ +import type { AttributedAwsCredentialIdentity, AwsSdkCredentialsFeatures } from "@aws-sdk/types"; +/** + * @internal + * + * @returns the credentials with source feature attribution. 
+ */ +export declare function setCredentialFeature(credentials: AttributedAwsCredentialIdentity, feature: F, value: AwsSdkCredentialsFeatures[F]): AttributedAwsCredentialIdentity; diff --git a/amplify/functions/downloadDocument/node_modules/@aws-sdk/core/dist-types/submodules/client/setFeature.d.ts b/amplify/functions/downloadDocument/node_modules/@aws-sdk/core/dist-types/submodules/client/setFeature.d.ts new file mode 100644 index 0000000..93458bf --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@aws-sdk/core/dist-types/submodules/client/setFeature.d.ts @@ -0,0 +1,12 @@ +import type { AwsHandlerExecutionContext, AwsSdkFeatures } from "@aws-sdk/types"; +/** + * @internal + * Indicates to the request context that a given feature is active. + * + * @param context - handler execution context. + * @param feature - readable name of feature. + * @param value - encoding value of feature. This is required because the + * specification asks the SDK not to include a runtime lookup of all + * the feature identifiers. + */ +export declare function setFeature(context: AwsHandlerExecutionContext, feature: F, value: AwsSdkFeatures[F]): void; diff --git a/amplify/functions/downloadDocument/node_modules/@aws-sdk/core/dist-types/submodules/httpAuthSchemes/aws_sdk/AwsSdkSigV4ASigner.d.ts b/amplify/functions/downloadDocument/node_modules/@aws-sdk/core/dist-types/submodules/httpAuthSchemes/aws_sdk/AwsSdkSigV4ASigner.d.ts new file mode 100644 index 0000000..051b17c --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@aws-sdk/core/dist-types/submodules/httpAuthSchemes/aws_sdk/AwsSdkSigV4ASigner.d.ts @@ -0,0 +1,10 @@ +import { AwsCredentialIdentity, HttpRequest as IHttpRequest } from "@smithy/types"; +import { AwsSdkSigV4Signer } from "./AwsSdkSigV4Signer"; +/** + * @internal + * Note: this is not a signing algorithm implementation. The sign method + * accepts the real signer as an input parameter. 
+ */ +export declare class AwsSdkSigV4ASigner extends AwsSdkSigV4Signer { + sign(httpRequest: IHttpRequest, identity: AwsCredentialIdentity, signingProperties: Record): Promise; +} diff --git a/amplify/functions/downloadDocument/node_modules/@aws-sdk/core/dist-types/submodules/httpAuthSchemes/aws_sdk/AwsSdkSigV4Signer.d.ts b/amplify/functions/downloadDocument/node_modules/@aws-sdk/core/dist-types/submodules/httpAuthSchemes/aws_sdk/AwsSdkSigV4Signer.d.ts new file mode 100644 index 0000000..7c1b550 --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@aws-sdk/core/dist-types/submodules/httpAuthSchemes/aws_sdk/AwsSdkSigV4Signer.d.ts @@ -0,0 +1,43 @@ +import { AuthScheme, AwsCredentialIdentity, HttpRequest as IHttpRequest, HttpResponse, HttpSigner, RequestSigner } from "@smithy/types"; +import { AwsSdkSigV4AAuthResolvedConfig } from "./resolveAwsSdkSigV4AConfig"; +/** + * @internal + */ +interface AwsSdkSigV4Config extends AwsSdkSigV4AAuthResolvedConfig { + systemClockOffset: number; + signer: (authScheme?: AuthScheme) => Promise; +} +/** + * @internal + */ +interface AwsSdkSigV4AuthSigningProperties { + config: AwsSdkSigV4Config; + signer: RequestSigner; + signingRegion?: string; + signingRegionSet?: string[]; + signingName?: string; +} +/** + * @internal + */ +export declare const validateSigningProperties: (signingProperties: Record) => Promise; +/** + * Note: this is not a signing algorithm implementation. The sign method + * accepts the real signer as an input parameter. 
+ * @internal + */ +export declare class AwsSdkSigV4Signer implements HttpSigner { + sign(httpRequest: IHttpRequest, + /** + * `identity` is bound in {@link resolveAWSSDKSigV4Config} + */ + identity: AwsCredentialIdentity, signingProperties: Record): Promise; + errorHandler(signingProperties: Record): (error: Error) => never; + successHandler(httpResponse: HttpResponse | unknown, signingProperties: Record): void; +} +/** + * @internal + * @deprecated renamed to {@link AwsSdkSigV4Signer} + */ +export declare const AWSSDKSigV4Signer: typeof AwsSdkSigV4Signer; +export {}; diff --git a/amplify/functions/downloadDocument/node_modules/@aws-sdk/core/dist-types/submodules/httpAuthSchemes/aws_sdk/NODE_AUTH_SCHEME_PREFERENCE_OPTIONS.d.ts b/amplify/functions/downloadDocument/node_modules/@aws-sdk/core/dist-types/submodules/httpAuthSchemes/aws_sdk/NODE_AUTH_SCHEME_PREFERENCE_OPTIONS.d.ts new file mode 100644 index 0000000..edf3162 --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@aws-sdk/core/dist-types/submodules/httpAuthSchemes/aws_sdk/NODE_AUTH_SCHEME_PREFERENCE_OPTIONS.d.ts @@ -0,0 +1,5 @@ +import { LoadedConfigSelectors } from "@smithy/node-config-provider"; +/** + * @public + */ +export declare const NODE_AUTH_SCHEME_PREFERENCE_OPTIONS: LoadedConfigSelectors; diff --git a/amplify/functions/downloadDocument/node_modules/@aws-sdk/core/dist-types/submodules/httpAuthSchemes/aws_sdk/index.d.ts b/amplify/functions/downloadDocument/node_modules/@aws-sdk/core/dist-types/submodules/httpAuthSchemes/aws_sdk/index.d.ts new file mode 100644 index 0000000..4071225 --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@aws-sdk/core/dist-types/submodules/httpAuthSchemes/aws_sdk/index.d.ts @@ -0,0 +1,5 @@ +export { AwsSdkSigV4Signer, AWSSDKSigV4Signer, validateSigningProperties } from "./AwsSdkSigV4Signer"; +export { AwsSdkSigV4ASigner } from "./AwsSdkSigV4ASigner"; +export * from "./NODE_AUTH_SCHEME_PREFERENCE_OPTIONS"; +export * from 
"./resolveAwsSdkSigV4AConfig"; +export * from "./resolveAwsSdkSigV4Config"; diff --git a/amplify/functions/downloadDocument/node_modules/@aws-sdk/core/dist-types/submodules/httpAuthSchemes/aws_sdk/resolveAwsSdkSigV4AConfig.d.ts b/amplify/functions/downloadDocument/node_modules/@aws-sdk/core/dist-types/submodules/httpAuthSchemes/aws_sdk/resolveAwsSdkSigV4AConfig.d.ts new file mode 100644 index 0000000..f741625 --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@aws-sdk/core/dist-types/submodules/httpAuthSchemes/aws_sdk/resolveAwsSdkSigV4AConfig.d.ts @@ -0,0 +1,38 @@ +import { LoadedConfigSelectors } from "@smithy/node-config-provider"; +import { Provider } from "@smithy/types"; +/** + * @public + */ +export interface AwsSdkSigV4AAuthInputConfig { + /** + * This option will override the AWS sigv4a + * signing regionSet from any other source. + * + * The lookup order is: + * 1. this value + * 2. configuration file value of sigv4a_signing_region_set. + * 3. environment value of AWS_SIGV4A_SIGNING_REGION_SET. + * 4. signingRegionSet given by endpoint resolution. + * 5. the singular region of the SDK client. 
+ */ + sigv4aSigningRegionSet?: string[] | undefined | Provider; +} +/** + * @internal + */ +export interface AwsSdkSigV4APreviouslyResolved { +} +/** + * @internal + */ +export interface AwsSdkSigV4AAuthResolvedConfig { + sigv4aSigningRegionSet: Provider; +} +/** + * @internal + */ +export declare const resolveAwsSdkSigV4AConfig: (config: T & AwsSdkSigV4AAuthInputConfig & AwsSdkSigV4APreviouslyResolved) => T & AwsSdkSigV4AAuthResolvedConfig; +/** + * @internal + */ +export declare const NODE_SIGV4A_CONFIG_OPTIONS: LoadedConfigSelectors; diff --git a/amplify/functions/downloadDocument/node_modules/@aws-sdk/core/dist-types/submodules/httpAuthSchemes/aws_sdk/resolveAwsSdkSigV4Config.d.ts b/amplify/functions/downloadDocument/node_modules/@aws-sdk/core/dist-types/submodules/httpAuthSchemes/aws_sdk/resolveAwsSdkSigV4Config.d.ts new file mode 100644 index 0000000..cf42eec --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@aws-sdk/core/dist-types/submodules/httpAuthSchemes/aws_sdk/resolveAwsSdkSigV4Config.d.ts @@ -0,0 +1,117 @@ +import type { MergeFunctions } from "@aws-sdk/types"; +import { SignatureV4CryptoInit, SignatureV4Init } from "@smithy/signature-v4"; +import { AuthScheme, AwsCredentialIdentity, AwsCredentialIdentityProvider, ChecksumConstructor, HashConstructor, MemoizedProvider, Provider, RegionInfoProvider, RequestSigner } from "@smithy/types"; +/** + * @public + */ +export interface AwsSdkSigV4AuthInputConfig { + /** + * The credentials used to sign requests. + */ + credentials?: AwsCredentialIdentity | AwsCredentialIdentityProvider; + /** + * The signer to use when signing requests. + */ + signer?: RequestSigner | ((authScheme?: AuthScheme) => Promise); + /** + * Whether to escape request path when signing the request. + */ + signingEscapePath?: boolean; + /** + * An offset value in milliseconds to apply to all signing times. + */ + systemClockOffset?: number; + /** + * The region where you want to sign your request against. 
This + * can be different to the region in the endpoint. + */ + signingRegion?: string; + /** + * The injectable SigV4-compatible signer class constructor. If not supplied, + * regular SignatureV4 constructor will be used. + * + * @internal + */ + signerConstructor?: new (options: SignatureV4Init & SignatureV4CryptoInit) => RequestSigner; +} +/** + * Used to indicate whether a credential provider function was memoized by this resolver. + * @public + */ +export type AwsSdkSigV4Memoized = { + /** + * The credential provider has been memoized by the AWS SDK SigV4 config resolver. + */ + memoized?: boolean; + /** + * The credential provider has the caller client config object bound to its arguments. + */ + configBound?: boolean; + /** + * Function is wrapped with attribution transform. + */ + attributed?: boolean; +}; +/** + * @internal + */ +export interface AwsSdkSigV4PreviouslyResolved { + credentialDefaultProvider?: (input: any) => MemoizedProvider; + region: string | Provider; + sha256: ChecksumConstructor | HashConstructor; + signingName?: string; + regionInfoProvider?: RegionInfoProvider; + defaultSigningName?: string; + serviceId: string; + useFipsEndpoint: Provider; + useDualstackEndpoint: Provider; +} +/** + * @internal + */ +export interface AwsSdkSigV4AuthResolvedConfig { + /** + * Resolved value for input config {@link AwsSdkSigV4AuthInputConfig.credentials} + * This provider MAY memoize the loaded credentials for certain period. 
+ */ + credentials: MergeFunctions> & AwsSdkSigV4Memoized; + /** + * Resolved value for input config {@link AwsSdkSigV4AuthInputConfig.signer} + */ + signer: (authScheme?: AuthScheme) => Promise; + /** + * Resolved value for input config {@link AwsSdkSigV4AuthInputConfig.signingEscapePath} + */ + signingEscapePath: boolean; + /** + * Resolved value for input config {@link AwsSdkSigV4AuthInputConfig.systemClockOffset} + */ + systemClockOffset: number; +} +/** + * @internal + */ +export declare const resolveAwsSdkSigV4Config: (config: T & AwsSdkSigV4AuthInputConfig & AwsSdkSigV4PreviouslyResolved) => T & AwsSdkSigV4AuthResolvedConfig; +/** + * @internal + * @deprecated renamed to {@link AwsSdkSigV4AuthInputConfig} + */ +export interface AWSSDKSigV4AuthInputConfig extends AwsSdkSigV4AuthInputConfig { +} +/** + * @internal + * @deprecated renamed to {@link AwsSdkSigV4PreviouslyResolved} + */ +export interface AWSSDKSigV4PreviouslyResolved extends AwsSdkSigV4PreviouslyResolved { +} +/** + * @internal + * @deprecated renamed to {@link AwsSdkSigV4AuthResolvedConfig} + */ +export interface AWSSDKSigV4AuthResolvedConfig extends AwsSdkSigV4AuthResolvedConfig { +} +/** + * @internal + * @deprecated renamed to {@link resolveAwsSdkSigV4Config} + */ +export declare const resolveAWSSDKSigV4Config: (config: T & AwsSdkSigV4AuthInputConfig & AwsSdkSigV4PreviouslyResolved) => T & AwsSdkSigV4AuthResolvedConfig; diff --git a/amplify/functions/downloadDocument/node_modules/@aws-sdk/core/dist-types/submodules/httpAuthSchemes/index.d.ts b/amplify/functions/downloadDocument/node_modules/@aws-sdk/core/dist-types/submodules/httpAuthSchemes/index.d.ts new file mode 100644 index 0000000..29d0c3b --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@aws-sdk/core/dist-types/submodules/httpAuthSchemes/index.d.ts @@ -0,0 +1 @@ +export * from "./aws_sdk"; diff --git 
a/amplify/functions/downloadDocument/node_modules/@aws-sdk/core/dist-types/submodules/httpAuthSchemes/utils/getArrayForCommaSeparatedString.d.ts b/amplify/functions/downloadDocument/node_modules/@aws-sdk/core/dist-types/submodules/httpAuthSchemes/utils/getArrayForCommaSeparatedString.d.ts new file mode 100644 index 0000000..823921b --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@aws-sdk/core/dist-types/submodules/httpAuthSchemes/utils/getArrayForCommaSeparatedString.d.ts @@ -0,0 +1,8 @@ +/** + * Converts a comma-separated string into an array of trimmed strings + * @param str The comma-separated input string to split + * @returns Array of trimmed strings split from the input + * + * @internal + */ +export declare const getArrayForCommaSeparatedString: (str: string) => string[]; diff --git a/amplify/functions/downloadDocument/node_modules/@aws-sdk/core/dist-types/submodules/httpAuthSchemes/utils/getDateHeader.d.ts b/amplify/functions/downloadDocument/node_modules/@aws-sdk/core/dist-types/submodules/httpAuthSchemes/utils/getDateHeader.d.ts new file mode 100644 index 0000000..2c9157b --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@aws-sdk/core/dist-types/submodules/httpAuthSchemes/utils/getDateHeader.d.ts @@ -0,0 +1,4 @@ +/** + * @internal + */ +export declare const getDateHeader: (response: unknown) => string | undefined; diff --git a/amplify/functions/downloadDocument/node_modules/@aws-sdk/core/dist-types/submodules/httpAuthSchemes/utils/getSkewCorrectedDate.d.ts b/amplify/functions/downloadDocument/node_modules/@aws-sdk/core/dist-types/submodules/httpAuthSchemes/utils/getSkewCorrectedDate.d.ts new file mode 100644 index 0000000..4b72690 --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@aws-sdk/core/dist-types/submodules/httpAuthSchemes/utils/getSkewCorrectedDate.d.ts @@ -0,0 +1,8 @@ +/** + * @internal + * + * Returns a date that is corrected for clock skew. 
+ * + * @param systemClockOffset The offset of the system clock in milliseconds. + */ +export declare const getSkewCorrectedDate: (systemClockOffset: number) => Date; diff --git a/amplify/functions/downloadDocument/node_modules/@aws-sdk/core/dist-types/submodules/httpAuthSchemes/utils/getUpdatedSystemClockOffset.d.ts b/amplify/functions/downloadDocument/node_modules/@aws-sdk/core/dist-types/submodules/httpAuthSchemes/utils/getUpdatedSystemClockOffset.d.ts new file mode 100644 index 0000000..2d554b8 --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@aws-sdk/core/dist-types/submodules/httpAuthSchemes/utils/getUpdatedSystemClockOffset.d.ts @@ -0,0 +1,10 @@ +/** + * @internal + * + * If clock is skewed, it returns the difference between serverTime and current time. + * If clock is not skewed, it returns currentSystemClockOffset. + * + * @param clockTime The string value of the server time. + * @param currentSystemClockOffset The current system clock offset. + */ +export declare const getUpdatedSystemClockOffset: (clockTime: string, currentSystemClockOffset: number) => number; diff --git a/amplify/functions/downloadDocument/node_modules/@aws-sdk/core/dist-types/submodules/httpAuthSchemes/utils/index.d.ts b/amplify/functions/downloadDocument/node_modules/@aws-sdk/core/dist-types/submodules/httpAuthSchemes/utils/index.d.ts new file mode 100644 index 0000000..07c2195 --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@aws-sdk/core/dist-types/submodules/httpAuthSchemes/utils/index.d.ts @@ -0,0 +1,3 @@ +export * from "./getDateHeader"; +export * from "./getSkewCorrectedDate"; +export * from "./getUpdatedSystemClockOffset"; diff --git a/amplify/functions/downloadDocument/node_modules/@aws-sdk/core/dist-types/submodules/httpAuthSchemes/utils/isClockSkewed.d.ts b/amplify/functions/downloadDocument/node_modules/@aws-sdk/core/dist-types/submodules/httpAuthSchemes/utils/isClockSkewed.d.ts new file mode 100644 index 0000000..970fa15 --- /dev/null 
+++ b/amplify/functions/downloadDocument/node_modules/@aws-sdk/core/dist-types/submodules/httpAuthSchemes/utils/isClockSkewed.d.ts @@ -0,0 +1,9 @@ +/** + * @internal + * + * Checks if the provided date is within the skew window of 300000ms. + * + * @param clockTime - The time to check for skew in milliseconds. + * @param systemClockOffset - The offset of the system clock in milliseconds. + */ +export declare const isClockSkewed: (clockTime: number, systemClockOffset: number) => boolean; diff --git a/amplify/functions/downloadDocument/node_modules/@aws-sdk/core/dist-types/submodules/protocols/coercing-serializers.d.ts b/amplify/functions/downloadDocument/node_modules/@aws-sdk/core/dist-types/submodules/protocols/coercing-serializers.d.ts new file mode 100644 index 0000000..10d9d39 --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@aws-sdk/core/dist-types/submodules/protocols/coercing-serializers.d.ts @@ -0,0 +1,18 @@ +/** + * @internal + * + * Used for awsQueryCompatibility trait. + */ +export declare const _toStr: (val: unknown) => string | undefined; +/** + * @internal + * + * Used for awsQueryCompatibility trait. + */ +export declare const _toBool: (val: unknown) => boolean | undefined; +/** + * @internal + * + * Used for awsQueryCompatibility trait. 
+ */ +export declare const _toNum: (val: unknown) => number | undefined; diff --git a/amplify/functions/downloadDocument/node_modules/@aws-sdk/core/dist-types/submodules/protocols/common.d.ts b/amplify/functions/downloadDocument/node_modules/@aws-sdk/core/dist-types/submodules/protocols/common.d.ts new file mode 100644 index 0000000..ec78fb2 --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@aws-sdk/core/dist-types/submodules/protocols/common.d.ts @@ -0,0 +1,2 @@ +import type { SerdeContext } from "@smithy/types"; +export declare const collectBodyString: (streamBody: any, context: SerdeContext) => Promise; diff --git a/amplify/functions/downloadDocument/node_modules/@aws-sdk/core/dist-types/submodules/protocols/index.d.ts b/amplify/functions/downloadDocument/node_modules/@aws-sdk/core/dist-types/submodules/protocols/index.d.ts new file mode 100644 index 0000000..09a6ac2 --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@aws-sdk/core/dist-types/submodules/protocols/index.d.ts @@ -0,0 +1,4 @@ +export * from "./coercing-serializers"; +export * from "./json/awsExpectUnion"; +export * from "./json/parseJsonBody"; +export * from "./xml/parseXmlBody"; diff --git a/amplify/functions/downloadDocument/node_modules/@aws-sdk/core/dist-types/submodules/protocols/json/awsExpectUnion.d.ts b/amplify/functions/downloadDocument/node_modules/@aws-sdk/core/dist-types/submodules/protocols/json/awsExpectUnion.d.ts new file mode 100644 index 0000000..98607ea --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@aws-sdk/core/dist-types/submodules/protocols/json/awsExpectUnion.d.ts @@ -0,0 +1,7 @@ +/** + * @internal + * + * Forwards to Smithy's expectUnion function, but also ignores + * the `__type` field if it is present. 
+ */ +export declare const awsExpectUnion: (value: unknown) => Record | undefined; diff --git a/amplify/functions/downloadDocument/node_modules/@aws-sdk/core/dist-types/submodules/protocols/json/parseJsonBody.d.ts b/amplify/functions/downloadDocument/node_modules/@aws-sdk/core/dist-types/submodules/protocols/json/parseJsonBody.d.ts new file mode 100644 index 0000000..827ffe9 --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@aws-sdk/core/dist-types/submodules/protocols/json/parseJsonBody.d.ts @@ -0,0 +1,13 @@ +import type { HttpResponse, SerdeContext } from "@smithy/types"; +/** + * @internal + */ +export declare const parseJsonBody: (streamBody: any, context: SerdeContext) => any; +/** + * @internal + */ +export declare const parseJsonErrorBody: (errorBody: any, context: SerdeContext) => Promise; +/** + * @internal + */ +export declare const loadRestJsonErrorCode: (output: HttpResponse, data: any) => string | undefined; diff --git a/amplify/functions/downloadDocument/node_modules/@aws-sdk/core/dist-types/submodules/protocols/xml/parseXmlBody.d.ts b/amplify/functions/downloadDocument/node_modules/@aws-sdk/core/dist-types/submodules/protocols/xml/parseXmlBody.d.ts new file mode 100644 index 0000000..30cfc30 --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@aws-sdk/core/dist-types/submodules/protocols/xml/parseXmlBody.d.ts @@ -0,0 +1,13 @@ +import type { HttpResponse, SerdeContext } from "@smithy/types"; +/** + * @internal + */ +export declare const parseXmlBody: (streamBody: any, context: SerdeContext) => any; +/** + * @internal + */ +export declare const parseXmlErrorBody: (errorBody: any, context: SerdeContext) => Promise; +/** + * @internal + */ +export declare const loadRestXmlErrorCode: (output: HttpResponse, data: any) => string | undefined; diff --git a/amplify/functions/downloadDocument/node_modules/@aws-sdk/core/dist-types/ts3.4/api-extractor-type-index.d.ts 
b/amplify/functions/downloadDocument/node_modules/@aws-sdk/core/dist-types/ts3.4/api-extractor-type-index.d.ts new file mode 100644 index 0000000..e83f927 --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@aws-sdk/core/dist-types/ts3.4/api-extractor-type-index.d.ts @@ -0,0 +1,5 @@ +export * from "./index"; +export * from "./submodules/account-id-endpoint/index"; +export * from "./submodules/client/index"; +export * from "./submodules/httpAuthSchemes/index"; +export * from "./submodules/protocols/index"; diff --git a/amplify/functions/downloadDocument/node_modules/@aws-sdk/core/dist-types/ts3.4/index.d.ts b/amplify/functions/downloadDocument/node_modules/@aws-sdk/core/dist-types/ts3.4/index.d.ts new file mode 100644 index 0000000..239de7a --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@aws-sdk/core/dist-types/ts3.4/index.d.ts @@ -0,0 +1,3 @@ +export * from "./submodules/client/index"; +export * from "./submodules/httpAuthSchemes/index"; +export * from "./submodules/protocols/index"; diff --git a/amplify/functions/downloadDocument/node_modules/@aws-sdk/core/dist-types/ts3.4/submodules/account-id-endpoint/AccountIdEndpointModeConfigResolver.d.ts b/amplify/functions/downloadDocument/node_modules/@aws-sdk/core/dist-types/ts3.4/submodules/account-id-endpoint/AccountIdEndpointModeConfigResolver.d.ts new file mode 100644 index 0000000..10d5c21 --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@aws-sdk/core/dist-types/ts3.4/submodules/account-id-endpoint/AccountIdEndpointModeConfigResolver.d.ts @@ -0,0 +1,15 @@ +import { Provider } from "@smithy/types"; +import { AccountIdEndpointMode } from "./AccountIdEndpointModeConstants"; +export interface AccountIdEndpointModeInputConfig { + accountIdEndpointMode?: + | AccountIdEndpointMode + | Provider; +} +interface PreviouslyResolved {} +export interface AccountIdEndpointModeResolvedConfig { + accountIdEndpointMode: Provider; +} +export declare const 
resolveAccountIdEndpointModeConfig: ( + input: T & AccountIdEndpointModeInputConfig & PreviouslyResolved +) => T & AccountIdEndpointModeResolvedConfig; +export {}; diff --git a/amplify/functions/downloadDocument/node_modules/@aws-sdk/core/dist-types/ts3.4/submodules/account-id-endpoint/AccountIdEndpointModeConstants.d.ts b/amplify/functions/downloadDocument/node_modules/@aws-sdk/core/dist-types/ts3.4/submodules/account-id-endpoint/AccountIdEndpointModeConstants.d.ts new file mode 100644 index 0000000..27bdce9 --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@aws-sdk/core/dist-types/ts3.4/submodules/account-id-endpoint/AccountIdEndpointModeConstants.d.ts @@ -0,0 +1,6 @@ +export type AccountIdEndpointMode = "disabled" | "preferred" | "required"; +export declare const DEFAULT_ACCOUNT_ID_ENDPOINT_MODE = "preferred"; +export declare const ACCOUNT_ID_ENDPOINT_MODE_VALUES: AccountIdEndpointMode[]; +export declare function validateAccountIdEndpointMode( + value: any +): value is AccountIdEndpointMode; diff --git a/amplify/functions/downloadDocument/node_modules/@aws-sdk/core/dist-types/ts3.4/submodules/account-id-endpoint/NodeAccountIdEndpointModeConfigOptions.d.ts b/amplify/functions/downloadDocument/node_modules/@aws-sdk/core/dist-types/ts3.4/submodules/account-id-endpoint/NodeAccountIdEndpointModeConfigOptions.d.ts new file mode 100644 index 0000000..9b04566 --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@aws-sdk/core/dist-types/ts3.4/submodules/account-id-endpoint/NodeAccountIdEndpointModeConfigOptions.d.ts @@ -0,0 +1,7 @@ +import { LoadedConfigSelectors } from "@smithy/node-config-provider"; +import { AccountIdEndpointMode } from "./AccountIdEndpointModeConstants"; +export declare const ENV_ACCOUNT_ID_ENDPOINT_MODE = + "AWS_ACCOUNT_ID_ENDPOINT_MODE"; +export declare const CONFIG_ACCOUNT_ID_ENDPOINT_MODE = + "account_id_endpoint_mode"; +export declare const NODE_ACCOUNT_ID_ENDPOINT_MODE_CONFIG_OPTIONS: LoadedConfigSelectors; diff 
--git a/amplify/functions/downloadDocument/node_modules/@aws-sdk/core/dist-types/ts3.4/submodules/account-id-endpoint/index.d.ts b/amplify/functions/downloadDocument/node_modules/@aws-sdk/core/dist-types/ts3.4/submodules/account-id-endpoint/index.d.ts new file mode 100644 index 0000000..52af11d --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@aws-sdk/core/dist-types/ts3.4/submodules/account-id-endpoint/index.d.ts @@ -0,0 +1,3 @@ +export * from "./AccountIdEndpointModeConfigResolver"; +export * from "./AccountIdEndpointModeConstants"; +export * from "./NodeAccountIdEndpointModeConfigOptions"; diff --git a/amplify/functions/downloadDocument/node_modules/@aws-sdk/core/dist-types/ts3.4/submodules/client/emitWarningIfUnsupportedVersion.d.ts b/amplify/functions/downloadDocument/node_modules/@aws-sdk/core/dist-types/ts3.4/submodules/client/emitWarningIfUnsupportedVersion.d.ts new file mode 100644 index 0000000..84af567 --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@aws-sdk/core/dist-types/ts3.4/submodules/client/emitWarningIfUnsupportedVersion.d.ts @@ -0,0 +1,4 @@ +export declare const state: { + warningEmitted: boolean; +}; +export declare const emitWarningIfUnsupportedVersion: (version: string) => void; diff --git a/amplify/functions/downloadDocument/node_modules/@aws-sdk/core/dist-types/ts3.4/submodules/client/index.d.ts b/amplify/functions/downloadDocument/node_modules/@aws-sdk/core/dist-types/ts3.4/submodules/client/index.d.ts new file mode 100644 index 0000000..1a2cc9d --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@aws-sdk/core/dist-types/ts3.4/submodules/client/index.d.ts @@ -0,0 +1,3 @@ +export * from "./emitWarningIfUnsupportedVersion"; +export * from "./setCredentialFeature"; +export * from "./setFeature"; diff --git a/amplify/functions/downloadDocument/node_modules/@aws-sdk/core/dist-types/ts3.4/submodules/client/setCredentialFeature.d.ts 
b/amplify/functions/downloadDocument/node_modules/@aws-sdk/core/dist-types/ts3.4/submodules/client/setCredentialFeature.d.ts new file mode 100644 index 0000000..1336619 --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@aws-sdk/core/dist-types/ts3.4/submodules/client/setCredentialFeature.d.ts @@ -0,0 +1,11 @@ +import { + AttributedAwsCredentialIdentity, + AwsSdkCredentialsFeatures, +} from "@aws-sdk/types"; +export declare function setCredentialFeature< + F extends keyof AwsSdkCredentialsFeatures +>( + credentials: AttributedAwsCredentialIdentity, + feature: F, + value: AwsSdkCredentialsFeatures[F] +): AttributedAwsCredentialIdentity; diff --git a/amplify/functions/downloadDocument/node_modules/@aws-sdk/core/dist-types/ts3.4/submodules/client/setFeature.d.ts b/amplify/functions/downloadDocument/node_modules/@aws-sdk/core/dist-types/ts3.4/submodules/client/setFeature.d.ts new file mode 100644 index 0000000..84482ee --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@aws-sdk/core/dist-types/ts3.4/submodules/client/setFeature.d.ts @@ -0,0 +1,6 @@ +import { AwsHandlerExecutionContext, AwsSdkFeatures } from "@aws-sdk/types"; +export declare function setFeature( + context: AwsHandlerExecutionContext, + feature: F, + value: AwsSdkFeatures[F] +): void; diff --git a/amplify/functions/downloadDocument/node_modules/@aws-sdk/core/dist-types/ts3.4/submodules/httpAuthSchemes/aws_sdk/AwsSdkSigV4ASigner.d.ts b/amplify/functions/downloadDocument/node_modules/@aws-sdk/core/dist-types/ts3.4/submodules/httpAuthSchemes/aws_sdk/AwsSdkSigV4ASigner.d.ts new file mode 100644 index 0000000..b8c2b74 --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@aws-sdk/core/dist-types/ts3.4/submodules/httpAuthSchemes/aws_sdk/AwsSdkSigV4ASigner.d.ts @@ -0,0 +1,12 @@ +import { + AwsCredentialIdentity, + HttpRequest as IHttpRequest, +} from "@smithy/types"; +import { AwsSdkSigV4Signer } from "./AwsSdkSigV4Signer"; +export declare class AwsSdkSigV4ASigner 
extends AwsSdkSigV4Signer { + sign( + httpRequest: IHttpRequest, + identity: AwsCredentialIdentity, + signingProperties: Record + ): Promise; +} diff --git a/amplify/functions/downloadDocument/node_modules/@aws-sdk/core/dist-types/ts3.4/submodules/httpAuthSchemes/aws_sdk/AwsSdkSigV4Signer.d.ts b/amplify/functions/downloadDocument/node_modules/@aws-sdk/core/dist-types/ts3.4/submodules/httpAuthSchemes/aws_sdk/AwsSdkSigV4Signer.d.ts new file mode 100644 index 0000000..0be6b41 --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@aws-sdk/core/dist-types/ts3.4/submodules/httpAuthSchemes/aws_sdk/AwsSdkSigV4Signer.d.ts @@ -0,0 +1,39 @@ +import { + AuthScheme, + AwsCredentialIdentity, + HttpRequest as IHttpRequest, + HttpResponse, + HttpSigner, + RequestSigner, +} from "@smithy/types"; +import { AwsSdkSigV4AAuthResolvedConfig } from "./resolveAwsSdkSigV4AConfig"; +interface AwsSdkSigV4Config extends AwsSdkSigV4AAuthResolvedConfig { + systemClockOffset: number; + signer: (authScheme?: AuthScheme) => Promise; +} +interface AwsSdkSigV4AuthSigningProperties { + config: AwsSdkSigV4Config; + signer: RequestSigner; + signingRegion?: string; + signingRegionSet?: string[]; + signingName?: string; +} +export declare const validateSigningProperties: ( + signingProperties: Record +) => Promise; +export declare class AwsSdkSigV4Signer implements HttpSigner { + sign( + httpRequest: IHttpRequest, + identity: AwsCredentialIdentity, + signingProperties: Record + ): Promise; + errorHandler( + signingProperties: Record + ): (error: Error) => never; + successHandler( + httpResponse: HttpResponse | unknown, + signingProperties: Record + ): void; +} +export declare const AWSSDKSigV4Signer: typeof AwsSdkSigV4Signer; +export {}; diff --git a/amplify/functions/downloadDocument/node_modules/@aws-sdk/core/dist-types/ts3.4/submodules/httpAuthSchemes/aws_sdk/NODE_AUTH_SCHEME_PREFERENCE_OPTIONS.d.ts 
b/amplify/functions/downloadDocument/node_modules/@aws-sdk/core/dist-types/ts3.4/submodules/httpAuthSchemes/aws_sdk/NODE_AUTH_SCHEME_PREFERENCE_OPTIONS.d.ts new file mode 100644 index 0000000..effc1e0 --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@aws-sdk/core/dist-types/ts3.4/submodules/httpAuthSchemes/aws_sdk/NODE_AUTH_SCHEME_PREFERENCE_OPTIONS.d.ts @@ -0,0 +1,4 @@ +import { LoadedConfigSelectors } from "@smithy/node-config-provider"; +export declare const NODE_AUTH_SCHEME_PREFERENCE_OPTIONS: LoadedConfigSelectors< + string[] +>; diff --git a/amplify/functions/downloadDocument/node_modules/@aws-sdk/core/dist-types/ts3.4/submodules/httpAuthSchemes/aws_sdk/index.d.ts b/amplify/functions/downloadDocument/node_modules/@aws-sdk/core/dist-types/ts3.4/submodules/httpAuthSchemes/aws_sdk/index.d.ts new file mode 100644 index 0000000..6047921 --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@aws-sdk/core/dist-types/ts3.4/submodules/httpAuthSchemes/aws_sdk/index.d.ts @@ -0,0 +1,9 @@ +export { + AwsSdkSigV4Signer, + AWSSDKSigV4Signer, + validateSigningProperties, +} from "./AwsSdkSigV4Signer"; +export { AwsSdkSigV4ASigner } from "./AwsSdkSigV4ASigner"; +export * from "./NODE_AUTH_SCHEME_PREFERENCE_OPTIONS"; +export * from "./resolveAwsSdkSigV4AConfig"; +export * from "./resolveAwsSdkSigV4Config"; diff --git a/amplify/functions/downloadDocument/node_modules/@aws-sdk/core/dist-types/ts3.4/submodules/httpAuthSchemes/aws_sdk/resolveAwsSdkSigV4AConfig.d.ts b/amplify/functions/downloadDocument/node_modules/@aws-sdk/core/dist-types/ts3.4/submodules/httpAuthSchemes/aws_sdk/resolveAwsSdkSigV4AConfig.d.ts new file mode 100644 index 0000000..9f949b0 --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@aws-sdk/core/dist-types/ts3.4/submodules/httpAuthSchemes/aws_sdk/resolveAwsSdkSigV4AConfig.d.ts @@ -0,0 +1,18 @@ +import { LoadedConfigSelectors } from "@smithy/node-config-provider"; +import { Provider } from "@smithy/types"; 
+export interface AwsSdkSigV4AAuthInputConfig { + sigv4aSigningRegionSet?: + | string[] + | undefined + | Provider; +} +export interface AwsSdkSigV4APreviouslyResolved {} +export interface AwsSdkSigV4AAuthResolvedConfig { + sigv4aSigningRegionSet: Provider; +} +export declare const resolveAwsSdkSigV4AConfig: ( + config: T & AwsSdkSigV4AAuthInputConfig & AwsSdkSigV4APreviouslyResolved +) => T & AwsSdkSigV4AAuthResolvedConfig; +export declare const NODE_SIGV4A_CONFIG_OPTIONS: LoadedConfigSelectors< + string[] | undefined +>; diff --git a/amplify/functions/downloadDocument/node_modules/@aws-sdk/core/dist-types/ts3.4/submodules/httpAuthSchemes/aws_sdk/resolveAwsSdkSigV4Config.d.ts b/amplify/functions/downloadDocument/node_modules/@aws-sdk/core/dist-types/ts3.4/submodules/httpAuthSchemes/aws_sdk/resolveAwsSdkSigV4Config.d.ts new file mode 100644 index 0000000..fc562d9 --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@aws-sdk/core/dist-types/ts3.4/submodules/httpAuthSchemes/aws_sdk/resolveAwsSdkSigV4Config.d.ts @@ -0,0 +1,65 @@ +import { MergeFunctions } from "@aws-sdk/types"; +import { SignatureV4CryptoInit, SignatureV4Init } from "@smithy/signature-v4"; +import { + AuthScheme, + AwsCredentialIdentity, + AwsCredentialIdentityProvider, + ChecksumConstructor, + HashConstructor, + MemoizedProvider, + Provider, + RegionInfoProvider, + RequestSigner, +} from "@smithy/types"; +export interface AwsSdkSigV4AuthInputConfig { + credentials?: AwsCredentialIdentity | AwsCredentialIdentityProvider; + signer?: + | RequestSigner + | ((authScheme?: AuthScheme) => Promise); + signingEscapePath?: boolean; + systemClockOffset?: number; + signingRegion?: string; + signerConstructor?: new ( + options: SignatureV4Init & SignatureV4CryptoInit + ) => RequestSigner; +} +export type AwsSdkSigV4Memoized = { + memoized?: boolean; + configBound?: boolean; + attributed?: boolean; +}; +export interface AwsSdkSigV4PreviouslyResolved { + credentialDefaultProvider?: ( + input: any + ) 
=> MemoizedProvider; + region: string | Provider; + sha256: ChecksumConstructor | HashConstructor; + signingName?: string; + regionInfoProvider?: RegionInfoProvider; + defaultSigningName?: string; + serviceId: string; + useFipsEndpoint: Provider; + useDualstackEndpoint: Provider; +} +export interface AwsSdkSigV4AuthResolvedConfig { + credentials: MergeFunctions< + AwsCredentialIdentityProvider, + MemoizedProvider + > & + AwsSdkSigV4Memoized; + signer: (authScheme?: AuthScheme) => Promise; + signingEscapePath: boolean; + systemClockOffset: number; +} +export declare const resolveAwsSdkSigV4Config: ( + config: T & AwsSdkSigV4AuthInputConfig & AwsSdkSigV4PreviouslyResolved +) => T & AwsSdkSigV4AuthResolvedConfig; +export interface AWSSDKSigV4AuthInputConfig + extends AwsSdkSigV4AuthInputConfig {} +export interface AWSSDKSigV4PreviouslyResolved + extends AwsSdkSigV4PreviouslyResolved {} +export interface AWSSDKSigV4AuthResolvedConfig + extends AwsSdkSigV4AuthResolvedConfig {} +export declare const resolveAWSSDKSigV4Config: ( + config: T & AwsSdkSigV4AuthInputConfig & AwsSdkSigV4PreviouslyResolved +) => T & AwsSdkSigV4AuthResolvedConfig; diff --git a/amplify/functions/downloadDocument/node_modules/@aws-sdk/core/dist-types/ts3.4/submodules/httpAuthSchemes/index.d.ts b/amplify/functions/downloadDocument/node_modules/@aws-sdk/core/dist-types/ts3.4/submodules/httpAuthSchemes/index.d.ts new file mode 100644 index 0000000..29d0c3b --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@aws-sdk/core/dist-types/ts3.4/submodules/httpAuthSchemes/index.d.ts @@ -0,0 +1 @@ +export * from "./aws_sdk"; diff --git a/amplify/functions/downloadDocument/node_modules/@aws-sdk/core/dist-types/ts3.4/submodules/httpAuthSchemes/utils/getArrayForCommaSeparatedString.d.ts b/amplify/functions/downloadDocument/node_modules/@aws-sdk/core/dist-types/ts3.4/submodules/httpAuthSchemes/utils/getArrayForCommaSeparatedString.d.ts new file mode 100644 index 0000000..aee2328 --- /dev/null +++ 
b/amplify/functions/downloadDocument/node_modules/@aws-sdk/core/dist-types/ts3.4/submodules/httpAuthSchemes/utils/getArrayForCommaSeparatedString.d.ts @@ -0,0 +1 @@ +export declare const getArrayForCommaSeparatedString: (str: string) => string[]; diff --git a/amplify/functions/downloadDocument/node_modules/@aws-sdk/core/dist-types/ts3.4/submodules/httpAuthSchemes/utils/getDateHeader.d.ts b/amplify/functions/downloadDocument/node_modules/@aws-sdk/core/dist-types/ts3.4/submodules/httpAuthSchemes/utils/getDateHeader.d.ts new file mode 100644 index 0000000..73fc529 --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@aws-sdk/core/dist-types/ts3.4/submodules/httpAuthSchemes/utils/getDateHeader.d.ts @@ -0,0 +1 @@ +export declare const getDateHeader: (response: unknown) => string | undefined; diff --git a/amplify/functions/downloadDocument/node_modules/@aws-sdk/core/dist-types/ts3.4/submodules/httpAuthSchemes/utils/getSkewCorrectedDate.d.ts b/amplify/functions/downloadDocument/node_modules/@aws-sdk/core/dist-types/ts3.4/submodules/httpAuthSchemes/utils/getSkewCorrectedDate.d.ts new file mode 100644 index 0000000..741c5ea --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@aws-sdk/core/dist-types/ts3.4/submodules/httpAuthSchemes/utils/getSkewCorrectedDate.d.ts @@ -0,0 +1 @@ +export declare const getSkewCorrectedDate: (systemClockOffset: number) => Date; diff --git a/amplify/functions/downloadDocument/node_modules/@aws-sdk/core/dist-types/ts3.4/submodules/httpAuthSchemes/utils/getUpdatedSystemClockOffset.d.ts b/amplify/functions/downloadDocument/node_modules/@aws-sdk/core/dist-types/ts3.4/submodules/httpAuthSchemes/utils/getUpdatedSystemClockOffset.d.ts new file mode 100644 index 0000000..eae3311 --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@aws-sdk/core/dist-types/ts3.4/submodules/httpAuthSchemes/utils/getUpdatedSystemClockOffset.d.ts @@ -0,0 +1,4 @@ +export declare const getUpdatedSystemClockOffset: ( + clockTime: 
string, + currentSystemClockOffset: number +) => number; diff --git a/amplify/functions/downloadDocument/node_modules/@aws-sdk/core/dist-types/ts3.4/submodules/httpAuthSchemes/utils/index.d.ts b/amplify/functions/downloadDocument/node_modules/@aws-sdk/core/dist-types/ts3.4/submodules/httpAuthSchemes/utils/index.d.ts new file mode 100644 index 0000000..07c2195 --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@aws-sdk/core/dist-types/ts3.4/submodules/httpAuthSchemes/utils/index.d.ts @@ -0,0 +1,3 @@ +export * from "./getDateHeader"; +export * from "./getSkewCorrectedDate"; +export * from "./getUpdatedSystemClockOffset"; diff --git a/amplify/functions/downloadDocument/node_modules/@aws-sdk/core/dist-types/ts3.4/submodules/httpAuthSchemes/utils/isClockSkewed.d.ts b/amplify/functions/downloadDocument/node_modules/@aws-sdk/core/dist-types/ts3.4/submodules/httpAuthSchemes/utils/isClockSkewed.d.ts new file mode 100644 index 0000000..9f994f8 --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@aws-sdk/core/dist-types/ts3.4/submodules/httpAuthSchemes/utils/isClockSkewed.d.ts @@ -0,0 +1,4 @@ +export declare const isClockSkewed: ( + clockTime: number, + systemClockOffset: number +) => boolean; diff --git a/amplify/functions/downloadDocument/node_modules/@aws-sdk/core/dist-types/ts3.4/submodules/protocols/coercing-serializers.d.ts b/amplify/functions/downloadDocument/node_modules/@aws-sdk/core/dist-types/ts3.4/submodules/protocols/coercing-serializers.d.ts new file mode 100644 index 0000000..7657ceb --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@aws-sdk/core/dist-types/ts3.4/submodules/protocols/coercing-serializers.d.ts @@ -0,0 +1,3 @@ +export declare const _toStr: (val: unknown) => string | undefined; +export declare const _toBool: (val: unknown) => boolean | undefined; +export declare const _toNum: (val: unknown) => number | undefined; diff --git 
a/amplify/functions/downloadDocument/node_modules/@aws-sdk/core/dist-types/ts3.4/submodules/protocols/common.d.ts b/amplify/functions/downloadDocument/node_modules/@aws-sdk/core/dist-types/ts3.4/submodules/protocols/common.d.ts new file mode 100644 index 0000000..73486db --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@aws-sdk/core/dist-types/ts3.4/submodules/protocols/common.d.ts @@ -0,0 +1,5 @@ +import { SerdeContext } from "@smithy/types"; +export declare const collectBodyString: ( + streamBody: any, + context: SerdeContext +) => Promise; diff --git a/amplify/functions/downloadDocument/node_modules/@aws-sdk/core/dist-types/ts3.4/submodules/protocols/index.d.ts b/amplify/functions/downloadDocument/node_modules/@aws-sdk/core/dist-types/ts3.4/submodules/protocols/index.d.ts new file mode 100644 index 0000000..09a6ac2 --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@aws-sdk/core/dist-types/ts3.4/submodules/protocols/index.d.ts @@ -0,0 +1,4 @@ +export * from "./coercing-serializers"; +export * from "./json/awsExpectUnion"; +export * from "./json/parseJsonBody"; +export * from "./xml/parseXmlBody"; diff --git a/amplify/functions/downloadDocument/node_modules/@aws-sdk/core/dist-types/ts3.4/submodules/protocols/json/awsExpectUnion.d.ts b/amplify/functions/downloadDocument/node_modules/@aws-sdk/core/dist-types/ts3.4/submodules/protocols/json/awsExpectUnion.d.ts new file mode 100644 index 0000000..fdc331e --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@aws-sdk/core/dist-types/ts3.4/submodules/protocols/json/awsExpectUnion.d.ts @@ -0,0 +1,3 @@ +export declare const awsExpectUnion: ( + value: unknown +) => Record | undefined; diff --git a/amplify/functions/downloadDocument/node_modules/@aws-sdk/core/dist-types/ts3.4/submodules/protocols/json/parseJsonBody.d.ts b/amplify/functions/downloadDocument/node_modules/@aws-sdk/core/dist-types/ts3.4/submodules/protocols/json/parseJsonBody.d.ts new file mode 100644 index 
0000000..b400419 --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@aws-sdk/core/dist-types/ts3.4/submodules/protocols/json/parseJsonBody.d.ts @@ -0,0 +1,13 @@ +import { HttpResponse, SerdeContext } from "@smithy/types"; +export declare const parseJsonBody: ( + streamBody: any, + context: SerdeContext +) => any; +export declare const parseJsonErrorBody: ( + errorBody: any, + context: SerdeContext +) => Promise; +export declare const loadRestJsonErrorCode: ( + output: HttpResponse, + data: any +) => string | undefined; diff --git a/amplify/functions/downloadDocument/node_modules/@aws-sdk/core/dist-types/ts3.4/submodules/protocols/xml/parseXmlBody.d.ts b/amplify/functions/downloadDocument/node_modules/@aws-sdk/core/dist-types/ts3.4/submodules/protocols/xml/parseXmlBody.d.ts new file mode 100644 index 0000000..f151834 --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@aws-sdk/core/dist-types/ts3.4/submodules/protocols/xml/parseXmlBody.d.ts @@ -0,0 +1,13 @@ +import { HttpResponse, SerdeContext } from "@smithy/types"; +export declare const parseXmlBody: ( + streamBody: any, + context: SerdeContext +) => any; +export declare const parseXmlErrorBody: ( + errorBody: any, + context: SerdeContext +) => Promise; +export declare const loadRestXmlErrorCode: ( + output: HttpResponse, + data: any +) => string | undefined; diff --git a/amplify/functions/downloadDocument/node_modules/@aws-sdk/core/httpAuthSchemes.d.ts b/amplify/functions/downloadDocument/node_modules/@aws-sdk/core/httpAuthSchemes.d.ts new file mode 100644 index 0000000..3783b5e --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@aws-sdk/core/httpAuthSchemes.d.ts @@ -0,0 +1,7 @@ +/** + * Do not edit: + * This is a compatibility redirect for contexts that do not understand package.json exports field. 
+ */ +declare module "@aws-sdk/core/httpAuthSchemes" { + export * from "@aws-sdk/core/dist-types/submodules/httpAuthSchemes/index.d"; +} diff --git a/amplify/functions/downloadDocument/node_modules/@aws-sdk/core/httpAuthSchemes.js b/amplify/functions/downloadDocument/node_modules/@aws-sdk/core/httpAuthSchemes.js new file mode 100644 index 0000000..17685b0 --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@aws-sdk/core/httpAuthSchemes.js @@ -0,0 +1,5 @@ +/** + * Do not edit: + * This is a compatibility redirect for contexts that do not understand package.json exports field. + */ +module.exports = require("./dist-cjs/submodules/httpAuthSchemes/index.js"); diff --git a/amplify/functions/downloadDocument/node_modules/@aws-sdk/core/package.json b/amplify/functions/downloadDocument/node_modules/@aws-sdk/core/package.json new file mode 100644 index 0000000..a41d77a --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@aws-sdk/core/package.json @@ -0,0 +1,119 @@ +{ + "name": "@aws-sdk/core", + "version": "3.799.0", + "description": "Core functions & classes shared by multiple AWS SDK clients.", + "scripts": { + "build": "yarn lint && concurrently 'yarn:build:cjs' 'yarn:build:es' 'yarn:build:types'", + "build:cjs": "node ../../scripts/compilation/inline core && rimraf ./dist-cjs/api-extractor-type-index.js", + "build:es": "tsc -p tsconfig.es.json && rimraf ./dist-es/api-extractor-type-index.js", + "build:include:deps": "lerna run --scope $npm_package_name --include-dependencies build", + "build:types": "tsc -p tsconfig.types.json", + "build:types:downlevel": "downlevel-dts dist-types dist-types/ts3.4", + "lint": "node ../../scripts/validation/submodules-linter.js --pkg core", + "clean": "rimraf ./dist-* && rimraf *.tsbuildinfo", + "extract:docs": "api-extractor run --local", + "test": "yarn g:vitest run", + "test:integration": "yarn g:vitest run -c vitest.config.integ.ts", + "test:watch": "yarn g:vitest watch", + "test:integration:watch": 
"yarn g:vitest watch -c vitest.config.integ.ts" + }, + "main": "./dist-cjs/index.js", + "module": "./dist-es/index.js", + "types": "./dist-types/index.d.ts", + "exports": { + ".": { + "types": "./dist-types/index.d.ts", + "module": "./dist-es/index.js", + "node": "./dist-cjs/index.js", + "import": "./dist-es/index.js", + "require": "./dist-cjs/index.js" + }, + "./package.json": { + "module": "./package.json", + "node": "./package.json", + "import": "./package.json", + "require": "./package.json" + }, + "./client": { + "types": "./dist-types/submodules/client/index.d.ts", + "module": "./dist-es/submodules/client/index.js", + "node": "./dist-cjs/submodules/client/index.js", + "import": "./dist-es/submodules/client/index.js", + "require": "./dist-cjs/submodules/client/index.js" + }, + "./httpAuthSchemes": { + "types": "./dist-types/submodules/httpAuthSchemes/index.d.ts", + "module": "./dist-es/submodules/httpAuthSchemes/index.js", + "node": "./dist-cjs/submodules/httpAuthSchemes/index.js", + "import": "./dist-es/submodules/httpAuthSchemes/index.js", + "require": "./dist-cjs/submodules/httpAuthSchemes/index.js" + }, + "./account-id-endpoint": { + "types": "./dist-types/submodules/account-id-endpoint/index.d.ts", + "module": "./dist-es/submodules/account-id-endpoint/index.js", + "node": "./dist-cjs/submodules/account-id-endpoint/index.js", + "import": "./dist-es/submodules/account-id-endpoint/index.js", + "require": "./dist-cjs/submodules/account-id-endpoint/index.js" + }, + "./protocols": { + "types": "./dist-types/submodules/protocols/index.d.ts", + "module": "./dist-es/submodules/protocols/index.js", + "node": "./dist-cjs/submodules/protocols/index.js", + "import": "./dist-es/submodules/protocols/index.js", + "require": "./dist-cjs/submodules/protocols/index.js" + } + }, + "files": [ + "./account-id-endpoint.d.ts", + "./account-id-endpoint.js", + "./client.d.ts", + "./client.js", + "./httpAuthSchemes.d.ts", + "./httpAuthSchemes.js", + "./protocols.d.ts", + 
"./protocols.js", + "dist-*/**" + ], + "sideEffects": false, + "author": { + "name": "AWS SDK for JavaScript Team", + "url": "https://aws.amazon.com/javascript/" + }, + "license": "Apache-2.0", + "dependencies": { + "@aws-sdk/types": "3.775.0", + "@smithy/core": "^3.3.0", + "@smithy/node-config-provider": "^4.0.2", + "@smithy/property-provider": "^4.0.2", + "@smithy/protocol-http": "^5.1.0", + "@smithy/signature-v4": "^5.1.0", + "@smithy/smithy-client": "^4.2.1", + "@smithy/types": "^4.2.0", + "@smithy/util-middleware": "^4.0.2", + "fast-xml-parser": "4.4.1", + "tslib": "^2.6.2" + }, + "devDependencies": { + "@tsconfig/recommended": "1.0.1", + "concurrently": "7.0.0", + "downlevel-dts": "0.10.1", + "rimraf": "3.0.2", + "typescript": "~5.2.2" + }, + "engines": { + "node": ">=18.0.0" + }, + "typesVersions": { + "<4.0": { + "dist-types/*": [ + "dist-types/ts3.4/*" + ] + } + }, + "homepage": "https://github.com/aws/aws-sdk-js-v3/tree/main/packages/core", + "repository": { + "type": "git", + "url": "https://github.com/aws/aws-sdk-js-v3.git", + "directory": "packages/core" + } +} diff --git a/amplify/functions/downloadDocument/node_modules/@aws-sdk/core/protocols.d.ts b/amplify/functions/downloadDocument/node_modules/@aws-sdk/core/protocols.d.ts new file mode 100644 index 0000000..7a36334 --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@aws-sdk/core/protocols.d.ts @@ -0,0 +1,7 @@ +/** + * Do not edit: + * This is a compatibility redirect for contexts that do not understand package.json exports field. 
+ */ +declare module "@aws-sdk/core/protocols" { + export * from "@aws-sdk/core/dist-types/submodules/protocols/index.d"; +} diff --git a/amplify/functions/downloadDocument/node_modules/@aws-sdk/core/protocols.js b/amplify/functions/downloadDocument/node_modules/@aws-sdk/core/protocols.js new file mode 100644 index 0000000..e2916e8 --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@aws-sdk/core/protocols.js @@ -0,0 +1,5 @@ +/** + * Do not edit: + * This is a compatibility redirect for contexts that do not understand package.json exports field. + */ +module.exports = require("./dist-cjs/submodules/protocols/index.js"); diff --git a/amplify/functions/downloadDocument/node_modules/@aws-sdk/credential-provider-env/LICENSE b/amplify/functions/downloadDocument/node_modules/@aws-sdk/credential-provider-env/LICENSE new file mode 100644 index 0000000..7b6491b --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@aws-sdk/credential-provider-env/LICENSE @@ -0,0 +1,201 @@ +Apache License + Version 2.0, January 2004 + http://www.apache.org/licenses/ + + TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION + + 1. Definitions. + + "License" shall mean the terms and conditions for use, reproduction, + and distribution as defined by Sections 1 through 9 of this document. + + "Licensor" shall mean the copyright owner or entity authorized by + the copyright owner that is granting the License. + + "Legal Entity" shall mean the union of the acting entity and all + other entities that control, are controlled by, or are under common + control with that entity. For the purposes of this definition, + "control" means (i) the power, direct or indirect, to cause the + direction or management of such entity, whether by contract or + otherwise, or (ii) ownership of fifty percent (50%) or more of the + outstanding shares, or (iii) beneficial ownership of such entity. 
+ + "You" (or "Your") shall mean an individual or Legal Entity + exercising permissions granted by this License. + + "Source" form shall mean the preferred form for making modifications, + including but not limited to software source code, documentation + source, and configuration files. + + "Object" form shall mean any form resulting from mechanical + transformation or translation of a Source form, including but + not limited to compiled object code, generated documentation, + and conversions to other media types. + + "Work" shall mean the work of authorship, whether in Source or + Object form, made available under the License, as indicated by a + copyright notice that is included in or attached to the work + (an example is provided in the Appendix below). + + "Derivative Works" shall mean any work, whether in Source or Object + form, that is based on (or derived from) the Work and for which the + editorial revisions, annotations, elaborations, or other modifications + represent, as a whole, an original work of authorship. For the purposes + of this License, Derivative Works shall not include works that remain + separable from, or merely link (or bind by name) to the interfaces of, + the Work and Derivative Works thereof. + + "Contribution" shall mean any work of authorship, including + the original version of the Work and any modifications or additions + to that Work or Derivative Works thereof, that is intentionally + submitted to Licensor for inclusion in the Work by the copyright owner + or by an individual or Legal Entity authorized to submit on behalf of + the copyright owner. 
For the purposes of this definition, "submitted" + means any form of electronic, verbal, or written communication sent + to the Licensor or its representatives, including but not limited to + communication on electronic mailing lists, source code control systems, + and issue tracking systems that are managed by, or on behalf of, the + Licensor for the purpose of discussing and improving the Work, but + excluding communication that is conspicuously marked or otherwise + designated in writing by the copyright owner as "Not a Contribution." + + "Contributor" shall mean Licensor and any individual or Legal Entity + on behalf of whom a Contribution has been received by Licensor and + subsequently incorporated within the Work. + + 2. Grant of Copyright License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + copyright license to reproduce, prepare Derivative Works of, + publicly display, publicly perform, sublicense, and distribute the + Work and such Derivative Works in Source or Object form. + + 3. Grant of Patent License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + (except as stated in this section) patent license to make, have made, + use, offer to sell, sell, import, and otherwise transfer the Work, + where such license applies only to those patent claims licensable + by such Contributor that are necessarily infringed by their + Contribution(s) alone or by combination of their Contribution(s) + with the Work to which such Contribution(s) was submitted. 
If You + institute patent litigation against any entity (including a + cross-claim or counterclaim in a lawsuit) alleging that the Work + or a Contribution incorporated within the Work constitutes direct + or contributory patent infringement, then any patent licenses + granted to You under this License for that Work shall terminate + as of the date such litigation is filed. + + 4. Redistribution. You may reproduce and distribute copies of the + Work or Derivative Works thereof in any medium, with or without + modifications, and in Source or Object form, provided that You + meet the following conditions: + + (a) You must give any other recipients of the Work or + Derivative Works a copy of this License; and + + (b) You must cause any modified files to carry prominent notices + stating that You changed the files; and + + (c) You must retain, in the Source form of any Derivative Works + that You distribute, all copyright, patent, trademark, and + attribution notices from the Source form of the Work, + excluding those notices that do not pertain to any part of + the Derivative Works; and + + (d) If the Work includes a "NOTICE" text file as part of its + distribution, then any Derivative Works that You distribute must + include a readable copy of the attribution notices contained + within such NOTICE file, excluding those notices that do not + pertain to any part of the Derivative Works, in at least one + of the following places: within a NOTICE text file distributed + as part of the Derivative Works; within the Source form or + documentation, if provided along with the Derivative Works; or, + within a display generated by the Derivative Works, if and + wherever such third-party notices normally appear. The contents + of the NOTICE file are for informational purposes only and + do not modify the License. 
You may add Your own attribution + notices within Derivative Works that You distribute, alongside + or as an addendum to the NOTICE text from the Work, provided + that such additional attribution notices cannot be construed + as modifying the License. + + You may add Your own copyright statement to Your modifications and + may provide additional or different license terms and conditions + for use, reproduction, or distribution of Your modifications, or + for any such Derivative Works as a whole, provided Your use, + reproduction, and distribution of the Work otherwise complies with + the conditions stated in this License. + + 5. Submission of Contributions. Unless You explicitly state otherwise, + any Contribution intentionally submitted for inclusion in the Work + by You to the Licensor shall be under the terms and conditions of + this License, without any additional terms or conditions. + Notwithstanding the above, nothing herein shall supersede or modify + the terms of any separate license agreement you may have executed + with Licensor regarding such Contributions. + + 6. Trademarks. This License does not grant permission to use the trade + names, trademarks, service marks, or product names of the Licensor, + except as required for reasonable and customary use in describing the + origin of the Work and reproducing the content of the NOTICE file. + + 7. Disclaimer of Warranty. Unless required by applicable law or + agreed to in writing, Licensor provides the Work (and each + Contributor provides its Contributions) on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or + implied, including, without limitation, any warranties or conditions + of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A + PARTICULAR PURPOSE. You are solely responsible for determining the + appropriateness of using or redistributing the Work and assume any + risks associated with Your exercise of permissions under this License. + + 8. 
Limitation of Liability. In no event and under no legal theory, + whether in tort (including negligence), contract, or otherwise, + unless required by applicable law (such as deliberate and grossly + negligent acts) or agreed to in writing, shall any Contributor be + liable to You for damages, including any direct, indirect, special, + incidental, or consequential damages of any character arising as a + result of this License or out of the use or inability to use the + Work (including but not limited to damages for loss of goodwill, + work stoppage, computer failure or malfunction, or any and all + other commercial damages or losses), even if such Contributor + has been advised of the possibility of such damages. + + 9. Accepting Warranty or Additional Liability. While redistributing + the Work or Derivative Works thereof, You may choose to offer, + and charge a fee for, acceptance of support, warranty, indemnity, + or other liability obligations and/or rights consistent with this + License. However, in accepting such obligations, You may act only + on Your own behalf and on Your sole responsibility, not on behalf + of any other Contributor, and only if You agree to indemnify, + defend, and hold each Contributor harmless for any liability + incurred by, or claims asserted against, such Contributor by reason + of your accepting any such warranty or additional liability. + + END OF TERMS AND CONDITIONS + + APPENDIX: How to apply the Apache License to your work. + + To apply the Apache License to your work, attach the following + boilerplate notice, with the fields enclosed by brackets "{}" + replaced with your own identifying information. (Don't include + the brackets!) The text should be enclosed in the appropriate + comment syntax for the file format. We also recommend that a + file or class name and description of purpose be included on the + same "printed page" as the copyright notice for easier + identification within third-party archives. 
+ + Copyright 2018-2020 Amazon.com, Inc. or its affiliates. All Rights Reserved. + + Licensed under the Apache License, Version 2.0 (the "License"); + you may not use this file except in compliance with the License. + You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + + Unless required by applicable law or agreed to in writing, software + distributed under the License is distributed on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + See the License for the specific language governing permissions and + limitations under the License. \ No newline at end of file diff --git a/amplify/functions/downloadDocument/node_modules/@aws-sdk/credential-provider-env/README.md b/amplify/functions/downloadDocument/node_modules/@aws-sdk/credential-provider-env/README.md new file mode 100644 index 0000000..61a6436 --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@aws-sdk/credential-provider-env/README.md @@ -0,0 +1,11 @@ +# @aws-sdk/credential-provider-env + +[![NPM version](https://img.shields.io/npm/v/@aws-sdk/credential-provider-env/latest.svg)](https://www.npmjs.com/package/@aws-sdk/credential-provider-env) +[![NPM downloads](https://img.shields.io/npm/dm/@aws-sdk/credential-provider-env.svg)](https://www.npmjs.com/package/@aws-sdk/credential-provider-env) + +> An internal package + +## Usage + +You probably shouldn't, at least directly. Please use [@aws-sdk/credential-providers](https://www.npmjs.com/package/@aws-sdk/credential-providers) +instead. 
diff --git a/amplify/functions/downloadDocument/node_modules/@aws-sdk/credential-provider-env/dist-cjs/index.js b/amplify/functions/downloadDocument/node_modules/@aws-sdk/credential-provider-env/dist-cjs/index.js new file mode 100644 index 0000000..c906a6b --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@aws-sdk/credential-provider-env/dist-cjs/index.js @@ -0,0 +1,76 @@ +"use strict"; +var __defProp = Object.defineProperty; +var __getOwnPropDesc = Object.getOwnPropertyDescriptor; +var __getOwnPropNames = Object.getOwnPropertyNames; +var __hasOwnProp = Object.prototype.hasOwnProperty; +var __name = (target, value) => __defProp(target, "name", { value, configurable: true }); +var __export = (target, all) => { + for (var name in all) + __defProp(target, name, { get: all[name], enumerable: true }); +}; +var __copyProps = (to, from, except, desc) => { + if (from && typeof from === "object" || typeof from === "function") { + for (let key of __getOwnPropNames(from)) + if (!__hasOwnProp.call(to, key) && key !== except) + __defProp(to, key, { get: () => from[key], enumerable: !(desc = __getOwnPropDesc(from, key)) || desc.enumerable }); + } + return to; +}; +var __toCommonJS = (mod) => __copyProps(__defProp({}, "__esModule", { value: true }), mod); + +// src/index.ts +var index_exports = {}; +__export(index_exports, { + ENV_ACCOUNT_ID: () => ENV_ACCOUNT_ID, + ENV_CREDENTIAL_SCOPE: () => ENV_CREDENTIAL_SCOPE, + ENV_EXPIRATION: () => ENV_EXPIRATION, + ENV_KEY: () => ENV_KEY, + ENV_SECRET: () => ENV_SECRET, + ENV_SESSION: () => ENV_SESSION, + fromEnv: () => fromEnv +}); +module.exports = __toCommonJS(index_exports); + +// src/fromEnv.ts +var import_client = require("@aws-sdk/core/client"); +var import_property_provider = require("@smithy/property-provider"); +var ENV_KEY = "AWS_ACCESS_KEY_ID"; +var ENV_SECRET = "AWS_SECRET_ACCESS_KEY"; +var ENV_SESSION = "AWS_SESSION_TOKEN"; +var ENV_EXPIRATION = "AWS_CREDENTIAL_EXPIRATION"; +var ENV_CREDENTIAL_SCOPE = 
"AWS_CREDENTIAL_SCOPE"; +var ENV_ACCOUNT_ID = "AWS_ACCOUNT_ID"; +var fromEnv = /* @__PURE__ */ __name((init) => async () => { + init?.logger?.debug("@aws-sdk/credential-provider-env - fromEnv"); + const accessKeyId = process.env[ENV_KEY]; + const secretAccessKey = process.env[ENV_SECRET]; + const sessionToken = process.env[ENV_SESSION]; + const expiry = process.env[ENV_EXPIRATION]; + const credentialScope = process.env[ENV_CREDENTIAL_SCOPE]; + const accountId = process.env[ENV_ACCOUNT_ID]; + if (accessKeyId && secretAccessKey) { + const credentials = { + accessKeyId, + secretAccessKey, + ...sessionToken && { sessionToken }, + ...expiry && { expiration: new Date(expiry) }, + ...credentialScope && { credentialScope }, + ...accountId && { accountId } + }; + (0, import_client.setCredentialFeature)(credentials, "CREDENTIALS_ENV_VARS", "g"); + return credentials; + } + throw new import_property_provider.CredentialsProviderError("Unable to find environment variable credentials.", { logger: init?.logger }); +}, "fromEnv"); +// Annotate the CommonJS export names for ESM import in node: + +0 && (module.exports = { + ENV_KEY, + ENV_SECRET, + ENV_SESSION, + ENV_EXPIRATION, + ENV_CREDENTIAL_SCOPE, + ENV_ACCOUNT_ID, + fromEnv +}); + diff --git a/amplify/functions/downloadDocument/node_modules/@aws-sdk/credential-provider-env/dist-es/fromEnv.js b/amplify/functions/downloadDocument/node_modules/@aws-sdk/credential-provider-env/dist-es/fromEnv.js new file mode 100644 index 0000000..a6a2928 --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@aws-sdk/credential-provider-env/dist-es/fromEnv.js @@ -0,0 +1,30 @@ +import { setCredentialFeature } from "@aws-sdk/core/client"; +import { CredentialsProviderError } from "@smithy/property-provider"; +export const ENV_KEY = "AWS_ACCESS_KEY_ID"; +export const ENV_SECRET = "AWS_SECRET_ACCESS_KEY"; +export const ENV_SESSION = "AWS_SESSION_TOKEN"; +export const ENV_EXPIRATION = "AWS_CREDENTIAL_EXPIRATION"; +export const 
ENV_CREDENTIAL_SCOPE = "AWS_CREDENTIAL_SCOPE"; +export const ENV_ACCOUNT_ID = "AWS_ACCOUNT_ID"; +export const fromEnv = (init) => async () => { + init?.logger?.debug("@aws-sdk/credential-provider-env - fromEnv"); + const accessKeyId = process.env[ENV_KEY]; + const secretAccessKey = process.env[ENV_SECRET]; + const sessionToken = process.env[ENV_SESSION]; + const expiry = process.env[ENV_EXPIRATION]; + const credentialScope = process.env[ENV_CREDENTIAL_SCOPE]; + const accountId = process.env[ENV_ACCOUNT_ID]; + if (accessKeyId && secretAccessKey) { + const credentials = { + accessKeyId, + secretAccessKey, + ...(sessionToken && { sessionToken }), + ...(expiry && { expiration: new Date(expiry) }), + ...(credentialScope && { credentialScope }), + ...(accountId && { accountId }), + }; + setCredentialFeature(credentials, "CREDENTIALS_ENV_VARS", "g"); + return credentials; + } + throw new CredentialsProviderError("Unable to find environment variable credentials.", { logger: init?.logger }); +}; diff --git a/amplify/functions/downloadDocument/node_modules/@aws-sdk/credential-provider-env/dist-es/index.js b/amplify/functions/downloadDocument/node_modules/@aws-sdk/credential-provider-env/dist-es/index.js new file mode 100644 index 0000000..17bf6da --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@aws-sdk/credential-provider-env/dist-es/index.js @@ -0,0 +1 @@ +export * from "./fromEnv"; diff --git a/amplify/functions/downloadDocument/node_modules/@aws-sdk/credential-provider-env/dist-types/fromEnv.d.ts b/amplify/functions/downloadDocument/node_modules/@aws-sdk/credential-provider-env/dist-types/fromEnv.d.ts new file mode 100644 index 0000000..541aa69 --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@aws-sdk/credential-provider-env/dist-types/fromEnv.d.ts @@ -0,0 +1,36 @@ +import type { CredentialProviderOptions } from "@aws-sdk/types"; +import { AwsCredentialIdentityProvider } from "@smithy/types"; +export interface FromEnvInit extends 
CredentialProviderOptions { +} +/** + * @internal + */ +export declare const ENV_KEY = "AWS_ACCESS_KEY_ID"; +/** + * @internal + */ +export declare const ENV_SECRET = "AWS_SECRET_ACCESS_KEY"; +/** + * @internal + */ +export declare const ENV_SESSION = "AWS_SESSION_TOKEN"; +/** + * @internal + */ +export declare const ENV_EXPIRATION = "AWS_CREDENTIAL_EXPIRATION"; +/** + * @internal + */ +export declare const ENV_CREDENTIAL_SCOPE = "AWS_CREDENTIAL_SCOPE"; +/** + * @internal + */ +export declare const ENV_ACCOUNT_ID = "AWS_ACCOUNT_ID"; +/** + * @internal + * + * Source AWS credentials from known environment variables. If either the + * `AWS_ACCESS_KEY_ID` or `AWS_SECRET_ACCESS_KEY` environment variable is not + * set in this process, the provider will return a rejected promise. + */ +export declare const fromEnv: (init?: FromEnvInit) => AwsCredentialIdentityProvider; diff --git a/amplify/functions/downloadDocument/node_modules/@aws-sdk/credential-provider-env/dist-types/index.d.ts b/amplify/functions/downloadDocument/node_modules/@aws-sdk/credential-provider-env/dist-types/index.d.ts new file mode 100644 index 0000000..fe76e31 --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@aws-sdk/credential-provider-env/dist-types/index.d.ts @@ -0,0 +1,4 @@ +/** + * @internal + */ +export * from "./fromEnv"; diff --git a/amplify/functions/downloadDocument/node_modules/@aws-sdk/credential-provider-env/dist-types/ts3.4/fromEnv.d.ts b/amplify/functions/downloadDocument/node_modules/@aws-sdk/credential-provider-env/dist-types/ts3.4/fromEnv.d.ts new file mode 100644 index 0000000..55c454e --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@aws-sdk/credential-provider-env/dist-types/ts3.4/fromEnv.d.ts @@ -0,0 +1,12 @@ +import { CredentialProviderOptions } from "@aws-sdk/types"; +import { AwsCredentialIdentityProvider } from "@smithy/types"; +export interface FromEnvInit extends CredentialProviderOptions {} +export declare const ENV_KEY = 
"AWS_ACCESS_KEY_ID"; +export declare const ENV_SECRET = "AWS_SECRET_ACCESS_KEY"; +export declare const ENV_SESSION = "AWS_SESSION_TOKEN"; +export declare const ENV_EXPIRATION = "AWS_CREDENTIAL_EXPIRATION"; +export declare const ENV_CREDENTIAL_SCOPE = "AWS_CREDENTIAL_SCOPE"; +export declare const ENV_ACCOUNT_ID = "AWS_ACCOUNT_ID"; +export declare const fromEnv: ( + init?: FromEnvInit +) => AwsCredentialIdentityProvider; diff --git a/amplify/functions/downloadDocument/node_modules/@aws-sdk/credential-provider-env/dist-types/ts3.4/index.d.ts b/amplify/functions/downloadDocument/node_modules/@aws-sdk/credential-provider-env/dist-types/ts3.4/index.d.ts new file mode 100644 index 0000000..17bf6da --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@aws-sdk/credential-provider-env/dist-types/ts3.4/index.d.ts @@ -0,0 +1 @@ +export * from "./fromEnv"; diff --git a/amplify/functions/downloadDocument/node_modules/@aws-sdk/credential-provider-env/package.json b/amplify/functions/downloadDocument/node_modules/@aws-sdk/credential-provider-env/package.json new file mode 100644 index 0000000..a66a0de --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@aws-sdk/credential-provider-env/package.json @@ -0,0 +1,62 @@ +{ + "name": "@aws-sdk/credential-provider-env", + "version": "3.799.0", + "description": "AWS credential provider that sources credentials from known environment variables", + "main": "./dist-cjs/index.js", + "module": "./dist-es/index.js", + "scripts": { + "build": "concurrently 'yarn:build:cjs' 'yarn:build:es' 'yarn:build:types'", + "build:cjs": "node ../../scripts/compilation/inline credential-provider-env", + "build:es": "tsc -p tsconfig.es.json", + "build:include:deps": "lerna run --scope $npm_package_name --include-dependencies build", + "build:types": "tsc -p tsconfig.types.json", + "build:types:downlevel": "downlevel-dts dist-types dist-types/ts3.4", + "clean": "rimraf ./dist-* && rimraf *.tsbuildinfo", + "test": "yarn g:vitest 
run", + "test:watch": "yarn g:vitest watch" + }, + "keywords": [ + "aws", + "credentials" + ], + "author": { + "name": "AWS SDK for JavaScript Team", + "url": "https://aws.amazon.com/javascript/" + }, + "license": "Apache-2.0", + "dependencies": { + "@aws-sdk/core": "3.799.0", + "@aws-sdk/types": "3.775.0", + "@smithy/property-provider": "^4.0.2", + "@smithy/types": "^4.2.0", + "tslib": "^2.6.2" + }, + "devDependencies": { + "@tsconfig/recommended": "1.0.1", + "@types/node": "^18.19.69", + "concurrently": "7.0.0", + "downlevel-dts": "0.10.1", + "rimraf": "3.0.2", + "typescript": "~5.2.2" + }, + "types": "./dist-types/index.d.ts", + "engines": { + "node": ">=18.0.0" + }, + "typesVersions": { + "<4.0": { + "dist-types/*": [ + "dist-types/ts3.4/*" + ] + } + }, + "files": [ + "dist-*/**" + ], + "homepage": "https://github.com/aws/aws-sdk-js-v3/tree/main/packages/credential-provider-env", + "repository": { + "type": "git", + "url": "https://github.com/aws/aws-sdk-js-v3.git", + "directory": "packages/credential-provider-env" + } +} diff --git a/amplify/functions/downloadDocument/node_modules/@aws-sdk/credential-provider-http/README.md b/amplify/functions/downloadDocument/node_modules/@aws-sdk/credential-provider-http/README.md new file mode 100644 index 0000000..e8f19f8 --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@aws-sdk/credential-provider-http/README.md @@ -0,0 +1,10 @@ +# @aws-sdk/credential-provider-http + +[![NPM version](https://img.shields.io/npm/v/@aws-sdk/credential-provider-http/latest.svg)](https://www.npmjs.com/package/@aws-sdk/credential-provider-http) +[![NPM downloads](https://img.shields.io/npm/dm/@aws-sdk/credential-provider-http.svg)](https://www.npmjs.com/package/@aws-sdk/credential-provider-http) + +> An internal transitively required package. 
+ +## Usage + +See https://www.npmjs.com/package/@aws-sdk/credential-providers diff --git a/amplify/functions/downloadDocument/node_modules/@aws-sdk/credential-provider-http/dist-cjs/fromHttp/checkUrl.js b/amplify/functions/downloadDocument/node_modules/@aws-sdk/credential-provider-http/dist-cjs/fromHttp/checkUrl.js new file mode 100644 index 0000000..c4adb5f --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@aws-sdk/credential-provider-http/dist-cjs/fromHttp/checkUrl.js @@ -0,0 +1,46 @@ +"use strict"; +Object.defineProperty(exports, "__esModule", { value: true }); +exports.checkUrl = void 0; +const property_provider_1 = require("@smithy/property-provider"); +const LOOPBACK_CIDR_IPv4 = "127.0.0.0/8"; +const LOOPBACK_CIDR_IPv6 = "::1/128"; +const ECS_CONTAINER_HOST = "169.254.170.2"; +const EKS_CONTAINER_HOST_IPv4 = "169.254.170.23"; +const EKS_CONTAINER_HOST_IPv6 = "[fd00:ec2::23]"; +const checkUrl = (url, logger) => { + if (url.protocol === "https:") { + return; + } + if (url.hostname === ECS_CONTAINER_HOST || + url.hostname === EKS_CONTAINER_HOST_IPv4 || + url.hostname === EKS_CONTAINER_HOST_IPv6) { + return; + } + if (url.hostname.includes("[")) { + if (url.hostname === "[::1]" || url.hostname === "[0000:0000:0000:0000:0000:0000:0000:0001]") { + return; + } + } + else { + if (url.hostname === "localhost") { + return; + } + const ipComponents = url.hostname.split("."); + const inRange = (component) => { + const num = parseInt(component, 10); + return 0 <= num && num <= 255; + }; + if (ipComponents[0] === "127" && + inRange(ipComponents[1]) && + inRange(ipComponents[2]) && + inRange(ipComponents[3]) && + ipComponents.length === 4) { + return; + } + } + throw new property_provider_1.CredentialsProviderError(`URL not accepted. 
It must either be HTTPS or match one of the following: + - loopback CIDR 127.0.0.0/8 or [::1/128] + - ECS container host 169.254.170.2 + - EKS container host 169.254.170.23 or [fd00:ec2::23]`, { logger }); +}; +exports.checkUrl = checkUrl; diff --git a/amplify/functions/downloadDocument/node_modules/@aws-sdk/credential-provider-http/dist-cjs/fromHttp/fromHttp.browser.js b/amplify/functions/downloadDocument/node_modules/@aws-sdk/credential-provider-http/dist-cjs/fromHttp/fromHttp.browser.js new file mode 100644 index 0000000..d7c0efa --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@aws-sdk/credential-provider-http/dist-cjs/fromHttp/fromHttp.browser.js @@ -0,0 +1,31 @@ +"use strict"; +Object.defineProperty(exports, "__esModule", { value: true }); +exports.fromHttp = void 0; +const fetch_http_handler_1 = require("@smithy/fetch-http-handler"); +const property_provider_1 = require("@smithy/property-provider"); +const checkUrl_1 = require("./checkUrl"); +const requestHelpers_1 = require("./requestHelpers"); +const retry_wrapper_1 = require("./retry-wrapper"); +const fromHttp = (options = {}) => { + options.logger?.debug("@aws-sdk/credential-provider-http - fromHttp"); + let host; + const full = options.credentialsFullUri; + if (full) { + host = full; + } + else { + throw new property_provider_1.CredentialsProviderError("No HTTP credential provider host provided.", { logger: options.logger }); + } + const url = new URL(host); + (0, checkUrl_1.checkUrl)(url, options.logger); + const requestHandler = new fetch_http_handler_1.FetchHttpHandler(); + return (0, retry_wrapper_1.retryWrapper)(async () => { + const request = (0, requestHelpers_1.createGetRequest)(url); + if (options.authorizationToken) { + request.headers.Authorization = options.authorizationToken; + } + const result = await requestHandler.handle(request); + return (0, requestHelpers_1.getCredentials)(result.response); + }, options.maxRetries ?? 3, options.timeout ?? 
1000); +}; +exports.fromHttp = fromHttp; diff --git a/amplify/functions/downloadDocument/node_modules/@aws-sdk/credential-provider-http/dist-cjs/fromHttp/fromHttp.js b/amplify/functions/downloadDocument/node_modules/@aws-sdk/credential-provider-http/dist-cjs/fromHttp/fromHttp.js new file mode 100644 index 0000000..6e0269a --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@aws-sdk/credential-provider-http/dist-cjs/fromHttp/fromHttp.js @@ -0,0 +1,68 @@ +"use strict"; +Object.defineProperty(exports, "__esModule", { value: true }); +exports.fromHttp = void 0; +const tslib_1 = require("tslib"); +const client_1 = require("@aws-sdk/core/client"); +const node_http_handler_1 = require("@smithy/node-http-handler"); +const property_provider_1 = require("@smithy/property-provider"); +const promises_1 = tslib_1.__importDefault(require("fs/promises")); +const checkUrl_1 = require("./checkUrl"); +const requestHelpers_1 = require("./requestHelpers"); +const retry_wrapper_1 = require("./retry-wrapper"); +const AWS_CONTAINER_CREDENTIALS_RELATIVE_URI = "AWS_CONTAINER_CREDENTIALS_RELATIVE_URI"; +const DEFAULT_LINK_LOCAL_HOST = "http://169.254.170.2"; +const AWS_CONTAINER_CREDENTIALS_FULL_URI = "AWS_CONTAINER_CREDENTIALS_FULL_URI"; +const AWS_CONTAINER_AUTHORIZATION_TOKEN_FILE = "AWS_CONTAINER_AUTHORIZATION_TOKEN_FILE"; +const AWS_CONTAINER_AUTHORIZATION_TOKEN = "AWS_CONTAINER_AUTHORIZATION_TOKEN"; +const fromHttp = (options = {}) => { + options.logger?.debug("@aws-sdk/credential-provider-http - fromHttp"); + let host; + const relative = options.awsContainerCredentialsRelativeUri ?? process.env[AWS_CONTAINER_CREDENTIALS_RELATIVE_URI]; + const full = options.awsContainerCredentialsFullUri ?? process.env[AWS_CONTAINER_CREDENTIALS_FULL_URI]; + const token = options.awsContainerAuthorizationToken ?? process.env[AWS_CONTAINER_AUTHORIZATION_TOKEN]; + const tokenFile = options.awsContainerAuthorizationTokenFile ?? 
process.env[AWS_CONTAINER_AUTHORIZATION_TOKEN_FILE]; + const warn = options.logger?.constructor?.name === "NoOpLogger" || !options.logger ? console.warn : options.logger.warn; + if (relative && full) { + warn("@aws-sdk/credential-provider-http: " + + "you have set both awsContainerCredentialsRelativeUri and awsContainerCredentialsFullUri."); + warn("awsContainerCredentialsFullUri will take precedence."); + } + if (token && tokenFile) { + warn("@aws-sdk/credential-provider-http: " + + "you have set both awsContainerAuthorizationToken and awsContainerAuthorizationTokenFile."); + warn("awsContainerAuthorizationToken will take precedence."); + } + if (full) { + host = full; + } + else if (relative) { + host = `${DEFAULT_LINK_LOCAL_HOST}${relative}`; + } + else { + throw new property_provider_1.CredentialsProviderError(`No HTTP credential provider host provided. +Set AWS_CONTAINER_CREDENTIALS_FULL_URI or AWS_CONTAINER_CREDENTIALS_RELATIVE_URI.`, { logger: options.logger }); + } + const url = new URL(host); + (0, checkUrl_1.checkUrl)(url, options.logger); + const requestHandler = new node_http_handler_1.NodeHttpHandler({ + requestTimeout: options.timeout ?? 1000, + connectionTimeout: options.timeout ?? 1000, + }); + return (0, retry_wrapper_1.retryWrapper)(async () => { + const request = (0, requestHelpers_1.createGetRequest)(url); + if (token) { + request.headers.Authorization = token; + } + else if (tokenFile) { + request.headers.Authorization = (await promises_1.default.readFile(tokenFile)).toString(); + } + try { + const result = await requestHandler.handle(request); + return (0, requestHelpers_1.getCredentials)(result.response).then((creds) => (0, client_1.setCredentialFeature)(creds, "CREDENTIALS_HTTP", "z")); + } + catch (e) { + throw new property_provider_1.CredentialsProviderError(String(e), { logger: options.logger }); + } + }, options.maxRetries ?? 3, options.timeout ?? 
1000); +}; +exports.fromHttp = fromHttp; diff --git a/amplify/functions/downloadDocument/node_modules/@aws-sdk/credential-provider-http/dist-cjs/fromHttp/fromHttpTypes.js b/amplify/functions/downloadDocument/node_modules/@aws-sdk/credential-provider-http/dist-cjs/fromHttp/fromHttpTypes.js new file mode 100644 index 0000000..c8ad2e5 --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@aws-sdk/credential-provider-http/dist-cjs/fromHttp/fromHttpTypes.js @@ -0,0 +1,2 @@ +"use strict"; +Object.defineProperty(exports, "__esModule", { value: true }); diff --git a/amplify/functions/downloadDocument/node_modules/@aws-sdk/credential-provider-http/dist-cjs/fromHttp/requestHelpers.js b/amplify/functions/downloadDocument/node_modules/@aws-sdk/credential-provider-http/dist-cjs/fromHttp/requestHelpers.js new file mode 100644 index 0000000..5229d79 --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@aws-sdk/credential-provider-http/dist-cjs/fromHttp/requestHelpers.js @@ -0,0 +1,54 @@ +"use strict"; +Object.defineProperty(exports, "__esModule", { value: true }); +exports.getCredentials = exports.createGetRequest = void 0; +const property_provider_1 = require("@smithy/property-provider"); +const protocol_http_1 = require("@smithy/protocol-http"); +const smithy_client_1 = require("@smithy/smithy-client"); +const util_stream_1 = require("@smithy/util-stream"); +function createGetRequest(url) { + return new protocol_http_1.HttpRequest({ + protocol: url.protocol, + hostname: url.hostname, + port: Number(url.port), + path: url.pathname, + query: Array.from(url.searchParams.entries()).reduce((acc, [k, v]) => { + acc[k] = v; + return acc; + }, {}), + fragment: url.hash, + }); +} +exports.createGetRequest = createGetRequest; +async function getCredentials(response, logger) { + const stream = (0, util_stream_1.sdkStreamMixin)(response.body); + const str = await stream.transformToString(); + if (response.statusCode === 200) { + const parsed = JSON.parse(str); 
+ if (typeof parsed.AccessKeyId !== "string" || + typeof parsed.SecretAccessKey !== "string" || + typeof parsed.Token !== "string" || + typeof parsed.Expiration !== "string") { + throw new property_provider_1.CredentialsProviderError("HTTP credential provider response not of the required format, an object matching: " + + "{ AccessKeyId: string, SecretAccessKey: string, Token: string, Expiration: string(rfc3339) }", { logger }); + } + return { + accessKeyId: parsed.AccessKeyId, + secretAccessKey: parsed.SecretAccessKey, + sessionToken: parsed.Token, + expiration: (0, smithy_client_1.parseRfc3339DateTime)(parsed.Expiration), + }; + } + if (response.statusCode >= 400 && response.statusCode < 500) { + let parsedBody = {}; + try { + parsedBody = JSON.parse(str); + } + catch (e) { } + throw Object.assign(new property_provider_1.CredentialsProviderError(`Server responded with status: ${response.statusCode}`, { logger }), { + Code: parsedBody.Code, + Message: parsedBody.Message, + }); + } + throw new property_provider_1.CredentialsProviderError(`Server responded with status: ${response.statusCode}`, { logger }); +} +exports.getCredentials = getCredentials; diff --git a/amplify/functions/downloadDocument/node_modules/@aws-sdk/credential-provider-http/dist-cjs/fromHttp/retry-wrapper.js b/amplify/functions/downloadDocument/node_modules/@aws-sdk/credential-provider-http/dist-cjs/fromHttp/retry-wrapper.js new file mode 100644 index 0000000..b99b2ef --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@aws-sdk/credential-provider-http/dist-cjs/fromHttp/retry-wrapper.js @@ -0,0 +1,17 @@ +"use strict"; +Object.defineProperty(exports, "__esModule", { value: true }); +exports.retryWrapper = void 0; +const retryWrapper = (toRetry, maxRetries, delayMs) => { + return async () => { + for (let i = 0; i < maxRetries; ++i) { + try { + return await toRetry(); + } + catch (e) { + await new Promise((resolve) => setTimeout(resolve, delayMs)); + } + } + return await toRetry(); + 
}; +}; +exports.retryWrapper = retryWrapper; diff --git a/amplify/functions/downloadDocument/node_modules/@aws-sdk/credential-provider-http/dist-cjs/index.browser.js b/amplify/functions/downloadDocument/node_modules/@aws-sdk/credential-provider-http/dist-cjs/index.browser.js new file mode 100644 index 0000000..9300747 --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@aws-sdk/credential-provider-http/dist-cjs/index.browser.js @@ -0,0 +1,5 @@ +"use strict"; +Object.defineProperty(exports, "__esModule", { value: true }); +exports.fromHttp = void 0; +var fromHttp_browser_1 = require("./fromHttp/fromHttp.browser"); +Object.defineProperty(exports, "fromHttp", { enumerable: true, get: function () { return fromHttp_browser_1.fromHttp; } }); diff --git a/amplify/functions/downloadDocument/node_modules/@aws-sdk/credential-provider-http/dist-cjs/index.js b/amplify/functions/downloadDocument/node_modules/@aws-sdk/credential-provider-http/dist-cjs/index.js new file mode 100644 index 0000000..0286ea0 --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@aws-sdk/credential-provider-http/dist-cjs/index.js @@ -0,0 +1,5 @@ +"use strict"; +Object.defineProperty(exports, "__esModule", { value: true }); +exports.fromHttp = void 0; +var fromHttp_1 = require("./fromHttp/fromHttp"); +Object.defineProperty(exports, "fromHttp", { enumerable: true, get: function () { return fromHttp_1.fromHttp; } }); diff --git a/amplify/functions/downloadDocument/node_modules/@aws-sdk/credential-provider-http/dist-es/fromHttp/checkUrl.js b/amplify/functions/downloadDocument/node_modules/@aws-sdk/credential-provider-http/dist-es/fromHttp/checkUrl.js new file mode 100644 index 0000000..2a42ed7 --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@aws-sdk/credential-provider-http/dist-es/fromHttp/checkUrl.js @@ -0,0 +1,42 @@ +import { CredentialsProviderError } from "@smithy/property-provider"; +const LOOPBACK_CIDR_IPv4 = "127.0.0.0/8"; +const LOOPBACK_CIDR_IPv6 
= "::1/128"; +const ECS_CONTAINER_HOST = "169.254.170.2"; +const EKS_CONTAINER_HOST_IPv4 = "169.254.170.23"; +const EKS_CONTAINER_HOST_IPv6 = "[fd00:ec2::23]"; +export const checkUrl = (url, logger) => { + if (url.protocol === "https:") { + return; + } + if (url.hostname === ECS_CONTAINER_HOST || + url.hostname === EKS_CONTAINER_HOST_IPv4 || + url.hostname === EKS_CONTAINER_HOST_IPv6) { + return; + } + if (url.hostname.includes("[")) { + if (url.hostname === "[::1]" || url.hostname === "[0000:0000:0000:0000:0000:0000:0000:0001]") { + return; + } + } + else { + if (url.hostname === "localhost") { + return; + } + const ipComponents = url.hostname.split("."); + const inRange = (component) => { + const num = parseInt(component, 10); + return 0 <= num && num <= 255; + }; + if (ipComponents[0] === "127" && + inRange(ipComponents[1]) && + inRange(ipComponents[2]) && + inRange(ipComponents[3]) && + ipComponents.length === 4) { + return; + } + } + throw new CredentialsProviderError(`URL not accepted. 
It must either be HTTPS or match one of the following: + - loopback CIDR 127.0.0.0/8 or [::1/128] + - ECS container host 169.254.170.2 + - EKS container host 169.254.170.23 or [fd00:ec2::23]`, { logger }); +}; diff --git a/amplify/functions/downloadDocument/node_modules/@aws-sdk/credential-provider-http/dist-es/fromHttp/fromHttp.browser.js b/amplify/functions/downloadDocument/node_modules/@aws-sdk/credential-provider-http/dist-es/fromHttp/fromHttp.browser.js new file mode 100644 index 0000000..7189b92 --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@aws-sdk/credential-provider-http/dist-es/fromHttp/fromHttp.browser.js @@ -0,0 +1,27 @@ +import { FetchHttpHandler } from "@smithy/fetch-http-handler"; +import { CredentialsProviderError } from "@smithy/property-provider"; +import { checkUrl } from "./checkUrl"; +import { createGetRequest, getCredentials } from "./requestHelpers"; +import { retryWrapper } from "./retry-wrapper"; +export const fromHttp = (options = {}) => { + options.logger?.debug("@aws-sdk/credential-provider-http - fromHttp"); + let host; + const full = options.credentialsFullUri; + if (full) { + host = full; + } + else { + throw new CredentialsProviderError("No HTTP credential provider host provided.", { logger: options.logger }); + } + const url = new URL(host); + checkUrl(url, options.logger); + const requestHandler = new FetchHttpHandler(); + return retryWrapper(async () => { + const request = createGetRequest(url); + if (options.authorizationToken) { + request.headers.Authorization = options.authorizationToken; + } + const result = await requestHandler.handle(request); + return getCredentials(result.response); + }, options.maxRetries ?? 3, options.timeout ?? 
1000); +}; diff --git a/amplify/functions/downloadDocument/node_modules/@aws-sdk/credential-provider-http/dist-es/fromHttp/fromHttp.js b/amplify/functions/downloadDocument/node_modules/@aws-sdk/credential-provider-http/dist-es/fromHttp/fromHttp.js new file mode 100644 index 0000000..36dd8a3 --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@aws-sdk/credential-provider-http/dist-es/fromHttp/fromHttp.js @@ -0,0 +1,63 @@ +import { setCredentialFeature } from "@aws-sdk/core/client"; +import { NodeHttpHandler } from "@smithy/node-http-handler"; +import { CredentialsProviderError } from "@smithy/property-provider"; +import fs from "fs/promises"; +import { checkUrl } from "./checkUrl"; +import { createGetRequest, getCredentials } from "./requestHelpers"; +import { retryWrapper } from "./retry-wrapper"; +const AWS_CONTAINER_CREDENTIALS_RELATIVE_URI = "AWS_CONTAINER_CREDENTIALS_RELATIVE_URI"; +const DEFAULT_LINK_LOCAL_HOST = "http://169.254.170.2"; +const AWS_CONTAINER_CREDENTIALS_FULL_URI = "AWS_CONTAINER_CREDENTIALS_FULL_URI"; +const AWS_CONTAINER_AUTHORIZATION_TOKEN_FILE = "AWS_CONTAINER_AUTHORIZATION_TOKEN_FILE"; +const AWS_CONTAINER_AUTHORIZATION_TOKEN = "AWS_CONTAINER_AUTHORIZATION_TOKEN"; +export const fromHttp = (options = {}) => { + options.logger?.debug("@aws-sdk/credential-provider-http - fromHttp"); + let host; + const relative = options.awsContainerCredentialsRelativeUri ?? process.env[AWS_CONTAINER_CREDENTIALS_RELATIVE_URI]; + const full = options.awsContainerCredentialsFullUri ?? process.env[AWS_CONTAINER_CREDENTIALS_FULL_URI]; + const token = options.awsContainerAuthorizationToken ?? process.env[AWS_CONTAINER_AUTHORIZATION_TOKEN]; + const tokenFile = options.awsContainerAuthorizationTokenFile ?? process.env[AWS_CONTAINER_AUTHORIZATION_TOKEN_FILE]; + const warn = options.logger?.constructor?.name === "NoOpLogger" || !options.logger ? 
console.warn : options.logger.warn; + if (relative && full) { + warn("@aws-sdk/credential-provider-http: " + + "you have set both awsContainerCredentialsRelativeUri and awsContainerCredentialsFullUri."); + warn("awsContainerCredentialsFullUri will take precedence."); + } + if (token && tokenFile) { + warn("@aws-sdk/credential-provider-http: " + + "you have set both awsContainerAuthorizationToken and awsContainerAuthorizationTokenFile."); + warn("awsContainerAuthorizationToken will take precedence."); + } + if (full) { + host = full; + } + else if (relative) { + host = `${DEFAULT_LINK_LOCAL_HOST}${relative}`; + } + else { + throw new CredentialsProviderError(`No HTTP credential provider host provided. +Set AWS_CONTAINER_CREDENTIALS_FULL_URI or AWS_CONTAINER_CREDENTIALS_RELATIVE_URI.`, { logger: options.logger }); + } + const url = new URL(host); + checkUrl(url, options.logger); + const requestHandler = new NodeHttpHandler({ + requestTimeout: options.timeout ?? 1000, + connectionTimeout: options.timeout ?? 1000, + }); + return retryWrapper(async () => { + const request = createGetRequest(url); + if (token) { + request.headers.Authorization = token; + } + else if (tokenFile) { + request.headers.Authorization = (await fs.readFile(tokenFile)).toString(); + } + try { + const result = await requestHandler.handle(request); + return getCredentials(result.response).then((creds) => setCredentialFeature(creds, "CREDENTIALS_HTTP", "z")); + } + catch (e) { + throw new CredentialsProviderError(String(e), { logger: options.logger }); + } + }, options.maxRetries ?? 3, options.timeout ?? 
1000); +}; diff --git a/amplify/functions/downloadDocument/node_modules/@aws-sdk/credential-provider-http/dist-es/fromHttp/fromHttpTypes.js b/amplify/functions/downloadDocument/node_modules/@aws-sdk/credential-provider-http/dist-es/fromHttp/fromHttpTypes.js new file mode 100644 index 0000000..cb0ff5c --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@aws-sdk/credential-provider-http/dist-es/fromHttp/fromHttpTypes.js @@ -0,0 +1 @@ +export {}; diff --git a/amplify/functions/downloadDocument/node_modules/@aws-sdk/credential-provider-http/dist-es/fromHttp/requestHelpers.js b/amplify/functions/downloadDocument/node_modules/@aws-sdk/credential-provider-http/dist-es/fromHttp/requestHelpers.js new file mode 100644 index 0000000..9e271ce --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@aws-sdk/credential-provider-http/dist-es/fromHttp/requestHelpers.js @@ -0,0 +1,49 @@ +import { CredentialsProviderError } from "@smithy/property-provider"; +import { HttpRequest } from "@smithy/protocol-http"; +import { parseRfc3339DateTime } from "@smithy/smithy-client"; +import { sdkStreamMixin } from "@smithy/util-stream"; +export function createGetRequest(url) { + return new HttpRequest({ + protocol: url.protocol, + hostname: url.hostname, + port: Number(url.port), + path: url.pathname, + query: Array.from(url.searchParams.entries()).reduce((acc, [k, v]) => { + acc[k] = v; + return acc; + }, {}), + fragment: url.hash, + }); +} +export async function getCredentials(response, logger) { + const stream = sdkStreamMixin(response.body); + const str = await stream.transformToString(); + if (response.statusCode === 200) { + const parsed = JSON.parse(str); + if (typeof parsed.AccessKeyId !== "string" || + typeof parsed.SecretAccessKey !== "string" || + typeof parsed.Token !== "string" || + typeof parsed.Expiration !== "string") { + throw new CredentialsProviderError("HTTP credential provider response not of the required format, an object matching: " + + "{ 
AccessKeyId: string, SecretAccessKey: string, Token: string, Expiration: string(rfc3339) }", { logger }); + } + return { + accessKeyId: parsed.AccessKeyId, + secretAccessKey: parsed.SecretAccessKey, + sessionToken: parsed.Token, + expiration: parseRfc3339DateTime(parsed.Expiration), + }; + } + if (response.statusCode >= 400 && response.statusCode < 500) { + let parsedBody = {}; + try { + parsedBody = JSON.parse(str); + } + catch (e) { } + throw Object.assign(new CredentialsProviderError(`Server responded with status: ${response.statusCode}`, { logger }), { + Code: parsedBody.Code, + Message: parsedBody.Message, + }); + } + throw new CredentialsProviderError(`Server responded with status: ${response.statusCode}`, { logger }); +} diff --git a/amplify/functions/downloadDocument/node_modules/@aws-sdk/credential-provider-http/dist-es/fromHttp/retry-wrapper.js b/amplify/functions/downloadDocument/node_modules/@aws-sdk/credential-provider-http/dist-es/fromHttp/retry-wrapper.js new file mode 100644 index 0000000..7006f3c --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@aws-sdk/credential-provider-http/dist-es/fromHttp/retry-wrapper.js @@ -0,0 +1,13 @@ +export const retryWrapper = (toRetry, maxRetries, delayMs) => { + return async () => { + for (let i = 0; i < maxRetries; ++i) { + try { + return await toRetry(); + } + catch (e) { + await new Promise((resolve) => setTimeout(resolve, delayMs)); + } + } + return await toRetry(); + }; +}; diff --git a/amplify/functions/downloadDocument/node_modules/@aws-sdk/credential-provider-http/dist-es/index.browser.js b/amplify/functions/downloadDocument/node_modules/@aws-sdk/credential-provider-http/dist-es/index.browser.js new file mode 100644 index 0000000..98204c5 --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@aws-sdk/credential-provider-http/dist-es/index.browser.js @@ -0,0 +1 @@ +export { fromHttp } from "./fromHttp/fromHttp.browser"; diff --git 
a/amplify/functions/downloadDocument/node_modules/@aws-sdk/credential-provider-http/dist-es/index.js b/amplify/functions/downloadDocument/node_modules/@aws-sdk/credential-provider-http/dist-es/index.js new file mode 100644 index 0000000..2911386 --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@aws-sdk/credential-provider-http/dist-es/index.js @@ -0,0 +1 @@ +export { fromHttp } from "./fromHttp/fromHttp"; diff --git a/amplify/functions/downloadDocument/node_modules/@aws-sdk/credential-provider-http/dist-types/fromHttp/checkUrl.d.ts b/amplify/functions/downloadDocument/node_modules/@aws-sdk/credential-provider-http/dist-types/fromHttp/checkUrl.d.ts new file mode 100644 index 0000000..933b12c --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@aws-sdk/credential-provider-http/dist-types/fromHttp/checkUrl.d.ts @@ -0,0 +1,9 @@ +import { Logger } from "@smithy/types"; +/** + * @internal + * + * @param url - to be validated. + * @param logger - passed to CredentialsProviderError. + * @throws if not acceptable to this provider. + */ +export declare const checkUrl: (url: URL, logger?: Logger) => void; diff --git a/amplify/functions/downloadDocument/node_modules/@aws-sdk/credential-provider-http/dist-types/fromHttp/fromHttp.browser.d.ts b/amplify/functions/downloadDocument/node_modules/@aws-sdk/credential-provider-http/dist-types/fromHttp/fromHttp.browser.d.ts new file mode 100644 index 0000000..cb3a03b --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@aws-sdk/credential-provider-http/dist-types/fromHttp/fromHttp.browser.d.ts @@ -0,0 +1,6 @@ +import { AwsCredentialIdentityProvider } from "@smithy/types"; +import type { FromHttpOptions } from "./fromHttpTypes"; +/** + * Creates a provider that gets credentials via HTTP request. 
+ */ +export declare const fromHttp: (options?: FromHttpOptions) => AwsCredentialIdentityProvider; diff --git a/amplify/functions/downloadDocument/node_modules/@aws-sdk/credential-provider-http/dist-types/fromHttp/fromHttp.d.ts b/amplify/functions/downloadDocument/node_modules/@aws-sdk/credential-provider-http/dist-types/fromHttp/fromHttp.d.ts new file mode 100644 index 0000000..cb3a03b --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@aws-sdk/credential-provider-http/dist-types/fromHttp/fromHttp.d.ts @@ -0,0 +1,6 @@ +import { AwsCredentialIdentityProvider } from "@smithy/types"; +import type { FromHttpOptions } from "./fromHttpTypes"; +/** + * Creates a provider that gets credentials via HTTP request. + */ +export declare const fromHttp: (options?: FromHttpOptions) => AwsCredentialIdentityProvider; diff --git a/amplify/functions/downloadDocument/node_modules/@aws-sdk/credential-provider-http/dist-types/fromHttp/fromHttpTypes.d.ts b/amplify/functions/downloadDocument/node_modules/@aws-sdk/credential-provider-http/dist-types/fromHttp/fromHttpTypes.d.ts new file mode 100644 index 0000000..b751ded --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@aws-sdk/credential-provider-http/dist-types/fromHttp/fromHttpTypes.d.ts @@ -0,0 +1,69 @@ +import type { CredentialProviderOptions } from "@aws-sdk/types"; +/** + * @public + * + * Input for the fromHttp function in the HTTP Credentials Provider for Node.js. + */ +export interface FromHttpOptions extends CredentialProviderOptions { + /** + * If this value is provided, it will be used as-is. + * + * For browser environments, use instead {@link credentialsFullUri}. + */ + awsContainerCredentialsFullUri?: string; + /** + * If this value is provided instead of the full URI, it + * will be appended to the default link local host of 169.254.170.2. + * + * Not supported in browsers. 
+ */ + awsContainerCredentialsRelativeUri?: string; + /** + * Will be read on each credentials request to + * add an Authorization request header value. + * + * Not supported in browsers. + */ + awsContainerAuthorizationTokenFile?: string; + /** + * An alternative to awsContainerAuthorizationTokenFile, + * this is the token value itself. + * + * For browser environments, use instead {@link authorizationToken}. + */ + awsContainerAuthorizationToken?: string; + /** + * BROWSER ONLY. + * + * In browsers, a relative URI is not allowed, and a full URI must be provided. + * HTTPS is required. + * + * This value is required for the browser environment. + */ + credentialsFullUri?: string; + /** + * BROWSER ONLY. + * + * Providing this value will set an "Authorization" request + * header value on the GET request. + */ + authorizationToken?: string; + /** + * Default is 3 retry attempts or 4 total attempts. + */ + maxRetries?: number; + /** + * Default is 1000ms. Time in milliseconds to spend waiting between retry attempts. 
+ */ + timeout?: number; +} +/** + * @public + */ +export type HttpProviderCredentials = { + AccessKeyId: string; + SecretAccessKey: string; + Token: string; + AccountId?: string; + Expiration: string; +}; diff --git a/amplify/functions/downloadDocument/node_modules/@aws-sdk/credential-provider-http/dist-types/fromHttp/requestHelpers.d.ts b/amplify/functions/downloadDocument/node_modules/@aws-sdk/credential-provider-http/dist-types/fromHttp/requestHelpers.d.ts new file mode 100644 index 0000000..6d1c16e --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@aws-sdk/credential-provider-http/dist-types/fromHttp/requestHelpers.d.ts @@ -0,0 +1,11 @@ +import { AwsCredentialIdentity } from "@aws-sdk/types"; +import { HttpRequest } from "@smithy/protocol-http"; +import { HttpResponse, Logger } from "@smithy/types"; +/** + * @internal + */ +export declare function createGetRequest(url: URL): HttpRequest; +/** + * @internal + */ +export declare function getCredentials(response: HttpResponse, logger?: Logger): Promise; diff --git a/amplify/functions/downloadDocument/node_modules/@aws-sdk/credential-provider-http/dist-types/fromHttp/retry-wrapper.d.ts b/amplify/functions/downloadDocument/node_modules/@aws-sdk/credential-provider-http/dist-types/fromHttp/retry-wrapper.d.ts new file mode 100644 index 0000000..bf63add --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@aws-sdk/credential-provider-http/dist-types/fromHttp/retry-wrapper.d.ts @@ -0,0 +1,10 @@ +/** + * @internal + */ +export interface RetryableProvider { + (): Promise; +} +/** + * @internal + */ +export declare const retryWrapper: (toRetry: RetryableProvider, maxRetries: number, delayMs: number) => RetryableProvider; diff --git a/amplify/functions/downloadDocument/node_modules/@aws-sdk/credential-provider-http/dist-types/index.browser.d.ts b/amplify/functions/downloadDocument/node_modules/@aws-sdk/credential-provider-http/dist-types/index.browser.d.ts new file mode 100644 index 
0000000..2a9e4ec --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@aws-sdk/credential-provider-http/dist-types/index.browser.d.ts @@ -0,0 +1,2 @@ +export { fromHttp } from "./fromHttp/fromHttp.browser"; +export type { FromHttpOptions, HttpProviderCredentials } from "./fromHttp/fromHttpTypes"; diff --git a/amplify/functions/downloadDocument/node_modules/@aws-sdk/credential-provider-http/dist-types/index.d.ts b/amplify/functions/downloadDocument/node_modules/@aws-sdk/credential-provider-http/dist-types/index.d.ts new file mode 100644 index 0000000..b1e9985 --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@aws-sdk/credential-provider-http/dist-types/index.d.ts @@ -0,0 +1,2 @@ +export { fromHttp } from "./fromHttp/fromHttp"; +export type { FromHttpOptions, HttpProviderCredentials } from "./fromHttp/fromHttpTypes"; diff --git a/amplify/functions/downloadDocument/node_modules/@aws-sdk/credential-provider-http/dist-types/ts3.4/fromHttp/checkUrl.d.ts b/amplify/functions/downloadDocument/node_modules/@aws-sdk/credential-provider-http/dist-types/ts3.4/fromHttp/checkUrl.d.ts new file mode 100644 index 0000000..9f518b0 --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@aws-sdk/credential-provider-http/dist-types/ts3.4/fromHttp/checkUrl.d.ts @@ -0,0 +1,2 @@ +import { Logger } from "@smithy/types"; +export declare const checkUrl: (url: URL, logger?: Logger) => void; diff --git a/amplify/functions/downloadDocument/node_modules/@aws-sdk/credential-provider-http/dist-types/ts3.4/fromHttp/fromHttp.browser.d.ts b/amplify/functions/downloadDocument/node_modules/@aws-sdk/credential-provider-http/dist-types/ts3.4/fromHttp/fromHttp.browser.d.ts new file mode 100644 index 0000000..00f1506 --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@aws-sdk/credential-provider-http/dist-types/ts3.4/fromHttp/fromHttp.browser.d.ts @@ -0,0 +1,5 @@ +import { AwsCredentialIdentityProvider } from "@smithy/types"; +import { 
FromHttpOptions } from "./fromHttpTypes"; +export declare const fromHttp: ( + options?: FromHttpOptions +) => AwsCredentialIdentityProvider; diff --git a/amplify/functions/downloadDocument/node_modules/@aws-sdk/credential-provider-http/dist-types/ts3.4/fromHttp/fromHttp.d.ts b/amplify/functions/downloadDocument/node_modules/@aws-sdk/credential-provider-http/dist-types/ts3.4/fromHttp/fromHttp.d.ts new file mode 100644 index 0000000..00f1506 --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@aws-sdk/credential-provider-http/dist-types/ts3.4/fromHttp/fromHttp.d.ts @@ -0,0 +1,5 @@ +import { AwsCredentialIdentityProvider } from "@smithy/types"; +import { FromHttpOptions } from "./fromHttpTypes"; +export declare const fromHttp: ( + options?: FromHttpOptions +) => AwsCredentialIdentityProvider; diff --git a/amplify/functions/downloadDocument/node_modules/@aws-sdk/credential-provider-http/dist-types/ts3.4/fromHttp/fromHttpTypes.d.ts b/amplify/functions/downloadDocument/node_modules/@aws-sdk/credential-provider-http/dist-types/ts3.4/fromHttp/fromHttpTypes.d.ts new file mode 100644 index 0000000..767b6b0 --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@aws-sdk/credential-provider-http/dist-types/ts3.4/fromHttp/fromHttpTypes.d.ts @@ -0,0 +1,18 @@ +import { CredentialProviderOptions } from "@aws-sdk/types"; +export interface FromHttpOptions extends CredentialProviderOptions { + awsContainerCredentialsFullUri?: string; + awsContainerCredentialsRelativeUri?: string; + awsContainerAuthorizationTokenFile?: string; + awsContainerAuthorizationToken?: string; + credentialsFullUri?: string; + authorizationToken?: string; + maxRetries?: number; + timeout?: number; +} +export type HttpProviderCredentials = { + AccessKeyId: string; + SecretAccessKey: string; + Token: string; + AccountId?: string; + Expiration: string; +}; diff --git 
a/amplify/functions/downloadDocument/node_modules/@aws-sdk/credential-provider-http/dist-types/ts3.4/fromHttp/requestHelpers.d.ts b/amplify/functions/downloadDocument/node_modules/@aws-sdk/credential-provider-http/dist-types/ts3.4/fromHttp/requestHelpers.d.ts new file mode 100644 index 0000000..68a3285 --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@aws-sdk/credential-provider-http/dist-types/ts3.4/fromHttp/requestHelpers.d.ts @@ -0,0 +1,8 @@ +import { AwsCredentialIdentity } from "@aws-sdk/types"; +import { HttpRequest } from "@smithy/protocol-http"; +import { HttpResponse, Logger } from "@smithy/types"; +export declare function createGetRequest(url: URL): HttpRequest; +export declare function getCredentials( + response: HttpResponse, + logger?: Logger +): Promise; diff --git a/amplify/functions/downloadDocument/node_modules/@aws-sdk/credential-provider-http/dist-types/ts3.4/fromHttp/retry-wrapper.d.ts b/amplify/functions/downloadDocument/node_modules/@aws-sdk/credential-provider-http/dist-types/ts3.4/fromHttp/retry-wrapper.d.ts new file mode 100644 index 0000000..f992038 --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@aws-sdk/credential-provider-http/dist-types/ts3.4/fromHttp/retry-wrapper.d.ts @@ -0,0 +1,8 @@ +export interface RetryableProvider { + (): Promise; +} +export declare const retryWrapper: ( + toRetry: RetryableProvider, + maxRetries: number, + delayMs: number +) => RetryableProvider; diff --git a/amplify/functions/downloadDocument/node_modules/@aws-sdk/credential-provider-http/dist-types/ts3.4/index.browser.d.ts b/amplify/functions/downloadDocument/node_modules/@aws-sdk/credential-provider-http/dist-types/ts3.4/index.browser.d.ts new file mode 100644 index 0000000..40696b9 --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@aws-sdk/credential-provider-http/dist-types/ts3.4/index.browser.d.ts @@ -0,0 +1,5 @@ +export { fromHttp } from "./fromHttp/fromHttp.browser"; +export { + FromHttpOptions, + 
HttpProviderCredentials, +} from "./fromHttp/fromHttpTypes"; diff --git a/amplify/functions/downloadDocument/node_modules/@aws-sdk/credential-provider-http/dist-types/ts3.4/index.d.ts b/amplify/functions/downloadDocument/node_modules/@aws-sdk/credential-provider-http/dist-types/ts3.4/index.d.ts new file mode 100644 index 0000000..560256f --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@aws-sdk/credential-provider-http/dist-types/ts3.4/index.d.ts @@ -0,0 +1,5 @@ +export { fromHttp } from "./fromHttp/fromHttp"; +export { + FromHttpOptions, + HttpProviderCredentials, +} from "./fromHttp/fromHttpTypes"; diff --git a/amplify/functions/downloadDocument/node_modules/@aws-sdk/credential-provider-http/package.json b/amplify/functions/downloadDocument/node_modules/@aws-sdk/credential-provider-http/package.json new file mode 100644 index 0000000..2ad154b --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@aws-sdk/credential-provider-http/package.json @@ -0,0 +1,69 @@ +{ + "name": "@aws-sdk/credential-provider-http", + "version": "3.799.0", + "description": "AWS credential provider for containers and HTTP sources", + "main": "./dist-cjs/index.js", + "module": "./dist-es/index.js", + "browser": "./dist-es/index.browser.js", + "react-native": "./dist-es/index.browser.js", + "scripts": { + "build": "concurrently 'yarn:build:cjs' 'yarn:build:es' 'yarn:build:types'", + "build:cjs": "node ../../scripts/compilation/inline credential-provider-http", + "build:es": "tsc -p tsconfig.es.json", + "build:include:deps": "lerna run --scope $npm_package_name --include-dependencies build", + "build:types": "tsc -p tsconfig.types.json", + "build:types:downlevel": "downlevel-dts dist-types dist-types/ts3.4", + "clean": "rimraf ./dist-* && rimraf *.tsbuildinfo", + "test": "yarn g:vitest run", + "test:watch": "yarn g:vitest watch" + }, + "keywords": [ + "aws", + "credentials" + ], + "author": { + "name": "AWS SDK for JavaScript Team", + "url": 
"https://aws.amazon.com/javascript/" + }, + "license": "Apache-2.0", + "dependencies": { + "@aws-sdk/core": "3.799.0", + "@aws-sdk/types": "3.775.0", + "@smithy/fetch-http-handler": "^5.0.2", + "@smithy/node-http-handler": "^4.0.4", + "@smithy/property-provider": "^4.0.2", + "@smithy/protocol-http": "^5.1.0", + "@smithy/smithy-client": "^4.2.1", + "@smithy/types": "^4.2.0", + "@smithy/util-stream": "^4.2.0", + "tslib": "^2.6.2" + }, + "devDependencies": { + "@tsconfig/recommended": "1.0.1", + "@types/node": "^18.19.69", + "concurrently": "7.0.0", + "downlevel-dts": "0.10.1", + "rimraf": "3.0.2", + "typescript": "~5.2.2" + }, + "types": "./dist-types/index.d.ts", + "engines": { + "node": ">=18.0.0" + }, + "typesVersions": { + "<4.0": { + "dist-types/*": [ + "dist-types/ts3.4/*" + ] + } + }, + "files": [ + "dist-*/**" + ], + "homepage": "https://github.com/aws/aws-sdk-js-v3/tree/main/packages/credential-provider-http", + "repository": { + "type": "git", + "url": "https://github.com/aws/aws-sdk-js-v3.git", + "directory": "packages/credential-provider-http" + } +} diff --git a/amplify/functions/downloadDocument/node_modules/@aws-sdk/credential-provider-ini/LICENSE b/amplify/functions/downloadDocument/node_modules/@aws-sdk/credential-provider-ini/LICENSE new file mode 100644 index 0000000..7b6491b --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@aws-sdk/credential-provider-ini/LICENSE @@ -0,0 +1,201 @@ +Apache License + Version 2.0, January 2004 + http://www.apache.org/licenses/ + + TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION + + 1. Definitions. + + "License" shall mean the terms and conditions for use, reproduction, + and distribution as defined by Sections 1 through 9 of this document. + + "Licensor" shall mean the copyright owner or entity authorized by + the copyright owner that is granting the License. 
+ + "Legal Entity" shall mean the union of the acting entity and all + other entities that control, are controlled by, or are under common + control with that entity. For the purposes of this definition, + "control" means (i) the power, direct or indirect, to cause the + direction or management of such entity, whether by contract or + otherwise, or (ii) ownership of fifty percent (50%) or more of the + outstanding shares, or (iii) beneficial ownership of such entity. + + "You" (or "Your") shall mean an individual or Legal Entity + exercising permissions granted by this License. + + "Source" form shall mean the preferred form for making modifications, + including but not limited to software source code, documentation + source, and configuration files. + + "Object" form shall mean any form resulting from mechanical + transformation or translation of a Source form, including but + not limited to compiled object code, generated documentation, + and conversions to other media types. + + "Work" shall mean the work of authorship, whether in Source or + Object form, made available under the License, as indicated by a + copyright notice that is included in or attached to the work + (an example is provided in the Appendix below). + + "Derivative Works" shall mean any work, whether in Source or Object + form, that is based on (or derived from) the Work and for which the + editorial revisions, annotations, elaborations, or other modifications + represent, as a whole, an original work of authorship. For the purposes + of this License, Derivative Works shall not include works that remain + separable from, or merely link (or bind by name) to the interfaces of, + the Work and Derivative Works thereof. 
+ + "Contribution" shall mean any work of authorship, including + the original version of the Work and any modifications or additions + to that Work or Derivative Works thereof, that is intentionally + submitted to Licensor for inclusion in the Work by the copyright owner + or by an individual or Legal Entity authorized to submit on behalf of + the copyright owner. For the purposes of this definition, "submitted" + means any form of electronic, verbal, or written communication sent + to the Licensor or its representatives, including but not limited to + communication on electronic mailing lists, source code control systems, + and issue tracking systems that are managed by, or on behalf of, the + Licensor for the purpose of discussing and improving the Work, but + excluding communication that is conspicuously marked or otherwise + designated in writing by the copyright owner as "Not a Contribution." + + "Contributor" shall mean Licensor and any individual or Legal Entity + on behalf of whom a Contribution has been received by Licensor and + subsequently incorporated within the Work. + + 2. Grant of Copyright License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + copyright license to reproduce, prepare Derivative Works of, + publicly display, publicly perform, sublicense, and distribute the + Work and such Derivative Works in Source or Object form. + + 3. Grant of Patent License. 
Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + (except as stated in this section) patent license to make, have made, + use, offer to sell, sell, import, and otherwise transfer the Work, + where such license applies only to those patent claims licensable + by such Contributor that are necessarily infringed by their + Contribution(s) alone or by combination of their Contribution(s) + with the Work to which such Contribution(s) was submitted. If You + institute patent litigation against any entity (including a + cross-claim or counterclaim in a lawsuit) alleging that the Work + or a Contribution incorporated within the Work constitutes direct + or contributory patent infringement, then any patent licenses + granted to You under this License for that Work shall terminate + as of the date such litigation is filed. + + 4. Redistribution. You may reproduce and distribute copies of the + Work or Derivative Works thereof in any medium, with or without + modifications, and in Source or Object form, provided that You + meet the following conditions: + + (a) You must give any other recipients of the Work or + Derivative Works a copy of this License; and + + (b) You must cause any modified files to carry prominent notices + stating that You changed the files; and + + (c) You must retain, in the Source form of any Derivative Works + that You distribute, all copyright, patent, trademark, and + attribution notices from the Source form of the Work, + excluding those notices that do not pertain to any part of + the Derivative Works; and + + (d) If the Work includes a "NOTICE" text file as part of its + distribution, then any Derivative Works that You distribute must + include a readable copy of the attribution notices contained + within such NOTICE file, excluding those notices that do not + pertain to any part of the Derivative Works, in at least one + of 
the following places: within a NOTICE text file distributed + as part of the Derivative Works; within the Source form or + documentation, if provided along with the Derivative Works; or, + within a display generated by the Derivative Works, if and + wherever such third-party notices normally appear. The contents + of the NOTICE file are for informational purposes only and + do not modify the License. You may add Your own attribution + notices within Derivative Works that You distribute, alongside + or as an addendum to the NOTICE text from the Work, provided + that such additional attribution notices cannot be construed + as modifying the License. + + You may add Your own copyright statement to Your modifications and + may provide additional or different license terms and conditions + for use, reproduction, or distribution of Your modifications, or + for any such Derivative Works as a whole, provided Your use, + reproduction, and distribution of the Work otherwise complies with + the conditions stated in this License. + + 5. Submission of Contributions. Unless You explicitly state otherwise, + any Contribution intentionally submitted for inclusion in the Work + by You to the Licensor shall be under the terms and conditions of + this License, without any additional terms or conditions. + Notwithstanding the above, nothing herein shall supersede or modify + the terms of any separate license agreement you may have executed + with Licensor regarding such Contributions. + + 6. Trademarks. This License does not grant permission to use the trade + names, trademarks, service marks, or product names of the Licensor, + except as required for reasonable and customary use in describing the + origin of the Work and reproducing the content of the NOTICE file. + + 7. Disclaimer of Warranty. 
Unless required by applicable law or + agreed to in writing, Licensor provides the Work (and each + Contributor provides its Contributions) on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or + implied, including, without limitation, any warranties or conditions + of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A + PARTICULAR PURPOSE. You are solely responsible for determining the + appropriateness of using or redistributing the Work and assume any + risks associated with Your exercise of permissions under this License. + + 8. Limitation of Liability. In no event and under no legal theory, + whether in tort (including negligence), contract, or otherwise, + unless required by applicable law (such as deliberate and grossly + negligent acts) or agreed to in writing, shall any Contributor be + liable to You for damages, including any direct, indirect, special, + incidental, or consequential damages of any character arising as a + result of this License or out of the use or inability to use the + Work (including but not limited to damages for loss of goodwill, + work stoppage, computer failure or malfunction, or any and all + other commercial damages or losses), even if such Contributor + has been advised of the possibility of such damages. + + 9. Accepting Warranty or Additional Liability. While redistributing + the Work or Derivative Works thereof, You may choose to offer, + and charge a fee for, acceptance of support, warranty, indemnity, + or other liability obligations and/or rights consistent with this + License. However, in accepting such obligations, You may act only + on Your own behalf and on Your sole responsibility, not on behalf + of any other Contributor, and only if You agree to indemnify, + defend, and hold each Contributor harmless for any liability + incurred by, or claims asserted against, such Contributor by reason + of your accepting any such warranty or additional liability. 
+ + END OF TERMS AND CONDITIONS + + APPENDIX: How to apply the Apache License to your work. + + To apply the Apache License to your work, attach the following + boilerplate notice, with the fields enclosed by brackets "{}" + replaced with your own identifying information. (Don't include + the brackets!) The text should be enclosed in the appropriate + comment syntax for the file format. We also recommend that a + file or class name and description of purpose be included on the + same "printed page" as the copyright notice for easier + identification within third-party archives. + + Copyright 2018-2020 Amazon.com, Inc. or its affiliates. All Rights Reserved. + + Licensed under the Apache License, Version 2.0 (the "License"); + you may not use this file except in compliance with the License. + You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + + Unless required by applicable law or agreed to in writing, software + distributed under the License is distributed on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + See the License for the specific language governing permissions and + limitations under the License. 
\ No newline at end of file diff --git a/amplify/functions/downloadDocument/node_modules/@aws-sdk/credential-provider-ini/README.md b/amplify/functions/downloadDocument/node_modules/@aws-sdk/credential-provider-ini/README.md new file mode 100644 index 0000000..b4f3af1 --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@aws-sdk/credential-provider-ini/README.md @@ -0,0 +1,11 @@ +# @aws-sdk/credential-provider-ini + +[![NPM version](https://img.shields.io/npm/v/@aws-sdk/credential-provider-ini/latest.svg)](https://www.npmjs.com/package/@aws-sdk/credential-provider-ini) +[![NPM downloads](https://img.shields.io/npm/dm/@aws-sdk/credential-provider-ini.svg)](https://www.npmjs.com/package/@aws-sdk/credential-provider-ini) + +> An internal package + +## Usage + +You probably shouldn't, at least directly. Please use [@aws-sdk/credential-providers](https://www.npmjs.com/package/@aws-sdk/credential-providers) +instead. diff --git a/amplify/functions/downloadDocument/node_modules/@aws-sdk/credential-provider-ini/dist-cjs/index.js b/amplify/functions/downloadDocument/node_modules/@aws-sdk/credential-provider-ini/dist-cjs/index.js new file mode 100644 index 0000000..e9b6049 --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@aws-sdk/credential-provider-ini/dist-cjs/index.js @@ -0,0 +1,276 @@ +"use strict"; +var __create = Object.create; +var __defProp = Object.defineProperty; +var __getOwnPropDesc = Object.getOwnPropertyDescriptor; +var __getOwnPropNames = Object.getOwnPropertyNames; +var __getProtoOf = Object.getPrototypeOf; +var __hasOwnProp = Object.prototype.hasOwnProperty; +var __name = (target, value) => __defProp(target, "name", { value, configurable: true }); +var __export = (target, all) => { + for (var name in all) + __defProp(target, name, { get: all[name], enumerable: true }); +}; +var __copyProps = (to, from, except, desc) => { + if (from && typeof from === "object" || typeof from === "function") { + for (let key of 
__getOwnPropNames(from)) + if (!__hasOwnProp.call(to, key) && key !== except) + __defProp(to, key, { get: () => from[key], enumerable: !(desc = __getOwnPropDesc(from, key)) || desc.enumerable }); + } + return to; +}; +var __toESM = (mod, isNodeMode, target) => (target = mod != null ? __create(__getProtoOf(mod)) : {}, __copyProps( + // If the importer is in node compatibility mode or this is not an ESM + // file that has been converted to a CommonJS file using a Babel- + // compatible transform (i.e. "__esModule" has not been set), then set + // "default" to the CommonJS "module.exports" for node compatibility. + isNodeMode || !mod || !mod.__esModule ? __defProp(target, "default", { value: mod, enumerable: true }) : target, + mod +)); +var __toCommonJS = (mod) => __copyProps(__defProp({}, "__esModule", { value: true }), mod); + +// src/index.ts +var index_exports = {}; +__export(index_exports, { + fromIni: () => fromIni +}); +module.exports = __toCommonJS(index_exports); + +// src/fromIni.ts + + +// src/resolveProfileData.ts + + +// src/resolveAssumeRoleCredentials.ts + + +var import_shared_ini_file_loader = require("@smithy/shared-ini-file-loader"); + +// src/resolveCredentialSource.ts +var import_client = require("@aws-sdk/core/client"); +var import_property_provider = require("@smithy/property-provider"); +var resolveCredentialSource = /* @__PURE__ */ __name((credentialSource, profileName, logger) => { + const sourceProvidersMap = { + EcsContainer: /* @__PURE__ */ __name(async (options) => { + const { fromHttp } = await Promise.resolve().then(() => __toESM(require("@aws-sdk/credential-provider-http"))); + const { fromContainerMetadata } = await Promise.resolve().then(() => __toESM(require("@smithy/credential-provider-imds"))); + logger?.debug("@aws-sdk/credential-provider-ini - credential_source is EcsContainer"); + return async () => (0, import_property_provider.chain)(fromHttp(options ?? 
{}), fromContainerMetadata(options))().then(setNamedProvider); + }, "EcsContainer"), + Ec2InstanceMetadata: /* @__PURE__ */ __name(async (options) => { + logger?.debug("@aws-sdk/credential-provider-ini - credential_source is Ec2InstanceMetadata"); + const { fromInstanceMetadata } = await Promise.resolve().then(() => __toESM(require("@smithy/credential-provider-imds"))); + return async () => fromInstanceMetadata(options)().then(setNamedProvider); + }, "Ec2InstanceMetadata"), + Environment: /* @__PURE__ */ __name(async (options) => { + logger?.debug("@aws-sdk/credential-provider-ini - credential_source is Environment"); + const { fromEnv } = await Promise.resolve().then(() => __toESM(require("@aws-sdk/credential-provider-env"))); + return async () => fromEnv(options)().then(setNamedProvider); + }, "Environment") + }; + if (credentialSource in sourceProvidersMap) { + return sourceProvidersMap[credentialSource]; + } else { + throw new import_property_provider.CredentialsProviderError( + `Unsupported credential source in profile ${profileName}. 
Got ${credentialSource}, expected EcsContainer or Ec2InstanceMetadata or Environment.`, + { logger } + ); + } +}, "resolveCredentialSource"); +var setNamedProvider = /* @__PURE__ */ __name((creds) => (0, import_client.setCredentialFeature)(creds, "CREDENTIALS_PROFILE_NAMED_PROVIDER", "p"), "setNamedProvider"); + +// src/resolveAssumeRoleCredentials.ts +var isAssumeRoleProfile = /* @__PURE__ */ __name((arg, { profile = "default", logger } = {}) => { + return Boolean(arg) && typeof arg === "object" && typeof arg.role_arn === "string" && ["undefined", "string"].indexOf(typeof arg.role_session_name) > -1 && ["undefined", "string"].indexOf(typeof arg.external_id) > -1 && ["undefined", "string"].indexOf(typeof arg.mfa_serial) > -1 && (isAssumeRoleWithSourceProfile(arg, { profile, logger }) || isCredentialSourceProfile(arg, { profile, logger })); +}, "isAssumeRoleProfile"); +var isAssumeRoleWithSourceProfile = /* @__PURE__ */ __name((arg, { profile, logger }) => { + const withSourceProfile = typeof arg.source_profile === "string" && typeof arg.credential_source === "undefined"; + if (withSourceProfile) { + logger?.debug?.(` ${profile} isAssumeRoleWithSourceProfile source_profile=${arg.source_profile}`); + } + return withSourceProfile; +}, "isAssumeRoleWithSourceProfile"); +var isCredentialSourceProfile = /* @__PURE__ */ __name((arg, { profile, logger }) => { + const withProviderProfile = typeof arg.credential_source === "string" && typeof arg.source_profile === "undefined"; + if (withProviderProfile) { + logger?.debug?.(` ${profile} isCredentialSourceProfile credential_source=${arg.credential_source}`); + } + return withProviderProfile; +}, "isCredentialSourceProfile"); +var resolveAssumeRoleCredentials = /* @__PURE__ */ __name(async (profileName, profiles, options, visitedProfiles = {}) => { + options.logger?.debug("@aws-sdk/credential-provider-ini - resolveAssumeRoleCredentials (STS)"); + const profileData = profiles[profileName]; + const { source_profile, region } = 
profileData; + if (!options.roleAssumer) { + const { getDefaultRoleAssumer } = await Promise.resolve().then(() => __toESM(require("@aws-sdk/nested-clients/sts"))); + options.roleAssumer = getDefaultRoleAssumer( + { + ...options.clientConfig, + credentialProviderLogger: options.logger, + parentClientConfig: { + ...options?.parentClientConfig, + region: region ?? options?.parentClientConfig?.region + } + }, + options.clientPlugins + ); + } + if (source_profile && source_profile in visitedProfiles) { + throw new import_property_provider.CredentialsProviderError( + `Detected a cycle attempting to resolve credentials for profile ${(0, import_shared_ini_file_loader.getProfileName)(options)}. Profiles visited: ` + Object.keys(visitedProfiles).join(", "), + { logger: options.logger } + ); + } + options.logger?.debug( + `@aws-sdk/credential-provider-ini - finding credential resolver using ${source_profile ? `source_profile=[${source_profile}]` : `profile=[${profileName}]`}` + ); + const sourceCredsProvider = source_profile ? resolveProfileData( + source_profile, + profiles, + options, + { + ...visitedProfiles, + [source_profile]: true + }, + isCredentialSourceWithoutRoleArn(profiles[source_profile] ?? 
{}) + ) : (await resolveCredentialSource(profileData.credential_source, profileName, options.logger)(options))(); + if (isCredentialSourceWithoutRoleArn(profileData)) { + return sourceCredsProvider.then((creds) => (0, import_client.setCredentialFeature)(creds, "CREDENTIALS_PROFILE_SOURCE_PROFILE", "o")); + } else { + const params = { + RoleArn: profileData.role_arn, + RoleSessionName: profileData.role_session_name || `aws-sdk-js-${Date.now()}`, + ExternalId: profileData.external_id, + DurationSeconds: parseInt(profileData.duration_seconds || "3600", 10) + }; + const { mfa_serial } = profileData; + if (mfa_serial) { + if (!options.mfaCodeProvider) { + throw new import_property_provider.CredentialsProviderError( + `Profile ${profileName} requires multi-factor authentication, but no MFA code callback was provided.`, + { logger: options.logger, tryNextLink: false } + ); + } + params.SerialNumber = mfa_serial; + params.TokenCode = await options.mfaCodeProvider(mfa_serial); + } + const sourceCreds = await sourceCredsProvider; + return options.roleAssumer(sourceCreds, params).then( + (creds) => (0, import_client.setCredentialFeature)(creds, "CREDENTIALS_PROFILE_SOURCE_PROFILE", "o") + ); + } +}, "resolveAssumeRoleCredentials"); +var isCredentialSourceWithoutRoleArn = /* @__PURE__ */ __name((section) => { + return !section.role_arn && !!section.credential_source; +}, "isCredentialSourceWithoutRoleArn"); + +// src/resolveProcessCredentials.ts + +var isProcessProfile = /* @__PURE__ */ __name((arg) => Boolean(arg) && typeof arg === "object" && typeof arg.credential_process === "string", "isProcessProfile"); +var resolveProcessCredentials = /* @__PURE__ */ __name(async (options, profile) => Promise.resolve().then(() => __toESM(require("@aws-sdk/credential-provider-process"))).then( + ({ fromProcess }) => fromProcess({ + ...options, + profile + })().then((creds) => (0, import_client.setCredentialFeature)(creds, "CREDENTIALS_PROFILE_PROCESS", "v")) +), 
"resolveProcessCredentials"); + +// src/resolveSsoCredentials.ts + +var resolveSsoCredentials = /* @__PURE__ */ __name(async (profile, profileData, options = {}) => { + const { fromSSO } = await Promise.resolve().then(() => __toESM(require("@aws-sdk/credential-provider-sso"))); + return fromSSO({ + profile, + logger: options.logger, + parentClientConfig: options.parentClientConfig, + clientConfig: options.clientConfig + })().then((creds) => { + if (profileData.sso_session) { + return (0, import_client.setCredentialFeature)(creds, "CREDENTIALS_PROFILE_SSO", "r"); + } else { + return (0, import_client.setCredentialFeature)(creds, "CREDENTIALS_PROFILE_SSO_LEGACY", "t"); + } + }); +}, "resolveSsoCredentials"); +var isSsoProfile = /* @__PURE__ */ __name((arg) => arg && (typeof arg.sso_start_url === "string" || typeof arg.sso_account_id === "string" || typeof arg.sso_session === "string" || typeof arg.sso_region === "string" || typeof arg.sso_role_name === "string"), "isSsoProfile"); + +// src/resolveStaticCredentials.ts + +var isStaticCredsProfile = /* @__PURE__ */ __name((arg) => Boolean(arg) && typeof arg === "object" && typeof arg.aws_access_key_id === "string" && typeof arg.aws_secret_access_key === "string" && ["undefined", "string"].indexOf(typeof arg.aws_session_token) > -1 && ["undefined", "string"].indexOf(typeof arg.aws_account_id) > -1, "isStaticCredsProfile"); +var resolveStaticCredentials = /* @__PURE__ */ __name(async (profile, options) => { + options?.logger?.debug("@aws-sdk/credential-provider-ini - resolveStaticCredentials"); + const credentials = { + accessKeyId: profile.aws_access_key_id, + secretAccessKey: profile.aws_secret_access_key, + sessionToken: profile.aws_session_token, + ...profile.aws_credential_scope && { credentialScope: profile.aws_credential_scope }, + ...profile.aws_account_id && { accountId: profile.aws_account_id } + }; + return (0, import_client.setCredentialFeature)(credentials, "CREDENTIALS_PROFILE", "n"); +}, 
"resolveStaticCredentials"); + +// src/resolveWebIdentityCredentials.ts + +var isWebIdentityProfile = /* @__PURE__ */ __name((arg) => Boolean(arg) && typeof arg === "object" && typeof arg.web_identity_token_file === "string" && typeof arg.role_arn === "string" && ["undefined", "string"].indexOf(typeof arg.role_session_name) > -1, "isWebIdentityProfile"); +var resolveWebIdentityCredentials = /* @__PURE__ */ __name(async (profile, options) => Promise.resolve().then(() => __toESM(require("@aws-sdk/credential-provider-web-identity"))).then( + ({ fromTokenFile }) => fromTokenFile({ + webIdentityTokenFile: profile.web_identity_token_file, + roleArn: profile.role_arn, + roleSessionName: profile.role_session_name, + roleAssumerWithWebIdentity: options.roleAssumerWithWebIdentity, + logger: options.logger, + parentClientConfig: options.parentClientConfig + })().then((creds) => (0, import_client.setCredentialFeature)(creds, "CREDENTIALS_PROFILE_STS_WEB_ID_TOKEN", "q")) +), "resolveWebIdentityCredentials"); + +// src/resolveProfileData.ts +var resolveProfileData = /* @__PURE__ */ __name(async (profileName, profiles, options, visitedProfiles = {}, isAssumeRoleRecursiveCall = false) => { + const data = profiles[profileName]; + if (Object.keys(visitedProfiles).length > 0 && isStaticCredsProfile(data)) { + return resolveStaticCredentials(data, options); + } + if (isAssumeRoleRecursiveCall || isAssumeRoleProfile(data, { profile: profileName, logger: options.logger })) { + return resolveAssumeRoleCredentials(profileName, profiles, options, visitedProfiles); + } + if (isStaticCredsProfile(data)) { + return resolveStaticCredentials(data, options); + } + if (isWebIdentityProfile(data)) { + return resolveWebIdentityCredentials(data, options); + } + if (isProcessProfile(data)) { + return resolveProcessCredentials(options, profileName); + } + if (isSsoProfile(data)) { + return await resolveSsoCredentials(profileName, data, options); + } + throw new 
import_property_provider.CredentialsProviderError( + `Could not resolve credentials using profile: [${profileName}] in configuration/credentials file(s).`, + { logger: options.logger } + ); +}, "resolveProfileData"); + +// src/fromIni.ts +var fromIni = /* @__PURE__ */ __name((_init = {}) => async ({ callerClientConfig } = {}) => { + const init = { + ..._init, + parentClientConfig: { + ...callerClientConfig, + ..._init.parentClientConfig + } + }; + init.logger?.debug("@aws-sdk/credential-provider-ini - fromIni"); + const profiles = await (0, import_shared_ini_file_loader.parseKnownFiles)(init); + return resolveProfileData( + (0, import_shared_ini_file_loader.getProfileName)({ + profile: _init.profile ?? callerClientConfig?.profile + }), + profiles, + init + ); +}, "fromIni"); +// Annotate the CommonJS export names for ESM import in node: + +0 && (module.exports = { + fromIni +}); + diff --git a/amplify/functions/downloadDocument/node_modules/@aws-sdk/credential-provider-ini/dist-es/fromIni.js b/amplify/functions/downloadDocument/node_modules/@aws-sdk/credential-provider-ini/dist-es/fromIni.js new file mode 100644 index 0000000..ccf0397 --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@aws-sdk/credential-provider-ini/dist-es/fromIni.js @@ -0,0 +1,16 @@ +import { getProfileName, parseKnownFiles } from "@smithy/shared-ini-file-loader"; +import { resolveProfileData } from "./resolveProfileData"; +export const fromIni = (_init = {}) => async ({ callerClientConfig } = {}) => { + const init = { + ..._init, + parentClientConfig: { + ...callerClientConfig, + ..._init.parentClientConfig, + }, + }; + init.logger?.debug("@aws-sdk/credential-provider-ini - fromIni"); + const profiles = await parseKnownFiles(init); + return resolveProfileData(getProfileName({ + profile: _init.profile ?? 
callerClientConfig?.profile, + }), profiles, init); +}; diff --git a/amplify/functions/downloadDocument/node_modules/@aws-sdk/credential-provider-ini/dist-es/index.js b/amplify/functions/downloadDocument/node_modules/@aws-sdk/credential-provider-ini/dist-es/index.js new file mode 100644 index 0000000..b019131 --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@aws-sdk/credential-provider-ini/dist-es/index.js @@ -0,0 +1 @@ +export * from "./fromIni"; diff --git a/amplify/functions/downloadDocument/node_modules/@aws-sdk/credential-provider-ini/dist-es/resolveAssumeRoleCredentials.js b/amplify/functions/downloadDocument/node_modules/@aws-sdk/credential-provider-ini/dist-es/resolveAssumeRoleCredentials.js new file mode 100644 index 0000000..1411318 --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@aws-sdk/credential-provider-ini/dist-es/resolveAssumeRoleCredentials.js @@ -0,0 +1,80 @@ +import { setCredentialFeature } from "@aws-sdk/core/client"; +import { CredentialsProviderError } from "@smithy/property-provider"; +import { getProfileName } from "@smithy/shared-ini-file-loader"; +import { resolveCredentialSource } from "./resolveCredentialSource"; +import { resolveProfileData } from "./resolveProfileData"; +export const isAssumeRoleProfile = (arg, { profile = "default", logger } = {}) => { + return (Boolean(arg) && + typeof arg === "object" && + typeof arg.role_arn === "string" && + ["undefined", "string"].indexOf(typeof arg.role_session_name) > -1 && + ["undefined", "string"].indexOf(typeof arg.external_id) > -1 && + ["undefined", "string"].indexOf(typeof arg.mfa_serial) > -1 && + (isAssumeRoleWithSourceProfile(arg, { profile, logger }) || isCredentialSourceProfile(arg, { profile, logger }))); +}; +const isAssumeRoleWithSourceProfile = (arg, { profile, logger }) => { + const withSourceProfile = typeof arg.source_profile === "string" && typeof arg.credential_source === "undefined"; + if (withSourceProfile) { + logger?.debug?.(` 
${profile} isAssumeRoleWithSourceProfile source_profile=${arg.source_profile}`); + } + return withSourceProfile; +}; +const isCredentialSourceProfile = (arg, { profile, logger }) => { + const withProviderProfile = typeof arg.credential_source === "string" && typeof arg.source_profile === "undefined"; + if (withProviderProfile) { + logger?.debug?.(` ${profile} isCredentialSourceProfile credential_source=${arg.credential_source}`); + } + return withProviderProfile; +}; +export const resolveAssumeRoleCredentials = async (profileName, profiles, options, visitedProfiles = {}) => { + options.logger?.debug("@aws-sdk/credential-provider-ini - resolveAssumeRoleCredentials (STS)"); + const profileData = profiles[profileName]; + const { source_profile, region } = profileData; + if (!options.roleAssumer) { + const { getDefaultRoleAssumer } = await import("@aws-sdk/nested-clients/sts"); + options.roleAssumer = getDefaultRoleAssumer({ + ...options.clientConfig, + credentialProviderLogger: options.logger, + parentClientConfig: { + ...options?.parentClientConfig, + region: region ?? options?.parentClientConfig?.region, + }, + }, options.clientPlugins); + } + if (source_profile && source_profile in visitedProfiles) { + throw new CredentialsProviderError(`Detected a cycle attempting to resolve credentials for profile` + + ` ${getProfileName(options)}. Profiles visited: ` + + Object.keys(visitedProfiles).join(", "), { logger: options.logger }); + } + options.logger?.debug(`@aws-sdk/credential-provider-ini - finding credential resolver using ${source_profile ? `source_profile=[${source_profile}]` : `profile=[${profileName}]`}`); + const sourceCredsProvider = source_profile + ? resolveProfileData(source_profile, profiles, options, { + ...visitedProfiles, + [source_profile]: true, + }, isCredentialSourceWithoutRoleArn(profiles[source_profile] ?? 
{})) + : (await resolveCredentialSource(profileData.credential_source, profileName, options.logger)(options))(); + if (isCredentialSourceWithoutRoleArn(profileData)) { + return sourceCredsProvider.then((creds) => setCredentialFeature(creds, "CREDENTIALS_PROFILE_SOURCE_PROFILE", "o")); + } + else { + const params = { + RoleArn: profileData.role_arn, + RoleSessionName: profileData.role_session_name || `aws-sdk-js-${Date.now()}`, + ExternalId: profileData.external_id, + DurationSeconds: parseInt(profileData.duration_seconds || "3600", 10), + }; + const { mfa_serial } = profileData; + if (mfa_serial) { + if (!options.mfaCodeProvider) { + throw new CredentialsProviderError(`Profile ${profileName} requires multi-factor authentication, but no MFA code callback was provided.`, { logger: options.logger, tryNextLink: false }); + } + params.SerialNumber = mfa_serial; + params.TokenCode = await options.mfaCodeProvider(mfa_serial); + } + const sourceCreds = await sourceCredsProvider; + return options.roleAssumer(sourceCreds, params).then((creds) => setCredentialFeature(creds, "CREDENTIALS_PROFILE_SOURCE_PROFILE", "o")); + } +}; +const isCredentialSourceWithoutRoleArn = (section) => { + return !section.role_arn && !!section.credential_source; +}; diff --git a/amplify/functions/downloadDocument/node_modules/@aws-sdk/credential-provider-ini/dist-es/resolveCredentialSource.js b/amplify/functions/downloadDocument/node_modules/@aws-sdk/credential-provider-ini/dist-es/resolveCredentialSource.js new file mode 100644 index 0000000..b004933 --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@aws-sdk/credential-provider-ini/dist-es/resolveCredentialSource.js @@ -0,0 +1,30 @@ +import { setCredentialFeature } from "@aws-sdk/core/client"; +import { chain, CredentialsProviderError } from "@smithy/property-provider"; +export const resolveCredentialSource = (credentialSource, profileName, logger) => { + const sourceProvidersMap = { + EcsContainer: async (options) => { + const { 
fromHttp } = await import("@aws-sdk/credential-provider-http"); + const { fromContainerMetadata } = await import("@smithy/credential-provider-imds"); + logger?.debug("@aws-sdk/credential-provider-ini - credential_source is EcsContainer"); + return async () => chain(fromHttp(options ?? {}), fromContainerMetadata(options))().then(setNamedProvider); + }, + Ec2InstanceMetadata: async (options) => { + logger?.debug("@aws-sdk/credential-provider-ini - credential_source is Ec2InstanceMetadata"); + const { fromInstanceMetadata } = await import("@smithy/credential-provider-imds"); + return async () => fromInstanceMetadata(options)().then(setNamedProvider); + }, + Environment: async (options) => { + logger?.debug("@aws-sdk/credential-provider-ini - credential_source is Environment"); + const { fromEnv } = await import("@aws-sdk/credential-provider-env"); + return async () => fromEnv(options)().then(setNamedProvider); + }, + }; + if (credentialSource in sourceProvidersMap) { + return sourceProvidersMap[credentialSource]; + } + else { + throw new CredentialsProviderError(`Unsupported credential source in profile ${profileName}. 
Got ${credentialSource}, ` + + `expected EcsContainer or Ec2InstanceMetadata or Environment.`, { logger }); + } +}; +const setNamedProvider = (creds) => setCredentialFeature(creds, "CREDENTIALS_PROFILE_NAMED_PROVIDER", "p"); diff --git a/amplify/functions/downloadDocument/node_modules/@aws-sdk/credential-provider-ini/dist-es/resolveProcessCredentials.js b/amplify/functions/downloadDocument/node_modules/@aws-sdk/credential-provider-ini/dist-es/resolveProcessCredentials.js new file mode 100644 index 0000000..5a9f975 --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@aws-sdk/credential-provider-ini/dist-es/resolveProcessCredentials.js @@ -0,0 +1,6 @@ +import { setCredentialFeature } from "@aws-sdk/core/client"; +export const isProcessProfile = (arg) => Boolean(arg) && typeof arg === "object" && typeof arg.credential_process === "string"; +export const resolveProcessCredentials = async (options, profile) => import("@aws-sdk/credential-provider-process").then(({ fromProcess }) => fromProcess({ + ...options, + profile, +})().then((creds) => setCredentialFeature(creds, "CREDENTIALS_PROFILE_PROCESS", "v"))); diff --git a/amplify/functions/downloadDocument/node_modules/@aws-sdk/credential-provider-ini/dist-es/resolveProfileData.js b/amplify/functions/downloadDocument/node_modules/@aws-sdk/credential-provider-ini/dist-es/resolveProfileData.js new file mode 100644 index 0000000..3e64e9e --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@aws-sdk/credential-provider-ini/dist-es/resolveProfileData.js @@ -0,0 +1,28 @@ +import { CredentialsProviderError } from "@smithy/property-provider"; +import { isAssumeRoleProfile, resolveAssumeRoleCredentials } from "./resolveAssumeRoleCredentials"; +import { isProcessProfile, resolveProcessCredentials } from "./resolveProcessCredentials"; +import { isSsoProfile, resolveSsoCredentials } from "./resolveSsoCredentials"; +import { isStaticCredsProfile, resolveStaticCredentials } from 
"./resolveStaticCredentials"; +import { isWebIdentityProfile, resolveWebIdentityCredentials } from "./resolveWebIdentityCredentials"; +export const resolveProfileData = async (profileName, profiles, options, visitedProfiles = {}, isAssumeRoleRecursiveCall = false) => { + const data = profiles[profileName]; + if (Object.keys(visitedProfiles).length > 0 && isStaticCredsProfile(data)) { + return resolveStaticCredentials(data, options); + } + if (isAssumeRoleRecursiveCall || isAssumeRoleProfile(data, { profile: profileName, logger: options.logger })) { + return resolveAssumeRoleCredentials(profileName, profiles, options, visitedProfiles); + } + if (isStaticCredsProfile(data)) { + return resolveStaticCredentials(data, options); + } + if (isWebIdentityProfile(data)) { + return resolveWebIdentityCredentials(data, options); + } + if (isProcessProfile(data)) { + return resolveProcessCredentials(options, profileName); + } + if (isSsoProfile(data)) { + return await resolveSsoCredentials(profileName, data, options); + } + throw new CredentialsProviderError(`Could not resolve credentials using profile: [${profileName}] in configuration/credentials file(s).`, { logger: options.logger }); +}; diff --git a/amplify/functions/downloadDocument/node_modules/@aws-sdk/credential-provider-ini/dist-es/resolveSsoCredentials.js b/amplify/functions/downloadDocument/node_modules/@aws-sdk/credential-provider-ini/dist-es/resolveSsoCredentials.js new file mode 100644 index 0000000..5da74da --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@aws-sdk/credential-provider-ini/dist-es/resolveSsoCredentials.js @@ -0,0 +1,23 @@ +import { setCredentialFeature } from "@aws-sdk/core/client"; +export const resolveSsoCredentials = async (profile, profileData, options = {}) => { + const { fromSSO } = await import("@aws-sdk/credential-provider-sso"); + return fromSSO({ + profile, + logger: options.logger, + parentClientConfig: options.parentClientConfig, + clientConfig: options.clientConfig, 
+ })().then((creds) => { + if (profileData.sso_session) { + return setCredentialFeature(creds, "CREDENTIALS_PROFILE_SSO", "r"); + } + else { + return setCredentialFeature(creds, "CREDENTIALS_PROFILE_SSO_LEGACY", "t"); + } + }); +}; +export const isSsoProfile = (arg) => arg && + (typeof arg.sso_start_url === "string" || + typeof arg.sso_account_id === "string" || + typeof arg.sso_session === "string" || + typeof arg.sso_region === "string" || + typeof arg.sso_role_name === "string"); diff --git a/amplify/functions/downloadDocument/node_modules/@aws-sdk/credential-provider-ini/dist-es/resolveStaticCredentials.js b/amplify/functions/downloadDocument/node_modules/@aws-sdk/credential-provider-ini/dist-es/resolveStaticCredentials.js new file mode 100644 index 0000000..c04435f --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@aws-sdk/credential-provider-ini/dist-es/resolveStaticCredentials.js @@ -0,0 +1,18 @@ +import { setCredentialFeature } from "@aws-sdk/core/client"; +export const isStaticCredsProfile = (arg) => Boolean(arg) && + typeof arg === "object" && + typeof arg.aws_access_key_id === "string" && + typeof arg.aws_secret_access_key === "string" && + ["undefined", "string"].indexOf(typeof arg.aws_session_token) > -1 && + ["undefined", "string"].indexOf(typeof arg.aws_account_id) > -1; +export const resolveStaticCredentials = async (profile, options) => { + options?.logger?.debug("@aws-sdk/credential-provider-ini - resolveStaticCredentials"); + const credentials = { + accessKeyId: profile.aws_access_key_id, + secretAccessKey: profile.aws_secret_access_key, + sessionToken: profile.aws_session_token, + ...(profile.aws_credential_scope && { credentialScope: profile.aws_credential_scope }), + ...(profile.aws_account_id && { accountId: profile.aws_account_id }), + }; + return setCredentialFeature(credentials, "CREDENTIALS_PROFILE", "n"); +}; diff --git 
a/amplify/functions/downloadDocument/node_modules/@aws-sdk/credential-provider-ini/dist-es/resolveWebIdentityCredentials.js b/amplify/functions/downloadDocument/node_modules/@aws-sdk/credential-provider-ini/dist-es/resolveWebIdentityCredentials.js new file mode 100644 index 0000000..10adfe7 --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@aws-sdk/credential-provider-ini/dist-es/resolveWebIdentityCredentials.js @@ -0,0 +1,14 @@ +import { setCredentialFeature } from "@aws-sdk/core/client"; +export const isWebIdentityProfile = (arg) => Boolean(arg) && + typeof arg === "object" && + typeof arg.web_identity_token_file === "string" && + typeof arg.role_arn === "string" && + ["undefined", "string"].indexOf(typeof arg.role_session_name) > -1; +export const resolveWebIdentityCredentials = async (profile, options) => import("@aws-sdk/credential-provider-web-identity").then(({ fromTokenFile }) => fromTokenFile({ + webIdentityTokenFile: profile.web_identity_token_file, + roleArn: profile.role_arn, + roleSessionName: profile.role_session_name, + roleAssumerWithWebIdentity: options.roleAssumerWithWebIdentity, + logger: options.logger, + parentClientConfig: options.parentClientConfig, +})().then((creds) => setCredentialFeature(creds, "CREDENTIALS_PROFILE_STS_WEB_ID_TOKEN", "q"))); diff --git a/amplify/functions/downloadDocument/node_modules/@aws-sdk/credential-provider-ini/dist-types/fromIni.d.ts b/amplify/functions/downloadDocument/node_modules/@aws-sdk/credential-provider-ini/dist-types/fromIni.d.ts new file mode 100644 index 0000000..5554125 --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@aws-sdk/credential-provider-ini/dist-types/fromIni.d.ts @@ -0,0 +1,55 @@ +import type { AssumeRoleWithWebIdentityParams } from "@aws-sdk/credential-provider-web-identity"; +import type { CredentialProviderOptions } from "@aws-sdk/types"; +import type { RuntimeConfigAwsCredentialIdentityProvider } from "@aws-sdk/types"; +import { SourceProfileInit } 
from "@smithy/shared-ini-file-loader"; +import type { AwsCredentialIdentity, Pluggable } from "@smithy/types"; +import { AssumeRoleParams } from "./resolveAssumeRoleCredentials"; +/** + * @public + */ +export interface FromIniInit extends SourceProfileInit, CredentialProviderOptions { + /** + * A function that returns a promise fulfilled with an MFA token code for + * the provided MFA Serial code. If a profile requires an MFA code and + * `mfaCodeProvider` is not a valid function, the credential provider + * promise will be rejected. + * + * @param mfaSerial The serial code of the MFA device specified. + */ + mfaCodeProvider?: (mfaSerial: string) => Promise; + /** + * A function that assumes a role and returns a promise fulfilled with + * credentials for the assumed role. + * + * @param sourceCreds The credentials with which to assume a role. + * @param params + */ + roleAssumer?: (sourceCreds: AwsCredentialIdentity, params: AssumeRoleParams) => Promise; + /** + * A function that assumes a role with web identity and returns a promise fulfilled with + * credentials for the assumed role. + * + * @param sourceCreds The credentials with which to assume a role. + * @param params + */ + roleAssumerWithWebIdentity?: (params: AssumeRoleWithWebIdentityParams) => Promise; + /** + * STSClientConfig or SSOClientConfig to be used for creating inner client + * for auth operations. + * @internal + */ + clientConfig?: any; + clientPlugins?: Pluggable[]; + /** + * When true, always reload credentials from the file system instead of using cached values. + * This is useful when you need to detect changes to the credentials file. + */ + ignoreCache?: boolean; +} +/** + * @internal + * + * Creates a credential provider that will read from ini files and supports + * role assumption and multi-factor authentication. 
+ */ +export declare const fromIni: (_init?: FromIniInit) => RuntimeConfigAwsCredentialIdentityProvider; diff --git a/amplify/functions/downloadDocument/node_modules/@aws-sdk/credential-provider-ini/dist-types/index.d.ts b/amplify/functions/downloadDocument/node_modules/@aws-sdk/credential-provider-ini/dist-types/index.d.ts new file mode 100644 index 0000000..75680c0 --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@aws-sdk/credential-provider-ini/dist-types/index.d.ts @@ -0,0 +1,4 @@ +/** + * @internal + */ +export * from "./fromIni"; diff --git a/amplify/functions/downloadDocument/node_modules/@aws-sdk/credential-provider-ini/dist-types/resolveAssumeRoleCredentials.d.ts b/amplify/functions/downloadDocument/node_modules/@aws-sdk/credential-provider-ini/dist-types/resolveAssumeRoleCredentials.d.ts new file mode 100644 index 0000000..dd9a896 --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@aws-sdk/credential-provider-ini/dist-types/resolveAssumeRoleCredentials.d.ts @@ -0,0 +1,47 @@ +import { Logger, ParsedIniData } from "@smithy/types"; +import { FromIniInit } from "./fromIni"; +/** + * @internal + * + * @see http://docs.aws.amazon.com/AWSJavaScriptSDK/latest/AWS/STS.html#assumeRole-property + * TODO update the above to link to V3 docs + */ +export interface AssumeRoleParams { + /** + * The identifier of the role to be assumed. + */ + RoleArn: string; + /** + * A name for the assumed role session. + */ + RoleSessionName: string; + /** + * A unique identifier that is used by third parties when assuming roles in + * their customers' accounts. + */ + ExternalId?: string; + /** + * The identification number of the MFA device that is associated with the + * user who is making the `AssumeRole` call. + */ + SerialNumber?: string; + /** + * The value provided by the MFA device. + */ + TokenCode?: string; + /** + * The duration, in seconds, of the role session. 
+ */ + DurationSeconds?: number; +} +/** + * @internal + */ +export declare const isAssumeRoleProfile: (arg: any, { profile, logger }?: { + profile?: string | undefined; + logger?: Logger | undefined; +}) => boolean; +/** + * @internal + */ +export declare const resolveAssumeRoleCredentials: (profileName: string, profiles: ParsedIniData, options: FromIniInit, visitedProfiles?: Record) => Promise; diff --git a/amplify/functions/downloadDocument/node_modules/@aws-sdk/credential-provider-ini/dist-types/resolveCredentialSource.d.ts b/amplify/functions/downloadDocument/node_modules/@aws-sdk/credential-provider-ini/dist-types/resolveCredentialSource.d.ts new file mode 100644 index 0000000..6f1c9b7 --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@aws-sdk/credential-provider-ini/dist-types/resolveCredentialSource.d.ts @@ -0,0 +1,12 @@ +import type { CredentialProviderOptions } from "@aws-sdk/types"; +import { AwsCredentialIdentityProvider, Logger } from "@smithy/types"; +/** + * @internal + * + * Resolve the `credential_source` entry from the profile, and return the + * credential providers respectively. No memoization is needed for the + * credential source providers because memoization should be added outside the + * fromIni() provider. The source credential needs to be refreshed every time + * fromIni() is called. 
+ */ +export declare const resolveCredentialSource: (credentialSource: string, profileName: string, logger?: Logger) => (options?: CredentialProviderOptions) => Promise; diff --git a/amplify/functions/downloadDocument/node_modules/@aws-sdk/credential-provider-ini/dist-types/resolveProcessCredentials.d.ts b/amplify/functions/downloadDocument/node_modules/@aws-sdk/credential-provider-ini/dist-types/resolveProcessCredentials.d.ts new file mode 100644 index 0000000..7194518 --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@aws-sdk/credential-provider-ini/dist-types/resolveProcessCredentials.d.ts @@ -0,0 +1,16 @@ +import { Credentials, Profile } from "@aws-sdk/types"; +import { FromIniInit } from "./fromIni"; +/** + * @internal + */ +export interface ProcessProfile extends Profile { + credential_process: string; +} +/** + * @internal + */ +export declare const isProcessProfile: (arg: any) => arg is ProcessProfile; +/** + * @internal + */ +export declare const resolveProcessCredentials: (options: FromIniInit, profile: string) => Promise; diff --git a/amplify/functions/downloadDocument/node_modules/@aws-sdk/credential-provider-ini/dist-types/resolveProfileData.d.ts b/amplify/functions/downloadDocument/node_modules/@aws-sdk/credential-provider-ini/dist-types/resolveProfileData.d.ts new file mode 100644 index 0000000..e59ca93 --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@aws-sdk/credential-provider-ini/dist-types/resolveProfileData.d.ts @@ -0,0 +1,6 @@ +import type { AwsCredentialIdentity, ParsedIniData } from "@smithy/types"; +import { FromIniInit } from "./fromIni"; +/** + * @internal + */ +export declare const resolveProfileData: (profileName: string, profiles: ParsedIniData, options: FromIniInit, visitedProfiles?: Record, isAssumeRoleRecursiveCall?: boolean) => Promise; diff --git a/amplify/functions/downloadDocument/node_modules/@aws-sdk/credential-provider-ini/dist-types/resolveSsoCredentials.d.ts 
b/amplify/functions/downloadDocument/node_modules/@aws-sdk/credential-provider-ini/dist-types/resolveSsoCredentials.d.ts new file mode 100644 index 0000000..1909a51 --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@aws-sdk/credential-provider-ini/dist-types/resolveSsoCredentials.d.ts @@ -0,0 +1,12 @@ +import type { SsoProfile } from "@aws-sdk/credential-provider-sso"; +import type { IniSection, Profile } from "@smithy/types"; +import type { FromIniInit } from "./fromIni"; +/** + * @internal + */ +export declare const resolveSsoCredentials: (profile: string, profileData: IniSection, options?: FromIniInit) => Promise; +/** + * @internal + * duplicated from \@aws-sdk/credential-provider-sso to defer import. + */ +export declare const isSsoProfile: (arg: Profile) => arg is Partial; diff --git a/amplify/functions/downloadDocument/node_modules/@aws-sdk/credential-provider-ini/dist-types/resolveStaticCredentials.d.ts b/amplify/functions/downloadDocument/node_modules/@aws-sdk/credential-provider-ini/dist-types/resolveStaticCredentials.d.ts new file mode 100644 index 0000000..e04cf26 --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@aws-sdk/credential-provider-ini/dist-types/resolveStaticCredentials.d.ts @@ -0,0 +1,20 @@ +import { AwsCredentialIdentity, Profile } from "@smithy/types"; +import { FromIniInit } from "./fromIni"; +/** + * @internal + */ +export interface StaticCredsProfile extends Profile { + aws_access_key_id: string; + aws_secret_access_key: string; + aws_session_token?: string; + aws_credential_scope?: string; + aws_account_id?: string; +} +/** + * @internal + */ +export declare const isStaticCredsProfile: (arg: any) => arg is StaticCredsProfile; +/** + * @internal + */ +export declare const resolveStaticCredentials: (profile: StaticCredsProfile, options?: FromIniInit) => Promise; diff --git a/amplify/functions/downloadDocument/node_modules/@aws-sdk/credential-provider-ini/dist-types/resolveWebIdentityCredentials.d.ts 
b/amplify/functions/downloadDocument/node_modules/@aws-sdk/credential-provider-ini/dist-types/resolveWebIdentityCredentials.d.ts new file mode 100644 index 0000000..acb1d45 --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@aws-sdk/credential-provider-ini/dist-types/resolveWebIdentityCredentials.d.ts @@ -0,0 +1,18 @@ +import { AwsCredentialIdentity, Profile } from "@smithy/types"; +import { FromIniInit } from "./fromIni"; +/** + * @internal + */ +export interface WebIdentityProfile extends Profile { + web_identity_token_file: string; + role_arn: string; + role_session_name?: string; +} +/** + * @internal + */ +export declare const isWebIdentityProfile: (arg: any) => arg is WebIdentityProfile; +/** + * @internal + */ +export declare const resolveWebIdentityCredentials: (profile: WebIdentityProfile, options: FromIniInit) => Promise; diff --git a/amplify/functions/downloadDocument/node_modules/@aws-sdk/credential-provider-ini/dist-types/ts3.4/fromIni.d.ts b/amplify/functions/downloadDocument/node_modules/@aws-sdk/credential-provider-ini/dist-types/ts3.4/fromIni.d.ts new file mode 100644 index 0000000..9d640a0 --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@aws-sdk/credential-provider-ini/dist-types/ts3.4/fromIni.d.ts @@ -0,0 +1,24 @@ +import { AssumeRoleWithWebIdentityParams } from "@aws-sdk/credential-provider-web-identity"; +import { CredentialProviderOptions } from "@aws-sdk/types"; +import { RuntimeConfigAwsCredentialIdentityProvider } from "@aws-sdk/types"; +import { SourceProfileInit } from "@smithy/shared-ini-file-loader"; +import { AwsCredentialIdentity, Pluggable } from "@smithy/types"; +import { AssumeRoleParams } from "./resolveAssumeRoleCredentials"; +export interface FromIniInit + extends SourceProfileInit, + CredentialProviderOptions { + mfaCodeProvider?: (mfaSerial: string) => Promise; + roleAssumer?: ( + sourceCreds: AwsCredentialIdentity, + params: AssumeRoleParams + ) => Promise; + roleAssumerWithWebIdentity?: ( 
+ params: AssumeRoleWithWebIdentityParams + ) => Promise; + clientConfig?: any; + clientPlugins?: Pluggable[]; + ignoreCache?: boolean; +} +export declare const fromIni: ( + _init?: FromIniInit +) => RuntimeConfigAwsCredentialIdentityProvider; diff --git a/amplify/functions/downloadDocument/node_modules/@aws-sdk/credential-provider-ini/dist-types/ts3.4/index.d.ts b/amplify/functions/downloadDocument/node_modules/@aws-sdk/credential-provider-ini/dist-types/ts3.4/index.d.ts new file mode 100644 index 0000000..b019131 --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@aws-sdk/credential-provider-ini/dist-types/ts3.4/index.d.ts @@ -0,0 +1 @@ +export * from "./fromIni"; diff --git a/amplify/functions/downloadDocument/node_modules/@aws-sdk/credential-provider-ini/dist-types/ts3.4/resolveAssumeRoleCredentials.d.ts b/amplify/functions/downloadDocument/node_modules/@aws-sdk/credential-provider-ini/dist-types/ts3.4/resolveAssumeRoleCredentials.d.ts new file mode 100644 index 0000000..eb782f3 --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@aws-sdk/credential-provider-ini/dist-types/ts3.4/resolveAssumeRoleCredentials.d.ts @@ -0,0 +1,26 @@ +import { Logger, ParsedIniData } from "@smithy/types"; +import { FromIniInit } from "./fromIni"; +export interface AssumeRoleParams { + RoleArn: string; + RoleSessionName: string; + ExternalId?: string; + SerialNumber?: string; + TokenCode?: string; + DurationSeconds?: number; +} +export declare const isAssumeRoleProfile: ( + arg: any, + { + profile, + logger, + }?: { + profile?: string | undefined; + logger?: Logger | undefined; + } +) => boolean; +export declare const resolveAssumeRoleCredentials: ( + profileName: string, + profiles: ParsedIniData, + options: FromIniInit, + visitedProfiles?: Record +) => Promise; diff --git a/amplify/functions/downloadDocument/node_modules/@aws-sdk/credential-provider-ini/dist-types/ts3.4/resolveCredentialSource.d.ts 
b/amplify/functions/downloadDocument/node_modules/@aws-sdk/credential-provider-ini/dist-types/ts3.4/resolveCredentialSource.d.ts new file mode 100644 index 0000000..21a7f9f --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@aws-sdk/credential-provider-ini/dist-types/ts3.4/resolveCredentialSource.d.ts @@ -0,0 +1,9 @@ +import { CredentialProviderOptions } from "@aws-sdk/types"; +import { AwsCredentialIdentityProvider, Logger } from "@smithy/types"; +export declare const resolveCredentialSource: ( + credentialSource: string, + profileName: string, + logger?: Logger +) => ( + options?: CredentialProviderOptions +) => Promise; diff --git a/amplify/functions/downloadDocument/node_modules/@aws-sdk/credential-provider-ini/dist-types/ts3.4/resolveProcessCredentials.d.ts b/amplify/functions/downloadDocument/node_modules/@aws-sdk/credential-provider-ini/dist-types/ts3.4/resolveProcessCredentials.d.ts new file mode 100644 index 0000000..dbd5583 --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@aws-sdk/credential-provider-ini/dist-types/ts3.4/resolveProcessCredentials.d.ts @@ -0,0 +1,10 @@ +import { Credentials, Profile } from "@aws-sdk/types"; +import { FromIniInit } from "./fromIni"; +export interface ProcessProfile extends Profile { + credential_process: string; +} +export declare const isProcessProfile: (arg: any) => arg is ProcessProfile; +export declare const resolveProcessCredentials: ( + options: FromIniInit, + profile: string +) => Promise; diff --git a/amplify/functions/downloadDocument/node_modules/@aws-sdk/credential-provider-ini/dist-types/ts3.4/resolveProfileData.d.ts b/amplify/functions/downloadDocument/node_modules/@aws-sdk/credential-provider-ini/dist-types/ts3.4/resolveProfileData.d.ts new file mode 100644 index 0000000..d821bb4 --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@aws-sdk/credential-provider-ini/dist-types/ts3.4/resolveProfileData.d.ts @@ -0,0 +1,9 @@ +import { AwsCredentialIdentity, 
ParsedIniData } from "@smithy/types"; +import { FromIniInit } from "./fromIni"; +export declare const resolveProfileData: ( + profileName: string, + profiles: ParsedIniData, + options: FromIniInit, + visitedProfiles?: Record, + isAssumeRoleRecursiveCall?: boolean +) => Promise; diff --git a/amplify/functions/downloadDocument/node_modules/@aws-sdk/credential-provider-ini/dist-types/ts3.4/resolveSsoCredentials.d.ts b/amplify/functions/downloadDocument/node_modules/@aws-sdk/credential-provider-ini/dist-types/ts3.4/resolveSsoCredentials.d.ts new file mode 100644 index 0000000..88bec34 --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@aws-sdk/credential-provider-ini/dist-types/ts3.4/resolveSsoCredentials.d.ts @@ -0,0 +1,9 @@ +import { SsoProfile } from "@aws-sdk/credential-provider-sso"; +import { IniSection, Profile } from "@smithy/types"; +import { FromIniInit } from "./fromIni"; +export declare const resolveSsoCredentials: ( + profile: string, + profileData: IniSection, + options?: FromIniInit +) => Promise; +export declare const isSsoProfile: (arg: Profile) => arg is Partial; diff --git a/amplify/functions/downloadDocument/node_modules/@aws-sdk/credential-provider-ini/dist-types/ts3.4/resolveStaticCredentials.d.ts b/amplify/functions/downloadDocument/node_modules/@aws-sdk/credential-provider-ini/dist-types/ts3.4/resolveStaticCredentials.d.ts new file mode 100644 index 0000000..5f5daa9 --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@aws-sdk/credential-provider-ini/dist-types/ts3.4/resolveStaticCredentials.d.ts @@ -0,0 +1,16 @@ +import { AwsCredentialIdentity, Profile } from "@smithy/types"; +import { FromIniInit } from "./fromIni"; +export interface StaticCredsProfile extends Profile { + aws_access_key_id: string; + aws_secret_access_key: string; + aws_session_token?: string; + aws_credential_scope?: string; + aws_account_id?: string; +} +export declare const isStaticCredsProfile: ( + arg: any +) => arg is StaticCredsProfile; 
+export declare const resolveStaticCredentials: ( + profile: StaticCredsProfile, + options?: FromIniInit +) => Promise; diff --git a/amplify/functions/downloadDocument/node_modules/@aws-sdk/credential-provider-ini/dist-types/ts3.4/resolveWebIdentityCredentials.d.ts b/amplify/functions/downloadDocument/node_modules/@aws-sdk/credential-provider-ini/dist-types/ts3.4/resolveWebIdentityCredentials.d.ts new file mode 100644 index 0000000..4179f94 --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@aws-sdk/credential-provider-ini/dist-types/ts3.4/resolveWebIdentityCredentials.d.ts @@ -0,0 +1,14 @@ +import { AwsCredentialIdentity, Profile } from "@smithy/types"; +import { FromIniInit } from "./fromIni"; +export interface WebIdentityProfile extends Profile { + web_identity_token_file: string; + role_arn: string; + role_session_name?: string; +} +export declare const isWebIdentityProfile: ( + arg: any +) => arg is WebIdentityProfile; +export declare const resolveWebIdentityCredentials: ( + profile: WebIdentityProfile, + options: FromIniInit +) => Promise; diff --git a/amplify/functions/downloadDocument/node_modules/@aws-sdk/credential-provider-ini/package.json b/amplify/functions/downloadDocument/node_modules/@aws-sdk/credential-provider-ini/package.json new file mode 100644 index 0000000..313a386 --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@aws-sdk/credential-provider-ini/package.json @@ -0,0 +1,72 @@ +{ + "name": "@aws-sdk/credential-provider-ini", + "version": "3.803.0", + "description": "AWS credential provider that sources credentials from ~/.aws/credentials and ~/.aws/config", + "main": "./dist-cjs/index.js", + "module": "./dist-es/index.js", + "scripts": { + "build": "concurrently 'yarn:build:cjs' 'yarn:build:es' 'yarn:build:types'", + "build:cjs": "node ../../scripts/compilation/inline credential-provider-ini", + "build:es": "tsc -p tsconfig.es.json", + "build:include:deps": "lerna run --scope $npm_package_name 
--include-dependencies build", + "build:types": "tsc -p tsconfig.types.json", + "build:types:downlevel": "downlevel-dts dist-types dist-types/ts3.4", + "clean": "rimraf ./dist-* && rimraf *.tsbuildinfo", + "test": "yarn g:vitest run", + "test:watch": "yarn g:vitest watch", + "test:integration": "yarn g:vitest run -c vitest.config.integ.ts", + "test:integration:watch": "yarn g:vitest watch -c vitest.config.integ.ts" + }, + "keywords": [ + "aws", + "credentials" + ], + "author": { + "name": "AWS SDK for JavaScript Team", + "url": "https://aws.amazon.com/javascript/" + }, + "license": "Apache-2.0", + "dependencies": { + "@aws-sdk/core": "3.799.0", + "@aws-sdk/credential-provider-env": "3.799.0", + "@aws-sdk/credential-provider-http": "3.799.0", + "@aws-sdk/credential-provider-process": "3.799.0", + "@aws-sdk/credential-provider-sso": "3.803.0", + "@aws-sdk/credential-provider-web-identity": "3.803.0", + "@aws-sdk/nested-clients": "3.803.0", + "@aws-sdk/types": "3.775.0", + "@smithy/credential-provider-imds": "^4.0.2", + "@smithy/property-provider": "^4.0.2", + "@smithy/shared-ini-file-loader": "^4.0.2", + "@smithy/types": "^4.2.0", + "tslib": "^2.6.2" + }, + "devDependencies": { + "@tsconfig/recommended": "1.0.1", + "@types/node": "^18.19.69", + "concurrently": "7.0.0", + "downlevel-dts": "0.10.1", + "rimraf": "3.0.2", + "typescript": "~5.2.2" + }, + "types": "./dist-types/index.d.ts", + "engines": { + "node": ">=18.0.0" + }, + "typesVersions": { + "<4.0": { + "dist-types/*": [ + "dist-types/ts3.4/*" + ] + } + }, + "files": [ + "dist-*/**" + ], + "homepage": "https://github.com/aws/aws-sdk-js-v3/tree/main/packages/credential-provider-ini", + "repository": { + "type": "git", + "url": "https://github.com/aws/aws-sdk-js-v3.git", + "directory": "packages/credential-provider-ini" + } +} diff --git a/amplify/functions/downloadDocument/node_modules/@aws-sdk/credential-provider-node/LICENSE 
b/amplify/functions/downloadDocument/node_modules/@aws-sdk/credential-provider-node/LICENSE new file mode 100644 index 0000000..7b6491b --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@aws-sdk/credential-provider-node/LICENSE @@ -0,0 +1,201 @@ +Apache License + Version 2.0, January 2004 + http://www.apache.org/licenses/ + + TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION + + 1. Definitions. + + "License" shall mean the terms and conditions for use, reproduction, + and distribution as defined by Sections 1 through 9 of this document. + + "Licensor" shall mean the copyright owner or entity authorized by + the copyright owner that is granting the License. + + "Legal Entity" shall mean the union of the acting entity and all + other entities that control, are controlled by, or are under common + control with that entity. For the purposes of this definition, + "control" means (i) the power, direct or indirect, to cause the + direction or management of such entity, whether by contract or + otherwise, or (ii) ownership of fifty percent (50%) or more of the + outstanding shares, or (iii) beneficial ownership of such entity. + + "You" (or "Your") shall mean an individual or Legal Entity + exercising permissions granted by this License. + + "Source" form shall mean the preferred form for making modifications, + including but not limited to software source code, documentation + source, and configuration files. + + "Object" form shall mean any form resulting from mechanical + transformation or translation of a Source form, including but + not limited to compiled object code, generated documentation, + and conversions to other media types. + + "Work" shall mean the work of authorship, whether in Source or + Object form, made available under the License, as indicated by a + copyright notice that is included in or attached to the work + (an example is provided in the Appendix below). 
+ + "Derivative Works" shall mean any work, whether in Source or Object + form, that is based on (or derived from) the Work and for which the + editorial revisions, annotations, elaborations, or other modifications + represent, as a whole, an original work of authorship. For the purposes + of this License, Derivative Works shall not include works that remain + separable from, or merely link (or bind by name) to the interfaces of, + the Work and Derivative Works thereof. + + "Contribution" shall mean any work of authorship, including + the original version of the Work and any modifications or additions + to that Work or Derivative Works thereof, that is intentionally + submitted to Licensor for inclusion in the Work by the copyright owner + or by an individual or Legal Entity authorized to submit on behalf of + the copyright owner. For the purposes of this definition, "submitted" + means any form of electronic, verbal, or written communication sent + to the Licensor or its representatives, including but not limited to + communication on electronic mailing lists, source code control systems, + and issue tracking systems that are managed by, or on behalf of, the + Licensor for the purpose of discussing and improving the Work, but + excluding communication that is conspicuously marked or otherwise + designated in writing by the copyright owner as "Not a Contribution." + + "Contributor" shall mean Licensor and any individual or Legal Entity + on behalf of whom a Contribution has been received by Licensor and + subsequently incorporated within the Work. + + 2. Grant of Copyright License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + copyright license to reproduce, prepare Derivative Works of, + publicly display, publicly perform, sublicense, and distribute the + Work and such Derivative Works in Source or Object form. + + 3. 
Grant of Patent License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + (except as stated in this section) patent license to make, have made, + use, offer to sell, sell, import, and otherwise transfer the Work, + where such license applies only to those patent claims licensable + by such Contributor that are necessarily infringed by their + Contribution(s) alone or by combination of their Contribution(s) + with the Work to which such Contribution(s) was submitted. If You + institute patent litigation against any entity (including a + cross-claim or counterclaim in a lawsuit) alleging that the Work + or a Contribution incorporated within the Work constitutes direct + or contributory patent infringement, then any patent licenses + granted to You under this License for that Work shall terminate + as of the date such litigation is filed. + + 4. Redistribution. You may reproduce and distribute copies of the + Work or Derivative Works thereof in any medium, with or without + modifications, and in Source or Object form, provided that You + meet the following conditions: + + (a) You must give any other recipients of the Work or + Derivative Works a copy of this License; and + + (b) You must cause any modified files to carry prominent notices + stating that You changed the files; and + + (c) You must retain, in the Source form of any Derivative Works + that You distribute, all copyright, patent, trademark, and + attribution notices from the Source form of the Work, + excluding those notices that do not pertain to any part of + the Derivative Works; and + + (d) If the Work includes a "NOTICE" text file as part of its + distribution, then any Derivative Works that You distribute must + include a readable copy of the attribution notices contained + within such NOTICE file, excluding those notices that do not + pertain to any part of the Derivative 
Works, in at least one + of the following places: within a NOTICE text file distributed + as part of the Derivative Works; within the Source form or + documentation, if provided along with the Derivative Works; or, + within a display generated by the Derivative Works, if and + wherever such third-party notices normally appear. The contents + of the NOTICE file are for informational purposes only and + do not modify the License. You may add Your own attribution + notices within Derivative Works that You distribute, alongside + or as an addendum to the NOTICE text from the Work, provided + that such additional attribution notices cannot be construed + as modifying the License. + + You may add Your own copyright statement to Your modifications and + may provide additional or different license terms and conditions + for use, reproduction, or distribution of Your modifications, or + for any such Derivative Works as a whole, provided Your use, + reproduction, and distribution of the Work otherwise complies with + the conditions stated in this License. + + 5. Submission of Contributions. Unless You explicitly state otherwise, + any Contribution intentionally submitted for inclusion in the Work + by You to the Licensor shall be under the terms and conditions of + this License, without any additional terms or conditions. + Notwithstanding the above, nothing herein shall supersede or modify + the terms of any separate license agreement you may have executed + with Licensor regarding such Contributions. + + 6. Trademarks. This License does not grant permission to use the trade + names, trademarks, service marks, or product names of the Licensor, + except as required for reasonable and customary use in describing the + origin of the Work and reproducing the content of the NOTICE file. + + 7. Disclaimer of Warranty. 
Unless required by applicable law or + agreed to in writing, Licensor provides the Work (and each + Contributor provides its Contributions) on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or + implied, including, without limitation, any warranties or conditions + of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A + PARTICULAR PURPOSE. You are solely responsible for determining the + appropriateness of using or redistributing the Work and assume any + risks associated with Your exercise of permissions under this License. + + 8. Limitation of Liability. In no event and under no legal theory, + whether in tort (including negligence), contract, or otherwise, + unless required by applicable law (such as deliberate and grossly + negligent acts) or agreed to in writing, shall any Contributor be + liable to You for damages, including any direct, indirect, special, + incidental, or consequential damages of any character arising as a + result of this License or out of the use or inability to use the + Work (including but not limited to damages for loss of goodwill, + work stoppage, computer failure or malfunction, or any and all + other commercial damages or losses), even if such Contributor + has been advised of the possibility of such damages. + + 9. Accepting Warranty or Additional Liability. While redistributing + the Work or Derivative Works thereof, You may choose to offer, + and charge a fee for, acceptance of support, warranty, indemnity, + or other liability obligations and/or rights consistent with this + License. However, in accepting such obligations, You may act only + on Your own behalf and on Your sole responsibility, not on behalf + of any other Contributor, and only if You agree to indemnify, + defend, and hold each Contributor harmless for any liability + incurred by, or claims asserted against, such Contributor by reason + of your accepting any such warranty or additional liability. 
+ + END OF TERMS AND CONDITIONS + + APPENDIX: How to apply the Apache License to your work. + + To apply the Apache License to your work, attach the following + boilerplate notice, with the fields enclosed by brackets "{}" + replaced with your own identifying information. (Don't include + the brackets!) The text should be enclosed in the appropriate + comment syntax for the file format. We also recommend that a + file or class name and description of purpose be included on the + same "printed page" as the copyright notice for easier + identification within third-party archives. + + Copyright 2018-2020 Amazon.com, Inc. or its affiliates. All Rights Reserved. + + Licensed under the Apache License, Version 2.0 (the "License"); + you may not use this file except in compliance with the License. + You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + + Unless required by applicable law or agreed to in writing, software + distributed under the License is distributed on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + See the License for the specific language governing permissions and + limitations under the License. 
\ No newline at end of file diff --git a/amplify/functions/downloadDocument/node_modules/@aws-sdk/credential-provider-node/README.md b/amplify/functions/downloadDocument/node_modules/@aws-sdk/credential-provider-node/README.md new file mode 100644 index 0000000..7957cc0 --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@aws-sdk/credential-provider-node/README.md @@ -0,0 +1,104 @@ +# @aws-sdk/credential-provider-node + +[![NPM version](https://img.shields.io/npm/v/@aws-sdk/credential-provider-node/latest.svg)](https://www.npmjs.com/package/@aws-sdk/credential-provider-node) +[![NPM downloads](https://img.shields.io/npm/dm/@aws-sdk/credential-provider-node.svg)](https://www.npmjs.com/package/@aws-sdk/credential-provider-node) + +## AWS Credential Provider for Node.JS + +This module provides a factory function, `defaultProvider`, that will attempt to +source AWS credentials from a Node.JS environment. It will attempt to find +credentials from the following sources (listed in order of precedence): + +- Environment variables exposed via `process.env` +- SSO credentials from token cache +- Web identity token credentials +- Shared credentials and config ini files +- The EC2/ECS Instance Metadata Service + +The default credential provider will invoke one provider at a time and only +continue to the next if no credentials have been located. For example, if the +process finds values defined via the `AWS_ACCESS_KEY_ID` and +`AWS_SECRET_ACCESS_KEY` environment variables, the files at `~/.aws/credentials` +and `~/.aws/config` will not be read, nor will any messages be sent to the +Instance Metadata Service. + +If invalid configuration is encountered (such as a profile in +`~/.aws/credentials` specifying as its `source_profile` the name of a profile +that does not exist), then the chained provider will be rejected with an error +and will not invoke the next provider in the list. 
+ +_IMPORTANT_: if you intend to acquire credentials using EKS +[IAM Roles for Service Accounts](https://docs.aws.amazon.com/eks/latest/userguide/iam-roles-for-service-accounts.html), +then you must explicitly specify a value for `roleAssumerWithWebIdentity`. There is a +default function available in `@aws-sdk/client-sts` package. An example of using +this: + +```js +const { getDefaultRoleAssumerWithWebIdentity } = require("@aws-sdk/client-sts"); +const { defaultProvider } = require("@aws-sdk/credential-provider-node"); +const { S3Client, GetObjectCommand } = require("@aws-sdk/client-s3"); + +const provider = defaultProvider({ + roleAssumerWithWebIdentity: getDefaultRoleAssumerWithWebIdentity({ + // You must explicitly pass a region if you are not using us-east-1 + region: "eu-west-1" + }), +}); + +const client = new S3Client({ credentialDefaultProvider: provider }); +``` + +_IMPORTANT_: We provide a wrapper of this provider in `@aws-sdk/credential-providers` +package to save you from importing `getDefaultRoleAssumerWithWebIdentity()` or +`getDefaultRoleAssume()` from STS package. Similarly, you can do: + +```js +const { fromNodeProviderChain } = require("@aws-sdk/credential-providers"); + +const credentials = fromNodeProviderChain(); + +const client = new S3Client({ credentials }); +``` + +## Supported configuration + +You may customize how credentials are resolved by providing an options hash to +the `defaultProvider` factory function. The following options are +supported: + +- `profile` - The configuration profile to use. If not specified, the provider + will use the value in the `AWS_PROFILE` environment variable or a default of + `default`. +- `filepath` - The path to the shared credentials file. If not specified, the + provider will use the value in the `AWS_SHARED_CREDENTIALS_FILE` environment + variable or a default of `~/.aws/credentials`. +- `configFilepath` - The path to the shared config file. 
If not specified, the + provider will use the value in the `AWS_CONFIG_FILE` environment variable or a + default of `~/.aws/config`. +- `mfaCodeProvider` - A function that returns a a promise fulfilled with an + MFA token code for the provided MFA Serial code. If a profile requires an MFA + code and `mfaCodeProvider` is not a valid function, the credential provider + promise will be rejected. +- `roleAssumer` - A function that assumes a role and returns a promise + fulfilled with credentials for the assumed role. If not specified, no role + will be assumed, and an error will be thrown. +- `roleArn` - ARN to assume. If not specified, the provider will use the value + in the `AWS_ROLE_ARN` environment variable. +- `webIdentityTokenFile` - File location of where the `OIDC` token is stored. + If not specified, the provider will use the value in the `AWS_WEB_IDENTITY_TOKEN_FILE` + environment variable. +- `roleAssumerWithWebIdentity` - A function that assumes a role with web identity and + returns a promise fulfilled with credentials for the assumed role. +- `timeout` - The connection timeout (in milliseconds) to apply to any remote + requests. If not specified, a default value of `1000` (one second) is used. +- `maxRetries` - The maximum number of times any HTTP connections should be + retried. If not specified, a default value of `0` will be used. 
+ +## Related packages: + +- [AWS Credential Provider for Node.JS - Environment Variables](../credential-provider-env) +- [AWS Credential Provider for Node.JS - SSO](../credential-provider-sso) +- [AWS Credential Provider for Node.JS - Web Identity](../credential-provider-web-identity) +- [AWS Credential Provider for Node.JS - Shared Configuration Files](../credential-provider-ini) +- [AWS Credential Provider for Node.JS - Instance and Container Metadata](../credential-provider-imds) +- [AWS Shared Configuration File Loader](../shared-ini-file-loader) diff --git a/amplify/functions/downloadDocument/node_modules/@aws-sdk/credential-provider-node/dist-cjs/index.js b/amplify/functions/downloadDocument/node_modules/@aws-sdk/credential-provider-node/dist-cjs/index.js new file mode 100644 index 0000000..be4d2b6 --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@aws-sdk/credential-provider-node/dist-cjs/index.js @@ -0,0 +1,147 @@ +"use strict"; +var __create = Object.create; +var __defProp = Object.defineProperty; +var __getOwnPropDesc = Object.getOwnPropertyDescriptor; +var __getOwnPropNames = Object.getOwnPropertyNames; +var __getProtoOf = Object.getPrototypeOf; +var __hasOwnProp = Object.prototype.hasOwnProperty; +var __name = (target, value) => __defProp(target, "name", { value, configurable: true }); +var __export = (target, all) => { + for (var name in all) + __defProp(target, name, { get: all[name], enumerable: true }); +}; +var __copyProps = (to, from, except, desc) => { + if (from && typeof from === "object" || typeof from === "function") { + for (let key of __getOwnPropNames(from)) + if (!__hasOwnProp.call(to, key) && key !== except) + __defProp(to, key, { get: () => from[key], enumerable: !(desc = __getOwnPropDesc(from, key)) || desc.enumerable }); + } + return to; +}; +var __toESM = (mod, isNodeMode, target) => (target = mod != null ? 
__create(__getProtoOf(mod)) : {}, __copyProps( + // If the importer is in node compatibility mode or this is not an ESM + // file that has been converted to a CommonJS file using a Babel- + // compatible transform (i.e. "__esModule" has not been set), then set + // "default" to the CommonJS "module.exports" for node compatibility. + isNodeMode || !mod || !mod.__esModule ? __defProp(target, "default", { value: mod, enumerable: true }) : target, + mod +)); +var __toCommonJS = (mod) => __copyProps(__defProp({}, "__esModule", { value: true }), mod); + +// src/index.ts +var index_exports = {}; +__export(index_exports, { + credentialsTreatedAsExpired: () => credentialsTreatedAsExpired, + credentialsWillNeedRefresh: () => credentialsWillNeedRefresh, + defaultProvider: () => defaultProvider +}); +module.exports = __toCommonJS(index_exports); + +// src/defaultProvider.ts +var import_credential_provider_env = require("@aws-sdk/credential-provider-env"); + +var import_shared_ini_file_loader = require("@smithy/shared-ini-file-loader"); + +// src/remoteProvider.ts +var import_property_provider = require("@smithy/property-provider"); +var ENV_IMDS_DISABLED = "AWS_EC2_METADATA_DISABLED"; +var remoteProvider = /* @__PURE__ */ __name(async (init) => { + const { ENV_CMDS_FULL_URI, ENV_CMDS_RELATIVE_URI, fromContainerMetadata, fromInstanceMetadata } = await Promise.resolve().then(() => __toESM(require("@smithy/credential-provider-imds"))); + if (process.env[ENV_CMDS_RELATIVE_URI] || process.env[ENV_CMDS_FULL_URI]) { + init.logger?.debug("@aws-sdk/credential-provider-node - remoteProvider::fromHttp/fromContainerMetadata"); + const { fromHttp } = await Promise.resolve().then(() => __toESM(require("@aws-sdk/credential-provider-http"))); + return (0, import_property_provider.chain)(fromHttp(init), fromContainerMetadata(init)); + } + if (process.env[ENV_IMDS_DISABLED] && process.env[ENV_IMDS_DISABLED] !== "false") { + return async () => { + throw new 
import_property_provider.CredentialsProviderError("EC2 Instance Metadata Service access disabled", { logger: init.logger }); + }; + } + init.logger?.debug("@aws-sdk/credential-provider-node - remoteProvider::fromInstanceMetadata"); + return fromInstanceMetadata(init); +}, "remoteProvider"); + +// src/defaultProvider.ts +var multipleCredentialSourceWarningEmitted = false; +var defaultProvider = /* @__PURE__ */ __name((init = {}) => (0, import_property_provider.memoize)( + (0, import_property_provider.chain)( + async () => { + const profile = init.profile ?? process.env[import_shared_ini_file_loader.ENV_PROFILE]; + if (profile) { + const envStaticCredentialsAreSet = process.env[import_credential_provider_env.ENV_KEY] && process.env[import_credential_provider_env.ENV_SECRET]; + if (envStaticCredentialsAreSet) { + if (!multipleCredentialSourceWarningEmitted) { + const warnFn = init.logger?.warn && init.logger?.constructor?.name !== "NoOpLogger" ? init.logger.warn : console.warn; + warnFn( + `@aws-sdk/credential-provider-node - defaultProvider::fromEnv WARNING: + Multiple credential sources detected: + Both AWS_PROFILE and the pair AWS_ACCESS_KEY_ID/AWS_SECRET_ACCESS_KEY static credentials are set. + This SDK will proceed with the AWS_PROFILE value. + + However, a future version may change this behavior to prefer the ENV static credentials. + Please ensure that your environment only sets either the AWS_PROFILE or the + AWS_ACCESS_KEY_ID/AWS_SECRET_ACCESS_KEY pair. 
+` + ); + multipleCredentialSourceWarningEmitted = true; + } + } + throw new import_property_provider.CredentialsProviderError("AWS_PROFILE is set, skipping fromEnv provider.", { + logger: init.logger, + tryNextLink: true + }); + } + init.logger?.debug("@aws-sdk/credential-provider-node - defaultProvider::fromEnv"); + return (0, import_credential_provider_env.fromEnv)(init)(); + }, + async () => { + init.logger?.debug("@aws-sdk/credential-provider-node - defaultProvider::fromSSO"); + const { ssoStartUrl, ssoAccountId, ssoRegion, ssoRoleName, ssoSession } = init; + if (!ssoStartUrl && !ssoAccountId && !ssoRegion && !ssoRoleName && !ssoSession) { + throw new import_property_provider.CredentialsProviderError( + "Skipping SSO provider in default chain (inputs do not include SSO fields).", + { logger: init.logger } + ); + } + const { fromSSO } = await Promise.resolve().then(() => __toESM(require("@aws-sdk/credential-provider-sso"))); + return fromSSO(init)(); + }, + async () => { + init.logger?.debug("@aws-sdk/credential-provider-node - defaultProvider::fromIni"); + const { fromIni } = await Promise.resolve().then(() => __toESM(require("@aws-sdk/credential-provider-ini"))); + return fromIni(init)(); + }, + async () => { + init.logger?.debug("@aws-sdk/credential-provider-node - defaultProvider::fromProcess"); + const { fromProcess } = await Promise.resolve().then(() => __toESM(require("@aws-sdk/credential-provider-process"))); + return fromProcess(init)(); + }, + async () => { + init.logger?.debug("@aws-sdk/credential-provider-node - defaultProvider::fromTokenFile"); + const { fromTokenFile } = await Promise.resolve().then(() => __toESM(require("@aws-sdk/credential-provider-web-identity"))); + return fromTokenFile(init)(); + }, + async () => { + init.logger?.debug("@aws-sdk/credential-provider-node - defaultProvider::remoteProvider"); + return (await remoteProvider(init))(); + }, + async () => { + throw new import_property_provider.CredentialsProviderError("Could not 
load credentials from any providers", { + tryNextLink: false, + logger: init.logger + }); + } + ), + credentialsTreatedAsExpired, + credentialsWillNeedRefresh +), "defaultProvider"); +var credentialsWillNeedRefresh = /* @__PURE__ */ __name((credentials) => credentials?.expiration !== void 0, "credentialsWillNeedRefresh"); +var credentialsTreatedAsExpired = /* @__PURE__ */ __name((credentials) => credentials?.expiration !== void 0 && credentials.expiration.getTime() - Date.now() < 3e5, "credentialsTreatedAsExpired"); +// Annotate the CommonJS export names for ESM import in node: + +0 && (module.exports = { + defaultProvider, + credentialsWillNeedRefresh, + credentialsTreatedAsExpired +}); + diff --git a/amplify/functions/downloadDocument/node_modules/@aws-sdk/credential-provider-node/dist-es/defaultProvider.js b/amplify/functions/downloadDocument/node_modules/@aws-sdk/credential-provider-node/dist-es/defaultProvider.js new file mode 100644 index 0000000..d582cf8 --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@aws-sdk/credential-provider-node/dist-es/defaultProvider.js @@ -0,0 +1,62 @@ +import { ENV_KEY, ENV_SECRET, fromEnv } from "@aws-sdk/credential-provider-env"; +import { chain, CredentialsProviderError, memoize } from "@smithy/property-provider"; +import { ENV_PROFILE } from "@smithy/shared-ini-file-loader"; +import { remoteProvider } from "./remoteProvider"; +let multipleCredentialSourceWarningEmitted = false; +export const defaultProvider = (init = {}) => memoize(chain(async () => { + const profile = init.profile ?? process.env[ENV_PROFILE]; + if (profile) { + const envStaticCredentialsAreSet = process.env[ENV_KEY] && process.env[ENV_SECRET]; + if (envStaticCredentialsAreSet) { + if (!multipleCredentialSourceWarningEmitted) { + const warnFn = init.logger?.warn && init.logger?.constructor?.name !== "NoOpLogger" ? 
init.logger.warn : console.warn; + warnFn(`@aws-sdk/credential-provider-node - defaultProvider::fromEnv WARNING: + Multiple credential sources detected: + Both AWS_PROFILE and the pair AWS_ACCESS_KEY_ID/AWS_SECRET_ACCESS_KEY static credentials are set. + This SDK will proceed with the AWS_PROFILE value. + + However, a future version may change this behavior to prefer the ENV static credentials. + Please ensure that your environment only sets either the AWS_PROFILE or the + AWS_ACCESS_KEY_ID/AWS_SECRET_ACCESS_KEY pair. +`); + multipleCredentialSourceWarningEmitted = true; + } + } + throw new CredentialsProviderError("AWS_PROFILE is set, skipping fromEnv provider.", { + logger: init.logger, + tryNextLink: true, + }); + } + init.logger?.debug("@aws-sdk/credential-provider-node - defaultProvider::fromEnv"); + return fromEnv(init)(); +}, async () => { + init.logger?.debug("@aws-sdk/credential-provider-node - defaultProvider::fromSSO"); + const { ssoStartUrl, ssoAccountId, ssoRegion, ssoRoleName, ssoSession } = init; + if (!ssoStartUrl && !ssoAccountId && !ssoRegion && !ssoRoleName && !ssoSession) { + throw new CredentialsProviderError("Skipping SSO provider in default chain (inputs do not include SSO fields).", { logger: init.logger }); + } + const { fromSSO } = await import("@aws-sdk/credential-provider-sso"); + return fromSSO(init)(); +}, async () => { + init.logger?.debug("@aws-sdk/credential-provider-node - defaultProvider::fromIni"); + const { fromIni } = await import("@aws-sdk/credential-provider-ini"); + return fromIni(init)(); +}, async () => { + init.logger?.debug("@aws-sdk/credential-provider-node - defaultProvider::fromProcess"); + const { fromProcess } = await import("@aws-sdk/credential-provider-process"); + return fromProcess(init)(); +}, async () => { + init.logger?.debug("@aws-sdk/credential-provider-node - defaultProvider::fromTokenFile"); + const { fromTokenFile } = await import("@aws-sdk/credential-provider-web-identity"); + return 
fromTokenFile(init)(); +}, async () => { + init.logger?.debug("@aws-sdk/credential-provider-node - defaultProvider::remoteProvider"); + return (await remoteProvider(init))(); +}, async () => { + throw new CredentialsProviderError("Could not load credentials from any providers", { + tryNextLink: false, + logger: init.logger, + }); +}), credentialsTreatedAsExpired, credentialsWillNeedRefresh); +export const credentialsWillNeedRefresh = (credentials) => credentials?.expiration !== undefined; +export const credentialsTreatedAsExpired = (credentials) => credentials?.expiration !== undefined && credentials.expiration.getTime() - Date.now() < 300000; diff --git a/amplify/functions/downloadDocument/node_modules/@aws-sdk/credential-provider-node/dist-es/index.js b/amplify/functions/downloadDocument/node_modules/@aws-sdk/credential-provider-node/dist-es/index.js new file mode 100644 index 0000000..c82818e --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@aws-sdk/credential-provider-node/dist-es/index.js @@ -0,0 +1 @@ +export * from "./defaultProvider"; diff --git a/amplify/functions/downloadDocument/node_modules/@aws-sdk/credential-provider-node/dist-es/remoteProvider.js b/amplify/functions/downloadDocument/node_modules/@aws-sdk/credential-provider-node/dist-es/remoteProvider.js new file mode 100644 index 0000000..c455bc1 --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@aws-sdk/credential-provider-node/dist-es/remoteProvider.js @@ -0,0 +1,17 @@ +import { chain, CredentialsProviderError } from "@smithy/property-provider"; +export const ENV_IMDS_DISABLED = "AWS_EC2_METADATA_DISABLED"; +export const remoteProvider = async (init) => { + const { ENV_CMDS_FULL_URI, ENV_CMDS_RELATIVE_URI, fromContainerMetadata, fromInstanceMetadata } = await import("@smithy/credential-provider-imds"); + if (process.env[ENV_CMDS_RELATIVE_URI] || process.env[ENV_CMDS_FULL_URI]) { + init.logger?.debug("@aws-sdk/credential-provider-node - 
remoteProvider::fromHttp/fromContainerMetadata"); + const { fromHttp } = await import("@aws-sdk/credential-provider-http"); + return chain(fromHttp(init), fromContainerMetadata(init)); + } + if (process.env[ENV_IMDS_DISABLED] && process.env[ENV_IMDS_DISABLED] !== "false") { + return async () => { + throw new CredentialsProviderError("EC2 Instance Metadata Service access disabled", { logger: init.logger }); + }; + } + init.logger?.debug("@aws-sdk/credential-provider-node - remoteProvider::fromInstanceMetadata"); + return fromInstanceMetadata(init); +}; diff --git a/amplify/functions/downloadDocument/node_modules/@aws-sdk/credential-provider-node/dist-types/defaultProvider.d.ts b/amplify/functions/downloadDocument/node_modules/@aws-sdk/credential-provider-node/dist-types/defaultProvider.d.ts new file mode 100644 index 0000000..fd40150 --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@aws-sdk/credential-provider-node/dist-types/defaultProvider.d.ts @@ -0,0 +1,58 @@ +import type { FromHttpOptions } from "@aws-sdk/credential-provider-http"; +import type { FromIniInit } from "@aws-sdk/credential-provider-ini"; +import type { FromProcessInit } from "@aws-sdk/credential-provider-process"; +import type { FromSSOInit, SsoCredentialsParameters } from "@aws-sdk/credential-provider-sso"; +import type { FromTokenFileInit } from "@aws-sdk/credential-provider-web-identity"; +import type { RemoteProviderInit } from "@smithy/credential-provider-imds"; +import { AwsCredentialIdentity, MemoizedProvider } from "@smithy/types"; +/** + * @public + */ +export type DefaultProviderInit = FromIniInit & FromHttpOptions & RemoteProviderInit & FromProcessInit & (FromSSOInit & Partial) & FromTokenFileInit; +/** + * Creates a credential provider that will attempt to find credentials from the + * following sources (listed in order of precedence): + * * Environment variables exposed via `process.env` + * * SSO credentials from token cache + * * Web identity token credentials + * 
* Shared credentials and config ini files + * * The EC2/ECS Instance Metadata Service + * + * The default credential provider will invoke one provider at a time and only + * continue to the next if no credentials have been located. For example, if + * the process finds values defined via the `AWS_ACCESS_KEY_ID` and + * `AWS_SECRET_ACCESS_KEY` environment variables, the files at + * `~/.aws/credentials` and `~/.aws/config` will not be read, nor will any + * messages be sent to the Instance Metadata Service. + * + * @param init Configuration that is passed to each individual + * provider + * + * @see {@link fromEnv} The function used to source credentials from + * environment variables. + * @see {@link fromSSO} The function used to source credentials from + * resolved SSO token cache. + * @see {@link fromTokenFile} The function used to source credentials from + * token file. + * @see {@link fromIni} The function used to source credentials from INI + * files. + * @see {@link fromProcess} The function used to sources credentials from + * credential_process in INI files. + * @see {@link fromInstanceMetadata} The function used to source credentials from the + * EC2 Instance Metadata Service. + * @see {@link fromContainerMetadata} The function used to source credentials from the + * ECS Container Metadata Service. + */ +export declare const defaultProvider: (init?: DefaultProviderInit) => MemoizedProvider; +/** + * @internal + * + * @returns credentials have expiration. + */ +export declare const credentialsWillNeedRefresh: (credentials: AwsCredentialIdentity) => boolean; +/** + * @internal + * + * @returns credentials with less than 5 minutes left. 
+ */ +export declare const credentialsTreatedAsExpired: (credentials: AwsCredentialIdentity) => boolean; diff --git a/amplify/functions/downloadDocument/node_modules/@aws-sdk/credential-provider-node/dist-types/index.d.ts b/amplify/functions/downloadDocument/node_modules/@aws-sdk/credential-provider-node/dist-types/index.d.ts new file mode 100644 index 0000000..c82818e --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@aws-sdk/credential-provider-node/dist-types/index.d.ts @@ -0,0 +1 @@ +export * from "./defaultProvider"; diff --git a/amplify/functions/downloadDocument/node_modules/@aws-sdk/credential-provider-node/dist-types/remoteProvider.d.ts b/amplify/functions/downloadDocument/node_modules/@aws-sdk/credential-provider-node/dist-types/remoteProvider.d.ts new file mode 100644 index 0000000..4022a4e --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@aws-sdk/credential-provider-node/dist-types/remoteProvider.d.ts @@ -0,0 +1,11 @@ +import type { FromHttpOptions } from "@aws-sdk/credential-provider-http"; +import type { RemoteProviderInit } from "@smithy/credential-provider-imds"; +import type { AwsCredentialIdentityProvider } from "@smithy/types"; +/** + * @internal + */ +export declare const ENV_IMDS_DISABLED = "AWS_EC2_METADATA_DISABLED"; +/** + * @internal + */ +export declare const remoteProvider: (init: RemoteProviderInit | FromHttpOptions) => Promise; diff --git a/amplify/functions/downloadDocument/node_modules/@aws-sdk/credential-provider-node/dist-types/ts3.4/defaultProvider.d.ts b/amplify/functions/downloadDocument/node_modules/@aws-sdk/credential-provider-node/dist-types/ts3.4/defaultProvider.d.ts new file mode 100644 index 0000000..e1f1a8d --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@aws-sdk/credential-provider-node/dist-types/ts3.4/defaultProvider.d.ts @@ -0,0 +1,25 @@ +import { FromHttpOptions } from "@aws-sdk/credential-provider-http"; +import { FromIniInit } from 
"@aws-sdk/credential-provider-ini"; +import { FromProcessInit } from "@aws-sdk/credential-provider-process"; +import { + FromSSOInit, + SsoCredentialsParameters, +} from "@aws-sdk/credential-provider-sso"; +import { FromTokenFileInit } from "@aws-sdk/credential-provider-web-identity"; +import { RemoteProviderInit } from "@smithy/credential-provider-imds"; +import { AwsCredentialIdentity, MemoizedProvider } from "@smithy/types"; +export type DefaultProviderInit = FromIniInit & + FromHttpOptions & + RemoteProviderInit & + FromProcessInit & + (FromSSOInit & Partial) & + FromTokenFileInit; +export declare const defaultProvider: ( + init?: DefaultProviderInit +) => MemoizedProvider; +export declare const credentialsWillNeedRefresh: ( + credentials: AwsCredentialIdentity +) => boolean; +export declare const credentialsTreatedAsExpired: ( + credentials: AwsCredentialIdentity +) => boolean; diff --git a/amplify/functions/downloadDocument/node_modules/@aws-sdk/credential-provider-node/dist-types/ts3.4/index.d.ts b/amplify/functions/downloadDocument/node_modules/@aws-sdk/credential-provider-node/dist-types/ts3.4/index.d.ts new file mode 100644 index 0000000..c82818e --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@aws-sdk/credential-provider-node/dist-types/ts3.4/index.d.ts @@ -0,0 +1 @@ +export * from "./defaultProvider"; diff --git a/amplify/functions/downloadDocument/node_modules/@aws-sdk/credential-provider-node/dist-types/ts3.4/remoteProvider.d.ts b/amplify/functions/downloadDocument/node_modules/@aws-sdk/credential-provider-node/dist-types/ts3.4/remoteProvider.d.ts new file mode 100644 index 0000000..90948cc --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@aws-sdk/credential-provider-node/dist-types/ts3.4/remoteProvider.d.ts @@ -0,0 +1,7 @@ +import { FromHttpOptions } from "@aws-sdk/credential-provider-http"; +import { RemoteProviderInit } from "@smithy/credential-provider-imds"; +import { AwsCredentialIdentityProvider } from 
"@smithy/types"; +export declare const ENV_IMDS_DISABLED = "AWS_EC2_METADATA_DISABLED"; +export declare const remoteProvider: ( + init: RemoteProviderInit | FromHttpOptions +) => Promise; diff --git a/amplify/functions/downloadDocument/node_modules/@aws-sdk/credential-provider-node/package.json b/amplify/functions/downloadDocument/node_modules/@aws-sdk/credential-provider-node/package.json new file mode 100644 index 0000000..40b3009 --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@aws-sdk/credential-provider-node/package.json @@ -0,0 +1,70 @@ +{ + "name": "@aws-sdk/credential-provider-node", + "version": "3.803.0", + "description": "AWS credential provider that sources credentials from a Node.JS environment. ", + "engines": { + "node": ">=18.0.0" + }, + "main": "./dist-cjs/index.js", + "module": "./dist-es/index.js", + "scripts": { + "build": "concurrently 'yarn:build:cjs' 'yarn:build:es' 'yarn:build:types'", + "build:cjs": "node ../../scripts/compilation/inline credential-provider-node", + "build:es": "tsc -p tsconfig.es.json", + "build:include:deps": "lerna run --scope $npm_package_name --include-dependencies build", + "build:types": "tsc -p tsconfig.types.json", + "build:types:downlevel": "downlevel-dts dist-types dist-types/ts3.4", + "clean": "rimraf ./dist-* && rimraf *.tsbuildinfo", + "test": "yarn g:vitest run", + "test:integration": "yarn g:jest -c jest.config.integ.js", + "test:watch": "yarn g:vitest watch" + }, + "keywords": [ + "aws", + "credentials" + ], + "author": { + "name": "AWS SDK for JavaScript Team", + "url": "https://aws.amazon.com/javascript/" + }, + "license": "Apache-2.0", + "dependencies": { + "@aws-sdk/credential-provider-env": "3.799.0", + "@aws-sdk/credential-provider-http": "3.799.0", + "@aws-sdk/credential-provider-ini": "3.803.0", + "@aws-sdk/credential-provider-process": "3.799.0", + "@aws-sdk/credential-provider-sso": "3.803.0", + "@aws-sdk/credential-provider-web-identity": "3.803.0", + "@aws-sdk/types": 
"3.775.0", + "@smithy/credential-provider-imds": "^4.0.2", + "@smithy/property-provider": "^4.0.2", + "@smithy/shared-ini-file-loader": "^4.0.2", + "@smithy/types": "^4.2.0", + "tslib": "^2.6.2" + }, + "devDependencies": { + "@tsconfig/recommended": "1.0.1", + "@types/node": "^18.19.69", + "concurrently": "7.0.0", + "downlevel-dts": "0.10.1", + "rimraf": "3.0.2", + "typescript": "~5.2.2" + }, + "types": "./dist-types/index.d.ts", + "typesVersions": { + "<4.0": { + "dist-types/*": [ + "dist-types/ts3.4/*" + ] + } + }, + "files": [ + "dist-*/**" + ], + "homepage": "https://github.com/aws/aws-sdk-js-v3/tree/main/packages/credential-provider-node", + "repository": { + "type": "git", + "url": "https://github.com/aws/aws-sdk-js-v3.git", + "directory": "packages/credential-provider-node" + } +} diff --git a/amplify/functions/downloadDocument/node_modules/@aws-sdk/credential-provider-process/LICENSE b/amplify/functions/downloadDocument/node_modules/@aws-sdk/credential-provider-process/LICENSE new file mode 100644 index 0000000..f9a6673 --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@aws-sdk/credential-provider-process/LICENSE @@ -0,0 +1,201 @@ +Apache License + Version 2.0, January 2004 + http://www.apache.org/licenses/ + + TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION + + 1. Definitions. + + "License" shall mean the terms and conditions for use, reproduction, + and distribution as defined by Sections 1 through 9 of this document. + + "Licensor" shall mean the copyright owner or entity authorized by + the copyright owner that is granting the License. + + "Legal Entity" shall mean the union of the acting entity and all + other entities that control, are controlled by, or are under common + control with that entity. 
For the purposes of this definition, + "control" means (i) the power, direct or indirect, to cause the + direction or management of such entity, whether by contract or + otherwise, or (ii) ownership of fifty percent (50%) or more of the + outstanding shares, or (iii) beneficial ownership of such entity. + + "You" (or "Your") shall mean an individual or Legal Entity + exercising permissions granted by this License. + + "Source" form shall mean the preferred form for making modifications, + including but not limited to software source code, documentation + source, and configuration files. + + "Object" form shall mean any form resulting from mechanical + transformation or translation of a Source form, including but + not limited to compiled object code, generated documentation, + and conversions to other media types. + + "Work" shall mean the work of authorship, whether in Source or + Object form, made available under the License, as indicated by a + copyright notice that is included in or attached to the work + (an example is provided in the Appendix below). + + "Derivative Works" shall mean any work, whether in Source or Object + form, that is based on (or derived from) the Work and for which the + editorial revisions, annotations, elaborations, or other modifications + represent, as a whole, an original work of authorship. For the purposes + of this License, Derivative Works shall not include works that remain + separable from, or merely link (or bind by name) to the interfaces of, + the Work and Derivative Works thereof. + + "Contribution" shall mean any work of authorship, including + the original version of the Work and any modifications or additions + to that Work or Derivative Works thereof, that is intentionally + submitted to Licensor for inclusion in the Work by the copyright owner + or by an individual or Legal Entity authorized to submit on behalf of + the copyright owner. 
For the purposes of this definition, "submitted" + means any form of electronic, verbal, or written communication sent + to the Licensor or its representatives, including but not limited to + communication on electronic mailing lists, source code control systems, + and issue tracking systems that are managed by, or on behalf of, the + Licensor for the purpose of discussing and improving the Work, but + excluding communication that is conspicuously marked or otherwise + designated in writing by the copyright owner as "Not a Contribution." + + "Contributor" shall mean Licensor and any individual or Legal Entity + on behalf of whom a Contribution has been received by Licensor and + subsequently incorporated within the Work. + + 2. Grant of Copyright License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + copyright license to reproduce, prepare Derivative Works of, + publicly display, publicly perform, sublicense, and distribute the + Work and such Derivative Works in Source or Object form. + + 3. Grant of Patent License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + (except as stated in this section) patent license to make, have made, + use, offer to sell, sell, import, and otherwise transfer the Work, + where such license applies only to those patent claims licensable + by such Contributor that are necessarily infringed by their + Contribution(s) alone or by combination of their Contribution(s) + with the Work to which such Contribution(s) was submitted. 
If You + institute patent litigation against any entity (including a + cross-claim or counterclaim in a lawsuit) alleging that the Work + or a Contribution incorporated within the Work constitutes direct + or contributory patent infringement, then any patent licenses + granted to You under this License for that Work shall terminate + as of the date such litigation is filed. + + 4. Redistribution. You may reproduce and distribute copies of the + Work or Derivative Works thereof in any medium, with or without + modifications, and in Source or Object form, provided that You + meet the following conditions: + + (a) You must give any other recipients of the Work or + Derivative Works a copy of this License; and + + (b) You must cause any modified files to carry prominent notices + stating that You changed the files; and + + (c) You must retain, in the Source form of any Derivative Works + that You distribute, all copyright, patent, trademark, and + attribution notices from the Source form of the Work, + excluding those notices that do not pertain to any part of + the Derivative Works; and + + (d) If the Work includes a "NOTICE" text file as part of its + distribution, then any Derivative Works that You distribute must + include a readable copy of the attribution notices contained + within such NOTICE file, excluding those notices that do not + pertain to any part of the Derivative Works, in at least one + of the following places: within a NOTICE text file distributed + as part of the Derivative Works; within the Source form or + documentation, if provided along with the Derivative Works; or, + within a display generated by the Derivative Works, if and + wherever such third-party notices normally appear. The contents + of the NOTICE file are for informational purposes only and + do not modify the License. 
You may add Your own attribution + notices within Derivative Works that You distribute, alongside + or as an addendum to the NOTICE text from the Work, provided + that such additional attribution notices cannot be construed + as modifying the License. + + You may add Your own copyright statement to Your modifications and + may provide additional or different license terms and conditions + for use, reproduction, or distribution of Your modifications, or + for any such Derivative Works as a whole, provided Your use, + reproduction, and distribution of the Work otherwise complies with + the conditions stated in this License. + + 5. Submission of Contributions. Unless You explicitly state otherwise, + any Contribution intentionally submitted for inclusion in the Work + by You to the Licensor shall be under the terms and conditions of + this License, without any additional terms or conditions. + Notwithstanding the above, nothing herein shall supersede or modify + the terms of any separate license agreement you may have executed + with Licensor regarding such Contributions. + + 6. Trademarks. This License does not grant permission to use the trade + names, trademarks, service marks, or product names of the Licensor, + except as required for reasonable and customary use in describing the + origin of the Work and reproducing the content of the NOTICE file. + + 7. Disclaimer of Warranty. Unless required by applicable law or + agreed to in writing, Licensor provides the Work (and each + Contributor provides its Contributions) on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or + implied, including, without limitation, any warranties or conditions + of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A + PARTICULAR PURPOSE. You are solely responsible for determining the + appropriateness of using or redistributing the Work and assume any + risks associated with Your exercise of permissions under this License. + + 8. 
Limitation of Liability. In no event and under no legal theory, + whether in tort (including negligence), contract, or otherwise, + unless required by applicable law (such as deliberate and grossly + negligent acts) or agreed to in writing, shall any Contributor be + liable to You for damages, including any direct, indirect, special, + incidental, or consequential damages of any character arising as a + result of this License or out of the use or inability to use the + Work (including but not limited to damages for loss of goodwill, + work stoppage, computer failure or malfunction, or any and all + other commercial damages or losses), even if such Contributor + has been advised of the possibility of such damages. + + 9. Accepting Warranty or Additional Liability. While redistributing + the Work or Derivative Works thereof, You may choose to offer, + and charge a fee for, acceptance of support, warranty, indemnity, + or other liability obligations and/or rights consistent with this + License. However, in accepting such obligations, You may act only + on Your own behalf and on Your sole responsibility, not on behalf + of any other Contributor, and only if You agree to indemnify, + defend, and hold each Contributor harmless for any liability + incurred by, or claims asserted against, such Contributor by reason + of your accepting any such warranty or additional liability. + + END OF TERMS AND CONDITIONS + + APPENDIX: How to apply the Apache License to your work. + + To apply the Apache License to your work, attach the following + boilerplate notice, with the fields enclosed by brackets "{}" + replaced with your own identifying information. (Don't include + the brackets!) The text should be enclosed in the appropriate + comment syntax for the file format. We also recommend that a + file or class name and description of purpose be included on the + same "printed page" as the copyright notice for easier + identification within third-party archives. 
+ + Copyright 2019 Amazon.com, Inc. or its affiliates. All Rights Reserved. + + Licensed under the Apache License, Version 2.0 (the "License"); + you may not use this file except in compliance with the License. + You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + + Unless required by applicable law or agreed to in writing, software + distributed under the License is distributed on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + See the License for the specific language governing permissions and + limitations under the License. \ No newline at end of file diff --git a/amplify/functions/downloadDocument/node_modules/@aws-sdk/credential-provider-process/README.md b/amplify/functions/downloadDocument/node_modules/@aws-sdk/credential-provider-process/README.md new file mode 100644 index 0000000..4e9d9bd --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@aws-sdk/credential-provider-process/README.md @@ -0,0 +1,11 @@ +# @aws-sdk/credential-provider-process + +[![NPM version](https://img.shields.io/npm/v/@aws-sdk/credential-provider-process/latest.svg)](https://www.npmjs.com/package/@aws-sdk/credential-provider-process) +[![NPM downloads](https://img.shields.io/npm/dm/@aws-sdk/credential-provider-process.svg)](https://www.npmjs.com/package/@aws-sdk/credential-provider-process) + +> An internal package + +## Usage + +You probably shouldn't, at least directly. Please use [@aws-sdk/credential-providers](https://www.npmjs.com/package/@aws-sdk/credential-providers) +instead. 
diff --git a/amplify/functions/downloadDocument/node_modules/@aws-sdk/credential-provider-process/dist-cjs/index.js b/amplify/functions/downloadDocument/node_modules/@aws-sdk/credential-provider-process/dist-cjs/index.js new file mode 100644 index 0000000..57146de --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@aws-sdk/credential-provider-process/dist-cjs/index.js @@ -0,0 +1,114 @@ +"use strict"; +var __defProp = Object.defineProperty; +var __getOwnPropDesc = Object.getOwnPropertyDescriptor; +var __getOwnPropNames = Object.getOwnPropertyNames; +var __hasOwnProp = Object.prototype.hasOwnProperty; +var __name = (target, value) => __defProp(target, "name", { value, configurable: true }); +var __export = (target, all) => { + for (var name in all) + __defProp(target, name, { get: all[name], enumerable: true }); +}; +var __copyProps = (to, from, except, desc) => { + if (from && typeof from === "object" || typeof from === "function") { + for (let key of __getOwnPropNames(from)) + if (!__hasOwnProp.call(to, key) && key !== except) + __defProp(to, key, { get: () => from[key], enumerable: !(desc = __getOwnPropDesc(from, key)) || desc.enumerable }); + } + return to; +}; +var __toCommonJS = (mod) => __copyProps(__defProp({}, "__esModule", { value: true }), mod); + +// src/index.ts +var index_exports = {}; +__export(index_exports, { + fromProcess: () => fromProcess +}); +module.exports = __toCommonJS(index_exports); + +// src/fromProcess.ts +var import_shared_ini_file_loader = require("@smithy/shared-ini-file-loader"); + +// src/resolveProcessCredentials.ts +var import_property_provider = require("@smithy/property-provider"); +var import_child_process = require("child_process"); +var import_util = require("util"); + +// src/getValidatedProcessCredentials.ts +var import_client = require("@aws-sdk/core/client"); +var getValidatedProcessCredentials = /* @__PURE__ */ __name((profileName, data, profiles) => { + if (data.Version !== 1) { + throw Error(`Profile 
${profileName} credential_process did not return Version 1.`); + } + if (data.AccessKeyId === void 0 || data.SecretAccessKey === void 0) { + throw Error(`Profile ${profileName} credential_process returned invalid credentials.`); + } + if (data.Expiration) { + const currentTime = /* @__PURE__ */ new Date(); + const expireTime = new Date(data.Expiration); + if (expireTime < currentTime) { + throw Error(`Profile ${profileName} credential_process returned expired credentials.`); + } + } + let accountId = data.AccountId; + if (!accountId && profiles?.[profileName]?.aws_account_id) { + accountId = profiles[profileName].aws_account_id; + } + const credentials = { + accessKeyId: data.AccessKeyId, + secretAccessKey: data.SecretAccessKey, + ...data.SessionToken && { sessionToken: data.SessionToken }, + ...data.Expiration && { expiration: new Date(data.Expiration) }, + ...data.CredentialScope && { credentialScope: data.CredentialScope }, + ...accountId && { accountId } + }; + (0, import_client.setCredentialFeature)(credentials, "CREDENTIALS_PROCESS", "w"); + return credentials; +}, "getValidatedProcessCredentials"); + +// src/resolveProcessCredentials.ts +var resolveProcessCredentials = /* @__PURE__ */ __name(async (profileName, profiles, logger) => { + const profile = profiles[profileName]; + if (profiles[profileName]) { + const credentialProcess = profile["credential_process"]; + if (credentialProcess !== void 0) { + const execPromise = (0, import_util.promisify)(import_child_process.exec); + try { + const { stdout } = await execPromise(credentialProcess); + let data; + try { + data = JSON.parse(stdout.trim()); + } catch { + throw Error(`Profile ${profileName} credential_process returned invalid JSON.`); + } + return getValidatedProcessCredentials(profileName, data, profiles); + } catch (error) { + throw new import_property_provider.CredentialsProviderError(error.message, { logger }); + } + } else { + throw new import_property_provider.CredentialsProviderError(`Profile 
${profileName} did not contain credential_process.`, { logger }); + } + } else { + throw new import_property_provider.CredentialsProviderError(`Profile ${profileName} could not be found in shared credentials file.`, { + logger + }); + } +}, "resolveProcessCredentials"); + +// src/fromProcess.ts +var fromProcess = /* @__PURE__ */ __name((init = {}) => async ({ callerClientConfig } = {}) => { + init.logger?.debug("@aws-sdk/credential-provider-process - fromProcess"); + const profiles = await (0, import_shared_ini_file_loader.parseKnownFiles)(init); + return resolveProcessCredentials( + (0, import_shared_ini_file_loader.getProfileName)({ + profile: init.profile ?? callerClientConfig?.profile + }), + profiles, + init.logger + ); +}, "fromProcess"); +// Annotate the CommonJS export names for ESM import in node: + +0 && (module.exports = { + fromProcess +}); + diff --git a/amplify/functions/downloadDocument/node_modules/@aws-sdk/credential-provider-process/dist-es/ProcessCredentials.js b/amplify/functions/downloadDocument/node_modules/@aws-sdk/credential-provider-process/dist-es/ProcessCredentials.js new file mode 100644 index 0000000..cb0ff5c --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@aws-sdk/credential-provider-process/dist-es/ProcessCredentials.js @@ -0,0 +1 @@ +export {}; diff --git a/amplify/functions/downloadDocument/node_modules/@aws-sdk/credential-provider-process/dist-es/fromProcess.js b/amplify/functions/downloadDocument/node_modules/@aws-sdk/credential-provider-process/dist-es/fromProcess.js new file mode 100644 index 0000000..9e1e800 --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@aws-sdk/credential-provider-process/dist-es/fromProcess.js @@ -0,0 +1,9 @@ +import { getProfileName, parseKnownFiles } from "@smithy/shared-ini-file-loader"; +import { resolveProcessCredentials } from "./resolveProcessCredentials"; +export const fromProcess = (init = {}) => async ({ callerClientConfig } = {}) => { + 
init.logger?.debug("@aws-sdk/credential-provider-process - fromProcess"); + const profiles = await parseKnownFiles(init); + return resolveProcessCredentials(getProfileName({ + profile: init.profile ?? callerClientConfig?.profile, + }), profiles, init.logger); +}; diff --git a/amplify/functions/downloadDocument/node_modules/@aws-sdk/credential-provider-process/dist-es/getValidatedProcessCredentials.js b/amplify/functions/downloadDocument/node_modules/@aws-sdk/credential-provider-process/dist-es/getValidatedProcessCredentials.js new file mode 100644 index 0000000..caa0dd1 --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@aws-sdk/credential-provider-process/dist-es/getValidatedProcessCredentials.js @@ -0,0 +1,30 @@ +import { setCredentialFeature } from "@aws-sdk/core/client"; +export const getValidatedProcessCredentials = (profileName, data, profiles) => { + if (data.Version !== 1) { + throw Error(`Profile ${profileName} credential_process did not return Version 1.`); + } + if (data.AccessKeyId === undefined || data.SecretAccessKey === undefined) { + throw Error(`Profile ${profileName} credential_process returned invalid credentials.`); + } + if (data.Expiration) { + const currentTime = new Date(); + const expireTime = new Date(data.Expiration); + if (expireTime < currentTime) { + throw Error(`Profile ${profileName} credential_process returned expired credentials.`); + } + } + let accountId = data.AccountId; + if (!accountId && profiles?.[profileName]?.aws_account_id) { + accountId = profiles[profileName].aws_account_id; + } + const credentials = { + accessKeyId: data.AccessKeyId, + secretAccessKey: data.SecretAccessKey, + ...(data.SessionToken && { sessionToken: data.SessionToken }), + ...(data.Expiration && { expiration: new Date(data.Expiration) }), + ...(data.CredentialScope && { credentialScope: data.CredentialScope }), + ...(accountId && { accountId }), + }; + setCredentialFeature(credentials, "CREDENTIALS_PROCESS", "w"); + return credentials; 
+}; diff --git a/amplify/functions/downloadDocument/node_modules/@aws-sdk/credential-provider-process/dist-es/index.js b/amplify/functions/downloadDocument/node_modules/@aws-sdk/credential-provider-process/dist-es/index.js new file mode 100644 index 0000000..b921d35 --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@aws-sdk/credential-provider-process/dist-es/index.js @@ -0,0 +1 @@ +export * from "./fromProcess"; diff --git a/amplify/functions/downloadDocument/node_modules/@aws-sdk/credential-provider-process/dist-es/resolveProcessCredentials.js b/amplify/functions/downloadDocument/node_modules/@aws-sdk/credential-provider-process/dist-es/resolveProcessCredentials.js new file mode 100644 index 0000000..334e0af --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@aws-sdk/credential-provider-process/dist-es/resolveProcessCredentials.js @@ -0,0 +1,35 @@ +import { CredentialsProviderError } from "@smithy/property-provider"; +import { exec } from "child_process"; +import { promisify } from "util"; +import { getValidatedProcessCredentials } from "./getValidatedProcessCredentials"; +export const resolveProcessCredentials = async (profileName, profiles, logger) => { + const profile = profiles[profileName]; + if (profiles[profileName]) { + const credentialProcess = profile["credential_process"]; + if (credentialProcess !== undefined) { + const execPromise = promisify(exec); + try { + const { stdout } = await execPromise(credentialProcess); + let data; + try { + data = JSON.parse(stdout.trim()); + } + catch { + throw Error(`Profile ${profileName} credential_process returned invalid JSON.`); + } + return getValidatedProcessCredentials(profileName, data, profiles); + } + catch (error) { + throw new CredentialsProviderError(error.message, { logger }); + } + } + else { + throw new CredentialsProviderError(`Profile ${profileName} did not contain credential_process.`, { logger }); + } + } + else { + throw new CredentialsProviderError(`Profile 
${profileName} could not be found in shared credentials file.`, { + logger, + }); + } +}; diff --git a/amplify/functions/downloadDocument/node_modules/@aws-sdk/credential-provider-process/dist-types/ProcessCredentials.d.ts b/amplify/functions/downloadDocument/node_modules/@aws-sdk/credential-provider-process/dist-types/ProcessCredentials.d.ts new file mode 100644 index 0000000..a4e6b46 --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@aws-sdk/credential-provider-process/dist-types/ProcessCredentials.d.ts @@ -0,0 +1,12 @@ +/** + * @internal + */ +export type ProcessCredentials = { + Version: number; + AccessKeyId: string; + SecretAccessKey: string; + SessionToken?: string; + Expiration?: number; + CredentialScope?: string; + AccountId?: string; +}; diff --git a/amplify/functions/downloadDocument/node_modules/@aws-sdk/credential-provider-process/dist-types/fromProcess.d.ts b/amplify/functions/downloadDocument/node_modules/@aws-sdk/credential-provider-process/dist-types/fromProcess.d.ts new file mode 100644 index 0000000..2177630 --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@aws-sdk/credential-provider-process/dist-types/fromProcess.d.ts @@ -0,0 +1,14 @@ +import type { CredentialProviderOptions, RuntimeConfigAwsCredentialIdentityProvider } from "@aws-sdk/types"; +import { SourceProfileInit } from "@smithy/shared-ini-file-loader"; +/** + * @internal + */ +export interface FromProcessInit extends SourceProfileInit, CredentialProviderOptions { +} +/** + * @internal + * + * Creates a credential provider that will read from a credential_process specified + * in ini files. 
+ */ +export declare const fromProcess: (init?: FromProcessInit) => RuntimeConfigAwsCredentialIdentityProvider; diff --git a/amplify/functions/downloadDocument/node_modules/@aws-sdk/credential-provider-process/dist-types/getValidatedProcessCredentials.d.ts b/amplify/functions/downloadDocument/node_modules/@aws-sdk/credential-provider-process/dist-types/getValidatedProcessCredentials.d.ts new file mode 100644 index 0000000..1e86d6b --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@aws-sdk/credential-provider-process/dist-types/getValidatedProcessCredentials.d.ts @@ -0,0 +1,6 @@ +import { AwsCredentialIdentity, ParsedIniData } from "@smithy/types"; +import { ProcessCredentials } from "./ProcessCredentials"; +/** + * @internal + */ +export declare const getValidatedProcessCredentials: (profileName: string, data: ProcessCredentials, profiles: ParsedIniData) => AwsCredentialIdentity; diff --git a/amplify/functions/downloadDocument/node_modules/@aws-sdk/credential-provider-process/dist-types/index.d.ts b/amplify/functions/downloadDocument/node_modules/@aws-sdk/credential-provider-process/dist-types/index.d.ts new file mode 100644 index 0000000..adad939 --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@aws-sdk/credential-provider-process/dist-types/index.d.ts @@ -0,0 +1,4 @@ +/** + * @internal + */ +export * from "./fromProcess"; diff --git a/amplify/functions/downloadDocument/node_modules/@aws-sdk/credential-provider-process/dist-types/resolveProcessCredentials.d.ts b/amplify/functions/downloadDocument/node_modules/@aws-sdk/credential-provider-process/dist-types/resolveProcessCredentials.d.ts new file mode 100644 index 0000000..4f69ca7 --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@aws-sdk/credential-provider-process/dist-types/resolveProcessCredentials.d.ts @@ -0,0 +1,5 @@ +import { AwsCredentialIdentity, Logger, ParsedIniData } from "@smithy/types"; +/** + * @internal + */ +export declare const 
resolveProcessCredentials: (profileName: string, profiles: ParsedIniData, logger?: Logger) => Promise; diff --git a/amplify/functions/downloadDocument/node_modules/@aws-sdk/credential-provider-process/dist-types/ts3.4/ProcessCredentials.d.ts b/amplify/functions/downloadDocument/node_modules/@aws-sdk/credential-provider-process/dist-types/ts3.4/ProcessCredentials.d.ts new file mode 100644 index 0000000..45acf5e --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@aws-sdk/credential-provider-process/dist-types/ts3.4/ProcessCredentials.d.ts @@ -0,0 +1,9 @@ +export type ProcessCredentials = { + Version: number; + AccessKeyId: string; + SecretAccessKey: string; + SessionToken?: string; + Expiration?: number; + CredentialScope?: string; + AccountId?: string; +}; diff --git a/amplify/functions/downloadDocument/node_modules/@aws-sdk/credential-provider-process/dist-types/ts3.4/fromProcess.d.ts b/amplify/functions/downloadDocument/node_modules/@aws-sdk/credential-provider-process/dist-types/ts3.4/fromProcess.d.ts new file mode 100644 index 0000000..8e39656 --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@aws-sdk/credential-provider-process/dist-types/ts3.4/fromProcess.d.ts @@ -0,0 +1,11 @@ +import { + CredentialProviderOptions, + RuntimeConfigAwsCredentialIdentityProvider, +} from "@aws-sdk/types"; +import { SourceProfileInit } from "@smithy/shared-ini-file-loader"; +export interface FromProcessInit + extends SourceProfileInit, + CredentialProviderOptions {} +export declare const fromProcess: ( + init?: FromProcessInit +) => RuntimeConfigAwsCredentialIdentityProvider; diff --git a/amplify/functions/downloadDocument/node_modules/@aws-sdk/credential-provider-process/dist-types/ts3.4/getValidatedProcessCredentials.d.ts b/amplify/functions/downloadDocument/node_modules/@aws-sdk/credential-provider-process/dist-types/ts3.4/getValidatedProcessCredentials.d.ts new file mode 100644 index 0000000..f44c81c --- /dev/null +++ 
b/amplify/functions/downloadDocument/node_modules/@aws-sdk/credential-provider-process/dist-types/ts3.4/getValidatedProcessCredentials.d.ts @@ -0,0 +1,7 @@ +import { AwsCredentialIdentity, ParsedIniData } from "@smithy/types"; +import { ProcessCredentials } from "./ProcessCredentials"; +export declare const getValidatedProcessCredentials: ( + profileName: string, + data: ProcessCredentials, + profiles: ParsedIniData +) => AwsCredentialIdentity; diff --git a/amplify/functions/downloadDocument/node_modules/@aws-sdk/credential-provider-process/dist-types/ts3.4/index.d.ts b/amplify/functions/downloadDocument/node_modules/@aws-sdk/credential-provider-process/dist-types/ts3.4/index.d.ts new file mode 100644 index 0000000..b921d35 --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@aws-sdk/credential-provider-process/dist-types/ts3.4/index.d.ts @@ -0,0 +1 @@ +export * from "./fromProcess"; diff --git a/amplify/functions/downloadDocument/node_modules/@aws-sdk/credential-provider-process/dist-types/ts3.4/resolveProcessCredentials.d.ts b/amplify/functions/downloadDocument/node_modules/@aws-sdk/credential-provider-process/dist-types/ts3.4/resolveProcessCredentials.d.ts new file mode 100644 index 0000000..a204db4 --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@aws-sdk/credential-provider-process/dist-types/ts3.4/resolveProcessCredentials.d.ts @@ -0,0 +1,6 @@ +import { AwsCredentialIdentity, Logger, ParsedIniData } from "@smithy/types"; +export declare const resolveProcessCredentials: ( + profileName: string, + profiles: ParsedIniData, + logger?: Logger +) => Promise; diff --git a/amplify/functions/downloadDocument/node_modules/@aws-sdk/credential-provider-process/package.json b/amplify/functions/downloadDocument/node_modules/@aws-sdk/credential-provider-process/package.json new file mode 100644 index 0000000..fb1f383 --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@aws-sdk/credential-provider-process/package.json @@ 
-0,0 +1,63 @@ +{ + "name": "@aws-sdk/credential-provider-process", + "version": "3.799.0", + "description": "AWS credential provider that sources credential_process from ~/.aws/credentials and ~/.aws/config", + "main": "./dist-cjs/index.js", + "module": "./dist-es/index.js", + "scripts": { + "build": "concurrently 'yarn:build:cjs' 'yarn:build:es' 'yarn:build:types'", + "build:cjs": "node ../../scripts/compilation/inline credential-provider-process", + "build:es": "tsc -p tsconfig.es.json", + "build:include:deps": "lerna run --scope $npm_package_name --include-dependencies build", + "build:types": "tsc -p tsconfig.types.json", + "build:types:downlevel": "downlevel-dts dist-types dist-types/ts3.4", + "clean": "rimraf ./dist-* && rimraf *.tsbuildinfo", + "test": "yarn g:vitest run", + "test:watch": "yarn g:vitest watch" + }, + "keywords": [ + "aws", + "credentials" + ], + "author": { + "name": "AWS SDK for JavaScript Team", + "url": "https://aws.amazon.com/javascript/" + }, + "license": "Apache-2.0", + "dependencies": { + "@aws-sdk/core": "3.799.0", + "@aws-sdk/types": "3.775.0", + "@smithy/property-provider": "^4.0.2", + "@smithy/shared-ini-file-loader": "^4.0.2", + "@smithy/types": "^4.2.0", + "tslib": "^2.6.2" + }, + "devDependencies": { + "@tsconfig/recommended": "1.0.1", + "@types/node": "^18.19.69", + "concurrently": "7.0.0", + "downlevel-dts": "0.10.1", + "rimraf": "3.0.2", + "typescript": "~5.2.2" + }, + "types": "./dist-types/index.d.ts", + "engines": { + "node": ">=18.0.0" + }, + "typesVersions": { + "<4.0": { + "dist-types/*": [ + "dist-types/ts3.4/*" + ] + } + }, + "files": [ + "dist-*/**" + ], + "homepage": "https://github.com/aws/aws-sdk-js-v3/tree/main/packages/credential-provider-process", + "repository": { + "type": "git", + "url": "https://github.com/aws/aws-sdk-js-v3.git", + "directory": "packages/credential-provider-process" + } +} diff --git a/amplify/functions/downloadDocument/node_modules/@aws-sdk/credential-provider-sso/LICENSE 
b/amplify/functions/downloadDocument/node_modules/@aws-sdk/credential-provider-sso/LICENSE new file mode 100644 index 0000000..f9a6673 --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@aws-sdk/credential-provider-sso/LICENSE @@ -0,0 +1,201 @@ +Apache License + Version 2.0, January 2004 + http://www.apache.org/licenses/ + + TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION + + 1. Definitions. + + "License" shall mean the terms and conditions for use, reproduction, + and distribution as defined by Sections 1 through 9 of this document. + + "Licensor" shall mean the copyright owner or entity authorized by + the copyright owner that is granting the License. + + "Legal Entity" shall mean the union of the acting entity and all + other entities that control, are controlled by, or are under common + control with that entity. For the purposes of this definition, + "control" means (i) the power, direct or indirect, to cause the + direction or management of such entity, whether by contract or + otherwise, or (ii) ownership of fifty percent (50%) or more of the + outstanding shares, or (iii) beneficial ownership of such entity. + + "You" (or "Your") shall mean an individual or Legal Entity + exercising permissions granted by this License. + + "Source" form shall mean the preferred form for making modifications, + including but not limited to software source code, documentation + source, and configuration files. + + "Object" form shall mean any form resulting from mechanical + transformation or translation of a Source form, including but + not limited to compiled object code, generated documentation, + and conversions to other media types. + + "Work" shall mean the work of authorship, whether in Source or + Object form, made available under the License, as indicated by a + copyright notice that is included in or attached to the work + (an example is provided in the Appendix below). 
+ + "Derivative Works" shall mean any work, whether in Source or Object + form, that is based on (or derived from) the Work and for which the + editorial revisions, annotations, elaborations, or other modifications + represent, as a whole, an original work of authorship. For the purposes + of this License, Derivative Works shall not include works that remain + separable from, or merely link (or bind by name) to the interfaces of, + the Work and Derivative Works thereof. + + "Contribution" shall mean any work of authorship, including + the original version of the Work and any modifications or additions + to that Work or Derivative Works thereof, that is intentionally + submitted to Licensor for inclusion in the Work by the copyright owner + or by an individual or Legal Entity authorized to submit on behalf of + the copyright owner. For the purposes of this definition, "submitted" + means any form of electronic, verbal, or written communication sent + to the Licensor or its representatives, including but not limited to + communication on electronic mailing lists, source code control systems, + and issue tracking systems that are managed by, or on behalf of, the + Licensor for the purpose of discussing and improving the Work, but + excluding communication that is conspicuously marked or otherwise + designated in writing by the copyright owner as "Not a Contribution." + + "Contributor" shall mean Licensor and any individual or Legal Entity + on behalf of whom a Contribution has been received by Licensor and + subsequently incorporated within the Work. + + 2. Grant of Copyright License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + copyright license to reproduce, prepare Derivative Works of, + publicly display, publicly perform, sublicense, and distribute the + Work and such Derivative Works in Source or Object form. + + 3. 
Grant of Patent License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + (except as stated in this section) patent license to make, have made, + use, offer to sell, sell, import, and otherwise transfer the Work, + where such license applies only to those patent claims licensable + by such Contributor that are necessarily infringed by their + Contribution(s) alone or by combination of their Contribution(s) + with the Work to which such Contribution(s) was submitted. If You + institute patent litigation against any entity (including a + cross-claim or counterclaim in a lawsuit) alleging that the Work + or a Contribution incorporated within the Work constitutes direct + or contributory patent infringement, then any patent licenses + granted to You under this License for that Work shall terminate + as of the date such litigation is filed. + + 4. Redistribution. You may reproduce and distribute copies of the + Work or Derivative Works thereof in any medium, with or without + modifications, and in Source or Object form, provided that You + meet the following conditions: + + (a) You must give any other recipients of the Work or + Derivative Works a copy of this License; and + + (b) You must cause any modified files to carry prominent notices + stating that You changed the files; and + + (c) You must retain, in the Source form of any Derivative Works + that You distribute, all copyright, patent, trademark, and + attribution notices from the Source form of the Work, + excluding those notices that do not pertain to any part of + the Derivative Works; and + + (d) If the Work includes a "NOTICE" text file as part of its + distribution, then any Derivative Works that You distribute must + include a readable copy of the attribution notices contained + within such NOTICE file, excluding those notices that do not + pertain to any part of the Derivative 
Works, in at least one + of the following places: within a NOTICE text file distributed + as part of the Derivative Works; within the Source form or + documentation, if provided along with the Derivative Works; or, + within a display generated by the Derivative Works, if and + wherever such third-party notices normally appear. The contents + of the NOTICE file are for informational purposes only and + do not modify the License. You may add Your own attribution + notices within Derivative Works that You distribute, alongside + or as an addendum to the NOTICE text from the Work, provided + that such additional attribution notices cannot be construed + as modifying the License. + + You may add Your own copyright statement to Your modifications and + may provide additional or different license terms and conditions + for use, reproduction, or distribution of Your modifications, or + for any such Derivative Works as a whole, provided Your use, + reproduction, and distribution of the Work otherwise complies with + the conditions stated in this License. + + 5. Submission of Contributions. Unless You explicitly state otherwise, + any Contribution intentionally submitted for inclusion in the Work + by You to the Licensor shall be under the terms and conditions of + this License, without any additional terms or conditions. + Notwithstanding the above, nothing herein shall supersede or modify + the terms of any separate license agreement you may have executed + with Licensor regarding such Contributions. + + 6. Trademarks. This License does not grant permission to use the trade + names, trademarks, service marks, or product names of the Licensor, + except as required for reasonable and customary use in describing the + origin of the Work and reproducing the content of the NOTICE file. + + 7. Disclaimer of Warranty. 
Unless required by applicable law or + agreed to in writing, Licensor provides the Work (and each + Contributor provides its Contributions) on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or + implied, including, without limitation, any warranties or conditions + of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A + PARTICULAR PURPOSE. You are solely responsible for determining the + appropriateness of using or redistributing the Work and assume any + risks associated with Your exercise of permissions under this License. + + 8. Limitation of Liability. In no event and under no legal theory, + whether in tort (including negligence), contract, or otherwise, + unless required by applicable law (such as deliberate and grossly + negligent acts) or agreed to in writing, shall any Contributor be + liable to You for damages, including any direct, indirect, special, + incidental, or consequential damages of any character arising as a + result of this License or out of the use or inability to use the + Work (including but not limited to damages for loss of goodwill, + work stoppage, computer failure or malfunction, or any and all + other commercial damages or losses), even if such Contributor + has been advised of the possibility of such damages. + + 9. Accepting Warranty or Additional Liability. While redistributing + the Work or Derivative Works thereof, You may choose to offer, + and charge a fee for, acceptance of support, warranty, indemnity, + or other liability obligations and/or rights consistent with this + License. However, in accepting such obligations, You may act only + on Your own behalf and on Your sole responsibility, not on behalf + of any other Contributor, and only if You agree to indemnify, + defend, and hold each Contributor harmless for any liability + incurred by, or claims asserted against, such Contributor by reason + of your accepting any such warranty or additional liability. 
+ + END OF TERMS AND CONDITIONS + + APPENDIX: How to apply the Apache License to your work. + + To apply the Apache License to your work, attach the following + boilerplate notice, with the fields enclosed by brackets "{}" + replaced with your own identifying information. (Don't include + the brackets!) The text should be enclosed in the appropriate + comment syntax for the file format. We also recommend that a + file or class name and description of purpose be included on the + same "printed page" as the copyright notice for easier + identification within third-party archives. + + Copyright 2019 Amazon.com, Inc. or its affiliates. All Rights Reserved. + + Licensed under the Apache License, Version 2.0 (the "License"); + you may not use this file except in compliance with the License. + You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + + Unless required by applicable law or agreed to in writing, software + distributed under the License is distributed on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + See the License for the specific language governing permissions and + limitations under the License. 
\ No newline at end of file diff --git a/amplify/functions/downloadDocument/node_modules/@aws-sdk/credential-provider-sso/README.md b/amplify/functions/downloadDocument/node_modules/@aws-sdk/credential-provider-sso/README.md new file mode 100644 index 0000000..aba3fa8 --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@aws-sdk/credential-provider-sso/README.md @@ -0,0 +1,11 @@ +# @aws-sdk/credential-provider-sso + +[![NPM version](https://img.shields.io/npm/v/@aws-sdk/credential-provider-sso/latest.svg)](https://www.npmjs.com/package/@aws-sdk/credential-provider-sso) +[![NPM downloads](https://img.shields.io/npm/dm/@aws-sdk/credential-provider-sso.svg)](https://www.npmjs.com/package/@aws-sdk/credential-provider-sso) + +> An internal package + +## Usage + +You probably shouldn't, at least directly. Please use [@aws-sdk/credential-providers](https://www.npmjs.com/package/@aws-sdk/credential-providers) +instead. diff --git a/amplify/functions/downloadDocument/node_modules/@aws-sdk/credential-provider-sso/dist-cjs/index.js b/amplify/functions/downloadDocument/node_modules/@aws-sdk/credential-provider-sso/dist-cjs/index.js new file mode 100644 index 0000000..ab7549a --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@aws-sdk/credential-provider-sso/dist-cjs/index.js @@ -0,0 +1,246 @@ +"use strict"; +var __defProp = Object.defineProperty; +var __getOwnPropDesc = Object.getOwnPropertyDescriptor; +var __getOwnPropNames = Object.getOwnPropertyNames; +var __hasOwnProp = Object.prototype.hasOwnProperty; +var __name = (target, value) => __defProp(target, "name", { value, configurable: true }); +var __esm = (fn, res) => function __init() { + return fn && (res = (0, fn[__getOwnPropNames(fn)[0]])(fn = 0)), res; +}; +var __export = (target, all) => { + for (var name in all) + __defProp(target, name, { get: all[name], enumerable: true }); +}; +var __copyProps = (to, from, except, desc) => { + if (from && typeof from === "object" || typeof from === 
"function") { + for (let key of __getOwnPropNames(from)) + if (!__hasOwnProp.call(to, key) && key !== except) + __defProp(to, key, { get: () => from[key], enumerable: !(desc = __getOwnPropDesc(from, key)) || desc.enumerable }); + } + return to; +}; +var __toCommonJS = (mod) => __copyProps(__defProp({}, "__esModule", { value: true }), mod); + +// src/loadSso.ts +var loadSso_exports = {}; +__export(loadSso_exports, { + GetRoleCredentialsCommand: () => import_client_sso.GetRoleCredentialsCommand, + SSOClient: () => import_client_sso.SSOClient +}); +var import_client_sso; +var init_loadSso = __esm({ + "src/loadSso.ts"() { + "use strict"; + import_client_sso = require("@aws-sdk/client-sso"); + } +}); + +// src/index.ts +var index_exports = {}; +__export(index_exports, { + fromSSO: () => fromSSO, + isSsoProfile: () => isSsoProfile, + validateSsoProfile: () => validateSsoProfile +}); +module.exports = __toCommonJS(index_exports); + +// src/fromSSO.ts + + + +// src/isSsoProfile.ts +var isSsoProfile = /* @__PURE__ */ __name((arg) => arg && (typeof arg.sso_start_url === "string" || typeof arg.sso_account_id === "string" || typeof arg.sso_session === "string" || typeof arg.sso_region === "string" || typeof arg.sso_role_name === "string"), "isSsoProfile"); + +// src/resolveSSOCredentials.ts +var import_client = require("@aws-sdk/core/client"); +var import_token_providers = require("@aws-sdk/token-providers"); +var import_property_provider = require("@smithy/property-provider"); +var import_shared_ini_file_loader = require("@smithy/shared-ini-file-loader"); +var SHOULD_FAIL_CREDENTIAL_CHAIN = false; +var resolveSSOCredentials = /* @__PURE__ */ __name(async ({ + ssoStartUrl, + ssoSession, + ssoAccountId, + ssoRegion, + ssoRoleName, + ssoClient, + clientConfig, + parentClientConfig, + profile, + logger +}) => { + let token; + const refreshMessage = `To refresh this SSO session run aws sso login with the corresponding profile.`; + if (ssoSession) { + try { + const _token = await 
(0, import_token_providers.fromSso)({ profile })(); + token = { + accessToken: _token.token, + expiresAt: new Date(_token.expiration).toISOString() + }; + } catch (e) { + throw new import_property_provider.CredentialsProviderError(e.message, { + tryNextLink: SHOULD_FAIL_CREDENTIAL_CHAIN, + logger + }); + } + } else { + try { + token = await (0, import_shared_ini_file_loader.getSSOTokenFromFile)(ssoStartUrl); + } catch (e) { + throw new import_property_provider.CredentialsProviderError(`The SSO session associated with this profile is invalid. ${refreshMessage}`, { + tryNextLink: SHOULD_FAIL_CREDENTIAL_CHAIN, + logger + }); + } + } + if (new Date(token.expiresAt).getTime() - Date.now() <= 0) { + throw new import_property_provider.CredentialsProviderError(`The SSO session associated with this profile has expired. ${refreshMessage}`, { + tryNextLink: SHOULD_FAIL_CREDENTIAL_CHAIN, + logger + }); + } + const { accessToken } = token; + const { SSOClient: SSOClient2, GetRoleCredentialsCommand: GetRoleCredentialsCommand2 } = await Promise.resolve().then(() => (init_loadSso(), loadSso_exports)); + const sso = ssoClient || new SSOClient2( + Object.assign({}, clientConfig ?? {}, { + logger: clientConfig?.logger ?? parentClientConfig?.logger, + region: clientConfig?.region ?? 
ssoRegion + }) + ); + let ssoResp; + try { + ssoResp = await sso.send( + new GetRoleCredentialsCommand2({ + accountId: ssoAccountId, + roleName: ssoRoleName, + accessToken + }) + ); + } catch (e) { + throw new import_property_provider.CredentialsProviderError(e, { + tryNextLink: SHOULD_FAIL_CREDENTIAL_CHAIN, + logger + }); + } + const { + roleCredentials: { accessKeyId, secretAccessKey, sessionToken, expiration, credentialScope, accountId } = {} + } = ssoResp; + if (!accessKeyId || !secretAccessKey || !sessionToken || !expiration) { + throw new import_property_provider.CredentialsProviderError("SSO returns an invalid temporary credential.", { + tryNextLink: SHOULD_FAIL_CREDENTIAL_CHAIN, + logger + }); + } + const credentials = { + accessKeyId, + secretAccessKey, + sessionToken, + expiration: new Date(expiration), + ...credentialScope && { credentialScope }, + ...accountId && { accountId } + }; + if (ssoSession) { + (0, import_client.setCredentialFeature)(credentials, "CREDENTIALS_SSO", "s"); + } else { + (0, import_client.setCredentialFeature)(credentials, "CREDENTIALS_SSO_LEGACY", "u"); + } + return credentials; +}, "resolveSSOCredentials"); + +// src/validateSsoProfile.ts + +var validateSsoProfile = /* @__PURE__ */ __name((profile, logger) => { + const { sso_start_url, sso_account_id, sso_region, sso_role_name } = profile; + if (!sso_start_url || !sso_account_id || !sso_region || !sso_role_name) { + throw new import_property_provider.CredentialsProviderError( + `Profile is configured with invalid SSO credentials. Required parameters "sso_account_id", "sso_region", "sso_role_name", "sso_start_url". 
Got ${Object.keys(profile).join( + ", " + )} +Reference: https://docs.aws.amazon.com/cli/latest/userguide/cli-configure-sso.html`, + { tryNextLink: false, logger } + ); + } + return profile; +}, "validateSsoProfile"); + +// src/fromSSO.ts +var fromSSO = /* @__PURE__ */ __name((init = {}) => async ({ callerClientConfig } = {}) => { + init.logger?.debug("@aws-sdk/credential-provider-sso - fromSSO"); + const { ssoStartUrl, ssoAccountId, ssoRegion, ssoRoleName, ssoSession } = init; + const { ssoClient } = init; + const profileName = (0, import_shared_ini_file_loader.getProfileName)({ + profile: init.profile ?? callerClientConfig?.profile + }); + if (!ssoStartUrl && !ssoAccountId && !ssoRegion && !ssoRoleName && !ssoSession) { + const profiles = await (0, import_shared_ini_file_loader.parseKnownFiles)(init); + const profile = profiles[profileName]; + if (!profile) { + throw new import_property_provider.CredentialsProviderError(`Profile ${profileName} was not found.`, { logger: init.logger }); + } + if (!isSsoProfile(profile)) { + throw new import_property_provider.CredentialsProviderError(`Profile ${profileName} is not configured with SSO credentials.`, { + logger: init.logger + }); + } + if (profile?.sso_session) { + const ssoSessions = await (0, import_shared_ini_file_loader.loadSsoSessionData)(init); + const session = ssoSessions[profile.sso_session]; + const conflictMsg = ` configurations in profile ${profileName} and sso-session ${profile.sso_session}`; + if (ssoRegion && ssoRegion !== session.sso_region) { + throw new import_property_provider.CredentialsProviderError(`Conflicting SSO region` + conflictMsg, { + tryNextLink: false, + logger: init.logger + }); + } + if (ssoStartUrl && ssoStartUrl !== session.sso_start_url) { + throw new import_property_provider.CredentialsProviderError(`Conflicting SSO start_url` + conflictMsg, { + tryNextLink: false, + logger: init.logger + }); + } + profile.sso_region = session.sso_region; + profile.sso_start_url = 
session.sso_start_url; + } + const { sso_start_url, sso_account_id, sso_region, sso_role_name, sso_session } = validateSsoProfile( + profile, + init.logger + ); + return resolveSSOCredentials({ + ssoStartUrl: sso_start_url, + ssoSession: sso_session, + ssoAccountId: sso_account_id, + ssoRegion: sso_region, + ssoRoleName: sso_role_name, + ssoClient, + clientConfig: init.clientConfig, + parentClientConfig: init.parentClientConfig, + profile: profileName + }); + } else if (!ssoStartUrl || !ssoAccountId || !ssoRegion || !ssoRoleName) { + throw new import_property_provider.CredentialsProviderError( + 'Incomplete configuration. The fromSSO() argument hash must include "ssoStartUrl", "ssoAccountId", "ssoRegion", "ssoRoleName"', + { tryNextLink: false, logger: init.logger } + ); + } else { + return resolveSSOCredentials({ + ssoStartUrl, + ssoSession, + ssoAccountId, + ssoRegion, + ssoRoleName, + ssoClient, + clientConfig: init.clientConfig, + parentClientConfig: init.parentClientConfig, + profile: profileName + }); + } +}, "fromSSO"); +// Annotate the CommonJS export names for ESM import in node: + +0 && (module.exports = { + fromSSO, + isSsoProfile, + validateSsoProfile +}); + diff --git a/amplify/functions/downloadDocument/node_modules/@aws-sdk/credential-provider-sso/dist-es/fromSSO.js b/amplify/functions/downloadDocument/node_modules/@aws-sdk/credential-provider-sso/dist-es/fromSSO.js new file mode 100644 index 0000000..75f1860 --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@aws-sdk/credential-provider-sso/dist-es/fromSSO.js @@ -0,0 +1,73 @@ +import { CredentialsProviderError } from "@smithy/property-provider"; +import { getProfileName, loadSsoSessionData, parseKnownFiles } from "@smithy/shared-ini-file-loader"; +import { isSsoProfile } from "./isSsoProfile"; +import { resolveSSOCredentials } from "./resolveSSOCredentials"; +import { validateSsoProfile } from "./validateSsoProfile"; +export const fromSSO = (init = {}) => async ({ callerClientConfig 
} = {}) => { + init.logger?.debug("@aws-sdk/credential-provider-sso - fromSSO"); + const { ssoStartUrl, ssoAccountId, ssoRegion, ssoRoleName, ssoSession } = init; + const { ssoClient } = init; + const profileName = getProfileName({ + profile: init.profile ?? callerClientConfig?.profile, + }); + if (!ssoStartUrl && !ssoAccountId && !ssoRegion && !ssoRoleName && !ssoSession) { + const profiles = await parseKnownFiles(init); + const profile = profiles[profileName]; + if (!profile) { + throw new CredentialsProviderError(`Profile ${profileName} was not found.`, { logger: init.logger }); + } + if (!isSsoProfile(profile)) { + throw new CredentialsProviderError(`Profile ${profileName} is not configured with SSO credentials.`, { + logger: init.logger, + }); + } + if (profile?.sso_session) { + const ssoSessions = await loadSsoSessionData(init); + const session = ssoSessions[profile.sso_session]; + const conflictMsg = ` configurations in profile ${profileName} and sso-session ${profile.sso_session}`; + if (ssoRegion && ssoRegion !== session.sso_region) { + throw new CredentialsProviderError(`Conflicting SSO region` + conflictMsg, { + tryNextLink: false, + logger: init.logger, + }); + } + if (ssoStartUrl && ssoStartUrl !== session.sso_start_url) { + throw new CredentialsProviderError(`Conflicting SSO start_url` + conflictMsg, { + tryNextLink: false, + logger: init.logger, + }); + } + profile.sso_region = session.sso_region; + profile.sso_start_url = session.sso_start_url; + } + const { sso_start_url, sso_account_id, sso_region, sso_role_name, sso_session } = validateSsoProfile(profile, init.logger); + return resolveSSOCredentials({ + ssoStartUrl: sso_start_url, + ssoSession: sso_session, + ssoAccountId: sso_account_id, + ssoRegion: sso_region, + ssoRoleName: sso_role_name, + ssoClient: ssoClient, + clientConfig: init.clientConfig, + parentClientConfig: init.parentClientConfig, + profile: profileName, + }); + } + else if (!ssoStartUrl || !ssoAccountId || !ssoRegion || 
!ssoRoleName) { + throw new CredentialsProviderError("Incomplete configuration. The fromSSO() argument hash must include " + + '"ssoStartUrl", "ssoAccountId", "ssoRegion", "ssoRoleName"', { tryNextLink: false, logger: init.logger }); + } + else { + return resolveSSOCredentials({ + ssoStartUrl, + ssoSession, + ssoAccountId, + ssoRegion, + ssoRoleName, + ssoClient, + clientConfig: init.clientConfig, + parentClientConfig: init.parentClientConfig, + profile: profileName, + }); + } +}; diff --git a/amplify/functions/downloadDocument/node_modules/@aws-sdk/credential-provider-sso/dist-es/index.js b/amplify/functions/downloadDocument/node_modules/@aws-sdk/credential-provider-sso/dist-es/index.js new file mode 100644 index 0000000..7215fb6 --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@aws-sdk/credential-provider-sso/dist-es/index.js @@ -0,0 +1,4 @@ +export * from "./fromSSO"; +export * from "./isSsoProfile"; +export * from "./types"; +export * from "./validateSsoProfile"; diff --git a/amplify/functions/downloadDocument/node_modules/@aws-sdk/credential-provider-sso/dist-es/isSsoProfile.js b/amplify/functions/downloadDocument/node_modules/@aws-sdk/credential-provider-sso/dist-es/isSsoProfile.js new file mode 100644 index 0000000..e655438 --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@aws-sdk/credential-provider-sso/dist-es/isSsoProfile.js @@ -0,0 +1,6 @@ +export const isSsoProfile = (arg) => arg && + (typeof arg.sso_start_url === "string" || + typeof arg.sso_account_id === "string" || + typeof arg.sso_session === "string" || + typeof arg.sso_region === "string" || + typeof arg.sso_role_name === "string"); diff --git a/amplify/functions/downloadDocument/node_modules/@aws-sdk/credential-provider-sso/dist-es/loadSso.js b/amplify/functions/downloadDocument/node_modules/@aws-sdk/credential-provider-sso/dist-es/loadSso.js new file mode 100644 index 0000000..6a4414f --- /dev/null +++ 
b/amplify/functions/downloadDocument/node_modules/@aws-sdk/credential-provider-sso/dist-es/loadSso.js @@ -0,0 +1,2 @@ +import { GetRoleCredentialsCommand, SSOClient } from "@aws-sdk/client-sso"; +export { GetRoleCredentialsCommand, SSOClient }; diff --git a/amplify/functions/downloadDocument/node_modules/@aws-sdk/credential-provider-sso/dist-es/resolveSSOCredentials.js b/amplify/functions/downloadDocument/node_modules/@aws-sdk/credential-provider-sso/dist-es/resolveSSOCredentials.js new file mode 100644 index 0000000..979d9b2 --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@aws-sdk/credential-provider-sso/dist-es/resolveSSOCredentials.js @@ -0,0 +1,84 @@ +import { setCredentialFeature } from "@aws-sdk/core/client"; +import { fromSso as getSsoTokenProvider } from "@aws-sdk/token-providers"; +import { CredentialsProviderError } from "@smithy/property-provider"; +import { getSSOTokenFromFile } from "@smithy/shared-ini-file-loader"; +const SHOULD_FAIL_CREDENTIAL_CHAIN = false; +export const resolveSSOCredentials = async ({ ssoStartUrl, ssoSession, ssoAccountId, ssoRegion, ssoRoleName, ssoClient, clientConfig, parentClientConfig, profile, logger, }) => { + let token; + const refreshMessage = `To refresh this SSO session run aws sso login with the corresponding profile.`; + if (ssoSession) { + try { + const _token = await getSsoTokenProvider({ profile })(); + token = { + accessToken: _token.token, + expiresAt: new Date(_token.expiration).toISOString(), + }; + } + catch (e) { + throw new CredentialsProviderError(e.message, { + tryNextLink: SHOULD_FAIL_CREDENTIAL_CHAIN, + logger, + }); + } + } + else { + try { + token = await getSSOTokenFromFile(ssoStartUrl); + } + catch (e) { + throw new CredentialsProviderError(`The SSO session associated with this profile is invalid. 
${refreshMessage}`, { + tryNextLink: SHOULD_FAIL_CREDENTIAL_CHAIN, + logger, + }); + } + } + if (new Date(token.expiresAt).getTime() - Date.now() <= 0) { + throw new CredentialsProviderError(`The SSO session associated with this profile has expired. ${refreshMessage}`, { + tryNextLink: SHOULD_FAIL_CREDENTIAL_CHAIN, + logger, + }); + } + const { accessToken } = token; + const { SSOClient, GetRoleCredentialsCommand } = await import("./loadSso"); + const sso = ssoClient || + new SSOClient(Object.assign({}, clientConfig ?? {}, { + logger: clientConfig?.logger ?? parentClientConfig?.logger, + region: clientConfig?.region ?? ssoRegion, + })); + let ssoResp; + try { + ssoResp = await sso.send(new GetRoleCredentialsCommand({ + accountId: ssoAccountId, + roleName: ssoRoleName, + accessToken, + })); + } + catch (e) { + throw new CredentialsProviderError(e, { + tryNextLink: SHOULD_FAIL_CREDENTIAL_CHAIN, + logger, + }); + } + const { roleCredentials: { accessKeyId, secretAccessKey, sessionToken, expiration, credentialScope, accountId } = {}, } = ssoResp; + if (!accessKeyId || !secretAccessKey || !sessionToken || !expiration) { + throw new CredentialsProviderError("SSO returns an invalid temporary credential.", { + tryNextLink: SHOULD_FAIL_CREDENTIAL_CHAIN, + logger, + }); + } + const credentials = { + accessKeyId, + secretAccessKey, + sessionToken, + expiration: new Date(expiration), + ...(credentialScope && { credentialScope }), + ...(accountId && { accountId }), + }; + if (ssoSession) { + setCredentialFeature(credentials, "CREDENTIALS_SSO", "s"); + } + else { + setCredentialFeature(credentials, "CREDENTIALS_SSO_LEGACY", "u"); + } + return credentials; +}; diff --git a/amplify/functions/downloadDocument/node_modules/@aws-sdk/credential-provider-sso/dist-es/types.js b/amplify/functions/downloadDocument/node_modules/@aws-sdk/credential-provider-sso/dist-es/types.js new file mode 100644 index 0000000..cb0ff5c --- /dev/null +++ 
b/amplify/functions/downloadDocument/node_modules/@aws-sdk/credential-provider-sso/dist-es/types.js @@ -0,0 +1 @@ +export {}; diff --git a/amplify/functions/downloadDocument/node_modules/@aws-sdk/credential-provider-sso/dist-es/validateSsoProfile.js b/amplify/functions/downloadDocument/node_modules/@aws-sdk/credential-provider-sso/dist-es/validateSsoProfile.js new file mode 100644 index 0000000..94174b6 --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@aws-sdk/credential-provider-sso/dist-es/validateSsoProfile.js @@ -0,0 +1,9 @@ +import { CredentialsProviderError } from "@smithy/property-provider"; +export const validateSsoProfile = (profile, logger) => { + const { sso_start_url, sso_account_id, sso_region, sso_role_name } = profile; + if (!sso_start_url || !sso_account_id || !sso_region || !sso_role_name) { + throw new CredentialsProviderError(`Profile is configured with invalid SSO credentials. Required parameters "sso_account_id", ` + + `"sso_region", "sso_role_name", "sso_start_url". Got ${Object.keys(profile).join(", ")}\nReference: https://docs.aws.amazon.com/cli/latest/userguide/cli-configure-sso.html`, { tryNextLink: false, logger }); + } + return profile; +}; diff --git a/amplify/functions/downloadDocument/node_modules/@aws-sdk/credential-provider-sso/dist-types/fromSSO.d.ts b/amplify/functions/downloadDocument/node_modules/@aws-sdk/credential-provider-sso/dist-types/fromSSO.d.ts new file mode 100644 index 0000000..47521a6 --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@aws-sdk/credential-provider-sso/dist-types/fromSSO.d.ts @@ -0,0 +1,68 @@ +import type { CredentialProviderOptions, RuntimeConfigAwsCredentialIdentityProvider } from "@aws-sdk/types"; +import { SourceProfileInit } from "@smithy/shared-ini-file-loader"; +import type { SSOClient, SSOClientConfig } from "./loadSso"; +/** + * @internal + */ +export interface SsoCredentialsParameters { + /** + * The URL to the AWS SSO service. 
+ */ + ssoStartUrl: string; + /** + * SSO session identifier. + * Presence implies usage of the SSOTokenProvider. + */ + ssoSession?: string; + /** + * The ID of the AWS account to use for temporary credentials. + */ + ssoAccountId: string; + /** + * The AWS region to use for temporary credentials. + */ + ssoRegion: string; + /** + * The name of the AWS role to assume. + */ + ssoRoleName: string; +} +/** + * @internal + */ +export interface FromSSOInit extends SourceProfileInit, CredentialProviderOptions { + ssoClient?: SSOClient; + clientConfig?: SSOClientConfig; +} +/** + * @internal + * + * Creates a credential provider that will read from a credential_process specified + * in ini files. + * + * The SSO credential provider must support both + * + * 1. the legacy profile format, + * @example + * ``` + * [profile sample-profile] + * sso_account_id = 012345678901 + * sso_region = us-east-1 + * sso_role_name = SampleRole + * sso_start_url = https://www.....com/start + * ``` + * + * 2. and the profile format for SSO Token Providers. 
+ * @example + * ``` + * [profile sso-profile] + * sso_session = dev + * sso_account_id = 012345678901 + * sso_role_name = SampleRole + * + * [sso-session dev] + * sso_region = us-east-1 + * sso_start_url = https://www.....com/start + * ``` + */ +export declare const fromSSO: (init?: FromSSOInit & Partial) => RuntimeConfigAwsCredentialIdentityProvider; diff --git a/amplify/functions/downloadDocument/node_modules/@aws-sdk/credential-provider-sso/dist-types/index.d.ts b/amplify/functions/downloadDocument/node_modules/@aws-sdk/credential-provider-sso/dist-types/index.d.ts new file mode 100644 index 0000000..d851c15 --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@aws-sdk/credential-provider-sso/dist-types/index.d.ts @@ -0,0 +1,16 @@ +/** + * @internal + */ +export * from "./fromSSO"; +/** + * @internal + */ +export * from "./isSsoProfile"; +/** + * @internal + */ +export * from "./types"; +/** + * @internal + */ +export * from "./validateSsoProfile"; diff --git a/amplify/functions/downloadDocument/node_modules/@aws-sdk/credential-provider-sso/dist-types/isSsoProfile.d.ts b/amplify/functions/downloadDocument/node_modules/@aws-sdk/credential-provider-sso/dist-types/isSsoProfile.d.ts new file mode 100644 index 0000000..77c1fb2 --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@aws-sdk/credential-provider-sso/dist-types/isSsoProfile.d.ts @@ -0,0 +1,6 @@ +import { Profile } from "@smithy/types"; +import { SsoProfile } from "./types"; +/** + * @internal + */ +export declare const isSsoProfile: (arg: Profile) => arg is Partial; diff --git a/amplify/functions/downloadDocument/node_modules/@aws-sdk/credential-provider-sso/dist-types/loadSso.d.ts b/amplify/functions/downloadDocument/node_modules/@aws-sdk/credential-provider-sso/dist-types/loadSso.d.ts new file mode 100644 index 0000000..f44232f --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@aws-sdk/credential-provider-sso/dist-types/loadSso.d.ts @@ -0,0 +1,3 @@ +import 
{ GetRoleCredentialsCommand, SSOClient } from "@aws-sdk/client-sso"; +export { GetRoleCredentialsCommand, SSOClient }; +export type { SSOClientConfig, GetRoleCredentialsCommandOutput } from "@aws-sdk/client-sso"; diff --git a/amplify/functions/downloadDocument/node_modules/@aws-sdk/credential-provider-sso/dist-types/resolveSSOCredentials.d.ts b/amplify/functions/downloadDocument/node_modules/@aws-sdk/credential-provider-sso/dist-types/resolveSSOCredentials.d.ts new file mode 100644 index 0000000..e4e3fcc --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@aws-sdk/credential-provider-sso/dist-types/resolveSSOCredentials.d.ts @@ -0,0 +1,6 @@ +import { AwsCredentialIdentity } from "@smithy/types"; +import { FromSSOInit, SsoCredentialsParameters } from "./fromSSO"; +/** + * @internal + */ +export declare const resolveSSOCredentials: ({ ssoStartUrl, ssoSession, ssoAccountId, ssoRegion, ssoRoleName, ssoClient, clientConfig, parentClientConfig, profile, logger, }: FromSSOInit & SsoCredentialsParameters) => Promise; diff --git a/amplify/functions/downloadDocument/node_modules/@aws-sdk/credential-provider-sso/dist-types/ts3.4/fromSSO.d.ts b/amplify/functions/downloadDocument/node_modules/@aws-sdk/credential-provider-sso/dist-types/ts3.4/fromSSO.d.ts new file mode 100644 index 0000000..bf50b63 --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@aws-sdk/credential-provider-sso/dist-types/ts3.4/fromSSO.d.ts @@ -0,0 +1,22 @@ +import { + CredentialProviderOptions, + RuntimeConfigAwsCredentialIdentityProvider, +} from "@aws-sdk/types"; +import { SourceProfileInit } from "@smithy/shared-ini-file-loader"; +import { SSOClient, SSOClientConfig } from "./loadSso"; +export interface SsoCredentialsParameters { + ssoStartUrl: string; + ssoSession?: string; + ssoAccountId: string; + ssoRegion: string; + ssoRoleName: string; +} +export interface FromSSOInit + extends SourceProfileInit, + CredentialProviderOptions { + ssoClient?: SSOClient; + clientConfig?: 
SSOClientConfig; +} +export declare const fromSSO: ( + init?: FromSSOInit & Partial +) => RuntimeConfigAwsCredentialIdentityProvider; diff --git a/amplify/functions/downloadDocument/node_modules/@aws-sdk/credential-provider-sso/dist-types/ts3.4/index.d.ts b/amplify/functions/downloadDocument/node_modules/@aws-sdk/credential-provider-sso/dist-types/ts3.4/index.d.ts new file mode 100644 index 0000000..7215fb6 --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@aws-sdk/credential-provider-sso/dist-types/ts3.4/index.d.ts @@ -0,0 +1,4 @@ +export * from "./fromSSO"; +export * from "./isSsoProfile"; +export * from "./types"; +export * from "./validateSsoProfile"; diff --git a/amplify/functions/downloadDocument/node_modules/@aws-sdk/credential-provider-sso/dist-types/ts3.4/isSsoProfile.d.ts b/amplify/functions/downloadDocument/node_modules/@aws-sdk/credential-provider-sso/dist-types/ts3.4/isSsoProfile.d.ts new file mode 100644 index 0000000..b4e8bdd --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@aws-sdk/credential-provider-sso/dist-types/ts3.4/isSsoProfile.d.ts @@ -0,0 +1,3 @@ +import { Profile } from "@smithy/types"; +import { SsoProfile } from "./types"; +export declare const isSsoProfile: (arg: Profile) => arg is Partial; diff --git a/amplify/functions/downloadDocument/node_modules/@aws-sdk/credential-provider-sso/dist-types/ts3.4/loadSso.d.ts b/amplify/functions/downloadDocument/node_modules/@aws-sdk/credential-provider-sso/dist-types/ts3.4/loadSso.d.ts new file mode 100644 index 0000000..2d3249f --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@aws-sdk/credential-provider-sso/dist-types/ts3.4/loadSso.d.ts @@ -0,0 +1,6 @@ +import { GetRoleCredentialsCommand, SSOClient } from "@aws-sdk/client-sso"; +export { GetRoleCredentialsCommand, SSOClient }; +export { + SSOClientConfig, + GetRoleCredentialsCommandOutput, +} from "@aws-sdk/client-sso"; diff --git 
a/amplify/functions/downloadDocument/node_modules/@aws-sdk/credential-provider-sso/dist-types/ts3.4/resolveSSOCredentials.d.ts b/amplify/functions/downloadDocument/node_modules/@aws-sdk/credential-provider-sso/dist-types/ts3.4/resolveSSOCredentials.d.ts new file mode 100644 index 0000000..cc16554 --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@aws-sdk/credential-provider-sso/dist-types/ts3.4/resolveSSOCredentials.d.ts @@ -0,0 +1,14 @@ +import { AwsCredentialIdentity } from "@smithy/types"; +import { FromSSOInit, SsoCredentialsParameters } from "./fromSSO"; +export declare const resolveSSOCredentials: ({ + ssoStartUrl, + ssoSession, + ssoAccountId, + ssoRegion, + ssoRoleName, + ssoClient, + clientConfig, + parentClientConfig, + profile, + logger, +}: FromSSOInit & SsoCredentialsParameters) => Promise; diff --git a/amplify/functions/downloadDocument/node_modules/@aws-sdk/credential-provider-sso/dist-types/ts3.4/types.d.ts b/amplify/functions/downloadDocument/node_modules/@aws-sdk/credential-provider-sso/dist-types/ts3.4/types.d.ts new file mode 100644 index 0000000..4a3986b --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@aws-sdk/credential-provider-sso/dist-types/ts3.4/types.d.ts @@ -0,0 +1,14 @@ +import { Profile } from "@smithy/types"; +export interface SSOToken { + accessToken: string; + expiresAt: string; + region?: string; + startUrl?: string; +} +export interface SsoProfile extends Profile { + sso_start_url: string; + sso_session?: string; + sso_account_id: string; + sso_region: string; + sso_role_name: string; +} diff --git a/amplify/functions/downloadDocument/node_modules/@aws-sdk/credential-provider-sso/dist-types/ts3.4/validateSsoProfile.d.ts b/amplify/functions/downloadDocument/node_modules/@aws-sdk/credential-provider-sso/dist-types/ts3.4/validateSsoProfile.d.ts new file mode 100644 index 0000000..6572fc4 --- /dev/null +++ 
b/amplify/functions/downloadDocument/node_modules/@aws-sdk/credential-provider-sso/dist-types/ts3.4/validateSsoProfile.d.ts @@ -0,0 +1,6 @@ +import { Logger } from "@smithy/types"; +import { SsoProfile } from "./types"; +export declare const validateSsoProfile: ( + profile: Partial, + logger?: Logger +) => SsoProfile; diff --git a/amplify/functions/downloadDocument/node_modules/@aws-sdk/credential-provider-sso/dist-types/types.d.ts b/amplify/functions/downloadDocument/node_modules/@aws-sdk/credential-provider-sso/dist-types/types.d.ts new file mode 100644 index 0000000..551d678 --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@aws-sdk/credential-provider-sso/dist-types/types.d.ts @@ -0,0 +1,22 @@ +import { Profile } from "@smithy/types"; +/** + * @internal + * + * Cached SSO token retrieved from SSO login flow. + */ +export interface SSOToken { + accessToken: string; + expiresAt: string; + region?: string; + startUrl?: string; +} +/** + * @internal + */ +export interface SsoProfile extends Profile { + sso_start_url: string; + sso_session?: string; + sso_account_id: string; + sso_region: string; + sso_role_name: string; +} diff --git a/amplify/functions/downloadDocument/node_modules/@aws-sdk/credential-provider-sso/dist-types/validateSsoProfile.d.ts b/amplify/functions/downloadDocument/node_modules/@aws-sdk/credential-provider-sso/dist-types/validateSsoProfile.d.ts new file mode 100644 index 0000000..8b0ab31 --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@aws-sdk/credential-provider-sso/dist-types/validateSsoProfile.d.ts @@ -0,0 +1,6 @@ +import { Logger } from "@smithy/types"; +import { SsoProfile } from "./types"; +/** + * @internal + */ +export declare const validateSsoProfile: (profile: Partial, logger?: Logger) => SsoProfile; diff --git a/amplify/functions/downloadDocument/node_modules/@aws-sdk/credential-provider-sso/package.json 
b/amplify/functions/downloadDocument/node_modules/@aws-sdk/credential-provider-sso/package.json new file mode 100644 index 0000000..0d5cfb5 --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@aws-sdk/credential-provider-sso/package.json @@ -0,0 +1,65 @@ +{ + "name": "@aws-sdk/credential-provider-sso", + "version": "3.803.0", + "description": "AWS credential provider that exchanges a resolved SSO login token file for temporary AWS credentials", + "main": "./dist-cjs/index.js", + "module": "./dist-es/index.js", + "scripts": { + "build": "concurrently 'yarn:build:cjs' 'yarn:build:es' 'yarn:build:types'", + "build:cjs": "node ../../scripts/compilation/inline credential-provider-sso", + "build:es": "tsc -p tsconfig.es.json", + "build:include:deps": "lerna run --scope $npm_package_name --include-dependencies build", + "build:types": "tsc -p tsconfig.types.json", + "build:types:downlevel": "downlevel-dts dist-types dist-types/ts3.4", + "clean": "rimraf ./dist-* && rimraf *.tsbuildinfo", + "test": "yarn g:vitest run", + "test:watch": "yarn g:vitest watch" + }, + "keywords": [ + "aws", + "credentials" + ], + "author": { + "name": "AWS SDK for JavaScript Team", + "url": "https://aws.amazon.com/javascript/" + }, + "license": "Apache-2.0", + "dependencies": { + "@aws-sdk/client-sso": "3.803.0", + "@aws-sdk/core": "3.799.0", + "@aws-sdk/token-providers": "3.803.0", + "@aws-sdk/types": "3.775.0", + "@smithy/property-provider": "^4.0.2", + "@smithy/shared-ini-file-loader": "^4.0.2", + "@smithy/types": "^4.2.0", + "tslib": "^2.6.2" + }, + "devDependencies": { + "@tsconfig/recommended": "1.0.1", + "@types/node": "^18.19.69", + "concurrently": "7.0.0", + "downlevel-dts": "0.10.1", + "rimraf": "3.0.2", + "typescript": "~5.2.2" + }, + "types": "./dist-types/index.d.ts", + "engines": { + "node": ">=18.0.0" + }, + "typesVersions": { + "<4.0": { + "dist-types/*": [ + "dist-types/ts3.4/*" + ] + } + }, + "files": [ + "dist-*/**" + ], + "homepage": 
"https://github.com/aws/aws-sdk-js-v3/tree/main/packages/credential-provider-sso", + "repository": { + "type": "git", + "url": "https://github.com/aws/aws-sdk-js-v3.git", + "directory": "packages/credential-provider-sso" + } +} diff --git a/amplify/functions/downloadDocument/node_modules/@aws-sdk/credential-provider-web-identity/LICENSE b/amplify/functions/downloadDocument/node_modules/@aws-sdk/credential-provider-web-identity/LICENSE new file mode 100644 index 0000000..f9a6673 --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@aws-sdk/credential-provider-web-identity/LICENSE @@ -0,0 +1,201 @@ +Apache License + Version 2.0, January 2004 + http://www.apache.org/licenses/ + + TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION + + 1. Definitions. + + "License" shall mean the terms and conditions for use, reproduction, + and distribution as defined by Sections 1 through 9 of this document. + + "Licensor" shall mean the copyright owner or entity authorized by + the copyright owner that is granting the License. + + "Legal Entity" shall mean the union of the acting entity and all + other entities that control, are controlled by, or are under common + control with that entity. For the purposes of this definition, + "control" means (i) the power, direct or indirect, to cause the + direction or management of such entity, whether by contract or + otherwise, or (ii) ownership of fifty percent (50%) or more of the + outstanding shares, or (iii) beneficial ownership of such entity. + + "You" (or "Your") shall mean an individual or Legal Entity + exercising permissions granted by this License. + + "Source" form shall mean the preferred form for making modifications, + including but not limited to software source code, documentation + source, and configuration files. 
+ + "Object" form shall mean any form resulting from mechanical + transformation or translation of a Source form, including but + not limited to compiled object code, generated documentation, + and conversions to other media types. + + "Work" shall mean the work of authorship, whether in Source or + Object form, made available under the License, as indicated by a + copyright notice that is included in or attached to the work + (an example is provided in the Appendix below). + + "Derivative Works" shall mean any work, whether in Source or Object + form, that is based on (or derived from) the Work and for which the + editorial revisions, annotations, elaborations, or other modifications + represent, as a whole, an original work of authorship. For the purposes + of this License, Derivative Works shall not include works that remain + separable from, or merely link (or bind by name) to the interfaces of, + the Work and Derivative Works thereof. + + "Contribution" shall mean any work of authorship, including + the original version of the Work and any modifications or additions + to that Work or Derivative Works thereof, that is intentionally + submitted to Licensor for inclusion in the Work by the copyright owner + or by an individual or Legal Entity authorized to submit on behalf of + the copyright owner. For the purposes of this definition, "submitted" + means any form of electronic, verbal, or written communication sent + to the Licensor or its representatives, including but not limited to + communication on electronic mailing lists, source code control systems, + and issue tracking systems that are managed by, or on behalf of, the + Licensor for the purpose of discussing and improving the Work, but + excluding communication that is conspicuously marked or otherwise + designated in writing by the copyright owner as "Not a Contribution." 
+ + "Contributor" shall mean Licensor and any individual or Legal Entity + on behalf of whom a Contribution has been received by Licensor and + subsequently incorporated within the Work. + + 2. Grant of Copyright License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + copyright license to reproduce, prepare Derivative Works of, + publicly display, publicly perform, sublicense, and distribute the + Work and such Derivative Works in Source or Object form. + + 3. Grant of Patent License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + (except as stated in this section) patent license to make, have made, + use, offer to sell, sell, import, and otherwise transfer the Work, + where such license applies only to those patent claims licensable + by such Contributor that are necessarily infringed by their + Contribution(s) alone or by combination of their Contribution(s) + with the Work to which such Contribution(s) was submitted. If You + institute patent litigation against any entity (including a + cross-claim or counterclaim in a lawsuit) alleging that the Work + or a Contribution incorporated within the Work constitutes direct + or contributory patent infringement, then any patent licenses + granted to You under this License for that Work shall terminate + as of the date such litigation is filed. + + 4. Redistribution. 
You may reproduce and distribute copies of the + Work or Derivative Works thereof in any medium, with or without + modifications, and in Source or Object form, provided that You + meet the following conditions: + + (a) You must give any other recipients of the Work or + Derivative Works a copy of this License; and + + (b) You must cause any modified files to carry prominent notices + stating that You changed the files; and + + (c) You must retain, in the Source form of any Derivative Works + that You distribute, all copyright, patent, trademark, and + attribution notices from the Source form of the Work, + excluding those notices that do not pertain to any part of + the Derivative Works; and + + (d) If the Work includes a "NOTICE" text file as part of its + distribution, then any Derivative Works that You distribute must + include a readable copy of the attribution notices contained + within such NOTICE file, excluding those notices that do not + pertain to any part of the Derivative Works, in at least one + of the following places: within a NOTICE text file distributed + as part of the Derivative Works; within the Source form or + documentation, if provided along with the Derivative Works; or, + within a display generated by the Derivative Works, if and + wherever such third-party notices normally appear. The contents + of the NOTICE file are for informational purposes only and + do not modify the License. You may add Your own attribution + notices within Derivative Works that You distribute, alongside + or as an addendum to the NOTICE text from the Work, provided + that such additional attribution notices cannot be construed + as modifying the License. 
+ + You may add Your own copyright statement to Your modifications and + may provide additional or different license terms and conditions + for use, reproduction, or distribution of Your modifications, or + for any such Derivative Works as a whole, provided Your use, + reproduction, and distribution of the Work otherwise complies with + the conditions stated in this License. + + 5. Submission of Contributions. Unless You explicitly state otherwise, + any Contribution intentionally submitted for inclusion in the Work + by You to the Licensor shall be under the terms and conditions of + this License, without any additional terms or conditions. + Notwithstanding the above, nothing herein shall supersede or modify + the terms of any separate license agreement you may have executed + with Licensor regarding such Contributions. + + 6. Trademarks. This License does not grant permission to use the trade + names, trademarks, service marks, or product names of the Licensor, + except as required for reasonable and customary use in describing the + origin of the Work and reproducing the content of the NOTICE file. + + 7. Disclaimer of Warranty. Unless required by applicable law or + agreed to in writing, Licensor provides the Work (and each + Contributor provides its Contributions) on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or + implied, including, without limitation, any warranties or conditions + of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A + PARTICULAR PURPOSE. You are solely responsible for determining the + appropriateness of using or redistributing the Work and assume any + risks associated with Your exercise of permissions under this License. + + 8. Limitation of Liability. 
In no event and under no legal theory, + whether in tort (including negligence), contract, or otherwise, + unless required by applicable law (such as deliberate and grossly + negligent acts) or agreed to in writing, shall any Contributor be + liable to You for damages, including any direct, indirect, special, + incidental, or consequential damages of any character arising as a + result of this License or out of the use or inability to use the + Work (including but not limited to damages for loss of goodwill, + work stoppage, computer failure or malfunction, or any and all + other commercial damages or losses), even if such Contributor + has been advised of the possibility of such damages. + + 9. Accepting Warranty or Additional Liability. While redistributing + the Work or Derivative Works thereof, You may choose to offer, + and charge a fee for, acceptance of support, warranty, indemnity, + or other liability obligations and/or rights consistent with this + License. However, in accepting such obligations, You may act only + on Your own behalf and on Your sole responsibility, not on behalf + of any other Contributor, and only if You agree to indemnify, + defend, and hold each Contributor harmless for any liability + incurred by, or claims asserted against, such Contributor by reason + of your accepting any such warranty or additional liability. + + END OF TERMS AND CONDITIONS + + APPENDIX: How to apply the Apache License to your work. + + To apply the Apache License to your work, attach the following + boilerplate notice, with the fields enclosed by brackets "{}" + replaced with your own identifying information. (Don't include + the brackets!) The text should be enclosed in the appropriate + comment syntax for the file format. We also recommend that a + file or class name and description of purpose be included on the + same "printed page" as the copyright notice for easier + identification within third-party archives. + + Copyright 2019 Amazon.com, Inc. 
or its affiliates. All Rights Reserved. + + Licensed under the Apache License, Version 2.0 (the "License"); + you may not use this file except in compliance with the License. + You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + + Unless required by applicable law or agreed to in writing, software + distributed under the License is distributed on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + See the License for the specific language governing permissions and + limitations under the License. \ No newline at end of file diff --git a/amplify/functions/downloadDocument/node_modules/@aws-sdk/credential-provider-web-identity/README.md b/amplify/functions/downloadDocument/node_modules/@aws-sdk/credential-provider-web-identity/README.md new file mode 100644 index 0000000..e4858a4 --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@aws-sdk/credential-provider-web-identity/README.md @@ -0,0 +1,11 @@ +# @aws-sdk/credential-provider-web-identity + +[![NPM version](https://img.shields.io/npm/v/@aws-sdk/credential-provider-web-identity/latest.svg)](https://www.npmjs.com/package/@aws-sdk/credential-provider-web-identity) +[![NPM downloads](https://img.shields.io/npm/dm/@aws-sdk/credential-provider-web-identity.svg)](https://www.npmjs.com/package/@aws-sdk/credential-provider-web-identity) + +> An internal package + +## Usage + +You probably shouldn't, at least directly. Please use [@aws-sdk/credential-providers](https://www.npmjs.com/package/@aws-sdk/credential-providers) +instead. 
diff --git a/amplify/functions/downloadDocument/node_modules/@aws-sdk/credential-provider-web-identity/dist-cjs/fromTokenFile.js b/amplify/functions/downloadDocument/node_modules/@aws-sdk/credential-provider-web-identity/dist-cjs/fromTokenFile.js new file mode 100644 index 0000000..2be7727 --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@aws-sdk/credential-provider-web-identity/dist-cjs/fromTokenFile.js @@ -0,0 +1,32 @@ +"use strict"; +Object.defineProperty(exports, "__esModule", { value: true }); +exports.fromTokenFile = void 0; +const client_1 = require("@aws-sdk/core/client"); +const property_provider_1 = require("@smithy/property-provider"); +const fs_1 = require("fs"); +const fromWebToken_1 = require("./fromWebToken"); +const ENV_TOKEN_FILE = "AWS_WEB_IDENTITY_TOKEN_FILE"; +const ENV_ROLE_ARN = "AWS_ROLE_ARN"; +const ENV_ROLE_SESSION_NAME = "AWS_ROLE_SESSION_NAME"; +const fromTokenFile = (init = {}) => async () => { + init.logger?.debug("@aws-sdk/credential-provider-web-identity - fromTokenFile"); + const webIdentityTokenFile = init?.webIdentityTokenFile ?? process.env[ENV_TOKEN_FILE]; + const roleArn = init?.roleArn ?? process.env[ENV_ROLE_ARN]; + const roleSessionName = init?.roleSessionName ?? 
process.env[ENV_ROLE_SESSION_NAME]; + if (!webIdentityTokenFile || !roleArn) { + throw new property_provider_1.CredentialsProviderError("Web identity configuration not specified", { + logger: init.logger, + }); + } + const credentials = await (0, fromWebToken_1.fromWebToken)({ + ...init, + webIdentityToken: (0, fs_1.readFileSync)(webIdentityTokenFile, { encoding: "ascii" }), + roleArn, + roleSessionName, + })(); + if (webIdentityTokenFile === process.env[ENV_TOKEN_FILE]) { + (0, client_1.setCredentialFeature)(credentials, "CREDENTIALS_ENV_VARS_STS_WEB_ID_TOKEN", "h"); + } + return credentials; +}; +exports.fromTokenFile = fromTokenFile; diff --git a/amplify/functions/downloadDocument/node_modules/@aws-sdk/credential-provider-web-identity/dist-cjs/fromWebToken.js b/amplify/functions/downloadDocument/node_modules/@aws-sdk/credential-provider-web-identity/dist-cjs/fromWebToken.js new file mode 100644 index 0000000..f8eafde --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@aws-sdk/credential-provider-web-identity/dist-cjs/fromWebToken.js @@ -0,0 +1,52 @@ +"use strict"; +var __createBinding = (this && this.__createBinding) || (Object.create ? (function(o, m, k, k2) { + if (k2 === undefined) k2 = k; + var desc = Object.getOwnPropertyDescriptor(m, k); + if (!desc || ("get" in desc ? !m.__esModule : desc.writable || desc.configurable)) { + desc = { enumerable: true, get: function() { return m[k]; } }; + } + Object.defineProperty(o, k2, desc); +}) : (function(o, m, k, k2) { + if (k2 === undefined) k2 = k; + o[k2] = m[k]; +})); +var __setModuleDefault = (this && this.__setModuleDefault) || (Object.create ? 
(function(o, v) { + Object.defineProperty(o, "default", { enumerable: true, value: v }); +}) : function(o, v) { + o["default"] = v; +}); +var __importStar = (this && this.__importStar) || function (mod) { + if (mod && mod.__esModule) return mod; + var result = {}; + if (mod != null) for (var k in mod) if (k !== "default" && Object.prototype.hasOwnProperty.call(mod, k)) __createBinding(result, mod, k); + __setModuleDefault(result, mod); + return result; +}; +Object.defineProperty(exports, "__esModule", { value: true }); +exports.fromWebToken = void 0; +const fromWebToken = (init) => async (awsIdentityProperties) => { + init.logger?.debug("@aws-sdk/credential-provider-web-identity - fromWebToken"); + const { roleArn, roleSessionName, webIdentityToken, providerId, policyArns, policy, durationSeconds } = init; + let { roleAssumerWithWebIdentity } = init; + if (!roleAssumerWithWebIdentity) { + const { getDefaultRoleAssumerWithWebIdentity } = await Promise.resolve().then(() => __importStar(require("@aws-sdk/nested-clients/sts"))); + roleAssumerWithWebIdentity = getDefaultRoleAssumerWithWebIdentity({ + ...init.clientConfig, + credentialProviderLogger: init.logger, + parentClientConfig: { + ...awsIdentityProperties?.callerClientConfig, + ...init.parentClientConfig, + }, + }, init.clientPlugins); + } + return roleAssumerWithWebIdentity({ + RoleArn: roleArn, + RoleSessionName: roleSessionName ?? 
`aws-sdk-js-session-${Date.now()}`, + WebIdentityToken: webIdentityToken, + ProviderId: providerId, + PolicyArns: policyArns, + Policy: policy, + DurationSeconds: durationSeconds, + }); +}; +exports.fromWebToken = fromWebToken; diff --git a/amplify/functions/downloadDocument/node_modules/@aws-sdk/credential-provider-web-identity/dist-cjs/index.js b/amplify/functions/downloadDocument/node_modules/@aws-sdk/credential-provider-web-identity/dist-cjs/index.js new file mode 100644 index 0000000..5dc29db --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@aws-sdk/credential-provider-web-identity/dist-cjs/index.js @@ -0,0 +1,28 @@ +"use strict"; +var __defProp = Object.defineProperty; +var __getOwnPropDesc = Object.getOwnPropertyDescriptor; +var __getOwnPropNames = Object.getOwnPropertyNames; +var __hasOwnProp = Object.prototype.hasOwnProperty; +var __copyProps = (to, from, except, desc) => { + if (from && typeof from === "object" || typeof from === "function") { + for (let key of __getOwnPropNames(from)) + if (!__hasOwnProp.call(to, key) && key !== except) + __defProp(to, key, { get: () => from[key], enumerable: !(desc = __getOwnPropDesc(from, key)) || desc.enumerable }); + } + return to; +}; +var __reExport = (target, mod, secondTarget) => (__copyProps(target, mod, "default"), secondTarget && __copyProps(secondTarget, mod, "default")); +var __toCommonJS = (mod) => __copyProps(__defProp({}, "__esModule", { value: true }), mod); + +// src/index.ts +var index_exports = {}; +module.exports = __toCommonJS(index_exports); +__reExport(index_exports, require("././fromTokenFile"), module.exports); +__reExport(index_exports, require("././fromWebToken"), module.exports); +// Annotate the CommonJS export names for ESM import in node: + +0 && (module.exports = { + fromTokenFile, + fromWebToken +}); + diff --git a/amplify/functions/downloadDocument/node_modules/@aws-sdk/credential-provider-web-identity/dist-es/fromTokenFile.js 
b/amplify/functions/downloadDocument/node_modules/@aws-sdk/credential-provider-web-identity/dist-es/fromTokenFile.js new file mode 100644 index 0000000..64a5032 --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@aws-sdk/credential-provider-web-identity/dist-es/fromTokenFile.js @@ -0,0 +1,28 @@ +import { setCredentialFeature } from "@aws-sdk/core/client"; +import { CredentialsProviderError } from "@smithy/property-provider"; +import { readFileSync } from "fs"; +import { fromWebToken } from "./fromWebToken"; +const ENV_TOKEN_FILE = "AWS_WEB_IDENTITY_TOKEN_FILE"; +const ENV_ROLE_ARN = "AWS_ROLE_ARN"; +const ENV_ROLE_SESSION_NAME = "AWS_ROLE_SESSION_NAME"; +export const fromTokenFile = (init = {}) => async () => { + init.logger?.debug("@aws-sdk/credential-provider-web-identity - fromTokenFile"); + const webIdentityTokenFile = init?.webIdentityTokenFile ?? process.env[ENV_TOKEN_FILE]; + const roleArn = init?.roleArn ?? process.env[ENV_ROLE_ARN]; + const roleSessionName = init?.roleSessionName ?? 
process.env[ENV_ROLE_SESSION_NAME]; + if (!webIdentityTokenFile || !roleArn) { + throw new CredentialsProviderError("Web identity configuration not specified", { + logger: init.logger, + }); + } + const credentials = await fromWebToken({ + ...init, + webIdentityToken: readFileSync(webIdentityTokenFile, { encoding: "ascii" }), + roleArn, + roleSessionName, + })(); + if (webIdentityTokenFile === process.env[ENV_TOKEN_FILE]) { + setCredentialFeature(credentials, "CREDENTIALS_ENV_VARS_STS_WEB_ID_TOKEN", "h"); + } + return credentials; +}; diff --git a/amplify/functions/downloadDocument/node_modules/@aws-sdk/credential-provider-web-identity/dist-es/fromWebToken.js b/amplify/functions/downloadDocument/node_modules/@aws-sdk/credential-provider-web-identity/dist-es/fromWebToken.js new file mode 100644 index 0000000..268e0aa --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@aws-sdk/credential-provider-web-identity/dist-es/fromWebToken.js @@ -0,0 +1,25 @@ +export const fromWebToken = (init) => async (awsIdentityProperties) => { + init.logger?.debug("@aws-sdk/credential-provider-web-identity - fromWebToken"); + const { roleArn, roleSessionName, webIdentityToken, providerId, policyArns, policy, durationSeconds } = init; + let { roleAssumerWithWebIdentity } = init; + if (!roleAssumerWithWebIdentity) { + const { getDefaultRoleAssumerWithWebIdentity } = await import("@aws-sdk/nested-clients/sts"); + roleAssumerWithWebIdentity = getDefaultRoleAssumerWithWebIdentity({ + ...init.clientConfig, + credentialProviderLogger: init.logger, + parentClientConfig: { + ...awsIdentityProperties?.callerClientConfig, + ...init.parentClientConfig, + }, + }, init.clientPlugins); + } + return roleAssumerWithWebIdentity({ + RoleArn: roleArn, + RoleSessionName: roleSessionName ?? 
`aws-sdk-js-session-${Date.now()}`, + WebIdentityToken: webIdentityToken, + ProviderId: providerId, + PolicyArns: policyArns, + Policy: policy, + DurationSeconds: durationSeconds, + }); +}; diff --git a/amplify/functions/downloadDocument/node_modules/@aws-sdk/credential-provider-web-identity/dist-es/index.js b/amplify/functions/downloadDocument/node_modules/@aws-sdk/credential-provider-web-identity/dist-es/index.js new file mode 100644 index 0000000..0e900c0 --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@aws-sdk/credential-provider-web-identity/dist-es/index.js @@ -0,0 +1,2 @@ +export * from "./fromTokenFile"; +export * from "./fromWebToken"; diff --git a/amplify/functions/downloadDocument/node_modules/@aws-sdk/credential-provider-web-identity/dist-types/fromTokenFile.d.ts b/amplify/functions/downloadDocument/node_modules/@aws-sdk/credential-provider-web-identity/dist-types/fromTokenFile.d.ts new file mode 100644 index 0000000..58f885f --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@aws-sdk/credential-provider-web-identity/dist-types/fromTokenFile.d.ts @@ -0,0 +1,18 @@ +import { CredentialProviderOptions } from "@aws-sdk/types"; +import type { AwsCredentialIdentityProvider } from "@smithy/types"; +import { FromWebTokenInit } from "./fromWebToken"; +/** + * @public + */ +export interface FromTokenFileInit extends Partial>, CredentialProviderOptions { + /** + * File location of where the `OIDC` token is stored. + */ + webIdentityTokenFile?: string; +} +/** + * @internal + * + * Represents OIDC credentials from a file on disk. 
+ */ +export declare const fromTokenFile: (init?: FromTokenFileInit) => AwsCredentialIdentityProvider; diff --git a/amplify/functions/downloadDocument/node_modules/@aws-sdk/credential-provider-web-identity/dist-types/fromWebToken.d.ts b/amplify/functions/downloadDocument/node_modules/@aws-sdk/credential-provider-web-identity/dist-types/fromWebToken.d.ts new file mode 100644 index 0000000..6b5e066 --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@aws-sdk/credential-provider-web-identity/dist-types/fromWebToken.d.ts @@ -0,0 +1,145 @@ +import type { CredentialProviderOptions, RuntimeConfigAwsCredentialIdentityProvider } from "@aws-sdk/types"; +import type { AwsCredentialIdentity, Pluggable } from "@smithy/types"; +/** + * @public + */ +export interface AssumeRoleWithWebIdentityParams { + /** + *

The Amazon Resource Name (ARN) of the role that the caller is assuming.

+ */ + RoleArn: string; + /** + *

An identifier for the assumed role session. Typically, you pass the name or identifier + * that is associated with the user who is using your application. That way, the temporary + * security credentials that your application will use are associated with that user. This + * session name is included as part of the ARN and assumed role ID in the + * AssumedRoleUser response element.

+ *

The regex used to validate this parameter is a string of characters + * consisting of upper- and lower-case alphanumeric characters with no spaces. You can + * also include underscores or any of the following characters: =,.@-

+ */ + RoleSessionName: string; + /** + *

The OAuth 2.0 access token or OpenID Connect ID token that is provided by the identity + * provider. Your application must get this token by authenticating the user who is using your + * application with a web identity provider before the application makes an + * AssumeRoleWithWebIdentity call.

+ */ + WebIdentityToken: string; + /** + *

The fully qualified host component of the domain name of the identity provider.

+ *

Specify this value only for OAuth 2.0 access tokens. Currently + * www.amazon.com and graph.facebook.com are the only supported + * identity providers for OAuth 2.0 access tokens. Do not include URL schemes and port + * numbers.

+ *

Do not specify this value for OpenID Connect ID tokens.

+ */ + ProviderId?: string; + /** + *

The Amazon Resource Names (ARNs) of the IAM managed policies that you want to use as + * managed session policies. The policies must exist in the same account as the role.

+ *

This parameter is optional. You can provide up to 10 managed policy ARNs. However, the + * plain text that you use for both inline and managed session policies can't exceed 2,048 + * characters. For more information about ARNs, see Amazon Resource Names (ARNs) and AWS + * Service Namespaces in the AWS General Reference.

+ * + *

An AWS conversion compresses the passed session policies and session tags into a + * packed binary format that has a separate limit. Your request can fail for this limit + * even if your plain text meets the other requirements. The PackedPolicySize + * response element indicates by percentage how close the policies and tags for your + * request are to the upper size limit. + *

+ *
+ * + *

Passing policies to this operation returns new + * temporary credentials. The resulting session's permissions are the intersection of the + * role's identity-based policy and the session policies. You can use the role's temporary + * credentials in subsequent AWS API calls to access resources in the account that owns + * the role. You cannot use session policies to grant more permissions than those allowed + * by the identity-based policy of the role that is being assumed. For more information, see + * Session + * Policies in the IAM User Guide.

+ */ + PolicyArns?: { + arn?: string; + }[]; + /** + *

An IAM policy in JSON format that you want to use as an inline session policy.

+ *

This parameter is optional. Passing policies to this operation returns new + * temporary credentials. The resulting session's permissions are the intersection of the + * role's identity-based policy and the session policies. You can use the role's temporary + * credentials in subsequent AWS API calls to access resources in the account that owns + * the role. You cannot use session policies to grant more permissions than those allowed + * by the identity-based policy of the role that is being assumed. For more information, see + * Session + * Policies in the IAM User Guide.

+ *

The plain text that you use for both inline and managed session policies can't exceed + * 2,048 characters. The JSON policy characters can be any ASCII character from the space + * character to the end of the valid character list (\u0020 through \u00FF). It can also + * include the tab (\u0009), linefeed (\u000A), and carriage return (\u000D) + * characters.

+ * + *

An AWS conversion compresses the passed session policies and session tags into a + * packed binary format that has a separate limit. Your request can fail for this limit + * even if your plain text meets the other requirements. The PackedPolicySize + * response element indicates by percentage how close the policies and tags for your + * request are to the upper size limit. + *

+ *
+ */ + Policy?: string; + /** + *

The duration, in seconds, of the role session. The value can range from 900 seconds (15 + * minutes) up to the maximum session duration setting for the role. This setting can have a + * value from 1 hour to 12 hours. If you specify a value higher than this setting, the + * operation fails. For example, if you specify a session duration of 12 hours, but your + * administrator set the maximum session duration to 6 hours, your operation fails. To learn + * how to view the maximum value for your role, see View the + * Maximum Session Duration Setting for a Role in the + * IAM User Guide.

+ *

By default, the value is set to 3600 seconds.

+ * + *

The DurationSeconds parameter is separate from the duration of a console + * session that you might request using the returned credentials. The request to the + * federation endpoint for a console sign-in token takes a SessionDuration + * parameter that specifies the maximum length of the console session. For more + * information, see Creating a URL + * that Enables Federated Users to Access the AWS Management Console in the + * IAM User Guide.

+ *
+ */ + DurationSeconds?: number; +} +type LowerCaseKey = { + [K in keyof T as `${Uncapitalize}`]: T[K]; +}; +/** + * @public + */ +export interface FromWebTokenInit extends Omit, "roleSessionName">, CredentialProviderOptions { + /** + * The IAM session name used to distinguish sessions. + */ + roleSessionName?: string; + /** + * A function that assumes a role with web identity and returns a promise fulfilled with + * credentials for the assumed role. + * + * @param params input parameter of sts:AssumeRoleWithWebIdentity API. + */ + roleAssumerWithWebIdentity?: (params: AssumeRoleWithWebIdentityParams) => Promise; + /** + * STSClientConfig to be used for creating STS Client for assuming role. + * @internal + */ + clientConfig?: any; + /** + * @internal + */ + clientPlugins?: Pluggable[]; +} +/** + * @internal + */ +export declare const fromWebToken: (init: FromWebTokenInit) => RuntimeConfigAwsCredentialIdentityProvider; +export {}; diff --git a/amplify/functions/downloadDocument/node_modules/@aws-sdk/credential-provider-web-identity/dist-types/index.d.ts b/amplify/functions/downloadDocument/node_modules/@aws-sdk/credential-provider-web-identity/dist-types/index.d.ts new file mode 100644 index 0000000..36c15dc --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@aws-sdk/credential-provider-web-identity/dist-types/index.d.ts @@ -0,0 +1,8 @@ +/** + * @internal + */ +export * from "./fromTokenFile"; +/** + * @internal + */ +export * from "./fromWebToken"; diff --git a/amplify/functions/downloadDocument/node_modules/@aws-sdk/credential-provider-web-identity/dist-types/ts3.4/fromTokenFile.d.ts b/amplify/functions/downloadDocument/node_modules/@aws-sdk/credential-provider-web-identity/dist-types/ts3.4/fromTokenFile.d.ts new file mode 100644 index 0000000..4f67356 --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@aws-sdk/credential-provider-web-identity/dist-types/ts3.4/fromTokenFile.d.ts @@ -0,0 +1,16 @@ +import { 
CredentialProviderOptions } from "@aws-sdk/types"; +import { AwsCredentialIdentityProvider } from "@smithy/types"; +import { FromWebTokenInit } from "./fromWebToken"; +export interface FromTokenFileInit + extends Partial< + Pick< + FromWebTokenInit, + Exclude + > + >, + CredentialProviderOptions { + webIdentityTokenFile?: string; +} +export declare const fromTokenFile: ( + init?: FromTokenFileInit +) => AwsCredentialIdentityProvider; diff --git a/amplify/functions/downloadDocument/node_modules/@aws-sdk/credential-provider-web-identity/dist-types/ts3.4/fromWebToken.d.ts b/amplify/functions/downloadDocument/node_modules/@aws-sdk/credential-provider-web-identity/dist-types/ts3.4/fromWebToken.d.ts new file mode 100644 index 0000000..73529a1 --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@aws-sdk/credential-provider-web-identity/dist-types/ts3.4/fromWebToken.d.ts @@ -0,0 +1,39 @@ +import { + CredentialProviderOptions, + RuntimeConfigAwsCredentialIdentityProvider, +} from "@aws-sdk/types"; +import { AwsCredentialIdentity, Pluggable } from "@smithy/types"; +export interface AssumeRoleWithWebIdentityParams { + RoleArn: string; + RoleSessionName: string; + WebIdentityToken: string; + ProviderId?: string; + PolicyArns?: { + arn?: string; + }[]; + Policy?: string; + DurationSeconds?: number; +} +type LowerCaseKey = { + [K in keyof T as `${Uncapitalize}`]: T[K]; +}; +export interface FromWebTokenInit + extends Pick< + LowerCaseKey, + Exclude< + keyof LowerCaseKey, + "roleSessionName" + > + >, + CredentialProviderOptions { + roleSessionName?: string; + roleAssumerWithWebIdentity?: ( + params: AssumeRoleWithWebIdentityParams + ) => Promise; + clientConfig?: any; + clientPlugins?: Pluggable[]; +} +export declare const fromWebToken: ( + init: FromWebTokenInit +) => RuntimeConfigAwsCredentialIdentityProvider; +export {}; diff --git a/amplify/functions/downloadDocument/node_modules/@aws-sdk/credential-provider-web-identity/dist-types/ts3.4/index.d.ts 
b/amplify/functions/downloadDocument/node_modules/@aws-sdk/credential-provider-web-identity/dist-types/ts3.4/index.d.ts new file mode 100644 index 0000000..0e900c0 --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@aws-sdk/credential-provider-web-identity/dist-types/ts3.4/index.d.ts @@ -0,0 +1,2 @@ +export * from "./fromTokenFile"; +export * from "./fromWebToken"; diff --git a/amplify/functions/downloadDocument/node_modules/@aws-sdk/credential-provider-web-identity/package.json b/amplify/functions/downloadDocument/node_modules/@aws-sdk/credential-provider-web-identity/package.json new file mode 100644 index 0000000..500eb21 --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@aws-sdk/credential-provider-web-identity/package.json @@ -0,0 +1,71 @@ +{ + "name": "@aws-sdk/credential-provider-web-identity", + "version": "3.803.0", + "description": "AWS credential provider that calls STS assumeRole for temporary AWS credentials", + "main": "./dist-cjs/index.js", + "module": "./dist-es/index.js", + "scripts": { + "build": "concurrently 'yarn:build:cjs' 'yarn:build:es' 'yarn:build:types'", + "build:cjs": "node ../../scripts/compilation/inline credential-provider-web-identity", + "build:es": "tsc -p tsconfig.es.json", + "build:include:deps": "lerna run --scope $npm_package_name --include-dependencies build", + "build:types": "tsc -p tsconfig.types.json", + "build:types:downlevel": "downlevel-dts dist-types dist-types/ts3.4", + "clean": "rimraf ./dist-* && rimraf *.tsbuildinfo", + "test": "yarn g:vitest run", + "test:watch": "yarn g:vitest watch" + }, + "browser": { + "./dist-cjs/fromTokenFile": false, + "./dist-es/fromTokenFile": false + }, + "react-native": { + "./dist-es/fromTokenFile": false, + "./dist-cjs/fromTokenFile": false + }, + "keywords": [ + "aws", + "credentials" + ], + "author": { + "name": "AWS SDK for JavaScript Team", + "url": "https://aws.amazon.com/javascript/" + }, + "license": "Apache-2.0", + "dependencies": { + 
"@aws-sdk/core": "3.799.0", + "@aws-sdk/nested-clients": "3.803.0", + "@aws-sdk/types": "3.775.0", + "@smithy/property-provider": "^4.0.2", + "@smithy/types": "^4.2.0", + "tslib": "^2.6.2" + }, + "devDependencies": { + "@tsconfig/recommended": "1.0.1", + "@types/node": "^18.19.69", + "concurrently": "7.0.0", + "downlevel-dts": "0.10.1", + "rimraf": "3.0.2", + "typescript": "~5.2.2" + }, + "types": "./dist-types/index.d.ts", + "engines": { + "node": ">=18.0.0" + }, + "typesVersions": { + "<4.0": { + "dist-types/*": [ + "dist-types/ts3.4/*" + ] + } + }, + "files": [ + "dist-*/**" + ], + "homepage": "https://github.com/aws/aws-sdk-js-v3/tree/main/packages/credential-provider-web-identity", + "repository": { + "type": "git", + "url": "https://github.com/aws/aws-sdk-js-v3.git", + "directory": "packages/credential-provider-web-identity" + } +} diff --git a/amplify/functions/downloadDocument/node_modules/@aws-sdk/endpoint-cache/LICENSE b/amplify/functions/downloadDocument/node_modules/@aws-sdk/endpoint-cache/LICENSE new file mode 100644 index 0000000..74d4e5c --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@aws-sdk/endpoint-cache/LICENSE @@ -0,0 +1,201 @@ +Apache License + Version 2.0, January 2004 + http://www.apache.org/licenses/ + + TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION + + 1. Definitions. + + "License" shall mean the terms and conditions for use, reproduction, + and distribution as defined by Sections 1 through 9 of this document. + + "Licensor" shall mean the copyright owner or entity authorized by + the copyright owner that is granting the License. + + "Legal Entity" shall mean the union of the acting entity and all + other entities that control, are controlled by, or are under common + control with that entity. 
For the purposes of this definition, + "control" means (i) the power, direct or indirect, to cause the + direction or management of such entity, whether by contract or + otherwise, or (ii) ownership of fifty percent (50%) or more of the + outstanding shares, or (iii) beneficial ownership of such entity. + + "You" (or "Your") shall mean an individual or Legal Entity + exercising permissions granted by this License. + + "Source" form shall mean the preferred form for making modifications, + including but not limited to software source code, documentation + source, and configuration files. + + "Object" form shall mean any form resulting from mechanical + transformation or translation of a Source form, including but + not limited to compiled object code, generated documentation, + and conversions to other media types. + + "Work" shall mean the work of authorship, whether in Source or + Object form, made available under the License, as indicated by a + copyright notice that is included in or attached to the work + (an example is provided in the Appendix below). + + "Derivative Works" shall mean any work, whether in Source or Object + form, that is based on (or derived from) the Work and for which the + editorial revisions, annotations, elaborations, or other modifications + represent, as a whole, an original work of authorship. For the purposes + of this License, Derivative Works shall not include works that remain + separable from, or merely link (or bind by name) to the interfaces of, + the Work and Derivative Works thereof. + + "Contribution" shall mean any work of authorship, including + the original version of the Work and any modifications or additions + to that Work or Derivative Works thereof, that is intentionally + submitted to Licensor for inclusion in the Work by the copyright owner + or by an individual or Legal Entity authorized to submit on behalf of + the copyright owner. 
For the purposes of this definition, "submitted" + means any form of electronic, verbal, or written communication sent + to the Licensor or its representatives, including but not limited to + communication on electronic mailing lists, source code control systems, + and issue tracking systems that are managed by, or on behalf of, the + Licensor for the purpose of discussing and improving the Work, but + excluding communication that is conspicuously marked or otherwise + designated in writing by the copyright owner as "Not a Contribution." + + "Contributor" shall mean Licensor and any individual or Legal Entity + on behalf of whom a Contribution has been received by Licensor and + subsequently incorporated within the Work. + + 2. Grant of Copyright License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + copyright license to reproduce, prepare Derivative Works of, + publicly display, publicly perform, sublicense, and distribute the + Work and such Derivative Works in Source or Object form. + + 3. Grant of Patent License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + (except as stated in this section) patent license to make, have made, + use, offer to sell, sell, import, and otherwise transfer the Work, + where such license applies only to those patent claims licensable + by such Contributor that are necessarily infringed by their + Contribution(s) alone or by combination of their Contribution(s) + with the Work to which such Contribution(s) was submitted. 
If You + institute patent litigation against any entity (including a + cross-claim or counterclaim in a lawsuit) alleging that the Work + or a Contribution incorporated within the Work constitutes direct + or contributory patent infringement, then any patent licenses + granted to You under this License for that Work shall terminate + as of the date such litigation is filed. + + 4. Redistribution. You may reproduce and distribute copies of the + Work or Derivative Works thereof in any medium, with or without + modifications, and in Source or Object form, provided that You + meet the following conditions: + + (a) You must give any other recipients of the Work or + Derivative Works a copy of this License; and + + (b) You must cause any modified files to carry prominent notices + stating that You changed the files; and + + (c) You must retain, in the Source form of any Derivative Works + that You distribute, all copyright, patent, trademark, and + attribution notices from the Source form of the Work, + excluding those notices that do not pertain to any part of + the Derivative Works; and + + (d) If the Work includes a "NOTICE" text file as part of its + distribution, then any Derivative Works that You distribute must + include a readable copy of the attribution notices contained + within such NOTICE file, excluding those notices that do not + pertain to any part of the Derivative Works, in at least one + of the following places: within a NOTICE text file distributed + as part of the Derivative Works; within the Source form or + documentation, if provided along with the Derivative Works; or, + within a display generated by the Derivative Works, if and + wherever such third-party notices normally appear. The contents + of the NOTICE file are for informational purposes only and + do not modify the License. 
You may add Your own attribution + notices within Derivative Works that You distribute, alongside + or as an addendum to the NOTICE text from the Work, provided + that such additional attribution notices cannot be construed + as modifying the License. + + You may add Your own copyright statement to Your modifications and + may provide additional or different license terms and conditions + for use, reproduction, or distribution of Your modifications, or + for any such Derivative Works as a whole, provided Your use, + reproduction, and distribution of the Work otherwise complies with + the conditions stated in this License. + + 5. Submission of Contributions. Unless You explicitly state otherwise, + any Contribution intentionally submitted for inclusion in the Work + by You to the Licensor shall be under the terms and conditions of + this License, without any additional terms or conditions. + Notwithstanding the above, nothing herein shall supersede or modify + the terms of any separate license agreement you may have executed + with Licensor regarding such Contributions. + + 6. Trademarks. This License does not grant permission to use the trade + names, trademarks, service marks, or product names of the Licensor, + except as required for reasonable and customary use in describing the + origin of the Work and reproducing the content of the NOTICE file. + + 7. Disclaimer of Warranty. Unless required by applicable law or + agreed to in writing, Licensor provides the Work (and each + Contributor provides its Contributions) on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or + implied, including, without limitation, any warranties or conditions + of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A + PARTICULAR PURPOSE. You are solely responsible for determining the + appropriateness of using or redistributing the Work and assume any + risks associated with Your exercise of permissions under this License. + + 8. 
Limitation of Liability. In no event and under no legal theory, + whether in tort (including negligence), contract, or otherwise, + unless required by applicable law (such as deliberate and grossly + negligent acts) or agreed to in writing, shall any Contributor be + liable to You for damages, including any direct, indirect, special, + incidental, or consequential damages of any character arising as a + result of this License or out of the use or inability to use the + Work (including but not limited to damages for loss of goodwill, + work stoppage, computer failure or malfunction, or any and all + other commercial damages or losses), even if such Contributor + has been advised of the possibility of such damages. + + 9. Accepting Warranty or Additional Liability. While redistributing + the Work or Derivative Works thereof, You may choose to offer, + and charge a fee for, acceptance of support, warranty, indemnity, + or other liability obligations and/or rights consistent with this + License. However, in accepting such obligations, You may act only + on Your own behalf and on Your sole responsibility, not on behalf + of any other Contributor, and only if You agree to indemnify, + defend, and hold each Contributor harmless for any liability + incurred by, or claims asserted against, such Contributor by reason + of your accepting any such warranty or additional liability. + + END OF TERMS AND CONDITIONS + + APPENDIX: How to apply the Apache License to your work. + + To apply the Apache License to your work, attach the following + boilerplate notice, with the fields enclosed by brackets "{}" + replaced with your own identifying information. (Don't include + the brackets!) The text should be enclosed in the appropriate + comment syntax for the file format. We also recommend that a + file or class name and description of purpose be included on the + same "printed page" as the copyright notice for easier + identification within third-party archives. 
+ + Copyright 2020 Amazon.com, Inc. or its affiliates. All Rights Reserved. + + Licensed under the Apache License, Version 2.0 (the "License"); + you may not use this file except in compliance with the License. + You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + + Unless required by applicable law or agreed to in writing, software + distributed under the License is distributed on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + See the License for the specific language governing permissions and + limitations under the License. \ No newline at end of file diff --git a/amplify/functions/downloadDocument/node_modules/@aws-sdk/endpoint-cache/README.md b/amplify/functions/downloadDocument/node_modules/@aws-sdk/endpoint-cache/README.md new file mode 100644 index 0000000..5d72b8c --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@aws-sdk/endpoint-cache/README.md @@ -0,0 +1,17 @@ +# @aws-sdk/endpoint-cache + +[![NPM version](https://img.shields.io/npm/v/@aws-sdk/endpoint-cache/latest.svg)](https://www.npmjs.com/package/@aws-sdk/endpoint-cache) +[![NPM downloads](https://img.shields.io/npm/dm/@aws-sdk/endpoint-cache.svg)](https://www.npmjs.com/package/@aws-sdk/endpoint-cache) + +> An internal package + +## Usage + +You probably shouldn't, at least directly. + +## EndpointCache + +- uses `mnemonist/lru-cache` for storing the cache. +- the `set` operation stores milliseconds elapsed since the UNIX epoch in Expires param based on CachePeriodInMinutes provided in Endpoint. +- the `get` operation returns all un-expired endpoints with their Expires values. +- the `getEndpoint` operation returns a randomly selected un-expired endpoint. 
diff --git a/amplify/functions/downloadDocument/node_modules/@aws-sdk/endpoint-cache/dist-cjs/index.js b/amplify/functions/downloadDocument/node_modules/@aws-sdk/endpoint-cache/dist-cjs/index.js new file mode 100644 index 0000000..6975621 --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@aws-sdk/endpoint-cache/dist-cjs/index.js @@ -0,0 +1,140 @@ +"use strict"; +var __create = Object.create; +var __defProp = Object.defineProperty; +var __getOwnPropDesc = Object.getOwnPropertyDescriptor; +var __getOwnPropNames = Object.getOwnPropertyNames; +var __getProtoOf = Object.getPrototypeOf; +var __hasOwnProp = Object.prototype.hasOwnProperty; +var __name = (target, value) => __defProp(target, "name", { value, configurable: true }); +var __export = (target, all) => { + for (var name in all) + __defProp(target, name, { get: all[name], enumerable: true }); +}; +var __copyProps = (to, from, except, desc) => { + if (from && typeof from === "object" || typeof from === "function") { + for (let key of __getOwnPropNames(from)) + if (!__hasOwnProp.call(to, key) && key !== except) + __defProp(to, key, { get: () => from[key], enumerable: !(desc = __getOwnPropDesc(from, key)) || desc.enumerable }); + } + return to; +}; +var __toESM = (mod, isNodeMode, target) => (target = mod != null ? __create(__getProtoOf(mod)) : {}, __copyProps( + // If the importer is in node compatibility mode or this is not an ESM + // file that has been converted to a CommonJS file using a Babel- + // compatible transform (i.e. "__esModule" has not been set), then set + // "default" to the CommonJS "module.exports" for node compatibility. + isNodeMode || !mod || !mod.__esModule ? 
__defProp(target, "default", { value: mod, enumerable: true }) : target, + mod +)); +var __toCommonJS = (mod) => __copyProps(__defProp({}, "__esModule", { value: true }), mod); + +// src/index.ts +var src_exports = {}; +__export(src_exports, { + EndpointCache: () => EndpointCache +}); +module.exports = __toCommonJS(src_exports); + +// src/EndpointCache.ts +var import_lru_cache = __toESM(require("mnemonist/lru-cache")); +var EndpointCache = class { + static { + __name(this, "EndpointCache"); + } + cache; + constructor(capacity) { + this.cache = new import_lru_cache.default(capacity); + } + /** + * Returns an un-expired endpoint for the given key. + * + * @param endpointsWithExpiry + * @returns + */ + getEndpoint(key) { + const endpointsWithExpiry = this.get(key); + if (!endpointsWithExpiry || endpointsWithExpiry.length === 0) { + return void 0; + } + const endpoints = endpointsWithExpiry.map((endpoint) => endpoint.Address); + return endpoints[Math.floor(Math.random() * endpoints.length)]; + } + /** + * Returns un-expired endpoints for the given key. + * + * @param key + * @returns + */ + get(key) { + if (!this.has(key)) { + return; + } + const value = this.cache.get(key); + if (!value) { + return; + } + const now = Date.now(); + const endpointsWithExpiry = value.filter((endpoint) => now < endpoint.Expires); + if (endpointsWithExpiry.length === 0) { + this.delete(key); + return void 0; + } + return endpointsWithExpiry; + } + /** + * Stores the endpoints passed for the key in cache. + * If not defined, uses empty string for the Address in endpoint. + * If not defined, uses one minute for CachePeriodInMinutes in endpoint. + * Stores milliseconds elapsed since the UNIX epoch in Expires param based + * on value provided in CachePeriodInMinutes. 
+ * + * @param key + * @param endpoints + */ + set(key, endpoints) { + const now = Date.now(); + this.cache.set( + key, + endpoints.map(({ Address, CachePeriodInMinutes }) => ({ + Address, + Expires: now + CachePeriodInMinutes * 60 * 1e3 + })) + ); + } + /** + * Deletes the value for the given key in the cache. + * + * @param {string} key + */ + delete(key) { + this.cache.set(key, []); + } + /** + * Checks whether the key exists in cache. + * + * @param {string} key + * @returns {boolean} + */ + has(key) { + if (!this.cache.has(key)) { + return false; + } + const endpoints = this.cache.peek(key); + if (!endpoints) { + return false; + } + return endpoints.length > 0; + } + /** + * Clears the cache. + */ + clear() { + this.cache.clear(); + } +}; +// Annotate the CommonJS export names for ESM import in node: + +0 && (module.exports = { + EndpointCache +}); + diff --git a/amplify/functions/downloadDocument/node_modules/@aws-sdk/endpoint-cache/dist-es/Endpoint.js b/amplify/functions/downloadDocument/node_modules/@aws-sdk/endpoint-cache/dist-es/Endpoint.js new file mode 100644 index 0000000..cb0ff5c --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@aws-sdk/endpoint-cache/dist-es/Endpoint.js @@ -0,0 +1 @@ +export {}; diff --git a/amplify/functions/downloadDocument/node_modules/@aws-sdk/endpoint-cache/dist-es/EndpointCache.js b/amplify/functions/downloadDocument/node_modules/@aws-sdk/endpoint-cache/dist-es/EndpointCache.js new file mode 100644 index 0000000..decd3f2 --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@aws-sdk/endpoint-cache/dist-es/EndpointCache.js @@ -0,0 +1,54 @@ +import LRUCache from "mnemonist/lru-cache"; +export class EndpointCache { + cache; + constructor(capacity) { + this.cache = new LRUCache(capacity); + } + getEndpoint(key) { + const endpointsWithExpiry = this.get(key); + if (!endpointsWithExpiry || endpointsWithExpiry.length === 0) { + return undefined; + } + const endpoints = endpointsWithExpiry.map((endpoint) 
=> endpoint.Address); + return endpoints[Math.floor(Math.random() * endpoints.length)]; + } + get(key) { + if (!this.has(key)) { + return; + } + const value = this.cache.get(key); + if (!value) { + return; + } + const now = Date.now(); + const endpointsWithExpiry = value.filter((endpoint) => now < endpoint.Expires); + if (endpointsWithExpiry.length === 0) { + this.delete(key); + return undefined; + } + return endpointsWithExpiry; + } + set(key, endpoints) { + const now = Date.now(); + this.cache.set(key, endpoints.map(({ Address, CachePeriodInMinutes }) => ({ + Address, + Expires: now + CachePeriodInMinutes * 60 * 1000, + }))); + } + delete(key) { + this.cache.set(key, []); + } + has(key) { + if (!this.cache.has(key)) { + return false; + } + const endpoints = this.cache.peek(key); + if (!endpoints) { + return false; + } + return endpoints.length > 0; + } + clear() { + this.cache.clear(); + } +} diff --git a/amplify/functions/downloadDocument/node_modules/@aws-sdk/endpoint-cache/dist-es/index.js b/amplify/functions/downloadDocument/node_modules/@aws-sdk/endpoint-cache/dist-es/index.js new file mode 100644 index 0000000..41fce6d --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@aws-sdk/endpoint-cache/dist-es/index.js @@ -0,0 +1,2 @@ +export * from "./Endpoint"; +export * from "./EndpointCache"; diff --git a/amplify/functions/downloadDocument/node_modules/@aws-sdk/endpoint-cache/dist-types/Endpoint.d.ts b/amplify/functions/downloadDocument/node_modules/@aws-sdk/endpoint-cache/dist-types/Endpoint.d.ts new file mode 100644 index 0000000..17b37cf --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@aws-sdk/endpoint-cache/dist-types/Endpoint.d.ts @@ -0,0 +1,13 @@ +/** + * @internal + */ +export interface Endpoint { + /** + *

An endpoint address.

+ */ + Address: string; + /** + *

The TTL for the endpoint, in minutes.

+ */ + CachePeriodInMinutes: number; +} diff --git a/amplify/functions/downloadDocument/node_modules/@aws-sdk/endpoint-cache/dist-types/EndpointCache.d.ts b/amplify/functions/downloadDocument/node_modules/@aws-sdk/endpoint-cache/dist-types/EndpointCache.d.ts new file mode 100644 index 0000000..5128e14 --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@aws-sdk/endpoint-cache/dist-types/EndpointCache.d.ts @@ -0,0 +1,56 @@ +import { Endpoint } from "./Endpoint"; +/** + * @internal + */ +export interface EndpointWithExpiry extends Pick { + Expires: number; +} +/** + * @internal + */ +export declare class EndpointCache { + private readonly cache; + constructor(capacity: number); + /** + * Returns an un-expired endpoint for the given key. + * + * @param endpointsWithExpiry + * @returns + */ + getEndpoint(key: string): string | undefined; + /** + * Returns un-expired endpoints for the given key. + * + * @param key + * @returns + */ + get(key: string): EndpointWithExpiry[] | undefined; + /** + * Stores the endpoints passed for the key in cache. + * If not defined, uses empty string for the Address in endpoint. + * If not defined, uses one minute for CachePeriodInMinutes in endpoint. + * Stores milliseconds elapsed since the UNIX epoch in Expires param based + * on value provided in CachePeriodInMinutes. + * + * @param key + * @param endpoints + */ + set(key: string, endpoints: Endpoint[]): void; + /** + * Deletes the value for the given key in the cache. + * + * @param {string} key + */ + delete(key: string): void; + /** + * Checks whether the key exists in cache. + * + * @param {string} key + * @returns {boolean} + */ + has(key: string): boolean; + /** + * Clears the cache. 
+ */ + clear(): void; +} diff --git a/amplify/functions/downloadDocument/node_modules/@aws-sdk/endpoint-cache/dist-types/index.d.ts b/amplify/functions/downloadDocument/node_modules/@aws-sdk/endpoint-cache/dist-types/index.d.ts new file mode 100644 index 0000000..f2f149f --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@aws-sdk/endpoint-cache/dist-types/index.d.ts @@ -0,0 +1,8 @@ +/** + * @internal + */ +export * from "./Endpoint"; +/** + * @internal + */ +export * from "./EndpointCache"; diff --git a/amplify/functions/downloadDocument/node_modules/@aws-sdk/endpoint-cache/dist-types/ts3.4/Endpoint.d.ts b/amplify/functions/downloadDocument/node_modules/@aws-sdk/endpoint-cache/dist-types/ts3.4/Endpoint.d.ts new file mode 100644 index 0000000..c1caacb --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@aws-sdk/endpoint-cache/dist-types/ts3.4/Endpoint.d.ts @@ -0,0 +1,4 @@ +export interface Endpoint { + Address: string; + CachePeriodInMinutes: number; +} diff --git a/amplify/functions/downloadDocument/node_modules/@aws-sdk/endpoint-cache/dist-types/ts3.4/EndpointCache.d.ts b/amplify/functions/downloadDocument/node_modules/@aws-sdk/endpoint-cache/dist-types/ts3.4/EndpointCache.d.ts new file mode 100644 index 0000000..c01e2b3 --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@aws-sdk/endpoint-cache/dist-types/ts3.4/EndpointCache.d.ts @@ -0,0 +1,14 @@ +import { Endpoint } from "./Endpoint"; +export interface EndpointWithExpiry extends Pick { + Expires: number; +} +export declare class EndpointCache { + private readonly cache; + constructor(capacity: number); + getEndpoint(key: string): string | undefined; + get(key: string): EndpointWithExpiry[] | undefined; + set(key: string, endpoints: Endpoint[]): void; + delete(key: string): void; + has(key: string): boolean; + clear(): void; +} diff --git a/amplify/functions/downloadDocument/node_modules/@aws-sdk/endpoint-cache/dist-types/ts3.4/index.d.ts 
b/amplify/functions/downloadDocument/node_modules/@aws-sdk/endpoint-cache/dist-types/ts3.4/index.d.ts new file mode 100644 index 0000000..41fce6d --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@aws-sdk/endpoint-cache/dist-types/ts3.4/index.d.ts @@ -0,0 +1,2 @@ +export * from "./Endpoint"; +export * from "./EndpointCache"; diff --git a/amplify/functions/downloadDocument/node_modules/@aws-sdk/endpoint-cache/package.json b/amplify/functions/downloadDocument/node_modules/@aws-sdk/endpoint-cache/package.json new file mode 100644 index 0000000..13edb6f --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@aws-sdk/endpoint-cache/package.json @@ -0,0 +1,54 @@ +{ + "name": "@aws-sdk/endpoint-cache", + "version": "3.723.0", + "scripts": { + "build": "concurrently 'yarn:build:cjs' 'yarn:build:es' 'yarn:build:types'", + "build:cjs": "node ../../scripts/compilation/inline endpoint-cache", + "build:es": "tsc -p tsconfig.es.json", + "build:include:deps": "lerna run --scope $npm_package_name --include-dependencies build", + "build:types": "tsc -p tsconfig.types.json", + "build:types:downlevel": "downlevel-dts dist-types dist-types/ts3.4", + "clean": "rimraf ./dist-* && rimraf *.tsbuildinfo", + "test": "yarn g:vitest run", + "test:watch": "yarn g:vitest watch" + }, + "author": { + "name": "AWS SDK for JavaScript Team", + "url": "https://aws.amazon.com/javascript/" + }, + "license": "Apache-2.0", + "main": "./dist-cjs/index.js", + "module": "./dist-es/index.js", + "types": "./dist-types/index.d.ts", + "dependencies": { + "mnemonist": "0.38.3", + "tslib": "^2.6.2" + }, + "devDependencies": { + "@tsconfig/recommended": "1.0.1", + "@types/node": "^18.19.69", + "concurrently": "7.0.0", + "downlevel-dts": "0.10.1", + "rimraf": "3.0.2", + "typescript": "~5.2.2" + }, + "engines": { + "node": ">=18.0.0" + }, + "typesVersions": { + "<4.0": { + "dist-types/*": [ + "dist-types/ts3.4/*" + ] + } + }, + "files": [ + "dist-*/**" + ], + "homepage": 
"https://github.com/aws/aws-sdk-js-v3/tree/main/packages/endpoint-cache", + "repository": { + "type": "git", + "url": "https://github.com/aws/aws-sdk-js-v3.git", + "directory": "packages/endpoint-cache" + } +} diff --git a/amplify/functions/downloadDocument/node_modules/@aws-sdk/lib-dynamodb/LICENSE b/amplify/functions/downloadDocument/node_modules/@aws-sdk/lib-dynamodb/LICENSE new file mode 100644 index 0000000..7b6491b --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@aws-sdk/lib-dynamodb/LICENSE @@ -0,0 +1,201 @@ +Apache License + Version 2.0, January 2004 + http://www.apache.org/licenses/ + + TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION + + 1. Definitions. + + "License" shall mean the terms and conditions for use, reproduction, + and distribution as defined by Sections 1 through 9 of this document. + + "Licensor" shall mean the copyright owner or entity authorized by + the copyright owner that is granting the License. + + "Legal Entity" shall mean the union of the acting entity and all + other entities that control, are controlled by, or are under common + control with that entity. For the purposes of this definition, + "control" means (i) the power, direct or indirect, to cause the + direction or management of such entity, whether by contract or + otherwise, or (ii) ownership of fifty percent (50%) or more of the + outstanding shares, or (iii) beneficial ownership of such entity. + + "You" (or "Your") shall mean an individual or Legal Entity + exercising permissions granted by this License. + + "Source" form shall mean the preferred form for making modifications, + including but not limited to software source code, documentation + source, and configuration files. + + "Object" form shall mean any form resulting from mechanical + transformation or translation of a Source form, including but + not limited to compiled object code, generated documentation, + and conversions to other media types. 
+ + "Work" shall mean the work of authorship, whether in Source or + Object form, made available under the License, as indicated by a + copyright notice that is included in or attached to the work + (an example is provided in the Appendix below). + + "Derivative Works" shall mean any work, whether in Source or Object + form, that is based on (or derived from) the Work and for which the + editorial revisions, annotations, elaborations, or other modifications + represent, as a whole, an original work of authorship. For the purposes + of this License, Derivative Works shall not include works that remain + separable from, or merely link (or bind by name) to the interfaces of, + the Work and Derivative Works thereof. + + "Contribution" shall mean any work of authorship, including + the original version of the Work and any modifications or additions + to that Work or Derivative Works thereof, that is intentionally + submitted to Licensor for inclusion in the Work by the copyright owner + or by an individual or Legal Entity authorized to submit on behalf of + the copyright owner. For the purposes of this definition, "submitted" + means any form of electronic, verbal, or written communication sent + to the Licensor or its representatives, including but not limited to + communication on electronic mailing lists, source code control systems, + and issue tracking systems that are managed by, or on behalf of, the + Licensor for the purpose of discussing and improving the Work, but + excluding communication that is conspicuously marked or otherwise + designated in writing by the copyright owner as "Not a Contribution." + + "Contributor" shall mean Licensor and any individual or Legal Entity + on behalf of whom a Contribution has been received by Licensor and + subsequently incorporated within the Work. + + 2. Grant of Copyright License. 
Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + copyright license to reproduce, prepare Derivative Works of, + publicly display, publicly perform, sublicense, and distribute the + Work and such Derivative Works in Source or Object form. + + 3. Grant of Patent License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + (except as stated in this section) patent license to make, have made, + use, offer to sell, sell, import, and otherwise transfer the Work, + where such license applies only to those patent claims licensable + by such Contributor that are necessarily infringed by their + Contribution(s) alone or by combination of their Contribution(s) + with the Work to which such Contribution(s) was submitted. If You + institute patent litigation against any entity (including a + cross-claim or counterclaim in a lawsuit) alleging that the Work + or a Contribution incorporated within the Work constitutes direct + or contributory patent infringement, then any patent licenses + granted to You under this License for that Work shall terminate + as of the date such litigation is filed. + + 4. Redistribution. 
You may reproduce and distribute copies of the + Work or Derivative Works thereof in any medium, with or without + modifications, and in Source or Object form, provided that You + meet the following conditions: + + (a) You must give any other recipients of the Work or + Derivative Works a copy of this License; and + + (b) You must cause any modified files to carry prominent notices + stating that You changed the files; and + + (c) You must retain, in the Source form of any Derivative Works + that You distribute, all copyright, patent, trademark, and + attribution notices from the Source form of the Work, + excluding those notices that do not pertain to any part of + the Derivative Works; and + + (d) If the Work includes a "NOTICE" text file as part of its + distribution, then any Derivative Works that You distribute must + include a readable copy of the attribution notices contained + within such NOTICE file, excluding those notices that do not + pertain to any part of the Derivative Works, in at least one + of the following places: within a NOTICE text file distributed + as part of the Derivative Works; within the Source form or + documentation, if provided along with the Derivative Works; or, + within a display generated by the Derivative Works, if and + wherever such third-party notices normally appear. The contents + of the NOTICE file are for informational purposes only and + do not modify the License. You may add Your own attribution + notices within Derivative Works that You distribute, alongside + or as an addendum to the NOTICE text from the Work, provided + that such additional attribution notices cannot be construed + as modifying the License. 
+ + You may add Your own copyright statement to Your modifications and + may provide additional or different license terms and conditions + for use, reproduction, or distribution of Your modifications, or + for any such Derivative Works as a whole, provided Your use, + reproduction, and distribution of the Work otherwise complies with + the conditions stated in this License. + + 5. Submission of Contributions. Unless You explicitly state otherwise, + any Contribution intentionally submitted for inclusion in the Work + by You to the Licensor shall be under the terms and conditions of + this License, without any additional terms or conditions. + Notwithstanding the above, nothing herein shall supersede or modify + the terms of any separate license agreement you may have executed + with Licensor regarding such Contributions. + + 6. Trademarks. This License does not grant permission to use the trade + names, trademarks, service marks, or product names of the Licensor, + except as required for reasonable and customary use in describing the + origin of the Work and reproducing the content of the NOTICE file. + + 7. Disclaimer of Warranty. Unless required by applicable law or + agreed to in writing, Licensor provides the Work (and each + Contributor provides its Contributions) on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or + implied, including, without limitation, any warranties or conditions + of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A + PARTICULAR PURPOSE. You are solely responsible for determining the + appropriateness of using or redistributing the Work and assume any + risks associated with Your exercise of permissions under this License. + + 8. Limitation of Liability. 
In no event and under no legal theory, + whether in tort (including negligence), contract, or otherwise, + unless required by applicable law (such as deliberate and grossly + negligent acts) or agreed to in writing, shall any Contributor be + liable to You for damages, including any direct, indirect, special, + incidental, or consequential damages of any character arising as a + result of this License or out of the use or inability to use the + Work (including but not limited to damages for loss of goodwill, + work stoppage, computer failure or malfunction, or any and all + other commercial damages or losses), even if such Contributor + has been advised of the possibility of such damages. + + 9. Accepting Warranty or Additional Liability. While redistributing + the Work or Derivative Works thereof, You may choose to offer, + and charge a fee for, acceptance of support, warranty, indemnity, + or other liability obligations and/or rights consistent with this + License. However, in accepting such obligations, You may act only + on Your own behalf and on Your sole responsibility, not on behalf + of any other Contributor, and only if You agree to indemnify, + defend, and hold each Contributor harmless for any liability + incurred by, or claims asserted against, such Contributor by reason + of your accepting any such warranty or additional liability. + + END OF TERMS AND CONDITIONS + + APPENDIX: How to apply the Apache License to your work. + + To apply the Apache License to your work, attach the following + boilerplate notice, with the fields enclosed by brackets "{}" + replaced with your own identifying information. (Don't include + the brackets!) The text should be enclosed in the appropriate + comment syntax for the file format. We also recommend that a + file or class name and description of purpose be included on the + same "printed page" as the copyright notice for easier + identification within third-party archives. + + Copyright 2018-2020 Amazon.com, Inc. 
or its affiliates. All Rights Reserved. + + Licensed under the Apache License, Version 2.0 (the "License"); + you may not use this file except in compliance with the License. + You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + + Unless required by applicable law or agreed to in writing, software + distributed under the License is distributed on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + See the License for the specific language governing permissions and + limitations under the License. \ No newline at end of file diff --git a/amplify/functions/downloadDocument/node_modules/@aws-sdk/lib-dynamodb/README.md b/amplify/functions/downloadDocument/node_modules/@aws-sdk/lib-dynamodb/README.md new file mode 100644 index 0000000..fc88a48 --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@aws-sdk/lib-dynamodb/README.md @@ -0,0 +1,393 @@ +# @aws-sdk/lib-dynamodb + +[![NPM version](https://img.shields.io/npm/v/@aws-sdk/lib-dynamodb/latest.svg)](https://www.npmjs.com/package/@aws-sdk/lib-dynamodb) +[![NPM downloads](https://img.shields.io/npm/dm/@aws-sdk/lib-dynamodb.svg)](https://www.npmjs.com/package/@aws-sdk/lib-dynamodb) + +## Overview + +The document client simplifies working with items in Amazon DynamoDB by +abstracting away the notion of attribute values. This abstraction annotates native +JavaScript types supplied as input parameters, as well as converts annotated +response data to native JavaScript types. + +## Marshalling Input and Unmarshalling Response Data + +The document client affords developers the use of native JavaScript types +instead of `AttributeValue`s to simplify the JavaScript development +experience with Amazon DynamoDB. JavaScript objects passed in as parameters +are marshalled into `AttributeValue` shapes required by Amazon DynamoDB. +Responses from DynamoDB are unmarshalled into plain JavaScript objects +by the `DocumentClient`. 
The `DocumentClient` does not accept +`AttributeValue`s in favor of native JavaScript types. + +| JavaScript Type | DynamoDB AttributeValue | +| :--------------------------------: | ----------------------- | +| String | S | +| Number / BigInt / NumberValue | N | +| Boolean | BOOL | +| null | NULL | +| Array | L | +| Object | M | +| Set\ | BS | +| Set\ | NS | +| Set\ | SS | +| Uint8Array, Buffer, File, Blob... | B | + +### Example + +Here is an example list which is sent to DynamoDB client in an operation: + +```json +{ "L": [{ "NULL": true }, { "BOOL": false }, { "N": 1 }, { "S": "two" }] } +``` + +The DynamoDB document client abstracts the attribute values as follows in +both input and output: + +```json +[null, false, 1, "two"] +``` + +## Usage + +To create document client, you need to create DynamoDB client first as follows: + +```js +import { DynamoDBClient } from "@aws-sdk/client-dynamodb"; // ES6 import +// const { DynamoDBClient } = require("@aws-sdk/client-dynamodb"); // CommonJS import + +// Bare-bones DynamoDB Client +const client = new DynamoDBClient({}); +``` + +```js +import { DynamoDB } from "@aws-sdk/client-dynamodb"; // ES6 import +// const { DynamoDB } = require("@aws-sdk/client-dynamodb"); // CommonJS import + +// Full DynamoDB Client +const client = new DynamoDB({}); +``` + +The bare-bones clients are more modular. They reduce bundle size and improve +loading performance over full clients as explained in blog post on +[modular packages in AWS SDK for JavaScript](https://aws.amazon.com/blogs/developer/modular-packages-in-aws-sdk-for-javascript/). 
+ +### Constructor + +Once DynamoDB client is created, you can either create the bare-bones +document client or full document client as follows: + +```js +import { DynamoDBDocumentClient } from "@aws-sdk/lib-dynamodb"; // ES6 import +// const { DynamoDBDocumentClient } = require("@aws-sdk/lib-dynamodb"); // CommonJS import + +// Bare-bones document client +const ddbDocClient = DynamoDBDocumentClient.from(client); // client is DynamoDB client +``` + +```js +import { DynamoDBDocument } from "@aws-sdk/lib-dynamodb"; // ES6 import +// const { DynamoDBDocument } = require("@aws-sdk/lib-dynamodb"); // CommonJS import + +// Full document client +const ddbDocClient = DynamoDBDocument.from(client); // client is DynamoDB client +``` + +### Configuration + +The configuration for marshalling and unmarshalling can be sent as an optional +second parameter during creation of document client as follows: + +```ts +export interface marshallOptions { + /** + * Whether to automatically convert empty strings, blobs, and sets to `null` + */ + convertEmptyValues?: boolean; + /** + * Whether to remove undefined values from JS arrays/Sets/objects + * when marshalling to DynamoDB lists/sets/maps respectively. + * + * A DynamoDB item is not itself considered a map. Only + * attributes of an item are examined. + */ + removeUndefinedValues?: boolean; + /** + * Whether to convert typeof object to map attribute. + */ + convertClassInstanceToMap?: boolean; + /** + * Whether to convert the top level container + * if it is a map or list. + * + * Default is true when using the DynamoDBDocumentClient, + * but false if directly using the marshall function (backwards compatibility). + */ + convertTopLevelContainer?: boolean; + /** + * Whether to allow numbers beyond Number.MAX_SAFE_INTEGER during marshalling. + * When set to true, allows numbers that may lose precision when converted to JavaScript numbers. 
+ * When false (default), throws an error if a number exceeds Number.MAX_SAFE_INTEGER to prevent + * unintended loss of precision. Consider using the NumberValue type from @aws-sdk/lib-dynamodb + * for precise handling of large numbers. + */ + allowImpreciseNumbers?: boolean; +} + +export interface unmarshallOptions { + /** + * Whether to modify how numbers are unmarshalled from DynamoDB. + * When set to true, returns numbers as NumberValue instances instead of native JavaScript numbers. + * This allows for the safe round-trip transport of numbers of arbitrary size. + * + * If a function is provided, it will be called with the string representation of numbers to handle + * custom conversions (e.g., using BigInt or decimal libraries). + */ + wrapNumbers?: boolean | ((value: string) => number | bigint | NumberValue | any); + /** + * When true, skip wrapping the data in `{ M: data }` before converting. + * + * Default is true when using the DynamoDBDocumentClient, + * but false if directly using the unmarshall function (backwards compatibility). + */ + convertWithoutMapWrapper?: boolean; +} + +const marshallOptions: marshallOptions = {}; +const unmarshallOptions: unmarshallOptions = {}; + +const translateConfig = { marshallOptions, unmarshallOptions }; + +const client = new DynamoDBClient({}); +const ddbDocClient = DynamoDBDocument.from(client, translateConfig); +``` + +### Calling operations + +You can call the document client operations using command objects on bare-bones +client as follows: + +```js +import { DynamoDBDocumentClient, PutCommand } from "@aws-sdk/lib-dynamodb"; + +// ... DynamoDB client creation + +const ddbDocClient = DynamoDBDocumentClient.from(client); +// Call using bare-bones client and Command object. 
+await ddbDocClient.send( + new PutCommand({ + TableName, + Item: { + id: "1", + content: "content from DynamoDBDocumentClient", + }, + }) +); +``` + +You can also call operations on full client as follows: + +```js +import { DynamoDBDocument } from "@aws-sdk/lib-dynamodb"; + +// ... DynamoDB client creation + +const ddbDocClient = DynamoDBDocument.from(client); +// Call using full client. +await ddbDocClient.put({ + TableName, + Item: { + id: "2", + content: "content from DynamoDBDocument", + }, +}); +``` + +### Large Numbers and `NumberValue`. + +On the input or marshalling side, the class `NumberValue` can be used +anywhere to represent a DynamoDB number value, even small numbers. + +```ts +import { DynamoDB } from "@aws-sdk/client-dynamodb"; +import { NumberValue, DynamoDBDocument } from "@aws-sdk/lib-dynamodb"; + +// Note, the client will not validate the acceptability of the number +// in terms of size or format. +// It is only here to preserve your precise representation. +const client = DynamoDBDocument.from(new DynamoDB({})); + +await client.put({ + Item: { + id: 1, + smallNumber: NumberValue.from("123"), + bigNumber: NumberValue.from("1000000000000000000000.000000000001"), + nSet: new Set([123, NumberValue.from("456"), 789]), + }, +}); +``` + +On the output or unmarshalling side, the class `NumberValue` is used +depending on your setting for the `unmarshallOptions` flag `wrapnumbers`, +shown above. + +```ts +import { DynamoDB } from "@aws-sdk/client-dynamodb"; +import { NumberValue, DynamoDBDocument } from "@aws-sdk/lib-dynamodb"; + +const client = DynamoDBDocument.from(new DynamoDB({})); + +const response = await client.get({ + Key: { + id: 1, + }, +}); + +/** + * Numbers in the response may be a number, a BigInt, or a NumberValue depending + * on how you set `wrapNumbers`. 
+ */ +const value = response.Item.bigNumber; +``` + +You can also provide a custom function to handle number conversion during unmarshalling: + +```typescript +const client = DynamoDBDocument.from(new DynamoDB({}), { + unmarshallOptions: { + // Use BigInt for all numbers + wrapNumbers: (str) => BigInt(str), + }, +}); + +const response = await client.get({ + Key: { id: 1 }, +}); + +// Numbers in response will be BigInt instead of NumberValue or regular numbers +``` + +`NumberValue` does not provide a way to do mathematical operations on itself. +To do mathematical operations, take the string value of `NumberValue` by calling +`.toString()` and supply it to your chosen big number implementation. + +The client protects against precision loss by throwing an error on large numbers, but you can either +allow imprecise values with `allowImpreciseNumbers` or maintain exact precision using `NumberValue`. + +```typescript +const preciseValue = "34567890123456789012345678901234567890"; + +// 1. Default behavior - will throw error +await client.send( + new PutCommand({ + TableName: "Table", + Item: { + id: "1", + number: Number(preciseValue), // Throws error: Number is greater than Number.MAX_SAFE_INTEGER + }, + }) +); + +// 2. Using allowImpreciseNumbers - will store but loses precision (mimics the v2 implicit behavior) +const impreciseClient = DynamoDBDocumentClient.from(new DynamoDBClient({}), { + marshallOptions: { allowImpreciseNumbers: true }, +}); +await impreciseClient.send( + new PutCommand({ + TableName: "Table", + Item: { + id: "2", + number: Number(preciseValue), // Loses precision 34567890123456790000000000000000000000n + }, + }) +); +``` + +### Client and Command middleware stacks + +As with other AWS SDK for JavaScript v3 clients, you can apply middleware functions +both on the client itself and individual `Command`s. + +For individual `Command`s, here are examples of how to add middleware before and after +both marshalling and unmarshalling. 
We will use `QueryCommand` as an example. +Others follow the same pattern. + +```js +import { DynamoDBDocumentClient, QueryCommand } from "@aws-sdk/lib-dynamodb"; + +const client = new DynamoDBClient({ + /*...*/ +}); +const doc = DynamoDBDocumentClient.from(client); +const command = new QueryCommand({ + /*...*/ +}); +``` + +Before and after marshalling: + +```js +command.middlewareStack.addRelativeTo( + (next) => async (args) => { + console.log("pre-marshall", args.input); + return next(args); + }, + { + relation: "before", + toMiddleware: "DocumentMarshall", + } +); +command.middlewareStack.addRelativeTo( + (next) => async (args) => { + console.log("post-marshall", args.input); + return next(args); + }, + { + relation: "after", + toMiddleware: "DocumentMarshall", + } +); +``` + +Before and after unmarshalling: + +```js +command.middlewareStack.addRelativeTo( + (next) => async (args) => { + const result = await next(args); + console.log("pre-unmarshall", result.output.Items); + return result; + }, + { + relation: "after", // <- after for pre-unmarshall + toMiddleware: "DocumentUnmarshall", + } +); +command.middlewareStack.addRelativeTo( + (next) => async (args) => { + const result = await next(args); + console.log("post-unmarshall", result.output.Items); + return result; + }, + { + relation: "before", // <- before for post-unmarshall + toMiddleware: "DocumentUnmarshall", + } +); +``` + +### Destroying document client + +The `destroy()` call on document client is a no-op as document client does not +create a new DynamoDB client. You need to call `destroy()` on DynamoDB client to +clean resources used by it as shown below. + +```js +const client = new DynamoDBClient({}); +const ddbDocClient = DynamoDBDocumentClient.from(client); + +// Perform operations on document client. 
+ +ddbDocClient.destroy(); // no-op +client.destroy(); // destroys DynamoDBClient +``` diff --git a/amplify/functions/downloadDocument/node_modules/@aws-sdk/lib-dynamodb/dist-cjs/index.js b/amplify/functions/downloadDocument/node_modules/@aws-sdk/lib-dynamodb/dist-cjs/index.js new file mode 100644 index 0000000..b8cf754 --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@aws-sdk/lib-dynamodb/dist-cjs/index.js @@ -0,0 +1,1053 @@ +"use strict"; +var __defProp = Object.defineProperty; +var __getOwnPropDesc = Object.getOwnPropertyDescriptor; +var __getOwnPropNames = Object.getOwnPropertyNames; +var __hasOwnProp = Object.prototype.hasOwnProperty; +var __name = (target, value) => __defProp(target, "name", { value, configurable: true }); +var __export = (target, all) => { + for (var name in all) + __defProp(target, name, { get: all[name], enumerable: true }); +}; +var __copyProps = (to, from, except, desc) => { + if (from && typeof from === "object" || typeof from === "function") { + for (let key of __getOwnPropNames(from)) + if (!__hasOwnProp.call(to, key) && key !== except) + __defProp(to, key, { get: () => from[key], enumerable: !(desc = __getOwnPropDesc(from, key)) || desc.enumerable }); + } + return to; +}; +var __toCommonJS = (mod) => __copyProps(__defProp({}, "__esModule", { value: true }), mod); + +// src/index.ts +var index_exports = {}; +__export(index_exports, { + BatchExecuteStatementCommand: () => BatchExecuteStatementCommand, + BatchGetCommand: () => BatchGetCommand, + BatchWriteCommand: () => BatchWriteCommand, + DeleteCommand: () => DeleteCommand, + DynamoDBDocument: () => DynamoDBDocument, + DynamoDBDocumentClient: () => DynamoDBDocumentClient, + DynamoDBDocumentClientCommand: () => DynamoDBDocumentClientCommand, + ExecuteStatementCommand: () => ExecuteStatementCommand, + ExecuteTransactionCommand: () => ExecuteTransactionCommand, + GetCommand: () => GetCommand, + NativeAttributeBinary: () => import_util_dynamodb.NativeAttributeBinary, + 
NativeAttributeValue: () => import_util_dynamodb.NativeAttributeValue, + NativeScalarAttributeValue: () => import_util_dynamodb.NativeScalarAttributeValue, + NumberValue: () => import_util_dynamodb.NumberValueImpl, + PaginationConfiguration: () => import_types.PaginationConfiguration, + PutCommand: () => PutCommand, + QueryCommand: () => QueryCommand, + ScanCommand: () => ScanCommand, + TransactGetCommand: () => TransactGetCommand, + TransactWriteCommand: () => TransactWriteCommand, + UpdateCommand: () => UpdateCommand, + __Client: () => import_smithy_client.Client, + marshallOptions: () => import_util_dynamodb.marshallOptions, + paginateQuery: () => paginateQuery, + paginateScan: () => paginateScan, + unmarshallOptions: () => import_util_dynamodb.unmarshallOptions +}); +module.exports = __toCommonJS(index_exports); + +// src/commands/BatchExecuteStatementCommand.ts +var import_client_dynamodb = require("@aws-sdk/client-dynamodb"); + + +// src/baseCommand/DynamoDBDocumentClientCommand.ts +var import_core = require("@aws-sdk/core"); +var import_smithy_client = require("@smithy/smithy-client"); + +// src/commands/utils.ts +var import_util_dynamodb = require("@aws-sdk/util-dynamodb"); +var SELF = null; +var ALL_VALUES = {}; +var ALL_MEMBERS = []; +var NEXT_LEVEL = "*"; +var processObj = /* @__PURE__ */ __name((obj, processFunc, keyNodes) => { + if (obj !== void 0) { + if (keyNodes == null) { + return processFunc(obj); + } else { + const keys = Object.keys(keyNodes); + const goToNextLevel = keys.length === 1 && keys[0] === NEXT_LEVEL; + const someChildren = keys.length >= 1 && !goToNextLevel; + const allChildren = keys.length === 0; + if (someChildren) { + return processKeysInObj(obj, processFunc, keyNodes); + } else if (allChildren) { + return processAllKeysInObj(obj, processFunc, SELF); + } else if (goToNextLevel) { + return Object.entries(obj ?? 
{}).reduce((acc, [k, v]) => { + if (typeof v !== "function") { + acc[k] = processObj(v, processFunc, keyNodes[NEXT_LEVEL]); + } + return acc; + }, Array.isArray(obj) ? [] : {}); + } + } + } + return void 0; +}, "processObj"); +var processKeysInObj = /* @__PURE__ */ __name((obj, processFunc, keyNodes) => { + let accumulator; + if (Array.isArray(obj)) { + accumulator = obj.filter((item) => typeof item !== "function"); + } else { + accumulator = {}; + for (const [k, v] of Object.entries(obj)) { + if (typeof v !== "function") { + accumulator[k] = v; + } + } + } + for (const [nodeKey, nodes] of Object.entries(keyNodes)) { + if (typeof obj[nodeKey] === "function") { + continue; + } + const processedValue = processObj(obj[nodeKey], processFunc, nodes); + if (processedValue !== void 0 && typeof processedValue !== "function") { + accumulator[nodeKey] = processedValue; + } + } + return accumulator; +}, "processKeysInObj"); +var processAllKeysInObj = /* @__PURE__ */ __name((obj, processFunc, keyNodes) => { + if (Array.isArray(obj)) { + return obj.filter((item) => typeof item !== "function").map((item) => processObj(item, processFunc, keyNodes)); + } + return Object.entries(obj).reduce((acc, [key, value]) => { + if (typeof value === "function") { + return acc; + } + const processedValue = processObj(value, processFunc, keyNodes); + if (processedValue !== void 0 && typeof processedValue !== "function") { + acc[key] = processedValue; + } + return acc; + }, {}); +}, "processAllKeysInObj"); +var marshallInput = /* @__PURE__ */ __name((obj, keyNodes, options) => { + const marshallFunc = /* @__PURE__ */ __name((toMarshall) => (0, import_util_dynamodb.marshall)(toMarshall, options), "marshallFunc"); + return processKeysInObj(obj, marshallFunc, keyNodes); +}, "marshallInput"); +var unmarshallOutput = /* @__PURE__ */ __name((obj, keyNodes, options) => { + const unmarshallFunc = /* @__PURE__ */ __name((toMarshall) => (0, import_util_dynamodb.unmarshall)(toMarshall, options), 
"unmarshallFunc"); + return processKeysInObj(obj, unmarshallFunc, keyNodes); +}, "unmarshallOutput"); + +// src/baseCommand/DynamoDBDocumentClientCommand.ts +var DynamoDBDocumentClientCommand = class extends import_smithy_client.Command { + static { + __name(this, "DynamoDBDocumentClientCommand"); + } + addMarshallingMiddleware(configuration) { + const { marshallOptions: marshallOptions3 = {}, unmarshallOptions: unmarshallOptions3 = {} } = configuration.translateConfig || {}; + marshallOptions3.convertTopLevelContainer = marshallOptions3.convertTopLevelContainer ?? true; + unmarshallOptions3.convertWithoutMapWrapper = unmarshallOptions3.convertWithoutMapWrapper ?? true; + this.clientCommand.middlewareStack.addRelativeTo( + (next, context) => async (args) => { + (0, import_core.setFeature)(context, "DDB_MAPPER", "d"); + args.input = marshallInput(args.input, this.inputKeyNodes, marshallOptions3); + return next(args); + }, + { + name: "DocumentMarshall", + relation: "before", + toMiddleware: "serializerMiddleware", + override: true + } + ); + this.clientCommand.middlewareStack.addRelativeTo( + (next, context) => async (args) => { + const deserialized = await next(args); + deserialized.output = unmarshallOutput(deserialized.output, this.outputKeyNodes, unmarshallOptions3); + return deserialized; + }, + { + name: "DocumentUnmarshall", + relation: "before", + toMiddleware: "deserializerMiddleware", + override: true + } + ); + } +}; + +// src/commands/BatchExecuteStatementCommand.ts +var BatchExecuteStatementCommand = class extends DynamoDBDocumentClientCommand { + constructor(input) { + super(); + this.input = input; + this.clientCommand = new import_client_dynamodb.BatchExecuteStatementCommand(this.input); + this.middlewareStack = this.clientCommand.middlewareStack; + } + static { + __name(this, "BatchExecuteStatementCommand"); + } + inputKeyNodes = { + Statements: { + "*": { + Parameters: ALL_MEMBERS + // set/list of AttributeValue + } + } + }; + outputKeyNodes = { + 
Responses: { + "*": { + Error: { + Item: ALL_VALUES + // map with AttributeValue + }, + Item: ALL_VALUES + // map with AttributeValue + } + } + }; + clientCommand; + middlewareStack; + /** + * @internal + */ + resolveMiddleware(clientStack, configuration, options) { + this.addMarshallingMiddleware(configuration); + const stack = clientStack.concat(this.middlewareStack); + const handler = this.clientCommand.resolveMiddleware(stack, configuration, options); + return async () => handler(this.clientCommand); + } +}; + +// src/commands/BatchGetCommand.ts + + +var BatchGetCommand = class extends DynamoDBDocumentClientCommand { + constructor(input) { + super(); + this.input = input; + this.clientCommand = new import_client_dynamodb.BatchGetItemCommand(this.input); + this.middlewareStack = this.clientCommand.middlewareStack; + } + static { + __name(this, "BatchGetCommand"); + } + inputKeyNodes = { + RequestItems: { + "*": { + Keys: { + "*": ALL_VALUES + // map with AttributeValue + } + } + } + }; + outputKeyNodes = { + Responses: { + "*": { + "*": ALL_VALUES + // map with AttributeValue + } + }, + UnprocessedKeys: { + "*": { + Keys: { + "*": ALL_VALUES + // map with AttributeValue + } + } + } + }; + clientCommand; + middlewareStack; + /** + * @internal + */ + resolveMiddleware(clientStack, configuration, options) { + this.addMarshallingMiddleware(configuration); + const stack = clientStack.concat(this.middlewareStack); + const handler = this.clientCommand.resolveMiddleware(stack, configuration, options); + return async () => handler(this.clientCommand); + } +}; + +// src/commands/BatchWriteCommand.ts + + +var BatchWriteCommand = class extends DynamoDBDocumentClientCommand { + constructor(input) { + super(); + this.input = input; + this.clientCommand = new import_client_dynamodb.BatchWriteItemCommand(this.input); + this.middlewareStack = this.clientCommand.middlewareStack; + } + static { + __name(this, "BatchWriteCommand"); + } + inputKeyNodes = { + RequestItems: { + "*": { 
+ "*": { + PutRequest: { + Item: ALL_VALUES + // map with AttributeValue + }, + DeleteRequest: { + Key: ALL_VALUES + // map with AttributeValue + } + } + } + } + }; + outputKeyNodes = { + UnprocessedItems: { + "*": { + "*": { + PutRequest: { + Item: ALL_VALUES + // map with AttributeValue + }, + DeleteRequest: { + Key: ALL_VALUES + // map with AttributeValue + } + } + } + }, + ItemCollectionMetrics: { + "*": { + "*": { + ItemCollectionKey: ALL_VALUES + // map with AttributeValue + } + } + } + }; + clientCommand; + middlewareStack; + /** + * @internal + */ + resolveMiddleware(clientStack, configuration, options) { + this.addMarshallingMiddleware(configuration); + const stack = clientStack.concat(this.middlewareStack); + const handler = this.clientCommand.resolveMiddleware(stack, configuration, options); + return async () => handler(this.clientCommand); + } +}; + +// src/commands/DeleteCommand.ts + + +var DeleteCommand = class extends DynamoDBDocumentClientCommand { + constructor(input) { + super(); + this.input = input; + this.clientCommand = new import_client_dynamodb.DeleteItemCommand(this.input); + this.middlewareStack = this.clientCommand.middlewareStack; + } + static { + __name(this, "DeleteCommand"); + } + inputKeyNodes = { + Key: ALL_VALUES, + // map with AttributeValue + Expected: { + "*": { + Value: SELF, + AttributeValueList: ALL_MEMBERS + // set/list of AttributeValue + } + }, + ExpressionAttributeValues: ALL_VALUES + // map with AttributeValue + }; + outputKeyNodes = { + Attributes: ALL_VALUES, + // map with AttributeValue + ItemCollectionMetrics: { + ItemCollectionKey: ALL_VALUES + // map with AttributeValue + } + }; + clientCommand; + middlewareStack; + /** + * @internal + */ + resolveMiddleware(clientStack, configuration, options) { + this.addMarshallingMiddleware(configuration); + const stack = clientStack.concat(this.middlewareStack); + const handler = this.clientCommand.resolveMiddleware(stack, configuration, options); + return async () => 
handler(this.clientCommand); + } +}; + +// src/commands/ExecuteStatementCommand.ts + + +var ExecuteStatementCommand = class extends DynamoDBDocumentClientCommand { + constructor(input) { + super(); + this.input = input; + this.clientCommand = new import_client_dynamodb.ExecuteStatementCommand(this.input); + this.middlewareStack = this.clientCommand.middlewareStack; + } + static { + __name(this, "ExecuteStatementCommand"); + } + inputKeyNodes = { + Parameters: ALL_MEMBERS + // set/list of AttributeValue + }; + outputKeyNodes = { + Items: { + "*": ALL_VALUES + // map with AttributeValue + }, + LastEvaluatedKey: ALL_VALUES + // map with AttributeValue + }; + clientCommand; + middlewareStack; + /** + * @internal + */ + resolveMiddleware(clientStack, configuration, options) { + this.addMarshallingMiddleware(configuration); + const stack = clientStack.concat(this.middlewareStack); + const handler = this.clientCommand.resolveMiddleware(stack, configuration, options); + return async () => handler(this.clientCommand); + } +}; + +// src/commands/ExecuteTransactionCommand.ts + + +var ExecuteTransactionCommand = class extends DynamoDBDocumentClientCommand { + constructor(input) { + super(); + this.input = input; + this.clientCommand = new import_client_dynamodb.ExecuteTransactionCommand(this.input); + this.middlewareStack = this.clientCommand.middlewareStack; + } + static { + __name(this, "ExecuteTransactionCommand"); + } + inputKeyNodes = { + TransactStatements: { + "*": { + Parameters: ALL_MEMBERS + // set/list of AttributeValue + } + } + }; + outputKeyNodes = { + Responses: { + "*": { + Item: ALL_VALUES + // map with AttributeValue + } + } + }; + clientCommand; + middlewareStack; + /** + * @internal + */ + resolveMiddleware(clientStack, configuration, options) { + this.addMarshallingMiddleware(configuration); + const stack = clientStack.concat(this.middlewareStack); + const handler = this.clientCommand.resolveMiddleware(stack, configuration, options); + return async () => 
handler(this.clientCommand); + } +}; + +// src/commands/GetCommand.ts + + +var GetCommand = class extends DynamoDBDocumentClientCommand { + constructor(input) { + super(); + this.input = input; + this.clientCommand = new import_client_dynamodb.GetItemCommand(this.input); + this.middlewareStack = this.clientCommand.middlewareStack; + } + static { + __name(this, "GetCommand"); + } + inputKeyNodes = { + Key: ALL_VALUES + // map with AttributeValue + }; + outputKeyNodes = { + Item: ALL_VALUES + // map with AttributeValue + }; + clientCommand; + middlewareStack; + /** + * @internal + */ + resolveMiddleware(clientStack, configuration, options) { + this.addMarshallingMiddleware(configuration); + const stack = clientStack.concat(this.middlewareStack); + const handler = this.clientCommand.resolveMiddleware(stack, configuration, options); + return async () => handler(this.clientCommand); + } +}; + +// src/commands/PutCommand.ts + + +var PutCommand = class extends DynamoDBDocumentClientCommand { + constructor(input) { + super(); + this.input = input; + this.clientCommand = new import_client_dynamodb.PutItemCommand(this.input); + this.middlewareStack = this.clientCommand.middlewareStack; + } + static { + __name(this, "PutCommand"); + } + inputKeyNodes = { + Item: ALL_VALUES, + // map with AttributeValue + Expected: { + "*": { + Value: SELF, + AttributeValueList: ALL_MEMBERS + // set/list of AttributeValue + } + }, + ExpressionAttributeValues: ALL_VALUES + // map with AttributeValue + }; + outputKeyNodes = { + Attributes: ALL_VALUES, + // map with AttributeValue + ItemCollectionMetrics: { + ItemCollectionKey: ALL_VALUES + // map with AttributeValue + } + }; + clientCommand; + middlewareStack; + /** + * @internal + */ + resolveMiddleware(clientStack, configuration, options) { + this.addMarshallingMiddleware(configuration); + const stack = clientStack.concat(this.middlewareStack); + const handler = this.clientCommand.resolveMiddleware(stack, configuration, options); + return 
async () => handler(this.clientCommand); + } +}; + +// src/commands/QueryCommand.ts + + +var QueryCommand = class extends DynamoDBDocumentClientCommand { + constructor(input) { + super(); + this.input = input; + this.clientCommand = new import_client_dynamodb.QueryCommand(this.input); + this.middlewareStack = this.clientCommand.middlewareStack; + } + static { + __name(this, "QueryCommand"); + } + inputKeyNodes = { + KeyConditions: { + "*": { + AttributeValueList: ALL_MEMBERS + // set/list of AttributeValue + } + }, + QueryFilter: { + "*": { + AttributeValueList: ALL_MEMBERS + // set/list of AttributeValue + } + }, + ExclusiveStartKey: ALL_VALUES, + // map with AttributeValue + ExpressionAttributeValues: ALL_VALUES + // map with AttributeValue + }; + outputKeyNodes = { + Items: { + "*": ALL_VALUES + // map with AttributeValue + }, + LastEvaluatedKey: ALL_VALUES + // map with AttributeValue + }; + clientCommand; + middlewareStack; + /** + * @internal + */ + resolveMiddleware(clientStack, configuration, options) { + this.addMarshallingMiddleware(configuration); + const stack = clientStack.concat(this.middlewareStack); + const handler = this.clientCommand.resolveMiddleware(stack, configuration, options); + return async () => handler(this.clientCommand); + } +}; + +// src/commands/ScanCommand.ts + + +var ScanCommand = class extends DynamoDBDocumentClientCommand { + constructor(input) { + super(); + this.input = input; + this.clientCommand = new import_client_dynamodb.ScanCommand(this.input); + this.middlewareStack = this.clientCommand.middlewareStack; + } + static { + __name(this, "ScanCommand"); + } + inputKeyNodes = { + ScanFilter: { + "*": { + AttributeValueList: ALL_MEMBERS + // set/list of AttributeValue + } + }, + ExclusiveStartKey: ALL_VALUES, + // map with AttributeValue + ExpressionAttributeValues: ALL_VALUES + // map with AttributeValue + }; + outputKeyNodes = { + Items: { + "*": ALL_VALUES + // map with AttributeValue + }, + LastEvaluatedKey: ALL_VALUES + // 
map with AttributeValue + }; + clientCommand; + middlewareStack; + /** + * @internal + */ + resolveMiddleware(clientStack, configuration, options) { + this.addMarshallingMiddleware(configuration); + const stack = clientStack.concat(this.middlewareStack); + const handler = this.clientCommand.resolveMiddleware(stack, configuration, options); + return async () => handler(this.clientCommand); + } +}; + +// src/commands/TransactGetCommand.ts + + +var TransactGetCommand = class extends DynamoDBDocumentClientCommand { + constructor(input) { + super(); + this.input = input; + this.clientCommand = new import_client_dynamodb.TransactGetItemsCommand(this.input); + this.middlewareStack = this.clientCommand.middlewareStack; + } + static { + __name(this, "TransactGetCommand"); + } + inputKeyNodes = { + TransactItems: { + "*": { + Get: { + Key: ALL_VALUES + // map with AttributeValue + } + } + } + }; + outputKeyNodes = { + Responses: { + "*": { + Item: ALL_VALUES + // map with AttributeValue + } + } + }; + clientCommand; + middlewareStack; + /** + * @internal + */ + resolveMiddleware(clientStack, configuration, options) { + this.addMarshallingMiddleware(configuration); + const stack = clientStack.concat(this.middlewareStack); + const handler = this.clientCommand.resolveMiddleware(stack, configuration, options); + return async () => handler(this.clientCommand); + } +}; + +// src/commands/TransactWriteCommand.ts + + +var TransactWriteCommand = class extends DynamoDBDocumentClientCommand { + constructor(input) { + super(); + this.input = input; + this.clientCommand = new import_client_dynamodb.TransactWriteItemsCommand(this.input); + this.middlewareStack = this.clientCommand.middlewareStack; + } + static { + __name(this, "TransactWriteCommand"); + } + inputKeyNodes = { + TransactItems: { + "*": { + ConditionCheck: { + Key: ALL_VALUES, + // map with AttributeValue + ExpressionAttributeValues: ALL_VALUES + // map with AttributeValue + }, + Put: { + Item: ALL_VALUES, + // map with 
AttributeValue + ExpressionAttributeValues: ALL_VALUES + // map with AttributeValue + }, + Delete: { + Key: ALL_VALUES, + // map with AttributeValue + ExpressionAttributeValues: ALL_VALUES + // map with AttributeValue + }, + Update: { + Key: ALL_VALUES, + // map with AttributeValue + ExpressionAttributeValues: ALL_VALUES + // map with AttributeValue + } + } + } + }; + outputKeyNodes = { + ItemCollectionMetrics: { + "*": { + "*": { + ItemCollectionKey: ALL_VALUES + // map with AttributeValue + } + } + } + }; + clientCommand; + middlewareStack; + /** + * @internal + */ + resolveMiddleware(clientStack, configuration, options) { + this.addMarshallingMiddleware(configuration); + const stack = clientStack.concat(this.middlewareStack); + const handler = this.clientCommand.resolveMiddleware(stack, configuration, options); + return async () => handler(this.clientCommand); + } +}; + +// src/commands/UpdateCommand.ts + + +var UpdateCommand = class extends DynamoDBDocumentClientCommand { + constructor(input) { + super(); + this.input = input; + this.clientCommand = new import_client_dynamodb.UpdateItemCommand(this.input); + this.middlewareStack = this.clientCommand.middlewareStack; + } + static { + __name(this, "UpdateCommand"); + } + inputKeyNodes = { + Key: ALL_VALUES, + // map with AttributeValue + AttributeUpdates: { + "*": { + Value: SELF + } + }, + Expected: { + "*": { + Value: SELF, + AttributeValueList: ALL_MEMBERS + // set/list of AttributeValue + } + }, + ExpressionAttributeValues: ALL_VALUES + // map with AttributeValue + }; + outputKeyNodes = { + Attributes: ALL_VALUES, + // map with AttributeValue + ItemCollectionMetrics: { + ItemCollectionKey: ALL_VALUES + // map with AttributeValue + } + }; + clientCommand; + middlewareStack; + /** + * @internal + */ + resolveMiddleware(clientStack, configuration, options) { + this.addMarshallingMiddleware(configuration); + const stack = clientStack.concat(this.middlewareStack); + const handler = 
this.clientCommand.resolveMiddleware(stack, configuration, options); + return async () => handler(this.clientCommand); + } +}; + +// src/DynamoDBDocumentClient.ts + +var DynamoDBDocumentClient = class _DynamoDBDocumentClient extends import_smithy_client.Client { + static { + __name(this, "DynamoDBDocumentClient"); + } + config; + constructor(client, translateConfig) { + super(client.config); + this.config = client.config; + this.config.translateConfig = translateConfig; + this.middlewareStack = client.middlewareStack; + if (this.config?.cacheMiddleware) { + throw new Error( + "@aws-sdk/lib-dynamodb - cacheMiddleware=true is not compatible with the DynamoDBDocumentClient. This option must be set to false." + ); + } + } + static from(client, translateConfig) { + return new _DynamoDBDocumentClient(client, translateConfig); + } + destroy() { + } +}; + +// src/DynamoDBDocument.ts +var DynamoDBDocument = class _DynamoDBDocument extends DynamoDBDocumentClient { + static { + __name(this, "DynamoDBDocument"); + } + static from(client, translateConfig) { + return new _DynamoDBDocument(client, translateConfig); + } + batchExecuteStatement(args, optionsOrCb, cb) { + const command = new BatchExecuteStatementCommand(args); + if (typeof optionsOrCb === "function") { + this.send(command, optionsOrCb); + } else if (typeof cb === "function") { + if (typeof optionsOrCb !== "object") { + throw new Error(`Expect http options but get ${typeof optionsOrCb}`); + } + this.send(command, optionsOrCb || {}, cb); + } else { + return this.send(command, optionsOrCb); + } + } + batchGet(args, optionsOrCb, cb) { + const command = new BatchGetCommand(args); + if (typeof optionsOrCb === "function") { + this.send(command, optionsOrCb); + } else if (typeof cb === "function") { + if (typeof optionsOrCb !== "object") { + throw new Error(`Expect http options but get ${typeof optionsOrCb}`); + } + this.send(command, optionsOrCb || {}, cb); + } else { + return this.send(command, optionsOrCb); + } + } + 
batchWrite(args, optionsOrCb, cb) { + const command = new BatchWriteCommand(args); + if (typeof optionsOrCb === "function") { + this.send(command, optionsOrCb); + } else if (typeof cb === "function") { + if (typeof optionsOrCb !== "object") { + throw new Error(`Expect http options but get ${typeof optionsOrCb}`); + } + this.send(command, optionsOrCb || {}, cb); + } else { + return this.send(command, optionsOrCb); + } + } + delete(args, optionsOrCb, cb) { + const command = new DeleteCommand(args); + if (typeof optionsOrCb === "function") { + this.send(command, optionsOrCb); + } else if (typeof cb === "function") { + if (typeof optionsOrCb !== "object") { + throw new Error(`Expect http options but get ${typeof optionsOrCb}`); + } + this.send(command, optionsOrCb || {}, cb); + } else { + return this.send(command, optionsOrCb); + } + } + executeStatement(args, optionsOrCb, cb) { + const command = new ExecuteStatementCommand(args); + if (typeof optionsOrCb === "function") { + this.send(command, optionsOrCb); + } else if (typeof cb === "function") { + if (typeof optionsOrCb !== "object") { + throw new Error(`Expect http options but get ${typeof optionsOrCb}`); + } + this.send(command, optionsOrCb || {}, cb); + } else { + return this.send(command, optionsOrCb); + } + } + executeTransaction(args, optionsOrCb, cb) { + const command = new ExecuteTransactionCommand(args); + if (typeof optionsOrCb === "function") { + this.send(command, optionsOrCb); + } else if (typeof cb === "function") { + if (typeof optionsOrCb !== "object") { + throw new Error(`Expect http options but get ${typeof optionsOrCb}`); + } + this.send(command, optionsOrCb || {}, cb); + } else { + return this.send(command, optionsOrCb); + } + } + get(args, optionsOrCb, cb) { + const command = new GetCommand(args); + if (typeof optionsOrCb === "function") { + this.send(command, optionsOrCb); + } else if (typeof cb === "function") { + if (typeof optionsOrCb !== "object") { + throw new Error(`Expect http options but 
get ${typeof optionsOrCb}`); + } + this.send(command, optionsOrCb || {}, cb); + } else { + return this.send(command, optionsOrCb); + } + } + put(args, optionsOrCb, cb) { + const command = new PutCommand(args); + if (typeof optionsOrCb === "function") { + this.send(command, optionsOrCb); + } else if (typeof cb === "function") { + if (typeof optionsOrCb !== "object") { + throw new Error(`Expect http options but get ${typeof optionsOrCb}`); + } + this.send(command, optionsOrCb || {}, cb); + } else { + return this.send(command, optionsOrCb); + } + } + query(args, optionsOrCb, cb) { + const command = new QueryCommand(args); + if (typeof optionsOrCb === "function") { + this.send(command, optionsOrCb); + } else if (typeof cb === "function") { + if (typeof optionsOrCb !== "object") { + throw new Error(`Expect http options but get ${typeof optionsOrCb}`); + } + this.send(command, optionsOrCb || {}, cb); + } else { + return this.send(command, optionsOrCb); + } + } + scan(args, optionsOrCb, cb) { + const command = new ScanCommand(args); + if (typeof optionsOrCb === "function") { + this.send(command, optionsOrCb); + } else if (typeof cb === "function") { + if (typeof optionsOrCb !== "object") { + throw new Error(`Expect http options but get ${typeof optionsOrCb}`); + } + this.send(command, optionsOrCb || {}, cb); + } else { + return this.send(command, optionsOrCb); + } + } + transactGet(args, optionsOrCb, cb) { + const command = new TransactGetCommand(args); + if (typeof optionsOrCb === "function") { + this.send(command, optionsOrCb); + } else if (typeof cb === "function") { + if (typeof optionsOrCb !== "object") { + throw new Error(`Expect http options but get ${typeof optionsOrCb}`); + } + this.send(command, optionsOrCb || {}, cb); + } else { + return this.send(command, optionsOrCb); + } + } + transactWrite(args, optionsOrCb, cb) { + const command = new TransactWriteCommand(args); + if (typeof optionsOrCb === "function") { + this.send(command, optionsOrCb); + } else if 
(typeof cb === "function") { + if (typeof optionsOrCb !== "object") { + throw new Error(`Expect http options but get ${typeof optionsOrCb}`); + } + this.send(command, optionsOrCb || {}, cb); + } else { + return this.send(command, optionsOrCb); + } + } + update(args, optionsOrCb, cb) { + const command = new UpdateCommand(args); + if (typeof optionsOrCb === "function") { + this.send(command, optionsOrCb); + } else if (typeof cb === "function") { + if (typeof optionsOrCb !== "object") { + throw new Error(`Expect http options but get ${typeof optionsOrCb}`); + } + this.send(command, optionsOrCb || {}, cb); + } else { + return this.send(command, optionsOrCb); + } + } +}; + +// src/pagination/Interfaces.ts +var import_types = require("@smithy/types"); + +// src/pagination/QueryPaginator.ts +var import_core2 = require("@smithy/core"); + +var paginateQuery = (0, import_core2.createPaginator)(DynamoDBDocumentClient, QueryCommand, "ExclusiveStartKey", "LastEvaluatedKey", "Limit"); + +// src/pagination/ScanPaginator.ts +var import_core3 = require("@smithy/core"); + +var paginateScan = (0, import_core3.createPaginator)(DynamoDBDocumentClient, ScanCommand, "ExclusiveStartKey", "LastEvaluatedKey", "Limit"); + +// src/index.ts + + + +// Annotate the CommonJS export names for ESM import in node: + +0 && (module.exports = { + NumberValue, + DynamoDBDocument, + __Client, + DynamoDBDocumentClient, + DynamoDBDocumentClientCommand, + $Command, + BatchExecuteStatementCommand, + BatchGetCommand, + BatchWriteCommand, + DeleteCommand, + ExecuteStatementCommand, + ExecuteTransactionCommand, + GetCommand, + PutCommand, + QueryCommand, + ScanCommand, + TransactGetCommand, + TransactWriteCommand, + UpdateCommand, + paginateQuery, + paginateScan +}); + diff --git a/amplify/functions/downloadDocument/node_modules/@aws-sdk/lib-dynamodb/dist-es/DynamoDBDocument.js b/amplify/functions/downloadDocument/node_modules/@aws-sdk/lib-dynamodb/dist-es/DynamoDBDocument.js new file mode 100644 index 
0000000..206f25a --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@aws-sdk/lib-dynamodb/dist-es/DynamoDBDocument.js @@ -0,0 +1,214 @@ +import { BatchExecuteStatementCommand, } from "./commands/BatchExecuteStatementCommand"; +import { BatchGetCommand } from "./commands/BatchGetCommand"; +import { BatchWriteCommand } from "./commands/BatchWriteCommand"; +import { DeleteCommand } from "./commands/DeleteCommand"; +import { ExecuteStatementCommand, } from "./commands/ExecuteStatementCommand"; +import { ExecuteTransactionCommand, } from "./commands/ExecuteTransactionCommand"; +import { GetCommand } from "./commands/GetCommand"; +import { PutCommand } from "./commands/PutCommand"; +import { QueryCommand } from "./commands/QueryCommand"; +import { ScanCommand } from "./commands/ScanCommand"; +import { TransactGetCommand } from "./commands/TransactGetCommand"; +import { TransactWriteCommand, } from "./commands/TransactWriteCommand"; +import { UpdateCommand } from "./commands/UpdateCommand"; +import { DynamoDBDocumentClient } from "./DynamoDBDocumentClient"; +export class DynamoDBDocument extends DynamoDBDocumentClient { + static from(client, translateConfig) { + return new DynamoDBDocument(client, translateConfig); + } + batchExecuteStatement(args, optionsOrCb, cb) { + const command = new BatchExecuteStatementCommand(args); + if (typeof optionsOrCb === "function") { + this.send(command, optionsOrCb); + } + else if (typeof cb === "function") { + if (typeof optionsOrCb !== "object") { + throw new Error(`Expect http options but get ${typeof optionsOrCb}`); + } + this.send(command, optionsOrCb || {}, cb); + } + else { + return this.send(command, optionsOrCb); + } + } + batchGet(args, optionsOrCb, cb) { + const command = new BatchGetCommand(args); + if (typeof optionsOrCb === "function") { + this.send(command, optionsOrCb); + } + else if (typeof cb === "function") { + if (typeof optionsOrCb !== "object") { + throw new Error(`Expect http options but get 
${typeof optionsOrCb}`); + } + this.send(command, optionsOrCb || {}, cb); + } + else { + return this.send(command, optionsOrCb); + } + } + batchWrite(args, optionsOrCb, cb) { + const command = new BatchWriteCommand(args); + if (typeof optionsOrCb === "function") { + this.send(command, optionsOrCb); + } + else if (typeof cb === "function") { + if (typeof optionsOrCb !== "object") { + throw new Error(`Expect http options but get ${typeof optionsOrCb}`); + } + this.send(command, optionsOrCb || {}, cb); + } + else { + return this.send(command, optionsOrCb); + } + } + delete(args, optionsOrCb, cb) { + const command = new DeleteCommand(args); + if (typeof optionsOrCb === "function") { + this.send(command, optionsOrCb); + } + else if (typeof cb === "function") { + if (typeof optionsOrCb !== "object") { + throw new Error(`Expect http options but get ${typeof optionsOrCb}`); + } + this.send(command, optionsOrCb || {}, cb); + } + else { + return this.send(command, optionsOrCb); + } + } + executeStatement(args, optionsOrCb, cb) { + const command = new ExecuteStatementCommand(args); + if (typeof optionsOrCb === "function") { + this.send(command, optionsOrCb); + } + else if (typeof cb === "function") { + if (typeof optionsOrCb !== "object") { + throw new Error(`Expect http options but get ${typeof optionsOrCb}`); + } + this.send(command, optionsOrCb || {}, cb); + } + else { + return this.send(command, optionsOrCb); + } + } + executeTransaction(args, optionsOrCb, cb) { + const command = new ExecuteTransactionCommand(args); + if (typeof optionsOrCb === "function") { + this.send(command, optionsOrCb); + } + else if (typeof cb === "function") { + if (typeof optionsOrCb !== "object") { + throw new Error(`Expect http options but get ${typeof optionsOrCb}`); + } + this.send(command, optionsOrCb || {}, cb); + } + else { + return this.send(command, optionsOrCb); + } + } + get(args, optionsOrCb, cb) { + const command = new GetCommand(args); + if (typeof optionsOrCb === "function") { + 
this.send(command, optionsOrCb); + } + else if (typeof cb === "function") { + if (typeof optionsOrCb !== "object") { + throw new Error(`Expect http options but get ${typeof optionsOrCb}`); + } + this.send(command, optionsOrCb || {}, cb); + } + else { + return this.send(command, optionsOrCb); + } + } + put(args, optionsOrCb, cb) { + const command = new PutCommand(args); + if (typeof optionsOrCb === "function") { + this.send(command, optionsOrCb); + } + else if (typeof cb === "function") { + if (typeof optionsOrCb !== "object") { + throw new Error(`Expect http options but get ${typeof optionsOrCb}`); + } + this.send(command, optionsOrCb || {}, cb); + } + else { + return this.send(command, optionsOrCb); + } + } + query(args, optionsOrCb, cb) { + const command = new QueryCommand(args); + if (typeof optionsOrCb === "function") { + this.send(command, optionsOrCb); + } + else if (typeof cb === "function") { + if (typeof optionsOrCb !== "object") { + throw new Error(`Expect http options but get ${typeof optionsOrCb}`); + } + this.send(command, optionsOrCb || {}, cb); + } + else { + return this.send(command, optionsOrCb); + } + } + scan(args, optionsOrCb, cb) { + const command = new ScanCommand(args); + if (typeof optionsOrCb === "function") { + this.send(command, optionsOrCb); + } + else if (typeof cb === "function") { + if (typeof optionsOrCb !== "object") { + throw new Error(`Expect http options but get ${typeof optionsOrCb}`); + } + this.send(command, optionsOrCb || {}, cb); + } + else { + return this.send(command, optionsOrCb); + } + } + transactGet(args, optionsOrCb, cb) { + const command = new TransactGetCommand(args); + if (typeof optionsOrCb === "function") { + this.send(command, optionsOrCb); + } + else if (typeof cb === "function") { + if (typeof optionsOrCb !== "object") { + throw new Error(`Expect http options but get ${typeof optionsOrCb}`); + } + this.send(command, optionsOrCb || {}, cb); + } + else { + return this.send(command, optionsOrCb); + } + } + 
transactWrite(args, optionsOrCb, cb) { + const command = new TransactWriteCommand(args); + if (typeof optionsOrCb === "function") { + this.send(command, optionsOrCb); + } + else if (typeof cb === "function") { + if (typeof optionsOrCb !== "object") { + throw new Error(`Expect http options but get ${typeof optionsOrCb}`); + } + this.send(command, optionsOrCb || {}, cb); + } + else { + return this.send(command, optionsOrCb); + } + } + update(args, optionsOrCb, cb) { + const command = new UpdateCommand(args); + if (typeof optionsOrCb === "function") { + this.send(command, optionsOrCb); + } + else if (typeof cb === "function") { + if (typeof optionsOrCb !== "object") { + throw new Error(`Expect http options but get ${typeof optionsOrCb}`); + } + this.send(command, optionsOrCb || {}, cb); + } + else { + return this.send(command, optionsOrCb); + } + } +} diff --git a/amplify/functions/downloadDocument/node_modules/@aws-sdk/lib-dynamodb/dist-es/DynamoDBDocumentClient.js b/amplify/functions/downloadDocument/node_modules/@aws-sdk/lib-dynamodb/dist-es/DynamoDBDocumentClient.js new file mode 100644 index 0000000..f8dc9bb --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@aws-sdk/lib-dynamodb/dist-es/DynamoDBDocumentClient.js @@ -0,0 +1,20 @@ +import { Client as __Client } from "@smithy/smithy-client"; +export { __Client }; +export class DynamoDBDocumentClient extends __Client { + config; + constructor(client, translateConfig) { + super(client.config); + this.config = client.config; + this.config.translateConfig = translateConfig; + this.middlewareStack = client.middlewareStack; + if (this.config?.cacheMiddleware) { + throw new Error("@aws-sdk/lib-dynamodb - cacheMiddleware=true is not compatible with the" + + " DynamoDBDocumentClient. 
This option must be set to false."); + } + } + static from(client, translateConfig) { + return new DynamoDBDocumentClient(client, translateConfig); + } + destroy() { + } +} diff --git a/amplify/functions/downloadDocument/node_modules/@aws-sdk/lib-dynamodb/dist-es/baseCommand/DynamoDBDocumentClientCommand.js b/amplify/functions/downloadDocument/node_modules/@aws-sdk/lib-dynamodb/dist-es/baseCommand/DynamoDBDocumentClientCommand.js new file mode 100644 index 0000000..5f751c1 --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@aws-sdk/lib-dynamodb/dist-es/baseCommand/DynamoDBDocumentClientCommand.js @@ -0,0 +1,30 @@ +import { setFeature } from "@aws-sdk/core"; +import { Command as $Command } from "@smithy/smithy-client"; +import { marshallInput, unmarshallOutput } from "../commands/utils"; +export class DynamoDBDocumentClientCommand extends $Command { + addMarshallingMiddleware(configuration) { + const { marshallOptions = {}, unmarshallOptions = {} } = configuration.translateConfig || {}; + marshallOptions.convertTopLevelContainer = marshallOptions.convertTopLevelContainer ?? true; + unmarshallOptions.convertWithoutMapWrapper = unmarshallOptions.convertWithoutMapWrapper ?? 
true; + this.clientCommand.middlewareStack.addRelativeTo((next, context) => async (args) => { + setFeature(context, "DDB_MAPPER", "d"); + args.input = marshallInput(args.input, this.inputKeyNodes, marshallOptions); + return next(args); + }, { + name: "DocumentMarshall", + relation: "before", + toMiddleware: "serializerMiddleware", + override: true, + }); + this.clientCommand.middlewareStack.addRelativeTo((next, context) => async (args) => { + const deserialized = await next(args); + deserialized.output = unmarshallOutput(deserialized.output, this.outputKeyNodes, unmarshallOptions); + return deserialized; + }, { + name: "DocumentUnmarshall", + relation: "before", + toMiddleware: "deserializerMiddleware", + override: true, + }); + } +} diff --git a/amplify/functions/downloadDocument/node_modules/@aws-sdk/lib-dynamodb/dist-es/commands/BatchExecuteStatementCommand.js b/amplify/functions/downloadDocument/node_modules/@aws-sdk/lib-dynamodb/dist-es/commands/BatchExecuteStatementCommand.js new file mode 100644 index 0000000..ee5acdc --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@aws-sdk/lib-dynamodb/dist-es/commands/BatchExecuteStatementCommand.js @@ -0,0 +1,39 @@ +import { BatchExecuteStatementCommand as __BatchExecuteStatementCommand } from "@aws-sdk/client-dynamodb"; +import { Command as $Command } from "@smithy/smithy-client"; +import { DynamoDBDocumentClientCommand } from "../baseCommand/DynamoDBDocumentClientCommand"; +import { ALL_MEMBERS, ALL_VALUES } from "../commands/utils"; +export { DynamoDBDocumentClientCommand, $Command }; +export class BatchExecuteStatementCommand extends DynamoDBDocumentClientCommand { + input; + inputKeyNodes = { + Statements: { + "*": { + Parameters: ALL_MEMBERS, + }, + }, + }; + outputKeyNodes = { + Responses: { + "*": { + Error: { + Item: ALL_VALUES, + }, + Item: ALL_VALUES, + }, + }, + }; + clientCommand; + middlewareStack; + constructor(input) { + super(); + this.input = input; + this.clientCommand = new 
__BatchExecuteStatementCommand(this.input); + this.middlewareStack = this.clientCommand.middlewareStack; + } + resolveMiddleware(clientStack, configuration, options) { + this.addMarshallingMiddleware(configuration); + const stack = clientStack.concat(this.middlewareStack); + const handler = this.clientCommand.resolveMiddleware(stack, configuration, options); + return async () => handler(this.clientCommand); + } +} diff --git a/amplify/functions/downloadDocument/node_modules/@aws-sdk/lib-dynamodb/dist-es/commands/BatchGetCommand.js b/amplify/functions/downloadDocument/node_modules/@aws-sdk/lib-dynamodb/dist-es/commands/BatchGetCommand.js new file mode 100644 index 0000000..bab370e --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@aws-sdk/lib-dynamodb/dist-es/commands/BatchGetCommand.js @@ -0,0 +1,45 @@ +import { BatchGetItemCommand as __BatchGetItemCommand } from "@aws-sdk/client-dynamodb"; +import { Command as $Command } from "@smithy/smithy-client"; +import { DynamoDBDocumentClientCommand } from "../baseCommand/DynamoDBDocumentClientCommand"; +import { ALL_VALUES } from "../commands/utils"; +export { DynamoDBDocumentClientCommand, $Command }; +export class BatchGetCommand extends DynamoDBDocumentClientCommand { + input; + inputKeyNodes = { + RequestItems: { + "*": { + Keys: { + "*": ALL_VALUES, + }, + }, + }, + }; + outputKeyNodes = { + Responses: { + "*": { + "*": ALL_VALUES, + }, + }, + UnprocessedKeys: { + "*": { + Keys: { + "*": ALL_VALUES, + }, + }, + }, + }; + clientCommand; + middlewareStack; + constructor(input) { + super(); + this.input = input; + this.clientCommand = new __BatchGetItemCommand(this.input); + this.middlewareStack = this.clientCommand.middlewareStack; + } + resolveMiddleware(clientStack, configuration, options) { + this.addMarshallingMiddleware(configuration); + const stack = clientStack.concat(this.middlewareStack); + const handler = this.clientCommand.resolveMiddleware(stack, configuration, options); + return async () 
=> handler(this.clientCommand); + } +} diff --git a/amplify/functions/downloadDocument/node_modules/@aws-sdk/lib-dynamodb/dist-es/commands/BatchWriteCommand.js b/amplify/functions/downloadDocument/node_modules/@aws-sdk/lib-dynamodb/dist-es/commands/BatchWriteCommand.js new file mode 100644 index 0000000..c2a480a --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@aws-sdk/lib-dynamodb/dist-es/commands/BatchWriteCommand.js @@ -0,0 +1,57 @@ +import { BatchWriteItemCommand as __BatchWriteItemCommand } from "@aws-sdk/client-dynamodb"; +import { Command as $Command } from "@smithy/smithy-client"; +import { DynamoDBDocumentClientCommand } from "../baseCommand/DynamoDBDocumentClientCommand"; +import { ALL_VALUES } from "../commands/utils"; +export { DynamoDBDocumentClientCommand, $Command }; +export class BatchWriteCommand extends DynamoDBDocumentClientCommand { + input; + inputKeyNodes = { + RequestItems: { + "*": { + "*": { + PutRequest: { + Item: ALL_VALUES, + }, + DeleteRequest: { + Key: ALL_VALUES, + }, + }, + }, + }, + }; + outputKeyNodes = { + UnprocessedItems: { + "*": { + "*": { + PutRequest: { + Item: ALL_VALUES, + }, + DeleteRequest: { + Key: ALL_VALUES, + }, + }, + }, + }, + ItemCollectionMetrics: { + "*": { + "*": { + ItemCollectionKey: ALL_VALUES, + }, + }, + }, + }; + clientCommand; + middlewareStack; + constructor(input) { + super(); + this.input = input; + this.clientCommand = new __BatchWriteItemCommand(this.input); + this.middlewareStack = this.clientCommand.middlewareStack; + } + resolveMiddleware(clientStack, configuration, options) { + this.addMarshallingMiddleware(configuration); + const stack = clientStack.concat(this.middlewareStack); + const handler = this.clientCommand.resolveMiddleware(stack, configuration, options); + return async () => handler(this.clientCommand); + } +} diff --git a/amplify/functions/downloadDocument/node_modules/@aws-sdk/lib-dynamodb/dist-es/commands/DeleteCommand.js 
b/amplify/functions/downloadDocument/node_modules/@aws-sdk/lib-dynamodb/dist-es/commands/DeleteCommand.js new file mode 100644 index 0000000..5622ae6 --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@aws-sdk/lib-dynamodb/dist-es/commands/DeleteCommand.js @@ -0,0 +1,38 @@ +import { DeleteItemCommand as __DeleteItemCommand } from "@aws-sdk/client-dynamodb"; +import { Command as $Command } from "@smithy/smithy-client"; +import { DynamoDBDocumentClientCommand } from "../baseCommand/DynamoDBDocumentClientCommand"; +import { ALL_MEMBERS, ALL_VALUES, SELF } from "../commands/utils"; +export { DynamoDBDocumentClientCommand, $Command }; +export class DeleteCommand extends DynamoDBDocumentClientCommand { + input; + inputKeyNodes = { + Key: ALL_VALUES, + Expected: { + "*": { + Value: SELF, + AttributeValueList: ALL_MEMBERS, + }, + }, + ExpressionAttributeValues: ALL_VALUES, + }; + outputKeyNodes = { + Attributes: ALL_VALUES, + ItemCollectionMetrics: { + ItemCollectionKey: ALL_VALUES, + }, + }; + clientCommand; + middlewareStack; + constructor(input) { + super(); + this.input = input; + this.clientCommand = new __DeleteItemCommand(this.input); + this.middlewareStack = this.clientCommand.middlewareStack; + } + resolveMiddleware(clientStack, configuration, options) { + this.addMarshallingMiddleware(configuration); + const stack = clientStack.concat(this.middlewareStack); + const handler = this.clientCommand.resolveMiddleware(stack, configuration, options); + return async () => handler(this.clientCommand); + } +} diff --git a/amplify/functions/downloadDocument/node_modules/@aws-sdk/lib-dynamodb/dist-es/commands/ExecuteStatementCommand.js b/amplify/functions/downloadDocument/node_modules/@aws-sdk/lib-dynamodb/dist-es/commands/ExecuteStatementCommand.js new file mode 100644 index 0000000..e3f3cfa --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@aws-sdk/lib-dynamodb/dist-es/commands/ExecuteStatementCommand.js @@ -0,0 +1,31 @@ +import { 
ExecuteStatementCommand as __ExecuteStatementCommand } from "@aws-sdk/client-dynamodb"; +import { Command as $Command } from "@smithy/smithy-client"; +import { DynamoDBDocumentClientCommand } from "../baseCommand/DynamoDBDocumentClientCommand"; +import { ALL_MEMBERS, ALL_VALUES } from "../commands/utils"; +export { DynamoDBDocumentClientCommand, $Command }; +export class ExecuteStatementCommand extends DynamoDBDocumentClientCommand { + input; + inputKeyNodes = { + Parameters: ALL_MEMBERS, + }; + outputKeyNodes = { + Items: { + "*": ALL_VALUES, + }, + LastEvaluatedKey: ALL_VALUES, + }; + clientCommand; + middlewareStack; + constructor(input) { + super(); + this.input = input; + this.clientCommand = new __ExecuteStatementCommand(this.input); + this.middlewareStack = this.clientCommand.middlewareStack; + } + resolveMiddleware(clientStack, configuration, options) { + this.addMarshallingMiddleware(configuration); + const stack = clientStack.concat(this.middlewareStack); + const handler = this.clientCommand.resolveMiddleware(stack, configuration, options); + return async () => handler(this.clientCommand); + } +} diff --git a/amplify/functions/downloadDocument/node_modules/@aws-sdk/lib-dynamodb/dist-es/commands/ExecuteTransactionCommand.js b/amplify/functions/downloadDocument/node_modules/@aws-sdk/lib-dynamodb/dist-es/commands/ExecuteTransactionCommand.js new file mode 100644 index 0000000..3c575aa --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@aws-sdk/lib-dynamodb/dist-es/commands/ExecuteTransactionCommand.js @@ -0,0 +1,36 @@ +import { ExecuteTransactionCommand as __ExecuteTransactionCommand } from "@aws-sdk/client-dynamodb"; +import { Command as $Command } from "@smithy/smithy-client"; +import { DynamoDBDocumentClientCommand } from "../baseCommand/DynamoDBDocumentClientCommand"; +import { ALL_MEMBERS, ALL_VALUES } from "../commands/utils"; +export { DynamoDBDocumentClientCommand, $Command }; +export class ExecuteTransactionCommand extends 
DynamoDBDocumentClientCommand { + input; + inputKeyNodes = { + TransactStatements: { + "*": { + Parameters: ALL_MEMBERS, + }, + }, + }; + outputKeyNodes = { + Responses: { + "*": { + Item: ALL_VALUES, + }, + }, + }; + clientCommand; + middlewareStack; + constructor(input) { + super(); + this.input = input; + this.clientCommand = new __ExecuteTransactionCommand(this.input); + this.middlewareStack = this.clientCommand.middlewareStack; + } + resolveMiddleware(clientStack, configuration, options) { + this.addMarshallingMiddleware(configuration); + const stack = clientStack.concat(this.middlewareStack); + const handler = this.clientCommand.resolveMiddleware(stack, configuration, options); + return async () => handler(this.clientCommand); + } +} diff --git a/amplify/functions/downloadDocument/node_modules/@aws-sdk/lib-dynamodb/dist-es/commands/GetCommand.js b/amplify/functions/downloadDocument/node_modules/@aws-sdk/lib-dynamodb/dist-es/commands/GetCommand.js new file mode 100644 index 0000000..14ae2f3 --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@aws-sdk/lib-dynamodb/dist-es/commands/GetCommand.js @@ -0,0 +1,28 @@ +import { GetItemCommand as __GetItemCommand } from "@aws-sdk/client-dynamodb"; +import { Command as $Command } from "@smithy/smithy-client"; +import { DynamoDBDocumentClientCommand } from "../baseCommand/DynamoDBDocumentClientCommand"; +import { ALL_VALUES } from "../commands/utils"; +export { DynamoDBDocumentClientCommand, $Command }; +export class GetCommand extends DynamoDBDocumentClientCommand { + input; + inputKeyNodes = { + Key: ALL_VALUES, + }; + outputKeyNodes = { + Item: ALL_VALUES, + }; + clientCommand; + middlewareStack; + constructor(input) { + super(); + this.input = input; + this.clientCommand = new __GetItemCommand(this.input); + this.middlewareStack = this.clientCommand.middlewareStack; + } + resolveMiddleware(clientStack, configuration, options) { + this.addMarshallingMiddleware(configuration); + const stack = 
clientStack.concat(this.middlewareStack); + const handler = this.clientCommand.resolveMiddleware(stack, configuration, options); + return async () => handler(this.clientCommand); + } +} diff --git a/amplify/functions/downloadDocument/node_modules/@aws-sdk/lib-dynamodb/dist-es/commands/PutCommand.js b/amplify/functions/downloadDocument/node_modules/@aws-sdk/lib-dynamodb/dist-es/commands/PutCommand.js new file mode 100644 index 0000000..51ae545 --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@aws-sdk/lib-dynamodb/dist-es/commands/PutCommand.js @@ -0,0 +1,38 @@ +import { PutItemCommand as __PutItemCommand } from "@aws-sdk/client-dynamodb"; +import { Command as $Command } from "@smithy/smithy-client"; +import { DynamoDBDocumentClientCommand } from "../baseCommand/DynamoDBDocumentClientCommand"; +import { ALL_MEMBERS, ALL_VALUES, SELF } from "../commands/utils"; +export { DynamoDBDocumentClientCommand, $Command }; +export class PutCommand extends DynamoDBDocumentClientCommand { + input; + inputKeyNodes = { + Item: ALL_VALUES, + Expected: { + "*": { + Value: SELF, + AttributeValueList: ALL_MEMBERS, + }, + }, + ExpressionAttributeValues: ALL_VALUES, + }; + outputKeyNodes = { + Attributes: ALL_VALUES, + ItemCollectionMetrics: { + ItemCollectionKey: ALL_VALUES, + }, + }; + clientCommand; + middlewareStack; + constructor(input) { + super(); + this.input = input; + this.clientCommand = new __PutItemCommand(this.input); + this.middlewareStack = this.clientCommand.middlewareStack; + } + resolveMiddleware(clientStack, configuration, options) { + this.addMarshallingMiddleware(configuration); + const stack = clientStack.concat(this.middlewareStack); + const handler = this.clientCommand.resolveMiddleware(stack, configuration, options); + return async () => handler(this.clientCommand); + } +} diff --git a/amplify/functions/downloadDocument/node_modules/@aws-sdk/lib-dynamodb/dist-es/commands/QueryCommand.js 
b/amplify/functions/downloadDocument/node_modules/@aws-sdk/lib-dynamodb/dist-es/commands/QueryCommand.js new file mode 100644 index 0000000..36a5418 --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@aws-sdk/lib-dynamodb/dist-es/commands/QueryCommand.js @@ -0,0 +1,42 @@ +import { QueryCommand as __QueryCommand } from "@aws-sdk/client-dynamodb"; +import { Command as $Command } from "@smithy/smithy-client"; +import { DynamoDBDocumentClientCommand } from "../baseCommand/DynamoDBDocumentClientCommand"; +import { ALL_MEMBERS, ALL_VALUES } from "../commands/utils"; +export { DynamoDBDocumentClientCommand, $Command }; +export class QueryCommand extends DynamoDBDocumentClientCommand { + input; + inputKeyNodes = { + KeyConditions: { + "*": { + AttributeValueList: ALL_MEMBERS, + }, + }, + QueryFilter: { + "*": { + AttributeValueList: ALL_MEMBERS, + }, + }, + ExclusiveStartKey: ALL_VALUES, + ExpressionAttributeValues: ALL_VALUES, + }; + outputKeyNodes = { + Items: { + "*": ALL_VALUES, + }, + LastEvaluatedKey: ALL_VALUES, + }; + clientCommand; + middlewareStack; + constructor(input) { + super(); + this.input = input; + this.clientCommand = new __QueryCommand(this.input); + this.middlewareStack = this.clientCommand.middlewareStack; + } + resolveMiddleware(clientStack, configuration, options) { + this.addMarshallingMiddleware(configuration); + const stack = clientStack.concat(this.middlewareStack); + const handler = this.clientCommand.resolveMiddleware(stack, configuration, options); + return async () => handler(this.clientCommand); + } +} diff --git a/amplify/functions/downloadDocument/node_modules/@aws-sdk/lib-dynamodb/dist-es/commands/ScanCommand.js b/amplify/functions/downloadDocument/node_modules/@aws-sdk/lib-dynamodb/dist-es/commands/ScanCommand.js new file mode 100644 index 0000000..a5ba2c9 --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@aws-sdk/lib-dynamodb/dist-es/commands/ScanCommand.js @@ -0,0 +1,37 @@ +import { ScanCommand as 
__ScanCommand } from "@aws-sdk/client-dynamodb"; +import { Command as $Command } from "@smithy/smithy-client"; +import { DynamoDBDocumentClientCommand } from "../baseCommand/DynamoDBDocumentClientCommand"; +import { ALL_MEMBERS, ALL_VALUES } from "../commands/utils"; +export { DynamoDBDocumentClientCommand, $Command }; +export class ScanCommand extends DynamoDBDocumentClientCommand { + input; + inputKeyNodes = { + ScanFilter: { + "*": { + AttributeValueList: ALL_MEMBERS, + }, + }, + ExclusiveStartKey: ALL_VALUES, + ExpressionAttributeValues: ALL_VALUES, + }; + outputKeyNodes = { + Items: { + "*": ALL_VALUES, + }, + LastEvaluatedKey: ALL_VALUES, + }; + clientCommand; + middlewareStack; + constructor(input) { + super(); + this.input = input; + this.clientCommand = new __ScanCommand(this.input); + this.middlewareStack = this.clientCommand.middlewareStack; + } + resolveMiddleware(clientStack, configuration, options) { + this.addMarshallingMiddleware(configuration); + const stack = clientStack.concat(this.middlewareStack); + const handler = this.clientCommand.resolveMiddleware(stack, configuration, options); + return async () => handler(this.clientCommand); + } +} diff --git a/amplify/functions/downloadDocument/node_modules/@aws-sdk/lib-dynamodb/dist-es/commands/TransactGetCommand.js b/amplify/functions/downloadDocument/node_modules/@aws-sdk/lib-dynamodb/dist-es/commands/TransactGetCommand.js new file mode 100644 index 0000000..3de3ec5 --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@aws-sdk/lib-dynamodb/dist-es/commands/TransactGetCommand.js @@ -0,0 +1,38 @@ +import { TransactGetItemsCommand as __TransactGetItemsCommand } from "@aws-sdk/client-dynamodb"; +import { Command as $Command } from "@smithy/smithy-client"; +import { DynamoDBDocumentClientCommand } from "../baseCommand/DynamoDBDocumentClientCommand"; +import { ALL_VALUES } from "../commands/utils"; +export { DynamoDBDocumentClientCommand, $Command }; +export class TransactGetCommand extends 
DynamoDBDocumentClientCommand { + input; + inputKeyNodes = { + TransactItems: { + "*": { + Get: { + Key: ALL_VALUES, + }, + }, + }, + }; + outputKeyNodes = { + Responses: { + "*": { + Item: ALL_VALUES, + }, + }, + }; + clientCommand; + middlewareStack; + constructor(input) { + super(); + this.input = input; + this.clientCommand = new __TransactGetItemsCommand(this.input); + this.middlewareStack = this.clientCommand.middlewareStack; + } + resolveMiddleware(clientStack, configuration, options) { + this.addMarshallingMiddleware(configuration); + const stack = clientStack.concat(this.middlewareStack); + const handler = this.clientCommand.resolveMiddleware(stack, configuration, options); + return async () => handler(this.clientCommand); + } +} diff --git a/amplify/functions/downloadDocument/node_modules/@aws-sdk/lib-dynamodb/dist-es/commands/TransactWriteCommand.js b/amplify/functions/downloadDocument/node_modules/@aws-sdk/lib-dynamodb/dist-es/commands/TransactWriteCommand.js new file mode 100644 index 0000000..5f1fea0 --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@aws-sdk/lib-dynamodb/dist-es/commands/TransactWriteCommand.js @@ -0,0 +1,53 @@ +import { TransactWriteItemsCommand as __TransactWriteItemsCommand } from "@aws-sdk/client-dynamodb"; +import { Command as $Command } from "@smithy/smithy-client"; +import { DynamoDBDocumentClientCommand } from "../baseCommand/DynamoDBDocumentClientCommand"; +import { ALL_VALUES } from "../commands/utils"; +export { DynamoDBDocumentClientCommand, $Command }; +export class TransactWriteCommand extends DynamoDBDocumentClientCommand { + input; + inputKeyNodes = { + TransactItems: { + "*": { + ConditionCheck: { + Key: ALL_VALUES, + ExpressionAttributeValues: ALL_VALUES, + }, + Put: { + Item: ALL_VALUES, + ExpressionAttributeValues: ALL_VALUES, + }, + Delete: { + Key: ALL_VALUES, + ExpressionAttributeValues: ALL_VALUES, + }, + Update: { + Key: ALL_VALUES, + ExpressionAttributeValues: ALL_VALUES, + }, + }, + }, + }; 
+ outputKeyNodes = { + ItemCollectionMetrics: { + "*": { + "*": { + ItemCollectionKey: ALL_VALUES, + }, + }, + }, + }; + clientCommand; + middlewareStack; + constructor(input) { + super(); + this.input = input; + this.clientCommand = new __TransactWriteItemsCommand(this.input); + this.middlewareStack = this.clientCommand.middlewareStack; + } + resolveMiddleware(clientStack, configuration, options) { + this.addMarshallingMiddleware(configuration); + const stack = clientStack.concat(this.middlewareStack); + const handler = this.clientCommand.resolveMiddleware(stack, configuration, options); + return async () => handler(this.clientCommand); + } +} diff --git a/amplify/functions/downloadDocument/node_modules/@aws-sdk/lib-dynamodb/dist-es/commands/UpdateCommand.js b/amplify/functions/downloadDocument/node_modules/@aws-sdk/lib-dynamodb/dist-es/commands/UpdateCommand.js new file mode 100644 index 0000000..a5a2da1 --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@aws-sdk/lib-dynamodb/dist-es/commands/UpdateCommand.js @@ -0,0 +1,43 @@ +import { UpdateItemCommand as __UpdateItemCommand } from "@aws-sdk/client-dynamodb"; +import { Command as $Command } from "@smithy/smithy-client"; +import { DynamoDBDocumentClientCommand } from "../baseCommand/DynamoDBDocumentClientCommand"; +import { ALL_MEMBERS, ALL_VALUES, SELF } from "../commands/utils"; +export { DynamoDBDocumentClientCommand, $Command }; +export class UpdateCommand extends DynamoDBDocumentClientCommand { + input; + inputKeyNodes = { + Key: ALL_VALUES, + AttributeUpdates: { + "*": { + Value: SELF, + }, + }, + Expected: { + "*": { + Value: SELF, + AttributeValueList: ALL_MEMBERS, + }, + }, + ExpressionAttributeValues: ALL_VALUES, + }; + outputKeyNodes = { + Attributes: ALL_VALUES, + ItemCollectionMetrics: { + ItemCollectionKey: ALL_VALUES, + }, + }; + clientCommand; + middlewareStack; + constructor(input) { + super(); + this.input = input; + this.clientCommand = new __UpdateItemCommand(this.input); + 
this.middlewareStack = this.clientCommand.middlewareStack; + } + resolveMiddleware(clientStack, configuration, options) { + this.addMarshallingMiddleware(configuration); + const stack = clientStack.concat(this.middlewareStack); + const handler = this.clientCommand.resolveMiddleware(stack, configuration, options); + return async () => handler(this.clientCommand); + } +} diff --git a/amplify/functions/downloadDocument/node_modules/@aws-sdk/lib-dynamodb/dist-es/commands/index.js b/amplify/functions/downloadDocument/node_modules/@aws-sdk/lib-dynamodb/dist-es/commands/index.js new file mode 100644 index 0000000..49e8a4e --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@aws-sdk/lib-dynamodb/dist-es/commands/index.js @@ -0,0 +1,13 @@ +export * from "./BatchExecuteStatementCommand"; +export * from "./BatchGetCommand"; +export * from "./BatchWriteCommand"; +export * from "./DeleteCommand"; +export * from "./ExecuteStatementCommand"; +export * from "./ExecuteTransactionCommand"; +export * from "./GetCommand"; +export * from "./PutCommand"; +export * from "./QueryCommand"; +export * from "./ScanCommand"; +export * from "./TransactGetCommand"; +export * from "./TransactWriteCommand"; +export * from "./UpdateCommand"; diff --git a/amplify/functions/downloadDocument/node_modules/@aws-sdk/lib-dynamodb/dist-es/commands/utils.js b/amplify/functions/downloadDocument/node_modules/@aws-sdk/lib-dynamodb/dist-es/commands/utils.js new file mode 100644 index 0000000..19c4e99 --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@aws-sdk/lib-dynamodb/dist-es/commands/utils.js @@ -0,0 +1,80 @@ +import { marshall, unmarshall } from "@aws-sdk/util-dynamodb"; +export const SELF = null; +export const ALL_VALUES = {}; +export const ALL_MEMBERS = []; +const NEXT_LEVEL = "*"; +const processObj = (obj, processFunc, keyNodes) => { + if (obj !== undefined) { + if (keyNodes == null) { + return processFunc(obj); + } + else { + const keys = Object.keys(keyNodes); + const 
goToNextLevel = keys.length === 1 && keys[0] === NEXT_LEVEL; + const someChildren = keys.length >= 1 && !goToNextLevel; + const allChildren = keys.length === 0; + if (someChildren) { + return processKeysInObj(obj, processFunc, keyNodes); + } + else if (allChildren) { + return processAllKeysInObj(obj, processFunc, SELF); + } + else if (goToNextLevel) { + return Object.entries(obj ?? {}).reduce((acc, [k, v]) => { + if (typeof v !== "function") { + acc[k] = processObj(v, processFunc, keyNodes[NEXT_LEVEL]); + } + return acc; + }, (Array.isArray(obj) ? [] : {})); + } + } + } + return undefined; +}; +const processKeysInObj = (obj, processFunc, keyNodes) => { + let accumulator; + if (Array.isArray(obj)) { + accumulator = obj.filter((item) => typeof item !== "function"); + } + else { + accumulator = {}; + for (const [k, v] of Object.entries(obj)) { + if (typeof v !== "function") { + accumulator[k] = v; + } + } + } + for (const [nodeKey, nodes] of Object.entries(keyNodes)) { + if (typeof obj[nodeKey] === "function") { + continue; + } + const processedValue = processObj(obj[nodeKey], processFunc, nodes); + if (processedValue !== undefined && typeof processedValue !== "function") { + accumulator[nodeKey] = processedValue; + } + } + return accumulator; +}; +const processAllKeysInObj = (obj, processFunc, keyNodes) => { + if (Array.isArray(obj)) { + return obj.filter((item) => typeof item !== "function").map((item) => processObj(item, processFunc, keyNodes)); + } + return Object.entries(obj).reduce((acc, [key, value]) => { + if (typeof value === "function") { + return acc; + } + const processedValue = processObj(value, processFunc, keyNodes); + if (processedValue !== undefined && typeof processedValue !== "function") { + acc[key] = processedValue; + } + return acc; + }, {}); +}; +export const marshallInput = (obj, keyNodes, options) => { + const marshallFunc = (toMarshall) => marshall(toMarshall, options); + return processKeysInObj(obj, marshallFunc, keyNodes); +}; +export const 
unmarshallOutput = (obj, keyNodes, options) => { + const unmarshallFunc = (toMarshall) => unmarshall(toMarshall, options); + return processKeysInObj(obj, unmarshallFunc, keyNodes); +}; diff --git a/amplify/functions/downloadDocument/node_modules/@aws-sdk/lib-dynamodb/dist-es/index.js b/amplify/functions/downloadDocument/node_modules/@aws-sdk/lib-dynamodb/dist-es/index.js new file mode 100644 index 0000000..77c5a74 --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@aws-sdk/lib-dynamodb/dist-es/index.js @@ -0,0 +1,5 @@ +export * from "./DynamoDBDocument"; +export * from "./DynamoDBDocumentClient"; +export * from "./commands"; +export * from "./pagination"; +export { NumberValueImpl as NumberValue } from "@aws-sdk/util-dynamodb"; diff --git a/amplify/functions/downloadDocument/node_modules/@aws-sdk/lib-dynamodb/dist-es/pagination/Interfaces.js b/amplify/functions/downloadDocument/node_modules/@aws-sdk/lib-dynamodb/dist-es/pagination/Interfaces.js new file mode 100644 index 0000000..cb0ff5c --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@aws-sdk/lib-dynamodb/dist-es/pagination/Interfaces.js @@ -0,0 +1 @@ +export {}; diff --git a/amplify/functions/downloadDocument/node_modules/@aws-sdk/lib-dynamodb/dist-es/pagination/QueryPaginator.js b/amplify/functions/downloadDocument/node_modules/@aws-sdk/lib-dynamodb/dist-es/pagination/QueryPaginator.js new file mode 100644 index 0000000..da9e058 --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@aws-sdk/lib-dynamodb/dist-es/pagination/QueryPaginator.js @@ -0,0 +1,4 @@ +import { createPaginator } from "@smithy/core"; +import { QueryCommand } from "../commands/QueryCommand"; +import { DynamoDBDocumentClient } from "../DynamoDBDocumentClient"; +export const paginateQuery = createPaginator(DynamoDBDocumentClient, QueryCommand, "ExclusiveStartKey", "LastEvaluatedKey", "Limit"); diff --git 
a/amplify/functions/downloadDocument/node_modules/@aws-sdk/lib-dynamodb/dist-es/pagination/ScanPaginator.js b/amplify/functions/downloadDocument/node_modules/@aws-sdk/lib-dynamodb/dist-es/pagination/ScanPaginator.js new file mode 100644 index 0000000..ae01799 --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@aws-sdk/lib-dynamodb/dist-es/pagination/ScanPaginator.js @@ -0,0 +1,4 @@ +import { createPaginator } from "@smithy/core"; +import { ScanCommand } from "../commands/ScanCommand"; +import { DynamoDBDocumentClient } from "../DynamoDBDocumentClient"; +export const paginateScan = createPaginator(DynamoDBDocumentClient, ScanCommand, "ExclusiveStartKey", "LastEvaluatedKey", "Limit"); diff --git a/amplify/functions/downloadDocument/node_modules/@aws-sdk/lib-dynamodb/dist-es/pagination/index.js b/amplify/functions/downloadDocument/node_modules/@aws-sdk/lib-dynamodb/dist-es/pagination/index.js new file mode 100644 index 0000000..0d9540e --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@aws-sdk/lib-dynamodb/dist-es/pagination/index.js @@ -0,0 +1,3 @@ +export * from "./Interfaces"; +export * from "./QueryPaginator"; +export * from "./ScanPaginator"; diff --git a/amplify/functions/downloadDocument/node_modules/@aws-sdk/lib-dynamodb/dist-types/DynamoDBDocument.d.ts b/amplify/functions/downloadDocument/node_modules/@aws-sdk/lib-dynamodb/dist-types/DynamoDBDocument.d.ts new file mode 100644 index 0000000..38d4a20 --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@aws-sdk/lib-dynamodb/dist-types/DynamoDBDocument.d.ts @@ -0,0 +1,195 @@ +import { DynamoDBClient } from "@aws-sdk/client-dynamodb"; +import { HttpHandlerOptions as __HttpHandlerOptions } from "@smithy/types"; +import { BatchExecuteStatementCommandInput, BatchExecuteStatementCommandOutput } from "./commands/BatchExecuteStatementCommand"; +import { BatchGetCommandInput, BatchGetCommandOutput } from "./commands/BatchGetCommand"; +import { BatchWriteCommandInput, 
BatchWriteCommandOutput } from "./commands/BatchWriteCommand"; +import { DeleteCommandInput, DeleteCommandOutput } from "./commands/DeleteCommand"; +import { ExecuteStatementCommandInput, ExecuteStatementCommandOutput } from "./commands/ExecuteStatementCommand"; +import { ExecuteTransactionCommandInput, ExecuteTransactionCommandOutput } from "./commands/ExecuteTransactionCommand"; +import { GetCommandInput, GetCommandOutput } from "./commands/GetCommand"; +import { PutCommandInput, PutCommandOutput } from "./commands/PutCommand"; +import { QueryCommandInput, QueryCommandOutput } from "./commands/QueryCommand"; +import { ScanCommandInput, ScanCommandOutput } from "./commands/ScanCommand"; +import { TransactGetCommandInput, TransactGetCommandOutput } from "./commands/TransactGetCommand"; +import { TransactWriteCommandInput, TransactWriteCommandOutput } from "./commands/TransactWriteCommand"; +import { UpdateCommandInput, UpdateCommandOutput } from "./commands/UpdateCommand"; +import { DynamoDBDocumentClient, TranslateConfig } from "./DynamoDBDocumentClient"; +/** + * The document client simplifies working with items in Amazon DynamoDB by + * abstracting away the notion of attribute values. This abstraction annotates native + * JavaScript types supplied as input parameters, as well as converts annotated + * response data to native JavaScript types. + * + * ## Marshalling Input and Unmarshalling Response Data + * + * The document client affords developers the use of native JavaScript types + * instead of `AttributeValue`s to simplify the JavaScript development + * experience with Amazon DynamoDB. JavaScript objects passed in as parameters + * are marshalled into `AttributeValue` shapes required by Amazon DynamoDB. + * Responses from DynamoDB are unmarshalled into plain JavaScript objects + * by the `DocumentClient`. The `DocumentClient` does not accept + * `AttributeValue`s in favor of native JavaScript types. 
+ * + * | JavaScript Type | DynamoDB AttributeValue | + * | :-------------------------------: | ----------------------- | + * | String | S | + * | Number / BigInt | N | + * | Boolean | BOOL | + * | null | NULL | + * | Array | L | + * | Object | M | + * | Set\ | BS | + * | Set\ | NS | + * | Set\ | SS | + * | Uint8Array, Buffer, File, Blob... | B | + * + * ### Example + * + * Here is an example list which is sent to DynamoDB client in an operation: + * + * ```json + * { "L": [{ "NULL": true }, { "BOOL": false }, { "N": 1 }, { "S": "two" }] } + * ``` + * + * The DynamoDB document client abstracts the attribute values as follows in + * both input and output: + * + * ```json + * [null, false, 1, "two"] + * ``` + * + * @see {@link https://www.npmjs.com/package/@aws-sdk/client-dynamodb | @aws-sdk/client-dynamodb} + */ +export declare class DynamoDBDocument extends DynamoDBDocumentClient { + static from(client: DynamoDBClient, translateConfig?: TranslateConfig): DynamoDBDocument; + /** + * Accepts native JavaScript types instead of `AttributeValue`s, and calls + * BatchExecuteStatementCommand operation from {@link @aws-sdk/client-dynamodb#BatchExecuteStatementCommand}. + * + * JavaScript objects passed in as parameters are marshalled into `AttributeValue` shapes + * required by Amazon DynamoDB. Responses from DynamoDB are unmarshalled into plain JavaScript objects. + */ + batchExecuteStatement(args: BatchExecuteStatementCommandInput, options?: __HttpHandlerOptions): Promise; + batchExecuteStatement(args: BatchExecuteStatementCommandInput, cb: (err: any, data?: BatchExecuteStatementCommandOutput) => void): void; + batchExecuteStatement(args: BatchExecuteStatementCommandInput, options: __HttpHandlerOptions, cb: (err: any, data?: BatchExecuteStatementCommandOutput) => void): void; + /** + * Accepts native JavaScript types instead of `AttributeValue`s, and calls + * BatchGetItemCommand operation from {@link @aws-sdk/client-dynamodb#BatchGetItemCommand}. 
+ * + * JavaScript objects passed in as parameters are marshalled into `AttributeValue` shapes + * required by Amazon DynamoDB. Responses from DynamoDB are unmarshalled into plain JavaScript objects. + */ + batchGet(args: BatchGetCommandInput, options?: __HttpHandlerOptions): Promise; + batchGet(args: BatchGetCommandInput, cb: (err: any, data?: BatchGetCommandOutput) => void): void; + batchGet(args: BatchGetCommandInput, options: __HttpHandlerOptions, cb: (err: any, data?: BatchGetCommandOutput) => void): void; + /** + * Accepts native JavaScript types instead of `AttributeValue`s, and calls + * BatchWriteItemCommand operation from {@link @aws-sdk/client-dynamodb#BatchWriteItemCommand}. + * + * JavaScript objects passed in as parameters are marshalled into `AttributeValue` shapes + * required by Amazon DynamoDB. Responses from DynamoDB are unmarshalled into plain JavaScript objects. + */ + batchWrite(args: BatchWriteCommandInput, options?: __HttpHandlerOptions): Promise; + batchWrite(args: BatchWriteCommandInput, cb: (err: any, data?: BatchWriteCommandOutput) => void): void; + batchWrite(args: BatchWriteCommandInput, options: __HttpHandlerOptions, cb: (err: any, data?: BatchWriteCommandOutput) => void): void; + /** + * Accepts native JavaScript types instead of `AttributeValue`s, and calls + * DeleteItemCommand operation from {@link @aws-sdk/client-dynamodb#DeleteItemCommand}. + * + * JavaScript objects passed in as parameters are marshalled into `AttributeValue` shapes + * required by Amazon DynamoDB. Responses from DynamoDB are unmarshalled into plain JavaScript objects. 
+ */ + delete(args: DeleteCommandInput, options?: __HttpHandlerOptions): Promise; + delete(args: DeleteCommandInput, cb: (err: any, data?: DeleteCommandOutput) => void): void; + delete(args: DeleteCommandInput, options: __HttpHandlerOptions, cb: (err: any, data?: DeleteCommandOutput) => void): void; + /** + * Accepts native JavaScript types instead of `AttributeValue`s, and calls + * ExecuteStatementCommand operation from {@link @aws-sdk/client-dynamodb#ExecuteStatementCommand}. + * + * JavaScript objects passed in as parameters are marshalled into `AttributeValue` shapes + * required by Amazon DynamoDB. Responses from DynamoDB are unmarshalled into plain JavaScript objects. + */ + executeStatement(args: ExecuteStatementCommandInput, options?: __HttpHandlerOptions): Promise; + executeStatement(args: ExecuteStatementCommandInput, cb: (err: any, data?: ExecuteStatementCommandOutput) => void): void; + executeStatement(args: ExecuteStatementCommandInput, options: __HttpHandlerOptions, cb: (err: any, data?: ExecuteStatementCommandOutput) => void): void; + /** + * Accepts native JavaScript types instead of `AttributeValue`s, and calls + * ExecuteTransactionCommand operation from {@link @aws-sdk/client-dynamodb#ExecuteTransactionCommand}. + * + * JavaScript objects passed in as parameters are marshalled into `AttributeValue` shapes + * required by Amazon DynamoDB. Responses from DynamoDB are unmarshalled into plain JavaScript objects. 
+ */ + executeTransaction(args: ExecuteTransactionCommandInput, options?: __HttpHandlerOptions): Promise; + executeTransaction(args: ExecuteTransactionCommandInput, cb: (err: any, data?: ExecuteTransactionCommandOutput) => void): void; + executeTransaction(args: ExecuteTransactionCommandInput, options: __HttpHandlerOptions, cb: (err: any, data?: ExecuteTransactionCommandOutput) => void): void; + /** + * Accepts native JavaScript types instead of `AttributeValue`s, and calls + * GetItemCommand operation from {@link @aws-sdk/client-dynamodb#GetItemCommand}. + * + * JavaScript objects passed in as parameters are marshalled into `AttributeValue` shapes + * required by Amazon DynamoDB. Responses from DynamoDB are unmarshalled into plain JavaScript objects. + */ + get(args: GetCommandInput, options?: __HttpHandlerOptions): Promise; + get(args: GetCommandInput, cb: (err: any, data?: GetCommandOutput) => void): void; + get(args: GetCommandInput, options: __HttpHandlerOptions, cb: (err: any, data?: GetCommandOutput) => void): void; + /** + * Accepts native JavaScript types instead of `AttributeValue`s, and calls + * PutItemCommand operation from {@link @aws-sdk/client-dynamodb#PutItemCommand}. + * + * JavaScript objects passed in as parameters are marshalled into `AttributeValue` shapes + * required by Amazon DynamoDB. Responses from DynamoDB are unmarshalled into plain JavaScript objects. + */ + put(args: PutCommandInput, options?: __HttpHandlerOptions): Promise; + put(args: PutCommandInput, cb: (err: any, data?: PutCommandOutput) => void): void; + put(args: PutCommandInput, options: __HttpHandlerOptions, cb: (err: any, data?: PutCommandOutput) => void): void; + /** + * Accepts native JavaScript types instead of `AttributeValue`s, and calls + * QueryCommand operation from {@link @aws-sdk/client-dynamodb#QueryCommand}. + * + * JavaScript objects passed in as parameters are marshalled into `AttributeValue` shapes + * required by Amazon DynamoDB. 
Responses from DynamoDB are unmarshalled into plain JavaScript objects. + */ + query(args: QueryCommandInput, options?: __HttpHandlerOptions): Promise; + query(args: QueryCommandInput, cb: (err: any, data?: QueryCommandOutput) => void): void; + query(args: QueryCommandInput, options: __HttpHandlerOptions, cb: (err: any, data?: QueryCommandOutput) => void): void; + /** + * Accepts native JavaScript types instead of `AttributeValue`s, and calls + * ScanCommand operation from {@link @aws-sdk/client-dynamodb#ScanCommand}. + * + * JavaScript objects passed in as parameters are marshalled into `AttributeValue` shapes + * required by Amazon DynamoDB. Responses from DynamoDB are unmarshalled into plain JavaScript objects. + */ + scan(args: ScanCommandInput, options?: __HttpHandlerOptions): Promise; + scan(args: ScanCommandInput, cb: (err: any, data?: ScanCommandOutput) => void): void; + scan(args: ScanCommandInput, options: __HttpHandlerOptions, cb: (err: any, data?: ScanCommandOutput) => void): void; + /** + * Accepts native JavaScript types instead of `AttributeValue`s, and calls + * TransactGetItemsCommand operation from {@link @aws-sdk/client-dynamodb#TransactGetItemsCommand}. + * + * JavaScript objects passed in as parameters are marshalled into `AttributeValue` shapes + * required by Amazon DynamoDB. Responses from DynamoDB are unmarshalled into plain JavaScript objects. + */ + transactGet(args: TransactGetCommandInput, options?: __HttpHandlerOptions): Promise; + transactGet(args: TransactGetCommandInput, cb: (err: any, data?: TransactGetCommandOutput) => void): void; + transactGet(args: TransactGetCommandInput, options: __HttpHandlerOptions, cb: (err: any, data?: TransactGetCommandOutput) => void): void; + /** + * Accepts native JavaScript types instead of `AttributeValue`s, and calls + * TransactWriteItemsCommand operation from {@link @aws-sdk/client-dynamodb#TransactWriteItemsCommand}. 
+ * + * JavaScript objects passed in as parameters are marshalled into `AttributeValue` shapes + * required by Amazon DynamoDB. Responses from DynamoDB are unmarshalled into plain JavaScript objects. + */ + transactWrite(args: TransactWriteCommandInput, options?: __HttpHandlerOptions): Promise; + transactWrite(args: TransactWriteCommandInput, cb: (err: any, data?: TransactWriteCommandOutput) => void): void; + transactWrite(args: TransactWriteCommandInput, options: __HttpHandlerOptions, cb: (err: any, data?: TransactWriteCommandOutput) => void): void; + /** + * Accepts native JavaScript types instead of `AttributeValue`s, and calls + * UpdateItemCommand operation from {@link @aws-sdk/client-dynamodb#UpdateItemCommand}. + * + * JavaScript objects passed in as parameters are marshalled into `AttributeValue` shapes + * required by Amazon DynamoDB. Responses from DynamoDB are unmarshalled into plain JavaScript objects. + */ + update(args: UpdateCommandInput, options?: __HttpHandlerOptions): Promise; + update(args: UpdateCommandInput, cb: (err: any, data?: UpdateCommandOutput) => void): void; + update(args: UpdateCommandInput, options: __HttpHandlerOptions, cb: (err: any, data?: UpdateCommandOutput) => void): void; +} diff --git a/amplify/functions/downloadDocument/node_modules/@aws-sdk/lib-dynamodb/dist-types/DynamoDBDocumentClient.d.ts b/amplify/functions/downloadDocument/node_modules/@aws-sdk/lib-dynamodb/dist-types/DynamoDBDocumentClient.d.ts new file mode 100644 index 0000000..260830f --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@aws-sdk/lib-dynamodb/dist-types/DynamoDBDocumentClient.d.ts @@ -0,0 +1,96 @@ +import { DynamoDBClient, DynamoDBClientResolvedConfig, ServiceInputTypes as __ServiceInputTypes, ServiceOutputTypes as __ServiceOutputTypes } from "@aws-sdk/client-dynamodb"; +import { marshallOptions, unmarshallOptions } from "@aws-sdk/util-dynamodb"; +import { Client as __Client } from "@smithy/smithy-client"; +import { HttpHandlerOptions 
as __HttpHandlerOptions } from "@smithy/types"; +import { BatchExecuteStatementCommandInput, BatchExecuteStatementCommandOutput } from "./commands/BatchExecuteStatementCommand"; +import { BatchGetCommandInput, BatchGetCommandOutput } from "./commands/BatchGetCommand"; +import { BatchWriteCommandInput, BatchWriteCommandOutput } from "./commands/BatchWriteCommand"; +import { DeleteCommandInput, DeleteCommandOutput } from "./commands/DeleteCommand"; +import { ExecuteStatementCommandInput, ExecuteStatementCommandOutput } from "./commands/ExecuteStatementCommand"; +import { ExecuteTransactionCommandInput, ExecuteTransactionCommandOutput } from "./commands/ExecuteTransactionCommand"; +import { GetCommandInput, GetCommandOutput } from "./commands/GetCommand"; +import { PutCommandInput, PutCommandOutput } from "./commands/PutCommand"; +import { QueryCommandInput, QueryCommandOutput } from "./commands/QueryCommand"; +import { ScanCommandInput, ScanCommandOutput } from "./commands/ScanCommand"; +import { TransactGetCommandInput, TransactGetCommandOutput } from "./commands/TransactGetCommand"; +import { TransactWriteCommandInput, TransactWriteCommandOutput } from "./commands/TransactWriteCommand"; +import { UpdateCommandInput, UpdateCommandOutput } from "./commands/UpdateCommand"; +/** + * @public + */ +export { __Client }; +/** + * @public + */ +export type ServiceInputTypes = __ServiceInputTypes | BatchExecuteStatementCommandInput | BatchGetCommandInput | BatchWriteCommandInput | DeleteCommandInput | ExecuteStatementCommandInput | ExecuteTransactionCommandInput | GetCommandInput | PutCommandInput | QueryCommandInput | ScanCommandInput | TransactGetCommandInput | TransactWriteCommandInput | UpdateCommandInput; +/** + * @public + */ +export type ServiceOutputTypes = __ServiceOutputTypes | BatchExecuteStatementCommandOutput | BatchGetCommandOutput | BatchWriteCommandOutput | DeleteCommandOutput | ExecuteStatementCommandOutput | ExecuteTransactionCommandOutput | 
GetCommandOutput | PutCommandOutput | QueryCommandOutput | ScanCommandOutput | TransactGetCommandOutput | TransactWriteCommandOutput | UpdateCommandOutput; +/** + * @public + */ +export type TranslateConfig = { + marshallOptions?: marshallOptions; + unmarshallOptions?: unmarshallOptions; +}; +/** + * @public + */ +export type DynamoDBDocumentClientResolvedConfig = DynamoDBClientResolvedConfig & { + translateConfig?: TranslateConfig; +}; +/** + * The document client simplifies working with items in Amazon DynamoDB by + * abstracting away the notion of attribute values. This abstraction annotates native + * JavaScript types supplied as input parameters, as well as converts annotated + * response data to native JavaScript types. + * + * ## Marshalling Input and Unmarshalling Response Data + * + * The document client affords developers the use of native JavaScript types + * instead of `AttributeValue`s to simplify the JavaScript development + * experience with Amazon DynamoDB. JavaScript objects passed in as parameters + * are marshalled into `AttributeValue` shapes required by Amazon DynamoDB. + * Responses from DynamoDB are unmarshalled into plain JavaScript objects + * by the `DocumentClient`. The `DocumentClient` does not accept + * `AttributeValue`s in favor of native JavaScript types. + * + * | JavaScript Type | DynamoDB AttributeValue | + * | :-------------------------------: | ----------------------- | + * | String | S | + * | Number / BigInt | N | + * | Boolean | BOOL | + * | null | NULL | + * | Array | L | + * | Object | M | + * | Set\ | BS | + * | Set\ | NS | + * | Set\ | SS | + * | Uint8Array, Buffer, File, Blob... 
| B | + * + * ### Example + * + * Here is an example list which is sent to DynamoDB client in an operation: + * + * ```json + * { "L": [{ "NULL": true }, { "BOOL": false }, { "N": 1 }, { "S": "two" }] } + * ``` + * + * The DynamoDB document client abstracts the attribute values as follows in + * both input and output: + * + * ```json + * [null, false, 1, "two"] + * ``` + * + * @see {@link https://www.npmjs.com/package/@aws-sdk/client-dynamodb | @aws-sdk/client-dynamodb} + * + * @public + */ +export declare class DynamoDBDocumentClient extends __Client<__HttpHandlerOptions, ServiceInputTypes, ServiceOutputTypes, DynamoDBDocumentClientResolvedConfig> { + readonly config: DynamoDBDocumentClientResolvedConfig; + protected constructor(client: DynamoDBClient, translateConfig?: TranslateConfig); + static from(client: DynamoDBClient, translateConfig?: TranslateConfig): DynamoDBDocumentClient; + destroy(): void; +} diff --git a/amplify/functions/downloadDocument/node_modules/@aws-sdk/lib-dynamodb/dist-types/baseCommand/DynamoDBDocumentClientCommand.d.ts b/amplify/functions/downloadDocument/node_modules/@aws-sdk/lib-dynamodb/dist-types/baseCommand/DynamoDBDocumentClientCommand.d.ts new file mode 100644 index 0000000..e78690a --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@aws-sdk/lib-dynamodb/dist-types/baseCommand/DynamoDBDocumentClientCommand.d.ts @@ -0,0 +1,17 @@ +import { Command as $Command } from "@smithy/smithy-client"; +import { MiddlewareStack } from "@smithy/types"; +import { KeyNodeChildren } from "../commands/utils"; +import { DynamoDBDocumentClientResolvedConfig } from "../DynamoDBDocumentClient"; +/** + * Base class for Commands in lib-dynamodb used to pass middleware to + * the underlying DynamoDBClient Commands. 
+ * + * @public + */ +export declare abstract class DynamoDBDocumentClientCommand extends $Command { + protected abstract readonly inputKeyNodes: KeyNodeChildren; + protected abstract readonly outputKeyNodes: KeyNodeChildren; + protected abstract clientCommand: $Command; + abstract middlewareStack: MiddlewareStack; + protected addMarshallingMiddleware(configuration: DynamoDBDocumentClientResolvedConfig): void; +} diff --git a/amplify/functions/downloadDocument/node_modules/@aws-sdk/lib-dynamodb/dist-types/commands/BatchExecuteStatementCommand.d.ts b/amplify/functions/downloadDocument/node_modules/@aws-sdk/lib-dynamodb/dist-types/commands/BatchExecuteStatementCommand.d.ts new file mode 100644 index 0000000..8f55283 --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@aws-sdk/lib-dynamodb/dist-types/commands/BatchExecuteStatementCommand.d.ts @@ -0,0 +1,66 @@ +import { BatchExecuteStatementCommand as __BatchExecuteStatementCommand } from "@aws-sdk/client-dynamodb"; +import { Command as $Command } from "@smithy/smithy-client"; +import { Handler, HttpHandlerOptions as __HttpHandlerOptions, MiddlewareStack } from "@smithy/types"; +import { DynamoDBDocumentClientCommand } from "../baseCommand/DynamoDBDocumentClientCommand"; +import { DynamoDBDocumentClientResolvedConfig, ServiceInputTypes, ServiceOutputTypes } from "../DynamoDBDocumentClient"; +/** + * @public + */ +export { DynamoDBDocumentClientCommand, $Command }; +/** + * @public + */ +export type BatchExecuteStatementCommandInput = Omit<__BatchExecuteStatementCommandInput, "Statements"> & { + Statements: (Omit & { + Parameters?: NativeAttributeValue[] | undefined; + })[] | undefined; +}; +/** + * @public + */ +export type BatchExecuteStatementCommandOutput = Omit<__BatchExecuteStatementCommandOutput, "Responses"> & { + Responses?: (Omit & { + Error?: (Omit & { + Item?: Record | undefined; + }) | undefined; + Item?: Record | undefined; + })[] | undefined; +}; +/** + * Accepts native JavaScript types 
instead of `AttributeValue`s, and calls + * BatchExecuteStatementCommand operation from {@link @aws-sdk/client-dynamodb#BatchExecuteStatementCommand}. + * + * JavaScript objects passed in as parameters are marshalled into `AttributeValue` shapes + * required by Amazon DynamoDB. Responses from DynamoDB are unmarshalled into plain JavaScript objects. + * + * @public + */ +export declare class BatchExecuteStatementCommand extends DynamoDBDocumentClientCommand { + readonly input: BatchExecuteStatementCommandInput; + protected readonly inputKeyNodes: { + Statements: { + "*": { + Parameters: import("../commands/utils").KeyNodeChildren; + }; + }; + }; + protected readonly outputKeyNodes: { + Responses: { + "*": { + Error: { + Item: import("../commands/utils").KeyNodeChildren; + }; + Item: import("../commands/utils").KeyNodeChildren; + }; + }; + }; + protected readonly clientCommand: __BatchExecuteStatementCommand; + readonly middlewareStack: MiddlewareStack; + constructor(input: BatchExecuteStatementCommandInput); + /** + * @internal + */ + resolveMiddleware(clientStack: MiddlewareStack, configuration: DynamoDBDocumentClientResolvedConfig, options?: __HttpHandlerOptions): Handler; +} +import type { BatchExecuteStatementCommandInput as __BatchExecuteStatementCommandInput, BatchExecuteStatementCommandOutput as __BatchExecuteStatementCommandOutput, BatchStatementError, BatchStatementRequest, BatchStatementResponse } from "@aws-sdk/client-dynamodb"; +import type { NativeAttributeValue } from "@aws-sdk/util-dynamodb"; diff --git a/amplify/functions/downloadDocument/node_modules/@aws-sdk/lib-dynamodb/dist-types/commands/BatchGetCommand.d.ts b/amplify/functions/downloadDocument/node_modules/@aws-sdk/lib-dynamodb/dist-types/commands/BatchGetCommand.d.ts new file mode 100644 index 0000000..60cd689 --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@aws-sdk/lib-dynamodb/dist-types/commands/BatchGetCommand.d.ts @@ -0,0 +1,70 @@ +import { BatchGetItemCommand as 
__BatchGetItemCommand } from "@aws-sdk/client-dynamodb"; +import { Command as $Command } from "@smithy/smithy-client"; +import { Handler, HttpHandlerOptions as __HttpHandlerOptions, MiddlewareStack } from "@smithy/types"; +import { DynamoDBDocumentClientCommand } from "../baseCommand/DynamoDBDocumentClientCommand"; +import { DynamoDBDocumentClientResolvedConfig, ServiceInputTypes, ServiceOutputTypes } from "../DynamoDBDocumentClient"; +/** + * @public + */ +export { DynamoDBDocumentClientCommand, $Command }; +/** + * @public + */ +export type BatchGetCommandInput = Omit<__BatchGetItemCommandInput, "RequestItems"> & { + RequestItems: Record & { + Keys: Record[] | undefined; + }> | undefined; +}; +/** + * @public + */ +export type BatchGetCommandOutput = Omit<__BatchGetItemCommandOutput, "Responses" | "UnprocessedKeys"> & { + Responses?: Record[]> | undefined; + UnprocessedKeys?: Record & { + Keys: Record[] | undefined; + }> | undefined; +}; +/** + * Accepts native JavaScript types instead of `AttributeValue`s, and calls + * BatchGetItemCommand operation from {@link @aws-sdk/client-dynamodb#BatchGetItemCommand}. + * + * JavaScript objects passed in as parameters are marshalled into `AttributeValue` shapes + * required by Amazon DynamoDB. Responses from DynamoDB are unmarshalled into plain JavaScript objects. 
+ * + * @public + */ +export declare class BatchGetCommand extends DynamoDBDocumentClientCommand { + readonly input: BatchGetCommandInput; + protected readonly inputKeyNodes: { + RequestItems: { + "*": { + Keys: { + "*": import("../commands/utils").KeyNodeChildren; + }; + }; + }; + }; + protected readonly outputKeyNodes: { + Responses: { + "*": { + "*": import("../commands/utils").KeyNodeChildren; + }; + }; + UnprocessedKeys: { + "*": { + Keys: { + "*": import("../commands/utils").KeyNodeChildren; + }; + }; + }; + }; + protected readonly clientCommand: __BatchGetItemCommand; + readonly middlewareStack: MiddlewareStack; + constructor(input: BatchGetCommandInput); + /** + * @internal + */ + resolveMiddleware(clientStack: MiddlewareStack, configuration: DynamoDBDocumentClientResolvedConfig, options?: __HttpHandlerOptions): Handler; +} +import type { BatchGetItemCommandInput as __BatchGetItemCommandInput, BatchGetItemCommandOutput as __BatchGetItemCommandOutput, KeysAndAttributes } from "@aws-sdk/client-dynamodb"; +import type { NativeAttributeValue } from "@aws-sdk/util-dynamodb"; diff --git a/amplify/functions/downloadDocument/node_modules/@aws-sdk/lib-dynamodb/dist-types/commands/BatchWriteCommand.d.ts b/amplify/functions/downloadDocument/node_modules/@aws-sdk/lib-dynamodb/dist-types/commands/BatchWriteCommand.d.ts new file mode 100644 index 0000000..930d3f5 --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@aws-sdk/lib-dynamodb/dist-types/commands/BatchWriteCommand.d.ts @@ -0,0 +1,94 @@ +import { BatchWriteItemCommand as __BatchWriteItemCommand } from "@aws-sdk/client-dynamodb"; +import { Command as $Command } from "@smithy/smithy-client"; +import { Handler, HttpHandlerOptions as __HttpHandlerOptions, MiddlewareStack } from "@smithy/types"; +import { DynamoDBDocumentClientCommand } from "../baseCommand/DynamoDBDocumentClientCommand"; +import { DynamoDBDocumentClientResolvedConfig, ServiceInputTypes, ServiceOutputTypes } from 
"../DynamoDBDocumentClient"; +/** + * @public + */ +export { DynamoDBDocumentClientCommand, $Command }; +/** + * @public + */ +export type BatchWriteCommandInput = Omit<__BatchWriteItemCommandInput, "RequestItems"> & { + RequestItems: Record & { + PutRequest?: (Omit & { + Item: Record | undefined; + }) | undefined; + DeleteRequest?: (Omit & { + Key: Record | undefined; + }) | undefined; + })[]> | undefined; +}; +/** + * @public + */ +export type BatchWriteCommandOutput = Omit<__BatchWriteItemCommandOutput, "UnprocessedItems" | "ItemCollectionMetrics"> & { + UnprocessedItems?: Record & { + PutRequest?: (Omit & { + Item: Record | undefined; + }) | undefined; + DeleteRequest?: (Omit & { + Key: Record | undefined; + }) | undefined; + })[]> | undefined; + ItemCollectionMetrics?: Record & { + ItemCollectionKey?: Record | undefined; + })[]> | undefined; +}; +/** + * Accepts native JavaScript types instead of `AttributeValue`s, and calls + * BatchWriteItemCommand operation from {@link @aws-sdk/client-dynamodb#BatchWriteItemCommand}. + * + * JavaScript objects passed in as parameters are marshalled into `AttributeValue` shapes + * required by Amazon DynamoDB. Responses from DynamoDB are unmarshalled into plain JavaScript objects. 
+ * + * @public + */ +export declare class BatchWriteCommand extends DynamoDBDocumentClientCommand { + readonly input: BatchWriteCommandInput; + protected readonly inputKeyNodes: { + RequestItems: { + "*": { + "*": { + PutRequest: { + Item: import("../commands/utils").KeyNodeChildren; + }; + DeleteRequest: { + Key: import("../commands/utils").KeyNodeChildren; + }; + }; + }; + }; + }; + protected readonly outputKeyNodes: { + UnprocessedItems: { + "*": { + "*": { + PutRequest: { + Item: import("../commands/utils").KeyNodeChildren; + }; + DeleteRequest: { + Key: import("../commands/utils").KeyNodeChildren; + }; + }; + }; + }; + ItemCollectionMetrics: { + "*": { + "*": { + ItemCollectionKey: import("../commands/utils").KeyNodeChildren; + }; + }; + }; + }; + protected readonly clientCommand: __BatchWriteItemCommand; + readonly middlewareStack: MiddlewareStack; + constructor(input: BatchWriteCommandInput); + /** + * @internal + */ + resolveMiddleware(clientStack: MiddlewareStack, configuration: DynamoDBDocumentClientResolvedConfig, options?: __HttpHandlerOptions): Handler; +} +import type { BatchWriteItemCommandInput as __BatchWriteItemCommandInput, BatchWriteItemCommandOutput as __BatchWriteItemCommandOutput, DeleteRequest, ItemCollectionMetrics, PutRequest, WriteRequest } from "@aws-sdk/client-dynamodb"; +import type { NativeAttributeValue } from "@aws-sdk/util-dynamodb"; diff --git a/amplify/functions/downloadDocument/node_modules/@aws-sdk/lib-dynamodb/dist-types/commands/DeleteCommand.d.ts b/amplify/functions/downloadDocument/node_modules/@aws-sdk/lib-dynamodb/dist-types/commands/DeleteCommand.d.ts new file mode 100644 index 0000000..c091175 --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@aws-sdk/lib-dynamodb/dist-types/commands/DeleteCommand.d.ts @@ -0,0 +1,66 @@ +import { DeleteItemCommand as __DeleteItemCommand } from "@aws-sdk/client-dynamodb"; +import { Command as $Command } from "@smithy/smithy-client"; +import { Handler, HttpHandlerOptions 
as __HttpHandlerOptions, MiddlewareStack } from "@smithy/types"; +import { DynamoDBDocumentClientCommand } from "../baseCommand/DynamoDBDocumentClientCommand"; +import { DynamoDBDocumentClientResolvedConfig, ServiceInputTypes, ServiceOutputTypes } from "../DynamoDBDocumentClient"; +/** + * @public + */ +export { DynamoDBDocumentClientCommand, $Command }; +/** + * @public + */ +export type DeleteCommandInput = Omit<__DeleteItemCommandInput, "Key" | "Expected" | "ExpressionAttributeValues"> & { + Key: Record | undefined; + Expected?: Record & { + Value?: NativeAttributeValue | undefined; + AttributeValueList?: NativeAttributeValue[] | undefined; + }> | undefined; + ExpressionAttributeValues?: Record | undefined; +}; +/** + * @public + */ +export type DeleteCommandOutput = Omit<__DeleteItemCommandOutput, "Attributes" | "ItemCollectionMetrics"> & { + Attributes?: Record | undefined; + ItemCollectionMetrics?: (Omit & { + ItemCollectionKey?: Record | undefined; + }) | undefined; +}; +/** + * Accepts native JavaScript types instead of `AttributeValue`s, and calls + * DeleteItemCommand operation from {@link @aws-sdk/client-dynamodb#DeleteItemCommand}. + * + * JavaScript objects passed in as parameters are marshalled into `AttributeValue` shapes + * required by Amazon DynamoDB. Responses from DynamoDB are unmarshalled into plain JavaScript objects. 
+ * + * @public + */ +export declare class DeleteCommand extends DynamoDBDocumentClientCommand { + readonly input: DeleteCommandInput; + protected readonly inputKeyNodes: { + Key: import("../commands/utils").KeyNodeChildren; + Expected: { + "*": { + Value: null; + AttributeValueList: import("../commands/utils").KeyNodeChildren; + }; + }; + ExpressionAttributeValues: import("../commands/utils").KeyNodeChildren; + }; + protected readonly outputKeyNodes: { + Attributes: import("../commands/utils").KeyNodeChildren; + ItemCollectionMetrics: { + ItemCollectionKey: import("../commands/utils").KeyNodeChildren; + }; + }; + protected readonly clientCommand: __DeleteItemCommand; + readonly middlewareStack: MiddlewareStack; + constructor(input: DeleteCommandInput); + /** + * @internal + */ + resolveMiddleware(clientStack: MiddlewareStack, configuration: DynamoDBDocumentClientResolvedConfig, options?: __HttpHandlerOptions): Handler; +} +import type { DeleteItemCommandInput as __DeleteItemCommandInput, DeleteItemCommandOutput as __DeleteItemCommandOutput, ExpectedAttributeValue, ItemCollectionMetrics } from "@aws-sdk/client-dynamodb"; +import type { NativeAttributeValue } from "@aws-sdk/util-dynamodb"; diff --git a/amplify/functions/downloadDocument/node_modules/@aws-sdk/lib-dynamodb/dist-types/commands/ExecuteStatementCommand.d.ts b/amplify/functions/downloadDocument/node_modules/@aws-sdk/lib-dynamodb/dist-types/commands/ExecuteStatementCommand.d.ts new file mode 100644 index 0000000..4498320 --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@aws-sdk/lib-dynamodb/dist-types/commands/ExecuteStatementCommand.d.ts @@ -0,0 +1,52 @@ +import { ExecuteStatementCommand as __ExecuteStatementCommand } from "@aws-sdk/client-dynamodb"; +import { Command as $Command } from "@smithy/smithy-client"; +import { Handler, HttpHandlerOptions as __HttpHandlerOptions, MiddlewareStack } from "@smithy/types"; +import { DynamoDBDocumentClientCommand } from 
"../baseCommand/DynamoDBDocumentClientCommand"; +import { DynamoDBDocumentClientResolvedConfig, ServiceInputTypes, ServiceOutputTypes } from "../DynamoDBDocumentClient"; +/** + * @public + */ +export { DynamoDBDocumentClientCommand, $Command }; +/** + * @public + */ +export type ExecuteStatementCommandInput = Omit<__ExecuteStatementCommandInput, "Parameters"> & { + Parameters?: NativeAttributeValue[] | undefined; +}; +/** + * @public + */ +export type ExecuteStatementCommandOutput = Omit<__ExecuteStatementCommandOutput, "Items" | "LastEvaluatedKey"> & { + Items?: Record[] | undefined; + LastEvaluatedKey?: Record | undefined; +}; +/** + * Accepts native JavaScript types instead of `AttributeValue`s, and calls + * ExecuteStatementCommand operation from {@link @aws-sdk/client-dynamodb#ExecuteStatementCommand}. + * + * JavaScript objects passed in as parameters are marshalled into `AttributeValue` shapes + * required by Amazon DynamoDB. Responses from DynamoDB are unmarshalled into plain JavaScript objects. 
+ * + * @public + */ +export declare class ExecuteStatementCommand extends DynamoDBDocumentClientCommand { + readonly input: ExecuteStatementCommandInput; + protected readonly inputKeyNodes: { + Parameters: import("../commands/utils").KeyNodeChildren; + }; + protected readonly outputKeyNodes: { + Items: { + "*": import("../commands/utils").KeyNodeChildren; + }; + LastEvaluatedKey: import("../commands/utils").KeyNodeChildren; + }; + protected readonly clientCommand: __ExecuteStatementCommand; + readonly middlewareStack: MiddlewareStack; + constructor(input: ExecuteStatementCommandInput); + /** + * @internal + */ + resolveMiddleware(clientStack: MiddlewareStack, configuration: DynamoDBDocumentClientResolvedConfig, options?: __HttpHandlerOptions): Handler; +} +import type { ExecuteStatementCommandInput as __ExecuteStatementCommandInput, ExecuteStatementCommandOutput as __ExecuteStatementCommandOutput } from "@aws-sdk/client-dynamodb"; +import type { NativeAttributeValue } from "@aws-sdk/util-dynamodb"; diff --git a/amplify/functions/downloadDocument/node_modules/@aws-sdk/lib-dynamodb/dist-types/commands/ExecuteTransactionCommand.d.ts b/amplify/functions/downloadDocument/node_modules/@aws-sdk/lib-dynamodb/dist-types/commands/ExecuteTransactionCommand.d.ts new file mode 100644 index 0000000..04873aa --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@aws-sdk/lib-dynamodb/dist-types/commands/ExecuteTransactionCommand.d.ts @@ -0,0 +1,60 @@ +import { ExecuteTransactionCommand as __ExecuteTransactionCommand } from "@aws-sdk/client-dynamodb"; +import { Command as $Command } from "@smithy/smithy-client"; +import { Handler, HttpHandlerOptions as __HttpHandlerOptions, MiddlewareStack } from "@smithy/types"; +import { DynamoDBDocumentClientCommand } from "../baseCommand/DynamoDBDocumentClientCommand"; +import { DynamoDBDocumentClientResolvedConfig, ServiceInputTypes, ServiceOutputTypes } from "../DynamoDBDocumentClient"; +/** + * @public + */ +export { 
DynamoDBDocumentClientCommand, $Command }; +/** + * @public + */ +export type ExecuteTransactionCommandInput = Omit<__ExecuteTransactionCommandInput, "TransactStatements"> & { + TransactStatements: (Omit & { + Parameters?: NativeAttributeValue[] | undefined; + })[] | undefined; +}; +/** + * @public + */ +export type ExecuteTransactionCommandOutput = Omit<__ExecuteTransactionCommandOutput, "Responses"> & { + Responses?: (Omit & { + Item?: Record | undefined; + })[] | undefined; +}; +/** + * Accepts native JavaScript types instead of `AttributeValue`s, and calls + * ExecuteTransactionCommand operation from {@link @aws-sdk/client-dynamodb#ExecuteTransactionCommand}. + * + * JavaScript objects passed in as parameters are marshalled into `AttributeValue` shapes + * required by Amazon DynamoDB. Responses from DynamoDB are unmarshalled into plain JavaScript objects. + * + * @public + */ +export declare class ExecuteTransactionCommand extends DynamoDBDocumentClientCommand { + readonly input: ExecuteTransactionCommandInput; + protected readonly inputKeyNodes: { + TransactStatements: { + "*": { + Parameters: import("../commands/utils").KeyNodeChildren; + }; + }; + }; + protected readonly outputKeyNodes: { + Responses: { + "*": { + Item: import("../commands/utils").KeyNodeChildren; + }; + }; + }; + protected readonly clientCommand: __ExecuteTransactionCommand; + readonly middlewareStack: MiddlewareStack; + constructor(input: ExecuteTransactionCommandInput); + /** + * @internal + */ + resolveMiddleware(clientStack: MiddlewareStack, configuration: DynamoDBDocumentClientResolvedConfig, options?: __HttpHandlerOptions): Handler; +} +import type { ExecuteTransactionCommandInput as __ExecuteTransactionCommandInput, ExecuteTransactionCommandOutput as __ExecuteTransactionCommandOutput, ItemResponse, ParameterizedStatement } from "@aws-sdk/client-dynamodb"; +import type { NativeAttributeValue } from "@aws-sdk/util-dynamodb"; diff --git 
a/amplify/functions/downloadDocument/node_modules/@aws-sdk/lib-dynamodb/dist-types/commands/GetCommand.d.ts b/amplify/functions/downloadDocument/node_modules/@aws-sdk/lib-dynamodb/dist-types/commands/GetCommand.d.ts new file mode 100644 index 0000000..d425131 --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@aws-sdk/lib-dynamodb/dist-types/commands/GetCommand.d.ts @@ -0,0 +1,48 @@ +import { GetItemCommand as __GetItemCommand } from "@aws-sdk/client-dynamodb"; +import { Command as $Command } from "@smithy/smithy-client"; +import { Handler, HttpHandlerOptions as __HttpHandlerOptions, MiddlewareStack } from "@smithy/types"; +import { DynamoDBDocumentClientCommand } from "../baseCommand/DynamoDBDocumentClientCommand"; +import { DynamoDBDocumentClientResolvedConfig, ServiceInputTypes, ServiceOutputTypes } from "../DynamoDBDocumentClient"; +/** + * @public + */ +export { DynamoDBDocumentClientCommand, $Command }; +/** + * @public + */ +export type GetCommandInput = Omit<__GetItemCommandInput, "Key"> & { + Key: Record | undefined; +}; +/** + * @public + */ +export type GetCommandOutput = Omit<__GetItemCommandOutput, "Item"> & { + Item?: Record | undefined; +}; +/** + * Accepts native JavaScript types instead of `AttributeValue`s, and calls + * GetItemCommand operation from {@link @aws-sdk/client-dynamodb#GetItemCommand}. + * + * JavaScript objects passed in as parameters are marshalled into `AttributeValue` shapes + * required by Amazon DynamoDB. Responses from DynamoDB are unmarshalled into plain JavaScript objects. 
+ * + * @public + */ +export declare class GetCommand extends DynamoDBDocumentClientCommand { + readonly input: GetCommandInput; + protected readonly inputKeyNodes: { + Key: import("../commands/utils").KeyNodeChildren; + }; + protected readonly outputKeyNodes: { + Item: import("../commands/utils").KeyNodeChildren; + }; + protected readonly clientCommand: __GetItemCommand; + readonly middlewareStack: MiddlewareStack; + constructor(input: GetCommandInput); + /** + * @internal + */ + resolveMiddleware(clientStack: MiddlewareStack, configuration: DynamoDBDocumentClientResolvedConfig, options?: __HttpHandlerOptions): Handler; +} +import type { GetItemCommandInput as __GetItemCommandInput, GetItemCommandOutput as __GetItemCommandOutput } from "@aws-sdk/client-dynamodb"; +import type { NativeAttributeValue } from "@aws-sdk/util-dynamodb"; diff --git a/amplify/functions/downloadDocument/node_modules/@aws-sdk/lib-dynamodb/dist-types/commands/PutCommand.d.ts b/amplify/functions/downloadDocument/node_modules/@aws-sdk/lib-dynamodb/dist-types/commands/PutCommand.d.ts new file mode 100644 index 0000000..babe831 --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@aws-sdk/lib-dynamodb/dist-types/commands/PutCommand.d.ts @@ -0,0 +1,66 @@ +import { PutItemCommand as __PutItemCommand } from "@aws-sdk/client-dynamodb"; +import { Command as $Command } from "@smithy/smithy-client"; +import { Handler, HttpHandlerOptions as __HttpHandlerOptions, MiddlewareStack } from "@smithy/types"; +import { DynamoDBDocumentClientCommand } from "../baseCommand/DynamoDBDocumentClientCommand"; +import { DynamoDBDocumentClientResolvedConfig, ServiceInputTypes, ServiceOutputTypes } from "../DynamoDBDocumentClient"; +/** + * @public + */ +export { DynamoDBDocumentClientCommand, $Command }; +/** + * @public + */ +export type PutCommandInput = Omit<__PutItemCommandInput, "Item" | "Expected" | "ExpressionAttributeValues"> & { + Item: Record | undefined; + Expected?: Record & { + Value?: 
NativeAttributeValue | undefined; + AttributeValueList?: NativeAttributeValue[] | undefined; + }> | undefined; + ExpressionAttributeValues?: Record | undefined; +}; +/** + * @public + */ +export type PutCommandOutput = Omit<__PutItemCommandOutput, "Attributes" | "ItemCollectionMetrics"> & { + Attributes?: Record | undefined; + ItemCollectionMetrics?: (Omit & { + ItemCollectionKey?: Record | undefined; + }) | undefined; +}; +/** + * Accepts native JavaScript types instead of `AttributeValue`s, and calls + * PutItemCommand operation from {@link @aws-sdk/client-dynamodb#PutItemCommand}. + * + * JavaScript objects passed in as parameters are marshalled into `AttributeValue` shapes + * required by Amazon DynamoDB. Responses from DynamoDB are unmarshalled into plain JavaScript objects. + * + * @public + */ +export declare class PutCommand extends DynamoDBDocumentClientCommand { + readonly input: PutCommandInput; + protected readonly inputKeyNodes: { + Item: import("../commands/utils").KeyNodeChildren; + Expected: { + "*": { + Value: null; + AttributeValueList: import("../commands/utils").KeyNodeChildren; + }; + }; + ExpressionAttributeValues: import("../commands/utils").KeyNodeChildren; + }; + protected readonly outputKeyNodes: { + Attributes: import("../commands/utils").KeyNodeChildren; + ItemCollectionMetrics: { + ItemCollectionKey: import("../commands/utils").KeyNodeChildren; + }; + }; + protected readonly clientCommand: __PutItemCommand; + readonly middlewareStack: MiddlewareStack; + constructor(input: PutCommandInput); + /** + * @internal + */ + resolveMiddleware(clientStack: MiddlewareStack, configuration: DynamoDBDocumentClientResolvedConfig, options?: __HttpHandlerOptions): Handler; +} +import type { ExpectedAttributeValue, ItemCollectionMetrics, PutItemCommandInput as __PutItemCommandInput, PutItemCommandOutput as __PutItemCommandOutput } from "@aws-sdk/client-dynamodb"; +import type { NativeAttributeValue } from "@aws-sdk/util-dynamodb"; diff --git 
a/amplify/functions/downloadDocument/node_modules/@aws-sdk/lib-dynamodb/dist-types/commands/QueryCommand.d.ts b/amplify/functions/downloadDocument/node_modules/@aws-sdk/lib-dynamodb/dist-types/commands/QueryCommand.d.ts new file mode 100644 index 0000000..4948a70 --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@aws-sdk/lib-dynamodb/dist-types/commands/QueryCommand.d.ts @@ -0,0 +1,70 @@ +import { QueryCommand as __QueryCommand } from "@aws-sdk/client-dynamodb"; +import { Command as $Command } from "@smithy/smithy-client"; +import { Handler, HttpHandlerOptions as __HttpHandlerOptions, MiddlewareStack } from "@smithy/types"; +import { DynamoDBDocumentClientCommand } from "../baseCommand/DynamoDBDocumentClientCommand"; +import { DynamoDBDocumentClientResolvedConfig, ServiceInputTypes, ServiceOutputTypes } from "../DynamoDBDocumentClient"; +/** + * @public + */ +export { DynamoDBDocumentClientCommand, $Command }; +/** + * @public + */ +export type QueryCommandInput = Omit<__QueryCommandInput, "KeyConditions" | "QueryFilter" | "ExclusiveStartKey" | "ExpressionAttributeValues"> & { + KeyConditions?: Record & { + AttributeValueList?: NativeAttributeValue[] | undefined; + }> | undefined; + QueryFilter?: Record & { + AttributeValueList?: NativeAttributeValue[] | undefined; + }> | undefined; + ExclusiveStartKey?: Record | undefined; + ExpressionAttributeValues?: Record | undefined; +}; +/** + * @public + */ +export type QueryCommandOutput = Omit<__QueryCommandOutput, "Items" | "LastEvaluatedKey"> & { + Items?: Record[] | undefined; + LastEvaluatedKey?: Record | undefined; +}; +/** + * Accepts native JavaScript types instead of `AttributeValue`s, and calls + * QueryCommand operation from {@link @aws-sdk/client-dynamodb#QueryCommand}. + * + * JavaScript objects passed in as parameters are marshalled into `AttributeValue` shapes + * required by Amazon DynamoDB. Responses from DynamoDB are unmarshalled into plain JavaScript objects. 
+ * + * @public + */ +export declare class QueryCommand extends DynamoDBDocumentClientCommand { + readonly input: QueryCommandInput; + protected readonly inputKeyNodes: { + KeyConditions: { + "*": { + AttributeValueList: import("../commands/utils").KeyNodeChildren; + }; + }; + QueryFilter: { + "*": { + AttributeValueList: import("../commands/utils").KeyNodeChildren; + }; + }; + ExclusiveStartKey: import("../commands/utils").KeyNodeChildren; + ExpressionAttributeValues: import("../commands/utils").KeyNodeChildren; + }; + protected readonly outputKeyNodes: { + Items: { + "*": import("../commands/utils").KeyNodeChildren; + }; + LastEvaluatedKey: import("../commands/utils").KeyNodeChildren; + }; + protected readonly clientCommand: __QueryCommand; + readonly middlewareStack: MiddlewareStack; + constructor(input: QueryCommandInput); + /** + * @internal + */ + resolveMiddleware(clientStack: MiddlewareStack, configuration: DynamoDBDocumentClientResolvedConfig, options?: __HttpHandlerOptions): Handler; +} +import type { Condition, QueryCommandInput as __QueryCommandInput, QueryCommandOutput as __QueryCommandOutput } from "@aws-sdk/client-dynamodb"; +import type { NativeAttributeValue } from "@aws-sdk/util-dynamodb"; diff --git a/amplify/functions/downloadDocument/node_modules/@aws-sdk/lib-dynamodb/dist-types/commands/ScanCommand.d.ts b/amplify/functions/downloadDocument/node_modules/@aws-sdk/lib-dynamodb/dist-types/commands/ScanCommand.d.ts new file mode 100644 index 0000000..86b0089 --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@aws-sdk/lib-dynamodb/dist-types/commands/ScanCommand.d.ts @@ -0,0 +1,62 @@ +import { ScanCommand as __ScanCommand } from "@aws-sdk/client-dynamodb"; +import { Command as $Command } from "@smithy/smithy-client"; +import { Handler, HttpHandlerOptions as __HttpHandlerOptions, MiddlewareStack } from "@smithy/types"; +import { DynamoDBDocumentClientCommand } from "../baseCommand/DynamoDBDocumentClientCommand"; +import { 
DynamoDBDocumentClientResolvedConfig, ServiceInputTypes, ServiceOutputTypes } from "../DynamoDBDocumentClient"; +/** + * @public + */ +export { DynamoDBDocumentClientCommand, $Command }; +/** + * @public + */ +export type ScanCommandInput = Omit<__ScanCommandInput, "ScanFilter" | "ExclusiveStartKey" | "ExpressionAttributeValues"> & { + ScanFilter?: Record & { + AttributeValueList?: NativeAttributeValue[] | undefined; + }> | undefined; + ExclusiveStartKey?: Record | undefined; + ExpressionAttributeValues?: Record | undefined; +}; +/** + * @public + */ +export type ScanCommandOutput = Omit<__ScanCommandOutput, "Items" | "LastEvaluatedKey"> & { + Items?: Record[] | undefined; + LastEvaluatedKey?: Record | undefined; +}; +/** + * Accepts native JavaScript types instead of `AttributeValue`s, and calls + * ScanCommand operation from {@link @aws-sdk/client-dynamodb#ScanCommand}. + * + * JavaScript objects passed in as parameters are marshalled into `AttributeValue` shapes + * required by Amazon DynamoDB. Responses from DynamoDB are unmarshalled into plain JavaScript objects. 
+ * + * @public + */ +export declare class ScanCommand extends DynamoDBDocumentClientCommand { + readonly input: ScanCommandInput; + protected readonly inputKeyNodes: { + ScanFilter: { + "*": { + AttributeValueList: import("../commands/utils").KeyNodeChildren; + }; + }; + ExclusiveStartKey: import("../commands/utils").KeyNodeChildren; + ExpressionAttributeValues: import("../commands/utils").KeyNodeChildren; + }; + protected readonly outputKeyNodes: { + Items: { + "*": import("../commands/utils").KeyNodeChildren; + }; + LastEvaluatedKey: import("../commands/utils").KeyNodeChildren; + }; + protected readonly clientCommand: __ScanCommand; + readonly middlewareStack: MiddlewareStack; + constructor(input: ScanCommandInput); + /** + * @internal + */ + resolveMiddleware(clientStack: MiddlewareStack, configuration: DynamoDBDocumentClientResolvedConfig, options?: __HttpHandlerOptions): Handler; +} +import type { Condition, ScanCommandInput as __ScanCommandInput, ScanCommandOutput as __ScanCommandOutput } from "@aws-sdk/client-dynamodb"; +import type { NativeAttributeValue } from "@aws-sdk/util-dynamodb"; diff --git a/amplify/functions/downloadDocument/node_modules/@aws-sdk/lib-dynamodb/dist-types/commands/TransactGetCommand.d.ts b/amplify/functions/downloadDocument/node_modules/@aws-sdk/lib-dynamodb/dist-types/commands/TransactGetCommand.d.ts new file mode 100644 index 0000000..3355455 --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@aws-sdk/lib-dynamodb/dist-types/commands/TransactGetCommand.d.ts @@ -0,0 +1,64 @@ +import { TransactGetItemsCommand as __TransactGetItemsCommand } from "@aws-sdk/client-dynamodb"; +import { Command as $Command } from "@smithy/smithy-client"; +import { Handler, HttpHandlerOptions as __HttpHandlerOptions, MiddlewareStack } from "@smithy/types"; +import { DynamoDBDocumentClientCommand } from "../baseCommand/DynamoDBDocumentClientCommand"; +import { DynamoDBDocumentClientResolvedConfig, ServiceInputTypes, ServiceOutputTypes } 
from "../DynamoDBDocumentClient"; +/** + * @public + */ +export { DynamoDBDocumentClientCommand, $Command }; +/** + * @public + */ +export type TransactGetCommandInput = Omit<__TransactGetItemsCommandInput, "TransactItems"> & { + TransactItems: (Omit & { + Get: (Omit & { + Key: Record | undefined; + }) | undefined; + })[] | undefined; +}; +/** + * @public + */ +export type TransactGetCommandOutput = Omit<__TransactGetItemsCommandOutput, "Responses"> & { + Responses?: (Omit & { + Item?: Record | undefined; + })[] | undefined; +}; +/** + * Accepts native JavaScript types instead of `AttributeValue`s, and calls + * TransactGetItemsCommand operation from {@link @aws-sdk/client-dynamodb#TransactGetItemsCommand}. + * + * JavaScript objects passed in as parameters are marshalled into `AttributeValue` shapes + * required by Amazon DynamoDB. Responses from DynamoDB are unmarshalled into plain JavaScript objects. + * + * @public + */ +export declare class TransactGetCommand extends DynamoDBDocumentClientCommand { + readonly input: TransactGetCommandInput; + protected readonly inputKeyNodes: { + TransactItems: { + "*": { + Get: { + Key: import("../commands/utils").KeyNodeChildren; + }; + }; + }; + }; + protected readonly outputKeyNodes: { + Responses: { + "*": { + Item: import("../commands/utils").KeyNodeChildren; + }; + }; + }; + protected readonly clientCommand: __TransactGetItemsCommand; + readonly middlewareStack: MiddlewareStack; + constructor(input: TransactGetCommandInput); + /** + * @internal + */ + resolveMiddleware(clientStack: MiddlewareStack, configuration: DynamoDBDocumentClientResolvedConfig, options?: __HttpHandlerOptions): Handler; +} +import type { Get, ItemResponse, TransactGetItem, TransactGetItemsCommandInput as __TransactGetItemsCommandInput, TransactGetItemsCommandOutput as __TransactGetItemsCommandOutput } from "@aws-sdk/client-dynamodb"; +import type { NativeAttributeValue } from "@aws-sdk/util-dynamodb"; diff --git 
a/amplify/functions/downloadDocument/node_modules/@aws-sdk/lib-dynamodb/dist-types/commands/TransactWriteCommand.d.ts b/amplify/functions/downloadDocument/node_modules/@aws-sdk/lib-dynamodb/dist-types/commands/TransactWriteCommand.d.ts new file mode 100644 index 0000000..eef56d9 --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@aws-sdk/lib-dynamodb/dist-types/commands/TransactWriteCommand.d.ts @@ -0,0 +1,92 @@ +import { TransactWriteItemsCommand as __TransactWriteItemsCommand } from "@aws-sdk/client-dynamodb"; +import { Command as $Command } from "@smithy/smithy-client"; +import { Handler, HttpHandlerOptions as __HttpHandlerOptions, MiddlewareStack } from "@smithy/types"; +import { DynamoDBDocumentClientCommand } from "../baseCommand/DynamoDBDocumentClientCommand"; +import { DynamoDBDocumentClientResolvedConfig, ServiceInputTypes, ServiceOutputTypes } from "../DynamoDBDocumentClient"; +/** + * @public + */ +export { DynamoDBDocumentClientCommand, $Command }; +/** + * @public + */ +export type TransactWriteCommandInput = Omit<__TransactWriteItemsCommandInput, "TransactItems"> & { + TransactItems: (Omit & { + ConditionCheck?: (Omit & { + Key: Record | undefined; + ExpressionAttributeValues?: Record | undefined; + }) | undefined; + Put?: (Omit & { + Item: Record | undefined; + ExpressionAttributeValues?: Record | undefined; + }) | undefined; + Delete?: (Omit & { + Key: Record | undefined; + ExpressionAttributeValues?: Record | undefined; + }) | undefined; + Update?: (Omit & { + Key: Record | undefined; + ExpressionAttributeValues?: Record | undefined; + }) | undefined; + })[] | undefined; +}; +/** + * @public + */ +export type TransactWriteCommandOutput = Omit<__TransactWriteItemsCommandOutput, "ItemCollectionMetrics"> & { + ItemCollectionMetrics?: Record & { + ItemCollectionKey?: Record | undefined; + })[]> | undefined; +}; +/** + * Accepts native JavaScript types instead of `AttributeValue`s, and calls + * TransactWriteItemsCommand operation from 
{@link @aws-sdk/client-dynamodb#TransactWriteItemsCommand}. + * + * JavaScript objects passed in as parameters are marshalled into `AttributeValue` shapes + * required by Amazon DynamoDB. Responses from DynamoDB are unmarshalled into plain JavaScript objects. + * + * @public + */ +export declare class TransactWriteCommand extends DynamoDBDocumentClientCommand { + readonly input: TransactWriteCommandInput; + protected readonly inputKeyNodes: { + TransactItems: { + "*": { + ConditionCheck: { + Key: import("../commands/utils").KeyNodeChildren; + ExpressionAttributeValues: import("../commands/utils").KeyNodeChildren; + }; + Put: { + Item: import("../commands/utils").KeyNodeChildren; + ExpressionAttributeValues: import("../commands/utils").KeyNodeChildren; + }; + Delete: { + Key: import("../commands/utils").KeyNodeChildren; + ExpressionAttributeValues: import("../commands/utils").KeyNodeChildren; + }; + Update: { + Key: import("../commands/utils").KeyNodeChildren; + ExpressionAttributeValues: import("../commands/utils").KeyNodeChildren; + }; + }; + }; + }; + protected readonly outputKeyNodes: { + ItemCollectionMetrics: { + "*": { + "*": { + ItemCollectionKey: import("../commands/utils").KeyNodeChildren; + }; + }; + }; + }; + protected readonly clientCommand: __TransactWriteItemsCommand; + readonly middlewareStack: MiddlewareStack; + constructor(input: TransactWriteCommandInput); + /** + * @internal + */ + resolveMiddleware(clientStack: MiddlewareStack, configuration: DynamoDBDocumentClientResolvedConfig, options?: __HttpHandlerOptions): Handler; +} +import type { ConditionCheck, Delete, ItemCollectionMetrics, Put, TransactWriteItem, TransactWriteItemsCommandInput as __TransactWriteItemsCommandInput, TransactWriteItemsCommandOutput as __TransactWriteItemsCommandOutput, Update } from "@aws-sdk/client-dynamodb"; +import type { NativeAttributeValue } from "@aws-sdk/util-dynamodb"; diff --git 
a/amplify/functions/downloadDocument/node_modules/@aws-sdk/lib-dynamodb/dist-types/commands/UpdateCommand.d.ts b/amplify/functions/downloadDocument/node_modules/@aws-sdk/lib-dynamodb/dist-types/commands/UpdateCommand.d.ts new file mode 100644 index 0000000..905e751 --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@aws-sdk/lib-dynamodb/dist-types/commands/UpdateCommand.d.ts @@ -0,0 +1,74 @@ +import { UpdateItemCommand as __UpdateItemCommand } from "@aws-sdk/client-dynamodb"; +import { Command as $Command } from "@smithy/smithy-client"; +import { Handler, HttpHandlerOptions as __HttpHandlerOptions, MiddlewareStack } from "@smithy/types"; +import { DynamoDBDocumentClientCommand } from "../baseCommand/DynamoDBDocumentClientCommand"; +import { DynamoDBDocumentClientResolvedConfig, ServiceInputTypes, ServiceOutputTypes } from "../DynamoDBDocumentClient"; +/** + * @public + */ +export { DynamoDBDocumentClientCommand, $Command }; +/** + * @public + */ +export type UpdateCommandInput = Omit<__UpdateItemCommandInput, "Key" | "AttributeUpdates" | "Expected" | "ExpressionAttributeValues"> & { + Key: Record | undefined; + AttributeUpdates?: Record & { + Value?: NativeAttributeValue | undefined; + }> | undefined; + Expected?: Record & { + Value?: NativeAttributeValue | undefined; + AttributeValueList?: NativeAttributeValue[] | undefined; + }> | undefined; + ExpressionAttributeValues?: Record | undefined; +}; +/** + * @public + */ +export type UpdateCommandOutput = Omit<__UpdateItemCommandOutput, "Attributes" | "ItemCollectionMetrics"> & { + Attributes?: Record | undefined; + ItemCollectionMetrics?: (Omit & { + ItemCollectionKey?: Record | undefined; + }) | undefined; +}; +/** + * Accepts native JavaScript types instead of `AttributeValue`s, and calls + * UpdateItemCommand operation from {@link @aws-sdk/client-dynamodb#UpdateItemCommand}. 
+ * + * JavaScript objects passed in as parameters are marshalled into `AttributeValue` shapes + * required by Amazon DynamoDB. Responses from DynamoDB are unmarshalled into plain JavaScript objects. + * + * @public + */ +export declare class UpdateCommand extends DynamoDBDocumentClientCommand { + readonly input: UpdateCommandInput; + protected readonly inputKeyNodes: { + Key: import("../commands/utils").KeyNodeChildren; + AttributeUpdates: { + "*": { + Value: null; + }; + }; + Expected: { + "*": { + Value: null; + AttributeValueList: import("../commands/utils").KeyNodeChildren; + }; + }; + ExpressionAttributeValues: import("../commands/utils").KeyNodeChildren; + }; + protected readonly outputKeyNodes: { + Attributes: import("../commands/utils").KeyNodeChildren; + ItemCollectionMetrics: { + ItemCollectionKey: import("../commands/utils").KeyNodeChildren; + }; + }; + protected readonly clientCommand: __UpdateItemCommand; + readonly middlewareStack: MiddlewareStack; + constructor(input: UpdateCommandInput); + /** + * @internal + */ + resolveMiddleware(clientStack: MiddlewareStack, configuration: DynamoDBDocumentClientResolvedConfig, options?: __HttpHandlerOptions): Handler; +} +import type { AttributeValueUpdate, ExpectedAttributeValue, ItemCollectionMetrics, UpdateItemCommandInput as __UpdateItemCommandInput, UpdateItemCommandOutput as __UpdateItemCommandOutput } from "@aws-sdk/client-dynamodb"; +import type { NativeAttributeValue } from "@aws-sdk/util-dynamodb"; diff --git a/amplify/functions/downloadDocument/node_modules/@aws-sdk/lib-dynamodb/dist-types/commands/index.d.ts b/amplify/functions/downloadDocument/node_modules/@aws-sdk/lib-dynamodb/dist-types/commands/index.d.ts new file mode 100644 index 0000000..49e8a4e --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@aws-sdk/lib-dynamodb/dist-types/commands/index.d.ts @@ -0,0 +1,13 @@ +export * from "./BatchExecuteStatementCommand"; +export * from "./BatchGetCommand"; +export * from 
"./BatchWriteCommand"; +export * from "./DeleteCommand"; +export * from "./ExecuteStatementCommand"; +export * from "./ExecuteTransactionCommand"; +export * from "./GetCommand"; +export * from "./PutCommand"; +export * from "./QueryCommand"; +export * from "./ScanCommand"; +export * from "./TransactGetCommand"; +export * from "./TransactWriteCommand"; +export * from "./UpdateCommand"; diff --git a/amplify/functions/downloadDocument/node_modules/@aws-sdk/lib-dynamodb/dist-types/commands/utils.d.ts b/amplify/functions/downloadDocument/node_modules/@aws-sdk/lib-dynamodb/dist-types/commands/utils.d.ts new file mode 100644 index 0000000..d24d22c --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@aws-sdk/lib-dynamodb/dist-types/commands/utils.d.ts @@ -0,0 +1,33 @@ +import { marshallOptions, unmarshallOptions } from "@aws-sdk/util-dynamodb"; +/** + * @internal + */ +export type KeyNodeSelf = null; +/** + * @internal + */ +export declare const SELF: KeyNodeSelf; +/** + * @internal + */ +export type KeyNodeChildren = Record; +/** + * @internal + */ +export declare const ALL_VALUES: KeyNodeChildren; +/** + * @internal + */ +export declare const ALL_MEMBERS: KeyNodeChildren; +/** + * @internal + */ +export type KeyNodes = KeyNodeSelf | KeyNodeChildren; +/** + * @internal + */ +export declare const marshallInput: (obj: any, keyNodes: KeyNodeChildren, options?: marshallOptions) => any; +/** + * @internal + */ +export declare const unmarshallOutput: (obj: any, keyNodes: KeyNodeChildren, options?: unmarshallOptions) => any; diff --git a/amplify/functions/downloadDocument/node_modules/@aws-sdk/lib-dynamodb/dist-types/index.d.ts b/amplify/functions/downloadDocument/node_modules/@aws-sdk/lib-dynamodb/dist-types/index.d.ts new file mode 100644 index 0000000..957530d --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@aws-sdk/lib-dynamodb/dist-types/index.d.ts @@ -0,0 +1,7 @@ +export * from "./DynamoDBDocument"; +export * from 
"./DynamoDBDocumentClient"; +export * from "./commands"; +export * from "./pagination"; +export { NumberValueImpl as NumberValue } from "@aws-sdk/util-dynamodb"; +export { marshallOptions, unmarshallOptions } from "@aws-sdk/util-dynamodb"; +export { NativeAttributeValue, NativeAttributeBinary, NativeScalarAttributeValue } from "@aws-sdk/util-dynamodb"; diff --git a/amplify/functions/downloadDocument/node_modules/@aws-sdk/lib-dynamodb/dist-types/pagination/Interfaces.d.ts b/amplify/functions/downloadDocument/node_modules/@aws-sdk/lib-dynamodb/dist-types/pagination/Interfaces.d.ts new file mode 100644 index 0000000..f98a7b4 --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@aws-sdk/lib-dynamodb/dist-types/pagination/Interfaces.d.ts @@ -0,0 +1,13 @@ +import { PaginationConfiguration } from "@smithy/types"; +import { DynamoDBDocument } from "../DynamoDBDocument"; +import { DynamoDBDocumentClient } from "../DynamoDBDocumentClient"; +/** + * @public + */ +export { PaginationConfiguration }; +/** + * @public + */ +export interface DynamoDBDocumentPaginationConfiguration extends PaginationConfiguration { + client: DynamoDBDocument | DynamoDBDocumentClient; +} diff --git a/amplify/functions/downloadDocument/node_modules/@aws-sdk/lib-dynamodb/dist-types/pagination/QueryPaginator.d.ts b/amplify/functions/downloadDocument/node_modules/@aws-sdk/lib-dynamodb/dist-types/pagination/QueryPaginator.d.ts new file mode 100644 index 0000000..692d9f9 --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@aws-sdk/lib-dynamodb/dist-types/pagination/QueryPaginator.d.ts @@ -0,0 +1,11 @@ +import { Paginator } from "@smithy/types"; +import { QueryCommandInput, QueryCommandOutput } from "../commands/QueryCommand"; +import { DynamoDBDocumentPaginationConfiguration } from "./Interfaces"; +/** + * @public + */ +export { Paginator }; +/** + * @public + */ +export declare const paginateQuery: (config: DynamoDBDocumentPaginationConfiguration, input: QueryCommandInput, 
...additionalArguments: any) => Paginator; diff --git a/amplify/functions/downloadDocument/node_modules/@aws-sdk/lib-dynamodb/dist-types/pagination/ScanPaginator.d.ts b/amplify/functions/downloadDocument/node_modules/@aws-sdk/lib-dynamodb/dist-types/pagination/ScanPaginator.d.ts new file mode 100644 index 0000000..b47d0a7 --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@aws-sdk/lib-dynamodb/dist-types/pagination/ScanPaginator.d.ts @@ -0,0 +1,11 @@ +import { Paginator } from "@smithy/types"; +import { ScanCommandInput, ScanCommandOutput } from "../commands/ScanCommand"; +import { DynamoDBDocumentPaginationConfiguration } from "./Interfaces"; +/** + * @public + */ +export { Paginator }; +/** + * @public + */ +export declare const paginateScan: (config: DynamoDBDocumentPaginationConfiguration, input: ScanCommandInput, ...additionalArguments: any) => Paginator; diff --git a/amplify/functions/downloadDocument/node_modules/@aws-sdk/lib-dynamodb/dist-types/pagination/index.d.ts b/amplify/functions/downloadDocument/node_modules/@aws-sdk/lib-dynamodb/dist-types/pagination/index.d.ts new file mode 100644 index 0000000..0d9540e --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@aws-sdk/lib-dynamodb/dist-types/pagination/index.d.ts @@ -0,0 +1,3 @@ +export * from "./Interfaces"; +export * from "./QueryPaginator"; +export * from "./ScanPaginator"; diff --git a/amplify/functions/downloadDocument/node_modules/@aws-sdk/lib-dynamodb/dist-types/ts3.4/DynamoDBDocument.d.ts b/amplify/functions/downloadDocument/node_modules/@aws-sdk/lib-dynamodb/dist-types/ts3.4/DynamoDBDocument.d.ts new file mode 100644 index 0000000..fbcd8b7 --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@aws-sdk/lib-dynamodb/dist-types/ts3.4/DynamoDBDocument.d.ts @@ -0,0 +1,221 @@ +import { DynamoDBClient } from "@aws-sdk/client-dynamodb"; +import { HttpHandlerOptions as __HttpHandlerOptions } from "@smithy/types"; +import { + 
BatchExecuteStatementCommandInput, + BatchExecuteStatementCommandOutput, +} from "./commands/BatchExecuteStatementCommand"; +import { + BatchGetCommandInput, + BatchGetCommandOutput, +} from "./commands/BatchGetCommand"; +import { + BatchWriteCommandInput, + BatchWriteCommandOutput, +} from "./commands/BatchWriteCommand"; +import { + DeleteCommandInput, + DeleteCommandOutput, +} from "./commands/DeleteCommand"; +import { + ExecuteStatementCommandInput, + ExecuteStatementCommandOutput, +} from "./commands/ExecuteStatementCommand"; +import { + ExecuteTransactionCommandInput, + ExecuteTransactionCommandOutput, +} from "./commands/ExecuteTransactionCommand"; +import { GetCommandInput, GetCommandOutput } from "./commands/GetCommand"; +import { PutCommandInput, PutCommandOutput } from "./commands/PutCommand"; +import { QueryCommandInput, QueryCommandOutput } from "./commands/QueryCommand"; +import { ScanCommandInput, ScanCommandOutput } from "./commands/ScanCommand"; +import { + TransactGetCommandInput, + TransactGetCommandOutput, +} from "./commands/TransactGetCommand"; +import { + TransactWriteCommandInput, + TransactWriteCommandOutput, +} from "./commands/TransactWriteCommand"; +import { + UpdateCommandInput, + UpdateCommandOutput, +} from "./commands/UpdateCommand"; +import { + DynamoDBDocumentClient, + TranslateConfig, +} from "./DynamoDBDocumentClient"; +export declare class DynamoDBDocument extends DynamoDBDocumentClient { + static from( + client: DynamoDBClient, + translateConfig?: TranslateConfig + ): DynamoDBDocument; + batchExecuteStatement( + args: BatchExecuteStatementCommandInput, + options?: __HttpHandlerOptions + ): Promise; + batchExecuteStatement( + args: BatchExecuteStatementCommandInput, + cb: (err: any, data?: BatchExecuteStatementCommandOutput) => void + ): void; + batchExecuteStatement( + args: BatchExecuteStatementCommandInput, + options: __HttpHandlerOptions, + cb: (err: any, data?: BatchExecuteStatementCommandOutput) => void + ): void; + 
batchGet( + args: BatchGetCommandInput, + options?: __HttpHandlerOptions + ): Promise; + batchGet( + args: BatchGetCommandInput, + cb: (err: any, data?: BatchGetCommandOutput) => void + ): void; + batchGet( + args: BatchGetCommandInput, + options: __HttpHandlerOptions, + cb: (err: any, data?: BatchGetCommandOutput) => void + ): void; + batchWrite( + args: BatchWriteCommandInput, + options?: __HttpHandlerOptions + ): Promise; + batchWrite( + args: BatchWriteCommandInput, + cb: (err: any, data?: BatchWriteCommandOutput) => void + ): void; + batchWrite( + args: BatchWriteCommandInput, + options: __HttpHandlerOptions, + cb: (err: any, data?: BatchWriteCommandOutput) => void + ): void; + delete( + args: DeleteCommandInput, + options?: __HttpHandlerOptions + ): Promise; + delete( + args: DeleteCommandInput, + cb: (err: any, data?: DeleteCommandOutput) => void + ): void; + delete( + args: DeleteCommandInput, + options: __HttpHandlerOptions, + cb: (err: any, data?: DeleteCommandOutput) => void + ): void; + executeStatement( + args: ExecuteStatementCommandInput, + options?: __HttpHandlerOptions + ): Promise; + executeStatement( + args: ExecuteStatementCommandInput, + cb: (err: any, data?: ExecuteStatementCommandOutput) => void + ): void; + executeStatement( + args: ExecuteStatementCommandInput, + options: __HttpHandlerOptions, + cb: (err: any, data?: ExecuteStatementCommandOutput) => void + ): void; + executeTransaction( + args: ExecuteTransactionCommandInput, + options?: __HttpHandlerOptions + ): Promise; + executeTransaction( + args: ExecuteTransactionCommandInput, + cb: (err: any, data?: ExecuteTransactionCommandOutput) => void + ): void; + executeTransaction( + args: ExecuteTransactionCommandInput, + options: __HttpHandlerOptions, + cb: (err: any, data?: ExecuteTransactionCommandOutput) => void + ): void; + get( + args: GetCommandInput, + options?: __HttpHandlerOptions + ): Promise; + get( + args: GetCommandInput, + cb: (err: any, data?: GetCommandOutput) => void + ): 
void; + get( + args: GetCommandInput, + options: __HttpHandlerOptions, + cb: (err: any, data?: GetCommandOutput) => void + ): void; + put( + args: PutCommandInput, + options?: __HttpHandlerOptions + ): Promise; + put( + args: PutCommandInput, + cb: (err: any, data?: PutCommandOutput) => void + ): void; + put( + args: PutCommandInput, + options: __HttpHandlerOptions, + cb: (err: any, data?: PutCommandOutput) => void + ): void; + query( + args: QueryCommandInput, + options?: __HttpHandlerOptions + ): Promise; + query( + args: QueryCommandInput, + cb: (err: any, data?: QueryCommandOutput) => void + ): void; + query( + args: QueryCommandInput, + options: __HttpHandlerOptions, + cb: (err: any, data?: QueryCommandOutput) => void + ): void; + scan( + args: ScanCommandInput, + options?: __HttpHandlerOptions + ): Promise; + scan( + args: ScanCommandInput, + cb: (err: any, data?: ScanCommandOutput) => void + ): void; + scan( + args: ScanCommandInput, + options: __HttpHandlerOptions, + cb: (err: any, data?: ScanCommandOutput) => void + ): void; + transactGet( + args: TransactGetCommandInput, + options?: __HttpHandlerOptions + ): Promise; + transactGet( + args: TransactGetCommandInput, + cb: (err: any, data?: TransactGetCommandOutput) => void + ): void; + transactGet( + args: TransactGetCommandInput, + options: __HttpHandlerOptions, + cb: (err: any, data?: TransactGetCommandOutput) => void + ): void; + transactWrite( + args: TransactWriteCommandInput, + options?: __HttpHandlerOptions + ): Promise; + transactWrite( + args: TransactWriteCommandInput, + cb: (err: any, data?: TransactWriteCommandOutput) => void + ): void; + transactWrite( + args: TransactWriteCommandInput, + options: __HttpHandlerOptions, + cb: (err: any, data?: TransactWriteCommandOutput) => void + ): void; + update( + args: UpdateCommandInput, + options?: __HttpHandlerOptions + ): Promise; + update( + args: UpdateCommandInput, + cb: (err: any, data?: UpdateCommandOutput) => void + ): void; + update( + args: 
UpdateCommandInput, + options: __HttpHandlerOptions, + cb: (err: any, data?: UpdateCommandOutput) => void + ): void; +} diff --git a/amplify/functions/downloadDocument/node_modules/@aws-sdk/lib-dynamodb/dist-types/ts3.4/DynamoDBDocumentClient.d.ts b/amplify/functions/downloadDocument/node_modules/@aws-sdk/lib-dynamodb/dist-types/ts3.4/DynamoDBDocumentClient.d.ts new file mode 100644 index 0000000..67494ec --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@aws-sdk/lib-dynamodb/dist-types/ts3.4/DynamoDBDocumentClient.d.ts @@ -0,0 +1,105 @@ +import { + DynamoDBClient, + DynamoDBClientResolvedConfig, + ServiceInputTypes as __ServiceInputTypes, + ServiceOutputTypes as __ServiceOutputTypes, +} from "@aws-sdk/client-dynamodb"; +import { marshallOptions, unmarshallOptions } from "@aws-sdk/util-dynamodb"; +import { Client as __Client } from "@smithy/smithy-client"; +import { HttpHandlerOptions as __HttpHandlerOptions } from "@smithy/types"; +import { + BatchExecuteStatementCommandInput, + BatchExecuteStatementCommandOutput, +} from "./commands/BatchExecuteStatementCommand"; +import { + BatchGetCommandInput, + BatchGetCommandOutput, +} from "./commands/BatchGetCommand"; +import { + BatchWriteCommandInput, + BatchWriteCommandOutput, +} from "./commands/BatchWriteCommand"; +import { + DeleteCommandInput, + DeleteCommandOutput, +} from "./commands/DeleteCommand"; +import { + ExecuteStatementCommandInput, + ExecuteStatementCommandOutput, +} from "./commands/ExecuteStatementCommand"; +import { + ExecuteTransactionCommandInput, + ExecuteTransactionCommandOutput, +} from "./commands/ExecuteTransactionCommand"; +import { GetCommandInput, GetCommandOutput } from "./commands/GetCommand"; +import { PutCommandInput, PutCommandOutput } from "./commands/PutCommand"; +import { QueryCommandInput, QueryCommandOutput } from "./commands/QueryCommand"; +import { ScanCommandInput, ScanCommandOutput } from "./commands/ScanCommand"; +import { + TransactGetCommandInput, + 
TransactGetCommandOutput, +} from "./commands/TransactGetCommand"; +import { + TransactWriteCommandInput, + TransactWriteCommandOutput, +} from "./commands/TransactWriteCommand"; +import { + UpdateCommandInput, + UpdateCommandOutput, +} from "./commands/UpdateCommand"; +export { __Client }; +export type ServiceInputTypes = + | __ServiceInputTypes + | BatchExecuteStatementCommandInput + | BatchGetCommandInput + | BatchWriteCommandInput + | DeleteCommandInput + | ExecuteStatementCommandInput + | ExecuteTransactionCommandInput + | GetCommandInput + | PutCommandInput + | QueryCommandInput + | ScanCommandInput + | TransactGetCommandInput + | TransactWriteCommandInput + | UpdateCommandInput; +export type ServiceOutputTypes = + | __ServiceOutputTypes + | BatchExecuteStatementCommandOutput + | BatchGetCommandOutput + | BatchWriteCommandOutput + | DeleteCommandOutput + | ExecuteStatementCommandOutput + | ExecuteTransactionCommandOutput + | GetCommandOutput + | PutCommandOutput + | QueryCommandOutput + | ScanCommandOutput + | TransactGetCommandOutput + | TransactWriteCommandOutput + | UpdateCommandOutput; +export type TranslateConfig = { + marshallOptions?: marshallOptions; + unmarshallOptions?: unmarshallOptions; +}; +export type DynamoDBDocumentClientResolvedConfig = + DynamoDBClientResolvedConfig & { + translateConfig?: TranslateConfig; + }; +export declare class DynamoDBDocumentClient extends __Client< + __HttpHandlerOptions, + ServiceInputTypes, + ServiceOutputTypes, + DynamoDBDocumentClientResolvedConfig +> { + readonly config: DynamoDBDocumentClientResolvedConfig; + protected constructor( + client: DynamoDBClient, + translateConfig?: TranslateConfig + ); + static from( + client: DynamoDBClient, + translateConfig?: TranslateConfig + ): DynamoDBDocumentClient; + destroy(): void; +} diff --git a/amplify/functions/downloadDocument/node_modules/@aws-sdk/lib-dynamodb/dist-types/ts3.4/baseCommand/DynamoDBDocumentClientCommand.d.ts 
b/amplify/functions/downloadDocument/node_modules/@aws-sdk/lib-dynamodb/dist-types/ts3.4/baseCommand/DynamoDBDocumentClientCommand.d.ts new file mode 100644 index 0000000..17c787f --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@aws-sdk/lib-dynamodb/dist-types/ts3.4/baseCommand/DynamoDBDocumentClientCommand.d.ts @@ -0,0 +1,30 @@ +import { Command as $Command } from "@smithy/smithy-client"; +import { MiddlewareStack } from "@smithy/types"; +import { KeyNodeChildren } from "../commands/utils"; +import { DynamoDBDocumentClientResolvedConfig } from "../DynamoDBDocumentClient"; +export declare abstract class DynamoDBDocumentClientCommand< + Input extends object, + Output extends object, + BaseInput extends object, + BaseOutput extends object, + ResolvedClientConfiguration +> extends $Command< + Input | BaseInput, + Output | BaseOutput, + ResolvedClientConfiguration +> { + protected abstract readonly inputKeyNodes: KeyNodeChildren; + protected abstract readonly outputKeyNodes: KeyNodeChildren; + protected abstract clientCommand: $Command< + Input | BaseInput, + Output | BaseOutput, + ResolvedClientConfiguration + >; + abstract middlewareStack: MiddlewareStack< + Input | BaseInput, + Output | BaseOutput + >; + protected addMarshallingMiddleware( + configuration: DynamoDBDocumentClientResolvedConfig + ): void; +} diff --git a/amplify/functions/downloadDocument/node_modules/@aws-sdk/lib-dynamodb/dist-types/ts3.4/commands/BatchExecuteStatementCommand.d.ts b/amplify/functions/downloadDocument/node_modules/@aws-sdk/lib-dynamodb/dist-types/ts3.4/commands/BatchExecuteStatementCommand.d.ts new file mode 100644 index 0000000..d8f3dfe --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@aws-sdk/lib-dynamodb/dist-types/ts3.4/commands/BatchExecuteStatementCommand.d.ts @@ -0,0 +1,96 @@ +import { BatchExecuteStatementCommand as __BatchExecuteStatementCommand } from "@aws-sdk/client-dynamodb"; +import { Command as $Command } from 
"@smithy/smithy-client"; +import { + Handler, + HttpHandlerOptions as __HttpHandlerOptions, + MiddlewareStack, +} from "@smithy/types"; +import { DynamoDBDocumentClientCommand } from "../baseCommand/DynamoDBDocumentClientCommand"; +import { + DynamoDBDocumentClientResolvedConfig, + ServiceInputTypes, + ServiceOutputTypes, +} from "../DynamoDBDocumentClient"; +export { DynamoDBDocumentClientCommand, $Command }; +export type BatchExecuteStatementCommandInput = Pick< + __BatchExecuteStatementCommandInput, + Exclude +> & { + Statements: + | (Pick< + BatchStatementRequest, + Exclude + > & { + Parameters?: NativeAttributeValue[] | undefined; + })[] + | undefined; +}; +export type BatchExecuteStatementCommandOutput = Pick< + __BatchExecuteStatementCommandOutput, + Exclude +> & { + Responses?: + | (Pick< + BatchStatementResponse, + Exclude + > & { + Error?: + | (Pick< + BatchStatementError, + Exclude + > & { + Item?: Record | undefined; + }) + | undefined; + Item?: Record | undefined; + })[] + | undefined; +}; +export declare class BatchExecuteStatementCommand extends DynamoDBDocumentClientCommand< + BatchExecuteStatementCommandInput, + BatchExecuteStatementCommandOutput, + __BatchExecuteStatementCommandInput, + __BatchExecuteStatementCommandOutput, + DynamoDBDocumentClientResolvedConfig +> { + readonly input: BatchExecuteStatementCommandInput; + protected readonly inputKeyNodes: { + Statements: { + "*": { + Parameters: import("../commands/utils").KeyNodeChildren; + }; + }; + }; + protected readonly outputKeyNodes: { + Responses: { + "*": { + Error: { + Item: import("../commands/utils").KeyNodeChildren; + }; + Item: import("../commands/utils").KeyNodeChildren; + }; + }; + }; + protected readonly clientCommand: __BatchExecuteStatementCommand; + readonly middlewareStack: MiddlewareStack< + BatchExecuteStatementCommandInput | __BatchExecuteStatementCommandInput, + BatchExecuteStatementCommandOutput | __BatchExecuteStatementCommandOutput + >; + constructor(input: 
BatchExecuteStatementCommandInput); + resolveMiddleware( + clientStack: MiddlewareStack, + configuration: DynamoDBDocumentClientResolvedConfig, + options?: __HttpHandlerOptions + ): Handler< + BatchExecuteStatementCommandInput, + BatchExecuteStatementCommandOutput + >; +} +import { + BatchExecuteStatementCommandInput as __BatchExecuteStatementCommandInput, + BatchExecuteStatementCommandOutput as __BatchExecuteStatementCommandOutput, + BatchStatementError, + BatchStatementRequest, + BatchStatementResponse, +} from "@aws-sdk/client-dynamodb"; +import { NativeAttributeValue } from "@aws-sdk/util-dynamodb"; diff --git a/amplify/functions/downloadDocument/node_modules/@aws-sdk/lib-dynamodb/dist-types/ts3.4/commands/BatchGetCommand.d.ts b/amplify/functions/downloadDocument/node_modules/@aws-sdk/lib-dynamodb/dist-types/ts3.4/commands/BatchGetCommand.d.ts new file mode 100644 index 0000000..6203cf6 --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@aws-sdk/lib-dynamodb/dist-types/ts3.4/commands/BatchGetCommand.d.ts @@ -0,0 +1,92 @@ +import { BatchGetItemCommand as __BatchGetItemCommand } from "@aws-sdk/client-dynamodb"; +import { Command as $Command } from "@smithy/smithy-client"; +import { + Handler, + HttpHandlerOptions as __HttpHandlerOptions, + MiddlewareStack, +} from "@smithy/types"; +import { DynamoDBDocumentClientCommand } from "../baseCommand/DynamoDBDocumentClientCommand"; +import { + DynamoDBDocumentClientResolvedConfig, + ServiceInputTypes, + ServiceOutputTypes, +} from "../DynamoDBDocumentClient"; +export { DynamoDBDocumentClientCommand, $Command }; +export type BatchGetCommandInput = Pick< + __BatchGetItemCommandInput, + Exclude +> & { + RequestItems: + | Record< + string, + Pick> & { + Keys: Record[] | undefined; + } + > + | undefined; +}; +export type BatchGetCommandOutput = Pick< + __BatchGetItemCommandOutput, + Exclude +> & { + Responses?: + | Record[]> + | undefined; + UnprocessedKeys?: + | Record< + string, + Pick> & { + Keys: Record[] 
| undefined; + } + > + | undefined; +}; +export declare class BatchGetCommand extends DynamoDBDocumentClientCommand< + BatchGetCommandInput, + BatchGetCommandOutput, + __BatchGetItemCommandInput, + __BatchGetItemCommandOutput, + DynamoDBDocumentClientResolvedConfig +> { + readonly input: BatchGetCommandInput; + protected readonly inputKeyNodes: { + RequestItems: { + "*": { + Keys: { + "*": import("../commands/utils").KeyNodeChildren; + }; + }; + }; + }; + protected readonly outputKeyNodes: { + Responses: { + "*": { + "*": import("../commands/utils").KeyNodeChildren; + }; + }; + UnprocessedKeys: { + "*": { + Keys: { + "*": import("../commands/utils").KeyNodeChildren; + }; + }; + }; + }; + protected readonly clientCommand: __BatchGetItemCommand; + readonly middlewareStack: MiddlewareStack< + BatchGetCommandInput | __BatchGetItemCommandInput, + BatchGetCommandOutput | __BatchGetItemCommandOutput + >; + constructor(input: BatchGetCommandInput); + resolveMiddleware( + clientStack: MiddlewareStack, + configuration: DynamoDBDocumentClientResolvedConfig, + options?: __HttpHandlerOptions + ): Handler; +} +import { + BatchGetItemCommandInput as __BatchGetItemCommandInput, + BatchGetItemCommandOutput as __BatchGetItemCommandOutput, + KeysAndAttributes, +} from "@aws-sdk/client-dynamodb"; +import { NativeAttributeValue } from "@aws-sdk/util-dynamodb"; diff --git a/amplify/functions/downloadDocument/node_modules/@aws-sdk/lib-dynamodb/dist-types/ts3.4/commands/BatchWriteCommand.d.ts b/amplify/functions/downloadDocument/node_modules/@aws-sdk/lib-dynamodb/dist-types/ts3.4/commands/BatchWriteCommand.d.ts new file mode 100644 index 0000000..a02d177 --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@aws-sdk/lib-dynamodb/dist-types/ts3.4/commands/BatchWriteCommand.d.ts @@ -0,0 +1,142 @@ +import { BatchWriteItemCommand as __BatchWriteItemCommand } from "@aws-sdk/client-dynamodb"; +import { Command as $Command } from "@smithy/smithy-client"; +import { + Handler, + 
HttpHandlerOptions as __HttpHandlerOptions, + MiddlewareStack, +} from "@smithy/types"; +import { DynamoDBDocumentClientCommand } from "../baseCommand/DynamoDBDocumentClientCommand"; +import { + DynamoDBDocumentClientResolvedConfig, + ServiceInputTypes, + ServiceOutputTypes, +} from "../DynamoDBDocumentClient"; +export { DynamoDBDocumentClientCommand, $Command }; +export type BatchWriteCommandInput = Pick< + __BatchWriteItemCommandInput, + Exclude +> & { + RequestItems: + | Record< + string, + (Pick< + WriteRequest, + Exclude + > & { + PutRequest?: + | (Pick> & { + Item: Record | undefined; + }) + | undefined; + DeleteRequest?: + | (Pick> & { + Key: Record | undefined; + }) + | undefined; + })[] + > + | undefined; +}; +export type BatchWriteCommandOutput = Pick< + __BatchWriteItemCommandOutput, + Exclude< + keyof __BatchWriteItemCommandOutput, + "UnprocessedItems" | "ItemCollectionMetrics" + > +> & { + UnprocessedItems?: + | Record< + string, + (Pick< + WriteRequest, + Exclude + > & { + PutRequest?: + | (Pick> & { + Item: Record | undefined; + }) + | undefined; + DeleteRequest?: + | (Pick> & { + Key: Record | undefined; + }) + | undefined; + })[] + > + | undefined; + ItemCollectionMetrics?: + | Record< + string, + (Pick< + ItemCollectionMetrics, + Exclude + > & { + ItemCollectionKey?: Record | undefined; + })[] + > + | undefined; +}; +export declare class BatchWriteCommand extends DynamoDBDocumentClientCommand< + BatchWriteCommandInput, + BatchWriteCommandOutput, + __BatchWriteItemCommandInput, + __BatchWriteItemCommandOutput, + DynamoDBDocumentClientResolvedConfig +> { + readonly input: BatchWriteCommandInput; + protected readonly inputKeyNodes: { + RequestItems: { + "*": { + "*": { + PutRequest: { + Item: import("../commands/utils").KeyNodeChildren; + }; + DeleteRequest: { + Key: import("../commands/utils").KeyNodeChildren; + }; + }; + }; + }; + }; + protected readonly outputKeyNodes: { + UnprocessedItems: { + "*": { + "*": { + PutRequest: { + Item: 
import("../commands/utils").KeyNodeChildren; + }; + DeleteRequest: { + Key: import("../commands/utils").KeyNodeChildren; + }; + }; + }; + }; + ItemCollectionMetrics: { + "*": { + "*": { + ItemCollectionKey: import("../commands/utils").KeyNodeChildren; + }; + }; + }; + }; + protected readonly clientCommand: __BatchWriteItemCommand; + readonly middlewareStack: MiddlewareStack< + BatchWriteCommandInput | __BatchWriteItemCommandInput, + BatchWriteCommandOutput | __BatchWriteItemCommandOutput + >; + constructor(input: BatchWriteCommandInput); + resolveMiddleware( + clientStack: MiddlewareStack, + configuration: DynamoDBDocumentClientResolvedConfig, + options?: __HttpHandlerOptions + ): Handler; +} +import { + BatchWriteItemCommandInput as __BatchWriteItemCommandInput, + BatchWriteItemCommandOutput as __BatchWriteItemCommandOutput, + DeleteRequest, + ItemCollectionMetrics, + PutRequest, + WriteRequest, +} from "@aws-sdk/client-dynamodb"; +import { NativeAttributeValue } from "@aws-sdk/util-dynamodb"; diff --git a/amplify/functions/downloadDocument/node_modules/@aws-sdk/lib-dynamodb/dist-types/ts3.4/commands/DeleteCommand.d.ts b/amplify/functions/downloadDocument/node_modules/@aws-sdk/lib-dynamodb/dist-types/ts3.4/commands/DeleteCommand.d.ts new file mode 100644 index 0000000..9906c10 --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@aws-sdk/lib-dynamodb/dist-types/ts3.4/commands/DeleteCommand.d.ts @@ -0,0 +1,96 @@ +import { DeleteItemCommand as __DeleteItemCommand } from "@aws-sdk/client-dynamodb"; +import { Command as $Command } from "@smithy/smithy-client"; +import { + Handler, + HttpHandlerOptions as __HttpHandlerOptions, + MiddlewareStack, +} from "@smithy/types"; +import { DynamoDBDocumentClientCommand } from "../baseCommand/DynamoDBDocumentClientCommand"; +import { + DynamoDBDocumentClientResolvedConfig, + ServiceInputTypes, + ServiceOutputTypes, +} from "../DynamoDBDocumentClient"; +export { DynamoDBDocumentClientCommand, $Command }; +export 
type DeleteCommandInput = Pick< + __DeleteItemCommandInput, + Exclude< + keyof __DeleteItemCommandInput, + "Key" | "Expected" | "ExpressionAttributeValues" + > +> & { + Key: Record | undefined; + Expected?: + | Record< + string, + Pick< + ExpectedAttributeValue, + Exclude + > & { + Value?: NativeAttributeValue | undefined; + AttributeValueList?: NativeAttributeValue[] | undefined; + } + > + | undefined; + ExpressionAttributeValues?: Record | undefined; +}; +export type DeleteCommandOutput = Pick< + __DeleteItemCommandOutput, + Exclude< + keyof __DeleteItemCommandOutput, + "Attributes" | "ItemCollectionMetrics" + > +> & { + Attributes?: Record | undefined; + ItemCollectionMetrics?: + | (Pick< + ItemCollectionMetrics, + Exclude + > & { + ItemCollectionKey?: Record | undefined; + }) + | undefined; +}; +export declare class DeleteCommand extends DynamoDBDocumentClientCommand< + DeleteCommandInput, + DeleteCommandOutput, + __DeleteItemCommandInput, + __DeleteItemCommandOutput, + DynamoDBDocumentClientResolvedConfig +> { + readonly input: DeleteCommandInput; + protected readonly inputKeyNodes: { + Key: import("../commands/utils").KeyNodeChildren; + Expected: { + "*": { + Value: null; + AttributeValueList: import("../commands/utils").KeyNodeChildren; + }; + }; + ExpressionAttributeValues: import("../commands/utils").KeyNodeChildren; + }; + protected readonly outputKeyNodes: { + Attributes: import("../commands/utils").KeyNodeChildren; + ItemCollectionMetrics: { + ItemCollectionKey: import("../commands/utils").KeyNodeChildren; + }; + }; + protected readonly clientCommand: __DeleteItemCommand; + readonly middlewareStack: MiddlewareStack< + DeleteCommandInput | __DeleteItemCommandInput, + DeleteCommandOutput | __DeleteItemCommandOutput + >; + constructor(input: DeleteCommandInput); + resolveMiddleware( + clientStack: MiddlewareStack, + configuration: DynamoDBDocumentClientResolvedConfig, + options?: __HttpHandlerOptions + ): Handler; +} +import { + DeleteItemCommandInput as 
__DeleteItemCommandInput, + DeleteItemCommandOutput as __DeleteItemCommandOutput, + ExpectedAttributeValue, + ItemCollectionMetrics, +} from "@aws-sdk/client-dynamodb"; +import { NativeAttributeValue } from "@aws-sdk/util-dynamodb"; diff --git a/amplify/functions/downloadDocument/node_modules/@aws-sdk/lib-dynamodb/dist-types/ts3.4/commands/ExecuteStatementCommand.d.ts b/amplify/functions/downloadDocument/node_modules/@aws-sdk/lib-dynamodb/dist-types/ts3.4/commands/ExecuteStatementCommand.d.ts new file mode 100644 index 0000000..938727c --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@aws-sdk/lib-dynamodb/dist-types/ts3.4/commands/ExecuteStatementCommand.d.ts @@ -0,0 +1,61 @@ +import { ExecuteStatementCommand as __ExecuteStatementCommand } from "@aws-sdk/client-dynamodb"; +import { Command as $Command } from "@smithy/smithy-client"; +import { + Handler, + HttpHandlerOptions as __HttpHandlerOptions, + MiddlewareStack, +} from "@smithy/types"; +import { DynamoDBDocumentClientCommand } from "../baseCommand/DynamoDBDocumentClientCommand"; +import { + DynamoDBDocumentClientResolvedConfig, + ServiceInputTypes, + ServiceOutputTypes, +} from "../DynamoDBDocumentClient"; +export { DynamoDBDocumentClientCommand, $Command }; +export type ExecuteStatementCommandInput = Pick< + __ExecuteStatementCommandInput, + Exclude +> & { + Parameters?: NativeAttributeValue[] | undefined; +}; +export type ExecuteStatementCommandOutput = Pick< + __ExecuteStatementCommandOutput, + Exclude +> & { + Items?: Record[] | undefined; + LastEvaluatedKey?: Record | undefined; +}; +export declare class ExecuteStatementCommand extends DynamoDBDocumentClientCommand< + ExecuteStatementCommandInput, + ExecuteStatementCommandOutput, + __ExecuteStatementCommandInput, + __ExecuteStatementCommandOutput, + DynamoDBDocumentClientResolvedConfig +> { + readonly input: ExecuteStatementCommandInput; + protected readonly inputKeyNodes: { + Parameters: import("../commands/utils").KeyNodeChildren; + 
}; + protected readonly outputKeyNodes: { + Items: { + "*": import("../commands/utils").KeyNodeChildren; + }; + LastEvaluatedKey: import("../commands/utils").KeyNodeChildren; + }; + protected readonly clientCommand: __ExecuteStatementCommand; + readonly middlewareStack: MiddlewareStack< + ExecuteStatementCommandInput | __ExecuteStatementCommandInput, + ExecuteStatementCommandOutput | __ExecuteStatementCommandOutput + >; + constructor(input: ExecuteStatementCommandInput); + resolveMiddleware( + clientStack: MiddlewareStack, + configuration: DynamoDBDocumentClientResolvedConfig, + options?: __HttpHandlerOptions + ): Handler; +} +import { + ExecuteStatementCommandInput as __ExecuteStatementCommandInput, + ExecuteStatementCommandOutput as __ExecuteStatementCommandOutput, +} from "@aws-sdk/client-dynamodb"; +import { NativeAttributeValue } from "@aws-sdk/util-dynamodb"; diff --git a/amplify/functions/downloadDocument/node_modules/@aws-sdk/lib-dynamodb/dist-types/ts3.4/commands/ExecuteTransactionCommand.d.ts b/amplify/functions/downloadDocument/node_modules/@aws-sdk/lib-dynamodb/dist-types/ts3.4/commands/ExecuteTransactionCommand.d.ts new file mode 100644 index 0000000..10205be --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@aws-sdk/lib-dynamodb/dist-types/ts3.4/commands/ExecuteTransactionCommand.d.ts @@ -0,0 +1,78 @@ +import { ExecuteTransactionCommand as __ExecuteTransactionCommand } from "@aws-sdk/client-dynamodb"; +import { Command as $Command } from "@smithy/smithy-client"; +import { + Handler, + HttpHandlerOptions as __HttpHandlerOptions, + MiddlewareStack, +} from "@smithy/types"; +import { DynamoDBDocumentClientCommand } from "../baseCommand/DynamoDBDocumentClientCommand"; +import { + DynamoDBDocumentClientResolvedConfig, + ServiceInputTypes, + ServiceOutputTypes, +} from "../DynamoDBDocumentClient"; +export { DynamoDBDocumentClientCommand, $Command }; +export type ExecuteTransactionCommandInput = Pick< + __ExecuteTransactionCommandInput, + 
Exclude +> & { + TransactStatements: + | (Pick< + ParameterizedStatement, + Exclude + > & { + Parameters?: NativeAttributeValue[] | undefined; + })[] + | undefined; +}; +export type ExecuteTransactionCommandOutput = Pick< + __ExecuteTransactionCommandOutput, + Exclude +> & { + Responses?: + | (Pick> & { + Item?: Record | undefined; + })[] + | undefined; +}; +export declare class ExecuteTransactionCommand extends DynamoDBDocumentClientCommand< + ExecuteTransactionCommandInput, + ExecuteTransactionCommandOutput, + __ExecuteTransactionCommandInput, + __ExecuteTransactionCommandOutput, + DynamoDBDocumentClientResolvedConfig +> { + readonly input: ExecuteTransactionCommandInput; + protected readonly inputKeyNodes: { + TransactStatements: { + "*": { + Parameters: import("../commands/utils").KeyNodeChildren; + }; + }; + }; + protected readonly outputKeyNodes: { + Responses: { + "*": { + Item: import("../commands/utils").KeyNodeChildren; + }; + }; + }; + protected readonly clientCommand: __ExecuteTransactionCommand; + readonly middlewareStack: MiddlewareStack< + ExecuteTransactionCommandInput | __ExecuteTransactionCommandInput, + ExecuteTransactionCommandOutput | __ExecuteTransactionCommandOutput + >; + constructor(input: ExecuteTransactionCommandInput); + resolveMiddleware( + clientStack: MiddlewareStack, + configuration: DynamoDBDocumentClientResolvedConfig, + options?: __HttpHandlerOptions + ): Handler; +} +import { + ExecuteTransactionCommandInput as __ExecuteTransactionCommandInput, + ExecuteTransactionCommandOutput as __ExecuteTransactionCommandOutput, + ItemResponse, + ParameterizedStatement, +} from "@aws-sdk/client-dynamodb"; +import { NativeAttributeValue } from "@aws-sdk/util-dynamodb"; diff --git a/amplify/functions/downloadDocument/node_modules/@aws-sdk/lib-dynamodb/dist-types/ts3.4/commands/GetCommand.d.ts b/amplify/functions/downloadDocument/node_modules/@aws-sdk/lib-dynamodb/dist-types/ts3.4/commands/GetCommand.d.ts new file mode 100644 index 
0000000..dba5fd4 --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@aws-sdk/lib-dynamodb/dist-types/ts3.4/commands/GetCommand.d.ts @@ -0,0 +1,57 @@ +import { GetItemCommand as __GetItemCommand } from "@aws-sdk/client-dynamodb"; +import { Command as $Command } from "@smithy/smithy-client"; +import { + Handler, + HttpHandlerOptions as __HttpHandlerOptions, + MiddlewareStack, +} from "@smithy/types"; +import { DynamoDBDocumentClientCommand } from "../baseCommand/DynamoDBDocumentClientCommand"; +import { + DynamoDBDocumentClientResolvedConfig, + ServiceInputTypes, + ServiceOutputTypes, +} from "../DynamoDBDocumentClient"; +export { DynamoDBDocumentClientCommand, $Command }; +export type GetCommandInput = Pick< + __GetItemCommandInput, + Exclude +> & { + Key: Record | undefined; +}; +export type GetCommandOutput = Pick< + __GetItemCommandOutput, + Exclude +> & { + Item?: Record | undefined; +}; +export declare class GetCommand extends DynamoDBDocumentClientCommand< + GetCommandInput, + GetCommandOutput, + __GetItemCommandInput, + __GetItemCommandOutput, + DynamoDBDocumentClientResolvedConfig +> { + readonly input: GetCommandInput; + protected readonly inputKeyNodes: { + Key: import("../commands/utils").KeyNodeChildren; + }; + protected readonly outputKeyNodes: { + Item: import("../commands/utils").KeyNodeChildren; + }; + protected readonly clientCommand: __GetItemCommand; + readonly middlewareStack: MiddlewareStack< + GetCommandInput | __GetItemCommandInput, + GetCommandOutput | __GetItemCommandOutput + >; + constructor(input: GetCommandInput); + resolveMiddleware( + clientStack: MiddlewareStack, + configuration: DynamoDBDocumentClientResolvedConfig, + options?: __HttpHandlerOptions + ): Handler; +} +import { + GetItemCommandInput as __GetItemCommandInput, + GetItemCommandOutput as __GetItemCommandOutput, +} from "@aws-sdk/client-dynamodb"; +import { NativeAttributeValue } from "@aws-sdk/util-dynamodb"; diff --git 
a/amplify/functions/downloadDocument/node_modules/@aws-sdk/lib-dynamodb/dist-types/ts3.4/commands/PutCommand.d.ts b/amplify/functions/downloadDocument/node_modules/@aws-sdk/lib-dynamodb/dist-types/ts3.4/commands/PutCommand.d.ts new file mode 100644 index 0000000..af2ca2c --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@aws-sdk/lib-dynamodb/dist-types/ts3.4/commands/PutCommand.d.ts @@ -0,0 +1,93 @@ +import { PutItemCommand as __PutItemCommand } from "@aws-sdk/client-dynamodb"; +import { Command as $Command } from "@smithy/smithy-client"; +import { + Handler, + HttpHandlerOptions as __HttpHandlerOptions, + MiddlewareStack, +} from "@smithy/types"; +import { DynamoDBDocumentClientCommand } from "../baseCommand/DynamoDBDocumentClientCommand"; +import { + DynamoDBDocumentClientResolvedConfig, + ServiceInputTypes, + ServiceOutputTypes, +} from "../DynamoDBDocumentClient"; +export { DynamoDBDocumentClientCommand, $Command }; +export type PutCommandInput = Pick< + __PutItemCommandInput, + Exclude< + keyof __PutItemCommandInput, + "Item" | "Expected" | "ExpressionAttributeValues" + > +> & { + Item: Record | undefined; + Expected?: + | Record< + string, + Pick< + ExpectedAttributeValue, + Exclude + > & { + Value?: NativeAttributeValue | undefined; + AttributeValueList?: NativeAttributeValue[] | undefined; + } + > + | undefined; + ExpressionAttributeValues?: Record | undefined; +}; +export type PutCommandOutput = Pick< + __PutItemCommandOutput, + Exclude +> & { + Attributes?: Record | undefined; + ItemCollectionMetrics?: + | (Pick< + ItemCollectionMetrics, + Exclude + > & { + ItemCollectionKey?: Record | undefined; + }) + | undefined; +}; +export declare class PutCommand extends DynamoDBDocumentClientCommand< + PutCommandInput, + PutCommandOutput, + __PutItemCommandInput, + __PutItemCommandOutput, + DynamoDBDocumentClientResolvedConfig +> { + readonly input: PutCommandInput; + protected readonly inputKeyNodes: { + Item: 
import("../commands/utils").KeyNodeChildren; + Expected: { + "*": { + Value: null; + AttributeValueList: import("../commands/utils").KeyNodeChildren; + }; + }; + ExpressionAttributeValues: import("../commands/utils").KeyNodeChildren; + }; + protected readonly outputKeyNodes: { + Attributes: import("../commands/utils").KeyNodeChildren; + ItemCollectionMetrics: { + ItemCollectionKey: import("../commands/utils").KeyNodeChildren; + }; + }; + protected readonly clientCommand: __PutItemCommand; + readonly middlewareStack: MiddlewareStack< + PutCommandInput | __PutItemCommandInput, + PutCommandOutput | __PutItemCommandOutput + >; + constructor(input: PutCommandInput); + resolveMiddleware( + clientStack: MiddlewareStack, + configuration: DynamoDBDocumentClientResolvedConfig, + options?: __HttpHandlerOptions + ): Handler; +} +import { + ExpectedAttributeValue, + ItemCollectionMetrics, + PutItemCommandInput as __PutItemCommandInput, + PutItemCommandOutput as __PutItemCommandOutput, +} from "@aws-sdk/client-dynamodb"; +import { NativeAttributeValue } from "@aws-sdk/util-dynamodb"; diff --git a/amplify/functions/downloadDocument/node_modules/@aws-sdk/lib-dynamodb/dist-types/ts3.4/commands/QueryCommand.d.ts b/amplify/functions/downloadDocument/node_modules/@aws-sdk/lib-dynamodb/dist-types/ts3.4/commands/QueryCommand.d.ts new file mode 100644 index 0000000..80c57e9 --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@aws-sdk/lib-dynamodb/dist-types/ts3.4/commands/QueryCommand.d.ts @@ -0,0 +1,96 @@ +import { QueryCommand as __QueryCommand } from "@aws-sdk/client-dynamodb"; +import { Command as $Command } from "@smithy/smithy-client"; +import { + Handler, + HttpHandlerOptions as __HttpHandlerOptions, + MiddlewareStack, +} from "@smithy/types"; +import { DynamoDBDocumentClientCommand } from "../baseCommand/DynamoDBDocumentClientCommand"; +import { + DynamoDBDocumentClientResolvedConfig, + ServiceInputTypes, + ServiceOutputTypes, +} from "../DynamoDBDocumentClient"; 
+export { DynamoDBDocumentClientCommand, $Command }; +export type QueryCommandInput = Pick< + __QueryCommandInput, + Exclude< + keyof __QueryCommandInput, + | "KeyConditions" + | "QueryFilter" + | "ExclusiveStartKey" + | "ExpressionAttributeValues" + > +> & { + KeyConditions?: + | Record< + string, + Pick> & { + AttributeValueList?: NativeAttributeValue[] | undefined; + } + > + | undefined; + QueryFilter?: + | Record< + string, + Pick> & { + AttributeValueList?: NativeAttributeValue[] | undefined; + } + > + | undefined; + ExclusiveStartKey?: Record | undefined; + ExpressionAttributeValues?: Record | undefined; +}; +export type QueryCommandOutput = Pick< + __QueryCommandOutput, + Exclude +> & { + Items?: Record[] | undefined; + LastEvaluatedKey?: Record | undefined; +}; +export declare class QueryCommand extends DynamoDBDocumentClientCommand< + QueryCommandInput, + QueryCommandOutput, + __QueryCommandInput, + __QueryCommandOutput, + DynamoDBDocumentClientResolvedConfig +> { + readonly input: QueryCommandInput; + protected readonly inputKeyNodes: { + KeyConditions: { + "*": { + AttributeValueList: import("../commands/utils").KeyNodeChildren; + }; + }; + QueryFilter: { + "*": { + AttributeValueList: import("../commands/utils").KeyNodeChildren; + }; + }; + ExclusiveStartKey: import("../commands/utils").KeyNodeChildren; + ExpressionAttributeValues: import("../commands/utils").KeyNodeChildren; + }; + protected readonly outputKeyNodes: { + Items: { + "*": import("../commands/utils").KeyNodeChildren; + }; + LastEvaluatedKey: import("../commands/utils").KeyNodeChildren; + }; + protected readonly clientCommand: __QueryCommand; + readonly middlewareStack: MiddlewareStack< + QueryCommandInput | __QueryCommandInput, + QueryCommandOutput | __QueryCommandOutput + >; + constructor(input: QueryCommandInput); + resolveMiddleware( + clientStack: MiddlewareStack, + configuration: DynamoDBDocumentClientResolvedConfig, + options?: __HttpHandlerOptions + ): Handler; +} +import { + 
Condition, + QueryCommandInput as __QueryCommandInput, + QueryCommandOutput as __QueryCommandOutput, +} from "@aws-sdk/client-dynamodb"; +import { NativeAttributeValue } from "@aws-sdk/util-dynamodb"; diff --git a/amplify/functions/downloadDocument/node_modules/@aws-sdk/lib-dynamodb/dist-types/ts3.4/commands/ScanCommand.d.ts b/amplify/functions/downloadDocument/node_modules/@aws-sdk/lib-dynamodb/dist-types/ts3.4/commands/ScanCommand.d.ts new file mode 100644 index 0000000..c2dc93b --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@aws-sdk/lib-dynamodb/dist-types/ts3.4/commands/ScanCommand.d.ts @@ -0,0 +1,80 @@ +import { ScanCommand as __ScanCommand } from "@aws-sdk/client-dynamodb"; +import { Command as $Command } from "@smithy/smithy-client"; +import { + Handler, + HttpHandlerOptions as __HttpHandlerOptions, + MiddlewareStack, +} from "@smithy/types"; +import { DynamoDBDocumentClientCommand } from "../baseCommand/DynamoDBDocumentClientCommand"; +import { + DynamoDBDocumentClientResolvedConfig, + ServiceInputTypes, + ServiceOutputTypes, +} from "../DynamoDBDocumentClient"; +export { DynamoDBDocumentClientCommand, $Command }; +export type ScanCommandInput = Pick< + __ScanCommandInput, + Exclude< + keyof __ScanCommandInput, + "ScanFilter" | "ExclusiveStartKey" | "ExpressionAttributeValues" + > +> & { + ScanFilter?: + | Record< + string, + Pick> & { + AttributeValueList?: NativeAttributeValue[] | undefined; + } + > + | undefined; + ExclusiveStartKey?: Record | undefined; + ExpressionAttributeValues?: Record | undefined; +}; +export type ScanCommandOutput = Pick< + __ScanCommandOutput, + Exclude +> & { + Items?: Record[] | undefined; + LastEvaluatedKey?: Record | undefined; +}; +export declare class ScanCommand extends DynamoDBDocumentClientCommand< + ScanCommandInput, + ScanCommandOutput, + __ScanCommandInput, + __ScanCommandOutput, + DynamoDBDocumentClientResolvedConfig +> { + readonly input: ScanCommandInput; + protected readonly inputKeyNodes: { + 
ScanFilter: { + "*": { + AttributeValueList: import("../commands/utils").KeyNodeChildren; + }; + }; + ExclusiveStartKey: import("../commands/utils").KeyNodeChildren; + ExpressionAttributeValues: import("../commands/utils").KeyNodeChildren; + }; + protected readonly outputKeyNodes: { + Items: { + "*": import("../commands/utils").KeyNodeChildren; + }; + LastEvaluatedKey: import("../commands/utils").KeyNodeChildren; + }; + protected readonly clientCommand: __ScanCommand; + readonly middlewareStack: MiddlewareStack< + ScanCommandInput | __ScanCommandInput, + ScanCommandOutput | __ScanCommandOutput + >; + constructor(input: ScanCommandInput); + resolveMiddleware( + clientStack: MiddlewareStack, + configuration: DynamoDBDocumentClientResolvedConfig, + options?: __HttpHandlerOptions + ): Handler; +} +import { + Condition, + ScanCommandInput as __ScanCommandInput, + ScanCommandOutput as __ScanCommandOutput, +} from "@aws-sdk/client-dynamodb"; +import { NativeAttributeValue } from "@aws-sdk/util-dynamodb"; diff --git a/amplify/functions/downloadDocument/node_modules/@aws-sdk/lib-dynamodb/dist-types/ts3.4/commands/TransactGetCommand.d.ts b/amplify/functions/downloadDocument/node_modules/@aws-sdk/lib-dynamodb/dist-types/ts3.4/commands/TransactGetCommand.d.ts new file mode 100644 index 0000000..6568c81 --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@aws-sdk/lib-dynamodb/dist-types/ts3.4/commands/TransactGetCommand.d.ts @@ -0,0 +1,82 @@ +import { TransactGetItemsCommand as __TransactGetItemsCommand } from "@aws-sdk/client-dynamodb"; +import { Command as $Command } from "@smithy/smithy-client"; +import { + Handler, + HttpHandlerOptions as __HttpHandlerOptions, + MiddlewareStack, +} from "@smithy/types"; +import { DynamoDBDocumentClientCommand } from "../baseCommand/DynamoDBDocumentClientCommand"; +import { + DynamoDBDocumentClientResolvedConfig, + ServiceInputTypes, + ServiceOutputTypes, +} from "../DynamoDBDocumentClient"; +export { 
DynamoDBDocumentClientCommand, $Command }; +export type TransactGetCommandInput = Pick< + __TransactGetItemsCommandInput, + Exclude +> & { + TransactItems: + | (Pick> & { + Get: + | (Pick> & { + Key: Record | undefined; + }) + | undefined; + })[] + | undefined; +}; +export type TransactGetCommandOutput = Pick< + __TransactGetItemsCommandOutput, + Exclude +> & { + Responses?: + | (Pick> & { + Item?: Record | undefined; + })[] + | undefined; +}; +export declare class TransactGetCommand extends DynamoDBDocumentClientCommand< + TransactGetCommandInput, + TransactGetCommandOutput, + __TransactGetItemsCommandInput, + __TransactGetItemsCommandOutput, + DynamoDBDocumentClientResolvedConfig +> { + readonly input: TransactGetCommandInput; + protected readonly inputKeyNodes: { + TransactItems: { + "*": { + Get: { + Key: import("../commands/utils").KeyNodeChildren; + }; + }; + }; + }; + protected readonly outputKeyNodes: { + Responses: { + "*": { + Item: import("../commands/utils").KeyNodeChildren; + }; + }; + }; + protected readonly clientCommand: __TransactGetItemsCommand; + readonly middlewareStack: MiddlewareStack< + TransactGetCommandInput | __TransactGetItemsCommandInput, + TransactGetCommandOutput | __TransactGetItemsCommandOutput + >; + constructor(input: TransactGetCommandInput); + resolveMiddleware( + clientStack: MiddlewareStack, + configuration: DynamoDBDocumentClientResolvedConfig, + options?: __HttpHandlerOptions + ): Handler; +} +import { + Get, + ItemResponse, + TransactGetItem, + TransactGetItemsCommandInput as __TransactGetItemsCommandInput, + TransactGetItemsCommandOutput as __TransactGetItemsCommandOutput, +} from "@aws-sdk/client-dynamodb"; +import { NativeAttributeValue } from "@aws-sdk/util-dynamodb"; diff --git a/amplify/functions/downloadDocument/node_modules/@aws-sdk/lib-dynamodb/dist-types/ts3.4/commands/TransactWriteCommand.d.ts 
b/amplify/functions/downloadDocument/node_modules/@aws-sdk/lib-dynamodb/dist-types/ts3.4/commands/TransactWriteCommand.d.ts new file mode 100644 index 0000000..91ba34e --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@aws-sdk/lib-dynamodb/dist-types/ts3.4/commands/TransactWriteCommand.d.ts @@ -0,0 +1,151 @@ +import { TransactWriteItemsCommand as __TransactWriteItemsCommand } from "@aws-sdk/client-dynamodb"; +import { Command as $Command } from "@smithy/smithy-client"; +import { + Handler, + HttpHandlerOptions as __HttpHandlerOptions, + MiddlewareStack, +} from "@smithy/types"; +import { DynamoDBDocumentClientCommand } from "../baseCommand/DynamoDBDocumentClientCommand"; +import { + DynamoDBDocumentClientResolvedConfig, + ServiceInputTypes, + ServiceOutputTypes, +} from "../DynamoDBDocumentClient"; +export { DynamoDBDocumentClientCommand, $Command }; +export type TransactWriteCommandInput = Pick< + __TransactWriteItemsCommandInput, + Exclude +> & { + TransactItems: + | (Pick< + TransactWriteItem, + Exclude< + keyof TransactWriteItem, + "ConditionCheck" | "Put" | "Delete" | "Update" + > + > & { + ConditionCheck?: + | (Pick< + ConditionCheck, + Exclude + > & { + Key: Record | undefined; + ExpressionAttributeValues?: + | Record + | undefined; + }) + | undefined; + Put?: + | (Pick< + Put, + Exclude + > & { + Item: Record | undefined; + ExpressionAttributeValues?: + | Record + | undefined; + }) + | undefined; + Delete?: + | (Pick< + Delete, + Exclude + > & { + Key: Record | undefined; + ExpressionAttributeValues?: + | Record + | undefined; + }) + | undefined; + Update?: + | (Pick< + Update, + Exclude + > & { + Key: Record | undefined; + ExpressionAttributeValues?: + | Record + | undefined; + }) + | undefined; + })[] + | undefined; +}; +export type TransactWriteCommandOutput = Pick< + __TransactWriteItemsCommandOutput, + Exclude +> & { + ItemCollectionMetrics?: + | Record< + string, + (Pick< + ItemCollectionMetrics, + Exclude + > & { + 
ItemCollectionKey?: Record | undefined; + })[] + > + | undefined; +}; +export declare class TransactWriteCommand extends DynamoDBDocumentClientCommand< + TransactWriteCommandInput, + TransactWriteCommandOutput, + __TransactWriteItemsCommandInput, + __TransactWriteItemsCommandOutput, + DynamoDBDocumentClientResolvedConfig +> { + readonly input: TransactWriteCommandInput; + protected readonly inputKeyNodes: { + TransactItems: { + "*": { + ConditionCheck: { + Key: import("../commands/utils").KeyNodeChildren; + ExpressionAttributeValues: import("../commands/utils").KeyNodeChildren; + }; + Put: { + Item: import("../commands/utils").KeyNodeChildren; + ExpressionAttributeValues: import("../commands/utils").KeyNodeChildren; + }; + Delete: { + Key: import("../commands/utils").KeyNodeChildren; + ExpressionAttributeValues: import("../commands/utils").KeyNodeChildren; + }; + Update: { + Key: import("../commands/utils").KeyNodeChildren; + ExpressionAttributeValues: import("../commands/utils").KeyNodeChildren; + }; + }; + }; + }; + protected readonly outputKeyNodes: { + ItemCollectionMetrics: { + "*": { + "*": { + ItemCollectionKey: import("../commands/utils").KeyNodeChildren; + }; + }; + }; + }; + protected readonly clientCommand: __TransactWriteItemsCommand; + readonly middlewareStack: MiddlewareStack< + TransactWriteCommandInput | __TransactWriteItemsCommandInput, + TransactWriteCommandOutput | __TransactWriteItemsCommandOutput + >; + constructor(input: TransactWriteCommandInput); + resolveMiddleware( + clientStack: MiddlewareStack, + configuration: DynamoDBDocumentClientResolvedConfig, + options?: __HttpHandlerOptions + ): Handler; +} +import { + ConditionCheck, + Delete, + ItemCollectionMetrics, + Put, + TransactWriteItem, + TransactWriteItemsCommandInput as __TransactWriteItemsCommandInput, + TransactWriteItemsCommandOutput as __TransactWriteItemsCommandOutput, + Update, +} from "@aws-sdk/client-dynamodb"; +import { NativeAttributeValue } from "@aws-sdk/util-dynamodb"; 
diff --git a/amplify/functions/downloadDocument/node_modules/@aws-sdk/lib-dynamodb/dist-types/ts3.4/commands/UpdateCommand.d.ts b/amplify/functions/downloadDocument/node_modules/@aws-sdk/lib-dynamodb/dist-types/ts3.4/commands/UpdateCommand.d.ts new file mode 100644 index 0000000..7c97b97 --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@aws-sdk/lib-dynamodb/dist-types/ts3.4/commands/UpdateCommand.d.ts @@ -0,0 +1,113 @@ +import { UpdateItemCommand as __UpdateItemCommand } from "@aws-sdk/client-dynamodb"; +import { Command as $Command } from "@smithy/smithy-client"; +import { + Handler, + HttpHandlerOptions as __HttpHandlerOptions, + MiddlewareStack, +} from "@smithy/types"; +import { DynamoDBDocumentClientCommand } from "../baseCommand/DynamoDBDocumentClientCommand"; +import { + DynamoDBDocumentClientResolvedConfig, + ServiceInputTypes, + ServiceOutputTypes, +} from "../DynamoDBDocumentClient"; +export { DynamoDBDocumentClientCommand, $Command }; +export type UpdateCommandInput = Pick< + __UpdateItemCommandInput, + Exclude< + keyof __UpdateItemCommandInput, + "Key" | "AttributeUpdates" | "Expected" | "ExpressionAttributeValues" + > +> & { + Key: Record | undefined; + AttributeUpdates?: + | Record< + string, + Pick< + AttributeValueUpdate, + Exclude + > & { + Value?: NativeAttributeValue | undefined; + } + > + | undefined; + Expected?: + | Record< + string, + Pick< + ExpectedAttributeValue, + Exclude + > & { + Value?: NativeAttributeValue | undefined; + AttributeValueList?: NativeAttributeValue[] | undefined; + } + > + | undefined; + ExpressionAttributeValues?: Record | undefined; +}; +export type UpdateCommandOutput = Pick< + __UpdateItemCommandOutput, + Exclude< + keyof __UpdateItemCommandOutput, + "Attributes" | "ItemCollectionMetrics" + > +> & { + Attributes?: Record | undefined; + ItemCollectionMetrics?: + | (Pick< + ItemCollectionMetrics, + Exclude + > & { + ItemCollectionKey?: Record | undefined; + }) + | undefined; +}; +export declare class 
UpdateCommand extends DynamoDBDocumentClientCommand< + UpdateCommandInput, + UpdateCommandOutput, + __UpdateItemCommandInput, + __UpdateItemCommandOutput, + DynamoDBDocumentClientResolvedConfig +> { + readonly input: UpdateCommandInput; + protected readonly inputKeyNodes: { + Key: import("../commands/utils").KeyNodeChildren; + AttributeUpdates: { + "*": { + Value: null; + }; + }; + Expected: { + "*": { + Value: null; + AttributeValueList: import("../commands/utils").KeyNodeChildren; + }; + }; + ExpressionAttributeValues: import("../commands/utils").KeyNodeChildren; + }; + protected readonly outputKeyNodes: { + Attributes: import("../commands/utils").KeyNodeChildren; + ItemCollectionMetrics: { + ItemCollectionKey: import("../commands/utils").KeyNodeChildren; + }; + }; + protected readonly clientCommand: __UpdateItemCommand; + readonly middlewareStack: MiddlewareStack< + UpdateCommandInput | __UpdateItemCommandInput, + UpdateCommandOutput | __UpdateItemCommandOutput + >; + constructor(input: UpdateCommandInput); + resolveMiddleware( + clientStack: MiddlewareStack, + configuration: DynamoDBDocumentClientResolvedConfig, + options?: __HttpHandlerOptions + ): Handler; +} +import { + AttributeValueUpdate, + ExpectedAttributeValue, + ItemCollectionMetrics, + UpdateItemCommandInput as __UpdateItemCommandInput, + UpdateItemCommandOutput as __UpdateItemCommandOutput, +} from "@aws-sdk/client-dynamodb"; +import { NativeAttributeValue } from "@aws-sdk/util-dynamodb"; diff --git a/amplify/functions/downloadDocument/node_modules/@aws-sdk/lib-dynamodb/dist-types/ts3.4/commands/index.d.ts b/amplify/functions/downloadDocument/node_modules/@aws-sdk/lib-dynamodb/dist-types/ts3.4/commands/index.d.ts new file mode 100644 index 0000000..49e8a4e --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@aws-sdk/lib-dynamodb/dist-types/ts3.4/commands/index.d.ts @@ -0,0 +1,13 @@ +export * from "./BatchExecuteStatementCommand"; +export * from "./BatchGetCommand"; +export * from 
"./BatchWriteCommand"; +export * from "./DeleteCommand"; +export * from "./ExecuteStatementCommand"; +export * from "./ExecuteTransactionCommand"; +export * from "./GetCommand"; +export * from "./PutCommand"; +export * from "./QueryCommand"; +export * from "./ScanCommand"; +export * from "./TransactGetCommand"; +export * from "./TransactWriteCommand"; +export * from "./UpdateCommand"; diff --git a/amplify/functions/downloadDocument/node_modules/@aws-sdk/lib-dynamodb/dist-types/ts3.4/commands/utils.d.ts b/amplify/functions/downloadDocument/node_modules/@aws-sdk/lib-dynamodb/dist-types/ts3.4/commands/utils.d.ts new file mode 100644 index 0000000..c0473c3 --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@aws-sdk/lib-dynamodb/dist-types/ts3.4/commands/utils.d.ts @@ -0,0 +1,17 @@ +import { marshallOptions, unmarshallOptions } from "@aws-sdk/util-dynamodb"; +export type KeyNodeSelf = null; +export declare const SELF: KeyNodeSelf; +export type KeyNodeChildren = Record; +export declare const ALL_VALUES: KeyNodeChildren; +export declare const ALL_MEMBERS: KeyNodeChildren; +export type KeyNodes = KeyNodeSelf | KeyNodeChildren; +export declare const marshallInput: ( + obj: any, + keyNodes: KeyNodeChildren, + options?: marshallOptions +) => any; +export declare const unmarshallOutput: ( + obj: any, + keyNodes: KeyNodeChildren, + options?: unmarshallOptions +) => any; diff --git a/amplify/functions/downloadDocument/node_modules/@aws-sdk/lib-dynamodb/dist-types/ts3.4/index.d.ts b/amplify/functions/downloadDocument/node_modules/@aws-sdk/lib-dynamodb/dist-types/ts3.4/index.d.ts new file mode 100644 index 0000000..ab7a55d --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@aws-sdk/lib-dynamodb/dist-types/ts3.4/index.d.ts @@ -0,0 +1,11 @@ +export * from "./DynamoDBDocument"; +export * from "./DynamoDBDocumentClient"; +export * from "./commands"; +export * from "./pagination"; +export { NumberValueImpl as NumberValue } from 
"@aws-sdk/util-dynamodb"; +export { marshallOptions, unmarshallOptions } from "@aws-sdk/util-dynamodb"; +export { + NativeAttributeValue, + NativeAttributeBinary, + NativeScalarAttributeValue, +} from "@aws-sdk/util-dynamodb"; diff --git a/amplify/functions/downloadDocument/node_modules/@aws-sdk/lib-dynamodb/dist-types/ts3.4/pagination/Interfaces.d.ts b/amplify/functions/downloadDocument/node_modules/@aws-sdk/lib-dynamodb/dist-types/ts3.4/pagination/Interfaces.d.ts new file mode 100644 index 0000000..5bd45d2 --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@aws-sdk/lib-dynamodb/dist-types/ts3.4/pagination/Interfaces.d.ts @@ -0,0 +1,8 @@ +import { PaginationConfiguration } from "@smithy/types"; +import { DynamoDBDocument } from "../DynamoDBDocument"; +import { DynamoDBDocumentClient } from "../DynamoDBDocumentClient"; +export { PaginationConfiguration }; +export interface DynamoDBDocumentPaginationConfiguration + extends PaginationConfiguration { + client: DynamoDBDocument | DynamoDBDocumentClient; +} diff --git a/amplify/functions/downloadDocument/node_modules/@aws-sdk/lib-dynamodb/dist-types/ts3.4/pagination/QueryPaginator.d.ts b/amplify/functions/downloadDocument/node_modules/@aws-sdk/lib-dynamodb/dist-types/ts3.4/pagination/QueryPaginator.d.ts new file mode 100644 index 0000000..93d4aff --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@aws-sdk/lib-dynamodb/dist-types/ts3.4/pagination/QueryPaginator.d.ts @@ -0,0 +1,12 @@ +import { Paginator } from "@smithy/types"; +import { + QueryCommandInput, + QueryCommandOutput, +} from "../commands/QueryCommand"; +import { DynamoDBDocumentPaginationConfiguration } from "./Interfaces"; +export { Paginator }; +export declare const paginateQuery: ( + config: DynamoDBDocumentPaginationConfiguration, + input: QueryCommandInput, + ...additionalArguments: any +) => Paginator; diff --git 
a/amplify/functions/downloadDocument/node_modules/@aws-sdk/lib-dynamodb/dist-types/ts3.4/pagination/ScanPaginator.d.ts b/amplify/functions/downloadDocument/node_modules/@aws-sdk/lib-dynamodb/dist-types/ts3.4/pagination/ScanPaginator.d.ts new file mode 100644 index 0000000..0a2c6d7 --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@aws-sdk/lib-dynamodb/dist-types/ts3.4/pagination/ScanPaginator.d.ts @@ -0,0 +1,9 @@ +import { Paginator } from "@smithy/types"; +import { ScanCommandInput, ScanCommandOutput } from "../commands/ScanCommand"; +import { DynamoDBDocumentPaginationConfiguration } from "./Interfaces"; +export { Paginator }; +export declare const paginateScan: ( + config: DynamoDBDocumentPaginationConfiguration, + input: ScanCommandInput, + ...additionalArguments: any +) => Paginator; diff --git a/amplify/functions/downloadDocument/node_modules/@aws-sdk/lib-dynamodb/dist-types/ts3.4/pagination/index.d.ts b/amplify/functions/downloadDocument/node_modules/@aws-sdk/lib-dynamodb/dist-types/ts3.4/pagination/index.d.ts new file mode 100644 index 0000000..0d9540e --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@aws-sdk/lib-dynamodb/dist-types/ts3.4/pagination/index.d.ts @@ -0,0 +1,3 @@ +export * from "./Interfaces"; +export * from "./QueryPaginator"; +export * from "./ScanPaginator"; diff --git a/amplify/functions/downloadDocument/node_modules/@aws-sdk/lib-dynamodb/package.json b/amplify/functions/downloadDocument/node_modules/@aws-sdk/lib-dynamodb/package.json new file mode 100644 index 0000000..0b72a76 --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@aws-sdk/lib-dynamodb/package.json @@ -0,0 +1,66 @@ +{ + "name": "@aws-sdk/lib-dynamodb", + "version": "3.803.0", + "description": "The document client simplifies working with items in Amazon DynamoDB by abstracting away the notion of attribute values.", + "main": "./dist-cjs/index.js", + "module": "./dist-es/index.js", + "types": "./dist-types/index.d.ts", + 
"scripts": { + "build": "concurrently 'yarn:build:cjs' 'yarn:build:es' 'yarn:build:types'", + "build:cjs": "node ../../scripts/compilation/inline lib-dynamodb", + "build:es": "tsc -p tsconfig.es.json", + "build:include:deps": "lerna run --scope $npm_package_name --include-dependencies build", + "build:types": "tsc -p tsconfig.types.json", + "build:types:downlevel": "downlevel-dts dist-types dist-types/ts3.4", + "clean": "rimraf ./dist-* && rimraf *.tsbuildinfo", + "extract:docs": "api-extractor run --local", + "test": "yarn g:vitest run", + "test:e2e": "yarn g:vitest run -c vitest.config.e2e.ts --mode development", + "test:watch": "yarn g:vitest watch", + "test:e2e:watch": "yarn g:vitest watch -c vitest.config.e2e.ts" + }, + "engines": { + "node": ">=18.0.0" + }, + "author": { + "name": "AWS SDK for JavaScript Team", + "url": "https://aws.amazon.com/javascript/" + }, + "license": "Apache-2.0", + "dependencies": { + "@aws-sdk/core": "3.799.0", + "@aws-sdk/util-dynamodb": "3.803.0", + "@smithy/core": "^3.3.0", + "@smithy/smithy-client": "^4.2.1", + "@smithy/types": "^4.2.0", + "tslib": "^2.6.2" + }, + "peerDependencies": { + "@aws-sdk/client-dynamodb": "^3.803.0" + }, + "devDependencies": { + "@aws-sdk/client-dynamodb": "3.803.0", + "@tsconfig/recommended": "1.0.1", + "@types/node": "^18.19.69", + "concurrently": "7.0.0", + "downlevel-dts": "0.10.1", + "rimraf": "3.0.2", + "typescript": "~5.2.2" + }, + "typesVersions": { + "<4.0": { + "dist-types/*": [ + "dist-types/ts3.4/*" + ] + } + }, + "files": [ + "dist-*/**" + ], + "homepage": "https://github.com/aws/aws-sdk-js-v3/tree/main/lib/lib-dynamodb", + "repository": { + "type": "git", + "url": "https://github.com/aws/aws-sdk-js-v3.git", + "directory": "lib/lib-dynamodb" + } +} diff --git a/amplify/functions/downloadDocument/node_modules/@aws-sdk/middleware-endpoint-discovery/LICENSE b/amplify/functions/downloadDocument/node_modules/@aws-sdk/middleware-endpoint-discovery/LICENSE new file mode 100644 index 
0000000..dd65ae0 --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@aws-sdk/middleware-endpoint-discovery/LICENSE @@ -0,0 +1,201 @@ + Apache License + Version 2.0, January 2004 + http://www.apache.org/licenses/ + + TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION + + 1. Definitions. + + "License" shall mean the terms and conditions for use, reproduction, + and distribution as defined by Sections 1 through 9 of this document. + + "Licensor" shall mean the copyright owner or entity authorized by + the copyright owner that is granting the License. + + "Legal Entity" shall mean the union of the acting entity and all + other entities that control, are controlled by, or are under common + control with that entity. For the purposes of this definition, + "control" means (i) the power, direct or indirect, to cause the + direction or management of such entity, whether by contract or + otherwise, or (ii) ownership of fifty percent (50%) or more of the + outstanding shares, or (iii) beneficial ownership of such entity. + + "You" (or "Your") shall mean an individual or Legal Entity + exercising permissions granted by this License. + + "Source" form shall mean the preferred form for making modifications, + including but not limited to software source code, documentation + source, and configuration files. + + "Object" form shall mean any form resulting from mechanical + transformation or translation of a Source form, including but + not limited to compiled object code, generated documentation, + and conversions to other media types. + + "Work" shall mean the work of authorship, whether in Source or + Object form, made available under the License, as indicated by a + copyright notice that is included in or attached to the work + (an example is provided in the Appendix below). 
+ + "Derivative Works" shall mean any work, whether in Source or Object + form, that is based on (or derived from) the Work and for which the + editorial revisions, annotations, elaborations, or other modifications + represent, as a whole, an original work of authorship. For the purposes + of this License, Derivative Works shall not include works that remain + separable from, or merely link (or bind by name) to the interfaces of, + the Work and Derivative Works thereof. + + "Contribution" shall mean any work of authorship, including + the original version of the Work and any modifications or additions + to that Work or Derivative Works thereof, that is intentionally + submitted to Licensor for inclusion in the Work by the copyright owner + or by an individual or Legal Entity authorized to submit on behalf of + the copyright owner. For the purposes of this definition, "submitted" + means any form of electronic, verbal, or written communication sent + to the Licensor or its representatives, including but not limited to + communication on electronic mailing lists, source code control systems, + and issue tracking systems that are managed by, or on behalf of, the + Licensor for the purpose of discussing and improving the Work, but + excluding communication that is conspicuously marked or otherwise + designated in writing by the copyright owner as "Not a Contribution." + + "Contributor" shall mean Licensor and any individual or Legal Entity + on behalf of whom a Contribution has been received by Licensor and + subsequently incorporated within the Work. + + 2. Grant of Copyright License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + copyright license to reproduce, prepare Derivative Works of, + publicly display, publicly perform, sublicense, and distribute the + Work and such Derivative Works in Source or Object form. + + 3. 
Grant of Patent License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + (except as stated in this section) patent license to make, have made, + use, offer to sell, sell, import, and otherwise transfer the Work, + where such license applies only to those patent claims licensable + by such Contributor that are necessarily infringed by their + Contribution(s) alone or by combination of their Contribution(s) + with the Work to which such Contribution(s) was submitted. If You + institute patent litigation against any entity (including a + cross-claim or counterclaim in a lawsuit) alleging that the Work + or a Contribution incorporated within the Work constitutes direct + or contributory patent infringement, then any patent licenses + granted to You under this License for that Work shall terminate + as of the date such litigation is filed. + + 4. Redistribution. You may reproduce and distribute copies of the + Work or Derivative Works thereof in any medium, with or without + modifications, and in Source or Object form, provided that You + meet the following conditions: + + (a) You must give any other recipients of the Work or + Derivative Works a copy of this License; and + + (b) You must cause any modified files to carry prominent notices + stating that You changed the files; and + + (c) You must retain, in the Source form of any Derivative Works + that You distribute, all copyright, patent, trademark, and + attribution notices from the Source form of the Work, + excluding those notices that do not pertain to any part of + the Derivative Works; and + + (d) If the Work includes a "NOTICE" text file as part of its + distribution, then any Derivative Works that You distribute must + include a readable copy of the attribution notices contained + within such NOTICE file, excluding those notices that do not + pertain to any part of the Derivative 
Works, in at least one + of the following places: within a NOTICE text file distributed + as part of the Derivative Works; within the Source form or + documentation, if provided along with the Derivative Works; or, + within a display generated by the Derivative Works, if and + wherever such third-party notices normally appear. The contents + of the NOTICE file are for informational purposes only and + do not modify the License. You may add Your own attribution + notices within Derivative Works that You distribute, alongside + or as an addendum to the NOTICE text from the Work, provided + that such additional attribution notices cannot be construed + as modifying the License. + + You may add Your own copyright statement to Your modifications and + may provide additional or different license terms and conditions + for use, reproduction, or distribution of Your modifications, or + for any such Derivative Works as a whole, provided Your use, + reproduction, and distribution of the Work otherwise complies with + the conditions stated in this License. + + 5. Submission of Contributions. Unless You explicitly state otherwise, + any Contribution intentionally submitted for inclusion in the Work + by You to the Licensor shall be under the terms and conditions of + this License, without any additional terms or conditions. + Notwithstanding the above, nothing herein shall supersede or modify + the terms of any separate license agreement you may have executed + with Licensor regarding such Contributions. + + 6. Trademarks. This License does not grant permission to use the trade + names, trademarks, service marks, or product names of the Licensor, + except as required for reasonable and customary use in describing the + origin of the Work and reproducing the content of the NOTICE file. + + 7. Disclaimer of Warranty. 
Unless required by applicable law or + agreed to in writing, Licensor provides the Work (and each + Contributor provides its Contributions) on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or + implied, including, without limitation, any warranties or conditions + of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A + PARTICULAR PURPOSE. You are solely responsible for determining the + appropriateness of using or redistributing the Work and assume any + risks associated with Your exercise of permissions under this License. + + 8. Limitation of Liability. In no event and under no legal theory, + whether in tort (including negligence), contract, or otherwise, + unless required by applicable law (such as deliberate and grossly + negligent acts) or agreed to in writing, shall any Contributor be + liable to You for damages, including any direct, indirect, special, + incidental, or consequential damages of any character arising as a + result of this License or out of the use or inability to use the + Work (including but not limited to damages for loss of goodwill, + work stoppage, computer failure or malfunction, or any and all + other commercial damages or losses), even if such Contributor + has been advised of the possibility of such damages. + + 9. Accepting Warranty or Additional Liability. While redistributing + the Work or Derivative Works thereof, You may choose to offer, + and charge a fee for, acceptance of support, warranty, indemnity, + or other liability obligations and/or rights consistent with this + License. However, in accepting such obligations, You may act only + on Your own behalf and on Your sole responsibility, not on behalf + of any other Contributor, and only if You agree to indemnify, + defend, and hold each Contributor harmless for any liability + incurred by, or claims asserted against, such Contributor by reason + of your accepting any such warranty or additional liability. 
+ + END OF TERMS AND CONDITIONS + + APPENDIX: How to apply the Apache License to your work. + + To apply the Apache License to your work, attach the following + boilerplate notice, with the fields enclosed by brackets "{}" + replaced with your own identifying information. (Don't include + the brackets!) The text should be enclosed in the appropriate + comment syntax for the file format. We also recommend that a + file or class name and description of purpose be included on the + same "printed page" as the copyright notice for easier + identification within third-party archives. + + Copyright 2018-2020 Amazon.com, Inc. or its affiliates. All Rights Reserved. + + Licensed under the Apache License, Version 2.0 (the "License"); + you may not use this file except in compliance with the License. + You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + + Unless required by applicable law or agreed to in writing, software + distributed under the License is distributed on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + See the License for the specific language governing permissions and + limitations under the License. 
diff --git a/amplify/functions/downloadDocument/node_modules/@aws-sdk/middleware-endpoint-discovery/README.md b/amplify/functions/downloadDocument/node_modules/@aws-sdk/middleware-endpoint-discovery/README.md new file mode 100644 index 0000000..4a50903 --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@aws-sdk/middleware-endpoint-discovery/README.md @@ -0,0 +1,4 @@ +# @aws-sdk/middleware-endpoint-discovery + +[![NPM version](https://img.shields.io/npm/v/@aws-sdk/middleware-endpoint-discovery/latest.svg)](https://www.npmjs.com/package/@aws-sdk/middleware-endpoint-discovery) +[![NPM downloads](https://img.shields.io/npm/dm/@aws-sdk/middleware-endpoint-discovery.svg)](https://www.npmjs.com/package/@aws-sdk/middleware-endpoint-discovery) diff --git a/amplify/functions/downloadDocument/node_modules/@aws-sdk/middleware-endpoint-discovery/dist-cjs/index.js b/amplify/functions/downloadDocument/node_modules/@aws-sdk/middleware-endpoint-discovery/dist-cjs/index.js new file mode 100644 index 0000000..f534fd5 --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@aws-sdk/middleware-endpoint-discovery/dist-cjs/index.js @@ -0,0 +1,229 @@ +"use strict"; +var __defProp = Object.defineProperty; +var __getOwnPropDesc = Object.getOwnPropertyDescriptor; +var __getOwnPropNames = Object.getOwnPropertyNames; +var __hasOwnProp = Object.prototype.hasOwnProperty; +var __name = (target, value) => __defProp(target, "name", { value, configurable: true }); +var __export = (target, all) => { + for (var name in all) + __defProp(target, name, { get: all[name], enumerable: true }); +}; +var __copyProps = (to, from, except, desc) => { + if (from && typeof from === "object" || typeof from === "function") { + for (let key of __getOwnPropNames(from)) + if (!__hasOwnProp.call(to, key) && key !== except) + __defProp(to, key, { get: () => from[key], enumerable: !(desc = __getOwnPropDesc(from, key)) || desc.enumerable }); + } + return to; +}; +var __toCommonJS = (mod) => 
__copyProps(__defProp({}, "__esModule", { value: true }), mod); + +// src/index.ts +var index_exports = {}; +__export(index_exports, { + NODE_ENDPOINT_DISCOVERY_CONFIG_OPTIONS: () => NODE_ENDPOINT_DISCOVERY_CONFIG_OPTIONS, + endpointDiscoveryMiddlewareOptions: () => endpointDiscoveryMiddlewareOptions, + getEndpointDiscoveryOptionalPlugin: () => getEndpointDiscoveryOptionalPlugin, + getEndpointDiscoveryPlugin: () => getEndpointDiscoveryPlugin, + getEndpointDiscoveryRequiredPlugin: () => getEndpointDiscoveryRequiredPlugin, + resolveEndpointDiscoveryConfig: () => resolveEndpointDiscoveryConfig +}); +module.exports = __toCommonJS(index_exports); + +// src/configurations.ts +var ENV_ENDPOINT_DISCOVERY = ["AWS_ENABLE_ENDPOINT_DISCOVERY", "AWS_ENDPOINT_DISCOVERY_ENABLED"]; +var CONFIG_ENDPOINT_DISCOVERY = "endpoint_discovery_enabled"; +var isFalsy = /* @__PURE__ */ __name((value) => ["false", "0"].indexOf(value) >= 0, "isFalsy"); +var NODE_ENDPOINT_DISCOVERY_CONFIG_OPTIONS = { + environmentVariableSelector: /* @__PURE__ */ __name((env) => { + for (let i = 0; i < ENV_ENDPOINT_DISCOVERY.length; i++) { + const envKey = ENV_ENDPOINT_DISCOVERY[i]; + if (envKey in env) { + const value = env[envKey]; + if (value === "") { + throw Error(`Environment variable ${envKey} can't be empty of undefined, got "${value}"`); + } + return !isFalsy(value); + } + } + }, "environmentVariableSelector"), + configFileSelector: /* @__PURE__ */ __name((profile) => { + if (CONFIG_ENDPOINT_DISCOVERY in profile) { + const value = profile[CONFIG_ENDPOINT_DISCOVERY]; + if (value === void 0) { + throw Error(`Shared config entry ${CONFIG_ENDPOINT_DISCOVERY} can't be undefined, got "${value}"`); + } + return !isFalsy(value); + } + }, "configFileSelector"), + default: void 0 +}; + +// src/endpointDiscoveryMiddleware.ts +var import_protocol_http = require("@smithy/protocol-http"); + +// src/getCacheKey.ts +var getCacheKey = /* @__PURE__ */ __name(async (commandName, config, options) => { + const { accessKeyId 
} = await config.credentials(); + const { identifiers } = options; + return JSON.stringify({ + ...accessKeyId && { accessKeyId }, + ...identifiers && { + commandName, + identifiers: Object.entries(identifiers).sort().reduce((acc, [key, value]) => ({ ...acc, [key]: value }), {}) + } + }); +}, "getCacheKey"); + +// src/updateDiscoveredEndpointInCache.ts +var requestQueue = {}; +var updateDiscoveredEndpointInCache = /* @__PURE__ */ __name(async (config, options) => new Promise((resolve, reject) => { + const { endpointCache } = config; + const { cacheKey, commandName, identifiers } = options; + const endpoints = endpointCache.get(cacheKey); + if (endpoints && endpoints.length === 1 && endpoints[0].Address === "") { + if (options.isDiscoveredEndpointRequired) { + if (!requestQueue[cacheKey]) requestQueue[cacheKey] = []; + requestQueue[cacheKey].push({ resolve, reject }); + } else { + resolve(); + } + } else if (endpoints && endpoints.length > 0) { + resolve(); + } else { + const placeholderEndpoints = [{ Address: "", CachePeriodInMinutes: 1 }]; + endpointCache.set(cacheKey, placeholderEndpoints); + const command = new options.endpointDiscoveryCommandCtor({ + Operation: commandName.slice(0, -7), + // strip "Command" + Identifiers: identifiers + }); + const handler = command.resolveMiddleware(options.clientStack, config, options.options); + handler(command).then((result) => { + endpointCache.set(cacheKey, result.output.Endpoints); + if (requestQueue[cacheKey]) { + requestQueue[cacheKey].forEach(({ resolve: resolve2 }) => { + resolve2(); + }); + delete requestQueue[cacheKey]; + } + resolve(); + }).catch((error) => { + endpointCache.delete(cacheKey); + const errorToThrow = Object.assign( + new Error( + `The operation to discover endpoint failed. 
Please retry, or provide a custom endpoint and disable endpoint discovery to proceed.` + ), + { reason: error } + ); + if (requestQueue[cacheKey]) { + requestQueue[cacheKey].forEach(({ reject: reject2 }) => { + reject2(errorToThrow); + }); + delete requestQueue[cacheKey]; + } + if (options.isDiscoveredEndpointRequired) { + reject(errorToThrow); + } else { + endpointCache.set(cacheKey, placeholderEndpoints); + resolve(); + } + }); + } +}), "updateDiscoveredEndpointInCache"); + +// src/endpointDiscoveryMiddleware.ts +var endpointDiscoveryMiddleware = /* @__PURE__ */ __name((config, middlewareConfig) => (next, context) => async (args) => { + if (config.isCustomEndpoint) { + if (config.isClientEndpointDiscoveryEnabled) { + throw new Error(`Custom endpoint is supplied; endpointDiscoveryEnabled must not be true.`); + } + return next(args); + } + const { endpointDiscoveryCommandCtor } = config; + const { isDiscoveredEndpointRequired, identifiers } = middlewareConfig; + const clientName = context.clientName; + const commandName = context.commandName; + const isEndpointDiscoveryEnabled = await config.endpointDiscoveryEnabled(); + const cacheKey = await getCacheKey(commandName, config, { identifiers }); + if (isDiscoveredEndpointRequired) { + if (isEndpointDiscoveryEnabled === false) { + throw new Error( + `Endpoint Discovery is disabled but ${commandName} on ${clientName} requires it. 
Please check your configurations.` + ); + } + await updateDiscoveredEndpointInCache(config, { + ...middlewareConfig, + commandName, + cacheKey, + endpointDiscoveryCommandCtor + }); + } else if (isEndpointDiscoveryEnabled) { + updateDiscoveredEndpointInCache(config, { + ...middlewareConfig, + commandName, + cacheKey, + endpointDiscoveryCommandCtor + }); + } + const { request } = args; + if (cacheKey && import_protocol_http.HttpRequest.isInstance(request)) { + const endpoint = config.endpointCache.getEndpoint(cacheKey); + if (endpoint) { + request.hostname = endpoint; + } + } + return next(args); +}, "endpointDiscoveryMiddleware"); + +// src/getEndpointDiscoveryPlugin.ts +var endpointDiscoveryMiddlewareOptions = { + name: "endpointDiscoveryMiddleware", + step: "build", + tags: ["ENDPOINT_DISCOVERY"], + override: true +}; +var getEndpointDiscoveryPlugin = /* @__PURE__ */ __name((pluginConfig, middlewareConfig) => ({ + applyToStack: /* @__PURE__ */ __name((commandStack) => { + commandStack.add(endpointDiscoveryMiddleware(pluginConfig, middlewareConfig), endpointDiscoveryMiddlewareOptions); + }, "applyToStack") +}), "getEndpointDiscoveryPlugin"); +var getEndpointDiscoveryRequiredPlugin = /* @__PURE__ */ __name((pluginConfig, middlewareConfig) => ({ + applyToStack: /* @__PURE__ */ __name((commandStack) => { + commandStack.add( + endpointDiscoveryMiddleware(pluginConfig, { ...middlewareConfig, isDiscoveredEndpointRequired: true }), + endpointDiscoveryMiddlewareOptions + ); + }, "applyToStack") +}), "getEndpointDiscoveryRequiredPlugin"); +var getEndpointDiscoveryOptionalPlugin = /* @__PURE__ */ __name((pluginConfig, middlewareConfig) => ({ + applyToStack: /* @__PURE__ */ __name((commandStack) => { + commandStack.add( + endpointDiscoveryMiddleware(pluginConfig, { ...middlewareConfig, isDiscoveredEndpointRequired: false }), + endpointDiscoveryMiddlewareOptions + ); + }, "applyToStack") +}), "getEndpointDiscoveryOptionalPlugin"); + +// src/resolveEndpointDiscoveryConfig.ts 
+var import_endpoint_cache = require("@aws-sdk/endpoint-cache"); +var resolveEndpointDiscoveryConfig = /* @__PURE__ */ __name((input, { endpointDiscoveryCommandCtor }) => { + const { endpointCacheSize, endpointDiscoveryEnabled, endpointDiscoveryEnabledProvider } = input; + return Object.assign(input, { + endpointDiscoveryCommandCtor, + endpointCache: new import_endpoint_cache.EndpointCache(endpointCacheSize ?? 1e3), + endpointDiscoveryEnabled: endpointDiscoveryEnabled !== void 0 ? () => Promise.resolve(endpointDiscoveryEnabled) : endpointDiscoveryEnabledProvider, + isClientEndpointDiscoveryEnabled: endpointDiscoveryEnabled !== void 0 + }); +}, "resolveEndpointDiscoveryConfig"); +// Annotate the CommonJS export names for ESM import in node: + +0 && (module.exports = { + NODE_ENDPOINT_DISCOVERY_CONFIG_OPTIONS, + endpointDiscoveryMiddlewareOptions, + getEndpointDiscoveryPlugin, + getEndpointDiscoveryRequiredPlugin, + getEndpointDiscoveryOptionalPlugin, + resolveEndpointDiscoveryConfig +}); + diff --git a/amplify/functions/downloadDocument/node_modules/@aws-sdk/middleware-endpoint-discovery/dist-es/configurations.js b/amplify/functions/downloadDocument/node_modules/@aws-sdk/middleware-endpoint-discovery/dist-es/configurations.js new file mode 100644 index 0000000..8a5fdd6 --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@aws-sdk/middleware-endpoint-discovery/dist-es/configurations.js @@ -0,0 +1,27 @@ +const ENV_ENDPOINT_DISCOVERY = ["AWS_ENABLE_ENDPOINT_DISCOVERY", "AWS_ENDPOINT_DISCOVERY_ENABLED"]; +const CONFIG_ENDPOINT_DISCOVERY = "endpoint_discovery_enabled"; +const isFalsy = (value) => ["false", "0"].indexOf(value) >= 0; +export const NODE_ENDPOINT_DISCOVERY_CONFIG_OPTIONS = { + environmentVariableSelector: (env) => { + for (let i = 0; i < ENV_ENDPOINT_DISCOVERY.length; i++) { + const envKey = ENV_ENDPOINT_DISCOVERY[i]; + if (envKey in env) { + const value = env[envKey]; + if (value === "") { + throw Error(`Environment variable ${envKey} can't 
be empty of undefined, got "${value}"`); + } + return !isFalsy(value); + } + } + }, + configFileSelector: (profile) => { + if (CONFIG_ENDPOINT_DISCOVERY in profile) { + const value = profile[CONFIG_ENDPOINT_DISCOVERY]; + if (value === undefined) { + throw Error(`Shared config entry ${CONFIG_ENDPOINT_DISCOVERY} can't be undefined, got "${value}"`); + } + return !isFalsy(value); + } + }, + default: undefined, +}; diff --git a/amplify/functions/downloadDocument/node_modules/@aws-sdk/middleware-endpoint-discovery/dist-es/endpointDiscoveryMiddleware.js b/amplify/functions/downloadDocument/node_modules/@aws-sdk/middleware-endpoint-discovery/dist-es/endpointDiscoveryMiddleware.js new file mode 100644 index 0000000..80672eb --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@aws-sdk/middleware-endpoint-discovery/dist-es/endpointDiscoveryMiddleware.js @@ -0,0 +1,45 @@ +import { HttpRequest } from "@smithy/protocol-http"; +import { getCacheKey } from "./getCacheKey"; +import { updateDiscoveredEndpointInCache } from "./updateDiscoveredEndpointInCache"; +export const endpointDiscoveryMiddleware = (config, middlewareConfig) => (next, context) => async (args) => { + if (config.isCustomEndpoint) { + if (config.isClientEndpointDiscoveryEnabled) { + throw new Error(`Custom endpoint is supplied; endpointDiscoveryEnabled must not be true.`); + } + return next(args); + } + const { endpointDiscoveryCommandCtor } = config; + const { isDiscoveredEndpointRequired, identifiers } = middlewareConfig; + const clientName = context.clientName; + const commandName = context.commandName; + const isEndpointDiscoveryEnabled = await config.endpointDiscoveryEnabled(); + const cacheKey = await getCacheKey(commandName, config, { identifiers }); + if (isDiscoveredEndpointRequired) { + if (isEndpointDiscoveryEnabled === false) { + throw new Error(`Endpoint Discovery is disabled but ${commandName} on ${clientName} requires it.` + + ` Please check your configurations.`); + } + await 
updateDiscoveredEndpointInCache(config, { + ...middlewareConfig, + commandName, + cacheKey, + endpointDiscoveryCommandCtor, + }); + } + else if (isEndpointDiscoveryEnabled) { + updateDiscoveredEndpointInCache(config, { + ...middlewareConfig, + commandName, + cacheKey, + endpointDiscoveryCommandCtor, + }); + } + const { request } = args; + if (cacheKey && HttpRequest.isInstance(request)) { + const endpoint = config.endpointCache.getEndpoint(cacheKey); + if (endpoint) { + request.hostname = endpoint; + } + } + return next(args); +}; diff --git a/amplify/functions/downloadDocument/node_modules/@aws-sdk/middleware-endpoint-discovery/dist-es/getCacheKey.js b/amplify/functions/downloadDocument/node_modules/@aws-sdk/middleware-endpoint-discovery/dist-es/getCacheKey.js new file mode 100644 index 0000000..ca72e41 --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@aws-sdk/middleware-endpoint-discovery/dist-es/getCacheKey.js @@ -0,0 +1,13 @@ +export const getCacheKey = async (commandName, config, options) => { + const { accessKeyId } = await config.credentials(); + const { identifiers } = options; + return JSON.stringify({ + ...(accessKeyId && { accessKeyId }), + ...(identifiers && { + commandName, + identifiers: Object.entries(identifiers) + .sort() + .reduce((acc, [key, value]) => ({ ...acc, [key]: value }), {}), + }), + }); +}; diff --git a/amplify/functions/downloadDocument/node_modules/@aws-sdk/middleware-endpoint-discovery/dist-es/getEndpointDiscoveryPlugin.js b/amplify/functions/downloadDocument/node_modules/@aws-sdk/middleware-endpoint-discovery/dist-es/getEndpointDiscoveryPlugin.js new file mode 100644 index 0000000..656e7fe --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@aws-sdk/middleware-endpoint-discovery/dist-es/getEndpointDiscoveryPlugin.js @@ -0,0 +1,22 @@ +import { endpointDiscoveryMiddleware } from "./endpointDiscoveryMiddleware"; +export const endpointDiscoveryMiddlewareOptions = { + name: 
"endpointDiscoveryMiddleware", + step: "build", + tags: ["ENDPOINT_DISCOVERY"], + override: true, +}; +export const getEndpointDiscoveryPlugin = (pluginConfig, middlewareConfig) => ({ + applyToStack: (commandStack) => { + commandStack.add(endpointDiscoveryMiddleware(pluginConfig, middlewareConfig), endpointDiscoveryMiddlewareOptions); + }, +}); +export const getEndpointDiscoveryRequiredPlugin = (pluginConfig, middlewareConfig) => ({ + applyToStack: (commandStack) => { + commandStack.add(endpointDiscoveryMiddleware(pluginConfig, { ...middlewareConfig, isDiscoveredEndpointRequired: true }), endpointDiscoveryMiddlewareOptions); + }, +}); +export const getEndpointDiscoveryOptionalPlugin = (pluginConfig, middlewareConfig) => ({ + applyToStack: (commandStack) => { + commandStack.add(endpointDiscoveryMiddleware(pluginConfig, { ...middlewareConfig, isDiscoveredEndpointRequired: false }), endpointDiscoveryMiddlewareOptions); + }, +}); diff --git a/amplify/functions/downloadDocument/node_modules/@aws-sdk/middleware-endpoint-discovery/dist-es/index.js b/amplify/functions/downloadDocument/node_modules/@aws-sdk/middleware-endpoint-discovery/dist-es/index.js new file mode 100644 index 0000000..1c74159 --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@aws-sdk/middleware-endpoint-discovery/dist-es/index.js @@ -0,0 +1,3 @@ +export * from "./configurations"; +export * from "./getEndpointDiscoveryPlugin"; +export * from "./resolveEndpointDiscoveryConfig"; diff --git a/amplify/functions/downloadDocument/node_modules/@aws-sdk/middleware-endpoint-discovery/dist-es/resolveEndpointDiscoveryConfig.js b/amplify/functions/downloadDocument/node_modules/@aws-sdk/middleware-endpoint-discovery/dist-es/resolveEndpointDiscoveryConfig.js new file mode 100644 index 0000000..cc1cc9c --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@aws-sdk/middleware-endpoint-discovery/dist-es/resolveEndpointDiscoveryConfig.js @@ -0,0 +1,12 @@ +import { EndpointCache } from 
"@aws-sdk/endpoint-cache"; +export const resolveEndpointDiscoveryConfig = (input, { endpointDiscoveryCommandCtor }) => { + const { endpointCacheSize, endpointDiscoveryEnabled, endpointDiscoveryEnabledProvider } = input; + return Object.assign(input, { + endpointDiscoveryCommandCtor, + endpointCache: new EndpointCache(endpointCacheSize ?? 1000), + endpointDiscoveryEnabled: endpointDiscoveryEnabled !== undefined + ? () => Promise.resolve(endpointDiscoveryEnabled) + : endpointDiscoveryEnabledProvider, + isClientEndpointDiscoveryEnabled: endpointDiscoveryEnabled !== undefined, + }); +}; diff --git a/amplify/functions/downloadDocument/node_modules/@aws-sdk/middleware-endpoint-discovery/dist-es/updateDiscoveredEndpointInCache.js b/amplify/functions/downloadDocument/node_modules/@aws-sdk/middleware-endpoint-discovery/dist-es/updateDiscoveredEndpointInCache.js new file mode 100644 index 0000000..c0a9831 --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@aws-sdk/middleware-endpoint-discovery/dist-es/updateDiscoveredEndpointInCache.js @@ -0,0 +1,57 @@ +const requestQueue = {}; +export const updateDiscoveredEndpointInCache = async (config, options) => new Promise((resolve, reject) => { + const { endpointCache } = config; + const { cacheKey, commandName, identifiers } = options; + const endpoints = endpointCache.get(cacheKey); + if (endpoints && endpoints.length === 1 && endpoints[0].Address === "") { + if (options.isDiscoveredEndpointRequired) { + if (!requestQueue[cacheKey]) + requestQueue[cacheKey] = []; + requestQueue[cacheKey].push({ resolve, reject }); + } + else { + resolve(); + } + } + else if (endpoints && endpoints.length > 0) { + resolve(); + } + else { + const placeholderEndpoints = [{ Address: "", CachePeriodInMinutes: 1 }]; + endpointCache.set(cacheKey, placeholderEndpoints); + const command = new options.endpointDiscoveryCommandCtor({ + Operation: commandName.slice(0, -7), + Identifiers: identifiers, + }); + const handler = 
command.resolveMiddleware(options.clientStack, config, options.options); + handler(command) + .then((result) => { + endpointCache.set(cacheKey, result.output.Endpoints); + if (requestQueue[cacheKey]) { + requestQueue[cacheKey].forEach(({ resolve }) => { + resolve(); + }); + delete requestQueue[cacheKey]; + } + resolve(); + }) + .catch((error) => { + endpointCache.delete(cacheKey); + const errorToThrow = Object.assign(new Error(`The operation to discover endpoint failed.` + + ` Please retry, or provide a custom endpoint and disable endpoint discovery to proceed.`), { reason: error }); + if (requestQueue[cacheKey]) { + requestQueue[cacheKey].forEach(({ reject }) => { + reject(errorToThrow); + }); + delete requestQueue[cacheKey]; + } + if (options.isDiscoveredEndpointRequired) { + reject(errorToThrow); + } + else { + endpointCache.set(cacheKey, placeholderEndpoints); + resolve(); + } + }); + } +}); diff --git a/amplify/functions/downloadDocument/node_modules/@aws-sdk/middleware-endpoint-discovery/dist-types/configurations.d.ts b/amplify/functions/downloadDocument/node_modules/@aws-sdk/middleware-endpoint-discovery/dist-types/configurations.d.ts new file mode 100644 index 0000000..428209a --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@aws-sdk/middleware-endpoint-discovery/dist-types/configurations.d.ts @@ -0,0 +1,5 @@ +import { LoadedConfigSelectors } from "@smithy/node-config-provider"; +/** + * @internal + */ +export declare const NODE_ENDPOINT_DISCOVERY_CONFIG_OPTIONS: LoadedConfigSelectors; diff --git a/amplify/functions/downloadDocument/node_modules/@aws-sdk/middleware-endpoint-discovery/dist-types/endpointDiscoveryMiddleware.d.ts b/amplify/functions/downloadDocument/node_modules/@aws-sdk/middleware-endpoint-discovery/dist-types/endpointDiscoveryMiddleware.d.ts new file mode 100644 index 0000000..0116bfc --- /dev/null +++ 
b/amplify/functions/downloadDocument/node_modules/@aws-sdk/middleware-endpoint-discovery/dist-types/endpointDiscoveryMiddleware.d.ts @@ -0,0 +1,4 @@ +import { BuildHandler, HandlerExecutionContext, MetadataBearer } from "@smithy/types"; +import { EndpointDiscoveryMiddlewareConfig } from "./getEndpointDiscoveryPlugin"; +import { EndpointDiscoveryResolvedConfig, PreviouslyResolved } from "./resolveEndpointDiscoveryConfig"; +export declare const endpointDiscoveryMiddleware: (config: EndpointDiscoveryResolvedConfig & PreviouslyResolved, middlewareConfig: EndpointDiscoveryMiddlewareConfig) => (next: BuildHandler, context: HandlerExecutionContext) => BuildHandler; diff --git a/amplify/functions/downloadDocument/node_modules/@aws-sdk/middleware-endpoint-discovery/dist-types/getCacheKey.d.ts b/amplify/functions/downloadDocument/node_modules/@aws-sdk/middleware-endpoint-discovery/dist-types/getCacheKey.d.ts new file mode 100644 index 0000000..153a5b9 --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@aws-sdk/middleware-endpoint-discovery/dist-types/getCacheKey.d.ts @@ -0,0 +1,9 @@ +import { AwsCredentialIdentity, Provider } from "@smithy/types"; +/** + * Generate key to index the endpoints in the cache + */ +export declare const getCacheKey: (commandName: string, config: { + credentials: Provider; +}, options: { + identifiers?: Record; +}) => Promise; diff --git a/amplify/functions/downloadDocument/node_modules/@aws-sdk/middleware-endpoint-discovery/dist-types/getEndpointDiscoveryPlugin.d.ts b/amplify/functions/downloadDocument/node_modules/@aws-sdk/middleware-endpoint-discovery/dist-types/getEndpointDiscoveryPlugin.d.ts new file mode 100644 index 0000000..06565e3 --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@aws-sdk/middleware-endpoint-discovery/dist-types/getEndpointDiscoveryPlugin.d.ts @@ -0,0 +1,29 @@ +import { BuildHandlerOptions, HttpHandlerOptions, MiddlewareStack, Pluggable } from "@smithy/types"; +import { 
EndpointDiscoveryResolvedConfig, PreviouslyResolved } from "./resolveEndpointDiscoveryConfig"; +/** + * @internal + */ +export declare const endpointDiscoveryMiddlewareOptions: BuildHandlerOptions; +/** + * @public + */ +export interface EndpointDiscoveryMiddlewareConfig { + isDiscoveredEndpointRequired: boolean; + clientStack: MiddlewareStack; + options?: HttpHandlerOptions; + identifiers?: Record; +} +/** + * @internal + */ +export declare const getEndpointDiscoveryPlugin: (pluginConfig: EndpointDiscoveryResolvedConfig & PreviouslyResolved, middlewareConfig: EndpointDiscoveryMiddlewareConfig) => Pluggable; +/** + * @internal + * @deprecated Use getEndpointDiscoveryPlugin + */ +export declare const getEndpointDiscoveryRequiredPlugin: (pluginConfig: EndpointDiscoveryResolvedConfig & PreviouslyResolved, middlewareConfig: Omit) => Pluggable; +/** + * @internal + * @deprecated Use getEndpointDiscoveryPlugin + */ +export declare const getEndpointDiscoveryOptionalPlugin: (pluginConfig: EndpointDiscoveryResolvedConfig & PreviouslyResolved, middlewareConfig: Omit) => Pluggable; diff --git a/amplify/functions/downloadDocument/node_modules/@aws-sdk/middleware-endpoint-discovery/dist-types/index.d.ts b/amplify/functions/downloadDocument/node_modules/@aws-sdk/middleware-endpoint-discovery/dist-types/index.d.ts new file mode 100644 index 0000000..1c74159 --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@aws-sdk/middleware-endpoint-discovery/dist-types/index.d.ts @@ -0,0 +1,3 @@ +export * from "./configurations"; +export * from "./getEndpointDiscoveryPlugin"; +export * from "./resolveEndpointDiscoveryConfig"; diff --git a/amplify/functions/downloadDocument/node_modules/@aws-sdk/middleware-endpoint-discovery/dist-types/resolveEndpointDiscoveryConfig.d.ts b/amplify/functions/downloadDocument/node_modules/@aws-sdk/middleware-endpoint-discovery/dist-types/resolveEndpointDiscoveryConfig.d.ts new file mode 100644 index 0000000..dd132a4 --- /dev/null +++ 
b/amplify/functions/downloadDocument/node_modules/@aws-sdk/middleware-endpoint-discovery/dist-types/resolveEndpointDiscoveryConfig.d.ts @@ -0,0 +1,60 @@ +import { EndpointCache } from "@aws-sdk/endpoint-cache"; +import { AwsCredentialIdentity, MemoizedProvider, Provider } from "@smithy/types"; +/** + * @internal + */ +export interface PreviouslyResolved { + isCustomEndpoint?: boolean; + credentials: MemoizedProvider; + endpointDiscoveryEnabledProvider: Provider; +} +/** + * @public + */ +export interface EndpointDiscoveryInputConfig { + /** + * The size of the client cache storing endpoints from endpoint discovery operations. + * Defaults to 1000. + */ + endpointCacheSize?: number; + /** + * Whether to call operations with endpoints given by service dynamically. + * Setting this config to `true` will enable endpoint discovery for all applicable operations. + * Setting it to `false` will explicitly disable endpoint discovery even though operations that + * require endpoint discovery will presumably fail. Leaving it to undefined means SDK only do + * endpoint discovery when it's required. Defaults to `undefined`. + */ + endpointDiscoveryEnabled?: boolean | undefined; +} +export interface EndpointDiscoveryResolvedConfig { + /** + * LRU Cache which stores endpoints from endpoint discovery operations. + * The size is either provided by {@link EndpointDiscoveryInputConfig.endpointCacheSize}. + */ + endpointCache: EndpointCache; + /** + * The constructor of the Command used for discovering endpoints. + * @internal + */ + endpointDiscoveryCommandCtor: new (comandConfig: any) => any; + /** + * Resolved value for input config {@link EndpointDiscoveryInputConfig.endpointDiscoveryEnabled}. + */ + endpointDiscoveryEnabled: Provider; + /** + * Stores whether endpoint discovery configuration is set locally by passing + * {@link EndpointDiscoveryInputConfig.endpointDiscoveryEnabled} during client creation. 
+ * @internal + */ + isClientEndpointDiscoveryEnabled: boolean; +} +export interface EndpointDiscoveryConfigOptions { + /** + * The constructor of the Command used for discovering endpoints. + */ + endpointDiscoveryCommandCtor: new (comandConfig: any) => any; +} +/** + * @internal + */ +export declare const resolveEndpointDiscoveryConfig: (input: T & PreviouslyResolved & EndpointDiscoveryInputConfig, { endpointDiscoveryCommandCtor }: EndpointDiscoveryConfigOptions) => T & EndpointDiscoveryResolvedConfig; diff --git a/amplify/functions/downloadDocument/node_modules/@aws-sdk/middleware-endpoint-discovery/dist-types/ts3.4/configurations.d.ts b/amplify/functions/downloadDocument/node_modules/@aws-sdk/middleware-endpoint-discovery/dist-types/ts3.4/configurations.d.ts new file mode 100644 index 0000000..366f145 --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@aws-sdk/middleware-endpoint-discovery/dist-types/ts3.4/configurations.d.ts @@ -0,0 +1,4 @@ +import { LoadedConfigSelectors } from "@smithy/node-config-provider"; +export declare const NODE_ENDPOINT_DISCOVERY_CONFIG_OPTIONS: LoadedConfigSelectors< + boolean | undefined +>; diff --git a/amplify/functions/downloadDocument/node_modules/@aws-sdk/middleware-endpoint-discovery/dist-types/ts3.4/endpointDiscoveryMiddleware.d.ts b/amplify/functions/downloadDocument/node_modules/@aws-sdk/middleware-endpoint-discovery/dist-types/ts3.4/endpointDiscoveryMiddleware.d.ts new file mode 100644 index 0000000..ceff474 --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@aws-sdk/middleware-endpoint-discovery/dist-types/ts3.4/endpointDiscoveryMiddleware.d.ts @@ -0,0 +1,17 @@ +import { + BuildHandler, + HandlerExecutionContext, + MetadataBearer, +} from "@smithy/types"; +import { EndpointDiscoveryMiddlewareConfig } from "./getEndpointDiscoveryPlugin"; +import { + EndpointDiscoveryResolvedConfig, + PreviouslyResolved, +} from "./resolveEndpointDiscoveryConfig"; +export declare const 
endpointDiscoveryMiddleware: ( + config: EndpointDiscoveryResolvedConfig & PreviouslyResolved, + middlewareConfig: EndpointDiscoveryMiddlewareConfig +) => ( + next: BuildHandler, + context: HandlerExecutionContext +) => BuildHandler; diff --git a/amplify/functions/downloadDocument/node_modules/@aws-sdk/middleware-endpoint-discovery/dist-types/ts3.4/getCacheKey.d.ts b/amplify/functions/downloadDocument/node_modules/@aws-sdk/middleware-endpoint-discovery/dist-types/ts3.4/getCacheKey.d.ts new file mode 100644 index 0000000..d9be17e --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@aws-sdk/middleware-endpoint-discovery/dist-types/ts3.4/getCacheKey.d.ts @@ -0,0 +1,10 @@ +import { AwsCredentialIdentity, Provider } from "@smithy/types"; +export declare const getCacheKey: ( + commandName: string, + config: { + credentials: Provider; + }, + options: { + identifiers?: Record; + } +) => Promise; diff --git a/amplify/functions/downloadDocument/node_modules/@aws-sdk/middleware-endpoint-discovery/dist-types/ts3.4/getEndpointDiscoveryPlugin.d.ts b/amplify/functions/downloadDocument/node_modules/@aws-sdk/middleware-endpoint-discovery/dist-types/ts3.4/getEndpointDiscoveryPlugin.d.ts new file mode 100644 index 0000000..8c60174 --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@aws-sdk/middleware-endpoint-discovery/dist-types/ts3.4/getEndpointDiscoveryPlugin.d.ts @@ -0,0 +1,41 @@ +import { + BuildHandlerOptions, + HttpHandlerOptions, + MiddlewareStack, + Pluggable, +} from "@smithy/types"; +import { + EndpointDiscoveryResolvedConfig, + PreviouslyResolved, +} from "./resolveEndpointDiscoveryConfig"; +export declare const endpointDiscoveryMiddlewareOptions: BuildHandlerOptions; +export interface EndpointDiscoveryMiddlewareConfig { + isDiscoveredEndpointRequired: boolean; + clientStack: MiddlewareStack; + options?: HttpHandlerOptions; + identifiers?: Record; +} +export declare const getEndpointDiscoveryPlugin: ( + pluginConfig: 
EndpointDiscoveryResolvedConfig & PreviouslyResolved, + middlewareConfig: EndpointDiscoveryMiddlewareConfig +) => Pluggable; +export declare const getEndpointDiscoveryRequiredPlugin: ( + pluginConfig: EndpointDiscoveryResolvedConfig & PreviouslyResolved, + middlewareConfig: Pick< + EndpointDiscoveryMiddlewareConfig, + Exclude< + keyof EndpointDiscoveryMiddlewareConfig, + "isDiscoveredEndpointRequired" + > + > +) => Pluggable; +export declare const getEndpointDiscoveryOptionalPlugin: ( + pluginConfig: EndpointDiscoveryResolvedConfig & PreviouslyResolved, + middlewareConfig: Pick< + EndpointDiscoveryMiddlewareConfig, + Exclude< + keyof EndpointDiscoveryMiddlewareConfig, + "isDiscoveredEndpointRequired" + > + > +) => Pluggable; diff --git a/amplify/functions/downloadDocument/node_modules/@aws-sdk/middleware-endpoint-discovery/dist-types/ts3.4/index.d.ts b/amplify/functions/downloadDocument/node_modules/@aws-sdk/middleware-endpoint-discovery/dist-types/ts3.4/index.d.ts new file mode 100644 index 0000000..1c74159 --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@aws-sdk/middleware-endpoint-discovery/dist-types/ts3.4/index.d.ts @@ -0,0 +1,3 @@ +export * from "./configurations"; +export * from "./getEndpointDiscoveryPlugin"; +export * from "./resolveEndpointDiscoveryConfig"; diff --git a/amplify/functions/downloadDocument/node_modules/@aws-sdk/middleware-endpoint-discovery/dist-types/ts3.4/resolveEndpointDiscoveryConfig.d.ts b/amplify/functions/downloadDocument/node_modules/@aws-sdk/middleware-endpoint-discovery/dist-types/ts3.4/resolveEndpointDiscoveryConfig.d.ts new file mode 100644 index 0000000..eaa95f7 --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@aws-sdk/middleware-endpoint-discovery/dist-types/ts3.4/resolveEndpointDiscoveryConfig.d.ts @@ -0,0 +1,28 @@ +import { EndpointCache } from "@aws-sdk/endpoint-cache"; +import { + AwsCredentialIdentity, + MemoizedProvider, + Provider, +} from "@smithy/types"; +export interface 
PreviouslyResolved { + isCustomEndpoint?: boolean; + credentials: MemoizedProvider; + endpointDiscoveryEnabledProvider: Provider; +} +export interface EndpointDiscoveryInputConfig { + endpointCacheSize?: number; + endpointDiscoveryEnabled?: boolean | undefined; +} +export interface EndpointDiscoveryResolvedConfig { + endpointCache: EndpointCache; + endpointDiscoveryCommandCtor: new (comandConfig: any) => any; + endpointDiscoveryEnabled: Provider; + isClientEndpointDiscoveryEnabled: boolean; +} +export interface EndpointDiscoveryConfigOptions { + endpointDiscoveryCommandCtor: new (comandConfig: any) => any; +} +export declare const resolveEndpointDiscoveryConfig: ( + input: T & PreviouslyResolved & EndpointDiscoveryInputConfig, + { endpointDiscoveryCommandCtor }: EndpointDiscoveryConfigOptions +) => T & EndpointDiscoveryResolvedConfig; diff --git a/amplify/functions/downloadDocument/node_modules/@aws-sdk/middleware-endpoint-discovery/dist-types/ts3.4/updateDiscoveredEndpointInCache.d.ts b/amplify/functions/downloadDocument/node_modules/@aws-sdk/middleware-endpoint-discovery/dist-types/ts3.4/updateDiscoveredEndpointInCache.d.ts new file mode 100644 index 0000000..0887cb5 --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@aws-sdk/middleware-endpoint-discovery/dist-types/ts3.4/updateDiscoveredEndpointInCache.d.ts @@ -0,0 +1,15 @@ +import { EndpointDiscoveryMiddlewareConfig } from "./getEndpointDiscoveryPlugin"; +import { + EndpointDiscoveryResolvedConfig, + PreviouslyResolved, +} from "./resolveEndpointDiscoveryConfig"; +export interface UpdateDiscoveredEndpointInCacheOptions + extends EndpointDiscoveryMiddlewareConfig { + cacheKey: string; + commandName: string; + endpointDiscoveryCommandCtor: new (comandConfig: any) => any; +} +export declare const updateDiscoveredEndpointInCache: ( + config: EndpointDiscoveryResolvedConfig & PreviouslyResolved, + options: UpdateDiscoveredEndpointInCacheOptions +) => Promise; diff --git 
a/amplify/functions/downloadDocument/node_modules/@aws-sdk/middleware-endpoint-discovery/dist-types/updateDiscoveredEndpointInCache.d.ts b/amplify/functions/downloadDocument/node_modules/@aws-sdk/middleware-endpoint-discovery/dist-types/updateDiscoveredEndpointInCache.d.ts new file mode 100644 index 0000000..993753d --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@aws-sdk/middleware-endpoint-discovery/dist-types/updateDiscoveredEndpointInCache.d.ts @@ -0,0 +1,8 @@ +import { EndpointDiscoveryMiddlewareConfig } from "./getEndpointDiscoveryPlugin"; +import { EndpointDiscoveryResolvedConfig, PreviouslyResolved } from "./resolveEndpointDiscoveryConfig"; +export interface UpdateDiscoveredEndpointInCacheOptions extends EndpointDiscoveryMiddlewareConfig { + cacheKey: string; + commandName: string; + endpointDiscoveryCommandCtor: new (comandConfig: any) => any; +} +export declare const updateDiscoveredEndpointInCache: (config: EndpointDiscoveryResolvedConfig & PreviouslyResolved, options: UpdateDiscoveredEndpointInCacheOptions) => Promise; diff --git a/amplify/functions/downloadDocument/node_modules/@aws-sdk/middleware-endpoint-discovery/package.json b/amplify/functions/downloadDocument/node_modules/@aws-sdk/middleware-endpoint-discovery/package.json new file mode 100644 index 0000000..44fec2e --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@aws-sdk/middleware-endpoint-discovery/package.json @@ -0,0 +1,60 @@ +{ + "name": "@aws-sdk/middleware-endpoint-discovery", + "version": "3.775.0", + "scripts": { + "build": "concurrently 'yarn:build:cjs' 'yarn:build:es' 'yarn:build:types'", + "build:cjs": "node ../../scripts/compilation/inline middleware-endpoint-discovery", + "build:es": "tsc -p tsconfig.es.json", + "build:include:deps": "lerna run --scope $npm_package_name --include-dependencies build", + "build:types": "tsc -p tsconfig.types.json", + "build:types:downlevel": "downlevel-dts dist-types dist-types/ts3.4", + "clean": "rimraf 
./dist-* && rimraf *.tsbuildinfo", + "extract:docs": "api-extractor run --local", + "test": "yarn g:vitest run", + "test:integration": "yarn g:vitest run -c vitest.config.integ.ts", + "test:watch": "yarn g:vitest watch", + "test:integration:watch": "yarn g:vitest watch -c vitest.config.integ.ts" + }, + "main": "./dist-cjs/index.js", + "module": "./dist-es/index.js", + "types": "./dist-types/index.d.ts", + "author": { + "name": "AWS SDK for JavaScript Team", + "url": "https://aws.amazon.com/javascript/" + }, + "license": "Apache-2.0", + "devDependencies": { + "@tsconfig/recommended": "1.0.1", + "concurrently": "7.0.0", + "downlevel-dts": "0.10.1", + "rimraf": "3.0.2", + "typescript": "~5.2.2" + }, + "dependencies": { + "@aws-sdk/endpoint-cache": "3.723.0", + "@aws-sdk/types": "3.775.0", + "@smithy/node-config-provider": "^4.0.2", + "@smithy/protocol-http": "^5.1.0", + "@smithy/types": "^4.2.0", + "tslib": "^2.6.2" + }, + "engines": { + "node": ">=18.0.0" + }, + "typesVersions": { + "<4.0": { + "dist-types/*": [ + "dist-types/ts3.4/*" + ] + } + }, + "files": [ + "dist-*/**" + ], + "homepage": "https://github.com/aws/aws-sdk-js-v3/tree/main/packages/middleware-endpoint-discovery", + "repository": { + "type": "git", + "url": "https://github.com/aws/aws-sdk-js-v3.git", + "directory": "packages/middleware-endpoint-discovery" + } +} diff --git a/amplify/functions/downloadDocument/node_modules/@aws-sdk/middleware-host-header/LICENSE b/amplify/functions/downloadDocument/node_modules/@aws-sdk/middleware-host-header/LICENSE new file mode 100644 index 0000000..e907b58 --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@aws-sdk/middleware-host-header/LICENSE @@ -0,0 +1,201 @@ + Apache License + Version 2.0, January 2004 + http://www.apache.org/licenses/ + + TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION + + 1. Definitions. 
+ + "License" shall mean the terms and conditions for use, reproduction, + and distribution as defined by Sections 1 through 9 of this document. + + "Licensor" shall mean the copyright owner or entity authorized by + the copyright owner that is granting the License. + + "Legal Entity" shall mean the union of the acting entity and all + other entities that control, are controlled by, or are under common + control with that entity. For the purposes of this definition, + "control" means (i) the power, direct or indirect, to cause the + direction or management of such entity, whether by contract or + otherwise, or (ii) ownership of fifty percent (50%) or more of the + outstanding shares, or (iii) beneficial ownership of such entity. + + "You" (or "Your") shall mean an individual or Legal Entity + exercising permissions granted by this License. + + "Source" form shall mean the preferred form for making modifications, + including but not limited to software source code, documentation + source, and configuration files. + + "Object" form shall mean any form resulting from mechanical + transformation or translation of a Source form, including but + not limited to compiled object code, generated documentation, + and conversions to other media types. + + "Work" shall mean the work of authorship, whether in Source or + Object form, made available under the License, as indicated by a + copyright notice that is included in or attached to the work + (an example is provided in the Appendix below). + + "Derivative Works" shall mean any work, whether in Source or Object + form, that is based on (or derived from) the Work and for which the + editorial revisions, annotations, elaborations, or other modifications + represent, as a whole, an original work of authorship. For the purposes + of this License, Derivative Works shall not include works that remain + separable from, or merely link (or bind by name) to the interfaces of, + the Work and Derivative Works thereof. 
+ + "Contribution" shall mean any work of authorship, including + the original version of the Work and any modifications or additions + to that Work or Derivative Works thereof, that is intentionally + submitted to Licensor for inclusion in the Work by the copyright owner + or by an individual or Legal Entity authorized to submit on behalf of + the copyright owner. For the purposes of this definition, "submitted" + means any form of electronic, verbal, or written communication sent + to the Licensor or its representatives, including but not limited to + communication on electronic mailing lists, source code control systems, + and issue tracking systems that are managed by, or on behalf of, the + Licensor for the purpose of discussing and improving the Work, but + excluding communication that is conspicuously marked or otherwise + designated in writing by the copyright owner as "Not a Contribution." + + "Contributor" shall mean Licensor and any individual or Legal Entity + on behalf of whom a Contribution has been received by Licensor and + subsequently incorporated within the Work. + + 2. Grant of Copyright License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + copyright license to reproduce, prepare Derivative Works of, + publicly display, publicly perform, sublicense, and distribute the + Work and such Derivative Works in Source or Object form. + + 3. Grant of Patent License. 
Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + (except as stated in this section) patent license to make, have made, + use, offer to sell, sell, import, and otherwise transfer the Work, + where such license applies only to those patent claims licensable + by such Contributor that are necessarily infringed by their + Contribution(s) alone or by combination of their Contribution(s) + with the Work to which such Contribution(s) was submitted. If You + institute patent litigation against any entity (including a + cross-claim or counterclaim in a lawsuit) alleging that the Work + or a Contribution incorporated within the Work constitutes direct + or contributory patent infringement, then any patent licenses + granted to You under this License for that Work shall terminate + as of the date such litigation is filed. + + 4. Redistribution. You may reproduce and distribute copies of the + Work or Derivative Works thereof in any medium, with or without + modifications, and in Source or Object form, provided that You + meet the following conditions: + + (a) You must give any other recipients of the Work or + Derivative Works a copy of this License; and + + (b) You must cause any modified files to carry prominent notices + stating that You changed the files; and + + (c) You must retain, in the Source form of any Derivative Works + that You distribute, all copyright, patent, trademark, and + attribution notices from the Source form of the Work, + excluding those notices that do not pertain to any part of + the Derivative Works; and + + (d) If the Work includes a "NOTICE" text file as part of its + distribution, then any Derivative Works that You distribute must + include a readable copy of the attribution notices contained + within such NOTICE file, excluding those notices that do not + pertain to any part of the Derivative Works, in at least one + of 
the following places: within a NOTICE text file distributed + as part of the Derivative Works; within the Source form or + documentation, if provided along with the Derivative Works; or, + within a display generated by the Derivative Works, if and + wherever such third-party notices normally appear. The contents + of the NOTICE file are for informational purposes only and + do not modify the License. You may add Your own attribution + notices within Derivative Works that You distribute, alongside + or as an addendum to the NOTICE text from the Work, provided + that such additional attribution notices cannot be construed + as modifying the License. + + You may add Your own copyright statement to Your modifications and + may provide additional or different license terms and conditions + for use, reproduction, or distribution of Your modifications, or + for any such Derivative Works as a whole, provided Your use, + reproduction, and distribution of the Work otherwise complies with + the conditions stated in this License. + + 5. Submission of Contributions. Unless You explicitly state otherwise, + any Contribution intentionally submitted for inclusion in the Work + by You to the Licensor shall be under the terms and conditions of + this License, without any additional terms or conditions. + Notwithstanding the above, nothing herein shall supersede or modify + the terms of any separate license agreement you may have executed + with Licensor regarding such Contributions. + + 6. Trademarks. This License does not grant permission to use the trade + names, trademarks, service marks, or product names of the Licensor, + except as required for reasonable and customary use in describing the + origin of the Work and reproducing the content of the NOTICE file. + + 7. Disclaimer of Warranty. 
Unless required by applicable law or + agreed to in writing, Licensor provides the Work (and each + Contributor provides its Contributions) on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or + implied, including, without limitation, any warranties or conditions + of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A + PARTICULAR PURPOSE. You are solely responsible for determining the + appropriateness of using or redistributing the Work and assume any + risks associated with Your exercise of permissions under this License. + + 8. Limitation of Liability. In no event and under no legal theory, + whether in tort (including negligence), contract, or otherwise, + unless required by applicable law (such as deliberate and grossly + negligent acts) or agreed to in writing, shall any Contributor be + liable to You for damages, including any direct, indirect, special, + incidental, or consequential damages of any character arising as a + result of this License or out of the use or inability to use the + Work (including but not limited to damages for loss of goodwill, + work stoppage, computer failure or malfunction, or any and all + other commercial damages or losses), even if such Contributor + has been advised of the possibility of such damages. + + 9. Accepting Warranty or Additional Liability. While redistributing + the Work or Derivative Works thereof, You may choose to offer, + and charge a fee for, acceptance of support, warranty, indemnity, + or other liability obligations and/or rights consistent with this + License. However, in accepting such obligations, You may act only + on Your own behalf and on Your sole responsibility, not on behalf + of any other Contributor, and only if You agree to indemnify, + defend, and hold each Contributor harmless for any liability + incurred by, or claims asserted against, such Contributor by reason + of your accepting any such warranty or additional liability. 
+ + END OF TERMS AND CONDITIONS + + APPENDIX: How to apply the Apache License to your work. + + To apply the Apache License to your work, attach the following + boilerplate notice, with the fields enclosed by brackets "{}" + replaced with your own identifying information. (Don't include + the brackets!) The text should be enclosed in the appropriate + comment syntax for the file format. We also recommend that a + file or class name and description of purpose be included on the + same "printed page" as the copyright notice for easier + identification within third-party archives. + + Copyright 2019 Amazon.com, Inc. or its affiliates. All Rights Reserved. + + Licensed under the Apache License, Version 2.0 (the "License"); + you may not use this file except in compliance with the License. + You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + + Unless required by applicable law or agreed to in writing, software + distributed under the License is distributed on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + See the License for the specific language governing permissions and + limitations under the License. 
diff --git a/amplify/functions/downloadDocument/node_modules/@aws-sdk/middleware-host-header/README.md b/amplify/functions/downloadDocument/node_modules/@aws-sdk/middleware-host-header/README.md new file mode 100644 index 0000000..123940e --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@aws-sdk/middleware-host-header/README.md @@ -0,0 +1,4 @@ +# @aws-sdk/middleware-host-header + +[![NPM version](https://img.shields.io/npm/v/@aws-sdk/middleware-host-header/latest.svg)](https://www.npmjs.com/package/@aws-sdk/middleware-host-header) +[![NPM downloads](https://img.shields.io/npm/dm/@aws-sdk/middleware-host-header.svg)](https://www.npmjs.com/package/@aws-sdk/middleware-host-header) diff --git a/amplify/functions/downloadDocument/node_modules/@aws-sdk/middleware-host-header/dist-cjs/index.js b/amplify/functions/downloadDocument/node_modules/@aws-sdk/middleware-host-header/dist-cjs/index.js new file mode 100644 index 0000000..bdfe2a5 --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@aws-sdk/middleware-host-header/dist-cjs/index.js @@ -0,0 +1,69 @@ +"use strict"; +var __defProp = Object.defineProperty; +var __getOwnPropDesc = Object.getOwnPropertyDescriptor; +var __getOwnPropNames = Object.getOwnPropertyNames; +var __hasOwnProp = Object.prototype.hasOwnProperty; +var __name = (target, value) => __defProp(target, "name", { value, configurable: true }); +var __export = (target, all) => { + for (var name in all) + __defProp(target, name, { get: all[name], enumerable: true }); +}; +var __copyProps = (to, from, except, desc) => { + if (from && typeof from === "object" || typeof from === "function") { + for (let key of __getOwnPropNames(from)) + if (!__hasOwnProp.call(to, key) && key !== except) + __defProp(to, key, { get: () => from[key], enumerable: !(desc = __getOwnPropDesc(from, key)) || desc.enumerable }); + } + return to; +}; +var __toCommonJS = (mod) => __copyProps(__defProp({}, "__esModule", { value: true }), mod); + +// 
src/index.ts +var index_exports = {}; +__export(index_exports, { + getHostHeaderPlugin: () => getHostHeaderPlugin, + hostHeaderMiddleware: () => hostHeaderMiddleware, + hostHeaderMiddlewareOptions: () => hostHeaderMiddlewareOptions, + resolveHostHeaderConfig: () => resolveHostHeaderConfig +}); +module.exports = __toCommonJS(index_exports); +var import_protocol_http = require("@smithy/protocol-http"); +function resolveHostHeaderConfig(input) { + return input; +} +__name(resolveHostHeaderConfig, "resolveHostHeaderConfig"); +var hostHeaderMiddleware = /* @__PURE__ */ __name((options) => (next) => async (args) => { + if (!import_protocol_http.HttpRequest.isInstance(args.request)) return next(args); + const { request } = args; + const { handlerProtocol = "" } = options.requestHandler.metadata || {}; + if (handlerProtocol.indexOf("h2") >= 0 && !request.headers[":authority"]) { + delete request.headers["host"]; + request.headers[":authority"] = request.hostname + (request.port ? ":" + request.port : ""); + } else if (!request.headers["host"]) { + let host = request.hostname; + if (request.port != null) host += `:${request.port}`; + request.headers["host"] = host; + } + return next(args); +}, "hostHeaderMiddleware"); +var hostHeaderMiddlewareOptions = { + name: "hostHeaderMiddleware", + step: "build", + priority: "low", + tags: ["HOST"], + override: true +}; +var getHostHeaderPlugin = /* @__PURE__ */ __name((options) => ({ + applyToStack: /* @__PURE__ */ __name((clientStack) => { + clientStack.add(hostHeaderMiddleware(options), hostHeaderMiddlewareOptions); + }, "applyToStack") +}), "getHostHeaderPlugin"); +// Annotate the CommonJS export names for ESM import in node: + +0 && (module.exports = { + resolveHostHeaderConfig, + hostHeaderMiddleware, + hostHeaderMiddlewareOptions, + getHostHeaderPlugin +}); + diff --git a/amplify/functions/downloadDocument/node_modules/@aws-sdk/middleware-host-header/dist-es/index.js 
b/amplify/functions/downloadDocument/node_modules/@aws-sdk/middleware-host-header/dist-es/index.js new file mode 100644 index 0000000..2e2fb62 --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@aws-sdk/middleware-host-header/dist-es/index.js @@ -0,0 +1,33 @@ +import { HttpRequest } from "@smithy/protocol-http"; +export function resolveHostHeaderConfig(input) { + return input; +} +export const hostHeaderMiddleware = (options) => (next) => async (args) => { + if (!HttpRequest.isInstance(args.request)) + return next(args); + const { request } = args; + const { handlerProtocol = "" } = options.requestHandler.metadata || {}; + if (handlerProtocol.indexOf("h2") >= 0 && !request.headers[":authority"]) { + delete request.headers["host"]; + request.headers[":authority"] = request.hostname + (request.port ? ":" + request.port : ""); + } + else if (!request.headers["host"]) { + let host = request.hostname; + if (request.port != null) + host += `:${request.port}`; + request.headers["host"] = host; + } + return next(args); +}; +export const hostHeaderMiddlewareOptions = { + name: "hostHeaderMiddleware", + step: "build", + priority: "low", + tags: ["HOST"], + override: true, +}; +export const getHostHeaderPlugin = (options) => ({ + applyToStack: (clientStack) => { + clientStack.add(hostHeaderMiddleware(options), hostHeaderMiddlewareOptions); + }, +}); diff --git a/amplify/functions/downloadDocument/node_modules/@aws-sdk/middleware-host-header/dist-types/index.d.ts b/amplify/functions/downloadDocument/node_modules/@aws-sdk/middleware-host-header/dist-types/index.d.ts new file mode 100644 index 0000000..752bb00 --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@aws-sdk/middleware-host-header/dist-types/index.d.ts @@ -0,0 +1,35 @@ +import { AbsoluteLocation, BuildHandlerOptions, BuildMiddleware, Pluggable, RequestHandler } from "@smithy/types"; +/** + * @public + */ +export interface HostHeaderInputConfig { +} +interface PreviouslyResolved { + 
requestHandler: RequestHandler; +} +/** + * @internal + */ +export interface HostHeaderResolvedConfig { + /** + * The HTTP handler to use. Fetch in browser and Https in Nodejs. + */ + requestHandler: RequestHandler; +} +/** + * @internal + */ +export declare function resolveHostHeaderConfig(input: T & PreviouslyResolved & HostHeaderInputConfig): T & HostHeaderResolvedConfig; +/** + * @internal + */ +export declare const hostHeaderMiddleware: (options: HostHeaderResolvedConfig) => BuildMiddleware; +/** + * @internal + */ +export declare const hostHeaderMiddlewareOptions: BuildHandlerOptions & AbsoluteLocation; +/** + * @internal + */ +export declare const getHostHeaderPlugin: (options: HostHeaderResolvedConfig) => Pluggable; +export {}; diff --git a/amplify/functions/downloadDocument/node_modules/@aws-sdk/middleware-host-header/dist-types/ts3.4/index.d.ts b/amplify/functions/downloadDocument/node_modules/@aws-sdk/middleware-host-header/dist-types/ts3.4/index.d.ts new file mode 100644 index 0000000..3ca5561 --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@aws-sdk/middleware-host-header/dist-types/ts3.4/index.d.ts @@ -0,0 +1,29 @@ +import { + AbsoluteLocation, + BuildHandlerOptions, + BuildMiddleware, + Pluggable, + RequestHandler, +} from "@smithy/types"; +export interface HostHeaderInputConfig {} +interface PreviouslyResolved { + requestHandler: RequestHandler; +} +export interface HostHeaderResolvedConfig { + requestHandler: RequestHandler; +} +export declare function resolveHostHeaderConfig( + input: T & PreviouslyResolved & HostHeaderInputConfig +): T & HostHeaderResolvedConfig; +export declare const hostHeaderMiddleware: < + Input extends object, + Output extends object +>( + options: HostHeaderResolvedConfig +) => BuildMiddleware; +export declare const hostHeaderMiddlewareOptions: BuildHandlerOptions & + AbsoluteLocation; +export declare const getHostHeaderPlugin: ( + options: HostHeaderResolvedConfig +) => Pluggable; +export {}; diff --git 
a/amplify/functions/downloadDocument/node_modules/@aws-sdk/middleware-host-header/package.json b/amplify/functions/downloadDocument/node_modules/@aws-sdk/middleware-host-header/package.json new file mode 100644 index 0000000..523f8a1 --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@aws-sdk/middleware-host-header/package.json @@ -0,0 +1,58 @@ +{ + "name": "@aws-sdk/middleware-host-header", + "version": "3.775.0", + "scripts": { + "build": "concurrently 'yarn:build:cjs' 'yarn:build:es' 'yarn:build:types'", + "build:cjs": "node ../../scripts/compilation/inline middleware-host-header", + "build:es": "tsc -p tsconfig.es.json", + "build:include:deps": "lerna run --scope $npm_package_name --include-dependencies build", + "build:types": "tsc -p tsconfig.types.json", + "build:types:downlevel": "downlevel-dts dist-types dist-types/ts3.4", + "clean": "rimraf ./dist-* && rimraf *.tsbuildinfo", + "extract:docs": "api-extractor run --local", + "test": "yarn g:vitest run", + "test:integration": "yarn g:vitest run -c vitest.config.integ.ts", + "test:watch": "yarn g:vitest watch", + "test:integration:watch": "yarn g:vitest watch -c vitest.config.integ.ts" + }, + "main": "./dist-cjs/index.js", + "module": "./dist-es/index.js", + "types": "./dist-types/index.d.ts", + "author": { + "name": "AWS SDK for JavaScript Team", + "url": "https://aws.amazon.com/javascript/" + }, + "license": "Apache-2.0", + "dependencies": { + "@aws-sdk/types": "3.775.0", + "@smithy/protocol-http": "^5.1.0", + "@smithy/types": "^4.2.0", + "tslib": "^2.6.2" + }, + "engines": { + "node": ">=18.0.0" + }, + "typesVersions": { + "<4.0": { + "dist-types/*": [ + "dist-types/ts3.4/*" + ] + } + }, + "files": [ + "dist-*/**" + ], + "homepage": "https://github.com/aws/aws-sdk-js-v3/tree/main/packages/middleware-host-header", + "repository": { + "type": "git", + "url": "https://github.com/aws/aws-sdk-js-v3.git", + "directory": "packages/middleware-host-header" + }, + "devDependencies": { + 
"@tsconfig/recommended": "1.0.1", + "concurrently": "7.0.0", + "downlevel-dts": "0.10.1", + "rimraf": "3.0.2", + "typescript": "~5.2.2" + } +} diff --git a/amplify/functions/downloadDocument/node_modules/@aws-sdk/middleware-logger/LICENSE b/amplify/functions/downloadDocument/node_modules/@aws-sdk/middleware-logger/LICENSE new file mode 100644 index 0000000..74d4e5c --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@aws-sdk/middleware-logger/LICENSE @@ -0,0 +1,201 @@ +Apache License + Version 2.0, January 2004 + http://www.apache.org/licenses/ + + TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION + + 1. Definitions. + + "License" shall mean the terms and conditions for use, reproduction, + and distribution as defined by Sections 1 through 9 of this document. + + "Licensor" shall mean the copyright owner or entity authorized by + the copyright owner that is granting the License. + + "Legal Entity" shall mean the union of the acting entity and all + other entities that control, are controlled by, or are under common + control with that entity. For the purposes of this definition, + "control" means (i) the power, direct or indirect, to cause the + direction or management of such entity, whether by contract or + otherwise, or (ii) ownership of fifty percent (50%) or more of the + outstanding shares, or (iii) beneficial ownership of such entity. + + "You" (or "Your") shall mean an individual or Legal Entity + exercising permissions granted by this License. + + "Source" form shall mean the preferred form for making modifications, + including but not limited to software source code, documentation + source, and configuration files. + + "Object" form shall mean any form resulting from mechanical + transformation or translation of a Source form, including but + not limited to compiled object code, generated documentation, + and conversions to other media types. 
+ + "Work" shall mean the work of authorship, whether in Source or + Object form, made available under the License, as indicated by a + copyright notice that is included in or attached to the work + (an example is provided in the Appendix below). + + "Derivative Works" shall mean any work, whether in Source or Object + form, that is based on (or derived from) the Work and for which the + editorial revisions, annotations, elaborations, or other modifications + represent, as a whole, an original work of authorship. For the purposes + of this License, Derivative Works shall not include works that remain + separable from, or merely link (or bind by name) to the interfaces of, + the Work and Derivative Works thereof. + + "Contribution" shall mean any work of authorship, including + the original version of the Work and any modifications or additions + to that Work or Derivative Works thereof, that is intentionally + submitted to Licensor for inclusion in the Work by the copyright owner + or by an individual or Legal Entity authorized to submit on behalf of + the copyright owner. For the purposes of this definition, "submitted" + means any form of electronic, verbal, or written communication sent + to the Licensor or its representatives, including but not limited to + communication on electronic mailing lists, source code control systems, + and issue tracking systems that are managed by, or on behalf of, the + Licensor for the purpose of discussing and improving the Work, but + excluding communication that is conspicuously marked or otherwise + designated in writing by the copyright owner as "Not a Contribution." + + "Contributor" shall mean Licensor and any individual or Legal Entity + on behalf of whom a Contribution has been received by Licensor and + subsequently incorporated within the Work. + + 2. Grant of Copyright License. 
Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + copyright license to reproduce, prepare Derivative Works of, + publicly display, publicly perform, sublicense, and distribute the + Work and such Derivative Works in Source or Object form. + + 3. Grant of Patent License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + (except as stated in this section) patent license to make, have made, + use, offer to sell, sell, import, and otherwise transfer the Work, + where such license applies only to those patent claims licensable + by such Contributor that are necessarily infringed by their + Contribution(s) alone or by combination of their Contribution(s) + with the Work to which such Contribution(s) was submitted. If You + institute patent litigation against any entity (including a + cross-claim or counterclaim in a lawsuit) alleging that the Work + or a Contribution incorporated within the Work constitutes direct + or contributory patent infringement, then any patent licenses + granted to You under this License for that Work shall terminate + as of the date such litigation is filed. + + 4. Redistribution. 
You may reproduce and distribute copies of the + Work or Derivative Works thereof in any medium, with or without + modifications, and in Source or Object form, provided that You + meet the following conditions: + + (a) You must give any other recipients of the Work or + Derivative Works a copy of this License; and + + (b) You must cause any modified files to carry prominent notices + stating that You changed the files; and + + (c) You must retain, in the Source form of any Derivative Works + that You distribute, all copyright, patent, trademark, and + attribution notices from the Source form of the Work, + excluding those notices that do not pertain to any part of + the Derivative Works; and + + (d) If the Work includes a "NOTICE" text file as part of its + distribution, then any Derivative Works that You distribute must + include a readable copy of the attribution notices contained + within such NOTICE file, excluding those notices that do not + pertain to any part of the Derivative Works, in at least one + of the following places: within a NOTICE text file distributed + as part of the Derivative Works; within the Source form or + documentation, if provided along with the Derivative Works; or, + within a display generated by the Derivative Works, if and + wherever such third-party notices normally appear. The contents + of the NOTICE file are for informational purposes only and + do not modify the License. You may add Your own attribution + notices within Derivative Works that You distribute, alongside + or as an addendum to the NOTICE text from the Work, provided + that such additional attribution notices cannot be construed + as modifying the License. 
+ + You may add Your own copyright statement to Your modifications and + may provide additional or different license terms and conditions + for use, reproduction, or distribution of Your modifications, or + for any such Derivative Works as a whole, provided Your use, + reproduction, and distribution of the Work otherwise complies with + the conditions stated in this License. + + 5. Submission of Contributions. Unless You explicitly state otherwise, + any Contribution intentionally submitted for inclusion in the Work + by You to the Licensor shall be under the terms and conditions of + this License, without any additional terms or conditions. + Notwithstanding the above, nothing herein shall supersede or modify + the terms of any separate license agreement you may have executed + with Licensor regarding such Contributions. + + 6. Trademarks. This License does not grant permission to use the trade + names, trademarks, service marks, or product names of the Licensor, + except as required for reasonable and customary use in describing the + origin of the Work and reproducing the content of the NOTICE file. + + 7. Disclaimer of Warranty. Unless required by applicable law or + agreed to in writing, Licensor provides the Work (and each + Contributor provides its Contributions) on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or + implied, including, without limitation, any warranties or conditions + of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A + PARTICULAR PURPOSE. You are solely responsible for determining the + appropriateness of using or redistributing the Work and assume any + risks associated with Your exercise of permissions under this License. + + 8. Limitation of Liability. 
In no event and under no legal theory, + whether in tort (including negligence), contract, or otherwise, + unless required by applicable law (such as deliberate and grossly + negligent acts) or agreed to in writing, shall any Contributor be + liable to You for damages, including any direct, indirect, special, + incidental, or consequential damages of any character arising as a + result of this License or out of the use or inability to use the + Work (including but not limited to damages for loss of goodwill, + work stoppage, computer failure or malfunction, or any and all + other commercial damages or losses), even if such Contributor + has been advised of the possibility of such damages. + + 9. Accepting Warranty or Additional Liability. While redistributing + the Work or Derivative Works thereof, You may choose to offer, + and charge a fee for, acceptance of support, warranty, indemnity, + or other liability obligations and/or rights consistent with this + License. However, in accepting such obligations, You may act only + on Your own behalf and on Your sole responsibility, not on behalf + of any other Contributor, and only if You agree to indemnify, + defend, and hold each Contributor harmless for any liability + incurred by, or claims asserted against, such Contributor by reason + of your accepting any such warranty or additional liability. + + END OF TERMS AND CONDITIONS + + APPENDIX: How to apply the Apache License to your work. + + To apply the Apache License to your work, attach the following + boilerplate notice, with the fields enclosed by brackets "{}" + replaced with your own identifying information. (Don't include + the brackets!) The text should be enclosed in the appropriate + comment syntax for the file format. We also recommend that a + file or class name and description of purpose be included on the + same "printed page" as the copyright notice for easier + identification within third-party archives. + + Copyright 2020 Amazon.com, Inc. 
or its affiliates. All Rights Reserved. + + Licensed under the Apache License, Version 2.0 (the "License"); + you may not use this file except in compliance with the License. + You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + + Unless required by applicable law or agreed to in writing, software + distributed under the License is distributed on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + See the License for the specific language governing permissions and + limitations under the License. \ No newline at end of file diff --git a/amplify/functions/downloadDocument/node_modules/@aws-sdk/middleware-logger/README.md b/amplify/functions/downloadDocument/node_modules/@aws-sdk/middleware-logger/README.md new file mode 100644 index 0000000..861fa43 --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@aws-sdk/middleware-logger/README.md @@ -0,0 +1,4 @@ +# @aws-sdk/middleware-logger + +[![NPM version](https://img.shields.io/npm/v/@aws-sdk/middleware-logger/latest.svg)](https://www.npmjs.com/package/@aws-sdk/middleware-logger) +[![NPM downloads](https://img.shields.io/npm/dm/@aws-sdk/middleware-logger.svg)](https://www.npmjs.com/package/@aws-sdk/middleware-logger) diff --git a/amplify/functions/downloadDocument/node_modules/@aws-sdk/middleware-logger/dist-cjs/index.js b/amplify/functions/downloadDocument/node_modules/@aws-sdk/middleware-logger/dist-cjs/index.js new file mode 100644 index 0000000..b1db308 --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@aws-sdk/middleware-logger/dist-cjs/index.js @@ -0,0 +1,79 @@ +"use strict"; +var __defProp = Object.defineProperty; +var __getOwnPropDesc = Object.getOwnPropertyDescriptor; +var __getOwnPropNames = Object.getOwnPropertyNames; +var __hasOwnProp = Object.prototype.hasOwnProperty; +var __name = (target, value) => __defProp(target, "name", { value, configurable: true }); +var __export = (target, all) => { + 
for (var name in all) + __defProp(target, name, { get: all[name], enumerable: true }); +}; +var __copyProps = (to, from, except, desc) => { + if (from && typeof from === "object" || typeof from === "function") { + for (let key of __getOwnPropNames(from)) + if (!__hasOwnProp.call(to, key) && key !== except) + __defProp(to, key, { get: () => from[key], enumerable: !(desc = __getOwnPropDesc(from, key)) || desc.enumerable }); + } + return to; +}; +var __toCommonJS = (mod) => __copyProps(__defProp({}, "__esModule", { value: true }), mod); + +// src/index.ts +var index_exports = {}; +__export(index_exports, { + getLoggerPlugin: () => getLoggerPlugin, + loggerMiddleware: () => loggerMiddleware, + loggerMiddlewareOptions: () => loggerMiddlewareOptions +}); +module.exports = __toCommonJS(index_exports); + +// src/loggerMiddleware.ts +var loggerMiddleware = /* @__PURE__ */ __name(() => (next, context) => async (args) => { + try { + const response = await next(args); + const { clientName, commandName, logger, dynamoDbDocumentClientOptions = {} } = context; + const { overrideInputFilterSensitiveLog, overrideOutputFilterSensitiveLog } = dynamoDbDocumentClientOptions; + const inputFilterSensitiveLog = overrideInputFilterSensitiveLog ?? context.inputFilterSensitiveLog; + const outputFilterSensitiveLog = overrideOutputFilterSensitiveLog ?? context.outputFilterSensitiveLog; + const { $metadata, ...outputWithoutMetadata } = response.output; + logger?.info?.({ + clientName, + commandName, + input: inputFilterSensitiveLog(args.input), + output: outputFilterSensitiveLog(outputWithoutMetadata), + metadata: $metadata + }); + return response; + } catch (error) { + const { clientName, commandName, logger, dynamoDbDocumentClientOptions = {} } = context; + const { overrideInputFilterSensitiveLog } = dynamoDbDocumentClientOptions; + const inputFilterSensitiveLog = overrideInputFilterSensitiveLog ?? 
context.inputFilterSensitiveLog; + logger?.error?.({ + clientName, + commandName, + input: inputFilterSensitiveLog(args.input), + error, + metadata: error.$metadata + }); + throw error; + } +}, "loggerMiddleware"); +var loggerMiddlewareOptions = { + name: "loggerMiddleware", + tags: ["LOGGER"], + step: "initialize", + override: true +}; +var getLoggerPlugin = /* @__PURE__ */ __name((options) => ({ + applyToStack: /* @__PURE__ */ __name((clientStack) => { + clientStack.add(loggerMiddleware(), loggerMiddlewareOptions); + }, "applyToStack") +}), "getLoggerPlugin"); +// Annotate the CommonJS export names for ESM import in node: + +0 && (module.exports = { + loggerMiddleware, + loggerMiddlewareOptions, + getLoggerPlugin +}); + diff --git a/amplify/functions/downloadDocument/node_modules/@aws-sdk/middleware-logger/dist-es/index.js b/amplify/functions/downloadDocument/node_modules/@aws-sdk/middleware-logger/dist-es/index.js new file mode 100644 index 0000000..171e3bc --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@aws-sdk/middleware-logger/dist-es/index.js @@ -0,0 +1 @@ +export * from "./loggerMiddleware"; diff --git a/amplify/functions/downloadDocument/node_modules/@aws-sdk/middleware-logger/dist-es/loggerMiddleware.js b/amplify/functions/downloadDocument/node_modules/@aws-sdk/middleware-logger/dist-es/loggerMiddleware.js new file mode 100644 index 0000000..50da4cc --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@aws-sdk/middleware-logger/dist-es/loggerMiddleware.js @@ -0,0 +1,42 @@ +export const loggerMiddleware = () => (next, context) => async (args) => { + try { + const response = await next(args); + const { clientName, commandName, logger, dynamoDbDocumentClientOptions = {} } = context; + const { overrideInputFilterSensitiveLog, overrideOutputFilterSensitiveLog } = dynamoDbDocumentClientOptions; + const inputFilterSensitiveLog = overrideInputFilterSensitiveLog ?? 
context.inputFilterSensitiveLog; + const outputFilterSensitiveLog = overrideOutputFilterSensitiveLog ?? context.outputFilterSensitiveLog; + const { $metadata, ...outputWithoutMetadata } = response.output; + logger?.info?.({ + clientName, + commandName, + input: inputFilterSensitiveLog(args.input), + output: outputFilterSensitiveLog(outputWithoutMetadata), + metadata: $metadata, + }); + return response; + } + catch (error) { + const { clientName, commandName, logger, dynamoDbDocumentClientOptions = {} } = context; + const { overrideInputFilterSensitiveLog } = dynamoDbDocumentClientOptions; + const inputFilterSensitiveLog = overrideInputFilterSensitiveLog ?? context.inputFilterSensitiveLog; + logger?.error?.({ + clientName, + commandName, + input: inputFilterSensitiveLog(args.input), + error, + metadata: error.$metadata, + }); + throw error; + } +}; +export const loggerMiddlewareOptions = { + name: "loggerMiddleware", + tags: ["LOGGER"], + step: "initialize", + override: true, +}; +export const getLoggerPlugin = (options) => ({ + applyToStack: (clientStack) => { + clientStack.add(loggerMiddleware(), loggerMiddlewareOptions); + }, +}); diff --git a/amplify/functions/downloadDocument/node_modules/@aws-sdk/middleware-logger/dist-types/index.d.ts b/amplify/functions/downloadDocument/node_modules/@aws-sdk/middleware-logger/dist-types/index.d.ts new file mode 100644 index 0000000..171e3bc --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@aws-sdk/middleware-logger/dist-types/index.d.ts @@ -0,0 +1 @@ +export * from "./loggerMiddleware"; diff --git a/amplify/functions/downloadDocument/node_modules/@aws-sdk/middleware-logger/dist-types/loggerMiddleware.d.ts b/amplify/functions/downloadDocument/node_modules/@aws-sdk/middleware-logger/dist-types/loggerMiddleware.d.ts new file mode 100644 index 0000000..5712017 --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@aws-sdk/middleware-logger/dist-types/loggerMiddleware.d.ts @@ -0,0 +1,4 @@ +import 
{ AbsoluteLocation, HandlerExecutionContext, InitializeHandler, InitializeHandlerOptions, MetadataBearer, Pluggable } from "@smithy/types"; +export declare const loggerMiddleware: () => (next: InitializeHandler, context: HandlerExecutionContext) => InitializeHandler; +export declare const loggerMiddlewareOptions: InitializeHandlerOptions & AbsoluteLocation; +export declare const getLoggerPlugin: (options: any) => Pluggable; diff --git a/amplify/functions/downloadDocument/node_modules/@aws-sdk/middleware-logger/dist-types/ts3.4/index.d.ts b/amplify/functions/downloadDocument/node_modules/@aws-sdk/middleware-logger/dist-types/ts3.4/index.d.ts new file mode 100644 index 0000000..171e3bc --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@aws-sdk/middleware-logger/dist-types/ts3.4/index.d.ts @@ -0,0 +1 @@ +export * from "./loggerMiddleware"; diff --git a/amplify/functions/downloadDocument/node_modules/@aws-sdk/middleware-logger/dist-types/ts3.4/loggerMiddleware.d.ts b/amplify/functions/downloadDocument/node_modules/@aws-sdk/middleware-logger/dist-types/ts3.4/loggerMiddleware.d.ts new file mode 100644 index 0000000..10ded9e --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@aws-sdk/middleware-logger/dist-types/ts3.4/loggerMiddleware.d.ts @@ -0,0 +1,17 @@ +import { + AbsoluteLocation, + HandlerExecutionContext, + InitializeHandler, + InitializeHandlerOptions, + MetadataBearer, + Pluggable, +} from "@smithy/types"; +export declare const loggerMiddleware: () => < + Output extends MetadataBearer = MetadataBearer +>( + next: InitializeHandler, + context: HandlerExecutionContext +) => InitializeHandler; +export declare const loggerMiddlewareOptions: InitializeHandlerOptions & + AbsoluteLocation; +export declare const getLoggerPlugin: (options: any) => Pluggable; diff --git a/amplify/functions/downloadDocument/node_modules/@aws-sdk/middleware-logger/package.json 
b/amplify/functions/downloadDocument/node_modules/@aws-sdk/middleware-logger/package.json new file mode 100644 index 0000000..7187da9 --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@aws-sdk/middleware-logger/package.json @@ -0,0 +1,58 @@ +{ + "name": "@aws-sdk/middleware-logger", + "version": "3.775.0", + "scripts": { + "build": "concurrently 'yarn:build:cjs' 'yarn:build:es' 'yarn:build:types'", + "build:cjs": "node ../../scripts/compilation/inline middleware-logger", + "build:es": "tsc -p tsconfig.es.json", + "build:include:deps": "lerna run --scope $npm_package_name --include-dependencies build", + "build:types": "tsc -p tsconfig.types.json", + "build:types:downlevel": "downlevel-dts dist-types dist-types/ts3.4", + "clean": "rimraf ./dist-* && rimraf *.tsbuildinfo", + "test": "yarn g:vitest run", + "test:integration": "yarn g:vitest run -c vitest.config.integ.ts", + "test:watch": "yarn g:vitest watch", + "test:integration:watch": "yarn g:vitest watch -c vitest.config.integ.ts" + }, + "author": { + "name": "AWS SDK for JavaScript Team", + "email": "", + "url": "https://aws.amazon.com/javascript/" + }, + "license": "Apache-2.0", + "main": "./dist-cjs/index.js", + "module": "./dist-es/index.js", + "types": "./dist-types/index.d.ts", + "dependencies": { + "@aws-sdk/types": "3.775.0", + "@smithy/types": "^4.2.0", + "tslib": "^2.6.2" + }, + "devDependencies": { + "@tsconfig/recommended": "1.0.1", + "@types/node": "^18.19.69", + "concurrently": "7.0.0", + "downlevel-dts": "0.10.1", + "rimraf": "3.0.2", + "typescript": "~5.2.2" + }, + "engines": { + "node": ">=18.0.0" + }, + "typesVersions": { + "<4.0": { + "dist-types/*": [ + "dist-types/ts3.4/*" + ] + } + }, + "files": [ + "dist-*/**" + ], + "homepage": "https://github.com/aws/aws-sdk-js-v3/tree/main/packages/middleware-logger", + "repository": { + "type": "git", + "url": "https://github.com/aws/aws-sdk-js-v3.git", + "directory": "packages/middleware-logger" + } +} diff --git 
a/amplify/functions/downloadDocument/node_modules/@aws-sdk/middleware-recursion-detection/LICENSE b/amplify/functions/downloadDocument/node_modules/@aws-sdk/middleware-recursion-detection/LICENSE new file mode 100644 index 0000000..e907b58 --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@aws-sdk/middleware-recursion-detection/LICENSE @@ -0,0 +1,201 @@ + Apache License + Version 2.0, January 2004 + http://www.apache.org/licenses/ + + TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION + + 1. Definitions. + + "License" shall mean the terms and conditions for use, reproduction, + and distribution as defined by Sections 1 through 9 of this document. + + "Licensor" shall mean the copyright owner or entity authorized by + the copyright owner that is granting the License. + + "Legal Entity" shall mean the union of the acting entity and all + other entities that control, are controlled by, or are under common + control with that entity. For the purposes of this definition, + "control" means (i) the power, direct or indirect, to cause the + direction or management of such entity, whether by contract or + otherwise, or (ii) ownership of fifty percent (50%) or more of the + outstanding shares, or (iii) beneficial ownership of such entity. + + "You" (or "Your") shall mean an individual or Legal Entity + exercising permissions granted by this License. + + "Source" form shall mean the preferred form for making modifications, + including but not limited to software source code, documentation + source, and configuration files. + + "Object" form shall mean any form resulting from mechanical + transformation or translation of a Source form, including but + not limited to compiled object code, generated documentation, + and conversions to other media types. 
+ + "Work" shall mean the work of authorship, whether in Source or + Object form, made available under the License, as indicated by a + copyright notice that is included in or attached to the work + (an example is provided in the Appendix below). + + "Derivative Works" shall mean any work, whether in Source or Object + form, that is based on (or derived from) the Work and for which the + editorial revisions, annotations, elaborations, or other modifications + represent, as a whole, an original work of authorship. For the purposes + of this License, Derivative Works shall not include works that remain + separable from, or merely link (or bind by name) to the interfaces of, + the Work and Derivative Works thereof. + + "Contribution" shall mean any work of authorship, including + the original version of the Work and any modifications or additions + to that Work or Derivative Works thereof, that is intentionally + submitted to Licensor for inclusion in the Work by the copyright owner + or by an individual or Legal Entity authorized to submit on behalf of + the copyright owner. For the purposes of this definition, "submitted" + means any form of electronic, verbal, or written communication sent + to the Licensor or its representatives, including but not limited to + communication on electronic mailing lists, source code control systems, + and issue tracking systems that are managed by, or on behalf of, the + Licensor for the purpose of discussing and improving the Work, but + excluding communication that is conspicuously marked or otherwise + designated in writing by the copyright owner as "Not a Contribution." + + "Contributor" shall mean Licensor and any individual or Legal Entity + on behalf of whom a Contribution has been received by Licensor and + subsequently incorporated within the Work. + + 2. Grant of Copyright License. 
Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + copyright license to reproduce, prepare Derivative Works of, + publicly display, publicly perform, sublicense, and distribute the + Work and such Derivative Works in Source or Object form. + + 3. Grant of Patent License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + (except as stated in this section) patent license to make, have made, + use, offer to sell, sell, import, and otherwise transfer the Work, + where such license applies only to those patent claims licensable + by such Contributor that are necessarily infringed by their + Contribution(s) alone or by combination of their Contribution(s) + with the Work to which such Contribution(s) was submitted. If You + institute patent litigation against any entity (including a + cross-claim or counterclaim in a lawsuit) alleging that the Work + or a Contribution incorporated within the Work constitutes direct + or contributory patent infringement, then any patent licenses + granted to You under this License for that Work shall terminate + as of the date such litigation is filed. + + 4. Redistribution. 
You may reproduce and distribute copies of the + Work or Derivative Works thereof in any medium, with or without + modifications, and in Source or Object form, provided that You + meet the following conditions: + + (a) You must give any other recipients of the Work or + Derivative Works a copy of this License; and + + (b) You must cause any modified files to carry prominent notices + stating that You changed the files; and + + (c) You must retain, in the Source form of any Derivative Works + that You distribute, all copyright, patent, trademark, and + attribution notices from the Source form of the Work, + excluding those notices that do not pertain to any part of + the Derivative Works; and + + (d) If the Work includes a "NOTICE" text file as part of its + distribution, then any Derivative Works that You distribute must + include a readable copy of the attribution notices contained + within such NOTICE file, excluding those notices that do not + pertain to any part of the Derivative Works, in at least one + of the following places: within a NOTICE text file distributed + as part of the Derivative Works; within the Source form or + documentation, if provided along with the Derivative Works; or, + within a display generated by the Derivative Works, if and + wherever such third-party notices normally appear. The contents + of the NOTICE file are for informational purposes only and + do not modify the License. You may add Your own attribution + notices within Derivative Works that You distribute, alongside + or as an addendum to the NOTICE text from the Work, provided + that such additional attribution notices cannot be construed + as modifying the License. 
+ + You may add Your own copyright statement to Your modifications and + may provide additional or different license terms and conditions + for use, reproduction, or distribution of Your modifications, or + for any such Derivative Works as a whole, provided Your use, + reproduction, and distribution of the Work otherwise complies with + the conditions stated in this License. + + 5. Submission of Contributions. Unless You explicitly state otherwise, + any Contribution intentionally submitted for inclusion in the Work + by You to the Licensor shall be under the terms and conditions of + this License, without any additional terms or conditions. + Notwithstanding the above, nothing herein shall supersede or modify + the terms of any separate license agreement you may have executed + with Licensor regarding such Contributions. + + 6. Trademarks. This License does not grant permission to use the trade + names, trademarks, service marks, or product names of the Licensor, + except as required for reasonable and customary use in describing the + origin of the Work and reproducing the content of the NOTICE file. + + 7. Disclaimer of Warranty. Unless required by applicable law or + agreed to in writing, Licensor provides the Work (and each + Contributor provides its Contributions) on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or + implied, including, without limitation, any warranties or conditions + of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A + PARTICULAR PURPOSE. You are solely responsible for determining the + appropriateness of using or redistributing the Work and assume any + risks associated with Your exercise of permissions under this License. + + 8. Limitation of Liability. 
In no event and under no legal theory, + whether in tort (including negligence), contract, or otherwise, + unless required by applicable law (such as deliberate and grossly + negligent acts) or agreed to in writing, shall any Contributor be + liable to You for damages, including any direct, indirect, special, + incidental, or consequential damages of any character arising as a + result of this License or out of the use or inability to use the + Work (including but not limited to damages for loss of goodwill, + work stoppage, computer failure or malfunction, or any and all + other commercial damages or losses), even if such Contributor + has been advised of the possibility of such damages. + + 9. Accepting Warranty or Additional Liability. While redistributing + the Work or Derivative Works thereof, You may choose to offer, + and charge a fee for, acceptance of support, warranty, indemnity, + or other liability obligations and/or rights consistent with this + License. However, in accepting such obligations, You may act only + on Your own behalf and on Your sole responsibility, not on behalf + of any other Contributor, and only if You agree to indemnify, + defend, and hold each Contributor harmless for any liability + incurred by, or claims asserted against, such Contributor by reason + of your accepting any such warranty or additional liability. + + END OF TERMS AND CONDITIONS + + APPENDIX: How to apply the Apache License to your work. + + To apply the Apache License to your work, attach the following + boilerplate notice, with the fields enclosed by brackets "{}" + replaced with your own identifying information. (Don't include + the brackets!) The text should be enclosed in the appropriate + comment syntax for the file format. We also recommend that a + file or class name and description of purpose be included on the + same "printed page" as the copyright notice for easier + identification within third-party archives. + + Copyright 2019 Amazon.com, Inc. 
or its affiliates. All Rights Reserved. + + Licensed under the Apache License, Version 2.0 (the "License"); + you may not use this file except in compliance with the License. + You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + + Unless required by applicable law or agreed to in writing, software + distributed under the License is distributed on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + See the License for the specific language governing permissions and + limitations under the License. diff --git a/amplify/functions/downloadDocument/node_modules/@aws-sdk/middleware-recursion-detection/README.md b/amplify/functions/downloadDocument/node_modules/@aws-sdk/middleware-recursion-detection/README.md new file mode 100644 index 0000000..2d5437e --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@aws-sdk/middleware-recursion-detection/README.md @@ -0,0 +1,10 @@ +# @aws-sdk/middleware-recursion-detection + +[![NPM version](https://img.shields.io/npm/v/@aws-sdk/middleware-recursion-detection/latest.svg)](https://www.npmjs.com/package/@aws-sdk/middleware-recursion-detection) +[![NPM downloads](https://img.shields.io/npm/dm/@aws-sdk/middleware-recursion-detection.svg)](https://www.npmjs.com/package/@aws-sdk/middleware-recursion-detection) + +> An internal package + +## Usage + +You probably shouldn't, at least directly. 
diff --git a/amplify/functions/downloadDocument/node_modules/@aws-sdk/middleware-recursion-detection/dist-cjs/index.js b/amplify/functions/downloadDocument/node_modules/@aws-sdk/middleware-recursion-detection/dist-cjs/index.js new file mode 100644 index 0000000..a387687 --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@aws-sdk/middleware-recursion-detection/dist-cjs/index.js @@ -0,0 +1,72 @@ +"use strict"; +var __defProp = Object.defineProperty; +var __getOwnPropDesc = Object.getOwnPropertyDescriptor; +var __getOwnPropNames = Object.getOwnPropertyNames; +var __hasOwnProp = Object.prototype.hasOwnProperty; +var __name = (target, value) => __defProp(target, "name", { value, configurable: true }); +var __export = (target, all) => { + for (var name in all) + __defProp(target, name, { get: all[name], enumerable: true }); +}; +var __copyProps = (to, from, except, desc) => { + if (from && typeof from === "object" || typeof from === "function") { + for (let key of __getOwnPropNames(from)) + if (!__hasOwnProp.call(to, key) && key !== except) + __defProp(to, key, { get: () => from[key], enumerable: !(desc = __getOwnPropDesc(from, key)) || desc.enumerable }); + } + return to; +}; +var __toCommonJS = (mod) => __copyProps(__defProp({}, "__esModule", { value: true }), mod); + +// src/index.ts +var index_exports = {}; +__export(index_exports, { + addRecursionDetectionMiddlewareOptions: () => addRecursionDetectionMiddlewareOptions, + getRecursionDetectionPlugin: () => getRecursionDetectionPlugin, + recursionDetectionMiddleware: () => recursionDetectionMiddleware +}); +module.exports = __toCommonJS(index_exports); +var import_protocol_http = require("@smithy/protocol-http"); +var TRACE_ID_HEADER_NAME = "X-Amzn-Trace-Id"; +var ENV_LAMBDA_FUNCTION_NAME = "AWS_LAMBDA_FUNCTION_NAME"; +var ENV_TRACE_ID = "_X_AMZN_TRACE_ID"; +var recursionDetectionMiddleware = /* @__PURE__ */ __name((options) => (next) => async (args) => { + const { request } = args; + if 
(!import_protocol_http.HttpRequest.isInstance(request) || options.runtime !== "node") { + return next(args); + } + const traceIdHeader = Object.keys(request.headers ?? {}).find((h) => h.toLowerCase() === TRACE_ID_HEADER_NAME.toLowerCase()) ?? TRACE_ID_HEADER_NAME; + if (request.headers.hasOwnProperty(traceIdHeader)) { + return next(args); + } + const functionName = process.env[ENV_LAMBDA_FUNCTION_NAME]; + const traceId = process.env[ENV_TRACE_ID]; + const nonEmptyString = /* @__PURE__ */ __name((str) => typeof str === "string" && str.length > 0, "nonEmptyString"); + if (nonEmptyString(functionName) && nonEmptyString(traceId)) { + request.headers[TRACE_ID_HEADER_NAME] = traceId; + } + return next({ + ...args, + request + }); +}, "recursionDetectionMiddleware"); +var addRecursionDetectionMiddlewareOptions = { + step: "build", + tags: ["RECURSION_DETECTION"], + name: "recursionDetectionMiddleware", + override: true, + priority: "low" +}; +var getRecursionDetectionPlugin = /* @__PURE__ */ __name((options) => ({ + applyToStack: /* @__PURE__ */ __name((clientStack) => { + clientStack.add(recursionDetectionMiddleware(options), addRecursionDetectionMiddlewareOptions); + }, "applyToStack") +}), "getRecursionDetectionPlugin"); +// Annotate the CommonJS export names for ESM import in node: + +0 && (module.exports = { + recursionDetectionMiddleware, + addRecursionDetectionMiddlewareOptions, + getRecursionDetectionPlugin +}); + diff --git a/amplify/functions/downloadDocument/node_modules/@aws-sdk/middleware-recursion-detection/dist-es/index.js b/amplify/functions/downloadDocument/node_modules/@aws-sdk/middleware-recursion-detection/dist-es/index.js new file mode 100644 index 0000000..8ac4748 --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@aws-sdk/middleware-recursion-detection/dist-es/index.js @@ -0,0 +1,37 @@ +import { HttpRequest } from "@smithy/protocol-http"; +const TRACE_ID_HEADER_NAME = "X-Amzn-Trace-Id"; +const ENV_LAMBDA_FUNCTION_NAME = 
"AWS_LAMBDA_FUNCTION_NAME"; +const ENV_TRACE_ID = "_X_AMZN_TRACE_ID"; +export const recursionDetectionMiddleware = (options) => (next) => async (args) => { + const { request } = args; + if (!HttpRequest.isInstance(request) || options.runtime !== "node") { + return next(args); + } + const traceIdHeader = Object.keys(request.headers ?? {}).find((h) => h.toLowerCase() === TRACE_ID_HEADER_NAME.toLowerCase()) ?? + TRACE_ID_HEADER_NAME; + if (request.headers.hasOwnProperty(traceIdHeader)) { + return next(args); + } + const functionName = process.env[ENV_LAMBDA_FUNCTION_NAME]; + const traceId = process.env[ENV_TRACE_ID]; + const nonEmptyString = (str) => typeof str === "string" && str.length > 0; + if (nonEmptyString(functionName) && nonEmptyString(traceId)) { + request.headers[TRACE_ID_HEADER_NAME] = traceId; + } + return next({ + ...args, + request, + }); +}; +export const addRecursionDetectionMiddlewareOptions = { + step: "build", + tags: ["RECURSION_DETECTION"], + name: "recursionDetectionMiddleware", + override: true, + priority: "low", +}; +export const getRecursionDetectionPlugin = (options) => ({ + applyToStack: (clientStack) => { + clientStack.add(recursionDetectionMiddleware(options), addRecursionDetectionMiddlewareOptions); + }, +}); diff --git a/amplify/functions/downloadDocument/node_modules/@aws-sdk/middleware-recursion-detection/dist-types/index.d.ts b/amplify/functions/downloadDocument/node_modules/@aws-sdk/middleware-recursion-detection/dist-types/index.d.ts new file mode 100644 index 0000000..9f92984 --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@aws-sdk/middleware-recursion-detection/dist-types/index.d.ts @@ -0,0 +1,18 @@ +import { AbsoluteLocation, BuildHandlerOptions, BuildMiddleware, Pluggable } from "@smithy/types"; +interface PreviouslyResolved { + runtime: string; +} +/** + * Inject to trace ID to request header to detect recursion invocation in Lambda. 
+ * @internal + */ +export declare const recursionDetectionMiddleware: (options: PreviouslyResolved) => BuildMiddleware; +/** + * @internal + */ +export declare const addRecursionDetectionMiddlewareOptions: BuildHandlerOptions & AbsoluteLocation; +/** + * @internal + */ +export declare const getRecursionDetectionPlugin: (options: PreviouslyResolved) => Pluggable; +export {}; diff --git a/amplify/functions/downloadDocument/node_modules/@aws-sdk/middleware-recursion-detection/dist-types/ts3.4/index.d.ts b/amplify/functions/downloadDocument/node_modules/@aws-sdk/middleware-recursion-detection/dist-types/ts3.4/index.d.ts new file mode 100644 index 0000000..8d1658b --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@aws-sdk/middleware-recursion-detection/dist-types/ts3.4/index.d.ts @@ -0,0 +1,18 @@ +import { + AbsoluteLocation, + BuildHandlerOptions, + BuildMiddleware, + Pluggable, +} from "@smithy/types"; +interface PreviouslyResolved { + runtime: string; +} +export declare const recursionDetectionMiddleware: ( + options: PreviouslyResolved +) => BuildMiddleware; +export declare const addRecursionDetectionMiddlewareOptions: BuildHandlerOptions & + AbsoluteLocation; +export declare const getRecursionDetectionPlugin: ( + options: PreviouslyResolved +) => Pluggable; +export {}; diff --git a/amplify/functions/downloadDocument/node_modules/@aws-sdk/middleware-recursion-detection/package.json b/amplify/functions/downloadDocument/node_modules/@aws-sdk/middleware-recursion-detection/package.json new file mode 100644 index 0000000..7c831f9 --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@aws-sdk/middleware-recursion-detection/package.json @@ -0,0 +1,57 @@ +{ + "name": "@aws-sdk/middleware-recursion-detection", + "version": "3.775.0", + "scripts": { + "build": "concurrently 'yarn:build:cjs' 'yarn:build:es' 'yarn:build:types'", + "build:cjs": "node ../../scripts/compilation/inline middleware-recursion-detection", + "build:es": "tsc -p 
tsconfig.es.json", + "build:include:deps": "lerna run --scope $npm_package_name --include-dependencies build", + "build:types": "tsc -p tsconfig.types.json", + "build:types:downlevel": "downlevel-dts dist-types dist-types/ts3.4", + "clean": "rimraf ./dist-* && rimraf *.tsbuildinfo", + "test": "yarn g:vitest run", + "test:integration": "yarn g:vitest run -c vitest.config.integ.ts", + "test:watch": "yarn g:vitest watch", + "test:integration:watch": "yarn g:vitest watch -c vitest.config.integ.ts" + }, + "main": "./dist-cjs/index.js", + "module": "./dist-es/index.js", + "types": "./dist-types/index.d.ts", + "author": { + "name": "AWS SDK for JavaScript Team", + "url": "https://aws.amazon.com/javascript/" + }, + "license": "Apache-2.0", + "dependencies": { + "@aws-sdk/types": "3.775.0", + "@smithy/protocol-http": "^5.1.0", + "@smithy/types": "^4.2.0", + "tslib": "^2.6.2" + }, + "engines": { + "node": ">=18.0.0" + }, + "typesVersions": { + "<4.0": { + "dist-types/*": [ + "dist-types/ts3.4/*" + ] + } + }, + "files": [ + "dist-*/**" + ], + "homepage": "https://github.com/aws/aws-sdk-js-v3/tree/main/packages/middleware-recursion-detection", + "repository": { + "type": "git", + "url": "https://github.com/aws/aws-sdk-js-v3.git", + "directory": "packages/middleware-recursion-detection" + }, + "devDependencies": { + "@tsconfig/recommended": "1.0.1", + "concurrently": "7.0.0", + "downlevel-dts": "0.10.1", + "rimraf": "3.0.2", + "typescript": "~5.2.2" + } +} diff --git a/amplify/functions/downloadDocument/node_modules/@aws-sdk/middleware-user-agent/LICENSE b/amplify/functions/downloadDocument/node_modules/@aws-sdk/middleware-user-agent/LICENSE new file mode 100644 index 0000000..e907b58 --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@aws-sdk/middleware-user-agent/LICENSE @@ -0,0 +1,201 @@ + Apache License + Version 2.0, January 2004 + http://www.apache.org/licenses/ + + TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION + + 1. Definitions. 
+ + "License" shall mean the terms and conditions for use, reproduction, + and distribution as defined by Sections 1 through 9 of this document. + + "Licensor" shall mean the copyright owner or entity authorized by + the copyright owner that is granting the License. + + "Legal Entity" shall mean the union of the acting entity and all + other entities that control, are controlled by, or are under common + control with that entity. For the purposes of this definition, + "control" means (i) the power, direct or indirect, to cause the + direction or management of such entity, whether by contract or + otherwise, or (ii) ownership of fifty percent (50%) or more of the + outstanding shares, or (iii) beneficial ownership of such entity. + + "You" (or "Your") shall mean an individual or Legal Entity + exercising permissions granted by this License. + + "Source" form shall mean the preferred form for making modifications, + including but not limited to software source code, documentation + source, and configuration files. + + "Object" form shall mean any form resulting from mechanical + transformation or translation of a Source form, including but + not limited to compiled object code, generated documentation, + and conversions to other media types. + + "Work" shall mean the work of authorship, whether in Source or + Object form, made available under the License, as indicated by a + copyright notice that is included in or attached to the work + (an example is provided in the Appendix below). + + "Derivative Works" shall mean any work, whether in Source or Object + form, that is based on (or derived from) the Work and for which the + editorial revisions, annotations, elaborations, or other modifications + represent, as a whole, an original work of authorship. For the purposes + of this License, Derivative Works shall not include works that remain + separable from, or merely link (or bind by name) to the interfaces of, + the Work and Derivative Works thereof. 
+ + "Contribution" shall mean any work of authorship, including + the original version of the Work and any modifications or additions + to that Work or Derivative Works thereof, that is intentionally + submitted to Licensor for inclusion in the Work by the copyright owner + or by an individual or Legal Entity authorized to submit on behalf of + the copyright owner. For the purposes of this definition, "submitted" + means any form of electronic, verbal, or written communication sent + to the Licensor or its representatives, including but not limited to + communication on electronic mailing lists, source code control systems, + and issue tracking systems that are managed by, or on behalf of, the + Licensor for the purpose of discussing and improving the Work, but + excluding communication that is conspicuously marked or otherwise + designated in writing by the copyright owner as "Not a Contribution." + + "Contributor" shall mean Licensor and any individual or Legal Entity + on behalf of whom a Contribution has been received by Licensor and + subsequently incorporated within the Work. + + 2. Grant of Copyright License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + copyright license to reproduce, prepare Derivative Works of, + publicly display, publicly perform, sublicense, and distribute the + Work and such Derivative Works in Source or Object form. + + 3. Grant of Patent License. 
Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + (except as stated in this section) patent license to make, have made, + use, offer to sell, sell, import, and otherwise transfer the Work, + where such license applies only to those patent claims licensable + by such Contributor that are necessarily infringed by their + Contribution(s) alone or by combination of their Contribution(s) + with the Work to which such Contribution(s) was submitted. If You + institute patent litigation against any entity (including a + cross-claim or counterclaim in a lawsuit) alleging that the Work + or a Contribution incorporated within the Work constitutes direct + or contributory patent infringement, then any patent licenses + granted to You under this License for that Work shall terminate + as of the date such litigation is filed. + + 4. Redistribution. You may reproduce and distribute copies of the + Work or Derivative Works thereof in any medium, with or without + modifications, and in Source or Object form, provided that You + meet the following conditions: + + (a) You must give any other recipients of the Work or + Derivative Works a copy of this License; and + + (b) You must cause any modified files to carry prominent notices + stating that You changed the files; and + + (c) You must retain, in the Source form of any Derivative Works + that You distribute, all copyright, patent, trademark, and + attribution notices from the Source form of the Work, + excluding those notices that do not pertain to any part of + the Derivative Works; and + + (d) If the Work includes a "NOTICE" text file as part of its + distribution, then any Derivative Works that You distribute must + include a readable copy of the attribution notices contained + within such NOTICE file, excluding those notices that do not + pertain to any part of the Derivative Works, in at least one + of 
the following places: within a NOTICE text file distributed + as part of the Derivative Works; within the Source form or + documentation, if provided along with the Derivative Works; or, + within a display generated by the Derivative Works, if and + wherever such third-party notices normally appear. The contents + of the NOTICE file are for informational purposes only and + do not modify the License. You may add Your own attribution + notices within Derivative Works that You distribute, alongside + or as an addendum to the NOTICE text from the Work, provided + that such additional attribution notices cannot be construed + as modifying the License. + + You may add Your own copyright statement to Your modifications and + may provide additional or different license terms and conditions + for use, reproduction, or distribution of Your modifications, or + for any such Derivative Works as a whole, provided Your use, + reproduction, and distribution of the Work otherwise complies with + the conditions stated in this License. + + 5. Submission of Contributions. Unless You explicitly state otherwise, + any Contribution intentionally submitted for inclusion in the Work + by You to the Licensor shall be under the terms and conditions of + this License, without any additional terms or conditions. + Notwithstanding the above, nothing herein shall supersede or modify + the terms of any separate license agreement you may have executed + with Licensor regarding such Contributions. + + 6. Trademarks. This License does not grant permission to use the trade + names, trademarks, service marks, or product names of the Licensor, + except as required for reasonable and customary use in describing the + origin of the Work and reproducing the content of the NOTICE file. + + 7. Disclaimer of Warranty. 
Unless required by applicable law or + agreed to in writing, Licensor provides the Work (and each + Contributor provides its Contributions) on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or + implied, including, without limitation, any warranties or conditions + of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A + PARTICULAR PURPOSE. You are solely responsible for determining the + appropriateness of using or redistributing the Work and assume any + risks associated with Your exercise of permissions under this License. + + 8. Limitation of Liability. In no event and under no legal theory, + whether in tort (including negligence), contract, or otherwise, + unless required by applicable law (such as deliberate and grossly + negligent acts) or agreed to in writing, shall any Contributor be + liable to You for damages, including any direct, indirect, special, + incidental, or consequential damages of any character arising as a + result of this License or out of the use or inability to use the + Work (including but not limited to damages for loss of goodwill, + work stoppage, computer failure or malfunction, or any and all + other commercial damages or losses), even if such Contributor + has been advised of the possibility of such damages. + + 9. Accepting Warranty or Additional Liability. While redistributing + the Work or Derivative Works thereof, You may choose to offer, + and charge a fee for, acceptance of support, warranty, indemnity, + or other liability obligations and/or rights consistent with this + License. However, in accepting such obligations, You may act only + on Your own behalf and on Your sole responsibility, not on behalf + of any other Contributor, and only if You agree to indemnify, + defend, and hold each Contributor harmless for any liability + incurred by, or claims asserted against, such Contributor by reason + of your accepting any such warranty or additional liability. 
+ + END OF TERMS AND CONDITIONS + + APPENDIX: How to apply the Apache License to your work. + + To apply the Apache License to your work, attach the following + boilerplate notice, with the fields enclosed by brackets "{}" + replaced with your own identifying information. (Don't include + the brackets!) The text should be enclosed in the appropriate + comment syntax for the file format. We also recommend that a + file or class name and description of purpose be included on the + same "printed page" as the copyright notice for easier + identification within third-party archives. + + Copyright 2019 Amazon.com, Inc. or its affiliates. All Rights Reserved. + + Licensed under the Apache License, Version 2.0 (the "License"); + you may not use this file except in compliance with the License. + You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + + Unless required by applicable law or agreed to in writing, software + distributed under the License is distributed on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + See the License for the specific language governing permissions and + limitations under the License. 
diff --git a/amplify/functions/downloadDocument/node_modules/@aws-sdk/middleware-user-agent/README.md b/amplify/functions/downloadDocument/node_modules/@aws-sdk/middleware-user-agent/README.md new file mode 100644 index 0000000..a0bf1a9 --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@aws-sdk/middleware-user-agent/README.md @@ -0,0 +1,4 @@ +# @aws-sdk/middleware-user-agent + +[![NPM version](https://img.shields.io/npm/v/@aws-sdk/middleware-user-agent/latest.svg)](https://www.npmjs.com/package/@aws-sdk/middleware-user-agent) +[![NPM downloads](https://img.shields.io/npm/dm/@aws-sdk/middleware-user-agent.svg)](https://www.npmjs.com/package/@aws-sdk/middleware-user-agent) diff --git a/amplify/functions/downloadDocument/node_modules/@aws-sdk/middleware-user-agent/dist-cjs/index.js b/amplify/functions/downloadDocument/node_modules/@aws-sdk/middleware-user-agent/dist-cjs/index.js new file mode 100644 index 0000000..aaf267c --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@aws-sdk/middleware-user-agent/dist-cjs/index.js @@ -0,0 +1,227 @@ +"use strict"; +var __defProp = Object.defineProperty; +var __getOwnPropDesc = Object.getOwnPropertyDescriptor; +var __getOwnPropNames = Object.getOwnPropertyNames; +var __hasOwnProp = Object.prototype.hasOwnProperty; +var __name = (target, value) => __defProp(target, "name", { value, configurable: true }); +var __export = (target, all) => { + for (var name in all) + __defProp(target, name, { get: all[name], enumerable: true }); +}; +var __copyProps = (to, from, except, desc) => { + if (from && typeof from === "object" || typeof from === "function") { + for (let key of __getOwnPropNames(from)) + if (!__hasOwnProp.call(to, key) && key !== except) + __defProp(to, key, { get: () => from[key], enumerable: !(desc = __getOwnPropDesc(from, key)) || desc.enumerable }); + } + return to; +}; +var __toCommonJS = (mod) => __copyProps(__defProp({}, "__esModule", { value: true }), mod); + +// src/index.ts +var 
index_exports = {}; +__export(index_exports, { + DEFAULT_UA_APP_ID: () => DEFAULT_UA_APP_ID, + getUserAgentMiddlewareOptions: () => getUserAgentMiddlewareOptions, + getUserAgentPlugin: () => getUserAgentPlugin, + resolveUserAgentConfig: () => resolveUserAgentConfig, + userAgentMiddleware: () => userAgentMiddleware +}); +module.exports = __toCommonJS(index_exports); + +// src/configurations.ts +var import_core = require("@smithy/core"); +var DEFAULT_UA_APP_ID = void 0; +function isValidUserAgentAppId(appId) { + if (appId === void 0) { + return true; + } + return typeof appId === "string" && appId.length <= 50; +} +__name(isValidUserAgentAppId, "isValidUserAgentAppId"); +function resolveUserAgentConfig(input) { + const normalizedAppIdProvider = (0, import_core.normalizeProvider)(input.userAgentAppId ?? DEFAULT_UA_APP_ID); + const { customUserAgent } = input; + return Object.assign(input, { + customUserAgent: typeof customUserAgent === "string" ? [[customUserAgent]] : customUserAgent, + userAgentAppId: /* @__PURE__ */ __name(async () => { + const appId = await normalizedAppIdProvider(); + if (!isValidUserAgentAppId(appId)) { + const logger = input.logger?.constructor?.name === "NoOpLogger" || !input.logger ? 
console : input.logger; + if (typeof appId !== "string") { + logger?.warn("userAgentAppId must be a string or undefined."); + } else if (appId.length > 50) { + logger?.warn("The provided userAgentAppId exceeds the maximum length of 50 characters."); + } + } + return appId; + }, "userAgentAppId") + }); +} +__name(resolveUserAgentConfig, "resolveUserAgentConfig"); + +// src/user-agent-middleware.ts +var import_util_endpoints = require("@aws-sdk/util-endpoints"); +var import_protocol_http = require("@smithy/protocol-http"); + +// src/check-features.ts +var import_core2 = require("@aws-sdk/core"); +var ACCOUNT_ID_ENDPOINT_REGEX = /\d{12}\.ddb/; +async function checkFeatures(context, config, args) { + const request = args.request; + if (request?.headers?.["smithy-protocol"] === "rpc-v2-cbor") { + (0, import_core2.setFeature)(context, "PROTOCOL_RPC_V2_CBOR", "M"); + } + if (typeof config.retryStrategy === "function") { + const retryStrategy = await config.retryStrategy(); + if (typeof retryStrategy.acquireInitialRetryToken === "function") { + if (retryStrategy.constructor?.name?.includes("Adaptive")) { + (0, import_core2.setFeature)(context, "RETRY_MODE_ADAPTIVE", "F"); + } else { + (0, import_core2.setFeature)(context, "RETRY_MODE_STANDARD", "E"); + } + } else { + (0, import_core2.setFeature)(context, "RETRY_MODE_LEGACY", "D"); + } + } + if (typeof config.accountIdEndpointMode === "function") { + const endpointV2 = context.endpointV2; + if (String(endpointV2?.url?.hostname).match(ACCOUNT_ID_ENDPOINT_REGEX)) { + (0, import_core2.setFeature)(context, "ACCOUNT_ID_ENDPOINT", "O"); + } + switch (await config.accountIdEndpointMode?.()) { + case "disabled": + (0, import_core2.setFeature)(context, "ACCOUNT_ID_MODE_DISABLED", "Q"); + break; + case "preferred": + (0, import_core2.setFeature)(context, "ACCOUNT_ID_MODE_PREFERRED", "P"); + break; + case "required": + (0, import_core2.setFeature)(context, "ACCOUNT_ID_MODE_REQUIRED", "R"); + break; + } + } + const identity = 
context.__smithy_context?.selectedHttpAuthScheme?.identity; + if (identity?.$source) { + const credentials = identity; + if (credentials.accountId) { + (0, import_core2.setFeature)(context, "RESOLVED_ACCOUNT_ID", "T"); + } + for (const [key, value] of Object.entries(credentials.$source ?? {})) { + (0, import_core2.setFeature)(context, key, value); + } + } +} +__name(checkFeatures, "checkFeatures"); + +// src/constants.ts +var USER_AGENT = "user-agent"; +var X_AMZ_USER_AGENT = "x-amz-user-agent"; +var SPACE = " "; +var UA_NAME_SEPARATOR = "/"; +var UA_NAME_ESCAPE_REGEX = /[^\!\$\%\&\'\*\+\-\.\^\_\`\|\~\d\w]/g; +var UA_VALUE_ESCAPE_REGEX = /[^\!\$\%\&\'\*\+\-\.\^\_\`\|\~\d\w\#]/g; +var UA_ESCAPE_CHAR = "-"; + +// src/encode-features.ts +var BYTE_LIMIT = 1024; +function encodeFeatures(features) { + let buffer = ""; + for (const key in features) { + const val = features[key]; + if (buffer.length + val.length + 1 <= BYTE_LIMIT) { + if (buffer.length) { + buffer += "," + val; + } else { + buffer += val; + } + continue; + } + break; + } + return buffer; +} +__name(encodeFeatures, "encodeFeatures"); + +// src/user-agent-middleware.ts +var userAgentMiddleware = /* @__PURE__ */ __name((options) => (next, context) => async (args) => { + const { request } = args; + if (!import_protocol_http.HttpRequest.isInstance(request)) { + return next(args); + } + const { headers } = request; + const userAgent = context?.userAgent?.map(escapeUserAgent) || []; + const defaultUserAgent = (await options.defaultUserAgentProvider()).map(escapeUserAgent); + await checkFeatures(context, options, args); + const awsContext = context; + defaultUserAgent.push( + `m/${encodeFeatures( + Object.assign({}, context.__smithy_context?.features, awsContext.__aws_sdk_context?.features) + )}` + ); + const customUserAgent = options?.customUserAgent?.map(escapeUserAgent) || []; + const appId = await options.userAgentAppId(); + if (appId) { + defaultUserAgent.push(escapeUserAgent([`app/${appId}`])); + } + const 
prefix = (0, import_util_endpoints.getUserAgentPrefix)(); + const sdkUserAgentValue = (prefix ? [prefix] : []).concat([...defaultUserAgent, ...userAgent, ...customUserAgent]).join(SPACE); + const normalUAValue = [ + ...defaultUserAgent.filter((section) => section.startsWith("aws-sdk-")), + ...customUserAgent + ].join(SPACE); + if (options.runtime !== "browser") { + if (normalUAValue) { + headers[X_AMZ_USER_AGENT] = headers[X_AMZ_USER_AGENT] ? `${headers[USER_AGENT]} ${normalUAValue}` : normalUAValue; + } + headers[USER_AGENT] = sdkUserAgentValue; + } else { + headers[X_AMZ_USER_AGENT] = sdkUserAgentValue; + } + return next({ + ...args, + request + }); +}, "userAgentMiddleware"); +var escapeUserAgent = /* @__PURE__ */ __name((userAgentPair) => { + const name = userAgentPair[0].split(UA_NAME_SEPARATOR).map((part) => part.replace(UA_NAME_ESCAPE_REGEX, UA_ESCAPE_CHAR)).join(UA_NAME_SEPARATOR); + const version = userAgentPair[1]?.replace(UA_VALUE_ESCAPE_REGEX, UA_ESCAPE_CHAR); + const prefixSeparatorIndex = name.indexOf(UA_NAME_SEPARATOR); + const prefix = name.substring(0, prefixSeparatorIndex); + let uaName = name.substring(prefixSeparatorIndex + 1); + if (prefix === "api") { + uaName = uaName.toLowerCase(); + } + return [prefix, uaName, version].filter((item) => item && item.length > 0).reduce((acc, item, index) => { + switch (index) { + case 0: + return item; + case 1: + return `${acc}/${item}`; + default: + return `${acc}#${item}`; + } + }, ""); +}, "escapeUserAgent"); +var getUserAgentMiddlewareOptions = { + name: "getUserAgentMiddleware", + step: "build", + priority: "low", + tags: ["SET_USER_AGENT", "USER_AGENT"], + override: true +}; +var getUserAgentPlugin = /* @__PURE__ */ __name((config) => ({ + applyToStack: /* @__PURE__ */ __name((clientStack) => { + clientStack.add(userAgentMiddleware(config), getUserAgentMiddlewareOptions); + }, "applyToStack") +}), "getUserAgentPlugin"); +// Annotate the CommonJS export names for ESM import in node: + +0 && 
(module.exports = { + DEFAULT_UA_APP_ID, + resolveUserAgentConfig, + userAgentMiddleware, + getUserAgentMiddlewareOptions, + getUserAgentPlugin +}); + diff --git a/amplify/functions/downloadDocument/node_modules/@aws-sdk/middleware-user-agent/dist-es/check-features.js b/amplify/functions/downloadDocument/node_modules/@aws-sdk/middleware-user-agent/dist-es/check-features.js new file mode 100644 index 0000000..1f115a7 --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@aws-sdk/middleware-user-agent/dist-es/check-features.js @@ -0,0 +1,49 @@ +import { setFeature } from "@aws-sdk/core"; +const ACCOUNT_ID_ENDPOINT_REGEX = /\d{12}\.ddb/; +export async function checkFeatures(context, config, args) { + const request = args.request; + if (request?.headers?.["smithy-protocol"] === "rpc-v2-cbor") { + setFeature(context, "PROTOCOL_RPC_V2_CBOR", "M"); + } + if (typeof config.retryStrategy === "function") { + const retryStrategy = await config.retryStrategy(); + if (typeof retryStrategy.acquireInitialRetryToken === "function") { + if (retryStrategy.constructor?.name?.includes("Adaptive")) { + setFeature(context, "RETRY_MODE_ADAPTIVE", "F"); + } + else { + setFeature(context, "RETRY_MODE_STANDARD", "E"); + } + } + else { + setFeature(context, "RETRY_MODE_LEGACY", "D"); + } + } + if (typeof config.accountIdEndpointMode === "function") { + const endpointV2 = context.endpointV2; + if (String(endpointV2?.url?.hostname).match(ACCOUNT_ID_ENDPOINT_REGEX)) { + setFeature(context, "ACCOUNT_ID_ENDPOINT", "O"); + } + switch (await config.accountIdEndpointMode?.()) { + case "disabled": + setFeature(context, "ACCOUNT_ID_MODE_DISABLED", "Q"); + break; + case "preferred": + setFeature(context, "ACCOUNT_ID_MODE_PREFERRED", "P"); + break; + case "required": + setFeature(context, "ACCOUNT_ID_MODE_REQUIRED", "R"); + break; + } + } + const identity = context.__smithy_context?.selectedHttpAuthScheme?.identity; + if (identity?.$source) { + const credentials = identity; + if 
(credentials.accountId) { + setFeature(context, "RESOLVED_ACCOUNT_ID", "T"); + } + for (const [key, value] of Object.entries(credentials.$source ?? {})) { + setFeature(context, key, value); + } + } +} diff --git a/amplify/functions/downloadDocument/node_modules/@aws-sdk/middleware-user-agent/dist-es/configurations.js b/amplify/functions/downloadDocument/node_modules/@aws-sdk/middleware-user-agent/dist-es/configurations.js new file mode 100644 index 0000000..7fff087 --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@aws-sdk/middleware-user-agent/dist-es/configurations.js @@ -0,0 +1,28 @@ +import { normalizeProvider } from "@smithy/core"; +export const DEFAULT_UA_APP_ID = undefined; +function isValidUserAgentAppId(appId) { + if (appId === undefined) { + return true; + } + return typeof appId === "string" && appId.length <= 50; +} +export function resolveUserAgentConfig(input) { + const normalizedAppIdProvider = normalizeProvider(input.userAgentAppId ?? DEFAULT_UA_APP_ID); + const { customUserAgent } = input; + return Object.assign(input, { + customUserAgent: typeof customUserAgent === "string" ? [[customUserAgent]] : customUserAgent, + userAgentAppId: async () => { + const appId = await normalizedAppIdProvider(); + if (!isValidUserAgentAppId(appId)) { + const logger = input.logger?.constructor?.name === "NoOpLogger" || !input.logger ? 
console : input.logger; + if (typeof appId !== "string") { + logger?.warn("userAgentAppId must be a string or undefined."); + } + else if (appId.length > 50) { + logger?.warn("The provided userAgentAppId exceeds the maximum length of 50 characters."); + } + } + return appId; + }, + }); +} diff --git a/amplify/functions/downloadDocument/node_modules/@aws-sdk/middleware-user-agent/dist-es/constants.js b/amplify/functions/downloadDocument/node_modules/@aws-sdk/middleware-user-agent/dist-es/constants.js new file mode 100644 index 0000000..33e3391 --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@aws-sdk/middleware-user-agent/dist-es/constants.js @@ -0,0 +1,7 @@ +export const USER_AGENT = "user-agent"; +export const X_AMZ_USER_AGENT = "x-amz-user-agent"; +export const SPACE = " "; +export const UA_NAME_SEPARATOR = "/"; +export const UA_NAME_ESCAPE_REGEX = /[^\!\$\%\&\'\*\+\-\.\^\_\`\|\~\d\w]/g; +export const UA_VALUE_ESCAPE_REGEX = /[^\!\$\%\&\'\*\+\-\.\^\_\`\|\~\d\w\#]/g; +export const UA_ESCAPE_CHAR = "-"; diff --git a/amplify/functions/downloadDocument/node_modules/@aws-sdk/middleware-user-agent/dist-es/encode-features.js b/amplify/functions/downloadDocument/node_modules/@aws-sdk/middleware-user-agent/dist-es/encode-features.js new file mode 100644 index 0000000..23002b6 --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@aws-sdk/middleware-user-agent/dist-es/encode-features.js @@ -0,0 +1,18 @@ +const BYTE_LIMIT = 1024; +export function encodeFeatures(features) { + let buffer = ""; + for (const key in features) { + const val = features[key]; + if (buffer.length + val.length + 1 <= BYTE_LIMIT) { + if (buffer.length) { + buffer += "," + val; + } + else { + buffer += val; + } + continue; + } + break; + } + return buffer; +} diff --git a/amplify/functions/downloadDocument/node_modules/@aws-sdk/middleware-user-agent/dist-es/index.js b/amplify/functions/downloadDocument/node_modules/@aws-sdk/middleware-user-agent/dist-es/index.js new 
file mode 100644 index 0000000..0456ec7 --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@aws-sdk/middleware-user-agent/dist-es/index.js @@ -0,0 +1,2 @@ +export * from "./configurations"; +export * from "./user-agent-middleware"; diff --git a/amplify/functions/downloadDocument/node_modules/@aws-sdk/middleware-user-agent/dist-es/user-agent-middleware.js b/amplify/functions/downloadDocument/node_modules/@aws-sdk/middleware-user-agent/dist-es/user-agent-middleware.js new file mode 100644 index 0000000..188bda0 --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@aws-sdk/middleware-user-agent/dist-es/user-agent-middleware.js @@ -0,0 +1,82 @@ +import { getUserAgentPrefix } from "@aws-sdk/util-endpoints"; +import { HttpRequest } from "@smithy/protocol-http"; +import { checkFeatures } from "./check-features"; +import { SPACE, UA_ESCAPE_CHAR, UA_NAME_ESCAPE_REGEX, UA_NAME_SEPARATOR, UA_VALUE_ESCAPE_REGEX, USER_AGENT, X_AMZ_USER_AGENT, } from "./constants"; +import { encodeFeatures } from "./encode-features"; +export const userAgentMiddleware = (options) => (next, context) => async (args) => { + const { request } = args; + if (!HttpRequest.isInstance(request)) { + return next(args); + } + const { headers } = request; + const userAgent = context?.userAgent?.map(escapeUserAgent) || []; + const defaultUserAgent = (await options.defaultUserAgentProvider()).map(escapeUserAgent); + await checkFeatures(context, options, args); + const awsContext = context; + defaultUserAgent.push(`m/${encodeFeatures(Object.assign({}, context.__smithy_context?.features, awsContext.__aws_sdk_context?.features))}`); + const customUserAgent = options?.customUserAgent?.map(escapeUserAgent) || []; + const appId = await options.userAgentAppId(); + if (appId) { + defaultUserAgent.push(escapeUserAgent([`app/${appId}`])); + } + const prefix = getUserAgentPrefix(); + const sdkUserAgentValue = (prefix ? 
[prefix] : []) + .concat([...defaultUserAgent, ...userAgent, ...customUserAgent]) + .join(SPACE); + const normalUAValue = [ + ...defaultUserAgent.filter((section) => section.startsWith("aws-sdk-")), + ...customUserAgent, + ].join(SPACE); + if (options.runtime !== "browser") { + if (normalUAValue) { + headers[X_AMZ_USER_AGENT] = headers[X_AMZ_USER_AGENT] + ? `${headers[USER_AGENT]} ${normalUAValue}` + : normalUAValue; + } + headers[USER_AGENT] = sdkUserAgentValue; + } + else { + headers[X_AMZ_USER_AGENT] = sdkUserAgentValue; + } + return next({ + ...args, + request, + }); +}; +const escapeUserAgent = (userAgentPair) => { + const name = userAgentPair[0] + .split(UA_NAME_SEPARATOR) + .map((part) => part.replace(UA_NAME_ESCAPE_REGEX, UA_ESCAPE_CHAR)) + .join(UA_NAME_SEPARATOR); + const version = userAgentPair[1]?.replace(UA_VALUE_ESCAPE_REGEX, UA_ESCAPE_CHAR); + const prefixSeparatorIndex = name.indexOf(UA_NAME_SEPARATOR); + const prefix = name.substring(0, prefixSeparatorIndex); + let uaName = name.substring(prefixSeparatorIndex + 1); + if (prefix === "api") { + uaName = uaName.toLowerCase(); + } + return [prefix, uaName, version] + .filter((item) => item && item.length > 0) + .reduce((acc, item, index) => { + switch (index) { + case 0: + return item; + case 1: + return `${acc}/${item}`; + default: + return `${acc}#${item}`; + } + }, ""); +}; +export const getUserAgentMiddlewareOptions = { + name: "getUserAgentMiddleware", + step: "build", + priority: "low", + tags: ["SET_USER_AGENT", "USER_AGENT"], + override: true, +}; +export const getUserAgentPlugin = (config) => ({ + applyToStack: (clientStack) => { + clientStack.add(userAgentMiddleware(config), getUserAgentMiddlewareOptions); + }, +}); diff --git a/amplify/functions/downloadDocument/node_modules/@aws-sdk/middleware-user-agent/dist-types/check-features.d.ts b/amplify/functions/downloadDocument/node_modules/@aws-sdk/middleware-user-agent/dist-types/check-features.d.ts new file mode 100644 index 0000000..a75d08b 
--- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@aws-sdk/middleware-user-agent/dist-types/check-features.d.ts @@ -0,0 +1,18 @@ +import type { AccountIdEndpointMode } from "@aws-sdk/core/account-id-endpoint"; +import type { AwsHandlerExecutionContext } from "@aws-sdk/types"; +import type { AwsCredentialIdentityProvider, BuildHandlerArguments, Provider, RetryStrategy, RetryStrategyV2 } from "@smithy/types"; +/** + * @internal + */ +type PreviouslyResolved = Partial<{ + credentials?: AwsCredentialIdentityProvider; + accountIdEndpointMode?: Provider; + retryStrategy?: Provider; +}>; +/** + * @internal + * Check for features that don't have a middleware activation site but + * may be detected on the context, client config, or request. + */ +export declare function checkFeatures(context: AwsHandlerExecutionContext, config: PreviouslyResolved, args: BuildHandlerArguments): Promise; +export {}; diff --git a/amplify/functions/downloadDocument/node_modules/@aws-sdk/middleware-user-agent/dist-types/configurations.d.ts b/amplify/functions/downloadDocument/node_modules/@aws-sdk/middleware-user-agent/dist-types/configurations.d.ts new file mode 100644 index 0000000..f8183f8 --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@aws-sdk/middleware-user-agent/dist-types/configurations.d.ts @@ -0,0 +1,44 @@ +import { Logger, Provider, UserAgent } from "@smithy/types"; +/** + * @internal + */ +export declare const DEFAULT_UA_APP_ID: undefined; +/** + * @public + */ +export interface UserAgentInputConfig { + /** + * The custom user agent header that would be appended to default one + */ + customUserAgent?: string | UserAgent; + /** + * The application ID used to identify the application. 
+ */ + userAgentAppId?: string | undefined | Provider; +} +interface PreviouslyResolved { + defaultUserAgentProvider: Provider; + runtime: string; + logger?: Logger; +} +export interface UserAgentResolvedConfig { + /** + * The provider populating default tracking information to be sent with `user-agent`, `x-amz-user-agent` header. + * @internal + */ + defaultUserAgentProvider: Provider; + /** + * The custom user agent header that would be appended to default one + */ + customUserAgent?: UserAgent; + /** + * The runtime environment + */ + runtime: string; + /** + * Resolved value for input config {config.userAgentAppId} + */ + userAgentAppId: Provider; +} +export declare function resolveUserAgentConfig(input: T & PreviouslyResolved & UserAgentInputConfig): T & UserAgentResolvedConfig; +export {}; diff --git a/amplify/functions/downloadDocument/node_modules/@aws-sdk/middleware-user-agent/dist-types/constants.d.ts b/amplify/functions/downloadDocument/node_modules/@aws-sdk/middleware-user-agent/dist-types/constants.d.ts new file mode 100644 index 0000000..8c0dfc9 --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@aws-sdk/middleware-user-agent/dist-types/constants.d.ts @@ -0,0 +1,7 @@ +export declare const USER_AGENT = "user-agent"; +export declare const X_AMZ_USER_AGENT = "x-amz-user-agent"; +export declare const SPACE = " "; +export declare const UA_NAME_SEPARATOR = "/"; +export declare const UA_NAME_ESCAPE_REGEX: RegExp; +export declare const UA_VALUE_ESCAPE_REGEX: RegExp; +export declare const UA_ESCAPE_CHAR = "-"; diff --git a/amplify/functions/downloadDocument/node_modules/@aws-sdk/middleware-user-agent/dist-types/encode-features.d.ts b/amplify/functions/downloadDocument/node_modules/@aws-sdk/middleware-user-agent/dist-types/encode-features.d.ts new file mode 100644 index 0000000..d6079ae --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@aws-sdk/middleware-user-agent/dist-types/encode-features.d.ts @@ -0,0 +1,5 @@ +import type 
{ AwsSdkFeatures } from "@aws-sdk/types"; +/** + * @internal + */ +export declare function encodeFeatures(features: AwsSdkFeatures): string; diff --git a/amplify/functions/downloadDocument/node_modules/@aws-sdk/middleware-user-agent/dist-types/index.d.ts b/amplify/functions/downloadDocument/node_modules/@aws-sdk/middleware-user-agent/dist-types/index.d.ts new file mode 100644 index 0000000..0456ec7 --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@aws-sdk/middleware-user-agent/dist-types/index.d.ts @@ -0,0 +1,2 @@ +export * from "./configurations"; +export * from "./user-agent-middleware"; diff --git a/amplify/functions/downloadDocument/node_modules/@aws-sdk/middleware-user-agent/dist-types/ts3.4/check-features.d.ts b/amplify/functions/downloadDocument/node_modules/@aws-sdk/middleware-user-agent/dist-types/ts3.4/check-features.d.ts new file mode 100644 index 0000000..d8fc201 --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@aws-sdk/middleware-user-agent/dist-types/ts3.4/check-features.d.ts @@ -0,0 +1,20 @@ +import { AccountIdEndpointMode } from "@aws-sdk/core/account-id-endpoint"; +import { AwsHandlerExecutionContext } from "@aws-sdk/types"; +import { + AwsCredentialIdentityProvider, + BuildHandlerArguments, + Provider, + RetryStrategy, + RetryStrategyV2, +} from "@smithy/types"; +type PreviouslyResolved = Partial<{ + credentials?: AwsCredentialIdentityProvider; + accountIdEndpointMode?: Provider; + retryStrategy?: Provider; +}>; +export declare function checkFeatures( + context: AwsHandlerExecutionContext, + config: PreviouslyResolved, + args: BuildHandlerArguments +): Promise; +export {}; diff --git a/amplify/functions/downloadDocument/node_modules/@aws-sdk/middleware-user-agent/dist-types/ts3.4/configurations.d.ts b/amplify/functions/downloadDocument/node_modules/@aws-sdk/middleware-user-agent/dist-types/ts3.4/configurations.d.ts new file mode 100644 index 0000000..a4a1b10 --- /dev/null +++ 
b/amplify/functions/downloadDocument/node_modules/@aws-sdk/middleware-user-agent/dist-types/ts3.4/configurations.d.ts @@ -0,0 +1,21 @@ +import { Logger, Provider, UserAgent } from "@smithy/types"; +export declare const DEFAULT_UA_APP_ID: undefined; +export interface UserAgentInputConfig { + customUserAgent?: string | UserAgent; + userAgentAppId?: string | undefined | Provider; +} +interface PreviouslyResolved { + defaultUserAgentProvider: Provider; + runtime: string; + logger?: Logger; +} +export interface UserAgentResolvedConfig { + defaultUserAgentProvider: Provider; + customUserAgent?: UserAgent; + runtime: string; + userAgentAppId: Provider; +} +export declare function resolveUserAgentConfig( + input: T & PreviouslyResolved & UserAgentInputConfig +): T & UserAgentResolvedConfig; +export {}; diff --git a/amplify/functions/downloadDocument/node_modules/@aws-sdk/middleware-user-agent/dist-types/ts3.4/constants.d.ts b/amplify/functions/downloadDocument/node_modules/@aws-sdk/middleware-user-agent/dist-types/ts3.4/constants.d.ts new file mode 100644 index 0000000..8c0dfc9 --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@aws-sdk/middleware-user-agent/dist-types/ts3.4/constants.d.ts @@ -0,0 +1,7 @@ +export declare const USER_AGENT = "user-agent"; +export declare const X_AMZ_USER_AGENT = "x-amz-user-agent"; +export declare const SPACE = " "; +export declare const UA_NAME_SEPARATOR = "/"; +export declare const UA_NAME_ESCAPE_REGEX: RegExp; +export declare const UA_VALUE_ESCAPE_REGEX: RegExp; +export declare const UA_ESCAPE_CHAR = "-"; diff --git a/amplify/functions/downloadDocument/node_modules/@aws-sdk/middleware-user-agent/dist-types/ts3.4/encode-features.d.ts b/amplify/functions/downloadDocument/node_modules/@aws-sdk/middleware-user-agent/dist-types/ts3.4/encode-features.d.ts new file mode 100644 index 0000000..a7be5b7 --- /dev/null +++ 
b/amplify/functions/downloadDocument/node_modules/@aws-sdk/middleware-user-agent/dist-types/ts3.4/encode-features.d.ts @@ -0,0 +1,2 @@ +import { AwsSdkFeatures } from "@aws-sdk/types"; +export declare function encodeFeatures(features: AwsSdkFeatures): string; diff --git a/amplify/functions/downloadDocument/node_modules/@aws-sdk/middleware-user-agent/dist-types/ts3.4/index.d.ts b/amplify/functions/downloadDocument/node_modules/@aws-sdk/middleware-user-agent/dist-types/ts3.4/index.d.ts new file mode 100644 index 0000000..0456ec7 --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@aws-sdk/middleware-user-agent/dist-types/ts3.4/index.d.ts @@ -0,0 +1,2 @@ +export * from "./configurations"; +export * from "./user-agent-middleware"; diff --git a/amplify/functions/downloadDocument/node_modules/@aws-sdk/middleware-user-agent/dist-types/ts3.4/user-agent-middleware.d.ts b/amplify/functions/downloadDocument/node_modules/@aws-sdk/middleware-user-agent/dist-types/ts3.4/user-agent-middleware.d.ts new file mode 100644 index 0000000..a4da01e --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@aws-sdk/middleware-user-agent/dist-types/ts3.4/user-agent-middleware.d.ts @@ -0,0 +1,21 @@ +import { AwsHandlerExecutionContext } from "@aws-sdk/types"; +import { + AbsoluteLocation, + BuildHandler, + BuildHandlerOptions, + HandlerExecutionContext, + MetadataBearer, + Pluggable, +} from "@smithy/types"; +import { UserAgentResolvedConfig } from "./configurations"; +export declare const userAgentMiddleware: ( + options: UserAgentResolvedConfig +) => ( + next: BuildHandler, + context: HandlerExecutionContext | AwsHandlerExecutionContext +) => BuildHandler; +export declare const getUserAgentMiddlewareOptions: BuildHandlerOptions & + AbsoluteLocation; +export declare const getUserAgentPlugin: ( + config: UserAgentResolvedConfig +) => Pluggable; diff --git 
a/amplify/functions/downloadDocument/node_modules/@aws-sdk/middleware-user-agent/dist-types/user-agent-middleware.d.ts b/amplify/functions/downloadDocument/node_modules/@aws-sdk/middleware-user-agent/dist-types/user-agent-middleware.d.ts new file mode 100644 index 0000000..d36dee5 --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@aws-sdk/middleware-user-agent/dist-types/user-agent-middleware.d.ts @@ -0,0 +1,18 @@ +import type { AwsHandlerExecutionContext } from "@aws-sdk/types"; +import { AbsoluteLocation, BuildHandler, BuildHandlerOptions, HandlerExecutionContext, MetadataBearer, Pluggable } from "@smithy/types"; +import { UserAgentResolvedConfig } from "./configurations"; +/** + * Build user agent header sections from: + * 1. runtime-specific default user agent provider; + * 2. custom user agent from `customUserAgent` client config; + * 3. handler execution context set by internal SDK components; + * The built user agent will be set to `x-amz-user-agent` header for ALL the + * runtimes. + * Please note that any override to the `user-agent` or `x-amz-user-agent` header + * in the HTTP request is discouraged. Please use `customUserAgent` client + * config or middleware setting the `userAgent` context to generate desired user + * agent. 
+ */ +export declare const userAgentMiddleware: (options: UserAgentResolvedConfig) => (next: BuildHandler, context: HandlerExecutionContext | AwsHandlerExecutionContext) => BuildHandler; +export declare const getUserAgentMiddlewareOptions: BuildHandlerOptions & AbsoluteLocation; +export declare const getUserAgentPlugin: (config: UserAgentResolvedConfig) => Pluggable; diff --git a/amplify/functions/downloadDocument/node_modules/@aws-sdk/middleware-user-agent/package.json b/amplify/functions/downloadDocument/node_modules/@aws-sdk/middleware-user-agent/package.json new file mode 100644 index 0000000..34cff38 --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@aws-sdk/middleware-user-agent/package.json @@ -0,0 +1,61 @@ +{ + "name": "@aws-sdk/middleware-user-agent", + "version": "3.799.0", + "scripts": { + "build": "concurrently 'yarn:build:cjs' 'yarn:build:es' 'yarn:build:types'", + "build:cjs": "node ../../scripts/compilation/inline middleware-user-agent", + "build:es": "tsc -p tsconfig.es.json", + "build:include:deps": "lerna run --scope $npm_package_name --include-dependencies build", + "build:types": "tsc -p tsconfig.types.json", + "build:types:downlevel": "downlevel-dts dist-types dist-types/ts3.4", + "clean": "rimraf ./dist-* && rimraf *.tsbuildinfo", + "test": "yarn g:vitest run", + "test:integration": "yarn g:vitest run -c vitest.config.integ.ts", + "extract:docs": "api-extractor run --local", + "test:watch": "yarn g:vitest watch", + "test:integration:watch": "yarn g:vitest watch -c vitest.config.integ.ts" + }, + "main": "./dist-cjs/index.js", + "module": "./dist-es/index.js", + "types": "./dist-types/index.d.ts", + "author": { + "name": "AWS SDK for JavaScript Team", + "url": "https://aws.amazon.com/javascript/" + }, + "license": "Apache-2.0", + "dependencies": { + "@aws-sdk/core": "3.799.0", + "@aws-sdk/types": "3.775.0", + "@aws-sdk/util-endpoints": "3.787.0", + "@smithy/core": "^3.3.0", + "@smithy/protocol-http": "^5.1.0", + 
"@smithy/types": "^4.2.0", + "tslib": "^2.6.2" + }, + "devDependencies": { + "@tsconfig/recommended": "1.0.1", + "concurrently": "7.0.0", + "downlevel-dts": "0.10.1", + "rimraf": "3.0.2", + "typescript": "~5.2.2" + }, + "engines": { + "node": ">=18.0.0" + }, + "typesVersions": { + "<4.0": { + "dist-types/*": [ + "dist-types/ts3.4/*" + ] + } + }, + "files": [ + "dist-*/**" + ], + "homepage": "https://github.com/aws/aws-sdk-js-v3/tree/main/packages/middleware-user-agent", + "repository": { + "type": "git", + "url": "https://github.com/aws/aws-sdk-js-v3.git", + "directory": "packages/middleware-user-agent" + } +} diff --git a/amplify/functions/downloadDocument/node_modules/@aws-sdk/nested-clients/README.md b/amplify/functions/downloadDocument/node_modules/@aws-sdk/nested-clients/README.md new file mode 100644 index 0000000..1182bbd --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@aws-sdk/nested-clients/README.md @@ -0,0 +1,13 @@ +# @aws-sdk/nested-clients + +## Description + +This is an internal package. Do not install this as a direct dependency. + +This package contains separate internal implementations of the STS and SSO-OIDC AWS SDK clients +to be used by the AWS SDK credential providers to break a cyclic dependency. + +### Bundlers + +This package may be marked as external if you do not use STS nor SSO-OIDC +in your credential resolution process. 
diff --git a/amplify/functions/downloadDocument/node_modules/@aws-sdk/nested-clients/dist-cjs/index.js b/amplify/functions/downloadDocument/node_modules/@aws-sdk/nested-clients/dist-cjs/index.js new file mode 100644 index 0000000..c8ad2e5 --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@aws-sdk/nested-clients/dist-cjs/index.js @@ -0,0 +1,2 @@ +"use strict"; +Object.defineProperty(exports, "__esModule", { value: true }); diff --git a/amplify/functions/downloadDocument/node_modules/@aws-sdk/nested-clients/dist-cjs/submodules/sso-oidc/auth/httpAuthSchemeProvider.js b/amplify/functions/downloadDocument/node_modules/@aws-sdk/nested-clients/dist-cjs/submodules/sso-oidc/auth/httpAuthSchemeProvider.js new file mode 100644 index 0000000..7a9f28a --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@aws-sdk/nested-clients/dist-cjs/submodules/sso-oidc/auth/httpAuthSchemeProvider.js @@ -0,0 +1,56 @@ +"use strict"; +Object.defineProperty(exports, "__esModule", { value: true }); +exports.resolveHttpAuthSchemeConfig = exports.defaultSSOOIDCHttpAuthSchemeProvider = exports.defaultSSOOIDCHttpAuthSchemeParametersProvider = void 0; +const core_1 = require("@aws-sdk/core"); +const util_middleware_1 = require("@smithy/util-middleware"); +const defaultSSOOIDCHttpAuthSchemeParametersProvider = async (config, context, input) => { + return { + operation: (0, util_middleware_1.getSmithyContext)(context).operation, + region: (await (0, util_middleware_1.normalizeProvider)(config.region)()) || + (() => { + throw new Error("expected `region` to be configured for `aws.auth#sigv4`"); + })(), + }; +}; +exports.defaultSSOOIDCHttpAuthSchemeParametersProvider = defaultSSOOIDCHttpAuthSchemeParametersProvider; +function createAwsAuthSigv4HttpAuthOption(authParameters) { + return { + schemeId: "aws.auth#sigv4", + signingProperties: { + name: "sso-oauth", + region: authParameters.region, + }, + propertiesExtractor: (config, context) => ({ + signingProperties: { + 
config, + context, + }, + }), + }; +} +function createSmithyApiNoAuthHttpAuthOption(authParameters) { + return { + schemeId: "smithy.api#noAuth", + }; +} +const defaultSSOOIDCHttpAuthSchemeProvider = (authParameters) => { + const options = []; + switch (authParameters.operation) { + case "CreateToken": { + options.push(createSmithyApiNoAuthHttpAuthOption(authParameters)); + break; + } + default: { + options.push(createAwsAuthSigv4HttpAuthOption(authParameters)); + } + } + return options; +}; +exports.defaultSSOOIDCHttpAuthSchemeProvider = defaultSSOOIDCHttpAuthSchemeProvider; +const resolveHttpAuthSchemeConfig = (config) => { + const config_0 = (0, core_1.resolveAwsSdkSigV4Config)(config); + return Object.assign(config_0, { + authSchemePreference: (0, util_middleware_1.normalizeProvider)(config.authSchemePreference ?? []), + }); +}; +exports.resolveHttpAuthSchemeConfig = resolveHttpAuthSchemeConfig; diff --git a/amplify/functions/downloadDocument/node_modules/@aws-sdk/nested-clients/dist-cjs/submodules/sso-oidc/endpoint/endpointResolver.js b/amplify/functions/downloadDocument/node_modules/@aws-sdk/nested-clients/dist-cjs/submodules/sso-oidc/endpoint/endpointResolver.js new file mode 100644 index 0000000..7258a35 --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@aws-sdk/nested-clients/dist-cjs/submodules/sso-oidc/endpoint/endpointResolver.js @@ -0,0 +1,18 @@ +"use strict"; +Object.defineProperty(exports, "__esModule", { value: true }); +exports.defaultEndpointResolver = void 0; +const util_endpoints_1 = require("@aws-sdk/util-endpoints"); +const util_endpoints_2 = require("@smithy/util-endpoints"); +const ruleset_1 = require("./ruleset"); +const cache = new util_endpoints_2.EndpointCache({ + size: 50, + params: ["Endpoint", "Region", "UseDualStack", "UseFIPS"], +}); +const defaultEndpointResolver = (endpointParams, context = {}) => { + return cache.get(endpointParams, () => (0, util_endpoints_2.resolveEndpoint)(ruleset_1.ruleSet, { + 
endpointParams: endpointParams, + logger: context.logger, + })); +}; +exports.defaultEndpointResolver = defaultEndpointResolver; +util_endpoints_2.customEndpointFunctions.aws = util_endpoints_1.awsEndpointFunctions; diff --git a/amplify/functions/downloadDocument/node_modules/@aws-sdk/nested-clients/dist-cjs/submodules/sso-oidc/endpoint/ruleset.js b/amplify/functions/downloadDocument/node_modules/@aws-sdk/nested-clients/dist-cjs/submodules/sso-oidc/endpoint/ruleset.js new file mode 100644 index 0000000..72e0adc --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@aws-sdk/nested-clients/dist-cjs/submodules/sso-oidc/endpoint/ruleset.js @@ -0,0 +1,7 @@ +"use strict"; +Object.defineProperty(exports, "__esModule", { value: true }); +exports.ruleSet = void 0; +const u = "required", v = "fn", w = "argv", x = "ref"; +const a = true, b = "isSet", c = "booleanEquals", d = "error", e = "endpoint", f = "tree", g = "PartitionResult", h = "getAttr", i = { [u]: false, "type": "String" }, j = { [u]: true, "default": false, "type": "Boolean" }, k = { [x]: "Endpoint" }, l = { [v]: c, [w]: [{ [x]: "UseFIPS" }, true] }, m = { [v]: c, [w]: [{ [x]: "UseDualStack" }, true] }, n = {}, o = { [v]: h, [w]: [{ [x]: g }, "supportsFIPS"] }, p = { [x]: g }, q = { [v]: c, [w]: [true, { [v]: h, [w]: [p, "supportsDualStack"] }] }, r = [l], s = [m], t = [{ [x]: "Region" }]; +const _data = { version: "1.0", parameters: { Region: i, UseDualStack: j, UseFIPS: j, Endpoint: i }, rules: [{ conditions: [{ [v]: b, [w]: [k] }], rules: [{ conditions: r, error: "Invalid Configuration: FIPS and custom endpoint are not supported", type: d }, { conditions: s, error: "Invalid Configuration: Dualstack and custom endpoint are not supported", type: d }, { endpoint: { url: k, properties: n, headers: n }, type: e }], type: f }, { conditions: [{ [v]: b, [w]: t }], rules: [{ conditions: [{ [v]: "aws.partition", [w]: t, assign: g }], rules: [{ conditions: [l, m], rules: [{ conditions: [{ [v]: c, [w]: [a, 
o] }, q], rules: [{ endpoint: { url: "https://oidc-fips.{Region}.{PartitionResult#dualStackDnsSuffix}", properties: n, headers: n }, type: e }], type: f }, { error: "FIPS and DualStack are enabled, but this partition does not support one or both", type: d }], type: f }, { conditions: r, rules: [{ conditions: [{ [v]: c, [w]: [o, a] }], rules: [{ conditions: [{ [v]: "stringEquals", [w]: [{ [v]: h, [w]: [p, "name"] }, "aws-us-gov"] }], endpoint: { url: "https://oidc.{Region}.amazonaws.com", properties: n, headers: n }, type: e }, { endpoint: { url: "https://oidc-fips.{Region}.{PartitionResult#dnsSuffix}", properties: n, headers: n }, type: e }], type: f }, { error: "FIPS is enabled but this partition does not support FIPS", type: d }], type: f }, { conditions: s, rules: [{ conditions: [q], rules: [{ endpoint: { url: "https://oidc.{Region}.{PartitionResult#dualStackDnsSuffix}", properties: n, headers: n }, type: e }], type: f }, { error: "DualStack is enabled but this partition does not support DualStack", type: d }], type: f }, { endpoint: { url: "https://oidc.{Region}.{PartitionResult#dnsSuffix}", properties: n, headers: n }, type: e }], type: f }], type: f }, { error: "Invalid Configuration: Missing Region", type: d }] }; +exports.ruleSet = _data; diff --git a/amplify/functions/downloadDocument/node_modules/@aws-sdk/nested-clients/dist-cjs/submodules/sso-oidc/index.js b/amplify/functions/downloadDocument/node_modules/@aws-sdk/nested-clients/dist-cjs/submodules/sso-oidc/index.js new file mode 100644 index 0000000..55f595a --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@aws-sdk/nested-clients/dist-cjs/submodules/sso-oidc/index.js @@ -0,0 +1,872 @@ +"use strict"; +var __defProp = Object.defineProperty; +var __getOwnPropDesc = Object.getOwnPropertyDescriptor; +var __getOwnPropNames = Object.getOwnPropertyNames; +var __hasOwnProp = Object.prototype.hasOwnProperty; +var __name = (target, value) => __defProp(target, "name", { value, configurable: true 
}); +var __export = (target, all) => { + for (var name in all) + __defProp(target, name, { get: all[name], enumerable: true }); +}; +var __copyProps = (to, from, except, desc) => { + if (from && typeof from === "object" || typeof from === "function") { + for (let key of __getOwnPropNames(from)) + if (!__hasOwnProp.call(to, key) && key !== except) + __defProp(to, key, { get: () => from[key], enumerable: !(desc = __getOwnPropDesc(from, key)) || desc.enumerable }); + } + return to; +}; +var __toCommonJS = (mod) => __copyProps(__defProp({}, "__esModule", { value: true }), mod); + +// src/submodules/sso-oidc/index.ts +var index_exports = {}; +__export(index_exports, { + $Command: () => import_smithy_client6.Command, + AccessDeniedException: () => AccessDeniedException, + AuthorizationPendingException: () => AuthorizationPendingException, + CreateTokenCommand: () => CreateTokenCommand, + CreateTokenRequestFilterSensitiveLog: () => CreateTokenRequestFilterSensitiveLog, + CreateTokenResponseFilterSensitiveLog: () => CreateTokenResponseFilterSensitiveLog, + ExpiredTokenException: () => ExpiredTokenException, + InternalServerException: () => InternalServerException, + InvalidClientException: () => InvalidClientException, + InvalidGrantException: () => InvalidGrantException, + InvalidRequestException: () => InvalidRequestException, + InvalidScopeException: () => InvalidScopeException, + SSOOIDC: () => SSOOIDC, + SSOOIDCClient: () => SSOOIDCClient, + SSOOIDCServiceException: () => SSOOIDCServiceException, + SlowDownException: () => SlowDownException, + UnauthorizedClientException: () => UnauthorizedClientException, + UnsupportedGrantTypeException: () => UnsupportedGrantTypeException, + __Client: () => import_smithy_client2.Client +}); +module.exports = __toCommonJS(index_exports); + +// src/submodules/sso-oidc/SSOOIDCClient.ts +var import_middleware_host_header = require("@aws-sdk/middleware-host-header"); +var import_middleware_logger = require("@aws-sdk/middleware-logger"); 
+var import_middleware_recursion_detection = require("@aws-sdk/middleware-recursion-detection"); +var import_middleware_user_agent = require("@aws-sdk/middleware-user-agent"); +var import_config_resolver = require("@smithy/config-resolver"); +var import_core = require("@smithy/core"); +var import_middleware_content_length = require("@smithy/middleware-content-length"); +var import_middleware_endpoint = require("@smithy/middleware-endpoint"); +var import_middleware_retry = require("@smithy/middleware-retry"); +var import_smithy_client2 = require("@smithy/smithy-client"); +var import_httpAuthSchemeProvider = require("./auth/httpAuthSchemeProvider"); + +// src/submodules/sso-oidc/endpoint/EndpointParameters.ts +var resolveClientEndpointParameters = /* @__PURE__ */ __name((options) => { + return Object.assign(options, { + useDualstackEndpoint: options.useDualstackEndpoint ?? false, + useFipsEndpoint: options.useFipsEndpoint ?? false, + defaultSigningName: "sso-oauth" + }); +}, "resolveClientEndpointParameters"); +var commonParams = { + UseFIPS: { type: "builtInParams", name: "useFipsEndpoint" }, + Endpoint: { type: "builtInParams", name: "endpoint" }, + Region: { type: "builtInParams", name: "region" }, + UseDualStack: { type: "builtInParams", name: "useDualstackEndpoint" } +}; + +// src/submodules/sso-oidc/SSOOIDCClient.ts +var import_runtimeConfig = require("./runtimeConfig"); + +// src/submodules/sso-oidc/runtimeExtensions.ts +var import_region_config_resolver = require("@aws-sdk/region-config-resolver"); +var import_protocol_http = require("@smithy/protocol-http"); +var import_smithy_client = require("@smithy/smithy-client"); + +// src/submodules/sso-oidc/auth/httpAuthExtensionConfiguration.ts +var getHttpAuthExtensionConfiguration = /* @__PURE__ */ __name((runtimeConfig) => { + const _httpAuthSchemes = runtimeConfig.httpAuthSchemes; + let _httpAuthSchemeProvider = runtimeConfig.httpAuthSchemeProvider; + let _credentials = runtimeConfig.credentials; + return { + 
setHttpAuthScheme(httpAuthScheme) { + const index = _httpAuthSchemes.findIndex((scheme) => scheme.schemeId === httpAuthScheme.schemeId); + if (index === -1) { + _httpAuthSchemes.push(httpAuthScheme); + } else { + _httpAuthSchemes.splice(index, 1, httpAuthScheme); + } + }, + httpAuthSchemes() { + return _httpAuthSchemes; + }, + setHttpAuthSchemeProvider(httpAuthSchemeProvider) { + _httpAuthSchemeProvider = httpAuthSchemeProvider; + }, + httpAuthSchemeProvider() { + return _httpAuthSchemeProvider; + }, + setCredentials(credentials) { + _credentials = credentials; + }, + credentials() { + return _credentials; + } + }; +}, "getHttpAuthExtensionConfiguration"); +var resolveHttpAuthRuntimeConfig = /* @__PURE__ */ __name((config) => { + return { + httpAuthSchemes: config.httpAuthSchemes(), + httpAuthSchemeProvider: config.httpAuthSchemeProvider(), + credentials: config.credentials() + }; +}, "resolveHttpAuthRuntimeConfig"); + +// src/submodules/sso-oidc/runtimeExtensions.ts +var resolveRuntimeExtensions = /* @__PURE__ */ __name((runtimeConfig, extensions) => { + const extensionConfiguration = Object.assign( + (0, import_region_config_resolver.getAwsRegionExtensionConfiguration)(runtimeConfig), + (0, import_smithy_client.getDefaultExtensionConfiguration)(runtimeConfig), + (0, import_protocol_http.getHttpHandlerExtensionConfiguration)(runtimeConfig), + getHttpAuthExtensionConfiguration(runtimeConfig) + ); + extensions.forEach((extension) => extension.configure(extensionConfiguration)); + return Object.assign( + runtimeConfig, + (0, import_region_config_resolver.resolveAwsRegionExtensionConfiguration)(extensionConfiguration), + (0, import_smithy_client.resolveDefaultRuntimeConfig)(extensionConfiguration), + (0, import_protocol_http.resolveHttpHandlerRuntimeConfig)(extensionConfiguration), + resolveHttpAuthRuntimeConfig(extensionConfiguration) + ); +}, "resolveRuntimeExtensions"); + +// src/submodules/sso-oidc/SSOOIDCClient.ts +var SSOOIDCClient = class extends 
import_smithy_client2.Client { + static { + __name(this, "SSOOIDCClient"); + } + /** + * The resolved configuration of SSOOIDCClient class. This is resolved and normalized from the {@link SSOOIDCClientConfig | constructor configuration interface}. + */ + config; + constructor(...[configuration]) { + const _config_0 = (0, import_runtimeConfig.getRuntimeConfig)(configuration || {}); + super(_config_0); + this.initConfig = _config_0; + const _config_1 = resolveClientEndpointParameters(_config_0); + const _config_2 = (0, import_middleware_user_agent.resolveUserAgentConfig)(_config_1); + const _config_3 = (0, import_middleware_retry.resolveRetryConfig)(_config_2); + const _config_4 = (0, import_config_resolver.resolveRegionConfig)(_config_3); + const _config_5 = (0, import_middleware_host_header.resolveHostHeaderConfig)(_config_4); + const _config_6 = (0, import_middleware_endpoint.resolveEndpointConfig)(_config_5); + const _config_7 = (0, import_httpAuthSchemeProvider.resolveHttpAuthSchemeConfig)(_config_6); + const _config_8 = resolveRuntimeExtensions(_config_7, configuration?.extensions || []); + this.config = _config_8; + this.middlewareStack.use((0, import_middleware_user_agent.getUserAgentPlugin)(this.config)); + this.middlewareStack.use((0, import_middleware_retry.getRetryPlugin)(this.config)); + this.middlewareStack.use((0, import_middleware_content_length.getContentLengthPlugin)(this.config)); + this.middlewareStack.use((0, import_middleware_host_header.getHostHeaderPlugin)(this.config)); + this.middlewareStack.use((0, import_middleware_logger.getLoggerPlugin)(this.config)); + this.middlewareStack.use((0, import_middleware_recursion_detection.getRecursionDetectionPlugin)(this.config)); + this.middlewareStack.use( + (0, import_core.getHttpAuthSchemeEndpointRuleSetPlugin)(this.config, { + httpAuthSchemeParametersProvider: import_httpAuthSchemeProvider.defaultSSOOIDCHttpAuthSchemeParametersProvider, + identityProviderConfigProvider: /* @__PURE__ */ __name(async 
(config) => new import_core.DefaultIdentityProviderConfig({ + "aws.auth#sigv4": config.credentials + }), "identityProviderConfigProvider") + }) + ); + this.middlewareStack.use((0, import_core.getHttpSigningPlugin)(this.config)); + } + /** + * Destroy underlying resources, like sockets. It's usually not necessary to do this. + * However in Node.js, it's best to explicitly shut down the client's agent when it is no longer needed. + * Otherwise, sockets might stay open for quite a long time before the server terminates them. + */ + destroy() { + super.destroy(); + } +}; + +// src/submodules/sso-oidc/SSOOIDC.ts +var import_smithy_client7 = require("@smithy/smithy-client"); + +// src/submodules/sso-oidc/commands/CreateTokenCommand.ts +var import_middleware_endpoint2 = require("@smithy/middleware-endpoint"); +var import_middleware_serde = require("@smithy/middleware-serde"); +var import_smithy_client6 = require("@smithy/smithy-client"); + +// src/submodules/sso-oidc/models/models_0.ts +var import_smithy_client4 = require("@smithy/smithy-client"); + +// src/submodules/sso-oidc/models/SSOOIDCServiceException.ts +var import_smithy_client3 = require("@smithy/smithy-client"); +var SSOOIDCServiceException = class _SSOOIDCServiceException extends import_smithy_client3.ServiceException { + static { + __name(this, "SSOOIDCServiceException"); + } + /** + * @internal + */ + constructor(options) { + super(options); + Object.setPrototypeOf(this, _SSOOIDCServiceException.prototype); + } +}; + +// src/submodules/sso-oidc/models/models_0.ts +var AccessDeniedException = class _AccessDeniedException extends SSOOIDCServiceException { + static { + __name(this, "AccessDeniedException"); + } + name = "AccessDeniedException"; + $fault = "client"; + /** + *

Single error code. For this exception the value will be access_denied.

+ * @public + */ + error; + /** + *

Human-readable text providing additional information, used to assist the client developer + * in understanding the error that occurred.

+ * @public + */ + error_description; + /** + * @internal + */ + constructor(opts) { + super({ + name: "AccessDeniedException", + $fault: "client", + ...opts + }); + Object.setPrototypeOf(this, _AccessDeniedException.prototype); + this.error = opts.error; + this.error_description = opts.error_description; + } +}; +var AuthorizationPendingException = class _AuthorizationPendingException extends SSOOIDCServiceException { + static { + __name(this, "AuthorizationPendingException"); + } + name = "AuthorizationPendingException"; + $fault = "client"; + /** + *

Single error code. For this exception the value will be + * authorization_pending.

+ * @public + */ + error; + /** + *

Human-readable text providing additional information, used to assist the client developer + * in understanding the error that occurred.

+ * @public + */ + error_description; + /** + * @internal + */ + constructor(opts) { + super({ + name: "AuthorizationPendingException", + $fault: "client", + ...opts + }); + Object.setPrototypeOf(this, _AuthorizationPendingException.prototype); + this.error = opts.error; + this.error_description = opts.error_description; + } +}; +var CreateTokenRequestFilterSensitiveLog = /* @__PURE__ */ __name((obj) => ({ + ...obj, + ...obj.clientSecret && { clientSecret: import_smithy_client4.SENSITIVE_STRING }, + ...obj.refreshToken && { refreshToken: import_smithy_client4.SENSITIVE_STRING }, + ...obj.codeVerifier && { codeVerifier: import_smithy_client4.SENSITIVE_STRING } +}), "CreateTokenRequestFilterSensitiveLog"); +var CreateTokenResponseFilterSensitiveLog = /* @__PURE__ */ __name((obj) => ({ + ...obj, + ...obj.accessToken && { accessToken: import_smithy_client4.SENSITIVE_STRING }, + ...obj.refreshToken && { refreshToken: import_smithy_client4.SENSITIVE_STRING }, + ...obj.idToken && { idToken: import_smithy_client4.SENSITIVE_STRING } +}), "CreateTokenResponseFilterSensitiveLog"); +var ExpiredTokenException = class _ExpiredTokenException extends SSOOIDCServiceException { + static { + __name(this, "ExpiredTokenException"); + } + name = "ExpiredTokenException"; + $fault = "client"; + /** + *

Single error code. For this exception the value will be expired_token.

+ * @public + */ + error; + /** + *

Human-readable text providing additional information, used to assist the client developer + * in understanding the error that occurred.

+ * @public + */ + error_description; + /** + * @internal + */ + constructor(opts) { + super({ + name: "ExpiredTokenException", + $fault: "client", + ...opts + }); + Object.setPrototypeOf(this, _ExpiredTokenException.prototype); + this.error = opts.error; + this.error_description = opts.error_description; + } +}; +var InternalServerException = class _InternalServerException extends SSOOIDCServiceException { + static { + __name(this, "InternalServerException"); + } + name = "InternalServerException"; + $fault = "server"; + /** + *

Single error code. For this exception the value will be server_error.

+ * @public + */ + error; + /** + *

Human-readable text providing additional information, used to assist the client developer + * in understanding the error that occurred.

+ * @public + */ + error_description; + /** + * @internal + */ + constructor(opts) { + super({ + name: "InternalServerException", + $fault: "server", + ...opts + }); + Object.setPrototypeOf(this, _InternalServerException.prototype); + this.error = opts.error; + this.error_description = opts.error_description; + } +}; +var InvalidClientException = class _InvalidClientException extends SSOOIDCServiceException { + static { + __name(this, "InvalidClientException"); + } + name = "InvalidClientException"; + $fault = "client"; + /** + *

Single error code. For this exception the value will be + * invalid_client.

+ * @public + */ + error; + /** + *

Human-readable text providing additional information, used to assist the client developer + * in understanding the error that occurred.

+ * @public + */ + error_description; + /** + * @internal + */ + constructor(opts) { + super({ + name: "InvalidClientException", + $fault: "client", + ...opts + }); + Object.setPrototypeOf(this, _InvalidClientException.prototype); + this.error = opts.error; + this.error_description = opts.error_description; + } +}; +var InvalidGrantException = class _InvalidGrantException extends SSOOIDCServiceException { + static { + __name(this, "InvalidGrantException"); + } + name = "InvalidGrantException"; + $fault = "client"; + /** + *

Single error code. For this exception the value will be invalid_grant.

+ * @public + */ + error; + /** + *

Human-readable text providing additional information, used to assist the client developer + * in understanding the error that occurred.

+ * @public + */ + error_description; + /** + * @internal + */ + constructor(opts) { + super({ + name: "InvalidGrantException", + $fault: "client", + ...opts + }); + Object.setPrototypeOf(this, _InvalidGrantException.prototype); + this.error = opts.error; + this.error_description = opts.error_description; + } +}; +var InvalidRequestException = class _InvalidRequestException extends SSOOIDCServiceException { + static { + __name(this, "InvalidRequestException"); + } + name = "InvalidRequestException"; + $fault = "client"; + /** + *

Single error code. For this exception the value will be + * invalid_request.

+ * @public + */ + error; + /** + *

Human-readable text providing additional information, used to assist the client developer + * in understanding the error that occurred.

+ * @public + */ + error_description; + /** + * @internal + */ + constructor(opts) { + super({ + name: "InvalidRequestException", + $fault: "client", + ...opts + }); + Object.setPrototypeOf(this, _InvalidRequestException.prototype); + this.error = opts.error; + this.error_description = opts.error_description; + } +}; +var InvalidScopeException = class _InvalidScopeException extends SSOOIDCServiceException { + static { + __name(this, "InvalidScopeException"); + } + name = "InvalidScopeException"; + $fault = "client"; + /** + *

Single error code. For this exception the value will be invalid_scope.

+ * @public + */ + error; + /** + *

Human-readable text providing additional information, used to assist the client developer + * in understanding the error that occurred.

+ * @public + */ + error_description; + /** + * @internal + */ + constructor(opts) { + super({ + name: "InvalidScopeException", + $fault: "client", + ...opts + }); + Object.setPrototypeOf(this, _InvalidScopeException.prototype); + this.error = opts.error; + this.error_description = opts.error_description; + } +}; +var SlowDownException = class _SlowDownException extends SSOOIDCServiceException { + static { + __name(this, "SlowDownException"); + } + name = "SlowDownException"; + $fault = "client"; + /** + *

Single error code. For this exception the value will be slow_down.

+ * @public + */ + error; + /** + *

Human-readable text providing additional information, used to assist the client developer + * in understanding the error that occurred.

+ * @public + */ + error_description; + /** + * @internal + */ + constructor(opts) { + super({ + name: "SlowDownException", + $fault: "client", + ...opts + }); + Object.setPrototypeOf(this, _SlowDownException.prototype); + this.error = opts.error; + this.error_description = opts.error_description; + } +}; +var UnauthorizedClientException = class _UnauthorizedClientException extends SSOOIDCServiceException { + static { + __name(this, "UnauthorizedClientException"); + } + name = "UnauthorizedClientException"; + $fault = "client"; + /** + *

Single error code. For this exception the value will be + * unauthorized_client.

+ * @public + */ + error; + /** + *

Human-readable text providing additional information, used to assist the client developer + * in understanding the error that occurred.

+ * @public + */ + error_description; + /** + * @internal + */ + constructor(opts) { + super({ + name: "UnauthorizedClientException", + $fault: "client", + ...opts + }); + Object.setPrototypeOf(this, _UnauthorizedClientException.prototype); + this.error = opts.error; + this.error_description = opts.error_description; + } +}; +var UnsupportedGrantTypeException = class _UnsupportedGrantTypeException extends SSOOIDCServiceException { + static { + __name(this, "UnsupportedGrantTypeException"); + } + name = "UnsupportedGrantTypeException"; + $fault = "client"; + /** + *

Single error code. For this exception the value will be + * unsupported_grant_type.

+ * @public + */ + error; + /** + *

Human-readable text providing additional information, used to assist the client developer + * in understanding the error that occurred.

+ * @public + */ + error_description; + /** + * @internal + */ + constructor(opts) { + super({ + name: "UnsupportedGrantTypeException", + $fault: "client", + ...opts + }); + Object.setPrototypeOf(this, _UnsupportedGrantTypeException.prototype); + this.error = opts.error; + this.error_description = opts.error_description; + } +}; + +// src/submodules/sso-oidc/protocols/Aws_restJson1.ts +var import_core2 = require("@aws-sdk/core"); +var import_core3 = require("@smithy/core"); +var import_smithy_client5 = require("@smithy/smithy-client"); +var se_CreateTokenCommand = /* @__PURE__ */ __name(async (input, context) => { + const b = (0, import_core3.requestBuilder)(input, context); + const headers = { + "content-type": "application/json" + }; + b.bp("/token"); + let body; + body = JSON.stringify( + (0, import_smithy_client5.take)(input, { + clientId: [], + clientSecret: [], + code: [], + codeVerifier: [], + deviceCode: [], + grantType: [], + redirectUri: [], + refreshToken: [], + scope: /* @__PURE__ */ __name((_) => (0, import_smithy_client5._json)(_), "scope") + }) + ); + b.m("POST").h(headers).b(body); + return b.build(); +}, "se_CreateTokenCommand"); +var de_CreateTokenCommand = /* @__PURE__ */ __name(async (output, context) => { + if (output.statusCode !== 200 && output.statusCode >= 300) { + return de_CommandError(output, context); + } + const contents = (0, import_smithy_client5.map)({ + $metadata: deserializeMetadata(output) + }); + const data = (0, import_smithy_client5.expectNonNull)((0, import_smithy_client5.expectObject)(await (0, import_core2.parseJsonBody)(output.body, context)), "body"); + const doc = (0, import_smithy_client5.take)(data, { + accessToken: import_smithy_client5.expectString, + expiresIn: import_smithy_client5.expectInt32, + idToken: import_smithy_client5.expectString, + refreshToken: import_smithy_client5.expectString, + tokenType: import_smithy_client5.expectString + }); + Object.assign(contents, doc); + return contents; +}, 
"de_CreateTokenCommand"); +var de_CommandError = /* @__PURE__ */ __name(async (output, context) => { + const parsedOutput = { + ...output, + body: await (0, import_core2.parseJsonErrorBody)(output.body, context) + }; + const errorCode = (0, import_core2.loadRestJsonErrorCode)(output, parsedOutput.body); + switch (errorCode) { + case "AccessDeniedException": + case "com.amazonaws.ssooidc#AccessDeniedException": + throw await de_AccessDeniedExceptionRes(parsedOutput, context); + case "AuthorizationPendingException": + case "com.amazonaws.ssooidc#AuthorizationPendingException": + throw await de_AuthorizationPendingExceptionRes(parsedOutput, context); + case "ExpiredTokenException": + case "com.amazonaws.ssooidc#ExpiredTokenException": + throw await de_ExpiredTokenExceptionRes(parsedOutput, context); + case "InternalServerException": + case "com.amazonaws.ssooidc#InternalServerException": + throw await de_InternalServerExceptionRes(parsedOutput, context); + case "InvalidClientException": + case "com.amazonaws.ssooidc#InvalidClientException": + throw await de_InvalidClientExceptionRes(parsedOutput, context); + case "InvalidGrantException": + case "com.amazonaws.ssooidc#InvalidGrantException": + throw await de_InvalidGrantExceptionRes(parsedOutput, context); + case "InvalidRequestException": + case "com.amazonaws.ssooidc#InvalidRequestException": + throw await de_InvalidRequestExceptionRes(parsedOutput, context); + case "InvalidScopeException": + case "com.amazonaws.ssooidc#InvalidScopeException": + throw await de_InvalidScopeExceptionRes(parsedOutput, context); + case "SlowDownException": + case "com.amazonaws.ssooidc#SlowDownException": + throw await de_SlowDownExceptionRes(parsedOutput, context); + case "UnauthorizedClientException": + case "com.amazonaws.ssooidc#UnauthorizedClientException": + throw await de_UnauthorizedClientExceptionRes(parsedOutput, context); + case "UnsupportedGrantTypeException": + case "com.amazonaws.ssooidc#UnsupportedGrantTypeException": + 
throw await de_UnsupportedGrantTypeExceptionRes(parsedOutput, context); + default: + const parsedBody = parsedOutput.body; + return throwDefaultError({ + output, + parsedBody, + errorCode + }); + } +}, "de_CommandError"); +var throwDefaultError = (0, import_smithy_client5.withBaseException)(SSOOIDCServiceException); +var de_AccessDeniedExceptionRes = /* @__PURE__ */ __name(async (parsedOutput, context) => { + const contents = (0, import_smithy_client5.map)({}); + const data = parsedOutput.body; + const doc = (0, import_smithy_client5.take)(data, { + error: import_smithy_client5.expectString, + error_description: import_smithy_client5.expectString + }); + Object.assign(contents, doc); + const exception = new AccessDeniedException({ + $metadata: deserializeMetadata(parsedOutput), + ...contents + }); + return (0, import_smithy_client5.decorateServiceException)(exception, parsedOutput.body); +}, "de_AccessDeniedExceptionRes"); +var de_AuthorizationPendingExceptionRes = /* @__PURE__ */ __name(async (parsedOutput, context) => { + const contents = (0, import_smithy_client5.map)({}); + const data = parsedOutput.body; + const doc = (0, import_smithy_client5.take)(data, { + error: import_smithy_client5.expectString, + error_description: import_smithy_client5.expectString + }); + Object.assign(contents, doc); + const exception = new AuthorizationPendingException({ + $metadata: deserializeMetadata(parsedOutput), + ...contents + }); + return (0, import_smithy_client5.decorateServiceException)(exception, parsedOutput.body); +}, "de_AuthorizationPendingExceptionRes"); +var de_ExpiredTokenExceptionRes = /* @__PURE__ */ __name(async (parsedOutput, context) => { + const contents = (0, import_smithy_client5.map)({}); + const data = parsedOutput.body; + const doc = (0, import_smithy_client5.take)(data, { + error: import_smithy_client5.expectString, + error_description: import_smithy_client5.expectString + }); + Object.assign(contents, doc); + const exception = new 
ExpiredTokenException({ + $metadata: deserializeMetadata(parsedOutput), + ...contents + }); + return (0, import_smithy_client5.decorateServiceException)(exception, parsedOutput.body); +}, "de_ExpiredTokenExceptionRes"); +var de_InternalServerExceptionRes = /* @__PURE__ */ __name(async (parsedOutput, context) => { + const contents = (0, import_smithy_client5.map)({}); + const data = parsedOutput.body; + const doc = (0, import_smithy_client5.take)(data, { + error: import_smithy_client5.expectString, + error_description: import_smithy_client5.expectString + }); + Object.assign(contents, doc); + const exception = new InternalServerException({ + $metadata: deserializeMetadata(parsedOutput), + ...contents + }); + return (0, import_smithy_client5.decorateServiceException)(exception, parsedOutput.body); +}, "de_InternalServerExceptionRes"); +var de_InvalidClientExceptionRes = /* @__PURE__ */ __name(async (parsedOutput, context) => { + const contents = (0, import_smithy_client5.map)({}); + const data = parsedOutput.body; + const doc = (0, import_smithy_client5.take)(data, { + error: import_smithy_client5.expectString, + error_description: import_smithy_client5.expectString + }); + Object.assign(contents, doc); + const exception = new InvalidClientException({ + $metadata: deserializeMetadata(parsedOutput), + ...contents + }); + return (0, import_smithy_client5.decorateServiceException)(exception, parsedOutput.body); +}, "de_InvalidClientExceptionRes"); +var de_InvalidGrantExceptionRes = /* @__PURE__ */ __name(async (parsedOutput, context) => { + const contents = (0, import_smithy_client5.map)({}); + const data = parsedOutput.body; + const doc = (0, import_smithy_client5.take)(data, { + error: import_smithy_client5.expectString, + error_description: import_smithy_client5.expectString + }); + Object.assign(contents, doc); + const exception = new InvalidGrantException({ + $metadata: deserializeMetadata(parsedOutput), + ...contents + }); + return (0, 
import_smithy_client5.decorateServiceException)(exception, parsedOutput.body); +}, "de_InvalidGrantExceptionRes"); +var de_InvalidRequestExceptionRes = /* @__PURE__ */ __name(async (parsedOutput, context) => { + const contents = (0, import_smithy_client5.map)({}); + const data = parsedOutput.body; + const doc = (0, import_smithy_client5.take)(data, { + error: import_smithy_client5.expectString, + error_description: import_smithy_client5.expectString + }); + Object.assign(contents, doc); + const exception = new InvalidRequestException({ + $metadata: deserializeMetadata(parsedOutput), + ...contents + }); + return (0, import_smithy_client5.decorateServiceException)(exception, parsedOutput.body); +}, "de_InvalidRequestExceptionRes"); +var de_InvalidScopeExceptionRes = /* @__PURE__ */ __name(async (parsedOutput, context) => { + const contents = (0, import_smithy_client5.map)({}); + const data = parsedOutput.body; + const doc = (0, import_smithy_client5.take)(data, { + error: import_smithy_client5.expectString, + error_description: import_smithy_client5.expectString + }); + Object.assign(contents, doc); + const exception = new InvalidScopeException({ + $metadata: deserializeMetadata(parsedOutput), + ...contents + }); + return (0, import_smithy_client5.decorateServiceException)(exception, parsedOutput.body); +}, "de_InvalidScopeExceptionRes"); +var de_SlowDownExceptionRes = /* @__PURE__ */ __name(async (parsedOutput, context) => { + const contents = (0, import_smithy_client5.map)({}); + const data = parsedOutput.body; + const doc = (0, import_smithy_client5.take)(data, { + error: import_smithy_client5.expectString, + error_description: import_smithy_client5.expectString + }); + Object.assign(contents, doc); + const exception = new SlowDownException({ + $metadata: deserializeMetadata(parsedOutput), + ...contents + }); + return (0, import_smithy_client5.decorateServiceException)(exception, parsedOutput.body); +}, "de_SlowDownExceptionRes"); +var 
de_UnauthorizedClientExceptionRes = /* @__PURE__ */ __name(async (parsedOutput, context) => { + const contents = (0, import_smithy_client5.map)({}); + const data = parsedOutput.body; + const doc = (0, import_smithy_client5.take)(data, { + error: import_smithy_client5.expectString, + error_description: import_smithy_client5.expectString + }); + Object.assign(contents, doc); + const exception = new UnauthorizedClientException({ + $metadata: deserializeMetadata(parsedOutput), + ...contents + }); + return (0, import_smithy_client5.decorateServiceException)(exception, parsedOutput.body); +}, "de_UnauthorizedClientExceptionRes"); +var de_UnsupportedGrantTypeExceptionRes = /* @__PURE__ */ __name(async (parsedOutput, context) => { + const contents = (0, import_smithy_client5.map)({}); + const data = parsedOutput.body; + const doc = (0, import_smithy_client5.take)(data, { + error: import_smithy_client5.expectString, + error_description: import_smithy_client5.expectString + }); + Object.assign(contents, doc); + const exception = new UnsupportedGrantTypeException({ + $metadata: deserializeMetadata(parsedOutput), + ...contents + }); + return (0, import_smithy_client5.decorateServiceException)(exception, parsedOutput.body); +}, "de_UnsupportedGrantTypeExceptionRes"); +var deserializeMetadata = /* @__PURE__ */ __name((output) => ({ + httpStatusCode: output.statusCode, + requestId: output.headers["x-amzn-requestid"] ?? output.headers["x-amzn-request-id"] ?? 
output.headers["x-amz-request-id"], + extendedRequestId: output.headers["x-amz-id-2"], + cfId: output.headers["x-amz-cf-id"] +}), "deserializeMetadata"); + +// src/submodules/sso-oidc/commands/CreateTokenCommand.ts +var CreateTokenCommand = class extends import_smithy_client6.Command.classBuilder().ep(commonParams).m(function(Command, cs, config, o) { + return [ + (0, import_middleware_serde.getSerdePlugin)(config, this.serialize, this.deserialize), + (0, import_middleware_endpoint2.getEndpointPlugin)(config, Command.getEndpointParameterInstructions()) + ]; +}).s("AWSSSOOIDCService", "CreateToken", {}).n("SSOOIDCClient", "CreateTokenCommand").f(CreateTokenRequestFilterSensitiveLog, CreateTokenResponseFilterSensitiveLog).ser(se_CreateTokenCommand).de(de_CreateTokenCommand).build() { + static { + __name(this, "CreateTokenCommand"); + } +}; + +// src/submodules/sso-oidc/SSOOIDC.ts +var commands = { + CreateTokenCommand +}; +var SSOOIDC = class extends SSOOIDCClient { + static { + __name(this, "SSOOIDC"); + } +}; +(0, import_smithy_client7.createAggregatedClient)(commands, SSOOIDC); +// Annotate the CommonJS export names for ESM import in node: +0 && (module.exports = { + $Command, + AccessDeniedException, + AuthorizationPendingException, + CreateTokenCommand, + CreateTokenRequestFilterSensitiveLog, + CreateTokenResponseFilterSensitiveLog, + ExpiredTokenException, + InternalServerException, + InvalidClientException, + InvalidGrantException, + InvalidRequestException, + InvalidScopeException, + SSOOIDC, + SSOOIDCClient, + SSOOIDCServiceException, + SlowDownException, + UnauthorizedClientException, + UnsupportedGrantTypeException, + __Client +}); diff --git a/amplify/functions/downloadDocument/node_modules/@aws-sdk/nested-clients/dist-cjs/submodules/sso-oidc/runtimeConfig.browser.js b/amplify/functions/downloadDocument/node_modules/@aws-sdk/nested-clients/dist-cjs/submodules/sso-oidc/runtimeConfig.browser.js new file mode 100644 index 0000000..6654024 --- /dev/null +++ 
b/amplify/functions/downloadDocument/node_modules/@aws-sdk/nested-clients/dist-cjs/submodules/sso-oidc/runtimeConfig.browser.js @@ -0,0 +1,38 @@ +"use strict"; +Object.defineProperty(exports, "__esModule", { value: true }); +exports.getRuntimeConfig = void 0; +const tslib_1 = require("tslib"); +const package_json_1 = tslib_1.__importDefault(require("../../../package.json")); +const sha256_browser_1 = require("@aws-crypto/sha256-browser"); +const util_user_agent_browser_1 = require("@aws-sdk/util-user-agent-browser"); +const config_resolver_1 = require("@smithy/config-resolver"); +const fetch_http_handler_1 = require("@smithy/fetch-http-handler"); +const invalid_dependency_1 = require("@smithy/invalid-dependency"); +const util_body_length_browser_1 = require("@smithy/util-body-length-browser"); +const util_retry_1 = require("@smithy/util-retry"); +const runtimeConfig_shared_1 = require("./runtimeConfig.shared"); +const smithy_client_1 = require("@smithy/smithy-client"); +const util_defaults_mode_browser_1 = require("@smithy/util-defaults-mode-browser"); +const getRuntimeConfig = (config) => { + const defaultsMode = (0, util_defaults_mode_browser_1.resolveDefaultsModeConfig)(config); + const defaultConfigProvider = () => defaultsMode().then(smithy_client_1.loadConfigsForDefaultMode); + const clientSharedValues = (0, runtimeConfig_shared_1.getRuntimeConfig)(config); + return { + ...clientSharedValues, + ...config, + runtime: "browser", + defaultsMode, + bodyLengthChecker: config?.bodyLengthChecker ?? util_body_length_browser_1.calculateBodyLength, + defaultUserAgentProvider: config?.defaultUserAgentProvider ?? + (0, util_user_agent_browser_1.createDefaultUserAgentProvider)({ serviceId: clientSharedValues.serviceId, clientVersion: package_json_1.default.version }), + maxAttempts: config?.maxAttempts ?? util_retry_1.DEFAULT_MAX_ATTEMPTS, + region: config?.region ?? 
(0, invalid_dependency_1.invalidProvider)("Region is missing"), + requestHandler: fetch_http_handler_1.FetchHttpHandler.create(config?.requestHandler ?? defaultConfigProvider), + retryMode: config?.retryMode ?? (async () => (await defaultConfigProvider()).retryMode || util_retry_1.DEFAULT_RETRY_MODE), + sha256: config?.sha256 ?? sha256_browser_1.Sha256, + streamCollector: config?.streamCollector ?? fetch_http_handler_1.streamCollector, + useDualstackEndpoint: config?.useDualstackEndpoint ?? (() => Promise.resolve(config_resolver_1.DEFAULT_USE_DUALSTACK_ENDPOINT)), + useFipsEndpoint: config?.useFipsEndpoint ?? (() => Promise.resolve(config_resolver_1.DEFAULT_USE_FIPS_ENDPOINT)), + }; +}; +exports.getRuntimeConfig = getRuntimeConfig; diff --git a/amplify/functions/downloadDocument/node_modules/@aws-sdk/nested-clients/dist-cjs/submodules/sso-oidc/runtimeConfig.js b/amplify/functions/downloadDocument/node_modules/@aws-sdk/nested-clients/dist-cjs/submodules/sso-oidc/runtimeConfig.js new file mode 100644 index 0000000..9cc237f --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@aws-sdk/nested-clients/dist-cjs/submodules/sso-oidc/runtimeConfig.js @@ -0,0 +1,51 @@ +"use strict"; +Object.defineProperty(exports, "__esModule", { value: true }); +exports.getRuntimeConfig = void 0; +const tslib_1 = require("tslib"); +const package_json_1 = tslib_1.__importDefault(require("../../../package.json")); +const core_1 = require("@aws-sdk/core"); +const util_user_agent_node_1 = require("@aws-sdk/util-user-agent-node"); +const config_resolver_1 = require("@smithy/config-resolver"); +const hash_node_1 = require("@smithy/hash-node"); +const middleware_retry_1 = require("@smithy/middleware-retry"); +const node_config_provider_1 = require("@smithy/node-config-provider"); +const node_http_handler_1 = require("@smithy/node-http-handler"); +const util_body_length_node_1 = require("@smithy/util-body-length-node"); +const util_retry_1 = require("@smithy/util-retry"); +const 
runtimeConfig_shared_1 = require("./runtimeConfig.shared"); +const smithy_client_1 = require("@smithy/smithy-client"); +const util_defaults_mode_node_1 = require("@smithy/util-defaults-mode-node"); +const smithy_client_2 = require("@smithy/smithy-client"); +const getRuntimeConfig = (config) => { + (0, smithy_client_2.emitWarningIfUnsupportedVersion)(process.version); + const defaultsMode = (0, util_defaults_mode_node_1.resolveDefaultsModeConfig)(config); + const defaultConfigProvider = () => defaultsMode().then(smithy_client_1.loadConfigsForDefaultMode); + const clientSharedValues = (0, runtimeConfig_shared_1.getRuntimeConfig)(config); + (0, core_1.emitWarningIfUnsupportedVersion)(process.version); + const profileConfig = { profile: config?.profile }; + return { + ...clientSharedValues, + ...config, + runtime: "node", + defaultsMode, + authSchemePreference: config?.authSchemePreference ?? (0, node_config_provider_1.loadConfig)(core_1.NODE_AUTH_SCHEME_PREFERENCE_OPTIONS, profileConfig), + bodyLengthChecker: config?.bodyLengthChecker ?? util_body_length_node_1.calculateBodyLength, + defaultUserAgentProvider: config?.defaultUserAgentProvider ?? + (0, util_user_agent_node_1.createDefaultUserAgentProvider)({ serviceId: clientSharedValues.serviceId, clientVersion: package_json_1.default.version }), + maxAttempts: config?.maxAttempts ?? (0, node_config_provider_1.loadConfig)(middleware_retry_1.NODE_MAX_ATTEMPT_CONFIG_OPTIONS, config), + region: config?.region ?? + (0, node_config_provider_1.loadConfig)(config_resolver_1.NODE_REGION_CONFIG_OPTIONS, { ...config_resolver_1.NODE_REGION_CONFIG_FILE_OPTIONS, ...profileConfig }), + requestHandler: node_http_handler_1.NodeHttpHandler.create(config?.requestHandler ?? defaultConfigProvider), + retryMode: config?.retryMode ?? 
+ (0, node_config_provider_1.loadConfig)({ + ...middleware_retry_1.NODE_RETRY_MODE_CONFIG_OPTIONS, + default: async () => (await defaultConfigProvider()).retryMode || util_retry_1.DEFAULT_RETRY_MODE, + }, config), + sha256: config?.sha256 ?? hash_node_1.Hash.bind(null, "sha256"), + streamCollector: config?.streamCollector ?? node_http_handler_1.streamCollector, + useDualstackEndpoint: config?.useDualstackEndpoint ?? (0, node_config_provider_1.loadConfig)(config_resolver_1.NODE_USE_DUALSTACK_ENDPOINT_CONFIG_OPTIONS, profileConfig), + useFipsEndpoint: config?.useFipsEndpoint ?? (0, node_config_provider_1.loadConfig)(config_resolver_1.NODE_USE_FIPS_ENDPOINT_CONFIG_OPTIONS, profileConfig), + userAgentAppId: config?.userAgentAppId ?? (0, node_config_provider_1.loadConfig)(util_user_agent_node_1.NODE_APP_ID_CONFIG_OPTIONS, profileConfig), + }; +}; +exports.getRuntimeConfig = getRuntimeConfig; diff --git a/amplify/functions/downloadDocument/node_modules/@aws-sdk/nested-clients/dist-cjs/submodules/sso-oidc/runtimeConfig.native.js b/amplify/functions/downloadDocument/node_modules/@aws-sdk/nested-clients/dist-cjs/submodules/sso-oidc/runtimeConfig.native.js new file mode 100644 index 0000000..34c5f8e --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@aws-sdk/nested-clients/dist-cjs/submodules/sso-oidc/runtimeConfig.native.js @@ -0,0 +1,15 @@ +"use strict"; +Object.defineProperty(exports, "__esModule", { value: true }); +exports.getRuntimeConfig = void 0; +const sha256_js_1 = require("@aws-crypto/sha256-js"); +const runtimeConfig_browser_1 = require("./runtimeConfig.browser"); +const getRuntimeConfig = (config) => { + const browserDefaults = (0, runtimeConfig_browser_1.getRuntimeConfig)(config); + return { + ...browserDefaults, + ...config, + runtime: "react-native", + sha256: config?.sha256 ?? 
sha256_js_1.Sha256, + }; +}; +exports.getRuntimeConfig = getRuntimeConfig; diff --git a/amplify/functions/downloadDocument/node_modules/@aws-sdk/nested-clients/dist-cjs/submodules/sso-oidc/runtimeConfig.shared.js b/amplify/functions/downloadDocument/node_modules/@aws-sdk/nested-clients/dist-cjs/submodules/sso-oidc/runtimeConfig.shared.js new file mode 100644 index 0000000..a305a1b --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@aws-sdk/nested-clients/dist-cjs/submodules/sso-oidc/runtimeConfig.shared.js @@ -0,0 +1,40 @@ +"use strict"; +Object.defineProperty(exports, "__esModule", { value: true }); +exports.getRuntimeConfig = void 0; +const core_1 = require("@aws-sdk/core"); +const core_2 = require("@smithy/core"); +const smithy_client_1 = require("@smithy/smithy-client"); +const url_parser_1 = require("@smithy/url-parser"); +const util_base64_1 = require("@smithy/util-base64"); +const util_utf8_1 = require("@smithy/util-utf8"); +const httpAuthSchemeProvider_1 = require("./auth/httpAuthSchemeProvider"); +const endpointResolver_1 = require("./endpoint/endpointResolver"); +const getRuntimeConfig = (config) => { + return { + apiVersion: "2019-06-10", + base64Decoder: config?.base64Decoder ?? util_base64_1.fromBase64, + base64Encoder: config?.base64Encoder ?? util_base64_1.toBase64, + disableHostPrefix: config?.disableHostPrefix ?? false, + endpointProvider: config?.endpointProvider ?? endpointResolver_1.defaultEndpointResolver, + extensions: config?.extensions ?? [], + httpAuthSchemeProvider: config?.httpAuthSchemeProvider ?? httpAuthSchemeProvider_1.defaultSSOOIDCHttpAuthSchemeProvider, + httpAuthSchemes: config?.httpAuthSchemes ?? 
[ + { + schemeId: "aws.auth#sigv4", + identityProvider: (ipc) => ipc.getIdentityProvider("aws.auth#sigv4"), + signer: new core_1.AwsSdkSigV4Signer(), + }, + { + schemeId: "smithy.api#noAuth", + identityProvider: (ipc) => ipc.getIdentityProvider("smithy.api#noAuth") || (async () => ({})), + signer: new core_2.NoAuthSigner(), + }, + ], + logger: config?.logger ?? new smithy_client_1.NoOpLogger(), + serviceId: config?.serviceId ?? "SSO OIDC", + urlParser: config?.urlParser ?? url_parser_1.parseUrl, + utf8Decoder: config?.utf8Decoder ?? util_utf8_1.fromUtf8, + utf8Encoder: config?.utf8Encoder ?? util_utf8_1.toUtf8, + }; +}; +exports.getRuntimeConfig = getRuntimeConfig; diff --git a/amplify/functions/downloadDocument/node_modules/@aws-sdk/nested-clients/dist-cjs/submodules/sts/STSClient.js b/amplify/functions/downloadDocument/node_modules/@aws-sdk/nested-clients/dist-cjs/submodules/sts/STSClient.js new file mode 100644 index 0000000..13c3c74 --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@aws-sdk/nested-clients/dist-cjs/submodules/sts/STSClient.js @@ -0,0 +1,52 @@ +"use strict"; +Object.defineProperty(exports, "__esModule", { value: true }); +exports.STSClient = exports.__Client = void 0; +const middleware_host_header_1 = require("@aws-sdk/middleware-host-header"); +const middleware_logger_1 = require("@aws-sdk/middleware-logger"); +const middleware_recursion_detection_1 = require("@aws-sdk/middleware-recursion-detection"); +const middleware_user_agent_1 = require("@aws-sdk/middleware-user-agent"); +const config_resolver_1 = require("@smithy/config-resolver"); +const core_1 = require("@smithy/core"); +const middleware_content_length_1 = require("@smithy/middleware-content-length"); +const middleware_endpoint_1 = require("@smithy/middleware-endpoint"); +const middleware_retry_1 = require("@smithy/middleware-retry"); +const smithy_client_1 = require("@smithy/smithy-client"); +Object.defineProperty(exports, "__Client", { enumerable: true, get: function 
() { return smithy_client_1.Client; } }); +const httpAuthSchemeProvider_1 = require("./auth/httpAuthSchemeProvider"); +const EndpointParameters_1 = require("./endpoint/EndpointParameters"); +const runtimeConfig_1 = require("./runtimeConfig"); +const runtimeExtensions_1 = require("./runtimeExtensions"); +class STSClient extends smithy_client_1.Client { + config; + constructor(...[configuration]) { + const _config_0 = (0, runtimeConfig_1.getRuntimeConfig)(configuration || {}); + super(_config_0); + this.initConfig = _config_0; + const _config_1 = (0, EndpointParameters_1.resolveClientEndpointParameters)(_config_0); + const _config_2 = (0, middleware_user_agent_1.resolveUserAgentConfig)(_config_1); + const _config_3 = (0, middleware_retry_1.resolveRetryConfig)(_config_2); + const _config_4 = (0, config_resolver_1.resolveRegionConfig)(_config_3); + const _config_5 = (0, middleware_host_header_1.resolveHostHeaderConfig)(_config_4); + const _config_6 = (0, middleware_endpoint_1.resolveEndpointConfig)(_config_5); + const _config_7 = (0, httpAuthSchemeProvider_1.resolveHttpAuthSchemeConfig)(_config_6); + const _config_8 = (0, runtimeExtensions_1.resolveRuntimeExtensions)(_config_7, configuration?.extensions || []); + this.config = _config_8; + this.middlewareStack.use((0, middleware_user_agent_1.getUserAgentPlugin)(this.config)); + this.middlewareStack.use((0, middleware_retry_1.getRetryPlugin)(this.config)); + this.middlewareStack.use((0, middleware_content_length_1.getContentLengthPlugin)(this.config)); + this.middlewareStack.use((0, middleware_host_header_1.getHostHeaderPlugin)(this.config)); + this.middlewareStack.use((0, middleware_logger_1.getLoggerPlugin)(this.config)); + this.middlewareStack.use((0, middleware_recursion_detection_1.getRecursionDetectionPlugin)(this.config)); + this.middlewareStack.use((0, core_1.getHttpAuthSchemeEndpointRuleSetPlugin)(this.config, { + httpAuthSchemeParametersProvider: 
httpAuthSchemeProvider_1.defaultSTSHttpAuthSchemeParametersProvider, + identityProviderConfigProvider: async (config) => new core_1.DefaultIdentityProviderConfig({ + "aws.auth#sigv4": config.credentials, + }), + })); + this.middlewareStack.use((0, core_1.getHttpSigningPlugin)(this.config)); + } + destroy() { + super.destroy(); + } +} +exports.STSClient = STSClient; diff --git a/amplify/functions/downloadDocument/node_modules/@aws-sdk/nested-clients/dist-cjs/submodules/sts/auth/httpAuthExtensionConfiguration.js b/amplify/functions/downloadDocument/node_modules/@aws-sdk/nested-clients/dist-cjs/submodules/sts/auth/httpAuthExtensionConfiguration.js new file mode 100644 index 0000000..239095e --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@aws-sdk/nested-clients/dist-cjs/submodules/sts/auth/httpAuthExtensionConfiguration.js @@ -0,0 +1,43 @@ +"use strict"; +Object.defineProperty(exports, "__esModule", { value: true }); +exports.resolveHttpAuthRuntimeConfig = exports.getHttpAuthExtensionConfiguration = void 0; +const getHttpAuthExtensionConfiguration = (runtimeConfig) => { + const _httpAuthSchemes = runtimeConfig.httpAuthSchemes; + let _httpAuthSchemeProvider = runtimeConfig.httpAuthSchemeProvider; + let _credentials = runtimeConfig.credentials; + return { + setHttpAuthScheme(httpAuthScheme) { + const index = _httpAuthSchemes.findIndex((scheme) => scheme.schemeId === httpAuthScheme.schemeId); + if (index === -1) { + _httpAuthSchemes.push(httpAuthScheme); + } + else { + _httpAuthSchemes.splice(index, 1, httpAuthScheme); + } + }, + httpAuthSchemes() { + return _httpAuthSchemes; + }, + setHttpAuthSchemeProvider(httpAuthSchemeProvider) { + _httpAuthSchemeProvider = httpAuthSchemeProvider; + }, + httpAuthSchemeProvider() { + return _httpAuthSchemeProvider; + }, + setCredentials(credentials) { + _credentials = credentials; + }, + credentials() { + return _credentials; + }, + }; +}; +exports.getHttpAuthExtensionConfiguration = 
getHttpAuthExtensionConfiguration; +const resolveHttpAuthRuntimeConfig = (config) => { + return { + httpAuthSchemes: config.httpAuthSchemes(), + httpAuthSchemeProvider: config.httpAuthSchemeProvider(), + credentials: config.credentials(), + }; +}; +exports.resolveHttpAuthRuntimeConfig = resolveHttpAuthRuntimeConfig; diff --git a/amplify/functions/downloadDocument/node_modules/@aws-sdk/nested-clients/dist-cjs/submodules/sts/auth/httpAuthSchemeProvider.js b/amplify/functions/downloadDocument/node_modules/@aws-sdk/nested-clients/dist-cjs/submodules/sts/auth/httpAuthSchemeProvider.js new file mode 100644 index 0000000..842241a --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@aws-sdk/nested-clients/dist-cjs/submodules/sts/auth/httpAuthSchemeProvider.js @@ -0,0 +1,62 @@ +"use strict"; +Object.defineProperty(exports, "__esModule", { value: true }); +exports.resolveHttpAuthSchemeConfig = exports.resolveStsAuthConfig = exports.defaultSTSHttpAuthSchemeProvider = exports.defaultSTSHttpAuthSchemeParametersProvider = void 0; +const core_1 = require("@aws-sdk/core"); +const util_middleware_1 = require("@smithy/util-middleware"); +const STSClient_1 = require("../STSClient"); +const defaultSTSHttpAuthSchemeParametersProvider = async (config, context, input) => { + return { + operation: (0, util_middleware_1.getSmithyContext)(context).operation, + region: (await (0, util_middleware_1.normalizeProvider)(config.region)()) || + (() => { + throw new Error("expected `region` to be configured for `aws.auth#sigv4`"); + })(), + }; +}; +exports.defaultSTSHttpAuthSchemeParametersProvider = defaultSTSHttpAuthSchemeParametersProvider; +function createAwsAuthSigv4HttpAuthOption(authParameters) { + return { + schemeId: "aws.auth#sigv4", + signingProperties: { + name: "sts", + region: authParameters.region, + }, + propertiesExtractor: (config, context) => ({ + signingProperties: { + config, + context, + }, + }), + }; +} +function 
createSmithyApiNoAuthHttpAuthOption(authParameters) { + return { + schemeId: "smithy.api#noAuth", + }; +} +const defaultSTSHttpAuthSchemeProvider = (authParameters) => { + const options = []; + switch (authParameters.operation) { + case "AssumeRoleWithWebIdentity": { + options.push(createSmithyApiNoAuthHttpAuthOption(authParameters)); + break; + } + default: { + options.push(createAwsAuthSigv4HttpAuthOption(authParameters)); + } + } + return options; +}; +exports.defaultSTSHttpAuthSchemeProvider = defaultSTSHttpAuthSchemeProvider; +const resolveStsAuthConfig = (input) => Object.assign(input, { + stsClientCtor: STSClient_1.STSClient, +}); +exports.resolveStsAuthConfig = resolveStsAuthConfig; +const resolveHttpAuthSchemeConfig = (config) => { + const config_0 = (0, exports.resolveStsAuthConfig)(config); + const config_1 = (0, core_1.resolveAwsSdkSigV4Config)(config_0); + return Object.assign(config_1, { + authSchemePreference: (0, util_middleware_1.normalizeProvider)(config.authSchemePreference ?? []), + }); +}; +exports.resolveHttpAuthSchemeConfig = resolveHttpAuthSchemeConfig; diff --git a/amplify/functions/downloadDocument/node_modules/@aws-sdk/nested-clients/dist-cjs/submodules/sts/endpoint/EndpointParameters.js b/amplify/functions/downloadDocument/node_modules/@aws-sdk/nested-clients/dist-cjs/submodules/sts/endpoint/EndpointParameters.js new file mode 100644 index 0000000..3aec6a5 --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@aws-sdk/nested-clients/dist-cjs/submodules/sts/endpoint/EndpointParameters.js @@ -0,0 +1,19 @@ +"use strict"; +Object.defineProperty(exports, "__esModule", { value: true }); +exports.commonParams = exports.resolveClientEndpointParameters = void 0; +const resolveClientEndpointParameters = (options) => { + return Object.assign(options, { + useDualstackEndpoint: options.useDualstackEndpoint ?? false, + useFipsEndpoint: options.useFipsEndpoint ?? false, + useGlobalEndpoint: options.useGlobalEndpoint ?? 
false, + defaultSigningName: "sts", + }); +}; +exports.resolveClientEndpointParameters = resolveClientEndpointParameters; +exports.commonParams = { + UseGlobalEndpoint: { type: "builtInParams", name: "useGlobalEndpoint" }, + UseFIPS: { type: "builtInParams", name: "useFipsEndpoint" }, + Endpoint: { type: "builtInParams", name: "endpoint" }, + Region: { type: "builtInParams", name: "region" }, + UseDualStack: { type: "builtInParams", name: "useDualstackEndpoint" }, +}; diff --git a/amplify/functions/downloadDocument/node_modules/@aws-sdk/nested-clients/dist-cjs/submodules/sts/endpoint/endpointResolver.js b/amplify/functions/downloadDocument/node_modules/@aws-sdk/nested-clients/dist-cjs/submodules/sts/endpoint/endpointResolver.js new file mode 100644 index 0000000..6bfb6e9 --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@aws-sdk/nested-clients/dist-cjs/submodules/sts/endpoint/endpointResolver.js @@ -0,0 +1,18 @@ +"use strict"; +Object.defineProperty(exports, "__esModule", { value: true }); +exports.defaultEndpointResolver = void 0; +const util_endpoints_1 = require("@aws-sdk/util-endpoints"); +const util_endpoints_2 = require("@smithy/util-endpoints"); +const ruleset_1 = require("./ruleset"); +const cache = new util_endpoints_2.EndpointCache({ + size: 50, + params: ["Endpoint", "Region", "UseDualStack", "UseFIPS", "UseGlobalEndpoint"], +}); +const defaultEndpointResolver = (endpointParams, context = {}) => { + return cache.get(endpointParams, () => (0, util_endpoints_2.resolveEndpoint)(ruleset_1.ruleSet, { + endpointParams: endpointParams, + logger: context.logger, + })); +}; +exports.defaultEndpointResolver = defaultEndpointResolver; +util_endpoints_2.customEndpointFunctions.aws = util_endpoints_1.awsEndpointFunctions; diff --git a/amplify/functions/downloadDocument/node_modules/@aws-sdk/nested-clients/dist-cjs/submodules/sts/endpoint/ruleset.js 
b/amplify/functions/downloadDocument/node_modules/@aws-sdk/nested-clients/dist-cjs/submodules/sts/endpoint/ruleset.js new file mode 100644 index 0000000..7428259 --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@aws-sdk/nested-clients/dist-cjs/submodules/sts/endpoint/ruleset.js @@ -0,0 +1,7 @@ +"use strict"; +Object.defineProperty(exports, "__esModule", { value: true }); +exports.ruleSet = void 0; +const F = "required", G = "type", H = "fn", I = "argv", J = "ref"; +const a = false, b = true, c = "booleanEquals", d = "stringEquals", e = "sigv4", f = "sts", g = "us-east-1", h = "endpoint", i = "https://sts.{Region}.{PartitionResult#dnsSuffix}", j = "tree", k = "error", l = "getAttr", m = { [F]: false, [G]: "String" }, n = { [F]: true, "default": false, [G]: "Boolean" }, o = { [J]: "Endpoint" }, p = { [H]: "isSet", [I]: [{ [J]: "Region" }] }, q = { [J]: "Region" }, r = { [H]: "aws.partition", [I]: [q], "assign": "PartitionResult" }, s = { [J]: "UseFIPS" }, t = { [J]: "UseDualStack" }, u = { "url": "https://sts.amazonaws.com", "properties": { "authSchemes": [{ "name": e, "signingName": f, "signingRegion": g }] }, "headers": {} }, v = {}, w = { "conditions": [{ [H]: d, [I]: [q, "aws-global"] }], [h]: u, [G]: h }, x = { [H]: c, [I]: [s, true] }, y = { [H]: c, [I]: [t, true] }, z = { [H]: l, [I]: [{ [J]: "PartitionResult" }, "supportsFIPS"] }, A = { [J]: "PartitionResult" }, B = { [H]: c, [I]: [true, { [H]: l, [I]: [A, "supportsDualStack"] }] }, C = [{ [H]: "isSet", [I]: [o] }], D = [x], E = [y]; +const _data = { version: "1.0", parameters: { Region: m, UseDualStack: n, UseFIPS: n, Endpoint: m, UseGlobalEndpoint: n }, rules: [{ conditions: [{ [H]: c, [I]: [{ [J]: "UseGlobalEndpoint" }, b] }, { [H]: "not", [I]: C }, p, r, { [H]: c, [I]: [s, a] }, { [H]: c, [I]: [t, a] }], rules: [{ conditions: [{ [H]: d, [I]: [q, "ap-northeast-1"] }], endpoint: u, [G]: h }, { conditions: [{ [H]: d, [I]: [q, "ap-south-1"] }], endpoint: u, [G]: h }, { conditions: [{ [H]: 
d, [I]: [q, "ap-southeast-1"] }], endpoint: u, [G]: h }, { conditions: [{ [H]: d, [I]: [q, "ap-southeast-2"] }], endpoint: u, [G]: h }, w, { conditions: [{ [H]: d, [I]: [q, "ca-central-1"] }], endpoint: u, [G]: h }, { conditions: [{ [H]: d, [I]: [q, "eu-central-1"] }], endpoint: u, [G]: h }, { conditions: [{ [H]: d, [I]: [q, "eu-north-1"] }], endpoint: u, [G]: h }, { conditions: [{ [H]: d, [I]: [q, "eu-west-1"] }], endpoint: u, [G]: h }, { conditions: [{ [H]: d, [I]: [q, "eu-west-2"] }], endpoint: u, [G]: h }, { conditions: [{ [H]: d, [I]: [q, "eu-west-3"] }], endpoint: u, [G]: h }, { conditions: [{ [H]: d, [I]: [q, "sa-east-1"] }], endpoint: u, [G]: h }, { conditions: [{ [H]: d, [I]: [q, g] }], endpoint: u, [G]: h }, { conditions: [{ [H]: d, [I]: [q, "us-east-2"] }], endpoint: u, [G]: h }, { conditions: [{ [H]: d, [I]: [q, "us-west-1"] }], endpoint: u, [G]: h }, { conditions: [{ [H]: d, [I]: [q, "us-west-2"] }], endpoint: u, [G]: h }, { endpoint: { url: i, properties: { authSchemes: [{ name: e, signingName: f, signingRegion: "{Region}" }] }, headers: v }, [G]: h }], [G]: j }, { conditions: C, rules: [{ conditions: D, error: "Invalid Configuration: FIPS and custom endpoint are not supported", [G]: k }, { conditions: E, error: "Invalid Configuration: Dualstack and custom endpoint are not supported", [G]: k }, { endpoint: { url: o, properties: v, headers: v }, [G]: h }], [G]: j }, { conditions: [p], rules: [{ conditions: [r], rules: [{ conditions: [x, y], rules: [{ conditions: [{ [H]: c, [I]: [b, z] }, B], rules: [{ endpoint: { url: "https://sts-fips.{Region}.{PartitionResult#dualStackDnsSuffix}", properties: v, headers: v }, [G]: h }], [G]: j }, { error: "FIPS and DualStack are enabled, but this partition does not support one or both", [G]: k }], [G]: j }, { conditions: D, rules: [{ conditions: [{ [H]: c, [I]: [z, b] }], rules: [{ conditions: [{ [H]: d, [I]: [{ [H]: l, [I]: [A, "name"] }, "aws-us-gov"] }], endpoint: { url: "https://sts.{Region}.amazonaws.com", 
properties: v, headers: v }, [G]: h }, { endpoint: { url: "https://sts-fips.{Region}.{PartitionResult#dnsSuffix}", properties: v, headers: v }, [G]: h }], [G]: j }, { error: "FIPS is enabled but this partition does not support FIPS", [G]: k }], [G]: j }, { conditions: E, rules: [{ conditions: [B], rules: [{ endpoint: { url: "https://sts.{Region}.{PartitionResult#dualStackDnsSuffix}", properties: v, headers: v }, [G]: h }], [G]: j }, { error: "DualStack is enabled but this partition does not support DualStack", [G]: k }], [G]: j }, w, { endpoint: { url: i, properties: v, headers: v }, [G]: h }], [G]: j }], [G]: j }, { error: "Invalid Configuration: Missing Region", [G]: k }] }; +exports.ruleSet = _data; diff --git a/amplify/functions/downloadDocument/node_modules/@aws-sdk/nested-clients/dist-cjs/submodules/sts/index.js b/amplify/functions/downloadDocument/node_modules/@aws-sdk/nested-clients/dist-cjs/submodules/sts/index.js new file mode 100644 index 0000000..bb0c42a --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@aws-sdk/nested-clients/dist-cjs/submodules/sts/index.js @@ -0,0 +1,951 @@ +"use strict"; +var __defProp = Object.defineProperty; +var __getOwnPropDesc = Object.getOwnPropertyDescriptor; +var __getOwnPropNames = Object.getOwnPropertyNames; +var __hasOwnProp = Object.prototype.hasOwnProperty; +var __name = (target, value) => __defProp(target, "name", { value, configurable: true }); +var __export = (target, all) => { + for (var name in all) + __defProp(target, name, { get: all[name], enumerable: true }); +}; +var __copyProps = (to, from, except, desc) => { + if (from && typeof from === "object" || typeof from === "function") { + for (let key of __getOwnPropNames(from)) + if (!__hasOwnProp.call(to, key) && key !== except) + __defProp(to, key, { get: () => from[key], enumerable: !(desc = __getOwnPropDesc(from, key)) || desc.enumerable }); + } + return to; +}; +var __reExport = (target, mod, secondTarget) => (__copyProps(target, mod, 
"default"), secondTarget && __copyProps(secondTarget, mod, "default")); +var __toCommonJS = (mod) => __copyProps(__defProp({}, "__esModule", { value: true }), mod); + +// src/submodules/sts/index.ts +var index_exports = {}; +__export(index_exports, { + AssumeRoleCommand: () => AssumeRoleCommand, + AssumeRoleResponseFilterSensitiveLog: () => AssumeRoleResponseFilterSensitiveLog, + AssumeRoleWithWebIdentityCommand: () => AssumeRoleWithWebIdentityCommand, + AssumeRoleWithWebIdentityRequestFilterSensitiveLog: () => AssumeRoleWithWebIdentityRequestFilterSensitiveLog, + AssumeRoleWithWebIdentityResponseFilterSensitiveLog: () => AssumeRoleWithWebIdentityResponseFilterSensitiveLog, + ClientInputEndpointParameters: () => import_EndpointParameters3.ClientInputEndpointParameters, + CredentialsFilterSensitiveLog: () => CredentialsFilterSensitiveLog, + ExpiredTokenException: () => ExpiredTokenException, + IDPCommunicationErrorException: () => IDPCommunicationErrorException, + IDPRejectedClaimException: () => IDPRejectedClaimException, + InvalidIdentityTokenException: () => InvalidIdentityTokenException, + MalformedPolicyDocumentException: () => MalformedPolicyDocumentException, + PackedPolicyTooLargeException: () => PackedPolicyTooLargeException, + RegionDisabledException: () => RegionDisabledException, + STS: () => STS, + STSServiceException: () => STSServiceException, + decorateDefaultCredentialProvider: () => decorateDefaultCredentialProvider, + getDefaultRoleAssumer: () => getDefaultRoleAssumer2, + getDefaultRoleAssumerWithWebIdentity: () => getDefaultRoleAssumerWithWebIdentity2 +}); +module.exports = __toCommonJS(index_exports); +__reExport(index_exports, require("./STSClient"), module.exports); + +// src/submodules/sts/STS.ts +var import_smithy_client6 = require("@smithy/smithy-client"); + +// src/submodules/sts/commands/AssumeRoleCommand.ts +var import_middleware_endpoint = require("@smithy/middleware-endpoint"); +var import_middleware_serde = 
require("@smithy/middleware-serde"); +var import_smithy_client4 = require("@smithy/smithy-client"); +var import_EndpointParameters = require("./endpoint/EndpointParameters"); + +// src/submodules/sts/models/models_0.ts +var import_smithy_client2 = require("@smithy/smithy-client"); + +// src/submodules/sts/models/STSServiceException.ts +var import_smithy_client = require("@smithy/smithy-client"); +var STSServiceException = class _STSServiceException extends import_smithy_client.ServiceException { + static { + __name(this, "STSServiceException"); + } + /** + * @internal + */ + constructor(options) { + super(options); + Object.setPrototypeOf(this, _STSServiceException.prototype); + } +}; + +// src/submodules/sts/models/models_0.ts +var CredentialsFilterSensitiveLog = /* @__PURE__ */ __name((obj) => ({ + ...obj, + ...obj.SecretAccessKey && { SecretAccessKey: import_smithy_client2.SENSITIVE_STRING } +}), "CredentialsFilterSensitiveLog"); +var AssumeRoleResponseFilterSensitiveLog = /* @__PURE__ */ __name((obj) => ({ + ...obj, + ...obj.Credentials && { Credentials: CredentialsFilterSensitiveLog(obj.Credentials) } +}), "AssumeRoleResponseFilterSensitiveLog"); +var ExpiredTokenException = class _ExpiredTokenException extends STSServiceException { + static { + __name(this, "ExpiredTokenException"); + } + name = "ExpiredTokenException"; + $fault = "client"; + /** + * @internal + */ + constructor(opts) { + super({ + name: "ExpiredTokenException", + $fault: "client", + ...opts + }); + Object.setPrototypeOf(this, _ExpiredTokenException.prototype); + } +}; +var MalformedPolicyDocumentException = class _MalformedPolicyDocumentException extends STSServiceException { + static { + __name(this, "MalformedPolicyDocumentException"); + } + name = "MalformedPolicyDocumentException"; + $fault = "client"; + /** + * @internal + */ + constructor(opts) { + super({ + name: "MalformedPolicyDocumentException", + $fault: "client", + ...opts + }); + Object.setPrototypeOf(this, 
_MalformedPolicyDocumentException.prototype); + } +}; +var PackedPolicyTooLargeException = class _PackedPolicyTooLargeException extends STSServiceException { + static { + __name(this, "PackedPolicyTooLargeException"); + } + name = "PackedPolicyTooLargeException"; + $fault = "client"; + /** + * @internal + */ + constructor(opts) { + super({ + name: "PackedPolicyTooLargeException", + $fault: "client", + ...opts + }); + Object.setPrototypeOf(this, _PackedPolicyTooLargeException.prototype); + } +}; +var RegionDisabledException = class _RegionDisabledException extends STSServiceException { + static { + __name(this, "RegionDisabledException"); + } + name = "RegionDisabledException"; + $fault = "client"; + /** + * @internal + */ + constructor(opts) { + super({ + name: "RegionDisabledException", + $fault: "client", + ...opts + }); + Object.setPrototypeOf(this, _RegionDisabledException.prototype); + } +}; +var IDPRejectedClaimException = class _IDPRejectedClaimException extends STSServiceException { + static { + __name(this, "IDPRejectedClaimException"); + } + name = "IDPRejectedClaimException"; + $fault = "client"; + /** + * @internal + */ + constructor(opts) { + super({ + name: "IDPRejectedClaimException", + $fault: "client", + ...opts + }); + Object.setPrototypeOf(this, _IDPRejectedClaimException.prototype); + } +}; +var InvalidIdentityTokenException = class _InvalidIdentityTokenException extends STSServiceException { + static { + __name(this, "InvalidIdentityTokenException"); + } + name = "InvalidIdentityTokenException"; + $fault = "client"; + /** + * @internal + */ + constructor(opts) { + super({ + name: "InvalidIdentityTokenException", + $fault: "client", + ...opts + }); + Object.setPrototypeOf(this, _InvalidIdentityTokenException.prototype); + } +}; +var AssumeRoleWithWebIdentityRequestFilterSensitiveLog = /* @__PURE__ */ __name((obj) => ({ + ...obj, + ...obj.WebIdentityToken && { WebIdentityToken: import_smithy_client2.SENSITIVE_STRING } +}), 
"AssumeRoleWithWebIdentityRequestFilterSensitiveLog"); +var AssumeRoleWithWebIdentityResponseFilterSensitiveLog = /* @__PURE__ */ __name((obj) => ({ + ...obj, + ...obj.Credentials && { Credentials: CredentialsFilterSensitiveLog(obj.Credentials) } +}), "AssumeRoleWithWebIdentityResponseFilterSensitiveLog"); +var IDPCommunicationErrorException = class _IDPCommunicationErrorException extends STSServiceException { + static { + __name(this, "IDPCommunicationErrorException"); + } + name = "IDPCommunicationErrorException"; + $fault = "client"; + /** + * @internal + */ + constructor(opts) { + super({ + name: "IDPCommunicationErrorException", + $fault: "client", + ...opts + }); + Object.setPrototypeOf(this, _IDPCommunicationErrorException.prototype); + } +}; + +// src/submodules/sts/protocols/Aws_query.ts +var import_core = require("@aws-sdk/core"); +var import_protocol_http = require("@smithy/protocol-http"); +var import_smithy_client3 = require("@smithy/smithy-client"); +var se_AssumeRoleCommand = /* @__PURE__ */ __name(async (input, context) => { + const headers = SHARED_HEADERS; + let body; + body = buildFormUrlencodedString({ + ...se_AssumeRoleRequest(input, context), + [_A]: _AR, + [_V]: _ + }); + return buildHttpRpcRequest(context, headers, "/", void 0, body); +}, "se_AssumeRoleCommand"); +var se_AssumeRoleWithWebIdentityCommand = /* @__PURE__ */ __name(async (input, context) => { + const headers = SHARED_HEADERS; + let body; + body = buildFormUrlencodedString({ + ...se_AssumeRoleWithWebIdentityRequest(input, context), + [_A]: _ARWWI, + [_V]: _ + }); + return buildHttpRpcRequest(context, headers, "/", void 0, body); +}, "se_AssumeRoleWithWebIdentityCommand"); +var de_AssumeRoleCommand = /* @__PURE__ */ __name(async (output, context) => { + if (output.statusCode >= 300) { + return de_CommandError(output, context); + } + const data = await (0, import_core.parseXmlBody)(output.body, context); + let contents = {}; + contents = de_AssumeRoleResponse(data.AssumeRoleResult, 
context); + const response = { + $metadata: deserializeMetadata(output), + ...contents + }; + return response; +}, "de_AssumeRoleCommand"); +var de_AssumeRoleWithWebIdentityCommand = /* @__PURE__ */ __name(async (output, context) => { + if (output.statusCode >= 300) { + return de_CommandError(output, context); + } + const data = await (0, import_core.parseXmlBody)(output.body, context); + let contents = {}; + contents = de_AssumeRoleWithWebIdentityResponse(data.AssumeRoleWithWebIdentityResult, context); + const response = { + $metadata: deserializeMetadata(output), + ...contents + }; + return response; +}, "de_AssumeRoleWithWebIdentityCommand"); +var de_CommandError = /* @__PURE__ */ __name(async (output, context) => { + const parsedOutput = { + ...output, + body: await (0, import_core.parseXmlErrorBody)(output.body, context) + }; + const errorCode = loadQueryErrorCode(output, parsedOutput.body); + switch (errorCode) { + case "ExpiredTokenException": + case "com.amazonaws.sts#ExpiredTokenException": + throw await de_ExpiredTokenExceptionRes(parsedOutput, context); + case "MalformedPolicyDocument": + case "com.amazonaws.sts#MalformedPolicyDocumentException": + throw await de_MalformedPolicyDocumentExceptionRes(parsedOutput, context); + case "PackedPolicyTooLarge": + case "com.amazonaws.sts#PackedPolicyTooLargeException": + throw await de_PackedPolicyTooLargeExceptionRes(parsedOutput, context); + case "RegionDisabledException": + case "com.amazonaws.sts#RegionDisabledException": + throw await de_RegionDisabledExceptionRes(parsedOutput, context); + case "IDPCommunicationError": + case "com.amazonaws.sts#IDPCommunicationErrorException": + throw await de_IDPCommunicationErrorExceptionRes(parsedOutput, context); + case "IDPRejectedClaim": + case "com.amazonaws.sts#IDPRejectedClaimException": + throw await de_IDPRejectedClaimExceptionRes(parsedOutput, context); + case "InvalidIdentityToken": + case "com.amazonaws.sts#InvalidIdentityTokenException": + throw await 
de_InvalidIdentityTokenExceptionRes(parsedOutput, context); + default: + const parsedBody = parsedOutput.body; + return throwDefaultError({ + output, + parsedBody: parsedBody.Error, + errorCode + }); + } +}, "de_CommandError"); +var de_ExpiredTokenExceptionRes = /* @__PURE__ */ __name(async (parsedOutput, context) => { + const body = parsedOutput.body; + const deserialized = de_ExpiredTokenException(body.Error, context); + const exception = new ExpiredTokenException({ + $metadata: deserializeMetadata(parsedOutput), + ...deserialized + }); + return (0, import_smithy_client3.decorateServiceException)(exception, body); +}, "de_ExpiredTokenExceptionRes"); +var de_IDPCommunicationErrorExceptionRes = /* @__PURE__ */ __name(async (parsedOutput, context) => { + const body = parsedOutput.body; + const deserialized = de_IDPCommunicationErrorException(body.Error, context); + const exception = new IDPCommunicationErrorException({ + $metadata: deserializeMetadata(parsedOutput), + ...deserialized + }); + return (0, import_smithy_client3.decorateServiceException)(exception, body); +}, "de_IDPCommunicationErrorExceptionRes"); +var de_IDPRejectedClaimExceptionRes = /* @__PURE__ */ __name(async (parsedOutput, context) => { + const body = parsedOutput.body; + const deserialized = de_IDPRejectedClaimException(body.Error, context); + const exception = new IDPRejectedClaimException({ + $metadata: deserializeMetadata(parsedOutput), + ...deserialized + }); + return (0, import_smithy_client3.decorateServiceException)(exception, body); +}, "de_IDPRejectedClaimExceptionRes"); +var de_InvalidIdentityTokenExceptionRes = /* @__PURE__ */ __name(async (parsedOutput, context) => { + const body = parsedOutput.body; + const deserialized = de_InvalidIdentityTokenException(body.Error, context); + const exception = new InvalidIdentityTokenException({ + $metadata: deserializeMetadata(parsedOutput), + ...deserialized + }); + return (0, import_smithy_client3.decorateServiceException)(exception, body); +}, 
"de_InvalidIdentityTokenExceptionRes"); +var de_MalformedPolicyDocumentExceptionRes = /* @__PURE__ */ __name(async (parsedOutput, context) => { + const body = parsedOutput.body; + const deserialized = de_MalformedPolicyDocumentException(body.Error, context); + const exception = new MalformedPolicyDocumentException({ + $metadata: deserializeMetadata(parsedOutput), + ...deserialized + }); + return (0, import_smithy_client3.decorateServiceException)(exception, body); +}, "de_MalformedPolicyDocumentExceptionRes"); +var de_PackedPolicyTooLargeExceptionRes = /* @__PURE__ */ __name(async (parsedOutput, context) => { + const body = parsedOutput.body; + const deserialized = de_PackedPolicyTooLargeException(body.Error, context); + const exception = new PackedPolicyTooLargeException({ + $metadata: deserializeMetadata(parsedOutput), + ...deserialized + }); + return (0, import_smithy_client3.decorateServiceException)(exception, body); +}, "de_PackedPolicyTooLargeExceptionRes"); +var de_RegionDisabledExceptionRes = /* @__PURE__ */ __name(async (parsedOutput, context) => { + const body = parsedOutput.body; + const deserialized = de_RegionDisabledException(body.Error, context); + const exception = new RegionDisabledException({ + $metadata: deserializeMetadata(parsedOutput), + ...deserialized + }); + return (0, import_smithy_client3.decorateServiceException)(exception, body); +}, "de_RegionDisabledExceptionRes"); +var se_AssumeRoleRequest = /* @__PURE__ */ __name((input, context) => { + const entries = {}; + if (input[_RA] != null) { + entries[_RA] = input[_RA]; + } + if (input[_RSN] != null) { + entries[_RSN] = input[_RSN]; + } + if (input[_PA] != null) { + const memberEntries = se_policyDescriptorListType(input[_PA], context); + if (input[_PA]?.length === 0) { + entries.PolicyArns = []; + } + Object.entries(memberEntries).forEach(([key, value]) => { + const loc = `PolicyArns.${key}`; + entries[loc] = value; + }); + } + if (input[_P] != null) { + entries[_P] = input[_P]; + } + if 
(input[_DS] != null) { + entries[_DS] = input[_DS]; + } + if (input[_T] != null) { + const memberEntries = se_tagListType(input[_T], context); + if (input[_T]?.length === 0) { + entries.Tags = []; + } + Object.entries(memberEntries).forEach(([key, value]) => { + const loc = `Tags.${key}`; + entries[loc] = value; + }); + } + if (input[_TTK] != null) { + const memberEntries = se_tagKeyListType(input[_TTK], context); + if (input[_TTK]?.length === 0) { + entries.TransitiveTagKeys = []; + } + Object.entries(memberEntries).forEach(([key, value]) => { + const loc = `TransitiveTagKeys.${key}`; + entries[loc] = value; + }); + } + if (input[_EI] != null) { + entries[_EI] = input[_EI]; + } + if (input[_SN] != null) { + entries[_SN] = input[_SN]; + } + if (input[_TC] != null) { + entries[_TC] = input[_TC]; + } + if (input[_SI] != null) { + entries[_SI] = input[_SI]; + } + if (input[_PC] != null) { + const memberEntries = se_ProvidedContextsListType(input[_PC], context); + if (input[_PC]?.length === 0) { + entries.ProvidedContexts = []; + } + Object.entries(memberEntries).forEach(([key, value]) => { + const loc = `ProvidedContexts.${key}`; + entries[loc] = value; + }); + } + return entries; +}, "se_AssumeRoleRequest"); +var se_AssumeRoleWithWebIdentityRequest = /* @__PURE__ */ __name((input, context) => { + const entries = {}; + if (input[_RA] != null) { + entries[_RA] = input[_RA]; + } + if (input[_RSN] != null) { + entries[_RSN] = input[_RSN]; + } + if (input[_WIT] != null) { + entries[_WIT] = input[_WIT]; + } + if (input[_PI] != null) { + entries[_PI] = input[_PI]; + } + if (input[_PA] != null) { + const memberEntries = se_policyDescriptorListType(input[_PA], context); + if (input[_PA]?.length === 0) { + entries.PolicyArns = []; + } + Object.entries(memberEntries).forEach(([key, value]) => { + const loc = `PolicyArns.${key}`; + entries[loc] = value; + }); + } + if (input[_P] != null) { + entries[_P] = input[_P]; + } + if (input[_DS] != null) { + entries[_DS] = input[_DS]; + 
} + return entries; +}, "se_AssumeRoleWithWebIdentityRequest"); +var se_policyDescriptorListType = /* @__PURE__ */ __name((input, context) => { + const entries = {}; + let counter = 1; + for (const entry of input) { + if (entry === null) { + continue; + } + const memberEntries = se_PolicyDescriptorType(entry, context); + Object.entries(memberEntries).forEach(([key, value]) => { + entries[`member.${counter}.${key}`] = value; + }); + counter++; + } + return entries; +}, "se_policyDescriptorListType"); +var se_PolicyDescriptorType = /* @__PURE__ */ __name((input, context) => { + const entries = {}; + if (input[_a] != null) { + entries[_a] = input[_a]; + } + return entries; +}, "se_PolicyDescriptorType"); +var se_ProvidedContext = /* @__PURE__ */ __name((input, context) => { + const entries = {}; + if (input[_PAr] != null) { + entries[_PAr] = input[_PAr]; + } + if (input[_CA] != null) { + entries[_CA] = input[_CA]; + } + return entries; +}, "se_ProvidedContext"); +var se_ProvidedContextsListType = /* @__PURE__ */ __name((input, context) => { + const entries = {}; + let counter = 1; + for (const entry of input) { + if (entry === null) { + continue; + } + const memberEntries = se_ProvidedContext(entry, context); + Object.entries(memberEntries).forEach(([key, value]) => { + entries[`member.${counter}.${key}`] = value; + }); + counter++; + } + return entries; +}, "se_ProvidedContextsListType"); +var se_Tag = /* @__PURE__ */ __name((input, context) => { + const entries = {}; + if (input[_K] != null) { + entries[_K] = input[_K]; + } + if (input[_Va] != null) { + entries[_Va] = input[_Va]; + } + return entries; +}, "se_Tag"); +var se_tagKeyListType = /* @__PURE__ */ __name((input, context) => { + const entries = {}; + let counter = 1; + for (const entry of input) { + if (entry === null) { + continue; + } + entries[`member.${counter}`] = entry; + counter++; + } + return entries; +}, "se_tagKeyListType"); +var se_tagListType = /* @__PURE__ */ __name((input, context) => { + 
const entries = {}; + let counter = 1; + for (const entry of input) { + if (entry === null) { + continue; + } + const memberEntries = se_Tag(entry, context); + Object.entries(memberEntries).forEach(([key, value]) => { + entries[`member.${counter}.${key}`] = value; + }); + counter++; + } + return entries; +}, "se_tagListType"); +var de_AssumedRoleUser = /* @__PURE__ */ __name((output, context) => { + const contents = {}; + if (output[_ARI] != null) { + contents[_ARI] = (0, import_smithy_client3.expectString)(output[_ARI]); + } + if (output[_Ar] != null) { + contents[_Ar] = (0, import_smithy_client3.expectString)(output[_Ar]); + } + return contents; +}, "de_AssumedRoleUser"); +var de_AssumeRoleResponse = /* @__PURE__ */ __name((output, context) => { + const contents = {}; + if (output[_C] != null) { + contents[_C] = de_Credentials(output[_C], context); + } + if (output[_ARU] != null) { + contents[_ARU] = de_AssumedRoleUser(output[_ARU], context); + } + if (output[_PPS] != null) { + contents[_PPS] = (0, import_smithy_client3.strictParseInt32)(output[_PPS]); + } + if (output[_SI] != null) { + contents[_SI] = (0, import_smithy_client3.expectString)(output[_SI]); + } + return contents; +}, "de_AssumeRoleResponse"); +var de_AssumeRoleWithWebIdentityResponse = /* @__PURE__ */ __name((output, context) => { + const contents = {}; + if (output[_C] != null) { + contents[_C] = de_Credentials(output[_C], context); + } + if (output[_SFWIT] != null) { + contents[_SFWIT] = (0, import_smithy_client3.expectString)(output[_SFWIT]); + } + if (output[_ARU] != null) { + contents[_ARU] = de_AssumedRoleUser(output[_ARU], context); + } + if (output[_PPS] != null) { + contents[_PPS] = (0, import_smithy_client3.strictParseInt32)(output[_PPS]); + } + if (output[_Pr] != null) { + contents[_Pr] = (0, import_smithy_client3.expectString)(output[_Pr]); + } + if (output[_Au] != null) { + contents[_Au] = (0, import_smithy_client3.expectString)(output[_Au]); + } + if (output[_SI] != null) { + 
contents[_SI] = (0, import_smithy_client3.expectString)(output[_SI]); + } + return contents; +}, "de_AssumeRoleWithWebIdentityResponse"); +var de_Credentials = /* @__PURE__ */ __name((output, context) => { + const contents = {}; + if (output[_AKI] != null) { + contents[_AKI] = (0, import_smithy_client3.expectString)(output[_AKI]); + } + if (output[_SAK] != null) { + contents[_SAK] = (0, import_smithy_client3.expectString)(output[_SAK]); + } + if (output[_ST] != null) { + contents[_ST] = (0, import_smithy_client3.expectString)(output[_ST]); + } + if (output[_E] != null) { + contents[_E] = (0, import_smithy_client3.expectNonNull)((0, import_smithy_client3.parseRfc3339DateTimeWithOffset)(output[_E])); + } + return contents; +}, "de_Credentials"); +var de_ExpiredTokenException = /* @__PURE__ */ __name((output, context) => { + const contents = {}; + if (output[_m] != null) { + contents[_m] = (0, import_smithy_client3.expectString)(output[_m]); + } + return contents; +}, "de_ExpiredTokenException"); +var de_IDPCommunicationErrorException = /* @__PURE__ */ __name((output, context) => { + const contents = {}; + if (output[_m] != null) { + contents[_m] = (0, import_smithy_client3.expectString)(output[_m]); + } + return contents; +}, "de_IDPCommunicationErrorException"); +var de_IDPRejectedClaimException = /* @__PURE__ */ __name((output, context) => { + const contents = {}; + if (output[_m] != null) { + contents[_m] = (0, import_smithy_client3.expectString)(output[_m]); + } + return contents; +}, "de_IDPRejectedClaimException"); +var de_InvalidIdentityTokenException = /* @__PURE__ */ __name((output, context) => { + const contents = {}; + if (output[_m] != null) { + contents[_m] = (0, import_smithy_client3.expectString)(output[_m]); + } + return contents; +}, "de_InvalidIdentityTokenException"); +var de_MalformedPolicyDocumentException = /* @__PURE__ */ __name((output, context) => { + const contents = {}; + if (output[_m] != null) { + contents[_m] = (0, 
import_smithy_client3.expectString)(output[_m]); + } + return contents; +}, "de_MalformedPolicyDocumentException"); +var de_PackedPolicyTooLargeException = /* @__PURE__ */ __name((output, context) => { + const contents = {}; + if (output[_m] != null) { + contents[_m] = (0, import_smithy_client3.expectString)(output[_m]); + } + return contents; +}, "de_PackedPolicyTooLargeException"); +var de_RegionDisabledException = /* @__PURE__ */ __name((output, context) => { + const contents = {}; + if (output[_m] != null) { + contents[_m] = (0, import_smithy_client3.expectString)(output[_m]); + } + return contents; +}, "de_RegionDisabledException"); +var deserializeMetadata = /* @__PURE__ */ __name((output) => ({ + httpStatusCode: output.statusCode, + requestId: output.headers["x-amzn-requestid"] ?? output.headers["x-amzn-request-id"] ?? output.headers["x-amz-request-id"], + extendedRequestId: output.headers["x-amz-id-2"], + cfId: output.headers["x-amz-cf-id"] +}), "deserializeMetadata"); +var throwDefaultError = (0, import_smithy_client3.withBaseException)(STSServiceException); +var buildHttpRpcRequest = /* @__PURE__ */ __name(async (context, headers, path, resolvedHostname, body) => { + const { hostname, protocol = "https", port, path: basePath } = await context.endpoint(); + const contents = { + protocol, + hostname, + port, + method: "POST", + path: basePath.endsWith("/") ? 
basePath.slice(0, -1) + path : basePath + path, + headers + }; + if (resolvedHostname !== void 0) { + contents.hostname = resolvedHostname; + } + if (body !== void 0) { + contents.body = body; + } + return new import_protocol_http.HttpRequest(contents); +}, "buildHttpRpcRequest"); +var SHARED_HEADERS = { + "content-type": "application/x-www-form-urlencoded" +}; +var _ = "2011-06-15"; +var _A = "Action"; +var _AKI = "AccessKeyId"; +var _AR = "AssumeRole"; +var _ARI = "AssumedRoleId"; +var _ARU = "AssumedRoleUser"; +var _ARWWI = "AssumeRoleWithWebIdentity"; +var _Ar = "Arn"; +var _Au = "Audience"; +var _C = "Credentials"; +var _CA = "ContextAssertion"; +var _DS = "DurationSeconds"; +var _E = "Expiration"; +var _EI = "ExternalId"; +var _K = "Key"; +var _P = "Policy"; +var _PA = "PolicyArns"; +var _PAr = "ProviderArn"; +var _PC = "ProvidedContexts"; +var _PI = "ProviderId"; +var _PPS = "PackedPolicySize"; +var _Pr = "Provider"; +var _RA = "RoleArn"; +var _RSN = "RoleSessionName"; +var _SAK = "SecretAccessKey"; +var _SFWIT = "SubjectFromWebIdentityToken"; +var _SI = "SourceIdentity"; +var _SN = "SerialNumber"; +var _ST = "SessionToken"; +var _T = "Tags"; +var _TC = "TokenCode"; +var _TTK = "TransitiveTagKeys"; +var _V = "Version"; +var _Va = "Value"; +var _WIT = "WebIdentityToken"; +var _a = "arn"; +var _m = "message"; +var buildFormUrlencodedString = /* @__PURE__ */ __name((formEntries) => Object.entries(formEntries).map(([key, value]) => (0, import_smithy_client3.extendedEncodeURIComponent)(key) + "=" + (0, import_smithy_client3.extendedEncodeURIComponent)(value)).join("&"), "buildFormUrlencodedString"); +var loadQueryErrorCode = /* @__PURE__ */ __name((output, data) => { + if (data.Error?.Code !== void 0) { + return data.Error.Code; + } + if (output.statusCode == 404) { + return "NotFound"; + } +}, "loadQueryErrorCode"); + +// src/submodules/sts/commands/AssumeRoleCommand.ts +var AssumeRoleCommand = class extends 
import_smithy_client4.Command.classBuilder().ep(import_EndpointParameters.commonParams).m(function(Command, cs, config, o) { + return [ + (0, import_middleware_serde.getSerdePlugin)(config, this.serialize, this.deserialize), + (0, import_middleware_endpoint.getEndpointPlugin)(config, Command.getEndpointParameterInstructions()) + ]; +}).s("AWSSecurityTokenServiceV20110615", "AssumeRole", {}).n("STSClient", "AssumeRoleCommand").f(void 0, AssumeRoleResponseFilterSensitiveLog).ser(se_AssumeRoleCommand).de(de_AssumeRoleCommand).build() { + static { + __name(this, "AssumeRoleCommand"); + } +}; + +// src/submodules/sts/commands/AssumeRoleWithWebIdentityCommand.ts +var import_middleware_endpoint2 = require("@smithy/middleware-endpoint"); +var import_middleware_serde2 = require("@smithy/middleware-serde"); +var import_smithy_client5 = require("@smithy/smithy-client"); +var import_EndpointParameters2 = require("./endpoint/EndpointParameters"); +var AssumeRoleWithWebIdentityCommand = class extends import_smithy_client5.Command.classBuilder().ep(import_EndpointParameters2.commonParams).m(function(Command, cs, config, o) { + return [ + (0, import_middleware_serde2.getSerdePlugin)(config, this.serialize, this.deserialize), + (0, import_middleware_endpoint2.getEndpointPlugin)(config, Command.getEndpointParameterInstructions()) + ]; +}).s("AWSSecurityTokenServiceV20110615", "AssumeRoleWithWebIdentity", {}).n("STSClient", "AssumeRoleWithWebIdentityCommand").f(AssumeRoleWithWebIdentityRequestFilterSensitiveLog, AssumeRoleWithWebIdentityResponseFilterSensitiveLog).ser(se_AssumeRoleWithWebIdentityCommand).de(de_AssumeRoleWithWebIdentityCommand).build() { + static { + __name(this, "AssumeRoleWithWebIdentityCommand"); + } +}; + +// src/submodules/sts/STS.ts +var import_STSClient = require("./STSClient"); +var commands = { + AssumeRoleCommand, + AssumeRoleWithWebIdentityCommand +}; +var STS = class extends import_STSClient.STSClient { + static { + __name(this, "STS"); + } +}; +(0, 
import_smithy_client6.createAggregatedClient)(commands, STS); + +// src/submodules/sts/index.ts +var import_EndpointParameters3 = require("./endpoint/EndpointParameters"); + +// src/submodules/sts/defaultStsRoleAssumers.ts +var import_client = require("@aws-sdk/core/client"); +var ASSUME_ROLE_DEFAULT_REGION = "us-east-1"; +var getAccountIdFromAssumedRoleUser = /* @__PURE__ */ __name((assumedRoleUser) => { + if (typeof assumedRoleUser?.Arn === "string") { + const arnComponents = assumedRoleUser.Arn.split(":"); + if (arnComponents.length > 4 && arnComponents[4] !== "") { + return arnComponents[4]; + } + } + return void 0; +}, "getAccountIdFromAssumedRoleUser"); +var resolveRegion = /* @__PURE__ */ __name(async (_region, _parentRegion, credentialProviderLogger) => { + const region = typeof _region === "function" ? await _region() : _region; + const parentRegion = typeof _parentRegion === "function" ? await _parentRegion() : _parentRegion; + credentialProviderLogger?.debug?.( + "@aws-sdk/client-sts::resolveRegion", + "accepting first of:", + `${region} (provider)`, + `${parentRegion} (parent client)`, + `${ASSUME_ROLE_DEFAULT_REGION} (STS default)` + ); + return region ?? parentRegion ?? 
ASSUME_ROLE_DEFAULT_REGION; +}, "resolveRegion"); +var getDefaultRoleAssumer = /* @__PURE__ */ __name((stsOptions, STSClient3) => { + let stsClient; + let closureSourceCreds; + return async (sourceCreds, params) => { + closureSourceCreds = sourceCreds; + if (!stsClient) { + const { + logger = stsOptions?.parentClientConfig?.logger, + region, + requestHandler = stsOptions?.parentClientConfig?.requestHandler, + credentialProviderLogger + } = stsOptions; + const resolvedRegion = await resolveRegion( + region, + stsOptions?.parentClientConfig?.region, + credentialProviderLogger + ); + const isCompatibleRequestHandler = !isH2(requestHandler); + stsClient = new STSClient3({ + profile: stsOptions?.parentClientConfig?.profile, + // A hack to make sts client uses the credential in current closure. + credentialDefaultProvider: /* @__PURE__ */ __name(() => async () => closureSourceCreds, "credentialDefaultProvider"), + region: resolvedRegion, + requestHandler: isCompatibleRequestHandler ? requestHandler : void 0, + logger + }); + } + const { Credentials: Credentials2, AssumedRoleUser: AssumedRoleUser2 } = await stsClient.send(new AssumeRoleCommand(params)); + if (!Credentials2 || !Credentials2.AccessKeyId || !Credentials2.SecretAccessKey) { + throw new Error(`Invalid response from STS.assumeRole call with role ${params.RoleArn}`); + } + const accountId = getAccountIdFromAssumedRoleUser(AssumedRoleUser2); + const credentials = { + accessKeyId: Credentials2.AccessKeyId, + secretAccessKey: Credentials2.SecretAccessKey, + sessionToken: Credentials2.SessionToken, + expiration: Credentials2.Expiration, + // TODO(credentialScope): access normally when shape is updated. 
+ ...Credentials2.CredentialScope && { credentialScope: Credentials2.CredentialScope }, + ...accountId && { accountId } + }; + (0, import_client.setCredentialFeature)(credentials, "CREDENTIALS_STS_ASSUME_ROLE", "i"); + return credentials; + }; +}, "getDefaultRoleAssumer"); +var getDefaultRoleAssumerWithWebIdentity = /* @__PURE__ */ __name((stsOptions, STSClient3) => { + let stsClient; + return async (params) => { + if (!stsClient) { + const { + logger = stsOptions?.parentClientConfig?.logger, + region, + requestHandler = stsOptions?.parentClientConfig?.requestHandler, + credentialProviderLogger + } = stsOptions; + const resolvedRegion = await resolveRegion( + region, + stsOptions?.parentClientConfig?.region, + credentialProviderLogger + ); + const isCompatibleRequestHandler = !isH2(requestHandler); + stsClient = new STSClient3({ + profile: stsOptions?.parentClientConfig?.profile, + region: resolvedRegion, + requestHandler: isCompatibleRequestHandler ? requestHandler : void 0, + logger + }); + } + const { Credentials: Credentials2, AssumedRoleUser: AssumedRoleUser2 } = await stsClient.send(new AssumeRoleWithWebIdentityCommand(params)); + if (!Credentials2 || !Credentials2.AccessKeyId || !Credentials2.SecretAccessKey) { + throw new Error(`Invalid response from STS.assumeRoleWithWebIdentity call with role ${params.RoleArn}`); + } + const accountId = getAccountIdFromAssumedRoleUser(AssumedRoleUser2); + const credentials = { + accessKeyId: Credentials2.AccessKeyId, + secretAccessKey: Credentials2.SecretAccessKey, + sessionToken: Credentials2.SessionToken, + expiration: Credentials2.Expiration, + // TODO(credentialScope): access normally when shape is updated. 
+ ...Credentials2.CredentialScope && { credentialScope: Credentials2.CredentialScope }, + ...accountId && { accountId } + }; + if (accountId) { + (0, import_client.setCredentialFeature)(credentials, "RESOLVED_ACCOUNT_ID", "T"); + } + (0, import_client.setCredentialFeature)(credentials, "CREDENTIALS_STS_ASSUME_ROLE_WEB_ID", "k"); + return credentials; + }; +}, "getDefaultRoleAssumerWithWebIdentity"); +var isH2 = /* @__PURE__ */ __name((requestHandler) => { + return requestHandler?.metadata?.handlerProtocol === "h2"; +}, "isH2"); + +// src/submodules/sts/defaultRoleAssumers.ts +var import_STSClient2 = require("./STSClient"); +var getCustomizableStsClientCtor = /* @__PURE__ */ __name((baseCtor, customizations) => { + if (!customizations) return baseCtor; + else + return class CustomizableSTSClient extends baseCtor { + static { + __name(this, "CustomizableSTSClient"); + } + constructor(config) { + super(config); + for (const customization of customizations) { + this.middlewareStack.use(customization); + } + } + }; +}, "getCustomizableStsClientCtor"); +var getDefaultRoleAssumer2 = /* @__PURE__ */ __name((stsOptions = {}, stsPlugins) => getDefaultRoleAssumer(stsOptions, getCustomizableStsClientCtor(import_STSClient2.STSClient, stsPlugins)), "getDefaultRoleAssumer"); +var getDefaultRoleAssumerWithWebIdentity2 = /* @__PURE__ */ __name((stsOptions = {}, stsPlugins) => getDefaultRoleAssumerWithWebIdentity(stsOptions, getCustomizableStsClientCtor(import_STSClient2.STSClient, stsPlugins)), "getDefaultRoleAssumerWithWebIdentity"); +var decorateDefaultCredentialProvider = /* @__PURE__ */ __name((provider) => (input) => provider({ + roleAssumer: getDefaultRoleAssumer2(input), + roleAssumerWithWebIdentity: getDefaultRoleAssumerWithWebIdentity2(input), + ...input +}), "decorateDefaultCredentialProvider"); +// Annotate the CommonJS export names for ESM import in node: +0 && (module.exports = { + AssumeRoleCommand, + AssumeRoleResponseFilterSensitiveLog, + 
AssumeRoleWithWebIdentityCommand, + AssumeRoleWithWebIdentityRequestFilterSensitiveLog, + AssumeRoleWithWebIdentityResponseFilterSensitiveLog, + ClientInputEndpointParameters, + CredentialsFilterSensitiveLog, + ExpiredTokenException, + IDPCommunicationErrorException, + IDPRejectedClaimException, + InvalidIdentityTokenException, + MalformedPolicyDocumentException, + PackedPolicyTooLargeException, + RegionDisabledException, + STS, + STSServiceException, + decorateDefaultCredentialProvider, + getDefaultRoleAssumer, + getDefaultRoleAssumerWithWebIdentity, + ...require("./STSClient") +}); diff --git a/amplify/functions/downloadDocument/node_modules/@aws-sdk/nested-clients/dist-cjs/submodules/sts/runtimeConfig.browser.js b/amplify/functions/downloadDocument/node_modules/@aws-sdk/nested-clients/dist-cjs/submodules/sts/runtimeConfig.browser.js new file mode 100644 index 0000000..63cedb1 --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@aws-sdk/nested-clients/dist-cjs/submodules/sts/runtimeConfig.browser.js @@ -0,0 +1,39 @@ +"use strict"; +Object.defineProperty(exports, "__esModule", { value: true }); +exports.getRuntimeConfig = void 0; +const tslib_1 = require("tslib"); +const package_json_1 = tslib_1.__importDefault(require("../../../package.json")); +const sha256_browser_1 = require("@aws-crypto/sha256-browser"); +const util_user_agent_browser_1 = require("@aws-sdk/util-user-agent-browser"); +const config_resolver_1 = require("@smithy/config-resolver"); +const fetch_http_handler_1 = require("@smithy/fetch-http-handler"); +const invalid_dependency_1 = require("@smithy/invalid-dependency"); +const util_body_length_browser_1 = require("@smithy/util-body-length-browser"); +const util_retry_1 = require("@smithy/util-retry"); +const runtimeConfig_shared_1 = require("./runtimeConfig.shared"); +const smithy_client_1 = require("@smithy/smithy-client"); +const util_defaults_mode_browser_1 = require("@smithy/util-defaults-mode-browser"); +const getRuntimeConfig = 
(config) => { + const defaultsMode = (0, util_defaults_mode_browser_1.resolveDefaultsModeConfig)(config); + const defaultConfigProvider = () => defaultsMode().then(smithy_client_1.loadConfigsForDefaultMode); + const clientSharedValues = (0, runtimeConfig_shared_1.getRuntimeConfig)(config); + return { + ...clientSharedValues, + ...config, + runtime: "browser", + defaultsMode, + bodyLengthChecker: config?.bodyLengthChecker ?? util_body_length_browser_1.calculateBodyLength, + credentialDefaultProvider: config?.credentialDefaultProvider ?? ((_) => () => Promise.reject(new Error("Credential is missing"))), + defaultUserAgentProvider: config?.defaultUserAgentProvider ?? + (0, util_user_agent_browser_1.createDefaultUserAgentProvider)({ serviceId: clientSharedValues.serviceId, clientVersion: package_json_1.default.version }), + maxAttempts: config?.maxAttempts ?? util_retry_1.DEFAULT_MAX_ATTEMPTS, + region: config?.region ?? (0, invalid_dependency_1.invalidProvider)("Region is missing"), + requestHandler: fetch_http_handler_1.FetchHttpHandler.create(config?.requestHandler ?? defaultConfigProvider), + retryMode: config?.retryMode ?? (async () => (await defaultConfigProvider()).retryMode || util_retry_1.DEFAULT_RETRY_MODE), + sha256: config?.sha256 ?? sha256_browser_1.Sha256, + streamCollector: config?.streamCollector ?? fetch_http_handler_1.streamCollector, + useDualstackEndpoint: config?.useDualstackEndpoint ?? (() => Promise.resolve(config_resolver_1.DEFAULT_USE_DUALSTACK_ENDPOINT)), + useFipsEndpoint: config?.useFipsEndpoint ?? 
(() => Promise.resolve(config_resolver_1.DEFAULT_USE_FIPS_ENDPOINT)), + }; +}; +exports.getRuntimeConfig = getRuntimeConfig; diff --git a/amplify/functions/downloadDocument/node_modules/@aws-sdk/nested-clients/dist-cjs/submodules/sts/runtimeConfig.js b/amplify/functions/downloadDocument/node_modules/@aws-sdk/nested-clients/dist-cjs/submodules/sts/runtimeConfig.js new file mode 100644 index 0000000..de3b0e7 --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@aws-sdk/nested-clients/dist-cjs/submodules/sts/runtimeConfig.js @@ -0,0 +1,65 @@ +"use strict"; +Object.defineProperty(exports, "__esModule", { value: true }); +exports.getRuntimeConfig = void 0; +const tslib_1 = require("tslib"); +const package_json_1 = tslib_1.__importDefault(require("../../../package.json")); +const core_1 = require("@aws-sdk/core"); +const util_user_agent_node_1 = require("@aws-sdk/util-user-agent-node"); +const config_resolver_1 = require("@smithy/config-resolver"); +const core_2 = require("@smithy/core"); +const hash_node_1 = require("@smithy/hash-node"); +const middleware_retry_1 = require("@smithy/middleware-retry"); +const node_config_provider_1 = require("@smithy/node-config-provider"); +const node_http_handler_1 = require("@smithy/node-http-handler"); +const util_body_length_node_1 = require("@smithy/util-body-length-node"); +const util_retry_1 = require("@smithy/util-retry"); +const runtimeConfig_shared_1 = require("./runtimeConfig.shared"); +const smithy_client_1 = require("@smithy/smithy-client"); +const util_defaults_mode_node_1 = require("@smithy/util-defaults-mode-node"); +const smithy_client_2 = require("@smithy/smithy-client"); +const getRuntimeConfig = (config) => { + (0, smithy_client_2.emitWarningIfUnsupportedVersion)(process.version); + const defaultsMode = (0, util_defaults_mode_node_1.resolveDefaultsModeConfig)(config); + const defaultConfigProvider = () => defaultsMode().then(smithy_client_1.loadConfigsForDefaultMode); + const clientSharedValues = (0, 
runtimeConfig_shared_1.getRuntimeConfig)(config); + (0, core_1.emitWarningIfUnsupportedVersion)(process.version); + const profileConfig = { profile: config?.profile }; + return { + ...clientSharedValues, + ...config, + runtime: "node", + defaultsMode, + authSchemePreference: config?.authSchemePreference ?? (0, node_config_provider_1.loadConfig)(core_1.NODE_AUTH_SCHEME_PREFERENCE_OPTIONS, profileConfig), + bodyLengthChecker: config?.bodyLengthChecker ?? util_body_length_node_1.calculateBodyLength, + defaultUserAgentProvider: config?.defaultUserAgentProvider ?? + (0, util_user_agent_node_1.createDefaultUserAgentProvider)({ serviceId: clientSharedValues.serviceId, clientVersion: package_json_1.default.version }), + httpAuthSchemes: config?.httpAuthSchemes ?? [ + { + schemeId: "aws.auth#sigv4", + identityProvider: (ipc) => ipc.getIdentityProvider("aws.auth#sigv4") || + (async (idProps) => await config.credentialDefaultProvider(idProps?.__config || {})()), + signer: new core_1.AwsSdkSigV4Signer(), + }, + { + schemeId: "smithy.api#noAuth", + identityProvider: (ipc) => ipc.getIdentityProvider("smithy.api#noAuth") || (async () => ({})), + signer: new core_2.NoAuthSigner(), + }, + ], + maxAttempts: config?.maxAttempts ?? (0, node_config_provider_1.loadConfig)(middleware_retry_1.NODE_MAX_ATTEMPT_CONFIG_OPTIONS, config), + region: config?.region ?? + (0, node_config_provider_1.loadConfig)(config_resolver_1.NODE_REGION_CONFIG_OPTIONS, { ...config_resolver_1.NODE_REGION_CONFIG_FILE_OPTIONS, ...profileConfig }), + requestHandler: node_http_handler_1.NodeHttpHandler.create(config?.requestHandler ?? defaultConfigProvider), + retryMode: config?.retryMode ?? + (0, node_config_provider_1.loadConfig)({ + ...middleware_retry_1.NODE_RETRY_MODE_CONFIG_OPTIONS, + default: async () => (await defaultConfigProvider()).retryMode || util_retry_1.DEFAULT_RETRY_MODE, + }, config), + sha256: config?.sha256 ?? hash_node_1.Hash.bind(null, "sha256"), + streamCollector: config?.streamCollector ?? 
node_http_handler_1.streamCollector, + useDualstackEndpoint: config?.useDualstackEndpoint ?? (0, node_config_provider_1.loadConfig)(config_resolver_1.NODE_USE_DUALSTACK_ENDPOINT_CONFIG_OPTIONS, profileConfig), + useFipsEndpoint: config?.useFipsEndpoint ?? (0, node_config_provider_1.loadConfig)(config_resolver_1.NODE_USE_FIPS_ENDPOINT_CONFIG_OPTIONS, profileConfig), + userAgentAppId: config?.userAgentAppId ?? (0, node_config_provider_1.loadConfig)(util_user_agent_node_1.NODE_APP_ID_CONFIG_OPTIONS, profileConfig), + }; +}; +exports.getRuntimeConfig = getRuntimeConfig; diff --git a/amplify/functions/downloadDocument/node_modules/@aws-sdk/nested-clients/dist-cjs/submodules/sts/runtimeConfig.native.js b/amplify/functions/downloadDocument/node_modules/@aws-sdk/nested-clients/dist-cjs/submodules/sts/runtimeConfig.native.js new file mode 100644 index 0000000..34c5f8e --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@aws-sdk/nested-clients/dist-cjs/submodules/sts/runtimeConfig.native.js @@ -0,0 +1,15 @@ +"use strict"; +Object.defineProperty(exports, "__esModule", { value: true }); +exports.getRuntimeConfig = void 0; +const sha256_js_1 = require("@aws-crypto/sha256-js"); +const runtimeConfig_browser_1 = require("./runtimeConfig.browser"); +const getRuntimeConfig = (config) => { + const browserDefaults = (0, runtimeConfig_browser_1.getRuntimeConfig)(config); + return { + ...browserDefaults, + ...config, + runtime: "react-native", + sha256: config?.sha256 ?? 
sha256_js_1.Sha256, + }; +}; +exports.getRuntimeConfig = getRuntimeConfig; diff --git a/amplify/functions/downloadDocument/node_modules/@aws-sdk/nested-clients/dist-cjs/submodules/sts/runtimeConfig.shared.js b/amplify/functions/downloadDocument/node_modules/@aws-sdk/nested-clients/dist-cjs/submodules/sts/runtimeConfig.shared.js new file mode 100644 index 0000000..1e03d8b --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@aws-sdk/nested-clients/dist-cjs/submodules/sts/runtimeConfig.shared.js @@ -0,0 +1,40 @@ +"use strict"; +Object.defineProperty(exports, "__esModule", { value: true }); +exports.getRuntimeConfig = void 0; +const core_1 = require("@aws-sdk/core"); +const core_2 = require("@smithy/core"); +const smithy_client_1 = require("@smithy/smithy-client"); +const url_parser_1 = require("@smithy/url-parser"); +const util_base64_1 = require("@smithy/util-base64"); +const util_utf8_1 = require("@smithy/util-utf8"); +const httpAuthSchemeProvider_1 = require("./auth/httpAuthSchemeProvider"); +const endpointResolver_1 = require("./endpoint/endpointResolver"); +const getRuntimeConfig = (config) => { + return { + apiVersion: "2011-06-15", + base64Decoder: config?.base64Decoder ?? util_base64_1.fromBase64, + base64Encoder: config?.base64Encoder ?? util_base64_1.toBase64, + disableHostPrefix: config?.disableHostPrefix ?? false, + endpointProvider: config?.endpointProvider ?? endpointResolver_1.defaultEndpointResolver, + extensions: config?.extensions ?? [], + httpAuthSchemeProvider: config?.httpAuthSchemeProvider ?? httpAuthSchemeProvider_1.defaultSTSHttpAuthSchemeProvider, + httpAuthSchemes: config?.httpAuthSchemes ?? 
[ + { + schemeId: "aws.auth#sigv4", + identityProvider: (ipc) => ipc.getIdentityProvider("aws.auth#sigv4"), + signer: new core_1.AwsSdkSigV4Signer(), + }, + { + schemeId: "smithy.api#noAuth", + identityProvider: (ipc) => ipc.getIdentityProvider("smithy.api#noAuth") || (async () => ({})), + signer: new core_2.NoAuthSigner(), + }, + ], + logger: config?.logger ?? new smithy_client_1.NoOpLogger(), + serviceId: config?.serviceId ?? "STS", + urlParser: config?.urlParser ?? url_parser_1.parseUrl, + utf8Decoder: config?.utf8Decoder ?? util_utf8_1.fromUtf8, + utf8Encoder: config?.utf8Encoder ?? util_utf8_1.toUtf8, + }; +}; +exports.getRuntimeConfig = getRuntimeConfig; diff --git a/amplify/functions/downloadDocument/node_modules/@aws-sdk/nested-clients/dist-cjs/submodules/sts/runtimeExtensions.js b/amplify/functions/downloadDocument/node_modules/@aws-sdk/nested-clients/dist-cjs/submodules/sts/runtimeExtensions.js new file mode 100644 index 0000000..a50ebec --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@aws-sdk/nested-clients/dist-cjs/submodules/sts/runtimeExtensions.js @@ -0,0 +1,13 @@ +"use strict"; +Object.defineProperty(exports, "__esModule", { value: true }); +exports.resolveRuntimeExtensions = void 0; +const region_config_resolver_1 = require("@aws-sdk/region-config-resolver"); +const protocol_http_1 = require("@smithy/protocol-http"); +const smithy_client_1 = require("@smithy/smithy-client"); +const httpAuthExtensionConfiguration_1 = require("./auth/httpAuthExtensionConfiguration"); +const resolveRuntimeExtensions = (runtimeConfig, extensions) => { + const extensionConfiguration = Object.assign((0, region_config_resolver_1.getAwsRegionExtensionConfiguration)(runtimeConfig), (0, smithy_client_1.getDefaultExtensionConfiguration)(runtimeConfig), (0, protocol_http_1.getHttpHandlerExtensionConfiguration)(runtimeConfig), (0, httpAuthExtensionConfiguration_1.getHttpAuthExtensionConfiguration)(runtimeConfig)); + extensions.forEach((extension) => 
extension.configure(extensionConfiguration)); + return Object.assign(runtimeConfig, (0, region_config_resolver_1.resolveAwsRegionExtensionConfiguration)(extensionConfiguration), (0, smithy_client_1.resolveDefaultRuntimeConfig)(extensionConfiguration), (0, protocol_http_1.resolveHttpHandlerRuntimeConfig)(extensionConfiguration), (0, httpAuthExtensionConfiguration_1.resolveHttpAuthRuntimeConfig)(extensionConfiguration)); +}; +exports.resolveRuntimeExtensions = resolveRuntimeExtensions; diff --git a/amplify/functions/downloadDocument/node_modules/@aws-sdk/nested-clients/dist-es/index.js b/amplify/functions/downloadDocument/node_modules/@aws-sdk/nested-clients/dist-es/index.js new file mode 100644 index 0000000..cb0ff5c --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@aws-sdk/nested-clients/dist-es/index.js @@ -0,0 +1 @@ +export {}; diff --git a/amplify/functions/downloadDocument/node_modules/@aws-sdk/nested-clients/dist-es/submodules/sso-oidc/SSOOIDC.js b/amplify/functions/downloadDocument/node_modules/@aws-sdk/nested-clients/dist-es/submodules/sso-oidc/SSOOIDC.js new file mode 100644 index 0000000..bcb161f --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@aws-sdk/nested-clients/dist-es/submodules/sso-oidc/SSOOIDC.js @@ -0,0 +1,9 @@ +import { createAggregatedClient } from "@smithy/smithy-client"; +import { CreateTokenCommand } from "./commands/CreateTokenCommand"; +import { SSOOIDCClient } from "./SSOOIDCClient"; +const commands = { + CreateTokenCommand, +}; +export class SSOOIDC extends SSOOIDCClient { +} +createAggregatedClient(commands, SSOOIDC); diff --git a/amplify/functions/downloadDocument/node_modules/@aws-sdk/nested-clients/dist-es/submodules/sso-oidc/SSOOIDCClient.js b/amplify/functions/downloadDocument/node_modules/@aws-sdk/nested-clients/dist-es/submodules/sso-oidc/SSOOIDCClient.js new file mode 100644 index 0000000..003cad7 --- /dev/null +++ 
b/amplify/functions/downloadDocument/node_modules/@aws-sdk/nested-clients/dist-es/submodules/sso-oidc/SSOOIDCClient.js @@ -0,0 +1,48 @@ +import { getHostHeaderPlugin, resolveHostHeaderConfig, } from "@aws-sdk/middleware-host-header"; +import { getLoggerPlugin } from "@aws-sdk/middleware-logger"; +import { getRecursionDetectionPlugin } from "@aws-sdk/middleware-recursion-detection"; +import { getUserAgentPlugin, resolveUserAgentConfig, } from "@aws-sdk/middleware-user-agent"; +import { resolveRegionConfig } from "@smithy/config-resolver"; +import { DefaultIdentityProviderConfig, getHttpAuthSchemeEndpointRuleSetPlugin, getHttpSigningPlugin, } from "@smithy/core"; +import { getContentLengthPlugin } from "@smithy/middleware-content-length"; +import { resolveEndpointConfig } from "@smithy/middleware-endpoint"; +import { getRetryPlugin, resolveRetryConfig } from "@smithy/middleware-retry"; +import { Client as __Client, } from "@smithy/smithy-client"; +import { defaultSSOOIDCHttpAuthSchemeParametersProvider, resolveHttpAuthSchemeConfig, } from "./auth/httpAuthSchemeProvider"; +import { resolveClientEndpointParameters, } from "./endpoint/EndpointParameters"; +import { getRuntimeConfig as __getRuntimeConfig } from "./runtimeConfig"; +import { resolveRuntimeExtensions } from "./runtimeExtensions"; +export { __Client }; +export class SSOOIDCClient extends __Client { + config; + constructor(...[configuration]) { + const _config_0 = __getRuntimeConfig(configuration || {}); + super(_config_0); + this.initConfig = _config_0; + const _config_1 = resolveClientEndpointParameters(_config_0); + const _config_2 = resolveUserAgentConfig(_config_1); + const _config_3 = resolveRetryConfig(_config_2); + const _config_4 = resolveRegionConfig(_config_3); + const _config_5 = resolveHostHeaderConfig(_config_4); + const _config_6 = resolveEndpointConfig(_config_5); + const _config_7 = resolveHttpAuthSchemeConfig(_config_6); + const _config_8 = resolveRuntimeExtensions(_config_7, 
configuration?.extensions || []); + this.config = _config_8; + this.middlewareStack.use(getUserAgentPlugin(this.config)); + this.middlewareStack.use(getRetryPlugin(this.config)); + this.middlewareStack.use(getContentLengthPlugin(this.config)); + this.middlewareStack.use(getHostHeaderPlugin(this.config)); + this.middlewareStack.use(getLoggerPlugin(this.config)); + this.middlewareStack.use(getRecursionDetectionPlugin(this.config)); + this.middlewareStack.use(getHttpAuthSchemeEndpointRuleSetPlugin(this.config, { + httpAuthSchemeParametersProvider: defaultSSOOIDCHttpAuthSchemeParametersProvider, + identityProviderConfigProvider: async (config) => new DefaultIdentityProviderConfig({ + "aws.auth#sigv4": config.credentials, + }), + })); + this.middlewareStack.use(getHttpSigningPlugin(this.config)); + } + destroy() { + super.destroy(); + } +} diff --git a/amplify/functions/downloadDocument/node_modules/@aws-sdk/nested-clients/dist-es/submodules/sso-oidc/auth/httpAuthExtensionConfiguration.js b/amplify/functions/downloadDocument/node_modules/@aws-sdk/nested-clients/dist-es/submodules/sso-oidc/auth/httpAuthExtensionConfiguration.js new file mode 100644 index 0000000..2ba1d48 --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@aws-sdk/nested-clients/dist-es/submodules/sso-oidc/auth/httpAuthExtensionConfiguration.js @@ -0,0 +1,38 @@ +export const getHttpAuthExtensionConfiguration = (runtimeConfig) => { + const _httpAuthSchemes = runtimeConfig.httpAuthSchemes; + let _httpAuthSchemeProvider = runtimeConfig.httpAuthSchemeProvider; + let _credentials = runtimeConfig.credentials; + return { + setHttpAuthScheme(httpAuthScheme) { + const index = _httpAuthSchemes.findIndex((scheme) => scheme.schemeId === httpAuthScheme.schemeId); + if (index === -1) { + _httpAuthSchemes.push(httpAuthScheme); + } + else { + _httpAuthSchemes.splice(index, 1, httpAuthScheme); + } + }, + httpAuthSchemes() { + return _httpAuthSchemes; + }, + 
setHttpAuthSchemeProvider(httpAuthSchemeProvider) { + _httpAuthSchemeProvider = httpAuthSchemeProvider; + }, + httpAuthSchemeProvider() { + return _httpAuthSchemeProvider; + }, + setCredentials(credentials) { + _credentials = credentials; + }, + credentials() { + return _credentials; + }, + }; +}; +export const resolveHttpAuthRuntimeConfig = (config) => { + return { + httpAuthSchemes: config.httpAuthSchemes(), + httpAuthSchemeProvider: config.httpAuthSchemeProvider(), + credentials: config.credentials(), + }; +}; diff --git a/amplify/functions/downloadDocument/node_modules/@aws-sdk/nested-clients/dist-es/submodules/sso-oidc/auth/httpAuthSchemeProvider.js b/amplify/functions/downloadDocument/node_modules/@aws-sdk/nested-clients/dist-es/submodules/sso-oidc/auth/httpAuthSchemeProvider.js new file mode 100644 index 0000000..a5e9eab --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@aws-sdk/nested-clients/dist-es/submodules/sso-oidc/auth/httpAuthSchemeProvider.js @@ -0,0 +1,50 @@ +import { resolveAwsSdkSigV4Config, } from "@aws-sdk/core"; +import { getSmithyContext, normalizeProvider } from "@smithy/util-middleware"; +export const defaultSSOOIDCHttpAuthSchemeParametersProvider = async (config, context, input) => { + return { + operation: getSmithyContext(context).operation, + region: (await normalizeProvider(config.region)()) || + (() => { + throw new Error("expected `region` to be configured for `aws.auth#sigv4`"); + })(), + }; +}; +function createAwsAuthSigv4HttpAuthOption(authParameters) { + return { + schemeId: "aws.auth#sigv4", + signingProperties: { + name: "sso-oauth", + region: authParameters.region, + }, + propertiesExtractor: (config, context) => ({ + signingProperties: { + config, + context, + }, + }), + }; +} +function createSmithyApiNoAuthHttpAuthOption(authParameters) { + return { + schemeId: "smithy.api#noAuth", + }; +} +export const defaultSSOOIDCHttpAuthSchemeProvider = (authParameters) => { + const options = []; + switch 
(authParameters.operation) { + case "CreateToken": { + options.push(createSmithyApiNoAuthHttpAuthOption(authParameters)); + break; + } + default: { + options.push(createAwsAuthSigv4HttpAuthOption(authParameters)); + } + } + return options; +}; +export const resolveHttpAuthSchemeConfig = (config) => { + const config_0 = resolveAwsSdkSigV4Config(config); + return Object.assign(config_0, { + authSchemePreference: normalizeProvider(config.authSchemePreference ?? []), + }); +}; diff --git a/amplify/functions/downloadDocument/node_modules/@aws-sdk/nested-clients/dist-es/submodules/sso-oidc/commands/CreateTokenCommand.js b/amplify/functions/downloadDocument/node_modules/@aws-sdk/nested-clients/dist-es/submodules/sso-oidc/commands/CreateTokenCommand.js new file mode 100644 index 0000000..7863247 --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@aws-sdk/nested-clients/dist-es/submodules/sso-oidc/commands/CreateTokenCommand.js @@ -0,0 +1,23 @@ +import { getEndpointPlugin } from "@smithy/middleware-endpoint"; +import { getSerdePlugin } from "@smithy/middleware-serde"; +import { Command as $Command } from "@smithy/smithy-client"; +import { commonParams } from "../endpoint/EndpointParameters"; +import { CreateTokenRequestFilterSensitiveLog, CreateTokenResponseFilterSensitiveLog, } from "../models/models_0"; +import { de_CreateTokenCommand, se_CreateTokenCommand } from "../protocols/Aws_restJson1"; +export { $Command }; +export class CreateTokenCommand extends $Command + .classBuilder() + .ep(commonParams) + .m(function (Command, cs, config, o) { + return [ + getSerdePlugin(config, this.serialize, this.deserialize), + getEndpointPlugin(config, Command.getEndpointParameterInstructions()), + ]; +}) + .s("AWSSSOOIDCService", "CreateToken", {}) + .n("SSOOIDCClient", "CreateTokenCommand") + .f(CreateTokenRequestFilterSensitiveLog, CreateTokenResponseFilterSensitiveLog) + .ser(se_CreateTokenCommand) + .de(de_CreateTokenCommand) + .build() { +} diff --git 
a/amplify/functions/downloadDocument/node_modules/@aws-sdk/nested-clients/dist-es/submodules/sso-oidc/commands/index.js b/amplify/functions/downloadDocument/node_modules/@aws-sdk/nested-clients/dist-es/submodules/sso-oidc/commands/index.js new file mode 100644 index 0000000..09214ca --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@aws-sdk/nested-clients/dist-es/submodules/sso-oidc/commands/index.js @@ -0,0 +1 @@ +export * from "./CreateTokenCommand"; diff --git a/amplify/functions/downloadDocument/node_modules/@aws-sdk/nested-clients/dist-es/submodules/sso-oidc/endpoint/EndpointParameters.js b/amplify/functions/downloadDocument/node_modules/@aws-sdk/nested-clients/dist-es/submodules/sso-oidc/endpoint/EndpointParameters.js new file mode 100644 index 0000000..2b26c44 --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@aws-sdk/nested-clients/dist-es/submodules/sso-oidc/endpoint/EndpointParameters.js @@ -0,0 +1,13 @@ +export const resolveClientEndpointParameters = (options) => { + return Object.assign(options, { + useDualstackEndpoint: options.useDualstackEndpoint ?? false, + useFipsEndpoint: options.useFipsEndpoint ?? 
false, + defaultSigningName: "sso-oauth", + }); +}; +export const commonParams = { + UseFIPS: { type: "builtInParams", name: "useFipsEndpoint" }, + Endpoint: { type: "builtInParams", name: "endpoint" }, + Region: { type: "builtInParams", name: "region" }, + UseDualStack: { type: "builtInParams", name: "useDualstackEndpoint" }, +}; diff --git a/amplify/functions/downloadDocument/node_modules/@aws-sdk/nested-clients/dist-es/submodules/sso-oidc/endpoint/endpointResolver.js b/amplify/functions/downloadDocument/node_modules/@aws-sdk/nested-clients/dist-es/submodules/sso-oidc/endpoint/endpointResolver.js new file mode 100644 index 0000000..0ac15bc --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@aws-sdk/nested-clients/dist-es/submodules/sso-oidc/endpoint/endpointResolver.js @@ -0,0 +1,14 @@ +import { awsEndpointFunctions } from "@aws-sdk/util-endpoints"; +import { customEndpointFunctions, EndpointCache, resolveEndpoint } from "@smithy/util-endpoints"; +import { ruleSet } from "./ruleset"; +const cache = new EndpointCache({ + size: 50, + params: ["Endpoint", "Region", "UseDualStack", "UseFIPS"], +}); +export const defaultEndpointResolver = (endpointParams, context = {}) => { + return cache.get(endpointParams, () => resolveEndpoint(ruleSet, { + endpointParams: endpointParams, + logger: context.logger, + })); +}; +customEndpointFunctions.aws = awsEndpointFunctions; diff --git a/amplify/functions/downloadDocument/node_modules/@aws-sdk/nested-clients/dist-es/submodules/sso-oidc/endpoint/ruleset.js b/amplify/functions/downloadDocument/node_modules/@aws-sdk/nested-clients/dist-es/submodules/sso-oidc/endpoint/ruleset.js new file mode 100644 index 0000000..040ea39 --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@aws-sdk/nested-clients/dist-es/submodules/sso-oidc/endpoint/ruleset.js @@ -0,0 +1,4 @@ +const u = "required", v = "fn", w = "argv", x = "ref"; +const a = true, b = "isSet", c = "booleanEquals", d = "error", e = "endpoint", f = 
"tree", g = "PartitionResult", h = "getAttr", i = { [u]: false, "type": "String" }, j = { [u]: true, "default": false, "type": "Boolean" }, k = { [x]: "Endpoint" }, l = { [v]: c, [w]: [{ [x]: "UseFIPS" }, true] }, m = { [v]: c, [w]: [{ [x]: "UseDualStack" }, true] }, n = {}, o = { [v]: h, [w]: [{ [x]: g }, "supportsFIPS"] }, p = { [x]: g }, q = { [v]: c, [w]: [true, { [v]: h, [w]: [p, "supportsDualStack"] }] }, r = [l], s = [m], t = [{ [x]: "Region" }]; +const _data = { version: "1.0", parameters: { Region: i, UseDualStack: j, UseFIPS: j, Endpoint: i }, rules: [{ conditions: [{ [v]: b, [w]: [k] }], rules: [{ conditions: r, error: "Invalid Configuration: FIPS and custom endpoint are not supported", type: d }, { conditions: s, error: "Invalid Configuration: Dualstack and custom endpoint are not supported", type: d }, { endpoint: { url: k, properties: n, headers: n }, type: e }], type: f }, { conditions: [{ [v]: b, [w]: t }], rules: [{ conditions: [{ [v]: "aws.partition", [w]: t, assign: g }], rules: [{ conditions: [l, m], rules: [{ conditions: [{ [v]: c, [w]: [a, o] }, q], rules: [{ endpoint: { url: "https://oidc-fips.{Region}.{PartitionResult#dualStackDnsSuffix}", properties: n, headers: n }, type: e }], type: f }, { error: "FIPS and DualStack are enabled, but this partition does not support one or both", type: d }], type: f }, { conditions: r, rules: [{ conditions: [{ [v]: c, [w]: [o, a] }], rules: [{ conditions: [{ [v]: "stringEquals", [w]: [{ [v]: h, [w]: [p, "name"] }, "aws-us-gov"] }], endpoint: { url: "https://oidc.{Region}.amazonaws.com", properties: n, headers: n }, type: e }, { endpoint: { url: "https://oidc-fips.{Region}.{PartitionResult#dnsSuffix}", properties: n, headers: n }, type: e }], type: f }, { error: "FIPS is enabled but this partition does not support FIPS", type: d }], type: f }, { conditions: s, rules: [{ conditions: [q], rules: [{ endpoint: { url: "https://oidc.{Region}.{PartitionResult#dualStackDnsSuffix}", properties: n, headers: n }, type: 
e }], type: f }, { error: "DualStack is enabled but this partition does not support DualStack", type: d }], type: f }, { endpoint: { url: "https://oidc.{Region}.{PartitionResult#dnsSuffix}", properties: n, headers: n }, type: e }], type: f }], type: f }, { error: "Invalid Configuration: Missing Region", type: d }] }; +export const ruleSet = _data; diff --git a/amplify/functions/downloadDocument/node_modules/@aws-sdk/nested-clients/dist-es/submodules/sso-oidc/extensionConfiguration.js b/amplify/functions/downloadDocument/node_modules/@aws-sdk/nested-clients/dist-es/submodules/sso-oidc/extensionConfiguration.js new file mode 100644 index 0000000..cb0ff5c --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@aws-sdk/nested-clients/dist-es/submodules/sso-oidc/extensionConfiguration.js @@ -0,0 +1 @@ +export {}; diff --git a/amplify/functions/downloadDocument/node_modules/@aws-sdk/nested-clients/dist-es/submodules/sso-oidc/index.js b/amplify/functions/downloadDocument/node_modules/@aws-sdk/nested-clients/dist-es/submodules/sso-oidc/index.js new file mode 100644 index 0000000..c2894a3 --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@aws-sdk/nested-clients/dist-es/submodules/sso-oidc/index.js @@ -0,0 +1,5 @@ +export * from "./SSOOIDCClient"; +export * from "./SSOOIDC"; +export * from "./commands"; +export * from "./models"; +export { SSOOIDCServiceException } from "./models/SSOOIDCServiceException"; diff --git a/amplify/functions/downloadDocument/node_modules/@aws-sdk/nested-clients/dist-es/submodules/sso-oidc/models/SSOOIDCServiceException.js b/amplify/functions/downloadDocument/node_modules/@aws-sdk/nested-clients/dist-es/submodules/sso-oidc/models/SSOOIDCServiceException.js new file mode 100644 index 0000000..176cec3 --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@aws-sdk/nested-clients/dist-es/submodules/sso-oidc/models/SSOOIDCServiceException.js @@ -0,0 +1,8 @@ +import { ServiceException as __ServiceException, } 
from "@smithy/smithy-client"; +export { __ServiceException }; +export class SSOOIDCServiceException extends __ServiceException { + constructor(options) { + super(options); + Object.setPrototypeOf(this, SSOOIDCServiceException.prototype); + } +} diff --git a/amplify/functions/downloadDocument/node_modules/@aws-sdk/nested-clients/dist-es/submodules/sso-oidc/models/index.js b/amplify/functions/downloadDocument/node_modules/@aws-sdk/nested-clients/dist-es/submodules/sso-oidc/models/index.js new file mode 100644 index 0000000..09c5d6e --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@aws-sdk/nested-clients/dist-es/submodules/sso-oidc/models/index.js @@ -0,0 +1 @@ +export * from "./models_0"; diff --git a/amplify/functions/downloadDocument/node_modules/@aws-sdk/nested-clients/dist-es/submodules/sso-oidc/models/models_0.js b/amplify/functions/downloadDocument/node_modules/@aws-sdk/nested-clients/dist-es/submodules/sso-oidc/models/models_0.js new file mode 100644 index 0000000..b350ef1 --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@aws-sdk/nested-clients/dist-es/submodules/sso-oidc/models/models_0.js @@ -0,0 +1,190 @@ +import { SENSITIVE_STRING } from "@smithy/smithy-client"; +import { SSOOIDCServiceException as __BaseException } from "./SSOOIDCServiceException"; +export class AccessDeniedException extends __BaseException { + name = "AccessDeniedException"; + $fault = "client"; + error; + error_description; + constructor(opts) { + super({ + name: "AccessDeniedException", + $fault: "client", + ...opts, + }); + Object.setPrototypeOf(this, AccessDeniedException.prototype); + this.error = opts.error; + this.error_description = opts.error_description; + } +} +export class AuthorizationPendingException extends __BaseException { + name = "AuthorizationPendingException"; + $fault = "client"; + error; + error_description; + constructor(opts) { + super({ + name: "AuthorizationPendingException", + $fault: "client", + ...opts, + }); + 
Object.setPrototypeOf(this, AuthorizationPendingException.prototype); + this.error = opts.error; + this.error_description = opts.error_description; + } +} +export const CreateTokenRequestFilterSensitiveLog = (obj) => ({ + ...obj, + ...(obj.clientSecret && { clientSecret: SENSITIVE_STRING }), + ...(obj.refreshToken && { refreshToken: SENSITIVE_STRING }), + ...(obj.codeVerifier && { codeVerifier: SENSITIVE_STRING }), +}); +export const CreateTokenResponseFilterSensitiveLog = (obj) => ({ + ...obj, + ...(obj.accessToken && { accessToken: SENSITIVE_STRING }), + ...(obj.refreshToken && { refreshToken: SENSITIVE_STRING }), + ...(obj.idToken && { idToken: SENSITIVE_STRING }), +}); +export class ExpiredTokenException extends __BaseException { + name = "ExpiredTokenException"; + $fault = "client"; + error; + error_description; + constructor(opts) { + super({ + name: "ExpiredTokenException", + $fault: "client", + ...opts, + }); + Object.setPrototypeOf(this, ExpiredTokenException.prototype); + this.error = opts.error; + this.error_description = opts.error_description; + } +} +export class InternalServerException extends __BaseException { + name = "InternalServerException"; + $fault = "server"; + error; + error_description; + constructor(opts) { + super({ + name: "InternalServerException", + $fault: "server", + ...opts, + }); + Object.setPrototypeOf(this, InternalServerException.prototype); + this.error = opts.error; + this.error_description = opts.error_description; + } +} +export class InvalidClientException extends __BaseException { + name = "InvalidClientException"; + $fault = "client"; + error; + error_description; + constructor(opts) { + super({ + name: "InvalidClientException", + $fault: "client", + ...opts, + }); + Object.setPrototypeOf(this, InvalidClientException.prototype); + this.error = opts.error; + this.error_description = opts.error_description; + } +} +export class InvalidGrantException extends __BaseException { + name = "InvalidGrantException"; + $fault = 
"client"; + error; + error_description; + constructor(opts) { + super({ + name: "InvalidGrantException", + $fault: "client", + ...opts, + }); + Object.setPrototypeOf(this, InvalidGrantException.prototype); + this.error = opts.error; + this.error_description = opts.error_description; + } +} +export class InvalidRequestException extends __BaseException { + name = "InvalidRequestException"; + $fault = "client"; + error; + error_description; + constructor(opts) { + super({ + name: "InvalidRequestException", + $fault: "client", + ...opts, + }); + Object.setPrototypeOf(this, InvalidRequestException.prototype); + this.error = opts.error; + this.error_description = opts.error_description; + } +} +export class InvalidScopeException extends __BaseException { + name = "InvalidScopeException"; + $fault = "client"; + error; + error_description; + constructor(opts) { + super({ + name: "InvalidScopeException", + $fault: "client", + ...opts, + }); + Object.setPrototypeOf(this, InvalidScopeException.prototype); + this.error = opts.error; + this.error_description = opts.error_description; + } +} +export class SlowDownException extends __BaseException { + name = "SlowDownException"; + $fault = "client"; + error; + error_description; + constructor(opts) { + super({ + name: "SlowDownException", + $fault: "client", + ...opts, + }); + Object.setPrototypeOf(this, SlowDownException.prototype); + this.error = opts.error; + this.error_description = opts.error_description; + } +} +export class UnauthorizedClientException extends __BaseException { + name = "UnauthorizedClientException"; + $fault = "client"; + error; + error_description; + constructor(opts) { + super({ + name: "UnauthorizedClientException", + $fault: "client", + ...opts, + }); + Object.setPrototypeOf(this, UnauthorizedClientException.prototype); + this.error = opts.error; + this.error_description = opts.error_description; + } +} +export class UnsupportedGrantTypeException extends __BaseException { + name = 
"UnsupportedGrantTypeException"; + $fault = "client"; + error; + error_description; + constructor(opts) { + super({ + name: "UnsupportedGrantTypeException", + $fault: "client", + ...opts, + }); + Object.setPrototypeOf(this, UnsupportedGrantTypeException.prototype); + this.error = opts.error; + this.error_description = opts.error_description; + } +} diff --git a/amplify/functions/downloadDocument/node_modules/@aws-sdk/nested-clients/dist-es/submodules/sso-oidc/protocols/Aws_restJson1.js b/amplify/functions/downloadDocument/node_modules/@aws-sdk/nested-clients/dist-es/submodules/sso-oidc/protocols/Aws_restJson1.js new file mode 100644 index 0000000..b58850b --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@aws-sdk/nested-clients/dist-es/submodules/sso-oidc/protocols/Aws_restJson1.js @@ -0,0 +1,255 @@ +import { loadRestJsonErrorCode, parseJsonBody as parseBody, parseJsonErrorBody as parseErrorBody } from "@aws-sdk/core"; +import { requestBuilder as rb } from "@smithy/core"; +import { _json, collectBody, decorateServiceException as __decorateServiceException, expectInt32 as __expectInt32, expectNonNull as __expectNonNull, expectObject as __expectObject, expectString as __expectString, map, take, withBaseException, } from "@smithy/smithy-client"; +import { AccessDeniedException, AuthorizationPendingException, ExpiredTokenException, InternalServerException, InvalidClientException, InvalidGrantException, InvalidRequestException, InvalidScopeException, SlowDownException, UnauthorizedClientException, UnsupportedGrantTypeException, } from "../models/models_0"; +import { SSOOIDCServiceException as __BaseException } from "../models/SSOOIDCServiceException"; +export const se_CreateTokenCommand = async (input, context) => { + const b = rb(input, context); + const headers = { + "content-type": "application/json", + }; + b.bp("/token"); + let body; + body = JSON.stringify(take(input, { + clientId: [], + clientSecret: [], + code: [], + codeVerifier: [], + 
deviceCode: [], + grantType: [], + redirectUri: [], + refreshToken: [], + scope: (_) => _json(_), + })); + b.m("POST").h(headers).b(body); + return b.build(); +}; +export const de_CreateTokenCommand = async (output, context) => { + if (output.statusCode !== 200 && output.statusCode >= 300) { + return de_CommandError(output, context); + } + const contents = map({ + $metadata: deserializeMetadata(output), + }); + const data = __expectNonNull(__expectObject(await parseBody(output.body, context)), "body"); + const doc = take(data, { + accessToken: __expectString, + expiresIn: __expectInt32, + idToken: __expectString, + refreshToken: __expectString, + tokenType: __expectString, + }); + Object.assign(contents, doc); + return contents; +}; +const de_CommandError = async (output, context) => { + const parsedOutput = { + ...output, + body: await parseErrorBody(output.body, context), + }; + const errorCode = loadRestJsonErrorCode(output, parsedOutput.body); + switch (errorCode) { + case "AccessDeniedException": + case "com.amazonaws.ssooidc#AccessDeniedException": + throw await de_AccessDeniedExceptionRes(parsedOutput, context); + case "AuthorizationPendingException": + case "com.amazonaws.ssooidc#AuthorizationPendingException": + throw await de_AuthorizationPendingExceptionRes(parsedOutput, context); + case "ExpiredTokenException": + case "com.amazonaws.ssooidc#ExpiredTokenException": + throw await de_ExpiredTokenExceptionRes(parsedOutput, context); + case "InternalServerException": + case "com.amazonaws.ssooidc#InternalServerException": + throw await de_InternalServerExceptionRes(parsedOutput, context); + case "InvalidClientException": + case "com.amazonaws.ssooidc#InvalidClientException": + throw await de_InvalidClientExceptionRes(parsedOutput, context); + case "InvalidGrantException": + case "com.amazonaws.ssooidc#InvalidGrantException": + throw await de_InvalidGrantExceptionRes(parsedOutput, context); + case "InvalidRequestException": + case 
"com.amazonaws.ssooidc#InvalidRequestException": + throw await de_InvalidRequestExceptionRes(parsedOutput, context); + case "InvalidScopeException": + case "com.amazonaws.ssooidc#InvalidScopeException": + throw await de_InvalidScopeExceptionRes(parsedOutput, context); + case "SlowDownException": + case "com.amazonaws.ssooidc#SlowDownException": + throw await de_SlowDownExceptionRes(parsedOutput, context); + case "UnauthorizedClientException": + case "com.amazonaws.ssooidc#UnauthorizedClientException": + throw await de_UnauthorizedClientExceptionRes(parsedOutput, context); + case "UnsupportedGrantTypeException": + case "com.amazonaws.ssooidc#UnsupportedGrantTypeException": + throw await de_UnsupportedGrantTypeExceptionRes(parsedOutput, context); + default: + const parsedBody = parsedOutput.body; + return throwDefaultError({ + output, + parsedBody, + errorCode, + }); + } +}; +const throwDefaultError = withBaseException(__BaseException); +const de_AccessDeniedExceptionRes = async (parsedOutput, context) => { + const contents = map({}); + const data = parsedOutput.body; + const doc = take(data, { + error: __expectString, + error_description: __expectString, + }); + Object.assign(contents, doc); + const exception = new AccessDeniedException({ + $metadata: deserializeMetadata(parsedOutput), + ...contents, + }); + return __decorateServiceException(exception, parsedOutput.body); +}; +const de_AuthorizationPendingExceptionRes = async (parsedOutput, context) => { + const contents = map({}); + const data = parsedOutput.body; + const doc = take(data, { + error: __expectString, + error_description: __expectString, + }); + Object.assign(contents, doc); + const exception = new AuthorizationPendingException({ + $metadata: deserializeMetadata(parsedOutput), + ...contents, + }); + return __decorateServiceException(exception, parsedOutput.body); +}; +const de_ExpiredTokenExceptionRes = async (parsedOutput, context) => { + const contents = map({}); + const data = parsedOutput.body; + 
const doc = take(data, { + error: __expectString, + error_description: __expectString, + }); + Object.assign(contents, doc); + const exception = new ExpiredTokenException({ + $metadata: deserializeMetadata(parsedOutput), + ...contents, + }); + return __decorateServiceException(exception, parsedOutput.body); +}; +const de_InternalServerExceptionRes = async (parsedOutput, context) => { + const contents = map({}); + const data = parsedOutput.body; + const doc = take(data, { + error: __expectString, + error_description: __expectString, + }); + Object.assign(contents, doc); + const exception = new InternalServerException({ + $metadata: deserializeMetadata(parsedOutput), + ...contents, + }); + return __decorateServiceException(exception, parsedOutput.body); +}; +const de_InvalidClientExceptionRes = async (parsedOutput, context) => { + const contents = map({}); + const data = parsedOutput.body; + const doc = take(data, { + error: __expectString, + error_description: __expectString, + }); + Object.assign(contents, doc); + const exception = new InvalidClientException({ + $metadata: deserializeMetadata(parsedOutput), + ...contents, + }); + return __decorateServiceException(exception, parsedOutput.body); +}; +const de_InvalidGrantExceptionRes = async (parsedOutput, context) => { + const contents = map({}); + const data = parsedOutput.body; + const doc = take(data, { + error: __expectString, + error_description: __expectString, + }); + Object.assign(contents, doc); + const exception = new InvalidGrantException({ + $metadata: deserializeMetadata(parsedOutput), + ...contents, + }); + return __decorateServiceException(exception, parsedOutput.body); +}; +const de_InvalidRequestExceptionRes = async (parsedOutput, context) => { + const contents = map({}); + const data = parsedOutput.body; + const doc = take(data, { + error: __expectString, + error_description: __expectString, + }); + Object.assign(contents, doc); + const exception = new InvalidRequestException({ + $metadata: 
deserializeMetadata(parsedOutput), + ...contents, + }); + return __decorateServiceException(exception, parsedOutput.body); +}; +const de_InvalidScopeExceptionRes = async (parsedOutput, context) => { + const contents = map({}); + const data = parsedOutput.body; + const doc = take(data, { + error: __expectString, + error_description: __expectString, + }); + Object.assign(contents, doc); + const exception = new InvalidScopeException({ + $metadata: deserializeMetadata(parsedOutput), + ...contents, + }); + return __decorateServiceException(exception, parsedOutput.body); +}; +const de_SlowDownExceptionRes = async (parsedOutput, context) => { + const contents = map({}); + const data = parsedOutput.body; + const doc = take(data, { + error: __expectString, + error_description: __expectString, + }); + Object.assign(contents, doc); + const exception = new SlowDownException({ + $metadata: deserializeMetadata(parsedOutput), + ...contents, + }); + return __decorateServiceException(exception, parsedOutput.body); +}; +const de_UnauthorizedClientExceptionRes = async (parsedOutput, context) => { + const contents = map({}); + const data = parsedOutput.body; + const doc = take(data, { + error: __expectString, + error_description: __expectString, + }); + Object.assign(contents, doc); + const exception = new UnauthorizedClientException({ + $metadata: deserializeMetadata(parsedOutput), + ...contents, + }); + return __decorateServiceException(exception, parsedOutput.body); +}; +const de_UnsupportedGrantTypeExceptionRes = async (parsedOutput, context) => { + const contents = map({}); + const data = parsedOutput.body; + const doc = take(data, { + error: __expectString, + error_description: __expectString, + }); + Object.assign(contents, doc); + const exception = new UnsupportedGrantTypeException({ + $metadata: deserializeMetadata(parsedOutput), + ...contents, + }); + return __decorateServiceException(exception, parsedOutput.body); +}; +const deserializeMetadata = (output) => ({ + 
httpStatusCode: output.statusCode, + requestId: output.headers["x-amzn-requestid"] ?? output.headers["x-amzn-request-id"] ?? output.headers["x-amz-request-id"], + extendedRequestId: output.headers["x-amz-id-2"], + cfId: output.headers["x-amz-cf-id"], +}); +const collectBodyString = (streamBody, context) => collectBody(streamBody, context).then((body) => context.utf8Encoder(body)); diff --git a/amplify/functions/downloadDocument/node_modules/@aws-sdk/nested-clients/dist-es/submodules/sso-oidc/runtimeConfig.browser.js b/amplify/functions/downloadDocument/node_modules/@aws-sdk/nested-clients/dist-es/submodules/sso-oidc/runtimeConfig.browser.js new file mode 100644 index 0000000..94d7b87 --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@aws-sdk/nested-clients/dist-es/submodules/sso-oidc/runtimeConfig.browser.js @@ -0,0 +1,33 @@ +import packageInfo from "../../../package.json"; +import { Sha256 } from "@aws-crypto/sha256-browser"; +import { createDefaultUserAgentProvider } from "@aws-sdk/util-user-agent-browser"; +import { DEFAULT_USE_DUALSTACK_ENDPOINT, DEFAULT_USE_FIPS_ENDPOINT } from "@smithy/config-resolver"; +import { FetchHttpHandler as RequestHandler, streamCollector } from "@smithy/fetch-http-handler"; +import { invalidProvider } from "@smithy/invalid-dependency"; +import { calculateBodyLength } from "@smithy/util-body-length-browser"; +import { DEFAULT_MAX_ATTEMPTS, DEFAULT_RETRY_MODE } from "@smithy/util-retry"; +import { getRuntimeConfig as getSharedRuntimeConfig } from "./runtimeConfig.shared"; +import { loadConfigsForDefaultMode } from "@smithy/smithy-client"; +import { resolveDefaultsModeConfig } from "@smithy/util-defaults-mode-browser"; +export const getRuntimeConfig = (config) => { + const defaultsMode = resolveDefaultsModeConfig(config); + const defaultConfigProvider = () => defaultsMode().then(loadConfigsForDefaultMode); + const clientSharedValues = getSharedRuntimeConfig(config); + return { + ...clientSharedValues, + ...config, + 
runtime: "browser", + defaultsMode, + bodyLengthChecker: config?.bodyLengthChecker ?? calculateBodyLength, + defaultUserAgentProvider: config?.defaultUserAgentProvider ?? + createDefaultUserAgentProvider({ serviceId: clientSharedValues.serviceId, clientVersion: packageInfo.version }), + maxAttempts: config?.maxAttempts ?? DEFAULT_MAX_ATTEMPTS, + region: config?.region ?? invalidProvider("Region is missing"), + requestHandler: RequestHandler.create(config?.requestHandler ?? defaultConfigProvider), + retryMode: config?.retryMode ?? (async () => (await defaultConfigProvider()).retryMode || DEFAULT_RETRY_MODE), + sha256: config?.sha256 ?? Sha256, + streamCollector: config?.streamCollector ?? streamCollector, + useDualstackEndpoint: config?.useDualstackEndpoint ?? (() => Promise.resolve(DEFAULT_USE_DUALSTACK_ENDPOINT)), + useFipsEndpoint: config?.useFipsEndpoint ?? (() => Promise.resolve(DEFAULT_USE_FIPS_ENDPOINT)), + }; +}; diff --git a/amplify/functions/downloadDocument/node_modules/@aws-sdk/nested-clients/dist-es/submodules/sso-oidc/runtimeConfig.js b/amplify/functions/downloadDocument/node_modules/@aws-sdk/nested-clients/dist-es/submodules/sso-oidc/runtimeConfig.js new file mode 100644 index 0000000..32d413c --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@aws-sdk/nested-clients/dist-es/submodules/sso-oidc/runtimeConfig.js @@ -0,0 +1,46 @@ +import packageInfo from "../../../package.json"; +import { NODE_AUTH_SCHEME_PREFERENCE_OPTIONS, emitWarningIfUnsupportedVersion as awsCheckVersion } from "@aws-sdk/core"; +import { NODE_APP_ID_CONFIG_OPTIONS, createDefaultUserAgentProvider } from "@aws-sdk/util-user-agent-node"; +import { NODE_REGION_CONFIG_FILE_OPTIONS, NODE_REGION_CONFIG_OPTIONS, NODE_USE_DUALSTACK_ENDPOINT_CONFIG_OPTIONS, NODE_USE_FIPS_ENDPOINT_CONFIG_OPTIONS, } from "@smithy/config-resolver"; +import { Hash } from "@smithy/hash-node"; +import { NODE_MAX_ATTEMPT_CONFIG_OPTIONS, NODE_RETRY_MODE_CONFIG_OPTIONS } from 
"@smithy/middleware-retry"; +import { loadConfig as loadNodeConfig } from "@smithy/node-config-provider"; +import { NodeHttpHandler as RequestHandler, streamCollector } from "@smithy/node-http-handler"; +import { calculateBodyLength } from "@smithy/util-body-length-node"; +import { DEFAULT_RETRY_MODE } from "@smithy/util-retry"; +import { getRuntimeConfig as getSharedRuntimeConfig } from "./runtimeConfig.shared"; +import { loadConfigsForDefaultMode } from "@smithy/smithy-client"; +import { resolveDefaultsModeConfig } from "@smithy/util-defaults-mode-node"; +import { emitWarningIfUnsupportedVersion } from "@smithy/smithy-client"; +export const getRuntimeConfig = (config) => { + emitWarningIfUnsupportedVersion(process.version); + const defaultsMode = resolveDefaultsModeConfig(config); + const defaultConfigProvider = () => defaultsMode().then(loadConfigsForDefaultMode); + const clientSharedValues = getSharedRuntimeConfig(config); + awsCheckVersion(process.version); + const profileConfig = { profile: config?.profile }; + return { + ...clientSharedValues, + ...config, + runtime: "node", + defaultsMode, + authSchemePreference: config?.authSchemePreference ?? loadNodeConfig(NODE_AUTH_SCHEME_PREFERENCE_OPTIONS, profileConfig), + bodyLengthChecker: config?.bodyLengthChecker ?? calculateBodyLength, + defaultUserAgentProvider: config?.defaultUserAgentProvider ?? + createDefaultUserAgentProvider({ serviceId: clientSharedValues.serviceId, clientVersion: packageInfo.version }), + maxAttempts: config?.maxAttempts ?? loadNodeConfig(NODE_MAX_ATTEMPT_CONFIG_OPTIONS, config), + region: config?.region ?? + loadNodeConfig(NODE_REGION_CONFIG_OPTIONS, { ...NODE_REGION_CONFIG_FILE_OPTIONS, ...profileConfig }), + requestHandler: RequestHandler.create(config?.requestHandler ?? defaultConfigProvider), + retryMode: config?.retryMode ?? 
+ loadNodeConfig({ + ...NODE_RETRY_MODE_CONFIG_OPTIONS, + default: async () => (await defaultConfigProvider()).retryMode || DEFAULT_RETRY_MODE, + }, config), + sha256: config?.sha256 ?? Hash.bind(null, "sha256"), + streamCollector: config?.streamCollector ?? streamCollector, + useDualstackEndpoint: config?.useDualstackEndpoint ?? loadNodeConfig(NODE_USE_DUALSTACK_ENDPOINT_CONFIG_OPTIONS, profileConfig), + useFipsEndpoint: config?.useFipsEndpoint ?? loadNodeConfig(NODE_USE_FIPS_ENDPOINT_CONFIG_OPTIONS, profileConfig), + userAgentAppId: config?.userAgentAppId ?? loadNodeConfig(NODE_APP_ID_CONFIG_OPTIONS, profileConfig), + }; +}; diff --git a/amplify/functions/downloadDocument/node_modules/@aws-sdk/nested-clients/dist-es/submodules/sso-oidc/runtimeConfig.native.js b/amplify/functions/downloadDocument/node_modules/@aws-sdk/nested-clients/dist-es/submodules/sso-oidc/runtimeConfig.native.js new file mode 100644 index 0000000..0b54695 --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@aws-sdk/nested-clients/dist-es/submodules/sso-oidc/runtimeConfig.native.js @@ -0,0 +1,11 @@ +import { Sha256 } from "@aws-crypto/sha256-js"; +import { getRuntimeConfig as getBrowserRuntimeConfig } from "./runtimeConfig.browser"; +export const getRuntimeConfig = (config) => { + const browserDefaults = getBrowserRuntimeConfig(config); + return { + ...browserDefaults, + ...config, + runtime: "react-native", + sha256: config?.sha256 ?? 
Sha256, + }; +}; diff --git a/amplify/functions/downloadDocument/node_modules/@aws-sdk/nested-clients/dist-es/submodules/sso-oidc/runtimeConfig.shared.js b/amplify/functions/downloadDocument/node_modules/@aws-sdk/nested-clients/dist-es/submodules/sso-oidc/runtimeConfig.shared.js new file mode 100644 index 0000000..49a0235 --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@aws-sdk/nested-clients/dist-es/submodules/sso-oidc/runtimeConfig.shared.js @@ -0,0 +1,36 @@ +import { AwsSdkSigV4Signer } from "@aws-sdk/core"; +import { NoAuthSigner } from "@smithy/core"; +import { NoOpLogger } from "@smithy/smithy-client"; +import { parseUrl } from "@smithy/url-parser"; +import { fromBase64, toBase64 } from "@smithy/util-base64"; +import { fromUtf8, toUtf8 } from "@smithy/util-utf8"; +import { defaultSSOOIDCHttpAuthSchemeProvider } from "./auth/httpAuthSchemeProvider"; +import { defaultEndpointResolver } from "./endpoint/endpointResolver"; +export const getRuntimeConfig = (config) => { + return { + apiVersion: "2019-06-10", + base64Decoder: config?.base64Decoder ?? fromBase64, + base64Encoder: config?.base64Encoder ?? toBase64, + disableHostPrefix: config?.disableHostPrefix ?? false, + endpointProvider: config?.endpointProvider ?? defaultEndpointResolver, + extensions: config?.extensions ?? [], + httpAuthSchemeProvider: config?.httpAuthSchemeProvider ?? defaultSSOOIDCHttpAuthSchemeProvider, + httpAuthSchemes: config?.httpAuthSchemes ?? [ + { + schemeId: "aws.auth#sigv4", + identityProvider: (ipc) => ipc.getIdentityProvider("aws.auth#sigv4"), + signer: new AwsSdkSigV4Signer(), + }, + { + schemeId: "smithy.api#noAuth", + identityProvider: (ipc) => ipc.getIdentityProvider("smithy.api#noAuth") || (async () => ({})), + signer: new NoAuthSigner(), + }, + ], + logger: config?.logger ?? new NoOpLogger(), + serviceId: config?.serviceId ?? "SSO OIDC", + urlParser: config?.urlParser ?? parseUrl, + utf8Decoder: config?.utf8Decoder ?? 
fromUtf8, + utf8Encoder: config?.utf8Encoder ?? toUtf8, + }; +}; diff --git a/amplify/functions/downloadDocument/node_modules/@aws-sdk/nested-clients/dist-es/submodules/sso-oidc/runtimeExtensions.js b/amplify/functions/downloadDocument/node_modules/@aws-sdk/nested-clients/dist-es/submodules/sso-oidc/runtimeExtensions.js new file mode 100644 index 0000000..5b29695 --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@aws-sdk/nested-clients/dist-es/submodules/sso-oidc/runtimeExtensions.js @@ -0,0 +1,9 @@ +import { getAwsRegionExtensionConfiguration, resolveAwsRegionExtensionConfiguration, } from "@aws-sdk/region-config-resolver"; +import { getHttpHandlerExtensionConfiguration, resolveHttpHandlerRuntimeConfig } from "@smithy/protocol-http"; +import { getDefaultExtensionConfiguration, resolveDefaultRuntimeConfig } from "@smithy/smithy-client"; +import { getHttpAuthExtensionConfiguration, resolveHttpAuthRuntimeConfig } from "./auth/httpAuthExtensionConfiguration"; +export const resolveRuntimeExtensions = (runtimeConfig, extensions) => { + const extensionConfiguration = Object.assign(getAwsRegionExtensionConfiguration(runtimeConfig), getDefaultExtensionConfiguration(runtimeConfig), getHttpHandlerExtensionConfiguration(runtimeConfig), getHttpAuthExtensionConfiguration(runtimeConfig)); + extensions.forEach((extension) => extension.configure(extensionConfiguration)); + return Object.assign(runtimeConfig, resolveAwsRegionExtensionConfiguration(extensionConfiguration), resolveDefaultRuntimeConfig(extensionConfiguration), resolveHttpHandlerRuntimeConfig(extensionConfiguration), resolveHttpAuthRuntimeConfig(extensionConfiguration)); +}; diff --git a/amplify/functions/downloadDocument/node_modules/@aws-sdk/nested-clients/dist-es/submodules/sts/STS.js b/amplify/functions/downloadDocument/node_modules/@aws-sdk/nested-clients/dist-es/submodules/sts/STS.js new file mode 100644 index 0000000..71edef7 --- /dev/null +++ 
b/amplify/functions/downloadDocument/node_modules/@aws-sdk/nested-clients/dist-es/submodules/sts/STS.js @@ -0,0 +1,11 @@ +import { createAggregatedClient } from "@smithy/smithy-client"; +import { AssumeRoleCommand } from "./commands/AssumeRoleCommand"; +import { AssumeRoleWithWebIdentityCommand, } from "./commands/AssumeRoleWithWebIdentityCommand"; +import { STSClient } from "./STSClient"; +const commands = { + AssumeRoleCommand, + AssumeRoleWithWebIdentityCommand, +}; +export class STS extends STSClient { +} +createAggregatedClient(commands, STS); diff --git a/amplify/functions/downloadDocument/node_modules/@aws-sdk/nested-clients/dist-es/submodules/sts/STSClient.js b/amplify/functions/downloadDocument/node_modules/@aws-sdk/nested-clients/dist-es/submodules/sts/STSClient.js new file mode 100644 index 0000000..81b1040 --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@aws-sdk/nested-clients/dist-es/submodules/sts/STSClient.js @@ -0,0 +1,48 @@ +import { getHostHeaderPlugin, resolveHostHeaderConfig, } from "@aws-sdk/middleware-host-header"; +import { getLoggerPlugin } from "@aws-sdk/middleware-logger"; +import { getRecursionDetectionPlugin } from "@aws-sdk/middleware-recursion-detection"; +import { getUserAgentPlugin, resolveUserAgentConfig, } from "@aws-sdk/middleware-user-agent"; +import { resolveRegionConfig } from "@smithy/config-resolver"; +import { DefaultIdentityProviderConfig, getHttpAuthSchemeEndpointRuleSetPlugin, getHttpSigningPlugin, } from "@smithy/core"; +import { getContentLengthPlugin } from "@smithy/middleware-content-length"; +import { resolveEndpointConfig } from "@smithy/middleware-endpoint"; +import { getRetryPlugin, resolveRetryConfig } from "@smithy/middleware-retry"; +import { Client as __Client, } from "@smithy/smithy-client"; +import { defaultSTSHttpAuthSchemeParametersProvider, resolveHttpAuthSchemeConfig, } from "./auth/httpAuthSchemeProvider"; +import { resolveClientEndpointParameters, } from 
"./endpoint/EndpointParameters"; +import { getRuntimeConfig as __getRuntimeConfig } from "./runtimeConfig"; +import { resolveRuntimeExtensions } from "./runtimeExtensions"; +export { __Client }; +export class STSClient extends __Client { + config; + constructor(...[configuration]) { + const _config_0 = __getRuntimeConfig(configuration || {}); + super(_config_0); + this.initConfig = _config_0; + const _config_1 = resolveClientEndpointParameters(_config_0); + const _config_2 = resolveUserAgentConfig(_config_1); + const _config_3 = resolveRetryConfig(_config_2); + const _config_4 = resolveRegionConfig(_config_3); + const _config_5 = resolveHostHeaderConfig(_config_4); + const _config_6 = resolveEndpointConfig(_config_5); + const _config_7 = resolveHttpAuthSchemeConfig(_config_6); + const _config_8 = resolveRuntimeExtensions(_config_7, configuration?.extensions || []); + this.config = _config_8; + this.middlewareStack.use(getUserAgentPlugin(this.config)); + this.middlewareStack.use(getRetryPlugin(this.config)); + this.middlewareStack.use(getContentLengthPlugin(this.config)); + this.middlewareStack.use(getHostHeaderPlugin(this.config)); + this.middlewareStack.use(getLoggerPlugin(this.config)); + this.middlewareStack.use(getRecursionDetectionPlugin(this.config)); + this.middlewareStack.use(getHttpAuthSchemeEndpointRuleSetPlugin(this.config, { + httpAuthSchemeParametersProvider: defaultSTSHttpAuthSchemeParametersProvider, + identityProviderConfigProvider: async (config) => new DefaultIdentityProviderConfig({ + "aws.auth#sigv4": config.credentials, + }), + })); + this.middlewareStack.use(getHttpSigningPlugin(this.config)); + } + destroy() { + super.destroy(); + } +} diff --git a/amplify/functions/downloadDocument/node_modules/@aws-sdk/nested-clients/dist-es/submodules/sts/auth/httpAuthExtensionConfiguration.js b/amplify/functions/downloadDocument/node_modules/@aws-sdk/nested-clients/dist-es/submodules/sts/auth/httpAuthExtensionConfiguration.js new file mode 100644 index 
0000000..2ba1d48 --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@aws-sdk/nested-clients/dist-es/submodules/sts/auth/httpAuthExtensionConfiguration.js @@ -0,0 +1,38 @@ +export const getHttpAuthExtensionConfiguration = (runtimeConfig) => { + const _httpAuthSchemes = runtimeConfig.httpAuthSchemes; + let _httpAuthSchemeProvider = runtimeConfig.httpAuthSchemeProvider; + let _credentials = runtimeConfig.credentials; + return { + setHttpAuthScheme(httpAuthScheme) { + const index = _httpAuthSchemes.findIndex((scheme) => scheme.schemeId === httpAuthScheme.schemeId); + if (index === -1) { + _httpAuthSchemes.push(httpAuthScheme); + } + else { + _httpAuthSchemes.splice(index, 1, httpAuthScheme); + } + }, + httpAuthSchemes() { + return _httpAuthSchemes; + }, + setHttpAuthSchemeProvider(httpAuthSchemeProvider) { + _httpAuthSchemeProvider = httpAuthSchemeProvider; + }, + httpAuthSchemeProvider() { + return _httpAuthSchemeProvider; + }, + setCredentials(credentials) { + _credentials = credentials; + }, + credentials() { + return _credentials; + }, + }; +}; +export const resolveHttpAuthRuntimeConfig = (config) => { + return { + httpAuthSchemes: config.httpAuthSchemes(), + httpAuthSchemeProvider: config.httpAuthSchemeProvider(), + credentials: config.credentials(), + }; +}; diff --git a/amplify/functions/downloadDocument/node_modules/@aws-sdk/nested-clients/dist-es/submodules/sts/auth/httpAuthSchemeProvider.js b/amplify/functions/downloadDocument/node_modules/@aws-sdk/nested-clients/dist-es/submodules/sts/auth/httpAuthSchemeProvider.js new file mode 100644 index 0000000..3ea1e49 --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@aws-sdk/nested-clients/dist-es/submodules/sts/auth/httpAuthSchemeProvider.js @@ -0,0 +1,55 @@ +import { resolveAwsSdkSigV4Config, } from "@aws-sdk/core"; +import { getSmithyContext, normalizeProvider } from "@smithy/util-middleware"; +import { STSClient } from "../STSClient"; +export const 
defaultSTSHttpAuthSchemeParametersProvider = async (config, context, input) => { + return { + operation: getSmithyContext(context).operation, + region: (await normalizeProvider(config.region)()) || + (() => { + throw new Error("expected `region` to be configured for `aws.auth#sigv4`"); + })(), + }; +}; +function createAwsAuthSigv4HttpAuthOption(authParameters) { + return { + schemeId: "aws.auth#sigv4", + signingProperties: { + name: "sts", + region: authParameters.region, + }, + propertiesExtractor: (config, context) => ({ + signingProperties: { + config, + context, + }, + }), + }; +} +function createSmithyApiNoAuthHttpAuthOption(authParameters) { + return { + schemeId: "smithy.api#noAuth", + }; +} +export const defaultSTSHttpAuthSchemeProvider = (authParameters) => { + const options = []; + switch (authParameters.operation) { + case "AssumeRoleWithWebIdentity": { + options.push(createSmithyApiNoAuthHttpAuthOption(authParameters)); + break; + } + default: { + options.push(createAwsAuthSigv4HttpAuthOption(authParameters)); + } + } + return options; +}; +export const resolveStsAuthConfig = (input) => Object.assign(input, { + stsClientCtor: STSClient, +}); +export const resolveHttpAuthSchemeConfig = (config) => { + const config_0 = resolveStsAuthConfig(config); + const config_1 = resolveAwsSdkSigV4Config(config_0); + return Object.assign(config_1, { + authSchemePreference: normalizeProvider(config.authSchemePreference ?? 
[]), + }); +}; diff --git a/amplify/functions/downloadDocument/node_modules/@aws-sdk/nested-clients/dist-es/submodules/sts/commands/AssumeRoleCommand.js b/amplify/functions/downloadDocument/node_modules/@aws-sdk/nested-clients/dist-es/submodules/sts/commands/AssumeRoleCommand.js new file mode 100644 index 0000000..bcb8589 --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@aws-sdk/nested-clients/dist-es/submodules/sts/commands/AssumeRoleCommand.js @@ -0,0 +1,23 @@ +import { getEndpointPlugin } from "@smithy/middleware-endpoint"; +import { getSerdePlugin } from "@smithy/middleware-serde"; +import { Command as $Command } from "@smithy/smithy-client"; +import { commonParams } from "../endpoint/EndpointParameters"; +import { AssumeRoleResponseFilterSensitiveLog } from "../models/models_0"; +import { de_AssumeRoleCommand, se_AssumeRoleCommand } from "../protocols/Aws_query"; +export { $Command }; +export class AssumeRoleCommand extends $Command + .classBuilder() + .ep(commonParams) + .m(function (Command, cs, config, o) { + return [ + getSerdePlugin(config, this.serialize, this.deserialize), + getEndpointPlugin(config, Command.getEndpointParameterInstructions()), + ]; +}) + .s("AWSSecurityTokenServiceV20110615", "AssumeRole", {}) + .n("STSClient", "AssumeRoleCommand") + .f(void 0, AssumeRoleResponseFilterSensitiveLog) + .ser(se_AssumeRoleCommand) + .de(de_AssumeRoleCommand) + .build() { +} diff --git a/amplify/functions/downloadDocument/node_modules/@aws-sdk/nested-clients/dist-es/submodules/sts/commands/AssumeRoleWithWebIdentityCommand.js b/amplify/functions/downloadDocument/node_modules/@aws-sdk/nested-clients/dist-es/submodules/sts/commands/AssumeRoleWithWebIdentityCommand.js new file mode 100644 index 0000000..e4ecc2e --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@aws-sdk/nested-clients/dist-es/submodules/sts/commands/AssumeRoleWithWebIdentityCommand.js @@ -0,0 +1,23 @@ +import { getEndpointPlugin } from 
"@smithy/middleware-endpoint"; +import { getSerdePlugin } from "@smithy/middleware-serde"; +import { Command as $Command } from "@smithy/smithy-client"; +import { commonParams } from "../endpoint/EndpointParameters"; +import { AssumeRoleWithWebIdentityRequestFilterSensitiveLog, AssumeRoleWithWebIdentityResponseFilterSensitiveLog, } from "../models/models_0"; +import { de_AssumeRoleWithWebIdentityCommand, se_AssumeRoleWithWebIdentityCommand } from "../protocols/Aws_query"; +export { $Command }; +export class AssumeRoleWithWebIdentityCommand extends $Command + .classBuilder() + .ep(commonParams) + .m(function (Command, cs, config, o) { + return [ + getSerdePlugin(config, this.serialize, this.deserialize), + getEndpointPlugin(config, Command.getEndpointParameterInstructions()), + ]; +}) + .s("AWSSecurityTokenServiceV20110615", "AssumeRoleWithWebIdentity", {}) + .n("STSClient", "AssumeRoleWithWebIdentityCommand") + .f(AssumeRoleWithWebIdentityRequestFilterSensitiveLog, AssumeRoleWithWebIdentityResponseFilterSensitiveLog) + .ser(se_AssumeRoleWithWebIdentityCommand) + .de(de_AssumeRoleWithWebIdentityCommand) + .build() { +} diff --git a/amplify/functions/downloadDocument/node_modules/@aws-sdk/nested-clients/dist-es/submodules/sts/commands/index.js b/amplify/functions/downloadDocument/node_modules/@aws-sdk/nested-clients/dist-es/submodules/sts/commands/index.js new file mode 100644 index 0000000..0f200f5 --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@aws-sdk/nested-clients/dist-es/submodules/sts/commands/index.js @@ -0,0 +1,2 @@ +export * from "./AssumeRoleCommand"; +export * from "./AssumeRoleWithWebIdentityCommand"; diff --git a/amplify/functions/downloadDocument/node_modules/@aws-sdk/nested-clients/dist-es/submodules/sts/defaultRoleAssumers.js b/amplify/functions/downloadDocument/node_modules/@aws-sdk/nested-clients/dist-es/submodules/sts/defaultRoleAssumers.js new file mode 100644 index 0000000..aafb8c4 --- /dev/null +++ 
b/amplify/functions/downloadDocument/node_modules/@aws-sdk/nested-clients/dist-es/submodules/sts/defaultRoleAssumers.js @@ -0,0 +1,22 @@ +import { getDefaultRoleAssumer as StsGetDefaultRoleAssumer, getDefaultRoleAssumerWithWebIdentity as StsGetDefaultRoleAssumerWithWebIdentity, } from "./defaultStsRoleAssumers"; +import { STSClient } from "./STSClient"; +const getCustomizableStsClientCtor = (baseCtor, customizations) => { + if (!customizations) + return baseCtor; + else + return class CustomizableSTSClient extends baseCtor { + constructor(config) { + super(config); + for (const customization of customizations) { + this.middlewareStack.use(customization); + } + } + }; +}; +export const getDefaultRoleAssumer = (stsOptions = {}, stsPlugins) => StsGetDefaultRoleAssumer(stsOptions, getCustomizableStsClientCtor(STSClient, stsPlugins)); +export const getDefaultRoleAssumerWithWebIdentity = (stsOptions = {}, stsPlugins) => StsGetDefaultRoleAssumerWithWebIdentity(stsOptions, getCustomizableStsClientCtor(STSClient, stsPlugins)); +export const decorateDefaultCredentialProvider = (provider) => (input) => provider({ + roleAssumer: getDefaultRoleAssumer(input), + roleAssumerWithWebIdentity: getDefaultRoleAssumerWithWebIdentity(input), + ...input, +}); diff --git a/amplify/functions/downloadDocument/node_modules/@aws-sdk/nested-clients/dist-es/submodules/sts/defaultStsRoleAssumers.js b/amplify/functions/downloadDocument/node_modules/@aws-sdk/nested-clients/dist-es/submodules/sts/defaultStsRoleAssumers.js new file mode 100644 index 0000000..e7c7a90 --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@aws-sdk/nested-clients/dist-es/submodules/sts/defaultStsRoleAssumers.js @@ -0,0 +1,95 @@ +import { setCredentialFeature } from "@aws-sdk/core/client"; +import { AssumeRoleCommand } from "./commands/AssumeRoleCommand"; +import { AssumeRoleWithWebIdentityCommand, } from "./commands/AssumeRoleWithWebIdentityCommand"; +const ASSUME_ROLE_DEFAULT_REGION = "us-east-1"; +const 
getAccountIdFromAssumedRoleUser = (assumedRoleUser) => { + if (typeof assumedRoleUser?.Arn === "string") { + const arnComponents = assumedRoleUser.Arn.split(":"); + if (arnComponents.length > 4 && arnComponents[4] !== "") { + return arnComponents[4]; + } + } + return undefined; +}; +const resolveRegion = async (_region, _parentRegion, credentialProviderLogger) => { + const region = typeof _region === "function" ? await _region() : _region; + const parentRegion = typeof _parentRegion === "function" ? await _parentRegion() : _parentRegion; + credentialProviderLogger?.debug?.("@aws-sdk/client-sts::resolveRegion", "accepting first of:", `${region} (provider)`, `${parentRegion} (parent client)`, `${ASSUME_ROLE_DEFAULT_REGION} (STS default)`); + return region ?? parentRegion ?? ASSUME_ROLE_DEFAULT_REGION; +}; +export const getDefaultRoleAssumer = (stsOptions, STSClient) => { + let stsClient; + let closureSourceCreds; + return async (sourceCreds, params) => { + closureSourceCreds = sourceCreds; + if (!stsClient) { + const { logger = stsOptions?.parentClientConfig?.logger, region, requestHandler = stsOptions?.parentClientConfig?.requestHandler, credentialProviderLogger, } = stsOptions; + const resolvedRegion = await resolveRegion(region, stsOptions?.parentClientConfig?.region, credentialProviderLogger); + const isCompatibleRequestHandler = !isH2(requestHandler); + stsClient = new STSClient({ + profile: stsOptions?.parentClientConfig?.profile, + credentialDefaultProvider: () => async () => closureSourceCreds, + region: resolvedRegion, + requestHandler: isCompatibleRequestHandler ? 
requestHandler : undefined, + logger: logger, + }); + } + const { Credentials, AssumedRoleUser } = await stsClient.send(new AssumeRoleCommand(params)); + if (!Credentials || !Credentials.AccessKeyId || !Credentials.SecretAccessKey) { + throw new Error(`Invalid response from STS.assumeRole call with role ${params.RoleArn}`); + } + const accountId = getAccountIdFromAssumedRoleUser(AssumedRoleUser); + const credentials = { + accessKeyId: Credentials.AccessKeyId, + secretAccessKey: Credentials.SecretAccessKey, + sessionToken: Credentials.SessionToken, + expiration: Credentials.Expiration, + ...(Credentials.CredentialScope && { credentialScope: Credentials.CredentialScope }), + ...(accountId && { accountId }), + }; + setCredentialFeature(credentials, "CREDENTIALS_STS_ASSUME_ROLE", "i"); + return credentials; + }; +}; +export const getDefaultRoleAssumerWithWebIdentity = (stsOptions, STSClient) => { + let stsClient; + return async (params) => { + if (!stsClient) { + const { logger = stsOptions?.parentClientConfig?.logger, region, requestHandler = stsOptions?.parentClientConfig?.requestHandler, credentialProviderLogger, } = stsOptions; + const resolvedRegion = await resolveRegion(region, stsOptions?.parentClientConfig?.region, credentialProviderLogger); + const isCompatibleRequestHandler = !isH2(requestHandler); + stsClient = new STSClient({ + profile: stsOptions?.parentClientConfig?.profile, + region: resolvedRegion, + requestHandler: isCompatibleRequestHandler ? 
requestHandler : undefined, + logger: logger, + }); + } + const { Credentials, AssumedRoleUser } = await stsClient.send(new AssumeRoleWithWebIdentityCommand(params)); + if (!Credentials || !Credentials.AccessKeyId || !Credentials.SecretAccessKey) { + throw new Error(`Invalid response from STS.assumeRoleWithWebIdentity call with role ${params.RoleArn}`); + } + const accountId = getAccountIdFromAssumedRoleUser(AssumedRoleUser); + const credentials = { + accessKeyId: Credentials.AccessKeyId, + secretAccessKey: Credentials.SecretAccessKey, + sessionToken: Credentials.SessionToken, + expiration: Credentials.Expiration, + ...(Credentials.CredentialScope && { credentialScope: Credentials.CredentialScope }), + ...(accountId && { accountId }), + }; + if (accountId) { + setCredentialFeature(credentials, "RESOLVED_ACCOUNT_ID", "T"); + } + setCredentialFeature(credentials, "CREDENTIALS_STS_ASSUME_ROLE_WEB_ID", "k"); + return credentials; + }; +}; +export const decorateDefaultCredentialProvider = (provider) => (input) => provider({ + roleAssumer: getDefaultRoleAssumer(input, input.stsClientCtor), + roleAssumerWithWebIdentity: getDefaultRoleAssumerWithWebIdentity(input, input.stsClientCtor), + ...input, +}); +const isH2 = (requestHandler) => { + return requestHandler?.metadata?.handlerProtocol === "h2"; +}; diff --git a/amplify/functions/downloadDocument/node_modules/@aws-sdk/nested-clients/dist-es/submodules/sts/endpoint/EndpointParameters.js b/amplify/functions/downloadDocument/node_modules/@aws-sdk/nested-clients/dist-es/submodules/sts/endpoint/EndpointParameters.js new file mode 100644 index 0000000..1c74b01 --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@aws-sdk/nested-clients/dist-es/submodules/sts/endpoint/EndpointParameters.js @@ -0,0 +1,15 @@ +export const resolveClientEndpointParameters = (options) => { + return Object.assign(options, { + useDualstackEndpoint: options.useDualstackEndpoint ?? false, + useFipsEndpoint: options.useFipsEndpoint ?? 
false, + useGlobalEndpoint: options.useGlobalEndpoint ?? false, + defaultSigningName: "sts", + }); +}; +export const commonParams = { + UseGlobalEndpoint: { type: "builtInParams", name: "useGlobalEndpoint" }, + UseFIPS: { type: "builtInParams", name: "useFipsEndpoint" }, + Endpoint: { type: "builtInParams", name: "endpoint" }, + Region: { type: "builtInParams", name: "region" }, + UseDualStack: { type: "builtInParams", name: "useDualstackEndpoint" }, +}; diff --git a/amplify/functions/downloadDocument/node_modules/@aws-sdk/nested-clients/dist-es/submodules/sts/endpoint/endpointResolver.js b/amplify/functions/downloadDocument/node_modules/@aws-sdk/nested-clients/dist-es/submodules/sts/endpoint/endpointResolver.js new file mode 100644 index 0000000..f54d279 --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@aws-sdk/nested-clients/dist-es/submodules/sts/endpoint/endpointResolver.js @@ -0,0 +1,14 @@ +import { awsEndpointFunctions } from "@aws-sdk/util-endpoints"; +import { customEndpointFunctions, EndpointCache, resolveEndpoint } from "@smithy/util-endpoints"; +import { ruleSet } from "./ruleset"; +const cache = new EndpointCache({ + size: 50, + params: ["Endpoint", "Region", "UseDualStack", "UseFIPS", "UseGlobalEndpoint"], +}); +export const defaultEndpointResolver = (endpointParams, context = {}) => { + return cache.get(endpointParams, () => resolveEndpoint(ruleSet, { + endpointParams: endpointParams, + logger: context.logger, + })); +}; +customEndpointFunctions.aws = awsEndpointFunctions; diff --git a/amplify/functions/downloadDocument/node_modules/@aws-sdk/nested-clients/dist-es/submodules/sts/endpoint/ruleset.js b/amplify/functions/downloadDocument/node_modules/@aws-sdk/nested-clients/dist-es/submodules/sts/endpoint/ruleset.js new file mode 100644 index 0000000..99a438a --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@aws-sdk/nested-clients/dist-es/submodules/sts/endpoint/ruleset.js @@ -0,0 +1,4 @@ +const F = "required", G = 
"type", H = "fn", I = "argv", J = "ref"; +const a = false, b = true, c = "booleanEquals", d = "stringEquals", e = "sigv4", f = "sts", g = "us-east-1", h = "endpoint", i = "https://sts.{Region}.{PartitionResult#dnsSuffix}", j = "tree", k = "error", l = "getAttr", m = { [F]: false, [G]: "String" }, n = { [F]: true, "default": false, [G]: "Boolean" }, o = { [J]: "Endpoint" }, p = { [H]: "isSet", [I]: [{ [J]: "Region" }] }, q = { [J]: "Region" }, r = { [H]: "aws.partition", [I]: [q], "assign": "PartitionResult" }, s = { [J]: "UseFIPS" }, t = { [J]: "UseDualStack" }, u = { "url": "https://sts.amazonaws.com", "properties": { "authSchemes": [{ "name": e, "signingName": f, "signingRegion": g }] }, "headers": {} }, v = {}, w = { "conditions": [{ [H]: d, [I]: [q, "aws-global"] }], [h]: u, [G]: h }, x = { [H]: c, [I]: [s, true] }, y = { [H]: c, [I]: [t, true] }, z = { [H]: l, [I]: [{ [J]: "PartitionResult" }, "supportsFIPS"] }, A = { [J]: "PartitionResult" }, B = { [H]: c, [I]: [true, { [H]: l, [I]: [A, "supportsDualStack"] }] }, C = [{ [H]: "isSet", [I]: [o] }], D = [x], E = [y]; +const _data = { version: "1.0", parameters: { Region: m, UseDualStack: n, UseFIPS: n, Endpoint: m, UseGlobalEndpoint: n }, rules: [{ conditions: [{ [H]: c, [I]: [{ [J]: "UseGlobalEndpoint" }, b] }, { [H]: "not", [I]: C }, p, r, { [H]: c, [I]: [s, a] }, { [H]: c, [I]: [t, a] }], rules: [{ conditions: [{ [H]: d, [I]: [q, "ap-northeast-1"] }], endpoint: u, [G]: h }, { conditions: [{ [H]: d, [I]: [q, "ap-south-1"] }], endpoint: u, [G]: h }, { conditions: [{ [H]: d, [I]: [q, "ap-southeast-1"] }], endpoint: u, [G]: h }, { conditions: [{ [H]: d, [I]: [q, "ap-southeast-2"] }], endpoint: u, [G]: h }, w, { conditions: [{ [H]: d, [I]: [q, "ca-central-1"] }], endpoint: u, [G]: h }, { conditions: [{ [H]: d, [I]: [q, "eu-central-1"] }], endpoint: u, [G]: h }, { conditions: [{ [H]: d, [I]: [q, "eu-north-1"] }], endpoint: u, [G]: h }, { conditions: [{ [H]: d, [I]: [q, "eu-west-1"] }], endpoint: u, [G]: h }, { 
conditions: [{ [H]: d, [I]: [q, "eu-west-2"] }], endpoint: u, [G]: h }, { conditions: [{ [H]: d, [I]: [q, "eu-west-3"] }], endpoint: u, [G]: h }, { conditions: [{ [H]: d, [I]: [q, "sa-east-1"] }], endpoint: u, [G]: h }, { conditions: [{ [H]: d, [I]: [q, g] }], endpoint: u, [G]: h }, { conditions: [{ [H]: d, [I]: [q, "us-east-2"] }], endpoint: u, [G]: h }, { conditions: [{ [H]: d, [I]: [q, "us-west-1"] }], endpoint: u, [G]: h }, { conditions: [{ [H]: d, [I]: [q, "us-west-2"] }], endpoint: u, [G]: h }, { endpoint: { url: i, properties: { authSchemes: [{ name: e, signingName: f, signingRegion: "{Region}" }] }, headers: v }, [G]: h }], [G]: j }, { conditions: C, rules: [{ conditions: D, error: "Invalid Configuration: FIPS and custom endpoint are not supported", [G]: k }, { conditions: E, error: "Invalid Configuration: Dualstack and custom endpoint are not supported", [G]: k }, { endpoint: { url: o, properties: v, headers: v }, [G]: h }], [G]: j }, { conditions: [p], rules: [{ conditions: [r], rules: [{ conditions: [x, y], rules: [{ conditions: [{ [H]: c, [I]: [b, z] }, B], rules: [{ endpoint: { url: "https://sts-fips.{Region}.{PartitionResult#dualStackDnsSuffix}", properties: v, headers: v }, [G]: h }], [G]: j }, { error: "FIPS and DualStack are enabled, but this partition does not support one or both", [G]: k }], [G]: j }, { conditions: D, rules: [{ conditions: [{ [H]: c, [I]: [z, b] }], rules: [{ conditions: [{ [H]: d, [I]: [{ [H]: l, [I]: [A, "name"] }, "aws-us-gov"] }], endpoint: { url: "https://sts.{Region}.amazonaws.com", properties: v, headers: v }, [G]: h }, { endpoint: { url: "https://sts-fips.{Region}.{PartitionResult#dnsSuffix}", properties: v, headers: v }, [G]: h }], [G]: j }, { error: "FIPS is enabled but this partition does not support FIPS", [G]: k }], [G]: j }, { conditions: E, rules: [{ conditions: [B], rules: [{ endpoint: { url: "https://sts.{Region}.{PartitionResult#dualStackDnsSuffix}", properties: v, headers: v }, [G]: h }], [G]: j }, { error: 
"DualStack is enabled but this partition does not support DualStack", [G]: k }], [G]: j }, w, { endpoint: { url: i, properties: v, headers: v }, [G]: h }], [G]: j }], [G]: j }, { error: "Invalid Configuration: Missing Region", [G]: k }] }; +export const ruleSet = _data; diff --git a/amplify/functions/downloadDocument/node_modules/@aws-sdk/nested-clients/dist-es/submodules/sts/extensionConfiguration.js b/amplify/functions/downloadDocument/node_modules/@aws-sdk/nested-clients/dist-es/submodules/sts/extensionConfiguration.js new file mode 100644 index 0000000..cb0ff5c --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@aws-sdk/nested-clients/dist-es/submodules/sts/extensionConfiguration.js @@ -0,0 +1 @@ +export {}; diff --git a/amplify/functions/downloadDocument/node_modules/@aws-sdk/nested-clients/dist-es/submodules/sts/index.js b/amplify/functions/downloadDocument/node_modules/@aws-sdk/nested-clients/dist-es/submodules/sts/index.js new file mode 100644 index 0000000..fa366be --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@aws-sdk/nested-clients/dist-es/submodules/sts/index.js @@ -0,0 +1,6 @@ +export * from "./STSClient"; +export * from "./STS"; +export * from "./commands"; +export * from "./models"; +export * from "./defaultRoleAssumers"; +export { STSServiceException } from "./models/STSServiceException"; diff --git a/amplify/functions/downloadDocument/node_modules/@aws-sdk/nested-clients/dist-es/submodules/sts/models/STSServiceException.js b/amplify/functions/downloadDocument/node_modules/@aws-sdk/nested-clients/dist-es/submodules/sts/models/STSServiceException.js new file mode 100644 index 0000000..6d2963c --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@aws-sdk/nested-clients/dist-es/submodules/sts/models/STSServiceException.js @@ -0,0 +1,8 @@ +import { ServiceException as __ServiceException, } from "@smithy/smithy-client"; +export { __ServiceException }; +export class STSServiceException extends 
__ServiceException { + constructor(options) { + super(options); + Object.setPrototypeOf(this, STSServiceException.prototype); + } +} diff --git a/amplify/functions/downloadDocument/node_modules/@aws-sdk/nested-clients/dist-es/submodules/sts/models/index.js b/amplify/functions/downloadDocument/node_modules/@aws-sdk/nested-clients/dist-es/submodules/sts/models/index.js new file mode 100644 index 0000000..09c5d6e --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@aws-sdk/nested-clients/dist-es/submodules/sts/models/index.js @@ -0,0 +1 @@ +export * from "./models_0"; diff --git a/amplify/functions/downloadDocument/node_modules/@aws-sdk/nested-clients/dist-es/submodules/sts/models/models_0.js b/amplify/functions/downloadDocument/node_modules/@aws-sdk/nested-clients/dist-es/submodules/sts/models/models_0.js new file mode 100644 index 0000000..63e9c52 --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@aws-sdk/nested-clients/dist-es/submodules/sts/models/models_0.js @@ -0,0 +1,102 @@ +import { SENSITIVE_STRING } from "@smithy/smithy-client"; +import { STSServiceException as __BaseException } from "./STSServiceException"; +export const CredentialsFilterSensitiveLog = (obj) => ({ + ...obj, + ...(obj.SecretAccessKey && { SecretAccessKey: SENSITIVE_STRING }), +}); +export const AssumeRoleResponseFilterSensitiveLog = (obj) => ({ + ...obj, + ...(obj.Credentials && { Credentials: CredentialsFilterSensitiveLog(obj.Credentials) }), +}); +export class ExpiredTokenException extends __BaseException { + name = "ExpiredTokenException"; + $fault = "client"; + constructor(opts) { + super({ + name: "ExpiredTokenException", + $fault: "client", + ...opts, + }); + Object.setPrototypeOf(this, ExpiredTokenException.prototype); + } +} +export class MalformedPolicyDocumentException extends __BaseException { + name = "MalformedPolicyDocumentException"; + $fault = "client"; + constructor(opts) { + super({ + name: "MalformedPolicyDocumentException", + $fault: 
"client", + ...opts, + }); + Object.setPrototypeOf(this, MalformedPolicyDocumentException.prototype); + } +} +export class PackedPolicyTooLargeException extends __BaseException { + name = "PackedPolicyTooLargeException"; + $fault = "client"; + constructor(opts) { + super({ + name: "PackedPolicyTooLargeException", + $fault: "client", + ...opts, + }); + Object.setPrototypeOf(this, PackedPolicyTooLargeException.prototype); + } +} +export class RegionDisabledException extends __BaseException { + name = "RegionDisabledException"; + $fault = "client"; + constructor(opts) { + super({ + name: "RegionDisabledException", + $fault: "client", + ...opts, + }); + Object.setPrototypeOf(this, RegionDisabledException.prototype); + } +} +export class IDPRejectedClaimException extends __BaseException { + name = "IDPRejectedClaimException"; + $fault = "client"; + constructor(opts) { + super({ + name: "IDPRejectedClaimException", + $fault: "client", + ...opts, + }); + Object.setPrototypeOf(this, IDPRejectedClaimException.prototype); + } +} +export class InvalidIdentityTokenException extends __BaseException { + name = "InvalidIdentityTokenException"; + $fault = "client"; + constructor(opts) { + super({ + name: "InvalidIdentityTokenException", + $fault: "client", + ...opts, + }); + Object.setPrototypeOf(this, InvalidIdentityTokenException.prototype); + } +} +export const AssumeRoleWithWebIdentityRequestFilterSensitiveLog = (obj) => ({ + ...obj, + ...(obj.WebIdentityToken && { WebIdentityToken: SENSITIVE_STRING }), +}); +export const AssumeRoleWithWebIdentityResponseFilterSensitiveLog = (obj) => ({ + ...obj, + ...(obj.Credentials && { Credentials: CredentialsFilterSensitiveLog(obj.Credentials) }), +}); +export class IDPCommunicationErrorException extends __BaseException { + name = "IDPCommunicationErrorException"; + $fault = "client"; + constructor(opts) { + super({ + name: "IDPCommunicationErrorException", + $fault: "client", + ...opts, + }); + Object.setPrototypeOf(this, 
IDPCommunicationErrorException.prototype); + } +} diff --git a/amplify/functions/downloadDocument/node_modules/@aws-sdk/nested-clients/dist-es/submodules/sts/protocols/Aws_query.js b/amplify/functions/downloadDocument/node_modules/@aws-sdk/nested-clients/dist-es/submodules/sts/protocols/Aws_query.js new file mode 100644 index 0000000..a98e41a --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@aws-sdk/nested-clients/dist-es/submodules/sts/protocols/Aws_query.js @@ -0,0 +1,528 @@ +import { parseXmlBody as parseBody, parseXmlErrorBody as parseErrorBody } from "@aws-sdk/core"; +import { HttpRequest as __HttpRequest } from "@smithy/protocol-http"; +import { collectBody, decorateServiceException as __decorateServiceException, expectNonNull as __expectNonNull, expectString as __expectString, extendedEncodeURIComponent as __extendedEncodeURIComponent, parseRfc3339DateTimeWithOffset as __parseRfc3339DateTimeWithOffset, strictParseInt32 as __strictParseInt32, withBaseException, } from "@smithy/smithy-client"; +import { ExpiredTokenException, IDPCommunicationErrorException, IDPRejectedClaimException, InvalidIdentityTokenException, MalformedPolicyDocumentException, PackedPolicyTooLargeException, RegionDisabledException, } from "../models/models_0"; +import { STSServiceException as __BaseException } from "../models/STSServiceException"; +export const se_AssumeRoleCommand = async (input, context) => { + const headers = SHARED_HEADERS; + let body; + body = buildFormUrlencodedString({ + ...se_AssumeRoleRequest(input, context), + [_A]: _AR, + [_V]: _, + }); + return buildHttpRpcRequest(context, headers, "/", undefined, body); +}; +export const se_AssumeRoleWithWebIdentityCommand = async (input, context) => { + const headers = SHARED_HEADERS; + let body; + body = buildFormUrlencodedString({ + ...se_AssumeRoleWithWebIdentityRequest(input, context), + [_A]: _ARWWI, + [_V]: _, + }); + return buildHttpRpcRequest(context, headers, "/", undefined, body); +}; +export 
const de_AssumeRoleCommand = async (output, context) => { + if (output.statusCode >= 300) { + return de_CommandError(output, context); + } + const data = await parseBody(output.body, context); + let contents = {}; + contents = de_AssumeRoleResponse(data.AssumeRoleResult, context); + const response = { + $metadata: deserializeMetadata(output), + ...contents, + }; + return response; +}; +export const de_AssumeRoleWithWebIdentityCommand = async (output, context) => { + if (output.statusCode >= 300) { + return de_CommandError(output, context); + } + const data = await parseBody(output.body, context); + let contents = {}; + contents = de_AssumeRoleWithWebIdentityResponse(data.AssumeRoleWithWebIdentityResult, context); + const response = { + $metadata: deserializeMetadata(output), + ...contents, + }; + return response; +}; +const de_CommandError = async (output, context) => { + const parsedOutput = { + ...output, + body: await parseErrorBody(output.body, context), + }; + const errorCode = loadQueryErrorCode(output, parsedOutput.body); + switch (errorCode) { + case "ExpiredTokenException": + case "com.amazonaws.sts#ExpiredTokenException": + throw await de_ExpiredTokenExceptionRes(parsedOutput, context); + case "MalformedPolicyDocument": + case "com.amazonaws.sts#MalformedPolicyDocumentException": + throw await de_MalformedPolicyDocumentExceptionRes(parsedOutput, context); + case "PackedPolicyTooLarge": + case "com.amazonaws.sts#PackedPolicyTooLargeException": + throw await de_PackedPolicyTooLargeExceptionRes(parsedOutput, context); + case "RegionDisabledException": + case "com.amazonaws.sts#RegionDisabledException": + throw await de_RegionDisabledExceptionRes(parsedOutput, context); + case "IDPCommunicationError": + case "com.amazonaws.sts#IDPCommunicationErrorException": + throw await de_IDPCommunicationErrorExceptionRes(parsedOutput, context); + case "IDPRejectedClaim": + case "com.amazonaws.sts#IDPRejectedClaimException": + throw await 
de_IDPRejectedClaimExceptionRes(parsedOutput, context); + case "InvalidIdentityToken": + case "com.amazonaws.sts#InvalidIdentityTokenException": + throw await de_InvalidIdentityTokenExceptionRes(parsedOutput, context); + default: + const parsedBody = parsedOutput.body; + return throwDefaultError({ + output, + parsedBody: parsedBody.Error, + errorCode, + }); + } +}; +const de_ExpiredTokenExceptionRes = async (parsedOutput, context) => { + const body = parsedOutput.body; + const deserialized = de_ExpiredTokenException(body.Error, context); + const exception = new ExpiredTokenException({ + $metadata: deserializeMetadata(parsedOutput), + ...deserialized, + }); + return __decorateServiceException(exception, body); +}; +const de_IDPCommunicationErrorExceptionRes = async (parsedOutput, context) => { + const body = parsedOutput.body; + const deserialized = de_IDPCommunicationErrorException(body.Error, context); + const exception = new IDPCommunicationErrorException({ + $metadata: deserializeMetadata(parsedOutput), + ...deserialized, + }); + return __decorateServiceException(exception, body); +}; +const de_IDPRejectedClaimExceptionRes = async (parsedOutput, context) => { + const body = parsedOutput.body; + const deserialized = de_IDPRejectedClaimException(body.Error, context); + const exception = new IDPRejectedClaimException({ + $metadata: deserializeMetadata(parsedOutput), + ...deserialized, + }); + return __decorateServiceException(exception, body); +}; +const de_InvalidIdentityTokenExceptionRes = async (parsedOutput, context) => { + const body = parsedOutput.body; + const deserialized = de_InvalidIdentityTokenException(body.Error, context); + const exception = new InvalidIdentityTokenException({ + $metadata: deserializeMetadata(parsedOutput), + ...deserialized, + }); + return __decorateServiceException(exception, body); +}; +const de_MalformedPolicyDocumentExceptionRes = async (parsedOutput, context) => { + const body = parsedOutput.body; + const deserialized = 
de_MalformedPolicyDocumentException(body.Error, context); + const exception = new MalformedPolicyDocumentException({ + $metadata: deserializeMetadata(parsedOutput), + ...deserialized, + }); + return __decorateServiceException(exception, body); +}; +const de_PackedPolicyTooLargeExceptionRes = async (parsedOutput, context) => { + const body = parsedOutput.body; + const deserialized = de_PackedPolicyTooLargeException(body.Error, context); + const exception = new PackedPolicyTooLargeException({ + $metadata: deserializeMetadata(parsedOutput), + ...deserialized, + }); + return __decorateServiceException(exception, body); +}; +const de_RegionDisabledExceptionRes = async (parsedOutput, context) => { + const body = parsedOutput.body; + const deserialized = de_RegionDisabledException(body.Error, context); + const exception = new RegionDisabledException({ + $metadata: deserializeMetadata(parsedOutput), + ...deserialized, + }); + return __decorateServiceException(exception, body); +}; +const se_AssumeRoleRequest = (input, context) => { + const entries = {}; + if (input[_RA] != null) { + entries[_RA] = input[_RA]; + } + if (input[_RSN] != null) { + entries[_RSN] = input[_RSN]; + } + if (input[_PA] != null) { + const memberEntries = se_policyDescriptorListType(input[_PA], context); + if (input[_PA]?.length === 0) { + entries.PolicyArns = []; + } + Object.entries(memberEntries).forEach(([key, value]) => { + const loc = `PolicyArns.${key}`; + entries[loc] = value; + }); + } + if (input[_P] != null) { + entries[_P] = input[_P]; + } + if (input[_DS] != null) { + entries[_DS] = input[_DS]; + } + if (input[_T] != null) { + const memberEntries = se_tagListType(input[_T], context); + if (input[_T]?.length === 0) { + entries.Tags = []; + } + Object.entries(memberEntries).forEach(([key, value]) => { + const loc = `Tags.${key}`; + entries[loc] = value; + }); + } + if (input[_TTK] != null) { + const memberEntries = se_tagKeyListType(input[_TTK], context); + if (input[_TTK]?.length === 0) { 
+ entries.TransitiveTagKeys = []; + } + Object.entries(memberEntries).forEach(([key, value]) => { + const loc = `TransitiveTagKeys.${key}`; + entries[loc] = value; + }); + } + if (input[_EI] != null) { + entries[_EI] = input[_EI]; + } + if (input[_SN] != null) { + entries[_SN] = input[_SN]; + } + if (input[_TC] != null) { + entries[_TC] = input[_TC]; + } + if (input[_SI] != null) { + entries[_SI] = input[_SI]; + } + if (input[_PC] != null) { + const memberEntries = se_ProvidedContextsListType(input[_PC], context); + if (input[_PC]?.length === 0) { + entries.ProvidedContexts = []; + } + Object.entries(memberEntries).forEach(([key, value]) => { + const loc = `ProvidedContexts.${key}`; + entries[loc] = value; + }); + } + return entries; +}; +const se_AssumeRoleWithWebIdentityRequest = (input, context) => { + const entries = {}; + if (input[_RA] != null) { + entries[_RA] = input[_RA]; + } + if (input[_RSN] != null) { + entries[_RSN] = input[_RSN]; + } + if (input[_WIT] != null) { + entries[_WIT] = input[_WIT]; + } + if (input[_PI] != null) { + entries[_PI] = input[_PI]; + } + if (input[_PA] != null) { + const memberEntries = se_policyDescriptorListType(input[_PA], context); + if (input[_PA]?.length === 0) { + entries.PolicyArns = []; + } + Object.entries(memberEntries).forEach(([key, value]) => { + const loc = `PolicyArns.${key}`; + entries[loc] = value; + }); + } + if (input[_P] != null) { + entries[_P] = input[_P]; + } + if (input[_DS] != null) { + entries[_DS] = input[_DS]; + } + return entries; +}; +const se_policyDescriptorListType = (input, context) => { + const entries = {}; + let counter = 1; + for (const entry of input) { + if (entry === null) { + continue; + } + const memberEntries = se_PolicyDescriptorType(entry, context); + Object.entries(memberEntries).forEach(([key, value]) => { + entries[`member.${counter}.${key}`] = value; + }); + counter++; + } + return entries; +}; +const se_PolicyDescriptorType = (input, context) => { + const entries = {}; + if 
(input[_a] != null) { + entries[_a] = input[_a]; + } + return entries; +}; +const se_ProvidedContext = (input, context) => { + const entries = {}; + if (input[_PAr] != null) { + entries[_PAr] = input[_PAr]; + } + if (input[_CA] != null) { + entries[_CA] = input[_CA]; + } + return entries; +}; +const se_ProvidedContextsListType = (input, context) => { + const entries = {}; + let counter = 1; + for (const entry of input) { + if (entry === null) { + continue; + } + const memberEntries = se_ProvidedContext(entry, context); + Object.entries(memberEntries).forEach(([key, value]) => { + entries[`member.${counter}.${key}`] = value; + }); + counter++; + } + return entries; +}; +const se_Tag = (input, context) => { + const entries = {}; + if (input[_K] != null) { + entries[_K] = input[_K]; + } + if (input[_Va] != null) { + entries[_Va] = input[_Va]; + } + return entries; +}; +const se_tagKeyListType = (input, context) => { + const entries = {}; + let counter = 1; + for (const entry of input) { + if (entry === null) { + continue; + } + entries[`member.${counter}`] = entry; + counter++; + } + return entries; +}; +const se_tagListType = (input, context) => { + const entries = {}; + let counter = 1; + for (const entry of input) { + if (entry === null) { + continue; + } + const memberEntries = se_Tag(entry, context); + Object.entries(memberEntries).forEach(([key, value]) => { + entries[`member.${counter}.${key}`] = value; + }); + counter++; + } + return entries; +}; +const de_AssumedRoleUser = (output, context) => { + const contents = {}; + if (output[_ARI] != null) { + contents[_ARI] = __expectString(output[_ARI]); + } + if (output[_Ar] != null) { + contents[_Ar] = __expectString(output[_Ar]); + } + return contents; +}; +const de_AssumeRoleResponse = (output, context) => { + const contents = {}; + if (output[_C] != null) { + contents[_C] = de_Credentials(output[_C], context); + } + if (output[_ARU] != null) { + contents[_ARU] = de_AssumedRoleUser(output[_ARU], context); + } + if 
(output[_PPS] != null) { + contents[_PPS] = __strictParseInt32(output[_PPS]); + } + if (output[_SI] != null) { + contents[_SI] = __expectString(output[_SI]); + } + return contents; +}; +const de_AssumeRoleWithWebIdentityResponse = (output, context) => { + const contents = {}; + if (output[_C] != null) { + contents[_C] = de_Credentials(output[_C], context); + } + if (output[_SFWIT] != null) { + contents[_SFWIT] = __expectString(output[_SFWIT]); + } + if (output[_ARU] != null) { + contents[_ARU] = de_AssumedRoleUser(output[_ARU], context); + } + if (output[_PPS] != null) { + contents[_PPS] = __strictParseInt32(output[_PPS]); + } + if (output[_Pr] != null) { + contents[_Pr] = __expectString(output[_Pr]); + } + if (output[_Au] != null) { + contents[_Au] = __expectString(output[_Au]); + } + if (output[_SI] != null) { + contents[_SI] = __expectString(output[_SI]); + } + return contents; +}; +const de_Credentials = (output, context) => { + const contents = {}; + if (output[_AKI] != null) { + contents[_AKI] = __expectString(output[_AKI]); + } + if (output[_SAK] != null) { + contents[_SAK] = __expectString(output[_SAK]); + } + if (output[_ST] != null) { + contents[_ST] = __expectString(output[_ST]); + } + if (output[_E] != null) { + contents[_E] = __expectNonNull(__parseRfc3339DateTimeWithOffset(output[_E])); + } + return contents; +}; +const de_ExpiredTokenException = (output, context) => { + const contents = {}; + if (output[_m] != null) { + contents[_m] = __expectString(output[_m]); + } + return contents; +}; +const de_IDPCommunicationErrorException = (output, context) => { + const contents = {}; + if (output[_m] != null) { + contents[_m] = __expectString(output[_m]); + } + return contents; +}; +const de_IDPRejectedClaimException = (output, context) => { + const contents = {}; + if (output[_m] != null) { + contents[_m] = __expectString(output[_m]); + } + return contents; +}; +const de_InvalidIdentityTokenException = (output, context) => { + const contents = {}; + if 
(output[_m] != null) { + contents[_m] = __expectString(output[_m]); + } + return contents; +}; +const de_MalformedPolicyDocumentException = (output, context) => { + const contents = {}; + if (output[_m] != null) { + contents[_m] = __expectString(output[_m]); + } + return contents; +}; +const de_PackedPolicyTooLargeException = (output, context) => { + const contents = {}; + if (output[_m] != null) { + contents[_m] = __expectString(output[_m]); + } + return contents; +}; +const de_RegionDisabledException = (output, context) => { + const contents = {}; + if (output[_m] != null) { + contents[_m] = __expectString(output[_m]); + } + return contents; +}; +const deserializeMetadata = (output) => ({ + httpStatusCode: output.statusCode, + requestId: output.headers["x-amzn-requestid"] ?? output.headers["x-amzn-request-id"] ?? output.headers["x-amz-request-id"], + extendedRequestId: output.headers["x-amz-id-2"], + cfId: output.headers["x-amz-cf-id"], +}); +const collectBodyString = (streamBody, context) => collectBody(streamBody, context).then((body) => context.utf8Encoder(body)); +const throwDefaultError = withBaseException(__BaseException); +const buildHttpRpcRequest = async (context, headers, path, resolvedHostname, body) => { + const { hostname, protocol = "https", port, path: basePath } = await context.endpoint(); + const contents = { + protocol, + hostname, + port, + method: "POST", + path: basePath.endsWith("/") ? 
basePath.slice(0, -1) + path : basePath + path, + headers, + }; + if (resolvedHostname !== undefined) { + contents.hostname = resolvedHostname; + } + if (body !== undefined) { + contents.body = body; + } + return new __HttpRequest(contents); +}; +const SHARED_HEADERS = { + "content-type": "application/x-www-form-urlencoded", +}; +const _ = "2011-06-15"; +const _A = "Action"; +const _AKI = "AccessKeyId"; +const _AR = "AssumeRole"; +const _ARI = "AssumedRoleId"; +const _ARU = "AssumedRoleUser"; +const _ARWWI = "AssumeRoleWithWebIdentity"; +const _Ar = "Arn"; +const _Au = "Audience"; +const _C = "Credentials"; +const _CA = "ContextAssertion"; +const _DS = "DurationSeconds"; +const _E = "Expiration"; +const _EI = "ExternalId"; +const _K = "Key"; +const _P = "Policy"; +const _PA = "PolicyArns"; +const _PAr = "ProviderArn"; +const _PC = "ProvidedContexts"; +const _PI = "ProviderId"; +const _PPS = "PackedPolicySize"; +const _Pr = "Provider"; +const _RA = "RoleArn"; +const _RSN = "RoleSessionName"; +const _SAK = "SecretAccessKey"; +const _SFWIT = "SubjectFromWebIdentityToken"; +const _SI = "SourceIdentity"; +const _SN = "SerialNumber"; +const _ST = "SessionToken"; +const _T = "Tags"; +const _TC = "TokenCode"; +const _TTK = "TransitiveTagKeys"; +const _V = "Version"; +const _Va = "Value"; +const _WIT = "WebIdentityToken"; +const _a = "arn"; +const _m = "message"; +const buildFormUrlencodedString = (formEntries) => Object.entries(formEntries) + .map(([key, value]) => __extendedEncodeURIComponent(key) + "=" + __extendedEncodeURIComponent(value)) + .join("&"); +const loadQueryErrorCode = (output, data) => { + if (data.Error?.Code !== undefined) { + return data.Error.Code; + } + if (output.statusCode == 404) { + return "NotFound"; + } +}; diff --git a/amplify/functions/downloadDocument/node_modules/@aws-sdk/nested-clients/dist-es/submodules/sts/runtimeConfig.browser.js 
b/amplify/functions/downloadDocument/node_modules/@aws-sdk/nested-clients/dist-es/submodules/sts/runtimeConfig.browser.js new file mode 100644 index 0000000..f45dbd3 --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@aws-sdk/nested-clients/dist-es/submodules/sts/runtimeConfig.browser.js @@ -0,0 +1,34 @@ +import packageInfo from "../../../package.json"; +import { Sha256 } from "@aws-crypto/sha256-browser"; +import { createDefaultUserAgentProvider } from "@aws-sdk/util-user-agent-browser"; +import { DEFAULT_USE_DUALSTACK_ENDPOINT, DEFAULT_USE_FIPS_ENDPOINT } from "@smithy/config-resolver"; +import { FetchHttpHandler as RequestHandler, streamCollector } from "@smithy/fetch-http-handler"; +import { invalidProvider } from "@smithy/invalid-dependency"; +import { calculateBodyLength } from "@smithy/util-body-length-browser"; +import { DEFAULT_MAX_ATTEMPTS, DEFAULT_RETRY_MODE } from "@smithy/util-retry"; +import { getRuntimeConfig as getSharedRuntimeConfig } from "./runtimeConfig.shared"; +import { loadConfigsForDefaultMode } from "@smithy/smithy-client"; +import { resolveDefaultsModeConfig } from "@smithy/util-defaults-mode-browser"; +export const getRuntimeConfig = (config) => { + const defaultsMode = resolveDefaultsModeConfig(config); + const defaultConfigProvider = () => defaultsMode().then(loadConfigsForDefaultMode); + const clientSharedValues = getSharedRuntimeConfig(config); + return { + ...clientSharedValues, + ...config, + runtime: "browser", + defaultsMode, + bodyLengthChecker: config?.bodyLengthChecker ?? calculateBodyLength, + credentialDefaultProvider: config?.credentialDefaultProvider ?? ((_) => () => Promise.reject(new Error("Credential is missing"))), + defaultUserAgentProvider: config?.defaultUserAgentProvider ?? + createDefaultUserAgentProvider({ serviceId: clientSharedValues.serviceId, clientVersion: packageInfo.version }), + maxAttempts: config?.maxAttempts ?? DEFAULT_MAX_ATTEMPTS, + region: config?.region ?? 
invalidProvider("Region is missing"), + requestHandler: RequestHandler.create(config?.requestHandler ?? defaultConfigProvider), + retryMode: config?.retryMode ?? (async () => (await defaultConfigProvider()).retryMode || DEFAULT_RETRY_MODE), + sha256: config?.sha256 ?? Sha256, + streamCollector: config?.streamCollector ?? streamCollector, + useDualstackEndpoint: config?.useDualstackEndpoint ?? (() => Promise.resolve(DEFAULT_USE_DUALSTACK_ENDPOINT)), + useFipsEndpoint: config?.useFipsEndpoint ?? (() => Promise.resolve(DEFAULT_USE_FIPS_ENDPOINT)), + }; +}; diff --git a/amplify/functions/downloadDocument/node_modules/@aws-sdk/nested-clients/dist-es/submodules/sts/runtimeConfig.js b/amplify/functions/downloadDocument/node_modules/@aws-sdk/nested-clients/dist-es/submodules/sts/runtimeConfig.js new file mode 100644 index 0000000..6ac2412 --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@aws-sdk/nested-clients/dist-es/submodules/sts/runtimeConfig.js @@ -0,0 +1,60 @@ +import packageInfo from "../../../package.json"; +import { AwsSdkSigV4Signer, NODE_AUTH_SCHEME_PREFERENCE_OPTIONS, emitWarningIfUnsupportedVersion as awsCheckVersion, } from "@aws-sdk/core"; +import { NODE_APP_ID_CONFIG_OPTIONS, createDefaultUserAgentProvider } from "@aws-sdk/util-user-agent-node"; +import { NODE_REGION_CONFIG_FILE_OPTIONS, NODE_REGION_CONFIG_OPTIONS, NODE_USE_DUALSTACK_ENDPOINT_CONFIG_OPTIONS, NODE_USE_FIPS_ENDPOINT_CONFIG_OPTIONS, } from "@smithy/config-resolver"; +import { NoAuthSigner } from "@smithy/core"; +import { Hash } from "@smithy/hash-node"; +import { NODE_MAX_ATTEMPT_CONFIG_OPTIONS, NODE_RETRY_MODE_CONFIG_OPTIONS } from "@smithy/middleware-retry"; +import { loadConfig as loadNodeConfig } from "@smithy/node-config-provider"; +import { NodeHttpHandler as RequestHandler, streamCollector } from "@smithy/node-http-handler"; +import { calculateBodyLength } from "@smithy/util-body-length-node"; +import { DEFAULT_RETRY_MODE } from "@smithy/util-retry"; +import { 
getRuntimeConfig as getSharedRuntimeConfig } from "./runtimeConfig.shared"; +import { loadConfigsForDefaultMode } from "@smithy/smithy-client"; +import { resolveDefaultsModeConfig } from "@smithy/util-defaults-mode-node"; +import { emitWarningIfUnsupportedVersion } from "@smithy/smithy-client"; +export const getRuntimeConfig = (config) => { + emitWarningIfUnsupportedVersion(process.version); + const defaultsMode = resolveDefaultsModeConfig(config); + const defaultConfigProvider = () => defaultsMode().then(loadConfigsForDefaultMode); + const clientSharedValues = getSharedRuntimeConfig(config); + awsCheckVersion(process.version); + const profileConfig = { profile: config?.profile }; + return { + ...clientSharedValues, + ...config, + runtime: "node", + defaultsMode, + authSchemePreference: config?.authSchemePreference ?? loadNodeConfig(NODE_AUTH_SCHEME_PREFERENCE_OPTIONS, profileConfig), + bodyLengthChecker: config?.bodyLengthChecker ?? calculateBodyLength, + defaultUserAgentProvider: config?.defaultUserAgentProvider ?? + createDefaultUserAgentProvider({ serviceId: clientSharedValues.serviceId, clientVersion: packageInfo.version }), + httpAuthSchemes: config?.httpAuthSchemes ?? [ + { + schemeId: "aws.auth#sigv4", + identityProvider: (ipc) => ipc.getIdentityProvider("aws.auth#sigv4") || + (async (idProps) => await config.credentialDefaultProvider(idProps?.__config || {})()), + signer: new AwsSdkSigV4Signer(), + }, + { + schemeId: "smithy.api#noAuth", + identityProvider: (ipc) => ipc.getIdentityProvider("smithy.api#noAuth") || (async () => ({})), + signer: new NoAuthSigner(), + }, + ], + maxAttempts: config?.maxAttempts ?? loadNodeConfig(NODE_MAX_ATTEMPT_CONFIG_OPTIONS, config), + region: config?.region ?? + loadNodeConfig(NODE_REGION_CONFIG_OPTIONS, { ...NODE_REGION_CONFIG_FILE_OPTIONS, ...profileConfig }), + requestHandler: RequestHandler.create(config?.requestHandler ?? defaultConfigProvider), + retryMode: config?.retryMode ?? 
+ loadNodeConfig({ + ...NODE_RETRY_MODE_CONFIG_OPTIONS, + default: async () => (await defaultConfigProvider()).retryMode || DEFAULT_RETRY_MODE, + }, config), + sha256: config?.sha256 ?? Hash.bind(null, "sha256"), + streamCollector: config?.streamCollector ?? streamCollector, + useDualstackEndpoint: config?.useDualstackEndpoint ?? loadNodeConfig(NODE_USE_DUALSTACK_ENDPOINT_CONFIG_OPTIONS, profileConfig), + useFipsEndpoint: config?.useFipsEndpoint ?? loadNodeConfig(NODE_USE_FIPS_ENDPOINT_CONFIG_OPTIONS, profileConfig), + userAgentAppId: config?.userAgentAppId ?? loadNodeConfig(NODE_APP_ID_CONFIG_OPTIONS, profileConfig), + }; +}; diff --git a/amplify/functions/downloadDocument/node_modules/@aws-sdk/nested-clients/dist-es/submodules/sts/runtimeConfig.native.js b/amplify/functions/downloadDocument/node_modules/@aws-sdk/nested-clients/dist-es/submodules/sts/runtimeConfig.native.js new file mode 100644 index 0000000..0b54695 --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@aws-sdk/nested-clients/dist-es/submodules/sts/runtimeConfig.native.js @@ -0,0 +1,11 @@ +import { Sha256 } from "@aws-crypto/sha256-js"; +import { getRuntimeConfig as getBrowserRuntimeConfig } from "./runtimeConfig.browser"; +export const getRuntimeConfig = (config) => { + const browserDefaults = getBrowserRuntimeConfig(config); + return { + ...browserDefaults, + ...config, + runtime: "react-native", + sha256: config?.sha256 ?? 
Sha256, + }; +}; diff --git a/amplify/functions/downloadDocument/node_modules/@aws-sdk/nested-clients/dist-es/submodules/sts/runtimeConfig.shared.js b/amplify/functions/downloadDocument/node_modules/@aws-sdk/nested-clients/dist-es/submodules/sts/runtimeConfig.shared.js new file mode 100644 index 0000000..5c6df20 --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@aws-sdk/nested-clients/dist-es/submodules/sts/runtimeConfig.shared.js @@ -0,0 +1,36 @@ +import { AwsSdkSigV4Signer } from "@aws-sdk/core"; +import { NoAuthSigner } from "@smithy/core"; +import { NoOpLogger } from "@smithy/smithy-client"; +import { parseUrl } from "@smithy/url-parser"; +import { fromBase64, toBase64 } from "@smithy/util-base64"; +import { fromUtf8, toUtf8 } from "@smithy/util-utf8"; +import { defaultSTSHttpAuthSchemeProvider } from "./auth/httpAuthSchemeProvider"; +import { defaultEndpointResolver } from "./endpoint/endpointResolver"; +export const getRuntimeConfig = (config) => { + return { + apiVersion: "2011-06-15", + base64Decoder: config?.base64Decoder ?? fromBase64, + base64Encoder: config?.base64Encoder ?? toBase64, + disableHostPrefix: config?.disableHostPrefix ?? false, + endpointProvider: config?.endpointProvider ?? defaultEndpointResolver, + extensions: config?.extensions ?? [], + httpAuthSchemeProvider: config?.httpAuthSchemeProvider ?? defaultSTSHttpAuthSchemeProvider, + httpAuthSchemes: config?.httpAuthSchemes ?? [ + { + schemeId: "aws.auth#sigv4", + identityProvider: (ipc) => ipc.getIdentityProvider("aws.auth#sigv4"), + signer: new AwsSdkSigV4Signer(), + }, + { + schemeId: "smithy.api#noAuth", + identityProvider: (ipc) => ipc.getIdentityProvider("smithy.api#noAuth") || (async () => ({})), + signer: new NoAuthSigner(), + }, + ], + logger: config?.logger ?? new NoOpLogger(), + serviceId: config?.serviceId ?? "STS", + urlParser: config?.urlParser ?? parseUrl, + utf8Decoder: config?.utf8Decoder ?? fromUtf8, + utf8Encoder: config?.utf8Encoder ?? 
toUtf8, + }; +}; diff --git a/amplify/functions/downloadDocument/node_modules/@aws-sdk/nested-clients/dist-es/submodules/sts/runtimeExtensions.js b/amplify/functions/downloadDocument/node_modules/@aws-sdk/nested-clients/dist-es/submodules/sts/runtimeExtensions.js new file mode 100644 index 0000000..5b29695 --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@aws-sdk/nested-clients/dist-es/submodules/sts/runtimeExtensions.js @@ -0,0 +1,9 @@ +import { getAwsRegionExtensionConfiguration, resolveAwsRegionExtensionConfiguration, } from "@aws-sdk/region-config-resolver"; +import { getHttpHandlerExtensionConfiguration, resolveHttpHandlerRuntimeConfig } from "@smithy/protocol-http"; +import { getDefaultExtensionConfiguration, resolveDefaultRuntimeConfig } from "@smithy/smithy-client"; +import { getHttpAuthExtensionConfiguration, resolveHttpAuthRuntimeConfig } from "./auth/httpAuthExtensionConfiguration"; +export const resolveRuntimeExtensions = (runtimeConfig, extensions) => { + const extensionConfiguration = Object.assign(getAwsRegionExtensionConfiguration(runtimeConfig), getDefaultExtensionConfiguration(runtimeConfig), getHttpHandlerExtensionConfiguration(runtimeConfig), getHttpAuthExtensionConfiguration(runtimeConfig)); + extensions.forEach((extension) => extension.configure(extensionConfiguration)); + return Object.assign(runtimeConfig, resolveAwsRegionExtensionConfiguration(extensionConfiguration), resolveDefaultRuntimeConfig(extensionConfiguration), resolveHttpHandlerRuntimeConfig(extensionConfiguration), resolveHttpAuthRuntimeConfig(extensionConfiguration)); +}; diff --git a/amplify/functions/downloadDocument/node_modules/@aws-sdk/nested-clients/dist-types/index.d.ts b/amplify/functions/downloadDocument/node_modules/@aws-sdk/nested-clients/dist-types/index.d.ts new file mode 100644 index 0000000..9d99a73 --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@aws-sdk/nested-clients/dist-types/index.d.ts @@ -0,0 +1,7 @@ +/** + * This 
package exports nothing at the root. + * Use submodules e.g. \@aws-sdk/nested-clients/client-sts. + * + * @internal + */ +export {}; diff --git a/amplify/functions/downloadDocument/node_modules/@aws-sdk/nested-clients/dist-types/submodules/sso-oidc/SSOOIDC.d.ts b/amplify/functions/downloadDocument/node_modules/@aws-sdk/nested-clients/dist-types/submodules/sso-oidc/SSOOIDC.d.ts new file mode 100644 index 0000000..ebec5e6 --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@aws-sdk/nested-clients/dist-types/submodules/sso-oidc/SSOOIDC.d.ts @@ -0,0 +1,55 @@ +import { HttpHandlerOptions as __HttpHandlerOptions } from "@smithy/types"; +import { CreateTokenCommandInput, CreateTokenCommandOutput } from "./commands/CreateTokenCommand"; +import { SSOOIDCClient } from "./SSOOIDCClient"; +export interface SSOOIDC { + /** + * @see {@link CreateTokenCommand} + */ + createToken(args: CreateTokenCommandInput, options?: __HttpHandlerOptions): Promise; + createToken(args: CreateTokenCommandInput, cb: (err: any, data?: CreateTokenCommandOutput) => void): void; + createToken(args: CreateTokenCommandInput, options: __HttpHandlerOptions, cb: (err: any, data?: CreateTokenCommandOutput) => void): void; +} +/** + *

IAM Identity Center OpenID Connect (OIDC) is a web service that enables a client (such as CLI or a + * native application) to register with IAM Identity Center. The service also enables the client to fetch the + * user’s access token upon successful authentication and authorization with IAM Identity Center.

+ *

+ * API namespaces + *

+ *

IAM Identity Center uses the sso and identitystore API namespaces. IAM Identity Center + * OpenID Connect uses the sso-oidc namespace.

+ *

+ * Considerations for using this guide + *

+ *

Before you begin using this guide, we recommend that you first review the following + * important information about how the IAM Identity Center OIDC service works.

+ *
    + *
  • + *

    The IAM Identity Center OIDC service currently implements only the portions of the OAuth 2.0 Device + * Authorization Grant standard (https://tools.ietf.org/html/rfc8628) that are necessary to enable single + * sign-on authentication with the CLI.

    + *
  • + *
  • + *

    With older versions of the CLI, the service only emits OIDC access tokens, so to + * obtain a new token, users must explicitly re-authenticate. To access the OIDC flow that + * supports token refresh and doesn’t require re-authentication, update to the latest CLI + * version (1.27.10 for CLI V1 and 2.9.0 for CLI V2) with support for OIDC token refresh + * and configurable IAM Identity Center session durations. For more information, see Configure Amazon Web Services access portal session duration .

    + *
  • + *
  • + *

    The access tokens provided by this service grant access to all Amazon Web Services account + * entitlements assigned to an IAM Identity Center user, not just a particular application.

    + *
  • + *
  • + *

    The documentation in this guide does not describe the mechanism to convert the access + * token into Amazon Web Services Auth (“sigv4”) credentials for use with IAM-protected Amazon Web Services service + * endpoints. For more information, see GetRoleCredentials in the IAM Identity Center Portal API Reference + * Guide.

    + *
  • + *
+ *

For general information about IAM Identity Center, see What is + * IAM Identity Center? in the IAM Identity Center User Guide.

+ * @public + */ +export declare class SSOOIDC extends SSOOIDCClient implements SSOOIDC { +} diff --git a/amplify/functions/downloadDocument/node_modules/@aws-sdk/nested-clients/dist-types/submodules/sso-oidc/SSOOIDCClient.d.ts b/amplify/functions/downloadDocument/node_modules/@aws-sdk/nested-clients/dist-types/submodules/sso-oidc/SSOOIDCClient.d.ts new file mode 100644 index 0000000..5490889 --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@aws-sdk/nested-clients/dist-types/submodules/sso-oidc/SSOOIDCClient.d.ts @@ -0,0 +1,220 @@ +import { HostHeaderInputConfig, HostHeaderResolvedConfig } from "@aws-sdk/middleware-host-header"; +import { UserAgentInputConfig, UserAgentResolvedConfig } from "@aws-sdk/middleware-user-agent"; +import { RegionInputConfig, RegionResolvedConfig } from "@smithy/config-resolver"; +import { EndpointInputConfig, EndpointResolvedConfig } from "@smithy/middleware-endpoint"; +import { RetryInputConfig, RetryResolvedConfig } from "@smithy/middleware-retry"; +import { HttpHandlerUserInput as __HttpHandlerUserInput } from "@smithy/protocol-http"; +import { Client as __Client, DefaultsMode as __DefaultsMode, SmithyConfiguration as __SmithyConfiguration, SmithyResolvedConfiguration as __SmithyResolvedConfiguration } from "@smithy/smithy-client"; +import { BodyLengthCalculator as __BodyLengthCalculator, CheckOptionalClientConfig as __CheckOptionalClientConfig, ChecksumConstructor as __ChecksumConstructor, Decoder as __Decoder, Encoder as __Encoder, HashConstructor as __HashConstructor, HttpHandlerOptions as __HttpHandlerOptions, Logger as __Logger, Provider as __Provider, Provider, StreamCollector as __StreamCollector, UrlParser as __UrlParser, UserAgent as __UserAgent } from "@smithy/types"; +import { HttpAuthSchemeInputConfig, HttpAuthSchemeResolvedConfig } from "./auth/httpAuthSchemeProvider"; +import { CreateTokenCommandInput, CreateTokenCommandOutput } from "./commands/CreateTokenCommand"; +import { 
ClientInputEndpointParameters, ClientResolvedEndpointParameters, EndpointParameters } from "./endpoint/EndpointParameters"; +import { RuntimeExtension, RuntimeExtensionsConfig } from "./runtimeExtensions"; +export { __Client }; +/** + * @public + */ +export type ServiceInputTypes = CreateTokenCommandInput; +/** + * @public + */ +export type ServiceOutputTypes = CreateTokenCommandOutput; +/** + * @public + */ +export interface ClientDefaults extends Partial<__SmithyConfiguration<__HttpHandlerOptions>> { + /** + * The HTTP handler to use or its constructor options. Fetch in browser and Https in Nodejs. + */ + requestHandler?: __HttpHandlerUserInput; + /** + * A constructor for a class implementing the {@link @smithy/types#ChecksumConstructor} interface + * that computes the SHA-256 HMAC or checksum of a string or binary buffer. + * @internal + */ + sha256?: __ChecksumConstructor | __HashConstructor; + /** + * The function that will be used to convert strings into HTTP endpoints. + * @internal + */ + urlParser?: __UrlParser; + /** + * A function that can calculate the length of a request body. + * @internal + */ + bodyLengthChecker?: __BodyLengthCalculator; + /** + * A function that converts a stream into an array of bytes. + * @internal + */ + streamCollector?: __StreamCollector; + /** + * The function that will be used to convert a base64-encoded string to a byte array. + * @internal + */ + base64Decoder?: __Decoder; + /** + * The function that will be used to convert binary data to a base64-encoded string. + * @internal + */ + base64Encoder?: __Encoder; + /** + * The function that will be used to convert a UTF8-encoded string to a byte array. + * @internal + */ + utf8Decoder?: __Decoder; + /** + * The function that will be used to convert binary data to a UTF-8 encoded string. + * @internal + */ + utf8Encoder?: __Encoder; + /** + * The runtime environment. 
+ * @internal + */ + runtime?: string; + /** + * Disable dynamically changing the endpoint of the client based on the hostPrefix + * trait of an operation. + */ + disableHostPrefix?: boolean; + /** + * Unique service identifier. + * @internal + */ + serviceId?: string; + /** + * Enables IPv6/IPv4 dualstack endpoint. + */ + useDualstackEndpoint?: boolean | __Provider; + /** + * Enables FIPS compatible endpoints. + */ + useFipsEndpoint?: boolean | __Provider; + /** + * The AWS region to which this client will send requests + */ + region?: string | __Provider; + /** + * Setting a client profile is similar to setting a value for the + * AWS_PROFILE environment variable. Setting a profile on a client + * in code only affects the single client instance, unlike AWS_PROFILE. + * + * When set, and only for environments where an AWS configuration + * file exists, fields configurable by this file will be retrieved + * from the specified profile within that file. + * Conflicting code configuration and environment variables will + * still have higher priority. + * + * For client credential resolution that involves checking the AWS + * configuration file, the client's profile (this value) will be + * used unless a different profile is set in the credential + * provider options. + * + */ + profile?: string; + /** + * The provider populating default tracking information to be sent with `user-agent`, `x-amz-user-agent` header + * @internal + */ + defaultUserAgentProvider?: Provider<__UserAgent>; + /** + * Value for how many times a request will be made at most in case of retry. + */ + maxAttempts?: number | __Provider; + /** + * Specifies which retry algorithm to use. + * @see https://docs.aws.amazon.com/AWSJavaScriptSDK/v3/latest/Package/-smithy-util-retry/Enum/RETRY_MODES/ + * + */ + retryMode?: string | __Provider; + /** + * Optional logger for logging debug/info/warn/error. 
+ */ + logger?: __Logger; + /** + * Optional extensions + */ + extensions?: RuntimeExtension[]; + /** + * The {@link @smithy/smithy-client#DefaultsMode} that will be used to determine how certain default configuration options are resolved in the SDK. + */ + defaultsMode?: __DefaultsMode | __Provider<__DefaultsMode>; +} +/** + * @public + */ +export type SSOOIDCClientConfigType = Partial<__SmithyConfiguration<__HttpHandlerOptions>> & ClientDefaults & UserAgentInputConfig & RetryInputConfig & RegionInputConfig & HostHeaderInputConfig & EndpointInputConfig & HttpAuthSchemeInputConfig & ClientInputEndpointParameters; +/** + * @public + * + * The configuration interface of SSOOIDCClient class constructor that set the region, credentials and other options. + */ +export interface SSOOIDCClientConfig extends SSOOIDCClientConfigType { +} +/** + * @public + */ +export type SSOOIDCClientResolvedConfigType = __SmithyResolvedConfiguration<__HttpHandlerOptions> & Required & RuntimeExtensionsConfig & UserAgentResolvedConfig & RetryResolvedConfig & RegionResolvedConfig & HostHeaderResolvedConfig & EndpointResolvedConfig & HttpAuthSchemeResolvedConfig & ClientResolvedEndpointParameters; +/** + * @public + * + * The resolved configuration interface of SSOOIDCClient class. This is resolved and normalized from the {@link SSOOIDCClientConfig | constructor configuration interface}. + */ +export interface SSOOIDCClientResolvedConfig extends SSOOIDCClientResolvedConfigType { +} +/** + *

IAM Identity Center OpenID Connect (OIDC) is a web service that enables a client (such as CLI or a + * native application) to register with IAM Identity Center. The service also enables the client to fetch the + * user’s access token upon successful authentication and authorization with IAM Identity Center.

+ *

+ * API namespaces + *

+ *

IAM Identity Center uses the sso and identitystore API namespaces. IAM Identity Center + * OpenID Connect uses the sso-oidc namespace.

+ *

+ * Considerations for using this guide + *

+ *

Before you begin using this guide, we recommend that you first review the following + * important information about how the IAM Identity Center OIDC service works.

+ *
    + *
  • + *

    The IAM Identity Center OIDC service currently implements only the portions of the OAuth 2.0 Device + * Authorization Grant standard (https://tools.ietf.org/html/rfc8628) that are necessary to enable single + * sign-on authentication with the CLI.

    + *
  • + *
  • + *

    With older versions of the CLI, the service only emits OIDC access tokens, so to + * obtain a new token, users must explicitly re-authenticate. To access the OIDC flow that + * supports token refresh and doesn’t require re-authentication, update to the latest CLI + * version (1.27.10 for CLI V1 and 2.9.0 for CLI V2) with support for OIDC token refresh + * and configurable IAM Identity Center session durations. For more information, see Configure Amazon Web Services access portal session duration .

    + *
  • + *
  • + *

    The access tokens provided by this service grant access to all Amazon Web Services account + * entitlements assigned to an IAM Identity Center user, not just a particular application.

    + *
  • + *
  • + *

    The documentation in this guide does not describe the mechanism to convert the access + * token into Amazon Web Services Auth (“sigv4”) credentials for use with IAM-protected Amazon Web Services service + * endpoints. For more information, see GetRoleCredentials in the IAM Identity Center Portal API Reference + * Guide.

    + *
  • + *
+ *

For general information about IAM Identity Center, see What is + * IAM Identity Center? in the IAM Identity Center User Guide.

+ * @public + */ +export declare class SSOOIDCClient extends __Client<__HttpHandlerOptions, ServiceInputTypes, ServiceOutputTypes, SSOOIDCClientResolvedConfig> { + /** + * The resolved configuration of SSOOIDCClient class. This is resolved and normalized from the {@link SSOOIDCClientConfig | constructor configuration interface}. + */ + readonly config: SSOOIDCClientResolvedConfig; + constructor(...[configuration]: __CheckOptionalClientConfig); + /** + * Destroy underlying resources, like sockets. It's usually not necessary to do this. + * However in Node.js, it's best to explicitly shut down the client's agent when it is no longer needed. + * Otherwise, sockets might stay open for quite a long time before the server terminates them. + */ + destroy(): void; +} diff --git a/amplify/functions/downloadDocument/node_modules/@aws-sdk/nested-clients/dist-types/submodules/sso-oidc/auth/httpAuthExtensionConfiguration.d.ts b/amplify/functions/downloadDocument/node_modules/@aws-sdk/nested-clients/dist-types/submodules/sso-oidc/auth/httpAuthExtensionConfiguration.d.ts new file mode 100644 index 0000000..a56a608 --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@aws-sdk/nested-clients/dist-types/submodules/sso-oidc/auth/httpAuthExtensionConfiguration.d.ts @@ -0,0 +1,29 @@ +import { AwsCredentialIdentity, AwsCredentialIdentityProvider, HttpAuthScheme } from "@smithy/types"; +import { SSOOIDCHttpAuthSchemeProvider } from "./httpAuthSchemeProvider"; +/** + * @internal + */ +export interface HttpAuthExtensionConfiguration { + setHttpAuthScheme(httpAuthScheme: HttpAuthScheme): void; + httpAuthSchemes(): HttpAuthScheme[]; + setHttpAuthSchemeProvider(httpAuthSchemeProvider: SSOOIDCHttpAuthSchemeProvider): void; + httpAuthSchemeProvider(): SSOOIDCHttpAuthSchemeProvider; + setCredentials(credentials: AwsCredentialIdentity | AwsCredentialIdentityProvider): void; + credentials(): AwsCredentialIdentity | AwsCredentialIdentityProvider | undefined; +} +/** + * @internal + 
*/ +export type HttpAuthRuntimeConfig = Partial<{ + httpAuthSchemes: HttpAuthScheme[]; + httpAuthSchemeProvider: SSOOIDCHttpAuthSchemeProvider; + credentials: AwsCredentialIdentity | AwsCredentialIdentityProvider; +}>; +/** + * @internal + */ +export declare const getHttpAuthExtensionConfiguration: (runtimeConfig: HttpAuthRuntimeConfig) => HttpAuthExtensionConfiguration; +/** + * @internal + */ +export declare const resolveHttpAuthRuntimeConfig: (config: HttpAuthExtensionConfiguration) => HttpAuthRuntimeConfig; diff --git a/amplify/functions/downloadDocument/node_modules/@aws-sdk/nested-clients/dist-types/submodules/sso-oidc/auth/httpAuthSchemeProvider.d.ts b/amplify/functions/downloadDocument/node_modules/@aws-sdk/nested-clients/dist-types/submodules/sso-oidc/auth/httpAuthSchemeProvider.d.ts new file mode 100644 index 0000000..8fc989a --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@aws-sdk/nested-clients/dist-types/submodules/sso-oidc/auth/httpAuthSchemeProvider.d.ts @@ -0,0 +1,75 @@ +import { AwsSdkSigV4AuthInputConfig, AwsSdkSigV4AuthResolvedConfig, AwsSdkSigV4PreviouslyResolved } from "@aws-sdk/core"; +import { HandlerExecutionContext, HttpAuthScheme, HttpAuthSchemeParameters, HttpAuthSchemeParametersProvider, HttpAuthSchemeProvider, Provider } from "@smithy/types"; +import { SSOOIDCClientResolvedConfig } from "../SSOOIDCClient"; +/** + * @internal + */ +export interface SSOOIDCHttpAuthSchemeParameters extends HttpAuthSchemeParameters { + region?: string; +} +/** + * @internal + */ +export interface SSOOIDCHttpAuthSchemeParametersProvider extends HttpAuthSchemeParametersProvider { +} +/** + * @internal + */ +export declare const defaultSSOOIDCHttpAuthSchemeParametersProvider: (config: SSOOIDCClientResolvedConfig, context: HandlerExecutionContext, input: object) => Promise; +/** + * @internal + */ +export interface SSOOIDCHttpAuthSchemeProvider extends HttpAuthSchemeProvider { +} +/** + * @internal + */ +export declare const 
defaultSSOOIDCHttpAuthSchemeProvider: SSOOIDCHttpAuthSchemeProvider; +/** + * @internal + */ +export interface HttpAuthSchemeInputConfig extends AwsSdkSigV4AuthInputConfig { + /** + * A comma-separated list of case-sensitive auth scheme names. + * An auth scheme name is a fully qualified auth scheme ID with the namespace prefix trimmed. + * For example, the auth scheme with ID aws.auth#sigv4 is named sigv4. + * @public + */ + authSchemePreference?: string[] | Provider; + /** + * Configuration of HttpAuthSchemes for a client which provides default identity providers and signers per auth scheme. + * @internal + */ + httpAuthSchemes?: HttpAuthScheme[]; + /** + * Configuration of an HttpAuthSchemeProvider for a client which resolves which HttpAuthScheme to use. + * @internal + */ + httpAuthSchemeProvider?: SSOOIDCHttpAuthSchemeProvider; +} +/** + * @internal + */ +export interface HttpAuthSchemeResolvedConfig extends AwsSdkSigV4AuthResolvedConfig { + /** + * A comma-separated list of case-sensitive auth scheme names. + * An auth scheme name is a fully qualified auth scheme ID with the namespace prefix trimmed. + * For example, the auth scheme with ID aws.auth#sigv4 is named sigv4. + * @public + */ + readonly authSchemePreference: Provider; + /** + * Configuration of HttpAuthSchemes for a client which provides default identity providers and signers per auth scheme. + * @internal + */ + readonly httpAuthSchemes: HttpAuthScheme[]; + /** + * Configuration of an HttpAuthSchemeProvider for a client which resolves which HttpAuthScheme to use. 
+ * @internal + */ + readonly httpAuthSchemeProvider: SSOOIDCHttpAuthSchemeProvider; +} +/** + * @internal + */ +export declare const resolveHttpAuthSchemeConfig: (config: T & HttpAuthSchemeInputConfig & AwsSdkSigV4PreviouslyResolved) => T & HttpAuthSchemeResolvedConfig; diff --git a/amplify/functions/downloadDocument/node_modules/@aws-sdk/nested-clients/dist-types/submodules/sso-oidc/commands/CreateTokenCommand.d.ts b/amplify/functions/downloadDocument/node_modules/@aws-sdk/nested-clients/dist-types/submodules/sso-oidc/commands/CreateTokenCommand.d.ts new file mode 100644 index 0000000..042fb52 --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@aws-sdk/nested-clients/dist-types/submodules/sso-oidc/commands/CreateTokenCommand.d.ts @@ -0,0 +1,174 @@ +import { Command as $Command } from "@smithy/smithy-client"; +import { MetadataBearer as __MetadataBearer } from "@smithy/types"; +import { CreateTokenRequest, CreateTokenResponse } from "../models/models_0"; +import { SSOOIDCClientResolvedConfig } from "../SSOOIDCClient"; +/** + * @public + */ +export type { __MetadataBearer }; +export { $Command }; +/** + * @public + * + * The input for {@link CreateTokenCommand}. + */ +export interface CreateTokenCommandInput extends CreateTokenRequest { +} +/** + * @public + * + * The output of {@link CreateTokenCommand}. + */ +export interface CreateTokenCommandOutput extends CreateTokenResponse, __MetadataBearer { +} +declare const CreateTokenCommand_base: { + new (input: CreateTokenCommandInput): import("@smithy/smithy-client").CommandImpl; + new (__0_0: CreateTokenCommandInput): import("@smithy/smithy-client").CommandImpl; + getEndpointParameterInstructions(): import("@smithy/middleware-endpoint").EndpointParameterInstructions; +}; +/** + *

Creates and returns access and refresh tokens for clients that are authenticated using + * client secrets. The access token can be used to fetch short-lived credentials for the assigned + * AWS accounts or to access application APIs using bearer authentication.

+ * @example + * Use a bare-bones client and the command you need to make an API call. + * ```javascript + * import { SSOOIDCClient, CreateTokenCommand } from "@aws-sdk/client-sso-oidc"; // ES Modules import + * // const { SSOOIDCClient, CreateTokenCommand } = require("@aws-sdk/client-sso-oidc"); // CommonJS import + * const client = new SSOOIDCClient(config); + * const input = { // CreateTokenRequest + * clientId: "STRING_VALUE", // required + * clientSecret: "STRING_VALUE", // required + * grantType: "STRING_VALUE", // required + * deviceCode: "STRING_VALUE", + * code: "STRING_VALUE", + * refreshToken: "STRING_VALUE", + * scope: [ // Scopes + * "STRING_VALUE", + * ], + * redirectUri: "STRING_VALUE", + * codeVerifier: "STRING_VALUE", + * }; + * const command = new CreateTokenCommand(input); + * const response = await client.send(command); + * // { // CreateTokenResponse + * // accessToken: "STRING_VALUE", + * // tokenType: "STRING_VALUE", + * // expiresIn: Number("int"), + * // refreshToken: "STRING_VALUE", + * // idToken: "STRING_VALUE", + * // }; + * + * ``` + * + * @param CreateTokenCommandInput - {@link CreateTokenCommandInput} + * @returns {@link CreateTokenCommandOutput} + * @see {@link CreateTokenCommandInput} for command's `input` shape. + * @see {@link CreateTokenCommandOutput} for command's `response` shape. + * @see {@link SSOOIDCClientResolvedConfig | config} for SSOOIDCClient's `config` shape. + * + * @throws {@link AccessDeniedException} (client fault) + *

You do not have sufficient access to perform this action.

+ * + * @throws {@link AuthorizationPendingException} (client fault) + *

Indicates that a request to authorize a client with an access user session token is + * pending.

+ * + * @throws {@link ExpiredTokenException} (client fault) + *

Indicates that the token issued by the service is expired and is no longer valid.

+ * + * @throws {@link InternalServerException} (server fault) + *

Indicates that an error from the service occurred while trying to process a + * request.

+ * + * @throws {@link InvalidClientException} (client fault) + *

Indicates that the clientId or clientSecret in the request is + * invalid. For example, this can occur when a client sends an incorrect clientId or + * an expired clientSecret.

+ * + * @throws {@link InvalidGrantException} (client fault) + *

Indicates that a request contains an invalid grant. This can occur if a client makes a + * CreateToken request with an invalid grant type.

+ * + * @throws {@link InvalidRequestException} (client fault) + *

Indicates that something is wrong with the input to the request. For example, a required + * parameter might be missing or out of range.

+ * + * @throws {@link InvalidScopeException} (client fault) + *

Indicates that the scope provided in the request is invalid.

+ * + * @throws {@link SlowDownException} (client fault) + *

Indicates that the client is making the request too frequently and is more than the + * service can handle.

+ * + * @throws {@link UnauthorizedClientException} (client fault) + *

Indicates that the client is not currently authorized to make the request. This can happen + * when a clientId is not issued for a public client.

+ * + * @throws {@link UnsupportedGrantTypeException} (client fault) + *

Indicates that the grant type in the request is not supported by the service.

+ * + * @throws {@link SSOOIDCServiceException} + *

Base exception class for all service exceptions from SSOOIDC service.

+ * + * + * @example Call OAuth/OIDC /token endpoint for Device Code grant with Secret authentication + * ```javascript + * // + * const input = { + * clientId: "_yzkThXVzLWVhc3QtMQEXAMPLECLIENTID", + * clientSecret: "VERYLONGSECRETeyJraWQiOiJrZXktMTU2NDAyODA5OSIsImFsZyI6IkhTMzg0In0", + * deviceCode: "yJraWQiOiJrZXktMTU2Njk2ODA4OCIsImFsZyI6IkhTMzIn0EXAMPLEDEVICECODE", + * grantType: "urn:ietf:params:oauth:grant-type:device-code" + * }; + * const command = new CreateTokenCommand(input); + * const response = await client.send(command); + * /* response is + * { + * accessToken: "aoal-YigITUDiNX1xZwOMXM5MxOWDL0E0jg9P6_C_jKQPxS_SKCP6f0kh1Up4g7TtvQqkMnD-GJiU_S1gvug6SrggAkc0:MGYCMQD3IatVjV7jAJU91kK3PkS/SfA2wtgWzOgZWDOR7sDGN9t0phCZz5It/aes/3C1Zj0CMQCKWOgRaiz6AIhza3DSXQNMLjRKXC8F8ceCsHlgYLMZ7hZidEXAMPLEACCESSTOKEN", + * expiresIn: 1579729529, + * refreshToken: "aorvJYubGpU6i91YnH7Mfo-AT2fIVa1zCfA_Rvq9yjVKIP3onFmmykuQ7E93y2I-9Nyj-A_sVvMufaLNL0bqnDRtgAkc0:MGUCMFrRsktMRVlWaOR70XGMFGLL0SlcCw4DiYveIiOVx1uK9BbD0gvAddsW3UTLozXKMgIxAJ3qxUvjpnlLIOaaKOoa/FuNgqJVvr9GMwDtnAtlh9iZzAkEXAMPLEREFRESHTOKEN", + * tokenType: "Bearer" + * } + * *\/ + * ``` + * + * @example Call OAuth/OIDC /token endpoint for Refresh Token grant with Secret authentication + * ```javascript + * // + * const input = { + * clientId: "_yzkThXVzLWVhc3QtMQEXAMPLECLIENTID", + * clientSecret: "VERYLONGSECRETeyJraWQiOiJrZXktMTU2NDAyODA5OSIsImFsZyI6IkhTMzg0In0", + * grantType: "refresh_token", + * refreshToken: "aorvJYubGpU6i91YnH7Mfo-AT2fIVa1zCfA_Rvq9yjVKIP3onFmmykuQ7E93y2I-9Nyj-A_sVvMufaLNL0bqnDRtgAkc0:MGUCMFrRsktMRVlWaOR70XGMFGLL0SlcCw4DiYveIiOVx1uK9BbD0gvAddsW3UTLozXKMgIxAJ3qxUvjpnlLIOaaKOoa/FuNgqJVvr9GMwDtnAtlh9iZzAkEXAMPLEREFRESHTOKEN", + * scope: [ + * "codewhisperer:completions" + * ] + * }; + * const command = new CreateTokenCommand(input); + * const response = await client.send(command); + * /* response is + * { + * accessToken: 
"aoal-YigITUDiNX1xZwOMXM5MxOWDL0E0jg9P6_C_jKQPxS_SKCP6f0kh1Up4g7TtvQqkMnD-GJiU_S1gvug6SrggAkc0:MGYCMQD3IatVjV7jAJU91kK3PkS/SfA2wtgWzOgZWDOR7sDGN9t0phCZz5It/aes/3C1Zj0CMQCKWOgRaiz6AIhza3DSXQNMLjRKXC8F8ceCsHlgYLMZ7hZidEXAMPLEACCESSTOKEN", + * expiresIn: 1579729529, + * refreshToken: "aorvJYubGpU6i91YnH7Mfo-AT2fIVa1zCfA_Rvq9yjVKIP3onFmmykuQ7E93y2I-9Nyj-A_sVvMufaLNL0bqnDRtgAkc0:MGUCMFrRsktMRVlWaOR70XGMFGLL0SlcCw4DiYveIiOVx1uK9BbD0gvAddsW3UTLozXKMgIxAJ3qxUvjpnlLIOaaKOoa/FuNgqJVvr9GMwDtnAtlh9iZzAkEXAMPLEREFRESHTOKEN", + * tokenType: "Bearer" + * } + * *\/ + * ``` + * + * @public + */ +export declare class CreateTokenCommand extends CreateTokenCommand_base { + /** @internal type navigation helper, not in runtime. */ + protected static __types: { + api: { + input: CreateTokenRequest; + output: CreateTokenResponse; + }; + sdk: { + input: CreateTokenCommandInput; + output: CreateTokenCommandOutput; + }; + }; +} diff --git a/amplify/functions/downloadDocument/node_modules/@aws-sdk/nested-clients/dist-types/submodules/sso-oidc/commands/index.d.ts b/amplify/functions/downloadDocument/node_modules/@aws-sdk/nested-clients/dist-types/submodules/sso-oidc/commands/index.d.ts new file mode 100644 index 0000000..09214ca --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@aws-sdk/nested-clients/dist-types/submodules/sso-oidc/commands/index.d.ts @@ -0,0 +1 @@ +export * from "./CreateTokenCommand"; diff --git a/amplify/functions/downloadDocument/node_modules/@aws-sdk/nested-clients/dist-types/submodules/sso-oidc/endpoint/EndpointParameters.d.ts b/amplify/functions/downloadDocument/node_modules/@aws-sdk/nested-clients/dist-types/submodules/sso-oidc/endpoint/EndpointParameters.d.ts new file mode 100644 index 0000000..23f42e3 --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@aws-sdk/nested-clients/dist-types/submodules/sso-oidc/endpoint/EndpointParameters.d.ts @@ -0,0 +1,40 @@ +import { Endpoint, EndpointParameters as __EndpointParameters, EndpointV2, 
Provider } from "@smithy/types"; +/** + * @public + */ +export interface ClientInputEndpointParameters { + region?: string | Provider; + useDualstackEndpoint?: boolean | Provider; + useFipsEndpoint?: boolean | Provider; + endpoint?: string | Provider | Endpoint | Provider | EndpointV2 | Provider; +} +export type ClientResolvedEndpointParameters = ClientInputEndpointParameters & { + defaultSigningName: string; +}; +export declare const resolveClientEndpointParameters: (options: T & ClientInputEndpointParameters) => T & ClientInputEndpointParameters & { + defaultSigningName: string; +}; +export declare const commonParams: { + readonly UseFIPS: { + readonly type: "builtInParams"; + readonly name: "useFipsEndpoint"; + }; + readonly Endpoint: { + readonly type: "builtInParams"; + readonly name: "endpoint"; + }; + readonly Region: { + readonly type: "builtInParams"; + readonly name: "region"; + }; + readonly UseDualStack: { + readonly type: "builtInParams"; + readonly name: "useDualstackEndpoint"; + }; +}; +export interface EndpointParameters extends __EndpointParameters { + Region?: string; + UseDualStack?: boolean; + UseFIPS?: boolean; + Endpoint?: string; +} diff --git a/amplify/functions/downloadDocument/node_modules/@aws-sdk/nested-clients/dist-types/submodules/sso-oidc/endpoint/endpointResolver.d.ts b/amplify/functions/downloadDocument/node_modules/@aws-sdk/nested-clients/dist-types/submodules/sso-oidc/endpoint/endpointResolver.d.ts new file mode 100644 index 0000000..70a8eae --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@aws-sdk/nested-clients/dist-types/submodules/sso-oidc/endpoint/endpointResolver.d.ts @@ -0,0 +1,5 @@ +import { EndpointV2, Logger } from "@smithy/types"; +import { EndpointParameters } from "./EndpointParameters"; +export declare const defaultEndpointResolver: (endpointParams: EndpointParameters, context?: { + logger?: Logger; +}) => EndpointV2; diff --git 
a/amplify/functions/downloadDocument/node_modules/@aws-sdk/nested-clients/dist-types/submodules/sso-oidc/endpoint/ruleset.d.ts b/amplify/functions/downloadDocument/node_modules/@aws-sdk/nested-clients/dist-types/submodules/sso-oidc/endpoint/ruleset.d.ts new file mode 100644 index 0000000..4b23899 --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@aws-sdk/nested-clients/dist-types/submodules/sso-oidc/endpoint/ruleset.d.ts @@ -0,0 +1,2 @@ +import { RuleSetObject } from "@smithy/types"; +export declare const ruleSet: RuleSetObject; diff --git a/amplify/functions/downloadDocument/node_modules/@aws-sdk/nested-clients/dist-types/submodules/sso-oidc/extensionConfiguration.d.ts b/amplify/functions/downloadDocument/node_modules/@aws-sdk/nested-clients/dist-types/submodules/sso-oidc/extensionConfiguration.d.ts new file mode 100644 index 0000000..c78de85 --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@aws-sdk/nested-clients/dist-types/submodules/sso-oidc/extensionConfiguration.d.ts @@ -0,0 +1,9 @@ +import { AwsRegionExtensionConfiguration } from "@aws-sdk/types"; +import { HttpHandlerExtensionConfiguration } from "@smithy/protocol-http"; +import { DefaultExtensionConfiguration } from "@smithy/types"; +import { HttpAuthExtensionConfiguration } from "./auth/httpAuthExtensionConfiguration"; +/** + * @internal + */ +export interface SSOOIDCExtensionConfiguration extends HttpHandlerExtensionConfiguration, DefaultExtensionConfiguration, AwsRegionExtensionConfiguration, HttpAuthExtensionConfiguration { +} diff --git a/amplify/functions/downloadDocument/node_modules/@aws-sdk/nested-clients/dist-types/submodules/sso-oidc/index.d.ts b/amplify/functions/downloadDocument/node_modules/@aws-sdk/nested-clients/dist-types/submodules/sso-oidc/index.d.ts new file mode 100644 index 0000000..54c46dd --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@aws-sdk/nested-clients/dist-types/submodules/sso-oidc/index.d.ts @@ -0,0 +1,51 @@ +/** + *

IAM Identity Center OpenID Connect (OIDC) is a web service that enables a client (such as CLI or a + * native application) to register with IAM Identity Center. The service also enables the client to fetch the + * user’s access token upon successful authentication and authorization with IAM Identity Center.

+ *

+ * API namespaces + *

+ *

IAM Identity Center uses the sso and identitystore API namespaces. IAM Identity Center + * OpenID Connect uses the sso-oidc namespace.

+ *

+ * Considerations for using this guide + *

+ *

Before you begin using this guide, we recommend that you first review the following + * important information about how the IAM Identity Center OIDC service works.

+ *
    + *
  • + *

    The IAM Identity Center OIDC service currently implements only the portions of the OAuth 2.0 Device + * Authorization Grant standard (https://tools.ietf.org/html/rfc8628) that are necessary to enable single + * sign-on authentication with the CLI.

    + *
  • + *
  • + *

    With older versions of the CLI, the service only emits OIDC access tokens, so to + * obtain a new token, users must explicitly re-authenticate. To access the OIDC flow that + * supports token refresh and doesn’t require re-authentication, update to the latest CLI + * version (1.27.10 for CLI V1 and 2.9.0 for CLI V2) with support for OIDC token refresh + * and configurable IAM Identity Center session durations. For more information, see Configure Amazon Web Services access portal session duration .

    + *
  • + *
  • + *

    The access tokens provided by this service grant access to all Amazon Web Services account + * entitlements assigned to an IAM Identity Center user, not just a particular application.

    + *
  • + *
  • + *

    The documentation in this guide does not describe the mechanism to convert the access + * token into Amazon Web Services Auth (“sigv4”) credentials for use with IAM-protected Amazon Web Services service + * endpoints. For more information, see GetRoleCredentials in the IAM Identity Center Portal API Reference + * Guide.

    + *
  • + *
+ *

For general information about IAM Identity Center, see What is + * IAM Identity Center? in the IAM Identity Center User Guide.

+ * + * @packageDocumentation + */ +export * from "./SSOOIDCClient"; +export * from "./SSOOIDC"; +export { ClientInputEndpointParameters } from "./endpoint/EndpointParameters"; +export type { RuntimeExtension } from "./runtimeExtensions"; +export type { SSOOIDCExtensionConfiguration } from "./extensionConfiguration"; +export * from "./commands"; +export * from "./models"; +export { SSOOIDCServiceException } from "./models/SSOOIDCServiceException"; diff --git a/amplify/functions/downloadDocument/node_modules/@aws-sdk/nested-clients/dist-types/submodules/sso-oidc/models/SSOOIDCServiceException.d.ts b/amplify/functions/downloadDocument/node_modules/@aws-sdk/nested-clients/dist-types/submodules/sso-oidc/models/SSOOIDCServiceException.d.ts new file mode 100644 index 0000000..d45f71a --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@aws-sdk/nested-clients/dist-types/submodules/sso-oidc/models/SSOOIDCServiceException.d.ts @@ -0,0 +1,14 @@ +import { ServiceException as __ServiceException, ServiceExceptionOptions as __ServiceExceptionOptions } from "@smithy/smithy-client"; +export type { __ServiceExceptionOptions }; +export { __ServiceException }; +/** + * @public + * + * Base exception class for all service exceptions from SSOOIDC service. 
+ */ +export declare class SSOOIDCServiceException extends __ServiceException { + /** + * @internal + */ + constructor(options: __ServiceExceptionOptions); +} diff --git a/amplify/functions/downloadDocument/node_modules/@aws-sdk/nested-clients/dist-types/submodules/sso-oidc/models/index.d.ts b/amplify/functions/downloadDocument/node_modules/@aws-sdk/nested-clients/dist-types/submodules/sso-oidc/models/index.d.ts new file mode 100644 index 0000000..09c5d6e --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@aws-sdk/nested-clients/dist-types/submodules/sso-oidc/models/index.d.ts @@ -0,0 +1 @@ +export * from "./models_0"; diff --git a/amplify/functions/downloadDocument/node_modules/@aws-sdk/nested-clients/dist-types/submodules/sso-oidc/models/models_0.d.ts b/amplify/functions/downloadDocument/node_modules/@aws-sdk/nested-clients/dist-types/submodules/sso-oidc/models/models_0.d.ts new file mode 100644 index 0000000..2d3c3f1 --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@aws-sdk/nested-clients/dist-types/submodules/sso-oidc/models/models_0.d.ts @@ -0,0 +1,387 @@ +import { ExceptionOptionType as __ExceptionOptionType } from "@smithy/smithy-client"; +import { SSOOIDCServiceException as __BaseException } from "./SSOOIDCServiceException"; +/** + *

You do not have sufficient access to perform this action.

+ * @public + */ +export declare class AccessDeniedException extends __BaseException { + readonly name: "AccessDeniedException"; + readonly $fault: "client"; + /** + *

Single error code. For this exception the value will be access_denied.

+ * @public + */ + error?: string | undefined; + /** + *

Human-readable text providing additional information, used to assist the client developer + * in understanding the error that occurred.

+ * @public + */ + error_description?: string | undefined; + /** + * @internal + */ + constructor(opts: __ExceptionOptionType); +} +/** + *

Indicates that a request to authorize a client with an access user session token is + * pending.

+ * @public + */ +export declare class AuthorizationPendingException extends __BaseException { + readonly name: "AuthorizationPendingException"; + readonly $fault: "client"; + /** + *

Single error code. For this exception the value will be + * authorization_pending.

+ * @public + */ + error?: string | undefined; + /** + *

Human-readable text providing additional information, used to assist the client developer + * in understanding the error that occurred.

+ * @public + */ + error_description?: string | undefined; + /** + * @internal + */ + constructor(opts: __ExceptionOptionType); +} +/** + * @public + */ +export interface CreateTokenRequest { + /** + *

The unique identifier string for the client or application. This value comes from the + * result of the RegisterClient API.

+ * @public + */ + clientId: string | undefined; + /** + *

A secret string generated for the client. This value should come from the persisted result + * of the RegisterClient API.

+ * @public + */ + clientSecret: string | undefined; + /** + *

Supports the following OAuth grant types: Authorization Code, Device Code, and Refresh + * Token. Specify one of the following values, depending on the grant type that you want:

+ *

* Authorization Code - authorization_code + *

+ *

* Device Code - urn:ietf:params:oauth:grant-type:device_code + *

+ *

* Refresh Token - refresh_token + *

+ * @public + */ + grantType: string | undefined; + /** + *

Used only when calling this API for the Device Code grant type. This short-lived code is + * used to identify this authorization request. This comes from the result of the StartDeviceAuthorization API.

+ * @public + */ + deviceCode?: string | undefined; + /** + *

Used only when calling this API for the Authorization Code grant type. The short-lived + * code is used to identify this authorization request.

+ * @public + */ + code?: string | undefined; + /** + *

Used only when calling this API for the Refresh Token grant type. This token is used to + * refresh short-lived tokens, such as the access token, that might expire.

+ *

For more information about the features and limitations of the current IAM Identity Center OIDC + * implementation, see Considerations for Using this Guide in the IAM Identity Center + * OIDC API Reference.

+ * @public + */ + refreshToken?: string | undefined; + /** + *

The list of scopes for which authorization is requested. The access token that is issued + * is limited to the scopes that are granted. If this value is not specified, IAM Identity Center authorizes + * all scopes that are configured for the client during the call to RegisterClient.

+ * @public + */ + scope?: string[] | undefined; + /** + *

Used only when calling this API for the Authorization Code grant type. This value + * specifies the location of the client or application that has registered to receive the + * authorization code.

+ * @public + */ + redirectUri?: string | undefined; + /** + *

Used only when calling this API for the Authorization Code grant type. This value is + * generated by the client and presented to validate the original code challenge value the client + * passed at authorization time.

+ * @public + */ + codeVerifier?: string | undefined; +} +/** + * @internal + */ +export declare const CreateTokenRequestFilterSensitiveLog: (obj: CreateTokenRequest) => any; +/** + * @public + */ +export interface CreateTokenResponse { + /** + *

A bearer token to access Amazon Web Services accounts and applications assigned to a user.

+ * @public + */ + accessToken?: string | undefined; + /** + *

Used to notify the client that the returned token is an access token. The supported token + * type is Bearer.

+ * @public + */ + tokenType?: string | undefined; + /** + *

Indicates the time in seconds when an access token will expire.

+ * @public + */ + expiresIn?: number | undefined; + /** + *

A token that, if present, can be used to refresh a previously issued access token that + * might have expired.

+ *

For more information about the features and limitations of the current IAM Identity Center OIDC + * implementation, see Considerations for Using this Guide in the IAM Identity Center + * OIDC API Reference.

+ * @public + */ + refreshToken?: string | undefined; + /** + *

The idToken is not implemented or supported. For more information about the + * features and limitations of the current IAM Identity Center OIDC implementation, see + * Considerations for Using this Guide in the IAM Identity Center + * OIDC API Reference.

+ *

A JSON Web Token (JWT) that identifies who is associated with the issued access token. + *

+ * @public + */ + idToken?: string | undefined; +} +/** + * @internal + */ +export declare const CreateTokenResponseFilterSensitiveLog: (obj: CreateTokenResponse) => any; +/** + *

Indicates that the token issued by the service is expired and is no longer valid.

+ * @public + */ +export declare class ExpiredTokenException extends __BaseException { + readonly name: "ExpiredTokenException"; + readonly $fault: "client"; + /** + *

Single error code. For this exception the value will be expired_token.

+ * @public + */ + error?: string | undefined; + /** + *

Human-readable text providing additional information, used to assist the client developer + * in understanding the error that occurred.

+ * @public + */ + error_description?: string | undefined; + /** + * @internal + */ + constructor(opts: __ExceptionOptionType); +} +/** + *

Indicates that an error from the service occurred while trying to process a + * request.

+ * @public + */ +export declare class InternalServerException extends __BaseException { + readonly name: "InternalServerException"; + readonly $fault: "server"; + /** + *

Single error code. For this exception the value will be server_error.

+ * @public + */ + error?: string | undefined; + /** + *

Human-readable text providing additional information, used to assist the client developer + * in understanding the error that occurred.

+ * @public + */ + error_description?: string | undefined; + /** + * @internal + */ + constructor(opts: __ExceptionOptionType); +} +/** + *

Indicates that the clientId or clientSecret in the request is + * invalid. For example, this can occur when a client sends an incorrect clientId or + * an expired clientSecret.

+ * @public + */ +export declare class InvalidClientException extends __BaseException { + readonly name: "InvalidClientException"; + readonly $fault: "client"; + /** + *

Single error code. For this exception the value will be + * invalid_client.

+ * @public + */ + error?: string | undefined; + /** + *

Human-readable text providing additional information, used to assist the client developer + * in understanding the error that occurred.

+ * @public + */ + error_description?: string | undefined; + /** + * @internal + */ + constructor(opts: __ExceptionOptionType); +} +/** + *

Indicates that a request contains an invalid grant. This can occur if a client makes a + * CreateToken request with an invalid grant type.

+ * @public + */ +export declare class InvalidGrantException extends __BaseException { + readonly name: "InvalidGrantException"; + readonly $fault: "client"; + /** + *

Single error code. For this exception the value will be invalid_grant.

+ * @public + */ + error?: string | undefined; + /** + *

Human-readable text providing additional information, used to assist the client developer + * in understanding the error that occurred.

+ * @public + */ + error_description?: string | undefined; + /** + * @internal + */ + constructor(opts: __ExceptionOptionType); +} +/** + *

Indicates that something is wrong with the input to the request. For example, a required + * parameter might be missing or out of range.

+ * @public + */ +export declare class InvalidRequestException extends __BaseException { + readonly name: "InvalidRequestException"; + readonly $fault: "client"; + /** + *

Single error code. For this exception the value will be + * invalid_request.

+ * @public + */ + error?: string | undefined; + /** + *

Human-readable text providing additional information, used to assist the client developer + * in understanding the error that occurred.

+ * @public + */ + error_description?: string | undefined; + /** + * @internal + */ + constructor(opts: __ExceptionOptionType); +} +/** + *

Indicates that the scope provided in the request is invalid.

+ * @public + */ +export declare class InvalidScopeException extends __BaseException { + readonly name: "InvalidScopeException"; + readonly $fault: "client"; + /** + *

Single error code. For this exception the value will be invalid_scope.

+ * @public + */ + error?: string | undefined; + /** + *

Human-readable text providing additional information, used to assist the client developer + * in understanding the error that occurred.

+ * @public + */ + error_description?: string | undefined; + /** + * @internal + */ + constructor(opts: __ExceptionOptionType); +} +/** + *

Indicates that the client is making the request too frequently and is more than the + * service can handle.

+ * @public + */ +export declare class SlowDownException extends __BaseException { + readonly name: "SlowDownException"; + readonly $fault: "client"; + /** + *

Single error code. For this exception the value will be slow_down.

+ * @public + */ + error?: string | undefined; + /** + *

Human-readable text providing additional information, used to assist the client developer + * in understanding the error that occurred.

+ * @public + */ + error_description?: string | undefined; + /** + * @internal + */ + constructor(opts: __ExceptionOptionType); +} +/** + *

Indicates that the client is not currently authorized to make the request. This can happen + * when a clientId is not issued for a public client.

+ * @public + */ +export declare class UnauthorizedClientException extends __BaseException { + readonly name: "UnauthorizedClientException"; + readonly $fault: "client"; + /** + *

Single error code. For this exception the value will be + * unauthorized_client.

+ * @public + */ + error?: string | undefined; + /** + *

Human-readable text providing additional information, used to assist the client developer + * in understanding the error that occurred.

+ * @public + */ + error_description?: string | undefined; + /** + * @internal + */ + constructor(opts: __ExceptionOptionType); +} +/** + *

Indicates that the grant type in the request is not supported by the service.

+ * @public + */ +export declare class UnsupportedGrantTypeException extends __BaseException { + readonly name: "UnsupportedGrantTypeException"; + readonly $fault: "client"; + /** + *

Single error code. For this exception the value will be + * unsupported_grant_type.

+ * @public + */ + error?: string | undefined; + /** + *

Human-readable text providing additional information, used to assist the client developer + * in understanding the error that occurred.

+ * @public + */ + error_description?: string | undefined; + /** + * @internal + */ + constructor(opts: __ExceptionOptionType); +} diff --git a/amplify/functions/downloadDocument/node_modules/@aws-sdk/nested-clients/dist-types/submodules/sso-oidc/protocols/Aws_restJson1.d.ts b/amplify/functions/downloadDocument/node_modules/@aws-sdk/nested-clients/dist-types/submodules/sso-oidc/protocols/Aws_restJson1.d.ts new file mode 100644 index 0000000..d4e38b4 --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@aws-sdk/nested-clients/dist-types/submodules/sso-oidc/protocols/Aws_restJson1.d.ts @@ -0,0 +1,11 @@ +import { HttpRequest as __HttpRequest, HttpResponse as __HttpResponse } from "@smithy/protocol-http"; +import { SerdeContext as __SerdeContext } from "@smithy/types"; +import { CreateTokenCommandInput, CreateTokenCommandOutput } from "../commands/CreateTokenCommand"; +/** + * serializeAws_restJson1CreateTokenCommand + */ +export declare const se_CreateTokenCommand: (input: CreateTokenCommandInput, context: __SerdeContext) => Promise<__HttpRequest>; +/** + * deserializeAws_restJson1CreateTokenCommand + */ +export declare const de_CreateTokenCommand: (output: __HttpResponse, context: __SerdeContext) => Promise; diff --git a/amplify/functions/downloadDocument/node_modules/@aws-sdk/nested-clients/dist-types/submodules/sso-oidc/runtimeConfig.browser.d.ts b/amplify/functions/downloadDocument/node_modules/@aws-sdk/nested-clients/dist-types/submodules/sso-oidc/runtimeConfig.browser.d.ts new file mode 100644 index 0000000..26c727f --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@aws-sdk/nested-clients/dist-types/submodules/sso-oidc/runtimeConfig.browser.d.ts @@ -0,0 +1,57 @@ +import { FetchHttpHandler as RequestHandler } from "@smithy/fetch-http-handler"; +import { SSOOIDCClientConfig } from "./SSOOIDCClient"; +/** + * @internal + */ +export declare const getRuntimeConfig: (config: SSOOIDCClientConfig) => { + runtime: string; + defaultsMode: 
import("@smithy/types").Provider; + bodyLengthChecker: import("@smithy/types").BodyLengthCalculator; + defaultUserAgentProvider: (config?: import("@aws-sdk/util-user-agent-browser").PreviouslyResolved | undefined) => Promise; + maxAttempts: number | import("@smithy/types").Provider; + region: string | import("@smithy/types").Provider; + requestHandler: import("@smithy/protocol-http").HttpHandler | RequestHandler; + retryMode: string | import("@smithy/types").Provider; + sha256: import("@smithy/types").HashConstructor; + streamCollector: import("@smithy/types").StreamCollector; + useDualstackEndpoint: boolean | import("@smithy/types").Provider; + useFipsEndpoint: boolean | import("@smithy/types").Provider; + apiVersion: string; + cacheMiddleware?: boolean | undefined; + urlParser: import("@smithy/types").UrlParser; + base64Decoder: import("@smithy/types").Decoder; + base64Encoder: (_input: string | Uint8Array) => string; + utf8Decoder: import("@smithy/types").Decoder; + utf8Encoder: (input: string | Uint8Array) => string; + disableHostPrefix: boolean; + serviceId: string; + profile?: string | undefined; + logger: import("@smithy/types").Logger; + extensions: import("./runtimeExtensions").RuntimeExtension[]; + customUserAgent?: string | import("@smithy/types").UserAgent | undefined; + userAgentAppId?: string | import("@smithy/types").Provider | undefined; + retryStrategy?: import("@smithy/types").RetryStrategy | import("@smithy/types").RetryStrategyV2 | undefined; + endpoint?: ((string | import("@smithy/types").Endpoint | import("@smithy/types").Provider | import("@smithy/types").EndpointV2 | import("@smithy/types").Provider) & (string | import("@smithy/types").Provider | import("@smithy/types").Endpoint | import("@smithy/types").Provider | import("@smithy/types").EndpointV2 | import("@smithy/types").Provider)) | undefined; + endpointProvider: (endpointParams: import("./endpoint/EndpointParameters").EndpointParameters, context?: { + logger?: 
import("@smithy/types").Logger | undefined; + }) => import("@smithy/types").EndpointV2; + tls?: boolean | undefined; + serviceConfiguredEndpoint?: undefined; + authSchemePreference?: string[] | import("@smithy/types").Provider | undefined; + httpAuthSchemes: import("@smithy/types").HttpAuthScheme[] | ({ + schemeId: string; + identityProvider: (ipc: import("@smithy/types").IdentityProviderConfig) => import("@smithy/types").IdentityProvider | undefined; + signer: import("@aws-sdk/core").AwsSdkSigV4Signer; + } | { + schemeId: string; + identityProvider: (ipc: import("@smithy/types").IdentityProviderConfig) => import("@smithy/types").IdentityProvider | (() => Promise<{}>); + signer: import("@smithy/core").NoAuthSigner; + })[]; + httpAuthSchemeProvider: import("./auth/httpAuthSchemeProvider").SSOOIDCHttpAuthSchemeProvider; + credentials?: import("@smithy/types").AwsCredentialIdentity | import("@smithy/types").AwsCredentialIdentityProvider | undefined; + signer?: import("@smithy/types").RequestSigner | ((authScheme?: import("@smithy/types").AuthScheme | undefined) => Promise) | undefined; + signingEscapePath?: boolean | undefined; + systemClockOffset?: number | undefined; + signingRegion?: string | undefined; + signerConstructor?: (new (options: import("@smithy/signature-v4").SignatureV4Init & import("@smithy/signature-v4").SignatureV4CryptoInit) => import("@smithy/types").RequestSigner) | undefined; +}; diff --git a/amplify/functions/downloadDocument/node_modules/@aws-sdk/nested-clients/dist-types/submodules/sso-oidc/runtimeConfig.d.ts b/amplify/functions/downloadDocument/node_modules/@aws-sdk/nested-clients/dist-types/submodules/sso-oidc/runtimeConfig.d.ts new file mode 100644 index 0000000..1819a97 --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@aws-sdk/nested-clients/dist-types/submodules/sso-oidc/runtimeConfig.d.ts @@ -0,0 +1,57 @@ +import { NodeHttpHandler as RequestHandler } from "@smithy/node-http-handler"; +import { SSOOIDCClientConfig } 
from "./SSOOIDCClient"; +/** + * @internal + */ +export declare const getRuntimeConfig: (config: SSOOIDCClientConfig) => { + runtime: string; + defaultsMode: import("@smithy/types").Provider; + authSchemePreference: string[] | import("@smithy/types").Provider; + bodyLengthChecker: import("@smithy/types").BodyLengthCalculator; + defaultUserAgentProvider: (config?: import("@aws-sdk/util-user-agent-node").PreviouslyResolved | undefined) => Promise; + maxAttempts: number | import("@smithy/types").Provider; + region: string | import("@smithy/types").Provider; + requestHandler: RequestHandler | import("@smithy/protocol-http").HttpHandler; + retryMode: string | import("@smithy/types").Provider; + sha256: import("@smithy/types").HashConstructor; + streamCollector: import("@smithy/types").StreamCollector; + useDualstackEndpoint: boolean | import("@smithy/types").Provider; + useFipsEndpoint: boolean | import("@smithy/types").Provider; + userAgentAppId: string | import("@smithy/types").Provider; + apiVersion: string; + cacheMiddleware?: boolean | undefined; + urlParser: import("@smithy/types").UrlParser; + base64Decoder: import("@smithy/types").Decoder; + base64Encoder: (_input: string | Uint8Array) => string; + utf8Decoder: import("@smithy/types").Decoder; + utf8Encoder: (input: string | Uint8Array) => string; + disableHostPrefix: boolean; + serviceId: string; + profile?: string | undefined; + logger: import("@smithy/types").Logger; + extensions: import("./runtimeExtensions").RuntimeExtension[]; + customUserAgent?: string | import("@smithy/types").UserAgent | undefined; + retryStrategy?: import("@smithy/types").RetryStrategy | import("@smithy/types").RetryStrategyV2 | undefined; + endpoint?: ((string | import("@smithy/types").Endpoint | import("@smithy/types").Provider | import("@smithy/types").EndpointV2 | import("@smithy/types").Provider) & (string | import("@smithy/types").Provider | import("@smithy/types").Endpoint | import("@smithy/types").Provider | 
import("@smithy/types").EndpointV2 | import("@smithy/types").Provider)) | undefined; + endpointProvider: (endpointParams: import("./endpoint/EndpointParameters").EndpointParameters, context?: { + logger?: import("@smithy/types").Logger | undefined; + }) => import("@smithy/types").EndpointV2; + tls?: boolean | undefined; + serviceConfiguredEndpoint?: undefined; + httpAuthSchemes: import("@smithy/types").HttpAuthScheme[] | ({ + schemeId: string; + identityProvider: (ipc: import("@smithy/types").IdentityProviderConfig) => import("@smithy/types").IdentityProvider | undefined; + signer: import("@aws-sdk/core").AwsSdkSigV4Signer; + } | { + schemeId: string; + identityProvider: (ipc: import("@smithy/types").IdentityProviderConfig) => import("@smithy/types").IdentityProvider | (() => Promise<{}>); + signer: import("@smithy/core").NoAuthSigner; + })[]; + httpAuthSchemeProvider: import("./auth/httpAuthSchemeProvider").SSOOIDCHttpAuthSchemeProvider; + credentials?: import("@smithy/types").AwsCredentialIdentity | import("@smithy/types").AwsCredentialIdentityProvider | undefined; + signer?: import("@smithy/types").RequestSigner | ((authScheme?: import("@smithy/types").AuthScheme | undefined) => Promise) | undefined; + signingEscapePath?: boolean | undefined; + systemClockOffset?: number | undefined; + signingRegion?: string | undefined; + signerConstructor?: (new (options: import("@smithy/signature-v4").SignatureV4Init & import("@smithy/signature-v4").SignatureV4CryptoInit) => import("@smithy/types").RequestSigner) | undefined; +}; diff --git a/amplify/functions/downloadDocument/node_modules/@aws-sdk/nested-clients/dist-types/submodules/sso-oidc/runtimeConfig.native.d.ts b/amplify/functions/downloadDocument/node_modules/@aws-sdk/nested-clients/dist-types/submodules/sso-oidc/runtimeConfig.native.d.ts new file mode 100644 index 0000000..86acac7 --- /dev/null +++ 
b/amplify/functions/downloadDocument/node_modules/@aws-sdk/nested-clients/dist-types/submodules/sso-oidc/runtimeConfig.native.d.ts @@ -0,0 +1,56 @@ +import { SSOOIDCClientConfig } from "./SSOOIDCClient"; +/** + * @internal + */ +export declare const getRuntimeConfig: (config: SSOOIDCClientConfig) => { + runtime: string; + sha256: import("@smithy/types").HashConstructor; + requestHandler: import("@smithy/types").NodeHttpHandlerOptions | import("@smithy/types").FetchHttpHandlerOptions | Record | import("@smithy/protocol-http").HttpHandler | import("@smithy/fetch-http-handler").FetchHttpHandler; + apiVersion: string; + cacheMiddleware?: boolean | undefined; + urlParser: import("@smithy/types").UrlParser; + bodyLengthChecker: import("@smithy/types").BodyLengthCalculator; + streamCollector: import("@smithy/types").StreamCollector; + base64Decoder: import("@smithy/types").Decoder; + base64Encoder: (_input: string | Uint8Array) => string; + utf8Decoder: import("@smithy/types").Decoder; + utf8Encoder: (input: string | Uint8Array) => string; + disableHostPrefix: boolean; + serviceId: string; + useDualstackEndpoint: boolean | import("@smithy/types").Provider; + useFipsEndpoint: boolean | import("@smithy/types").Provider; + region: string | import("@smithy/types").Provider; + profile?: string | undefined; + defaultUserAgentProvider: (config?: import("@aws-sdk/util-user-agent-browser").PreviouslyResolved | undefined) => Promise; + maxAttempts: number | import("@smithy/types").Provider; + retryMode: string | import("@smithy/types").Provider; + logger: import("@smithy/types").Logger; + extensions: import("./runtimeExtensions").RuntimeExtension[]; + defaultsMode: import("@smithy/smithy-client").DefaultsMode | import("@smithy/types").Provider; + customUserAgent?: string | import("@smithy/types").UserAgent | undefined; + userAgentAppId?: string | import("@smithy/types").Provider | undefined; + retryStrategy?: import("@smithy/types").RetryStrategy | 
import("@smithy/types").RetryStrategyV2 | undefined; + endpoint?: ((string | import("@smithy/types").Endpoint | import("@smithy/types").Provider | import("@smithy/types").EndpointV2 | import("@smithy/types").Provider) & (string | import("@smithy/types").Provider | import("@smithy/types").Endpoint | import("@smithy/types").Provider | import("@smithy/types").EndpointV2 | import("@smithy/types").Provider)) | undefined; + endpointProvider: (endpointParams: import("./endpoint/EndpointParameters").EndpointParameters, context?: { + logger?: import("@smithy/types").Logger | undefined; + }) => import("@smithy/types").EndpointV2; + tls?: boolean | undefined; + serviceConfiguredEndpoint?: undefined; + authSchemePreference?: string[] | import("@smithy/types").Provider | undefined; + httpAuthSchemes: import("@smithy/types").HttpAuthScheme[] | ({ + schemeId: string; + identityProvider: (ipc: import("@smithy/types").IdentityProviderConfig) => import("@smithy/types").IdentityProvider | undefined; + signer: import("@aws-sdk/core").AwsSdkSigV4Signer; + } | { + schemeId: string; + identityProvider: (ipc: import("@smithy/types").IdentityProviderConfig) => import("@smithy/types").IdentityProvider | (() => Promise<{}>); + signer: import("@smithy/core").NoAuthSigner; + })[]; + httpAuthSchemeProvider: import("./auth/httpAuthSchemeProvider").SSOOIDCHttpAuthSchemeProvider; + credentials?: import("@smithy/types").AwsCredentialIdentity | import("@smithy/types").AwsCredentialIdentityProvider | undefined; + signer?: import("@smithy/types").RequestSigner | ((authScheme?: import("@smithy/types").AuthScheme | undefined) => Promise) | undefined; + signingEscapePath?: boolean | undefined; + systemClockOffset?: number | undefined; + signingRegion?: string | undefined; + signerConstructor?: (new (options: import("@smithy/signature-v4").SignatureV4Init & import("@smithy/signature-v4").SignatureV4CryptoInit) => import("@smithy/types").RequestSigner) | undefined; +}; diff --git 
a/amplify/functions/downloadDocument/node_modules/@aws-sdk/nested-clients/dist-types/submodules/sso-oidc/runtimeConfig.shared.d.ts b/amplify/functions/downloadDocument/node_modules/@aws-sdk/nested-clients/dist-types/submodules/sso-oidc/runtimeConfig.shared.d.ts new file mode 100644 index 0000000..e110017 --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@aws-sdk/nested-clients/dist-types/submodules/sso-oidc/runtimeConfig.shared.d.ts @@ -0,0 +1,32 @@ +import { AwsSdkSigV4Signer } from "@aws-sdk/core"; +import { NoAuthSigner } from "@smithy/core"; +import { IdentityProviderConfig } from "@smithy/types"; +import { SSOOIDCClientConfig } from "./SSOOIDCClient"; +/** + * @internal + */ +export declare const getRuntimeConfig: (config: SSOOIDCClientConfig) => { + apiVersion: string; + base64Decoder: import("@smithy/types").Decoder; + base64Encoder: (_input: string | Uint8Array) => string; + disableHostPrefix: boolean; + endpointProvider: (endpointParams: import("./endpoint/EndpointParameters").EndpointParameters, context?: { + logger?: import("@smithy/types").Logger | undefined; + }) => import("@smithy/types").EndpointV2; + extensions: import("./runtimeExtensions").RuntimeExtension[]; + httpAuthSchemeProvider: import("./auth/httpAuthSchemeProvider").SSOOIDCHttpAuthSchemeProvider; + httpAuthSchemes: import("@smithy/types").HttpAuthScheme[] | ({ + schemeId: string; + identityProvider: (ipc: IdentityProviderConfig) => import("@smithy/types").IdentityProvider | undefined; + signer: AwsSdkSigV4Signer; + } | { + schemeId: string; + identityProvider: (ipc: IdentityProviderConfig) => import("@smithy/types").IdentityProvider | (() => Promise<{}>); + signer: NoAuthSigner; + })[]; + logger: import("@smithy/types").Logger; + serviceId: string; + urlParser: import("@smithy/types").UrlParser; + utf8Decoder: import("@smithy/types").Decoder; + utf8Encoder: (input: string | Uint8Array) => string; +}; diff --git 
a/amplify/functions/downloadDocument/node_modules/@aws-sdk/nested-clients/dist-types/submodules/sso-oidc/runtimeExtensions.d.ts b/amplify/functions/downloadDocument/node_modules/@aws-sdk/nested-clients/dist-types/submodules/sso-oidc/runtimeExtensions.d.ts new file mode 100644 index 0000000..1bdf704 --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@aws-sdk/nested-clients/dist-types/submodules/sso-oidc/runtimeExtensions.d.ts @@ -0,0 +1,17 @@ +import { SSOOIDCExtensionConfiguration } from "./extensionConfiguration"; +/** + * @public + */ +export interface RuntimeExtension { + configure(extensionConfiguration: SSOOIDCExtensionConfiguration): void; +} +/** + * @public + */ +export interface RuntimeExtensionsConfig { + extensions: RuntimeExtension[]; +} +/** + * @internal + */ +export declare const resolveRuntimeExtensions: (runtimeConfig: any, extensions: RuntimeExtension[]) => any; diff --git a/amplify/functions/downloadDocument/node_modules/@aws-sdk/nested-clients/dist-types/submodules/sts/STS.d.ts b/amplify/functions/downloadDocument/node_modules/@aws-sdk/nested-clients/dist-types/submodules/sts/STS.d.ts new file mode 100644 index 0000000..bee83a5 --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@aws-sdk/nested-clients/dist-types/submodules/sts/STS.d.ts @@ -0,0 +1,27 @@ +import { HttpHandlerOptions as __HttpHandlerOptions } from "@smithy/types"; +import { AssumeRoleCommandInput, AssumeRoleCommandOutput } from "./commands/AssumeRoleCommand"; +import { AssumeRoleWithWebIdentityCommandInput, AssumeRoleWithWebIdentityCommandOutput } from "./commands/AssumeRoleWithWebIdentityCommand"; +import { STSClient } from "./STSClient"; +export interface STS { + /** + * @see {@link AssumeRoleCommand} + */ + assumeRole(args: AssumeRoleCommandInput, options?: __HttpHandlerOptions): Promise; + assumeRole(args: AssumeRoleCommandInput, cb: (err: any, data?: AssumeRoleCommandOutput) => void): void; + assumeRole(args: AssumeRoleCommandInput, options: 
__HttpHandlerOptions, cb: (err: any, data?: AssumeRoleCommandOutput) => void): void; + /** + * @see {@link AssumeRoleWithWebIdentityCommand} + */ + assumeRoleWithWebIdentity(args: AssumeRoleWithWebIdentityCommandInput, options?: __HttpHandlerOptions): Promise; + assumeRoleWithWebIdentity(args: AssumeRoleWithWebIdentityCommandInput, cb: (err: any, data?: AssumeRoleWithWebIdentityCommandOutput) => void): void; + assumeRoleWithWebIdentity(args: AssumeRoleWithWebIdentityCommandInput, options: __HttpHandlerOptions, cb: (err: any, data?: AssumeRoleWithWebIdentityCommandOutput) => void): void; +} +/** + * Security Token Service + *

Security Token Service (STS) enables you to request temporary, limited-privilege + * credentials for users. This guide provides descriptions of the STS API. For + * more information about using this service, see Temporary Security Credentials.

+ * @public + */ +export declare class STS extends STSClient implements STS { +} diff --git a/amplify/functions/downloadDocument/node_modules/@aws-sdk/nested-clients/dist-types/submodules/sts/STSClient.d.ts b/amplify/functions/downloadDocument/node_modules/@aws-sdk/nested-clients/dist-types/submodules/sts/STSClient.d.ts new file mode 100644 index 0000000..bd21c4b --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@aws-sdk/nested-clients/dist-types/submodules/sts/STSClient.d.ts @@ -0,0 +1,192 @@ +import { HostHeaderInputConfig, HostHeaderResolvedConfig } from "@aws-sdk/middleware-host-header"; +import { UserAgentInputConfig, UserAgentResolvedConfig } from "@aws-sdk/middleware-user-agent"; +import { RegionInputConfig, RegionResolvedConfig } from "@smithy/config-resolver"; +import { EndpointInputConfig, EndpointResolvedConfig } from "@smithy/middleware-endpoint"; +import { RetryInputConfig, RetryResolvedConfig } from "@smithy/middleware-retry"; +import { HttpHandlerUserInput as __HttpHandlerUserInput } from "@smithy/protocol-http"; +import { Client as __Client, DefaultsMode as __DefaultsMode, SmithyConfiguration as __SmithyConfiguration, SmithyResolvedConfiguration as __SmithyResolvedConfiguration } from "@smithy/smithy-client"; +import { AwsCredentialIdentityProvider, BodyLengthCalculator as __BodyLengthCalculator, CheckOptionalClientConfig as __CheckOptionalClientConfig, ChecksumConstructor as __ChecksumConstructor, Decoder as __Decoder, Encoder as __Encoder, HashConstructor as __HashConstructor, HttpHandlerOptions as __HttpHandlerOptions, Logger as __Logger, Provider as __Provider, Provider, StreamCollector as __StreamCollector, UrlParser as __UrlParser, UserAgent as __UserAgent } from "@smithy/types"; +import { HttpAuthSchemeInputConfig, HttpAuthSchemeResolvedConfig } from "./auth/httpAuthSchemeProvider"; +import { AssumeRoleCommandInput, AssumeRoleCommandOutput } from "./commands/AssumeRoleCommand"; +import { 
AssumeRoleWithWebIdentityCommandInput, AssumeRoleWithWebIdentityCommandOutput } from "./commands/AssumeRoleWithWebIdentityCommand"; +import { ClientInputEndpointParameters, ClientResolvedEndpointParameters, EndpointParameters } from "./endpoint/EndpointParameters"; +import { RuntimeExtension, RuntimeExtensionsConfig } from "./runtimeExtensions"; +export { __Client }; +/** + * @public + */ +export type ServiceInputTypes = AssumeRoleCommandInput | AssumeRoleWithWebIdentityCommandInput; +/** + * @public + */ +export type ServiceOutputTypes = AssumeRoleCommandOutput | AssumeRoleWithWebIdentityCommandOutput; +/** + * @public + */ +export interface ClientDefaults extends Partial<__SmithyConfiguration<__HttpHandlerOptions>> { + /** + * The HTTP handler to use or its constructor options. Fetch in browser and Https in Nodejs. + */ + requestHandler?: __HttpHandlerUserInput; + /** + * A constructor for a class implementing the {@link @smithy/types#ChecksumConstructor} interface + * that computes the SHA-256 HMAC or checksum of a string or binary buffer. + * @internal + */ + sha256?: __ChecksumConstructor | __HashConstructor; + /** + * The function that will be used to convert strings into HTTP endpoints. + * @internal + */ + urlParser?: __UrlParser; + /** + * A function that can calculate the length of a request body. + * @internal + */ + bodyLengthChecker?: __BodyLengthCalculator; + /** + * A function that converts a stream into an array of bytes. + * @internal + */ + streamCollector?: __StreamCollector; + /** + * The function that will be used to convert a base64-encoded string to a byte array. + * @internal + */ + base64Decoder?: __Decoder; + /** + * The function that will be used to convert binary data to a base64-encoded string. + * @internal + */ + base64Encoder?: __Encoder; + /** + * The function that will be used to convert a UTF8-encoded string to a byte array. 
+ * @internal + */ + utf8Decoder?: __Decoder; + /** + * The function that will be used to convert binary data to a UTF-8 encoded string. + * @internal + */ + utf8Encoder?: __Encoder; + /** + * The runtime environment. + * @internal + */ + runtime?: string; + /** + * Disable dynamically changing the endpoint of the client based on the hostPrefix + * trait of an operation. + */ + disableHostPrefix?: boolean; + /** + * Unique service identifier. + * @internal + */ + serviceId?: string; + /** + * Enables IPv6/IPv4 dualstack endpoint. + */ + useDualstackEndpoint?: boolean | __Provider; + /** + * Enables FIPS compatible endpoints. + */ + useFipsEndpoint?: boolean | __Provider; + /** + * The AWS region to which this client will send requests + */ + region?: string | __Provider; + /** + * Setting a client profile is similar to setting a value for the + * AWS_PROFILE environment variable. Setting a profile on a client + * in code only affects the single client instance, unlike AWS_PROFILE. + * + * When set, and only for environments where an AWS configuration + * file exists, fields configurable by this file will be retrieved + * from the specified profile within that file. + * Conflicting code configuration and environment variables will + * still have higher priority. + * + * For client credential resolution that involves checking the AWS + * configuration file, the client's profile (this value) will be + * used unless a different profile is set in the credential + * provider options. + * + */ + profile?: string; + /** + * The provider populating default tracking information to be sent with `user-agent`, `x-amz-user-agent` header + * @internal + */ + defaultUserAgentProvider?: Provider<__UserAgent>; + /** + * Default credentials provider; Not available in browser runtime. + * @deprecated + * @internal + */ + credentialDefaultProvider?: (input: any) => AwsCredentialIdentityProvider; + /** + * Value for how many times a request will be made at most in case of retry. 
+ */ + maxAttempts?: number | __Provider; + /** + * Specifies which retry algorithm to use. + * @see https://docs.aws.amazon.com/AWSJavaScriptSDK/v3/latest/Package/-smithy-util-retry/Enum/RETRY_MODES/ + * + */ + retryMode?: string | __Provider; + /** + * Optional logger for logging debug/info/warn/error. + */ + logger?: __Logger; + /** + * Optional extensions + */ + extensions?: RuntimeExtension[]; + /** + * The {@link @smithy/smithy-client#DefaultsMode} that will be used to determine how certain default configuration options are resolved in the SDK. + */ + defaultsMode?: __DefaultsMode | __Provider<__DefaultsMode>; +} +/** + * @public + */ +export type STSClientConfigType = Partial<__SmithyConfiguration<__HttpHandlerOptions>> & ClientDefaults & UserAgentInputConfig & RetryInputConfig & RegionInputConfig & HostHeaderInputConfig & EndpointInputConfig & HttpAuthSchemeInputConfig & ClientInputEndpointParameters; +/** + * @public + * + * The configuration interface of STSClient class constructor that set the region, credentials and other options. + */ +export interface STSClientConfig extends STSClientConfigType { +} +/** + * @public + */ +export type STSClientResolvedConfigType = __SmithyResolvedConfiguration<__HttpHandlerOptions> & Required & RuntimeExtensionsConfig & UserAgentResolvedConfig & RetryResolvedConfig & RegionResolvedConfig & HostHeaderResolvedConfig & EndpointResolvedConfig & HttpAuthSchemeResolvedConfig & ClientResolvedEndpointParameters; +/** + * @public + * + * The resolved configuration interface of STSClient class. This is resolved and normalized from the {@link STSClientConfig | constructor configuration interface}. + */ +export interface STSClientResolvedConfig extends STSClientResolvedConfigType { +} +/** + * Security Token Service + *

Security Token Service (STS) enables you to request temporary, limited-privilege + * credentials for users. This guide provides descriptions of the STS API. For + * more information about using this service, see Temporary Security Credentials.

+ * @public + */ +export declare class STSClient extends __Client<__HttpHandlerOptions, ServiceInputTypes, ServiceOutputTypes, STSClientResolvedConfig> { + /** + * The resolved configuration of STSClient class. This is resolved and normalized from the {@link STSClientConfig | constructor configuration interface}. + */ + readonly config: STSClientResolvedConfig; + constructor(...[configuration]: __CheckOptionalClientConfig); + /** + * Destroy underlying resources, like sockets. It's usually not necessary to do this. + * However in Node.js, it's best to explicitly shut down the client's agent when it is no longer needed. + * Otherwise, sockets might stay open for quite a long time before the server terminates them. + */ + destroy(): void; +} diff --git a/amplify/functions/downloadDocument/node_modules/@aws-sdk/nested-clients/dist-types/submodules/sts/auth/httpAuthExtensionConfiguration.d.ts b/amplify/functions/downloadDocument/node_modules/@aws-sdk/nested-clients/dist-types/submodules/sts/auth/httpAuthExtensionConfiguration.d.ts new file mode 100644 index 0000000..1066c88 --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@aws-sdk/nested-clients/dist-types/submodules/sts/auth/httpAuthExtensionConfiguration.d.ts @@ -0,0 +1,29 @@ +import { AwsCredentialIdentity, AwsCredentialIdentityProvider, HttpAuthScheme } from "@smithy/types"; +import { STSHttpAuthSchemeProvider } from "./httpAuthSchemeProvider"; +/** + * @internal + */ +export interface HttpAuthExtensionConfiguration { + setHttpAuthScheme(httpAuthScheme: HttpAuthScheme): void; + httpAuthSchemes(): HttpAuthScheme[]; + setHttpAuthSchemeProvider(httpAuthSchemeProvider: STSHttpAuthSchemeProvider): void; + httpAuthSchemeProvider(): STSHttpAuthSchemeProvider; + setCredentials(credentials: AwsCredentialIdentity | AwsCredentialIdentityProvider): void; + credentials(): AwsCredentialIdentity | AwsCredentialIdentityProvider | undefined; +} +/** + * @internal + */ +export type HttpAuthRuntimeConfig = 
Partial<{ + httpAuthSchemes: HttpAuthScheme[]; + httpAuthSchemeProvider: STSHttpAuthSchemeProvider; + credentials: AwsCredentialIdentity | AwsCredentialIdentityProvider; +}>; +/** + * @internal + */ +export declare const getHttpAuthExtensionConfiguration: (runtimeConfig: HttpAuthRuntimeConfig) => HttpAuthExtensionConfiguration; +/** + * @internal + */ +export declare const resolveHttpAuthRuntimeConfig: (config: HttpAuthExtensionConfiguration) => HttpAuthRuntimeConfig; diff --git a/amplify/functions/downloadDocument/node_modules/@aws-sdk/nested-clients/dist-types/submodules/sts/auth/httpAuthSchemeProvider.d.ts b/amplify/functions/downloadDocument/node_modules/@aws-sdk/nested-clients/dist-types/submodules/sts/auth/httpAuthSchemeProvider.d.ts new file mode 100644 index 0000000..8e39cbe --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@aws-sdk/nested-clients/dist-types/submodules/sts/auth/httpAuthSchemeProvider.d.ts @@ -0,0 +1,85 @@ +import { AwsSdkSigV4AuthInputConfig, AwsSdkSigV4AuthResolvedConfig, AwsSdkSigV4PreviouslyResolved } from "@aws-sdk/core"; +import { Client, HandlerExecutionContext, HttpAuthScheme, HttpAuthSchemeParameters, HttpAuthSchemeParametersProvider, HttpAuthSchemeProvider, Provider } from "@smithy/types"; +import { STSClientResolvedConfig } from "../STSClient"; +/** + * @internal + */ +export interface STSHttpAuthSchemeParameters extends HttpAuthSchemeParameters { + region?: string; +} +/** + * @internal + */ +export interface STSHttpAuthSchemeParametersProvider extends HttpAuthSchemeParametersProvider { +} +/** + * @internal + */ +export declare const defaultSTSHttpAuthSchemeParametersProvider: (config: STSClientResolvedConfig, context: HandlerExecutionContext, input: object) => Promise; +/** + * @internal + */ +export interface STSHttpAuthSchemeProvider extends HttpAuthSchemeProvider { +} +/** + * @internal + */ +export declare const defaultSTSHttpAuthSchemeProvider: STSHttpAuthSchemeProvider; +export interface 
StsAuthInputConfig { +} +export interface StsAuthResolvedConfig { + /** + * Reference to STSClient class constructor. + * @internal + */ + stsClientCtor: new (clientConfig: any) => Client; +} +export declare const resolveStsAuthConfig: (input: T & StsAuthInputConfig) => T & StsAuthResolvedConfig; +/** + * @internal + */ +export interface HttpAuthSchemeInputConfig extends StsAuthInputConfig, AwsSdkSigV4AuthInputConfig { + /** + * A comma-separated list of case-sensitive auth scheme names. + * An auth scheme name is a fully qualified auth scheme ID with the namespace prefix trimmed. + * For example, the auth scheme with ID aws.auth#sigv4 is named sigv4. + * @public + */ + authSchemePreference?: string[] | Provider; + /** + * Configuration of HttpAuthSchemes for a client which provides default identity providers and signers per auth scheme. + * @internal + */ + httpAuthSchemes?: HttpAuthScheme[]; + /** + * Configuration of an HttpAuthSchemeProvider for a client which resolves which HttpAuthScheme to use. + * @internal + */ + httpAuthSchemeProvider?: STSHttpAuthSchemeProvider; +} +/** + * @internal + */ +export interface HttpAuthSchemeResolvedConfig extends StsAuthResolvedConfig, AwsSdkSigV4AuthResolvedConfig { + /** + * A comma-separated list of case-sensitive auth scheme names. + * An auth scheme name is a fully qualified auth scheme ID with the namespace prefix trimmed. + * For example, the auth scheme with ID aws.auth#sigv4 is named sigv4. + * @public + */ + readonly authSchemePreference: Provider; + /** + * Configuration of HttpAuthSchemes for a client which provides default identity providers and signers per auth scheme. + * @internal + */ + readonly httpAuthSchemes: HttpAuthScheme[]; + /** + * Configuration of an HttpAuthSchemeProvider for a client which resolves which HttpAuthScheme to use. 
+ * @internal + */ + readonly httpAuthSchemeProvider: STSHttpAuthSchemeProvider; +} +/** + * @internal + */ +export declare const resolveHttpAuthSchemeConfig: (config: T & HttpAuthSchemeInputConfig & AwsSdkSigV4PreviouslyResolved) => T & HttpAuthSchemeResolvedConfig; diff --git a/amplify/functions/downloadDocument/node_modules/@aws-sdk/nested-clients/dist-types/submodules/sts/commands/AssumeRoleCommand.d.ts b/amplify/functions/downloadDocument/node_modules/@aws-sdk/nested-clients/dist-types/submodules/sts/commands/AssumeRoleCommand.d.ts new file mode 100644 index 0000000..f9e6ccd --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@aws-sdk/nested-clients/dist-types/submodules/sts/commands/AssumeRoleCommand.d.ts @@ -0,0 +1,269 @@ +import { Command as $Command } from "@smithy/smithy-client"; +import { MetadataBearer as __MetadataBearer } from "@smithy/types"; +import { AssumeRoleRequest, AssumeRoleResponse } from "../models/models_0"; +import { ServiceInputTypes, ServiceOutputTypes, STSClientResolvedConfig } from "../STSClient"; +/** + * @public + */ +export type { __MetadataBearer }; +export { $Command }; +/** + * @public + * + * The input for {@link AssumeRoleCommand}. + */ +export interface AssumeRoleCommandInput extends AssumeRoleRequest { +} +/** + * @public + * + * The output of {@link AssumeRoleCommand}. + */ +export interface AssumeRoleCommandOutput extends AssumeRoleResponse, __MetadataBearer { +} +declare const AssumeRoleCommand_base: { + new (input: AssumeRoleCommandInput): import("@smithy/smithy-client").CommandImpl; + new (__0_0: AssumeRoleCommandInput): import("@smithy/smithy-client").CommandImpl; + getEndpointParameterInstructions(): import("@smithy/middleware-endpoint").EndpointParameterInstructions; +}; +/** + *

Returns a set of temporary security credentials that you can use to access Amazon Web Services + * resources. These temporary credentials consist of an access key ID, a secret access key, + * and a security token. Typically, you use AssumeRole within your account or for + * cross-account access. For a comparison of AssumeRole with other API operations + * that produce temporary credentials, see Requesting Temporary Security + * Credentials and Compare STS + * credentials in the IAM User Guide.

+ *

+ * Permissions + *

+ *

The temporary security credentials created by AssumeRole can be used to + * make API calls to any Amazon Web Services service with the following exception: You cannot call the + * Amazon Web Services STS GetFederationToken or GetSessionToken API + * operations.

+ *

(Optional) You can pass inline or managed session policies to this operation. You can + * pass a single JSON policy document to use as an inline session policy. You can also specify + * up to 10 managed policy Amazon Resource Names (ARNs) to use as managed session policies. + * The plaintext that you use for both inline and managed session policies can't exceed 2,048 + * characters. Passing policies to this operation returns new + * temporary credentials. The resulting session's permissions are the intersection of the + * role's identity-based policy and the session policies. You can use the role's temporary + * credentials in subsequent Amazon Web Services API calls to access resources in the account that owns + * the role. You cannot use session policies to grant more permissions than those allowed + * by the identity-based policy of the role that is being assumed. For more information, see + * Session + * Policies in the IAM User Guide.

+ *

When you create a role, you create two policies: a role trust policy that specifies + * who can assume the role, and a permissions policy that specifies + * what can be done with the role. You specify the trusted principal + * that is allowed to assume the role in the role trust policy.

+ *

To assume a role from a different account, your Amazon Web Services account must be trusted by the + * role. The trust relationship is defined in the role's trust policy when the role is + * created. That trust policy states which accounts are allowed to delegate that access to + * users in the account.

+ *

A user who wants to access a role in a different account must also have permissions that + * are delegated from the account administrator. The administrator must attach a policy that + * allows the user to call AssumeRole for the ARN of the role in the other + * account.

+ *

To allow a user to assume a role in the same account, you can do either of the + * following:

+ *
    + *
  • + *

    Attach a policy to the user that allows the user to call AssumeRole + * (as long as the role's trust policy trusts the account).

    + *
  • + *
  • + *

    Add the user as a principal directly in the role's trust policy.

    + *
  • + *
+ *

You can do either because the role’s trust policy acts as an IAM resource-based + * policy. When a resource-based policy grants access to a principal in the same account, no + * additional identity-based policy is required. For more information about trust policies and + * resource-based policies, see IAM Policies in the + * IAM User Guide.

+ *

+ * Tags + *

+ *

(Optional) You can pass tag key-value pairs to your session. These tags are called + * session tags. For more information about session tags, see Passing Session Tags in STS in the + * IAM User Guide.

+ *

An administrator must grant you the permissions necessary to pass session tags. The + * administrator can also create granular permissions to allow you to pass only specific + * session tags. For more information, see Tutorial: Using Tags + * for Attribute-Based Access Control in the + * IAM User Guide.

+ *

You can set the session tags as transitive. Transitive tags persist during role + * chaining. For more information, see Chaining Roles + * with Session Tags in the IAM User Guide.

+ *

+ * Using MFA with AssumeRole + *

+ *

(Optional) You can include multi-factor authentication (MFA) information when you call + * AssumeRole. This is useful for cross-account scenarios to ensure that the + * user that assumes the role has been authenticated with an Amazon Web Services MFA device. In that + * scenario, the trust policy of the role being assumed includes a condition that tests for + * MFA authentication. If the caller does not include valid MFA information, the request to + * assume the role is denied. The condition in a trust policy that tests for MFA + * authentication might look like the following example.

+ *

+ * "Condition": \{"Bool": \{"aws:MultiFactorAuthPresent": true\}\} + *

+ *

For more information, see Configuring MFA-Protected API Access + * in the IAM User Guide guide.

+ *

To use MFA with AssumeRole, you pass values for the + * SerialNumber and TokenCode parameters. The + * SerialNumber value identifies the user's hardware or virtual MFA device. + * The TokenCode is the time-based one-time password (TOTP) that the MFA device + * produces.

+ * @example + * Use a bare-bones client and the command you need to make an API call. + * ```javascript + * import { STSClient, AssumeRoleCommand } from "@aws-sdk/client-sts"; // ES Modules import + * // const { STSClient, AssumeRoleCommand } = require("@aws-sdk/client-sts"); // CommonJS import + * const client = new STSClient(config); + * const input = { // AssumeRoleRequest + * RoleArn: "STRING_VALUE", // required + * RoleSessionName: "STRING_VALUE", // required + * PolicyArns: [ // policyDescriptorListType + * { // PolicyDescriptorType + * arn: "STRING_VALUE", + * }, + * ], + * Policy: "STRING_VALUE", + * DurationSeconds: Number("int"), + * Tags: [ // tagListType + * { // Tag + * Key: "STRING_VALUE", // required + * Value: "STRING_VALUE", // required + * }, + * ], + * TransitiveTagKeys: [ // tagKeyListType + * "STRING_VALUE", + * ], + * ExternalId: "STRING_VALUE", + * SerialNumber: "STRING_VALUE", + * TokenCode: "STRING_VALUE", + * SourceIdentity: "STRING_VALUE", + * ProvidedContexts: [ // ProvidedContextsListType + * { // ProvidedContext + * ProviderArn: "STRING_VALUE", + * ContextAssertion: "STRING_VALUE", + * }, + * ], + * }; + * const command = new AssumeRoleCommand(input); + * const response = await client.send(command); + * // { // AssumeRoleResponse + * // Credentials: { // Credentials + * // AccessKeyId: "STRING_VALUE", // required + * // SecretAccessKey: "STRING_VALUE", // required + * // SessionToken: "STRING_VALUE", // required + * // Expiration: new Date("TIMESTAMP"), // required + * // }, + * // AssumedRoleUser: { // AssumedRoleUser + * // AssumedRoleId: "STRING_VALUE", // required + * // Arn: "STRING_VALUE", // required + * // }, + * // PackedPolicySize: Number("int"), + * // SourceIdentity: "STRING_VALUE", + * // }; + * + * ``` + * + * @param AssumeRoleCommandInput - {@link AssumeRoleCommandInput} + * @returns {@link AssumeRoleCommandOutput} + * @see {@link AssumeRoleCommandInput} for command's `input` shape. 
+ * @see {@link AssumeRoleCommandOutput} for command's `response` shape. + * @see {@link STSClientResolvedConfig | config} for STSClient's `config` shape. + * + * @throws {@link ExpiredTokenException} (client fault) + *

The web identity token that was passed is expired or is not valid. Get a new identity + * token from the identity provider and then retry the request.

+ * + * @throws {@link MalformedPolicyDocumentException} (client fault) + *

The request was rejected because the policy document was malformed. The error message + * describes the specific error.

+ * + * @throws {@link PackedPolicyTooLargeException} (client fault) + *

The request was rejected because the total packed size of the session policies and + * session tags combined was too large. An Amazon Web Services conversion compresses the session policy + * document, session policy ARNs, and session tags into a packed binary format that has a + * separate limit. The error message indicates by percentage how close the policies and + * tags are to the upper size limit. For more information, see Passing Session Tags in STS in + * the IAM User Guide.

+ *

You could receive this error even though you meet other defined session policy and + * session tag limits. For more information, see IAM and STS Entity Character Limits in the IAM User + * Guide.

+ * + * @throws {@link RegionDisabledException} (client fault) + *

STS is not activated in the requested region for the account that is being asked to + * generate credentials. The account administrator must use the IAM console to activate + * STS in that region. For more information, see Activating and + * Deactivating STS in an Amazon Web Services Region in the IAM User + * Guide.

+ * + * @throws {@link STSServiceException} + *

Base exception class for all service exceptions from STS service.

+ * + * + * @example To assume a role + * ```javascript + * // + * const input = { + * ExternalId: "123ABC", + * Policy: `{"Version":"2012-10-17","Statement":[{"Sid":"Stmt1","Effect":"Allow","Action":"s3:ListAllMyBuckets","Resource":"*"}]}`, + * RoleArn: "arn:aws:iam::123456789012:role/demo", + * RoleSessionName: "testAssumeRoleSession", + * Tags: [ + * { + * Key: "Project", + * Value: "Unicorn" + * }, + * { + * Key: "Team", + * Value: "Automation" + * }, + * { + * Key: "Cost-Center", + * Value: "12345" + * } + * ], + * TransitiveTagKeys: [ + * "Project", + * "Cost-Center" + * ] + * }; + * const command = new AssumeRoleCommand(input); + * const response = await client.send(command); + * /* response is + * { + * AssumedRoleUser: { + * Arn: "arn:aws:sts::123456789012:assumed-role/demo/Bob", + * AssumedRoleId: "ARO123EXAMPLE123:Bob" + * }, + * Credentials: { + * AccessKeyId: "AKIAIOSFODNN7EXAMPLE", + * Expiration: "2011-07-15T23:28:33.359Z", + * SecretAccessKey: "wJalrXUtnFEMI/K7MDENG/bPxRfiCYzEXAMPLEKEY", + * SessionToken: "AQoDYXdzEPT//////////wEXAMPLEtc764bNrC9SAPBSM22wDOk4x4HIZ8j4FZTwdQWLWsKWHGBuFqwAeMicRXmxfpSPfIeoIYRqTflfKD8YUuwthAx7mSEI/qkPpKPi/kMcGdQrmGdeehM4IC1NtBmUpp2wUE8phUZampKsburEDy0KPkyQDYwT7WZ0wq5VSXDvp75YU9HFvlRd8Tx6q6fE8YQcHNVXAkiY9q6d+xo0rKwT38xVqr7ZD0u0iPPkUL64lIZbqBAz+scqKmlzm8FDrypNC9Yjc8fPOLn9FX9KSYvKTr4rvx3iSIlTJabIQwj2ICCR/oLxBA==" + * }, + * PackedPolicySize: 8 + * } + * *\/ + * ``` + * + * @public + */ +export declare class AssumeRoleCommand extends AssumeRoleCommand_base { + /** @internal type navigation helper, not in runtime. 
*/ + protected static __types: { + api: { + input: AssumeRoleRequest; + output: AssumeRoleResponse; + }; + sdk: { + input: AssumeRoleCommandInput; + output: AssumeRoleCommandOutput; + }; + }; +} diff --git a/amplify/functions/downloadDocument/node_modules/@aws-sdk/nested-clients/dist-types/submodules/sts/commands/AssumeRoleWithWebIdentityCommand.d.ts b/amplify/functions/downloadDocument/node_modules/@aws-sdk/nested-clients/dist-types/submodules/sts/commands/AssumeRoleWithWebIdentityCommand.d.ts new file mode 100644 index 0000000..58d7df8 --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@aws-sdk/nested-clients/dist-types/submodules/sts/commands/AssumeRoleWithWebIdentityCommand.d.ts @@ -0,0 +1,288 @@ +import { Command as $Command } from "@smithy/smithy-client"; +import { MetadataBearer as __MetadataBearer } from "@smithy/types"; +import { AssumeRoleWithWebIdentityRequest, AssumeRoleWithWebIdentityResponse } from "../models/models_0"; +import { ServiceInputTypes, ServiceOutputTypes, STSClientResolvedConfig } from "../STSClient"; +/** + * @public + */ +export type { __MetadataBearer }; +export { $Command }; +/** + * @public + * + * The input for {@link AssumeRoleWithWebIdentityCommand}. + */ +export interface AssumeRoleWithWebIdentityCommandInput extends AssumeRoleWithWebIdentityRequest { +} +/** + * @public + * + * The output of {@link AssumeRoleWithWebIdentityCommand}. + */ +export interface AssumeRoleWithWebIdentityCommandOutput extends AssumeRoleWithWebIdentityResponse, __MetadataBearer { +} +declare const AssumeRoleWithWebIdentityCommand_base: { + new (input: AssumeRoleWithWebIdentityCommandInput): import("@smithy/smithy-client").CommandImpl; + new (__0_0: AssumeRoleWithWebIdentityCommandInput): import("@smithy/smithy-client").CommandImpl; + getEndpointParameterInstructions(): import("@smithy/middleware-endpoint").EndpointParameterInstructions; +}; +/** + *

Returns a set of temporary security credentials for users who have been authenticated in + * a mobile or web application with a web identity provider. Example providers include the + * OAuth 2.0 providers Login with Amazon and Facebook, or any OpenID Connect-compatible + * identity provider such as Google or Amazon Cognito federated identities.

+ * + *

For mobile applications, we recommend that you use Amazon Cognito. You can use Amazon Cognito with the + * Amazon Web Services SDK for iOS Developer Guide and the Amazon Web Services SDK for Android Developer Guide to uniquely + * identify a user. You can also supply the user with a consistent identity throughout the + * lifetime of an application.

+ *

To learn more about Amazon Cognito, see Amazon Cognito identity + * pools in Amazon Cognito Developer Guide.

+ *
+ *

Calling AssumeRoleWithWebIdentity does not require the use of Amazon Web Services + * security credentials. Therefore, you can distribute an application (for example, on mobile + * devices) that requests temporary security credentials without including long-term Amazon Web Services + * credentials in the application. You also don't need to deploy server-based proxy services + * that use long-term Amazon Web Services credentials. Instead, the identity of the caller is validated by + * using a token from the web identity provider. For a comparison of + * AssumeRoleWithWebIdentity with the other API operations that produce + * temporary credentials, see Requesting Temporary Security + * Credentials and Compare STS + * credentials in the IAM User Guide.

+ *

The temporary security credentials returned by this API consist of an access key ID, a + * secret access key, and a security token. Applications can use these temporary security + * credentials to sign calls to Amazon Web Services service API operations.

+ *

+ * Session Duration + *

+ *

By default, the temporary security credentials created by + * AssumeRoleWithWebIdentity last for one hour. However, you can use the + * optional DurationSeconds parameter to specify the duration of your session. + * You can provide a value from 900 seconds (15 minutes) up to the maximum session duration + * setting for the role. This setting can have a value from 1 hour to 12 hours. To learn how + * to view the maximum value for your role, see Update the maximum session duration for a role in the + * IAM User Guide. The maximum session duration limit applies when + * you use the AssumeRole* API operations or the assume-role* CLI + * commands. However the limit does not apply when you use those operations to create a + * console URL. For more information, see Using IAM Roles in the + * IAM User Guide.

+ *

+ * Permissions + *

+ *

The temporary security credentials created by AssumeRoleWithWebIdentity can + * be used to make API calls to any Amazon Web Services service with the following exception: you cannot + * call the STS GetFederationToken or GetSessionToken API + * operations.

+ *

(Optional) You can pass inline or managed session policies to + * this operation. You can pass a single JSON policy document to use as an inline session + * policy. You can also specify up to 10 managed policy Amazon Resource Names (ARNs) to use as + * managed session policies. The plaintext that you use for both inline and managed session + * policies can't exceed 2,048 characters. Passing policies to this operation returns new + * temporary credentials. The resulting session's permissions are the intersection of the + * role's identity-based policy and the session policies. You can use the role's temporary + * credentials in subsequent Amazon Web Services API calls to access resources in the account that owns + * the role. You cannot use session policies to grant more permissions than those allowed + * by the identity-based policy of the role that is being assumed. For more information, see + * Session + * Policies in the IAM User Guide.

+ *

+ * Tags + *

+ *

(Optional) You can configure your IdP to pass attributes into your web identity token as + * session tags. Each session tag consists of a key name and an associated value. For more + * information about session tags, see Passing Session Tags in STS in the + * IAM User Guide.

+ *

You can pass up to 50 session tags. The plaintext session tag keys can’t exceed 128 + * characters and the values can’t exceed 256 characters. For these and additional limits, see + * IAM + * and STS Character Limits in the IAM User Guide.

+ * + *

An Amazon Web Services conversion compresses the passed inline session policy, managed policy ARNs, + * and session tags into a packed binary format that has a separate limit. Your request can + * fail for this limit even if your plaintext meets the other requirements. The + * PackedPolicySize response element indicates by percentage how close the + * policies and tags for your request are to the upper size limit.

+ *
+ *

You can pass a session tag with the same key as a tag that is attached to the role. When + * you do, the session tag overrides the role tag with the same key.

+ *

An administrator must grant you the permissions necessary to pass session tags. The + * administrator can also create granular permissions to allow you to pass only specific + * session tags. For more information, see Tutorial: Using Tags + * for Attribute-Based Access Control in the + * IAM User Guide.

+ *

You can set the session tags as transitive. Transitive tags persist during role + * chaining. For more information, see Chaining Roles + * with Session Tags in the IAM User Guide.

+ *

+ * Identities + *

+ *

Before your application can call AssumeRoleWithWebIdentity, you must have + * an identity token from a supported identity provider and create a role that the application + * can assume. The role that your application assumes must trust the identity provider that is + * associated with the identity token. In other words, the identity provider must be specified + * in the role's trust policy.

+ * + *

Calling AssumeRoleWithWebIdentity can result in an entry in your + * CloudTrail logs. The entry includes the Subject of + * the provided web identity token. We recommend that you avoid using any personally + * identifiable information (PII) in this field. For example, you could instead use a GUID + * or a pairwise identifier, as suggested + * in the OIDC specification.

+ *
+ *

For more information about how to use OIDC federation and the + * AssumeRoleWithWebIdentity API, see the following resources:

+ * + * @example + * Use a bare-bones client and the command you need to make an API call. + * ```javascript + * import { STSClient, AssumeRoleWithWebIdentityCommand } from "@aws-sdk/client-sts"; // ES Modules import + * // const { STSClient, AssumeRoleWithWebIdentityCommand } = require("@aws-sdk/client-sts"); // CommonJS import + * const client = new STSClient(config); + * const input = { // AssumeRoleWithWebIdentityRequest + * RoleArn: "STRING_VALUE", // required + * RoleSessionName: "STRING_VALUE", // required + * WebIdentityToken: "STRING_VALUE", // required + * ProviderId: "STRING_VALUE", + * PolicyArns: [ // policyDescriptorListType + * { // PolicyDescriptorType + * arn: "STRING_VALUE", + * }, + * ], + * Policy: "STRING_VALUE", + * DurationSeconds: Number("int"), + * }; + * const command = new AssumeRoleWithWebIdentityCommand(input); + * const response = await client.send(command); + * // { // AssumeRoleWithWebIdentityResponse + * // Credentials: { // Credentials + * // AccessKeyId: "STRING_VALUE", // required + * // SecretAccessKey: "STRING_VALUE", // required + * // SessionToken: "STRING_VALUE", // required + * // Expiration: new Date("TIMESTAMP"), // required + * // }, + * // SubjectFromWebIdentityToken: "STRING_VALUE", + * // AssumedRoleUser: { // AssumedRoleUser + * // AssumedRoleId: "STRING_VALUE", // required + * // Arn: "STRING_VALUE", // required + * // }, + * // PackedPolicySize: Number("int"), + * // Provider: "STRING_VALUE", + * // Audience: "STRING_VALUE", + * // SourceIdentity: "STRING_VALUE", + * // }; + * + * ``` + * + * @param AssumeRoleWithWebIdentityCommandInput - {@link AssumeRoleWithWebIdentityCommandInput} + * @returns {@link AssumeRoleWithWebIdentityCommandOutput} + * @see {@link AssumeRoleWithWebIdentityCommandInput} for command's `input` shape. + * @see {@link AssumeRoleWithWebIdentityCommandOutput} for command's `response` shape. + * @see {@link STSClientResolvedConfig | config} for STSClient's `config` shape. 
+ * + * @throws {@link ExpiredTokenException} (client fault) + *

The web identity token that was passed is expired or is not valid. Get a new identity + * token from the identity provider and then retry the request.

+ * + * @throws {@link IDPCommunicationErrorException} (client fault) + *

The request could not be fulfilled because the identity provider (IDP) that was asked + * to verify the incoming identity token could not be reached. This is often a transient + * error caused by network conditions. Retry the request a limited number of times so that + * you don't exceed the request rate. If the error persists, the identity provider might be + * down or not responding.

+ * + * @throws {@link IDPRejectedClaimException} (client fault) + *

The identity provider (IdP) reported that authentication failed. This might be because + * the claim is invalid.

+ *

If this error is returned for the AssumeRoleWithWebIdentity operation, it + * can also mean that the claim has expired or has been explicitly revoked.

+ * + * @throws {@link InvalidIdentityTokenException} (client fault) + *

The web identity token that was passed could not be validated by Amazon Web Services. Get a new + * identity token from the identity provider and then retry the request.

+ * + * @throws {@link MalformedPolicyDocumentException} (client fault) + *

The request was rejected because the policy document was malformed. The error message + * describes the specific error.

+ * + * @throws {@link PackedPolicyTooLargeException} (client fault) + *

The request was rejected because the total packed size of the session policies and + * session tags combined was too large. An Amazon Web Services conversion compresses the session policy + * document, session policy ARNs, and session tags into a packed binary format that has a + * separate limit. The error message indicates by percentage how close the policies and + * tags are to the upper size limit. For more information, see Passing Session Tags in STS in + * the IAM User Guide.

+ *

You could receive this error even though you meet other defined session policy and + * session tag limits. For more information, see IAM and STS Entity Character Limits in the IAM User + * Guide.

+ * + * @throws {@link RegionDisabledException} (client fault) + *

STS is not activated in the requested region for the account that is being asked to + * generate credentials. The account administrator must use the IAM console to activate + * STS in that region. For more information, see Activating and + * Deactivating STS in an Amazon Web Services Region in the IAM User + * Guide.

+ * + * @throws {@link STSServiceException} + *

Base exception class for all service exceptions from STS service.

+ * + * + * @example To assume a role as an OpenID Connect-federated user + * ```javascript + * // + * const input = { + * DurationSeconds: 3600, + * Policy: `{"Version":"2012-10-17","Statement":[{"Sid":"Stmt1","Effect":"Allow","Action":"s3:ListAllMyBuckets","Resource":"*"}]}`, + * ProviderId: "www.amazon.com", + * RoleArn: "arn:aws:iam::123456789012:role/FederatedWebIdentityRole", + * RoleSessionName: "app1", + * WebIdentityToken: "Atza%7CIQEBLjAsAhRFiXuWpUXuRvQ9PZL3GMFcYevydwIUFAHZwXZXXXXXXXXJnrulxKDHwy87oGKPznh0D6bEQZTSCzyoCtL_8S07pLpr0zMbn6w1lfVZKNTBdDansFBmtGnIsIapjI6xKR02Yc_2bQ8LZbUXSGm6Ry6_BG7PrtLZtj_dfCTj92xNGed-CrKqjG7nPBjNIL016GGvuS5gSvPRUxWES3VYfm1wl7WTI7jn-Pcb6M-buCgHhFOzTQxod27L9CqnOLio7N3gZAGpsp6n1-AJBOCJckcyXe2c6uD0srOJeZlKUm2eTDVMf8IehDVI0r1QOnTV6KzzAI3OY87Vd_cVMQ" + * }; + * const command = new AssumeRoleWithWebIdentityCommand(input); + * const response = await client.send(command); + * /* response is + * { + * AssumedRoleUser: { + * Arn: "arn:aws:sts::123456789012:assumed-role/FederatedWebIdentityRole/app1", + * AssumedRoleId: "AROACLKWSDQRAOEXAMPLE:app1" + * }, + * Audience: "client.5498841531868486423.1548@apps.example.com", + * Credentials: { + * AccessKeyId: "AKIAIOSFODNN7EXAMPLE", + * Expiration: "2014-10-24T23:00:23Z", + * SecretAccessKey: "wJalrXUtnFEMI/K7MDENG/bPxRfiCYzEXAMPLEKEY", + * SessionToken: "AQoDYXdzEE0a8ANXXXXXXXXNO1ewxE5TijQyp+IEXAMPLE" + * }, + * PackedPolicySize: 123, + * Provider: "www.amazon.com", + * SubjectFromWebIdentityToken: "amzn1.account.AF6RHO7KZU5XRVQJGXK6HEXAMPLE" + * } + * *\/ + * ``` + * + * @public + */ +export declare class AssumeRoleWithWebIdentityCommand extends AssumeRoleWithWebIdentityCommand_base { + /** @internal type navigation helper, not in runtime. 
*/ + protected static __types: { + api: { + input: AssumeRoleWithWebIdentityRequest; + output: AssumeRoleWithWebIdentityResponse; + }; + sdk: { + input: AssumeRoleWithWebIdentityCommandInput; + output: AssumeRoleWithWebIdentityCommandOutput; + }; + }; +} diff --git a/amplify/functions/downloadDocument/node_modules/@aws-sdk/nested-clients/dist-types/submodules/sts/commands/index.d.ts b/amplify/functions/downloadDocument/node_modules/@aws-sdk/nested-clients/dist-types/submodules/sts/commands/index.d.ts new file mode 100644 index 0000000..0f200f5 --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@aws-sdk/nested-clients/dist-types/submodules/sts/commands/index.d.ts @@ -0,0 +1,2 @@ +export * from "./AssumeRoleCommand"; +export * from "./AssumeRoleWithWebIdentityCommand"; diff --git a/amplify/functions/downloadDocument/node_modules/@aws-sdk/nested-clients/dist-types/submodules/sts/defaultRoleAssumers.d.ts b/amplify/functions/downloadDocument/node_modules/@aws-sdk/nested-clients/dist-types/submodules/sts/defaultRoleAssumers.d.ts new file mode 100644 index 0000000..0e25207 --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@aws-sdk/nested-clients/dist-types/submodules/sts/defaultRoleAssumers.d.ts @@ -0,0 +1,23 @@ +import { Pluggable } from "@smithy/types"; +import { DefaultCredentialProvider, RoleAssumer, RoleAssumerWithWebIdentity, STSRoleAssumerOptions } from "./defaultStsRoleAssumers"; +import { ServiceInputTypes, ServiceOutputTypes } from "./STSClient"; +/** + * The default role assumer that used by credential providers when sts:AssumeRole API is needed. + */ +export declare const getDefaultRoleAssumer: (stsOptions?: STSRoleAssumerOptions, stsPlugins?: Pluggable[]) => RoleAssumer; +/** + * The default role assumer that used by credential providers when sts:AssumeRoleWithWebIdentity API is needed. 
+ */ +export declare const getDefaultRoleAssumerWithWebIdentity: (stsOptions?: STSRoleAssumerOptions, stsPlugins?: Pluggable[]) => RoleAssumerWithWebIdentity; +/** + * The default credential providers depend STS client to assume role with desired API: sts:assumeRole, + * sts:assumeRoleWithWebIdentity, etc. This function decorates the default credential provider with role assumers which + * encapsulates the process of calling STS commands. This can only be imported by AWS client packages to avoid circular + * dependencies. + * + * @internal + * + * @deprecated this is no longer needed. Use the defaultProvider directly, + * which will load STS if needed. + */ +export declare const decorateDefaultCredentialProvider: (provider: DefaultCredentialProvider) => DefaultCredentialProvider; diff --git a/amplify/functions/downloadDocument/node_modules/@aws-sdk/nested-clients/dist-types/submodules/sts/defaultStsRoleAssumers.d.ts b/amplify/functions/downloadDocument/node_modules/@aws-sdk/nested-clients/dist-types/submodules/sts/defaultStsRoleAssumers.d.ts new file mode 100644 index 0000000..c4ba0c4 --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@aws-sdk/nested-clients/dist-types/submodules/sts/defaultStsRoleAssumers.d.ts @@ -0,0 +1,43 @@ +import type { CredentialProviderOptions } from "@aws-sdk/types"; +import { AwsCredentialIdentity, Logger, Provider } from "@smithy/types"; +import { AssumeRoleCommandInput } from "./commands/AssumeRoleCommand"; +import { AssumeRoleWithWebIdentityCommandInput } from "./commands/AssumeRoleWithWebIdentityCommand"; +import type { STSClient, STSClientConfig } from "./STSClient"; +/** + * @public + */ +export type STSRoleAssumerOptions = Pick & { + credentialProviderLogger?: Logger; + parentClientConfig?: CredentialProviderOptions["parentClientConfig"]; +}; +/** + * @internal + */ +export type RoleAssumer = (sourceCreds: AwsCredentialIdentity, params: AssumeRoleCommandInput) => Promise; +/** + * The default role assumer that used 
by credential providers when sts:AssumeRole API is needed. + * @internal + */ +export declare const getDefaultRoleAssumer: (stsOptions: STSRoleAssumerOptions, STSClient: new (options: STSClientConfig) => STSClient) => RoleAssumer; +/** + * @internal + */ +export type RoleAssumerWithWebIdentity = (params: AssumeRoleWithWebIdentityCommandInput) => Promise; +/** + * The default role assumer that used by credential providers when sts:AssumeRoleWithWebIdentity API is needed. + * @internal + */ +export declare const getDefaultRoleAssumerWithWebIdentity: (stsOptions: STSRoleAssumerOptions, STSClient: new (options: STSClientConfig) => STSClient) => RoleAssumerWithWebIdentity; +/** + * @internal + */ +export type DefaultCredentialProvider = (input: any) => Provider; +/** + * The default credential providers depend STS client to assume role with desired API: sts:assumeRole, + * sts:assumeRoleWithWebIdentity, etc. This function decorates the default credential provider with role assumers which + * encapsulates the process of calling STS commands. This can only be imported by AWS client packages to avoid circular + * dependencies. 
+ * + * @internal + */ +export declare const decorateDefaultCredentialProvider: (provider: DefaultCredentialProvider) => DefaultCredentialProvider; diff --git a/amplify/functions/downloadDocument/node_modules/@aws-sdk/nested-clients/dist-types/submodules/sts/endpoint/EndpointParameters.d.ts b/amplify/functions/downloadDocument/node_modules/@aws-sdk/nested-clients/dist-types/submodules/sts/endpoint/EndpointParameters.d.ts new file mode 100644 index 0000000..39f6c7e --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@aws-sdk/nested-clients/dist-types/submodules/sts/endpoint/EndpointParameters.d.ts @@ -0,0 +1,46 @@ +import { Endpoint, EndpointParameters as __EndpointParameters, EndpointV2, Provider } from "@smithy/types"; +/** + * @public + */ +export interface ClientInputEndpointParameters { + region?: string | Provider; + useDualstackEndpoint?: boolean | Provider; + useFipsEndpoint?: boolean | Provider; + endpoint?: string | Provider | Endpoint | Provider | EndpointV2 | Provider; + useGlobalEndpoint?: boolean | Provider; +} +export type ClientResolvedEndpointParameters = ClientInputEndpointParameters & { + defaultSigningName: string; +}; +export declare const resolveClientEndpointParameters: (options: T & ClientInputEndpointParameters) => T & ClientInputEndpointParameters & { + defaultSigningName: string; +}; +export declare const commonParams: { + readonly UseGlobalEndpoint: { + readonly type: "builtInParams"; + readonly name: "useGlobalEndpoint"; + }; + readonly UseFIPS: { + readonly type: "builtInParams"; + readonly name: "useFipsEndpoint"; + }; + readonly Endpoint: { + readonly type: "builtInParams"; + readonly name: "endpoint"; + }; + readonly Region: { + readonly type: "builtInParams"; + readonly name: "region"; + }; + readonly UseDualStack: { + readonly type: "builtInParams"; + readonly name: "useDualstackEndpoint"; + }; +}; +export interface EndpointParameters extends __EndpointParameters { + Region?: string; + UseDualStack?: boolean; + 
UseFIPS?: boolean; + Endpoint?: string; + UseGlobalEndpoint?: boolean; +} diff --git a/amplify/functions/downloadDocument/node_modules/@aws-sdk/nested-clients/dist-types/submodules/sts/endpoint/endpointResolver.d.ts b/amplify/functions/downloadDocument/node_modules/@aws-sdk/nested-clients/dist-types/submodules/sts/endpoint/endpointResolver.d.ts new file mode 100644 index 0000000..70a8eae --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@aws-sdk/nested-clients/dist-types/submodules/sts/endpoint/endpointResolver.d.ts @@ -0,0 +1,5 @@ +import { EndpointV2, Logger } from "@smithy/types"; +import { EndpointParameters } from "./EndpointParameters"; +export declare const defaultEndpointResolver: (endpointParams: EndpointParameters, context?: { + logger?: Logger; +}) => EndpointV2; diff --git a/amplify/functions/downloadDocument/node_modules/@aws-sdk/nested-clients/dist-types/submodules/sts/endpoint/ruleset.d.ts b/amplify/functions/downloadDocument/node_modules/@aws-sdk/nested-clients/dist-types/submodules/sts/endpoint/ruleset.d.ts new file mode 100644 index 0000000..4b23899 --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@aws-sdk/nested-clients/dist-types/submodules/sts/endpoint/ruleset.d.ts @@ -0,0 +1,2 @@ +import { RuleSetObject } from "@smithy/types"; +export declare const ruleSet: RuleSetObject; diff --git a/amplify/functions/downloadDocument/node_modules/@aws-sdk/nested-clients/dist-types/submodules/sts/extensionConfiguration.d.ts b/amplify/functions/downloadDocument/node_modules/@aws-sdk/nested-clients/dist-types/submodules/sts/extensionConfiguration.d.ts new file mode 100644 index 0000000..970e12b --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@aws-sdk/nested-clients/dist-types/submodules/sts/extensionConfiguration.d.ts @@ -0,0 +1,9 @@ +import { AwsRegionExtensionConfiguration } from "@aws-sdk/types"; +import { HttpHandlerExtensionConfiguration } from "@smithy/protocol-http"; +import { 
DefaultExtensionConfiguration } from "@smithy/types"; +import { HttpAuthExtensionConfiguration } from "./auth/httpAuthExtensionConfiguration"; +/** + * @internal + */ +export interface STSExtensionConfiguration extends HttpHandlerExtensionConfiguration, DefaultExtensionConfiguration, AwsRegionExtensionConfiguration, HttpAuthExtensionConfiguration { +} diff --git a/amplify/functions/downloadDocument/node_modules/@aws-sdk/nested-clients/dist-types/submodules/sts/index.d.ts b/amplify/functions/downloadDocument/node_modules/@aws-sdk/nested-clients/dist-types/submodules/sts/index.d.ts new file mode 100644 index 0000000..98b87b4 --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@aws-sdk/nested-clients/dist-types/submodules/sts/index.d.ts @@ -0,0 +1,17 @@ +/** + * Security Token Service + *

Security Token Service (STS) enables you to request temporary, limited-privilege + * credentials for users. This guide provides descriptions of the STS API. For + * more information about using this service, see Temporary Security Credentials.

+ * + * @packageDocumentation + */ +export * from "./STSClient"; +export * from "./STS"; +export { ClientInputEndpointParameters } from "./endpoint/EndpointParameters"; +export type { RuntimeExtension } from "./runtimeExtensions"; +export type { STSExtensionConfiguration } from "./extensionConfiguration"; +export * from "./commands"; +export * from "./models"; +export * from "./defaultRoleAssumers"; +export { STSServiceException } from "./models/STSServiceException"; diff --git a/amplify/functions/downloadDocument/node_modules/@aws-sdk/nested-clients/dist-types/submodules/sts/models/STSServiceException.d.ts b/amplify/functions/downloadDocument/node_modules/@aws-sdk/nested-clients/dist-types/submodules/sts/models/STSServiceException.d.ts new file mode 100644 index 0000000..fd1a9a2 --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@aws-sdk/nested-clients/dist-types/submodules/sts/models/STSServiceException.d.ts @@ -0,0 +1,14 @@ +import { ServiceException as __ServiceException, ServiceExceptionOptions as __ServiceExceptionOptions } from "@smithy/smithy-client"; +export type { __ServiceExceptionOptions }; +export { __ServiceException }; +/** + * @public + * + * Base exception class for all service exceptions from STS service. 
+ */ +export declare class STSServiceException extends __ServiceException { + /** + * @internal + */ + constructor(options: __ServiceExceptionOptions); +} diff --git a/amplify/functions/downloadDocument/node_modules/@aws-sdk/nested-clients/dist-types/submodules/sts/models/index.d.ts b/amplify/functions/downloadDocument/node_modules/@aws-sdk/nested-clients/dist-types/submodules/sts/models/index.d.ts new file mode 100644 index 0000000..09c5d6e --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@aws-sdk/nested-clients/dist-types/submodules/sts/models/index.d.ts @@ -0,0 +1 @@ +export * from "./models_0"; diff --git a/amplify/functions/downloadDocument/node_modules/@aws-sdk/nested-clients/dist-types/submodules/sts/models/models_0.d.ts b/amplify/functions/downloadDocument/node_modules/@aws-sdk/nested-clients/dist-types/submodules/sts/models/models_0.d.ts new file mode 100644 index 0000000..5b58b93 --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@aws-sdk/nested-clients/dist-types/submodules/sts/models/models_0.d.ts @@ -0,0 +1,712 @@ +import { ExceptionOptionType as __ExceptionOptionType } from "@smithy/smithy-client"; +import { STSServiceException as __BaseException } from "./STSServiceException"; +/** + *

The identifiers for the temporary security credentials that the operation + * returns.

+ * @public + */ +export interface AssumedRoleUser { + /** + *

A unique identifier that contains the role ID and the role session name of the role that + * is being assumed. The role ID is generated by Amazon Web Services when the role is created.

+ * @public + */ + AssumedRoleId: string | undefined; + /** + *

The ARN of the temporary security credentials that are returned from the AssumeRole action. For more information about ARNs and how to use them in + * policies, see IAM Identifiers in the + * IAM User Guide.

+ * @public + */ + Arn: string | undefined; +} +/** + *

A reference to the IAM managed policy that is passed as a session policy for a role + * session or a federated user session.

+ * @public + */ +export interface PolicyDescriptorType { + /** + *

The Amazon Resource Name (ARN) of the IAM managed policy to use as a session policy + * for the role. For more information about ARNs, see Amazon Resource Names (ARNs) and Amazon Web Services + * Service Namespaces in the Amazon Web Services General Reference.

+ * @public + */ + arn?: string | undefined; +} +/** + *

Contains information about the provided context. This includes the signed and encrypted + * trusted context assertion and the context provider ARN from which the trusted context + * assertion was generated.

+ * @public + */ +export interface ProvidedContext { + /** + *

The context provider ARN from which the trusted context assertion was generated.

+ * @public + */ + ProviderArn?: string | undefined; + /** + *

The signed and encrypted trusted context assertion generated by the context provider. + * The trusted context assertion is signed and encrypted by Amazon Web Services STS.

+ * @public + */ + ContextAssertion?: string | undefined; +} +/** + *

You can pass custom key-value pair attributes when you assume a role or federate a user. + * These are called session tags. You can then use the session tags to control access to + * resources. For more information, see Tagging Amazon Web Services STS Sessions in the + * IAM User Guide.

+ * @public + */ +export interface Tag { + /** + *

The key for a session tag.

+ *

You can pass up to 50 session tags. The plain text session tag keys can’t exceed 128 + * characters. For these and additional limits, see IAM + * and STS Character Limits in the IAM User Guide.

+ * @public + */ + Key: string | undefined; + /** + *

The value for a session tag.

+ *

You can pass up to 50 session tags. The plain text session tag values can’t exceed 256 + * characters. For these and additional limits, see IAM + * and STS Character Limits in the IAM User Guide.

+ * @public + */ + Value: string | undefined; +} +/** + * @public + */ +export interface AssumeRoleRequest { + /** + *

The Amazon Resource Name (ARN) of the role to assume.

+ * @public + */ + RoleArn: string | undefined; + /** + *

An identifier for the assumed role session.

+ *

Use the role session name to uniquely identify a session when the same role is assumed + * by different principals or for different reasons. In cross-account scenarios, the role + * session name is visible to, and can be logged by the account that owns the role. The role + * session name is also used in the ARN of the assumed role principal. This means that + * subsequent cross-account API requests that use the temporary security credentials will + * expose the role session name to the external account in their CloudTrail logs.

+ *

For security purposes, administrators can view this field in CloudTrail logs to help identify who performed an action in Amazon Web Services. Your + * administrator might require that you specify your user name as the session name when you + * assume the role. For more information, see + * sts:RoleSessionName + * .

+ *

The regex used to validate this parameter is a string of characters + * consisting of upper- and lower-case alphanumeric characters with no spaces. You can + * also include underscores or any of the following characters: =,.@-

+ * @public + */ + RoleSessionName: string | undefined; + /** + *

The Amazon Resource Names (ARNs) of the IAM managed policies that you want to use as + * managed session policies. The policies must exist in the same account as the role.

+ *

This parameter is optional. You can provide up to 10 managed policy ARNs. However, the + * plaintext that you use for both inline and managed session policies can't exceed 2,048 + * characters. For more information about ARNs, see Amazon Resource Names (ARNs) and Amazon Web Services + * Service Namespaces in the Amazon Web Services General Reference.

+ * + *

An Amazon Web Services conversion compresses the passed inline session policy, managed policy ARNs, + * and session tags into a packed binary format that has a separate limit. Your request can + * fail for this limit even if your plaintext meets the other requirements. The + * PackedPolicySize response element indicates by percentage how close the + * policies and tags for your request are to the upper size limit.

+ *
+ *

Passing policies to this operation returns new + * temporary credentials. The resulting session's permissions are the intersection of the + * role's identity-based policy and the session policies. You can use the role's temporary + * credentials in subsequent Amazon Web Services API calls to access resources in the account that owns + * the role. You cannot use session policies to grant more permissions than those allowed + * by the identity-based policy of the role that is being assumed. For more information, see + * Session + * Policies in the IAM User Guide.

+ * @public + */ + PolicyArns?: PolicyDescriptorType[] | undefined; + /** + *

An IAM policy in JSON format that you want to use as an inline session policy.

+ *

This parameter is optional. Passing policies to this operation returns new + * temporary credentials. The resulting session's permissions are the intersection of the + * role's identity-based policy and the session policies. You can use the role's temporary + * credentials in subsequent Amazon Web Services API calls to access resources in the account that owns + * the role. You cannot use session policies to grant more permissions than those allowed + * by the identity-based policy of the role that is being assumed. For more information, see + * Session + * Policies in the IAM User Guide.

+ *

The plaintext that you use for both inline and managed session policies can't exceed + * 2,048 characters. The JSON policy characters can be any ASCII character from the space + * character to the end of the valid character list (\u0020 through \u00FF). It can also + * include the tab (\u0009), linefeed (\u000A), and carriage return (\u000D) + * characters.

+ * + *

An Amazon Web Services conversion compresses the passed inline session policy, managed policy ARNs, + * and session tags into a packed binary format that has a separate limit. Your request can + * fail for this limit even if your plaintext meets the other requirements. The + * PackedPolicySize response element indicates by percentage how close the + * policies and tags for your request are to the upper size limit.

+ *
+ *

For more information about role session permissions, see Session + * policies.

+ * @public + */ + Policy?: string | undefined; + /** + *

The duration, in seconds, of the role session. The value specified can range from 900 + * seconds (15 minutes) up to the maximum session duration set for the role. The maximum + * session duration setting can have a value from 1 hour to 12 hours. If you specify a value + * higher than this setting or the administrator setting (whichever is lower), the operation + * fails. For example, if you specify a session duration of 12 hours, but your administrator + * set the maximum session duration to 6 hours, your operation fails.

+ *

Role chaining limits your Amazon Web Services CLI or Amazon Web Services API role session to a maximum of one hour. + * When you use the AssumeRole API operation to assume a role, you can specify + * the duration of your role session with the DurationSeconds parameter. You can + * specify a parameter value of up to 43200 seconds (12 hours), depending on the maximum + * session duration setting for your role. However, if you assume a role using role chaining + * and provide a DurationSeconds parameter value greater than one hour, the + * operation fails. To learn how to view the maximum value for your role, see Update the maximum session duration for a role.

+ *

By default, the value is set to 3600 seconds.

+ * + *

The DurationSeconds parameter is separate from the duration of a console + * session that you might request using the returned credentials. The request to the + * federation endpoint for a console sign-in token takes a SessionDuration + * parameter that specifies the maximum length of the console session. For more + * information, see Creating a URL + * that Enables Federated Users to Access the Amazon Web Services Management Console in the + * IAM User Guide.

+ *
+ * @public + */ + DurationSeconds?: number | undefined; + /** + *

A list of session tags that you want to pass. Each session tag consists of a key name + * and an associated value. For more information about session tags, see Tagging Amazon Web Services STS + * Sessions in the IAM User Guide.

+ *

This parameter is optional. You can pass up to 50 session tags. The plaintext session + * tag keys can’t exceed 128 characters, and the values can’t exceed 256 characters. For these + * and additional limits, see IAM + * and STS Character Limits in the IAM User Guide.

+ * + *

An Amazon Web Services conversion compresses the passed inline session policy, managed policy ARNs, + * and session tags into a packed binary format that has a separate limit. Your request can + * fail for this limit even if your plaintext meets the other requirements. The + * PackedPolicySize response element indicates by percentage how close the + * policies and tags for your request are to the upper size limit.

+ *
+ *

You can pass a session tag with the same key as a tag that is already attached to the + * role. When you do, session tags override a role tag with the same key.

+ *

Tag key–value pairs are not case sensitive, but case is preserved. This means that you + * cannot have separate Department and department tag keys. Assume + * that the role has the Department=Marketing tag and you pass the + * department=engineering session tag. Department + * and department are not saved as separate tags, and the session tag passed in + * the request takes precedence over the role tag.

+ *

Additionally, if you used temporary credentials to perform this operation, the new + * session inherits any transitive session tags from the calling session. If you pass a + * session tag with the same key as an inherited tag, the operation fails. To view the + * inherited tags for a session, see the CloudTrail logs. For more information, see Viewing Session Tags in CloudTrail in the + * IAM User Guide.

+ * @public + */ + Tags?: Tag[] | undefined; + /** + *

A list of keys for session tags that you want to set as transitive. If you set a tag key + * as transitive, the corresponding key and value passes to subsequent sessions in a role + * chain. For more information, see Chaining Roles + * with Session Tags in the IAM User Guide.

+ *

This parameter is optional. The transitive status of a session tag does not impact its + * packed binary size.

+ *

If you choose not to specify a transitive tag key, then no tags are passed from this + * session to any subsequent sessions.

+ * @public + */ + TransitiveTagKeys?: string[] | undefined; + /** + *

A unique identifier that might be required when you assume a role in another account. If + * the administrator of the account to which the role belongs provided you with an external + * ID, then provide that value in the ExternalId parameter. This value can be any + * string, such as a passphrase or account number. A cross-account role is usually set up to + * trust everyone in an account. Therefore, the administrator of the trusting account might + * send an external ID to the administrator of the trusted account. That way, only someone + * with the ID can assume the role, rather than everyone in the account. For more information + * about the external ID, see How to Use an External ID + * When Granting Access to Your Amazon Web Services Resources to a Third Party in the + * IAM User Guide.

+ *

The regex used to validate this parameter is a string of + * characters consisting of upper- and lower-case alphanumeric characters with no spaces. + * You can also include underscores or any of the following characters: =,.@:/-

+ * @public + */ + ExternalId?: string | undefined; + /** + *

The identification number of the MFA device that is associated with the user who is + * making the AssumeRole call. Specify this value if the trust policy of the role + * being assumed includes a condition that requires MFA authentication. The value is either + * the serial number for a hardware device (such as GAHT12345678) or an Amazon + * Resource Name (ARN) for a virtual device (such as + * arn:aws:iam::123456789012:mfa/user).

+ *

The regex used to validate this parameter is a string of characters + * consisting of upper- and lower-case alphanumeric characters with no spaces. You can + * also include underscores or any of the following characters: =,.@-

+ * @public + */ + SerialNumber?: string | undefined; + /** + *

The value provided by the MFA device, if the trust policy of the role being assumed + * requires MFA. (In other words, if the policy includes a condition that tests for MFA). If + * the role being assumed requires MFA and if the TokenCode value is missing or + * expired, the AssumeRole call returns an "access denied" error.

+ *

The format for this parameter, as described by its regex pattern, is a sequence of six + * numeric digits.

+ * @public + */ + TokenCode?: string | undefined; + /** + *

The source identity specified by the principal that is calling the + * AssumeRole operation. The source identity value persists across chained role sessions.

+ *

You can require users to specify a source identity when they assume a role. You do this + * by using the + * sts:SourceIdentity + * condition key in a role trust policy. You + * can use source identity information in CloudTrail logs to determine who took actions with a + * role. You can use the aws:SourceIdentity condition key to further control + * access to Amazon Web Services resources based on the value of source identity. For more information about + * using source identity, see Monitor and control + * actions taken with assumed roles in the + * IAM User Guide.

+ *

The regex used to validate this parameter is a string of characters consisting of upper- + * and lower-case alphanumeric characters with no spaces. You can also include underscores or + * any of the following characters: +=,.@-. You cannot use a value that begins with the text + * aws:. This prefix is reserved for Amazon Web Services internal use.

+ * @public + */ + SourceIdentity?: string | undefined; + /** + *

A list of previously acquired trusted context assertions in the format of a JSON array. + * The trusted context assertion is signed and encrypted by Amazon Web Services STS.

+ *

The following is an example of a ProvidedContext value that includes a + * single trusted context assertion and the ARN of the context provider from which the trusted + * context assertion was generated.

+ *

+ * [\{"ProviderArn":"arn:aws:iam::aws:contextProvider/IdentityCenter","ContextAssertion":"trusted-context-assertion"\}] + *

+ * @public + */ + ProvidedContexts?: ProvidedContext[] | undefined; +} +/** + *

Amazon Web Services credentials for API authentication.

+ * @public + */ +export interface Credentials { + /** + *

The access key ID that identifies the temporary security credentials.

+ * @public + */ + AccessKeyId: string | undefined; + /** + *

The secret access key that can be used to sign requests.

+ * @public + */ + SecretAccessKey: string | undefined; + /** + *

The token that users must pass to the service API to use the temporary + * credentials.

+ * @public + */ + SessionToken: string | undefined; + /** + *

The date on which the current credentials expire.

+ * @public + */ + Expiration: Date | undefined; +} +/** + * @internal + */ +export declare const CredentialsFilterSensitiveLog: (obj: Credentials) => any; +/** + *

Contains the response to a successful AssumeRole request, including + * temporary Amazon Web Services credentials that can be used to make Amazon Web Services requests.

+ * @public + */ +export interface AssumeRoleResponse { + /** + *

The temporary security credentials, which include an access key ID, a secret access key, + * and a security (or session) token.

+ * + *

The size of the security token that STS API operations return is not fixed. We + * strongly recommend that you make no assumptions about the maximum size.

+ *
+ * @public + */ + Credentials?: Credentials | undefined; + /** + *

The Amazon Resource Name (ARN) and the assumed role ID, which are identifiers that you + * can use to refer to the resulting temporary security credentials. For example, you can + * reference these credentials as a principal in a resource-based policy by using the ARN or + * assumed role ID. The ARN and ID include the RoleSessionName that you specified + * when you called AssumeRole.

+ * @public + */ + AssumedRoleUser?: AssumedRoleUser | undefined; + /** + *

A percentage value that indicates the packed size of the session policies and session + * tags combined passed in the request. The request fails if the packed size is greater than 100 percent, + * which means the policies and tags exceeded the allowed space.

+ * @public + */ + PackedPolicySize?: number | undefined; + /** + *

The source identity specified by the principal that is calling the + * AssumeRole operation.

+ *

You can require users to specify a source identity when they assume a role. You do this + * by using the sts:SourceIdentity condition key in a role trust policy. You can + * use source identity information in CloudTrail logs to determine who took actions with a role. + * You can use the aws:SourceIdentity condition key to further control access to + * Amazon Web Services resources based on the value of source identity. For more information about using + * source identity, see Monitor and control + * actions taken with assumed roles in the + * IAM User Guide.

+ *

The regex used to validate this parameter is a string of characters consisting of upper- + * and lower-case alphanumeric characters with no spaces. You can also include underscores or + * any of the following characters: =,.@-

+ * @public + */ + SourceIdentity?: string | undefined; +} +/** + * @internal + */ +export declare const AssumeRoleResponseFilterSensitiveLog: (obj: AssumeRoleResponse) => any; +/** + *

The web identity token that was passed is expired or is not valid. Get a new identity + * token from the identity provider and then retry the request.

+ * @public + */ +export declare class ExpiredTokenException extends __BaseException { + readonly name: "ExpiredTokenException"; + readonly $fault: "client"; + /** + * @internal + */ + constructor(opts: __ExceptionOptionType); +} +/** + *

The request was rejected because the policy document was malformed. The error message + * describes the specific error.

+ * @public + */ +export declare class MalformedPolicyDocumentException extends __BaseException { + readonly name: "MalformedPolicyDocumentException"; + readonly $fault: "client"; + /** + * @internal + */ + constructor(opts: __ExceptionOptionType); +} +/** + *

The request was rejected because the total packed size of the session policies and + * session tags combined was too large. An Amazon Web Services conversion compresses the session policy + * document, session policy ARNs, and session tags into a packed binary format that has a + * separate limit. The error message indicates by percentage how close the policies and + * tags are to the upper size limit. For more information, see Passing Session Tags in STS in + * the IAM User Guide.

+ *

You could receive this error even though you meet other defined session policy and + * session tag limits. For more information, see IAM and STS Entity Character Limits in the IAM User + * Guide.

+ * @public + */ +export declare class PackedPolicyTooLargeException extends __BaseException { + readonly name: "PackedPolicyTooLargeException"; + readonly $fault: "client"; + /** + * @internal + */ + constructor(opts: __ExceptionOptionType); +} +/** + *

STS is not activated in the requested region for the account that is being asked to + * generate credentials. The account administrator must use the IAM console to activate + * STS in that region. For more information, see Activating and + * Deactivating STS in an Amazon Web Services Region in the IAM User + * Guide.

+ * @public + */ +export declare class RegionDisabledException extends __BaseException { + readonly name: "RegionDisabledException"; + readonly $fault: "client"; + /** + * @internal + */ + constructor(opts: __ExceptionOptionType); +} +/** + *

The identity provider (IdP) reported that authentication failed. This might be because + * the claim is invalid.

+ *

If this error is returned for the AssumeRoleWithWebIdentity operation, it + * can also mean that the claim has expired or has been explicitly revoked.

+ * @public + */ +export declare class IDPRejectedClaimException extends __BaseException { + readonly name: "IDPRejectedClaimException"; + readonly $fault: "client"; + /** + * @internal + */ + constructor(opts: __ExceptionOptionType); +} +/** + *

The web identity token that was passed could not be validated by Amazon Web Services. Get a new + * identity token from the identity provider and then retry the request.

+ * @public + */ +export declare class InvalidIdentityTokenException extends __BaseException { + readonly name: "InvalidIdentityTokenException"; + readonly $fault: "client"; + /** + * @internal + */ + constructor(opts: __ExceptionOptionType); +} +/** + * @public + */ +export interface AssumeRoleWithWebIdentityRequest { + /** + *

The Amazon Resource Name (ARN) of the role that the caller is assuming.

+ * + *

Additional considerations apply to Amazon Cognito identity pools that assume cross-account IAM roles. The trust policies of these roles must accept the + * cognito-identity.amazonaws.com service principal and must contain the + * cognito-identity.amazonaws.com:aud condition key to restrict role + * assumption to users from your intended identity pools. A policy that trusts Amazon Cognito + * identity pools without this condition creates a risk that a user from an unintended + * identity pool can assume the role. For more information, see Trust policies for + * IAM roles in Basic (Classic) authentication in the Amazon Cognito + * Developer Guide.

+ *
+ * @public + */ + RoleArn: string | undefined; + /** + *

An identifier for the assumed role session. Typically, you pass the name or identifier + * that is associated with the user who is using your application. That way, the temporary + * security credentials that your application will use are associated with that user. This + * session name is included as part of the ARN and assumed role ID in the + * AssumedRoleUser response element.

+ *

For security purposes, administrators can view this field in CloudTrail logs to help identify who performed an action in Amazon Web Services. Your + * administrator might require that you specify your user name as the session name when you + * assume the role. For more information, see + * sts:RoleSessionName + * .

+ *

The regex used to validate this parameter is a string of characters + * consisting of upper- and lower-case alphanumeric characters with no spaces. You can + * also include underscores or any of the following characters: =,.@-

+ * @public + */ + RoleSessionName: string | undefined; + /** + *

The OAuth 2.0 access token or OpenID Connect ID token that is provided by the identity + * provider. Your application must get this token by authenticating the user who is using your + * application with a web identity provider before the application makes an + * AssumeRoleWithWebIdentity call. Timestamps in the token must be formatted + * as either an integer or a long integer. Tokens must be signed using either RSA keys (RS256, + * RS384, or RS512) or ECDSA keys (ES256, ES384, or ES512).

+ * @public + */ + WebIdentityToken: string | undefined; + /** + *

The fully qualified host component of the domain name of the OAuth 2.0 identity + * provider. Do not specify this value for an OpenID Connect identity provider.

+ *

Currently www.amazon.com and graph.facebook.com are the only + * supported identity providers for OAuth 2.0 access tokens. Do not include URL schemes and + * port numbers.

+ *

Do not specify this value for OpenID Connect ID tokens.

+ * @public + */ + ProviderId?: string | undefined; + /** + *

The Amazon Resource Names (ARNs) of the IAM managed policies that you want to use as + * managed session policies. The policies must exist in the same account as the role.

+ *

This parameter is optional. You can provide up to 10 managed policy ARNs. However, the + * plaintext that you use for both inline and managed session policies can't exceed 2,048 + * characters. For more information about ARNs, see Amazon Resource Names (ARNs) and Amazon Web Services + * Service Namespaces in the Amazon Web Services General Reference.

+ * + *

An Amazon Web Services conversion compresses the passed inline session policy, managed policy ARNs, + * and session tags into a packed binary format that has a separate limit. Your request can + * fail for this limit even if your plaintext meets the other requirements. The + * PackedPolicySize response element indicates by percentage how close the + * policies and tags for your request are to the upper size limit.

+ *
+ *

Passing policies to this operation returns new + * temporary credentials. The resulting session's permissions are the intersection of the + * role's identity-based policy and the session policies. You can use the role's temporary + * credentials in subsequent Amazon Web Services API calls to access resources in the account that owns + * the role. You cannot use session policies to grant more permissions than those allowed + * by the identity-based policy of the role that is being assumed. For more information, see + * Session + * Policies in the IAM User Guide.

+ * @public + */ + PolicyArns?: PolicyDescriptorType[] | undefined; + /** + *

An IAM policy in JSON format that you want to use as an inline session policy.

+ *

This parameter is optional. Passing policies to this operation returns new + * temporary credentials. The resulting session's permissions are the intersection of the + * role's identity-based policy and the session policies. You can use the role's temporary + * credentials in subsequent Amazon Web Services API calls to access resources in the account that owns + * the role. You cannot use session policies to grant more permissions than those allowed + * by the identity-based policy of the role that is being assumed. For more information, see + * Session + * Policies in the IAM User Guide.

+ *

The plaintext that you use for both inline and managed session policies can't exceed + * 2,048 characters. The JSON policy characters can be any ASCII character from the space + * character to the end of the valid character list (\u0020 through \u00FF). It can also + * include the tab (\u0009), linefeed (\u000A), and carriage return (\u000D) + * characters.

+ *

For more information about role session permissions, see Session + * policies.

+ * + *

An Amazon Web Services conversion compresses the passed inline session policy, managed policy ARNs, + * and session tags into a packed binary format that has a separate limit. Your request can + * fail for this limit even if your plaintext meets the other requirements. The + * PackedPolicySize response element indicates by percentage how close the + * policies and tags for your request are to the upper size limit.

+ *
+ * @public + */ + Policy?: string | undefined; + /** + *

The duration, in seconds, of the role session. The value can range from 900 seconds (15 + * minutes) up to the maximum session duration setting for the role. This setting can have a + * value from 1 hour to 12 hours. If you specify a value higher than this setting, the + * operation fails. For example, if you specify a session duration of 12 hours, but your + * administrator set the maximum session duration to 6 hours, your operation fails. To learn + * how to view the maximum value for your role, see View the + * Maximum Session Duration Setting for a Role in the + * IAM User Guide.

+ *

By default, the value is set to 3600 seconds.

+ * + *

The DurationSeconds parameter is separate from the duration of a console + * session that you might request using the returned credentials. The request to the + * federation endpoint for a console sign-in token takes a SessionDuration + * parameter that specifies the maximum length of the console session. For more + * information, see Creating a URL + * that Enables Federated Users to Access the Amazon Web Services Management Console in the + * IAM User Guide.

+ *
+ * @public + */ + DurationSeconds?: number | undefined; +} +/** + * @internal + */ +export declare const AssumeRoleWithWebIdentityRequestFilterSensitiveLog: (obj: AssumeRoleWithWebIdentityRequest) => any; +/** + *

Contains the response to a successful AssumeRoleWithWebIdentity + * request, including temporary Amazon Web Services credentials that can be used to make Amazon Web Services requests.

+ * @public + */ +export interface AssumeRoleWithWebIdentityResponse { + /** + *

The temporary security credentials, which include an access key ID, a secret access key, + * and a security token.

+ * + *

The size of the security token that STS API operations return is not fixed. We + * strongly recommend that you make no assumptions about the maximum size.

+ *
+ * @public + */ + Credentials?: Credentials | undefined; + /** + *

The unique user identifier that is returned by the identity provider. This identifier is + * associated with the WebIdentityToken that was submitted with the + * AssumeRoleWithWebIdentity call. The identifier is typically unique to the + * user and the application that acquired the WebIdentityToken (pairwise + * identifier). For OpenID Connect ID tokens, this field contains the value returned by the + * identity provider as the token's sub (Subject) claim.

+ * @public + */ + SubjectFromWebIdentityToken?: string | undefined; + /** + *

The Amazon Resource Name (ARN) and the assumed role ID, which are identifiers that you + * can use to refer to the resulting temporary security credentials. For example, you can + * reference these credentials as a principal in a resource-based policy by using the ARN or + * assumed role ID. The ARN and ID include the RoleSessionName that you specified + * when you called AssumeRole.

+ * @public + */ + AssumedRoleUser?: AssumedRoleUser | undefined; + /** + *

A percentage value that indicates the packed size of the session policies and session + * tags combined passed in the request. The request fails if the packed size is greater than 100 percent, + * which means the policies and tags exceeded the allowed space.

+ * @public + */ + PackedPolicySize?: number | undefined; + /** + *

The issuing authority of the web identity token presented. For OpenID Connect ID + * tokens, this contains the value of the iss field. For OAuth 2.0 access tokens, + * this contains the value of the ProviderId parameter that was passed in the + * AssumeRoleWithWebIdentity request.

+ * @public + */ + Provider?: string | undefined; + /** + *

The intended audience (also known as client ID) of the web identity token. This is + * traditionally the client identifier issued to the application that requested the web + * identity token.

+ * @public + */ + Audience?: string | undefined; + /** + *

The value of the source identity that is returned in the JSON web token (JWT) from the + * identity provider.

+ *

You can require users to set a source identity value when they assume a role. You do + * this by using the sts:SourceIdentity condition key in a role trust policy. + * That way, actions that are taken with the role are associated with that user. After the + * source identity is set, the value cannot be changed. It is present in the request for all + * actions that are taken by the role and persists across chained role + * sessions. You can configure your identity provider to use an attribute associated with your + * users, like user name or email, as the source identity when calling + * AssumeRoleWithWebIdentity. You do this by adding a claim to the JSON web + * token. To learn more about OIDC tokens and claims, see Using Tokens with User Pools in the Amazon Cognito Developer Guide. + * For more information about using source identity, see Monitor and control + * actions taken with assumed roles in the + * IAM User Guide.

+ *

The regex used to validate this parameter is a string of characters + * consisting of upper- and lower-case alphanumeric characters with no spaces. You can + * also include underscores or any of the following characters: =,.@-

+ * @public + */ + SourceIdentity?: string | undefined; +} +/** + * @internal + */ +export declare const AssumeRoleWithWebIdentityResponseFilterSensitiveLog: (obj: AssumeRoleWithWebIdentityResponse) => any; +/** + *

The request could not be fulfilled because the identity provider (IDP) that was asked + * to verify the incoming identity token could not be reached. This is often a transient + * error caused by network conditions. Retry the request a limited number of times so that + * you don't exceed the request rate. If the error persists, the identity provider might be + * down or not responding.

+ * @public + */ +export declare class IDPCommunicationErrorException extends __BaseException { + readonly name: "IDPCommunicationErrorException"; + readonly $fault: "client"; + /** + * @internal + */ + constructor(opts: __ExceptionOptionType); +} diff --git a/amplify/functions/downloadDocument/node_modules/@aws-sdk/nested-clients/dist-types/submodules/sts/protocols/Aws_query.d.ts b/amplify/functions/downloadDocument/node_modules/@aws-sdk/nested-clients/dist-types/submodules/sts/protocols/Aws_query.d.ts new file mode 100644 index 0000000..db11c3a --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@aws-sdk/nested-clients/dist-types/submodules/sts/protocols/Aws_query.d.ts @@ -0,0 +1,20 @@ +import { HttpRequest as __HttpRequest, HttpResponse as __HttpResponse } from "@smithy/protocol-http"; +import { SerdeContext as __SerdeContext } from "@smithy/types"; +import { AssumeRoleCommandInput, AssumeRoleCommandOutput } from "../commands/AssumeRoleCommand"; +import { AssumeRoleWithWebIdentityCommandInput, AssumeRoleWithWebIdentityCommandOutput } from "../commands/AssumeRoleWithWebIdentityCommand"; +/** + * serializeAws_queryAssumeRoleCommand + */ +export declare const se_AssumeRoleCommand: (input: AssumeRoleCommandInput, context: __SerdeContext) => Promise<__HttpRequest>; +/** + * serializeAws_queryAssumeRoleWithWebIdentityCommand + */ +export declare const se_AssumeRoleWithWebIdentityCommand: (input: AssumeRoleWithWebIdentityCommandInput, context: __SerdeContext) => Promise<__HttpRequest>; +/** + * deserializeAws_queryAssumeRoleCommand + */ +export declare const de_AssumeRoleCommand: (output: __HttpResponse, context: __SerdeContext) => Promise; +/** + * deserializeAws_queryAssumeRoleWithWebIdentityCommand + */ +export declare const de_AssumeRoleWithWebIdentityCommand: (output: __HttpResponse, context: __SerdeContext) => Promise; diff --git 
a/amplify/functions/downloadDocument/node_modules/@aws-sdk/nested-clients/dist-types/submodules/sts/runtimeConfig.browser.d.ts b/amplify/functions/downloadDocument/node_modules/@aws-sdk/nested-clients/dist-types/submodules/sts/runtimeConfig.browser.d.ts new file mode 100644 index 0000000..5513a9b --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@aws-sdk/nested-clients/dist-types/submodules/sts/runtimeConfig.browser.d.ts @@ -0,0 +1,59 @@ +import { FetchHttpHandler as RequestHandler } from "@smithy/fetch-http-handler"; +import { STSClientConfig } from "./STSClient"; +/** + * @internal + */ +export declare const getRuntimeConfig: (config: STSClientConfig) => { + runtime: string; + defaultsMode: import("@smithy/types").Provider; + bodyLengthChecker: import("@smithy/types").BodyLengthCalculator; + credentialDefaultProvider: ((input: any) => import("@smithy/types").AwsCredentialIdentityProvider) | ((_: unknown) => () => Promise); + defaultUserAgentProvider: (config?: import("@aws-sdk/util-user-agent-browser").PreviouslyResolved | undefined) => Promise; + maxAttempts: number | import("@smithy/types").Provider; + region: string | import("@smithy/types").Provider; + requestHandler: import("@smithy/protocol-http").HttpHandler | RequestHandler; + retryMode: string | import("@smithy/types").Provider; + sha256: import("@smithy/types").HashConstructor; + streamCollector: import("@smithy/types").StreamCollector; + useDualstackEndpoint: boolean | import("@smithy/types").Provider; + useFipsEndpoint: boolean | import("@smithy/types").Provider; + apiVersion: string; + cacheMiddleware?: boolean | undefined; + urlParser: import("@smithy/types").UrlParser; + base64Decoder: import("@smithy/types").Decoder; + base64Encoder: (_input: string | Uint8Array) => string; + utf8Decoder: import("@smithy/types").Decoder; + utf8Encoder: (input: string | Uint8Array) => string; + disableHostPrefix: boolean; + serviceId: string; + profile?: string | undefined; + logger: 
import("@smithy/types").Logger; + extensions: import("./runtimeExtensions").RuntimeExtension[]; + customUserAgent?: string | import("@smithy/types").UserAgent | undefined; + userAgentAppId?: string | import("@smithy/types").Provider | undefined; + retryStrategy?: import("@smithy/types").RetryStrategy | import("@smithy/types").RetryStrategyV2 | undefined; + endpoint?: ((string | import("@smithy/types").Endpoint | import("@smithy/types").Provider | import("@smithy/types").EndpointV2 | import("@smithy/types").Provider) & (string | import("@smithy/types").Provider | import("@smithy/types").Endpoint | import("@smithy/types").Provider | import("@smithy/types").EndpointV2 | import("@smithy/types").Provider)) | undefined; + endpointProvider: (params: import("./endpoint/EndpointParameters").EndpointParameters, context?: { + logger?: import("@smithy/types").Logger | undefined; + } | undefined) => import("@smithy/types").EndpointV2; + tls?: boolean | undefined; + serviceConfiguredEndpoint?: undefined; + authSchemePreference?: string[] | import("@smithy/types").Provider | undefined; + httpAuthSchemes: import("@smithy/types").HttpAuthScheme[] | ({ + schemeId: string; + identityProvider: (ipc: import("@smithy/types").IdentityProviderConfig) => import("@smithy/types").IdentityProvider | undefined; + signer: import("@aws-sdk/core").AwsSdkSigV4Signer; + } | { + schemeId: string; + identityProvider: (ipc: import("@smithy/types").IdentityProviderConfig) => import("@smithy/types").IdentityProvider | (() => Promise<{}>); + signer: import("@smithy/core").NoAuthSigner; + })[]; + httpAuthSchemeProvider: import("./auth/httpAuthSchemeProvider").STSHttpAuthSchemeProvider; + credentials?: import("@smithy/types").AwsCredentialIdentity | import("@smithy/types").AwsCredentialIdentityProvider | undefined; + signer?: import("@smithy/types").RequestSigner | ((authScheme?: import("@smithy/types").AuthScheme | undefined) => Promise) | undefined; + signingEscapePath?: boolean | undefined; + 
systemClockOffset?: number | undefined; + signingRegion?: string | undefined; + signerConstructor?: (new (options: import("@smithy/signature-v4").SignatureV4Init & import("@smithy/signature-v4").SignatureV4CryptoInit) => import("@smithy/types").RequestSigner) | undefined; + useGlobalEndpoint?: boolean | import("@smithy/types").Provider | undefined; +}; diff --git a/amplify/functions/downloadDocument/node_modules/@aws-sdk/nested-clients/dist-types/submodules/sts/runtimeConfig.d.ts b/amplify/functions/downloadDocument/node_modules/@aws-sdk/nested-clients/dist-types/submodules/sts/runtimeConfig.d.ts new file mode 100644 index 0000000..c9924b4 --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@aws-sdk/nested-clients/dist-types/submodules/sts/runtimeConfig.d.ts @@ -0,0 +1,57 @@ +import { NoAuthSigner } from "@smithy/core"; +import { NodeHttpHandler as RequestHandler } from "@smithy/node-http-handler"; +import { IdentityProviderConfig } from "@smithy/types"; +import { STSClientConfig } from "./STSClient"; +/** + * @internal + */ +export declare const getRuntimeConfig: (config: STSClientConfig) => { + runtime: string; + defaultsMode: import("@smithy/types").Provider; + authSchemePreference: string[] | import("@smithy/types").Provider; + bodyLengthChecker: import("@smithy/types").BodyLengthCalculator; + defaultUserAgentProvider: (config?: import("@aws-sdk/util-user-agent-node").PreviouslyResolved | undefined) => Promise; + httpAuthSchemes: import("@smithy/types").HttpAuthScheme[] | { + schemeId: string; + identityProvider: (ipc: IdentityProviderConfig) => import("@smithy/types").IdentityProvider | (() => Promise<{}>); + signer: NoAuthSigner; + }[]; + maxAttempts: number | import("@smithy/types").Provider; + region: string | import("@smithy/types").Provider; + requestHandler: RequestHandler | import("@smithy/protocol-http").HttpHandler; + retryMode: string | import("@smithy/types").Provider; + sha256: import("@smithy/types").HashConstructor; + 
streamCollector: import("@smithy/types").StreamCollector; + useDualstackEndpoint: boolean | import("@smithy/types").Provider; + useFipsEndpoint: boolean | import("@smithy/types").Provider; + userAgentAppId: string | import("@smithy/types").Provider; + apiVersion: string; + cacheMiddleware?: boolean | undefined; + urlParser: import("@smithy/types").UrlParser; + base64Decoder: import("@smithy/types").Decoder; + base64Encoder: (_input: string | Uint8Array) => string; + utf8Decoder: import("@smithy/types").Decoder; + utf8Encoder: (input: string | Uint8Array) => string; + disableHostPrefix: boolean; + serviceId: string; + profile?: string | undefined; + credentialDefaultProvider?: ((input: any) => import("@smithy/types").AwsCredentialIdentityProvider) | undefined; + logger: import("@smithy/types").Logger; + extensions: import("./runtimeExtensions").RuntimeExtension[]; + customUserAgent?: string | import("@smithy/types").UserAgent | undefined; + retryStrategy?: import("@smithy/types").RetryStrategy | import("@smithy/types").RetryStrategyV2 | undefined; + endpoint?: ((string | import("@smithy/types").Endpoint | import("@smithy/types").Provider | import("@smithy/types").EndpointV2 | import("@smithy/types").Provider) & (string | import("@smithy/types").Provider | import("@smithy/types").Endpoint | import("@smithy/types").Provider | import("@smithy/types").EndpointV2 | import("@smithy/types").Provider)) | undefined; + endpointProvider: (params: import("./endpoint/EndpointParameters").EndpointParameters, context?: { + logger?: import("@smithy/types").Logger | undefined; + } | undefined) => import("@smithy/types").EndpointV2; + tls?: boolean | undefined; + serviceConfiguredEndpoint?: undefined; + httpAuthSchemeProvider: import("./auth/httpAuthSchemeProvider").STSHttpAuthSchemeProvider; + credentials?: import("@smithy/types").AwsCredentialIdentity | import("@smithy/types").AwsCredentialIdentityProvider | undefined; + signer?: import("@smithy/types").RequestSigner | 
((authScheme?: import("@smithy/types").AuthScheme | undefined) => Promise) | undefined; + signingEscapePath?: boolean | undefined; + systemClockOffset?: number | undefined; + signingRegion?: string | undefined; + signerConstructor?: (new (options: import("@smithy/signature-v4").SignatureV4Init & import("@smithy/signature-v4").SignatureV4CryptoInit) => import("@smithy/types").RequestSigner) | undefined; + useGlobalEndpoint?: boolean | import("@smithy/types").Provider | undefined; +}; diff --git a/amplify/functions/downloadDocument/node_modules/@aws-sdk/nested-clients/dist-types/submodules/sts/runtimeConfig.native.d.ts b/amplify/functions/downloadDocument/node_modules/@aws-sdk/nested-clients/dist-types/submodules/sts/runtimeConfig.native.d.ts new file mode 100644 index 0000000..5bf519f --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@aws-sdk/nested-clients/dist-types/submodules/sts/runtimeConfig.native.d.ts @@ -0,0 +1,58 @@ +import { STSClientConfig } from "./STSClient"; +/** + * @internal + */ +export declare const getRuntimeConfig: (config: STSClientConfig) => { + runtime: string; + sha256: import("@smithy/types").HashConstructor; + requestHandler: import("@smithy/types").NodeHttpHandlerOptions | import("@smithy/types").FetchHttpHandlerOptions | Record | import("@smithy/protocol-http").HttpHandler | import("@smithy/fetch-http-handler").FetchHttpHandler; + apiVersion: string; + cacheMiddleware?: boolean | undefined; + urlParser: import("@smithy/types").UrlParser; + bodyLengthChecker: import("@smithy/types").BodyLengthCalculator; + streamCollector: import("@smithy/types").StreamCollector; + base64Decoder: import("@smithy/types").Decoder; + base64Encoder: (_input: string | Uint8Array) => string; + utf8Decoder: import("@smithy/types").Decoder; + utf8Encoder: (input: string | Uint8Array) => string; + disableHostPrefix: boolean; + serviceId: string; + useDualstackEndpoint: boolean | import("@smithy/types").Provider; + useFipsEndpoint: boolean | 
import("@smithy/types").Provider; + region: string | import("@smithy/types").Provider; + profile?: string | undefined; + defaultUserAgentProvider: (config?: import("@aws-sdk/util-user-agent-browser").PreviouslyResolved | undefined) => Promise; + credentialDefaultProvider: ((input: any) => import("@smithy/types").AwsCredentialIdentityProvider) | ((_: unknown) => () => Promise); + maxAttempts: number | import("@smithy/types").Provider; + retryMode: string | import("@smithy/types").Provider; + logger: import("@smithy/types").Logger; + extensions: import("./runtimeExtensions").RuntimeExtension[]; + defaultsMode: import("@smithy/smithy-client").DefaultsMode | import("@smithy/types").Provider; + customUserAgent?: string | import("@smithy/types").UserAgent | undefined; + userAgentAppId?: string | import("@smithy/types").Provider | undefined; + retryStrategy?: import("@smithy/types").RetryStrategy | import("@smithy/types").RetryStrategyV2 | undefined; + endpoint?: ((string | import("@smithy/types").Endpoint | import("@smithy/types").Provider | import("@smithy/types").EndpointV2 | import("@smithy/types").Provider) & (string | import("@smithy/types").Provider | import("@smithy/types").Endpoint | import("@smithy/types").Provider | import("@smithy/types").EndpointV2 | import("@smithy/types").Provider)) | undefined; + endpointProvider: (params: import("./endpoint/EndpointParameters").EndpointParameters, context?: { + logger?: import("@smithy/types").Logger | undefined; + } | undefined) => import("@smithy/types").EndpointV2; + tls?: boolean | undefined; + serviceConfiguredEndpoint?: undefined; + authSchemePreference?: string[] | import("@smithy/types").Provider | undefined; + httpAuthSchemes: import("@smithy/types").HttpAuthScheme[] | ({ + schemeId: string; + identityProvider: (ipc: import("@smithy/types").IdentityProviderConfig) => import("@smithy/types").IdentityProvider | undefined; + signer: import("@aws-sdk/core").AwsSdkSigV4Signer; + } | { + schemeId: string; + 
identityProvider: (ipc: import("@smithy/types").IdentityProviderConfig) => import("@smithy/types").IdentityProvider | (() => Promise<{}>); + signer: import("@smithy/core").NoAuthSigner; + })[]; + httpAuthSchemeProvider: import("./auth/httpAuthSchemeProvider").STSHttpAuthSchemeProvider; + credentials?: import("@smithy/types").AwsCredentialIdentity | import("@smithy/types").AwsCredentialIdentityProvider | undefined; + signer?: import("@smithy/types").RequestSigner | ((authScheme?: import("@smithy/types").AuthScheme | undefined) => Promise) | undefined; + signingEscapePath?: boolean | undefined; + systemClockOffset?: number | undefined; + signingRegion?: string | undefined; + signerConstructor?: (new (options: import("@smithy/signature-v4").SignatureV4Init & import("@smithy/signature-v4").SignatureV4CryptoInit) => import("@smithy/types").RequestSigner) | undefined; + useGlobalEndpoint?: boolean | import("@smithy/types").Provider | undefined; +}; diff --git a/amplify/functions/downloadDocument/node_modules/@aws-sdk/nested-clients/dist-types/submodules/sts/runtimeConfig.shared.d.ts b/amplify/functions/downloadDocument/node_modules/@aws-sdk/nested-clients/dist-types/submodules/sts/runtimeConfig.shared.d.ts new file mode 100644 index 0000000..5b99276 --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@aws-sdk/nested-clients/dist-types/submodules/sts/runtimeConfig.shared.d.ts @@ -0,0 +1,32 @@ +import { AwsSdkSigV4Signer } from "@aws-sdk/core"; +import { NoAuthSigner } from "@smithy/core"; +import { IdentityProviderConfig } from "@smithy/types"; +import { STSClientConfig } from "./STSClient"; +/** + * @internal + */ +export declare const getRuntimeConfig: (config: STSClientConfig) => { + apiVersion: string; + base64Decoder: import("@smithy/types").Decoder; + base64Encoder: (_input: string | Uint8Array) => string; + disableHostPrefix: boolean; + endpointProvider: (params: import("./endpoint/EndpointParameters").EndpointParameters, context?: { + logger?: 
import("@smithy/types").Logger | undefined; + } | undefined) => import("@smithy/types").EndpointV2; + extensions: import("./runtimeExtensions").RuntimeExtension[]; + httpAuthSchemeProvider: import("./auth/httpAuthSchemeProvider").STSHttpAuthSchemeProvider; + httpAuthSchemes: import("@smithy/types").HttpAuthScheme[] | ({ + schemeId: string; + identityProvider: (ipc: IdentityProviderConfig) => import("@smithy/types").IdentityProvider | undefined; + signer: AwsSdkSigV4Signer; + } | { + schemeId: string; + identityProvider: (ipc: IdentityProviderConfig) => import("@smithy/types").IdentityProvider | (() => Promise<{}>); + signer: NoAuthSigner; + })[]; + logger: import("@smithy/types").Logger; + serviceId: string; + urlParser: import("@smithy/types").UrlParser; + utf8Decoder: import("@smithy/types").Decoder; + utf8Encoder: (input: string | Uint8Array) => string; +}; diff --git a/amplify/functions/downloadDocument/node_modules/@aws-sdk/nested-clients/dist-types/submodules/sts/runtimeExtensions.d.ts b/amplify/functions/downloadDocument/node_modules/@aws-sdk/nested-clients/dist-types/submodules/sts/runtimeExtensions.d.ts new file mode 100644 index 0000000..ebd8567 --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@aws-sdk/nested-clients/dist-types/submodules/sts/runtimeExtensions.d.ts @@ -0,0 +1,17 @@ +import { STSExtensionConfiguration } from "./extensionConfiguration"; +/** + * @public + */ +export interface RuntimeExtension { + configure(extensionConfiguration: STSExtensionConfiguration): void; +} +/** + * @public + */ +export interface RuntimeExtensionsConfig { + extensions: RuntimeExtension[]; +} +/** + * @internal + */ +export declare const resolveRuntimeExtensions: (runtimeConfig: any, extensions: RuntimeExtension[]) => any; diff --git a/amplify/functions/downloadDocument/node_modules/@aws-sdk/nested-clients/dist-types/ts3.4/index.d.ts b/amplify/functions/downloadDocument/node_modules/@aws-sdk/nested-clients/dist-types/ts3.4/index.d.ts new file mode 
100644 index 0000000..cb0ff5c --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@aws-sdk/nested-clients/dist-types/ts3.4/index.d.ts @@ -0,0 +1 @@ +export {}; diff --git a/amplify/functions/downloadDocument/node_modules/@aws-sdk/nested-clients/dist-types/ts3.4/submodules/sso-oidc/SSOOIDC.d.ts b/amplify/functions/downloadDocument/node_modules/@aws-sdk/nested-clients/dist-types/ts3.4/submodules/sso-oidc/SSOOIDC.d.ts new file mode 100644 index 0000000..10ee849 --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@aws-sdk/nested-clients/dist-types/ts3.4/submodules/sso-oidc/SSOOIDC.d.ts @@ -0,0 +1,22 @@ +import { HttpHandlerOptions as __HttpHandlerOptions } from "@smithy/types"; +import { + CreateTokenCommandInput, + CreateTokenCommandOutput, +} from "./commands/CreateTokenCommand"; +import { SSOOIDCClient } from "./SSOOIDCClient"; +export interface SSOOIDC { + createToken( + args: CreateTokenCommandInput, + options?: __HttpHandlerOptions + ): Promise; + createToken( + args: CreateTokenCommandInput, + cb: (err: any, data?: CreateTokenCommandOutput) => void + ): void; + createToken( + args: CreateTokenCommandInput, + options: __HttpHandlerOptions, + cb: (err: any, data?: CreateTokenCommandOutput) => void + ): void; +} +export declare class SSOOIDC extends SSOOIDCClient implements SSOOIDC {} diff --git a/amplify/functions/downloadDocument/node_modules/@aws-sdk/nested-clients/dist-types/ts3.4/submodules/sso-oidc/SSOOIDCClient.d.ts b/amplify/functions/downloadDocument/node_modules/@aws-sdk/nested-clients/dist-types/ts3.4/submodules/sso-oidc/SSOOIDCClient.d.ts new file mode 100644 index 0000000..d44b7af --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@aws-sdk/nested-clients/dist-types/ts3.4/submodules/sso-oidc/SSOOIDCClient.d.ts @@ -0,0 +1,121 @@ +import { + HostHeaderInputConfig, + HostHeaderResolvedConfig, +} from "@aws-sdk/middleware-host-header"; +import { + UserAgentInputConfig, + UserAgentResolvedConfig, +} from 
"@aws-sdk/middleware-user-agent"; +import { + RegionInputConfig, + RegionResolvedConfig, +} from "@smithy/config-resolver"; +import { + EndpointInputConfig, + EndpointResolvedConfig, +} from "@smithy/middleware-endpoint"; +import { + RetryInputConfig, + RetryResolvedConfig, +} from "@smithy/middleware-retry"; +import { HttpHandlerUserInput as __HttpHandlerUserInput } from "@smithy/protocol-http"; +import { + Client as __Client, + DefaultsMode as __DefaultsMode, + SmithyConfiguration as __SmithyConfiguration, + SmithyResolvedConfiguration as __SmithyResolvedConfiguration, +} from "@smithy/smithy-client"; +import { + BodyLengthCalculator as __BodyLengthCalculator, + CheckOptionalClientConfig as __CheckOptionalClientConfig, + ChecksumConstructor as __ChecksumConstructor, + Decoder as __Decoder, + Encoder as __Encoder, + HashConstructor as __HashConstructor, + HttpHandlerOptions as __HttpHandlerOptions, + Logger as __Logger, + Provider as __Provider, + Provider, + StreamCollector as __StreamCollector, + UrlParser as __UrlParser, + UserAgent as __UserAgent, +} from "@smithy/types"; +import { + HttpAuthSchemeInputConfig, + HttpAuthSchemeResolvedConfig, +} from "./auth/httpAuthSchemeProvider"; +import { + CreateTokenCommandInput, + CreateTokenCommandOutput, +} from "./commands/CreateTokenCommand"; +import { + ClientInputEndpointParameters, + ClientResolvedEndpointParameters, + EndpointParameters, +} from "./endpoint/EndpointParameters"; +import { RuntimeExtension, RuntimeExtensionsConfig } from "./runtimeExtensions"; +export { __Client }; +export type ServiceInputTypes = CreateTokenCommandInput; +export type ServiceOutputTypes = CreateTokenCommandOutput; +export interface ClientDefaults + extends Partial<__SmithyConfiguration<__HttpHandlerOptions>> { + requestHandler?: __HttpHandlerUserInput; + sha256?: __ChecksumConstructor | __HashConstructor; + urlParser?: __UrlParser; + bodyLengthChecker?: __BodyLengthCalculator; + streamCollector?: __StreamCollector; + 
base64Decoder?: __Decoder; + base64Encoder?: __Encoder; + utf8Decoder?: __Decoder; + utf8Encoder?: __Encoder; + runtime?: string; + disableHostPrefix?: boolean; + serviceId?: string; + useDualstackEndpoint?: boolean | __Provider; + useFipsEndpoint?: boolean | __Provider; + region?: string | __Provider; + profile?: string; + defaultUserAgentProvider?: Provider<__UserAgent>; + maxAttempts?: number | __Provider; + retryMode?: string | __Provider; + logger?: __Logger; + extensions?: RuntimeExtension[]; + defaultsMode?: __DefaultsMode | __Provider<__DefaultsMode>; +} +export type SSOOIDCClientConfigType = Partial< + __SmithyConfiguration<__HttpHandlerOptions> +> & + ClientDefaults & + UserAgentInputConfig & + RetryInputConfig & + RegionInputConfig & + HostHeaderInputConfig & + EndpointInputConfig & + HttpAuthSchemeInputConfig & + ClientInputEndpointParameters; +export interface SSOOIDCClientConfig extends SSOOIDCClientConfigType {} +export type SSOOIDCClientResolvedConfigType = + __SmithyResolvedConfiguration<__HttpHandlerOptions> & + Required & + RuntimeExtensionsConfig & + UserAgentResolvedConfig & + RetryResolvedConfig & + RegionResolvedConfig & + HostHeaderResolvedConfig & + EndpointResolvedConfig & + HttpAuthSchemeResolvedConfig & + ClientResolvedEndpointParameters; +export interface SSOOIDCClientResolvedConfig + extends SSOOIDCClientResolvedConfigType {} +export declare class SSOOIDCClient extends __Client< + __HttpHandlerOptions, + ServiceInputTypes, + ServiceOutputTypes, + SSOOIDCClientResolvedConfig +> { + readonly config: SSOOIDCClientResolvedConfig; + constructor( + ...[configuration]: __CheckOptionalClientConfig + ); + destroy(): void; +} diff --git a/amplify/functions/downloadDocument/node_modules/@aws-sdk/nested-clients/dist-types/ts3.4/submodules/sso-oidc/auth/httpAuthExtensionConfiguration.d.ts b/amplify/functions/downloadDocument/node_modules/@aws-sdk/nested-clients/dist-types/ts3.4/submodules/sso-oidc/auth/httpAuthExtensionConfiguration.d.ts new file 
mode 100644 index 0000000..c39ba91 --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@aws-sdk/nested-clients/dist-types/ts3.4/submodules/sso-oidc/auth/httpAuthExtensionConfiguration.d.ts @@ -0,0 +1,32 @@ +import { + AwsCredentialIdentity, + AwsCredentialIdentityProvider, + HttpAuthScheme, +} from "@smithy/types"; +import { SSOOIDCHttpAuthSchemeProvider } from "./httpAuthSchemeProvider"; +export interface HttpAuthExtensionConfiguration { + setHttpAuthScheme(httpAuthScheme: HttpAuthScheme): void; + httpAuthSchemes(): HttpAuthScheme[]; + setHttpAuthSchemeProvider( + httpAuthSchemeProvider: SSOOIDCHttpAuthSchemeProvider + ): void; + httpAuthSchemeProvider(): SSOOIDCHttpAuthSchemeProvider; + setCredentials( + credentials: AwsCredentialIdentity | AwsCredentialIdentityProvider + ): void; + credentials(): + | AwsCredentialIdentity + | AwsCredentialIdentityProvider + | undefined; +} +export type HttpAuthRuntimeConfig = Partial<{ + httpAuthSchemes: HttpAuthScheme[]; + httpAuthSchemeProvider: SSOOIDCHttpAuthSchemeProvider; + credentials: AwsCredentialIdentity | AwsCredentialIdentityProvider; +}>; +export declare const getHttpAuthExtensionConfiguration: ( + runtimeConfig: HttpAuthRuntimeConfig +) => HttpAuthExtensionConfiguration; +export declare const resolveHttpAuthRuntimeConfig: ( + config: HttpAuthExtensionConfiguration +) => HttpAuthRuntimeConfig; diff --git a/amplify/functions/downloadDocument/node_modules/@aws-sdk/nested-clients/dist-types/ts3.4/submodules/sso-oidc/auth/httpAuthSchemeProvider.d.ts b/amplify/functions/downloadDocument/node_modules/@aws-sdk/nested-clients/dist-types/ts3.4/submodules/sso-oidc/auth/httpAuthSchemeProvider.d.ts new file mode 100644 index 0000000..936b101 --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@aws-sdk/nested-clients/dist-types/ts3.4/submodules/sso-oidc/auth/httpAuthSchemeProvider.d.ts @@ -0,0 +1,47 @@ +import { + AwsSdkSigV4AuthInputConfig, + AwsSdkSigV4AuthResolvedConfig, + 
AwsSdkSigV4PreviouslyResolved, +} from "@aws-sdk/core"; +import { + HandlerExecutionContext, + HttpAuthScheme, + HttpAuthSchemeParameters, + HttpAuthSchemeParametersProvider, + HttpAuthSchemeProvider, + Provider, +} from "@smithy/types"; +import { SSOOIDCClientResolvedConfig } from "../SSOOIDCClient"; +export interface SSOOIDCHttpAuthSchemeParameters + extends HttpAuthSchemeParameters { + region?: string; +} +export interface SSOOIDCHttpAuthSchemeParametersProvider + extends HttpAuthSchemeParametersProvider< + SSOOIDCClientResolvedConfig, + HandlerExecutionContext, + SSOOIDCHttpAuthSchemeParameters, + object + > {} +export declare const defaultSSOOIDCHttpAuthSchemeParametersProvider: ( + config: SSOOIDCClientResolvedConfig, + context: HandlerExecutionContext, + input: object +) => Promise; +export interface SSOOIDCHttpAuthSchemeProvider + extends HttpAuthSchemeProvider {} +export declare const defaultSSOOIDCHttpAuthSchemeProvider: SSOOIDCHttpAuthSchemeProvider; +export interface HttpAuthSchemeInputConfig extends AwsSdkSigV4AuthInputConfig { + authSchemePreference?: string[] | Provider; + httpAuthSchemes?: HttpAuthScheme[]; + httpAuthSchemeProvider?: SSOOIDCHttpAuthSchemeProvider; +} +export interface HttpAuthSchemeResolvedConfig + extends AwsSdkSigV4AuthResolvedConfig { + readonly authSchemePreference: Provider; + readonly httpAuthSchemes: HttpAuthScheme[]; + readonly httpAuthSchemeProvider: SSOOIDCHttpAuthSchemeProvider; +} +export declare const resolveHttpAuthSchemeConfig: ( + config: T & HttpAuthSchemeInputConfig & AwsSdkSigV4PreviouslyResolved +) => T & HttpAuthSchemeResolvedConfig; diff --git a/amplify/functions/downloadDocument/node_modules/@aws-sdk/nested-clients/dist-types/ts3.4/submodules/sso-oidc/commands/CreateTokenCommand.d.ts b/amplify/functions/downloadDocument/node_modules/@aws-sdk/nested-clients/dist-types/ts3.4/submodules/sso-oidc/commands/CreateTokenCommand.d.ts new file mode 100644 index 0000000..cb1de8b --- /dev/null +++ 
b/amplify/functions/downloadDocument/node_modules/@aws-sdk/nested-clients/dist-types/ts3.4/submodules/sso-oidc/commands/CreateTokenCommand.d.ts @@ -0,0 +1,43 @@ +import { Command as $Command } from "@smithy/smithy-client"; +import { MetadataBearer as __MetadataBearer } from "@smithy/types"; +import { CreateTokenRequest, CreateTokenResponse } from "../models/models_0"; +import { SSOOIDCClientResolvedConfig } from "../SSOOIDCClient"; +export { __MetadataBearer }; +export { $Command }; +export interface CreateTokenCommandInput extends CreateTokenRequest {} +export interface CreateTokenCommandOutput + extends CreateTokenResponse, + __MetadataBearer {} +declare const CreateTokenCommand_base: { + new ( + input: CreateTokenCommandInput + ): import("@smithy/smithy-client").CommandImpl< + CreateTokenCommandInput, + CreateTokenCommandOutput, + SSOOIDCClientResolvedConfig, + CreateTokenCommandInput, + CreateTokenCommandOutput + >; + new ( + __0_0: CreateTokenCommandInput + ): import("@smithy/smithy-client").CommandImpl< + CreateTokenCommandInput, + CreateTokenCommandOutput, + SSOOIDCClientResolvedConfig, + CreateTokenCommandInput, + CreateTokenCommandOutput + >; + getEndpointParameterInstructions(): import("@smithy/middleware-endpoint").EndpointParameterInstructions; +}; +export declare class CreateTokenCommand extends CreateTokenCommand_base { + protected static __types: { + api: { + input: CreateTokenRequest; + output: CreateTokenResponse; + }; + sdk: { + input: CreateTokenCommandInput; + output: CreateTokenCommandOutput; + }; + }; +} diff --git a/amplify/functions/downloadDocument/node_modules/@aws-sdk/nested-clients/dist-types/ts3.4/submodules/sso-oidc/commands/index.d.ts b/amplify/functions/downloadDocument/node_modules/@aws-sdk/nested-clients/dist-types/ts3.4/submodules/sso-oidc/commands/index.d.ts new file mode 100644 index 0000000..09214ca --- /dev/null +++ 
b/amplify/functions/downloadDocument/node_modules/@aws-sdk/nested-clients/dist-types/ts3.4/submodules/sso-oidc/commands/index.d.ts @@ -0,0 +1 @@ +export * from "./CreateTokenCommand"; diff --git a/amplify/functions/downloadDocument/node_modules/@aws-sdk/nested-clients/dist-types/ts3.4/submodules/sso-oidc/endpoint/EndpointParameters.d.ts b/amplify/functions/downloadDocument/node_modules/@aws-sdk/nested-clients/dist-types/ts3.4/submodules/sso-oidc/endpoint/EndpointParameters.d.ts new file mode 100644 index 0000000..7f24540 --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@aws-sdk/nested-clients/dist-types/ts3.4/submodules/sso-oidc/endpoint/EndpointParameters.d.ts @@ -0,0 +1,51 @@ +import { + Endpoint, + EndpointParameters as __EndpointParameters, + EndpointV2, + Provider, +} from "@smithy/types"; +export interface ClientInputEndpointParameters { + region?: string | Provider; + useDualstackEndpoint?: boolean | Provider; + useFipsEndpoint?: boolean | Provider; + endpoint?: + | string + | Provider + | Endpoint + | Provider + | EndpointV2 + | Provider; +} +export type ClientResolvedEndpointParameters = ClientInputEndpointParameters & { + defaultSigningName: string; +}; +export declare const resolveClientEndpointParameters: ( + options: T & ClientInputEndpointParameters +) => T & + ClientInputEndpointParameters & { + defaultSigningName: string; + }; +export declare const commonParams: { + readonly UseFIPS: { + readonly type: "builtInParams"; + readonly name: "useFipsEndpoint"; + }; + readonly Endpoint: { + readonly type: "builtInParams"; + readonly name: "endpoint"; + }; + readonly Region: { + readonly type: "builtInParams"; + readonly name: "region"; + }; + readonly UseDualStack: { + readonly type: "builtInParams"; + readonly name: "useDualstackEndpoint"; + }; +}; +export interface EndpointParameters extends __EndpointParameters { + Region?: string; + UseDualStack?: boolean; + UseFIPS?: boolean; + Endpoint?: string; +} diff --git 
a/amplify/functions/downloadDocument/node_modules/@aws-sdk/nested-clients/dist-types/ts3.4/submodules/sso-oidc/endpoint/endpointResolver.d.ts b/amplify/functions/downloadDocument/node_modules/@aws-sdk/nested-clients/dist-types/ts3.4/submodules/sso-oidc/endpoint/endpointResolver.d.ts new file mode 100644 index 0000000..5909925 --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@aws-sdk/nested-clients/dist-types/ts3.4/submodules/sso-oidc/endpoint/endpointResolver.d.ts @@ -0,0 +1,8 @@ +import { EndpointV2, Logger } from "@smithy/types"; +import { EndpointParameters } from "./EndpointParameters"; +export declare const defaultEndpointResolver: ( + endpointParams: EndpointParameters, + context?: { + logger?: Logger; + } +) => EndpointV2; diff --git a/amplify/functions/downloadDocument/node_modules/@aws-sdk/nested-clients/dist-types/ts3.4/submodules/sso-oidc/endpoint/ruleset.d.ts b/amplify/functions/downloadDocument/node_modules/@aws-sdk/nested-clients/dist-types/ts3.4/submodules/sso-oidc/endpoint/ruleset.d.ts new file mode 100644 index 0000000..4b23899 --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@aws-sdk/nested-clients/dist-types/ts3.4/submodules/sso-oidc/endpoint/ruleset.d.ts @@ -0,0 +1,2 @@ +import { RuleSetObject } from "@smithy/types"; +export declare const ruleSet: RuleSetObject; diff --git a/amplify/functions/downloadDocument/node_modules/@aws-sdk/nested-clients/dist-types/ts3.4/submodules/sso-oidc/extensionConfiguration.d.ts b/amplify/functions/downloadDocument/node_modules/@aws-sdk/nested-clients/dist-types/ts3.4/submodules/sso-oidc/extensionConfiguration.d.ts new file mode 100644 index 0000000..c208e33 --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@aws-sdk/nested-clients/dist-types/ts3.4/submodules/sso-oidc/extensionConfiguration.d.ts @@ -0,0 +1,9 @@ +import { AwsRegionExtensionConfiguration } from "@aws-sdk/types"; +import { HttpHandlerExtensionConfiguration } from "@smithy/protocol-http"; +import { 
DefaultExtensionConfiguration } from "@smithy/types"; +import { HttpAuthExtensionConfiguration } from "./auth/httpAuthExtensionConfiguration"; +export interface SSOOIDCExtensionConfiguration + extends HttpHandlerExtensionConfiguration, + DefaultExtensionConfiguration, + AwsRegionExtensionConfiguration, + HttpAuthExtensionConfiguration {} diff --git a/amplify/functions/downloadDocument/node_modules/@aws-sdk/nested-clients/dist-types/ts3.4/submodules/sso-oidc/index.d.ts b/amplify/functions/downloadDocument/node_modules/@aws-sdk/nested-clients/dist-types/ts3.4/submodules/sso-oidc/index.d.ts new file mode 100644 index 0000000..1e9247f --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@aws-sdk/nested-clients/dist-types/ts3.4/submodules/sso-oidc/index.d.ts @@ -0,0 +1,8 @@ +export * from "./SSOOIDCClient"; +export * from "./SSOOIDC"; +export { ClientInputEndpointParameters } from "./endpoint/EndpointParameters"; +export { RuntimeExtension } from "./runtimeExtensions"; +export { SSOOIDCExtensionConfiguration } from "./extensionConfiguration"; +export * from "./commands"; +export * from "./models"; +export { SSOOIDCServiceException } from "./models/SSOOIDCServiceException"; diff --git a/amplify/functions/downloadDocument/node_modules/@aws-sdk/nested-clients/dist-types/ts3.4/submodules/sso-oidc/models/SSOOIDCServiceException.d.ts b/amplify/functions/downloadDocument/node_modules/@aws-sdk/nested-clients/dist-types/ts3.4/submodules/sso-oidc/models/SSOOIDCServiceException.d.ts new file mode 100644 index 0000000..dae636f --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@aws-sdk/nested-clients/dist-types/ts3.4/submodules/sso-oidc/models/SSOOIDCServiceException.d.ts @@ -0,0 +1,9 @@ +import { + ServiceException as __ServiceException, + ServiceExceptionOptions as __ServiceExceptionOptions, +} from "@smithy/smithy-client"; +export { __ServiceExceptionOptions }; +export { __ServiceException }; +export declare class SSOOIDCServiceException extends 
__ServiceException { + constructor(options: __ServiceExceptionOptions); +} diff --git a/amplify/functions/downloadDocument/node_modules/@aws-sdk/nested-clients/dist-types/ts3.4/submodules/sso-oidc/models/index.d.ts b/amplify/functions/downloadDocument/node_modules/@aws-sdk/nested-clients/dist-types/ts3.4/submodules/sso-oidc/models/index.d.ts new file mode 100644 index 0000000..09c5d6e --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@aws-sdk/nested-clients/dist-types/ts3.4/submodules/sso-oidc/models/index.d.ts @@ -0,0 +1 @@ +export * from "./models_0"; diff --git a/amplify/functions/downloadDocument/node_modules/@aws-sdk/nested-clients/dist-types/ts3.4/submodules/sso-oidc/models/models_0.d.ts b/amplify/functions/downloadDocument/node_modules/@aws-sdk/nested-clients/dist-types/ts3.4/submodules/sso-oidc/models/models_0.d.ts new file mode 100644 index 0000000..68de714 --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@aws-sdk/nested-clients/dist-types/ts3.4/submodules/sso-oidc/models/models_0.d.ts @@ -0,0 +1,123 @@ +import { ExceptionOptionType as __ExceptionOptionType } from "@smithy/smithy-client"; +import { SSOOIDCServiceException as __BaseException } from "./SSOOIDCServiceException"; +export declare class AccessDeniedException extends __BaseException { + readonly name: "AccessDeniedException"; + readonly $fault: "client"; + error?: string | undefined; + error_description?: string | undefined; + constructor( + opts: __ExceptionOptionType + ); +} +export declare class AuthorizationPendingException extends __BaseException { + readonly name: "AuthorizationPendingException"; + readonly $fault: "client"; + error?: string | undefined; + error_description?: string | undefined; + constructor( + opts: __ExceptionOptionType + ); +} +export interface CreateTokenRequest { + clientId: string | undefined; + clientSecret: string | undefined; + grantType: string | undefined; + deviceCode?: string | undefined; + code?: string | undefined; + 
refreshToken?: string | undefined; + scope?: string[] | undefined; + redirectUri?: string | undefined; + codeVerifier?: string | undefined; +} +export declare const CreateTokenRequestFilterSensitiveLog: ( + obj: CreateTokenRequest +) => any; +export interface CreateTokenResponse { + accessToken?: string | undefined; + tokenType?: string | undefined; + expiresIn?: number | undefined; + refreshToken?: string | undefined; + idToken?: string | undefined; +} +export declare const CreateTokenResponseFilterSensitiveLog: ( + obj: CreateTokenResponse +) => any; +export declare class ExpiredTokenException extends __BaseException { + readonly name: "ExpiredTokenException"; + readonly $fault: "client"; + error?: string | undefined; + error_description?: string | undefined; + constructor( + opts: __ExceptionOptionType + ); +} +export declare class InternalServerException extends __BaseException { + readonly name: "InternalServerException"; + readonly $fault: "server"; + error?: string | undefined; + error_description?: string | undefined; + constructor( + opts: __ExceptionOptionType + ); +} +export declare class InvalidClientException extends __BaseException { + readonly name: "InvalidClientException"; + readonly $fault: "client"; + error?: string | undefined; + error_description?: string | undefined; + constructor( + opts: __ExceptionOptionType + ); +} +export declare class InvalidGrantException extends __BaseException { + readonly name: "InvalidGrantException"; + readonly $fault: "client"; + error?: string | undefined; + error_description?: string | undefined; + constructor( + opts: __ExceptionOptionType + ); +} +export declare class InvalidRequestException extends __BaseException { + readonly name: "InvalidRequestException"; + readonly $fault: "client"; + error?: string | undefined; + error_description?: string | undefined; + constructor( + opts: __ExceptionOptionType + ); +} +export declare class InvalidScopeException extends __BaseException { + readonly name: 
"InvalidScopeException"; + readonly $fault: "client"; + error?: string | undefined; + error_description?: string | undefined; + constructor( + opts: __ExceptionOptionType + ); +} +export declare class SlowDownException extends __BaseException { + readonly name: "SlowDownException"; + readonly $fault: "client"; + error?: string | undefined; + error_description?: string | undefined; + constructor(opts: __ExceptionOptionType); +} +export declare class UnauthorizedClientException extends __BaseException { + readonly name: "UnauthorizedClientException"; + readonly $fault: "client"; + error?: string | undefined; + error_description?: string | undefined; + constructor( + opts: __ExceptionOptionType + ); +} +export declare class UnsupportedGrantTypeException extends __BaseException { + readonly name: "UnsupportedGrantTypeException"; + readonly $fault: "client"; + error?: string | undefined; + error_description?: string | undefined; + constructor( + opts: __ExceptionOptionType + ); +} diff --git a/amplify/functions/downloadDocument/node_modules/@aws-sdk/nested-clients/dist-types/ts3.4/submodules/sso-oidc/protocols/Aws_restJson1.d.ts b/amplify/functions/downloadDocument/node_modules/@aws-sdk/nested-clients/dist-types/ts3.4/submodules/sso-oidc/protocols/Aws_restJson1.d.ts new file mode 100644 index 0000000..d0657b8 --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@aws-sdk/nested-clients/dist-types/ts3.4/submodules/sso-oidc/protocols/Aws_restJson1.d.ts @@ -0,0 +1,17 @@ +import { + HttpRequest as __HttpRequest, + HttpResponse as __HttpResponse, +} from "@smithy/protocol-http"; +import { SerdeContext as __SerdeContext } from "@smithy/types"; +import { + CreateTokenCommandInput, + CreateTokenCommandOutput, +} from "../commands/CreateTokenCommand"; +export declare const se_CreateTokenCommand: ( + input: CreateTokenCommandInput, + context: __SerdeContext +) => Promise<__HttpRequest>; +export declare const de_CreateTokenCommand: ( + output: __HttpResponse, + 
context: __SerdeContext +) => Promise; diff --git a/amplify/functions/downloadDocument/node_modules/@aws-sdk/nested-clients/dist-types/ts3.4/submodules/sso-oidc/runtimeConfig.browser.d.ts b/amplify/functions/downloadDocument/node_modules/@aws-sdk/nested-clients/dist-types/ts3.4/submodules/sso-oidc/runtimeConfig.browser.d.ts new file mode 100644 index 0000000..c469a24 --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@aws-sdk/nested-clients/dist-types/ts3.4/submodules/sso-oidc/runtimeConfig.browser.d.ts @@ -0,0 +1,120 @@ +import { FetchHttpHandler as RequestHandler } from "@smithy/fetch-http-handler"; +import { SSOOIDCClientConfig } from "./SSOOIDCClient"; +export declare const getRuntimeConfig: (config: SSOOIDCClientConfig) => { + runtime: string; + defaultsMode: import("@smithy/types").Provider< + import("@smithy/smithy-client").ResolvedDefaultsMode + >; + bodyLengthChecker: import("@smithy/types").BodyLengthCalculator; + defaultUserAgentProvider: ( + config?: + | import("@aws-sdk/util-user-agent-browser").PreviouslyResolved + | undefined + ) => Promise; + maxAttempts: number | import("@smithy/types").Provider; + region: string | import("@smithy/types").Provider; + requestHandler: + | import("@smithy/protocol-http").HttpHandler + | RequestHandler; + retryMode: string | import("@smithy/types").Provider; + sha256: import("@smithy/types").HashConstructor; + streamCollector: import("@smithy/types").StreamCollector; + useDualstackEndpoint: boolean | import("@smithy/types").Provider; + useFipsEndpoint: boolean | import("@smithy/types").Provider; + apiVersion: string; + cacheMiddleware?: boolean | undefined; + urlParser: import("@smithy/types").UrlParser; + base64Decoder: import("@smithy/types").Decoder; + base64Encoder: (_input: string | Uint8Array) => string; + utf8Decoder: import("@smithy/types").Decoder; + utf8Encoder: (input: string | Uint8Array) => string; + disableHostPrefix: boolean; + serviceId: string; + profile?: string | undefined; + logger: 
import("@smithy/types").Logger; + extensions: import("./runtimeExtensions").RuntimeExtension[]; + customUserAgent?: string | import("@smithy/types").UserAgent | undefined; + userAgentAppId?: + | string + | import("@smithy/types").Provider + | undefined; + retryStrategy?: + | import("@smithy/types").RetryStrategy + | import("@smithy/types").RetryStrategyV2 + | undefined; + endpoint?: + | (( + | string + | import("@smithy/types").Endpoint + | import("@smithy/types").Provider + | import("@smithy/types").EndpointV2 + | import("@smithy/types").Provider + ) & + ( + | string + | import("@smithy/types").Provider + | import("@smithy/types").Endpoint + | import("@smithy/types").Provider + | import("@smithy/types").EndpointV2 + | import("@smithy/types").Provider + )) + | undefined; + endpointProvider: ( + endpointParams: import("./endpoint/EndpointParameters").EndpointParameters, + context?: { + logger?: import("@smithy/types").Logger | undefined; + } + ) => import("@smithy/types").EndpointV2; + tls?: boolean | undefined; + serviceConfiguredEndpoint?: undefined; + authSchemePreference?: + | string[] + | import("@smithy/types").Provider + | undefined; + httpAuthSchemes: + | import("@smithy/types").HttpAuthScheme[] + | ( + | { + schemeId: string; + identityProvider: ( + ipc: import("@smithy/types").IdentityProviderConfig + ) => + | import("@smithy/types").IdentityProvider< + import("@smithy/types").Identity + > + | undefined; + signer: import("@aws-sdk/core").AwsSdkSigV4Signer; + } + | { + schemeId: string; + identityProvider: ( + ipc: import("@smithy/types").IdentityProviderConfig + ) => + | import("@smithy/types").IdentityProvider< + import("@smithy/types").Identity + > + | (() => Promise<{}>); + signer: import("@smithy/core").NoAuthSigner; + } + )[]; + httpAuthSchemeProvider: import("./auth/httpAuthSchemeProvider").SSOOIDCHttpAuthSchemeProvider; + credentials?: + | import("@smithy/types").AwsCredentialIdentity + | import("@smithy/types").AwsCredentialIdentityProvider + | 
undefined; + signer?: + | import("@smithy/types").RequestSigner + | (( + authScheme?: import("@smithy/types").AuthScheme | undefined + ) => Promise) + | undefined; + signingEscapePath?: boolean | undefined; + systemClockOffset?: number | undefined; + signingRegion?: string | undefined; + signerConstructor?: + | (new ( + options: import("@smithy/signature-v4").SignatureV4Init & + import("@smithy/signature-v4").SignatureV4CryptoInit + ) => import("@smithy/types").RequestSigner) + | undefined; +}; diff --git a/amplify/functions/downloadDocument/node_modules/@aws-sdk/nested-clients/dist-types/ts3.4/submodules/sso-oidc/runtimeConfig.d.ts b/amplify/functions/downloadDocument/node_modules/@aws-sdk/nested-clients/dist-types/ts3.4/submodules/sso-oidc/runtimeConfig.d.ts new file mode 100644 index 0000000..a24c900 --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@aws-sdk/nested-clients/dist-types/ts3.4/submodules/sso-oidc/runtimeConfig.d.ts @@ -0,0 +1,114 @@ +import { NodeHttpHandler as RequestHandler } from "@smithy/node-http-handler"; +import { SSOOIDCClientConfig } from "./SSOOIDCClient"; +export declare const getRuntimeConfig: (config: SSOOIDCClientConfig) => { + runtime: string; + defaultsMode: import("@smithy/types").Provider< + import("@smithy/smithy-client").ResolvedDefaultsMode + >; + authSchemePreference: string[] | import("@smithy/types").Provider; + bodyLengthChecker: import("@smithy/types").BodyLengthCalculator; + defaultUserAgentProvider: ( + config?: + | import("@aws-sdk/util-user-agent-node").PreviouslyResolved + | undefined + ) => Promise; + maxAttempts: number | import("@smithy/types").Provider; + region: string | import("@smithy/types").Provider; + requestHandler: + | RequestHandler + | import("@smithy/protocol-http").HttpHandler; + retryMode: string | import("@smithy/types").Provider; + sha256: import("@smithy/types").HashConstructor; + streamCollector: import("@smithy/types").StreamCollector; + useDualstackEndpoint: boolean | 
import("@smithy/types").Provider; + useFipsEndpoint: boolean | import("@smithy/types").Provider; + userAgentAppId: string | import("@smithy/types").Provider; + apiVersion: string; + cacheMiddleware?: boolean | undefined; + urlParser: import("@smithy/types").UrlParser; + base64Decoder: import("@smithy/types").Decoder; + base64Encoder: (_input: string | Uint8Array) => string; + utf8Decoder: import("@smithy/types").Decoder; + utf8Encoder: (input: string | Uint8Array) => string; + disableHostPrefix: boolean; + serviceId: string; + profile?: string | undefined; + logger: import("@smithy/types").Logger; + extensions: import("./runtimeExtensions").RuntimeExtension[]; + customUserAgent?: string | import("@smithy/types").UserAgent | undefined; + retryStrategy?: + | import("@smithy/types").RetryStrategy + | import("@smithy/types").RetryStrategyV2 + | undefined; + endpoint?: + | (( + | string + | import("@smithy/types").Endpoint + | import("@smithy/types").Provider + | import("@smithy/types").EndpointV2 + | import("@smithy/types").Provider + ) & + ( + | string + | import("@smithy/types").Provider + | import("@smithy/types").Endpoint + | import("@smithy/types").Provider + | import("@smithy/types").EndpointV2 + | import("@smithy/types").Provider + )) + | undefined; + endpointProvider: ( + endpointParams: import("./endpoint/EndpointParameters").EndpointParameters, + context?: { + logger?: import("@smithy/types").Logger | undefined; + } + ) => import("@smithy/types").EndpointV2; + tls?: boolean | undefined; + serviceConfiguredEndpoint?: undefined; + httpAuthSchemes: + | import("@smithy/types").HttpAuthScheme[] + | ( + | { + schemeId: string; + identityProvider: ( + ipc: import("@smithy/types").IdentityProviderConfig + ) => + | import("@smithy/types").IdentityProvider< + import("@smithy/types").Identity + > + | undefined; + signer: import("@aws-sdk/core").AwsSdkSigV4Signer; + } + | { + schemeId: string; + identityProvider: ( + ipc: import("@smithy/types").IdentityProviderConfig + 
) => + | import("@smithy/types").IdentityProvider< + import("@smithy/types").Identity + > + | (() => Promise<{}>); + signer: import("@smithy/core").NoAuthSigner; + } + )[]; + httpAuthSchemeProvider: import("./auth/httpAuthSchemeProvider").SSOOIDCHttpAuthSchemeProvider; + credentials?: + | import("@smithy/types").AwsCredentialIdentity + | import("@smithy/types").AwsCredentialIdentityProvider + | undefined; + signer?: + | import("@smithy/types").RequestSigner + | (( + authScheme?: import("@smithy/types").AuthScheme | undefined + ) => Promise) + | undefined; + signingEscapePath?: boolean | undefined; + systemClockOffset?: number | undefined; + signingRegion?: string | undefined; + signerConstructor?: + | (new ( + options: import("@smithy/signature-v4").SignatureV4Init & + import("@smithy/signature-v4").SignatureV4CryptoInit + ) => import("@smithy/types").RequestSigner) + | undefined; +}; diff --git a/amplify/functions/downloadDocument/node_modules/@aws-sdk/nested-clients/dist-types/ts3.4/submodules/sso-oidc/runtimeConfig.native.d.ts b/amplify/functions/downloadDocument/node_modules/@aws-sdk/nested-clients/dist-types/ts3.4/submodules/sso-oidc/runtimeConfig.native.d.ts new file mode 100644 index 0000000..c3610fd --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@aws-sdk/nested-clients/dist-types/ts3.4/submodules/sso-oidc/runtimeConfig.native.d.ts @@ -0,0 +1,124 @@ +import { SSOOIDCClientConfig } from "./SSOOIDCClient"; +export declare const getRuntimeConfig: (config: SSOOIDCClientConfig) => { + runtime: string; + sha256: import("@smithy/types").HashConstructor; + requestHandler: + | import("@smithy/types").NodeHttpHandlerOptions + | import("@smithy/types").FetchHttpHandlerOptions + | Record + | import("@smithy/protocol-http").HttpHandler + | import("@smithy/fetch-http-handler").FetchHttpHandler; + apiVersion: string; + cacheMiddleware?: boolean | undefined; + urlParser: import("@smithy/types").UrlParser; + bodyLengthChecker: 
import("@smithy/types").BodyLengthCalculator; + streamCollector: import("@smithy/types").StreamCollector; + base64Decoder: import("@smithy/types").Decoder; + base64Encoder: (_input: string | Uint8Array) => string; + utf8Decoder: import("@smithy/types").Decoder; + utf8Encoder: (input: string | Uint8Array) => string; + disableHostPrefix: boolean; + serviceId: string; + useDualstackEndpoint: boolean | import("@smithy/types").Provider; + useFipsEndpoint: boolean | import("@smithy/types").Provider; + region: string | import("@smithy/types").Provider; + profile?: string | undefined; + defaultUserAgentProvider: ( + config?: + | import("@aws-sdk/util-user-agent-browser").PreviouslyResolved + | undefined + ) => Promise; + maxAttempts: number | import("@smithy/types").Provider; + retryMode: string | import("@smithy/types").Provider; + logger: import("@smithy/types").Logger; + extensions: import("./runtimeExtensions").RuntimeExtension[]; + defaultsMode: + | import("@smithy/smithy-client").DefaultsMode + | import("@smithy/types").Provider< + import("@smithy/smithy-client").DefaultsMode + >; + customUserAgent?: string | import("@smithy/types").UserAgent | undefined; + userAgentAppId?: + | string + | import("@smithy/types").Provider + | undefined; + retryStrategy?: + | import("@smithy/types").RetryStrategy + | import("@smithy/types").RetryStrategyV2 + | undefined; + endpoint?: + | (( + | string + | import("@smithy/types").Endpoint + | import("@smithy/types").Provider + | import("@smithy/types").EndpointV2 + | import("@smithy/types").Provider + ) & + ( + | string + | import("@smithy/types").Provider + | import("@smithy/types").Endpoint + | import("@smithy/types").Provider + | import("@smithy/types").EndpointV2 + | import("@smithy/types").Provider + )) + | undefined; + endpointProvider: ( + endpointParams: import("./endpoint/EndpointParameters").EndpointParameters, + context?: { + logger?: import("@smithy/types").Logger | undefined; + } + ) => import("@smithy/types").EndpointV2; + 
tls?: boolean | undefined; + serviceConfiguredEndpoint?: undefined; + authSchemePreference?: + | string[] + | import("@smithy/types").Provider + | undefined; + httpAuthSchemes: + | import("@smithy/types").HttpAuthScheme[] + | ( + | { + schemeId: string; + identityProvider: ( + ipc: import("@smithy/types").IdentityProviderConfig + ) => + | import("@smithy/types").IdentityProvider< + import("@smithy/types").Identity + > + | undefined; + signer: import("@aws-sdk/core").AwsSdkSigV4Signer; + } + | { + schemeId: string; + identityProvider: ( + ipc: import("@smithy/types").IdentityProviderConfig + ) => + | import("@smithy/types").IdentityProvider< + import("@smithy/types").Identity + > + | (() => Promise<{}>); + signer: import("@smithy/core").NoAuthSigner; + } + )[]; + httpAuthSchemeProvider: import("./auth/httpAuthSchemeProvider").SSOOIDCHttpAuthSchemeProvider; + credentials?: + | import("@smithy/types").AwsCredentialIdentity + | import("@smithy/types").AwsCredentialIdentityProvider + | undefined; + signer?: + | import("@smithy/types").RequestSigner + | (( + authScheme?: import("@smithy/types").AuthScheme | undefined + ) => Promise) + | undefined; + signingEscapePath?: boolean | undefined; + systemClockOffset?: number | undefined; + signingRegion?: string | undefined; + signerConstructor?: + | (new ( + options: import("@smithy/signature-v4").SignatureV4Init & + import("@smithy/signature-v4").SignatureV4CryptoInit + ) => import("@smithy/types").RequestSigner) + | undefined; +}; diff --git a/amplify/functions/downloadDocument/node_modules/@aws-sdk/nested-clients/dist-types/ts3.4/submodules/sso-oidc/runtimeConfig.shared.d.ts b/amplify/functions/downloadDocument/node_modules/@aws-sdk/nested-clients/dist-types/ts3.4/submodules/sso-oidc/runtimeConfig.shared.d.ts new file mode 100644 index 0000000..130a1e3 --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@aws-sdk/nested-clients/dist-types/ts3.4/submodules/sso-oidc/runtimeConfig.shared.d.ts @@ -0,0 +1,49 @@ 
+import { AwsSdkSigV4Signer } from "@aws-sdk/core"; +import { NoAuthSigner } from "@smithy/core"; +import { IdentityProviderConfig } from "@smithy/types"; +import { SSOOIDCClientConfig } from "./SSOOIDCClient"; +export declare const getRuntimeConfig: (config: SSOOIDCClientConfig) => { + apiVersion: string; + base64Decoder: import("@smithy/types").Decoder; + base64Encoder: (_input: string | Uint8Array) => string; + disableHostPrefix: boolean; + endpointProvider: ( + endpointParams: import("./endpoint/EndpointParameters").EndpointParameters, + context?: { + logger?: import("@smithy/types").Logger | undefined; + } + ) => import("@smithy/types").EndpointV2; + extensions: import("./runtimeExtensions").RuntimeExtension[]; + httpAuthSchemeProvider: import("./auth/httpAuthSchemeProvider").SSOOIDCHttpAuthSchemeProvider; + httpAuthSchemes: + | import("@smithy/types").HttpAuthScheme[] + | ( + | { + schemeId: string; + identityProvider: ( + ipc: IdentityProviderConfig + ) => + | import("@smithy/types").IdentityProvider< + import("@smithy/types").Identity + > + | undefined; + signer: AwsSdkSigV4Signer; + } + | { + schemeId: string; + identityProvider: ( + ipc: IdentityProviderConfig + ) => + | import("@smithy/types").IdentityProvider< + import("@smithy/types").Identity + > + | (() => Promise<{}>); + signer: NoAuthSigner; + } + )[]; + logger: import("@smithy/types").Logger; + serviceId: string; + urlParser: import("@smithy/types").UrlParser; + utf8Decoder: import("@smithy/types").Decoder; + utf8Encoder: (input: string | Uint8Array) => string; +}; diff --git a/amplify/functions/downloadDocument/node_modules/@aws-sdk/nested-clients/dist-types/ts3.4/submodules/sso-oidc/runtimeExtensions.d.ts b/amplify/functions/downloadDocument/node_modules/@aws-sdk/nested-clients/dist-types/ts3.4/submodules/sso-oidc/runtimeExtensions.d.ts new file mode 100644 index 0000000..d226882 --- /dev/null +++ 
b/amplify/functions/downloadDocument/node_modules/@aws-sdk/nested-clients/dist-types/ts3.4/submodules/sso-oidc/runtimeExtensions.d.ts @@ -0,0 +1,11 @@ +import { SSOOIDCExtensionConfiguration } from "./extensionConfiguration"; +export interface RuntimeExtension { + configure(extensionConfiguration: SSOOIDCExtensionConfiguration): void; +} +export interface RuntimeExtensionsConfig { + extensions: RuntimeExtension[]; +} +export declare const resolveRuntimeExtensions: ( + runtimeConfig: any, + extensions: RuntimeExtension[] +) => any; diff --git a/amplify/functions/downloadDocument/node_modules/@aws-sdk/nested-clients/dist-types/ts3.4/submodules/sts/STS.d.ts b/amplify/functions/downloadDocument/node_modules/@aws-sdk/nested-clients/dist-types/ts3.4/submodules/sts/STS.d.ts new file mode 100644 index 0000000..cca9cbb --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@aws-sdk/nested-clients/dist-types/ts3.4/submodules/sts/STS.d.ts @@ -0,0 +1,39 @@ +import { HttpHandlerOptions as __HttpHandlerOptions } from "@smithy/types"; +import { + AssumeRoleCommandInput, + AssumeRoleCommandOutput, +} from "./commands/AssumeRoleCommand"; +import { + AssumeRoleWithWebIdentityCommandInput, + AssumeRoleWithWebIdentityCommandOutput, +} from "./commands/AssumeRoleWithWebIdentityCommand"; +import { STSClient } from "./STSClient"; +export interface STS { + assumeRole( + args: AssumeRoleCommandInput, + options?: __HttpHandlerOptions + ): Promise; + assumeRole( + args: AssumeRoleCommandInput, + cb: (err: any, data?: AssumeRoleCommandOutput) => void + ): void; + assumeRole( + args: AssumeRoleCommandInput, + options: __HttpHandlerOptions, + cb: (err: any, data?: AssumeRoleCommandOutput) => void + ): void; + assumeRoleWithWebIdentity( + args: AssumeRoleWithWebIdentityCommandInput, + options?: __HttpHandlerOptions + ): Promise; + assumeRoleWithWebIdentity( + args: AssumeRoleWithWebIdentityCommandInput, + cb: (err: any, data?: AssumeRoleWithWebIdentityCommandOutput) => void + ): 
void; + assumeRoleWithWebIdentity( + args: AssumeRoleWithWebIdentityCommandInput, + options: __HttpHandlerOptions, + cb: (err: any, data?: AssumeRoleWithWebIdentityCommandOutput) => void + ): void; +} +export declare class STS extends STSClient implements STS {} diff --git a/amplify/functions/downloadDocument/node_modules/@aws-sdk/nested-clients/dist-types/ts3.4/submodules/sts/STSClient.d.ts b/amplify/functions/downloadDocument/node_modules/@aws-sdk/nested-clients/dist-types/ts3.4/submodules/sts/STSClient.d.ts new file mode 100644 index 0000000..8bffddf --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@aws-sdk/nested-clients/dist-types/ts3.4/submodules/sts/STSClient.d.ts @@ -0,0 +1,128 @@ +import { + HostHeaderInputConfig, + HostHeaderResolvedConfig, +} from "@aws-sdk/middleware-host-header"; +import { + UserAgentInputConfig, + UserAgentResolvedConfig, +} from "@aws-sdk/middleware-user-agent"; +import { + RegionInputConfig, + RegionResolvedConfig, +} from "@smithy/config-resolver"; +import { + EndpointInputConfig, + EndpointResolvedConfig, +} from "@smithy/middleware-endpoint"; +import { + RetryInputConfig, + RetryResolvedConfig, +} from "@smithy/middleware-retry"; +import { HttpHandlerUserInput as __HttpHandlerUserInput } from "@smithy/protocol-http"; +import { + Client as __Client, + DefaultsMode as __DefaultsMode, + SmithyConfiguration as __SmithyConfiguration, + SmithyResolvedConfiguration as __SmithyResolvedConfiguration, +} from "@smithy/smithy-client"; +import { + AwsCredentialIdentityProvider, + BodyLengthCalculator as __BodyLengthCalculator, + CheckOptionalClientConfig as __CheckOptionalClientConfig, + ChecksumConstructor as __ChecksumConstructor, + Decoder as __Decoder, + Encoder as __Encoder, + HashConstructor as __HashConstructor, + HttpHandlerOptions as __HttpHandlerOptions, + Logger as __Logger, + Provider as __Provider, + Provider, + StreamCollector as __StreamCollector, + UrlParser as __UrlParser, + UserAgent as __UserAgent, +} 
from "@smithy/types"; +import { + HttpAuthSchemeInputConfig, + HttpAuthSchemeResolvedConfig, +} from "./auth/httpAuthSchemeProvider"; +import { + AssumeRoleCommandInput, + AssumeRoleCommandOutput, +} from "./commands/AssumeRoleCommand"; +import { + AssumeRoleWithWebIdentityCommandInput, + AssumeRoleWithWebIdentityCommandOutput, +} from "./commands/AssumeRoleWithWebIdentityCommand"; +import { + ClientInputEndpointParameters, + ClientResolvedEndpointParameters, + EndpointParameters, +} from "./endpoint/EndpointParameters"; +import { RuntimeExtension, RuntimeExtensionsConfig } from "./runtimeExtensions"; +export { __Client }; +export type ServiceInputTypes = + | AssumeRoleCommandInput + | AssumeRoleWithWebIdentityCommandInput; +export type ServiceOutputTypes = + | AssumeRoleCommandOutput + | AssumeRoleWithWebIdentityCommandOutput; +export interface ClientDefaults + extends Partial<__SmithyConfiguration<__HttpHandlerOptions>> { + requestHandler?: __HttpHandlerUserInput; + sha256?: __ChecksumConstructor | __HashConstructor; + urlParser?: __UrlParser; + bodyLengthChecker?: __BodyLengthCalculator; + streamCollector?: __StreamCollector; + base64Decoder?: __Decoder; + base64Encoder?: __Encoder; + utf8Decoder?: __Decoder; + utf8Encoder?: __Encoder; + runtime?: string; + disableHostPrefix?: boolean; + serviceId?: string; + useDualstackEndpoint?: boolean | __Provider; + useFipsEndpoint?: boolean | __Provider; + region?: string | __Provider; + profile?: string; + defaultUserAgentProvider?: Provider<__UserAgent>; + credentialDefaultProvider?: (input: any) => AwsCredentialIdentityProvider; + maxAttempts?: number | __Provider; + retryMode?: string | __Provider; + logger?: __Logger; + extensions?: RuntimeExtension[]; + defaultsMode?: __DefaultsMode | __Provider<__DefaultsMode>; +} +export type STSClientConfigType = Partial< + __SmithyConfiguration<__HttpHandlerOptions> +> & + ClientDefaults & + UserAgentInputConfig & + RetryInputConfig & + RegionInputConfig & + 
HostHeaderInputConfig & + EndpointInputConfig & + HttpAuthSchemeInputConfig & + ClientInputEndpointParameters; +export interface STSClientConfig extends STSClientConfigType {} +export type STSClientResolvedConfigType = + __SmithyResolvedConfiguration<__HttpHandlerOptions> & + Required & + RuntimeExtensionsConfig & + UserAgentResolvedConfig & + RetryResolvedConfig & + RegionResolvedConfig & + HostHeaderResolvedConfig & + EndpointResolvedConfig & + HttpAuthSchemeResolvedConfig & + ClientResolvedEndpointParameters; +export interface STSClientResolvedConfig extends STSClientResolvedConfigType {} +export declare class STSClient extends __Client< + __HttpHandlerOptions, + ServiceInputTypes, + ServiceOutputTypes, + STSClientResolvedConfig +> { + readonly config: STSClientResolvedConfig; + constructor(...[configuration]: __CheckOptionalClientConfig); + destroy(): void; +} diff --git a/amplify/functions/downloadDocument/node_modules/@aws-sdk/nested-clients/dist-types/ts3.4/submodules/sts/auth/httpAuthExtensionConfiguration.d.ts b/amplify/functions/downloadDocument/node_modules/@aws-sdk/nested-clients/dist-types/ts3.4/submodules/sts/auth/httpAuthExtensionConfiguration.d.ts new file mode 100644 index 0000000..ef83018 --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@aws-sdk/nested-clients/dist-types/ts3.4/submodules/sts/auth/httpAuthExtensionConfiguration.d.ts @@ -0,0 +1,32 @@ +import { + AwsCredentialIdentity, + AwsCredentialIdentityProvider, + HttpAuthScheme, +} from "@smithy/types"; +import { STSHttpAuthSchemeProvider } from "./httpAuthSchemeProvider"; +export interface HttpAuthExtensionConfiguration { + setHttpAuthScheme(httpAuthScheme: HttpAuthScheme): void; + httpAuthSchemes(): HttpAuthScheme[]; + setHttpAuthSchemeProvider( + httpAuthSchemeProvider: STSHttpAuthSchemeProvider + ): void; + httpAuthSchemeProvider(): STSHttpAuthSchemeProvider; + setCredentials( + credentials: AwsCredentialIdentity | AwsCredentialIdentityProvider + ): void; + 
credentials(): + | AwsCredentialIdentity + | AwsCredentialIdentityProvider + | undefined; +} +export type HttpAuthRuntimeConfig = Partial<{ + httpAuthSchemes: HttpAuthScheme[]; + httpAuthSchemeProvider: STSHttpAuthSchemeProvider; + credentials: AwsCredentialIdentity | AwsCredentialIdentityProvider; +}>; +export declare const getHttpAuthExtensionConfiguration: ( + runtimeConfig: HttpAuthRuntimeConfig +) => HttpAuthExtensionConfiguration; +export declare const resolveHttpAuthRuntimeConfig: ( + config: HttpAuthExtensionConfiguration +) => HttpAuthRuntimeConfig; diff --git a/amplify/functions/downloadDocument/node_modules/@aws-sdk/nested-clients/dist-types/ts3.4/submodules/sts/auth/httpAuthSchemeProvider.d.ts b/amplify/functions/downloadDocument/node_modules/@aws-sdk/nested-clients/dist-types/ts3.4/submodules/sts/auth/httpAuthSchemeProvider.d.ts new file mode 100644 index 0000000..0e17e2f --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@aws-sdk/nested-clients/dist-types/ts3.4/submodules/sts/auth/httpAuthSchemeProvider.d.ts @@ -0,0 +1,57 @@ +import { + AwsSdkSigV4AuthInputConfig, + AwsSdkSigV4AuthResolvedConfig, + AwsSdkSigV4PreviouslyResolved, +} from "@aws-sdk/core"; +import { + Client, + HandlerExecutionContext, + HttpAuthScheme, + HttpAuthSchemeParameters, + HttpAuthSchemeParametersProvider, + HttpAuthSchemeProvider, + Provider, +} from "@smithy/types"; +import { STSClientResolvedConfig } from "../STSClient"; +export interface STSHttpAuthSchemeParameters extends HttpAuthSchemeParameters { + region?: string; +} +export interface STSHttpAuthSchemeParametersProvider + extends HttpAuthSchemeParametersProvider< + STSClientResolvedConfig, + HandlerExecutionContext, + STSHttpAuthSchemeParameters, + object + > {} +export declare const defaultSTSHttpAuthSchemeParametersProvider: ( + config: STSClientResolvedConfig, + context: HandlerExecutionContext, + input: object +) => Promise; +export interface STSHttpAuthSchemeProvider + extends 
HttpAuthSchemeProvider {} +export declare const defaultSTSHttpAuthSchemeProvider: STSHttpAuthSchemeProvider; +export interface StsAuthInputConfig {} +export interface StsAuthResolvedConfig { + stsClientCtor: new (clientConfig: any) => Client; +} +export declare const resolveStsAuthConfig: ( + input: T & StsAuthInputConfig +) => T & StsAuthResolvedConfig; +export interface HttpAuthSchemeInputConfig + extends StsAuthInputConfig, + AwsSdkSigV4AuthInputConfig { + authSchemePreference?: string[] | Provider; + httpAuthSchemes?: HttpAuthScheme[]; + httpAuthSchemeProvider?: STSHttpAuthSchemeProvider; +} +export interface HttpAuthSchemeResolvedConfig + extends StsAuthResolvedConfig, + AwsSdkSigV4AuthResolvedConfig { + readonly authSchemePreference: Provider; + readonly httpAuthSchemes: HttpAuthScheme[]; + readonly httpAuthSchemeProvider: STSHttpAuthSchemeProvider; +} +export declare const resolveHttpAuthSchemeConfig: ( + config: T & HttpAuthSchemeInputConfig & AwsSdkSigV4PreviouslyResolved +) => T & HttpAuthSchemeResolvedConfig; diff --git a/amplify/functions/downloadDocument/node_modules/@aws-sdk/nested-clients/dist-types/ts3.4/submodules/sts/commands/AssumeRoleCommand.d.ts b/amplify/functions/downloadDocument/node_modules/@aws-sdk/nested-clients/dist-types/ts3.4/submodules/sts/commands/AssumeRoleCommand.d.ts new file mode 100644 index 0000000..9333fbb --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@aws-sdk/nested-clients/dist-types/ts3.4/submodules/sts/commands/AssumeRoleCommand.d.ts @@ -0,0 +1,47 @@ +import { Command as $Command } from "@smithy/smithy-client"; +import { MetadataBearer as __MetadataBearer } from "@smithy/types"; +import { AssumeRoleRequest, AssumeRoleResponse } from "../models/models_0"; +import { + ServiceInputTypes, + ServiceOutputTypes, + STSClientResolvedConfig, +} from "../STSClient"; +export { __MetadataBearer }; +export { $Command }; +export interface AssumeRoleCommandInput extends AssumeRoleRequest {} +export interface 
AssumeRoleCommandOutput + extends AssumeRoleResponse, + __MetadataBearer {} +declare const AssumeRoleCommand_base: { + new ( + input: AssumeRoleCommandInput + ): import("@smithy/smithy-client").CommandImpl< + AssumeRoleCommandInput, + AssumeRoleCommandOutput, + STSClientResolvedConfig, + ServiceInputTypes, + ServiceOutputTypes + >; + new ( + __0_0: AssumeRoleCommandInput + ): import("@smithy/smithy-client").CommandImpl< + AssumeRoleCommandInput, + AssumeRoleCommandOutput, + STSClientResolvedConfig, + ServiceInputTypes, + ServiceOutputTypes + >; + getEndpointParameterInstructions(): import("@smithy/middleware-endpoint").EndpointParameterInstructions; +}; +export declare class AssumeRoleCommand extends AssumeRoleCommand_base { + protected static __types: { + api: { + input: AssumeRoleRequest; + output: AssumeRoleResponse; + }; + sdk: { + input: AssumeRoleCommandInput; + output: AssumeRoleCommandOutput; + }; + }; +} diff --git a/amplify/functions/downloadDocument/node_modules/@aws-sdk/nested-clients/dist-types/ts3.4/submodules/sts/commands/AssumeRoleWithWebIdentityCommand.d.ts b/amplify/functions/downloadDocument/node_modules/@aws-sdk/nested-clients/dist-types/ts3.4/submodules/sts/commands/AssumeRoleWithWebIdentityCommand.d.ts new file mode 100644 index 0000000..222e034 --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@aws-sdk/nested-clients/dist-types/ts3.4/submodules/sts/commands/AssumeRoleWithWebIdentityCommand.d.ts @@ -0,0 +1,51 @@ +import { Command as $Command } from "@smithy/smithy-client"; +import { MetadataBearer as __MetadataBearer } from "@smithy/types"; +import { + AssumeRoleWithWebIdentityRequest, + AssumeRoleWithWebIdentityResponse, +} from "../models/models_0"; +import { + ServiceInputTypes, + ServiceOutputTypes, + STSClientResolvedConfig, +} from "../STSClient"; +export { __MetadataBearer }; +export { $Command }; +export interface AssumeRoleWithWebIdentityCommandInput + extends AssumeRoleWithWebIdentityRequest {} +export interface 
AssumeRoleWithWebIdentityCommandOutput + extends AssumeRoleWithWebIdentityResponse, + __MetadataBearer {} +declare const AssumeRoleWithWebIdentityCommand_base: { + new ( + input: AssumeRoleWithWebIdentityCommandInput + ): import("@smithy/smithy-client").CommandImpl< + AssumeRoleWithWebIdentityCommandInput, + AssumeRoleWithWebIdentityCommandOutput, + STSClientResolvedConfig, + ServiceInputTypes, + ServiceOutputTypes + >; + new ( + __0_0: AssumeRoleWithWebIdentityCommandInput + ): import("@smithy/smithy-client").CommandImpl< + AssumeRoleWithWebIdentityCommandInput, + AssumeRoleWithWebIdentityCommandOutput, + STSClientResolvedConfig, + ServiceInputTypes, + ServiceOutputTypes + >; + getEndpointParameterInstructions(): import("@smithy/middleware-endpoint").EndpointParameterInstructions; +}; +export declare class AssumeRoleWithWebIdentityCommand extends AssumeRoleWithWebIdentityCommand_base { + protected static __types: { + api: { + input: AssumeRoleWithWebIdentityRequest; + output: AssumeRoleWithWebIdentityResponse; + }; + sdk: { + input: AssumeRoleWithWebIdentityCommandInput; + output: AssumeRoleWithWebIdentityCommandOutput; + }; + }; +} diff --git a/amplify/functions/downloadDocument/node_modules/@aws-sdk/nested-clients/dist-types/ts3.4/submodules/sts/commands/index.d.ts b/amplify/functions/downloadDocument/node_modules/@aws-sdk/nested-clients/dist-types/ts3.4/submodules/sts/commands/index.d.ts new file mode 100644 index 0000000..0f200f5 --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@aws-sdk/nested-clients/dist-types/ts3.4/submodules/sts/commands/index.d.ts @@ -0,0 +1,2 @@ +export * from "./AssumeRoleCommand"; +export * from "./AssumeRoleWithWebIdentityCommand"; diff --git a/amplify/functions/downloadDocument/node_modules/@aws-sdk/nested-clients/dist-types/ts3.4/submodules/sts/defaultRoleAssumers.d.ts b/amplify/functions/downloadDocument/node_modules/@aws-sdk/nested-clients/dist-types/ts3.4/submodules/sts/defaultRoleAssumers.d.ts new file mode 
100644 index 0000000..b6f22cc --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@aws-sdk/nested-clients/dist-types/ts3.4/submodules/sts/defaultRoleAssumers.d.ts @@ -0,0 +1,19 @@ +import { Pluggable } from "@smithy/types"; +import { + DefaultCredentialProvider, + RoleAssumer, + RoleAssumerWithWebIdentity, + STSRoleAssumerOptions, +} from "./defaultStsRoleAssumers"; +import { ServiceInputTypes, ServiceOutputTypes } from "./STSClient"; +export declare const getDefaultRoleAssumer: ( + stsOptions?: STSRoleAssumerOptions, + stsPlugins?: Pluggable[] +) => RoleAssumer; +export declare const getDefaultRoleAssumerWithWebIdentity: ( + stsOptions?: STSRoleAssumerOptions, + stsPlugins?: Pluggable[] +) => RoleAssumerWithWebIdentity; +export declare const decorateDefaultCredentialProvider: ( + provider: DefaultCredentialProvider +) => DefaultCredentialProvider; diff --git a/amplify/functions/downloadDocument/node_modules/@aws-sdk/nested-clients/dist-types/ts3.4/submodules/sts/defaultStsRoleAssumers.d.ts b/amplify/functions/downloadDocument/node_modules/@aws-sdk/nested-clients/dist-types/ts3.4/submodules/sts/defaultStsRoleAssumers.d.ts new file mode 100644 index 0000000..3831379 --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@aws-sdk/nested-clients/dist-types/ts3.4/submodules/sts/defaultStsRoleAssumers.d.ts @@ -0,0 +1,33 @@ +import { CredentialProviderOptions } from "@aws-sdk/types"; +import { AwsCredentialIdentity, Logger, Provider } from "@smithy/types"; +import { AssumeRoleCommandInput } from "./commands/AssumeRoleCommand"; +import { AssumeRoleWithWebIdentityCommandInput } from "./commands/AssumeRoleWithWebIdentityCommand"; +import { STSClient, STSClientConfig } from "./STSClient"; +export type STSRoleAssumerOptions = Pick< + STSClientConfig, + "logger" | "region" | "requestHandler" +> & { + credentialProviderLogger?: Logger; + parentClientConfig?: CredentialProviderOptions["parentClientConfig"]; +}; +export type RoleAssumer = ( + 
sourceCreds: AwsCredentialIdentity, + params: AssumeRoleCommandInput +) => Promise; +export declare const getDefaultRoleAssumer: ( + stsOptions: STSRoleAssumerOptions, + STSClient: new (options: STSClientConfig) => STSClient +) => RoleAssumer; +export type RoleAssumerWithWebIdentity = ( + params: AssumeRoleWithWebIdentityCommandInput +) => Promise; +export declare const getDefaultRoleAssumerWithWebIdentity: ( + stsOptions: STSRoleAssumerOptions, + STSClient: new (options: STSClientConfig) => STSClient +) => RoleAssumerWithWebIdentity; +export type DefaultCredentialProvider = ( + input: any +) => Provider; +export declare const decorateDefaultCredentialProvider: ( + provider: DefaultCredentialProvider +) => DefaultCredentialProvider; diff --git a/amplify/functions/downloadDocument/node_modules/@aws-sdk/nested-clients/dist-types/ts3.4/submodules/sts/endpoint/EndpointParameters.d.ts b/amplify/functions/downloadDocument/node_modules/@aws-sdk/nested-clients/dist-types/ts3.4/submodules/sts/endpoint/EndpointParameters.d.ts new file mode 100644 index 0000000..33567fd --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@aws-sdk/nested-clients/dist-types/ts3.4/submodules/sts/endpoint/EndpointParameters.d.ts @@ -0,0 +1,57 @@ +import { + Endpoint, + EndpointParameters as __EndpointParameters, + EndpointV2, + Provider, +} from "@smithy/types"; +export interface ClientInputEndpointParameters { + region?: string | Provider; + useDualstackEndpoint?: boolean | Provider; + useFipsEndpoint?: boolean | Provider; + endpoint?: + | string + | Provider + | Endpoint + | Provider + | EndpointV2 + | Provider; + useGlobalEndpoint?: boolean | Provider; +} +export type ClientResolvedEndpointParameters = ClientInputEndpointParameters & { + defaultSigningName: string; +}; +export declare const resolveClientEndpointParameters: ( + options: T & ClientInputEndpointParameters +) => T & + ClientInputEndpointParameters & { + defaultSigningName: string; + }; +export declare const 
commonParams: { + readonly UseGlobalEndpoint: { + readonly type: "builtInParams"; + readonly name: "useGlobalEndpoint"; + }; + readonly UseFIPS: { + readonly type: "builtInParams"; + readonly name: "useFipsEndpoint"; + }; + readonly Endpoint: { + readonly type: "builtInParams"; + readonly name: "endpoint"; + }; + readonly Region: { + readonly type: "builtInParams"; + readonly name: "region"; + }; + readonly UseDualStack: { + readonly type: "builtInParams"; + readonly name: "useDualstackEndpoint"; + }; +}; +export interface EndpointParameters extends __EndpointParameters { + Region?: string; + UseDualStack?: boolean; + UseFIPS?: boolean; + Endpoint?: string; + UseGlobalEndpoint?: boolean; +} diff --git a/amplify/functions/downloadDocument/node_modules/@aws-sdk/nested-clients/dist-types/ts3.4/submodules/sts/endpoint/endpointResolver.d.ts b/amplify/functions/downloadDocument/node_modules/@aws-sdk/nested-clients/dist-types/ts3.4/submodules/sts/endpoint/endpointResolver.d.ts new file mode 100644 index 0000000..5909925 --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@aws-sdk/nested-clients/dist-types/ts3.4/submodules/sts/endpoint/endpointResolver.d.ts @@ -0,0 +1,8 @@ +import { EndpointV2, Logger } from "@smithy/types"; +import { EndpointParameters } from "./EndpointParameters"; +export declare const defaultEndpointResolver: ( + endpointParams: EndpointParameters, + context?: { + logger?: Logger; + } +) => EndpointV2; diff --git a/amplify/functions/downloadDocument/node_modules/@aws-sdk/nested-clients/dist-types/ts3.4/submodules/sts/endpoint/ruleset.d.ts b/amplify/functions/downloadDocument/node_modules/@aws-sdk/nested-clients/dist-types/ts3.4/submodules/sts/endpoint/ruleset.d.ts new file mode 100644 index 0000000..4b23899 --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@aws-sdk/nested-clients/dist-types/ts3.4/submodules/sts/endpoint/ruleset.d.ts @@ -0,0 +1,2 @@ +import { RuleSetObject } from "@smithy/types"; +export declare const 
ruleSet: RuleSetObject; diff --git a/amplify/functions/downloadDocument/node_modules/@aws-sdk/nested-clients/dist-types/ts3.4/submodules/sts/extensionConfiguration.d.ts b/amplify/functions/downloadDocument/node_modules/@aws-sdk/nested-clients/dist-types/ts3.4/submodules/sts/extensionConfiguration.d.ts new file mode 100644 index 0000000..14b124b --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@aws-sdk/nested-clients/dist-types/ts3.4/submodules/sts/extensionConfiguration.d.ts @@ -0,0 +1,9 @@ +import { AwsRegionExtensionConfiguration } from "@aws-sdk/types"; +import { HttpHandlerExtensionConfiguration } from "@smithy/protocol-http"; +import { DefaultExtensionConfiguration } from "@smithy/types"; +import { HttpAuthExtensionConfiguration } from "./auth/httpAuthExtensionConfiguration"; +export interface STSExtensionConfiguration + extends HttpHandlerExtensionConfiguration, + DefaultExtensionConfiguration, + AwsRegionExtensionConfiguration, + HttpAuthExtensionConfiguration {} diff --git a/amplify/functions/downloadDocument/node_modules/@aws-sdk/nested-clients/dist-types/ts3.4/submodules/sts/index.d.ts b/amplify/functions/downloadDocument/node_modules/@aws-sdk/nested-clients/dist-types/ts3.4/submodules/sts/index.d.ts new file mode 100644 index 0000000..157a306 --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@aws-sdk/nested-clients/dist-types/ts3.4/submodules/sts/index.d.ts @@ -0,0 +1,9 @@ +export * from "./STSClient"; +export * from "./STS"; +export { ClientInputEndpointParameters } from "./endpoint/EndpointParameters"; +export { RuntimeExtension } from "./runtimeExtensions"; +export { STSExtensionConfiguration } from "./extensionConfiguration"; +export * from "./commands"; +export * from "./models"; +export * from "./defaultRoleAssumers"; +export { STSServiceException } from "./models/STSServiceException"; diff --git 
a/amplify/functions/downloadDocument/node_modules/@aws-sdk/nested-clients/dist-types/ts3.4/submodules/sts/models/STSServiceException.d.ts b/amplify/functions/downloadDocument/node_modules/@aws-sdk/nested-clients/dist-types/ts3.4/submodules/sts/models/STSServiceException.d.ts new file mode 100644 index 0000000..95fc485 --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@aws-sdk/nested-clients/dist-types/ts3.4/submodules/sts/models/STSServiceException.d.ts @@ -0,0 +1,9 @@ +import { + ServiceException as __ServiceException, + ServiceExceptionOptions as __ServiceExceptionOptions, +} from "@smithy/smithy-client"; +export { __ServiceExceptionOptions }; +export { __ServiceException }; +export declare class STSServiceException extends __ServiceException { + constructor(options: __ServiceExceptionOptions); +} diff --git a/amplify/functions/downloadDocument/node_modules/@aws-sdk/nested-clients/dist-types/ts3.4/submodules/sts/models/index.d.ts b/amplify/functions/downloadDocument/node_modules/@aws-sdk/nested-clients/dist-types/ts3.4/submodules/sts/models/index.d.ts new file mode 100644 index 0000000..09c5d6e --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@aws-sdk/nested-clients/dist-types/ts3.4/submodules/sts/models/index.d.ts @@ -0,0 +1 @@ +export * from "./models_0"; diff --git a/amplify/functions/downloadDocument/node_modules/@aws-sdk/nested-clients/dist-types/ts3.4/submodules/sts/models/models_0.d.ts b/amplify/functions/downloadDocument/node_modules/@aws-sdk/nested-clients/dist-types/ts3.4/submodules/sts/models/models_0.d.ts new file mode 100644 index 0000000..1cba371 --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@aws-sdk/nested-clients/dist-types/ts3.4/submodules/sts/models/models_0.d.ts @@ -0,0 +1,123 @@ +import { ExceptionOptionType as __ExceptionOptionType } from "@smithy/smithy-client"; +import { STSServiceException as __BaseException } from "./STSServiceException"; +export interface AssumedRoleUser { + 
AssumedRoleId: string | undefined; + Arn: string | undefined; +} +export interface PolicyDescriptorType { + arn?: string | undefined; +} +export interface ProvidedContext { + ProviderArn?: string | undefined; + ContextAssertion?: string | undefined; +} +export interface Tag { + Key: string | undefined; + Value: string | undefined; +} +export interface AssumeRoleRequest { + RoleArn: string | undefined; + RoleSessionName: string | undefined; + PolicyArns?: PolicyDescriptorType[] | undefined; + Policy?: string | undefined; + DurationSeconds?: number | undefined; + Tags?: Tag[] | undefined; + TransitiveTagKeys?: string[] | undefined; + ExternalId?: string | undefined; + SerialNumber?: string | undefined; + TokenCode?: string | undefined; + SourceIdentity?: string | undefined; + ProvidedContexts?: ProvidedContext[] | undefined; +} +export interface Credentials { + AccessKeyId: string | undefined; + SecretAccessKey: string | undefined; + SessionToken: string | undefined; + Expiration: Date | undefined; +} +export declare const CredentialsFilterSensitiveLog: (obj: Credentials) => any; +export interface AssumeRoleResponse { + Credentials?: Credentials | undefined; + AssumedRoleUser?: AssumedRoleUser | undefined; + PackedPolicySize?: number | undefined; + SourceIdentity?: string | undefined; +} +export declare const AssumeRoleResponseFilterSensitiveLog: ( + obj: AssumeRoleResponse +) => any; +export declare class ExpiredTokenException extends __BaseException { + readonly name: "ExpiredTokenException"; + readonly $fault: "client"; + constructor( + opts: __ExceptionOptionType + ); +} +export declare class MalformedPolicyDocumentException extends __BaseException { + readonly name: "MalformedPolicyDocumentException"; + readonly $fault: "client"; + constructor( + opts: __ExceptionOptionType< + MalformedPolicyDocumentException, + __BaseException + > + ); +} +export declare class PackedPolicyTooLargeException extends __BaseException { + readonly name: 
"PackedPolicyTooLargeException"; + readonly $fault: "client"; + constructor( + opts: __ExceptionOptionType + ); +} +export declare class RegionDisabledException extends __BaseException { + readonly name: "RegionDisabledException"; + readonly $fault: "client"; + constructor( + opts: __ExceptionOptionType + ); +} +export declare class IDPRejectedClaimException extends __BaseException { + readonly name: "IDPRejectedClaimException"; + readonly $fault: "client"; + constructor( + opts: __ExceptionOptionType + ); +} +export declare class InvalidIdentityTokenException extends __BaseException { + readonly name: "InvalidIdentityTokenException"; + readonly $fault: "client"; + constructor( + opts: __ExceptionOptionType + ); +} +export interface AssumeRoleWithWebIdentityRequest { + RoleArn: string | undefined; + RoleSessionName: string | undefined; + WebIdentityToken: string | undefined; + ProviderId?: string | undefined; + PolicyArns?: PolicyDescriptorType[] | undefined; + Policy?: string | undefined; + DurationSeconds?: number | undefined; +} +export declare const AssumeRoleWithWebIdentityRequestFilterSensitiveLog: ( + obj: AssumeRoleWithWebIdentityRequest +) => any; +export interface AssumeRoleWithWebIdentityResponse { + Credentials?: Credentials | undefined; + SubjectFromWebIdentityToken?: string | undefined; + AssumedRoleUser?: AssumedRoleUser | undefined; + PackedPolicySize?: number | undefined; + Provider?: string | undefined; + Audience?: string | undefined; + SourceIdentity?: string | undefined; +} +export declare const AssumeRoleWithWebIdentityResponseFilterSensitiveLog: ( + obj: AssumeRoleWithWebIdentityResponse +) => any; +export declare class IDPCommunicationErrorException extends __BaseException { + readonly name: "IDPCommunicationErrorException"; + readonly $fault: "client"; + constructor( + opts: __ExceptionOptionType + ); +} diff --git 
a/amplify/functions/downloadDocument/node_modules/@aws-sdk/nested-clients/dist-types/ts3.4/submodules/sts/protocols/Aws_query.d.ts b/amplify/functions/downloadDocument/node_modules/@aws-sdk/nested-clients/dist-types/ts3.4/submodules/sts/protocols/Aws_query.d.ts new file mode 100644 index 0000000..1d03deb --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@aws-sdk/nested-clients/dist-types/ts3.4/submodules/sts/protocols/Aws_query.d.ts @@ -0,0 +1,29 @@ +import { + HttpRequest as __HttpRequest, + HttpResponse as __HttpResponse, +} from "@smithy/protocol-http"; +import { SerdeContext as __SerdeContext } from "@smithy/types"; +import { + AssumeRoleCommandInput, + AssumeRoleCommandOutput, +} from "../commands/AssumeRoleCommand"; +import { + AssumeRoleWithWebIdentityCommandInput, + AssumeRoleWithWebIdentityCommandOutput, +} from "../commands/AssumeRoleWithWebIdentityCommand"; +export declare const se_AssumeRoleCommand: ( + input: AssumeRoleCommandInput, + context: __SerdeContext +) => Promise<__HttpRequest>; +export declare const se_AssumeRoleWithWebIdentityCommand: ( + input: AssumeRoleWithWebIdentityCommandInput, + context: __SerdeContext +) => Promise<__HttpRequest>; +export declare const de_AssumeRoleCommand: ( + output: __HttpResponse, + context: __SerdeContext +) => Promise; +export declare const de_AssumeRoleWithWebIdentityCommand: ( + output: __HttpResponse, + context: __SerdeContext +) => Promise; diff --git a/amplify/functions/downloadDocument/node_modules/@aws-sdk/nested-clients/dist-types/ts3.4/submodules/sts/runtimeConfig.browser.d.ts b/amplify/functions/downloadDocument/node_modules/@aws-sdk/nested-clients/dist-types/ts3.4/submodules/sts/runtimeConfig.browser.d.ts new file mode 100644 index 0000000..54a4e79 --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@aws-sdk/nested-clients/dist-types/ts3.4/submodules/sts/runtimeConfig.browser.d.ts @@ -0,0 +1,131 @@ +import { FetchHttpHandler as RequestHandler } from 
"@smithy/fetch-http-handler"; +import { STSClientConfig } from "./STSClient"; +export declare const getRuntimeConfig: (config: STSClientConfig) => { + runtime: string; + defaultsMode: import("@smithy/types").Provider< + import("@smithy/smithy-client").ResolvedDefaultsMode + >; + bodyLengthChecker: import("@smithy/types").BodyLengthCalculator; + credentialDefaultProvider: + | ((input: any) => import("@smithy/types").AwsCredentialIdentityProvider) + | (( + _: unknown + ) => () => Promise); + defaultUserAgentProvider: ( + config?: + | import("@aws-sdk/util-user-agent-browser").PreviouslyResolved + | undefined + ) => Promise; + maxAttempts: number | import("@smithy/types").Provider; + region: string | import("@smithy/types").Provider; + requestHandler: + | import("@smithy/protocol-http").HttpHandler + | RequestHandler; + retryMode: string | import("@smithy/types").Provider; + sha256: import("@smithy/types").HashConstructor; + streamCollector: import("@smithy/types").StreamCollector; + useDualstackEndpoint: boolean | import("@smithy/types").Provider; + useFipsEndpoint: boolean | import("@smithy/types").Provider; + apiVersion: string; + cacheMiddleware?: boolean | undefined; + urlParser: import("@smithy/types").UrlParser; + base64Decoder: import("@smithy/types").Decoder; + base64Encoder: (_input: string | Uint8Array) => string; + utf8Decoder: import("@smithy/types").Decoder; + utf8Encoder: (input: string | Uint8Array) => string; + disableHostPrefix: boolean; + serviceId: string; + profile?: string | undefined; + logger: import("@smithy/types").Logger; + extensions: import("./runtimeExtensions").RuntimeExtension[]; + customUserAgent?: string | import("@smithy/types").UserAgent | undefined; + userAgentAppId?: + | string + | import("@smithy/types").Provider + | undefined; + retryStrategy?: + | import("@smithy/types").RetryStrategy + | import("@smithy/types").RetryStrategyV2 + | undefined; + endpoint?: + | (( + | string + | import("@smithy/types").Endpoint + | 
import("@smithy/types").Provider + | import("@smithy/types").EndpointV2 + | import("@smithy/types").Provider + ) & + ( + | string + | import("@smithy/types").Provider + | import("@smithy/types").Endpoint + | import("@smithy/types").Provider + | import("@smithy/types").EndpointV2 + | import("@smithy/types").Provider + )) + | undefined; + endpointProvider: ( + params: import("./endpoint/EndpointParameters").EndpointParameters, + context?: + | { + logger?: import("@smithy/types").Logger | undefined; + } + | undefined + ) => import("@smithy/types").EndpointV2; + tls?: boolean | undefined; + serviceConfiguredEndpoint?: undefined; + authSchemePreference?: + | string[] + | import("@smithy/types").Provider + | undefined; + httpAuthSchemes: + | import("@smithy/types").HttpAuthScheme[] + | ( + | { + schemeId: string; + identityProvider: ( + ipc: import("@smithy/types").IdentityProviderConfig + ) => + | import("@smithy/types").IdentityProvider< + import("@smithy/types").Identity + > + | undefined; + signer: import("@aws-sdk/core").AwsSdkSigV4Signer; + } + | { + schemeId: string; + identityProvider: ( + ipc: import("@smithy/types").IdentityProviderConfig + ) => + | import("@smithy/types").IdentityProvider< + import("@smithy/types").Identity + > + | (() => Promise<{}>); + signer: import("@smithy/core").NoAuthSigner; + } + )[]; + httpAuthSchemeProvider: import("./auth/httpAuthSchemeProvider").STSHttpAuthSchemeProvider; + credentials?: + | import("@smithy/types").AwsCredentialIdentity + | import("@smithy/types").AwsCredentialIdentityProvider + | undefined; + signer?: + | import("@smithy/types").RequestSigner + | (( + authScheme?: import("@smithy/types").AuthScheme | undefined + ) => Promise) + | undefined; + signingEscapePath?: boolean | undefined; + systemClockOffset?: number | undefined; + signingRegion?: string | undefined; + signerConstructor?: + | (new ( + options: import("@smithy/signature-v4").SignatureV4Init & + import("@smithy/signature-v4").SignatureV4CryptoInit + ) => 
import("@smithy/types").RequestSigner) + | undefined; + useGlobalEndpoint?: + | boolean + | import("@smithy/types").Provider + | undefined; +}; diff --git a/amplify/functions/downloadDocument/node_modules/@aws-sdk/nested-clients/dist-types/ts3.4/submodules/sts/runtimeConfig.d.ts b/amplify/functions/downloadDocument/node_modules/@aws-sdk/nested-clients/dist-types/ts3.4/submodules/sts/runtimeConfig.d.ts new file mode 100644 index 0000000..50cd2c7 --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@aws-sdk/nested-clients/dist-types/ts3.4/submodules/sts/runtimeConfig.d.ts @@ -0,0 +1,112 @@ +import { NoAuthSigner } from "@smithy/core"; +import { NodeHttpHandler as RequestHandler } from "@smithy/node-http-handler"; +import { IdentityProviderConfig } from "@smithy/types"; +import { STSClientConfig } from "./STSClient"; +export declare const getRuntimeConfig: (config: STSClientConfig) => { + runtime: string; + defaultsMode: import("@smithy/types").Provider< + import("@smithy/smithy-client").ResolvedDefaultsMode + >; + authSchemePreference: string[] | import("@smithy/types").Provider; + bodyLengthChecker: import("@smithy/types").BodyLengthCalculator; + defaultUserAgentProvider: ( + config?: + | import("@aws-sdk/util-user-agent-node").PreviouslyResolved + | undefined + ) => Promise; + httpAuthSchemes: + | import("@smithy/types").HttpAuthScheme[] + | { + schemeId: string; + identityProvider: ( + ipc: IdentityProviderConfig + ) => + | import("@smithy/types").IdentityProvider< + import("@smithy/types").Identity + > + | (() => Promise<{}>); + signer: NoAuthSigner; + }[]; + maxAttempts: number | import("@smithy/types").Provider; + region: string | import("@smithy/types").Provider; + requestHandler: + | RequestHandler + | import("@smithy/protocol-http").HttpHandler; + retryMode: string | import("@smithy/types").Provider; + sha256: import("@smithy/types").HashConstructor; + streamCollector: import("@smithy/types").StreamCollector; + useDualstackEndpoint: boolean | 
import("@smithy/types").Provider; + useFipsEndpoint: boolean | import("@smithy/types").Provider; + userAgentAppId: string | import("@smithy/types").Provider; + apiVersion: string; + cacheMiddleware?: boolean | undefined; + urlParser: import("@smithy/types").UrlParser; + base64Decoder: import("@smithy/types").Decoder; + base64Encoder: (_input: string | Uint8Array) => string; + utf8Decoder: import("@smithy/types").Decoder; + utf8Encoder: (input: string | Uint8Array) => string; + disableHostPrefix: boolean; + serviceId: string; + profile?: string | undefined; + credentialDefaultProvider?: + | ((input: any) => import("@smithy/types").AwsCredentialIdentityProvider) + | undefined; + logger: import("@smithy/types").Logger; + extensions: import("./runtimeExtensions").RuntimeExtension[]; + customUserAgent?: string | import("@smithy/types").UserAgent | undefined; + retryStrategy?: + | import("@smithy/types").RetryStrategy + | import("@smithy/types").RetryStrategyV2 + | undefined; + endpoint?: + | (( + | string + | import("@smithy/types").Endpoint + | import("@smithy/types").Provider + | import("@smithy/types").EndpointV2 + | import("@smithy/types").Provider + ) & + ( + | string + | import("@smithy/types").Provider + | import("@smithy/types").Endpoint + | import("@smithy/types").Provider + | import("@smithy/types").EndpointV2 + | import("@smithy/types").Provider + )) + | undefined; + endpointProvider: ( + params: import("./endpoint/EndpointParameters").EndpointParameters, + context?: + | { + logger?: import("@smithy/types").Logger | undefined; + } + | undefined + ) => import("@smithy/types").EndpointV2; + tls?: boolean | undefined; + serviceConfiguredEndpoint?: undefined; + httpAuthSchemeProvider: import("./auth/httpAuthSchemeProvider").STSHttpAuthSchemeProvider; + credentials?: + | import("@smithy/types").AwsCredentialIdentity + | import("@smithy/types").AwsCredentialIdentityProvider + | undefined; + signer?: + | import("@smithy/types").RequestSigner + | (( + authScheme?: 
import("@smithy/types").AuthScheme | undefined + ) => Promise) + | undefined; + signingEscapePath?: boolean | undefined; + systemClockOffset?: number | undefined; + signingRegion?: string | undefined; + signerConstructor?: + | (new ( + options: import("@smithy/signature-v4").SignatureV4Init & + import("@smithy/signature-v4").SignatureV4CryptoInit + ) => import("@smithy/types").RequestSigner) + | undefined; + useGlobalEndpoint?: + | boolean + | import("@smithy/types").Provider + | undefined; +}; diff --git a/amplify/functions/downloadDocument/node_modules/@aws-sdk/nested-clients/dist-types/ts3.4/submodules/sts/runtimeConfig.native.d.ts b/amplify/functions/downloadDocument/node_modules/@aws-sdk/nested-clients/dist-types/ts3.4/submodules/sts/runtimeConfig.native.d.ts new file mode 100644 index 0000000..5eda45e --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@aws-sdk/nested-clients/dist-types/ts3.4/submodules/sts/runtimeConfig.native.d.ts @@ -0,0 +1,135 @@ +import { STSClientConfig } from "./STSClient"; +export declare const getRuntimeConfig: (config: STSClientConfig) => { + runtime: string; + sha256: import("@smithy/types").HashConstructor; + requestHandler: + | import("@smithy/types").NodeHttpHandlerOptions + | import("@smithy/types").FetchHttpHandlerOptions + | Record + | import("@smithy/protocol-http").HttpHandler + | import("@smithy/fetch-http-handler").FetchHttpHandler; + apiVersion: string; + cacheMiddleware?: boolean | undefined; + urlParser: import("@smithy/types").UrlParser; + bodyLengthChecker: import("@smithy/types").BodyLengthCalculator; + streamCollector: import("@smithy/types").StreamCollector; + base64Decoder: import("@smithy/types").Decoder; + base64Encoder: (_input: string | Uint8Array) => string; + utf8Decoder: import("@smithy/types").Decoder; + utf8Encoder: (input: string | Uint8Array) => string; + disableHostPrefix: boolean; + serviceId: string; + useDualstackEndpoint: boolean | import("@smithy/types").Provider; + 
useFipsEndpoint: boolean | import("@smithy/types").Provider; + region: string | import("@smithy/types").Provider; + profile?: string | undefined; + defaultUserAgentProvider: ( + config?: + | import("@aws-sdk/util-user-agent-browser").PreviouslyResolved + | undefined + ) => Promise; + credentialDefaultProvider: + | ((input: any) => import("@smithy/types").AwsCredentialIdentityProvider) + | (( + _: unknown + ) => () => Promise); + maxAttempts: number | import("@smithy/types").Provider; + retryMode: string | import("@smithy/types").Provider; + logger: import("@smithy/types").Logger; + extensions: import("./runtimeExtensions").RuntimeExtension[]; + defaultsMode: + | import("@smithy/smithy-client").DefaultsMode + | import("@smithy/types").Provider< + import("@smithy/smithy-client").DefaultsMode + >; + customUserAgent?: string | import("@smithy/types").UserAgent | undefined; + userAgentAppId?: + | string + | import("@smithy/types").Provider + | undefined; + retryStrategy?: + | import("@smithy/types").RetryStrategy + | import("@smithy/types").RetryStrategyV2 + | undefined; + endpoint?: + | (( + | string + | import("@smithy/types").Endpoint + | import("@smithy/types").Provider + | import("@smithy/types").EndpointV2 + | import("@smithy/types").Provider + ) & + ( + | string + | import("@smithy/types").Provider + | import("@smithy/types").Endpoint + | import("@smithy/types").Provider + | import("@smithy/types").EndpointV2 + | import("@smithy/types").Provider + )) + | undefined; + endpointProvider: ( + params: import("./endpoint/EndpointParameters").EndpointParameters, + context?: + | { + logger?: import("@smithy/types").Logger | undefined; + } + | undefined + ) => import("@smithy/types").EndpointV2; + tls?: boolean | undefined; + serviceConfiguredEndpoint?: undefined; + authSchemePreference?: + | string[] + | import("@smithy/types").Provider + | undefined; + httpAuthSchemes: + | import("@smithy/types").HttpAuthScheme[] + | ( + | { + schemeId: string; + identityProvider: ( + 
ipc: import("@smithy/types").IdentityProviderConfig + ) => + | import("@smithy/types").IdentityProvider< + import("@smithy/types").Identity + > + | undefined; + signer: import("@aws-sdk/core").AwsSdkSigV4Signer; + } + | { + schemeId: string; + identityProvider: ( + ipc: import("@smithy/types").IdentityProviderConfig + ) => + | import("@smithy/types").IdentityProvider< + import("@smithy/types").Identity + > + | (() => Promise<{}>); + signer: import("@smithy/core").NoAuthSigner; + } + )[]; + httpAuthSchemeProvider: import("./auth/httpAuthSchemeProvider").STSHttpAuthSchemeProvider; + credentials?: + | import("@smithy/types").AwsCredentialIdentity + | import("@smithy/types").AwsCredentialIdentityProvider + | undefined; + signer?: + | import("@smithy/types").RequestSigner + | (( + authScheme?: import("@smithy/types").AuthScheme | undefined + ) => Promise) + | undefined; + signingEscapePath?: boolean | undefined; + systemClockOffset?: number | undefined; + signingRegion?: string | undefined; + signerConstructor?: + | (new ( + options: import("@smithy/signature-v4").SignatureV4Init & + import("@smithy/signature-v4").SignatureV4CryptoInit + ) => import("@smithy/types").RequestSigner) + | undefined; + useGlobalEndpoint?: + | boolean + | import("@smithy/types").Provider + | undefined; +}; diff --git a/amplify/functions/downloadDocument/node_modules/@aws-sdk/nested-clients/dist-types/ts3.4/submodules/sts/runtimeConfig.shared.d.ts b/amplify/functions/downloadDocument/node_modules/@aws-sdk/nested-clients/dist-types/ts3.4/submodules/sts/runtimeConfig.shared.d.ts new file mode 100644 index 0000000..860b0c8 --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@aws-sdk/nested-clients/dist-types/ts3.4/submodules/sts/runtimeConfig.shared.d.ts @@ -0,0 +1,51 @@ +import { AwsSdkSigV4Signer } from "@aws-sdk/core"; +import { NoAuthSigner } from "@smithy/core"; +import { IdentityProviderConfig } from "@smithy/types"; +import { STSClientConfig } from "./STSClient"; +export 
declare const getRuntimeConfig: (config: STSClientConfig) => { + apiVersion: string; + base64Decoder: import("@smithy/types").Decoder; + base64Encoder: (_input: string | Uint8Array) => string; + disableHostPrefix: boolean; + endpointProvider: ( + params: import("./endpoint/EndpointParameters").EndpointParameters, + context?: + | { + logger?: import("@smithy/types").Logger | undefined; + } + | undefined + ) => import("@smithy/types").EndpointV2; + extensions: import("./runtimeExtensions").RuntimeExtension[]; + httpAuthSchemeProvider: import("./auth/httpAuthSchemeProvider").STSHttpAuthSchemeProvider; + httpAuthSchemes: + | import("@smithy/types").HttpAuthScheme[] + | ( + | { + schemeId: string; + identityProvider: ( + ipc: IdentityProviderConfig + ) => + | import("@smithy/types").IdentityProvider< + import("@smithy/types").Identity + > + | undefined; + signer: AwsSdkSigV4Signer; + } + | { + schemeId: string; + identityProvider: ( + ipc: IdentityProviderConfig + ) => + | import("@smithy/types").IdentityProvider< + import("@smithy/types").Identity + > + | (() => Promise<{}>); + signer: NoAuthSigner; + } + )[]; + logger: import("@smithy/types").Logger; + serviceId: string; + urlParser: import("@smithy/types").UrlParser; + utf8Decoder: import("@smithy/types").Decoder; + utf8Encoder: (input: string | Uint8Array) => string; +}; diff --git a/amplify/functions/downloadDocument/node_modules/@aws-sdk/nested-clients/dist-types/ts3.4/submodules/sts/runtimeExtensions.d.ts b/amplify/functions/downloadDocument/node_modules/@aws-sdk/nested-clients/dist-types/ts3.4/submodules/sts/runtimeExtensions.d.ts new file mode 100644 index 0000000..d3cd411 --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@aws-sdk/nested-clients/dist-types/ts3.4/submodules/sts/runtimeExtensions.d.ts @@ -0,0 +1,11 @@ +import { STSExtensionConfiguration } from "./extensionConfiguration"; +export interface RuntimeExtension { + configure(extensionConfiguration: STSExtensionConfiguration): void; +} 
+export interface RuntimeExtensionsConfig { + extensions: RuntimeExtension[]; +} +export declare const resolveRuntimeExtensions: ( + runtimeConfig: any, + extensions: RuntimeExtension[] +) => any; diff --git a/amplify/functions/downloadDocument/node_modules/@aws-sdk/nested-clients/package.json b/amplify/functions/downloadDocument/node_modules/@aws-sdk/nested-clients/package.json new file mode 100644 index 0000000..26191ad --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@aws-sdk/nested-clients/package.json @@ -0,0 +1,115 @@ +{ + "name": "@aws-sdk/nested-clients", + "version": "3.803.0", + "description": "Nested clients for AWS SDK packages.", + "main": "./dist-cjs/index.js", + "module": "./dist-es/index.js", + "types": "./dist-types/index.d.ts", + "scripts": { + "build": "yarn lint && concurrently 'yarn:build:cjs' 'yarn:build:es' 'yarn:build:types'", + "build:cjs": "node ../../scripts/compilation/inline nested-clients", + "build:es": "tsc -p tsconfig.es.json", + "build:include:deps": "lerna run --scope $npm_package_name --include-dependencies build", + "build:types": "tsc -p tsconfig.types.json", + "build:types:downlevel": "downlevel-dts dist-types dist-types/ts3.4", + "clean": "rimraf ./dist-* && rimraf *.tsbuildinfo", + "lint": "node ../../scripts/validation/submodules-linter.js --pkg nested-clients", + "test": "yarn g:vitest run", + "test:watch": "yarn g:vitest watch" + }, + "engines": { + "node": ">=18.0.0" + }, + "author": { + "name": "AWS SDK for JavaScript Team", + "url": "https://aws.amazon.com/javascript/" + }, + "license": "Apache-2.0", + "dependencies": { + "@aws-crypto/sha256-browser": "5.2.0", + "@aws-crypto/sha256-js": "5.2.0", + "@aws-sdk/core": "3.799.0", + "@aws-sdk/middleware-host-header": "3.775.0", + "@aws-sdk/middleware-logger": "3.775.0", + "@aws-sdk/middleware-recursion-detection": "3.775.0", + "@aws-sdk/middleware-user-agent": "3.799.0", + "@aws-sdk/region-config-resolver": "3.775.0", + "@aws-sdk/types": "3.775.0", + 
"@aws-sdk/util-endpoints": "3.787.0", + "@aws-sdk/util-user-agent-browser": "3.775.0", + "@aws-sdk/util-user-agent-node": "3.799.0", + "@smithy/config-resolver": "^4.1.0", + "@smithy/core": "^3.3.0", + "@smithy/fetch-http-handler": "^5.0.2", + "@smithy/hash-node": "^4.0.2", + "@smithy/invalid-dependency": "^4.0.2", + "@smithy/middleware-content-length": "^4.0.2", + "@smithy/middleware-endpoint": "^4.1.1", + "@smithy/middleware-retry": "^4.1.2", + "@smithy/middleware-serde": "^4.0.3", + "@smithy/middleware-stack": "^4.0.2", + "@smithy/node-config-provider": "^4.0.2", + "@smithy/node-http-handler": "^4.0.4", + "@smithy/protocol-http": "^5.1.0", + "@smithy/smithy-client": "^4.2.1", + "@smithy/types": "^4.2.0", + "@smithy/url-parser": "^4.0.2", + "@smithy/util-base64": "^4.0.0", + "@smithy/util-body-length-browser": "^4.0.0", + "@smithy/util-body-length-node": "^4.0.0", + "@smithy/util-defaults-mode-browser": "^4.0.9", + "@smithy/util-defaults-mode-node": "^4.0.9", + "@smithy/util-endpoints": "^3.0.2", + "@smithy/util-middleware": "^4.0.2", + "@smithy/util-retry": "^4.0.3", + "@smithy/util-utf8": "^4.0.0", + "tslib": "^2.6.2" + }, + "devDependencies": { + "concurrently": "7.0.0", + "downlevel-dts": "0.10.1", + "rimraf": "3.0.2", + "typescript": "~5.2.2" + }, + "typesVersions": { + "<4.0": { + "dist-types/*": [ + "dist-types/ts3.4/*" + ] + } + }, + "files": [ + "./sso-oidc.d.ts", + "./sso-oidc.js", + "./sts.d.ts", + "./sts.js", + "dist-*/**" + ], + "browser": { + "./dist-es/submodules/sso-oidc/runtimeConfig": "./dist-es/submodules/sso-oidc/runtimeConfig.browser", + "./dist-es/submodules/sts/runtimeConfig": "./dist-es/submodules/sts/runtimeConfig.browser" + }, + "react-native": {}, + "homepage": "https://github.com/aws/aws-sdk-js-v3/tree/main/packages/nested-clients", + "repository": { + "type": "git", + "url": "https://github.com/aws/aws-sdk-js-v3.git", + "directory": "packages/nested-clients" + }, + "exports": { + "./sso-oidc": { + "types": 
"./dist-types/submodules/sso-oidc/index.d.ts", + "module": "./dist-es/submodules/sso-oidc/index.js", + "node": "./dist-cjs/submodules/sso-oidc/index.js", + "import": "./dist-es/submodules/sso-oidc/index.js", + "require": "./dist-cjs/submodules/sso-oidc/index.js" + }, + "./sts": { + "types": "./dist-types/submodules/sts/index.d.ts", + "module": "./dist-es/submodules/sts/index.js", + "node": "./dist-cjs/submodules/sts/index.js", + "import": "./dist-es/submodules/sts/index.js", + "require": "./dist-cjs/submodules/sts/index.js" + } + } +} diff --git a/amplify/functions/downloadDocument/node_modules/@aws-sdk/nested-clients/sso-oidc.d.ts b/amplify/functions/downloadDocument/node_modules/@aws-sdk/nested-clients/sso-oidc.d.ts new file mode 100644 index 0000000..ab47282 --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@aws-sdk/nested-clients/sso-oidc.d.ts @@ -0,0 +1,7 @@ +/** + * Do not edit: + * This is a compatibility redirect for contexts that do not understand package.json exports field. + */ +declare module "@aws-sdk/nested-clients/sso-oidc" { + export * from "@aws-sdk/nested-clients/dist-types/submodules/sso-oidc/index.d"; +} diff --git a/amplify/functions/downloadDocument/node_modules/@aws-sdk/nested-clients/sso-oidc.js b/amplify/functions/downloadDocument/node_modules/@aws-sdk/nested-clients/sso-oidc.js new file mode 100644 index 0000000..896865c --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@aws-sdk/nested-clients/sso-oidc.js @@ -0,0 +1,5 @@ +/** + * Do not edit: + * This is a compatibility redirect for contexts that do not understand package.json exports field. 
+ */ +module.exports = require("./dist-cjs/submodules/sso-oidc/index.js"); diff --git a/amplify/functions/downloadDocument/node_modules/@aws-sdk/nested-clients/sts.d.ts b/amplify/functions/downloadDocument/node_modules/@aws-sdk/nested-clients/sts.d.ts new file mode 100644 index 0000000..03b8e68 --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@aws-sdk/nested-clients/sts.d.ts @@ -0,0 +1,7 @@ +/** + * Do not edit: + * This is a compatibility redirect for contexts that do not understand package.json exports field. + */ +declare module "@aws-sdk/nested-clients/sts" { + export * from "@aws-sdk/nested-clients/dist-types/submodules/sts/index.d"; +} diff --git a/amplify/functions/downloadDocument/node_modules/@aws-sdk/nested-clients/sts.js b/amplify/functions/downloadDocument/node_modules/@aws-sdk/nested-clients/sts.js new file mode 100644 index 0000000..8976f12 --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@aws-sdk/nested-clients/sts.js @@ -0,0 +1,5 @@ +/** + * Do not edit: + * This is a compatibility redirect for contexts that do not understand package.json exports field. + */ +module.exports = require("./dist-cjs/submodules/sts/index.js"); diff --git a/amplify/functions/downloadDocument/node_modules/@aws-sdk/region-config-resolver/LICENSE b/amplify/functions/downloadDocument/node_modules/@aws-sdk/region-config-resolver/LICENSE new file mode 100644 index 0000000..7b6491b --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@aws-sdk/region-config-resolver/LICENSE @@ -0,0 +1,201 @@ +Apache License + Version 2.0, January 2004 + http://www.apache.org/licenses/ + + TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION + + 1. Definitions. + + "License" shall mean the terms and conditions for use, reproduction, + and distribution as defined by Sections 1 through 9 of this document. + + "Licensor" shall mean the copyright owner or entity authorized by + the copyright owner that is granting the License. 
+ + "Legal Entity" shall mean the union of the acting entity and all + other entities that control, are controlled by, or are under common + control with that entity. For the purposes of this definition, + "control" means (i) the power, direct or indirect, to cause the + direction or management of such entity, whether by contract or + otherwise, or (ii) ownership of fifty percent (50%) or more of the + outstanding shares, or (iii) beneficial ownership of such entity. + + "You" (or "Your") shall mean an individual or Legal Entity + exercising permissions granted by this License. + + "Source" form shall mean the preferred form for making modifications, + including but not limited to software source code, documentation + source, and configuration files. + + "Object" form shall mean any form resulting from mechanical + transformation or translation of a Source form, including but + not limited to compiled object code, generated documentation, + and conversions to other media types. + + "Work" shall mean the work of authorship, whether in Source or + Object form, made available under the License, as indicated by a + copyright notice that is included in or attached to the work + (an example is provided in the Appendix below). + + "Derivative Works" shall mean any work, whether in Source or Object + form, that is based on (or derived from) the Work and for which the + editorial revisions, annotations, elaborations, or other modifications + represent, as a whole, an original work of authorship. For the purposes + of this License, Derivative Works shall not include works that remain + separable from, or merely link (or bind by name) to the interfaces of, + the Work and Derivative Works thereof. 
+ + "Contribution" shall mean any work of authorship, including + the original version of the Work and any modifications or additions + to that Work or Derivative Works thereof, that is intentionally + submitted to Licensor for inclusion in the Work by the copyright owner + or by an individual or Legal Entity authorized to submit on behalf of + the copyright owner. For the purposes of this definition, "submitted" + means any form of electronic, verbal, or written communication sent + to the Licensor or its representatives, including but not limited to + communication on electronic mailing lists, source code control systems, + and issue tracking systems that are managed by, or on behalf of, the + Licensor for the purpose of discussing and improving the Work, but + excluding communication that is conspicuously marked or otherwise + designated in writing by the copyright owner as "Not a Contribution." + + "Contributor" shall mean Licensor and any individual or Legal Entity + on behalf of whom a Contribution has been received by Licensor and + subsequently incorporated within the Work. + + 2. Grant of Copyright License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + copyright license to reproduce, prepare Derivative Works of, + publicly display, publicly perform, sublicense, and distribute the + Work and such Derivative Works in Source or Object form. + + 3. Grant of Patent License. 
Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + (except as stated in this section) patent license to make, have made, + use, offer to sell, sell, import, and otherwise transfer the Work, + where such license applies only to those patent claims licensable + by such Contributor that are necessarily infringed by their + Contribution(s) alone or by combination of their Contribution(s) + with the Work to which such Contribution(s) was submitted. If You + institute patent litigation against any entity (including a + cross-claim or counterclaim in a lawsuit) alleging that the Work + or a Contribution incorporated within the Work constitutes direct + or contributory patent infringement, then any patent licenses + granted to You under this License for that Work shall terminate + as of the date such litigation is filed. + + 4. Redistribution. You may reproduce and distribute copies of the + Work or Derivative Works thereof in any medium, with or without + modifications, and in Source or Object form, provided that You + meet the following conditions: + + (a) You must give any other recipients of the Work or + Derivative Works a copy of this License; and + + (b) You must cause any modified files to carry prominent notices + stating that You changed the files; and + + (c) You must retain, in the Source form of any Derivative Works + that You distribute, all copyright, patent, trademark, and + attribution notices from the Source form of the Work, + excluding those notices that do not pertain to any part of + the Derivative Works; and + + (d) If the Work includes a "NOTICE" text file as part of its + distribution, then any Derivative Works that You distribute must + include a readable copy of the attribution notices contained + within such NOTICE file, excluding those notices that do not + pertain to any part of the Derivative Works, in at least one + of 
the following places: within a NOTICE text file distributed + as part of the Derivative Works; within the Source form or + documentation, if provided along with the Derivative Works; or, + within a display generated by the Derivative Works, if and + wherever such third-party notices normally appear. The contents + of the NOTICE file are for informational purposes only and + do not modify the License. You may add Your own attribution + notices within Derivative Works that You distribute, alongside + or as an addendum to the NOTICE text from the Work, provided + that such additional attribution notices cannot be construed + as modifying the License. + + You may add Your own copyright statement to Your modifications and + may provide additional or different license terms and conditions + for use, reproduction, or distribution of Your modifications, or + for any such Derivative Works as a whole, provided Your use, + reproduction, and distribution of the Work otherwise complies with + the conditions stated in this License. + + 5. Submission of Contributions. Unless You explicitly state otherwise, + any Contribution intentionally submitted for inclusion in the Work + by You to the Licensor shall be under the terms and conditions of + this License, without any additional terms or conditions. + Notwithstanding the above, nothing herein shall supersede or modify + the terms of any separate license agreement you may have executed + with Licensor regarding such Contributions. + + 6. Trademarks. This License does not grant permission to use the trade + names, trademarks, service marks, or product names of the Licensor, + except as required for reasonable and customary use in describing the + origin of the Work and reproducing the content of the NOTICE file. + + 7. Disclaimer of Warranty. 
Unless required by applicable law or + agreed to in writing, Licensor provides the Work (and each + Contributor provides its Contributions) on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or + implied, including, without limitation, any warranties or conditions + of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A + PARTICULAR PURPOSE. You are solely responsible for determining the + appropriateness of using or redistributing the Work and assume any + risks associated with Your exercise of permissions under this License. + + 8. Limitation of Liability. In no event and under no legal theory, + whether in tort (including negligence), contract, or otherwise, + unless required by applicable law (such as deliberate and grossly + negligent acts) or agreed to in writing, shall any Contributor be + liable to You for damages, including any direct, indirect, special, + incidental, or consequential damages of any character arising as a + result of this License or out of the use or inability to use the + Work (including but not limited to damages for loss of goodwill, + work stoppage, computer failure or malfunction, or any and all + other commercial damages or losses), even if such Contributor + has been advised of the possibility of such damages. + + 9. Accepting Warranty or Additional Liability. While redistributing + the Work or Derivative Works thereof, You may choose to offer, + and charge a fee for, acceptance of support, warranty, indemnity, + or other liability obligations and/or rights consistent with this + License. However, in accepting such obligations, You may act only + on Your own behalf and on Your sole responsibility, not on behalf + of any other Contributor, and only if You agree to indemnify, + defend, and hold each Contributor harmless for any liability + incurred by, or claims asserted against, such Contributor by reason + of your accepting any such warranty or additional liability. 
+ + END OF TERMS AND CONDITIONS + + APPENDIX: How to apply the Apache License to your work. + + To apply the Apache License to your work, attach the following + boilerplate notice, with the fields enclosed by brackets "{}" + replaced with your own identifying information. (Don't include + the brackets!) The text should be enclosed in the appropriate + comment syntax for the file format. We also recommend that a + file or class name and description of purpose be included on the + same "printed page" as the copyright notice for easier + identification within third-party archives. + + Copyright 2018-2020 Amazon.com, Inc. or its affiliates. All Rights Reserved. + + Licensed under the Apache License, Version 2.0 (the "License"); + you may not use this file except in compliance with the License. + You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + + Unless required by applicable law or agreed to in writing, software + distributed under the License is distributed on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + See the License for the specific language governing permissions and + limitations under the License. 
\ No newline at end of file diff --git a/amplify/functions/downloadDocument/node_modules/@aws-sdk/region-config-resolver/README.md b/amplify/functions/downloadDocument/node_modules/@aws-sdk/region-config-resolver/README.md new file mode 100644 index 0000000..389b765 --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@aws-sdk/region-config-resolver/README.md @@ -0,0 +1,12 @@ +# @aws-sdk/region-config-resolver + +[![NPM version](https://img.shields.io/npm/v/@aws-sdk/region-config-resolver/latest.svg)](https://www.npmjs.com/package/@aws-sdk/region-config-resolver) +[![NPM downloads](https://img.shields.io/npm/dm/@aws-sdk/region-config-resolver.svg)](https://www.npmjs.com/package/@aws-sdk/region-config-resolver) + +> An internal package + +This package provides utilities for AWS region config resolvers. + +## Usage + +You probably shouldn't, at least directly. diff --git a/amplify/functions/downloadDocument/node_modules/@aws-sdk/region-config-resolver/dist-cjs/index.js b/amplify/functions/downloadDocument/node_modules/@aws-sdk/region-config-resolver/dist-cjs/index.js new file mode 100644 index 0000000..ddc184f --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@aws-sdk/region-config-resolver/dist-cjs/index.js @@ -0,0 +1,105 @@ +"use strict"; +var __defProp = Object.defineProperty; +var __getOwnPropDesc = Object.getOwnPropertyDescriptor; +var __getOwnPropNames = Object.getOwnPropertyNames; +var __hasOwnProp = Object.prototype.hasOwnProperty; +var __name = (target, value) => __defProp(target, "name", { value, configurable: true }); +var __export = (target, all) => { + for (var name in all) + __defProp(target, name, { get: all[name], enumerable: true }); +}; +var __copyProps = (to, from, except, desc) => { + if (from && typeof from === "object" || typeof from === "function") { + for (let key of __getOwnPropNames(from)) + if (!__hasOwnProp.call(to, key) && key !== except) + __defProp(to, key, { get: () => from[key], enumerable: !(desc = 
__getOwnPropDesc(from, key)) || desc.enumerable }); + } + return to; +}; +var __toCommonJS = (mod) => __copyProps(__defProp({}, "__esModule", { value: true }), mod); + +// src/index.ts +var index_exports = {}; +__export(index_exports, { + NODE_REGION_CONFIG_FILE_OPTIONS: () => NODE_REGION_CONFIG_FILE_OPTIONS, + NODE_REGION_CONFIG_OPTIONS: () => NODE_REGION_CONFIG_OPTIONS, + REGION_ENV_NAME: () => REGION_ENV_NAME, + REGION_INI_NAME: () => REGION_INI_NAME, + getAwsRegionExtensionConfiguration: () => getAwsRegionExtensionConfiguration, + resolveAwsRegionExtensionConfiguration: () => resolveAwsRegionExtensionConfiguration, + resolveRegionConfig: () => resolveRegionConfig +}); +module.exports = __toCommonJS(index_exports); + +// src/extensions/index.ts +var getAwsRegionExtensionConfiguration = /* @__PURE__ */ __name((runtimeConfig) => { + return { + setRegion(region) { + runtimeConfig.region = region; + }, + region() { + return runtimeConfig.region; + } + }; +}, "getAwsRegionExtensionConfiguration"); +var resolveAwsRegionExtensionConfiguration = /* @__PURE__ */ __name((awsRegionExtensionConfiguration) => { + return { + region: awsRegionExtensionConfiguration.region() + }; +}, "resolveAwsRegionExtensionConfiguration"); + +// src/regionConfig/config.ts +var REGION_ENV_NAME = "AWS_REGION"; +var REGION_INI_NAME = "region"; +var NODE_REGION_CONFIG_OPTIONS = { + environmentVariableSelector: /* @__PURE__ */ __name((env) => env[REGION_ENV_NAME], "environmentVariableSelector"), + configFileSelector: /* @__PURE__ */ __name((profile) => profile[REGION_INI_NAME], "configFileSelector"), + default: /* @__PURE__ */ __name(() => { + throw new Error("Region is missing"); + }, "default") +}; +var NODE_REGION_CONFIG_FILE_OPTIONS = { + preferredFile: "credentials" +}; + +// src/regionConfig/isFipsRegion.ts +var isFipsRegion = /* @__PURE__ */ __name((region) => typeof region === "string" && (region.startsWith("fips-") || region.endsWith("-fips")), "isFipsRegion"); + +// 
src/regionConfig/getRealRegion.ts +var getRealRegion = /* @__PURE__ */ __name((region) => isFipsRegion(region) ? ["fips-aws-global", "aws-fips"].includes(region) ? "us-east-1" : region.replace(/fips-(dkr-|prod-)?|-fips/, "") : region, "getRealRegion"); + +// src/regionConfig/resolveRegionConfig.ts +var resolveRegionConfig = /* @__PURE__ */ __name((input) => { + const { region, useFipsEndpoint } = input; + if (!region) { + throw new Error("Region is missing"); + } + return Object.assign(input, { + region: /* @__PURE__ */ __name(async () => { + if (typeof region === "string") { + return getRealRegion(region); + } + const providedRegion = await region(); + return getRealRegion(providedRegion); + }, "region"), + useFipsEndpoint: /* @__PURE__ */ __name(async () => { + const providedRegion = typeof region === "string" ? region : await region(); + if (isFipsRegion(providedRegion)) { + return true; + } + return typeof useFipsEndpoint !== "function" ? Promise.resolve(!!useFipsEndpoint) : useFipsEndpoint(); + }, "useFipsEndpoint") + }); +}, "resolveRegionConfig"); +// Annotate the CommonJS export names for ESM import in node: + +0 && (module.exports = { + getAwsRegionExtensionConfiguration, + resolveAwsRegionExtensionConfiguration, + REGION_ENV_NAME, + REGION_INI_NAME, + NODE_REGION_CONFIG_OPTIONS, + NODE_REGION_CONFIG_FILE_OPTIONS, + resolveRegionConfig +}); + diff --git a/amplify/functions/downloadDocument/node_modules/@aws-sdk/region-config-resolver/dist-es/extensions/index.js b/amplify/functions/downloadDocument/node_modules/@aws-sdk/region-config-resolver/dist-es/extensions/index.js new file mode 100644 index 0000000..eb03314 --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@aws-sdk/region-config-resolver/dist-es/extensions/index.js @@ -0,0 +1,15 @@ +export const getAwsRegionExtensionConfiguration = (runtimeConfig) => { + return { + setRegion(region) { + runtimeConfig.region = region; + }, + region() { + return runtimeConfig.region; + }, + }; +}; 
+export const resolveAwsRegionExtensionConfiguration = (awsRegionExtensionConfiguration) => { + return { + region: awsRegionExtensionConfiguration.region(), + }; +}; diff --git a/amplify/functions/downloadDocument/node_modules/@aws-sdk/region-config-resolver/dist-es/index.js b/amplify/functions/downloadDocument/node_modules/@aws-sdk/region-config-resolver/dist-es/index.js new file mode 100644 index 0000000..6f4e482 --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@aws-sdk/region-config-resolver/dist-es/index.js @@ -0,0 +1,2 @@ +export * from "./extensions"; +export * from "./regionConfig"; diff --git a/amplify/functions/downloadDocument/node_modules/@aws-sdk/region-config-resolver/dist-es/regionConfig/config.js b/amplify/functions/downloadDocument/node_modules/@aws-sdk/region-config-resolver/dist-es/regionConfig/config.js new file mode 100644 index 0000000..7db9896 --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@aws-sdk/region-config-resolver/dist-es/regionConfig/config.js @@ -0,0 +1,12 @@ +export const REGION_ENV_NAME = "AWS_REGION"; +export const REGION_INI_NAME = "region"; +export const NODE_REGION_CONFIG_OPTIONS = { + environmentVariableSelector: (env) => env[REGION_ENV_NAME], + configFileSelector: (profile) => profile[REGION_INI_NAME], + default: () => { + throw new Error("Region is missing"); + }, +}; +export const NODE_REGION_CONFIG_FILE_OPTIONS = { + preferredFile: "credentials", +}; diff --git a/amplify/functions/downloadDocument/node_modules/@aws-sdk/region-config-resolver/dist-es/regionConfig/getRealRegion.js b/amplify/functions/downloadDocument/node_modules/@aws-sdk/region-config-resolver/dist-es/regionConfig/getRealRegion.js new file mode 100644 index 0000000..8d1246b --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@aws-sdk/region-config-resolver/dist-es/regionConfig/getRealRegion.js @@ -0,0 +1,6 @@ +import { isFipsRegion } from "./isFipsRegion"; +export const getRealRegion = (region) => 
isFipsRegion(region) + ? ["fips-aws-global", "aws-fips"].includes(region) + ? "us-east-1" + : region.replace(/fips-(dkr-|prod-)?|-fips/, "") + : region; diff --git a/amplify/functions/downloadDocument/node_modules/@aws-sdk/region-config-resolver/dist-es/regionConfig/index.js b/amplify/functions/downloadDocument/node_modules/@aws-sdk/region-config-resolver/dist-es/regionConfig/index.js new file mode 100644 index 0000000..83675f7 --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@aws-sdk/region-config-resolver/dist-es/regionConfig/index.js @@ -0,0 +1,2 @@ +export * from "./config"; +export * from "./resolveRegionConfig"; diff --git a/amplify/functions/downloadDocument/node_modules/@aws-sdk/region-config-resolver/dist-es/regionConfig/isFipsRegion.js b/amplify/functions/downloadDocument/node_modules/@aws-sdk/region-config-resolver/dist-es/regionConfig/isFipsRegion.js new file mode 100644 index 0000000..d758967 --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@aws-sdk/region-config-resolver/dist-es/regionConfig/isFipsRegion.js @@ -0,0 +1 @@ +export const isFipsRegion = (region) => typeof region === "string" && (region.startsWith("fips-") || region.endsWith("-fips")); diff --git a/amplify/functions/downloadDocument/node_modules/@aws-sdk/region-config-resolver/dist-es/regionConfig/resolveRegionConfig.js b/amplify/functions/downloadDocument/node_modules/@aws-sdk/region-config-resolver/dist-es/regionConfig/resolveRegionConfig.js new file mode 100644 index 0000000..f88e00f --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@aws-sdk/region-config-resolver/dist-es/regionConfig/resolveRegionConfig.js @@ -0,0 +1,24 @@ +import { getRealRegion } from "./getRealRegion"; +import { isFipsRegion } from "./isFipsRegion"; +export const resolveRegionConfig = (input) => { + const { region, useFipsEndpoint } = input; + if (!region) { + throw new Error("Region is missing"); + } + return Object.assign(input, { + region: async () => { + if 
(typeof region === "string") { + return getRealRegion(region); + } + const providedRegion = await region(); + return getRealRegion(providedRegion); + }, + useFipsEndpoint: async () => { + const providedRegion = typeof region === "string" ? region : await region(); + if (isFipsRegion(providedRegion)) { + return true; + } + return typeof useFipsEndpoint !== "function" ? Promise.resolve(!!useFipsEndpoint) : useFipsEndpoint(); + }, + }); +}; diff --git a/amplify/functions/downloadDocument/node_modules/@aws-sdk/region-config-resolver/dist-types/extensions/index.d.ts b/amplify/functions/downloadDocument/node_modules/@aws-sdk/region-config-resolver/dist-types/extensions/index.d.ts new file mode 100644 index 0000000..7756bad --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@aws-sdk/region-config-resolver/dist-types/extensions/index.d.ts @@ -0,0 +1,16 @@ +import { AwsRegionExtensionConfiguration } from "@aws-sdk/types"; +import { Provider } from "@smithy/types"; +export type RegionExtensionRuntimeConfigType = Partial<{ + region: string | Provider; +}>; +/** + * @internal + */ +export declare const getAwsRegionExtensionConfiguration: (runtimeConfig: RegionExtensionRuntimeConfigType) => { + setRegion(region: Provider): void; + region(): Provider; +}; +/** + * @internal + */ +export declare const resolveAwsRegionExtensionConfiguration: (awsRegionExtensionConfiguration: AwsRegionExtensionConfiguration) => RegionExtensionRuntimeConfigType; diff --git a/amplify/functions/downloadDocument/node_modules/@aws-sdk/region-config-resolver/dist-types/index.d.ts b/amplify/functions/downloadDocument/node_modules/@aws-sdk/region-config-resolver/dist-types/index.d.ts new file mode 100644 index 0000000..6f4e482 --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@aws-sdk/region-config-resolver/dist-types/index.d.ts @@ -0,0 +1,2 @@ +export * from "./extensions"; +export * from "./regionConfig"; diff --git 
a/amplify/functions/downloadDocument/node_modules/@aws-sdk/region-config-resolver/dist-types/regionConfig/config.d.ts b/amplify/functions/downloadDocument/node_modules/@aws-sdk/region-config-resolver/dist-types/regionConfig/config.d.ts new file mode 100644 index 0000000..d203bb0 --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@aws-sdk/region-config-resolver/dist-types/regionConfig/config.d.ts @@ -0,0 +1,17 @@ +import { LoadedConfigSelectors, LocalConfigOptions } from "@smithy/node-config-provider"; +/** + * @internal + */ +export declare const REGION_ENV_NAME = "AWS_REGION"; +/** + * @internal + */ +export declare const REGION_INI_NAME = "region"; +/** + * @internal + */ +export declare const NODE_REGION_CONFIG_OPTIONS: LoadedConfigSelectors; +/** + * @internal + */ +export declare const NODE_REGION_CONFIG_FILE_OPTIONS: LocalConfigOptions; diff --git a/amplify/functions/downloadDocument/node_modules/@aws-sdk/region-config-resolver/dist-types/regionConfig/getRealRegion.d.ts b/amplify/functions/downloadDocument/node_modules/@aws-sdk/region-config-resolver/dist-types/regionConfig/getRealRegion.d.ts new file mode 100644 index 0000000..c70fb5b --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@aws-sdk/region-config-resolver/dist-types/regionConfig/getRealRegion.d.ts @@ -0,0 +1,4 @@ +/** + * @internal + */ +export declare const getRealRegion: (region: string) => string; diff --git a/amplify/functions/downloadDocument/node_modules/@aws-sdk/region-config-resolver/dist-types/regionConfig/index.d.ts b/amplify/functions/downloadDocument/node_modules/@aws-sdk/region-config-resolver/dist-types/regionConfig/index.d.ts new file mode 100644 index 0000000..6dcf5e5 --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@aws-sdk/region-config-resolver/dist-types/regionConfig/index.d.ts @@ -0,0 +1,8 @@ +/** + * @internal + */ +export * from "./config"; +/** + * @internal + */ +export * from "./resolveRegionConfig"; diff --git 
a/amplify/functions/downloadDocument/node_modules/@aws-sdk/region-config-resolver/dist-types/regionConfig/isFipsRegion.d.ts b/amplify/functions/downloadDocument/node_modules/@aws-sdk/region-config-resolver/dist-types/regionConfig/isFipsRegion.d.ts new file mode 100644 index 0000000..b42cee7 --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@aws-sdk/region-config-resolver/dist-types/regionConfig/isFipsRegion.d.ts @@ -0,0 +1,4 @@ +/** + * @internal + */ +export declare const isFipsRegion: (region: string) => boolean; diff --git a/amplify/functions/downloadDocument/node_modules/@aws-sdk/region-config-resolver/dist-types/regionConfig/resolveRegionConfig.d.ts b/amplify/functions/downloadDocument/node_modules/@aws-sdk/region-config-resolver/dist-types/regionConfig/resolveRegionConfig.d.ts new file mode 100644 index 0000000..84ed4d0 --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@aws-sdk/region-config-resolver/dist-types/regionConfig/resolveRegionConfig.d.ts @@ -0,0 +1,37 @@ +import { Provider } from "@smithy/types"; +/** + * @public + */ +export interface RegionInputConfig { + /** + * The AWS region to which this client will send requests + */ + region?: string | Provider; + /** + * Enables FIPS compatible endpoints. 
+ */ + useFipsEndpoint?: boolean | Provider; +} +/** + * @internal + */ +interface PreviouslyResolved { +} +/** + * @internal + */ +export interface RegionResolvedConfig { + /** + * Resolved value for input config {@link RegionInputConfig.region} + */ + region: Provider; + /** + * Resolved value for input {@link RegionInputConfig.useFipsEndpoint} + */ + useFipsEndpoint: Provider; +} +/** + * @internal + */ +export declare const resolveRegionConfig: (input: T & RegionInputConfig & PreviouslyResolved) => T & RegionResolvedConfig; +export {}; diff --git a/amplify/functions/downloadDocument/node_modules/@aws-sdk/region-config-resolver/dist-types/ts3.4/extensions/index.d.ts b/amplify/functions/downloadDocument/node_modules/@aws-sdk/region-config-resolver/dist-types/ts3.4/extensions/index.d.ts new file mode 100644 index 0000000..c1328e3 --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@aws-sdk/region-config-resolver/dist-types/ts3.4/extensions/index.d.ts @@ -0,0 +1,14 @@ +import { AwsRegionExtensionConfiguration } from "@aws-sdk/types"; +import { Provider } from "@smithy/types"; +export type RegionExtensionRuntimeConfigType = Partial<{ + region: string | Provider; +}>; +export declare const getAwsRegionExtensionConfiguration: ( + runtimeConfig: RegionExtensionRuntimeConfigType +) => { + setRegion(region: Provider): void; + region(): Provider; +}; +export declare const resolveAwsRegionExtensionConfiguration: ( + awsRegionExtensionConfiguration: AwsRegionExtensionConfiguration +) => RegionExtensionRuntimeConfigType; diff --git a/amplify/functions/downloadDocument/node_modules/@aws-sdk/region-config-resolver/dist-types/ts3.4/index.d.ts b/amplify/functions/downloadDocument/node_modules/@aws-sdk/region-config-resolver/dist-types/ts3.4/index.d.ts new file mode 100644 index 0000000..6f4e482 --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@aws-sdk/region-config-resolver/dist-types/ts3.4/index.d.ts @@ -0,0 +1,2 @@ +export * from 
"./extensions"; +export * from "./regionConfig"; diff --git a/amplify/functions/downloadDocument/node_modules/@aws-sdk/region-config-resolver/dist-types/ts3.4/regionConfig/config.d.ts b/amplify/functions/downloadDocument/node_modules/@aws-sdk/region-config-resolver/dist-types/ts3.4/regionConfig/config.d.ts new file mode 100644 index 0000000..ceb3e02 --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@aws-sdk/region-config-resolver/dist-types/ts3.4/regionConfig/config.d.ts @@ -0,0 +1,8 @@ +import { + LoadedConfigSelectors, + LocalConfigOptions, +} from "@smithy/node-config-provider"; +export declare const REGION_ENV_NAME = "AWS_REGION"; +export declare const REGION_INI_NAME = "region"; +export declare const NODE_REGION_CONFIG_OPTIONS: LoadedConfigSelectors; +export declare const NODE_REGION_CONFIG_FILE_OPTIONS: LocalConfigOptions; diff --git a/amplify/functions/downloadDocument/node_modules/@aws-sdk/region-config-resolver/dist-types/ts3.4/regionConfig/getRealRegion.d.ts b/amplify/functions/downloadDocument/node_modules/@aws-sdk/region-config-resolver/dist-types/ts3.4/regionConfig/getRealRegion.d.ts new file mode 100644 index 0000000..f06119b --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@aws-sdk/region-config-resolver/dist-types/ts3.4/regionConfig/getRealRegion.d.ts @@ -0,0 +1 @@ +export declare const getRealRegion: (region: string) => string; diff --git a/amplify/functions/downloadDocument/node_modules/@aws-sdk/region-config-resolver/dist-types/ts3.4/regionConfig/index.d.ts b/amplify/functions/downloadDocument/node_modules/@aws-sdk/region-config-resolver/dist-types/ts3.4/regionConfig/index.d.ts new file mode 100644 index 0000000..83675f7 --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@aws-sdk/region-config-resolver/dist-types/ts3.4/regionConfig/index.d.ts @@ -0,0 +1,2 @@ +export * from "./config"; +export * from "./resolveRegionConfig"; diff --git 
a/amplify/functions/downloadDocument/node_modules/@aws-sdk/region-config-resolver/dist-types/ts3.4/regionConfig/isFipsRegion.d.ts b/amplify/functions/downloadDocument/node_modules/@aws-sdk/region-config-resolver/dist-types/ts3.4/regionConfig/isFipsRegion.d.ts new file mode 100644 index 0000000..13d34f2 --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@aws-sdk/region-config-resolver/dist-types/ts3.4/regionConfig/isFipsRegion.d.ts @@ -0,0 +1 @@ +export declare const isFipsRegion: (region: string) => boolean; diff --git a/amplify/functions/downloadDocument/node_modules/@aws-sdk/region-config-resolver/dist-types/ts3.4/regionConfig/resolveRegionConfig.d.ts b/amplify/functions/downloadDocument/node_modules/@aws-sdk/region-config-resolver/dist-types/ts3.4/regionConfig/resolveRegionConfig.d.ts new file mode 100644 index 0000000..86b8364 --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@aws-sdk/region-config-resolver/dist-types/ts3.4/regionConfig/resolveRegionConfig.d.ts @@ -0,0 +1,14 @@ +import { Provider } from "@smithy/types"; +export interface RegionInputConfig { + region?: string | Provider; + useFipsEndpoint?: boolean | Provider; +} +interface PreviouslyResolved {} +export interface RegionResolvedConfig { + region: Provider; + useFipsEndpoint: Provider; +} +export declare const resolveRegionConfig: ( + input: T & RegionInputConfig & PreviouslyResolved +) => T & RegionResolvedConfig; +export {}; diff --git a/amplify/functions/downloadDocument/node_modules/@aws-sdk/region-config-resolver/package.json b/amplify/functions/downloadDocument/node_modules/@aws-sdk/region-config-resolver/package.json new file mode 100644 index 0000000..605f530 --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@aws-sdk/region-config-resolver/package.json @@ -0,0 +1,59 @@ +{ + "name": "@aws-sdk/region-config-resolver", + "version": "3.775.0", + "scripts": { + "build": "concurrently 'yarn:build:cjs' 'yarn:build:es' 'yarn:build:types'", + 
"build:cjs": "node ../../scripts/compilation/inline region-config-resolver", + "build:es": "tsc -p tsconfig.es.json", + "build:include:deps": "lerna run --scope $npm_package_name --include-dependencies build", + "build:types": "tsc -p tsconfig.types.json", + "build:types:downlevel": "downlevel-dts dist-types dist-types/ts3.4", + "clean": "rimraf ./dist-* && rimraf *.tsbuildinfo", + "extract:docs": "api-extractor run --local", + "test": "yarn g:vitest run", + "test:watch": "yarn g:vitest watch" + }, + "main": "./dist-cjs/index.js", + "module": "./dist-es/index.js", + "types": "./dist-types/index.d.ts", + "author": { + "name": "AWS SDK for JavaScript Team", + "url": "https://aws.amazon.com/javascript/" + }, + "license": "Apache-2.0", + "dependencies": { + "@aws-sdk/types": "3.775.0", + "@smithy/node-config-provider": "^4.0.2", + "@smithy/types": "^4.2.0", + "@smithy/util-config-provider": "^4.0.0", + "@smithy/util-middleware": "^4.0.2", + "tslib": "^2.6.2" + }, + "devDependencies": { + "@tsconfig/recommended": "1.0.1", + "concurrently": "7.0.0", + "downlevel-dts": "0.10.1", + "jest": "28.1.1", + "rimraf": "3.0.2", + "typescript": "~5.2.2" + }, + "engines": { + "node": ">=18.0.0" + }, + "typesVersions": { + "<4.0": { + "dist-types/*": [ + "dist-types/ts3.4/*" + ] + } + }, + "files": [ + "dist-*/**" + ], + "homepage": "https://github.com/awslabs/smithy-typescript/tree/main/packages/region-config-resolver", + "repository": { + "type": "git", + "url": "https://github.com/aws/aws-sdk-js-v3.git", + "directory": "packages/region-config-resolver" + } +} diff --git a/amplify/functions/downloadDocument/node_modules/@aws-sdk/token-providers/LICENSE b/amplify/functions/downloadDocument/node_modules/@aws-sdk/token-providers/LICENSE new file mode 100644 index 0000000..7b6491b --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@aws-sdk/token-providers/LICENSE @@ -0,0 +1,201 @@ +Apache License + Version 2.0, January 2004 + http://www.apache.org/licenses/ + + TERMS 
AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION + + 1. Definitions. + + "License" shall mean the terms and conditions for use, reproduction, + and distribution as defined by Sections 1 through 9 of this document. + + "Licensor" shall mean the copyright owner or entity authorized by + the copyright owner that is granting the License. + + "Legal Entity" shall mean the union of the acting entity and all + other entities that control, are controlled by, or are under common + control with that entity. For the purposes of this definition, + "control" means (i) the power, direct or indirect, to cause the + direction or management of such entity, whether by contract or + otherwise, or (ii) ownership of fifty percent (50%) or more of the + outstanding shares, or (iii) beneficial ownership of such entity. + + "You" (or "Your") shall mean an individual or Legal Entity + exercising permissions granted by this License. + + "Source" form shall mean the preferred form for making modifications, + including but not limited to software source code, documentation + source, and configuration files. + + "Object" form shall mean any form resulting from mechanical + transformation or translation of a Source form, including but + not limited to compiled object code, generated documentation, + and conversions to other media types. + + "Work" shall mean the work of authorship, whether in Source or + Object form, made available under the License, as indicated by a + copyright notice that is included in or attached to the work + (an example is provided in the Appendix below). + + "Derivative Works" shall mean any work, whether in Source or Object + form, that is based on (or derived from) the Work and for which the + editorial revisions, annotations, elaborations, or other modifications + represent, as a whole, an original work of authorship. 
For the purposes + of this License, Derivative Works shall not include works that remain + separable from, or merely link (or bind by name) to the interfaces of, + the Work and Derivative Works thereof. + + "Contribution" shall mean any work of authorship, including + the original version of the Work and any modifications or additions + to that Work or Derivative Works thereof, that is intentionally + submitted to Licensor for inclusion in the Work by the copyright owner + or by an individual or Legal Entity authorized to submit on behalf of + the copyright owner. For the purposes of this definition, "submitted" + means any form of electronic, verbal, or written communication sent + to the Licensor or its representatives, including but not limited to + communication on electronic mailing lists, source code control systems, + and issue tracking systems that are managed by, or on behalf of, the + Licensor for the purpose of discussing and improving the Work, but + excluding communication that is conspicuously marked or otherwise + designated in writing by the copyright owner as "Not a Contribution." + + "Contributor" shall mean Licensor and any individual or Legal Entity + on behalf of whom a Contribution has been received by Licensor and + subsequently incorporated within the Work. + + 2. Grant of Copyright License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + copyright license to reproduce, prepare Derivative Works of, + publicly display, publicly perform, sublicense, and distribute the + Work and such Derivative Works in Source or Object form. + + 3. Grant of Patent License. 
Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + (except as stated in this section) patent license to make, have made, + use, offer to sell, sell, import, and otherwise transfer the Work, + where such license applies only to those patent claims licensable + by such Contributor that are necessarily infringed by their + Contribution(s) alone or by combination of their Contribution(s) + with the Work to which such Contribution(s) was submitted. If You + institute patent litigation against any entity (including a + cross-claim or counterclaim in a lawsuit) alleging that the Work + or a Contribution incorporated within the Work constitutes direct + or contributory patent infringement, then any patent licenses + granted to You under this License for that Work shall terminate + as of the date such litigation is filed. + + 4. Redistribution. You may reproduce and distribute copies of the + Work or Derivative Works thereof in any medium, with or without + modifications, and in Source or Object form, provided that You + meet the following conditions: + + (a) You must give any other recipients of the Work or + Derivative Works a copy of this License; and + + (b) You must cause any modified files to carry prominent notices + stating that You changed the files; and + + (c) You must retain, in the Source form of any Derivative Works + that You distribute, all copyright, patent, trademark, and + attribution notices from the Source form of the Work, + excluding those notices that do not pertain to any part of + the Derivative Works; and + + (d) If the Work includes a "NOTICE" text file as part of its + distribution, then any Derivative Works that You distribute must + include a readable copy of the attribution notices contained + within such NOTICE file, excluding those notices that do not + pertain to any part of the Derivative Works, in at least one + of 
the following places: within a NOTICE text file distributed + as part of the Derivative Works; within the Source form or + documentation, if provided along with the Derivative Works; or, + within a display generated by the Derivative Works, if and + wherever such third-party notices normally appear. The contents + of the NOTICE file are for informational purposes only and + do not modify the License. You may add Your own attribution + notices within Derivative Works that You distribute, alongside + or as an addendum to the NOTICE text from the Work, provided + that such additional attribution notices cannot be construed + as modifying the License. + + You may add Your own copyright statement to Your modifications and + may provide additional or different license terms and conditions + for use, reproduction, or distribution of Your modifications, or + for any such Derivative Works as a whole, provided Your use, + reproduction, and distribution of the Work otherwise complies with + the conditions stated in this License. + + 5. Submission of Contributions. Unless You explicitly state otherwise, + any Contribution intentionally submitted for inclusion in the Work + by You to the Licensor shall be under the terms and conditions of + this License, without any additional terms or conditions. + Notwithstanding the above, nothing herein shall supersede or modify + the terms of any separate license agreement you may have executed + with Licensor regarding such Contributions. + + 6. Trademarks. This License does not grant permission to use the trade + names, trademarks, service marks, or product names of the Licensor, + except as required for reasonable and customary use in describing the + origin of the Work and reproducing the content of the NOTICE file. + + 7. Disclaimer of Warranty. 
Unless required by applicable law or + agreed to in writing, Licensor provides the Work (and each + Contributor provides its Contributions) on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or + implied, including, without limitation, any warranties or conditions + of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A + PARTICULAR PURPOSE. You are solely responsible for determining the + appropriateness of using or redistributing the Work and assume any + risks associated with Your exercise of permissions under this License. + + 8. Limitation of Liability. In no event and under no legal theory, + whether in tort (including negligence), contract, or otherwise, + unless required by applicable law (such as deliberate and grossly + negligent acts) or agreed to in writing, shall any Contributor be + liable to You for damages, including any direct, indirect, special, + incidental, or consequential damages of any character arising as a + result of this License or out of the use or inability to use the + Work (including but not limited to damages for loss of goodwill, + work stoppage, computer failure or malfunction, or any and all + other commercial damages or losses), even if such Contributor + has been advised of the possibility of such damages. + + 9. Accepting Warranty or Additional Liability. While redistributing + the Work or Derivative Works thereof, You may choose to offer, + and charge a fee for, acceptance of support, warranty, indemnity, + or other liability obligations and/or rights consistent with this + License. However, in accepting such obligations, You may act only + on Your own behalf and on Your sole responsibility, not on behalf + of any other Contributor, and only if You agree to indemnify, + defend, and hold each Contributor harmless for any liability + incurred by, or claims asserted against, such Contributor by reason + of your accepting any such warranty or additional liability. 
+ + END OF TERMS AND CONDITIONS + + APPENDIX: How to apply the Apache License to your work. + + To apply the Apache License to your work, attach the following + boilerplate notice, with the fields enclosed by brackets "{}" + replaced with your own identifying information. (Don't include + the brackets!) The text should be enclosed in the appropriate + comment syntax for the file format. We also recommend that a + file or class name and description of purpose be included on the + same "printed page" as the copyright notice for easier + identification within third-party archives. + + Copyright 2018-2020 Amazon.com, Inc. or its affiliates. All Rights Reserved. + + Licensed under the Apache License, Version 2.0 (the "License"); + you may not use this file except in compliance with the License. + You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + + Unless required by applicable law or agreed to in writing, software + distributed under the License is distributed on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + See the License for the specific language governing permissions and + limitations under the License. \ No newline at end of file diff --git a/amplify/functions/downloadDocument/node_modules/@aws-sdk/token-providers/README.md b/amplify/functions/downloadDocument/node_modules/@aws-sdk/token-providers/README.md new file mode 100644 index 0000000..9078019 --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@aws-sdk/token-providers/README.md @@ -0,0 +1,53 @@ +# @aws-sdk/token-providers + +[![NPM version](https://img.shields.io/npm/v/@aws-sdk/token-providers/latest.svg)](https://www.npmjs.com/package/@aws-sdk/token-providers) +[![NPM downloads](https://img.shields.io/npm/dm/@aws-sdk/token-providers.svg)](https://www.npmjs.com/package/@aws-sdk/token-providers) + +A collection of all token providers. 
The token providers should be used when the authorization +type is going to be token based. For example, the `bearer` authorization type set using +[httpBearerAuth trait][http-bearer-auth-trait] in Smithy. + +## Static Token Provider + +```ts +import { fromStatic } from "@aws-sdk/token-providers"; + +const token = { token: "TOKEN" }; +const staticTokenProvider = fromStatic(token); + +const staticToken = await staticTokenProvider(); // returns { token: "TOKEN" } +``` + +## SSO Token Provider + +```ts +import { fromSso } from "@aws-sdk/token-providers"; + +// returns token from SSO token cache or ssoOidc.createToken() call. +const ssoToken = await fromSso(); +``` + +## Token Provider Chain + +```ts +import { nodeProvider } from "@aws-sdk/token-providers"; + +// returns token from default providers. +const token = await nodeProvider(); +``` + +[http-bearer-auth-trait]: https://smithy.io/2.0/spec/authentication-traits.html#smithy-api-httpbearerauth-trait + +--- + +### Development + +This package contains a minimal copy of the SSO OIDC client, instead of relying on the full client, which +would cause a circular dependency. + +When regenerating the bundled version of the SSO OIDC client, run the esbuild.js script and then make the following changes: + +- Remove any dependency of the generated client on the credential chain such that it would create + a circular dependency back to this package. Because we only need the `CreateTokenCommand`, the client, and this command's + associated `Exception`s, it is possible to remove auth dependencies. +- Ensure all required packages are declared in the `package.json` of token-providers. 
diff --git a/amplify/functions/downloadDocument/node_modules/@aws-sdk/token-providers/dist-cjs/index.js b/amplify/functions/downloadDocument/node_modules/@aws-sdk/token-providers/dist-cjs/index.js new file mode 100644 index 0000000..51a38df --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@aws-sdk/token-providers/dist-cjs/index.js @@ -0,0 +1,217 @@ +"use strict"; +var __create = Object.create; +var __defProp = Object.defineProperty; +var __getOwnPropDesc = Object.getOwnPropertyDescriptor; +var __getOwnPropNames = Object.getOwnPropertyNames; +var __getProtoOf = Object.getPrototypeOf; +var __hasOwnProp = Object.prototype.hasOwnProperty; +var __name = (target, value) => __defProp(target, "name", { value, configurable: true }); +var __export = (target, all) => { + for (var name in all) + __defProp(target, name, { get: all[name], enumerable: true }); +}; +var __copyProps = (to, from, except, desc) => { + if (from && typeof from === "object" || typeof from === "function") { + for (let key of __getOwnPropNames(from)) + if (!__hasOwnProp.call(to, key) && key !== except) + __defProp(to, key, { get: () => from[key], enumerable: !(desc = __getOwnPropDesc(from, key)) || desc.enumerable }); + } + return to; +}; +var __toESM = (mod, isNodeMode, target) => (target = mod != null ? __create(__getProtoOf(mod)) : {}, __copyProps( + // If the importer is in node compatibility mode or this is not an ESM + // file that has been converted to a CommonJS file using a Babel- + // compatible transform (i.e. "__esModule" has not been set), then set + // "default" to the CommonJS "module.exports" for node compatibility. + isNodeMode || !mod || !mod.__esModule ? 
__defProp(target, "default", { value: mod, enumerable: true }) : target, + mod +)); +var __toCommonJS = (mod) => __copyProps(__defProp({}, "__esModule", { value: true }), mod); + +// src/index.ts +var index_exports = {}; +__export(index_exports, { + fromSso: () => fromSso, + fromStatic: () => fromStatic, + nodeProvider: () => nodeProvider +}); +module.exports = __toCommonJS(index_exports); + +// src/fromSso.ts + + + +// src/constants.ts +var EXPIRE_WINDOW_MS = 5 * 60 * 1e3; +var REFRESH_MESSAGE = `To refresh this SSO session run 'aws sso login' with the corresponding profile.`; + +// src/getSsoOidcClient.ts +var getSsoOidcClient = /* @__PURE__ */ __name(async (ssoRegion, init = {}) => { + const { SSOOIDCClient } = await Promise.resolve().then(() => __toESM(require("@aws-sdk/nested-clients/sso-oidc"))); + const ssoOidcClient = new SSOOIDCClient( + Object.assign({}, init.clientConfig ?? {}, { + region: ssoRegion ?? init.clientConfig?.region, + logger: init.clientConfig?.logger ?? init.parentClientConfig?.logger + }) + ); + return ssoOidcClient; +}, "getSsoOidcClient"); + +// src/getNewSsoOidcToken.ts +var getNewSsoOidcToken = /* @__PURE__ */ __name(async (ssoToken, ssoRegion, init = {}) => { + const { CreateTokenCommand } = await Promise.resolve().then(() => __toESM(require("@aws-sdk/nested-clients/sso-oidc"))); + const ssoOidcClient = await getSsoOidcClient(ssoRegion, init); + return ssoOidcClient.send( + new CreateTokenCommand({ + clientId: ssoToken.clientId, + clientSecret: ssoToken.clientSecret, + refreshToken: ssoToken.refreshToken, + grantType: "refresh_token" + }) + ); +}, "getNewSsoOidcToken"); + +// src/validateTokenExpiry.ts +var import_property_provider = require("@smithy/property-provider"); +var validateTokenExpiry = /* @__PURE__ */ __name((token) => { + if (token.expiration && token.expiration.getTime() < Date.now()) { + throw new import_property_provider.TokenProviderError(`Token is expired. 
${REFRESH_MESSAGE}`, false); + } +}, "validateTokenExpiry"); + +// src/validateTokenKey.ts + +var validateTokenKey = /* @__PURE__ */ __name((key, value, forRefresh = false) => { + if (typeof value === "undefined") { + throw new import_property_provider.TokenProviderError( + `Value not present for '${key}' in SSO Token${forRefresh ? ". Cannot refresh" : ""}. ${REFRESH_MESSAGE}`, + false + ); + } +}, "validateTokenKey"); + +// src/writeSSOTokenToFile.ts +var import_shared_ini_file_loader = require("@smithy/shared-ini-file-loader"); +var import_fs = require("fs"); +var { writeFile } = import_fs.promises; +var writeSSOTokenToFile = /* @__PURE__ */ __name((id, ssoToken) => { + const tokenFilepath = (0, import_shared_ini_file_loader.getSSOTokenFilepath)(id); + const tokenString = JSON.stringify(ssoToken, null, 2); + return writeFile(tokenFilepath, tokenString); +}, "writeSSOTokenToFile"); + +// src/fromSso.ts +var lastRefreshAttemptTime = /* @__PURE__ */ new Date(0); +var fromSso = /* @__PURE__ */ __name((_init = {}) => async ({ callerClientConfig } = {}) => { + const init = { + ..._init, + parentClientConfig: { + ...callerClientConfig, + ..._init.parentClientConfig + } + }; + init.logger?.debug("@aws-sdk/token-providers - fromSso"); + const profiles = await (0, import_shared_ini_file_loader.parseKnownFiles)(init); + const profileName = (0, import_shared_ini_file_loader.getProfileName)({ + profile: init.profile ?? 
callerClientConfig?.profile + }); + const profile = profiles[profileName]; + if (!profile) { + throw new import_property_provider.TokenProviderError(`Profile '${profileName}' could not be found in shared credentials file.`, false); + } else if (!profile["sso_session"]) { + throw new import_property_provider.TokenProviderError(`Profile '${profileName}' is missing required property 'sso_session'.`); + } + const ssoSessionName = profile["sso_session"]; + const ssoSessions = await (0, import_shared_ini_file_loader.loadSsoSessionData)(init); + const ssoSession = ssoSessions[ssoSessionName]; + if (!ssoSession) { + throw new import_property_provider.TokenProviderError( + `Sso session '${ssoSessionName}' could not be found in shared credentials file.`, + false + ); + } + for (const ssoSessionRequiredKey of ["sso_start_url", "sso_region"]) { + if (!ssoSession[ssoSessionRequiredKey]) { + throw new import_property_provider.TokenProviderError( + `Sso session '${ssoSessionName}' is missing required property '${ssoSessionRequiredKey}'.`, + false + ); + } + } + const ssoStartUrl = ssoSession["sso_start_url"]; + const ssoRegion = ssoSession["sso_region"]; + let ssoToken; + try { + ssoToken = await (0, import_shared_ini_file_loader.getSSOTokenFromFile)(ssoSessionName); + } catch (e) { + throw new import_property_provider.TokenProviderError( + `The SSO session token associated with profile=${profileName} was not found or is invalid. 
${REFRESH_MESSAGE}`, + false + ); + } + validateTokenKey("accessToken", ssoToken.accessToken); + validateTokenKey("expiresAt", ssoToken.expiresAt); + const { accessToken, expiresAt } = ssoToken; + const existingToken = { token: accessToken, expiration: new Date(expiresAt) }; + if (existingToken.expiration.getTime() - Date.now() > EXPIRE_WINDOW_MS) { + return existingToken; + } + if (Date.now() - lastRefreshAttemptTime.getTime() < 30 * 1e3) { + validateTokenExpiry(existingToken); + return existingToken; + } + validateTokenKey("clientId", ssoToken.clientId, true); + validateTokenKey("clientSecret", ssoToken.clientSecret, true); + validateTokenKey("refreshToken", ssoToken.refreshToken, true); + try { + lastRefreshAttemptTime.setTime(Date.now()); + const newSsoOidcToken = await getNewSsoOidcToken(ssoToken, ssoRegion, init); + validateTokenKey("accessToken", newSsoOidcToken.accessToken); + validateTokenKey("expiresIn", newSsoOidcToken.expiresIn); + const newTokenExpiration = new Date(Date.now() + newSsoOidcToken.expiresIn * 1e3); + try { + await writeSSOTokenToFile(ssoSessionName, { + ...ssoToken, + accessToken: newSsoOidcToken.accessToken, + expiresAt: newTokenExpiration.toISOString(), + refreshToken: newSsoOidcToken.refreshToken + }); + } catch (error) { + } + return { + token: newSsoOidcToken.accessToken, + expiration: newTokenExpiration + }; + } catch (error) { + validateTokenExpiry(existingToken); + return existingToken; + } +}, "fromSso"); + +// src/fromStatic.ts + +var fromStatic = /* @__PURE__ */ __name(({ token, logger }) => async () => { + logger?.debug("@aws-sdk/token-providers - fromStatic"); + if (!token || !token.token) { + throw new import_property_provider.TokenProviderError(`Please pass a valid token to fromStatic`, false); + } + return token; +}, "fromStatic"); + +// src/nodeProvider.ts + +var nodeProvider = /* @__PURE__ */ __name((init = {}) => (0, import_property_provider.memoize)( + (0, import_property_provider.chain)(fromSso(init), async () => { + 
throw new import_property_provider.TokenProviderError("Could not load token from any providers", false); + }), + (token) => token.expiration !== void 0 && token.expiration.getTime() - Date.now() < 3e5, + (token) => token.expiration !== void 0 +), "nodeProvider"); +// Annotate the CommonJS export names for ESM import in node: + +0 && (module.exports = { + fromSso, + fromStatic, + nodeProvider +}); + diff --git a/amplify/functions/downloadDocument/node_modules/@aws-sdk/token-providers/dist-es/constants.js b/amplify/functions/downloadDocument/node_modules/@aws-sdk/token-providers/dist-es/constants.js new file mode 100644 index 0000000..b84a126 --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@aws-sdk/token-providers/dist-es/constants.js @@ -0,0 +1,2 @@ +export const EXPIRE_WINDOW_MS = 5 * 60 * 1000; +export const REFRESH_MESSAGE = `To refresh this SSO session run 'aws sso login' with the corresponding profile.`; diff --git a/amplify/functions/downloadDocument/node_modules/@aws-sdk/token-providers/dist-es/fromSso.js b/amplify/functions/downloadDocument/node_modules/@aws-sdk/token-providers/dist-es/fromSso.js new file mode 100644 index 0000000..61d2075 --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@aws-sdk/token-providers/dist-es/fromSso.js @@ -0,0 +1,88 @@ +import { TokenProviderError } from "@smithy/property-provider"; +import { getProfileName, getSSOTokenFromFile, loadSsoSessionData, parseKnownFiles, } from "@smithy/shared-ini-file-loader"; +import { EXPIRE_WINDOW_MS, REFRESH_MESSAGE } from "./constants"; +import { getNewSsoOidcToken } from "./getNewSsoOidcToken"; +import { validateTokenExpiry } from "./validateTokenExpiry"; +import { validateTokenKey } from "./validateTokenKey"; +import { writeSSOTokenToFile } from "./writeSSOTokenToFile"; +const lastRefreshAttemptTime = new Date(0); +export const fromSso = (_init = {}) => async ({ callerClientConfig } = {}) => { + const init = { + ..._init, + parentClientConfig: { + 
...callerClientConfig, + ..._init.parentClientConfig, + }, + }; + init.logger?.debug("@aws-sdk/token-providers - fromSso"); + const profiles = await parseKnownFiles(init); + const profileName = getProfileName({ + profile: init.profile ?? callerClientConfig?.profile, + }); + const profile = profiles[profileName]; + if (!profile) { + throw new TokenProviderError(`Profile '${profileName}' could not be found in shared credentials file.`, false); + } + else if (!profile["sso_session"]) { + throw new TokenProviderError(`Profile '${profileName}' is missing required property 'sso_session'.`); + } + const ssoSessionName = profile["sso_session"]; + const ssoSessions = await loadSsoSessionData(init); + const ssoSession = ssoSessions[ssoSessionName]; + if (!ssoSession) { + throw new TokenProviderError(`Sso session '${ssoSessionName}' could not be found in shared credentials file.`, false); + } + for (const ssoSessionRequiredKey of ["sso_start_url", "sso_region"]) { + if (!ssoSession[ssoSessionRequiredKey]) { + throw new TokenProviderError(`Sso session '${ssoSessionName}' is missing required property '${ssoSessionRequiredKey}'.`, false); + } + } + const ssoStartUrl = ssoSession["sso_start_url"]; + const ssoRegion = ssoSession["sso_region"]; + let ssoToken; + try { + ssoToken = await getSSOTokenFromFile(ssoSessionName); + } + catch (e) { + throw new TokenProviderError(`The SSO session token associated with profile=${profileName} was not found or is invalid. 
${REFRESH_MESSAGE}`, false); + } + validateTokenKey("accessToken", ssoToken.accessToken); + validateTokenKey("expiresAt", ssoToken.expiresAt); + const { accessToken, expiresAt } = ssoToken; + const existingToken = { token: accessToken, expiration: new Date(expiresAt) }; + if (existingToken.expiration.getTime() - Date.now() > EXPIRE_WINDOW_MS) { + return existingToken; + } + if (Date.now() - lastRefreshAttemptTime.getTime() < 30 * 1000) { + validateTokenExpiry(existingToken); + return existingToken; + } + validateTokenKey("clientId", ssoToken.clientId, true); + validateTokenKey("clientSecret", ssoToken.clientSecret, true); + validateTokenKey("refreshToken", ssoToken.refreshToken, true); + try { + lastRefreshAttemptTime.setTime(Date.now()); + const newSsoOidcToken = await getNewSsoOidcToken(ssoToken, ssoRegion, init); + validateTokenKey("accessToken", newSsoOidcToken.accessToken); + validateTokenKey("expiresIn", newSsoOidcToken.expiresIn); + const newTokenExpiration = new Date(Date.now() + newSsoOidcToken.expiresIn * 1000); + try { + await writeSSOTokenToFile(ssoSessionName, { + ...ssoToken, + accessToken: newSsoOidcToken.accessToken, + expiresAt: newTokenExpiration.toISOString(), + refreshToken: newSsoOidcToken.refreshToken, + }); + } + catch (error) { + } + return { + token: newSsoOidcToken.accessToken, + expiration: newTokenExpiration, + }; + } + catch (error) { + validateTokenExpiry(existingToken); + return existingToken; + } +}; diff --git a/amplify/functions/downloadDocument/node_modules/@aws-sdk/token-providers/dist-es/fromStatic.js b/amplify/functions/downloadDocument/node_modules/@aws-sdk/token-providers/dist-es/fromStatic.js new file mode 100644 index 0000000..0704ae0 --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@aws-sdk/token-providers/dist-es/fromStatic.js @@ -0,0 +1,8 @@ +import { TokenProviderError } from "@smithy/property-provider"; +export const fromStatic = ({ token, logger }) => async () => { + 
logger?.debug("@aws-sdk/token-providers - fromStatic"); + if (!token || !token.token) { + throw new TokenProviderError(`Please pass a valid token to fromStatic`, false); + } + return token; +}; diff --git a/amplify/functions/downloadDocument/node_modules/@aws-sdk/token-providers/dist-es/getNewSsoOidcToken.js b/amplify/functions/downloadDocument/node_modules/@aws-sdk/token-providers/dist-es/getNewSsoOidcToken.js new file mode 100644 index 0000000..00f7b2c --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@aws-sdk/token-providers/dist-es/getNewSsoOidcToken.js @@ -0,0 +1,11 @@ +import { getSsoOidcClient } from "./getSsoOidcClient"; +export const getNewSsoOidcToken = async (ssoToken, ssoRegion, init = {}) => { + const { CreateTokenCommand } = await import("@aws-sdk/nested-clients/sso-oidc"); + const ssoOidcClient = await getSsoOidcClient(ssoRegion, init); + return ssoOidcClient.send(new CreateTokenCommand({ + clientId: ssoToken.clientId, + clientSecret: ssoToken.clientSecret, + refreshToken: ssoToken.refreshToken, + grantType: "refresh_token", + })); +}; diff --git a/amplify/functions/downloadDocument/node_modules/@aws-sdk/token-providers/dist-es/getSsoOidcClient.js b/amplify/functions/downloadDocument/node_modules/@aws-sdk/token-providers/dist-es/getSsoOidcClient.js new file mode 100644 index 0000000..689be72 --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@aws-sdk/token-providers/dist-es/getSsoOidcClient.js @@ -0,0 +1,8 @@ +export const getSsoOidcClient = async (ssoRegion, init = {}) => { + const { SSOOIDCClient } = await import("@aws-sdk/nested-clients/sso-oidc"); + const ssoOidcClient = new SSOOIDCClient(Object.assign({}, init.clientConfig ?? {}, { + region: ssoRegion ?? init.clientConfig?.region, + logger: init.clientConfig?.logger ?? 
init.parentClientConfig?.logger, + })); + return ssoOidcClient; +}; diff --git a/amplify/functions/downloadDocument/node_modules/@aws-sdk/token-providers/dist-es/index.js b/amplify/functions/downloadDocument/node_modules/@aws-sdk/token-providers/dist-es/index.js new file mode 100644 index 0000000..a0b176b --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@aws-sdk/token-providers/dist-es/index.js @@ -0,0 +1,3 @@ +export * from "./fromSso"; +export * from "./fromStatic"; +export * from "./nodeProvider"; diff --git a/amplify/functions/downloadDocument/node_modules/@aws-sdk/token-providers/dist-es/nodeProvider.js b/amplify/functions/downloadDocument/node_modules/@aws-sdk/token-providers/dist-es/nodeProvider.js new file mode 100644 index 0000000..a0c7b52 --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@aws-sdk/token-providers/dist-es/nodeProvider.js @@ -0,0 +1,5 @@ +import { chain, memoize, TokenProviderError } from "@smithy/property-provider"; +import { fromSso } from "./fromSso"; +export const nodeProvider = (init = {}) => memoize(chain(fromSso(init), async () => { + throw new TokenProviderError("Could not load token from any providers", false); +}), (token) => token.expiration !== undefined && token.expiration.getTime() - Date.now() < 300000, (token) => token.expiration !== undefined); diff --git a/amplify/functions/downloadDocument/node_modules/@aws-sdk/token-providers/dist-es/validateTokenExpiry.js b/amplify/functions/downloadDocument/node_modules/@aws-sdk/token-providers/dist-es/validateTokenExpiry.js new file mode 100644 index 0000000..8118d7c --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@aws-sdk/token-providers/dist-es/validateTokenExpiry.js @@ -0,0 +1,7 @@ +import { TokenProviderError } from "@smithy/property-provider"; +import { REFRESH_MESSAGE } from "./constants"; +export const validateTokenExpiry = (token) => { + if (token.expiration && token.expiration.getTime() < Date.now()) { + throw new 
TokenProviderError(`Token is expired. ${REFRESH_MESSAGE}`, false); + } +}; diff --git a/amplify/functions/downloadDocument/node_modules/@aws-sdk/token-providers/dist-es/validateTokenKey.js b/amplify/functions/downloadDocument/node_modules/@aws-sdk/token-providers/dist-es/validateTokenKey.js new file mode 100644 index 0000000..4979638 --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@aws-sdk/token-providers/dist-es/validateTokenKey.js @@ -0,0 +1,7 @@ +import { TokenProviderError } from "@smithy/property-provider"; +import { REFRESH_MESSAGE } from "./constants"; +export const validateTokenKey = (key, value, forRefresh = false) => { + if (typeof value === "undefined") { + throw new TokenProviderError(`Value not present for '${key}' in SSO Token${forRefresh ? ". Cannot refresh" : ""}. ${REFRESH_MESSAGE}`, false); + } +}; diff --git a/amplify/functions/downloadDocument/node_modules/@aws-sdk/token-providers/dist-es/writeSSOTokenToFile.js b/amplify/functions/downloadDocument/node_modules/@aws-sdk/token-providers/dist-es/writeSSOTokenToFile.js new file mode 100644 index 0000000..6da2c9b --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@aws-sdk/token-providers/dist-es/writeSSOTokenToFile.js @@ -0,0 +1,8 @@ +import { getSSOTokenFilepath } from "@smithy/shared-ini-file-loader"; +import { promises as fsPromises } from "fs"; +const { writeFile } = fsPromises; +export const writeSSOTokenToFile = (id, ssoToken) => { + const tokenFilepath = getSSOTokenFilepath(id); + const tokenString = JSON.stringify(ssoToken, null, 2); + return writeFile(tokenFilepath, tokenString); +}; diff --git a/amplify/functions/downloadDocument/node_modules/@aws-sdk/token-providers/dist-types/constants.d.ts b/amplify/functions/downloadDocument/node_modules/@aws-sdk/token-providers/dist-types/constants.d.ts new file mode 100644 index 0000000..de28cde --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@aws-sdk/token-providers/dist-types/constants.d.ts @@ 
-0,0 +1,8 @@ +/** + * The time window (5 mins) that SDK will treat the SSO token expires in before the defined expiration date in token. + * This is needed because server side may have invalidated the token before the defined expiration date. + * + * @internal + */ +export declare const EXPIRE_WINDOW_MS: number; +export declare const REFRESH_MESSAGE = "To refresh this SSO session run 'aws sso login' with the corresponding profile."; diff --git a/amplify/functions/downloadDocument/node_modules/@aws-sdk/token-providers/dist-types/fromSso.d.ts b/amplify/functions/downloadDocument/node_modules/@aws-sdk/token-providers/dist-types/fromSso.d.ts new file mode 100644 index 0000000..03f5359 --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@aws-sdk/token-providers/dist-types/fromSso.d.ts @@ -0,0 +1,12 @@ +import { CredentialProviderOptions, RuntimeConfigIdentityProvider, TokenIdentity } from "@aws-sdk/types"; +import { SourceProfileInit } from "@smithy/shared-ini-file-loader"; +export interface FromSsoInit extends SourceProfileInit, CredentialProviderOptions { + /** + * @see SSOOIDCClientConfig in \@aws-sdk/client-sso-oidc. + */ + clientConfig?: any; +} +/** + * Creates a token provider that will read from SSO token cache or ssoOidc.createToken() call. 
+ */ +export declare const fromSso: (_init?: FromSsoInit) => RuntimeConfigIdentityProvider; diff --git a/amplify/functions/downloadDocument/node_modules/@aws-sdk/token-providers/dist-types/fromStatic.d.ts b/amplify/functions/downloadDocument/node_modules/@aws-sdk/token-providers/dist-types/fromStatic.d.ts new file mode 100644 index 0000000..d496172 --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@aws-sdk/token-providers/dist-types/fromStatic.d.ts @@ -0,0 +1,9 @@ +import { CredentialProviderOptions, TokenIdentity, TokenIdentityProvider } from "@aws-sdk/types"; +export interface FromStaticInit extends CredentialProviderOptions { + token?: TokenIdentity; +} +/** + * Creates a token provider that will read from static token. + * @public + */ +export declare const fromStatic: ({ token, logger }: FromStaticInit) => TokenIdentityProvider; diff --git a/amplify/functions/downloadDocument/node_modules/@aws-sdk/token-providers/dist-types/getNewSsoOidcToken.d.ts b/amplify/functions/downloadDocument/node_modules/@aws-sdk/token-providers/dist-types/getNewSsoOidcToken.d.ts new file mode 100644 index 0000000..75c6322 --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@aws-sdk/token-providers/dist-types/getNewSsoOidcToken.d.ts @@ -0,0 +1,8 @@ +/// +import { SSOToken } from "@smithy/shared-ini-file-loader"; +import { FromSsoInit } from "./fromSso"; +/** + * Returns a new SSO OIDC token from ssoOids.createToken() API call. 
+ * @internal + */ +export declare const getNewSsoOidcToken: (ssoToken: SSOToken, ssoRegion: string, init?: FromSsoInit) => Promise; diff --git a/amplify/functions/downloadDocument/node_modules/@aws-sdk/token-providers/dist-types/getSsoOidcClient.d.ts b/amplify/functions/downloadDocument/node_modules/@aws-sdk/token-providers/dist-types/getSsoOidcClient.d.ts new file mode 100644 index 0000000..5c9dcb4 --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@aws-sdk/token-providers/dist-types/getSsoOidcClient.d.ts @@ -0,0 +1,7 @@ +/// +import { FromSsoInit } from "./fromSso"; +/** + * Returns a SSOOIDC client for the given region. + * @internal + */ +export declare const getSsoOidcClient: (ssoRegion: string, init?: FromSsoInit) => Promise; diff --git a/amplify/functions/downloadDocument/node_modules/@aws-sdk/token-providers/dist-types/index.d.ts b/amplify/functions/downloadDocument/node_modules/@aws-sdk/token-providers/dist-types/index.d.ts new file mode 100644 index 0000000..a0b176b --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@aws-sdk/token-providers/dist-types/index.d.ts @@ -0,0 +1,3 @@ +export * from "./fromSso"; +export * from "./fromStatic"; +export * from "./nodeProvider"; diff --git a/amplify/functions/downloadDocument/node_modules/@aws-sdk/token-providers/dist-types/nodeProvider.d.ts b/amplify/functions/downloadDocument/node_modules/@aws-sdk/token-providers/dist-types/nodeProvider.d.ts new file mode 100644 index 0000000..e4846ec --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@aws-sdk/token-providers/dist-types/nodeProvider.d.ts @@ -0,0 +1,18 @@ +import { TokenIdentityProvider } from "@aws-sdk/types"; +import { FromSsoInit } from "./fromSso"; +/** + * Creates a token provider that will attempt to find token from the + * following sources (listed in order of precedence): + * * SSO token from SSO cache or ssoOidc.createToken() call + * + * The default token provider is designed to invoke one provider at a 
time and only + * continue to the next if no token has been located. It currently has only SSO + * Token Provider in the chain. + * + * @param init Configuration that is passed to each individual + * provider + * + * @see fromSso The function used to source credentials from + * SSO cache or ssoOidc.createToken() call + */ +export declare const nodeProvider: (init?: FromSsoInit) => TokenIdentityProvider; diff --git a/amplify/functions/downloadDocument/node_modules/@aws-sdk/token-providers/dist-types/ts3.4/constants.d.ts b/amplify/functions/downloadDocument/node_modules/@aws-sdk/token-providers/dist-types/ts3.4/constants.d.ts new file mode 100644 index 0000000..d7e7577 --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@aws-sdk/token-providers/dist-types/ts3.4/constants.d.ts @@ -0,0 +1,3 @@ +export declare const EXPIRE_WINDOW_MS: number; +export declare const REFRESH_MESSAGE = + "To refresh this SSO session run 'aws sso login' with the corresponding profile."; diff --git a/amplify/functions/downloadDocument/node_modules/@aws-sdk/token-providers/dist-types/ts3.4/fromSso.d.ts b/amplify/functions/downloadDocument/node_modules/@aws-sdk/token-providers/dist-types/ts3.4/fromSso.d.ts new file mode 100644 index 0000000..3b5bb60 --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@aws-sdk/token-providers/dist-types/ts3.4/fromSso.d.ts @@ -0,0 +1,14 @@ +import { + CredentialProviderOptions, + RuntimeConfigIdentityProvider, + TokenIdentity, +} from "@aws-sdk/types"; +import { SourceProfileInit } from "@smithy/shared-ini-file-loader"; +export interface FromSsoInit + extends SourceProfileInit, + CredentialProviderOptions { + clientConfig?: any; +} +export declare const fromSso: ( + _init?: FromSsoInit +) => RuntimeConfigIdentityProvider; diff --git a/amplify/functions/downloadDocument/node_modules/@aws-sdk/token-providers/dist-types/ts3.4/fromStatic.d.ts 
b/amplify/functions/downloadDocument/node_modules/@aws-sdk/token-providers/dist-types/ts3.4/fromStatic.d.ts new file mode 100644 index 0000000..e680012 --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@aws-sdk/token-providers/dist-types/ts3.4/fromStatic.d.ts @@ -0,0 +1,12 @@ +import { + CredentialProviderOptions, + TokenIdentity, + TokenIdentityProvider, +} from "@aws-sdk/types"; +export interface FromStaticInit extends CredentialProviderOptions { + token?: TokenIdentity; +} +export declare const fromStatic: ({ + token, + logger, +}: FromStaticInit) => TokenIdentityProvider; diff --git a/amplify/functions/downloadDocument/node_modules/@aws-sdk/token-providers/dist-types/ts3.4/getNewSsoOidcToken.d.ts b/amplify/functions/downloadDocument/node_modules/@aws-sdk/token-providers/dist-types/ts3.4/getNewSsoOidcToken.d.ts new file mode 100644 index 0000000..6bcd71d --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@aws-sdk/token-providers/dist-types/ts3.4/getNewSsoOidcToken.d.ts @@ -0,0 +1,9 @@ +import { SSOToken } from "@smithy/shared-ini-file-loader"; +import { FromSsoInit } from "./fromSso"; +export declare const getNewSsoOidcToken: ( + ssoToken: SSOToken, + ssoRegion: string, + init?: FromSsoInit +) => Promise< + import("@aws-sdk/nested-clients/sso-oidc").CreateTokenCommandOutput +>; diff --git a/amplify/functions/downloadDocument/node_modules/@aws-sdk/token-providers/dist-types/ts3.4/getSsoOidcClient.d.ts b/amplify/functions/downloadDocument/node_modules/@aws-sdk/token-providers/dist-types/ts3.4/getSsoOidcClient.d.ts new file mode 100644 index 0000000..c07dc69 --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@aws-sdk/token-providers/dist-types/ts3.4/getSsoOidcClient.d.ts @@ -0,0 +1,5 @@ +import { FromSsoInit } from "./fromSso"; +export declare const getSsoOidcClient: ( + ssoRegion: string, + init?: FromSsoInit +) => Promise; diff --git 
a/amplify/functions/downloadDocument/node_modules/@aws-sdk/token-providers/dist-types/ts3.4/index.d.ts b/amplify/functions/downloadDocument/node_modules/@aws-sdk/token-providers/dist-types/ts3.4/index.d.ts new file mode 100644 index 0000000..a0b176b --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@aws-sdk/token-providers/dist-types/ts3.4/index.d.ts @@ -0,0 +1,3 @@ +export * from "./fromSso"; +export * from "./fromStatic"; +export * from "./nodeProvider"; diff --git a/amplify/functions/downloadDocument/node_modules/@aws-sdk/token-providers/dist-types/ts3.4/nodeProvider.d.ts b/amplify/functions/downloadDocument/node_modules/@aws-sdk/token-providers/dist-types/ts3.4/nodeProvider.d.ts new file mode 100644 index 0000000..11a9bd4 --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@aws-sdk/token-providers/dist-types/ts3.4/nodeProvider.d.ts @@ -0,0 +1,5 @@ +import { TokenIdentityProvider } from "@aws-sdk/types"; +import { FromSsoInit } from "./fromSso"; +export declare const nodeProvider: ( + init?: FromSsoInit +) => TokenIdentityProvider; diff --git a/amplify/functions/downloadDocument/node_modules/@aws-sdk/token-providers/dist-types/ts3.4/validateTokenExpiry.d.ts b/amplify/functions/downloadDocument/node_modules/@aws-sdk/token-providers/dist-types/ts3.4/validateTokenExpiry.d.ts new file mode 100644 index 0000000..9003605 --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@aws-sdk/token-providers/dist-types/ts3.4/validateTokenExpiry.d.ts @@ -0,0 +1,2 @@ +import { TokenIdentity } from "@aws-sdk/types"; +export declare const validateTokenExpiry: (token: TokenIdentity) => void; diff --git a/amplify/functions/downloadDocument/node_modules/@aws-sdk/token-providers/dist-types/ts3.4/validateTokenKey.d.ts b/amplify/functions/downloadDocument/node_modules/@aws-sdk/token-providers/dist-types/ts3.4/validateTokenKey.d.ts new file mode 100644 index 0000000..105b2b4 --- /dev/null +++ 
b/amplify/functions/downloadDocument/node_modules/@aws-sdk/token-providers/dist-types/ts3.4/validateTokenKey.d.ts @@ -0,0 +1,5 @@ +export declare const validateTokenKey: ( + key: string, + value: unknown, + forRefresh?: boolean +) => void; diff --git a/amplify/functions/downloadDocument/node_modules/@aws-sdk/token-providers/dist-types/ts3.4/writeSSOTokenToFile.d.ts b/amplify/functions/downloadDocument/node_modules/@aws-sdk/token-providers/dist-types/ts3.4/writeSSOTokenToFile.d.ts new file mode 100644 index 0000000..a6d025f --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@aws-sdk/token-providers/dist-types/ts3.4/writeSSOTokenToFile.d.ts @@ -0,0 +1,5 @@ +import { SSOToken } from "@smithy/shared-ini-file-loader"; +export declare const writeSSOTokenToFile: ( + id: string, + ssoToken: SSOToken +) => Promise; diff --git a/amplify/functions/downloadDocument/node_modules/@aws-sdk/token-providers/dist-types/validateTokenExpiry.d.ts b/amplify/functions/downloadDocument/node_modules/@aws-sdk/token-providers/dist-types/validateTokenExpiry.d.ts new file mode 100644 index 0000000..1253784 --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@aws-sdk/token-providers/dist-types/validateTokenExpiry.d.ts @@ -0,0 +1,5 @@ +import { TokenIdentity } from "@aws-sdk/types"; +/** + * Throws TokenProviderError is token is expired. + */ +export declare const validateTokenExpiry: (token: TokenIdentity) => void; diff --git a/amplify/functions/downloadDocument/node_modules/@aws-sdk/token-providers/dist-types/validateTokenKey.d.ts b/amplify/functions/downloadDocument/node_modules/@aws-sdk/token-providers/dist-types/validateTokenKey.d.ts new file mode 100644 index 0000000..a9618fd --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@aws-sdk/token-providers/dist-types/validateTokenKey.d.ts @@ -0,0 +1,4 @@ +/** + * Throws TokenProviderError if value is undefined for key. 
+ */ +export declare const validateTokenKey: (key: string, value: unknown, forRefresh?: boolean) => void; diff --git a/amplify/functions/downloadDocument/node_modules/@aws-sdk/token-providers/dist-types/writeSSOTokenToFile.d.ts b/amplify/functions/downloadDocument/node_modules/@aws-sdk/token-providers/dist-types/writeSSOTokenToFile.d.ts new file mode 100644 index 0000000..a1e17e8 --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@aws-sdk/token-providers/dist-types/writeSSOTokenToFile.d.ts @@ -0,0 +1,5 @@ +import { SSOToken } from "@smithy/shared-ini-file-loader"; +/** + * Writes SSO token to file based on filepath computed from ssoStartUrl or session name. + */ +export declare const writeSSOTokenToFile: (id: string, ssoToken: SSOToken) => Promise; diff --git a/amplify/functions/downloadDocument/node_modules/@aws-sdk/token-providers/package.json b/amplify/functions/downloadDocument/node_modules/@aws-sdk/token-providers/package.json new file mode 100644 index 0000000..d035426 --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@aws-sdk/token-providers/package.json @@ -0,0 +1,67 @@ +{ + "name": "@aws-sdk/token-providers", + "version": "3.803.0", + "description": "A collection of token providers", + "main": "./dist-cjs/index.js", + "module": "./dist-es/index.js", + "sideEffects": false, + "scripts": { + "build": "concurrently 'yarn:build:cjs' 'yarn:build:es' 'yarn:build:types'", + "build:cjs": "node ../../scripts/compilation/inline token-providers", + "build:es": "tsc -p tsconfig.es.json", + "build:include:deps": "lerna run --scope $npm_package_name --include-dependencies build", + "build:types": "tsc -p tsconfig.types.json", + "build:types:downlevel": "downlevel-dts dist-types dist-types/ts3.4", + "clean": "rimraf ./dist-* && rimraf *.tsbuildinfo", + "extract:docs": "api-extractor run --local", + "test": "yarn g:vitest run", + "test:watch": "yarn g:vitest watch" + }, + "keywords": [ + "aws", + "token" + ], + "author": { + "name": "AWS 
SDK for JavaScript Team", + "url": "https://aws.amazon.com/javascript/" + }, + "license": "Apache-2.0", + "dependencies": { + "@aws-sdk/nested-clients": "3.803.0", + "@aws-sdk/types": "3.775.0", + "@smithy/property-provider": "^4.0.2", + "@smithy/shared-ini-file-loader": "^4.0.2", + "@smithy/types": "^4.2.0", + "tslib": "^2.6.2" + }, + "devDependencies": { + "@tsconfig/recommended": "1.0.1", + "@types/node": "^18.19.69", + "concurrently": "7.0.0", + "downlevel-dts": "0.10.1", + "rimraf": "3.0.2", + "typescript": "~5.2.2" + }, + "types": "./dist-types/index.d.ts", + "engines": { + "node": ">=18.0.0" + }, + "typesVersions": { + "<4.0": { + "dist-types/*": [ + "dist-types/ts3.4/*" + ] + } + }, + "files": [ + "dist-*/**" + ], + "browser": {}, + "react-native": {}, + "homepage": "https://github.com/aws/aws-sdk-js-v3/tree/main/packages/token-providers", + "repository": { + "type": "git", + "url": "https://github.com/aws/aws-sdk-js-v3.git", + "directory": "packages/token-providers" + } +} diff --git a/amplify/functions/downloadDocument/node_modules/@aws-sdk/types/LICENSE b/amplify/functions/downloadDocument/node_modules/@aws-sdk/types/LICENSE new file mode 100644 index 0000000..7b6491b --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@aws-sdk/types/LICENSE @@ -0,0 +1,201 @@ +Apache License + Version 2.0, January 2004 + http://www.apache.org/licenses/ + + TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION + + 1. Definitions. + + "License" shall mean the terms and conditions for use, reproduction, + and distribution as defined by Sections 1 through 9 of this document. + + "Licensor" shall mean the copyright owner or entity authorized by + the copyright owner that is granting the License. + + "Legal Entity" shall mean the union of the acting entity and all + other entities that control, are controlled by, or are under common + control with that entity. 
For the purposes of this definition, + "control" means (i) the power, direct or indirect, to cause the + direction or management of such entity, whether by contract or + otherwise, or (ii) ownership of fifty percent (50%) or more of the + outstanding shares, or (iii) beneficial ownership of such entity. + + "You" (or "Your") shall mean an individual or Legal Entity + exercising permissions granted by this License. + + "Source" form shall mean the preferred form for making modifications, + including but not limited to software source code, documentation + source, and configuration files. + + "Object" form shall mean any form resulting from mechanical + transformation or translation of a Source form, including but + not limited to compiled object code, generated documentation, + and conversions to other media types. + + "Work" shall mean the work of authorship, whether in Source or + Object form, made available under the License, as indicated by a + copyright notice that is included in or attached to the work + (an example is provided in the Appendix below). + + "Derivative Works" shall mean any work, whether in Source or Object + form, that is based on (or derived from) the Work and for which the + editorial revisions, annotations, elaborations, or other modifications + represent, as a whole, an original work of authorship. For the purposes + of this License, Derivative Works shall not include works that remain + separable from, or merely link (or bind by name) to the interfaces of, + the Work and Derivative Works thereof. + + "Contribution" shall mean any work of authorship, including + the original version of the Work and any modifications or additions + to that Work or Derivative Works thereof, that is intentionally + submitted to Licensor for inclusion in the Work by the copyright owner + or by an individual or Legal Entity authorized to submit on behalf of + the copyright owner. 
For the purposes of this definition, "submitted" + means any form of electronic, verbal, or written communication sent + to the Licensor or its representatives, including but not limited to + communication on electronic mailing lists, source code control systems, + and issue tracking systems that are managed by, or on behalf of, the + Licensor for the purpose of discussing and improving the Work, but + excluding communication that is conspicuously marked or otherwise + designated in writing by the copyright owner as "Not a Contribution." + + "Contributor" shall mean Licensor and any individual or Legal Entity + on behalf of whom a Contribution has been received by Licensor and + subsequently incorporated within the Work. + + 2. Grant of Copyright License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + copyright license to reproduce, prepare Derivative Works of, + publicly display, publicly perform, sublicense, and distribute the + Work and such Derivative Works in Source or Object form. + + 3. Grant of Patent License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + (except as stated in this section) patent license to make, have made, + use, offer to sell, sell, import, and otherwise transfer the Work, + where such license applies only to those patent claims licensable + by such Contributor that are necessarily infringed by their + Contribution(s) alone or by combination of their Contribution(s) + with the Work to which such Contribution(s) was submitted. 
If You + institute patent litigation against any entity (including a + cross-claim or counterclaim in a lawsuit) alleging that the Work + or a Contribution incorporated within the Work constitutes direct + or contributory patent infringement, then any patent licenses + granted to You under this License for that Work shall terminate + as of the date such litigation is filed. + + 4. Redistribution. You may reproduce and distribute copies of the + Work or Derivative Works thereof in any medium, with or without + modifications, and in Source or Object form, provided that You + meet the following conditions: + + (a) You must give any other recipients of the Work or + Derivative Works a copy of this License; and + + (b) You must cause any modified files to carry prominent notices + stating that You changed the files; and + + (c) You must retain, in the Source form of any Derivative Works + that You distribute, all copyright, patent, trademark, and + attribution notices from the Source form of the Work, + excluding those notices that do not pertain to any part of + the Derivative Works; and + + (d) If the Work includes a "NOTICE" text file as part of its + distribution, then any Derivative Works that You distribute must + include a readable copy of the attribution notices contained + within such NOTICE file, excluding those notices that do not + pertain to any part of the Derivative Works, in at least one + of the following places: within a NOTICE text file distributed + as part of the Derivative Works; within the Source form or + documentation, if provided along with the Derivative Works; or, + within a display generated by the Derivative Works, if and + wherever such third-party notices normally appear. The contents + of the NOTICE file are for informational purposes only and + do not modify the License. 
You may add Your own attribution + notices within Derivative Works that You distribute, alongside + or as an addendum to the NOTICE text from the Work, provided + that such additional attribution notices cannot be construed + as modifying the License. + + You may add Your own copyright statement to Your modifications and + may provide additional or different license terms and conditions + for use, reproduction, or distribution of Your modifications, or + for any such Derivative Works as a whole, provided Your use, + reproduction, and distribution of the Work otherwise complies with + the conditions stated in this License. + + 5. Submission of Contributions. Unless You explicitly state otherwise, + any Contribution intentionally submitted for inclusion in the Work + by You to the Licensor shall be under the terms and conditions of + this License, without any additional terms or conditions. + Notwithstanding the above, nothing herein shall supersede or modify + the terms of any separate license agreement you may have executed + with Licensor regarding such Contributions. + + 6. Trademarks. This License does not grant permission to use the trade + names, trademarks, service marks, or product names of the Licensor, + except as required for reasonable and customary use in describing the + origin of the Work and reproducing the content of the NOTICE file. + + 7. Disclaimer of Warranty. Unless required by applicable law or + agreed to in writing, Licensor provides the Work (and each + Contributor provides its Contributions) on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or + implied, including, without limitation, any warranties or conditions + of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A + PARTICULAR PURPOSE. You are solely responsible for determining the + appropriateness of using or redistributing the Work and assume any + risks associated with Your exercise of permissions under this License. + + 8. 
Limitation of Liability. In no event and under no legal theory, + whether in tort (including negligence), contract, or otherwise, + unless required by applicable law (such as deliberate and grossly + negligent acts) or agreed to in writing, shall any Contributor be + liable to You for damages, including any direct, indirect, special, + incidental, or consequential damages of any character arising as a + result of this License or out of the use or inability to use the + Work (including but not limited to damages for loss of goodwill, + work stoppage, computer failure or malfunction, or any and all + other commercial damages or losses), even if such Contributor + has been advised of the possibility of such damages. + + 9. Accepting Warranty or Additional Liability. While redistributing + the Work or Derivative Works thereof, You may choose to offer, + and charge a fee for, acceptance of support, warranty, indemnity, + or other liability obligations and/or rights consistent with this + License. However, in accepting such obligations, You may act only + on Your own behalf and on Your sole responsibility, not on behalf + of any other Contributor, and only if You agree to indemnify, + defend, and hold each Contributor harmless for any liability + incurred by, or claims asserted against, such Contributor by reason + of your accepting any such warranty or additional liability. + + END OF TERMS AND CONDITIONS + + APPENDIX: How to apply the Apache License to your work. + + To apply the Apache License to your work, attach the following + boilerplate notice, with the fields enclosed by brackets "{}" + replaced with your own identifying information. (Don't include + the brackets!) The text should be enclosed in the appropriate + comment syntax for the file format. We also recommend that a + file or class name and description of purpose be included on the + same "printed page" as the copyright notice for easier + identification within third-party archives. 
+ + Copyright 2018-2020 Amazon.com, Inc. or its affiliates. All Rights Reserved. + + Licensed under the Apache License, Version 2.0 (the "License"); + you may not use this file except in compliance with the License. + You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + + Unless required by applicable law or agreed to in writing, software + distributed under the License is distributed on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + See the License for the specific language governing permissions and + limitations under the License. \ No newline at end of file diff --git a/amplify/functions/downloadDocument/node_modules/@aws-sdk/types/README.md b/amplify/functions/downloadDocument/node_modules/@aws-sdk/types/README.md new file mode 100644 index 0000000..a5658db --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@aws-sdk/types/README.md @@ -0,0 +1,4 @@ +# @aws-sdk/types + +[![NPM version](https://img.shields.io/npm/v/@aws-sdk/types/latest.svg)](https://www.npmjs.com/package/@aws-sdk/types) +[![NPM downloads](https://img.shields.io/npm/dm/@aws-sdk/types.svg)](https://www.npmjs.com/package/@aws-sdk/types) diff --git a/amplify/functions/downloadDocument/node_modules/@aws-sdk/types/dist-cjs/index.js b/amplify/functions/downloadDocument/node_modules/@aws-sdk/types/dist-cjs/index.js new file mode 100644 index 0000000..8114db0 --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@aws-sdk/types/dist-cjs/index.js @@ -0,0 +1,294 @@ +"use strict"; +var __defProp = Object.defineProperty; +var __getOwnPropDesc = Object.getOwnPropertyDescriptor; +var __getOwnPropNames = Object.getOwnPropertyNames; +var __hasOwnProp = Object.prototype.hasOwnProperty; +var __export = (target, all) => { + for (var name in all) + __defProp(target, name, { get: all[name], enumerable: true }); +}; +var __copyProps = (to, from, except, desc) => { + if (from && typeof from === "object" || 
typeof from === "function") { + for (let key of __getOwnPropNames(from)) + if (!__hasOwnProp.call(to, key) && key !== except) + __defProp(to, key, { get: () => from[key], enumerable: !(desc = __getOwnPropDesc(from, key)) || desc.enumerable }); + } + return to; +}; +var __toCommonJS = (mod) => __copyProps(__defProp({}, "__esModule", { value: true }), mod); + +// src/index.ts +var index_exports = {}; +__export(index_exports, { + AbortController: () => import_types.AbortController, + AbortHandler: () => import_types.AbortHandler, + AbortSignal: () => import_types.AbortSignal, + AbsoluteLocation: () => import_types.AbsoluteLocation, + AuthScheme: () => import_types.AuthScheme, + AvailableMessage: () => import_types.AvailableMessage, + AvailableMessages: () => import_types.AvailableMessages, + AwsCredentialIdentity: () => import_types.AwsCredentialIdentity, + AwsCredentialIdentityProvider: () => import_types.AwsCredentialIdentityProvider, + BinaryHeaderValue: () => import_types.BinaryHeaderValue, + BlobTypes: () => import_types.BlobTypes, + BodyLengthCalculator: () => import_types.BodyLengthCalculator, + BooleanHeaderValue: () => import_types.BooleanHeaderValue, + BuildHandler: () => import_types.BuildHandler, + BuildHandlerArguments: () => import_types.BuildHandlerArguments, + BuildHandlerOptions: () => import_types.BuildHandlerOptions, + BuildHandlerOutput: () => import_types.BuildHandlerOutput, + BuildMiddleware: () => import_types.BuildMiddleware, + ByteHeaderValue: () => import_types.ByteHeaderValue, + Checksum: () => import_types.Checksum, + ChecksumConstructor: () => import_types.ChecksumConstructor, + Client: () => import_types.Client, + Command: () => import_types.Command, + ConnectConfiguration: () => import_types.ConnectConfiguration, + ConnectionManager: () => import_types.ConnectionManager, + ConnectionManagerConfiguration: () => import_types.ConnectionManagerConfiguration, + ConnectionPool: () => import_types.ConnectionPool, + DateInput: () => 
import_types.DateInput, + Decoder: () => import_types.Decoder, + DeserializeHandler: () => import_types.DeserializeHandler, + DeserializeHandlerArguments: () => import_types.DeserializeHandlerArguments, + DeserializeHandlerOptions: () => import_types.DeserializeHandlerOptions, + DeserializeHandlerOutput: () => import_types.DeserializeHandlerOutput, + DeserializeMiddleware: () => import_types.DeserializeMiddleware, + DocumentType: () => import_types.DocumentType, + Encoder: () => import_types.Encoder, + Endpoint: () => import_types.Endpoint, + EndpointARN: () => import_types.EndpointARN, + EndpointBearer: () => import_types.EndpointBearer, + EndpointObjectProperty: () => import_types.EndpointObjectProperty, + EndpointParameters: () => import_types.EndpointParameters, + EndpointPartition: () => import_types.EndpointPartition, + EndpointURL: () => import_types.EndpointURL, + EndpointURLScheme: () => import_types.EndpointURLScheme, + EndpointV2: () => import_types.EndpointV2, + EventSigner: () => import_types.EventSigner, + EventSigningArguments: () => import_types.EventSigningArguments, + EventStreamMarshaller: () => import_types.EventStreamMarshaller, + EventStreamMarshallerDeserFn: () => import_types.EventStreamMarshallerDeserFn, + EventStreamMarshallerSerFn: () => import_types.EventStreamMarshallerSerFn, + EventStreamPayloadHandler: () => import_types.EventStreamPayloadHandler, + EventStreamPayloadHandlerProvider: () => import_types.EventStreamPayloadHandlerProvider, + EventStreamRequestSigner: () => import_types.EventStreamRequestSigner, + EventStreamSerdeContext: () => import_types.EventStreamSerdeContext, + EventStreamSerdeProvider: () => import_types.EventStreamSerdeProvider, + EventStreamSignerProvider: () => import_types.EventStreamSignerProvider, + ExponentialBackoffJitterType: () => import_types.ExponentialBackoffJitterType, + ExponentialBackoffStrategyOptions: () => import_types.ExponentialBackoffStrategyOptions, + FinalizeHandler: () => 
import_types.FinalizeHandler, + FinalizeHandlerArguments: () => import_types.FinalizeHandlerArguments, + FinalizeHandlerOutput: () => import_types.FinalizeHandlerOutput, + FinalizeRequestHandlerOptions: () => import_types.FinalizeRequestHandlerOptions, + FinalizeRequestMiddleware: () => import_types.FinalizeRequestMiddleware, + FormattedEvent: () => import_types.FormattedEvent, + GetAwsChunkedEncodingStream: () => import_types.GetAwsChunkedEncodingStream, + GetAwsChunkedEncodingStreamOptions: () => import_types.GetAwsChunkedEncodingStreamOptions, + Handler: () => import_types.Handler, + HandlerExecutionContext: () => import_types.HandlerExecutionContext, + HandlerOptions: () => import_types.HandlerOptions, + Hash: () => import_types.Hash, + HashConstructor: () => import_types.HashConstructor, + HeaderBag: () => import_types.HeaderBag, + HostAddressType: () => HostAddressType, + HttpAuthDefinition: () => import_types.HttpAuthDefinition, + HttpAuthLocation: () => import_types.HttpAuthLocation, + HttpHandlerOptions: () => import_types.HttpHandlerOptions, + HttpMessage: () => import_types.HttpMessage, + HttpRequest: () => import_types.HttpRequest, + HttpResponse: () => import_types.HttpResponse, + Identity: () => import_types.Identity, + IniSection: () => import_types.IniSection, + InitializeHandler: () => import_types.InitializeHandler, + InitializeHandlerArguments: () => import_types.InitializeHandlerArguments, + InitializeHandlerOptions: () => import_types.InitializeHandlerOptions, + InitializeHandlerOutput: () => import_types.InitializeHandlerOutput, + InitializeMiddleware: () => import_types.InitializeMiddleware, + Int64: () => import_types.Int64, + IntegerHeaderValue: () => import_types.IntegerHeaderValue, + LongHeaderValue: () => import_types.LongHeaderValue, + MemoizedProvider: () => import_types.MemoizedProvider, + Message: () => import_types.Message, + MessageDecoder: () => import_types.MessageDecoder, + MessageEncoder: () => import_types.MessageEncoder, + 
MessageHeaderValue: () => import_types.MessageHeaderValue, + MessageHeaders: () => import_types.MessageHeaders, + MessageSigner: () => import_types.MessageSigner, + MetadataBearer: () => import_types.MetadataBearer, + MiddlewareStack: () => import_types.MiddlewareStack, + MiddlewareType: () => import_types.MiddlewareType, + PaginationConfiguration: () => import_types.PaginationConfiguration, + Paginator: () => import_types.Paginator, + ParsedIniData: () => import_types.ParsedIniData, + Pluggable: () => import_types.Pluggable, + Priority: () => import_types.Priority, + Profile: () => import_types.Profile, + Provider: () => import_types.Provider, + QueryParameterBag: () => import_types.QueryParameterBag, + RegionInfo: () => import_types.RegionInfo, + RegionInfoProvider: () => import_types.RegionInfoProvider, + RegionInfoProviderOptions: () => import_types.RegionInfoProviderOptions, + Relation: () => import_types.Relation, + RelativeLocation: () => import_types.RelativeLocation, + RelativeMiddlewareOptions: () => import_types.RelativeMiddlewareOptions, + RequestContext: () => import_types.RequestContext, + RequestHandler: () => import_types.RequestHandler, + RequestHandlerMetadata: () => import_types.RequestHandlerMetadata, + RequestHandlerOutput: () => import_types.RequestHandlerOutput, + RequestHandlerProtocol: () => import_types.RequestHandlerProtocol, + RequestPresigner: () => import_types.RequestPresigner, + RequestPresigningArguments: () => import_types.RequestPresigningArguments, + RequestSerializer: () => import_types.RequestSerializer, + RequestSigner: () => import_types.RequestSigner, + RequestSigningArguments: () => import_types.RequestSigningArguments, + ResponseDeserializer: () => import_types.ResponseDeserializer, + ResponseMetadata: () => import_types.ResponseMetadata, + RetryBackoffStrategy: () => import_types.RetryBackoffStrategy, + RetryErrorInfo: () => import_types.RetryErrorInfo, + RetryErrorType: () => import_types.RetryErrorType, + RetryStrategy: 
() => import_types.RetryStrategy, + RetryStrategyOptions: () => import_types.RetryStrategyOptions, + RetryStrategyV2: () => import_types.RetryStrategyV2, + RetryToken: () => import_types.RetryToken, + RetryableTrait: () => import_types.RetryableTrait, + SdkError: () => import_types.SdkError, + SdkStream: () => import_types.SdkStream, + SdkStreamMixin: () => import_types.SdkStreamMixin, + SdkStreamMixinInjector: () => import_types.SdkStreamMixinInjector, + SdkStreamSerdeContext: () => import_types.SdkStreamSerdeContext, + SerdeContext: () => import_types.SerdeContext, + SerializeHandler: () => import_types.SerializeHandler, + SerializeHandlerArguments: () => import_types.SerializeHandlerArguments, + SerializeHandlerOptions: () => import_types.SerializeHandlerOptions, + SerializeHandlerOutput: () => import_types.SerializeHandlerOutput, + SerializeMiddleware: () => import_types.SerializeMiddleware, + SharedConfigFiles: () => import_types.SharedConfigFiles, + ShortHeaderValue: () => import_types.ShortHeaderValue, + SignableMessage: () => import_types.SignableMessage, + SignedMessage: () => import_types.SignedMessage, + SigningArguments: () => import_types.SigningArguments, + SmithyException: () => import_types.SmithyException, + SourceData: () => import_types.SourceData, + StandardRetryBackoffStrategy: () => import_types.StandardRetryBackoffStrategy, + StandardRetryToken: () => import_types.StandardRetryToken, + Step: () => import_types.Step, + StreamCollector: () => import_types.StreamCollector, + StreamHasher: () => import_types.StreamHasher, + StringHeaderValue: () => import_types.StringHeaderValue, + StringSigner: () => import_types.StringSigner, + Terminalware: () => import_types.Terminalware, + TimestampHeaderValue: () => import_types.TimestampHeaderValue, + TokenIdentity: () => import_types.TokenIdentity, + TokenIdentityProvider: () => import_types.TokenIdentityProvider, + URI: () => import_types.URI, + UrlParser: () => import_types.UrlParser, + UserAgent: () => 
import_types.UserAgent, + UserAgentPair: () => import_types.UserAgentPair, + UuidHeaderValue: () => import_types.UuidHeaderValue, + WaiterConfiguration: () => import_types.WaiterConfiguration, + WithSdkStreamMixin: () => import_types.WithSdkStreamMixin, + randomValues: () => import_types.randomValues +}); +module.exports = __toCommonJS(index_exports); + +// src/abort.ts +var import_types = require("@smithy/types"); + +// src/auth.ts + + +// src/blob/blob-types.ts + + +// src/checksum.ts + + +// src/client.ts + + +// src/command.ts + + +// src/connection.ts + + +// src/crypto.ts + + +// src/dns.ts +var HostAddressType = /* @__PURE__ */ ((HostAddressType2) => { + HostAddressType2["AAAA"] = "AAAA"; + HostAddressType2["A"] = "A"; + return HostAddressType2; +})(HostAddressType || {}); + +// src/encode.ts + + +// src/endpoint.ts + + +// src/eventStream.ts + + +// src/http.ts + + +// src/identity/AwsCredentialIdentity.ts + + +// src/identity/Identity.ts + + +// src/identity/TokenIdentity.ts + + +// src/middleware.ts + + +// src/pagination.ts + + +// src/profile.ts + + +// src/response.ts + + +// src/retry.ts + + +// src/serde.ts + + +// src/shapes.ts + + +// src/signature.ts + + +// src/stream.ts + + +// src/transfer.ts + + +// src/uri.ts + + +// src/util.ts + + +// src/waiter.ts + +// Annotate the CommonJS export names for ESM import in node: + +0 && (module.exports = { + HttpAuthLocation, + HostAddressType, + EndpointURLScheme, + RequestHandlerProtocol +}); + diff --git a/amplify/functions/downloadDocument/node_modules/@aws-sdk/types/dist-es/abort.js b/amplify/functions/downloadDocument/node_modules/@aws-sdk/types/dist-es/abort.js new file mode 100644 index 0000000..cb0ff5c --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@aws-sdk/types/dist-es/abort.js @@ -0,0 +1 @@ +export {}; diff --git a/amplify/functions/downloadDocument/node_modules/@aws-sdk/types/dist-es/auth.js b/amplify/functions/downloadDocument/node_modules/@aws-sdk/types/dist-es/auth.js 
new file mode 100644 index 0000000..81f903b --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@aws-sdk/types/dist-es/auth.js @@ -0,0 +1 @@ +export { HttpAuthLocation } from "@smithy/types"; diff --git a/amplify/functions/downloadDocument/node_modules/@aws-sdk/types/dist-es/blob/blob-types.js b/amplify/functions/downloadDocument/node_modules/@aws-sdk/types/dist-es/blob/blob-types.js new file mode 100644 index 0000000..cb0ff5c --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@aws-sdk/types/dist-es/blob/blob-types.js @@ -0,0 +1 @@ +export {}; diff --git a/amplify/functions/downloadDocument/node_modules/@aws-sdk/types/dist-es/checksum.js b/amplify/functions/downloadDocument/node_modules/@aws-sdk/types/dist-es/checksum.js new file mode 100644 index 0000000..cb0ff5c --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@aws-sdk/types/dist-es/checksum.js @@ -0,0 +1 @@ +export {}; diff --git a/amplify/functions/downloadDocument/node_modules/@aws-sdk/types/dist-es/client.js b/amplify/functions/downloadDocument/node_modules/@aws-sdk/types/dist-es/client.js new file mode 100644 index 0000000..cb0ff5c --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@aws-sdk/types/dist-es/client.js @@ -0,0 +1 @@ +export {}; diff --git a/amplify/functions/downloadDocument/node_modules/@aws-sdk/types/dist-es/command.js b/amplify/functions/downloadDocument/node_modules/@aws-sdk/types/dist-es/command.js new file mode 100644 index 0000000..cb0ff5c --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@aws-sdk/types/dist-es/command.js @@ -0,0 +1 @@ +export {}; diff --git a/amplify/functions/downloadDocument/node_modules/@aws-sdk/types/dist-es/connection.js b/amplify/functions/downloadDocument/node_modules/@aws-sdk/types/dist-es/connection.js new file mode 100644 index 0000000..cb0ff5c --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@aws-sdk/types/dist-es/connection.js @@ -0,0 +1 @@ +export {}; 
diff --git a/amplify/functions/downloadDocument/node_modules/@aws-sdk/types/dist-es/credentials.js b/amplify/functions/downloadDocument/node_modules/@aws-sdk/types/dist-es/credentials.js new file mode 100644 index 0000000..cb0ff5c --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@aws-sdk/types/dist-es/credentials.js @@ -0,0 +1 @@ +export {}; diff --git a/amplify/functions/downloadDocument/node_modules/@aws-sdk/types/dist-es/crypto.js b/amplify/functions/downloadDocument/node_modules/@aws-sdk/types/dist-es/crypto.js new file mode 100644 index 0000000..cb0ff5c --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@aws-sdk/types/dist-es/crypto.js @@ -0,0 +1 @@ +export {}; diff --git a/amplify/functions/downloadDocument/node_modules/@aws-sdk/types/dist-es/dns.js b/amplify/functions/downloadDocument/node_modules/@aws-sdk/types/dist-es/dns.js new file mode 100644 index 0000000..c6a2cd9 --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@aws-sdk/types/dist-es/dns.js @@ -0,0 +1,5 @@ +export var HostAddressType; +(function (HostAddressType) { + HostAddressType["AAAA"] = "AAAA"; + HostAddressType["A"] = "A"; +})(HostAddressType || (HostAddressType = {})); diff --git a/amplify/functions/downloadDocument/node_modules/@aws-sdk/types/dist-es/encode.js b/amplify/functions/downloadDocument/node_modules/@aws-sdk/types/dist-es/encode.js new file mode 100644 index 0000000..cb0ff5c --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@aws-sdk/types/dist-es/encode.js @@ -0,0 +1 @@ +export {}; diff --git a/amplify/functions/downloadDocument/node_modules/@aws-sdk/types/dist-es/endpoint.js b/amplify/functions/downloadDocument/node_modules/@aws-sdk/types/dist-es/endpoint.js new file mode 100644 index 0000000..ec53acc --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@aws-sdk/types/dist-es/endpoint.js @@ -0,0 +1 @@ +export { EndpointURLScheme, } from "@smithy/types"; diff --git 
a/amplify/functions/downloadDocument/node_modules/@aws-sdk/types/dist-es/eventStream.js b/amplify/functions/downloadDocument/node_modules/@aws-sdk/types/dist-es/eventStream.js new file mode 100644 index 0000000..cb0ff5c --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@aws-sdk/types/dist-es/eventStream.js @@ -0,0 +1 @@ +export {}; diff --git a/amplify/functions/downloadDocument/node_modules/@aws-sdk/types/dist-es/extensions/index.js b/amplify/functions/downloadDocument/node_modules/@aws-sdk/types/dist-es/extensions/index.js new file mode 100644 index 0000000..cb0ff5c --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@aws-sdk/types/dist-es/extensions/index.js @@ -0,0 +1 @@ +export {}; diff --git a/amplify/functions/downloadDocument/node_modules/@aws-sdk/types/dist-es/feature-ids.js b/amplify/functions/downloadDocument/node_modules/@aws-sdk/types/dist-es/feature-ids.js new file mode 100644 index 0000000..cb0ff5c --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@aws-sdk/types/dist-es/feature-ids.js @@ -0,0 +1 @@ +export {}; diff --git a/amplify/functions/downloadDocument/node_modules/@aws-sdk/types/dist-es/function.js b/amplify/functions/downloadDocument/node_modules/@aws-sdk/types/dist-es/function.js new file mode 100644 index 0000000..cb0ff5c --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@aws-sdk/types/dist-es/function.js @@ -0,0 +1 @@ +export {}; diff --git a/amplify/functions/downloadDocument/node_modules/@aws-sdk/types/dist-es/http.js b/amplify/functions/downloadDocument/node_modules/@aws-sdk/types/dist-es/http.js new file mode 100644 index 0000000..cb0ff5c --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@aws-sdk/types/dist-es/http.js @@ -0,0 +1 @@ +export {}; diff --git a/amplify/functions/downloadDocument/node_modules/@aws-sdk/types/dist-es/identity/AnonymousIdentity.js 
b/amplify/functions/downloadDocument/node_modules/@aws-sdk/types/dist-es/identity/AnonymousIdentity.js new file mode 100644 index 0000000..cb0ff5c --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@aws-sdk/types/dist-es/identity/AnonymousIdentity.js @@ -0,0 +1 @@ +export {}; diff --git a/amplify/functions/downloadDocument/node_modules/@aws-sdk/types/dist-es/identity/AwsCredentialIdentity.js b/amplify/functions/downloadDocument/node_modules/@aws-sdk/types/dist-es/identity/AwsCredentialIdentity.js new file mode 100644 index 0000000..cb0ff5c --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@aws-sdk/types/dist-es/identity/AwsCredentialIdentity.js @@ -0,0 +1 @@ +export {}; diff --git a/amplify/functions/downloadDocument/node_modules/@aws-sdk/types/dist-es/identity/Identity.js b/amplify/functions/downloadDocument/node_modules/@aws-sdk/types/dist-es/identity/Identity.js new file mode 100644 index 0000000..cb0ff5c --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@aws-sdk/types/dist-es/identity/Identity.js @@ -0,0 +1 @@ +export {}; diff --git a/amplify/functions/downloadDocument/node_modules/@aws-sdk/types/dist-es/identity/LoginIdentity.js b/amplify/functions/downloadDocument/node_modules/@aws-sdk/types/dist-es/identity/LoginIdentity.js new file mode 100644 index 0000000..cb0ff5c --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@aws-sdk/types/dist-es/identity/LoginIdentity.js @@ -0,0 +1 @@ +export {}; diff --git a/amplify/functions/downloadDocument/node_modules/@aws-sdk/types/dist-es/identity/TokenIdentity.js b/amplify/functions/downloadDocument/node_modules/@aws-sdk/types/dist-es/identity/TokenIdentity.js new file mode 100644 index 0000000..cb0ff5c --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@aws-sdk/types/dist-es/identity/TokenIdentity.js @@ -0,0 +1 @@ +export {}; diff --git a/amplify/functions/downloadDocument/node_modules/@aws-sdk/types/dist-es/identity/index.js 
b/amplify/functions/downloadDocument/node_modules/@aws-sdk/types/dist-es/identity/index.js new file mode 100644 index 0000000..863e78e --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@aws-sdk/types/dist-es/identity/index.js @@ -0,0 +1,5 @@ +export * from "./AnonymousIdentity"; +export * from "./AwsCredentialIdentity"; +export * from "./Identity"; +export * from "./LoginIdentity"; +export * from "./TokenIdentity"; diff --git a/amplify/functions/downloadDocument/node_modules/@aws-sdk/types/dist-es/index.js b/amplify/functions/downloadDocument/node_modules/@aws-sdk/types/dist-es/index.js new file mode 100644 index 0000000..a7f99d9 --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@aws-sdk/types/dist-es/index.js @@ -0,0 +1,34 @@ +export * from "./abort"; +export * from "./auth"; +export * from "./blob/blob-types"; +export * from "./checksum"; +export * from "./client"; +export * from "./command"; +export * from "./connection"; +export * from "./credentials"; +export * from "./crypto"; +export * from "./dns"; +export * from "./encode"; +export * from "./endpoint"; +export * from "./eventStream"; +export * from "./extensions"; +export * from "./feature-ids"; +export * from "./function"; +export * from "./http"; +export * from "./identity"; +export * from "./logger"; +export * from "./middleware"; +export * from "./pagination"; +export * from "./profile"; +export * from "./request"; +export * from "./response"; +export * from "./retry"; +export * from "./serde"; +export * from "./shapes"; +export * from "./signature"; +export * from "./stream"; +export * from "./token"; +export * from "./transfer"; +export * from "./uri"; +export * from "./util"; +export * from "./waiter"; diff --git a/amplify/functions/downloadDocument/node_modules/@aws-sdk/types/dist-es/logger.js b/amplify/functions/downloadDocument/node_modules/@aws-sdk/types/dist-es/logger.js new file mode 100644 index 0000000..cb0ff5c --- /dev/null +++ 
b/amplify/functions/downloadDocument/node_modules/@aws-sdk/types/dist-es/logger.js @@ -0,0 +1 @@ +export {}; diff --git a/amplify/functions/downloadDocument/node_modules/@aws-sdk/types/dist-es/middleware.js b/amplify/functions/downloadDocument/node_modules/@aws-sdk/types/dist-es/middleware.js new file mode 100644 index 0000000..cb0ff5c --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@aws-sdk/types/dist-es/middleware.js @@ -0,0 +1 @@ +export {}; diff --git a/amplify/functions/downloadDocument/node_modules/@aws-sdk/types/dist-es/pagination.js b/amplify/functions/downloadDocument/node_modules/@aws-sdk/types/dist-es/pagination.js new file mode 100644 index 0000000..cb0ff5c --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@aws-sdk/types/dist-es/pagination.js @@ -0,0 +1 @@ +export {}; diff --git a/amplify/functions/downloadDocument/node_modules/@aws-sdk/types/dist-es/profile.js b/amplify/functions/downloadDocument/node_modules/@aws-sdk/types/dist-es/profile.js new file mode 100644 index 0000000..cb0ff5c --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@aws-sdk/types/dist-es/profile.js @@ -0,0 +1 @@ +export {}; diff --git a/amplify/functions/downloadDocument/node_modules/@aws-sdk/types/dist-es/request.js b/amplify/functions/downloadDocument/node_modules/@aws-sdk/types/dist-es/request.js new file mode 100644 index 0000000..cb0ff5c --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@aws-sdk/types/dist-es/request.js @@ -0,0 +1 @@ +export {}; diff --git a/amplify/functions/downloadDocument/node_modules/@aws-sdk/types/dist-es/response.js b/amplify/functions/downloadDocument/node_modules/@aws-sdk/types/dist-es/response.js new file mode 100644 index 0000000..cb0ff5c --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@aws-sdk/types/dist-es/response.js @@ -0,0 +1 @@ +export {}; diff --git a/amplify/functions/downloadDocument/node_modules/@aws-sdk/types/dist-es/retry.js 
b/amplify/functions/downloadDocument/node_modules/@aws-sdk/types/dist-es/retry.js new file mode 100644 index 0000000..cb0ff5c --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@aws-sdk/types/dist-es/retry.js @@ -0,0 +1 @@ +export {}; diff --git a/amplify/functions/downloadDocument/node_modules/@aws-sdk/types/dist-es/serde.js b/amplify/functions/downloadDocument/node_modules/@aws-sdk/types/dist-es/serde.js new file mode 100644 index 0000000..cb0ff5c --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@aws-sdk/types/dist-es/serde.js @@ -0,0 +1 @@ +export {}; diff --git a/amplify/functions/downloadDocument/node_modules/@aws-sdk/types/dist-es/shapes.js b/amplify/functions/downloadDocument/node_modules/@aws-sdk/types/dist-es/shapes.js new file mode 100644 index 0000000..cb0ff5c --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@aws-sdk/types/dist-es/shapes.js @@ -0,0 +1 @@ +export {}; diff --git a/amplify/functions/downloadDocument/node_modules/@aws-sdk/types/dist-es/signature.js b/amplify/functions/downloadDocument/node_modules/@aws-sdk/types/dist-es/signature.js new file mode 100644 index 0000000..cb0ff5c --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@aws-sdk/types/dist-es/signature.js @@ -0,0 +1 @@ +export {}; diff --git a/amplify/functions/downloadDocument/node_modules/@aws-sdk/types/dist-es/stream.js b/amplify/functions/downloadDocument/node_modules/@aws-sdk/types/dist-es/stream.js new file mode 100644 index 0000000..cb0ff5c --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@aws-sdk/types/dist-es/stream.js @@ -0,0 +1 @@ +export {}; diff --git a/amplify/functions/downloadDocument/node_modules/@aws-sdk/types/dist-es/token.js b/amplify/functions/downloadDocument/node_modules/@aws-sdk/types/dist-es/token.js new file mode 100644 index 0000000..cb0ff5c --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@aws-sdk/types/dist-es/token.js @@ -0,0 +1 @@ +export {}; diff 
--git a/amplify/functions/downloadDocument/node_modules/@aws-sdk/types/dist-es/transfer.js b/amplify/functions/downloadDocument/node_modules/@aws-sdk/types/dist-es/transfer.js new file mode 100644 index 0000000..ba57589 --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@aws-sdk/types/dist-es/transfer.js @@ -0,0 +1 @@ +export { RequestHandlerProtocol, } from "@smithy/types"; diff --git a/amplify/functions/downloadDocument/node_modules/@aws-sdk/types/dist-es/uri.js b/amplify/functions/downloadDocument/node_modules/@aws-sdk/types/dist-es/uri.js new file mode 100644 index 0000000..cb0ff5c --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@aws-sdk/types/dist-es/uri.js @@ -0,0 +1 @@ +export {}; diff --git a/amplify/functions/downloadDocument/node_modules/@aws-sdk/types/dist-es/util.js b/amplify/functions/downloadDocument/node_modules/@aws-sdk/types/dist-es/util.js new file mode 100644 index 0000000..cb0ff5c --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@aws-sdk/types/dist-es/util.js @@ -0,0 +1 @@ +export {}; diff --git a/amplify/functions/downloadDocument/node_modules/@aws-sdk/types/dist-es/waiter.js b/amplify/functions/downloadDocument/node_modules/@aws-sdk/types/dist-es/waiter.js new file mode 100644 index 0000000..cb0ff5c --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@aws-sdk/types/dist-es/waiter.js @@ -0,0 +1 @@ +export {}; diff --git a/amplify/functions/downloadDocument/node_modules/@aws-sdk/types/dist-types/abort.d.ts b/amplify/functions/downloadDocument/node_modules/@aws-sdk/types/dist-types/abort.d.ts new file mode 100644 index 0000000..dad6079 --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@aws-sdk/types/dist-types/abort.d.ts @@ -0,0 +1 @@ +export { AbortController, AbortHandler, AbortSignal } from "@smithy/types"; diff --git a/amplify/functions/downloadDocument/node_modules/@aws-sdk/types/dist-types/auth.d.ts 
b/amplify/functions/downloadDocument/node_modules/@aws-sdk/types/dist-types/auth.d.ts new file mode 100644 index 0000000..6626c16 --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@aws-sdk/types/dist-types/auth.d.ts @@ -0,0 +1 @@ +export { AuthScheme, HttpAuthDefinition, HttpAuthLocation } from "@smithy/types"; diff --git a/amplify/functions/downloadDocument/node_modules/@aws-sdk/types/dist-types/blob/blob-types.d.ts b/amplify/functions/downloadDocument/node_modules/@aws-sdk/types/dist-types/blob/blob-types.d.ts new file mode 100644 index 0000000..fedb3d5 --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@aws-sdk/types/dist-types/blob/blob-types.d.ts @@ -0,0 +1,2 @@ +import { BlobTypes } from '@smithy/types'; +export { BlobTypes }; diff --git a/amplify/functions/downloadDocument/node_modules/@aws-sdk/types/dist-types/checksum.d.ts b/amplify/functions/downloadDocument/node_modules/@aws-sdk/types/dist-types/checksum.d.ts new file mode 100644 index 0000000..f805d72 --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@aws-sdk/types/dist-types/checksum.d.ts @@ -0,0 +1 @@ +export { Checksum, ChecksumConstructor } from "@smithy/types"; diff --git a/amplify/functions/downloadDocument/node_modules/@aws-sdk/types/dist-types/client.d.ts b/amplify/functions/downloadDocument/node_modules/@aws-sdk/types/dist-types/client.d.ts new file mode 100644 index 0000000..d6b3dcf --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@aws-sdk/types/dist-types/client.d.ts @@ -0,0 +1 @@ +export { Client } from "@smithy/types"; diff --git a/amplify/functions/downloadDocument/node_modules/@aws-sdk/types/dist-types/command.d.ts b/amplify/functions/downloadDocument/node_modules/@aws-sdk/types/dist-types/command.d.ts new file mode 100644 index 0000000..3887267 --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@aws-sdk/types/dist-types/command.d.ts @@ -0,0 +1 @@ +export { Command } from "@smithy/types"; diff --git 
a/amplify/functions/downloadDocument/node_modules/@aws-sdk/types/dist-types/connection.d.ts b/amplify/functions/downloadDocument/node_modules/@aws-sdk/types/dist-types/connection.d.ts new file mode 100644 index 0000000..efcb4d7 --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@aws-sdk/types/dist-types/connection.d.ts @@ -0,0 +1 @@ +export { ConnectConfiguration, ConnectionManager, ConnectionManagerConfiguration, ConnectionPool } from "@smithy/types"; diff --git a/amplify/functions/downloadDocument/node_modules/@aws-sdk/types/dist-types/credentials.d.ts b/amplify/functions/downloadDocument/node_modules/@aws-sdk/types/dist-types/credentials.d.ts new file mode 100644 index 0000000..181bf8b --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@aws-sdk/types/dist-types/credentials.d.ts @@ -0,0 +1,50 @@ +import { Logger } from "@smithy/types"; +import { AwsCredentialIdentity } from "./identity"; +import { Provider } from "./util"; +/** + * @public + * + * An object representing temporary or permanent AWS credentials. + * + * @deprecated Use {@link AwsCredentialIdentity} + */ +export interface Credentials extends AwsCredentialIdentity { +} +/** + * @public + * + * @deprecated Use {@link AwsCredentialIdentityProvider} + */ +export type CredentialProvider = Provider; +/** + * @public + * + * Common options for credential providers. + */ +export type CredentialProviderOptions = { + /** + * This logger is only used to provide information + * on what credential providers were used during resolution. + * + * It does not log credentials. + */ + logger?: Logger; + /** + * Present if the credential provider was created by calling + * the defaultCredentialProvider in a client's middleware, having + * access to the client's config. + * + * The region of that parent or outer client is important because + * an inner client used by the credential provider may need + * to match its default partition or region with that of + * the outer client. 
+ * + * @internal + * @deprecated - not truly deprecated, marked as a warning to not use this. + */ + parentClientConfig?: { + region?: string | Provider; + profile?: string; + [key: string]: unknown; + }; +}; diff --git a/amplify/functions/downloadDocument/node_modules/@aws-sdk/types/dist-types/crypto.d.ts b/amplify/functions/downloadDocument/node_modules/@aws-sdk/types/dist-types/crypto.d.ts new file mode 100644 index 0000000..aeeea50 --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@aws-sdk/types/dist-types/crypto.d.ts @@ -0,0 +1 @@ +export { Hash, HashConstructor, StreamHasher, randomValues, SourceData } from "@smithy/types"; diff --git a/amplify/functions/downloadDocument/node_modules/@aws-sdk/types/dist-types/dns.d.ts b/amplify/functions/downloadDocument/node_modules/@aws-sdk/types/dist-types/dns.d.ts new file mode 100644 index 0000000..8348cc4 --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@aws-sdk/types/dist-types/dns.d.ts @@ -0,0 +1,85 @@ +/** + * @public + * + * DNS record types + */ +export declare enum HostAddressType { + /** + * IPv6 + */ + AAAA = "AAAA", + /** + * IPv4 + */ + A = "A" +} +/** + * @public + */ +export interface HostAddress { + /** + * The {@link HostAddressType} of the host address. + */ + addressType: HostAddressType; + /** + * The resolved numerical address represented as a + * string. + */ + address: string; + /** + * The host name the {@link address} was resolved from. + */ + hostName: string; + /** + * The service record of {@link hostName}. + */ + service?: string; +} +/** + * @public + */ +export interface HostResolverArguments { + /** + * The host name to resolve. + */ + hostName: string; + /** + * The service record of {@link hostName}. 
+ */ + service?: string; +} +/** + * @public + * + * Host Resolver interface for DNS queries + */ +export interface HostResolver { + /** + * Resolves the address(es) for {@link HostResolverArguments} and returns a + * list of addresses with (most likely) two addresses, one {@link HostAddressType.AAAA} + * and one {@link HostAddressType.A}. Calls to this function will likely alter + * the cache (if implemented) so that if there's multiple addresses, a different + * set will be returned on the next call. + * In the case of multi-answer, still only a maximum of two records should be + * returned. The resolver implementation is responsible for caching and rotation + * of the multiple addresses that get returned. + * Implementations don't have to explictly call getaddrinfo(), they can use + * high level abstractions provided in their language runtimes/libraries. + * @param args - arguments with host name query addresses for + * @returns promise with a list of {@link HostAddress} + */ + resolveAddress(args: HostResolverArguments): Promise; + /** + * Reports a failure on a {@link HostAddress} so that the cache (if implemented) + * can accomodate the failure and likely not return the address until it recovers. + * @param addr - host address to report a failure on + */ + reportFailureOnAddress(addr: HostAddress): void; + /** + * Empties the cache (if implemented) for a {@link HostResolverArguments.hostName}. + * If {@link HostResolverArguments.hostName} is not provided, the cache (if + * implemented) is emptied for all host names. 
+ * @param args - optional arguments to empty the cache for + */ + purgeCache(args?: HostResolverArguments): void; +} diff --git a/amplify/functions/downloadDocument/node_modules/@aws-sdk/types/dist-types/encode.d.ts b/amplify/functions/downloadDocument/node_modules/@aws-sdk/types/dist-types/encode.d.ts new file mode 100644 index 0000000..128ee57 --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@aws-sdk/types/dist-types/encode.d.ts @@ -0,0 +1 @@ +export { MessageDecoder, MessageEncoder, AvailableMessage, AvailableMessages } from "@smithy/types"; diff --git a/amplify/functions/downloadDocument/node_modules/@aws-sdk/types/dist-types/endpoint.d.ts b/amplify/functions/downloadDocument/node_modules/@aws-sdk/types/dist-types/endpoint.d.ts new file mode 100644 index 0000000..f2ffaf5 --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@aws-sdk/types/dist-types/endpoint.d.ts @@ -0,0 +1 @@ +export { EndpointARN, EndpointPartition, EndpointURLScheme, EndpointURL, EndpointObjectProperty, EndpointV2, EndpointParameters, } from "@smithy/types"; diff --git a/amplify/functions/downloadDocument/node_modules/@aws-sdk/types/dist-types/eventStream.d.ts b/amplify/functions/downloadDocument/node_modules/@aws-sdk/types/dist-types/eventStream.d.ts new file mode 100644 index 0000000..cee02f7 --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@aws-sdk/types/dist-types/eventStream.d.ts @@ -0,0 +1 @@ +export { Message, MessageHeaders, BooleanHeaderValue, ByteHeaderValue, ShortHeaderValue, IntegerHeaderValue, LongHeaderValue, BinaryHeaderValue, StringHeaderValue, TimestampHeaderValue, UuidHeaderValue, MessageHeaderValue, Int64, EventStreamSerdeContext, EventStreamMarshaller, EventStreamMarshallerDeserFn, EventStreamMarshallerSerFn, EventStreamPayloadHandler, EventStreamPayloadHandlerProvider, EventStreamRequestSigner, EventStreamSerdeProvider, EventStreamSignerProvider, } from "@smithy/types"; diff --git 
a/amplify/functions/downloadDocument/node_modules/@aws-sdk/types/dist-types/extensions/index.d.ts b/amplify/functions/downloadDocument/node_modules/@aws-sdk/types/dist-types/extensions/index.d.ts new file mode 100644 index 0000000..5a45bcb --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@aws-sdk/types/dist-types/extensions/index.d.ts @@ -0,0 +1,8 @@ +import { Provider } from "@smithy/types"; +/** + * @internal + */ +export interface AwsRegionExtensionConfiguration { + setRegion(region: Provider): void; + region(): Provider; +} diff --git a/amplify/functions/downloadDocument/node_modules/@aws-sdk/types/dist-types/feature-ids.d.ts b/amplify/functions/downloadDocument/node_modules/@aws-sdk/types/dist-types/feature-ids.d.ts new file mode 100644 index 0000000..f1679fa --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@aws-sdk/types/dist-types/feature-ids.d.ts @@ -0,0 +1,59 @@ +/** + * @internal + */ +export type AwsSdkFeatures = Partial<{ + RESOURCE_MODEL: "A"; + WAITER: "B"; + PAGINATOR: "C"; + RETRY_MODE_LEGACY: "D"; + RETRY_MODE_STANDARD: "E"; + RETRY_MODE_ADAPTIVE: "F"; + S3_EXPRESS_BUCKET: "J"; + S3_ACCESS_GRANTS: "K"; + GZIP_REQUEST_COMPRESSION: "L"; + PROTOCOL_RPC_V2_CBOR: "M"; + ENDPOINT_OVERRIDE: "N"; + ACCOUNT_ID_ENDPOINT: "O"; + ACCOUNT_ID_MODE_PREFERRED: "P"; + ACCOUNT_ID_MODE_DISABLED: "Q"; + ACCOUNT_ID_MODE_REQUIRED: "R"; + SIGV4A_SIGNING: "S"; + FLEXIBLE_CHECKSUMS_REQ_CRC32: "U"; + FLEXIBLE_CHECKSUMS_REQ_CRC32C: "V"; + FLEXIBLE_CHECKSUMS_REQ_CRC64: "W"; + FLEXIBLE_CHECKSUMS_REQ_SHA1: "X"; + FLEXIBLE_CHECKSUMS_REQ_SHA256: "Y"; + FLEXIBLE_CHECKSUMS_REQ_WHEN_SUPPORTED: "Z"; + FLEXIBLE_CHECKSUMS_REQ_WHEN_REQUIRED: "a"; + FLEXIBLE_CHECKSUMS_RES_WHEN_SUPPORTED: "b"; + FLEXIBLE_CHECKSUMS_RES_WHEN_REQUIRED: "c"; + DDB_MAPPER: "d"; +}> & AwsSdkCredentialsFeatures; +/** + * @internal + */ +export type AwsSdkCredentialsFeatures = Partial<{ + RESOLVED_ACCOUNT_ID: "T"; + CREDENTIALS_CODE: "e"; + CREDENTIALS_ENV_VARS: "g"; + 
CREDENTIALS_ENV_VARS_STS_WEB_ID_TOKEN: "h"; + CREDENTIALS_STS_ASSUME_ROLE: "i"; + CREDENTIALS_STS_ASSUME_ROLE_SAML: "j"; + CREDENTIALS_STS_ASSUME_ROLE_WEB_ID: "k"; + CREDENTIALS_STS_FEDERATION_TOKEN: "l"; + CREDENTIALS_STS_SESSION_TOKEN: "m"; + CREDENTIALS_PROFILE: "n"; + CREDENTIALS_PROFILE_SOURCE_PROFILE: "o"; + CREDENTIALS_PROFILE_NAMED_PROVIDER: "p"; + CREDENTIALS_PROFILE_STS_WEB_ID_TOKEN: "q"; + CREDENTIALS_PROFILE_SSO: "r"; + CREDENTIALS_SSO: "s"; + CREDENTIALS_PROFILE_SSO_LEGACY: "t"; + CREDENTIALS_SSO_LEGACY: "u"; + CREDENTIALS_PROFILE_PROCESS: "v"; + CREDENTIALS_PROCESS: "w"; + CREDENTIALS_BOTO2_CONFIG_FILE: "x"; + CREDENTIALS_AWS_SDK_STORE: "y"; + CREDENTIALS_HTTP: "z"; + CREDENTIALS_IMDS: "0"; +}>; diff --git a/amplify/functions/downloadDocument/node_modules/@aws-sdk/types/dist-types/function.d.ts b/amplify/functions/downloadDocument/node_modules/@aws-sdk/types/dist-types/function.d.ts new file mode 100644 index 0000000..3c777fa --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@aws-sdk/types/dist-types/function.d.ts @@ -0,0 +1,7 @@ +/** + * Resolves a function that accepts both the object argument fields of F1 and F2. + * The function returns an intersection of what F1 and F2 return. + * + * @public + */ +export type MergeFunctions = F1 extends (arg: infer A1) => infer R1 ? F2 extends (arg: infer A2) => infer R2 ? R1 extends Promise ? 
(arg?: A1 & A2) => Promise & Awaited> : (arg?: A1 & A2) => R1 & R2 : never : never; diff --git a/amplify/functions/downloadDocument/node_modules/@aws-sdk/types/dist-types/http.d.ts b/amplify/functions/downloadDocument/node_modules/@aws-sdk/types/dist-types/http.d.ts new file mode 100644 index 0000000..7594b5a --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@aws-sdk/types/dist-types/http.d.ts @@ -0,0 +1,33 @@ +import { HttpResponse } from "@smithy/types"; +export { Endpoint, HeaderBag, HttpHandlerOptions, HttpMessage, HttpRequest, HttpResponse, QueryParameterBag, } from "@smithy/types"; +/** + * @public + * + * A collection of key/value pairs with case-insensitive keys. + */ +export interface Headers extends Map { + /** + * Returns a new instance of Headers with the specified header set to the + * provided value. Does not modify the original Headers instance. + * + * @param headerName - The name of the header to add or overwrite + * @param headerValue - The value to which the header should be set + */ + withHeader(headerName: string, headerValue: string): Headers; + /** + * Returns a new instance of Headers without the specified header. Does not + * modify the original Headers instance. + * + * @param headerName - The name of the header to remove + */ + withoutHeader(headerName: string): Headers; +} +/** + * @public + * + * Represents HTTP message whose body has been resolved to a string. This is + * used in parsing http message. 
+ */ +export interface ResolvedHttpResponse extends HttpResponse { + body: string; +} diff --git a/amplify/functions/downloadDocument/node_modules/@aws-sdk/types/dist-types/identity/AnonymousIdentity.d.ts b/amplify/functions/downloadDocument/node_modules/@aws-sdk/types/dist-types/identity/AnonymousIdentity.d.ts new file mode 100644 index 0000000..c7006e3 --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@aws-sdk/types/dist-types/identity/AnonymousIdentity.d.ts @@ -0,0 +1,6 @@ +import { Identity } from "./Identity"; +/** + * @public + */ +export interface AnonymousIdentity extends Identity { +} diff --git a/amplify/functions/downloadDocument/node_modules/@aws-sdk/types/dist-types/identity/AwsCredentialIdentity.d.ts b/amplify/functions/downloadDocument/node_modules/@aws-sdk/types/dist-types/identity/AwsCredentialIdentity.d.ts new file mode 100644 index 0000000..c94b6c4 --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@aws-sdk/types/dist-types/identity/AwsCredentialIdentity.d.ts @@ -0,0 +1,60 @@ +import type { AwsCredentialIdentity, AwsCredentialIdentityProvider, Logger, RequestHandler } from "@smithy/types"; +import type { AwsSdkCredentialsFeatures } from "../feature-ids"; +export { AwsCredentialIdentity, AwsCredentialIdentityProvider, IdentityProvider } from "@smithy/types"; +/** + * @public + */ +export interface AwsIdentityProperties { + /** + * These are resolved client config values, and may be async providers. + */ + callerClientConfig?: { + /** + * It is likely a programming error if you use + * the caller client config credentials in a credential provider, since + * it will recurse. + * + * @deprecated do not use. + */ + credentials?: AwsCredentialIdentity | AwsCredentialIdentityProvider; + /** + * @internal + * @deprecated minimize use. 
+ */ + credentialDefaultProvider?: (input?: any) => AwsCredentialIdentityProvider; + logger?: Logger; + profile?: string; + region(): Promise; + requestHandler?: RequestHandler; + }; +} +/** + * @public + * + * Variation of {@link IdentityProvider} which accepts a contextual + * client configuration that includes an AWS region and potentially other + * configurable fields. + * + * Used to link a credential provider to a client if it is being called + * in the context of a client. + */ +export type RuntimeConfigIdentityProvider = (awsIdentityProperties?: AwsIdentityProperties) => Promise; +/** + * @public + * + * Variation of {@link AwsCredentialIdentityProvider} which accepts a contextual + * client configuration that includes an AWS region and potentially other + * configurable fields. + * + * Used to link a credential provider to a client if it is being called + * in the context of a client. + */ +export type RuntimeConfigAwsCredentialIdentityProvider = RuntimeConfigIdentityProvider; +/** + * @public + * + * AwsCredentialIdentity with source attribution metadata. 
+ */ +export type AttributedAwsCredentialIdentity = AwsCredentialIdentity & { + $source?: AwsSdkCredentialsFeatures; +}; diff --git a/amplify/functions/downloadDocument/node_modules/@aws-sdk/types/dist-types/identity/Identity.d.ts b/amplify/functions/downloadDocument/node_modules/@aws-sdk/types/dist-types/identity/Identity.d.ts new file mode 100644 index 0000000..4175fd3 --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@aws-sdk/types/dist-types/identity/Identity.d.ts @@ -0,0 +1 @@ +export { Identity, IdentityProvider } from "@smithy/types"; diff --git a/amplify/functions/downloadDocument/node_modules/@aws-sdk/types/dist-types/identity/LoginIdentity.d.ts b/amplify/functions/downloadDocument/node_modules/@aws-sdk/types/dist-types/identity/LoginIdentity.d.ts new file mode 100644 index 0000000..13793f9 --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@aws-sdk/types/dist-types/identity/LoginIdentity.d.ts @@ -0,0 +1,18 @@ +import { Identity, IdentityProvider } from "./Identity"; +/** + * @public + */ +export interface LoginIdentity extends Identity { + /** + * Identity username + */ + readonly username: string; + /** + * Identity password + */ + readonly password: string; +} +/** + * @public + */ +export type LoginIdentityProvider = IdentityProvider; diff --git a/amplify/functions/downloadDocument/node_modules/@aws-sdk/types/dist-types/identity/TokenIdentity.d.ts b/amplify/functions/downloadDocument/node_modules/@aws-sdk/types/dist-types/identity/TokenIdentity.d.ts new file mode 100644 index 0000000..66301bc --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@aws-sdk/types/dist-types/identity/TokenIdentity.d.ts @@ -0,0 +1 @@ +export { TokenIdentity, TokenIdentityProvider } from "@smithy/types"; diff --git a/amplify/functions/downloadDocument/node_modules/@aws-sdk/types/dist-types/identity/index.d.ts b/amplify/functions/downloadDocument/node_modules/@aws-sdk/types/dist-types/identity/index.d.ts new file mode 100644 
index 0000000..863e78e --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@aws-sdk/types/dist-types/identity/index.d.ts @@ -0,0 +1,5 @@ +export * from "./AnonymousIdentity"; +export * from "./AwsCredentialIdentity"; +export * from "./Identity"; +export * from "./LoginIdentity"; +export * from "./TokenIdentity"; diff --git a/amplify/functions/downloadDocument/node_modules/@aws-sdk/types/dist-types/index.d.ts b/amplify/functions/downloadDocument/node_modules/@aws-sdk/types/dist-types/index.d.ts new file mode 100644 index 0000000..a7f99d9 --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@aws-sdk/types/dist-types/index.d.ts @@ -0,0 +1,34 @@ +export * from "./abort"; +export * from "./auth"; +export * from "./blob/blob-types"; +export * from "./checksum"; +export * from "./client"; +export * from "./command"; +export * from "./connection"; +export * from "./credentials"; +export * from "./crypto"; +export * from "./dns"; +export * from "./encode"; +export * from "./endpoint"; +export * from "./eventStream"; +export * from "./extensions"; +export * from "./feature-ids"; +export * from "./function"; +export * from "./http"; +export * from "./identity"; +export * from "./logger"; +export * from "./middleware"; +export * from "./pagination"; +export * from "./profile"; +export * from "./request"; +export * from "./response"; +export * from "./retry"; +export * from "./serde"; +export * from "./shapes"; +export * from "./signature"; +export * from "./stream"; +export * from "./token"; +export * from "./transfer"; +export * from "./uri"; +export * from "./util"; +export * from "./waiter"; diff --git a/amplify/functions/downloadDocument/node_modules/@aws-sdk/types/dist-types/logger.d.ts b/amplify/functions/downloadDocument/node_modules/@aws-sdk/types/dist-types/logger.d.ts new file mode 100644 index 0000000..11a33c6 --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@aws-sdk/types/dist-types/logger.d.ts @@ -0,0 +1,22 @@ 
+import type { Logger } from "@smithy/types"; +export type { Logger } from "@smithy/types"; +/** + * @public + * + * A list of logger's log level. These levels are sorted in + * order of increasing severity. Each log level includes itself and all + * the levels behind itself. + * + * @example `new Logger({logLevel: 'warn'})` will print all the warn and error + * message. + */ +export type LogLevel = "all" | "trace" | "debug" | "log" | "info" | "warn" | "error" | "off"; +/** + * @public + * + * An object consumed by Logger constructor to initiate a logger object. + */ +export interface LoggerOptions { + logger?: Logger; + logLevel?: LogLevel; +} diff --git a/amplify/functions/downloadDocument/node_modules/@aws-sdk/types/dist-types/middleware.d.ts b/amplify/functions/downloadDocument/node_modules/@aws-sdk/types/dist-types/middleware.d.ts new file mode 100644 index 0000000..06ba3e2 --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@aws-sdk/types/dist-types/middleware.d.ts @@ -0,0 +1,13 @@ +import { HandlerExecutionContext } from "@smithy/types"; +import { AwsSdkFeatures } from "./feature-ids"; +export { AbsoluteLocation, BuildHandler, BuildHandlerArguments, BuildHandlerOptions, BuildHandlerOutput, BuildMiddleware, DeserializeHandler, DeserializeHandlerArguments, DeserializeHandlerOptions, DeserializeHandlerOutput, DeserializeMiddleware, FinalizeHandler, FinalizeHandlerArguments, FinalizeHandlerOutput, FinalizeRequestHandlerOptions, FinalizeRequestMiddleware, Handler, HandlerExecutionContext, HandlerOptions, InitializeHandler, InitializeHandlerArguments, InitializeHandlerOptions, InitializeHandlerOutput, InitializeMiddleware, MiddlewareStack, MiddlewareType, Pluggable, Priority, Relation, RelativeLocation, RelativeMiddlewareOptions, SerializeHandler, SerializeHandlerArguments, SerializeHandlerOptions, SerializeHandlerOutput, SerializeMiddleware, Step, Terminalware, } from "@smithy/types"; +/** + * @internal + * Contains reserved keys for AWS SDK 
internal usage of the + * handler execution context object. + */ +export interface AwsHandlerExecutionContext extends HandlerExecutionContext { + __aws_sdk_context?: { + features?: AwsSdkFeatures; + }; +} diff --git a/amplify/functions/downloadDocument/node_modules/@aws-sdk/types/dist-types/pagination.d.ts b/amplify/functions/downloadDocument/node_modules/@aws-sdk/types/dist-types/pagination.d.ts new file mode 100644 index 0000000..af791b0 --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@aws-sdk/types/dist-types/pagination.d.ts @@ -0,0 +1 @@ +export { PaginationConfiguration, Paginator } from "@smithy/types"; diff --git a/amplify/functions/downloadDocument/node_modules/@aws-sdk/types/dist-types/profile.d.ts b/amplify/functions/downloadDocument/node_modules/@aws-sdk/types/dist-types/profile.d.ts new file mode 100644 index 0000000..9916f3b --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@aws-sdk/types/dist-types/profile.d.ts @@ -0,0 +1 @@ +export { IniSection, Profile, ParsedIniData, SharedConfigFiles } from "@smithy/types"; diff --git a/amplify/functions/downloadDocument/node_modules/@aws-sdk/types/dist-types/request.d.ts b/amplify/functions/downloadDocument/node_modules/@aws-sdk/types/dist-types/request.d.ts new file mode 100644 index 0000000..95405d1 --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@aws-sdk/types/dist-types/request.d.ts @@ -0,0 +1,7 @@ +/** + * @internal + */ +export interface Request { + destination: URL; + body?: any; +} diff --git a/amplify/functions/downloadDocument/node_modules/@aws-sdk/types/dist-types/response.d.ts b/amplify/functions/downloadDocument/node_modules/@aws-sdk/types/dist-types/response.d.ts new file mode 100644 index 0000000..8d99350 --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@aws-sdk/types/dist-types/response.d.ts @@ -0,0 +1,7 @@ +export { MetadataBearer, ResponseMetadata } from "@smithy/types"; +/** + * @internal + */ +export interface 
Response { + body: any; +} diff --git a/amplify/functions/downloadDocument/node_modules/@aws-sdk/types/dist-types/retry.d.ts b/amplify/functions/downloadDocument/node_modules/@aws-sdk/types/dist-types/retry.d.ts new file mode 100644 index 0000000..4b7eb98 --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@aws-sdk/types/dist-types/retry.d.ts @@ -0,0 +1 @@ +export { ExponentialBackoffJitterType, ExponentialBackoffStrategyOptions, RetryBackoffStrategy, RetryErrorInfo, RetryErrorType, RetryStrategyOptions, RetryStrategyV2, RetryToken, StandardRetryBackoffStrategy, StandardRetryToken, } from "@smithy/types"; diff --git a/amplify/functions/downloadDocument/node_modules/@aws-sdk/types/dist-types/serde.d.ts b/amplify/functions/downloadDocument/node_modules/@aws-sdk/types/dist-types/serde.d.ts new file mode 100644 index 0000000..c4cab79 --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@aws-sdk/types/dist-types/serde.d.ts @@ -0,0 +1,24 @@ +export { EndpointBearer, StreamCollector, SerdeContext, ResponseDeserializer, RequestSerializer, SdkStreamMixin, SdkStream, WithSdkStreamMixin, SdkStreamMixinInjector, SdkStreamSerdeContext, } from "@smithy/types"; +/** + * @public + * + * Declare DOM interfaces in case dom.d.ts is not added to the tsconfig lib, causing + * interfaces to not be defined. For developers with dom.d.ts added, the interfaces will + * be merged correctly. + * + * This is also required for any clients with streaming interfaces where the corresponding + * types are also referred. The type is only declared here once since this `@aws-sdk/types` + * is depended by all `@aws-sdk` packages. 
+ */ +declare global { + /** + * @public + */ + export interface ReadableStream { + } + /** + * @public + */ + export interface Blob { + } +} diff --git a/amplify/functions/downloadDocument/node_modules/@aws-sdk/types/dist-types/shapes.d.ts b/amplify/functions/downloadDocument/node_modules/@aws-sdk/types/dist-types/shapes.d.ts new file mode 100644 index 0000000..bc19cc7 --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@aws-sdk/types/dist-types/shapes.d.ts @@ -0,0 +1 @@ +export { DocumentType, RetryableTrait, SmithyException, SdkError } from "@smithy/types"; diff --git a/amplify/functions/downloadDocument/node_modules/@aws-sdk/types/dist-types/signature.d.ts b/amplify/functions/downloadDocument/node_modules/@aws-sdk/types/dist-types/signature.d.ts new file mode 100644 index 0000000..23cbe97 --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@aws-sdk/types/dist-types/signature.d.ts @@ -0,0 +1 @@ +export { DateInput, EventSigner, EventSigningArguments, FormattedEvent, MessageSigner, RequestSigningArguments, RequestPresigner, RequestPresigningArguments, RequestSigner, SignableMessage, SignedMessage, SigningArguments, StringSigner, } from "@smithy/types"; diff --git a/amplify/functions/downloadDocument/node_modules/@aws-sdk/types/dist-types/stream.d.ts b/amplify/functions/downloadDocument/node_modules/@aws-sdk/types/dist-types/stream.d.ts new file mode 100644 index 0000000..9092844 --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@aws-sdk/types/dist-types/stream.d.ts @@ -0,0 +1 @@ +export { GetAwsChunkedEncodingStream, GetAwsChunkedEncodingStreamOptions } from "@smithy/types"; diff --git a/amplify/functions/downloadDocument/node_modules/@aws-sdk/types/dist-types/token.d.ts b/amplify/functions/downloadDocument/node_modules/@aws-sdk/types/dist-types/token.d.ts new file mode 100644 index 0000000..a68d58f --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@aws-sdk/types/dist-types/token.d.ts @@ -0,0 
+1,17 @@ +import { TokenIdentity } from "./identity"; +import { Provider } from "./util"; +/** + * @public + * + * An object representing temporary or permanent AWS token. + * + * @deprecated Use {@link TokenIdentity} + */ +export interface Token extends TokenIdentity { +} +/** + * @public + * + * @deprecated Use {@link TokenIdentityProvider} + */ +export type TokenProvider = Provider; diff --git a/amplify/functions/downloadDocument/node_modules/@aws-sdk/types/dist-types/transfer.d.ts b/amplify/functions/downloadDocument/node_modules/@aws-sdk/types/dist-types/transfer.d.ts new file mode 100644 index 0000000..ba78190 --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@aws-sdk/types/dist-types/transfer.d.ts @@ -0,0 +1 @@ +export { RequestContext, RequestHandler, RequestHandlerMetadata, RequestHandlerOutput, RequestHandlerProtocol, } from "@smithy/types"; diff --git a/amplify/functions/downloadDocument/node_modules/@aws-sdk/types/dist-types/ts3.4/abort.d.ts b/amplify/functions/downloadDocument/node_modules/@aws-sdk/types/dist-types/ts3.4/abort.d.ts new file mode 100644 index 0000000..dad6079 --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@aws-sdk/types/dist-types/ts3.4/abort.d.ts @@ -0,0 +1 @@ +export { AbortController, AbortHandler, AbortSignal } from "@smithy/types"; diff --git a/amplify/functions/downloadDocument/node_modules/@aws-sdk/types/dist-types/ts3.4/auth.d.ts b/amplify/functions/downloadDocument/node_modules/@aws-sdk/types/dist-types/ts3.4/auth.d.ts new file mode 100644 index 0000000..8a02dbc --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@aws-sdk/types/dist-types/ts3.4/auth.d.ts @@ -0,0 +1,5 @@ +export { + AuthScheme, + HttpAuthDefinition, + HttpAuthLocation, +} from "@smithy/types"; diff --git a/amplify/functions/downloadDocument/node_modules/@aws-sdk/types/dist-types/ts3.4/blob/blob-types.d.ts b/amplify/functions/downloadDocument/node_modules/@aws-sdk/types/dist-types/ts3.4/blob/blob-types.d.ts 
new file mode 100644 index 0000000..df39efe --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@aws-sdk/types/dist-types/ts3.4/blob/blob-types.d.ts @@ -0,0 +1,2 @@ +import { BlobTypes } from "@smithy/types"; +export { BlobTypes }; diff --git a/amplify/functions/downloadDocument/node_modules/@aws-sdk/types/dist-types/ts3.4/checksum.d.ts b/amplify/functions/downloadDocument/node_modules/@aws-sdk/types/dist-types/ts3.4/checksum.d.ts new file mode 100644 index 0000000..f805d72 --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@aws-sdk/types/dist-types/ts3.4/checksum.d.ts @@ -0,0 +1 @@ +export { Checksum, ChecksumConstructor } from "@smithy/types"; diff --git a/amplify/functions/downloadDocument/node_modules/@aws-sdk/types/dist-types/ts3.4/client.d.ts b/amplify/functions/downloadDocument/node_modules/@aws-sdk/types/dist-types/ts3.4/client.d.ts new file mode 100644 index 0000000..d6b3dcf --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@aws-sdk/types/dist-types/ts3.4/client.d.ts @@ -0,0 +1 @@ +export { Client } from "@smithy/types"; diff --git a/amplify/functions/downloadDocument/node_modules/@aws-sdk/types/dist-types/ts3.4/command.d.ts b/amplify/functions/downloadDocument/node_modules/@aws-sdk/types/dist-types/ts3.4/command.d.ts new file mode 100644 index 0000000..3887267 --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@aws-sdk/types/dist-types/ts3.4/command.d.ts @@ -0,0 +1 @@ +export { Command } from "@smithy/types"; diff --git a/amplify/functions/downloadDocument/node_modules/@aws-sdk/types/dist-types/ts3.4/connection.d.ts b/amplify/functions/downloadDocument/node_modules/@aws-sdk/types/dist-types/ts3.4/connection.d.ts new file mode 100644 index 0000000..36ebd00 --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@aws-sdk/types/dist-types/ts3.4/connection.d.ts @@ -0,0 +1,6 @@ +export { + ConnectConfiguration, + ConnectionManager, + ConnectionManagerConfiguration, + 
ConnectionPool, +} from "@smithy/types"; diff --git a/amplify/functions/downloadDocument/node_modules/@aws-sdk/types/dist-types/ts3.4/credentials.d.ts b/amplify/functions/downloadDocument/node_modules/@aws-sdk/types/dist-types/ts3.4/credentials.d.ts new file mode 100644 index 0000000..6c91a35 --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@aws-sdk/types/dist-types/ts3.4/credentials.d.ts @@ -0,0 +1,13 @@ +import { Logger } from "@smithy/types"; +import { AwsCredentialIdentity } from "./identity"; +import { Provider } from "./util"; +export interface Credentials extends AwsCredentialIdentity {} +export type CredentialProvider = Provider; +export type CredentialProviderOptions = { + logger?: Logger; + parentClientConfig?: { + region?: string | Provider; + profile?: string; + [key: string]: unknown; + }; +}; diff --git a/amplify/functions/downloadDocument/node_modules/@aws-sdk/types/dist-types/ts3.4/crypto.d.ts b/amplify/functions/downloadDocument/node_modules/@aws-sdk/types/dist-types/ts3.4/crypto.d.ts new file mode 100644 index 0000000..dfe61bf --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@aws-sdk/types/dist-types/ts3.4/crypto.d.ts @@ -0,0 +1,7 @@ +export { + Hash, + HashConstructor, + StreamHasher, + randomValues, + SourceData, +} from "@smithy/types"; diff --git a/amplify/functions/downloadDocument/node_modules/@aws-sdk/types/dist-types/ts3.4/dns.d.ts b/amplify/functions/downloadDocument/node_modules/@aws-sdk/types/dist-types/ts3.4/dns.d.ts new file mode 100644 index 0000000..d899949 --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@aws-sdk/types/dist-types/ts3.4/dns.d.ts @@ -0,0 +1,19 @@ +export declare enum HostAddressType { + AAAA = "AAAA", + A = "A", +} +export interface HostAddress { + addressType: HostAddressType; + address: string; + hostName: string; + service?: string; +} +export interface HostResolverArguments { + hostName: string; + service?: string; +} +export interface HostResolver { + 
resolveAddress(args: HostResolverArguments): Promise; + reportFailureOnAddress(addr: HostAddress): void; + purgeCache(args?: HostResolverArguments): void; +} diff --git a/amplify/functions/downloadDocument/node_modules/@aws-sdk/types/dist-types/ts3.4/encode.d.ts b/amplify/functions/downloadDocument/node_modules/@aws-sdk/types/dist-types/ts3.4/encode.d.ts new file mode 100644 index 0000000..76966f9 --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@aws-sdk/types/dist-types/ts3.4/encode.d.ts @@ -0,0 +1,6 @@ +export { + MessageDecoder, + MessageEncoder, + AvailableMessage, + AvailableMessages, +} from "@smithy/types"; diff --git a/amplify/functions/downloadDocument/node_modules/@aws-sdk/types/dist-types/ts3.4/endpoint.d.ts b/amplify/functions/downloadDocument/node_modules/@aws-sdk/types/dist-types/ts3.4/endpoint.d.ts new file mode 100644 index 0000000..ff3c7de --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@aws-sdk/types/dist-types/ts3.4/endpoint.d.ts @@ -0,0 +1,9 @@ +export { + EndpointARN, + EndpointPartition, + EndpointURLScheme, + EndpointURL, + EndpointObjectProperty, + EndpointV2, + EndpointParameters, +} from "@smithy/types"; diff --git a/amplify/functions/downloadDocument/node_modules/@aws-sdk/types/dist-types/ts3.4/eventStream.d.ts b/amplify/functions/downloadDocument/node_modules/@aws-sdk/types/dist-types/ts3.4/eventStream.d.ts new file mode 100644 index 0000000..e4c04a9 --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@aws-sdk/types/dist-types/ts3.4/eventStream.d.ts @@ -0,0 +1,24 @@ +export { + Message, + MessageHeaders, + BooleanHeaderValue, + ByteHeaderValue, + ShortHeaderValue, + IntegerHeaderValue, + LongHeaderValue, + BinaryHeaderValue, + StringHeaderValue, + TimestampHeaderValue, + UuidHeaderValue, + MessageHeaderValue, + Int64, + EventStreamSerdeContext, + EventStreamMarshaller, + EventStreamMarshallerDeserFn, + EventStreamMarshallerSerFn, + EventStreamPayloadHandler, + 
EventStreamPayloadHandlerProvider, + EventStreamRequestSigner, + EventStreamSerdeProvider, + EventStreamSignerProvider, +} from "@smithy/types"; diff --git a/amplify/functions/downloadDocument/node_modules/@aws-sdk/types/dist-types/ts3.4/extensions/index.d.ts b/amplify/functions/downloadDocument/node_modules/@aws-sdk/types/dist-types/ts3.4/extensions/index.d.ts new file mode 100644 index 0000000..accf5ec --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@aws-sdk/types/dist-types/ts3.4/extensions/index.d.ts @@ -0,0 +1,5 @@ +import { Provider } from "@smithy/types"; +export interface AwsRegionExtensionConfiguration { + setRegion(region: Provider): void; + region(): Provider; +} diff --git a/amplify/functions/downloadDocument/node_modules/@aws-sdk/types/dist-types/ts3.4/feature-ids.d.ts b/amplify/functions/downloadDocument/node_modules/@aws-sdk/types/dist-types/ts3.4/feature-ids.d.ts new file mode 100644 index 0000000..6d57509 --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@aws-sdk/types/dist-types/ts3.4/feature-ids.d.ts @@ -0,0 +1,54 @@ +export type AwsSdkFeatures = Partial<{ + RESOURCE_MODEL: "A"; + WAITER: "B"; + PAGINATOR: "C"; + RETRY_MODE_LEGACY: "D"; + RETRY_MODE_STANDARD: "E"; + RETRY_MODE_ADAPTIVE: "F"; + S3_EXPRESS_BUCKET: "J"; + S3_ACCESS_GRANTS: "K"; + GZIP_REQUEST_COMPRESSION: "L"; + PROTOCOL_RPC_V2_CBOR: "M"; + ENDPOINT_OVERRIDE: "N"; + ACCOUNT_ID_ENDPOINT: "O"; + ACCOUNT_ID_MODE_PREFERRED: "P"; + ACCOUNT_ID_MODE_DISABLED: "Q"; + ACCOUNT_ID_MODE_REQUIRED: "R"; + SIGV4A_SIGNING: "S"; + FLEXIBLE_CHECKSUMS_REQ_CRC32: "U"; + FLEXIBLE_CHECKSUMS_REQ_CRC32C: "V"; + FLEXIBLE_CHECKSUMS_REQ_CRC64: "W"; + FLEXIBLE_CHECKSUMS_REQ_SHA1: "X"; + FLEXIBLE_CHECKSUMS_REQ_SHA256: "Y"; + FLEXIBLE_CHECKSUMS_REQ_WHEN_SUPPORTED: "Z"; + FLEXIBLE_CHECKSUMS_REQ_WHEN_REQUIRED: "a"; + FLEXIBLE_CHECKSUMS_RES_WHEN_SUPPORTED: "b"; + FLEXIBLE_CHECKSUMS_RES_WHEN_REQUIRED: "c"; + DDB_MAPPER: "d"; +}> & + AwsSdkCredentialsFeatures; +export type 
AwsSdkCredentialsFeatures = Partial<{ + RESOLVED_ACCOUNT_ID: "T"; + CREDENTIALS_CODE: "e"; + CREDENTIALS_ENV_VARS: "g"; + CREDENTIALS_ENV_VARS_STS_WEB_ID_TOKEN: "h"; + CREDENTIALS_STS_ASSUME_ROLE: "i"; + CREDENTIALS_STS_ASSUME_ROLE_SAML: "j"; + CREDENTIALS_STS_ASSUME_ROLE_WEB_ID: "k"; + CREDENTIALS_STS_FEDERATION_TOKEN: "l"; + CREDENTIALS_STS_SESSION_TOKEN: "m"; + CREDENTIALS_PROFILE: "n"; + CREDENTIALS_PROFILE_SOURCE_PROFILE: "o"; + CREDENTIALS_PROFILE_NAMED_PROVIDER: "p"; + CREDENTIALS_PROFILE_STS_WEB_ID_TOKEN: "q"; + CREDENTIALS_PROFILE_SSO: "r"; + CREDENTIALS_SSO: "s"; + CREDENTIALS_PROFILE_SSO_LEGACY: "t"; + CREDENTIALS_SSO_LEGACY: "u"; + CREDENTIALS_PROFILE_PROCESS: "v"; + CREDENTIALS_PROCESS: "w"; + CREDENTIALS_BOTO2_CONFIG_FILE: "x"; + CREDENTIALS_AWS_SDK_STORE: "y"; + CREDENTIALS_HTTP: "z"; + CREDENTIALS_IMDS: "0"; +}>; diff --git a/amplify/functions/downloadDocument/node_modules/@aws-sdk/types/dist-types/ts3.4/function.d.ts b/amplify/functions/downloadDocument/node_modules/@aws-sdk/types/dist-types/ts3.4/function.d.ts new file mode 100644 index 0000000..d6efac5 --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@aws-sdk/types/dist-types/ts3.4/function.d.ts @@ -0,0 +1,7 @@ +export type MergeFunctions = F1 extends (arg: infer A1) => infer R1 + ? F2 extends (arg: infer A2) => infer R2 + ? R1 extends Promise + ? 
(arg?: A1 & A2) => Promise & Awaited> + : (arg?: A1 & A2) => R1 & R2 + : never + : never; diff --git a/amplify/functions/downloadDocument/node_modules/@aws-sdk/types/dist-types/ts3.4/http.d.ts b/amplify/functions/downloadDocument/node_modules/@aws-sdk/types/dist-types/ts3.4/http.d.ts new file mode 100644 index 0000000..d8e0eab --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@aws-sdk/types/dist-types/ts3.4/http.d.ts @@ -0,0 +1,17 @@ +import { HttpResponse } from "@smithy/types"; +export { + Endpoint, + HeaderBag, + HttpHandlerOptions, + HttpMessage, + HttpRequest, + HttpResponse, + QueryParameterBag, +} from "@smithy/types"; +export interface Headers extends Map { + withHeader(headerName: string, headerValue: string): Headers; + withoutHeader(headerName: string): Headers; +} +export interface ResolvedHttpResponse extends HttpResponse { + body: string; +} diff --git a/amplify/functions/downloadDocument/node_modules/@aws-sdk/types/dist-types/ts3.4/identity/AnonymousIdentity.d.ts b/amplify/functions/downloadDocument/node_modules/@aws-sdk/types/dist-types/ts3.4/identity/AnonymousIdentity.d.ts new file mode 100644 index 0000000..5b175f6 --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@aws-sdk/types/dist-types/ts3.4/identity/AnonymousIdentity.d.ts @@ -0,0 +1,2 @@ +import { Identity } from "./Identity"; +export interface AnonymousIdentity extends Identity {} diff --git a/amplify/functions/downloadDocument/node_modules/@aws-sdk/types/dist-types/ts3.4/identity/AwsCredentialIdentity.d.ts b/amplify/functions/downloadDocument/node_modules/@aws-sdk/types/dist-types/ts3.4/identity/AwsCredentialIdentity.d.ts new file mode 100644 index 0000000..aaec358 --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@aws-sdk/types/dist-types/ts3.4/identity/AwsCredentialIdentity.d.ts @@ -0,0 +1,30 @@ +import { + AwsCredentialIdentity, + AwsCredentialIdentityProvider, + Logger, + RequestHandler, +} from "@smithy/types"; +import { 
AwsSdkCredentialsFeatures } from "../feature-ids"; +export { + AwsCredentialIdentity, + AwsCredentialIdentityProvider, + IdentityProvider, +} from "@smithy/types"; +export interface AwsIdentityProperties { + callerClientConfig?: { + credentials?: AwsCredentialIdentity | AwsCredentialIdentityProvider; + credentialDefaultProvider?: (input?: any) => AwsCredentialIdentityProvider; + logger?: Logger; + profile?: string; + region(): Promise; + requestHandler?: RequestHandler; + }; +} +export type RuntimeConfigIdentityProvider = ( + awsIdentityProperties?: AwsIdentityProperties +) => Promise; +export type RuntimeConfigAwsCredentialIdentityProvider = + RuntimeConfigIdentityProvider; +export type AttributedAwsCredentialIdentity = AwsCredentialIdentity & { + $source?: AwsSdkCredentialsFeatures; +}; diff --git a/amplify/functions/downloadDocument/node_modules/@aws-sdk/types/dist-types/ts3.4/identity/Identity.d.ts b/amplify/functions/downloadDocument/node_modules/@aws-sdk/types/dist-types/ts3.4/identity/Identity.d.ts new file mode 100644 index 0000000..4175fd3 --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@aws-sdk/types/dist-types/ts3.4/identity/Identity.d.ts @@ -0,0 +1 @@ +export { Identity, IdentityProvider } from "@smithy/types"; diff --git a/amplify/functions/downloadDocument/node_modules/@aws-sdk/types/dist-types/ts3.4/identity/LoginIdentity.d.ts b/amplify/functions/downloadDocument/node_modules/@aws-sdk/types/dist-types/ts3.4/identity/LoginIdentity.d.ts new file mode 100644 index 0000000..3258bbb --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@aws-sdk/types/dist-types/ts3.4/identity/LoginIdentity.d.ts @@ -0,0 +1,6 @@ +import { Identity, IdentityProvider } from "./Identity"; +export interface LoginIdentity extends Identity { + readonly username: string; + readonly password: string; +} +export type LoginIdentityProvider = IdentityProvider; diff --git 
a/amplify/functions/downloadDocument/node_modules/@aws-sdk/types/dist-types/ts3.4/identity/TokenIdentity.d.ts b/amplify/functions/downloadDocument/node_modules/@aws-sdk/types/dist-types/ts3.4/identity/TokenIdentity.d.ts new file mode 100644 index 0000000..66301bc --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@aws-sdk/types/dist-types/ts3.4/identity/TokenIdentity.d.ts @@ -0,0 +1 @@ +export { TokenIdentity, TokenIdentityProvider } from "@smithy/types"; diff --git a/amplify/functions/downloadDocument/node_modules/@aws-sdk/types/dist-types/ts3.4/identity/index.d.ts b/amplify/functions/downloadDocument/node_modules/@aws-sdk/types/dist-types/ts3.4/identity/index.d.ts new file mode 100644 index 0000000..863e78e --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@aws-sdk/types/dist-types/ts3.4/identity/index.d.ts @@ -0,0 +1,5 @@ +export * from "./AnonymousIdentity"; +export * from "./AwsCredentialIdentity"; +export * from "./Identity"; +export * from "./LoginIdentity"; +export * from "./TokenIdentity"; diff --git a/amplify/functions/downloadDocument/node_modules/@aws-sdk/types/dist-types/ts3.4/index.d.ts b/amplify/functions/downloadDocument/node_modules/@aws-sdk/types/dist-types/ts3.4/index.d.ts new file mode 100644 index 0000000..a7f99d9 --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@aws-sdk/types/dist-types/ts3.4/index.d.ts @@ -0,0 +1,34 @@ +export * from "./abort"; +export * from "./auth"; +export * from "./blob/blob-types"; +export * from "./checksum"; +export * from "./client"; +export * from "./command"; +export * from "./connection"; +export * from "./credentials"; +export * from "./crypto"; +export * from "./dns"; +export * from "./encode"; +export * from "./endpoint"; +export * from "./eventStream"; +export * from "./extensions"; +export * from "./feature-ids"; +export * from "./function"; +export * from "./http"; +export * from "./identity"; +export * from "./logger"; +export * from "./middleware"; 
+export * from "./pagination"; +export * from "./profile"; +export * from "./request"; +export * from "./response"; +export * from "./retry"; +export * from "./serde"; +export * from "./shapes"; +export * from "./signature"; +export * from "./stream"; +export * from "./token"; +export * from "./transfer"; +export * from "./uri"; +export * from "./util"; +export * from "./waiter"; diff --git a/amplify/functions/downloadDocument/node_modules/@aws-sdk/types/dist-types/ts3.4/logger.d.ts b/amplify/functions/downloadDocument/node_modules/@aws-sdk/types/dist-types/ts3.4/logger.d.ts new file mode 100644 index 0000000..c714915 --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@aws-sdk/types/dist-types/ts3.4/logger.d.ts @@ -0,0 +1,15 @@ +import { Logger } from "@smithy/types"; +export { Logger } from "@smithy/types"; +export type LogLevel = + | "all" + | "trace" + | "debug" + | "log" + | "info" + | "warn" + | "error" + | "off"; +export interface LoggerOptions { + logger?: Logger; + logLevel?: LogLevel; +} diff --git a/amplify/functions/downloadDocument/node_modules/@aws-sdk/types/dist-types/ts3.4/middleware.d.ts b/amplify/functions/downloadDocument/node_modules/@aws-sdk/types/dist-types/ts3.4/middleware.d.ts new file mode 100644 index 0000000..e101e9b --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@aws-sdk/types/dist-types/ts3.4/middleware.d.ts @@ -0,0 +1,47 @@ +import { HandlerExecutionContext } from "@smithy/types"; +import { AwsSdkFeatures } from "./feature-ids"; +export { + AbsoluteLocation, + BuildHandler, + BuildHandlerArguments, + BuildHandlerOptions, + BuildHandlerOutput, + BuildMiddleware, + DeserializeHandler, + DeserializeHandlerArguments, + DeserializeHandlerOptions, + DeserializeHandlerOutput, + DeserializeMiddleware, + FinalizeHandler, + FinalizeHandlerArguments, + FinalizeHandlerOutput, + FinalizeRequestHandlerOptions, + FinalizeRequestMiddleware, + Handler, + HandlerExecutionContext, + HandlerOptions, + 
InitializeHandler, + InitializeHandlerArguments, + InitializeHandlerOptions, + InitializeHandlerOutput, + InitializeMiddleware, + MiddlewareStack, + MiddlewareType, + Pluggable, + Priority, + Relation, + RelativeLocation, + RelativeMiddlewareOptions, + SerializeHandler, + SerializeHandlerArguments, + SerializeHandlerOptions, + SerializeHandlerOutput, + SerializeMiddleware, + Step, + Terminalware, +} from "@smithy/types"; +export interface AwsHandlerExecutionContext extends HandlerExecutionContext { + __aws_sdk_context?: { + features?: AwsSdkFeatures; + }; +} diff --git a/amplify/functions/downloadDocument/node_modules/@aws-sdk/types/dist-types/ts3.4/pagination.d.ts b/amplify/functions/downloadDocument/node_modules/@aws-sdk/types/dist-types/ts3.4/pagination.d.ts new file mode 100644 index 0000000..af791b0 --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@aws-sdk/types/dist-types/ts3.4/pagination.d.ts @@ -0,0 +1 @@ +export { PaginationConfiguration, Paginator } from "@smithy/types"; diff --git a/amplify/functions/downloadDocument/node_modules/@aws-sdk/types/dist-types/ts3.4/profile.d.ts b/amplify/functions/downloadDocument/node_modules/@aws-sdk/types/dist-types/ts3.4/profile.d.ts new file mode 100644 index 0000000..b3813d8 --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@aws-sdk/types/dist-types/ts3.4/profile.d.ts @@ -0,0 +1,6 @@ +export { + IniSection, + Profile, + ParsedIniData, + SharedConfigFiles, +} from "@smithy/types"; diff --git a/amplify/functions/downloadDocument/node_modules/@aws-sdk/types/dist-types/ts3.4/request.d.ts b/amplify/functions/downloadDocument/node_modules/@aws-sdk/types/dist-types/ts3.4/request.d.ts new file mode 100644 index 0000000..5c6e793 --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@aws-sdk/types/dist-types/ts3.4/request.d.ts @@ -0,0 +1,4 @@ +export interface Request { + destination: URL; + body?: any; +} diff --git 
a/amplify/functions/downloadDocument/node_modules/@aws-sdk/types/dist-types/ts3.4/response.d.ts b/amplify/functions/downloadDocument/node_modules/@aws-sdk/types/dist-types/ts3.4/response.d.ts new file mode 100644 index 0000000..4e5fcd0 --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@aws-sdk/types/dist-types/ts3.4/response.d.ts @@ -0,0 +1,4 @@ +export { MetadataBearer, ResponseMetadata } from "@smithy/types"; +export interface Response { + body: any; +} diff --git a/amplify/functions/downloadDocument/node_modules/@aws-sdk/types/dist-types/ts3.4/retry.d.ts b/amplify/functions/downloadDocument/node_modules/@aws-sdk/types/dist-types/ts3.4/retry.d.ts new file mode 100644 index 0000000..8fc946a --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@aws-sdk/types/dist-types/ts3.4/retry.d.ts @@ -0,0 +1,12 @@ +export { + ExponentialBackoffJitterType, + ExponentialBackoffStrategyOptions, + RetryBackoffStrategy, + RetryErrorInfo, + RetryErrorType, + RetryStrategyOptions, + RetryStrategyV2, + RetryToken, + StandardRetryBackoffStrategy, + StandardRetryToken, +} from "@smithy/types"; diff --git a/amplify/functions/downloadDocument/node_modules/@aws-sdk/types/dist-types/ts3.4/serde.d.ts b/amplify/functions/downloadDocument/node_modules/@aws-sdk/types/dist-types/ts3.4/serde.d.ts new file mode 100644 index 0000000..a7ed76f --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@aws-sdk/types/dist-types/ts3.4/serde.d.ts @@ -0,0 +1,16 @@ +export { + EndpointBearer, + StreamCollector, + SerdeContext, + ResponseDeserializer, + RequestSerializer, + SdkStreamMixin, + SdkStream, + WithSdkStreamMixin, + SdkStreamMixinInjector, + SdkStreamSerdeContext, +} from "@smithy/types"; +declare global { + export interface ReadableStream {} + export interface Blob {} +} diff --git a/amplify/functions/downloadDocument/node_modules/@aws-sdk/types/dist-types/ts3.4/shapes.d.ts 
b/amplify/functions/downloadDocument/node_modules/@aws-sdk/types/dist-types/ts3.4/shapes.d.ts new file mode 100644 index 0000000..d1efa9a --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@aws-sdk/types/dist-types/ts3.4/shapes.d.ts @@ -0,0 +1,6 @@ +export { + DocumentType, + RetryableTrait, + SmithyException, + SdkError, +} from "@smithy/types"; diff --git a/amplify/functions/downloadDocument/node_modules/@aws-sdk/types/dist-types/ts3.4/signature.d.ts b/amplify/functions/downloadDocument/node_modules/@aws-sdk/types/dist-types/ts3.4/signature.d.ts new file mode 100644 index 0000000..cbabd75 --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@aws-sdk/types/dist-types/ts3.4/signature.d.ts @@ -0,0 +1,15 @@ +export { + DateInput, + EventSigner, + EventSigningArguments, + FormattedEvent, + MessageSigner, + RequestSigningArguments, + RequestPresigner, + RequestPresigningArguments, + RequestSigner, + SignableMessage, + SignedMessage, + SigningArguments, + StringSigner, +} from "@smithy/types"; diff --git a/amplify/functions/downloadDocument/node_modules/@aws-sdk/types/dist-types/ts3.4/stream.d.ts b/amplify/functions/downloadDocument/node_modules/@aws-sdk/types/dist-types/ts3.4/stream.d.ts new file mode 100644 index 0000000..1b79413 --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@aws-sdk/types/dist-types/ts3.4/stream.d.ts @@ -0,0 +1,4 @@ +export { + GetAwsChunkedEncodingStream, + GetAwsChunkedEncodingStreamOptions, +} from "@smithy/types"; diff --git a/amplify/functions/downloadDocument/node_modules/@aws-sdk/types/dist-types/ts3.4/token.d.ts b/amplify/functions/downloadDocument/node_modules/@aws-sdk/types/dist-types/ts3.4/token.d.ts new file mode 100644 index 0000000..c33e506 --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@aws-sdk/types/dist-types/ts3.4/token.d.ts @@ -0,0 +1,4 @@ +import { TokenIdentity } from "./identity"; +import { Provider } from "./util"; +export interface Token extends 
TokenIdentity {} +export type TokenProvider = Provider; diff --git a/amplify/functions/downloadDocument/node_modules/@aws-sdk/types/dist-types/ts3.4/transfer.d.ts b/amplify/functions/downloadDocument/node_modules/@aws-sdk/types/dist-types/ts3.4/transfer.d.ts new file mode 100644 index 0000000..04a7f87 --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@aws-sdk/types/dist-types/ts3.4/transfer.d.ts @@ -0,0 +1,7 @@ +export { + RequestContext, + RequestHandler, + RequestHandlerMetadata, + RequestHandlerOutput, + RequestHandlerProtocol, +} from "@smithy/types"; diff --git a/amplify/functions/downloadDocument/node_modules/@aws-sdk/types/dist-types/ts3.4/uri.d.ts b/amplify/functions/downloadDocument/node_modules/@aws-sdk/types/dist-types/ts3.4/uri.d.ts new file mode 100644 index 0000000..297dfe4 --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@aws-sdk/types/dist-types/ts3.4/uri.d.ts @@ -0,0 +1 @@ +export { URI } from "@smithy/types"; diff --git a/amplify/functions/downloadDocument/node_modules/@aws-sdk/types/dist-types/ts3.4/util.d.ts b/amplify/functions/downloadDocument/node_modules/@aws-sdk/types/dist-types/ts3.4/util.d.ts new file mode 100644 index 0000000..e7e43e6 --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@aws-sdk/types/dist-types/ts3.4/util.d.ts @@ -0,0 +1,14 @@ +export { + Encoder, + Decoder, + Provider, + UserAgentPair, + UserAgent, + UrlParser, + MemoizedProvider, + BodyLengthCalculator, + RegionInfo, + RegionInfoProviderOptions, + RegionInfoProvider, + RetryStrategy, +} from "@smithy/types"; diff --git a/amplify/functions/downloadDocument/node_modules/@aws-sdk/types/dist-types/ts3.4/waiter.d.ts b/amplify/functions/downloadDocument/node_modules/@aws-sdk/types/dist-types/ts3.4/waiter.d.ts new file mode 100644 index 0000000..bb98020 --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@aws-sdk/types/dist-types/ts3.4/waiter.d.ts @@ -0,0 +1 @@ +export { WaiterConfiguration } from 
"@smithy/types"; diff --git a/amplify/functions/downloadDocument/node_modules/@aws-sdk/types/dist-types/uri.d.ts b/amplify/functions/downloadDocument/node_modules/@aws-sdk/types/dist-types/uri.d.ts new file mode 100644 index 0000000..297dfe4 --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@aws-sdk/types/dist-types/uri.d.ts @@ -0,0 +1 @@ +export { URI } from "@smithy/types"; diff --git a/amplify/functions/downloadDocument/node_modules/@aws-sdk/types/dist-types/util.d.ts b/amplify/functions/downloadDocument/node_modules/@aws-sdk/types/dist-types/util.d.ts new file mode 100644 index 0000000..fd059b6 --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@aws-sdk/types/dist-types/util.d.ts @@ -0,0 +1 @@ +export { Encoder, Decoder, Provider, UserAgentPair, UserAgent, UrlParser, MemoizedProvider, BodyLengthCalculator, RegionInfo, RegionInfoProviderOptions, RegionInfoProvider, RetryStrategy, } from "@smithy/types"; diff --git a/amplify/functions/downloadDocument/node_modules/@aws-sdk/types/dist-types/waiter.d.ts b/amplify/functions/downloadDocument/node_modules/@aws-sdk/types/dist-types/waiter.d.ts new file mode 100644 index 0000000..bb98020 --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@aws-sdk/types/dist-types/waiter.d.ts @@ -0,0 +1 @@ +export { WaiterConfiguration } from "@smithy/types"; diff --git a/amplify/functions/downloadDocument/node_modules/@aws-sdk/types/package.json b/amplify/functions/downloadDocument/node_modules/@aws-sdk/types/package.json new file mode 100755 index 0000000..eaf5c44 --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@aws-sdk/types/package.json @@ -0,0 +1,56 @@ +{ + "name": "@aws-sdk/types", + "version": "3.775.0", + "main": "./dist-cjs/index.js", + "module": "./dist-es/index.js", + "types": "./dist-types/index.d.ts", + "description": "Types for the AWS SDK", + "scripts": { + "build": "concurrently 'yarn:build:cjs' 'yarn:build:es' 'yarn:build:types'", + "build:cjs": 
"node ../../scripts/compilation/inline types", + "build:es": "tsc -p tsconfig.es.json", + "build:include:deps": "lerna run --scope $npm_package_name --include-dependencies build", + "build:types": "tsc -p tsconfig.types.json", + "build:types:downlevel": "downlevel-dts dist-types dist-types/ts3.4", + "clean": "rimraf ./dist-* && rimraf *.tsbuildinfo", + "extract:docs": "api-extractor run --local", + "test": "tsc -p tsconfig.test.json" + }, + "author": { + "name": "AWS SDK for JavaScript Team", + "url": "https://aws.amazon.com/javascript/" + }, + "license": "Apache-2.0", + "engines": { + "node": ">=18.0.0" + }, + "typesVersions": { + "<4.0": { + "dist-types/*": [ + "dist-types/ts3.4/*" + ] + } + }, + "files": [ + "dist-*/**" + ], + "homepage": "https://github.com/aws/aws-sdk-js-v3/tree/main/packages/types", + "repository": { + "type": "git", + "url": "https://github.com/aws/aws-sdk-js-v3.git", + "directory": "packages/types" + }, + "dependencies": { + "@smithy/types": "^4.2.0", + "tslib": "^2.6.2" + }, + "devDependencies": { + "@tsconfig/recommended": "1.0.1", + "concurrently": "7.0.0", + "downlevel-dts": "0.10.1", + "rimraf": "3.0.2", + "typescript": "~5.2.2" + }, + "browser": {}, + "react-native": {} +} diff --git a/amplify/functions/downloadDocument/node_modules/@aws-sdk/util-dynamodb/LICENSE b/amplify/functions/downloadDocument/node_modules/@aws-sdk/util-dynamodb/LICENSE new file mode 100644 index 0000000..7b6491b --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@aws-sdk/util-dynamodb/LICENSE @@ -0,0 +1,201 @@ +Apache License + Version 2.0, January 2004 + http://www.apache.org/licenses/ + + TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION + + 1. Definitions. + + "License" shall mean the terms and conditions for use, reproduction, + and distribution as defined by Sections 1 through 9 of this document. + + "Licensor" shall mean the copyright owner or entity authorized by + the copyright owner that is granting the License. 
+ + "Legal Entity" shall mean the union of the acting entity and all + other entities that control, are controlled by, or are under common + control with that entity. For the purposes of this definition, + "control" means (i) the power, direct or indirect, to cause the + direction or management of such entity, whether by contract or + otherwise, or (ii) ownership of fifty percent (50%) or more of the + outstanding shares, or (iii) beneficial ownership of such entity. + + "You" (or "Your") shall mean an individual or Legal Entity + exercising permissions granted by this License. + + "Source" form shall mean the preferred form for making modifications, + including but not limited to software source code, documentation + source, and configuration files. + + "Object" form shall mean any form resulting from mechanical + transformation or translation of a Source form, including but + not limited to compiled object code, generated documentation, + and conversions to other media types. + + "Work" shall mean the work of authorship, whether in Source or + Object form, made available under the License, as indicated by a + copyright notice that is included in or attached to the work + (an example is provided in the Appendix below). + + "Derivative Works" shall mean any work, whether in Source or Object + form, that is based on (or derived from) the Work and for which the + editorial revisions, annotations, elaborations, or other modifications + represent, as a whole, an original work of authorship. For the purposes + of this License, Derivative Works shall not include works that remain + separable from, or merely link (or bind by name) to the interfaces of, + the Work and Derivative Works thereof. 
+ + "Contribution" shall mean any work of authorship, including + the original version of the Work and any modifications or additions + to that Work or Derivative Works thereof, that is intentionally + submitted to Licensor for inclusion in the Work by the copyright owner + or by an individual or Legal Entity authorized to submit on behalf of + the copyright owner. For the purposes of this definition, "submitted" + means any form of electronic, verbal, or written communication sent + to the Licensor or its representatives, including but not limited to + communication on electronic mailing lists, source code control systems, + and issue tracking systems that are managed by, or on behalf of, the + Licensor for the purpose of discussing and improving the Work, but + excluding communication that is conspicuously marked or otherwise + designated in writing by the copyright owner as "Not a Contribution." + + "Contributor" shall mean Licensor and any individual or Legal Entity + on behalf of whom a Contribution has been received by Licensor and + subsequently incorporated within the Work. + + 2. Grant of Copyright License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + copyright license to reproduce, prepare Derivative Works of, + publicly display, publicly perform, sublicense, and distribute the + Work and such Derivative Works in Source or Object form. + + 3. Grant of Patent License. 
Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + (except as stated in this section) patent license to make, have made, + use, offer to sell, sell, import, and otherwise transfer the Work, + where such license applies only to those patent claims licensable + by such Contributor that are necessarily infringed by their + Contribution(s) alone or by combination of their Contribution(s) + with the Work to which such Contribution(s) was submitted. If You + institute patent litigation against any entity (including a + cross-claim or counterclaim in a lawsuit) alleging that the Work + or a Contribution incorporated within the Work constitutes direct + or contributory patent infringement, then any patent licenses + granted to You under this License for that Work shall terminate + as of the date such litigation is filed. + + 4. Redistribution. You may reproduce and distribute copies of the + Work or Derivative Works thereof in any medium, with or without + modifications, and in Source or Object form, provided that You + meet the following conditions: + + (a) You must give any other recipients of the Work or + Derivative Works a copy of this License; and + + (b) You must cause any modified files to carry prominent notices + stating that You changed the files; and + + (c) You must retain, in the Source form of any Derivative Works + that You distribute, all copyright, patent, trademark, and + attribution notices from the Source form of the Work, + excluding those notices that do not pertain to any part of + the Derivative Works; and + + (d) If the Work includes a "NOTICE" text file as part of its + distribution, then any Derivative Works that You distribute must + include a readable copy of the attribution notices contained + within such NOTICE file, excluding those notices that do not + pertain to any part of the Derivative Works, in at least one + of 
the following places: within a NOTICE text file distributed + as part of the Derivative Works; within the Source form or + documentation, if provided along with the Derivative Works; or, + within a display generated by the Derivative Works, if and + wherever such third-party notices normally appear. The contents + of the NOTICE file are for informational purposes only and + do not modify the License. You may add Your own attribution + notices within Derivative Works that You distribute, alongside + or as an addendum to the NOTICE text from the Work, provided + that such additional attribution notices cannot be construed + as modifying the License. + + You may add Your own copyright statement to Your modifications and + may provide additional or different license terms and conditions + for use, reproduction, or distribution of Your modifications, or + for any such Derivative Works as a whole, provided Your use, + reproduction, and distribution of the Work otherwise complies with + the conditions stated in this License. + + 5. Submission of Contributions. Unless You explicitly state otherwise, + any Contribution intentionally submitted for inclusion in the Work + by You to the Licensor shall be under the terms and conditions of + this License, without any additional terms or conditions. + Notwithstanding the above, nothing herein shall supersede or modify + the terms of any separate license agreement you may have executed + with Licensor regarding such Contributions. + + 6. Trademarks. This License does not grant permission to use the trade + names, trademarks, service marks, or product names of the Licensor, + except as required for reasonable and customary use in describing the + origin of the Work and reproducing the content of the NOTICE file. + + 7. Disclaimer of Warranty. 
Unless required by applicable law or + agreed to in writing, Licensor provides the Work (and each + Contributor provides its Contributions) on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or + implied, including, without limitation, any warranties or conditions + of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A + PARTICULAR PURPOSE. You are solely responsible for determining the + appropriateness of using or redistributing the Work and assume any + risks associated with Your exercise of permissions under this License. + + 8. Limitation of Liability. In no event and under no legal theory, + whether in tort (including negligence), contract, or otherwise, + unless required by applicable law (such as deliberate and grossly + negligent acts) or agreed to in writing, shall any Contributor be + liable to You for damages, including any direct, indirect, special, + incidental, or consequential damages of any character arising as a + result of this License or out of the use or inability to use the + Work (including but not limited to damages for loss of goodwill, + work stoppage, computer failure or malfunction, or any and all + other commercial damages or losses), even if such Contributor + has been advised of the possibility of such damages. + + 9. Accepting Warranty or Additional Liability. While redistributing + the Work or Derivative Works thereof, You may choose to offer, + and charge a fee for, acceptance of support, warranty, indemnity, + or other liability obligations and/or rights consistent with this + License. However, in accepting such obligations, You may act only + on Your own behalf and on Your sole responsibility, not on behalf + of any other Contributor, and only if You agree to indemnify, + defend, and hold each Contributor harmless for any liability + incurred by, or claims asserted against, such Contributor by reason + of your accepting any such warranty or additional liability. 
+ + END OF TERMS AND CONDITIONS + + APPENDIX: How to apply the Apache License to your work. + + To apply the Apache License to your work, attach the following + boilerplate notice, with the fields enclosed by brackets "{}" + replaced with your own identifying information. (Don't include + the brackets!) The text should be enclosed in the appropriate + comment syntax for the file format. We also recommend that a + file or class name and description of purpose be included on the + same "printed page" as the copyright notice for easier + identification within third-party archives. + + Copyright 2018-2020 Amazon.com, Inc. or its affiliates. All Rights Reserved. + + Licensed under the Apache License, Version 2.0 (the "License"); + you may not use this file except in compliance with the License. + You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + + Unless required by applicable law or agreed to in writing, software + distributed under the License is distributed on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + See the License for the specific language governing permissions and + limitations under the License. 
\ No newline at end of file diff --git a/amplify/functions/downloadDocument/node_modules/@aws-sdk/util-dynamodb/README.md b/amplify/functions/downloadDocument/node_modules/@aws-sdk/util-dynamodb/README.md new file mode 100644 index 0000000..1d6d61a --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@aws-sdk/util-dynamodb/README.md @@ -0,0 +1,50 @@ +# @aws-sdk/util-dynamodb + +[![NPM version](https://img.shields.io/npm/v/@aws-sdk/util-dynamodb/latest.svg)](https://www.npmjs.com/package/@aws-sdk/util-dynamodb) +[![NPM downloads](https://img.shields.io/npm/dm/@aws-sdk/util-dynamodb.svg)](https://www.npmjs.com/package/@aws-sdk/util-dynamodb) + +This package provides utilities to be used with `@aws-sdk/client-dynamodb` + +If you are looking for DynamoDB Document client, please check +[@aws-sdk/lib-dynamodb](https://www.npmjs.com/package/@aws-sdk/lib-dynamodb) +which automatically performs the necessary marshalling and unmarshalling. + +## Convert JavaScript object into DynamoDB Record + +```js +const { DynamoDB } = require("@aws-sdk/client-dynamodb"); +const { marshall } = require("@aws-sdk/util-dynamodb"); + +const client = new DynamoDB(clientParams); +const params = { + TableName: "Table", + Item: marshall({ + HashKey: "hashKey", + NumAttribute: 1, + BoolAttribute: true, + ListAttribute: [1, "two", false], + MapAttribute: { foo: "bar" }, + NullAttribute: null, + }), +}; + +await client.putItem(params); +``` + +## Convert DynamoDB Record into JavaScript object + +```js +const { DynamoDB } = require("@aws-sdk/client-dynamodb"); +const { marshall, unmarshall } = require("@aws-sdk/util-dynamodb"); + +const client = new DynamoDB(clientParams); +const params = { + TableName: "Table", + Key: marshall({ + HashKey: "hashKey", + }), +}; + +const { Item } = await client.getItem(params); +unmarshall(Item); +``` diff --git a/amplify/functions/downloadDocument/node_modules/@aws-sdk/util-dynamodb/dist-cjs/index.js 
b/amplify/functions/downloadDocument/node_modules/@aws-sdk/util-dynamodb/dist-cjs/index.js new file mode 100644 index 0000000..955685c --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@aws-sdk/util-dynamodb/dist-cjs/index.js @@ -0,0 +1,350 @@ +"use strict"; +var __defProp = Object.defineProperty; +var __getOwnPropDesc = Object.getOwnPropertyDescriptor; +var __getOwnPropNames = Object.getOwnPropertyNames; +var __hasOwnProp = Object.prototype.hasOwnProperty; +var __name = (target, value) => __defProp(target, "name", { value, configurable: true }); +var __export = (target, all) => { + for (var name in all) + __defProp(target, name, { get: all[name], enumerable: true }); +}; +var __copyProps = (to, from, except, desc) => { + if (from && typeof from === "object" || typeof from === "function") { + for (let key of __getOwnPropNames(from)) + if (!__hasOwnProp.call(to, key) && key !== except) + __defProp(to, key, { get: () => from[key], enumerable: !(desc = __getOwnPropDesc(from, key)) || desc.enumerable }); + } + return to; +}; +var __toCommonJS = (mod) => __copyProps(__defProp({}, "__esModule", { value: true }), mod); + +// src/index.ts +var index_exports = {}; +__export(index_exports, { + NumberValueImpl: () => NumberValue, + convertToAttr: () => convertToAttr, + convertToNative: () => convertToNative, + marshall: () => marshall, + unmarshall: () => unmarshall +}); +module.exports = __toCommonJS(index_exports); + +// src/NumberValue.ts +var NumberValue = class _NumberValue { + static { + __name(this, "NumberValue"); + } + value; + /** + * This class does not validate that your string input is a valid number. + * + * @param value - a precise number, or any BigInt or string, or AttributeValue. + */ + constructor(value) { + if (typeof value === "object" && "N" in value) { + this.value = String(value.N); + } else { + this.value = String(value); + } + const valueOf = typeof value.valueOf() === "number" ? 
value.valueOf() : 0; + const imprecise = valueOf > Number.MAX_SAFE_INTEGER || valueOf < Number.MIN_SAFE_INTEGER || Math.abs(valueOf) === Infinity || Number.isNaN(valueOf); + if (imprecise) { + throw new Error( + `NumberValue should not be initialized with an imprecise number=${valueOf}. Use a string instead.` + ); + } + } + /** + * This class does not validate that your string input is a valid number. + * + * @param value - a precise number, or any BigInt or string, or AttributeValue. + */ + static from(value) { + return new _NumberValue(value); + } + /** + * @returns the AttributeValue form for DynamoDB. + */ + toAttributeValue() { + return { + N: this.toString() + }; + } + /** + * @returns BigInt representation. + * + * @throws SyntaxError if the string representation is not convertable to a BigInt. + */ + toBigInt() { + const stringValue = this.toString(); + return BigInt(stringValue); + } + /** + * @override + * + * @returns string representation. This is the canonical format in DynamoDB. 
+ */ + toString() { + return String(this.value); + } + /** + * @override + */ + valueOf() { + return this.toString(); + } +}; + +// src/convertToAttr.ts +var convertToAttr = /* @__PURE__ */ __name((data, options) => { + if (data === void 0) { + throw new Error(`Pass options.removeUndefinedValues=true to remove undefined values from map/array/set.`); + } else if (data === null && typeof data === "object") { + return convertToNullAttr(); + } else if (Array.isArray(data)) { + return convertToListAttr(data, options); + } else if (data?.constructor?.name === "Set") { + return convertToSetAttr(data, options); + } else if (data?.constructor?.name === "Map") { + return convertToMapAttrFromIterable(data, options); + } else if (data?.constructor?.name === "Object" || // for object which is result of Object.create(null), which doesn't have constructor defined + !data.constructor && typeof data === "object") { + return convertToMapAttrFromEnumerableProps(data, options); + } else if (isBinary(data)) { + if (data.length === 0 && options?.convertEmptyValues) { + return convertToNullAttr(); + } + return convertToBinaryAttr(data); + } else if (typeof data === "boolean" || data?.constructor?.name === "Boolean") { + return { BOOL: data.valueOf() }; + } else if (typeof data === "number" || data?.constructor?.name === "Number") { + return convertToNumberAttr(data, options); + } else if (data instanceof NumberValue) { + return data.toAttributeValue(); + } else if (typeof data === "bigint") { + return convertToBigIntAttr(data); + } else if (typeof data === "string" || data?.constructor?.name === "String") { + if (data.length === 0 && options?.convertEmptyValues) { + return convertToNullAttr(); + } + return convertToStringAttr(data); + } else if (options?.convertClassInstanceToMap && typeof data === "object") { + return convertToMapAttrFromEnumerableProps(data, options); + } + throw new Error( + `Unsupported type passed: ${data}. 
Pass options.convertClassInstanceToMap=true to marshall typeof object as map attribute.` + ); +}, "convertToAttr"); +var convertToListAttr = /* @__PURE__ */ __name((data, options) => ({ + L: data.filter( + (item) => typeof item !== "function" && (!options?.removeUndefinedValues || options?.removeUndefinedValues && item !== void 0) + ).map((item) => convertToAttr(item, options)) +}), "convertToListAttr"); +var convertToSetAttr = /* @__PURE__ */ __name((set, options) => { + const setToOperate = options?.removeUndefinedValues ? new Set([...set].filter((value) => value !== void 0)) : set; + if (!options?.removeUndefinedValues && setToOperate.has(void 0)) { + throw new Error(`Pass options.removeUndefinedValues=true to remove undefined values from map/array/set.`); + } + if (setToOperate.size === 0) { + if (options?.convertEmptyValues) { + return convertToNullAttr(); + } + throw new Error(`Pass a non-empty set, or options.convertEmptyValues=true.`); + } + const item = setToOperate.values().next().value; + if (item instanceof NumberValue) { + return { + NS: Array.from(setToOperate).map((_) => _.toString()) + }; + } else if (typeof item === "number") { + return { + NS: Array.from(setToOperate).map((num) => convertToNumberAttr(num, options)).map((item2) => item2.N) + }; + } else if (typeof item === "bigint") { + return { + NS: Array.from(setToOperate).map(convertToBigIntAttr).map((item2) => item2.N) + }; + } else if (typeof item === "string") { + return { + SS: Array.from(setToOperate).map(convertToStringAttr).map((item2) => item2.S) + }; + } else if (isBinary(item)) { + return { + // Do not alter binary data passed https://github.com/aws/aws-sdk-js-v3/issues/1530 + // @ts-expect-error Type 'ArrayBuffer' is not assignable to type 'Uint8Array' + BS: Array.from(setToOperate).map(convertToBinaryAttr).map((item2) => item2.B) + }; + } else { + throw new Error(`Only Number Set (NS), Binary Set (BS) or String Set (SS) are allowed.`); + } +}, "convertToSetAttr"); +var 
convertToMapAttrFromIterable = /* @__PURE__ */ __name((data, options) => ({ + M: ((data2) => { + const map = {}; + for (const [key, value] of data2) { + if (typeof value !== "function" && (value !== void 0 || !options?.removeUndefinedValues)) { + map[key] = convertToAttr(value, options); + } + } + return map; + })(data) +}), "convertToMapAttrFromIterable"); +var convertToMapAttrFromEnumerableProps = /* @__PURE__ */ __name((data, options) => ({ + M: ((data2) => { + const map = {}; + for (const key in data2) { + const value = data2[key]; + if (typeof value !== "function" && (value !== void 0 || !options?.removeUndefinedValues)) { + map[key] = convertToAttr(value, options); + } + } + return map; + })(data) +}), "convertToMapAttrFromEnumerableProps"); +var convertToNullAttr = /* @__PURE__ */ __name(() => ({ NULL: true }), "convertToNullAttr"); +var convertToBinaryAttr = /* @__PURE__ */ __name((data) => ({ B: data }), "convertToBinaryAttr"); +var convertToStringAttr = /* @__PURE__ */ __name((data) => ({ S: data.toString() }), "convertToStringAttr"); +var convertToBigIntAttr = /* @__PURE__ */ __name((data) => ({ N: data.toString() }), "convertToBigIntAttr"); +var validateBigIntAndThrow = /* @__PURE__ */ __name((errorPrefix) => { + throw new Error(`${errorPrefix} Use NumberValue from @aws-sdk/lib-dynamodb.`); +}, "validateBigIntAndThrow"); +var convertToNumberAttr = /* @__PURE__ */ __name((num, options) => { + if ([Number.NaN, Number.POSITIVE_INFINITY, Number.NEGATIVE_INFINITY].map((val) => val.toString()).includes(num.toString())) { + throw new Error(`Special numeric value ${num.toString()} is not allowed`); + } else if (!options?.allowImpreciseNumbers) { + if (Number(num) > Number.MAX_SAFE_INTEGER) { + validateBigIntAndThrow(`Number ${num.toString()} is greater than Number.MAX_SAFE_INTEGER.`); + } else if (Number(num) < Number.MIN_SAFE_INTEGER) { + validateBigIntAndThrow(`Number ${num.toString()} is lesser than Number.MIN_SAFE_INTEGER.`); + } + } + return { N: 
num.toString() }; +}, "convertToNumberAttr"); +var isBinary = /* @__PURE__ */ __name((data) => { + const binaryTypes = [ + "ArrayBuffer", + "Blob", + "Buffer", + "DataView", + "File", + "Int8Array", + "Uint8Array", + "Uint8ClampedArray", + "Int16Array", + "Uint16Array", + "Int32Array", + "Uint32Array", + "Float32Array", + "Float64Array", + "BigInt64Array", + "BigUint64Array" + ]; + if (data?.constructor) { + return binaryTypes.includes(data.constructor.name); + } + return false; +}, "isBinary"); + +// src/convertToNative.ts +var convertToNative = /* @__PURE__ */ __name((data, options) => { + for (const [key, value] of Object.entries(data)) { + if (value !== void 0) { + switch (key) { + case "NULL": + return null; + case "BOOL": + return Boolean(value); + case "N": + return convertNumber(value, options); + case "B": + return convertBinary(value); + case "S": + return convertString(value); + case "L": + return convertList(value, options); + case "M": + return convertMap(value, options); + case "NS": + return new Set(value.map((item) => convertNumber(item, options))); + case "BS": + return new Set(value.map(convertBinary)); + case "SS": + return new Set(value.map(convertString)); + default: + throw new Error(`Unsupported type passed: ${key}`); + } + } + } + throw new Error(`No value defined: ${JSON.stringify(data)}`); +}, "convertToNative"); +var convertNumber = /* @__PURE__ */ __name((numString, options) => { + if (typeof options?.wrapNumbers === "function") { + return options?.wrapNumbers(numString); + } + if (options?.wrapNumbers) { + return NumberValue.from(numString); + } + const num = Number(numString); + const infinityValues = [Number.POSITIVE_INFINITY, Number.NEGATIVE_INFINITY]; + const isLargeFiniteNumber = (num > Number.MAX_SAFE_INTEGER || num < Number.MIN_SAFE_INTEGER) && !infinityValues.includes(num); + if (isLargeFiniteNumber) { + if (typeof BigInt === "function") { + try { + return BigInt(numString); + } catch (error) { + throw new Error(`${numString} 
can't be converted to BigInt. Set options.wrapNumbers to get string value.`); + } + } else { + throw new Error(`${numString} is outside SAFE_INTEGER bounds. Set options.wrapNumbers to get string value.`); + } + } + return num; +}, "convertNumber"); +var convertString = /* @__PURE__ */ __name((stringValue) => stringValue, "convertString"); +var convertBinary = /* @__PURE__ */ __name((binaryValue) => binaryValue, "convertBinary"); +var convertList = /* @__PURE__ */ __name((list, options) => list.map((item) => convertToNative(item, options)), "convertList"); +var convertMap = /* @__PURE__ */ __name((map, options) => Object.entries(map).reduce( + (acc, [key, value]) => (acc[key] = convertToNative(value, options), acc), + {} +), "convertMap"); + +// src/marshall.ts +function marshall(data, options) { + const attributeValue = convertToAttr(data, options); + const [key, value] = Object.entries(attributeValue)[0]; + switch (key) { + case "M": + case "L": + return options?.convertTopLevelContainer ? 
attributeValue : value; + case "SS": + case "NS": + case "BS": + case "S": + case "N": + case "B": + case "NULL": + case "BOOL": + case "$unknown": + default: + return attributeValue; + } +} +__name(marshall, "marshall"); + +// src/unmarshall.ts +var unmarshall = /* @__PURE__ */ __name((data, options) => { + if (options?.convertWithoutMapWrapper) { + return convertToNative(data, options); + } + return convertToNative({ M: data }, options); +}, "unmarshall"); +// Annotate the CommonJS export names for ESM import in node: + +0 && (module.exports = { + NumberValueImpl, + convertToAttr, + convertToNative, + marshall, + unmarshall +}); + diff --git a/amplify/functions/downloadDocument/node_modules/@aws-sdk/util-dynamodb/dist-es/NumberValue.js b/amplify/functions/downloadDocument/node_modules/@aws-sdk/util-dynamodb/dist-es/NumberValue.js new file mode 100644 index 0000000..a9df9f8 --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@aws-sdk/util-dynamodb/dist-es/NumberValue.js @@ -0,0 +1,37 @@ +export class NumberValue { + value; + constructor(value) { + if (typeof value === "object" && "N" in value) { + this.value = String(value.N); + } + else { + this.value = String(value); + } + const valueOf = typeof value.valueOf() === "number" ? value.valueOf() : 0; + const imprecise = valueOf > Number.MAX_SAFE_INTEGER || + valueOf < Number.MIN_SAFE_INTEGER || + Math.abs(valueOf) === Infinity || + Number.isNaN(valueOf); + if (imprecise) { + throw new Error(`NumberValue should not be initialized with an imprecise number=${valueOf}. 
Use a string instead.`); + } + } + static from(value) { + return new NumberValue(value); + } + toAttributeValue() { + return { + N: this.toString(), + }; + } + toBigInt() { + const stringValue = this.toString(); + return BigInt(stringValue); + } + toString() { + return String(this.value); + } + valueOf() { + return this.toString(); + } +} diff --git a/amplify/functions/downloadDocument/node_modules/@aws-sdk/util-dynamodb/dist-es/convertToAttr.js b/amplify/functions/downloadDocument/node_modules/@aws-sdk/util-dynamodb/dist-es/convertToAttr.js new file mode 100644 index 0000000..62a888f --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@aws-sdk/util-dynamodb/dist-es/convertToAttr.js @@ -0,0 +1,175 @@ +import { NumberValue } from "./NumberValue"; +export const convertToAttr = (data, options) => { + if (data === undefined) { + throw new Error(`Pass options.removeUndefinedValues=true to remove undefined values from map/array/set.`); + } + else if (data === null && typeof data === "object") { + return convertToNullAttr(); + } + else if (Array.isArray(data)) { + return convertToListAttr(data, options); + } + else if (data?.constructor?.name === "Set") { + return convertToSetAttr(data, options); + } + else if (data?.constructor?.name === "Map") { + return convertToMapAttrFromIterable(data, options); + } + else if (data?.constructor?.name === "Object" || + (!data.constructor && typeof data === "object")) { + return convertToMapAttrFromEnumerableProps(data, options); + } + else if (isBinary(data)) { + if (data.length === 0 && options?.convertEmptyValues) { + return convertToNullAttr(); + } + return convertToBinaryAttr(data); + } + else if (typeof data === "boolean" || data?.constructor?.name === "Boolean") { + return { BOOL: data.valueOf() }; + } + else if (typeof data === "number" || data?.constructor?.name === "Number") { + return convertToNumberAttr(data, options); + } + else if (data instanceof NumberValue) { + return data.toAttributeValue(); + } + else 
if (typeof data === "bigint") { + return convertToBigIntAttr(data); + } + else if (typeof data === "string" || data?.constructor?.name === "String") { + if (data.length === 0 && options?.convertEmptyValues) { + return convertToNullAttr(); + } + return convertToStringAttr(data); + } + else if (options?.convertClassInstanceToMap && typeof data === "object") { + return convertToMapAttrFromEnumerableProps(data, options); + } + throw new Error(`Unsupported type passed: ${data}. Pass options.convertClassInstanceToMap=true to marshall typeof object as map attribute.`); +}; +const convertToListAttr = (data, options) => ({ + L: data + .filter((item) => typeof item !== "function" && + (!options?.removeUndefinedValues || (options?.removeUndefinedValues && item !== undefined))) + .map((item) => convertToAttr(item, options)), +}); +const convertToSetAttr = (set, options) => { + const setToOperate = options?.removeUndefinedValues ? new Set([...set].filter((value) => value !== undefined)) : set; + if (!options?.removeUndefinedValues && setToOperate.has(undefined)) { + throw new Error(`Pass options.removeUndefinedValues=true to remove undefined values from map/array/set.`); + } + if (setToOperate.size === 0) { + if (options?.convertEmptyValues) { + return convertToNullAttr(); + } + throw new Error(`Pass a non-empty set, or options.convertEmptyValues=true.`); + } + const item = setToOperate.values().next().value; + if (item instanceof NumberValue) { + return { + NS: Array.from(setToOperate).map((_) => _.toString()), + }; + } + else if (typeof item === "number") { + return { + NS: Array.from(setToOperate) + .map((num) => convertToNumberAttr(num, options)) + .map((item) => item.N), + }; + } + else if (typeof item === "bigint") { + return { + NS: Array.from(setToOperate) + .map(convertToBigIntAttr) + .map((item) => item.N), + }; + } + else if (typeof item === "string") { + return { + SS: Array.from(setToOperate) + .map(convertToStringAttr) + .map((item) => item.S), + }; + } + else if 
(isBinary(item)) { + return { + BS: Array.from(setToOperate) + .map(convertToBinaryAttr) + .map((item) => item.B), + }; + } + else { + throw new Error(`Only Number Set (NS), Binary Set (BS) or String Set (SS) are allowed.`); + } +}; +const convertToMapAttrFromIterable = (data, options) => ({ + M: ((data) => { + const map = {}; + for (const [key, value] of data) { + if (typeof value !== "function" && (value !== undefined || !options?.removeUndefinedValues)) { + map[key] = convertToAttr(value, options); + } + } + return map; + })(data), +}); +const convertToMapAttrFromEnumerableProps = (data, options) => ({ + M: ((data) => { + const map = {}; + for (const key in data) { + const value = data[key]; + if (typeof value !== "function" && (value !== undefined || !options?.removeUndefinedValues)) { + map[key] = convertToAttr(value, options); + } + } + return map; + })(data), +}); +const convertToNullAttr = () => ({ NULL: true }); +const convertToBinaryAttr = (data) => ({ B: data }); +const convertToStringAttr = (data) => ({ S: data.toString() }); +const convertToBigIntAttr = (data) => ({ N: data.toString() }); +const validateBigIntAndThrow = (errorPrefix) => { + throw new Error(`${errorPrefix} Use NumberValue from @aws-sdk/lib-dynamodb.`); +}; +const convertToNumberAttr = (num, options) => { + if ([Number.NaN, Number.POSITIVE_INFINITY, Number.NEGATIVE_INFINITY] + .map((val) => val.toString()) + .includes(num.toString())) { + throw new Error(`Special numeric value ${num.toString()} is not allowed`); + } + else if (!options?.allowImpreciseNumbers) { + if (Number(num) > Number.MAX_SAFE_INTEGER) { + validateBigIntAndThrow(`Number ${num.toString()} is greater than Number.MAX_SAFE_INTEGER.`); + } + else if (Number(num) < Number.MIN_SAFE_INTEGER) { + validateBigIntAndThrow(`Number ${num.toString()} is lesser than Number.MIN_SAFE_INTEGER.`); + } + } + return { N: num.toString() }; +}; +const isBinary = (data) => { + const binaryTypes = [ + "ArrayBuffer", + "Blob", + "Buffer", + 
"DataView", + "File", + "Int8Array", + "Uint8Array", + "Uint8ClampedArray", + "Int16Array", + "Uint16Array", + "Int32Array", + "Uint32Array", + "Float32Array", + "Float64Array", + "BigInt64Array", + "BigUint64Array", + ]; + if (data?.constructor) { + return binaryTypes.includes(data.constructor.name); + } + return false; +}; diff --git a/amplify/functions/downloadDocument/node_modules/@aws-sdk/util-dynamodb/dist-es/convertToNative.js b/amplify/functions/downloadDocument/node_modules/@aws-sdk/util-dynamodb/dist-es/convertToNative.js new file mode 100644 index 0000000..3e7b2c2 --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@aws-sdk/util-dynamodb/dist-es/convertToNative.js @@ -0,0 +1,61 @@ +import { NumberValue } from "./NumberValue"; +export const convertToNative = (data, options) => { + for (const [key, value] of Object.entries(data)) { + if (value !== undefined) { + switch (key) { + case "NULL": + return null; + case "BOOL": + return Boolean(value); + case "N": + return convertNumber(value, options); + case "B": + return convertBinary(value); + case "S": + return convertString(value); + case "L": + return convertList(value, options); + case "M": + return convertMap(value, options); + case "NS": + return new Set(value.map((item) => convertNumber(item, options))); + case "BS": + return new Set(value.map(convertBinary)); + case "SS": + return new Set(value.map(convertString)); + default: + throw new Error(`Unsupported type passed: ${key}`); + } + } + } + throw new Error(`No value defined: ${JSON.stringify(data)}`); +}; +const convertNumber = (numString, options) => { + if (typeof options?.wrapNumbers === "function") { + return options?.wrapNumbers(numString); + } + if (options?.wrapNumbers) { + return NumberValue.from(numString); + } + const num = Number(numString); + const infinityValues = [Number.POSITIVE_INFINITY, Number.NEGATIVE_INFINITY]; + const isLargeFiniteNumber = (num > Number.MAX_SAFE_INTEGER || num < Number.MIN_SAFE_INTEGER) && 
!infinityValues.includes(num); + if (isLargeFiniteNumber) { + if (typeof BigInt === "function") { + try { + return BigInt(numString); + } + catch (error) { + throw new Error(`${numString} can't be converted to BigInt. Set options.wrapNumbers to get string value.`); + } + } + else { + throw new Error(`${numString} is outside SAFE_INTEGER bounds. Set options.wrapNumbers to get string value.`); + } + } + return num; +}; +const convertString = (stringValue) => stringValue; +const convertBinary = (binaryValue) => binaryValue; +const convertList = (list, options) => list.map((item) => convertToNative(item, options)); +const convertMap = (map, options) => Object.entries(map).reduce((acc, [key, value]) => ((acc[key] = convertToNative(value, options)), acc), {}); diff --git a/amplify/functions/downloadDocument/node_modules/@aws-sdk/util-dynamodb/dist-es/index.js b/amplify/functions/downloadDocument/node_modules/@aws-sdk/util-dynamodb/dist-es/index.js new file mode 100644 index 0000000..6087756 --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@aws-sdk/util-dynamodb/dist-es/index.js @@ -0,0 +1,6 @@ +export { NumberValue as NumberValueImpl } from "./NumberValue"; +export * from "./convertToAttr"; +export * from "./convertToNative"; +export * from "./marshall"; +export * from "./models"; +export * from "./unmarshall"; diff --git a/amplify/functions/downloadDocument/node_modules/@aws-sdk/util-dynamodb/dist-es/marshall.js b/amplify/functions/downloadDocument/node_modules/@aws-sdk/util-dynamodb/dist-es/marshall.js new file mode 100644 index 0000000..9899c8a --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@aws-sdk/util-dynamodb/dist-es/marshall.js @@ -0,0 +1,21 @@ +import { convertToAttr } from "./convertToAttr"; +export function marshall(data, options) { + const attributeValue = convertToAttr(data, options); + const [key, value] = Object.entries(attributeValue)[0]; + switch (key) { + case "M": + case "L": + return 
options?.convertTopLevelContainer ? attributeValue : value; + case "SS": + case "NS": + case "BS": + case "S": + case "N": + case "B": + case "NULL": + case "BOOL": + case "$unknown": + default: + return attributeValue; + } +} diff --git a/amplify/functions/downloadDocument/node_modules/@aws-sdk/util-dynamodb/dist-es/models.js b/amplify/functions/downloadDocument/node_modules/@aws-sdk/util-dynamodb/dist-es/models.js new file mode 100644 index 0000000..cb0ff5c --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@aws-sdk/util-dynamodb/dist-es/models.js @@ -0,0 +1 @@ +export {}; diff --git a/amplify/functions/downloadDocument/node_modules/@aws-sdk/util-dynamodb/dist-es/unmarshall.js b/amplify/functions/downloadDocument/node_modules/@aws-sdk/util-dynamodb/dist-es/unmarshall.js new file mode 100644 index 0000000..6028656 --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@aws-sdk/util-dynamodb/dist-es/unmarshall.js @@ -0,0 +1,7 @@ +import { convertToNative } from "./convertToNative"; +export const unmarshall = (data, options) => { + if (options?.convertWithoutMapWrapper) { + return convertToNative(data, options); + } + return convertToNative({ M: data }, options); +}; diff --git a/amplify/functions/downloadDocument/node_modules/@aws-sdk/util-dynamodb/dist-types/NumberValue.d.ts b/amplify/functions/downloadDocument/node_modules/@aws-sdk/util-dynamodb/dist-types/NumberValue.d.ts new file mode 100644 index 0000000..c444ff8 --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@aws-sdk/util-dynamodb/dist-types/NumberValue.d.ts @@ -0,0 +1,55 @@ +import { NumberValue as INumberValue } from "./models"; +/** + * + * Class for storing DynamoDB numbers that exceed the scale of + * JavaScript's MAX_SAFE_INTEGER and MIN_SAFE_INTEGER, or the + * decimal precision limit. + * + * This class does not support mathematical operations in JavaScript. 
+ * Convert the contained string value to your application-specific + * large number implementation to perform mathematical operations. + * + * @public + * + */ +export declare class NumberValue implements INumberValue { + value: string; + /** + * This class does not validate that your string input is a valid number. + * + * @param value - a precise number, or any BigInt or string, or AttributeValue. + */ + constructor(value: number | Number | BigInt | string | { + N: string; + }); + /** + * This class does not validate that your string input is a valid number. + * + * @param value - a precise number, or any BigInt or string, or AttributeValue. + */ + static from(value: number | Number | BigInt | string | { + N: string; + }): NumberValue; + /** + * @returns the AttributeValue form for DynamoDB. + */ + toAttributeValue(): { + N: string; + }; + /** + * @returns BigInt representation. + * + * @throws SyntaxError if the string representation is not convertable to a BigInt. + */ + toBigInt(): bigint; + /** + * @override + * + * @returns string representation. This is the canonical format in DynamoDB. + */ + toString(): string; + /** + * @override + */ + valueOf(): string; +} diff --git a/amplify/functions/downloadDocument/node_modules/@aws-sdk/util-dynamodb/dist-types/convertToAttr.d.ts b/amplify/functions/downloadDocument/node_modules/@aws-sdk/util-dynamodb/dist-types/convertToAttr.d.ts new file mode 100644 index 0000000..7b0eae7 --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@aws-sdk/util-dynamodb/dist-types/convertToAttr.d.ts @@ -0,0 +1,10 @@ +import { AttributeValue } from "@aws-sdk/client-dynamodb"; +import { marshallOptions } from "./marshall"; +import { NativeAttributeValue } from "./models"; +/** + * Convert a JavaScript value to its equivalent DynamoDB AttributeValue type. + * + * @param data - The data to convert to a DynamoDB AttributeValue. + * @param options - An optional configuration object for `convertToAttr`. 
+ */ +export declare const convertToAttr: (data: NativeAttributeValue, options?: marshallOptions) => AttributeValue; diff --git a/amplify/functions/downloadDocument/node_modules/@aws-sdk/util-dynamodb/dist-types/convertToNative.d.ts b/amplify/functions/downloadDocument/node_modules/@aws-sdk/util-dynamodb/dist-types/convertToNative.d.ts new file mode 100644 index 0000000..4cbac6e --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@aws-sdk/util-dynamodb/dist-types/convertToNative.d.ts @@ -0,0 +1,10 @@ +import { AttributeValue } from "@aws-sdk/client-dynamodb"; +import type { NativeAttributeValue } from "./models"; +import { unmarshallOptions } from "./unmarshall"; +/** + * Convert a DynamoDB AttributeValue object to its equivalent JavaScript type. + * + * @param data - The DynamoDB record to convert to JavaScript type. + * @param options - An optional configuration object for `convertToNative`. + */ +export declare const convertToNative: (data: AttributeValue, options?: unmarshallOptions) => NativeAttributeValue; diff --git a/amplify/functions/downloadDocument/node_modules/@aws-sdk/util-dynamodb/dist-types/index.d.ts b/amplify/functions/downloadDocument/node_modules/@aws-sdk/util-dynamodb/dist-types/index.d.ts new file mode 100644 index 0000000..6087756 --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@aws-sdk/util-dynamodb/dist-types/index.d.ts @@ -0,0 +1,6 @@ +export { NumberValue as NumberValueImpl } from "./NumberValue"; +export * from "./convertToAttr"; +export * from "./convertToNative"; +export * from "./marshall"; +export * from "./models"; +export * from "./unmarshall"; diff --git a/amplify/functions/downloadDocument/node_modules/@aws-sdk/util-dynamodb/dist-types/marshall.d.ts b/amplify/functions/downloadDocument/node_modules/@aws-sdk/util-dynamodb/dist-types/marshall.d.ts new file mode 100644 index 0000000..a949240 --- /dev/null +++ 
b/amplify/functions/downloadDocument/node_modules/@aws-sdk/util-dynamodb/dist-types/marshall.d.ts @@ -0,0 +1,81 @@ +import { AttributeValue } from "@aws-sdk/client-dynamodb"; +import { NativeAttributeBinary, NativeAttributeValue } from "./models"; +import { NumberValue } from "./NumberValue"; +/** + * An optional configuration object for `marshall` + */ +export interface marshallOptions { + /** + * Whether to automatically convert empty strings, blobs, and sets to `null` + */ + convertEmptyValues?: boolean; + /** + * Whether to remove undefined values from JS arrays/Sets/objects + * when marshalling to DynamoDB lists/sets/maps respectively. + * + * A DynamoDB item is not itself considered a map. Only + * attributes of an item are examined. + */ + removeUndefinedValues?: boolean; + /** + * Whether to convert typeof object to map attribute. + */ + convertClassInstanceToMap?: boolean; + /** + * Whether to convert the top level container + * if it is a map or list. + * + * Default is true when using the DynamoDBDocumentClient, + * but false if directly using the marshall function (backwards compatibility). + */ + convertTopLevelContainer?: boolean; + /** + * Whether to allow numbers beyond Number.MAX_SAFE_INTEGER during marshalling. + * When set to true, allows numbers that may lose precision when converted to JavaScript numbers. + * When false (default), throws an error if a number exceeds Number.MAX_SAFE_INTEGER to prevent + * unintended loss of precision. Consider using the NumberValue type from @aws-sdk/lib-dynamodb + * for precise handling of large numbers. + */ + allowImpreciseNumbers?: boolean; +} +/** + * Convert a JavaScript object into a DynamoDB record. 
+ * + * @param data - The data to convert to a DynamoDB record + * @param options - An optional configuration object for `marshall` + * + */ +export declare function marshall(data: null, options?: marshallOptions): AttributeValue.NULLMember; +export declare function marshall(data: Set | Set | Set, options?: marshallOptions): AttributeValue.NSMember; +export declare function marshall(data: Set, options?: marshallOptions): AttributeValue.SSMember; +export declare function marshall(data: Set, options?: marshallOptions): AttributeValue.BSMember; +export declare function marshall(data: NativeAttributeBinary, options?: marshallOptions): AttributeValue.BMember; +export declare function marshall(data: boolean, options?: marshallOptions): AttributeValue.BOOLMember; +export declare function marshall(data: number | NumberValue | bigint, options?: marshallOptions): AttributeValue.NMember; +export declare function marshall(data: string, options?: marshallOptions): AttributeValue.SMember; +export declare function marshall(data: boolean, options?: marshallOptions): AttributeValue.BOOLMember; +export declare function marshall(data: NativeAttributeValue[], options: marshallOptions & O): AttributeValue.LMember; +export declare function marshall(data: NativeAttributeValue[], options: marshallOptions & O): AttributeValue[]; +export declare function marshall(data: NativeAttributeValue[], options: marshallOptions & O): AttributeValue[] | AttributeValue.LMember; +export declare function marshall(data: NativeAttributeValue[], options?: marshallOptions): AttributeValue[]; +export declare function marshall(data: Map | Record, options: marshallOptions & O): AttributeValue.MMember; +export declare function marshall(data: Map | Record, options: marshallOptions & O): Record; +export declare function marshall(data: Map | Record, options: marshallOptions & O): Record | AttributeValue.MMember; +export declare function marshall(data: Map | Record, options?: marshallOptions): Record; +export declare 
function marshall(data: any, options?: marshallOptions): any; +/** + * This signature will be unmatchable but is included for information. + */ +export declare function marshall(data: unknown, options?: marshallOptions): AttributeValue.$UnknownMember; diff --git a/amplify/functions/downloadDocument/node_modules/@aws-sdk/util-dynamodb/dist-types/models.d.ts b/amplify/functions/downloadDocument/node_modules/@aws-sdk/util-dynamodb/dist-types/models.d.ts new file mode 100644 index 0000000..7f0a963 --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@aws-sdk/util-dynamodb/dist-types/models.d.ts @@ -0,0 +1,40 @@ +/// +/// +/** + * A interface recognizable as a numeric value that stores the underlying number + * as a string. + * + * Intended to be a deserialization target for the DynamoDB Document Client when + * the `wrapNumbers` flag is set. This allows for numeric values that lose + * precision when converted to JavaScript's `number` type. + */ +export interface NumberValue { + readonly value: string; +} +/** + * @public + */ +export type NativeAttributeValue = NativeScalarAttributeValue | { + [key: string]: NativeAttributeValue; +} | NativeAttributeValue[] | Set | InstanceType<{ + new (...args: any[]): any; +}>; +/** + * @public + */ +export type NativeScalarAttributeValue = null | undefined | boolean | number | NumberValue | bigint | NativeAttributeBinary | string; +/** + * Declare File in case DOM is not added to the tsconfig lib causing + * File interface is not defined. For developers with DOM lib added, + * the File interface will be merged correctly. + */ +declare global { + interface File { + } +} +type IfDefined = {} extends T ? 
never : T; +/** + * @public + */ +export type NativeAttributeBinary = ArrayBuffer | IfDefined | IfDefined | DataView | IfDefined | Int8Array | Uint8Array | Uint8ClampedArray | Int16Array | Uint16Array | Int32Array | Uint32Array | Float32Array | Float64Array | BigInt64Array | BigUint64Array; +export {}; diff --git a/amplify/functions/downloadDocument/node_modules/@aws-sdk/util-dynamodb/dist-types/ts3.4/NumberValue.d.ts b/amplify/functions/downloadDocument/node_modules/@aws-sdk/util-dynamodb/dist-types/ts3.4/NumberValue.d.ts new file mode 100644 index 0000000..8180624 --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@aws-sdk/util-dynamodb/dist-types/ts3.4/NumberValue.d.ts @@ -0,0 +1,30 @@ +import { NumberValue as INumberValue } from "./models"; +export declare class NumberValue implements INumberValue { + value: string; + constructor( + value: + | number + | Number + | BigInt + | string + | { + N: string; + } + ); + static from( + value: + | number + | Number + | BigInt + | string + | { + N: string; + } + ): NumberValue; + toAttributeValue(): { + N: string; + }; + toBigInt(): bigint; + toString(): string; + valueOf(): string; +} diff --git a/amplify/functions/downloadDocument/node_modules/@aws-sdk/util-dynamodb/dist-types/ts3.4/convertToAttr.d.ts b/amplify/functions/downloadDocument/node_modules/@aws-sdk/util-dynamodb/dist-types/ts3.4/convertToAttr.d.ts new file mode 100644 index 0000000..d148d57 --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@aws-sdk/util-dynamodb/dist-types/ts3.4/convertToAttr.d.ts @@ -0,0 +1,7 @@ +import { AttributeValue } from "@aws-sdk/client-dynamodb"; +import { marshallOptions } from "./marshall"; +import { NativeAttributeValue } from "./models"; +export declare const convertToAttr: ( + data: NativeAttributeValue, + options?: marshallOptions +) => AttributeValue; diff --git a/amplify/functions/downloadDocument/node_modules/@aws-sdk/util-dynamodb/dist-types/ts3.4/convertToNative.d.ts 
b/amplify/functions/downloadDocument/node_modules/@aws-sdk/util-dynamodb/dist-types/ts3.4/convertToNative.d.ts new file mode 100644 index 0000000..c1a03f4 --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@aws-sdk/util-dynamodb/dist-types/ts3.4/convertToNative.d.ts @@ -0,0 +1,7 @@ +import { AttributeValue } from "@aws-sdk/client-dynamodb"; +import { NativeAttributeValue } from "./models"; +import { unmarshallOptions } from "./unmarshall"; +export declare const convertToNative: ( + data: AttributeValue, + options?: unmarshallOptions +) => NativeAttributeValue; diff --git a/amplify/functions/downloadDocument/node_modules/@aws-sdk/util-dynamodb/dist-types/ts3.4/index.d.ts b/amplify/functions/downloadDocument/node_modules/@aws-sdk/util-dynamodb/dist-types/ts3.4/index.d.ts new file mode 100644 index 0000000..6087756 --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@aws-sdk/util-dynamodb/dist-types/ts3.4/index.d.ts @@ -0,0 +1,6 @@ +export { NumberValue as NumberValueImpl } from "./NumberValue"; +export * from "./convertToAttr"; +export * from "./convertToNative"; +export * from "./marshall"; +export * from "./models"; +export * from "./unmarshall"; diff --git a/amplify/functions/downloadDocument/node_modules/@aws-sdk/util-dynamodb/dist-types/ts3.4/marshall.d.ts b/amplify/functions/downloadDocument/node_modules/@aws-sdk/util-dynamodb/dist-types/ts3.4/marshall.d.ts new file mode 100644 index 0000000..f81b876 --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@aws-sdk/util-dynamodb/dist-types/ts3.4/marshall.d.ts @@ -0,0 +1,112 @@ +import { AttributeValue } from "@aws-sdk/client-dynamodb"; +import { NativeAttributeBinary, NativeAttributeValue } from "./models"; +import { NumberValue } from "./NumberValue"; +export interface marshallOptions { + convertEmptyValues?: boolean; + removeUndefinedValues?: boolean; + convertClassInstanceToMap?: boolean; + convertTopLevelContainer?: boolean; + allowImpreciseNumbers?: boolean; +} 
+export declare function marshall( + data: null, + options?: marshallOptions +): AttributeValue.NULLMember; +export declare function marshall( + data: Set | Set | Set, + options?: marshallOptions +): AttributeValue.NSMember; +export declare function marshall( + data: Set, + options?: marshallOptions +): AttributeValue.SSMember; +export declare function marshall( + data: Set, + options?: marshallOptions +): AttributeValue.BSMember; +export declare function marshall( + data: NativeAttributeBinary, + options?: marshallOptions +): AttributeValue.BMember; +export declare function marshall( + data: boolean, + options?: marshallOptions +): AttributeValue.BOOLMember; +export declare function marshall( + data: number | NumberValue | bigint, + options?: marshallOptions +): AttributeValue.NMember; +export declare function marshall( + data: string, + options?: marshallOptions +): AttributeValue.SMember; +export declare function marshall( + data: boolean, + options?: marshallOptions +): AttributeValue.BOOLMember; +export declare function marshall< + O extends { + convertTopLevelContainer: true; + } +>( + data: NativeAttributeValue[], + options: marshallOptions & O +): AttributeValue.LMember; +export declare function marshall< + O extends { + convertTopLevelContainer: false; + } +>(data: NativeAttributeValue[], options: marshallOptions & O): AttributeValue[]; +export declare function marshall< + O extends { + convertTopLevelContainer: boolean; + } +>( + data: NativeAttributeValue[], + options: marshallOptions & O +): AttributeValue[] | AttributeValue.LMember; +export declare function marshall( + data: NativeAttributeValue[], + options?: marshallOptions +): AttributeValue[]; +export declare function marshall< + O extends { + convertTopLevelContainer: true; + } +>( + data: + | Map + | Record, + options: marshallOptions & O +): AttributeValue.MMember; +export declare function marshall< + O extends { + convertTopLevelContainer: false; + } +>( + data: + | Map + | Record, + options: 
marshallOptions & O +): Record; +export declare function marshall< + O extends { + convertTopLevelContainer: boolean; + } +>( + data: + | Map + | Record, + options: marshallOptions & O +): Record | AttributeValue.MMember; +export declare function marshall( + data: + | Map + | Record, + options?: marshallOptions +): Record; +export declare function marshall(data: any, options?: marshallOptions): any; +export declare function marshall( + data: unknown, + options?: marshallOptions +): AttributeValue.$UnknownMember; diff --git a/amplify/functions/downloadDocument/node_modules/@aws-sdk/util-dynamodb/dist-types/ts3.4/models.d.ts b/amplify/functions/downloadDocument/node_modules/@aws-sdk/util-dynamodb/dist-types/ts3.4/models.d.ts new file mode 100644 index 0000000..f2939b2 --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@aws-sdk/util-dynamodb/dist-types/ts3.4/models.d.ts @@ -0,0 +1,46 @@ +export interface NumberValue { + readonly value: string; +} +export type NativeAttributeValue = + | NativeScalarAttributeValue + | { + [key: string]: NativeAttributeValue; + } + | NativeAttributeValue[] + | Set< + number | bigint | NumberValue | string | NativeAttributeBinary | undefined + > + | InstanceType<{ + new (...args: any[]): any; + }>; +export type NativeScalarAttributeValue = + | null + | undefined + | boolean + | number + | NumberValue + | bigint + | NativeAttributeBinary + | string; +declare global { + interface File {} +} +type IfDefined = {} extends T ? 
never : T; +export type NativeAttributeBinary = + | ArrayBuffer + | IfDefined + | IfDefined + | DataView + | IfDefined + | Int8Array + | Uint8Array + | Uint8ClampedArray + | Int16Array + | Uint16Array + | Int32Array + | Uint32Array + | Float32Array + | Float64Array + | BigInt64Array + | BigUint64Array; +export {}; diff --git a/amplify/functions/downloadDocument/node_modules/@aws-sdk/util-dynamodb/dist-types/ts3.4/unmarshall.d.ts b/amplify/functions/downloadDocument/node_modules/@aws-sdk/util-dynamodb/dist-types/ts3.4/unmarshall.d.ts new file mode 100644 index 0000000..9d511e0 --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@aws-sdk/util-dynamodb/dist-types/ts3.4/unmarshall.d.ts @@ -0,0 +1,13 @@ +import { AttributeValue } from "@aws-sdk/client-dynamodb"; +import { NativeAttributeValue } from "./models"; +import { NumberValue } from "./NumberValue"; +export interface unmarshallOptions { + wrapNumbers?: + | boolean + | ((value: string) => number | bigint | NumberValue | any); + convertWithoutMapWrapper?: boolean; +} +export declare const unmarshall: ( + data: Record | AttributeValue, + options?: unmarshallOptions +) => Record; diff --git a/amplify/functions/downloadDocument/node_modules/@aws-sdk/util-dynamodb/dist-types/unmarshall.d.ts b/amplify/functions/downloadDocument/node_modules/@aws-sdk/util-dynamodb/dist-types/unmarshall.d.ts new file mode 100644 index 0000000..c477e32 --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@aws-sdk/util-dynamodb/dist-types/unmarshall.d.ts @@ -0,0 +1,31 @@ +import { AttributeValue } from "@aws-sdk/client-dynamodb"; +import { NativeAttributeValue } from "./models"; +import { NumberValue } from "./NumberValue"; +/** + * An optional configuration object for `convertToNative` + */ +export interface unmarshallOptions { + /** + * Whether to modify how numbers are unmarshalled from DynamoDB. + * When set to true, returns numbers as NumberValue instances instead of native JavaScript numbers. 
+ * This allows for the safe round-trip transport of numbers of arbitrary size. + * + * If a function is provided, it will be called with the string representation of numbers to handle + * custom conversions (e.g., using BigInt or decimal libraries). + */ + wrapNumbers?: boolean | ((value: string) => number | bigint | NumberValue | any); + /** + * When true, skip wrapping the data in `{ M: data }` before converting. + * + * Default is true when using the DynamoDBDocumentClient, + * but false if directly using the unmarshall function (backwards compatibility). + */ + convertWithoutMapWrapper?: boolean; +} +/** + * Convert a DynamoDB record into a JavaScript object. + * + * @param data - The DynamoDB record + * @param options - An optional configuration object for `unmarshall` + */ +export declare const unmarshall: (data: Record | AttributeValue, options?: unmarshallOptions) => Record; diff --git a/amplify/functions/downloadDocument/node_modules/@aws-sdk/util-dynamodb/package.json b/amplify/functions/downloadDocument/node_modules/@aws-sdk/util-dynamodb/package.json new file mode 100644 index 0000000..db385c7 --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@aws-sdk/util-dynamodb/package.json @@ -0,0 +1,57 @@ +{ + "name": "@aws-sdk/util-dynamodb", + "version": "3.803.0", + "scripts": { + "build": "concurrently 'yarn:build:cjs' 'yarn:build:es' 'yarn:build:types'", + "build:cjs": "node ../../scripts/compilation/inline util-dynamodb", + "build:es": "tsc -p tsconfig.es.json", + "build:include:deps": "lerna run --scope $npm_package_name --include-dependencies build", + "build:types": "tsc -p tsconfig.types.json", + "build:types:downlevel": "downlevel-dts dist-types dist-types/ts3.4", + "clean": "rimraf ./dist-* && rimraf *.tsbuildinfo", + "extract:docs": "api-extractor run --local", + "test": "yarn g:vitest run", + "test:watch": "yarn g:vitest watch" + }, + "main": "./dist-cjs/index.js", + "module": "./dist-es/index.js", + "types": 
"./dist-types/index.d.ts", + "author": { + "name": "AWS SDK for JavaScript Team", + "url": "https://aws.amazon.com/javascript/" + }, + "license": "Apache-2.0", + "dependencies": { + "tslib": "^2.6.2" + }, + "devDependencies": { + "@aws-sdk/client-dynamodb": "3.803.0", + "@tsconfig/recommended": "1.0.1", + "concurrently": "7.0.0", + "downlevel-dts": "0.10.1", + "rimraf": "3.0.2", + "typescript": "~5.2.2" + }, + "peerDependencies": { + "@aws-sdk/client-dynamodb": "^3.803.0" + }, + "engines": { + "node": ">=18.0.0" + }, + "typesVersions": { + "<4.0": { + "dist-types/*": [ + "dist-types/ts3.4/*" + ] + } + }, + "files": [ + "dist-*/**" + ], + "homepage": "https://github.com/aws/aws-sdk-js-v3/tree/main/packages/util-dynamodb", + "repository": { + "type": "git", + "url": "https://github.com/aws/aws-sdk-js-v3.git", + "directory": "packages/util-dynamodb" + } +} diff --git a/amplify/functions/downloadDocument/node_modules/@aws-sdk/util-endpoints/LICENSE b/amplify/functions/downloadDocument/node_modules/@aws-sdk/util-endpoints/LICENSE new file mode 100644 index 0000000..7b6491b --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@aws-sdk/util-endpoints/LICENSE @@ -0,0 +1,201 @@ +Apache License + Version 2.0, January 2004 + http://www.apache.org/licenses/ + + TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION + + 1. Definitions. + + "License" shall mean the terms and conditions for use, reproduction, + and distribution as defined by Sections 1 through 9 of this document. + + "Licensor" shall mean the copyright owner or entity authorized by + the copyright owner that is granting the License. + + "Legal Entity" shall mean the union of the acting entity and all + other entities that control, are controlled by, or are under common + control with that entity. 
For the purposes of this definition, + "control" means (i) the power, direct or indirect, to cause the + direction or management of such entity, whether by contract or + otherwise, or (ii) ownership of fifty percent (50%) or more of the + outstanding shares, or (iii) beneficial ownership of such entity. + + "You" (or "Your") shall mean an individual or Legal Entity + exercising permissions granted by this License. + + "Source" form shall mean the preferred form for making modifications, + including but not limited to software source code, documentation + source, and configuration files. + + "Object" form shall mean any form resulting from mechanical + transformation or translation of a Source form, including but + not limited to compiled object code, generated documentation, + and conversions to other media types. + + "Work" shall mean the work of authorship, whether in Source or + Object form, made available under the License, as indicated by a + copyright notice that is included in or attached to the work + (an example is provided in the Appendix below). + + "Derivative Works" shall mean any work, whether in Source or Object + form, that is based on (or derived from) the Work and for which the + editorial revisions, annotations, elaborations, or other modifications + represent, as a whole, an original work of authorship. For the purposes + of this License, Derivative Works shall not include works that remain + separable from, or merely link (or bind by name) to the interfaces of, + the Work and Derivative Works thereof. + + "Contribution" shall mean any work of authorship, including + the original version of the Work and any modifications or additions + to that Work or Derivative Works thereof, that is intentionally + submitted to Licensor for inclusion in the Work by the copyright owner + or by an individual or Legal Entity authorized to submit on behalf of + the copyright owner. 
For the purposes of this definition, "submitted" + means any form of electronic, verbal, or written communication sent + to the Licensor or its representatives, including but not limited to + communication on electronic mailing lists, source code control systems, + and issue tracking systems that are managed by, or on behalf of, the + Licensor for the purpose of discussing and improving the Work, but + excluding communication that is conspicuously marked or otherwise + designated in writing by the copyright owner as "Not a Contribution." + + "Contributor" shall mean Licensor and any individual or Legal Entity + on behalf of whom a Contribution has been received by Licensor and + subsequently incorporated within the Work. + + 2. Grant of Copyright License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + copyright license to reproduce, prepare Derivative Works of, + publicly display, publicly perform, sublicense, and distribute the + Work and such Derivative Works in Source or Object form. + + 3. Grant of Patent License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + (except as stated in this section) patent license to make, have made, + use, offer to sell, sell, import, and otherwise transfer the Work, + where such license applies only to those patent claims licensable + by such Contributor that are necessarily infringed by their + Contribution(s) alone or by combination of their Contribution(s) + with the Work to which such Contribution(s) was submitted. 
If You + institute patent litigation against any entity (including a + cross-claim or counterclaim in a lawsuit) alleging that the Work + or a Contribution incorporated within the Work constitutes direct + or contributory patent infringement, then any patent licenses + granted to You under this License for that Work shall terminate + as of the date such litigation is filed. + + 4. Redistribution. You may reproduce and distribute copies of the + Work or Derivative Works thereof in any medium, with or without + modifications, and in Source or Object form, provided that You + meet the following conditions: + + (a) You must give any other recipients of the Work or + Derivative Works a copy of this License; and + + (b) You must cause any modified files to carry prominent notices + stating that You changed the files; and + + (c) You must retain, in the Source form of any Derivative Works + that You distribute, all copyright, patent, trademark, and + attribution notices from the Source form of the Work, + excluding those notices that do not pertain to any part of + the Derivative Works; and + + (d) If the Work includes a "NOTICE" text file as part of its + distribution, then any Derivative Works that You distribute must + include a readable copy of the attribution notices contained + within such NOTICE file, excluding those notices that do not + pertain to any part of the Derivative Works, in at least one + of the following places: within a NOTICE text file distributed + as part of the Derivative Works; within the Source form or + documentation, if provided along with the Derivative Works; or, + within a display generated by the Derivative Works, if and + wherever such third-party notices normally appear. The contents + of the NOTICE file are for informational purposes only and + do not modify the License. 
You may add Your own attribution + notices within Derivative Works that You distribute, alongside + or as an addendum to the NOTICE text from the Work, provided + that such additional attribution notices cannot be construed + as modifying the License. + + You may add Your own copyright statement to Your modifications and + may provide additional or different license terms and conditions + for use, reproduction, or distribution of Your modifications, or + for any such Derivative Works as a whole, provided Your use, + reproduction, and distribution of the Work otherwise complies with + the conditions stated in this License. + + 5. Submission of Contributions. Unless You explicitly state otherwise, + any Contribution intentionally submitted for inclusion in the Work + by You to the Licensor shall be under the terms and conditions of + this License, without any additional terms or conditions. + Notwithstanding the above, nothing herein shall supersede or modify + the terms of any separate license agreement you may have executed + with Licensor regarding such Contributions. + + 6. Trademarks. This License does not grant permission to use the trade + names, trademarks, service marks, or product names of the Licensor, + except as required for reasonable and customary use in describing the + origin of the Work and reproducing the content of the NOTICE file. + + 7. Disclaimer of Warranty. Unless required by applicable law or + agreed to in writing, Licensor provides the Work (and each + Contributor provides its Contributions) on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or + implied, including, without limitation, any warranties or conditions + of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A + PARTICULAR PURPOSE. You are solely responsible for determining the + appropriateness of using or redistributing the Work and assume any + risks associated with Your exercise of permissions under this License. + + 8. 
Limitation of Liability. In no event and under no legal theory, + whether in tort (including negligence), contract, or otherwise, + unless required by applicable law (such as deliberate and grossly + negligent acts) or agreed to in writing, shall any Contributor be + liable to You for damages, including any direct, indirect, special, + incidental, or consequential damages of any character arising as a + result of this License or out of the use or inability to use the + Work (including but not limited to damages for loss of goodwill, + work stoppage, computer failure or malfunction, or any and all + other commercial damages or losses), even if such Contributor + has been advised of the possibility of such damages. + + 9. Accepting Warranty or Additional Liability. While redistributing + the Work or Derivative Works thereof, You may choose to offer, + and charge a fee for, acceptance of support, warranty, indemnity, + or other liability obligations and/or rights consistent with this + License. However, in accepting such obligations, You may act only + on Your own behalf and on Your sole responsibility, not on behalf + of any other Contributor, and only if You agree to indemnify, + defend, and hold each Contributor harmless for any liability + incurred by, or claims asserted against, such Contributor by reason + of your accepting any such warranty or additional liability. + + END OF TERMS AND CONDITIONS + + APPENDIX: How to apply the Apache License to your work. + + To apply the Apache License to your work, attach the following + boilerplate notice, with the fields enclosed by brackets "{}" + replaced with your own identifying information. (Don't include + the brackets!) The text should be enclosed in the appropriate + comment syntax for the file format. We also recommend that a + file or class name and description of purpose be included on the + same "printed page" as the copyright notice for easier + identification within third-party archives. 
+ + Copyright 2018-2020 Amazon.com, Inc. or its affiliates. All Rights Reserved. + + Licensed under the Apache License, Version 2.0 (the "License"); + you may not use this file except in compliance with the License. + You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + + Unless required by applicable law or agreed to in writing, software + distributed under the License is distributed on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + See the License for the specific language governing permissions and + limitations under the License. \ No newline at end of file diff --git a/amplify/functions/downloadDocument/node_modules/@aws-sdk/util-endpoints/README.md b/amplify/functions/downloadDocument/node_modules/@aws-sdk/util-endpoints/README.md new file mode 100644 index 0000000..641f54a --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@aws-sdk/util-endpoints/README.md @@ -0,0 +1,6 @@ +# @aws-sdk/util-endpoints + +[![NPM version](https://img.shields.io/npm/v/@aws-sdk/util-endpoints/latest.svg)](https://www.npmjs.com/package/@aws-sdk/util-endpoints) +[![NPM downloads](https://img.shields.io/npm/dm/@aws-sdk/util-endpoints.svg)](https://www.npmjs.com/package/@aws-sdk/util-endpoints) + +> An internal package diff --git a/amplify/functions/downloadDocument/node_modules/@aws-sdk/util-endpoints/dist-cjs/index.js b/amplify/functions/downloadDocument/node_modules/@aws-sdk/util-endpoints/dist-cjs/index.js new file mode 100644 index 0000000..ee0a932 --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@aws-sdk/util-endpoints/dist-cjs/index.js @@ -0,0 +1,450 @@ +"use strict"; +var __defProp = Object.defineProperty; +var __getOwnPropDesc = Object.getOwnPropertyDescriptor; +var __getOwnPropNames = Object.getOwnPropertyNames; +var __hasOwnProp = Object.prototype.hasOwnProperty; +var __name = (target, value) => __defProp(target, "name", { value, configurable: true }); +var 
__export = (target, all) => { + for (var name in all) + __defProp(target, name, { get: all[name], enumerable: true }); +}; +var __copyProps = (to, from, except, desc) => { + if (from && typeof from === "object" || typeof from === "function") { + for (let key of __getOwnPropNames(from)) + if (!__hasOwnProp.call(to, key) && key !== except) + __defProp(to, key, { get: () => from[key], enumerable: !(desc = __getOwnPropDesc(from, key)) || desc.enumerable }); + } + return to; +}; +var __toCommonJS = (mod) => __copyProps(__defProp({}, "__esModule", { value: true }), mod); + +// src/index.ts +var index_exports = {}; +__export(index_exports, { + ConditionObject: () => import_util_endpoints.ConditionObject, + DeprecatedObject: () => import_util_endpoints.DeprecatedObject, + EndpointError: () => import_util_endpoints.EndpointError, + EndpointObject: () => import_util_endpoints.EndpointObject, + EndpointObjectHeaders: () => import_util_endpoints.EndpointObjectHeaders, + EndpointObjectProperties: () => import_util_endpoints.EndpointObjectProperties, + EndpointParams: () => import_util_endpoints.EndpointParams, + EndpointResolverOptions: () => import_util_endpoints.EndpointResolverOptions, + EndpointRuleObject: () => import_util_endpoints.EndpointRuleObject, + ErrorRuleObject: () => import_util_endpoints.ErrorRuleObject, + EvaluateOptions: () => import_util_endpoints.EvaluateOptions, + Expression: () => import_util_endpoints.Expression, + FunctionArgv: () => import_util_endpoints.FunctionArgv, + FunctionObject: () => import_util_endpoints.FunctionObject, + FunctionReturn: () => import_util_endpoints.FunctionReturn, + ParameterObject: () => import_util_endpoints.ParameterObject, + ReferenceObject: () => import_util_endpoints.ReferenceObject, + ReferenceRecord: () => import_util_endpoints.ReferenceRecord, + RuleSetObject: () => import_util_endpoints.RuleSetObject, + RuleSetRules: () => import_util_endpoints.RuleSetRules, + TreeRuleObject: () => 
import_util_endpoints.TreeRuleObject, + awsEndpointFunctions: () => awsEndpointFunctions, + getUserAgentPrefix: () => getUserAgentPrefix, + isIpAddress: () => import_util_endpoints.isIpAddress, + partition: () => partition, + resolveEndpoint: () => import_util_endpoints.resolveEndpoint, + setPartitionInfo: () => setPartitionInfo, + useDefaultPartitionInfo: () => useDefaultPartitionInfo +}); +module.exports = __toCommonJS(index_exports); + +// src/aws.ts + + +// src/lib/aws/isVirtualHostableS3Bucket.ts + + +// src/lib/isIpAddress.ts +var import_util_endpoints = require("@smithy/util-endpoints"); + +// src/lib/aws/isVirtualHostableS3Bucket.ts +var isVirtualHostableS3Bucket = /* @__PURE__ */ __name((value, allowSubDomains = false) => { + if (allowSubDomains) { + for (const label of value.split(".")) { + if (!isVirtualHostableS3Bucket(label)) { + return false; + } + } + return true; + } + if (!(0, import_util_endpoints.isValidHostLabel)(value)) { + return false; + } + if (value.length < 3 || value.length > 63) { + return false; + } + if (value !== value.toLowerCase()) { + return false; + } + if ((0, import_util_endpoints.isIpAddress)(value)) { + return false; + } + return true; +}, "isVirtualHostableS3Bucket"); + +// src/lib/aws/parseArn.ts +var ARN_DELIMITER = ":"; +var RESOURCE_DELIMITER = "/"; +var parseArn = /* @__PURE__ */ __name((value) => { + const segments = value.split(ARN_DELIMITER); + if (segments.length < 6) return null; + const [arn, partition2, service, region, accountId, ...resourcePath] = segments; + if (arn !== "arn" || partition2 === "" || service === "" || resourcePath.join(ARN_DELIMITER) === "") return null; + const resourceId = resourcePath.map((resource) => resource.split(RESOURCE_DELIMITER)).flat(); + return { + partition: partition2, + service, + region, + accountId, + resourceId + }; +}, "parseArn"); + +// src/lib/aws/partitions.json +var partitions_default = { + partitions: [{ + id: "aws", + outputs: { + dnsSuffix: "amazonaws.com", + 
dualStackDnsSuffix: "api.aws", + implicitGlobalRegion: "us-east-1", + name: "aws", + supportsDualStack: true, + supportsFIPS: true + }, + regionRegex: "^(us|eu|ap|sa|ca|me|af|il|mx)\\-\\w+\\-\\d+$", + regions: { + "af-south-1": { + description: "Africa (Cape Town)" + }, + "ap-east-1": { + description: "Asia Pacific (Hong Kong)" + }, + "ap-northeast-1": { + description: "Asia Pacific (Tokyo)" + }, + "ap-northeast-2": { + description: "Asia Pacific (Seoul)" + }, + "ap-northeast-3": { + description: "Asia Pacific (Osaka)" + }, + "ap-south-1": { + description: "Asia Pacific (Mumbai)" + }, + "ap-south-2": { + description: "Asia Pacific (Hyderabad)" + }, + "ap-southeast-1": { + description: "Asia Pacific (Singapore)" + }, + "ap-southeast-2": { + description: "Asia Pacific (Sydney)" + }, + "ap-southeast-3": { + description: "Asia Pacific (Jakarta)" + }, + "ap-southeast-4": { + description: "Asia Pacific (Melbourne)" + }, + "ap-southeast-5": { + description: "Asia Pacific (Malaysia)" + }, + "ap-southeast-7": { + description: "Asia Pacific (Thailand)" + }, + "aws-global": { + description: "AWS Standard global region" + }, + "ca-central-1": { + description: "Canada (Central)" + }, + "ca-west-1": { + description: "Canada West (Calgary)" + }, + "eu-central-1": { + description: "Europe (Frankfurt)" + }, + "eu-central-2": { + description: "Europe (Zurich)" + }, + "eu-north-1": { + description: "Europe (Stockholm)" + }, + "eu-south-1": { + description: "Europe (Milan)" + }, + "eu-south-2": { + description: "Europe (Spain)" + }, + "eu-west-1": { + description: "Europe (Ireland)" + }, + "eu-west-2": { + description: "Europe (London)" + }, + "eu-west-3": { + description: "Europe (Paris)" + }, + "il-central-1": { + description: "Israel (Tel Aviv)" + }, + "me-central-1": { + description: "Middle East (UAE)" + }, + "me-south-1": { + description: "Middle East (Bahrain)" + }, + "mx-central-1": { + description: "Mexico (Central)" + }, + "sa-east-1": { + description: "South America (Sao 
Paulo)" + }, + "us-east-1": { + description: "US East (N. Virginia)" + }, + "us-east-2": { + description: "US East (Ohio)" + }, + "us-west-1": { + description: "US West (N. California)" + }, + "us-west-2": { + description: "US West (Oregon)" + } + } + }, { + id: "aws-cn", + outputs: { + dnsSuffix: "amazonaws.com.cn", + dualStackDnsSuffix: "api.amazonwebservices.com.cn", + implicitGlobalRegion: "cn-northwest-1", + name: "aws-cn", + supportsDualStack: true, + supportsFIPS: true + }, + regionRegex: "^cn\\-\\w+\\-\\d+$", + regions: { + "aws-cn-global": { + description: "AWS China global region" + }, + "cn-north-1": { + description: "China (Beijing)" + }, + "cn-northwest-1": { + description: "China (Ningxia)" + } + } + }, { + id: "aws-us-gov", + outputs: { + dnsSuffix: "amazonaws.com", + dualStackDnsSuffix: "api.aws", + implicitGlobalRegion: "us-gov-west-1", + name: "aws-us-gov", + supportsDualStack: true, + supportsFIPS: true + }, + regionRegex: "^us\\-gov\\-\\w+\\-\\d+$", + regions: { + "aws-us-gov-global": { + description: "AWS GovCloud (US) global region" + }, + "us-gov-east-1": { + description: "AWS GovCloud (US-East)" + }, + "us-gov-west-1": { + description: "AWS GovCloud (US-West)" + } + } + }, { + id: "aws-iso", + outputs: { + dnsSuffix: "c2s.ic.gov", + dualStackDnsSuffix: "c2s.ic.gov", + implicitGlobalRegion: "us-iso-east-1", + name: "aws-iso", + supportsDualStack: false, + supportsFIPS: true + }, + regionRegex: "^us\\-iso\\-\\w+\\-\\d+$", + regions: { + "aws-iso-global": { + description: "AWS ISO (US) global region" + }, + "us-iso-east-1": { + description: "US ISO East" + }, + "us-iso-west-1": { + description: "US ISO WEST" + } + } + }, { + id: "aws-iso-b", + outputs: { + dnsSuffix: "sc2s.sgov.gov", + dualStackDnsSuffix: "sc2s.sgov.gov", + implicitGlobalRegion: "us-isob-east-1", + name: "aws-iso-b", + supportsDualStack: false, + supportsFIPS: true + }, + regionRegex: "^us\\-isob\\-\\w+\\-\\d+$", + regions: { + "aws-iso-b-global": { + description: "AWS ISOB 
(US) global region" + }, + "us-isob-east-1": { + description: "US ISOB East (Ohio)" + } + } + }, { + id: "aws-iso-e", + outputs: { + dnsSuffix: "cloud.adc-e.uk", + dualStackDnsSuffix: "cloud.adc-e.uk", + implicitGlobalRegion: "eu-isoe-west-1", + name: "aws-iso-e", + supportsDualStack: false, + supportsFIPS: true + }, + regionRegex: "^eu\\-isoe\\-\\w+\\-\\d+$", + regions: { + "aws-iso-e-global": { + description: "AWS ISOE (Europe) global region" + }, + "eu-isoe-west-1": { + description: "EU ISOE West" + } + } + }, { + id: "aws-iso-f", + outputs: { + dnsSuffix: "csp.hci.ic.gov", + dualStackDnsSuffix: "csp.hci.ic.gov", + implicitGlobalRegion: "us-isof-south-1", + name: "aws-iso-f", + supportsDualStack: false, + supportsFIPS: true + }, + regionRegex: "^us\\-isof\\-\\w+\\-\\d+$", + regions: { + "aws-iso-f-global": { + description: "AWS ISOF global region" + }, + "us-isof-east-1": { + description: "US ISOF EAST" + }, + "us-isof-south-1": { + description: "US ISOF SOUTH" + } + } + }, { + id: "aws-eusc", + outputs: { + dnsSuffix: "amazonaws.eu", + dualStackDnsSuffix: "amazonaws.eu", + implicitGlobalRegion: "eusc-de-east-1", + name: "aws-eusc", + supportsDualStack: false, + supportsFIPS: true + }, + regionRegex: "^eusc\\-(de)\\-\\w+\\-\\d+$", + regions: { + "eusc-de-east-1": { + description: "EU (Germany)" + } + } + }], + version: "1.1" +}; + +// src/lib/aws/partition.ts +var selectedPartitionsInfo = partitions_default; +var selectedUserAgentPrefix = ""; +var partition = /* @__PURE__ */ __name((value) => { + const { partitions } = selectedPartitionsInfo; + for (const partition2 of partitions) { + const { regions, outputs } = partition2; + for (const [region, regionData] of Object.entries(regions)) { + if (region === value) { + return { + ...outputs, + ...regionData + }; + } + } + } + for (const partition2 of partitions) { + const { regionRegex, outputs } = partition2; + if (new RegExp(regionRegex).test(value)) { + return { + ...outputs + }; + } + } + const DEFAULT_PARTITION 
= partitions.find((partition2) => partition2.id === "aws"); + if (!DEFAULT_PARTITION) { + throw new Error( + "Provided region was not found in the partition array or regex, and default partition with id 'aws' doesn't exist." + ); + } + return { + ...DEFAULT_PARTITION.outputs + }; +}, "partition"); +var setPartitionInfo = /* @__PURE__ */ __name((partitionsInfo, userAgentPrefix = "") => { + selectedPartitionsInfo = partitionsInfo; + selectedUserAgentPrefix = userAgentPrefix; +}, "setPartitionInfo"); +var useDefaultPartitionInfo = /* @__PURE__ */ __name(() => { + setPartitionInfo(partitions_default, ""); +}, "useDefaultPartitionInfo"); +var getUserAgentPrefix = /* @__PURE__ */ __name(() => selectedUserAgentPrefix, "getUserAgentPrefix"); + +// src/aws.ts +var awsEndpointFunctions = { + isVirtualHostableS3Bucket, + parseArn, + partition +}; +import_util_endpoints.customEndpointFunctions.aws = awsEndpointFunctions; + +// src/resolveEndpoint.ts + + +// src/types/EndpointError.ts + + +// src/types/EndpointRuleObject.ts + + +// src/types/ErrorRuleObject.ts + + +// src/types/RuleSetObject.ts + + +// src/types/TreeRuleObject.ts + + +// src/types/shared.ts + +// Annotate the CommonJS export names for ESM import in node: + +0 && (module.exports = { + awsEndpointFunctions, + partition, + setPartitionInfo, + useDefaultPartitionInfo, + getUserAgentPrefix, + isIpAddress, + resolveEndpoint, + EndpointError +}); + diff --git a/amplify/functions/downloadDocument/node_modules/@aws-sdk/util-endpoints/dist-cjs/lib/aws/partitions.json b/amplify/functions/downloadDocument/node_modules/@aws-sdk/util-endpoints/dist-cjs/lib/aws/partitions.json new file mode 100644 index 0000000..a11705a --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@aws-sdk/util-endpoints/dist-cjs/lib/aws/partitions.json @@ -0,0 +1,258 @@ +{ + "partitions": [{ + "id": "aws", + "outputs": { + "dnsSuffix": "amazonaws.com", + "dualStackDnsSuffix": "api.aws", + "implicitGlobalRegion": "us-east-1", + "name": 
"aws", + "supportsDualStack": true, + "supportsFIPS": true + }, + "regionRegex": "^(us|eu|ap|sa|ca|me|af|il|mx)\\-\\w+\\-\\d+$", + "regions": { + "af-south-1": { + "description": "Africa (Cape Town)" + }, + "ap-east-1": { + "description": "Asia Pacific (Hong Kong)" + }, + "ap-northeast-1": { + "description": "Asia Pacific (Tokyo)" + }, + "ap-northeast-2": { + "description": "Asia Pacific (Seoul)" + }, + "ap-northeast-3": { + "description": "Asia Pacific (Osaka)" + }, + "ap-south-1": { + "description": "Asia Pacific (Mumbai)" + }, + "ap-south-2": { + "description": "Asia Pacific (Hyderabad)" + }, + "ap-southeast-1": { + "description": "Asia Pacific (Singapore)" + }, + "ap-southeast-2": { + "description": "Asia Pacific (Sydney)" + }, + "ap-southeast-3": { + "description": "Asia Pacific (Jakarta)" + }, + "ap-southeast-4": { + "description": "Asia Pacific (Melbourne)" + }, + "ap-southeast-5": { + "description": "Asia Pacific (Malaysia)" + }, + "ap-southeast-7": { + "description": "Asia Pacific (Thailand)" + }, + "aws-global": { + "description": "AWS Standard global region" + }, + "ca-central-1": { + "description": "Canada (Central)" + }, + "ca-west-1": { + "description": "Canada West (Calgary)" + }, + "eu-central-1": { + "description": "Europe (Frankfurt)" + }, + "eu-central-2": { + "description": "Europe (Zurich)" + }, + "eu-north-1": { + "description": "Europe (Stockholm)" + }, + "eu-south-1": { + "description": "Europe (Milan)" + }, + "eu-south-2": { + "description": "Europe (Spain)" + }, + "eu-west-1": { + "description": "Europe (Ireland)" + }, + "eu-west-2": { + "description": "Europe (London)" + }, + "eu-west-3": { + "description": "Europe (Paris)" + }, + "il-central-1": { + "description": "Israel (Tel Aviv)" + }, + "me-central-1": { + "description": "Middle East (UAE)" + }, + "me-south-1": { + "description": "Middle East (Bahrain)" + }, + "mx-central-1": { + "description": "Mexico (Central)" + }, + "sa-east-1": { + "description": "South America (Sao Paulo)" + }, 
+ "us-east-1": { + "description": "US East (N. Virginia)" + }, + "us-east-2": { + "description": "US East (Ohio)" + }, + "us-west-1": { + "description": "US West (N. California)" + }, + "us-west-2": { + "description": "US West (Oregon)" + } + } + }, { + "id": "aws-cn", + "outputs": { + "dnsSuffix": "amazonaws.com.cn", + "dualStackDnsSuffix": "api.amazonwebservices.com.cn", + "implicitGlobalRegion": "cn-northwest-1", + "name": "aws-cn", + "supportsDualStack": true, + "supportsFIPS": true + }, + "regionRegex": "^cn\\-\\w+\\-\\d+$", + "regions": { + "aws-cn-global": { + "description": "AWS China global region" + }, + "cn-north-1": { + "description": "China (Beijing)" + }, + "cn-northwest-1": { + "description": "China (Ningxia)" + } + } + }, { + "id": "aws-us-gov", + "outputs": { + "dnsSuffix": "amazonaws.com", + "dualStackDnsSuffix": "api.aws", + "implicitGlobalRegion": "us-gov-west-1", + "name": "aws-us-gov", + "supportsDualStack": true, + "supportsFIPS": true + }, + "regionRegex": "^us\\-gov\\-\\w+\\-\\d+$", + "regions": { + "aws-us-gov-global": { + "description": "AWS GovCloud (US) global region" + }, + "us-gov-east-1": { + "description": "AWS GovCloud (US-East)" + }, + "us-gov-west-1": { + "description": "AWS GovCloud (US-West)" + } + } + }, { + "id": "aws-iso", + "outputs": { + "dnsSuffix": "c2s.ic.gov", + "dualStackDnsSuffix": "c2s.ic.gov", + "implicitGlobalRegion": "us-iso-east-1", + "name": "aws-iso", + "supportsDualStack": false, + "supportsFIPS": true + }, + "regionRegex": "^us\\-iso\\-\\w+\\-\\d+$", + "regions": { + "aws-iso-global": { + "description": "AWS ISO (US) global region" + }, + "us-iso-east-1": { + "description": "US ISO East" + }, + "us-iso-west-1": { + "description": "US ISO WEST" + } + } + }, { + "id": "aws-iso-b", + "outputs": { + "dnsSuffix": "sc2s.sgov.gov", + "dualStackDnsSuffix": "sc2s.sgov.gov", + "implicitGlobalRegion": "us-isob-east-1", + "name": "aws-iso-b", + "supportsDualStack": false, + "supportsFIPS": true + }, + "regionRegex": 
"^us\\-isob\\-\\w+\\-\\d+$", + "regions": { + "aws-iso-b-global": { + "description": "AWS ISOB (US) global region" + }, + "us-isob-east-1": { + "description": "US ISOB East (Ohio)" + } + } + }, { + "id": "aws-iso-e", + "outputs": { + "dnsSuffix": "cloud.adc-e.uk", + "dualStackDnsSuffix": "cloud.adc-e.uk", + "implicitGlobalRegion": "eu-isoe-west-1", + "name": "aws-iso-e", + "supportsDualStack": false, + "supportsFIPS": true + }, + "regionRegex": "^eu\\-isoe\\-\\w+\\-\\d+$", + "regions": { + "aws-iso-e-global": { + "description": "AWS ISOE (Europe) global region" + }, + "eu-isoe-west-1": { + "description": "EU ISOE West" + } + } + }, { + "id": "aws-iso-f", + "outputs": { + "dnsSuffix": "csp.hci.ic.gov", + "dualStackDnsSuffix": "csp.hci.ic.gov", + "implicitGlobalRegion": "us-isof-south-1", + "name": "aws-iso-f", + "supportsDualStack": false, + "supportsFIPS": true + }, + "regionRegex": "^us\\-isof\\-\\w+\\-\\d+$", + "regions": { + "aws-iso-f-global": { + "description": "AWS ISOF global region" + }, + "us-isof-east-1": { + "description": "US ISOF EAST" + }, + "us-isof-south-1": { + "description": "US ISOF SOUTH" + } + } + }, { + "id": "aws-eusc", + "outputs": { + "dnsSuffix": "amazonaws.eu", + "dualStackDnsSuffix": "amazonaws.eu", + "implicitGlobalRegion": "eusc-de-east-1", + "name": "aws-eusc", + "supportsDualStack": false, + "supportsFIPS": true + }, + "regionRegex": "^eusc\\-(de)\\-\\w+\\-\\d+$", + "regions": { + "eusc-de-east-1": { + "description": "EU (Germany)" + } + } + }], + "version": "1.1" +} diff --git a/amplify/functions/downloadDocument/node_modules/@aws-sdk/util-endpoints/dist-es/aws.js b/amplify/functions/downloadDocument/node_modules/@aws-sdk/util-endpoints/dist-es/aws.js new file mode 100644 index 0000000..49a408e --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@aws-sdk/util-endpoints/dist-es/aws.js @@ -0,0 +1,10 @@ +import { customEndpointFunctions } from "@smithy/util-endpoints"; +import { isVirtualHostableS3Bucket } from 
"./lib/aws/isVirtualHostableS3Bucket"; +import { parseArn } from "./lib/aws/parseArn"; +import { partition } from "./lib/aws/partition"; +export const awsEndpointFunctions = { + isVirtualHostableS3Bucket: isVirtualHostableS3Bucket, + parseArn: parseArn, + partition: partition, +}; +customEndpointFunctions.aws = awsEndpointFunctions; diff --git a/amplify/functions/downloadDocument/node_modules/@aws-sdk/util-endpoints/dist-es/index.js b/amplify/functions/downloadDocument/node_modules/@aws-sdk/util-endpoints/dist-es/index.js new file mode 100644 index 0000000..d046d90 --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@aws-sdk/util-endpoints/dist-es/index.js @@ -0,0 +1,5 @@ +export * from "./aws"; +export * from "./lib/aws/partition"; +export * from "./lib/isIpAddress"; +export * from "./resolveEndpoint"; +export * from "./types"; diff --git a/amplify/functions/downloadDocument/node_modules/@aws-sdk/util-endpoints/dist-es/lib/aws/index.js b/amplify/functions/downloadDocument/node_modules/@aws-sdk/util-endpoints/dist-es/lib/aws/index.js new file mode 100644 index 0000000..03be049 --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@aws-sdk/util-endpoints/dist-es/lib/aws/index.js @@ -0,0 +1,3 @@ +export * from "./isVirtualHostableS3Bucket"; +export * from "./parseArn"; +export * from "./partition"; diff --git a/amplify/functions/downloadDocument/node_modules/@aws-sdk/util-endpoints/dist-es/lib/aws/isVirtualHostableS3Bucket.js b/amplify/functions/downloadDocument/node_modules/@aws-sdk/util-endpoints/dist-es/lib/aws/isVirtualHostableS3Bucket.js new file mode 100644 index 0000000..f2bacc0 --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@aws-sdk/util-endpoints/dist-es/lib/aws/isVirtualHostableS3Bucket.js @@ -0,0 +1,25 @@ +import { isValidHostLabel } from "@smithy/util-endpoints"; +import { isIpAddress } from "../isIpAddress"; +export const isVirtualHostableS3Bucket = (value, allowSubDomains = false) => { + if 
(allowSubDomains) { + for (const label of value.split(".")) { + if (!isVirtualHostableS3Bucket(label)) { + return false; + } + } + return true; + } + if (!isValidHostLabel(value)) { + return false; + } + if (value.length < 3 || value.length > 63) { + return false; + } + if (value !== value.toLowerCase()) { + return false; + } + if (isIpAddress(value)) { + return false; + } + return true; +}; diff --git a/amplify/functions/downloadDocument/node_modules/@aws-sdk/util-endpoints/dist-es/lib/aws/parseArn.js b/amplify/functions/downloadDocument/node_modules/@aws-sdk/util-endpoints/dist-es/lib/aws/parseArn.js new file mode 100644 index 0000000..6b12887 --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@aws-sdk/util-endpoints/dist-es/lib/aws/parseArn.js @@ -0,0 +1,18 @@ +const ARN_DELIMITER = ":"; +const RESOURCE_DELIMITER = "/"; +export const parseArn = (value) => { + const segments = value.split(ARN_DELIMITER); + if (segments.length < 6) + return null; + const [arn, partition, service, region, accountId, ...resourcePath] = segments; + if (arn !== "arn" || partition === "" || service === "" || resourcePath.join(ARN_DELIMITER) === "") + return null; + const resourceId = resourcePath.map((resource) => resource.split(RESOURCE_DELIMITER)).flat(); + return { + partition, + service, + region, + accountId, + resourceId, + }; +}; diff --git a/amplify/functions/downloadDocument/node_modules/@aws-sdk/util-endpoints/dist-es/lib/aws/partition.js b/amplify/functions/downloadDocument/node_modules/@aws-sdk/util-endpoints/dist-es/lib/aws/partition.js new file mode 100644 index 0000000..8d39d81 --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@aws-sdk/util-endpoints/dist-es/lib/aws/partition.js @@ -0,0 +1,41 @@ +import partitionsInfo from "./partitions.json"; +let selectedPartitionsInfo = partitionsInfo; +let selectedUserAgentPrefix = ""; +export const partition = (value) => { + const { partitions } = selectedPartitionsInfo; + for (const partition of 
partitions) { + const { regions, outputs } = partition; + for (const [region, regionData] of Object.entries(regions)) { + if (region === value) { + return { + ...outputs, + ...regionData, + }; + } + } + } + for (const partition of partitions) { + const { regionRegex, outputs } = partition; + if (new RegExp(regionRegex).test(value)) { + return { + ...outputs, + }; + } + } + const DEFAULT_PARTITION = partitions.find((partition) => partition.id === "aws"); + if (!DEFAULT_PARTITION) { + throw new Error("Provided region was not found in the partition array or regex," + + " and default partition with id 'aws' doesn't exist."); + } + return { + ...DEFAULT_PARTITION.outputs, + }; +}; +export const setPartitionInfo = (partitionsInfo, userAgentPrefix = "") => { + selectedPartitionsInfo = partitionsInfo; + selectedUserAgentPrefix = userAgentPrefix; +}; +export const useDefaultPartitionInfo = () => { + setPartitionInfo(partitionsInfo, ""); +}; +export const getUserAgentPrefix = () => selectedUserAgentPrefix; diff --git a/amplify/functions/downloadDocument/node_modules/@aws-sdk/util-endpoints/dist-es/lib/aws/partitions.json b/amplify/functions/downloadDocument/node_modules/@aws-sdk/util-endpoints/dist-es/lib/aws/partitions.json new file mode 100644 index 0000000..a11705a --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@aws-sdk/util-endpoints/dist-es/lib/aws/partitions.json @@ -0,0 +1,258 @@ +{ + "partitions": [{ + "id": "aws", + "outputs": { + "dnsSuffix": "amazonaws.com", + "dualStackDnsSuffix": "api.aws", + "implicitGlobalRegion": "us-east-1", + "name": "aws", + "supportsDualStack": true, + "supportsFIPS": true + }, + "regionRegex": "^(us|eu|ap|sa|ca|me|af|il|mx)\\-\\w+\\-\\d+$", + "regions": { + "af-south-1": { + "description": "Africa (Cape Town)" + }, + "ap-east-1": { + "description": "Asia Pacific (Hong Kong)" + }, + "ap-northeast-1": { + "description": "Asia Pacific (Tokyo)" + }, + "ap-northeast-2": { + "description": "Asia Pacific (Seoul)" + }, + 
"ap-northeast-3": { + "description": "Asia Pacific (Osaka)" + }, + "ap-south-1": { + "description": "Asia Pacific (Mumbai)" + }, + "ap-south-2": { + "description": "Asia Pacific (Hyderabad)" + }, + "ap-southeast-1": { + "description": "Asia Pacific (Singapore)" + }, + "ap-southeast-2": { + "description": "Asia Pacific (Sydney)" + }, + "ap-southeast-3": { + "description": "Asia Pacific (Jakarta)" + }, + "ap-southeast-4": { + "description": "Asia Pacific (Melbourne)" + }, + "ap-southeast-5": { + "description": "Asia Pacific (Malaysia)" + }, + "ap-southeast-7": { + "description": "Asia Pacific (Thailand)" + }, + "aws-global": { + "description": "AWS Standard global region" + }, + "ca-central-1": { + "description": "Canada (Central)" + }, + "ca-west-1": { + "description": "Canada West (Calgary)" + }, + "eu-central-1": { + "description": "Europe (Frankfurt)" + }, + "eu-central-2": { + "description": "Europe (Zurich)" + }, + "eu-north-1": { + "description": "Europe (Stockholm)" + }, + "eu-south-1": { + "description": "Europe (Milan)" + }, + "eu-south-2": { + "description": "Europe (Spain)" + }, + "eu-west-1": { + "description": "Europe (Ireland)" + }, + "eu-west-2": { + "description": "Europe (London)" + }, + "eu-west-3": { + "description": "Europe (Paris)" + }, + "il-central-1": { + "description": "Israel (Tel Aviv)" + }, + "me-central-1": { + "description": "Middle East (UAE)" + }, + "me-south-1": { + "description": "Middle East (Bahrain)" + }, + "mx-central-1": { + "description": "Mexico (Central)" + }, + "sa-east-1": { + "description": "South America (Sao Paulo)" + }, + "us-east-1": { + "description": "US East (N. Virginia)" + }, + "us-east-2": { + "description": "US East (Ohio)" + }, + "us-west-1": { + "description": "US West (N. 
California)" + }, + "us-west-2": { + "description": "US West (Oregon)" + } + } + }, { + "id": "aws-cn", + "outputs": { + "dnsSuffix": "amazonaws.com.cn", + "dualStackDnsSuffix": "api.amazonwebservices.com.cn", + "implicitGlobalRegion": "cn-northwest-1", + "name": "aws-cn", + "supportsDualStack": true, + "supportsFIPS": true + }, + "regionRegex": "^cn\\-\\w+\\-\\d+$", + "regions": { + "aws-cn-global": { + "description": "AWS China global region" + }, + "cn-north-1": { + "description": "China (Beijing)" + }, + "cn-northwest-1": { + "description": "China (Ningxia)" + } + } + }, { + "id": "aws-us-gov", + "outputs": { + "dnsSuffix": "amazonaws.com", + "dualStackDnsSuffix": "api.aws", + "implicitGlobalRegion": "us-gov-west-1", + "name": "aws-us-gov", + "supportsDualStack": true, + "supportsFIPS": true + }, + "regionRegex": "^us\\-gov\\-\\w+\\-\\d+$", + "regions": { + "aws-us-gov-global": { + "description": "AWS GovCloud (US) global region" + }, + "us-gov-east-1": { + "description": "AWS GovCloud (US-East)" + }, + "us-gov-west-1": { + "description": "AWS GovCloud (US-West)" + } + } + }, { + "id": "aws-iso", + "outputs": { + "dnsSuffix": "c2s.ic.gov", + "dualStackDnsSuffix": "c2s.ic.gov", + "implicitGlobalRegion": "us-iso-east-1", + "name": "aws-iso", + "supportsDualStack": false, + "supportsFIPS": true + }, + "regionRegex": "^us\\-iso\\-\\w+\\-\\d+$", + "regions": { + "aws-iso-global": { + "description": "AWS ISO (US) global region" + }, + "us-iso-east-1": { + "description": "US ISO East" + }, + "us-iso-west-1": { + "description": "US ISO WEST" + } + } + }, { + "id": "aws-iso-b", + "outputs": { + "dnsSuffix": "sc2s.sgov.gov", + "dualStackDnsSuffix": "sc2s.sgov.gov", + "implicitGlobalRegion": "us-isob-east-1", + "name": "aws-iso-b", + "supportsDualStack": false, + "supportsFIPS": true + }, + "regionRegex": "^us\\-isob\\-\\w+\\-\\d+$", + "regions": { + "aws-iso-b-global": { + "description": "AWS ISOB (US) global region" + }, + "us-isob-east-1": { + "description": "US ISOB 
East (Ohio)" + } + } + }, { + "id": "aws-iso-e", + "outputs": { + "dnsSuffix": "cloud.adc-e.uk", + "dualStackDnsSuffix": "cloud.adc-e.uk", + "implicitGlobalRegion": "eu-isoe-west-1", + "name": "aws-iso-e", + "supportsDualStack": false, + "supportsFIPS": true + }, + "regionRegex": "^eu\\-isoe\\-\\w+\\-\\d+$", + "regions": { + "aws-iso-e-global": { + "description": "AWS ISOE (Europe) global region" + }, + "eu-isoe-west-1": { + "description": "EU ISOE West" + } + } + }, { + "id": "aws-iso-f", + "outputs": { + "dnsSuffix": "csp.hci.ic.gov", + "dualStackDnsSuffix": "csp.hci.ic.gov", + "implicitGlobalRegion": "us-isof-south-1", + "name": "aws-iso-f", + "supportsDualStack": false, + "supportsFIPS": true + }, + "regionRegex": "^us\\-isof\\-\\w+\\-\\d+$", + "regions": { + "aws-iso-f-global": { + "description": "AWS ISOF global region" + }, + "us-isof-east-1": { + "description": "US ISOF EAST" + }, + "us-isof-south-1": { + "description": "US ISOF SOUTH" + } + } + }, { + "id": "aws-eusc", + "outputs": { + "dnsSuffix": "amazonaws.eu", + "dualStackDnsSuffix": "amazonaws.eu", + "implicitGlobalRegion": "eusc-de-east-1", + "name": "aws-eusc", + "supportsDualStack": false, + "supportsFIPS": true + }, + "regionRegex": "^eusc\\-(de)\\-\\w+\\-\\d+$", + "regions": { + "eusc-de-east-1": { + "description": "EU (Germany)" + } + } + }], + "version": "1.1" +} diff --git a/amplify/functions/downloadDocument/node_modules/@aws-sdk/util-endpoints/dist-es/lib/isIpAddress.js b/amplify/functions/downloadDocument/node_modules/@aws-sdk/util-endpoints/dist-es/lib/isIpAddress.js new file mode 100644 index 0000000..59bfcd8 --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@aws-sdk/util-endpoints/dist-es/lib/isIpAddress.js @@ -0,0 +1 @@ +export { isIpAddress } from "@smithy/util-endpoints"; diff --git a/amplify/functions/downloadDocument/node_modules/@aws-sdk/util-endpoints/dist-es/resolveEndpoint.js 
b/amplify/functions/downloadDocument/node_modules/@aws-sdk/util-endpoints/dist-es/resolveEndpoint.js new file mode 100644 index 0000000..e2453f7 --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@aws-sdk/util-endpoints/dist-es/resolveEndpoint.js @@ -0,0 +1 @@ +export { resolveEndpoint } from "@smithy/util-endpoints"; diff --git a/amplify/functions/downloadDocument/node_modules/@aws-sdk/util-endpoints/dist-es/types/EndpointError.js b/amplify/functions/downloadDocument/node_modules/@aws-sdk/util-endpoints/dist-es/types/EndpointError.js new file mode 100644 index 0000000..521e688 --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@aws-sdk/util-endpoints/dist-es/types/EndpointError.js @@ -0,0 +1 @@ +export { EndpointError } from "@smithy/util-endpoints"; diff --git a/amplify/functions/downloadDocument/node_modules/@aws-sdk/util-endpoints/dist-es/types/EndpointRuleObject.js b/amplify/functions/downloadDocument/node_modules/@aws-sdk/util-endpoints/dist-es/types/EndpointRuleObject.js new file mode 100644 index 0000000..cb0ff5c --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@aws-sdk/util-endpoints/dist-es/types/EndpointRuleObject.js @@ -0,0 +1 @@ +export {}; diff --git a/amplify/functions/downloadDocument/node_modules/@aws-sdk/util-endpoints/dist-es/types/ErrorRuleObject.js b/amplify/functions/downloadDocument/node_modules/@aws-sdk/util-endpoints/dist-es/types/ErrorRuleObject.js new file mode 100644 index 0000000..cb0ff5c --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@aws-sdk/util-endpoints/dist-es/types/ErrorRuleObject.js @@ -0,0 +1 @@ +export {}; diff --git a/amplify/functions/downloadDocument/node_modules/@aws-sdk/util-endpoints/dist-es/types/RuleSetObject.js b/amplify/functions/downloadDocument/node_modules/@aws-sdk/util-endpoints/dist-es/types/RuleSetObject.js new file mode 100644 index 0000000..cb0ff5c --- /dev/null +++ 
b/amplify/functions/downloadDocument/node_modules/@aws-sdk/util-endpoints/dist-es/types/RuleSetObject.js @@ -0,0 +1 @@ +export {}; diff --git a/amplify/functions/downloadDocument/node_modules/@aws-sdk/util-endpoints/dist-es/types/TreeRuleObject.js b/amplify/functions/downloadDocument/node_modules/@aws-sdk/util-endpoints/dist-es/types/TreeRuleObject.js new file mode 100644 index 0000000..cb0ff5c --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@aws-sdk/util-endpoints/dist-es/types/TreeRuleObject.js @@ -0,0 +1 @@ +export {}; diff --git a/amplify/functions/downloadDocument/node_modules/@aws-sdk/util-endpoints/dist-es/types/index.js b/amplify/functions/downloadDocument/node_modules/@aws-sdk/util-endpoints/dist-es/types/index.js new file mode 100644 index 0000000..daba501 --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@aws-sdk/util-endpoints/dist-es/types/index.js @@ -0,0 +1,6 @@ +export * from "./EndpointError"; +export * from "./EndpointRuleObject"; +export * from "./ErrorRuleObject"; +export * from "./RuleSetObject"; +export * from "./TreeRuleObject"; +export * from "./shared"; diff --git a/amplify/functions/downloadDocument/node_modules/@aws-sdk/util-endpoints/dist-es/types/shared.js b/amplify/functions/downloadDocument/node_modules/@aws-sdk/util-endpoints/dist-es/types/shared.js new file mode 100644 index 0000000..cb0ff5c --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@aws-sdk/util-endpoints/dist-es/types/shared.js @@ -0,0 +1 @@ +export {}; diff --git a/amplify/functions/downloadDocument/node_modules/@aws-sdk/util-endpoints/dist-types/aws.d.ts b/amplify/functions/downloadDocument/node_modules/@aws-sdk/util-endpoints/dist-types/aws.d.ts new file mode 100644 index 0000000..13c64a9 --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@aws-sdk/util-endpoints/dist-types/aws.d.ts @@ -0,0 +1,2 @@ +import { EndpointFunctions } from "@smithy/util-endpoints"; +export declare const 
awsEndpointFunctions: EndpointFunctions; diff --git a/amplify/functions/downloadDocument/node_modules/@aws-sdk/util-endpoints/dist-types/index.d.ts b/amplify/functions/downloadDocument/node_modules/@aws-sdk/util-endpoints/dist-types/index.d.ts new file mode 100644 index 0000000..d046d90 --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@aws-sdk/util-endpoints/dist-types/index.d.ts @@ -0,0 +1,5 @@ +export * from "./aws"; +export * from "./lib/aws/partition"; +export * from "./lib/isIpAddress"; +export * from "./resolveEndpoint"; +export * from "./types"; diff --git a/amplify/functions/downloadDocument/node_modules/@aws-sdk/util-endpoints/dist-types/lib/aws/index.d.ts b/amplify/functions/downloadDocument/node_modules/@aws-sdk/util-endpoints/dist-types/lib/aws/index.d.ts new file mode 100644 index 0000000..03be049 --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@aws-sdk/util-endpoints/dist-types/lib/aws/index.d.ts @@ -0,0 +1,3 @@ +export * from "./isVirtualHostableS3Bucket"; +export * from "./parseArn"; +export * from "./partition"; diff --git a/amplify/functions/downloadDocument/node_modules/@aws-sdk/util-endpoints/dist-types/lib/aws/isVirtualHostableS3Bucket.d.ts b/amplify/functions/downloadDocument/node_modules/@aws-sdk/util-endpoints/dist-types/lib/aws/isVirtualHostableS3Bucket.d.ts new file mode 100644 index 0000000..25d46e4 --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@aws-sdk/util-endpoints/dist-types/lib/aws/isVirtualHostableS3Bucket.d.ts @@ -0,0 +1,5 @@ +/** + * Evaluates whether a string is a DNS compatible bucket name and can be used with + * virtual hosted style addressing. 
+ */ +export declare const isVirtualHostableS3Bucket: (value: string, allowSubDomains?: boolean) => boolean; diff --git a/amplify/functions/downloadDocument/node_modules/@aws-sdk/util-endpoints/dist-types/lib/aws/parseArn.d.ts b/amplify/functions/downloadDocument/node_modules/@aws-sdk/util-endpoints/dist-types/lib/aws/parseArn.d.ts new file mode 100644 index 0000000..fa5af83 --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@aws-sdk/util-endpoints/dist-types/lib/aws/parseArn.d.ts @@ -0,0 +1,7 @@ +import { EndpointARN } from "@smithy/types"; +/** + * Evaluates a single string argument value, and returns an object containing + * details about the parsed ARN. + * If the input was not a valid ARN, the function returns null. + */ +export declare const parseArn: (value: string) => EndpointARN | null; diff --git a/amplify/functions/downloadDocument/node_modules/@aws-sdk/util-endpoints/dist-types/lib/aws/partition.d.ts b/amplify/functions/downloadDocument/node_modules/@aws-sdk/util-endpoints/dist-types/lib/aws/partition.d.ts new file mode 100644 index 0000000..96d14e4 --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@aws-sdk/util-endpoints/dist-types/lib/aws/partition.d.ts @@ -0,0 +1,38 @@ +import { EndpointPartition } from "@smithy/types"; +export type PartitionsInfo = { + partitions: Array<{ + id: string; + outputs: { + dnsSuffix: string; + dualStackDnsSuffix: string; + name: string; + supportsDualStack: boolean; + supportsFIPS: boolean; + }; + regionRegex: string; + regions: Record; + }>; +}; +/** + * Evaluates a single string argument value as a region, and matches the + * string value to an AWS partition. + * The matcher MUST always return a successful object describing the partition + * that the region has been determined to be a part of. + */ +export declare const partition: (value: string) => EndpointPartition; +/** + * Set custom partitions.json data. 
+ * @internal + */ +export declare const setPartitionInfo: (partitionsInfo: PartitionsInfo, userAgentPrefix?: string) => void; +/** + * Reset to the default partitions.json data. + * @internal + */ +export declare const useDefaultPartitionInfo: () => void; +/** + * @internal + */ +export declare const getUserAgentPrefix: () => string; diff --git a/amplify/functions/downloadDocument/node_modules/@aws-sdk/util-endpoints/dist-types/lib/isIpAddress.d.ts b/amplify/functions/downloadDocument/node_modules/@aws-sdk/util-endpoints/dist-types/lib/isIpAddress.d.ts new file mode 100644 index 0000000..59bfcd8 --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@aws-sdk/util-endpoints/dist-types/lib/isIpAddress.d.ts @@ -0,0 +1 @@ +export { isIpAddress } from "@smithy/util-endpoints"; diff --git a/amplify/functions/downloadDocument/node_modules/@aws-sdk/util-endpoints/dist-types/resolveEndpoint.d.ts b/amplify/functions/downloadDocument/node_modules/@aws-sdk/util-endpoints/dist-types/resolveEndpoint.d.ts new file mode 100644 index 0000000..e2453f7 --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@aws-sdk/util-endpoints/dist-types/resolveEndpoint.d.ts @@ -0,0 +1 @@ +export { resolveEndpoint } from "@smithy/util-endpoints"; diff --git a/amplify/functions/downloadDocument/node_modules/@aws-sdk/util-endpoints/dist-types/ts3.4/aws.d.ts b/amplify/functions/downloadDocument/node_modules/@aws-sdk/util-endpoints/dist-types/ts3.4/aws.d.ts new file mode 100644 index 0000000..13c64a9 --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@aws-sdk/util-endpoints/dist-types/ts3.4/aws.d.ts @@ -0,0 +1,2 @@ +import { EndpointFunctions } from "@smithy/util-endpoints"; +export declare const awsEndpointFunctions: EndpointFunctions; diff --git a/amplify/functions/downloadDocument/node_modules/@aws-sdk/util-endpoints/dist-types/ts3.4/index.d.ts b/amplify/functions/downloadDocument/node_modules/@aws-sdk/util-endpoints/dist-types/ts3.4/index.d.ts new file 
mode 100644 index 0000000..d046d90 --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@aws-sdk/util-endpoints/dist-types/ts3.4/index.d.ts @@ -0,0 +1,5 @@ +export * from "./aws"; +export * from "./lib/aws/partition"; +export * from "./lib/isIpAddress"; +export * from "./resolveEndpoint"; +export * from "./types"; diff --git a/amplify/functions/downloadDocument/node_modules/@aws-sdk/util-endpoints/dist-types/ts3.4/lib/aws/index.d.ts b/amplify/functions/downloadDocument/node_modules/@aws-sdk/util-endpoints/dist-types/ts3.4/lib/aws/index.d.ts new file mode 100644 index 0000000..03be049 --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@aws-sdk/util-endpoints/dist-types/ts3.4/lib/aws/index.d.ts @@ -0,0 +1,3 @@ +export * from "./isVirtualHostableS3Bucket"; +export * from "./parseArn"; +export * from "./partition"; diff --git a/amplify/functions/downloadDocument/node_modules/@aws-sdk/util-endpoints/dist-types/ts3.4/lib/aws/isVirtualHostableS3Bucket.d.ts b/amplify/functions/downloadDocument/node_modules/@aws-sdk/util-endpoints/dist-types/ts3.4/lib/aws/isVirtualHostableS3Bucket.d.ts new file mode 100644 index 0000000..5ef3296 --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@aws-sdk/util-endpoints/dist-types/ts3.4/lib/aws/isVirtualHostableS3Bucket.d.ts @@ -0,0 +1,4 @@ +export declare const isVirtualHostableS3Bucket: ( + value: string, + allowSubDomains?: boolean +) => boolean; diff --git a/amplify/functions/downloadDocument/node_modules/@aws-sdk/util-endpoints/dist-types/ts3.4/lib/aws/parseArn.d.ts b/amplify/functions/downloadDocument/node_modules/@aws-sdk/util-endpoints/dist-types/ts3.4/lib/aws/parseArn.d.ts new file mode 100644 index 0000000..690d459 --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@aws-sdk/util-endpoints/dist-types/ts3.4/lib/aws/parseArn.d.ts @@ -0,0 +1,2 @@ +import { EndpointARN } from "@smithy/types"; +export declare const parseArn: (value: string) => EndpointARN | null; diff 
--git a/amplify/functions/downloadDocument/node_modules/@aws-sdk/util-endpoints/dist-types/ts3.4/lib/aws/partition.d.ts b/amplify/functions/downloadDocument/node_modules/@aws-sdk/util-endpoints/dist-types/ts3.4/lib/aws/partition.d.ts new file mode 100644 index 0000000..0683113 --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@aws-sdk/util-endpoints/dist-types/ts3.4/lib/aws/partition.d.ts @@ -0,0 +1,28 @@ +import { EndpointPartition } from "@smithy/types"; +export type PartitionsInfo = { + partitions: Array<{ + id: string; + outputs: { + dnsSuffix: string; + dualStackDnsSuffix: string; + name: string; + supportsDualStack: boolean; + supportsFIPS: boolean; + }; + regionRegex: string; + regions: Record< + string, + | { + description?: string; + } + | undefined + >; + }>; +}; +export declare const partition: (value: string) => EndpointPartition; +export declare const setPartitionInfo: ( + partitionsInfo: PartitionsInfo, + userAgentPrefix?: string +) => void; +export declare const useDefaultPartitionInfo: () => void; +export declare const getUserAgentPrefix: () => string; diff --git a/amplify/functions/downloadDocument/node_modules/@aws-sdk/util-endpoints/dist-types/ts3.4/lib/isIpAddress.d.ts b/amplify/functions/downloadDocument/node_modules/@aws-sdk/util-endpoints/dist-types/ts3.4/lib/isIpAddress.d.ts new file mode 100644 index 0000000..59bfcd8 --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@aws-sdk/util-endpoints/dist-types/ts3.4/lib/isIpAddress.d.ts @@ -0,0 +1 @@ +export { isIpAddress } from "@smithy/util-endpoints"; diff --git a/amplify/functions/downloadDocument/node_modules/@aws-sdk/util-endpoints/dist-types/ts3.4/resolveEndpoint.d.ts b/amplify/functions/downloadDocument/node_modules/@aws-sdk/util-endpoints/dist-types/ts3.4/resolveEndpoint.d.ts new file mode 100644 index 0000000..e2453f7 --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@aws-sdk/util-endpoints/dist-types/ts3.4/resolveEndpoint.d.ts @@ -0,0 
+1 @@ +export { resolveEndpoint } from "@smithy/util-endpoints"; diff --git a/amplify/functions/downloadDocument/node_modules/@aws-sdk/util-endpoints/dist-types/ts3.4/types/EndpointError.d.ts b/amplify/functions/downloadDocument/node_modules/@aws-sdk/util-endpoints/dist-types/ts3.4/types/EndpointError.d.ts new file mode 100644 index 0000000..521e688 --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@aws-sdk/util-endpoints/dist-types/ts3.4/types/EndpointError.d.ts @@ -0,0 +1 @@ +export { EndpointError } from "@smithy/util-endpoints"; diff --git a/amplify/functions/downloadDocument/node_modules/@aws-sdk/util-endpoints/dist-types/ts3.4/types/EndpointRuleObject.d.ts b/amplify/functions/downloadDocument/node_modules/@aws-sdk/util-endpoints/dist-types/ts3.4/types/EndpointRuleObject.d.ts new file mode 100644 index 0000000..b48af7f --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@aws-sdk/util-endpoints/dist-types/ts3.4/types/EndpointRuleObject.d.ts @@ -0,0 +1,6 @@ +export { + EndpointObjectProperties, + EndpointObjectHeaders, + EndpointObject, + EndpointRuleObject, +} from "@smithy/util-endpoints"; diff --git a/amplify/functions/downloadDocument/node_modules/@aws-sdk/util-endpoints/dist-types/ts3.4/types/ErrorRuleObject.d.ts b/amplify/functions/downloadDocument/node_modules/@aws-sdk/util-endpoints/dist-types/ts3.4/types/ErrorRuleObject.d.ts new file mode 100644 index 0000000..e7b8881 --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@aws-sdk/util-endpoints/dist-types/ts3.4/types/ErrorRuleObject.d.ts @@ -0,0 +1 @@ +export { ErrorRuleObject } from "@smithy/util-endpoints"; diff --git a/amplify/functions/downloadDocument/node_modules/@aws-sdk/util-endpoints/dist-types/ts3.4/types/RuleSetObject.d.ts b/amplify/functions/downloadDocument/node_modules/@aws-sdk/util-endpoints/dist-types/ts3.4/types/RuleSetObject.d.ts new file mode 100644 index 0000000..2a489c6 --- /dev/null +++ 
b/amplify/functions/downloadDocument/node_modules/@aws-sdk/util-endpoints/dist-types/ts3.4/types/RuleSetObject.d.ts @@ -0,0 +1,5 @@ +export { + DeprecatedObject, + ParameterObject, + RuleSetObject, +} from "@smithy/util-endpoints"; diff --git a/amplify/functions/downloadDocument/node_modules/@aws-sdk/util-endpoints/dist-types/ts3.4/types/TreeRuleObject.d.ts b/amplify/functions/downloadDocument/node_modules/@aws-sdk/util-endpoints/dist-types/ts3.4/types/TreeRuleObject.d.ts new file mode 100644 index 0000000..716ddcf --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@aws-sdk/util-endpoints/dist-types/ts3.4/types/TreeRuleObject.d.ts @@ -0,0 +1 @@ +export { RuleSetRules, TreeRuleObject } from "@smithy/util-endpoints"; diff --git a/amplify/functions/downloadDocument/node_modules/@aws-sdk/util-endpoints/dist-types/ts3.4/types/index.d.ts b/amplify/functions/downloadDocument/node_modules/@aws-sdk/util-endpoints/dist-types/ts3.4/types/index.d.ts new file mode 100644 index 0000000..daba501 --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@aws-sdk/util-endpoints/dist-types/ts3.4/types/index.d.ts @@ -0,0 +1,6 @@ +export * from "./EndpointError"; +export * from "./EndpointRuleObject"; +export * from "./ErrorRuleObject"; +export * from "./RuleSetObject"; +export * from "./TreeRuleObject"; +export * from "./shared"; diff --git a/amplify/functions/downloadDocument/node_modules/@aws-sdk/util-endpoints/dist-types/ts3.4/types/shared.d.ts b/amplify/functions/downloadDocument/node_modules/@aws-sdk/util-endpoints/dist-types/ts3.4/types/shared.d.ts new file mode 100644 index 0000000..cfd2248 --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@aws-sdk/util-endpoints/dist-types/ts3.4/types/shared.d.ts @@ -0,0 +1,12 @@ +export { + ReferenceObject, + FunctionObject, + FunctionArgv, + FunctionReturn, + ConditionObject, + Expression, + EndpointParams, + EndpointResolverOptions, + ReferenceRecord, + EvaluateOptions, +} from 
"@smithy/util-endpoints"; diff --git a/amplify/functions/downloadDocument/node_modules/@aws-sdk/util-endpoints/dist-types/types/EndpointError.d.ts b/amplify/functions/downloadDocument/node_modules/@aws-sdk/util-endpoints/dist-types/types/EndpointError.d.ts new file mode 100644 index 0000000..521e688 --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@aws-sdk/util-endpoints/dist-types/types/EndpointError.d.ts @@ -0,0 +1 @@ +export { EndpointError } from "@smithy/util-endpoints"; diff --git a/amplify/functions/downloadDocument/node_modules/@aws-sdk/util-endpoints/dist-types/types/EndpointRuleObject.d.ts b/amplify/functions/downloadDocument/node_modules/@aws-sdk/util-endpoints/dist-types/types/EndpointRuleObject.d.ts new file mode 100644 index 0000000..ef666fe --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@aws-sdk/util-endpoints/dist-types/types/EndpointRuleObject.d.ts @@ -0,0 +1 @@ +export { EndpointObjectProperties, EndpointObjectHeaders, EndpointObject, EndpointRuleObject, } from "@smithy/util-endpoints"; diff --git a/amplify/functions/downloadDocument/node_modules/@aws-sdk/util-endpoints/dist-types/types/ErrorRuleObject.d.ts b/amplify/functions/downloadDocument/node_modules/@aws-sdk/util-endpoints/dist-types/types/ErrorRuleObject.d.ts new file mode 100644 index 0000000..e7b8881 --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@aws-sdk/util-endpoints/dist-types/types/ErrorRuleObject.d.ts @@ -0,0 +1 @@ +export { ErrorRuleObject } from "@smithy/util-endpoints"; diff --git a/amplify/functions/downloadDocument/node_modules/@aws-sdk/util-endpoints/dist-types/types/RuleSetObject.d.ts b/amplify/functions/downloadDocument/node_modules/@aws-sdk/util-endpoints/dist-types/types/RuleSetObject.d.ts new file mode 100644 index 0000000..c052af0 --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@aws-sdk/util-endpoints/dist-types/types/RuleSetObject.d.ts @@ -0,0 +1 @@ +export { DeprecatedObject, 
ParameterObject, RuleSetObject } from "@smithy/util-endpoints"; diff --git a/amplify/functions/downloadDocument/node_modules/@aws-sdk/util-endpoints/dist-types/types/TreeRuleObject.d.ts b/amplify/functions/downloadDocument/node_modules/@aws-sdk/util-endpoints/dist-types/types/TreeRuleObject.d.ts new file mode 100644 index 0000000..716ddcf --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@aws-sdk/util-endpoints/dist-types/types/TreeRuleObject.d.ts @@ -0,0 +1 @@ +export { RuleSetRules, TreeRuleObject } from "@smithy/util-endpoints"; diff --git a/amplify/functions/downloadDocument/node_modules/@aws-sdk/util-endpoints/dist-types/types/index.d.ts b/amplify/functions/downloadDocument/node_modules/@aws-sdk/util-endpoints/dist-types/types/index.d.ts new file mode 100644 index 0000000..daba501 --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@aws-sdk/util-endpoints/dist-types/types/index.d.ts @@ -0,0 +1,6 @@ +export * from "./EndpointError"; +export * from "./EndpointRuleObject"; +export * from "./ErrorRuleObject"; +export * from "./RuleSetObject"; +export * from "./TreeRuleObject"; +export * from "./shared"; diff --git a/amplify/functions/downloadDocument/node_modules/@aws-sdk/util-endpoints/dist-types/types/shared.d.ts b/amplify/functions/downloadDocument/node_modules/@aws-sdk/util-endpoints/dist-types/types/shared.d.ts new file mode 100644 index 0000000..af7cc53 --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@aws-sdk/util-endpoints/dist-types/types/shared.d.ts @@ -0,0 +1 @@ +export { ReferenceObject, FunctionObject, FunctionArgv, FunctionReturn, ConditionObject, Expression, EndpointParams, EndpointResolverOptions, ReferenceRecord, EvaluateOptions, } from "@smithy/util-endpoints"; diff --git a/amplify/functions/downloadDocument/node_modules/@aws-sdk/util-endpoints/package.json b/amplify/functions/downloadDocument/node_modules/@aws-sdk/util-endpoints/package.json new file mode 100644 index 0000000..36d8d2a --- 
/dev/null +++ b/amplify/functions/downloadDocument/node_modules/@aws-sdk/util-endpoints/package.json @@ -0,0 +1,58 @@ +{ + "name": "@aws-sdk/util-endpoints", + "version": "3.787.0", + "description": "Utilities to help with endpoint resolution", + "main": "./dist-cjs/index.js", + "module": "./dist-es/index.js", + "types": "./dist-types/index.d.ts", + "scripts": { + "build": "concurrently 'yarn:build:cjs' 'yarn:build:es' 'yarn:build:types'", + "build:cjs": "node ../../scripts/compilation/inline util-endpoints", + "build:es": "tsc -p tsconfig.es.json", + "build:include:deps": "lerna run --scope $npm_package_name --include-dependencies build", + "build:types": "tsc -p tsconfig.types.json", + "build:types:downlevel": "downlevel-dts dist-types dist-types/ts3.4", + "clean": "rimraf ./dist-* && rimraf *.tsbuildinfo", + "test": "yarn g:vitest run", + "test:integration": "yarn g:vitest run -c vitest.config.integ.ts", + "test:watch": "yarn g:vitest watch", + "test:integration:watch": "yarn g:vitest watch -c vitest.config.integ.ts" + }, + "author": { + "name": "AWS SDK for JavaScript Team", + "url": "https://aws.amazon.com/javascript/" + }, + "license": "Apache-2.0", + "dependencies": { + "@aws-sdk/types": "3.775.0", + "@smithy/types": "^4.2.0", + "@smithy/util-endpoints": "^3.0.2", + "tslib": "^2.6.2" + }, + "engines": { + "node": ">=18.0.0" + }, + "typesVersions": { + "<4.0": { + "dist-types/*": [ + "dist-types/ts3.4/*" + ] + } + }, + "files": [ + "dist-*/**" + ], + "homepage": "https://github.com/aws/aws-sdk-js-v3/tree/main/packages/util-endpoints", + "repository": { + "type": "git", + "url": "https://github.com/aws/aws-sdk-js-v3.git", + "directory": "packages/util-endpoints" + }, + "devDependencies": { + "@tsconfig/recommended": "1.0.1", + "concurrently": "7.0.0", + "downlevel-dts": "0.10.1", + "rimraf": "3.0.2", + "typescript": "~5.2.2" + } +} diff --git a/amplify/functions/downloadDocument/node_modules/@aws-sdk/util-locate-window/LICENSE 
b/amplify/functions/downloadDocument/node_modules/@aws-sdk/util-locate-window/LICENSE new file mode 100644 index 0000000..7b6491b --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@aws-sdk/util-locate-window/LICENSE @@ -0,0 +1,201 @@ +Apache License + Version 2.0, January 2004 + http://www.apache.org/licenses/ + + TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION + + 1. Definitions. + + "License" shall mean the terms and conditions for use, reproduction, + and distribution as defined by Sections 1 through 9 of this document. + + "Licensor" shall mean the copyright owner or entity authorized by + the copyright owner that is granting the License. + + "Legal Entity" shall mean the union of the acting entity and all + other entities that control, are controlled by, or are under common + control with that entity. For the purposes of this definition, + "control" means (i) the power, direct or indirect, to cause the + direction or management of such entity, whether by contract or + otherwise, or (ii) ownership of fifty percent (50%) or more of the + outstanding shares, or (iii) beneficial ownership of such entity. + + "You" (or "Your") shall mean an individual or Legal Entity + exercising permissions granted by this License. + + "Source" form shall mean the preferred form for making modifications, + including but not limited to software source code, documentation + source, and configuration files. + + "Object" form shall mean any form resulting from mechanical + transformation or translation of a Source form, including but + not limited to compiled object code, generated documentation, + and conversions to other media types. + + "Work" shall mean the work of authorship, whether in Source or + Object form, made available under the License, as indicated by a + copyright notice that is included in or attached to the work + (an example is provided in the Appendix below). 
+ + "Derivative Works" shall mean any work, whether in Source or Object + form, that is based on (or derived from) the Work and for which the + editorial revisions, annotations, elaborations, or other modifications + represent, as a whole, an original work of authorship. For the purposes + of this License, Derivative Works shall not include works that remain + separable from, or merely link (or bind by name) to the interfaces of, + the Work and Derivative Works thereof. + + "Contribution" shall mean any work of authorship, including + the original version of the Work and any modifications or additions + to that Work or Derivative Works thereof, that is intentionally + submitted to Licensor for inclusion in the Work by the copyright owner + or by an individual or Legal Entity authorized to submit on behalf of + the copyright owner. For the purposes of this definition, "submitted" + means any form of electronic, verbal, or written communication sent + to the Licensor or its representatives, including but not limited to + communication on electronic mailing lists, source code control systems, + and issue tracking systems that are managed by, or on behalf of, the + Licensor for the purpose of discussing and improving the Work, but + excluding communication that is conspicuously marked or otherwise + designated in writing by the copyright owner as "Not a Contribution." + + "Contributor" shall mean Licensor and any individual or Legal Entity + on behalf of whom a Contribution has been received by Licensor and + subsequently incorporated within the Work. + + 2. Grant of Copyright License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + copyright license to reproduce, prepare Derivative Works of, + publicly display, publicly perform, sublicense, and distribute the + Work and such Derivative Works in Source or Object form. + + 3. 
Grant of Patent License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + (except as stated in this section) patent license to make, have made, + use, offer to sell, sell, import, and otherwise transfer the Work, + where such license applies only to those patent claims licensable + by such Contributor that are necessarily infringed by their + Contribution(s) alone or by combination of their Contribution(s) + with the Work to which such Contribution(s) was submitted. If You + institute patent litigation against any entity (including a + cross-claim or counterclaim in a lawsuit) alleging that the Work + or a Contribution incorporated within the Work constitutes direct + or contributory patent infringement, then any patent licenses + granted to You under this License for that Work shall terminate + as of the date such litigation is filed. + + 4. Redistribution. You may reproduce and distribute copies of the + Work or Derivative Works thereof in any medium, with or without + modifications, and in Source or Object form, provided that You + meet the following conditions: + + (a) You must give any other recipients of the Work or + Derivative Works a copy of this License; and + + (b) You must cause any modified files to carry prominent notices + stating that You changed the files; and + + (c) You must retain, in the Source form of any Derivative Works + that You distribute, all copyright, patent, trademark, and + attribution notices from the Source form of the Work, + excluding those notices that do not pertain to any part of + the Derivative Works; and + + (d) If the Work includes a "NOTICE" text file as part of its + distribution, then any Derivative Works that You distribute must + include a readable copy of the attribution notices contained + within such NOTICE file, excluding those notices that do not + pertain to any part of the Derivative 
Works, in at least one + of the following places: within a NOTICE text file distributed + as part of the Derivative Works; within the Source form or + documentation, if provided along with the Derivative Works; or, + within a display generated by the Derivative Works, if and + wherever such third-party notices normally appear. The contents + of the NOTICE file are for informational purposes only and + do not modify the License. You may add Your own attribution + notices within Derivative Works that You distribute, alongside + or as an addendum to the NOTICE text from the Work, provided + that such additional attribution notices cannot be construed + as modifying the License. + + You may add Your own copyright statement to Your modifications and + may provide additional or different license terms and conditions + for use, reproduction, or distribution of Your modifications, or + for any such Derivative Works as a whole, provided Your use, + reproduction, and distribution of the Work otherwise complies with + the conditions stated in this License. + + 5. Submission of Contributions. Unless You explicitly state otherwise, + any Contribution intentionally submitted for inclusion in the Work + by You to the Licensor shall be under the terms and conditions of + this License, without any additional terms or conditions. + Notwithstanding the above, nothing herein shall supersede or modify + the terms of any separate license agreement you may have executed + with Licensor regarding such Contributions. + + 6. Trademarks. This License does not grant permission to use the trade + names, trademarks, service marks, or product names of the Licensor, + except as required for reasonable and customary use in describing the + origin of the Work and reproducing the content of the NOTICE file. + + 7. Disclaimer of Warranty. 
Unless required by applicable law or + agreed to in writing, Licensor provides the Work (and each + Contributor provides its Contributions) on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or + implied, including, without limitation, any warranties or conditions + of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A + PARTICULAR PURPOSE. You are solely responsible for determining the + appropriateness of using or redistributing the Work and assume any + risks associated with Your exercise of permissions under this License. + + 8. Limitation of Liability. In no event and under no legal theory, + whether in tort (including negligence), contract, or otherwise, + unless required by applicable law (such as deliberate and grossly + negligent acts) or agreed to in writing, shall any Contributor be + liable to You for damages, including any direct, indirect, special, + incidental, or consequential damages of any character arising as a + result of this License or out of the use or inability to use the + Work (including but not limited to damages for loss of goodwill, + work stoppage, computer failure or malfunction, or any and all + other commercial damages or losses), even if such Contributor + has been advised of the possibility of such damages. + + 9. Accepting Warranty or Additional Liability. While redistributing + the Work or Derivative Works thereof, You may choose to offer, + and charge a fee for, acceptance of support, warranty, indemnity, + or other liability obligations and/or rights consistent with this + License. However, in accepting such obligations, You may act only + on Your own behalf and on Your sole responsibility, not on behalf + of any other Contributor, and only if You agree to indemnify, + defend, and hold each Contributor harmless for any liability + incurred by, or claims asserted against, such Contributor by reason + of your accepting any such warranty or additional liability. 
+ + END OF TERMS AND CONDITIONS + + APPENDIX: How to apply the Apache License to your work. + + To apply the Apache License to your work, attach the following + boilerplate notice, with the fields enclosed by brackets "{}" + replaced with your own identifying information. (Don't include + the brackets!) The text should be enclosed in the appropriate + comment syntax for the file format. We also recommend that a + file or class name and description of purpose be included on the + same "printed page" as the copyright notice for easier + identification within third-party archives. + + Copyright 2018-2020 Amazon.com, Inc. or its affiliates. All Rights Reserved. + + Licensed under the Apache License, Version 2.0 (the "License"); + you may not use this file except in compliance with the License. + You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + + Unless required by applicable law or agreed to in writing, software + distributed under the License is distributed on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + See the License for the specific language governing permissions and + limitations under the License. 
\ No newline at end of file diff --git a/amplify/functions/downloadDocument/node_modules/@aws-sdk/util-locate-window/README.md b/amplify/functions/downloadDocument/node_modules/@aws-sdk/util-locate-window/README.md new file mode 100644 index 0000000..cac53d3 --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@aws-sdk/util-locate-window/README.md @@ -0,0 +1,4 @@ +# @aws-sdk/util-locate-window + +[![NPM version](https://img.shields.io/npm/v/@aws-sdk/util-locate-window/latest.svg)](https://www.npmjs.com/package/@aws-sdk/util-locate-window) +[![NPM downloads](https://img.shields.io/npm/dm/@aws-sdk/util-locate-window.svg)](https://www.npmjs.com/package/@aws-sdk/util-locate-window) diff --git a/amplify/functions/downloadDocument/node_modules/@aws-sdk/util-locate-window/dist-cjs/index.js b/amplify/functions/downloadDocument/node_modules/@aws-sdk/util-locate-window/dist-cjs/index.js new file mode 100644 index 0000000..95a6423 --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@aws-sdk/util-locate-window/dist-cjs/index.js @@ -0,0 +1,42 @@ +"use strict"; +var __defProp = Object.defineProperty; +var __getOwnPropDesc = Object.getOwnPropertyDescriptor; +var __getOwnPropNames = Object.getOwnPropertyNames; +var __hasOwnProp = Object.prototype.hasOwnProperty; +var __name = (target, value) => __defProp(target, "name", { value, configurable: true }); +var __export = (target, all) => { + for (var name in all) + __defProp(target, name, { get: all[name], enumerable: true }); +}; +var __copyProps = (to, from, except, desc) => { + if (from && typeof from === "object" || typeof from === "function") { + for (let key of __getOwnPropNames(from)) + if (!__hasOwnProp.call(to, key) && key !== except) + __defProp(to, key, { get: () => from[key], enumerable: !(desc = __getOwnPropDesc(from, key)) || desc.enumerable }); + } + return to; +}; +var __toCommonJS = (mod) => __copyProps(__defProp({}, "__esModule", { value: true }), mod); + +// src/index.ts +var 
src_exports = {}; +__export(src_exports, { + locateWindow: () => locateWindow +}); +module.exports = __toCommonJS(src_exports); +var fallbackWindow = {}; +function locateWindow() { + if (typeof window !== "undefined") { + return window; + } else if (typeof self !== "undefined") { + return self; + } + return fallbackWindow; +} +__name(locateWindow, "locateWindow"); +// Annotate the CommonJS export names for ESM import in node: + +0 && (module.exports = { + locateWindow +}); + diff --git a/amplify/functions/downloadDocument/node_modules/@aws-sdk/util-locate-window/dist-es/index.js b/amplify/functions/downloadDocument/node_modules/@aws-sdk/util-locate-window/dist-es/index.js new file mode 100644 index 0000000..a51e644 --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@aws-sdk/util-locate-window/dist-es/index.js @@ -0,0 +1,10 @@ +const fallbackWindow = {}; +export function locateWindow() { + if (typeof window !== "undefined") { + return window; + } + else if (typeof self !== "undefined") { + return self; + } + return fallbackWindow; +} diff --git a/amplify/functions/downloadDocument/node_modules/@aws-sdk/util-locate-window/dist-types/index.d.ts b/amplify/functions/downloadDocument/node_modules/@aws-sdk/util-locate-window/dist-types/index.d.ts new file mode 100644 index 0000000..2b02d7f --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@aws-sdk/util-locate-window/dist-types/index.d.ts @@ -0,0 +1,6 @@ +/** + * Locates the global scope for a browser or browser-like environment. If + * neither `window` nor `self` is defined by the environment, the same object + * will be returned on each invocation. 
+ */ +export declare function locateWindow(): Window; diff --git a/amplify/functions/downloadDocument/node_modules/@aws-sdk/util-locate-window/dist-types/ts3.4/index.d.ts b/amplify/functions/downloadDocument/node_modules/@aws-sdk/util-locate-window/dist-types/ts3.4/index.d.ts new file mode 100644 index 0000000..a5bbba3 --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@aws-sdk/util-locate-window/dist-types/ts3.4/index.d.ts @@ -0,0 +1 @@ +export declare function locateWindow(): Window; diff --git a/amplify/functions/downloadDocument/node_modules/@aws-sdk/util-locate-window/package.json b/amplify/functions/downloadDocument/node_modules/@aws-sdk/util-locate-window/package.json new file mode 100644 index 0000000..2835b09 --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@aws-sdk/util-locate-window/package.json @@ -0,0 +1,53 @@ +{ + "name": "@aws-sdk/util-locate-window", + "version": "3.723.0", + "scripts": { + "build": "concurrently 'yarn:build:cjs' 'yarn:build:es' 'yarn:build:types'", + "build:cjs": "node ../../scripts/compilation/inline util-locate-window", + "build:es": "tsc -p tsconfig.es.json", + "build:include:deps": "lerna run --scope $npm_package_name --include-dependencies build", + "build:types": "tsc -p tsconfig.types.json", + "build:types:downlevel": "downlevel-dts dist-types dist-types/ts3.4", + "clean": "rimraf ./dist-* && rimraf *.tsbuildinfo", + "test": "yarn g:vitest run", + "test:watch": "yarn g:vitest watch" + }, + "author": { + "name": "AWS SDK for JavaScript Team", + "url": "https://aws.amazon.com/javascript/" + }, + "license": "Apache-2.0", + "dependencies": { + "tslib": "^2.6.2" + }, + "devDependencies": { + "@tsconfig/recommended": "1.0.1", + "@types/node": "^18.19.69", + "concurrently": "7.0.0", + "downlevel-dts": "0.10.1", + "rimraf": "3.0.2", + "typescript": "~5.2.2" + }, + "main": "./dist-cjs/index.js", + "module": "./dist-es/index.js", + "types": "./dist-types/index.d.ts", + "engines": { + "node": 
">=18.0.0" + }, + "typesVersions": { + "<4.0": { + "dist-types/*": [ + "dist-types/ts3.4/*" + ] + } + }, + "files": [ + "dist-*/**" + ], + "homepage": "https://github.com/aws/aws-sdk-js-v3/tree/main/packages/util-locate-window", + "repository": { + "type": "git", + "url": "https://github.com/aws/aws-sdk-js-v3.git", + "directory": "packages/util-locate-window" + } +} diff --git a/amplify/functions/downloadDocument/node_modules/@aws-sdk/util-user-agent-browser/LICENSE b/amplify/functions/downloadDocument/node_modules/@aws-sdk/util-user-agent-browser/LICENSE new file mode 100644 index 0000000..dd65ae0 --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@aws-sdk/util-user-agent-browser/LICENSE @@ -0,0 +1,201 @@ + Apache License + Version 2.0, January 2004 + http://www.apache.org/licenses/ + + TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION + + 1. Definitions. + + "License" shall mean the terms and conditions for use, reproduction, + and distribution as defined by Sections 1 through 9 of this document. + + "Licensor" shall mean the copyright owner or entity authorized by + the copyright owner that is granting the License. + + "Legal Entity" shall mean the union of the acting entity and all + other entities that control, are controlled by, or are under common + control with that entity. For the purposes of this definition, + "control" means (i) the power, direct or indirect, to cause the + direction or management of such entity, whether by contract or + otherwise, or (ii) ownership of fifty percent (50%) or more of the + outstanding shares, or (iii) beneficial ownership of such entity. + + "You" (or "Your") shall mean an individual or Legal Entity + exercising permissions granted by this License. + + "Source" form shall mean the preferred form for making modifications, + including but not limited to software source code, documentation + source, and configuration files. 
+ + "Object" form shall mean any form resulting from mechanical + transformation or translation of a Source form, including but + not limited to compiled object code, generated documentation, + and conversions to other media types. + + "Work" shall mean the work of authorship, whether in Source or + Object form, made available under the License, as indicated by a + copyright notice that is included in or attached to the work + (an example is provided in the Appendix below). + + "Derivative Works" shall mean any work, whether in Source or Object + form, that is based on (or derived from) the Work and for which the + editorial revisions, annotations, elaborations, or other modifications + represent, as a whole, an original work of authorship. For the purposes + of this License, Derivative Works shall not include works that remain + separable from, or merely link (or bind by name) to the interfaces of, + the Work and Derivative Works thereof. + + "Contribution" shall mean any work of authorship, including + the original version of the Work and any modifications or additions + to that Work or Derivative Works thereof, that is intentionally + submitted to Licensor for inclusion in the Work by the copyright owner + or by an individual or Legal Entity authorized to submit on behalf of + the copyright owner. For the purposes of this definition, "submitted" + means any form of electronic, verbal, or written communication sent + to the Licensor or its representatives, including but not limited to + communication on electronic mailing lists, source code control systems, + and issue tracking systems that are managed by, or on behalf of, the + Licensor for the purpose of discussing and improving the Work, but + excluding communication that is conspicuously marked or otherwise + designated in writing by the copyright owner as "Not a Contribution." 
+ + "Contributor" shall mean Licensor and any individual or Legal Entity + on behalf of whom a Contribution has been received by Licensor and + subsequently incorporated within the Work. + + 2. Grant of Copyright License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + copyright license to reproduce, prepare Derivative Works of, + publicly display, publicly perform, sublicense, and distribute the + Work and such Derivative Works in Source or Object form. + + 3. Grant of Patent License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + (except as stated in this section) patent license to make, have made, + use, offer to sell, sell, import, and otherwise transfer the Work, + where such license applies only to those patent claims licensable + by such Contributor that are necessarily infringed by their + Contribution(s) alone or by combination of their Contribution(s) + with the Work to which such Contribution(s) was submitted. If You + institute patent litigation against any entity (including a + cross-claim or counterclaim in a lawsuit) alleging that the Work + or a Contribution incorporated within the Work constitutes direct + or contributory patent infringement, then any patent licenses + granted to You under this License for that Work shall terminate + as of the date such litigation is filed. + + 4. Redistribution. 
You may reproduce and distribute copies of the + Work or Derivative Works thereof in any medium, with or without + modifications, and in Source or Object form, provided that You + meet the following conditions: + + (a) You must give any other recipients of the Work or + Derivative Works a copy of this License; and + + (b) You must cause any modified files to carry prominent notices + stating that You changed the files; and + + (c) You must retain, in the Source form of any Derivative Works + that You distribute, all copyright, patent, trademark, and + attribution notices from the Source form of the Work, + excluding those notices that do not pertain to any part of + the Derivative Works; and + + (d) If the Work includes a "NOTICE" text file as part of its + distribution, then any Derivative Works that You distribute must + include a readable copy of the attribution notices contained + within such NOTICE file, excluding those notices that do not + pertain to any part of the Derivative Works, in at least one + of the following places: within a NOTICE text file distributed + as part of the Derivative Works; within the Source form or + documentation, if provided along with the Derivative Works; or, + within a display generated by the Derivative Works, if and + wherever such third-party notices normally appear. The contents + of the NOTICE file are for informational purposes only and + do not modify the License. You may add Your own attribution + notices within Derivative Works that You distribute, alongside + or as an addendum to the NOTICE text from the Work, provided + that such additional attribution notices cannot be construed + as modifying the License. 
+ + You may add Your own copyright statement to Your modifications and + may provide additional or different license terms and conditions + for use, reproduction, or distribution of Your modifications, or + for any such Derivative Works as a whole, provided Your use, + reproduction, and distribution of the Work otherwise complies with + the conditions stated in this License. + + 5. Submission of Contributions. Unless You explicitly state otherwise, + any Contribution intentionally submitted for inclusion in the Work + by You to the Licensor shall be under the terms and conditions of + this License, without any additional terms or conditions. + Notwithstanding the above, nothing herein shall supersede or modify + the terms of any separate license agreement you may have executed + with Licensor regarding such Contributions. + + 6. Trademarks. This License does not grant permission to use the trade + names, trademarks, service marks, or product names of the Licensor, + except as required for reasonable and customary use in describing the + origin of the Work and reproducing the content of the NOTICE file. + + 7. Disclaimer of Warranty. Unless required by applicable law or + agreed to in writing, Licensor provides the Work (and each + Contributor provides its Contributions) on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or + implied, including, without limitation, any warranties or conditions + of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A + PARTICULAR PURPOSE. You are solely responsible for determining the + appropriateness of using or redistributing the Work and assume any + risks associated with Your exercise of permissions under this License. + + 8. Limitation of Liability. 
In no event and under no legal theory, + whether in tort (including negligence), contract, or otherwise, + unless required by applicable law (such as deliberate and grossly + negligent acts) or agreed to in writing, shall any Contributor be + liable to You for damages, including any direct, indirect, special, + incidental, or consequential damages of any character arising as a + result of this License or out of the use or inability to use the + Work (including but not limited to damages for loss of goodwill, + work stoppage, computer failure or malfunction, or any and all + other commercial damages or losses), even if such Contributor + has been advised of the possibility of such damages. + + 9. Accepting Warranty or Additional Liability. While redistributing + the Work or Derivative Works thereof, You may choose to offer, + and charge a fee for, acceptance of support, warranty, indemnity, + or other liability obligations and/or rights consistent with this + License. However, in accepting such obligations, You may act only + on Your own behalf and on Your sole responsibility, not on behalf + of any other Contributor, and only if You agree to indemnify, + defend, and hold each Contributor harmless for any liability + incurred by, or claims asserted against, such Contributor by reason + of your accepting any such warranty or additional liability. + + END OF TERMS AND CONDITIONS + + APPENDIX: How to apply the Apache License to your work. + + To apply the Apache License to your work, attach the following + boilerplate notice, with the fields enclosed by brackets "{}" + replaced with your own identifying information. (Don't include + the brackets!) The text should be enclosed in the appropriate + comment syntax for the file format. We also recommend that a + file or class name and description of purpose be included on the + same "printed page" as the copyright notice for easier + identification within third-party archives. + + Copyright 2018-2020 Amazon.com, Inc. 
or its affiliates. All Rights Reserved. + + Licensed under the Apache License, Version 2.0 (the "License"); + you may not use this file except in compliance with the License. + You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + + Unless required by applicable law or agreed to in writing, software + distributed under the License is distributed on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + See the License for the specific language governing permissions and + limitations under the License. diff --git a/amplify/functions/downloadDocument/node_modules/@aws-sdk/util-user-agent-browser/README.md b/amplify/functions/downloadDocument/node_modules/@aws-sdk/util-user-agent-browser/README.md new file mode 100644 index 0000000..f2b6c62 --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@aws-sdk/util-user-agent-browser/README.md @@ -0,0 +1,10 @@ +# @aws-sdk/util-user-agent-browser + +[![NPM version](https://img.shields.io/npm/v/@aws-sdk/util-user-agent-browser/latest.svg)](https://www.npmjs.com/package/@aws-sdk/util-user-agent-browser) +[![NPM downloads](https://img.shields.io/npm/dm/@aws-sdk/util-user-agent-browser.svg)](https://www.npmjs.com/package/@aws-sdk/util-user-agent-browser) + +> An internal package + +## Usage + +You probably shouldn't, at least directly. 
diff --git a/amplify/functions/downloadDocument/node_modules/@aws-sdk/util-user-agent-browser/dist-cjs/configurations.js b/amplify/functions/downloadDocument/node_modules/@aws-sdk/util-user-agent-browser/dist-cjs/configurations.js new file mode 100644 index 0000000..c8ad2e5 --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@aws-sdk/util-user-agent-browser/dist-cjs/configurations.js @@ -0,0 +1,2 @@ +"use strict"; +Object.defineProperty(exports, "__esModule", { value: true }); diff --git a/amplify/functions/downloadDocument/node_modules/@aws-sdk/util-user-agent-browser/dist-cjs/index.js b/amplify/functions/downloadDocument/node_modules/@aws-sdk/util-user-agent-browser/dist-cjs/index.js new file mode 100644 index 0000000..aaf7621 --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@aws-sdk/util-user-agent-browser/dist-cjs/index.js @@ -0,0 +1,27 @@ +"use strict"; +Object.defineProperty(exports, "__esModule", { value: true }); +exports.defaultUserAgent = exports.createDefaultUserAgentProvider = void 0; +const tslib_1 = require("tslib"); +const bowser_1 = tslib_1.__importDefault(require("bowser")); +const createDefaultUserAgentProvider = ({ serviceId, clientVersion }) => async (config) => { + const parsedUA = typeof window !== "undefined" && window?.navigator?.userAgent + ? bowser_1.default.parse(window.navigator.userAgent) + : undefined; + const sections = [ + ["aws-sdk-js", clientVersion], + ["ua", "2.1"], + [`os/${parsedUA?.os?.name || "other"}`, parsedUA?.os?.version], + ["lang/js"], + ["md/browser", `${parsedUA?.browser?.name ?? "unknown"}_${parsedUA?.browser?.version ?? 
"unknown"}`], + ]; + if (serviceId) { + sections.push([`api/${serviceId}`, clientVersion]); + } + const appId = await config?.userAgentAppId?.(); + if (appId) { + sections.push([`app/${appId}`]); + } + return sections; +}; +exports.createDefaultUserAgentProvider = createDefaultUserAgentProvider; +exports.defaultUserAgent = exports.createDefaultUserAgentProvider; diff --git a/amplify/functions/downloadDocument/node_modules/@aws-sdk/util-user-agent-browser/dist-cjs/index.native.js b/amplify/functions/downloadDocument/node_modules/@aws-sdk/util-user-agent-browser/dist-cjs/index.native.js new file mode 100644 index 0000000..4d06e36 --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@aws-sdk/util-user-agent-browser/dist-cjs/index.native.js @@ -0,0 +1,22 @@ +"use strict"; +Object.defineProperty(exports, "__esModule", { value: true }); +exports.defaultUserAgent = exports.createDefaultUserAgentProvider = void 0; +const createDefaultUserAgentProvider = ({ serviceId, clientVersion }) => async (config) => { + const sections = [ + ["aws-sdk-js", clientVersion], + ["ua", "2.1"], + ["os/other"], + ["lang/js"], + ["md/rn"], + ]; + if (serviceId) { + sections.push([`api/${serviceId}`, clientVersion]); + } + const appId = await config?.userAgentAppId?.(); + if (appId) { + sections.push([`app/${appId}`]); + } + return sections; +}; +exports.createDefaultUserAgentProvider = createDefaultUserAgentProvider; +exports.defaultUserAgent = exports.createDefaultUserAgentProvider; diff --git a/amplify/functions/downloadDocument/node_modules/@aws-sdk/util-user-agent-browser/dist-es/configurations.js b/amplify/functions/downloadDocument/node_modules/@aws-sdk/util-user-agent-browser/dist-es/configurations.js new file mode 100644 index 0000000..cb0ff5c --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@aws-sdk/util-user-agent-browser/dist-es/configurations.js @@ -0,0 +1 @@ +export {}; diff --git 
a/amplify/functions/downloadDocument/node_modules/@aws-sdk/util-user-agent-browser/dist-es/index.js b/amplify/functions/downloadDocument/node_modules/@aws-sdk/util-user-agent-browser/dist-es/index.js new file mode 100644 index 0000000..1584d7e --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@aws-sdk/util-user-agent-browser/dist-es/index.js @@ -0,0 +1,22 @@ +import bowser from "bowser"; +export const createDefaultUserAgentProvider = ({ serviceId, clientVersion }) => async (config) => { + const parsedUA = typeof window !== "undefined" && window?.navigator?.userAgent + ? bowser.parse(window.navigator.userAgent) + : undefined; + const sections = [ + ["aws-sdk-js", clientVersion], + ["ua", "2.1"], + [`os/${parsedUA?.os?.name || "other"}`, parsedUA?.os?.version], + ["lang/js"], + ["md/browser", `${parsedUA?.browser?.name ?? "unknown"}_${parsedUA?.browser?.version ?? "unknown"}`], + ]; + if (serviceId) { + sections.push([`api/${serviceId}`, clientVersion]); + } + const appId = await config?.userAgentAppId?.(); + if (appId) { + sections.push([`app/${appId}`]); + } + return sections; +}; +export const defaultUserAgent = createDefaultUserAgentProvider; diff --git a/amplify/functions/downloadDocument/node_modules/@aws-sdk/util-user-agent-browser/dist-es/index.native.js b/amplify/functions/downloadDocument/node_modules/@aws-sdk/util-user-agent-browser/dist-es/index.native.js new file mode 100644 index 0000000..04c7ae5 --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@aws-sdk/util-user-agent-browser/dist-es/index.native.js @@ -0,0 +1,18 @@ +export const createDefaultUserAgentProvider = ({ serviceId, clientVersion }) => async (config) => { + const sections = [ + ["aws-sdk-js", clientVersion], + ["ua", "2.1"], + ["os/other"], + ["lang/js"], + ["md/rn"], + ]; + if (serviceId) { + sections.push([`api/${serviceId}`, clientVersion]); + } + const appId = await config?.userAgentAppId?.(); + if (appId) { + sections.push([`app/${appId}`]); + } + 
return sections; +}; +export const defaultUserAgent = createDefaultUserAgentProvider; diff --git a/amplify/functions/downloadDocument/node_modules/@aws-sdk/util-user-agent-browser/dist-types/configurations.d.ts b/amplify/functions/downloadDocument/node_modules/@aws-sdk/util-user-agent-browser/dist-types/configurations.d.ts new file mode 100644 index 0000000..00537a9 --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@aws-sdk/util-user-agent-browser/dist-types/configurations.d.ts @@ -0,0 +1,7 @@ +/** + * @internal + */ +export interface DefaultUserAgentOptions { + serviceId?: string; + clientVersion: string; +} diff --git a/amplify/functions/downloadDocument/node_modules/@aws-sdk/util-user-agent-browser/dist-types/index.d.ts b/amplify/functions/downloadDocument/node_modules/@aws-sdk/util-user-agent-browser/dist-types/index.d.ts new file mode 100644 index 0000000..fb107d4 --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@aws-sdk/util-user-agent-browser/dist-types/index.d.ts @@ -0,0 +1,17 @@ +import { Provider, UserAgent } from "@smithy/types"; +import { DefaultUserAgentOptions } from "./configurations"; +export interface PreviouslyResolved { + userAgentAppId: Provider; +} +/** + * @internal + * + * Default provider to the user agent in browsers. It's a best effort to infer + * the device information. 
It uses bowser library to detect the browser and version + */ +export declare const createDefaultUserAgentProvider: ({ serviceId, clientVersion }: DefaultUserAgentOptions) => (config?: PreviouslyResolved) => Promise; +/** + * @internal + * @deprecated use createDefaultUserAgentProvider + */ +export declare const defaultUserAgent: ({ serviceId, clientVersion }: DefaultUserAgentOptions) => (config?: PreviouslyResolved) => Promise; diff --git a/amplify/functions/downloadDocument/node_modules/@aws-sdk/util-user-agent-browser/dist-types/index.native.d.ts b/amplify/functions/downloadDocument/node_modules/@aws-sdk/util-user-agent-browser/dist-types/index.native.d.ts new file mode 100644 index 0000000..5b4926b --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@aws-sdk/util-user-agent-browser/dist-types/index.native.d.ts @@ -0,0 +1,17 @@ +import { Provider, UserAgent } from "@smithy/types"; +import { DefaultUserAgentOptions } from "./configurations"; +export interface PreviouslyResolved { + userAgentAppId: Provider; +} +/** + * @internal + * + * Default provider to the user agent in ReactNative. It's a best effort to infer + * the device information. 
It uses bowser library to detect the browser and virsion + */ +export declare const createDefaultUserAgentProvider: ({ serviceId, clientVersion }: DefaultUserAgentOptions) => (config?: PreviouslyResolved) => Promise; +/** + * @internal + * @deprecated use createDefaultUserAgentProvider + */ +export declare const defaultUserAgent: ({ serviceId, clientVersion }: DefaultUserAgentOptions) => (config?: PreviouslyResolved) => Promise; diff --git a/amplify/functions/downloadDocument/node_modules/@aws-sdk/util-user-agent-browser/dist-types/ts3.4/configurations.d.ts b/amplify/functions/downloadDocument/node_modules/@aws-sdk/util-user-agent-browser/dist-types/ts3.4/configurations.d.ts new file mode 100644 index 0000000..1428231 --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@aws-sdk/util-user-agent-browser/dist-types/ts3.4/configurations.d.ts @@ -0,0 +1,4 @@ +export interface DefaultUserAgentOptions { + serviceId?: string; + clientVersion: string; +} diff --git a/amplify/functions/downloadDocument/node_modules/@aws-sdk/util-user-agent-browser/dist-types/ts3.4/index.d.ts b/amplify/functions/downloadDocument/node_modules/@aws-sdk/util-user-agent-browser/dist-types/ts3.4/index.d.ts new file mode 100644 index 0000000..32e643a --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@aws-sdk/util-user-agent-browser/dist-types/ts3.4/index.d.ts @@ -0,0 +1,17 @@ +import { Provider, UserAgent } from "@smithy/types"; +import { DefaultUserAgentOptions } from "./configurations"; +export interface PreviouslyResolved { + userAgentAppId: Provider; +} +export declare const createDefaultUserAgentProvider: ({ + serviceId, + clientVersion, +}: DefaultUserAgentOptions) => ( + config?: PreviouslyResolved +) => Promise; +export declare const defaultUserAgent: ({ + serviceId, + clientVersion, +}: DefaultUserAgentOptions) => ( + config?: PreviouslyResolved +) => Promise; diff --git 
a/amplify/functions/downloadDocument/node_modules/@aws-sdk/util-user-agent-browser/dist-types/ts3.4/index.native.d.ts b/amplify/functions/downloadDocument/node_modules/@aws-sdk/util-user-agent-browser/dist-types/ts3.4/index.native.d.ts new file mode 100644 index 0000000..32e643a --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@aws-sdk/util-user-agent-browser/dist-types/ts3.4/index.native.d.ts @@ -0,0 +1,17 @@ +import { Provider, UserAgent } from "@smithy/types"; +import { DefaultUserAgentOptions } from "./configurations"; +export interface PreviouslyResolved { + userAgentAppId: Provider; +} +export declare const createDefaultUserAgentProvider: ({ + serviceId, + clientVersion, +}: DefaultUserAgentOptions) => ( + config?: PreviouslyResolved +) => Promise; +export declare const defaultUserAgent: ({ + serviceId, + clientVersion, +}: DefaultUserAgentOptions) => ( + config?: PreviouslyResolved +) => Promise; diff --git a/amplify/functions/downloadDocument/node_modules/@aws-sdk/util-user-agent-browser/package.json b/amplify/functions/downloadDocument/node_modules/@aws-sdk/util-user-agent-browser/package.json new file mode 100644 index 0000000..4065f6d --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@aws-sdk/util-user-agent-browser/package.json @@ -0,0 +1,54 @@ +{ + "name": "@aws-sdk/util-user-agent-browser", + "version": "3.775.0", + "scripts": { + "build": "concurrently 'yarn:build:cjs' 'yarn:build:es' 'yarn:build:types'", + "build:cjs": "node ../../scripts/compilation/inline util-user-agent-browser", + "build:es": "tsc -p tsconfig.es.json", + "build:include:deps": "lerna run --scope $npm_package_name --include-dependencies build", + "build:types": "tsc -p tsconfig.types.json", + "build:types:downlevel": "downlevel-dts dist-types dist-types/ts3.4", + "clean": "rimraf ./dist-* && rimraf *.tsbuildinfo", + "test": "yarn g:vitest run", + "test:watch": "yarn g:vitest watch" + }, + "main": "./dist-cjs/index.js", + "module": 
"./dist-es/index.js", + "browser": "./dist-es/index.js", + "types": "./dist-types/index.d.ts", + "author": { + "name": "AWS SDK for JavaScript Team", + "url": "https://aws.amazon.com/javascript/" + }, + "license": "Apache-2.0", + "react-native": "dist-es/index.native.js", + "dependencies": { + "@aws-sdk/types": "3.775.0", + "@smithy/types": "^4.2.0", + "bowser": "^2.11.0", + "tslib": "^2.6.2" + }, + "devDependencies": { + "@tsconfig/recommended": "1.0.1", + "concurrently": "7.0.0", + "downlevel-dts": "0.10.1", + "rimraf": "3.0.2", + "typescript": "~5.2.2" + }, + "typesVersions": { + "<4.0": { + "dist-types/*": [ + "dist-types/ts3.4/*" + ] + } + }, + "files": [ + "dist-*/**" + ], + "homepage": "https://github.com/aws/aws-sdk-js-v3/tree/main/packages/util-user-agent-browser", + "repository": { + "type": "git", + "url": "https://github.com/aws/aws-sdk-js-v3.git", + "directory": "packages/util-user-agent-browser" + } +} diff --git a/amplify/functions/downloadDocument/node_modules/@aws-sdk/util-user-agent-node/LICENSE b/amplify/functions/downloadDocument/node_modules/@aws-sdk/util-user-agent-node/LICENSE new file mode 100644 index 0000000..dd65ae0 --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@aws-sdk/util-user-agent-node/LICENSE @@ -0,0 +1,201 @@ + Apache License + Version 2.0, January 2004 + http://www.apache.org/licenses/ + + TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION + + 1. Definitions. + + "License" shall mean the terms and conditions for use, reproduction, + and distribution as defined by Sections 1 through 9 of this document. + + "Licensor" shall mean the copyright owner or entity authorized by + the copyright owner that is granting the License. + + "Legal Entity" shall mean the union of the acting entity and all + other entities that control, are controlled by, or are under common + control with that entity. 
For the purposes of this definition, + "control" means (i) the power, direct or indirect, to cause the + direction or management of such entity, whether by contract or + otherwise, or (ii) ownership of fifty percent (50%) or more of the + outstanding shares, or (iii) beneficial ownership of such entity. + + "You" (or "Your") shall mean an individual or Legal Entity + exercising permissions granted by this License. + + "Source" form shall mean the preferred form for making modifications, + including but not limited to software source code, documentation + source, and configuration files. + + "Object" form shall mean any form resulting from mechanical + transformation or translation of a Source form, including but + not limited to compiled object code, generated documentation, + and conversions to other media types. + + "Work" shall mean the work of authorship, whether in Source or + Object form, made available under the License, as indicated by a + copyright notice that is included in or attached to the work + (an example is provided in the Appendix below). + + "Derivative Works" shall mean any work, whether in Source or Object + form, that is based on (or derived from) the Work and for which the + editorial revisions, annotations, elaborations, or other modifications + represent, as a whole, an original work of authorship. For the purposes + of this License, Derivative Works shall not include works that remain + separable from, or merely link (or bind by name) to the interfaces of, + the Work and Derivative Works thereof. + + "Contribution" shall mean any work of authorship, including + the original version of the Work and any modifications or additions + to that Work or Derivative Works thereof, that is intentionally + submitted to Licensor for inclusion in the Work by the copyright owner + or by an individual or Legal Entity authorized to submit on behalf of + the copyright owner. 
For the purposes of this definition, "submitted" + means any form of electronic, verbal, or written communication sent + to the Licensor or its representatives, including but not limited to + communication on electronic mailing lists, source code control systems, + and issue tracking systems that are managed by, or on behalf of, the + Licensor for the purpose of discussing and improving the Work, but + excluding communication that is conspicuously marked or otherwise + designated in writing by the copyright owner as "Not a Contribution." + + "Contributor" shall mean Licensor and any individual or Legal Entity + on behalf of whom a Contribution has been received by Licensor and + subsequently incorporated within the Work. + + 2. Grant of Copyright License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + copyright license to reproduce, prepare Derivative Works of, + publicly display, publicly perform, sublicense, and distribute the + Work and such Derivative Works in Source or Object form. + + 3. Grant of Patent License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + (except as stated in this section) patent license to make, have made, + use, offer to sell, sell, import, and otherwise transfer the Work, + where such license applies only to those patent claims licensable + by such Contributor that are necessarily infringed by their + Contribution(s) alone or by combination of their Contribution(s) + with the Work to which such Contribution(s) was submitted. 
If You + institute patent litigation against any entity (including a + cross-claim or counterclaim in a lawsuit) alleging that the Work + or a Contribution incorporated within the Work constitutes direct + or contributory patent infringement, then any patent licenses + granted to You under this License for that Work shall terminate + as of the date such litigation is filed. + + 4. Redistribution. You may reproduce and distribute copies of the + Work or Derivative Works thereof in any medium, with or without + modifications, and in Source or Object form, provided that You + meet the following conditions: + + (a) You must give any other recipients of the Work or + Derivative Works a copy of this License; and + + (b) You must cause any modified files to carry prominent notices + stating that You changed the files; and + + (c) You must retain, in the Source form of any Derivative Works + that You distribute, all copyright, patent, trademark, and + attribution notices from the Source form of the Work, + excluding those notices that do not pertain to any part of + the Derivative Works; and + + (d) If the Work includes a "NOTICE" text file as part of its + distribution, then any Derivative Works that You distribute must + include a readable copy of the attribution notices contained + within such NOTICE file, excluding those notices that do not + pertain to any part of the Derivative Works, in at least one + of the following places: within a NOTICE text file distributed + as part of the Derivative Works; within the Source form or + documentation, if provided along with the Derivative Works; or, + within a display generated by the Derivative Works, if and + wherever such third-party notices normally appear. The contents + of the NOTICE file are for informational purposes only and + do not modify the License. 
You may add Your own attribution + notices within Derivative Works that You distribute, alongside + or as an addendum to the NOTICE text from the Work, provided + that such additional attribution notices cannot be construed + as modifying the License. + + You may add Your own copyright statement to Your modifications and + may provide additional or different license terms and conditions + for use, reproduction, or distribution of Your modifications, or + for any such Derivative Works as a whole, provided Your use, + reproduction, and distribution of the Work otherwise complies with + the conditions stated in this License. + + 5. Submission of Contributions. Unless You explicitly state otherwise, + any Contribution intentionally submitted for inclusion in the Work + by You to the Licensor shall be under the terms and conditions of + this License, without any additional terms or conditions. + Notwithstanding the above, nothing herein shall supersede or modify + the terms of any separate license agreement you may have executed + with Licensor regarding such Contributions. + + 6. Trademarks. This License does not grant permission to use the trade + names, trademarks, service marks, or product names of the Licensor, + except as required for reasonable and customary use in describing the + origin of the Work and reproducing the content of the NOTICE file. + + 7. Disclaimer of Warranty. Unless required by applicable law or + agreed to in writing, Licensor provides the Work (and each + Contributor provides its Contributions) on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or + implied, including, without limitation, any warranties or conditions + of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A + PARTICULAR PURPOSE. You are solely responsible for determining the + appropriateness of using or redistributing the Work and assume any + risks associated with Your exercise of permissions under this License. + + 8. 
Limitation of Liability. In no event and under no legal theory, + whether in tort (including negligence), contract, or otherwise, + unless required by applicable law (such as deliberate and grossly + negligent acts) or agreed to in writing, shall any Contributor be + liable to You for damages, including any direct, indirect, special, + incidental, or consequential damages of any character arising as a + result of this License or out of the use or inability to use the + Work (including but not limited to damages for loss of goodwill, + work stoppage, computer failure or malfunction, or any and all + other commercial damages or losses), even if such Contributor + has been advised of the possibility of such damages. + + 9. Accepting Warranty or Additional Liability. While redistributing + the Work or Derivative Works thereof, You may choose to offer, + and charge a fee for, acceptance of support, warranty, indemnity, + or other liability obligations and/or rights consistent with this + License. However, in accepting such obligations, You may act only + on Your own behalf and on Your sole responsibility, not on behalf + of any other Contributor, and only if You agree to indemnify, + defend, and hold each Contributor harmless for any liability + incurred by, or claims asserted against, such Contributor by reason + of your accepting any such warranty or additional liability. + + END OF TERMS AND CONDITIONS + + APPENDIX: How to apply the Apache License to your work. + + To apply the Apache License to your work, attach the following + boilerplate notice, with the fields enclosed by brackets "{}" + replaced with your own identifying information. (Don't include + the brackets!) The text should be enclosed in the appropriate + comment syntax for the file format. We also recommend that a + file or class name and description of purpose be included on the + same "printed page" as the copyright notice for easier + identification within third-party archives. 
+ + Copyright 2018-2020 Amazon.com, Inc. or its affiliates. All Rights Reserved. + + Licensed under the Apache License, Version 2.0 (the "License"); + you may not use this file except in compliance with the License. + You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + + Unless required by applicable law or agreed to in writing, software + distributed under the License is distributed on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + See the License for the specific language governing permissions and + limitations under the License. diff --git a/amplify/functions/downloadDocument/node_modules/@aws-sdk/util-user-agent-node/README.md b/amplify/functions/downloadDocument/node_modules/@aws-sdk/util-user-agent-node/README.md new file mode 100644 index 0000000..fccfbb5 --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@aws-sdk/util-user-agent-node/README.md @@ -0,0 +1,10 @@ +# @aws-sdk/util-user-agent-node + +[![NPM version](https://img.shields.io/npm/v/@aws-sdk/util-user-agent-node/latest.svg)](https://www.npmjs.com/package/@aws-sdk/util-user-agent-node) +[![NPM downloads](https://img.shields.io/npm/dm/@aws-sdk/util-user-agent-node.svg)](https://www.npmjs.com/package/@aws-sdk/util-user-agent-node) + +> An internal package + +## Usage + +You probably shouldn't, at least directly. 
diff --git a/amplify/functions/downloadDocument/node_modules/@aws-sdk/util-user-agent-node/dist-cjs/index.js b/amplify/functions/downloadDocument/node_modules/@aws-sdk/util-user-agent-node/dist-cjs/index.js new file mode 100644 index 0000000..083dccb --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@aws-sdk/util-user-agent-node/dist-cjs/index.js @@ -0,0 +1,102 @@ +"use strict"; +var __defProp = Object.defineProperty; +var __getOwnPropDesc = Object.getOwnPropertyDescriptor; +var __getOwnPropNames = Object.getOwnPropertyNames; +var __hasOwnProp = Object.prototype.hasOwnProperty; +var __name = (target, value) => __defProp(target, "name", { value, configurable: true }); +var __export = (target, all) => { + for (var name in all) + __defProp(target, name, { get: all[name], enumerable: true }); +}; +var __copyProps = (to, from, except, desc) => { + if (from && typeof from === "object" || typeof from === "function") { + for (let key of __getOwnPropNames(from)) + if (!__hasOwnProp.call(to, key) && key !== except) + __defProp(to, key, { get: () => from[key], enumerable: !(desc = __getOwnPropDesc(from, key)) || desc.enumerable }); + } + return to; +}; +var __toCommonJS = (mod) => __copyProps(__defProp({}, "__esModule", { value: true }), mod); + +// src/index.ts +var index_exports = {}; +__export(index_exports, { + NODE_APP_ID_CONFIG_OPTIONS: () => NODE_APP_ID_CONFIG_OPTIONS, + UA_APP_ID_ENV_NAME: () => UA_APP_ID_ENV_NAME, + UA_APP_ID_INI_NAME: () => UA_APP_ID_INI_NAME, + createDefaultUserAgentProvider: () => createDefaultUserAgentProvider, + crtAvailability: () => crtAvailability, + defaultUserAgent: () => defaultUserAgent +}); +module.exports = __toCommonJS(index_exports); + +// src/defaultUserAgent.ts +var import_os = require("os"); +var import_process = require("process"); + +// src/crt-availability.ts +var crtAvailability = { + isCrtAvailable: false +}; + +// src/is-crt-available.ts +var isCrtAvailable = /* @__PURE__ */ __name(() => { + if 
(crtAvailability.isCrtAvailable) { + return ["md/crt-avail"]; + } + return null; +}, "isCrtAvailable"); + +// src/defaultUserAgent.ts +var createDefaultUserAgentProvider = /* @__PURE__ */ __name(({ serviceId, clientVersion }) => { + return async (config) => { + const sections = [ + // sdk-metadata + ["aws-sdk-js", clientVersion], + // ua-metadata + ["ua", "2.1"], + // os-metadata + [`os/${(0, import_os.platform)()}`, (0, import_os.release)()], + // language-metadata + // ECMAScript edition doesn't matter in JS, so no version needed. + ["lang/js"], + ["md/nodejs", `${import_process.versions.node}`] + ]; + const crtAvailable = isCrtAvailable(); + if (crtAvailable) { + sections.push(crtAvailable); + } + if (serviceId) { + sections.push([`api/${serviceId}`, clientVersion]); + } + if (import_process.env.AWS_EXECUTION_ENV) { + sections.push([`exec-env/${import_process.env.AWS_EXECUTION_ENV}`]); + } + const appId = await config?.userAgentAppId?.(); + const resolvedUserAgent = appId ? [...sections, [`app/${appId}`]] : [...sections]; + return resolvedUserAgent; + }; +}, "createDefaultUserAgentProvider"); +var defaultUserAgent = createDefaultUserAgentProvider; + +// src/nodeAppIdConfigOptions.ts +var import_middleware_user_agent = require("@aws-sdk/middleware-user-agent"); +var UA_APP_ID_ENV_NAME = "AWS_SDK_UA_APP_ID"; +var UA_APP_ID_INI_NAME = "sdk_ua_app_id"; +var UA_APP_ID_INI_NAME_DEPRECATED = "sdk-ua-app-id"; +var NODE_APP_ID_CONFIG_OPTIONS = { + environmentVariableSelector: /* @__PURE__ */ __name((env2) => env2[UA_APP_ID_ENV_NAME], "environmentVariableSelector"), + configFileSelector: /* @__PURE__ */ __name((profile) => profile[UA_APP_ID_INI_NAME] ?? 
profile[UA_APP_ID_INI_NAME_DEPRECATED], "configFileSelector"), + default: import_middleware_user_agent.DEFAULT_UA_APP_ID +}; +// Annotate the CommonJS export names for ESM import in node: + +0 && (module.exports = { + crtAvailability, + createDefaultUserAgentProvider, + defaultUserAgent, + UA_APP_ID_ENV_NAME, + UA_APP_ID_INI_NAME, + NODE_APP_ID_CONFIG_OPTIONS +}); + diff --git a/amplify/functions/downloadDocument/node_modules/@aws-sdk/util-user-agent-node/dist-es/crt-availability.js b/amplify/functions/downloadDocument/node_modules/@aws-sdk/util-user-agent-node/dist-es/crt-availability.js new file mode 100644 index 0000000..99ebeb9 --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@aws-sdk/util-user-agent-node/dist-es/crt-availability.js @@ -0,0 +1,3 @@ +export const crtAvailability = { + isCrtAvailable: false, +}; diff --git a/amplify/functions/downloadDocument/node_modules/@aws-sdk/util-user-agent-node/dist-es/defaultUserAgent.js b/amplify/functions/downloadDocument/node_modules/@aws-sdk/util-user-agent-node/dist-es/defaultUserAgent.js new file mode 100644 index 0000000..d92681d --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@aws-sdk/util-user-agent-node/dist-es/defaultUserAgent.js @@ -0,0 +1,29 @@ +import { platform, release } from "os"; +import { env, versions } from "process"; +import { isCrtAvailable } from "./is-crt-available"; +export { crtAvailability } from "./crt-availability"; +export const createDefaultUserAgentProvider = ({ serviceId, clientVersion }) => { + return async (config) => { + const sections = [ + ["aws-sdk-js", clientVersion], + ["ua", "2.1"], + [`os/${platform()}`, release()], + ["lang/js"], + ["md/nodejs", `${versions.node}`], + ]; + const crtAvailable = isCrtAvailable(); + if (crtAvailable) { + sections.push(crtAvailable); + } + if (serviceId) { + sections.push([`api/${serviceId}`, clientVersion]); + } + if (env.AWS_EXECUTION_ENV) { + sections.push([`exec-env/${env.AWS_EXECUTION_ENV}`]); + } + const 
appId = await config?.userAgentAppId?.(); + const resolvedUserAgent = appId ? [...sections, [`app/${appId}`]] : [...sections]; + return resolvedUserAgent; + }; +}; +export const defaultUserAgent = createDefaultUserAgentProvider; diff --git a/amplify/functions/downloadDocument/node_modules/@aws-sdk/util-user-agent-node/dist-es/index.js b/amplify/functions/downloadDocument/node_modules/@aws-sdk/util-user-agent-node/dist-es/index.js new file mode 100644 index 0000000..cbf37f2 --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@aws-sdk/util-user-agent-node/dist-es/index.js @@ -0,0 +1,2 @@ +export * from "./defaultUserAgent"; +export * from "./nodeAppIdConfigOptions"; diff --git a/amplify/functions/downloadDocument/node_modules/@aws-sdk/util-user-agent-node/dist-es/is-crt-available.js b/amplify/functions/downloadDocument/node_modules/@aws-sdk/util-user-agent-node/dist-es/is-crt-available.js new file mode 100644 index 0000000..e9f8b0d --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@aws-sdk/util-user-agent-node/dist-es/is-crt-available.js @@ -0,0 +1,7 @@ +import { crtAvailability } from "./crt-availability"; +export const isCrtAvailable = () => { + if (crtAvailability.isCrtAvailable) { + return ["md/crt-avail"]; + } + return null; +}; diff --git a/amplify/functions/downloadDocument/node_modules/@aws-sdk/util-user-agent-node/dist-es/nodeAppIdConfigOptions.js b/amplify/functions/downloadDocument/node_modules/@aws-sdk/util-user-agent-node/dist-es/nodeAppIdConfigOptions.js new file mode 100644 index 0000000..f270db9 --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@aws-sdk/util-user-agent-node/dist-es/nodeAppIdConfigOptions.js @@ -0,0 +1,9 @@ +import { DEFAULT_UA_APP_ID } from "@aws-sdk/middleware-user-agent"; +export const UA_APP_ID_ENV_NAME = "AWS_SDK_UA_APP_ID"; +export const UA_APP_ID_INI_NAME = "sdk_ua_app_id"; +const UA_APP_ID_INI_NAME_DEPRECATED = "sdk-ua-app-id"; +export const NODE_APP_ID_CONFIG_OPTIONS = { + 
environmentVariableSelector: (env) => env[UA_APP_ID_ENV_NAME], + configFileSelector: (profile) => profile[UA_APP_ID_INI_NAME] ?? profile[UA_APP_ID_INI_NAME_DEPRECATED], + default: DEFAULT_UA_APP_ID, +}; diff --git a/amplify/functions/downloadDocument/node_modules/@aws-sdk/util-user-agent-node/dist-types/crt-availability.d.ts b/amplify/functions/downloadDocument/node_modules/@aws-sdk/util-user-agent-node/dist-types/crt-availability.d.ts new file mode 100644 index 0000000..c2033a0 --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@aws-sdk/util-user-agent-node/dist-types/crt-availability.d.ts @@ -0,0 +1,9 @@ +/** + * @internal + * + * If \@aws-sdk/signature-v4-crt is installed and loaded, it will register + * this value to true. + */ +export declare const crtAvailability: { + isCrtAvailable: boolean; +}; diff --git a/amplify/functions/downloadDocument/node_modules/@aws-sdk/util-user-agent-node/dist-types/defaultUserAgent.d.ts b/amplify/functions/downloadDocument/node_modules/@aws-sdk/util-user-agent-node/dist-types/defaultUserAgent.d.ts new file mode 100644 index 0000000..28537a6 --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@aws-sdk/util-user-agent-node/dist-types/defaultUserAgent.d.ts @@ -0,0 +1,23 @@ +import { Provider, UserAgent } from "@smithy/types"; +export { crtAvailability } from "./crt-availability"; +export interface DefaultUserAgentOptions { + serviceId?: string; + clientVersion: string; +} +export interface PreviouslyResolved { + userAgentAppId: Provider; +} +/** + * @internal + * + * Collect metrics from runtime to put into user agent. 
+ */ +export declare const createDefaultUserAgentProvider: ({ serviceId, clientVersion }: DefaultUserAgentOptions) => (config?: PreviouslyResolved) => Promise; +/** + * + * @internal + * + * @deprecated use createDefaultUserAgentProvider + * + */ +export declare const defaultUserAgent: ({ serviceId, clientVersion }: DefaultUserAgentOptions) => (config?: PreviouslyResolved) => Promise; diff --git a/amplify/functions/downloadDocument/node_modules/@aws-sdk/util-user-agent-node/dist-types/index.d.ts b/amplify/functions/downloadDocument/node_modules/@aws-sdk/util-user-agent-node/dist-types/index.d.ts new file mode 100644 index 0000000..cbf37f2 --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@aws-sdk/util-user-agent-node/dist-types/index.d.ts @@ -0,0 +1,2 @@ +export * from "./defaultUserAgent"; +export * from "./nodeAppIdConfigOptions"; diff --git a/amplify/functions/downloadDocument/node_modules/@aws-sdk/util-user-agent-node/dist-types/is-crt-available.d.ts b/amplify/functions/downloadDocument/node_modules/@aws-sdk/util-user-agent-node/dist-types/is-crt-available.d.ts new file mode 100644 index 0000000..675ffa8 --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@aws-sdk/util-user-agent-node/dist-types/is-crt-available.d.ts @@ -0,0 +1,5 @@ +import { UserAgentPair } from "@smithy/types"; +/** + * @internal + */ +export declare const isCrtAvailable: () => UserAgentPair | null; diff --git a/amplify/functions/downloadDocument/node_modules/@aws-sdk/util-user-agent-node/dist-types/nodeAppIdConfigOptions.d.ts b/amplify/functions/downloadDocument/node_modules/@aws-sdk/util-user-agent-node/dist-types/nodeAppIdConfigOptions.d.ts new file mode 100644 index 0000000..92a8edc --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@aws-sdk/util-user-agent-node/dist-types/nodeAppIdConfigOptions.d.ts @@ -0,0 +1,13 @@ +import { LoadedConfigSelectors } from "@smithy/node-config-provider"; +/** + * @internal + */ +export declare const 
UA_APP_ID_ENV_NAME = "AWS_SDK_UA_APP_ID"; +/** + * @internal + */ +export declare const UA_APP_ID_INI_NAME = "sdk_ua_app_id"; +/** + * @internal + */ +export declare const NODE_APP_ID_CONFIG_OPTIONS: LoadedConfigSelectors; diff --git a/amplify/functions/downloadDocument/node_modules/@aws-sdk/util-user-agent-node/dist-types/ts3.4/crt-availability.d.ts b/amplify/functions/downloadDocument/node_modules/@aws-sdk/util-user-agent-node/dist-types/ts3.4/crt-availability.d.ts new file mode 100644 index 0000000..9dccfb0 --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@aws-sdk/util-user-agent-node/dist-types/ts3.4/crt-availability.d.ts @@ -0,0 +1,3 @@ +export declare const crtAvailability: { + isCrtAvailable: boolean; +}; diff --git a/amplify/functions/downloadDocument/node_modules/@aws-sdk/util-user-agent-node/dist-types/ts3.4/defaultUserAgent.d.ts b/amplify/functions/downloadDocument/node_modules/@aws-sdk/util-user-agent-node/dist-types/ts3.4/defaultUserAgent.d.ts new file mode 100644 index 0000000..6e4884f --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@aws-sdk/util-user-agent-node/dist-types/ts3.4/defaultUserAgent.d.ts @@ -0,0 +1,21 @@ +import { Provider, UserAgent } from "@smithy/types"; +export { crtAvailability } from "./crt-availability"; +export interface DefaultUserAgentOptions { + serviceId?: string; + clientVersion: string; +} +export interface PreviouslyResolved { + userAgentAppId: Provider; +} +export declare const createDefaultUserAgentProvider: ({ + serviceId, + clientVersion, +}: DefaultUserAgentOptions) => ( + config?: PreviouslyResolved +) => Promise; +export declare const defaultUserAgent: ({ + serviceId, + clientVersion, +}: DefaultUserAgentOptions) => ( + config?: PreviouslyResolved +) => Promise; diff --git a/amplify/functions/downloadDocument/node_modules/@aws-sdk/util-user-agent-node/dist-types/ts3.4/index.d.ts 
b/amplify/functions/downloadDocument/node_modules/@aws-sdk/util-user-agent-node/dist-types/ts3.4/index.d.ts new file mode 100644 index 0000000..cbf37f2 --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@aws-sdk/util-user-agent-node/dist-types/ts3.4/index.d.ts @@ -0,0 +1,2 @@ +export * from "./defaultUserAgent"; +export * from "./nodeAppIdConfigOptions"; diff --git a/amplify/functions/downloadDocument/node_modules/@aws-sdk/util-user-agent-node/dist-types/ts3.4/is-crt-available.d.ts b/amplify/functions/downloadDocument/node_modules/@aws-sdk/util-user-agent-node/dist-types/ts3.4/is-crt-available.d.ts new file mode 100644 index 0000000..d28355c --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@aws-sdk/util-user-agent-node/dist-types/ts3.4/is-crt-available.d.ts @@ -0,0 +1,2 @@ +import { UserAgentPair } from "@smithy/types"; +export declare const isCrtAvailable: () => UserAgentPair | null; diff --git a/amplify/functions/downloadDocument/node_modules/@aws-sdk/util-user-agent-node/dist-types/ts3.4/nodeAppIdConfigOptions.d.ts b/amplify/functions/downloadDocument/node_modules/@aws-sdk/util-user-agent-node/dist-types/ts3.4/nodeAppIdConfigOptions.d.ts new file mode 100644 index 0000000..b9fa123 --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@aws-sdk/util-user-agent-node/dist-types/ts3.4/nodeAppIdConfigOptions.d.ts @@ -0,0 +1,6 @@ +import { LoadedConfigSelectors } from "@smithy/node-config-provider"; +export declare const UA_APP_ID_ENV_NAME = "AWS_SDK_UA_APP_ID"; +export declare const UA_APP_ID_INI_NAME = "sdk_ua_app_id"; +export declare const NODE_APP_ID_CONFIG_OPTIONS: LoadedConfigSelectors< + string | undefined +>; diff --git a/amplify/functions/downloadDocument/node_modules/@aws-sdk/util-user-agent-node/package.json b/amplify/functions/downloadDocument/node_modules/@aws-sdk/util-user-agent-node/package.json new file mode 100644 index 0000000..14742a5 --- /dev/null +++ 
b/amplify/functions/downloadDocument/node_modules/@aws-sdk/util-user-agent-node/package.json @@ -0,0 +1,65 @@ +{ + "name": "@aws-sdk/util-user-agent-node", + "version": "3.799.0", + "scripts": { + "build": "concurrently 'yarn:build:cjs' 'yarn:build:es' 'yarn:build:types'", + "build:cjs": "node ../../scripts/compilation/inline util-user-agent-node", + "build:es": "tsc -p tsconfig.es.json", + "build:include:deps": "lerna run --scope $npm_package_name --include-dependencies build", + "build:types": "tsc -p tsconfig.types.json", + "build:types:downlevel": "downlevel-dts dist-types dist-types/ts3.4", + "clean": "rimraf ./dist-* && rimraf *.tsbuildinfo", + "test": "yarn g:vitest run", + "test:watch": "yarn g:vitest watch" + }, + "main": "./dist-cjs/index.js", + "module": "./dist-es/index.js", + "types": "./dist-types/index.d.ts", + "author": { + "name": "AWS SDK for JavaScript Team", + "url": "https://aws.amazon.com/javascript/" + }, + "license": "Apache-2.0", + "dependencies": { + "@aws-sdk/middleware-user-agent": "3.799.0", + "@aws-sdk/types": "3.775.0", + "@smithy/node-config-provider": "^4.0.2", + "@smithy/types": "^4.2.0", + "tslib": "^2.6.2" + }, + "devDependencies": { + "@tsconfig/recommended": "1.0.1", + "@types/node": "^18.19.69", + "concurrently": "7.0.0", + "downlevel-dts": "0.10.1", + "rimraf": "3.0.2", + "typescript": "~5.2.2" + }, + "peerDependencies": { + "aws-crt": ">=1.0.0" + }, + "peerDependenciesMeta": { + "aws-crt": { + "optional": true + } + }, + "engines": { + "node": ">=18.0.0" + }, + "typesVersions": { + "<4.0": { + "dist-types/*": [ + "dist-types/ts3.4/*" + ] + } + }, + "files": [ + "dist-*/**" + ], + "homepage": "https://github.com/aws/aws-sdk-js-v3/tree/main/packages/util-user-agent-node", + "repository": { + "type": "git", + "url": "https://github.com/aws/aws-sdk-js-v3.git", + "directory": "packages/util-user-agent-node" + } +} diff --git a/amplify/functions/downloadDocument/node_modules/@smithy/abort-controller/LICENSE 
b/amplify/functions/downloadDocument/node_modules/@smithy/abort-controller/LICENSE new file mode 100644 index 0000000..7b6491b --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@smithy/abort-controller/LICENSE @@ -0,0 +1,201 @@ +Apache License + Version 2.0, January 2004 + http://www.apache.org/licenses/ + + TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION + + 1. Definitions. + + "License" shall mean the terms and conditions for use, reproduction, + and distribution as defined by Sections 1 through 9 of this document. + + "Licensor" shall mean the copyright owner or entity authorized by + the copyright owner that is granting the License. + + "Legal Entity" shall mean the union of the acting entity and all + other entities that control, are controlled by, or are under common + control with that entity. For the purposes of this definition, + "control" means (i) the power, direct or indirect, to cause the + direction or management of such entity, whether by contract or + otherwise, or (ii) ownership of fifty percent (50%) or more of the + outstanding shares, or (iii) beneficial ownership of such entity. + + "You" (or "Your") shall mean an individual or Legal Entity + exercising permissions granted by this License. + + "Source" form shall mean the preferred form for making modifications, + including but not limited to software source code, documentation + source, and configuration files. + + "Object" form shall mean any form resulting from mechanical + transformation or translation of a Source form, including but + not limited to compiled object code, generated documentation, + and conversions to other media types. + + "Work" shall mean the work of authorship, whether in Source or + Object form, made available under the License, as indicated by a + copyright notice that is included in or attached to the work + (an example is provided in the Appendix below). 
+ + "Derivative Works" shall mean any work, whether in Source or Object + form, that is based on (or derived from) the Work and for which the + editorial revisions, annotations, elaborations, or other modifications + represent, as a whole, an original work of authorship. For the purposes + of this License, Derivative Works shall not include works that remain + separable from, or merely link (or bind by name) to the interfaces of, + the Work and Derivative Works thereof. + + "Contribution" shall mean any work of authorship, including + the original version of the Work and any modifications or additions + to that Work or Derivative Works thereof, that is intentionally + submitted to Licensor for inclusion in the Work by the copyright owner + or by an individual or Legal Entity authorized to submit on behalf of + the copyright owner. For the purposes of this definition, "submitted" + means any form of electronic, verbal, or written communication sent + to the Licensor or its representatives, including but not limited to + communication on electronic mailing lists, source code control systems, + and issue tracking systems that are managed by, or on behalf of, the + Licensor for the purpose of discussing and improving the Work, but + excluding communication that is conspicuously marked or otherwise + designated in writing by the copyright owner as "Not a Contribution." + + "Contributor" shall mean Licensor and any individual or Legal Entity + on behalf of whom a Contribution has been received by Licensor and + subsequently incorporated within the Work. + + 2. Grant of Copyright License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + copyright license to reproduce, prepare Derivative Works of, + publicly display, publicly perform, sublicense, and distribute the + Work and such Derivative Works in Source or Object form. + + 3. 
Grant of Patent License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + (except as stated in this section) patent license to make, have made, + use, offer to sell, sell, import, and otherwise transfer the Work, + where such license applies only to those patent claims licensable + by such Contributor that are necessarily infringed by their + Contribution(s) alone or by combination of their Contribution(s) + with the Work to which such Contribution(s) was submitted. If You + institute patent litigation against any entity (including a + cross-claim or counterclaim in a lawsuit) alleging that the Work + or a Contribution incorporated within the Work constitutes direct + or contributory patent infringement, then any patent licenses + granted to You under this License for that Work shall terminate + as of the date such litigation is filed. + + 4. Redistribution. You may reproduce and distribute copies of the + Work or Derivative Works thereof in any medium, with or without + modifications, and in Source or Object form, provided that You + meet the following conditions: + + (a) You must give any other recipients of the Work or + Derivative Works a copy of this License; and + + (b) You must cause any modified files to carry prominent notices + stating that You changed the files; and + + (c) You must retain, in the Source form of any Derivative Works + that You distribute, all copyright, patent, trademark, and + attribution notices from the Source form of the Work, + excluding those notices that do not pertain to any part of + the Derivative Works; and + + (d) If the Work includes a "NOTICE" text file as part of its + distribution, then any Derivative Works that You distribute must + include a readable copy of the attribution notices contained + within such NOTICE file, excluding those notices that do not + pertain to any part of the Derivative 
Works, in at least one + of the following places: within a NOTICE text file distributed + as part of the Derivative Works; within the Source form or + documentation, if provided along with the Derivative Works; or, + within a display generated by the Derivative Works, if and + wherever such third-party notices normally appear. The contents + of the NOTICE file are for informational purposes only and + do not modify the License. You may add Your own attribution + notices within Derivative Works that You distribute, alongside + or as an addendum to the NOTICE text from the Work, provided + that such additional attribution notices cannot be construed + as modifying the License. + + You may add Your own copyright statement to Your modifications and + may provide additional or different license terms and conditions + for use, reproduction, or distribution of Your modifications, or + for any such Derivative Works as a whole, provided Your use, + reproduction, and distribution of the Work otherwise complies with + the conditions stated in this License. + + 5. Submission of Contributions. Unless You explicitly state otherwise, + any Contribution intentionally submitted for inclusion in the Work + by You to the Licensor shall be under the terms and conditions of + this License, without any additional terms or conditions. + Notwithstanding the above, nothing herein shall supersede or modify + the terms of any separate license agreement you may have executed + with Licensor regarding such Contributions. + + 6. Trademarks. This License does not grant permission to use the trade + names, trademarks, service marks, or product names of the Licensor, + except as required for reasonable and customary use in describing the + origin of the Work and reproducing the content of the NOTICE file. + + 7. Disclaimer of Warranty. 
Unless required by applicable law or + agreed to in writing, Licensor provides the Work (and each + Contributor provides its Contributions) on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or + implied, including, without limitation, any warranties or conditions + of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A + PARTICULAR PURPOSE. You are solely responsible for determining the + appropriateness of using or redistributing the Work and assume any + risks associated with Your exercise of permissions under this License. + + 8. Limitation of Liability. In no event and under no legal theory, + whether in tort (including negligence), contract, or otherwise, + unless required by applicable law (such as deliberate and grossly + negligent acts) or agreed to in writing, shall any Contributor be + liable to You for damages, including any direct, indirect, special, + incidental, or consequential damages of any character arising as a + result of this License or out of the use or inability to use the + Work (including but not limited to damages for loss of goodwill, + work stoppage, computer failure or malfunction, or any and all + other commercial damages or losses), even if such Contributor + has been advised of the possibility of such damages. + + 9. Accepting Warranty or Additional Liability. While redistributing + the Work or Derivative Works thereof, You may choose to offer, + and charge a fee for, acceptance of support, warranty, indemnity, + or other liability obligations and/or rights consistent with this + License. However, in accepting such obligations, You may act only + on Your own behalf and on Your sole responsibility, not on behalf + of any other Contributor, and only if You agree to indemnify, + defend, and hold each Contributor harmless for any liability + incurred by, or claims asserted against, such Contributor by reason + of your accepting any such warranty or additional liability. 
+ + END OF TERMS AND CONDITIONS + + APPENDIX: How to apply the Apache License to your work. + + To apply the Apache License to your work, attach the following + boilerplate notice, with the fields enclosed by brackets "{}" + replaced with your own identifying information. (Don't include + the brackets!) The text should be enclosed in the appropriate + comment syntax for the file format. We also recommend that a + file or class name and description of purpose be included on the + same "printed page" as the copyright notice for easier + identification within third-party archives. + + Copyright 2018-2020 Amazon.com, Inc. or its affiliates. All Rights Reserved. + + Licensed under the Apache License, Version 2.0 (the "License"); + you may not use this file except in compliance with the License. + You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + + Unless required by applicable law or agreed to in writing, software + distributed under the License is distributed on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + See the License for the specific language governing permissions and + limitations under the License. 
\ No newline at end of file diff --git a/amplify/functions/downloadDocument/node_modules/@smithy/abort-controller/README.md b/amplify/functions/downloadDocument/node_modules/@smithy/abort-controller/README.md new file mode 100644 index 0000000..175bc37 --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@smithy/abort-controller/README.md @@ -0,0 +1,4 @@ +# @smithy/abort-controller + +[![NPM version](https://img.shields.io/npm/v/@smithy/abort-controller/latest.svg)](https://www.npmjs.com/package/@smithy/abort-controller) +[![NPM downloads](https://img.shields.io/npm/dm/@smithy/abort-controller.svg)](https://www.npmjs.com/package/@smithy/abort-controller) diff --git a/amplify/functions/downloadDocument/node_modules/@smithy/abort-controller/dist-cjs/AbortController.js b/amplify/functions/downloadDocument/node_modules/@smithy/abort-controller/dist-cjs/AbortController.js new file mode 100644 index 0000000..532e610 --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@smithy/abort-controller/dist-cjs/AbortController.js @@ -0,0 +1 @@ +module.exports = require("./index.js"); \ No newline at end of file diff --git a/amplify/functions/downloadDocument/node_modules/@smithy/abort-controller/dist-cjs/AbortSignal.js b/amplify/functions/downloadDocument/node_modules/@smithy/abort-controller/dist-cjs/AbortSignal.js new file mode 100644 index 0000000..532e610 --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@smithy/abort-controller/dist-cjs/AbortSignal.js @@ -0,0 +1 @@ +module.exports = require("./index.js"); \ No newline at end of file diff --git a/amplify/functions/downloadDocument/node_modules/@smithy/abort-controller/dist-cjs/index.js b/amplify/functions/downloadDocument/node_modules/@smithy/abort-controller/dist-cjs/index.js new file mode 100644 index 0000000..e2f7caa --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@smithy/abort-controller/dist-cjs/index.js @@ -0,0 +1,84 @@ +var __defProp = 
Object.defineProperty; +var __getOwnPropDesc = Object.getOwnPropertyDescriptor; +var __getOwnPropNames = Object.getOwnPropertyNames; +var __hasOwnProp = Object.prototype.hasOwnProperty; +var __name = (target, value) => __defProp(target, "name", { value, configurable: true }); +var __export = (target, all) => { + for (var name in all) + __defProp(target, name, { get: all[name], enumerable: true }); +}; +var __copyProps = (to, from, except, desc) => { + if (from && typeof from === "object" || typeof from === "function") { + for (let key of __getOwnPropNames(from)) + if (!__hasOwnProp.call(to, key) && key !== except) + __defProp(to, key, { get: () => from[key], enumerable: !(desc = __getOwnPropDesc(from, key)) || desc.enumerable }); + } + return to; +}; +var __toCommonJS = (mod) => __copyProps(__defProp({}, "__esModule", { value: true }), mod); + +// src/index.ts +var src_exports = {}; +__export(src_exports, { + AbortController: () => AbortController, + AbortHandler: () => import_types.AbortHandler, + AbortSignal: () => AbortSignal, + IAbortController: () => import_types.AbortController, + IAbortSignal: () => import_types.AbortSignal +}); +module.exports = __toCommonJS(src_exports); + +// src/AbortController.ts + + +// src/AbortSignal.ts +var import_types = require("@smithy/types"); +var AbortSignal = class { + constructor() { + this.onabort = null; + this._aborted = false; + Object.defineProperty(this, "_aborted", { + value: false, + writable: true + }); + } + static { + __name(this, "AbortSignal"); + } + /** + * Whether the associated operation has already been cancelled. 
+ */ + get aborted() { + return this._aborted; + } + /** + * @internal + */ + abort() { + this._aborted = true; + if (this.onabort) { + this.onabort(this); + this.onabort = null; + } + } +}; + +// src/AbortController.ts +var AbortController = class { + constructor() { + this.signal = new AbortSignal(); + } + static { + __name(this, "AbortController"); + } + abort() { + this.signal.abort(); + } +}; +// Annotate the CommonJS export names for ESM import in node: + +0 && (module.exports = { + AbortController, + AbortSignal +}); + diff --git a/amplify/functions/downloadDocument/node_modules/@smithy/abort-controller/dist-es/AbortController.js b/amplify/functions/downloadDocument/node_modules/@smithy/abort-controller/dist-es/AbortController.js new file mode 100644 index 0000000..696f137 --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@smithy/abort-controller/dist-es/AbortController.js @@ -0,0 +1,9 @@ +import { AbortSignal } from "./AbortSignal"; +export class AbortController { + constructor() { + this.signal = new AbortSignal(); + } + abort() { + this.signal.abort(); + } +} diff --git a/amplify/functions/downloadDocument/node_modules/@smithy/abort-controller/dist-es/AbortSignal.js b/amplify/functions/downloadDocument/node_modules/@smithy/abort-controller/dist-es/AbortSignal.js new file mode 100644 index 0000000..9fc0813 --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@smithy/abort-controller/dist-es/AbortSignal.js @@ -0,0 +1,20 @@ +export class AbortSignal { + constructor() { + this.onabort = null; + this._aborted = false; + Object.defineProperty(this, "_aborted", { + value: false, + writable: true, + }); + } + get aborted() { + return this._aborted; + } + abort() { + this._aborted = true; + if (this.onabort) { + this.onabort(this); + this.onabort = null; + } + } +} diff --git a/amplify/functions/downloadDocument/node_modules/@smithy/abort-controller/dist-es/index.js 
b/amplify/functions/downloadDocument/node_modules/@smithy/abort-controller/dist-es/index.js new file mode 100644 index 0000000..a0f47f7 --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@smithy/abort-controller/dist-es/index.js @@ -0,0 +1,2 @@ +export * from "./AbortController"; +export * from "./AbortSignal"; diff --git a/amplify/functions/downloadDocument/node_modules/@smithy/abort-controller/dist-types/AbortController.d.ts b/amplify/functions/downloadDocument/node_modules/@smithy/abort-controller/dist-types/AbortController.d.ts new file mode 100644 index 0000000..007f0f6 --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@smithy/abort-controller/dist-types/AbortController.d.ts @@ -0,0 +1,16 @@ +import { AbortController as DeprecatedAbortController } from "@smithy/types"; +import { AbortSignal } from "./AbortSignal"; +/** + * @public + */ +export { DeprecatedAbortController as IAbortController }; +/** + * @deprecated This implementation was added as Node.js didn't support AbortController prior to 15.x + * Use native implementation in browsers or Node.js \>=15.4.0. 
+ * + * @public + */ +export declare class AbortController implements DeprecatedAbortController { + readonly signal: AbortSignal; + abort(): void; +} diff --git a/amplify/functions/downloadDocument/node_modules/@smithy/abort-controller/dist-types/AbortSignal.d.ts b/amplify/functions/downloadDocument/node_modules/@smithy/abort-controller/dist-types/AbortSignal.d.ts new file mode 100644 index 0000000..a97c3dc --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@smithy/abort-controller/dist-types/AbortSignal.d.ts @@ -0,0 +1,21 @@ +import { AbortHandler, AbortSignal as DeprecatedAbortSignal } from "@smithy/types"; +/** + * @public + */ +export { AbortHandler, DeprecatedAbortSignal as IAbortSignal }; +/** + * @public + */ +export declare class AbortSignal implements DeprecatedAbortSignal { + onabort: AbortHandler | null; + private _aborted; + constructor(); + /** + * Whether the associated operation has already been cancelled. + */ + get aborted(): boolean; + /** + * @internal + */ + abort(): void; +} diff --git a/amplify/functions/downloadDocument/node_modules/@smithy/abort-controller/dist-types/index.d.ts b/amplify/functions/downloadDocument/node_modules/@smithy/abort-controller/dist-types/index.d.ts new file mode 100644 index 0000000..8788e2f --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@smithy/abort-controller/dist-types/index.d.ts @@ -0,0 +1,9 @@ +/** + * This implementation was added as Node.js didn't support AbortController prior to 15.x + * Use native implementation in browsers or Node.js \>=15.4.0. 
+ * + * @deprecated Use standard implementations in [Browsers](https://developer.mozilla.org/en-US/docs/Web/API/AbortController) and [Node.js](https://nodejs.org/docs/latest/api/globals.html#class-abortcontroller) + * @packageDocumentation + */ +export * from "./AbortController"; +export * from "./AbortSignal"; diff --git a/amplify/functions/downloadDocument/node_modules/@smithy/abort-controller/dist-types/ts3.4/AbortController.d.ts b/amplify/functions/downloadDocument/node_modules/@smithy/abort-controller/dist-types/ts3.4/AbortController.d.ts new file mode 100644 index 0000000..89457d4 --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@smithy/abort-controller/dist-types/ts3.4/AbortController.d.ts @@ -0,0 +1,16 @@ +import { AbortController as DeprecatedAbortController } from "@smithy/types"; +import { AbortSignal } from "./AbortSignal"; +/** + * @public + */ +export { DeprecatedAbortController as IAbortController }; +/** + * @deprecated This implementation was added as Node.js didn't support AbortController prior to 15.x + * Use native implementation in browsers or Node.js \>=15.4.0. 
+ * + * @public + */ +export declare class AbortController implements DeprecatedAbortController { + readonly signal: AbortSignal; + abort(): void; +} diff --git a/amplify/functions/downloadDocument/node_modules/@smithy/abort-controller/dist-types/ts3.4/AbortSignal.d.ts b/amplify/functions/downloadDocument/node_modules/@smithy/abort-controller/dist-types/ts3.4/AbortSignal.d.ts new file mode 100644 index 0000000..92130a3 --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@smithy/abort-controller/dist-types/ts3.4/AbortSignal.d.ts @@ -0,0 +1,21 @@ +import { AbortHandler, AbortSignal as DeprecatedAbortSignal } from "@smithy/types"; +/** + * @public + */ +export { AbortHandler, DeprecatedAbortSignal as IAbortSignal }; +/** + * @public + */ +export declare class AbortSignal implements DeprecatedAbortSignal { + onabort: AbortHandler | null; + private _aborted; + constructor(); + /* + * Whether the associated operation has already been cancelled. + */ + readonly aborted: boolean; + /** + * @internal + */ + abort(): void; +} diff --git a/amplify/functions/downloadDocument/node_modules/@smithy/abort-controller/dist-types/ts3.4/index.d.ts b/amplify/functions/downloadDocument/node_modules/@smithy/abort-controller/dist-types/ts3.4/index.d.ts new file mode 100644 index 0000000..5a907b0 --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@smithy/abort-controller/dist-types/ts3.4/index.d.ts @@ -0,0 +1,9 @@ +/** + * This implementation was added as Node.js didn't support AbortController prior to 15.x + * Use native implementation in browsers or Node.js \>=15.4.0. 
+ * + * @deprecated Use standard implementations in [Browsers](https://developer.mozilla.org/en-US/docs/Web/API/AbortController) and [Node.js](https://nodejs.org/docs/latest/api/globals.html#class-abortcontroller) + * @packageDocumentation + */ +export * from "./AbortController"; +export * from "./AbortSignal"; diff --git a/amplify/functions/downloadDocument/node_modules/@smithy/abort-controller/package.json b/amplify/functions/downloadDocument/node_modules/@smithy/abort-controller/package.json new file mode 100644 index 0000000..b7e5769 --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@smithy/abort-controller/package.json @@ -0,0 +1,62 @@ +{ + "name": "@smithy/abort-controller", + "version": "4.0.2", + "description": "A simple abort controller library", + "main": "./dist-cjs/index.js", + "module": "./dist-es/index.js", + "types": "./dist-types/index.d.ts", + "scripts": { + "build": "concurrently 'yarn:build:cjs' 'yarn:build:es' 'yarn:build:types && yarn build:types:downlevel'", + "build:cjs": "node ../../scripts/inline abort-controller", + "build:es": "yarn g:tsc -p tsconfig.es.json", + "build:types": "yarn g:tsc -p tsconfig.types.json", + "build:types:downlevel": "rimraf dist-types/ts3.4 && downlevel-dts dist-types dist-types/ts3.4", + "stage-release": "rimraf ./.release && yarn pack && mkdir ./.release && tar zxvf ./package.tgz --directory ./.release && rm ./package.tgz", + "clean": "rimraf ./dist-* && rimraf *.tsbuildinfo || exit 0", + "lint": "eslint -c ../../.eslintrc.js \"src/**/*.ts\"", + "format": "prettier --config ../../prettier.config.js --ignore-path ../../.prettierignore --write \"**/*.{ts,md,json}\"", + "extract:docs": "api-extractor run --local", + "test": "yarn g:vitest run", + "test:watch": "yarn g:vitest watch" + }, + "author": { + "name": "AWS SDK for JavaScript Team", + "url": "https://aws.amazon.com/javascript/" + }, + "license": "Apache-2.0", + "dependencies": { + "@smithy/types": "^4.2.0", + "tslib": "^2.6.2" + }, + 
"engines": { + "node": ">=18.0.0" + }, + "typesVersions": { + "<4.0": { + "dist-types/*": [ + "dist-types/ts3.4/*" + ] + } + }, + "files": [ + "dist-*/**" + ], + "homepage": "https://github.com/smithy-lang/smithy-typescript/tree/main/packages/abort-controller", + "repository": { + "type": "git", + "url": "https://github.com/smithy-lang/smithy-typescript.git", + "directory": "packages/abort-controller" + }, + "devDependencies": { + "concurrently": "7.0.0", + "downlevel-dts": "0.10.1", + "rimraf": "3.0.2", + "typedoc": "0.23.23" + }, + "typedoc": { + "entryPoint": "src/index.ts" + }, + "publishConfig": { + "directory": ".release/package" + } +} \ No newline at end of file diff --git a/amplify/functions/downloadDocument/node_modules/@smithy/config-resolver/LICENSE b/amplify/functions/downloadDocument/node_modules/@smithy/config-resolver/LICENSE new file mode 100644 index 0000000..7b6491b --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@smithy/config-resolver/LICENSE @@ -0,0 +1,201 @@ +Apache License + Version 2.0, January 2004 + http://www.apache.org/licenses/ + + TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION + + 1. Definitions. + + "License" shall mean the terms and conditions for use, reproduction, + and distribution as defined by Sections 1 through 9 of this document. + + "Licensor" shall mean the copyright owner or entity authorized by + the copyright owner that is granting the License. + + "Legal Entity" shall mean the union of the acting entity and all + other entities that control, are controlled by, or are under common + control with that entity. For the purposes of this definition, + "control" means (i) the power, direct or indirect, to cause the + direction or management of such entity, whether by contract or + otherwise, or (ii) ownership of fifty percent (50%) or more of the + outstanding shares, or (iii) beneficial ownership of such entity. 
+ + "You" (or "Your") shall mean an individual or Legal Entity + exercising permissions granted by this License. + + "Source" form shall mean the preferred form for making modifications, + including but not limited to software source code, documentation + source, and configuration files. + + "Object" form shall mean any form resulting from mechanical + transformation or translation of a Source form, including but + not limited to compiled object code, generated documentation, + and conversions to other media types. + + "Work" shall mean the work of authorship, whether in Source or + Object form, made available under the License, as indicated by a + copyright notice that is included in or attached to the work + (an example is provided in the Appendix below). + + "Derivative Works" shall mean any work, whether in Source or Object + form, that is based on (or derived from) the Work and for which the + editorial revisions, annotations, elaborations, or other modifications + represent, as a whole, an original work of authorship. For the purposes + of this License, Derivative Works shall not include works that remain + separable from, or merely link (or bind by name) to the interfaces of, + the Work and Derivative Works thereof. + + "Contribution" shall mean any work of authorship, including + the original version of the Work and any modifications or additions + to that Work or Derivative Works thereof, that is intentionally + submitted to Licensor for inclusion in the Work by the copyright owner + or by an individual or Legal Entity authorized to submit on behalf of + the copyright owner. 
For the purposes of this definition, "submitted" + means any form of electronic, verbal, or written communication sent + to the Licensor or its representatives, including but not limited to + communication on electronic mailing lists, source code control systems, + and issue tracking systems that are managed by, or on behalf of, the + Licensor for the purpose of discussing and improving the Work, but + excluding communication that is conspicuously marked or otherwise + designated in writing by the copyright owner as "Not a Contribution." + + "Contributor" shall mean Licensor and any individual or Legal Entity + on behalf of whom a Contribution has been received by Licensor and + subsequently incorporated within the Work. + + 2. Grant of Copyright License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + copyright license to reproduce, prepare Derivative Works of, + publicly display, publicly perform, sublicense, and distribute the + Work and such Derivative Works in Source or Object form. + + 3. Grant of Patent License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + (except as stated in this section) patent license to make, have made, + use, offer to sell, sell, import, and otherwise transfer the Work, + where such license applies only to those patent claims licensable + by such Contributor that are necessarily infringed by their + Contribution(s) alone or by combination of their Contribution(s) + with the Work to which such Contribution(s) was submitted. 
If You + institute patent litigation against any entity (including a + cross-claim or counterclaim in a lawsuit) alleging that the Work + or a Contribution incorporated within the Work constitutes direct + or contributory patent infringement, then any patent licenses + granted to You under this License for that Work shall terminate + as of the date such litigation is filed. + + 4. Redistribution. You may reproduce and distribute copies of the + Work or Derivative Works thereof in any medium, with or without + modifications, and in Source or Object form, provided that You + meet the following conditions: + + (a) You must give any other recipients of the Work or + Derivative Works a copy of this License; and + + (b) You must cause any modified files to carry prominent notices + stating that You changed the files; and + + (c) You must retain, in the Source form of any Derivative Works + that You distribute, all copyright, patent, trademark, and + attribution notices from the Source form of the Work, + excluding those notices that do not pertain to any part of + the Derivative Works; and + + (d) If the Work includes a "NOTICE" text file as part of its + distribution, then any Derivative Works that You distribute must + include a readable copy of the attribution notices contained + within such NOTICE file, excluding those notices that do not + pertain to any part of the Derivative Works, in at least one + of the following places: within a NOTICE text file distributed + as part of the Derivative Works; within the Source form or + documentation, if provided along with the Derivative Works; or, + within a display generated by the Derivative Works, if and + wherever such third-party notices normally appear. The contents + of the NOTICE file are for informational purposes only and + do not modify the License. 
You may add Your own attribution + notices within Derivative Works that You distribute, alongside + or as an addendum to the NOTICE text from the Work, provided + that such additional attribution notices cannot be construed + as modifying the License. + + You may add Your own copyright statement to Your modifications and + may provide additional or different license terms and conditions + for use, reproduction, or distribution of Your modifications, or + for any such Derivative Works as a whole, provided Your use, + reproduction, and distribution of the Work otherwise complies with + the conditions stated in this License. + + 5. Submission of Contributions. Unless You explicitly state otherwise, + any Contribution intentionally submitted for inclusion in the Work + by You to the Licensor shall be under the terms and conditions of + this License, without any additional terms or conditions. + Notwithstanding the above, nothing herein shall supersede or modify + the terms of any separate license agreement you may have executed + with Licensor regarding such Contributions. + + 6. Trademarks. This License does not grant permission to use the trade + names, trademarks, service marks, or product names of the Licensor, + except as required for reasonable and customary use in describing the + origin of the Work and reproducing the content of the NOTICE file. + + 7. Disclaimer of Warranty. Unless required by applicable law or + agreed to in writing, Licensor provides the Work (and each + Contributor provides its Contributions) on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or + implied, including, without limitation, any warranties or conditions + of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A + PARTICULAR PURPOSE. You are solely responsible for determining the + appropriateness of using or redistributing the Work and assume any + risks associated with Your exercise of permissions under this License. + + 8. 
Limitation of Liability. In no event and under no legal theory, + whether in tort (including negligence), contract, or otherwise, + unless required by applicable law (such as deliberate and grossly + negligent acts) or agreed to in writing, shall any Contributor be + liable to You for damages, including any direct, indirect, special, + incidental, or consequential damages of any character arising as a + result of this License or out of the use or inability to use the + Work (including but not limited to damages for loss of goodwill, + work stoppage, computer failure or malfunction, or any and all + other commercial damages or losses), even if such Contributor + has been advised of the possibility of such damages. + + 9. Accepting Warranty or Additional Liability. While redistributing + the Work or Derivative Works thereof, You may choose to offer, + and charge a fee for, acceptance of support, warranty, indemnity, + or other liability obligations and/or rights consistent with this + License. However, in accepting such obligations, You may act only + on Your own behalf and on Your sole responsibility, not on behalf + of any other Contributor, and only if You agree to indemnify, + defend, and hold each Contributor harmless for any liability + incurred by, or claims asserted against, such Contributor by reason + of your accepting any such warranty or additional liability. + + END OF TERMS AND CONDITIONS + + APPENDIX: How to apply the Apache License to your work. + + To apply the Apache License to your work, attach the following + boilerplate notice, with the fields enclosed by brackets "{}" + replaced with your own identifying information. (Don't include + the brackets!) The text should be enclosed in the appropriate + comment syntax for the file format. We also recommend that a + file or class name and description of purpose be included on the + same "printed page" as the copyright notice for easier + identification within third-party archives. 
+ + Copyright 2018-2020 Amazon.com, Inc. or its affiliates. All Rights Reserved. + + Licensed under the Apache License, Version 2.0 (the "License"); + you may not use this file except in compliance with the License. + You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + + Unless required by applicable law or agreed to in writing, software + distributed under the License is distributed on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + See the License for the specific language governing permissions and + limitations under the License. \ No newline at end of file diff --git a/amplify/functions/downloadDocument/node_modules/@smithy/config-resolver/README.md b/amplify/functions/downloadDocument/node_modules/@smithy/config-resolver/README.md new file mode 100644 index 0000000..2a25da2 --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@smithy/config-resolver/README.md @@ -0,0 +1,10 @@ +# @smithy/config-resolver + +[![NPM version](https://img.shields.io/npm/v/@smithy/config-resolver/latest.svg)](https://www.npmjs.com/package/@smithy/config-resolver) +[![NPM downloads](https://img.shields.io/npm/dm/@smithy/config-resolver.svg)](https://www.npmjs.com/package/@smithy/config-resolver) + +> An internal package + +## Usage + +You probably shouldn't, at least directly. 
diff --git a/amplify/functions/downloadDocument/node_modules/@smithy/config-resolver/dist-cjs/endpointsConfig/NodeUseDualstackEndpointConfigOptions.js b/amplify/functions/downloadDocument/node_modules/@smithy/config-resolver/dist-cjs/endpointsConfig/NodeUseDualstackEndpointConfigOptions.js new file mode 100644 index 0000000..0440577 --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@smithy/config-resolver/dist-cjs/endpointsConfig/NodeUseDualstackEndpointConfigOptions.js @@ -0,0 +1 @@ +module.exports = require("../index.js"); \ No newline at end of file diff --git a/amplify/functions/downloadDocument/node_modules/@smithy/config-resolver/dist-cjs/endpointsConfig/NodeUseFipsEndpointConfigOptions.js b/amplify/functions/downloadDocument/node_modules/@smithy/config-resolver/dist-cjs/endpointsConfig/NodeUseFipsEndpointConfigOptions.js new file mode 100644 index 0000000..0440577 --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@smithy/config-resolver/dist-cjs/endpointsConfig/NodeUseFipsEndpointConfigOptions.js @@ -0,0 +1 @@ +module.exports = require("../index.js"); \ No newline at end of file diff --git a/amplify/functions/downloadDocument/node_modules/@smithy/config-resolver/dist-cjs/endpointsConfig/index.js b/amplify/functions/downloadDocument/node_modules/@smithy/config-resolver/dist-cjs/endpointsConfig/index.js new file mode 100644 index 0000000..0440577 --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@smithy/config-resolver/dist-cjs/endpointsConfig/index.js @@ -0,0 +1 @@ +module.exports = require("../index.js"); \ No newline at end of file diff --git a/amplify/functions/downloadDocument/node_modules/@smithy/config-resolver/dist-cjs/endpointsConfig/resolveCustomEndpointsConfig.js b/amplify/functions/downloadDocument/node_modules/@smithy/config-resolver/dist-cjs/endpointsConfig/resolveCustomEndpointsConfig.js new file mode 100644 index 0000000..0440577 --- /dev/null +++ 
b/amplify/functions/downloadDocument/node_modules/@smithy/config-resolver/dist-cjs/endpointsConfig/resolveCustomEndpointsConfig.js @@ -0,0 +1 @@ +module.exports = require("../index.js"); \ No newline at end of file diff --git a/amplify/functions/downloadDocument/node_modules/@smithy/config-resolver/dist-cjs/endpointsConfig/resolveEndpointsConfig.js b/amplify/functions/downloadDocument/node_modules/@smithy/config-resolver/dist-cjs/endpointsConfig/resolveEndpointsConfig.js new file mode 100644 index 0000000..0440577 --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@smithy/config-resolver/dist-cjs/endpointsConfig/resolveEndpointsConfig.js @@ -0,0 +1 @@ +module.exports = require("../index.js"); \ No newline at end of file diff --git a/amplify/functions/downloadDocument/node_modules/@smithy/config-resolver/dist-cjs/endpointsConfig/utils/getEndpointFromRegion.js b/amplify/functions/downloadDocument/node_modules/@smithy/config-resolver/dist-cjs/endpointsConfig/utils/getEndpointFromRegion.js new file mode 100644 index 0000000..8817412 --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@smithy/config-resolver/dist-cjs/endpointsConfig/utils/getEndpointFromRegion.js @@ -0,0 +1 @@ +module.exports = require("../../index.js"); \ No newline at end of file diff --git a/amplify/functions/downloadDocument/node_modules/@smithy/config-resolver/dist-cjs/index.js b/amplify/functions/downloadDocument/node_modules/@smithy/config-resolver/dist-cjs/index.js new file mode 100644 index 0000000..42f7a4c --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@smithy/config-resolver/dist-cjs/index.js @@ -0,0 +1,228 @@ +var __defProp = Object.defineProperty; +var __getOwnPropDesc = Object.getOwnPropertyDescriptor; +var __getOwnPropNames = Object.getOwnPropertyNames; +var __hasOwnProp = Object.prototype.hasOwnProperty; +var __name = (target, value) => __defProp(target, "name", { value, configurable: true }); +var __export = (target, all) => { + for 
(var name in all) + __defProp(target, name, { get: all[name], enumerable: true }); +}; +var __copyProps = (to, from, except, desc) => { + if (from && typeof from === "object" || typeof from === "function") { + for (let key of __getOwnPropNames(from)) + if (!__hasOwnProp.call(to, key) && key !== except) + __defProp(to, key, { get: () => from[key], enumerable: !(desc = __getOwnPropDesc(from, key)) || desc.enumerable }); + } + return to; +}; +var __toCommonJS = (mod) => __copyProps(__defProp({}, "__esModule", { value: true }), mod); + +// src/index.ts +var src_exports = {}; +__export(src_exports, { + CONFIG_USE_DUALSTACK_ENDPOINT: () => CONFIG_USE_DUALSTACK_ENDPOINT, + CONFIG_USE_FIPS_ENDPOINT: () => CONFIG_USE_FIPS_ENDPOINT, + DEFAULT_USE_DUALSTACK_ENDPOINT: () => DEFAULT_USE_DUALSTACK_ENDPOINT, + DEFAULT_USE_FIPS_ENDPOINT: () => DEFAULT_USE_FIPS_ENDPOINT, + ENV_USE_DUALSTACK_ENDPOINT: () => ENV_USE_DUALSTACK_ENDPOINT, + ENV_USE_FIPS_ENDPOINT: () => ENV_USE_FIPS_ENDPOINT, + NODE_REGION_CONFIG_FILE_OPTIONS: () => NODE_REGION_CONFIG_FILE_OPTIONS, + NODE_REGION_CONFIG_OPTIONS: () => NODE_REGION_CONFIG_OPTIONS, + NODE_USE_DUALSTACK_ENDPOINT_CONFIG_OPTIONS: () => NODE_USE_DUALSTACK_ENDPOINT_CONFIG_OPTIONS, + NODE_USE_FIPS_ENDPOINT_CONFIG_OPTIONS: () => NODE_USE_FIPS_ENDPOINT_CONFIG_OPTIONS, + REGION_ENV_NAME: () => REGION_ENV_NAME, + REGION_INI_NAME: () => REGION_INI_NAME, + getRegionInfo: () => getRegionInfo, + resolveCustomEndpointsConfig: () => resolveCustomEndpointsConfig, + resolveEndpointsConfig: () => resolveEndpointsConfig, + resolveRegionConfig: () => resolveRegionConfig +}); +module.exports = __toCommonJS(src_exports); + +// src/endpointsConfig/NodeUseDualstackEndpointConfigOptions.ts +var import_util_config_provider = require("@smithy/util-config-provider"); +var ENV_USE_DUALSTACK_ENDPOINT = "AWS_USE_DUALSTACK_ENDPOINT"; +var CONFIG_USE_DUALSTACK_ENDPOINT = "use_dualstack_endpoint"; +var DEFAULT_USE_DUALSTACK_ENDPOINT = false; +var 
NODE_USE_DUALSTACK_ENDPOINT_CONFIG_OPTIONS = { + environmentVariableSelector: (env) => (0, import_util_config_provider.booleanSelector)(env, ENV_USE_DUALSTACK_ENDPOINT, import_util_config_provider.SelectorType.ENV), + configFileSelector: (profile) => (0, import_util_config_provider.booleanSelector)(profile, CONFIG_USE_DUALSTACK_ENDPOINT, import_util_config_provider.SelectorType.CONFIG), + default: false +}; + +// src/endpointsConfig/NodeUseFipsEndpointConfigOptions.ts + +var ENV_USE_FIPS_ENDPOINT = "AWS_USE_FIPS_ENDPOINT"; +var CONFIG_USE_FIPS_ENDPOINT = "use_fips_endpoint"; +var DEFAULT_USE_FIPS_ENDPOINT = false; +var NODE_USE_FIPS_ENDPOINT_CONFIG_OPTIONS = { + environmentVariableSelector: (env) => (0, import_util_config_provider.booleanSelector)(env, ENV_USE_FIPS_ENDPOINT, import_util_config_provider.SelectorType.ENV), + configFileSelector: (profile) => (0, import_util_config_provider.booleanSelector)(profile, CONFIG_USE_FIPS_ENDPOINT, import_util_config_provider.SelectorType.CONFIG), + default: false +}; + +// src/endpointsConfig/resolveCustomEndpointsConfig.ts +var import_util_middleware = require("@smithy/util-middleware"); +var resolveCustomEndpointsConfig = /* @__PURE__ */ __name((input) => { + const { tls, endpoint, urlParser, useDualstackEndpoint } = input; + return Object.assign(input, { + tls: tls ?? true, + endpoint: (0, import_util_middleware.normalizeProvider)(typeof endpoint === "string" ? urlParser(endpoint) : endpoint), + isCustomEndpoint: true, + useDualstackEndpoint: (0, import_util_middleware.normalizeProvider)(useDualstackEndpoint ?? 
false) + }); +}, "resolveCustomEndpointsConfig"); + +// src/endpointsConfig/resolveEndpointsConfig.ts + + +// src/endpointsConfig/utils/getEndpointFromRegion.ts +var getEndpointFromRegion = /* @__PURE__ */ __name(async (input) => { + const { tls = true } = input; + const region = await input.region(); + const dnsHostRegex = new RegExp(/^([a-zA-Z0-9]|[a-zA-Z0-9][a-zA-Z0-9-]{0,61}[a-zA-Z0-9])$/); + if (!dnsHostRegex.test(region)) { + throw new Error("Invalid region in client config"); + } + const useDualstackEndpoint = await input.useDualstackEndpoint(); + const useFipsEndpoint = await input.useFipsEndpoint(); + const { hostname } = await input.regionInfoProvider(region, { useDualstackEndpoint, useFipsEndpoint }) ?? {}; + if (!hostname) { + throw new Error("Cannot resolve hostname from client config"); + } + return input.urlParser(`${tls ? "https:" : "http:"}//${hostname}`); +}, "getEndpointFromRegion"); + +// src/endpointsConfig/resolveEndpointsConfig.ts +var resolveEndpointsConfig = /* @__PURE__ */ __name((input) => { + const useDualstackEndpoint = (0, import_util_middleware.normalizeProvider)(input.useDualstackEndpoint ?? false); + const { endpoint, useFipsEndpoint, urlParser, tls } = input; + return Object.assign(input, { + tls: tls ?? true, + endpoint: endpoint ? (0, import_util_middleware.normalizeProvider)(typeof endpoint === "string" ? 
urlParser(endpoint) : endpoint) : () => getEndpointFromRegion({ ...input, useDualstackEndpoint, useFipsEndpoint }), + isCustomEndpoint: !!endpoint, + useDualstackEndpoint + }); +}, "resolveEndpointsConfig"); + +// src/regionConfig/config.ts +var REGION_ENV_NAME = "AWS_REGION"; +var REGION_INI_NAME = "region"; +var NODE_REGION_CONFIG_OPTIONS = { + environmentVariableSelector: (env) => env[REGION_ENV_NAME], + configFileSelector: (profile) => profile[REGION_INI_NAME], + default: () => { + throw new Error("Region is missing"); + } +}; +var NODE_REGION_CONFIG_FILE_OPTIONS = { + preferredFile: "credentials" +}; + +// src/regionConfig/isFipsRegion.ts +var isFipsRegion = /* @__PURE__ */ __name((region) => typeof region === "string" && (region.startsWith("fips-") || region.endsWith("-fips")), "isFipsRegion"); + +// src/regionConfig/getRealRegion.ts +var getRealRegion = /* @__PURE__ */ __name((region) => isFipsRegion(region) ? ["fips-aws-global", "aws-fips"].includes(region) ? "us-east-1" : region.replace(/fips-(dkr-|prod-)?|-fips/, "") : region, "getRealRegion"); + +// src/regionConfig/resolveRegionConfig.ts +var resolveRegionConfig = /* @__PURE__ */ __name((input) => { + const { region, useFipsEndpoint } = input; + if (!region) { + throw new Error("Region is missing"); + } + return Object.assign(input, { + region: async () => { + if (typeof region === "string") { + return getRealRegion(region); + } + const providedRegion = await region(); + return getRealRegion(providedRegion); + }, + useFipsEndpoint: async () => { + const providedRegion = typeof region === "string" ? region : await region(); + if (isFipsRegion(providedRegion)) { + return true; + } + return typeof useFipsEndpoint !== "function" ? 
Promise.resolve(!!useFipsEndpoint) : useFipsEndpoint(); + } + }); +}, "resolveRegionConfig"); + +// src/regionInfo/getHostnameFromVariants.ts +var getHostnameFromVariants = /* @__PURE__ */ __name((variants = [], { useFipsEndpoint, useDualstackEndpoint }) => variants.find( + ({ tags }) => useFipsEndpoint === tags.includes("fips") && useDualstackEndpoint === tags.includes("dualstack") +)?.hostname, "getHostnameFromVariants"); + +// src/regionInfo/getResolvedHostname.ts +var getResolvedHostname = /* @__PURE__ */ __name((resolvedRegion, { regionHostname, partitionHostname }) => regionHostname ? regionHostname : partitionHostname ? partitionHostname.replace("{region}", resolvedRegion) : void 0, "getResolvedHostname"); + +// src/regionInfo/getResolvedPartition.ts +var getResolvedPartition = /* @__PURE__ */ __name((region, { partitionHash }) => Object.keys(partitionHash || {}).find((key) => partitionHash[key].regions.includes(region)) ?? "aws", "getResolvedPartition"); + +// src/regionInfo/getResolvedSigningRegion.ts +var getResolvedSigningRegion = /* @__PURE__ */ __name((hostname, { signingRegion, regionRegex, useFipsEndpoint }) => { + if (signingRegion) { + return signingRegion; + } else if (useFipsEndpoint) { + const regionRegexJs = regionRegex.replace("\\\\", "\\").replace(/^\^/g, "\\.").replace(/\$$/g, "\\."); + const regionRegexmatchArray = hostname.match(regionRegexJs); + if (regionRegexmatchArray) { + return regionRegexmatchArray[0].slice(1, -1); + } + } +}, "getResolvedSigningRegion"); + +// src/regionInfo/getRegionInfo.ts +var getRegionInfo = /* @__PURE__ */ __name((region, { + useFipsEndpoint = false, + useDualstackEndpoint = false, + signingService, + regionHash, + partitionHash +}) => { + const partition = getResolvedPartition(region, { partitionHash }); + const resolvedRegion = region in regionHash ? region : partitionHash[partition]?.endpoint ?? 
region; + const hostnameOptions = { useFipsEndpoint, useDualstackEndpoint }; + const regionHostname = getHostnameFromVariants(regionHash[resolvedRegion]?.variants, hostnameOptions); + const partitionHostname = getHostnameFromVariants(partitionHash[partition]?.variants, hostnameOptions); + const hostname = getResolvedHostname(resolvedRegion, { regionHostname, partitionHostname }); + if (hostname === void 0) { + throw new Error(`Endpoint resolution failed for: ${{ resolvedRegion, useFipsEndpoint, useDualstackEndpoint }}`); + } + const signingRegion = getResolvedSigningRegion(hostname, { + signingRegion: regionHash[resolvedRegion]?.signingRegion, + regionRegex: partitionHash[partition].regionRegex, + useFipsEndpoint + }); + return { + partition, + signingService, + hostname, + ...signingRegion && { signingRegion }, + ...regionHash[resolvedRegion]?.signingService && { + signingService: regionHash[resolvedRegion].signingService + } + }; +}, "getRegionInfo"); +// Annotate the CommonJS export names for ESM import in node: + +0 && (module.exports = { + ENV_USE_DUALSTACK_ENDPOINT, + CONFIG_USE_DUALSTACK_ENDPOINT, + DEFAULT_USE_DUALSTACK_ENDPOINT, + NODE_USE_DUALSTACK_ENDPOINT_CONFIG_OPTIONS, + ENV_USE_FIPS_ENDPOINT, + CONFIG_USE_FIPS_ENDPOINT, + DEFAULT_USE_FIPS_ENDPOINT, + NODE_USE_FIPS_ENDPOINT_CONFIG_OPTIONS, + resolveCustomEndpointsConfig, + resolveEndpointsConfig, + REGION_ENV_NAME, + REGION_INI_NAME, + NODE_REGION_CONFIG_OPTIONS, + NODE_REGION_CONFIG_FILE_OPTIONS, + resolveRegionConfig, + getRegionInfo +}); + diff --git a/amplify/functions/downloadDocument/node_modules/@smithy/config-resolver/dist-cjs/regionConfig/config.js b/amplify/functions/downloadDocument/node_modules/@smithy/config-resolver/dist-cjs/regionConfig/config.js new file mode 100644 index 0000000..0440577 --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@smithy/config-resolver/dist-cjs/regionConfig/config.js @@ -0,0 +1 @@ +module.exports = require("../index.js"); \ No newline at end 
of file diff --git a/amplify/functions/downloadDocument/node_modules/@smithy/config-resolver/dist-cjs/regionConfig/getRealRegion.js b/amplify/functions/downloadDocument/node_modules/@smithy/config-resolver/dist-cjs/regionConfig/getRealRegion.js new file mode 100644 index 0000000..0440577 --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@smithy/config-resolver/dist-cjs/regionConfig/getRealRegion.js @@ -0,0 +1 @@ +module.exports = require("../index.js"); \ No newline at end of file diff --git a/amplify/functions/downloadDocument/node_modules/@smithy/config-resolver/dist-cjs/regionConfig/index.js b/amplify/functions/downloadDocument/node_modules/@smithy/config-resolver/dist-cjs/regionConfig/index.js new file mode 100644 index 0000000..0440577 --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@smithy/config-resolver/dist-cjs/regionConfig/index.js @@ -0,0 +1 @@ +module.exports = require("../index.js"); \ No newline at end of file diff --git a/amplify/functions/downloadDocument/node_modules/@smithy/config-resolver/dist-cjs/regionConfig/isFipsRegion.js b/amplify/functions/downloadDocument/node_modules/@smithy/config-resolver/dist-cjs/regionConfig/isFipsRegion.js new file mode 100644 index 0000000..0440577 --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@smithy/config-resolver/dist-cjs/regionConfig/isFipsRegion.js @@ -0,0 +1 @@ +module.exports = require("../index.js"); \ No newline at end of file diff --git a/amplify/functions/downloadDocument/node_modules/@smithy/config-resolver/dist-cjs/regionConfig/resolveRegionConfig.js b/amplify/functions/downloadDocument/node_modules/@smithy/config-resolver/dist-cjs/regionConfig/resolveRegionConfig.js new file mode 100644 index 0000000..0440577 --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@smithy/config-resolver/dist-cjs/regionConfig/resolveRegionConfig.js @@ -0,0 +1 @@ +module.exports = require("../index.js"); \ No newline at end of file diff --git 
a/amplify/functions/downloadDocument/node_modules/@smithy/config-resolver/dist-cjs/regionInfo/EndpointVariant.js b/amplify/functions/downloadDocument/node_modules/@smithy/config-resolver/dist-cjs/regionInfo/EndpointVariant.js new file mode 100644 index 0000000..0440577 --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@smithy/config-resolver/dist-cjs/regionInfo/EndpointVariant.js @@ -0,0 +1 @@ +module.exports = require("../index.js"); \ No newline at end of file diff --git a/amplify/functions/downloadDocument/node_modules/@smithy/config-resolver/dist-cjs/regionInfo/EndpointVariantTag.js b/amplify/functions/downloadDocument/node_modules/@smithy/config-resolver/dist-cjs/regionInfo/EndpointVariantTag.js new file mode 100644 index 0000000..0440577 --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@smithy/config-resolver/dist-cjs/regionInfo/EndpointVariantTag.js @@ -0,0 +1 @@ +module.exports = require("../index.js"); \ No newline at end of file diff --git a/amplify/functions/downloadDocument/node_modules/@smithy/config-resolver/dist-cjs/regionInfo/PartitionHash.js b/amplify/functions/downloadDocument/node_modules/@smithy/config-resolver/dist-cjs/regionInfo/PartitionHash.js new file mode 100644 index 0000000..0440577 --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@smithy/config-resolver/dist-cjs/regionInfo/PartitionHash.js @@ -0,0 +1 @@ +module.exports = require("../index.js"); \ No newline at end of file diff --git a/amplify/functions/downloadDocument/node_modules/@smithy/config-resolver/dist-cjs/regionInfo/RegionHash.js b/amplify/functions/downloadDocument/node_modules/@smithy/config-resolver/dist-cjs/regionInfo/RegionHash.js new file mode 100644 index 0000000..0440577 --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@smithy/config-resolver/dist-cjs/regionInfo/RegionHash.js @@ -0,0 +1 @@ +module.exports = require("../index.js"); \ No newline at end of file diff --git 
a/amplify/functions/downloadDocument/node_modules/@smithy/config-resolver/dist-cjs/regionInfo/getHostnameFromVariants.js b/amplify/functions/downloadDocument/node_modules/@smithy/config-resolver/dist-cjs/regionInfo/getHostnameFromVariants.js new file mode 100644 index 0000000..0440577 --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@smithy/config-resolver/dist-cjs/regionInfo/getHostnameFromVariants.js @@ -0,0 +1 @@ +module.exports = require("../index.js"); \ No newline at end of file diff --git a/amplify/functions/downloadDocument/node_modules/@smithy/config-resolver/dist-cjs/regionInfo/getRegionInfo.js b/amplify/functions/downloadDocument/node_modules/@smithy/config-resolver/dist-cjs/regionInfo/getRegionInfo.js new file mode 100644 index 0000000..0440577 --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@smithy/config-resolver/dist-cjs/regionInfo/getRegionInfo.js @@ -0,0 +1 @@ +module.exports = require("../index.js"); \ No newline at end of file diff --git a/amplify/functions/downloadDocument/node_modules/@smithy/config-resolver/dist-cjs/regionInfo/getResolvedHostname.js b/amplify/functions/downloadDocument/node_modules/@smithy/config-resolver/dist-cjs/regionInfo/getResolvedHostname.js new file mode 100644 index 0000000..0440577 --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@smithy/config-resolver/dist-cjs/regionInfo/getResolvedHostname.js @@ -0,0 +1 @@ +module.exports = require("../index.js"); \ No newline at end of file diff --git a/amplify/functions/downloadDocument/node_modules/@smithy/config-resolver/dist-cjs/regionInfo/getResolvedPartition.js b/amplify/functions/downloadDocument/node_modules/@smithy/config-resolver/dist-cjs/regionInfo/getResolvedPartition.js new file mode 100644 index 0000000..0440577 --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@smithy/config-resolver/dist-cjs/regionInfo/getResolvedPartition.js @@ -0,0 +1 @@ +module.exports = require("../index.js"); \ No 
newline at end of file diff --git a/amplify/functions/downloadDocument/node_modules/@smithy/config-resolver/dist-cjs/regionInfo/getResolvedSigningRegion.js b/amplify/functions/downloadDocument/node_modules/@smithy/config-resolver/dist-cjs/regionInfo/getResolvedSigningRegion.js new file mode 100644 index 0000000..0440577 --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@smithy/config-resolver/dist-cjs/regionInfo/getResolvedSigningRegion.js @@ -0,0 +1 @@ +module.exports = require("../index.js"); \ No newline at end of file diff --git a/amplify/functions/downloadDocument/node_modules/@smithy/config-resolver/dist-cjs/regionInfo/index.js b/amplify/functions/downloadDocument/node_modules/@smithy/config-resolver/dist-cjs/regionInfo/index.js new file mode 100644 index 0000000..0440577 --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@smithy/config-resolver/dist-cjs/regionInfo/index.js @@ -0,0 +1 @@ +module.exports = require("../index.js"); \ No newline at end of file diff --git a/amplify/functions/downloadDocument/node_modules/@smithy/config-resolver/dist-es/endpointsConfig/NodeUseDualstackEndpointConfigOptions.js b/amplify/functions/downloadDocument/node_modules/@smithy/config-resolver/dist-es/endpointsConfig/NodeUseDualstackEndpointConfigOptions.js new file mode 100644 index 0000000..d061567 --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@smithy/config-resolver/dist-es/endpointsConfig/NodeUseDualstackEndpointConfigOptions.js @@ -0,0 +1,9 @@ +import { booleanSelector, SelectorType } from "@smithy/util-config-provider"; +export const ENV_USE_DUALSTACK_ENDPOINT = "AWS_USE_DUALSTACK_ENDPOINT"; +export const CONFIG_USE_DUALSTACK_ENDPOINT = "use_dualstack_endpoint"; +export const DEFAULT_USE_DUALSTACK_ENDPOINT = false; +export const NODE_USE_DUALSTACK_ENDPOINT_CONFIG_OPTIONS = { + environmentVariableSelector: (env) => booleanSelector(env, ENV_USE_DUALSTACK_ENDPOINT, SelectorType.ENV), + configFileSelector: (profile) => 
booleanSelector(profile, CONFIG_USE_DUALSTACK_ENDPOINT, SelectorType.CONFIG), + default: false, +}; diff --git a/amplify/functions/downloadDocument/node_modules/@smithy/config-resolver/dist-es/endpointsConfig/NodeUseFipsEndpointConfigOptions.js b/amplify/functions/downloadDocument/node_modules/@smithy/config-resolver/dist-es/endpointsConfig/NodeUseFipsEndpointConfigOptions.js new file mode 100644 index 0000000..8cac1e9 --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@smithy/config-resolver/dist-es/endpointsConfig/NodeUseFipsEndpointConfigOptions.js @@ -0,0 +1,9 @@ +import { booleanSelector, SelectorType } from "@smithy/util-config-provider"; +export const ENV_USE_FIPS_ENDPOINT = "AWS_USE_FIPS_ENDPOINT"; +export const CONFIG_USE_FIPS_ENDPOINT = "use_fips_endpoint"; +export const DEFAULT_USE_FIPS_ENDPOINT = false; +export const NODE_USE_FIPS_ENDPOINT_CONFIG_OPTIONS = { + environmentVariableSelector: (env) => booleanSelector(env, ENV_USE_FIPS_ENDPOINT, SelectorType.ENV), + configFileSelector: (profile) => booleanSelector(profile, CONFIG_USE_FIPS_ENDPOINT, SelectorType.CONFIG), + default: false, +}; diff --git a/amplify/functions/downloadDocument/node_modules/@smithy/config-resolver/dist-es/endpointsConfig/index.js b/amplify/functions/downloadDocument/node_modules/@smithy/config-resolver/dist-es/endpointsConfig/index.js new file mode 100644 index 0000000..1424c22 --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@smithy/config-resolver/dist-es/endpointsConfig/index.js @@ -0,0 +1,4 @@ +export * from "./NodeUseDualstackEndpointConfigOptions"; +export * from "./NodeUseFipsEndpointConfigOptions"; +export * from "./resolveCustomEndpointsConfig"; +export * from "./resolveEndpointsConfig"; diff --git a/amplify/functions/downloadDocument/node_modules/@smithy/config-resolver/dist-es/endpointsConfig/resolveCustomEndpointsConfig.js 
b/amplify/functions/downloadDocument/node_modules/@smithy/config-resolver/dist-es/endpointsConfig/resolveCustomEndpointsConfig.js new file mode 100644 index 0000000..7f9a953 --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@smithy/config-resolver/dist-es/endpointsConfig/resolveCustomEndpointsConfig.js @@ -0,0 +1,10 @@ +import { normalizeProvider } from "@smithy/util-middleware"; +export const resolveCustomEndpointsConfig = (input) => { + const { tls, endpoint, urlParser, useDualstackEndpoint } = input; + return Object.assign(input, { + tls: tls ?? true, + endpoint: normalizeProvider(typeof endpoint === "string" ? urlParser(endpoint) : endpoint), + isCustomEndpoint: true, + useDualstackEndpoint: normalizeProvider(useDualstackEndpoint ?? false), + }); +}; diff --git a/amplify/functions/downloadDocument/node_modules/@smithy/config-resolver/dist-es/endpointsConfig/resolveEndpointsConfig.js b/amplify/functions/downloadDocument/node_modules/@smithy/config-resolver/dist-es/endpointsConfig/resolveEndpointsConfig.js new file mode 100644 index 0000000..440657d --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@smithy/config-resolver/dist-es/endpointsConfig/resolveEndpointsConfig.js @@ -0,0 +1,14 @@ +import { normalizeProvider } from "@smithy/util-middleware"; +import { getEndpointFromRegion } from "./utils/getEndpointFromRegion"; +export const resolveEndpointsConfig = (input) => { + const useDualstackEndpoint = normalizeProvider(input.useDualstackEndpoint ?? false); + const { endpoint, useFipsEndpoint, urlParser, tls } = input; + return Object.assign(input, { + tls: tls ?? true, + endpoint: endpoint + ? normalizeProvider(typeof endpoint === "string" ? 
urlParser(endpoint) : endpoint) + : () => getEndpointFromRegion({ ...input, useDualstackEndpoint, useFipsEndpoint }), + isCustomEndpoint: !!endpoint, + useDualstackEndpoint, + }); +}; diff --git a/amplify/functions/downloadDocument/node_modules/@smithy/config-resolver/dist-es/endpointsConfig/utils/getEndpointFromRegion.js b/amplify/functions/downloadDocument/node_modules/@smithy/config-resolver/dist-es/endpointsConfig/utils/getEndpointFromRegion.js new file mode 100644 index 0000000..5627c32 --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@smithy/config-resolver/dist-es/endpointsConfig/utils/getEndpointFromRegion.js @@ -0,0 +1,15 @@ +export const getEndpointFromRegion = async (input) => { + const { tls = true } = input; + const region = await input.region(); + const dnsHostRegex = new RegExp(/^([a-zA-Z0-9]|[a-zA-Z0-9][a-zA-Z0-9-]{0,61}[a-zA-Z0-9])$/); + if (!dnsHostRegex.test(region)) { + throw new Error("Invalid region in client config"); + } + const useDualstackEndpoint = await input.useDualstackEndpoint(); + const useFipsEndpoint = await input.useFipsEndpoint(); + const { hostname } = (await input.regionInfoProvider(region, { useDualstackEndpoint, useFipsEndpoint })) ?? {}; + if (!hostname) { + throw new Error("Cannot resolve hostname from client config"); + } + return input.urlParser(`${tls ? 
"https:" : "http:"}//${hostname}`); +}; diff --git a/amplify/functions/downloadDocument/node_modules/@smithy/config-resolver/dist-es/index.js b/amplify/functions/downloadDocument/node_modules/@smithy/config-resolver/dist-es/index.js new file mode 100644 index 0000000..61456a7 --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@smithy/config-resolver/dist-es/index.js @@ -0,0 +1,3 @@ +export * from "./endpointsConfig"; +export * from "./regionConfig"; +export * from "./regionInfo"; diff --git a/amplify/functions/downloadDocument/node_modules/@smithy/config-resolver/dist-es/regionConfig/config.js b/amplify/functions/downloadDocument/node_modules/@smithy/config-resolver/dist-es/regionConfig/config.js new file mode 100644 index 0000000..7db9896 --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@smithy/config-resolver/dist-es/regionConfig/config.js @@ -0,0 +1,12 @@ +export const REGION_ENV_NAME = "AWS_REGION"; +export const REGION_INI_NAME = "region"; +export const NODE_REGION_CONFIG_OPTIONS = { + environmentVariableSelector: (env) => env[REGION_ENV_NAME], + configFileSelector: (profile) => profile[REGION_INI_NAME], + default: () => { + throw new Error("Region is missing"); + }, +}; +export const NODE_REGION_CONFIG_FILE_OPTIONS = { + preferredFile: "credentials", +}; diff --git a/amplify/functions/downloadDocument/node_modules/@smithy/config-resolver/dist-es/regionConfig/getRealRegion.js b/amplify/functions/downloadDocument/node_modules/@smithy/config-resolver/dist-es/regionConfig/getRealRegion.js new file mode 100644 index 0000000..8d1246b --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@smithy/config-resolver/dist-es/regionConfig/getRealRegion.js @@ -0,0 +1,6 @@ +import { isFipsRegion } from "./isFipsRegion"; +export const getRealRegion = (region) => isFipsRegion(region) + ? ["fips-aws-global", "aws-fips"].includes(region) + ? 
"us-east-1" + : region.replace(/fips-(dkr-|prod-)?|-fips/, "") + : region; diff --git a/amplify/functions/downloadDocument/node_modules/@smithy/config-resolver/dist-es/regionConfig/index.js b/amplify/functions/downloadDocument/node_modules/@smithy/config-resolver/dist-es/regionConfig/index.js new file mode 100644 index 0000000..83675f7 --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@smithy/config-resolver/dist-es/regionConfig/index.js @@ -0,0 +1,2 @@ +export * from "./config"; +export * from "./resolveRegionConfig"; diff --git a/amplify/functions/downloadDocument/node_modules/@smithy/config-resolver/dist-es/regionConfig/isFipsRegion.js b/amplify/functions/downloadDocument/node_modules/@smithy/config-resolver/dist-es/regionConfig/isFipsRegion.js new file mode 100644 index 0000000..d758967 --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@smithy/config-resolver/dist-es/regionConfig/isFipsRegion.js @@ -0,0 +1 @@ +export const isFipsRegion = (region) => typeof region === "string" && (region.startsWith("fips-") || region.endsWith("-fips")); diff --git a/amplify/functions/downloadDocument/node_modules/@smithy/config-resolver/dist-es/regionConfig/resolveRegionConfig.js b/amplify/functions/downloadDocument/node_modules/@smithy/config-resolver/dist-es/regionConfig/resolveRegionConfig.js new file mode 100644 index 0000000..f88e00f --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@smithy/config-resolver/dist-es/regionConfig/resolveRegionConfig.js @@ -0,0 +1,24 @@ +import { getRealRegion } from "./getRealRegion"; +import { isFipsRegion } from "./isFipsRegion"; +export const resolveRegionConfig = (input) => { + const { region, useFipsEndpoint } = input; + if (!region) { + throw new Error("Region is missing"); + } + return Object.assign(input, { + region: async () => { + if (typeof region === "string") { + return getRealRegion(region); + } + const providedRegion = await region(); + return getRealRegion(providedRegion); + 
}, + useFipsEndpoint: async () => { + const providedRegion = typeof region === "string" ? region : await region(); + if (isFipsRegion(providedRegion)) { + return true; + } + return typeof useFipsEndpoint !== "function" ? Promise.resolve(!!useFipsEndpoint) : useFipsEndpoint(); + }, + }); +}; diff --git a/amplify/functions/downloadDocument/node_modules/@smithy/config-resolver/dist-es/regionInfo/EndpointVariant.js b/amplify/functions/downloadDocument/node_modules/@smithy/config-resolver/dist-es/regionInfo/EndpointVariant.js new file mode 100644 index 0000000..cb0ff5c --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@smithy/config-resolver/dist-es/regionInfo/EndpointVariant.js @@ -0,0 +1 @@ +export {}; diff --git a/amplify/functions/downloadDocument/node_modules/@smithy/config-resolver/dist-es/regionInfo/EndpointVariantTag.js b/amplify/functions/downloadDocument/node_modules/@smithy/config-resolver/dist-es/regionInfo/EndpointVariantTag.js new file mode 100644 index 0000000..cb0ff5c --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@smithy/config-resolver/dist-es/regionInfo/EndpointVariantTag.js @@ -0,0 +1 @@ +export {}; diff --git a/amplify/functions/downloadDocument/node_modules/@smithy/config-resolver/dist-es/regionInfo/PartitionHash.js b/amplify/functions/downloadDocument/node_modules/@smithy/config-resolver/dist-es/regionInfo/PartitionHash.js new file mode 100644 index 0000000..cb0ff5c --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@smithy/config-resolver/dist-es/regionInfo/PartitionHash.js @@ -0,0 +1 @@ +export {}; diff --git a/amplify/functions/downloadDocument/node_modules/@smithy/config-resolver/dist-es/regionInfo/RegionHash.js b/amplify/functions/downloadDocument/node_modules/@smithy/config-resolver/dist-es/regionInfo/RegionHash.js new file mode 100644 index 0000000..cb0ff5c --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@smithy/config-resolver/dist-es/regionInfo/RegionHash.js @@ 
-0,0 +1 @@ +export {}; diff --git a/amplify/functions/downloadDocument/node_modules/@smithy/config-resolver/dist-es/regionInfo/getHostnameFromVariants.js b/amplify/functions/downloadDocument/node_modules/@smithy/config-resolver/dist-es/regionInfo/getHostnameFromVariants.js new file mode 100644 index 0000000..84fc50e --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@smithy/config-resolver/dist-es/regionInfo/getHostnameFromVariants.js @@ -0,0 +1 @@ +export const getHostnameFromVariants = (variants = [], { useFipsEndpoint, useDualstackEndpoint }) => variants.find(({ tags }) => useFipsEndpoint === tags.includes("fips") && useDualstackEndpoint === tags.includes("dualstack"))?.hostname; diff --git a/amplify/functions/downloadDocument/node_modules/@smithy/config-resolver/dist-es/regionInfo/getRegionInfo.js b/amplify/functions/downloadDocument/node_modules/@smithy/config-resolver/dist-es/regionInfo/getRegionInfo.js new file mode 100644 index 0000000..c39e2f7 --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@smithy/config-resolver/dist-es/regionInfo/getRegionInfo.js @@ -0,0 +1,29 @@ +import { getHostnameFromVariants } from "./getHostnameFromVariants"; +import { getResolvedHostname } from "./getResolvedHostname"; +import { getResolvedPartition } from "./getResolvedPartition"; +import { getResolvedSigningRegion } from "./getResolvedSigningRegion"; +export const getRegionInfo = (region, { useFipsEndpoint = false, useDualstackEndpoint = false, signingService, regionHash, partitionHash, }) => { + const partition = getResolvedPartition(region, { partitionHash }); + const resolvedRegion = region in regionHash ? region : partitionHash[partition]?.endpoint ?? 
region; + const hostnameOptions = { useFipsEndpoint, useDualstackEndpoint }; + const regionHostname = getHostnameFromVariants(regionHash[resolvedRegion]?.variants, hostnameOptions); + const partitionHostname = getHostnameFromVariants(partitionHash[partition]?.variants, hostnameOptions); + const hostname = getResolvedHostname(resolvedRegion, { regionHostname, partitionHostname }); + if (hostname === undefined) { + throw new Error(`Endpoint resolution failed for: ${{ resolvedRegion, useFipsEndpoint, useDualstackEndpoint }}`); + } + const signingRegion = getResolvedSigningRegion(hostname, { + signingRegion: regionHash[resolvedRegion]?.signingRegion, + regionRegex: partitionHash[partition].regionRegex, + useFipsEndpoint, + }); + return { + partition, + signingService, + hostname, + ...(signingRegion && { signingRegion }), + ...(regionHash[resolvedRegion]?.signingService && { + signingService: regionHash[resolvedRegion].signingService, + }), + }; +}; diff --git a/amplify/functions/downloadDocument/node_modules/@smithy/config-resolver/dist-es/regionInfo/getResolvedHostname.js b/amplify/functions/downloadDocument/node_modules/@smithy/config-resolver/dist-es/regionInfo/getResolvedHostname.js new file mode 100644 index 0000000..35fb988 --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@smithy/config-resolver/dist-es/regionInfo/getResolvedHostname.js @@ -0,0 +1,5 @@ +export const getResolvedHostname = (resolvedRegion, { regionHostname, partitionHostname }) => regionHostname + ? regionHostname + : partitionHostname + ? 
partitionHostname.replace("{region}", resolvedRegion) + : undefined; diff --git a/amplify/functions/downloadDocument/node_modules/@smithy/config-resolver/dist-es/regionInfo/getResolvedPartition.js b/amplify/functions/downloadDocument/node_modules/@smithy/config-resolver/dist-es/regionInfo/getResolvedPartition.js new file mode 100644 index 0000000..3d7bc55 --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@smithy/config-resolver/dist-es/regionInfo/getResolvedPartition.js @@ -0,0 +1 @@ +export const getResolvedPartition = (region, { partitionHash }) => Object.keys(partitionHash || {}).find((key) => partitionHash[key].regions.includes(region)) ?? "aws"; diff --git a/amplify/functions/downloadDocument/node_modules/@smithy/config-resolver/dist-es/regionInfo/getResolvedSigningRegion.js b/amplify/functions/downloadDocument/node_modules/@smithy/config-resolver/dist-es/regionInfo/getResolvedSigningRegion.js new file mode 100644 index 0000000..7977e00 --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@smithy/config-resolver/dist-es/regionInfo/getResolvedSigningRegion.js @@ -0,0 +1,12 @@ +export const getResolvedSigningRegion = (hostname, { signingRegion, regionRegex, useFipsEndpoint }) => { + if (signingRegion) { + return signingRegion; + } + else if (useFipsEndpoint) { + const regionRegexJs = regionRegex.replace("\\\\", "\\").replace(/^\^/g, "\\.").replace(/\$$/g, "\\."); + const regionRegexmatchArray = hostname.match(regionRegexJs); + if (regionRegexmatchArray) { + return regionRegexmatchArray[0].slice(1, -1); + } + } +}; diff --git a/amplify/functions/downloadDocument/node_modules/@smithy/config-resolver/dist-es/regionInfo/index.js b/amplify/functions/downloadDocument/node_modules/@smithy/config-resolver/dist-es/regionInfo/index.js new file mode 100644 index 0000000..e29686a --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@smithy/config-resolver/dist-es/regionInfo/index.js @@ -0,0 +1,3 @@ +export * from 
"./PartitionHash"; +export * from "./RegionHash"; +export * from "./getRegionInfo"; diff --git a/amplify/functions/downloadDocument/node_modules/@smithy/config-resolver/dist-types/endpointsConfig/NodeUseDualstackEndpointConfigOptions.d.ts b/amplify/functions/downloadDocument/node_modules/@smithy/config-resolver/dist-types/endpointsConfig/NodeUseDualstackEndpointConfigOptions.d.ts new file mode 100644 index 0000000..172d8c1 --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@smithy/config-resolver/dist-types/endpointsConfig/NodeUseDualstackEndpointConfigOptions.d.ts @@ -0,0 +1,17 @@ +import { LoadedConfigSelectors } from "@smithy/node-config-provider"; +/** + * @internal + */ +export declare const ENV_USE_DUALSTACK_ENDPOINT = "AWS_USE_DUALSTACK_ENDPOINT"; +/** + * @internal + */ +export declare const CONFIG_USE_DUALSTACK_ENDPOINT = "use_dualstack_endpoint"; +/** + * @internal + */ +export declare const DEFAULT_USE_DUALSTACK_ENDPOINT = false; +/** + * @internal + */ +export declare const NODE_USE_DUALSTACK_ENDPOINT_CONFIG_OPTIONS: LoadedConfigSelectors; diff --git a/amplify/functions/downloadDocument/node_modules/@smithy/config-resolver/dist-types/endpointsConfig/NodeUseFipsEndpointConfigOptions.d.ts b/amplify/functions/downloadDocument/node_modules/@smithy/config-resolver/dist-types/endpointsConfig/NodeUseFipsEndpointConfigOptions.d.ts new file mode 100644 index 0000000..106bbdb --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@smithy/config-resolver/dist-types/endpointsConfig/NodeUseFipsEndpointConfigOptions.d.ts @@ -0,0 +1,17 @@ +import { LoadedConfigSelectors } from "@smithy/node-config-provider"; +/** + * @internal + */ +export declare const ENV_USE_FIPS_ENDPOINT = "AWS_USE_FIPS_ENDPOINT"; +/** + * @internal + */ +export declare const CONFIG_USE_FIPS_ENDPOINT = "use_fips_endpoint"; +/** + * @internal + */ +export declare const DEFAULT_USE_FIPS_ENDPOINT = false; +/** + * @internal + */ +export declare const 
NODE_USE_FIPS_ENDPOINT_CONFIG_OPTIONS: LoadedConfigSelectors; diff --git a/amplify/functions/downloadDocument/node_modules/@smithy/config-resolver/dist-types/endpointsConfig/index.d.ts b/amplify/functions/downloadDocument/node_modules/@smithy/config-resolver/dist-types/endpointsConfig/index.d.ts new file mode 100644 index 0000000..ea1cf59 --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@smithy/config-resolver/dist-types/endpointsConfig/index.d.ts @@ -0,0 +1,16 @@ +/** + * @internal + */ +export * from "./NodeUseDualstackEndpointConfigOptions"; +/** + * @internal + */ +export * from "./NodeUseFipsEndpointConfigOptions"; +/** + * @internal + */ +export * from "./resolveCustomEndpointsConfig"; +/** + * @internal + */ +export * from "./resolveEndpointsConfig"; diff --git a/amplify/functions/downloadDocument/node_modules/@smithy/config-resolver/dist-types/endpointsConfig/resolveCustomEndpointsConfig.d.ts b/amplify/functions/downloadDocument/node_modules/@smithy/config-resolver/dist-types/endpointsConfig/resolveCustomEndpointsConfig.d.ts new file mode 100644 index 0000000..477afbc --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@smithy/config-resolver/dist-types/endpointsConfig/resolveCustomEndpointsConfig.d.ts @@ -0,0 +1,32 @@ +import { Endpoint, Provider, UrlParser } from "@smithy/types"; +import { EndpointsInputConfig, EndpointsResolvedConfig } from "./resolveEndpointsConfig"; +/** + * @public + */ +export interface CustomEndpointsInputConfig extends EndpointsInputConfig { + /** + * The fully qualified endpoint of the webservice. + */ + endpoint: string | Endpoint | Provider; +} +/** + * @internal + */ +interface PreviouslyResolved { + urlParser: UrlParser; +} +/** + * @internal + */ +export interface CustomEndpointsResolvedConfig extends EndpointsResolvedConfig { + /** + * Whether the endpoint is specified by caller. 
+ * @internal + */ + isCustomEndpoint: true; +} +/** + * @internal + */ +export declare const resolveCustomEndpointsConfig: (input: T & CustomEndpointsInputConfig & PreviouslyResolved) => T & CustomEndpointsResolvedConfig; +export {}; diff --git a/amplify/functions/downloadDocument/node_modules/@smithy/config-resolver/dist-types/endpointsConfig/resolveEndpointsConfig.d.ts b/amplify/functions/downloadDocument/node_modules/@smithy/config-resolver/dist-types/endpointsConfig/resolveEndpointsConfig.d.ts new file mode 100644 index 0000000..4cd1d8f --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@smithy/config-resolver/dist-types/endpointsConfig/resolveEndpointsConfig.d.ts @@ -0,0 +1,54 @@ +import { Endpoint, Provider, RegionInfoProvider, UrlParser } from "@smithy/types"; +/** + * @public + */ +export interface EndpointsInputConfig { + /** + * The fully qualified endpoint of the webservice. This is only required when using + * a custom endpoint (for example, when using a local version of S3). + */ + endpoint?: string | Endpoint | Provider; + /** + * Whether TLS is enabled for requests. + */ + tls?: boolean; + /** + * Enables IPv6/IPv4 dualstack endpoint. + */ + useDualstackEndpoint?: boolean | Provider; +} +/** + * @internal + */ +interface PreviouslyResolved { + regionInfoProvider: RegionInfoProvider; + urlParser: UrlParser; + region: Provider; + useFipsEndpoint: Provider; +} +/** + * @internal + */ +export interface EndpointsResolvedConfig extends Required { + /** + * Resolved value for input {@link EndpointsInputConfig.endpoint} + */ + endpoint: Provider; + /** + * Whether the endpoint is specified by caller. + * @internal + */ + isCustomEndpoint?: boolean; + /** + * Resolved value for input {@link EndpointsInputConfig.useDualstackEndpoint} + */ + useDualstackEndpoint: Provider; +} +/** + * @internal + * + * @deprecated endpoints rulesets use \@smithy/middleware-endpoint resolveEndpointConfig. 
+ * All generated clients should migrate to Endpoints 2.0 endpointRuleSet traits. + */ +export declare const resolveEndpointsConfig: (input: T & EndpointsInputConfig & PreviouslyResolved) => T & EndpointsResolvedConfig; +export {}; diff --git a/amplify/functions/downloadDocument/node_modules/@smithy/config-resolver/dist-types/endpointsConfig/utils/getEndpointFromRegion.d.ts b/amplify/functions/downloadDocument/node_modules/@smithy/config-resolver/dist-types/endpointsConfig/utils/getEndpointFromRegion.d.ts new file mode 100644 index 0000000..5ded732 --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@smithy/config-resolver/dist-types/endpointsConfig/utils/getEndpointFromRegion.d.ts @@ -0,0 +1,11 @@ +import { Provider, RegionInfoProvider, UrlParser } from "@smithy/types"; +interface GetEndpointFromRegionOptions { + region: Provider; + tls?: boolean; + regionInfoProvider: RegionInfoProvider; + urlParser: UrlParser; + useDualstackEndpoint: Provider; + useFipsEndpoint: Provider; +} +export declare const getEndpointFromRegion: (input: GetEndpointFromRegionOptions) => Promise; +export {}; diff --git a/amplify/functions/downloadDocument/node_modules/@smithy/config-resolver/dist-types/index.d.ts b/amplify/functions/downloadDocument/node_modules/@smithy/config-resolver/dist-types/index.d.ts new file mode 100644 index 0000000..fde7086 --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@smithy/config-resolver/dist-types/index.d.ts @@ -0,0 +1,12 @@ +/** + * @internal + */ +export * from "./endpointsConfig"; +/** + * @internal + */ +export * from "./regionConfig"; +/** + * @internal + */ +export * from "./regionInfo"; diff --git a/amplify/functions/downloadDocument/node_modules/@smithy/config-resolver/dist-types/regionConfig/config.d.ts b/amplify/functions/downloadDocument/node_modules/@smithy/config-resolver/dist-types/regionConfig/config.d.ts new file mode 100644 index 0000000..d203bb0 --- /dev/null +++ 
b/amplify/functions/downloadDocument/node_modules/@smithy/config-resolver/dist-types/regionConfig/config.d.ts @@ -0,0 +1,17 @@ +import { LoadedConfigSelectors, LocalConfigOptions } from "@smithy/node-config-provider"; +/** + * @internal + */ +export declare const REGION_ENV_NAME = "AWS_REGION"; +/** + * @internal + */ +export declare const REGION_INI_NAME = "region"; +/** + * @internal + */ +export declare const NODE_REGION_CONFIG_OPTIONS: LoadedConfigSelectors; +/** + * @internal + */ +export declare const NODE_REGION_CONFIG_FILE_OPTIONS: LocalConfigOptions; diff --git a/amplify/functions/downloadDocument/node_modules/@smithy/config-resolver/dist-types/regionConfig/getRealRegion.d.ts b/amplify/functions/downloadDocument/node_modules/@smithy/config-resolver/dist-types/regionConfig/getRealRegion.d.ts new file mode 100644 index 0000000..c70fb5b --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@smithy/config-resolver/dist-types/regionConfig/getRealRegion.d.ts @@ -0,0 +1,4 @@ +/** + * @internal + */ +export declare const getRealRegion: (region: string) => string; diff --git a/amplify/functions/downloadDocument/node_modules/@smithy/config-resolver/dist-types/regionConfig/index.d.ts b/amplify/functions/downloadDocument/node_modules/@smithy/config-resolver/dist-types/regionConfig/index.d.ts new file mode 100644 index 0000000..6dcf5e5 --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@smithy/config-resolver/dist-types/regionConfig/index.d.ts @@ -0,0 +1,8 @@ +/** + * @internal + */ +export * from "./config"; +/** + * @internal + */ +export * from "./resolveRegionConfig"; diff --git a/amplify/functions/downloadDocument/node_modules/@smithy/config-resolver/dist-types/regionConfig/isFipsRegion.d.ts b/amplify/functions/downloadDocument/node_modules/@smithy/config-resolver/dist-types/regionConfig/isFipsRegion.d.ts new file mode 100644 index 0000000..b42cee7 --- /dev/null +++ 
b/amplify/functions/downloadDocument/node_modules/@smithy/config-resolver/dist-types/regionConfig/isFipsRegion.d.ts @@ -0,0 +1,4 @@ +/** + * @internal + */ +export declare const isFipsRegion: (region: string) => boolean; diff --git a/amplify/functions/downloadDocument/node_modules/@smithy/config-resolver/dist-types/regionConfig/resolveRegionConfig.d.ts b/amplify/functions/downloadDocument/node_modules/@smithy/config-resolver/dist-types/regionConfig/resolveRegionConfig.d.ts new file mode 100644 index 0000000..c06c9d4 --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@smithy/config-resolver/dist-types/regionConfig/resolveRegionConfig.d.ts @@ -0,0 +1,34 @@ +import { Provider } from "@smithy/types"; +/** + * @public + */ +export interface RegionInputConfig { + /** + * The AWS region to which this client will send requests + */ + region?: string | Provider; + /** + * Enables FIPS compatible endpoints. + */ + useFipsEndpoint?: boolean | Provider; +} +interface PreviouslyResolved { +} +/** + * @internal + */ +export interface RegionResolvedConfig { + /** + * Resolved value for input config {@link RegionInputConfig.region} + */ + region: Provider; + /** + * Resolved value for input {@link RegionInputConfig.useFipsEndpoint} + */ + useFipsEndpoint: Provider; +} +/** + * @internal + */ +export declare const resolveRegionConfig: (input: T & RegionInputConfig & PreviouslyResolved) => T & RegionResolvedConfig; +export {}; diff --git a/amplify/functions/downloadDocument/node_modules/@smithy/config-resolver/dist-types/regionInfo/EndpointVariant.d.ts b/amplify/functions/downloadDocument/node_modules/@smithy/config-resolver/dist-types/regionInfo/EndpointVariant.d.ts new file mode 100644 index 0000000..9b68e93 --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@smithy/config-resolver/dist-types/regionInfo/EndpointVariant.d.ts @@ -0,0 +1,10 @@ +import { EndpointVariantTag } from "./EndpointVariantTag"; +/** + * @internal + * + * Provides hostname 
information for specific host label. + */ +export type EndpointVariant = { + hostname: string; + tags: EndpointVariantTag[]; +}; diff --git a/amplify/functions/downloadDocument/node_modules/@smithy/config-resolver/dist-types/regionInfo/EndpointVariantTag.d.ts b/amplify/functions/downloadDocument/node_modules/@smithy/config-resolver/dist-types/regionInfo/EndpointVariantTag.d.ts new file mode 100644 index 0000000..ca50e1f --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@smithy/config-resolver/dist-types/regionInfo/EndpointVariantTag.d.ts @@ -0,0 +1,7 @@ +/** + * @internal + * + * The tag which mentions which area variant is providing information for. + * Can be either "fips" or "dualstack". + */ +export type EndpointVariantTag = "fips" | "dualstack"; diff --git a/amplify/functions/downloadDocument/node_modules/@smithy/config-resolver/dist-types/regionInfo/PartitionHash.d.ts b/amplify/functions/downloadDocument/node_modules/@smithy/config-resolver/dist-types/regionInfo/PartitionHash.d.ts new file mode 100644 index 0000000..0a5be17 --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@smithy/config-resolver/dist-types/regionInfo/PartitionHash.d.ts @@ -0,0 +1,14 @@ +import { EndpointVariant } from "./EndpointVariant"; +/** + * @internal + * + * The hash of partition with the information specific to that partition. + * The information includes the list of regions belonging to that partition, + * and the hostname to be used for the partition. 
+ */ +export type PartitionHash = Record; diff --git a/amplify/functions/downloadDocument/node_modules/@smithy/config-resolver/dist-types/regionInfo/RegionHash.d.ts b/amplify/functions/downloadDocument/node_modules/@smithy/config-resolver/dist-types/regionInfo/RegionHash.d.ts new file mode 100644 index 0000000..01cd843 --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@smithy/config-resolver/dist-types/regionInfo/RegionHash.d.ts @@ -0,0 +1,12 @@ +import { EndpointVariant } from "./EndpointVariant"; +/** + * @internal + * + * The hash of region with the information specific to that region. + * The information can include hostname, signingService and signingRegion. + */ +export type RegionHash = Record; diff --git a/amplify/functions/downloadDocument/node_modules/@smithy/config-resolver/dist-types/regionInfo/getHostnameFromVariants.d.ts b/amplify/functions/downloadDocument/node_modules/@smithy/config-resolver/dist-types/regionInfo/getHostnameFromVariants.d.ts new file mode 100644 index 0000000..47bcf70 --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@smithy/config-resolver/dist-types/regionInfo/getHostnameFromVariants.d.ts @@ -0,0 +1,12 @@ +import { EndpointVariant } from "./EndpointVariant"; +/** + * @internal + */ +export interface GetHostnameFromVariantsOptions { + useFipsEndpoint: boolean; + useDualstackEndpoint: boolean; +} +/** + * @internal + */ +export declare const getHostnameFromVariants: (variants: EndpointVariant[] | undefined, { useFipsEndpoint, useDualstackEndpoint }: GetHostnameFromVariantsOptions) => string | undefined; diff --git a/amplify/functions/downloadDocument/node_modules/@smithy/config-resolver/dist-types/regionInfo/getRegionInfo.d.ts b/amplify/functions/downloadDocument/node_modules/@smithy/config-resolver/dist-types/regionInfo/getRegionInfo.d.ts new file mode 100644 index 0000000..0aaae08 --- /dev/null +++ 
b/amplify/functions/downloadDocument/node_modules/@smithy/config-resolver/dist-types/regionInfo/getRegionInfo.d.ts @@ -0,0 +1,17 @@ +import { RegionInfo } from "@smithy/types"; +import { PartitionHash } from "./PartitionHash"; +import { RegionHash } from "./RegionHash"; +/** + * @internal + */ +export interface GetRegionInfoOptions { + useFipsEndpoint?: boolean; + useDualstackEndpoint?: boolean; + signingService: string; + regionHash: RegionHash; + partitionHash: PartitionHash; +} +/** + * @internal + */ +export declare const getRegionInfo: (region: string, { useFipsEndpoint, useDualstackEndpoint, signingService, regionHash, partitionHash, }: GetRegionInfoOptions) => RegionInfo; diff --git a/amplify/functions/downloadDocument/node_modules/@smithy/config-resolver/dist-types/regionInfo/getResolvedHostname.d.ts b/amplify/functions/downloadDocument/node_modules/@smithy/config-resolver/dist-types/regionInfo/getResolvedHostname.d.ts new file mode 100644 index 0000000..bf7a2b3 --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@smithy/config-resolver/dist-types/regionInfo/getResolvedHostname.d.ts @@ -0,0 +1,11 @@ +/** + * @internal + */ +export interface GetResolvedHostnameOptions { + regionHostname?: string; + partitionHostname?: string; +} +/** + * @internal + */ +export declare const getResolvedHostname: (resolvedRegion: string, { regionHostname, partitionHostname }: GetResolvedHostnameOptions) => string | undefined; diff --git a/amplify/functions/downloadDocument/node_modules/@smithy/config-resolver/dist-types/regionInfo/getResolvedPartition.d.ts b/amplify/functions/downloadDocument/node_modules/@smithy/config-resolver/dist-types/regionInfo/getResolvedPartition.d.ts new file mode 100644 index 0000000..587b4fc --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@smithy/config-resolver/dist-types/regionInfo/getResolvedPartition.d.ts @@ -0,0 +1,11 @@ +import { PartitionHash } from "./PartitionHash"; +/** + * @internal + */ +export 
interface GetResolvedPartitionOptions { + partitionHash: PartitionHash; +} +/** + * @internal + */ +export declare const getResolvedPartition: (region: string, { partitionHash }: GetResolvedPartitionOptions) => string; diff --git a/amplify/functions/downloadDocument/node_modules/@smithy/config-resolver/dist-types/regionInfo/getResolvedSigningRegion.d.ts b/amplify/functions/downloadDocument/node_modules/@smithy/config-resolver/dist-types/regionInfo/getResolvedSigningRegion.d.ts new file mode 100644 index 0000000..3f5f7af --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@smithy/config-resolver/dist-types/regionInfo/getResolvedSigningRegion.d.ts @@ -0,0 +1,12 @@ +/** + * @internal + */ +export interface GetResolvedSigningRegionOptions { + regionRegex: string; + signingRegion?: string; + useFipsEndpoint: boolean; +} +/** + * @internal + */ +export declare const getResolvedSigningRegion: (hostname: string, { signingRegion, regionRegex, useFipsEndpoint }: GetResolvedSigningRegionOptions) => string | undefined; diff --git a/amplify/functions/downloadDocument/node_modules/@smithy/config-resolver/dist-types/regionInfo/index.d.ts b/amplify/functions/downloadDocument/node_modules/@smithy/config-resolver/dist-types/regionInfo/index.d.ts new file mode 100644 index 0000000..64ef0d5 --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@smithy/config-resolver/dist-types/regionInfo/index.d.ts @@ -0,0 +1,12 @@ +/** + * @internal + */ +export * from "./PartitionHash"; +/** + * @internal + */ +export * from "./RegionHash"; +/** + * @internal + */ +export * from "./getRegionInfo"; diff --git a/amplify/functions/downloadDocument/node_modules/@smithy/config-resolver/dist-types/ts3.4/endpointsConfig/NodeUseDualstackEndpointConfigOptions.d.ts b/amplify/functions/downloadDocument/node_modules/@smithy/config-resolver/dist-types/ts3.4/endpointsConfig/NodeUseDualstackEndpointConfigOptions.d.ts new file mode 100644 index 0000000..169720a --- /dev/null +++ 
b/amplify/functions/downloadDocument/node_modules/@smithy/config-resolver/dist-types/ts3.4/endpointsConfig/NodeUseDualstackEndpointConfigOptions.d.ts @@ -0,0 +1,17 @@ +import { LoadedConfigSelectors } from "@smithy/node-config-provider"; +/** + * @internal + */ +export declare const ENV_USE_DUALSTACK_ENDPOINT = "AWS_USE_DUALSTACK_ENDPOINT"; +/** + * @internal + */ +export declare const CONFIG_USE_DUALSTACK_ENDPOINT = "use_dualstack_endpoint"; +/** + * @internal + */ +export declare const DEFAULT_USE_DUALSTACK_ENDPOINT = false; +/** + * @internal + */ +export declare const NODE_USE_DUALSTACK_ENDPOINT_CONFIG_OPTIONS: LoadedConfigSelectors; diff --git a/amplify/functions/downloadDocument/node_modules/@smithy/config-resolver/dist-types/ts3.4/endpointsConfig/NodeUseFipsEndpointConfigOptions.d.ts b/amplify/functions/downloadDocument/node_modules/@smithy/config-resolver/dist-types/ts3.4/endpointsConfig/NodeUseFipsEndpointConfigOptions.d.ts new file mode 100644 index 0000000..b17417e --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@smithy/config-resolver/dist-types/ts3.4/endpointsConfig/NodeUseFipsEndpointConfigOptions.d.ts @@ -0,0 +1,17 @@ +import { LoadedConfigSelectors } from "@smithy/node-config-provider"; +/** + * @internal + */ +export declare const ENV_USE_FIPS_ENDPOINT = "AWS_USE_FIPS_ENDPOINT"; +/** + * @internal + */ +export declare const CONFIG_USE_FIPS_ENDPOINT = "use_fips_endpoint"; +/** + * @internal + */ +export declare const DEFAULT_USE_FIPS_ENDPOINT = false; +/** + * @internal + */ +export declare const NODE_USE_FIPS_ENDPOINT_CONFIG_OPTIONS: LoadedConfigSelectors; diff --git a/amplify/functions/downloadDocument/node_modules/@smithy/config-resolver/dist-types/ts3.4/endpointsConfig/index.d.ts b/amplify/functions/downloadDocument/node_modules/@smithy/config-resolver/dist-types/ts3.4/endpointsConfig/index.d.ts new file mode 100644 index 0000000..cbabe5b --- /dev/null +++ 
b/amplify/functions/downloadDocument/node_modules/@smithy/config-resolver/dist-types/ts3.4/endpointsConfig/index.d.ts @@ -0,0 +1,16 @@ +/** + * @internal + */ +export * from "./NodeUseDualstackEndpointConfigOptions"; +/** + * @internal + */ +export * from "./NodeUseFipsEndpointConfigOptions"; +/** + * @internal + */ +export * from "./resolveCustomEndpointsConfig"; +/** + * @internal + */ +export * from "./resolveEndpointsConfig"; diff --git a/amplify/functions/downloadDocument/node_modules/@smithy/config-resolver/dist-types/ts3.4/endpointsConfig/resolveCustomEndpointsConfig.d.ts b/amplify/functions/downloadDocument/node_modules/@smithy/config-resolver/dist-types/ts3.4/endpointsConfig/resolveCustomEndpointsConfig.d.ts new file mode 100644 index 0000000..f49306e --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@smithy/config-resolver/dist-types/ts3.4/endpointsConfig/resolveCustomEndpointsConfig.d.ts @@ -0,0 +1,32 @@ +import { Endpoint, Provider, UrlParser } from "@smithy/types"; +import { EndpointsInputConfig, EndpointsResolvedConfig } from "./resolveEndpointsConfig"; +/** + * @public + */ +export interface CustomEndpointsInputConfig extends EndpointsInputConfig { + /** + * The fully qualified endpoint of the webservice. + */ + endpoint: string | Endpoint | Provider; +} +/** + * @internal + */ +interface PreviouslyResolved { + urlParser: UrlParser; +} +/** + * @internal + */ +export interface CustomEndpointsResolvedConfig extends EndpointsResolvedConfig { + /** + * Whether the endpoint is specified by caller. 
+ * @internal + */ + isCustomEndpoint: true; +} +/** + * @internal + */ +export declare const resolveCustomEndpointsConfig: (input: T & CustomEndpointsInputConfig & PreviouslyResolved) => T & CustomEndpointsResolvedConfig; +export {}; diff --git a/amplify/functions/downloadDocument/node_modules/@smithy/config-resolver/dist-types/ts3.4/endpointsConfig/resolveEndpointsConfig.d.ts b/amplify/functions/downloadDocument/node_modules/@smithy/config-resolver/dist-types/ts3.4/endpointsConfig/resolveEndpointsConfig.d.ts new file mode 100644 index 0000000..388819d --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@smithy/config-resolver/dist-types/ts3.4/endpointsConfig/resolveEndpointsConfig.d.ts @@ -0,0 +1,54 @@ +import { Endpoint, Provider, RegionInfoProvider, UrlParser } from "@smithy/types"; +/** + * @public + */ +export interface EndpointsInputConfig { + /** + * The fully qualified endpoint of the webservice. This is only required when using + * a custom endpoint (for example, when using a local version of S3). + */ + endpoint?: string | Endpoint | Provider; + /** + * Whether TLS is enabled for requests. + */ + tls?: boolean; + /** + * Enables IPv6/IPv4 dualstack endpoint. + */ + useDualstackEndpoint?: boolean | Provider; +} +/** + * @internal + */ +interface PreviouslyResolved { + regionInfoProvider: RegionInfoProvider; + urlParser: UrlParser; + region: Provider; + useFipsEndpoint: Provider; +} +/** + * @internal + */ +export interface EndpointsResolvedConfig extends Required { + /** + * Resolved value for input {@link EndpointsInputConfig.endpoint} + */ + endpoint: Provider; + /** + * Whether the endpoint is specified by caller. + * @internal + */ + isCustomEndpoint?: boolean; + /** + * Resolved value for input {@link EndpointsInputConfig.useDualstackEndpoint} + */ + useDualstackEndpoint: Provider; +} +/** + * @internal + * + * @deprecated endpoints rulesets use \@smithy/middleware-endpoint resolveEndpointConfig. 
+ * All generated clients should migrate to Endpoints 2.0 endpointRuleSet traits. + */ +export declare const resolveEndpointsConfig: (input: T & EndpointsInputConfig & PreviouslyResolved) => T & EndpointsResolvedConfig; +export {}; diff --git a/amplify/functions/downloadDocument/node_modules/@smithy/config-resolver/dist-types/ts3.4/endpointsConfig/utils/getEndpointFromRegion.d.ts b/amplify/functions/downloadDocument/node_modules/@smithy/config-resolver/dist-types/ts3.4/endpointsConfig/utils/getEndpointFromRegion.d.ts new file mode 100644 index 0000000..83d4635 --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@smithy/config-resolver/dist-types/ts3.4/endpointsConfig/utils/getEndpointFromRegion.d.ts @@ -0,0 +1,11 @@ +import { Provider, RegionInfoProvider, UrlParser } from "@smithy/types"; +interface GetEndpointFromRegionOptions { + region: Provider; + tls?: boolean; + regionInfoProvider: RegionInfoProvider; + urlParser: UrlParser; + useDualstackEndpoint: Provider; + useFipsEndpoint: Provider; +} +export declare const getEndpointFromRegion: (input: GetEndpointFromRegionOptions) => Promise; +export {}; diff --git a/amplify/functions/downloadDocument/node_modules/@smithy/config-resolver/dist-types/ts3.4/index.d.ts b/amplify/functions/downloadDocument/node_modules/@smithy/config-resolver/dist-types/ts3.4/index.d.ts new file mode 100644 index 0000000..e205411 --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@smithy/config-resolver/dist-types/ts3.4/index.d.ts @@ -0,0 +1,12 @@ +/** + * @internal + */ +export * from "./endpointsConfig"; +/** + * @internal + */ +export * from "./regionConfig"; +/** + * @internal + */ +export * from "./regionInfo"; diff --git a/amplify/functions/downloadDocument/node_modules/@smithy/config-resolver/dist-types/ts3.4/regionConfig/config.d.ts b/amplify/functions/downloadDocument/node_modules/@smithy/config-resolver/dist-types/ts3.4/regionConfig/config.d.ts new file mode 100644 index 0000000..8f3a9b2 --- 
/dev/null +++ b/amplify/functions/downloadDocument/node_modules/@smithy/config-resolver/dist-types/ts3.4/regionConfig/config.d.ts @@ -0,0 +1,17 @@ +import { LoadedConfigSelectors, LocalConfigOptions } from "@smithy/node-config-provider"; +/** + * @internal + */ +export declare const REGION_ENV_NAME = "AWS_REGION"; +/** + * @internal + */ +export declare const REGION_INI_NAME = "region"; +/** + * @internal + */ +export declare const NODE_REGION_CONFIG_OPTIONS: LoadedConfigSelectors; +/** + * @internal + */ +export declare const NODE_REGION_CONFIG_FILE_OPTIONS: LocalConfigOptions; diff --git a/amplify/functions/downloadDocument/node_modules/@smithy/config-resolver/dist-types/ts3.4/regionConfig/getRealRegion.d.ts b/amplify/functions/downloadDocument/node_modules/@smithy/config-resolver/dist-types/ts3.4/regionConfig/getRealRegion.d.ts new file mode 100644 index 0000000..6c11d4d --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@smithy/config-resolver/dist-types/ts3.4/regionConfig/getRealRegion.d.ts @@ -0,0 +1,4 @@ +/** + * @internal + */ +export declare const getRealRegion: (region: string) => string; diff --git a/amplify/functions/downloadDocument/node_modules/@smithy/config-resolver/dist-types/ts3.4/regionConfig/index.d.ts b/amplify/functions/downloadDocument/node_modules/@smithy/config-resolver/dist-types/ts3.4/regionConfig/index.d.ts new file mode 100644 index 0000000..0e6f55d --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@smithy/config-resolver/dist-types/ts3.4/regionConfig/index.d.ts @@ -0,0 +1,8 @@ +/** + * @internal + */ +export * from "./config"; +/** + * @internal + */ +export * from "./resolveRegionConfig"; diff --git a/amplify/functions/downloadDocument/node_modules/@smithy/config-resolver/dist-types/ts3.4/regionConfig/isFipsRegion.d.ts b/amplify/functions/downloadDocument/node_modules/@smithy/config-resolver/dist-types/ts3.4/regionConfig/isFipsRegion.d.ts new file mode 100644 index 0000000..1ee8bd4 --- /dev/null +++ 
b/amplify/functions/downloadDocument/node_modules/@smithy/config-resolver/dist-types/ts3.4/regionConfig/isFipsRegion.d.ts @@ -0,0 +1,4 @@ +/** + * @internal + */ +export declare const isFipsRegion: (region: string) => boolean; diff --git a/amplify/functions/downloadDocument/node_modules/@smithy/config-resolver/dist-types/ts3.4/regionConfig/resolveRegionConfig.d.ts b/amplify/functions/downloadDocument/node_modules/@smithy/config-resolver/dist-types/ts3.4/regionConfig/resolveRegionConfig.d.ts new file mode 100644 index 0000000..7aaf9e1 --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@smithy/config-resolver/dist-types/ts3.4/regionConfig/resolveRegionConfig.d.ts @@ -0,0 +1,34 @@ +import { Provider } from "@smithy/types"; +/** + * @public + */ +export interface RegionInputConfig { + /** + * The AWS region to which this client will send requests + */ + region?: string | Provider; + /** + * Enables FIPS compatible endpoints. + */ + useFipsEndpoint?: boolean | Provider; +} +interface PreviouslyResolved { +} +/** + * @internal + */ +export interface RegionResolvedConfig { + /** + * Resolved value for input config {@link RegionInputConfig.region} + */ + region: Provider; + /** + * Resolved value for input {@link RegionInputConfig.useFipsEndpoint} + */ + useFipsEndpoint: Provider; +} +/** + * @internal + */ +export declare const resolveRegionConfig: (input: T & RegionInputConfig & PreviouslyResolved) => T & RegionResolvedConfig; +export {}; diff --git a/amplify/functions/downloadDocument/node_modules/@smithy/config-resolver/dist-types/ts3.4/regionInfo/EndpointVariant.d.ts b/amplify/functions/downloadDocument/node_modules/@smithy/config-resolver/dist-types/ts3.4/regionInfo/EndpointVariant.d.ts new file mode 100644 index 0000000..e533cc7 --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@smithy/config-resolver/dist-types/ts3.4/regionInfo/EndpointVariant.d.ts @@ -0,0 +1,10 @@ +import { EndpointVariantTag } from "./EndpointVariantTag"; +/** + 
* @internal + * + * Provides hostname information for specific host label. + */ +export type EndpointVariant = { + hostname: string; + tags: EndpointVariantTag[]; +}; diff --git a/amplify/functions/downloadDocument/node_modules/@smithy/config-resolver/dist-types/ts3.4/regionInfo/EndpointVariantTag.d.ts b/amplify/functions/downloadDocument/node_modules/@smithy/config-resolver/dist-types/ts3.4/regionInfo/EndpointVariantTag.d.ts new file mode 100644 index 0000000..755bbe5 --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@smithy/config-resolver/dist-types/ts3.4/regionInfo/EndpointVariantTag.d.ts @@ -0,0 +1,7 @@ +/** + * @internal + * + * The tag which mentions which area variant is providing information for. + * Can be either "fips" or "dualstack". + */ +export type EndpointVariantTag = "fips" | "dualstack"; diff --git a/amplify/functions/downloadDocument/node_modules/@smithy/config-resolver/dist-types/ts3.4/regionInfo/PartitionHash.d.ts b/amplify/functions/downloadDocument/node_modules/@smithy/config-resolver/dist-types/ts3.4/regionInfo/PartitionHash.d.ts new file mode 100644 index 0000000..6fed65e --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@smithy/config-resolver/dist-types/ts3.4/regionInfo/PartitionHash.d.ts @@ -0,0 +1,14 @@ +import { EndpointVariant } from "./EndpointVariant"; +/** + * @internal + * + * The hash of partition with the information specific to that partition. + * The information includes the list of regions belonging to that partition, + * and the hostname to be used for the partition. 
+ */ +export type PartitionHash = Record; diff --git a/amplify/functions/downloadDocument/node_modules/@smithy/config-resolver/dist-types/ts3.4/regionInfo/RegionHash.d.ts b/amplify/functions/downloadDocument/node_modules/@smithy/config-resolver/dist-types/ts3.4/regionInfo/RegionHash.d.ts new file mode 100644 index 0000000..cd90c70 --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@smithy/config-resolver/dist-types/ts3.4/regionInfo/RegionHash.d.ts @@ -0,0 +1,12 @@ +import { EndpointVariant } from "./EndpointVariant"; +/** + * @internal + * + * The hash of region with the information specific to that region. + * The information can include hostname, signingService and signingRegion. + */ +export type RegionHash = Record; diff --git a/amplify/functions/downloadDocument/node_modules/@smithy/config-resolver/dist-types/ts3.4/regionInfo/getHostnameFromVariants.d.ts b/amplify/functions/downloadDocument/node_modules/@smithy/config-resolver/dist-types/ts3.4/regionInfo/getHostnameFromVariants.d.ts new file mode 100644 index 0000000..3d61daa --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@smithy/config-resolver/dist-types/ts3.4/regionInfo/getHostnameFromVariants.d.ts @@ -0,0 +1,12 @@ +import { EndpointVariant } from "./EndpointVariant"; +/** + * @internal + */ +export interface GetHostnameFromVariantsOptions { + useFipsEndpoint: boolean; + useDualstackEndpoint: boolean; +} +/** + * @internal + */ +export declare const getHostnameFromVariants: (variants: EndpointVariant[] | undefined, { useFipsEndpoint, useDualstackEndpoint }: GetHostnameFromVariantsOptions) => string | undefined; diff --git a/amplify/functions/downloadDocument/node_modules/@smithy/config-resolver/dist-types/ts3.4/regionInfo/getRegionInfo.d.ts b/amplify/functions/downloadDocument/node_modules/@smithy/config-resolver/dist-types/ts3.4/regionInfo/getRegionInfo.d.ts new file mode 100644 index 0000000..820a548 --- /dev/null +++ 
b/amplify/functions/downloadDocument/node_modules/@smithy/config-resolver/dist-types/ts3.4/regionInfo/getRegionInfo.d.ts @@ -0,0 +1,17 @@ +import { RegionInfo } from "@smithy/types"; +import { PartitionHash } from "./PartitionHash"; +import { RegionHash } from "./RegionHash"; +/** + * @internal + */ +export interface GetRegionInfoOptions { + useFipsEndpoint?: boolean; + useDualstackEndpoint?: boolean; + signingService: string; + regionHash: RegionHash; + partitionHash: PartitionHash; +} +/** + * @internal + */ +export declare const getRegionInfo: (region: string, { useFipsEndpoint, useDualstackEndpoint, signingService, regionHash, partitionHash, }: GetRegionInfoOptions) => RegionInfo; diff --git a/amplify/functions/downloadDocument/node_modules/@smithy/config-resolver/dist-types/ts3.4/regionInfo/getResolvedHostname.d.ts b/amplify/functions/downloadDocument/node_modules/@smithy/config-resolver/dist-types/ts3.4/regionInfo/getResolvedHostname.d.ts new file mode 100644 index 0000000..6aae405 --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@smithy/config-resolver/dist-types/ts3.4/regionInfo/getResolvedHostname.d.ts @@ -0,0 +1,11 @@ +/** + * @internal + */ +export interface GetResolvedHostnameOptions { + regionHostname?: string; + partitionHostname?: string; +} +/** + * @internal + */ +export declare const getResolvedHostname: (resolvedRegion: string, { regionHostname, partitionHostname }: GetResolvedHostnameOptions) => string | undefined; diff --git a/amplify/functions/downloadDocument/node_modules/@smithy/config-resolver/dist-types/ts3.4/regionInfo/getResolvedPartition.d.ts b/amplify/functions/downloadDocument/node_modules/@smithy/config-resolver/dist-types/ts3.4/regionInfo/getResolvedPartition.d.ts new file mode 100644 index 0000000..355c318 --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@smithy/config-resolver/dist-types/ts3.4/regionInfo/getResolvedPartition.d.ts @@ -0,0 +1,11 @@ +import { PartitionHash } from 
"./PartitionHash"; +/** + * @internal + */ +export interface GetResolvedPartitionOptions { + partitionHash: PartitionHash; +} +/** + * @internal + */ +export declare const getResolvedPartition: (region: string, { partitionHash }: GetResolvedPartitionOptions) => string; diff --git a/amplify/functions/downloadDocument/node_modules/@smithy/config-resolver/dist-types/ts3.4/regionInfo/getResolvedSigningRegion.d.ts b/amplify/functions/downloadDocument/node_modules/@smithy/config-resolver/dist-types/ts3.4/regionInfo/getResolvedSigningRegion.d.ts new file mode 100644 index 0000000..a7b1db6 --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@smithy/config-resolver/dist-types/ts3.4/regionInfo/getResolvedSigningRegion.d.ts @@ -0,0 +1,12 @@ +/** + * @internal + */ +export interface GetResolvedSigningRegionOptions { + regionRegex: string; + signingRegion?: string; + useFipsEndpoint: boolean; +} +/** + * @internal + */ +export declare const getResolvedSigningRegion: (hostname: string, { signingRegion, regionRegex, useFipsEndpoint }: GetResolvedSigningRegionOptions) => string | undefined; diff --git a/amplify/functions/downloadDocument/node_modules/@smithy/config-resolver/dist-types/ts3.4/regionInfo/index.d.ts b/amplify/functions/downloadDocument/node_modules/@smithy/config-resolver/dist-types/ts3.4/regionInfo/index.d.ts new file mode 100644 index 0000000..5826308 --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@smithy/config-resolver/dist-types/ts3.4/regionInfo/index.d.ts @@ -0,0 +1,12 @@ +/** + * @internal + */ +export * from "./PartitionHash"; +/** + * @internal + */ +export * from "./RegionHash"; +/** + * @internal + */ +export * from "./getRegionInfo"; diff --git a/amplify/functions/downloadDocument/node_modules/@smithy/config-resolver/package.json b/amplify/functions/downloadDocument/node_modules/@smithy/config-resolver/package.json new file mode 100644 index 0000000..2c4927f --- /dev/null +++ 
b/amplify/functions/downloadDocument/node_modules/@smithy/config-resolver/package.json @@ -0,0 +1,64 @@ +{ + "name": "@smithy/config-resolver", + "version": "4.1.0", + "scripts": { + "build": "concurrently 'yarn:build:cjs' 'yarn:build:es' 'yarn:build:types && yarn build:types:downlevel'", + "build:cjs": "node ../../scripts/inline config-resolver", + "build:es": "yarn g:tsc -p tsconfig.es.json", + "build:types": "yarn g:tsc -p tsconfig.types.json", + "build:types:downlevel": "rimraf dist-types/ts3.4 && downlevel-dts dist-types dist-types/ts3.4", + "stage-release": "rimraf ./.release && yarn pack && mkdir ./.release && tar zxvf ./package.tgz --directory ./.release && rm ./package.tgz", + "clean": "rimraf ./dist-* && rimraf *.tsbuildinfo || exit 0", + "lint": "eslint -c ../../.eslintrc.js \"src/**/*.ts\"", + "format": "prettier --config ../../prettier.config.js --ignore-path ../../.prettierignore --write \"**/*.{ts,md,json}\"", + "test": "yarn g:vitest run", + "extract:docs": "api-extractor run --local", + "test:watch": "yarn g:vitest watch" + }, + "main": "./dist-cjs/index.js", + "module": "./dist-es/index.js", + "types": "./dist-types/index.d.ts", + "author": { + "name": "AWS SDK for JavaScript Team", + "url": "https://aws.amazon.com/javascript/" + }, + "license": "Apache-2.0", + "dependencies": { + "@smithy/node-config-provider": "^4.0.2", + "@smithy/types": "^4.2.0", + "@smithy/util-config-provider": "^4.0.0", + "@smithy/util-middleware": "^4.0.2", + "tslib": "^2.6.2" + }, + "devDependencies": { + "concurrently": "7.0.0", + "downlevel-dts": "0.10.1", + "rimraf": "3.0.2", + "typedoc": "0.23.23" + }, + "engines": { + "node": ">=18.0.0" + }, + "typesVersions": { + "<4.0": { + "dist-types/*": [ + "dist-types/ts3.4/*" + ] + } + }, + "files": [ + "dist-*/**" + ], + "homepage": "https://github.com/smithy-lang/smithy-typescript/tree/main/packages/config-resolver", + "repository": { + "type": "git", + "url": "https://github.com/smithy-lang/smithy-typescript.git", + 
"directory": "packages/config-resolver" + }, + "typedoc": { + "entryPoint": "src/index.ts" + }, + "publishConfig": { + "directory": ".release/package" + } +} \ No newline at end of file diff --git a/amplify/functions/downloadDocument/node_modules/@smithy/core/LICENSE b/amplify/functions/downloadDocument/node_modules/@smithy/core/LICENSE new file mode 100644 index 0000000..e907b58 --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@smithy/core/LICENSE @@ -0,0 +1,201 @@ + Apache License + Version 2.0, January 2004 + http://www.apache.org/licenses/ + + TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION + + 1. Definitions. + + "License" shall mean the terms and conditions for use, reproduction, + and distribution as defined by Sections 1 through 9 of this document. + + "Licensor" shall mean the copyright owner or entity authorized by + the copyright owner that is granting the License. + + "Legal Entity" shall mean the union of the acting entity and all + other entities that control, are controlled by, or are under common + control with that entity. For the purposes of this definition, + "control" means (i) the power, direct or indirect, to cause the + direction or management of such entity, whether by contract or + otherwise, or (ii) ownership of fifty percent (50%) or more of the + outstanding shares, or (iii) beneficial ownership of such entity. + + "You" (or "Your") shall mean an individual or Legal Entity + exercising permissions granted by this License. + + "Source" form shall mean the preferred form for making modifications, + including but not limited to software source code, documentation + source, and configuration files. + + "Object" form shall mean any form resulting from mechanical + transformation or translation of a Source form, including but + not limited to compiled object code, generated documentation, + and conversions to other media types. 
+ + "Work" shall mean the work of authorship, whether in Source or + Object form, made available under the License, as indicated by a + copyright notice that is included in or attached to the work + (an example is provided in the Appendix below). + + "Derivative Works" shall mean any work, whether in Source or Object + form, that is based on (or derived from) the Work and for which the + editorial revisions, annotations, elaborations, or other modifications + represent, as a whole, an original work of authorship. For the purposes + of this License, Derivative Works shall not include works that remain + separable from, or merely link (or bind by name) to the interfaces of, + the Work and Derivative Works thereof. + + "Contribution" shall mean any work of authorship, including + the original version of the Work and any modifications or additions + to that Work or Derivative Works thereof, that is intentionally + submitted to Licensor for inclusion in the Work by the copyright owner + or by an individual or Legal Entity authorized to submit on behalf of + the copyright owner. For the purposes of this definition, "submitted" + means any form of electronic, verbal, or written communication sent + to the Licensor or its representatives, including but not limited to + communication on electronic mailing lists, source code control systems, + and issue tracking systems that are managed by, or on behalf of, the + Licensor for the purpose of discussing and improving the Work, but + excluding communication that is conspicuously marked or otherwise + designated in writing by the copyright owner as "Not a Contribution." + + "Contributor" shall mean Licensor and any individual or Legal Entity + on behalf of whom a Contribution has been received by Licensor and + subsequently incorporated within the Work. + + 2. Grant of Copyright License. 
Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + copyright license to reproduce, prepare Derivative Works of, + publicly display, publicly perform, sublicense, and distribute the + Work and such Derivative Works in Source or Object form. + + 3. Grant of Patent License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + (except as stated in this section) patent license to make, have made, + use, offer to sell, sell, import, and otherwise transfer the Work, + where such license applies only to those patent claims licensable + by such Contributor that are necessarily infringed by their + Contribution(s) alone or by combination of their Contribution(s) + with the Work to which such Contribution(s) was submitted. If You + institute patent litigation against any entity (including a + cross-claim or counterclaim in a lawsuit) alleging that the Work + or a Contribution incorporated within the Work constitutes direct + or contributory patent infringement, then any patent licenses + granted to You under this License for that Work shall terminate + as of the date such litigation is filed. + + 4. Redistribution. 
You may reproduce and distribute copies of the + Work or Derivative Works thereof in any medium, with or without + modifications, and in Source or Object form, provided that You + meet the following conditions: + + (a) You must give any other recipients of the Work or + Derivative Works a copy of this License; and + + (b) You must cause any modified files to carry prominent notices + stating that You changed the files; and + + (c) You must retain, in the Source form of any Derivative Works + that You distribute, all copyright, patent, trademark, and + attribution notices from the Source form of the Work, + excluding those notices that do not pertain to any part of + the Derivative Works; and + + (d) If the Work includes a "NOTICE" text file as part of its + distribution, then any Derivative Works that You distribute must + include a readable copy of the attribution notices contained + within such NOTICE file, excluding those notices that do not + pertain to any part of the Derivative Works, in at least one + of the following places: within a NOTICE text file distributed + as part of the Derivative Works; within the Source form or + documentation, if provided along with the Derivative Works; or, + within a display generated by the Derivative Works, if and + wherever such third-party notices normally appear. The contents + of the NOTICE file are for informational purposes only and + do not modify the License. You may add Your own attribution + notices within Derivative Works that You distribute, alongside + or as an addendum to the NOTICE text from the Work, provided + that such additional attribution notices cannot be construed + as modifying the License. 
+ + You may add Your own copyright statement to Your modifications and + may provide additional or different license terms and conditions + for use, reproduction, or distribution of Your modifications, or + for any such Derivative Works as a whole, provided Your use, + reproduction, and distribution of the Work otherwise complies with + the conditions stated in this License. + + 5. Submission of Contributions. Unless You explicitly state otherwise, + any Contribution intentionally submitted for inclusion in the Work + by You to the Licensor shall be under the terms and conditions of + this License, without any additional terms or conditions. + Notwithstanding the above, nothing herein shall supersede or modify + the terms of any separate license agreement you may have executed + with Licensor regarding such Contributions. + + 6. Trademarks. This License does not grant permission to use the trade + names, trademarks, service marks, or product names of the Licensor, + except as required for reasonable and customary use in describing the + origin of the Work and reproducing the content of the NOTICE file. + + 7. Disclaimer of Warranty. Unless required by applicable law or + agreed to in writing, Licensor provides the Work (and each + Contributor provides its Contributions) on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or + implied, including, without limitation, any warranties or conditions + of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A + PARTICULAR PURPOSE. You are solely responsible for determining the + appropriateness of using or redistributing the Work and assume any + risks associated with Your exercise of permissions under this License. + + 8. Limitation of Liability. 
In no event and under no legal theory, + whether in tort (including negligence), contract, or otherwise, + unless required by applicable law (such as deliberate and grossly + negligent acts) or agreed to in writing, shall any Contributor be + liable to You for damages, including any direct, indirect, special, + incidental, or consequential damages of any character arising as a + result of this License or out of the use or inability to use the + Work (including but not limited to damages for loss of goodwill, + work stoppage, computer failure or malfunction, or any and all + other commercial damages or losses), even if such Contributor + has been advised of the possibility of such damages. + + 9. Accepting Warranty or Additional Liability. While redistributing + the Work or Derivative Works thereof, You may choose to offer, + and charge a fee for, acceptance of support, warranty, indemnity, + or other liability obligations and/or rights consistent with this + License. However, in accepting such obligations, You may act only + on Your own behalf and on Your sole responsibility, not on behalf + of any other Contributor, and only if You agree to indemnify, + defend, and hold each Contributor harmless for any liability + incurred by, or claims asserted against, such Contributor by reason + of your accepting any such warranty or additional liability. + + END OF TERMS AND CONDITIONS + + APPENDIX: How to apply the Apache License to your work. + + To apply the Apache License to your work, attach the following + boilerplate notice, with the fields enclosed by brackets "{}" + replaced with your own identifying information. (Don't include + the brackets!) The text should be enclosed in the appropriate + comment syntax for the file format. We also recommend that a + file or class name and description of purpose be included on the + same "printed page" as the copyright notice for easier + identification within third-party archives. + + Copyright 2019 Amazon.com, Inc. 
or its affiliates. All Rights Reserved. + + Licensed under the Apache License, Version 2.0 (the "License"); + you may not use this file except in compliance with the License. + You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + + Unless required by applicable law or agreed to in writing, software + distributed under the License is distributed on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + See the License for the specific language governing permissions and + limitations under the License. diff --git a/amplify/functions/downloadDocument/node_modules/@smithy/core/README.md b/amplify/functions/downloadDocument/node_modules/@smithy/core/README.md new file mode 100644 index 0000000..51f8922 --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@smithy/core/README.md @@ -0,0 +1,45 @@ +# @smithy/core + +[![NPM version](https://img.shields.io/npm/v/@smithy/core/latest.svg)](https://www.npmjs.com/package/@smithy/core) +[![NPM downloads](https://img.shields.io/npm/dm/@smithy/core.svg)](https://www.npmjs.com/package/@smithy/core) + +> An internal package. You probably shouldn't use this package, at least directly. + +This package provides common or core functionality for generic Smithy clients. + +You do not need to explicitly install this package, since it will be installed during code generation if used. + +## Development of `@smithy/core` submodules + +Core submodules are organized for distribution via the `package.json` `exports` field. + +`exports` is supported by default by the latest Node.js, webpack, and esbuild. For react-native, it can be +enabled via instructions found at [reactnative.dev/blog](https://reactnative.dev/blog/2023/06/21/package-exports-support), but we also provide a compatibility redirect. + +Think of `@smithy/core` as a mono-package within the monorepo. 
+It preserves the benefits of modularization, for example to optimize Node.js initialization speed, +while making it easier to have a consistent version of core dependencies, reducing package sprawl when +installing a Smithy runtime client. + +### Guide for submodules + +- Each `index.ts` file corresponding to the pattern `./src/submodules//index.ts` will be + published as a separate `dist-cjs` bundled submodule index using the `Inliner.js` build script. +- create a folder as `./src/submodules/` including an `index.ts` file and a `README.md` file. + - The linter will throw an error on missing submodule metadata in `package.json` and the various `tsconfig.json` files, but it will automatically fix them if possible. +- a submodule is equivalent to a standalone `@smithy/` package in that importing it in Node.js will resolve a separate bundle. +- submodules may not relatively import files from other submodules. Instead, directly use the `@scope/pkg/submodule` name as the import. + - The linter will check for this and throw an error. +- To the extent possible, correctly declaring submodule metadata is validated by the linter in `@smithy/core`. + The linter runs during `yarn build` and also as `yarn lint`. + +### When should I create an `@smithy/core/submodule` vs. `@smithy/new-package`? + +Keep in mind that the core package is installed by all downstream clients. + +If the component functionality is upstream of multiple clients, it is +a good candidate for a core submodule. For example, if `middleware-retry` had been written +after the support for submodules was added, it would have been a submodule. + +If the component's functionality is downstream of a client (rare), or only expected to be used by a very small +subset of clients, it could be written as a standalone package. 
diff --git a/amplify/functions/downloadDocument/node_modules/@smithy/core/cbor.d.ts b/amplify/functions/downloadDocument/node_modules/@smithy/core/cbor.d.ts new file mode 100644 index 0000000..c44b707 --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@smithy/core/cbor.d.ts @@ -0,0 +1,7 @@ +/** + * Do not edit: + * This is a compatibility redirect for contexts that do not understand package.json exports field. + */ +declare module "@smithy/core/cbor" { + export * from "@smithy/core/dist-types/submodules/cbor/index.d"; +} diff --git a/amplify/functions/downloadDocument/node_modules/@smithy/core/cbor.js b/amplify/functions/downloadDocument/node_modules/@smithy/core/cbor.js new file mode 100644 index 0000000..710fb79 --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@smithy/core/cbor.js @@ -0,0 +1,6 @@ + +/** + * Do not edit: + * This is a compatibility redirect for contexts that do not understand package.json exports field. + */ +module.exports = require("./dist-cjs/submodules/cbor/index.js"); diff --git a/amplify/functions/downloadDocument/node_modules/@smithy/core/dist-cjs/getSmithyContext.js b/amplify/functions/downloadDocument/node_modules/@smithy/core/dist-cjs/getSmithyContext.js new file mode 100644 index 0000000..532e610 --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@smithy/core/dist-cjs/getSmithyContext.js @@ -0,0 +1 @@ +module.exports = require("./index.js"); \ No newline at end of file diff --git a/amplify/functions/downloadDocument/node_modules/@smithy/core/dist-cjs/index.js b/amplify/functions/downloadDocument/node_modules/@smithy/core/dist-cjs/index.js new file mode 100644 index 0000000..a3735f6 --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@smithy/core/dist-cjs/index.js @@ -0,0 +1,454 @@ +var __defProp = Object.defineProperty; +var __getOwnPropDesc = Object.getOwnPropertyDescriptor; +var __getOwnPropNames = Object.getOwnPropertyNames; +var __hasOwnProp = 
Object.prototype.hasOwnProperty; +var __name = (target, value) => __defProp(target, "name", { value, configurable: true }); +var __export = (target, all) => { + for (var name in all) + __defProp(target, name, { get: all[name], enumerable: true }); +}; +var __copyProps = (to, from, except, desc) => { + if (from && typeof from === "object" || typeof from === "function") { + for (let key of __getOwnPropNames(from)) + if (!__hasOwnProp.call(to, key) && key !== except) + __defProp(to, key, { get: () => from[key], enumerable: !(desc = __getOwnPropDesc(from, key)) || desc.enumerable }); + } + return to; +}; +var __toCommonJS = (mod) => __copyProps(__defProp({}, "__esModule", { value: true }), mod); + +// src/index.ts +var src_exports = {}; +__export(src_exports, { + DefaultIdentityProviderConfig: () => DefaultIdentityProviderConfig, + EXPIRATION_MS: () => EXPIRATION_MS, + HttpApiKeyAuthSigner: () => HttpApiKeyAuthSigner, + HttpBearerAuthSigner: () => HttpBearerAuthSigner, + NoAuthSigner: () => NoAuthSigner, + createIsIdentityExpiredFunction: () => createIsIdentityExpiredFunction, + createPaginator: () => createPaginator, + doesIdentityRequireRefresh: () => doesIdentityRequireRefresh, + getHttpAuthSchemeEndpointRuleSetPlugin: () => getHttpAuthSchemeEndpointRuleSetPlugin, + getHttpAuthSchemePlugin: () => getHttpAuthSchemePlugin, + getHttpSigningPlugin: () => getHttpSigningPlugin, + getSmithyContext: () => getSmithyContext, + httpAuthSchemeEndpointRuleSetMiddlewareOptions: () => httpAuthSchemeEndpointRuleSetMiddlewareOptions, + httpAuthSchemeMiddleware: () => httpAuthSchemeMiddleware, + httpAuthSchemeMiddlewareOptions: () => httpAuthSchemeMiddlewareOptions, + httpSigningMiddleware: () => httpSigningMiddleware, + httpSigningMiddlewareOptions: () => httpSigningMiddlewareOptions, + isIdentityExpired: () => isIdentityExpired, + memoizeIdentityProvider: () => memoizeIdentityProvider, + normalizeProvider: () => normalizeProvider, + requestBuilder: () => 
import_protocols.requestBuilder, + setFeature: () => setFeature +}); +module.exports = __toCommonJS(src_exports); + +// src/getSmithyContext.ts +var import_types = require("@smithy/types"); +var getSmithyContext = /* @__PURE__ */ __name((context) => context[import_types.SMITHY_CONTEXT_KEY] || (context[import_types.SMITHY_CONTEXT_KEY] = {}), "getSmithyContext"); + +// src/middleware-http-auth-scheme/httpAuthSchemeMiddleware.ts +var import_util_middleware = require("@smithy/util-middleware"); + +// src/middleware-http-auth-scheme/resolveAuthOptions.ts +var resolveAuthOptions = /* @__PURE__ */ __name((candidateAuthOptions, authSchemePreference) => { + if (!authSchemePreference || authSchemePreference.length === 0) { + return candidateAuthOptions; + } + const preferredAuthOptions = []; + for (const preferredSchemeName of authSchemePreference) { + for (const candidateAuthOption of candidateAuthOptions) { + const candidateAuthSchemeName = candidateAuthOption.schemeId.split("#")[1]; + if (candidateAuthSchemeName === preferredSchemeName) { + preferredAuthOptions.push(candidateAuthOption); + } + } + } + for (const candidateAuthOption of candidateAuthOptions) { + if (!preferredAuthOptions.find(({ schemeId }) => schemeId === candidateAuthOption.schemeId)) { + preferredAuthOptions.push(candidateAuthOption); + } + } + return preferredAuthOptions; +}, "resolveAuthOptions"); + +// src/middleware-http-auth-scheme/httpAuthSchemeMiddleware.ts +function convertHttpAuthSchemesToMap(httpAuthSchemes) { + const map = /* @__PURE__ */ new Map(); + for (const scheme of httpAuthSchemes) { + map.set(scheme.schemeId, scheme); + } + return map; +} +__name(convertHttpAuthSchemesToMap, "convertHttpAuthSchemesToMap"); +var httpAuthSchemeMiddleware = /* @__PURE__ */ __name((config, mwOptions) => (next, context) => async (args) => { + const options = config.httpAuthSchemeProvider( + await mwOptions.httpAuthSchemeParametersProvider(config, context, args.input) + ); + const authSchemePreference = 
config.authSchemePreference ? await config.authSchemePreference() : []; + const resolvedOptions = resolveAuthOptions(options, authSchemePreference); + const authSchemes = convertHttpAuthSchemesToMap(config.httpAuthSchemes); + const smithyContext = (0, import_util_middleware.getSmithyContext)(context); + const failureReasons = []; + for (const option of resolvedOptions) { + const scheme = authSchemes.get(option.schemeId); + if (!scheme) { + failureReasons.push(`HttpAuthScheme \`${option.schemeId}\` was not enabled for this service.`); + continue; + } + const identityProvider = scheme.identityProvider(await mwOptions.identityProviderConfigProvider(config)); + if (!identityProvider) { + failureReasons.push(`HttpAuthScheme \`${option.schemeId}\` did not have an IdentityProvider configured.`); + continue; + } + const { identityProperties = {}, signingProperties = {} } = option.propertiesExtractor?.(config, context) || {}; + option.identityProperties = Object.assign(option.identityProperties || {}, identityProperties); + option.signingProperties = Object.assign(option.signingProperties || {}, signingProperties); + smithyContext.selectedHttpAuthScheme = { + httpAuthOption: option, + identity: await identityProvider(option.identityProperties), + signer: scheme.signer + }; + break; + } + if (!smithyContext.selectedHttpAuthScheme) { + throw new Error(failureReasons.join("\n")); + } + return next(args); +}, "httpAuthSchemeMiddleware"); + +// src/middleware-http-auth-scheme/getHttpAuthSchemeEndpointRuleSetPlugin.ts +var httpAuthSchemeEndpointRuleSetMiddlewareOptions = { + step: "serialize", + tags: ["HTTP_AUTH_SCHEME"], + name: "httpAuthSchemeMiddleware", + override: true, + relation: "before", + toMiddleware: "endpointV2Middleware" +}; +var getHttpAuthSchemeEndpointRuleSetPlugin = /* @__PURE__ */ __name((config, { + httpAuthSchemeParametersProvider, + identityProviderConfigProvider +}) => ({ + applyToStack: (clientStack) => { + clientStack.addRelativeTo( + 
httpAuthSchemeMiddleware(config, { + httpAuthSchemeParametersProvider, + identityProviderConfigProvider + }), + httpAuthSchemeEndpointRuleSetMiddlewareOptions + ); + } +}), "getHttpAuthSchemeEndpointRuleSetPlugin"); + +// src/middleware-http-auth-scheme/getHttpAuthSchemePlugin.ts +var import_middleware_serde = require("@smithy/middleware-serde"); +var httpAuthSchemeMiddlewareOptions = { + step: "serialize", + tags: ["HTTP_AUTH_SCHEME"], + name: "httpAuthSchemeMiddleware", + override: true, + relation: "before", + toMiddleware: import_middleware_serde.serializerMiddlewareOption.name +}; +var getHttpAuthSchemePlugin = /* @__PURE__ */ __name((config, { + httpAuthSchemeParametersProvider, + identityProviderConfigProvider +}) => ({ + applyToStack: (clientStack) => { + clientStack.addRelativeTo( + httpAuthSchemeMiddleware(config, { + httpAuthSchemeParametersProvider, + identityProviderConfigProvider + }), + httpAuthSchemeMiddlewareOptions + ); + } +}), "getHttpAuthSchemePlugin"); + +// src/middleware-http-signing/httpSigningMiddleware.ts +var import_protocol_http = require("@smithy/protocol-http"); + +var defaultErrorHandler = /* @__PURE__ */ __name((signingProperties) => (error) => { + throw error; +}, "defaultErrorHandler"); +var defaultSuccessHandler = /* @__PURE__ */ __name((httpResponse, signingProperties) => { +}, "defaultSuccessHandler"); +var httpSigningMiddleware = /* @__PURE__ */ __name((config) => (next, context) => async (args) => { + if (!import_protocol_http.HttpRequest.isInstance(args.request)) { + return next(args); + } + const smithyContext = (0, import_util_middleware.getSmithyContext)(context); + const scheme = smithyContext.selectedHttpAuthScheme; + if (!scheme) { + throw new Error(`No HttpAuthScheme was selected: unable to sign request`); + } + const { + httpAuthOption: { signingProperties = {} }, + identity, + signer + } = scheme; + const output = await next({ + ...args, + request: await signer.sign(args.request, identity, signingProperties) + 
}).catch((signer.errorHandler || defaultErrorHandler)(signingProperties)); + (signer.successHandler || defaultSuccessHandler)(output.response, signingProperties); + return output; +}, "httpSigningMiddleware"); + +// src/middleware-http-signing/getHttpSigningMiddleware.ts +var httpSigningMiddlewareOptions = { + step: "finalizeRequest", + tags: ["HTTP_SIGNING"], + name: "httpSigningMiddleware", + aliases: ["apiKeyMiddleware", "tokenMiddleware", "awsAuthMiddleware"], + override: true, + relation: "after", + toMiddleware: "retryMiddleware" +}; +var getHttpSigningPlugin = /* @__PURE__ */ __name((config) => ({ + applyToStack: (clientStack) => { + clientStack.addRelativeTo(httpSigningMiddleware(config), httpSigningMiddlewareOptions); + } +}), "getHttpSigningPlugin"); + +// src/normalizeProvider.ts +var normalizeProvider = /* @__PURE__ */ __name((input) => { + if (typeof input === "function") + return input; + const promisified = Promise.resolve(input); + return () => promisified; +}, "normalizeProvider"); + +// src/pagination/createPaginator.ts +var makePagedClientRequest = /* @__PURE__ */ __name(async (CommandCtor, client, input, withCommand = (_) => _, ...args) => { + let command = new CommandCtor(input); + command = withCommand(command) ?? command; + return await client.send(command, ...args); +}, "makePagedClientRequest"); +function createPaginator(ClientCtor, CommandCtor, inputTokenName, outputTokenName, pageSizeTokenName) { + return /* @__PURE__ */ __name(async function* paginateOperation(config, input, ...additionalArguments) { + const _input = input; + let token = config.startingToken ?? _input[inputTokenName]; + let hasNext = true; + let page; + while (hasNext) { + _input[inputTokenName] = token; + if (pageSizeTokenName) { + _input[pageSizeTokenName] = _input[pageSizeTokenName] ?? 
config.pageSize; + } + if (config.client instanceof ClientCtor) { + page = await makePagedClientRequest( + CommandCtor, + config.client, + input, + config.withCommand, + ...additionalArguments + ); + } else { + throw new Error(`Invalid client, expected instance of ${ClientCtor.name}`); + } + yield page; + const prevToken = token; + token = get(page, outputTokenName); + hasNext = !!(token && (!config.stopOnSameToken || token !== prevToken)); + } + return void 0; + }, "paginateOperation"); +} +__name(createPaginator, "createPaginator"); +var get = /* @__PURE__ */ __name((fromObject, path) => { + let cursor = fromObject; + const pathComponents = path.split("."); + for (const step of pathComponents) { + if (!cursor || typeof cursor !== "object") { + return void 0; + } + cursor = cursor[step]; + } + return cursor; +}, "get"); + +// src/protocols/requestBuilder.ts +var import_protocols = require("@smithy/core/protocols"); + +// src/setFeature.ts +function setFeature(context, feature, value) { + if (!context.__smithy_context) { + context.__smithy_context = { + features: {} + }; + } else if (!context.__smithy_context.features) { + context.__smithy_context.features = {}; + } + context.__smithy_context.features[feature] = value; +} +__name(setFeature, "setFeature"); + +// src/util-identity-and-auth/DefaultIdentityProviderConfig.ts +var DefaultIdentityProviderConfig = class { + /** + * Creates an IdentityProviderConfig with a record of scheme IDs to identity providers. 
+ * + * @param config scheme IDs and identity providers to configure + */ + constructor(config) { + this.authSchemes = /* @__PURE__ */ new Map(); + for (const [key, value] of Object.entries(config)) { + if (value !== void 0) { + this.authSchemes.set(key, value); + } + } + } + static { + __name(this, "DefaultIdentityProviderConfig"); + } + getIdentityProvider(schemeId) { + return this.authSchemes.get(schemeId); + } +}; + +// src/util-identity-and-auth/httpAuthSchemes/httpApiKeyAuth.ts + + +var HttpApiKeyAuthSigner = class { + static { + __name(this, "HttpApiKeyAuthSigner"); + } + async sign(httpRequest, identity, signingProperties) { + if (!signingProperties) { + throw new Error( + "request could not be signed with `apiKey` since the `name` and `in` signer properties are missing" + ); + } + if (!signingProperties.name) { + throw new Error("request could not be signed with `apiKey` since the `name` signer property is missing"); + } + if (!signingProperties.in) { + throw new Error("request could not be signed with `apiKey` since the `in` signer property is missing"); + } + if (!identity.apiKey) { + throw new Error("request could not be signed with `apiKey` since the `apiKey` is not defined"); + } + const clonedRequest = import_protocol_http.HttpRequest.clone(httpRequest); + if (signingProperties.in === import_types.HttpApiKeyAuthLocation.QUERY) { + clonedRequest.query[signingProperties.name] = identity.apiKey; + } else if (signingProperties.in === import_types.HttpApiKeyAuthLocation.HEADER) { + clonedRequest.headers[signingProperties.name] = signingProperties.scheme ? 
`${signingProperties.scheme} ${identity.apiKey}` : identity.apiKey; + } else { + throw new Error( + "request can only be signed with `apiKey` locations `query` or `header`, but found: `" + signingProperties.in + "`" + ); + } + return clonedRequest; + } +}; + +// src/util-identity-and-auth/httpAuthSchemes/httpBearerAuth.ts + +var HttpBearerAuthSigner = class { + static { + __name(this, "HttpBearerAuthSigner"); + } + async sign(httpRequest, identity, signingProperties) { + const clonedRequest = import_protocol_http.HttpRequest.clone(httpRequest); + if (!identity.token) { + throw new Error("request could not be signed with `token` since the `token` is not defined"); + } + clonedRequest.headers["Authorization"] = `Bearer ${identity.token}`; + return clonedRequest; + } +}; + +// src/util-identity-and-auth/httpAuthSchemes/noAuth.ts +var NoAuthSigner = class { + static { + __name(this, "NoAuthSigner"); + } + async sign(httpRequest, identity, signingProperties) { + return httpRequest; + } +}; + +// src/util-identity-and-auth/memoizeIdentityProvider.ts +var createIsIdentityExpiredFunction = /* @__PURE__ */ __name((expirationMs) => (identity) => doesIdentityRequireRefresh(identity) && identity.expiration.getTime() - Date.now() < expirationMs, "createIsIdentityExpiredFunction"); +var EXPIRATION_MS = 3e5; +var isIdentityExpired = createIsIdentityExpiredFunction(EXPIRATION_MS); +var doesIdentityRequireRefresh = /* @__PURE__ */ __name((identity) => identity.expiration !== void 0, "doesIdentityRequireRefresh"); +var memoizeIdentityProvider = /* @__PURE__ */ __name((provider, isExpired, requiresRefresh) => { + if (provider === void 0) { + return void 0; + } + const normalizedProvider = typeof provider !== "function" ? 
async () => Promise.resolve(provider) : provider; + let resolved; + let pending; + let hasResult; + let isConstant = false; + const coalesceProvider = /* @__PURE__ */ __name(async (options) => { + if (!pending) { + pending = normalizedProvider(options); + } + try { + resolved = await pending; + hasResult = true; + isConstant = false; + } finally { + pending = void 0; + } + return resolved; + }, "coalesceProvider"); + if (isExpired === void 0) { + return async (options) => { + if (!hasResult || options?.forceRefresh) { + resolved = await coalesceProvider(options); + } + return resolved; + }; + } + return async (options) => { + if (!hasResult || options?.forceRefresh) { + resolved = await coalesceProvider(options); + } + if (isConstant) { + return resolved; + } + if (!requiresRefresh(resolved)) { + isConstant = true; + return resolved; + } + if (isExpired(resolved)) { + await coalesceProvider(options); + return resolved; + } + return resolved; + }; +}, "memoizeIdentityProvider"); +// Annotate the CommonJS export names for ESM import in node: + +0 && (module.exports = { + createPaginator, + getSmithyContext, + httpAuthSchemeMiddleware, + httpAuthSchemeEndpointRuleSetMiddlewareOptions, + getHttpAuthSchemeEndpointRuleSetPlugin, + httpAuthSchemeMiddlewareOptions, + getHttpAuthSchemePlugin, + httpSigningMiddleware, + httpSigningMiddlewareOptions, + getHttpSigningPlugin, + normalizeProvider, + requestBuilder, + setFeature, + DefaultIdentityProviderConfig, + HttpApiKeyAuthSigner, + HttpBearerAuthSigner, + NoAuthSigner, + createIsIdentityExpiredFunction, + EXPIRATION_MS, + isIdentityExpired, + doesIdentityRequireRefresh, + memoizeIdentityProvider +}); + diff --git a/amplify/functions/downloadDocument/node_modules/@smithy/core/dist-cjs/middleware-http-auth-scheme/getHttpAuthSchemeEndpointRuleSetPlugin.js b/amplify/functions/downloadDocument/node_modules/@smithy/core/dist-cjs/middleware-http-auth-scheme/getHttpAuthSchemeEndpointRuleSetPlugin.js new file mode 100644 index 
0000000..0440577 --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@smithy/core/dist-cjs/middleware-http-auth-scheme/getHttpAuthSchemeEndpointRuleSetPlugin.js @@ -0,0 +1 @@ +module.exports = require("../index.js"); \ No newline at end of file diff --git a/amplify/functions/downloadDocument/node_modules/@smithy/core/dist-cjs/middleware-http-auth-scheme/getHttpAuthSchemePlugin.js b/amplify/functions/downloadDocument/node_modules/@smithy/core/dist-cjs/middleware-http-auth-scheme/getHttpAuthSchemePlugin.js new file mode 100644 index 0000000..0440577 --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@smithy/core/dist-cjs/middleware-http-auth-scheme/getHttpAuthSchemePlugin.js @@ -0,0 +1 @@ +module.exports = require("../index.js"); \ No newline at end of file diff --git a/amplify/functions/downloadDocument/node_modules/@smithy/core/dist-cjs/middleware-http-auth-scheme/httpAuthSchemeMiddleware.js b/amplify/functions/downloadDocument/node_modules/@smithy/core/dist-cjs/middleware-http-auth-scheme/httpAuthSchemeMiddleware.js new file mode 100644 index 0000000..0440577 --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@smithy/core/dist-cjs/middleware-http-auth-scheme/httpAuthSchemeMiddleware.js @@ -0,0 +1 @@ +module.exports = require("../index.js"); \ No newline at end of file diff --git a/amplify/functions/downloadDocument/node_modules/@smithy/core/dist-cjs/middleware-http-auth-scheme/index.js b/amplify/functions/downloadDocument/node_modules/@smithy/core/dist-cjs/middleware-http-auth-scheme/index.js new file mode 100644 index 0000000..0440577 --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@smithy/core/dist-cjs/middleware-http-auth-scheme/index.js @@ -0,0 +1 @@ +module.exports = require("../index.js"); \ No newline at end of file diff --git a/amplify/functions/downloadDocument/node_modules/@smithy/core/dist-cjs/middleware-http-auth-scheme/resolveAuthOptions.js 
b/amplify/functions/downloadDocument/node_modules/@smithy/core/dist-cjs/middleware-http-auth-scheme/resolveAuthOptions.js new file mode 100644 index 0000000..0440577 --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@smithy/core/dist-cjs/middleware-http-auth-scheme/resolveAuthOptions.js @@ -0,0 +1 @@ +module.exports = require("../index.js"); \ No newline at end of file diff --git a/amplify/functions/downloadDocument/node_modules/@smithy/core/dist-cjs/middleware-http-signing/getHttpSigningMiddleware.js b/amplify/functions/downloadDocument/node_modules/@smithy/core/dist-cjs/middleware-http-signing/getHttpSigningMiddleware.js new file mode 100644 index 0000000..0440577 --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@smithy/core/dist-cjs/middleware-http-signing/getHttpSigningMiddleware.js @@ -0,0 +1 @@ +module.exports = require("../index.js"); \ No newline at end of file diff --git a/amplify/functions/downloadDocument/node_modules/@smithy/core/dist-cjs/middleware-http-signing/httpSigningMiddleware.js b/amplify/functions/downloadDocument/node_modules/@smithy/core/dist-cjs/middleware-http-signing/httpSigningMiddleware.js new file mode 100644 index 0000000..0440577 --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@smithy/core/dist-cjs/middleware-http-signing/httpSigningMiddleware.js @@ -0,0 +1 @@ +module.exports = require("../index.js"); \ No newline at end of file diff --git a/amplify/functions/downloadDocument/node_modules/@smithy/core/dist-cjs/middleware-http-signing/index.js b/amplify/functions/downloadDocument/node_modules/@smithy/core/dist-cjs/middleware-http-signing/index.js new file mode 100644 index 0000000..0440577 --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@smithy/core/dist-cjs/middleware-http-signing/index.js @@ -0,0 +1 @@ +module.exports = require("../index.js"); \ No newline at end of file diff --git 
a/amplify/functions/downloadDocument/node_modules/@smithy/core/dist-cjs/normalizeProvider.js b/amplify/functions/downloadDocument/node_modules/@smithy/core/dist-cjs/normalizeProvider.js new file mode 100644 index 0000000..532e610 --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@smithy/core/dist-cjs/normalizeProvider.js @@ -0,0 +1 @@ +module.exports = require("./index.js"); \ No newline at end of file diff --git a/amplify/functions/downloadDocument/node_modules/@smithy/core/dist-cjs/pagination/createPaginator.js b/amplify/functions/downloadDocument/node_modules/@smithy/core/dist-cjs/pagination/createPaginator.js new file mode 100644 index 0000000..0440577 --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@smithy/core/dist-cjs/pagination/createPaginator.js @@ -0,0 +1 @@ +module.exports = require("../index.js"); \ No newline at end of file diff --git a/amplify/functions/downloadDocument/node_modules/@smithy/core/dist-cjs/protocols/requestBuilder.js b/amplify/functions/downloadDocument/node_modules/@smithy/core/dist-cjs/protocols/requestBuilder.js new file mode 100644 index 0000000..0440577 --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@smithy/core/dist-cjs/protocols/requestBuilder.js @@ -0,0 +1 @@ +module.exports = require("../index.js"); \ No newline at end of file diff --git a/amplify/functions/downloadDocument/node_modules/@smithy/core/dist-cjs/setFeature.js b/amplify/functions/downloadDocument/node_modules/@smithy/core/dist-cjs/setFeature.js new file mode 100644 index 0000000..532e610 --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@smithy/core/dist-cjs/setFeature.js @@ -0,0 +1 @@ +module.exports = require("./index.js"); \ No newline at end of file diff --git a/amplify/functions/downloadDocument/node_modules/@smithy/core/dist-cjs/submodules/cbor/index.js b/amplify/functions/downloadDocument/node_modules/@smithy/core/dist-cjs/submodules/cbor/index.js new file mode 100644 index 
0000000..0f69723 --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@smithy/core/dist-cjs/submodules/cbor/index.js @@ -0,0 +1,733 @@ +var __defProp = Object.defineProperty; +var __getOwnPropDesc = Object.getOwnPropertyDescriptor; +var __getOwnPropNames = Object.getOwnPropertyNames; +var __hasOwnProp = Object.prototype.hasOwnProperty; +var __export = (target, all) => { + for (var name in all) + __defProp(target, name, { get: all[name], enumerable: true }); +}; +var __copyProps = (to, from, except, desc) => { + if (from && typeof from === "object" || typeof from === "function") { + for (let key of __getOwnPropNames(from)) + if (!__hasOwnProp.call(to, key) && key !== except) + __defProp(to, key, { get: () => from[key], enumerable: !(desc = __getOwnPropDesc(from, key)) || desc.enumerable }); + } + return to; +}; +var __toCommonJS = (mod) => __copyProps(__defProp({}, "__esModule", { value: true }), mod); + +// src/submodules/cbor/index.ts +var cbor_exports = {}; +__export(cbor_exports, { + buildHttpRpcRequest: () => buildHttpRpcRequest, + cbor: () => cbor, + checkCborResponse: () => checkCborResponse, + dateToTag: () => dateToTag, + loadSmithyRpcV2CborErrorCode: () => loadSmithyRpcV2CborErrorCode, + parseCborBody: () => parseCborBody, + parseCborErrorBody: () => parseCborErrorBody, + tag: () => tag, + tagSymbol: () => tagSymbol +}); +module.exports = __toCommonJS(cbor_exports); + +// src/submodules/cbor/cbor-decode.ts +var import_util_utf8 = require("@smithy/util-utf8"); + +// src/submodules/cbor/cbor-types.ts +var majorUint64 = 0; +var majorNegativeInt64 = 1; +var majorUnstructuredByteString = 2; +var majorUtf8String = 3; +var majorList = 4; +var majorMap = 5; +var majorTag = 6; +var majorSpecial = 7; +var specialFalse = 20; +var specialTrue = 21; +var specialNull = 22; +var specialUndefined = 23; +var extendedOneByte = 24; +var extendedFloat16 = 25; +var extendedFloat32 = 26; +var extendedFloat64 = 27; +var minorIndefinite = 31; +function alloc(size) 
{ + return typeof Buffer !== "undefined" ? Buffer.alloc(size) : new Uint8Array(size); +} +var tagSymbol = Symbol("@smithy/core/cbor::tagSymbol"); +function tag(data2) { + data2[tagSymbol] = true; + return data2; +} + +// src/submodules/cbor/cbor-decode.ts +var USE_TEXT_DECODER = typeof TextDecoder !== "undefined"; +var USE_BUFFER = typeof Buffer !== "undefined"; +var payload = alloc(0); +var dataView = new DataView(payload.buffer, payload.byteOffset, payload.byteLength); +var textDecoder = USE_TEXT_DECODER ? new TextDecoder() : null; +var _offset = 0; +function setPayload(bytes) { + payload = bytes; + dataView = new DataView(payload.buffer, payload.byteOffset, payload.byteLength); +} +function decode(at, to) { + if (at >= to) { + throw new Error("unexpected end of (decode) payload."); + } + const major = (payload[at] & 224) >> 5; + const minor = payload[at] & 31; + switch (major) { + case majorUint64: + case majorNegativeInt64: + case majorTag: + let unsignedInt; + let offset; + if (minor < 24) { + unsignedInt = minor; + offset = 1; + } else { + switch (minor) { + case extendedOneByte: + case extendedFloat16: + case extendedFloat32: + case extendedFloat64: + const countLength = minorValueToArgumentLength[minor]; + const countOffset = countLength + 1; + offset = countOffset; + if (to - at < countOffset) { + throw new Error(`countLength ${countLength} greater than remaining buf len.`); + } + const countIndex = at + 1; + if (countLength === 1) { + unsignedInt = payload[countIndex]; + } else if (countLength === 2) { + unsignedInt = dataView.getUint16(countIndex); + } else if (countLength === 4) { + unsignedInt = dataView.getUint32(countIndex); + } else { + unsignedInt = dataView.getBigUint64(countIndex); + } + break; + default: + throw new Error(`unexpected minor value ${minor}.`); + } + } + if (major === majorUint64) { + _offset = offset; + return castBigInt(unsignedInt); + } else if (major === majorNegativeInt64) { + let negativeInt; + if (typeof unsignedInt === 
"bigint") { + negativeInt = BigInt(-1) - unsignedInt; + } else { + negativeInt = -1 - unsignedInt; + } + _offset = offset; + return castBigInt(negativeInt); + } else { + const value = decode(at + offset, to); + const valueOffset = _offset; + _offset = offset + valueOffset; + return tag({ tag: castBigInt(unsignedInt), value }); + } + case majorUtf8String: + case majorMap: + case majorList: + case majorUnstructuredByteString: + if (minor === minorIndefinite) { + switch (major) { + case majorUtf8String: + return decodeUtf8StringIndefinite(at, to); + case majorMap: + return decodeMapIndefinite(at, to); + case majorList: + return decodeListIndefinite(at, to); + case majorUnstructuredByteString: + return decodeUnstructuredByteStringIndefinite(at, to); + } + } else { + switch (major) { + case majorUtf8String: + return decodeUtf8String(at, to); + case majorMap: + return decodeMap(at, to); + case majorList: + return decodeList(at, to); + case majorUnstructuredByteString: + return decodeUnstructuredByteString(at, to); + } + } + default: + return decodeSpecial(at, to); + } +} +function bytesToUtf8(bytes, at, to) { + if (USE_BUFFER && bytes.constructor?.name === "Buffer") { + return bytes.toString("utf-8", at, to); + } + if (textDecoder) { + return textDecoder.decode(bytes.subarray(at, to)); + } + return (0, import_util_utf8.toUtf8)(bytes.subarray(at, to)); +} +function demote(bigInteger) { + const num = Number(bigInteger); + if (num < Number.MIN_SAFE_INTEGER || Number.MAX_SAFE_INTEGER < num) { + console.warn(new Error(`@smithy/core/cbor - truncating BigInt(${bigInteger}) to ${num} with loss of precision.`)); + } + return num; +} +var minorValueToArgumentLength = { + [extendedOneByte]: 1, + [extendedFloat16]: 2, + [extendedFloat32]: 4, + [extendedFloat64]: 8 +}; +function bytesToFloat16(a, b) { + const sign = a >> 7; + const exponent = (a & 124) >> 2; + const fraction = (a & 3) << 8 | b; + const scalar = sign === 0 ? 
1 : -1; + let exponentComponent; + let summation; + if (exponent === 0) { + if (fraction === 0) { + return 0; + } else { + exponentComponent = Math.pow(2, 1 - 15); + summation = 0; + } + } else if (exponent === 31) { + if (fraction === 0) { + return scalar * Infinity; + } else { + return NaN; + } + } else { + exponentComponent = Math.pow(2, exponent - 15); + summation = 1; + } + summation += fraction / 1024; + return scalar * (exponentComponent * summation); +} +function decodeCount(at, to) { + const minor = payload[at] & 31; + if (minor < 24) { + _offset = 1; + return minor; + } + if (minor === extendedOneByte || minor === extendedFloat16 || minor === extendedFloat32 || minor === extendedFloat64) { + const countLength = minorValueToArgumentLength[minor]; + _offset = countLength + 1; + if (to - at < _offset) { + throw new Error(`countLength ${countLength} greater than remaining buf len.`); + } + const countIndex = at + 1; + if (countLength === 1) { + return payload[countIndex]; + } else if (countLength === 2) { + return dataView.getUint16(countIndex); + } else if (countLength === 4) { + return dataView.getUint32(countIndex); + } + return demote(dataView.getBigUint64(countIndex)); + } + throw new Error(`unexpected minor value ${minor}.`); +} +function decodeUtf8String(at, to) { + const length = decodeCount(at, to); + const offset = _offset; + at += offset; + if (to - at < length) { + throw new Error(`string len ${length} greater than remaining buf len.`); + } + const value = bytesToUtf8(payload, at, at + length); + _offset = offset + length; + return value; +} +function decodeUtf8StringIndefinite(at, to) { + at += 1; + const vector = []; + for (const base = at; at < to; ) { + if (payload[at] === 255) { + const data2 = alloc(vector.length); + data2.set(vector, 0); + _offset = at - base + 2; + return bytesToUtf8(data2, 0, data2.length); + } + const major = (payload[at] & 224) >> 5; + const minor = payload[at] & 31; + if (major !== majorUtf8String) { + throw new 
Error(`unexpected major type ${major} in indefinite string.`); + } + if (minor === minorIndefinite) { + throw new Error("nested indefinite string."); + } + const bytes = decodeUnstructuredByteString(at, to); + const length = _offset; + at += length; + for (let i = 0; i < bytes.length; ++i) { + vector.push(bytes[i]); + } + } + throw new Error("expected break marker."); +} +function decodeUnstructuredByteString(at, to) { + const length = decodeCount(at, to); + const offset = _offset; + at += offset; + if (to - at < length) { + throw new Error(`unstructured byte string len ${length} greater than remaining buf len.`); + } + const value = payload.subarray(at, at + length); + _offset = offset + length; + return value; +} +function decodeUnstructuredByteStringIndefinite(at, to) { + at += 1; + const vector = []; + for (const base = at; at < to; ) { + if (payload[at] === 255) { + const data2 = alloc(vector.length); + data2.set(vector, 0); + _offset = at - base + 2; + return data2; + } + const major = (payload[at] & 224) >> 5; + const minor = payload[at] & 31; + if (major !== majorUnstructuredByteString) { + throw new Error(`unexpected major type ${major} in indefinite string.`); + } + if (minor === minorIndefinite) { + throw new Error("nested indefinite string."); + } + const bytes = decodeUnstructuredByteString(at, to); + const length = _offset; + at += length; + for (let i = 0; i < bytes.length; ++i) { + vector.push(bytes[i]); + } + } + throw new Error("expected break marker."); +} +function decodeList(at, to) { + const listDataLength = decodeCount(at, to); + const offset = _offset; + at += offset; + const base = at; + const list = Array(listDataLength); + for (let i = 0; i < listDataLength; ++i) { + const item = decode(at, to); + const itemOffset = _offset; + list[i] = item; + at += itemOffset; + } + _offset = offset + (at - base); + return list; +} +function decodeListIndefinite(at, to) { + at += 1; + const list = []; + for (const base = at; at < to; ) { + if 
(payload[at] === 255) { + _offset = at - base + 2; + return list; + } + const item = decode(at, to); + const n = _offset; + at += n; + list.push(item); + } + throw new Error("expected break marker."); +} +function decodeMap(at, to) { + const mapDataLength = decodeCount(at, to); + const offset = _offset; + at += offset; + const base = at; + const map = {}; + for (let i = 0; i < mapDataLength; ++i) { + if (at >= to) { + throw new Error("unexpected end of map payload."); + } + const major = (payload[at] & 224) >> 5; + if (major !== majorUtf8String) { + throw new Error(`unexpected major type ${major} for map key at index ${at}.`); + } + const key = decode(at, to); + at += _offset; + const value = decode(at, to); + at += _offset; + map[key] = value; + } + _offset = offset + (at - base); + return map; +} +function decodeMapIndefinite(at, to) { + at += 1; + const base = at; + const map = {}; + for (; at < to; ) { + if (at >= to) { + throw new Error("unexpected end of map payload."); + } + if (payload[at] === 255) { + _offset = at - base + 2; + return map; + } + const major = (payload[at] & 224) >> 5; + if (major !== majorUtf8String) { + throw new Error(`unexpected major type ${major} for map key.`); + } + const key = decode(at, to); + at += _offset; + const value = decode(at, to); + at += _offset; + map[key] = value; + } + throw new Error("expected break marker."); +} +function decodeSpecial(at, to) { + const minor = payload[at] & 31; + switch (minor) { + case specialTrue: + case specialFalse: + _offset = 1; + return minor === specialTrue; + case specialNull: + _offset = 1; + return null; + case specialUndefined: + _offset = 1; + return null; + case extendedFloat16: + if (to - at < 3) { + throw new Error("incomplete float16 at end of buf."); + } + _offset = 3; + return bytesToFloat16(payload[at + 1], payload[at + 2]); + case extendedFloat32: + if (to - at < 5) { + throw new Error("incomplete float32 at end of buf."); + } + _offset = 5; + return dataView.getFloat32(at + 
1); + case extendedFloat64: + if (to - at < 9) { + throw new Error("incomplete float64 at end of buf."); + } + _offset = 9; + return dataView.getFloat64(at + 1); + default: + throw new Error(`unexpected minor value ${minor}.`); + } +} +function castBigInt(bigInt) { + if (typeof bigInt === "number") { + return bigInt; + } + const num = Number(bigInt); + if (Number.MIN_SAFE_INTEGER <= num && num <= Number.MAX_SAFE_INTEGER) { + return num; + } + return bigInt; +} + +// src/submodules/cbor/cbor-encode.ts +var import_util_utf82 = require("@smithy/util-utf8"); +var USE_BUFFER2 = typeof Buffer !== "undefined"; +var initialSize = 2048; +var data = alloc(initialSize); +var dataView2 = new DataView(data.buffer, data.byteOffset, data.byteLength); +var cursor = 0; +function ensureSpace(bytes) { + const remaining = data.byteLength - cursor; + if (remaining < bytes) { + if (cursor < 16e6) { + resize(Math.max(data.byteLength * 4, data.byteLength + bytes)); + } else { + resize(data.byteLength + bytes + 16e6); + } + } +} +function toUint8Array() { + const out = alloc(cursor); + out.set(data.subarray(0, cursor), 0); + cursor = 0; + return out; +} +function resize(size) { + const old = data; + data = alloc(size); + if (old) { + if (old.copy) { + old.copy(data, 0, 0, old.byteLength); + } else { + data.set(old, 0); + } + } + dataView2 = new DataView(data.buffer, data.byteOffset, data.byteLength); +} +function encodeHeader(major, value) { + if (value < 24) { + data[cursor++] = major << 5 | value; + } else if (value < 1 << 8) { + data[cursor++] = major << 5 | 24; + data[cursor++] = value; + } else if (value < 1 << 16) { + data[cursor++] = major << 5 | extendedFloat16; + dataView2.setUint16(cursor, value); + cursor += 2; + } else if (value < 2 ** 32) { + data[cursor++] = major << 5 | extendedFloat32; + dataView2.setUint32(cursor, value); + cursor += 4; + } else { + data[cursor++] = major << 5 | extendedFloat64; + dataView2.setBigUint64(cursor, typeof value === "bigint" ? 
value : BigInt(value)); + cursor += 8; + } +} +function encode(_input) { + const encodeStack = [_input]; + while (encodeStack.length) { + const input = encodeStack.pop(); + ensureSpace(typeof input === "string" ? input.length * 4 : 64); + if (typeof input === "string") { + if (USE_BUFFER2) { + encodeHeader(majorUtf8String, Buffer.byteLength(input)); + cursor += data.write(input, cursor); + } else { + const bytes = (0, import_util_utf82.fromUtf8)(input); + encodeHeader(majorUtf8String, bytes.byteLength); + data.set(bytes, cursor); + cursor += bytes.byteLength; + } + continue; + } else if (typeof input === "number") { + if (Number.isInteger(input)) { + const nonNegative = input >= 0; + const major = nonNegative ? majorUint64 : majorNegativeInt64; + const value = nonNegative ? input : -input - 1; + if (value < 24) { + data[cursor++] = major << 5 | value; + } else if (value < 256) { + data[cursor++] = major << 5 | 24; + data[cursor++] = value; + } else if (value < 65536) { + data[cursor++] = major << 5 | extendedFloat16; + data[cursor++] = value >> 8; + data[cursor++] = value; + } else if (value < 4294967296) { + data[cursor++] = major << 5 | extendedFloat32; + dataView2.setUint32(cursor, value); + cursor += 4; + } else { + data[cursor++] = major << 5 | extendedFloat64; + dataView2.setBigUint64(cursor, BigInt(value)); + cursor += 8; + } + continue; + } + data[cursor++] = majorSpecial << 5 | extendedFloat64; + dataView2.setFloat64(cursor, input); + cursor += 8; + continue; + } else if (typeof input === "bigint") { + const nonNegative = input >= 0; + const major = nonNegative ? majorUint64 : majorNegativeInt64; + const value = nonNegative ? 
input : -input - BigInt(1); + const n = Number(value); + if (n < 24) { + data[cursor++] = major << 5 | n; + } else if (n < 256) { + data[cursor++] = major << 5 | 24; + data[cursor++] = n; + } else if (n < 65536) { + data[cursor++] = major << 5 | extendedFloat16; + data[cursor++] = n >> 8; + data[cursor++] = n & 255; + } else if (n < 4294967296) { + data[cursor++] = major << 5 | extendedFloat32; + dataView2.setUint32(cursor, n); + cursor += 4; + } else { + data[cursor++] = major << 5 | extendedFloat64; + dataView2.setBigUint64(cursor, value); + cursor += 8; + } + continue; + } else if (input === null) { + data[cursor++] = majorSpecial << 5 | specialNull; + continue; + } else if (typeof input === "boolean") { + data[cursor++] = majorSpecial << 5 | (input ? specialTrue : specialFalse); + continue; + } else if (typeof input === "undefined") { + throw new Error("@smithy/core/cbor: client may not serialize undefined value."); + } else if (Array.isArray(input)) { + for (let i = input.length - 1; i >= 0; --i) { + encodeStack.push(input[i]); + } + encodeHeader(majorList, input.length); + continue; + } else if (typeof input.byteLength === "number") { + ensureSpace(input.length * 2); + encodeHeader(majorUnstructuredByteString, input.length); + data.set(input, cursor); + cursor += input.byteLength; + continue; + } else if (typeof input === "object") { + if (input[tagSymbol]) { + if ("tag" in input && "value" in input) { + encodeStack.push(input.value); + encodeHeader(majorTag, input.tag); + continue; + } else { + throw new Error( + "tag encountered with missing fields, need 'tag' and 'value', found: " + JSON.stringify(input) + ); + } + } + const keys = Object.keys(input); + for (let i = keys.length - 1; i >= 0; --i) { + const key = keys[i]; + encodeStack.push(input[key]); + encodeStack.push(key); + } + encodeHeader(majorMap, keys.length); + continue; + } + throw new Error(`data type ${input?.constructor?.name ?? 
typeof input} not compatible for encoding.`); + } +} + +// src/submodules/cbor/cbor.ts +var cbor = { + deserialize(payload2) { + setPayload(payload2); + return decode(0, payload2.length); + }, + serialize(input) { + try { + encode(input); + return toUint8Array(); + } catch (e) { + toUint8Array(); + throw e; + } + }, + /** + * @public + * @param size - byte length to allocate. + * + * This may be used to garbage collect the CBOR + * shared encoding buffer space, + * e.g. resizeEncodingBuffer(0); + * + * This may also be used to pre-allocate more space for + * CBOR encoding, e.g. resizeEncodingBuffer(100_000_000); + */ + resizeEncodingBuffer(size) { + resize(size); + } +}; + +// src/submodules/cbor/parseCborBody.ts +var import_protocols = require("@smithy/core/protocols"); +var import_protocol_http = require("@smithy/protocol-http"); +var import_util_body_length_browser = require("@smithy/util-body-length-browser"); +var parseCborBody = (streamBody, context) => { + return (0, import_protocols.collectBody)(streamBody, context).then(async (bytes) => { + if (bytes.length) { + try { + return cbor.deserialize(bytes); + } catch (e) { + Object.defineProperty(e, "$responseBodyText", { + value: context.utf8Encoder(bytes) + }); + throw e; + } + } + return {}; + }); +}; +var dateToTag = (date) => { + return tag({ + tag: 1, + value: date.getTime() / 1e3 + }); +}; +var parseCborErrorBody = async (errorBody, context) => { + const value = await parseCborBody(errorBody, context); + value.message = value.message ?? 
value.Message; + return value; +}; +var loadSmithyRpcV2CborErrorCode = (output, data2) => { + const sanitizeErrorCode = (rawValue) => { + let cleanValue = rawValue; + if (typeof cleanValue === "number") { + cleanValue = cleanValue.toString(); + } + if (cleanValue.indexOf(",") >= 0) { + cleanValue = cleanValue.split(",")[0]; + } + if (cleanValue.indexOf(":") >= 0) { + cleanValue = cleanValue.split(":")[0]; + } + if (cleanValue.indexOf("#") >= 0) { + cleanValue = cleanValue.split("#")[1]; + } + return cleanValue; + }; + if (data2["__type"] !== void 0) { + return sanitizeErrorCode(data2["__type"]); + } + if (data2.code !== void 0) { + return sanitizeErrorCode(data2.code); + } +}; +var checkCborResponse = (response) => { + if (String(response.headers["smithy-protocol"]).toLowerCase() !== "rpc-v2-cbor") { + throw new Error("Malformed RPCv2 CBOR response, status: " + response.statusCode); + } +}; +var buildHttpRpcRequest = async (context, headers, path, resolvedHostname, body) => { + const { hostname, protocol = "https", port, path: basePath } = await context.endpoint(); + const contents = { + protocol, + hostname, + port, + method: "POST", + path: basePath.endsWith("/") ? basePath.slice(0, -1) + path : basePath + path, + headers: { + // intentional copy. 
+ ...headers + } + }; + if (resolvedHostname !== void 0) { + contents.hostname = resolvedHostname; + } + if (body !== void 0) { + contents.body = body; + try { + contents.headers["content-length"] = String((0, import_util_body_length_browser.calculateBodyLength)(body)); + } catch (e) { + } + } + return new import_protocol_http.HttpRequest(contents); +}; +// Annotate the CommonJS export names for ESM import in node: +0 && (module.exports = { + buildHttpRpcRequest, + cbor, + checkCborResponse, + dateToTag, + loadSmithyRpcV2CborErrorCode, + parseCborBody, + parseCborErrorBody, + tag, + tagSymbol +}); diff --git a/amplify/functions/downloadDocument/node_modules/@smithy/core/dist-cjs/submodules/protocols/index.js b/amplify/functions/downloadDocument/node_modules/@smithy/core/dist-cjs/submodules/protocols/index.js new file mode 100644 index 0000000..455a5de --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@smithy/core/dist-cjs/submodules/protocols/index.js @@ -0,0 +1,164 @@ +var __defProp = Object.defineProperty; +var __getOwnPropDesc = Object.getOwnPropertyDescriptor; +var __getOwnPropNames = Object.getOwnPropertyNames; +var __hasOwnProp = Object.prototype.hasOwnProperty; +var __export = (target, all) => { + for (var name in all) + __defProp(target, name, { get: all[name], enumerable: true }); +}; +var __copyProps = (to, from, except, desc) => { + if (from && typeof from === "object" || typeof from === "function") { + for (let key of __getOwnPropNames(from)) + if (!__hasOwnProp.call(to, key) && key !== except) + __defProp(to, key, { get: () => from[key], enumerable: !(desc = __getOwnPropDesc(from, key)) || desc.enumerable }); + } + return to; +}; +var __toCommonJS = (mod) => __copyProps(__defProp({}, "__esModule", { value: true }), mod); + +// src/submodules/protocols/index.ts +var protocols_exports = {}; +__export(protocols_exports, { + RequestBuilder: () => RequestBuilder, + collectBody: () => collectBody, + extendedEncodeURIComponent: () => 
extendedEncodeURIComponent, + requestBuilder: () => requestBuilder, + resolvedPath: () => resolvedPath +}); +module.exports = __toCommonJS(protocols_exports); + +// src/submodules/protocols/collect-stream-body.ts +var import_util_stream = require("@smithy/util-stream"); +var collectBody = async (streamBody = new Uint8Array(), context) => { + if (streamBody instanceof Uint8Array) { + return import_util_stream.Uint8ArrayBlobAdapter.mutate(streamBody); + } + if (!streamBody) { + return import_util_stream.Uint8ArrayBlobAdapter.mutate(new Uint8Array()); + } + const fromContext = context.streamCollector(streamBody); + return import_util_stream.Uint8ArrayBlobAdapter.mutate(await fromContext); +}; + +// src/submodules/protocols/extended-encode-uri-component.ts +function extendedEncodeURIComponent(str) { + return encodeURIComponent(str).replace(/[!'()*]/g, function(c) { + return "%" + c.charCodeAt(0).toString(16).toUpperCase(); + }); +} + +// src/submodules/protocols/requestBuilder.ts +var import_protocol_http = require("@smithy/protocol-http"); + +// src/submodules/protocols/resolve-path.ts +var resolvedPath = (resolvedPath2, input, memberName, labelValueProvider, uriLabel, isGreedyLabel) => { + if (input != null && input[memberName] !== void 0) { + const labelValue = labelValueProvider(); + if (labelValue.length <= 0) { + throw new Error("Empty value provided for input HTTP label: " + memberName + "."); + } + resolvedPath2 = resolvedPath2.replace( + uriLabel, + isGreedyLabel ? 
labelValue.split("/").map((segment) => extendedEncodeURIComponent(segment)).join("/") : extendedEncodeURIComponent(labelValue) + ); + } else { + throw new Error("No value provided for input HTTP label: " + memberName + "."); + } + return resolvedPath2; +}; + +// src/submodules/protocols/requestBuilder.ts +function requestBuilder(input, context) { + return new RequestBuilder(input, context); +} +var RequestBuilder = class { + constructor(input, context) { + this.input = input; + this.context = context; + this.query = {}; + this.method = ""; + this.headers = {}; + this.path = ""; + this.body = null; + this.hostname = ""; + this.resolvePathStack = []; + } + async build() { + const { hostname, protocol = "https", port, path: basePath } = await this.context.endpoint(); + this.path = basePath; + for (const resolvePath of this.resolvePathStack) { + resolvePath(this.path); + } + return new import_protocol_http.HttpRequest({ + protocol, + hostname: this.hostname || hostname, + port, + method: this.method, + path: this.path, + query: this.query, + body: this.body, + headers: this.headers + }); + } + /** + * Brevity setter for "hostname". + */ + hn(hostname) { + this.hostname = hostname; + return this; + } + /** + * Brevity initial builder for "basepath". + */ + bp(uriLabel) { + this.resolvePathStack.push((basePath) => { + this.path = `${basePath?.endsWith("/") ? basePath.slice(0, -1) : basePath || ""}` + uriLabel; + }); + return this; + } + /** + * Brevity incremental builder for "path". + */ + p(memberName, labelValueProvider, uriLabel, isGreedyLabel) { + this.resolvePathStack.push((path) => { + this.path = resolvedPath(path, this.input, memberName, labelValueProvider, uriLabel, isGreedyLabel); + }); + return this; + } + /** + * Brevity setter for "headers". + */ + h(headers) { + this.headers = headers; + return this; + } + /** + * Brevity setter for "query". + */ + q(query) { + this.query = query; + return this; + } + /** + * Brevity setter for "body". 
// src/submodules/serde/value/NumericValue.ts

/**
 * Exact-precision numeric value carried as its original source string so
 * that serialization never loses digits to floating-point conversion.
 */
var NumericValue = class {
  /**
   * @param string - unparsed numeric literal, e.g. "123.000000001".
   * @param type - Smithy numeric kind tag, e.g. "bigDecimal".
   */
  constructor(string, type) {
    this.string = string;
    this.type = type;
  }
};

/**
 * Shorthand constructor for a "bigDecimal" NumericValue.
 *
 * @param string - unparsed decimal literal.
 * @returns a NumericValue tagged "bigDecimal".
 */
function nv(string) {
  return new NumericValue(string, "bigDecimal");
}
node: +0 && (module.exports = { + NumericValue, + nv +}); diff --git a/amplify/functions/downloadDocument/node_modules/@smithy/core/dist-cjs/util-identity-and-auth/DefaultIdentityProviderConfig.js b/amplify/functions/downloadDocument/node_modules/@smithy/core/dist-cjs/util-identity-and-auth/DefaultIdentityProviderConfig.js new file mode 100644 index 0000000..0440577 --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@smithy/core/dist-cjs/util-identity-and-auth/DefaultIdentityProviderConfig.js @@ -0,0 +1 @@ +module.exports = require("../index.js"); \ No newline at end of file diff --git a/amplify/functions/downloadDocument/node_modules/@smithy/core/dist-cjs/util-identity-and-auth/httpAuthSchemes/httpApiKeyAuth.js b/amplify/functions/downloadDocument/node_modules/@smithy/core/dist-cjs/util-identity-and-auth/httpAuthSchemes/httpApiKeyAuth.js new file mode 100644 index 0000000..8817412 --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@smithy/core/dist-cjs/util-identity-and-auth/httpAuthSchemes/httpApiKeyAuth.js @@ -0,0 +1 @@ +module.exports = require("../../index.js"); \ No newline at end of file diff --git a/amplify/functions/downloadDocument/node_modules/@smithy/core/dist-cjs/util-identity-and-auth/httpAuthSchemes/httpBearerAuth.js b/amplify/functions/downloadDocument/node_modules/@smithy/core/dist-cjs/util-identity-and-auth/httpAuthSchemes/httpBearerAuth.js new file mode 100644 index 0000000..8817412 --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@smithy/core/dist-cjs/util-identity-and-auth/httpAuthSchemes/httpBearerAuth.js @@ -0,0 +1 @@ +module.exports = require("../../index.js"); \ No newline at end of file diff --git a/amplify/functions/downloadDocument/node_modules/@smithy/core/dist-cjs/util-identity-and-auth/httpAuthSchemes/index.js b/amplify/functions/downloadDocument/node_modules/@smithy/core/dist-cjs/util-identity-and-auth/httpAuthSchemes/index.js new file mode 100644 index 0000000..8817412 --- 
/dev/null +++ b/amplify/functions/downloadDocument/node_modules/@smithy/core/dist-cjs/util-identity-and-auth/httpAuthSchemes/index.js @@ -0,0 +1 @@ +module.exports = require("../../index.js"); \ No newline at end of file diff --git a/amplify/functions/downloadDocument/node_modules/@smithy/core/dist-cjs/util-identity-and-auth/httpAuthSchemes/noAuth.js b/amplify/functions/downloadDocument/node_modules/@smithy/core/dist-cjs/util-identity-and-auth/httpAuthSchemes/noAuth.js new file mode 100644 index 0000000..8817412 --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@smithy/core/dist-cjs/util-identity-and-auth/httpAuthSchemes/noAuth.js @@ -0,0 +1 @@ +module.exports = require("../../index.js"); \ No newline at end of file diff --git a/amplify/functions/downloadDocument/node_modules/@smithy/core/dist-cjs/util-identity-and-auth/index.js b/amplify/functions/downloadDocument/node_modules/@smithy/core/dist-cjs/util-identity-and-auth/index.js new file mode 100644 index 0000000..0440577 --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@smithy/core/dist-cjs/util-identity-and-auth/index.js @@ -0,0 +1 @@ +module.exports = require("../index.js"); \ No newline at end of file diff --git a/amplify/functions/downloadDocument/node_modules/@smithy/core/dist-cjs/util-identity-and-auth/memoizeIdentityProvider.js b/amplify/functions/downloadDocument/node_modules/@smithy/core/dist-cjs/util-identity-and-auth/memoizeIdentityProvider.js new file mode 100644 index 0000000..0440577 --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@smithy/core/dist-cjs/util-identity-and-auth/memoizeIdentityProvider.js @@ -0,0 +1 @@ +module.exports = require("../index.js"); \ No newline at end of file diff --git a/amplify/functions/downloadDocument/node_modules/@smithy/core/dist-es/getSmithyContext.js b/amplify/functions/downloadDocument/node_modules/@smithy/core/dist-es/getSmithyContext.js new file mode 100644 index 0000000..3848a0c --- /dev/null +++ 
// dist-es/getSmithyContext.js
import { SMITHY_CONTEXT_KEY } from "@smithy/types";

/**
 * Returns the Smithy-private slot of a handler context, creating it on
 * first access.
 */
export const getSmithyContext = (context) => context[SMITHY_CONTEXT_KEY] || (context[SMITHY_CONTEXT_KEY] = {});

// dist-es/index.js
export * from "./getSmithyContext";
export * from "./middleware-http-auth-scheme";
export * from "./middleware-http-signing";
export * from "./normalizeProvider";
export { createPaginator } from "./pagination/createPaginator";
export * from "./protocols/requestBuilder";
export * from "./setFeature";
export * from "./util-identity-and-auth";

// dist-es/middleware-http-auth-scheme/getHttpAuthSchemeEndpointRuleSetPlugin.js
import { httpAuthSchemeMiddleware } from "./httpAuthSchemeMiddleware";

/**
 * Relative middleware placement: run auth-scheme selection during the
 * "serialize" step, immediately before endpointV2Middleware.
 */
export const httpAuthSchemeEndpointRuleSetMiddlewareOptions = {
  step: "serialize",
  tags: ["HTTP_AUTH_SCHEME"],
  name: "httpAuthSchemeMiddleware",
  override: true,
  relation: "before",
  toMiddleware: "endpointV2Middleware",
};

/**
 * Plugin registering httpAuthSchemeMiddleware for clients that resolve
 * endpoints via endpoint rule sets (Endpoints 2.0).
 */
export const getHttpAuthSchemeEndpointRuleSetPlugin = (
  config,
  { httpAuthSchemeParametersProvider, identityProviderConfigProvider }
) => ({
  applyToStack: (clientStack) => {
    clientStack.addRelativeTo(
      httpAuthSchemeMiddleware(config, {
        httpAuthSchemeParametersProvider,
        identityProviderConfigProvider,
      }),
      httpAuthSchemeEndpointRuleSetMiddlewareOptions
    );
  },
});
// dist-es/middleware-http-auth-scheme/getHttpAuthSchemePlugin.js
import { serializerMiddlewareOption } from "@smithy/middleware-serde";
import { httpAuthSchemeMiddleware } from "./httpAuthSchemeMiddleware";

/**
 * Relative middleware placement: run auth-scheme selection during the
 * "serialize" step, immediately before the serializer middleware.
 */
export const httpAuthSchemeMiddlewareOptions = {
  step: "serialize",
  tags: ["HTTP_AUTH_SCHEME"],
  name: "httpAuthSchemeMiddleware",
  override: true,
  relation: "before",
  toMiddleware: serializerMiddlewareOption.name,
};

/**
 * Plugin registering httpAuthSchemeMiddleware for classic (non-rule-set)
 * clients.
 */
export const getHttpAuthSchemePlugin = (
  config,
  { httpAuthSchemeParametersProvider, identityProviderConfigProvider }
) => ({
  applyToStack: (clientStack) => {
    clientStack.addRelativeTo(
      httpAuthSchemeMiddleware(config, {
        httpAuthSchemeParametersProvider,
        identityProviderConfigProvider,
      }),
      httpAuthSchemeMiddlewareOptions
    );
  },
});
from "./resolveAuthOptions"; +function convertHttpAuthSchemesToMap(httpAuthSchemes) { + const map = new Map(); + for (const scheme of httpAuthSchemes) { + map.set(scheme.schemeId, scheme); + } + return map; +} +export const httpAuthSchemeMiddleware = (config, mwOptions) => (next, context) => async (args) => { + const options = config.httpAuthSchemeProvider(await mwOptions.httpAuthSchemeParametersProvider(config, context, args.input)); + const authSchemePreference = config.authSchemePreference ? await config.authSchemePreference() : []; + const resolvedOptions = resolveAuthOptions(options, authSchemePreference); + const authSchemes = convertHttpAuthSchemesToMap(config.httpAuthSchemes); + const smithyContext = getSmithyContext(context); + const failureReasons = []; + for (const option of resolvedOptions) { + const scheme = authSchemes.get(option.schemeId); + if (!scheme) { + failureReasons.push(`HttpAuthScheme \`${option.schemeId}\` was not enabled for this service.`); + continue; + } + const identityProvider = scheme.identityProvider(await mwOptions.identityProviderConfigProvider(config)); + if (!identityProvider) { + failureReasons.push(`HttpAuthScheme \`${option.schemeId}\` did not have an IdentityProvider configured.`); + continue; + } + const { identityProperties = {}, signingProperties = {} } = option.propertiesExtractor?.(config, context) || {}; + option.identityProperties = Object.assign(option.identityProperties || {}, identityProperties); + option.signingProperties = Object.assign(option.signingProperties || {}, signingProperties); + smithyContext.selectedHttpAuthScheme = { + httpAuthOption: option, + identity: await identityProvider(option.identityProperties), + signer: scheme.signer, + }; + break; + } + if (!smithyContext.selectedHttpAuthScheme) { + throw new Error(failureReasons.join("\n")); + } + return next(args); +}; diff --git a/amplify/functions/downloadDocument/node_modules/@smithy/core/dist-es/middleware-http-auth-scheme/index.js 
b/amplify/functions/downloadDocument/node_modules/@smithy/core/dist-es/middleware-http-auth-scheme/index.js new file mode 100644 index 0000000..5042e7d --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@smithy/core/dist-es/middleware-http-auth-scheme/index.js @@ -0,0 +1,3 @@ +export * from "./httpAuthSchemeMiddleware"; +export * from "./getHttpAuthSchemeEndpointRuleSetPlugin"; +export * from "./getHttpAuthSchemePlugin"; diff --git a/amplify/functions/downloadDocument/node_modules/@smithy/core/dist-es/middleware-http-auth-scheme/resolveAuthOptions.js b/amplify/functions/downloadDocument/node_modules/@smithy/core/dist-es/middleware-http-auth-scheme/resolveAuthOptions.js new file mode 100644 index 0000000..8260757 --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@smithy/core/dist-es/middleware-http-auth-scheme/resolveAuthOptions.js @@ -0,0 +1,20 @@ +export const resolveAuthOptions = (candidateAuthOptions, authSchemePreference) => { + if (!authSchemePreference || authSchemePreference.length === 0) { + return candidateAuthOptions; + } + const preferredAuthOptions = []; + for (const preferredSchemeName of authSchemePreference) { + for (const candidateAuthOption of candidateAuthOptions) { + const candidateAuthSchemeName = candidateAuthOption.schemeId.split("#")[1]; + if (candidateAuthSchemeName === preferredSchemeName) { + preferredAuthOptions.push(candidateAuthOption); + } + } + } + for (const candidateAuthOption of candidateAuthOptions) { + if (!preferredAuthOptions.find(({ schemeId }) => schemeId === candidateAuthOption.schemeId)) { + preferredAuthOptions.push(candidateAuthOption); + } + } + return preferredAuthOptions; +}; diff --git a/amplify/functions/downloadDocument/node_modules/@smithy/core/dist-es/middleware-http-signing/getHttpSigningMiddleware.js b/amplify/functions/downloadDocument/node_modules/@smithy/core/dist-es/middleware-http-signing/getHttpSigningMiddleware.js new file mode 100644 index 0000000..e199712 --- /dev/null 
+++ b/amplify/functions/downloadDocument/node_modules/@smithy/core/dist-es/middleware-http-signing/getHttpSigningMiddleware.js @@ -0,0 +1,15 @@ +import { httpSigningMiddleware } from "./httpSigningMiddleware"; +export const httpSigningMiddlewareOptions = { + step: "finalizeRequest", + tags: ["HTTP_SIGNING"], + name: "httpSigningMiddleware", + aliases: ["apiKeyMiddleware", "tokenMiddleware", "awsAuthMiddleware"], + override: true, + relation: "after", + toMiddleware: "retryMiddleware", +}; +export const getHttpSigningPlugin = (config) => ({ + applyToStack: (clientStack) => { + clientStack.addRelativeTo(httpSigningMiddleware(config), httpSigningMiddlewareOptions); + }, +}); diff --git a/amplify/functions/downloadDocument/node_modules/@smithy/core/dist-es/middleware-http-signing/httpSigningMiddleware.js b/amplify/functions/downloadDocument/node_modules/@smithy/core/dist-es/middleware-http-signing/httpSigningMiddleware.js new file mode 100644 index 0000000..dbc1b28 --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@smithy/core/dist-es/middleware-http-signing/httpSigningMiddleware.js @@ -0,0 +1,24 @@ +import { HttpRequest } from "@smithy/protocol-http"; +import { SMITHY_CONTEXT_KEY, } from "@smithy/types"; +import { getSmithyContext } from "@smithy/util-middleware"; +const defaultErrorHandler = (signingProperties) => (error) => { + throw error; +}; +const defaultSuccessHandler = (httpResponse, signingProperties) => { }; +export const httpSigningMiddleware = (config) => (next, context) => async (args) => { + if (!HttpRequest.isInstance(args.request)) { + return next(args); + } + const smithyContext = getSmithyContext(context); + const scheme = smithyContext.selectedHttpAuthScheme; + if (!scheme) { + throw new Error(`No HttpAuthScheme was selected: unable to sign request`); + } + const { httpAuthOption: { signingProperties = {} }, identity, signer, } = scheme; + const output = await next({ + ...args, + request: await signer.sign(args.request, identity, 
signingProperties), + }).catch((signer.errorHandler || defaultErrorHandler)(signingProperties)); + (signer.successHandler || defaultSuccessHandler)(output.response, signingProperties); + return output; +}; diff --git a/amplify/functions/downloadDocument/node_modules/@smithy/core/dist-es/middleware-http-signing/index.js b/amplify/functions/downloadDocument/node_modules/@smithy/core/dist-es/middleware-http-signing/index.js new file mode 100644 index 0000000..7bc6cfe --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@smithy/core/dist-es/middleware-http-signing/index.js @@ -0,0 +1,2 @@ +export * from "./httpSigningMiddleware"; +export * from "./getHttpSigningMiddleware"; diff --git a/amplify/functions/downloadDocument/node_modules/@smithy/core/dist-es/normalizeProvider.js b/amplify/functions/downloadDocument/node_modules/@smithy/core/dist-es/normalizeProvider.js new file mode 100644 index 0000000..a83ea99 --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@smithy/core/dist-es/normalizeProvider.js @@ -0,0 +1,6 @@ +export const normalizeProvider = (input) => { + if (typeof input === "function") + return input; + const promisified = Promise.resolve(input); + return () => promisified; +}; diff --git a/amplify/functions/downloadDocument/node_modules/@smithy/core/dist-es/pagination/createPaginator.js b/amplify/functions/downloadDocument/node_modules/@smithy/core/dist-es/pagination/createPaginator.js new file mode 100644 index 0000000..4e8f889 --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@smithy/core/dist-es/pagination/createPaginator.js @@ -0,0 +1,41 @@ +const makePagedClientRequest = async (CommandCtor, client, input, withCommand = (_) => _, ...args) => { + let command = new CommandCtor(input); + command = withCommand(command) ?? 
command; + return await client.send(command, ...args); +}; +export function createPaginator(ClientCtor, CommandCtor, inputTokenName, outputTokenName, pageSizeTokenName) { + return async function* paginateOperation(config, input, ...additionalArguments) { + const _input = input; + let token = config.startingToken ?? _input[inputTokenName]; + let hasNext = true; + let page; + while (hasNext) { + _input[inputTokenName] = token; + if (pageSizeTokenName) { + _input[pageSizeTokenName] = _input[pageSizeTokenName] ?? config.pageSize; + } + if (config.client instanceof ClientCtor) { + page = await makePagedClientRequest(CommandCtor, config.client, input, config.withCommand, ...additionalArguments); + } + else { + throw new Error(`Invalid client, expected instance of ${ClientCtor.name}`); + } + yield page; + const prevToken = token; + token = get(page, outputTokenName); + hasNext = !!(token && (!config.stopOnSameToken || token !== prevToken)); + } + return undefined; + }; +} +const get = (fromObject, path) => { + let cursor = fromObject; + const pathComponents = path.split("."); + for (const step of pathComponents) { + if (!cursor || typeof cursor !== "object") { + return undefined; + } + cursor = cursor[step]; + } + return cursor; +}; diff --git a/amplify/functions/downloadDocument/node_modules/@smithy/core/dist-es/protocols/requestBuilder.js b/amplify/functions/downloadDocument/node_modules/@smithy/core/dist-es/protocols/requestBuilder.js new file mode 100644 index 0000000..5b790a7 --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@smithy/core/dist-es/protocols/requestBuilder.js @@ -0,0 +1 @@ +export { requestBuilder } from "@smithy/core/protocols"; diff --git a/amplify/functions/downloadDocument/node_modules/@smithy/core/dist-es/setFeature.js b/amplify/functions/downloadDocument/node_modules/@smithy/core/dist-es/setFeature.js new file mode 100644 index 0000000..a3a0303 --- /dev/null +++ 
b/amplify/functions/downloadDocument/node_modules/@smithy/core/dist-es/setFeature.js @@ -0,0 +1,11 @@ +export function setFeature(context, feature, value) { + if (!context.__smithy_context) { + context.__smithy_context = { + features: {}, + }; + } + else if (!context.__smithy_context.features) { + context.__smithy_context.features = {}; + } + context.__smithy_context.features[feature] = value; +} diff --git a/amplify/functions/downloadDocument/node_modules/@smithy/core/dist-es/submodules/cbor/cbor-decode.js b/amplify/functions/downloadDocument/node_modules/@smithy/core/dist-es/submodules/cbor/cbor-decode.js new file mode 100644 index 0000000..dca1c63 --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@smithy/core/dist-es/submodules/cbor/cbor-decode.js @@ -0,0 +1,391 @@ +import { toUtf8 } from "@smithy/util-utf8"; +import { alloc, extendedFloat16, extendedFloat32, extendedFloat64, extendedOneByte, majorList, majorMap, majorNegativeInt64, majorTag, majorUint64, majorUnstructuredByteString, majorUtf8String, minorIndefinite, specialFalse, specialNull, specialTrue, specialUndefined, tag, } from "./cbor-types"; +const USE_TEXT_DECODER = typeof TextDecoder !== "undefined"; +const USE_BUFFER = typeof Buffer !== "undefined"; +let payload = alloc(0); +let dataView = new DataView(payload.buffer, payload.byteOffset, payload.byteLength); +const textDecoder = USE_TEXT_DECODER ? 
new TextDecoder() : null; +let _offset = 0; +export function setPayload(bytes) { + payload = bytes; + dataView = new DataView(payload.buffer, payload.byteOffset, payload.byteLength); +} +export function decode(at, to) { + if (at >= to) { + throw new Error("unexpected end of (decode) payload."); + } + const major = (payload[at] & 224) >> 5; + const minor = payload[at] & 31; + switch (major) { + case majorUint64: + case majorNegativeInt64: + case majorTag: + let unsignedInt; + let offset; + if (minor < 24) { + unsignedInt = minor; + offset = 1; + } + else { + switch (minor) { + case extendedOneByte: + case extendedFloat16: + case extendedFloat32: + case extendedFloat64: + const countLength = minorValueToArgumentLength[minor]; + const countOffset = (countLength + 1); + offset = countOffset; + if (to - at < countOffset) { + throw new Error(`countLength ${countLength} greater than remaining buf len.`); + } + const countIndex = at + 1; + if (countLength === 1) { + unsignedInt = payload[countIndex]; + } + else if (countLength === 2) { + unsignedInt = dataView.getUint16(countIndex); + } + else if (countLength === 4) { + unsignedInt = dataView.getUint32(countIndex); + } + else { + unsignedInt = dataView.getBigUint64(countIndex); + } + break; + default: + throw new Error(`unexpected minor value ${minor}.`); + } + } + if (major === majorUint64) { + _offset = offset; + return castBigInt(unsignedInt); + } + else if (major === majorNegativeInt64) { + let negativeInt; + if (typeof unsignedInt === "bigint") { + negativeInt = BigInt(-1) - unsignedInt; + } + else { + negativeInt = -1 - unsignedInt; + } + _offset = offset; + return castBigInt(negativeInt); + } + else { + const value = decode(at + offset, to); + const valueOffset = _offset; + _offset = offset + valueOffset; + return tag({ tag: castBigInt(unsignedInt), value }); + } + case majorUtf8String: + case majorMap: + case majorList: + case majorUnstructuredByteString: + if (minor === minorIndefinite) { + switch (major) { + case 
majorUtf8String: + return decodeUtf8StringIndefinite(at, to); + case majorMap: + return decodeMapIndefinite(at, to); + case majorList: + return decodeListIndefinite(at, to); + case majorUnstructuredByteString: + return decodeUnstructuredByteStringIndefinite(at, to); + } + } + else { + switch (major) { + case majorUtf8String: + return decodeUtf8String(at, to); + case majorMap: + return decodeMap(at, to); + case majorList: + return decodeList(at, to); + case majorUnstructuredByteString: + return decodeUnstructuredByteString(at, to); + } + } + default: + return decodeSpecial(at, to); + } +} +function bytesToUtf8(bytes, at, to) { + if (USE_BUFFER && bytes.constructor?.name === "Buffer") { + return bytes.toString("utf-8", at, to); + } + if (textDecoder) { + return textDecoder.decode(bytes.subarray(at, to)); + } + return toUtf8(bytes.subarray(at, to)); +} +function demote(bigInteger) { + const num = Number(bigInteger); + if (num < Number.MIN_SAFE_INTEGER || Number.MAX_SAFE_INTEGER < num) { + console.warn(new Error(`@smithy/core/cbor - truncating BigInt(${bigInteger}) to ${num} with loss of precision.`)); + } + return num; +} +const minorValueToArgumentLength = { + [extendedOneByte]: 1, + [extendedFloat16]: 2, + [extendedFloat32]: 4, + [extendedFloat64]: 8, +}; +export function bytesToFloat16(a, b) { + const sign = a >> 7; + const exponent = (a & 124) >> 2; + const fraction = ((a & 3) << 8) | b; + const scalar = sign === 0 ? 
1 : -1; + let exponentComponent; + let summation; + if (exponent === 0b00000) { + if (fraction === 0) { + return 0; + } + else { + exponentComponent = Math.pow(2, 1 - 15); + summation = 0; + } + } + else if (exponent === 0b11111) { + if (fraction === 0) { + return scalar * Infinity; + } + else { + return NaN; + } + } + else { + exponentComponent = Math.pow(2, exponent - 15); + summation = 1; + } + summation += fraction / 1024; + return scalar * (exponentComponent * summation); +} +function decodeCount(at, to) { + const minor = payload[at] & 31; + if (minor < 24) { + _offset = 1; + return minor; + } + if (minor === extendedOneByte || + minor === extendedFloat16 || + minor === extendedFloat32 || + minor === extendedFloat64) { + const countLength = minorValueToArgumentLength[minor]; + _offset = (countLength + 1); + if (to - at < _offset) { + throw new Error(`countLength ${countLength} greater than remaining buf len.`); + } + const countIndex = at + 1; + if (countLength === 1) { + return payload[countIndex]; + } + else if (countLength === 2) { + return dataView.getUint16(countIndex); + } + else if (countLength === 4) { + return dataView.getUint32(countIndex); + } + return demote(dataView.getBigUint64(countIndex)); + } + throw new Error(`unexpected minor value ${minor}.`); +} +function decodeUtf8String(at, to) { + const length = decodeCount(at, to); + const offset = _offset; + at += offset; + if (to - at < length) { + throw new Error(`string len ${length} greater than remaining buf len.`); + } + const value = bytesToUtf8(payload, at, at + length); + _offset = offset + length; + return value; +} +function decodeUtf8StringIndefinite(at, to) { + at += 1; + const vector = []; + for (const base = at; at < to;) { + if (payload[at] === 255) { + const data = alloc(vector.length); + data.set(vector, 0); + _offset = at - base + 2; + return bytesToUtf8(data, 0, data.length); + } + const major = (payload[at] & 224) >> 5; + const minor = payload[at] & 31; + if (major !== 
majorUtf8String) { + throw new Error(`unexpected major type ${major} in indefinite string.`); + } + if (minor === minorIndefinite) { + throw new Error("nested indefinite string."); + } + const bytes = decodeUnstructuredByteString(at, to); + const length = _offset; + at += length; + for (let i = 0; i < bytes.length; ++i) { + vector.push(bytes[i]); + } + } + throw new Error("expected break marker."); +} +function decodeUnstructuredByteString(at, to) { + const length = decodeCount(at, to); + const offset = _offset; + at += offset; + if (to - at < length) { + throw new Error(`unstructured byte string len ${length} greater than remaining buf len.`); + } + const value = payload.subarray(at, at + length); + _offset = offset + length; + return value; +} +function decodeUnstructuredByteStringIndefinite(at, to) { + at += 1; + const vector = []; + for (const base = at; at < to;) { + if (payload[at] === 255) { + const data = alloc(vector.length); + data.set(vector, 0); + _offset = at - base + 2; + return data; + } + const major = (payload[at] & 224) >> 5; + const minor = payload[at] & 31; + if (major !== majorUnstructuredByteString) { + throw new Error(`unexpected major type ${major} in indefinite string.`); + } + if (minor === minorIndefinite) { + throw new Error("nested indefinite string."); + } + const bytes = decodeUnstructuredByteString(at, to); + const length = _offset; + at += length; + for (let i = 0; i < bytes.length; ++i) { + vector.push(bytes[i]); + } + } + throw new Error("expected break marker."); +} +function decodeList(at, to) { + const listDataLength = decodeCount(at, to); + const offset = _offset; + at += offset; + const base = at; + const list = Array(listDataLength); + for (let i = 0; i < listDataLength; ++i) { + const item = decode(at, to); + const itemOffset = _offset; + list[i] = item; + at += itemOffset; + } + _offset = offset + (at - base); + return list; +} +function decodeListIndefinite(at, to) { + at += 1; + const list = []; + for (const base = at; 
at < to;) { + if (payload[at] === 255) { + _offset = at - base + 2; + return list; + } + const item = decode(at, to); + const n = _offset; + at += n; + list.push(item); + } + throw new Error("expected break marker."); +} +function decodeMap(at, to) { + const mapDataLength = decodeCount(at, to); + const offset = _offset; + at += offset; + const base = at; + const map = {}; + for (let i = 0; i < mapDataLength; ++i) { + if (at >= to) { + throw new Error("unexpected end of map payload."); + } + const major = (payload[at] & 224) >> 5; + if (major !== majorUtf8String) { + throw new Error(`unexpected major type ${major} for map key at index ${at}.`); + } + const key = decode(at, to); + at += _offset; + const value = decode(at, to); + at += _offset; + map[key] = value; + } + _offset = offset + (at - base); + return map; +} +function decodeMapIndefinite(at, to) { + at += 1; + const base = at; + const map = {}; + for (; at < to;) { + if (at >= to) { + throw new Error("unexpected end of map payload."); + } + if (payload[at] === 255) { + _offset = at - base + 2; + return map; + } + const major = (payload[at] & 224) >> 5; + if (major !== majorUtf8String) { + throw new Error(`unexpected major type ${major} for map key.`); + } + const key = decode(at, to); + at += _offset; + const value = decode(at, to); + at += _offset; + map[key] = value; + } + throw new Error("expected break marker."); +} +function decodeSpecial(at, to) { + const minor = payload[at] & 31; + switch (minor) { + case specialTrue: + case specialFalse: + _offset = 1; + return minor === specialTrue; + case specialNull: + _offset = 1; + return null; + case specialUndefined: + _offset = 1; + return null; + case extendedFloat16: + if (to - at < 3) { + throw new Error("incomplete float16 at end of buf."); + } + _offset = 3; + return bytesToFloat16(payload[at + 1], payload[at + 2]); + case extendedFloat32: + if (to - at < 5) { + throw new Error("incomplete float32 at end of buf."); + } + _offset = 5; + return 
dataView.getFloat32(at + 1); + case extendedFloat64: + if (to - at < 9) { + throw new Error("incomplete float64 at end of buf."); + } + _offset = 9; + return dataView.getFloat64(at + 1); + default: + throw new Error(`unexpected minor value ${minor}.`); + } +} +function castBigInt(bigInt) { + if (typeof bigInt === "number") { + return bigInt; + } + const num = Number(bigInt); + if (Number.MIN_SAFE_INTEGER <= num && num <= Number.MAX_SAFE_INTEGER) { + return num; + } + return bigInt; +} diff --git a/amplify/functions/downloadDocument/node_modules/@smithy/core/dist-es/submodules/cbor/cbor-encode.js b/amplify/functions/downloadDocument/node_modules/@smithy/core/dist-es/submodules/cbor/cbor-encode.js new file mode 100644 index 0000000..17af4e2 --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@smithy/core/dist-es/submodules/cbor/cbor-encode.js @@ -0,0 +1,191 @@ +import { fromUtf8 } from "@smithy/util-utf8"; +import { extendedFloat16, extendedFloat32, extendedFloat64, majorList, majorMap, majorNegativeInt64, majorSpecial, majorTag, majorUint64, majorUnstructuredByteString, majorUtf8String, specialFalse, specialNull, specialTrue, tagSymbol, } from "./cbor-types"; +import { alloc } from "./cbor-types"; +const USE_BUFFER = typeof Buffer !== "undefined"; +const initialSize = 2048; +let data = alloc(initialSize); +let dataView = new DataView(data.buffer, data.byteOffset, data.byteLength); +let cursor = 0; +function ensureSpace(bytes) { + const remaining = data.byteLength - cursor; + if (remaining < bytes) { + if (cursor < 16000000) { + resize(Math.max(data.byteLength * 4, data.byteLength + bytes)); + } + else { + resize(data.byteLength + bytes + 16000000); + } + } +} +export function toUint8Array() { + const out = alloc(cursor); + out.set(data.subarray(0, cursor), 0); + cursor = 0; + return out; +} +export function resize(size) { + const old = data; + data = alloc(size); + if (old) { + if (old.copy) { + old.copy(data, 0, 0, old.byteLength); + } + else { + 
data.set(old, 0); + } + } + dataView = new DataView(data.buffer, data.byteOffset, data.byteLength); +} +function encodeHeader(major, value) { + if (value < 24) { + data[cursor++] = (major << 5) | value; + } + else if (value < 1 << 8) { + data[cursor++] = (major << 5) | 24; + data[cursor++] = value; + } + else if (value < 1 << 16) { + data[cursor++] = (major << 5) | extendedFloat16; + dataView.setUint16(cursor, value); + cursor += 2; + } + else if (value < 2 ** 32) { + data[cursor++] = (major << 5) | extendedFloat32; + dataView.setUint32(cursor, value); + cursor += 4; + } + else { + data[cursor++] = (major << 5) | extendedFloat64; + dataView.setBigUint64(cursor, typeof value === "bigint" ? value : BigInt(value)); + cursor += 8; + } +} +export function encode(_input) { + const encodeStack = [_input]; + while (encodeStack.length) { + const input = encodeStack.pop(); + ensureSpace(typeof input === "string" ? input.length * 4 : 64); + if (typeof input === "string") { + if (USE_BUFFER) { + encodeHeader(majorUtf8String, Buffer.byteLength(input)); + cursor += data.write(input, cursor); + } + else { + const bytes = fromUtf8(input); + encodeHeader(majorUtf8String, bytes.byteLength); + data.set(bytes, cursor); + cursor += bytes.byteLength; + } + continue; + } + else if (typeof input === "number") { + if (Number.isInteger(input)) { + const nonNegative = input >= 0; + const major = nonNegative ? majorUint64 : majorNegativeInt64; + const value = nonNegative ? 
input : -input - 1; + if (value < 24) { + data[cursor++] = (major << 5) | value; + } + else if (value < 256) { + data[cursor++] = (major << 5) | 24; + data[cursor++] = value; + } + else if (value < 65536) { + data[cursor++] = (major << 5) | extendedFloat16; + data[cursor++] = value >> 8; + data[cursor++] = value; + } + else if (value < 4294967296) { + data[cursor++] = (major << 5) | extendedFloat32; + dataView.setUint32(cursor, value); + cursor += 4; + } + else { + data[cursor++] = (major << 5) | extendedFloat64; + dataView.setBigUint64(cursor, BigInt(value)); + cursor += 8; + } + continue; + } + data[cursor++] = (majorSpecial << 5) | extendedFloat64; + dataView.setFloat64(cursor, input); + cursor += 8; + continue; + } + else if (typeof input === "bigint") { + const nonNegative = input >= 0; + const major = nonNegative ? majorUint64 : majorNegativeInt64; + const value = nonNegative ? input : -input - BigInt(1); + const n = Number(value); + if (n < 24) { + data[cursor++] = (major << 5) | n; + } + else if (n < 256) { + data[cursor++] = (major << 5) | 24; + data[cursor++] = n; + } + else if (n < 65536) { + data[cursor++] = (major << 5) | extendedFloat16; + data[cursor++] = n >> 8; + data[cursor++] = n & 255; + } + else if (n < 4294967296) { + data[cursor++] = (major << 5) | extendedFloat32; + dataView.setUint32(cursor, n); + cursor += 4; + } + else { + data[cursor++] = (major << 5) | extendedFloat64; + dataView.setBigUint64(cursor, value); + cursor += 8; + } + continue; + } + else if (input === null) { + data[cursor++] = (majorSpecial << 5) | specialNull; + continue; + } + else if (typeof input === "boolean") { + data[cursor++] = (majorSpecial << 5) | (input ? 
specialTrue : specialFalse); + continue; + } + else if (typeof input === "undefined") { + throw new Error("@smithy/core/cbor: client may not serialize undefined value."); + } + else if (Array.isArray(input)) { + for (let i = input.length - 1; i >= 0; --i) { + encodeStack.push(input[i]); + } + encodeHeader(majorList, input.length); + continue; + } + else if (typeof input.byteLength === "number") { + ensureSpace(input.length * 2); + encodeHeader(majorUnstructuredByteString, input.length); + data.set(input, cursor); + cursor += input.byteLength; + continue; + } + else if (typeof input === "object") { + if (input[tagSymbol]) { + if ("tag" in input && "value" in input) { + encodeStack.push(input.value); + encodeHeader(majorTag, input.tag); + continue; + } + else { + throw new Error("tag encountered with missing fields, need 'tag' and 'value', found: " + JSON.stringify(input)); + } + } + const keys = Object.keys(input); + for (let i = keys.length - 1; i >= 0; --i) { + const key = keys[i]; + encodeStack.push(input[key]); + encodeStack.push(key); + } + encodeHeader(majorMap, keys.length); + continue; + } + throw new Error(`data type ${input?.constructor?.name ?? 
typeof input} not compatible for encoding.`); + } +} diff --git a/amplify/functions/downloadDocument/node_modules/@smithy/core/dist-es/submodules/cbor/cbor-types.js b/amplify/functions/downloadDocument/node_modules/@smithy/core/dist-es/submodules/cbor/cbor-types.js new file mode 100644 index 0000000..a720eb7 --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@smithy/core/dist-es/submodules/cbor/cbor-types.js @@ -0,0 +1,25 @@ +export const majorUint64 = 0; +export const majorNegativeInt64 = 1; +export const majorUnstructuredByteString = 2; +export const majorUtf8String = 3; +export const majorList = 4; +export const majorMap = 5; +export const majorTag = 6; +export const majorSpecial = 7; +export const specialFalse = 20; +export const specialTrue = 21; +export const specialNull = 22; +export const specialUndefined = 23; +export const extendedOneByte = 24; +export const extendedFloat16 = 25; +export const extendedFloat32 = 26; +export const extendedFloat64 = 27; +export const minorIndefinite = 31; +export function alloc(size) { + return typeof Buffer !== "undefined" ? 
Buffer.alloc(size) : new Uint8Array(size); +} +export const tagSymbol = Symbol("@smithy/core/cbor::tagSymbol"); +export function tag(data) { + data[tagSymbol] = true; + return data; +} diff --git a/amplify/functions/downloadDocument/node_modules/@smithy/core/dist-es/submodules/cbor/cbor.js b/amplify/functions/downloadDocument/node_modules/@smithy/core/dist-es/submodules/cbor/cbor.js new file mode 100644 index 0000000..8df975f --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@smithy/core/dist-es/submodules/cbor/cbor.js @@ -0,0 +1,21 @@ +import { decode, setPayload } from "./cbor-decode"; +import { encode, resize, toUint8Array } from "./cbor-encode"; +export const cbor = { + deserialize(payload) { + setPayload(payload); + return decode(0, payload.length); + }, + serialize(input) { + try { + encode(input); + return toUint8Array(); + } + catch (e) { + toUint8Array(); + throw e; + } + }, + resizeEncodingBuffer(size) { + resize(size); + }, +}; diff --git a/amplify/functions/downloadDocument/node_modules/@smithy/core/dist-es/submodules/cbor/index.js b/amplify/functions/downloadDocument/node_modules/@smithy/core/dist-es/submodules/cbor/index.js new file mode 100644 index 0000000..0910d27 --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@smithy/core/dist-es/submodules/cbor/index.js @@ -0,0 +1,3 @@ +export { cbor } from "./cbor"; +export * from "./parseCborBody"; +export { tagSymbol, tag } from "./cbor-types"; diff --git a/amplify/functions/downloadDocument/node_modules/@smithy/core/dist-es/submodules/cbor/parseCborBody.js b/amplify/functions/downloadDocument/node_modules/@smithy/core/dist-es/submodules/cbor/parseCborBody.js new file mode 100644 index 0000000..03eeae6 --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@smithy/core/dist-es/submodules/cbor/parseCborBody.js @@ -0,0 +1,85 @@ +import { collectBody } from "@smithy/core/protocols"; +import { HttpRequest as __HttpRequest } from "@smithy/protocol-http"; +import { 
calculateBodyLength } from "@smithy/util-body-length-browser"; +import { cbor } from "./cbor"; +import { tag, tagSymbol } from "./cbor-types"; +export const parseCborBody = (streamBody, context) => { + return collectBody(streamBody, context).then(async (bytes) => { + if (bytes.length) { + try { + return cbor.deserialize(bytes); + } + catch (e) { + Object.defineProperty(e, "$responseBodyText", { + value: context.utf8Encoder(bytes), + }); + throw e; + } + } + return {}; + }); +}; +export const dateToTag = (date) => { + return tag({ + tag: 1, + value: date.getTime() / 1000, + }); +}; +export const parseCborErrorBody = async (errorBody, context) => { + const value = await parseCborBody(errorBody, context); + value.message = value.message ?? value.Message; + return value; +}; +export const loadSmithyRpcV2CborErrorCode = (output, data) => { + const sanitizeErrorCode = (rawValue) => { + let cleanValue = rawValue; + if (typeof cleanValue === "number") { + cleanValue = cleanValue.toString(); + } + if (cleanValue.indexOf(",") >= 0) { + cleanValue = cleanValue.split(",")[0]; + } + if (cleanValue.indexOf(":") >= 0) { + cleanValue = cleanValue.split(":")[0]; + } + if (cleanValue.indexOf("#") >= 0) { + cleanValue = cleanValue.split("#")[1]; + } + return cleanValue; + }; + if (data["__type"] !== undefined) { + return sanitizeErrorCode(data["__type"]); + } + if (data.code !== undefined) { + return sanitizeErrorCode(data.code); + } +}; +export const checkCborResponse = (response) => { + if (String(response.headers["smithy-protocol"]).toLowerCase() !== "rpc-v2-cbor") { + throw new Error("Malformed RPCv2 CBOR response, status: " + response.statusCode); + } +}; +export const buildHttpRpcRequest = async (context, headers, path, resolvedHostname, body) => { + const { hostname, protocol = "https", port, path: basePath } = await context.endpoint(); + const contents = { + protocol, + hostname, + port, + method: "POST", + path: basePath.endsWith("/") ? 
basePath.slice(0, -1) + path : basePath + path, + headers: { + ...headers, + }, + }; + if (resolvedHostname !== undefined) { + contents.hostname = resolvedHostname; + } + if (body !== undefined) { + contents.body = body; + try { + contents.headers["content-length"] = String(calculateBodyLength(body)); + } + catch (e) { } + } + return new __HttpRequest(contents); +}; diff --git a/amplify/functions/downloadDocument/node_modules/@smithy/core/dist-es/submodules/protocols/collect-stream-body.js b/amplify/functions/downloadDocument/node_modules/@smithy/core/dist-es/submodules/protocols/collect-stream-body.js new file mode 100644 index 0000000..b6a5c0b --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@smithy/core/dist-es/submodules/protocols/collect-stream-body.js @@ -0,0 +1,11 @@ +import { Uint8ArrayBlobAdapter } from "@smithy/util-stream"; +export const collectBody = async (streamBody = new Uint8Array(), context) => { + if (streamBody instanceof Uint8Array) { + return Uint8ArrayBlobAdapter.mutate(streamBody); + } + if (!streamBody) { + return Uint8ArrayBlobAdapter.mutate(new Uint8Array()); + } + const fromContext = context.streamCollector(streamBody); + return Uint8ArrayBlobAdapter.mutate(await fromContext); +}; diff --git a/amplify/functions/downloadDocument/node_modules/@smithy/core/dist-es/submodules/protocols/extended-encode-uri-component.js b/amplify/functions/downloadDocument/node_modules/@smithy/core/dist-es/submodules/protocols/extended-encode-uri-component.js new file mode 100644 index 0000000..5baeaf5 --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@smithy/core/dist-es/submodules/protocols/extended-encode-uri-component.js @@ -0,0 +1,5 @@ +export function extendedEncodeURIComponent(str) { + return encodeURIComponent(str).replace(/[!'()*]/g, function (c) { + return "%" + c.charCodeAt(0).toString(16).toUpperCase(); + }); +} diff --git 
a/amplify/functions/downloadDocument/node_modules/@smithy/core/dist-es/submodules/protocols/index.js b/amplify/functions/downloadDocument/node_modules/@smithy/core/dist-es/submodules/protocols/index.js new file mode 100644 index 0000000..a5de22f --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@smithy/core/dist-es/submodules/protocols/index.js @@ -0,0 +1,4 @@ +export * from "./collect-stream-body"; +export * from "./extended-encode-uri-component"; +export * from "./requestBuilder"; +export * from "./resolve-path"; diff --git a/amplify/functions/downloadDocument/node_modules/@smithy/core/dist-es/submodules/protocols/requestBuilder.js b/amplify/functions/downloadDocument/node_modules/@smithy/core/dist-es/submodules/protocols/requestBuilder.js new file mode 100644 index 0000000..3391ef2 --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@smithy/core/dist-es/submodules/protocols/requestBuilder.js @@ -0,0 +1,67 @@ +import { HttpRequest } from "@smithy/protocol-http"; +import { resolvedPath } from "./resolve-path"; +export function requestBuilder(input, context) { + return new RequestBuilder(input, context); +} +export class RequestBuilder { + constructor(input, context) { + this.input = input; + this.context = context; + this.query = {}; + this.method = ""; + this.headers = {}; + this.path = ""; + this.body = null; + this.hostname = ""; + this.resolvePathStack = []; + } + async build() { + const { hostname, protocol = "https", port, path: basePath } = await this.context.endpoint(); + this.path = basePath; + for (const resolvePath of this.resolvePathStack) { + resolvePath(this.path); + } + return new HttpRequest({ + protocol, + hostname: this.hostname || hostname, + port, + method: this.method, + path: this.path, + query: this.query, + body: this.body, + headers: this.headers, + }); + } + hn(hostname) { + this.hostname = hostname; + return this; + } + bp(uriLabel) { + this.resolvePathStack.push((basePath) => { + this.path = 
`${basePath?.endsWith("/") ? basePath.slice(0, -1) : basePath || ""}` + uriLabel; + }); + return this; + } + p(memberName, labelValueProvider, uriLabel, isGreedyLabel) { + this.resolvePathStack.push((path) => { + this.path = resolvedPath(path, this.input, memberName, labelValueProvider, uriLabel, isGreedyLabel); + }); + return this; + } + h(headers) { + this.headers = headers; + return this; + } + q(query) { + this.query = query; + return this; + } + b(body) { + this.body = body; + return this; + } + m(method) { + this.method = method; + return this; + } +} diff --git a/amplify/functions/downloadDocument/node_modules/@smithy/core/dist-es/submodules/protocols/resolve-path.js b/amplify/functions/downloadDocument/node_modules/@smithy/core/dist-es/submodules/protocols/resolve-path.js new file mode 100644 index 0000000..8483e01 --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@smithy/core/dist-es/submodules/protocols/resolve-path.js @@ -0,0 +1,19 @@ +import { extendedEncodeURIComponent } from "./extended-encode-uri-component"; +export const resolvedPath = (resolvedPath, input, memberName, labelValueProvider, uriLabel, isGreedyLabel) => { + if (input != null && input[memberName] !== undefined) { + const labelValue = labelValueProvider(); + if (labelValue.length <= 0) { + throw new Error("Empty value provided for input HTTP label: " + memberName + "."); + } + resolvedPath = resolvedPath.replace(uriLabel, isGreedyLabel + ? 
labelValue + .split("/") + .map((segment) => extendedEncodeURIComponent(segment)) + .join("/") + : extendedEncodeURIComponent(labelValue)); + } + else { + throw new Error("No value provided for input HTTP label: " + memberName + "."); + } + return resolvedPath; +}; diff --git a/amplify/functions/downloadDocument/node_modules/@smithy/core/dist-es/submodules/serde/index.js b/amplify/functions/downloadDocument/node_modules/@smithy/core/dist-es/submodules/serde/index.js new file mode 100644 index 0000000..a70d0dd --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@smithy/core/dist-es/submodules/serde/index.js @@ -0,0 +1 @@ +export * from "./value/NumericValue"; diff --git a/amplify/functions/downloadDocument/node_modules/@smithy/core/dist-es/submodules/serde/value/NumericValue.js b/amplify/functions/downloadDocument/node_modules/@smithy/core/dist-es/submodules/serde/value/NumericValue.js new file mode 100644 index 0000000..6af270f --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@smithy/core/dist-es/submodules/serde/value/NumericValue.js @@ -0,0 +1,9 @@ +export class NumericValue { + constructor(string, type) { + this.string = string; + this.type = type; + } +} +export function nv(string) { + return new NumericValue(string, "bigDecimal"); +} diff --git a/amplify/functions/downloadDocument/node_modules/@smithy/core/dist-es/util-identity-and-auth/DefaultIdentityProviderConfig.js b/amplify/functions/downloadDocument/node_modules/@smithy/core/dist-es/util-identity-and-auth/DefaultIdentityProviderConfig.js new file mode 100644 index 0000000..3bc1016 --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@smithy/core/dist-es/util-identity-and-auth/DefaultIdentityProviderConfig.js @@ -0,0 +1,13 @@ +export class DefaultIdentityProviderConfig { + constructor(config) { + this.authSchemes = new Map(); + for (const [key, value] of Object.entries(config)) { + if (value !== undefined) { + this.authSchemes.set(key, value); + } + } + } 
+ getIdentityProvider(schemeId) { + return this.authSchemes.get(schemeId); + } +} diff --git a/amplify/functions/downloadDocument/node_modules/@smithy/core/dist-es/util-identity-and-auth/httpAuthSchemes/httpApiKeyAuth.js b/amplify/functions/downloadDocument/node_modules/@smithy/core/dist-es/util-identity-and-auth/httpAuthSchemes/httpApiKeyAuth.js new file mode 100644 index 0000000..8b6f598 --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@smithy/core/dist-es/util-identity-and-auth/httpAuthSchemes/httpApiKeyAuth.js @@ -0,0 +1,34 @@ +import { HttpRequest } from "@smithy/protocol-http"; +import { HttpApiKeyAuthLocation } from "@smithy/types"; +export class HttpApiKeyAuthSigner { + async sign(httpRequest, identity, signingProperties) { + if (!signingProperties) { + throw new Error("request could not be signed with `apiKey` since the `name` and `in` signer properties are missing"); + } + if (!signingProperties.name) { + throw new Error("request could not be signed with `apiKey` since the `name` signer property is missing"); + } + if (!signingProperties.in) { + throw new Error("request could not be signed with `apiKey` since the `in` signer property is missing"); + } + if (!identity.apiKey) { + throw new Error("request could not be signed with `apiKey` since the `apiKey` is not defined"); + } + const clonedRequest = HttpRequest.clone(httpRequest); + if (signingProperties.in === HttpApiKeyAuthLocation.QUERY) { + clonedRequest.query[signingProperties.name] = identity.apiKey; + } + else if (signingProperties.in === HttpApiKeyAuthLocation.HEADER) { + clonedRequest.headers[signingProperties.name] = signingProperties.scheme + ? 
`${signingProperties.scheme} ${identity.apiKey}` + : identity.apiKey; + } + else { + throw new Error("request can only be signed with `apiKey` locations `query` or `header`, " + + "but found: `" + + signingProperties.in + + "`"); + } + return clonedRequest; + } +} diff --git a/amplify/functions/downloadDocument/node_modules/@smithy/core/dist-es/util-identity-and-auth/httpAuthSchemes/httpBearerAuth.js b/amplify/functions/downloadDocument/node_modules/@smithy/core/dist-es/util-identity-and-auth/httpAuthSchemes/httpBearerAuth.js new file mode 100644 index 0000000..b92a9c3 --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@smithy/core/dist-es/util-identity-and-auth/httpAuthSchemes/httpBearerAuth.js @@ -0,0 +1,11 @@ +import { HttpRequest } from "@smithy/protocol-http"; +export class HttpBearerAuthSigner { + async sign(httpRequest, identity, signingProperties) { + const clonedRequest = HttpRequest.clone(httpRequest); + if (!identity.token) { + throw new Error("request could not be signed with `token` since the `token` is not defined"); + } + clonedRequest.headers["Authorization"] = `Bearer ${identity.token}`; + return clonedRequest; + } +} diff --git a/amplify/functions/downloadDocument/node_modules/@smithy/core/dist-es/util-identity-and-auth/httpAuthSchemes/index.js b/amplify/functions/downloadDocument/node_modules/@smithy/core/dist-es/util-identity-and-auth/httpAuthSchemes/index.js new file mode 100644 index 0000000..9d240fe --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@smithy/core/dist-es/util-identity-and-auth/httpAuthSchemes/index.js @@ -0,0 +1,3 @@ +export * from "./httpApiKeyAuth"; +export * from "./httpBearerAuth"; +export * from "./noAuth"; diff --git a/amplify/functions/downloadDocument/node_modules/@smithy/core/dist-es/util-identity-and-auth/httpAuthSchemes/noAuth.js b/amplify/functions/downloadDocument/node_modules/@smithy/core/dist-es/util-identity-and-auth/httpAuthSchemes/noAuth.js new file mode 100644 index 
0000000..356193d --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@smithy/core/dist-es/util-identity-and-auth/httpAuthSchemes/noAuth.js @@ -0,0 +1,5 @@ +export class NoAuthSigner { + async sign(httpRequest, identity, signingProperties) { + return httpRequest; + } +} diff --git a/amplify/functions/downloadDocument/node_modules/@smithy/core/dist-es/util-identity-and-auth/index.js b/amplify/functions/downloadDocument/node_modules/@smithy/core/dist-es/util-identity-and-auth/index.js new file mode 100644 index 0000000..87ba64b --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@smithy/core/dist-es/util-identity-and-auth/index.js @@ -0,0 +1,3 @@ +export * from "./DefaultIdentityProviderConfig"; +export * from "./httpAuthSchemes"; +export * from "./memoizeIdentityProvider"; diff --git a/amplify/functions/downloadDocument/node_modules/@smithy/core/dist-es/util-identity-and-auth/memoizeIdentityProvider.js b/amplify/functions/downloadDocument/node_modules/@smithy/core/dist-es/util-identity-and-auth/memoizeIdentityProvider.js new file mode 100644 index 0000000..8050585 --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@smithy/core/dist-es/util-identity-and-auth/memoizeIdentityProvider.js @@ -0,0 +1,53 @@ +export const createIsIdentityExpiredFunction = (expirationMs) => (identity) => doesIdentityRequireRefresh(identity) && identity.expiration.getTime() - Date.now() < expirationMs; +export const EXPIRATION_MS = 300000; +export const isIdentityExpired = createIsIdentityExpiredFunction(EXPIRATION_MS); +export const doesIdentityRequireRefresh = (identity) => identity.expiration !== undefined; +export const memoizeIdentityProvider = (provider, isExpired, requiresRefresh) => { + if (provider === undefined) { + return undefined; + } + const normalizedProvider = typeof provider !== "function" ? 
async () => Promise.resolve(provider) : provider; + let resolved; + let pending; + let hasResult; + let isConstant = false; + const coalesceProvider = async (options) => { + if (!pending) { + pending = normalizedProvider(options); + } + try { + resolved = await pending; + hasResult = true; + isConstant = false; + } + finally { + pending = undefined; + } + return resolved; + }; + if (isExpired === undefined) { + return async (options) => { + if (!hasResult || options?.forceRefresh) { + resolved = await coalesceProvider(options); + } + return resolved; + }; + } + return async (options) => { + if (!hasResult || options?.forceRefresh) { + resolved = await coalesceProvider(options); + } + if (isConstant) { + return resolved; + } + if (!requiresRefresh(resolved)) { + isConstant = true; + return resolved; + } + if (isExpired(resolved)) { + await coalesceProvider(options); + return resolved; + } + return resolved; + }; +}; diff --git a/amplify/functions/downloadDocument/node_modules/@smithy/core/dist-types/getSmithyContext.d.ts b/amplify/functions/downloadDocument/node_modules/@smithy/core/dist-types/getSmithyContext.d.ts new file mode 100644 index 0000000..523ee47 --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@smithy/core/dist-types/getSmithyContext.d.ts @@ -0,0 +1,5 @@ +import { HandlerExecutionContext } from "@smithy/types"; +/** + * @internal + */ +export declare const getSmithyContext: (context: HandlerExecutionContext) => Record; diff --git a/amplify/functions/downloadDocument/node_modules/@smithy/core/dist-types/index.d.ts b/amplify/functions/downloadDocument/node_modules/@smithy/core/dist-types/index.d.ts new file mode 100644 index 0000000..1dcdba1 --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@smithy/core/dist-types/index.d.ts @@ -0,0 +1,8 @@ +export * from "./getSmithyContext"; +export * from "./middleware-http-auth-scheme"; +export * from "./middleware-http-signing"; +export * from "./normalizeProvider"; +export { 
createPaginator } from "./pagination/createPaginator"; +export * from "./protocols/requestBuilder"; +export * from "./setFeature"; +export * from "./util-identity-and-auth"; diff --git a/amplify/functions/downloadDocument/node_modules/@smithy/core/dist-types/middleware-http-auth-scheme/getHttpAuthSchemeEndpointRuleSetPlugin.d.ts b/amplify/functions/downloadDocument/node_modules/@smithy/core/dist-types/middleware-http-auth-scheme/getHttpAuthSchemeEndpointRuleSetPlugin.d.ts new file mode 100644 index 0000000..996b0de --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@smithy/core/dist-types/middleware-http-auth-scheme/getHttpAuthSchemeEndpointRuleSetPlugin.d.ts @@ -0,0 +1,18 @@ +import { HandlerExecutionContext, HttpAuthSchemeParameters, HttpAuthSchemeParametersProvider, IdentityProviderConfig, Pluggable, RelativeMiddlewareOptions, SerializeHandlerOptions } from "@smithy/types"; +import { PreviouslyResolved } from "./httpAuthSchemeMiddleware"; +/** + * @internal + */ +export declare const httpAuthSchemeEndpointRuleSetMiddlewareOptions: SerializeHandlerOptions & RelativeMiddlewareOptions; +/** + * @internal + */ +interface HttpAuthSchemeEndpointRuleSetPluginOptions { + httpAuthSchemeParametersProvider: HttpAuthSchemeParametersProvider; + identityProviderConfigProvider: (config: TConfig) => Promise; +} +/** + * @internal + */ +export declare const getHttpAuthSchemeEndpointRuleSetPlugin: (config: TConfig & PreviouslyResolved, { httpAuthSchemeParametersProvider, identityProviderConfigProvider, }: HttpAuthSchemeEndpointRuleSetPluginOptions) => Pluggable; +export {}; diff --git a/amplify/functions/downloadDocument/node_modules/@smithy/core/dist-types/middleware-http-auth-scheme/getHttpAuthSchemePlugin.d.ts b/amplify/functions/downloadDocument/node_modules/@smithy/core/dist-types/middleware-http-auth-scheme/getHttpAuthSchemePlugin.d.ts new file mode 100644 index 0000000..2e57733 --- /dev/null +++ 
b/amplify/functions/downloadDocument/node_modules/@smithy/core/dist-types/middleware-http-auth-scheme/getHttpAuthSchemePlugin.d.ts @@ -0,0 +1,18 @@ +import { HandlerExecutionContext, HttpAuthSchemeParameters, HttpAuthSchemeParametersProvider, IdentityProviderConfig, Pluggable, RelativeMiddlewareOptions, SerializeHandlerOptions } from "@smithy/types"; +import { PreviouslyResolved } from "./httpAuthSchemeMiddleware"; +/** + * @internal + */ +export declare const httpAuthSchemeMiddlewareOptions: SerializeHandlerOptions & RelativeMiddlewareOptions; +/** + * @internal + */ +interface HttpAuthSchemePluginOptions { + httpAuthSchemeParametersProvider: HttpAuthSchemeParametersProvider; + identityProviderConfigProvider: (config: TConfig) => Promise; +} +/** + * @internal + */ +export declare const getHttpAuthSchemePlugin: (config: TConfig & PreviouslyResolved, { httpAuthSchemeParametersProvider, identityProviderConfigProvider, }: HttpAuthSchemePluginOptions) => Pluggable; +export {}; diff --git a/amplify/functions/downloadDocument/node_modules/@smithy/core/dist-types/middleware-http-auth-scheme/httpAuthSchemeMiddleware.d.ts b/amplify/functions/downloadDocument/node_modules/@smithy/core/dist-types/middleware-http-auth-scheme/httpAuthSchemeMiddleware.d.ts new file mode 100644 index 0000000..50f1ea8 --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@smithy/core/dist-types/middleware-http-auth-scheme/httpAuthSchemeMiddleware.d.ts @@ -0,0 +1,33 @@ +import { HandlerExecutionContext, HttpAuthScheme, HttpAuthSchemeParameters, HttpAuthSchemeParametersProvider, HttpAuthSchemeProvider, IdentityProviderConfig, Provider, SelectedHttpAuthScheme, SerializeMiddleware, SMITHY_CONTEXT_KEY } from "@smithy/types"; +/** + * @internal + */ +export interface PreviouslyResolved { + authSchemePreference?: Provider; + httpAuthSchemes: HttpAuthScheme[]; + httpAuthSchemeProvider: HttpAuthSchemeProvider; +} +/** + * @internal + */ +interface HttpAuthSchemeMiddlewareOptions { + 
httpAuthSchemeParametersProvider: HttpAuthSchemeParametersProvider; + identityProviderConfigProvider: (config: TConfig) => Promise; +} +/** + * @internal + */ +interface HttpAuthSchemeMiddlewareSmithyContext extends Record { + selectedHttpAuthScheme?: SelectedHttpAuthScheme; +} +/** + * @internal + */ +interface HttpAuthSchemeMiddlewareHandlerExecutionContext extends HandlerExecutionContext { + [SMITHY_CONTEXT_KEY]?: HttpAuthSchemeMiddlewareSmithyContext; +} +/** + * @internal + */ +export declare const httpAuthSchemeMiddleware: (config: TConfig & PreviouslyResolved, mwOptions: HttpAuthSchemeMiddlewareOptions) => SerializeMiddleware; +export {}; diff --git a/amplify/functions/downloadDocument/node_modules/@smithy/core/dist-types/middleware-http-auth-scheme/index.d.ts b/amplify/functions/downloadDocument/node_modules/@smithy/core/dist-types/middleware-http-auth-scheme/index.d.ts new file mode 100644 index 0000000..5042e7d --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@smithy/core/dist-types/middleware-http-auth-scheme/index.d.ts @@ -0,0 +1,3 @@ +export * from "./httpAuthSchemeMiddleware"; +export * from "./getHttpAuthSchemeEndpointRuleSetPlugin"; +export * from "./getHttpAuthSchemePlugin"; diff --git a/amplify/functions/downloadDocument/node_modules/@smithy/core/dist-types/middleware-http-auth-scheme/resolveAuthOptions.d.ts b/amplify/functions/downloadDocument/node_modules/@smithy/core/dist-types/middleware-http-auth-scheme/resolveAuthOptions.d.ts new file mode 100644 index 0000000..52fc604 --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@smithy/core/dist-types/middleware-http-auth-scheme/resolveAuthOptions.d.ts @@ -0,0 +1,10 @@ +import { HttpAuthOption } from "@smithy/types"; +/** + * Resolves list of auth options based on the supported ones, vs the preference list. + * + * @param candidateAuthOptions list of supported auth options selected by the standard + * resolution process (model-based, endpoints 2.0, etc.) 
+ * @param authSchemePreference list of auth schemes preferred by user. + * @returns + */ +export declare const resolveAuthOptions: (candidateAuthOptions: HttpAuthOption[], authSchemePreference: string[]) => HttpAuthOption[]; diff --git a/amplify/functions/downloadDocument/node_modules/@smithy/core/dist-types/middleware-http-signing/getHttpSigningMiddleware.d.ts b/amplify/functions/downloadDocument/node_modules/@smithy/core/dist-types/middleware-http-signing/getHttpSigningMiddleware.d.ts new file mode 100644 index 0000000..56c89a2 --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@smithy/core/dist-types/middleware-http-signing/getHttpSigningMiddleware.d.ts @@ -0,0 +1,9 @@ +import { FinalizeRequestHandlerOptions, Pluggable, RelativeMiddlewareOptions } from "@smithy/types"; +/** + * @internal + */ +export declare const httpSigningMiddlewareOptions: FinalizeRequestHandlerOptions & RelativeMiddlewareOptions; +/** + * @internal + */ +export declare const getHttpSigningPlugin: (config: object) => Pluggable; diff --git a/amplify/functions/downloadDocument/node_modules/@smithy/core/dist-types/middleware-http-signing/httpSigningMiddleware.d.ts b/amplify/functions/downloadDocument/node_modules/@smithy/core/dist-types/middleware-http-signing/httpSigningMiddleware.d.ts new file mode 100644 index 0000000..3b43611 --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@smithy/core/dist-types/middleware-http-signing/httpSigningMiddleware.d.ts @@ -0,0 +1,5 @@ +import { FinalizeRequestMiddleware } from "@smithy/types"; +/** + * @internal + */ +export declare const httpSigningMiddleware: (config: object) => FinalizeRequestMiddleware; diff --git a/amplify/functions/downloadDocument/node_modules/@smithy/core/dist-types/middleware-http-signing/index.d.ts b/amplify/functions/downloadDocument/node_modules/@smithy/core/dist-types/middleware-http-signing/index.d.ts new file mode 100644 index 0000000..7bc6cfe --- /dev/null +++ 
b/amplify/functions/downloadDocument/node_modules/@smithy/core/dist-types/middleware-http-signing/index.d.ts @@ -0,0 +1,2 @@ +export * from "./httpSigningMiddleware"; +export * from "./getHttpSigningMiddleware"; diff --git a/amplify/functions/downloadDocument/node_modules/@smithy/core/dist-types/normalizeProvider.d.ts b/amplify/functions/downloadDocument/node_modules/@smithy/core/dist-types/normalizeProvider.d.ts new file mode 100644 index 0000000..4fe2d9a --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@smithy/core/dist-types/normalizeProvider.d.ts @@ -0,0 +1,7 @@ +import { Provider } from "@smithy/types"; +/** + * @internal + * + * @returns a provider function for the input value if it isn't already one. + */ +export declare const normalizeProvider: (input: T | Provider) => Provider; diff --git a/amplify/functions/downloadDocument/node_modules/@smithy/core/dist-types/pagination/createPaginator.d.ts b/amplify/functions/downloadDocument/node_modules/@smithy/core/dist-types/pagination/createPaginator.d.ts new file mode 100644 index 0000000..78fcbe0 --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@smithy/core/dist-types/pagination/createPaginator.d.ts @@ -0,0 +1,7 @@ +import type { PaginationConfiguration, Paginator } from "@smithy/types"; +/** + * @internal + * + * Creates a paginator. 
+ */ +export declare function createPaginator(ClientCtor: any, CommandCtor: any, inputTokenName: string, outputTokenName: string, pageSizeTokenName?: string): (config: PaginationConfigType, input: InputType, ...additionalArguments: any[]) => Paginator; diff --git a/amplify/functions/downloadDocument/node_modules/@smithy/core/dist-types/protocols/requestBuilder.d.ts b/amplify/functions/downloadDocument/node_modules/@smithy/core/dist-types/protocols/requestBuilder.d.ts new file mode 100644 index 0000000..8e2f2ef --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@smithy/core/dist-types/protocols/requestBuilder.d.ts @@ -0,0 +1,5 @@ +/** + * @internal + * Backwards compatibility re-export. + */ +export { requestBuilder } from "@smithy/core/protocols"; diff --git a/amplify/functions/downloadDocument/node_modules/@smithy/core/dist-types/setFeature.d.ts b/amplify/functions/downloadDocument/node_modules/@smithy/core/dist-types/setFeature.d.ts new file mode 100644 index 0000000..279106c --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@smithy/core/dist-types/setFeature.d.ts @@ -0,0 +1,12 @@ +import type { HandlerExecutionContext, SmithyFeatures } from "@smithy/types"; +/** + * @internal + * Indicates to the request context that a given feature is active. + * + * @param context - handler execution context. + * @param feature - readable name of feature. + * @param value - encoding value of feature. This is required because the + * specification asks the library not to include a runtime lookup of all + * the feature identifiers. 
+ */ +export declare function setFeature(context: HandlerExecutionContext, feature: F, value: SmithyFeatures[F]): void; diff --git a/amplify/functions/downloadDocument/node_modules/@smithy/core/dist-types/submodules/cbor/cbor-decode.d.ts b/amplify/functions/downloadDocument/node_modules/@smithy/core/dist-types/submodules/cbor/cbor-decode.d.ts new file mode 100644 index 0000000..baf3961 --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@smithy/core/dist-types/submodules/cbor/cbor-decode.d.ts @@ -0,0 +1,17 @@ +import { CborValueType, Float32, Uint8, Uint32 } from "./cbor-types"; +/** + * @internal + * @param bytes - to be set as the decode source. + * + * Sets the decode bytearray source and its data view. + */ +export declare function setPayload(bytes: Uint8Array): void; +/** + * @internal + * Decodes the data between the two indices. + */ +export declare function decode(at: Uint32, to: Uint32): CborValueType; +/** + * @internal + */ +export declare function bytesToFloat16(a: Uint8, b: Uint8): Float32; diff --git a/amplify/functions/downloadDocument/node_modules/@smithy/core/dist-types/submodules/cbor/cbor-encode.d.ts b/amplify/functions/downloadDocument/node_modules/@smithy/core/dist-types/submodules/cbor/cbor-encode.d.ts new file mode 100644 index 0000000..bfc3328 --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@smithy/core/dist-types/submodules/cbor/cbor-encode.d.ts @@ -0,0 +1,9 @@ +/** + * @internal + */ +export declare function toUint8Array(): Uint8Array; +export declare function resize(size: number): void; +/** + * @param _input - JS data object. 
+ */ +export declare function encode(_input: any): void; diff --git a/amplify/functions/downloadDocument/node_modules/@smithy/core/dist-types/submodules/cbor/cbor-types.d.ts b/amplify/functions/downloadDocument/node_modules/@smithy/core/dist-types/submodules/cbor/cbor-types.d.ts new file mode 100644 index 0000000..dd41338 --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@smithy/core/dist-types/submodules/cbor/cbor-types.d.ts @@ -0,0 +1,64 @@ +/// +/// +export type CborItemType = undefined | boolean | number | bigint | [CborUnstructuredByteStringType, Uint64] | string | CborTagType; +export type CborTagType = { + tag: Uint64 | number; + value: CborValueType; + [tagSymbol]: true; +}; +export type CborUnstructuredByteStringType = Uint8Array; +export type CborListType = Array; +export type CborMapType = Record; +export type CborCollectionType = CborMapType | CborListType; +export type CborValueType = CborItemType | CborCollectionType | any; +export type CborArgumentLength = 1 | 2 | 4 | 8; +export type CborArgumentLengthOffset = 1 | 2 | 3 | 5 | 9; +export type CborOffset = number; +export type Uint8 = number; +export type Uint32 = number; +export type Uint64 = bigint; +export type Float32 = number; +export type Int64 = bigint; +export type Float16Binary = number; +export type Float32Binary = number; +export type CborMajorType = typeof majorUint64 | typeof majorNegativeInt64 | typeof majorUnstructuredByteString | typeof majorUtf8String | typeof majorList | typeof majorMap | typeof majorTag | typeof majorSpecial; +export declare const majorUint64 = 0; +export declare const majorNegativeInt64 = 1; +export declare const majorUnstructuredByteString = 2; +export declare const majorUtf8String = 3; +export declare const majorList = 4; +export declare const majorMap = 5; +export declare const majorTag = 6; +export declare const majorSpecial = 7; +export declare const specialFalse = 20; +export declare const specialTrue = 21; +export declare const specialNull = 
22; +export declare const specialUndefined = 23; +export declare const extendedOneByte = 24; +export declare const extendedFloat16 = 25; +export declare const extendedFloat32 = 26; +export declare const extendedFloat64 = 27; +export declare const minorIndefinite = 31; +export declare function alloc(size: number): Uint8Array | Buffer; +/** + * @public + * + * The presence of this symbol as an object key indicates it should be considered a tag + * for CBOR serialization purposes. + * + * The object must also have the properties "tag" and "value". + */ +export declare const tagSymbol: unique symbol; +/** + * @public + * Applies the tag symbol to the object. + */ +export declare function tag(data: { + tag: number | bigint; + value: any; + [tagSymbol]?: true; +}): { + tag: number | bigint; + value: any; + [tagSymbol]: true; +}; diff --git a/amplify/functions/downloadDocument/node_modules/@smithy/core/dist-types/submodules/cbor/cbor.d.ts b/amplify/functions/downloadDocument/node_modules/@smithy/core/dist-types/submodules/cbor/cbor.d.ts new file mode 100644 index 0000000..7577213 --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@smithy/core/dist-types/submodules/cbor/cbor.d.ts @@ -0,0 +1,26 @@ +/** + * This implementation is synchronous and only implements the parts of CBOR + * specification used by Smithy RPCv2 CBOR protocol. + * + * This cbor serde implementation is derived from AWS SDK for Go's implementation. + * @see https://github.com/aws/smithy-go/tree/main/encoding/cbor + * + * The cbor-x implementation was also instructional: + * @see https://github.com/kriszyp/cbor-x + */ +export declare const cbor: { + deserialize(payload: Uint8Array): any; + serialize(input: any): Uint8Array; + /** + * @public + * @param size - byte length to allocate. + * + * This may be used to garbage collect the CBOR + * shared encoding buffer space, + * e.g. resizeEncodingBuffer(0); + * + * This may also be used to pre-allocate more space for + * CBOR encoding, e.g. 
resizeEncodingBuffer(100_000_000); + */ + resizeEncodingBuffer(size: number): void; +}; diff --git a/amplify/functions/downloadDocument/node_modules/@smithy/core/dist-types/submodules/cbor/index.d.ts b/amplify/functions/downloadDocument/node_modules/@smithy/core/dist-types/submodules/cbor/index.d.ts new file mode 100644 index 0000000..0910d27 --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@smithy/core/dist-types/submodules/cbor/index.d.ts @@ -0,0 +1,3 @@ +export { cbor } from "./cbor"; +export * from "./parseCborBody"; +export { tagSymbol, tag } from "./cbor-types"; diff --git a/amplify/functions/downloadDocument/node_modules/@smithy/core/dist-types/submodules/cbor/parseCborBody.d.ts b/amplify/functions/downloadDocument/node_modules/@smithy/core/dist-types/submodules/cbor/parseCborBody.d.ts new file mode 100644 index 0000000..8811679 --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@smithy/core/dist-types/submodules/cbor/parseCborBody.d.ts @@ -0,0 +1,31 @@ +import { HttpRequest as __HttpRequest } from "@smithy/protocol-http"; +import { HeaderBag as __HeaderBag, HttpResponse, SerdeContext as __SerdeContext, SerdeContext } from "@smithy/types"; +import { tag, tagSymbol } from "./cbor-types"; +/** + * @internal + */ +export declare const parseCborBody: (streamBody: any, context: SerdeContext) => any; +/** + * @internal + */ +export declare const dateToTag: (date: Date) => { + tag: number | bigint; + value: any; + [tagSymbol]: true; +}; +/** + * @internal + */ +export declare const parseCborErrorBody: (errorBody: any, context: SerdeContext) => Promise; +/** + * @internal + */ +export declare const loadSmithyRpcV2CborErrorCode: (output: HttpResponse, data: any) => string | undefined; +/** + * @internal + */ +export declare const checkCborResponse: (response: HttpResponse) => void; +/** + * @internal + */ +export declare const buildHttpRpcRequest: (context: __SerdeContext, headers: __HeaderBag, path: string, resolvedHostname: 
string | undefined, body: any) => Promise<__HttpRequest>; diff --git a/amplify/functions/downloadDocument/node_modules/@smithy/core/dist-types/submodules/protocols/collect-stream-body.d.ts b/amplify/functions/downloadDocument/node_modules/@smithy/core/dist-types/submodules/protocols/collect-stream-body.d.ts new file mode 100644 index 0000000..b555804 --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@smithy/core/dist-types/submodules/protocols/collect-stream-body.d.ts @@ -0,0 +1,10 @@ +import { SerdeContext } from "@smithy/types"; +import { Uint8ArrayBlobAdapter } from "@smithy/util-stream"; +/** + * @internal + * + * Collect low-level response body stream to Uint8Array. + */ +export declare const collectBody: (streamBody: any, context: { + streamCollector: SerdeContext["streamCollector"]; +}) => Promise; diff --git a/amplify/functions/downloadDocument/node_modules/@smithy/core/dist-types/submodules/protocols/extended-encode-uri-component.d.ts b/amplify/functions/downloadDocument/node_modules/@smithy/core/dist-types/submodules/protocols/extended-encode-uri-component.d.ts new file mode 100644 index 0000000..403e9ae --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@smithy/core/dist-types/submodules/protocols/extended-encode-uri-component.d.ts @@ -0,0 +1,7 @@ +/** + * @internal + * + * Function that wraps encodeURIComponent to encode additional characters + * to fully adhere to RFC 3986. 
+ */ +export declare function extendedEncodeURIComponent(str: string): string; diff --git a/amplify/functions/downloadDocument/node_modules/@smithy/core/dist-types/submodules/protocols/index.d.ts b/amplify/functions/downloadDocument/node_modules/@smithy/core/dist-types/submodules/protocols/index.d.ts new file mode 100644 index 0000000..a5de22f --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@smithy/core/dist-types/submodules/protocols/index.d.ts @@ -0,0 +1,4 @@ +export * from "./collect-stream-body"; +export * from "./extended-encode-uri-component"; +export * from "./requestBuilder"; +export * from "./resolve-path"; diff --git a/amplify/functions/downloadDocument/node_modules/@smithy/core/dist-types/submodules/protocols/requestBuilder.d.ts b/amplify/functions/downloadDocument/node_modules/@smithy/core/dist-types/submodules/protocols/requestBuilder.d.ts new file mode 100644 index 0000000..3013d8a --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@smithy/core/dist-types/submodules/protocols/requestBuilder.d.ts @@ -0,0 +1,51 @@ +import { HttpRequest } from "@smithy/protocol-http"; +import type { SerdeContext } from "@smithy/types"; +/** + * @internal + * used in code-generated serde. + */ +export declare function requestBuilder(input: any, context: SerdeContext): RequestBuilder; +/** + * @internal + */ +export declare class RequestBuilder { + private input; + private context; + private query; + private method; + private headers; + private path; + private body; + private hostname; + private resolvePathStack; + constructor(input: any, context: SerdeContext); + build(): Promise; + /** + * Brevity setter for "hostname". + */ + hn(hostname: string): this; + /** + * Brevity initial builder for "basepath". + */ + bp(uriLabel: string): this; + /** + * Brevity incremental builder for "path". 
+ */ + p(memberName: string, labelValueProvider: () => string | undefined, uriLabel: string, isGreedyLabel: boolean): this; + /** + * Brevity setter for "headers". + */ + h(headers: Record): this; + /** + * Brevity setter for "query". + */ + q(query: Record): this; + /** + * Brevity setter for "body". + */ + b(body: any): this; + /** + * Brevity setter for "method". + */ + m(method: string): this; +} diff --git a/amplify/functions/downloadDocument/node_modules/@smithy/core/dist-types/submodules/protocols/resolve-path.d.ts b/amplify/functions/downloadDocument/node_modules/@smithy/core/dist-types/submodules/protocols/resolve-path.d.ts new file mode 100644 index 0000000..03386d6 --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@smithy/core/dist-types/submodules/protocols/resolve-path.d.ts @@ -0,0 +1,4 @@ +/** + * @internal + */ +export declare const resolvedPath: (resolvedPath: string, input: unknown, memberName: string, labelValueProvider: () => string | undefined, uriLabel: string, isGreedyLabel: boolean) => string; diff --git a/amplify/functions/downloadDocument/node_modules/@smithy/core/dist-types/submodules/serde/index.d.ts b/amplify/functions/downloadDocument/node_modules/@smithy/core/dist-types/submodules/serde/index.d.ts new file mode 100644 index 0000000..a70d0dd --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@smithy/core/dist-types/submodules/serde/index.d.ts @@ -0,0 +1 @@ +export * from "./value/NumericValue"; diff --git a/amplify/functions/downloadDocument/node_modules/@smithy/core/dist-types/submodules/serde/value/NumericValue.d.ts b/amplify/functions/downloadDocument/node_modules/@smithy/core/dist-types/submodules/serde/value/NumericValue.d.ts new file mode 100644 index 0000000..c3736fc --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@smithy/core/dist-types/submodules/serde/value/NumericValue.d.ts @@ -0,0 +1,31 @@ +/** + * Types which may be represented by {@link NumericValue}. 
+ * + * There is currently only one option, because BigInteger and Long should + * use JS BigInt directly, and all other numeric types can be contained in JS Number. + * + * @public + */ +export type NumericType = "bigDecimal"; +/** + * Serialization container for Smithy simple types that do not have a + * direct JavaScript runtime representation. + * + * This container does not perform numeric mathematical operations. + * It is a container for discerning a value's true type. + * + * It allows storage of numeric types not representable in JS without + * making a decision on what numeric library to use. + * + * @public + */ +export declare class NumericValue { + readonly string: string; + readonly type: NumericType; + constructor(string: string, type: NumericType); +} +/** + * Serde shortcut. + * @internal + */ +export declare function nv(string: string): NumericValue; diff --git a/amplify/functions/downloadDocument/node_modules/@smithy/core/dist-types/ts3.4/getSmithyContext.d.ts b/amplify/functions/downloadDocument/node_modules/@smithy/core/dist-types/ts3.4/getSmithyContext.d.ts new file mode 100644 index 0000000..14cd7c4 --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@smithy/core/dist-types/ts3.4/getSmithyContext.d.ts @@ -0,0 +1,5 @@ +import { HandlerExecutionContext } from "@smithy/types"; +/** + * @internal + */ +export declare const getSmithyContext: (context: HandlerExecutionContext) => Record; diff --git a/amplify/functions/downloadDocument/node_modules/@smithy/core/dist-types/ts3.4/index.d.ts b/amplify/functions/downloadDocument/node_modules/@smithy/core/dist-types/ts3.4/index.d.ts new file mode 100644 index 0000000..347898d --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@smithy/core/dist-types/ts3.4/index.d.ts @@ -0,0 +1,8 @@ +export * from "./getSmithyContext"; +export * from "./middleware-http-auth-scheme"; +export * from "./middleware-http-signing"; +export * from "./normalizeProvider"; +export { createPaginator 
} from "./pagination/createPaginator"; +export * from "./protocols/requestBuilder"; +export * from "./setFeature"; +export * from "./util-identity-and-auth"; diff --git a/amplify/functions/downloadDocument/node_modules/@smithy/core/dist-types/ts3.4/middleware-http-auth-scheme/getHttpAuthSchemeEndpointRuleSetPlugin.d.ts b/amplify/functions/downloadDocument/node_modules/@smithy/core/dist-types/ts3.4/middleware-http-auth-scheme/getHttpAuthSchemeEndpointRuleSetPlugin.d.ts new file mode 100644 index 0000000..27e2e26 --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@smithy/core/dist-types/ts3.4/middleware-http-auth-scheme/getHttpAuthSchemeEndpointRuleSetPlugin.d.ts @@ -0,0 +1,18 @@ +import { HandlerExecutionContext, HttpAuthSchemeParameters, HttpAuthSchemeParametersProvider, IdentityProviderConfig, Pluggable, RelativeMiddlewareOptions, SerializeHandlerOptions } from "@smithy/types"; +import { PreviouslyResolved } from "./httpAuthSchemeMiddleware"; +/** + * @internal + */ +export declare const httpAuthSchemeEndpointRuleSetMiddlewareOptions: SerializeHandlerOptions & RelativeMiddlewareOptions; +/** + * @internal + */ +interface HttpAuthSchemeEndpointRuleSetPluginOptions { + httpAuthSchemeParametersProvider: HttpAuthSchemeParametersProvider; + identityProviderConfigProvider: (config: TConfig) => Promise; +} +/** + * @internal + */ +export declare const getHttpAuthSchemeEndpointRuleSetPlugin: (config: TConfig & PreviouslyResolved, { httpAuthSchemeParametersProvider, identityProviderConfigProvider, }: HttpAuthSchemeEndpointRuleSetPluginOptions) => Pluggable; +export {}; diff --git a/amplify/functions/downloadDocument/node_modules/@smithy/core/dist-types/ts3.4/middleware-http-auth-scheme/getHttpAuthSchemePlugin.d.ts b/amplify/functions/downloadDocument/node_modules/@smithy/core/dist-types/ts3.4/middleware-http-auth-scheme/getHttpAuthSchemePlugin.d.ts new file mode 100644 index 0000000..531e6ec --- /dev/null +++ 
b/amplify/functions/downloadDocument/node_modules/@smithy/core/dist-types/ts3.4/middleware-http-auth-scheme/getHttpAuthSchemePlugin.d.ts @@ -0,0 +1,18 @@ +import { HandlerExecutionContext, HttpAuthSchemeParameters, HttpAuthSchemeParametersProvider, IdentityProviderConfig, Pluggable, RelativeMiddlewareOptions, SerializeHandlerOptions } from "@smithy/types"; +import { PreviouslyResolved } from "./httpAuthSchemeMiddleware"; +/** + * @internal + */ +export declare const httpAuthSchemeMiddlewareOptions: SerializeHandlerOptions & RelativeMiddlewareOptions; +/** + * @internal + */ +interface HttpAuthSchemePluginOptions { + httpAuthSchemeParametersProvider: HttpAuthSchemeParametersProvider; + identityProviderConfigProvider: (config: TConfig) => Promise; +} +/** + * @internal + */ +export declare const getHttpAuthSchemePlugin: (config: TConfig & PreviouslyResolved, { httpAuthSchemeParametersProvider, identityProviderConfigProvider, }: HttpAuthSchemePluginOptions) => Pluggable; +export {}; diff --git a/amplify/functions/downloadDocument/node_modules/@smithy/core/dist-types/ts3.4/middleware-http-auth-scheme/httpAuthSchemeMiddleware.d.ts b/amplify/functions/downloadDocument/node_modules/@smithy/core/dist-types/ts3.4/middleware-http-auth-scheme/httpAuthSchemeMiddleware.d.ts new file mode 100644 index 0000000..bbeaf5f --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@smithy/core/dist-types/ts3.4/middleware-http-auth-scheme/httpAuthSchemeMiddleware.d.ts @@ -0,0 +1,33 @@ +import { HandlerExecutionContext, HttpAuthScheme, HttpAuthSchemeParameters, HttpAuthSchemeParametersProvider, HttpAuthSchemeProvider, IdentityProviderConfig, Provider, SelectedHttpAuthScheme, SerializeMiddleware, SMITHY_CONTEXT_KEY } from "@smithy/types"; +/** + * @internal + */ +export interface PreviouslyResolved { + authSchemePreference?: Provider; + httpAuthSchemes: HttpAuthScheme[]; + httpAuthSchemeProvider: HttpAuthSchemeProvider; +} +/** + * @internal + */ +interface 
HttpAuthSchemeMiddlewareOptions { + httpAuthSchemeParametersProvider: HttpAuthSchemeParametersProvider; + identityProviderConfigProvider: (config: TConfig) => Promise; +} +/** + * @internal + */ +interface HttpAuthSchemeMiddlewareSmithyContext extends Record { + selectedHttpAuthScheme?: SelectedHttpAuthScheme; +} +/** + * @internal + */ +interface HttpAuthSchemeMiddlewareHandlerExecutionContext extends HandlerExecutionContext { + [SMITHY_CONTEXT_KEY]?: HttpAuthSchemeMiddlewareSmithyContext; +} +/** + * @internal + */ +export declare const httpAuthSchemeMiddleware: (config: TConfig & PreviouslyResolved, mwOptions: HttpAuthSchemeMiddlewareOptions) => SerializeMiddleware; +export {}; diff --git a/amplify/functions/downloadDocument/node_modules/@smithy/core/dist-types/ts3.4/middleware-http-auth-scheme/index.d.ts b/amplify/functions/downloadDocument/node_modules/@smithy/core/dist-types/ts3.4/middleware-http-auth-scheme/index.d.ts new file mode 100644 index 0000000..2f275c5 --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@smithy/core/dist-types/ts3.4/middleware-http-auth-scheme/index.d.ts @@ -0,0 +1,3 @@ +export * from "./httpAuthSchemeMiddleware"; +export * from "./getHttpAuthSchemeEndpointRuleSetPlugin"; +export * from "./getHttpAuthSchemePlugin"; diff --git a/amplify/functions/downloadDocument/node_modules/@smithy/core/dist-types/ts3.4/middleware-http-auth-scheme/resolveAuthOptions.d.ts b/amplify/functions/downloadDocument/node_modules/@smithy/core/dist-types/ts3.4/middleware-http-auth-scheme/resolveAuthOptions.d.ts new file mode 100644 index 0000000..8088683 --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@smithy/core/dist-types/ts3.4/middleware-http-auth-scheme/resolveAuthOptions.d.ts @@ -0,0 +1,10 @@ +import { HttpAuthOption } from "@smithy/types"; +/** + * Resolves list of auth options based on the supported ones, vs the preference list. 
+ * + * @param candidateAuthOptions list of supported auth options selected by the standard + * resolution process (model-based, endpoints 2.0, etc.) + * @param authSchemePreference list of auth schemes preferred by user. + * @returns + */ +export declare const resolveAuthOptions: (candidateAuthOptions: HttpAuthOption[], authSchemePreference: string[]) => HttpAuthOption[]; diff --git a/amplify/functions/downloadDocument/node_modules/@smithy/core/dist-types/ts3.4/middleware-http-signing/getHttpSigningMiddleware.d.ts b/amplify/functions/downloadDocument/node_modules/@smithy/core/dist-types/ts3.4/middleware-http-signing/getHttpSigningMiddleware.d.ts new file mode 100644 index 0000000..a01bb31 --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@smithy/core/dist-types/ts3.4/middleware-http-signing/getHttpSigningMiddleware.d.ts @@ -0,0 +1,9 @@ +import { FinalizeRequestHandlerOptions, Pluggable, RelativeMiddlewareOptions } from "@smithy/types"; +/** + * @internal + */ +export declare const httpSigningMiddlewareOptions: FinalizeRequestHandlerOptions & RelativeMiddlewareOptions; +/** + * @internal + */ +export declare const getHttpSigningPlugin: (config: object) => Pluggable; diff --git a/amplify/functions/downloadDocument/node_modules/@smithy/core/dist-types/ts3.4/middleware-http-signing/httpSigningMiddleware.d.ts b/amplify/functions/downloadDocument/node_modules/@smithy/core/dist-types/ts3.4/middleware-http-signing/httpSigningMiddleware.d.ts new file mode 100644 index 0000000..7a86b0b --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@smithy/core/dist-types/ts3.4/middleware-http-signing/httpSigningMiddleware.d.ts @@ -0,0 +1,5 @@ +import { FinalizeRequestMiddleware } from "@smithy/types"; +/** + * @internal + */ +export declare const httpSigningMiddleware: (config: object) => FinalizeRequestMiddleware; diff --git a/amplify/functions/downloadDocument/node_modules/@smithy/core/dist-types/ts3.4/middleware-http-signing/index.d.ts 
b/amplify/functions/downloadDocument/node_modules/@smithy/core/dist-types/ts3.4/middleware-http-signing/index.d.ts new file mode 100644 index 0000000..578f26d --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@smithy/core/dist-types/ts3.4/middleware-http-signing/index.d.ts @@ -0,0 +1,2 @@ +export * from "./httpSigningMiddleware"; +export * from "./getHttpSigningMiddleware"; diff --git a/amplify/functions/downloadDocument/node_modules/@smithy/core/dist-types/ts3.4/normalizeProvider.d.ts b/amplify/functions/downloadDocument/node_modules/@smithy/core/dist-types/ts3.4/normalizeProvider.d.ts new file mode 100644 index 0000000..594e8fa --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@smithy/core/dist-types/ts3.4/normalizeProvider.d.ts @@ -0,0 +1,7 @@ +import { Provider } from "@smithy/types"; +/** + * @internal + * + * @returns a provider function for the input value if it isn't already one. + */ +export declare const normalizeProvider: (input: T | Provider) => Provider; diff --git a/amplify/functions/downloadDocument/node_modules/@smithy/core/dist-types/ts3.4/pagination/createPaginator.d.ts b/amplify/functions/downloadDocument/node_modules/@smithy/core/dist-types/ts3.4/pagination/createPaginator.d.ts new file mode 100644 index 0000000..50400d8 --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@smithy/core/dist-types/ts3.4/pagination/createPaginator.d.ts @@ -0,0 +1,7 @@ +import { PaginationConfiguration, Paginator } from "@smithy/types"; +/** + * @internal + * + * Creates a paginator. 
+ */ +export declare function createPaginator(ClientCtor: any, CommandCtor: any, inputTokenName: string, outputTokenName: string, pageSizeTokenName?: string): (config: PaginationConfigType, input: InputType, ...additionalArguments: any[]) => Paginator; diff --git a/amplify/functions/downloadDocument/node_modules/@smithy/core/dist-types/ts3.4/protocols/requestBuilder.d.ts b/amplify/functions/downloadDocument/node_modules/@smithy/core/dist-types/ts3.4/protocols/requestBuilder.d.ts new file mode 100644 index 0000000..25459a8 --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@smithy/core/dist-types/ts3.4/protocols/requestBuilder.d.ts @@ -0,0 +1,5 @@ +/** + * @internal + * Backwards compatibility re-export. + */ +export { requestBuilder } from "@smithy/core/protocols"; diff --git a/amplify/functions/downloadDocument/node_modules/@smithy/core/dist-types/ts3.4/setFeature.d.ts b/amplify/functions/downloadDocument/node_modules/@smithy/core/dist-types/ts3.4/setFeature.d.ts new file mode 100644 index 0000000..a1995ab --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@smithy/core/dist-types/ts3.4/setFeature.d.ts @@ -0,0 +1,12 @@ +import { HandlerExecutionContext, SmithyFeatures } from "@smithy/types"; +/** + * @internal + * Indicates to the request context that a given feature is active. + * + * @param context - handler execution context. + * @param feature - readable name of feature. + * @param value - encoding value of feature. This is required because the + * specification asks the library not to include a runtime lookup of all + * the feature identifiers. 
+ */ +export declare function setFeature(context: HandlerExecutionContext, feature: F, value: SmithyFeatures[F]): void; diff --git a/amplify/functions/downloadDocument/node_modules/@smithy/core/dist-types/ts3.4/submodules/cbor/cbor-decode.d.ts b/amplify/functions/downloadDocument/node_modules/@smithy/core/dist-types/ts3.4/submodules/cbor/cbor-decode.d.ts new file mode 100644 index 0000000..9ddc992 --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@smithy/core/dist-types/ts3.4/submodules/cbor/cbor-decode.d.ts @@ -0,0 +1,17 @@ +import { CborValueType, Float32, Uint8, Uint32 } from "./cbor-types"; +/** + * @internal + * @param bytes - to be set as the decode source. + * + * Sets the decode bytearray source and its data view. + */ +export declare function setPayload(bytes: Uint8Array): void; +/** + * @internal + * Decodes the data between the two indices. + */ +export declare function decode(at: Uint32, to: Uint32): CborValueType; +/** + * @internal + */ +export declare function bytesToFloat16(a: Uint8, b: Uint8): Float32; diff --git a/amplify/functions/downloadDocument/node_modules/@smithy/core/dist-types/ts3.4/submodules/cbor/cbor-encode.d.ts b/amplify/functions/downloadDocument/node_modules/@smithy/core/dist-types/ts3.4/submodules/cbor/cbor-encode.d.ts new file mode 100644 index 0000000..83218b5 --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@smithy/core/dist-types/ts3.4/submodules/cbor/cbor-encode.d.ts @@ -0,0 +1,9 @@ +/** + * @internal + */ +export declare function toUint8Array(): Uint8Array; +export declare function resize(size: number): void; +/** + * @param _input - JS data object. 
+ */ +export declare function encode(_input: any): void; diff --git a/amplify/functions/downloadDocument/node_modules/@smithy/core/dist-types/ts3.4/submodules/cbor/cbor-types.d.ts b/amplify/functions/downloadDocument/node_modules/@smithy/core/dist-types/ts3.4/submodules/cbor/cbor-types.d.ts new file mode 100644 index 0000000..e37a6ac --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@smithy/core/dist-types/ts3.4/submodules/cbor/cbor-types.d.ts @@ -0,0 +1,66 @@ +/// +export type CborItemType = undefined | boolean | number | bigint | [ + CborUnstructuredByteStringType, + Uint64 +] | string | CborTagType; +export type CborTagType = { + tag: Uint64 | number; + value: CborValueType; + [tagSymbol]: true; +}; +export type CborUnstructuredByteStringType = Uint8Array; +export type CborListType = Array; +export type CborMapType = Record; +export type CborCollectionType = CborMapType | CborListType; +export type CborValueType = CborItemType | CborCollectionType | any; +export type CborArgumentLength = 1 | 2 | 4 | 8; +export type CborArgumentLengthOffset = 1 | 2 | 3 | 5 | 9; +export type CborOffset = number; +export type Uint8 = number; +export type Uint32 = number; +export type Uint64 = bigint; +export type Float32 = number; +export type Int64 = bigint; +export type Float16Binary = number; +export type Float32Binary = number; +export type CborMajorType = typeof majorUint64 | typeof majorNegativeInt64 | typeof majorUnstructuredByteString | typeof majorUtf8String | typeof majorList | typeof majorMap | typeof majorTag | typeof majorSpecial; +export declare const majorUint64 = 0; +export declare const majorNegativeInt64 = 1; +export declare const majorUnstructuredByteString = 2; +export declare const majorUtf8String = 3; +export declare const majorList = 4; +export declare const majorMap = 5; +export declare const majorTag = 6; +export declare const majorSpecial = 7; +export declare const specialFalse = 20; +export declare const specialTrue = 21; +export declare 
const specialNull = 22; +export declare const specialUndefined = 23; +export declare const extendedOneByte = 24; +export declare const extendedFloat16 = 25; +export declare const extendedFloat32 = 26; +export declare const extendedFloat64 = 27; +export declare const minorIndefinite = 31; +export declare function alloc(size: number): Uint8Array | Buffer; +/** + * @public + * + * The presence of this symbol as an object key indicates it should be considered a tag + * for CBOR serialization purposes. + * + * The object must also have the properties "tag" and "value". + */ +export declare const tagSymbol: unique symbol; +/** + * @public + * Applies the tag symbol to the object. + */ +export declare function tag(data: { + tag: number | bigint; + value: any; + [tagSymbol]?: true; +}): { + tag: number | bigint; + value: any; + [tagSymbol]: true; +}; diff --git a/amplify/functions/downloadDocument/node_modules/@smithy/core/dist-types/ts3.4/submodules/cbor/cbor.d.ts b/amplify/functions/downloadDocument/node_modules/@smithy/core/dist-types/ts3.4/submodules/cbor/cbor.d.ts new file mode 100644 index 0000000..d317890 --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@smithy/core/dist-types/ts3.4/submodules/cbor/cbor.d.ts @@ -0,0 +1,26 @@ +/** + * This implementation is synchronous and only implements the parts of CBOR + * specification used by Smithy RPCv2 CBOR protocol. + * + * This cbor serde implementation is derived from AWS SDK for Go's implementation. + * @see https://github.com/aws/smithy-go/tree/main/encoding/cbor + * + * The cbor-x implementation was also instructional: + * @see https://github.com/kriszyp/cbor-x + */ +export declare const cbor: { + deserialize(payload: Uint8Array): any; + serialize(input: any): Uint8Array; + /** + * @public + * @param size - byte length to allocate. + * + * This may be used to garbage collect the CBOR + * shared encoding buffer space, + * e.g. 
resizeEncodingBuffer(0); + * + * This may also be used to pre-allocate more space for + * CBOR encoding, e.g. resizeEncodingBuffer(100_000_000); + */ + resizeEncodingBuffer(size: number): void; +}; diff --git a/amplify/functions/downloadDocument/node_modules/@smithy/core/dist-types/ts3.4/submodules/cbor/index.d.ts b/amplify/functions/downloadDocument/node_modules/@smithy/core/dist-types/ts3.4/submodules/cbor/index.d.ts new file mode 100644 index 0000000..63e2787 --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@smithy/core/dist-types/ts3.4/submodules/cbor/index.d.ts @@ -0,0 +1,3 @@ +export { cbor } from "./cbor"; +export * from "./parseCborBody"; +export { tagSymbol, tag } from "./cbor-types"; diff --git a/amplify/functions/downloadDocument/node_modules/@smithy/core/dist-types/ts3.4/submodules/cbor/parseCborBody.d.ts b/amplify/functions/downloadDocument/node_modules/@smithy/core/dist-types/ts3.4/submodules/cbor/parseCborBody.d.ts new file mode 100644 index 0000000..90676a2 --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@smithy/core/dist-types/ts3.4/submodules/cbor/parseCborBody.d.ts @@ -0,0 +1,31 @@ +import { HttpRequest as __HttpRequest } from "@smithy/protocol-http"; +import { HeaderBag as __HeaderBag, HttpResponse, SerdeContext as __SerdeContext, SerdeContext } from "@smithy/types"; +import { tag, tagSymbol } from "./cbor-types"; +/** + * @internal + */ +export declare const parseCborBody: (streamBody: any, context: SerdeContext) => any; +/** + * @internal + */ +export declare const dateToTag: (date: Date) => { + tag: number | bigint; + value: any; + [tagSymbol]: true; +}; +/** + * @internal + */ +export declare const parseCborErrorBody: (errorBody: any, context: SerdeContext) => Promise; +/** + * @internal + */ +export declare const loadSmithyRpcV2CborErrorCode: (output: HttpResponse, data: any) => string | undefined; +/** + * @internal + */ +export declare const checkCborResponse: (response: HttpResponse) => void; +/** + 
* @internal + */ +export declare const buildHttpRpcRequest: (context: __SerdeContext, headers: __HeaderBag, path: string, resolvedHostname: string | undefined, body: any) => Promise<__HttpRequest>; diff --git a/amplify/functions/downloadDocument/node_modules/@smithy/core/dist-types/ts3.4/submodules/protocols/collect-stream-body.d.ts b/amplify/functions/downloadDocument/node_modules/@smithy/core/dist-types/ts3.4/submodules/protocols/collect-stream-body.d.ts new file mode 100644 index 0000000..9c5f471 --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@smithy/core/dist-types/ts3.4/submodules/protocols/collect-stream-body.d.ts @@ -0,0 +1,10 @@ +import { SerdeContext } from "@smithy/types"; +import { Uint8ArrayBlobAdapter } from "@smithy/util-stream"; +/** + * @internal + * + * Collect low-level response body stream to Uint8Array. + */ +export declare const collectBody: (streamBody: any, context: { + streamCollector: SerdeContext["streamCollector"]; +}) => Promise; diff --git a/amplify/functions/downloadDocument/node_modules/@smithy/core/dist-types/ts3.4/submodules/protocols/extended-encode-uri-component.d.ts b/amplify/functions/downloadDocument/node_modules/@smithy/core/dist-types/ts3.4/submodules/protocols/extended-encode-uri-component.d.ts new file mode 100644 index 0000000..98c3802 --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@smithy/core/dist-types/ts3.4/submodules/protocols/extended-encode-uri-component.d.ts @@ -0,0 +1,7 @@ +/** + * @internal + * + * Function that wraps encodeURIComponent to encode additional characters + * to fully adhere to RFC 3986. 
+ */ +export declare function extendedEncodeURIComponent(str: string): string; diff --git a/amplify/functions/downloadDocument/node_modules/@smithy/core/dist-types/ts3.4/submodules/protocols/index.d.ts b/amplify/functions/downloadDocument/node_modules/@smithy/core/dist-types/ts3.4/submodules/protocols/index.d.ts new file mode 100644 index 0000000..4ffc290 --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@smithy/core/dist-types/ts3.4/submodules/protocols/index.d.ts @@ -0,0 +1,4 @@ +export * from "./collect-stream-body"; +export * from "./extended-encode-uri-component"; +export * from "./requestBuilder"; +export * from "./resolve-path"; diff --git a/amplify/functions/downloadDocument/node_modules/@smithy/core/dist-types/ts3.4/submodules/protocols/requestBuilder.d.ts b/amplify/functions/downloadDocument/node_modules/@smithy/core/dist-types/ts3.4/submodules/protocols/requestBuilder.d.ts new file mode 100644 index 0000000..0449354 --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@smithy/core/dist-types/ts3.4/submodules/protocols/requestBuilder.d.ts @@ -0,0 +1,51 @@ +import { HttpRequest } from "@smithy/protocol-http"; +import { SerdeContext } from "@smithy/types"; +/** + * @internal + * used in code-generated serde. + */ +export declare function requestBuilder(input: any, context: SerdeContext): RequestBuilder; +/** + * @internal + */ +export declare class RequestBuilder { + private input; + private context; + private query; + private method; + private headers; + private path; + private body; + private hostname; + private resolvePathStack; + constructor(input: any, context: SerdeContext); + build(): Promise; + /** + * Brevity setter for "hostname". + */ + hn(hostname: string): this; + /** + * Brevity initial builder for "basepath". + */ + bp(uriLabel: string): this; + /** + * Brevity incremental builder for "path". 
+ */ + p(memberName: string, labelValueProvider: () => string | undefined, uriLabel: string, isGreedyLabel: boolean): this; + /** + * Brevity setter for "headers". + */ + h(headers: Record): this; + /** + * Brevity setter for "query". + */ + q(query: Record): this; + /** + * Brevity setter for "body". + */ + b(body: any): this; + /** + * Brevity setter for "method". + */ + m(method: string): this; +} diff --git a/amplify/functions/downloadDocument/node_modules/@smithy/core/dist-types/ts3.4/submodules/protocols/resolve-path.d.ts b/amplify/functions/downloadDocument/node_modules/@smithy/core/dist-types/ts3.4/submodules/protocols/resolve-path.d.ts new file mode 100644 index 0000000..4c4c443 --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@smithy/core/dist-types/ts3.4/submodules/protocols/resolve-path.d.ts @@ -0,0 +1,4 @@ +/** + * @internal + */ +export declare const resolvedPath: (resolvedPath: string, input: unknown, memberName: string, labelValueProvider: () => string | undefined, uriLabel: string, isGreedyLabel: boolean) => string; diff --git a/amplify/functions/downloadDocument/node_modules/@smithy/core/dist-types/ts3.4/submodules/serde/index.d.ts b/amplify/functions/downloadDocument/node_modules/@smithy/core/dist-types/ts3.4/submodules/serde/index.d.ts new file mode 100644 index 0000000..3e78075 --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@smithy/core/dist-types/ts3.4/submodules/serde/index.d.ts @@ -0,0 +1 @@ +export * from "./value/NumericValue"; diff --git a/amplify/functions/downloadDocument/node_modules/@smithy/core/dist-types/ts3.4/submodules/serde/value/NumericValue.d.ts b/amplify/functions/downloadDocument/node_modules/@smithy/core/dist-types/ts3.4/submodules/serde/value/NumericValue.d.ts new file mode 100644 index 0000000..00dd3b7 --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@smithy/core/dist-types/ts3.4/submodules/serde/value/NumericValue.d.ts @@ -0,0 +1,31 @@ +/** + * Types which may be 
represented by {@link NumericValue}. + * + * There is currently only one option, because BigInteger and Long should + * use JS BigInt directly, and all other numeric types can be contained in JS Number. + * + * @public + */ +export type NumericType = "bigDecimal"; +/** + * Serialization container for Smithy simple types that do not have a + * direct JavaScript runtime representation. + * + * This container does not perform numeric mathematical operations. + * It is a container for discerning a value's true type. + * + * It allows storage of numeric types not representable in JS without + * making a decision on what numeric library to use. + * + * @public + */ +export declare class NumericValue { + readonly string: string; + readonly type: NumericType; + constructor(string: string, type: NumericType); +} +/** + * Serde shortcut. + * @internal + */ +export declare function nv(string: string): NumericValue; diff --git a/amplify/functions/downloadDocument/node_modules/@smithy/core/dist-types/ts3.4/util-identity-and-auth/DefaultIdentityProviderConfig.d.ts b/amplify/functions/downloadDocument/node_modules/@smithy/core/dist-types/ts3.4/util-identity-and-auth/DefaultIdentityProviderConfig.d.ts new file mode 100644 index 0000000..7e80659 --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@smithy/core/dist-types/ts3.4/util-identity-and-auth/DefaultIdentityProviderConfig.d.ts @@ -0,0 +1,15 @@ +import { HttpAuthSchemeId, Identity, IdentityProvider, IdentityProviderConfig } from "@smithy/types"; +/** + * Default implementation of IdentityProviderConfig + * @internal + */ +export declare class DefaultIdentityProviderConfig implements IdentityProviderConfig { + private authSchemes; + /** + * Creates an IdentityProviderConfig with a record of scheme IDs to identity providers. 
+ * + * @param config scheme IDs and identity providers to configure + */ + constructor(config: Record | undefined>); + getIdentityProvider(schemeId: HttpAuthSchemeId): IdentityProvider | undefined; +} diff --git a/amplify/functions/downloadDocument/node_modules/@smithy/core/dist-types/ts3.4/util-identity-and-auth/httpAuthSchemes/httpApiKeyAuth.d.ts b/amplify/functions/downloadDocument/node_modules/@smithy/core/dist-types/ts3.4/util-identity-and-auth/httpAuthSchemes/httpApiKeyAuth.d.ts new file mode 100644 index 0000000..3981a1b --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@smithy/core/dist-types/ts3.4/util-identity-and-auth/httpAuthSchemes/httpApiKeyAuth.d.ts @@ -0,0 +1,8 @@ +import { HttpRequest } from "@smithy/protocol-http"; +import { ApiKeyIdentity, HttpRequest as IHttpRequest, HttpSigner } from "@smithy/types"; +/** + * @internal + */ +export declare class HttpApiKeyAuthSigner implements HttpSigner { + sign(httpRequest: HttpRequest, identity: ApiKeyIdentity, signingProperties: Record): Promise; +} diff --git a/amplify/functions/downloadDocument/node_modules/@smithy/core/dist-types/ts3.4/util-identity-and-auth/httpAuthSchemes/httpBearerAuth.d.ts b/amplify/functions/downloadDocument/node_modules/@smithy/core/dist-types/ts3.4/util-identity-and-auth/httpAuthSchemes/httpBearerAuth.d.ts new file mode 100644 index 0000000..9c83b1c --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@smithy/core/dist-types/ts3.4/util-identity-and-auth/httpAuthSchemes/httpBearerAuth.d.ts @@ -0,0 +1,8 @@ +import { HttpRequest } from "@smithy/protocol-http"; +import { HttpRequest as IHttpRequest, HttpSigner, TokenIdentity } from "@smithy/types"; +/** + * @internal + */ +export declare class HttpBearerAuthSigner implements HttpSigner { + sign(httpRequest: HttpRequest, identity: TokenIdentity, signingProperties: Record): Promise; +} diff --git 
a/amplify/functions/downloadDocument/node_modules/@smithy/core/dist-types/ts3.4/util-identity-and-auth/httpAuthSchemes/index.d.ts b/amplify/functions/downloadDocument/node_modules/@smithy/core/dist-types/ts3.4/util-identity-and-auth/httpAuthSchemes/index.d.ts new file mode 100644 index 0000000..aa5caa8 --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@smithy/core/dist-types/ts3.4/util-identity-and-auth/httpAuthSchemes/index.d.ts @@ -0,0 +1,3 @@ +export * from "./httpApiKeyAuth"; +export * from "./httpBearerAuth"; +export * from "./noAuth"; diff --git a/amplify/functions/downloadDocument/node_modules/@smithy/core/dist-types/ts3.4/util-identity-and-auth/httpAuthSchemes/noAuth.d.ts b/amplify/functions/downloadDocument/node_modules/@smithy/core/dist-types/ts3.4/util-identity-and-auth/httpAuthSchemes/noAuth.d.ts new file mode 100644 index 0000000..0d7b612 --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@smithy/core/dist-types/ts3.4/util-identity-and-auth/httpAuthSchemes/noAuth.d.ts @@ -0,0 +1,8 @@ +import { HttpRequest, HttpSigner, Identity } from "@smithy/types"; +/** + * Signer for the synthetic @smithy.api#noAuth auth scheme. 
+ * @internal + */ +export declare class NoAuthSigner implements HttpSigner { + sign(httpRequest: HttpRequest, identity: Identity, signingProperties: Record): Promise; +} diff --git a/amplify/functions/downloadDocument/node_modules/@smithy/core/dist-types/ts3.4/util-identity-and-auth/index.d.ts b/amplify/functions/downloadDocument/node_modules/@smithy/core/dist-types/ts3.4/util-identity-and-auth/index.d.ts new file mode 100644 index 0000000..626ade9 --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@smithy/core/dist-types/ts3.4/util-identity-and-auth/index.d.ts @@ -0,0 +1,3 @@ +export * from "./DefaultIdentityProviderConfig"; +export * from "./httpAuthSchemes"; +export * from "./memoizeIdentityProvider"; diff --git a/amplify/functions/downloadDocument/node_modules/@smithy/core/dist-types/ts3.4/util-identity-and-auth/memoizeIdentityProvider.d.ts b/amplify/functions/downloadDocument/node_modules/@smithy/core/dist-types/ts3.4/util-identity-and-auth/memoizeIdentityProvider.d.ts new file mode 100644 index 0000000..270aa71 --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@smithy/core/dist-types/ts3.4/util-identity-and-auth/memoizeIdentityProvider.d.ts @@ -0,0 +1,30 @@ +import { Identity, IdentityProvider } from "@smithy/types"; +/** + * @internal + */ +export declare const createIsIdentityExpiredFunction: (expirationMs: number) => (identity: Identity) => boolean; +/** + * @internal + * This may need to be configurable in the future, but for now it is defaulted to 5min. 
+ */ +export declare const EXPIRATION_MS = 300000; +/** + * @internal + */ +export declare const isIdentityExpired: (identity: Identity) => boolean; +/** + * @internal + */ +export declare const doesIdentityRequireRefresh: (identity: Identity) => boolean; +/** + * @internal + */ +export interface MemoizedIdentityProvider { + (options?: Record & { + forceRefresh?: boolean; + }): Promise; +} +/** + * @internal + */ +export declare const memoizeIdentityProvider: (provider: IdentityT | IdentityProvider | undefined, isExpired: (resolved: Identity) => boolean, requiresRefresh: (resolved: Identity) => boolean) => MemoizedIdentityProvider | undefined; diff --git a/amplify/functions/downloadDocument/node_modules/@smithy/core/dist-types/util-identity-and-auth/DefaultIdentityProviderConfig.d.ts b/amplify/functions/downloadDocument/node_modules/@smithy/core/dist-types/util-identity-and-auth/DefaultIdentityProviderConfig.d.ts new file mode 100644 index 0000000..0b39204 --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@smithy/core/dist-types/util-identity-and-auth/DefaultIdentityProviderConfig.d.ts @@ -0,0 +1,15 @@ +import { HttpAuthSchemeId, Identity, IdentityProvider, IdentityProviderConfig } from "@smithy/types"; +/** + * Default implementation of IdentityProviderConfig + * @internal + */ +export declare class DefaultIdentityProviderConfig implements IdentityProviderConfig { + private authSchemes; + /** + * Creates an IdentityProviderConfig with a record of scheme IDs to identity providers. 
+ * + * @param config scheme IDs and identity providers to configure + */ + constructor(config: Record | undefined>); + getIdentityProvider(schemeId: HttpAuthSchemeId): IdentityProvider | undefined; +} diff --git a/amplify/functions/downloadDocument/node_modules/@smithy/core/dist-types/util-identity-and-auth/httpAuthSchemes/httpApiKeyAuth.d.ts b/amplify/functions/downloadDocument/node_modules/@smithy/core/dist-types/util-identity-and-auth/httpAuthSchemes/httpApiKeyAuth.d.ts new file mode 100644 index 0000000..63de4bc --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@smithy/core/dist-types/util-identity-and-auth/httpAuthSchemes/httpApiKeyAuth.d.ts @@ -0,0 +1,8 @@ +import { HttpRequest } from "@smithy/protocol-http"; +import { ApiKeyIdentity, HttpRequest as IHttpRequest, HttpSigner } from "@smithy/types"; +/** + * @internal + */ +export declare class HttpApiKeyAuthSigner implements HttpSigner { + sign(httpRequest: HttpRequest, identity: ApiKeyIdentity, signingProperties: Record): Promise; +} diff --git a/amplify/functions/downloadDocument/node_modules/@smithy/core/dist-types/util-identity-and-auth/httpAuthSchemes/httpBearerAuth.d.ts b/amplify/functions/downloadDocument/node_modules/@smithy/core/dist-types/util-identity-and-auth/httpAuthSchemes/httpBearerAuth.d.ts new file mode 100644 index 0000000..0e31e7d --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@smithy/core/dist-types/util-identity-and-auth/httpAuthSchemes/httpBearerAuth.d.ts @@ -0,0 +1,8 @@ +import { HttpRequest } from "@smithy/protocol-http"; +import { HttpRequest as IHttpRequest, HttpSigner, TokenIdentity } from "@smithy/types"; +/** + * @internal + */ +export declare class HttpBearerAuthSigner implements HttpSigner { + sign(httpRequest: HttpRequest, identity: TokenIdentity, signingProperties: Record): Promise; +} diff --git a/amplify/functions/downloadDocument/node_modules/@smithy/core/dist-types/util-identity-and-auth/httpAuthSchemes/index.d.ts 
b/amplify/functions/downloadDocument/node_modules/@smithy/core/dist-types/util-identity-and-auth/httpAuthSchemes/index.d.ts new file mode 100644 index 0000000..9d240fe --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@smithy/core/dist-types/util-identity-and-auth/httpAuthSchemes/index.d.ts @@ -0,0 +1,3 @@ +export * from "./httpApiKeyAuth"; +export * from "./httpBearerAuth"; +export * from "./noAuth"; diff --git a/amplify/functions/downloadDocument/node_modules/@smithy/core/dist-types/util-identity-and-auth/httpAuthSchemes/noAuth.d.ts b/amplify/functions/downloadDocument/node_modules/@smithy/core/dist-types/util-identity-and-auth/httpAuthSchemes/noAuth.d.ts new file mode 100644 index 0000000..fc8d6b1 --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@smithy/core/dist-types/util-identity-and-auth/httpAuthSchemes/noAuth.d.ts @@ -0,0 +1,8 @@ +import { HttpRequest, HttpSigner, Identity } from "@smithy/types"; +/** + * Signer for the synthetic @smithy.api#noAuth auth scheme. 
+ * @internal + */ +export declare class NoAuthSigner implements HttpSigner { + sign(httpRequest: HttpRequest, identity: Identity, signingProperties: Record): Promise; +} diff --git a/amplify/functions/downloadDocument/node_modules/@smithy/core/dist-types/util-identity-and-auth/index.d.ts b/amplify/functions/downloadDocument/node_modules/@smithy/core/dist-types/util-identity-and-auth/index.d.ts new file mode 100644 index 0000000..87ba64b --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@smithy/core/dist-types/util-identity-and-auth/index.d.ts @@ -0,0 +1,3 @@ +export * from "./DefaultIdentityProviderConfig"; +export * from "./httpAuthSchemes"; +export * from "./memoizeIdentityProvider"; diff --git a/amplify/functions/downloadDocument/node_modules/@smithy/core/dist-types/util-identity-and-auth/memoizeIdentityProvider.d.ts b/amplify/functions/downloadDocument/node_modules/@smithy/core/dist-types/util-identity-and-auth/memoizeIdentityProvider.d.ts new file mode 100644 index 0000000..67b3be8 --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@smithy/core/dist-types/util-identity-and-auth/memoizeIdentityProvider.d.ts @@ -0,0 +1,30 @@ +import { Identity, IdentityProvider } from "@smithy/types"; +/** + * @internal + */ +export declare const createIsIdentityExpiredFunction: (expirationMs: number) => (identity: Identity) => boolean; +/** + * @internal + * This may need to be configurable in the future, but for now it is defaulted to 5min. 
+ */ +export declare const EXPIRATION_MS = 300000; +/** + * @internal + */ +export declare const isIdentityExpired: (identity: Identity) => boolean; +/** + * @internal + */ +export declare const doesIdentityRequireRefresh: (identity: Identity) => boolean; +/** + * @internal + */ +export interface MemoizedIdentityProvider { + (options?: Record & { + forceRefresh?: boolean; + }): Promise; +} +/** + * @internal + */ +export declare const memoizeIdentityProvider: (provider: IdentityT | IdentityProvider | undefined, isExpired: (resolved: Identity) => boolean, requiresRefresh: (resolved: Identity) => boolean) => MemoizedIdentityProvider | undefined; diff --git a/amplify/functions/downloadDocument/node_modules/@smithy/core/package.json b/amplify/functions/downloadDocument/node_modules/@smithy/core/package.json new file mode 100644 index 0000000..d3776e9 --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@smithy/core/package.json @@ -0,0 +1,112 @@ +{ + "name": "@smithy/core", + "version": "3.3.1", + "scripts": { + "build": "yarn lint && concurrently 'yarn:build:cjs' 'yarn:build:es' 'yarn:build:types && yarn build:types:downlevel'", + "build:cjs": "node ../../scripts/inline core", + "build:es": "yarn g:tsc -p tsconfig.es.json", + "build:types": "yarn g:tsc -p tsconfig.types.json", + "build:types:downlevel": "rimraf dist-types/ts3.4 && downlevel-dts dist-types dist-types/ts3.4", + "stage-release": "rimraf ./.release && yarn pack && mkdir ./.release && tar zxvf ./package.tgz --directory ./.release && rm ./package.tgz", + "clean": "rimraf ./dist-* && rimraf *.tsbuildinfo || exit 0", + "lint": "npx eslint -c ../../.eslintrc.js \"src/**/*.ts\" --fix && node ./scripts/lint", + "format": "prettier --config ../../prettier.config.js --ignore-path ../../.prettierignore --write \"**/*.{ts,md,json}\"", + "test": "yarn g:vitest run", + "test:cbor:perf": "node ./scripts/cbor-perf.mjs", + "test:watch": "yarn g:vitest watch" + }, + "main": "./dist-cjs/index.js", + 
"module": "./dist-es/index.js", + "types": "./dist-types/index.d.ts", + "exports": { + ".": { + "module": "./dist-es/index.js", + "node": "./dist-cjs/index.js", + "import": "./dist-es/index.js", + "require": "./dist-cjs/index.js", + "types": "./dist-types/index.d.ts" + }, + "./package.json": { + "module": "./package.json", + "node": "./package.json", + "import": "./package.json", + "require": "./package.json" + }, + "./cbor": { + "module": "./dist-es/submodules/cbor/index.js", + "node": "./dist-cjs/submodules/cbor/index.js", + "import": "./dist-es/submodules/cbor/index.js", + "require": "./dist-cjs/submodules/cbor/index.js", + "types": "./dist-types/submodules/cbor/index.d.ts" + }, + "./protocols": { + "module": "./dist-es/submodules/protocols/index.js", + "node": "./dist-cjs/submodules/protocols/index.js", + "import": "./dist-es/submodules/protocols/index.js", + "require": "./dist-cjs/submodules/protocols/index.js", + "types": "./dist-types/submodules/protocols/index.d.ts" + }, + "./serde": { + "module": "./dist-es/submodules/serde/index.js", + "node": "./dist-cjs/submodules/serde/index.js", + "import": "./dist-es/submodules/serde/index.js", + "require": "./dist-cjs/submodules/serde/index.js", + "types": "./dist-types/submodules/serde/index.d.ts" + } + }, + "author": { + "name": "AWS Smithy Team", + "email": "", + "url": "https://smithy.io" + }, + "license": "Apache-2.0", + "dependencies": { + "@smithy/middleware-serde": "^4.0.3", + "@smithy/protocol-http": "^5.1.0", + "@smithy/types": "^4.2.0", + "@smithy/util-body-length-browser": "^4.0.0", + "@smithy/util-middleware": "^4.0.2", + "@smithy/util-stream": "^4.2.0", + "@smithy/util-utf8": "^4.0.0", + "tslib": "^2.6.2" + }, + "engines": { + "node": ">=18.0.0" + }, + "typesVersions": { + "<4.0": { + "dist-types/*": [ + "dist-types/ts3.4/*" + ] + } + }, + "files": [ + "./cbor.d.ts", + "./cbor.js", + "./protocols.d.ts", + "./protocols.js", + "./serde.d.ts", + "./serde.js", + "dist-*/**" + ], + "homepage": 
"https://github.com/smithy-lang/smithy-typescript/tree/main/packages/core", + "repository": { + "type": "git", + "url": "https://github.com/smithy-lang/smithy-typescript.git", + "directory": "packages/core" + }, + "devDependencies": { + "@types/node": "^18.11.9", + "concurrently": "7.0.0", + "downlevel-dts": "0.10.1", + "json-bigint": "^1.0.0", + "rimraf": "3.0.2", + "typedoc": "0.23.23" + }, + "typedoc": { + "entryPoint": "src/index.ts" + }, + "publishConfig": { + "directory": ".release/package" + } +} \ No newline at end of file diff --git a/amplify/functions/downloadDocument/node_modules/@smithy/core/protocols.d.ts b/amplify/functions/downloadDocument/node_modules/@smithy/core/protocols.d.ts new file mode 100644 index 0000000..e0afd4e --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@smithy/core/protocols.d.ts @@ -0,0 +1,7 @@ +/** + * Do not edit: + * This is a compatibility redirect for contexts that do not understand package.json exports field. + */ +declare module "@smithy/core/protocols" { + export * from "@smithy/core/dist-types/submodules/protocols/index.d"; +} diff --git a/amplify/functions/downloadDocument/node_modules/@smithy/core/protocols.js b/amplify/functions/downloadDocument/node_modules/@smithy/core/protocols.js new file mode 100644 index 0000000..43e0c42 --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@smithy/core/protocols.js @@ -0,0 +1,6 @@ + +/** + * Do not edit: + * This is a compatibility redirect for contexts that do not understand package.json exports field. 
+ */ +module.exports = require("./dist-cjs/submodules/protocols/index.js"); diff --git a/amplify/functions/downloadDocument/node_modules/@smithy/core/serde.d.ts b/amplify/functions/downloadDocument/node_modules/@smithy/core/serde.d.ts new file mode 100644 index 0000000..9906bb0 --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@smithy/core/serde.d.ts @@ -0,0 +1,7 @@ +/** + * Do not edit: + * This is a compatibility redirect for contexts that do not understand package.json exports field. + */ +declare module "@smithy/core/serde" { + export * from "@smithy/core/dist-types/submodules/serde/index.d"; +} diff --git a/amplify/functions/downloadDocument/node_modules/@smithy/core/serde.js b/amplify/functions/downloadDocument/node_modules/@smithy/core/serde.js new file mode 100644 index 0000000..b2d727f --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@smithy/core/serde.js @@ -0,0 +1,6 @@ + +/** + * Do not edit: + * This is a compatibility redirect for contexts that do not understand package.json exports field. + */ +module.exports = require("./dist-cjs/submodules/serde/index.js"); diff --git a/amplify/functions/downloadDocument/node_modules/@smithy/credential-provider-imds/LICENSE b/amplify/functions/downloadDocument/node_modules/@smithy/credential-provider-imds/LICENSE new file mode 100644 index 0000000..7b6491b --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@smithy/credential-provider-imds/LICENSE @@ -0,0 +1,201 @@ +Apache License + Version 2.0, January 2004 + http://www.apache.org/licenses/ + + TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION + + 1. Definitions. + + "License" shall mean the terms and conditions for use, reproduction, + and distribution as defined by Sections 1 through 9 of this document. + + "Licensor" shall mean the copyright owner or entity authorized by + the copyright owner that is granting the License. 
+ + "Legal Entity" shall mean the union of the acting entity and all + other entities that control, are controlled by, or are under common + control with that entity. For the purposes of this definition, + "control" means (i) the power, direct or indirect, to cause the + direction or management of such entity, whether by contract or + otherwise, or (ii) ownership of fifty percent (50%) or more of the + outstanding shares, or (iii) beneficial ownership of such entity. + + "You" (or "Your") shall mean an individual or Legal Entity + exercising permissions granted by this License. + + "Source" form shall mean the preferred form for making modifications, + including but not limited to software source code, documentation + source, and configuration files. + + "Object" form shall mean any form resulting from mechanical + transformation or translation of a Source form, including but + not limited to compiled object code, generated documentation, + and conversions to other media types. + + "Work" shall mean the work of authorship, whether in Source or + Object form, made available under the License, as indicated by a + copyright notice that is included in or attached to the work + (an example is provided in the Appendix below). + + "Derivative Works" shall mean any work, whether in Source or Object + form, that is based on (or derived from) the Work and for which the + editorial revisions, annotations, elaborations, or other modifications + represent, as a whole, an original work of authorship. For the purposes + of this License, Derivative Works shall not include works that remain + separable from, or merely link (or bind by name) to the interfaces of, + the Work and Derivative Works thereof. 
+ + "Contribution" shall mean any work of authorship, including + the original version of the Work and any modifications or additions + to that Work or Derivative Works thereof, that is intentionally + submitted to Licensor for inclusion in the Work by the copyright owner + or by an individual or Legal Entity authorized to submit on behalf of + the copyright owner. For the purposes of this definition, "submitted" + means any form of electronic, verbal, or written communication sent + to the Licensor or its representatives, including but not limited to + communication on electronic mailing lists, source code control systems, + and issue tracking systems that are managed by, or on behalf of, the + Licensor for the purpose of discussing and improving the Work, but + excluding communication that is conspicuously marked or otherwise + designated in writing by the copyright owner as "Not a Contribution." + + "Contributor" shall mean Licensor and any individual or Legal Entity + on behalf of whom a Contribution has been received by Licensor and + subsequently incorporated within the Work. + + 2. Grant of Copyright License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + copyright license to reproduce, prepare Derivative Works of, + publicly display, publicly perform, sublicense, and distribute the + Work and such Derivative Works in Source or Object form. + + 3. Grant of Patent License. 
Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + (except as stated in this section) patent license to make, have made, + use, offer to sell, sell, import, and otherwise transfer the Work, + where such license applies only to those patent claims licensable + by such Contributor that are necessarily infringed by their + Contribution(s) alone or by combination of their Contribution(s) + with the Work to which such Contribution(s) was submitted. If You + institute patent litigation against any entity (including a + cross-claim or counterclaim in a lawsuit) alleging that the Work + or a Contribution incorporated within the Work constitutes direct + or contributory patent infringement, then any patent licenses + granted to You under this License for that Work shall terminate + as of the date such litigation is filed. + + 4. Redistribution. You may reproduce and distribute copies of the + Work or Derivative Works thereof in any medium, with or without + modifications, and in Source or Object form, provided that You + meet the following conditions: + + (a) You must give any other recipients of the Work or + Derivative Works a copy of this License; and + + (b) You must cause any modified files to carry prominent notices + stating that You changed the files; and + + (c) You must retain, in the Source form of any Derivative Works + that You distribute, all copyright, patent, trademark, and + attribution notices from the Source form of the Work, + excluding those notices that do not pertain to any part of + the Derivative Works; and + + (d) If the Work includes a "NOTICE" text file as part of its + distribution, then any Derivative Works that You distribute must + include a readable copy of the attribution notices contained + within such NOTICE file, excluding those notices that do not + pertain to any part of the Derivative Works, in at least one + of 
the following places: within a NOTICE text file distributed + as part of the Derivative Works; within the Source form or + documentation, if provided along with the Derivative Works; or, + within a display generated by the Derivative Works, if and + wherever such third-party notices normally appear. The contents + of the NOTICE file are for informational purposes only and + do not modify the License. You may add Your own attribution + notices within Derivative Works that You distribute, alongside + or as an addendum to the NOTICE text from the Work, provided + that such additional attribution notices cannot be construed + as modifying the License. + + You may add Your own copyright statement to Your modifications and + may provide additional or different license terms and conditions + for use, reproduction, or distribution of Your modifications, or + for any such Derivative Works as a whole, provided Your use, + reproduction, and distribution of the Work otherwise complies with + the conditions stated in this License. + + 5. Submission of Contributions. Unless You explicitly state otherwise, + any Contribution intentionally submitted for inclusion in the Work + by You to the Licensor shall be under the terms and conditions of + this License, without any additional terms or conditions. + Notwithstanding the above, nothing herein shall supersede or modify + the terms of any separate license agreement you may have executed + with Licensor regarding such Contributions. + + 6. Trademarks. This License does not grant permission to use the trade + names, trademarks, service marks, or product names of the Licensor, + except as required for reasonable and customary use in describing the + origin of the Work and reproducing the content of the NOTICE file. + + 7. Disclaimer of Warranty. 
Unless required by applicable law or + agreed to in writing, Licensor provides the Work (and each + Contributor provides its Contributions) on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or + implied, including, without limitation, any warranties or conditions + of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A + PARTICULAR PURPOSE. You are solely responsible for determining the + appropriateness of using or redistributing the Work and assume any + risks associated with Your exercise of permissions under this License. + + 8. Limitation of Liability. In no event and under no legal theory, + whether in tort (including negligence), contract, or otherwise, + unless required by applicable law (such as deliberate and grossly + negligent acts) or agreed to in writing, shall any Contributor be + liable to You for damages, including any direct, indirect, special, + incidental, or consequential damages of any character arising as a + result of this License or out of the use or inability to use the + Work (including but not limited to damages for loss of goodwill, + work stoppage, computer failure or malfunction, or any and all + other commercial damages or losses), even if such Contributor + has been advised of the possibility of such damages. + + 9. Accepting Warranty or Additional Liability. While redistributing + the Work or Derivative Works thereof, You may choose to offer, + and charge a fee for, acceptance of support, warranty, indemnity, + or other liability obligations and/or rights consistent with this + License. However, in accepting such obligations, You may act only + on Your own behalf and on Your sole responsibility, not on behalf + of any other Contributor, and only if You agree to indemnify, + defend, and hold each Contributor harmless for any liability + incurred by, or claims asserted against, such Contributor by reason + of your accepting any such warranty or additional liability. 
+ + END OF TERMS AND CONDITIONS + + APPENDIX: How to apply the Apache License to your work. + + To apply the Apache License to your work, attach the following + boilerplate notice, with the fields enclosed by brackets "{}" + replaced with your own identifying information. (Don't include + the brackets!) The text should be enclosed in the appropriate + comment syntax for the file format. We also recommend that a + file or class name and description of purpose be included on the + same "printed page" as the copyright notice for easier + identification within third-party archives. + + Copyright 2018-2020 Amazon.com, Inc. or its affiliates. All Rights Reserved. + + Licensed under the Apache License, Version 2.0 (the "License"); + you may not use this file except in compliance with the License. + You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + + Unless required by applicable law or agreed to in writing, software + distributed under the License is distributed on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + See the License for the specific language governing permissions and + limitations under the License. 
\ No newline at end of file diff --git a/amplify/functions/downloadDocument/node_modules/@smithy/credential-provider-imds/README.md b/amplify/functions/downloadDocument/node_modules/@smithy/credential-provider-imds/README.md new file mode 100644 index 0000000..9a8f8a5 --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@smithy/credential-provider-imds/README.md @@ -0,0 +1,11 @@ +# @smithy/credential-provider-imds + +[![NPM version](https://img.shields.io/npm/v/@smithy/credential-provider-imds/latest.svg)](https://www.npmjs.com/package/@smithy/credential-provider-imds) +[![NPM downloads](https://img.shields.io/npm/dm/@smithy/credential-provider-imds.svg)](https://www.npmjs.com/package/@smithy/credential-provider-imds) + +> An internal package + +## Usage + +You probably shouldn't, at least directly. Please use [@smithy/credential-providers](https://www.npmjs.com/package/@smithy/credential-providers) +instead. diff --git a/amplify/functions/downloadDocument/node_modules/@smithy/credential-provider-imds/dist-cjs/config/Endpoint.js b/amplify/functions/downloadDocument/node_modules/@smithy/credential-provider-imds/dist-cjs/config/Endpoint.js new file mode 100644 index 0000000..0440577 --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@smithy/credential-provider-imds/dist-cjs/config/Endpoint.js @@ -0,0 +1 @@ +module.exports = require("../index.js"); \ No newline at end of file diff --git a/amplify/functions/downloadDocument/node_modules/@smithy/credential-provider-imds/dist-cjs/config/EndpointConfigOptions.js b/amplify/functions/downloadDocument/node_modules/@smithy/credential-provider-imds/dist-cjs/config/EndpointConfigOptions.js new file mode 100644 index 0000000..0440577 --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@smithy/credential-provider-imds/dist-cjs/config/EndpointConfigOptions.js @@ -0,0 +1 @@ +module.exports = require("../index.js"); \ No newline at end of file diff --git 
a/amplify/functions/downloadDocument/node_modules/@smithy/credential-provider-imds/dist-cjs/config/EndpointMode.js b/amplify/functions/downloadDocument/node_modules/@smithy/credential-provider-imds/dist-cjs/config/EndpointMode.js new file mode 100644 index 0000000..0440577 --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@smithy/credential-provider-imds/dist-cjs/config/EndpointMode.js @@ -0,0 +1 @@ +module.exports = require("../index.js"); \ No newline at end of file diff --git a/amplify/functions/downloadDocument/node_modules/@smithy/credential-provider-imds/dist-cjs/config/EndpointModeConfigOptions.js b/amplify/functions/downloadDocument/node_modules/@smithy/credential-provider-imds/dist-cjs/config/EndpointModeConfigOptions.js new file mode 100644 index 0000000..0440577 --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@smithy/credential-provider-imds/dist-cjs/config/EndpointModeConfigOptions.js @@ -0,0 +1 @@ +module.exports = require("../index.js"); \ No newline at end of file diff --git a/amplify/functions/downloadDocument/node_modules/@smithy/credential-provider-imds/dist-cjs/error/InstanceMetadataV1FallbackError.js b/amplify/functions/downloadDocument/node_modules/@smithy/credential-provider-imds/dist-cjs/error/InstanceMetadataV1FallbackError.js new file mode 100644 index 0000000..0440577 --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@smithy/credential-provider-imds/dist-cjs/error/InstanceMetadataV1FallbackError.js @@ -0,0 +1 @@ +module.exports = require("../index.js"); \ No newline at end of file diff --git a/amplify/functions/downloadDocument/node_modules/@smithy/credential-provider-imds/dist-cjs/fromContainerMetadata.js b/amplify/functions/downloadDocument/node_modules/@smithy/credential-provider-imds/dist-cjs/fromContainerMetadata.js new file mode 100644 index 0000000..532e610 --- /dev/null +++ 
b/amplify/functions/downloadDocument/node_modules/@smithy/credential-provider-imds/dist-cjs/fromContainerMetadata.js @@ -0,0 +1 @@ +module.exports = require("./index.js"); \ No newline at end of file diff --git a/amplify/functions/downloadDocument/node_modules/@smithy/credential-provider-imds/dist-cjs/fromInstanceMetadata.js b/amplify/functions/downloadDocument/node_modules/@smithy/credential-provider-imds/dist-cjs/fromInstanceMetadata.js new file mode 100644 index 0000000..532e610 --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@smithy/credential-provider-imds/dist-cjs/fromInstanceMetadata.js @@ -0,0 +1 @@ +module.exports = require("./index.js"); \ No newline at end of file diff --git a/amplify/functions/downloadDocument/node_modules/@smithy/credential-provider-imds/dist-cjs/index.js b/amplify/functions/downloadDocument/node_modules/@smithy/credential-provider-imds/dist-cjs/index.js new file mode 100644 index 0000000..21b3423 --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@smithy/credential-provider-imds/dist-cjs/index.js @@ -0,0 +1,445 @@ +var __defProp = Object.defineProperty; +var __getOwnPropDesc = Object.getOwnPropertyDescriptor; +var __getOwnPropNames = Object.getOwnPropertyNames; +var __hasOwnProp = Object.prototype.hasOwnProperty; +var __name = (target, value) => __defProp(target, "name", { value, configurable: true }); +var __export = (target, all) => { + for (var name in all) + __defProp(target, name, { get: all[name], enumerable: true }); +}; +var __copyProps = (to, from, except, desc) => { + if (from && typeof from === "object" || typeof from === "function") { + for (let key of __getOwnPropNames(from)) + if (!__hasOwnProp.call(to, key) && key !== except) + __defProp(to, key, { get: () => from[key], enumerable: !(desc = __getOwnPropDesc(from, key)) || desc.enumerable }); + } + return to; +}; +var __toCommonJS = (mod) => __copyProps(__defProp({}, "__esModule", { value: true }), mod); + +// src/index.ts +var 
src_exports = {}; +__export(src_exports, { + DEFAULT_MAX_RETRIES: () => DEFAULT_MAX_RETRIES, + DEFAULT_TIMEOUT: () => DEFAULT_TIMEOUT, + ENV_CMDS_AUTH_TOKEN: () => ENV_CMDS_AUTH_TOKEN, + ENV_CMDS_FULL_URI: () => ENV_CMDS_FULL_URI, + ENV_CMDS_RELATIVE_URI: () => ENV_CMDS_RELATIVE_URI, + Endpoint: () => Endpoint, + fromContainerMetadata: () => fromContainerMetadata, + fromInstanceMetadata: () => fromInstanceMetadata, + getInstanceMetadataEndpoint: () => getInstanceMetadataEndpoint, + httpRequest: () => httpRequest, + providerConfigFromInit: () => providerConfigFromInit +}); +module.exports = __toCommonJS(src_exports); + +// src/fromContainerMetadata.ts + +var import_url = require("url"); + +// src/remoteProvider/httpRequest.ts +var import_property_provider = require("@smithy/property-provider"); +var import_buffer = require("buffer"); +var import_http = require("http"); +function httpRequest(options) { + return new Promise((resolve, reject) => { + const req = (0, import_http.request)({ + method: "GET", + ...options, + // Node.js http module doesn't accept hostname with square brackets + // Refs: https://github.com/nodejs/node/issues/39738 + hostname: options.hostname?.replace(/^\[(.+)\]$/, "$1") + }); + req.on("error", (err) => { + reject(Object.assign(new import_property_provider.ProviderError("Unable to connect to instance metadata service"), err)); + req.destroy(); + }); + req.on("timeout", () => { + reject(new import_property_provider.ProviderError("TimeoutError from instance metadata service")); + req.destroy(); + }); + req.on("response", (res) => { + const { statusCode = 400 } = res; + if (statusCode < 200 || 300 <= statusCode) { + reject( + Object.assign(new import_property_provider.ProviderError("Error response received from instance metadata service"), { statusCode }) + ); + req.destroy(); + } + const chunks = []; + res.on("data", (chunk) => { + chunks.push(chunk); + }); + res.on("end", () => { + resolve(import_buffer.Buffer.concat(chunks)); + req.destroy(); 
+ }); + }); + req.end(); + }); +} +__name(httpRequest, "httpRequest"); + +// src/remoteProvider/ImdsCredentials.ts +var isImdsCredentials = /* @__PURE__ */ __name((arg) => Boolean(arg) && typeof arg === "object" && typeof arg.AccessKeyId === "string" && typeof arg.SecretAccessKey === "string" && typeof arg.Token === "string" && typeof arg.Expiration === "string", "isImdsCredentials"); +var fromImdsCredentials = /* @__PURE__ */ __name((creds) => ({ + accessKeyId: creds.AccessKeyId, + secretAccessKey: creds.SecretAccessKey, + sessionToken: creds.Token, + expiration: new Date(creds.Expiration), + ...creds.AccountId && { accountId: creds.AccountId } +}), "fromImdsCredentials"); + +// src/remoteProvider/RemoteProviderInit.ts +var DEFAULT_TIMEOUT = 1e3; +var DEFAULT_MAX_RETRIES = 0; +var providerConfigFromInit = /* @__PURE__ */ __name(({ + maxRetries = DEFAULT_MAX_RETRIES, + timeout = DEFAULT_TIMEOUT +}) => ({ maxRetries, timeout }), "providerConfigFromInit"); + +// src/remoteProvider/retry.ts +var retry = /* @__PURE__ */ __name((toRetry, maxRetries) => { + let promise = toRetry(); + for (let i = 0; i < maxRetries; i++) { + promise = promise.catch(toRetry); + } + return promise; +}, "retry"); + +// src/fromContainerMetadata.ts +var ENV_CMDS_FULL_URI = "AWS_CONTAINER_CREDENTIALS_FULL_URI"; +var ENV_CMDS_RELATIVE_URI = "AWS_CONTAINER_CREDENTIALS_RELATIVE_URI"; +var ENV_CMDS_AUTH_TOKEN = "AWS_CONTAINER_AUTHORIZATION_TOKEN"; +var fromContainerMetadata = /* @__PURE__ */ __name((init = {}) => { + const { timeout, maxRetries } = providerConfigFromInit(init); + return () => retry(async () => { + const requestOptions = await getCmdsUri({ logger: init.logger }); + const credsResponse = JSON.parse(await requestFromEcsImds(timeout, requestOptions)); + if (!isImdsCredentials(credsResponse)) { + throw new import_property_provider.CredentialsProviderError("Invalid response received from instance metadata service.", { + logger: init.logger + }); + } + return 
fromImdsCredentials(credsResponse); + }, maxRetries); +}, "fromContainerMetadata"); +var requestFromEcsImds = /* @__PURE__ */ __name(async (timeout, options) => { + if (process.env[ENV_CMDS_AUTH_TOKEN]) { + options.headers = { + ...options.headers, + Authorization: process.env[ENV_CMDS_AUTH_TOKEN] + }; + } + const buffer = await httpRequest({ + ...options, + timeout + }); + return buffer.toString(); +}, "requestFromEcsImds"); +var CMDS_IP = "169.254.170.2"; +var GREENGRASS_HOSTS = { + localhost: true, + "127.0.0.1": true +}; +var GREENGRASS_PROTOCOLS = { + "http:": true, + "https:": true +}; +var getCmdsUri = /* @__PURE__ */ __name(async ({ logger }) => { + if (process.env[ENV_CMDS_RELATIVE_URI]) { + return { + hostname: CMDS_IP, + path: process.env[ENV_CMDS_RELATIVE_URI] + }; + } + if (process.env[ENV_CMDS_FULL_URI]) { + const parsed = (0, import_url.parse)(process.env[ENV_CMDS_FULL_URI]); + if (!parsed.hostname || !(parsed.hostname in GREENGRASS_HOSTS)) { + throw new import_property_provider.CredentialsProviderError(`${parsed.hostname} is not a valid container metadata service hostname`, { + tryNextLink: false, + logger + }); + } + if (!parsed.protocol || !(parsed.protocol in GREENGRASS_PROTOCOLS)) { + throw new import_property_provider.CredentialsProviderError(`${parsed.protocol} is not a valid container metadata service protocol`, { + tryNextLink: false, + logger + }); + } + return { + ...parsed, + port: parsed.port ? 
parseInt(parsed.port, 10) : void 0 + }; + } + throw new import_property_provider.CredentialsProviderError( + `The container metadata credential provider cannot be used unless the ${ENV_CMDS_RELATIVE_URI} or ${ENV_CMDS_FULL_URI} environment variable is set`, + { + tryNextLink: false, + logger + } + ); +}, "getCmdsUri"); + +// src/fromInstanceMetadata.ts + + + +// src/error/InstanceMetadataV1FallbackError.ts + +var InstanceMetadataV1FallbackError = class _InstanceMetadataV1FallbackError extends import_property_provider.CredentialsProviderError { + constructor(message, tryNextLink = true) { + super(message, tryNextLink); + this.tryNextLink = tryNextLink; + this.name = "InstanceMetadataV1FallbackError"; + Object.setPrototypeOf(this, _InstanceMetadataV1FallbackError.prototype); + } + static { + __name(this, "InstanceMetadataV1FallbackError"); + } +}; + +// src/utils/getInstanceMetadataEndpoint.ts +var import_node_config_provider = require("@smithy/node-config-provider"); +var import_url_parser = require("@smithy/url-parser"); + +// src/config/Endpoint.ts +var Endpoint = /* @__PURE__ */ ((Endpoint2) => { + Endpoint2["IPv4"] = "http://169.254.169.254"; + Endpoint2["IPv6"] = "http://[fd00:ec2::254]"; + return Endpoint2; +})(Endpoint || {}); + +// src/config/EndpointConfigOptions.ts +var ENV_ENDPOINT_NAME = "AWS_EC2_METADATA_SERVICE_ENDPOINT"; +var CONFIG_ENDPOINT_NAME = "ec2_metadata_service_endpoint"; +var ENDPOINT_CONFIG_OPTIONS = { + environmentVariableSelector: (env) => env[ENV_ENDPOINT_NAME], + configFileSelector: (profile) => profile[CONFIG_ENDPOINT_NAME], + default: void 0 +}; + +// src/config/EndpointMode.ts +var EndpointMode = /* @__PURE__ */ ((EndpointMode2) => { + EndpointMode2["IPv4"] = "IPv4"; + EndpointMode2["IPv6"] = "IPv6"; + return EndpointMode2; +})(EndpointMode || {}); + +// src/config/EndpointModeConfigOptions.ts +var ENV_ENDPOINT_MODE_NAME = "AWS_EC2_METADATA_SERVICE_ENDPOINT_MODE"; +var CONFIG_ENDPOINT_MODE_NAME = "ec2_metadata_service_endpoint_mode"; 
+var ENDPOINT_MODE_CONFIG_OPTIONS = { + environmentVariableSelector: (env) => env[ENV_ENDPOINT_MODE_NAME], + configFileSelector: (profile) => profile[CONFIG_ENDPOINT_MODE_NAME], + default: "IPv4" /* IPv4 */ +}; + +// src/utils/getInstanceMetadataEndpoint.ts +var getInstanceMetadataEndpoint = /* @__PURE__ */ __name(async () => (0, import_url_parser.parseUrl)(await getFromEndpointConfig() || await getFromEndpointModeConfig()), "getInstanceMetadataEndpoint"); +var getFromEndpointConfig = /* @__PURE__ */ __name(async () => (0, import_node_config_provider.loadConfig)(ENDPOINT_CONFIG_OPTIONS)(), "getFromEndpointConfig"); +var getFromEndpointModeConfig = /* @__PURE__ */ __name(async () => { + const endpointMode = await (0, import_node_config_provider.loadConfig)(ENDPOINT_MODE_CONFIG_OPTIONS)(); + switch (endpointMode) { + case "IPv4" /* IPv4 */: + return "http://169.254.169.254" /* IPv4 */; + case "IPv6" /* IPv6 */: + return "http://[fd00:ec2::254]" /* IPv6 */; + default: + throw new Error(`Unsupported endpoint mode: ${endpointMode}. Select from ${Object.values(EndpointMode)}`); + } +}, "getFromEndpointModeConfig"); + +// src/utils/getExtendedInstanceMetadataCredentials.ts +var STATIC_STABILITY_REFRESH_INTERVAL_SECONDS = 5 * 60; +var STATIC_STABILITY_REFRESH_INTERVAL_JITTER_WINDOW_SECONDS = 5 * 60; +var STATIC_STABILITY_DOC_URL = "https://docs.aws.amazon.com/sdkref/latest/guide/feature-static-credentials.html"; +var getExtendedInstanceMetadataCredentials = /* @__PURE__ */ __name((credentials, logger) => { + const refreshInterval = STATIC_STABILITY_REFRESH_INTERVAL_SECONDS + Math.floor(Math.random() * STATIC_STABILITY_REFRESH_INTERVAL_JITTER_WINDOW_SECONDS); + const newExpiration = new Date(Date.now() + refreshInterval * 1e3); + logger.warn( + `Attempting credential expiration extension due to a credential service availability issue. A refresh of these credentials will be attempted after ${new Date(newExpiration)}. 
+For more information, please visit: ` + STATIC_STABILITY_DOC_URL + ); + const originalExpiration = credentials.originalExpiration ?? credentials.expiration; + return { + ...credentials, + ...originalExpiration ? { originalExpiration } : {}, + expiration: newExpiration + }; +}, "getExtendedInstanceMetadataCredentials"); + +// src/utils/staticStabilityProvider.ts +var staticStabilityProvider = /* @__PURE__ */ __name((provider, options = {}) => { + const logger = options?.logger || console; + let pastCredentials; + return async () => { + let credentials; + try { + credentials = await provider(); + if (credentials.expiration && credentials.expiration.getTime() < Date.now()) { + credentials = getExtendedInstanceMetadataCredentials(credentials, logger); + } + } catch (e) { + if (pastCredentials) { + logger.warn("Credential renew failed: ", e); + credentials = getExtendedInstanceMetadataCredentials(pastCredentials, logger); + } else { + throw e; + } + } + pastCredentials = credentials; + return credentials; + }; +}, "staticStabilityProvider"); + +// src/fromInstanceMetadata.ts +var IMDS_PATH = "/latest/meta-data/iam/security-credentials/"; +var IMDS_TOKEN_PATH = "/latest/api/token"; +var AWS_EC2_METADATA_V1_DISABLED = "AWS_EC2_METADATA_V1_DISABLED"; +var PROFILE_AWS_EC2_METADATA_V1_DISABLED = "ec2_metadata_v1_disabled"; +var X_AWS_EC2_METADATA_TOKEN = "x-aws-ec2-metadata-token"; +var fromInstanceMetadata = /* @__PURE__ */ __name((init = {}) => staticStabilityProvider(getInstanceMetadataProvider(init), { logger: init.logger }), "fromInstanceMetadata"); +var getInstanceMetadataProvider = /* @__PURE__ */ __name((init = {}) => { + let disableFetchToken = false; + const { logger, profile } = init; + const { timeout, maxRetries } = providerConfigFromInit(init); + const getCredentials = /* @__PURE__ */ __name(async (maxRetries2, options) => { + const isImdsV1Fallback = disableFetchToken || options.headers?.[X_AWS_EC2_METADATA_TOKEN] == null; + if (isImdsV1Fallback) { + let 
fallbackBlockedFromProfile = false; + let fallbackBlockedFromProcessEnv = false; + const configValue = await (0, import_node_config_provider.loadConfig)( + { + environmentVariableSelector: (env) => { + const envValue = env[AWS_EC2_METADATA_V1_DISABLED]; + fallbackBlockedFromProcessEnv = !!envValue && envValue !== "false"; + if (envValue === void 0) { + throw new import_property_provider.CredentialsProviderError( + `${AWS_EC2_METADATA_V1_DISABLED} not set in env, checking config file next.`, + { logger: init.logger } + ); + } + return fallbackBlockedFromProcessEnv; + }, + configFileSelector: (profile2) => { + const profileValue = profile2[PROFILE_AWS_EC2_METADATA_V1_DISABLED]; + fallbackBlockedFromProfile = !!profileValue && profileValue !== "false"; + return fallbackBlockedFromProfile; + }, + default: false + }, + { + profile + } + )(); + if (init.ec2MetadataV1Disabled || configValue) { + const causes = []; + if (init.ec2MetadataV1Disabled) + causes.push("credential provider initialization (runtime option ec2MetadataV1Disabled)"); + if (fallbackBlockedFromProfile) + causes.push(`config file profile (${PROFILE_AWS_EC2_METADATA_V1_DISABLED})`); + if (fallbackBlockedFromProcessEnv) + causes.push(`process environment variable (${AWS_EC2_METADATA_V1_DISABLED})`); + throw new InstanceMetadataV1FallbackError( + `AWS EC2 Metadata v1 fallback has been blocked by AWS SDK configuration in the following: [${causes.join( + ", " + )}].` + ); + } + } + const imdsProfile = (await retry(async () => { + let profile2; + try { + profile2 = await getProfile(options); + } catch (err) { + if (err.statusCode === 401) { + disableFetchToken = false; + } + throw err; + } + return profile2; + }, maxRetries2)).trim(); + return retry(async () => { + let creds; + try { + creds = await getCredentialsFromProfile(imdsProfile, options, init); + } catch (err) { + if (err.statusCode === 401) { + disableFetchToken = false; + } + throw err; + } + return creds; + }, maxRetries2); + }, "getCredentials"); 
+ return async () => { + const endpoint = await getInstanceMetadataEndpoint(); + if (disableFetchToken) { + logger?.debug("AWS SDK Instance Metadata", "using v1 fallback (no token fetch)"); + return getCredentials(maxRetries, { ...endpoint, timeout }); + } else { + let token; + try { + token = (await getMetadataToken({ ...endpoint, timeout })).toString(); + } catch (error) { + if (error?.statusCode === 400) { + throw Object.assign(error, { + message: "EC2 Metadata token request returned error" + }); + } else if (error.message === "TimeoutError" || [403, 404, 405].includes(error.statusCode)) { + disableFetchToken = true; + } + logger?.debug("AWS SDK Instance Metadata", "using v1 fallback (initial)"); + return getCredentials(maxRetries, { ...endpoint, timeout }); + } + return getCredentials(maxRetries, { + ...endpoint, + headers: { + [X_AWS_EC2_METADATA_TOKEN]: token + }, + timeout + }); + } + }; +}, "getInstanceMetadataProvider"); +var getMetadataToken = /* @__PURE__ */ __name(async (options) => httpRequest({ + ...options, + path: IMDS_TOKEN_PATH, + method: "PUT", + headers: { + "x-aws-ec2-metadata-token-ttl-seconds": "21600" + } +}), "getMetadataToken"); +var getProfile = /* @__PURE__ */ __name(async (options) => (await httpRequest({ ...options, path: IMDS_PATH })).toString(), "getProfile"); +var getCredentialsFromProfile = /* @__PURE__ */ __name(async (profile, options, init) => { + const credentialsResponse = JSON.parse( + (await httpRequest({ + ...options, + path: IMDS_PATH + profile + })).toString() + ); + if (!isImdsCredentials(credentialsResponse)) { + throw new import_property_provider.CredentialsProviderError("Invalid response received from instance metadata service.", { + logger: init.logger + }); + } + return fromImdsCredentials(credentialsResponse); +}, "getCredentialsFromProfile"); +// Annotate the CommonJS export names for ESM import in node: + +0 && (module.exports = { + httpRequest, + getInstanceMetadataEndpoint, + Endpoint, + ENV_CMDS_FULL_URI, + 
ENV_CMDS_RELATIVE_URI, + ENV_CMDS_AUTH_TOKEN, + fromContainerMetadata, + fromInstanceMetadata, + DEFAULT_TIMEOUT, + DEFAULT_MAX_RETRIES, + providerConfigFromInit +}); + diff --git a/amplify/functions/downloadDocument/node_modules/@smithy/credential-provider-imds/dist-cjs/remoteProvider/ImdsCredentials.js b/amplify/functions/downloadDocument/node_modules/@smithy/credential-provider-imds/dist-cjs/remoteProvider/ImdsCredentials.js new file mode 100644 index 0000000..0440577 --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@smithy/credential-provider-imds/dist-cjs/remoteProvider/ImdsCredentials.js @@ -0,0 +1 @@ +module.exports = require("../index.js"); \ No newline at end of file diff --git a/amplify/functions/downloadDocument/node_modules/@smithy/credential-provider-imds/dist-cjs/remoteProvider/RemoteProviderInit.js b/amplify/functions/downloadDocument/node_modules/@smithy/credential-provider-imds/dist-cjs/remoteProvider/RemoteProviderInit.js new file mode 100644 index 0000000..0440577 --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@smithy/credential-provider-imds/dist-cjs/remoteProvider/RemoteProviderInit.js @@ -0,0 +1 @@ +module.exports = require("../index.js"); \ No newline at end of file diff --git a/amplify/functions/downloadDocument/node_modules/@smithy/credential-provider-imds/dist-cjs/remoteProvider/httpRequest.js b/amplify/functions/downloadDocument/node_modules/@smithy/credential-provider-imds/dist-cjs/remoteProvider/httpRequest.js new file mode 100644 index 0000000..0440577 --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@smithy/credential-provider-imds/dist-cjs/remoteProvider/httpRequest.js @@ -0,0 +1 @@ +module.exports = require("../index.js"); \ No newline at end of file diff --git a/amplify/functions/downloadDocument/node_modules/@smithy/credential-provider-imds/dist-cjs/remoteProvider/index.js 
b/amplify/functions/downloadDocument/node_modules/@smithy/credential-provider-imds/dist-cjs/remoteProvider/index.js new file mode 100644 index 0000000..0440577 --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@smithy/credential-provider-imds/dist-cjs/remoteProvider/index.js @@ -0,0 +1 @@ +module.exports = require("../index.js"); \ No newline at end of file diff --git a/amplify/functions/downloadDocument/node_modules/@smithy/credential-provider-imds/dist-cjs/remoteProvider/retry.js b/amplify/functions/downloadDocument/node_modules/@smithy/credential-provider-imds/dist-cjs/remoteProvider/retry.js new file mode 100644 index 0000000..0440577 --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@smithy/credential-provider-imds/dist-cjs/remoteProvider/retry.js @@ -0,0 +1 @@ +module.exports = require("../index.js"); \ No newline at end of file diff --git a/amplify/functions/downloadDocument/node_modules/@smithy/credential-provider-imds/dist-cjs/types.js b/amplify/functions/downloadDocument/node_modules/@smithy/credential-provider-imds/dist-cjs/types.js new file mode 100644 index 0000000..532e610 --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@smithy/credential-provider-imds/dist-cjs/types.js @@ -0,0 +1 @@ +module.exports = require("./index.js"); \ No newline at end of file diff --git a/amplify/functions/downloadDocument/node_modules/@smithy/credential-provider-imds/dist-cjs/utils/getExtendedInstanceMetadataCredentials.js b/amplify/functions/downloadDocument/node_modules/@smithy/credential-provider-imds/dist-cjs/utils/getExtendedInstanceMetadataCredentials.js new file mode 100644 index 0000000..0440577 --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@smithy/credential-provider-imds/dist-cjs/utils/getExtendedInstanceMetadataCredentials.js @@ -0,0 +1 @@ +module.exports = require("../index.js"); \ No newline at end of file diff --git 
a/amplify/functions/downloadDocument/node_modules/@smithy/credential-provider-imds/dist-cjs/utils/getInstanceMetadataEndpoint.js b/amplify/functions/downloadDocument/node_modules/@smithy/credential-provider-imds/dist-cjs/utils/getInstanceMetadataEndpoint.js new file mode 100644 index 0000000..0440577 --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@smithy/credential-provider-imds/dist-cjs/utils/getInstanceMetadataEndpoint.js @@ -0,0 +1 @@ +module.exports = require("../index.js"); \ No newline at end of file diff --git a/amplify/functions/downloadDocument/node_modules/@smithy/credential-provider-imds/dist-cjs/utils/staticStabilityProvider.js b/amplify/functions/downloadDocument/node_modules/@smithy/credential-provider-imds/dist-cjs/utils/staticStabilityProvider.js new file mode 100644 index 0000000..0440577 --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@smithy/credential-provider-imds/dist-cjs/utils/staticStabilityProvider.js @@ -0,0 +1 @@ +module.exports = require("../index.js"); \ No newline at end of file diff --git a/amplify/functions/downloadDocument/node_modules/@smithy/credential-provider-imds/dist-es/config/Endpoint.js b/amplify/functions/downloadDocument/node_modules/@smithy/credential-provider-imds/dist-es/config/Endpoint.js new file mode 100644 index 0000000..b088eb0 --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@smithy/credential-provider-imds/dist-es/config/Endpoint.js @@ -0,0 +1,5 @@ +export var Endpoint; +(function (Endpoint) { + Endpoint["IPv4"] = "http://169.254.169.254"; + Endpoint["IPv6"] = "http://[fd00:ec2::254]"; +})(Endpoint || (Endpoint = {})); diff --git a/amplify/functions/downloadDocument/node_modules/@smithy/credential-provider-imds/dist-es/config/EndpointConfigOptions.js b/amplify/functions/downloadDocument/node_modules/@smithy/credential-provider-imds/dist-es/config/EndpointConfigOptions.js new file mode 100644 index 0000000..f043de9 --- /dev/null +++ 
b/amplify/functions/downloadDocument/node_modules/@smithy/credential-provider-imds/dist-es/config/EndpointConfigOptions.js @@ -0,0 +1,7 @@ +export const ENV_ENDPOINT_NAME = "AWS_EC2_METADATA_SERVICE_ENDPOINT"; +export const CONFIG_ENDPOINT_NAME = "ec2_metadata_service_endpoint"; +export const ENDPOINT_CONFIG_OPTIONS = { + environmentVariableSelector: (env) => env[ENV_ENDPOINT_NAME], + configFileSelector: (profile) => profile[CONFIG_ENDPOINT_NAME], + default: undefined, +}; diff --git a/amplify/functions/downloadDocument/node_modules/@smithy/credential-provider-imds/dist-es/config/EndpointMode.js b/amplify/functions/downloadDocument/node_modules/@smithy/credential-provider-imds/dist-es/config/EndpointMode.js new file mode 100644 index 0000000..bace819 --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@smithy/credential-provider-imds/dist-es/config/EndpointMode.js @@ -0,0 +1,5 @@ +export var EndpointMode; +(function (EndpointMode) { + EndpointMode["IPv4"] = "IPv4"; + EndpointMode["IPv6"] = "IPv6"; +})(EndpointMode || (EndpointMode = {})); diff --git a/amplify/functions/downloadDocument/node_modules/@smithy/credential-provider-imds/dist-es/config/EndpointModeConfigOptions.js b/amplify/functions/downloadDocument/node_modules/@smithy/credential-provider-imds/dist-es/config/EndpointModeConfigOptions.js new file mode 100644 index 0000000..15b19d0 --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@smithy/credential-provider-imds/dist-es/config/EndpointModeConfigOptions.js @@ -0,0 +1,8 @@ +import { EndpointMode } from "./EndpointMode"; +export const ENV_ENDPOINT_MODE_NAME = "AWS_EC2_METADATA_SERVICE_ENDPOINT_MODE"; +export const CONFIG_ENDPOINT_MODE_NAME = "ec2_metadata_service_endpoint_mode"; +export const ENDPOINT_MODE_CONFIG_OPTIONS = { + environmentVariableSelector: (env) => env[ENV_ENDPOINT_MODE_NAME], + configFileSelector: (profile) => profile[CONFIG_ENDPOINT_MODE_NAME], + default: EndpointMode.IPv4, +}; diff --git 
a/amplify/functions/downloadDocument/node_modules/@smithy/credential-provider-imds/dist-es/error/InstanceMetadataV1FallbackError.js b/amplify/functions/downloadDocument/node_modules/@smithy/credential-provider-imds/dist-es/error/InstanceMetadataV1FallbackError.js new file mode 100644 index 0000000..29aaf50 --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@smithy/credential-provider-imds/dist-es/error/InstanceMetadataV1FallbackError.js @@ -0,0 +1,9 @@ +import { CredentialsProviderError } from "@smithy/property-provider"; +export class InstanceMetadataV1FallbackError extends CredentialsProviderError { + constructor(message, tryNextLink = true) { + super(message, tryNextLink); + this.tryNextLink = tryNextLink; + this.name = "InstanceMetadataV1FallbackError"; + Object.setPrototypeOf(this, InstanceMetadataV1FallbackError.prototype); + } +} diff --git a/amplify/functions/downloadDocument/node_modules/@smithy/credential-provider-imds/dist-es/fromContainerMetadata.js b/amplify/functions/downloadDocument/node_modules/@smithy/credential-provider-imds/dist-es/fromContainerMetadata.js new file mode 100644 index 0000000..4340e3e --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@smithy/credential-provider-imds/dist-es/fromContainerMetadata.js @@ -0,0 +1,77 @@ +import { CredentialsProviderError } from "@smithy/property-provider"; +import { parse } from "url"; +import { httpRequest } from "./remoteProvider/httpRequest"; +import { fromImdsCredentials, isImdsCredentials } from "./remoteProvider/ImdsCredentials"; +import { providerConfigFromInit } from "./remoteProvider/RemoteProviderInit"; +import { retry } from "./remoteProvider/retry"; +export const ENV_CMDS_FULL_URI = "AWS_CONTAINER_CREDENTIALS_FULL_URI"; +export const ENV_CMDS_RELATIVE_URI = "AWS_CONTAINER_CREDENTIALS_RELATIVE_URI"; +export const ENV_CMDS_AUTH_TOKEN = "AWS_CONTAINER_AUTHORIZATION_TOKEN"; +export const fromContainerMetadata = (init = {}) => { + const { timeout, maxRetries } = 
providerConfigFromInit(init); + return () => retry(async () => { + const requestOptions = await getCmdsUri({ logger: init.logger }); + const credsResponse = JSON.parse(await requestFromEcsImds(timeout, requestOptions)); + if (!isImdsCredentials(credsResponse)) { + throw new CredentialsProviderError("Invalid response received from instance metadata service.", { + logger: init.logger, + }); + } + return fromImdsCredentials(credsResponse); + }, maxRetries); +}; +const requestFromEcsImds = async (timeout, options) => { + if (process.env[ENV_CMDS_AUTH_TOKEN]) { + options.headers = { + ...options.headers, + Authorization: process.env[ENV_CMDS_AUTH_TOKEN], + }; + } + const buffer = await httpRequest({ + ...options, + timeout, + }); + return buffer.toString(); +}; +const CMDS_IP = "169.254.170.2"; +const GREENGRASS_HOSTS = { + localhost: true, + "127.0.0.1": true, +}; +const GREENGRASS_PROTOCOLS = { + "http:": true, + "https:": true, +}; +const getCmdsUri = async ({ logger }) => { + if (process.env[ENV_CMDS_RELATIVE_URI]) { + return { + hostname: CMDS_IP, + path: process.env[ENV_CMDS_RELATIVE_URI], + }; + } + if (process.env[ENV_CMDS_FULL_URI]) { + const parsed = parse(process.env[ENV_CMDS_FULL_URI]); + if (!parsed.hostname || !(parsed.hostname in GREENGRASS_HOSTS)) { + throw new CredentialsProviderError(`${parsed.hostname} is not a valid container metadata service hostname`, { + tryNextLink: false, + logger, + }); + } + if (!parsed.protocol || !(parsed.protocol in GREENGRASS_PROTOCOLS)) { + throw new CredentialsProviderError(`${parsed.protocol} is not a valid container metadata service protocol`, { + tryNextLink: false, + logger, + }); + } + return { + ...parsed, + port: parsed.port ? 
parseInt(parsed.port, 10) : undefined, + }; + } + throw new CredentialsProviderError("The container metadata credential provider cannot be used unless" + + ` the ${ENV_CMDS_RELATIVE_URI} or ${ENV_CMDS_FULL_URI} environment` + + " variable is set", { + tryNextLink: false, + logger, + }); +}; diff --git a/amplify/functions/downloadDocument/node_modules/@smithy/credential-provider-imds/dist-es/fromInstanceMetadata.js b/amplify/functions/downloadDocument/node_modules/@smithy/credential-provider-imds/dist-es/fromInstanceMetadata.js new file mode 100644 index 0000000..24ecbfd --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@smithy/credential-provider-imds/dist-es/fromInstanceMetadata.js @@ -0,0 +1,134 @@ +import { loadConfig } from "@smithy/node-config-provider"; +import { CredentialsProviderError } from "@smithy/property-provider"; +import { InstanceMetadataV1FallbackError } from "./error/InstanceMetadataV1FallbackError"; +import { httpRequest } from "./remoteProvider/httpRequest"; +import { fromImdsCredentials, isImdsCredentials } from "./remoteProvider/ImdsCredentials"; +import { providerConfigFromInit } from "./remoteProvider/RemoteProviderInit"; +import { retry } from "./remoteProvider/retry"; +import { getInstanceMetadataEndpoint } from "./utils/getInstanceMetadataEndpoint"; +import { staticStabilityProvider } from "./utils/staticStabilityProvider"; +const IMDS_PATH = "/latest/meta-data/iam/security-credentials/"; +const IMDS_TOKEN_PATH = "/latest/api/token"; +const AWS_EC2_METADATA_V1_DISABLED = "AWS_EC2_METADATA_V1_DISABLED"; +const PROFILE_AWS_EC2_METADATA_V1_DISABLED = "ec2_metadata_v1_disabled"; +const X_AWS_EC2_METADATA_TOKEN = "x-aws-ec2-metadata-token"; +export const fromInstanceMetadata = (init = {}) => staticStabilityProvider(getInstanceMetadataProvider(init), { logger: init.logger }); +const getInstanceMetadataProvider = (init = {}) => { + let disableFetchToken = false; + const { logger, profile } = init; + const { timeout, maxRetries 
} = providerConfigFromInit(init); + const getCredentials = async (maxRetries, options) => { + const isImdsV1Fallback = disableFetchToken || options.headers?.[X_AWS_EC2_METADATA_TOKEN] == null; + if (isImdsV1Fallback) { + let fallbackBlockedFromProfile = false; + let fallbackBlockedFromProcessEnv = false; + const configValue = await loadConfig({ + environmentVariableSelector: (env) => { + const envValue = env[AWS_EC2_METADATA_V1_DISABLED]; + fallbackBlockedFromProcessEnv = !!envValue && envValue !== "false"; + if (envValue === undefined) { + throw new CredentialsProviderError(`${AWS_EC2_METADATA_V1_DISABLED} not set in env, checking config file next.`, { logger: init.logger }); + } + return fallbackBlockedFromProcessEnv; + }, + configFileSelector: (profile) => { + const profileValue = profile[PROFILE_AWS_EC2_METADATA_V1_DISABLED]; + fallbackBlockedFromProfile = !!profileValue && profileValue !== "false"; + return fallbackBlockedFromProfile; + }, + default: false, + }, { + profile, + })(); + if (init.ec2MetadataV1Disabled || configValue) { + const causes = []; + if (init.ec2MetadataV1Disabled) + causes.push("credential provider initialization (runtime option ec2MetadataV1Disabled)"); + if (fallbackBlockedFromProfile) + causes.push(`config file profile (${PROFILE_AWS_EC2_METADATA_V1_DISABLED})`); + if (fallbackBlockedFromProcessEnv) + causes.push(`process environment variable (${AWS_EC2_METADATA_V1_DISABLED})`); + throw new InstanceMetadataV1FallbackError(`AWS EC2 Metadata v1 fallback has been blocked by AWS SDK configuration in the following: [${causes.join(", ")}].`); + } + } + const imdsProfile = (await retry(async () => { + let profile; + try { + profile = await getProfile(options); + } + catch (err) { + if (err.statusCode === 401) { + disableFetchToken = false; + } + throw err; + } + return profile; + }, maxRetries)).trim(); + return retry(async () => { + let creds; + try { + creds = await getCredentialsFromProfile(imdsProfile, options, init); + } + catch (err) { 
+ if (err.statusCode === 401) { + disableFetchToken = false; + } + throw err; + } + return creds; + }, maxRetries); + }; + return async () => { + const endpoint = await getInstanceMetadataEndpoint(); + if (disableFetchToken) { + logger?.debug("AWS SDK Instance Metadata", "using v1 fallback (no token fetch)"); + return getCredentials(maxRetries, { ...endpoint, timeout }); + } + else { + let token; + try { + token = (await getMetadataToken({ ...endpoint, timeout })).toString(); + } + catch (error) { + if (error?.statusCode === 400) { + throw Object.assign(error, { + message: "EC2 Metadata token request returned error", + }); + } + else if (error.message === "TimeoutError" || [403, 404, 405].includes(error.statusCode)) { + disableFetchToken = true; + } + logger?.debug("AWS SDK Instance Metadata", "using v1 fallback (initial)"); + return getCredentials(maxRetries, { ...endpoint, timeout }); + } + return getCredentials(maxRetries, { + ...endpoint, + headers: { + [X_AWS_EC2_METADATA_TOKEN]: token, + }, + timeout, + }); + } + }; +}; +const getMetadataToken = async (options) => httpRequest({ + ...options, + path: IMDS_TOKEN_PATH, + method: "PUT", + headers: { + "x-aws-ec2-metadata-token-ttl-seconds": "21600", + }, +}); +const getProfile = async (options) => (await httpRequest({ ...options, path: IMDS_PATH })).toString(); +const getCredentialsFromProfile = async (profile, options, init) => { + const credentialsResponse = JSON.parse((await httpRequest({ + ...options, + path: IMDS_PATH + profile, + })).toString()); + if (!isImdsCredentials(credentialsResponse)) { + throw new CredentialsProviderError("Invalid response received from instance metadata service.", { + logger: init.logger, + }); + } + return fromImdsCredentials(credentialsResponse); +}; diff --git a/amplify/functions/downloadDocument/node_modules/@smithy/credential-provider-imds/dist-es/index.js b/amplify/functions/downloadDocument/node_modules/@smithy/credential-provider-imds/dist-es/index.js new file mode 100644 
index 0000000..5362760 --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@smithy/credential-provider-imds/dist-es/index.js @@ -0,0 +1,7 @@ +export * from "./fromContainerMetadata"; +export * from "./fromInstanceMetadata"; +export * from "./remoteProvider/RemoteProviderInit"; +export * from "./types"; +export { httpRequest } from "./remoteProvider/httpRequest"; +export { getInstanceMetadataEndpoint } from "./utils/getInstanceMetadataEndpoint"; +export { Endpoint } from "./config/Endpoint"; diff --git a/amplify/functions/downloadDocument/node_modules/@smithy/credential-provider-imds/dist-es/remoteProvider/ImdsCredentials.js b/amplify/functions/downloadDocument/node_modules/@smithy/credential-provider-imds/dist-es/remoteProvider/ImdsCredentials.js new file mode 100644 index 0000000..c559c4f --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@smithy/credential-provider-imds/dist-es/remoteProvider/ImdsCredentials.js @@ -0,0 +1,13 @@ +export const isImdsCredentials = (arg) => Boolean(arg) && + typeof arg === "object" && + typeof arg.AccessKeyId === "string" && + typeof arg.SecretAccessKey === "string" && + typeof arg.Token === "string" && + typeof arg.Expiration === "string"; +export const fromImdsCredentials = (creds) => ({ + accessKeyId: creds.AccessKeyId, + secretAccessKey: creds.SecretAccessKey, + sessionToken: creds.Token, + expiration: new Date(creds.Expiration), + ...(creds.AccountId && { accountId: creds.AccountId }), +}); diff --git a/amplify/functions/downloadDocument/node_modules/@smithy/credential-provider-imds/dist-es/remoteProvider/RemoteProviderInit.js b/amplify/functions/downloadDocument/node_modules/@smithy/credential-provider-imds/dist-es/remoteProvider/RemoteProviderInit.js new file mode 100644 index 0000000..39ace38 --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@smithy/credential-provider-imds/dist-es/remoteProvider/RemoteProviderInit.js @@ -0,0 +1,3 @@ +export const DEFAULT_TIMEOUT = 1000; 
+export const DEFAULT_MAX_RETRIES = 0; +export const providerConfigFromInit = ({ maxRetries = DEFAULT_MAX_RETRIES, timeout = DEFAULT_TIMEOUT, }) => ({ maxRetries, timeout }); diff --git a/amplify/functions/downloadDocument/node_modules/@smithy/credential-provider-imds/dist-es/remoteProvider/httpRequest.js b/amplify/functions/downloadDocument/node_modules/@smithy/credential-provider-imds/dist-es/remoteProvider/httpRequest.js new file mode 100644 index 0000000..91742d0 --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@smithy/credential-provider-imds/dist-es/remoteProvider/httpRequest.js @@ -0,0 +1,36 @@ +import { ProviderError } from "@smithy/property-provider"; +import { Buffer } from "buffer"; +import { request } from "http"; +export function httpRequest(options) { + return new Promise((resolve, reject) => { + const req = request({ + method: "GET", + ...options, + hostname: options.hostname?.replace(/^\[(.+)\]$/, "$1"), + }); + req.on("error", (err) => { + reject(Object.assign(new ProviderError("Unable to connect to instance metadata service"), err)); + req.destroy(); + }); + req.on("timeout", () => { + reject(new ProviderError("TimeoutError from instance metadata service")); + req.destroy(); + }); + req.on("response", (res) => { + const { statusCode = 400 } = res; + if (statusCode < 200 || 300 <= statusCode) { + reject(Object.assign(new ProviderError("Error response received from instance metadata service"), { statusCode })); + req.destroy(); + } + const chunks = []; + res.on("data", (chunk) => { + chunks.push(chunk); + }); + res.on("end", () => { + resolve(Buffer.concat(chunks)); + req.destroy(); + }); + }); + req.end(); + }); +} diff --git a/amplify/functions/downloadDocument/node_modules/@smithy/credential-provider-imds/dist-es/remoteProvider/index.js b/amplify/functions/downloadDocument/node_modules/@smithy/credential-provider-imds/dist-es/remoteProvider/index.js new file mode 100644 index 0000000..d4ad601 --- /dev/null +++ 
b/amplify/functions/downloadDocument/node_modules/@smithy/credential-provider-imds/dist-es/remoteProvider/index.js @@ -0,0 +1,2 @@ +export * from "./ImdsCredentials"; +export * from "./RemoteProviderInit"; diff --git a/amplify/functions/downloadDocument/node_modules/@smithy/credential-provider-imds/dist-es/remoteProvider/retry.js b/amplify/functions/downloadDocument/node_modules/@smithy/credential-provider-imds/dist-es/remoteProvider/retry.js new file mode 100644 index 0000000..22b79bb --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@smithy/credential-provider-imds/dist-es/remoteProvider/retry.js @@ -0,0 +1,7 @@ +export const retry = (toRetry, maxRetries) => { + let promise = toRetry(); + for (let i = 0; i < maxRetries; i++) { + promise = promise.catch(toRetry); + } + return promise; +}; diff --git a/amplify/functions/downloadDocument/node_modules/@smithy/credential-provider-imds/dist-es/types.js b/amplify/functions/downloadDocument/node_modules/@smithy/credential-provider-imds/dist-es/types.js new file mode 100644 index 0000000..cb0ff5c --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@smithy/credential-provider-imds/dist-es/types.js @@ -0,0 +1 @@ +export {}; diff --git a/amplify/functions/downloadDocument/node_modules/@smithy/credential-provider-imds/dist-es/utils/getExtendedInstanceMetadataCredentials.js b/amplify/functions/downloadDocument/node_modules/@smithy/credential-provider-imds/dist-es/utils/getExtendedInstanceMetadataCredentials.js new file mode 100644 index 0000000..5614692 --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@smithy/credential-provider-imds/dist-es/utils/getExtendedInstanceMetadataCredentials.js @@ -0,0 +1,17 @@ +const STATIC_STABILITY_REFRESH_INTERVAL_SECONDS = 5 * 60; +const STATIC_STABILITY_REFRESH_INTERVAL_JITTER_WINDOW_SECONDS = 5 * 60; +const STATIC_STABILITY_DOC_URL = "https://docs.aws.amazon.com/sdkref/latest/guide/feature-static-credentials.html"; +export const 
getExtendedInstanceMetadataCredentials = (credentials, logger) => { + const refreshInterval = STATIC_STABILITY_REFRESH_INTERVAL_SECONDS + + Math.floor(Math.random() * STATIC_STABILITY_REFRESH_INTERVAL_JITTER_WINDOW_SECONDS); + const newExpiration = new Date(Date.now() + refreshInterval * 1000); + logger.warn("Attempting credential expiration extension due to a credential service availability issue. A refresh of these " + + `credentials will be attempted after ${new Date(newExpiration)}.\nFor more information, please visit: ` + + STATIC_STABILITY_DOC_URL); + const originalExpiration = credentials.originalExpiration ?? credentials.expiration; + return { + ...credentials, + ...(originalExpiration ? { originalExpiration } : {}), + expiration: newExpiration, + }; +}; diff --git a/amplify/functions/downloadDocument/node_modules/@smithy/credential-provider-imds/dist-es/utils/getInstanceMetadataEndpoint.js b/amplify/functions/downloadDocument/node_modules/@smithy/credential-provider-imds/dist-es/utils/getInstanceMetadataEndpoint.js new file mode 100644 index 0000000..4c611ad --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@smithy/credential-provider-imds/dist-es/utils/getInstanceMetadataEndpoint.js @@ -0,0 +1,19 @@ +import { loadConfig } from "@smithy/node-config-provider"; +import { parseUrl } from "@smithy/url-parser"; +import { Endpoint as InstanceMetadataEndpoint } from "../config/Endpoint"; +import { ENDPOINT_CONFIG_OPTIONS } from "../config/EndpointConfigOptions"; +import { EndpointMode } from "../config/EndpointMode"; +import { ENDPOINT_MODE_CONFIG_OPTIONS, } from "../config/EndpointModeConfigOptions"; +export const getInstanceMetadataEndpoint = async () => parseUrl((await getFromEndpointConfig()) || (await getFromEndpointModeConfig())); +const getFromEndpointConfig = async () => loadConfig(ENDPOINT_CONFIG_OPTIONS)(); +const getFromEndpointModeConfig = async () => { + const endpointMode = await loadConfig(ENDPOINT_MODE_CONFIG_OPTIONS)(); + switch 
(endpointMode) { + case EndpointMode.IPv4: + return InstanceMetadataEndpoint.IPv4; + case EndpointMode.IPv6: + return InstanceMetadataEndpoint.IPv6; + default: + throw new Error(`Unsupported endpoint mode: ${endpointMode}.` + ` Select from ${Object.values(EndpointMode)}`); + } +}; diff --git a/amplify/functions/downloadDocument/node_modules/@smithy/credential-provider-imds/dist-es/utils/staticStabilityProvider.js b/amplify/functions/downloadDocument/node_modules/@smithy/credential-provider-imds/dist-es/utils/staticStabilityProvider.js new file mode 100644 index 0000000..9a1e742 --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@smithy/credential-provider-imds/dist-es/utils/staticStabilityProvider.js @@ -0,0 +1,25 @@ +import { getExtendedInstanceMetadataCredentials } from "./getExtendedInstanceMetadataCredentials"; +export const staticStabilityProvider = (provider, options = {}) => { + const logger = options?.logger || console; + let pastCredentials; + return async () => { + let credentials; + try { + credentials = await provider(); + if (credentials.expiration && credentials.expiration.getTime() < Date.now()) { + credentials = getExtendedInstanceMetadataCredentials(credentials, logger); + } + } + catch (e) { + if (pastCredentials) { + logger.warn("Credential renew failed: ", e); + credentials = getExtendedInstanceMetadataCredentials(pastCredentials, logger); + } + else { + throw e; + } + } + pastCredentials = credentials; + return credentials; + }; +}; diff --git a/amplify/functions/downloadDocument/node_modules/@smithy/credential-provider-imds/dist-types/config/Endpoint.d.ts b/amplify/functions/downloadDocument/node_modules/@smithy/credential-provider-imds/dist-types/config/Endpoint.d.ts new file mode 100644 index 0000000..000e313 --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@smithy/credential-provider-imds/dist-types/config/Endpoint.d.ts @@ -0,0 +1,7 @@ +/** + * @internal + */ +export declare enum Endpoint { + IPv4 = 
"http://169.254.169.254", + IPv6 = "http://[fd00:ec2::254]" +} diff --git a/amplify/functions/downloadDocument/node_modules/@smithy/credential-provider-imds/dist-types/config/EndpointConfigOptions.d.ts b/amplify/functions/downloadDocument/node_modules/@smithy/credential-provider-imds/dist-types/config/EndpointConfigOptions.d.ts new file mode 100644 index 0000000..c03e22c --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@smithy/credential-provider-imds/dist-types/config/EndpointConfigOptions.d.ts @@ -0,0 +1,13 @@ +import { LoadedConfigSelectors } from "@smithy/node-config-provider"; +/** + * @internal + */ +export declare const ENV_ENDPOINT_NAME = "AWS_EC2_METADATA_SERVICE_ENDPOINT"; +/** + * @internal + */ +export declare const CONFIG_ENDPOINT_NAME = "ec2_metadata_service_endpoint"; +/** + * @internal + */ +export declare const ENDPOINT_CONFIG_OPTIONS: LoadedConfigSelectors; diff --git a/amplify/functions/downloadDocument/node_modules/@smithy/credential-provider-imds/dist-types/config/EndpointMode.d.ts b/amplify/functions/downloadDocument/node_modules/@smithy/credential-provider-imds/dist-types/config/EndpointMode.d.ts new file mode 100644 index 0000000..db70619 --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@smithy/credential-provider-imds/dist-types/config/EndpointMode.d.ts @@ -0,0 +1,7 @@ +/** + * @internal + */ +export declare enum EndpointMode { + IPv4 = "IPv4", + IPv6 = "IPv6" +} diff --git a/amplify/functions/downloadDocument/node_modules/@smithy/credential-provider-imds/dist-types/config/EndpointModeConfigOptions.d.ts b/amplify/functions/downloadDocument/node_modules/@smithy/credential-provider-imds/dist-types/config/EndpointModeConfigOptions.d.ts new file mode 100644 index 0000000..c743199 --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@smithy/credential-provider-imds/dist-types/config/EndpointModeConfigOptions.d.ts @@ -0,0 +1,13 @@ +import { LoadedConfigSelectors } from 
"@smithy/node-config-provider"; +/** + * @internal + */ +export declare const ENV_ENDPOINT_MODE_NAME = "AWS_EC2_METADATA_SERVICE_ENDPOINT_MODE"; +/** + * @internal + */ +export declare const CONFIG_ENDPOINT_MODE_NAME = "ec2_metadata_service_endpoint_mode"; +/** + * @internal + */ +export declare const ENDPOINT_MODE_CONFIG_OPTIONS: LoadedConfigSelectors; diff --git a/amplify/functions/downloadDocument/node_modules/@smithy/credential-provider-imds/dist-types/error/InstanceMetadataV1FallbackError.d.ts b/amplify/functions/downloadDocument/node_modules/@smithy/credential-provider-imds/dist-types/error/InstanceMetadataV1FallbackError.d.ts new file mode 100644 index 0000000..8338ccb --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@smithy/credential-provider-imds/dist-types/error/InstanceMetadataV1FallbackError.d.ts @@ -0,0 +1,12 @@ +import { CredentialsProviderError } from "@smithy/property-provider"; +/** + * @public + * + * A specific sub-case of CredentialsProviderError, when the IMDSv1 fallback + * has been attempted but shut off by SDK configuration. 
+ */ +export declare class InstanceMetadataV1FallbackError extends CredentialsProviderError { + readonly tryNextLink: boolean; + name: string; + constructor(message: string, tryNextLink?: boolean); +} diff --git a/amplify/functions/downloadDocument/node_modules/@smithy/credential-provider-imds/dist-types/fromContainerMetadata.d.ts b/amplify/functions/downloadDocument/node_modules/@smithy/credential-provider-imds/dist-types/fromContainerMetadata.d.ts new file mode 100644 index 0000000..f6f28f0 --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@smithy/credential-provider-imds/dist-types/fromContainerMetadata.d.ts @@ -0,0 +1,21 @@ +import { AwsCredentialIdentityProvider } from "@smithy/types"; +import { RemoteProviderInit } from "./remoteProvider/RemoteProviderInit"; +/** + * @internal + */ +export declare const ENV_CMDS_FULL_URI = "AWS_CONTAINER_CREDENTIALS_FULL_URI"; +/** + * @internal + */ +export declare const ENV_CMDS_RELATIVE_URI = "AWS_CONTAINER_CREDENTIALS_RELATIVE_URI"; +/** + * @internal + */ +export declare const ENV_CMDS_AUTH_TOKEN = "AWS_CONTAINER_AUTHORIZATION_TOKEN"; +/** + * @internal + * + * Creates a credential provider that will source credentials from the ECS + * Container Metadata Service + */ +export declare const fromContainerMetadata: (init?: RemoteProviderInit) => AwsCredentialIdentityProvider; diff --git a/amplify/functions/downloadDocument/node_modules/@smithy/credential-provider-imds/dist-types/fromInstanceMetadata.d.ts b/amplify/functions/downloadDocument/node_modules/@smithy/credential-provider-imds/dist-types/fromInstanceMetadata.d.ts new file mode 100644 index 0000000..24db95a --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@smithy/credential-provider-imds/dist-types/fromInstanceMetadata.d.ts @@ -0,0 +1,10 @@ +import { Provider } from "@smithy/types"; +import { RemoteProviderInit } from "./remoteProvider/RemoteProviderInit"; +import { InstanceMetadataCredentials } from "./types"; +/** + * @internal 
+ * + * Creates a credential provider that will source credentials from the EC2 + * Instance Metadata Service + */ +export declare const fromInstanceMetadata: (init?: RemoteProviderInit) => Provider; diff --git a/amplify/functions/downloadDocument/node_modules/@smithy/credential-provider-imds/dist-types/index.d.ts b/amplify/functions/downloadDocument/node_modules/@smithy/credential-provider-imds/dist-types/index.d.ts new file mode 100644 index 0000000..5a87b2f --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@smithy/credential-provider-imds/dist-types/index.d.ts @@ -0,0 +1,28 @@ +/** + * @internal + */ +export * from "./fromContainerMetadata"; +/** + * @internal + */ +export * from "./fromInstanceMetadata"; +/** + * @internal + */ +export * from "./remoteProvider/RemoteProviderInit"; +/** + * @internal + */ +export * from "./types"; +/** + * @internal + */ +export { httpRequest } from "./remoteProvider/httpRequest"; +/** + * @internal + */ +export { getInstanceMetadataEndpoint } from "./utils/getInstanceMetadataEndpoint"; +/** + * @internal + */ +export { Endpoint } from "./config/Endpoint"; diff --git a/amplify/functions/downloadDocument/node_modules/@smithy/credential-provider-imds/dist-types/remoteProvider/ImdsCredentials.d.ts b/amplify/functions/downloadDocument/node_modules/@smithy/credential-provider-imds/dist-types/remoteProvider/ImdsCredentials.d.ts new file mode 100644 index 0000000..c2c7d51 --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@smithy/credential-provider-imds/dist-types/remoteProvider/ImdsCredentials.d.ts @@ -0,0 +1,19 @@ +import { AwsCredentialIdentity } from "@smithy/types"; +/** + * @internal + */ +export interface ImdsCredentials { + AccessKeyId: string; + SecretAccessKey: string; + Token: string; + Expiration: string; + AccountId?: string; +} +/** + * @internal + */ +export declare const isImdsCredentials: (arg: any) => arg is ImdsCredentials; +/** + * @internal + */ +export declare const 
fromImdsCredentials: (creds: ImdsCredentials) => AwsCredentialIdentity; diff --git a/amplify/functions/downloadDocument/node_modules/@smithy/credential-provider-imds/dist-types/remoteProvider/RemoteProviderInit.d.ts b/amplify/functions/downloadDocument/node_modules/@smithy/credential-provider-imds/dist-types/remoteProvider/RemoteProviderInit.d.ts new file mode 100644 index 0000000..df9eff7 --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@smithy/credential-provider-imds/dist-types/remoteProvider/RemoteProviderInit.d.ts @@ -0,0 +1,40 @@ +import { Logger } from "@smithy/types"; +/** + * @internal + */ +export declare const DEFAULT_TIMEOUT = 1000; +/** + * @internal + */ +export declare const DEFAULT_MAX_RETRIES = 0; +/** + * @public + */ +export interface RemoteProviderConfig { + /** + * The connection timeout (in milliseconds) + */ + timeout: number; + /** + * The maximum number of times the HTTP connection should be retried + */ + maxRetries: number; +} +/** + * @public + */ +export interface RemoteProviderInit extends Partial { + logger?: Logger; + /** + * Only used in the IMDS credential provider. + */ + ec2MetadataV1Disabled?: boolean; + /** + * AWS_PROFILE. 
+ */ + profile?: string; +} +/** + * @internal + */ +export declare const providerConfigFromInit: ({ maxRetries, timeout, }: RemoteProviderInit) => RemoteProviderConfig; diff --git a/amplify/functions/downloadDocument/node_modules/@smithy/credential-provider-imds/dist-types/remoteProvider/httpRequest.d.ts b/amplify/functions/downloadDocument/node_modules/@smithy/credential-provider-imds/dist-types/remoteProvider/httpRequest.d.ts new file mode 100644 index 0000000..87c7d0d --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@smithy/credential-provider-imds/dist-types/remoteProvider/httpRequest.d.ts @@ -0,0 +1,9 @@ +/// +/// +/// +import { Buffer } from "buffer"; +import { RequestOptions } from "http"; +/** + * @internal + */ +export declare function httpRequest(options: RequestOptions): Promise; diff --git a/amplify/functions/downloadDocument/node_modules/@smithy/credential-provider-imds/dist-types/remoteProvider/index.d.ts b/amplify/functions/downloadDocument/node_modules/@smithy/credential-provider-imds/dist-types/remoteProvider/index.d.ts new file mode 100644 index 0000000..ed18a70 --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@smithy/credential-provider-imds/dist-types/remoteProvider/index.d.ts @@ -0,0 +1,8 @@ +/** + * @internal + */ +export * from "./ImdsCredentials"; +/** + * @internal + */ +export * from "./RemoteProviderInit"; diff --git a/amplify/functions/downloadDocument/node_modules/@smithy/credential-provider-imds/dist-types/remoteProvider/retry.d.ts b/amplify/functions/downloadDocument/node_modules/@smithy/credential-provider-imds/dist-types/remoteProvider/retry.d.ts new file mode 100644 index 0000000..4e8abc0 --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@smithy/credential-provider-imds/dist-types/remoteProvider/retry.d.ts @@ -0,0 +1,10 @@ +/** + * @internal + */ +export interface RetryableProvider { + (): Promise; +} +/** + * @internal + */ +export declare const retry: (toRetry: 
RetryableProvider, maxRetries: number) => Promise; diff --git a/amplify/functions/downloadDocument/node_modules/@smithy/credential-provider-imds/dist-types/ts3.4/config/Endpoint.d.ts b/amplify/functions/downloadDocument/node_modules/@smithy/credential-provider-imds/dist-types/ts3.4/config/Endpoint.d.ts new file mode 100644 index 0000000..b700953 --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@smithy/credential-provider-imds/dist-types/ts3.4/config/Endpoint.d.ts @@ -0,0 +1,7 @@ +/** + * @internal + */ +export declare enum Endpoint { + IPv4 = "http://169.254.169.254", + IPv6 = "http://[fd00:ec2::254]" +} diff --git a/amplify/functions/downloadDocument/node_modules/@smithy/credential-provider-imds/dist-types/ts3.4/config/EndpointConfigOptions.d.ts b/amplify/functions/downloadDocument/node_modules/@smithy/credential-provider-imds/dist-types/ts3.4/config/EndpointConfigOptions.d.ts new file mode 100644 index 0000000..dbcb243 --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@smithy/credential-provider-imds/dist-types/ts3.4/config/EndpointConfigOptions.d.ts @@ -0,0 +1,13 @@ +import { LoadedConfigSelectors } from "@smithy/node-config-provider"; +/** + * @internal + */ +export declare const ENV_ENDPOINT_NAME = "AWS_EC2_METADATA_SERVICE_ENDPOINT"; +/** + * @internal + */ +export declare const CONFIG_ENDPOINT_NAME = "ec2_metadata_service_endpoint"; +/** + * @internal + */ +export declare const ENDPOINT_CONFIG_OPTIONS: LoadedConfigSelectors; diff --git a/amplify/functions/downloadDocument/node_modules/@smithy/credential-provider-imds/dist-types/ts3.4/config/EndpointMode.d.ts b/amplify/functions/downloadDocument/node_modules/@smithy/credential-provider-imds/dist-types/ts3.4/config/EndpointMode.d.ts new file mode 100644 index 0000000..7dee86e --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@smithy/credential-provider-imds/dist-types/ts3.4/config/EndpointMode.d.ts @@ -0,0 +1,7 @@ +/** + * @internal + */ +export declare 
enum EndpointMode { + IPv4 = "IPv4", + IPv6 = "IPv6" +} diff --git a/amplify/functions/downloadDocument/node_modules/@smithy/credential-provider-imds/dist-types/ts3.4/config/EndpointModeConfigOptions.d.ts b/amplify/functions/downloadDocument/node_modules/@smithy/credential-provider-imds/dist-types/ts3.4/config/EndpointModeConfigOptions.d.ts new file mode 100644 index 0000000..1d5e458 --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@smithy/credential-provider-imds/dist-types/ts3.4/config/EndpointModeConfigOptions.d.ts @@ -0,0 +1,13 @@ +import { LoadedConfigSelectors } from "@smithy/node-config-provider"; +/** + * @internal + */ +export declare const ENV_ENDPOINT_MODE_NAME = "AWS_EC2_METADATA_SERVICE_ENDPOINT_MODE"; +/** + * @internal + */ +export declare const CONFIG_ENDPOINT_MODE_NAME = "ec2_metadata_service_endpoint_mode"; +/** + * @internal + */ +export declare const ENDPOINT_MODE_CONFIG_OPTIONS: LoadedConfigSelectors; diff --git a/amplify/functions/downloadDocument/node_modules/@smithy/credential-provider-imds/dist-types/ts3.4/error/InstanceMetadataV1FallbackError.d.ts b/amplify/functions/downloadDocument/node_modules/@smithy/credential-provider-imds/dist-types/ts3.4/error/InstanceMetadataV1FallbackError.d.ts new file mode 100644 index 0000000..93ac220 --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@smithy/credential-provider-imds/dist-types/ts3.4/error/InstanceMetadataV1FallbackError.d.ts @@ -0,0 +1,12 @@ +import { CredentialsProviderError } from "@smithy/property-provider"; +/** + * @public + * + * A specific sub-case of CredentialsProviderError, when the IMDSv1 fallback + * has been attempted but shut off by SDK configuration. 
+ */ +export declare class InstanceMetadataV1FallbackError extends CredentialsProviderError { + readonly tryNextLink: boolean; + name: string; + constructor(message: string, tryNextLink?: boolean); +} diff --git a/amplify/functions/downloadDocument/node_modules/@smithy/credential-provider-imds/dist-types/ts3.4/fromContainerMetadata.d.ts b/amplify/functions/downloadDocument/node_modules/@smithy/credential-provider-imds/dist-types/ts3.4/fromContainerMetadata.d.ts new file mode 100644 index 0000000..deb48fd --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@smithy/credential-provider-imds/dist-types/ts3.4/fromContainerMetadata.d.ts @@ -0,0 +1,21 @@ +import { AwsCredentialIdentityProvider } from "@smithy/types"; +import { RemoteProviderInit } from "./remoteProvider/RemoteProviderInit"; +/** + * @internal + */ +export declare const ENV_CMDS_FULL_URI = "AWS_CONTAINER_CREDENTIALS_FULL_URI"; +/** + * @internal + */ +export declare const ENV_CMDS_RELATIVE_URI = "AWS_CONTAINER_CREDENTIALS_RELATIVE_URI"; +/** + * @internal + */ +export declare const ENV_CMDS_AUTH_TOKEN = "AWS_CONTAINER_AUTHORIZATION_TOKEN"; +/** + * @internal + * + * Creates a credential provider that will source credentials from the ECS + * Container Metadata Service + */ +export declare const fromContainerMetadata: (init?: RemoteProviderInit) => AwsCredentialIdentityProvider; diff --git a/amplify/functions/downloadDocument/node_modules/@smithy/credential-provider-imds/dist-types/ts3.4/fromInstanceMetadata.d.ts b/amplify/functions/downloadDocument/node_modules/@smithy/credential-provider-imds/dist-types/ts3.4/fromInstanceMetadata.d.ts new file mode 100644 index 0000000..8a533f2 --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@smithy/credential-provider-imds/dist-types/ts3.4/fromInstanceMetadata.d.ts @@ -0,0 +1,10 @@ +import { Provider } from "@smithy/types"; +import { RemoteProviderInit } from "./remoteProvider/RemoteProviderInit"; +import { InstanceMetadataCredentials } 
from "./types"; +/** + * @internal + * + * Creates a credential provider that will source credentials from the EC2 + * Instance Metadata Service + */ +export declare const fromInstanceMetadata: (init?: RemoteProviderInit) => Provider; diff --git a/amplify/functions/downloadDocument/node_modules/@smithy/credential-provider-imds/dist-types/ts3.4/index.d.ts b/amplify/functions/downloadDocument/node_modules/@smithy/credential-provider-imds/dist-types/ts3.4/index.d.ts new file mode 100644 index 0000000..c0bc7e4 --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@smithy/credential-provider-imds/dist-types/ts3.4/index.d.ts @@ -0,0 +1,28 @@ +/** + * @internal + */ +export * from "./fromContainerMetadata"; +/** + * @internal + */ +export * from "./fromInstanceMetadata"; +/** + * @internal + */ +export * from "./remoteProvider/RemoteProviderInit"; +/** + * @internal + */ +export * from "./types"; +/** + * @internal + */ +export { httpRequest } from "./remoteProvider/httpRequest"; +/** + * @internal + */ +export { getInstanceMetadataEndpoint } from "./utils/getInstanceMetadataEndpoint"; +/** + * @internal + */ +export { Endpoint } from "./config/Endpoint"; diff --git a/amplify/functions/downloadDocument/node_modules/@smithy/credential-provider-imds/dist-types/ts3.4/remoteProvider/ImdsCredentials.d.ts b/amplify/functions/downloadDocument/node_modules/@smithy/credential-provider-imds/dist-types/ts3.4/remoteProvider/ImdsCredentials.d.ts new file mode 100644 index 0000000..c621e0a --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@smithy/credential-provider-imds/dist-types/ts3.4/remoteProvider/ImdsCredentials.d.ts @@ -0,0 +1,19 @@ +import { AwsCredentialIdentity } from "@smithy/types"; +/** + * @internal + */ +export interface ImdsCredentials { + AccessKeyId: string; + SecretAccessKey: string; + Token: string; + Expiration: string; + AccountId?: string; +} +/** + * @internal + */ +export declare const isImdsCredentials: (arg: any) => arg is 
ImdsCredentials; +/** + * @internal + */ +export declare const fromImdsCredentials: (creds: ImdsCredentials) => AwsCredentialIdentity; diff --git a/amplify/functions/downloadDocument/node_modules/@smithy/credential-provider-imds/dist-types/ts3.4/remoteProvider/RemoteProviderInit.d.ts b/amplify/functions/downloadDocument/node_modules/@smithy/credential-provider-imds/dist-types/ts3.4/remoteProvider/RemoteProviderInit.d.ts new file mode 100644 index 0000000..4fe25f1 --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@smithy/credential-provider-imds/dist-types/ts3.4/remoteProvider/RemoteProviderInit.d.ts @@ -0,0 +1,40 @@ +import { Logger } from "@smithy/types"; +/** + * @internal + */ +export declare const DEFAULT_TIMEOUT = 1000; +/** + * @internal + */ +export declare const DEFAULT_MAX_RETRIES = 0; +/** + * @public + */ +export interface RemoteProviderConfig { + /** + * The connection timeout (in milliseconds) + */ + timeout: number; + /** + * The maximum number of times the HTTP connection should be retried + */ + maxRetries: number; +} +/** + * @public + */ +export interface RemoteProviderInit extends Partial { + logger?: Logger; + /** + * Only used in the IMDS credential provider. + */ + ec2MetadataV1Disabled?: boolean; + /** + * AWS_PROFILE. 
+ */ + profile?: string; +} +/** + * @internal + */ +export declare const providerConfigFromInit: ({ maxRetries, timeout, }: RemoteProviderInit) => RemoteProviderConfig; diff --git a/amplify/functions/downloadDocument/node_modules/@smithy/credential-provider-imds/dist-types/ts3.4/remoteProvider/httpRequest.d.ts b/amplify/functions/downloadDocument/node_modules/@smithy/credential-provider-imds/dist-types/ts3.4/remoteProvider/httpRequest.d.ts new file mode 100644 index 0000000..b514fef --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@smithy/credential-provider-imds/dist-types/ts3.4/remoteProvider/httpRequest.d.ts @@ -0,0 +1,7 @@ +/// +import { Buffer } from "buffer"; +import { RequestOptions } from "http"; +/** + * @internal + */ +export declare function httpRequest(options: RequestOptions): Promise; diff --git a/amplify/functions/downloadDocument/node_modules/@smithy/credential-provider-imds/dist-types/ts3.4/remoteProvider/index.d.ts b/amplify/functions/downloadDocument/node_modules/@smithy/credential-provider-imds/dist-types/ts3.4/remoteProvider/index.d.ts new file mode 100644 index 0000000..a9d6094 --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@smithy/credential-provider-imds/dist-types/ts3.4/remoteProvider/index.d.ts @@ -0,0 +1,8 @@ +/** + * @internal + */ +export * from "./ImdsCredentials"; +/** + * @internal + */ +export * from "./RemoteProviderInit"; diff --git a/amplify/functions/downloadDocument/node_modules/@smithy/credential-provider-imds/dist-types/ts3.4/remoteProvider/retry.d.ts b/amplify/functions/downloadDocument/node_modules/@smithy/credential-provider-imds/dist-types/ts3.4/remoteProvider/retry.d.ts new file mode 100644 index 0000000..d72d604 --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@smithy/credential-provider-imds/dist-types/ts3.4/remoteProvider/retry.d.ts @@ -0,0 +1,10 @@ +/** + * @internal + */ +export interface RetryableProvider { + (): Promise; +} +/** + * @internal + */ +export 
declare const retry: (toRetry: RetryableProvider, maxRetries: number) => Promise; diff --git a/amplify/functions/downloadDocument/node_modules/@smithy/credential-provider-imds/dist-types/ts3.4/types.d.ts b/amplify/functions/downloadDocument/node_modules/@smithy/credential-provider-imds/dist-types/ts3.4/types.d.ts new file mode 100644 index 0000000..2e9592b --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@smithy/credential-provider-imds/dist-types/ts3.4/types.d.ts @@ -0,0 +1,7 @@ +import { AwsCredentialIdentity } from "@smithy/types"; +/** + * @internal + */ +export interface InstanceMetadataCredentials extends AwsCredentialIdentity { + readonly originalExpiration?: Date; +} diff --git a/amplify/functions/downloadDocument/node_modules/@smithy/credential-provider-imds/dist-types/ts3.4/utils/getExtendedInstanceMetadataCredentials.d.ts b/amplify/functions/downloadDocument/node_modules/@smithy/credential-provider-imds/dist-types/ts3.4/utils/getExtendedInstanceMetadataCredentials.d.ts new file mode 100644 index 0000000..67edd2c --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@smithy/credential-provider-imds/dist-types/ts3.4/utils/getExtendedInstanceMetadataCredentials.d.ts @@ -0,0 +1,6 @@ +import { Logger } from "@smithy/types"; +import { InstanceMetadataCredentials } from "../types"; +/** + * @internal + */ +export declare const getExtendedInstanceMetadataCredentials: (credentials: InstanceMetadataCredentials, logger: Logger) => InstanceMetadataCredentials; diff --git a/amplify/functions/downloadDocument/node_modules/@smithy/credential-provider-imds/dist-types/ts3.4/utils/getInstanceMetadataEndpoint.d.ts b/amplify/functions/downloadDocument/node_modules/@smithy/credential-provider-imds/dist-types/ts3.4/utils/getInstanceMetadataEndpoint.d.ts new file mode 100644 index 0000000..1ad772d --- /dev/null +++ 
b/amplify/functions/downloadDocument/node_modules/@smithy/credential-provider-imds/dist-types/ts3.4/utils/getInstanceMetadataEndpoint.d.ts @@ -0,0 +1,21 @@ +import { Endpoint } from "@smithy/types"; +/** + * Returns the host to use for instance metadata service call. + * + * The host is read from endpoint which can be set either in + * {@link ENV_ENDPOINT_NAME} environment variable or {@link CONFIG_ENDPOINT_NAME} + * configuration property. + * + * If endpoint is not set, then endpoint mode is read either from + * {@link ENV_ENDPOINT_MODE_NAME} environment variable or {@link CONFIG_ENDPOINT_MODE_NAME} + * configuration property. If endpoint mode is not set, then default endpoint mode + * {@link EndpointMode.IPv4} is used. + * + * If endpoint mode is set to {@link EndpointMode.IPv4}, then the host is {@link Endpoint.IPv4}. + * If endpoint mode is set to {@link EndpointMode.IPv6}, then the host is {@link Endpoint.IPv6}. + * + * @returns Host to use for instance metadata service call. + * + * @internal + */ +export declare const getInstanceMetadataEndpoint: () => Promise; diff --git a/amplify/functions/downloadDocument/node_modules/@smithy/credential-provider-imds/dist-types/ts3.4/utils/staticStabilityProvider.d.ts b/amplify/functions/downloadDocument/node_modules/@smithy/credential-provider-imds/dist-types/ts3.4/utils/staticStabilityProvider.d.ts new file mode 100644 index 0000000..337091e --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@smithy/credential-provider-imds/dist-types/ts3.4/utils/staticStabilityProvider.d.ts @@ -0,0 +1,16 @@ +import { Logger, Provider } from "@smithy/types"; +import { InstanceMetadataCredentials } from "../types"; +/** + * @internal + * + * IMDS credential supports static stability feature. When used, the expiration + * of recently issued credentials is extended. The server side allows using + * the recently expired credentials. 
This mitigates impact when clients using + * refreshable credentials are unable to retrieve updates. + * + * @param provider Credential provider + * @returns A credential provider that supports static stability + */ +export declare const staticStabilityProvider: (provider: Provider, options?: { + logger?: Logger | undefined; +}) => Provider; diff --git a/amplify/functions/downloadDocument/node_modules/@smithy/credential-provider-imds/dist-types/types.d.ts b/amplify/functions/downloadDocument/node_modules/@smithy/credential-provider-imds/dist-types/types.d.ts new file mode 100644 index 0000000..e74ec99 --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@smithy/credential-provider-imds/dist-types/types.d.ts @@ -0,0 +1,7 @@ +import { AwsCredentialIdentity } from "@smithy/types"; +/** + * @internal + */ +export interface InstanceMetadataCredentials extends AwsCredentialIdentity { + readonly originalExpiration?: Date; +} diff --git a/amplify/functions/downloadDocument/node_modules/@smithy/credential-provider-imds/dist-types/utils/getExtendedInstanceMetadataCredentials.d.ts b/amplify/functions/downloadDocument/node_modules/@smithy/credential-provider-imds/dist-types/utils/getExtendedInstanceMetadataCredentials.d.ts new file mode 100644 index 0000000..f0ed41b --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@smithy/credential-provider-imds/dist-types/utils/getExtendedInstanceMetadataCredentials.d.ts @@ -0,0 +1,6 @@ +import { Logger } from "@smithy/types"; +import { InstanceMetadataCredentials } from "../types"; +/** + * @internal + */ +export declare const getExtendedInstanceMetadataCredentials: (credentials: InstanceMetadataCredentials, logger: Logger) => InstanceMetadataCredentials; diff --git a/amplify/functions/downloadDocument/node_modules/@smithy/credential-provider-imds/dist-types/utils/getInstanceMetadataEndpoint.d.ts 
b/amplify/functions/downloadDocument/node_modules/@smithy/credential-provider-imds/dist-types/utils/getInstanceMetadataEndpoint.d.ts new file mode 100644 index 0000000..db6b6da --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@smithy/credential-provider-imds/dist-types/utils/getInstanceMetadataEndpoint.d.ts @@ -0,0 +1,21 @@ +import { Endpoint } from "@smithy/types"; +/** + * Returns the host to use for instance metadata service call. + * + * The host is read from endpoint which can be set either in + * {@link ENV_ENDPOINT_NAME} environment variable or {@link CONFIG_ENDPOINT_NAME} + * configuration property. + * + * If endpoint is not set, then endpoint mode is read either from + * {@link ENV_ENDPOINT_MODE_NAME} environment variable or {@link CONFIG_ENDPOINT_MODE_NAME} + * configuration property. If endpoint mode is not set, then default endpoint mode + * {@link EndpointMode.IPv4} is used. + * + * If endpoint mode is set to {@link EndpointMode.IPv4}, then the host is {@link Endpoint.IPv4}. + * If endpoint mode is set to {@link EndpointMode.IPv6}, then the host is {@link Endpoint.IPv6}. + * + * @returns Host to use for instance metadata service call. + * + * @internal + */ +export declare const getInstanceMetadataEndpoint: () => Promise; diff --git a/amplify/functions/downloadDocument/node_modules/@smithy/credential-provider-imds/dist-types/utils/staticStabilityProvider.d.ts b/amplify/functions/downloadDocument/node_modules/@smithy/credential-provider-imds/dist-types/utils/staticStabilityProvider.d.ts new file mode 100644 index 0000000..6bfcb69 --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@smithy/credential-provider-imds/dist-types/utils/staticStabilityProvider.d.ts @@ -0,0 +1,16 @@ +import { Logger, Provider } from "@smithy/types"; +import { InstanceMetadataCredentials } from "../types"; +/** + * @internal + * + * IMDS credential supports static stability feature. 
When used, the expiration + * of recently issued credentials is extended. The server side allows using + * the recently expired credentials. This mitigates impact when clients using + * refreshable credentials are unable to retrieve updates. + * + * @param provider Credential provider + * @returns A credential provider that supports static stability + */ +export declare const staticStabilityProvider: (provider: Provider, options?: { + logger?: Logger | undefined; +}) => Provider; diff --git a/amplify/functions/downloadDocument/node_modules/@smithy/credential-provider-imds/package.json b/amplify/functions/downloadDocument/node_modules/@smithy/credential-provider-imds/package.json new file mode 100644 index 0000000..8fd0824 --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@smithy/credential-provider-imds/package.json @@ -0,0 +1,69 @@ +{ + "name": "@smithy/credential-provider-imds", + "version": "4.0.2", + "description": "AWS credential provider that sources credentials from the EC2 instance metadata service and ECS container metadata service", + "main": "./dist-cjs/index.js", + "module": "./dist-es/index.js", + "scripts": { + "build": "concurrently 'yarn:build:cjs' 'yarn:build:es' 'yarn:build:types && yarn build:types:downlevel'", + "build:cjs": "node ../../scripts/inline credential-provider-imds", + "build:es": "yarn g:tsc -p tsconfig.es.json", + "build:types": "yarn g:tsc -p tsconfig.types.json", + "build:types:downlevel": "rimraf dist-types/ts3.4 && downlevel-dts dist-types dist-types/ts3.4", + "stage-release": "rimraf ./.release && yarn pack && mkdir ./.release && tar zxvf ./package.tgz --directory ./.release && rm ./package.tgz", + "clean": "rimraf ./dist-* && rimraf *.tsbuildinfo || exit 0", + "lint": "eslint -c ../../.eslintrc.js \"src/**/*.ts\"", + "format": "prettier --config ../../prettier.config.js --ignore-path ../../.prettierignore --write \"**/*.{ts,md,json}\"", + "test": "yarn g:vitest run", + "test:watch": "yarn g:vitest watch" + }, 
+ "keywords": [ + "aws", + "credentials" + ], + "author": { + "name": "AWS SDK for JavaScript Team", + "url": "https://aws.amazon.com/javascript/" + }, + "license": "Apache-2.0", + "dependencies": { + "@smithy/node-config-provider": "^4.0.2", + "@smithy/property-provider": "^4.0.2", + "@smithy/types": "^4.2.0", + "@smithy/url-parser": "^4.0.2", + "tslib": "^2.6.2" + }, + "devDependencies": { + "@types/node": "^18.11.9", + "concurrently": "7.0.0", + "downlevel-dts": "0.10.1", + "rimraf": "3.0.2", + "typedoc": "0.23.23" + }, + "types": "./dist-types/index.d.ts", + "engines": { + "node": ">=18.0.0" + }, + "typesVersions": { + "<4.0": { + "dist-types/*": [ + "dist-types/ts3.4/*" + ] + } + }, + "files": [ + "dist-*/**" + ], + "homepage": "https://github.com/smithy-lang/smithy-typescript/tree/main/packages/credential-provider-imds", + "repository": { + "type": "git", + "url": "https://github.com/smithy-lang/smithy-typescript.git", + "directory": "packages/credential-provider-imds" + }, + "typedoc": { + "entryPoint": "src/index.ts" + }, + "publishConfig": { + "directory": ".release/package" + } +} \ No newline at end of file diff --git a/amplify/functions/downloadDocument/node_modules/@smithy/fetch-http-handler/LICENSE b/amplify/functions/downloadDocument/node_modules/@smithy/fetch-http-handler/LICENSE new file mode 100644 index 0000000..7b6491b --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@smithy/fetch-http-handler/LICENSE @@ -0,0 +1,201 @@ +Apache License + Version 2.0, January 2004 + http://www.apache.org/licenses/ + + TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION + + 1. Definitions. + + "License" shall mean the terms and conditions for use, reproduction, + and distribution as defined by Sections 1 through 9 of this document. + + "Licensor" shall mean the copyright owner or entity authorized by + the copyright owner that is granting the License. 
+ + "Legal Entity" shall mean the union of the acting entity and all + other entities that control, are controlled by, or are under common + control with that entity. For the purposes of this definition, + "control" means (i) the power, direct or indirect, to cause the + direction or management of such entity, whether by contract or + otherwise, or (ii) ownership of fifty percent (50%) or more of the + outstanding shares, or (iii) beneficial ownership of such entity. + + "You" (or "Your") shall mean an individual or Legal Entity + exercising permissions granted by this License. + + "Source" form shall mean the preferred form for making modifications, + including but not limited to software source code, documentation + source, and configuration files. + + "Object" form shall mean any form resulting from mechanical + transformation or translation of a Source form, including but + not limited to compiled object code, generated documentation, + and conversions to other media types. + + "Work" shall mean the work of authorship, whether in Source or + Object form, made available under the License, as indicated by a + copyright notice that is included in or attached to the work + (an example is provided in the Appendix below). + + "Derivative Works" shall mean any work, whether in Source or Object + form, that is based on (or derived from) the Work and for which the + editorial revisions, annotations, elaborations, or other modifications + represent, as a whole, an original work of authorship. For the purposes + of this License, Derivative Works shall not include works that remain + separable from, or merely link (or bind by name) to the interfaces of, + the Work and Derivative Works thereof. 
+ + "Contribution" shall mean any work of authorship, including + the original version of the Work and any modifications or additions + to that Work or Derivative Works thereof, that is intentionally + submitted to Licensor for inclusion in the Work by the copyright owner + or by an individual or Legal Entity authorized to submit on behalf of + the copyright owner. For the purposes of this definition, "submitted" + means any form of electronic, verbal, or written communication sent + to the Licensor or its representatives, including but not limited to + communication on electronic mailing lists, source code control systems, + and issue tracking systems that are managed by, or on behalf of, the + Licensor for the purpose of discussing and improving the Work, but + excluding communication that is conspicuously marked or otherwise + designated in writing by the copyright owner as "Not a Contribution." + + "Contributor" shall mean Licensor and any individual or Legal Entity + on behalf of whom a Contribution has been received by Licensor and + subsequently incorporated within the Work. + + 2. Grant of Copyright License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + copyright license to reproduce, prepare Derivative Works of, + publicly display, publicly perform, sublicense, and distribute the + Work and such Derivative Works in Source or Object form. + + 3. Grant of Patent License. 
Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + (except as stated in this section) patent license to make, have made, + use, offer to sell, sell, import, and otherwise transfer the Work, + where such license applies only to those patent claims licensable + by such Contributor that are necessarily infringed by their + Contribution(s) alone or by combination of their Contribution(s) + with the Work to which such Contribution(s) was submitted. If You + institute patent litigation against any entity (including a + cross-claim or counterclaim in a lawsuit) alleging that the Work + or a Contribution incorporated within the Work constitutes direct + or contributory patent infringement, then any patent licenses + granted to You under this License for that Work shall terminate + as of the date such litigation is filed. + + 4. Redistribution. You may reproduce and distribute copies of the + Work or Derivative Works thereof in any medium, with or without + modifications, and in Source or Object form, provided that You + meet the following conditions: + + (a) You must give any other recipients of the Work or + Derivative Works a copy of this License; and + + (b) You must cause any modified files to carry prominent notices + stating that You changed the files; and + + (c) You must retain, in the Source form of any Derivative Works + that You distribute, all copyright, patent, trademark, and + attribution notices from the Source form of the Work, + excluding those notices that do not pertain to any part of + the Derivative Works; and + + (d) If the Work includes a "NOTICE" text file as part of its + distribution, then any Derivative Works that You distribute must + include a readable copy of the attribution notices contained + within such NOTICE file, excluding those notices that do not + pertain to any part of the Derivative Works, in at least one + of 
the following places: within a NOTICE text file distributed + as part of the Derivative Works; within the Source form or + documentation, if provided along with the Derivative Works; or, + within a display generated by the Derivative Works, if and + wherever such third-party notices normally appear. The contents + of the NOTICE file are for informational purposes only and + do not modify the License. You may add Your own attribution + notices within Derivative Works that You distribute, alongside + or as an addendum to the NOTICE text from the Work, provided + that such additional attribution notices cannot be construed + as modifying the License. + + You may add Your own copyright statement to Your modifications and + may provide additional or different license terms and conditions + for use, reproduction, or distribution of Your modifications, or + for any such Derivative Works as a whole, provided Your use, + reproduction, and distribution of the Work otherwise complies with + the conditions stated in this License. + + 5. Submission of Contributions. Unless You explicitly state otherwise, + any Contribution intentionally submitted for inclusion in the Work + by You to the Licensor shall be under the terms and conditions of + this License, without any additional terms or conditions. + Notwithstanding the above, nothing herein shall supersede or modify + the terms of any separate license agreement you may have executed + with Licensor regarding such Contributions. + + 6. Trademarks. This License does not grant permission to use the trade + names, trademarks, service marks, or product names of the Licensor, + except as required for reasonable and customary use in describing the + origin of the Work and reproducing the content of the NOTICE file. + + 7. Disclaimer of Warranty. 
Unless required by applicable law or + agreed to in writing, Licensor provides the Work (and each + Contributor provides its Contributions) on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or + implied, including, without limitation, any warranties or conditions + of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A + PARTICULAR PURPOSE. You are solely responsible for determining the + appropriateness of using or redistributing the Work and assume any + risks associated with Your exercise of permissions under this License. + + 8. Limitation of Liability. In no event and under no legal theory, + whether in tort (including negligence), contract, or otherwise, + unless required by applicable law (such as deliberate and grossly + negligent acts) or agreed to in writing, shall any Contributor be + liable to You for damages, including any direct, indirect, special, + incidental, or consequential damages of any character arising as a + result of this License or out of the use or inability to use the + Work (including but not limited to damages for loss of goodwill, + work stoppage, computer failure or malfunction, or any and all + other commercial damages or losses), even if such Contributor + has been advised of the possibility of such damages. + + 9. Accepting Warranty or Additional Liability. While redistributing + the Work or Derivative Works thereof, You may choose to offer, + and charge a fee for, acceptance of support, warranty, indemnity, + or other liability obligations and/or rights consistent with this + License. However, in accepting such obligations, You may act only + on Your own behalf and on Your sole responsibility, not on behalf + of any other Contributor, and only if You agree to indemnify, + defend, and hold each Contributor harmless for any liability + incurred by, or claims asserted against, such Contributor by reason + of your accepting any such warranty or additional liability. 
+ + END OF TERMS AND CONDITIONS + + APPENDIX: How to apply the Apache License to your work. + + To apply the Apache License to your work, attach the following + boilerplate notice, with the fields enclosed by brackets "{}" + replaced with your own identifying information. (Don't include + the brackets!) The text should be enclosed in the appropriate + comment syntax for the file format. We also recommend that a + file or class name and description of purpose be included on the + same "printed page" as the copyright notice for easier + identification within third-party archives. + + Copyright 2018-2020 Amazon.com, Inc. or its affiliates. All Rights Reserved. + + Licensed under the Apache License, Version 2.0 (the "License"); + you may not use this file except in compliance with the License. + You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + + Unless required by applicable law or agreed to in writing, software + distributed under the License is distributed on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + See the License for the specific language governing permissions and + limitations under the License. \ No newline at end of file diff --git a/amplify/functions/downloadDocument/node_modules/@smithy/fetch-http-handler/README.md b/amplify/functions/downloadDocument/node_modules/@smithy/fetch-http-handler/README.md new file mode 100644 index 0000000..e52e8f1 --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@smithy/fetch-http-handler/README.md @@ -0,0 +1,11 @@ +# @smithy/fetch-http-handler + +[![NPM version](https://img.shields.io/npm/v/@smithy/fetch-http-handler/latest.svg)](https://www.npmjs.com/package/@smithy/fetch-http-handler) +[![NPM downloads](https://img.shields.io/npm/dm/@smithy/fetch-http-handler.svg)](https://www.npmjs.com/package/@smithy/fetch-http-handler) + +This is the default `requestHandler` used for browser applications. 
+Since Node.js introduced experimental Web Streams API in v16.5.0 and made it stable in v21.0.0, +you can consider using `fetch-http-handler` in Node.js, although it's not recommended. + +For the Node.js default `requestHandler` implementation, see instead +[`@smithy/node-http-handler`](https://www.npmjs.com/package/@smithy/node-http-handler). diff --git a/amplify/functions/downloadDocument/node_modules/@smithy/fetch-http-handler/dist-cjs/create-request.js b/amplify/functions/downloadDocument/node_modules/@smithy/fetch-http-handler/dist-cjs/create-request.js new file mode 100644 index 0000000..532e610 --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@smithy/fetch-http-handler/dist-cjs/create-request.js @@ -0,0 +1 @@ +module.exports = require("./index.js"); \ No newline at end of file diff --git a/amplify/functions/downloadDocument/node_modules/@smithy/fetch-http-handler/dist-cjs/fetch-http-handler.js b/amplify/functions/downloadDocument/node_modules/@smithy/fetch-http-handler/dist-cjs/fetch-http-handler.js new file mode 100644 index 0000000..532e610 --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@smithy/fetch-http-handler/dist-cjs/fetch-http-handler.js @@ -0,0 +1 @@ +module.exports = require("./index.js"); \ No newline at end of file diff --git a/amplify/functions/downloadDocument/node_modules/@smithy/fetch-http-handler/dist-cjs/index.js b/amplify/functions/downloadDocument/node_modules/@smithy/fetch-http-handler/dist-cjs/index.js new file mode 100644 index 0000000..9c9c44b --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@smithy/fetch-http-handler/dist-cjs/index.js @@ -0,0 +1,264 @@ +var __defProp = Object.defineProperty; +var __getOwnPropDesc = Object.getOwnPropertyDescriptor; +var __getOwnPropNames = Object.getOwnPropertyNames; +var __hasOwnProp = Object.prototype.hasOwnProperty; +var __name = (target, value) => __defProp(target, "name", { value, configurable: true }); +var __export = (target, all) => { 
+ for (var name in all) + __defProp(target, name, { get: all[name], enumerable: true }); +}; +var __copyProps = (to, from, except, desc) => { + if (from && typeof from === "object" || typeof from === "function") { + for (let key of __getOwnPropNames(from)) + if (!__hasOwnProp.call(to, key) && key !== except) + __defProp(to, key, { get: () => from[key], enumerable: !(desc = __getOwnPropDesc(from, key)) || desc.enumerable }); + } + return to; +}; +var __toCommonJS = (mod) => __copyProps(__defProp({}, "__esModule", { value: true }), mod); + +// src/index.ts +var src_exports = {}; +__export(src_exports, { + FetchHttpHandler: () => FetchHttpHandler, + keepAliveSupport: () => keepAliveSupport, + streamCollector: () => streamCollector +}); +module.exports = __toCommonJS(src_exports); + +// src/fetch-http-handler.ts +var import_protocol_http = require("@smithy/protocol-http"); +var import_querystring_builder = require("@smithy/querystring-builder"); + +// src/create-request.ts +function createRequest(url, requestOptions) { + return new Request(url, requestOptions); +} +__name(createRequest, "createRequest"); + +// src/request-timeout.ts +function requestTimeout(timeoutInMs = 0) { + return new Promise((resolve, reject) => { + if (timeoutInMs) { + setTimeout(() => { + const timeoutError = new Error(`Request did not complete within ${timeoutInMs} ms`); + timeoutError.name = "TimeoutError"; + reject(timeoutError); + }, timeoutInMs); + } + }); +} +__name(requestTimeout, "requestTimeout"); + +// src/fetch-http-handler.ts +var keepAliveSupport = { + supported: void 0 +}; +var FetchHttpHandler = class _FetchHttpHandler { + static { + __name(this, "FetchHttpHandler"); + } + /** + * @returns the input if it is an HttpHandler of any class, + * or instantiates a new instance of this handler. 
+ */ + static create(instanceOrOptions) { + if (typeof instanceOrOptions?.handle === "function") { + return instanceOrOptions; + } + return new _FetchHttpHandler(instanceOrOptions); + } + constructor(options) { + if (typeof options === "function") { + this.configProvider = options().then((opts) => opts || {}); + } else { + this.config = options ?? {}; + this.configProvider = Promise.resolve(this.config); + } + if (keepAliveSupport.supported === void 0) { + keepAliveSupport.supported = Boolean( + typeof Request !== "undefined" && "keepalive" in createRequest("https://[::1]") + ); + } + } + destroy() { + } + async handle(request, { abortSignal } = {}) { + if (!this.config) { + this.config = await this.configProvider; + } + const requestTimeoutInMs = this.config.requestTimeout; + const keepAlive = this.config.keepAlive === true; + const credentials = this.config.credentials; + if (abortSignal?.aborted) { + const abortError = new Error("Request aborted"); + abortError.name = "AbortError"; + return Promise.reject(abortError); + } + let path = request.path; + const queryString = (0, import_querystring_builder.buildQueryString)(request.query || {}); + if (queryString) { + path += `?${queryString}`; + } + if (request.fragment) { + path += `#${request.fragment}`; + } + let auth = ""; + if (request.username != null || request.password != null) { + const username = request.username ?? ""; + const password = request.password ?? ""; + auth = `${username}:${password}@`; + } + const { port, method } = request; + const url = `${request.protocol}//${auth}${request.hostname}${port ? `:${port}` : ""}${path}`; + const body = method === "GET" || method === "HEAD" ? 
void 0 : request.body; + const requestOptions = { + body, + headers: new Headers(request.headers), + method, + credentials + }; + if (this.config?.cache) { + requestOptions.cache = this.config.cache; + } + if (body) { + requestOptions.duplex = "half"; + } + if (typeof AbortController !== "undefined") { + requestOptions.signal = abortSignal; + } + if (keepAliveSupport.supported) { + requestOptions.keepalive = keepAlive; + } + if (typeof this.config.requestInit === "function") { + Object.assign(requestOptions, this.config.requestInit(request)); + } + let removeSignalEventListener = /* @__PURE__ */ __name(() => { + }, "removeSignalEventListener"); + const fetchRequest = createRequest(url, requestOptions); + const raceOfPromises = [ + fetch(fetchRequest).then((response) => { + const fetchHeaders = response.headers; + const transformedHeaders = {}; + for (const pair of fetchHeaders.entries()) { + transformedHeaders[pair[0]] = pair[1]; + } + const hasReadableStream = response.body != void 0; + if (!hasReadableStream) { + return response.blob().then((body2) => ({ + response: new import_protocol_http.HttpResponse({ + headers: transformedHeaders, + reason: response.statusText, + statusCode: response.status, + body: body2 + }) + })); + } + return { + response: new import_protocol_http.HttpResponse({ + headers: transformedHeaders, + reason: response.statusText, + statusCode: response.status, + body: response.body + }) + }; + }), + requestTimeout(requestTimeoutInMs) + ]; + if (abortSignal) { + raceOfPromises.push( + new Promise((resolve, reject) => { + const onAbort = /* @__PURE__ */ __name(() => { + const abortError = new Error("Request aborted"); + abortError.name = "AbortError"; + reject(abortError); + }, "onAbort"); + if (typeof abortSignal.addEventListener === "function") { + const signal = abortSignal; + signal.addEventListener("abort", onAbort, { once: true }); + removeSignalEventListener = /* @__PURE__ */ __name(() => signal.removeEventListener("abort", onAbort), 
"removeSignalEventListener"); + } else { + abortSignal.onabort = onAbort; + } + }) + ); + } + return Promise.race(raceOfPromises).finally(removeSignalEventListener); + } + updateHttpClientConfig(key, value) { + this.config = void 0; + this.configProvider = this.configProvider.then((config) => { + config[key] = value; + return config; + }); + } + httpHandlerConfigs() { + return this.config ?? {}; + } +}; + +// src/stream-collector.ts +var import_util_base64 = require("@smithy/util-base64"); +var streamCollector = /* @__PURE__ */ __name(async (stream) => { + if (typeof Blob === "function" && stream instanceof Blob || stream.constructor?.name === "Blob") { + if (Blob.prototype.arrayBuffer !== void 0) { + return new Uint8Array(await stream.arrayBuffer()); + } + return collectBlob(stream); + } + return collectStream(stream); +}, "streamCollector"); +async function collectBlob(blob) { + const base64 = await readToBase64(blob); + const arrayBuffer = (0, import_util_base64.fromBase64)(base64); + return new Uint8Array(arrayBuffer); +} +__name(collectBlob, "collectBlob"); +async function collectStream(stream) { + const chunks = []; + const reader = stream.getReader(); + let isDone = false; + let length = 0; + while (!isDone) { + const { done, value } = await reader.read(); + if (value) { + chunks.push(value); + length += value.length; + } + isDone = done; + } + const collected = new Uint8Array(length); + let offset = 0; + for (const chunk of chunks) { + collected.set(chunk, offset); + offset += chunk.length; + } + return collected; +} +__name(collectStream, "collectStream"); +function readToBase64(blob) { + return new Promise((resolve, reject) => { + const reader = new FileReader(); + reader.onloadend = () => { + if (reader.readyState !== 2) { + return reject(new Error("Reader aborted too early")); + } + const result = reader.result ?? ""; + const commaIndex = result.indexOf(","); + const dataOffset = commaIndex > -1 ? 
commaIndex + 1 : result.length; + resolve(result.substring(dataOffset)); + }; + reader.onabort = () => reject(new Error("Read aborted")); + reader.onerror = () => reject(reader.error); + reader.readAsDataURL(blob); + }); +} +__name(readToBase64, "readToBase64"); +// Annotate the CommonJS export names for ESM import in node: + +0 && (module.exports = { + keepAliveSupport, + FetchHttpHandler, + streamCollector +}); + diff --git a/amplify/functions/downloadDocument/node_modules/@smithy/fetch-http-handler/dist-cjs/request-timeout.js b/amplify/functions/downloadDocument/node_modules/@smithy/fetch-http-handler/dist-cjs/request-timeout.js new file mode 100644 index 0000000..532e610 --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@smithy/fetch-http-handler/dist-cjs/request-timeout.js @@ -0,0 +1 @@ +module.exports = require("./index.js"); \ No newline at end of file diff --git a/amplify/functions/downloadDocument/node_modules/@smithy/fetch-http-handler/dist-cjs/stream-collector.js b/amplify/functions/downloadDocument/node_modules/@smithy/fetch-http-handler/dist-cjs/stream-collector.js new file mode 100644 index 0000000..532e610 --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@smithy/fetch-http-handler/dist-cjs/stream-collector.js @@ -0,0 +1 @@ +module.exports = require("./index.js"); \ No newline at end of file diff --git a/amplify/functions/downloadDocument/node_modules/@smithy/fetch-http-handler/dist-es/create-request.js b/amplify/functions/downloadDocument/node_modules/@smithy/fetch-http-handler/dist-es/create-request.js new file mode 100644 index 0000000..b6f1816 --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@smithy/fetch-http-handler/dist-es/create-request.js @@ -0,0 +1,3 @@ +export function createRequest(url, requestOptions) { + return new Request(url, requestOptions); +} diff --git a/amplify/functions/downloadDocument/node_modules/@smithy/fetch-http-handler/dist-es/fetch-http-handler.js 
b/amplify/functions/downloadDocument/node_modules/@smithy/fetch-http-handler/dist-es/fetch-http-handler.js new file mode 100644 index 0000000..dd56e37 --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@smithy/fetch-http-handler/dist-es/fetch-http-handler.js @@ -0,0 +1,139 @@ +import { HttpResponse } from "@smithy/protocol-http"; +import { buildQueryString } from "@smithy/querystring-builder"; +import { createRequest } from "./create-request"; +import { requestTimeout } from "./request-timeout"; +export const keepAliveSupport = { + supported: undefined, +}; +export class FetchHttpHandler { + static create(instanceOrOptions) { + if (typeof instanceOrOptions?.handle === "function") { + return instanceOrOptions; + } + return new FetchHttpHandler(instanceOrOptions); + } + constructor(options) { + if (typeof options === "function") { + this.configProvider = options().then((opts) => opts || {}); + } + else { + this.config = options ?? {}; + this.configProvider = Promise.resolve(this.config); + } + if (keepAliveSupport.supported === undefined) { + keepAliveSupport.supported = Boolean(typeof Request !== "undefined" && "keepalive" in createRequest("https://[::1]")); + } + } + destroy() { + } + async handle(request, { abortSignal } = {}) { + if (!this.config) { + this.config = await this.configProvider; + } + const requestTimeoutInMs = this.config.requestTimeout; + const keepAlive = this.config.keepAlive === true; + const credentials = this.config.credentials; + if (abortSignal?.aborted) { + const abortError = new Error("Request aborted"); + abortError.name = "AbortError"; + return Promise.reject(abortError); + } + let path = request.path; + const queryString = buildQueryString(request.query || {}); + if (queryString) { + path += `?${queryString}`; + } + if (request.fragment) { + path += `#${request.fragment}`; + } + let auth = ""; + if (request.username != null || request.password != null) { + const username = request.username ?? 
""; + const password = request.password ?? ""; + auth = `${username}:${password}@`; + } + const { port, method } = request; + const url = `${request.protocol}//${auth}${request.hostname}${port ? `:${port}` : ""}${path}`; + const body = method === "GET" || method === "HEAD" ? undefined : request.body; + const requestOptions = { + body, + headers: new Headers(request.headers), + method: method, + credentials, + }; + if (this.config?.cache) { + requestOptions.cache = this.config.cache; + } + if (body) { + requestOptions.duplex = "half"; + } + if (typeof AbortController !== "undefined") { + requestOptions.signal = abortSignal; + } + if (keepAliveSupport.supported) { + requestOptions.keepalive = keepAlive; + } + if (typeof this.config.requestInit === "function") { + Object.assign(requestOptions, this.config.requestInit(request)); + } + let removeSignalEventListener = () => { }; + const fetchRequest = createRequest(url, requestOptions); + const raceOfPromises = [ + fetch(fetchRequest).then((response) => { + const fetchHeaders = response.headers; + const transformedHeaders = {}; + for (const pair of fetchHeaders.entries()) { + transformedHeaders[pair[0]] = pair[1]; + } + const hasReadableStream = response.body != undefined; + if (!hasReadableStream) { + return response.blob().then((body) => ({ + response: new HttpResponse({ + headers: transformedHeaders, + reason: response.statusText, + statusCode: response.status, + body, + }), + })); + } + return { + response: new HttpResponse({ + headers: transformedHeaders, + reason: response.statusText, + statusCode: response.status, + body: response.body, + }), + }; + }), + requestTimeout(requestTimeoutInMs), + ]; + if (abortSignal) { + raceOfPromises.push(new Promise((resolve, reject) => { + const onAbort = () => { + const abortError = new Error("Request aborted"); + abortError.name = "AbortError"; + reject(abortError); + }; + if (typeof abortSignal.addEventListener === "function") { + const signal = abortSignal; + 
signal.addEventListener("abort", onAbort, { once: true }); + removeSignalEventListener = () => signal.removeEventListener("abort", onAbort); + } + else { + abortSignal.onabort = onAbort; + } + })); + } + return Promise.race(raceOfPromises).finally(removeSignalEventListener); + } + updateHttpClientConfig(key, value) { + this.config = undefined; + this.configProvider = this.configProvider.then((config) => { + config[key] = value; + return config; + }); + } + httpHandlerConfigs() { + return this.config ?? {}; + } +} diff --git a/amplify/functions/downloadDocument/node_modules/@smithy/fetch-http-handler/dist-es/index.js b/amplify/functions/downloadDocument/node_modules/@smithy/fetch-http-handler/dist-es/index.js new file mode 100644 index 0000000..a0c61f1 --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@smithy/fetch-http-handler/dist-es/index.js @@ -0,0 +1,2 @@ +export * from "./fetch-http-handler"; +export * from "./stream-collector"; diff --git a/amplify/functions/downloadDocument/node_modules/@smithy/fetch-http-handler/dist-es/request-timeout.js b/amplify/functions/downloadDocument/node_modules/@smithy/fetch-http-handler/dist-es/request-timeout.js new file mode 100644 index 0000000..66b09b2 --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@smithy/fetch-http-handler/dist-es/request-timeout.js @@ -0,0 +1,11 @@ +export function requestTimeout(timeoutInMs = 0) { + return new Promise((resolve, reject) => { + if (timeoutInMs) { + setTimeout(() => { + const timeoutError = new Error(`Request did not complete within ${timeoutInMs} ms`); + timeoutError.name = "TimeoutError"; + reject(timeoutError); + }, timeoutInMs); + } + }); +} diff --git a/amplify/functions/downloadDocument/node_modules/@smithy/fetch-http-handler/dist-es/stream-collector.js b/amplify/functions/downloadDocument/node_modules/@smithy/fetch-http-handler/dist-es/stream-collector.js new file mode 100644 index 0000000..a400d9b --- /dev/null +++ 
b/amplify/functions/downloadDocument/node_modules/@smithy/fetch-http-handler/dist-es/stream-collector.js @@ -0,0 +1,53 @@ +import { fromBase64 } from "@smithy/util-base64"; +export const streamCollector = async (stream) => { + if ((typeof Blob === "function" && stream instanceof Blob) || stream.constructor?.name === "Blob") { + if (Blob.prototype.arrayBuffer !== undefined) { + return new Uint8Array(await stream.arrayBuffer()); + } + return collectBlob(stream); + } + return collectStream(stream); +}; +async function collectBlob(blob) { + const base64 = await readToBase64(blob); + const arrayBuffer = fromBase64(base64); + return new Uint8Array(arrayBuffer); +} +async function collectStream(stream) { + const chunks = []; + const reader = stream.getReader(); + let isDone = false; + let length = 0; + while (!isDone) { + const { done, value } = await reader.read(); + if (value) { + chunks.push(value); + length += value.length; + } + isDone = done; + } + const collected = new Uint8Array(length); + let offset = 0; + for (const chunk of chunks) { + collected.set(chunk, offset); + offset += chunk.length; + } + return collected; +} +function readToBase64(blob) { + return new Promise((resolve, reject) => { + const reader = new FileReader(); + reader.onloadend = () => { + if (reader.readyState !== 2) { + return reject(new Error("Reader aborted too early")); + } + const result = (reader.result ?? ""); + const commaIndex = result.indexOf(","); + const dataOffset = commaIndex > -1 ? 
commaIndex + 1 : result.length; + resolve(result.substring(dataOffset)); + }; + reader.onabort = () => reject(new Error("Read aborted")); + reader.onerror = () => reject(reader.error); + reader.readAsDataURL(blob); + }); +} diff --git a/amplify/functions/downloadDocument/node_modules/@smithy/fetch-http-handler/dist-types/create-request.d.ts b/amplify/functions/downloadDocument/node_modules/@smithy/fetch-http-handler/dist-types/create-request.d.ts new file mode 100644 index 0000000..d668b06 --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@smithy/fetch-http-handler/dist-types/create-request.d.ts @@ -0,0 +1,6 @@ +import { AdditionalRequestParameters } from "./fetch-http-handler"; +/** + * @internal + * For mocking/interception. + */ +export declare function createRequest(url: string, requestOptions?: RequestInit & AdditionalRequestParameters): Request; diff --git a/amplify/functions/downloadDocument/node_modules/@smithy/fetch-http-handler/dist-types/fetch-http-handler.d.ts b/amplify/functions/downloadDocument/node_modules/@smithy/fetch-http-handler/dist-types/fetch-http-handler.d.ts new file mode 100644 index 0000000..446301c --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@smithy/fetch-http-handler/dist-types/fetch-http-handler.d.ts @@ -0,0 +1,41 @@ +import { HttpHandler, HttpRequest, HttpResponse } from "@smithy/protocol-http"; +import type { FetchHttpHandlerOptions } from "@smithy/types"; +import { HttpHandlerOptions, Provider } from "@smithy/types"; +/** + * @public + */ +export { FetchHttpHandlerOptions }; +/** + * @internal + * Detection of keepalive support. Can be overridden for testing. + */ +export declare const keepAliveSupport: { + supported: boolean | undefined; +}; +/** + * @internal + */ +export type AdditionalRequestParameters = { + duplex?: "half"; +}; +/** + * @public + * + * HttpHandler implementation using browsers' `fetch` global function. 
+ */ +export declare class FetchHttpHandler implements HttpHandler { + private config?; + private configProvider; + /** + * @returns the input if it is an HttpHandler of any class, + * or instantiates a new instance of this handler. + */ + static create(instanceOrOptions?: HttpHandler | FetchHttpHandlerOptions | Provider): FetchHttpHandler | HttpHandler; + constructor(options?: FetchHttpHandlerOptions | Provider); + destroy(): void; + handle(request: HttpRequest, { abortSignal }?: HttpHandlerOptions): Promise<{ + response: HttpResponse; + }>; + updateHttpClientConfig(key: keyof FetchHttpHandlerOptions, value: FetchHttpHandlerOptions[typeof key]): void; + httpHandlerConfigs(): FetchHttpHandlerOptions; +} diff --git a/amplify/functions/downloadDocument/node_modules/@smithy/fetch-http-handler/dist-types/index.d.ts b/amplify/functions/downloadDocument/node_modules/@smithy/fetch-http-handler/dist-types/index.d.ts new file mode 100644 index 0000000..a0c61f1 --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@smithy/fetch-http-handler/dist-types/index.d.ts @@ -0,0 +1,2 @@ +export * from "./fetch-http-handler"; +export * from "./stream-collector"; diff --git a/amplify/functions/downloadDocument/node_modules/@smithy/fetch-http-handler/dist-types/request-timeout.d.ts b/amplify/functions/downloadDocument/node_modules/@smithy/fetch-http-handler/dist-types/request-timeout.d.ts new file mode 100644 index 0000000..28d784b --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@smithy/fetch-http-handler/dist-types/request-timeout.d.ts @@ -0,0 +1 @@ +export declare function requestTimeout(timeoutInMs?: number): Promise; diff --git a/amplify/functions/downloadDocument/node_modules/@smithy/fetch-http-handler/dist-types/stream-collector.d.ts b/amplify/functions/downloadDocument/node_modules/@smithy/fetch-http-handler/dist-types/stream-collector.d.ts new file mode 100644 index 0000000..b2ca812 --- /dev/null +++ 
b/amplify/functions/downloadDocument/node_modules/@smithy/fetch-http-handler/dist-types/stream-collector.d.ts @@ -0,0 +1,2 @@ +import { StreamCollector } from "@smithy/types"; +export declare const streamCollector: StreamCollector; diff --git a/amplify/functions/downloadDocument/node_modules/@smithy/fetch-http-handler/dist-types/ts3.4/create-request.d.ts b/amplify/functions/downloadDocument/node_modules/@smithy/fetch-http-handler/dist-types/ts3.4/create-request.d.ts new file mode 100644 index 0000000..5f0b074 --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@smithy/fetch-http-handler/dist-types/ts3.4/create-request.d.ts @@ -0,0 +1,6 @@ +import { AdditionalRequestParameters } from "./fetch-http-handler"; +/** + * @internal + * For mocking/interception. + */ +export declare function createRequest(url: string, requestOptions?: RequestInit & AdditionalRequestParameters): Request; diff --git a/amplify/functions/downloadDocument/node_modules/@smithy/fetch-http-handler/dist-types/ts3.4/fetch-http-handler.d.ts b/amplify/functions/downloadDocument/node_modules/@smithy/fetch-http-handler/dist-types/ts3.4/fetch-http-handler.d.ts new file mode 100644 index 0000000..19a2943 --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@smithy/fetch-http-handler/dist-types/ts3.4/fetch-http-handler.d.ts @@ -0,0 +1,41 @@ +import { HttpHandler, HttpRequest, HttpResponse } from "@smithy/protocol-http"; +import { FetchHttpHandlerOptions } from "@smithy/types"; +import { HttpHandlerOptions, Provider } from "@smithy/types"; +/** + * @public + */ +export { FetchHttpHandlerOptions }; +/** + * @internal + * Detection of keepalive support. Can be overridden for testing. + */ +export declare const keepAliveSupport: { + supported: boolean | undefined; +}; +/** + * @internal + */ +export type AdditionalRequestParameters = { + duplex?: "half"; +}; +/** + * @public + * + * HttpHandler implementation using browsers' `fetch` global function. 
+ */ +export declare class FetchHttpHandler implements HttpHandler { + private config?; + private configProvider; + /** + * @returns the input if it is an HttpHandler of any class, + * or instantiates a new instance of this handler. + */ + static create(instanceOrOptions?: HttpHandler | FetchHttpHandlerOptions | Provider): FetchHttpHandler | HttpHandler; + constructor(options?: FetchHttpHandlerOptions | Provider); + destroy(): void; + handle(request: HttpRequest, { abortSignal }?: HttpHandlerOptions): Promise<{ + response: HttpResponse; + }>; + updateHttpClientConfig(key: keyof FetchHttpHandlerOptions, value: FetchHttpHandlerOptions[typeof key]): void; + httpHandlerConfigs(): FetchHttpHandlerOptions; +} diff --git a/amplify/functions/downloadDocument/node_modules/@smithy/fetch-http-handler/dist-types/ts3.4/index.d.ts b/amplify/functions/downloadDocument/node_modules/@smithy/fetch-http-handler/dist-types/ts3.4/index.d.ts new file mode 100644 index 0000000..d30edab --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@smithy/fetch-http-handler/dist-types/ts3.4/index.d.ts @@ -0,0 +1,2 @@ +export * from "./fetch-http-handler"; +export * from "./stream-collector"; diff --git a/amplify/functions/downloadDocument/node_modules/@smithy/fetch-http-handler/dist-types/ts3.4/request-timeout.d.ts b/amplify/functions/downloadDocument/node_modules/@smithy/fetch-http-handler/dist-types/ts3.4/request-timeout.d.ts new file mode 100644 index 0000000..ca24128 --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@smithy/fetch-http-handler/dist-types/ts3.4/request-timeout.d.ts @@ -0,0 +1 @@ +export declare function requestTimeout(timeoutInMs?: number): Promise; diff --git a/amplify/functions/downloadDocument/node_modules/@smithy/fetch-http-handler/dist-types/ts3.4/stream-collector.d.ts b/amplify/functions/downloadDocument/node_modules/@smithy/fetch-http-handler/dist-types/ts3.4/stream-collector.d.ts new file mode 100644 index 0000000..8259097 --- /dev/null 
+++ b/amplify/functions/downloadDocument/node_modules/@smithy/fetch-http-handler/dist-types/ts3.4/stream-collector.d.ts @@ -0,0 +1,2 @@ +import { StreamCollector } from "@smithy/types"; +export declare const streamCollector: StreamCollector; diff --git a/amplify/functions/downloadDocument/node_modules/@smithy/fetch-http-handler/package.json b/amplify/functions/downloadDocument/node_modules/@smithy/fetch-http-handler/package.json new file mode 100644 index 0000000..8ebcaa1 --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@smithy/fetch-http-handler/package.json @@ -0,0 +1,68 @@ +{ + "name": "@smithy/fetch-http-handler", + "version": "5.0.2", + "description": "Provides a way to make requests", + "scripts": { + "build": "concurrently 'yarn:build:cjs' 'yarn:build:es' 'yarn:build:types && yarn build:types:downlevel'", + "build:cjs": "node ../../scripts/inline fetch-http-handler", + "build:es": "yarn g:tsc -p tsconfig.es.json", + "build:types": "yarn g:tsc -p tsconfig.types.json", + "build:types:downlevel": "rimraf dist-types/ts3.4 && downlevel-dts dist-types dist-types/ts3.4", + "stage-release": "rimraf ./.release && yarn pack && mkdir ./.release && tar zxvf ./package.tgz --directory ./.release && rm ./package.tgz", + "clean": "rimraf ./dist-* && rimraf *.tsbuildinfo || exit 0", + "lint": "eslint -c ../../.eslintrc.js \"src/**/*.ts\"", + "format": "prettier --config ../../prettier.config.js --ignore-path ../../.prettierignore --write \"**/*.{ts,md,json}\"", + "extract:docs": "api-extractor run --local", + "test": "yarn g:vitest run && yarn test:browser", + "test:watch": "yarn g:vitest watch", + "test:browser": "yarn g:vitest run -c vitest.config.browser.ts", + "test:browser:watch": "yarn g:vitest watch -c vitest.config.browser.ts" + }, + "author": { + "name": "AWS SDK for JavaScript Team", + "url": "https://aws.amazon.com/javascript/" + }, + "license": "Apache-2.0", + "main": "./dist-cjs/index.js", + "module": "./dist-es/index.js", + "types": 
"./dist-types/index.d.ts", + "dependencies": { + "@smithy/protocol-http": "^5.1.0", + "@smithy/querystring-builder": "^4.0.2", + "@smithy/types": "^4.2.0", + "@smithy/util-base64": "^4.0.0", + "tslib": "^2.6.2" + }, + "devDependencies": { + "@smithy/abort-controller": "^4.0.2", + "concurrently": "7.0.0", + "downlevel-dts": "0.10.1", + "rimraf": "3.0.2", + "typedoc": "0.23.23" + }, + "typesVersions": { + "<4.0": { + "dist-types/*": [ + "dist-types/ts3.4/*" + ] + } + }, + "files": [ + "dist-*/**" + ], + "homepage": "https://github.com/smithy-lang/smithy-typescript/tree/main/packages/fetch-http-handler", + "repository": { + "type": "git", + "url": "https://github.com/smithy-lang/smithy-typescript.git", + "directory": "packages/fetch-http-handler" + }, + "typedoc": { + "entryPoint": "src/index.ts" + }, + "publishConfig": { + "directory": ".release/package" + }, + "engines": { + "node": ">=18.0.0" + } +} \ No newline at end of file diff --git a/amplify/functions/downloadDocument/node_modules/@smithy/hash-node/LICENSE b/amplify/functions/downloadDocument/node_modules/@smithy/hash-node/LICENSE new file mode 100644 index 0000000..7b6491b --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@smithy/hash-node/LICENSE @@ -0,0 +1,201 @@ +Apache License + Version 2.0, January 2004 + http://www.apache.org/licenses/ + + TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION + + 1. Definitions. + + "License" shall mean the terms and conditions for use, reproduction, + and distribution as defined by Sections 1 through 9 of this document. + + "Licensor" shall mean the copyright owner or entity authorized by + the copyright owner that is granting the License. + + "Legal Entity" shall mean the union of the acting entity and all + other entities that control, are controlled by, or are under common + control with that entity. 
For the purposes of this definition, + "control" means (i) the power, direct or indirect, to cause the + direction or management of such entity, whether by contract or + otherwise, or (ii) ownership of fifty percent (50%) or more of the + outstanding shares, or (iii) beneficial ownership of such entity. + + "You" (or "Your") shall mean an individual or Legal Entity + exercising permissions granted by this License. + + "Source" form shall mean the preferred form for making modifications, + including but not limited to software source code, documentation + source, and configuration files. + + "Object" form shall mean any form resulting from mechanical + transformation or translation of a Source form, including but + not limited to compiled object code, generated documentation, + and conversions to other media types. + + "Work" shall mean the work of authorship, whether in Source or + Object form, made available under the License, as indicated by a + copyright notice that is included in or attached to the work + (an example is provided in the Appendix below). + + "Derivative Works" shall mean any work, whether in Source or Object + form, that is based on (or derived from) the Work and for which the + editorial revisions, annotations, elaborations, or other modifications + represent, as a whole, an original work of authorship. For the purposes + of this License, Derivative Works shall not include works that remain + separable from, or merely link (or bind by name) to the interfaces of, + the Work and Derivative Works thereof. + + "Contribution" shall mean any work of authorship, including + the original version of the Work and any modifications or additions + to that Work or Derivative Works thereof, that is intentionally + submitted to Licensor for inclusion in the Work by the copyright owner + or by an individual or Legal Entity authorized to submit on behalf of + the copyright owner. 
For the purposes of this definition, "submitted" + means any form of electronic, verbal, or written communication sent + to the Licensor or its representatives, including but not limited to + communication on electronic mailing lists, source code control systems, + and issue tracking systems that are managed by, or on behalf of, the + Licensor for the purpose of discussing and improving the Work, but + excluding communication that is conspicuously marked or otherwise + designated in writing by the copyright owner as "Not a Contribution." + + "Contributor" shall mean Licensor and any individual or Legal Entity + on behalf of whom a Contribution has been received by Licensor and + subsequently incorporated within the Work. + + 2. Grant of Copyright License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + copyright license to reproduce, prepare Derivative Works of, + publicly display, publicly perform, sublicense, and distribute the + Work and such Derivative Works in Source or Object form. + + 3. Grant of Patent License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + (except as stated in this section) patent license to make, have made, + use, offer to sell, sell, import, and otherwise transfer the Work, + where such license applies only to those patent claims licensable + by such Contributor that are necessarily infringed by their + Contribution(s) alone or by combination of their Contribution(s) + with the Work to which such Contribution(s) was submitted. 
If You + institute patent litigation against any entity (including a + cross-claim or counterclaim in a lawsuit) alleging that the Work + or a Contribution incorporated within the Work constitutes direct + or contributory patent infringement, then any patent licenses + granted to You under this License for that Work shall terminate + as of the date such litigation is filed. + + 4. Redistribution. You may reproduce and distribute copies of the + Work or Derivative Works thereof in any medium, with or without + modifications, and in Source or Object form, provided that You + meet the following conditions: + + (a) You must give any other recipients of the Work or + Derivative Works a copy of this License; and + + (b) You must cause any modified files to carry prominent notices + stating that You changed the files; and + + (c) You must retain, in the Source form of any Derivative Works + that You distribute, all copyright, patent, trademark, and + attribution notices from the Source form of the Work, + excluding those notices that do not pertain to any part of + the Derivative Works; and + + (d) If the Work includes a "NOTICE" text file as part of its + distribution, then any Derivative Works that You distribute must + include a readable copy of the attribution notices contained + within such NOTICE file, excluding those notices that do not + pertain to any part of the Derivative Works, in at least one + of the following places: within a NOTICE text file distributed + as part of the Derivative Works; within the Source form or + documentation, if provided along with the Derivative Works; or, + within a display generated by the Derivative Works, if and + wherever such third-party notices normally appear. The contents + of the NOTICE file are for informational purposes only and + do not modify the License. 
You may add Your own attribution + notices within Derivative Works that You distribute, alongside + or as an addendum to the NOTICE text from the Work, provided + that such additional attribution notices cannot be construed + as modifying the License. + + You may add Your own copyright statement to Your modifications and + may provide additional or different license terms and conditions + for use, reproduction, or distribution of Your modifications, or + for any such Derivative Works as a whole, provided Your use, + reproduction, and distribution of the Work otherwise complies with + the conditions stated in this License. + + 5. Submission of Contributions. Unless You explicitly state otherwise, + any Contribution intentionally submitted for inclusion in the Work + by You to the Licensor shall be under the terms and conditions of + this License, without any additional terms or conditions. + Notwithstanding the above, nothing herein shall supersede or modify + the terms of any separate license agreement you may have executed + with Licensor regarding such Contributions. + + 6. Trademarks. This License does not grant permission to use the trade + names, trademarks, service marks, or product names of the Licensor, + except as required for reasonable and customary use in describing the + origin of the Work and reproducing the content of the NOTICE file. + + 7. Disclaimer of Warranty. Unless required by applicable law or + agreed to in writing, Licensor provides the Work (and each + Contributor provides its Contributions) on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or + implied, including, without limitation, any warranties or conditions + of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A + PARTICULAR PURPOSE. You are solely responsible for determining the + appropriateness of using or redistributing the Work and assume any + risks associated with Your exercise of permissions under this License. + + 8. 
Limitation of Liability. In no event and under no legal theory, + whether in tort (including negligence), contract, or otherwise, + unless required by applicable law (such as deliberate and grossly + negligent acts) or agreed to in writing, shall any Contributor be + liable to You for damages, including any direct, indirect, special, + incidental, or consequential damages of any character arising as a + result of this License or out of the use or inability to use the + Work (including but not limited to damages for loss of goodwill, + work stoppage, computer failure or malfunction, or any and all + other commercial damages or losses), even if such Contributor + has been advised of the possibility of such damages. + + 9. Accepting Warranty or Additional Liability. While redistributing + the Work or Derivative Works thereof, You may choose to offer, + and charge a fee for, acceptance of support, warranty, indemnity, + or other liability obligations and/or rights consistent with this + License. However, in accepting such obligations, You may act only + on Your own behalf and on Your sole responsibility, not on behalf + of any other Contributor, and only if You agree to indemnify, + defend, and hold each Contributor harmless for any liability + incurred by, or claims asserted against, such Contributor by reason + of your accepting any such warranty or additional liability. + + END OF TERMS AND CONDITIONS + + APPENDIX: How to apply the Apache License to your work. + + To apply the Apache License to your work, attach the following + boilerplate notice, with the fields enclosed by brackets "{}" + replaced with your own identifying information. (Don't include + the brackets!) The text should be enclosed in the appropriate + comment syntax for the file format. We also recommend that a + file or class name and description of purpose be included on the + same "printed page" as the copyright notice for easier + identification within third-party archives. 
+ + Copyright 2018-2020 Amazon.com, Inc. or its affiliates. All Rights Reserved. + + Licensed under the Apache License, Version 2.0 (the "License"); + you may not use this file except in compliance with the License. + You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + + Unless required by applicable law or agreed to in writing, software + distributed under the License is distributed on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + See the License for the specific language governing permissions and + limitations under the License. \ No newline at end of file diff --git a/amplify/functions/downloadDocument/node_modules/@smithy/hash-node/README.md b/amplify/functions/downloadDocument/node_modules/@smithy/hash-node/README.md new file mode 100644 index 0000000..a160019 --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@smithy/hash-node/README.md @@ -0,0 +1,10 @@ +# @smithy/md5-node + +[![NPM version](https://img.shields.io/npm/v/@smithy/hash-node/latest.svg)](https://www.npmjs.com/package/@smithy/hash-node) +[![NPM downloads](https://img.shields.io/npm/dm/@smithy/hash-node.svg)](https://www.npmjs.com/package/@smithy/hash-node) + +> An internal package + +## Usage + +You probably shouldn't, at least directly. 
diff --git a/amplify/functions/downloadDocument/node_modules/@smithy/hash-node/dist-cjs/index.js b/amplify/functions/downloadDocument/node_modules/@smithy/hash-node/dist-cjs/index.js new file mode 100644 index 0000000..fc7f7de --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@smithy/hash-node/dist-cjs/index.js @@ -0,0 +1,67 @@ +var __defProp = Object.defineProperty; +var __getOwnPropDesc = Object.getOwnPropertyDescriptor; +var __getOwnPropNames = Object.getOwnPropertyNames; +var __hasOwnProp = Object.prototype.hasOwnProperty; +var __name = (target, value) => __defProp(target, "name", { value, configurable: true }); +var __export = (target, all) => { + for (var name in all) + __defProp(target, name, { get: all[name], enumerable: true }); +}; +var __copyProps = (to, from, except, desc) => { + if (from && typeof from === "object" || typeof from === "function") { + for (let key of __getOwnPropNames(from)) + if (!__hasOwnProp.call(to, key) && key !== except) + __defProp(to, key, { get: () => from[key], enumerable: !(desc = __getOwnPropDesc(from, key)) || desc.enumerable }); + } + return to; +}; +var __toCommonJS = (mod) => __copyProps(__defProp({}, "__esModule", { value: true }), mod); + +// src/index.ts +var src_exports = {}; +__export(src_exports, { + Hash: () => Hash +}); +module.exports = __toCommonJS(src_exports); +var import_util_buffer_from = require("@smithy/util-buffer-from"); +var import_util_utf8 = require("@smithy/util-utf8"); +var import_buffer = require("buffer"); +var import_crypto = require("crypto"); +var Hash = class { + static { + __name(this, "Hash"); + } + constructor(algorithmIdentifier, secret) { + this.algorithmIdentifier = algorithmIdentifier; + this.secret = secret; + this.reset(); + } + update(toHash, encoding) { + this.hash.update((0, import_util_utf8.toUint8Array)(castSourceData(toHash, encoding))); + } + digest() { + return Promise.resolve(this.hash.digest()); + } + reset() { + this.hash = this.secret ? 
(0, import_crypto.createHmac)(this.algorithmIdentifier, castSourceData(this.secret)) : (0, import_crypto.createHash)(this.algorithmIdentifier); + } +}; +function castSourceData(toCast, encoding) { + if (import_buffer.Buffer.isBuffer(toCast)) { + return toCast; + } + if (typeof toCast === "string") { + return (0, import_util_buffer_from.fromString)(toCast, encoding); + } + if (ArrayBuffer.isView(toCast)) { + return (0, import_util_buffer_from.fromArrayBuffer)(toCast.buffer, toCast.byteOffset, toCast.byteLength); + } + return (0, import_util_buffer_from.fromArrayBuffer)(toCast); +} +__name(castSourceData, "castSourceData"); +// Annotate the CommonJS export names for ESM import in node: + +0 && (module.exports = { + Hash +}); + diff --git a/amplify/functions/downloadDocument/node_modules/@smithy/hash-node/dist-es/index.js b/amplify/functions/downloadDocument/node_modules/@smithy/hash-node/dist-es/index.js new file mode 100644 index 0000000..718d9c6 --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@smithy/hash-node/dist-es/index.js @@ -0,0 +1,34 @@ +import { fromArrayBuffer, fromString } from "@smithy/util-buffer-from"; +import { toUint8Array } from "@smithy/util-utf8"; +import { Buffer } from "buffer"; +import { createHash, createHmac } from "crypto"; +export class Hash { + constructor(algorithmIdentifier, secret) { + this.algorithmIdentifier = algorithmIdentifier; + this.secret = secret; + this.reset(); + } + update(toHash, encoding) { + this.hash.update(toUint8Array(castSourceData(toHash, encoding))); + } + digest() { + return Promise.resolve(this.hash.digest()); + } + reset() { + this.hash = this.secret + ? 
createHmac(this.algorithmIdentifier, castSourceData(this.secret)) + : createHash(this.algorithmIdentifier); + } +} +function castSourceData(toCast, encoding) { + if (Buffer.isBuffer(toCast)) { + return toCast; + } + if (typeof toCast === "string") { + return fromString(toCast, encoding); + } + if (ArrayBuffer.isView(toCast)) { + return fromArrayBuffer(toCast.buffer, toCast.byteOffset, toCast.byteLength); + } + return fromArrayBuffer(toCast); +} diff --git a/amplify/functions/downloadDocument/node_modules/@smithy/hash-node/dist-types/index.d.ts b/amplify/functions/downloadDocument/node_modules/@smithy/hash-node/dist-types/index.d.ts new file mode 100644 index 0000000..20ed5ed --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@smithy/hash-node/dist-types/index.d.ts @@ -0,0 +1,13 @@ +import { Checksum, SourceData } from "@smithy/types"; +/** + * @internal + */ +export declare class Hash implements Checksum { + private readonly algorithmIdentifier; + private readonly secret?; + private hash; + constructor(algorithmIdentifier: string, secret?: SourceData); + update(toHash: SourceData, encoding?: "utf8" | "ascii" | "latin1"): void; + digest(): Promise; + reset(): void; +} diff --git a/amplify/functions/downloadDocument/node_modules/@smithy/hash-node/dist-types/ts3.4/index.d.ts b/amplify/functions/downloadDocument/node_modules/@smithy/hash-node/dist-types/ts3.4/index.d.ts new file mode 100644 index 0000000..313ab7e --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@smithy/hash-node/dist-types/ts3.4/index.d.ts @@ -0,0 +1,13 @@ +import { Checksum, SourceData } from "@smithy/types"; +/** + * @internal + */ +export declare class Hash implements Checksum { + private readonly algorithmIdentifier; + private readonly secret?; + private hash; + constructor(algorithmIdentifier: string, secret?: SourceData); + update(toHash: SourceData, encoding?: "utf8" | "ascii" | "latin1"): void; + digest(): Promise; + reset(): void; +} diff --git 
a/amplify/functions/downloadDocument/node_modules/@smithy/hash-node/package.json b/amplify/functions/downloadDocument/node_modules/@smithy/hash-node/package.json new file mode 100644 index 0000000..527b45a --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@smithy/hash-node/package.json @@ -0,0 +1,64 @@ +{ + "name": "@smithy/hash-node", + "version": "4.0.2", + "scripts": { + "build": "concurrently 'yarn:build:cjs' 'yarn:build:es' 'yarn:build:types && yarn build:types:downlevel'", + "build:cjs": "node ../../scripts/inline hash-node", + "build:es": "yarn g:tsc -p tsconfig.es.json", + "build:types": "yarn g:tsc -p tsconfig.types.json", + "build:types:downlevel": "rimraf dist-types/ts3.4 && downlevel-dts dist-types dist-types/ts3.4", + "stage-release": "rimraf ./.release && yarn pack && mkdir ./.release && tar zxvf ./package.tgz --directory ./.release && rm ./package.tgz", + "clean": "rimraf ./dist-* && rimraf *.tsbuildinfo || exit 0", + "lint": "eslint -c ../../.eslintrc.js \"src/**/*.ts\"", + "format": "prettier --config ../../prettier.config.js --ignore-path ../../.prettierignore --write \"**/*.{ts,md,json}\"", + "test": "yarn g:vitest run", + "test:watch": "yarn g:vitest watch" + }, + "main": "./dist-cjs/index.js", + "module": "./dist-es/index.js", + "types": "./dist-types/index.d.ts", + "author": { + "name": "AWS SDK for JavaScript Team", + "url": "https://aws.amazon.com/javascript/" + }, + "license": "Apache-2.0", + "devDependencies": { + "@types/node": "^18.11.9", + "concurrently": "7.0.0", + "downlevel-dts": "0.10.1", + "hash-test-vectors": "^1.3.2", + "rimraf": "3.0.2", + "typedoc": "0.23.23" + }, + "dependencies": { + "@smithy/types": "^4.2.0", + "@smithy/util-buffer-from": "^4.0.0", + "@smithy/util-utf8": "^4.0.0", + "tslib": "^2.6.2" + }, + "engines": { + "node": ">=18.0.0" + }, + "typesVersions": { + "<4.0": { + "dist-types/*": [ + "dist-types/ts3.4/*" + ] + } + }, + "files": [ + "dist-*/**" + ], + "homepage": 
"https://github.com/smithy-lang/smithy-typescript/tree/main/packages/hash-node", + "repository": { + "type": "git", + "url": "https://github.com/smithy-lang/smithy-typescript.git", + "directory": "packages/hash-node" + }, + "typedoc": { + "entryPoint": "src/index.ts" + }, + "publishConfig": { + "directory": ".release/package" + } +} \ No newline at end of file diff --git a/amplify/functions/downloadDocument/node_modules/@smithy/invalid-dependency/LICENSE b/amplify/functions/downloadDocument/node_modules/@smithy/invalid-dependency/LICENSE new file mode 100644 index 0000000..e907b58 --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@smithy/invalid-dependency/LICENSE @@ -0,0 +1,201 @@ + Apache License + Version 2.0, January 2004 + http://www.apache.org/licenses/ + + TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION + + 1. Definitions. + + "License" shall mean the terms and conditions for use, reproduction, + and distribution as defined by Sections 1 through 9 of this document. + + "Licensor" shall mean the copyright owner or entity authorized by + the copyright owner that is granting the License. + + "Legal Entity" shall mean the union of the acting entity and all + other entities that control, are controlled by, or are under common + control with that entity. For the purposes of this definition, + "control" means (i) the power, direct or indirect, to cause the + direction or management of such entity, whether by contract or + otherwise, or (ii) ownership of fifty percent (50%) or more of the + outstanding shares, or (iii) beneficial ownership of such entity. + + "You" (or "Your") shall mean an individual or Legal Entity + exercising permissions granted by this License. + + "Source" form shall mean the preferred form for making modifications, + including but not limited to software source code, documentation + source, and configuration files. 
+ + "Object" form shall mean any form resulting from mechanical + transformation or translation of a Source form, including but + not limited to compiled object code, generated documentation, + and conversions to other media types. + + "Work" shall mean the work of authorship, whether in Source or + Object form, made available under the License, as indicated by a + copyright notice that is included in or attached to the work + (an example is provided in the Appendix below). + + "Derivative Works" shall mean any work, whether in Source or Object + form, that is based on (or derived from) the Work and for which the + editorial revisions, annotations, elaborations, or other modifications + represent, as a whole, an original work of authorship. For the purposes + of this License, Derivative Works shall not include works that remain + separable from, or merely link (or bind by name) to the interfaces of, + the Work and Derivative Works thereof. + + "Contribution" shall mean any work of authorship, including + the original version of the Work and any modifications or additions + to that Work or Derivative Works thereof, that is intentionally + submitted to Licensor for inclusion in the Work by the copyright owner + or by an individual or Legal Entity authorized to submit on behalf of + the copyright owner. For the purposes of this definition, "submitted" + means any form of electronic, verbal, or written communication sent + to the Licensor or its representatives, including but not limited to + communication on electronic mailing lists, source code control systems, + and issue tracking systems that are managed by, or on behalf of, the + Licensor for the purpose of discussing and improving the Work, but + excluding communication that is conspicuously marked or otherwise + designated in writing by the copyright owner as "Not a Contribution." 
+ + "Contributor" shall mean Licensor and any individual or Legal Entity + on behalf of whom a Contribution has been received by Licensor and + subsequently incorporated within the Work. + + 2. Grant of Copyright License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + copyright license to reproduce, prepare Derivative Works of, + publicly display, publicly perform, sublicense, and distribute the + Work and such Derivative Works in Source or Object form. + + 3. Grant of Patent License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + (except as stated in this section) patent license to make, have made, + use, offer to sell, sell, import, and otherwise transfer the Work, + where such license applies only to those patent claims licensable + by such Contributor that are necessarily infringed by their + Contribution(s) alone or by combination of their Contribution(s) + with the Work to which such Contribution(s) was submitted. If You + institute patent litigation against any entity (including a + cross-claim or counterclaim in a lawsuit) alleging that the Work + or a Contribution incorporated within the Work constitutes direct + or contributory patent infringement, then any patent licenses + granted to You under this License for that Work shall terminate + as of the date such litigation is filed. + + 4. Redistribution. 
You may reproduce and distribute copies of the + Work or Derivative Works thereof in any medium, with or without + modifications, and in Source or Object form, provided that You + meet the following conditions: + + (a) You must give any other recipients of the Work or + Derivative Works a copy of this License; and + + (b) You must cause any modified files to carry prominent notices + stating that You changed the files; and + + (c) You must retain, in the Source form of any Derivative Works + that You distribute, all copyright, patent, trademark, and + attribution notices from the Source form of the Work, + excluding those notices that do not pertain to any part of + the Derivative Works; and + + (d) If the Work includes a "NOTICE" text file as part of its + distribution, then any Derivative Works that You distribute must + include a readable copy of the attribution notices contained + within such NOTICE file, excluding those notices that do not + pertain to any part of the Derivative Works, in at least one + of the following places: within a NOTICE text file distributed + as part of the Derivative Works; within the Source form or + documentation, if provided along with the Derivative Works; or, + within a display generated by the Derivative Works, if and + wherever such third-party notices normally appear. The contents + of the NOTICE file are for informational purposes only and + do not modify the License. You may add Your own attribution + notices within Derivative Works that You distribute, alongside + or as an addendum to the NOTICE text from the Work, provided + that such additional attribution notices cannot be construed + as modifying the License. 
+ + You may add Your own copyright statement to Your modifications and + may provide additional or different license terms and conditions + for use, reproduction, or distribution of Your modifications, or + for any such Derivative Works as a whole, provided Your use, + reproduction, and distribution of the Work otherwise complies with + the conditions stated in this License. + + 5. Submission of Contributions. Unless You explicitly state otherwise, + any Contribution intentionally submitted for inclusion in the Work + by You to the Licensor shall be under the terms and conditions of + this License, without any additional terms or conditions. + Notwithstanding the above, nothing herein shall supersede or modify + the terms of any separate license agreement you may have executed + with Licensor regarding such Contributions. + + 6. Trademarks. This License does not grant permission to use the trade + names, trademarks, service marks, or product names of the Licensor, + except as required for reasonable and customary use in describing the + origin of the Work and reproducing the content of the NOTICE file. + + 7. Disclaimer of Warranty. Unless required by applicable law or + agreed to in writing, Licensor provides the Work (and each + Contributor provides its Contributions) on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or + implied, including, without limitation, any warranties or conditions + of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A + PARTICULAR PURPOSE. You are solely responsible for determining the + appropriateness of using or redistributing the Work and assume any + risks associated with Your exercise of permissions under this License. + + 8. Limitation of Liability. 
In no event and under no legal theory, + whether in tort (including negligence), contract, or otherwise, + unless required by applicable law (such as deliberate and grossly + negligent acts) or agreed to in writing, shall any Contributor be + liable to You for damages, including any direct, indirect, special, + incidental, or consequential damages of any character arising as a + result of this License or out of the use or inability to use the + Work (including but not limited to damages for loss of goodwill, + work stoppage, computer failure or malfunction, or any and all + other commercial damages or losses), even if such Contributor + has been advised of the possibility of such damages. + + 9. Accepting Warranty or Additional Liability. While redistributing + the Work or Derivative Works thereof, You may choose to offer, + and charge a fee for, acceptance of support, warranty, indemnity, + or other liability obligations and/or rights consistent with this + License. However, in accepting such obligations, You may act only + on Your own behalf and on Your sole responsibility, not on behalf + of any other Contributor, and only if You agree to indemnify, + defend, and hold each Contributor harmless for any liability + incurred by, or claims asserted against, such Contributor by reason + of your accepting any such warranty or additional liability. + + END OF TERMS AND CONDITIONS + + APPENDIX: How to apply the Apache License to your work. + + To apply the Apache License to your work, attach the following + boilerplate notice, with the fields enclosed by brackets "{}" + replaced with your own identifying information. (Don't include + the brackets!) The text should be enclosed in the appropriate + comment syntax for the file format. We also recommend that a + file or class name and description of purpose be included on the + same "printed page" as the copyright notice for easier + identification within third-party archives. + + Copyright 2019 Amazon.com, Inc. 
or its affiliates. All Rights Reserved. + + Licensed under the Apache License, Version 2.0 (the "License"); + you may not use this file except in compliance with the License. + You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + + Unless required by applicable law or agreed to in writing, software + distributed under the License is distributed on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + See the License for the specific language governing permissions and + limitations under the License. diff --git a/amplify/functions/downloadDocument/node_modules/@smithy/invalid-dependency/README.md b/amplify/functions/downloadDocument/node_modules/@smithy/invalid-dependency/README.md new file mode 100644 index 0000000..9110465 --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@smithy/invalid-dependency/README.md @@ -0,0 +1,10 @@ +# @smithy/invalid-dependency + +[![NPM version](https://img.shields.io/npm/v/@smithy/invalid-dependency/latest.svg)](https://www.npmjs.com/package/@smithy/invalid-dependency) +[![NPM downloads](https://img.shields.io/npm/dm/@smithy/invalid-dependency.svg)](https://www.npmjs.com/package/@smithy/invalid-dependency) + +> An internal package + +## Usage + +You probably shouldn't, at least directly. 
diff --git a/amplify/functions/downloadDocument/node_modules/@smithy/invalid-dependency/dist-cjs/index.js b/amplify/functions/downloadDocument/node_modules/@smithy/invalid-dependency/dist-cjs/index.js new file mode 100644 index 0000000..8eeb1d4 --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@smithy/invalid-dependency/dist-cjs/index.js @@ -0,0 +1,41 @@ +var __defProp = Object.defineProperty; +var __getOwnPropDesc = Object.getOwnPropertyDescriptor; +var __getOwnPropNames = Object.getOwnPropertyNames; +var __hasOwnProp = Object.prototype.hasOwnProperty; +var __name = (target, value) => __defProp(target, "name", { value, configurable: true }); +var __export = (target, all) => { + for (var name in all) + __defProp(target, name, { get: all[name], enumerable: true }); +}; +var __copyProps = (to, from, except, desc) => { + if (from && typeof from === "object" || typeof from === "function") { + for (let key of __getOwnPropNames(from)) + if (!__hasOwnProp.call(to, key) && key !== except) + __defProp(to, key, { get: () => from[key], enumerable: !(desc = __getOwnPropDesc(from, key)) || desc.enumerable }); + } + return to; +}; +var __toCommonJS = (mod) => __copyProps(__defProp({}, "__esModule", { value: true }), mod); + +// src/index.ts +var src_exports = {}; +__export(src_exports, { + invalidFunction: () => invalidFunction, + invalidProvider: () => invalidProvider +}); +module.exports = __toCommonJS(src_exports); + +// src/invalidFunction.ts +var invalidFunction = /* @__PURE__ */ __name((message) => () => { + throw new Error(message); +}, "invalidFunction"); + +// src/invalidProvider.ts +var invalidProvider = /* @__PURE__ */ __name((message) => () => Promise.reject(message), "invalidProvider"); +// Annotate the CommonJS export names for ESM import in node: + +0 && (module.exports = { + invalidFunction, + invalidProvider +}); + diff --git a/amplify/functions/downloadDocument/node_modules/@smithy/invalid-dependency/dist-cjs/invalidFunction.js 
b/amplify/functions/downloadDocument/node_modules/@smithy/invalid-dependency/dist-cjs/invalidFunction.js new file mode 100644 index 0000000..532e610 --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@smithy/invalid-dependency/dist-cjs/invalidFunction.js @@ -0,0 +1 @@ +module.exports = require("./index.js"); \ No newline at end of file diff --git a/amplify/functions/downloadDocument/node_modules/@smithy/invalid-dependency/dist-cjs/invalidProvider.js b/amplify/functions/downloadDocument/node_modules/@smithy/invalid-dependency/dist-cjs/invalidProvider.js new file mode 100644 index 0000000..532e610 --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@smithy/invalid-dependency/dist-cjs/invalidProvider.js @@ -0,0 +1 @@ +module.exports = require("./index.js"); \ No newline at end of file diff --git a/amplify/functions/downloadDocument/node_modules/@smithy/invalid-dependency/dist-es/index.js b/amplify/functions/downloadDocument/node_modules/@smithy/invalid-dependency/dist-es/index.js new file mode 100644 index 0000000..fa0f1a6 --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@smithy/invalid-dependency/dist-es/index.js @@ -0,0 +1,2 @@ +export * from "./invalidFunction"; +export * from "./invalidProvider"; diff --git a/amplify/functions/downloadDocument/node_modules/@smithy/invalid-dependency/dist-es/invalidFunction.js b/amplify/functions/downloadDocument/node_modules/@smithy/invalid-dependency/dist-es/invalidFunction.js new file mode 100644 index 0000000..676f9cb --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@smithy/invalid-dependency/dist-es/invalidFunction.js @@ -0,0 +1,3 @@ +export const invalidFunction = (message) => () => { + throw new Error(message); +}; diff --git a/amplify/functions/downloadDocument/node_modules/@smithy/invalid-dependency/dist-es/invalidProvider.js b/amplify/functions/downloadDocument/node_modules/@smithy/invalid-dependency/dist-es/invalidProvider.js new file mode 100644 
index 0000000..5305a0b --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@smithy/invalid-dependency/dist-es/invalidProvider.js @@ -0,0 +1 @@ +export const invalidProvider = (message) => () => Promise.reject(message); diff --git a/amplify/functions/downloadDocument/node_modules/@smithy/invalid-dependency/dist-types/index.d.ts b/amplify/functions/downloadDocument/node_modules/@smithy/invalid-dependency/dist-types/index.d.ts new file mode 100644 index 0000000..1c99a56 --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@smithy/invalid-dependency/dist-types/index.d.ts @@ -0,0 +1,8 @@ +/** + * @internal + */ +export * from "./invalidFunction"; +/** + * @internal + */ +export * from "./invalidProvider"; diff --git a/amplify/functions/downloadDocument/node_modules/@smithy/invalid-dependency/dist-types/invalidFunction.d.ts b/amplify/functions/downloadDocument/node_modules/@smithy/invalid-dependency/dist-types/invalidFunction.d.ts new file mode 100644 index 0000000..2118b32 --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@smithy/invalid-dependency/dist-types/invalidFunction.d.ts @@ -0,0 +1,4 @@ +/** + * @internal + */ +export declare const invalidFunction: (message: string) => () => never; diff --git a/amplify/functions/downloadDocument/node_modules/@smithy/invalid-dependency/dist-types/invalidProvider.d.ts b/amplify/functions/downloadDocument/node_modules/@smithy/invalid-dependency/dist-types/invalidProvider.d.ts new file mode 100644 index 0000000..3e9c28c --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@smithy/invalid-dependency/dist-types/invalidProvider.d.ts @@ -0,0 +1,5 @@ +import { Provider } from "@smithy/types"; +/** + * @internal + */ +export declare const invalidProvider: (message: string) => Provider; diff --git a/amplify/functions/downloadDocument/node_modules/@smithy/invalid-dependency/dist-types/ts3.4/index.d.ts 
b/amplify/functions/downloadDocument/node_modules/@smithy/invalid-dependency/dist-types/ts3.4/index.d.ts new file mode 100644 index 0000000..6818f1c --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@smithy/invalid-dependency/dist-types/ts3.4/index.d.ts @@ -0,0 +1,8 @@ +/** + * @internal + */ +export * from "./invalidFunction"; +/** + * @internal + */ +export * from "./invalidProvider"; diff --git a/amplify/functions/downloadDocument/node_modules/@smithy/invalid-dependency/dist-types/ts3.4/invalidFunction.d.ts b/amplify/functions/downloadDocument/node_modules/@smithy/invalid-dependency/dist-types/ts3.4/invalidFunction.d.ts new file mode 100644 index 0000000..b0e8f32 --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@smithy/invalid-dependency/dist-types/ts3.4/invalidFunction.d.ts @@ -0,0 +1,4 @@ +/** + * @internal + */ +export declare const invalidFunction: (message: string) => () => never; diff --git a/amplify/functions/downloadDocument/node_modules/@smithy/invalid-dependency/dist-types/ts3.4/invalidProvider.d.ts b/amplify/functions/downloadDocument/node_modules/@smithy/invalid-dependency/dist-types/ts3.4/invalidProvider.d.ts new file mode 100644 index 0000000..765ee5a --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@smithy/invalid-dependency/dist-types/ts3.4/invalidProvider.d.ts @@ -0,0 +1,5 @@ +import { Provider } from "@smithy/types"; +/** + * @internal + */ +export declare const invalidProvider: (message: string) => Provider; diff --git a/amplify/functions/downloadDocument/node_modules/@smithy/invalid-dependency/package.json b/amplify/functions/downloadDocument/node_modules/@smithy/invalid-dependency/package.json new file mode 100644 index 0000000..4782ea4 --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@smithy/invalid-dependency/package.json @@ -0,0 +1,60 @@ +{ + "name": "@smithy/invalid-dependency", + "version": "4.0.2", + "scripts": { + "build": "concurrently 'yarn:build:cjs' 
'yarn:build:es' 'yarn:build:types && yarn build:types:downlevel'", + "build:cjs": "node ../../scripts/inline invalid-dependency", + "build:es": "yarn g:tsc -p tsconfig.es.json", + "build:types": "yarn g:tsc -p tsconfig.types.json", + "build:types:downlevel": "rimraf dist-types/ts3.4 && downlevel-dts dist-types dist-types/ts3.4", + "stage-release": "rimraf ./.release && yarn pack && mkdir ./.release && tar zxvf ./package.tgz --directory ./.release && rm ./package.tgz", + "clean": "rimraf ./dist-* && rimraf *.tsbuildinfo || exit 0", + "lint": "eslint -c ../../.eslintrc.js \"src/**/*.ts\"", + "format": "prettier --config ../../prettier.config.js --ignore-path ../../.prettierignore --write \"**/*.{ts,md,json}\"", + "test": "yarn g:vitest run", + "test:watch": "yarn g:vitest watch" + }, + "main": "./dist-cjs/index.js", + "module": "./dist-es/index.js", + "types": "./dist-types/index.d.ts", + "author": { + "name": "AWS SDK for JavaScript Team", + "url": "https://aws.amazon.com/javascript/" + }, + "license": "Apache-2.0", + "dependencies": { + "@smithy/types": "^4.2.0", + "tslib": "^2.6.2" + }, + "typesVersions": { + "<4.0": { + "dist-types/*": [ + "dist-types/ts3.4/*" + ] + } + }, + "files": [ + "dist-*/**" + ], + "homepage": "https://github.com/smithy-lang/smithy-typescript/tree/main/packages/invalid-dependency", + "repository": { + "type": "git", + "url": "https://github.com/smithy-lang/smithy-typescript.git", + "directory": "packages/invalid-dependency" + }, + "devDependencies": { + "concurrently": "7.0.0", + "downlevel-dts": "0.10.1", + "rimraf": "3.0.2", + "typedoc": "0.23.23" + }, + "typedoc": { + "entryPoint": "src/index.ts" + }, + "publishConfig": { + "directory": ".release/package" + }, + "engines": { + "node": ">=18.0.0" + } +} \ No newline at end of file diff --git a/amplify/functions/downloadDocument/node_modules/@smithy/is-array-buffer/LICENSE b/amplify/functions/downloadDocument/node_modules/@smithy/is-array-buffer/LICENSE new file mode 100644 index 
0000000..7b6491b --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@smithy/is-array-buffer/LICENSE @@ -0,0 +1,201 @@ +Apache License + Version 2.0, January 2004 + http://www.apache.org/licenses/ + + TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION + + 1. Definitions. + + "License" shall mean the terms and conditions for use, reproduction, + and distribution as defined by Sections 1 through 9 of this document. + + "Licensor" shall mean the copyright owner or entity authorized by + the copyright owner that is granting the License. + + "Legal Entity" shall mean the union of the acting entity and all + other entities that control, are controlled by, or are under common + control with that entity. For the purposes of this definition, + "control" means (i) the power, direct or indirect, to cause the + direction or management of such entity, whether by contract or + otherwise, or (ii) ownership of fifty percent (50%) or more of the + outstanding shares, or (iii) beneficial ownership of such entity. + + "You" (or "Your") shall mean an individual or Legal Entity + exercising permissions granted by this License. + + "Source" form shall mean the preferred form for making modifications, + including but not limited to software source code, documentation + source, and configuration files. + + "Object" form shall mean any form resulting from mechanical + transformation or translation of a Source form, including but + not limited to compiled object code, generated documentation, + and conversions to other media types. + + "Work" shall mean the work of authorship, whether in Source or + Object form, made available under the License, as indicated by a + copyright notice that is included in or attached to the work + (an example is provided in the Appendix below). 
+ + "Derivative Works" shall mean any work, whether in Source or Object + form, that is based on (or derived from) the Work and for which the + editorial revisions, annotations, elaborations, or other modifications + represent, as a whole, an original work of authorship. For the purposes + of this License, Derivative Works shall not include works that remain + separable from, or merely link (or bind by name) to the interfaces of, + the Work and Derivative Works thereof. + + "Contribution" shall mean any work of authorship, including + the original version of the Work and any modifications or additions + to that Work or Derivative Works thereof, that is intentionally + submitted to Licensor for inclusion in the Work by the copyright owner + or by an individual or Legal Entity authorized to submit on behalf of + the copyright owner. For the purposes of this definition, "submitted" + means any form of electronic, verbal, or written communication sent + to the Licensor or its representatives, including but not limited to + communication on electronic mailing lists, source code control systems, + and issue tracking systems that are managed by, or on behalf of, the + Licensor for the purpose of discussing and improving the Work, but + excluding communication that is conspicuously marked or otherwise + designated in writing by the copyright owner as "Not a Contribution." + + "Contributor" shall mean Licensor and any individual or Legal Entity + on behalf of whom a Contribution has been received by Licensor and + subsequently incorporated within the Work. + + 2. Grant of Copyright License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + copyright license to reproduce, prepare Derivative Works of, + publicly display, publicly perform, sublicense, and distribute the + Work and such Derivative Works in Source or Object form. + + 3. 
Grant of Patent License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + (except as stated in this section) patent license to make, have made, + use, offer to sell, sell, import, and otherwise transfer the Work, + where such license applies only to those patent claims licensable + by such Contributor that are necessarily infringed by their + Contribution(s) alone or by combination of their Contribution(s) + with the Work to which such Contribution(s) was submitted. If You + institute patent litigation against any entity (including a + cross-claim or counterclaim in a lawsuit) alleging that the Work + or a Contribution incorporated within the Work constitutes direct + or contributory patent infringement, then any patent licenses + granted to You under this License for that Work shall terminate + as of the date such litigation is filed. + + 4. Redistribution. You may reproduce and distribute copies of the + Work or Derivative Works thereof in any medium, with or without + modifications, and in Source or Object form, provided that You + meet the following conditions: + + (a) You must give any other recipients of the Work or + Derivative Works a copy of this License; and + + (b) You must cause any modified files to carry prominent notices + stating that You changed the files; and + + (c) You must retain, in the Source form of any Derivative Works + that You distribute, all copyright, patent, trademark, and + attribution notices from the Source form of the Work, + excluding those notices that do not pertain to any part of + the Derivative Works; and + + (d) If the Work includes a "NOTICE" text file as part of its + distribution, then any Derivative Works that You distribute must + include a readable copy of the attribution notices contained + within such NOTICE file, excluding those notices that do not + pertain to any part of the Derivative 
Works, in at least one + of the following places: within a NOTICE text file distributed + as part of the Derivative Works; within the Source form or + documentation, if provided along with the Derivative Works; or, + within a display generated by the Derivative Works, if and + wherever such third-party notices normally appear. The contents + of the NOTICE file are for informational purposes only and + do not modify the License. You may add Your own attribution + notices within Derivative Works that You distribute, alongside + or as an addendum to the NOTICE text from the Work, provided + that such additional attribution notices cannot be construed + as modifying the License. + + You may add Your own copyright statement to Your modifications and + may provide additional or different license terms and conditions + for use, reproduction, or distribution of Your modifications, or + for any such Derivative Works as a whole, provided Your use, + reproduction, and distribution of the Work otherwise complies with + the conditions stated in this License. + + 5. Submission of Contributions. Unless You explicitly state otherwise, + any Contribution intentionally submitted for inclusion in the Work + by You to the Licensor shall be under the terms and conditions of + this License, without any additional terms or conditions. + Notwithstanding the above, nothing herein shall supersede or modify + the terms of any separate license agreement you may have executed + with Licensor regarding such Contributions. + + 6. Trademarks. This License does not grant permission to use the trade + names, trademarks, service marks, or product names of the Licensor, + except as required for reasonable and customary use in describing the + origin of the Work and reproducing the content of the NOTICE file. + + 7. Disclaimer of Warranty. 
Unless required by applicable law or + agreed to in writing, Licensor provides the Work (and each + Contributor provides its Contributions) on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or + implied, including, without limitation, any warranties or conditions + of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A + PARTICULAR PURPOSE. You are solely responsible for determining the + appropriateness of using or redistributing the Work and assume any + risks associated with Your exercise of permissions under this License. + + 8. Limitation of Liability. In no event and under no legal theory, + whether in tort (including negligence), contract, or otherwise, + unless required by applicable law (such as deliberate and grossly + negligent acts) or agreed to in writing, shall any Contributor be + liable to You for damages, including any direct, indirect, special, + incidental, or consequential damages of any character arising as a + result of this License or out of the use or inability to use the + Work (including but not limited to damages for loss of goodwill, + work stoppage, computer failure or malfunction, or any and all + other commercial damages or losses), even if such Contributor + has been advised of the possibility of such damages. + + 9. Accepting Warranty or Additional Liability. While redistributing + the Work or Derivative Works thereof, You may choose to offer, + and charge a fee for, acceptance of support, warranty, indemnity, + or other liability obligations and/or rights consistent with this + License. However, in accepting such obligations, You may act only + on Your own behalf and on Your sole responsibility, not on behalf + of any other Contributor, and only if You agree to indemnify, + defend, and hold each Contributor harmless for any liability + incurred by, or claims asserted against, such Contributor by reason + of your accepting any such warranty or additional liability. 
+ + END OF TERMS AND CONDITIONS + + APPENDIX: How to apply the Apache License to your work. + + To apply the Apache License to your work, attach the following + boilerplate notice, with the fields enclosed by brackets "{}" + replaced with your own identifying information. (Don't include + the brackets!) The text should be enclosed in the appropriate + comment syntax for the file format. We also recommend that a + file or class name and description of purpose be included on the + same "printed page" as the copyright notice for easier + identification within third-party archives. + + Copyright 2018-2020 Amazon.com, Inc. or its affiliates. All Rights Reserved. + + Licensed under the Apache License, Version 2.0 (the "License"); + you may not use this file except in compliance with the License. + You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + + Unless required by applicable law or agreed to in writing, software + distributed under the License is distributed on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + See the License for the specific language governing permissions and + limitations under the License. \ No newline at end of file diff --git a/amplify/functions/downloadDocument/node_modules/@smithy/is-array-buffer/README.md b/amplify/functions/downloadDocument/node_modules/@smithy/is-array-buffer/README.md new file mode 100644 index 0000000..31853f2 --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@smithy/is-array-buffer/README.md @@ -0,0 +1,10 @@ +# @smithy/is-array-buffer + +[![NPM version](https://img.shields.io/npm/v/@smithy/is-array-buffer/latest.svg)](https://www.npmjs.com/package/@smithy/is-array-buffer) +[![NPM downloads](https://img.shields.io/npm/dm/@smithy/is-array-buffer.svg)](https://www.npmjs.com/package/@smithy/is-array-buffer) + +> An internal package + +## Usage + +You probably shouldn't, at least directly. 
diff --git a/amplify/functions/downloadDocument/node_modules/@smithy/is-array-buffer/dist-cjs/index.js b/amplify/functions/downloadDocument/node_modules/@smithy/is-array-buffer/dist-cjs/index.js new file mode 100644 index 0000000..5d792e7 --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@smithy/is-array-buffer/dist-cjs/index.js @@ -0,0 +1,32 @@ +var __defProp = Object.defineProperty; +var __getOwnPropDesc = Object.getOwnPropertyDescriptor; +var __getOwnPropNames = Object.getOwnPropertyNames; +var __hasOwnProp = Object.prototype.hasOwnProperty; +var __name = (target, value) => __defProp(target, "name", { value, configurable: true }); +var __export = (target, all) => { + for (var name in all) + __defProp(target, name, { get: all[name], enumerable: true }); +}; +var __copyProps = (to, from, except, desc) => { + if (from && typeof from === "object" || typeof from === "function") { + for (let key of __getOwnPropNames(from)) + if (!__hasOwnProp.call(to, key) && key !== except) + __defProp(to, key, { get: () => from[key], enumerable: !(desc = __getOwnPropDesc(from, key)) || desc.enumerable }); + } + return to; +}; +var __toCommonJS = (mod) => __copyProps(__defProp({}, "__esModule", { value: true }), mod); + +// src/index.ts +var src_exports = {}; +__export(src_exports, { + isArrayBuffer: () => isArrayBuffer +}); +module.exports = __toCommonJS(src_exports); +var isArrayBuffer = /* @__PURE__ */ __name((arg) => typeof ArrayBuffer === "function" && arg instanceof ArrayBuffer || Object.prototype.toString.call(arg) === "[object ArrayBuffer]", "isArrayBuffer"); +// Annotate the CommonJS export names for ESM import in node: + +0 && (module.exports = { + isArrayBuffer +}); + diff --git a/amplify/functions/downloadDocument/node_modules/@smithy/is-array-buffer/dist-es/index.js b/amplify/functions/downloadDocument/node_modules/@smithy/is-array-buffer/dist-es/index.js new file mode 100644 index 0000000..8096cca --- /dev/null +++ 
b/amplify/functions/downloadDocument/node_modules/@smithy/is-array-buffer/dist-es/index.js @@ -0,0 +1,2 @@ +export const isArrayBuffer = (arg) => (typeof ArrayBuffer === "function" && arg instanceof ArrayBuffer) || + Object.prototype.toString.call(arg) === "[object ArrayBuffer]"; diff --git a/amplify/functions/downloadDocument/node_modules/@smithy/is-array-buffer/dist-types/index.d.ts b/amplify/functions/downloadDocument/node_modules/@smithy/is-array-buffer/dist-types/index.d.ts new file mode 100644 index 0000000..64f452e --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@smithy/is-array-buffer/dist-types/index.d.ts @@ -0,0 +1,4 @@ +/** + * @internal + */ +export declare const isArrayBuffer: (arg: any) => arg is ArrayBuffer; diff --git a/amplify/functions/downloadDocument/node_modules/@smithy/is-array-buffer/dist-types/ts3.4/index.d.ts b/amplify/functions/downloadDocument/node_modules/@smithy/is-array-buffer/dist-types/ts3.4/index.d.ts new file mode 100644 index 0000000..ca8fd6b --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@smithy/is-array-buffer/dist-types/ts3.4/index.d.ts @@ -0,0 +1,4 @@ +/** + * @internal + */ +export declare const isArrayBuffer: (arg: any) => arg is ArrayBuffer; diff --git a/amplify/functions/downloadDocument/node_modules/@smithy/is-array-buffer/package.json b/amplify/functions/downloadDocument/node_modules/@smithy/is-array-buffer/package.json new file mode 100644 index 0000000..93a468c --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@smithy/is-array-buffer/package.json @@ -0,0 +1,60 @@ +{ + "name": "@smithy/is-array-buffer", + "version": "4.0.0", + "description": "Provides a function for detecting if an argument is an ArrayBuffer", + "scripts": { + "build": "concurrently 'yarn:build:cjs' 'yarn:build:es' 'yarn:build:types && yarn build:types:downlevel'", + "build:cjs": "node ../../scripts/inline is-array-buffer", + "build:es": "yarn g:tsc -p tsconfig.es.json", + "build:types": "yarn 
g:tsc -p tsconfig.types.json", + "build:types:downlevel": "rimraf dist-types/ts3.4 && downlevel-dts dist-types dist-types/ts3.4", + "stage-release": "rimraf ./.release && yarn pack && mkdir ./.release && tar zxvf ./package.tgz --directory ./.release && rm ./package.tgz", + "clean": "rimraf ./dist-* && rimraf *.tsbuildinfo || exit 0", + "lint": "eslint -c ../../.eslintrc.js \"src/**/*.ts\"", + "format": "prettier --config ../../prettier.config.js --ignore-path ../.prettierignore --write \"**/*.{ts,md,json}\"", + "test": "yarn g:vitest run", + "test:watch": "yarn g:vitest watch" + }, + "author": { + "name": "AWS SDK for JavaScript Team", + "url": "https://aws.amazon.com/javascript/" + }, + "license": "Apache-2.0", + "main": "./dist-cjs/index.js", + "module": "./dist-es/index.js", + "types": "./dist-types/index.d.ts", + "dependencies": { + "tslib": "^2.6.2" + }, + "engines": { + "node": ">=18.0.0" + }, + "typesVersions": { + "<4.0": { + "dist-types/*": [ + "dist-types/ts3.4/*" + ] + } + }, + "files": [ + "dist-*/**" + ], + "homepage": "https://github.com/awslabs/smithy-typescript/tree/main/packages/is-array-buffer", + "repository": { + "type": "git", + "url": "https://github.com/awslabs/smithy-typescript.git", + "directory": "packages/is-array-buffer" + }, + "devDependencies": { + "concurrently": "7.0.0", + "downlevel-dts": "0.10.1", + "rimraf": "3.0.2", + "typedoc": "0.23.23" + }, + "typedoc": { + "entryPoint": "src/index.ts" + }, + "publishConfig": { + "directory": ".release/package" + } +} \ No newline at end of file diff --git a/amplify/functions/downloadDocument/node_modules/@smithy/middleware-content-length/LICENSE b/amplify/functions/downloadDocument/node_modules/@smithy/middleware-content-length/LICENSE new file mode 100644 index 0000000..7b6491b --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@smithy/middleware-content-length/LICENSE @@ -0,0 +1,201 @@ +Apache License + Version 2.0, January 2004 + http://www.apache.org/licenses/ + + TERMS 
AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION + + 1. Definitions. + + "License" shall mean the terms and conditions for use, reproduction, + and distribution as defined by Sections 1 through 9 of this document. + + "Licensor" shall mean the copyright owner or entity authorized by + the copyright owner that is granting the License. + + "Legal Entity" shall mean the union of the acting entity and all + other entities that control, are controlled by, or are under common + control with that entity. For the purposes of this definition, + "control" means (i) the power, direct or indirect, to cause the + direction or management of such entity, whether by contract or + otherwise, or (ii) ownership of fifty percent (50%) or more of the + outstanding shares, or (iii) beneficial ownership of such entity. + + "You" (or "Your") shall mean an individual or Legal Entity + exercising permissions granted by this License. + + "Source" form shall mean the preferred form for making modifications, + including but not limited to software source code, documentation + source, and configuration files. + + "Object" form shall mean any form resulting from mechanical + transformation or translation of a Source form, including but + not limited to compiled object code, generated documentation, + and conversions to other media types. + + "Work" shall mean the work of authorship, whether in Source or + Object form, made available under the License, as indicated by a + copyright notice that is included in or attached to the work + (an example is provided in the Appendix below). + + "Derivative Works" shall mean any work, whether in Source or Object + form, that is based on (or derived from) the Work and for which the + editorial revisions, annotations, elaborations, or other modifications + represent, as a whole, an original work of authorship. 
For the purposes + of this License, Derivative Works shall not include works that remain + separable from, or merely link (or bind by name) to the interfaces of, + the Work and Derivative Works thereof. + + "Contribution" shall mean any work of authorship, including + the original version of the Work and any modifications or additions + to that Work or Derivative Works thereof, that is intentionally + submitted to Licensor for inclusion in the Work by the copyright owner + or by an individual or Legal Entity authorized to submit on behalf of + the copyright owner. For the purposes of this definition, "submitted" + means any form of electronic, verbal, or written communication sent + to the Licensor or its representatives, including but not limited to + communication on electronic mailing lists, source code control systems, + and issue tracking systems that are managed by, or on behalf of, the + Licensor for the purpose of discussing and improving the Work, but + excluding communication that is conspicuously marked or otherwise + designated in writing by the copyright owner as "Not a Contribution." + + "Contributor" shall mean Licensor and any individual or Legal Entity + on behalf of whom a Contribution has been received by Licensor and + subsequently incorporated within the Work. + + 2. Grant of Copyright License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + copyright license to reproduce, prepare Derivative Works of, + publicly display, publicly perform, sublicense, and distribute the + Work and such Derivative Works in Source or Object form. + + 3. Grant of Patent License. 
Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + (except as stated in this section) patent license to make, have made, + use, offer to sell, sell, import, and otherwise transfer the Work, + where such license applies only to those patent claims licensable + by such Contributor that are necessarily infringed by their + Contribution(s) alone or by combination of their Contribution(s) + with the Work to which such Contribution(s) was submitted. If You + institute patent litigation against any entity (including a + cross-claim or counterclaim in a lawsuit) alleging that the Work + or a Contribution incorporated within the Work constitutes direct + or contributory patent infringement, then any patent licenses + granted to You under this License for that Work shall terminate + as of the date such litigation is filed. + + 4. Redistribution. You may reproduce and distribute copies of the + Work or Derivative Works thereof in any medium, with or without + modifications, and in Source or Object form, provided that You + meet the following conditions: + + (a) You must give any other recipients of the Work or + Derivative Works a copy of this License; and + + (b) You must cause any modified files to carry prominent notices + stating that You changed the files; and + + (c) You must retain, in the Source form of any Derivative Works + that You distribute, all copyright, patent, trademark, and + attribution notices from the Source form of the Work, + excluding those notices that do not pertain to any part of + the Derivative Works; and + + (d) If the Work includes a "NOTICE" text file as part of its + distribution, then any Derivative Works that You distribute must + include a readable copy of the attribution notices contained + within such NOTICE file, excluding those notices that do not + pertain to any part of the Derivative Works, in at least one + of 
the following places: within a NOTICE text file distributed + as part of the Derivative Works; within the Source form or + documentation, if provided along with the Derivative Works; or, + within a display generated by the Derivative Works, if and + wherever such third-party notices normally appear. The contents + of the NOTICE file are for informational purposes only and + do not modify the License. You may add Your own attribution + notices within Derivative Works that You distribute, alongside + or as an addendum to the NOTICE text from the Work, provided + that such additional attribution notices cannot be construed + as modifying the License. + + You may add Your own copyright statement to Your modifications and + may provide additional or different license terms and conditions + for use, reproduction, or distribution of Your modifications, or + for any such Derivative Works as a whole, provided Your use, + reproduction, and distribution of the Work otherwise complies with + the conditions stated in this License. + + 5. Submission of Contributions. Unless You explicitly state otherwise, + any Contribution intentionally submitted for inclusion in the Work + by You to the Licensor shall be under the terms and conditions of + this License, without any additional terms or conditions. + Notwithstanding the above, nothing herein shall supersede or modify + the terms of any separate license agreement you may have executed + with Licensor regarding such Contributions. + + 6. Trademarks. This License does not grant permission to use the trade + names, trademarks, service marks, or product names of the Licensor, + except as required for reasonable and customary use in describing the + origin of the Work and reproducing the content of the NOTICE file. + + 7. Disclaimer of Warranty. 
Unless required by applicable law or + agreed to in writing, Licensor provides the Work (and each + Contributor provides its Contributions) on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or + implied, including, without limitation, any warranties or conditions + of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A + PARTICULAR PURPOSE. You are solely responsible for determining the + appropriateness of using or redistributing the Work and assume any + risks associated with Your exercise of permissions under this License. + + 8. Limitation of Liability. In no event and under no legal theory, + whether in tort (including negligence), contract, or otherwise, + unless required by applicable law (such as deliberate and grossly + negligent acts) or agreed to in writing, shall any Contributor be + liable to You for damages, including any direct, indirect, special, + incidental, or consequential damages of any character arising as a + result of this License or out of the use or inability to use the + Work (including but not limited to damages for loss of goodwill, + work stoppage, computer failure or malfunction, or any and all + other commercial damages or losses), even if such Contributor + has been advised of the possibility of such damages. + + 9. Accepting Warranty or Additional Liability. While redistributing + the Work or Derivative Works thereof, You may choose to offer, + and charge a fee for, acceptance of support, warranty, indemnity, + or other liability obligations and/or rights consistent with this + License. However, in accepting such obligations, You may act only + on Your own behalf and on Your sole responsibility, not on behalf + of any other Contributor, and only if You agree to indemnify, + defend, and hold each Contributor harmless for any liability + incurred by, or claims asserted against, such Contributor by reason + of your accepting any such warranty or additional liability. 
+ + END OF TERMS AND CONDITIONS + + APPENDIX: How to apply the Apache License to your work. + + To apply the Apache License to your work, attach the following + boilerplate notice, with the fields enclosed by brackets "{}" + replaced with your own identifying information. (Don't include + the brackets!) The text should be enclosed in the appropriate + comment syntax for the file format. We also recommend that a + file or class name and description of purpose be included on the + same "printed page" as the copyright notice for easier + identification within third-party archives. + + Copyright 2018-2020 Amazon.com, Inc. or its affiliates. All Rights Reserved. + + Licensed under the Apache License, Version 2.0 (the "License"); + you may not use this file except in compliance with the License. + You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + + Unless required by applicable law or agreed to in writing, software + distributed under the License is distributed on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + See the License for the specific language governing permissions and + limitations under the License. 
\ No newline at end of file diff --git a/amplify/functions/downloadDocument/node_modules/@smithy/middleware-content-length/README.md b/amplify/functions/downloadDocument/node_modules/@smithy/middleware-content-length/README.md new file mode 100644 index 0000000..2d40d92 --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@smithy/middleware-content-length/README.md @@ -0,0 +1,4 @@ +# @smithy/middleware-content-length + +[![NPM version](https://img.shields.io/npm/v/@smithy/middleware-content-length/latest.svg)](https://www.npmjs.com/package/@smithy/middleware-content-length) +[![NPM downloads](https://img.shields.io/npm/dm/@smithy/middleware-content-length.svg)](https://www.npmjs.com/package/@smithy/middleware-content-length) diff --git a/amplify/functions/downloadDocument/node_modules/@smithy/middleware-content-length/dist-cjs/index.js b/amplify/functions/downloadDocument/node_modules/@smithy/middleware-content-length/dist-cjs/index.js new file mode 100644 index 0000000..9585153 --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@smithy/middleware-content-length/dist-cjs/index.js @@ -0,0 +1,71 @@ +var __defProp = Object.defineProperty; +var __getOwnPropDesc = Object.getOwnPropertyDescriptor; +var __getOwnPropNames = Object.getOwnPropertyNames; +var __hasOwnProp = Object.prototype.hasOwnProperty; +var __name = (target, value) => __defProp(target, "name", { value, configurable: true }); +var __export = (target, all) => { + for (var name in all) + __defProp(target, name, { get: all[name], enumerable: true }); +}; +var __copyProps = (to, from, except, desc) => { + if (from && typeof from === "object" || typeof from === "function") { + for (let key of __getOwnPropNames(from)) + if (!__hasOwnProp.call(to, key) && key !== except) + __defProp(to, key, { get: () => from[key], enumerable: !(desc = __getOwnPropDesc(from, key)) || desc.enumerable }); + } + return to; +}; +var __toCommonJS = (mod) => __copyProps(__defProp({}, "__esModule", { 
value: true }), mod); + +// src/index.ts +var src_exports = {}; +__export(src_exports, { + contentLengthMiddleware: () => contentLengthMiddleware, + contentLengthMiddlewareOptions: () => contentLengthMiddlewareOptions, + getContentLengthPlugin: () => getContentLengthPlugin +}); +module.exports = __toCommonJS(src_exports); +var import_protocol_http = require("@smithy/protocol-http"); +var CONTENT_LENGTH_HEADER = "content-length"; +function contentLengthMiddleware(bodyLengthChecker) { + return (next) => async (args) => { + const request = args.request; + if (import_protocol_http.HttpRequest.isInstance(request)) { + const { body, headers } = request; + if (body && Object.keys(headers).map((str) => str.toLowerCase()).indexOf(CONTENT_LENGTH_HEADER) === -1) { + try { + const length = bodyLengthChecker(body); + request.headers = { + ...request.headers, + [CONTENT_LENGTH_HEADER]: String(length) + }; + } catch (error) { + } + } + } + return next({ + ...args, + request + }); + }; +} +__name(contentLengthMiddleware, "contentLengthMiddleware"); +var contentLengthMiddlewareOptions = { + step: "build", + tags: ["SET_CONTENT_LENGTH", "CONTENT_LENGTH"], + name: "contentLengthMiddleware", + override: true +}; +var getContentLengthPlugin = /* @__PURE__ */ __name((options) => ({ + applyToStack: (clientStack) => { + clientStack.add(contentLengthMiddleware(options.bodyLengthChecker), contentLengthMiddlewareOptions); + } +}), "getContentLengthPlugin"); +// Annotate the CommonJS export names for ESM import in node: + +0 && (module.exports = { + contentLengthMiddleware, + contentLengthMiddlewareOptions, + getContentLengthPlugin +}); + diff --git a/amplify/functions/downloadDocument/node_modules/@smithy/middleware-content-length/dist-es/index.js b/amplify/functions/downloadDocument/node_modules/@smithy/middleware-content-length/dist-es/index.js new file mode 100644 index 0000000..fa18e71 --- /dev/null +++ 
b/amplify/functions/downloadDocument/node_modules/@smithy/middleware-content-length/dist-es/index.js @@ -0,0 +1,39 @@ +import { HttpRequest } from "@smithy/protocol-http"; +const CONTENT_LENGTH_HEADER = "content-length"; +export function contentLengthMiddleware(bodyLengthChecker) { + return (next) => async (args) => { + const request = args.request; + if (HttpRequest.isInstance(request)) { + const { body, headers } = request; + if (body && + Object.keys(headers) + .map((str) => str.toLowerCase()) + .indexOf(CONTENT_LENGTH_HEADER) === -1) { + try { + const length = bodyLengthChecker(body); + request.headers = { + ...request.headers, + [CONTENT_LENGTH_HEADER]: String(length), + }; + } + catch (error) { + } + } + } + return next({ + ...args, + request, + }); + }; +} +export const contentLengthMiddlewareOptions = { + step: "build", + tags: ["SET_CONTENT_LENGTH", "CONTENT_LENGTH"], + name: "contentLengthMiddleware", + override: true, +}; +export const getContentLengthPlugin = (options) => ({ + applyToStack: (clientStack) => { + clientStack.add(contentLengthMiddleware(options.bodyLengthChecker), contentLengthMiddlewareOptions); + }, +}); diff --git a/amplify/functions/downloadDocument/node_modules/@smithy/middleware-content-length/dist-types/index.d.ts b/amplify/functions/downloadDocument/node_modules/@smithy/middleware-content-length/dist-types/index.d.ts new file mode 100644 index 0000000..91a7000 --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@smithy/middleware-content-length/dist-types/index.d.ts @@ -0,0 +1,6 @@ +import { BodyLengthCalculator, BuildHandlerOptions, BuildMiddleware, Pluggable } from "@smithy/types"; +export declare function contentLengthMiddleware(bodyLengthChecker: BodyLengthCalculator): BuildMiddleware; +export declare const contentLengthMiddlewareOptions: BuildHandlerOptions; +export declare const getContentLengthPlugin: (options: { + bodyLengthChecker: BodyLengthCalculator; +}) => Pluggable; diff --git 
a/amplify/functions/downloadDocument/node_modules/@smithy/middleware-content-length/dist-types/ts3.4/index.d.ts b/amplify/functions/downloadDocument/node_modules/@smithy/middleware-content-length/dist-types/ts3.4/index.d.ts new file mode 100644 index 0000000..10e1e18 --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@smithy/middleware-content-length/dist-types/ts3.4/index.d.ts @@ -0,0 +1,6 @@ +import { BodyLengthCalculator, BuildHandlerOptions, BuildMiddleware, Pluggable } from "@smithy/types"; +export declare function contentLengthMiddleware(bodyLengthChecker: BodyLengthCalculator): BuildMiddleware; +export declare const contentLengthMiddlewareOptions: BuildHandlerOptions; +export declare const getContentLengthPlugin: (options: { + bodyLengthChecker: BodyLengthCalculator; +}) => Pluggable; diff --git a/amplify/functions/downloadDocument/node_modules/@smithy/middleware-content-length/package.json b/amplify/functions/downloadDocument/node_modules/@smithy/middleware-content-length/package.json new file mode 100644 index 0000000..807c95b --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@smithy/middleware-content-length/package.json @@ -0,0 +1,63 @@ +{ + "name": "@smithy/middleware-content-length", + "version": "4.0.2", + "scripts": { + "build": "concurrently 'yarn:build:cjs' 'yarn:build:es' 'yarn:build:types && yarn build:types:downlevel'", + "build:cjs": "node ../../scripts/inline middleware-content-length", + "build:es": "yarn g:tsc -p tsconfig.es.json", + "build:types": "yarn g:tsc -p tsconfig.types.json", + "build:types:downlevel": "rimraf dist-types/ts3.4 && downlevel-dts dist-types dist-types/ts3.4", + "stage-release": "rimraf ./.release && yarn pack && mkdir ./.release && tar zxvf ./package.tgz --directory ./.release && rm ./package.tgz", + "clean": "rimraf ./dist-* && rimraf *.tsbuildinfo || exit 0", + "lint": "eslint -c ../../.eslintrc.js \"src/**/*.ts\"", + "format": "prettier --config ../../prettier.config.js 
--ignore-path ../../.prettierignore --write \"**/*.{ts,md,json}\"", + "test": "exit 0", + "test:integration": "yarn g:vitest run -c vitest.config.integ.ts", + "test:integration:watch": "yarn g:vitest watch -c vitest.config.integ.ts" + }, + "main": "./dist-cjs/index.js", + "module": "./dist-es/index.js", + "types": "./dist-types/index.d.ts", + "author": { + "name": "AWS SDK for JavaScript Team", + "url": "https://aws.amazon.com/javascript/" + }, + "license": "Apache-2.0", + "dependencies": { + "@smithy/protocol-http": "^5.1.0", + "@smithy/types": "^4.2.0", + "tslib": "^2.6.2" + }, + "engines": { + "node": ">=18.0.0" + }, + "typesVersions": { + "<4.0": { + "dist-types/*": [ + "dist-types/ts3.4/*" + ] + } + }, + "files": [ + "dist-*/**" + ], + "homepage": "https://github.com/smithy-lang/smithy-typescript/tree/main/packages/middleware-content-length", + "repository": { + "type": "git", + "url": "https://github.com/smithy-lang/smithy-typescript.git", + "directory": "packages/middleware-content-length" + }, + "devDependencies": { + "@smithy/util-test": "^0.2.8", + "concurrently": "7.0.0", + "downlevel-dts": "0.10.1", + "rimraf": "3.0.2", + "typedoc": "0.23.23" + }, + "typedoc": { + "entryPoint": "src/index.ts" + }, + "publishConfig": { + "directory": ".release/package" + } +} \ No newline at end of file diff --git a/amplify/functions/downloadDocument/node_modules/@smithy/middleware-endpoint/LICENSE b/amplify/functions/downloadDocument/node_modules/@smithy/middleware-endpoint/LICENSE new file mode 100644 index 0000000..7b6491b --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@smithy/middleware-endpoint/LICENSE @@ -0,0 +1,201 @@ +Apache License + Version 2.0, January 2004 + http://www.apache.org/licenses/ + + TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION + + 1. Definitions. + + "License" shall mean the terms and conditions for use, reproduction, + and distribution as defined by Sections 1 through 9 of this document. 
+ + "Licensor" shall mean the copyright owner or entity authorized by + the copyright owner that is granting the License. + + "Legal Entity" shall mean the union of the acting entity and all + other entities that control, are controlled by, or are under common + control with that entity. For the purposes of this definition, + "control" means (i) the power, direct or indirect, to cause the + direction or management of such entity, whether by contract or + otherwise, or (ii) ownership of fifty percent (50%) or more of the + outstanding shares, or (iii) beneficial ownership of such entity. + + "You" (or "Your") shall mean an individual or Legal Entity + exercising permissions granted by this License. + + "Source" form shall mean the preferred form for making modifications, + including but not limited to software source code, documentation + source, and configuration files. + + "Object" form shall mean any form resulting from mechanical + transformation or translation of a Source form, including but + not limited to compiled object code, generated documentation, + and conversions to other media types. + + "Work" shall mean the work of authorship, whether in Source or + Object form, made available under the License, as indicated by a + copyright notice that is included in or attached to the work + (an example is provided in the Appendix below). + + "Derivative Works" shall mean any work, whether in Source or Object + form, that is based on (or derived from) the Work and for which the + editorial revisions, annotations, elaborations, or other modifications + represent, as a whole, an original work of authorship. For the purposes + of this License, Derivative Works shall not include works that remain + separable from, or merely link (or bind by name) to the interfaces of, + the Work and Derivative Works thereof. 
+ + "Contribution" shall mean any work of authorship, including + the original version of the Work and any modifications or additions + to that Work or Derivative Works thereof, that is intentionally + submitted to Licensor for inclusion in the Work by the copyright owner + or by an individual or Legal Entity authorized to submit on behalf of + the copyright owner. For the purposes of this definition, "submitted" + means any form of electronic, verbal, or written communication sent + to the Licensor or its representatives, including but not limited to + communication on electronic mailing lists, source code control systems, + and issue tracking systems that are managed by, or on behalf of, the + Licensor for the purpose of discussing and improving the Work, but + excluding communication that is conspicuously marked or otherwise + designated in writing by the copyright owner as "Not a Contribution." + + "Contributor" shall mean Licensor and any individual or Legal Entity + on behalf of whom a Contribution has been received by Licensor and + subsequently incorporated within the Work. + + 2. Grant of Copyright License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + copyright license to reproduce, prepare Derivative Works of, + publicly display, publicly perform, sublicense, and distribute the + Work and such Derivative Works in Source or Object form. + + 3. Grant of Patent License. 
Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + (except as stated in this section) patent license to make, have made, + use, offer to sell, sell, import, and otherwise transfer the Work, + where such license applies only to those patent claims licensable + by such Contributor that are necessarily infringed by their + Contribution(s) alone or by combination of their Contribution(s) + with the Work to which such Contribution(s) was submitted. If You + institute patent litigation against any entity (including a + cross-claim or counterclaim in a lawsuit) alleging that the Work + or a Contribution incorporated within the Work constitutes direct + or contributory patent infringement, then any patent licenses + granted to You under this License for that Work shall terminate + as of the date such litigation is filed. + + 4. Redistribution. You may reproduce and distribute copies of the + Work or Derivative Works thereof in any medium, with or without + modifications, and in Source or Object form, provided that You + meet the following conditions: + + (a) You must give any other recipients of the Work or + Derivative Works a copy of this License; and + + (b) You must cause any modified files to carry prominent notices + stating that You changed the files; and + + (c) You must retain, in the Source form of any Derivative Works + that You distribute, all copyright, patent, trademark, and + attribution notices from the Source form of the Work, + excluding those notices that do not pertain to any part of + the Derivative Works; and + + (d) If the Work includes a "NOTICE" text file as part of its + distribution, then any Derivative Works that You distribute must + include a readable copy of the attribution notices contained + within such NOTICE file, excluding those notices that do not + pertain to any part of the Derivative Works, in at least one + of 
the following places: within a NOTICE text file distributed + as part of the Derivative Works; within the Source form or + documentation, if provided along with the Derivative Works; or, + within a display generated by the Derivative Works, if and + wherever such third-party notices normally appear. The contents + of the NOTICE file are for informational purposes only and + do not modify the License. You may add Your own attribution + notices within Derivative Works that You distribute, alongside + or as an addendum to the NOTICE text from the Work, provided + that such additional attribution notices cannot be construed + as modifying the License. + + You may add Your own copyright statement to Your modifications and + may provide additional or different license terms and conditions + for use, reproduction, or distribution of Your modifications, or + for any such Derivative Works as a whole, provided Your use, + reproduction, and distribution of the Work otherwise complies with + the conditions stated in this License. + + 5. Submission of Contributions. Unless You explicitly state otherwise, + any Contribution intentionally submitted for inclusion in the Work + by You to the Licensor shall be under the terms and conditions of + this License, without any additional terms or conditions. + Notwithstanding the above, nothing herein shall supersede or modify + the terms of any separate license agreement you may have executed + with Licensor regarding such Contributions. + + 6. Trademarks. This License does not grant permission to use the trade + names, trademarks, service marks, or product names of the Licensor, + except as required for reasonable and customary use in describing the + origin of the Work and reproducing the content of the NOTICE file. + + 7. Disclaimer of Warranty. 
Unless required by applicable law or + agreed to in writing, Licensor provides the Work (and each + Contributor provides its Contributions) on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or + implied, including, without limitation, any warranties or conditions + of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A + PARTICULAR PURPOSE. You are solely responsible for determining the + appropriateness of using or redistributing the Work and assume any + risks associated with Your exercise of permissions under this License. + + 8. Limitation of Liability. In no event and under no legal theory, + whether in tort (including negligence), contract, or otherwise, + unless required by applicable law (such as deliberate and grossly + negligent acts) or agreed to in writing, shall any Contributor be + liable to You for damages, including any direct, indirect, special, + incidental, or consequential damages of any character arising as a + result of this License or out of the use or inability to use the + Work (including but not limited to damages for loss of goodwill, + work stoppage, computer failure or malfunction, or any and all + other commercial damages or losses), even if such Contributor + has been advised of the possibility of such damages. + + 9. Accepting Warranty or Additional Liability. While redistributing + the Work or Derivative Works thereof, You may choose to offer, + and charge a fee for, acceptance of support, warranty, indemnity, + or other liability obligations and/or rights consistent with this + License. However, in accepting such obligations, You may act only + on Your own behalf and on Your sole responsibility, not on behalf + of any other Contributor, and only if You agree to indemnify, + defend, and hold each Contributor harmless for any liability + incurred by, or claims asserted against, such Contributor by reason + of your accepting any such warranty or additional liability. 
+ + END OF TERMS AND CONDITIONS + + APPENDIX: How to apply the Apache License to your work. + + To apply the Apache License to your work, attach the following + boilerplate notice, with the fields enclosed by brackets "{}" + replaced with your own identifying information. (Don't include + the brackets!) The text should be enclosed in the appropriate + comment syntax for the file format. We also recommend that a + file or class name and description of purpose be included on the + same "printed page" as the copyright notice for easier + identification within third-party archives. + + Copyright 2018-2020 Amazon.com, Inc. or its affiliates. All Rights Reserved. + + Licensed under the Apache License, Version 2.0 (the "License"); + you may not use this file except in compliance with the License. + You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + + Unless required by applicable law or agreed to in writing, software + distributed under the License is distributed on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + See the License for the specific language governing permissions and + limitations under the License. \ No newline at end of file diff --git a/amplify/functions/downloadDocument/node_modules/@smithy/middleware-endpoint/README.md b/amplify/functions/downloadDocument/node_modules/@smithy/middleware-endpoint/README.md new file mode 100644 index 0000000..e03cbb2 --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@smithy/middleware-endpoint/README.md @@ -0,0 +1,10 @@ +# @smithy/middleware-endpoint + +[![NPM version](https://img.shields.io/npm/v/@smithy/middleware-endpoint/latest.svg)](https://www.npmjs.com/package/@smithy/middleware-endpoint) +[![NPM downloads](https://img.shields.io/npm/dm/@smithy/middleware-endpoint.svg)](https://www.npmjs.com/package/@smithy/middleware-endpoint) + +> An internal package + +## Usage + +You probably shouldn't, at least directly. 
diff --git a/amplify/functions/downloadDocument/node_modules/@smithy/middleware-endpoint/dist-cjs/adaptors/createConfigValueProvider.js b/amplify/functions/downloadDocument/node_modules/@smithy/middleware-endpoint/dist-cjs/adaptors/createConfigValueProvider.js new file mode 100644 index 0000000..0440577 --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@smithy/middleware-endpoint/dist-cjs/adaptors/createConfigValueProvider.js @@ -0,0 +1 @@ +module.exports = require("../index.js"); \ No newline at end of file diff --git a/amplify/functions/downloadDocument/node_modules/@smithy/middleware-endpoint/dist-cjs/adaptors/getEndpointFromConfig.browser.js b/amplify/functions/downloadDocument/node_modules/@smithy/middleware-endpoint/dist-cjs/adaptors/getEndpointFromConfig.browser.js new file mode 100644 index 0000000..9b578a7 --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@smithy/middleware-endpoint/dist-cjs/adaptors/getEndpointFromConfig.browser.js @@ -0,0 +1,5 @@ +"use strict"; +Object.defineProperty(exports, "__esModule", { value: true }); +exports.getEndpointFromConfig = void 0; +const getEndpointFromConfig = async (serviceId) => undefined; +exports.getEndpointFromConfig = getEndpointFromConfig; diff --git a/amplify/functions/downloadDocument/node_modules/@smithy/middleware-endpoint/dist-cjs/adaptors/getEndpointFromConfig.js b/amplify/functions/downloadDocument/node_modules/@smithy/middleware-endpoint/dist-cjs/adaptors/getEndpointFromConfig.js new file mode 100644 index 0000000..c7c302b --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@smithy/middleware-endpoint/dist-cjs/adaptors/getEndpointFromConfig.js @@ -0,0 +1,7 @@ +"use strict"; +Object.defineProperty(exports, "__esModule", { value: true }); +exports.getEndpointFromConfig = void 0; +const node_config_provider_1 = require("@smithy/node-config-provider"); +const getEndpointUrlConfig_1 = require("./getEndpointUrlConfig"); +const getEndpointFromConfig = async 
(serviceId) => (0, node_config_provider_1.loadConfig)((0, getEndpointUrlConfig_1.getEndpointUrlConfig)(serviceId !== null && serviceId !== void 0 ? serviceId : ""))(); +exports.getEndpointFromConfig = getEndpointFromConfig; diff --git a/amplify/functions/downloadDocument/node_modules/@smithy/middleware-endpoint/dist-cjs/adaptors/getEndpointFromInstructions.js b/amplify/functions/downloadDocument/node_modules/@smithy/middleware-endpoint/dist-cjs/adaptors/getEndpointFromInstructions.js new file mode 100644 index 0000000..0440577 --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@smithy/middleware-endpoint/dist-cjs/adaptors/getEndpointFromInstructions.js @@ -0,0 +1 @@ +module.exports = require("../index.js"); \ No newline at end of file diff --git a/amplify/functions/downloadDocument/node_modules/@smithy/middleware-endpoint/dist-cjs/adaptors/getEndpointUrlConfig.js b/amplify/functions/downloadDocument/node_modules/@smithy/middleware-endpoint/dist-cjs/adaptors/getEndpointUrlConfig.js new file mode 100644 index 0000000..fe5c010 --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@smithy/middleware-endpoint/dist-cjs/adaptors/getEndpointUrlConfig.js @@ -0,0 +1,35 @@ +"use strict"; +Object.defineProperty(exports, "__esModule", { value: true }); +exports.getEndpointUrlConfig = void 0; +const shared_ini_file_loader_1 = require("@smithy/shared-ini-file-loader"); +const ENV_ENDPOINT_URL = "AWS_ENDPOINT_URL"; +const CONFIG_ENDPOINT_URL = "endpoint_url"; +const getEndpointUrlConfig = (serviceId) => ({ + environmentVariableSelector: (env) => { + const serviceSuffixParts = serviceId.split(" ").map((w) => w.toUpperCase()); + const serviceEndpointUrl = env[[ENV_ENDPOINT_URL, ...serviceSuffixParts].join("_")]; + if (serviceEndpointUrl) + return serviceEndpointUrl; + const endpointUrl = env[ENV_ENDPOINT_URL]; + if (endpointUrl) + return endpointUrl; + return undefined; + }, + configFileSelector: (profile, config) => { + if (config && profile.services) 
{ + const servicesSection = config[["services", profile.services].join(shared_ini_file_loader_1.CONFIG_PREFIX_SEPARATOR)]; + if (servicesSection) { + const servicePrefixParts = serviceId.split(" ").map((w) => w.toLowerCase()); + const endpointUrl = servicesSection[[servicePrefixParts.join("_"), CONFIG_ENDPOINT_URL].join(shared_ini_file_loader_1.CONFIG_PREFIX_SEPARATOR)]; + if (endpointUrl) + return endpointUrl; + } + } + const endpointUrl = profile[CONFIG_ENDPOINT_URL]; + if (endpointUrl) + return endpointUrl; + return undefined; + }, + default: undefined, +}); +exports.getEndpointUrlConfig = getEndpointUrlConfig; diff --git a/amplify/functions/downloadDocument/node_modules/@smithy/middleware-endpoint/dist-cjs/adaptors/index.js b/amplify/functions/downloadDocument/node_modules/@smithy/middleware-endpoint/dist-cjs/adaptors/index.js new file mode 100644 index 0000000..0440577 --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@smithy/middleware-endpoint/dist-cjs/adaptors/index.js @@ -0,0 +1 @@ +module.exports = require("../index.js"); \ No newline at end of file diff --git a/amplify/functions/downloadDocument/node_modules/@smithy/middleware-endpoint/dist-cjs/adaptors/toEndpointV1.js b/amplify/functions/downloadDocument/node_modules/@smithy/middleware-endpoint/dist-cjs/adaptors/toEndpointV1.js new file mode 100644 index 0000000..0440577 --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@smithy/middleware-endpoint/dist-cjs/adaptors/toEndpointV1.js @@ -0,0 +1 @@ +module.exports = require("../index.js"); \ No newline at end of file diff --git a/amplify/functions/downloadDocument/node_modules/@smithy/middleware-endpoint/dist-cjs/endpointMiddleware.js b/amplify/functions/downloadDocument/node_modules/@smithy/middleware-endpoint/dist-cjs/endpointMiddleware.js new file mode 100644 index 0000000..532e610 --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@smithy/middleware-endpoint/dist-cjs/endpointMiddleware.js @@ -0,0 +1 
@@ +module.exports = require("./index.js"); \ No newline at end of file diff --git a/amplify/functions/downloadDocument/node_modules/@smithy/middleware-endpoint/dist-cjs/getEndpointPlugin.js b/amplify/functions/downloadDocument/node_modules/@smithy/middleware-endpoint/dist-cjs/getEndpointPlugin.js new file mode 100644 index 0000000..532e610 --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@smithy/middleware-endpoint/dist-cjs/getEndpointPlugin.js @@ -0,0 +1 @@ +module.exports = require("./index.js"); \ No newline at end of file diff --git a/amplify/functions/downloadDocument/node_modules/@smithy/middleware-endpoint/dist-cjs/index.js b/amplify/functions/downloadDocument/node_modules/@smithy/middleware-endpoint/dist-cjs/index.js new file mode 100644 index 0000000..177fdc3 --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@smithy/middleware-endpoint/dist-cjs/index.js @@ -0,0 +1,279 @@ +var __defProp = Object.defineProperty; +var __getOwnPropDesc = Object.getOwnPropertyDescriptor; +var __getOwnPropNames = Object.getOwnPropertyNames; +var __hasOwnProp = Object.prototype.hasOwnProperty; +var __name = (target, value) => __defProp(target, "name", { value, configurable: true }); +var __export = (target, all) => { + for (var name in all) + __defProp(target, name, { get: all[name], enumerable: true }); +}; +var __copyProps = (to, from, except, desc) => { + if (from && typeof from === "object" || typeof from === "function") { + for (let key of __getOwnPropNames(from)) + if (!__hasOwnProp.call(to, key) && key !== except) + __defProp(to, key, { get: () => from[key], enumerable: !(desc = __getOwnPropDesc(from, key)) || desc.enumerable }); + } + return to; +}; +var __toCommonJS = (mod) => __copyProps(__defProp({}, "__esModule", { value: true }), mod); + +// src/index.ts +var src_exports = {}; +__export(src_exports, { + endpointMiddleware: () => endpointMiddleware, + endpointMiddlewareOptions: () => endpointMiddlewareOptions, + 
getEndpointFromInstructions: () => getEndpointFromInstructions, + getEndpointPlugin: () => getEndpointPlugin, + resolveEndpointConfig: () => resolveEndpointConfig, + resolveParams: () => resolveParams, + toEndpointV1: () => toEndpointV1 +}); +module.exports = __toCommonJS(src_exports); + +// src/service-customizations/s3.ts +var resolveParamsForS3 = /* @__PURE__ */ __name(async (endpointParams) => { + const bucket = endpointParams?.Bucket || ""; + if (typeof endpointParams.Bucket === "string") { + endpointParams.Bucket = bucket.replace(/#/g, encodeURIComponent("#")).replace(/\?/g, encodeURIComponent("?")); + } + if (isArnBucketName(bucket)) { + if (endpointParams.ForcePathStyle === true) { + throw new Error("Path-style addressing cannot be used with ARN buckets"); + } + } else if (!isDnsCompatibleBucketName(bucket) || bucket.indexOf(".") !== -1 && !String(endpointParams.Endpoint).startsWith("http:") || bucket.toLowerCase() !== bucket || bucket.length < 3) { + endpointParams.ForcePathStyle = true; + } + if (endpointParams.DisableMultiRegionAccessPoints) { + endpointParams.disableMultiRegionAccessPoints = true; + endpointParams.DisableMRAP = true; + } + return endpointParams; +}, "resolveParamsForS3"); +var DOMAIN_PATTERN = /^[a-z0-9][a-z0-9\.\-]{1,61}[a-z0-9]$/; +var IP_ADDRESS_PATTERN = /(\d+\.){3}\d+/; +var DOTS_PATTERN = /\.\./; +var isDnsCompatibleBucketName = /* @__PURE__ */ __name((bucketName) => DOMAIN_PATTERN.test(bucketName) && !IP_ADDRESS_PATTERN.test(bucketName) && !DOTS_PATTERN.test(bucketName), "isDnsCompatibleBucketName"); +var isArnBucketName = /* @__PURE__ */ __name((bucketName) => { + const [arn, partition, service, , , bucket] = bucketName.split(":"); + const isArn = arn === "arn" && bucketName.split(":").length >= 6; + const isValidArn = Boolean(isArn && partition && service && bucket); + if (isArn && !isValidArn) { + throw new Error(`Invalid ARN: ${bucketName} was an invalid ARN.`); + } + return isValidArn; +}, "isArnBucketName"); + +// 
src/adaptors/createConfigValueProvider.ts +var createConfigValueProvider = /* @__PURE__ */ __name((configKey, canonicalEndpointParamKey, config) => { + const configProvider = /* @__PURE__ */ __name(async () => { + const configValue = config[configKey] ?? config[canonicalEndpointParamKey]; + if (typeof configValue === "function") { + return configValue(); + } + return configValue; + }, "configProvider"); + if (configKey === "credentialScope" || canonicalEndpointParamKey === "CredentialScope") { + return async () => { + const credentials = typeof config.credentials === "function" ? await config.credentials() : config.credentials; + const configValue = credentials?.credentialScope ?? credentials?.CredentialScope; + return configValue; + }; + } + if (configKey === "accountId" || canonicalEndpointParamKey === "AccountId") { + return async () => { + const credentials = typeof config.credentials === "function" ? await config.credentials() : config.credentials; + const configValue = credentials?.accountId ?? credentials?.AccountId; + return configValue; + }; + } + if (configKey === "endpoint" || canonicalEndpointParamKey === "endpoint") { + return async () => { + const endpoint = await configProvider(); + if (endpoint && typeof endpoint === "object") { + if ("url" in endpoint) { + return endpoint.url.href; + } + if ("hostname" in endpoint) { + const { protocol, hostname, port, path } = endpoint; + return `${protocol}//${hostname}${port ? 
":" + port : ""}${path}`; + } + } + return endpoint; + }; + } + return configProvider; +}, "createConfigValueProvider"); + +// src/adaptors/getEndpointFromInstructions.ts +var import_getEndpointFromConfig = require("./adaptors/getEndpointFromConfig"); + +// src/adaptors/toEndpointV1.ts +var import_url_parser = require("@smithy/url-parser"); +var toEndpointV1 = /* @__PURE__ */ __name((endpoint) => { + if (typeof endpoint === "object") { + if ("url" in endpoint) { + return (0, import_url_parser.parseUrl)(endpoint.url); + } + return endpoint; + } + return (0, import_url_parser.parseUrl)(endpoint); +}, "toEndpointV1"); + +// src/adaptors/getEndpointFromInstructions.ts +var getEndpointFromInstructions = /* @__PURE__ */ __name(async (commandInput, instructionsSupplier, clientConfig, context) => { + if (!clientConfig.endpoint) { + let endpointFromConfig; + if (clientConfig.serviceConfiguredEndpoint) { + endpointFromConfig = await clientConfig.serviceConfiguredEndpoint(); + } else { + endpointFromConfig = await (0, import_getEndpointFromConfig.getEndpointFromConfig)(clientConfig.serviceId); + } + if (endpointFromConfig) { + clientConfig.endpoint = () => Promise.resolve(toEndpointV1(endpointFromConfig)); + } + } + const endpointParams = await resolveParams(commandInput, instructionsSupplier, clientConfig); + if (typeof clientConfig.endpointProvider !== "function") { + throw new Error("config.endpointProvider is not set."); + } + const endpoint = clientConfig.endpointProvider(endpointParams, context); + return endpoint; +}, "getEndpointFromInstructions"); +var resolveParams = /* @__PURE__ */ __name(async (commandInput, instructionsSupplier, clientConfig) => { + const endpointParams = {}; + const instructions = instructionsSupplier?.getEndpointParameterInstructions?.() || {}; + for (const [name, instruction] of Object.entries(instructions)) { + switch (instruction.type) { + case "staticContextParams": + endpointParams[name] = instruction.value; + break; + case 
"contextParams": + endpointParams[name] = commandInput[instruction.name]; + break; + case "clientContextParams": + case "builtInParams": + endpointParams[name] = await createConfigValueProvider(instruction.name, name, clientConfig)(); + break; + case "operationContextParams": + endpointParams[name] = instruction.get(commandInput); + break; + default: + throw new Error("Unrecognized endpoint parameter instruction: " + JSON.stringify(instruction)); + } + } + if (Object.keys(instructions).length === 0) { + Object.assign(endpointParams, clientConfig); + } + if (String(clientConfig.serviceId).toLowerCase() === "s3") { + await resolveParamsForS3(endpointParams); + } + return endpointParams; +}, "resolveParams"); + +// src/endpointMiddleware.ts +var import_core = require("@smithy/core"); +var import_util_middleware = require("@smithy/util-middleware"); +var endpointMiddleware = /* @__PURE__ */ __name(({ + config, + instructions +}) => { + return (next, context) => async (args) => { + if (config.endpoint) { + (0, import_core.setFeature)(context, "ENDPOINT_OVERRIDE", "N"); + } + const endpoint = await getEndpointFromInstructions( + args.input, + { + getEndpointParameterInstructions() { + return instructions; + } + }, + { ...config }, + context + ); + context.endpointV2 = endpoint; + context.authSchemes = endpoint.properties?.authSchemes; + const authScheme = context.authSchemes?.[0]; + if (authScheme) { + context["signing_region"] = authScheme.signingRegion; + context["signing_service"] = authScheme.signingName; + const smithyContext = (0, import_util_middleware.getSmithyContext)(context); + const httpAuthOption = smithyContext?.selectedHttpAuthScheme?.httpAuthOption; + if (httpAuthOption) { + httpAuthOption.signingProperties = Object.assign( + httpAuthOption.signingProperties || {}, + { + signing_region: authScheme.signingRegion, + signingRegion: authScheme.signingRegion, + signing_service: authScheme.signingName, + signingName: authScheme.signingName, + signingRegionSet: 
authScheme.signingRegionSet + }, + authScheme.properties + ); + } + } + return next({ + ...args + }); + }; +}, "endpointMiddleware"); + +// src/getEndpointPlugin.ts +var import_middleware_serde = require("@smithy/middleware-serde"); +var endpointMiddlewareOptions = { + step: "serialize", + tags: ["ENDPOINT_PARAMETERS", "ENDPOINT_V2", "ENDPOINT"], + name: "endpointV2Middleware", + override: true, + relation: "before", + toMiddleware: import_middleware_serde.serializerMiddlewareOption.name +}; +var getEndpointPlugin = /* @__PURE__ */ __name((config, instructions) => ({ + applyToStack: (clientStack) => { + clientStack.addRelativeTo( + endpointMiddleware({ + config, + instructions + }), + endpointMiddlewareOptions + ); + } +}), "getEndpointPlugin"); + +// src/resolveEndpointConfig.ts + +var import_getEndpointFromConfig2 = require("./adaptors/getEndpointFromConfig"); +var resolveEndpointConfig = /* @__PURE__ */ __name((input) => { + const tls = input.tls ?? true; + const { endpoint, useDualstackEndpoint, useFipsEndpoint } = input; + const customEndpointProvider = endpoint != null ? async () => toEndpointV1(await (0, import_util_middleware.normalizeProvider)(endpoint)()) : void 0; + const isCustomEndpoint = !!endpoint; + const resolvedConfig = Object.assign(input, { + endpoint: customEndpointProvider, + tls, + isCustomEndpoint, + useDualstackEndpoint: (0, import_util_middleware.normalizeProvider)(useDualstackEndpoint ?? false), + useFipsEndpoint: (0, import_util_middleware.normalizeProvider)(useFipsEndpoint ?? 
false) + }); + let configuredEndpointPromise = void 0; + resolvedConfig.serviceConfiguredEndpoint = async () => { + if (input.serviceId && !configuredEndpointPromise) { + configuredEndpointPromise = (0, import_getEndpointFromConfig2.getEndpointFromConfig)(input.serviceId); + } + return configuredEndpointPromise; + }; + return resolvedConfig; +}, "resolveEndpointConfig"); +// Annotate the CommonJS export names for ESM import in node: + +0 && (module.exports = { + getEndpointFromInstructions, + resolveParams, + toEndpointV1, + endpointMiddleware, + endpointMiddlewareOptions, + getEndpointPlugin, + resolveEndpointConfig +}); + diff --git a/amplify/functions/downloadDocument/node_modules/@smithy/middleware-endpoint/dist-cjs/resolveEndpointConfig.js b/amplify/functions/downloadDocument/node_modules/@smithy/middleware-endpoint/dist-cjs/resolveEndpointConfig.js new file mode 100644 index 0000000..532e610 --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@smithy/middleware-endpoint/dist-cjs/resolveEndpointConfig.js @@ -0,0 +1 @@ +module.exports = require("./index.js"); \ No newline at end of file diff --git a/amplify/functions/downloadDocument/node_modules/@smithy/middleware-endpoint/dist-cjs/service-customizations/index.js b/amplify/functions/downloadDocument/node_modules/@smithy/middleware-endpoint/dist-cjs/service-customizations/index.js new file mode 100644 index 0000000..0440577 --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@smithy/middleware-endpoint/dist-cjs/service-customizations/index.js @@ -0,0 +1 @@ +module.exports = require("../index.js"); \ No newline at end of file diff --git a/amplify/functions/downloadDocument/node_modules/@smithy/middleware-endpoint/dist-cjs/service-customizations/s3.js b/amplify/functions/downloadDocument/node_modules/@smithy/middleware-endpoint/dist-cjs/service-customizations/s3.js new file mode 100644 index 0000000..0440577 --- /dev/null +++ 
b/amplify/functions/downloadDocument/node_modules/@smithy/middleware-endpoint/dist-cjs/service-customizations/s3.js @@ -0,0 +1 @@ +module.exports = require("../index.js"); \ No newline at end of file diff --git a/amplify/functions/downloadDocument/node_modules/@smithy/middleware-endpoint/dist-cjs/types.js b/amplify/functions/downloadDocument/node_modules/@smithy/middleware-endpoint/dist-cjs/types.js new file mode 100644 index 0000000..532e610 --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@smithy/middleware-endpoint/dist-cjs/types.js @@ -0,0 +1 @@ +module.exports = require("./index.js"); \ No newline at end of file diff --git a/amplify/functions/downloadDocument/node_modules/@smithy/middleware-endpoint/dist-es/adaptors/createConfigValueProvider.js b/amplify/functions/downloadDocument/node_modules/@smithy/middleware-endpoint/dist-es/adaptors/createConfigValueProvider.js new file mode 100644 index 0000000..b468b83 --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@smithy/middleware-endpoint/dist-es/adaptors/createConfigValueProvider.js @@ -0,0 +1,39 @@ +export const createConfigValueProvider = (configKey, canonicalEndpointParamKey, config) => { + const configProvider = async () => { + const configValue = config[configKey] ?? config[canonicalEndpointParamKey]; + if (typeof configValue === "function") { + return configValue(); + } + return configValue; + }; + if (configKey === "credentialScope" || canonicalEndpointParamKey === "CredentialScope") { + return async () => { + const credentials = typeof config.credentials === "function" ? await config.credentials() : config.credentials; + const configValue = credentials?.credentialScope ?? credentials?.CredentialScope; + return configValue; + }; + } + if (configKey === "accountId" || canonicalEndpointParamKey === "AccountId") { + return async () => { + const credentials = typeof config.credentials === "function" ? 
await config.credentials() : config.credentials; + const configValue = credentials?.accountId ?? credentials?.AccountId; + return configValue; + }; + } + if (configKey === "endpoint" || canonicalEndpointParamKey === "endpoint") { + return async () => { + const endpoint = await configProvider(); + if (endpoint && typeof endpoint === "object") { + if ("url" in endpoint) { + return endpoint.url.href; + } + if ("hostname" in endpoint) { + const { protocol, hostname, port, path } = endpoint; + return `${protocol}//${hostname}${port ? ":" + port : ""}${path}`; + } + } + return endpoint; + }; + } + return configProvider; +}; diff --git a/amplify/functions/downloadDocument/node_modules/@smithy/middleware-endpoint/dist-es/adaptors/getEndpointFromConfig.browser.js b/amplify/functions/downloadDocument/node_modules/@smithy/middleware-endpoint/dist-es/adaptors/getEndpointFromConfig.browser.js new file mode 100644 index 0000000..75fc136 --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@smithy/middleware-endpoint/dist-es/adaptors/getEndpointFromConfig.browser.js @@ -0,0 +1 @@ +export const getEndpointFromConfig = async (serviceId) => undefined; diff --git a/amplify/functions/downloadDocument/node_modules/@smithy/middleware-endpoint/dist-es/adaptors/getEndpointFromConfig.js b/amplify/functions/downloadDocument/node_modules/@smithy/middleware-endpoint/dist-es/adaptors/getEndpointFromConfig.js new file mode 100644 index 0000000..33c1d45 --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@smithy/middleware-endpoint/dist-es/adaptors/getEndpointFromConfig.js @@ -0,0 +1,3 @@ +import { loadConfig } from "@smithy/node-config-provider"; +import { getEndpointUrlConfig } from "./getEndpointUrlConfig"; +export const getEndpointFromConfig = async (serviceId) => loadConfig(getEndpointUrlConfig(serviceId ?? 
""))(); diff --git a/amplify/functions/downloadDocument/node_modules/@smithy/middleware-endpoint/dist-es/adaptors/getEndpointFromInstructions.js b/amplify/functions/downloadDocument/node_modules/@smithy/middleware-endpoint/dist-es/adaptors/getEndpointFromInstructions.js new file mode 100644 index 0000000..e445646 --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@smithy/middleware-endpoint/dist-es/adaptors/getEndpointFromInstructions.js @@ -0,0 +1,54 @@ +import { resolveParamsForS3 } from "../service-customizations"; +import { createConfigValueProvider } from "./createConfigValueProvider"; +import { getEndpointFromConfig } from "./getEndpointFromConfig"; +import { toEndpointV1 } from "./toEndpointV1"; +export const getEndpointFromInstructions = async (commandInput, instructionsSupplier, clientConfig, context) => { + if (!clientConfig.endpoint) { + let endpointFromConfig; + if (clientConfig.serviceConfiguredEndpoint) { + endpointFromConfig = await clientConfig.serviceConfiguredEndpoint(); + } + else { + endpointFromConfig = await getEndpointFromConfig(clientConfig.serviceId); + } + if (endpointFromConfig) { + clientConfig.endpoint = () => Promise.resolve(toEndpointV1(endpointFromConfig)); + } + } + const endpointParams = await resolveParams(commandInput, instructionsSupplier, clientConfig); + if (typeof clientConfig.endpointProvider !== "function") { + throw new Error("config.endpointProvider is not set."); + } + const endpoint = clientConfig.endpointProvider(endpointParams, context); + return endpoint; +}; +export const resolveParams = async (commandInput, instructionsSupplier, clientConfig) => { + const endpointParams = {}; + const instructions = instructionsSupplier?.getEndpointParameterInstructions?.() || {}; + for (const [name, instruction] of Object.entries(instructions)) { + switch (instruction.type) { + case "staticContextParams": + endpointParams[name] = instruction.value; + break; + case "contextParams": + endpointParams[name] = 
commandInput[instruction.name]; + break; + case "clientContextParams": + case "builtInParams": + endpointParams[name] = await createConfigValueProvider(instruction.name, name, clientConfig)(); + break; + case "operationContextParams": + endpointParams[name] = instruction.get(commandInput); + break; + default: + throw new Error("Unrecognized endpoint parameter instruction: " + JSON.stringify(instruction)); + } + } + if (Object.keys(instructions).length === 0) { + Object.assign(endpointParams, clientConfig); + } + if (String(clientConfig.serviceId).toLowerCase() === "s3") { + await resolveParamsForS3(endpointParams); + } + return endpointParams; +}; diff --git a/amplify/functions/downloadDocument/node_modules/@smithy/middleware-endpoint/dist-es/adaptors/getEndpointUrlConfig.js b/amplify/functions/downloadDocument/node_modules/@smithy/middleware-endpoint/dist-es/adaptors/getEndpointUrlConfig.js new file mode 100644 index 0000000..82a1519 --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@smithy/middleware-endpoint/dist-es/adaptors/getEndpointUrlConfig.js @@ -0,0 +1,31 @@ +import { CONFIG_PREFIX_SEPARATOR } from "@smithy/shared-ini-file-loader"; +const ENV_ENDPOINT_URL = "AWS_ENDPOINT_URL"; +const CONFIG_ENDPOINT_URL = "endpoint_url"; +export const getEndpointUrlConfig = (serviceId) => ({ + environmentVariableSelector: (env) => { + const serviceSuffixParts = serviceId.split(" ").map((w) => w.toUpperCase()); + const serviceEndpointUrl = env[[ENV_ENDPOINT_URL, ...serviceSuffixParts].join("_")]; + if (serviceEndpointUrl) + return serviceEndpointUrl; + const endpointUrl = env[ENV_ENDPOINT_URL]; + if (endpointUrl) + return endpointUrl; + return undefined; + }, + configFileSelector: (profile, config) => { + if (config && profile.services) { + const servicesSection = config[["services", profile.services].join(CONFIG_PREFIX_SEPARATOR)]; + if (servicesSection) { + const servicePrefixParts = serviceId.split(" ").map((w) => w.toLowerCase()); + const endpointUrl 
= servicesSection[[servicePrefixParts.join("_"), CONFIG_ENDPOINT_URL].join(CONFIG_PREFIX_SEPARATOR)]; + if (endpointUrl) + return endpointUrl; + } + } + const endpointUrl = profile[CONFIG_ENDPOINT_URL]; + if (endpointUrl) + return endpointUrl; + return undefined; + }, + default: undefined, +}); diff --git a/amplify/functions/downloadDocument/node_modules/@smithy/middleware-endpoint/dist-es/adaptors/index.js b/amplify/functions/downloadDocument/node_modules/@smithy/middleware-endpoint/dist-es/adaptors/index.js new file mode 100644 index 0000000..17752da --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@smithy/middleware-endpoint/dist-es/adaptors/index.js @@ -0,0 +1,2 @@ +export * from "./getEndpointFromInstructions"; +export * from "./toEndpointV1"; diff --git a/amplify/functions/downloadDocument/node_modules/@smithy/middleware-endpoint/dist-es/adaptors/toEndpointV1.js b/amplify/functions/downloadDocument/node_modules/@smithy/middleware-endpoint/dist-es/adaptors/toEndpointV1.js new file mode 100644 index 0000000..83f4324 --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@smithy/middleware-endpoint/dist-es/adaptors/toEndpointV1.js @@ -0,0 +1,10 @@ +import { parseUrl } from "@smithy/url-parser"; +export const toEndpointV1 = (endpoint) => { + if (typeof endpoint === "object") { + if ("url" in endpoint) { + return parseUrl(endpoint.url); + } + return endpoint; + } + return parseUrl(endpoint); +}; diff --git a/amplify/functions/downloadDocument/node_modules/@smithy/middleware-endpoint/dist-es/endpointMiddleware.js b/amplify/functions/downloadDocument/node_modules/@smithy/middleware-endpoint/dist-es/endpointMiddleware.js new file mode 100644 index 0000000..df25795 --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@smithy/middleware-endpoint/dist-es/endpointMiddleware.js @@ -0,0 +1,36 @@ +import { setFeature } from "@smithy/core"; +import { getSmithyContext } from "@smithy/util-middleware"; +import { 
getEndpointFromInstructions } from "./adaptors/getEndpointFromInstructions"; +export const endpointMiddleware = ({ config, instructions, }) => { + return (next, context) => async (args) => { + if (config.endpoint) { + setFeature(context, "ENDPOINT_OVERRIDE", "N"); + } + const endpoint = await getEndpointFromInstructions(args.input, { + getEndpointParameterInstructions() { + return instructions; + }, + }, { ...config }, context); + context.endpointV2 = endpoint; + context.authSchemes = endpoint.properties?.authSchemes; + const authScheme = context.authSchemes?.[0]; + if (authScheme) { + context["signing_region"] = authScheme.signingRegion; + context["signing_service"] = authScheme.signingName; + const smithyContext = getSmithyContext(context); + const httpAuthOption = smithyContext?.selectedHttpAuthScheme?.httpAuthOption; + if (httpAuthOption) { + httpAuthOption.signingProperties = Object.assign(httpAuthOption.signingProperties || {}, { + signing_region: authScheme.signingRegion, + signingRegion: authScheme.signingRegion, + signing_service: authScheme.signingName, + signingName: authScheme.signingName, + signingRegionSet: authScheme.signingRegionSet, + }, authScheme.properties); + } + } + return next({ + ...args, + }); + }; +}; diff --git a/amplify/functions/downloadDocument/node_modules/@smithy/middleware-endpoint/dist-es/getEndpointPlugin.js b/amplify/functions/downloadDocument/node_modules/@smithy/middleware-endpoint/dist-es/getEndpointPlugin.js new file mode 100644 index 0000000..e2335f4 --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@smithy/middleware-endpoint/dist-es/getEndpointPlugin.js @@ -0,0 +1,18 @@ +import { serializerMiddlewareOption } from "@smithy/middleware-serde"; +import { endpointMiddleware } from "./endpointMiddleware"; +export const endpointMiddlewareOptions = { + step: "serialize", + tags: ["ENDPOINT_PARAMETERS", "ENDPOINT_V2", "ENDPOINT"], + name: "endpointV2Middleware", + override: true, + relation: "before", + 
toMiddleware: serializerMiddlewareOption.name, +}; +export const getEndpointPlugin = (config, instructions) => ({ + applyToStack: (clientStack) => { + clientStack.addRelativeTo(endpointMiddleware({ + config, + instructions, + }), endpointMiddlewareOptions); + }, +}); diff --git a/amplify/functions/downloadDocument/node_modules/@smithy/middleware-endpoint/dist-es/index.js b/amplify/functions/downloadDocument/node_modules/@smithy/middleware-endpoint/dist-es/index.js new file mode 100644 index 0000000..f89653e --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@smithy/middleware-endpoint/dist-es/index.js @@ -0,0 +1,5 @@ +export * from "./adaptors"; +export * from "./endpointMiddleware"; +export * from "./getEndpointPlugin"; +export * from "./resolveEndpointConfig"; +export * from "./types"; diff --git a/amplify/functions/downloadDocument/node_modules/@smithy/middleware-endpoint/dist-es/resolveEndpointConfig.js b/amplify/functions/downloadDocument/node_modules/@smithy/middleware-endpoint/dist-es/resolveEndpointConfig.js new file mode 100644 index 0000000..c3a0eea --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@smithy/middleware-endpoint/dist-es/resolveEndpointConfig.js @@ -0,0 +1,24 @@ +import { normalizeProvider } from "@smithy/util-middleware"; +import { getEndpointFromConfig } from "./adaptors/getEndpointFromConfig"; +import { toEndpointV1 } from "./adaptors/toEndpointV1"; +export const resolveEndpointConfig = (input) => { + const tls = input.tls ?? true; + const { endpoint, useDualstackEndpoint, useFipsEndpoint } = input; + const customEndpointProvider = endpoint != null ? async () => toEndpointV1(await normalizeProvider(endpoint)()) : undefined; + const isCustomEndpoint = !!endpoint; + const resolvedConfig = Object.assign(input, { + endpoint: customEndpointProvider, + tls, + isCustomEndpoint, + useDualstackEndpoint: normalizeProvider(useDualstackEndpoint ?? false), + useFipsEndpoint: normalizeProvider(useFipsEndpoint ?? 
false), + }); + let configuredEndpointPromise = undefined; + resolvedConfig.serviceConfiguredEndpoint = async () => { + if (input.serviceId && !configuredEndpointPromise) { + configuredEndpointPromise = getEndpointFromConfig(input.serviceId); + } + return configuredEndpointPromise; + }; + return resolvedConfig; +}; diff --git a/amplify/functions/downloadDocument/node_modules/@smithy/middleware-endpoint/dist-es/service-customizations/index.js b/amplify/functions/downloadDocument/node_modules/@smithy/middleware-endpoint/dist-es/service-customizations/index.js new file mode 100644 index 0000000..e50e107 --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@smithy/middleware-endpoint/dist-es/service-customizations/index.js @@ -0,0 +1 @@ +export * from "./s3"; diff --git a/amplify/functions/downloadDocument/node_modules/@smithy/middleware-endpoint/dist-es/service-customizations/s3.js b/amplify/functions/downloadDocument/node_modules/@smithy/middleware-endpoint/dist-es/service-customizations/s3.js new file mode 100644 index 0000000..e993fc7 --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@smithy/middleware-endpoint/dist-es/service-customizations/s3.js @@ -0,0 +1,37 @@ +export const resolveParamsForS3 = async (endpointParams) => { + const bucket = endpointParams?.Bucket || ""; + if (typeof endpointParams.Bucket === "string") { + endpointParams.Bucket = bucket.replace(/#/g, encodeURIComponent("#")).replace(/\?/g, encodeURIComponent("?")); + } + if (isArnBucketName(bucket)) { + if (endpointParams.ForcePathStyle === true) { + throw new Error("Path-style addressing cannot be used with ARN buckets"); + } + } + else if (!isDnsCompatibleBucketName(bucket) || + (bucket.indexOf(".") !== -1 && !String(endpointParams.Endpoint).startsWith("http:")) || + bucket.toLowerCase() !== bucket || + bucket.length < 3) { + endpointParams.ForcePathStyle = true; + } + if (endpointParams.DisableMultiRegionAccessPoints) { + 
endpointParams.disableMultiRegionAccessPoints = true; + endpointParams.DisableMRAP = true; + } + return endpointParams; +}; +const DOMAIN_PATTERN = /^[a-z0-9][a-z0-9\.\-]{1,61}[a-z0-9]$/; +const IP_ADDRESS_PATTERN = /(\d+\.){3}\d+/; +const DOTS_PATTERN = /\.\./; +export const DOT_PATTERN = /\./; +export const S3_HOSTNAME_PATTERN = /^(.+\.)?s3(-fips)?(\.dualstack)?[.-]([a-z0-9-]+)\./; +export const isDnsCompatibleBucketName = (bucketName) => DOMAIN_PATTERN.test(bucketName) && !IP_ADDRESS_PATTERN.test(bucketName) && !DOTS_PATTERN.test(bucketName); +export const isArnBucketName = (bucketName) => { + const [arn, partition, service, , , bucket] = bucketName.split(":"); + const isArn = arn === "arn" && bucketName.split(":").length >= 6; + const isValidArn = Boolean(isArn && partition && service && bucket); + if (isArn && !isValidArn) { + throw new Error(`Invalid ARN: ${bucketName} was an invalid ARN.`); + } + return isValidArn; +}; diff --git a/amplify/functions/downloadDocument/node_modules/@smithy/middleware-endpoint/dist-es/types.js b/amplify/functions/downloadDocument/node_modules/@smithy/middleware-endpoint/dist-es/types.js new file mode 100644 index 0000000..cb0ff5c --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@smithy/middleware-endpoint/dist-es/types.js @@ -0,0 +1 @@ +export {}; diff --git a/amplify/functions/downloadDocument/node_modules/@smithy/middleware-endpoint/dist-types/adaptors/createConfigValueProvider.d.ts b/amplify/functions/downloadDocument/node_modules/@smithy/middleware-endpoint/dist-types/adaptors/createConfigValueProvider.d.ts new file mode 100644 index 0000000..df65914 --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@smithy/middleware-endpoint/dist-types/adaptors/createConfigValueProvider.d.ts @@ -0,0 +1,13 @@ +/** + * Normalize some key of the client config to an async provider. + * @internal + * + * @param configKey - the key to look up in config. 
+ * @param canonicalEndpointParamKey - this is the name the EndpointRuleSet uses. + * it will most likely not contain the config + * value, but we use it as a fallback. + * @param config - container of the config values. + * + * @returns async function that will resolve with the value. + */ +export declare const createConfigValueProvider: >(configKey: string, canonicalEndpointParamKey: string, config: Config) => () => Promise; diff --git a/amplify/functions/downloadDocument/node_modules/@smithy/middleware-endpoint/dist-types/adaptors/getEndpointFromConfig.browser.d.ts b/amplify/functions/downloadDocument/node_modules/@smithy/middleware-endpoint/dist-types/adaptors/getEndpointFromConfig.browser.d.ts new file mode 100644 index 0000000..de05fa5 --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@smithy/middleware-endpoint/dist-types/adaptors/getEndpointFromConfig.browser.d.ts @@ -0,0 +1 @@ +export declare const getEndpointFromConfig: (serviceId: string) => Promise; diff --git a/amplify/functions/downloadDocument/node_modules/@smithy/middleware-endpoint/dist-types/adaptors/getEndpointFromConfig.d.ts b/amplify/functions/downloadDocument/node_modules/@smithy/middleware-endpoint/dist-types/adaptors/getEndpointFromConfig.d.ts new file mode 100644 index 0000000..42a3566 --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@smithy/middleware-endpoint/dist-types/adaptors/getEndpointFromConfig.d.ts @@ -0,0 +1,4 @@ +/** + * @internal + */ +export declare const getEndpointFromConfig: (serviceId?: string) => Promise; diff --git a/amplify/functions/downloadDocument/node_modules/@smithy/middleware-endpoint/dist-types/adaptors/getEndpointFromInstructions.d.ts b/amplify/functions/downloadDocument/node_modules/@smithy/middleware-endpoint/dist-types/adaptors/getEndpointFromInstructions.d.ts new file mode 100644 index 0000000..49cef2a --- /dev/null +++ 
b/amplify/functions/downloadDocument/node_modules/@smithy/middleware-endpoint/dist-types/adaptors/getEndpointFromInstructions.d.ts @@ -0,0 +1,28 @@ +import { EndpointParameters, EndpointV2, HandlerExecutionContext } from "@smithy/types"; +import { EndpointResolvedConfig } from "../resolveEndpointConfig"; +import { EndpointParameterInstructions } from "../types"; +/** + * @internal + */ +export type EndpointParameterInstructionsSupplier = Partial<{ + getEndpointParameterInstructions(): EndpointParameterInstructions; +}>; +/** + * This step in the endpoint resolution process is exposed as a function + * to allow packages such as signers, lib-upload, etc. to get + * the V2 Endpoint associated to an instance of some api operation command + * without needing to send it or resolve its middleware stack. + * + * @internal + * @param commandInput - the input of the Command in question. + * @param instructionsSupplier - this is typically a Command constructor. A static function supplying the + * endpoint parameter instructions will exist for commands in services + * having an endpoints ruleset trait. + * @param clientConfig - config of the service client. + * @param context - optional context. 
+ */ +export declare const getEndpointFromInstructions: , Config extends Record>(commandInput: CommandInput, instructionsSupplier: EndpointParameterInstructionsSupplier, clientConfig: Partial> & Config, context?: HandlerExecutionContext) => Promise; +/** + * @internal + */ +export declare const resolveParams: , Config extends Record>(commandInput: CommandInput, instructionsSupplier: EndpointParameterInstructionsSupplier, clientConfig: Partial> & Config) => Promise; diff --git a/amplify/functions/downloadDocument/node_modules/@smithy/middleware-endpoint/dist-types/adaptors/getEndpointUrlConfig.d.ts b/amplify/functions/downloadDocument/node_modules/@smithy/middleware-endpoint/dist-types/adaptors/getEndpointUrlConfig.d.ts new file mode 100644 index 0000000..0971010 --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@smithy/middleware-endpoint/dist-types/adaptors/getEndpointUrlConfig.d.ts @@ -0,0 +1,2 @@ +import { LoadedConfigSelectors } from "@smithy/node-config-provider"; +export declare const getEndpointUrlConfig: (serviceId: string) => LoadedConfigSelectors; diff --git a/amplify/functions/downloadDocument/node_modules/@smithy/middleware-endpoint/dist-types/adaptors/index.d.ts b/amplify/functions/downloadDocument/node_modules/@smithy/middleware-endpoint/dist-types/adaptors/index.d.ts new file mode 100644 index 0000000..cc13488 --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@smithy/middleware-endpoint/dist-types/adaptors/index.d.ts @@ -0,0 +1,8 @@ +/** + * @internal + */ +export * from "./getEndpointFromInstructions"; +/** + * @internal + */ +export * from "./toEndpointV1"; diff --git a/amplify/functions/downloadDocument/node_modules/@smithy/middleware-endpoint/dist-types/adaptors/toEndpointV1.d.ts b/amplify/functions/downloadDocument/node_modules/@smithy/middleware-endpoint/dist-types/adaptors/toEndpointV1.d.ts new file mode 100644 index 0000000..834aabb --- /dev/null +++ 
b/amplify/functions/downloadDocument/node_modules/@smithy/middleware-endpoint/dist-types/adaptors/toEndpointV1.d.ts @@ -0,0 +1,5 @@ +import { Endpoint, EndpointV2 } from "@smithy/types"; +/** + * @internal + */ +export declare const toEndpointV1: (endpoint: string | Endpoint | EndpointV2) => Endpoint; diff --git a/amplify/functions/downloadDocument/node_modules/@smithy/middleware-endpoint/dist-types/endpointMiddleware.d.ts b/amplify/functions/downloadDocument/node_modules/@smithy/middleware-endpoint/dist-types/endpointMiddleware.d.ts new file mode 100644 index 0000000..67cee64 --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@smithy/middleware-endpoint/dist-types/endpointMiddleware.d.ts @@ -0,0 +1,10 @@ +import { EndpointParameters, SerializeMiddleware } from "@smithy/types"; +import { EndpointResolvedConfig } from "./resolveEndpointConfig"; +import { EndpointParameterInstructions } from "./types"; +/** + * @internal + */ +export declare const endpointMiddleware: ({ config, instructions, }: { + config: EndpointResolvedConfig; + instructions: EndpointParameterInstructions; +}) => SerializeMiddleware; diff --git a/amplify/functions/downloadDocument/node_modules/@smithy/middleware-endpoint/dist-types/getEndpointPlugin.d.ts b/amplify/functions/downloadDocument/node_modules/@smithy/middleware-endpoint/dist-types/getEndpointPlugin.d.ts new file mode 100644 index 0000000..910f44d --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@smithy/middleware-endpoint/dist-types/getEndpointPlugin.d.ts @@ -0,0 +1,11 @@ +import { EndpointParameters, Pluggable, RelativeMiddlewareOptions, SerializeHandlerOptions } from "@smithy/types"; +import { EndpointResolvedConfig } from "./resolveEndpointConfig"; +import { EndpointParameterInstructions } from "./types"; +/** + * @internal + */ +export declare const endpointMiddlewareOptions: SerializeHandlerOptions & RelativeMiddlewareOptions; +/** + * @internal + */ +export declare const getEndpointPlugin: 
(config: EndpointResolvedConfig, instructions: EndpointParameterInstructions) => Pluggable; diff --git a/amplify/functions/downloadDocument/node_modules/@smithy/middleware-endpoint/dist-types/index.d.ts b/amplify/functions/downloadDocument/node_modules/@smithy/middleware-endpoint/dist-types/index.d.ts new file mode 100644 index 0000000..bea06cf --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@smithy/middleware-endpoint/dist-types/index.d.ts @@ -0,0 +1,17 @@ +/** + * @internal + */ +export * from "./adaptors"; +/** + * @internal + */ +export * from "./endpointMiddleware"; +/** + * @internal + */ +export * from "./getEndpointPlugin"; +export * from "./resolveEndpointConfig"; +/** + * @internal + */ +export * from "./types"; diff --git a/amplify/functions/downloadDocument/node_modules/@smithy/middleware-endpoint/dist-types/resolveEndpointConfig.d.ts b/amplify/functions/downloadDocument/node_modules/@smithy/middleware-endpoint/dist-types/resolveEndpointConfig.d.ts new file mode 100644 index 0000000..ec7dc70 --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@smithy/middleware-endpoint/dist-types/resolveEndpointConfig.d.ts @@ -0,0 +1,107 @@ +import { Endpoint, EndpointParameters, EndpointV2, Logger, Provider, UrlParser } from "@smithy/types"; +/** + * @public + * + * Endpoint config interfaces and resolver for Endpoint v2. They live in separate package to allow per-service onboarding. + * When all services onboard Endpoint v2, the resolver in config-resolver package can be removed. + * This interface includes all the endpoint parameters with built-in bindings of "AWS::*" and "SDK::*" + */ +export interface EndpointInputConfig { + /** + * The fully qualified endpoint of the webservice. This is only for using + * a custom endpoint (for example, when using a local version of S3). + * + * Endpoint transformations such as S3 applying a bucket to the hostname are + * still applicable to this custom endpoint. 
+ */ + endpoint?: string | Endpoint | Provider | EndpointV2 | Provider; + /** + * Providing a custom endpointProvider will override + * built-in transformations of the endpoint such as S3 adding the bucket + * name to the hostname, since they are part of the default endpointProvider. + */ + endpointProvider?: (params: T, context?: { + logger?: Logger; + }) => EndpointV2; + /** + * Whether TLS is enabled for requests. + * @deprecated + */ + tls?: boolean; + /** + * Enables IPv6/IPv4 dualstack endpoint. + */ + useDualstackEndpoint?: boolean | Provider; + /** + * Enables FIPS compatible endpoints. + */ + useFipsEndpoint?: boolean | Provider; + /** + * @internal + * This field is used internally so you should not fill any value to this field. + */ + serviceConfiguredEndpoint?: never; +} +/** + * @internal + */ +interface PreviouslyResolved { + urlParser: UrlParser; + region: Provider; + endpointProvider: (params: T, context?: { + logger?: Logger; + }) => EndpointV2; + logger?: Logger; + serviceId?: string; +} +/** + * @internal + * + * This supercedes the similarly named EndpointsResolvedConfig (no parametric types) + * from resolveEndpointsConfig.ts in \@smithy/config-resolver. + */ +export interface EndpointResolvedConfig { + /** + * Custom endpoint provided by the user. + * This is normalized to a single interface from the various acceptable types. + * This field will be undefined if a custom endpoint is not provided. + */ + endpoint?: Provider; + endpointProvider: (params: T, context?: { + logger?: Logger; + }) => EndpointV2; + /** + * Whether TLS is enabled for requests. + * @deprecated + */ + tls: boolean; + /** + * Whether the endpoint is specified by caller. 
+ * @internal + * @deprecated + */ + isCustomEndpoint?: boolean; + /** + * Resolved value for input {@link EndpointsInputConfig.useDualstackEndpoint} + */ + useDualstackEndpoint: Provider; + /** + * Resolved value for input {@link EndpointsInputConfig.useFipsEndpoint} + */ + useFipsEndpoint: Provider; + /** + * Unique service identifier. + * @internal + */ + serviceId?: string; + /** + * A configured endpoint global or specific to the service from ENV or AWS SDK configuration files. + * @internal + */ + serviceConfiguredEndpoint?: Provider; +} +/** + * @internal + */ +export declare const resolveEndpointConfig: (input: T & EndpointInputConfig

& PreviouslyResolved

) => T & EndpointResolvedConfig

; +export {}; diff --git a/amplify/functions/downloadDocument/node_modules/@smithy/middleware-endpoint/dist-types/service-customizations/index.d.ts b/amplify/functions/downloadDocument/node_modules/@smithy/middleware-endpoint/dist-types/service-customizations/index.d.ts new file mode 100644 index 0000000..716a15d --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@smithy/middleware-endpoint/dist-types/service-customizations/index.d.ts @@ -0,0 +1,4 @@ +/** + * @internal + */ +export * from "./s3"; diff --git a/amplify/functions/downloadDocument/node_modules/@smithy/middleware-endpoint/dist-types/service-customizations/s3.d.ts b/amplify/functions/downloadDocument/node_modules/@smithy/middleware-endpoint/dist-types/service-customizations/s3.d.ts new file mode 100644 index 0000000..80b2e6a --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@smithy/middleware-endpoint/dist-types/service-customizations/s3.d.ts @@ -0,0 +1,26 @@ +import { EndpointParameters } from "@smithy/types"; +/** + * @internal + */ +export declare const resolveParamsForS3: (endpointParams: EndpointParameters) => Promise; +/** + * @internal + */ +export declare const DOT_PATTERN: RegExp; +/** + * @internal + */ +export declare const S3_HOSTNAME_PATTERN: RegExp; +/** + * Determines whether a given string is DNS compliant per the rules outlined by + * S3. Length, capitaization, and leading dot restrictions are enforced by the + * DOMAIN_PATTERN regular expression. 
+ * @internal + * + * @see https://docs.aws.amazon.com/AmazonS3/latest/dev/BucketRestrictions.html + */ +export declare const isDnsCompatibleBucketName: (bucketName: string) => boolean; +/** + * @internal + */ +export declare const isArnBucketName: (bucketName: string) => boolean; diff --git a/amplify/functions/downloadDocument/node_modules/@smithy/middleware-endpoint/dist-types/ts3.4/adaptors/createConfigValueProvider.d.ts b/amplify/functions/downloadDocument/node_modules/@smithy/middleware-endpoint/dist-types/ts3.4/adaptors/createConfigValueProvider.d.ts new file mode 100644 index 0000000..842f8fa --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@smithy/middleware-endpoint/dist-types/ts3.4/adaptors/createConfigValueProvider.d.ts @@ -0,0 +1,13 @@ +/** + * Normalize some key of the client config to an async provider. + * @internal + * + * @param configKey - the key to look up in config. + * @param canonicalEndpointParamKey - this is the name the EndpointRuleSet uses. + * it will most likely not contain the config + * value, but we use it as a fallback. + * @param config - container of the config values. + * + * @returns async function that will resolve with the value. 
+ */ +export declare const createConfigValueProvider: >(configKey: string, canonicalEndpointParamKey: string, config: Config) => () => Promise; diff --git a/amplify/functions/downloadDocument/node_modules/@smithy/middleware-endpoint/dist-types/ts3.4/adaptors/getEndpointFromConfig.browser.d.ts b/amplify/functions/downloadDocument/node_modules/@smithy/middleware-endpoint/dist-types/ts3.4/adaptors/getEndpointFromConfig.browser.d.ts new file mode 100644 index 0000000..1a4f6ba --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@smithy/middleware-endpoint/dist-types/ts3.4/adaptors/getEndpointFromConfig.browser.d.ts @@ -0,0 +1 @@ +export declare const getEndpointFromConfig: (serviceId: string) => Promise; diff --git a/amplify/functions/downloadDocument/node_modules/@smithy/middleware-endpoint/dist-types/ts3.4/adaptors/getEndpointFromConfig.d.ts b/amplify/functions/downloadDocument/node_modules/@smithy/middleware-endpoint/dist-types/ts3.4/adaptors/getEndpointFromConfig.d.ts new file mode 100644 index 0000000..641570c --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@smithy/middleware-endpoint/dist-types/ts3.4/adaptors/getEndpointFromConfig.d.ts @@ -0,0 +1,4 @@ +/** + * @internal + */ +export declare const getEndpointFromConfig: (serviceId?: string) => Promise; diff --git a/amplify/functions/downloadDocument/node_modules/@smithy/middleware-endpoint/dist-types/ts3.4/adaptors/getEndpointFromInstructions.d.ts b/amplify/functions/downloadDocument/node_modules/@smithy/middleware-endpoint/dist-types/ts3.4/adaptors/getEndpointFromInstructions.d.ts new file mode 100644 index 0000000..82dc8df --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@smithy/middleware-endpoint/dist-types/ts3.4/adaptors/getEndpointFromInstructions.d.ts @@ -0,0 +1,28 @@ +import { EndpointParameters, EndpointV2, HandlerExecutionContext } from "@smithy/types"; +import { EndpointResolvedConfig } from "../resolveEndpointConfig"; +import { 
EndpointParameterInstructions } from "../types"; +/** + * @internal + */ +export type EndpointParameterInstructionsSupplier = Partial<{ + getEndpointParameterInstructions(): EndpointParameterInstructions; +}>; +/** + * This step in the endpoint resolution process is exposed as a function + * to allow packages such as signers, lib-upload, etc. to get + * the V2 Endpoint associated to an instance of some api operation command + * without needing to send it or resolve its middleware stack. + * + * @internal + * @param commandInput - the input of the Command in question. + * @param instructionsSupplier - this is typically a Command constructor. A static function supplying the + * endpoint parameter instructions will exist for commands in services + * having an endpoints ruleset trait. + * @param clientConfig - config of the service client. + * @param context - optional context. + */ +export declare const getEndpointFromInstructions: , Config extends Record>(commandInput: CommandInput, instructionsSupplier: EndpointParameterInstructionsSupplier, clientConfig: Partial> & Config, context?: HandlerExecutionContext) => Promise; +/** + * @internal + */ +export declare const resolveParams: , Config extends Record>(commandInput: CommandInput, instructionsSupplier: EndpointParameterInstructionsSupplier, clientConfig: Partial> & Config) => Promise; diff --git a/amplify/functions/downloadDocument/node_modules/@smithy/middleware-endpoint/dist-types/ts3.4/adaptors/getEndpointUrlConfig.d.ts b/amplify/functions/downloadDocument/node_modules/@smithy/middleware-endpoint/dist-types/ts3.4/adaptors/getEndpointUrlConfig.d.ts new file mode 100644 index 0000000..7b9d068 --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@smithy/middleware-endpoint/dist-types/ts3.4/adaptors/getEndpointUrlConfig.d.ts @@ -0,0 +1,2 @@ +import { LoadedConfigSelectors } from "@smithy/node-config-provider"; +export declare const getEndpointUrlConfig: (serviceId: string) => LoadedConfigSelectors; 
diff --git a/amplify/functions/downloadDocument/node_modules/@smithy/middleware-endpoint/dist-types/ts3.4/adaptors/index.d.ts b/amplify/functions/downloadDocument/node_modules/@smithy/middleware-endpoint/dist-types/ts3.4/adaptors/index.d.ts new file mode 100644 index 0000000..ced0520 --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@smithy/middleware-endpoint/dist-types/ts3.4/adaptors/index.d.ts @@ -0,0 +1,8 @@ +/** + * @internal + */ +export * from "./getEndpointFromInstructions"; +/** + * @internal + */ +export * from "./toEndpointV1"; diff --git a/amplify/functions/downloadDocument/node_modules/@smithy/middleware-endpoint/dist-types/ts3.4/adaptors/toEndpointV1.d.ts b/amplify/functions/downloadDocument/node_modules/@smithy/middleware-endpoint/dist-types/ts3.4/adaptors/toEndpointV1.d.ts new file mode 100644 index 0000000..047ded8 --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@smithy/middleware-endpoint/dist-types/ts3.4/adaptors/toEndpointV1.d.ts @@ -0,0 +1,5 @@ +import { Endpoint, EndpointV2 } from "@smithy/types"; +/** + * @internal + */ +export declare const toEndpointV1: (endpoint: string | Endpoint | EndpointV2) => Endpoint; diff --git a/amplify/functions/downloadDocument/node_modules/@smithy/middleware-endpoint/dist-types/ts3.4/endpointMiddleware.d.ts b/amplify/functions/downloadDocument/node_modules/@smithy/middleware-endpoint/dist-types/ts3.4/endpointMiddleware.d.ts new file mode 100644 index 0000000..3f7e40a --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@smithy/middleware-endpoint/dist-types/ts3.4/endpointMiddleware.d.ts @@ -0,0 +1,10 @@ +import { EndpointParameters, SerializeMiddleware } from "@smithy/types"; +import { EndpointResolvedConfig } from "./resolveEndpointConfig"; +import { EndpointParameterInstructions } from "./types"; +/** + * @internal + */ +export declare const endpointMiddleware: ({ config, instructions, }: { + config: EndpointResolvedConfig; + instructions: 
EndpointParameterInstructions; +}) => SerializeMiddleware; diff --git a/amplify/functions/downloadDocument/node_modules/@smithy/middleware-endpoint/dist-types/ts3.4/getEndpointPlugin.d.ts b/amplify/functions/downloadDocument/node_modules/@smithy/middleware-endpoint/dist-types/ts3.4/getEndpointPlugin.d.ts new file mode 100644 index 0000000..39f93a9 --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@smithy/middleware-endpoint/dist-types/ts3.4/getEndpointPlugin.d.ts @@ -0,0 +1,11 @@ +import { EndpointParameters, Pluggable, RelativeMiddlewareOptions, SerializeHandlerOptions } from "@smithy/types"; +import { EndpointResolvedConfig } from "./resolveEndpointConfig"; +import { EndpointParameterInstructions } from "./types"; +/** + * @internal + */ +export declare const endpointMiddlewareOptions: SerializeHandlerOptions & RelativeMiddlewareOptions; +/** + * @internal + */ +export declare const getEndpointPlugin: (config: EndpointResolvedConfig, instructions: EndpointParameterInstructions) => Pluggable; diff --git a/amplify/functions/downloadDocument/node_modules/@smithy/middleware-endpoint/dist-types/ts3.4/index.d.ts b/amplify/functions/downloadDocument/node_modules/@smithy/middleware-endpoint/dist-types/ts3.4/index.d.ts new file mode 100644 index 0000000..2ad75b9 --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@smithy/middleware-endpoint/dist-types/ts3.4/index.d.ts @@ -0,0 +1,17 @@ +/** + * @internal + */ +export * from "./adaptors"; +/** + * @internal + */ +export * from "./endpointMiddleware"; +/** + * @internal + */ +export * from "./getEndpointPlugin"; +export * from "./resolveEndpointConfig"; +/** + * @internal + */ +export * from "./types"; diff --git a/amplify/functions/downloadDocument/node_modules/@smithy/middleware-endpoint/dist-types/ts3.4/resolveEndpointConfig.d.ts b/amplify/functions/downloadDocument/node_modules/@smithy/middleware-endpoint/dist-types/ts3.4/resolveEndpointConfig.d.ts new file mode 100644 index 
0000000..875c9fc --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@smithy/middleware-endpoint/dist-types/ts3.4/resolveEndpointConfig.d.ts @@ -0,0 +1,107 @@ +import { Endpoint, EndpointParameters, EndpointV2, Logger, Provider, UrlParser } from "@smithy/types"; +/** + * @public + * + * Endpoint config interfaces and resolver for Endpoint v2. They live in separate package to allow per-service onboarding. + * When all services onboard Endpoint v2, the resolver in config-resolver package can be removed. + * This interface includes all the endpoint parameters with built-in bindings of "AWS::*" and "SDK::*" + */ +export interface EndpointInputConfig { + /** + * The fully qualified endpoint of the webservice. This is only for using + * a custom endpoint (for example, when using a local version of S3). + * + * Endpoint transformations such as S3 applying a bucket to the hostname are + * still applicable to this custom endpoint. + */ + endpoint?: string | Endpoint | Provider | EndpointV2 | Provider; + /** + * Providing a custom endpointProvider will override + * built-in transformations of the endpoint such as S3 adding the bucket + * name to the hostname, since they are part of the default endpointProvider. + */ + endpointProvider?: (params: T, context?: { + logger?: Logger; + }) => EndpointV2; + /** + * Whether TLS is enabled for requests. + * @deprecated + */ + tls?: boolean; + /** + * Enables IPv6/IPv4 dualstack endpoint. + */ + useDualstackEndpoint?: boolean | Provider; + /** + * Enables FIPS compatible endpoints. + */ + useFipsEndpoint?: boolean | Provider; + /** + * @internal + * This field is used internally so you should not fill any value to this field. 
+ */ + serviceConfiguredEndpoint?: never; +} +/** + * @internal + */ +interface PreviouslyResolved { + urlParser: UrlParser; + region: Provider; + endpointProvider: (params: T, context?: { + logger?: Logger; + }) => EndpointV2; + logger?: Logger; + serviceId?: string; +} +/** + * @internal + * + * This supercedes the similarly named EndpointsResolvedConfig (no parametric types) + * from resolveEndpointsConfig.ts in \@smithy/config-resolver. + */ +export interface EndpointResolvedConfig { + /** + * Custom endpoint provided by the user. + * This is normalized to a single interface from the various acceptable types. + * This field will be undefined if a custom endpoint is not provided. + */ + endpoint?: Provider; + endpointProvider: (params: T, context?: { + logger?: Logger; + }) => EndpointV2; + /** + * Whether TLS is enabled for requests. + * @deprecated + */ + tls: boolean; + /** + * Whether the endpoint is specified by caller. + * @internal + * @deprecated + */ + isCustomEndpoint?: boolean; + /** + * Resolved value for input {@link EndpointsInputConfig.useDualstackEndpoint} + */ + useDualstackEndpoint: Provider; + /** + * Resolved value for input {@link EndpointsInputConfig.useFipsEndpoint} + */ + useFipsEndpoint: Provider; + /** + * Unique service identifier. + * @internal + */ + serviceId?: string; + /** + * A configured endpoint global or specific to the service from ENV or AWS SDK configuration files. + * @internal + */ + serviceConfiguredEndpoint?: Provider; +} +/** + * @internal + */ +export declare const resolveEndpointConfig: (input: T & EndpointInputConfig

& PreviouslyResolved

) => T & EndpointResolvedConfig

; +export {}; diff --git a/amplify/functions/downloadDocument/node_modules/@smithy/middleware-endpoint/dist-types/ts3.4/service-customizations/index.d.ts b/amplify/functions/downloadDocument/node_modules/@smithy/middleware-endpoint/dist-types/ts3.4/service-customizations/index.d.ts new file mode 100644 index 0000000..6529752 --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@smithy/middleware-endpoint/dist-types/ts3.4/service-customizations/index.d.ts @@ -0,0 +1,4 @@ +/** + * @internal + */ +export * from "./s3"; diff --git a/amplify/functions/downloadDocument/node_modules/@smithy/middleware-endpoint/dist-types/ts3.4/service-customizations/s3.d.ts b/amplify/functions/downloadDocument/node_modules/@smithy/middleware-endpoint/dist-types/ts3.4/service-customizations/s3.d.ts new file mode 100644 index 0000000..cace227 --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@smithy/middleware-endpoint/dist-types/ts3.4/service-customizations/s3.d.ts @@ -0,0 +1,26 @@ +import { EndpointParameters } from "@smithy/types"; +/** + * @internal + */ +export declare const resolveParamsForS3: (endpointParams: EndpointParameters) => Promise; +/** + * @internal + */ +export declare const DOT_PATTERN: RegExp; +/** + * @internal + */ +export declare const S3_HOSTNAME_PATTERN: RegExp; +/** + * Determines whether a given string is DNS compliant per the rules outlined by + * S3. Length, capitaization, and leading dot restrictions are enforced by the + * DOMAIN_PATTERN regular expression. 
+ * @internal + * + * @see https://docs.aws.amazon.com/AmazonS3/latest/dev/BucketRestrictions.html + */ +export declare const isDnsCompatibleBucketName: (bucketName: string) => boolean; +/** + * @internal + */ +export declare const isArnBucketName: (bucketName: string) => boolean; diff --git a/amplify/functions/downloadDocument/node_modules/@smithy/middleware-endpoint/dist-types/ts3.4/types.d.ts b/amplify/functions/downloadDocument/node_modules/@smithy/middleware-endpoint/dist-types/ts3.4/types.d.ts new file mode 100644 index 0000000..a6084c8 --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@smithy/middleware-endpoint/dist-types/ts3.4/types.d.ts @@ -0,0 +1,41 @@ +/** + * @internal + */ +export interface EndpointParameterInstructions { + [name: string]: BuiltInParamInstruction | ClientContextParamInstruction | StaticContextParamInstruction | ContextParamInstruction | OperationContextParamInstruction; +} +/** + * @internal + */ +export interface BuiltInParamInstruction { + type: "builtInParams"; + name: string; +} +/** + * @internal + */ +export interface ClientContextParamInstruction { + type: "clientContextParams"; + name: string; +} +/** + * @internal + */ +export interface StaticContextParamInstruction { + type: "staticContextParams"; + value: string | boolean; +} +/** + * @internal + */ +export interface ContextParamInstruction { + type: "contextParams"; + name: string; +} +/** + * @internal + */ +export interface OperationContextParamInstruction { + type: "operationContextParams"; + get(input: any): any; +} diff --git a/amplify/functions/downloadDocument/node_modules/@smithy/middleware-endpoint/dist-types/types.d.ts b/amplify/functions/downloadDocument/node_modules/@smithy/middleware-endpoint/dist-types/types.d.ts new file mode 100644 index 0000000..0d1d9e9 --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@smithy/middleware-endpoint/dist-types/types.d.ts @@ -0,0 +1,41 @@ +/** + * @internal + */ +export interface 
EndpointParameterInstructions { + [name: string]: BuiltInParamInstruction | ClientContextParamInstruction | StaticContextParamInstruction | ContextParamInstruction | OperationContextParamInstruction; +} +/** + * @internal + */ +export interface BuiltInParamInstruction { + type: "builtInParams"; + name: string; +} +/** + * @internal + */ +export interface ClientContextParamInstruction { + type: "clientContextParams"; + name: string; +} +/** + * @internal + */ +export interface StaticContextParamInstruction { + type: "staticContextParams"; + value: string | boolean; +} +/** + * @internal + */ +export interface ContextParamInstruction { + type: "contextParams"; + name: string; +} +/** + * @internal + */ +export interface OperationContextParamInstruction { + type: "operationContextParams"; + get(input: any): any; +} diff --git a/amplify/functions/downloadDocument/node_modules/@smithy/middleware-endpoint/package.json b/amplify/functions/downloadDocument/node_modules/@smithy/middleware-endpoint/package.json new file mode 100644 index 0000000..e95e228 --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@smithy/middleware-endpoint/package.json @@ -0,0 +1,74 @@ +{ + "name": "@smithy/middleware-endpoint", + "version": "4.1.2", + "scripts": { + "build": "concurrently 'yarn:build:cjs' 'yarn:build:es' 'yarn:build:types && yarn build:types:downlevel'", + "build:cjs": "node ../../scripts/inline middleware-endpoint", + "build:es": "yarn g:tsc -p tsconfig.es.json", + "build:types": "yarn g:tsc -p tsconfig.types.json", + "build:types:downlevel": "rimraf dist-types/ts3.4 && downlevel-dts dist-types dist-types/ts3.4", + "stage-release": "rimraf ./.release && yarn pack && mkdir ./.release && tar zxvf ./package.tgz --directory ./.release && rm ./package.tgz", + "clean": "rimraf ./dist-* && rimraf *.tsbuildinfo || exit 0", + "lint": "eslint -c ../../.eslintrc.js \"src/**/*.ts\"", + "format": "prettier --config ../../prettier.config.js --ignore-path ../../.prettierignore 
--write \"**/*.{ts,md,json}\"", + "test": "yarn g:vitest run", + "extract:docs": "api-extractor run --local", + "test:watch": "yarn g:vitest watch" + }, + "main": "./dist-cjs/index.js", + "module": "./dist-es/index.js", + "types": "./dist-types/index.d.ts", + "author": { + "name": "AWS SDK for JavaScript Team", + "url": "https://aws.amazon.com/javascript/" + }, + "license": "Apache-2.0", + "dependencies": { + "@smithy/core": "^3.3.1", + "@smithy/middleware-serde": "^4.0.3", + "@smithy/node-config-provider": "^4.0.2", + "@smithy/shared-ini-file-loader": "^4.0.2", + "@smithy/types": "^4.2.0", + "@smithy/url-parser": "^4.0.2", + "@smithy/util-middleware": "^4.0.2", + "tslib": "^2.6.2" + }, + "devDependencies": { + "concurrently": "7.0.0", + "downlevel-dts": "0.10.1", + "rimraf": "3.0.2", + "typedoc": "0.23.23" + }, + "engines": { + "node": ">=18.0.0" + }, + "typesVersions": { + "<4.0": { + "dist-types/*": [ + "dist-types/ts3.4/*" + ] + } + }, + "files": [ + "dist-*/**" + ], + "browser": { + "./dist-es/adaptors/getEndpointFromConfig": "./dist-es/adaptors/getEndpointFromConfig.browser" + }, + "react-native": { + "./dist-es/adaptors/getEndpointFromConfig": "./dist-es/adaptors/getEndpointFromConfig.browser", + "./dist-cjs/adaptors/getEndpointFromConfig": "./dist-cjs/adaptors/getEndpointFromConfig.browser" + }, + "homepage": "https://github.com/smithy-lang/smithy-typescript/tree/main/packages/middleware-endpoint", + "repository": { + "type": "git", + "url": "https://github.com/smithy-lang/smithy-typescript.git", + "directory": "packages/middleware-endpoint" + }, + "typedoc": { + "entryPoint": "src/index.ts" + }, + "publishConfig": { + "directory": ".release/package" + } +} \ No newline at end of file diff --git a/amplify/functions/downloadDocument/node_modules/@smithy/middleware-retry/LICENSE b/amplify/functions/downloadDocument/node_modules/@smithy/middleware-retry/LICENSE new file mode 100644 index 0000000..dd65ae0 --- /dev/null +++ 
b/amplify/functions/downloadDocument/node_modules/@smithy/middleware-retry/LICENSE @@ -0,0 +1,201 @@ + Apache License + Version 2.0, January 2004 + http://www.apache.org/licenses/ + + TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION + + 1. Definitions. + + "License" shall mean the terms and conditions for use, reproduction, + and distribution as defined by Sections 1 through 9 of this document. + + "Licensor" shall mean the copyright owner or entity authorized by + the copyright owner that is granting the License. + + "Legal Entity" shall mean the union of the acting entity and all + other entities that control, are controlled by, or are under common + control with that entity. For the purposes of this definition, + "control" means (i) the power, direct or indirect, to cause the + direction or management of such entity, whether by contract or + otherwise, or (ii) ownership of fifty percent (50%) or more of the + outstanding shares, or (iii) beneficial ownership of such entity. + + "You" (or "Your") shall mean an individual or Legal Entity + exercising permissions granted by this License. + + "Source" form shall mean the preferred form for making modifications, + including but not limited to software source code, documentation + source, and configuration files. + + "Object" form shall mean any form resulting from mechanical + transformation or translation of a Source form, including but + not limited to compiled object code, generated documentation, + and conversions to other media types. + + "Work" shall mean the work of authorship, whether in Source or + Object form, made available under the License, as indicated by a + copyright notice that is included in or attached to the work + (an example is provided in the Appendix below). 
+ + "Derivative Works" shall mean any work, whether in Source or Object + form, that is based on (or derived from) the Work and for which the + editorial revisions, annotations, elaborations, or other modifications + represent, as a whole, an original work of authorship. For the purposes + of this License, Derivative Works shall not include works that remain + separable from, or merely link (or bind by name) to the interfaces of, + the Work and Derivative Works thereof. + + "Contribution" shall mean any work of authorship, including + the original version of the Work and any modifications or additions + to that Work or Derivative Works thereof, that is intentionally + submitted to Licensor for inclusion in the Work by the copyright owner + or by an individual or Legal Entity authorized to submit on behalf of + the copyright owner. For the purposes of this definition, "submitted" + means any form of electronic, verbal, or written communication sent + to the Licensor or its representatives, including but not limited to + communication on electronic mailing lists, source code control systems, + and issue tracking systems that are managed by, or on behalf of, the + Licensor for the purpose of discussing and improving the Work, but + excluding communication that is conspicuously marked or otherwise + designated in writing by the copyright owner as "Not a Contribution." + + "Contributor" shall mean Licensor and any individual or Legal Entity + on behalf of whom a Contribution has been received by Licensor and + subsequently incorporated within the Work. + + 2. Grant of Copyright License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + copyright license to reproduce, prepare Derivative Works of, + publicly display, publicly perform, sublicense, and distribute the + Work and such Derivative Works in Source or Object form. + + 3. 
Grant of Patent License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + (except as stated in this section) patent license to make, have made, + use, offer to sell, sell, import, and otherwise transfer the Work, + where such license applies only to those patent claims licensable + by such Contributor that are necessarily infringed by their + Contribution(s) alone or by combination of their Contribution(s) + with the Work to which such Contribution(s) was submitted. If You + institute patent litigation against any entity (including a + cross-claim or counterclaim in a lawsuit) alleging that the Work + or a Contribution incorporated within the Work constitutes direct + or contributory patent infringement, then any patent licenses + granted to You under this License for that Work shall terminate + as of the date such litigation is filed. + + 4. Redistribution. You may reproduce and distribute copies of the + Work or Derivative Works thereof in any medium, with or without + modifications, and in Source or Object form, provided that You + meet the following conditions: + + (a) You must give any other recipients of the Work or + Derivative Works a copy of this License; and + + (b) You must cause any modified files to carry prominent notices + stating that You changed the files; and + + (c) You must retain, in the Source form of any Derivative Works + that You distribute, all copyright, patent, trademark, and + attribution notices from the Source form of the Work, + excluding those notices that do not pertain to any part of + the Derivative Works; and + + (d) If the Work includes a "NOTICE" text file as part of its + distribution, then any Derivative Works that You distribute must + include a readable copy of the attribution notices contained + within such NOTICE file, excluding those notices that do not + pertain to any part of the Derivative 
Works, in at least one + of the following places: within a NOTICE text file distributed + as part of the Derivative Works; within the Source form or + documentation, if provided along with the Derivative Works; or, + within a display generated by the Derivative Works, if and + wherever such third-party notices normally appear. The contents + of the NOTICE file are for informational purposes only and + do not modify the License. You may add Your own attribution + notices within Derivative Works that You distribute, alongside + or as an addendum to the NOTICE text from the Work, provided + that such additional attribution notices cannot be construed + as modifying the License. + + You may add Your own copyright statement to Your modifications and + may provide additional or different license terms and conditions + for use, reproduction, or distribution of Your modifications, or + for any such Derivative Works as a whole, provided Your use, + reproduction, and distribution of the Work otherwise complies with + the conditions stated in this License. + + 5. Submission of Contributions. Unless You explicitly state otherwise, + any Contribution intentionally submitted for inclusion in the Work + by You to the Licensor shall be under the terms and conditions of + this License, without any additional terms or conditions. + Notwithstanding the above, nothing herein shall supersede or modify + the terms of any separate license agreement you may have executed + with Licensor regarding such Contributions. + + 6. Trademarks. This License does not grant permission to use the trade + names, trademarks, service marks, or product names of the Licensor, + except as required for reasonable and customary use in describing the + origin of the Work and reproducing the content of the NOTICE file. + + 7. Disclaimer of Warranty. 
Unless required by applicable law or + agreed to in writing, Licensor provides the Work (and each + Contributor provides its Contributions) on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or + implied, including, without limitation, any warranties or conditions + of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A + PARTICULAR PURPOSE. You are solely responsible for determining the + appropriateness of using or redistributing the Work and assume any + risks associated with Your exercise of permissions under this License. + + 8. Limitation of Liability. In no event and under no legal theory, + whether in tort (including negligence), contract, or otherwise, + unless required by applicable law (such as deliberate and grossly + negligent acts) or agreed to in writing, shall any Contributor be + liable to You for damages, including any direct, indirect, special, + incidental, or consequential damages of any character arising as a + result of this License or out of the use or inability to use the + Work (including but not limited to damages for loss of goodwill, + work stoppage, computer failure or malfunction, or any and all + other commercial damages or losses), even if such Contributor + has been advised of the possibility of such damages. + + 9. Accepting Warranty or Additional Liability. While redistributing + the Work or Derivative Works thereof, You may choose to offer, + and charge a fee for, acceptance of support, warranty, indemnity, + or other liability obligations and/or rights consistent with this + License. However, in accepting such obligations, You may act only + on Your own behalf and on Your sole responsibility, not on behalf + of any other Contributor, and only if You agree to indemnify, + defend, and hold each Contributor harmless for any liability + incurred by, or claims asserted against, such Contributor by reason + of your accepting any such warranty or additional liability. 
+ + END OF TERMS AND CONDITIONS + + APPENDIX: How to apply the Apache License to your work. + + To apply the Apache License to your work, attach the following + boilerplate notice, with the fields enclosed by brackets "{}" + replaced with your own identifying information. (Don't include + the brackets!) The text should be enclosed in the appropriate + comment syntax for the file format. We also recommend that a + file or class name and description of purpose be included on the + same "printed page" as the copyright notice for easier + identification within third-party archives. + + Copyright 2018-2020 Amazon.com, Inc. or its affiliates. All Rights Reserved. + + Licensed under the Apache License, Version 2.0 (the "License"); + you may not use this file except in compliance with the License. + You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + + Unless required by applicable law or agreed to in writing, software + distributed under the License is distributed on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + See the License for the specific language governing permissions and + limitations under the License. 
diff --git a/amplify/functions/downloadDocument/node_modules/@smithy/middleware-retry/README.md b/amplify/functions/downloadDocument/node_modules/@smithy/middleware-retry/README.md new file mode 100644 index 0000000..21ce947 --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@smithy/middleware-retry/README.md @@ -0,0 +1,11 @@ +# @smithy/middleware-retry + +[![NPM version](https://img.shields.io/npm/v/@smithy/middleware-retry/latest.svg)](https://www.npmjs.com/package/@smithy/middleware-retry) +[![NPM downloads](https://img.shields.io/npm/dm/@smithy/middleware-retry.svg)](https://www.npmjs.com/package/@smithy/middleware-retry) + +## Usage + +See [@smithy/util-retry](https://github.com/smithy-lang/smithy-typescript/tree/main/packages/util-retry) +for retry behavior and configuration. + +See also: [AWS Documentation: Retry behavior](https://docs.aws.amazon.com/sdkref/latest/guide/feature-retry-behavior.html). diff --git a/amplify/functions/downloadDocument/node_modules/@smithy/middleware-retry/dist-cjs/AdaptiveRetryStrategy.js b/amplify/functions/downloadDocument/node_modules/@smithy/middleware-retry/dist-cjs/AdaptiveRetryStrategy.js new file mode 100644 index 0000000..532e610 --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@smithy/middleware-retry/dist-cjs/AdaptiveRetryStrategy.js @@ -0,0 +1 @@ +module.exports = require("./index.js"); \ No newline at end of file diff --git a/amplify/functions/downloadDocument/node_modules/@smithy/middleware-retry/dist-cjs/StandardRetryStrategy.js b/amplify/functions/downloadDocument/node_modules/@smithy/middleware-retry/dist-cjs/StandardRetryStrategy.js new file mode 100644 index 0000000..532e610 --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@smithy/middleware-retry/dist-cjs/StandardRetryStrategy.js @@ -0,0 +1 @@ +module.exports = require("./index.js"); \ No newline at end of file diff --git 
a/amplify/functions/downloadDocument/node_modules/@smithy/middleware-retry/dist-cjs/configurations.js b/amplify/functions/downloadDocument/node_modules/@smithy/middleware-retry/dist-cjs/configurations.js new file mode 100644 index 0000000..532e610 --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@smithy/middleware-retry/dist-cjs/configurations.js @@ -0,0 +1 @@ +module.exports = require("./index.js"); \ No newline at end of file diff --git a/amplify/functions/downloadDocument/node_modules/@smithy/middleware-retry/dist-cjs/defaultRetryQuota.js b/amplify/functions/downloadDocument/node_modules/@smithy/middleware-retry/dist-cjs/defaultRetryQuota.js new file mode 100644 index 0000000..532e610 --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@smithy/middleware-retry/dist-cjs/defaultRetryQuota.js @@ -0,0 +1 @@ +module.exports = require("./index.js"); \ No newline at end of file diff --git a/amplify/functions/downloadDocument/node_modules/@smithy/middleware-retry/dist-cjs/delayDecider.js b/amplify/functions/downloadDocument/node_modules/@smithy/middleware-retry/dist-cjs/delayDecider.js new file mode 100644 index 0000000..532e610 --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@smithy/middleware-retry/dist-cjs/delayDecider.js @@ -0,0 +1 @@ +module.exports = require("./index.js"); \ No newline at end of file diff --git a/amplify/functions/downloadDocument/node_modules/@smithy/middleware-retry/dist-cjs/index.js b/amplify/functions/downloadDocument/node_modules/@smithy/middleware-retry/dist-cjs/index.js new file mode 100644 index 0000000..c8375f0 --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@smithy/middleware-retry/dist-cjs/index.js @@ -0,0 +1,425 @@ +var __defProp = Object.defineProperty; +var __getOwnPropDesc = Object.getOwnPropertyDescriptor; +var __getOwnPropNames = Object.getOwnPropertyNames; +var __hasOwnProp = Object.prototype.hasOwnProperty; +var __name = (target, value) => 
__defProp(target, "name", { value, configurable: true }); +var __export = (target, all) => { + for (var name in all) + __defProp(target, name, { get: all[name], enumerable: true }); +}; +var __copyProps = (to, from, except, desc) => { + if (from && typeof from === "object" || typeof from === "function") { + for (let key of __getOwnPropNames(from)) + if (!__hasOwnProp.call(to, key) && key !== except) + __defProp(to, key, { get: () => from[key], enumerable: !(desc = __getOwnPropDesc(from, key)) || desc.enumerable }); + } + return to; +}; +var __toCommonJS = (mod) => __copyProps(__defProp({}, "__esModule", { value: true }), mod); + +// src/index.ts +var src_exports = {}; +__export(src_exports, { + AdaptiveRetryStrategy: () => AdaptiveRetryStrategy, + CONFIG_MAX_ATTEMPTS: () => CONFIG_MAX_ATTEMPTS, + CONFIG_RETRY_MODE: () => CONFIG_RETRY_MODE, + ENV_MAX_ATTEMPTS: () => ENV_MAX_ATTEMPTS, + ENV_RETRY_MODE: () => ENV_RETRY_MODE, + NODE_MAX_ATTEMPT_CONFIG_OPTIONS: () => NODE_MAX_ATTEMPT_CONFIG_OPTIONS, + NODE_RETRY_MODE_CONFIG_OPTIONS: () => NODE_RETRY_MODE_CONFIG_OPTIONS, + StandardRetryStrategy: () => StandardRetryStrategy, + defaultDelayDecider: () => defaultDelayDecider, + defaultRetryDecider: () => defaultRetryDecider, + getOmitRetryHeadersPlugin: () => getOmitRetryHeadersPlugin, + getRetryAfterHint: () => getRetryAfterHint, + getRetryPlugin: () => getRetryPlugin, + omitRetryHeadersMiddleware: () => omitRetryHeadersMiddleware, + omitRetryHeadersMiddlewareOptions: () => omitRetryHeadersMiddlewareOptions, + resolveRetryConfig: () => resolveRetryConfig, + retryMiddleware: () => retryMiddleware, + retryMiddlewareOptions: () => retryMiddlewareOptions +}); +module.exports = __toCommonJS(src_exports); + +// src/AdaptiveRetryStrategy.ts + + +// src/StandardRetryStrategy.ts +var import_protocol_http = require("@smithy/protocol-http"); + + +var import_uuid = require("uuid"); + +// src/defaultRetryQuota.ts +var import_util_retry = require("@smithy/util-retry"); +var 
getDefaultRetryQuota = /* @__PURE__ */ __name((initialRetryTokens, options) => { + const MAX_CAPACITY = initialRetryTokens; + const noRetryIncrement = options?.noRetryIncrement ?? import_util_retry.NO_RETRY_INCREMENT; + const retryCost = options?.retryCost ?? import_util_retry.RETRY_COST; + const timeoutRetryCost = options?.timeoutRetryCost ?? import_util_retry.TIMEOUT_RETRY_COST; + let availableCapacity = initialRetryTokens; + const getCapacityAmount = /* @__PURE__ */ __name((error) => error.name === "TimeoutError" ? timeoutRetryCost : retryCost, "getCapacityAmount"); + const hasRetryTokens = /* @__PURE__ */ __name((error) => getCapacityAmount(error) <= availableCapacity, "hasRetryTokens"); + const retrieveRetryTokens = /* @__PURE__ */ __name((error) => { + if (!hasRetryTokens(error)) { + throw new Error("No retry token available"); + } + const capacityAmount = getCapacityAmount(error); + availableCapacity -= capacityAmount; + return capacityAmount; + }, "retrieveRetryTokens"); + const releaseRetryTokens = /* @__PURE__ */ __name((capacityReleaseAmount) => { + availableCapacity += capacityReleaseAmount ?? 
noRetryIncrement; + availableCapacity = Math.min(availableCapacity, MAX_CAPACITY); + }, "releaseRetryTokens"); + return Object.freeze({ + hasRetryTokens, + retrieveRetryTokens, + releaseRetryTokens + }); +}, "getDefaultRetryQuota"); + +// src/delayDecider.ts + +var defaultDelayDecider = /* @__PURE__ */ __name((delayBase, attempts) => Math.floor(Math.min(import_util_retry.MAXIMUM_RETRY_DELAY, Math.random() * 2 ** attempts * delayBase)), "defaultDelayDecider"); + +// src/retryDecider.ts +var import_service_error_classification = require("@smithy/service-error-classification"); +var defaultRetryDecider = /* @__PURE__ */ __name((error) => { + if (!error) { + return false; + } + return (0, import_service_error_classification.isRetryableByTrait)(error) || (0, import_service_error_classification.isClockSkewError)(error) || (0, import_service_error_classification.isThrottlingError)(error) || (0, import_service_error_classification.isTransientError)(error); +}, "defaultRetryDecider"); + +// src/util.ts +var asSdkError = /* @__PURE__ */ __name((error) => { + if (error instanceof Error) + return error; + if (error instanceof Object) + return Object.assign(new Error(), error); + if (typeof error === "string") + return new Error(error); + return new Error(`AWS SDK error wrapper for ${error}`); +}, "asSdkError"); + +// src/StandardRetryStrategy.ts +var StandardRetryStrategy = class { + constructor(maxAttemptsProvider, options) { + this.maxAttemptsProvider = maxAttemptsProvider; + this.mode = import_util_retry.RETRY_MODES.STANDARD; + this.retryDecider = options?.retryDecider ?? defaultRetryDecider; + this.delayDecider = options?.delayDecider ?? defaultDelayDecider; + this.retryQuota = options?.retryQuota ?? 
getDefaultRetryQuota(import_util_retry.INITIAL_RETRY_TOKENS); + } + static { + __name(this, "StandardRetryStrategy"); + } + shouldRetry(error, attempts, maxAttempts) { + return attempts < maxAttempts && this.retryDecider(error) && this.retryQuota.hasRetryTokens(error); + } + async getMaxAttempts() { + let maxAttempts; + try { + maxAttempts = await this.maxAttemptsProvider(); + } catch (error) { + maxAttempts = import_util_retry.DEFAULT_MAX_ATTEMPTS; + } + return maxAttempts; + } + async retry(next, args, options) { + let retryTokenAmount; + let attempts = 0; + let totalDelay = 0; + const maxAttempts = await this.getMaxAttempts(); + const { request } = args; + if (import_protocol_http.HttpRequest.isInstance(request)) { + request.headers[import_util_retry.INVOCATION_ID_HEADER] = (0, import_uuid.v4)(); + } + while (true) { + try { + if (import_protocol_http.HttpRequest.isInstance(request)) { + request.headers[import_util_retry.REQUEST_HEADER] = `attempt=${attempts + 1}; max=${maxAttempts}`; + } + if (options?.beforeRequest) { + await options.beforeRequest(); + } + const { response, output } = await next(args); + if (options?.afterRequest) { + options.afterRequest(response); + } + this.retryQuota.releaseRetryTokens(retryTokenAmount); + output.$metadata.attempts = attempts + 1; + output.$metadata.totalRetryDelay = totalDelay; + return { response, output }; + } catch (e) { + const err = asSdkError(e); + attempts++; + if (this.shouldRetry(err, attempts, maxAttempts)) { + retryTokenAmount = this.retryQuota.retrieveRetryTokens(err); + const delayFromDecider = this.delayDecider( + (0, import_service_error_classification.isThrottlingError)(err) ? 
import_util_retry.THROTTLING_RETRY_DELAY_BASE : import_util_retry.DEFAULT_RETRY_DELAY_BASE, + attempts + ); + const delayFromResponse = getDelayFromRetryAfterHeader(err.$response); + const delay = Math.max(delayFromResponse || 0, delayFromDecider); + totalDelay += delay; + await new Promise((resolve) => setTimeout(resolve, delay)); + continue; + } + if (!err.$metadata) { + err.$metadata = {}; + } + err.$metadata.attempts = attempts; + err.$metadata.totalRetryDelay = totalDelay; + throw err; + } + } + } +}; +var getDelayFromRetryAfterHeader = /* @__PURE__ */ __name((response) => { + if (!import_protocol_http.HttpResponse.isInstance(response)) + return; + const retryAfterHeaderName = Object.keys(response.headers).find((key) => key.toLowerCase() === "retry-after"); + if (!retryAfterHeaderName) + return; + const retryAfter = response.headers[retryAfterHeaderName]; + const retryAfterSeconds = Number(retryAfter); + if (!Number.isNaN(retryAfterSeconds)) + return retryAfterSeconds * 1e3; + const retryAfterDate = new Date(retryAfter); + return retryAfterDate.getTime() - Date.now(); +}, "getDelayFromRetryAfterHeader"); + +// src/AdaptiveRetryStrategy.ts +var AdaptiveRetryStrategy = class extends StandardRetryStrategy { + static { + __name(this, "AdaptiveRetryStrategy"); + } + constructor(maxAttemptsProvider, options) { + const { rateLimiter, ...superOptions } = options ?? {}; + super(maxAttemptsProvider, superOptions); + this.rateLimiter = rateLimiter ?? 
new import_util_retry.DefaultRateLimiter(); + this.mode = import_util_retry.RETRY_MODES.ADAPTIVE; + } + async retry(next, args) { + return super.retry(next, args, { + beforeRequest: async () => { + return this.rateLimiter.getSendToken(); + }, + afterRequest: (response) => { + this.rateLimiter.updateClientSendingRate(response); + } + }); + } +}; + +// src/configurations.ts +var import_util_middleware = require("@smithy/util-middleware"); + +var ENV_MAX_ATTEMPTS = "AWS_MAX_ATTEMPTS"; +var CONFIG_MAX_ATTEMPTS = "max_attempts"; +var NODE_MAX_ATTEMPT_CONFIG_OPTIONS = { + environmentVariableSelector: (env) => { + const value = env[ENV_MAX_ATTEMPTS]; + if (!value) + return void 0; + const maxAttempt = parseInt(value); + if (Number.isNaN(maxAttempt)) { + throw new Error(`Environment variable ${ENV_MAX_ATTEMPTS} mast be a number, got "${value}"`); + } + return maxAttempt; + }, + configFileSelector: (profile) => { + const value = profile[CONFIG_MAX_ATTEMPTS]; + if (!value) + return void 0; + const maxAttempt = parseInt(value); + if (Number.isNaN(maxAttempt)) { + throw new Error(`Shared config file entry ${CONFIG_MAX_ATTEMPTS} mast be a number, got "${value}"`); + } + return maxAttempt; + }, + default: import_util_retry.DEFAULT_MAX_ATTEMPTS +}; +var resolveRetryConfig = /* @__PURE__ */ __name((input) => { + const { retryStrategy, retryMode: _retryMode, maxAttempts: _maxAttempts } = input; + const maxAttempts = (0, import_util_middleware.normalizeProvider)(_maxAttempts ?? 
import_util_retry.DEFAULT_MAX_ATTEMPTS); + return Object.assign(input, { + maxAttempts, + retryStrategy: async () => { + if (retryStrategy) { + return retryStrategy; + } + const retryMode = await (0, import_util_middleware.normalizeProvider)(_retryMode)(); + if (retryMode === import_util_retry.RETRY_MODES.ADAPTIVE) { + return new import_util_retry.AdaptiveRetryStrategy(maxAttempts); + } + return new import_util_retry.StandardRetryStrategy(maxAttempts); + } + }); +}, "resolveRetryConfig"); +var ENV_RETRY_MODE = "AWS_RETRY_MODE"; +var CONFIG_RETRY_MODE = "retry_mode"; +var NODE_RETRY_MODE_CONFIG_OPTIONS = { + environmentVariableSelector: (env) => env[ENV_RETRY_MODE], + configFileSelector: (profile) => profile[CONFIG_RETRY_MODE], + default: import_util_retry.DEFAULT_RETRY_MODE +}; + +// src/omitRetryHeadersMiddleware.ts + + +var omitRetryHeadersMiddleware = /* @__PURE__ */ __name(() => (next) => async (args) => { + const { request } = args; + if (import_protocol_http.HttpRequest.isInstance(request)) { + delete request.headers[import_util_retry.INVOCATION_ID_HEADER]; + delete request.headers[import_util_retry.REQUEST_HEADER]; + } + return next(args); +}, "omitRetryHeadersMiddleware"); +var omitRetryHeadersMiddlewareOptions = { + name: "omitRetryHeadersMiddleware", + tags: ["RETRY", "HEADERS", "OMIT_RETRY_HEADERS"], + relation: "before", + toMiddleware: "awsAuthMiddleware", + override: true +}; +var getOmitRetryHeadersPlugin = /* @__PURE__ */ __name((options) => ({ + applyToStack: (clientStack) => { + clientStack.addRelativeTo(omitRetryHeadersMiddleware(), omitRetryHeadersMiddlewareOptions); + } +}), "getOmitRetryHeadersPlugin"); + +// src/retryMiddleware.ts + + +var import_smithy_client = require("@smithy/smithy-client"); + + +var import_isStreamingPayload = require("./isStreamingPayload/isStreamingPayload"); +var retryMiddleware = /* @__PURE__ */ __name((options) => (next, context) => async (args) => { + let retryStrategy = await options.retryStrategy(); + const 
maxAttempts = await options.maxAttempts(); + if (isRetryStrategyV2(retryStrategy)) { + retryStrategy = retryStrategy; + let retryToken = await retryStrategy.acquireInitialRetryToken(context["partition_id"]); + let lastError = new Error(); + let attempts = 0; + let totalRetryDelay = 0; + const { request } = args; + const isRequest = import_protocol_http.HttpRequest.isInstance(request); + if (isRequest) { + request.headers[import_util_retry.INVOCATION_ID_HEADER] = (0, import_uuid.v4)(); + } + while (true) { + try { + if (isRequest) { + request.headers[import_util_retry.REQUEST_HEADER] = `attempt=${attempts + 1}; max=${maxAttempts}`; + } + const { response, output } = await next(args); + retryStrategy.recordSuccess(retryToken); + output.$metadata.attempts = attempts + 1; + output.$metadata.totalRetryDelay = totalRetryDelay; + return { response, output }; + } catch (e) { + const retryErrorInfo = getRetryErrorInfo(e); + lastError = asSdkError(e); + if (isRequest && (0, import_isStreamingPayload.isStreamingPayload)(request)) { + (context.logger instanceof import_smithy_client.NoOpLogger ? console : context.logger)?.warn( + "An error was encountered in a non-retryable streaming request." 
+ ); + throw lastError; + } + try { + retryToken = await retryStrategy.refreshRetryTokenForRetry(retryToken, retryErrorInfo); + } catch (refreshError) { + if (!lastError.$metadata) { + lastError.$metadata = {}; + } + lastError.$metadata.attempts = attempts + 1; + lastError.$metadata.totalRetryDelay = totalRetryDelay; + throw lastError; + } + attempts = retryToken.getRetryCount(); + const delay = retryToken.getRetryDelay(); + totalRetryDelay += delay; + await new Promise((resolve) => setTimeout(resolve, delay)); + } + } + } else { + retryStrategy = retryStrategy; + if (retryStrategy?.mode) + context.userAgent = [...context.userAgent || [], ["cfg/retry-mode", retryStrategy.mode]]; + return retryStrategy.retry(next, args); + } +}, "retryMiddleware"); +var isRetryStrategyV2 = /* @__PURE__ */ __name((retryStrategy) => typeof retryStrategy.acquireInitialRetryToken !== "undefined" && typeof retryStrategy.refreshRetryTokenForRetry !== "undefined" && typeof retryStrategy.recordSuccess !== "undefined", "isRetryStrategyV2"); +var getRetryErrorInfo = /* @__PURE__ */ __name((error) => { + const errorInfo = { + error, + errorType: getRetryErrorType(error) + }; + const retryAfterHint = getRetryAfterHint(error.$response); + if (retryAfterHint) { + errorInfo.retryAfterHint = retryAfterHint; + } + return errorInfo; +}, "getRetryErrorInfo"); +var getRetryErrorType = /* @__PURE__ */ __name((error) => { + if ((0, import_service_error_classification.isThrottlingError)(error)) + return "THROTTLING"; + if ((0, import_service_error_classification.isTransientError)(error)) + return "TRANSIENT"; + if ((0, import_service_error_classification.isServerError)(error)) + return "SERVER_ERROR"; + return "CLIENT_ERROR"; +}, "getRetryErrorType"); +var retryMiddlewareOptions = { + name: "retryMiddleware", + tags: ["RETRY"], + step: "finalizeRequest", + priority: "high", + override: true +}; +var getRetryPlugin = /* @__PURE__ */ __name((options) => ({ + applyToStack: (clientStack) => { + 
clientStack.add(retryMiddleware(options), retryMiddlewareOptions); + } +}), "getRetryPlugin"); +var getRetryAfterHint = /* @__PURE__ */ __name((response) => { + if (!import_protocol_http.HttpResponse.isInstance(response)) + return; + const retryAfterHeaderName = Object.keys(response.headers).find((key) => key.toLowerCase() === "retry-after"); + if (!retryAfterHeaderName) + return; + const retryAfter = response.headers[retryAfterHeaderName]; + const retryAfterSeconds = Number(retryAfter); + if (!Number.isNaN(retryAfterSeconds)) + return new Date(retryAfterSeconds * 1e3); + const retryAfterDate = new Date(retryAfter); + return retryAfterDate; +}, "getRetryAfterHint"); +// Annotate the CommonJS export names for ESM import in node: + +0 && (module.exports = { + AdaptiveRetryStrategy, + StandardRetryStrategy, + ENV_MAX_ATTEMPTS, + CONFIG_MAX_ATTEMPTS, + NODE_MAX_ATTEMPT_CONFIG_OPTIONS, + resolveRetryConfig, + ENV_RETRY_MODE, + CONFIG_RETRY_MODE, + NODE_RETRY_MODE_CONFIG_OPTIONS, + defaultDelayDecider, + omitRetryHeadersMiddleware, + omitRetryHeadersMiddlewareOptions, + getOmitRetryHeadersPlugin, + defaultRetryDecider, + retryMiddleware, + retryMiddlewareOptions, + getRetryPlugin, + getRetryAfterHint +}); + diff --git a/amplify/functions/downloadDocument/node_modules/@smithy/middleware-retry/dist-cjs/isStreamingPayload/isStreamingPayload.browser.js b/amplify/functions/downloadDocument/node_modules/@smithy/middleware-retry/dist-cjs/isStreamingPayload/isStreamingPayload.browser.js new file mode 100644 index 0000000..21fc19a --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@smithy/middleware-retry/dist-cjs/isStreamingPayload/isStreamingPayload.browser.js @@ -0,0 +1,5 @@ +"use strict"; +Object.defineProperty(exports, "__esModule", { value: true }); +exports.isStreamingPayload = void 0; +const isStreamingPayload = (request) => (request === null || request === void 0 ? 
void 0 : request.body) instanceof ReadableStream; +exports.isStreamingPayload = isStreamingPayload; diff --git a/amplify/functions/downloadDocument/node_modules/@smithy/middleware-retry/dist-cjs/isStreamingPayload/isStreamingPayload.js b/amplify/functions/downloadDocument/node_modules/@smithy/middleware-retry/dist-cjs/isStreamingPayload/isStreamingPayload.js new file mode 100644 index 0000000..06f420b --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@smithy/middleware-retry/dist-cjs/isStreamingPayload/isStreamingPayload.js @@ -0,0 +1,7 @@ +"use strict"; +Object.defineProperty(exports, "__esModule", { value: true }); +exports.isStreamingPayload = void 0; +const stream_1 = require("stream"); +const isStreamingPayload = (request) => (request === null || request === void 0 ? void 0 : request.body) instanceof stream_1.Readable || + (typeof ReadableStream !== "undefined" && (request === null || request === void 0 ? void 0 : request.body) instanceof ReadableStream); +exports.isStreamingPayload = isStreamingPayload; diff --git a/amplify/functions/downloadDocument/node_modules/@smithy/middleware-retry/dist-cjs/omitRetryHeadersMiddleware.js b/amplify/functions/downloadDocument/node_modules/@smithy/middleware-retry/dist-cjs/omitRetryHeadersMiddleware.js new file mode 100644 index 0000000..532e610 --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@smithy/middleware-retry/dist-cjs/omitRetryHeadersMiddleware.js @@ -0,0 +1 @@ +module.exports = require("./index.js"); \ No newline at end of file diff --git a/amplify/functions/downloadDocument/node_modules/@smithy/middleware-retry/dist-cjs/retryDecider.js b/amplify/functions/downloadDocument/node_modules/@smithy/middleware-retry/dist-cjs/retryDecider.js new file mode 100644 index 0000000..532e610 --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@smithy/middleware-retry/dist-cjs/retryDecider.js @@ -0,0 +1 @@ +module.exports = require("./index.js"); \ No newline at end of file 
diff --git a/amplify/functions/downloadDocument/node_modules/@smithy/middleware-retry/dist-cjs/retryMiddleware.js b/amplify/functions/downloadDocument/node_modules/@smithy/middleware-retry/dist-cjs/retryMiddleware.js new file mode 100644 index 0000000..532e610 --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@smithy/middleware-retry/dist-cjs/retryMiddleware.js @@ -0,0 +1 @@ +module.exports = require("./index.js"); \ No newline at end of file diff --git a/amplify/functions/downloadDocument/node_modules/@smithy/middleware-retry/dist-cjs/types.js b/amplify/functions/downloadDocument/node_modules/@smithy/middleware-retry/dist-cjs/types.js new file mode 100644 index 0000000..532e610 --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@smithy/middleware-retry/dist-cjs/types.js @@ -0,0 +1 @@ +module.exports = require("./index.js"); \ No newline at end of file diff --git a/amplify/functions/downloadDocument/node_modules/@smithy/middleware-retry/dist-cjs/util.js b/amplify/functions/downloadDocument/node_modules/@smithy/middleware-retry/dist-cjs/util.js new file mode 100644 index 0000000..532e610 --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@smithy/middleware-retry/dist-cjs/util.js @@ -0,0 +1 @@ +module.exports = require("./index.js"); \ No newline at end of file diff --git a/amplify/functions/downloadDocument/node_modules/@smithy/middleware-retry/dist-es/AdaptiveRetryStrategy.js b/amplify/functions/downloadDocument/node_modules/@smithy/middleware-retry/dist-es/AdaptiveRetryStrategy.js new file mode 100644 index 0000000..d349451 --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@smithy/middleware-retry/dist-es/AdaptiveRetryStrategy.js @@ -0,0 +1,20 @@ +import { DefaultRateLimiter, RETRY_MODES } from "@smithy/util-retry"; +import { StandardRetryStrategy } from "./StandardRetryStrategy"; +export class AdaptiveRetryStrategy extends StandardRetryStrategy { + constructor(maxAttemptsProvider, options) { + 
const { rateLimiter, ...superOptions } = options ?? {}; + super(maxAttemptsProvider, superOptions); + this.rateLimiter = rateLimiter ?? new DefaultRateLimiter(); + this.mode = RETRY_MODES.ADAPTIVE; + } + async retry(next, args) { + return super.retry(next, args, { + beforeRequest: async () => { + return this.rateLimiter.getSendToken(); + }, + afterRequest: (response) => { + this.rateLimiter.updateClientSendingRate(response); + }, + }); + } +} diff --git a/amplify/functions/downloadDocument/node_modules/@smithy/middleware-retry/dist-es/StandardRetryStrategy.js b/amplify/functions/downloadDocument/node_modules/@smithy/middleware-retry/dist-es/StandardRetryStrategy.js new file mode 100644 index 0000000..e718ad6 --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@smithy/middleware-retry/dist-es/StandardRetryStrategy.js @@ -0,0 +1,90 @@ +import { HttpRequest, HttpResponse } from "@smithy/protocol-http"; +import { isThrottlingError } from "@smithy/service-error-classification"; +import { DEFAULT_MAX_ATTEMPTS, DEFAULT_RETRY_DELAY_BASE, INITIAL_RETRY_TOKENS, INVOCATION_ID_HEADER, REQUEST_HEADER, RETRY_MODES, THROTTLING_RETRY_DELAY_BASE, } from "@smithy/util-retry"; +import { v4 } from "uuid"; +import { getDefaultRetryQuota } from "./defaultRetryQuota"; +import { defaultDelayDecider } from "./delayDecider"; +import { defaultRetryDecider } from "./retryDecider"; +import { asSdkError } from "./util"; +export class StandardRetryStrategy { + constructor(maxAttemptsProvider, options) { + this.maxAttemptsProvider = maxAttemptsProvider; + this.mode = RETRY_MODES.STANDARD; + this.retryDecider = options?.retryDecider ?? defaultRetryDecider; + this.delayDecider = options?.delayDecider ?? defaultDelayDecider; + this.retryQuota = options?.retryQuota ?? 
getDefaultRetryQuota(INITIAL_RETRY_TOKENS); + } + shouldRetry(error, attempts, maxAttempts) { + return attempts < maxAttempts && this.retryDecider(error) && this.retryQuota.hasRetryTokens(error); + } + async getMaxAttempts() { + let maxAttempts; + try { + maxAttempts = await this.maxAttemptsProvider(); + } + catch (error) { + maxAttempts = DEFAULT_MAX_ATTEMPTS; + } + return maxAttempts; + } + async retry(next, args, options) { + let retryTokenAmount; + let attempts = 0; + let totalDelay = 0; + const maxAttempts = await this.getMaxAttempts(); + const { request } = args; + if (HttpRequest.isInstance(request)) { + request.headers[INVOCATION_ID_HEADER] = v4(); + } + while (true) { + try { + if (HttpRequest.isInstance(request)) { + request.headers[REQUEST_HEADER] = `attempt=${attempts + 1}; max=${maxAttempts}`; + } + if (options?.beforeRequest) { + await options.beforeRequest(); + } + const { response, output } = await next(args); + if (options?.afterRequest) { + options.afterRequest(response); + } + this.retryQuota.releaseRetryTokens(retryTokenAmount); + output.$metadata.attempts = attempts + 1; + output.$metadata.totalRetryDelay = totalDelay; + return { response, output }; + } + catch (e) { + const err = asSdkError(e); + attempts++; + if (this.shouldRetry(err, attempts, maxAttempts)) { + retryTokenAmount = this.retryQuota.retrieveRetryTokens(err); + const delayFromDecider = this.delayDecider(isThrottlingError(err) ? 
THROTTLING_RETRY_DELAY_BASE : DEFAULT_RETRY_DELAY_BASE, attempts); + const delayFromResponse = getDelayFromRetryAfterHeader(err.$response); + const delay = Math.max(delayFromResponse || 0, delayFromDecider); + totalDelay += delay; + await new Promise((resolve) => setTimeout(resolve, delay)); + continue; + } + if (!err.$metadata) { + err.$metadata = {}; + } + err.$metadata.attempts = attempts; + err.$metadata.totalRetryDelay = totalDelay; + throw err; + } + } + } +} +const getDelayFromRetryAfterHeader = (response) => { + if (!HttpResponse.isInstance(response)) + return; + const retryAfterHeaderName = Object.keys(response.headers).find((key) => key.toLowerCase() === "retry-after"); + if (!retryAfterHeaderName) + return; + const retryAfter = response.headers[retryAfterHeaderName]; + const retryAfterSeconds = Number(retryAfter); + if (!Number.isNaN(retryAfterSeconds)) + return retryAfterSeconds * 1000; + const retryAfterDate = new Date(retryAfter); + return retryAfterDate.getTime() - Date.now(); +}; diff --git a/amplify/functions/downloadDocument/node_modules/@smithy/middleware-retry/dist-es/configurations.js b/amplify/functions/downloadDocument/node_modules/@smithy/middleware-retry/dist-es/configurations.js new file mode 100644 index 0000000..ec375e3 --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@smithy/middleware-retry/dist-es/configurations.js @@ -0,0 +1,51 @@ +import { normalizeProvider } from "@smithy/util-middleware"; +import { AdaptiveRetryStrategy, DEFAULT_MAX_ATTEMPTS, DEFAULT_RETRY_MODE, RETRY_MODES, StandardRetryStrategy, } from "@smithy/util-retry"; +export const ENV_MAX_ATTEMPTS = "AWS_MAX_ATTEMPTS"; +export const CONFIG_MAX_ATTEMPTS = "max_attempts"; +export const NODE_MAX_ATTEMPT_CONFIG_OPTIONS = { + environmentVariableSelector: (env) => { + const value = env[ENV_MAX_ATTEMPTS]; + if (!value) + return undefined; + const maxAttempt = parseInt(value); + if (Number.isNaN(maxAttempt)) { + throw new Error(`Environment variable 
${ENV_MAX_ATTEMPTS} must be a number, got "${value}"`); + } + return maxAttempt; + }, + configFileSelector: (profile) => { + const value = profile[CONFIG_MAX_ATTEMPTS]; + if (!value) + return undefined; + const maxAttempt = parseInt(value); + if (Number.isNaN(maxAttempt)) { + throw new Error(`Shared config file entry ${CONFIG_MAX_ATTEMPTS} must be a number, got "${value}"`); + } + return maxAttempt; + }, + default: DEFAULT_MAX_ATTEMPTS, +}; +export const resolveRetryConfig = (input) => { + const { retryStrategy, retryMode: _retryMode, maxAttempts: _maxAttempts } = input; + const maxAttempts = normalizeProvider(_maxAttempts ?? DEFAULT_MAX_ATTEMPTS); + return Object.assign(input, { + maxAttempts, + retryStrategy: async () => { + if (retryStrategy) { + return retryStrategy; + } + const retryMode = await normalizeProvider(_retryMode)(); + if (retryMode === RETRY_MODES.ADAPTIVE) { + return new AdaptiveRetryStrategy(maxAttempts); + } + return new StandardRetryStrategy(maxAttempts); + }, + }); +}; +export const ENV_RETRY_MODE = "AWS_RETRY_MODE"; +export const CONFIG_RETRY_MODE = "retry_mode"; +export const NODE_RETRY_MODE_CONFIG_OPTIONS = { + environmentVariableSelector: (env) => env[ENV_RETRY_MODE], + configFileSelector: (profile) => profile[CONFIG_RETRY_MODE], + default: DEFAULT_RETRY_MODE, +}; diff --git a/amplify/functions/downloadDocument/node_modules/@smithy/middleware-retry/dist-es/defaultRetryQuota.js b/amplify/functions/downloadDocument/node_modules/@smithy/middleware-retry/dist-es/defaultRetryQuota.js new file mode 100644 index 0000000..4bf6771 --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@smithy/middleware-retry/dist-es/defaultRetryQuota.js @@ -0,0 +1,27 @@ +import { NO_RETRY_INCREMENT, RETRY_COST, TIMEOUT_RETRY_COST } from "@smithy/util-retry"; +export const getDefaultRetryQuota = (initialRetryTokens, options) => { + const MAX_CAPACITY = initialRetryTokens; + const noRetryIncrement = options?.noRetryIncrement ?? 
NO_RETRY_INCREMENT; + const retryCost = options?.retryCost ?? RETRY_COST; + const timeoutRetryCost = options?.timeoutRetryCost ?? TIMEOUT_RETRY_COST; + let availableCapacity = initialRetryTokens; + const getCapacityAmount = (error) => (error.name === "TimeoutError" ? timeoutRetryCost : retryCost); + const hasRetryTokens = (error) => getCapacityAmount(error) <= availableCapacity; + const retrieveRetryTokens = (error) => { + if (!hasRetryTokens(error)) { + throw new Error("No retry token available"); + } + const capacityAmount = getCapacityAmount(error); + availableCapacity -= capacityAmount; + return capacityAmount; + }; + const releaseRetryTokens = (capacityReleaseAmount) => { + availableCapacity += capacityReleaseAmount ?? noRetryIncrement; + availableCapacity = Math.min(availableCapacity, MAX_CAPACITY); + }; + return Object.freeze({ + hasRetryTokens, + retrieveRetryTokens, + releaseRetryTokens, + }); +}; diff --git a/amplify/functions/downloadDocument/node_modules/@smithy/middleware-retry/dist-es/delayDecider.js b/amplify/functions/downloadDocument/node_modules/@smithy/middleware-retry/dist-es/delayDecider.js new file mode 100644 index 0000000..2928506 --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@smithy/middleware-retry/dist-es/delayDecider.js @@ -0,0 +1,2 @@ +import { MAXIMUM_RETRY_DELAY } from "@smithy/util-retry"; +export const defaultDelayDecider = (delayBase, attempts) => Math.floor(Math.min(MAXIMUM_RETRY_DELAY, Math.random() * 2 ** attempts * delayBase)); diff --git a/amplify/functions/downloadDocument/node_modules/@smithy/middleware-retry/dist-es/index.js b/amplify/functions/downloadDocument/node_modules/@smithy/middleware-retry/dist-es/index.js new file mode 100644 index 0000000..9ebe326 --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@smithy/middleware-retry/dist-es/index.js @@ -0,0 +1,7 @@ +export * from "./AdaptiveRetryStrategy"; +export * from "./StandardRetryStrategy"; +export * from "./configurations"; 
+export * from "./delayDecider"; +export * from "./omitRetryHeadersMiddleware"; +export * from "./retryDecider"; +export * from "./retryMiddleware"; diff --git a/amplify/functions/downloadDocument/node_modules/@smithy/middleware-retry/dist-es/isStreamingPayload/isStreamingPayload.browser.js b/amplify/functions/downloadDocument/node_modules/@smithy/middleware-retry/dist-es/isStreamingPayload/isStreamingPayload.browser.js new file mode 100644 index 0000000..9569e92 --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@smithy/middleware-retry/dist-es/isStreamingPayload/isStreamingPayload.browser.js @@ -0,0 +1 @@ +export const isStreamingPayload = (request) => request?.body instanceof ReadableStream; diff --git a/amplify/functions/downloadDocument/node_modules/@smithy/middleware-retry/dist-es/isStreamingPayload/isStreamingPayload.js b/amplify/functions/downloadDocument/node_modules/@smithy/middleware-retry/dist-es/isStreamingPayload/isStreamingPayload.js new file mode 100644 index 0000000..7dcc687 --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@smithy/middleware-retry/dist-es/isStreamingPayload/isStreamingPayload.js @@ -0,0 +1,3 @@ +import { Readable } from "stream"; +export const isStreamingPayload = (request) => request?.body instanceof Readable || + (typeof ReadableStream !== "undefined" && request?.body instanceof ReadableStream); diff --git a/amplify/functions/downloadDocument/node_modules/@smithy/middleware-retry/dist-es/omitRetryHeadersMiddleware.js b/amplify/functions/downloadDocument/node_modules/@smithy/middleware-retry/dist-es/omitRetryHeadersMiddleware.js new file mode 100644 index 0000000..cb3c372 --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@smithy/middleware-retry/dist-es/omitRetryHeadersMiddleware.js @@ -0,0 +1,22 @@ +import { HttpRequest } from "@smithy/protocol-http"; +import { INVOCATION_ID_HEADER, REQUEST_HEADER } from "@smithy/util-retry"; +export const omitRetryHeadersMiddleware = () => 
(next) => async (args) => { + const { request } = args; + if (HttpRequest.isInstance(request)) { + delete request.headers[INVOCATION_ID_HEADER]; + delete request.headers[REQUEST_HEADER]; + } + return next(args); +}; +export const omitRetryHeadersMiddlewareOptions = { + name: "omitRetryHeadersMiddleware", + tags: ["RETRY", "HEADERS", "OMIT_RETRY_HEADERS"], + relation: "before", + toMiddleware: "awsAuthMiddleware", + override: true, +}; +export const getOmitRetryHeadersPlugin = (options) => ({ + applyToStack: (clientStack) => { + clientStack.addRelativeTo(omitRetryHeadersMiddleware(), omitRetryHeadersMiddlewareOptions); + }, +}); diff --git a/amplify/functions/downloadDocument/node_modules/@smithy/middleware-retry/dist-es/retryDecider.js b/amplify/functions/downloadDocument/node_modules/@smithy/middleware-retry/dist-es/retryDecider.js new file mode 100644 index 0000000..b965fba --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@smithy/middleware-retry/dist-es/retryDecider.js @@ -0,0 +1,7 @@ +import { isClockSkewError, isRetryableByTrait, isThrottlingError, isTransientError, } from "@smithy/service-error-classification"; +export const defaultRetryDecider = (error) => { + if (!error) { + return false; + } + return isRetryableByTrait(error) || isClockSkewError(error) || isThrottlingError(error) || isTransientError(error); +}; diff --git a/amplify/functions/downloadDocument/node_modules/@smithy/middleware-retry/dist-es/retryMiddleware.js b/amplify/functions/downloadDocument/node_modules/@smithy/middleware-retry/dist-es/retryMiddleware.js new file mode 100644 index 0000000..a897735 --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@smithy/middleware-retry/dist-es/retryMiddleware.js @@ -0,0 +1,112 @@ +import { HttpRequest, HttpResponse } from "@smithy/protocol-http"; +import { isServerError, isThrottlingError, isTransientError } from "@smithy/service-error-classification"; +import { NoOpLogger } from "@smithy/smithy-client"; +import { 
INVOCATION_ID_HEADER, REQUEST_HEADER } from "@smithy/util-retry"; +import { v4 } from "uuid"; +import { isStreamingPayload } from "./isStreamingPayload/isStreamingPayload"; +import { asSdkError } from "./util"; +export const retryMiddleware = (options) => (next, context) => async (args) => { + let retryStrategy = await options.retryStrategy(); + const maxAttempts = await options.maxAttempts(); + if (isRetryStrategyV2(retryStrategy)) { + retryStrategy = retryStrategy; + let retryToken = await retryStrategy.acquireInitialRetryToken(context["partition_id"]); + let lastError = new Error(); + let attempts = 0; + let totalRetryDelay = 0; + const { request } = args; + const isRequest = HttpRequest.isInstance(request); + if (isRequest) { + request.headers[INVOCATION_ID_HEADER] = v4(); + } + while (true) { + try { + if (isRequest) { + request.headers[REQUEST_HEADER] = `attempt=${attempts + 1}; max=${maxAttempts}`; + } + const { response, output } = await next(args); + retryStrategy.recordSuccess(retryToken); + output.$metadata.attempts = attempts + 1; + output.$metadata.totalRetryDelay = totalRetryDelay; + return { response, output }; + } + catch (e) { + const retryErrorInfo = getRetryErrorInfo(e); + lastError = asSdkError(e); + if (isRequest && isStreamingPayload(request)) { + (context.logger instanceof NoOpLogger ? 
console : context.logger)?.warn("An error was encountered in a non-retryable streaming request."); + throw lastError; + } + try { + retryToken = await retryStrategy.refreshRetryTokenForRetry(retryToken, retryErrorInfo); + } + catch (refreshError) { + if (!lastError.$metadata) { + lastError.$metadata = {}; + } + lastError.$metadata.attempts = attempts + 1; + lastError.$metadata.totalRetryDelay = totalRetryDelay; + throw lastError; + } + attempts = retryToken.getRetryCount(); + const delay = retryToken.getRetryDelay(); + totalRetryDelay += delay; + await new Promise((resolve) => setTimeout(resolve, delay)); + } + } + } + else { + retryStrategy = retryStrategy; + if (retryStrategy?.mode) + context.userAgent = [...(context.userAgent || []), ["cfg/retry-mode", retryStrategy.mode]]; + return retryStrategy.retry(next, args); + } +}; +const isRetryStrategyV2 = (retryStrategy) => typeof retryStrategy.acquireInitialRetryToken !== "undefined" && + typeof retryStrategy.refreshRetryTokenForRetry !== "undefined" && + typeof retryStrategy.recordSuccess !== "undefined"; +const getRetryErrorInfo = (error) => { + const errorInfo = { + error, + errorType: getRetryErrorType(error), + }; + const retryAfterHint = getRetryAfterHint(error.$response); + if (retryAfterHint) { + errorInfo.retryAfterHint = retryAfterHint; + } + return errorInfo; +}; +const getRetryErrorType = (error) => { + if (isThrottlingError(error)) + return "THROTTLING"; + if (isTransientError(error)) + return "TRANSIENT"; + if (isServerError(error)) + return "SERVER_ERROR"; + return "CLIENT_ERROR"; +}; +export const retryMiddlewareOptions = { + name: "retryMiddleware", + tags: ["RETRY"], + step: "finalizeRequest", + priority: "high", + override: true, +}; +export const getRetryPlugin = (options) => ({ + applyToStack: (clientStack) => { + clientStack.add(retryMiddleware(options), retryMiddlewareOptions); + }, +}); +export const getRetryAfterHint = (response) => { + if (!HttpResponse.isInstance(response)) + return; + 
const retryAfterHeaderName = Object.keys(response.headers).find((key) => key.toLowerCase() === "retry-after"); + if (!retryAfterHeaderName) + return; + const retryAfter = response.headers[retryAfterHeaderName]; + const retryAfterSeconds = Number(retryAfter); + if (!Number.isNaN(retryAfterSeconds)) + return new Date(retryAfterSeconds * 1000); + const retryAfterDate = new Date(retryAfter); + return retryAfterDate; +}; diff --git a/amplify/functions/downloadDocument/node_modules/@smithy/middleware-retry/dist-es/types.js b/amplify/functions/downloadDocument/node_modules/@smithy/middleware-retry/dist-es/types.js new file mode 100644 index 0000000..cb0ff5c --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@smithy/middleware-retry/dist-es/types.js @@ -0,0 +1 @@ +export {}; diff --git a/amplify/functions/downloadDocument/node_modules/@smithy/middleware-retry/dist-es/util.js b/amplify/functions/downloadDocument/node_modules/@smithy/middleware-retry/dist-es/util.js new file mode 100644 index 0000000..f45e6b4 --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@smithy/middleware-retry/dist-es/util.js @@ -0,0 +1,9 @@ +export const asSdkError = (error) => { + if (error instanceof Error) + return error; + if (error instanceof Object) + return Object.assign(new Error(), error); + if (typeof error === "string") + return new Error(error); + return new Error(`AWS SDK error wrapper for ${error}`); +}; diff --git a/amplify/functions/downloadDocument/node_modules/@smithy/middleware-retry/dist-types/AdaptiveRetryStrategy.d.ts b/amplify/functions/downloadDocument/node_modules/@smithy/middleware-retry/dist-types/AdaptiveRetryStrategy.d.ts new file mode 100644 index 0000000..98a6a1d --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@smithy/middleware-retry/dist-types/AdaptiveRetryStrategy.d.ts @@ -0,0 +1,22 @@ +import { FinalizeHandler, FinalizeHandlerArguments, MetadataBearer, Provider } from "@smithy/types"; +import { RateLimiter } from 
"@smithy/util-retry"; +import { StandardRetryStrategy, StandardRetryStrategyOptions } from "./StandardRetryStrategy"; +/** + * @public + * Strategy options to be passed to AdaptiveRetryStrategy + */ +export interface AdaptiveRetryStrategyOptions extends StandardRetryStrategyOptions { + rateLimiter?: RateLimiter; +} +/** + * @public + * @deprecated use AdaptiveRetryStrategy from @smithy/util-retry + */ +export declare class AdaptiveRetryStrategy extends StandardRetryStrategy { + private rateLimiter; + constructor(maxAttemptsProvider: Provider, options?: AdaptiveRetryStrategyOptions); + retry(next: FinalizeHandler, args: FinalizeHandlerArguments): Promise<{ + response: unknown; + output: Ouput; + }>; +} diff --git a/amplify/functions/downloadDocument/node_modules/@smithy/middleware-retry/dist-types/StandardRetryStrategy.d.ts b/amplify/functions/downloadDocument/node_modules/@smithy/middleware-retry/dist-types/StandardRetryStrategy.d.ts new file mode 100644 index 0000000..7007ac3 --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@smithy/middleware-retry/dist-types/StandardRetryStrategy.d.ts @@ -0,0 +1,33 @@ +import { FinalizeHandler, FinalizeHandlerArguments, MetadataBearer, Provider, RetryStrategy } from "@smithy/types"; +import { DelayDecider, RetryDecider, RetryQuota } from "./types"; +/** + * Strategy options to be passed to StandardRetryStrategy + * @public + * @deprecated use StandardRetryStrategy from @smithy/util-retry + */ +export interface StandardRetryStrategyOptions { + retryDecider?: RetryDecider; + delayDecider?: DelayDecider; + retryQuota?: RetryQuota; +} +/** + * @public + * @deprecated use StandardRetryStrategy from @smithy/util-retry + */ +export declare class StandardRetryStrategy implements RetryStrategy { + private readonly maxAttemptsProvider; + private retryDecider; + private delayDecider; + private retryQuota; + mode: string; + constructor(maxAttemptsProvider: Provider, options?: StandardRetryStrategyOptions); + private 
shouldRetry; + private getMaxAttempts; + retry(next: FinalizeHandler, args: FinalizeHandlerArguments, options?: { + beforeRequest: Function; + afterRequest: Function; + }): Promise<{ + response: unknown; + output: Ouput; + }>; +} diff --git a/amplify/functions/downloadDocument/node_modules/@smithy/middleware-retry/dist-types/configurations.d.ts b/amplify/functions/downloadDocument/node_modules/@smithy/middleware-retry/dist-types/configurations.d.ts new file mode 100644 index 0000000..150c2a3 --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@smithy/middleware-retry/dist-types/configurations.d.ts @@ -0,0 +1,66 @@ +import { LoadedConfigSelectors } from "@smithy/node-config-provider"; +import { Provider, RetryStrategy, RetryStrategyV2 } from "@smithy/types"; +/** + * @internal + */ +export declare const ENV_MAX_ATTEMPTS = "AWS_MAX_ATTEMPTS"; +/** + * @internal + */ +export declare const CONFIG_MAX_ATTEMPTS = "max_attempts"; +/** + * @internal + */ +export declare const NODE_MAX_ATTEMPT_CONFIG_OPTIONS: LoadedConfigSelectors; +/** + * @public + */ +export interface RetryInputConfig { + /** + * The maximum number of times requests that encounter retryable failures should be attempted. + */ + maxAttempts?: number | Provider; + /** + * The strategy to retry the request. Using built-in exponential backoff strategy by default. + */ + retryStrategy?: RetryStrategy | RetryStrategyV2; +} +/** + * @internal + */ +export interface PreviouslyResolved { + /** + * Specifies provider for retry algorithm to use. 
+ * @internal + */ + retryMode: string | Provider; +} +/** + * @internal + */ +export interface RetryResolvedConfig { + /** + * Resolved value for input config {@link RetryInputConfig.maxAttempts} + */ + maxAttempts: Provider; + /** + * Resolved value for input config {@link RetryInputConfig.retryStrategy} + */ + retryStrategy: Provider; +} +/** + * @internal + */ +export declare const resolveRetryConfig: (input: T & PreviouslyResolved & RetryInputConfig) => T & RetryResolvedConfig; +/** + * @internal + */ +export declare const ENV_RETRY_MODE = "AWS_RETRY_MODE"; +/** + * @internal + */ +export declare const CONFIG_RETRY_MODE = "retry_mode"; +/** + * @internal + */ +export declare const NODE_RETRY_MODE_CONFIG_OPTIONS: LoadedConfigSelectors; diff --git a/amplify/functions/downloadDocument/node_modules/@smithy/middleware-retry/dist-types/defaultRetryQuota.d.ts b/amplify/functions/downloadDocument/node_modules/@smithy/middleware-retry/dist-types/defaultRetryQuota.d.ts new file mode 100644 index 0000000..332a494 --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@smithy/middleware-retry/dist-types/defaultRetryQuota.d.ts @@ -0,0 +1,24 @@ +import { RetryQuota } from "./types"; +/** + * @internal + */ +export interface DefaultRetryQuotaOptions { + /** + * The total amount of retry token to be incremented from retry token balance + * if an SDK operation invocation succeeds without requiring a retry request. + */ + noRetryIncrement?: number; + /** + * The total amount of retry tokens to be decremented from retry token balance. + */ + retryCost?: number; + /** + * The total amount of retry tokens to be decremented from retry token balance + * when a throttling error is encountered. 
+ */ + timeoutRetryCost?: number; +} +/** + * @internal + */ +export declare const getDefaultRetryQuota: (initialRetryTokens: number, options?: DefaultRetryQuotaOptions) => RetryQuota; diff --git a/amplify/functions/downloadDocument/node_modules/@smithy/middleware-retry/dist-types/delayDecider.d.ts b/amplify/functions/downloadDocument/node_modules/@smithy/middleware-retry/dist-types/delayDecider.d.ts new file mode 100644 index 0000000..986ff42 --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@smithy/middleware-retry/dist-types/delayDecider.d.ts @@ -0,0 +1,5 @@ +/** + * @internal + * Calculate a capped, fully-jittered exponential backoff time. + */ +export declare const defaultDelayDecider: (delayBase: number, attempts: number) => number; diff --git a/amplify/functions/downloadDocument/node_modules/@smithy/middleware-retry/dist-types/index.d.ts b/amplify/functions/downloadDocument/node_modules/@smithy/middleware-retry/dist-types/index.d.ts new file mode 100644 index 0000000..9ebe326 --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@smithy/middleware-retry/dist-types/index.d.ts @@ -0,0 +1,7 @@ +export * from "./AdaptiveRetryStrategy"; +export * from "./StandardRetryStrategy"; +export * from "./configurations"; +export * from "./delayDecider"; +export * from "./omitRetryHeadersMiddleware"; +export * from "./retryDecider"; +export * from "./retryMiddleware"; diff --git a/amplify/functions/downloadDocument/node_modules/@smithy/middleware-retry/dist-types/isStreamingPayload/isStreamingPayload.browser.d.ts b/amplify/functions/downloadDocument/node_modules/@smithy/middleware-retry/dist-types/isStreamingPayload/isStreamingPayload.browser.d.ts new file mode 100644 index 0000000..48d70ba --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@smithy/middleware-retry/dist-types/isStreamingPayload/isStreamingPayload.browser.d.ts @@ -0,0 +1,5 @@ +import type { HttpRequest } from "@smithy/protocol-http"; +/** + * @internal + */ 
+export declare const isStreamingPayload: (request: HttpRequest) => boolean; diff --git a/amplify/functions/downloadDocument/node_modules/@smithy/middleware-retry/dist-types/isStreamingPayload/isStreamingPayload.d.ts b/amplify/functions/downloadDocument/node_modules/@smithy/middleware-retry/dist-types/isStreamingPayload/isStreamingPayload.d.ts new file mode 100644 index 0000000..48d70ba --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@smithy/middleware-retry/dist-types/isStreamingPayload/isStreamingPayload.d.ts @@ -0,0 +1,5 @@ +import type { HttpRequest } from "@smithy/protocol-http"; +/** + * @internal + */ +export declare const isStreamingPayload: (request: HttpRequest) => boolean; diff --git a/amplify/functions/downloadDocument/node_modules/@smithy/middleware-retry/dist-types/omitRetryHeadersMiddleware.d.ts b/amplify/functions/downloadDocument/node_modules/@smithy/middleware-retry/dist-types/omitRetryHeadersMiddleware.d.ts new file mode 100644 index 0000000..50c1ab6 --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@smithy/middleware-retry/dist-types/omitRetryHeadersMiddleware.d.ts @@ -0,0 +1,13 @@ +import { FinalizeHandler, MetadataBearer, Pluggable, RelativeMiddlewareOptions } from "@smithy/types"; +/** + * @internal + */ +export declare const omitRetryHeadersMiddleware: () => (next: FinalizeHandler) => FinalizeHandler; +/** + * @internal + */ +export declare const omitRetryHeadersMiddlewareOptions: RelativeMiddlewareOptions; +/** + * @internal + */ +export declare const getOmitRetryHeadersPlugin: (options: unknown) => Pluggable; diff --git a/amplify/functions/downloadDocument/node_modules/@smithy/middleware-retry/dist-types/retryDecider.d.ts b/amplify/functions/downloadDocument/node_modules/@smithy/middleware-retry/dist-types/retryDecider.d.ts new file mode 100644 index 0000000..11a4a9c --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@smithy/middleware-retry/dist-types/retryDecider.d.ts @@ -0,0 +1,6 @@ 
+import { SdkError } from "@smithy/types"; +/** + * @internal + * @deprecated this is only used in the deprecated StandardRetryStrategy. Do not use in new code. + */ +export declare const defaultRetryDecider: (error: SdkError) => boolean; diff --git a/amplify/functions/downloadDocument/node_modules/@smithy/middleware-retry/dist-types/retryMiddleware.d.ts b/amplify/functions/downloadDocument/node_modules/@smithy/middleware-retry/dist-types/retryMiddleware.d.ts new file mode 100644 index 0000000..9310301 --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@smithy/middleware-retry/dist-types/retryMiddleware.d.ts @@ -0,0 +1,18 @@ +import { AbsoluteLocation, FinalizeHandler, FinalizeRequestHandlerOptions, HandlerExecutionContext, MetadataBearer, Pluggable } from "@smithy/types"; +import { RetryResolvedConfig } from "./configurations"; +/** + * @internal + */ +export declare const retryMiddleware: (options: RetryResolvedConfig) => (next: FinalizeHandler, context: HandlerExecutionContext) => FinalizeHandler; +/** + * @internal + */ +export declare const retryMiddlewareOptions: FinalizeRequestHandlerOptions & AbsoluteLocation; +/** + * @internal + */ +export declare const getRetryPlugin: (options: RetryResolvedConfig) => Pluggable; +/** + * @internal + */ +export declare const getRetryAfterHint: (response: unknown) => Date | undefined; diff --git a/amplify/functions/downloadDocument/node_modules/@smithy/middleware-retry/dist-types/ts3.4/AdaptiveRetryStrategy.d.ts b/amplify/functions/downloadDocument/node_modules/@smithy/middleware-retry/dist-types/ts3.4/AdaptiveRetryStrategy.d.ts new file mode 100644 index 0000000..33f0416 --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@smithy/middleware-retry/dist-types/ts3.4/AdaptiveRetryStrategy.d.ts @@ -0,0 +1,22 @@ +import { FinalizeHandler, FinalizeHandlerArguments, MetadataBearer, Provider } from "@smithy/types"; +import { RateLimiter } from "@smithy/util-retry"; +import { StandardRetryStrategy, 
StandardRetryStrategyOptions } from "./StandardRetryStrategy"; +/** + * @public + * Strategy options to be passed to AdaptiveRetryStrategy + */ +export interface AdaptiveRetryStrategyOptions extends StandardRetryStrategyOptions { + rateLimiter?: RateLimiter; +} +/** + * @public + * @deprecated use AdaptiveRetryStrategy from @smithy/util-retry + */ +export declare class AdaptiveRetryStrategy extends StandardRetryStrategy { + private rateLimiter; + constructor(maxAttemptsProvider: Provider, options?: AdaptiveRetryStrategyOptions); + retry(next: FinalizeHandler, args: FinalizeHandlerArguments): Promise<{ + response: unknown; + output: Ouput; + }>; +} diff --git a/amplify/functions/downloadDocument/node_modules/@smithy/middleware-retry/dist-types/ts3.4/StandardRetryStrategy.d.ts b/amplify/functions/downloadDocument/node_modules/@smithy/middleware-retry/dist-types/ts3.4/StandardRetryStrategy.d.ts new file mode 100644 index 0000000..b4656d2 --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@smithy/middleware-retry/dist-types/ts3.4/StandardRetryStrategy.d.ts @@ -0,0 +1,33 @@ +import { FinalizeHandler, FinalizeHandlerArguments, MetadataBearer, Provider, RetryStrategy } from "@smithy/types"; +import { DelayDecider, RetryDecider, RetryQuota } from "./types"; +/** + * Strategy options to be passed to StandardRetryStrategy + * @public + * @deprecated use StandardRetryStrategy from @smithy/util-retry + */ +export interface StandardRetryStrategyOptions { + retryDecider?: RetryDecider; + delayDecider?: DelayDecider; + retryQuota?: RetryQuota; +} +/** + * @public + * @deprecated use StandardRetryStrategy from @smithy/util-retry + */ +export declare class StandardRetryStrategy implements RetryStrategy { + private readonly maxAttemptsProvider; + private retryDecider; + private delayDecider; + private retryQuota; + mode: string; + constructor(maxAttemptsProvider: Provider, options?: StandardRetryStrategyOptions); + private shouldRetry; + private getMaxAttempts; + 
retry(next: FinalizeHandler, args: FinalizeHandlerArguments, options?: { + beforeRequest: Function; + afterRequest: Function; + }): Promise<{ + response: unknown; + output: Ouput; + }>; +} diff --git a/amplify/functions/downloadDocument/node_modules/@smithy/middleware-retry/dist-types/ts3.4/configurations.d.ts b/amplify/functions/downloadDocument/node_modules/@smithy/middleware-retry/dist-types/ts3.4/configurations.d.ts new file mode 100644 index 0000000..79f8646 --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@smithy/middleware-retry/dist-types/ts3.4/configurations.d.ts @@ -0,0 +1,66 @@ +import { LoadedConfigSelectors } from "@smithy/node-config-provider"; +import { Provider, RetryStrategy, RetryStrategyV2 } from "@smithy/types"; +/** + * @internal + */ +export declare const ENV_MAX_ATTEMPTS = "AWS_MAX_ATTEMPTS"; +/** + * @internal + */ +export declare const CONFIG_MAX_ATTEMPTS = "max_attempts"; +/** + * @internal + */ +export declare const NODE_MAX_ATTEMPT_CONFIG_OPTIONS: LoadedConfigSelectors; +/** + * @public + */ +export interface RetryInputConfig { + /** + * The maximum number of times requests that encounter retryable failures should be attempted. + */ + maxAttempts?: number | Provider; + /** + * The strategy to retry the request. Using built-in exponential backoff strategy by default. + */ + retryStrategy?: RetryStrategy | RetryStrategyV2; +} +/** + * @internal + */ +export interface PreviouslyResolved { + /** + * Specifies provider for retry algorithm to use. 
+ * @internal + */ + retryMode: string | Provider; +} +/** + * @internal + */ +export interface RetryResolvedConfig { + /** + * Resolved value for input config {@link RetryInputConfig.maxAttempts} + */ + maxAttempts: Provider; + /** + * Resolved value for input config {@link RetryInputConfig.retryStrategy} + */ + retryStrategy: Provider; +} +/** + * @internal + */ +export declare const resolveRetryConfig: (input: T & PreviouslyResolved & RetryInputConfig) => T & RetryResolvedConfig; +/** + * @internal + */ +export declare const ENV_RETRY_MODE = "AWS_RETRY_MODE"; +/** + * @internal + */ +export declare const CONFIG_RETRY_MODE = "retry_mode"; +/** + * @internal + */ +export declare const NODE_RETRY_MODE_CONFIG_OPTIONS: LoadedConfigSelectors; diff --git a/amplify/functions/downloadDocument/node_modules/@smithy/middleware-retry/dist-types/ts3.4/defaultRetryQuota.d.ts b/amplify/functions/downloadDocument/node_modules/@smithy/middleware-retry/dist-types/ts3.4/defaultRetryQuota.d.ts new file mode 100644 index 0000000..704b5af --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@smithy/middleware-retry/dist-types/ts3.4/defaultRetryQuota.d.ts @@ -0,0 +1,24 @@ +import { RetryQuota } from "./types"; +/** + * @internal + */ +export interface DefaultRetryQuotaOptions { + /** + * The total amount of retry token to be incremented from retry token balance + * if an SDK operation invocation succeeds without requiring a retry request. + */ + noRetryIncrement?: number; + /** + * The total amount of retry tokens to be decremented from retry token balance. + */ + retryCost?: number; + /** + * The total amount of retry tokens to be decremented from retry token balance + * when a throttling error is encountered. 
+ */ + timeoutRetryCost?: number; +} +/** + * @internal + */ +export declare const getDefaultRetryQuota: (initialRetryTokens: number, options?: DefaultRetryQuotaOptions) => RetryQuota; diff --git a/amplify/functions/downloadDocument/node_modules/@smithy/middleware-retry/dist-types/ts3.4/delayDecider.d.ts b/amplify/functions/downloadDocument/node_modules/@smithy/middleware-retry/dist-types/ts3.4/delayDecider.d.ts new file mode 100644 index 0000000..7fa73ec --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@smithy/middleware-retry/dist-types/ts3.4/delayDecider.d.ts @@ -0,0 +1,5 @@ +/** + * @internal + * Calculate a capped, fully-jittered exponential backoff time. + */ +export declare const defaultDelayDecider: (delayBase: number, attempts: number) => number; diff --git a/amplify/functions/downloadDocument/node_modules/@smithy/middleware-retry/dist-types/ts3.4/index.d.ts b/amplify/functions/downloadDocument/node_modules/@smithy/middleware-retry/dist-types/ts3.4/index.d.ts new file mode 100644 index 0000000..e366bbb --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@smithy/middleware-retry/dist-types/ts3.4/index.d.ts @@ -0,0 +1,7 @@ +export * from "./AdaptiveRetryStrategy"; +export * from "./StandardRetryStrategy"; +export * from "./configurations"; +export * from "./delayDecider"; +export * from "./omitRetryHeadersMiddleware"; +export * from "./retryDecider"; +export * from "./retryMiddleware"; diff --git a/amplify/functions/downloadDocument/node_modules/@smithy/middleware-retry/dist-types/ts3.4/isStreamingPayload/isStreamingPayload.browser.d.ts b/amplify/functions/downloadDocument/node_modules/@smithy/middleware-retry/dist-types/ts3.4/isStreamingPayload/isStreamingPayload.browser.d.ts new file mode 100644 index 0000000..2a4d542 --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@smithy/middleware-retry/dist-types/ts3.4/isStreamingPayload/isStreamingPayload.browser.d.ts @@ -0,0 +1,5 @@ +import { HttpRequest } from 
"@smithy/protocol-http"; +/** + * @internal + */ +export declare const isStreamingPayload: (request: HttpRequest) => boolean; diff --git a/amplify/functions/downloadDocument/node_modules/@smithy/middleware-retry/dist-types/ts3.4/isStreamingPayload/isStreamingPayload.d.ts b/amplify/functions/downloadDocument/node_modules/@smithy/middleware-retry/dist-types/ts3.4/isStreamingPayload/isStreamingPayload.d.ts new file mode 100644 index 0000000..2a4d542 --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@smithy/middleware-retry/dist-types/ts3.4/isStreamingPayload/isStreamingPayload.d.ts @@ -0,0 +1,5 @@ +import { HttpRequest } from "@smithy/protocol-http"; +/** + * @internal + */ +export declare const isStreamingPayload: (request: HttpRequest) => boolean; diff --git a/amplify/functions/downloadDocument/node_modules/@smithy/middleware-retry/dist-types/ts3.4/omitRetryHeadersMiddleware.d.ts b/amplify/functions/downloadDocument/node_modules/@smithy/middleware-retry/dist-types/ts3.4/omitRetryHeadersMiddleware.d.ts new file mode 100644 index 0000000..abd8f71 --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@smithy/middleware-retry/dist-types/ts3.4/omitRetryHeadersMiddleware.d.ts @@ -0,0 +1,13 @@ +import { FinalizeHandler, MetadataBearer, Pluggable, RelativeMiddlewareOptions } from "@smithy/types"; +/** + * @internal + */ +export declare const omitRetryHeadersMiddleware: () => (next: FinalizeHandler) => FinalizeHandler; +/** + * @internal + */ +export declare const omitRetryHeadersMiddlewareOptions: RelativeMiddlewareOptions; +/** + * @internal + */ +export declare const getOmitRetryHeadersPlugin: (options: unknown) => Pluggable; diff --git a/amplify/functions/downloadDocument/node_modules/@smithy/middleware-retry/dist-types/ts3.4/retryDecider.d.ts b/amplify/functions/downloadDocument/node_modules/@smithy/middleware-retry/dist-types/ts3.4/retryDecider.d.ts new file mode 100644 index 0000000..c00661a --- /dev/null +++ 
b/amplify/functions/downloadDocument/node_modules/@smithy/middleware-retry/dist-types/ts3.4/retryDecider.d.ts @@ -0,0 +1,6 @@ +import { SdkError } from "@smithy/types"; +/** + * @internal + * @deprecated this is only used in the deprecated StandardRetryStrategy. Do not use in new code. + */ +export declare const defaultRetryDecider: (error: SdkError) => boolean; diff --git a/amplify/functions/downloadDocument/node_modules/@smithy/middleware-retry/dist-types/ts3.4/retryMiddleware.d.ts b/amplify/functions/downloadDocument/node_modules/@smithy/middleware-retry/dist-types/ts3.4/retryMiddleware.d.ts new file mode 100644 index 0000000..137dbf1 --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@smithy/middleware-retry/dist-types/ts3.4/retryMiddleware.d.ts @@ -0,0 +1,18 @@ +import { AbsoluteLocation, FinalizeHandler, FinalizeRequestHandlerOptions, HandlerExecutionContext, MetadataBearer, Pluggable } from "@smithy/types"; +import { RetryResolvedConfig } from "./configurations"; +/** + * @internal + */ +export declare const retryMiddleware: (options: RetryResolvedConfig) => (next: FinalizeHandler, context: HandlerExecutionContext) => FinalizeHandler; +/** + * @internal + */ +export declare const retryMiddlewareOptions: FinalizeRequestHandlerOptions & AbsoluteLocation; +/** + * @internal + */ +export declare const getRetryPlugin: (options: RetryResolvedConfig) => Pluggable; +/** + * @internal + */ +export declare const getRetryAfterHint: (response: unknown) => Date | undefined; diff --git a/amplify/functions/downloadDocument/node_modules/@smithy/middleware-retry/dist-types/ts3.4/types.d.ts b/amplify/functions/downloadDocument/node_modules/@smithy/middleware-retry/dist-types/ts3.4/types.d.ts new file mode 100644 index 0000000..06775c6 --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@smithy/middleware-retry/dist-types/ts3.4/types.d.ts @@ -0,0 +1,65 @@ +import { SdkError } from "@smithy/types"; +/** + * Determines whether an error is 
retryable based on the number of retries + * already attempted, the HTTP status code, and the error received (if any). + * + * @param error - The error encountered. + * + * @deprecated + * @internal + */ +export interface RetryDecider { + (error: SdkError): boolean; +} +/** + * Determines the number of milliseconds to wait before retrying an action. + * + * @param delayBase - The base delay (in milliseconds). + * @param attempts - The number of times the action has already been tried. + * + * @deprecated + * @internal + */ +export interface DelayDecider { + (delayBase: number, attempts: number): number; +} +/** + * Interface that specifies the retry quota behavior. + * @deprecated + * @internal + */ +export interface RetryQuota { + /** + * returns true if retry tokens are available from the retry quota bucket. + */ + hasRetryTokens: (error: SdkError) => boolean; + /** + * returns token amount from the retry quota bucket. + * throws error is retry tokens are not available. + */ + retrieveRetryTokens: (error: SdkError) => number; + /** + * releases tokens back to the retry quota. + */ + releaseRetryTokens: (releaseCapacityAmount?: number) => void; +} +/** + * @deprecated + * @internal + */ +export interface RateLimiter { + /** + * If there is sufficient capacity (tokens) available, it immediately returns. + * If there is not sufficient capacity, it will either sleep a certain amount + * of time until the rate limiter can retrieve a token from its token bucket + * or raise an exception indicating there is insufficient capacity. + */ + getSendToken: () => Promise; + /** + * Updates the client sending rate based on response. + * If the response was successful, the capacity and fill rate are increased. + * If the response was a throttling response, the capacity and fill rate are + * decreased. Transient errors do not affect the rate limiter. 
+ */ + updateClientSendingRate: (response: any) => void; +} diff --git a/amplify/functions/downloadDocument/node_modules/@smithy/middleware-retry/dist-types/ts3.4/util.d.ts b/amplify/functions/downloadDocument/node_modules/@smithy/middleware-retry/dist-types/ts3.4/util.d.ts new file mode 100644 index 0000000..7684a9f --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@smithy/middleware-retry/dist-types/ts3.4/util.d.ts @@ -0,0 +1,2 @@ +import { SdkError } from "@smithy/types"; +export declare const asSdkError: (error: unknown) => SdkError; diff --git a/amplify/functions/downloadDocument/node_modules/@smithy/middleware-retry/dist-types/types.d.ts b/amplify/functions/downloadDocument/node_modules/@smithy/middleware-retry/dist-types/types.d.ts new file mode 100644 index 0000000..8f22712 --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@smithy/middleware-retry/dist-types/types.d.ts @@ -0,0 +1,65 @@ +import { SdkError } from "@smithy/types"; +/** + * Determines whether an error is retryable based on the number of retries + * already attempted, the HTTP status code, and the error received (if any). + * + * @param error - The error encountered. + * + * @deprecated + * @internal + */ +export interface RetryDecider { + (error: SdkError): boolean; +} +/** + * Determines the number of milliseconds to wait before retrying an action. + * + * @param delayBase - The base delay (in milliseconds). + * @param attempts - The number of times the action has already been tried. + * + * @deprecated + * @internal + */ +export interface DelayDecider { + (delayBase: number, attempts: number): number; +} +/** + * Interface that specifies the retry quota behavior. + * @deprecated + * @internal + */ +export interface RetryQuota { + /** + * returns true if retry tokens are available from the retry quota bucket. + */ + hasRetryTokens: (error: SdkError) => boolean; + /** + * returns token amount from the retry quota bucket. 
+ * throws error is retry tokens are not available. + */ + retrieveRetryTokens: (error: SdkError) => number; + /** + * releases tokens back to the retry quota. + */ + releaseRetryTokens: (releaseCapacityAmount?: number) => void; +} +/** + * @deprecated + * @internal + */ +export interface RateLimiter { + /** + * If there is sufficient capacity (tokens) available, it immediately returns. + * If there is not sufficient capacity, it will either sleep a certain amount + * of time until the rate limiter can retrieve a token from its token bucket + * or raise an exception indicating there is insufficient capacity. + */ + getSendToken: () => Promise; + /** + * Updates the client sending rate based on response. + * If the response was successful, the capacity and fill rate are increased. + * If the response was a throttling response, the capacity and fill rate are + * decreased. Transient errors do not affect the rate limiter. + */ + updateClientSendingRate: (response: any) => void; +} diff --git a/amplify/functions/downloadDocument/node_modules/@smithy/middleware-retry/dist-types/util.d.ts b/amplify/functions/downloadDocument/node_modules/@smithy/middleware-retry/dist-types/util.d.ts new file mode 100644 index 0000000..00939b8 --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@smithy/middleware-retry/dist-types/util.d.ts @@ -0,0 +1,2 @@ +import { SdkError } from "@smithy/types"; +export declare const asSdkError: (error: unknown) => SdkError; diff --git a/amplify/functions/downloadDocument/node_modules/@smithy/middleware-retry/package.json b/amplify/functions/downloadDocument/node_modules/@smithy/middleware-retry/package.json new file mode 100644 index 0000000..b029e53 --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@smithy/middleware-retry/package.json @@ -0,0 +1,79 @@ +{ + "name": "@smithy/middleware-retry", + "version": "4.1.3", + "scripts": { + "build": "concurrently 'yarn:build:cjs' 'yarn:build:es' 'yarn:build:types && yarn 
build:types:downlevel'", + "build:cjs": "node ../../scripts/inline middleware-retry", + "build:es": "yarn g:tsc -p tsconfig.es.json", + "build:types": "yarn g:tsc -p tsconfig.types.json", + "build:types:downlevel": "rimraf dist-types/ts3.4 && downlevel-dts dist-types dist-types/ts3.4", + "stage-release": "rimraf ./.release && yarn pack && mkdir ./.release && tar zxvf ./package.tgz --directory ./.release && rm ./package.tgz", + "clean": "rimraf ./dist-* && rimraf *.tsbuildinfo || exit 0", + "lint": "eslint -c ../../.eslintrc.js \"src/**/*.ts\"", + "format": "prettier --config ../../prettier.config.js --ignore-path ../../.prettierignore --write \"**/*.{ts,md,json}\"", + "extract:docs": "api-extractor run --local", + "test": "yarn g:vitest run", + "test:integration": "yarn g:vitest run -c vitest.config.integ.ts", + "test:watch": "yarn g:vitest watch", + "test:integration:watch": "yarn g:vitest watch -c vitest.config.integ.ts" + }, + "main": "./dist-cjs/index.js", + "module": "./dist-es/index.js", + "types": "./dist-types/index.d.ts", + "browser": { + "./dist-es/isStreamingPayload/isStreamingPayload": "./dist-es/isStreamingPayload/isStreamingPayload.browser" + }, + "react-native": { + "./dist-cjs/isStreamingPayload/isStreamingPayload": "./dist-cjs/isStreamingPayload/isStreamingPayload.browser", + "./dist-es/isStreamingPayload/isStreamingPayload": "./dist-es/isStreamingPayload/isStreamingPayload.browser" + }, + "author": { + "name": "AWS SDK for JavaScript Team", + "url": "https://aws.amazon.com/javascript/" + }, + "license": "Apache-2.0", + "dependencies": { + "@smithy/node-config-provider": "^4.0.2", + "@smithy/protocol-http": "^5.1.0", + "@smithy/service-error-classification": "^4.0.3", + "@smithy/smithy-client": "^4.2.2", + "@smithy/types": "^4.2.0", + "@smithy/util-middleware": "^4.0.2", + "@smithy/util-retry": "^4.0.3", + "tslib": "^2.6.2", + "uuid": "^9.0.1" + }, + "devDependencies": { + "@smithy/util-test": "^0.2.8", + "@types/uuid": "^8.3.0", + "concurrently": 
"7.0.0", + "downlevel-dts": "0.10.1", + "rimraf": "3.0.2", + "typedoc": "0.23.23" + }, + "engines": { + "node": ">=18.0.0" + }, + "typesVersions": { + "<4.0": { + "dist-types/*": [ + "dist-types/ts3.4/*" + ] + } + }, + "files": [ + "dist-*/**" + ], + "homepage": "https://github.com/smithy-lang/smithy-typescript/tree/main/packages/middleware-retry", + "repository": { + "type": "git", + "url": "https://github.com/smithy-lang/smithy-typescript.git", + "directory": "packages/middleware-retry" + }, + "typedoc": { + "entryPoint": "src/index.ts" + }, + "publishConfig": { + "directory": ".release/package" + } +} \ No newline at end of file diff --git a/amplify/functions/downloadDocument/node_modules/@smithy/middleware-serde/LICENSE b/amplify/functions/downloadDocument/node_modules/@smithy/middleware-serde/LICENSE new file mode 100644 index 0000000..e907b58 --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@smithy/middleware-serde/LICENSE @@ -0,0 +1,201 @@ + Apache License + Version 2.0, January 2004 + http://www.apache.org/licenses/ + + TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION + + 1. Definitions. + + "License" shall mean the terms and conditions for use, reproduction, + and distribution as defined by Sections 1 through 9 of this document. + + "Licensor" shall mean the copyright owner or entity authorized by + the copyright owner that is granting the License. + + "Legal Entity" shall mean the union of the acting entity and all + other entities that control, are controlled by, or are under common + control with that entity. For the purposes of this definition, + "control" means (i) the power, direct or indirect, to cause the + direction or management of such entity, whether by contract or + otherwise, or (ii) ownership of fifty percent (50%) or more of the + outstanding shares, or (iii) beneficial ownership of such entity. + + "You" (or "Your") shall mean an individual or Legal Entity + exercising permissions granted by this License. 
+ + "Source" form shall mean the preferred form for making modifications, + including but not limited to software source code, documentation + source, and configuration files. + + "Object" form shall mean any form resulting from mechanical + transformation or translation of a Source form, including but + not limited to compiled object code, generated documentation, + and conversions to other media types. + + "Work" shall mean the work of authorship, whether in Source or + Object form, made available under the License, as indicated by a + copyright notice that is included in or attached to the work + (an example is provided in the Appendix below). + + "Derivative Works" shall mean any work, whether in Source or Object + form, that is based on (or derived from) the Work and for which the + editorial revisions, annotations, elaborations, or other modifications + represent, as a whole, an original work of authorship. For the purposes + of this License, Derivative Works shall not include works that remain + separable from, or merely link (or bind by name) to the interfaces of, + the Work and Derivative Works thereof. + + "Contribution" shall mean any work of authorship, including + the original version of the Work and any modifications or additions + to that Work or Derivative Works thereof, that is intentionally + submitted to Licensor for inclusion in the Work by the copyright owner + or by an individual or Legal Entity authorized to submit on behalf of + the copyright owner. 
For the purposes of this definition, "submitted" + means any form of electronic, verbal, or written communication sent + to the Licensor or its representatives, including but not limited to + communication on electronic mailing lists, source code control systems, + and issue tracking systems that are managed by, or on behalf of, the + Licensor for the purpose of discussing and improving the Work, but + excluding communication that is conspicuously marked or otherwise + designated in writing by the copyright owner as "Not a Contribution." + + "Contributor" shall mean Licensor and any individual or Legal Entity + on behalf of whom a Contribution has been received by Licensor and + subsequently incorporated within the Work. + + 2. Grant of Copyright License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + copyright license to reproduce, prepare Derivative Works of, + publicly display, publicly perform, sublicense, and distribute the + Work and such Derivative Works in Source or Object form. + + 3. Grant of Patent License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + (except as stated in this section) patent license to make, have made, + use, offer to sell, sell, import, and otherwise transfer the Work, + where such license applies only to those patent claims licensable + by such Contributor that are necessarily infringed by their + Contribution(s) alone or by combination of their Contribution(s) + with the Work to which such Contribution(s) was submitted. 
If You + institute patent litigation against any entity (including a + cross-claim or counterclaim in a lawsuit) alleging that the Work + or a Contribution incorporated within the Work constitutes direct + or contributory patent infringement, then any patent licenses + granted to You under this License for that Work shall terminate + as of the date such litigation is filed. + + 4. Redistribution. You may reproduce and distribute copies of the + Work or Derivative Works thereof in any medium, with or without + modifications, and in Source or Object form, provided that You + meet the following conditions: + + (a) You must give any other recipients of the Work or + Derivative Works a copy of this License; and + + (b) You must cause any modified files to carry prominent notices + stating that You changed the files; and + + (c) You must retain, in the Source form of any Derivative Works + that You distribute, all copyright, patent, trademark, and + attribution notices from the Source form of the Work, + excluding those notices that do not pertain to any part of + the Derivative Works; and + + (d) If the Work includes a "NOTICE" text file as part of its + distribution, then any Derivative Works that You distribute must + include a readable copy of the attribution notices contained + within such NOTICE file, excluding those notices that do not + pertain to any part of the Derivative Works, in at least one + of the following places: within a NOTICE text file distributed + as part of the Derivative Works; within the Source form or + documentation, if provided along with the Derivative Works; or, + within a display generated by the Derivative Works, if and + wherever such third-party notices normally appear. The contents + of the NOTICE file are for informational purposes only and + do not modify the License. 
You may add Your own attribution + notices within Derivative Works that You distribute, alongside + or as an addendum to the NOTICE text from the Work, provided + that such additional attribution notices cannot be construed + as modifying the License. + + You may add Your own copyright statement to Your modifications and + may provide additional or different license terms and conditions + for use, reproduction, or distribution of Your modifications, or + for any such Derivative Works as a whole, provided Your use, + reproduction, and distribution of the Work otherwise complies with + the conditions stated in this License. + + 5. Submission of Contributions. Unless You explicitly state otherwise, + any Contribution intentionally submitted for inclusion in the Work + by You to the Licensor shall be under the terms and conditions of + this License, without any additional terms or conditions. + Notwithstanding the above, nothing herein shall supersede or modify + the terms of any separate license agreement you may have executed + with Licensor regarding such Contributions. + + 6. Trademarks. This License does not grant permission to use the trade + names, trademarks, service marks, or product names of the Licensor, + except as required for reasonable and customary use in describing the + origin of the Work and reproducing the content of the NOTICE file. + + 7. Disclaimer of Warranty. Unless required by applicable law or + agreed to in writing, Licensor provides the Work (and each + Contributor provides its Contributions) on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or + implied, including, without limitation, any warranties or conditions + of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A + PARTICULAR PURPOSE. You are solely responsible for determining the + appropriateness of using or redistributing the Work and assume any + risks associated with Your exercise of permissions under this License. + + 8. 
Limitation of Liability. In no event and under no legal theory, + whether in tort (including negligence), contract, or otherwise, + unless required by applicable law (such as deliberate and grossly + negligent acts) or agreed to in writing, shall any Contributor be + liable to You for damages, including any direct, indirect, special, + incidental, or consequential damages of any character arising as a + result of this License or out of the use or inability to use the + Work (including but not limited to damages for loss of goodwill, + work stoppage, computer failure or malfunction, or any and all + other commercial damages or losses), even if such Contributor + has been advised of the possibility of such damages. + + 9. Accepting Warranty or Additional Liability. While redistributing + the Work or Derivative Works thereof, You may choose to offer, + and charge a fee for, acceptance of support, warranty, indemnity, + or other liability obligations and/or rights consistent with this + License. However, in accepting such obligations, You may act only + on Your own behalf and on Your sole responsibility, not on behalf + of any other Contributor, and only if You agree to indemnify, + defend, and hold each Contributor harmless for any liability + incurred by, or claims asserted against, such Contributor by reason + of your accepting any such warranty or additional liability. + + END OF TERMS AND CONDITIONS + + APPENDIX: How to apply the Apache License to your work. + + To apply the Apache License to your work, attach the following + boilerplate notice, with the fields enclosed by brackets "{}" + replaced with your own identifying information. (Don't include + the brackets!) The text should be enclosed in the appropriate + comment syntax for the file format. We also recommend that a + file or class name and description of purpose be included on the + same "printed page" as the copyright notice for easier + identification within third-party archives. 
+ + Copyright 2019 Amazon.com, Inc. or its affiliates. All Rights Reserved. + + Licensed under the Apache License, Version 2.0 (the "License"); + you may not use this file except in compliance with the License. + You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + + Unless required by applicable law or agreed to in writing, software + distributed under the License is distributed on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + See the License for the specific language governing permissions and + limitations under the License. diff --git a/amplify/functions/downloadDocument/node_modules/@smithy/middleware-serde/README.md b/amplify/functions/downloadDocument/node_modules/@smithy/middleware-serde/README.md new file mode 100644 index 0000000..d2bbfa6 --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@smithy/middleware-serde/README.md @@ -0,0 +1,4 @@ +# @smithy/middleware-serde + +[![NPM version](https://img.shields.io/npm/v/@smithy/middleware-serde/latest.svg)](https://www.npmjs.com/package/@smithy/middleware-serde) +[![NPM downloads](https://img.shields.io/npm/dm/@smithy/middleware-serde.svg)](https://www.npmjs.com/package/@smithy/middleware-serde) diff --git a/amplify/functions/downloadDocument/node_modules/@smithy/middleware-serde/dist-cjs/deserializerMiddleware.js b/amplify/functions/downloadDocument/node_modules/@smithy/middleware-serde/dist-cjs/deserializerMiddleware.js new file mode 100644 index 0000000..532e610 --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@smithy/middleware-serde/dist-cjs/deserializerMiddleware.js @@ -0,0 +1 @@ +module.exports = require("./index.js"); \ No newline at end of file diff --git a/amplify/functions/downloadDocument/node_modules/@smithy/middleware-serde/dist-cjs/index.js b/amplify/functions/downloadDocument/node_modules/@smithy/middleware-serde/dist-cjs/index.js new file mode 100644 index 0000000..04fa6f3 --- 
/dev/null +++ b/amplify/functions/downloadDocument/node_modules/@smithy/middleware-serde/dist-cjs/index.js @@ -0,0 +1,109 @@ +var __defProp = Object.defineProperty; +var __getOwnPropDesc = Object.getOwnPropertyDescriptor; +var __getOwnPropNames = Object.getOwnPropertyNames; +var __hasOwnProp = Object.prototype.hasOwnProperty; +var __name = (target, value) => __defProp(target, "name", { value, configurable: true }); +var __export = (target, all) => { + for (var name in all) + __defProp(target, name, { get: all[name], enumerable: true }); +}; +var __copyProps = (to, from, except, desc) => { + if (from && typeof from === "object" || typeof from === "function") { + for (let key of __getOwnPropNames(from)) + if (!__hasOwnProp.call(to, key) && key !== except) + __defProp(to, key, { get: () => from[key], enumerable: !(desc = __getOwnPropDesc(from, key)) || desc.enumerable }); + } + return to; +}; +var __toCommonJS = (mod) => __copyProps(__defProp({}, "__esModule", { value: true }), mod); + +// src/index.ts +var src_exports = {}; +__export(src_exports, { + deserializerMiddleware: () => deserializerMiddleware, + deserializerMiddlewareOption: () => deserializerMiddlewareOption, + getSerdePlugin: () => getSerdePlugin, + serializerMiddleware: () => serializerMiddleware, + serializerMiddlewareOption: () => serializerMiddlewareOption +}); +module.exports = __toCommonJS(src_exports); + +// src/deserializerMiddleware.ts +var deserializerMiddleware = /* @__PURE__ */ __name((options, deserializer) => (next, context) => async (args) => { + const { response } = await next(args); + try { + const parsed = await deserializer(response, options); + return { + response, + output: parsed + }; + } catch (error) { + Object.defineProperty(error, "$response", { + value: response + }); + if (!("$metadata" in error)) { + const hint = `Deserialization error: to see the raw response, inspect the hidden field {error}.$response on this object.`; + try { + error.message += "\n " + hint; + } catch (e) { 
+ if (!context.logger || context.logger?.constructor?.name === "NoOpLogger") { + console.warn(hint); + } else { + context.logger?.warn?.(hint); + } + } + if (typeof error.$responseBodyText !== "undefined") { + if (error.$response) { + error.$response.body = error.$responseBodyText; + } + } + } + throw error; + } +}, "deserializerMiddleware"); + +// src/serializerMiddleware.ts +var serializerMiddleware = /* @__PURE__ */ __name((options, serializer) => (next, context) => async (args) => { + const endpoint = context.endpointV2?.url && options.urlParser ? async () => options.urlParser(context.endpointV2.url) : options.endpoint; + if (!endpoint) { + throw new Error("No valid endpoint provider available."); + } + const request = await serializer(args.input, { ...options, endpoint }); + return next({ + ...args, + request + }); +}, "serializerMiddleware"); + +// src/serdePlugin.ts +var deserializerMiddlewareOption = { + name: "deserializerMiddleware", + step: "deserialize", + tags: ["DESERIALIZER"], + override: true +}; +var serializerMiddlewareOption = { + name: "serializerMiddleware", + step: "serialize", + tags: ["SERIALIZER"], + override: true +}; +function getSerdePlugin(config, serializer, deserializer) { + return { + applyToStack: (commandStack) => { + commandStack.add(deserializerMiddleware(config, deserializer), deserializerMiddlewareOption); + commandStack.add(serializerMiddleware(config, serializer), serializerMiddlewareOption); + } + }; +} +__name(getSerdePlugin, "getSerdePlugin"); +// Annotate the CommonJS export names for ESM import in node: + +0 && (module.exports = { + deserializerMiddleware, + deserializerMiddlewareOption, + serializerMiddlewareOption, + getSerdePlugin, + serializerMiddleware +}); + diff --git a/amplify/functions/downloadDocument/node_modules/@smithy/middleware-serde/dist-cjs/serdePlugin.js b/amplify/functions/downloadDocument/node_modules/@smithy/middleware-serde/dist-cjs/serdePlugin.js new file mode 100644 index 0000000..532e610 --- 
/dev/null +++ b/amplify/functions/downloadDocument/node_modules/@smithy/middleware-serde/dist-cjs/serdePlugin.js @@ -0,0 +1 @@ +module.exports = require("./index.js"); \ No newline at end of file diff --git a/amplify/functions/downloadDocument/node_modules/@smithy/middleware-serde/dist-cjs/serializerMiddleware.js b/amplify/functions/downloadDocument/node_modules/@smithy/middleware-serde/dist-cjs/serializerMiddleware.js new file mode 100644 index 0000000..532e610 --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@smithy/middleware-serde/dist-cjs/serializerMiddleware.js @@ -0,0 +1 @@ +module.exports = require("./index.js"); \ No newline at end of file diff --git a/amplify/functions/downloadDocument/node_modules/@smithy/middleware-serde/dist-es/deserializerMiddleware.js b/amplify/functions/downloadDocument/node_modules/@smithy/middleware-serde/dist-es/deserializerMiddleware.js new file mode 100644 index 0000000..19c0c27 --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@smithy/middleware-serde/dist-es/deserializerMiddleware.js @@ -0,0 +1,35 @@ +export const deserializerMiddleware = (options, deserializer) => (next, context) => async (args) => { + const { response } = await next(args); + try { + const parsed = await deserializer(response, options); + return { + response, + output: parsed, + }; + } + catch (error) { + Object.defineProperty(error, "$response", { + value: response, + }); + if (!("$metadata" in error)) { + const hint = `Deserialization error: to see the raw response, inspect the hidden field {error}.$response on this object.`; + try { + error.message += "\n " + hint; + } + catch (e) { + if (!context.logger || context.logger?.constructor?.name === "NoOpLogger") { + console.warn(hint); + } + else { + context.logger?.warn?.(hint); + } + } + if (typeof error.$responseBodyText !== "undefined") { + if (error.$response) { + error.$response.body = error.$responseBodyText; + } + } + } + throw error; + } +}; diff --git 
a/amplify/functions/downloadDocument/node_modules/@smithy/middleware-serde/dist-es/index.js b/amplify/functions/downloadDocument/node_modules/@smithy/middleware-serde/dist-es/index.js new file mode 100644 index 0000000..166a2be --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@smithy/middleware-serde/dist-es/index.js @@ -0,0 +1,3 @@ +export * from "./deserializerMiddleware"; +export * from "./serdePlugin"; +export * from "./serializerMiddleware"; diff --git a/amplify/functions/downloadDocument/node_modules/@smithy/middleware-serde/dist-es/serdePlugin.js b/amplify/functions/downloadDocument/node_modules/@smithy/middleware-serde/dist-es/serdePlugin.js new file mode 100644 index 0000000..be2a06e --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@smithy/middleware-serde/dist-es/serdePlugin.js @@ -0,0 +1,22 @@ +import { deserializerMiddleware } from "./deserializerMiddleware"; +import { serializerMiddleware } from "./serializerMiddleware"; +export const deserializerMiddlewareOption = { + name: "deserializerMiddleware", + step: "deserialize", + tags: ["DESERIALIZER"], + override: true, +}; +export const serializerMiddlewareOption = { + name: "serializerMiddleware", + step: "serialize", + tags: ["SERIALIZER"], + override: true, +}; +export function getSerdePlugin(config, serializer, deserializer) { + return { + applyToStack: (commandStack) => { + commandStack.add(deserializerMiddleware(config, deserializer), deserializerMiddlewareOption); + commandStack.add(serializerMiddleware(config, serializer), serializerMiddlewareOption); + }, + }; +} diff --git a/amplify/functions/downloadDocument/node_modules/@smithy/middleware-serde/dist-es/serializerMiddleware.js b/amplify/functions/downloadDocument/node_modules/@smithy/middleware-serde/dist-es/serializerMiddleware.js new file mode 100644 index 0000000..b02b93d --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@smithy/middleware-serde/dist-es/serializerMiddleware.js @@ -0,0 
+1,13 @@ +export const serializerMiddleware = (options, serializer) => (next, context) => async (args) => { + const endpoint = context.endpointV2?.url && options.urlParser + ? async () => options.urlParser(context.endpointV2.url) + : options.endpoint; + if (!endpoint) { + throw new Error("No valid endpoint provider available."); + } + const request = await serializer(args.input, { ...options, endpoint }); + return next({ + ...args, + request, + }); +}; diff --git a/amplify/functions/downloadDocument/node_modules/@smithy/middleware-serde/dist-types/deserializerMiddleware.d.ts b/amplify/functions/downloadDocument/node_modules/@smithy/middleware-serde/dist-types/deserializerMiddleware.d.ts new file mode 100644 index 0000000..4d81141 --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@smithy/middleware-serde/dist-types/deserializerMiddleware.d.ts @@ -0,0 +1,5 @@ +import { DeserializeMiddleware, ResponseDeserializer, SerdeContext, SerdeFunctions } from "@smithy/types"; +/** + * @internal + */ +export declare const deserializerMiddleware: (options: SerdeFunctions, deserializer: ResponseDeserializer) => DeserializeMiddleware; diff --git a/amplify/functions/downloadDocument/node_modules/@smithy/middleware-serde/dist-types/index.d.ts b/amplify/functions/downloadDocument/node_modules/@smithy/middleware-serde/dist-types/index.d.ts new file mode 100644 index 0000000..166a2be --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@smithy/middleware-serde/dist-types/index.d.ts @@ -0,0 +1,3 @@ +export * from "./deserializerMiddleware"; +export * from "./serdePlugin"; +export * from "./serializerMiddleware"; diff --git a/amplify/functions/downloadDocument/node_modules/@smithy/middleware-serde/dist-types/serdePlugin.d.ts b/amplify/functions/downloadDocument/node_modules/@smithy/middleware-serde/dist-types/serdePlugin.d.ts new file mode 100644 index 0000000..bf1091a --- /dev/null +++ 
b/amplify/functions/downloadDocument/node_modules/@smithy/middleware-serde/dist-types/serdePlugin.d.ts @@ -0,0 +1,12 @@ +import { DeserializeHandlerOptions, Endpoint, MetadataBearer, Pluggable, Provider, RequestSerializer, ResponseDeserializer, SerdeContext, SerdeFunctions, SerializeHandlerOptions, UrlParser } from "@smithy/types"; +export declare const deserializerMiddlewareOption: DeserializeHandlerOptions; +export declare const serializerMiddlewareOption: SerializeHandlerOptions; +export type V1OrV2Endpoint = { + urlParser?: UrlParser; + endpoint?: Provider; +}; +/** + * @internal + * + */ +export declare function getSerdePlugin(config: V1OrV2Endpoint & SerdeFunctions, serializer: RequestSerializer, deserializer: ResponseDeserializer): Pluggable; diff --git a/amplify/functions/downloadDocument/node_modules/@smithy/middleware-serde/dist-types/serializerMiddleware.d.ts b/amplify/functions/downloadDocument/node_modules/@smithy/middleware-serde/dist-types/serializerMiddleware.d.ts new file mode 100644 index 0000000..5437298 --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@smithy/middleware-serde/dist-types/serializerMiddleware.d.ts @@ -0,0 +1,6 @@ +import { RequestSerializer, SerdeContext, SerdeFunctions, SerializeMiddleware } from "@smithy/types"; +import type { V1OrV2Endpoint } from "./serdePlugin"; +/** + * @internal + */ +export declare const serializerMiddleware: (options: V1OrV2Endpoint & SerdeFunctions, serializer: RequestSerializer) => SerializeMiddleware; diff --git a/amplify/functions/downloadDocument/node_modules/@smithy/middleware-serde/dist-types/ts3.4/deserializerMiddleware.d.ts b/amplify/functions/downloadDocument/node_modules/@smithy/middleware-serde/dist-types/ts3.4/deserializerMiddleware.d.ts new file mode 100644 index 0000000..b0ed492 --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@smithy/middleware-serde/dist-types/ts3.4/deserializerMiddleware.d.ts @@ -0,0 +1,5 @@ +import { DeserializeMiddleware, 
ResponseDeserializer, SerdeContext, SerdeFunctions } from "@smithy/types"; +/** + * @internal + */ +export declare const deserializerMiddleware: (options: SerdeFunctions, deserializer: ResponseDeserializer) => DeserializeMiddleware; diff --git a/amplify/functions/downloadDocument/node_modules/@smithy/middleware-serde/dist-types/ts3.4/index.d.ts b/amplify/functions/downloadDocument/node_modules/@smithy/middleware-serde/dist-types/ts3.4/index.d.ts new file mode 100644 index 0000000..ec66df4 --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@smithy/middleware-serde/dist-types/ts3.4/index.d.ts @@ -0,0 +1,3 @@ +export * from "./deserializerMiddleware"; +export * from "./serdePlugin"; +export * from "./serializerMiddleware"; diff --git a/amplify/functions/downloadDocument/node_modules/@smithy/middleware-serde/dist-types/ts3.4/serdePlugin.d.ts b/amplify/functions/downloadDocument/node_modules/@smithy/middleware-serde/dist-types/ts3.4/serdePlugin.d.ts new file mode 100644 index 0000000..c381721 --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@smithy/middleware-serde/dist-types/ts3.4/serdePlugin.d.ts @@ -0,0 +1,12 @@ +import { DeserializeHandlerOptions, Endpoint, MetadataBearer, Pluggable, Provider, RequestSerializer, ResponseDeserializer, SerdeContext, SerdeFunctions, SerializeHandlerOptions, UrlParser } from "@smithy/types"; +export declare const deserializerMiddlewareOption: DeserializeHandlerOptions; +export declare const serializerMiddlewareOption: SerializeHandlerOptions; +export type V1OrV2Endpoint = { + urlParser?: UrlParser; + endpoint?: Provider; +}; +/** + * @internal + * + */ +export declare function getSerdePlugin(config: V1OrV2Endpoint & SerdeFunctions, serializer: RequestSerializer, deserializer: ResponseDeserializer): Pluggable; diff --git a/amplify/functions/downloadDocument/node_modules/@smithy/middleware-serde/dist-types/ts3.4/serializerMiddleware.d.ts 
b/amplify/functions/downloadDocument/node_modules/@smithy/middleware-serde/dist-types/ts3.4/serializerMiddleware.d.ts new file mode 100644 index 0000000..914b3b6 --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@smithy/middleware-serde/dist-types/ts3.4/serializerMiddleware.d.ts @@ -0,0 +1,6 @@ +import { RequestSerializer, SerdeContext, SerdeFunctions, SerializeMiddleware } from "@smithy/types"; +import { V1OrV2Endpoint } from "./serdePlugin"; +/** + * @internal + */ +export declare const serializerMiddleware: (options: V1OrV2Endpoint & SerdeFunctions, serializer: RequestSerializer) => SerializeMiddleware; diff --git a/amplify/functions/downloadDocument/node_modules/@smithy/middleware-serde/package.json b/amplify/functions/downloadDocument/node_modules/@smithy/middleware-serde/package.json new file mode 100644 index 0000000..042be08 --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@smithy/middleware-serde/package.json @@ -0,0 +1,63 @@ +{ + "name": "@smithy/middleware-serde", + "version": "4.0.3", + "scripts": { + "build": "concurrently 'yarn:build:cjs' 'yarn:build:es' 'yarn:build:types && yarn build:types:downlevel'", + "build:cjs": "node ../../scripts/inline middleware-serde", + "build:es": "yarn g:tsc -p tsconfig.es.json", + "build:types": "yarn g:tsc -p tsconfig.types.json", + "build:types:downlevel": "rimraf dist-types/ts3.4 && downlevel-dts dist-types dist-types/ts3.4", + "stage-release": "rimraf ./.release && yarn pack && mkdir ./.release && tar zxvf ./package.tgz --directory ./.release && rm ./package.tgz", + "clean": "rimraf ./dist-* && rimraf *.tsbuildinfo || exit 0", + "lint": "eslint -c ../../.eslintrc.js \"src/**/*.ts\"", + "format": "prettier --config ../../prettier.config.js --ignore-path ../../.prettierignore --write \"**/*.{ts,md,json}\"", + "test": "yarn g:vitest run", + "test:integration": "yarn g:vitest run -c vitest.config.integ.ts", + "test:watch": "yarn g:vitest watch", + "test:integration:watch": "yarn 
g:vitest watch -c vitest.config.integ.ts" + }, + "main": "./dist-cjs/index.js", + "module": "./dist-es/index.js", + "types": "./dist-types/index.d.ts", + "author": { + "name": "AWS SDK for JavaScript Team", + "url": "https://aws.amazon.com/javascript/" + }, + "license": "Apache-2.0", + "dependencies": { + "@smithy/types": "^4.2.0", + "tslib": "^2.6.2" + }, + "engines": { + "node": ">=18.0.0" + }, + "typesVersions": { + "<4.0": { + "dist-types/*": [ + "dist-types/ts3.4/*" + ] + } + }, + "files": [ + "dist-*/**" + ], + "homepage": "https://github.com/smithy-lang/smithy-typescript/tree/main/packages/middleware-serde", + "repository": { + "type": "git", + "url": "https://github.com/smithy-lang/smithy-typescript.git", + "directory": "packages/middleware-serde" + }, + "devDependencies": { + "@smithy/util-test": "^0.2.8", + "concurrently": "7.0.0", + "downlevel-dts": "0.10.1", + "rimraf": "3.0.2", + "typedoc": "0.23.23" + }, + "typedoc": { + "entryPoint": "src/index.ts" + }, + "publishConfig": { + "directory": ".release/package" + } +} \ No newline at end of file diff --git a/amplify/functions/downloadDocument/node_modules/@smithy/middleware-stack/LICENSE b/amplify/functions/downloadDocument/node_modules/@smithy/middleware-stack/LICENSE new file mode 100644 index 0000000..7b6491b --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@smithy/middleware-stack/LICENSE @@ -0,0 +1,201 @@ +Apache License + Version 2.0, January 2004 + http://www.apache.org/licenses/ + + TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION + + 1. Definitions. + + "License" shall mean the terms and conditions for use, reproduction, + and distribution as defined by Sections 1 through 9 of this document. + + "Licensor" shall mean the copyright owner or entity authorized by + the copyright owner that is granting the License. 
+ + "Legal Entity" shall mean the union of the acting entity and all + other entities that control, are controlled by, or are under common + control with that entity. For the purposes of this definition, + "control" means (i) the power, direct or indirect, to cause the + direction or management of such entity, whether by contract or + otherwise, or (ii) ownership of fifty percent (50%) or more of the + outstanding shares, or (iii) beneficial ownership of such entity. + + "You" (or "Your") shall mean an individual or Legal Entity + exercising permissions granted by this License. + + "Source" form shall mean the preferred form for making modifications, + including but not limited to software source code, documentation + source, and configuration files. + + "Object" form shall mean any form resulting from mechanical + transformation or translation of a Source form, including but + not limited to compiled object code, generated documentation, + and conversions to other media types. + + "Work" shall mean the work of authorship, whether in Source or + Object form, made available under the License, as indicated by a + copyright notice that is included in or attached to the work + (an example is provided in the Appendix below). + + "Derivative Works" shall mean any work, whether in Source or Object + form, that is based on (or derived from) the Work and for which the + editorial revisions, annotations, elaborations, or other modifications + represent, as a whole, an original work of authorship. For the purposes + of this License, Derivative Works shall not include works that remain + separable from, or merely link (or bind by name) to the interfaces of, + the Work and Derivative Works thereof. 
+ + "Contribution" shall mean any work of authorship, including + the original version of the Work and any modifications or additions + to that Work or Derivative Works thereof, that is intentionally + submitted to Licensor for inclusion in the Work by the copyright owner + or by an individual or Legal Entity authorized to submit on behalf of + the copyright owner. For the purposes of this definition, "submitted" + means any form of electronic, verbal, or written communication sent + to the Licensor or its representatives, including but not limited to + communication on electronic mailing lists, source code control systems, + and issue tracking systems that are managed by, or on behalf of, the + Licensor for the purpose of discussing and improving the Work, but + excluding communication that is conspicuously marked or otherwise + designated in writing by the copyright owner as "Not a Contribution." + + "Contributor" shall mean Licensor and any individual or Legal Entity + on behalf of whom a Contribution has been received by Licensor and + subsequently incorporated within the Work. + + 2. Grant of Copyright License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + copyright license to reproduce, prepare Derivative Works of, + publicly display, publicly perform, sublicense, and distribute the + Work and such Derivative Works in Source or Object form. + + 3. Grant of Patent License. 
Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + (except as stated in this section) patent license to make, have made, + use, offer to sell, sell, import, and otherwise transfer the Work, + where such license applies only to those patent claims licensable + by such Contributor that are necessarily infringed by their + Contribution(s) alone or by combination of their Contribution(s) + with the Work to which such Contribution(s) was submitted. If You + institute patent litigation against any entity (including a + cross-claim or counterclaim in a lawsuit) alleging that the Work + or a Contribution incorporated within the Work constitutes direct + or contributory patent infringement, then any patent licenses + granted to You under this License for that Work shall terminate + as of the date such litigation is filed. + + 4. Redistribution. You may reproduce and distribute copies of the + Work or Derivative Works thereof in any medium, with or without + modifications, and in Source or Object form, provided that You + meet the following conditions: + + (a) You must give any other recipients of the Work or + Derivative Works a copy of this License; and + + (b) You must cause any modified files to carry prominent notices + stating that You changed the files; and + + (c) You must retain, in the Source form of any Derivative Works + that You distribute, all copyright, patent, trademark, and + attribution notices from the Source form of the Work, + excluding those notices that do not pertain to any part of + the Derivative Works; and + + (d) If the Work includes a "NOTICE" text file as part of its + distribution, then any Derivative Works that You distribute must + include a readable copy of the attribution notices contained + within such NOTICE file, excluding those notices that do not + pertain to any part of the Derivative Works, in at least one + of 
the following places: within a NOTICE text file distributed + as part of the Derivative Works; within the Source form or + documentation, if provided along with the Derivative Works; or, + within a display generated by the Derivative Works, if and + wherever such third-party notices normally appear. The contents + of the NOTICE file are for informational purposes only and + do not modify the License. You may add Your own attribution + notices within Derivative Works that You distribute, alongside + or as an addendum to the NOTICE text from the Work, provided + that such additional attribution notices cannot be construed + as modifying the License. + + You may add Your own copyright statement to Your modifications and + may provide additional or different license terms and conditions + for use, reproduction, or distribution of Your modifications, or + for any such Derivative Works as a whole, provided Your use, + reproduction, and distribution of the Work otherwise complies with + the conditions stated in this License. + + 5. Submission of Contributions. Unless You explicitly state otherwise, + any Contribution intentionally submitted for inclusion in the Work + by You to the Licensor shall be under the terms and conditions of + this License, without any additional terms or conditions. + Notwithstanding the above, nothing herein shall supersede or modify + the terms of any separate license agreement you may have executed + with Licensor regarding such Contributions. + + 6. Trademarks. This License does not grant permission to use the trade + names, trademarks, service marks, or product names of the Licensor, + except as required for reasonable and customary use in describing the + origin of the Work and reproducing the content of the NOTICE file. + + 7. Disclaimer of Warranty. 
Unless required by applicable law or + agreed to in writing, Licensor provides the Work (and each + Contributor provides its Contributions) on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or + implied, including, without limitation, any warranties or conditions + of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A + PARTICULAR PURPOSE. You are solely responsible for determining the + appropriateness of using or redistributing the Work and assume any + risks associated with Your exercise of permissions under this License. + + 8. Limitation of Liability. In no event and under no legal theory, + whether in tort (including negligence), contract, or otherwise, + unless required by applicable law (such as deliberate and grossly + negligent acts) or agreed to in writing, shall any Contributor be + liable to You for damages, including any direct, indirect, special, + incidental, or consequential damages of any character arising as a + result of this License or out of the use or inability to use the + Work (including but not limited to damages for loss of goodwill, + work stoppage, computer failure or malfunction, or any and all + other commercial damages or losses), even if such Contributor + has been advised of the possibility of such damages. + + 9. Accepting Warranty or Additional Liability. While redistributing + the Work or Derivative Works thereof, You may choose to offer, + and charge a fee for, acceptance of support, warranty, indemnity, + or other liability obligations and/or rights consistent with this + License. However, in accepting such obligations, You may act only + on Your own behalf and on Your sole responsibility, not on behalf + of any other Contributor, and only if You agree to indemnify, + defend, and hold each Contributor harmless for any liability + incurred by, or claims asserted against, such Contributor by reason + of your accepting any such warranty or additional liability. 
+ + END OF TERMS AND CONDITIONS + + APPENDIX: How to apply the Apache License to your work. + + To apply the Apache License to your work, attach the following + boilerplate notice, with the fields enclosed by brackets "{}" + replaced with your own identifying information. (Don't include + the brackets!) The text should be enclosed in the appropriate + comment syntax for the file format. We also recommend that a + file or class name and description of purpose be included on the + same "printed page" as the copyright notice for easier + identification within third-party archives. + + Copyright 2018-2020 Amazon.com, Inc. or its affiliates. All Rights Reserved. + + Licensed under the Apache License, Version 2.0 (the "License"); + you may not use this file except in compliance with the License. + You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + + Unless required by applicable law or agreed to in writing, software + distributed under the License is distributed on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + See the License for the specific language governing permissions and + limitations under the License. \ No newline at end of file diff --git a/amplify/functions/downloadDocument/node_modules/@smithy/middleware-stack/README.md b/amplify/functions/downloadDocument/node_modules/@smithy/middleware-stack/README.md new file mode 100644 index 0000000..c09d4d3 --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@smithy/middleware-stack/README.md @@ -0,0 +1,78 @@ +# @smithy/middleware-stack + +[![NPM version](https://img.shields.io/npm/v/@smithy/middleware-stack/latest.svg)](https://www.npmjs.com/package/@smithy/middleware-stack) +[![NPM downloads](https://img.shields.io/npm/dm/@smithy/middleware-stack.svg)](https://www.npmjs.com/package/@smithy/middleware-stack) + +The package contains an implementation of middleware stack interface. 
Middleware +stack is a structure storing middleware in specified order and resolve these +middleware into a single handler. + +A middleware stack has five `Step`s, each of them represents a specific request life cycle: + +- **initialize**: The input is being prepared. Examples of typical initialization tasks include injecting default options computing derived parameters. + +- **serialize**: The input is complete and ready to be serialized. Examples of typical serialization tasks include input validation and building an HTTP request from user input. + +- **build**: The input has been serialized into an HTTP request, but that request may require further modification. Any request alterations will be applied to all retries. Examples of typical build tasks include injecting HTTP headers that describe a stable aspect of the request, such as `Content-Length` or a body checksum. + +- **finalizeRequest**: The request is being prepared to be sent over the wire. The request in this stage should already be semantically complete and should therefore only be altered to match the recipient's expectations. Examples of typical finalization tasks include request signing and injecting hop-by-hop headers. + +- **deserialize**: The response has arrived, the middleware here will deserialize the raw response object to structured response + +## Adding Middleware + +There are two ways to add middleware to a middleware stack. They both add middleware to specified `Step` but they provide fine-grained location control differently. + +### Absolute Location + +You can add middleware to specified step with: + +```javascript +stack.add(middleware, { + step: "finalizeRequest", +}); +``` + +This approach works for most cases. Sometimes you want your middleware to be executed in the front of the `Step`, you can set the `Priority` to `high`. 
Set the `Priority` to `low` then this middleware will be executed at the end of `Step`: + +```javascript +stack.add(middleware, { + step: "finalizeRequest", + priority: "high", +}); +``` + +If multiple middleware is added to same `step` with same `priority`, the order of them is determined by the order of adding them. + +### Relative Location + +In some cases, you might want to execute your middleware before some other known middleware, then you can use `addRelativeTo()`: + +```javascript +stack.add(middleware, { + step: "finalizeRequest", + name: "myMiddleware", +}); +stack.addRelativeTo(anotherMiddleware, { + relation: "before", //or 'after' + toMiddleware: "myMiddleware", +}); +``` + +## Removing Middleware + +You can remove middleware by name one at a time: + +```javascript +stack.remove("Middleware1"); +``` + +If you specify tags for middleware, you can remove multiple middleware at a time according to tag: + +```javascript +stack.add(middleware, { + step: "finalizeRequest", + tags: ["final"], +}); +stack.removeByTag("final"); +``` diff --git a/amplify/functions/downloadDocument/node_modules/@smithy/middleware-stack/dist-cjs/MiddlewareStack.js b/amplify/functions/downloadDocument/node_modules/@smithy/middleware-stack/dist-cjs/MiddlewareStack.js new file mode 100644 index 0000000..532e610 --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@smithy/middleware-stack/dist-cjs/MiddlewareStack.js @@ -0,0 +1 @@ +module.exports = require("./index.js"); \ No newline at end of file diff --git a/amplify/functions/downloadDocument/node_modules/@smithy/middleware-stack/dist-cjs/index.js b/amplify/functions/downloadDocument/node_modules/@smithy/middleware-stack/dist-cjs/index.js new file mode 100644 index 0000000..4c78597 --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@smithy/middleware-stack/dist-cjs/index.js @@ -0,0 +1,313 @@ +var __defProp = Object.defineProperty; +var __getOwnPropDesc = Object.getOwnPropertyDescriptor; +var 
__getOwnPropNames = Object.getOwnPropertyNames; +var __hasOwnProp = Object.prototype.hasOwnProperty; +var __name = (target, value) => __defProp(target, "name", { value, configurable: true }); +var __export = (target, all) => { + for (var name in all) + __defProp(target, name, { get: all[name], enumerable: true }); +}; +var __copyProps = (to, from, except, desc) => { + if (from && typeof from === "object" || typeof from === "function") { + for (let key of __getOwnPropNames(from)) + if (!__hasOwnProp.call(to, key) && key !== except) + __defProp(to, key, { get: () => from[key], enumerable: !(desc = __getOwnPropDesc(from, key)) || desc.enumerable }); + } + return to; +}; +var __toCommonJS = (mod) => __copyProps(__defProp({}, "__esModule", { value: true }), mod); + +// src/index.ts +var src_exports = {}; +__export(src_exports, { + constructStack: () => constructStack +}); +module.exports = __toCommonJS(src_exports); + +// src/MiddlewareStack.ts +var getAllAliases = /* @__PURE__ */ __name((name, aliases) => { + const _aliases = []; + if (name) { + _aliases.push(name); + } + if (aliases) { + for (const alias of aliases) { + _aliases.push(alias); + } + } + return _aliases; +}, "getAllAliases"); +var getMiddlewareNameWithAliases = /* @__PURE__ */ __name((name, aliases) => { + return `${name || "anonymous"}${aliases && aliases.length > 0 ? ` (a.k.a. 
${aliases.join(",")})` : ""}`; +}, "getMiddlewareNameWithAliases"); +var constructStack = /* @__PURE__ */ __name(() => { + let absoluteEntries = []; + let relativeEntries = []; + let identifyOnResolve = false; + const entriesNameSet = /* @__PURE__ */ new Set(); + const sort = /* @__PURE__ */ __name((entries) => entries.sort( + (a, b) => stepWeights[b.step] - stepWeights[a.step] || priorityWeights[b.priority || "normal"] - priorityWeights[a.priority || "normal"] + ), "sort"); + const removeByName = /* @__PURE__ */ __name((toRemove) => { + let isRemoved = false; + const filterCb = /* @__PURE__ */ __name((entry) => { + const aliases = getAllAliases(entry.name, entry.aliases); + if (aliases.includes(toRemove)) { + isRemoved = true; + for (const alias of aliases) { + entriesNameSet.delete(alias); + } + return false; + } + return true; + }, "filterCb"); + absoluteEntries = absoluteEntries.filter(filterCb); + relativeEntries = relativeEntries.filter(filterCb); + return isRemoved; + }, "removeByName"); + const removeByReference = /* @__PURE__ */ __name((toRemove) => { + let isRemoved = false; + const filterCb = /* @__PURE__ */ __name((entry) => { + if (entry.middleware === toRemove) { + isRemoved = true; + for (const alias of getAllAliases(entry.name, entry.aliases)) { + entriesNameSet.delete(alias); + } + return false; + } + return true; + }, "filterCb"); + absoluteEntries = absoluteEntries.filter(filterCb); + relativeEntries = relativeEntries.filter(filterCb); + return isRemoved; + }, "removeByReference"); + const cloneTo = /* @__PURE__ */ __name((toStack) => { + absoluteEntries.forEach((entry) => { + toStack.add(entry.middleware, { ...entry }); + }); + relativeEntries.forEach((entry) => { + toStack.addRelativeTo(entry.middleware, { ...entry }); + }); + toStack.identifyOnResolve?.(stack.identifyOnResolve()); + return toStack; + }, "cloneTo"); + const expandRelativeMiddlewareList = /* @__PURE__ */ __name((from) => { + const expandedMiddlewareList = []; + 
from.before.forEach((entry) => { + if (entry.before.length === 0 && entry.after.length === 0) { + expandedMiddlewareList.push(entry); + } else { + expandedMiddlewareList.push(...expandRelativeMiddlewareList(entry)); + } + }); + expandedMiddlewareList.push(from); + from.after.reverse().forEach((entry) => { + if (entry.before.length === 0 && entry.after.length === 0) { + expandedMiddlewareList.push(entry); + } else { + expandedMiddlewareList.push(...expandRelativeMiddlewareList(entry)); + } + }); + return expandedMiddlewareList; + }, "expandRelativeMiddlewareList"); + const getMiddlewareList = /* @__PURE__ */ __name((debug = false) => { + const normalizedAbsoluteEntries = []; + const normalizedRelativeEntries = []; + const normalizedEntriesNameMap = {}; + absoluteEntries.forEach((entry) => { + const normalizedEntry = { + ...entry, + before: [], + after: [] + }; + for (const alias of getAllAliases(normalizedEntry.name, normalizedEntry.aliases)) { + normalizedEntriesNameMap[alias] = normalizedEntry; + } + normalizedAbsoluteEntries.push(normalizedEntry); + }); + relativeEntries.forEach((entry) => { + const normalizedEntry = { + ...entry, + before: [], + after: [] + }; + for (const alias of getAllAliases(normalizedEntry.name, normalizedEntry.aliases)) { + normalizedEntriesNameMap[alias] = normalizedEntry; + } + normalizedRelativeEntries.push(normalizedEntry); + }); + normalizedRelativeEntries.forEach((entry) => { + if (entry.toMiddleware) { + const toMiddleware = normalizedEntriesNameMap[entry.toMiddleware]; + if (toMiddleware === void 0) { + if (debug) { + return; + } + throw new Error( + `${entry.toMiddleware} is not found when adding ${getMiddlewareNameWithAliases(entry.name, entry.aliases)} middleware ${entry.relation} ${entry.toMiddleware}` + ); + } + if (entry.relation === "after") { + toMiddleware.after.push(entry); + } + if (entry.relation === "before") { + toMiddleware.before.push(entry); + } + } + }); + const mainChain = 
sort(normalizedAbsoluteEntries).map(expandRelativeMiddlewareList).reduce( + (wholeList, expandedMiddlewareList) => { + wholeList.push(...expandedMiddlewareList); + return wholeList; + }, + [] + ); + return mainChain; + }, "getMiddlewareList"); + const stack = { + add: (middleware, options = {}) => { + const { name, override, aliases: _aliases } = options; + const entry = { + step: "initialize", + priority: "normal", + middleware, + ...options + }; + const aliases = getAllAliases(name, _aliases); + if (aliases.length > 0) { + if (aliases.some((alias) => entriesNameSet.has(alias))) { + if (!override) + throw new Error(`Duplicate middleware name '${getMiddlewareNameWithAliases(name, _aliases)}'`); + for (const alias of aliases) { + const toOverrideIndex = absoluteEntries.findIndex( + (entry2) => entry2.name === alias || entry2.aliases?.some((a) => a === alias) + ); + if (toOverrideIndex === -1) { + continue; + } + const toOverride = absoluteEntries[toOverrideIndex]; + if (toOverride.step !== entry.step || entry.priority !== toOverride.priority) { + throw new Error( + `"${getMiddlewareNameWithAliases(toOverride.name, toOverride.aliases)}" middleware with ${toOverride.priority} priority in ${toOverride.step} step cannot be overridden by "${getMiddlewareNameWithAliases(name, _aliases)}" middleware with ${entry.priority} priority in ${entry.step} step.` + ); + } + absoluteEntries.splice(toOverrideIndex, 1); + } + } + for (const alias of aliases) { + entriesNameSet.add(alias); + } + } + absoluteEntries.push(entry); + }, + addRelativeTo: (middleware, options) => { + const { name, override, aliases: _aliases } = options; + const entry = { + middleware, + ...options + }; + const aliases = getAllAliases(name, _aliases); + if (aliases.length > 0) { + if (aliases.some((alias) => entriesNameSet.has(alias))) { + if (!override) + throw new Error(`Duplicate middleware name '${getMiddlewareNameWithAliases(name, _aliases)}'`); + for (const alias of aliases) { + const toOverrideIndex = 
relativeEntries.findIndex( + (entry2) => entry2.name === alias || entry2.aliases?.some((a) => a === alias) + ); + if (toOverrideIndex === -1) { + continue; + } + const toOverride = relativeEntries[toOverrideIndex]; + if (toOverride.toMiddleware !== entry.toMiddleware || toOverride.relation !== entry.relation) { + throw new Error( + `"${getMiddlewareNameWithAliases(toOverride.name, toOverride.aliases)}" middleware ${toOverride.relation} "${toOverride.toMiddleware}" middleware cannot be overridden by "${getMiddlewareNameWithAliases(name, _aliases)}" middleware ${entry.relation} "${entry.toMiddleware}" middleware.` + ); + } + relativeEntries.splice(toOverrideIndex, 1); + } + } + for (const alias of aliases) { + entriesNameSet.add(alias); + } + } + relativeEntries.push(entry); + }, + clone: () => cloneTo(constructStack()), + use: (plugin) => { + plugin.applyToStack(stack); + }, + remove: (toRemove) => { + if (typeof toRemove === "string") + return removeByName(toRemove); + else + return removeByReference(toRemove); + }, + removeByTag: (toRemove) => { + let isRemoved = false; + const filterCb = /* @__PURE__ */ __name((entry) => { + const { tags, name, aliases: _aliases } = entry; + if (tags && tags.includes(toRemove)) { + const aliases = getAllAliases(name, _aliases); + for (const alias of aliases) { + entriesNameSet.delete(alias); + } + isRemoved = true; + return false; + } + return true; + }, "filterCb"); + absoluteEntries = absoluteEntries.filter(filterCb); + relativeEntries = relativeEntries.filter(filterCb); + return isRemoved; + }, + concat: (from) => { + const cloned = cloneTo(constructStack()); + cloned.use(from); + cloned.identifyOnResolve( + identifyOnResolve || cloned.identifyOnResolve() || (from.identifyOnResolve?.() ?? false) + ); + return cloned; + }, + applyToStack: cloneTo, + identify: () => { + return getMiddlewareList(true).map((mw) => { + const step = mw.step ?? 
mw.relation + " " + mw.toMiddleware; + return getMiddlewareNameWithAliases(mw.name, mw.aliases) + " - " + step; + }); + }, + identifyOnResolve(toggle) { + if (typeof toggle === "boolean") + identifyOnResolve = toggle; + return identifyOnResolve; + }, + resolve: (handler, context) => { + for (const middleware of getMiddlewareList().map((entry) => entry.middleware).reverse()) { + handler = middleware(handler, context); + } + if (identifyOnResolve) { + console.log(stack.identify()); + } + return handler; + } + }; + return stack; +}, "constructStack"); +var stepWeights = { + initialize: 5, + serialize: 4, + build: 3, + finalizeRequest: 2, + deserialize: 1 +}; +var priorityWeights = { + high: 3, + normal: 2, + low: 1 +}; +// Annotate the CommonJS export names for ESM import in node: + +0 && (module.exports = { + constructStack +}); + diff --git a/amplify/functions/downloadDocument/node_modules/@smithy/middleware-stack/dist-cjs/types.js b/amplify/functions/downloadDocument/node_modules/@smithy/middleware-stack/dist-cjs/types.js new file mode 100644 index 0000000..532e610 --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@smithy/middleware-stack/dist-cjs/types.js @@ -0,0 +1 @@ +module.exports = require("./index.js"); \ No newline at end of file diff --git a/amplify/functions/downloadDocument/node_modules/@smithy/middleware-stack/dist-es/MiddlewareStack.js b/amplify/functions/downloadDocument/node_modules/@smithy/middleware-stack/dist-es/MiddlewareStack.js new file mode 100644 index 0000000..2e02c73 --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@smithy/middleware-stack/dist-es/MiddlewareStack.js @@ -0,0 +1,281 @@ +const getAllAliases = (name, aliases) => { + const _aliases = []; + if (name) { + _aliases.push(name); + } + if (aliases) { + for (const alias of aliases) { + _aliases.push(alias); + } + } + return _aliases; +}; +const getMiddlewareNameWithAliases = (name, aliases) => { + return `${name || "anonymous"}${aliases && 
aliases.length > 0 ? ` (a.k.a. ${aliases.join(",")})` : ""}`; +}; +export const constructStack = () => { + let absoluteEntries = []; + let relativeEntries = []; + let identifyOnResolve = false; + const entriesNameSet = new Set(); + const sort = (entries) => entries.sort((a, b) => stepWeights[b.step] - stepWeights[a.step] || + priorityWeights[b.priority || "normal"] - priorityWeights[a.priority || "normal"]); + const removeByName = (toRemove) => { + let isRemoved = false; + const filterCb = (entry) => { + const aliases = getAllAliases(entry.name, entry.aliases); + if (aliases.includes(toRemove)) { + isRemoved = true; + for (const alias of aliases) { + entriesNameSet.delete(alias); + } + return false; + } + return true; + }; + absoluteEntries = absoluteEntries.filter(filterCb); + relativeEntries = relativeEntries.filter(filterCb); + return isRemoved; + }; + const removeByReference = (toRemove) => { + let isRemoved = false; + const filterCb = (entry) => { + if (entry.middleware === toRemove) { + isRemoved = true; + for (const alias of getAllAliases(entry.name, entry.aliases)) { + entriesNameSet.delete(alias); + } + return false; + } + return true; + }; + absoluteEntries = absoluteEntries.filter(filterCb); + relativeEntries = relativeEntries.filter(filterCb); + return isRemoved; + }; + const cloneTo = (toStack) => { + absoluteEntries.forEach((entry) => { + toStack.add(entry.middleware, { ...entry }); + }); + relativeEntries.forEach((entry) => { + toStack.addRelativeTo(entry.middleware, { ...entry }); + }); + toStack.identifyOnResolve?.(stack.identifyOnResolve()); + return toStack; + }; + const expandRelativeMiddlewareList = (from) => { + const expandedMiddlewareList = []; + from.before.forEach((entry) => { + if (entry.before.length === 0 && entry.after.length === 0) { + expandedMiddlewareList.push(entry); + } + else { + expandedMiddlewareList.push(...expandRelativeMiddlewareList(entry)); + } + }); + expandedMiddlewareList.push(from); + 
from.after.reverse().forEach((entry) => { + if (entry.before.length === 0 && entry.after.length === 0) { + expandedMiddlewareList.push(entry); + } + else { + expandedMiddlewareList.push(...expandRelativeMiddlewareList(entry)); + } + }); + return expandedMiddlewareList; + }; + const getMiddlewareList = (debug = false) => { + const normalizedAbsoluteEntries = []; + const normalizedRelativeEntries = []; + const normalizedEntriesNameMap = {}; + absoluteEntries.forEach((entry) => { + const normalizedEntry = { + ...entry, + before: [], + after: [], + }; + for (const alias of getAllAliases(normalizedEntry.name, normalizedEntry.aliases)) { + normalizedEntriesNameMap[alias] = normalizedEntry; + } + normalizedAbsoluteEntries.push(normalizedEntry); + }); + relativeEntries.forEach((entry) => { + const normalizedEntry = { + ...entry, + before: [], + after: [], + }; + for (const alias of getAllAliases(normalizedEntry.name, normalizedEntry.aliases)) { + normalizedEntriesNameMap[alias] = normalizedEntry; + } + normalizedRelativeEntries.push(normalizedEntry); + }); + normalizedRelativeEntries.forEach((entry) => { + if (entry.toMiddleware) { + const toMiddleware = normalizedEntriesNameMap[entry.toMiddleware]; + if (toMiddleware === undefined) { + if (debug) { + return; + } + throw new Error(`${entry.toMiddleware} is not found when adding ` + + `${getMiddlewareNameWithAliases(entry.name, entry.aliases)} ` + + `middleware ${entry.relation} ${entry.toMiddleware}`); + } + if (entry.relation === "after") { + toMiddleware.after.push(entry); + } + if (entry.relation === "before") { + toMiddleware.before.push(entry); + } + } + }); + const mainChain = sort(normalizedAbsoluteEntries) + .map(expandRelativeMiddlewareList) + .reduce((wholeList, expandedMiddlewareList) => { + wholeList.push(...expandedMiddlewareList); + return wholeList; + }, []); + return mainChain; + }; + const stack = { + add: (middleware, options = {}) => { + const { name, override, aliases: _aliases } = options; + const 
entry = { + step: "initialize", + priority: "normal", + middleware, + ...options, + }; + const aliases = getAllAliases(name, _aliases); + if (aliases.length > 0) { + if (aliases.some((alias) => entriesNameSet.has(alias))) { + if (!override) + throw new Error(`Duplicate middleware name '${getMiddlewareNameWithAliases(name, _aliases)}'`); + for (const alias of aliases) { + const toOverrideIndex = absoluteEntries.findIndex((entry) => entry.name === alias || entry.aliases?.some((a) => a === alias)); + if (toOverrideIndex === -1) { + continue; + } + const toOverride = absoluteEntries[toOverrideIndex]; + if (toOverride.step !== entry.step || entry.priority !== toOverride.priority) { + throw new Error(`"${getMiddlewareNameWithAliases(toOverride.name, toOverride.aliases)}" middleware with ` + + `${toOverride.priority} priority in ${toOverride.step} step cannot ` + + `be overridden by "${getMiddlewareNameWithAliases(name, _aliases)}" middleware with ` + + `${entry.priority} priority in ${entry.step} step.`); + } + absoluteEntries.splice(toOverrideIndex, 1); + } + } + for (const alias of aliases) { + entriesNameSet.add(alias); + } + } + absoluteEntries.push(entry); + }, + addRelativeTo: (middleware, options) => { + const { name, override, aliases: _aliases } = options; + const entry = { + middleware, + ...options, + }; + const aliases = getAllAliases(name, _aliases); + if (aliases.length > 0) { + if (aliases.some((alias) => entriesNameSet.has(alias))) { + if (!override) + throw new Error(`Duplicate middleware name '${getMiddlewareNameWithAliases(name, _aliases)}'`); + for (const alias of aliases) { + const toOverrideIndex = relativeEntries.findIndex((entry) => entry.name === alias || entry.aliases?.some((a) => a === alias)); + if (toOverrideIndex === -1) { + continue; + } + const toOverride = relativeEntries[toOverrideIndex]; + if (toOverride.toMiddleware !== entry.toMiddleware || toOverride.relation !== entry.relation) { + throw new 
Error(`"${getMiddlewareNameWithAliases(toOverride.name, toOverride.aliases)}" middleware ` + + `${toOverride.relation} "${toOverride.toMiddleware}" middleware cannot be overridden ` + + `by "${getMiddlewareNameWithAliases(name, _aliases)}" middleware ${entry.relation} ` + + `"${entry.toMiddleware}" middleware.`); + } + relativeEntries.splice(toOverrideIndex, 1); + } + } + for (const alias of aliases) { + entriesNameSet.add(alias); + } + } + relativeEntries.push(entry); + }, + clone: () => cloneTo(constructStack()), + use: (plugin) => { + plugin.applyToStack(stack); + }, + remove: (toRemove) => { + if (typeof toRemove === "string") + return removeByName(toRemove); + else + return removeByReference(toRemove); + }, + removeByTag: (toRemove) => { + let isRemoved = false; + const filterCb = (entry) => { + const { tags, name, aliases: _aliases } = entry; + if (tags && tags.includes(toRemove)) { + const aliases = getAllAliases(name, _aliases); + for (const alias of aliases) { + entriesNameSet.delete(alias); + } + isRemoved = true; + return false; + } + return true; + }; + absoluteEntries = absoluteEntries.filter(filterCb); + relativeEntries = relativeEntries.filter(filterCb); + return isRemoved; + }, + concat: (from) => { + const cloned = cloneTo(constructStack()); + cloned.use(from); + cloned.identifyOnResolve(identifyOnResolve || cloned.identifyOnResolve() || (from.identifyOnResolve?.() ?? false)); + return cloned; + }, + applyToStack: cloneTo, + identify: () => { + return getMiddlewareList(true).map((mw) => { + const step = mw.step ?? 
+ mw.relation + + " " + + mw.toMiddleware; + return getMiddlewareNameWithAliases(mw.name, mw.aliases) + " - " + step; + }); + }, + identifyOnResolve(toggle) { + if (typeof toggle === "boolean") + identifyOnResolve = toggle; + return identifyOnResolve; + }, + resolve: (handler, context) => { + for (const middleware of getMiddlewareList() + .map((entry) => entry.middleware) + .reverse()) { + handler = middleware(handler, context); + } + if (identifyOnResolve) { + console.log(stack.identify()); + } + return handler; + }, + }; + return stack; +}; +const stepWeights = { + initialize: 5, + serialize: 4, + build: 3, + finalizeRequest: 2, + deserialize: 1, +}; +const priorityWeights = { + high: 3, + normal: 2, + low: 1, +}; diff --git a/amplify/functions/downloadDocument/node_modules/@smithy/middleware-stack/dist-es/index.js b/amplify/functions/downloadDocument/node_modules/@smithy/middleware-stack/dist-es/index.js new file mode 100644 index 0000000..16f56ce --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@smithy/middleware-stack/dist-es/index.js @@ -0,0 +1 @@ +export * from "./MiddlewareStack"; diff --git a/amplify/functions/downloadDocument/node_modules/@smithy/middleware-stack/dist-es/types.js b/amplify/functions/downloadDocument/node_modules/@smithy/middleware-stack/dist-es/types.js new file mode 100644 index 0000000..cb0ff5c --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@smithy/middleware-stack/dist-es/types.js @@ -0,0 +1 @@ +export {}; diff --git a/amplify/functions/downloadDocument/node_modules/@smithy/middleware-stack/dist-types/MiddlewareStack.d.ts b/amplify/functions/downloadDocument/node_modules/@smithy/middleware-stack/dist-types/MiddlewareStack.d.ts new file mode 100644 index 0000000..2aa088b --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@smithy/middleware-stack/dist-types/MiddlewareStack.d.ts @@ -0,0 +1,5 @@ +import { MiddlewareStack } from "@smithy/types"; +/** + * @internal + */ +export declare 
const constructStack: () => MiddlewareStack; diff --git a/amplify/functions/downloadDocument/node_modules/@smithy/middleware-stack/dist-types/index.d.ts b/amplify/functions/downloadDocument/node_modules/@smithy/middleware-stack/dist-types/index.d.ts new file mode 100644 index 0000000..16f56ce --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@smithy/middleware-stack/dist-types/index.d.ts @@ -0,0 +1 @@ +export * from "./MiddlewareStack"; diff --git a/amplify/functions/downloadDocument/node_modules/@smithy/middleware-stack/dist-types/ts3.4/MiddlewareStack.d.ts b/amplify/functions/downloadDocument/node_modules/@smithy/middleware-stack/dist-types/ts3.4/MiddlewareStack.d.ts new file mode 100644 index 0000000..d93ce93 --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@smithy/middleware-stack/dist-types/ts3.4/MiddlewareStack.d.ts @@ -0,0 +1,5 @@ +import { MiddlewareStack } from "@smithy/types"; +/** + * @internal + */ +export declare const constructStack: () => MiddlewareStack; diff --git a/amplify/functions/downloadDocument/node_modules/@smithy/middleware-stack/dist-types/ts3.4/index.d.ts b/amplify/functions/downloadDocument/node_modules/@smithy/middleware-stack/dist-types/ts3.4/index.d.ts new file mode 100644 index 0000000..d906b7d --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@smithy/middleware-stack/dist-types/ts3.4/index.d.ts @@ -0,0 +1 @@ +export * from "./MiddlewareStack"; diff --git a/amplify/functions/downloadDocument/node_modules/@smithy/middleware-stack/dist-types/ts3.4/types.d.ts b/amplify/functions/downloadDocument/node_modules/@smithy/middleware-stack/dist-types/ts3.4/types.d.ts new file mode 100644 index 0000000..38eb54c --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@smithy/middleware-stack/dist-types/ts3.4/types.d.ts @@ -0,0 +1,22 @@ +import { AbsoluteLocation, HandlerOptions, MiddlewareType, Priority, RelativeLocation, Step } from "@smithy/types"; +export interface 
MiddlewareEntry extends HandlerOptions { + middleware: MiddlewareType; +} +export interface AbsoluteMiddlewareEntry extends MiddlewareEntry, AbsoluteLocation { + step: Step; + priority: Priority; +} +export interface RelativeMiddlewareEntry extends MiddlewareEntry, RelativeLocation { +} +export type Normalized, Input extends object = {}, Output extends object = {}> = T & { + after: Normalized, Input, Output>[]; + before: Normalized, Input, Output>[]; +}; +export interface NormalizedRelativeEntry extends HandlerOptions { + step: Step; + middleware: MiddlewareType; + next?: NormalizedRelativeEntry; + prev?: NormalizedRelativeEntry; + priority: null; +} +export type NamedMiddlewareEntriesMap = Record>; diff --git a/amplify/functions/downloadDocument/node_modules/@smithy/middleware-stack/dist-types/types.d.ts b/amplify/functions/downloadDocument/node_modules/@smithy/middleware-stack/dist-types/types.d.ts new file mode 100644 index 0000000..4aa5fc6 --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@smithy/middleware-stack/dist-types/types.d.ts @@ -0,0 +1,22 @@ +import { AbsoluteLocation, HandlerOptions, MiddlewareType, Priority, RelativeLocation, Step } from "@smithy/types"; +export interface MiddlewareEntry extends HandlerOptions { + middleware: MiddlewareType; +} +export interface AbsoluteMiddlewareEntry extends MiddlewareEntry, AbsoluteLocation { + step: Step; + priority: Priority; +} +export interface RelativeMiddlewareEntry extends MiddlewareEntry, RelativeLocation { +} +export type Normalized, Input extends object = {}, Output extends object = {}> = T & { + after: Normalized, Input, Output>[]; + before: Normalized, Input, Output>[]; +}; +export interface NormalizedRelativeEntry extends HandlerOptions { + step: Step; + middleware: MiddlewareType; + next?: NormalizedRelativeEntry; + prev?: NormalizedRelativeEntry; + priority: null; +} +export type NamedMiddlewareEntriesMap = Record>; diff --git 
a/amplify/functions/downloadDocument/node_modules/@smithy/middleware-stack/package.json b/amplify/functions/downloadDocument/node_modules/@smithy/middleware-stack/package.json new file mode 100644 index 0000000..57077ab --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@smithy/middleware-stack/package.json @@ -0,0 +1,63 @@ +{ + "name": "@smithy/middleware-stack", + "version": "4.0.2", + "description": "Provides a means for composing multiple middleware functions into a single handler", + "scripts": { + "build": "concurrently 'yarn:build:cjs' 'yarn:build:es' 'yarn:build:types && yarn build:types:downlevel'", + "build:cjs": "node ../../scripts/inline middleware-stack", + "build:es": "yarn g:tsc -p tsconfig.es.json", + "build:types": "yarn g:tsc -p tsconfig.types.json", + "build:types:downlevel": "rimraf dist-types/ts3.4 && downlevel-dts dist-types dist-types/ts3.4", + "stage-release": "rimraf ./.release && yarn pack && mkdir ./.release && tar zxvf ./package.tgz --directory ./.release && rm ./package.tgz", + "clean": "rimraf ./dist-* && rimraf *.tsbuildinfo || exit 0", + "lint": "eslint -c ../../.eslintrc.js \"src/**/*.ts\"", + "format": "prettier --config ../../prettier.config.js --ignore-path ../../.prettierignore --write \"**/*.{ts,md,json}\"", + "extract:docs": "api-extractor run --local", + "test": "yarn g:vitest run", + "test:watch": "yarn g:vitest watch" + }, + "author": { + "name": "AWS SDK for JavaScript Team", + "email": "", + "url": "https://aws.amazon.com/javascript/" + }, + "license": "Apache-2.0", + "main": "./dist-cjs/index.js", + "module": "./dist-es/index.js", + "types": "./dist-types/index.d.ts", + "dependencies": { + "@smithy/types": "^4.2.0", + "tslib": "^2.6.2" + }, + "devDependencies": { + "concurrently": "7.0.0", + "downlevel-dts": "0.10.1", + "rimraf": "3.0.2", + "typedoc": "0.23.23" + }, + "engines": { + "node": ">=18.0.0" + }, + "typesVersions": { + "<4.0": { + "dist-types/*": [ + "dist-types/ts3.4/*" + ] + } + }, + "files": 
[ + "dist-*/**" + ], + "homepage": "https://github.com/smithy-lang/smithy-typescript/tree/main/packages/middleware-stack", + "repository": { + "type": "git", + "url": "https://github.com/smithy-lang/smithy-typescript.git", + "directory": "packages/middleware-stack" + }, + "typedoc": { + "entryPoint": "src/index.ts" + }, + "publishConfig": { + "directory": ".release/package" + } +} \ No newline at end of file diff --git a/amplify/functions/downloadDocument/node_modules/@smithy/node-config-provider/LICENSE b/amplify/functions/downloadDocument/node_modules/@smithy/node-config-provider/LICENSE new file mode 100644 index 0000000..74d4e5c --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@smithy/node-config-provider/LICENSE @@ -0,0 +1,201 @@ +Apache License + Version 2.0, January 2004 + http://www.apache.org/licenses/ + + TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION + + 1. Definitions. + + "License" shall mean the terms and conditions for use, reproduction, + and distribution as defined by Sections 1 through 9 of this document. + + "Licensor" shall mean the copyright owner or entity authorized by + the copyright owner that is granting the License. + + "Legal Entity" shall mean the union of the acting entity and all + other entities that control, are controlled by, or are under common + control with that entity. For the purposes of this definition, + "control" means (i) the power, direct or indirect, to cause the + direction or management of such entity, whether by contract or + otherwise, or (ii) ownership of fifty percent (50%) or more of the + outstanding shares, or (iii) beneficial ownership of such entity. + + "You" (or "Your") shall mean an individual or Legal Entity + exercising permissions granted by this License. + + "Source" form shall mean the preferred form for making modifications, + including but not limited to software source code, documentation + source, and configuration files. 
+ + "Object" form shall mean any form resulting from mechanical + transformation or translation of a Source form, including but + not limited to compiled object code, generated documentation, + and conversions to other media types. + + "Work" shall mean the work of authorship, whether in Source or + Object form, made available under the License, as indicated by a + copyright notice that is included in or attached to the work + (an example is provided in the Appendix below). + + "Derivative Works" shall mean any work, whether in Source or Object + form, that is based on (or derived from) the Work and for which the + editorial revisions, annotations, elaborations, or other modifications + represent, as a whole, an original work of authorship. For the purposes + of this License, Derivative Works shall not include works that remain + separable from, or merely link (or bind by name) to the interfaces of, + the Work and Derivative Works thereof. + + "Contribution" shall mean any work of authorship, including + the original version of the Work and any modifications or additions + to that Work or Derivative Works thereof, that is intentionally + submitted to Licensor for inclusion in the Work by the copyright owner + or by an individual or Legal Entity authorized to submit on behalf of + the copyright owner. For the purposes of this definition, "submitted" + means any form of electronic, verbal, or written communication sent + to the Licensor or its representatives, including but not limited to + communication on electronic mailing lists, source code control systems, + and issue tracking systems that are managed by, or on behalf of, the + Licensor for the purpose of discussing and improving the Work, but + excluding communication that is conspicuously marked or otherwise + designated in writing by the copyright owner as "Not a Contribution." 
+ + "Contributor" shall mean Licensor and any individual or Legal Entity + on behalf of whom a Contribution has been received by Licensor and + subsequently incorporated within the Work. + + 2. Grant of Copyright License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + copyright license to reproduce, prepare Derivative Works of, + publicly display, publicly perform, sublicense, and distribute the + Work and such Derivative Works in Source or Object form. + + 3. Grant of Patent License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + (except as stated in this section) patent license to make, have made, + use, offer to sell, sell, import, and otherwise transfer the Work, + where such license applies only to those patent claims licensable + by such Contributor that are necessarily infringed by their + Contribution(s) alone or by combination of their Contribution(s) + with the Work to which such Contribution(s) was submitted. If You + institute patent litigation against any entity (including a + cross-claim or counterclaim in a lawsuit) alleging that the Work + or a Contribution incorporated within the Work constitutes direct + or contributory patent infringement, then any patent licenses + granted to You under this License for that Work shall terminate + as of the date such litigation is filed. + + 4. Redistribution. 
You may reproduce and distribute copies of the + Work or Derivative Works thereof in any medium, with or without + modifications, and in Source or Object form, provided that You + meet the following conditions: + + (a) You must give any other recipients of the Work or + Derivative Works a copy of this License; and + + (b) You must cause any modified files to carry prominent notices + stating that You changed the files; and + + (c) You must retain, in the Source form of any Derivative Works + that You distribute, all copyright, patent, trademark, and + attribution notices from the Source form of the Work, + excluding those notices that do not pertain to any part of + the Derivative Works; and + + (d) If the Work includes a "NOTICE" text file as part of its + distribution, then any Derivative Works that You distribute must + include a readable copy of the attribution notices contained + within such NOTICE file, excluding those notices that do not + pertain to any part of the Derivative Works, in at least one + of the following places: within a NOTICE text file distributed + as part of the Derivative Works; within the Source form or + documentation, if provided along with the Derivative Works; or, + within a display generated by the Derivative Works, if and + wherever such third-party notices normally appear. The contents + of the NOTICE file are for informational purposes only and + do not modify the License. You may add Your own attribution + notices within Derivative Works that You distribute, alongside + or as an addendum to the NOTICE text from the Work, provided + that such additional attribution notices cannot be construed + as modifying the License. 
+ + You may add Your own copyright statement to Your modifications and + may provide additional or different license terms and conditions + for use, reproduction, or distribution of Your modifications, or + for any such Derivative Works as a whole, provided Your use, + reproduction, and distribution of the Work otherwise complies with + the conditions stated in this License. + + 5. Submission of Contributions. Unless You explicitly state otherwise, + any Contribution intentionally submitted for inclusion in the Work + by You to the Licensor shall be under the terms and conditions of + this License, without any additional terms or conditions. + Notwithstanding the above, nothing herein shall supersede or modify + the terms of any separate license agreement you may have executed + with Licensor regarding such Contributions. + + 6. Trademarks. This License does not grant permission to use the trade + names, trademarks, service marks, or product names of the Licensor, + except as required for reasonable and customary use in describing the + origin of the Work and reproducing the content of the NOTICE file. + + 7. Disclaimer of Warranty. Unless required by applicable law or + agreed to in writing, Licensor provides the Work (and each + Contributor provides its Contributions) on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or + implied, including, without limitation, any warranties or conditions + of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A + PARTICULAR PURPOSE. You are solely responsible for determining the + appropriateness of using or redistributing the Work and assume any + risks associated with Your exercise of permissions under this License. + + 8. Limitation of Liability. 
In no event and under no legal theory, + whether in tort (including negligence), contract, or otherwise, + unless required by applicable law (such as deliberate and grossly + negligent acts) or agreed to in writing, shall any Contributor be + liable to You for damages, including any direct, indirect, special, + incidental, or consequential damages of any character arising as a + result of this License or out of the use or inability to use the + Work (including but not limited to damages for loss of goodwill, + work stoppage, computer failure or malfunction, or any and all + other commercial damages or losses), even if such Contributor + has been advised of the possibility of such damages. + + 9. Accepting Warranty or Additional Liability. While redistributing + the Work or Derivative Works thereof, You may choose to offer, + and charge a fee for, acceptance of support, warranty, indemnity, + or other liability obligations and/or rights consistent with this + License. However, in accepting such obligations, You may act only + on Your own behalf and on Your sole responsibility, not on behalf + of any other Contributor, and only if You agree to indemnify, + defend, and hold each Contributor harmless for any liability + incurred by, or claims asserted against, such Contributor by reason + of your accepting any such warranty or additional liability. + + END OF TERMS AND CONDITIONS + + APPENDIX: How to apply the Apache License to your work. + + To apply the Apache License to your work, attach the following + boilerplate notice, with the fields enclosed by brackets "{}" + replaced with your own identifying information. (Don't include + the brackets!) The text should be enclosed in the appropriate + comment syntax for the file format. We also recommend that a + file or class name and description of purpose be included on the + same "printed page" as the copyright notice for easier + identification within third-party archives. + + Copyright 2020 Amazon.com, Inc. 
or its affiliates. All Rights Reserved. + + Licensed under the Apache License, Version 2.0 (the "License"); + you may not use this file except in compliance with the License. + You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + + Unless required by applicable law or agreed to in writing, software + distributed under the License is distributed on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + See the License for the specific language governing permissions and + limitations under the License. \ No newline at end of file diff --git a/amplify/functions/downloadDocument/node_modules/@smithy/node-config-provider/README.md b/amplify/functions/downloadDocument/node_modules/@smithy/node-config-provider/README.md new file mode 100644 index 0000000..af591d2 --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@smithy/node-config-provider/README.md @@ -0,0 +1,4 @@ +# @smithy/node-config-provider + +[![NPM version](https://img.shields.io/npm/v/@smithy/node-config-provider/latest.svg)](https://www.npmjs.com/package/@smithy/node-config-provider) +[![NPM downloads](https://img.shields.io/npm/dm/@smithy/node-config-provider.svg)](https://www.npmjs.com/package/@smithy/node-config-provider) diff --git a/amplify/functions/downloadDocument/node_modules/@smithy/node-config-provider/dist-cjs/configLoader.js b/amplify/functions/downloadDocument/node_modules/@smithy/node-config-provider/dist-cjs/configLoader.js new file mode 100644 index 0000000..532e610 --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@smithy/node-config-provider/dist-cjs/configLoader.js @@ -0,0 +1 @@ +module.exports = require("./index.js"); \ No newline at end of file diff --git a/amplify/functions/downloadDocument/node_modules/@smithy/node-config-provider/dist-cjs/fromEnv.js b/amplify/functions/downloadDocument/node_modules/@smithy/node-config-provider/dist-cjs/fromEnv.js new file mode 100644 index 
0000000..532e610 --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@smithy/node-config-provider/dist-cjs/fromEnv.js @@ -0,0 +1 @@ +module.exports = require("./index.js"); \ No newline at end of file diff --git a/amplify/functions/downloadDocument/node_modules/@smithy/node-config-provider/dist-cjs/fromSharedConfigFiles.js b/amplify/functions/downloadDocument/node_modules/@smithy/node-config-provider/dist-cjs/fromSharedConfigFiles.js new file mode 100644 index 0000000..532e610 --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@smithy/node-config-provider/dist-cjs/fromSharedConfigFiles.js @@ -0,0 +1 @@ +module.exports = require("./index.js"); \ No newline at end of file diff --git a/amplify/functions/downloadDocument/node_modules/@smithy/node-config-provider/dist-cjs/fromStatic.js b/amplify/functions/downloadDocument/node_modules/@smithy/node-config-provider/dist-cjs/fromStatic.js new file mode 100644 index 0000000..532e610 --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@smithy/node-config-provider/dist-cjs/fromStatic.js @@ -0,0 +1 @@ +module.exports = require("./index.js"); \ No newline at end of file diff --git a/amplify/functions/downloadDocument/node_modules/@smithy/node-config-provider/dist-cjs/getSelectorName.js b/amplify/functions/downloadDocument/node_modules/@smithy/node-config-provider/dist-cjs/getSelectorName.js new file mode 100644 index 0000000..532e610 --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@smithy/node-config-provider/dist-cjs/getSelectorName.js @@ -0,0 +1 @@ +module.exports = require("./index.js"); \ No newline at end of file diff --git a/amplify/functions/downloadDocument/node_modules/@smithy/node-config-provider/dist-cjs/index.js b/amplify/functions/downloadDocument/node_modules/@smithy/node-config-provider/dist-cjs/index.js new file mode 100644 index 0000000..8a98b1b --- /dev/null +++ 
b/amplify/functions/downloadDocument/node_modules/@smithy/node-config-provider/dist-cjs/index.js @@ -0,0 +1,105 @@ +var __defProp = Object.defineProperty; +var __getOwnPropDesc = Object.getOwnPropertyDescriptor; +var __getOwnPropNames = Object.getOwnPropertyNames; +var __hasOwnProp = Object.prototype.hasOwnProperty; +var __name = (target, value) => __defProp(target, "name", { value, configurable: true }); +var __export = (target, all) => { + for (var name in all) + __defProp(target, name, { get: all[name], enumerable: true }); +}; +var __copyProps = (to, from, except, desc) => { + if (from && typeof from === "object" || typeof from === "function") { + for (let key of __getOwnPropNames(from)) + if (!__hasOwnProp.call(to, key) && key !== except) + __defProp(to, key, { get: () => from[key], enumerable: !(desc = __getOwnPropDesc(from, key)) || desc.enumerable }); + } + return to; +}; +var __toCommonJS = (mod) => __copyProps(__defProp({}, "__esModule", { value: true }), mod); + +// src/index.ts +var src_exports = {}; +__export(src_exports, { + loadConfig: () => loadConfig +}); +module.exports = __toCommonJS(src_exports); + +// src/configLoader.ts + + +// src/fromEnv.ts +var import_property_provider = require("@smithy/property-provider"); + +// src/getSelectorName.ts +function getSelectorName(functionString) { + try { + const constants = new Set(Array.from(functionString.match(/([A-Z_]){3,}/g) ?? 
[])); + constants.delete("CONFIG"); + constants.delete("CONFIG_PREFIX_SEPARATOR"); + constants.delete("ENV"); + return [...constants].join(", "); + } catch (e) { + return functionString; + } +} +__name(getSelectorName, "getSelectorName"); + +// src/fromEnv.ts +var fromEnv = /* @__PURE__ */ __name((envVarSelector, logger) => async () => { + try { + const config = envVarSelector(process.env); + if (config === void 0) { + throw new Error(); + } + return config; + } catch (e) { + throw new import_property_provider.CredentialsProviderError( + e.message || `Not found in ENV: ${getSelectorName(envVarSelector.toString())}`, + { logger } + ); + } +}, "fromEnv"); + +// src/fromSharedConfigFiles.ts + +var import_shared_ini_file_loader = require("@smithy/shared-ini-file-loader"); +var fromSharedConfigFiles = /* @__PURE__ */ __name((configSelector, { preferredFile = "config", ...init } = {}) => async () => { + const profile = (0, import_shared_ini_file_loader.getProfileName)(init); + const { configFile, credentialsFile } = await (0, import_shared_ini_file_loader.loadSharedConfigFiles)(init); + const profileFromCredentials = credentialsFile[profile] || {}; + const profileFromConfig = configFile[profile] || {}; + const mergedProfile = preferredFile === "config" ? { ...profileFromCredentials, ...profileFromConfig } : { ...profileFromConfig, ...profileFromCredentials }; + try { + const cfgFile = preferredFile === "config" ? 
configFile : credentialsFile; + const configValue = configSelector(mergedProfile, cfgFile); + if (configValue === void 0) { + throw new Error(); + } + return configValue; + } catch (e) { + throw new import_property_provider.CredentialsProviderError( + e.message || `Not found in config files w/ profile [${profile}]: ${getSelectorName(configSelector.toString())}`, + { logger: init.logger } + ); + } +}, "fromSharedConfigFiles"); + +// src/fromStatic.ts + +var isFunction = /* @__PURE__ */ __name((func) => typeof func === "function", "isFunction"); +var fromStatic = /* @__PURE__ */ __name((defaultValue) => isFunction(defaultValue) ? async () => await defaultValue() : (0, import_property_provider.fromStatic)(defaultValue), "fromStatic"); + +// src/configLoader.ts +var loadConfig = /* @__PURE__ */ __name(({ environmentVariableSelector, configFileSelector, default: defaultValue }, configuration = {}) => (0, import_property_provider.memoize)( + (0, import_property_provider.chain)( + fromEnv(environmentVariableSelector), + fromSharedConfigFiles(configFileSelector, configuration), + fromStatic(defaultValue) + ) +), "loadConfig"); +// Annotate the CommonJS export names for ESM import in node: + +0 && (module.exports = { + loadConfig +}); + diff --git a/amplify/functions/downloadDocument/node_modules/@smithy/node-config-provider/dist-es/configLoader.js b/amplify/functions/downloadDocument/node_modules/@smithy/node-config-provider/dist-es/configLoader.js new file mode 100644 index 0000000..db044dd --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@smithy/node-config-provider/dist-es/configLoader.js @@ -0,0 +1,5 @@ +import { chain, memoize } from "@smithy/property-provider"; +import { fromEnv } from "./fromEnv"; +import { fromSharedConfigFiles } from "./fromSharedConfigFiles"; +import { fromStatic } from "./fromStatic"; +export const loadConfig = ({ environmentVariableSelector, configFileSelector, default: defaultValue }, configuration = {}) => 
memoize(chain(fromEnv(environmentVariableSelector), fromSharedConfigFiles(configFileSelector, configuration), fromStatic(defaultValue))); diff --git a/amplify/functions/downloadDocument/node_modules/@smithy/node-config-provider/dist-es/fromEnv.js b/amplify/functions/downloadDocument/node_modules/@smithy/node-config-provider/dist-es/fromEnv.js new file mode 100644 index 0000000..d43edbd --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@smithy/node-config-provider/dist-es/fromEnv.js @@ -0,0 +1,14 @@ +import { CredentialsProviderError } from "@smithy/property-provider"; +import { getSelectorName } from "./getSelectorName"; +export const fromEnv = (envVarSelector, logger) => async () => { + try { + const config = envVarSelector(process.env); + if (config === undefined) { + throw new Error(); + } + return config; + } + catch (e) { + throw new CredentialsProviderError(e.message || `Not found in ENV: ${getSelectorName(envVarSelector.toString())}`, { logger }); + } +}; diff --git a/amplify/functions/downloadDocument/node_modules/@smithy/node-config-provider/dist-es/fromSharedConfigFiles.js b/amplify/functions/downloadDocument/node_modules/@smithy/node-config-provider/dist-es/fromSharedConfigFiles.js new file mode 100644 index 0000000..b6435ed --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@smithy/node-config-provider/dist-es/fromSharedConfigFiles.js @@ -0,0 +1,23 @@ +import { CredentialsProviderError } from "@smithy/property-provider"; +import { getProfileName, loadSharedConfigFiles } from "@smithy/shared-ini-file-loader"; +import { getSelectorName } from "./getSelectorName"; +export const fromSharedConfigFiles = (configSelector, { preferredFile = "config", ...init } = {}) => async () => { + const profile = getProfileName(init); + const { configFile, credentialsFile } = await loadSharedConfigFiles(init); + const profileFromCredentials = credentialsFile[profile] || {}; + const profileFromConfig = configFile[profile] || {}; + const 
mergedProfile = preferredFile === "config" + ? { ...profileFromCredentials, ...profileFromConfig } + : { ...profileFromConfig, ...profileFromCredentials }; + try { + const cfgFile = preferredFile === "config" ? configFile : credentialsFile; + const configValue = configSelector(mergedProfile, cfgFile); + if (configValue === undefined) { + throw new Error(); + } + return configValue; + } + catch (e) { + throw new CredentialsProviderError(e.message || `Not found in config files w/ profile [${profile}]: ${getSelectorName(configSelector.toString())}`, { logger: init.logger }); + } +}; diff --git a/amplify/functions/downloadDocument/node_modules/@smithy/node-config-provider/dist-es/fromStatic.js b/amplify/functions/downloadDocument/node_modules/@smithy/node-config-provider/dist-es/fromStatic.js new file mode 100644 index 0000000..c9f91ff --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@smithy/node-config-provider/dist-es/fromStatic.js @@ -0,0 +1,3 @@ +import { fromStatic as convertToProvider } from "@smithy/property-provider"; +const isFunction = (func) => typeof func === "function"; +export const fromStatic = (defaultValue) => isFunction(defaultValue) ? async () => await defaultValue() : convertToProvider(defaultValue); diff --git a/amplify/functions/downloadDocument/node_modules/@smithy/node-config-provider/dist-es/getSelectorName.js b/amplify/functions/downloadDocument/node_modules/@smithy/node-config-provider/dist-es/getSelectorName.js new file mode 100644 index 0000000..d5e0f78 --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@smithy/node-config-provider/dist-es/getSelectorName.js @@ -0,0 +1,12 @@ +export function getSelectorName(functionString) { + try { + const constants = new Set(Array.from(functionString.match(/([A-Z_]){3,}/g) ?? 
[])); + constants.delete("CONFIG"); + constants.delete("CONFIG_PREFIX_SEPARATOR"); + constants.delete("ENV"); + return [...constants].join(", "); + } + catch (e) { + return functionString; + } +} diff --git a/amplify/functions/downloadDocument/node_modules/@smithy/node-config-provider/dist-es/index.js b/amplify/functions/downloadDocument/node_modules/@smithy/node-config-provider/dist-es/index.js new file mode 100644 index 0000000..2d035d9 --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@smithy/node-config-provider/dist-es/index.js @@ -0,0 +1 @@ +export * from "./configLoader"; diff --git a/amplify/functions/downloadDocument/node_modules/@smithy/node-config-provider/dist-types/configLoader.d.ts b/amplify/functions/downloadDocument/node_modules/@smithy/node-config-provider/dist-types/configLoader.d.ts new file mode 100644 index 0000000..0d0b232 --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@smithy/node-config-provider/dist-types/configLoader.d.ts @@ -0,0 +1,31 @@ +import { Provider } from "@smithy/types"; +import { GetterFromEnv } from "./fromEnv"; +import { GetterFromConfig, SharedConfigInit } from "./fromSharedConfigFiles"; +import { FromStaticConfig } from "./fromStatic"; +/** + * @internal + */ +export type LocalConfigOptions = SharedConfigInit; +/** + * @internal + */ +export interface LoadedConfigSelectors { + /** + * A getter function getting the config values from all the environment + * variables. 
+ */ + environmentVariableSelector: GetterFromEnv; + /** + * A getter function getting config values associated with the inferred + * profile from shared INI files + */ + configFileSelector: GetterFromConfig; + /** + * Default value or getter + */ + default: FromStaticConfig; +} +/** + * @internal + */ +export declare const loadConfig: ({ environmentVariableSelector, configFileSelector, default: defaultValue }: LoadedConfigSelectors, configuration?: LocalConfigOptions) => Provider; diff --git a/amplify/functions/downloadDocument/node_modules/@smithy/node-config-provider/dist-types/fromEnv.d.ts b/amplify/functions/downloadDocument/node_modules/@smithy/node-config-provider/dist-types/fromEnv.d.ts new file mode 100644 index 0000000..b2454c6 --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@smithy/node-config-provider/dist-types/fromEnv.d.ts @@ -0,0 +1,7 @@ +import { Logger, Provider } from "@smithy/types"; +export type GetterFromEnv = (env: Record) => T | undefined; +/** + * Get config value given the environment variable name or getter from + * environment variable. + */ +export declare const fromEnv: (envVarSelector: GetterFromEnv, logger?: Logger) => Provider; diff --git a/amplify/functions/downloadDocument/node_modules/@smithy/node-config-provider/dist-types/fromSharedConfigFiles.d.ts b/amplify/functions/downloadDocument/node_modules/@smithy/node-config-provider/dist-types/fromSharedConfigFiles.d.ts new file mode 100644 index 0000000..89a8eac --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@smithy/node-config-provider/dist-types/fromSharedConfigFiles.d.ts @@ -0,0 +1,22 @@ +import { SourceProfileInit } from "@smithy/shared-ini-file-loader"; +import { ParsedIniData, Profile, Provider } from "@smithy/types"; +/** + * @internal + */ +export interface SharedConfigInit extends SourceProfileInit { + /** + * The preferred shared ini file to load the config. 
"config" option refers to + * the shared config file(defaults to `~/.aws/config`). "credentials" option + * refers to the shared credentials file(defaults to `~/.aws/credentials`) + */ + preferredFile?: "config" | "credentials"; +} +/** + * @internal + */ +export type GetterFromConfig = (profile: Profile, configFile?: ParsedIniData) => T | undefined; +/** + * Get config value from the shared config files with inferred profile name. + * @internal + */ +export declare const fromSharedConfigFiles: (configSelector: GetterFromConfig, { preferredFile, ...init }?: SharedConfigInit) => Provider; diff --git a/amplify/functions/downloadDocument/node_modules/@smithy/node-config-provider/dist-types/fromStatic.d.ts b/amplify/functions/downloadDocument/node_modules/@smithy/node-config-provider/dist-types/fromStatic.d.ts new file mode 100644 index 0000000..d2c32a4 --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@smithy/node-config-provider/dist-types/fromStatic.d.ts @@ -0,0 +1,9 @@ +import { Provider } from "@smithy/types"; +/** + * @internal + */ +export type FromStaticConfig = T | (() => T) | Provider; +/** + * @internal + */ +export declare const fromStatic: (defaultValue: FromStaticConfig) => Provider; diff --git a/amplify/functions/downloadDocument/node_modules/@smithy/node-config-provider/dist-types/getSelectorName.d.ts b/amplify/functions/downloadDocument/node_modules/@smithy/node-config-provider/dist-types/getSelectorName.d.ts new file mode 100644 index 0000000..b5f1a1b --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@smithy/node-config-provider/dist-types/getSelectorName.d.ts @@ -0,0 +1,9 @@ +/** + * Attempts to extract the name of the variable that the functional selector is looking for. + * Improves readability over the raw Function.toString() value. + * @internal + * @param functionString - function's string representation. + * + * @returns constant value used within the function. 
+ */ +export declare function getSelectorName(functionString: string): string; diff --git a/amplify/functions/downloadDocument/node_modules/@smithy/node-config-provider/dist-types/index.d.ts b/amplify/functions/downloadDocument/node_modules/@smithy/node-config-provider/dist-types/index.d.ts new file mode 100644 index 0000000..2d035d9 --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@smithy/node-config-provider/dist-types/index.d.ts @@ -0,0 +1 @@ +export * from "./configLoader"; diff --git a/amplify/functions/downloadDocument/node_modules/@smithy/node-config-provider/dist-types/ts3.4/configLoader.d.ts b/amplify/functions/downloadDocument/node_modules/@smithy/node-config-provider/dist-types/ts3.4/configLoader.d.ts new file mode 100644 index 0000000..e877731 --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@smithy/node-config-provider/dist-types/ts3.4/configLoader.d.ts @@ -0,0 +1,31 @@ +import { Provider } from "@smithy/types"; +import { GetterFromEnv } from "./fromEnv"; +import { GetterFromConfig, SharedConfigInit } from "./fromSharedConfigFiles"; +import { FromStaticConfig } from "./fromStatic"; +/** + * @internal + */ +export type LocalConfigOptions = SharedConfigInit; +/** + * @internal + */ +export interface LoadedConfigSelectors { + /** + * A getter function getting the config values from all the environment + * variables. 
+ */ + environmentVariableSelector: GetterFromEnv; + /** + * A getter function getting config values associated with the inferred + * profile from shared INI files + */ + configFileSelector: GetterFromConfig; + /** + * Default value or getter + */ + default: FromStaticConfig; +} +/** + * @internal + */ +export declare const loadConfig: ({ environmentVariableSelector, configFileSelector, default: defaultValue }: LoadedConfigSelectors, configuration?: LocalConfigOptions) => Provider; diff --git a/amplify/functions/downloadDocument/node_modules/@smithy/node-config-provider/dist-types/ts3.4/fromEnv.d.ts b/amplify/functions/downloadDocument/node_modules/@smithy/node-config-provider/dist-types/ts3.4/fromEnv.d.ts new file mode 100644 index 0000000..e0a4cc7 --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@smithy/node-config-provider/dist-types/ts3.4/fromEnv.d.ts @@ -0,0 +1,7 @@ +import { Logger, Provider } from "@smithy/types"; +export type GetterFromEnv = (env: Record) => T | undefined; +/** + * Get config value given the environment variable name or getter from + * environment variable. + */ +export declare const fromEnv: (envVarSelector: GetterFromEnv, logger?: Logger) => Provider; diff --git a/amplify/functions/downloadDocument/node_modules/@smithy/node-config-provider/dist-types/ts3.4/fromSharedConfigFiles.d.ts b/amplify/functions/downloadDocument/node_modules/@smithy/node-config-provider/dist-types/ts3.4/fromSharedConfigFiles.d.ts new file mode 100644 index 0000000..aa0efa0 --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@smithy/node-config-provider/dist-types/ts3.4/fromSharedConfigFiles.d.ts @@ -0,0 +1,22 @@ +import { SourceProfileInit } from "@smithy/shared-ini-file-loader"; +import { ParsedIniData, Profile, Provider } from "@smithy/types"; +/** + * @internal + */ +export interface SharedConfigInit extends SourceProfileInit { + /** + * The preferred shared ini file to load the config. 
"config" option refers to + * the shared config file(defaults to `~/.aws/config`). "credentials" option + * refers to the shared credentials file(defaults to `~/.aws/credentials`) + */ + preferredFile?: "config" | "credentials"; +} +/** + * @internal + */ +export type GetterFromConfig = (profile: Profile, configFile?: ParsedIniData) => T | undefined; +/** + * Get config value from the shared config files with inferred profile name. + * @internal + */ +export declare const fromSharedConfigFiles: (configSelector: GetterFromConfig, { preferredFile, ...init }?: SharedConfigInit) => Provider; diff --git a/amplify/functions/downloadDocument/node_modules/@smithy/node-config-provider/dist-types/ts3.4/fromStatic.d.ts b/amplify/functions/downloadDocument/node_modules/@smithy/node-config-provider/dist-types/ts3.4/fromStatic.d.ts new file mode 100644 index 0000000..a4bab2d --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@smithy/node-config-provider/dist-types/ts3.4/fromStatic.d.ts @@ -0,0 +1,9 @@ +import { Provider } from "@smithy/types"; +/** + * @internal + */ +export type FromStaticConfig = T | (() => T) | Provider; +/** + * @internal + */ +export declare const fromStatic: (defaultValue: FromStaticConfig) => Provider; diff --git a/amplify/functions/downloadDocument/node_modules/@smithy/node-config-provider/dist-types/ts3.4/getSelectorName.d.ts b/amplify/functions/downloadDocument/node_modules/@smithy/node-config-provider/dist-types/ts3.4/getSelectorName.d.ts new file mode 100644 index 0000000..11c5da2 --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@smithy/node-config-provider/dist-types/ts3.4/getSelectorName.d.ts @@ -0,0 +1,9 @@ +/** + * Attempts to extract the name of the variable that the functional selector is looking for. + * Improves readability over the raw Function.toString() value. + * @internal + * @param functionString - function's string representation. + * + * @returns constant value used within the function. 
+ */ +export declare function getSelectorName(functionString: string): string; diff --git a/amplify/functions/downloadDocument/node_modules/@smithy/node-config-provider/dist-types/ts3.4/index.d.ts b/amplify/functions/downloadDocument/node_modules/@smithy/node-config-provider/dist-types/ts3.4/index.d.ts new file mode 100644 index 0000000..74a76f5 --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@smithy/node-config-provider/dist-types/ts3.4/index.d.ts @@ -0,0 +1 @@ +export * from "./configLoader"; diff --git a/amplify/functions/downloadDocument/node_modules/@smithy/node-config-provider/package.json b/amplify/functions/downloadDocument/node_modules/@smithy/node-config-provider/package.json new file mode 100644 index 0000000..3002d8e --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@smithy/node-config-provider/package.json @@ -0,0 +1,65 @@ +{ + "name": "@smithy/node-config-provider", + "version": "4.0.2", + "description": "Load config default values from ini config files and environmental variable", + "scripts": { + "build": "concurrently 'yarn:build:cjs' 'yarn:build:es' 'yarn:build:types && yarn build:types:downlevel'", + "build:cjs": "node ../../scripts/inline node-config-provider", + "build:es": "yarn g:tsc -p tsconfig.es.json", + "build:types": "yarn g:tsc -p tsconfig.types.json", + "build:types:downlevel": "rimraf dist-types/ts3.4 && downlevel-dts dist-types dist-types/ts3.4", + "stage-release": "rimraf ./.release && yarn pack && mkdir ./.release && tar zxvf ./package.tgz --directory ./.release && rm ./package.tgz", + "clean": "rimraf ./dist-* && rimraf *.tsbuildinfo || exit 0", + "lint": "eslint -c ../../.eslintrc.js \"src/**/*.ts\"", + "format": "prettier --config ../../prettier.config.js --ignore-path ../../.prettierignore --write \"**/*.{ts,md,json}\"", + "test": "yarn g:vitest run", + "test:watch": "yarn g:vitest watch" + }, + "author": { + "name": "AWS SDK for JavaScript Team", + "email": "", + "url": 
"https://aws.amazon.com/javascript/" + }, + "license": "Apache-2.0", + "main": "./dist-cjs/index.js", + "module": "./dist-es/index.js", + "types": "./dist-types/index.d.ts", + "dependencies": { + "@smithy/property-provider": "^4.0.2", + "@smithy/shared-ini-file-loader": "^4.0.2", + "@smithy/types": "^4.2.0", + "tslib": "^2.6.2" + }, + "devDependencies": { + "@types/node": "^18.11.9", + "concurrently": "7.0.0", + "downlevel-dts": "0.10.1", + "rimraf": "3.0.2", + "typedoc": "0.23.23" + }, + "engines": { + "node": ">=18.0.0" + }, + "typesVersions": { + "<4.0": { + "dist-types/*": [ + "dist-types/ts3.4/*" + ] + } + }, + "files": [ + "dist-*/**" + ], + "homepage": "https://github.com/smithy-lang/smithy-typescript/tree/main/packages/node-config-provider", + "repository": { + "type": "git", + "url": "https://github.com/smithy-lang/smithy-typescript.git", + "directory": "packages/node-config-provider" + }, + "typedoc": { + "entryPoint": "src/index.ts" + }, + "publishConfig": { + "directory": ".release/package" + } +} \ No newline at end of file diff --git a/amplify/functions/downloadDocument/node_modules/@smithy/node-http-handler/LICENSE b/amplify/functions/downloadDocument/node_modules/@smithy/node-http-handler/LICENSE new file mode 100644 index 0000000..7b6491b --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@smithy/node-http-handler/LICENSE @@ -0,0 +1,201 @@ +Apache License + Version 2.0, January 2004 + http://www.apache.org/licenses/ + + TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION + + 1. Definitions. + + "License" shall mean the terms and conditions for use, reproduction, + and distribution as defined by Sections 1 through 9 of this document. + + "Licensor" shall mean the copyright owner or entity authorized by + the copyright owner that is granting the License. + + "Legal Entity" shall mean the union of the acting entity and all + other entities that control, are controlled by, or are under common + control with that entity. 
For the purposes of this definition, + "control" means (i) the power, direct or indirect, to cause the + direction or management of such entity, whether by contract or + otherwise, or (ii) ownership of fifty percent (50%) or more of the + outstanding shares, or (iii) beneficial ownership of such entity. + + "You" (or "Your") shall mean an individual or Legal Entity + exercising permissions granted by this License. + + "Source" form shall mean the preferred form for making modifications, + including but not limited to software source code, documentation + source, and configuration files. + + "Object" form shall mean any form resulting from mechanical + transformation or translation of a Source form, including but + not limited to compiled object code, generated documentation, + and conversions to other media types. + + "Work" shall mean the work of authorship, whether in Source or + Object form, made available under the License, as indicated by a + copyright notice that is included in or attached to the work + (an example is provided in the Appendix below). + + "Derivative Works" shall mean any work, whether in Source or Object + form, that is based on (or derived from) the Work and for which the + editorial revisions, annotations, elaborations, or other modifications + represent, as a whole, an original work of authorship. For the purposes + of this License, Derivative Works shall not include works that remain + separable from, or merely link (or bind by name) to the interfaces of, + the Work and Derivative Works thereof. + + "Contribution" shall mean any work of authorship, including + the original version of the Work and any modifications or additions + to that Work or Derivative Works thereof, that is intentionally + submitted to Licensor for inclusion in the Work by the copyright owner + or by an individual or Legal Entity authorized to submit on behalf of + the copyright owner. 
For the purposes of this definition, "submitted" + means any form of electronic, verbal, or written communication sent + to the Licensor or its representatives, including but not limited to + communication on electronic mailing lists, source code control systems, + and issue tracking systems that are managed by, or on behalf of, the + Licensor for the purpose of discussing and improving the Work, but + excluding communication that is conspicuously marked or otherwise + designated in writing by the copyright owner as "Not a Contribution." + + "Contributor" shall mean Licensor and any individual or Legal Entity + on behalf of whom a Contribution has been received by Licensor and + subsequently incorporated within the Work. + + 2. Grant of Copyright License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + copyright license to reproduce, prepare Derivative Works of, + publicly display, publicly perform, sublicense, and distribute the + Work and such Derivative Works in Source or Object form. + + 3. Grant of Patent License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + (except as stated in this section) patent license to make, have made, + use, offer to sell, sell, import, and otherwise transfer the Work, + where such license applies only to those patent claims licensable + by such Contributor that are necessarily infringed by their + Contribution(s) alone or by combination of their Contribution(s) + with the Work to which such Contribution(s) was submitted. 
If You + institute patent litigation against any entity (including a + cross-claim or counterclaim in a lawsuit) alleging that the Work + or a Contribution incorporated within the Work constitutes direct + or contributory patent infringement, then any patent licenses + granted to You under this License for that Work shall terminate + as of the date such litigation is filed. + + 4. Redistribution. You may reproduce and distribute copies of the + Work or Derivative Works thereof in any medium, with or without + modifications, and in Source or Object form, provided that You + meet the following conditions: + + (a) You must give any other recipients of the Work or + Derivative Works a copy of this License; and + + (b) You must cause any modified files to carry prominent notices + stating that You changed the files; and + + (c) You must retain, in the Source form of any Derivative Works + that You distribute, all copyright, patent, trademark, and + attribution notices from the Source form of the Work, + excluding those notices that do not pertain to any part of + the Derivative Works; and + + (d) If the Work includes a "NOTICE" text file as part of its + distribution, then any Derivative Works that You distribute must + include a readable copy of the attribution notices contained + within such NOTICE file, excluding those notices that do not + pertain to any part of the Derivative Works, in at least one + of the following places: within a NOTICE text file distributed + as part of the Derivative Works; within the Source form or + documentation, if provided along with the Derivative Works; or, + within a display generated by the Derivative Works, if and + wherever such third-party notices normally appear. The contents + of the NOTICE file are for informational purposes only and + do not modify the License. 
You may add Your own attribution + notices within Derivative Works that You distribute, alongside + or as an addendum to the NOTICE text from the Work, provided + that such additional attribution notices cannot be construed + as modifying the License. + + You may add Your own copyright statement to Your modifications and + may provide additional or different license terms and conditions + for use, reproduction, or distribution of Your modifications, or + for any such Derivative Works as a whole, provided Your use, + reproduction, and distribution of the Work otherwise complies with + the conditions stated in this License. + + 5. Submission of Contributions. Unless You explicitly state otherwise, + any Contribution intentionally submitted for inclusion in the Work + by You to the Licensor shall be under the terms and conditions of + this License, without any additional terms or conditions. + Notwithstanding the above, nothing herein shall supersede or modify + the terms of any separate license agreement you may have executed + with Licensor regarding such Contributions. + + 6. Trademarks. This License does not grant permission to use the trade + names, trademarks, service marks, or product names of the Licensor, + except as required for reasonable and customary use in describing the + origin of the Work and reproducing the content of the NOTICE file. + + 7. Disclaimer of Warranty. Unless required by applicable law or + agreed to in writing, Licensor provides the Work (and each + Contributor provides its Contributions) on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or + implied, including, without limitation, any warranties or conditions + of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A + PARTICULAR PURPOSE. You are solely responsible for determining the + appropriateness of using or redistributing the Work and assume any + risks associated with Your exercise of permissions under this License. + + 8. 
Limitation of Liability. In no event and under no legal theory, + whether in tort (including negligence), contract, or otherwise, + unless required by applicable law (such as deliberate and grossly + negligent acts) or agreed to in writing, shall any Contributor be + liable to You for damages, including any direct, indirect, special, + incidental, or consequential damages of any character arising as a + result of this License or out of the use or inability to use the + Work (including but not limited to damages for loss of goodwill, + work stoppage, computer failure or malfunction, or any and all + other commercial damages or losses), even if such Contributor + has been advised of the possibility of such damages. + + 9. Accepting Warranty or Additional Liability. While redistributing + the Work or Derivative Works thereof, You may choose to offer, + and charge a fee for, acceptance of support, warranty, indemnity, + or other liability obligations and/or rights consistent with this + License. However, in accepting such obligations, You may act only + on Your own behalf and on Your sole responsibility, not on behalf + of any other Contributor, and only if You agree to indemnify, + defend, and hold each Contributor harmless for any liability + incurred by, or claims asserted against, such Contributor by reason + of your accepting any such warranty or additional liability. + + END OF TERMS AND CONDITIONS + + APPENDIX: How to apply the Apache License to your work. + + To apply the Apache License to your work, attach the following + boilerplate notice, with the fields enclosed by brackets "{}" + replaced with your own identifying information. (Don't include + the brackets!) The text should be enclosed in the appropriate + comment syntax for the file format. We also recommend that a + file or class name and description of purpose be included on the + same "printed page" as the copyright notice for easier + identification within third-party archives. 
+ + Copyright 2018-2020 Amazon.com, Inc. or its affiliates. All Rights Reserved. + + Licensed under the Apache License, Version 2.0 (the "License"); + you may not use this file except in compliance with the License. + You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + + Unless required by applicable law or agreed to in writing, software + distributed under the License is distributed on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + See the License for the specific language governing permissions and + limitations under the License. \ No newline at end of file diff --git a/amplify/functions/downloadDocument/node_modules/@smithy/node-http-handler/README.md b/amplify/functions/downloadDocument/node_modules/@smithy/node-http-handler/README.md new file mode 100644 index 0000000..214719f --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@smithy/node-http-handler/README.md @@ -0,0 +1,9 @@ +# @smithy/node-http-handler + +[![NPM version](https://img.shields.io/npm/v/@smithy/node-http-handler/latest.svg)](https://www.npmjs.com/package/@smithy/node-http-handler) +[![NPM downloads](https://img.shields.io/npm/dm/@smithy/node-http-handler.svg)](https://www.npmjs.com/package/@smithy/node-http-handler) + +This package implements the default `requestHandler` for Node.js using `node:http`, `node:https`, and `node:http2`. + +For an example on how `requestHandler`s are used by Smithy generated SDK clients, refer to +the [AWS SDK for JavaScript (v3) supplemental docs](https://github.com/aws/aws-sdk-js-v3/blob/main/supplemental-docs/CLIENTS.md#request-handler-requesthandler). 
diff --git a/amplify/functions/downloadDocument/node_modules/@smithy/node-http-handler/dist-cjs/constants.js b/amplify/functions/downloadDocument/node_modules/@smithy/node-http-handler/dist-cjs/constants.js new file mode 100644 index 0000000..532e610 --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@smithy/node-http-handler/dist-cjs/constants.js @@ -0,0 +1 @@ +module.exports = require("./index.js"); \ No newline at end of file diff --git a/amplify/functions/downloadDocument/node_modules/@smithy/node-http-handler/dist-cjs/get-transformed-headers.js b/amplify/functions/downloadDocument/node_modules/@smithy/node-http-handler/dist-cjs/get-transformed-headers.js new file mode 100644 index 0000000..532e610 --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@smithy/node-http-handler/dist-cjs/get-transformed-headers.js @@ -0,0 +1 @@ +module.exports = require("./index.js"); \ No newline at end of file diff --git a/amplify/functions/downloadDocument/node_modules/@smithy/node-http-handler/dist-cjs/index.js b/amplify/functions/downloadDocument/node_modules/@smithy/node-http-handler/dist-cjs/index.js new file mode 100644 index 0000000..e31976f --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@smithy/node-http-handler/dist-cjs/index.js @@ -0,0 +1,806 @@ +var __create = Object.create; +var __defProp = Object.defineProperty; +var __getOwnPropDesc = Object.getOwnPropertyDescriptor; +var __getOwnPropNames = Object.getOwnPropertyNames; +var __getProtoOf = Object.getPrototypeOf; +var __hasOwnProp = Object.prototype.hasOwnProperty; +var __name = (target, value) => __defProp(target, "name", { value, configurable: true }); +var __export = (target, all) => { + for (var name in all) + __defProp(target, name, { get: all[name], enumerable: true }); +}; +var __copyProps = (to, from, except, desc) => { + if (from && typeof from === "object" || typeof from === "function") { + for (let key of __getOwnPropNames(from)) + if 
(!__hasOwnProp.call(to, key) && key !== except) + __defProp(to, key, { get: () => from[key], enumerable: !(desc = __getOwnPropDesc(from, key)) || desc.enumerable }); + } + return to; +}; +var __toESM = (mod, isNodeMode, target) => (target = mod != null ? __create(__getProtoOf(mod)) : {}, __copyProps( + // If the importer is in node compatibility mode or this is not an ESM + // file that has been converted to a CommonJS file using a Babel- + // compatible transform (i.e. "__esModule" has not been set), then set + // "default" to the CommonJS "module.exports" for node compatibility. + isNodeMode || !mod || !mod.__esModule ? __defProp(target, "default", { value: mod, enumerable: true }) : target, + mod +)); +var __toCommonJS = (mod) => __copyProps(__defProp({}, "__esModule", { value: true }), mod); + +// src/index.ts +var src_exports = {}; +__export(src_exports, { + DEFAULT_REQUEST_TIMEOUT: () => DEFAULT_REQUEST_TIMEOUT, + NodeHttp2Handler: () => NodeHttp2Handler, + NodeHttpHandler: () => NodeHttpHandler, + streamCollector: () => streamCollector +}); +module.exports = __toCommonJS(src_exports); + +// src/node-http-handler.ts +var import_protocol_http = require("@smithy/protocol-http"); +var import_querystring_builder = require("@smithy/querystring-builder"); +var import_http = require("http"); +var import_https = require("https"); + +// src/constants.ts +var NODEJS_TIMEOUT_ERROR_CODES = ["ECONNRESET", "EPIPE", "ETIMEDOUT"]; + +// src/get-transformed-headers.ts +var getTransformedHeaders = /* @__PURE__ */ __name((headers) => { + const transformedHeaders = {}; + for (const name of Object.keys(headers)) { + const headerValues = headers[name]; + transformedHeaders[name] = Array.isArray(headerValues) ? 
headerValues.join(",") : headerValues; + } + return transformedHeaders; +}, "getTransformedHeaders"); + +// src/timing.ts +var timing = { + setTimeout: (cb, ms) => setTimeout(cb, ms), + clearTimeout: (timeoutId) => clearTimeout(timeoutId) +}; + +// src/set-connection-timeout.ts +var DEFER_EVENT_LISTENER_TIME = 1e3; +var setConnectionTimeout = /* @__PURE__ */ __name((request, reject, timeoutInMs = 0) => { + if (!timeoutInMs) { + return -1; + } + const registerTimeout = /* @__PURE__ */ __name((offset) => { + const timeoutId = timing.setTimeout(() => { + request.destroy(); + reject( + Object.assign(new Error(`Socket timed out without establishing a connection within ${timeoutInMs} ms`), { + name: "TimeoutError" + }) + ); + }, timeoutInMs - offset); + const doWithSocket = /* @__PURE__ */ __name((socket) => { + if (socket?.connecting) { + socket.on("connect", () => { + timing.clearTimeout(timeoutId); + }); + } else { + timing.clearTimeout(timeoutId); + } + }, "doWithSocket"); + if (request.socket) { + doWithSocket(request.socket); + } else { + request.on("socket", doWithSocket); + } + }, "registerTimeout"); + if (timeoutInMs < 2e3) { + registerTimeout(0); + return 0; + } + return timing.setTimeout(registerTimeout.bind(null, DEFER_EVENT_LISTENER_TIME), DEFER_EVENT_LISTENER_TIME); +}, "setConnectionTimeout"); + +// src/set-socket-keep-alive.ts +var DEFER_EVENT_LISTENER_TIME2 = 3e3; +var setSocketKeepAlive = /* @__PURE__ */ __name((request, { keepAlive, keepAliveMsecs }, deferTimeMs = DEFER_EVENT_LISTENER_TIME2) => { + if (keepAlive !== true) { + return -1; + } + const registerListener = /* @__PURE__ */ __name(() => { + if (request.socket) { + request.socket.setKeepAlive(keepAlive, keepAliveMsecs || 0); + } else { + request.on("socket", (socket) => { + socket.setKeepAlive(keepAlive, keepAliveMsecs || 0); + }); + } + }, "registerListener"); + if (deferTimeMs === 0) { + registerListener(); + return 0; + } + return timing.setTimeout(registerListener, deferTimeMs); +}, 
"setSocketKeepAlive"); + +// src/set-socket-timeout.ts +var DEFER_EVENT_LISTENER_TIME3 = 3e3; +var setSocketTimeout = /* @__PURE__ */ __name((request, reject, timeoutInMs = DEFAULT_REQUEST_TIMEOUT) => { + const registerTimeout = /* @__PURE__ */ __name((offset) => { + const timeout = timeoutInMs - offset; + const onTimeout = /* @__PURE__ */ __name(() => { + request.destroy(); + reject(Object.assign(new Error(`Connection timed out after ${timeoutInMs} ms`), { name: "TimeoutError" })); + }, "onTimeout"); + if (request.socket) { + request.socket.setTimeout(timeout, onTimeout); + request.on("close", () => request.socket?.removeListener("timeout", onTimeout)); + } else { + request.setTimeout(timeout, onTimeout); + } + }, "registerTimeout"); + if (0 < timeoutInMs && timeoutInMs < 6e3) { + registerTimeout(0); + return 0; + } + return timing.setTimeout( + registerTimeout.bind(null, timeoutInMs === 0 ? 0 : DEFER_EVENT_LISTENER_TIME3), + DEFER_EVENT_LISTENER_TIME3 + ); +}, "setSocketTimeout"); + +// src/write-request-body.ts +var import_stream = require("stream"); +var MIN_WAIT_TIME = 6e3; +async function writeRequestBody(httpRequest, request, maxContinueTimeoutMs = MIN_WAIT_TIME) { + const headers = request.headers ?? 
{}; + const expect = headers["Expect"] || headers["expect"]; + let timeoutId = -1; + let sendBody = true; + if (expect === "100-continue") { + sendBody = await Promise.race([ + new Promise((resolve) => { + timeoutId = Number(timing.setTimeout(() => resolve(true), Math.max(MIN_WAIT_TIME, maxContinueTimeoutMs))); + }), + new Promise((resolve) => { + httpRequest.on("continue", () => { + timing.clearTimeout(timeoutId); + resolve(true); + }); + httpRequest.on("response", () => { + timing.clearTimeout(timeoutId); + resolve(false); + }); + httpRequest.on("error", () => { + timing.clearTimeout(timeoutId); + resolve(false); + }); + }) + ]); + } + if (sendBody) { + writeBody(httpRequest, request.body); + } +} +__name(writeRequestBody, "writeRequestBody"); +function writeBody(httpRequest, body) { + if (body instanceof import_stream.Readable) { + body.pipe(httpRequest); + return; + } + if (body) { + if (Buffer.isBuffer(body) || typeof body === "string") { + httpRequest.end(body); + return; + } + const uint8 = body; + if (typeof uint8 === "object" && uint8.buffer && typeof uint8.byteOffset === "number" && typeof uint8.byteLength === "number") { + httpRequest.end(Buffer.from(uint8.buffer, uint8.byteOffset, uint8.byteLength)); + return; + } + httpRequest.end(Buffer.from(body)); + return; + } + httpRequest.end(); +} +__name(writeBody, "writeBody"); + +// src/node-http-handler.ts +var DEFAULT_REQUEST_TIMEOUT = 0; +var NodeHttpHandler = class _NodeHttpHandler { + constructor(options) { + this.socketWarningTimestamp = 0; + // Node http handler is hard-coded to http/1.1: https://github.com/nodejs/node/blob/ff5664b83b89c55e4ab5d5f60068fb457f1f5872/lib/_http_server.js#L286 + this.metadata = { handlerProtocol: "http/1.1" }; + this.configProvider = new Promise((resolve, reject) => { + if (typeof options === "function") { + options().then((_options) => { + resolve(this.resolveDefaultConfig(_options)); + }).catch(reject); + } else { + resolve(this.resolveDefaultConfig(options)); + } + }); + 
} + static { + __name(this, "NodeHttpHandler"); + } + /** + * @returns the input if it is an HttpHandler of any class, + * or instantiates a new instance of this handler. + */ + static create(instanceOrOptions) { + if (typeof instanceOrOptions?.handle === "function") { + return instanceOrOptions; + } + return new _NodeHttpHandler(instanceOrOptions); + } + /** + * @internal + * + * @param agent - http(s) agent in use by the NodeHttpHandler instance. + * @param socketWarningTimestamp - last socket usage check timestamp. + * @param logger - channel for the warning. + * @returns timestamp of last emitted warning. + */ + static checkSocketUsage(agent, socketWarningTimestamp, logger = console) { + const { sockets, requests, maxSockets } = agent; + if (typeof maxSockets !== "number" || maxSockets === Infinity) { + return socketWarningTimestamp; + } + const interval = 15e3; + if (Date.now() - interval < socketWarningTimestamp) { + return socketWarningTimestamp; + } + if (sockets && requests) { + for (const origin in sockets) { + const socketsInUse = sockets[origin]?.length ?? 0; + const requestsEnqueued = requests[origin]?.length ?? 0; + if (socketsInUse >= maxSockets && requestsEnqueued >= 2 * maxSockets) { + logger?.warn?.( + `@smithy/node-http-handler:WARN - socket usage at capacity=${socketsInUse} and ${requestsEnqueued} additional requests are enqueued. +See https://docs.aws.amazon.com/sdk-for-javascript/v3/developer-guide/node-configuring-maxsockets.html +or increase socketAcquisitionWarningTimeout=(millis) in the NodeHttpHandler config.` + ); + return Date.now(); + } + } + } + return socketWarningTimestamp; + } + resolveDefaultConfig(options) { + const { requestTimeout, connectionTimeout, socketTimeout, socketAcquisitionWarningTimeout, httpAgent, httpsAgent } = options || {}; + const keepAlive = true; + const maxSockets = 50; + return { + connectionTimeout, + requestTimeout: requestTimeout ?? 
socketTimeout, + socketAcquisitionWarningTimeout, + httpAgent: (() => { + if (httpAgent instanceof import_http.Agent || typeof httpAgent?.destroy === "function") { + return httpAgent; + } + return new import_http.Agent({ keepAlive, maxSockets, ...httpAgent }); + })(), + httpsAgent: (() => { + if (httpsAgent instanceof import_https.Agent || typeof httpsAgent?.destroy === "function") { + return httpsAgent; + } + return new import_https.Agent({ keepAlive, maxSockets, ...httpsAgent }); + })(), + logger: console + }; + } + destroy() { + this.config?.httpAgent?.destroy(); + this.config?.httpsAgent?.destroy(); + } + async handle(request, { abortSignal } = {}) { + if (!this.config) { + this.config = await this.configProvider; + } + return new Promise((_resolve, _reject) => { + let writeRequestBodyPromise = void 0; + const timeouts = []; + const resolve = /* @__PURE__ */ __name(async (arg) => { + await writeRequestBodyPromise; + timeouts.forEach(timing.clearTimeout); + _resolve(arg); + }, "resolve"); + const reject = /* @__PURE__ */ __name(async (arg) => { + await writeRequestBodyPromise; + timeouts.forEach(timing.clearTimeout); + _reject(arg); + }, "reject"); + if (!this.config) { + throw new Error("Node HTTP request handler config is not resolved"); + } + if (abortSignal?.aborted) { + const abortError = new Error("Request aborted"); + abortError.name = "AbortError"; + reject(abortError); + return; + } + const isSSL = request.protocol === "https:"; + const agent = isSSL ? this.config.httpsAgent : this.config.httpAgent; + timeouts.push( + timing.setTimeout( + () => { + this.socketWarningTimestamp = _NodeHttpHandler.checkSocketUsage( + agent, + this.socketWarningTimestamp, + this.config.logger + ); + }, + this.config.socketAcquisitionWarningTimeout ?? (this.config.requestTimeout ?? 2e3) + (this.config.connectionTimeout ?? 
1e3) + ) + ); + const queryString = (0, import_querystring_builder.buildQueryString)(request.query || {}); + let auth = void 0; + if (request.username != null || request.password != null) { + const username = request.username ?? ""; + const password = request.password ?? ""; + auth = `${username}:${password}`; + } + let path = request.path; + if (queryString) { + path += `?${queryString}`; + } + if (request.fragment) { + path += `#${request.fragment}`; + } + let hostname = request.hostname ?? ""; + if (hostname[0] === "[" && hostname.endsWith("]")) { + hostname = request.hostname.slice(1, -1); + } else { + hostname = request.hostname; + } + const nodeHttpsOptions = { + headers: request.headers, + host: hostname, + method: request.method, + path, + port: request.port, + agent, + auth + }; + const requestFunc = isSSL ? import_https.request : import_http.request; + const req = requestFunc(nodeHttpsOptions, (res) => { + const httpResponse = new import_protocol_http.HttpResponse({ + statusCode: res.statusCode || -1, + reason: res.statusMessage, + headers: getTransformedHeaders(res.headers), + body: res + }); + resolve({ response: httpResponse }); + }); + req.on("error", (err) => { + if (NODEJS_TIMEOUT_ERROR_CODES.includes(err.code)) { + reject(Object.assign(err, { name: "TimeoutError" })); + } else { + reject(err); + } + }); + if (abortSignal) { + const onAbort = /* @__PURE__ */ __name(() => { + req.destroy(); + const abortError = new Error("Request aborted"); + abortError.name = "AbortError"; + reject(abortError); + }, "onAbort"); + if (typeof abortSignal.addEventListener === "function") { + const signal = abortSignal; + signal.addEventListener("abort", onAbort, { once: true }); + req.once("close", () => signal.removeEventListener("abort", onAbort)); + } else { + abortSignal.onabort = onAbort; + } + } + timeouts.push(setConnectionTimeout(req, reject, this.config.connectionTimeout)); + timeouts.push(setSocketTimeout(req, reject, this.config.requestTimeout)); + const 
httpAgent = nodeHttpsOptions.agent; + if (typeof httpAgent === "object" && "keepAlive" in httpAgent) { + timeouts.push( + setSocketKeepAlive(req, { + // @ts-expect-error keepAlive is not public on httpAgent. + keepAlive: httpAgent.keepAlive, + // @ts-expect-error keepAliveMsecs is not public on httpAgent. + keepAliveMsecs: httpAgent.keepAliveMsecs + }) + ); + } + writeRequestBodyPromise = writeRequestBody(req, request, this.config.requestTimeout).catch((e) => { + timeouts.forEach(timing.clearTimeout); + return _reject(e); + }); + }); + } + updateHttpClientConfig(key, value) { + this.config = void 0; + this.configProvider = this.configProvider.then((config) => { + return { + ...config, + [key]: value + }; + }); + } + httpHandlerConfigs() { + return this.config ?? {}; + } +}; + +// src/node-http2-handler.ts + + +var import_http22 = require("http2"); + +// src/node-http2-connection-manager.ts +var import_http2 = __toESM(require("http2")); + +// src/node-http2-connection-pool.ts +var NodeHttp2ConnectionPool = class { + constructor(sessions) { + this.sessions = []; + this.sessions = sessions ?? 
[]; + } + static { + __name(this, "NodeHttp2ConnectionPool"); + } + poll() { + if (this.sessions.length > 0) { + return this.sessions.shift(); + } + } + offerLast(session) { + this.sessions.push(session); + } + contains(session) { + return this.sessions.includes(session); + } + remove(session) { + this.sessions = this.sessions.filter((s) => s !== session); + } + [Symbol.iterator]() { + return this.sessions[Symbol.iterator](); + } + destroy(connection) { + for (const session of this.sessions) { + if (session === connection) { + if (!session.destroyed) { + session.destroy(); + } + } + } + } +}; + +// src/node-http2-connection-manager.ts +var NodeHttp2ConnectionManager = class { + constructor(config) { + this.sessionCache = /* @__PURE__ */ new Map(); + this.config = config; + if (this.config.maxConcurrency && this.config.maxConcurrency <= 0) { + throw new RangeError("maxConcurrency must be greater than zero."); + } + } + static { + __name(this, "NodeHttp2ConnectionManager"); + } + lease(requestContext, connectionConfiguration) { + const url = this.getUrlString(requestContext); + const existingPool = this.sessionCache.get(url); + if (existingPool) { + const existingSession = existingPool.poll(); + if (existingSession && !this.config.disableConcurrency) { + return existingSession; + } + } + const session = import_http2.default.connect(url); + if (this.config.maxConcurrency) { + session.settings({ maxConcurrentStreams: this.config.maxConcurrency }, (err) => { + if (err) { + throw new Error( + "Fail to set maxConcurrentStreams to " + this.config.maxConcurrency + "when creating new session for " + requestContext.destination.toString() + ); + } + }); + } + session.unref(); + const destroySessionCb = /* @__PURE__ */ __name(() => { + session.destroy(); + this.deleteSession(url, session); + }, "destroySessionCb"); + session.on("goaway", destroySessionCb); + session.on("error", destroySessionCb); + session.on("frameError", destroySessionCb); + session.on("close", () => 
this.deleteSession(url, session)); + if (connectionConfiguration.requestTimeout) { + session.setTimeout(connectionConfiguration.requestTimeout, destroySessionCb); + } + const connectionPool = this.sessionCache.get(url) || new NodeHttp2ConnectionPool(); + connectionPool.offerLast(session); + this.sessionCache.set(url, connectionPool); + return session; + } + /** + * Delete a session from the connection pool. + * @param authority The authority of the session to delete. + * @param session The session to delete. + */ + deleteSession(authority, session) { + const existingConnectionPool = this.sessionCache.get(authority); + if (!existingConnectionPool) { + return; + } + if (!existingConnectionPool.contains(session)) { + return; + } + existingConnectionPool.remove(session); + this.sessionCache.set(authority, existingConnectionPool); + } + release(requestContext, session) { + const cacheKey = this.getUrlString(requestContext); + this.sessionCache.get(cacheKey)?.offerLast(session); + } + destroy() { + for (const [key, connectionPool] of this.sessionCache) { + for (const session of connectionPool) { + if (!session.destroyed) { + session.destroy(); + } + connectionPool.remove(session); + } + this.sessionCache.delete(key); + } + } + setMaxConcurrentStreams(maxConcurrentStreams) { + if (maxConcurrentStreams && maxConcurrentStreams <= 0) { + throw new RangeError("maxConcurrentStreams must be greater than zero."); + } + this.config.maxConcurrency = maxConcurrentStreams; + } + setDisableConcurrentStreams(disableConcurrentStreams) { + this.config.disableConcurrency = disableConcurrentStreams; + } + getUrlString(request) { + return request.destination.toString(); + } +}; + +// src/node-http2-handler.ts +var NodeHttp2Handler = class _NodeHttp2Handler { + constructor(options) { + this.metadata = { handlerProtocol: "h2" }; + this.connectionManager = new NodeHttp2ConnectionManager({}); + this.configProvider = new Promise((resolve, reject) => { + if (typeof options === "function") { + 
options().then((opts) => { + resolve(opts || {}); + }).catch(reject); + } else { + resolve(options || {}); + } + }); + } + static { + __name(this, "NodeHttp2Handler"); + } + /** + * @returns the input if it is an HttpHandler of any class, + * or instantiates a new instance of this handler. + */ + static create(instanceOrOptions) { + if (typeof instanceOrOptions?.handle === "function") { + return instanceOrOptions; + } + return new _NodeHttp2Handler(instanceOrOptions); + } + destroy() { + this.connectionManager.destroy(); + } + async handle(request, { abortSignal } = {}) { + if (!this.config) { + this.config = await this.configProvider; + this.connectionManager.setDisableConcurrentStreams(this.config.disableConcurrentStreams || false); + if (this.config.maxConcurrentStreams) { + this.connectionManager.setMaxConcurrentStreams(this.config.maxConcurrentStreams); + } + } + const { requestTimeout, disableConcurrentStreams } = this.config; + return new Promise((_resolve, _reject) => { + let fulfilled = false; + let writeRequestBodyPromise = void 0; + const resolve = /* @__PURE__ */ __name(async (arg) => { + await writeRequestBodyPromise; + _resolve(arg); + }, "resolve"); + const reject = /* @__PURE__ */ __name(async (arg) => { + await writeRequestBodyPromise; + _reject(arg); + }, "reject"); + if (abortSignal?.aborted) { + fulfilled = true; + const abortError = new Error("Request aborted"); + abortError.name = "AbortError"; + reject(abortError); + return; + } + const { hostname, method, port, protocol, query } = request; + let auth = ""; + if (request.username != null || request.password != null) { + const username = request.username ?? ""; + const password = request.password ?? ""; + auth = `${username}:${password}@`; + } + const authority = `${protocol}//${auth}${hostname}${port ? 
`:${port}` : ""}`; + const requestContext = { destination: new URL(authority) }; + const session = this.connectionManager.lease(requestContext, { + requestTimeout: this.config?.sessionTimeout, + disableConcurrentStreams: disableConcurrentStreams || false + }); + const rejectWithDestroy = /* @__PURE__ */ __name((err) => { + if (disableConcurrentStreams) { + this.destroySession(session); + } + fulfilled = true; + reject(err); + }, "rejectWithDestroy"); + const queryString = (0, import_querystring_builder.buildQueryString)(query || {}); + let path = request.path; + if (queryString) { + path += `?${queryString}`; + } + if (request.fragment) { + path += `#${request.fragment}`; + } + const req = session.request({ + ...request.headers, + [import_http22.constants.HTTP2_HEADER_PATH]: path, + [import_http22.constants.HTTP2_HEADER_METHOD]: method + }); + session.ref(); + req.on("response", (headers) => { + const httpResponse = new import_protocol_http.HttpResponse({ + statusCode: headers[":status"] || -1, + headers: getTransformedHeaders(headers), + body: req + }); + fulfilled = true; + resolve({ response: httpResponse }); + if (disableConcurrentStreams) { + session.close(); + this.connectionManager.deleteSession(authority, session); + } + }); + if (requestTimeout) { + req.setTimeout(requestTimeout, () => { + req.close(); + const timeoutError = new Error(`Stream timed out because of no activity for ${requestTimeout} ms`); + timeoutError.name = "TimeoutError"; + rejectWithDestroy(timeoutError); + }); + } + if (abortSignal) { + const onAbort = /* @__PURE__ */ __name(() => { + req.close(); + const abortError = new Error("Request aborted"); + abortError.name = "AbortError"; + rejectWithDestroy(abortError); + }, "onAbort"); + if (typeof abortSignal.addEventListener === "function") { + const signal = abortSignal; + signal.addEventListener("abort", onAbort, { once: true }); + req.once("close", () => signal.removeEventListener("abort", onAbort)); + } else { + abortSignal.onabort = 
onAbort; + } + } + req.on("frameError", (type, code, id) => { + rejectWithDestroy(new Error(`Frame type id ${type} in stream id ${id} has failed with code ${code}.`)); + }); + req.on("error", rejectWithDestroy); + req.on("aborted", () => { + rejectWithDestroy( + new Error(`HTTP/2 stream is abnormally aborted in mid-communication with result code ${req.rstCode}.`) + ); + }); + req.on("close", () => { + session.unref(); + if (disableConcurrentStreams) { + session.destroy(); + } + if (!fulfilled) { + rejectWithDestroy(new Error("Unexpected error: http2 request did not get a response")); + } + }); + writeRequestBodyPromise = writeRequestBody(req, request, requestTimeout); + }); + } + updateHttpClientConfig(key, value) { + this.config = void 0; + this.configProvider = this.configProvider.then((config) => { + return { + ...config, + [key]: value + }; + }); + } + httpHandlerConfigs() { + return this.config ?? {}; + } + /** + * Destroys a session. + * @param session - the session to destroy. + */ + destroySession(session) { + if (!session.destroyed) { + session.destroy(); + } + } +}; + +// src/stream-collector/collector.ts + +var Collector = class extends import_stream.Writable { + constructor() { + super(...arguments); + this.bufferedBytes = []; + } + static { + __name(this, "Collector"); + } + _write(chunk, encoding, callback) { + this.bufferedBytes.push(chunk); + callback(); + } +}; + +// src/stream-collector/index.ts +var streamCollector = /* @__PURE__ */ __name((stream) => { + if (isReadableStreamInstance(stream)) { + return collectReadableStream(stream); + } + return new Promise((resolve, reject) => { + const collector = new Collector(); + stream.pipe(collector); + stream.on("error", (err) => { + collector.end(); + reject(err); + }); + collector.on("error", reject); + collector.on("finish", function() { + const bytes = new Uint8Array(Buffer.concat(this.bufferedBytes)); + resolve(bytes); + }); + }); +}, "streamCollector"); +var isReadableStreamInstance = /* @__PURE__ 
*/ __name((stream) => typeof ReadableStream === "function" && stream instanceof ReadableStream, "isReadableStreamInstance"); +async function collectReadableStream(stream) { + const chunks = []; + const reader = stream.getReader(); + let isDone = false; + let length = 0; + while (!isDone) { + const { done, value } = await reader.read(); + if (value) { + chunks.push(value); + length += value.length; + } + isDone = done; + } + const collected = new Uint8Array(length); + let offset = 0; + for (const chunk of chunks) { + collected.set(chunk, offset); + offset += chunk.length; + } + return collected; +} +__name(collectReadableStream, "collectReadableStream"); +// Annotate the CommonJS export names for ESM import in node: + +0 && (module.exports = { + DEFAULT_REQUEST_TIMEOUT, + NodeHttpHandler, + NodeHttp2Handler, + streamCollector +}); + diff --git a/amplify/functions/downloadDocument/node_modules/@smithy/node-http-handler/dist-cjs/node-http-handler.js b/amplify/functions/downloadDocument/node_modules/@smithy/node-http-handler/dist-cjs/node-http-handler.js new file mode 100644 index 0000000..532e610 --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@smithy/node-http-handler/dist-cjs/node-http-handler.js @@ -0,0 +1 @@ +module.exports = require("./index.js"); \ No newline at end of file diff --git a/amplify/functions/downloadDocument/node_modules/@smithy/node-http-handler/dist-cjs/node-http2-connection-manager.js b/amplify/functions/downloadDocument/node_modules/@smithy/node-http-handler/dist-cjs/node-http2-connection-manager.js new file mode 100644 index 0000000..532e610 --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@smithy/node-http-handler/dist-cjs/node-http2-connection-manager.js @@ -0,0 +1 @@ +module.exports = require("./index.js"); \ No newline at end of file diff --git a/amplify/functions/downloadDocument/node_modules/@smithy/node-http-handler/dist-cjs/node-http2-connection-pool.js 
b/amplify/functions/downloadDocument/node_modules/@smithy/node-http-handler/dist-cjs/node-http2-connection-pool.js new file mode 100644 index 0000000..532e610 --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@smithy/node-http-handler/dist-cjs/node-http2-connection-pool.js @@ -0,0 +1 @@ +module.exports = require("./index.js"); \ No newline at end of file diff --git a/amplify/functions/downloadDocument/node_modules/@smithy/node-http-handler/dist-cjs/node-http2-handler.js b/amplify/functions/downloadDocument/node_modules/@smithy/node-http-handler/dist-cjs/node-http2-handler.js new file mode 100644 index 0000000..532e610 --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@smithy/node-http-handler/dist-cjs/node-http2-handler.js @@ -0,0 +1 @@ +module.exports = require("./index.js"); \ No newline at end of file diff --git a/amplify/functions/downloadDocument/node_modules/@smithy/node-http-handler/dist-cjs/readable.mock.js b/amplify/functions/downloadDocument/node_modules/@smithy/node-http-handler/dist-cjs/readable.mock.js new file mode 100644 index 0000000..532e610 --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@smithy/node-http-handler/dist-cjs/readable.mock.js @@ -0,0 +1 @@ +module.exports = require("./index.js"); \ No newline at end of file diff --git a/amplify/functions/downloadDocument/node_modules/@smithy/node-http-handler/dist-cjs/server.mock.js b/amplify/functions/downloadDocument/node_modules/@smithy/node-http-handler/dist-cjs/server.mock.js new file mode 100644 index 0000000..532e610 --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@smithy/node-http-handler/dist-cjs/server.mock.js @@ -0,0 +1 @@ +module.exports = require("./index.js"); \ No newline at end of file diff --git a/amplify/functions/downloadDocument/node_modules/@smithy/node-http-handler/dist-cjs/set-connection-timeout.js 
b/amplify/functions/downloadDocument/node_modules/@smithy/node-http-handler/dist-cjs/set-connection-timeout.js new file mode 100644 index 0000000..532e610 --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@smithy/node-http-handler/dist-cjs/set-connection-timeout.js @@ -0,0 +1 @@ +module.exports = require("./index.js"); \ No newline at end of file diff --git a/amplify/functions/downloadDocument/node_modules/@smithy/node-http-handler/dist-cjs/set-socket-keep-alive.js b/amplify/functions/downloadDocument/node_modules/@smithy/node-http-handler/dist-cjs/set-socket-keep-alive.js new file mode 100644 index 0000000..532e610 --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@smithy/node-http-handler/dist-cjs/set-socket-keep-alive.js @@ -0,0 +1 @@ +module.exports = require("./index.js"); \ No newline at end of file diff --git a/amplify/functions/downloadDocument/node_modules/@smithy/node-http-handler/dist-cjs/set-socket-timeout.js b/amplify/functions/downloadDocument/node_modules/@smithy/node-http-handler/dist-cjs/set-socket-timeout.js new file mode 100644 index 0000000..532e610 --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@smithy/node-http-handler/dist-cjs/set-socket-timeout.js @@ -0,0 +1 @@ +module.exports = require("./index.js"); \ No newline at end of file diff --git a/amplify/functions/downloadDocument/node_modules/@smithy/node-http-handler/dist-cjs/stream-collector/collector.js b/amplify/functions/downloadDocument/node_modules/@smithy/node-http-handler/dist-cjs/stream-collector/collector.js new file mode 100644 index 0000000..0440577 --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@smithy/node-http-handler/dist-cjs/stream-collector/collector.js @@ -0,0 +1 @@ +module.exports = require("../index.js"); \ No newline at end of file diff --git a/amplify/functions/downloadDocument/node_modules/@smithy/node-http-handler/dist-cjs/stream-collector/index.js 
b/amplify/functions/downloadDocument/node_modules/@smithy/node-http-handler/dist-cjs/stream-collector/index.js new file mode 100644 index 0000000..0440577 --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@smithy/node-http-handler/dist-cjs/stream-collector/index.js @@ -0,0 +1 @@ +module.exports = require("../index.js"); \ No newline at end of file diff --git a/amplify/functions/downloadDocument/node_modules/@smithy/node-http-handler/dist-cjs/stream-collector/readable.mock.js b/amplify/functions/downloadDocument/node_modules/@smithy/node-http-handler/dist-cjs/stream-collector/readable.mock.js new file mode 100644 index 0000000..0440577 --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@smithy/node-http-handler/dist-cjs/stream-collector/readable.mock.js @@ -0,0 +1 @@ +module.exports = require("../index.js"); \ No newline at end of file diff --git a/amplify/functions/downloadDocument/node_modules/@smithy/node-http-handler/dist-cjs/timing.js b/amplify/functions/downloadDocument/node_modules/@smithy/node-http-handler/dist-cjs/timing.js new file mode 100644 index 0000000..532e610 --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@smithy/node-http-handler/dist-cjs/timing.js @@ -0,0 +1 @@ +module.exports = require("./index.js"); \ No newline at end of file diff --git a/amplify/functions/downloadDocument/node_modules/@smithy/node-http-handler/dist-cjs/write-request-body.js b/amplify/functions/downloadDocument/node_modules/@smithy/node-http-handler/dist-cjs/write-request-body.js new file mode 100644 index 0000000..532e610 --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@smithy/node-http-handler/dist-cjs/write-request-body.js @@ -0,0 +1 @@ +module.exports = require("./index.js"); \ No newline at end of file diff --git a/amplify/functions/downloadDocument/node_modules/@smithy/node-http-handler/dist-es/constants.js 
b/amplify/functions/downloadDocument/node_modules/@smithy/node-http-handler/dist-es/constants.js new file mode 100644 index 0000000..0619d28 --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@smithy/node-http-handler/dist-es/constants.js @@ -0,0 +1 @@ +export const NODEJS_TIMEOUT_ERROR_CODES = ["ECONNRESET", "EPIPE", "ETIMEDOUT"]; diff --git a/amplify/functions/downloadDocument/node_modules/@smithy/node-http-handler/dist-es/get-transformed-headers.js b/amplify/functions/downloadDocument/node_modules/@smithy/node-http-handler/dist-es/get-transformed-headers.js new file mode 100644 index 0000000..562883c --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@smithy/node-http-handler/dist-es/get-transformed-headers.js @@ -0,0 +1,9 @@ +const getTransformedHeaders = (headers) => { + const transformedHeaders = {}; + for (const name of Object.keys(headers)) { + const headerValues = headers[name]; + transformedHeaders[name] = Array.isArray(headerValues) ? headerValues.join(",") : headerValues; + } + return transformedHeaders; +}; +export { getTransformedHeaders }; diff --git a/amplify/functions/downloadDocument/node_modules/@smithy/node-http-handler/dist-es/index.js b/amplify/functions/downloadDocument/node_modules/@smithy/node-http-handler/dist-es/index.js new file mode 100644 index 0000000..09c0b9a --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@smithy/node-http-handler/dist-es/index.js @@ -0,0 +1,3 @@ +export * from "./node-http-handler"; +export * from "./node-http2-handler"; +export * from "./stream-collector"; diff --git a/amplify/functions/downloadDocument/node_modules/@smithy/node-http-handler/dist-es/node-http-handler.js b/amplify/functions/downloadDocument/node_modules/@smithy/node-http-handler/dist-es/node-http-handler.js new file mode 100644 index 0000000..f0ca1e7 --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@smithy/node-http-handler/dist-es/node-http-handler.js @@ -0,0 +1,209 @@ 
+import { HttpResponse } from "@smithy/protocol-http"; +import { buildQueryString } from "@smithy/querystring-builder"; +import { Agent as hAgent, request as hRequest } from "http"; +import { Agent as hsAgent, request as hsRequest } from "https"; +import { NODEJS_TIMEOUT_ERROR_CODES } from "./constants"; +import { getTransformedHeaders } from "./get-transformed-headers"; +import { setConnectionTimeout } from "./set-connection-timeout"; +import { setSocketKeepAlive } from "./set-socket-keep-alive"; +import { setSocketTimeout } from "./set-socket-timeout"; +import { timing } from "./timing"; +import { writeRequestBody } from "./write-request-body"; +export const DEFAULT_REQUEST_TIMEOUT = 0; +export class NodeHttpHandler { + static create(instanceOrOptions) { + if (typeof instanceOrOptions?.handle === "function") { + return instanceOrOptions; + } + return new NodeHttpHandler(instanceOrOptions); + } + static checkSocketUsage(agent, socketWarningTimestamp, logger = console) { + const { sockets, requests, maxSockets } = agent; + if (typeof maxSockets !== "number" || maxSockets === Infinity) { + return socketWarningTimestamp; + } + const interval = 15000; + if (Date.now() - interval < socketWarningTimestamp) { + return socketWarningTimestamp; + } + if (sockets && requests) { + for (const origin in sockets) { + const socketsInUse = sockets[origin]?.length ?? 0; + const requestsEnqueued = requests[origin]?.length ?? 0; + if (socketsInUse >= maxSockets && requestsEnqueued >= 2 * maxSockets) { + logger?.warn?.(`@smithy/node-http-handler:WARN - socket usage at capacity=${socketsInUse} and ${requestsEnqueued} additional requests are enqueued. 
+See https://docs.aws.amazon.com/sdk-for-javascript/v3/developer-guide/node-configuring-maxsockets.html +or increase socketAcquisitionWarningTimeout=(millis) in the NodeHttpHandler config.`); + return Date.now(); + } + } + } + return socketWarningTimestamp; + } + constructor(options) { + this.socketWarningTimestamp = 0; + this.metadata = { handlerProtocol: "http/1.1" }; + this.configProvider = new Promise((resolve, reject) => { + if (typeof options === "function") { + options() + .then((_options) => { + resolve(this.resolveDefaultConfig(_options)); + }) + .catch(reject); + } + else { + resolve(this.resolveDefaultConfig(options)); + } + }); + } + resolveDefaultConfig(options) { + const { requestTimeout, connectionTimeout, socketTimeout, socketAcquisitionWarningTimeout, httpAgent, httpsAgent } = options || {}; + const keepAlive = true; + const maxSockets = 50; + return { + connectionTimeout, + requestTimeout: requestTimeout ?? socketTimeout, + socketAcquisitionWarningTimeout, + httpAgent: (() => { + if (httpAgent instanceof hAgent || typeof httpAgent?.destroy === "function") { + return httpAgent; + } + return new hAgent({ keepAlive, maxSockets, ...httpAgent }); + })(), + httpsAgent: (() => { + if (httpsAgent instanceof hsAgent || typeof httpsAgent?.destroy === "function") { + return httpsAgent; + } + return new hsAgent({ keepAlive, maxSockets, ...httpsAgent }); + })(), + logger: console, + }; + } + destroy() { + this.config?.httpAgent?.destroy(); + this.config?.httpsAgent?.destroy(); + } + async handle(request, { abortSignal } = {}) { + if (!this.config) { + this.config = await this.configProvider; + } + return new Promise((_resolve, _reject) => { + let writeRequestBodyPromise = undefined; + const timeouts = []; + const resolve = async (arg) => { + await writeRequestBodyPromise; + timeouts.forEach(timing.clearTimeout); + _resolve(arg); + }; + const reject = async (arg) => { + await writeRequestBodyPromise; + timeouts.forEach(timing.clearTimeout); + _reject(arg); + }; 
+ if (!this.config) { + throw new Error("Node HTTP request handler config is not resolved"); + } + if (abortSignal?.aborted) { + const abortError = new Error("Request aborted"); + abortError.name = "AbortError"; + reject(abortError); + return; + } + const isSSL = request.protocol === "https:"; + const agent = isSSL ? this.config.httpsAgent : this.config.httpAgent; + timeouts.push(timing.setTimeout(() => { + this.socketWarningTimestamp = NodeHttpHandler.checkSocketUsage(agent, this.socketWarningTimestamp, this.config.logger); + }, this.config.socketAcquisitionWarningTimeout ?? + (this.config.requestTimeout ?? 2000) + (this.config.connectionTimeout ?? 1000))); + const queryString = buildQueryString(request.query || {}); + let auth = undefined; + if (request.username != null || request.password != null) { + const username = request.username ?? ""; + const password = request.password ?? ""; + auth = `${username}:${password}`; + } + let path = request.path; + if (queryString) { + path += `?${queryString}`; + } + if (request.fragment) { + path += `#${request.fragment}`; + } + let hostname = request.hostname ?? ""; + if (hostname[0] === "[" && hostname.endsWith("]")) { + hostname = request.hostname.slice(1, -1); + } + else { + hostname = request.hostname; + } + const nodeHttpsOptions = { + headers: request.headers, + host: hostname, + method: request.method, + path, + port: request.port, + agent, + auth, + }; + const requestFunc = isSSL ? 
hsRequest : hRequest; + const req = requestFunc(nodeHttpsOptions, (res) => { + const httpResponse = new HttpResponse({ + statusCode: res.statusCode || -1, + reason: res.statusMessage, + headers: getTransformedHeaders(res.headers), + body: res, + }); + resolve({ response: httpResponse }); + }); + req.on("error", (err) => { + if (NODEJS_TIMEOUT_ERROR_CODES.includes(err.code)) { + reject(Object.assign(err, { name: "TimeoutError" })); + } + else { + reject(err); + } + }); + if (abortSignal) { + const onAbort = () => { + req.destroy(); + const abortError = new Error("Request aborted"); + abortError.name = "AbortError"; + reject(abortError); + }; + if (typeof abortSignal.addEventListener === "function") { + const signal = abortSignal; + signal.addEventListener("abort", onAbort, { once: true }); + req.once("close", () => signal.removeEventListener("abort", onAbort)); + } + else { + abortSignal.onabort = onAbort; + } + } + timeouts.push(setConnectionTimeout(req, reject, this.config.connectionTimeout)); + timeouts.push(setSocketTimeout(req, reject, this.config.requestTimeout)); + const httpAgent = nodeHttpsOptions.agent; + if (typeof httpAgent === "object" && "keepAlive" in httpAgent) { + timeouts.push(setSocketKeepAlive(req, { + keepAlive: httpAgent.keepAlive, + keepAliveMsecs: httpAgent.keepAliveMsecs, + })); + } + writeRequestBodyPromise = writeRequestBody(req, request, this.config.requestTimeout).catch((e) => { + timeouts.forEach(timing.clearTimeout); + return _reject(e); + }); + }); + } + updateHttpClientConfig(key, value) { + this.config = undefined; + this.configProvider = this.configProvider.then((config) => { + return { + ...config, + [key]: value, + }; + }); + } + httpHandlerConfigs() { + return this.config ?? 
{}; + } +} diff --git a/amplify/functions/downloadDocument/node_modules/@smithy/node-http-handler/dist-es/node-http2-connection-manager.js b/amplify/functions/downloadDocument/node_modules/@smithy/node-http-handler/dist-es/node-http2-connection-manager.js new file mode 100644 index 0000000..206d94f --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@smithy/node-http-handler/dist-es/node-http2-connection-manager.js @@ -0,0 +1,86 @@ +import http2 from "http2"; +import { NodeHttp2ConnectionPool } from "./node-http2-connection-pool"; +export class NodeHttp2ConnectionManager { + constructor(config) { + this.sessionCache = new Map(); + this.config = config; + if (this.config.maxConcurrency && this.config.maxConcurrency <= 0) { + throw new RangeError("maxConcurrency must be greater than zero."); + } + } + lease(requestContext, connectionConfiguration) { + const url = this.getUrlString(requestContext); + const existingPool = this.sessionCache.get(url); + if (existingPool) { + const existingSession = existingPool.poll(); + if (existingSession && !this.config.disableConcurrency) { + return existingSession; + } + } + const session = http2.connect(url); + if (this.config.maxConcurrency) { + session.settings({ maxConcurrentStreams: this.config.maxConcurrency }, (err) => { + if (err) { + throw new Error("Fail to set maxConcurrentStreams to " + + this.config.maxConcurrency + + "when creating new session for " + + requestContext.destination.toString()); + } + }); + } + session.unref(); + const destroySessionCb = () => { + session.destroy(); + this.deleteSession(url, session); + }; + session.on("goaway", destroySessionCb); + session.on("error", destroySessionCb); + session.on("frameError", destroySessionCb); + session.on("close", () => this.deleteSession(url, session)); + if (connectionConfiguration.requestTimeout) { + session.setTimeout(connectionConfiguration.requestTimeout, destroySessionCb); + } + const connectionPool = this.sessionCache.get(url) || new 
NodeHttp2ConnectionPool(); + connectionPool.offerLast(session); + this.sessionCache.set(url, connectionPool); + return session; + } + deleteSession(authority, session) { + const existingConnectionPool = this.sessionCache.get(authority); + if (!existingConnectionPool) { + return; + } + if (!existingConnectionPool.contains(session)) { + return; + } + existingConnectionPool.remove(session); + this.sessionCache.set(authority, existingConnectionPool); + } + release(requestContext, session) { + const cacheKey = this.getUrlString(requestContext); + this.sessionCache.get(cacheKey)?.offerLast(session); + } + destroy() { + for (const [key, connectionPool] of this.sessionCache) { + for (const session of connectionPool) { + if (!session.destroyed) { + session.destroy(); + } + connectionPool.remove(session); + } + this.sessionCache.delete(key); + } + } + setMaxConcurrentStreams(maxConcurrentStreams) { + if (maxConcurrentStreams && maxConcurrentStreams <= 0) { + throw new RangeError("maxConcurrentStreams must be greater than zero."); + } + this.config.maxConcurrency = maxConcurrentStreams; + } + setDisableConcurrentStreams(disableConcurrentStreams) { + this.config.disableConcurrency = disableConcurrentStreams; + } + getUrlString(request) { + return request.destination.toString(); + } +} diff --git a/amplify/functions/downloadDocument/node_modules/@smithy/node-http-handler/dist-es/node-http2-connection-pool.js b/amplify/functions/downloadDocument/node_modules/@smithy/node-http-handler/dist-es/node-http2-connection-pool.js new file mode 100644 index 0000000..429eb49 --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@smithy/node-http-handler/dist-es/node-http2-connection-pool.js @@ -0,0 +1,32 @@ +export class NodeHttp2ConnectionPool { + constructor(sessions) { + this.sessions = []; + this.sessions = sessions ?? 
[]; + } + poll() { + if (this.sessions.length > 0) { + return this.sessions.shift(); + } + } + offerLast(session) { + this.sessions.push(session); + } + contains(session) { + return this.sessions.includes(session); + } + remove(session) { + this.sessions = this.sessions.filter((s) => s !== session); + } + [Symbol.iterator]() { + return this.sessions[Symbol.iterator](); + } + destroy(connection) { + for (const session of this.sessions) { + if (session === connection) { + if (!session.destroyed) { + session.destroy(); + } + } + } + } +} diff --git a/amplify/functions/downloadDocument/node_modules/@smithy/node-http-handler/dist-es/node-http2-handler.js b/amplify/functions/downloadDocument/node_modules/@smithy/node-http-handler/dist-es/node-http2-handler.js new file mode 100644 index 0000000..b68601e --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@smithy/node-http-handler/dist-es/node-http2-handler.js @@ -0,0 +1,167 @@ +import { HttpResponse } from "@smithy/protocol-http"; +import { buildQueryString } from "@smithy/querystring-builder"; +import { constants } from "http2"; +import { getTransformedHeaders } from "./get-transformed-headers"; +import { NodeHttp2ConnectionManager } from "./node-http2-connection-manager"; +import { writeRequestBody } from "./write-request-body"; +export class NodeHttp2Handler { + static create(instanceOrOptions) { + if (typeof instanceOrOptions?.handle === "function") { + return instanceOrOptions; + } + return new NodeHttp2Handler(instanceOrOptions); + } + constructor(options) { + this.metadata = { handlerProtocol: "h2" }; + this.connectionManager = new NodeHttp2ConnectionManager({}); + this.configProvider = new Promise((resolve, reject) => { + if (typeof options === "function") { + options() + .then((opts) => { + resolve(opts || {}); + }) + .catch(reject); + } + else { + resolve(options || {}); + } + }); + } + destroy() { + this.connectionManager.destroy(); + } + async handle(request, { abortSignal } = {}) { + if 
(!this.config) { + this.config = await this.configProvider; + this.connectionManager.setDisableConcurrentStreams(this.config.disableConcurrentStreams || false); + if (this.config.maxConcurrentStreams) { + this.connectionManager.setMaxConcurrentStreams(this.config.maxConcurrentStreams); + } + } + const { requestTimeout, disableConcurrentStreams } = this.config; + return new Promise((_resolve, _reject) => { + let fulfilled = false; + let writeRequestBodyPromise = undefined; + const resolve = async (arg) => { + await writeRequestBodyPromise; + _resolve(arg); + }; + const reject = async (arg) => { + await writeRequestBodyPromise; + _reject(arg); + }; + if (abortSignal?.aborted) { + fulfilled = true; + const abortError = new Error("Request aborted"); + abortError.name = "AbortError"; + reject(abortError); + return; + } + const { hostname, method, port, protocol, query } = request; + let auth = ""; + if (request.username != null || request.password != null) { + const username = request.username ?? ""; + const password = request.password ?? ""; + auth = `${username}:${password}@`; + } + const authority = `${protocol}//${auth}${hostname}${port ? 
`:${port}` : ""}`; + const requestContext = { destination: new URL(authority) }; + const session = this.connectionManager.lease(requestContext, { + requestTimeout: this.config?.sessionTimeout, + disableConcurrentStreams: disableConcurrentStreams || false, + }); + const rejectWithDestroy = (err) => { + if (disableConcurrentStreams) { + this.destroySession(session); + } + fulfilled = true; + reject(err); + }; + const queryString = buildQueryString(query || {}); + let path = request.path; + if (queryString) { + path += `?${queryString}`; + } + if (request.fragment) { + path += `#${request.fragment}`; + } + const req = session.request({ + ...request.headers, + [constants.HTTP2_HEADER_PATH]: path, + [constants.HTTP2_HEADER_METHOD]: method, + }); + session.ref(); + req.on("response", (headers) => { + const httpResponse = new HttpResponse({ + statusCode: headers[":status"] || -1, + headers: getTransformedHeaders(headers), + body: req, + }); + fulfilled = true; + resolve({ response: httpResponse }); + if (disableConcurrentStreams) { + session.close(); + this.connectionManager.deleteSession(authority, session); + } + }); + if (requestTimeout) { + req.setTimeout(requestTimeout, () => { + req.close(); + const timeoutError = new Error(`Stream timed out because of no activity for ${requestTimeout} ms`); + timeoutError.name = "TimeoutError"; + rejectWithDestroy(timeoutError); + }); + } + if (abortSignal) { + const onAbort = () => { + req.close(); + const abortError = new Error("Request aborted"); + abortError.name = "AbortError"; + rejectWithDestroy(abortError); + }; + if (typeof abortSignal.addEventListener === "function") { + const signal = abortSignal; + signal.addEventListener("abort", onAbort, { once: true }); + req.once("close", () => signal.removeEventListener("abort", onAbort)); + } + else { + abortSignal.onabort = onAbort; + } + } + req.on("frameError", (type, code, id) => { + rejectWithDestroy(new Error(`Frame type id ${type} in stream id ${id} has failed with code 
${code}.`)); + }); + req.on("error", rejectWithDestroy); + req.on("aborted", () => { + rejectWithDestroy(new Error(`HTTP/2 stream is abnormally aborted in mid-communication with result code ${req.rstCode}.`)); + }); + req.on("close", () => { + session.unref(); + if (disableConcurrentStreams) { + session.destroy(); + } + if (!fulfilled) { + rejectWithDestroy(new Error("Unexpected error: http2 request did not get a response")); + } + }); + writeRequestBodyPromise = writeRequestBody(req, request, requestTimeout); + }); + } + updateHttpClientConfig(key, value) { + this.config = undefined; + this.configProvider = this.configProvider.then((config) => { + return { + ...config, + [key]: value, + }; + }); + } + httpHandlerConfigs() { + return this.config ?? {}; + } + destroySession(session) { + if (!session.destroyed) { + session.destroy(); + } + } +} diff --git a/amplify/functions/downloadDocument/node_modules/@smithy/node-http-handler/dist-es/readable.mock.js b/amplify/functions/downloadDocument/node_modules/@smithy/node-http-handler/dist-es/readable.mock.js new file mode 100644 index 0000000..41fb0b6 --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@smithy/node-http-handler/dist-es/readable.mock.js @@ -0,0 +1,19 @@ +import { Readable } from "stream"; +export class ReadFromBuffers extends Readable { + constructor(options) { + super(options); + this.numBuffersRead = 0; + this.buffersToRead = options.buffers; + this.errorAfter = typeof options.errorAfter === "number" ? 
options.errorAfter : -1; + } + _read() { + if (this.errorAfter !== -1 && this.errorAfter === this.numBuffersRead) { + this.emit("error", new Error("Mock Error")); + return; + } + if (this.numBuffersRead >= this.buffersToRead.length) { + return this.push(null); + } + return this.push(this.buffersToRead[this.numBuffersRead++]); + } +} diff --git a/amplify/functions/downloadDocument/node_modules/@smithy/node-http-handler/dist-es/server.mock.js b/amplify/functions/downloadDocument/node_modules/@smithy/node-http-handler/dist-es/server.mock.js new file mode 100644 index 0000000..6a31adf --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@smithy/node-http-handler/dist-es/server.mock.js @@ -0,0 +1,88 @@ +import { readFileSync } from "fs"; +import { createServer as createHttpServer } from "http"; +import { createServer as createHttp2Server } from "http2"; +import { createServer as createHttpsServer } from "https"; +import { join } from "path"; +import { Readable } from "stream"; +import { timing } from "./timing"; +const fixturesDir = join(__dirname, "..", "fixtures"); +const setResponseHeaders = (response, headers) => { + for (const [key, value] of Object.entries(headers)) { + response.setHeader(key, value); + } +}; +const setResponseBody = (response, body) => { + if (body instanceof Readable) { + body.pipe(response); + } + else { + response.end(body); + } +}; +export const createResponseFunction = (httpResp) => (request, response) => { + response.statusCode = httpResp.statusCode; + if (httpResp.reason) { + response.statusMessage = httpResp.reason; + } + setResponseHeaders(response, httpResp.headers); + setResponseBody(response, httpResp.body); +}; +export const createResponseFunctionWithDelay = (httpResp, delay) => (request, response) => { + response.statusCode = httpResp.statusCode; + if (httpResp.reason) { + response.statusMessage = httpResp.reason; + } + setResponseHeaders(response, httpResp.headers); + timing.setTimeout(() => setResponseBody(response, 
httpResp.body), delay); +}; +export const createContinueResponseFunction = (httpResp) => (request, response) => { + response.writeContinue(); + timing.setTimeout(() => { + createResponseFunction(httpResp)(request, response); + }, 100); +}; +export const createMockHttpsServer = () => { + const server = createHttpsServer({ + key: readFileSync(join(fixturesDir, "test-server-key.pem")), + cert: readFileSync(join(fixturesDir, "test-server-cert.pem")), + }); + return server; +}; +export const createMockHttpServer = () => { + const server = createHttpServer(); + return server; +}; +export const createMockHttp2Server = () => { + const server = createHttp2Server(); + return server; +}; +export const createMirrorResponseFunction = (httpResp) => (request, response) => { + const bufs = []; + request.on("data", (chunk) => { + bufs.push(chunk); + }); + request.on("end", () => { + response.statusCode = httpResp.statusCode; + setResponseHeaders(response, httpResp.headers); + setResponseBody(response, Buffer.concat(bufs)); + }); + request.on("error", (err) => { + response.statusCode = 500; + setResponseHeaders(response, httpResp.headers); + setResponseBody(response, err.message); + }); +}; +export const getResponseBody = (response) => { + return new Promise((resolve, reject) => { + const bufs = []; + response.body.on("data", function (d) { + bufs.push(d); + }); + response.body.on("end", function () { + resolve(Buffer.concat(bufs).toString()); + }); + response.body.on("error", (err) => { + reject(err); + }); + }); +}; diff --git a/amplify/functions/downloadDocument/node_modules/@smithy/node-http-handler/dist-es/set-connection-timeout.js b/amplify/functions/downloadDocument/node_modules/@smithy/node-http-handler/dist-es/set-connection-timeout.js new file mode 100644 index 0000000..587532e --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@smithy/node-http-handler/dist-es/set-connection-timeout.js @@ -0,0 +1,36 @@ +import { timing } from "./timing"; +const 
DEFER_EVENT_LISTENER_TIME = 1000; +export const setConnectionTimeout = (request, reject, timeoutInMs = 0) => { + if (!timeoutInMs) { + return -1; + } + const registerTimeout = (offset) => { + const timeoutId = timing.setTimeout(() => { + request.destroy(); + reject(Object.assign(new Error(`Socket timed out without establishing a connection within ${timeoutInMs} ms`), { + name: "TimeoutError", + })); + }, timeoutInMs - offset); + const doWithSocket = (socket) => { + if (socket?.connecting) { + socket.on("connect", () => { + timing.clearTimeout(timeoutId); + }); + } + else { + timing.clearTimeout(timeoutId); + } + }; + if (request.socket) { + doWithSocket(request.socket); + } + else { + request.on("socket", doWithSocket); + } + }; + if (timeoutInMs < 2000) { + registerTimeout(0); + return 0; + } + return timing.setTimeout(registerTimeout.bind(null, DEFER_EVENT_LISTENER_TIME), DEFER_EVENT_LISTENER_TIME); +}; diff --git a/amplify/functions/downloadDocument/node_modules/@smithy/node-http-handler/dist-es/set-socket-keep-alive.js b/amplify/functions/downloadDocument/node_modules/@smithy/node-http-handler/dist-es/set-socket-keep-alive.js new file mode 100644 index 0000000..18391a8 --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@smithy/node-http-handler/dist-es/set-socket-keep-alive.js @@ -0,0 +1,22 @@ +import { timing } from "./timing"; +const DEFER_EVENT_LISTENER_TIME = 3000; +export const setSocketKeepAlive = (request, { keepAlive, keepAliveMsecs }, deferTimeMs = DEFER_EVENT_LISTENER_TIME) => { + if (keepAlive !== true) { + return -1; + } + const registerListener = () => { + if (request.socket) { + request.socket.setKeepAlive(keepAlive, keepAliveMsecs || 0); + } + else { + request.on("socket", (socket) => { + socket.setKeepAlive(keepAlive, keepAliveMsecs || 0); + }); + } + }; + if (deferTimeMs === 0) { + registerListener(); + return 0; + } + return timing.setTimeout(registerListener, deferTimeMs); +}; diff --git 
a/amplify/functions/downloadDocument/node_modules/@smithy/node-http-handler/dist-es/set-socket-timeout.js b/amplify/functions/downloadDocument/node_modules/@smithy/node-http-handler/dist-es/set-socket-timeout.js new file mode 100644 index 0000000..5c4456c --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@smithy/node-http-handler/dist-es/set-socket-timeout.js @@ -0,0 +1,24 @@ +import { DEFAULT_REQUEST_TIMEOUT } from "./node-http-handler"; +import { timing } from "./timing"; +const DEFER_EVENT_LISTENER_TIME = 3000; +export const setSocketTimeout = (request, reject, timeoutInMs = DEFAULT_REQUEST_TIMEOUT) => { + const registerTimeout = (offset) => { + const timeout = timeoutInMs - offset; + const onTimeout = () => { + request.destroy(); + reject(Object.assign(new Error(`Connection timed out after ${timeoutInMs} ms`), { name: "TimeoutError" })); + }; + if (request.socket) { + request.socket.setTimeout(timeout, onTimeout); + request.on("close", () => request.socket?.removeListener("timeout", onTimeout)); + } + else { + request.setTimeout(timeout, onTimeout); + } + }; + if (0 < timeoutInMs && timeoutInMs < 6000) { + registerTimeout(0); + return 0; + } + return timing.setTimeout(registerTimeout.bind(null, timeoutInMs === 0 ? 
0 : DEFER_EVENT_LISTENER_TIME), DEFER_EVENT_LISTENER_TIME); +}; diff --git a/amplify/functions/downloadDocument/node_modules/@smithy/node-http-handler/dist-es/stream-collector/collector.js b/amplify/functions/downloadDocument/node_modules/@smithy/node-http-handler/dist-es/stream-collector/collector.js new file mode 100644 index 0000000..c3737e9 --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@smithy/node-http-handler/dist-es/stream-collector/collector.js @@ -0,0 +1,11 @@ +import { Writable } from "stream"; +export class Collector extends Writable { + constructor() { + super(...arguments); + this.bufferedBytes = []; + } + _write(chunk, encoding, callback) { + this.bufferedBytes.push(chunk); + callback(); + } +} diff --git a/amplify/functions/downloadDocument/node_modules/@smithy/node-http-handler/dist-es/stream-collector/index.js b/amplify/functions/downloadDocument/node_modules/@smithy/node-http-handler/dist-es/stream-collector/index.js new file mode 100644 index 0000000..8ff09c0 --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@smithy/node-http-handler/dist-es/stream-collector/index.js @@ -0,0 +1,41 @@ +import { Collector } from "./collector"; +export const streamCollector = (stream) => { + if (isReadableStreamInstance(stream)) { + return collectReadableStream(stream); + } + return new Promise((resolve, reject) => { + const collector = new Collector(); + stream.pipe(collector); + stream.on("error", (err) => { + collector.end(); + reject(err); + }); + collector.on("error", reject); + collector.on("finish", function () { + const bytes = new Uint8Array(Buffer.concat(this.bufferedBytes)); + resolve(bytes); + }); + }); +}; +const isReadableStreamInstance = (stream) => typeof ReadableStream === "function" && stream instanceof ReadableStream; +async function collectReadableStream(stream) { + const chunks = []; + const reader = stream.getReader(); + let isDone = false; + let length = 0; + while (!isDone) { + const { done, value } = 
await reader.read(); + if (value) { + chunks.push(value); + length += value.length; + } + isDone = done; + } + const collected = new Uint8Array(length); + let offset = 0; + for (const chunk of chunks) { + collected.set(chunk, offset); + offset += chunk.length; + } + return collected; +} diff --git a/amplify/functions/downloadDocument/node_modules/@smithy/node-http-handler/dist-es/stream-collector/readable.mock.js b/amplify/functions/downloadDocument/node_modules/@smithy/node-http-handler/dist-es/stream-collector/readable.mock.js new file mode 100644 index 0000000..2f653c5 --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@smithy/node-http-handler/dist-es/stream-collector/readable.mock.js @@ -0,0 +1,19 @@ +import { Readable } from "stream"; +export class ReadFromBuffers extends Readable { + constructor(options) { + super(options); + this.numBuffersRead = 0; + this.buffersToRead = options.buffers; + this.errorAfter = typeof options.errorAfter === "number" ? options.errorAfter : -1; + } + _read(size) { + if (this.errorAfter !== -1 && this.errorAfter === this.numBuffersRead) { + this.emit("error", new Error("Mock Error")); + return; + } + if (this.numBuffersRead >= this.buffersToRead.length) { + return this.push(null); + } + return this.push(this.buffersToRead[this.numBuffersRead++]); + } +} diff --git a/amplify/functions/downloadDocument/node_modules/@smithy/node-http-handler/dist-es/timing.js b/amplify/functions/downloadDocument/node_modules/@smithy/node-http-handler/dist-es/timing.js new file mode 100644 index 0000000..792ba48 --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@smithy/node-http-handler/dist-es/timing.js @@ -0,0 +1,4 @@ +export const timing = { + setTimeout: (cb, ms) => setTimeout(cb, ms), + clearTimeout: (timeoutId) => clearTimeout(timeoutId), +}; diff --git a/amplify/functions/downloadDocument/node_modules/@smithy/node-http-handler/dist-es/write-request-body.js 
b/amplify/functions/downloadDocument/node_modules/@smithy/node-http-handler/dist-es/write-request-body.js new file mode 100644 index 0000000..36e15f9 --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@smithy/node-http-handler/dist-es/write-request-body.js @@ -0,0 +1,56 @@ +import { Readable } from "stream"; +import { timing } from "./timing"; +const MIN_WAIT_TIME = 6000; +export async function writeRequestBody(httpRequest, request, maxContinueTimeoutMs = MIN_WAIT_TIME) { + const headers = request.headers ?? {}; + const expect = headers["Expect"] || headers["expect"]; + let timeoutId = -1; + let sendBody = true; + if (expect === "100-continue") { + sendBody = await Promise.race([ + new Promise((resolve) => { + timeoutId = Number(timing.setTimeout(() => resolve(true), Math.max(MIN_WAIT_TIME, maxContinueTimeoutMs))); + }), + new Promise((resolve) => { + httpRequest.on("continue", () => { + timing.clearTimeout(timeoutId); + resolve(true); + }); + httpRequest.on("response", () => { + timing.clearTimeout(timeoutId); + resolve(false); + }); + httpRequest.on("error", () => { + timing.clearTimeout(timeoutId); + resolve(false); + }); + }), + ]); + } + if (sendBody) { + writeBody(httpRequest, request.body); + } +} +function writeBody(httpRequest, body) { + if (body instanceof Readable) { + body.pipe(httpRequest); + return; + } + if (body) { + if (Buffer.isBuffer(body) || typeof body === "string") { + httpRequest.end(body); + return; + } + const uint8 = body; + if (typeof uint8 === "object" && + uint8.buffer && + typeof uint8.byteOffset === "number" && + typeof uint8.byteLength === "number") { + httpRequest.end(Buffer.from(uint8.buffer, uint8.byteOffset, uint8.byteLength)); + return; + } + httpRequest.end(Buffer.from(body)); + return; + } + httpRequest.end(); +} diff --git a/amplify/functions/downloadDocument/node_modules/@smithy/node-http-handler/dist-types/constants.d.ts 
b/amplify/functions/downloadDocument/node_modules/@smithy/node-http-handler/dist-types/constants.d.ts new file mode 100644 index 0000000..3540461 --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@smithy/node-http-handler/dist-types/constants.d.ts @@ -0,0 +1,5 @@ +/** + * Node.js system error codes that indicate timeout. + * @deprecated use NODEJS_TIMEOUT_ERROR_CODES from @smithy/service-error-classification/constants + */ +export declare const NODEJS_TIMEOUT_ERROR_CODES: string[]; diff --git a/amplify/functions/downloadDocument/node_modules/@smithy/node-http-handler/dist-types/get-transformed-headers.d.ts b/amplify/functions/downloadDocument/node_modules/@smithy/node-http-handler/dist-types/get-transformed-headers.d.ts new file mode 100644 index 0000000..bb7cd4e --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@smithy/node-http-handler/dist-types/get-transformed-headers.d.ts @@ -0,0 +1,4 @@ +import { HeaderBag } from "@smithy/types"; +import { IncomingHttpHeaders } from "http2"; +declare const getTransformedHeaders: (headers: IncomingHttpHeaders) => HeaderBag; +export { getTransformedHeaders }; diff --git a/amplify/functions/downloadDocument/node_modules/@smithy/node-http-handler/dist-types/index.d.ts b/amplify/functions/downloadDocument/node_modules/@smithy/node-http-handler/dist-types/index.d.ts new file mode 100644 index 0000000..09c0b9a --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@smithy/node-http-handler/dist-types/index.d.ts @@ -0,0 +1,3 @@ +export * from "./node-http-handler"; +export * from "./node-http2-handler"; +export * from "./stream-collector"; diff --git a/amplify/functions/downloadDocument/node_modules/@smithy/node-http-handler/dist-types/node-http-handler.d.ts b/amplify/functions/downloadDocument/node_modules/@smithy/node-http-handler/dist-types/node-http-handler.d.ts new file mode 100644 index 0000000..b120313 --- /dev/null +++ 
b/amplify/functions/downloadDocument/node_modules/@smithy/node-http-handler/dist-types/node-http-handler.d.ts @@ -0,0 +1,47 @@ +/// +/// +import { HttpHandler, HttpRequest, HttpResponse } from "@smithy/protocol-http"; +import type { Logger, NodeHttpHandlerOptions } from "@smithy/types"; +import { HttpHandlerOptions, Provider } from "@smithy/types"; +import { Agent as hAgent } from "http"; +import { Agent as hsAgent } from "https"; +export { NodeHttpHandlerOptions }; +/** + * @public + * A default of 0 means no timeout. + */ +export declare const DEFAULT_REQUEST_TIMEOUT = 0; +/** + * @public + * A request handler that uses the Node.js http and https modules. + */ +export declare class NodeHttpHandler implements HttpHandler { + private config?; + private configProvider; + private socketWarningTimestamp; + readonly metadata: { + handlerProtocol: string; + }; + /** + * @returns the input if it is an HttpHandler of any class, + * or instantiates a new instance of this handler. + */ + static create(instanceOrOptions?: HttpHandler | NodeHttpHandlerOptions | Provider): NodeHttpHandler | HttpHandler; + /** + * @internal + * + * @param agent - http(s) agent in use by the NodeHttpHandler instance. + * @param socketWarningTimestamp - last socket usage check timestamp. + * @param logger - channel for the warning. + * @returns timestamp of last emitted warning. 
+ */ + static checkSocketUsage(agent: hAgent | hsAgent, socketWarningTimestamp: number, logger?: Logger): number; + constructor(options?: NodeHttpHandlerOptions | Provider); + private resolveDefaultConfig; + destroy(): void; + handle(request: HttpRequest, { abortSignal }?: HttpHandlerOptions): Promise<{ + response: HttpResponse; + }>; + updateHttpClientConfig(key: keyof NodeHttpHandlerOptions, value: NodeHttpHandlerOptions[typeof key]): void; + httpHandlerConfigs(): NodeHttpHandlerOptions; +} diff --git a/amplify/functions/downloadDocument/node_modules/@smithy/node-http-handler/dist-types/node-http2-connection-manager.d.ts b/amplify/functions/downloadDocument/node_modules/@smithy/node-http-handler/dist-types/node-http2-connection-manager.d.ts new file mode 100644 index 0000000..24bc3b5 --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@smithy/node-http-handler/dist-types/node-http2-connection-manager.d.ts @@ -0,0 +1,25 @@ +/// +import { RequestContext } from "@smithy/types"; +import { ConnectConfiguration } from "@smithy/types"; +import { ConnectionManager, ConnectionManagerConfiguration } from "@smithy/types"; +import { ClientHttp2Session } from "http2"; +/** + * @public + */ +export declare class NodeHttp2ConnectionManager implements ConnectionManager { + constructor(config: ConnectionManagerConfiguration); + private config; + private readonly sessionCache; + lease(requestContext: RequestContext, connectionConfiguration: ConnectConfiguration): ClientHttp2Session; + /** + * Delete a session from the connection pool. + * @param authority The authority of the session to delete. + * @param session The session to delete. 
+ */ + deleteSession(authority: string, session: ClientHttp2Session): void; + release(requestContext: RequestContext, session: ClientHttp2Session): void; + destroy(): void; + setMaxConcurrentStreams(maxConcurrentStreams: number): void; + setDisableConcurrentStreams(disableConcurrentStreams: boolean): void; + private getUrlString; +} diff --git a/amplify/functions/downloadDocument/node_modules/@smithy/node-http-handler/dist-types/node-http2-connection-pool.d.ts b/amplify/functions/downloadDocument/node_modules/@smithy/node-http-handler/dist-types/node-http2-connection-pool.d.ts new file mode 100644 index 0000000..6695893 --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@smithy/node-http-handler/dist-types/node-http2-connection-pool.d.ts @@ -0,0 +1,13 @@ +/// +import { ConnectionPool } from "@smithy/types"; +import { ClientHttp2Session } from "http2"; +export declare class NodeHttp2ConnectionPool implements ConnectionPool { + private sessions; + constructor(sessions?: ClientHttp2Session[]); + poll(): ClientHttp2Session | void; + offerLast(session: ClientHttp2Session): void; + contains(session: ClientHttp2Session): boolean; + remove(session: ClientHttp2Session): void; + [Symbol.iterator](): IterableIterator; + destroy(connection: ClientHttp2Session): void; +} diff --git a/amplify/functions/downloadDocument/node_modules/@smithy/node-http-handler/dist-types/node-http2-handler.d.ts b/amplify/functions/downloadDocument/node_modules/@smithy/node-http-handler/dist-types/node-http2-handler.d.ts new file mode 100644 index 0000000..68610a9 --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@smithy/node-http-handler/dist-types/node-http2-handler.d.ts @@ -0,0 +1,62 @@ +import { HttpHandler, HttpRequest, HttpResponse } from "@smithy/protocol-http"; +import { HttpHandlerOptions, Provider } from "@smithy/types"; +/** + * Represents the http2 options that can be passed to a node http2 client. 
+ * @public + */ +export interface NodeHttp2HandlerOptions { + /** + * The maximum time in milliseconds that a stream may remain idle before it + * is closed. + */ + requestTimeout?: number; + /** + * The maximum time in milliseconds that a session or socket may remain idle + * before it is closed. + * https://nodejs.org/docs/latest-v12.x/api/http2.html#http2_http2session_and_sockets + */ + sessionTimeout?: number; + /** + * Disables processing concurrent streams on a ClientHttp2Session instance. When set + * to true, a new session instance is created for each request to a URL. + * **Default:** false. + * https://nodejs.org/api/http2.html#http2_class_clienthttp2session + */ + disableConcurrentStreams?: boolean; + /** + * Maximum number of concurrent Http2Stream instances per ClientHttp2Session. Each session + * may have up to 2^31-1 Http2Stream instances over its lifetime. + * This value must be greater than or equal to 0. + * https://nodejs.org/api/http2.html#class-http2stream + */ + maxConcurrentStreams?: number; +} +/** + * A request handler using the node:http2 package. + * @public + */ +export declare class NodeHttp2Handler implements HttpHandler { + private config?; + private configProvider; + readonly metadata: { + handlerProtocol: string; + }; + private readonly connectionManager; + /** + * @returns the input if it is an HttpHandler of any class, + * or instantiates a new instance of this handler. + */ + static create(instanceOrOptions?: HttpHandler | NodeHttp2HandlerOptions | Provider): HttpHandler | NodeHttp2Handler; + constructor(options?: NodeHttp2HandlerOptions | Provider); + destroy(): void; + handle(request: HttpRequest, { abortSignal }?: HttpHandlerOptions): Promise<{ + response: HttpResponse; + }>; + updateHttpClientConfig(key: keyof NodeHttp2HandlerOptions, value: NodeHttp2HandlerOptions[typeof key]): void; + httpHandlerConfigs(): NodeHttp2HandlerOptions; + /** + * Destroys a session. + * @param session - the session to destroy. 
+ */ + private destroySession; +} diff --git a/amplify/functions/downloadDocument/node_modules/@smithy/node-http-handler/dist-types/readable.mock.d.ts b/amplify/functions/downloadDocument/node_modules/@smithy/node-http-handler/dist-types/readable.mock.d.ts new file mode 100644 index 0000000..cd7e77f --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@smithy/node-http-handler/dist-types/readable.mock.d.ts @@ -0,0 +1,15 @@ +/// +/// +/// +import { Readable, ReadableOptions } from "stream"; +export interface ReadFromBuffersOptions extends ReadableOptions { + buffers: Buffer[]; + errorAfter?: number; +} +export declare class ReadFromBuffers extends Readable { + private buffersToRead; + private numBuffersRead; + private errorAfter; + constructor(options: ReadFromBuffersOptions); + _read(): boolean | undefined; +} diff --git a/amplify/functions/downloadDocument/node_modules/@smithy/node-http-handler/dist-types/server.mock.d.ts b/amplify/functions/downloadDocument/node_modules/@smithy/node-http-handler/dist-types/server.mock.d.ts new file mode 100644 index 0000000..585a677 --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@smithy/node-http-handler/dist-types/server.mock.d.ts @@ -0,0 +1,12 @@ +import { HttpResponse } from "@smithy/types"; +import { IncomingMessage, Server as HttpServer, ServerResponse } from "http"; +import { Http2Server } from "http2"; +import { Server as HttpsServer } from "https"; +export declare const createResponseFunction: (httpResp: HttpResponse) => (request: IncomingMessage, response: ServerResponse) => void; +export declare const createResponseFunctionWithDelay: (httpResp: HttpResponse, delay: number) => (request: IncomingMessage, response: ServerResponse) => void; +export declare const createContinueResponseFunction: (httpResp: HttpResponse) => (request: IncomingMessage, response: ServerResponse) => void; +export declare const createMockHttpsServer: () => HttpsServer; +export declare const createMockHttpServer: 
() => HttpServer; +export declare const createMockHttp2Server: () => Http2Server; +export declare const createMirrorResponseFunction: (httpResp: HttpResponse) => (request: IncomingMessage, response: ServerResponse) => void; +export declare const getResponseBody: (response: HttpResponse) => Promise; diff --git a/amplify/functions/downloadDocument/node_modules/@smithy/node-http-handler/dist-types/set-connection-timeout.d.ts b/amplify/functions/downloadDocument/node_modules/@smithy/node-http-handler/dist-types/set-connection-timeout.d.ts new file mode 100644 index 0000000..57b811f --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@smithy/node-http-handler/dist-types/set-connection-timeout.d.ts @@ -0,0 +1,9 @@ +/// +/// +/// +/// +/// +/// +/// +import { ClientRequest } from "http"; +export declare const setConnectionTimeout: (request: ClientRequest, reject: (err: Error) => void, timeoutInMs?: number) => NodeJS.Timeout | number; diff --git a/amplify/functions/downloadDocument/node_modules/@smithy/node-http-handler/dist-types/set-socket-keep-alive.d.ts b/amplify/functions/downloadDocument/node_modules/@smithy/node-http-handler/dist-types/set-socket-keep-alive.d.ts new file mode 100644 index 0000000..80507d8 --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@smithy/node-http-handler/dist-types/set-socket-keep-alive.d.ts @@ -0,0 +1,13 @@ +/// +/// +/// +/// +/// +/// +/// +import { ClientRequest } from "http"; +export interface SocketKeepAliveOptions { + keepAlive: boolean; + keepAliveMsecs?: number; +} +export declare const setSocketKeepAlive: (request: ClientRequest, { keepAlive, keepAliveMsecs }: SocketKeepAliveOptions, deferTimeMs?: number) => NodeJS.Timeout | number; diff --git a/amplify/functions/downloadDocument/node_modules/@smithy/node-http-handler/dist-types/set-socket-timeout.d.ts b/amplify/functions/downloadDocument/node_modules/@smithy/node-http-handler/dist-types/set-socket-timeout.d.ts new file mode 100644 index 
0000000..019a62b --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@smithy/node-http-handler/dist-types/set-socket-timeout.d.ts @@ -0,0 +1,9 @@ +/// +/// +/// +/// +/// +/// +/// +import { ClientRequest } from "http"; +export declare const setSocketTimeout: (request: ClientRequest, reject: (err: Error) => void, timeoutInMs?: number) => NodeJS.Timeout | number; diff --git a/amplify/functions/downloadDocument/node_modules/@smithy/node-http-handler/dist-types/stream-collector/collector.d.ts b/amplify/functions/downloadDocument/node_modules/@smithy/node-http-handler/dist-types/stream-collector/collector.d.ts new file mode 100644 index 0000000..b7d4d12 --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@smithy/node-http-handler/dist-types/stream-collector/collector.d.ts @@ -0,0 +1,8 @@ +/// +/// +/// +import { Writable } from "stream"; +export declare class Collector extends Writable { + readonly bufferedBytes: Buffer[]; + _write(chunk: Buffer, encoding: string, callback: (err?: Error) => void): void; +} diff --git a/amplify/functions/downloadDocument/node_modules/@smithy/node-http-handler/dist-types/stream-collector/index.d.ts b/amplify/functions/downloadDocument/node_modules/@smithy/node-http-handler/dist-types/stream-collector/index.d.ts new file mode 100644 index 0000000..a9a9498 --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@smithy/node-http-handler/dist-types/stream-collector/index.d.ts @@ -0,0 +1,6 @@ +import { StreamCollector } from "@smithy/types"; +/** + * @internal + * Converts a stream to a byte array. 
+ */ +export declare const streamCollector: StreamCollector; diff --git a/amplify/functions/downloadDocument/node_modules/@smithy/node-http-handler/dist-types/stream-collector/readable.mock.d.ts b/amplify/functions/downloadDocument/node_modules/@smithy/node-http-handler/dist-types/stream-collector/readable.mock.d.ts new file mode 100644 index 0000000..2543a28 --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@smithy/node-http-handler/dist-types/stream-collector/readable.mock.d.ts @@ -0,0 +1,15 @@ +/// +/// +/// +import { Readable, ReadableOptions } from "stream"; +export interface ReadFromBuffersOptions extends ReadableOptions { + buffers: Buffer[]; + errorAfter?: number; +} +export declare class ReadFromBuffers extends Readable { + private buffersToRead; + private numBuffersRead; + private errorAfter; + constructor(options: ReadFromBuffersOptions); + _read(size: number): boolean | undefined; +} diff --git a/amplify/functions/downloadDocument/node_modules/@smithy/node-http-handler/dist-types/timing.d.ts b/amplify/functions/downloadDocument/node_modules/@smithy/node-http-handler/dist-types/timing.d.ts new file mode 100644 index 0000000..de5b695 --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@smithy/node-http-handler/dist-types/timing.d.ts @@ -0,0 +1,8 @@ +/** + * @internal + * For test spies. 
+ */ +export declare const timing: { + setTimeout: (cb: (...ignored: any[]) => void | unknown, ms?: number) => number; + clearTimeout: (timeoutId: string | number | undefined | unknown) => void; +}; diff --git a/amplify/functions/downloadDocument/node_modules/@smithy/node-http-handler/dist-types/ts3.4/constants.d.ts b/amplify/functions/downloadDocument/node_modules/@smithy/node-http-handler/dist-types/ts3.4/constants.d.ts new file mode 100644 index 0000000..b02b0b6 --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@smithy/node-http-handler/dist-types/ts3.4/constants.d.ts @@ -0,0 +1,5 @@ +/** + * Node.js system error codes that indicate timeout. + * @deprecated use NODEJS_TIMEOUT_ERROR_CODES from @smithy/service-error-classification/constants + */ +export declare const NODEJS_TIMEOUT_ERROR_CODES: string[]; diff --git a/amplify/functions/downloadDocument/node_modules/@smithy/node-http-handler/dist-types/ts3.4/get-transformed-headers.d.ts b/amplify/functions/downloadDocument/node_modules/@smithy/node-http-handler/dist-types/ts3.4/get-transformed-headers.d.ts new file mode 100644 index 0000000..c6f5a8b --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@smithy/node-http-handler/dist-types/ts3.4/get-transformed-headers.d.ts @@ -0,0 +1,4 @@ +import { HeaderBag } from "@smithy/types"; +import { IncomingHttpHeaders } from "http2"; +declare const getTransformedHeaders: (headers: IncomingHttpHeaders) => HeaderBag; +export { getTransformedHeaders }; diff --git a/amplify/functions/downloadDocument/node_modules/@smithy/node-http-handler/dist-types/ts3.4/index.d.ts b/amplify/functions/downloadDocument/node_modules/@smithy/node-http-handler/dist-types/ts3.4/index.d.ts new file mode 100644 index 0000000..055c48c --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@smithy/node-http-handler/dist-types/ts3.4/index.d.ts @@ -0,0 +1,3 @@ +export * from "./node-http-handler"; +export * from "./node-http2-handler"; +export * from 
"./stream-collector"; diff --git a/amplify/functions/downloadDocument/node_modules/@smithy/node-http-handler/dist-types/ts3.4/node-http-handler.d.ts b/amplify/functions/downloadDocument/node_modules/@smithy/node-http-handler/dist-types/ts3.4/node-http-handler.d.ts new file mode 100644 index 0000000..eb1da7b --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@smithy/node-http-handler/dist-types/ts3.4/node-http-handler.d.ts @@ -0,0 +1,46 @@ +/// +import { HttpHandler, HttpRequest, HttpResponse } from "@smithy/protocol-http"; +import { Logger, NodeHttpHandlerOptions } from "@smithy/types"; +import { HttpHandlerOptions, Provider } from "@smithy/types"; +import { Agent as hAgent } from "http"; +import { Agent as hsAgent } from "https"; +export { NodeHttpHandlerOptions }; +/** + * @public + * A default of 0 means no timeout. + */ +export declare const DEFAULT_REQUEST_TIMEOUT = 0; +/** + * @public + * A request handler that uses the Node.js http and https modules. + */ +export declare class NodeHttpHandler implements HttpHandler { + private config?; + private configProvider; + private socketWarningTimestamp; + readonly metadata: { + handlerProtocol: string; + }; + /** + * @returns the input if it is an HttpHandler of any class, + * or instantiates a new instance of this handler. + */ + static create(instanceOrOptions?: HttpHandler | NodeHttpHandlerOptions | Provider): NodeHttpHandler | HttpHandler; + /** + * @internal + * + * @param agent - http(s) agent in use by the NodeHttpHandler instance. + * @param socketWarningTimestamp - last socket usage check timestamp. + * @param logger - channel for the warning. + * @returns timestamp of last emitted warning. 
+ */ + static checkSocketUsage(agent: hAgent | hsAgent, socketWarningTimestamp: number, logger?: Logger): number; + constructor(options?: NodeHttpHandlerOptions | Provider); + private resolveDefaultConfig; + destroy(): void; + handle(request: HttpRequest, { abortSignal }?: HttpHandlerOptions): Promise<{ + response: HttpResponse; + }>; + updateHttpClientConfig(key: keyof NodeHttpHandlerOptions, value: NodeHttpHandlerOptions[typeof key]): void; + httpHandlerConfigs(): NodeHttpHandlerOptions; +} diff --git a/amplify/functions/downloadDocument/node_modules/@smithy/node-http-handler/dist-types/ts3.4/node-http2-connection-manager.d.ts b/amplify/functions/downloadDocument/node_modules/@smithy/node-http-handler/dist-types/ts3.4/node-http2-connection-manager.d.ts new file mode 100644 index 0000000..8aa87c1 --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@smithy/node-http-handler/dist-types/ts3.4/node-http2-connection-manager.d.ts @@ -0,0 +1,25 @@ +/// +import { RequestContext } from "@smithy/types"; +import { ConnectConfiguration } from "@smithy/types"; +import { ConnectionManager, ConnectionManagerConfiguration } from "@smithy/types"; +import { ClientHttp2Session } from "http2"; +/** + * @public + */ +export declare class NodeHttp2ConnectionManager implements ConnectionManager { + constructor(config: ConnectionManagerConfiguration); + private config; + private readonly sessionCache; + lease(requestContext: RequestContext, connectionConfiguration: ConnectConfiguration): ClientHttp2Session; + /** + * Delete a session from the connection pool. + * @param authority The authority of the session to delete. + * @param session The session to delete. 
+ */ + deleteSession(authority: string, session: ClientHttp2Session): void; + release(requestContext: RequestContext, session: ClientHttp2Session): void; + destroy(): void; + setMaxConcurrentStreams(maxConcurrentStreams: number): void; + setDisableConcurrentStreams(disableConcurrentStreams: boolean): void; + private getUrlString; +} diff --git a/amplify/functions/downloadDocument/node_modules/@smithy/node-http-handler/dist-types/ts3.4/node-http2-connection-pool.d.ts b/amplify/functions/downloadDocument/node_modules/@smithy/node-http-handler/dist-types/ts3.4/node-http2-connection-pool.d.ts new file mode 100644 index 0000000..e9116cb --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@smithy/node-http-handler/dist-types/ts3.4/node-http2-connection-pool.d.ts @@ -0,0 +1,13 @@ +/// +import { ConnectionPool } from "@smithy/types"; +import { ClientHttp2Session } from "http2"; +export declare class NodeHttp2ConnectionPool implements ConnectionPool { + private sessions; + constructor(sessions?: ClientHttp2Session[]); + poll(): ClientHttp2Session | void; + offerLast(session: ClientHttp2Session): void; + contains(session: ClientHttp2Session): boolean; + remove(session: ClientHttp2Session): void; + [Symbol.iterator](): IterableIterator; + destroy(connection: ClientHttp2Session): void; +} diff --git a/amplify/functions/downloadDocument/node_modules/@smithy/node-http-handler/dist-types/ts3.4/node-http2-handler.d.ts b/amplify/functions/downloadDocument/node_modules/@smithy/node-http-handler/dist-types/ts3.4/node-http2-handler.d.ts new file mode 100644 index 0000000..eaa24bd --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@smithy/node-http-handler/dist-types/ts3.4/node-http2-handler.d.ts @@ -0,0 +1,62 @@ +import { HttpHandler, HttpRequest, HttpResponse } from "@smithy/protocol-http"; +import { HttpHandlerOptions, Provider } from "@smithy/types"; +/** + * Represents the http2 options that can be passed to a node http2 client. 
+ * @public + */ +export interface NodeHttp2HandlerOptions { + /** + * The maximum time in milliseconds that a stream may remain idle before it + * is closed. + */ + requestTimeout?: number; + /** + * The maximum time in milliseconds that a session or socket may remain idle + * before it is closed. + * https://nodejs.org/docs/latest-v12.x/api/http2.html#http2_http2session_and_sockets + */ + sessionTimeout?: number; + /** + * Disables processing concurrent streams on a ClientHttp2Session instance. When set + * to true, a new session instance is created for each request to a URL. + * **Default:** false. + * https://nodejs.org/api/http2.html#http2_class_clienthttp2session + */ + disableConcurrentStreams?: boolean; + /** + * Maximum number of concurrent Http2Stream instances per ClientHttp2Session. Each session + * may have up to 2^31-1 Http2Stream instances over its lifetime. + * This value must be greater than or equal to 0. + * https://nodejs.org/api/http2.html#class-http2stream + */ + maxConcurrentStreams?: number; +} +/** + * A request handler using the node:http2 package. + * @public + */ +export declare class NodeHttp2Handler implements HttpHandler { + private config?; + private configProvider; + readonly metadata: { + handlerProtocol: string; + }; + private readonly connectionManager; + /** + * @returns the input if it is an HttpHandler of any class, + * or instantiates a new instance of this handler. + */ + static create(instanceOrOptions?: HttpHandler | NodeHttp2HandlerOptions | Provider): HttpHandler | NodeHttp2Handler; + constructor(options?: NodeHttp2HandlerOptions | Provider); + destroy(): void; + handle(request: HttpRequest, { abortSignal }?: HttpHandlerOptions): Promise<{ + response: HttpResponse; + }>; + updateHttpClientConfig(key: keyof NodeHttp2HandlerOptions, value: NodeHttp2HandlerOptions[typeof key]): void; + httpHandlerConfigs(): NodeHttp2HandlerOptions; + /** + * Destroys a session. + * @param session - the session to destroy. 
+ */ + private destroySession; +} diff --git a/amplify/functions/downloadDocument/node_modules/@smithy/node-http-handler/dist-types/ts3.4/readable.mock.d.ts b/amplify/functions/downloadDocument/node_modules/@smithy/node-http-handler/dist-types/ts3.4/readable.mock.d.ts new file mode 100644 index 0000000..f0492d4 --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@smithy/node-http-handler/dist-types/ts3.4/readable.mock.d.ts @@ -0,0 +1,13 @@ +/// +import { Readable, ReadableOptions } from "stream"; +export interface ReadFromBuffersOptions extends ReadableOptions { + buffers: Buffer[]; + errorAfter?: number; +} +export declare class ReadFromBuffers extends Readable { + private buffersToRead; + private numBuffersRead; + private errorAfter; + constructor(options: ReadFromBuffersOptions); + _read(): boolean | undefined; +} diff --git a/amplify/functions/downloadDocument/node_modules/@smithy/node-http-handler/dist-types/ts3.4/server.mock.d.ts b/amplify/functions/downloadDocument/node_modules/@smithy/node-http-handler/dist-types/ts3.4/server.mock.d.ts new file mode 100644 index 0000000..6a7e350 --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@smithy/node-http-handler/dist-types/ts3.4/server.mock.d.ts @@ -0,0 +1,12 @@ +import { HttpResponse } from "@smithy/types"; +import { IncomingMessage, Server as HttpServer, ServerResponse } from "http"; +import { Http2Server } from "http2"; +import { Server as HttpsServer } from "https"; +export declare const createResponseFunction: (httpResp: HttpResponse) => (request: IncomingMessage, response: ServerResponse) => void; +export declare const createResponseFunctionWithDelay: (httpResp: HttpResponse, delay: number) => (request: IncomingMessage, response: ServerResponse) => void; +export declare const createContinueResponseFunction: (httpResp: HttpResponse) => (request: IncomingMessage, response: ServerResponse) => void; +export declare const createMockHttpsServer: () => HttpsServer; +export declare 
const createMockHttpServer: () => HttpServer; +export declare const createMockHttp2Server: () => Http2Server; +export declare const createMirrorResponseFunction: (httpResp: HttpResponse) => (request: IncomingMessage, response: ServerResponse) => void; +export declare const getResponseBody: (response: HttpResponse) => Promise; diff --git a/amplify/functions/downloadDocument/node_modules/@smithy/node-http-handler/dist-types/ts3.4/set-connection-timeout.d.ts b/amplify/functions/downloadDocument/node_modules/@smithy/node-http-handler/dist-types/ts3.4/set-connection-timeout.d.ts new file mode 100644 index 0000000..96cdb66 --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@smithy/node-http-handler/dist-types/ts3.4/set-connection-timeout.d.ts @@ -0,0 +1,3 @@ +/// +import { ClientRequest } from "http"; +export declare const setConnectionTimeout: (request: ClientRequest, reject: (err: Error) => void, timeoutInMs?: number) => NodeJS.Timeout | number; diff --git a/amplify/functions/downloadDocument/node_modules/@smithy/node-http-handler/dist-types/ts3.4/set-socket-keep-alive.d.ts b/amplify/functions/downloadDocument/node_modules/@smithy/node-http-handler/dist-types/ts3.4/set-socket-keep-alive.d.ts new file mode 100644 index 0000000..3bb6ec0 --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@smithy/node-http-handler/dist-types/ts3.4/set-socket-keep-alive.d.ts @@ -0,0 +1,7 @@ +/// +import { ClientRequest } from "http"; +export interface SocketKeepAliveOptions { + keepAlive: boolean; + keepAliveMsecs?: number; +} +export declare const setSocketKeepAlive: (request: ClientRequest, { keepAlive, keepAliveMsecs }: SocketKeepAliveOptions, deferTimeMs?: number) => NodeJS.Timeout | number; diff --git a/amplify/functions/downloadDocument/node_modules/@smithy/node-http-handler/dist-types/ts3.4/set-socket-timeout.d.ts b/amplify/functions/downloadDocument/node_modules/@smithy/node-http-handler/dist-types/ts3.4/set-socket-timeout.d.ts new file mode 100644 
index 0000000..57f8743 --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@smithy/node-http-handler/dist-types/ts3.4/set-socket-timeout.d.ts @@ -0,0 +1,3 @@ +/// +import { ClientRequest } from "http"; +export declare const setSocketTimeout: (request: ClientRequest, reject: (err: Error) => void, timeoutInMs?: number) => NodeJS.Timeout | number; diff --git a/amplify/functions/downloadDocument/node_modules/@smithy/node-http-handler/dist-types/ts3.4/stream-collector/collector.d.ts b/amplify/functions/downloadDocument/node_modules/@smithy/node-http-handler/dist-types/ts3.4/stream-collector/collector.d.ts new file mode 100644 index 0000000..c329bd4 --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@smithy/node-http-handler/dist-types/ts3.4/stream-collector/collector.d.ts @@ -0,0 +1,6 @@ +/// +import { Writable } from "stream"; +export declare class Collector extends Writable { + readonly bufferedBytes: Buffer[]; + _write(chunk: Buffer, encoding: string, callback: (err?: Error) => void): void; +} diff --git a/amplify/functions/downloadDocument/node_modules/@smithy/node-http-handler/dist-types/ts3.4/stream-collector/index.d.ts b/amplify/functions/downloadDocument/node_modules/@smithy/node-http-handler/dist-types/ts3.4/stream-collector/index.d.ts new file mode 100644 index 0000000..1022a17 --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@smithy/node-http-handler/dist-types/ts3.4/stream-collector/index.d.ts @@ -0,0 +1,6 @@ +import { StreamCollector } from "@smithy/types"; +/** + * @internal + * Converts a stream to a byte array. 
+ */ +export declare const streamCollector: StreamCollector; diff --git a/amplify/functions/downloadDocument/node_modules/@smithy/node-http-handler/dist-types/ts3.4/stream-collector/readable.mock.d.ts b/amplify/functions/downloadDocument/node_modules/@smithy/node-http-handler/dist-types/ts3.4/stream-collector/readable.mock.d.ts new file mode 100644 index 0000000..e2c0a4c --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@smithy/node-http-handler/dist-types/ts3.4/stream-collector/readable.mock.d.ts @@ -0,0 +1,13 @@ +/// +import { Readable, ReadableOptions } from "stream"; +export interface ReadFromBuffersOptions extends ReadableOptions { + buffers: Buffer[]; + errorAfter?: number; +} +export declare class ReadFromBuffers extends Readable { + private buffersToRead; + private numBuffersRead; + private errorAfter; + constructor(options: ReadFromBuffersOptions); + _read(size: number): boolean | undefined; +} diff --git a/amplify/functions/downloadDocument/node_modules/@smithy/node-http-handler/dist-types/ts3.4/timing.d.ts b/amplify/functions/downloadDocument/node_modules/@smithy/node-http-handler/dist-types/ts3.4/timing.d.ts new file mode 100644 index 0000000..c88dd2f --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@smithy/node-http-handler/dist-types/ts3.4/timing.d.ts @@ -0,0 +1,8 @@ +/** + * @internal + * For test spies. 
+ */ +export declare const timing: { + setTimeout: (cb: (...ignored: any[]) => void | unknown, ms?: number) => number; + clearTimeout: (timeoutId: string | number | undefined | unknown) => void; +}; diff --git a/amplify/functions/downloadDocument/node_modules/@smithy/node-http-handler/dist-types/ts3.4/write-request-body.d.ts b/amplify/functions/downloadDocument/node_modules/@smithy/node-http-handler/dist-types/ts3.4/write-request-body.d.ts new file mode 100644 index 0000000..0f13e96 --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@smithy/node-http-handler/dist-types/ts3.4/write-request-body.d.ts @@ -0,0 +1,12 @@ +/// +import { HttpRequest } from "@smithy/types"; +import { ClientRequest } from "http"; +import { ClientHttp2Stream } from "http2"; +/** + * This resolves when writeBody has been called. + * + * @param httpRequest - opened Node.js request. + * @param request - container with the request body. + * @param maxContinueTimeoutMs - time to wait for the continue event. + */ +export declare function writeRequestBody(httpRequest: ClientRequest | ClientHttp2Stream, request: HttpRequest, maxContinueTimeoutMs?: number): Promise; diff --git a/amplify/functions/downloadDocument/node_modules/@smithy/node-http-handler/dist-types/write-request-body.d.ts b/amplify/functions/downloadDocument/node_modules/@smithy/node-http-handler/dist-types/write-request-body.d.ts new file mode 100644 index 0000000..0c49e32 --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@smithy/node-http-handler/dist-types/write-request-body.d.ts @@ -0,0 +1,13 @@ +/// +/// +import { HttpRequest } from "@smithy/types"; +import { ClientRequest } from "http"; +import { ClientHttp2Stream } from "http2"; +/** + * This resolves when writeBody has been called. + * + * @param httpRequest - opened Node.js request. + * @param request - container with the request body. + * @param maxContinueTimeoutMs - time to wait for the continue event. 
+ */ +export declare function writeRequestBody(httpRequest: ClientRequest | ClientHttp2Stream, request: HttpRequest, maxContinueTimeoutMs?: number): Promise; diff --git a/amplify/functions/downloadDocument/node_modules/@smithy/node-http-handler/package.json b/amplify/functions/downloadDocument/node_modules/@smithy/node-http-handler/package.json new file mode 100644 index 0000000..2e4e1e3 --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@smithy/node-http-handler/package.json @@ -0,0 +1,67 @@ +{ + "name": "@smithy/node-http-handler", + "version": "4.0.4", + "description": "Provides a way to make requests", + "scripts": { + "build": "concurrently 'yarn:build:cjs' 'yarn:build:es' 'yarn:build:types && yarn build:types:downlevel'", + "build:cjs": "node ../../scripts/inline node-http-handler", + "build:es": "yarn g:tsc -p tsconfig.es.json", + "build:types": "yarn g:tsc -p tsconfig.types.json", + "build:types:downlevel": "rimraf dist-types/ts3.4 && downlevel-dts dist-types dist-types/ts3.4", + "stage-release": "rimraf ./.release && yarn pack && mkdir ./.release && tar zxvf ./package.tgz --directory ./.release && rm ./package.tgz", + "clean": "rimraf ./dist-* && rimraf *.tsbuildinfo || exit 0", + "lint": "eslint -c ../../.eslintrc.js \"src/**/*.ts\"", + "format": "prettier --config ../../prettier.config.js --ignore-path ../../.prettierignore --write \"**/*.{ts,md,json}\"", + "extract:docs": "api-extractor run --local", + "test": "yarn g:vitest run", + "test:watch": "yarn g:vitest watch" + }, + "author": { + "name": "AWS SDK for JavaScript Team", + "email": "", + "url": "https://aws.amazon.com/javascript/" + }, + "license": "Apache-2.0", + "main": "./dist-cjs/index.js", + "module": "./dist-es/index.js", + "types": "./dist-types/index.d.ts", + "dependencies": { + "@smithy/abort-controller": "^4.0.2", + "@smithy/protocol-http": "^5.1.0", + "@smithy/querystring-builder": "^4.0.2", + "@smithy/types": "^4.2.0", + "tslib": "^2.6.2" + }, + "devDependencies": { + 
"@types/node": "^18.11.9", + "concurrently": "7.0.0", + "downlevel-dts": "0.10.1", + "rimraf": "3.0.2", + "typedoc": "0.23.23" + }, + "engines": { + "node": ">=18.0.0" + }, + "typesVersions": { + "<4.0": { + "dist-types/*": [ + "dist-types/ts3.4/*" + ] + } + }, + "files": [ + "dist-*/**" + ], + "homepage": "https://github.com/smithy-lang/smithy-typescript/tree/main/packages/node-http-handler", + "repository": { + "type": "git", + "url": "https://github.com/smithy-lang/smithy-typescript.git", + "directory": "packages/node-http-handler" + }, + "typedoc": { + "entryPoint": "src/index.ts" + }, + "publishConfig": { + "directory": ".release/package" + } +} \ No newline at end of file diff --git a/amplify/functions/downloadDocument/node_modules/@smithy/property-provider/LICENSE b/amplify/functions/downloadDocument/node_modules/@smithy/property-provider/LICENSE new file mode 100644 index 0000000..7b6491b --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@smithy/property-provider/LICENSE @@ -0,0 +1,201 @@ +Apache License + Version 2.0, January 2004 + http://www.apache.org/licenses/ + + TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION + + 1. Definitions. + + "License" shall mean the terms and conditions for use, reproduction, + and distribution as defined by Sections 1 through 9 of this document. + + "Licensor" shall mean the copyright owner or entity authorized by + the copyright owner that is granting the License. + + "Legal Entity" shall mean the union of the acting entity and all + other entities that control, are controlled by, or are under common + control with that entity. For the purposes of this definition, + "control" means (i) the power, direct or indirect, to cause the + direction or management of such entity, whether by contract or + otherwise, or (ii) ownership of fifty percent (50%) or more of the + outstanding shares, or (iii) beneficial ownership of such entity. 
+ + "You" (or "Your") shall mean an individual or Legal Entity + exercising permissions granted by this License. + + "Source" form shall mean the preferred form for making modifications, + including but not limited to software source code, documentation + source, and configuration files. + + "Object" form shall mean any form resulting from mechanical + transformation or translation of a Source form, including but + not limited to compiled object code, generated documentation, + and conversions to other media types. + + "Work" shall mean the work of authorship, whether in Source or + Object form, made available under the License, as indicated by a + copyright notice that is included in or attached to the work + (an example is provided in the Appendix below). + + "Derivative Works" shall mean any work, whether in Source or Object + form, that is based on (or derived from) the Work and for which the + editorial revisions, annotations, elaborations, or other modifications + represent, as a whole, an original work of authorship. For the purposes + of this License, Derivative Works shall not include works that remain + separable from, or merely link (or bind by name) to the interfaces of, + the Work and Derivative Works thereof. + + "Contribution" shall mean any work of authorship, including + the original version of the Work and any modifications or additions + to that Work or Derivative Works thereof, that is intentionally + submitted to Licensor for inclusion in the Work by the copyright owner + or by an individual or Legal Entity authorized to submit on behalf of + the copyright owner. 
For the purposes of this definition, "submitted" + means any form of electronic, verbal, or written communication sent + to the Licensor or its representatives, including but not limited to + communication on electronic mailing lists, source code control systems, + and issue tracking systems that are managed by, or on behalf of, the + Licensor for the purpose of discussing and improving the Work, but + excluding communication that is conspicuously marked or otherwise + designated in writing by the copyright owner as "Not a Contribution." + + "Contributor" shall mean Licensor and any individual or Legal Entity + on behalf of whom a Contribution has been received by Licensor and + subsequently incorporated within the Work. + + 2. Grant of Copyright License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + copyright license to reproduce, prepare Derivative Works of, + publicly display, publicly perform, sublicense, and distribute the + Work and such Derivative Works in Source or Object form. + + 3. Grant of Patent License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + (except as stated in this section) patent license to make, have made, + use, offer to sell, sell, import, and otherwise transfer the Work, + where such license applies only to those patent claims licensable + by such Contributor that are necessarily infringed by their + Contribution(s) alone or by combination of their Contribution(s) + with the Work to which such Contribution(s) was submitted. 
If You + institute patent litigation against any entity (including a + cross-claim or counterclaim in a lawsuit) alleging that the Work + or a Contribution incorporated within the Work constitutes direct + or contributory patent infringement, then any patent licenses + granted to You under this License for that Work shall terminate + as of the date such litigation is filed. + + 4. Redistribution. You may reproduce and distribute copies of the + Work or Derivative Works thereof in any medium, with or without + modifications, and in Source or Object form, provided that You + meet the following conditions: + + (a) You must give any other recipients of the Work or + Derivative Works a copy of this License; and + + (b) You must cause any modified files to carry prominent notices + stating that You changed the files; and + + (c) You must retain, in the Source form of any Derivative Works + that You distribute, all copyright, patent, trademark, and + attribution notices from the Source form of the Work, + excluding those notices that do not pertain to any part of + the Derivative Works; and + + (d) If the Work includes a "NOTICE" text file as part of its + distribution, then any Derivative Works that You distribute must + include a readable copy of the attribution notices contained + within such NOTICE file, excluding those notices that do not + pertain to any part of the Derivative Works, in at least one + of the following places: within a NOTICE text file distributed + as part of the Derivative Works; within the Source form or + documentation, if provided along with the Derivative Works; or, + within a display generated by the Derivative Works, if and + wherever such third-party notices normally appear. The contents + of the NOTICE file are for informational purposes only and + do not modify the License. 
You may add Your own attribution + notices within Derivative Works that You distribute, alongside + or as an addendum to the NOTICE text from the Work, provided + that such additional attribution notices cannot be construed + as modifying the License. + + You may add Your own copyright statement to Your modifications and + may provide additional or different license terms and conditions + for use, reproduction, or distribution of Your modifications, or + for any such Derivative Works as a whole, provided Your use, + reproduction, and distribution of the Work otherwise complies with + the conditions stated in this License. + + 5. Submission of Contributions. Unless You explicitly state otherwise, + any Contribution intentionally submitted for inclusion in the Work + by You to the Licensor shall be under the terms and conditions of + this License, without any additional terms or conditions. + Notwithstanding the above, nothing herein shall supersede or modify + the terms of any separate license agreement you may have executed + with Licensor regarding such Contributions. + + 6. Trademarks. This License does not grant permission to use the trade + names, trademarks, service marks, or product names of the Licensor, + except as required for reasonable and customary use in describing the + origin of the Work and reproducing the content of the NOTICE file. + + 7. Disclaimer of Warranty. Unless required by applicable law or + agreed to in writing, Licensor provides the Work (and each + Contributor provides its Contributions) on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or + implied, including, without limitation, any warranties or conditions + of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A + PARTICULAR PURPOSE. You are solely responsible for determining the + appropriateness of using or redistributing the Work and assume any + risks associated with Your exercise of permissions under this License. + + 8. 
Limitation of Liability. In no event and under no legal theory, + whether in tort (including negligence), contract, or otherwise, + unless required by applicable law (such as deliberate and grossly + negligent acts) or agreed to in writing, shall any Contributor be + liable to You for damages, including any direct, indirect, special, + incidental, or consequential damages of any character arising as a + result of this License or out of the use or inability to use the + Work (including but not limited to damages for loss of goodwill, + work stoppage, computer failure or malfunction, or any and all + other commercial damages or losses), even if such Contributor + has been advised of the possibility of such damages. + + 9. Accepting Warranty or Additional Liability. While redistributing + the Work or Derivative Works thereof, You may choose to offer, + and charge a fee for, acceptance of support, warranty, indemnity, + or other liability obligations and/or rights consistent with this + License. However, in accepting such obligations, You may act only + on Your own behalf and on Your sole responsibility, not on behalf + of any other Contributor, and only if You agree to indemnify, + defend, and hold each Contributor harmless for any liability + incurred by, or claims asserted against, such Contributor by reason + of your accepting any such warranty or additional liability. + + END OF TERMS AND CONDITIONS + + APPENDIX: How to apply the Apache License to your work. + + To apply the Apache License to your work, attach the following + boilerplate notice, with the fields enclosed by brackets "{}" + replaced with your own identifying information. (Don't include + the brackets!) The text should be enclosed in the appropriate + comment syntax for the file format. We also recommend that a + file or class name and description of purpose be included on the + same "printed page" as the copyright notice for easier + identification within third-party archives. 
+ + Copyright 2018-2020 Amazon.com, Inc. or its affiliates. All Rights Reserved. + + Licensed under the Apache License, Version 2.0 (the "License"); + you may not use this file except in compliance with the License. + You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + + Unless required by applicable law or agreed to in writing, software + distributed under the License is distributed on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + See the License for the specific language governing permissions and + limitations under the License. \ No newline at end of file diff --git a/amplify/functions/downloadDocument/node_modules/@smithy/property-provider/README.md b/amplify/functions/downloadDocument/node_modules/@smithy/property-provider/README.md new file mode 100644 index 0000000..b35fafb --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@smithy/property-provider/README.md @@ -0,0 +1,10 @@ +# @smithy/property-provider + +[![NPM version](https://img.shields.io/npm/v/@smithy/property-provider/latest.svg)](https://www.npmjs.com/package/@smithy/property-provider) +[![NPM downloads](https://img.shields.io/npm/dm/@smithy/property-provider.svg)](https://www.npmjs.com/package/@smithy/property-provider) + +> An internal package + +## Usage + +You probably shouldn't, at least directly. 
diff --git a/amplify/functions/downloadDocument/node_modules/@smithy/property-provider/dist-cjs/CredentialsProviderError.js b/amplify/functions/downloadDocument/node_modules/@smithy/property-provider/dist-cjs/CredentialsProviderError.js new file mode 100644 index 0000000..532e610 --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@smithy/property-provider/dist-cjs/CredentialsProviderError.js @@ -0,0 +1 @@ +module.exports = require("./index.js"); \ No newline at end of file diff --git a/amplify/functions/downloadDocument/node_modules/@smithy/property-provider/dist-cjs/ProviderError.js b/amplify/functions/downloadDocument/node_modules/@smithy/property-provider/dist-cjs/ProviderError.js new file mode 100644 index 0000000..532e610 --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@smithy/property-provider/dist-cjs/ProviderError.js @@ -0,0 +1 @@ +module.exports = require("./index.js"); \ No newline at end of file diff --git a/amplify/functions/downloadDocument/node_modules/@smithy/property-provider/dist-cjs/TokenProviderError.js b/amplify/functions/downloadDocument/node_modules/@smithy/property-provider/dist-cjs/TokenProviderError.js new file mode 100644 index 0000000..532e610 --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@smithy/property-provider/dist-cjs/TokenProviderError.js @@ -0,0 +1 @@ +module.exports = require("./index.js"); \ No newline at end of file diff --git a/amplify/functions/downloadDocument/node_modules/@smithy/property-provider/dist-cjs/chain.js b/amplify/functions/downloadDocument/node_modules/@smithy/property-provider/dist-cjs/chain.js new file mode 100644 index 0000000..532e610 --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@smithy/property-provider/dist-cjs/chain.js @@ -0,0 +1 @@ +module.exports = require("./index.js"); \ No newline at end of file diff --git a/amplify/functions/downloadDocument/node_modules/@smithy/property-provider/dist-cjs/fromStatic.js 
b/amplify/functions/downloadDocument/node_modules/@smithy/property-provider/dist-cjs/fromStatic.js new file mode 100644 index 0000000..532e610 --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@smithy/property-provider/dist-cjs/fromStatic.js @@ -0,0 +1 @@ +module.exports = require("./index.js"); \ No newline at end of file diff --git a/amplify/functions/downloadDocument/node_modules/@smithy/property-provider/dist-cjs/index.js b/amplify/functions/downloadDocument/node_modules/@smithy/property-provider/dist-cjs/index.js new file mode 100644 index 0000000..b0fa627 --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@smithy/property-provider/dist-cjs/index.js @@ -0,0 +1,170 @@ +var __defProp = Object.defineProperty; +var __getOwnPropDesc = Object.getOwnPropertyDescriptor; +var __getOwnPropNames = Object.getOwnPropertyNames; +var __hasOwnProp = Object.prototype.hasOwnProperty; +var __name = (target, value) => __defProp(target, "name", { value, configurable: true }); +var __export = (target, all) => { + for (var name in all) + __defProp(target, name, { get: all[name], enumerable: true }); +}; +var __copyProps = (to, from, except, desc) => { + if (from && typeof from === "object" || typeof from === "function") { + for (let key of __getOwnPropNames(from)) + if (!__hasOwnProp.call(to, key) && key !== except) + __defProp(to, key, { get: () => from[key], enumerable: !(desc = __getOwnPropDesc(from, key)) || desc.enumerable }); + } + return to; +}; +var __toCommonJS = (mod) => __copyProps(__defProp({}, "__esModule", { value: true }), mod); + +// src/index.ts +var src_exports = {}; +__export(src_exports, { + CredentialsProviderError: () => CredentialsProviderError, + ProviderError: () => ProviderError, + TokenProviderError: () => TokenProviderError, + chain: () => chain, + fromStatic: () => fromStatic, + memoize: () => memoize +}); +module.exports = __toCommonJS(src_exports); + +// src/ProviderError.ts +var ProviderError = class _ProviderError 
extends Error { + constructor(message, options = true) { + let logger; + let tryNextLink = true; + if (typeof options === "boolean") { + logger = void 0; + tryNextLink = options; + } else if (options != null && typeof options === "object") { + logger = options.logger; + tryNextLink = options.tryNextLink ?? true; + } + super(message); + this.name = "ProviderError"; + this.tryNextLink = tryNextLink; + Object.setPrototypeOf(this, _ProviderError.prototype); + logger?.debug?.(`@smithy/property-provider ${tryNextLink ? "->" : "(!)"} ${message}`); + } + static { + __name(this, "ProviderError"); + } + /** + * @deprecated use new operator. + */ + static from(error, options = true) { + return Object.assign(new this(error.message, options), error); + } +}; + +// src/CredentialsProviderError.ts +var CredentialsProviderError = class _CredentialsProviderError extends ProviderError { + /** + * @override + */ + constructor(message, options = true) { + super(message, options); + this.name = "CredentialsProviderError"; + Object.setPrototypeOf(this, _CredentialsProviderError.prototype); + } + static { + __name(this, "CredentialsProviderError"); + } +}; + +// src/TokenProviderError.ts +var TokenProviderError = class _TokenProviderError extends ProviderError { + /** + * @override + */ + constructor(message, options = true) { + super(message, options); + this.name = "TokenProviderError"; + Object.setPrototypeOf(this, _TokenProviderError.prototype); + } + static { + __name(this, "TokenProviderError"); + } +}; + +// src/chain.ts +var chain = /* @__PURE__ */ __name((...providers) => async () => { + if (providers.length === 0) { + throw new ProviderError("No providers in chain"); + } + let lastProviderError; + for (const provider of providers) { + try { + const credentials = await provider(); + return credentials; + } catch (err) { + lastProviderError = err; + if (err?.tryNextLink) { + continue; + } + throw err; + } + } + throw lastProviderError; +}, "chain"); + +// src/fromStatic.ts +var 
fromStatic = /* @__PURE__ */ __name((staticValue) => () => Promise.resolve(staticValue), "fromStatic"); + +// src/memoize.ts +var memoize = /* @__PURE__ */ __name((provider, isExpired, requiresRefresh) => { + let resolved; + let pending; + let hasResult; + let isConstant = false; + const coalesceProvider = /* @__PURE__ */ __name(async () => { + if (!pending) { + pending = provider(); + } + try { + resolved = await pending; + hasResult = true; + isConstant = false; + } finally { + pending = void 0; + } + return resolved; + }, "coalesceProvider"); + if (isExpired === void 0) { + return async (options) => { + if (!hasResult || options?.forceRefresh) { + resolved = await coalesceProvider(); + } + return resolved; + }; + } + return async (options) => { + if (!hasResult || options?.forceRefresh) { + resolved = await coalesceProvider(); + } + if (isConstant) { + return resolved; + } + if (requiresRefresh && !requiresRefresh(resolved)) { + isConstant = true; + return resolved; + } + if (isExpired(resolved)) { + await coalesceProvider(); + return resolved; + } + return resolved; + }; +}, "memoize"); +// Annotate the CommonJS export names for ESM import in node: + +0 && (module.exports = { + CredentialsProviderError, + ProviderError, + TokenProviderError, + chain, + fromStatic, + memoize +}); + diff --git a/amplify/functions/downloadDocument/node_modules/@smithy/property-provider/dist-cjs/memoize.js b/amplify/functions/downloadDocument/node_modules/@smithy/property-provider/dist-cjs/memoize.js new file mode 100644 index 0000000..532e610 --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@smithy/property-provider/dist-cjs/memoize.js @@ -0,0 +1 @@ +module.exports = require("./index.js"); \ No newline at end of file diff --git a/amplify/functions/downloadDocument/node_modules/@smithy/property-provider/dist-es/CredentialsProviderError.js b/amplify/functions/downloadDocument/node_modules/@smithy/property-provider/dist-es/CredentialsProviderError.js new file mode 
100644 index 0000000..cec1f9e --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@smithy/property-provider/dist-es/CredentialsProviderError.js @@ -0,0 +1,8 @@ +import { ProviderError } from "./ProviderError"; +export class CredentialsProviderError extends ProviderError { + constructor(message, options = true) { + super(message, options); + this.name = "CredentialsProviderError"; + Object.setPrototypeOf(this, CredentialsProviderError.prototype); + } +} diff --git a/amplify/functions/downloadDocument/node_modules/@smithy/property-provider/dist-es/ProviderError.js b/amplify/functions/downloadDocument/node_modules/@smithy/property-provider/dist-es/ProviderError.js new file mode 100644 index 0000000..e0db2b8 --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@smithy/property-provider/dist-es/ProviderError.js @@ -0,0 +1,22 @@ +export class ProviderError extends Error { + constructor(message, options = true) { + let logger; + let tryNextLink = true; + if (typeof options === "boolean") { + logger = undefined; + tryNextLink = options; + } + else if (options != null && typeof options === "object") { + logger = options.logger; + tryNextLink = options.tryNextLink ?? true; + } + super(message); + this.name = "ProviderError"; + this.tryNextLink = tryNextLink; + Object.setPrototypeOf(this, ProviderError.prototype); + logger?.debug?.(`@smithy/property-provider ${tryNextLink ? 
"->" : "(!)"} ${message}`); + } + static from(error, options = true) { + return Object.assign(new this(error.message, options), error); + } +} diff --git a/amplify/functions/downloadDocument/node_modules/@smithy/property-provider/dist-es/TokenProviderError.js b/amplify/functions/downloadDocument/node_modules/@smithy/property-provider/dist-es/TokenProviderError.js new file mode 100644 index 0000000..f0e75b1 --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@smithy/property-provider/dist-es/TokenProviderError.js @@ -0,0 +1,8 @@ +import { ProviderError } from "./ProviderError"; +export class TokenProviderError extends ProviderError { + constructor(message, options = true) { + super(message, options); + this.name = "TokenProviderError"; + Object.setPrototypeOf(this, TokenProviderError.prototype); + } +} diff --git a/amplify/functions/downloadDocument/node_modules/@smithy/property-provider/dist-es/chain.js b/amplify/functions/downloadDocument/node_modules/@smithy/property-provider/dist-es/chain.js new file mode 100644 index 0000000..c389f7f --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@smithy/property-provider/dist-es/chain.js @@ -0,0 +1,21 @@ +import { ProviderError } from "./ProviderError"; +export const chain = (...providers) => async () => { + if (providers.length === 0) { + throw new ProviderError("No providers in chain"); + } + let lastProviderError; + for (const provider of providers) { + try { + const credentials = await provider(); + return credentials; + } + catch (err) { + lastProviderError = err; + if (err?.tryNextLink) { + continue; + } + throw err; + } + } + throw lastProviderError; +}; diff --git a/amplify/functions/downloadDocument/node_modules/@smithy/property-provider/dist-es/fromStatic.js b/amplify/functions/downloadDocument/node_modules/@smithy/property-provider/dist-es/fromStatic.js new file mode 100644 index 0000000..67da7a7 --- /dev/null +++ 
b/amplify/functions/downloadDocument/node_modules/@smithy/property-provider/dist-es/fromStatic.js @@ -0,0 +1 @@ +export const fromStatic = (staticValue) => () => Promise.resolve(staticValue); diff --git a/amplify/functions/downloadDocument/node_modules/@smithy/property-provider/dist-es/index.js b/amplify/functions/downloadDocument/node_modules/@smithy/property-provider/dist-es/index.js new file mode 100644 index 0000000..15d14e5 --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@smithy/property-provider/dist-es/index.js @@ -0,0 +1,6 @@ +export * from "./CredentialsProviderError"; +export * from "./ProviderError"; +export * from "./TokenProviderError"; +export * from "./chain"; +export * from "./fromStatic"; +export * from "./memoize"; diff --git a/amplify/functions/downloadDocument/node_modules/@smithy/property-provider/dist-es/memoize.js b/amplify/functions/downloadDocument/node_modules/@smithy/property-provider/dist-es/memoize.js new file mode 100644 index 0000000..e04839a --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@smithy/property-provider/dist-es/memoize.js @@ -0,0 +1,45 @@ +export const memoize = (provider, isExpired, requiresRefresh) => { + let resolved; + let pending; + let hasResult; + let isConstant = false; + const coalesceProvider = async () => { + if (!pending) { + pending = provider(); + } + try { + resolved = await pending; + hasResult = true; + isConstant = false; + } + finally { + pending = undefined; + } + return resolved; + }; + if (isExpired === undefined) { + return async (options) => { + if (!hasResult || options?.forceRefresh) { + resolved = await coalesceProvider(); + } + return resolved; + }; + } + return async (options) => { + if (!hasResult || options?.forceRefresh) { + resolved = await coalesceProvider(); + } + if (isConstant) { + return resolved; + } + if (requiresRefresh && !requiresRefresh(resolved)) { + isConstant = true; + return resolved; + } + if (isExpired(resolved)) { + await 
coalesceProvider(); + return resolved; + } + return resolved; + }; +}; diff --git a/amplify/functions/downloadDocument/node_modules/@smithy/property-provider/dist-types/CredentialsProviderError.d.ts b/amplify/functions/downloadDocument/node_modules/@smithy/property-provider/dist-types/CredentialsProviderError.d.ts new file mode 100644 index 0000000..7955dc1 --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@smithy/property-provider/dist-types/CredentialsProviderError.d.ts @@ -0,0 +1,30 @@ +import { ProviderError, ProviderErrorOptionsType } from "./ProviderError"; +/** + * @public + * + * An error representing a failure of an individual credential provider. + * + * This error class has special meaning to the {@link chain} method. If a + * provider in the chain is rejected with an error, the chain will only proceed + * to the next provider if the value of the `tryNextLink` property on the error + * is truthy. This allows individual providers to halt the chain and also + * ensures the chain will stop if an entirely unexpected error is encountered. + */ +export declare class CredentialsProviderError extends ProviderError { + name: string; + /** + * @override + * @deprecated constructor should be given a logger. + */ + constructor(message: string); + /** + * @override + * @deprecated constructor should be given a logger. + */ + constructor(message: string, tryNextLink: boolean | undefined); + /** + * @override + * This signature is preferred for logging capability. 
+ */ + constructor(message: string, options: ProviderErrorOptionsType); +} diff --git a/amplify/functions/downloadDocument/node_modules/@smithy/property-provider/dist-types/ProviderError.d.ts b/amplify/functions/downloadDocument/node_modules/@smithy/property-provider/dist-types/ProviderError.d.ts new file mode 100644 index 0000000..b87b014 --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@smithy/property-provider/dist-types/ProviderError.d.ts @@ -0,0 +1,39 @@ +import { Logger } from "@smithy/types"; +/** + * @public + */ +export type ProviderErrorOptionsType = { + tryNextLink?: boolean | undefined; + logger?: Logger; +}; +/** + * @public + * + * An error representing a failure of an individual provider. + * + * This error class has special meaning to the {@link chain} method. If a + * provider in the chain is rejected with an error, the chain will only proceed + * to the next provider if the value of the `tryNextLink` property on the error + * is truthy. This allows individual providers to halt the chain and also + * ensures the chain will stop if an entirely unexpected error is encountered. + */ +export declare class ProviderError extends Error { + name: string; + readonly tryNextLink: boolean; + /** + * @deprecated constructor should be given a logger. + */ + constructor(message: string); + /** + * @deprecated constructor should be given a logger. + */ + constructor(message: string, tryNextLink: boolean | undefined); + /** + * This signature is preferred for logging capability. + */ + constructor(message: string, options: ProviderErrorOptionsType); + /** + * @deprecated use new operator. 
+ */ + static from(error: Error, options?: boolean | ProviderErrorOptionsType): ProviderError; +} diff --git a/amplify/functions/downloadDocument/node_modules/@smithy/property-provider/dist-types/TokenProviderError.d.ts b/amplify/functions/downloadDocument/node_modules/@smithy/property-provider/dist-types/TokenProviderError.d.ts new file mode 100644 index 0000000..a2f9dd6 --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@smithy/property-provider/dist-types/TokenProviderError.d.ts @@ -0,0 +1,30 @@ +import { ProviderError, ProviderErrorOptionsType } from "./ProviderError"; +/** + * @public + * + * An error representing a failure of an individual token provider. + * + * This error class has special meaning to the {@link chain} method. If a + * provider in the chain is rejected with an error, the chain will only proceed + * to the next provider if the value of the `tryNextLink` property on the error + * is truthy. This allows individual providers to halt the chain and also + * ensures the chain will stop if an entirely unexpected error is encountered. + */ +export declare class TokenProviderError extends ProviderError { + name: string; + /** + * @override + * @deprecated constructor should be given a logger. + */ + constructor(message: string); + /** + * @override + * @deprecated constructor should be given a logger. + */ + constructor(message: string, tryNextLink: boolean | undefined); + /** + * @override + * This signature is preferred for logging capability. 
+ */ + constructor(message: string, options: ProviderErrorOptionsType); +} diff --git a/amplify/functions/downloadDocument/node_modules/@smithy/property-provider/dist-types/chain.d.ts b/amplify/functions/downloadDocument/node_modules/@smithy/property-provider/dist-types/chain.d.ts new file mode 100644 index 0000000..168df5c --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@smithy/property-provider/dist-types/chain.d.ts @@ -0,0 +1,13 @@ +import { Provider } from "@smithy/types"; +/** + * @internal + * + * Compose a single credential provider function from multiple credential + * providers. The first provider in the argument list will always be invoked; + * subsequent providers in the list will be invoked in the order in which the + * were received if the preceding provider did not successfully resolve. + * + * If no providers were received or no provider resolves successfully, the + * returned promise will be rejected. + */ +export declare const chain: (...providers: Provider[]) => Provider; diff --git a/amplify/functions/downloadDocument/node_modules/@smithy/property-provider/dist-types/fromStatic.d.ts b/amplify/functions/downloadDocument/node_modules/@smithy/property-provider/dist-types/fromStatic.d.ts new file mode 100644 index 0000000..f58bece --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@smithy/property-provider/dist-types/fromStatic.d.ts @@ -0,0 +1,5 @@ +import { Provider } from "@smithy/types"; +/** + * @internal + */ +export declare const fromStatic: (staticValue: T) => Provider; diff --git a/amplify/functions/downloadDocument/node_modules/@smithy/property-provider/dist-types/index.d.ts b/amplify/functions/downloadDocument/node_modules/@smithy/property-provider/dist-types/index.d.ts new file mode 100644 index 0000000..6326994 --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@smithy/property-provider/dist-types/index.d.ts @@ -0,0 +1,24 @@ +/** + * @internal + */ +export * from 
"./CredentialsProviderError"; +/** + * @internal + */ +export * from "./ProviderError"; +/** + * @internal + */ +export * from "./TokenProviderError"; +/** + * @internal + */ +export * from "./chain"; +/** + * @internal + */ +export * from "./fromStatic"; +/** + * @internal + */ +export * from "./memoize"; diff --git a/amplify/functions/downloadDocument/node_modules/@smithy/property-provider/dist-types/memoize.d.ts b/amplify/functions/downloadDocument/node_modules/@smithy/property-provider/dist-types/memoize.d.ts new file mode 100644 index 0000000..ce197c0 --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@smithy/property-provider/dist-types/memoize.d.ts @@ -0,0 +1,40 @@ +import { MemoizedProvider, Provider } from "@smithy/types"; +interface MemoizeOverload { + /** + * + * Decorates a provider function with either static memoization. + * + * To create a statically memoized provider, supply a provider as the only + * argument to this function. The provider will be invoked once, and all + * invocations of the provider returned by `memoize` will return the same + * promise object. + * + * @param provider The provider whose result should be cached indefinitely. + */ + (provider: Provider): MemoizedProvider; + /** + * Decorates a provider function with refreshing memoization. + * + * @param provider The provider whose result should be cached. + * @param isExpired A function that will evaluate the resolved value and + * determine if it is expired. For example, when + * memoizing AWS credential providers, this function + * should return `true` when the credential's + * expiration is in the past (or very near future) and + * `false` otherwise. + * @param requiresRefresh A function that will evaluate the resolved value and + * determine if it represents static value or one that + * will eventually need to be refreshed. 
For example, + * AWS credentials that have no defined expiration will + * never need to be refreshed, so this function would + * return `true` if the credentials resolved by the + * underlying provider had an expiration and `false` + * otherwise. + */ + (provider: Provider, isExpired: (resolved: T) => boolean, requiresRefresh?: (resolved: T) => boolean): MemoizedProvider; +} +/** + * @internal + */ +export declare const memoize: MemoizeOverload; +export {}; diff --git a/amplify/functions/downloadDocument/node_modules/@smithy/property-provider/dist-types/ts3.4/CredentialsProviderError.d.ts b/amplify/functions/downloadDocument/node_modules/@smithy/property-provider/dist-types/ts3.4/CredentialsProviderError.d.ts new file mode 100644 index 0000000..11e4aea --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@smithy/property-provider/dist-types/ts3.4/CredentialsProviderError.d.ts @@ -0,0 +1,30 @@ +import { ProviderError, ProviderErrorOptionsType } from "./ProviderError"; +/** + * @public + * + * An error representing a failure of an individual credential provider. + * + * This error class has special meaning to the {@link chain} method. If a + * provider in the chain is rejected with an error, the chain will only proceed + * to the next provider if the value of the `tryNextLink` property on the error + * is truthy. This allows individual providers to halt the chain and also + * ensures the chain will stop if an entirely unexpected error is encountered. + */ +export declare class CredentialsProviderError extends ProviderError { + name: string; + /** + * @override + * @deprecated constructor should be given a logger. + */ + constructor(message: string); + /** + * @override + * @deprecated constructor should be given a logger. + */ + constructor(message: string, tryNextLink: boolean | undefined); + /** + * @override + * This signature is preferred for logging capability. 
+ */ + constructor(message: string, options: ProviderErrorOptionsType); +} diff --git a/amplify/functions/downloadDocument/node_modules/@smithy/property-provider/dist-types/ts3.4/ProviderError.d.ts b/amplify/functions/downloadDocument/node_modules/@smithy/property-provider/dist-types/ts3.4/ProviderError.d.ts new file mode 100644 index 0000000..daf499c --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@smithy/property-provider/dist-types/ts3.4/ProviderError.d.ts @@ -0,0 +1,39 @@ +import { Logger } from "@smithy/types"; +/** + * @public + */ +export type ProviderErrorOptionsType = { + tryNextLink?: boolean | undefined; + logger?: Logger; +}; +/** + * @public + * + * An error representing a failure of an individual provider. + * + * This error class has special meaning to the {@link chain} method. If a + * provider in the chain is rejected with an error, the chain will only proceed + * to the next provider if the value of the `tryNextLink` property on the error + * is truthy. This allows individual providers to halt the chain and also + * ensures the chain will stop if an entirely unexpected error is encountered. + */ +export declare class ProviderError extends Error { + name: string; + readonly tryNextLink: boolean; + /** + * @deprecated constructor should be given a logger. + */ + constructor(message: string); + /** + * @deprecated constructor should be given a logger. + */ + constructor(message: string, tryNextLink: boolean | undefined); + /** + * This signature is preferred for logging capability. + */ + constructor(message: string, options: ProviderErrorOptionsType); + /** + * @deprecated use new operator. 
+ */ + static from(error: Error, options?: boolean | ProviderErrorOptionsType): ProviderError; +} diff --git a/amplify/functions/downloadDocument/node_modules/@smithy/property-provider/dist-types/ts3.4/TokenProviderError.d.ts b/amplify/functions/downloadDocument/node_modules/@smithy/property-provider/dist-types/ts3.4/TokenProviderError.d.ts new file mode 100644 index 0000000..6f67fd5 --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@smithy/property-provider/dist-types/ts3.4/TokenProviderError.d.ts @@ -0,0 +1,30 @@ +import { ProviderError, ProviderErrorOptionsType } from "./ProviderError"; +/** + * @public + * + * An error representing a failure of an individual token provider. + * + * This error class has special meaning to the {@link chain} method. If a + * provider in the chain is rejected with an error, the chain will only proceed + * to the next provider if the value of the `tryNextLink` property on the error + * is truthy. This allows individual providers to halt the chain and also + * ensures the chain will stop if an entirely unexpected error is encountered. + */ +export declare class TokenProviderError extends ProviderError { + name: string; + /** + * @override + * @deprecated constructor should be given a logger. + */ + constructor(message: string); + /** + * @override + * @deprecated constructor should be given a logger. + */ + constructor(message: string, tryNextLink: boolean | undefined); + /** + * @override + * This signature is preferred for logging capability. 
+ */ + constructor(message: string, options: ProviderErrorOptionsType); +} diff --git a/amplify/functions/downloadDocument/node_modules/@smithy/property-provider/dist-types/ts3.4/chain.d.ts b/amplify/functions/downloadDocument/node_modules/@smithy/property-provider/dist-types/ts3.4/chain.d.ts new file mode 100644 index 0000000..44390b8 --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@smithy/property-provider/dist-types/ts3.4/chain.d.ts @@ -0,0 +1,13 @@ +import { Provider } from "@smithy/types"; +/** + * @internal + * + * Compose a single credential provider function from multiple credential + * providers. The first provider in the argument list will always be invoked; + * subsequent providers in the list will be invoked in the order in which the + * were received if the preceding provider did not successfully resolve. + * + * If no providers were received or no provider resolves successfully, the + * returned promise will be rejected. + */ +export declare const chain: (...providers: Provider[]) => Provider; diff --git a/amplify/functions/downloadDocument/node_modules/@smithy/property-provider/dist-types/ts3.4/fromStatic.d.ts b/amplify/functions/downloadDocument/node_modules/@smithy/property-provider/dist-types/ts3.4/fromStatic.d.ts new file mode 100644 index 0000000..0df6309 --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@smithy/property-provider/dist-types/ts3.4/fromStatic.d.ts @@ -0,0 +1,5 @@ +import { Provider } from "@smithy/types"; +/** + * @internal + */ +export declare const fromStatic: (staticValue: T) => Provider; diff --git a/amplify/functions/downloadDocument/node_modules/@smithy/property-provider/dist-types/ts3.4/index.d.ts b/amplify/functions/downloadDocument/node_modules/@smithy/property-provider/dist-types/ts3.4/index.d.ts new file mode 100644 index 0000000..e28099d --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@smithy/property-provider/dist-types/ts3.4/index.d.ts @@ -0,0 +1,24 @@ +/** + 
* @internal + */ +export * from "./CredentialsProviderError"; +/** + * @internal + */ +export * from "./ProviderError"; +/** + * @internal + */ +export * from "./TokenProviderError"; +/** + * @internal + */ +export * from "./chain"; +/** + * @internal + */ +export * from "./fromStatic"; +/** + * @internal + */ +export * from "./memoize"; diff --git a/amplify/functions/downloadDocument/node_modules/@smithy/property-provider/dist-types/ts3.4/memoize.d.ts b/amplify/functions/downloadDocument/node_modules/@smithy/property-provider/dist-types/ts3.4/memoize.d.ts new file mode 100644 index 0000000..29ce53d --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@smithy/property-provider/dist-types/ts3.4/memoize.d.ts @@ -0,0 +1,40 @@ +import { MemoizedProvider, Provider } from "@smithy/types"; +interface MemoizeOverload { + /** + * + * Decorates a provider function with either static memoization. + * + * To create a statically memoized provider, supply a provider as the only + * argument to this function. The provider will be invoked once, and all + * invocations of the provider returned by `memoize` will return the same + * promise object. + * + * @param provider The provider whose result should be cached indefinitely. + */ + (provider: Provider): MemoizedProvider; + /** + * Decorates a provider function with refreshing memoization. + * + * @param provider The provider whose result should be cached. + * @param isExpired A function that will evaluate the resolved value and + * determine if it is expired. For example, when + * memoizing AWS credential providers, this function + * should return `true` when the credential's + * expiration is in the past (or very near future) and + * `false` otherwise. + * @param requiresRefresh A function that will evaluate the resolved value and + * determine if it represents static value or one that + * will eventually need to be refreshed. 
For example, + * AWS credentials that have no defined expiration will + * never need to be refreshed, so this function would + * return `true` if the credentials resolved by the + * underlying provider had an expiration and `false` + * otherwise. + */ + (provider: Provider, isExpired: (resolved: T) => boolean, requiresRefresh?: (resolved: T) => boolean): MemoizedProvider; +} +/** + * @internal + */ +export declare const memoize: MemoizeOverload; +export {}; diff --git a/amplify/functions/downloadDocument/node_modules/@smithy/property-provider/package.json b/amplify/functions/downloadDocument/node_modules/@smithy/property-provider/package.json new file mode 100644 index 0000000..b2e7fc6 --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@smithy/property-provider/package.json @@ -0,0 +1,60 @@ +{ + "name": "@smithy/property-provider", + "version": "4.0.2", + "scripts": { + "build": "concurrently 'yarn:build:cjs' 'yarn:build:es' 'yarn:build:types && yarn build:types:downlevel'", + "build:cjs": "node ../../scripts/inline property-provider", + "build:es": "yarn g:tsc -p tsconfig.es.json", + "build:types": "yarn g:tsc -p tsconfig.types.json", + "build:types:downlevel": "rimraf dist-types/ts3.4 && downlevel-dts dist-types dist-types/ts3.4", + "stage-release": "rimraf ./.release && yarn pack && mkdir ./.release && tar zxvf ./package.tgz --directory ./.release && rm ./package.tgz", + "clean": "rimraf ./dist-* && rimraf *.tsbuildinfo || exit 0", + "lint": "eslint -c ../../.eslintrc.js \"src/**/*.ts\"", + "format": "prettier --config ../../prettier.config.js --ignore-path ../../.prettierignore --write \"**/*.{ts,md,json}\"", + "test": "yarn g:vitest run", + "test:watch": "yarn g:vitest watch" + }, + "main": "./dist-cjs/index.js", + "module": "./dist-es/index.js", + "types": "./dist-types/index.d.ts", + "author": { + "name": "AWS SDK for JavaScript Team", + "url": "https://aws.amazon.com/javascript/" + }, + "license": "Apache-2.0", + "dependencies": { + 
"@smithy/types": "^4.2.0", + "tslib": "^2.6.2" + }, + "engines": { + "node": ">=18.0.0" + }, + "typesVersions": { + "<4.0": { + "dist-types/*": [ + "dist-types/ts3.4/*" + ] + } + }, + "files": [ + "dist-*/**" + ], + "homepage": "https://github.com/smithy-lang/smithy-typescript/tree/main/packages/property-provider", + "repository": { + "type": "git", + "url": "https://github.com/smithy-lang/smithy-typescript.git", + "directory": "packages/property-provider" + }, + "devDependencies": { + "concurrently": "7.0.0", + "downlevel-dts": "0.10.1", + "rimraf": "3.0.2", + "typedoc": "0.23.23" + }, + "typedoc": { + "entryPoint": "src/index.ts" + }, + "publishConfig": { + "directory": ".release/package" + } +} \ No newline at end of file diff --git a/amplify/functions/downloadDocument/node_modules/@smithy/protocol-http/LICENSE b/amplify/functions/downloadDocument/node_modules/@smithy/protocol-http/LICENSE new file mode 100644 index 0000000..e907b58 --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@smithy/protocol-http/LICENSE @@ -0,0 +1,201 @@ + Apache License + Version 2.0, January 2004 + http://www.apache.org/licenses/ + + TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION + + 1. Definitions. + + "License" shall mean the terms and conditions for use, reproduction, + and distribution as defined by Sections 1 through 9 of this document. + + "Licensor" shall mean the copyright owner or entity authorized by + the copyright owner that is granting the License. + + "Legal Entity" shall mean the union of the acting entity and all + other entities that control, are controlled by, or are under common + control with that entity. For the purposes of this definition, + "control" means (i) the power, direct or indirect, to cause the + direction or management of such entity, whether by contract or + otherwise, or (ii) ownership of fifty percent (50%) or more of the + outstanding shares, or (iii) beneficial ownership of such entity. 
+ + "You" (or "Your") shall mean an individual or Legal Entity + exercising permissions granted by this License. + + "Source" form shall mean the preferred form for making modifications, + including but not limited to software source code, documentation + source, and configuration files. + + "Object" form shall mean any form resulting from mechanical + transformation or translation of a Source form, including but + not limited to compiled object code, generated documentation, + and conversions to other media types. + + "Work" shall mean the work of authorship, whether in Source or + Object form, made available under the License, as indicated by a + copyright notice that is included in or attached to the work + (an example is provided in the Appendix below). + + "Derivative Works" shall mean any work, whether in Source or Object + form, that is based on (or derived from) the Work and for which the + editorial revisions, annotations, elaborations, or other modifications + represent, as a whole, an original work of authorship. For the purposes + of this License, Derivative Works shall not include works that remain + separable from, or merely link (or bind by name) to the interfaces of, + the Work and Derivative Works thereof. + + "Contribution" shall mean any work of authorship, including + the original version of the Work and any modifications or additions + to that Work or Derivative Works thereof, that is intentionally + submitted to Licensor for inclusion in the Work by the copyright owner + or by an individual or Legal Entity authorized to submit on behalf of + the copyright owner. 
For the purposes of this definition, "submitted" + means any form of electronic, verbal, or written communication sent + to the Licensor or its representatives, including but not limited to + communication on electronic mailing lists, source code control systems, + and issue tracking systems that are managed by, or on behalf of, the + Licensor for the purpose of discussing and improving the Work, but + excluding communication that is conspicuously marked or otherwise + designated in writing by the copyright owner as "Not a Contribution." + + "Contributor" shall mean Licensor and any individual or Legal Entity + on behalf of whom a Contribution has been received by Licensor and + subsequently incorporated within the Work. + + 2. Grant of Copyright License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + copyright license to reproduce, prepare Derivative Works of, + publicly display, publicly perform, sublicense, and distribute the + Work and such Derivative Works in Source or Object form. + + 3. Grant of Patent License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + (except as stated in this section) patent license to make, have made, + use, offer to sell, sell, import, and otherwise transfer the Work, + where such license applies only to those patent claims licensable + by such Contributor that are necessarily infringed by their + Contribution(s) alone or by combination of their Contribution(s) + with the Work to which such Contribution(s) was submitted. 
If You + institute patent litigation against any entity (including a + cross-claim or counterclaim in a lawsuit) alleging that the Work + or a Contribution incorporated within the Work constitutes direct + or contributory patent infringement, then any patent licenses + granted to You under this License for that Work shall terminate + as of the date such litigation is filed. + + 4. Redistribution. You may reproduce and distribute copies of the + Work or Derivative Works thereof in any medium, with or without + modifications, and in Source or Object form, provided that You + meet the following conditions: + + (a) You must give any other recipients of the Work or + Derivative Works a copy of this License; and + + (b) You must cause any modified files to carry prominent notices + stating that You changed the files; and + + (c) You must retain, in the Source form of any Derivative Works + that You distribute, all copyright, patent, trademark, and + attribution notices from the Source form of the Work, + excluding those notices that do not pertain to any part of + the Derivative Works; and + + (d) If the Work includes a "NOTICE" text file as part of its + distribution, then any Derivative Works that You distribute must + include a readable copy of the attribution notices contained + within such NOTICE file, excluding those notices that do not + pertain to any part of the Derivative Works, in at least one + of the following places: within a NOTICE text file distributed + as part of the Derivative Works; within the Source form or + documentation, if provided along with the Derivative Works; or, + within a display generated by the Derivative Works, if and + wherever such third-party notices normally appear. The contents + of the NOTICE file are for informational purposes only and + do not modify the License. 
You may add Your own attribution + notices within Derivative Works that You distribute, alongside + or as an addendum to the NOTICE text from the Work, provided + that such additional attribution notices cannot be construed + as modifying the License. + + You may add Your own copyright statement to Your modifications and + may provide additional or different license terms and conditions + for use, reproduction, or distribution of Your modifications, or + for any such Derivative Works as a whole, provided Your use, + reproduction, and distribution of the Work otherwise complies with + the conditions stated in this License. + + 5. Submission of Contributions. Unless You explicitly state otherwise, + any Contribution intentionally submitted for inclusion in the Work + by You to the Licensor shall be under the terms and conditions of + this License, without any additional terms or conditions. + Notwithstanding the above, nothing herein shall supersede or modify + the terms of any separate license agreement you may have executed + with Licensor regarding such Contributions. + + 6. Trademarks. This License does not grant permission to use the trade + names, trademarks, service marks, or product names of the Licensor, + except as required for reasonable and customary use in describing the + origin of the Work and reproducing the content of the NOTICE file. + + 7. Disclaimer of Warranty. Unless required by applicable law or + agreed to in writing, Licensor provides the Work (and each + Contributor provides its Contributions) on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or + implied, including, without limitation, any warranties or conditions + of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A + PARTICULAR PURPOSE. You are solely responsible for determining the + appropriateness of using or redistributing the Work and assume any + risks associated with Your exercise of permissions under this License. + + 8. 
Limitation of Liability. In no event and under no legal theory, + whether in tort (including negligence), contract, or otherwise, + unless required by applicable law (such as deliberate and grossly + negligent acts) or agreed to in writing, shall any Contributor be + liable to You for damages, including any direct, indirect, special, + incidental, or consequential damages of any character arising as a + result of this License or out of the use or inability to use the + Work (including but not limited to damages for loss of goodwill, + work stoppage, computer failure or malfunction, or any and all + other commercial damages or losses), even if such Contributor + has been advised of the possibility of such damages. + + 9. Accepting Warranty or Additional Liability. While redistributing + the Work or Derivative Works thereof, You may choose to offer, + and charge a fee for, acceptance of support, warranty, indemnity, + or other liability obligations and/or rights consistent with this + License. However, in accepting such obligations, You may act only + on Your own behalf and on Your sole responsibility, not on behalf + of any other Contributor, and only if You agree to indemnify, + defend, and hold each Contributor harmless for any liability + incurred by, or claims asserted against, such Contributor by reason + of your accepting any such warranty or additional liability. + + END OF TERMS AND CONDITIONS + + APPENDIX: How to apply the Apache License to your work. + + To apply the Apache License to your work, attach the following + boilerplate notice, with the fields enclosed by brackets "{}" + replaced with your own identifying information. (Don't include + the brackets!) The text should be enclosed in the appropriate + comment syntax for the file format. We also recommend that a + file or class name and description of purpose be included on the + same "printed page" as the copyright notice for easier + identification within third-party archives. 
+ + Copyright 2019 Amazon.com, Inc. or its affiliates. All Rights Reserved. + + Licensed under the Apache License, Version 2.0 (the "License"); + you may not use this file except in compliance with the License. + You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + + Unless required by applicable law or agreed to in writing, software + distributed under the License is distributed on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + See the License for the specific language governing permissions and + limitations under the License. diff --git a/amplify/functions/downloadDocument/node_modules/@smithy/protocol-http/README.md b/amplify/functions/downloadDocument/node_modules/@smithy/protocol-http/README.md new file mode 100644 index 0000000..a547ab0 --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@smithy/protocol-http/README.md @@ -0,0 +1,4 @@ +# @smithy/protocol-http + +[![NPM version](https://img.shields.io/npm/v/@smithy/protocol-http/latest.svg)](https://www.npmjs.com/package/@smithy/protocol-http) +[![NPM downloads](https://img.shields.io/npm/dm/@smithy/protocol-http.svg)](https://www.npmjs.com/package/@smithy/protocol-http) diff --git a/amplify/functions/downloadDocument/node_modules/@smithy/protocol-http/dist-cjs/Field.js b/amplify/functions/downloadDocument/node_modules/@smithy/protocol-http/dist-cjs/Field.js new file mode 100644 index 0000000..532e610 --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@smithy/protocol-http/dist-cjs/Field.js @@ -0,0 +1 @@ +module.exports = require("./index.js"); \ No newline at end of file diff --git a/amplify/functions/downloadDocument/node_modules/@smithy/protocol-http/dist-cjs/Fields.js b/amplify/functions/downloadDocument/node_modules/@smithy/protocol-http/dist-cjs/Fields.js new file mode 100644 index 0000000..532e610 --- /dev/null +++ 
b/amplify/functions/downloadDocument/node_modules/@smithy/protocol-http/dist-cjs/Fields.js @@ -0,0 +1 @@ +module.exports = require("./index.js"); \ No newline at end of file diff --git a/amplify/functions/downloadDocument/node_modules/@smithy/protocol-http/dist-cjs/extensions/httpExtensionConfiguration.js b/amplify/functions/downloadDocument/node_modules/@smithy/protocol-http/dist-cjs/extensions/httpExtensionConfiguration.js new file mode 100644 index 0000000..0440577 --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@smithy/protocol-http/dist-cjs/extensions/httpExtensionConfiguration.js @@ -0,0 +1 @@ +module.exports = require("../index.js"); \ No newline at end of file diff --git a/amplify/functions/downloadDocument/node_modules/@smithy/protocol-http/dist-cjs/extensions/index.js b/amplify/functions/downloadDocument/node_modules/@smithy/protocol-http/dist-cjs/extensions/index.js new file mode 100644 index 0000000..0440577 --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@smithy/protocol-http/dist-cjs/extensions/index.js @@ -0,0 +1 @@ +module.exports = require("../index.js"); \ No newline at end of file diff --git a/amplify/functions/downloadDocument/node_modules/@smithy/protocol-http/dist-cjs/httpHandler.js b/amplify/functions/downloadDocument/node_modules/@smithy/protocol-http/dist-cjs/httpHandler.js new file mode 100644 index 0000000..532e610 --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@smithy/protocol-http/dist-cjs/httpHandler.js @@ -0,0 +1 @@ +module.exports = require("./index.js"); \ No newline at end of file diff --git a/amplify/functions/downloadDocument/node_modules/@smithy/protocol-http/dist-cjs/httpRequest.js b/amplify/functions/downloadDocument/node_modules/@smithy/protocol-http/dist-cjs/httpRequest.js new file mode 100644 index 0000000..532e610 --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@smithy/protocol-http/dist-cjs/httpRequest.js @@ -0,0 +1 @@ +module.exports = 
require("./index.js"); \ No newline at end of file diff --git a/amplify/functions/downloadDocument/node_modules/@smithy/protocol-http/dist-cjs/httpResponse.js b/amplify/functions/downloadDocument/node_modules/@smithy/protocol-http/dist-cjs/httpResponse.js new file mode 100644 index 0000000..532e610 --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@smithy/protocol-http/dist-cjs/httpResponse.js @@ -0,0 +1 @@ +module.exports = require("./index.js"); \ No newline at end of file diff --git a/amplify/functions/downloadDocument/node_modules/@smithy/protocol-http/dist-cjs/index.js b/amplify/functions/downloadDocument/node_modules/@smithy/protocol-http/dist-cjs/index.js new file mode 100644 index 0000000..df37109 --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@smithy/protocol-http/dist-cjs/index.js @@ -0,0 +1,262 @@ +var __defProp = Object.defineProperty; +var __getOwnPropDesc = Object.getOwnPropertyDescriptor; +var __getOwnPropNames = Object.getOwnPropertyNames; +var __hasOwnProp = Object.prototype.hasOwnProperty; +var __name = (target, value) => __defProp(target, "name", { value, configurable: true }); +var __export = (target, all) => { + for (var name in all) + __defProp(target, name, { get: all[name], enumerable: true }); +}; +var __copyProps = (to, from, except, desc) => { + if (from && typeof from === "object" || typeof from === "function") { + for (let key of __getOwnPropNames(from)) + if (!__hasOwnProp.call(to, key) && key !== except) + __defProp(to, key, { get: () => from[key], enumerable: !(desc = __getOwnPropDesc(from, key)) || desc.enumerable }); + } + return to; +}; +var __toCommonJS = (mod) => __copyProps(__defProp({}, "__esModule", { value: true }), mod); + +// src/index.ts +var src_exports = {}; +__export(src_exports, { + Field: () => Field, + Fields: () => Fields, + HttpRequest: () => HttpRequest, + HttpResponse: () => HttpResponse, + IHttpRequest: () => import_types.HttpRequest, + 
getHttpHandlerExtensionConfiguration: () => getHttpHandlerExtensionConfiguration, + isValidHostname: () => isValidHostname, + resolveHttpHandlerRuntimeConfig: () => resolveHttpHandlerRuntimeConfig +}); +module.exports = __toCommonJS(src_exports); + +// src/extensions/httpExtensionConfiguration.ts +var getHttpHandlerExtensionConfiguration = /* @__PURE__ */ __name((runtimeConfig) => { + return { + setHttpHandler(handler) { + runtimeConfig.httpHandler = handler; + }, + httpHandler() { + return runtimeConfig.httpHandler; + }, + updateHttpClientConfig(key, value) { + runtimeConfig.httpHandler?.updateHttpClientConfig(key, value); + }, + httpHandlerConfigs() { + return runtimeConfig.httpHandler.httpHandlerConfigs(); + } + }; +}, "getHttpHandlerExtensionConfiguration"); +var resolveHttpHandlerRuntimeConfig = /* @__PURE__ */ __name((httpHandlerExtensionConfiguration) => { + return { + httpHandler: httpHandlerExtensionConfiguration.httpHandler() + }; +}, "resolveHttpHandlerRuntimeConfig"); + +// src/Field.ts +var import_types = require("@smithy/types"); +var Field = class { + static { + __name(this, "Field"); + } + constructor({ name, kind = import_types.FieldPosition.HEADER, values = [] }) { + this.name = name; + this.kind = kind; + this.values = values; + } + /** + * Appends a value to the field. + * + * @param value The value to append. + */ + add(value) { + this.values.push(value); + } + /** + * Overwrite existing field values. + * + * @param values The new field values. + */ + set(values) { + this.values = values; + } + /** + * Remove all matching entries from list. + * + * @param value Value to remove. + */ + remove(value) { + this.values = this.values.filter((v) => v !== value); + } + /** + * Get comma-delimited string. + * + * @returns String representation of {@link Field}. + */ + toString() { + return this.values.map((v) => v.includes(",") || v.includes(" ") ? 
`"${v}"` : v).join(", "); + } + /** + * Get string values as a list + * + * @returns Values in {@link Field} as a list. + */ + get() { + return this.values; + } +}; + +// src/Fields.ts +var Fields = class { + constructor({ fields = [], encoding = "utf-8" }) { + this.entries = {}; + fields.forEach(this.setField.bind(this)); + this.encoding = encoding; + } + static { + __name(this, "Fields"); + } + /** + * Set entry for a {@link Field} name. The `name` + * attribute will be used to key the collection. + * + * @param field The {@link Field} to set. + */ + setField(field) { + this.entries[field.name.toLowerCase()] = field; + } + /** + * Retrieve {@link Field} entry by name. + * + * @param name The name of the {@link Field} entry + * to retrieve + * @returns The {@link Field} if it exists. + */ + getField(name) { + return this.entries[name.toLowerCase()]; + } + /** + * Delete entry from collection. + * + * @param name Name of the entry to delete. + */ + removeField(name) { + delete this.entries[name.toLowerCase()]; + } + /** + * Helper function for retrieving specific types of fields. + * Used to grab all headers or all trailers. + * + * @param kind {@link FieldPosition} of entries to retrieve. + * @returns The {@link Field} entries with the specified + * {@link FieldPosition}. + */ + getByType(kind) { + return Object.values(this.entries).filter((field) => field.kind === kind); + } +}; + +// src/httpRequest.ts + +var HttpRequest = class _HttpRequest { + static { + __name(this, "HttpRequest"); + } + constructor(options) { + this.method = options.method || "GET"; + this.hostname = options.hostname || "localhost"; + this.port = options.port; + this.query = options.query || {}; + this.headers = options.headers || {}; + this.body = options.body; + this.protocol = options.protocol ? options.protocol.slice(-1) !== ":" ? `${options.protocol}:` : options.protocol : "https:"; + this.path = options.path ? options.path.charAt(0) !== "/" ? 
`/${options.path}` : options.path : "/"; + this.username = options.username; + this.password = options.password; + this.fragment = options.fragment; + } + /** + * Note: this does not deep-clone the body. + */ + static clone(request) { + const cloned = new _HttpRequest({ + ...request, + headers: { ...request.headers } + }); + if (cloned.query) { + cloned.query = cloneQuery(cloned.query); + } + return cloned; + } + /** + * This method only actually asserts that request is the interface {@link IHttpRequest}, + * and not necessarily this concrete class. Left in place for API stability. + * + * Do not call instance methods on the input of this function, and + * do not assume it has the HttpRequest prototype. + */ + static isInstance(request) { + if (!request) { + return false; + } + const req = request; + return "method" in req && "protocol" in req && "hostname" in req && "path" in req && typeof req["query"] === "object" && typeof req["headers"] === "object"; + } + /** + * @deprecated use static HttpRequest.clone(request) instead. It's not safe to call + * this method because {@link HttpRequest.isInstance} incorrectly + * asserts that IHttpRequest (interface) objects are of type HttpRequest (class). + */ + clone() { + return _HttpRequest.clone(this); + } +}; +function cloneQuery(query) { + return Object.keys(query).reduce((carry, paramName) => { + const param = query[paramName]; + return { + ...carry, + [paramName]: Array.isArray(param) ? 
[...param] : param + }; + }, {}); +} +__name(cloneQuery, "cloneQuery"); + +// src/httpResponse.ts +var HttpResponse = class { + static { + __name(this, "HttpResponse"); + } + constructor(options) { + this.statusCode = options.statusCode; + this.reason = options.reason; + this.headers = options.headers || {}; + this.body = options.body; + } + static isInstance(response) { + if (!response) + return false; + const resp = response; + return typeof resp.statusCode === "number" && typeof resp.headers === "object"; + } +}; + +// src/isValidHostname.ts +function isValidHostname(hostname) { + const hostPattern = /^[a-z0-9][a-z0-9\.\-]*[a-z0-9]$/; + return hostPattern.test(hostname); +} +__name(isValidHostname, "isValidHostname"); +// Annotate the CommonJS export names for ESM import in node: + +0 && (module.exports = { + getHttpHandlerExtensionConfiguration, + resolveHttpHandlerRuntimeConfig, + Field, + Fields, + HttpRequest, + HttpResponse, + isValidHostname +}); + diff --git a/amplify/functions/downloadDocument/node_modules/@smithy/protocol-http/dist-cjs/isValidHostname.js b/amplify/functions/downloadDocument/node_modules/@smithy/protocol-http/dist-cjs/isValidHostname.js new file mode 100644 index 0000000..532e610 --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@smithy/protocol-http/dist-cjs/isValidHostname.js @@ -0,0 +1 @@ +module.exports = require("./index.js"); \ No newline at end of file diff --git a/amplify/functions/downloadDocument/node_modules/@smithy/protocol-http/dist-cjs/types.js b/amplify/functions/downloadDocument/node_modules/@smithy/protocol-http/dist-cjs/types.js new file mode 100644 index 0000000..532e610 --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@smithy/protocol-http/dist-cjs/types.js @@ -0,0 +1 @@ +module.exports = require("./index.js"); \ No newline at end of file diff --git a/amplify/functions/downloadDocument/node_modules/@smithy/protocol-http/dist-es/Field.js 
b/amplify/functions/downloadDocument/node_modules/@smithy/protocol-http/dist-es/Field.js new file mode 100644 index 0000000..918c883 --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@smithy/protocol-http/dist-es/Field.js @@ -0,0 +1,23 @@ +import { FieldPosition } from "@smithy/types"; +export class Field { + constructor({ name, kind = FieldPosition.HEADER, values = [] }) { + this.name = name; + this.kind = kind; + this.values = values; + } + add(value) { + this.values.push(value); + } + set(values) { + this.values = values; + } + remove(value) { + this.values = this.values.filter((v) => v !== value); + } + toString() { + return this.values.map((v) => (v.includes(",") || v.includes(" ") ? `"${v}"` : v)).join(", "); + } + get() { + return this.values; + } +} diff --git a/amplify/functions/downloadDocument/node_modules/@smithy/protocol-http/dist-es/Fields.js b/amplify/functions/downloadDocument/node_modules/@smithy/protocol-http/dist-es/Fields.js new file mode 100644 index 0000000..efa591f --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@smithy/protocol-http/dist-es/Fields.js @@ -0,0 +1,19 @@ +export class Fields { + constructor({ fields = [], encoding = "utf-8" }) { + this.entries = {}; + fields.forEach(this.setField.bind(this)); + this.encoding = encoding; + } + setField(field) { + this.entries[field.name.toLowerCase()] = field; + } + getField(name) { + return this.entries[name.toLowerCase()]; + } + removeField(name) { + delete this.entries[name.toLowerCase()]; + } + getByType(kind) { + return Object.values(this.entries).filter((field) => field.kind === kind); + } +} diff --git a/amplify/functions/downloadDocument/node_modules/@smithy/protocol-http/dist-es/extensions/httpExtensionConfiguration.js b/amplify/functions/downloadDocument/node_modules/@smithy/protocol-http/dist-es/extensions/httpExtensionConfiguration.js new file mode 100644 index 0000000..1a5aa0c --- /dev/null +++ 
b/amplify/functions/downloadDocument/node_modules/@smithy/protocol-http/dist-es/extensions/httpExtensionConfiguration.js @@ -0,0 +1,21 @@ +export const getHttpHandlerExtensionConfiguration = (runtimeConfig) => { + return { + setHttpHandler(handler) { + runtimeConfig.httpHandler = handler; + }, + httpHandler() { + return runtimeConfig.httpHandler; + }, + updateHttpClientConfig(key, value) { + runtimeConfig.httpHandler?.updateHttpClientConfig(key, value); + }, + httpHandlerConfigs() { + return runtimeConfig.httpHandler.httpHandlerConfigs(); + }, + }; +}; +export const resolveHttpHandlerRuntimeConfig = (httpHandlerExtensionConfiguration) => { + return { + httpHandler: httpHandlerExtensionConfiguration.httpHandler(), + }; +}; diff --git a/amplify/functions/downloadDocument/node_modules/@smithy/protocol-http/dist-es/extensions/index.js b/amplify/functions/downloadDocument/node_modules/@smithy/protocol-http/dist-es/extensions/index.js new file mode 100644 index 0000000..a215a4a --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@smithy/protocol-http/dist-es/extensions/index.js @@ -0,0 +1 @@ +export * from "./httpExtensionConfiguration"; diff --git a/amplify/functions/downloadDocument/node_modules/@smithy/protocol-http/dist-es/httpHandler.js b/amplify/functions/downloadDocument/node_modules/@smithy/protocol-http/dist-es/httpHandler.js new file mode 100644 index 0000000..cb0ff5c --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@smithy/protocol-http/dist-es/httpHandler.js @@ -0,0 +1 @@ +export {}; diff --git a/amplify/functions/downloadDocument/node_modules/@smithy/protocol-http/dist-es/httpRequest.js b/amplify/functions/downloadDocument/node_modules/@smithy/protocol-http/dist-es/httpRequest.js new file mode 100644 index 0000000..fd426ab --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@smithy/protocol-http/dist-es/httpRequest.js @@ -0,0 +1,53 @@ +export class HttpRequest { + constructor(options) { + this.method = 
options.method || "GET"; + this.hostname = options.hostname || "localhost"; + this.port = options.port; + this.query = options.query || {}; + this.headers = options.headers || {}; + this.body = options.body; + this.protocol = options.protocol + ? options.protocol.slice(-1) !== ":" + ? `${options.protocol}:` + : options.protocol + : "https:"; + this.path = options.path ? (options.path.charAt(0) !== "/" ? `/${options.path}` : options.path) : "/"; + this.username = options.username; + this.password = options.password; + this.fragment = options.fragment; + } + static clone(request) { + const cloned = new HttpRequest({ + ...request, + headers: { ...request.headers }, + }); + if (cloned.query) { + cloned.query = cloneQuery(cloned.query); + } + return cloned; + } + static isInstance(request) { + if (!request) { + return false; + } + const req = request; + return ("method" in req && + "protocol" in req && + "hostname" in req && + "path" in req && + typeof req["query"] === "object" && + typeof req["headers"] === "object"); + } + clone() { + return HttpRequest.clone(this); + } +} +function cloneQuery(query) { + return Object.keys(query).reduce((carry, paramName) => { + const param = query[paramName]; + return { + ...carry, + [paramName]: Array.isArray(param) ? 
[...param] : param, + }; + }, {}); +} diff --git a/amplify/functions/downloadDocument/node_modules/@smithy/protocol-http/dist-es/httpResponse.js b/amplify/functions/downloadDocument/node_modules/@smithy/protocol-http/dist-es/httpResponse.js new file mode 100644 index 0000000..75f470f --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@smithy/protocol-http/dist-es/httpResponse.js @@ -0,0 +1,14 @@ +export class HttpResponse { + constructor(options) { + this.statusCode = options.statusCode; + this.reason = options.reason; + this.headers = options.headers || {}; + this.body = options.body; + } + static isInstance(response) { + if (!response) + return false; + const resp = response; + return typeof resp.statusCode === "number" && typeof resp.headers === "object"; + } +} diff --git a/amplify/functions/downloadDocument/node_modules/@smithy/protocol-http/dist-es/index.js b/amplify/functions/downloadDocument/node_modules/@smithy/protocol-http/dist-es/index.js new file mode 100644 index 0000000..8ff7f26 --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@smithy/protocol-http/dist-es/index.js @@ -0,0 +1,8 @@ +export * from "./extensions"; +export * from "./Field"; +export * from "./Fields"; +export * from "./httpHandler"; +export * from "./httpRequest"; +export * from "./httpResponse"; +export * from "./isValidHostname"; +export * from "./types"; diff --git a/amplify/functions/downloadDocument/node_modules/@smithy/protocol-http/dist-es/isValidHostname.js b/amplify/functions/downloadDocument/node_modules/@smithy/protocol-http/dist-es/isValidHostname.js new file mode 100644 index 0000000..464c7db --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@smithy/protocol-http/dist-es/isValidHostname.js @@ -0,0 +1,4 @@ +export function isValidHostname(hostname) { + const hostPattern = /^[a-z0-9][a-z0-9\.\-]*[a-z0-9]$/; + return hostPattern.test(hostname); +} diff --git 
a/amplify/functions/downloadDocument/node_modules/@smithy/protocol-http/dist-es/types.js b/amplify/functions/downloadDocument/node_modules/@smithy/protocol-http/dist-es/types.js new file mode 100644 index 0000000..cb0ff5c --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@smithy/protocol-http/dist-es/types.js @@ -0,0 +1 @@ +export {}; diff --git a/amplify/functions/downloadDocument/node_modules/@smithy/protocol-http/dist-types/Field.d.ts b/amplify/functions/downloadDocument/node_modules/@smithy/protocol-http/dist-types/Field.d.ts new file mode 100644 index 0000000..2d1613a --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@smithy/protocol-http/dist-types/Field.d.ts @@ -0,0 +1,49 @@ +import { FieldOptions, FieldPosition } from "@smithy/types"; +/** + * A name-value pair representing a single field + * transmitted in an HTTP Request or Response. + * + * The kind will dictate metadata placement within + * an HTTP message. + * + * All field names are case insensitive and + * case-variance must be treated as equivalent. + * Names MAY be normalized but SHOULD be preserved + * for accuracy during transmission. + */ +export declare class Field { + readonly name: string; + readonly kind: FieldPosition; + values: string[]; + constructor({ name, kind, values }: FieldOptions); + /** + * Appends a value to the field. + * + * @param value The value to append. + */ + add(value: string): void; + /** + * Overwrite existing field values. + * + * @param values The new field values. + */ + set(values: string[]): void; + /** + * Remove all matching entries from list. + * + * @param value Value to remove. + */ + remove(value: string): void; + /** + * Get comma-delimited string. + * + * @returns String representation of {@link Field}. + */ + toString(): string; + /** + * Get string values as a list + * + * @returns Values in {@link Field} as a list. 
+ */ + get(): string[]; +} diff --git a/amplify/functions/downloadDocument/node_modules/@smithy/protocol-http/dist-types/Fields.d.ts b/amplify/functions/downloadDocument/node_modules/@smithy/protocol-http/dist-types/Fields.d.ts new file mode 100644 index 0000000..8915826 --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@smithy/protocol-http/dist-types/Fields.d.ts @@ -0,0 +1,44 @@ +import { FieldPosition } from "@smithy/types"; +import { Field } from "./Field"; +export type FieldsOptions = { + fields?: Field[]; + encoding?: string; +}; +/** + * Collection of Field entries mapped by name. + */ +export declare class Fields { + private readonly entries; + private readonly encoding; + constructor({ fields, encoding }: FieldsOptions); + /** + * Set entry for a {@link Field} name. The `name` + * attribute will be used to key the collection. + * + * @param field The {@link Field} to set. + */ + setField(field: Field): void; + /** + * Retrieve {@link Field} entry by name. + * + * @param name The name of the {@link Field} entry + * to retrieve + * @returns The {@link Field} if it exists. + */ + getField(name: string): Field | undefined; + /** + * Delete entry from collection. + * + * @param name Name of the entry to delete. + */ + removeField(name: string): void; + /** + * Helper function for retrieving specific types of fields. + * Used to grab all headers or all trailers. + * + * @param kind {@link FieldPosition} of entries to retrieve. + * @returns The {@link Field} entries with the specified + * {@link FieldPosition}. 
+ */ + getByType(kind: FieldPosition): Field[]; +} diff --git a/amplify/functions/downloadDocument/node_modules/@smithy/protocol-http/dist-types/extensions/httpExtensionConfiguration.d.ts b/amplify/functions/downloadDocument/node_modules/@smithy/protocol-http/dist-types/extensions/httpExtensionConfiguration.d.ts new file mode 100644 index 0000000..bfe452d --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@smithy/protocol-http/dist-types/extensions/httpExtensionConfiguration.d.ts @@ -0,0 +1,37 @@ +import { HttpHandler } from "../httpHandler"; +/** + * @internal + */ +export interface HttpHandlerExtensionConfiguration { + setHttpHandler(handler: HttpHandler): void; + httpHandler(): HttpHandler; + updateHttpClientConfig(key: keyof HandlerConfig, value: HandlerConfig[typeof key]): void; + httpHandlerConfigs(): HandlerConfig; +} +/** + * @internal + */ +export type HttpHandlerExtensionConfigType = Partial<{ + httpHandler: HttpHandler; +}>; +/** + * @internal + * + * Helper function to resolve default extension configuration from runtime config + */ +export declare const getHttpHandlerExtensionConfiguration: (runtimeConfig: Partial<{ + httpHandler: HttpHandler; +}>) => { + setHttpHandler(handler: HttpHandler): void; + httpHandler(): HttpHandler; + updateHttpClientConfig(key: keyof HandlerConfig, value: HandlerConfig[keyof HandlerConfig]): void; + httpHandlerConfigs(): HandlerConfig; +}; +/** + * @internal + * + * Helper function to resolve runtime config from default extension configuration + */ +export declare const resolveHttpHandlerRuntimeConfig: (httpHandlerExtensionConfiguration: HttpHandlerExtensionConfiguration) => Partial<{ + httpHandler: HttpHandler; +}>; diff --git a/amplify/functions/downloadDocument/node_modules/@smithy/protocol-http/dist-types/extensions/index.d.ts b/amplify/functions/downloadDocument/node_modules/@smithy/protocol-http/dist-types/extensions/index.d.ts new file mode 100644 index 0000000..a215a4a --- /dev/null +++ 
b/amplify/functions/downloadDocument/node_modules/@smithy/protocol-http/dist-types/extensions/index.d.ts @@ -0,0 +1 @@ +export * from "./httpExtensionConfiguration"; diff --git a/amplify/functions/downloadDocument/node_modules/@smithy/protocol-http/dist-types/httpHandler.d.ts b/amplify/functions/downloadDocument/node_modules/@smithy/protocol-http/dist-types/httpHandler.d.ts new file mode 100644 index 0000000..8dc8d32 --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@smithy/protocol-http/dist-types/httpHandler.d.ts @@ -0,0 +1,35 @@ +import type { FetchHttpHandlerOptions, HttpHandlerOptions, NodeHttpHandlerOptions, RequestHandler } from "@smithy/types"; +import type { HttpRequest } from "./httpRequest"; +import type { HttpResponse } from "./httpResponse"; +/** + * @internal + */ +export type HttpHandler = RequestHandler & { + /** + * @internal + */ + updateHttpClientConfig(key: keyof HttpHandlerConfig, value: HttpHandlerConfig[typeof key]): void; + /** + * @internal + */ + httpHandlerConfigs(): HttpHandlerConfig; +}; +/** + * @public + * + * A type representing the accepted user inputs for the `requestHandler` field + * of a client's constructor object. + * + * You may provide an instance of an HttpHandler, or alternatively + * provide the constructor arguments as an object which will be passed + * to the constructor of the default request handler. + * + * The default class constructor to which your arguments will be passed + * varies. The Node.js default is the NodeHttpHandler and the browser/react-native + * default is the FetchHttpHandler. In rarer cases specific clients may be + * configured to use other default implementations such as Websocket or HTTP2. + * + * The fallback type Record is part of the union to allow + * passing constructor params to an unknown requestHandler type. 
+ */ +export type HttpHandlerUserInput = HttpHandler | NodeHttpHandlerOptions | FetchHttpHandlerOptions | Record; diff --git a/amplify/functions/downloadDocument/node_modules/@smithy/protocol-http/dist-types/httpRequest.d.ts b/amplify/functions/downloadDocument/node_modules/@smithy/protocol-http/dist-types/httpRequest.d.ts new file mode 100644 index 0000000..8b64ff6 --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@smithy/protocol-http/dist-types/httpRequest.d.ts @@ -0,0 +1,55 @@ +import { HeaderBag, HttpMessage, HttpRequest as IHttpRequest, QueryParameterBag, URI } from "@smithy/types"; +type HttpRequestOptions = Partial & Partial & { + method?: string; +}; +/** + * Use the distinct IHttpRequest interface from \@smithy/types instead. + * This should not be used due to + * overlapping with the concrete class' name. + * + * This is not marked deprecated since that would mark the concrete class + * deprecated as well. + * + * @internal + */ +export interface HttpRequest extends IHttpRequest { +} +/** + * @public + */ +export { IHttpRequest }; +/** + * @public + */ +export declare class HttpRequest implements HttpMessage, URI { + method: string; + protocol: string; + hostname: string; + port?: number; + path: string; + query: QueryParameterBag; + headers: HeaderBag; + username?: string; + password?: string; + fragment?: string; + body?: any; + constructor(options: HttpRequestOptions); + /** + * Note: this does not deep-clone the body. + */ + static clone(request: IHttpRequest): HttpRequest; + /** + * This method only actually asserts that request is the interface {@link IHttpRequest}, + * and not necessarily this concrete class. Left in place for API stability. + * + * Do not call instance methods on the input of this function, and + * do not assume it has the HttpRequest prototype. + */ + static isInstance(request: unknown): request is HttpRequest; + /** + * @deprecated use static HttpRequest.clone(request) instead. 
It's not safe to call + * this method because {@link HttpRequest.isInstance} incorrectly + * asserts that IHttpRequest (interface) objects are of type HttpRequest (class). + */ + clone(): HttpRequest; +} diff --git a/amplify/functions/downloadDocument/node_modules/@smithy/protocol-http/dist-types/httpResponse.d.ts b/amplify/functions/downloadDocument/node_modules/@smithy/protocol-http/dist-types/httpResponse.d.ts new file mode 100644 index 0000000..e51f18b --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@smithy/protocol-http/dist-types/httpResponse.d.ts @@ -0,0 +1,29 @@ +import { HeaderBag, HttpMessage, HttpResponse as IHttpResponse } from "@smithy/types"; +type HttpResponseOptions = Partial & { + statusCode: number; + reason?: string; +}; +/** + * Use the distinct IHttpResponse interface from \@smithy/types instead. + * This should not be used due to + * overlapping with the concrete class' name. + * + * This is not marked deprecated since that would mark the concrete class + * deprecated as well. 
+ * + * @internal + */ +export interface HttpResponse extends IHttpResponse { +} +/** + * @public + */ +export declare class HttpResponse { + statusCode: number; + reason?: string; + headers: HeaderBag; + body?: any; + constructor(options: HttpResponseOptions); + static isInstance(response: unknown): response is HttpResponse; +} +export {}; diff --git a/amplify/functions/downloadDocument/node_modules/@smithy/protocol-http/dist-types/index.d.ts b/amplify/functions/downloadDocument/node_modules/@smithy/protocol-http/dist-types/index.d.ts new file mode 100644 index 0000000..8ff7f26 --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@smithy/protocol-http/dist-types/index.d.ts @@ -0,0 +1,8 @@ +export * from "./extensions"; +export * from "./Field"; +export * from "./Fields"; +export * from "./httpHandler"; +export * from "./httpRequest"; +export * from "./httpResponse"; +export * from "./isValidHostname"; +export * from "./types"; diff --git a/amplify/functions/downloadDocument/node_modules/@smithy/protocol-http/dist-types/isValidHostname.d.ts b/amplify/functions/downloadDocument/node_modules/@smithy/protocol-http/dist-types/isValidHostname.d.ts new file mode 100644 index 0000000..6fb5bcb --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@smithy/protocol-http/dist-types/isValidHostname.d.ts @@ -0,0 +1 @@ +export declare function isValidHostname(hostname: string): boolean; diff --git a/amplify/functions/downloadDocument/node_modules/@smithy/protocol-http/dist-types/ts3.4/Field.d.ts b/amplify/functions/downloadDocument/node_modules/@smithy/protocol-http/dist-types/ts3.4/Field.d.ts new file mode 100644 index 0000000..faa4b70 --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@smithy/protocol-http/dist-types/ts3.4/Field.d.ts @@ -0,0 +1,49 @@ +import { FieldOptions, FieldPosition } from "@smithy/types"; +/** + * A name-value pair representing a single field + * transmitted in an HTTP Request or Response. 
+ * + * The kind will dictate metadata placement within + * an HTTP message. + * + * All field names are case insensitive and + * case-variance must be treated as equivalent. + * Names MAY be normalized but SHOULD be preserved + * for accuracy during transmission. + */ +export declare class Field { + readonly name: string; + readonly kind: FieldPosition; + values: string[]; + constructor({ name, kind, values }: FieldOptions); + /** + * Appends a value to the field. + * + * @param value The value to append. + */ + add(value: string): void; + /** + * Overwrite existing field values. + * + * @param values The new field values. + */ + set(values: string[]): void; + /** + * Remove all matching entries from list. + * + * @param value Value to remove. + */ + remove(value: string): void; + /** + * Get comma-delimited string. + * + * @returns String representation of {@link Field}. + */ + toString(): string; + /** + * Get string values as a list + * + * @returns Values in {@link Field} as a list. + */ + get(): string[]; +} diff --git a/amplify/functions/downloadDocument/node_modules/@smithy/protocol-http/dist-types/ts3.4/Fields.d.ts b/amplify/functions/downloadDocument/node_modules/@smithy/protocol-http/dist-types/ts3.4/Fields.d.ts new file mode 100644 index 0000000..616f55e --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@smithy/protocol-http/dist-types/ts3.4/Fields.d.ts @@ -0,0 +1,44 @@ +import { FieldPosition } from "@smithy/types"; +import { Field } from "./Field"; +export type FieldsOptions = { + fields?: Field[]; + encoding?: string; +}; +/** + * Collection of Field entries mapped by name. + */ +export declare class Fields { + private readonly entries; + private readonly encoding; + constructor({ fields, encoding }: FieldsOptions); + /** + * Set entry for a {@link Field} name. The `name` + * attribute will be used to key the collection. + * + * @param field The {@link Field} to set. 
+ */ + setField(field: Field): void; + /** + * Retrieve {@link Field} entry by name. + * + * @param name The name of the {@link Field} entry + * to retrieve + * @returns The {@link Field} if it exists. + */ + getField(name: string): Field | undefined; + /** + * Delete entry from collection. + * + * @param name Name of the entry to delete. + */ + removeField(name: string): void; + /** + * Helper function for retrieving specific types of fields. + * Used to grab all headers or all trailers. + * + * @param kind {@link FieldPosition} of entries to retrieve. + * @returns The {@link Field} entries with the specified + * {@link FieldPosition}. + */ + getByType(kind: FieldPosition): Field[]; +} diff --git a/amplify/functions/downloadDocument/node_modules/@smithy/protocol-http/dist-types/ts3.4/extensions/httpExtensionConfiguration.d.ts b/amplify/functions/downloadDocument/node_modules/@smithy/protocol-http/dist-types/ts3.4/extensions/httpExtensionConfiguration.d.ts new file mode 100644 index 0000000..3cd2cf6 --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@smithy/protocol-http/dist-types/ts3.4/extensions/httpExtensionConfiguration.d.ts @@ -0,0 +1,37 @@ +import { HttpHandler } from "../httpHandler"; +/** + * @internal + */ +export interface HttpHandlerExtensionConfiguration { + setHttpHandler(handler: HttpHandler): void; + httpHandler(): HttpHandler; + updateHttpClientConfig(key: keyof HandlerConfig, value: HandlerConfig[typeof key]): void; + httpHandlerConfigs(): HandlerConfig; +} +/** + * @internal + */ +export type HttpHandlerExtensionConfigType = Partial<{ + httpHandler: HttpHandler; +}>; +/** + * @internal + * + * Helper function to resolve default extension configuration from runtime config + */ +export declare const getHttpHandlerExtensionConfiguration: (runtimeConfig: Partial<{ + httpHandler: HttpHandler; +}>) => { + setHttpHandler(handler: HttpHandler): void; + httpHandler(): HttpHandler; + updateHttpClientConfig(key: keyof HandlerConfig, value: 
HandlerConfig[keyof HandlerConfig]): void; + httpHandlerConfigs(): HandlerConfig; +}; +/** + * @internal + * + * Helper function to resolve runtime config from default extension configuration + */ +export declare const resolveHttpHandlerRuntimeConfig: (httpHandlerExtensionConfiguration: HttpHandlerExtensionConfiguration) => Partial<{ + httpHandler: HttpHandler; +}>; diff --git a/amplify/functions/downloadDocument/node_modules/@smithy/protocol-http/dist-types/ts3.4/extensions/index.d.ts b/amplify/functions/downloadDocument/node_modules/@smithy/protocol-http/dist-types/ts3.4/extensions/index.d.ts new file mode 100644 index 0000000..e0f765b --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@smithy/protocol-http/dist-types/ts3.4/extensions/index.d.ts @@ -0,0 +1 @@ +export * from "./httpExtensionConfiguration"; diff --git a/amplify/functions/downloadDocument/node_modules/@smithy/protocol-http/dist-types/ts3.4/httpHandler.d.ts b/amplify/functions/downloadDocument/node_modules/@smithy/protocol-http/dist-types/ts3.4/httpHandler.d.ts new file mode 100644 index 0000000..b8f1978 --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@smithy/protocol-http/dist-types/ts3.4/httpHandler.d.ts @@ -0,0 +1,35 @@ +import { FetchHttpHandlerOptions, HttpHandlerOptions, NodeHttpHandlerOptions, RequestHandler } from "@smithy/types"; +import { HttpRequest } from "./httpRequest"; +import { HttpResponse } from "./httpResponse"; +/** + * @internal + */ +export type HttpHandler = RequestHandler & { + /** + * @internal + */ + updateHttpClientConfig(key: keyof HttpHandlerConfig, value: HttpHandlerConfig[typeof key]): void; + /** + * @internal + */ + httpHandlerConfigs(): HttpHandlerConfig; +}; +/** + * @public + * + * A type representing the accepted user inputs for the `requestHandler` field + * of a client's constructor object. 
+ * + * You may provide an instance of an HttpHandler, or alternatively + * provide the constructor arguments as an object which will be passed + * to the constructor of the default request handler. + * + * The default class constructor to which your arguments will be passed + * varies. The Node.js default is the NodeHttpHandler and the browser/react-native + * default is the FetchHttpHandler. In rarer cases specific clients may be + * configured to use other default implementations such as Websocket or HTTP2. + * + * The fallback type Record is part of the union to allow + * passing constructor params to an unknown requestHandler type. + */ +export type HttpHandlerUserInput = HttpHandler | NodeHttpHandlerOptions | FetchHttpHandlerOptions | Record; diff --git a/amplify/functions/downloadDocument/node_modules/@smithy/protocol-http/dist-types/ts3.4/httpRequest.d.ts b/amplify/functions/downloadDocument/node_modules/@smithy/protocol-http/dist-types/ts3.4/httpRequest.d.ts new file mode 100644 index 0000000..cdcf38b --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@smithy/protocol-http/dist-types/ts3.4/httpRequest.d.ts @@ -0,0 +1,55 @@ +import { HeaderBag, HttpMessage, HttpRequest as IHttpRequest, QueryParameterBag, URI } from "@smithy/types"; +type HttpRequestOptions = Partial & Partial & { + method?: string; +}; +/** + * Use the distinct IHttpRequest interface from \@smithy/types instead. + * This should not be used due to + * overlapping with the concrete class' name. + * + * This is not marked deprecated since that would mark the concrete class + * deprecated as well. 
+ * + * @internal + */ +export interface HttpRequest extends IHttpRequest { +} +/** + * @public + */ +export { IHttpRequest }; +/** + * @public + */ +export declare class HttpRequest implements HttpMessage, URI { + method: string; + protocol: string; + hostname: string; + port?: number; + path: string; + query: QueryParameterBag; + headers: HeaderBag; + username?: string; + password?: string; + fragment?: string; + body?: any; + constructor(options: HttpRequestOptions); + /** + * Note: this does not deep-clone the body. + */ + static clone(request: IHttpRequest): HttpRequest; + /** + * This method only actually asserts that request is the interface {@link IHttpRequest}, + * and not necessarily this concrete class. Left in place for API stability. + * + * Do not call instance methods on the input of this function, and + * do not assume it has the HttpRequest prototype. + */ + static isInstance(request: unknown): request is HttpRequest; + /** + * @deprecated use static HttpRequest.clone(request) instead. It's not safe to call + * this method because {@link HttpRequest.isInstance} incorrectly + * asserts that IHttpRequest (interface) objects are of type HttpRequest (class). + */ + clone(): HttpRequest; +} diff --git a/amplify/functions/downloadDocument/node_modules/@smithy/protocol-http/dist-types/ts3.4/httpResponse.d.ts b/amplify/functions/downloadDocument/node_modules/@smithy/protocol-http/dist-types/ts3.4/httpResponse.d.ts new file mode 100644 index 0000000..8babc91 --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@smithy/protocol-http/dist-types/ts3.4/httpResponse.d.ts @@ -0,0 +1,29 @@ +import { HeaderBag, HttpMessage, HttpResponse as IHttpResponse } from "@smithy/types"; +type HttpResponseOptions = Partial & { + statusCode: number; + reason?: string; +}; +/** + * Use the distinct IHttpResponse interface from \@smithy/types instead. + * This should not be used due to + * overlapping with the concrete class' name. 
+ * + * This is not marked deprecated since that would mark the concrete class + * deprecated as well. + * + * @internal + */ +export interface HttpResponse extends IHttpResponse { +} +/** + * @public + */ +export declare class HttpResponse { + statusCode: number; + reason?: string; + headers: HeaderBag; + body?: any; + constructor(options: HttpResponseOptions); + static isInstance(response: unknown): response is HttpResponse; +} +export {}; diff --git a/amplify/functions/downloadDocument/node_modules/@smithy/protocol-http/dist-types/ts3.4/index.d.ts b/amplify/functions/downloadDocument/node_modules/@smithy/protocol-http/dist-types/ts3.4/index.d.ts new file mode 100644 index 0000000..08feffa --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@smithy/protocol-http/dist-types/ts3.4/index.d.ts @@ -0,0 +1,8 @@ +export * from "./extensions"; +export * from "./Field"; +export * from "./Fields"; +export * from "./httpHandler"; +export * from "./httpRequest"; +export * from "./httpResponse"; +export * from "./isValidHostname"; +export * from "./types"; diff --git a/amplify/functions/downloadDocument/node_modules/@smithy/protocol-http/dist-types/ts3.4/isValidHostname.d.ts b/amplify/functions/downloadDocument/node_modules/@smithy/protocol-http/dist-types/ts3.4/isValidHostname.d.ts new file mode 100644 index 0000000..7b85b36 --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@smithy/protocol-http/dist-types/ts3.4/isValidHostname.d.ts @@ -0,0 +1 @@ +export declare function isValidHostname(hostname: string): boolean; diff --git a/amplify/functions/downloadDocument/node_modules/@smithy/protocol-http/dist-types/ts3.4/types.d.ts b/amplify/functions/downloadDocument/node_modules/@smithy/protocol-http/dist-types/ts3.4/types.d.ts new file mode 100644 index 0000000..42e3c66 --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@smithy/protocol-http/dist-types/ts3.4/types.d.ts @@ -0,0 +1,21 @@ +import { FieldOptions as __FieldOptions, 
FieldPosition as __FieldPosition, HeaderBag as __HeaderBag, HttpHandlerOptions as __HttpHandlerOptions, HttpMessage as __HttpMessage } from "@smithy/types"; +/** + * @deprecated Use FieldOptions from `@smithy/types` instead + */ +export type FieldOptions = __FieldOptions; +/** + * @deprecated Use FieldPosition from `@smithy/types` instead + */ +export type FieldPosition = __FieldPosition; +/** + * @deprecated Use HeaderBag from `@smithy/types` instead + */ +export type HeaderBag = __HeaderBag; +/** + * @deprecated Use HttpMessage from `@smithy/types` instead + */ +export type HttpMessage = __HttpMessage; +/** + * @deprecated Use HttpHandlerOptions from `@smithy/types` instead + */ +export type HttpHandlerOptions = __HttpHandlerOptions; diff --git a/amplify/functions/downloadDocument/node_modules/@smithy/protocol-http/dist-types/types.d.ts b/amplify/functions/downloadDocument/node_modules/@smithy/protocol-http/dist-types/types.d.ts new file mode 100644 index 0000000..0d597b9 --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@smithy/protocol-http/dist-types/types.d.ts @@ -0,0 +1,21 @@ +import { FieldOptions as __FieldOptions, FieldPosition as __FieldPosition, HeaderBag as __HeaderBag, HttpHandlerOptions as __HttpHandlerOptions, HttpMessage as __HttpMessage } from "@smithy/types"; +/** + * @deprecated Use FieldOptions from `@smithy/types` instead + */ +export type FieldOptions = __FieldOptions; +/** + * @deprecated Use FieldPosition from `@smithy/types` instead + */ +export type FieldPosition = __FieldPosition; +/** + * @deprecated Use HeaderBag from `@smithy/types` instead + */ +export type HeaderBag = __HeaderBag; +/** + * @deprecated Use HttpMessage from `@smithy/types` instead + */ +export type HttpMessage = __HttpMessage; +/** + * @deprecated Use HttpHandlerOptions from `@smithy/types` instead + */ +export type HttpHandlerOptions = __HttpHandlerOptions; diff --git 
a/amplify/functions/downloadDocument/node_modules/@smithy/protocol-http/package.json b/amplify/functions/downloadDocument/node_modules/@smithy/protocol-http/package.json new file mode 100644 index 0000000..549711a --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@smithy/protocol-http/package.json @@ -0,0 +1,61 @@ +{ + "name": "@smithy/protocol-http", + "version": "5.1.0", + "scripts": { + "build": "concurrently 'yarn:build:cjs' 'yarn:build:es' 'yarn:build:types && yarn build:types:downlevel'", + "build:cjs": "node ../../scripts/inline protocol-http", + "build:es": "yarn g:tsc -p tsconfig.es.json", + "build:types": "yarn g:tsc -p tsconfig.types.json", + "build:types:downlevel": "rimraf dist-types/ts3.4 && downlevel-dts dist-types dist-types/ts3.4", + "stage-release": "rimraf ./.release && yarn pack && mkdir ./.release && tar zxvf ./package.tgz --directory ./.release && rm ./package.tgz", + "clean": "rimraf ./dist-* && rimraf *.tsbuildinfo || exit 0", + "lint": "eslint -c ../../.eslintrc.js \"src/**/*.ts\"", + "format": "prettier --config ../../prettier.config.js --ignore-path ../../.prettierignore --write \"**/*.{ts,md,json}\"", + "test": "yarn g:vitest run", + "test:watch": "yarn g:vitest watch" + }, + "main": "./dist-cjs/index.js", + "module": "./dist-es/index.js", + "types": "./dist-types/index.d.ts", + "author": { + "name": "AWS Smithy Team", + "email": "", + "url": "https://smithy.io" + }, + "license": "Apache-2.0", + "dependencies": { + "@smithy/types": "^4.2.0", + "tslib": "^2.6.2" + }, + "engines": { + "node": ">=18.0.0" + }, + "typesVersions": { + "<4.0": { + "dist-types/*": [ + "dist-types/ts3.4/*" + ] + } + }, + "files": [ + "dist-*/**" + ], + "homepage": "https://github.com/smithy-lang/smithy-typescript/tree/main/packages/protocol-http", + "repository": { + "type": "git", + "url": "https://github.com/smithy-lang/smithy-typescript.git", + "directory": "packages/protocol-http" + }, + "devDependencies": { + "concurrently": "7.0.0", + 
"downlevel-dts": "0.10.1", + "rimraf": "3.0.2", + "typedoc": "0.23.23" + }, + "typedoc": { + "entryPoint": "src/index.ts" + }, + "publishConfig": { + "directory": ".release/package" + } +} \ No newline at end of file diff --git a/amplify/functions/downloadDocument/node_modules/@smithy/querystring-builder/LICENSE b/amplify/functions/downloadDocument/node_modules/@smithy/querystring-builder/LICENSE new file mode 100644 index 0000000..dd65ae0 --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@smithy/querystring-builder/LICENSE @@ -0,0 +1,201 @@ + Apache License + Version 2.0, January 2004 + http://www.apache.org/licenses/ + + TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION + + 1. Definitions. + + "License" shall mean the terms and conditions for use, reproduction, + and distribution as defined by Sections 1 through 9 of this document. + + "Licensor" shall mean the copyright owner or entity authorized by + the copyright owner that is granting the License. + + "Legal Entity" shall mean the union of the acting entity and all + other entities that control, are controlled by, or are under common + control with that entity. For the purposes of this definition, + "control" means (i) the power, direct or indirect, to cause the + direction or management of such entity, whether by contract or + otherwise, or (ii) ownership of fifty percent (50%) or more of the + outstanding shares, or (iii) beneficial ownership of such entity. + + "You" (or "Your") shall mean an individual or Legal Entity + exercising permissions granted by this License. + + "Source" form shall mean the preferred form for making modifications, + including but not limited to software source code, documentation + source, and configuration files. + + "Object" form shall mean any form resulting from mechanical + transformation or translation of a Source form, including but + not limited to compiled object code, generated documentation, + and conversions to other media types. 
+ + "Work" shall mean the work of authorship, whether in Source or + Object form, made available under the License, as indicated by a + copyright notice that is included in or attached to the work + (an example is provided in the Appendix below). + + "Derivative Works" shall mean any work, whether in Source or Object + form, that is based on (or derived from) the Work and for which the + editorial revisions, annotations, elaborations, or other modifications + represent, as a whole, an original work of authorship. For the purposes + of this License, Derivative Works shall not include works that remain + separable from, or merely link (or bind by name) to the interfaces of, + the Work and Derivative Works thereof. + + "Contribution" shall mean any work of authorship, including + the original version of the Work and any modifications or additions + to that Work or Derivative Works thereof, that is intentionally + submitted to Licensor for inclusion in the Work by the copyright owner + or by an individual or Legal Entity authorized to submit on behalf of + the copyright owner. For the purposes of this definition, "submitted" + means any form of electronic, verbal, or written communication sent + to the Licensor or its representatives, including but not limited to + communication on electronic mailing lists, source code control systems, + and issue tracking systems that are managed by, or on behalf of, the + Licensor for the purpose of discussing and improving the Work, but + excluding communication that is conspicuously marked or otherwise + designated in writing by the copyright owner as "Not a Contribution." + + "Contributor" shall mean Licensor and any individual or Legal Entity + on behalf of whom a Contribution has been received by Licensor and + subsequently incorporated within the Work. + + 2. Grant of Copyright License. 
Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + copyright license to reproduce, prepare Derivative Works of, + publicly display, publicly perform, sublicense, and distribute the + Work and such Derivative Works in Source or Object form. + + 3. Grant of Patent License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + (except as stated in this section) patent license to make, have made, + use, offer to sell, sell, import, and otherwise transfer the Work, + where such license applies only to those patent claims licensable + by such Contributor that are necessarily infringed by their + Contribution(s) alone or by combination of their Contribution(s) + with the Work to which such Contribution(s) was submitted. If You + institute patent litigation against any entity (including a + cross-claim or counterclaim in a lawsuit) alleging that the Work + or a Contribution incorporated within the Work constitutes direct + or contributory patent infringement, then any patent licenses + granted to You under this License for that Work shall terminate + as of the date such litigation is filed. + + 4. Redistribution. 
You may reproduce and distribute copies of the + Work or Derivative Works thereof in any medium, with or without + modifications, and in Source or Object form, provided that You + meet the following conditions: + + (a) You must give any other recipients of the Work or + Derivative Works a copy of this License; and + + (b) You must cause any modified files to carry prominent notices + stating that You changed the files; and + + (c) You must retain, in the Source form of any Derivative Works + that You distribute, all copyright, patent, trademark, and + attribution notices from the Source form of the Work, + excluding those notices that do not pertain to any part of + the Derivative Works; and + + (d) If the Work includes a "NOTICE" text file as part of its + distribution, then any Derivative Works that You distribute must + include a readable copy of the attribution notices contained + within such NOTICE file, excluding those notices that do not + pertain to any part of the Derivative Works, in at least one + of the following places: within a NOTICE text file distributed + as part of the Derivative Works; within the Source form or + documentation, if provided along with the Derivative Works; or, + within a display generated by the Derivative Works, if and + wherever such third-party notices normally appear. The contents + of the NOTICE file are for informational purposes only and + do not modify the License. You may add Your own attribution + notices within Derivative Works that You distribute, alongside + or as an addendum to the NOTICE text from the Work, provided + that such additional attribution notices cannot be construed + as modifying the License. 
+ + You may add Your own copyright statement to Your modifications and + may provide additional or different license terms and conditions + for use, reproduction, or distribution of Your modifications, or + for any such Derivative Works as a whole, provided Your use, + reproduction, and distribution of the Work otherwise complies with + the conditions stated in this License. + + 5. Submission of Contributions. Unless You explicitly state otherwise, + any Contribution intentionally submitted for inclusion in the Work + by You to the Licensor shall be under the terms and conditions of + this License, without any additional terms or conditions. + Notwithstanding the above, nothing herein shall supersede or modify + the terms of any separate license agreement you may have executed + with Licensor regarding such Contributions. + + 6. Trademarks. This License does not grant permission to use the trade + names, trademarks, service marks, or product names of the Licensor, + except as required for reasonable and customary use in describing the + origin of the Work and reproducing the content of the NOTICE file. + + 7. Disclaimer of Warranty. Unless required by applicable law or + agreed to in writing, Licensor provides the Work (and each + Contributor provides its Contributions) on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or + implied, including, without limitation, any warranties or conditions + of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A + PARTICULAR PURPOSE. You are solely responsible for determining the + appropriateness of using or redistributing the Work and assume any + risks associated with Your exercise of permissions under this License. + + 8. Limitation of Liability. 
In no event and under no legal theory, + whether in tort (including negligence), contract, or otherwise, + unless required by applicable law (such as deliberate and grossly + negligent acts) or agreed to in writing, shall any Contributor be + liable to You for damages, including any direct, indirect, special, + incidental, or consequential damages of any character arising as a + result of this License or out of the use or inability to use the + Work (including but not limited to damages for loss of goodwill, + work stoppage, computer failure or malfunction, or any and all + other commercial damages or losses), even if such Contributor + has been advised of the possibility of such damages. + + 9. Accepting Warranty or Additional Liability. While redistributing + the Work or Derivative Works thereof, You may choose to offer, + and charge a fee for, acceptance of support, warranty, indemnity, + or other liability obligations and/or rights consistent with this + License. However, in accepting such obligations, You may act only + on Your own behalf and on Your sole responsibility, not on behalf + of any other Contributor, and only if You agree to indemnify, + defend, and hold each Contributor harmless for any liability + incurred by, or claims asserted against, such Contributor by reason + of your accepting any such warranty or additional liability. + + END OF TERMS AND CONDITIONS + + APPENDIX: How to apply the Apache License to your work. + + To apply the Apache License to your work, attach the following + boilerplate notice, with the fields enclosed by brackets "{}" + replaced with your own identifying information. (Don't include + the brackets!) The text should be enclosed in the appropriate + comment syntax for the file format. We also recommend that a + file or class name and description of purpose be included on the + same "printed page" as the copyright notice for easier + identification within third-party archives. + + Copyright 2018-2020 Amazon.com, Inc. 
or its affiliates. All Rights Reserved. + + Licensed under the Apache License, Version 2.0 (the "License"); + you may not use this file except in compliance with the License. + You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + + Unless required by applicable law or agreed to in writing, software + distributed under the License is distributed on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + See the License for the specific language governing permissions and + limitations under the License. diff --git a/amplify/functions/downloadDocument/node_modules/@smithy/querystring-builder/README.md b/amplify/functions/downloadDocument/node_modules/@smithy/querystring-builder/README.md new file mode 100644 index 0000000..00275da --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@smithy/querystring-builder/README.md @@ -0,0 +1,10 @@ +# @smithy/querystring-builder + +[![NPM version](https://img.shields.io/npm/v/@smithy/querystring-builder/latest.svg)](https://www.npmjs.com/package/@smithy/querystring-builder) +[![NPM downloads](https://img.shields.io/npm/dm/@smithy/querystring-builder.svg)](https://www.npmjs.com/package/@smithy/querystring-builder) + +> An internal package + +## Usage + +You probably shouldn't, at least directly. 
diff --git a/amplify/functions/downloadDocument/node_modules/@smithy/querystring-builder/dist-cjs/index.js b/amplify/functions/downloadDocument/node_modules/@smithy/querystring-builder/dist-cjs/index.js new file mode 100644 index 0000000..7030242 --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@smithy/querystring-builder/dist-cjs/index.js @@ -0,0 +1,52 @@ +var __defProp = Object.defineProperty; +var __getOwnPropDesc = Object.getOwnPropertyDescriptor; +var __getOwnPropNames = Object.getOwnPropertyNames; +var __hasOwnProp = Object.prototype.hasOwnProperty; +var __name = (target, value) => __defProp(target, "name", { value, configurable: true }); +var __export = (target, all) => { + for (var name in all) + __defProp(target, name, { get: all[name], enumerable: true }); +}; +var __copyProps = (to, from, except, desc) => { + if (from && typeof from === "object" || typeof from === "function") { + for (let key of __getOwnPropNames(from)) + if (!__hasOwnProp.call(to, key) && key !== except) + __defProp(to, key, { get: () => from[key], enumerable: !(desc = __getOwnPropDesc(from, key)) || desc.enumerable }); + } + return to; +}; +var __toCommonJS = (mod) => __copyProps(__defProp({}, "__esModule", { value: true }), mod); + +// src/index.ts +var src_exports = {}; +__export(src_exports, { + buildQueryString: () => buildQueryString +}); +module.exports = __toCommonJS(src_exports); +var import_util_uri_escape = require("@smithy/util-uri-escape"); +function buildQueryString(query) { + const parts = []; + for (let key of Object.keys(query).sort()) { + const value = query[key]; + key = (0, import_util_uri_escape.escapeUri)(key); + if (Array.isArray(value)) { + for (let i = 0, iLen = value.length; i < iLen; i++) { + parts.push(`${key}=${(0, import_util_uri_escape.escapeUri)(value[i])}`); + } + } else { + let qsEntry = key; + if (value || typeof value === "string") { + qsEntry += `=${(0, import_util_uri_escape.escapeUri)(value)}`; + } + parts.push(qsEntry); + } + } 
+ return parts.join("&"); +} +__name(buildQueryString, "buildQueryString"); +// Annotate the CommonJS export names for ESM import in node: + +0 && (module.exports = { + buildQueryString +}); + diff --git a/amplify/functions/downloadDocument/node_modules/@smithy/querystring-builder/dist-es/index.js b/amplify/functions/downloadDocument/node_modules/@smithy/querystring-builder/dist-es/index.js new file mode 100644 index 0000000..fbc7684 --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@smithy/querystring-builder/dist-es/index.js @@ -0,0 +1,21 @@ +import { escapeUri } from "@smithy/util-uri-escape"; +export function buildQueryString(query) { + const parts = []; + for (let key of Object.keys(query).sort()) { + const value = query[key]; + key = escapeUri(key); + if (Array.isArray(value)) { + for (let i = 0, iLen = value.length; i < iLen; i++) { + parts.push(`${key}=${escapeUri(value[i])}`); + } + } + else { + let qsEntry = key; + if (value || typeof value === "string") { + qsEntry += `=${escapeUri(value)}`; + } + parts.push(qsEntry); + } + } + return parts.join("&"); +} diff --git a/amplify/functions/downloadDocument/node_modules/@smithy/querystring-builder/dist-types/index.d.ts b/amplify/functions/downloadDocument/node_modules/@smithy/querystring-builder/dist-types/index.d.ts new file mode 100644 index 0000000..538b1b0 --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@smithy/querystring-builder/dist-types/index.d.ts @@ -0,0 +1,5 @@ +import { QueryParameterBag } from "@smithy/types"; +/** + * @internal + */ +export declare function buildQueryString(query: QueryParameterBag): string; diff --git a/amplify/functions/downloadDocument/node_modules/@smithy/querystring-builder/dist-types/ts3.4/index.d.ts b/amplify/functions/downloadDocument/node_modules/@smithy/querystring-builder/dist-types/ts3.4/index.d.ts new file mode 100644 index 0000000..1f866f3 --- /dev/null +++ 
b/amplify/functions/downloadDocument/node_modules/@smithy/querystring-builder/dist-types/ts3.4/index.d.ts @@ -0,0 +1,5 @@ +import { QueryParameterBag } from "@smithy/types"; +/** + * @internal + */ +export declare function buildQueryString(query: QueryParameterBag): string; diff --git a/amplify/functions/downloadDocument/node_modules/@smithy/querystring-builder/package.json b/amplify/functions/downloadDocument/node_modules/@smithy/querystring-builder/package.json new file mode 100644 index 0000000..d144f0a --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@smithy/querystring-builder/package.json @@ -0,0 +1,60 @@ +{ + "name": "@smithy/querystring-builder", + "version": "4.0.2", + "scripts": { + "build": "concurrently 'yarn:build:cjs' 'yarn:build:es' 'yarn:build:types && yarn build:types:downlevel'", + "build:cjs": "node ../../scripts/inline querystring-builder", + "build:es": "yarn g:tsc -p tsconfig.es.json", + "build:types": "yarn g:tsc -p tsconfig.types.json", + "build:types:downlevel": "rimraf dist-types/ts3.4 && downlevel-dts dist-types dist-types/ts3.4", + "stage-release": "rimraf ./.release && yarn pack && mkdir ./.release && tar zxvf ./package.tgz --directory ./.release && rm ./package.tgz", + "clean": "rimraf ./dist-* && rimraf *.tsbuildinfo || exit 0", + "lint": "eslint -c ../../.eslintrc.js \"src/**/*.ts\"", + "format": "prettier --config ../../prettier.config.js --ignore-path ../../.prettierignore --write \"**/*.{ts,md,json}\"", + "test": "exit 0" + }, + "main": "./dist-cjs/index.js", + "module": "./dist-es/index.js", + "types": "./dist-types/index.d.ts", + "author": { + "name": "AWS SDK for JavaScript Team", + "url": "https://aws.amazon.com/javascript/" + }, + "license": "Apache-2.0", + "dependencies": { + "@smithy/types": "^4.2.0", + "@smithy/util-uri-escape": "^4.0.0", + "tslib": "^2.6.2" + }, + "engines": { + "node": ">=18.0.0" + }, + "typesVersions": { + "<4.0": { + "dist-types/*": [ + "dist-types/ts3.4/*" + ] + } + }, + "files": [ 
+ "dist-*/**" + ], + "homepage": "https://github.com/smithy-lang/smithy-typescript/tree/main/packages/querystring-builder", + "repository": { + "type": "git", + "url": "https://github.com/smithy-lang/smithy-typescript.git", + "directory": "packages/querystring-builder" + }, + "devDependencies": { + "concurrently": "7.0.0", + "downlevel-dts": "0.10.1", + "rimraf": "3.0.2", + "typedoc": "0.23.23" + }, + "typedoc": { + "entryPoint": "src/index.ts" + }, + "publishConfig": { + "directory": ".release/package" + } +} \ No newline at end of file diff --git a/amplify/functions/downloadDocument/node_modules/@smithy/querystring-parser/LICENSE b/amplify/functions/downloadDocument/node_modules/@smithy/querystring-parser/LICENSE new file mode 100644 index 0000000..dd65ae0 --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@smithy/querystring-parser/LICENSE @@ -0,0 +1,201 @@ + Apache License + Version 2.0, January 2004 + http://www.apache.org/licenses/ + + TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION + + 1. Definitions. + + "License" shall mean the terms and conditions for use, reproduction, + and distribution as defined by Sections 1 through 9 of this document. + + "Licensor" shall mean the copyright owner or entity authorized by + the copyright owner that is granting the License. + + "Legal Entity" shall mean the union of the acting entity and all + other entities that control, are controlled by, or are under common + control with that entity. For the purposes of this definition, + "control" means (i) the power, direct or indirect, to cause the + direction or management of such entity, whether by contract or + otherwise, or (ii) ownership of fifty percent (50%) or more of the + outstanding shares, or (iii) beneficial ownership of such entity. + + "You" (or "Your") shall mean an individual or Legal Entity + exercising permissions granted by this License. 
+ + "Source" form shall mean the preferred form for making modifications, + including but not limited to software source code, documentation + source, and configuration files. + + "Object" form shall mean any form resulting from mechanical + transformation or translation of a Source form, including but + not limited to compiled object code, generated documentation, + and conversions to other media types. + + "Work" shall mean the work of authorship, whether in Source or + Object form, made available under the License, as indicated by a + copyright notice that is included in or attached to the work + (an example is provided in the Appendix below). + + "Derivative Works" shall mean any work, whether in Source or Object + form, that is based on (or derived from) the Work and for which the + editorial revisions, annotations, elaborations, or other modifications + represent, as a whole, an original work of authorship. For the purposes + of this License, Derivative Works shall not include works that remain + separable from, or merely link (or bind by name) to the interfaces of, + the Work and Derivative Works thereof. + + "Contribution" shall mean any work of authorship, including + the original version of the Work and any modifications or additions + to that Work or Derivative Works thereof, that is intentionally + submitted to Licensor for inclusion in the Work by the copyright owner + or by an individual or Legal Entity authorized to submit on behalf of + the copyright owner. 
For the purposes of this definition, "submitted" + means any form of electronic, verbal, or written communication sent + to the Licensor or its representatives, including but not limited to + communication on electronic mailing lists, source code control systems, + and issue tracking systems that are managed by, or on behalf of, the + Licensor for the purpose of discussing and improving the Work, but + excluding communication that is conspicuously marked or otherwise + designated in writing by the copyright owner as "Not a Contribution." + + "Contributor" shall mean Licensor and any individual or Legal Entity + on behalf of whom a Contribution has been received by Licensor and + subsequently incorporated within the Work. + + 2. Grant of Copyright License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + copyright license to reproduce, prepare Derivative Works of, + publicly display, publicly perform, sublicense, and distribute the + Work and such Derivative Works in Source or Object form. + + 3. Grant of Patent License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + (except as stated in this section) patent license to make, have made, + use, offer to sell, sell, import, and otherwise transfer the Work, + where such license applies only to those patent claims licensable + by such Contributor that are necessarily infringed by their + Contribution(s) alone or by combination of their Contribution(s) + with the Work to which such Contribution(s) was submitted. 
If You + institute patent litigation against any entity (including a + cross-claim or counterclaim in a lawsuit) alleging that the Work + or a Contribution incorporated within the Work constitutes direct + or contributory patent infringement, then any patent licenses + granted to You under this License for that Work shall terminate + as of the date such litigation is filed. + + 4. Redistribution. You may reproduce and distribute copies of the + Work or Derivative Works thereof in any medium, with or without + modifications, and in Source or Object form, provided that You + meet the following conditions: + + (a) You must give any other recipients of the Work or + Derivative Works a copy of this License; and + + (b) You must cause any modified files to carry prominent notices + stating that You changed the files; and + + (c) You must retain, in the Source form of any Derivative Works + that You distribute, all copyright, patent, trademark, and + attribution notices from the Source form of the Work, + excluding those notices that do not pertain to any part of + the Derivative Works; and + + (d) If the Work includes a "NOTICE" text file as part of its + distribution, then any Derivative Works that You distribute must + include a readable copy of the attribution notices contained + within such NOTICE file, excluding those notices that do not + pertain to any part of the Derivative Works, in at least one + of the following places: within a NOTICE text file distributed + as part of the Derivative Works; within the Source form or + documentation, if provided along with the Derivative Works; or, + within a display generated by the Derivative Works, if and + wherever such third-party notices normally appear. The contents + of the NOTICE file are for informational purposes only and + do not modify the License. 
You may add Your own attribution + notices within Derivative Works that You distribute, alongside + or as an addendum to the NOTICE text from the Work, provided + that such additional attribution notices cannot be construed + as modifying the License. + + You may add Your own copyright statement to Your modifications and + may provide additional or different license terms and conditions + for use, reproduction, or distribution of Your modifications, or + for any such Derivative Works as a whole, provided Your use, + reproduction, and distribution of the Work otherwise complies with + the conditions stated in this License. + + 5. Submission of Contributions. Unless You explicitly state otherwise, + any Contribution intentionally submitted for inclusion in the Work + by You to the Licensor shall be under the terms and conditions of + this License, without any additional terms or conditions. + Notwithstanding the above, nothing herein shall supersede or modify + the terms of any separate license agreement you may have executed + with Licensor regarding such Contributions. + + 6. Trademarks. This License does not grant permission to use the trade + names, trademarks, service marks, or product names of the Licensor, + except as required for reasonable and customary use in describing the + origin of the Work and reproducing the content of the NOTICE file. + + 7. Disclaimer of Warranty. Unless required by applicable law or + agreed to in writing, Licensor provides the Work (and each + Contributor provides its Contributions) on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or + implied, including, without limitation, any warranties or conditions + of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A + PARTICULAR PURPOSE. You are solely responsible for determining the + appropriateness of using or redistributing the Work and assume any + risks associated with Your exercise of permissions under this License. + + 8. 
Limitation of Liability. In no event and under no legal theory, + whether in tort (including negligence), contract, or otherwise, + unless required by applicable law (such as deliberate and grossly + negligent acts) or agreed to in writing, shall any Contributor be + liable to You for damages, including any direct, indirect, special, + incidental, or consequential damages of any character arising as a + result of this License or out of the use or inability to use the + Work (including but not limited to damages for loss of goodwill, + work stoppage, computer failure or malfunction, or any and all + other commercial damages or losses), even if such Contributor + has been advised of the possibility of such damages. + + 9. Accepting Warranty or Additional Liability. While redistributing + the Work or Derivative Works thereof, You may choose to offer, + and charge a fee for, acceptance of support, warranty, indemnity, + or other liability obligations and/or rights consistent with this + License. However, in accepting such obligations, You may act only + on Your own behalf and on Your sole responsibility, not on behalf + of any other Contributor, and only if You agree to indemnify, + defend, and hold each Contributor harmless for any liability + incurred by, or claims asserted against, such Contributor by reason + of your accepting any such warranty or additional liability. + + END OF TERMS AND CONDITIONS + + APPENDIX: How to apply the Apache License to your work. + + To apply the Apache License to your work, attach the following + boilerplate notice, with the fields enclosed by brackets "{}" + replaced with your own identifying information. (Don't include + the brackets!) The text should be enclosed in the appropriate + comment syntax for the file format. We also recommend that a + file or class name and description of purpose be included on the + same "printed page" as the copyright notice for easier + identification within third-party archives. 
+ + Copyright 2018-2020 Amazon.com, Inc. or its affiliates. All Rights Reserved. + + Licensed under the Apache License, Version 2.0 (the "License"); + you may not use this file except in compliance with the License. + You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + + Unless required by applicable law or agreed to in writing, software + distributed under the License is distributed on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + See the License for the specific language governing permissions and + limitations under the License. diff --git a/amplify/functions/downloadDocument/node_modules/@smithy/querystring-parser/README.md b/amplify/functions/downloadDocument/node_modules/@smithy/querystring-parser/README.md new file mode 100644 index 0000000..02dcf51 --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@smithy/querystring-parser/README.md @@ -0,0 +1,10 @@ +# @smithy/querystring-parser + +[![NPM version](https://img.shields.io/npm/v/@smithy/querystring-parser/latest.svg)](https://www.npmjs.com/package/@smithy/querystring-parser) +[![NPM downloads](https://img.shields.io/npm/dm/@smithy/querystring-parser.svg)](https://www.npmjs.com/package/@smithy/querystring-parser) + +> An internal package + +## Usage + +You probably shouldn't, at least directly. 
diff --git a/amplify/functions/downloadDocument/node_modules/@smithy/querystring-parser/dist-cjs/index.js b/amplify/functions/downloadDocument/node_modules/@smithy/querystring-parser/dist-cjs/index.js new file mode 100644 index 0000000..924647c --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@smithy/querystring-parser/dist-cjs/index.js @@ -0,0 +1,53 @@ +var __defProp = Object.defineProperty; +var __getOwnPropDesc = Object.getOwnPropertyDescriptor; +var __getOwnPropNames = Object.getOwnPropertyNames; +var __hasOwnProp = Object.prototype.hasOwnProperty; +var __name = (target, value) => __defProp(target, "name", { value, configurable: true }); +var __export = (target, all) => { + for (var name in all) + __defProp(target, name, { get: all[name], enumerable: true }); +}; +var __copyProps = (to, from, except, desc) => { + if (from && typeof from === "object" || typeof from === "function") { + for (let key of __getOwnPropNames(from)) + if (!__hasOwnProp.call(to, key) && key !== except) + __defProp(to, key, { get: () => from[key], enumerable: !(desc = __getOwnPropDesc(from, key)) || desc.enumerable }); + } + return to; +}; +var __toCommonJS = (mod) => __copyProps(__defProp({}, "__esModule", { value: true }), mod); + +// src/index.ts +var src_exports = {}; +__export(src_exports, { + parseQueryString: () => parseQueryString +}); +module.exports = __toCommonJS(src_exports); +function parseQueryString(querystring) { + const query = {}; + querystring = querystring.replace(/^\?/, ""); + if (querystring) { + for (const pair of querystring.split("&")) { + let [key, value = null] = pair.split("="); + key = decodeURIComponent(key); + if (value) { + value = decodeURIComponent(value); + } + if (!(key in query)) { + query[key] = value; + } else if (Array.isArray(query[key])) { + query[key].push(value); + } else { + query[key] = [query[key], value]; + } + } + } + return query; +} +__name(parseQueryString, "parseQueryString"); +// Annotate the CommonJS export names 
for ESM import in node: + +0 && (module.exports = { + parseQueryString +}); + diff --git a/amplify/functions/downloadDocument/node_modules/@smithy/querystring-parser/dist-es/index.js b/amplify/functions/downloadDocument/node_modules/@smithy/querystring-parser/dist-es/index.js new file mode 100644 index 0000000..bd7bf00 --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@smithy/querystring-parser/dist-es/index.js @@ -0,0 +1,23 @@ +export function parseQueryString(querystring) { + const query = {}; + querystring = querystring.replace(/^\?/, ""); + if (querystring) { + for (const pair of querystring.split("&")) { + let [key, value = null] = pair.split("="); + key = decodeURIComponent(key); + if (value) { + value = decodeURIComponent(value); + } + if (!(key in query)) { + query[key] = value; + } + else if (Array.isArray(query[key])) { + query[key].push(value); + } + else { + query[key] = [query[key], value]; + } + } + } + return query; +} diff --git a/amplify/functions/downloadDocument/node_modules/@smithy/querystring-parser/dist-types/index.d.ts b/amplify/functions/downloadDocument/node_modules/@smithy/querystring-parser/dist-types/index.d.ts new file mode 100644 index 0000000..fdc1ba5 --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@smithy/querystring-parser/dist-types/index.d.ts @@ -0,0 +1,5 @@ +import { QueryParameterBag } from "@smithy/types"; +/** + * @internal + */ +export declare function parseQueryString(querystring: string): QueryParameterBag; diff --git a/amplify/functions/downloadDocument/node_modules/@smithy/querystring-parser/dist-types/ts3.4/index.d.ts b/amplify/functions/downloadDocument/node_modules/@smithy/querystring-parser/dist-types/ts3.4/index.d.ts new file mode 100644 index 0000000..8bb747d --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@smithy/querystring-parser/dist-types/ts3.4/index.d.ts @@ -0,0 +1,5 @@ +import { QueryParameterBag } from "@smithy/types"; +/** + * @internal + */ +export 
declare function parseQueryString(querystring: string): QueryParameterBag; diff --git a/amplify/functions/downloadDocument/node_modules/@smithy/querystring-parser/package.json b/amplify/functions/downloadDocument/node_modules/@smithy/querystring-parser/package.json new file mode 100644 index 0000000..9a27e7e --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@smithy/querystring-parser/package.json @@ -0,0 +1,60 @@ +{ + "name": "@smithy/querystring-parser", + "version": "4.0.2", + "scripts": { + "build": "concurrently 'yarn:build:cjs' 'yarn:build:es' 'yarn:build:types && yarn build:types:downlevel'", + "build:cjs": "node ../../scripts/inline querystring-parser", + "build:es": "yarn g:tsc -p tsconfig.es.json", + "build:types": "yarn g:tsc -p tsconfig.types.json", + "build:types:downlevel": "rimraf dist-types/ts3.4 && downlevel-dts dist-types dist-types/ts3.4", + "stage-release": "rimraf ./.release && yarn pack && mkdir ./.release && tar zxvf ./package.tgz --directory ./.release && rm ./package.tgz", + "clean": "rimraf ./dist-* && rimraf *.tsbuildinfo || exit 0", + "lint": "eslint -c ../../.eslintrc.js \"src/**/*.ts\"", + "format": "prettier --config ../../prettier.config.js --ignore-path ../../.prettierignore --write \"**/*.{ts,md,json}\"", + "test": "yarn g:vitest run", + "test:watch": "yarn g:vitest watch" + }, + "main": "./dist-cjs/index.js", + "module": "./dist-es/index.js", + "types": "./dist-types/index.d.ts", + "author": { + "name": "AWS SDK for JavaScript Team", + "url": "https://aws.amazon.com/javascript/" + }, + "license": "Apache-2.0", + "dependencies": { + "@smithy/types": "^4.2.0", + "tslib": "^2.6.2" + }, + "engines": { + "node": ">=18.0.0" + }, + "typesVersions": { + "<4.0": { + "dist-types/*": [ + "dist-types/ts3.4/*" + ] + } + }, + "files": [ + "dist-*/**" + ], + "homepage": "https://github.com/smithy-lang/smithy-typescript/tree/main/packages/querystring-parser", + "repository": { + "type": "git", + "url": 
"https://github.com/smithy-lang/smithy-typescript.git", + "directory": "packages/querystring-parser" + }, + "devDependencies": { + "concurrently": "7.0.0", + "downlevel-dts": "0.10.1", + "rimraf": "3.0.2", + "typedoc": "0.23.23" + }, + "typedoc": { + "entryPoint": "src/index.ts" + }, + "publishConfig": { + "directory": ".release/package" + } +} \ No newline at end of file diff --git a/amplify/functions/downloadDocument/node_modules/@smithy/service-error-classification/LICENSE b/amplify/functions/downloadDocument/node_modules/@smithy/service-error-classification/LICENSE new file mode 100644 index 0000000..dd65ae0 --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@smithy/service-error-classification/LICENSE @@ -0,0 +1,201 @@ + Apache License + Version 2.0, January 2004 + http://www.apache.org/licenses/ + + TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION + + 1. Definitions. + + "License" shall mean the terms and conditions for use, reproduction, + and distribution as defined by Sections 1 through 9 of this document. + + "Licensor" shall mean the copyright owner or entity authorized by + the copyright owner that is granting the License. + + "Legal Entity" shall mean the union of the acting entity and all + other entities that control, are controlled by, or are under common + control with that entity. For the purposes of this definition, + "control" means (i) the power, direct or indirect, to cause the + direction or management of such entity, whether by contract or + otherwise, or (ii) ownership of fifty percent (50%) or more of the + outstanding shares, or (iii) beneficial ownership of such entity. + + "You" (or "Your") shall mean an individual or Legal Entity + exercising permissions granted by this License. + + "Source" form shall mean the preferred form for making modifications, + including but not limited to software source code, documentation + source, and configuration files. 
+ + "Object" form shall mean any form resulting from mechanical + transformation or translation of a Source form, including but + not limited to compiled object code, generated documentation, + and conversions to other media types. + + "Work" shall mean the work of authorship, whether in Source or + Object form, made available under the License, as indicated by a + copyright notice that is included in or attached to the work + (an example is provided in the Appendix below). + + "Derivative Works" shall mean any work, whether in Source or Object + form, that is based on (or derived from) the Work and for which the + editorial revisions, annotations, elaborations, or other modifications + represent, as a whole, an original work of authorship. For the purposes + of this License, Derivative Works shall not include works that remain + separable from, or merely link (or bind by name) to the interfaces of, + the Work and Derivative Works thereof. + + "Contribution" shall mean any work of authorship, including + the original version of the Work and any modifications or additions + to that Work or Derivative Works thereof, that is intentionally + submitted to Licensor for inclusion in the Work by the copyright owner + or by an individual or Legal Entity authorized to submit on behalf of + the copyright owner. For the purposes of this definition, "submitted" + means any form of electronic, verbal, or written communication sent + to the Licensor or its representatives, including but not limited to + communication on electronic mailing lists, source code control systems, + and issue tracking systems that are managed by, or on behalf of, the + Licensor for the purpose of discussing and improving the Work, but + excluding communication that is conspicuously marked or otherwise + designated in writing by the copyright owner as "Not a Contribution." 
+ + "Contributor" shall mean Licensor and any individual or Legal Entity + on behalf of whom a Contribution has been received by Licensor and + subsequently incorporated within the Work. + + 2. Grant of Copyright License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + copyright license to reproduce, prepare Derivative Works of, + publicly display, publicly perform, sublicense, and distribute the + Work and such Derivative Works in Source or Object form. + + 3. Grant of Patent License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + (except as stated in this section) patent license to make, have made, + use, offer to sell, sell, import, and otherwise transfer the Work, + where such license applies only to those patent claims licensable + by such Contributor that are necessarily infringed by their + Contribution(s) alone or by combination of their Contribution(s) + with the Work to which such Contribution(s) was submitted. If You + institute patent litigation against any entity (including a + cross-claim or counterclaim in a lawsuit) alleging that the Work + or a Contribution incorporated within the Work constitutes direct + or contributory patent infringement, then any patent licenses + granted to You under this License for that Work shall terminate + as of the date such litigation is filed. + + 4. Redistribution. 
You may reproduce and distribute copies of the + Work or Derivative Works thereof in any medium, with or without + modifications, and in Source or Object form, provided that You + meet the following conditions: + + (a) You must give any other recipients of the Work or + Derivative Works a copy of this License; and + + (b) You must cause any modified files to carry prominent notices + stating that You changed the files; and + + (c) You must retain, in the Source form of any Derivative Works + that You distribute, all copyright, patent, trademark, and + attribution notices from the Source form of the Work, + excluding those notices that do not pertain to any part of + the Derivative Works; and + + (d) If the Work includes a "NOTICE" text file as part of its + distribution, then any Derivative Works that You distribute must + include a readable copy of the attribution notices contained + within such NOTICE file, excluding those notices that do not + pertain to any part of the Derivative Works, in at least one + of the following places: within a NOTICE text file distributed + as part of the Derivative Works; within the Source form or + documentation, if provided along with the Derivative Works; or, + within a display generated by the Derivative Works, if and + wherever such third-party notices normally appear. The contents + of the NOTICE file are for informational purposes only and + do not modify the License. You may add Your own attribution + notices within Derivative Works that You distribute, alongside + or as an addendum to the NOTICE text from the Work, provided + that such additional attribution notices cannot be construed + as modifying the License. 
+ + You may add Your own copyright statement to Your modifications and + may provide additional or different license terms and conditions + for use, reproduction, or distribution of Your modifications, or + for any such Derivative Works as a whole, provided Your use, + reproduction, and distribution of the Work otherwise complies with + the conditions stated in this License. + + 5. Submission of Contributions. Unless You explicitly state otherwise, + any Contribution intentionally submitted for inclusion in the Work + by You to the Licensor shall be under the terms and conditions of + this License, without any additional terms or conditions. + Notwithstanding the above, nothing herein shall supersede or modify + the terms of any separate license agreement you may have executed + with Licensor regarding such Contributions. + + 6. Trademarks. This License does not grant permission to use the trade + names, trademarks, service marks, or product names of the Licensor, + except as required for reasonable and customary use in describing the + origin of the Work and reproducing the content of the NOTICE file. + + 7. Disclaimer of Warranty. Unless required by applicable law or + agreed to in writing, Licensor provides the Work (and each + Contributor provides its Contributions) on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or + implied, including, without limitation, any warranties or conditions + of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A + PARTICULAR PURPOSE. You are solely responsible for determining the + appropriateness of using or redistributing the Work and assume any + risks associated with Your exercise of permissions under this License. + + 8. Limitation of Liability. 
In no event and under no legal theory, + whether in tort (including negligence), contract, or otherwise, + unless required by applicable law (such as deliberate and grossly + negligent acts) or agreed to in writing, shall any Contributor be + liable to You for damages, including any direct, indirect, special, + incidental, or consequential damages of any character arising as a + result of this License or out of the use or inability to use the + Work (including but not limited to damages for loss of goodwill, + work stoppage, computer failure or malfunction, or any and all + other commercial damages or losses), even if such Contributor + has been advised of the possibility of such damages. + + 9. Accepting Warranty or Additional Liability. While redistributing + the Work or Derivative Works thereof, You may choose to offer, + and charge a fee for, acceptance of support, warranty, indemnity, + or other liability obligations and/or rights consistent with this + License. However, in accepting such obligations, You may act only + on Your own behalf and on Your sole responsibility, not on behalf + of any other Contributor, and only if You agree to indemnify, + defend, and hold each Contributor harmless for any liability + incurred by, or claims asserted against, such Contributor by reason + of your accepting any such warranty or additional liability. + + END OF TERMS AND CONDITIONS + + APPENDIX: How to apply the Apache License to your work. + + To apply the Apache License to your work, attach the following + boilerplate notice, with the fields enclosed by brackets "{}" + replaced with your own identifying information. (Don't include + the brackets!) The text should be enclosed in the appropriate + comment syntax for the file format. We also recommend that a + file or class name and description of purpose be included on the + same "printed page" as the copyright notice for easier + identification within third-party archives. + + Copyright 2018-2020 Amazon.com, Inc. 
or its affiliates. All Rights Reserved. + + Licensed under the Apache License, Version 2.0 (the "License"); + you may not use this file except in compliance with the License. + You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + + Unless required by applicable law or agreed to in writing, software + distributed under the License is distributed on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + See the License for the specific language governing permissions and + limitations under the License. diff --git a/amplify/functions/downloadDocument/node_modules/@smithy/service-error-classification/README.md b/amplify/functions/downloadDocument/node_modules/@smithy/service-error-classification/README.md new file mode 100644 index 0000000..902dd43 --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@smithy/service-error-classification/README.md @@ -0,0 +1,4 @@ +# @smithy/service-error-classification + +[![NPM version](https://img.shields.io/npm/v/@smithy/service-error-classification/latest.svg)](https://www.npmjs.com/package/@smithy/service-error-classification) +[![NPM downloads](https://img.shields.io/npm/dm/@smithy/service-error-classification.svg)](https://www.npmjs.com/package/@smithy/service-error-classification) diff --git a/amplify/functions/downloadDocument/node_modules/@smithy/service-error-classification/dist-cjs/constants.js b/amplify/functions/downloadDocument/node_modules/@smithy/service-error-classification/dist-cjs/constants.js new file mode 100644 index 0000000..532e610 --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@smithy/service-error-classification/dist-cjs/constants.js @@ -0,0 +1 @@ +module.exports = require("./index.js"); \ No newline at end of file diff --git a/amplify/functions/downloadDocument/node_modules/@smithy/service-error-classification/dist-cjs/index.js 
b/amplify/functions/downloadDocument/node_modules/@smithy/service-error-classification/dist-cjs/index.js new file mode 100644 index 0000000..bcca2b3 --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@smithy/service-error-classification/dist-cjs/index.js @@ -0,0 +1,109 @@ +var __defProp = Object.defineProperty; +var __getOwnPropDesc = Object.getOwnPropertyDescriptor; +var __getOwnPropNames = Object.getOwnPropertyNames; +var __hasOwnProp = Object.prototype.hasOwnProperty; +var __name = (target, value) => __defProp(target, "name", { value, configurable: true }); +var __export = (target, all) => { + for (var name in all) + __defProp(target, name, { get: all[name], enumerable: true }); +}; +var __copyProps = (to, from, except, desc) => { + if (from && typeof from === "object" || typeof from === "function") { + for (let key of __getOwnPropNames(from)) + if (!__hasOwnProp.call(to, key) && key !== except) + __defProp(to, key, { get: () => from[key], enumerable: !(desc = __getOwnPropDesc(from, key)) || desc.enumerable }); + } + return to; +}; +var __toCommonJS = (mod) => __copyProps(__defProp({}, "__esModule", { value: true }), mod); + +// src/index.ts +var src_exports = {}; +__export(src_exports, { + isBrowserNetworkError: () => isBrowserNetworkError, + isClockSkewCorrectedError: () => isClockSkewCorrectedError, + isClockSkewError: () => isClockSkewError, + isRetryableByTrait: () => isRetryableByTrait, + isServerError: () => isServerError, + isThrottlingError: () => isThrottlingError, + isTransientError: () => isTransientError +}); +module.exports = __toCommonJS(src_exports); + +// src/constants.ts +var CLOCK_SKEW_ERROR_CODES = [ + "AuthFailure", + "InvalidSignatureException", + "RequestExpired", + "RequestInTheFuture", + "RequestTimeTooSkewed", + "SignatureDoesNotMatch" +]; +var THROTTLING_ERROR_CODES = [ + "BandwidthLimitExceeded", + "EC2ThrottledException", + "LimitExceededException", + "PriorRequestNotComplete", + 
"ProvisionedThroughputExceededException", + "RequestLimitExceeded", + "RequestThrottled", + "RequestThrottledException", + "SlowDown", + "ThrottledException", + "Throttling", + "ThrottlingException", + "TooManyRequestsException", + "TransactionInProgressException" + // DynamoDB +]; +var TRANSIENT_ERROR_CODES = ["TimeoutError", "RequestTimeout", "RequestTimeoutException"]; +var TRANSIENT_ERROR_STATUS_CODES = [500, 502, 503, 504]; +var NODEJS_TIMEOUT_ERROR_CODES = ["ECONNRESET", "ECONNREFUSED", "EPIPE", "ETIMEDOUT"]; + +// src/index.ts +var isRetryableByTrait = /* @__PURE__ */ __name((error) => error.$retryable !== void 0, "isRetryableByTrait"); +var isClockSkewError = /* @__PURE__ */ __name((error) => CLOCK_SKEW_ERROR_CODES.includes(error.name), "isClockSkewError"); +var isClockSkewCorrectedError = /* @__PURE__ */ __name((error) => error.$metadata?.clockSkewCorrected, "isClockSkewCorrectedError"); +var isBrowserNetworkError = /* @__PURE__ */ __name((error) => { + const errorMessages = /* @__PURE__ */ new Set([ + "Failed to fetch", + // Chrome + "NetworkError when attempting to fetch resource", + // Firefox + "The Internet connection appears to be offline", + // Safari 16 + "Load failed", + // Safari 17+ + "Network request failed" + // `cross-fetch` + ]); + const isValid = error && error instanceof TypeError; + if (!isValid) { + return false; + } + return errorMessages.has(error.message); +}, "isBrowserNetworkError"); +var isThrottlingError = /* @__PURE__ */ __name((error) => error.$metadata?.httpStatusCode === 429 || THROTTLING_ERROR_CODES.includes(error.name) || error.$retryable?.throttling == true, "isThrottlingError"); +var isTransientError = /* @__PURE__ */ __name((error, depth = 0) => isClockSkewCorrectedError(error) || TRANSIENT_ERROR_CODES.includes(error.name) || NODEJS_TIMEOUT_ERROR_CODES.includes(error?.code || "") || TRANSIENT_ERROR_STATUS_CODES.includes(error.$metadata?.httpStatusCode || 0) || isBrowserNetworkError(error) || error.cause !== void 0 && 
depth <= 10 && isTransientError(error.cause, depth + 1), "isTransientError"); +var isServerError = /* @__PURE__ */ __name((error) => { + if (error.$metadata?.httpStatusCode !== void 0) { + const statusCode = error.$metadata.httpStatusCode; + if (500 <= statusCode && statusCode <= 599 && !isTransientError(error)) { + return true; + } + return false; + } + return false; +}, "isServerError"); +// Annotate the CommonJS export names for ESM import in node: + +0 && (module.exports = { + isRetryableByTrait, + isClockSkewError, + isClockSkewCorrectedError, + isBrowserNetworkError, + isThrottlingError, + isTransientError, + isServerError +}); + diff --git a/amplify/functions/downloadDocument/node_modules/@smithy/service-error-classification/dist-es/constants.js b/amplify/functions/downloadDocument/node_modules/@smithy/service-error-classification/dist-es/constants.js new file mode 100644 index 0000000..267443b --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@smithy/service-error-classification/dist-es/constants.js @@ -0,0 +1,27 @@ +export const CLOCK_SKEW_ERROR_CODES = [ + "AuthFailure", + "InvalidSignatureException", + "RequestExpired", + "RequestInTheFuture", + "RequestTimeTooSkewed", + "SignatureDoesNotMatch", +]; +export const THROTTLING_ERROR_CODES = [ + "BandwidthLimitExceeded", + "EC2ThrottledException", + "LimitExceededException", + "PriorRequestNotComplete", + "ProvisionedThroughputExceededException", + "RequestLimitExceeded", + "RequestThrottled", + "RequestThrottledException", + "SlowDown", + "ThrottledException", + "Throttling", + "ThrottlingException", + "TooManyRequestsException", + "TransactionInProgressException", +]; +export const TRANSIENT_ERROR_CODES = ["TimeoutError", "RequestTimeout", "RequestTimeoutException"]; +export const TRANSIENT_ERROR_STATUS_CODES = [500, 502, 503, 504]; +export const NODEJS_TIMEOUT_ERROR_CODES = ["ECONNRESET", "ECONNREFUSED", "EPIPE", "ETIMEDOUT"]; diff --git 
a/amplify/functions/downloadDocument/node_modules/@smithy/service-error-classification/dist-es/index.js b/amplify/functions/downloadDocument/node_modules/@smithy/service-error-classification/dist-es/index.js new file mode 100644 index 0000000..1da4aa9 --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@smithy/service-error-classification/dist-es/index.js @@ -0,0 +1,37 @@ +import { CLOCK_SKEW_ERROR_CODES, NODEJS_TIMEOUT_ERROR_CODES, THROTTLING_ERROR_CODES, TRANSIENT_ERROR_CODES, TRANSIENT_ERROR_STATUS_CODES, } from "./constants"; +export const isRetryableByTrait = (error) => error.$retryable !== undefined; +export const isClockSkewError = (error) => CLOCK_SKEW_ERROR_CODES.includes(error.name); +export const isClockSkewCorrectedError = (error) => error.$metadata?.clockSkewCorrected; +export const isBrowserNetworkError = (error) => { + const errorMessages = new Set([ + "Failed to fetch", + "NetworkError when attempting to fetch resource", + "The Internet connection appears to be offline", + "Load failed", + "Network request failed", + ]); + const isValid = error && error instanceof TypeError; + if (!isValid) { + return false; + } + return errorMessages.has(error.message); +}; +export const isThrottlingError = (error) => error.$metadata?.httpStatusCode === 429 || + THROTTLING_ERROR_CODES.includes(error.name) || + error.$retryable?.throttling == true; +export const isTransientError = (error, depth = 0) => isClockSkewCorrectedError(error) || + TRANSIENT_ERROR_CODES.includes(error.name) || + NODEJS_TIMEOUT_ERROR_CODES.includes(error?.code || "") || + TRANSIENT_ERROR_STATUS_CODES.includes(error.$metadata?.httpStatusCode || 0) || + isBrowserNetworkError(error) || + (error.cause !== undefined && depth <= 10 && isTransientError(error.cause, depth + 1)); +export const isServerError = (error) => { + if (error.$metadata?.httpStatusCode !== undefined) { + const statusCode = error.$metadata.httpStatusCode; + if (500 <= statusCode && statusCode <= 599 && 
!isTransientError(error)) { + return true; + } + return false; + } + return false; +}; diff --git a/amplify/functions/downloadDocument/node_modules/@smithy/service-error-classification/dist-types/constants.d.ts b/amplify/functions/downloadDocument/node_modules/@smithy/service-error-classification/dist-types/constants.d.ts new file mode 100644 index 0000000..f07663b --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@smithy/service-error-classification/dist-types/constants.d.ts @@ -0,0 +1,26 @@ +/** + * Errors encountered when the client clock and server clock cannot agree on the + * current time. + * + * These errors are retryable, assuming the SDK has enabled clock skew + * correction. + */ +export declare const CLOCK_SKEW_ERROR_CODES: string[]; +/** + * Errors that indicate the SDK is being throttled. + * + * These errors are always retryable. + */ +export declare const THROTTLING_ERROR_CODES: string[]; +/** + * Error codes that indicate transient issues + */ +export declare const TRANSIENT_ERROR_CODES: string[]; +/** + * Error codes that indicate transient issues + */ +export declare const TRANSIENT_ERROR_STATUS_CODES: number[]; +/** + * Node.js system error codes that indicate timeout. + */ +export declare const NODEJS_TIMEOUT_ERROR_CODES: string[]; diff --git a/amplify/functions/downloadDocument/node_modules/@smithy/service-error-classification/dist-types/index.d.ts b/amplify/functions/downloadDocument/node_modules/@smithy/service-error-classification/dist-types/index.d.ts new file mode 100644 index 0000000..6aad102 --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@smithy/service-error-classification/dist-types/index.d.ts @@ -0,0 +1,24 @@ +import { SdkError } from "@smithy/types"; +export declare const isRetryableByTrait: (error: SdkError) => boolean; +/** + * @deprecated use isClockSkewCorrectedError. This is only used in deprecated code. 
+ */ +export declare const isClockSkewError: (error: SdkError) => boolean; +/** + * @returns whether the error resulted in a systemClockOffset aka clock skew correction. + */ +export declare const isClockSkewCorrectedError: (error: SdkError) => true | undefined; +/** + * + * @internal + */ +export declare const isBrowserNetworkError: (error: SdkError) => boolean; +export declare const isThrottlingError: (error: SdkError) => boolean; +/** + * Though NODEJS_TIMEOUT_ERROR_CODES are platform specific, they are + * included here because there is an error scenario with unknown root + * cause where the NodeHttpHandler does not decorate the Error with + * the name "TimeoutError" to be checked by the TRANSIENT_ERROR_CODES condition. + */ +export declare const isTransientError: (error: SdkError, depth?: number) => boolean; +export declare const isServerError: (error: SdkError) => boolean; diff --git a/amplify/functions/downloadDocument/node_modules/@smithy/service-error-classification/dist-types/ts3.4/constants.d.ts b/amplify/functions/downloadDocument/node_modules/@smithy/service-error-classification/dist-types/ts3.4/constants.d.ts new file mode 100644 index 0000000..74c4858 --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@smithy/service-error-classification/dist-types/ts3.4/constants.d.ts @@ -0,0 +1,26 @@ +/** + * Errors encountered when the client clock and server clock cannot agree on the + * current time. + * + * These errors are retryable, assuming the SDK has enabled clock skew + * correction. + */ +export declare const CLOCK_SKEW_ERROR_CODES: string[]; +/** + * Errors that indicate the SDK is being throttled. + * + * These errors are always retryable. 
+ */ +export declare const THROTTLING_ERROR_CODES: string[]; +/** + * Error codes that indicate transient issues + */ +export declare const TRANSIENT_ERROR_CODES: string[]; +/** + * Error codes that indicate transient issues + */ +export declare const TRANSIENT_ERROR_STATUS_CODES: number[]; +/** + * Node.js system error codes that indicate timeout. + */ +export declare const NODEJS_TIMEOUT_ERROR_CODES: string[]; diff --git a/amplify/functions/downloadDocument/node_modules/@smithy/service-error-classification/dist-types/ts3.4/index.d.ts b/amplify/functions/downloadDocument/node_modules/@smithy/service-error-classification/dist-types/ts3.4/index.d.ts new file mode 100644 index 0000000..c7909ae --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@smithy/service-error-classification/dist-types/ts3.4/index.d.ts @@ -0,0 +1,24 @@ +import { SdkError } from "@smithy/types"; +export declare const isRetryableByTrait: (error: SdkError) => boolean; +/** + * @deprecated use isClockSkewCorrectedError. This is only used in deprecated code. + */ +export declare const isClockSkewError: (error: SdkError) => boolean; +/** + * @returns whether the error resulted in a systemClockOffset aka clock skew correction. + */ +export declare const isClockSkewCorrectedError: (error: SdkError) => true | undefined; +/** + * + * @internal + */ +export declare const isBrowserNetworkError: (error: SdkError) => boolean; +export declare const isThrottlingError: (error: SdkError) => boolean; +/** + * Though NODEJS_TIMEOUT_ERROR_CODES are platform specific, they are + * included here because there is an error scenario with unknown root + * cause where the NodeHttpHandler does not decorate the Error with + * the name "TimeoutError" to be checked by the TRANSIENT_ERROR_CODES condition. 
+ */ +export declare const isTransientError: (error: SdkError, depth?: number) => boolean; +export declare const isServerError: (error: SdkError) => boolean; diff --git a/amplify/functions/downloadDocument/node_modules/@smithy/service-error-classification/package.json b/amplify/functions/downloadDocument/node_modules/@smithy/service-error-classification/package.json new file mode 100644 index 0000000..a568aee --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@smithy/service-error-classification/package.json @@ -0,0 +1,59 @@ +{ + "name": "@smithy/service-error-classification", + "version": "4.0.3", + "scripts": { + "build": "concurrently 'yarn:build:cjs' 'yarn:build:es' 'yarn:build:types && yarn build:types:downlevel'", + "build:cjs": "node ../../scripts/inline service-error-classification", + "build:es": "yarn g:tsc -p tsconfig.es.json", + "build:types": "yarn g:tsc -p tsconfig.types.json", + "build:types:downlevel": "rimraf dist-types/ts3.4 && downlevel-dts dist-types dist-types/ts3.4", + "stage-release": "rimraf ./.release && yarn pack && mkdir ./.release && tar zxvf ./package.tgz --directory ./.release && rm ./package.tgz", + "clean": "rimraf ./dist-* && rimraf *.tsbuildinfo || exit 0", + "lint": "eslint -c ../../.eslintrc.js \"src/**/*.ts\"", + "format": "prettier --config ../../prettier.config.js --ignore-path ../../.prettierignore --write \"**/*.{ts,md,json}\"", + "test": "yarn g:vitest run", + "test:watch": "yarn g:vitest watch" + }, + "main": "./dist-cjs/index.js", + "module": "./dist-es/index.js", + "types": "./dist-types/index.d.ts", + "author": { + "name": "AWS SDK for JavaScript Team", + "url": "https://aws.amazon.com/javascript/" + }, + "license": "Apache-2.0", + "devDependencies": { + "concurrently": "7.0.0", + "downlevel-dts": "0.10.1", + "rimraf": "3.0.2", + "typedoc": "0.23.23" + }, + "engines": { + "node": ">=18.0.0" + }, + "typesVersions": { + "<4.0": { + "dist-types/*": [ + "dist-types/ts3.4/*" + ] + } + }, + "files": [ + 
"dist-*/**" + ], + "homepage": "https://github.com/smithy-lang/smithy-typescript/tree/main/packages/service-error-classification", + "repository": { + "type": "git", + "url": "https://github.com/smithy-lang/smithy-typescript.git", + "directory": "packages/service-error-classification" + }, + "typedoc": { + "entryPoint": "src/index.ts" + }, + "publishConfig": { + "directory": ".release/package" + }, + "dependencies": { + "@smithy/types": "^4.2.0" + } +} \ No newline at end of file diff --git a/amplify/functions/downloadDocument/node_modules/@smithy/shared-ini-file-loader/LICENSE b/amplify/functions/downloadDocument/node_modules/@smithy/shared-ini-file-loader/LICENSE new file mode 100644 index 0000000..7b6491b --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@smithy/shared-ini-file-loader/LICENSE @@ -0,0 +1,201 @@ +Apache License + Version 2.0, January 2004 + http://www.apache.org/licenses/ + + TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION + + 1. Definitions. + + "License" shall mean the terms and conditions for use, reproduction, + and distribution as defined by Sections 1 through 9 of this document. + + "Licensor" shall mean the copyright owner or entity authorized by + the copyright owner that is granting the License. + + "Legal Entity" shall mean the union of the acting entity and all + other entities that control, are controlled by, or are under common + control with that entity. For the purposes of this definition, + "control" means (i) the power, direct or indirect, to cause the + direction or management of such entity, whether by contract or + otherwise, or (ii) ownership of fifty percent (50%) or more of the + outstanding shares, or (iii) beneficial ownership of such entity. + + "You" (or "Your") shall mean an individual or Legal Entity + exercising permissions granted by this License. 
+ + "Source" form shall mean the preferred form for making modifications, + including but not limited to software source code, documentation + source, and configuration files. + + "Object" form shall mean any form resulting from mechanical + transformation or translation of a Source form, including but + not limited to compiled object code, generated documentation, + and conversions to other media types. + + "Work" shall mean the work of authorship, whether in Source or + Object form, made available under the License, as indicated by a + copyright notice that is included in or attached to the work + (an example is provided in the Appendix below). + + "Derivative Works" shall mean any work, whether in Source or Object + form, that is based on (or derived from) the Work and for which the + editorial revisions, annotations, elaborations, or other modifications + represent, as a whole, an original work of authorship. For the purposes + of this License, Derivative Works shall not include works that remain + separable from, or merely link (or bind by name) to the interfaces of, + the Work and Derivative Works thereof. + + "Contribution" shall mean any work of authorship, including + the original version of the Work and any modifications or additions + to that Work or Derivative Works thereof, that is intentionally + submitted to Licensor for inclusion in the Work by the copyright owner + or by an individual or Legal Entity authorized to submit on behalf of + the copyright owner. 
For the purposes of this definition, "submitted" + means any form of electronic, verbal, or written communication sent + to the Licensor or its representatives, including but not limited to + communication on electronic mailing lists, source code control systems, + and issue tracking systems that are managed by, or on behalf of, the + Licensor for the purpose of discussing and improving the Work, but + excluding communication that is conspicuously marked or otherwise + designated in writing by the copyright owner as "Not a Contribution." + + "Contributor" shall mean Licensor and any individual or Legal Entity + on behalf of whom a Contribution has been received by Licensor and + subsequently incorporated within the Work. + + 2. Grant of Copyright License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + copyright license to reproduce, prepare Derivative Works of, + publicly display, publicly perform, sublicense, and distribute the + Work and such Derivative Works in Source or Object form. + + 3. Grant of Patent License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + (except as stated in this section) patent license to make, have made, + use, offer to sell, sell, import, and otherwise transfer the Work, + where such license applies only to those patent claims licensable + by such Contributor that are necessarily infringed by their + Contribution(s) alone or by combination of their Contribution(s) + with the Work to which such Contribution(s) was submitted. 
If You + institute patent litigation against any entity (including a + cross-claim or counterclaim in a lawsuit) alleging that the Work + or a Contribution incorporated within the Work constitutes direct + or contributory patent infringement, then any patent licenses + granted to You under this License for that Work shall terminate + as of the date such litigation is filed. + + 4. Redistribution. You may reproduce and distribute copies of the + Work or Derivative Works thereof in any medium, with or without + modifications, and in Source or Object form, provided that You + meet the following conditions: + + (a) You must give any other recipients of the Work or + Derivative Works a copy of this License; and + + (b) You must cause any modified files to carry prominent notices + stating that You changed the files; and + + (c) You must retain, in the Source form of any Derivative Works + that You distribute, all copyright, patent, trademark, and + attribution notices from the Source form of the Work, + excluding those notices that do not pertain to any part of + the Derivative Works; and + + (d) If the Work includes a "NOTICE" text file as part of its + distribution, then any Derivative Works that You distribute must + include a readable copy of the attribution notices contained + within such NOTICE file, excluding those notices that do not + pertain to any part of the Derivative Works, in at least one + of the following places: within a NOTICE text file distributed + as part of the Derivative Works; within the Source form or + documentation, if provided along with the Derivative Works; or, + within a display generated by the Derivative Works, if and + wherever such third-party notices normally appear. The contents + of the NOTICE file are for informational purposes only and + do not modify the License. 
You may add Your own attribution + notices within Derivative Works that You distribute, alongside + or as an addendum to the NOTICE text from the Work, provided + that such additional attribution notices cannot be construed + as modifying the License. + + You may add Your own copyright statement to Your modifications and + may provide additional or different license terms and conditions + for use, reproduction, or distribution of Your modifications, or + for any such Derivative Works as a whole, provided Your use, + reproduction, and distribution of the Work otherwise complies with + the conditions stated in this License. + + 5. Submission of Contributions. Unless You explicitly state otherwise, + any Contribution intentionally submitted for inclusion in the Work + by You to the Licensor shall be under the terms and conditions of + this License, without any additional terms or conditions. + Notwithstanding the above, nothing herein shall supersede or modify + the terms of any separate license agreement you may have executed + with Licensor regarding such Contributions. + + 6. Trademarks. This License does not grant permission to use the trade + names, trademarks, service marks, or product names of the Licensor, + except as required for reasonable and customary use in describing the + origin of the Work and reproducing the content of the NOTICE file. + + 7. Disclaimer of Warranty. Unless required by applicable law or + agreed to in writing, Licensor provides the Work (and each + Contributor provides its Contributions) on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or + implied, including, without limitation, any warranties or conditions + of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A + PARTICULAR PURPOSE. You are solely responsible for determining the + appropriateness of using or redistributing the Work and assume any + risks associated with Your exercise of permissions under this License. + + 8. 
Limitation of Liability. In no event and under no legal theory, + whether in tort (including negligence), contract, or otherwise, + unless required by applicable law (such as deliberate and grossly + negligent acts) or agreed to in writing, shall any Contributor be + liable to You for damages, including any direct, indirect, special, + incidental, or consequential damages of any character arising as a + result of this License or out of the use or inability to use the + Work (including but not limited to damages for loss of goodwill, + work stoppage, computer failure or malfunction, or any and all + other commercial damages or losses), even if such Contributor + has been advised of the possibility of such damages. + + 9. Accepting Warranty or Additional Liability. While redistributing + the Work or Derivative Works thereof, You may choose to offer, + and charge a fee for, acceptance of support, warranty, indemnity, + or other liability obligations and/or rights consistent with this + License. However, in accepting such obligations, You may act only + on Your own behalf and on Your sole responsibility, not on behalf + of any other Contributor, and only if You agree to indemnify, + defend, and hold each Contributor harmless for any liability + incurred by, or claims asserted against, such Contributor by reason + of your accepting any such warranty or additional liability. + + END OF TERMS AND CONDITIONS + + APPENDIX: How to apply the Apache License to your work. + + To apply the Apache License to your work, attach the following + boilerplate notice, with the fields enclosed by brackets "{}" + replaced with your own identifying information. (Don't include + the brackets!) The text should be enclosed in the appropriate + comment syntax for the file format. We also recommend that a + file or class name and description of purpose be included on the + same "printed page" as the copyright notice for easier + identification within third-party archives. 
+ + Copyright 2018-2020 Amazon.com, Inc. or its affiliates. All Rights Reserved. + + Licensed under the Apache License, Version 2.0 (the "License"); + you may not use this file except in compliance with the License. + You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + + Unless required by applicable law or agreed to in writing, software + distributed under the License is distributed on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + See the License for the specific language governing permissions and + limitations under the License. \ No newline at end of file diff --git a/amplify/functions/downloadDocument/node_modules/@smithy/shared-ini-file-loader/README.md b/amplify/functions/downloadDocument/node_modules/@smithy/shared-ini-file-loader/README.md new file mode 100644 index 0000000..45a4b2e --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@smithy/shared-ini-file-loader/README.md @@ -0,0 +1,105 @@ +# @smithy/shared-ini-file-loader + +[![NPM version](https://img.shields.io/npm/v/@smithy/shared-ini-file-loader/latest.svg)](https://www.npmjs.com/package/@smithy/shared-ini-file-loader) +[![NPM downloads](https://img.shields.io/npm/dm/@smithy/shared-ini-file-loader.svg)](https://www.npmjs.com/package/@smithy/shared-ini-file-loader) + +## AWS Shared Configuration File Loader + +This module provides a function that reads from AWS SDK configuration files and +returns a promise that will resolve with a hash of the parsed contents of the +AWS credentials file and of the AWS config file. 
Given the [sample +files](#sample-files) below, the promise returned by `loadSharedConfigFiles` +would resolve with: + +```javascript +{ + configFile: { + 'default': { + aws_access_key_id: 'foo', + aws_secret_access_key: 'bar', + }, + dev: { + aws_access_key_id: 'foo1', + aws_secret_access_key: 'bar1', + }, + prod: { + aws_access_key_id: 'foo2', + aws_secret_access_key: 'bar2', + }, + 'testing host': { + aws_access_key_id: 'foo4', + aws_secret_access_key: 'bar4', + } + }, + credentialsFile: { + 'default': { + aws_access_key_id: 'foo', + aws_secret_access_key: 'bar', + }, + dev: { + aws_access_key_id: 'foo1', + aws_secret_access_key: 'bar1', + }, + prod: { + aws_access_key_id: 'foo2', + aws_secret_access_key: 'bar2', + } + }, +} +``` + +If a file is not found, its key (`configFile` or `credentialsFile`) will instead +have a value of an empty object. + +## Supported configuration + +You may customize how the files are loaded by providing an options hash to the +`loadSharedConfigFiles` function. The following options are supported: + +- `filepath` - The path to the shared credentials file. If not specified, the + provider will use the value in the `AWS_SHARED_CREDENTIALS_FILE` environment + variable or a default of `~/.aws/credentials`. +- `configFilepath` - The path to the shared config file. If not specified, the + provider will use the value in the `AWS_CONFIG_FILE` environment variable or a + default of `~/.aws/config`. +- `ignoreCache` - The provider will normally cache the contents of the files it + loads. This option will force the provider to reload the files from disk. + Defaults to `false`. 
+ +## Sample files + +### `~/.aws/credentials` + +```ini +[default] +aws_access_key_id=foo +aws_secret_access_key=bar + +[dev] +aws_access_key_id=foo2 +aws_secret_access_key=bar2 + +[prod] +aws_access_key_id=foo3 +aws_secret_access_key=bar3 +``` + +### `~/.aws/config` + +```ini +[default] +aws_access_key_id=foo +aws_secret_access_key=bar + +[profile dev] +aws_access_key_id=foo2 +aws_secret_access_key=bar2 + +[profile prod] +aws_access_key_id=foo3 +aws_secret_access_key=bar3 + +[profile "testing host"] +aws_access_key_id=foo4 +aws_secret_access_key=bar4 +``` diff --git a/amplify/functions/downloadDocument/node_modules/@smithy/shared-ini-file-loader/dist-cjs/getConfigData.js b/amplify/functions/downloadDocument/node_modules/@smithy/shared-ini-file-loader/dist-cjs/getConfigData.js new file mode 100644 index 0000000..532e610 --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@smithy/shared-ini-file-loader/dist-cjs/getConfigData.js @@ -0,0 +1 @@ +module.exports = require("./index.js"); \ No newline at end of file diff --git a/amplify/functions/downloadDocument/node_modules/@smithy/shared-ini-file-loader/dist-cjs/getConfigFilepath.js b/amplify/functions/downloadDocument/node_modules/@smithy/shared-ini-file-loader/dist-cjs/getConfigFilepath.js new file mode 100644 index 0000000..532e610 --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@smithy/shared-ini-file-loader/dist-cjs/getConfigFilepath.js @@ -0,0 +1 @@ +module.exports = require("./index.js"); \ No newline at end of file diff --git a/amplify/functions/downloadDocument/node_modules/@smithy/shared-ini-file-loader/dist-cjs/getCredentialsFilepath.js b/amplify/functions/downloadDocument/node_modules/@smithy/shared-ini-file-loader/dist-cjs/getCredentialsFilepath.js new file mode 100644 index 0000000..532e610 --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@smithy/shared-ini-file-loader/dist-cjs/getCredentialsFilepath.js @@ -0,0 +1 @@ +module.exports = 
require("./index.js"); \ No newline at end of file diff --git a/amplify/functions/downloadDocument/node_modules/@smithy/shared-ini-file-loader/dist-cjs/getHomeDir.js b/amplify/functions/downloadDocument/node_modules/@smithy/shared-ini-file-loader/dist-cjs/getHomeDir.js new file mode 100644 index 0000000..2a4f737 --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@smithy/shared-ini-file-loader/dist-cjs/getHomeDir.js @@ -0,0 +1,26 @@ +"use strict"; +Object.defineProperty(exports, "__esModule", { value: true }); +exports.getHomeDir = void 0; +const os_1 = require("os"); +const path_1 = require("path"); +const homeDirCache = {}; +const getHomeDirCacheKey = () => { + if (process && process.geteuid) { + return `${process.geteuid()}`; + } + return "DEFAULT"; +}; +const getHomeDir = () => { + const { HOME, USERPROFILE, HOMEPATH, HOMEDRIVE = `C:${path_1.sep}` } = process.env; + if (HOME) + return HOME; + if (USERPROFILE) + return USERPROFILE; + if (HOMEPATH) + return `${HOMEDRIVE}${HOMEPATH}`; + const homeDirCacheKey = getHomeDirCacheKey(); + if (!homeDirCache[homeDirCacheKey]) + homeDirCache[homeDirCacheKey] = (0, os_1.homedir)(); + return homeDirCache[homeDirCacheKey]; +}; +exports.getHomeDir = getHomeDir; diff --git a/amplify/functions/downloadDocument/node_modules/@smithy/shared-ini-file-loader/dist-cjs/getProfileName.js b/amplify/functions/downloadDocument/node_modules/@smithy/shared-ini-file-loader/dist-cjs/getProfileName.js new file mode 100644 index 0000000..532e610 --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@smithy/shared-ini-file-loader/dist-cjs/getProfileName.js @@ -0,0 +1 @@ +module.exports = require("./index.js"); \ No newline at end of file diff --git a/amplify/functions/downloadDocument/node_modules/@smithy/shared-ini-file-loader/dist-cjs/getSSOTokenFilepath.js b/amplify/functions/downloadDocument/node_modules/@smithy/shared-ini-file-loader/dist-cjs/getSSOTokenFilepath.js new file mode 100644 index 0000000..30d97b3 
--- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@smithy/shared-ini-file-loader/dist-cjs/getSSOTokenFilepath.js @@ -0,0 +1,12 @@ +"use strict"; +Object.defineProperty(exports, "__esModule", { value: true }); +exports.getSSOTokenFilepath = void 0; +const crypto_1 = require("crypto"); +const path_1 = require("path"); +const getHomeDir_1 = require("./getHomeDir"); +const getSSOTokenFilepath = (id) => { + const hasher = (0, crypto_1.createHash)("sha1"); + const cacheName = hasher.update(id).digest("hex"); + return (0, path_1.join)((0, getHomeDir_1.getHomeDir)(), ".aws", "sso", "cache", `${cacheName}.json`); +}; +exports.getSSOTokenFilepath = getSSOTokenFilepath; diff --git a/amplify/functions/downloadDocument/node_modules/@smithy/shared-ini-file-loader/dist-cjs/getSSOTokenFromFile.js b/amplify/functions/downloadDocument/node_modules/@smithy/shared-ini-file-loader/dist-cjs/getSSOTokenFromFile.js new file mode 100644 index 0000000..688accb --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@smithy/shared-ini-file-loader/dist-cjs/getSSOTokenFromFile.js @@ -0,0 +1,12 @@ +"use strict"; +Object.defineProperty(exports, "__esModule", { value: true }); +exports.getSSOTokenFromFile = void 0; +const fs_1 = require("fs"); +const getSSOTokenFilepath_1 = require("./getSSOTokenFilepath"); +const { readFile } = fs_1.promises; +const getSSOTokenFromFile = async (id) => { + const ssoTokenFilepath = (0, getSSOTokenFilepath_1.getSSOTokenFilepath)(id); + const ssoTokenText = await readFile(ssoTokenFilepath, "utf8"); + return JSON.parse(ssoTokenText); +}; +exports.getSSOTokenFromFile = getSSOTokenFromFile; diff --git a/amplify/functions/downloadDocument/node_modules/@smithy/shared-ini-file-loader/dist-cjs/getSsoSessionData.js b/amplify/functions/downloadDocument/node_modules/@smithy/shared-ini-file-loader/dist-cjs/getSsoSessionData.js new file mode 100644 index 0000000..532e610 --- /dev/null +++ 
b/amplify/functions/downloadDocument/node_modules/@smithy/shared-ini-file-loader/dist-cjs/getSsoSessionData.js @@ -0,0 +1 @@ +module.exports = require("./index.js"); \ No newline at end of file diff --git a/amplify/functions/downloadDocument/node_modules/@smithy/shared-ini-file-loader/dist-cjs/index.js b/amplify/functions/downloadDocument/node_modules/@smithy/shared-ini-file-loader/dist-cjs/index.js new file mode 100644 index 0000000..de59bfa --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@smithy/shared-ini-file-loader/dist-cjs/index.js @@ -0,0 +1,206 @@ +var __defProp = Object.defineProperty; +var __getOwnPropDesc = Object.getOwnPropertyDescriptor; +var __getOwnPropNames = Object.getOwnPropertyNames; +var __hasOwnProp = Object.prototype.hasOwnProperty; +var __name = (target, value) => __defProp(target, "name", { value, configurable: true }); +var __export = (target, all) => { + for (var name in all) + __defProp(target, name, { get: all[name], enumerable: true }); +}; +var __copyProps = (to, from, except, desc) => { + if (from && typeof from === "object" || typeof from === "function") { + for (let key of __getOwnPropNames(from)) + if (!__hasOwnProp.call(to, key) && key !== except) + __defProp(to, key, { get: () => from[key], enumerable: !(desc = __getOwnPropDesc(from, key)) || desc.enumerable }); + } + return to; +}; +var __reExport = (target, mod, secondTarget) => (__copyProps(target, mod, "default"), secondTarget && __copyProps(secondTarget, mod, "default")); +var __toCommonJS = (mod) => __copyProps(__defProp({}, "__esModule", { value: true }), mod); + +// src/index.ts +var src_exports = {}; +__export(src_exports, { + CONFIG_PREFIX_SEPARATOR: () => CONFIG_PREFIX_SEPARATOR, + DEFAULT_PROFILE: () => DEFAULT_PROFILE, + ENV_PROFILE: () => ENV_PROFILE, + getProfileName: () => getProfileName, + loadSharedConfigFiles: () => loadSharedConfigFiles, + loadSsoSessionData: () => loadSsoSessionData, + parseKnownFiles: () => parseKnownFiles +}); 
+module.exports = __toCommonJS(src_exports); +__reExport(src_exports, require("././getHomeDir"), module.exports); + +// src/getProfileName.ts +var ENV_PROFILE = "AWS_PROFILE"; +var DEFAULT_PROFILE = "default"; +var getProfileName = /* @__PURE__ */ __name((init) => init.profile || process.env[ENV_PROFILE] || DEFAULT_PROFILE, "getProfileName"); + +// src/index.ts +__reExport(src_exports, require("././getSSOTokenFilepath"), module.exports); +__reExport(src_exports, require("././getSSOTokenFromFile"), module.exports); + +// src/loadSharedConfigFiles.ts + + +// src/getConfigData.ts +var import_types = require("@smithy/types"); +var getConfigData = /* @__PURE__ */ __name((data) => Object.entries(data).filter(([key]) => { + const indexOfSeparator = key.indexOf(CONFIG_PREFIX_SEPARATOR); + if (indexOfSeparator === -1) { + return false; + } + return Object.values(import_types.IniSectionType).includes(key.substring(0, indexOfSeparator)); +}).reduce( + (acc, [key, value]) => { + const indexOfSeparator = key.indexOf(CONFIG_PREFIX_SEPARATOR); + const updatedKey = key.substring(0, indexOfSeparator) === import_types.IniSectionType.PROFILE ? key.substring(indexOfSeparator + 1) : key; + acc[updatedKey] = value; + return acc; + }, + { + // Populate default profile, if present. 
+ ...data.default && { default: data.default } + } +), "getConfigData"); + +// src/getConfigFilepath.ts +var import_path = require("path"); +var import_getHomeDir = require("././getHomeDir"); +var ENV_CONFIG_PATH = "AWS_CONFIG_FILE"; +var getConfigFilepath = /* @__PURE__ */ __name(() => process.env[ENV_CONFIG_PATH] || (0, import_path.join)((0, import_getHomeDir.getHomeDir)(), ".aws", "config"), "getConfigFilepath"); + +// src/getCredentialsFilepath.ts + +var import_getHomeDir2 = require("././getHomeDir"); +var ENV_CREDENTIALS_PATH = "AWS_SHARED_CREDENTIALS_FILE"; +var getCredentialsFilepath = /* @__PURE__ */ __name(() => process.env[ENV_CREDENTIALS_PATH] || (0, import_path.join)((0, import_getHomeDir2.getHomeDir)(), ".aws", "credentials"), "getCredentialsFilepath"); + +// src/loadSharedConfigFiles.ts +var import_getHomeDir3 = require("././getHomeDir"); + +// src/parseIni.ts + +var prefixKeyRegex = /^([\w-]+)\s(["'])?([\w-@\+\.%:/]+)\2$/; +var profileNameBlockList = ["__proto__", "profile __proto__"]; +var parseIni = /* @__PURE__ */ __name((iniData) => { + const map = {}; + let currentSection; + let currentSubSection; + for (const iniLine of iniData.split(/\r?\n/)) { + const trimmedLine = iniLine.split(/(^|\s)[;#]/)[0].trim(); + const isSection = trimmedLine[0] === "[" && trimmedLine[trimmedLine.length - 1] === "]"; + if (isSection) { + currentSection = void 0; + currentSubSection = void 0; + const sectionName = trimmedLine.substring(1, trimmedLine.length - 1); + const matches = prefixKeyRegex.exec(sectionName); + if (matches) { + const [, prefix, , name] = matches; + if (Object.values(import_types.IniSectionType).includes(prefix)) { + currentSection = [prefix, name].join(CONFIG_PREFIX_SEPARATOR); + } + } else { + currentSection = sectionName; + } + if (profileNameBlockList.includes(sectionName)) { + throw new Error(`Found invalid profile name "${sectionName}"`); + } + } else if (currentSection) { + const indexOfEqualsSign = trimmedLine.indexOf("="); + if (![0, 
-1].includes(indexOfEqualsSign)) { + const [name, value] = [ + trimmedLine.substring(0, indexOfEqualsSign).trim(), + trimmedLine.substring(indexOfEqualsSign + 1).trim() + ]; + if (value === "") { + currentSubSection = name; + } else { + if (currentSubSection && iniLine.trimStart() === iniLine) { + currentSubSection = void 0; + } + map[currentSection] = map[currentSection] || {}; + const key = currentSubSection ? [currentSubSection, name].join(CONFIG_PREFIX_SEPARATOR) : name; + map[currentSection][key] = value; + } + } + } + } + return map; +}, "parseIni"); + +// src/loadSharedConfigFiles.ts +var import_slurpFile = require("././slurpFile"); +var swallowError = /* @__PURE__ */ __name(() => ({}), "swallowError"); +var CONFIG_PREFIX_SEPARATOR = "."; +var loadSharedConfigFiles = /* @__PURE__ */ __name(async (init = {}) => { + const { filepath = getCredentialsFilepath(), configFilepath = getConfigFilepath() } = init; + const homeDir = (0, import_getHomeDir3.getHomeDir)(); + const relativeHomeDirPrefix = "~/"; + let resolvedFilepath = filepath; + if (filepath.startsWith(relativeHomeDirPrefix)) { + resolvedFilepath = (0, import_path.join)(homeDir, filepath.slice(2)); + } + let resolvedConfigFilepath = configFilepath; + if (configFilepath.startsWith(relativeHomeDirPrefix)) { + resolvedConfigFilepath = (0, import_path.join)(homeDir, configFilepath.slice(2)); + } + const parsedFiles = await Promise.all([ + (0, import_slurpFile.slurpFile)(resolvedConfigFilepath, { + ignoreCache: init.ignoreCache + }).then(parseIni).then(getConfigData).catch(swallowError), + (0, import_slurpFile.slurpFile)(resolvedFilepath, { + ignoreCache: init.ignoreCache + }).then(parseIni).catch(swallowError) + ]); + return { + configFile: parsedFiles[0], + credentialsFile: parsedFiles[1] + }; +}, "loadSharedConfigFiles"); + +// src/getSsoSessionData.ts + +var getSsoSessionData = /* @__PURE__ */ __name((data) => Object.entries(data).filter(([key]) => key.startsWith(import_types.IniSectionType.SSO_SESSION + 
CONFIG_PREFIX_SEPARATOR)).reduce((acc, [key, value]) => ({ ...acc, [key.substring(key.indexOf(CONFIG_PREFIX_SEPARATOR) + 1)]: value }), {}), "getSsoSessionData"); + +// src/loadSsoSessionData.ts +var import_slurpFile2 = require("././slurpFile"); +var swallowError2 = /* @__PURE__ */ __name(() => ({}), "swallowError"); +var loadSsoSessionData = /* @__PURE__ */ __name(async (init = {}) => (0, import_slurpFile2.slurpFile)(init.configFilepath ?? getConfigFilepath()).then(parseIni).then(getSsoSessionData).catch(swallowError2), "loadSsoSessionData"); + +// src/mergeConfigFiles.ts +var mergeConfigFiles = /* @__PURE__ */ __name((...files) => { + const merged = {}; + for (const file of files) { + for (const [key, values] of Object.entries(file)) { + if (merged[key] !== void 0) { + Object.assign(merged[key], values); + } else { + merged[key] = values; + } + } + } + return merged; +}, "mergeConfigFiles"); + +// src/parseKnownFiles.ts +var parseKnownFiles = /* @__PURE__ */ __name(async (init) => { + const parsedFiles = await loadSharedConfigFiles(init); + return mergeConfigFiles(parsedFiles.configFile, parsedFiles.credentialsFile); +}, "parseKnownFiles"); +// Annotate the CommonJS export names for ESM import in node: + +0 && (module.exports = { + getHomeDir, + ENV_PROFILE, + DEFAULT_PROFILE, + getProfileName, + getSSOTokenFilepath, + getSSOTokenFromFile, + CONFIG_PREFIX_SEPARATOR, + loadSharedConfigFiles, + loadSsoSessionData, + parseKnownFiles +}); + diff --git a/amplify/functions/downloadDocument/node_modules/@smithy/shared-ini-file-loader/dist-cjs/loadSharedConfigFiles.js b/amplify/functions/downloadDocument/node_modules/@smithy/shared-ini-file-loader/dist-cjs/loadSharedConfigFiles.js new file mode 100644 index 0000000..532e610 --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@smithy/shared-ini-file-loader/dist-cjs/loadSharedConfigFiles.js @@ -0,0 +1 @@ +module.exports = require("./index.js"); \ No newline at end of file diff --git 
a/amplify/functions/downloadDocument/node_modules/@smithy/shared-ini-file-loader/dist-cjs/loadSsoSessionData.js b/amplify/functions/downloadDocument/node_modules/@smithy/shared-ini-file-loader/dist-cjs/loadSsoSessionData.js new file mode 100644 index 0000000..532e610 --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@smithy/shared-ini-file-loader/dist-cjs/loadSsoSessionData.js @@ -0,0 +1 @@ +module.exports = require("./index.js"); \ No newline at end of file diff --git a/amplify/functions/downloadDocument/node_modules/@smithy/shared-ini-file-loader/dist-cjs/mergeConfigFiles.js b/amplify/functions/downloadDocument/node_modules/@smithy/shared-ini-file-loader/dist-cjs/mergeConfigFiles.js new file mode 100644 index 0000000..532e610 --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@smithy/shared-ini-file-loader/dist-cjs/mergeConfigFiles.js @@ -0,0 +1 @@ +module.exports = require("./index.js"); \ No newline at end of file diff --git a/amplify/functions/downloadDocument/node_modules/@smithy/shared-ini-file-loader/dist-cjs/parseIni.js b/amplify/functions/downloadDocument/node_modules/@smithy/shared-ini-file-loader/dist-cjs/parseIni.js new file mode 100644 index 0000000..532e610 --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@smithy/shared-ini-file-loader/dist-cjs/parseIni.js @@ -0,0 +1 @@ +module.exports = require("./index.js"); \ No newline at end of file diff --git a/amplify/functions/downloadDocument/node_modules/@smithy/shared-ini-file-loader/dist-cjs/parseKnownFiles.js b/amplify/functions/downloadDocument/node_modules/@smithy/shared-ini-file-loader/dist-cjs/parseKnownFiles.js new file mode 100644 index 0000000..532e610 --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@smithy/shared-ini-file-loader/dist-cjs/parseKnownFiles.js @@ -0,0 +1 @@ +module.exports = require("./index.js"); \ No newline at end of file diff --git 
a/amplify/functions/downloadDocument/node_modules/@smithy/shared-ini-file-loader/dist-cjs/slurpFile.js b/amplify/functions/downloadDocument/node_modules/@smithy/shared-ini-file-loader/dist-cjs/slurpFile.js new file mode 100644 index 0000000..82d7d65 --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@smithy/shared-ini-file-loader/dist-cjs/slurpFile.js @@ -0,0 +1,13 @@ +"use strict"; +Object.defineProperty(exports, "__esModule", { value: true }); +exports.slurpFile = void 0; +const fs_1 = require("fs"); +const { readFile } = fs_1.promises; +const filePromisesHash = {}; +const slurpFile = (path, options) => { + if (!filePromisesHash[path] || (options === null || options === void 0 ? void 0 : options.ignoreCache)) { + filePromisesHash[path] = readFile(path, "utf8"); + } + return filePromisesHash[path]; +}; +exports.slurpFile = slurpFile; diff --git a/amplify/functions/downloadDocument/node_modules/@smithy/shared-ini-file-loader/dist-cjs/types.js b/amplify/functions/downloadDocument/node_modules/@smithy/shared-ini-file-loader/dist-cjs/types.js new file mode 100644 index 0000000..532e610 --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@smithy/shared-ini-file-loader/dist-cjs/types.js @@ -0,0 +1 @@ +module.exports = require("./index.js"); \ No newline at end of file diff --git a/amplify/functions/downloadDocument/node_modules/@smithy/shared-ini-file-loader/dist-es/getConfigData.js b/amplify/functions/downloadDocument/node_modules/@smithy/shared-ini-file-loader/dist-es/getConfigData.js new file mode 100644 index 0000000..4579286 --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@smithy/shared-ini-file-loader/dist-es/getConfigData.js @@ -0,0 +1,18 @@ +import { IniSectionType } from "@smithy/types"; +import { CONFIG_PREFIX_SEPARATOR } from "./loadSharedConfigFiles"; +export const getConfigData = (data) => Object.entries(data) + .filter(([key]) => { + const indexOfSeparator = key.indexOf(CONFIG_PREFIX_SEPARATOR); + if 
(indexOfSeparator === -1) { + return false; + } + return Object.values(IniSectionType).includes(key.substring(0, indexOfSeparator)); +}) + .reduce((acc, [key, value]) => { + const indexOfSeparator = key.indexOf(CONFIG_PREFIX_SEPARATOR); + const updatedKey = key.substring(0, indexOfSeparator) === IniSectionType.PROFILE ? key.substring(indexOfSeparator + 1) : key; + acc[updatedKey] = value; + return acc; +}, { + ...(data.default && { default: data.default }), +}); diff --git a/amplify/functions/downloadDocument/node_modules/@smithy/shared-ini-file-loader/dist-es/getConfigFilepath.js b/amplify/functions/downloadDocument/node_modules/@smithy/shared-ini-file-loader/dist-es/getConfigFilepath.js new file mode 100644 index 0000000..ca07c2d --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@smithy/shared-ini-file-loader/dist-es/getConfigFilepath.js @@ -0,0 +1,4 @@ +import { join } from "path"; +import { getHomeDir } from "./getHomeDir"; +export const ENV_CONFIG_PATH = "AWS_CONFIG_FILE"; +export const getConfigFilepath = () => process.env[ENV_CONFIG_PATH] || join(getHomeDir(), ".aws", "config"); diff --git a/amplify/functions/downloadDocument/node_modules/@smithy/shared-ini-file-loader/dist-es/getCredentialsFilepath.js b/amplify/functions/downloadDocument/node_modules/@smithy/shared-ini-file-loader/dist-es/getCredentialsFilepath.js new file mode 100644 index 0000000..393c0ae --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@smithy/shared-ini-file-loader/dist-es/getCredentialsFilepath.js @@ -0,0 +1,4 @@ +import { join } from "path"; +import { getHomeDir } from "./getHomeDir"; +export const ENV_CREDENTIALS_PATH = "AWS_SHARED_CREDENTIALS_FILE"; +export const getCredentialsFilepath = () => process.env[ENV_CREDENTIALS_PATH] || join(getHomeDir(), ".aws", "credentials"); diff --git a/amplify/functions/downloadDocument/node_modules/@smithy/shared-ini-file-loader/dist-es/getHomeDir.js 
b/amplify/functions/downloadDocument/node_modules/@smithy/shared-ini-file-loader/dist-es/getHomeDir.js new file mode 100644 index 0000000..58772af --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@smithy/shared-ini-file-loader/dist-es/getHomeDir.js @@ -0,0 +1,22 @@ +import { homedir } from "os"; +import { sep } from "path"; +const homeDirCache = {}; +const getHomeDirCacheKey = () => { + if (process && process.geteuid) { + return `${process.geteuid()}`; + } + return "DEFAULT"; +}; +export const getHomeDir = () => { + const { HOME, USERPROFILE, HOMEPATH, HOMEDRIVE = `C:${sep}` } = process.env; + if (HOME) + return HOME; + if (USERPROFILE) + return USERPROFILE; + if (HOMEPATH) + return `${HOMEDRIVE}${HOMEPATH}`; + const homeDirCacheKey = getHomeDirCacheKey(); + if (!homeDirCache[homeDirCacheKey]) + homeDirCache[homeDirCacheKey] = homedir(); + return homeDirCache[homeDirCacheKey]; +}; diff --git a/amplify/functions/downloadDocument/node_modules/@smithy/shared-ini-file-loader/dist-es/getProfileName.js b/amplify/functions/downloadDocument/node_modules/@smithy/shared-ini-file-loader/dist-es/getProfileName.js new file mode 100644 index 0000000..acc29f0 --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@smithy/shared-ini-file-loader/dist-es/getProfileName.js @@ -0,0 +1,3 @@ +export const ENV_PROFILE = "AWS_PROFILE"; +export const DEFAULT_PROFILE = "default"; +export const getProfileName = (init) => init.profile || process.env[ENV_PROFILE] || DEFAULT_PROFILE; diff --git a/amplify/functions/downloadDocument/node_modules/@smithy/shared-ini-file-loader/dist-es/getSSOTokenFilepath.js b/amplify/functions/downloadDocument/node_modules/@smithy/shared-ini-file-loader/dist-es/getSSOTokenFilepath.js new file mode 100644 index 0000000..a44b4ad --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@smithy/shared-ini-file-loader/dist-es/getSSOTokenFilepath.js @@ -0,0 +1,8 @@ +import { createHash } from "crypto"; +import { join } from 
"path"; +import { getHomeDir } from "./getHomeDir"; +export const getSSOTokenFilepath = (id) => { + const hasher = createHash("sha1"); + const cacheName = hasher.update(id).digest("hex"); + return join(getHomeDir(), ".aws", "sso", "cache", `${cacheName}.json`); +}; diff --git a/amplify/functions/downloadDocument/node_modules/@smithy/shared-ini-file-loader/dist-es/getSSOTokenFromFile.js b/amplify/functions/downloadDocument/node_modules/@smithy/shared-ini-file-loader/dist-es/getSSOTokenFromFile.js new file mode 100644 index 0000000..42659db --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@smithy/shared-ini-file-loader/dist-es/getSSOTokenFromFile.js @@ -0,0 +1,8 @@ +import { promises as fsPromises } from "fs"; +import { getSSOTokenFilepath } from "./getSSOTokenFilepath"; +const { readFile } = fsPromises; +export const getSSOTokenFromFile = async (id) => { + const ssoTokenFilepath = getSSOTokenFilepath(id); + const ssoTokenText = await readFile(ssoTokenFilepath, "utf8"); + return JSON.parse(ssoTokenText); +}; diff --git a/amplify/functions/downloadDocument/node_modules/@smithy/shared-ini-file-loader/dist-es/getSsoSessionData.js b/amplify/functions/downloadDocument/node_modules/@smithy/shared-ini-file-loader/dist-es/getSsoSessionData.js new file mode 100644 index 0000000..f2df194 --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@smithy/shared-ini-file-loader/dist-es/getSsoSessionData.js @@ -0,0 +1,5 @@ +import { IniSectionType } from "@smithy/types"; +import { CONFIG_PREFIX_SEPARATOR } from "./loadSharedConfigFiles"; +export const getSsoSessionData = (data) => Object.entries(data) + .filter(([key]) => key.startsWith(IniSectionType.SSO_SESSION + CONFIG_PREFIX_SEPARATOR)) + .reduce((acc, [key, value]) => ({ ...acc, [key.substring(key.indexOf(CONFIG_PREFIX_SEPARATOR) + 1)]: value }), {}); diff --git a/amplify/functions/downloadDocument/node_modules/@smithy/shared-ini-file-loader/dist-es/index.js 
b/amplify/functions/downloadDocument/node_modules/@smithy/shared-ini-file-loader/dist-es/index.js new file mode 100644 index 0000000..3e8b2c7 --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@smithy/shared-ini-file-loader/dist-es/index.js @@ -0,0 +1,8 @@ +export * from "./getHomeDir"; +export * from "./getProfileName"; +export * from "./getSSOTokenFilepath"; +export * from "./getSSOTokenFromFile"; +export * from "./loadSharedConfigFiles"; +export * from "./loadSsoSessionData"; +export * from "./parseKnownFiles"; +export * from "./types"; diff --git a/amplify/functions/downloadDocument/node_modules/@smithy/shared-ini-file-loader/dist-es/loadSharedConfigFiles.js b/amplify/functions/downloadDocument/node_modules/@smithy/shared-ini-file-loader/dist-es/loadSharedConfigFiles.js new file mode 100644 index 0000000..77ee32c --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@smithy/shared-ini-file-loader/dist-es/loadSharedConfigFiles.js @@ -0,0 +1,39 @@ +import { join } from "path"; +import { getConfigData } from "./getConfigData"; +import { getConfigFilepath } from "./getConfigFilepath"; +import { getCredentialsFilepath } from "./getCredentialsFilepath"; +import { getHomeDir } from "./getHomeDir"; +import { parseIni } from "./parseIni"; +import { slurpFile } from "./slurpFile"; +const swallowError = () => ({}); +export const CONFIG_PREFIX_SEPARATOR = "."; +export const loadSharedConfigFiles = async (init = {}) => { + const { filepath = getCredentialsFilepath(), configFilepath = getConfigFilepath() } = init; + const homeDir = getHomeDir(); + const relativeHomeDirPrefix = "~/"; + let resolvedFilepath = filepath; + if (filepath.startsWith(relativeHomeDirPrefix)) { + resolvedFilepath = join(homeDir, filepath.slice(2)); + } + let resolvedConfigFilepath = configFilepath; + if (configFilepath.startsWith(relativeHomeDirPrefix)) { + resolvedConfigFilepath = join(homeDir, configFilepath.slice(2)); + } + const parsedFiles = await Promise.all([ + 
slurpFile(resolvedConfigFilepath, { + ignoreCache: init.ignoreCache, + }) + .then(parseIni) + .then(getConfigData) + .catch(swallowError), + slurpFile(resolvedFilepath, { + ignoreCache: init.ignoreCache, + }) + .then(parseIni) + .catch(swallowError), + ]); + return { + configFile: parsedFiles[0], + credentialsFile: parsedFiles[1], + }; +}; diff --git a/amplify/functions/downloadDocument/node_modules/@smithy/shared-ini-file-loader/dist-es/loadSsoSessionData.js b/amplify/functions/downloadDocument/node_modules/@smithy/shared-ini-file-loader/dist-es/loadSsoSessionData.js new file mode 100644 index 0000000..3bd730b --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@smithy/shared-ini-file-loader/dist-es/loadSsoSessionData.js @@ -0,0 +1,9 @@ +import { getConfigFilepath } from "./getConfigFilepath"; +import { getSsoSessionData } from "./getSsoSessionData"; +import { parseIni } from "./parseIni"; +import { slurpFile } from "./slurpFile"; +const swallowError = () => ({}); +export const loadSsoSessionData = async (init = {}) => slurpFile(init.configFilepath ?? 
getConfigFilepath()) + .then(parseIni) + .then(getSsoSessionData) + .catch(swallowError); diff --git a/amplify/functions/downloadDocument/node_modules/@smithy/shared-ini-file-loader/dist-es/mergeConfigFiles.js b/amplify/functions/downloadDocument/node_modules/@smithy/shared-ini-file-loader/dist-es/mergeConfigFiles.js new file mode 100644 index 0000000..58576f7 --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@smithy/shared-ini-file-loader/dist-es/mergeConfigFiles.js @@ -0,0 +1,14 @@ +export const mergeConfigFiles = (...files) => { + const merged = {}; + for (const file of files) { + for (const [key, values] of Object.entries(file)) { + if (merged[key] !== undefined) { + Object.assign(merged[key], values); + } + else { + merged[key] = values; + } + } + } + return merged; +}; diff --git a/amplify/functions/downloadDocument/node_modules/@smithy/shared-ini-file-loader/dist-es/parseIni.js b/amplify/functions/downloadDocument/node_modules/@smithy/shared-ini-file-loader/dist-es/parseIni.js new file mode 100644 index 0000000..7af4a6a --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@smithy/shared-ini-file-loader/dist-es/parseIni.js @@ -0,0 +1,52 @@ +import { IniSectionType } from "@smithy/types"; +import { CONFIG_PREFIX_SEPARATOR } from "./loadSharedConfigFiles"; +const prefixKeyRegex = /^([\w-]+)\s(["'])?([\w-@\+\.%:/]+)\2$/; +const profileNameBlockList = ["__proto__", "profile __proto__"]; +export const parseIni = (iniData) => { + const map = {}; + let currentSection; + let currentSubSection; + for (const iniLine of iniData.split(/\r?\n/)) { + const trimmedLine = iniLine.split(/(^|\s)[;#]/)[0].trim(); + const isSection = trimmedLine[0] === "[" && trimmedLine[trimmedLine.length - 1] === "]"; + if (isSection) { + currentSection = undefined; + currentSubSection = undefined; + const sectionName = trimmedLine.substring(1, trimmedLine.length - 1); + const matches = prefixKeyRegex.exec(sectionName); + if (matches) { + const [, prefix, , 
name] = matches; + if (Object.values(IniSectionType).includes(prefix)) { + currentSection = [prefix, name].join(CONFIG_PREFIX_SEPARATOR); + } + } + else { + currentSection = sectionName; + } + if (profileNameBlockList.includes(sectionName)) { + throw new Error(`Found invalid profile name "${sectionName}"`); + } + } + else if (currentSection) { + const indexOfEqualsSign = trimmedLine.indexOf("="); + if (![0, -1].includes(indexOfEqualsSign)) { + const [name, value] = [ + trimmedLine.substring(0, indexOfEqualsSign).trim(), + trimmedLine.substring(indexOfEqualsSign + 1).trim(), + ]; + if (value === "") { + currentSubSection = name; + } + else { + if (currentSubSection && iniLine.trimStart() === iniLine) { + currentSubSection = undefined; + } + map[currentSection] = map[currentSection] || {}; + const key = currentSubSection ? [currentSubSection, name].join(CONFIG_PREFIX_SEPARATOR) : name; + map[currentSection][key] = value; + } + } + } + } + return map; +}; diff --git a/amplify/functions/downloadDocument/node_modules/@smithy/shared-ini-file-loader/dist-es/parseKnownFiles.js b/amplify/functions/downloadDocument/node_modules/@smithy/shared-ini-file-loader/dist-es/parseKnownFiles.js new file mode 100644 index 0000000..4920e28 --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@smithy/shared-ini-file-loader/dist-es/parseKnownFiles.js @@ -0,0 +1,6 @@ +import { loadSharedConfigFiles } from "./loadSharedConfigFiles"; +import { mergeConfigFiles } from "./mergeConfigFiles"; +export const parseKnownFiles = async (init) => { + const parsedFiles = await loadSharedConfigFiles(init); + return mergeConfigFiles(parsedFiles.configFile, parsedFiles.credentialsFile); +}; diff --git a/amplify/functions/downloadDocument/node_modules/@smithy/shared-ini-file-loader/dist-es/slurpFile.js b/amplify/functions/downloadDocument/node_modules/@smithy/shared-ini-file-loader/dist-es/slurpFile.js new file mode 100644 index 0000000..7b360cc --- /dev/null +++ 
b/amplify/functions/downloadDocument/node_modules/@smithy/shared-ini-file-loader/dist-es/slurpFile.js @@ -0,0 +1,9 @@ +import { promises as fsPromises } from "fs"; +const { readFile } = fsPromises; +const filePromisesHash = {}; +export const slurpFile = (path, options) => { + if (!filePromisesHash[path] || options?.ignoreCache) { + filePromisesHash[path] = readFile(path, "utf8"); + } + return filePromisesHash[path]; +}; diff --git a/amplify/functions/downloadDocument/node_modules/@smithy/shared-ini-file-loader/dist-es/types.js b/amplify/functions/downloadDocument/node_modules/@smithy/shared-ini-file-loader/dist-es/types.js new file mode 100644 index 0000000..cb0ff5c --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@smithy/shared-ini-file-loader/dist-es/types.js @@ -0,0 +1 @@ +export {}; diff --git a/amplify/functions/downloadDocument/node_modules/@smithy/shared-ini-file-loader/dist-types/getConfigData.d.ts b/amplify/functions/downloadDocument/node_modules/@smithy/shared-ini-file-loader/dist-types/getConfigData.d.ts new file mode 100644 index 0000000..4259831 --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@smithy/shared-ini-file-loader/dist-types/getConfigData.d.ts @@ -0,0 +1,8 @@ +import { ParsedIniData } from "@smithy/types"; +/** + * Returns the config data from parsed ini data. + * * Returns data for `default` + * * Returns profile name without prefix. + * * Returns non-profiles as is. 
+ */ +export declare const getConfigData: (data: ParsedIniData) => ParsedIniData; diff --git a/amplify/functions/downloadDocument/node_modules/@smithy/shared-ini-file-loader/dist-types/getConfigFilepath.d.ts b/amplify/functions/downloadDocument/node_modules/@smithy/shared-ini-file-loader/dist-types/getConfigFilepath.d.ts new file mode 100644 index 0000000..1d123be --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@smithy/shared-ini-file-loader/dist-types/getConfigFilepath.d.ts @@ -0,0 +1,2 @@ +export declare const ENV_CONFIG_PATH = "AWS_CONFIG_FILE"; +export declare const getConfigFilepath: () => string; diff --git a/amplify/functions/downloadDocument/node_modules/@smithy/shared-ini-file-loader/dist-types/getCredentialsFilepath.d.ts b/amplify/functions/downloadDocument/node_modules/@smithy/shared-ini-file-loader/dist-types/getCredentialsFilepath.d.ts new file mode 100644 index 0000000..26fda4a --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@smithy/shared-ini-file-loader/dist-types/getCredentialsFilepath.d.ts @@ -0,0 +1,2 @@ +export declare const ENV_CREDENTIALS_PATH = "AWS_SHARED_CREDENTIALS_FILE"; +export declare const getCredentialsFilepath: () => string; diff --git a/amplify/functions/downloadDocument/node_modules/@smithy/shared-ini-file-loader/dist-types/getHomeDir.d.ts b/amplify/functions/downloadDocument/node_modules/@smithy/shared-ini-file-loader/dist-types/getHomeDir.d.ts new file mode 100644 index 0000000..5d15bf1 --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@smithy/shared-ini-file-loader/dist-types/getHomeDir.d.ts @@ -0,0 +1,6 @@ +/** + * Get the HOME directory for the current runtime. 
+ * + * @internal + */ +export declare const getHomeDir: () => string; diff --git a/amplify/functions/downloadDocument/node_modules/@smithy/shared-ini-file-loader/dist-types/getProfileName.d.ts b/amplify/functions/downloadDocument/node_modules/@smithy/shared-ini-file-loader/dist-types/getProfileName.d.ts new file mode 100644 index 0000000..5a608b2 --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@smithy/shared-ini-file-loader/dist-types/getProfileName.d.ts @@ -0,0 +1,15 @@ +/** + * @internal + */ +export declare const ENV_PROFILE = "AWS_PROFILE"; +/** + * @internal + */ +export declare const DEFAULT_PROFILE = "default"; +/** + * Returns profile with priority order code - ENV - default. + * @internal + */ +export declare const getProfileName: (init: { + profile?: string; +}) => string; diff --git a/amplify/functions/downloadDocument/node_modules/@smithy/shared-ini-file-loader/dist-types/getSSOTokenFilepath.d.ts b/amplify/functions/downloadDocument/node_modules/@smithy/shared-ini-file-loader/dist-types/getSSOTokenFilepath.d.ts new file mode 100644 index 0000000..44a4030 --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@smithy/shared-ini-file-loader/dist-types/getSSOTokenFilepath.d.ts @@ -0,0 +1,5 @@ +/** + * Returns the filepath of the file where SSO token is stored. + * @internal + */ +export declare const getSSOTokenFilepath: (id: string) => string; diff --git a/amplify/functions/downloadDocument/node_modules/@smithy/shared-ini-file-loader/dist-types/getSSOTokenFromFile.d.ts b/amplify/functions/downloadDocument/node_modules/@smithy/shared-ini-file-loader/dist-types/getSSOTokenFromFile.d.ts new file mode 100644 index 0000000..18199ac --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@smithy/shared-ini-file-loader/dist-types/getSSOTokenFromFile.d.ts @@ -0,0 +1,46 @@ +/** + * Cached SSO token retrieved from SSO login flow. 
+ * @public + */ +export interface SSOToken { + /** + * A base64 encoded string returned by the sso-oidc service. + */ + accessToken: string; + /** + * The expiration time of the accessToken as an RFC 3339 formatted timestamp. + */ + expiresAt: string; + /** + * The token used to obtain an access token in the event that the accessToken is invalid or expired. + */ + refreshToken?: string; + /** + * The unique identifier string for each client. The client ID generated when performing the registration + * portion of the OIDC authorization flow. This is used to refresh the accessToken. + */ + clientId?: string; + /** + * A secret string generated when performing the registration portion of the OIDC authorization flow. + * This is used to refresh the accessToken. + */ + clientSecret?: string; + /** + * The expiration time of the client registration (clientId and clientSecret) as an RFC 3339 formatted timestamp. + */ + registrationExpiresAt?: string; + /** + * The configured sso_region for the profile that credentials are being resolved for. + */ + region?: string; + /** + * The configured sso_start_url for the profile that credentials are being resolved for. + */ + startUrl?: string; +} +/** + * @internal + * @param id - can be either a start URL or the SSO session name. + * Returns the SSO token from the file system. 
+ */ +export declare const getSSOTokenFromFile: (id: string) => Promise; diff --git a/amplify/functions/downloadDocument/node_modules/@smithy/shared-ini-file-loader/dist-types/getSsoSessionData.d.ts b/amplify/functions/downloadDocument/node_modules/@smithy/shared-ini-file-loader/dist-types/getSsoSessionData.d.ts new file mode 100644 index 0000000..9be020f --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@smithy/shared-ini-file-loader/dist-types/getSsoSessionData.d.ts @@ -0,0 +1,6 @@ +import { ParsedIniData } from "@smithy/types"; +/** + * Returns the sso-session data from parsed ini data by reading + * ssoSessionName after sso-session prefix including/excluding quotes + */ +export declare const getSsoSessionData: (data: ParsedIniData) => ParsedIniData; diff --git a/amplify/functions/downloadDocument/node_modules/@smithy/shared-ini-file-loader/dist-types/index.d.ts b/amplify/functions/downloadDocument/node_modules/@smithy/shared-ini-file-loader/dist-types/index.d.ts new file mode 100644 index 0000000..3e8b2c7 --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@smithy/shared-ini-file-loader/dist-types/index.d.ts @@ -0,0 +1,8 @@ +export * from "./getHomeDir"; +export * from "./getProfileName"; +export * from "./getSSOTokenFilepath"; +export * from "./getSSOTokenFromFile"; +export * from "./loadSharedConfigFiles"; +export * from "./loadSsoSessionData"; +export * from "./parseKnownFiles"; +export * from "./types"; diff --git a/amplify/functions/downloadDocument/node_modules/@smithy/shared-ini-file-loader/dist-types/loadSharedConfigFiles.d.ts b/amplify/functions/downloadDocument/node_modules/@smithy/shared-ini-file-loader/dist-types/loadSharedConfigFiles.d.ts new file mode 100644 index 0000000..3897ac3 --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@smithy/shared-ini-file-loader/dist-types/loadSharedConfigFiles.d.ts @@ -0,0 +1,36 @@ +import { Logger, SharedConfigFiles } from "@smithy/types"; +/** + * @public + */ 
+export interface SharedConfigInit { + /** + * The path at which to locate the ini credentials file. Defaults to the + * value of the `AWS_SHARED_CREDENTIALS_FILE` environment variable (if + * defined) or `~/.aws/credentials` otherwise. + */ + filepath?: string; + /** + * The path at which to locate the ini config file. Defaults to the value of + * the `AWS_CONFIG_FILE` environment variable (if defined) or + * `~/.aws/config` otherwise. + */ + configFilepath?: string; + /** + * Configuration files are normally cached after the first time they are loaded. When this + * property is set, the provider will always reload any configuration files loaded before. + */ + ignoreCache?: boolean; + /** + * For credential resolution trace logging. + */ + logger?: Logger; +} +/** + * @internal + */ +export declare const CONFIG_PREFIX_SEPARATOR = "."; +/** + * Loads the config and credentials files. + * @internal + */ +export declare const loadSharedConfigFiles: (init?: SharedConfigInit) => Promise; diff --git a/amplify/functions/downloadDocument/node_modules/@smithy/shared-ini-file-loader/dist-types/loadSsoSessionData.d.ts b/amplify/functions/downloadDocument/node_modules/@smithy/shared-ini-file-loader/dist-types/loadSsoSessionData.d.ts new file mode 100644 index 0000000..ed6c367 --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@smithy/shared-ini-file-loader/dist-types/loadSsoSessionData.d.ts @@ -0,0 +1,17 @@ +import { ParsedIniData } from "@smithy/types"; +/** + * Subset of {@link SharedConfigInit}. + * @internal + */ +export interface SsoSessionInit { + /** + * The path at which to locate the ini config file. Defaults to the value of + * the `AWS_CONFIG_FILE` environment variable (if defined) or + * `~/.aws/config` otherwise. 
+ */ + configFilepath?: string; +} +/** + * @internal + */ +export declare const loadSsoSessionData: (init?: SsoSessionInit) => Promise; diff --git a/amplify/functions/downloadDocument/node_modules/@smithy/shared-ini-file-loader/dist-types/mergeConfigFiles.d.ts b/amplify/functions/downloadDocument/node_modules/@smithy/shared-ini-file-loader/dist-types/mergeConfigFiles.d.ts new file mode 100644 index 0000000..46b8965 --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@smithy/shared-ini-file-loader/dist-types/mergeConfigFiles.d.ts @@ -0,0 +1,7 @@ +import { ParsedIniData } from "@smithy/types"; +/** + * Merge multiple profile config files such that settings each file are kept together + * + * @internal + */ +export declare const mergeConfigFiles: (...files: ParsedIniData[]) => ParsedIniData; diff --git a/amplify/functions/downloadDocument/node_modules/@smithy/shared-ini-file-loader/dist-types/parseIni.d.ts b/amplify/functions/downloadDocument/node_modules/@smithy/shared-ini-file-loader/dist-types/parseIni.d.ts new file mode 100644 index 0000000..0ae5851 --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@smithy/shared-ini-file-loader/dist-types/parseIni.d.ts @@ -0,0 +1,2 @@ +import { ParsedIniData } from "@smithy/types"; +export declare const parseIni: (iniData: string) => ParsedIniData; diff --git a/amplify/functions/downloadDocument/node_modules/@smithy/shared-ini-file-loader/dist-types/parseKnownFiles.d.ts b/amplify/functions/downloadDocument/node_modules/@smithy/shared-ini-file-loader/dist-types/parseKnownFiles.d.ts new file mode 100644 index 0000000..d5fcafa --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@smithy/shared-ini-file-loader/dist-types/parseKnownFiles.d.ts @@ -0,0 +1,18 @@ +import { ParsedIniData } from "@smithy/types"; +import { SharedConfigInit } from "./loadSharedConfigFiles"; +/** + * @public + */ +export interface SourceProfileInit extends SharedConfigInit { + /** + * The configuration profile 
to use. + */ + profile?: string; +} +/** + * Load profiles from credentials and config INI files and normalize them into a + * single profile list. + * + * @internal + */ +export declare const parseKnownFiles: (init: SourceProfileInit) => Promise; diff --git a/amplify/functions/downloadDocument/node_modules/@smithy/shared-ini-file-loader/dist-types/slurpFile.d.ts b/amplify/functions/downloadDocument/node_modules/@smithy/shared-ini-file-loader/dist-types/slurpFile.d.ts new file mode 100644 index 0000000..a3bc84c --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@smithy/shared-ini-file-loader/dist-types/slurpFile.d.ts @@ -0,0 +1,5 @@ +interface SlurpFileOptions { + ignoreCache?: boolean; +} +export declare const slurpFile: (path: string, options?: SlurpFileOptions) => Promise; +export {}; diff --git a/amplify/functions/downloadDocument/node_modules/@smithy/shared-ini-file-loader/dist-types/ts3.4/getConfigData.d.ts b/amplify/functions/downloadDocument/node_modules/@smithy/shared-ini-file-loader/dist-types/ts3.4/getConfigData.d.ts new file mode 100644 index 0000000..c6b7588 --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@smithy/shared-ini-file-loader/dist-types/ts3.4/getConfigData.d.ts @@ -0,0 +1,8 @@ +import { ParsedIniData } from "@smithy/types"; +/** + * Returns the config data from parsed ini data. + * * Returns data for `default` + * * Returns profile name without prefix. + * * Returns non-profiles as is. 
+ */ +export declare const getConfigData: (data: ParsedIniData) => ParsedIniData; diff --git a/amplify/functions/downloadDocument/node_modules/@smithy/shared-ini-file-loader/dist-types/ts3.4/getConfigFilepath.d.ts b/amplify/functions/downloadDocument/node_modules/@smithy/shared-ini-file-loader/dist-types/ts3.4/getConfigFilepath.d.ts new file mode 100644 index 0000000..dc3699b --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@smithy/shared-ini-file-loader/dist-types/ts3.4/getConfigFilepath.d.ts @@ -0,0 +1,2 @@ +export declare const ENV_CONFIG_PATH = "AWS_CONFIG_FILE"; +export declare const getConfigFilepath: () => string; diff --git a/amplify/functions/downloadDocument/node_modules/@smithy/shared-ini-file-loader/dist-types/ts3.4/getCredentialsFilepath.d.ts b/amplify/functions/downloadDocument/node_modules/@smithy/shared-ini-file-loader/dist-types/ts3.4/getCredentialsFilepath.d.ts new file mode 100644 index 0000000..f2c95b4 --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@smithy/shared-ini-file-loader/dist-types/ts3.4/getCredentialsFilepath.d.ts @@ -0,0 +1,2 @@ +export declare const ENV_CREDENTIALS_PATH = "AWS_SHARED_CREDENTIALS_FILE"; +export declare const getCredentialsFilepath: () => string; diff --git a/amplify/functions/downloadDocument/node_modules/@smithy/shared-ini-file-loader/dist-types/ts3.4/getHomeDir.d.ts b/amplify/functions/downloadDocument/node_modules/@smithy/shared-ini-file-loader/dist-types/ts3.4/getHomeDir.d.ts new file mode 100644 index 0000000..4c1bd7a --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@smithy/shared-ini-file-loader/dist-types/ts3.4/getHomeDir.d.ts @@ -0,0 +1,6 @@ +/** + * Get the HOME directory for the current runtime. 
+ * + * @internal + */ +export declare const getHomeDir: () => string; diff --git a/amplify/functions/downloadDocument/node_modules/@smithy/shared-ini-file-loader/dist-types/ts3.4/getProfileName.d.ts b/amplify/functions/downloadDocument/node_modules/@smithy/shared-ini-file-loader/dist-types/ts3.4/getProfileName.d.ts new file mode 100644 index 0000000..91cb16b --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@smithy/shared-ini-file-loader/dist-types/ts3.4/getProfileName.d.ts @@ -0,0 +1,15 @@ +/** + * @internal + */ +export declare const ENV_PROFILE = "AWS_PROFILE"; +/** + * @internal + */ +export declare const DEFAULT_PROFILE = "default"; +/** + * Returns profile with priority order code - ENV - default. + * @internal + */ +export declare const getProfileName: (init: { + profile?: string; +}) => string; diff --git a/amplify/functions/downloadDocument/node_modules/@smithy/shared-ini-file-loader/dist-types/ts3.4/getSSOTokenFilepath.d.ts b/amplify/functions/downloadDocument/node_modules/@smithy/shared-ini-file-loader/dist-types/ts3.4/getSSOTokenFilepath.d.ts new file mode 100644 index 0000000..e549daa --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@smithy/shared-ini-file-loader/dist-types/ts3.4/getSSOTokenFilepath.d.ts @@ -0,0 +1,5 @@ +/** + * Returns the filepath of the file where SSO token is stored. + * @internal + */ +export declare const getSSOTokenFilepath: (id: string) => string; diff --git a/amplify/functions/downloadDocument/node_modules/@smithy/shared-ini-file-loader/dist-types/ts3.4/getSSOTokenFromFile.d.ts b/amplify/functions/downloadDocument/node_modules/@smithy/shared-ini-file-loader/dist-types/ts3.4/getSSOTokenFromFile.d.ts new file mode 100644 index 0000000..140979c --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@smithy/shared-ini-file-loader/dist-types/ts3.4/getSSOTokenFromFile.d.ts @@ -0,0 +1,46 @@ +/** + * Cached SSO token retrieved from SSO login flow. 
+ * @public + */ +export interface SSOToken { + /** + * A base64 encoded string returned by the sso-oidc service. + */ + accessToken: string; + /** + * The expiration time of the accessToken as an RFC 3339 formatted timestamp. + */ + expiresAt: string; + /** + * The token used to obtain an access token in the event that the accessToken is invalid or expired. + */ + refreshToken?: string; + /** + * The unique identifier string for each client. The client ID generated when performing the registration + * portion of the OIDC authorization flow. This is used to refresh the accessToken. + */ + clientId?: string; + /** + * A secret string generated when performing the registration portion of the OIDC authorization flow. + * This is used to refresh the accessToken. + */ + clientSecret?: string; + /** + * The expiration time of the client registration (clientId and clientSecret) as an RFC 3339 formatted timestamp. + */ + registrationExpiresAt?: string; + /** + * The configured sso_region for the profile that credentials are being resolved for. + */ + region?: string; + /** + * The configured sso_start_url for the profile that credentials are being resolved for. + */ + startUrl?: string; +} +/** + * @internal + * @param id - can be either a start URL or the SSO session name. + * Returns the SSO token from the file system. 
+ */ +export declare const getSSOTokenFromFile: (id: string) => Promise; diff --git a/amplify/functions/downloadDocument/node_modules/@smithy/shared-ini-file-loader/dist-types/ts3.4/getSsoSessionData.d.ts b/amplify/functions/downloadDocument/node_modules/@smithy/shared-ini-file-loader/dist-types/ts3.4/getSsoSessionData.d.ts new file mode 100644 index 0000000..04a1a99 --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@smithy/shared-ini-file-loader/dist-types/ts3.4/getSsoSessionData.d.ts @@ -0,0 +1,6 @@ +import { ParsedIniData } from "@smithy/types"; +/** + * Returns the sso-session data from parsed ini data by reading + * ssoSessionName after sso-session prefix including/excluding quotes + */ +export declare const getSsoSessionData: (data: ParsedIniData) => ParsedIniData; diff --git a/amplify/functions/downloadDocument/node_modules/@smithy/shared-ini-file-loader/dist-types/ts3.4/index.d.ts b/amplify/functions/downloadDocument/node_modules/@smithy/shared-ini-file-loader/dist-types/ts3.4/index.d.ts new file mode 100644 index 0000000..12ed3bb --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@smithy/shared-ini-file-loader/dist-types/ts3.4/index.d.ts @@ -0,0 +1,8 @@ +export * from "./getHomeDir"; +export * from "./getProfileName"; +export * from "./getSSOTokenFilepath"; +export * from "./getSSOTokenFromFile"; +export * from "./loadSharedConfigFiles"; +export * from "./loadSsoSessionData"; +export * from "./parseKnownFiles"; +export * from "./types"; diff --git a/amplify/functions/downloadDocument/node_modules/@smithy/shared-ini-file-loader/dist-types/ts3.4/loadSharedConfigFiles.d.ts b/amplify/functions/downloadDocument/node_modules/@smithy/shared-ini-file-loader/dist-types/ts3.4/loadSharedConfigFiles.d.ts new file mode 100644 index 0000000..969254e --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@smithy/shared-ini-file-loader/dist-types/ts3.4/loadSharedConfigFiles.d.ts @@ -0,0 +1,36 @@ +import { Logger, 
SharedConfigFiles } from "@smithy/types"; +/** + * @public + */ +export interface SharedConfigInit { + /** + * The path at which to locate the ini credentials file. Defaults to the + * value of the `AWS_SHARED_CREDENTIALS_FILE` environment variable (if + * defined) or `~/.aws/credentials` otherwise. + */ + filepath?: string; + /** + * The path at which to locate the ini config file. Defaults to the value of + * the `AWS_CONFIG_FILE` environment variable (if defined) or + * `~/.aws/config` otherwise. + */ + configFilepath?: string; + /** + * Configuration files are normally cached after the first time they are loaded. When this + * property is set, the provider will always reload any configuration files loaded before. + */ + ignoreCache?: boolean; + /** + * For credential resolution trace logging. + */ + logger?: Logger; +} +/** + * @internal + */ +export declare const CONFIG_PREFIX_SEPARATOR = "."; +/** + * Loads the config and credentials files. + * @internal + */ +export declare const loadSharedConfigFiles: (init?: SharedConfigInit) => Promise; diff --git a/amplify/functions/downloadDocument/node_modules/@smithy/shared-ini-file-loader/dist-types/ts3.4/loadSsoSessionData.d.ts b/amplify/functions/downloadDocument/node_modules/@smithy/shared-ini-file-loader/dist-types/ts3.4/loadSsoSessionData.d.ts new file mode 100644 index 0000000..08e265e --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@smithy/shared-ini-file-loader/dist-types/ts3.4/loadSsoSessionData.d.ts @@ -0,0 +1,17 @@ +import { ParsedIniData } from "@smithy/types"; +/** + * Subset of {@link SharedConfigInit}. + * @internal + */ +export interface SsoSessionInit { + /** + * The path at which to locate the ini config file. Defaults to the value of + * the `AWS_CONFIG_FILE` environment variable (if defined) or + * `~/.aws/config` otherwise. 
+ */ + configFilepath?: string; +} +/** + * @internal + */ +export declare const loadSsoSessionData: (init?: SsoSessionInit) => Promise; diff --git a/amplify/functions/downloadDocument/node_modules/@smithy/shared-ini-file-loader/dist-types/ts3.4/mergeConfigFiles.d.ts b/amplify/functions/downloadDocument/node_modules/@smithy/shared-ini-file-loader/dist-types/ts3.4/mergeConfigFiles.d.ts new file mode 100644 index 0000000..f94e725 --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@smithy/shared-ini-file-loader/dist-types/ts3.4/mergeConfigFiles.d.ts @@ -0,0 +1,7 @@ +import { ParsedIniData } from "@smithy/types"; +/** + * Merge multiple profile config files such that settings each file are kept together + * + * @internal + */ +export declare const mergeConfigFiles: (...files: ParsedIniData[]) => ParsedIniData; diff --git a/amplify/functions/downloadDocument/node_modules/@smithy/shared-ini-file-loader/dist-types/ts3.4/parseIni.d.ts b/amplify/functions/downloadDocument/node_modules/@smithy/shared-ini-file-loader/dist-types/ts3.4/parseIni.d.ts new file mode 100644 index 0000000..4e58d0e --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@smithy/shared-ini-file-loader/dist-types/ts3.4/parseIni.d.ts @@ -0,0 +1,2 @@ +import { ParsedIniData } from "@smithy/types"; +export declare const parseIni: (iniData: string) => ParsedIniData; diff --git a/amplify/functions/downloadDocument/node_modules/@smithy/shared-ini-file-loader/dist-types/ts3.4/parseKnownFiles.d.ts b/amplify/functions/downloadDocument/node_modules/@smithy/shared-ini-file-loader/dist-types/ts3.4/parseKnownFiles.d.ts new file mode 100644 index 0000000..46ba24b --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@smithy/shared-ini-file-loader/dist-types/ts3.4/parseKnownFiles.d.ts @@ -0,0 +1,18 @@ +import { ParsedIniData } from "@smithy/types"; +import { SharedConfigInit } from "./loadSharedConfigFiles"; +/** + * @public + */ +export interface SourceProfileInit extends 
SharedConfigInit { + /** + * The configuration profile to use. + */ + profile?: string; +} +/** + * Load profiles from credentials and config INI files and normalize them into a + * single profile list. + * + * @internal + */ +export declare const parseKnownFiles: (init: SourceProfileInit) => Promise; diff --git a/amplify/functions/downloadDocument/node_modules/@smithy/shared-ini-file-loader/dist-types/ts3.4/slurpFile.d.ts b/amplify/functions/downloadDocument/node_modules/@smithy/shared-ini-file-loader/dist-types/ts3.4/slurpFile.d.ts new file mode 100644 index 0000000..33e7a0c --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@smithy/shared-ini-file-loader/dist-types/ts3.4/slurpFile.d.ts @@ -0,0 +1,5 @@ +interface SlurpFileOptions { + ignoreCache?: boolean; +} +export declare const slurpFile: (path: string, options?: SlurpFileOptions) => Promise; +export {}; diff --git a/amplify/functions/downloadDocument/node_modules/@smithy/shared-ini-file-loader/dist-types/ts3.4/types.d.ts b/amplify/functions/downloadDocument/node_modules/@smithy/shared-ini-file-loader/dist-types/ts3.4/types.d.ts new file mode 100644 index 0000000..6d6acbd --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@smithy/shared-ini-file-loader/dist-types/ts3.4/types.d.ts @@ -0,0 +1,16 @@ +import { ParsedIniData as __ParsedIniData, Profile as __Profile, SharedConfigFiles as __SharedConfigFiles } from "@smithy/types"; +/** + * @internal + * @deprecated Use Profile from "\@smithy/types" instead + */ +export type Profile = __Profile; +/** + * @internal + * @deprecated Use ParsedIniData from "\@smithy/types" instead + */ +export type ParsedIniData = __ParsedIniData; +/** + * @internal + * @deprecated Use SharedConfigFiles from "\@smithy/types" instead + */ +export type SharedConfigFiles = __SharedConfigFiles; diff --git a/amplify/functions/downloadDocument/node_modules/@smithy/shared-ini-file-loader/dist-types/types.d.ts 
b/amplify/functions/downloadDocument/node_modules/@smithy/shared-ini-file-loader/dist-types/types.d.ts new file mode 100644 index 0000000..c7c8d92 --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@smithy/shared-ini-file-loader/dist-types/types.d.ts @@ -0,0 +1,16 @@ +import { ParsedIniData as __ParsedIniData, Profile as __Profile, SharedConfigFiles as __SharedConfigFiles } from "@smithy/types"; +/** + * @internal + * @deprecated Use Profile from "\@smithy/types" instead + */ +export type Profile = __Profile; +/** + * @internal + * @deprecated Use ParsedIniData from "\@smithy/types" instead + */ +export type ParsedIniData = __ParsedIniData; +/** + * @internal + * @deprecated Use SharedConfigFiles from "\@smithy/types" instead + */ +export type SharedConfigFiles = __SharedConfigFiles; diff --git a/amplify/functions/downloadDocument/node_modules/@smithy/shared-ini-file-loader/package.json b/amplify/functions/downloadDocument/node_modules/@smithy/shared-ini-file-loader/package.json new file mode 100644 index 0000000..e6dfbfe --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@smithy/shared-ini-file-loader/package.json @@ -0,0 +1,72 @@ +{ + "name": "@smithy/shared-ini-file-loader", + "version": "4.0.2", + "dependencies": { + "@smithy/types": "^4.2.0", + "tslib": "^2.6.2" + }, + "devDependencies": { + "@types/node": "^18.11.9", + "concurrently": "7.0.0", + "downlevel-dts": "0.10.1", + "rimraf": "3.0.2", + "typedoc": "0.23.23" + }, + "scripts": { + "build": "concurrently 'yarn:build:cjs' 'yarn:build:es' 'yarn:build:types && yarn build:types:downlevel'", + "build:cjs": "node ../../scripts/inline shared-ini-file-loader", + "build:es": "yarn g:tsc -p tsconfig.es.json", + "build:types": "yarn g:tsc -p tsconfig.types.json", + "build:types:downlevel": "rimraf dist-types/ts3.4 && downlevel-dts dist-types dist-types/ts3.4", + "stage-release": "rimraf ./.release && yarn pack && mkdir ./.release && tar zxvf ./package.tgz --directory ./.release && 
rm ./package.tgz", + "clean": "rimraf ./dist-* && rimraf *.tsbuildinfo || exit 0", + "lint": "eslint -c ../../.eslintrc.js \"src/**/*.ts\"", + "format": "prettier --config ../../prettier.config.js --ignore-path ../../.prettierignore --write \"**/*.{ts,md,json}\"", + "extract:docs": "api-extractor run --local", + "test": "yarn g:vitest run", + "test:watch": "yarn g:vitest watch" + }, + "author": { + "name": "AWS SDK for JavaScript Team", + "url": "https://aws.amazon.com/javascript/" + }, + "license": "Apache-2.0", + "main": "./dist-cjs/index.js", + "module": "./dist-es/index.js", + "types": "./dist-types/index.d.ts", + "browser": { + "./dist-es/getSSOTokenFromFile": false, + "./dist-es/slurpFile": false + }, + "react-native": { + "./dist-cjs/getSSOTokenFromFile": false, + "./dist-cjs/slurpFile": false, + "./dist-es/getSSOTokenFromFile": false, + "./dist-es/slurpFile": false + }, + "engines": { + "node": ">=18.0.0" + }, + "typesVersions": { + "<4.0": { + "dist-types/*": [ + "dist-types/ts3.4/*" + ] + } + }, + "files": [ + "dist-*/**" + ], + "homepage": "https://github.com/smithy-lang/smithy-typescript/tree/main/packages/shared-ini-file-loader", + "repository": { + "type": "git", + "url": "https://github.com/smithy-lang/smithy-typescript.git", + "directory": "packages/shared-ini-file-loader" + }, + "typedoc": { + "entryPoint": "src/index.ts" + }, + "publishConfig": { + "directory": ".release/package" + } +} \ No newline at end of file diff --git a/amplify/functions/downloadDocument/node_modules/@smithy/signature-v4/LICENSE b/amplify/functions/downloadDocument/node_modules/@smithy/signature-v4/LICENSE new file mode 100644 index 0000000..7b6491b --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@smithy/signature-v4/LICENSE @@ -0,0 +1,201 @@ +Apache License + Version 2.0, January 2004 + http://www.apache.org/licenses/ + + TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION + + 1. Definitions. 
+ + "License" shall mean the terms and conditions for use, reproduction, + and distribution as defined by Sections 1 through 9 of this document. + + "Licensor" shall mean the copyright owner or entity authorized by + the copyright owner that is granting the License. + + "Legal Entity" shall mean the union of the acting entity and all + other entities that control, are controlled by, or are under common + control with that entity. For the purposes of this definition, + "control" means (i) the power, direct or indirect, to cause the + direction or management of such entity, whether by contract or + otherwise, or (ii) ownership of fifty percent (50%) or more of the + outstanding shares, or (iii) beneficial ownership of such entity. + + "You" (or "Your") shall mean an individual or Legal Entity + exercising permissions granted by this License. + + "Source" form shall mean the preferred form for making modifications, + including but not limited to software source code, documentation + source, and configuration files. + + "Object" form shall mean any form resulting from mechanical + transformation or translation of a Source form, including but + not limited to compiled object code, generated documentation, + and conversions to other media types. + + "Work" shall mean the work of authorship, whether in Source or + Object form, made available under the License, as indicated by a + copyright notice that is included in or attached to the work + (an example is provided in the Appendix below). + + "Derivative Works" shall mean any work, whether in Source or Object + form, that is based on (or derived from) the Work and for which the + editorial revisions, annotations, elaborations, or other modifications + represent, as a whole, an original work of authorship. For the purposes + of this License, Derivative Works shall not include works that remain + separable from, or merely link (or bind by name) to the interfaces of, + the Work and Derivative Works thereof. 
+ + "Contribution" shall mean any work of authorship, including + the original version of the Work and any modifications or additions + to that Work or Derivative Works thereof, that is intentionally + submitted to Licensor for inclusion in the Work by the copyright owner + or by an individual or Legal Entity authorized to submit on behalf of + the copyright owner. For the purposes of this definition, "submitted" + means any form of electronic, verbal, or written communication sent + to the Licensor or its representatives, including but not limited to + communication on electronic mailing lists, source code control systems, + and issue tracking systems that are managed by, or on behalf of, the + Licensor for the purpose of discussing and improving the Work, but + excluding communication that is conspicuously marked or otherwise + designated in writing by the copyright owner as "Not a Contribution." + + "Contributor" shall mean Licensor and any individual or Legal Entity + on behalf of whom a Contribution has been received by Licensor and + subsequently incorporated within the Work. + + 2. Grant of Copyright License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + copyright license to reproduce, prepare Derivative Works of, + publicly display, publicly perform, sublicense, and distribute the + Work and such Derivative Works in Source or Object form. + + 3. Grant of Patent License. 
Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + (except as stated in this section) patent license to make, have made, + use, offer to sell, sell, import, and otherwise transfer the Work, + where such license applies only to those patent claims licensable + by such Contributor that are necessarily infringed by their + Contribution(s) alone or by combination of their Contribution(s) + with the Work to which such Contribution(s) was submitted. If You + institute patent litigation against any entity (including a + cross-claim or counterclaim in a lawsuit) alleging that the Work + or a Contribution incorporated within the Work constitutes direct + or contributory patent infringement, then any patent licenses + granted to You under this License for that Work shall terminate + as of the date such litigation is filed. + + 4. Redistribution. You may reproduce and distribute copies of the + Work or Derivative Works thereof in any medium, with or without + modifications, and in Source or Object form, provided that You + meet the following conditions: + + (a) You must give any other recipients of the Work or + Derivative Works a copy of this License; and + + (b) You must cause any modified files to carry prominent notices + stating that You changed the files; and + + (c) You must retain, in the Source form of any Derivative Works + that You distribute, all copyright, patent, trademark, and + attribution notices from the Source form of the Work, + excluding those notices that do not pertain to any part of + the Derivative Works; and + + (d) If the Work includes a "NOTICE" text file as part of its + distribution, then any Derivative Works that You distribute must + include a readable copy of the attribution notices contained + within such NOTICE file, excluding those notices that do not + pertain to any part of the Derivative Works, in at least one + of 
the following places: within a NOTICE text file distributed + as part of the Derivative Works; within the Source form or + documentation, if provided along with the Derivative Works; or, + within a display generated by the Derivative Works, if and + wherever such third-party notices normally appear. The contents + of the NOTICE file are for informational purposes only and + do not modify the License. You may add Your own attribution + notices within Derivative Works that You distribute, alongside + or as an addendum to the NOTICE text from the Work, provided + that such additional attribution notices cannot be construed + as modifying the License. + + You may add Your own copyright statement to Your modifications and + may provide additional or different license terms and conditions + for use, reproduction, or distribution of Your modifications, or + for any such Derivative Works as a whole, provided Your use, + reproduction, and distribution of the Work otherwise complies with + the conditions stated in this License. + + 5. Submission of Contributions. Unless You explicitly state otherwise, + any Contribution intentionally submitted for inclusion in the Work + by You to the Licensor shall be under the terms and conditions of + this License, without any additional terms or conditions. + Notwithstanding the above, nothing herein shall supersede or modify + the terms of any separate license agreement you may have executed + with Licensor regarding such Contributions. + + 6. Trademarks. This License does not grant permission to use the trade + names, trademarks, service marks, or product names of the Licensor, + except as required for reasonable and customary use in describing the + origin of the Work and reproducing the content of the NOTICE file. + + 7. Disclaimer of Warranty. 
Unless required by applicable law or + agreed to in writing, Licensor provides the Work (and each + Contributor provides its Contributions) on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or + implied, including, without limitation, any warranties or conditions + of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A + PARTICULAR PURPOSE. You are solely responsible for determining the + appropriateness of using or redistributing the Work and assume any + risks associated with Your exercise of permissions under this License. + + 8. Limitation of Liability. In no event and under no legal theory, + whether in tort (including negligence), contract, or otherwise, + unless required by applicable law (such as deliberate and grossly + negligent acts) or agreed to in writing, shall any Contributor be + liable to You for damages, including any direct, indirect, special, + incidental, or consequential damages of any character arising as a + result of this License or out of the use or inability to use the + Work (including but not limited to damages for loss of goodwill, + work stoppage, computer failure or malfunction, or any and all + other commercial damages or losses), even if such Contributor + has been advised of the possibility of such damages. + + 9. Accepting Warranty or Additional Liability. While redistributing + the Work or Derivative Works thereof, You may choose to offer, + and charge a fee for, acceptance of support, warranty, indemnity, + or other liability obligations and/or rights consistent with this + License. However, in accepting such obligations, You may act only + on Your own behalf and on Your sole responsibility, not on behalf + of any other Contributor, and only if You agree to indemnify, + defend, and hold each Contributor harmless for any liability + incurred by, or claims asserted against, such Contributor by reason + of your accepting any such warranty or additional liability. 
+ + END OF TERMS AND CONDITIONS + + APPENDIX: How to apply the Apache License to your work. + + To apply the Apache License to your work, attach the following + boilerplate notice, with the fields enclosed by brackets "{}" + replaced with your own identifying information. (Don't include + the brackets!) The text should be enclosed in the appropriate + comment syntax for the file format. We also recommend that a + file or class name and description of purpose be included on the + same "printed page" as the copyright notice for easier + identification within third-party archives. + + Copyright 2018-2020 Amazon.com, Inc. or its affiliates. All Rights Reserved. + + Licensed under the Apache License, Version 2.0 (the "License"); + you may not use this file except in compliance with the License. + You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + + Unless required by applicable law or agreed to in writing, software + distributed under the License is distributed on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + See the License for the specific language governing permissions and + limitations under the License. 
\ No newline at end of file diff --git a/amplify/functions/downloadDocument/node_modules/@smithy/signature-v4/README.md b/amplify/functions/downloadDocument/node_modules/@smithy/signature-v4/README.md new file mode 100644 index 0000000..3bc9a17 --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@smithy/signature-v4/README.md @@ -0,0 +1,11 @@ +# @smithy/signature-v4 + +[![NPM version](https://img.shields.io/npm/v/@smithy/signature-v4/latest.svg)](https://www.npmjs.com/package/@smithy/signature-v4) +[![NPM downloads](https://img.shields.io/npm/dm/@smithy/signature-v4.svg)](https://www.npmjs.com/package/@smithy/signature-v4) + +This package contains an implementation of the [AWS Signature Version 4](https://docs.aws.amazon.com/AmazonS3/latest/API/sig-v4-authenticating-requests.html) +authentication scheme. + +It is internal to Smithy-TypeScript generated clients, and not generally intended for standalone usage outside this context. + +For custom usage, inspect the interface of the SignatureV4 class. 
diff --git a/amplify/functions/downloadDocument/node_modules/@smithy/signature-v4/dist-cjs/HeaderFormatter.js b/amplify/functions/downloadDocument/node_modules/@smithy/signature-v4/dist-cjs/HeaderFormatter.js new file mode 100644 index 0000000..532e610 --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@smithy/signature-v4/dist-cjs/HeaderFormatter.js @@ -0,0 +1 @@ +module.exports = require("./index.js"); \ No newline at end of file diff --git a/amplify/functions/downloadDocument/node_modules/@smithy/signature-v4/dist-cjs/SignatureV4.js b/amplify/functions/downloadDocument/node_modules/@smithy/signature-v4/dist-cjs/SignatureV4.js new file mode 100644 index 0000000..532e610 --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@smithy/signature-v4/dist-cjs/SignatureV4.js @@ -0,0 +1 @@ +module.exports = require("./index.js"); \ No newline at end of file diff --git a/amplify/functions/downloadDocument/node_modules/@smithy/signature-v4/dist-cjs/SignatureV4Base.js b/amplify/functions/downloadDocument/node_modules/@smithy/signature-v4/dist-cjs/SignatureV4Base.js new file mode 100644 index 0000000..532e610 --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@smithy/signature-v4/dist-cjs/SignatureV4Base.js @@ -0,0 +1 @@ +module.exports = require("./index.js"); \ No newline at end of file diff --git a/amplify/functions/downloadDocument/node_modules/@smithy/signature-v4/dist-cjs/constants.js b/amplify/functions/downloadDocument/node_modules/@smithy/signature-v4/dist-cjs/constants.js new file mode 100644 index 0000000..532e610 --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@smithy/signature-v4/dist-cjs/constants.js @@ -0,0 +1 @@ +module.exports = require("./index.js"); \ No newline at end of file diff --git a/amplify/functions/downloadDocument/node_modules/@smithy/signature-v4/dist-cjs/credentialDerivation.js 
b/amplify/functions/downloadDocument/node_modules/@smithy/signature-v4/dist-cjs/credentialDerivation.js new file mode 100644 index 0000000..532e610 --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@smithy/signature-v4/dist-cjs/credentialDerivation.js @@ -0,0 +1 @@ +module.exports = require("./index.js"); \ No newline at end of file diff --git a/amplify/functions/downloadDocument/node_modules/@smithy/signature-v4/dist-cjs/getCanonicalHeaders.js b/amplify/functions/downloadDocument/node_modules/@smithy/signature-v4/dist-cjs/getCanonicalHeaders.js new file mode 100644 index 0000000..532e610 --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@smithy/signature-v4/dist-cjs/getCanonicalHeaders.js @@ -0,0 +1 @@ +module.exports = require("./index.js"); \ No newline at end of file diff --git a/amplify/functions/downloadDocument/node_modules/@smithy/signature-v4/dist-cjs/getCanonicalQuery.js b/amplify/functions/downloadDocument/node_modules/@smithy/signature-v4/dist-cjs/getCanonicalQuery.js new file mode 100644 index 0000000..532e610 --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@smithy/signature-v4/dist-cjs/getCanonicalQuery.js @@ -0,0 +1 @@ +module.exports = require("./index.js"); \ No newline at end of file diff --git a/amplify/functions/downloadDocument/node_modules/@smithy/signature-v4/dist-cjs/getPayloadHash.js b/amplify/functions/downloadDocument/node_modules/@smithy/signature-v4/dist-cjs/getPayloadHash.js new file mode 100644 index 0000000..532e610 --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@smithy/signature-v4/dist-cjs/getPayloadHash.js @@ -0,0 +1 @@ +module.exports = require("./index.js"); \ No newline at end of file diff --git a/amplify/functions/downloadDocument/node_modules/@smithy/signature-v4/dist-cjs/headerUtil.js b/amplify/functions/downloadDocument/node_modules/@smithy/signature-v4/dist-cjs/headerUtil.js new file mode 100644 index 0000000..532e610 --- /dev/null +++ 
b/amplify/functions/downloadDocument/node_modules/@smithy/signature-v4/dist-cjs/headerUtil.js @@ -0,0 +1 @@ +module.exports = require("./index.js"); \ No newline at end of file diff --git a/amplify/functions/downloadDocument/node_modules/@smithy/signature-v4/dist-cjs/index.js b/amplify/functions/downloadDocument/node_modules/@smithy/signature-v4/dist-cjs/index.js new file mode 100644 index 0000000..41f93bb --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@smithy/signature-v4/dist-cjs/index.js @@ -0,0 +1,682 @@ +var __defProp = Object.defineProperty; +var __getOwnPropDesc = Object.getOwnPropertyDescriptor; +var __getOwnPropNames = Object.getOwnPropertyNames; +var __hasOwnProp = Object.prototype.hasOwnProperty; +var __name = (target, value) => __defProp(target, "name", { value, configurable: true }); +var __export = (target, all) => { + for (var name in all) + __defProp(target, name, { get: all[name], enumerable: true }); +}; +var __copyProps = (to, from, except, desc) => { + if (from && typeof from === "object" || typeof from === "function") { + for (let key of __getOwnPropNames(from)) + if (!__hasOwnProp.call(to, key) && key !== except) + __defProp(to, key, { get: () => from[key], enumerable: !(desc = __getOwnPropDesc(from, key)) || desc.enumerable }); + } + return to; +}; +var __toCommonJS = (mod) => __copyProps(__defProp({}, "__esModule", { value: true }), mod); + +// src/index.ts +var src_exports = {}; +__export(src_exports, { + ALGORITHM_IDENTIFIER: () => ALGORITHM_IDENTIFIER, + ALGORITHM_IDENTIFIER_V4A: () => ALGORITHM_IDENTIFIER_V4A, + ALGORITHM_QUERY_PARAM: () => ALGORITHM_QUERY_PARAM, + ALWAYS_UNSIGNABLE_HEADERS: () => ALWAYS_UNSIGNABLE_HEADERS, + AMZ_DATE_HEADER: () => AMZ_DATE_HEADER, + AMZ_DATE_QUERY_PARAM: () => AMZ_DATE_QUERY_PARAM, + AUTH_HEADER: () => AUTH_HEADER, + CREDENTIAL_QUERY_PARAM: () => CREDENTIAL_QUERY_PARAM, + DATE_HEADER: () => DATE_HEADER, + EVENT_ALGORITHM_IDENTIFIER: () => EVENT_ALGORITHM_IDENTIFIER, + 
EXPIRES_QUERY_PARAM: () => EXPIRES_QUERY_PARAM, + GENERATED_HEADERS: () => GENERATED_HEADERS, + HOST_HEADER: () => HOST_HEADER, + KEY_TYPE_IDENTIFIER: () => KEY_TYPE_IDENTIFIER, + MAX_CACHE_SIZE: () => MAX_CACHE_SIZE, + MAX_PRESIGNED_TTL: () => MAX_PRESIGNED_TTL, + PROXY_HEADER_PATTERN: () => PROXY_HEADER_PATTERN, + REGION_SET_PARAM: () => REGION_SET_PARAM, + SEC_HEADER_PATTERN: () => SEC_HEADER_PATTERN, + SHA256_HEADER: () => SHA256_HEADER, + SIGNATURE_HEADER: () => SIGNATURE_HEADER, + SIGNATURE_QUERY_PARAM: () => SIGNATURE_QUERY_PARAM, + SIGNED_HEADERS_QUERY_PARAM: () => SIGNED_HEADERS_QUERY_PARAM, + SignatureV4: () => SignatureV4, + SignatureV4Base: () => SignatureV4Base, + TOKEN_HEADER: () => TOKEN_HEADER, + TOKEN_QUERY_PARAM: () => TOKEN_QUERY_PARAM, + UNSIGNABLE_PATTERNS: () => UNSIGNABLE_PATTERNS, + UNSIGNED_PAYLOAD: () => UNSIGNED_PAYLOAD, + clearCredentialCache: () => clearCredentialCache, + createScope: () => createScope, + getCanonicalHeaders: () => getCanonicalHeaders, + getCanonicalQuery: () => getCanonicalQuery, + getPayloadHash: () => getPayloadHash, + getSigningKey: () => getSigningKey, + hasHeader: () => hasHeader, + moveHeadersToQuery: () => moveHeadersToQuery, + prepareRequest: () => prepareRequest, + signatureV4aContainer: () => signatureV4aContainer +}); +module.exports = __toCommonJS(src_exports); + +// src/SignatureV4.ts + +var import_util_utf85 = require("@smithy/util-utf8"); + +// src/constants.ts +var ALGORITHM_QUERY_PARAM = "X-Amz-Algorithm"; +var CREDENTIAL_QUERY_PARAM = "X-Amz-Credential"; +var AMZ_DATE_QUERY_PARAM = "X-Amz-Date"; +var SIGNED_HEADERS_QUERY_PARAM = "X-Amz-SignedHeaders"; +var EXPIRES_QUERY_PARAM = "X-Amz-Expires"; +var SIGNATURE_QUERY_PARAM = "X-Amz-Signature"; +var TOKEN_QUERY_PARAM = "X-Amz-Security-Token"; +var REGION_SET_PARAM = "X-Amz-Region-Set"; +var AUTH_HEADER = "authorization"; +var AMZ_DATE_HEADER = AMZ_DATE_QUERY_PARAM.toLowerCase(); +var DATE_HEADER = "date"; +var GENERATED_HEADERS = [AUTH_HEADER, 
AMZ_DATE_HEADER, DATE_HEADER]; +var SIGNATURE_HEADER = SIGNATURE_QUERY_PARAM.toLowerCase(); +var SHA256_HEADER = "x-amz-content-sha256"; +var TOKEN_HEADER = TOKEN_QUERY_PARAM.toLowerCase(); +var HOST_HEADER = "host"; +var ALWAYS_UNSIGNABLE_HEADERS = { + authorization: true, + "cache-control": true, + connection: true, + expect: true, + from: true, + "keep-alive": true, + "max-forwards": true, + pragma: true, + referer: true, + te: true, + trailer: true, + "transfer-encoding": true, + upgrade: true, + "user-agent": true, + "x-amzn-trace-id": true +}; +var PROXY_HEADER_PATTERN = /^proxy-/; +var SEC_HEADER_PATTERN = /^sec-/; +var UNSIGNABLE_PATTERNS = [/^proxy-/i, /^sec-/i]; +var ALGORITHM_IDENTIFIER = "AWS4-HMAC-SHA256"; +var ALGORITHM_IDENTIFIER_V4A = "AWS4-ECDSA-P256-SHA256"; +var EVENT_ALGORITHM_IDENTIFIER = "AWS4-HMAC-SHA256-PAYLOAD"; +var UNSIGNED_PAYLOAD = "UNSIGNED-PAYLOAD"; +var MAX_CACHE_SIZE = 50; +var KEY_TYPE_IDENTIFIER = "aws4_request"; +var MAX_PRESIGNED_TTL = 60 * 60 * 24 * 7; + +// src/credentialDerivation.ts +var import_util_hex_encoding = require("@smithy/util-hex-encoding"); +var import_util_utf8 = require("@smithy/util-utf8"); +var signingKeyCache = {}; +var cacheQueue = []; +var createScope = /* @__PURE__ */ __name((shortDate, region, service) => `${shortDate}/${region}/${service}/${KEY_TYPE_IDENTIFIER}`, "createScope"); +var getSigningKey = /* @__PURE__ */ __name(async (sha256Constructor, credentials, shortDate, region, service) => { + const credsHash = await hmac(sha256Constructor, credentials.secretAccessKey, credentials.accessKeyId); + const cacheKey = `${shortDate}:${region}:${service}:${(0, import_util_hex_encoding.toHex)(credsHash)}:${credentials.sessionToken}`; + if (cacheKey in signingKeyCache) { + return signingKeyCache[cacheKey]; + } + cacheQueue.push(cacheKey); + while (cacheQueue.length > MAX_CACHE_SIZE) { + delete signingKeyCache[cacheQueue.shift()]; + } + let key = `AWS4${credentials.secretAccessKey}`; + for (const signable of 
[shortDate, region, service, KEY_TYPE_IDENTIFIER]) { + key = await hmac(sha256Constructor, key, signable); + } + return signingKeyCache[cacheKey] = key; +}, "getSigningKey"); +var clearCredentialCache = /* @__PURE__ */ __name(() => { + cacheQueue.length = 0; + Object.keys(signingKeyCache).forEach((cacheKey) => { + delete signingKeyCache[cacheKey]; + }); +}, "clearCredentialCache"); +var hmac = /* @__PURE__ */ __name((ctor, secret, data) => { + const hash = new ctor(secret); + hash.update((0, import_util_utf8.toUint8Array)(data)); + return hash.digest(); +}, "hmac"); + +// src/getCanonicalHeaders.ts +var getCanonicalHeaders = /* @__PURE__ */ __name(({ headers }, unsignableHeaders, signableHeaders) => { + const canonical = {}; + for (const headerName of Object.keys(headers).sort()) { + if (headers[headerName] == void 0) { + continue; + } + const canonicalHeaderName = headerName.toLowerCase(); + if (canonicalHeaderName in ALWAYS_UNSIGNABLE_HEADERS || unsignableHeaders?.has(canonicalHeaderName) || PROXY_HEADER_PATTERN.test(canonicalHeaderName) || SEC_HEADER_PATTERN.test(canonicalHeaderName)) { + if (!signableHeaders || signableHeaders && !signableHeaders.has(canonicalHeaderName)) { + continue; + } + } + canonical[canonicalHeaderName] = headers[headerName].trim().replace(/\s+/g, " "); + } + return canonical; +}, "getCanonicalHeaders"); + +// src/getPayloadHash.ts +var import_is_array_buffer = require("@smithy/is-array-buffer"); + +var import_util_utf82 = require("@smithy/util-utf8"); +var getPayloadHash = /* @__PURE__ */ __name(async ({ headers, body }, hashConstructor) => { + for (const headerName of Object.keys(headers)) { + if (headerName.toLowerCase() === SHA256_HEADER) { + return headers[headerName]; + } + } + if (body == void 0) { + return "e3b0c44298fc1c149afbf4c8996fb92427ae41e4649b934ca495991b7852b855"; + } else if (typeof body === "string" || ArrayBuffer.isView(body) || (0, import_is_array_buffer.isArrayBuffer)(body)) { + const hashCtor = new 
hashConstructor(); + hashCtor.update((0, import_util_utf82.toUint8Array)(body)); + return (0, import_util_hex_encoding.toHex)(await hashCtor.digest()); + } + return UNSIGNED_PAYLOAD; +}, "getPayloadHash"); + +// src/HeaderFormatter.ts + +var import_util_utf83 = require("@smithy/util-utf8"); +var HeaderFormatter = class { + static { + __name(this, "HeaderFormatter"); + } + format(headers) { + const chunks = []; + for (const headerName of Object.keys(headers)) { + const bytes = (0, import_util_utf83.fromUtf8)(headerName); + chunks.push(Uint8Array.from([bytes.byteLength]), bytes, this.formatHeaderValue(headers[headerName])); + } + const out = new Uint8Array(chunks.reduce((carry, bytes) => carry + bytes.byteLength, 0)); + let position = 0; + for (const chunk of chunks) { + out.set(chunk, position); + position += chunk.byteLength; + } + return out; + } + formatHeaderValue(header) { + switch (header.type) { + case "boolean": + return Uint8Array.from([header.value ? 0 /* boolTrue */ : 1 /* boolFalse */]); + case "byte": + return Uint8Array.from([2 /* byte */, header.value]); + case "short": + const shortView = new DataView(new ArrayBuffer(3)); + shortView.setUint8(0, 3 /* short */); + shortView.setInt16(1, header.value, false); + return new Uint8Array(shortView.buffer); + case "integer": + const intView = new DataView(new ArrayBuffer(5)); + intView.setUint8(0, 4 /* integer */); + intView.setInt32(1, header.value, false); + return new Uint8Array(intView.buffer); + case "long": + const longBytes = new Uint8Array(9); + longBytes[0] = 5 /* long */; + longBytes.set(header.value.bytes, 1); + return longBytes; + case "binary": + const binView = new DataView(new ArrayBuffer(3 + header.value.byteLength)); + binView.setUint8(0, 6 /* byteArray */); + binView.setUint16(1, header.value.byteLength, false); + const binBytes = new Uint8Array(binView.buffer); + binBytes.set(header.value, 3); + return binBytes; + case "string": + const utf8Bytes = (0, 
import_util_utf83.fromUtf8)(header.value); + const strView = new DataView(new ArrayBuffer(3 + utf8Bytes.byteLength)); + strView.setUint8(0, 7 /* string */); + strView.setUint16(1, utf8Bytes.byteLength, false); + const strBytes = new Uint8Array(strView.buffer); + strBytes.set(utf8Bytes, 3); + return strBytes; + case "timestamp": + const tsBytes = new Uint8Array(9); + tsBytes[0] = 8 /* timestamp */; + tsBytes.set(Int64.fromNumber(header.value.valueOf()).bytes, 1); + return tsBytes; + case "uuid": + if (!UUID_PATTERN.test(header.value)) { + throw new Error(`Invalid UUID received: ${header.value}`); + } + const uuidBytes = new Uint8Array(17); + uuidBytes[0] = 9 /* uuid */; + uuidBytes.set((0, import_util_hex_encoding.fromHex)(header.value.replace(/\-/g, "")), 1); + return uuidBytes; + } + } +}; +var UUID_PATTERN = /^[a-f0-9]{8}-[a-f0-9]{4}-[a-f0-9]{4}-[a-f0-9]{4}-[a-f0-9]{12}$/; +var Int64 = class _Int64 { + constructor(bytes) { + this.bytes = bytes; + if (bytes.byteLength !== 8) { + throw new Error("Int64 buffers must be exactly 8 bytes"); + } + } + static { + __name(this, "Int64"); + } + static fromNumber(number) { + if (number > 9223372036854776e3 || number < -9223372036854776e3) { + throw new Error(`${number} is too large (or, if negative, too small) to represent as an Int64`); + } + const bytes = new Uint8Array(8); + for (let i = 7, remaining = Math.abs(Math.round(number)); i > -1 && remaining > 0; i--, remaining /= 256) { + bytes[i] = remaining; + } + if (number < 0) { + negate(bytes); + } + return new _Int64(bytes); + } + /** + * Called implicitly by infix arithmetic operators. + */ + valueOf() { + const bytes = this.bytes.slice(0); + const negative = bytes[0] & 128; + if (negative) { + negate(bytes); + } + return parseInt((0, import_util_hex_encoding.toHex)(bytes), 16) * (negative ? 
-1 : 1); + } + toString() { + return String(this.valueOf()); + } +}; +function negate(bytes) { + for (let i = 0; i < 8; i++) { + bytes[i] ^= 255; + } + for (let i = 7; i > -1; i--) { + bytes[i]++; + if (bytes[i] !== 0) + break; + } +} +__name(negate, "negate"); + +// src/headerUtil.ts +var hasHeader = /* @__PURE__ */ __name((soughtHeader, headers) => { + soughtHeader = soughtHeader.toLowerCase(); + for (const headerName of Object.keys(headers)) { + if (soughtHeader === headerName.toLowerCase()) { + return true; + } + } + return false; +}, "hasHeader"); + +// src/moveHeadersToQuery.ts +var import_protocol_http = require("@smithy/protocol-http"); +var moveHeadersToQuery = /* @__PURE__ */ __name((request, options = {}) => { + const { headers, query = {} } = import_protocol_http.HttpRequest.clone(request); + for (const name of Object.keys(headers)) { + const lname = name.toLowerCase(); + if (lname.slice(0, 6) === "x-amz-" && !options.unhoistableHeaders?.has(lname) || options.hoistableHeaders?.has(lname)) { + query[name] = headers[name]; + delete headers[name]; + } + } + return { + ...request, + headers, + query + }; +}, "moveHeadersToQuery"); + +// src/prepareRequest.ts + +var prepareRequest = /* @__PURE__ */ __name((request) => { + request = import_protocol_http.HttpRequest.clone(request); + for (const headerName of Object.keys(request.headers)) { + if (GENERATED_HEADERS.indexOf(headerName.toLowerCase()) > -1) { + delete request.headers[headerName]; + } + } + return request; +}, "prepareRequest"); + +// src/SignatureV4Base.ts + +var import_util_middleware = require("@smithy/util-middleware"); + +var import_util_utf84 = require("@smithy/util-utf8"); + +// src/getCanonicalQuery.ts +var import_util_uri_escape = require("@smithy/util-uri-escape"); +var getCanonicalQuery = /* @__PURE__ */ __name(({ query = {} }) => { + const keys = []; + const serialized = {}; + for (const key of Object.keys(query)) { + if (key.toLowerCase() === SIGNATURE_HEADER) { + continue; + } + const 
encodedKey = (0, import_util_uri_escape.escapeUri)(key); + keys.push(encodedKey); + const value = query[key]; + if (typeof value === "string") { + serialized[encodedKey] = `${encodedKey}=${(0, import_util_uri_escape.escapeUri)(value)}`; + } else if (Array.isArray(value)) { + serialized[encodedKey] = value.slice(0).reduce((encoded, value2) => encoded.concat([`${encodedKey}=${(0, import_util_uri_escape.escapeUri)(value2)}`]), []).sort().join("&"); + } + } + return keys.sort().map((key) => serialized[key]).filter((serialized2) => serialized2).join("&"); +}, "getCanonicalQuery"); + +// src/utilDate.ts +var iso8601 = /* @__PURE__ */ __name((time) => toDate(time).toISOString().replace(/\.\d{3}Z$/, "Z"), "iso8601"); +var toDate = /* @__PURE__ */ __name((time) => { + if (typeof time === "number") { + return new Date(time * 1e3); + } + if (typeof time === "string") { + if (Number(time)) { + return new Date(Number(time) * 1e3); + } + return new Date(time); + } + return time; +}, "toDate"); + +// src/SignatureV4Base.ts +var SignatureV4Base = class { + static { + __name(this, "SignatureV4Base"); + } + constructor({ + applyChecksum, + credentials, + region, + service, + sha256, + uriEscapePath = true + }) { + this.service = service; + this.sha256 = sha256; + this.uriEscapePath = uriEscapePath; + this.applyChecksum = typeof applyChecksum === "boolean" ? 
applyChecksum : true; + this.regionProvider = (0, import_util_middleware.normalizeProvider)(region); + this.credentialProvider = (0, import_util_middleware.normalizeProvider)(credentials); + } + createCanonicalRequest(request, canonicalHeaders, payloadHash) { + const sortedHeaders = Object.keys(canonicalHeaders).sort(); + return `${request.method} +${this.getCanonicalPath(request)} +${getCanonicalQuery(request)} +${sortedHeaders.map((name) => `${name}:${canonicalHeaders[name]}`).join("\n")} + +${sortedHeaders.join(";")} +${payloadHash}`; + } + async createStringToSign(longDate, credentialScope, canonicalRequest, algorithmIdentifier) { + const hash = new this.sha256(); + hash.update((0, import_util_utf84.toUint8Array)(canonicalRequest)); + const hashedRequest = await hash.digest(); + return `${algorithmIdentifier} +${longDate} +${credentialScope} +${(0, import_util_hex_encoding.toHex)(hashedRequest)}`; + } + getCanonicalPath({ path }) { + if (this.uriEscapePath) { + const normalizedPathSegments = []; + for (const pathSegment of path.split("/")) { + if (pathSegment?.length === 0) + continue; + if (pathSegment === ".") + continue; + if (pathSegment === "..") { + normalizedPathSegments.pop(); + } else { + normalizedPathSegments.push(pathSegment); + } + } + const normalizedPath = `${path?.startsWith("/") ? "/" : ""}${normalizedPathSegments.join("/")}${normalizedPathSegments.length > 0 && path?.endsWith("/") ? 
"/" : ""}`; + const doubleEncoded = (0, import_util_uri_escape.escapeUri)(normalizedPath); + return doubleEncoded.replace(/%2F/g, "/"); + } + return path; + } + validateResolvedCredentials(credentials) { + if (typeof credentials !== "object" || // @ts-expect-error: Property 'accessKeyId' does not exist on type 'object'.ts(2339) + typeof credentials.accessKeyId !== "string" || // @ts-expect-error: Property 'secretAccessKey' does not exist on type 'object'.ts(2339) + typeof credentials.secretAccessKey !== "string") { + throw new Error("Resolved credential object is not valid"); + } + } + formatDate(now) { + const longDate = iso8601(now).replace(/[\-:]/g, ""); + return { + longDate, + shortDate: longDate.slice(0, 8) + }; + } + getCanonicalHeaderList(headers) { + return Object.keys(headers).sort().join(";"); + } +}; + +// src/SignatureV4.ts +var SignatureV4 = class extends SignatureV4Base { + constructor({ + applyChecksum, + credentials, + region, + service, + sha256, + uriEscapePath = true + }) { + super({ + applyChecksum, + credentials, + region, + service, + sha256, + uriEscapePath + }); + this.headerFormatter = new HeaderFormatter(); + } + static { + __name(this, "SignatureV4"); + } + async presign(originalRequest, options = {}) { + const { + signingDate = /* @__PURE__ */ new Date(), + expiresIn = 3600, + unsignableHeaders, + unhoistableHeaders, + signableHeaders, + hoistableHeaders, + signingRegion, + signingService + } = options; + const credentials = await this.credentialProvider(); + this.validateResolvedCredentials(credentials); + const region = signingRegion ?? await this.regionProvider(); + const { longDate, shortDate } = this.formatDate(signingDate); + if (expiresIn > MAX_PRESIGNED_TTL) { + return Promise.reject( + "Signature version 4 presigned URLs must have an expiration date less than one week in the future" + ); + } + const scope = createScope(shortDate, region, signingService ?? 
this.service); + const request = moveHeadersToQuery(prepareRequest(originalRequest), { unhoistableHeaders, hoistableHeaders }); + if (credentials.sessionToken) { + request.query[TOKEN_QUERY_PARAM] = credentials.sessionToken; + } + request.query[ALGORITHM_QUERY_PARAM] = ALGORITHM_IDENTIFIER; + request.query[CREDENTIAL_QUERY_PARAM] = `${credentials.accessKeyId}/${scope}`; + request.query[AMZ_DATE_QUERY_PARAM] = longDate; + request.query[EXPIRES_QUERY_PARAM] = expiresIn.toString(10); + const canonicalHeaders = getCanonicalHeaders(request, unsignableHeaders, signableHeaders); + request.query[SIGNED_HEADERS_QUERY_PARAM] = this.getCanonicalHeaderList(canonicalHeaders); + request.query[SIGNATURE_QUERY_PARAM] = await this.getSignature( + longDate, + scope, + this.getSigningKey(credentials, region, shortDate, signingService), + this.createCanonicalRequest(request, canonicalHeaders, await getPayloadHash(originalRequest, this.sha256)) + ); + return request; + } + async sign(toSign, options) { + if (typeof toSign === "string") { + return this.signString(toSign, options); + } else if (toSign.headers && toSign.payload) { + return this.signEvent(toSign, options); + } else if (toSign.message) { + return this.signMessage(toSign, options); + } else { + return this.signRequest(toSign, options); + } + } + async signEvent({ headers, payload }, { signingDate = /* @__PURE__ */ new Date(), priorSignature, signingRegion, signingService }) { + const region = signingRegion ?? await this.regionProvider(); + const { shortDate, longDate } = this.formatDate(signingDate); + const scope = createScope(shortDate, region, signingService ?? 
this.service); + const hashedPayload = await getPayloadHash({ headers: {}, body: payload }, this.sha256); + const hash = new this.sha256(); + hash.update(headers); + const hashedHeaders = (0, import_util_hex_encoding.toHex)(await hash.digest()); + const stringToSign = [ + EVENT_ALGORITHM_IDENTIFIER, + longDate, + scope, + priorSignature, + hashedHeaders, + hashedPayload + ].join("\n"); + return this.signString(stringToSign, { signingDate, signingRegion: region, signingService }); + } + async signMessage(signableMessage, { signingDate = /* @__PURE__ */ new Date(), signingRegion, signingService }) { + const promise = this.signEvent( + { + headers: this.headerFormatter.format(signableMessage.message.headers), + payload: signableMessage.message.body + }, + { + signingDate, + signingRegion, + signingService, + priorSignature: signableMessage.priorSignature + } + ); + return promise.then((signature) => { + return { message: signableMessage.message, signature }; + }); + } + async signString(stringToSign, { signingDate = /* @__PURE__ */ new Date(), signingRegion, signingService } = {}) { + const credentials = await this.credentialProvider(); + this.validateResolvedCredentials(credentials); + const region = signingRegion ?? await this.regionProvider(); + const { shortDate } = this.formatDate(signingDate); + const hash = new this.sha256(await this.getSigningKey(credentials, region, shortDate, signingService)); + hash.update((0, import_util_utf85.toUint8Array)(stringToSign)); + return (0, import_util_hex_encoding.toHex)(await hash.digest()); + } + async signRequest(requestToSign, { + signingDate = /* @__PURE__ */ new Date(), + signableHeaders, + unsignableHeaders, + signingRegion, + signingService + } = {}) { + const credentials = await this.credentialProvider(); + this.validateResolvedCredentials(credentials); + const region = signingRegion ?? 
await this.regionProvider(); + const request = prepareRequest(requestToSign); + const { longDate, shortDate } = this.formatDate(signingDate); + const scope = createScope(shortDate, region, signingService ?? this.service); + request.headers[AMZ_DATE_HEADER] = longDate; + if (credentials.sessionToken) { + request.headers[TOKEN_HEADER] = credentials.sessionToken; + } + const payloadHash = await getPayloadHash(request, this.sha256); + if (!hasHeader(SHA256_HEADER, request.headers) && this.applyChecksum) { + request.headers[SHA256_HEADER] = payloadHash; + } + const canonicalHeaders = getCanonicalHeaders(request, unsignableHeaders, signableHeaders); + const signature = await this.getSignature( + longDate, + scope, + this.getSigningKey(credentials, region, shortDate, signingService), + this.createCanonicalRequest(request, canonicalHeaders, payloadHash) + ); + request.headers[AUTH_HEADER] = `${ALGORITHM_IDENTIFIER} Credential=${credentials.accessKeyId}/${scope}, SignedHeaders=${this.getCanonicalHeaderList(canonicalHeaders)}, Signature=${signature}`; + return request; + } + async getSignature(longDate, credentialScope, keyPromise, canonicalRequest) { + const stringToSign = await this.createStringToSign( + longDate, + credentialScope, + canonicalRequest, + ALGORITHM_IDENTIFIER + ); + const hash = new this.sha256(await keyPromise); + hash.update((0, import_util_utf85.toUint8Array)(stringToSign)); + return (0, import_util_hex_encoding.toHex)(await hash.digest()); + } + getSigningKey(credentials, region, shortDate, service) { + return getSigningKey(this.sha256, credentials, shortDate, region, service || this.service); + } +}; + +// src/signature-v4a-container.ts +var signatureV4aContainer = { + SignatureV4a: null +}; +// Annotate the CommonJS export names for ESM import in node: + +0 && (module.exports = { + getCanonicalHeaders, + getCanonicalQuery, + getPayloadHash, + moveHeadersToQuery, + prepareRequest, + SignatureV4Base, + hasHeader, + SignatureV4, + ALGORITHM_QUERY_PARAM, 
+ CREDENTIAL_QUERY_PARAM, + AMZ_DATE_QUERY_PARAM, + SIGNED_HEADERS_QUERY_PARAM, + EXPIRES_QUERY_PARAM, + SIGNATURE_QUERY_PARAM, + TOKEN_QUERY_PARAM, + REGION_SET_PARAM, + AUTH_HEADER, + AMZ_DATE_HEADER, + DATE_HEADER, + GENERATED_HEADERS, + SIGNATURE_HEADER, + SHA256_HEADER, + TOKEN_HEADER, + HOST_HEADER, + ALWAYS_UNSIGNABLE_HEADERS, + PROXY_HEADER_PATTERN, + SEC_HEADER_PATTERN, + UNSIGNABLE_PATTERNS, + ALGORITHM_IDENTIFIER, + ALGORITHM_IDENTIFIER_V4A, + EVENT_ALGORITHM_IDENTIFIER, + UNSIGNED_PAYLOAD, + MAX_CACHE_SIZE, + KEY_TYPE_IDENTIFIER, + MAX_PRESIGNED_TTL, + createScope, + getSigningKey, + clearCredentialCache, + signatureV4aContainer +}); + diff --git a/amplify/functions/downloadDocument/node_modules/@smithy/signature-v4/dist-cjs/moveHeadersToQuery.js b/amplify/functions/downloadDocument/node_modules/@smithy/signature-v4/dist-cjs/moveHeadersToQuery.js new file mode 100644 index 0000000..532e610 --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@smithy/signature-v4/dist-cjs/moveHeadersToQuery.js @@ -0,0 +1 @@ +module.exports = require("./index.js"); \ No newline at end of file diff --git a/amplify/functions/downloadDocument/node_modules/@smithy/signature-v4/dist-cjs/prepareRequest.js b/amplify/functions/downloadDocument/node_modules/@smithy/signature-v4/dist-cjs/prepareRequest.js new file mode 100644 index 0000000..532e610 --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@smithy/signature-v4/dist-cjs/prepareRequest.js @@ -0,0 +1 @@ +module.exports = require("./index.js"); \ No newline at end of file diff --git a/amplify/functions/downloadDocument/node_modules/@smithy/signature-v4/dist-cjs/signature-v4a-container.js b/amplify/functions/downloadDocument/node_modules/@smithy/signature-v4/dist-cjs/signature-v4a-container.js new file mode 100644 index 0000000..532e610 --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@smithy/signature-v4/dist-cjs/signature-v4a-container.js @@ -0,0 +1 @@ +module.exports = 
require("./index.js"); \ No newline at end of file diff --git a/amplify/functions/downloadDocument/node_modules/@smithy/signature-v4/dist-cjs/suite.fixture.js b/amplify/functions/downloadDocument/node_modules/@smithy/signature-v4/dist-cjs/suite.fixture.js new file mode 100644 index 0000000..532e610 --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@smithy/signature-v4/dist-cjs/suite.fixture.js @@ -0,0 +1 @@ +module.exports = require("./index.js"); \ No newline at end of file diff --git a/amplify/functions/downloadDocument/node_modules/@smithy/signature-v4/dist-cjs/utilDate.js b/amplify/functions/downloadDocument/node_modules/@smithy/signature-v4/dist-cjs/utilDate.js new file mode 100644 index 0000000..532e610 --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@smithy/signature-v4/dist-cjs/utilDate.js @@ -0,0 +1 @@ +module.exports = require("./index.js"); \ No newline at end of file diff --git a/amplify/functions/downloadDocument/node_modules/@smithy/signature-v4/dist-es/HeaderFormatter.js b/amplify/functions/downloadDocument/node_modules/@smithy/signature-v4/dist-es/HeaderFormatter.js new file mode 100644 index 0000000..4edc4b4 --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@smithy/signature-v4/dist-es/HeaderFormatter.js @@ -0,0 +1,125 @@ +import { fromHex, toHex } from "@smithy/util-hex-encoding"; +import { fromUtf8 } from "@smithy/util-utf8"; +export class HeaderFormatter { + format(headers) { + const chunks = []; + for (const headerName of Object.keys(headers)) { + const bytes = fromUtf8(headerName); + chunks.push(Uint8Array.from([bytes.byteLength]), bytes, this.formatHeaderValue(headers[headerName])); + } + const out = new Uint8Array(chunks.reduce((carry, bytes) => carry + bytes.byteLength, 0)); + let position = 0; + for (const chunk of chunks) { + out.set(chunk, position); + position += chunk.byteLength; + } + return out; + } + formatHeaderValue(header) { + switch (header.type) { + case "boolean": + 
return Uint8Array.from([header.value ? 0 : 1]); + case "byte": + return Uint8Array.from([2, header.value]); + case "short": + const shortView = new DataView(new ArrayBuffer(3)); + shortView.setUint8(0, 3); + shortView.setInt16(1, header.value, false); + return new Uint8Array(shortView.buffer); + case "integer": + const intView = new DataView(new ArrayBuffer(5)); + intView.setUint8(0, 4); + intView.setInt32(1, header.value, false); + return new Uint8Array(intView.buffer); + case "long": + const longBytes = new Uint8Array(9); + longBytes[0] = 5; + longBytes.set(header.value.bytes, 1); + return longBytes; + case "binary": + const binView = new DataView(new ArrayBuffer(3 + header.value.byteLength)); + binView.setUint8(0, 6); + binView.setUint16(1, header.value.byteLength, false); + const binBytes = new Uint8Array(binView.buffer); + binBytes.set(header.value, 3); + return binBytes; + case "string": + const utf8Bytes = fromUtf8(header.value); + const strView = new DataView(new ArrayBuffer(3 + utf8Bytes.byteLength)); + strView.setUint8(0, 7); + strView.setUint16(1, utf8Bytes.byteLength, false); + const strBytes = new Uint8Array(strView.buffer); + strBytes.set(utf8Bytes, 3); + return strBytes; + case "timestamp": + const tsBytes = new Uint8Array(9); + tsBytes[0] = 8; + tsBytes.set(Int64.fromNumber(header.value.valueOf()).bytes, 1); + return tsBytes; + case "uuid": + if (!UUID_PATTERN.test(header.value)) { + throw new Error(`Invalid UUID received: ${header.value}`); + } + const uuidBytes = new Uint8Array(17); + uuidBytes[0] = 9; + uuidBytes.set(fromHex(header.value.replace(/\-/g, "")), 1); + return uuidBytes; + } + } +} +var HEADER_VALUE_TYPE; +(function (HEADER_VALUE_TYPE) { + HEADER_VALUE_TYPE[HEADER_VALUE_TYPE["boolTrue"] = 0] = "boolTrue"; + HEADER_VALUE_TYPE[HEADER_VALUE_TYPE["boolFalse"] = 1] = "boolFalse"; + HEADER_VALUE_TYPE[HEADER_VALUE_TYPE["byte"] = 2] = "byte"; + HEADER_VALUE_TYPE[HEADER_VALUE_TYPE["short"] = 3] = "short"; + 
HEADER_VALUE_TYPE[HEADER_VALUE_TYPE["integer"] = 4] = "integer"; + HEADER_VALUE_TYPE[HEADER_VALUE_TYPE["long"] = 5] = "long"; + HEADER_VALUE_TYPE[HEADER_VALUE_TYPE["byteArray"] = 6] = "byteArray"; + HEADER_VALUE_TYPE[HEADER_VALUE_TYPE["string"] = 7] = "string"; + HEADER_VALUE_TYPE[HEADER_VALUE_TYPE["timestamp"] = 8] = "timestamp"; + HEADER_VALUE_TYPE[HEADER_VALUE_TYPE["uuid"] = 9] = "uuid"; +})(HEADER_VALUE_TYPE || (HEADER_VALUE_TYPE = {})); +const UUID_PATTERN = /^[a-f0-9]{8}-[a-f0-9]{4}-[a-f0-9]{4}-[a-f0-9]{4}-[a-f0-9]{12}$/; +export class Int64 { + constructor(bytes) { + this.bytes = bytes; + if (bytes.byteLength !== 8) { + throw new Error("Int64 buffers must be exactly 8 bytes"); + } + } + static fromNumber(number) { + if (number > 9223372036854776000 || number < -9223372036854776000) { + throw new Error(`${number} is too large (or, if negative, too small) to represent as an Int64`); + } + const bytes = new Uint8Array(8); + for (let i = 7, remaining = Math.abs(Math.round(number)); i > -1 && remaining > 0; i--, remaining /= 256) { + bytes[i] = remaining; + } + if (number < 0) { + negate(bytes); + } + return new Int64(bytes); + } + valueOf() { + const bytes = this.bytes.slice(0); + const negative = bytes[0] & 0b10000000; + if (negative) { + negate(bytes); + } + return parseInt(toHex(bytes), 16) * (negative ? 
-1 : 1); + } + toString() { + return String(this.valueOf()); + } +} +function negate(bytes) { + for (let i = 0; i < 8; i++) { + bytes[i] ^= 0xff; + } + for (let i = 7; i > -1; i--) { + bytes[i]++; + if (bytes[i] !== 0) + break; + } +} diff --git a/amplify/functions/downloadDocument/node_modules/@smithy/signature-v4/dist-es/SignatureV4.js b/amplify/functions/downloadDocument/node_modules/@smithy/signature-v4/dist-es/SignatureV4.js new file mode 100644 index 0000000..d149132 --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@smithy/signature-v4/dist-es/SignatureV4.js @@ -0,0 +1,135 @@ +import { toHex } from "@smithy/util-hex-encoding"; +import { toUint8Array } from "@smithy/util-utf8"; +import { ALGORITHM_IDENTIFIER, ALGORITHM_QUERY_PARAM, AMZ_DATE_HEADER, AMZ_DATE_QUERY_PARAM, AUTH_HEADER, CREDENTIAL_QUERY_PARAM, EVENT_ALGORITHM_IDENTIFIER, EXPIRES_QUERY_PARAM, MAX_PRESIGNED_TTL, SHA256_HEADER, SIGNATURE_QUERY_PARAM, SIGNED_HEADERS_QUERY_PARAM, TOKEN_HEADER, TOKEN_QUERY_PARAM, } from "./constants"; +import { createScope, getSigningKey } from "./credentialDerivation"; +import { getCanonicalHeaders } from "./getCanonicalHeaders"; +import { getPayloadHash } from "./getPayloadHash"; +import { HeaderFormatter } from "./HeaderFormatter"; +import { hasHeader } from "./headerUtil"; +import { moveHeadersToQuery } from "./moveHeadersToQuery"; +import { prepareRequest } from "./prepareRequest"; +import { SignatureV4Base } from "./SignatureV4Base"; +export class SignatureV4 extends SignatureV4Base { + constructor({ applyChecksum, credentials, region, service, sha256, uriEscapePath = true, }) { + super({ + applyChecksum, + credentials, + region, + service, + sha256, + uriEscapePath, + }); + this.headerFormatter = new HeaderFormatter(); + } + async presign(originalRequest, options = {}) { + const { signingDate = new Date(), expiresIn = 3600, unsignableHeaders, unhoistableHeaders, signableHeaders, hoistableHeaders, signingRegion, signingService, } = options; + 
const credentials = await this.credentialProvider(); + this.validateResolvedCredentials(credentials); + const region = signingRegion ?? (await this.regionProvider()); + const { longDate, shortDate } = this.formatDate(signingDate); + if (expiresIn > MAX_PRESIGNED_TTL) { + return Promise.reject("Signature version 4 presigned URLs" + " must have an expiration date less than one week in" + " the future"); + } + const scope = createScope(shortDate, region, signingService ?? this.service); + const request = moveHeadersToQuery(prepareRequest(originalRequest), { unhoistableHeaders, hoistableHeaders }); + if (credentials.sessionToken) { + request.query[TOKEN_QUERY_PARAM] = credentials.sessionToken; + } + request.query[ALGORITHM_QUERY_PARAM] = ALGORITHM_IDENTIFIER; + request.query[CREDENTIAL_QUERY_PARAM] = `${credentials.accessKeyId}/${scope}`; + request.query[AMZ_DATE_QUERY_PARAM] = longDate; + request.query[EXPIRES_QUERY_PARAM] = expiresIn.toString(10); + const canonicalHeaders = getCanonicalHeaders(request, unsignableHeaders, signableHeaders); + request.query[SIGNED_HEADERS_QUERY_PARAM] = this.getCanonicalHeaderList(canonicalHeaders); + request.query[SIGNATURE_QUERY_PARAM] = await this.getSignature(longDate, scope, this.getSigningKey(credentials, region, shortDate, signingService), this.createCanonicalRequest(request, canonicalHeaders, await getPayloadHash(originalRequest, this.sha256))); + return request; + } + async sign(toSign, options) { + if (typeof toSign === "string") { + return this.signString(toSign, options); + } + else if (toSign.headers && toSign.payload) { + return this.signEvent(toSign, options); + } + else if (toSign.message) { + return this.signMessage(toSign, options); + } + else { + return this.signRequest(toSign, options); + } + } + async signEvent({ headers, payload }, { signingDate = new Date(), priorSignature, signingRegion, signingService }) { + const region = signingRegion ?? 
(await this.regionProvider()); + const { shortDate, longDate } = this.formatDate(signingDate); + const scope = createScope(shortDate, region, signingService ?? this.service); + const hashedPayload = await getPayloadHash({ headers: {}, body: payload }, this.sha256); + const hash = new this.sha256(); + hash.update(headers); + const hashedHeaders = toHex(await hash.digest()); + const stringToSign = [ + EVENT_ALGORITHM_IDENTIFIER, + longDate, + scope, + priorSignature, + hashedHeaders, + hashedPayload, + ].join("\n"); + return this.signString(stringToSign, { signingDate, signingRegion: region, signingService }); + } + async signMessage(signableMessage, { signingDate = new Date(), signingRegion, signingService }) { + const promise = this.signEvent({ + headers: this.headerFormatter.format(signableMessage.message.headers), + payload: signableMessage.message.body, + }, { + signingDate, + signingRegion, + signingService, + priorSignature: signableMessage.priorSignature, + }); + return promise.then((signature) => { + return { message: signableMessage.message, signature }; + }); + } + async signString(stringToSign, { signingDate = new Date(), signingRegion, signingService } = {}) { + const credentials = await this.credentialProvider(); + this.validateResolvedCredentials(credentials); + const region = signingRegion ?? (await this.regionProvider()); + const { shortDate } = this.formatDate(signingDate); + const hash = new this.sha256(await this.getSigningKey(credentials, region, shortDate, signingService)); + hash.update(toUint8Array(stringToSign)); + return toHex(await hash.digest()); + } + async signRequest(requestToSign, { signingDate = new Date(), signableHeaders, unsignableHeaders, signingRegion, signingService, } = {}) { + const credentials = await this.credentialProvider(); + this.validateResolvedCredentials(credentials); + const region = signingRegion ?? 
(await this.regionProvider()); + const request = prepareRequest(requestToSign); + const { longDate, shortDate } = this.formatDate(signingDate); + const scope = createScope(shortDate, region, signingService ?? this.service); + request.headers[AMZ_DATE_HEADER] = longDate; + if (credentials.sessionToken) { + request.headers[TOKEN_HEADER] = credentials.sessionToken; + } + const payloadHash = await getPayloadHash(request, this.sha256); + if (!hasHeader(SHA256_HEADER, request.headers) && this.applyChecksum) { + request.headers[SHA256_HEADER] = payloadHash; + } + const canonicalHeaders = getCanonicalHeaders(request, unsignableHeaders, signableHeaders); + const signature = await this.getSignature(longDate, scope, this.getSigningKey(credentials, region, shortDate, signingService), this.createCanonicalRequest(request, canonicalHeaders, payloadHash)); + request.headers[AUTH_HEADER] = + `${ALGORITHM_IDENTIFIER} ` + + `Credential=${credentials.accessKeyId}/${scope}, ` + + `SignedHeaders=${this.getCanonicalHeaderList(canonicalHeaders)}, ` + + `Signature=${signature}`; + return request; + } + async getSignature(longDate, credentialScope, keyPromise, canonicalRequest) { + const stringToSign = await this.createStringToSign(longDate, credentialScope, canonicalRequest, ALGORITHM_IDENTIFIER); + const hash = new this.sha256(await keyPromise); + hash.update(toUint8Array(stringToSign)); + return toHex(await hash.digest()); + } + getSigningKey(credentials, region, shortDate, service) { + return getSigningKey(this.sha256, credentials, shortDate, region, service || this.service); + } +} diff --git a/amplify/functions/downloadDocument/node_modules/@smithy/signature-v4/dist-es/SignatureV4Base.js b/amplify/functions/downloadDocument/node_modules/@smithy/signature-v4/dist-es/SignatureV4Base.js new file mode 100644 index 0000000..857ff0c --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@smithy/signature-v4/dist-es/SignatureV4Base.js @@ -0,0 +1,73 @@ +import { toHex } from 
"@smithy/util-hex-encoding"; +import { normalizeProvider } from "@smithy/util-middleware"; +import { escapeUri } from "@smithy/util-uri-escape"; +import { toUint8Array } from "@smithy/util-utf8"; +import { getCanonicalQuery } from "./getCanonicalQuery"; +import { iso8601 } from "./utilDate"; +export class SignatureV4Base { + constructor({ applyChecksum, credentials, region, service, sha256, uriEscapePath = true, }) { + this.service = service; + this.sha256 = sha256; + this.uriEscapePath = uriEscapePath; + this.applyChecksum = typeof applyChecksum === "boolean" ? applyChecksum : true; + this.regionProvider = normalizeProvider(region); + this.credentialProvider = normalizeProvider(credentials); + } + createCanonicalRequest(request, canonicalHeaders, payloadHash) { + const sortedHeaders = Object.keys(canonicalHeaders).sort(); + return `${request.method} +${this.getCanonicalPath(request)} +${getCanonicalQuery(request)} +${sortedHeaders.map((name) => `${name}:${canonicalHeaders[name]}`).join("\n")} + +${sortedHeaders.join(";")} +${payloadHash}`; + } + async createStringToSign(longDate, credentialScope, canonicalRequest, algorithmIdentifier) { + const hash = new this.sha256(); + hash.update(toUint8Array(canonicalRequest)); + const hashedRequest = await hash.digest(); + return `${algorithmIdentifier} +${longDate} +${credentialScope} +${toHex(hashedRequest)}`; + } + getCanonicalPath({ path }) { + if (this.uriEscapePath) { + const normalizedPathSegments = []; + for (const pathSegment of path.split("/")) { + if (pathSegment?.length === 0) + continue; + if (pathSegment === ".") + continue; + if (pathSegment === "..") { + normalizedPathSegments.pop(); + } + else { + normalizedPathSegments.push(pathSegment); + } + } + const normalizedPath = `${path?.startsWith("/") ? "/" : ""}${normalizedPathSegments.join("/")}${normalizedPathSegments.length > 0 && path?.endsWith("/") ? 
"/" : ""}`; + const doubleEncoded = escapeUri(normalizedPath); + return doubleEncoded.replace(/%2F/g, "/"); + } + return path; + } + validateResolvedCredentials(credentials) { + if (typeof credentials !== "object" || + typeof credentials.accessKeyId !== "string" || + typeof credentials.secretAccessKey !== "string") { + throw new Error("Resolved credential object is not valid"); + } + } + formatDate(now) { + const longDate = iso8601(now).replace(/[\-:]/g, ""); + return { + longDate, + shortDate: longDate.slice(0, 8), + }; + } + getCanonicalHeaderList(headers) { + return Object.keys(headers).sort().join(";"); + } +} diff --git a/amplify/functions/downloadDocument/node_modules/@smithy/signature-v4/dist-es/constants.js b/amplify/functions/downloadDocument/node_modules/@smithy/signature-v4/dist-es/constants.js new file mode 100644 index 0000000..602728a --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@smithy/signature-v4/dist-es/constants.js @@ -0,0 +1,43 @@ +export const ALGORITHM_QUERY_PARAM = "X-Amz-Algorithm"; +export const CREDENTIAL_QUERY_PARAM = "X-Amz-Credential"; +export const AMZ_DATE_QUERY_PARAM = "X-Amz-Date"; +export const SIGNED_HEADERS_QUERY_PARAM = "X-Amz-SignedHeaders"; +export const EXPIRES_QUERY_PARAM = "X-Amz-Expires"; +export const SIGNATURE_QUERY_PARAM = "X-Amz-Signature"; +export const TOKEN_QUERY_PARAM = "X-Amz-Security-Token"; +export const REGION_SET_PARAM = "X-Amz-Region-Set"; +export const AUTH_HEADER = "authorization"; +export const AMZ_DATE_HEADER = AMZ_DATE_QUERY_PARAM.toLowerCase(); +export const DATE_HEADER = "date"; +export const GENERATED_HEADERS = [AUTH_HEADER, AMZ_DATE_HEADER, DATE_HEADER]; +export const SIGNATURE_HEADER = SIGNATURE_QUERY_PARAM.toLowerCase(); +export const SHA256_HEADER = "x-amz-content-sha256"; +export const TOKEN_HEADER = TOKEN_QUERY_PARAM.toLowerCase(); +export const HOST_HEADER = "host"; +export const ALWAYS_UNSIGNABLE_HEADERS = { + authorization: true, + "cache-control": true, + connection: 
true, + expect: true, + from: true, + "keep-alive": true, + "max-forwards": true, + pragma: true, + referer: true, + te: true, + trailer: true, + "transfer-encoding": true, + upgrade: true, + "user-agent": true, + "x-amzn-trace-id": true, +}; +export const PROXY_HEADER_PATTERN = /^proxy-/; +export const SEC_HEADER_PATTERN = /^sec-/; +export const UNSIGNABLE_PATTERNS = [/^proxy-/i, /^sec-/i]; +export const ALGORITHM_IDENTIFIER = "AWS4-HMAC-SHA256"; +export const ALGORITHM_IDENTIFIER_V4A = "AWS4-ECDSA-P256-SHA256"; +export const EVENT_ALGORITHM_IDENTIFIER = "AWS4-HMAC-SHA256-PAYLOAD"; +export const UNSIGNED_PAYLOAD = "UNSIGNED-PAYLOAD"; +export const MAX_CACHE_SIZE = 50; +export const KEY_TYPE_IDENTIFIER = "aws4_request"; +export const MAX_PRESIGNED_TTL = 60 * 60 * 24 * 7; diff --git a/amplify/functions/downloadDocument/node_modules/@smithy/signature-v4/dist-es/credentialDerivation.js b/amplify/functions/downloadDocument/node_modules/@smithy/signature-v4/dist-es/credentialDerivation.js new file mode 100644 index 0000000..b16ab8c --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@smithy/signature-v4/dist-es/credentialDerivation.js @@ -0,0 +1,33 @@ +import { toHex } from "@smithy/util-hex-encoding"; +import { toUint8Array } from "@smithy/util-utf8"; +import { KEY_TYPE_IDENTIFIER, MAX_CACHE_SIZE } from "./constants"; +const signingKeyCache = {}; +const cacheQueue = []; +export const createScope = (shortDate, region, service) => `${shortDate}/${region}/${service}/${KEY_TYPE_IDENTIFIER}`; +export const getSigningKey = async (sha256Constructor, credentials, shortDate, region, service) => { + const credsHash = await hmac(sha256Constructor, credentials.secretAccessKey, credentials.accessKeyId); + const cacheKey = `${shortDate}:${region}:${service}:${toHex(credsHash)}:${credentials.sessionToken}`; + if (cacheKey in signingKeyCache) { + return signingKeyCache[cacheKey]; + } + cacheQueue.push(cacheKey); + while (cacheQueue.length > MAX_CACHE_SIZE) { + delete 
signingKeyCache[cacheQueue.shift()]; + } + let key = `AWS4${credentials.secretAccessKey}`; + for (const signable of [shortDate, region, service, KEY_TYPE_IDENTIFIER]) { + key = await hmac(sha256Constructor, key, signable); + } + return (signingKeyCache[cacheKey] = key); +}; +export const clearCredentialCache = () => { + cacheQueue.length = 0; + Object.keys(signingKeyCache).forEach((cacheKey) => { + delete signingKeyCache[cacheKey]; + }); +}; +const hmac = (ctor, secret, data) => { + const hash = new ctor(secret); + hash.update(toUint8Array(data)); + return hash.digest(); +}; diff --git a/amplify/functions/downloadDocument/node_modules/@smithy/signature-v4/dist-es/getCanonicalHeaders.js b/amplify/functions/downloadDocument/node_modules/@smithy/signature-v4/dist-es/getCanonicalHeaders.js new file mode 100644 index 0000000..3321125 --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@smithy/signature-v4/dist-es/getCanonicalHeaders.js @@ -0,0 +1,20 @@ +import { ALWAYS_UNSIGNABLE_HEADERS, PROXY_HEADER_PATTERN, SEC_HEADER_PATTERN } from "./constants"; +export const getCanonicalHeaders = ({ headers }, unsignableHeaders, signableHeaders) => { + const canonical = {}; + for (const headerName of Object.keys(headers).sort()) { + if (headers[headerName] == undefined) { + continue; + } + const canonicalHeaderName = headerName.toLowerCase(); + if (canonicalHeaderName in ALWAYS_UNSIGNABLE_HEADERS || + unsignableHeaders?.has(canonicalHeaderName) || + PROXY_HEADER_PATTERN.test(canonicalHeaderName) || + SEC_HEADER_PATTERN.test(canonicalHeaderName)) { + if (!signableHeaders || (signableHeaders && !signableHeaders.has(canonicalHeaderName))) { + continue; + } + } + canonical[canonicalHeaderName] = headers[headerName].trim().replace(/\s+/g, " "); + } + return canonical; +}; diff --git a/amplify/functions/downloadDocument/node_modules/@smithy/signature-v4/dist-es/getCanonicalQuery.js 
b/amplify/functions/downloadDocument/node_modules/@smithy/signature-v4/dist-es/getCanonicalQuery.js new file mode 100644 index 0000000..0623f1a --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@smithy/signature-v4/dist-es/getCanonicalQuery.js @@ -0,0 +1,29 @@ +import { escapeUri } from "@smithy/util-uri-escape"; +import { SIGNATURE_HEADER } from "./constants"; +export const getCanonicalQuery = ({ query = {} }) => { + const keys = []; + const serialized = {}; + for (const key of Object.keys(query)) { + if (key.toLowerCase() === SIGNATURE_HEADER) { + continue; + } + const encodedKey = escapeUri(key); + keys.push(encodedKey); + const value = query[key]; + if (typeof value === "string") { + serialized[encodedKey] = `${encodedKey}=${escapeUri(value)}`; + } + else if (Array.isArray(value)) { + serialized[encodedKey] = value + .slice(0) + .reduce((encoded, value) => encoded.concat([`${encodedKey}=${escapeUri(value)}`]), []) + .sort() + .join("&"); + } + } + return keys + .sort() + .map((key) => serialized[key]) + .filter((serialized) => serialized) + .join("&"); +}; diff --git a/amplify/functions/downloadDocument/node_modules/@smithy/signature-v4/dist-es/getPayloadHash.js b/amplify/functions/downloadDocument/node_modules/@smithy/signature-v4/dist-es/getPayloadHash.js new file mode 100644 index 0000000..cba165c --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@smithy/signature-v4/dist-es/getPayloadHash.js @@ -0,0 +1,20 @@ +import { isArrayBuffer } from "@smithy/is-array-buffer"; +import { toHex } from "@smithy/util-hex-encoding"; +import { toUint8Array } from "@smithy/util-utf8"; +import { SHA256_HEADER, UNSIGNED_PAYLOAD } from "./constants"; +export const getPayloadHash = async ({ headers, body }, hashConstructor) => { + for (const headerName of Object.keys(headers)) { + if (headerName.toLowerCase() === SHA256_HEADER) { + return headers[headerName]; + } + } + if (body == undefined) { + return 
"e3b0c44298fc1c149afbf4c8996fb92427ae41e4649b934ca495991b7852b855"; + } + else if (typeof body === "string" || ArrayBuffer.isView(body) || isArrayBuffer(body)) { + const hashCtor = new hashConstructor(); + hashCtor.update(toUint8Array(body)); + return toHex(await hashCtor.digest()); + } + return UNSIGNED_PAYLOAD; +}; diff --git a/amplify/functions/downloadDocument/node_modules/@smithy/signature-v4/dist-es/headerUtil.js b/amplify/functions/downloadDocument/node_modules/@smithy/signature-v4/dist-es/headerUtil.js new file mode 100644 index 0000000..e502cbb --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@smithy/signature-v4/dist-es/headerUtil.js @@ -0,0 +1,26 @@ +export const hasHeader = (soughtHeader, headers) => { + soughtHeader = soughtHeader.toLowerCase(); + for (const headerName of Object.keys(headers)) { + if (soughtHeader === headerName.toLowerCase()) { + return true; + } + } + return false; +}; +export const getHeaderValue = (soughtHeader, headers) => { + soughtHeader = soughtHeader.toLowerCase(); + for (const headerName of Object.keys(headers)) { + if (soughtHeader === headerName.toLowerCase()) { + return headers[headerName]; + } + } + return undefined; +}; +export const deleteHeader = (soughtHeader, headers) => { + soughtHeader = soughtHeader.toLowerCase(); + for (const headerName of Object.keys(headers)) { + if (soughtHeader === headerName.toLowerCase()) { + delete headers[headerName]; + } + } +}; diff --git a/amplify/functions/downloadDocument/node_modules/@smithy/signature-v4/dist-es/index.js b/amplify/functions/downloadDocument/node_modules/@smithy/signature-v4/dist-es/index.js new file mode 100644 index 0000000..062752d --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@smithy/signature-v4/dist-es/index.js @@ -0,0 +1,11 @@ +export * from "./SignatureV4"; +export * from "./constants"; +export { getCanonicalHeaders } from "./getCanonicalHeaders"; +export { getCanonicalQuery } from "./getCanonicalQuery"; +export { 
getPayloadHash } from "./getPayloadHash"; +export { moveHeadersToQuery } from "./moveHeadersToQuery"; +export { prepareRequest } from "./prepareRequest"; +export * from "./credentialDerivation"; +export { SignatureV4Base } from "./SignatureV4Base"; +export { hasHeader } from "./headerUtil"; +export * from "./signature-v4a-container"; diff --git a/amplify/functions/downloadDocument/node_modules/@smithy/signature-v4/dist-es/moveHeadersToQuery.js b/amplify/functions/downloadDocument/node_modules/@smithy/signature-v4/dist-es/moveHeadersToQuery.js new file mode 100644 index 0000000..806703a --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@smithy/signature-v4/dist-es/moveHeadersToQuery.js @@ -0,0 +1,17 @@ +import { HttpRequest } from "@smithy/protocol-http"; +export const moveHeadersToQuery = (request, options = {}) => { + const { headers, query = {} } = HttpRequest.clone(request); + for (const name of Object.keys(headers)) { + const lname = name.toLowerCase(); + if ((lname.slice(0, 6) === "x-amz-" && !options.unhoistableHeaders?.has(lname)) || + options.hoistableHeaders?.has(lname)) { + query[name] = headers[name]; + delete headers[name]; + } + } + return { + ...request, + headers, + query, + }; +}; diff --git a/amplify/functions/downloadDocument/node_modules/@smithy/signature-v4/dist-es/prepareRequest.js b/amplify/functions/downloadDocument/node_modules/@smithy/signature-v4/dist-es/prepareRequest.js new file mode 100644 index 0000000..7fe5136 --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@smithy/signature-v4/dist-es/prepareRequest.js @@ -0,0 +1,11 @@ +import { HttpRequest } from "@smithy/protocol-http"; +import { GENERATED_HEADERS } from "./constants"; +export const prepareRequest = (request) => { + request = HttpRequest.clone(request); + for (const headerName of Object.keys(request.headers)) { + if (GENERATED_HEADERS.indexOf(headerName.toLowerCase()) > -1) { + delete request.headers[headerName]; + } + } + return request; +}; 
diff --git a/amplify/functions/downloadDocument/node_modules/@smithy/signature-v4/dist-es/signature-v4a-container.js b/amplify/functions/downloadDocument/node_modules/@smithy/signature-v4/dist-es/signature-v4a-container.js new file mode 100644 index 0000000..a309b0a --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@smithy/signature-v4/dist-es/signature-v4a-container.js @@ -0,0 +1,3 @@ +export const signatureV4aContainer = { + SignatureV4a: null, +}; diff --git a/amplify/functions/downloadDocument/node_modules/@smithy/signature-v4/dist-es/suite.fixture.js b/amplify/functions/downloadDocument/node_modules/@smithy/signature-v4/dist-es/suite.fixture.js new file mode 100644 index 0000000..bb704a9 --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@smithy/signature-v4/dist-es/suite.fixture.js @@ -0,0 +1,399 @@ +export const region = "us-east-1"; +export const service = "service"; +export const credentials = { + accessKeyId: "AKIDEXAMPLE", + secretAccessKey: "wJalrXUtnFEMI/K7MDENG+bPxRfiCYEXAMPLEKEY", +}; +export const signingDate = new Date("2015-08-30T12:36:00Z"); +export const requests = [ + { + name: "get-header-key-duplicate", + request: { + protocol: "https:", + method: "GET", + hostname: "example.amazonaws.com", + query: {}, + headers: { + host: "example.amazonaws.com", + "my-header1": "value2,value2,value1", + "x-amz-date": "20150830T123600Z", + }, + path: "/", + }, + authorization: "AWS4-HMAC-SHA256 Credential=AKIDEXAMPLE/20150830/us-east-1/service/aws4_request, SignedHeaders=host;my-header1;x-amz-date, Signature=c9d5ea9f3f72853aea855b47ea873832890dbdd183b4468f858259531a5138ea", + }, + { + name: "get-header-value-multiline", + request: { + protocol: "https:", + method: "GET", + hostname: "example.amazonaws.com", + query: {}, + headers: { + host: "example.amazonaws.com", + "my-header1": "value1,value2,value3", + "x-amz-date": "20150830T123600Z", + }, + path: "/", + }, + authorization: "AWS4-HMAC-SHA256 
Credential=AKIDEXAMPLE/20150830/us-east-1/service/aws4_request, SignedHeaders=host;my-header1;x-amz-date, Signature=ba17b383a53190154eb5fa66a1b836cc297cc0a3d70a5d00705980573d8ff790", + }, + { + name: "get-header-value-order", + request: { + protocol: "https:", + method: "GET", + hostname: "example.amazonaws.com", + query: {}, + headers: { + host: "example.amazonaws.com", + "my-header1": "value4,value1,value3,value2", + "x-amz-date": "20150830T123600Z", + }, + path: "/", + }, + authorization: "AWS4-HMAC-SHA256 Credential=AKIDEXAMPLE/20150830/us-east-1/service/aws4_request, SignedHeaders=host;my-header1;x-amz-date, Signature=08c7e5a9acfcfeb3ab6b2185e75ce8b1deb5e634ec47601a50643f830c755c01", + }, + { + name: "get-header-value-trim", + request: { + protocol: "https:", + method: "GET", + hostname: "example.amazonaws.com", + query: {}, + headers: { + host: "example.amazonaws.com", + "my-header1": "value1", + "my-header2": '"a b c"', + "x-amz-date": "20150830T123600Z", + }, + path: "/", + }, + authorization: "AWS4-HMAC-SHA256 Credential=AKIDEXAMPLE/20150830/us-east-1/service/aws4_request, SignedHeaders=host;my-header1;my-header2;x-amz-date, Signature=acc3ed3afb60bb290fc8d2dd0098b9911fcaa05412b367055dee359757a9c736", + }, + { + name: "get-unreserved", + request: { + protocol: "https:", + method: "GET", + hostname: "example.amazonaws.com", + query: {}, + headers: { + host: "example.amazonaws.com", + "x-amz-date": "20150830T123600Z", + }, + path: "/-._~0123456789ABCDEFGHIJKLMNOPQRSTUVWXYZabcdefghijklmnopqrstuvwxyz", + }, + authorization: "AWS4-HMAC-SHA256 Credential=AKIDEXAMPLE/20150830/us-east-1/service/aws4_request, SignedHeaders=host;x-amz-date, Signature=07ef7494c76fa4850883e2b006601f940f8a34d404d0cfa977f52a65bbf5f24f", + }, + { + name: "get-utf8", + request: { + protocol: "https:", + method: "GET", + hostname: "example.amazonaws.com", + query: {}, + headers: { + host: "example.amazonaws.com", + "x-amz-date": "20150830T123600Z", + }, + path: "/ሴ", + }, + authorization: 
"AWS4-HMAC-SHA256 Credential=AKIDEXAMPLE/20150830/us-east-1/service/aws4_request, SignedHeaders=host;x-amz-date, Signature=8318018e0b0f223aa2bbf98705b62bb787dc9c0e678f255a891fd03141be5d85", + }, + { + name: "get-vanilla", + request: { + protocol: "https:", + method: "GET", + hostname: "example.amazonaws.com", + query: {}, + headers: { + host: "example.amazonaws.com", + "x-amz-date": "20150830T123600Z", + }, + path: "/", + }, + authorization: "AWS4-HMAC-SHA256 Credential=AKIDEXAMPLE/20150830/us-east-1/service/aws4_request, SignedHeaders=host;x-amz-date, Signature=5fa00fa31553b73ebf1942676e86291e8372ff2a2260956d9b8aae1d763fbf31", + }, + { + name: "get-vanilla-empty-query-key", + request: { + protocol: "https:", + method: "GET", + hostname: "example.amazonaws.com", + query: { + Param1: "value1", + }, + headers: { + host: "example.amazonaws.com", + "x-amz-date": "20150830T123600Z", + }, + path: "/", + }, + authorization: "AWS4-HMAC-SHA256 Credential=AKIDEXAMPLE/20150830/us-east-1/service/aws4_request, SignedHeaders=host;x-amz-date, Signature=a67d582fa61cc504c4bae71f336f98b97f1ea3c7a6bfe1b6e45aec72011b9aeb", + }, + { + name: "get-vanilla-query", + request: { + protocol: "https:", + method: "GET", + hostname: "example.amazonaws.com", + query: {}, + headers: { + host: "example.amazonaws.com", + "x-amz-date": "20150830T123600Z", + }, + path: "/", + }, + authorization: "AWS4-HMAC-SHA256 Credential=AKIDEXAMPLE/20150830/us-east-1/service/aws4_request, SignedHeaders=host;x-amz-date, Signature=5fa00fa31553b73ebf1942676e86291e8372ff2a2260956d9b8aae1d763fbf31", + }, + { + name: "get-vanilla-query-order-key-case", + request: { + protocol: "https:", + method: "GET", + hostname: "example.amazonaws.com", + query: { + Param2: "value2", + Param1: "value1", + }, + headers: { + host: "example.amazonaws.com", + "x-amz-date": "20150830T123600Z", + }, + path: "/", + }, + authorization: "AWS4-HMAC-SHA256 Credential=AKIDEXAMPLE/20150830/us-east-1/service/aws4_request, 
SignedHeaders=host;x-amz-date, Signature=b97d918cfa904a5beff61c982a1b6f458b799221646efd99d3219ec94cdf2500", + }, + { + name: "get-vanilla-query-unreserved", + request: { + protocol: "https:", + method: "GET", + hostname: "example.amazonaws.com", + query: { + "-._~0123456789ABCDEFGHIJKLMNOPQRSTUVWXYZabcdefghijklmnopqrstuvwxyz": "-._~0123456789ABCDEFGHIJKLMNOPQRSTUVWXYZabcdefghijklmnopqrstuvwxyz", + }, + headers: { + host: "example.amazonaws.com", + "x-amz-date": "20150830T123600Z", + }, + path: "/", + }, + authorization: "AWS4-HMAC-SHA256 Credential=AKIDEXAMPLE/20150830/us-east-1/service/aws4_request, SignedHeaders=host;x-amz-date, Signature=9c3e54bfcdf0b19771a7f523ee5669cdf59bc7cc0884027167c21bb143a40197", + }, + { + name: "get-vanilla-utf8-query", + request: { + protocol: "https:", + method: "GET", + hostname: "example.amazonaws.com", + query: { + ሴ: "bar", + }, + headers: { + host: "example.amazonaws.com", + "x-amz-date": "20150830T123600Z", + }, + path: "/", + }, + authorization: "AWS4-HMAC-SHA256 Credential=AKIDEXAMPLE/20150830/us-east-1/service/aws4_request, SignedHeaders=host;x-amz-date, Signature=2cdec8eed098649ff3a119c94853b13c643bcf08f8b0a1d91e12c9027818dd04", + }, + { + name: "post-header-key-case", + request: { + protocol: "https:", + method: "POST", + hostname: "example.amazonaws.com", + query: {}, + headers: { + host: "example.amazonaws.com", + "x-amz-date": "20150830T123600Z", + }, + path: "/", + }, + authorization: "AWS4-HMAC-SHA256 Credential=AKIDEXAMPLE/20150830/us-east-1/service/aws4_request, SignedHeaders=host;x-amz-date, Signature=5da7c1a2acd57cee7505fc6676e4e544621c30862966e37dddb68e92efbe5d6b", + }, + { + name: "post-header-key-sort", + request: { + protocol: "https:", + method: "POST", + hostname: "example.amazonaws.com", + query: {}, + headers: { + host: "example.amazonaws.com", + "my-header1": "value1", + "x-amz-date": "20150830T123600Z", + }, + path: "/", + }, + authorization: "AWS4-HMAC-SHA256 
Credential=AKIDEXAMPLE/20150830/us-east-1/service/aws4_request, SignedHeaders=host;my-header1;x-amz-date, Signature=c5410059b04c1ee005303aed430f6e6645f61f4dc9e1461ec8f8916fdf18852c", + }, + { + name: "post-header-value-case", + request: { + protocol: "https:", + method: "POST", + hostname: "example.amazonaws.com", + query: {}, + headers: { + host: "example.amazonaws.com", + "my-header1": "VALUE1", + "x-amz-date": "20150830T123600Z", + }, + path: "/", + }, + authorization: "AWS4-HMAC-SHA256 Credential=AKIDEXAMPLE/20150830/us-east-1/service/aws4_request, SignedHeaders=host;my-header1;x-amz-date, Signature=cdbc9802e29d2942e5e10b5bccfdd67c5f22c7c4e8ae67b53629efa58b974b7d", + }, + { + name: "post-sts-header-after", + request: { + protocol: "https:", + method: "POST", + hostname: "example.amazonaws.com", + query: {}, + headers: { + host: "example.amazonaws.com", + "x-amz-date": "20150830T123600Z", + }, + path: "/", + }, + authorization: "AWS4-HMAC-SHA256 Credential=AKIDEXAMPLE/20150830/us-east-1/service/aws4_request, SignedHeaders=host;x-amz-date, Signature=5da7c1a2acd57cee7505fc6676e4e544621c30862966e37dddb68e92efbe5d6b", + }, + { + name: "post-sts-header-before", + request: { + protocol: "https:", + method: "POST", + hostname: "example.amazonaws.com", + query: {}, + headers: { + host: "example.amazonaws.com", + "x-amz-date": "20150830T123600Z", + "x-amz-security-token": "AQoDYXdzEPT//////////wEXAMPLEtc764bNrC9SAPBSM22wDOk4x4HIZ8j4FZTwdQWLWsKWHGBuFqwAeMicRXmxfpSPfIeoIYRqTflfKD8YUuwthAx7mSEI/qkPpKPi/kMcGdQrmGdeehM4IC1NtBmUpp2wUE8phUZampKsburEDy0KPkyQDYwT7WZ0wq5VSXDvp75YU9HFvlRd8Tx6q6fE8YQcHNVXAkiY9q6d+xo0rKwT38xVqr7ZD0u0iPPkUL64lIZbqBAz+scqKmlzm8FDrypNC9Yjc8fPOLn9FX9KSYvKTr4rvx3iSIlTJabIQwj2ICCR/oLxBA==", + }, + path: "/", + }, + authorization: "AWS4-HMAC-SHA256 Credential=AKIDEXAMPLE/20150830/us-east-1/service/aws4_request, SignedHeaders=host;x-amz-date;x-amz-security-token, Signature=85d96828115b5dc0cfc3bd16ad9e210dd772bbebba041836c64533a82be05ead", + }, + { + name: 
"post-vanilla", + request: { + protocol: "https:", + method: "POST", + hostname: "example.amazonaws.com", + query: {}, + headers: { + host: "example.amazonaws.com", + "x-amz-date": "20150830T123600Z", + }, + path: "/", + }, + authorization: "AWS4-HMAC-SHA256 Credential=AKIDEXAMPLE/20150830/us-east-1/service/aws4_request, SignedHeaders=host;x-amz-date, Signature=5da7c1a2acd57cee7505fc6676e4e544621c30862966e37dddb68e92efbe5d6b", + }, + { + name: "post-vanilla-empty-query-value", + request: { + protocol: "https:", + method: "POST", + hostname: "example.amazonaws.com", + query: { + Param1: "value1", + }, + headers: { + host: "example.amazonaws.com", + "x-amz-date": "20150830T123600Z", + }, + path: "/", + }, + authorization: "AWS4-HMAC-SHA256 Credential=AKIDEXAMPLE/20150830/us-east-1/service/aws4_request, SignedHeaders=host;x-amz-date, Signature=28038455d6de14eafc1f9222cf5aa6f1a96197d7deb8263271d420d138af7f11", + }, + { + name: "post-vanilla-query", + request: { + protocol: "https:", + method: "POST", + hostname: "example.amazonaws.com", + query: { + Param1: "value1", + }, + headers: { + host: "example.amazonaws.com", + "x-amz-date": "20150830T123600Z", + }, + path: "/", + }, + authorization: "AWS4-HMAC-SHA256 Credential=AKIDEXAMPLE/20150830/us-east-1/service/aws4_request, SignedHeaders=host;x-amz-date, Signature=28038455d6de14eafc1f9222cf5aa6f1a96197d7deb8263271d420d138af7f11", + }, + { + name: "post-vanilla-query-nonunreserved", + request: { + protocol: "https:", + method: "POST", + hostname: "example.amazonaws.com", + query: { + "@#$%^": "", + "+": '/,?><`";:\\|][{}', + }, + headers: { + host: "example.amazonaws.com", + "x-amz-date": "20150830T123600Z", + }, + path: "/", + }, + authorization: "AWS4-HMAC-SHA256 Credential=AKIDEXAMPLE/20150830/us-east-1/service/aws4_request, SignedHeaders=host;x-amz-date, Signature=66c82657c86e26fb25238d0e69f011edc4c6df5ae71119d7cb98ed9b87393c1e", + }, + { + name: "post-vanilla-query-space", + request: { + protocol: "https:", + method: 
"POST", + hostname: "example.amazonaws.com", + query: { + p: "", + }, + headers: { + host: "example.amazonaws.com", + "x-amz-date": "20150830T123600Z", + }, + path: "/", + }, + authorization: "AWS4-HMAC-SHA256 Credential=AKIDEXAMPLE/20150830/us-east-1/service/aws4_request, SignedHeaders=host;x-amz-date, Signature=e71688addb58a26418614085fb730ba3faa623b461c17f48f2fbdb9361b94a9b", + }, + { + name: "post-x-www-form-urlencoded", + request: { + protocol: "https:", + method: "POST", + hostname: "example.amazonaws.com", + query: {}, + headers: { + "content-type": "application/x-www-form-urlencoded", + host: "example.amazonaws.com", + "x-amz-date": "20150830T123600Z", + }, + body: "Param1=value1", + path: "/", + }, + authorization: "AWS4-HMAC-SHA256 Credential=AKIDEXAMPLE/20150830/us-east-1/service/aws4_request, SignedHeaders=content-type;host;x-amz-date, Signature=ff11897932ad3f4e8b18135d722051e5ac45fc38421b1da7b9d196a0fe09473a", + }, + { + name: "post-x-www-form-urlencoded-parameters", + request: { + protocol: "https:", + method: "POST", + hostname: "example.amazonaws.com", + query: {}, + headers: { + "content-type": "application/x-www-form-urlencoded; charset=utf8", + host: "example.amazonaws.com", + "x-amz-date": "20150830T123600Z", + }, + body: "Param1=value1", + path: "/", + }, + authorization: "AWS4-HMAC-SHA256 Credential=AKIDEXAMPLE/20150830/us-east-1/service/aws4_request, SignedHeaders=content-type;host;x-amz-date, Signature=1a72ec8f64bd914b0e42e42607c7fbce7fb2c7465f63e3092b3b0d39fa77a6fe", + }, +]; diff --git a/amplify/functions/downloadDocument/node_modules/@smithy/signature-v4/dist-es/utilDate.js b/amplify/functions/downloadDocument/node_modules/@smithy/signature-v4/dist-es/utilDate.js new file mode 100644 index 0000000..4aad623 --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@smithy/signature-v4/dist-es/utilDate.js @@ -0,0 +1,15 @@ +export const iso8601 = (time) => toDate(time) + .toISOString() + .replace(/\.\d{3}Z$/, "Z"); +export const 
toDate = (time) => { + if (typeof time === "number") { + return new Date(time * 1000); + } + if (typeof time === "string") { + if (Number(time)) { + return new Date(Number(time) * 1000); + } + return new Date(time); + } + return time; +}; diff --git a/amplify/functions/downloadDocument/node_modules/@smithy/signature-v4/dist-types/HeaderFormatter.d.ts b/amplify/functions/downloadDocument/node_modules/@smithy/signature-v4/dist-types/HeaderFormatter.d.ts new file mode 100644 index 0000000..92056a6 --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@smithy/signature-v4/dist-types/HeaderFormatter.d.ts @@ -0,0 +1,24 @@ +import type { Int64 as IInt64, MessageHeaders } from "@smithy/types"; +/** + * @internal + * TODO: duplicated from @smithy/eventstream-codec to break large dependency. + * TODO: This should be moved to its own deduped submodule in @smithy/core when submodules are implemented. + */ +export declare class HeaderFormatter { + format(headers: MessageHeaders): Uint8Array; + private formatHeaderValue; +} +/** + * TODO: duplicated from @smithy/eventstream-codec to break large dependency. + * TODO: This should be moved to its own deduped submodule in @smithy/core when submodules are implemented. + */ +export declare class Int64 implements IInt64 { + readonly bytes: Uint8Array; + constructor(bytes: Uint8Array); + static fromNumber(number: number): Int64; + /** + * Called implicitly by infix arithmetic operators. 
+ */ + valueOf(): number; + toString(): string; +} diff --git a/amplify/functions/downloadDocument/node_modules/@smithy/signature-v4/dist-types/SignatureV4.d.ts b/amplify/functions/downloadDocument/node_modules/@smithy/signature-v4/dist-types/SignatureV4.d.ts new file mode 100644 index 0000000..99499d4 --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@smithy/signature-v4/dist-types/SignatureV4.d.ts @@ -0,0 +1,20 @@ +import { EventSigner, EventSigningArguments, FormattedEvent, HttpRequest, MessageSigner, RequestPresigner, RequestPresigningArguments, RequestSigner, RequestSigningArguments, SignableMessage, SignedMessage, SigningArguments, StringSigner } from "@smithy/types"; +import { SignatureV4Base, SignatureV4CryptoInit, SignatureV4Init } from "./SignatureV4Base"; +/** + * @public + */ +export declare class SignatureV4 extends SignatureV4Base implements RequestPresigner, RequestSigner, StringSigner, EventSigner, MessageSigner { + private readonly headerFormatter; + constructor({ applyChecksum, credentials, region, service, sha256, uriEscapePath, }: SignatureV4Init & SignatureV4CryptoInit); + presign(originalRequest: HttpRequest, options?: RequestPresigningArguments): Promise; + sign(stringToSign: string, options?: SigningArguments): Promise; + sign(event: FormattedEvent, options: EventSigningArguments): Promise; + sign(event: SignableMessage, options: SigningArguments): Promise; + sign(requestToSign: HttpRequest, options?: RequestSigningArguments): Promise; + private signEvent; + signMessage(signableMessage: SignableMessage, { signingDate, signingRegion, signingService }: SigningArguments): Promise; + private signString; + private signRequest; + private getSignature; + private getSigningKey; +} diff --git a/amplify/functions/downloadDocument/node_modules/@smithy/signature-v4/dist-types/SignatureV4Base.d.ts b/amplify/functions/downloadDocument/node_modules/@smithy/signature-v4/dist-types/SignatureV4Base.d.ts new file mode 100644 index 
0000000..9a0e6ad --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@smithy/signature-v4/dist-types/SignatureV4Base.d.ts @@ -0,0 +1,69 @@ +import { AwsCredentialIdentity, ChecksumConstructor, DateInput, HashConstructor, HeaderBag, HttpRequest, Provider } from "@smithy/types"; +/** + * @public + */ +export interface SignatureV4Init { + /** + * The service signing name. + */ + service: string; + /** + * The region name or a function that returns a promise that will be + * resolved with the region name. + */ + region: string | Provider; + /** + * The credentials with which the request should be signed or a function + * that returns a promise that will be resolved with credentials. + */ + credentials: AwsCredentialIdentity | Provider; + /** + * A constructor function for a hash object that will calculate SHA-256 HMAC + * checksums. + */ + sha256?: ChecksumConstructor | HashConstructor; + /** + * Whether to uri-escape the request URI path as part of computing the + * canonical request string. This is required for every AWS service, except + * Amazon S3, as of late 2017. + * + * @default [true] + */ + uriEscapePath?: boolean; + /** + * Whether to calculate a checksum of the request body and include it as + * either a request header (when signing) or as a query string parameter + * (when presigning). This is required for AWS Glacier and Amazon S3 and optional for + * every other AWS service as of late 2017. 
+ * + * @default [true] + */ + applyChecksum?: boolean; +} +/** + * @public + */ +export interface SignatureV4CryptoInit { + sha256: ChecksumConstructor | HashConstructor; +} +/** + * @internal + */ +export declare abstract class SignatureV4Base { + protected readonly service: string; + protected readonly regionProvider: Provider; + protected readonly credentialProvider: Provider; + protected readonly sha256: ChecksumConstructor | HashConstructor; + private readonly uriEscapePath; + protected readonly applyChecksum: boolean; + protected constructor({ applyChecksum, credentials, region, service, sha256, uriEscapePath, }: SignatureV4Init & SignatureV4CryptoInit); + protected createCanonicalRequest(request: HttpRequest, canonicalHeaders: HeaderBag, payloadHash: string): string; + protected createStringToSign(longDate: string, credentialScope: string, canonicalRequest: string, algorithmIdentifier: string): Promise; + private getCanonicalPath; + protected validateResolvedCredentials(credentials: unknown): void; + protected formatDate(now: DateInput): { + longDate: string; + shortDate: string; + }; + protected getCanonicalHeaderList(headers: object): string; +} diff --git a/amplify/functions/downloadDocument/node_modules/@smithy/signature-v4/dist-types/constants.d.ts b/amplify/functions/downloadDocument/node_modules/@smithy/signature-v4/dist-types/constants.d.ts new file mode 100644 index 0000000..ea1cfb5 --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@smithy/signature-v4/dist-types/constants.d.ts @@ -0,0 +1,43 @@ +export declare const ALGORITHM_QUERY_PARAM = "X-Amz-Algorithm"; +export declare const CREDENTIAL_QUERY_PARAM = "X-Amz-Credential"; +export declare const AMZ_DATE_QUERY_PARAM = "X-Amz-Date"; +export declare const SIGNED_HEADERS_QUERY_PARAM = "X-Amz-SignedHeaders"; +export declare const EXPIRES_QUERY_PARAM = "X-Amz-Expires"; +export declare const SIGNATURE_QUERY_PARAM = "X-Amz-Signature"; +export declare const TOKEN_QUERY_PARAM = 
"X-Amz-Security-Token"; +export declare const REGION_SET_PARAM = "X-Amz-Region-Set"; +export declare const AUTH_HEADER = "authorization"; +export declare const AMZ_DATE_HEADER: string; +export declare const DATE_HEADER = "date"; +export declare const GENERATED_HEADERS: string[]; +export declare const SIGNATURE_HEADER: string; +export declare const SHA256_HEADER = "x-amz-content-sha256"; +export declare const TOKEN_HEADER: string; +export declare const HOST_HEADER = "host"; +export declare const ALWAYS_UNSIGNABLE_HEADERS: { + authorization: boolean; + "cache-control": boolean; + connection: boolean; + expect: boolean; + from: boolean; + "keep-alive": boolean; + "max-forwards": boolean; + pragma: boolean; + referer: boolean; + te: boolean; + trailer: boolean; + "transfer-encoding": boolean; + upgrade: boolean; + "user-agent": boolean; + "x-amzn-trace-id": boolean; +}; +export declare const PROXY_HEADER_PATTERN: RegExp; +export declare const SEC_HEADER_PATTERN: RegExp; +export declare const UNSIGNABLE_PATTERNS: RegExp[]; +export declare const ALGORITHM_IDENTIFIER = "AWS4-HMAC-SHA256"; +export declare const ALGORITHM_IDENTIFIER_V4A = "AWS4-ECDSA-P256-SHA256"; +export declare const EVENT_ALGORITHM_IDENTIFIER = "AWS4-HMAC-SHA256-PAYLOAD"; +export declare const UNSIGNED_PAYLOAD = "UNSIGNED-PAYLOAD"; +export declare const MAX_CACHE_SIZE = 50; +export declare const KEY_TYPE_IDENTIFIER = "aws4_request"; +export declare const MAX_PRESIGNED_TTL: number; diff --git a/amplify/functions/downloadDocument/node_modules/@smithy/signature-v4/dist-types/credentialDerivation.d.ts b/amplify/functions/downloadDocument/node_modules/@smithy/signature-v4/dist-types/credentialDerivation.d.ts new file mode 100644 index 0000000..a560c2c --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@smithy/signature-v4/dist-types/credentialDerivation.d.ts @@ -0,0 +1,30 @@ +import { AwsCredentialIdentity, ChecksumConstructor, HashConstructor } from "@smithy/types"; +/** + * Create a string 
describing the scope of credentials used to sign a request. + * + * @internal + * + * @param shortDate - the current calendar date in the form YYYYMMDD. + * @param region - the AWS region in which the service resides. + * @param service - the service to which the signed request is being sent. + */ +export declare const createScope: (shortDate: string, region: string, service: string) => string; +/** + * Derive a signing key from its composite parts. + * + * @internal + * + * @param sha256Constructor - a constructor function that can instantiate SHA-256 + * hash objects. + * @param credentials - the credentials with which the request will be + * signed. + * @param shortDate - the current calendar date in the form YYYYMMDD. + * @param region - the AWS region in which the service resides. + * @param service - the service to which the signed request is being + * sent. + */ +export declare const getSigningKey: (sha256Constructor: ChecksumConstructor | HashConstructor, credentials: AwsCredentialIdentity, shortDate: string, region: string, service: string) => Promise; +/** + * @internal + */ +export declare const clearCredentialCache: () => void; diff --git a/amplify/functions/downloadDocument/node_modules/@smithy/signature-v4/dist-types/getCanonicalHeaders.d.ts b/amplify/functions/downloadDocument/node_modules/@smithy/signature-v4/dist-types/getCanonicalHeaders.d.ts new file mode 100644 index 0000000..efc417c --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@smithy/signature-v4/dist-types/getCanonicalHeaders.d.ts @@ -0,0 +1,5 @@ +import { HeaderBag, HttpRequest } from "@smithy/types"; +/** + * @internal + */ +export declare const getCanonicalHeaders: ({ headers }: HttpRequest, unsignableHeaders?: Set, signableHeaders?: Set) => HeaderBag; diff --git a/amplify/functions/downloadDocument/node_modules/@smithy/signature-v4/dist-types/getCanonicalQuery.d.ts 
b/amplify/functions/downloadDocument/node_modules/@smithy/signature-v4/dist-types/getCanonicalQuery.d.ts new file mode 100644 index 0000000..a8e1800 --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@smithy/signature-v4/dist-types/getCanonicalQuery.d.ts @@ -0,0 +1,5 @@ +import { HttpRequest } from "@smithy/types"; +/** + * @internal + */ +export declare const getCanonicalQuery: ({ query }: HttpRequest) => string; diff --git a/amplify/functions/downloadDocument/node_modules/@smithy/signature-v4/dist-types/getPayloadHash.d.ts b/amplify/functions/downloadDocument/node_modules/@smithy/signature-v4/dist-types/getPayloadHash.d.ts new file mode 100644 index 0000000..2de0858 --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@smithy/signature-v4/dist-types/getPayloadHash.d.ts @@ -0,0 +1,5 @@ +import { ChecksumConstructor, HashConstructor, HttpRequest } from "@smithy/types"; +/** + * @internal + */ +export declare const getPayloadHash: ({ headers, body }: HttpRequest, hashConstructor: ChecksumConstructor | HashConstructor) => Promise; diff --git a/amplify/functions/downloadDocument/node_modules/@smithy/signature-v4/dist-types/headerUtil.d.ts b/amplify/functions/downloadDocument/node_modules/@smithy/signature-v4/dist-types/headerUtil.d.ts new file mode 100644 index 0000000..c0b66eb --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@smithy/signature-v4/dist-types/headerUtil.d.ts @@ -0,0 +1,4 @@ +import { HeaderBag } from "@smithy/types"; +export declare const hasHeader: (soughtHeader: string, headers: HeaderBag) => boolean; +export declare const getHeaderValue: (soughtHeader: string, headers: HeaderBag) => string | undefined; +export declare const deleteHeader: (soughtHeader: string, headers: HeaderBag) => void; diff --git a/amplify/functions/downloadDocument/node_modules/@smithy/signature-v4/dist-types/index.d.ts b/amplify/functions/downloadDocument/node_modules/@smithy/signature-v4/dist-types/index.d.ts new file mode 
100644 index 0000000..9305cf3 --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@smithy/signature-v4/dist-types/index.d.ts @@ -0,0 +1,11 @@ +export * from "./SignatureV4"; +export * from "./constants"; +export { getCanonicalHeaders } from "./getCanonicalHeaders"; +export { getCanonicalQuery } from "./getCanonicalQuery"; +export { getPayloadHash } from "./getPayloadHash"; +export { moveHeadersToQuery } from "./moveHeadersToQuery"; +export { prepareRequest } from "./prepareRequest"; +export * from "./credentialDerivation"; +export { SignatureV4Init, SignatureV4CryptoInit, SignatureV4Base } from "./SignatureV4Base"; +export { hasHeader } from "./headerUtil"; +export * from "./signature-v4a-container"; diff --git a/amplify/functions/downloadDocument/node_modules/@smithy/signature-v4/dist-types/moveHeadersToQuery.d.ts b/amplify/functions/downloadDocument/node_modules/@smithy/signature-v4/dist-types/moveHeadersToQuery.d.ts new file mode 100644 index 0000000..e2c31e0 --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@smithy/signature-v4/dist-types/moveHeadersToQuery.d.ts @@ -0,0 +1,10 @@ +import type { HttpRequest as IHttpRequest, QueryParameterBag } from "@smithy/types"; +/** + * @internal + */ +export declare const moveHeadersToQuery: (request: IHttpRequest, options?: { + unhoistableHeaders?: Set; + hoistableHeaders?: Set; +}) => IHttpRequest & { + query: QueryParameterBag; +}; diff --git a/amplify/functions/downloadDocument/node_modules/@smithy/signature-v4/dist-types/prepareRequest.d.ts b/amplify/functions/downloadDocument/node_modules/@smithy/signature-v4/dist-types/prepareRequest.d.ts new file mode 100644 index 0000000..b20e0e3 --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@smithy/signature-v4/dist-types/prepareRequest.d.ts @@ -0,0 +1,5 @@ +import type { HttpRequest as IHttpRequest } from "@smithy/types"; +/** + * @internal + */ +export declare const prepareRequest: (request: IHttpRequest) => IHttpRequest; 
diff --git a/amplify/functions/downloadDocument/node_modules/@smithy/signature-v4/dist-types/signature-v4a-container.d.ts b/amplify/functions/downloadDocument/node_modules/@smithy/signature-v4/dist-types/signature-v4a-container.d.ts new file mode 100644 index 0000000..8901036 --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@smithy/signature-v4/dist-types/signature-v4a-container.d.ts @@ -0,0 +1,24 @@ +import type { RequestSigner } from "@smithy/types"; +/** + * @public + */ +export type OptionalSigV4aSigner = { + /** + * This constructor is not typed so as not to require a type import + * from the signature-v4a package. + * + * The true type is SignatureV4a from @smithy/signature-v4a. + */ + new (options: any): RequestSigner; +}; +/** + * @public + * + * \@smithy/signature-v4a will install the constructor in this + * container if it's installed. + * + * This avoids a runtime-require being interpreted statically by bundlers. + */ +export declare const signatureV4aContainer: { + SignatureV4a: null | OptionalSigV4aSigner; +}; diff --git a/amplify/functions/downloadDocument/node_modules/@smithy/signature-v4/dist-types/suite.fixture.d.ts b/amplify/functions/downloadDocument/node_modules/@smithy/signature-v4/dist-types/suite.fixture.d.ts new file mode 100644 index 0000000..383bc35 --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@smithy/signature-v4/dist-types/suite.fixture.d.ts @@ -0,0 +1,14 @@ +import { HttpRequest } from "@smithy/types"; +export interface TestCase { + name: string; + request: HttpRequest; + authorization: string; +} +export declare const region = "us-east-1"; +export declare const service = "service"; +export declare const credentials: { + accessKeyId: string; + secretAccessKey: string; +}; +export declare const signingDate: Date; +export declare const requests: Array; diff --git a/amplify/functions/downloadDocument/node_modules/@smithy/signature-v4/dist-types/ts3.4/HeaderFormatter.d.ts 
b/amplify/functions/downloadDocument/node_modules/@smithy/signature-v4/dist-types/ts3.4/HeaderFormatter.d.ts new file mode 100644 index 0000000..6c294c3 --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@smithy/signature-v4/dist-types/ts3.4/HeaderFormatter.d.ts @@ -0,0 +1,24 @@ +import { Int64 as IInt64, MessageHeaders } from "@smithy/types"; +/** + * @internal + * TODO: duplicated from @smithy/eventstream-codec to break large dependency. + * TODO: This should be moved to its own deduped submodule in @smithy/core when submodules are implemented. + */ +export declare class HeaderFormatter { + format(headers: MessageHeaders): Uint8Array; + private formatHeaderValue; +} +/** + * TODO: duplicated from @smithy/eventstream-codec to break large dependency. + * TODO: This should be moved to its own deduped submodule in @smithy/core when submodules are implemented. + */ +export declare class Int64 implements IInt64 { + readonly bytes: Uint8Array; + constructor(bytes: Uint8Array); + static fromNumber(number: number): Int64; + /** + * Called implicitly by infix arithmetic operators. 
+ */ + valueOf(): number; + toString(): string; +} diff --git a/amplify/functions/downloadDocument/node_modules/@smithy/signature-v4/dist-types/ts3.4/SignatureV4.d.ts b/amplify/functions/downloadDocument/node_modules/@smithy/signature-v4/dist-types/ts3.4/SignatureV4.d.ts new file mode 100644 index 0000000..c613753 --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@smithy/signature-v4/dist-types/ts3.4/SignatureV4.d.ts @@ -0,0 +1,20 @@ +import { EventSigner, EventSigningArguments, FormattedEvent, HttpRequest, MessageSigner, RequestPresigner, RequestPresigningArguments, RequestSigner, RequestSigningArguments, SignableMessage, SignedMessage, SigningArguments, StringSigner } from "@smithy/types"; +import { SignatureV4Base, SignatureV4CryptoInit, SignatureV4Init } from "./SignatureV4Base"; +/** + * @public + */ +export declare class SignatureV4 extends SignatureV4Base implements RequestPresigner, RequestSigner, StringSigner, EventSigner, MessageSigner { + private readonly headerFormatter; + constructor({ applyChecksum, credentials, region, service, sha256, uriEscapePath, }: SignatureV4Init & SignatureV4CryptoInit); + presign(originalRequest: HttpRequest, options?: RequestPresigningArguments): Promise; + sign(stringToSign: string, options?: SigningArguments): Promise; + sign(event: FormattedEvent, options: EventSigningArguments): Promise; + sign(event: SignableMessage, options: SigningArguments): Promise; + sign(requestToSign: HttpRequest, options?: RequestSigningArguments): Promise; + private signEvent; + signMessage(signableMessage: SignableMessage, { signingDate, signingRegion, signingService }: SigningArguments): Promise; + private signString; + private signRequest; + private getSignature; + private getSigningKey; +} diff --git a/amplify/functions/downloadDocument/node_modules/@smithy/signature-v4/dist-types/ts3.4/SignatureV4Base.d.ts b/amplify/functions/downloadDocument/node_modules/@smithy/signature-v4/dist-types/ts3.4/SignatureV4Base.d.ts new file 
mode 100644 index 0000000..be1da1f --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@smithy/signature-v4/dist-types/ts3.4/SignatureV4Base.d.ts @@ -0,0 +1,69 @@ +import { AwsCredentialIdentity, ChecksumConstructor, DateInput, HashConstructor, HeaderBag, HttpRequest, Provider } from "@smithy/types"; +/** + * @public + */ +export interface SignatureV4Init { + /** + * The service signing name. + */ + service: string; + /** + * The region name or a function that returns a promise that will be + * resolved with the region name. + */ + region: string | Provider; + /** + * The credentials with which the request should be signed or a function + * that returns a promise that will be resolved with credentials. + */ + credentials: AwsCredentialIdentity | Provider; + /** + * A constructor function for a hash object that will calculate SHA-256 HMAC + * checksums. + */ + sha256?: ChecksumConstructor | HashConstructor; + /** + * Whether to uri-escape the request URI path as part of computing the + * canonical request string. This is required for every AWS service, except + * Amazon S3, as of late 2017. + * + * @default [true] + */ + uriEscapePath?: boolean; + /** + * Whether to calculate a checksum of the request body and include it as + * either a request header (when signing) or as a query string parameter + * (when presigning). This is required for AWS Glacier and Amazon S3 and optional for + * every other AWS service as of late 2017. 
+ * + * @default [true] + */ + applyChecksum?: boolean; +} +/** + * @public + */ +export interface SignatureV4CryptoInit { + sha256: ChecksumConstructor | HashConstructor; +} +/** + * @internal + */ +export declare abstract class SignatureV4Base { + protected readonly service: string; + protected readonly regionProvider: Provider; + protected readonly credentialProvider: Provider; + protected readonly sha256: ChecksumConstructor | HashConstructor; + private readonly uriEscapePath; + protected readonly applyChecksum: boolean; + protected constructor({ applyChecksum, credentials, region, service, sha256, uriEscapePath, }: SignatureV4Init & SignatureV4CryptoInit); + protected createCanonicalRequest(request: HttpRequest, canonicalHeaders: HeaderBag, payloadHash: string): string; + protected createStringToSign(longDate: string, credentialScope: string, canonicalRequest: string, algorithmIdentifier: string): Promise; + private getCanonicalPath; + protected validateResolvedCredentials(credentials: unknown): void; + protected formatDate(now: DateInput): { + longDate: string; + shortDate: string; + }; + protected getCanonicalHeaderList(headers: object): string; +} diff --git a/amplify/functions/downloadDocument/node_modules/@smithy/signature-v4/dist-types/ts3.4/constants.d.ts b/amplify/functions/downloadDocument/node_modules/@smithy/signature-v4/dist-types/ts3.4/constants.d.ts new file mode 100644 index 0000000..ff54b67 --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@smithy/signature-v4/dist-types/ts3.4/constants.d.ts @@ -0,0 +1,43 @@ +export declare const ALGORITHM_QUERY_PARAM = "X-Amz-Algorithm"; +export declare const CREDENTIAL_QUERY_PARAM = "X-Amz-Credential"; +export declare const AMZ_DATE_QUERY_PARAM = "X-Amz-Date"; +export declare const SIGNED_HEADERS_QUERY_PARAM = "X-Amz-SignedHeaders"; +export declare const EXPIRES_QUERY_PARAM = "X-Amz-Expires"; +export declare const SIGNATURE_QUERY_PARAM = "X-Amz-Signature"; +export declare const 
TOKEN_QUERY_PARAM = "X-Amz-Security-Token"; +export declare const REGION_SET_PARAM = "X-Amz-Region-Set"; +export declare const AUTH_HEADER = "authorization"; +export declare const AMZ_DATE_HEADER: string; +export declare const DATE_HEADER = "date"; +export declare const GENERATED_HEADERS: string[]; +export declare const SIGNATURE_HEADER: string; +export declare const SHA256_HEADER = "x-amz-content-sha256"; +export declare const TOKEN_HEADER: string; +export declare const HOST_HEADER = "host"; +export declare const ALWAYS_UNSIGNABLE_HEADERS: { + authorization: boolean; + "cache-control": boolean; + connection: boolean; + expect: boolean; + from: boolean; + "keep-alive": boolean; + "max-forwards": boolean; + pragma: boolean; + referer: boolean; + te: boolean; + trailer: boolean; + "transfer-encoding": boolean; + upgrade: boolean; + "user-agent": boolean; + "x-amzn-trace-id": boolean; +}; +export declare const PROXY_HEADER_PATTERN: RegExp; +export declare const SEC_HEADER_PATTERN: RegExp; +export declare const UNSIGNABLE_PATTERNS: RegExp[]; +export declare const ALGORITHM_IDENTIFIER = "AWS4-HMAC-SHA256"; +export declare const ALGORITHM_IDENTIFIER_V4A = "AWS4-ECDSA-P256-SHA256"; +export declare const EVENT_ALGORITHM_IDENTIFIER = "AWS4-HMAC-SHA256-PAYLOAD"; +export declare const UNSIGNED_PAYLOAD = "UNSIGNED-PAYLOAD"; +export declare const MAX_CACHE_SIZE = 50; +export declare const KEY_TYPE_IDENTIFIER = "aws4_request"; +export declare const MAX_PRESIGNED_TTL: number; diff --git a/amplify/functions/downloadDocument/node_modules/@smithy/signature-v4/dist-types/ts3.4/credentialDerivation.d.ts b/amplify/functions/downloadDocument/node_modules/@smithy/signature-v4/dist-types/ts3.4/credentialDerivation.d.ts new file mode 100644 index 0000000..6cba9b6 --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@smithy/signature-v4/dist-types/ts3.4/credentialDerivation.d.ts @@ -0,0 +1,30 @@ +import { AwsCredentialIdentity, ChecksumConstructor, HashConstructor } from 
"@smithy/types"; +/** + * Create a string describing the scope of credentials used to sign a request. + * + * @internal + * + * @param shortDate - the current calendar date in the form YYYYMMDD. + * @param region - the AWS region in which the service resides. + * @param service - the service to which the signed request is being sent. + */ +export declare const createScope: (shortDate: string, region: string, service: string) => string; +/** + * Derive a signing key from its composite parts. + * + * @internal + * + * @param sha256Constructor - a constructor function that can instantiate SHA-256 + * hash objects. + * @param credentials - the credentials with which the request will be + * signed. + * @param shortDate - the current calendar date in the form YYYYMMDD. + * @param region - the AWS region in which the service resides. + * @param service - the service to which the signed request is being + * sent. + */ +export declare const getSigningKey: (sha256Constructor: ChecksumConstructor | HashConstructor, credentials: AwsCredentialIdentity, shortDate: string, region: string, service: string) => Promise; +/** + * @internal + */ +export declare const clearCredentialCache: () => void; diff --git a/amplify/functions/downloadDocument/node_modules/@smithy/signature-v4/dist-types/ts3.4/getCanonicalHeaders.d.ts b/amplify/functions/downloadDocument/node_modules/@smithy/signature-v4/dist-types/ts3.4/getCanonicalHeaders.d.ts new file mode 100644 index 0000000..e8f2e98 --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@smithy/signature-v4/dist-types/ts3.4/getCanonicalHeaders.d.ts @@ -0,0 +1,5 @@ +import { HeaderBag, HttpRequest } from "@smithy/types"; +/** + * @internal + */ +export declare const getCanonicalHeaders: ({ headers }: HttpRequest, unsignableHeaders?: Set, signableHeaders?: Set) => HeaderBag; diff --git a/amplify/functions/downloadDocument/node_modules/@smithy/signature-v4/dist-types/ts3.4/getCanonicalQuery.d.ts 
b/amplify/functions/downloadDocument/node_modules/@smithy/signature-v4/dist-types/ts3.4/getCanonicalQuery.d.ts new file mode 100644 index 0000000..6a2d4fa --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@smithy/signature-v4/dist-types/ts3.4/getCanonicalQuery.d.ts @@ -0,0 +1,5 @@ +import { HttpRequest } from "@smithy/types"; +/** + * @internal + */ +export declare const getCanonicalQuery: ({ query }: HttpRequest) => string; diff --git a/amplify/functions/downloadDocument/node_modules/@smithy/signature-v4/dist-types/ts3.4/getPayloadHash.d.ts b/amplify/functions/downloadDocument/node_modules/@smithy/signature-v4/dist-types/ts3.4/getPayloadHash.d.ts new file mode 100644 index 0000000..c14a46d --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@smithy/signature-v4/dist-types/ts3.4/getPayloadHash.d.ts @@ -0,0 +1,5 @@ +import { ChecksumConstructor, HashConstructor, HttpRequest } from "@smithy/types"; +/** + * @internal + */ +export declare const getPayloadHash: ({ headers, body }: HttpRequest, hashConstructor: ChecksumConstructor | HashConstructor) => Promise; diff --git a/amplify/functions/downloadDocument/node_modules/@smithy/signature-v4/dist-types/ts3.4/headerUtil.d.ts b/amplify/functions/downloadDocument/node_modules/@smithy/signature-v4/dist-types/ts3.4/headerUtil.d.ts new file mode 100644 index 0000000..41ca217 --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@smithy/signature-v4/dist-types/ts3.4/headerUtil.d.ts @@ -0,0 +1,4 @@ +import { HeaderBag } from "@smithy/types"; +export declare const hasHeader: (soughtHeader: string, headers: HeaderBag) => boolean; +export declare const getHeaderValue: (soughtHeader: string, headers: HeaderBag) => string | undefined; +export declare const deleteHeader: (soughtHeader: string, headers: HeaderBag) => void; diff --git a/amplify/functions/downloadDocument/node_modules/@smithy/signature-v4/dist-types/ts3.4/index.d.ts 
b/amplify/functions/downloadDocument/node_modules/@smithy/signature-v4/dist-types/ts3.4/index.d.ts new file mode 100644 index 0000000..c9fa5f6 --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@smithy/signature-v4/dist-types/ts3.4/index.d.ts @@ -0,0 +1,11 @@ +export * from "./SignatureV4"; +export * from "./constants"; +export { getCanonicalHeaders } from "./getCanonicalHeaders"; +export { getCanonicalQuery } from "./getCanonicalQuery"; +export { getPayloadHash } from "./getPayloadHash"; +export { moveHeadersToQuery } from "./moveHeadersToQuery"; +export { prepareRequest } from "./prepareRequest"; +export * from "./credentialDerivation"; +export { SignatureV4Init, SignatureV4CryptoInit, SignatureV4Base } from "./SignatureV4Base"; +export { hasHeader } from "./headerUtil"; +export * from "./signature-v4a-container"; diff --git a/amplify/functions/downloadDocument/node_modules/@smithy/signature-v4/dist-types/ts3.4/moveHeadersToQuery.d.ts b/amplify/functions/downloadDocument/node_modules/@smithy/signature-v4/dist-types/ts3.4/moveHeadersToQuery.d.ts new file mode 100644 index 0000000..2017f3b --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@smithy/signature-v4/dist-types/ts3.4/moveHeadersToQuery.d.ts @@ -0,0 +1,10 @@ +import { HttpRequest as IHttpRequest, QueryParameterBag } from "@smithy/types"; +/** + * @internal + */ +export declare const moveHeadersToQuery: (request: IHttpRequest, options?: { + unhoistableHeaders?: Set; + hoistableHeaders?: Set; +}) => IHttpRequest & { + query: QueryParameterBag; +}; diff --git a/amplify/functions/downloadDocument/node_modules/@smithy/signature-v4/dist-types/ts3.4/prepareRequest.d.ts b/amplify/functions/downloadDocument/node_modules/@smithy/signature-v4/dist-types/ts3.4/prepareRequest.d.ts new file mode 100644 index 0000000..57cf782 --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@smithy/signature-v4/dist-types/ts3.4/prepareRequest.d.ts @@ -0,0 +1,5 @@ +import { HttpRequest 
as IHttpRequest } from "@smithy/types"; +/** + * @internal + */ +export declare const prepareRequest: (request: IHttpRequest) => IHttpRequest; diff --git a/amplify/functions/downloadDocument/node_modules/@smithy/signature-v4/dist-types/ts3.4/signature-v4a-container.d.ts b/amplify/functions/downloadDocument/node_modules/@smithy/signature-v4/dist-types/ts3.4/signature-v4a-container.d.ts new file mode 100644 index 0000000..f1adc97 --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@smithy/signature-v4/dist-types/ts3.4/signature-v4a-container.d.ts @@ -0,0 +1,24 @@ +import { RequestSigner } from "@smithy/types"; +/** + * @public + */ +export type OptionalSigV4aSigner = { + /** + * This constructor is not typed so as not to require a type import + * from the signature-v4a package. + * + * The true type is SignatureV4a from @smithy/signature-v4a. + */ + new (options: any): RequestSigner; +}; +/** + * @public + * + * \@smithy/signature-v4a will install the constructor in this + * container if it's installed. + * + * This avoids a runtime-require being interpreted statically by bundlers. 
+ */ +export declare const signatureV4aContainer: { + SignatureV4a: null | OptionalSigV4aSigner; +}; diff --git a/amplify/functions/downloadDocument/node_modules/@smithy/signature-v4/dist-types/ts3.4/suite.fixture.d.ts b/amplify/functions/downloadDocument/node_modules/@smithy/signature-v4/dist-types/ts3.4/suite.fixture.d.ts new file mode 100644 index 0000000..9363eeb --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@smithy/signature-v4/dist-types/ts3.4/suite.fixture.d.ts @@ -0,0 +1,14 @@ +import { HttpRequest } from "@smithy/types"; +export interface TestCase { + name: string; + request: HttpRequest; + authorization: string; +} +export declare const region = "us-east-1"; +export declare const service = "service"; +export declare const credentials: { + accessKeyId: string; + secretAccessKey: string; +}; +export declare const signingDate: Date; +export declare const requests: Array; diff --git a/amplify/functions/downloadDocument/node_modules/@smithy/signature-v4/dist-types/ts3.4/utilDate.d.ts b/amplify/functions/downloadDocument/node_modules/@smithy/signature-v4/dist-types/ts3.4/utilDate.d.ts new file mode 100644 index 0000000..9a6f383 --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@smithy/signature-v4/dist-types/ts3.4/utilDate.d.ts @@ -0,0 +1,2 @@ +export declare const iso8601: (time: number | string | Date) => string; +export declare const toDate: (time: number | string | Date) => Date; diff --git a/amplify/functions/downloadDocument/node_modules/@smithy/signature-v4/dist-types/utilDate.d.ts b/amplify/functions/downloadDocument/node_modules/@smithy/signature-v4/dist-types/utilDate.d.ts new file mode 100644 index 0000000..e8c6a68 --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@smithy/signature-v4/dist-types/utilDate.d.ts @@ -0,0 +1,2 @@ +export declare const iso8601: (time: number | string | Date) => string; +export declare const toDate: (time: number | string | Date) => Date; diff --git 
a/amplify/functions/downloadDocument/node_modules/@smithy/signature-v4/package.json b/amplify/functions/downloadDocument/node_modules/@smithy/signature-v4/package.json new file mode 100644 index 0000000..56e028c --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@smithy/signature-v4/package.json @@ -0,0 +1,69 @@ +{ + "name": "@smithy/signature-v4", + "version": "5.1.0", + "description": "A standalone implementation of the AWS Signature V4 request signing algorithm", + "main": "./dist-cjs/index.js", + "module": "./dist-es/index.js", + "types": "./dist-types/index.d.ts", + "scripts": { + "build": "concurrently 'yarn:build:cjs' 'yarn:build:es' 'yarn:build:types && yarn build:types:downlevel'", + "build:cjs": "node ../../scripts/inline signature-v4", + "build:es": "yarn g:tsc -p tsconfig.es.json", + "build:types": "yarn g:tsc -p tsconfig.types.json", + "build:types:downlevel": "rimraf dist-types/ts3.4 && downlevel-dts dist-types dist-types/ts3.4", + "stage-release": "rimraf ./.release && yarn pack && mkdir ./.release && tar zxvf ./package.tgz --directory ./.release && rm ./package.tgz", + "clean": "rimraf ./dist-* && rimraf *.tsbuildinfo || exit 0", + "lint": "eslint -c ../../.eslintrc.js \"src/**/*.ts\"", + "format": "prettier --config ../../prettier.config.js --ignore-path ../../.prettierignore --write \"**/*.{ts,md,json}\"", + "extract:docs": "api-extractor run --local", + "test": "yarn g:vitest run", + "test:watch": "yarn g:vitest watch" + }, + "author": { + "name": "AWS SDK for JavaScript Team", + "url": "https://aws.amazon.com/javascript/" + }, + "license": "Apache-2.0", + "dependencies": { + "@smithy/is-array-buffer": "^4.0.0", + "@smithy/protocol-http": "^5.1.0", + "@smithy/types": "^4.2.0", + "@smithy/util-hex-encoding": "^4.0.0", + "@smithy/util-middleware": "^4.0.2", + "@smithy/util-uri-escape": "^4.0.0", + "@smithy/util-utf8": "^4.0.0", + "tslib": "^2.6.2" + }, + "devDependencies": { + "@aws-crypto/sha256-js": "5.2.0", + "concurrently": 
"7.0.0", + "downlevel-dts": "0.10.1", + "rimraf": "3.0.2", + "typedoc": "0.23.23" + }, + "engines": { + "node": ">=18.0.0" + }, + "typesVersions": { + "<4.0": { + "dist-types/*": [ + "dist-types/ts3.4/*" + ] + } + }, + "files": [ + "dist-*/**" + ], + "homepage": "https://github.com/smithy-lang/smithy-typescript/tree/main/packages/signature-v4", + "repository": { + "type": "git", + "url": "https://github.com/smithy-lang/smithy-typescript.git", + "directory": "packages/signature-v4" + }, + "typedoc": { + "entryPoint": "src/index.ts" + }, + "publishConfig": { + "directory": ".release/package" + } +} \ No newline at end of file diff --git a/amplify/functions/downloadDocument/node_modules/@smithy/smithy-client/LICENSE b/amplify/functions/downloadDocument/node_modules/@smithy/smithy-client/LICENSE new file mode 100644 index 0000000..e907b58 --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@smithy/smithy-client/LICENSE @@ -0,0 +1,201 @@ + Apache License + Version 2.0, January 2004 + http://www.apache.org/licenses/ + + TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION + + 1. Definitions. + + "License" shall mean the terms and conditions for use, reproduction, + and distribution as defined by Sections 1 through 9 of this document. + + "Licensor" shall mean the copyright owner or entity authorized by + the copyright owner that is granting the License. + + "Legal Entity" shall mean the union of the acting entity and all + other entities that control, are controlled by, or are under common + control with that entity. For the purposes of this definition, + "control" means (i) the power, direct or indirect, to cause the + direction or management of such entity, whether by contract or + otherwise, or (ii) ownership of fifty percent (50%) or more of the + outstanding shares, or (iii) beneficial ownership of such entity. + + "You" (or "Your") shall mean an individual or Legal Entity + exercising permissions granted by this License. 
+ + "Source" form shall mean the preferred form for making modifications, + including but not limited to software source code, documentation + source, and configuration files. + + "Object" form shall mean any form resulting from mechanical + transformation or translation of a Source form, including but + not limited to compiled object code, generated documentation, + and conversions to other media types. + + "Work" shall mean the work of authorship, whether in Source or + Object form, made available under the License, as indicated by a + copyright notice that is included in or attached to the work + (an example is provided in the Appendix below). + + "Derivative Works" shall mean any work, whether in Source or Object + form, that is based on (or derived from) the Work and for which the + editorial revisions, annotations, elaborations, or other modifications + represent, as a whole, an original work of authorship. For the purposes + of this License, Derivative Works shall not include works that remain + separable from, or merely link (or bind by name) to the interfaces of, + the Work and Derivative Works thereof. + + "Contribution" shall mean any work of authorship, including + the original version of the Work and any modifications or additions + to that Work or Derivative Works thereof, that is intentionally + submitted to Licensor for inclusion in the Work by the copyright owner + or by an individual or Legal Entity authorized to submit on behalf of + the copyright owner. 
For the purposes of this definition, "submitted" + means any form of electronic, verbal, or written communication sent + to the Licensor or its representatives, including but not limited to + communication on electronic mailing lists, source code control systems, + and issue tracking systems that are managed by, or on behalf of, the + Licensor for the purpose of discussing and improving the Work, but + excluding communication that is conspicuously marked or otherwise + designated in writing by the copyright owner as "Not a Contribution." + + "Contributor" shall mean Licensor and any individual or Legal Entity + on behalf of whom a Contribution has been received by Licensor and + subsequently incorporated within the Work. + + 2. Grant of Copyright License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + copyright license to reproduce, prepare Derivative Works of, + publicly display, publicly perform, sublicense, and distribute the + Work and such Derivative Works in Source or Object form. + + 3. Grant of Patent License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + (except as stated in this section) patent license to make, have made, + use, offer to sell, sell, import, and otherwise transfer the Work, + where such license applies only to those patent claims licensable + by such Contributor that are necessarily infringed by their + Contribution(s) alone or by combination of their Contribution(s) + with the Work to which such Contribution(s) was submitted. 
If You + institute patent litigation against any entity (including a + cross-claim or counterclaim in a lawsuit) alleging that the Work + or a Contribution incorporated within the Work constitutes direct + or contributory patent infringement, then any patent licenses + granted to You under this License for that Work shall terminate + as of the date such litigation is filed. + + 4. Redistribution. You may reproduce and distribute copies of the + Work or Derivative Works thereof in any medium, with or without + modifications, and in Source or Object form, provided that You + meet the following conditions: + + (a) You must give any other recipients of the Work or + Derivative Works a copy of this License; and + + (b) You must cause any modified files to carry prominent notices + stating that You changed the files; and + + (c) You must retain, in the Source form of any Derivative Works + that You distribute, all copyright, patent, trademark, and + attribution notices from the Source form of the Work, + excluding those notices that do not pertain to any part of + the Derivative Works; and + + (d) If the Work includes a "NOTICE" text file as part of its + distribution, then any Derivative Works that You distribute must + include a readable copy of the attribution notices contained + within such NOTICE file, excluding those notices that do not + pertain to any part of the Derivative Works, in at least one + of the following places: within a NOTICE text file distributed + as part of the Derivative Works; within the Source form or + documentation, if provided along with the Derivative Works; or, + within a display generated by the Derivative Works, if and + wherever such third-party notices normally appear. The contents + of the NOTICE file are for informational purposes only and + do not modify the License. 
You may add Your own attribution + notices within Derivative Works that You distribute, alongside + or as an addendum to the NOTICE text from the Work, provided + that such additional attribution notices cannot be construed + as modifying the License. + + You may add Your own copyright statement to Your modifications and + may provide additional or different license terms and conditions + for use, reproduction, or distribution of Your modifications, or + for any such Derivative Works as a whole, provided Your use, + reproduction, and distribution of the Work otherwise complies with + the conditions stated in this License. + + 5. Submission of Contributions. Unless You explicitly state otherwise, + any Contribution intentionally submitted for inclusion in the Work + by You to the Licensor shall be under the terms and conditions of + this License, without any additional terms or conditions. + Notwithstanding the above, nothing herein shall supersede or modify + the terms of any separate license agreement you may have executed + with Licensor regarding such Contributions. + + 6. Trademarks. This License does not grant permission to use the trade + names, trademarks, service marks, or product names of the Licensor, + except as required for reasonable and customary use in describing the + origin of the Work and reproducing the content of the NOTICE file. + + 7. Disclaimer of Warranty. Unless required by applicable law or + agreed to in writing, Licensor provides the Work (and each + Contributor provides its Contributions) on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or + implied, including, without limitation, any warranties or conditions + of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A + PARTICULAR PURPOSE. You are solely responsible for determining the + appropriateness of using or redistributing the Work and assume any + risks associated with Your exercise of permissions under this License. + + 8. 
Limitation of Liability. In no event and under no legal theory, + whether in tort (including negligence), contract, or otherwise, + unless required by applicable law (such as deliberate and grossly + negligent acts) or agreed to in writing, shall any Contributor be + liable to You for damages, including any direct, indirect, special, + incidental, or consequential damages of any character arising as a + result of this License or out of the use or inability to use the + Work (including but not limited to damages for loss of goodwill, + work stoppage, computer failure or malfunction, or any and all + other commercial damages or losses), even if such Contributor + has been advised of the possibility of such damages. + + 9. Accepting Warranty or Additional Liability. While redistributing + the Work or Derivative Works thereof, You may choose to offer, + and charge a fee for, acceptance of support, warranty, indemnity, + or other liability obligations and/or rights consistent with this + License. However, in accepting such obligations, You may act only + on Your own behalf and on Your sole responsibility, not on behalf + of any other Contributor, and only if You agree to indemnify, + defend, and hold each Contributor harmless for any liability + incurred by, or claims asserted against, such Contributor by reason + of your accepting any such warranty or additional liability. + + END OF TERMS AND CONDITIONS + + APPENDIX: How to apply the Apache License to your work. + + To apply the Apache License to your work, attach the following + boilerplate notice, with the fields enclosed by brackets "{}" + replaced with your own identifying information. (Don't include + the brackets!) The text should be enclosed in the appropriate + comment syntax for the file format. We also recommend that a + file or class name and description of purpose be included on the + same "printed page" as the copyright notice for easier + identification within third-party archives. 
+ + Copyright 2019 Amazon.com, Inc. or its affiliates. All Rights Reserved. + + Licensed under the Apache License, Version 2.0 (the "License"); + you may not use this file except in compliance with the License. + You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + + Unless required by applicable law or agreed to in writing, software + distributed under the License is distributed on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + See the License for the specific language governing permissions and + limitations under the License. diff --git a/amplify/functions/downloadDocument/node_modules/@smithy/smithy-client/README.md b/amplify/functions/downloadDocument/node_modules/@smithy/smithy-client/README.md new file mode 100644 index 0000000..365cd62 --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@smithy/smithy-client/README.md @@ -0,0 +1,10 @@ +# @smithy/smithy-client + +[![NPM version](https://img.shields.io/npm/v/@smithy/smithy-client/latest.svg)](https://www.npmjs.com/package/@smithy/smithy-client) +[![NPM downloads](https://img.shields.io/npm/dm/@smithy/smithy-client.svg)](https://www.npmjs.com/package/@smithy/smithy-client) + +> An internal package + +## Usage + +You probably shouldn't, at least directly. 
diff --git a/amplify/functions/downloadDocument/node_modules/@smithy/smithy-client/dist-cjs/NoOpLogger.js b/amplify/functions/downloadDocument/node_modules/@smithy/smithy-client/dist-cjs/NoOpLogger.js new file mode 100644 index 0000000..532e610 --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@smithy/smithy-client/dist-cjs/NoOpLogger.js @@ -0,0 +1 @@ +module.exports = require("./index.js"); \ No newline at end of file diff --git a/amplify/functions/downloadDocument/node_modules/@smithy/smithy-client/dist-cjs/client.js b/amplify/functions/downloadDocument/node_modules/@smithy/smithy-client/dist-cjs/client.js new file mode 100644 index 0000000..532e610 --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@smithy/smithy-client/dist-cjs/client.js @@ -0,0 +1 @@ +module.exports = require("./index.js"); \ No newline at end of file diff --git a/amplify/functions/downloadDocument/node_modules/@smithy/smithy-client/dist-cjs/collect-stream-body.js b/amplify/functions/downloadDocument/node_modules/@smithy/smithy-client/dist-cjs/collect-stream-body.js new file mode 100644 index 0000000..532e610 --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@smithy/smithy-client/dist-cjs/collect-stream-body.js @@ -0,0 +1 @@ +module.exports = require("./index.js"); \ No newline at end of file diff --git a/amplify/functions/downloadDocument/node_modules/@smithy/smithy-client/dist-cjs/command.js b/amplify/functions/downloadDocument/node_modules/@smithy/smithy-client/dist-cjs/command.js new file mode 100644 index 0000000..532e610 --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@smithy/smithy-client/dist-cjs/command.js @@ -0,0 +1 @@ +module.exports = require("./index.js"); \ No newline at end of file diff --git a/amplify/functions/downloadDocument/node_modules/@smithy/smithy-client/dist-cjs/constants.js b/amplify/functions/downloadDocument/node_modules/@smithy/smithy-client/dist-cjs/constants.js new file mode 100644 index 
0000000..532e610 --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@smithy/smithy-client/dist-cjs/constants.js @@ -0,0 +1 @@ +module.exports = require("./index.js"); \ No newline at end of file diff --git a/amplify/functions/downloadDocument/node_modules/@smithy/smithy-client/dist-cjs/create-aggregated-client.js b/amplify/functions/downloadDocument/node_modules/@smithy/smithy-client/dist-cjs/create-aggregated-client.js new file mode 100644 index 0000000..532e610 --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@smithy/smithy-client/dist-cjs/create-aggregated-client.js @@ -0,0 +1 @@ +module.exports = require("./index.js"); \ No newline at end of file diff --git a/amplify/functions/downloadDocument/node_modules/@smithy/smithy-client/dist-cjs/date-utils.js b/amplify/functions/downloadDocument/node_modules/@smithy/smithy-client/dist-cjs/date-utils.js new file mode 100644 index 0000000..532e610 --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@smithy/smithy-client/dist-cjs/date-utils.js @@ -0,0 +1 @@ +module.exports = require("./index.js"); \ No newline at end of file diff --git a/amplify/functions/downloadDocument/node_modules/@smithy/smithy-client/dist-cjs/default-error-handler.js b/amplify/functions/downloadDocument/node_modules/@smithy/smithy-client/dist-cjs/default-error-handler.js new file mode 100644 index 0000000..532e610 --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@smithy/smithy-client/dist-cjs/default-error-handler.js @@ -0,0 +1 @@ +module.exports = require("./index.js"); \ No newline at end of file diff --git a/amplify/functions/downloadDocument/node_modules/@smithy/smithy-client/dist-cjs/defaults-mode.js b/amplify/functions/downloadDocument/node_modules/@smithy/smithy-client/dist-cjs/defaults-mode.js new file mode 100644 index 0000000..532e610 --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@smithy/smithy-client/dist-cjs/defaults-mode.js @@ -0,0 +1 @@ 
+module.exports = require("./index.js"); \ No newline at end of file diff --git a/amplify/functions/downloadDocument/node_modules/@smithy/smithy-client/dist-cjs/emitWarningIfUnsupportedVersion.js b/amplify/functions/downloadDocument/node_modules/@smithy/smithy-client/dist-cjs/emitWarningIfUnsupportedVersion.js new file mode 100644 index 0000000..532e610 --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@smithy/smithy-client/dist-cjs/emitWarningIfUnsupportedVersion.js @@ -0,0 +1 @@ +module.exports = require("./index.js"); \ No newline at end of file diff --git a/amplify/functions/downloadDocument/node_modules/@smithy/smithy-client/dist-cjs/exceptions.js b/amplify/functions/downloadDocument/node_modules/@smithy/smithy-client/dist-cjs/exceptions.js new file mode 100644 index 0000000..532e610 --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@smithy/smithy-client/dist-cjs/exceptions.js @@ -0,0 +1 @@ +module.exports = require("./index.js"); \ No newline at end of file diff --git a/amplify/functions/downloadDocument/node_modules/@smithy/smithy-client/dist-cjs/extended-encode-uri-component.js b/amplify/functions/downloadDocument/node_modules/@smithy/smithy-client/dist-cjs/extended-encode-uri-component.js new file mode 100644 index 0000000..532e610 --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@smithy/smithy-client/dist-cjs/extended-encode-uri-component.js @@ -0,0 +1 @@ +module.exports = require("./index.js"); \ No newline at end of file diff --git a/amplify/functions/downloadDocument/node_modules/@smithy/smithy-client/dist-cjs/extensions/checksum.js b/amplify/functions/downloadDocument/node_modules/@smithy/smithy-client/dist-cjs/extensions/checksum.js new file mode 100644 index 0000000..0440577 --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@smithy/smithy-client/dist-cjs/extensions/checksum.js @@ -0,0 +1 @@ +module.exports = require("../index.js"); \ No newline at end of file diff --git 
a/amplify/functions/downloadDocument/node_modules/@smithy/smithy-client/dist-cjs/extensions/defaultExtensionConfiguration.js b/amplify/functions/downloadDocument/node_modules/@smithy/smithy-client/dist-cjs/extensions/defaultExtensionConfiguration.js new file mode 100644 index 0000000..0440577 --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@smithy/smithy-client/dist-cjs/extensions/defaultExtensionConfiguration.js @@ -0,0 +1 @@ +module.exports = require("../index.js"); \ No newline at end of file diff --git a/amplify/functions/downloadDocument/node_modules/@smithy/smithy-client/dist-cjs/extensions/index.js b/amplify/functions/downloadDocument/node_modules/@smithy/smithy-client/dist-cjs/extensions/index.js new file mode 100644 index 0000000..0440577 --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@smithy/smithy-client/dist-cjs/extensions/index.js @@ -0,0 +1 @@ +module.exports = require("../index.js"); \ No newline at end of file diff --git a/amplify/functions/downloadDocument/node_modules/@smithy/smithy-client/dist-cjs/extensions/retry.js b/amplify/functions/downloadDocument/node_modules/@smithy/smithy-client/dist-cjs/extensions/retry.js new file mode 100644 index 0000000..0440577 --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@smithy/smithy-client/dist-cjs/extensions/retry.js @@ -0,0 +1 @@ +module.exports = require("../index.js"); \ No newline at end of file diff --git a/amplify/functions/downloadDocument/node_modules/@smithy/smithy-client/dist-cjs/get-array-if-single-item.js b/amplify/functions/downloadDocument/node_modules/@smithy/smithy-client/dist-cjs/get-array-if-single-item.js new file mode 100644 index 0000000..532e610 --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@smithy/smithy-client/dist-cjs/get-array-if-single-item.js @@ -0,0 +1 @@ +module.exports = require("./index.js"); \ No newline at end of file diff --git 
a/amplify/functions/downloadDocument/node_modules/@smithy/smithy-client/dist-cjs/get-value-from-text-node.js b/amplify/functions/downloadDocument/node_modules/@smithy/smithy-client/dist-cjs/get-value-from-text-node.js new file mode 100644 index 0000000..532e610 --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@smithy/smithy-client/dist-cjs/get-value-from-text-node.js @@ -0,0 +1 @@ +module.exports = require("./index.js"); \ No newline at end of file diff --git a/amplify/functions/downloadDocument/node_modules/@smithy/smithy-client/dist-cjs/index.js b/amplify/functions/downloadDocument/node_modules/@smithy/smithy-client/dist-cjs/index.js new file mode 100644 index 0000000..c410d8d --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@smithy/smithy-client/dist-cjs/index.js @@ -0,0 +1,1321 @@ +var __defProp = Object.defineProperty; +var __getOwnPropDesc = Object.getOwnPropertyDescriptor; +var __getOwnPropNames = Object.getOwnPropertyNames; +var __hasOwnProp = Object.prototype.hasOwnProperty; +var __name = (target, value) => __defProp(target, "name", { value, configurable: true }); +var __export = (target, all) => { + for (var name in all) + __defProp(target, name, { get: all[name], enumerable: true }); +}; +var __copyProps = (to, from, except, desc) => { + if (from && typeof from === "object" || typeof from === "function") { + for (let key of __getOwnPropNames(from)) + if (!__hasOwnProp.call(to, key) && key !== except) + __defProp(to, key, { get: () => from[key], enumerable: !(desc = __getOwnPropDesc(from, key)) || desc.enumerable }); + } + return to; +}; +var __toCommonJS = (mod) => __copyProps(__defProp({}, "__esModule", { value: true }), mod); + +// src/index.ts +var src_exports = {}; +__export(src_exports, { + Client: () => Client, + Command: () => Command, + LazyJsonString: () => LazyJsonString, + NoOpLogger: () => NoOpLogger, + SENSITIVE_STRING: () => SENSITIVE_STRING, + ServiceException: () => ServiceException, + _json: () => 
_json, + collectBody: () => import_protocols.collectBody, + convertMap: () => convertMap, + createAggregatedClient: () => createAggregatedClient, + dateToUtcString: () => dateToUtcString, + decorateServiceException: () => decorateServiceException, + emitWarningIfUnsupportedVersion: () => emitWarningIfUnsupportedVersion, + expectBoolean: () => expectBoolean, + expectByte: () => expectByte, + expectFloat32: () => expectFloat32, + expectInt: () => expectInt, + expectInt32: () => expectInt32, + expectLong: () => expectLong, + expectNonNull: () => expectNonNull, + expectNumber: () => expectNumber, + expectObject: () => expectObject, + expectShort: () => expectShort, + expectString: () => expectString, + expectUnion: () => expectUnion, + extendedEncodeURIComponent: () => import_protocols.extendedEncodeURIComponent, + getArrayIfSingleItem: () => getArrayIfSingleItem, + getDefaultClientConfiguration: () => getDefaultClientConfiguration, + getDefaultExtensionConfiguration: () => getDefaultExtensionConfiguration, + getValueFromTextNode: () => getValueFromTextNode, + handleFloat: () => handleFloat, + isSerializableHeaderValue: () => isSerializableHeaderValue, + limitedParseDouble: () => limitedParseDouble, + limitedParseFloat: () => limitedParseFloat, + limitedParseFloat32: () => limitedParseFloat32, + loadConfigsForDefaultMode: () => loadConfigsForDefaultMode, + logger: () => logger, + map: () => map, + parseBoolean: () => parseBoolean, + parseEpochTimestamp: () => parseEpochTimestamp, + parseRfc3339DateTime: () => parseRfc3339DateTime, + parseRfc3339DateTimeWithOffset: () => parseRfc3339DateTimeWithOffset, + parseRfc7231DateTime: () => parseRfc7231DateTime, + quoteHeader: () => quoteHeader, + resolveDefaultRuntimeConfig: () => resolveDefaultRuntimeConfig, + resolvedPath: () => import_protocols.resolvedPath, + serializeDateTime: () => serializeDateTime, + serializeFloat: () => serializeFloat, + splitEvery: () => splitEvery, + splitHeader: () => splitHeader, + 
strictParseByte: () => strictParseByte, + strictParseDouble: () => strictParseDouble, + strictParseFloat: () => strictParseFloat, + strictParseFloat32: () => strictParseFloat32, + strictParseInt: () => strictParseInt, + strictParseInt32: () => strictParseInt32, + strictParseLong: () => strictParseLong, + strictParseShort: () => strictParseShort, + take: () => take, + throwDefaultError: () => throwDefaultError, + withBaseException: () => withBaseException +}); +module.exports = __toCommonJS(src_exports); + +// src/client.ts +var import_middleware_stack = require("@smithy/middleware-stack"); +var Client = class { + constructor(config) { + this.config = config; + this.middlewareStack = (0, import_middleware_stack.constructStack)(); + } + static { + __name(this, "Client"); + } + send(command, optionsOrCb, cb) { + const options = typeof optionsOrCb !== "function" ? optionsOrCb : void 0; + const callback = typeof optionsOrCb === "function" ? optionsOrCb : cb; + const useHandlerCache = options === void 0 && this.config.cacheMiddleware === true; + let handler; + if (useHandlerCache) { + if (!this.handlers) { + this.handlers = /* @__PURE__ */ new WeakMap(); + } + const handlers = this.handlers; + if (handlers.has(command.constructor)) { + handler = handlers.get(command.constructor); + } else { + handler = command.resolveMiddleware(this.middlewareStack, this.config, options); + handlers.set(command.constructor, handler); + } + } else { + delete this.handlers; + handler = command.resolveMiddleware(this.middlewareStack, this.config, options); + } + if (callback) { + handler(command).then( + (result) => callback(null, result.output), + (err) => callback(err) + ).catch( + // prevent any errors thrown in the callback from triggering an + // unhandled promise rejection + () => { + } + ); + } else { + return handler(command).then((result) => result.output); + } + } + destroy() { + this.config?.requestHandler?.destroy?.(); + delete this.handlers; + } +}; + +// 
src/collect-stream-body.ts +var import_protocols = require("@smithy/core/protocols"); + +// src/command.ts + +var import_types = require("@smithy/types"); +var Command = class { + constructor() { + this.middlewareStack = (0, import_middleware_stack.constructStack)(); + } + static { + __name(this, "Command"); + } + /** + * Factory for Command ClassBuilder. + * @internal + */ + static classBuilder() { + return new ClassBuilder(); + } + /** + * @internal + */ + resolveMiddlewareWithContext(clientStack, configuration, options, { + middlewareFn, + clientName, + commandName, + inputFilterSensitiveLog, + outputFilterSensitiveLog, + smithyContext, + additionalContext, + CommandCtor + }) { + for (const mw of middlewareFn.bind(this)(CommandCtor, clientStack, configuration, options)) { + this.middlewareStack.use(mw); + } + const stack = clientStack.concat(this.middlewareStack); + const { logger: logger2 } = configuration; + const handlerExecutionContext = { + logger: logger2, + clientName, + commandName, + inputFilterSensitiveLog, + outputFilterSensitiveLog, + [import_types.SMITHY_CONTEXT_KEY]: { + commandInstance: this, + ...smithyContext + }, + ...additionalContext + }; + const { requestHandler } = configuration; + return stack.resolve( + (request) => requestHandler.handle(request.request, options || {}), + handlerExecutionContext + ); + } +}; +var ClassBuilder = class { + constructor() { + this._init = () => { + }; + this._ep = {}; + this._middlewareFn = () => []; + this._commandName = ""; + this._clientName = ""; + this._additionalContext = {}; + this._smithyContext = {}; + this._inputFilterSensitiveLog = (_) => _; + this._outputFilterSensitiveLog = (_) => _; + this._serializer = null; + this._deserializer = null; + } + static { + __name(this, "ClassBuilder"); + } + /** + * Optional init callback. + */ + init(cb) { + this._init = cb; + } + /** + * Set the endpoint parameter instructions. 
+ */ + ep(endpointParameterInstructions) { + this._ep = endpointParameterInstructions; + return this; + } + /** + * Add any number of middleware. + */ + m(middlewareSupplier) { + this._middlewareFn = middlewareSupplier; + return this; + } + /** + * Set the initial handler execution context Smithy field. + */ + s(service, operation, smithyContext = {}) { + this._smithyContext = { + service, + operation, + ...smithyContext + }; + return this; + } + /** + * Set the initial handler execution context. + */ + c(additionalContext = {}) { + this._additionalContext = additionalContext; + return this; + } + /** + * Set constant string identifiers for the operation. + */ + n(clientName, commandName) { + this._clientName = clientName; + this._commandName = commandName; + return this; + } + /** + * Set the input and output sensistive log filters. + */ + f(inputFilter = (_) => _, outputFilter = (_) => _) { + this._inputFilterSensitiveLog = inputFilter; + this._outputFilterSensitiveLog = outputFilter; + return this; + } + /** + * Sets the serializer. + */ + ser(serializer) { + this._serializer = serializer; + return this; + } + /** + * Sets the deserializer. + */ + de(deserializer) { + this._deserializer = deserializer; + return this; + } + /** + * @returns a Command class with the classBuilder properties. + */ + build() { + const closure = this; + let CommandRef; + return CommandRef = class extends Command { + /** + * @public + */ + constructor(...[input]) { + super(); + /** + * @internal + */ + // @ts-ignore used in middlewareFn closure. + this.serialize = closure._serializer; + /** + * @internal + */ + // @ts-ignore used in middlewareFn closure. + this.deserialize = closure._deserializer; + this.input = input ?? 
{}; + closure._init(this); + } + static { + __name(this, "CommandRef"); + } + /** + * @public + */ + static getEndpointParameterInstructions() { + return closure._ep; + } + /** + * @internal + */ + resolveMiddleware(stack, configuration, options) { + return this.resolveMiddlewareWithContext(stack, configuration, options, { + CommandCtor: CommandRef, + middlewareFn: closure._middlewareFn, + clientName: closure._clientName, + commandName: closure._commandName, + inputFilterSensitiveLog: closure._inputFilterSensitiveLog, + outputFilterSensitiveLog: closure._outputFilterSensitiveLog, + smithyContext: closure._smithyContext, + additionalContext: closure._additionalContext + }); + } + }; + } +}; + +// src/constants.ts +var SENSITIVE_STRING = "***SensitiveInformation***"; + +// src/create-aggregated-client.ts +var createAggregatedClient = /* @__PURE__ */ __name((commands, Client2) => { + for (const command of Object.keys(commands)) { + const CommandCtor = commands[command]; + const methodImpl = /* @__PURE__ */ __name(async function(args, optionsOrCb, cb) { + const command2 = new CommandCtor(args); + if (typeof optionsOrCb === "function") { + this.send(command2, optionsOrCb); + } else if (typeof cb === "function") { + if (typeof optionsOrCb !== "object") + throw new Error(`Expected http options but got ${typeof optionsOrCb}`); + this.send(command2, optionsOrCb || {}, cb); + } else { + return this.send(command2, optionsOrCb); + } + }, "methodImpl"); + const methodName = (command[0].toLowerCase() + command.slice(1)).replace(/Command$/, ""); + Client2.prototype[methodName] = methodImpl; + } +}, "createAggregatedClient"); + +// src/parse-utils.ts +var parseBoolean = /* @__PURE__ */ __name((value) => { + switch (value) { + case "true": + return true; + case "false": + return false; + default: + throw new Error(`Unable to parse boolean value "${value}"`); + } +}, "parseBoolean"); +var expectBoolean = /* @__PURE__ */ __name((value) => { + if (value === null || value === void 0) { 
+ return void 0; + } + if (typeof value === "number") { + if (value === 0 || value === 1) { + logger.warn(stackTraceWarning(`Expected boolean, got ${typeof value}: ${value}`)); + } + if (value === 0) { + return false; + } + if (value === 1) { + return true; + } + } + if (typeof value === "string") { + const lower = value.toLowerCase(); + if (lower === "false" || lower === "true") { + logger.warn(stackTraceWarning(`Expected boolean, got ${typeof value}: ${value}`)); + } + if (lower === "false") { + return false; + } + if (lower === "true") { + return true; + } + } + if (typeof value === "boolean") { + return value; + } + throw new TypeError(`Expected boolean, got ${typeof value}: ${value}`); +}, "expectBoolean"); +var expectNumber = /* @__PURE__ */ __name((value) => { + if (value === null || value === void 0) { + return void 0; + } + if (typeof value === "string") { + const parsed = parseFloat(value); + if (!Number.isNaN(parsed)) { + if (String(parsed) !== String(value)) { + logger.warn(stackTraceWarning(`Expected number but observed string: ${value}`)); + } + return parsed; + } + } + if (typeof value === "number") { + return value; + } + throw new TypeError(`Expected number, got ${typeof value}: ${value}`); +}, "expectNumber"); +var MAX_FLOAT = Math.ceil(2 ** 127 * (2 - 2 ** -23)); +var expectFloat32 = /* @__PURE__ */ __name((value) => { + const expected = expectNumber(value); + if (expected !== void 0 && !Number.isNaN(expected) && expected !== Infinity && expected !== -Infinity) { + if (Math.abs(expected) > MAX_FLOAT) { + throw new TypeError(`Expected 32-bit float, got ${value}`); + } + } + return expected; +}, "expectFloat32"); +var expectLong = /* @__PURE__ */ __name((value) => { + if (value === null || value === void 0) { + return void 0; + } + if (Number.isInteger(value) && !Number.isNaN(value)) { + return value; + } + throw new TypeError(`Expected integer, got ${typeof value}: ${value}`); +}, "expectLong"); +var expectInt = expectLong; +var expectInt32 = /* 
@__PURE__ */ __name((value) => expectSizedInt(value, 32), "expectInt32"); +var expectShort = /* @__PURE__ */ __name((value) => expectSizedInt(value, 16), "expectShort"); +var expectByte = /* @__PURE__ */ __name((value) => expectSizedInt(value, 8), "expectByte"); +var expectSizedInt = /* @__PURE__ */ __name((value, size) => { + const expected = expectLong(value); + if (expected !== void 0 && castInt(expected, size) !== expected) { + throw new TypeError(`Expected ${size}-bit integer, got ${value}`); + } + return expected; +}, "expectSizedInt"); +var castInt = /* @__PURE__ */ __name((value, size) => { + switch (size) { + case 32: + return Int32Array.of(value)[0]; + case 16: + return Int16Array.of(value)[0]; + case 8: + return Int8Array.of(value)[0]; + } +}, "castInt"); +var expectNonNull = /* @__PURE__ */ __name((value, location) => { + if (value === null || value === void 0) { + if (location) { + throw new TypeError(`Expected a non-null value for ${location}`); + } + throw new TypeError("Expected a non-null value"); + } + return value; +}, "expectNonNull"); +var expectObject = /* @__PURE__ */ __name((value) => { + if (value === null || value === void 0) { + return void 0; + } + if (typeof value === "object" && !Array.isArray(value)) { + return value; + } + const receivedType = Array.isArray(value) ? 
"array" : typeof value; + throw new TypeError(`Expected object, got ${receivedType}: ${value}`); +}, "expectObject"); +var expectString = /* @__PURE__ */ __name((value) => { + if (value === null || value === void 0) { + return void 0; + } + if (typeof value === "string") { + return value; + } + if (["boolean", "number", "bigint"].includes(typeof value)) { + logger.warn(stackTraceWarning(`Expected string, got ${typeof value}: ${value}`)); + return String(value); + } + throw new TypeError(`Expected string, got ${typeof value}: ${value}`); +}, "expectString"); +var expectUnion = /* @__PURE__ */ __name((value) => { + if (value === null || value === void 0) { + return void 0; + } + const asObject = expectObject(value); + const setKeys = Object.entries(asObject).filter(([, v]) => v != null).map(([k]) => k); + if (setKeys.length === 0) { + throw new TypeError(`Unions must have exactly one non-null member. None were found.`); + } + if (setKeys.length > 1) { + throw new TypeError(`Unions must have exactly one non-null member. 
Keys ${setKeys} were not null.`); + } + return asObject; +}, "expectUnion"); +var strictParseDouble = /* @__PURE__ */ __name((value) => { + if (typeof value == "string") { + return expectNumber(parseNumber(value)); + } + return expectNumber(value); +}, "strictParseDouble"); +var strictParseFloat = strictParseDouble; +var strictParseFloat32 = /* @__PURE__ */ __name((value) => { + if (typeof value == "string") { + return expectFloat32(parseNumber(value)); + } + return expectFloat32(value); +}, "strictParseFloat32"); +var NUMBER_REGEX = /(-?(?:0|[1-9]\d*)(?:\.\d+)?(?:[eE][+-]?\d+)?)|(-?Infinity)|(NaN)/g; +var parseNumber = /* @__PURE__ */ __name((value) => { + const matches = value.match(NUMBER_REGEX); + if (matches === null || matches[0].length !== value.length) { + throw new TypeError(`Expected real number, got implicit NaN`); + } + return parseFloat(value); +}, "parseNumber"); +var limitedParseDouble = /* @__PURE__ */ __name((value) => { + if (typeof value == "string") { + return parseFloatString(value); + } + return expectNumber(value); +}, "limitedParseDouble"); +var handleFloat = limitedParseDouble; +var limitedParseFloat = limitedParseDouble; +var limitedParseFloat32 = /* @__PURE__ */ __name((value) => { + if (typeof value == "string") { + return parseFloatString(value); + } + return expectFloat32(value); +}, "limitedParseFloat32"); +var parseFloatString = /* @__PURE__ */ __name((value) => { + switch (value) { + case "NaN": + return NaN; + case "Infinity": + return Infinity; + case "-Infinity": + return -Infinity; + default: + throw new Error(`Unable to parse float value: ${value}`); + } +}, "parseFloatString"); +var strictParseLong = /* @__PURE__ */ __name((value) => { + if (typeof value === "string") { + return expectLong(parseNumber(value)); + } + return expectLong(value); +}, "strictParseLong"); +var strictParseInt = strictParseLong; +var strictParseInt32 = /* @__PURE__ */ __name((value) => { + if (typeof value === "string") { + return 
expectInt32(parseNumber(value)); + } + return expectInt32(value); +}, "strictParseInt32"); +var strictParseShort = /* @__PURE__ */ __name((value) => { + if (typeof value === "string") { + return expectShort(parseNumber(value)); + } + return expectShort(value); +}, "strictParseShort"); +var strictParseByte = /* @__PURE__ */ __name((value) => { + if (typeof value === "string") { + return expectByte(parseNumber(value)); + } + return expectByte(value); +}, "strictParseByte"); +var stackTraceWarning = /* @__PURE__ */ __name((message) => { + return String(new TypeError(message).stack || message).split("\n").slice(0, 5).filter((s) => !s.includes("stackTraceWarning")).join("\n"); +}, "stackTraceWarning"); +var logger = { + warn: console.warn +}; + +// src/date-utils.ts +var DAYS = ["Sun", "Mon", "Tue", "Wed", "Thu", "Fri", "Sat"]; +var MONTHS = ["Jan", "Feb", "Mar", "Apr", "May", "Jun", "Jul", "Aug", "Sep", "Oct", "Nov", "Dec"]; +function dateToUtcString(date) { + const year = date.getUTCFullYear(); + const month = date.getUTCMonth(); + const dayOfWeek = date.getUTCDay(); + const dayOfMonthInt = date.getUTCDate(); + const hoursInt = date.getUTCHours(); + const minutesInt = date.getUTCMinutes(); + const secondsInt = date.getUTCSeconds(); + const dayOfMonthString = dayOfMonthInt < 10 ? `0${dayOfMonthInt}` : `${dayOfMonthInt}`; + const hoursString = hoursInt < 10 ? `0${hoursInt}` : `${hoursInt}`; + const minutesString = minutesInt < 10 ? `0${minutesInt}` : `${minutesInt}`; + const secondsString = secondsInt < 10 ? 
`0${secondsInt}` : `${secondsInt}`; + return `${DAYS[dayOfWeek]}, ${dayOfMonthString} ${MONTHS[month]} ${year} ${hoursString}:${minutesString}:${secondsString} GMT`; +} +__name(dateToUtcString, "dateToUtcString"); +var RFC3339 = new RegExp(/^(\d{4})-(\d{2})-(\d{2})[tT](\d{2}):(\d{2}):(\d{2})(?:\.(\d+))?[zZ]$/); +var parseRfc3339DateTime = /* @__PURE__ */ __name((value) => { + if (value === null || value === void 0) { + return void 0; + } + if (typeof value !== "string") { + throw new TypeError("RFC-3339 date-times must be expressed as strings"); + } + const match = RFC3339.exec(value); + if (!match) { + throw new TypeError("Invalid RFC-3339 date-time value"); + } + const [_, yearStr, monthStr, dayStr, hours, minutes, seconds, fractionalMilliseconds] = match; + const year = strictParseShort(stripLeadingZeroes(yearStr)); + const month = parseDateValue(monthStr, "month", 1, 12); + const day = parseDateValue(dayStr, "day", 1, 31); + return buildDate(year, month, day, { hours, minutes, seconds, fractionalMilliseconds }); +}, "parseRfc3339DateTime"); +var RFC3339_WITH_OFFSET = new RegExp( + /^(\d{4})-(\d{2})-(\d{2})[tT](\d{2}):(\d{2}):(\d{2})(?:\.(\d+))?(([-+]\d{2}\:\d{2})|[zZ])$/ +); +var parseRfc3339DateTimeWithOffset = /* @__PURE__ */ __name((value) => { + if (value === null || value === void 0) { + return void 0; + } + if (typeof value !== "string") { + throw new TypeError("RFC-3339 date-times must be expressed as strings"); + } + const match = RFC3339_WITH_OFFSET.exec(value); + if (!match) { + throw new TypeError("Invalid RFC-3339 date-time value"); + } + const [_, yearStr, monthStr, dayStr, hours, minutes, seconds, fractionalMilliseconds, offsetStr] = match; + const year = strictParseShort(stripLeadingZeroes(yearStr)); + const month = parseDateValue(monthStr, "month", 1, 12); + const day = parseDateValue(dayStr, "day", 1, 31); + const date = buildDate(year, month, day, { hours, minutes, seconds, fractionalMilliseconds }); + if (offsetStr.toUpperCase() != "Z") { + 
date.setTime(date.getTime() - parseOffsetToMilliseconds(offsetStr)); + } + return date; +}, "parseRfc3339DateTimeWithOffset"); +var IMF_FIXDATE = new RegExp( + /^(?:Mon|Tue|Wed|Thu|Fri|Sat|Sun), (\d{2}) (Jan|Feb|Mar|Apr|May|Jun|Jul|Aug|Sep|Oct|Nov|Dec) (\d{4}) (\d{1,2}):(\d{2}):(\d{2})(?:\.(\d+))? GMT$/ +); +var RFC_850_DATE = new RegExp( + /^(?:Monday|Tuesday|Wednesday|Thursday|Friday|Saturday|Sunday), (\d{2})-(Jan|Feb|Mar|Apr|May|Jun|Jul|Aug|Sep|Oct|Nov|Dec)-(\d{2}) (\d{1,2}):(\d{2}):(\d{2})(?:\.(\d+))? GMT$/ +); +var ASC_TIME = new RegExp( + /^(?:Mon|Tue|Wed|Thu|Fri|Sat|Sun) (Jan|Feb|Mar|Apr|May|Jun|Jul|Aug|Sep|Oct|Nov|Dec) ( [1-9]|\d{2}) (\d{1,2}):(\d{2}):(\d{2})(?:\.(\d+))? (\d{4})$/ +); +var parseRfc7231DateTime = /* @__PURE__ */ __name((value) => { + if (value === null || value === void 0) { + return void 0; + } + if (typeof value !== "string") { + throw new TypeError("RFC-7231 date-times must be expressed as strings"); + } + let match = IMF_FIXDATE.exec(value); + if (match) { + const [_, dayStr, monthStr, yearStr, hours, minutes, seconds, fractionalMilliseconds] = match; + return buildDate( + strictParseShort(stripLeadingZeroes(yearStr)), + parseMonthByShortName(monthStr), + parseDateValue(dayStr, "day", 1, 31), + { hours, minutes, seconds, fractionalMilliseconds } + ); + } + match = RFC_850_DATE.exec(value); + if (match) { + const [_, dayStr, monthStr, yearStr, hours, minutes, seconds, fractionalMilliseconds] = match; + return adjustRfc850Year( + buildDate(parseTwoDigitYear(yearStr), parseMonthByShortName(monthStr), parseDateValue(dayStr, "day", 1, 31), { + hours, + minutes, + seconds, + fractionalMilliseconds + }) + ); + } + match = ASC_TIME.exec(value); + if (match) { + const [_, monthStr, dayStr, hours, minutes, seconds, fractionalMilliseconds, yearStr] = match; + return buildDate( + strictParseShort(stripLeadingZeroes(yearStr)), + parseMonthByShortName(monthStr), + parseDateValue(dayStr.trimLeft(), "day", 1, 31), + { hours, minutes, seconds, 
fractionalMilliseconds } + ); + } + throw new TypeError("Invalid RFC-7231 date-time value"); +}, "parseRfc7231DateTime"); +var parseEpochTimestamp = /* @__PURE__ */ __name((value) => { + if (value === null || value === void 0) { + return void 0; + } + let valueAsDouble; + if (typeof value === "number") { + valueAsDouble = value; + } else if (typeof value === "string") { + valueAsDouble = strictParseDouble(value); + } else if (typeof value === "object" && value.tag === 1) { + valueAsDouble = value.value; + } else { + throw new TypeError("Epoch timestamps must be expressed as floating point numbers or their string representation"); + } + if (Number.isNaN(valueAsDouble) || valueAsDouble === Infinity || valueAsDouble === -Infinity) { + throw new TypeError("Epoch timestamps must be valid, non-Infinite, non-NaN numerics"); + } + return new Date(Math.round(valueAsDouble * 1e3)); +}, "parseEpochTimestamp"); +var buildDate = /* @__PURE__ */ __name((year, month, day, time) => { + const adjustedMonth = month - 1; + validateDayOfMonth(year, adjustedMonth, day); + return new Date( + Date.UTC( + year, + adjustedMonth, + day, + parseDateValue(time.hours, "hour", 0, 23), + parseDateValue(time.minutes, "minute", 0, 59), + // seconds can go up to 60 for leap seconds + parseDateValue(time.seconds, "seconds", 0, 60), + parseMilliseconds(time.fractionalMilliseconds) + ) + ); +}, "buildDate"); +var parseTwoDigitYear = /* @__PURE__ */ __name((value) => { + const thisYear = (/* @__PURE__ */ new Date()).getUTCFullYear(); + const valueInThisCentury = Math.floor(thisYear / 100) * 100 + strictParseShort(stripLeadingZeroes(value)); + if (valueInThisCentury < thisYear) { + return valueInThisCentury + 100; + } + return valueInThisCentury; +}, "parseTwoDigitYear"); +var FIFTY_YEARS_IN_MILLIS = 50 * 365 * 24 * 60 * 60 * 1e3; +var adjustRfc850Year = /* @__PURE__ */ __name((input) => { + if (input.getTime() - (/* @__PURE__ */ new Date()).getTime() > FIFTY_YEARS_IN_MILLIS) { + return new Date( + 
Date.UTC( + input.getUTCFullYear() - 100, + input.getUTCMonth(), + input.getUTCDate(), + input.getUTCHours(), + input.getUTCMinutes(), + input.getUTCSeconds(), + input.getUTCMilliseconds() + ) + ); + } + return input; +}, "adjustRfc850Year"); +var parseMonthByShortName = /* @__PURE__ */ __name((value) => { + const monthIdx = MONTHS.indexOf(value); + if (monthIdx < 0) { + throw new TypeError(`Invalid month: ${value}`); + } + return monthIdx + 1; +}, "parseMonthByShortName"); +var DAYS_IN_MONTH = [31, 28, 31, 30, 31, 30, 31, 31, 30, 31, 30, 31]; +var validateDayOfMonth = /* @__PURE__ */ __name((year, month, day) => { + let maxDays = DAYS_IN_MONTH[month]; + if (month === 1 && isLeapYear(year)) { + maxDays = 29; + } + if (day > maxDays) { + throw new TypeError(`Invalid day for ${MONTHS[month]} in ${year}: ${day}`); + } +}, "validateDayOfMonth"); +var isLeapYear = /* @__PURE__ */ __name((year) => { + return year % 4 === 0 && (year % 100 !== 0 || year % 400 === 0); +}, "isLeapYear"); +var parseDateValue = /* @__PURE__ */ __name((value, type, lower, upper) => { + const dateVal = strictParseByte(stripLeadingZeroes(value)); + if (dateVal < lower || dateVal > upper) { + throw new TypeError(`${type} must be between ${lower} and ${upper}, inclusive`); + } + return dateVal; +}, "parseDateValue"); +var parseMilliseconds = /* @__PURE__ */ __name((value) => { + if (value === null || value === void 0) { + return 0; + } + return strictParseFloat32("0." 
+ value) * 1e3; +}, "parseMilliseconds"); +var parseOffsetToMilliseconds = /* @__PURE__ */ __name((value) => { + const directionStr = value[0]; + let direction = 1; + if (directionStr == "+") { + direction = 1; + } else if (directionStr == "-") { + direction = -1; + } else { + throw new TypeError(`Offset direction, ${directionStr}, must be "+" or "-"`); + } + const hour = Number(value.substring(1, 3)); + const minute = Number(value.substring(4, 6)); + return direction * (hour * 60 + minute) * 60 * 1e3; +}, "parseOffsetToMilliseconds"); +var stripLeadingZeroes = /* @__PURE__ */ __name((value) => { + let idx = 0; + while (idx < value.length - 1 && value.charAt(idx) === "0") { + idx++; + } + if (idx === 0) { + return value; + } + return value.slice(idx); +}, "stripLeadingZeroes"); + +// src/exceptions.ts +var ServiceException = class _ServiceException extends Error { + static { + __name(this, "ServiceException"); + } + constructor(options) { + super(options.message); + Object.setPrototypeOf(this, Object.getPrototypeOf(this).constructor.prototype); + this.name = options.name; + this.$fault = options.$fault; + this.$metadata = options.$metadata; + } + /** + * Checks if a value is an instance of ServiceException (duck typed) + */ + static isInstance(value) { + if (!value) + return false; + const candidate = value; + return _ServiceException.prototype.isPrototypeOf(candidate) || Boolean(candidate.$fault) && Boolean(candidate.$metadata) && (candidate.$fault === "client" || candidate.$fault === "server"); + } + /** + * Custom instanceof check to support the operator for ServiceException base class + */ + static [Symbol.hasInstance](instance) { + if (!instance) + return false; + const candidate = instance; + if (this === _ServiceException) { + return _ServiceException.isInstance(instance); + } + if (_ServiceException.isInstance(instance)) { + if (candidate.name && this.name) { + return this.prototype.isPrototypeOf(instance) || candidate.name === this.name; + } + return 
this.prototype.isPrototypeOf(instance); + } + return false; + } +}; +var decorateServiceException = /* @__PURE__ */ __name((exception, additions = {}) => { + Object.entries(additions).filter(([, v]) => v !== void 0).forEach(([k, v]) => { + if (exception[k] == void 0 || exception[k] === "") { + exception[k] = v; + } + }); + const message = exception.message || exception.Message || "UnknownError"; + exception.message = message; + delete exception.Message; + return exception; +}, "decorateServiceException"); + +// src/default-error-handler.ts +var throwDefaultError = /* @__PURE__ */ __name(({ output, parsedBody, exceptionCtor, errorCode }) => { + const $metadata = deserializeMetadata(output); + const statusCode = $metadata.httpStatusCode ? $metadata.httpStatusCode + "" : void 0; + const response = new exceptionCtor({ + name: parsedBody?.code || parsedBody?.Code || errorCode || statusCode || "UnknownError", + $fault: "client", + $metadata + }); + throw decorateServiceException(response, parsedBody); +}, "throwDefaultError"); +var withBaseException = /* @__PURE__ */ __name((ExceptionCtor) => { + return ({ output, parsedBody, errorCode }) => { + throwDefaultError({ output, parsedBody, exceptionCtor: ExceptionCtor, errorCode }); + }; +}, "withBaseException"); +var deserializeMetadata = /* @__PURE__ */ __name((output) => ({ + httpStatusCode: output.statusCode, + requestId: output.headers["x-amzn-requestid"] ?? output.headers["x-amzn-request-id"] ?? 
output.headers["x-amz-request-id"], + extendedRequestId: output.headers["x-amz-id-2"], + cfId: output.headers["x-amz-cf-id"] +}), "deserializeMetadata"); + +// src/defaults-mode.ts +var loadConfigsForDefaultMode = /* @__PURE__ */ __name((mode) => { + switch (mode) { + case "standard": + return { + retryMode: "standard", + connectionTimeout: 3100 + }; + case "in-region": + return { + retryMode: "standard", + connectionTimeout: 1100 + }; + case "cross-region": + return { + retryMode: "standard", + connectionTimeout: 3100 + }; + case "mobile": + return { + retryMode: "standard", + connectionTimeout: 3e4 + }; + default: + return {}; + } +}, "loadConfigsForDefaultMode"); + +// src/emitWarningIfUnsupportedVersion.ts +var warningEmitted = false; +var emitWarningIfUnsupportedVersion = /* @__PURE__ */ __name((version) => { + if (version && !warningEmitted && parseInt(version.substring(1, version.indexOf("."))) < 16) { + warningEmitted = true; + } +}, "emitWarningIfUnsupportedVersion"); + +// src/extended-encode-uri-component.ts + + +// src/extensions/checksum.ts + +var getChecksumConfiguration = /* @__PURE__ */ __name((runtimeConfig) => { + const checksumAlgorithms = []; + for (const id in import_types.AlgorithmId) { + const algorithmId = import_types.AlgorithmId[id]; + if (runtimeConfig[algorithmId] === void 0) { + continue; + } + checksumAlgorithms.push({ + algorithmId: () => algorithmId, + checksumConstructor: () => runtimeConfig[algorithmId] + }); + } + return { + addChecksumAlgorithm(algo) { + checksumAlgorithms.push(algo); + }, + checksumAlgorithms() { + return checksumAlgorithms; + } + }; +}, "getChecksumConfiguration"); +var resolveChecksumRuntimeConfig = /* @__PURE__ */ __name((clientConfig) => { + const runtimeConfig = {}; + clientConfig.checksumAlgorithms().forEach((checksumAlgorithm) => { + runtimeConfig[checksumAlgorithm.algorithmId()] = checksumAlgorithm.checksumConstructor(); + }); + return runtimeConfig; +}, "resolveChecksumRuntimeConfig"); + +// 
src/extensions/retry.ts +var getRetryConfiguration = /* @__PURE__ */ __name((runtimeConfig) => { + return { + setRetryStrategy(retryStrategy) { + runtimeConfig.retryStrategy = retryStrategy; + }, + retryStrategy() { + return runtimeConfig.retryStrategy; + } + }; +}, "getRetryConfiguration"); +var resolveRetryRuntimeConfig = /* @__PURE__ */ __name((retryStrategyConfiguration) => { + const runtimeConfig = {}; + runtimeConfig.retryStrategy = retryStrategyConfiguration.retryStrategy(); + return runtimeConfig; +}, "resolveRetryRuntimeConfig"); + +// src/extensions/defaultExtensionConfiguration.ts +var getDefaultExtensionConfiguration = /* @__PURE__ */ __name((runtimeConfig) => { + return Object.assign(getChecksumConfiguration(runtimeConfig), getRetryConfiguration(runtimeConfig)); +}, "getDefaultExtensionConfiguration"); +var getDefaultClientConfiguration = getDefaultExtensionConfiguration; +var resolveDefaultRuntimeConfig = /* @__PURE__ */ __name((config) => { + return Object.assign(resolveChecksumRuntimeConfig(config), resolveRetryRuntimeConfig(config)); +}, "resolveDefaultRuntimeConfig"); + +// src/get-array-if-single-item.ts +var getArrayIfSingleItem = /* @__PURE__ */ __name((mayBeArray) => Array.isArray(mayBeArray) ? 
mayBeArray : [mayBeArray], "getArrayIfSingleItem"); + +// src/get-value-from-text-node.ts +var getValueFromTextNode = /* @__PURE__ */ __name((obj) => { + const textNodeName = "#text"; + for (const key in obj) { + if (obj.hasOwnProperty(key) && obj[key][textNodeName] !== void 0) { + obj[key] = obj[key][textNodeName]; + } else if (typeof obj[key] === "object" && obj[key] !== null) { + obj[key] = getValueFromTextNode(obj[key]); + } + } + return obj; +}, "getValueFromTextNode"); + +// src/is-serializable-header-value.ts +var isSerializableHeaderValue = /* @__PURE__ */ __name((value) => { + return value != null; +}, "isSerializableHeaderValue"); + +// src/lazy-json.ts +var LazyJsonString = /* @__PURE__ */ __name(function LazyJsonString2(val) { + const str = Object.assign(new String(val), { + deserializeJSON() { + return JSON.parse(String(val)); + }, + toString() { + return String(val); + }, + toJSON() { + return String(val); + } + }); + return str; +}, "LazyJsonString"); +LazyJsonString.from = (object) => { + if (object && typeof object === "object" && (object instanceof LazyJsonString || "deserializeJSON" in object)) { + return object; + } else if (typeof object === "string" || Object.getPrototypeOf(object) === String.prototype) { + return LazyJsonString(String(object)); + } + return LazyJsonString(JSON.stringify(object)); +}; +LazyJsonString.fromObject = LazyJsonString.from; + +// src/NoOpLogger.ts +var NoOpLogger = class { + static { + __name(this, "NoOpLogger"); + } + trace() { + } + debug() { + } + info() { + } + warn() { + } + error() { + } +}; + +// src/object-mapping.ts +function map(arg0, arg1, arg2) { + let target; + let filter; + let instructions; + if (typeof arg1 === "undefined" && typeof arg2 === "undefined") { + target = {}; + instructions = arg0; + } else { + target = arg0; + if (typeof arg1 === "function") { + filter = arg1; + instructions = arg2; + return mapWithFilter(target, filter, instructions); + } else { + instructions = arg1; + } + } + for 
(const key of Object.keys(instructions)) { + if (!Array.isArray(instructions[key])) { + target[key] = instructions[key]; + continue; + } + applyInstruction(target, null, instructions, key); + } + return target; +} +__name(map, "map"); +var convertMap = /* @__PURE__ */ __name((target) => { + const output = {}; + for (const [k, v] of Object.entries(target || {})) { + output[k] = [, v]; + } + return output; +}, "convertMap"); +var take = /* @__PURE__ */ __name((source, instructions) => { + const out = {}; + for (const key in instructions) { + applyInstruction(out, source, instructions, key); + } + return out; +}, "take"); +var mapWithFilter = /* @__PURE__ */ __name((target, filter, instructions) => { + return map( + target, + Object.entries(instructions).reduce( + (_instructions, [key, value]) => { + if (Array.isArray(value)) { + _instructions[key] = value; + } else { + if (typeof value === "function") { + _instructions[key] = [filter, value()]; + } else { + _instructions[key] = [filter, value]; + } + } + return _instructions; + }, + {} + ) + ); +}, "mapWithFilter"); +var applyInstruction = /* @__PURE__ */ __name((target, source, instructions, targetKey) => { + if (source !== null) { + let instruction = instructions[targetKey]; + if (typeof instruction === "function") { + instruction = [, instruction]; + } + const [filter2 = nonNullish, valueFn = pass, sourceKey = targetKey] = instruction; + if (typeof filter2 === "function" && filter2(source[sourceKey]) || typeof filter2 !== "function" && !!filter2) { + target[targetKey] = valueFn(source[sourceKey]); + } + return; + } + let [filter, value] = instructions[targetKey]; + if (typeof value === "function") { + let _value; + const defaultFilterPassed = filter === void 0 && (_value = value()) != null; + const customFilterPassed = typeof filter === "function" && !!filter(void 0) || typeof filter !== "function" && !!filter; + if (defaultFilterPassed) { + target[targetKey] = _value; + } else if (customFilterPassed) { + 
target[targetKey] = value(); + } + } else { + const defaultFilterPassed = filter === void 0 && value != null; + const customFilterPassed = typeof filter === "function" && !!filter(value) || typeof filter !== "function" && !!filter; + if (defaultFilterPassed || customFilterPassed) { + target[targetKey] = value; + } + } +}, "applyInstruction"); +var nonNullish = /* @__PURE__ */ __name((_) => _ != null, "nonNullish"); +var pass = /* @__PURE__ */ __name((_) => _, "pass"); + +// src/quote-header.ts +function quoteHeader(part) { + if (part.includes(",") || part.includes('"')) { + part = `"${part.replace(/"/g, '\\"')}"`; + } + return part; +} +__name(quoteHeader, "quoteHeader"); + +// src/resolve-path.ts + + +// src/ser-utils.ts +var serializeFloat = /* @__PURE__ */ __name((value) => { + if (value !== value) { + return "NaN"; + } + switch (value) { + case Infinity: + return "Infinity"; + case -Infinity: + return "-Infinity"; + default: + return value; + } +}, "serializeFloat"); +var serializeDateTime = /* @__PURE__ */ __name((date) => date.toISOString().replace(".000Z", "Z"), "serializeDateTime"); + +// src/serde-json.ts +var _json = /* @__PURE__ */ __name((obj) => { + if (obj == null) { + return {}; + } + if (Array.isArray(obj)) { + return obj.filter((_) => _ != null).map(_json); + } + if (typeof obj === "object") { + const target = {}; + for (const key of Object.keys(obj)) { + if (obj[key] == null) { + continue; + } + target[key] = _json(obj[key]); + } + return target; + } + return obj; +}, "_json"); + +// src/split-every.ts +function splitEvery(value, delimiter, numDelimiters) { + if (numDelimiters <= 0 || !Number.isInteger(numDelimiters)) { + throw new Error("Invalid number of delimiters (" + numDelimiters + ") for splitEvery."); + } + const segments = value.split(delimiter); + if (numDelimiters === 1) { + return segments; + } + const compoundSegments = []; + let currentSegment = ""; + for (let i = 0; i < segments.length; i++) { + if (currentSegment === "") { + 
currentSegment = segments[i]; + } else { + currentSegment += delimiter + segments[i]; + } + if ((i + 1) % numDelimiters === 0) { + compoundSegments.push(currentSegment); + currentSegment = ""; + } + } + if (currentSegment !== "") { + compoundSegments.push(currentSegment); + } + return compoundSegments; +} +__name(splitEvery, "splitEvery"); + +// src/split-header.ts +var splitHeader = /* @__PURE__ */ __name((value) => { + const z = value.length; + const values = []; + let withinQuotes = false; + let prevChar = void 0; + let anchor = 0; + for (let i = 0; i < z; ++i) { + const char = value[i]; + switch (char) { + case `"`: + if (prevChar !== "\\") { + withinQuotes = !withinQuotes; + } + break; + case ",": + if (!withinQuotes) { + values.push(value.slice(anchor, i)); + anchor = i + 1; + } + break; + default: + } + prevChar = char; + } + values.push(value.slice(anchor)); + return values.map((v) => { + v = v.trim(); + const z2 = v.length; + if (z2 < 2) { + return v; + } + if (v[0] === `"` && v[z2 - 1] === `"`) { + v = v.slice(1, z2 - 1); + } + return v.replace(/\\"/g, '"'); + }); +}, "splitHeader"); +// Annotate the CommonJS export names for ESM import in node: + +0 && (module.exports = { + Client, + collectBody, + Command, + SENSITIVE_STRING, + createAggregatedClient, + dateToUtcString, + parseRfc3339DateTime, + parseRfc3339DateTimeWithOffset, + parseRfc7231DateTime, + parseEpochTimestamp, + throwDefaultError, + withBaseException, + loadConfigsForDefaultMode, + emitWarningIfUnsupportedVersion, + ServiceException, + decorateServiceException, + extendedEncodeURIComponent, + getDefaultExtensionConfiguration, + getDefaultClientConfiguration, + resolveDefaultRuntimeConfig, + getArrayIfSingleItem, + getValueFromTextNode, + isSerializableHeaderValue, + LazyJsonString, + NoOpLogger, + map, + convertMap, + take, + parseBoolean, + expectBoolean, + expectNumber, + expectFloat32, + expectLong, + expectInt, + expectInt32, + expectShort, + expectByte, + expectNonNull, + expectObject, 
+ expectString, + expectUnion, + strictParseDouble, + strictParseFloat, + strictParseFloat32, + limitedParseDouble, + handleFloat, + limitedParseFloat, + limitedParseFloat32, + strictParseLong, + strictParseInt, + strictParseInt32, + strictParseShort, + strictParseByte, + logger, + quoteHeader, + resolvedPath, + serializeFloat, + serializeDateTime, + _json, + splitEvery, + splitHeader +}); + diff --git a/amplify/functions/downloadDocument/node_modules/@smithy/smithy-client/dist-cjs/is-serializable-header-value.js b/amplify/functions/downloadDocument/node_modules/@smithy/smithy-client/dist-cjs/is-serializable-header-value.js new file mode 100644 index 0000000..532e610 --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@smithy/smithy-client/dist-cjs/is-serializable-header-value.js @@ -0,0 +1 @@ +module.exports = require("./index.js"); \ No newline at end of file diff --git a/amplify/functions/downloadDocument/node_modules/@smithy/smithy-client/dist-cjs/lazy-json.js b/amplify/functions/downloadDocument/node_modules/@smithy/smithy-client/dist-cjs/lazy-json.js new file mode 100644 index 0000000..532e610 --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@smithy/smithy-client/dist-cjs/lazy-json.js @@ -0,0 +1 @@ +module.exports = require("./index.js"); \ No newline at end of file diff --git a/amplify/functions/downloadDocument/node_modules/@smithy/smithy-client/dist-cjs/object-mapping.js b/amplify/functions/downloadDocument/node_modules/@smithy/smithy-client/dist-cjs/object-mapping.js new file mode 100644 index 0000000..532e610 --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@smithy/smithy-client/dist-cjs/object-mapping.js @@ -0,0 +1 @@ +module.exports = require("./index.js"); \ No newline at end of file diff --git a/amplify/functions/downloadDocument/node_modules/@smithy/smithy-client/dist-cjs/parse-utils.js b/amplify/functions/downloadDocument/node_modules/@smithy/smithy-client/dist-cjs/parse-utils.js new file mode 
100644 index 0000000..532e610 --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@smithy/smithy-client/dist-cjs/parse-utils.js @@ -0,0 +1 @@ +module.exports = require("./index.js"); \ No newline at end of file diff --git a/amplify/functions/downloadDocument/node_modules/@smithy/smithy-client/dist-cjs/quote-header.js b/amplify/functions/downloadDocument/node_modules/@smithy/smithy-client/dist-cjs/quote-header.js new file mode 100644 index 0000000..532e610 --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@smithy/smithy-client/dist-cjs/quote-header.js @@ -0,0 +1 @@ +module.exports = require("./index.js"); \ No newline at end of file diff --git a/amplify/functions/downloadDocument/node_modules/@smithy/smithy-client/dist-cjs/resolve-path.js b/amplify/functions/downloadDocument/node_modules/@smithy/smithy-client/dist-cjs/resolve-path.js new file mode 100644 index 0000000..532e610 --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@smithy/smithy-client/dist-cjs/resolve-path.js @@ -0,0 +1 @@ +module.exports = require("./index.js"); \ No newline at end of file diff --git a/amplify/functions/downloadDocument/node_modules/@smithy/smithy-client/dist-cjs/ser-utils.js b/amplify/functions/downloadDocument/node_modules/@smithy/smithy-client/dist-cjs/ser-utils.js new file mode 100644 index 0000000..532e610 --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@smithy/smithy-client/dist-cjs/ser-utils.js @@ -0,0 +1 @@ +module.exports = require("./index.js"); \ No newline at end of file diff --git a/amplify/functions/downloadDocument/node_modules/@smithy/smithy-client/dist-cjs/serde-json.js b/amplify/functions/downloadDocument/node_modules/@smithy/smithy-client/dist-cjs/serde-json.js new file mode 100644 index 0000000..532e610 --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@smithy/smithy-client/dist-cjs/serde-json.js @@ -0,0 +1 @@ +module.exports = require("./index.js"); \ No newline at end of file 
diff --git a/amplify/functions/downloadDocument/node_modules/@smithy/smithy-client/dist-cjs/split-every.js b/amplify/functions/downloadDocument/node_modules/@smithy/smithy-client/dist-cjs/split-every.js new file mode 100644 index 0000000..532e610 --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@smithy/smithy-client/dist-cjs/split-every.js @@ -0,0 +1 @@ +module.exports = require("./index.js"); \ No newline at end of file diff --git a/amplify/functions/downloadDocument/node_modules/@smithy/smithy-client/dist-cjs/split-header.js b/amplify/functions/downloadDocument/node_modules/@smithy/smithy-client/dist-cjs/split-header.js new file mode 100644 index 0000000..532e610 --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@smithy/smithy-client/dist-cjs/split-header.js @@ -0,0 +1 @@ +module.exports = require("./index.js"); \ No newline at end of file diff --git a/amplify/functions/downloadDocument/node_modules/@smithy/smithy-client/dist-es/NoOpLogger.js b/amplify/functions/downloadDocument/node_modules/@smithy/smithy-client/dist-es/NoOpLogger.js new file mode 100644 index 0000000..73cd076 --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@smithy/smithy-client/dist-es/NoOpLogger.js @@ -0,0 +1,7 @@ +export class NoOpLogger { + trace() { } + debug() { } + info() { } + warn() { } + error() { } +} diff --git a/amplify/functions/downloadDocument/node_modules/@smithy/smithy-client/dist-es/client.js b/amplify/functions/downloadDocument/node_modules/@smithy/smithy-client/dist-es/client.js new file mode 100644 index 0000000..56cbc2e --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@smithy/smithy-client/dist-es/client.js @@ -0,0 +1,42 @@ +import { constructStack } from "@smithy/middleware-stack"; +export class Client { + constructor(config) { + this.config = config; + this.middlewareStack = constructStack(); + } + send(command, optionsOrCb, cb) { + const options = typeof optionsOrCb !== "function" ? 
optionsOrCb : undefined; + const callback = typeof optionsOrCb === "function" ? optionsOrCb : cb; + const useHandlerCache = options === undefined && this.config.cacheMiddleware === true; + let handler; + if (useHandlerCache) { + if (!this.handlers) { + this.handlers = new WeakMap(); + } + const handlers = this.handlers; + if (handlers.has(command.constructor)) { + handler = handlers.get(command.constructor); + } + else { + handler = command.resolveMiddleware(this.middlewareStack, this.config, options); + handlers.set(command.constructor, handler); + } + } + else { + delete this.handlers; + handler = command.resolveMiddleware(this.middlewareStack, this.config, options); + } + if (callback) { + handler(command) + .then((result) => callback(null, result.output), (err) => callback(err)) + .catch(() => { }); + } + else { + return handler(command).then((result) => result.output); + } + } + destroy() { + this.config?.requestHandler?.destroy?.(); + delete this.handlers; + } +} diff --git a/amplify/functions/downloadDocument/node_modules/@smithy/smithy-client/dist-es/collect-stream-body.js b/amplify/functions/downloadDocument/node_modules/@smithy/smithy-client/dist-es/collect-stream-body.js new file mode 100644 index 0000000..2b76f0a --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@smithy/smithy-client/dist-es/collect-stream-body.js @@ -0,0 +1 @@ +export { collectBody } from "@smithy/core/protocols"; diff --git a/amplify/functions/downloadDocument/node_modules/@smithy/smithy-client/dist-es/command.js b/amplify/functions/downloadDocument/node_modules/@smithy/smithy-client/dist-es/command.js new file mode 100644 index 0000000..078eecb --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@smithy/smithy-client/dist-es/command.js @@ -0,0 +1,115 @@ +import { constructStack } from "@smithy/middleware-stack"; +import { SMITHY_CONTEXT_KEY } from "@smithy/types"; +export class Command { + constructor() { + this.middlewareStack = constructStack(); + 
} + static classBuilder() { + return new ClassBuilder(); + } + resolveMiddlewareWithContext(clientStack, configuration, options, { middlewareFn, clientName, commandName, inputFilterSensitiveLog, outputFilterSensitiveLog, smithyContext, additionalContext, CommandCtor, }) { + for (const mw of middlewareFn.bind(this)(CommandCtor, clientStack, configuration, options)) { + this.middlewareStack.use(mw); + } + const stack = clientStack.concat(this.middlewareStack); + const { logger } = configuration; + const handlerExecutionContext = { + logger, + clientName, + commandName, + inputFilterSensitiveLog, + outputFilterSensitiveLog, + [SMITHY_CONTEXT_KEY]: { + commandInstance: this, + ...smithyContext, + }, + ...additionalContext, + }; + const { requestHandler } = configuration; + return stack.resolve((request) => requestHandler.handle(request.request, options || {}), handlerExecutionContext); + } +} +class ClassBuilder { + constructor() { + this._init = () => { }; + this._ep = {}; + this._middlewareFn = () => []; + this._commandName = ""; + this._clientName = ""; + this._additionalContext = {}; + this._smithyContext = {}; + this._inputFilterSensitiveLog = (_) => _; + this._outputFilterSensitiveLog = (_) => _; + this._serializer = null; + this._deserializer = null; + } + init(cb) { + this._init = cb; + } + ep(endpointParameterInstructions) { + this._ep = endpointParameterInstructions; + return this; + } + m(middlewareSupplier) { + this._middlewareFn = middlewareSupplier; + return this; + } + s(service, operation, smithyContext = {}) { + this._smithyContext = { + service, + operation, + ...smithyContext, + }; + return this; + } + c(additionalContext = {}) { + this._additionalContext = additionalContext; + return this; + } + n(clientName, commandName) { + this._clientName = clientName; + this._commandName = commandName; + return this; + } + f(inputFilter = (_) => _, outputFilter = (_) => _) { + this._inputFilterSensitiveLog = inputFilter; + this._outputFilterSensitiveLog = 
outputFilter; + return this; + } + ser(serializer) { + this._serializer = serializer; + return this; + } + de(deserializer) { + this._deserializer = deserializer; + return this; + } + build() { + const closure = this; + let CommandRef; + return (CommandRef = class extends Command { + static getEndpointParameterInstructions() { + return closure._ep; + } + constructor(...[input]) { + super(); + this.serialize = closure._serializer; + this.deserialize = closure._deserializer; + this.input = input ?? {}; + closure._init(this); + } + resolveMiddleware(stack, configuration, options) { + return this.resolveMiddlewareWithContext(stack, configuration, options, { + CommandCtor: CommandRef, + middlewareFn: closure._middlewareFn, + clientName: closure._clientName, + commandName: closure._commandName, + inputFilterSensitiveLog: closure._inputFilterSensitiveLog, + outputFilterSensitiveLog: closure._outputFilterSensitiveLog, + smithyContext: closure._smithyContext, + additionalContext: closure._additionalContext, + }); + } + }); + } +} diff --git a/amplify/functions/downloadDocument/node_modules/@smithy/smithy-client/dist-es/constants.js b/amplify/functions/downloadDocument/node_modules/@smithy/smithy-client/dist-es/constants.js new file mode 100644 index 0000000..9b193d7 --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@smithy/smithy-client/dist-es/constants.js @@ -0,0 +1 @@ +export const SENSITIVE_STRING = "***SensitiveInformation***"; diff --git a/amplify/functions/downloadDocument/node_modules/@smithy/smithy-client/dist-es/create-aggregated-client.js b/amplify/functions/downloadDocument/node_modules/@smithy/smithy-client/dist-es/create-aggregated-client.js new file mode 100644 index 0000000..44cf4dc --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@smithy/smithy-client/dist-es/create-aggregated-client.js @@ -0,0 +1,21 @@ +export const createAggregatedClient = (commands, Client) => { + for (const command of Object.keys(commands)) { + const 
CommandCtor = commands[command]; + const methodImpl = async function (args, optionsOrCb, cb) { + const command = new CommandCtor(args); + if (typeof optionsOrCb === "function") { + this.send(command, optionsOrCb); + } + else if (typeof cb === "function") { + if (typeof optionsOrCb !== "object") + throw new Error(`Expected http options but got ${typeof optionsOrCb}`); + this.send(command, optionsOrCb || {}, cb); + } + else { + return this.send(command, optionsOrCb); + } + }; + const methodName = (command[0].toLowerCase() + command.slice(1)).replace(/Command$/, ""); + Client.prototype[methodName] = methodImpl; + } +}; diff --git a/amplify/functions/downloadDocument/node_modules/@smithy/smithy-client/dist-es/date-utils.js b/amplify/functions/downloadDocument/node_modules/@smithy/smithy-client/dist-es/date-utils.js new file mode 100644 index 0000000..0d0abf2 --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@smithy/smithy-client/dist-es/date-utils.js @@ -0,0 +1,190 @@ +import { strictParseByte, strictParseDouble, strictParseFloat32, strictParseShort } from "./parse-utils"; +const DAYS = ["Sun", "Mon", "Tue", "Wed", "Thu", "Fri", "Sat"]; +const MONTHS = ["Jan", "Feb", "Mar", "Apr", "May", "Jun", "Jul", "Aug", "Sep", "Oct", "Nov", "Dec"]; +export function dateToUtcString(date) { + const year = date.getUTCFullYear(); + const month = date.getUTCMonth(); + const dayOfWeek = date.getUTCDay(); + const dayOfMonthInt = date.getUTCDate(); + const hoursInt = date.getUTCHours(); + const minutesInt = date.getUTCMinutes(); + const secondsInt = date.getUTCSeconds(); + const dayOfMonthString = dayOfMonthInt < 10 ? `0${dayOfMonthInt}` : `${dayOfMonthInt}`; + const hoursString = hoursInt < 10 ? `0${hoursInt}` : `${hoursInt}`; + const minutesString = minutesInt < 10 ? `0${minutesInt}` : `${minutesInt}`; + const secondsString = secondsInt < 10 ? 
`0${secondsInt}` : `${secondsInt}`; + return `${DAYS[dayOfWeek]}, ${dayOfMonthString} ${MONTHS[month]} ${year} ${hoursString}:${minutesString}:${secondsString} GMT`; +} +const RFC3339 = new RegExp(/^(\d{4})-(\d{2})-(\d{2})[tT](\d{2}):(\d{2}):(\d{2})(?:\.(\d+))?[zZ]$/); +export const parseRfc3339DateTime = (value) => { + if (value === null || value === undefined) { + return undefined; + } + if (typeof value !== "string") { + throw new TypeError("RFC-3339 date-times must be expressed as strings"); + } + const match = RFC3339.exec(value); + if (!match) { + throw new TypeError("Invalid RFC-3339 date-time value"); + } + const [_, yearStr, monthStr, dayStr, hours, minutes, seconds, fractionalMilliseconds] = match; + const year = strictParseShort(stripLeadingZeroes(yearStr)); + const month = parseDateValue(monthStr, "month", 1, 12); + const day = parseDateValue(dayStr, "day", 1, 31); + return buildDate(year, month, day, { hours, minutes, seconds, fractionalMilliseconds }); +}; +const RFC3339_WITH_OFFSET = new RegExp(/^(\d{4})-(\d{2})-(\d{2})[tT](\d{2}):(\d{2}):(\d{2})(?:\.(\d+))?(([-+]\d{2}\:\d{2})|[zZ])$/); +export const parseRfc3339DateTimeWithOffset = (value) => { + if (value === null || value === undefined) { + return undefined; + } + if (typeof value !== "string") { + throw new TypeError("RFC-3339 date-times must be expressed as strings"); + } + const match = RFC3339_WITH_OFFSET.exec(value); + if (!match) { + throw new TypeError("Invalid RFC-3339 date-time value"); + } + const [_, yearStr, monthStr, dayStr, hours, minutes, seconds, fractionalMilliseconds, offsetStr] = match; + const year = strictParseShort(stripLeadingZeroes(yearStr)); + const month = parseDateValue(monthStr, "month", 1, 12); + const day = parseDateValue(dayStr, "day", 1, 31); + const date = buildDate(year, month, day, { hours, minutes, seconds, fractionalMilliseconds }); + if (offsetStr.toUpperCase() != "Z") { + date.setTime(date.getTime() - parseOffsetToMilliseconds(offsetStr)); + } + return date; 
+}; +const IMF_FIXDATE = new RegExp(/^(?:Mon|Tue|Wed|Thu|Fri|Sat|Sun), (\d{2}) (Jan|Feb|Mar|Apr|May|Jun|Jul|Aug|Sep|Oct|Nov|Dec) (\d{4}) (\d{1,2}):(\d{2}):(\d{2})(?:\.(\d+))? GMT$/); +const RFC_850_DATE = new RegExp(/^(?:Monday|Tuesday|Wednesday|Thursday|Friday|Saturday|Sunday), (\d{2})-(Jan|Feb|Mar|Apr|May|Jun|Jul|Aug|Sep|Oct|Nov|Dec)-(\d{2}) (\d{1,2}):(\d{2}):(\d{2})(?:\.(\d+))? GMT$/); +const ASC_TIME = new RegExp(/^(?:Mon|Tue|Wed|Thu|Fri|Sat|Sun) (Jan|Feb|Mar|Apr|May|Jun|Jul|Aug|Sep|Oct|Nov|Dec) ( [1-9]|\d{2}) (\d{1,2}):(\d{2}):(\d{2})(?:\.(\d+))? (\d{4})$/); +export const parseRfc7231DateTime = (value) => { + if (value === null || value === undefined) { + return undefined; + } + if (typeof value !== "string") { + throw new TypeError("RFC-7231 date-times must be expressed as strings"); + } + let match = IMF_FIXDATE.exec(value); + if (match) { + const [_, dayStr, monthStr, yearStr, hours, minutes, seconds, fractionalMilliseconds] = match; + return buildDate(strictParseShort(stripLeadingZeroes(yearStr)), parseMonthByShortName(monthStr), parseDateValue(dayStr, "day", 1, 31), { hours, minutes, seconds, fractionalMilliseconds }); + } + match = RFC_850_DATE.exec(value); + if (match) { + const [_, dayStr, monthStr, yearStr, hours, minutes, seconds, fractionalMilliseconds] = match; + return adjustRfc850Year(buildDate(parseTwoDigitYear(yearStr), parseMonthByShortName(monthStr), parseDateValue(dayStr, "day", 1, 31), { + hours, + minutes, + seconds, + fractionalMilliseconds, + })); + } + match = ASC_TIME.exec(value); + if (match) { + const [_, monthStr, dayStr, hours, minutes, seconds, fractionalMilliseconds, yearStr] = match; + return buildDate(strictParseShort(stripLeadingZeroes(yearStr)), parseMonthByShortName(monthStr), parseDateValue(dayStr.trimLeft(), "day", 1, 31), { hours, minutes, seconds, fractionalMilliseconds }); + } + throw new TypeError("Invalid RFC-7231 date-time value"); +}; +export const parseEpochTimestamp = (value) => { + if (value === null || value === 
undefined) { + return undefined; + } + let valueAsDouble; + if (typeof value === "number") { + valueAsDouble = value; + } + else if (typeof value === "string") { + valueAsDouble = strictParseDouble(value); + } + else if (typeof value === "object" && value.tag === 1) { + valueAsDouble = value.value; + } + else { + throw new TypeError("Epoch timestamps must be expressed as floating point numbers or their string representation"); + } + if (Number.isNaN(valueAsDouble) || valueAsDouble === Infinity || valueAsDouble === -Infinity) { + throw new TypeError("Epoch timestamps must be valid, non-Infinite, non-NaN numerics"); + } + return new Date(Math.round(valueAsDouble * 1000)); +}; +const buildDate = (year, month, day, time) => { + const adjustedMonth = month - 1; + validateDayOfMonth(year, adjustedMonth, day); + return new Date(Date.UTC(year, adjustedMonth, day, parseDateValue(time.hours, "hour", 0, 23), parseDateValue(time.minutes, "minute", 0, 59), parseDateValue(time.seconds, "seconds", 0, 60), parseMilliseconds(time.fractionalMilliseconds))); +}; +const parseTwoDigitYear = (value) => { + const thisYear = new Date().getUTCFullYear(); + const valueInThisCentury = Math.floor(thisYear / 100) * 100 + strictParseShort(stripLeadingZeroes(value)); + if (valueInThisCentury < thisYear) { + return valueInThisCentury + 100; + } + return valueInThisCentury; +}; +const FIFTY_YEARS_IN_MILLIS = 50 * 365 * 24 * 60 * 60 * 1000; +const adjustRfc850Year = (input) => { + if (input.getTime() - new Date().getTime() > FIFTY_YEARS_IN_MILLIS) { + return new Date(Date.UTC(input.getUTCFullYear() - 100, input.getUTCMonth(), input.getUTCDate(), input.getUTCHours(), input.getUTCMinutes(), input.getUTCSeconds(), input.getUTCMilliseconds())); + } + return input; +}; +const parseMonthByShortName = (value) => { + const monthIdx = MONTHS.indexOf(value); + if (monthIdx < 0) { + throw new TypeError(`Invalid month: ${value}`); + } + return monthIdx + 1; +}; +const DAYS_IN_MONTH = [31, 28, 31, 30, 31, 30, 
31, 31, 30, 31, 30, 31]; +const validateDayOfMonth = (year, month, day) => { + let maxDays = DAYS_IN_MONTH[month]; + if (month === 1 && isLeapYear(year)) { + maxDays = 29; + } + if (day > maxDays) { + throw new TypeError(`Invalid day for ${MONTHS[month]} in ${year}: ${day}`); + } +}; +const isLeapYear = (year) => { + return year % 4 === 0 && (year % 100 !== 0 || year % 400 === 0); +}; +const parseDateValue = (value, type, lower, upper) => { + const dateVal = strictParseByte(stripLeadingZeroes(value)); + if (dateVal < lower || dateVal > upper) { + throw new TypeError(`${type} must be between ${lower} and ${upper}, inclusive`); + } + return dateVal; +}; +const parseMilliseconds = (value) => { + if (value === null || value === undefined) { + return 0; + } + return strictParseFloat32("0." + value) * 1000; +}; +const parseOffsetToMilliseconds = (value) => { + const directionStr = value[0]; + let direction = 1; + if (directionStr == "+") { + direction = 1; + } + else if (directionStr == "-") { + direction = -1; + } + else { + throw new TypeError(`Offset direction, ${directionStr}, must be "+" or "-"`); + } + const hour = Number(value.substring(1, 3)); + const minute = Number(value.substring(4, 6)); + return direction * (hour * 60 + minute) * 60 * 1000; +}; +const stripLeadingZeroes = (value) => { + let idx = 0; + while (idx < value.length - 1 && value.charAt(idx) === "0") { + idx++; + } + if (idx === 0) { + return value; + } + return value.slice(idx); +}; diff --git a/amplify/functions/downloadDocument/node_modules/@smithy/smithy-client/dist-es/default-error-handler.js b/amplify/functions/downloadDocument/node_modules/@smithy/smithy-client/dist-es/default-error-handler.js new file mode 100644 index 0000000..7da1091 --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@smithy/smithy-client/dist-es/default-error-handler.js @@ -0,0 +1,22 @@ +import { decorateServiceException } from "./exceptions"; +export const throwDefaultError = ({ output, parsedBody, 
exceptionCtor, errorCode }) => { + const $metadata = deserializeMetadata(output); + const statusCode = $metadata.httpStatusCode ? $metadata.httpStatusCode + "" : undefined; + const response = new exceptionCtor({ + name: parsedBody?.code || parsedBody?.Code || errorCode || statusCode || "UnknownError", + $fault: "client", + $metadata, + }); + throw decorateServiceException(response, parsedBody); +}; +export const withBaseException = (ExceptionCtor) => { + return ({ output, parsedBody, errorCode }) => { + throwDefaultError({ output, parsedBody, exceptionCtor: ExceptionCtor, errorCode }); + }; +}; +const deserializeMetadata = (output) => ({ + httpStatusCode: output.statusCode, + requestId: output.headers["x-amzn-requestid"] ?? output.headers["x-amzn-request-id"] ?? output.headers["x-amz-request-id"], + extendedRequestId: output.headers["x-amz-id-2"], + cfId: output.headers["x-amz-cf-id"], +}); diff --git a/amplify/functions/downloadDocument/node_modules/@smithy/smithy-client/dist-es/defaults-mode.js b/amplify/functions/downloadDocument/node_modules/@smithy/smithy-client/dist-es/defaults-mode.js new file mode 100644 index 0000000..f19079c --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@smithy/smithy-client/dist-es/defaults-mode.js @@ -0,0 +1,26 @@ +export const loadConfigsForDefaultMode = (mode) => { + switch (mode) { + case "standard": + return { + retryMode: "standard", + connectionTimeout: 3100, + }; + case "in-region": + return { + retryMode: "standard", + connectionTimeout: 1100, + }; + case "cross-region": + return { + retryMode: "standard", + connectionTimeout: 3100, + }; + case "mobile": + return { + retryMode: "standard", + connectionTimeout: 30000, + }; + default: + return {}; + } +}; diff --git a/amplify/functions/downloadDocument/node_modules/@smithy/smithy-client/dist-es/emitWarningIfUnsupportedVersion.js b/amplify/functions/downloadDocument/node_modules/@smithy/smithy-client/dist-es/emitWarningIfUnsupportedVersion.js new file mode 
100644 index 0000000..7b30893 --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@smithy/smithy-client/dist-es/emitWarningIfUnsupportedVersion.js @@ -0,0 +1,6 @@ +let warningEmitted = false; +export const emitWarningIfUnsupportedVersion = (version) => { + if (version && !warningEmitted && parseInt(version.substring(1, version.indexOf("."))) < 16) { + warningEmitted = true; + } +}; diff --git a/amplify/functions/downloadDocument/node_modules/@smithy/smithy-client/dist-es/exceptions.js b/amplify/functions/downloadDocument/node_modules/@smithy/smithy-client/dist-es/exceptions.js new file mode 100644 index 0000000..db6a801 --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@smithy/smithy-client/dist-es/exceptions.js @@ -0,0 +1,46 @@ +export class ServiceException extends Error { + constructor(options) { + super(options.message); + Object.setPrototypeOf(this, Object.getPrototypeOf(this).constructor.prototype); + this.name = options.name; + this.$fault = options.$fault; + this.$metadata = options.$metadata; + } + static isInstance(value) { + if (!value) + return false; + const candidate = value; + return (ServiceException.prototype.isPrototypeOf(candidate) || + (Boolean(candidate.$fault) && + Boolean(candidate.$metadata) && + (candidate.$fault === "client" || candidate.$fault === "server"))); + } + static [Symbol.hasInstance](instance) { + if (!instance) + return false; + const candidate = instance; + if (this === ServiceException) { + return ServiceException.isInstance(instance); + } + if (ServiceException.isInstance(instance)) { + if (candidate.name && this.name) { + return this.prototype.isPrototypeOf(instance) || candidate.name === this.name; + } + return this.prototype.isPrototypeOf(instance); + } + return false; + } +} +export const decorateServiceException = (exception, additions = {}) => { + Object.entries(additions) + .filter(([, v]) => v !== undefined) + .forEach(([k, v]) => { + if (exception[k] == undefined || exception[k] === 
"") { + exception[k] = v; + } + }); + const message = exception.message || exception.Message || "UnknownError"; + exception.message = message; + delete exception.Message; + return exception; +}; diff --git a/amplify/functions/downloadDocument/node_modules/@smithy/smithy-client/dist-es/extended-encode-uri-component.js b/amplify/functions/downloadDocument/node_modules/@smithy/smithy-client/dist-es/extended-encode-uri-component.js new file mode 100644 index 0000000..cb4f991 --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@smithy/smithy-client/dist-es/extended-encode-uri-component.js @@ -0,0 +1 @@ +export { extendedEncodeURIComponent } from "@smithy/core/protocols"; diff --git a/amplify/functions/downloadDocument/node_modules/@smithy/smithy-client/dist-es/extensions/checksum.js b/amplify/functions/downloadDocument/node_modules/@smithy/smithy-client/dist-es/extensions/checksum.js new file mode 100644 index 0000000..f3831ee --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@smithy/smithy-client/dist-es/extensions/checksum.js @@ -0,0 +1,30 @@ +import { AlgorithmId } from "@smithy/types"; +export { AlgorithmId }; +export const getChecksumConfiguration = (runtimeConfig) => { + const checksumAlgorithms = []; + for (const id in AlgorithmId) { + const algorithmId = AlgorithmId[id]; + if (runtimeConfig[algorithmId] === undefined) { + continue; + } + checksumAlgorithms.push({ + algorithmId: () => algorithmId, + checksumConstructor: () => runtimeConfig[algorithmId], + }); + } + return { + addChecksumAlgorithm(algo) { + checksumAlgorithms.push(algo); + }, + checksumAlgorithms() { + return checksumAlgorithms; + }, + }; +}; +export const resolveChecksumRuntimeConfig = (clientConfig) => { + const runtimeConfig = {}; + clientConfig.checksumAlgorithms().forEach((checksumAlgorithm) => { + runtimeConfig[checksumAlgorithm.algorithmId()] = checksumAlgorithm.checksumConstructor(); + }); + return runtimeConfig; +}; diff --git 
a/amplify/functions/downloadDocument/node_modules/@smithy/smithy-client/dist-es/extensions/defaultExtensionConfiguration.js b/amplify/functions/downloadDocument/node_modules/@smithy/smithy-client/dist-es/extensions/defaultExtensionConfiguration.js new file mode 100644 index 0000000..272cd3a --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@smithy/smithy-client/dist-es/extensions/defaultExtensionConfiguration.js @@ -0,0 +1,9 @@ +import { getChecksumConfiguration, resolveChecksumRuntimeConfig } from "./checksum"; +import { getRetryConfiguration, resolveRetryRuntimeConfig } from "./retry"; +export const getDefaultExtensionConfiguration = (runtimeConfig) => { + return Object.assign(getChecksumConfiguration(runtimeConfig), getRetryConfiguration(runtimeConfig)); +}; +export const getDefaultClientConfiguration = getDefaultExtensionConfiguration; +export const resolveDefaultRuntimeConfig = (config) => { + return Object.assign(resolveChecksumRuntimeConfig(config), resolveRetryRuntimeConfig(config)); +}; diff --git a/amplify/functions/downloadDocument/node_modules/@smithy/smithy-client/dist-es/extensions/index.js b/amplify/functions/downloadDocument/node_modules/@smithy/smithy-client/dist-es/extensions/index.js new file mode 100644 index 0000000..f1b8074 --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@smithy/smithy-client/dist-es/extensions/index.js @@ -0,0 +1 @@ +export * from "./defaultExtensionConfiguration"; diff --git a/amplify/functions/downloadDocument/node_modules/@smithy/smithy-client/dist-es/extensions/retry.js b/amplify/functions/downloadDocument/node_modules/@smithy/smithy-client/dist-es/extensions/retry.js new file mode 100644 index 0000000..2c18b0a --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@smithy/smithy-client/dist-es/extensions/retry.js @@ -0,0 +1,15 @@ +export const getRetryConfiguration = (runtimeConfig) => { + return { + setRetryStrategy(retryStrategy) { + runtimeConfig.retryStrategy = 
retryStrategy; + }, + retryStrategy() { + return runtimeConfig.retryStrategy; + }, + }; +}; +export const resolveRetryRuntimeConfig = (retryStrategyConfiguration) => { + const runtimeConfig = {}; + runtimeConfig.retryStrategy = retryStrategyConfiguration.retryStrategy(); + return runtimeConfig; +}; diff --git a/amplify/functions/downloadDocument/node_modules/@smithy/smithy-client/dist-es/get-array-if-single-item.js b/amplify/functions/downloadDocument/node_modules/@smithy/smithy-client/dist-es/get-array-if-single-item.js new file mode 100644 index 0000000..25d9432 --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@smithy/smithy-client/dist-es/get-array-if-single-item.js @@ -0,0 +1 @@ +export const getArrayIfSingleItem = (mayBeArray) => Array.isArray(mayBeArray) ? mayBeArray : [mayBeArray]; diff --git a/amplify/functions/downloadDocument/node_modules/@smithy/smithy-client/dist-es/get-value-from-text-node.js b/amplify/functions/downloadDocument/node_modules/@smithy/smithy-client/dist-es/get-value-from-text-node.js new file mode 100644 index 0000000..aa0f827 --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@smithy/smithy-client/dist-es/get-value-from-text-node.js @@ -0,0 +1,12 @@ +export const getValueFromTextNode = (obj) => { + const textNodeName = "#text"; + for (const key in obj) { + if (obj.hasOwnProperty(key) && obj[key][textNodeName] !== undefined) { + obj[key] = obj[key][textNodeName]; + } + else if (typeof obj[key] === "object" && obj[key] !== null) { + obj[key] = getValueFromTextNode(obj[key]); + } + } + return obj; +}; diff --git a/amplify/functions/downloadDocument/node_modules/@smithy/smithy-client/dist-es/index.js b/amplify/functions/downloadDocument/node_modules/@smithy/smithy-client/dist-es/index.js new file mode 100644 index 0000000..b05ab01 --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@smithy/smithy-client/dist-es/index.js @@ -0,0 +1,25 @@ +export * from "./client"; +export * from 
"./collect-stream-body"; +export * from "./command"; +export * from "./constants"; +export * from "./create-aggregated-client"; +export * from "./date-utils"; +export * from "./default-error-handler"; +export * from "./defaults-mode"; +export * from "./emitWarningIfUnsupportedVersion"; +export * from "./exceptions"; +export * from "./extended-encode-uri-component"; +export * from "./extensions"; +export * from "./get-array-if-single-item"; +export * from "./get-value-from-text-node"; +export * from "./is-serializable-header-value"; +export * from "./lazy-json"; +export * from "./NoOpLogger"; +export * from "./object-mapping"; +export * from "./parse-utils"; +export * from "./quote-header"; +export * from "./resolve-path"; +export * from "./ser-utils"; +export * from "./serde-json"; +export * from "./split-every"; +export * from "./split-header"; diff --git a/amplify/functions/downloadDocument/node_modules/@smithy/smithy-client/dist-es/is-serializable-header-value.js b/amplify/functions/downloadDocument/node_modules/@smithy/smithy-client/dist-es/is-serializable-header-value.js new file mode 100644 index 0000000..cb117ca --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@smithy/smithy-client/dist-es/is-serializable-header-value.js @@ -0,0 +1,3 @@ +export const isSerializableHeaderValue = (value) => { + return value != null; +}; diff --git a/amplify/functions/downloadDocument/node_modules/@smithy/smithy-client/dist-es/lazy-json.js b/amplify/functions/downloadDocument/node_modules/@smithy/smithy-client/dist-es/lazy-json.js new file mode 100644 index 0000000..9bddfce --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@smithy/smithy-client/dist-es/lazy-json.js @@ -0,0 +1,24 @@ +export const LazyJsonString = function LazyJsonString(val) { + const str = Object.assign(new String(val), { + deserializeJSON() { + return JSON.parse(String(val)); + }, + toString() { + return String(val); + }, + toJSON() { + return String(val); + }, + }); + 
return str; +}; +LazyJsonString.from = (object) => { + if (object && typeof object === "object" && (object instanceof LazyJsonString || "deserializeJSON" in object)) { + return object; + } + else if (typeof object === "string" || Object.getPrototypeOf(object) === String.prototype) { + return LazyJsonString(String(object)); + } + return LazyJsonString(JSON.stringify(object)); +}; +LazyJsonString.fromObject = LazyJsonString.from; diff --git a/amplify/functions/downloadDocument/node_modules/@smithy/smithy-client/dist-es/object-mapping.js b/amplify/functions/downloadDocument/node_modules/@smithy/smithy-client/dist-es/object-mapping.js new file mode 100644 index 0000000..84a1f26 --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@smithy/smithy-client/dist-es/object-mapping.js @@ -0,0 +1,92 @@ +export function map(arg0, arg1, arg2) { + let target; + let filter; + let instructions; + if (typeof arg1 === "undefined" && typeof arg2 === "undefined") { + target = {}; + instructions = arg0; + } + else { + target = arg0; + if (typeof arg1 === "function") { + filter = arg1; + instructions = arg2; + return mapWithFilter(target, filter, instructions); + } + else { + instructions = arg1; + } + } + for (const key of Object.keys(instructions)) { + if (!Array.isArray(instructions[key])) { + target[key] = instructions[key]; + continue; + } + applyInstruction(target, null, instructions, key); + } + return target; +} +export const convertMap = (target) => { + const output = {}; + for (const [k, v] of Object.entries(target || {})) { + output[k] = [, v]; + } + return output; +}; +export const take = (source, instructions) => { + const out = {}; + for (const key in instructions) { + applyInstruction(out, source, instructions, key); + } + return out; +}; +const mapWithFilter = (target, filter, instructions) => { + return map(target, Object.entries(instructions).reduce((_instructions, [key, value]) => { + if (Array.isArray(value)) { + _instructions[key] = value; + } + else { 
+ if (typeof value === "function") { + _instructions[key] = [filter, value()]; + } + else { + _instructions[key] = [filter, value]; + } + } + return _instructions; + }, {})); +}; +const applyInstruction = (target, source, instructions, targetKey) => { + if (source !== null) { + let instruction = instructions[targetKey]; + if (typeof instruction === "function") { + instruction = [, instruction]; + } + const [filter = nonNullish, valueFn = pass, sourceKey = targetKey] = instruction; + if ((typeof filter === "function" && filter(source[sourceKey])) || (typeof filter !== "function" && !!filter)) { + target[targetKey] = valueFn(source[sourceKey]); + } + return; + } + let [filter, value] = instructions[targetKey]; + if (typeof value === "function") { + let _value; + const defaultFilterPassed = filter === undefined && (_value = value()) != null; + const customFilterPassed = (typeof filter === "function" && !!filter(void 0)) || (typeof filter !== "function" && !!filter); + if (defaultFilterPassed) { + target[targetKey] = _value; + } + else if (customFilterPassed) { + target[targetKey] = value(); + } + } + else { + const defaultFilterPassed = filter === undefined && value != null; + const customFilterPassed = (typeof filter === "function" && !!filter(value)) || (typeof filter !== "function" && !!filter); + if (defaultFilterPassed || customFilterPassed) { + target[targetKey] = value; + } + } +}; +const nonNullish = (_) => _ != null; +const pass = (_) => _; diff --git a/amplify/functions/downloadDocument/node_modules/@smithy/smithy-client/dist-es/parse-utils.js b/amplify/functions/downloadDocument/node_modules/@smithy/smithy-client/dist-es/parse-utils.js new file mode 100644 index 0000000..209db79 --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@smithy/smithy-client/dist-es/parse-utils.js @@ -0,0 +1,230 @@ +export const parseBoolean = (value) => { + switch (value) { + case "true": + return true; + case "false": + return false; + default: + throw new 
Error(`Unable to parse boolean value "${value}"`); + } +}; +export const expectBoolean = (value) => { + if (value === null || value === undefined) { + return undefined; + } + if (typeof value === "number") { + if (value === 0 || value === 1) { + logger.warn(stackTraceWarning(`Expected boolean, got ${typeof value}: ${value}`)); + } + if (value === 0) { + return false; + } + if (value === 1) { + return true; + } + } + if (typeof value === "string") { + const lower = value.toLowerCase(); + if (lower === "false" || lower === "true") { + logger.warn(stackTraceWarning(`Expected boolean, got ${typeof value}: ${value}`)); + } + if (lower === "false") { + return false; + } + if (lower === "true") { + return true; + } + } + if (typeof value === "boolean") { + return value; + } + throw new TypeError(`Expected boolean, got ${typeof value}: ${value}`); +}; +export const expectNumber = (value) => { + if (value === null || value === undefined) { + return undefined; + } + if (typeof value === "string") { + const parsed = parseFloat(value); + if (!Number.isNaN(parsed)) { + if (String(parsed) !== String(value)) { + logger.warn(stackTraceWarning(`Expected number but observed string: ${value}`)); + } + return parsed; + } + } + if (typeof value === "number") { + return value; + } + throw new TypeError(`Expected number, got ${typeof value}: ${value}`); +}; +const MAX_FLOAT = Math.ceil(2 ** 127 * (2 - 2 ** -23)); +export const expectFloat32 = (value) => { + const expected = expectNumber(value); + if (expected !== undefined && !Number.isNaN(expected) && expected !== Infinity && expected !== -Infinity) { + if (Math.abs(expected) > MAX_FLOAT) { + throw new TypeError(`Expected 32-bit float, got ${value}`); + } + } + return expected; +}; +export const expectLong = (value) => { + if (value === null || value === undefined) { + return undefined; + } + if (Number.isInteger(value) && !Number.isNaN(value)) { + return value; + } + throw new TypeError(`Expected integer, got ${typeof value}: 
${value}`); +}; +export const expectInt = expectLong; +export const expectInt32 = (value) => expectSizedInt(value, 32); +export const expectShort = (value) => expectSizedInt(value, 16); +export const expectByte = (value) => expectSizedInt(value, 8); +const expectSizedInt = (value, size) => { + const expected = expectLong(value); + if (expected !== undefined && castInt(expected, size) !== expected) { + throw new TypeError(`Expected ${size}-bit integer, got ${value}`); + } + return expected; +}; +const castInt = (value, size) => { + switch (size) { + case 32: + return Int32Array.of(value)[0]; + case 16: + return Int16Array.of(value)[0]; + case 8: + return Int8Array.of(value)[0]; + } +}; +export const expectNonNull = (value, location) => { + if (value === null || value === undefined) { + if (location) { + throw new TypeError(`Expected a non-null value for ${location}`); + } + throw new TypeError("Expected a non-null value"); + } + return value; +}; +export const expectObject = (value) => { + if (value === null || value === undefined) { + return undefined; + } + if (typeof value === "object" && !Array.isArray(value)) { + return value; + } + const receivedType = Array.isArray(value) ? 
"array" : typeof value; + throw new TypeError(`Expected object, got ${receivedType}: ${value}`); +}; +export const expectString = (value) => { + if (value === null || value === undefined) { + return undefined; + } + if (typeof value === "string") { + return value; + } + if (["boolean", "number", "bigint"].includes(typeof value)) { + logger.warn(stackTraceWarning(`Expected string, got ${typeof value}: ${value}`)); + return String(value); + } + throw new TypeError(`Expected string, got ${typeof value}: ${value}`); +}; +export const expectUnion = (value) => { + if (value === null || value === undefined) { + return undefined; + } + const asObject = expectObject(value); + const setKeys = Object.entries(asObject) + .filter(([, v]) => v != null) + .map(([k]) => k); + if (setKeys.length === 0) { + throw new TypeError(`Unions must have exactly one non-null member. None were found.`); + } + if (setKeys.length > 1) { + throw new TypeError(`Unions must have exactly one non-null member. Keys ${setKeys} were not null.`); + } + return asObject; +}; +export const strictParseDouble = (value) => { + if (typeof value == "string") { + return expectNumber(parseNumber(value)); + } + return expectNumber(value); +}; +export const strictParseFloat = strictParseDouble; +export const strictParseFloat32 = (value) => { + if (typeof value == "string") { + return expectFloat32(parseNumber(value)); + } + return expectFloat32(value); +}; +const NUMBER_REGEX = /(-?(?:0|[1-9]\d*)(?:\.\d+)?(?:[eE][+-]?\d+)?)|(-?Infinity)|(NaN)/g; +const parseNumber = (value) => { + const matches = value.match(NUMBER_REGEX); + if (matches === null || matches[0].length !== value.length) { + throw new TypeError(`Expected real number, got implicit NaN`); + } + return parseFloat(value); +}; +export const limitedParseDouble = (value) => { + if (typeof value == "string") { + return parseFloatString(value); + } + return expectNumber(value); +}; +export const handleFloat = limitedParseDouble; +export const limitedParseFloat = 
limitedParseDouble; +export const limitedParseFloat32 = (value) => { + if (typeof value == "string") { + return parseFloatString(value); + } + return expectFloat32(value); +}; +const parseFloatString = (value) => { + switch (value) { + case "NaN": + return NaN; + case "Infinity": + return Infinity; + case "-Infinity": + return -Infinity; + default: + throw new Error(`Unable to parse float value: ${value}`); + } +}; +export const strictParseLong = (value) => { + if (typeof value === "string") { + return expectLong(parseNumber(value)); + } + return expectLong(value); +}; +export const strictParseInt = strictParseLong; +export const strictParseInt32 = (value) => { + if (typeof value === "string") { + return expectInt32(parseNumber(value)); + } + return expectInt32(value); +}; +export const strictParseShort = (value) => { + if (typeof value === "string") { + return expectShort(parseNumber(value)); + } + return expectShort(value); +}; +export const strictParseByte = (value) => { + if (typeof value === "string") { + return expectByte(parseNumber(value)); + } + return expectByte(value); +}; +const stackTraceWarning = (message) => { + return String(new TypeError(message).stack || message) + .split("\n") + .slice(0, 5) + .filter((s) => !s.includes("stackTraceWarning")) + .join("\n"); +}; +export const logger = { + warn: console.warn, +}; diff --git a/amplify/functions/downloadDocument/node_modules/@smithy/smithy-client/dist-es/quote-header.js b/amplify/functions/downloadDocument/node_modules/@smithy/smithy-client/dist-es/quote-header.js new file mode 100644 index 0000000..d0ddf67 --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@smithy/smithy-client/dist-es/quote-header.js @@ -0,0 +1,6 @@ +export function quoteHeader(part) { + if (part.includes(",") || part.includes('"')) { + part = `"${part.replace(/"/g, '\\"')}"`; + } + return part; +} diff --git a/amplify/functions/downloadDocument/node_modules/@smithy/smithy-client/dist-es/resolve-path.js 
b/amplify/functions/downloadDocument/node_modules/@smithy/smithy-client/dist-es/resolve-path.js new file mode 100644 index 0000000..6c70cb3 --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@smithy/smithy-client/dist-es/resolve-path.js @@ -0,0 +1 @@ +export { resolvedPath } from "@smithy/core/protocols"; diff --git a/amplify/functions/downloadDocument/node_modules/@smithy/smithy-client/dist-es/ser-utils.js b/amplify/functions/downloadDocument/node_modules/@smithy/smithy-client/dist-es/ser-utils.js new file mode 100644 index 0000000..207437f --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@smithy/smithy-client/dist-es/ser-utils.js @@ -0,0 +1,14 @@ +export const serializeFloat = (value) => { + if (value !== value) { + return "NaN"; + } + switch (value) { + case Infinity: + return "Infinity"; + case -Infinity: + return "-Infinity"; + default: + return value; + } +}; +export const serializeDateTime = (date) => date.toISOString().replace(".000Z", "Z"); diff --git a/amplify/functions/downloadDocument/node_modules/@smithy/smithy-client/dist-es/serde-json.js b/amplify/functions/downloadDocument/node_modules/@smithy/smithy-client/dist-es/serde-json.js new file mode 100644 index 0000000..babb7c1 --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@smithy/smithy-client/dist-es/serde-json.js @@ -0,0 +1,19 @@ +export const _json = (obj) => { + if (obj == null) { + return {}; + } + if (Array.isArray(obj)) { + return obj.filter((_) => _ != null).map(_json); + } + if (typeof obj === "object") { + const target = {}; + for (const key of Object.keys(obj)) { + if (obj[key] == null) { + continue; + } + target[key] = _json(obj[key]); + } + return target; + } + return obj; +}; diff --git a/amplify/functions/downloadDocument/node_modules/@smithy/smithy-client/dist-es/split-every.js b/amplify/functions/downloadDocument/node_modules/@smithy/smithy-client/dist-es/split-every.js new file mode 100644 index 0000000..1d78dca --- /dev/null +++ 
b/amplify/functions/downloadDocument/node_modules/@smithy/smithy-client/dist-es/split-every.js @@ -0,0 +1,27 @@ +export function splitEvery(value, delimiter, numDelimiters) { + if (numDelimiters <= 0 || !Number.isInteger(numDelimiters)) { + throw new Error("Invalid number of delimiters (" + numDelimiters + ") for splitEvery."); + } + const segments = value.split(delimiter); + if (numDelimiters === 1) { + return segments; + } + const compoundSegments = []; + let currentSegment = ""; + for (let i = 0; i < segments.length; i++) { + if (currentSegment === "") { + currentSegment = segments[i]; + } + else { + currentSegment += delimiter + segments[i]; + } + if ((i + 1) % numDelimiters === 0) { + compoundSegments.push(currentSegment); + currentSegment = ""; + } + } + if (currentSegment !== "") { + compoundSegments.push(currentSegment); + } + return compoundSegments; +} diff --git a/amplify/functions/downloadDocument/node_modules/@smithy/smithy-client/dist-es/split-header.js b/amplify/functions/downloadDocument/node_modules/@smithy/smithy-client/dist-es/split-header.js new file mode 100644 index 0000000..518e77f --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@smithy/smithy-client/dist-es/split-header.js @@ -0,0 +1,37 @@ +export const splitHeader = (value) => { + const z = value.length; + const values = []; + let withinQuotes = false; + let prevChar = undefined; + let anchor = 0; + for (let i = 0; i < z; ++i) { + const char = value[i]; + switch (char) { + case `"`: + if (prevChar !== "\\") { + withinQuotes = !withinQuotes; + } + break; + case ",": + if (!withinQuotes) { + values.push(value.slice(anchor, i)); + anchor = i + 1; + } + break; + default: + } + prevChar = char; + } + values.push(value.slice(anchor)); + return values.map((v) => { + v = v.trim(); + const z = v.length; + if (z < 2) { + return v; + } + if (v[0] === `"` && v[z - 1] === `"`) { + v = v.slice(1, z - 1); + } + return v.replace(/\\"/g, '"'); + }); +}; diff --git 
a/amplify/functions/downloadDocument/node_modules/@smithy/smithy-client/dist-types/NoOpLogger.d.ts b/amplify/functions/downloadDocument/node_modules/@smithy/smithy-client/dist-types/NoOpLogger.d.ts new file mode 100644 index 0000000..93ebff4 --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@smithy/smithy-client/dist-types/NoOpLogger.d.ts @@ -0,0 +1,11 @@ +import { Logger } from "@smithy/types"; +/** + * @internal + */ +export declare class NoOpLogger implements Logger { + trace(): void; + debug(): void; + info(): void; + warn(): void; + error(): void; +} diff --git a/amplify/functions/downloadDocument/node_modules/@smithy/smithy-client/dist-types/client.d.ts b/amplify/functions/downloadDocument/node_modules/@smithy/smithy-client/dist-types/client.d.ts new file mode 100644 index 0000000..6f155e5 --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@smithy/smithy-client/dist-types/client.d.ts @@ -0,0 +1,61 @@ +import { Client as IClient, Command, FetchHttpHandlerOptions, MetadataBearer, MiddlewareStack, NodeHttpHandlerOptions, RequestHandler } from "@smithy/types"; +/** + * @public + */ +export interface SmithyConfiguration { + requestHandler: RequestHandler | NodeHttpHandlerOptions | FetchHttpHandlerOptions | Record; + /** + * The API version set internally by the SDK, and is + * not planned to be used by customer code. + * @internal + */ + readonly apiVersion: string; + /** + * @public + * + * Default false. + * + * When true, the client will only resolve the middleware stack once per + * Command class. This means modifying the middlewareStack of the + * command or client after requests have been made will not be + * recognized. + * + * Calling client.destroy() also clears this cache. + * + * Enable this only if needing the additional time saved (0-1ms per request) + * and not needing middleware modifications between requests. 
+ */ + cacheMiddleware?: boolean; +} +/** + * @internal + */ +export type SmithyResolvedConfiguration = { + requestHandler: RequestHandler; + readonly apiVersion: string; + cacheMiddleware?: boolean; +}; +/** + * @public + */ +export declare class Client> implements IClient { + readonly config: ResolvedClientConfiguration; + middlewareStack: MiddlewareStack; + /** + * Holds an object reference to the initial configuration object. + * Used to check that the config resolver stack does not create + * dangling instances of an intermediate form of the configuration object. + * + * @internal + */ + initConfig?: object; + /** + * May be used to cache the resolved handler function for a Command class. + */ + private handlers?; + constructor(config: ResolvedClientConfiguration); + send(command: Command>, options?: HandlerOptions): Promise; + send(command: Command>, cb: (err: any, data?: OutputType) => void): void; + send(command: Command>, options: HandlerOptions, cb: (err: any, data?: OutputType) => void): void; + destroy(): void; +} diff --git a/amplify/functions/downloadDocument/node_modules/@smithy/smithy-client/dist-types/collect-stream-body.d.ts b/amplify/functions/downloadDocument/node_modules/@smithy/smithy-client/dist-types/collect-stream-body.d.ts new file mode 100644 index 0000000..33378b8 --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@smithy/smithy-client/dist-types/collect-stream-body.d.ts @@ -0,0 +1,5 @@ +/** + * @internal + * Backwards compatibility re-export. 
+ */ +export { collectBody } from "@smithy/core/protocols"; diff --git a/amplify/functions/downloadDocument/node_modules/@smithy/smithy-client/dist-types/command.d.ts b/amplify/functions/downloadDocument/node_modules/@smithy/smithy-client/dist-types/command.d.ts new file mode 100644 index 0000000..3625b88 --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@smithy/smithy-client/dist-types/command.d.ts @@ -0,0 +1,113 @@ +import type { EndpointParameterInstructions } from "@smithy/middleware-endpoint"; +import type { Command as ICommand, Handler, HandlerExecutionContext, HttpRequest as IHttpRequest, HttpResponse as IHttpResponse, Logger, MetadataBearer, MiddlewareStack as IMiddlewareStack, OptionalParameter, Pluggable, RequestHandler, SerdeContext } from "@smithy/types"; +/** + * @public + */ +export declare abstract class Command implements ICommand { + abstract input: Input; + readonly middlewareStack: IMiddlewareStack; + /** + * Factory for Command ClassBuilder. + * @internal + */ + static classBuilder; + }, SI extends object = any, SO extends MetadataBearer = any>(): ClassBuilder; + abstract resolveMiddleware(stack: IMiddlewareStack, configuration: ResolvedClientConfiguration, options: any): Handler; + /** + * @internal + */ + resolveMiddlewareWithContext(clientStack: IMiddlewareStack, configuration: { + logger: Logger; + requestHandler: RequestHandler; + }, options: any, { middlewareFn, clientName, commandName, inputFilterSensitiveLog, outputFilterSensitiveLog, smithyContext, additionalContext, CommandCtor, }: ResolveMiddlewareContextArgs): import("@smithy/types").InitializeHandler; +} +/** + * @internal + */ +type ResolveMiddlewareContextArgs = { + middlewareFn: (CommandCtor: any, clientStack: any, config: any, options: any) => Pluggable[]; + clientName: string; + commandName: string; + smithyContext: Record; + additionalContext: HandlerExecutionContext; + inputFilterSensitiveLog: (_: any) => any; + outputFilterSensitiveLog: (_: any) => any; + 
CommandCtor: any; +}; +/** + * @internal + */ +declare class ClassBuilder; +}, SI extends object = any, SO extends MetadataBearer = any> { + private _init; + private _ep; + private _middlewareFn; + private _commandName; + private _clientName; + private _additionalContext; + private _smithyContext; + private _inputFilterSensitiveLog; + private _outputFilterSensitiveLog; + private _serializer; + private _deserializer; + /** + * Optional init callback. + */ + init(cb: (_: Command) => void): void; + /** + * Set the endpoint parameter instructions. + */ + ep(endpointParameterInstructions: EndpointParameterInstructions): ClassBuilder; + /** + * Add any number of middleware. + */ + m(middlewareSupplier: (CommandCtor: any, clientStack: any, config: any, options: any) => Pluggable[]): ClassBuilder; + /** + * Set the initial handler execution context Smithy field. + */ + s(service: string, operation: string, smithyContext?: Record): ClassBuilder; + /** + * Set the initial handler execution context. + */ + c(additionalContext?: HandlerExecutionContext): ClassBuilder; + /** + * Set constant string identifiers for the operation. + */ + n(clientName: string, commandName: string): ClassBuilder; + /** + * Set the input and output sensistive log filters. + */ + f(inputFilter?: (_: any) => any, outputFilter?: (_: any) => any): ClassBuilder; + /** + * Sets the serializer. + */ + ser(serializer: (input: I, context?: SerdeContext | any) => Promise): ClassBuilder; + /** + * Sets the deserializer. + */ + de(deserializer: (output: IHttpResponse, context?: SerdeContext | any) => Promise): ClassBuilder; + /** + * @returns a Command class with the classBuilder properties. + */ + build(): { + new (input: I): CommandImpl; + new (...[input]: OptionalParameter): CommandImpl; + getEndpointParameterInstructions(): EndpointParameterInstructions; + }; +} +/** + * A concrete implementation of ICommand with no abstract members. 
+ * @public + */ +export interface CommandImpl; +}, SI extends object = any, SO extends MetadataBearer = any> extends Command { + readonly input: I; + resolveMiddleware(stack: IMiddlewareStack, configuration: C, options: any): Handler; +} +export {}; diff --git a/amplify/functions/downloadDocument/node_modules/@smithy/smithy-client/dist-types/constants.d.ts b/amplify/functions/downloadDocument/node_modules/@smithy/smithy-client/dist-types/constants.d.ts new file mode 100644 index 0000000..c17e1c8 --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@smithy/smithy-client/dist-types/constants.d.ts @@ -0,0 +1,4 @@ +/** + * @internal + */ +export declare const SENSITIVE_STRING = "***SensitiveInformation***"; diff --git a/amplify/functions/downloadDocument/node_modules/@smithy/smithy-client/dist-types/create-aggregated-client.d.ts b/amplify/functions/downloadDocument/node_modules/@smithy/smithy-client/dist-types/create-aggregated-client.d.ts new file mode 100644 index 0000000..00e23d8 --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@smithy/smithy-client/dist-types/create-aggregated-client.d.ts @@ -0,0 +1,9 @@ +import { Client } from "./client"; +/** + * @internal + * + * @param commands - command lookup container. + * @param client - client instance on which to add aggregated methods. + * @returns an aggregated client with dynamically created methods. 
+ */ +export declare const createAggregatedClient: (commands: Record, Client: new (...args: any) => Client) => void; diff --git a/amplify/functions/downloadDocument/node_modules/@smithy/smithy-client/dist-types/date-utils.d.ts b/amplify/functions/downloadDocument/node_modules/@smithy/smithy-client/dist-types/date-utils.d.ts new file mode 100644 index 0000000..99c55f4 --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@smithy/smithy-client/dist-types/date-utils.d.ts @@ -0,0 +1,73 @@ +/** + * @internal + * + * Builds a proper UTC HttpDate timestamp from a Date object + * since not all environments will have this as the expected + * format. + * + * @see {@link https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Global_Objects/Date/toUTCString} + * - Prior to ECMAScript 2018, the format of the return value + * - varied according to the platform. The most common return + * - value was an RFC-1123 formatted date stamp, which is a + * - slightly updated version of RFC-822 date stamps. + */ +export declare function dateToUtcString(date: Date): string; +/** + * @internal + * + * Parses a value into a Date. Returns undefined if the input is null or + * undefined, throws an error if the input is not a string that can be parsed + * as an RFC 3339 date. + * + * Input strings must conform to RFC3339 section 5.6, and cannot have a UTC + * offset. Fractional precision is supported. + * + * @see {@link https://xml2rfc.tools.ietf.org/public/rfc/html/rfc3339.html#anchor14} + * + * @param value - the value to parse + * @returns a Date or undefined + */ +export declare const parseRfc3339DateTime: (value: unknown) => Date | undefined; +/** + * @internal + * + * Parses a value into a Date. Returns undefined if the input is null or + * undefined, throws an error if the input is not a string that can be parsed + * as an RFC 3339 date. + * + * Input strings must conform to RFC3339 section 5.6, and can have a UTC + * offset. Fractional precision is supported. 
+ * + * @see {@link https://xml2rfc.tools.ietf.org/public/rfc/html/rfc3339.html#anchor14} + * + * @param value - the value to parse + * @returns a Date or undefined + */ +export declare const parseRfc3339DateTimeWithOffset: (value: unknown) => Date | undefined; +/** + * @internal + * + * Parses a value into a Date. Returns undefined if the input is null or + * undefined, throws an error if the input is not a string that can be parsed + * as an RFC 7231 IMF-fixdate or obs-date. + * + * Input strings must conform to RFC7231 section 7.1.1.1. Fractional seconds are supported. + * + * @see {@link https://datatracker.ietf.org/doc/html/rfc7231.html#section-7.1.1.1} + * + * @param value - the value to parse + * @returns a Date or undefined + */ +export declare const parseRfc7231DateTime: (value: unknown) => Date | undefined; +/** + * @internal + * + * Parses a value into a Date. Returns undefined if the input is null or + * undefined, throws an error if the input is not a number or a parseable string. + * + * Input strings must be an integer or floating point number. Fractional seconds are supported. + * + * @param value - the value to parse + * @returns a Date or undefined + */ +export declare const parseEpochTimestamp: (value: unknown) => Date | undefined; diff --git a/amplify/functions/downloadDocument/node_modules/@smithy/smithy-client/dist-types/default-error-handler.d.ts b/amplify/functions/downloadDocument/node_modules/@smithy/smithy-client/dist-types/default-error-handler.d.ts new file mode 100644 index 0000000..fd4b52d --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@smithy/smithy-client/dist-types/default-error-handler.d.ts @@ -0,0 +1,13 @@ +/** + * Always throws an error with the given `exceptionCtor` and other arguments. + * This is only called from an error handling code path. 
+ * + * @internal + */ +export declare const throwDefaultError: ({ output, parsedBody, exceptionCtor, errorCode }: any) => never; +/** + * @internal + * + * Creates {@link throwDefaultError} with bound ExceptionCtor. + */ +export declare const withBaseException: (ExceptionCtor: new (...args: any) => any) => any; diff --git a/amplify/functions/downloadDocument/node_modules/@smithy/smithy-client/dist-types/defaults-mode.d.ts b/amplify/functions/downloadDocument/node_modules/@smithy/smithy-client/dist-types/defaults-mode.d.ts new file mode 100644 index 0000000..1ddb6f0 --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@smithy/smithy-client/dist-types/defaults-mode.d.ts @@ -0,0 +1,28 @@ +/** + * @internal + */ +export declare const loadConfigsForDefaultMode: (mode: ResolvedDefaultsMode) => DefaultsModeConfigs; +/** + * Option determining how certain default configuration options are resolved in the SDK. It can be one of the value listed below: + * * `"standard"`:

The STANDARD mode provides the latest recommended default values that should be safe to run in most scenarios

Note that the default values vended from this mode might change as best practices may evolve. As a result, it is encouraged to perform tests when upgrading the SDK

+ * * `"in-region"`:

The IN_REGION mode builds on the standard mode and includes optimization tailored for applications which call AWS services from within the same AWS region

Note that the default values vended from this mode might change as best practices may evolve. As a result, it is encouraged to perform tests when upgrading the SDK

+ * * `"cross-region"`:

The CROSS_REGION mode builds on the standard mode and includes optimization tailored for applications which call AWS services in a different region

Note that the default values vended from this mode might change as best practices may evolve. As a result, it is encouraged to perform tests when upgrading the SDK

+ * * `"mobile"`:

The MOBILE mode builds on the standard mode and includes optimization tailored for mobile applications

Note that the default values vended from this mode might change as best practices may evolve. As a result, it is encouraged to perform tests when upgrading the SDK

+ * * `"auto"`:

The AUTO mode is an experimental mode that builds on the standard mode. The SDK will attempt to discover the execution environment to determine the appropriate settings automatically.

Note that the auto detection is heuristics-based and does not guarantee 100% accuracy. STANDARD mode will be used if the execution environment cannot be determined. The auto detection might query EC2 Instance Metadata service, which might introduce latency. Therefore we recommend choosing an explicit defaults_mode instead if startup latency is critical to your application

+ * * `"legacy"`:

The LEGACY mode provides default settings that vary per SDK and were used prior to establishment of defaults_mode

+ * + * @defaultValue "legacy" + */ +export type DefaultsMode = "standard" | "in-region" | "cross-region" | "mobile" | "auto" | "legacy"; +/** + * @internal + */ +export type ResolvedDefaultsMode = Exclude; +/** + * @internal + */ +export interface DefaultsModeConfigs { + retryMode?: string; + connectionTimeout?: number; + requestTimeout?: number; +} diff --git a/amplify/functions/downloadDocument/node_modules/@smithy/smithy-client/dist-types/emitWarningIfUnsupportedVersion.d.ts b/amplify/functions/downloadDocument/node_modules/@smithy/smithy-client/dist-types/emitWarningIfUnsupportedVersion.d.ts new file mode 100644 index 0000000..8fc02ce --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@smithy/smithy-client/dist-types/emitWarningIfUnsupportedVersion.d.ts @@ -0,0 +1,8 @@ +/** + * @internal + * + * Emits warning if the provided Node.js version string is pending deprecation. + * + * @param version - The Node.js version string. + */ +export declare const emitWarningIfUnsupportedVersion: (version: string) => void; diff --git a/amplify/functions/downloadDocument/node_modules/@smithy/smithy-client/dist-types/exceptions.d.ts b/amplify/functions/downloadDocument/node_modules/@smithy/smithy-client/dist-types/exceptions.d.ts new file mode 100644 index 0000000..0a362c6 --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@smithy/smithy-client/dist-types/exceptions.d.ts @@ -0,0 +1,42 @@ +import { HttpResponse, MetadataBearer, ResponseMetadata, RetryableTrait, SmithyException } from "@smithy/types"; +/** + * The type of the exception class constructor parameter. The returned type contains the properties + * in the `ExceptionType` but not in the `BaseExceptionType`. If the `BaseExceptionType` contains + * `$metadata` and `message` properties, it's also included in the returned type. 
+ * @internal + */ +export type ExceptionOptionType = Omit>; +/** + * @public + */ +export interface ServiceExceptionOptions extends SmithyException, MetadataBearer { + message?: string; +} +/** + * @public + * + * Base exception class for the exceptions from the server-side. + */ +export declare class ServiceException extends Error implements SmithyException, MetadataBearer { + readonly $fault: "client" | "server"; + $response?: HttpResponse; + $retryable?: RetryableTrait; + $metadata: ResponseMetadata; + constructor(options: ServiceExceptionOptions); + /** + * Checks if a value is an instance of ServiceException (duck typed) + */ + static isInstance(value: unknown): value is ServiceException; + /** + * Custom instanceof check to support the operator for ServiceException base class + */ + static [Symbol.hasInstance](instance: unknown): boolean; +} +/** + * This method inject unmodeled member to a deserialized SDK exception, + * and load the error message from different possible keys('message', + * 'Message'). + * + * @internal + */ +export declare const decorateServiceException: (exception: E, additions?: Record) => E; diff --git a/amplify/functions/downloadDocument/node_modules/@smithy/smithy-client/dist-types/extended-encode-uri-component.d.ts b/amplify/functions/downloadDocument/node_modules/@smithy/smithy-client/dist-types/extended-encode-uri-component.d.ts new file mode 100644 index 0000000..ced666a --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@smithy/smithy-client/dist-types/extended-encode-uri-component.d.ts @@ -0,0 +1,5 @@ +/** + * @internal + * Backwards compatibility re-export. 
+ */ +export { extendedEncodeURIComponent } from "@smithy/core/protocols"; diff --git a/amplify/functions/downloadDocument/node_modules/@smithy/smithy-client/dist-types/extensions/checksum.d.ts b/amplify/functions/downloadDocument/node_modules/@smithy/smithy-client/dist-types/extensions/checksum.d.ts new file mode 100644 index 0000000..8b5dd7b --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@smithy/smithy-client/dist-types/extensions/checksum.d.ts @@ -0,0 +1,24 @@ +import type { ChecksumAlgorithm, ChecksumConfiguration, ChecksumConstructor, HashConstructor } from "@smithy/types"; +import { AlgorithmId } from "@smithy/types"; +export { AlgorithmId, ChecksumAlgorithm, ChecksumConfiguration }; +/** + * @internal + */ +export type PartialChecksumRuntimeConfigType = Partial<{ + sha256: ChecksumConstructor | HashConstructor; + md5: ChecksumConstructor | HashConstructor; + crc32: ChecksumConstructor | HashConstructor; + crc32c: ChecksumConstructor | HashConstructor; + sha1: ChecksumConstructor | HashConstructor; +}>; +/** + * @internal + */ +export declare const getChecksumConfiguration: (runtimeConfig: PartialChecksumRuntimeConfigType) => { + addChecksumAlgorithm(algo: ChecksumAlgorithm): void; + checksumAlgorithms(): ChecksumAlgorithm[]; +}; +/** + * @internal + */ +export declare const resolveChecksumRuntimeConfig: (clientConfig: ChecksumConfiguration) => PartialChecksumRuntimeConfigType; diff --git a/amplify/functions/downloadDocument/node_modules/@smithy/smithy-client/dist-types/extensions/defaultExtensionConfiguration.d.ts b/amplify/functions/downloadDocument/node_modules/@smithy/smithy-client/dist-types/extensions/defaultExtensionConfiguration.d.ts new file mode 100644 index 0000000..42de409 --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@smithy/smithy-client/dist-types/extensions/defaultExtensionConfiguration.d.ts @@ -0,0 +1,38 @@ +import type { DefaultExtensionConfiguration } from "@smithy/types"; +import { 
PartialChecksumRuntimeConfigType } from "./checksum"; +import { PartialRetryRuntimeConfigType } from "./retry"; +/** + * @internal + */ +export type DefaultExtensionRuntimeConfigType = PartialRetryRuntimeConfigType & PartialChecksumRuntimeConfigType; +/** + * @internal + * + * Helper function to resolve default extension configuration from runtime config + */ +export declare const getDefaultExtensionConfiguration: (runtimeConfig: DefaultExtensionRuntimeConfigType) => { + addChecksumAlgorithm(algo: import("@smithy/types").ChecksumAlgorithm): void; + checksumAlgorithms(): import("@smithy/types").ChecksumAlgorithm[]; +} & { + setRetryStrategy(retryStrategy: import("@smithy/types").Provider): void; + retryStrategy(): import("@smithy/types").Provider; +}; +/** + * @deprecated use getDefaultExtensionConfiguration + * @internal + * + * Helper function to resolve default extension configuration from runtime config + */ +export declare const getDefaultClientConfiguration: (runtimeConfig: DefaultExtensionRuntimeConfigType) => { + addChecksumAlgorithm(algo: import("@smithy/types").ChecksumAlgorithm): void; + checksumAlgorithms(): import("@smithy/types").ChecksumAlgorithm[]; +} & { + setRetryStrategy(retryStrategy: import("@smithy/types").Provider): void; + retryStrategy(): import("@smithy/types").Provider; +}; +/** + * @internal + * + * Helper function to resolve runtime config from default extension configuration + */ +export declare const resolveDefaultRuntimeConfig: (config: DefaultExtensionConfiguration) => DefaultExtensionRuntimeConfigType; diff --git a/amplify/functions/downloadDocument/node_modules/@smithy/smithy-client/dist-types/extensions/index.d.ts b/amplify/functions/downloadDocument/node_modules/@smithy/smithy-client/dist-types/extensions/index.d.ts new file mode 100644 index 0000000..f1b8074 --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@smithy/smithy-client/dist-types/extensions/index.d.ts @@ -0,0 +1 @@ +export * from 
"./defaultExtensionConfiguration"; diff --git a/amplify/functions/downloadDocument/node_modules/@smithy/smithy-client/dist-types/extensions/retry.d.ts b/amplify/functions/downloadDocument/node_modules/@smithy/smithy-client/dist-types/extensions/retry.d.ts new file mode 100644 index 0000000..6e28827 --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@smithy/smithy-client/dist-types/extensions/retry.d.ts @@ -0,0 +1,18 @@ +import { Provider, RetryStrategy, RetryStrategyConfiguration, RetryStrategyV2 } from "@smithy/types"; +/** + * @internal + */ +export type PartialRetryRuntimeConfigType = Partial<{ + retryStrategy: Provider; +}>; +/** + * @internal + */ +export declare const getRetryConfiguration: (runtimeConfig: PartialRetryRuntimeConfigType) => { + setRetryStrategy(retryStrategy: Provider): void; + retryStrategy(): Provider; +}; +/** + * @internal + */ +export declare const resolveRetryRuntimeConfig: (retryStrategyConfiguration: RetryStrategyConfiguration) => PartialRetryRuntimeConfigType; diff --git a/amplify/functions/downloadDocument/node_modules/@smithy/smithy-client/dist-types/get-array-if-single-item.d.ts b/amplify/functions/downloadDocument/node_modules/@smithy/smithy-client/dist-types/get-array-if-single-item.d.ts new file mode 100644 index 0000000..6468b91 --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@smithy/smithy-client/dist-types/get-array-if-single-item.d.ts @@ -0,0 +1,7 @@ +/** + * @internal + * + * The XML parser will set one K:V for a member that could + * return multiple entries but only has one. 
+ */ +export declare const getArrayIfSingleItem: (mayBeArray: T) => T | T[]; diff --git a/amplify/functions/downloadDocument/node_modules/@smithy/smithy-client/dist-types/get-value-from-text-node.d.ts b/amplify/functions/downloadDocument/node_modules/@smithy/smithy-client/dist-types/get-value-from-text-node.d.ts new file mode 100644 index 0000000..7163e5a --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@smithy/smithy-client/dist-types/get-value-from-text-node.d.ts @@ -0,0 +1,7 @@ +/** + * @internal + * + * Recursively parses object and populates value is node from + * "#text" key if it's available + */ +export declare const getValueFromTextNode: (obj: any) => any; diff --git a/amplify/functions/downloadDocument/node_modules/@smithy/smithy-client/dist-types/index.d.ts b/amplify/functions/downloadDocument/node_modules/@smithy/smithy-client/dist-types/index.d.ts new file mode 100644 index 0000000..4a4ac19 --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@smithy/smithy-client/dist-types/index.d.ts @@ -0,0 +1,26 @@ +export type { DocumentType, SdkError, SmithyException } from "@smithy/types"; +export * from "./client"; +export * from "./collect-stream-body"; +export * from "./command"; +export * from "./constants"; +export * from "./create-aggregated-client"; +export * from "./date-utils"; +export * from "./default-error-handler"; +export * from "./defaults-mode"; +export * from "./emitWarningIfUnsupportedVersion"; +export * from "./exceptions"; +export * from "./extended-encode-uri-component"; +export * from "./extensions"; +export * from "./get-array-if-single-item"; +export * from "./get-value-from-text-node"; +export * from "./is-serializable-header-value"; +export * from "./lazy-json"; +export * from "./NoOpLogger"; +export * from "./object-mapping"; +export * from "./parse-utils"; +export * from "./quote-header"; +export * from "./resolve-path"; +export * from "./ser-utils"; +export * from "./serde-json"; +export * from 
"./split-every"; +export * from "./split-header"; diff --git a/amplify/functions/downloadDocument/node_modules/@smithy/smithy-client/dist-types/is-serializable-header-value.d.ts b/amplify/functions/downloadDocument/node_modules/@smithy/smithy-client/dist-types/is-serializable-header-value.d.ts new file mode 100644 index 0000000..a35a23a --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@smithy/smithy-client/dist-types/is-serializable-header-value.d.ts @@ -0,0 +1,5 @@ +/** + * @internal + * @returns whether the header value is serializable. + */ +export declare const isSerializableHeaderValue: (value: any) => boolean; diff --git a/amplify/functions/downloadDocument/node_modules/@smithy/smithy-client/dist-types/lazy-json.d.ts b/amplify/functions/downloadDocument/node_modules/@smithy/smithy-client/dist-types/lazy-json.d.ts new file mode 100644 index 0000000..df7eb51 --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@smithy/smithy-client/dist-types/lazy-json.d.ts @@ -0,0 +1,46 @@ +/** + * @public + * + * A model field with this type means that you may provide a JavaScript + * object in lieu of a JSON string, and it will be serialized to JSON + * automatically before being sent in a request. + * + * For responses, you will receive a "LazyJsonString", which is a boxed String object + * with additional mixin methods. + * To get the string value, call `.toString()`, or to get the JSON object value, + * call `.deserializeJSON()` or parse it yourself. + */ +export type AutomaticJsonStringConversion = Parameters[0] | LazyJsonString; +/** + * @internal + * + */ +export interface LazyJsonString extends String { + /** + * @returns the JSON parsing of the string value. + */ + deserializeJSON(): any; + /** + * @returns the original string value rather than a JSON.stringified value. 
+ */ + toJSON(): string; +} +/** + * @internal + * + * Extension of the native String class in the previous implementation + * has negative global performance impact on method dispatch for strings, + * and is generally discouraged. + * + * This current implementation may look strange, but is necessary to preserve the interface and + * behavior of extending the String class. + */ +export declare const LazyJsonString: { + (s: string): LazyJsonString; + new (s: string): LazyJsonString; + from(s: any): LazyJsonString; + /** + * @deprecated use #from. + */ + fromObject(s: any): LazyJsonString; +}; diff --git a/amplify/functions/downloadDocument/node_modules/@smithy/smithy-client/dist-types/object-mapping.d.ts b/amplify/functions/downloadDocument/node_modules/@smithy/smithy-client/dist-types/object-mapping.d.ts new file mode 100644 index 0000000..97e28e5 --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@smithy/smithy-client/dist-types/object-mapping.d.ts @@ -0,0 +1,162 @@ +/** + * @internal + * + * A set of instructions for multiple keys. + * The aim is to provide a concise yet readable way to map and filter values + * onto a target object. 
+ * + * @example + * ```javascript + * const example: ObjectMappingInstructions = { + * lazyValue1: [, () => 1], + * lazyValue2: [, () => 2], + * lazyValue3: [, () => 3], + * lazyConditionalValue1: [() => true, () => 4], + * lazyConditionalValue2: [() => true, () => 5], + * lazyConditionalValue3: [true, () => 6], + * lazyConditionalValue4: [false, () => 44], + * lazyConditionalValue5: [() => false, () => 55], + * lazyConditionalValue6: ["", () => 66], + * simpleValue1: [, 7], + * simpleValue2: [, 8], + * simpleValue3: [, 9], + * conditionalValue1: [() => true, 10], + * conditionalValue2: [() => true, 11], + * conditionalValue3: [{}, 12], + * conditionalValue4: [false, 110], + * conditionalValue5: [() => false, 121], + * conditionalValue6: ["", 132], + * }; + * + * const exampleResult: Record = { + * lazyValue1: 1, + * lazyValue2: 2, + * lazyValue3: 3, + * lazyConditionalValue1: 4, + * lazyConditionalValue2: 5, + * lazyConditionalValue3: 6, + * simpleValue1: 7, + * simpleValue2: 8, + * simpleValue3: 9, + * conditionalValue1: 10, + * conditionalValue2: 11, + * conditionalValue3: 12, + * }; + * ``` + */ +export type ObjectMappingInstructions = Record; +/** + * @internal + * + * A variant of the object mapping instruction for the `take` function. + * In this case, the source value is provided to the value function, turning it + * from a supplier into a mapper. + */ +export type SourceMappingInstructions = Record; +/** + * @internal + * + * An instruction set for assigning a value to a target object. 
+ */ +export type ObjectMappingInstruction = LazyValueInstruction | ConditionalLazyValueInstruction | SimpleValueInstruction | ConditionalValueInstruction | UnfilteredValue; +/** + * @internal + * + * non-array + */ +export type UnfilteredValue = any; +/** + * @internal + */ +export type LazyValueInstruction = [FilterStatus, ValueSupplier]; +/** + * @internal + */ +export type ConditionalLazyValueInstruction = [FilterStatusSupplier, ValueSupplier]; +/** + * @internal + */ +export type SimpleValueInstruction = [FilterStatus, Value]; +/** + * @internal + */ +export type ConditionalValueInstruction = [ValueFilteringFunction, Value]; +/** + * @internal + */ +export type SourceMappingInstruction = [(ValueFilteringFunction | FilterStatus)?, ValueMapper?, string?]; +/** + * @internal + * + * Filter is considered passed if + * 1. It is a boolean true. + * 2. It is not undefined and is itself truthy. + * 3. It is undefined and the corresponding _value_ is neither null nor undefined. + */ +export type FilterStatus = boolean | unknown | void; +/** + * @internal + * + * Supplies the filter check but not against any value as input. + */ +export type FilterStatusSupplier = () => boolean; +/** + * @internal + * + * Filter check with the given value. + */ +export type ValueFilteringFunction = (value: any) => boolean; +/** + * @internal + * + * Supplies the value for lazy evaluation. + */ +export type ValueSupplier = () => any; +/** + * @internal + * + * A function that maps the source value to the target value. + * Defaults to pass-through with nullish check. + */ +export type ValueMapper = (value: any) => any; +/** + * @internal + * + * A non-function value. + */ +export type Value = any; +/** + * @internal + * Internal/Private, for codegen use only. + * + * Transfer a set of keys from [instructions] to [target]. + * + * For each instruction in the record, the target key will be the instruction key. + * The target assignment will be conditional on the instruction's filter. 
+ * The target assigned value will be supplied by the instructions as an evaluable function or non-function value. + * + * @see ObjectMappingInstructions for an example. + */ +export declare function map(target: any, filter: (value: any) => boolean, instructions: Record): typeof target; +/** + * @internal + */ +export declare function map(instructions: ObjectMappingInstructions): any; +/** + * @internal + */ +export declare function map(target: any, instructions: ObjectMappingInstructions): typeof target; +/** + * Convert a regular object `{ k: v }` to `{ k: [, v] }` mapping instruction set with default + * filter. + * + * @internal + */ +export declare const convertMap: (target: any) => Record; +/** + * @param source - original object with data. + * @param instructions - how to map the data. + * @returns new object mapped from the source object. + * @internal + */ +export declare const take: (source: any, instructions: SourceMappingInstructions) => any; diff --git a/amplify/functions/downloadDocument/node_modules/@smithy/smithy-client/dist-types/parse-utils.d.ts b/amplify/functions/downloadDocument/node_modules/@smithy/smithy-client/dist-types/parse-utils.d.ts new file mode 100644 index 0000000..b5ded6f --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@smithy/smithy-client/dist-types/parse-utils.d.ts @@ -0,0 +1,270 @@ +/** + * @internal + * + * Give an input string, strictly parses a boolean value. + * + * @param value - The boolean string to parse. + * @returns true for "true", false for "false", otherwise an error is thrown. + */ +export declare const parseBoolean: (value: string) => boolean; +/** + * @internal + * + * Asserts a value is a boolean and returns it. + * Casts strings and numbers with a warning if there is evidence that they were + * intended to be booleans. + * + * @param value - A value that is expected to be a boolean. + * @returns The value if it's a boolean, undefined if it's null/undefined, + * otherwise an error is thrown. 
+ */ +export declare const expectBoolean: (value: any) => boolean | undefined; +/** + * @internal + * + * Asserts a value is a number and returns it. + * Casts strings with a warning if the string is a parseable number. + * This is to unblock slight API definition/implementation inconsistencies. + * + * @param value - A value that is expected to be a number. + * @returns The value if it's a number, undefined if it's null/undefined, + * otherwise an error is thrown. + */ +export declare const expectNumber: (value: any) => number | undefined; +/** + * @internal + * + * Asserts a value is a 32-bit float and returns it. + * + * @param value - A value that is expected to be a 32-bit float. + * @returns The value if it's a float, undefined if it's null/undefined, + * otherwise an error is thrown. + */ +export declare const expectFloat32: (value: any) => number | undefined; +/** + * @internal + * + * Asserts a value is an integer and returns it. + * + * @param value - A value that is expected to be an integer. + * @returns The value if it's an integer, undefined if it's null/undefined, + * otherwise an error is thrown. + */ +export declare const expectLong: (value: any) => number | undefined; +/** + * @internal + * + * @deprecated Use expectLong + */ +export declare const expectInt: (value: any) => number | undefined; +/** + * @internal + * + * Asserts a value is a 32-bit integer and returns it. + * + * @param value - A value that is expected to be an integer. + * @returns The value if it's an integer, undefined if it's null/undefined, + * otherwise an error is thrown. + */ +export declare const expectInt32: (value: any) => number | undefined; +/** + * @internal + * + * Asserts a value is a 16-bit integer and returns it. + * + * @param value - A value that is expected to be an integer. + * @returns The value if it's an integer, undefined if it's null/undefined, + * otherwise an error is thrown. 
+ */ +export declare const expectShort: (value: any) => number | undefined; +/** + * @internal + * + * Asserts a value is an 8-bit integer and returns it. + * + * @param value - A value that is expected to be an integer. + * @returns The value if it's an integer, undefined if it's null/undefined, + * otherwise an error is thrown. + */ +export declare const expectByte: (value: any) => number | undefined; +/** + * @internal + * + * Asserts a value is not null or undefined and returns it, or throws an error. + * + * @param value - A value that is expected to be defined + * @param location - The location where we're expecting to find a defined object (optional) + * @returns The value if it's not undefined, otherwise throws an error + */ +export declare const expectNonNull: (value: T | null | undefined, location?: string) => T; +/** + * @internal + * + * Asserts a value is an JSON-like object and returns it. This is expected to be used + * with values parsed from JSON (arrays, objects, numbers, strings, booleans). + * + * @param value - A value that is expected to be an object + * @returns The value if it's an object, undefined if it's null/undefined, + * otherwise an error is thrown. + */ +export declare const expectObject: (value: any) => Record | undefined; +/** + * @internal + * + * Asserts a value is a string and returns it. + * Numbers and boolean will be cast to strings with a warning. + * + * @param value - A value that is expected to be a string. + * @returns The value if it's a string, undefined if it's null/undefined, + * otherwise an error is thrown. + */ +export declare const expectString: (value: any) => string | undefined; +/** + * @internal + * + * Asserts a value is a JSON-like object with only one non-null/non-undefined key and + * returns it. + * + * @param value - A value that is expected to be an object with exactly one non-null, + * non-undefined key. 
+ * @returns the value if it's a union, undefined if it's null/undefined, otherwise + * an error is thrown. + */ +export declare const expectUnion: (value: unknown) => Record | undefined; +/** + * @internal + * + * Parses a value into a double. If the value is null or undefined, undefined + * will be returned. If the value is a string, it will be parsed by the standard + * parseFloat with one exception: NaN may only be explicitly set as the string + * "NaN", any implicit Nan values will result in an error being thrown. If any + * other type is provided, an exception will be thrown. + * + * @param value - A number or string representation of a double. + * @returns The value as a number, or undefined if it's null/undefined. + */ +export declare const strictParseDouble: (value: string | number) => number | undefined; +/** + * @internal + * + * @deprecated Use strictParseDouble + */ +export declare const strictParseFloat: (value: string | number) => number | undefined; +/** + * @internal + * + * Parses a value into a float. If the value is null or undefined, undefined + * will be returned. If the value is a string, it will be parsed by the standard + * parseFloat with one exception: NaN may only be explicitly set as the string + * "NaN", any implicit Nan values will result in an error being thrown. If any + * other type is provided, an exception will be thrown. + * + * @param value - A number or string representation of a float. + * @returns The value as a number, or undefined if it's null/undefined. + */ +export declare const strictParseFloat32: (value: string | number) => number | undefined; +/** + * @internal + * + * Asserts a value is a number and returns it. If the value is a string + * representation of a non-numeric number type (NaN, Infinity, -Infinity), + * the value will be parsed. Any other string value will result in an exception + * being thrown. Null or undefined will be returned as undefined. Any other + * type will result in an exception being thrown. 
+ * + * @param value - A number or string representation of a non-numeric float. + * @returns The value as a number, or undefined if it's null/undefined. + */ +export declare const limitedParseDouble: (value: string | number) => number | undefined; +/** + * @internal + * + * @deprecated Use limitedParseDouble + */ +export declare const handleFloat: (value: string | number) => number | undefined; +/** + * @internal + * + * @deprecated Use limitedParseDouble + */ +export declare const limitedParseFloat: (value: string | number) => number | undefined; +/** + * @internal + * + * Asserts a value is a 32-bit float and returns it. If the value is a string + * representation of a non-numeric number type (NaN, Infinity, -Infinity), + * the value will be parsed. Any other string value will result in an exception + * being thrown. Null or undefined will be returned as undefined. Any other + * type will result in an exception being thrown. + * + * @param value - A number or string representation of a non-numeric float. + * @returns The value as a number, or undefined if it's null/undefined. + */ +export declare const limitedParseFloat32: (value: string | number) => number | undefined; +/** + * @internal + * + * Parses a value into an integer. If the value is null or undefined, undefined + * will be returned. If the value is a string, it will be parsed by parseFloat + * and the result will be asserted to be an integer. If the parsed value is not + * an integer, or the raw value is any type other than a string or number, an + * exception will be thrown. + * + * @param value - A number or string representation of an integer. + * @returns The value as a number, or undefined if it's null/undefined. 
+ */ +export declare const strictParseLong: (value: string | number) => number | undefined; +/** + * @internal + * + * @deprecated Use strictParseLong + */ +export declare const strictParseInt: (value: string | number) => number | undefined; +/** + * @internal + * + * Parses a value into a 32-bit integer. If the value is null or undefined, undefined + * will be returned. If the value is a string, it will be parsed by parseFloat + * and the result will be asserted to be an integer. If the parsed value is not + * an integer, or the raw value is any type other than a string or number, an + * exception will be thrown. + * + * @param value - A number or string representation of a 32-bit integer. + * @returns The value as a number, or undefined if it's null/undefined. + */ +export declare const strictParseInt32: (value: string | number) => number | undefined; +/** + * @internal + * + * Parses a value into a 16-bit integer. If the value is null or undefined, undefined + * will be returned. If the value is a string, it will be parsed by parseFloat + * and the result will be asserted to be an integer. If the parsed value is not + * an integer, or the raw value is any type other than a string or number, an + * exception will be thrown. + * + * @param value - A number or string representation of a 16-bit integer. + * @returns The value as a number, or undefined if it's null/undefined. + */ +export declare const strictParseShort: (value: string | number) => number | undefined; +/** + * @internal + * + * Parses a value into an 8-bit integer. If the value is null or undefined, undefined + * will be returned. If the value is a string, it will be parsed by parseFloat + * and the result will be asserted to be an integer. If the parsed value is not + * an integer, or the raw value is any type other than a string or number, an + * exception will be thrown. + * + * @param value - A number or string representation of an 8-bit integer. 
+ * @returns The value as a number, or undefined if it's null/undefined. + */ +export declare const strictParseByte: (value: string | number) => number | undefined; +/** + * @internal + */ +export declare const logger: { + warn: { + (...data: any[]): void; + (message?: any, ...optionalParams: any[]): void; + }; +}; diff --git a/amplify/functions/downloadDocument/node_modules/@smithy/smithy-client/dist-types/quote-header.d.ts b/amplify/functions/downloadDocument/node_modules/@smithy/smithy-client/dist-types/quote-header.d.ts new file mode 100644 index 0000000..73d6c16 --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@smithy/smithy-client/dist-types/quote-header.d.ts @@ -0,0 +1,6 @@ +/** + * @public + * @param part - header list element + * @returns quoted string if part contains delimiter. + */ +export declare function quoteHeader(part: string): string; diff --git a/amplify/functions/downloadDocument/node_modules/@smithy/smithy-client/dist-types/resolve-path.d.ts b/amplify/functions/downloadDocument/node_modules/@smithy/smithy-client/dist-types/resolve-path.d.ts new file mode 100644 index 0000000..2a3204f --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@smithy/smithy-client/dist-types/resolve-path.d.ts @@ -0,0 +1,5 @@ +/** + * @internal + * Backwards compatibility re-export. + */ +export { resolvedPath } from "@smithy/core/protocols"; diff --git a/amplify/functions/downloadDocument/node_modules/@smithy/smithy-client/dist-types/ser-utils.d.ts b/amplify/functions/downloadDocument/node_modules/@smithy/smithy-client/dist-types/ser-utils.d.ts new file mode 100644 index 0000000..ae03c61 --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@smithy/smithy-client/dist-types/ser-utils.d.ts @@ -0,0 +1,15 @@ +/** + * @internal + * + * Serializes a number, turning non-numeric values into strings. + * + * @param value - The number to serialize. + * @returns A number, or a string if the given number was non-numeric. 
+ */ +export declare const serializeFloat: (value: number) => string | number; +/** + * @internal + * @param date - to be serialized. + * @returns https://smithy.io/2.0/spec/protocol-traits.html#timestampformat-trait date-time format. + */ +export declare const serializeDateTime: (date: Date) => string; diff --git a/amplify/functions/downloadDocument/node_modules/@smithy/smithy-client/dist-types/serde-json.d.ts b/amplify/functions/downloadDocument/node_modules/@smithy/smithy-client/dist-types/serde-json.d.ts new file mode 100644 index 0000000..96ac476 --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@smithy/smithy-client/dist-types/serde-json.d.ts @@ -0,0 +1,12 @@ +/** + * @internal + * + * Maps an object through the default JSON serde behavior. + * This means removing nullish fields and un-sparsifying lists. + * + * This is also used by Smithy RPCv2 CBOR as the default serde behavior. + * + * @param obj - to be checked. + * @returns same object with default serde behavior applied. + */ +export declare const _json: (obj: any) => any; diff --git a/amplify/functions/downloadDocument/node_modules/@smithy/smithy-client/dist-types/split-every.d.ts b/amplify/functions/downloadDocument/node_modules/@smithy/smithy-client/dist-types/split-every.d.ts new file mode 100644 index 0000000..45a0229 --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@smithy/smithy-client/dist-types/split-every.d.ts @@ -0,0 +1,11 @@ +/** + * @internal + * + * Given an input string, splits based on the delimiter after a given + * number of delimiters has been encountered. + * + * @param value - The input string to split. + * @param delimiter - The delimiter to split on. + * @param numDelimiters - The number of delimiters to have encountered to split. 
+ */ +export declare function splitEvery(value: string, delimiter: string, numDelimiters: number): Array; diff --git a/amplify/functions/downloadDocument/node_modules/@smithy/smithy-client/dist-types/split-header.d.ts b/amplify/functions/downloadDocument/node_modules/@smithy/smithy-client/dist-types/split-header.d.ts new file mode 100644 index 0000000..0f51651 --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@smithy/smithy-client/dist-types/split-header.d.ts @@ -0,0 +1,5 @@ +/** + * @param value - header string value. + * @returns value split by commas that aren't in quotes. + */ +export declare const splitHeader: (value: string) => string[]; diff --git a/amplify/functions/downloadDocument/node_modules/@smithy/smithy-client/dist-types/ts3.4/NoOpLogger.d.ts b/amplify/functions/downloadDocument/node_modules/@smithy/smithy-client/dist-types/ts3.4/NoOpLogger.d.ts new file mode 100644 index 0000000..a9a1062 --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@smithy/smithy-client/dist-types/ts3.4/NoOpLogger.d.ts @@ -0,0 +1,11 @@ +import { Logger } from "@smithy/types"; +/** + * @internal + */ +export declare class NoOpLogger implements Logger { + trace(): void; + debug(): void; + info(): void; + warn(): void; + error(): void; +} diff --git a/amplify/functions/downloadDocument/node_modules/@smithy/smithy-client/dist-types/ts3.4/client.d.ts b/amplify/functions/downloadDocument/node_modules/@smithy/smithy-client/dist-types/ts3.4/client.d.ts new file mode 100644 index 0000000..578541e --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@smithy/smithy-client/dist-types/ts3.4/client.d.ts @@ -0,0 +1,61 @@ +import { Client as IClient, Command, FetchHttpHandlerOptions, MetadataBearer, MiddlewareStack, NodeHttpHandlerOptions, RequestHandler } from "@smithy/types"; +/** + * @public + */ +export interface SmithyConfiguration { + requestHandler: RequestHandler | NodeHttpHandlerOptions | FetchHttpHandlerOptions | Record; + /** + * 
The API version set internally by the SDK, and is + * not planned to be used by customer code. + * @internal + */ + readonly apiVersion: string; + /** + * @public + * + * Default false. + * + * When true, the client will only resolve the middleware stack once per + * Command class. This means modifying the middlewareStack of the + * command or client after requests have been made will not be + * recognized. + * + * Calling client.destroy() also clears this cache. + * + * Enable this only if needing the additional time saved (0-1ms per request) + * and not needing middleware modifications between requests. + */ + cacheMiddleware?: boolean; +} +/** + * @internal + */ +export type SmithyResolvedConfiguration = { + requestHandler: RequestHandler; + readonly apiVersion: string; + cacheMiddleware?: boolean; +}; +/** + * @public + */ +export declare class Client> implements IClient { + readonly config: ResolvedClientConfiguration; + middlewareStack: MiddlewareStack; + /** + * Holds an object reference to the initial configuration object. + * Used to check that the config resolver stack does not create + * dangling instances of an intermediate form of the configuration object. + * + * @internal + */ + initConfig?: object; + /** + * May be used to cache the resolved handler function for a Command class. 
+ */ + private handlers?; + constructor(config: ResolvedClientConfiguration); + send(command: Command>, options?: HandlerOptions): Promise; + send(command: Command>, cb: (err: any, data?: OutputType) => void): void; + send(command: Command>, options: HandlerOptions, cb: (err: any, data?: OutputType) => void): void; + destroy(): void; +} diff --git a/amplify/functions/downloadDocument/node_modules/@smithy/smithy-client/dist-types/ts3.4/collect-stream-body.d.ts b/amplify/functions/downloadDocument/node_modules/@smithy/smithy-client/dist-types/ts3.4/collect-stream-body.d.ts new file mode 100644 index 0000000..c53a1e3 --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@smithy/smithy-client/dist-types/ts3.4/collect-stream-body.d.ts @@ -0,0 +1,5 @@ +/** + * @internal + * Backwards compatibility re-export. + */ +export { collectBody } from "@smithy/core/protocols"; diff --git a/amplify/functions/downloadDocument/node_modules/@smithy/smithy-client/dist-types/ts3.4/command.d.ts b/amplify/functions/downloadDocument/node_modules/@smithy/smithy-client/dist-types/ts3.4/command.d.ts new file mode 100644 index 0000000..8b42ff6 --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@smithy/smithy-client/dist-types/ts3.4/command.d.ts @@ -0,0 +1,113 @@ +import { EndpointParameterInstructions } from "@smithy/middleware-endpoint"; +import { Command as ICommand, Handler, HandlerExecutionContext, HttpRequest as IHttpRequest, HttpResponse as IHttpResponse, Logger, MetadataBearer, MiddlewareStack as IMiddlewareStack, OptionalParameter, Pluggable, RequestHandler, SerdeContext } from "@smithy/types"; +/** + * @public + */ +export declare abstract class Command implements ICommand { + abstract input: Input; + readonly middlewareStack: IMiddlewareStack; + /** + * Factory for Command ClassBuilder. 
+ * @internal + */ + static classBuilder; + }, SI extends object = any, SO extends MetadataBearer = any>(): ClassBuilder; + abstract resolveMiddleware(stack: IMiddlewareStack, configuration: ResolvedClientConfiguration, options: any): Handler; + /** + * @internal + */ + resolveMiddlewareWithContext(clientStack: IMiddlewareStack, configuration: { + logger: Logger; + requestHandler: RequestHandler; + }, options: any, { middlewareFn, clientName, commandName, inputFilterSensitiveLog, outputFilterSensitiveLog, smithyContext, additionalContext, CommandCtor, }: ResolveMiddlewareContextArgs): import("@smithy/types").InitializeHandler; +} +/** + * @internal + */ +type ResolveMiddlewareContextArgs = { + middlewareFn: (CommandCtor: any, clientStack: any, config: any, options: any) => Pluggable[]; + clientName: string; + commandName: string; + smithyContext: Record; + additionalContext: HandlerExecutionContext; + inputFilterSensitiveLog: (_: any) => any; + outputFilterSensitiveLog: (_: any) => any; + CommandCtor: any; +}; +/** + * @internal + */ +declare class ClassBuilder; +}, SI extends object = any, SO extends MetadataBearer = any> { + private _init; + private _ep; + private _middlewareFn; + private _commandName; + private _clientName; + private _additionalContext; + private _smithyContext; + private _inputFilterSensitiveLog; + private _outputFilterSensitiveLog; + private _serializer; + private _deserializer; + /** + * Optional init callback. + */ + init(cb: (_: Command) => void): void; + /** + * Set the endpoint parameter instructions. + */ + ep(endpointParameterInstructions: EndpointParameterInstructions): ClassBuilder; + /** + * Add any number of middleware. + */ + m(middlewareSupplier: (CommandCtor: any, clientStack: any, config: any, options: any) => Pluggable[]): ClassBuilder; + /** + * Set the initial handler execution context Smithy field. 
+ */ + s(service: string, operation: string, smithyContext?: Record): ClassBuilder; + /** + * Set the initial handler execution context. + */ + c(additionalContext?: HandlerExecutionContext): ClassBuilder; + /** + * Set constant string identifiers for the operation. + */ + n(clientName: string, commandName: string): ClassBuilder; + /** + * Set the input and output sensistive log filters. + */ + f(inputFilter?: (_: any) => any, outputFilter?: (_: any) => any): ClassBuilder; + /** + * Sets the serializer. + */ + ser(serializer: (input: I, context?: SerdeContext | any) => Promise): ClassBuilder; + /** + * Sets the deserializer. + */ + de(deserializer: (output: IHttpResponse, context?: SerdeContext | any) => Promise): ClassBuilder; + /** + * @returns a Command class with the classBuilder properties. + */ + build(): { + new (input: I): CommandImpl; + new (...[input]: OptionalParameter): CommandImpl; + getEndpointParameterInstructions(): EndpointParameterInstructions; + }; +} +/** + * A concrete implementation of ICommand with no abstract members. 
+ * @public + */ +export interface CommandImpl; +}, SI extends object = any, SO extends MetadataBearer = any> extends Command { + readonly input: I; + resolveMiddleware(stack: IMiddlewareStack, configuration: C, options: any): Handler; +} +export {}; diff --git a/amplify/functions/downloadDocument/node_modules/@smithy/smithy-client/dist-types/ts3.4/constants.d.ts b/amplify/functions/downloadDocument/node_modules/@smithy/smithy-client/dist-types/ts3.4/constants.d.ts new file mode 100644 index 0000000..eab978f --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@smithy/smithy-client/dist-types/ts3.4/constants.d.ts @@ -0,0 +1,4 @@ +/** + * @internal + */ +export declare const SENSITIVE_STRING = "***SensitiveInformation***"; diff --git a/amplify/functions/downloadDocument/node_modules/@smithy/smithy-client/dist-types/ts3.4/create-aggregated-client.d.ts b/amplify/functions/downloadDocument/node_modules/@smithy/smithy-client/dist-types/ts3.4/create-aggregated-client.d.ts new file mode 100644 index 0000000..ded1999 --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@smithy/smithy-client/dist-types/ts3.4/create-aggregated-client.d.ts @@ -0,0 +1,9 @@ +import { Client } from "./client"; +/** + * @internal + * + * @param commands - command lookup container. + * @param client - client instance on which to add aggregated methods. + * @returns an aggregated client with dynamically created methods. 
+ */ +export declare const createAggregatedClient: (commands: Record, Client: new (...args: any) => Client) => void; diff --git a/amplify/functions/downloadDocument/node_modules/@smithy/smithy-client/dist-types/ts3.4/date-utils.d.ts b/amplify/functions/downloadDocument/node_modules/@smithy/smithy-client/dist-types/ts3.4/date-utils.d.ts new file mode 100644 index 0000000..41071c2 --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@smithy/smithy-client/dist-types/ts3.4/date-utils.d.ts @@ -0,0 +1,73 @@ +/** + * @internal + * + * Builds a proper UTC HttpDate timestamp from a Date object + * since not all environments will have this as the expected + * format. + * + * @see {@link https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Global_Objects/Date/toUTCString} + * - Prior to ECMAScript 2018, the format of the return value + * - varied according to the platform. The most common return + * - value was an RFC-1123 formatted date stamp, which is a + * - slightly updated version of RFC-822 date stamps. + */ +export declare function dateToUtcString(date: Date): string; +/** + * @internal + * + * Parses a value into a Date. Returns undefined if the input is null or + * undefined, throws an error if the input is not a string that can be parsed + * as an RFC 3339 date. + * + * Input strings must conform to RFC3339 section 5.6, and cannot have a UTC + * offset. Fractional precision is supported. + * + * @see {@link https://xml2rfc.tools.ietf.org/public/rfc/html/rfc3339.html#anchor14} + * + * @param value - the value to parse + * @returns a Date or undefined + */ +export declare const parseRfc3339DateTime: (value: unknown) => Date | undefined; +/** + * @internal + * + * Parses a value into a Date. Returns undefined if the input is null or + * undefined, throws an error if the input is not a string that can be parsed + * as an RFC 3339 date. + * + * Input strings must conform to RFC3339 section 5.6, and can have a UTC + * offset. 
Fractional precision is supported. + * + * @see {@link https://xml2rfc.tools.ietf.org/public/rfc/html/rfc3339.html#anchor14} + * + * @param value - the value to parse + * @returns a Date or undefined + */ +export declare const parseRfc3339DateTimeWithOffset: (value: unknown) => Date | undefined; +/** + * @internal + * + * Parses a value into a Date. Returns undefined if the input is null or + * undefined, throws an error if the input is not a string that can be parsed + * as an RFC 7231 IMF-fixdate or obs-date. + * + * Input strings must conform to RFC7231 section 7.1.1.1. Fractional seconds are supported. + * + * @see {@link https://datatracker.ietf.org/doc/html/rfc7231.html#section-7.1.1.1} + * + * @param value - the value to parse + * @returns a Date or undefined + */ +export declare const parseRfc7231DateTime: (value: unknown) => Date | undefined; +/** + * @internal + * + * Parses a value into a Date. Returns undefined if the input is null or + * undefined, throws an error if the input is not a number or a parseable string. + * + * Input strings must be an integer or floating point number. Fractional seconds are supported. + * + * @param value - the value to parse + * @returns a Date or undefined + */ +export declare const parseEpochTimestamp: (value: unknown) => Date | undefined; diff --git a/amplify/functions/downloadDocument/node_modules/@smithy/smithy-client/dist-types/ts3.4/default-error-handler.d.ts b/amplify/functions/downloadDocument/node_modules/@smithy/smithy-client/dist-types/ts3.4/default-error-handler.d.ts new file mode 100644 index 0000000..e9852ba --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@smithy/smithy-client/dist-types/ts3.4/default-error-handler.d.ts @@ -0,0 +1,13 @@ +/** + * Always throws an error with the given `exceptionCtor` and other arguments. + * This is only called from an error handling code path. 
+ * + * @internal + */ +export declare const throwDefaultError: ({ output, parsedBody, exceptionCtor, errorCode }: any) => never; +/** + * @internal + * + * Creates {@link throwDefaultError} with bound ExceptionCtor. + */ +export declare const withBaseException: (ExceptionCtor: new (...args: any) => any) => any; diff --git a/amplify/functions/downloadDocument/node_modules/@smithy/smithy-client/dist-types/ts3.4/defaults-mode.d.ts b/amplify/functions/downloadDocument/node_modules/@smithy/smithy-client/dist-types/ts3.4/defaults-mode.d.ts new file mode 100644 index 0000000..c8a89ed --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@smithy/smithy-client/dist-types/ts3.4/defaults-mode.d.ts @@ -0,0 +1,28 @@ +/** + * @internal + */ +export declare const loadConfigsForDefaultMode: (mode: ResolvedDefaultsMode) => DefaultsModeConfigs; +/** + * Option determining how certain default configuration options are resolved in the SDK. It can be one of the value listed below: + * * `"standard"`:

The STANDARD mode provides the latest recommended default values that should be safe to run in most scenarios

Note that the default values vended from this mode might change as best practices may evolve. As a result, it is encouraged to perform tests when upgrading the SDK

+ * * `"in-region"`:

The IN_REGION mode builds on the standard mode and includes optimization tailored for applications which call AWS services from within the same AWS region

Note that the default values vended from this mode might change as best practices may evolve. As a result, it is encouraged to perform tests when upgrading the SDK

+ * * `"cross-region"`:

The CROSS_REGION mode builds on the standard mode and includes optimization tailored for applications which call AWS services in a different region

Note that the default values vended from this mode might change as best practices may evolve. As a result, it is encouraged to perform tests when upgrading the SDK

+ * * `"mobile"`:

The MOBILE mode builds on the standard mode and includes optimization tailored for mobile applications

Note that the default values vended from this mode might change as best practices may evolve. As a result, it is encouraged to perform tests when upgrading the SDK

+ * * `"auto"`:

The AUTO mode is an experimental mode that builds on the standard mode. The SDK will attempt to discover the execution environment to determine the appropriate settings automatically.

Note that the auto detection is heuristics-based and does not guarantee 100% accuracy. STANDARD mode will be used if the execution environment cannot be determined. The auto detection might query EC2 Instance Metadata service, which might introduce latency. Therefore we recommend choosing an explicit defaults_mode instead if startup latency is critical to your application

+ * * `"legacy"`:

The LEGACY mode provides default settings that vary per SDK and were used prior to establishment of defaults_mode

+ * + * @defaultValue "legacy" + */ +export type DefaultsMode = "standard" | "in-region" | "cross-region" | "mobile" | "auto" | "legacy"; +/** + * @internal + */ +export type ResolvedDefaultsMode = Exclude; +/** + * @internal + */ +export interface DefaultsModeConfigs { + retryMode?: string; + connectionTimeout?: number; + requestTimeout?: number; +} diff --git a/amplify/functions/downloadDocument/node_modules/@smithy/smithy-client/dist-types/ts3.4/emitWarningIfUnsupportedVersion.d.ts b/amplify/functions/downloadDocument/node_modules/@smithy/smithy-client/dist-types/ts3.4/emitWarningIfUnsupportedVersion.d.ts new file mode 100644 index 0000000..f0284ef --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@smithy/smithy-client/dist-types/ts3.4/emitWarningIfUnsupportedVersion.d.ts @@ -0,0 +1,8 @@ +/** + * @internal + * + * Emits warning if the provided Node.js version string is pending deprecation. + * + * @param version - The Node.js version string. + */ +export declare const emitWarningIfUnsupportedVersion: (version: string) => void; diff --git a/amplify/functions/downloadDocument/node_modules/@smithy/smithy-client/dist-types/ts3.4/exceptions.d.ts b/amplify/functions/downloadDocument/node_modules/@smithy/smithy-client/dist-types/ts3.4/exceptions.d.ts new file mode 100644 index 0000000..675354a --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@smithy/smithy-client/dist-types/ts3.4/exceptions.d.ts @@ -0,0 +1,42 @@ +import { HttpResponse, MetadataBearer, ResponseMetadata, RetryableTrait, SmithyException } from "@smithy/types"; +/** + * The type of the exception class constructor parameter. The returned type contains the properties + * in the `ExceptionType` but not in the `BaseExceptionType`. If the `BaseExceptionType` contains + * `$metadata` and `message` properties, it's also included in the returned type. 
+ * @internal + */ +export type ExceptionOptionType = Pick>>; +/** + * @public + */ +export interface ServiceExceptionOptions extends SmithyException, MetadataBearer { + message?: string; +} +/** + * @public + * + * Base exception class for the exceptions from the server-side. + */ +export declare class ServiceException extends Error implements SmithyException, MetadataBearer { + readonly $fault: "client" | "server"; + $response?: HttpResponse; + $retryable?: RetryableTrait; + $metadata: ResponseMetadata; + constructor(options: ServiceExceptionOptions); + /** + * Checks if a value is an instance of ServiceException (duck typed) + */ + static isInstance(value: unknown): value is ServiceException; + /** + * Custom instanceof check to support the operator for ServiceException base class + */ + static [Symbol.hasInstance](instance: unknown): boolean; +} +/** + * This method inject unmodeled member to a deserialized SDK exception, + * and load the error message from different possible keys('message', + * 'Message'). + * + * @internal + */ +export declare const decorateServiceException: (exception: E, additions?: Record) => E; diff --git a/amplify/functions/downloadDocument/node_modules/@smithy/smithy-client/dist-types/ts3.4/extended-encode-uri-component.d.ts b/amplify/functions/downloadDocument/node_modules/@smithy/smithy-client/dist-types/ts3.4/extended-encode-uri-component.d.ts new file mode 100644 index 0000000..4e510cf --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@smithy/smithy-client/dist-types/ts3.4/extended-encode-uri-component.d.ts @@ -0,0 +1,5 @@ +/** + * @internal + * Backwards compatibility re-export. 
+ */ +export { extendedEncodeURIComponent } from "@smithy/core/protocols"; diff --git a/amplify/functions/downloadDocument/node_modules/@smithy/smithy-client/dist-types/ts3.4/extensions/checksum.d.ts b/amplify/functions/downloadDocument/node_modules/@smithy/smithy-client/dist-types/ts3.4/extensions/checksum.d.ts new file mode 100644 index 0000000..c5f06b8 --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@smithy/smithy-client/dist-types/ts3.4/extensions/checksum.d.ts @@ -0,0 +1,24 @@ +import { ChecksumAlgorithm, ChecksumConfiguration, ChecksumConstructor, HashConstructor } from "@smithy/types"; +import { AlgorithmId } from "@smithy/types"; +export { AlgorithmId, ChecksumAlgorithm, ChecksumConfiguration }; +/** + * @internal + */ +export type PartialChecksumRuntimeConfigType = Partial<{ + sha256: ChecksumConstructor | HashConstructor; + md5: ChecksumConstructor | HashConstructor; + crc32: ChecksumConstructor | HashConstructor; + crc32c: ChecksumConstructor | HashConstructor; + sha1: ChecksumConstructor | HashConstructor; +}>; +/** + * @internal + */ +export declare const getChecksumConfiguration: (runtimeConfig: PartialChecksumRuntimeConfigType) => { + addChecksumAlgorithm(algo: ChecksumAlgorithm): void; + checksumAlgorithms(): ChecksumAlgorithm[]; +}; +/** + * @internal + */ +export declare const resolveChecksumRuntimeConfig: (clientConfig: ChecksumConfiguration) => PartialChecksumRuntimeConfigType; diff --git a/amplify/functions/downloadDocument/node_modules/@smithy/smithy-client/dist-types/ts3.4/extensions/defaultExtensionConfiguration.d.ts b/amplify/functions/downloadDocument/node_modules/@smithy/smithy-client/dist-types/ts3.4/extensions/defaultExtensionConfiguration.d.ts new file mode 100644 index 0000000..d8c05bb --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@smithy/smithy-client/dist-types/ts3.4/extensions/defaultExtensionConfiguration.d.ts @@ -0,0 +1,38 @@ +import { DefaultExtensionConfiguration } from "@smithy/types"; 
+import { PartialChecksumRuntimeConfigType } from "./checksum"; +import { PartialRetryRuntimeConfigType } from "./retry"; +/** + * @internal + */ +export type DefaultExtensionRuntimeConfigType = PartialRetryRuntimeConfigType & PartialChecksumRuntimeConfigType; +/** + * @internal + * + * Helper function to resolve default extension configuration from runtime config + */ +export declare const getDefaultExtensionConfiguration: (runtimeConfig: DefaultExtensionRuntimeConfigType) => { + addChecksumAlgorithm(algo: import("@smithy/types").ChecksumAlgorithm): void; + checksumAlgorithms(): import("@smithy/types").ChecksumAlgorithm[]; +} & { + setRetryStrategy(retryStrategy: import("@smithy/types").Provider): void; + retryStrategy(): import("@smithy/types").Provider; +}; +/** + * @deprecated use getDefaultExtensionConfiguration + * @internal + * + * Helper function to resolve default extension configuration from runtime config + */ +export declare const getDefaultClientConfiguration: (runtimeConfig: DefaultExtensionRuntimeConfigType) => { + addChecksumAlgorithm(algo: import("@smithy/types").ChecksumAlgorithm): void; + checksumAlgorithms(): import("@smithy/types").ChecksumAlgorithm[]; +} & { + setRetryStrategy(retryStrategy: import("@smithy/types").Provider): void; + retryStrategy(): import("@smithy/types").Provider; +}; +/** + * @internal + * + * Helper function to resolve runtime config from default extension configuration + */ +export declare const resolveDefaultRuntimeConfig: (config: DefaultExtensionConfiguration) => DefaultExtensionRuntimeConfigType; diff --git a/amplify/functions/downloadDocument/node_modules/@smithy/smithy-client/dist-types/ts3.4/extensions/index.d.ts b/amplify/functions/downloadDocument/node_modules/@smithy/smithy-client/dist-types/ts3.4/extensions/index.d.ts new file mode 100644 index 0000000..04e3c83 --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@smithy/smithy-client/dist-types/ts3.4/extensions/index.d.ts @@ -0,0 +1 @@ +export 
* from "./defaultExtensionConfiguration"; diff --git a/amplify/functions/downloadDocument/node_modules/@smithy/smithy-client/dist-types/ts3.4/extensions/retry.d.ts b/amplify/functions/downloadDocument/node_modules/@smithy/smithy-client/dist-types/ts3.4/extensions/retry.d.ts new file mode 100644 index 0000000..b41fa3c --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@smithy/smithy-client/dist-types/ts3.4/extensions/retry.d.ts @@ -0,0 +1,18 @@ +import { Provider, RetryStrategy, RetryStrategyConfiguration, RetryStrategyV2 } from "@smithy/types"; +/** + * @internal + */ +export type PartialRetryRuntimeConfigType = Partial<{ + retryStrategy: Provider; +}>; +/** + * @internal + */ +export declare const getRetryConfiguration: (runtimeConfig: PartialRetryRuntimeConfigType) => { + setRetryStrategy(retryStrategy: Provider): void; + retryStrategy(): Provider; +}; +/** + * @internal + */ +export declare const resolveRetryRuntimeConfig: (retryStrategyConfiguration: RetryStrategyConfiguration) => PartialRetryRuntimeConfigType; diff --git a/amplify/functions/downloadDocument/node_modules/@smithy/smithy-client/dist-types/ts3.4/get-array-if-single-item.d.ts b/amplify/functions/downloadDocument/node_modules/@smithy/smithy-client/dist-types/ts3.4/get-array-if-single-item.d.ts new file mode 100644 index 0000000..dbbd280 --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@smithy/smithy-client/dist-types/ts3.4/get-array-if-single-item.d.ts @@ -0,0 +1,7 @@ +/** + * @internal + * + * The XML parser will set one K:V for a member that could + * return multiple entries but only has one. 
+ */ +export declare const getArrayIfSingleItem: (mayBeArray: T) => T | T[]; diff --git a/amplify/functions/downloadDocument/node_modules/@smithy/smithy-client/dist-types/ts3.4/get-value-from-text-node.d.ts b/amplify/functions/downloadDocument/node_modules/@smithy/smithy-client/dist-types/ts3.4/get-value-from-text-node.d.ts new file mode 100644 index 0000000..d56771e --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@smithy/smithy-client/dist-types/ts3.4/get-value-from-text-node.d.ts @@ -0,0 +1,7 @@ +/** + * @internal + * + * Recursively parses object and populates value is node from + * "#text" key if it's available + */ +export declare const getValueFromTextNode: (obj: any) => any; diff --git a/amplify/functions/downloadDocument/node_modules/@smithy/smithy-client/dist-types/ts3.4/index.d.ts b/amplify/functions/downloadDocument/node_modules/@smithy/smithy-client/dist-types/ts3.4/index.d.ts new file mode 100644 index 0000000..684c977 --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@smithy/smithy-client/dist-types/ts3.4/index.d.ts @@ -0,0 +1,26 @@ +export { DocumentType, SdkError, SmithyException } from "@smithy/types"; +export * from "./client"; +export * from "./collect-stream-body"; +export * from "./command"; +export * from "./constants"; +export * from "./create-aggregated-client"; +export * from "./date-utils"; +export * from "./default-error-handler"; +export * from "./defaults-mode"; +export * from "./emitWarningIfUnsupportedVersion"; +export * from "./exceptions"; +export * from "./extended-encode-uri-component"; +export * from "./extensions"; +export * from "./get-array-if-single-item"; +export * from "./get-value-from-text-node"; +export * from "./is-serializable-header-value"; +export * from "./lazy-json"; +export * from "./NoOpLogger"; +export * from "./object-mapping"; +export * from "./parse-utils"; +export * from "./quote-header"; +export * from "./resolve-path"; +export * from "./ser-utils"; +export * from 
"./serde-json"; +export * from "./split-every"; +export * from "./split-header"; diff --git a/amplify/functions/downloadDocument/node_modules/@smithy/smithy-client/dist-types/ts3.4/is-serializable-header-value.d.ts b/amplify/functions/downloadDocument/node_modules/@smithy/smithy-client/dist-types/ts3.4/is-serializable-header-value.d.ts new file mode 100644 index 0000000..4d53109 --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@smithy/smithy-client/dist-types/ts3.4/is-serializable-header-value.d.ts @@ -0,0 +1,5 @@ +/** + * @internal + * @returns whether the header value is serializable. + */ +export declare const isSerializableHeaderValue: (value: any) => boolean; diff --git a/amplify/functions/downloadDocument/node_modules/@smithy/smithy-client/dist-types/ts3.4/lazy-json.d.ts b/amplify/functions/downloadDocument/node_modules/@smithy/smithy-client/dist-types/ts3.4/lazy-json.d.ts new file mode 100644 index 0000000..3a41bf3 --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@smithy/smithy-client/dist-types/ts3.4/lazy-json.d.ts @@ -0,0 +1,46 @@ +/** + * @public + * + * A model field with this type means that you may provide a JavaScript + * object in lieu of a JSON string, and it will be serialized to JSON + * automatically before being sent in a request. + * + * For responses, you will receive a "LazyJsonString", which is a boxed String object + * with additional mixin methods. + * To get the string value, call `.toString()`, or to get the JSON object value, + * call `.deserializeJSON()` or parse it yourself. + */ +export type AutomaticJsonStringConversion = Parameters[0] | LazyJsonString; +/** + * @internal + * + */ +export interface LazyJsonString extends String { + /** + * @returns the JSON parsing of the string value. + */ + deserializeJSON(): any; + /** + * @returns the original string value rather than a JSON.stringified value. 
+ */ + toJSON(): string; +} +/** + * @internal + * + * Extension of the native String class in the previous implementation + * has negative global performance impact on method dispatch for strings, + * and is generally discouraged. + * + * This current implementation may look strange, but is necessary to preserve the interface and + * behavior of extending the String class. + */ +export declare const LazyJsonString: { + (s: string): LazyJsonString; + new (s: string): LazyJsonString; + from(s: any): LazyJsonString; + /** + * @deprecated use #from. + */ + fromObject(s: any): LazyJsonString; +}; diff --git a/amplify/functions/downloadDocument/node_modules/@smithy/smithy-client/dist-types/ts3.4/object-mapping.d.ts b/amplify/functions/downloadDocument/node_modules/@smithy/smithy-client/dist-types/ts3.4/object-mapping.d.ts new file mode 100644 index 0000000..d658c16 --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@smithy/smithy-client/dist-types/ts3.4/object-mapping.d.ts @@ -0,0 +1,178 @@ +/** + * @internal + * + * A set of instructions for multiple keys. + * The aim is to provide a concise yet readable way to map and filter values + * onto a target object. 
+ * + * @example + * ```javascript + * const example: ObjectMappingInstructions = { + * lazyValue1: [, () => 1], + * lazyValue2: [, () => 2], + * lazyValue3: [, () => 3], + * lazyConditionalValue1: [() => true, () => 4], + * lazyConditionalValue2: [() => true, () => 5], + * lazyConditionalValue3: [true, () => 6], + * lazyConditionalValue4: [false, () => 44], + * lazyConditionalValue5: [() => false, () => 55], + * lazyConditionalValue6: ["", () => 66], + * simpleValue1: [, 7], + * simpleValue2: [, 8], + * simpleValue3: [, 9], + * conditionalValue1: [() => true, 10], + * conditionalValue2: [() => true, 11], + * conditionalValue3: [{}, 12], + * conditionalValue4: [false, 110], + * conditionalValue5: [() => false, 121], + * conditionalValue6: ["", 132], + * }; + * + * const exampleResult: Record = { + * lazyValue1: 1, + * lazyValue2: 2, + * lazyValue3: 3, + * lazyConditionalValue1: 4, + * lazyConditionalValue2: 5, + * lazyConditionalValue3: 6, + * simpleValue1: 7, + * simpleValue2: 8, + * simpleValue3: 9, + * conditionalValue1: 10, + * conditionalValue2: 11, + * conditionalValue3: 12, + * }; + * ``` + */ +export type ObjectMappingInstructions = Record; +/** + * @internal + * + * A variant of the object mapping instruction for the `take` function. + * In this case, the source value is provided to the value function, turning it + * from a supplier into a mapper. + */ +export type SourceMappingInstructions = Record; +/** + * @internal + * + * An instruction set for assigning a value to a target object. 
+ */ +export type ObjectMappingInstruction = LazyValueInstruction | ConditionalLazyValueInstruction | SimpleValueInstruction | ConditionalValueInstruction | UnfilteredValue; +/** + * @internal + * + * non-array + */ +export type UnfilteredValue = any; +/** + * @internal + */ +export type LazyValueInstruction = [ + FilterStatus, + ValueSupplier +]; +/** + * @internal + */ +export type ConditionalLazyValueInstruction = [ + FilterStatusSupplier, + ValueSupplier +]; +/** + * @internal + */ +export type SimpleValueInstruction = [ + FilterStatus, + Value +]; +/** + * @internal + */ +export type ConditionalValueInstruction = [ + ValueFilteringFunction, + Value +]; +/** + * @internal + */ +export type SourceMappingInstruction = [ + (ValueFilteringFunction | FilterStatus)?, + ValueMapper?, + string? +]; +/** + * @internal + * + * Filter is considered passed if + * 1. It is a boolean true. + * 2. It is not undefined and is itself truthy. + * 3. It is undefined and the corresponding _value_ is neither null nor undefined. + */ +export type FilterStatus = boolean | unknown | void; +/** + * @internal + * + * Supplies the filter check but not against any value as input. + */ +export type FilterStatusSupplier = () => boolean; +/** + * @internal + * + * Filter check with the given value. + */ +export type ValueFilteringFunction = (value: any) => boolean; +/** + * @internal + * + * Supplies the value for lazy evaluation. + */ +export type ValueSupplier = () => any; +/** + * @internal + * + * A function that maps the source value to the target value. + * Defaults to pass-through with nullish check. + */ +export type ValueMapper = (value: any) => any; +/** + * @internal + * + * A non-function value. + */ +export type Value = any; +/** + * @internal + * Internal/Private, for codegen use only. + * + * Transfer a set of keys from [instructions] to [target]. + * + * For each instruction in the record, the target key will be the instruction key. 
+ * The target assignment will be conditional on the instruction's filter. + * The target assigned value will be supplied by the instructions as an evaluable function or non-function value. + * + * @see ObjectMappingInstructions for an example. + */ +export declare function map(target: any, filter: (value: any) => boolean, instructions: Record): typeof target; +/** + * @internal + */ +export declare function map(instructions: ObjectMappingInstructions): any; +/** + * @internal + */ +export declare function map(target: any, instructions: ObjectMappingInstructions): typeof target; +/** + * Convert a regular object `{ k: v }` to `{ k: [, v] }` mapping instruction set with default + * filter. + * + * @internal + */ +export declare const convertMap: (target: any) => Record; +/** + * @param source - original object with data. + * @param instructions - how to map the data. + * @returns new object mapped from the source object. + * @internal + */ +export declare const take: (source: any, instructions: SourceMappingInstructions) => any; diff --git a/amplify/functions/downloadDocument/node_modules/@smithy/smithy-client/dist-types/ts3.4/parse-utils.d.ts b/amplify/functions/downloadDocument/node_modules/@smithy/smithy-client/dist-types/ts3.4/parse-utils.d.ts new file mode 100644 index 0000000..e4c8aef --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@smithy/smithy-client/dist-types/ts3.4/parse-utils.d.ts @@ -0,0 +1,270 @@ +/** + * @internal + * + * Give an input string, strictly parses a boolean value. + * + * @param value - The boolean string to parse. + * @returns true for "true", false for "false", otherwise an error is thrown. + */ +export declare const parseBoolean: (value: string) => boolean; +/** + * @internal + * + * Asserts a value is a boolean and returns it. + * Casts strings and numbers with a warning if there is evidence that they were + * intended to be booleans. + * + * @param value - A value that is expected to be a boolean. 
+ * @returns The value if it's a boolean, undefined if it's null/undefined, + * otherwise an error is thrown. + */ +export declare const expectBoolean: (value: any) => boolean | undefined; +/** + * @internal + * + * Asserts a value is a number and returns it. + * Casts strings with a warning if the string is a parseable number. + * This is to unblock slight API definition/implementation inconsistencies. + * + * @param value - A value that is expected to be a number. + * @returns The value if it's a number, undefined if it's null/undefined, + * otherwise an error is thrown. + */ +export declare const expectNumber: (value: any) => number | undefined; +/** + * @internal + * + * Asserts a value is a 32-bit float and returns it. + * + * @param value - A value that is expected to be a 32-bit float. + * @returns The value if it's a float, undefined if it's null/undefined, + * otherwise an error is thrown. + */ +export declare const expectFloat32: (value: any) => number | undefined; +/** + * @internal + * + * Asserts a value is an integer and returns it. + * + * @param value - A value that is expected to be an integer. + * @returns The value if it's an integer, undefined if it's null/undefined, + * otherwise an error is thrown. + */ +export declare const expectLong: (value: any) => number | undefined; +/** + * @internal + * + * @deprecated Use expectLong + */ +export declare const expectInt: (value: any) => number | undefined; +/** + * @internal + * + * Asserts a value is a 32-bit integer and returns it. + * + * @param value - A value that is expected to be an integer. + * @returns The value if it's an integer, undefined if it's null/undefined, + * otherwise an error is thrown. + */ +export declare const expectInt32: (value: any) => number | undefined; +/** + * @internal + * + * Asserts a value is a 16-bit integer and returns it. + * + * @param value - A value that is expected to be an integer. 
+ * @returns The value if it's an integer, undefined if it's null/undefined, + * otherwise an error is thrown. + */ +export declare const expectShort: (value: any) => number | undefined; +/** + * @internal + * + * Asserts a value is an 8-bit integer and returns it. + * + * @param value - A value that is expected to be an integer. + * @returns The value if it's an integer, undefined if it's null/undefined, + * otherwise an error is thrown. + */ +export declare const expectByte: (value: any) => number | undefined; +/** + * @internal + * + * Asserts a value is not null or undefined and returns it, or throws an error. + * + * @param value - A value that is expected to be defined + * @param location - The location where we're expecting to find a defined object (optional) + * @returns The value if it's not undefined, otherwise throws an error + */ +export declare const expectNonNull: (value: T | null | undefined, location?: string) => T; +/** + * @internal + * + * Asserts a value is an JSON-like object and returns it. This is expected to be used + * with values parsed from JSON (arrays, objects, numbers, strings, booleans). + * + * @param value - A value that is expected to be an object + * @returns The value if it's an object, undefined if it's null/undefined, + * otherwise an error is thrown. + */ +export declare const expectObject: (value: any) => Record | undefined; +/** + * @internal + * + * Asserts a value is a string and returns it. + * Numbers and boolean will be cast to strings with a warning. + * + * @param value - A value that is expected to be a string. + * @returns The value if it's a string, undefined if it's null/undefined, + * otherwise an error is thrown. + */ +export declare const expectString: (value: any) => string | undefined; +/** + * @internal + * + * Asserts a value is a JSON-like object with only one non-null/non-undefined key and + * returns it. 
+ * + * @param value - A value that is expected to be an object with exactly one non-null, + * non-undefined key. + * @returns the value if it's a union, undefined if it's null/undefined, otherwise + * an error is thrown. + */ +export declare const expectUnion: (value: unknown) => Record | undefined; +/** + * @internal + * + * Parses a value into a double. If the value is null or undefined, undefined + * will be returned. If the value is a string, it will be parsed by the standard + * parseFloat with one exception: NaN may only be explicitly set as the string + * "NaN", any implicit Nan values will result in an error being thrown. If any + * other type is provided, an exception will be thrown. + * + * @param value - A number or string representation of a double. + * @returns The value as a number, or undefined if it's null/undefined. + */ +export declare const strictParseDouble: (value: string | number) => number | undefined; +/** + * @internal + * + * @deprecated Use strictParseDouble + */ +export declare const strictParseFloat: (value: string | number) => number | undefined; +/** + * @internal + * + * Parses a value into a float. If the value is null or undefined, undefined + * will be returned. If the value is a string, it will be parsed by the standard + * parseFloat with one exception: NaN may only be explicitly set as the string + * "NaN", any implicit Nan values will result in an error being thrown. If any + * other type is provided, an exception will be thrown. + * + * @param value - A number or string representation of a float. + * @returns The value as a number, or undefined if it's null/undefined. + */ +export declare const strictParseFloat32: (value: string | number) => number | undefined; +/** + * @internal + * + * Asserts a value is a number and returns it. If the value is a string + * representation of a non-numeric number type (NaN, Infinity, -Infinity), + * the value will be parsed. 
Any other string value will result in an exception + * being thrown. Null or undefined will be returned as undefined. Any other + * type will result in an exception being thrown. + * + * @param value - A number or string representation of a non-numeric float. + * @returns The value as a number, or undefined if it's null/undefined. + */ +export declare const limitedParseDouble: (value: string | number) => number | undefined; +/** + * @internal + * + * @deprecated Use limitedParseDouble + */ +export declare const handleFloat: (value: string | number) => number | undefined; +/** + * @internal + * + * @deprecated Use limitedParseDouble + */ +export declare const limitedParseFloat: (value: string | number) => number | undefined; +/** + * @internal + * + * Asserts a value is a 32-bit float and returns it. If the value is a string + * representation of a non-numeric number type (NaN, Infinity, -Infinity), + * the value will be parsed. Any other string value will result in an exception + * being thrown. Null or undefined will be returned as undefined. Any other + * type will result in an exception being thrown. + * + * @param value - A number or string representation of a non-numeric float. + * @returns The value as a number, or undefined if it's null/undefined. + */ +export declare const limitedParseFloat32: (value: string | number) => number | undefined; +/** + * @internal + * + * Parses a value into an integer. If the value is null or undefined, undefined + * will be returned. If the value is a string, it will be parsed by parseFloat + * and the result will be asserted to be an integer. If the parsed value is not + * an integer, or the raw value is any type other than a string or number, an + * exception will be thrown. + * + * @param value - A number or string representation of an integer. + * @returns The value as a number, or undefined if it's null/undefined. 
+ */ +export declare const strictParseLong: (value: string | number) => number | undefined; +/** + * @internal + * + * @deprecated Use strictParseLong + */ +export declare const strictParseInt: (value: string | number) => number | undefined; +/** + * @internal + * + * Parses a value into a 32-bit integer. If the value is null or undefined, undefined + * will be returned. If the value is a string, it will be parsed by parseFloat + * and the result will be asserted to be an integer. If the parsed value is not + * an integer, or the raw value is any type other than a string or number, an + * exception will be thrown. + * + * @param value - A number or string representation of a 32-bit integer. + * @returns The value as a number, or undefined if it's null/undefined. + */ +export declare const strictParseInt32: (value: string | number) => number | undefined; +/** + * @internal + * + * Parses a value into a 16-bit integer. If the value is null or undefined, undefined + * will be returned. If the value is a string, it will be parsed by parseFloat + * and the result will be asserted to be an integer. If the parsed value is not + * an integer, or the raw value is any type other than a string or number, an + * exception will be thrown. + * + * @param value - A number or string representation of a 16-bit integer. + * @returns The value as a number, or undefined if it's null/undefined. + */ +export declare const strictParseShort: (value: string | number) => number | undefined; +/** + * @internal + * + * Parses a value into an 8-bit integer. If the value is null or undefined, undefined + * will be returned. If the value is a string, it will be parsed by parseFloat + * and the result will be asserted to be an integer. If the parsed value is not + * an integer, or the raw value is any type other than a string or number, an + * exception will be thrown. + * + * @param value - A number or string representation of an 8-bit integer. 
+ * @returns The value as a number, or undefined if it's null/undefined. + */ +export declare const strictParseByte: (value: string | number) => number | undefined; +/** + * @internal + */ +export declare const logger: { + warn: { + (...data: any[]): void; + (message?: any, ...optionalParams: any[]): void; + }; +}; diff --git a/amplify/functions/downloadDocument/node_modules/@smithy/smithy-client/dist-types/ts3.4/quote-header.d.ts b/amplify/functions/downloadDocument/node_modules/@smithy/smithy-client/dist-types/ts3.4/quote-header.d.ts new file mode 100644 index 0000000..c2f12e9 --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@smithy/smithy-client/dist-types/ts3.4/quote-header.d.ts @@ -0,0 +1,6 @@ +/** + * @public + * @param part - header list element + * @returns quoted string if part contains delimiter. + */ +export declare function quoteHeader(part: string): string; diff --git a/amplify/functions/downloadDocument/node_modules/@smithy/smithy-client/dist-types/ts3.4/resolve-path.d.ts b/amplify/functions/downloadDocument/node_modules/@smithy/smithy-client/dist-types/ts3.4/resolve-path.d.ts new file mode 100644 index 0000000..5432be7 --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@smithy/smithy-client/dist-types/ts3.4/resolve-path.d.ts @@ -0,0 +1,5 @@ +/** + * @internal + * Backwards compatibility re-export. + */ +export { resolvedPath } from "@smithy/core/protocols"; diff --git a/amplify/functions/downloadDocument/node_modules/@smithy/smithy-client/dist-types/ts3.4/ser-utils.d.ts b/amplify/functions/downloadDocument/node_modules/@smithy/smithy-client/dist-types/ts3.4/ser-utils.d.ts new file mode 100644 index 0000000..355f829 --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@smithy/smithy-client/dist-types/ts3.4/ser-utils.d.ts @@ -0,0 +1,15 @@ +/** + * @internal + * + * Serializes a number, turning non-numeric values into strings. + * + * @param value - The number to serialize. 
+ * @returns A number, or a string if the given number was non-numeric. + */ +export declare const serializeFloat: (value: number) => string | number; +/** + * @internal + * @param date - to be serialized. + * @returns https://smithy.io/2.0/spec/protocol-traits.html#timestampformat-trait date-time format. + */ +export declare const serializeDateTime: (date: Date) => string; diff --git a/amplify/functions/downloadDocument/node_modules/@smithy/smithy-client/dist-types/ts3.4/serde-json.d.ts b/amplify/functions/downloadDocument/node_modules/@smithy/smithy-client/dist-types/ts3.4/serde-json.d.ts new file mode 100644 index 0000000..499409f --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@smithy/smithy-client/dist-types/ts3.4/serde-json.d.ts @@ -0,0 +1,12 @@ +/** + * @internal + * + * Maps an object through the default JSON serde behavior. + * This means removing nullish fields and un-sparsifying lists. + * + * This is also used by Smithy RPCv2 CBOR as the default serde behavior. + * + * @param obj - to be checked. + * @returns same object with default serde behavior applied. + */ +export declare const _json: (obj: any) => any; diff --git a/amplify/functions/downloadDocument/node_modules/@smithy/smithy-client/dist-types/ts3.4/split-every.d.ts b/amplify/functions/downloadDocument/node_modules/@smithy/smithy-client/dist-types/ts3.4/split-every.d.ts new file mode 100644 index 0000000..2280f3e --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@smithy/smithy-client/dist-types/ts3.4/split-every.d.ts @@ -0,0 +1,11 @@ +/** + * @internal + * + * Given an input string, splits based on the delimiter after a given + * number of delimiters has been encountered. + * + * @param value - The input string to split. + * @param delimiter - The delimiter to split on. + * @param numDelimiters - The number of delimiters to have encountered to split. 
+ */ +export declare function splitEvery(value: string, delimiter: string, numDelimiters: number): Array; diff --git a/amplify/functions/downloadDocument/node_modules/@smithy/smithy-client/dist-types/ts3.4/split-header.d.ts b/amplify/functions/downloadDocument/node_modules/@smithy/smithy-client/dist-types/ts3.4/split-header.d.ts new file mode 100644 index 0000000..7cf54c6 --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@smithy/smithy-client/dist-types/ts3.4/split-header.d.ts @@ -0,0 +1,5 @@ +/** + * @param value - header string value. + * @returns value split by commas that aren't in quotes. + */ +export declare const splitHeader: (value: string) => string[]; diff --git a/amplify/functions/downloadDocument/node_modules/@smithy/smithy-client/package.json b/amplify/functions/downloadDocument/node_modules/@smithy/smithy-client/package.json new file mode 100644 index 0000000..cbab653 --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@smithy/smithy-client/package.json @@ -0,0 +1,67 @@ +{ + "name": "@smithy/smithy-client", + "version": "4.2.2", + "scripts": { + "build": "concurrently 'yarn:build:cjs' 'yarn:build:es' 'yarn:build:types && yarn build:types:downlevel'", + "build:cjs": "node ../../scripts/inline smithy-client", + "build:es": "yarn g:tsc -p tsconfig.es.json", + "build:types": "yarn g:tsc -p tsconfig.types.json", + "build:types:downlevel": "rimraf dist-types/ts3.4 && downlevel-dts dist-types dist-types/ts3.4", + "stage-release": "rimraf ./.release && yarn pack && mkdir ./.release && tar zxvf ./package.tgz --directory ./.release && rm ./package.tgz", + "clean": "rimraf ./dist-* && rimraf *.tsbuildinfo || exit 0", + "lint": "eslint -c ../../.eslintrc.js \"src/**/*.ts\"", + "format": "prettier --config ../../prettier.config.js --ignore-path ../../.prettierignore --write \"**/*.{ts,md,json}\"", + "extract:docs": "api-extractor run --local", + "test": "yarn g:vitest run", + "test:watch": "yarn g:vitest watch" + }, + "main": 
"./dist-cjs/index.js", + "module": "./dist-es/index.js", + "types": "./dist-types/index.d.ts", + "author": { + "name": "AWS SDK for JavaScript Team", + "url": "https://aws.amazon.com/javascript/" + }, + "license": "Apache-2.0", + "dependencies": { + "@smithy/core": "^3.3.1", + "@smithy/middleware-endpoint": "^4.1.2", + "@smithy/middleware-stack": "^4.0.2", + "@smithy/protocol-http": "^5.1.0", + "@smithy/types": "^4.2.0", + "@smithy/util-stream": "^4.2.0", + "tslib": "^2.6.2" + }, + "engines": { + "node": ">=18.0.0" + }, + "typesVersions": { + "<4.0": { + "dist-types/*": [ + "dist-types/ts3.4/*" + ] + } + }, + "files": [ + "dist-*/**" + ], + "homepage": "https://github.com/smithy-lang/smithy-typescript/tree/main/packages/smithy-client", + "repository": { + "type": "git", + "url": "https://github.com/smithy-lang/smithy-typescript.git", + "directory": "packages/smithy-client" + }, + "devDependencies": { + "@types/node": "^18.11.9", + "concurrently": "7.0.0", + "downlevel-dts": "0.10.1", + "rimraf": "3.0.2", + "typedoc": "0.23.23" + }, + "typedoc": { + "entryPoint": "src/index.ts" + }, + "publishConfig": { + "directory": ".release/package" + } +} \ No newline at end of file diff --git a/amplify/functions/downloadDocument/node_modules/@smithy/types/LICENSE b/amplify/functions/downloadDocument/node_modules/@smithy/types/LICENSE new file mode 100644 index 0000000..e907b58 --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@smithy/types/LICENSE @@ -0,0 +1,201 @@ + Apache License + Version 2.0, January 2004 + http://www.apache.org/licenses/ + + TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION + + 1. Definitions. + + "License" shall mean the terms and conditions for use, reproduction, + and distribution as defined by Sections 1 through 9 of this document. + + "Licensor" shall mean the copyright owner or entity authorized by + the copyright owner that is granting the License. 
+ + "Legal Entity" shall mean the union of the acting entity and all + other entities that control, are controlled by, or are under common + control with that entity. For the purposes of this definition, + "control" means (i) the power, direct or indirect, to cause the + direction or management of such entity, whether by contract or + otherwise, or (ii) ownership of fifty percent (50%) or more of the + outstanding shares, or (iii) beneficial ownership of such entity. + + "You" (or "Your") shall mean an individual or Legal Entity + exercising permissions granted by this License. + + "Source" form shall mean the preferred form for making modifications, + including but not limited to software source code, documentation + source, and configuration files. + + "Object" form shall mean any form resulting from mechanical + transformation or translation of a Source form, including but + not limited to compiled object code, generated documentation, + and conversions to other media types. + + "Work" shall mean the work of authorship, whether in Source or + Object form, made available under the License, as indicated by a + copyright notice that is included in or attached to the work + (an example is provided in the Appendix below). + + "Derivative Works" shall mean any work, whether in Source or Object + form, that is based on (or derived from) the Work and for which the + editorial revisions, annotations, elaborations, or other modifications + represent, as a whole, an original work of authorship. For the purposes + of this License, Derivative Works shall not include works that remain + separable from, or merely link (or bind by name) to the interfaces of, + the Work and Derivative Works thereof. 
+ + "Contribution" shall mean any work of authorship, including + the original version of the Work and any modifications or additions + to that Work or Derivative Works thereof, that is intentionally + submitted to Licensor for inclusion in the Work by the copyright owner + or by an individual or Legal Entity authorized to submit on behalf of + the copyright owner. For the purposes of this definition, "submitted" + means any form of electronic, verbal, or written communication sent + to the Licensor or its representatives, including but not limited to + communication on electronic mailing lists, source code control systems, + and issue tracking systems that are managed by, or on behalf of, the + Licensor for the purpose of discussing and improving the Work, but + excluding communication that is conspicuously marked or otherwise + designated in writing by the copyright owner as "Not a Contribution." + + "Contributor" shall mean Licensor and any individual or Legal Entity + on behalf of whom a Contribution has been received by Licensor and + subsequently incorporated within the Work. + + 2. Grant of Copyright License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + copyright license to reproduce, prepare Derivative Works of, + publicly display, publicly perform, sublicense, and distribute the + Work and such Derivative Works in Source or Object form. + + 3. Grant of Patent License. 
Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + (except as stated in this section) patent license to make, have made, + use, offer to sell, sell, import, and otherwise transfer the Work, + where such license applies only to those patent claims licensable + by such Contributor that are necessarily infringed by their + Contribution(s) alone or by combination of their Contribution(s) + with the Work to which such Contribution(s) was submitted. If You + institute patent litigation against any entity (including a + cross-claim or counterclaim in a lawsuit) alleging that the Work + or a Contribution incorporated within the Work constitutes direct + or contributory patent infringement, then any patent licenses + granted to You under this License for that Work shall terminate + as of the date such litigation is filed. + + 4. Redistribution. You may reproduce and distribute copies of the + Work or Derivative Works thereof in any medium, with or without + modifications, and in Source or Object form, provided that You + meet the following conditions: + + (a) You must give any other recipients of the Work or + Derivative Works a copy of this License; and + + (b) You must cause any modified files to carry prominent notices + stating that You changed the files; and + + (c) You must retain, in the Source form of any Derivative Works + that You distribute, all copyright, patent, trademark, and + attribution notices from the Source form of the Work, + excluding those notices that do not pertain to any part of + the Derivative Works; and + + (d) If the Work includes a "NOTICE" text file as part of its + distribution, then any Derivative Works that You distribute must + include a readable copy of the attribution notices contained + within such NOTICE file, excluding those notices that do not + pertain to any part of the Derivative Works, in at least one + of 
the following places: within a NOTICE text file distributed + as part of the Derivative Works; within the Source form or + documentation, if provided along with the Derivative Works; or, + within a display generated by the Derivative Works, if and + wherever such third-party notices normally appear. The contents + of the NOTICE file are for informational purposes only and + do not modify the License. You may add Your own attribution + notices within Derivative Works that You distribute, alongside + or as an addendum to the NOTICE text from the Work, provided + that such additional attribution notices cannot be construed + as modifying the License. + + You may add Your own copyright statement to Your modifications and + may provide additional or different license terms and conditions + for use, reproduction, or distribution of Your modifications, or + for any such Derivative Works as a whole, provided Your use, + reproduction, and distribution of the Work otherwise complies with + the conditions stated in this License. + + 5. Submission of Contributions. Unless You explicitly state otherwise, + any Contribution intentionally submitted for inclusion in the Work + by You to the Licensor shall be under the terms and conditions of + this License, without any additional terms or conditions. + Notwithstanding the above, nothing herein shall supersede or modify + the terms of any separate license agreement you may have executed + with Licensor regarding such Contributions. + + 6. Trademarks. This License does not grant permission to use the trade + names, trademarks, service marks, or product names of the Licensor, + except as required for reasonable and customary use in describing the + origin of the Work and reproducing the content of the NOTICE file. + + 7. Disclaimer of Warranty. 
Unless required by applicable law or + agreed to in writing, Licensor provides the Work (and each + Contributor provides its Contributions) on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or + implied, including, without limitation, any warranties or conditions + of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A + PARTICULAR PURPOSE. You are solely responsible for determining the + appropriateness of using or redistributing the Work and assume any + risks associated with Your exercise of permissions under this License. + + 8. Limitation of Liability. In no event and under no legal theory, + whether in tort (including negligence), contract, or otherwise, + unless required by applicable law (such as deliberate and grossly + negligent acts) or agreed to in writing, shall any Contributor be + liable to You for damages, including any direct, indirect, special, + incidental, or consequential damages of any character arising as a + result of this License or out of the use or inability to use the + Work (including but not limited to damages for loss of goodwill, + work stoppage, computer failure or malfunction, or any and all + other commercial damages or losses), even if such Contributor + has been advised of the possibility of such damages. + + 9. Accepting Warranty or Additional Liability. While redistributing + the Work or Derivative Works thereof, You may choose to offer, + and charge a fee for, acceptance of support, warranty, indemnity, + or other liability obligations and/or rights consistent with this + License. However, in accepting such obligations, You may act only + on Your own behalf and on Your sole responsibility, not on behalf + of any other Contributor, and only if You agree to indemnify, + defend, and hold each Contributor harmless for any liability + incurred by, or claims asserted against, such Contributor by reason + of your accepting any such warranty or additional liability. 
+ + END OF TERMS AND CONDITIONS + + APPENDIX: How to apply the Apache License to your work. + + To apply the Apache License to your work, attach the following + boilerplate notice, with the fields enclosed by brackets "{}" + replaced with your own identifying information. (Don't include + the brackets!) The text should be enclosed in the appropriate + comment syntax for the file format. We also recommend that a + file or class name and description of purpose be included on the + same "printed page" as the copyright notice for easier + identification within third-party archives. + + Copyright 2019 Amazon.com, Inc. or its affiliates. All Rights Reserved. + + Licensed under the Apache License, Version 2.0 (the "License"); + you may not use this file except in compliance with the License. + You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + + Unless required by applicable law or agreed to in writing, software + distributed under the License is distributed on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + See the License for the specific language governing permissions and + limitations under the License. diff --git a/amplify/functions/downloadDocument/node_modules/@smithy/types/README.md b/amplify/functions/downloadDocument/node_modules/@smithy/types/README.md new file mode 100644 index 0000000..7ab3ccd --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@smithy/types/README.md @@ -0,0 +1,115 @@ +# @smithy/types + +[![NPM version](https://img.shields.io/npm/v/@smithy/types/latest.svg)](https://www.npmjs.com/package/@smithy/types) +[![NPM downloads](https://img.shields.io/npm/dm/@smithy/types.svg)](https://www.npmjs.com/package/@smithy/types) + +## Usage + +This package is mostly used internally by generated clients. +Some public components have independent applications. 
+ +--- + +### Scenario: Removing `| undefined` from input and output structures + +Generated shapes' members are unioned with `undefined` for +input shapes, and are `?` (optional) for output shapes. + +- for inputs, this defers the validation to the service. +- for outputs, this strongly suggests that you should runtime-check the output data. + +If you would like to skip these steps, use the `AssertiveClient` or +`UncheckedClient` type helpers. + +Using AWS S3 as an example: + +```ts +import { S3 } from "@aws-sdk/client-s3"; +import type { AssertiveClient, UncheckedClient } from "@smithy/types"; + +const s3a = new S3({}) as AssertiveClient; +const s3b = new S3({}) as UncheckedClient; + +// AssertiveClient enforces required inputs are not undefined +// and required outputs are not undefined. +const get = await s3a.getObject({ + Bucket: "", + // @ts-expect-error (undefined not assignable to string) + Key: undefined, +}); + +// UncheckedClient makes output fields non-nullable. +// You should still perform type checks as you deem +// necessary, but the SDK will no longer prompt you +// with nullability errors. +const body = await ( + await s3b.getObject({ + Bucket: "", + Key: "", + }) +).Body.transformToString(); +``` + +When using the transform on non-aggregated client with the `Command` syntax, +the input cannot be validated because it goes through another class. + +```ts +import { S3Client, ListBucketsCommand, GetObjectCommand, GetObjectCommandInput } from "@aws-sdk/client-s3"; +import type { AssertiveClient, UncheckedClient, NoUndefined } from "@smithy/types"; + +const s3 = new S3Client({}) as UncheckedClient; + +const list = await s3.send( + new ListBucketsCommand({ + // command inputs are not validated by the type transform. + // because this is a separate class. + }) +); + +/** + * Although less ergonomic, you can use the NoUndefined + * transform on the input type. 
+ */ +const getObjectInput: NoUndefined = { + Bucket: "undefined", + // @ts-expect-error (undefined not assignable to string) + Key: undefined, + // optional params can still be undefined. + SSECustomerAlgorithm: undefined, +}; + +const get = s3.send(new GetObjectCommand(getObjectInput)); + +// outputs are still transformed. +await get.Body.TransformToString(); +``` + +### Scenario: Narrowing a smithy-typescript generated client's output payload blob types + +This is mostly relevant to operations with streaming bodies such as within +the S3Client in the AWS SDK for JavaScript v3. + +Because blob payload types are platform dependent, you may wish to indicate in your application that a client is running in a specific +environment. This narrows the blob payload types. + +```typescript +import { GetObjectCommand, S3Client } from "@aws-sdk/client-s3"; +import type { NodeJsClient, SdkStream, StreamingBlobPayloadOutputTypes } from "@smithy/types"; +import type { IncomingMessage } from "node:http"; + +// default client init. +const s3Default = new S3Client({}); + +// client init with type narrowing. +const s3NarrowType = new S3Client({}) as NodeJsClient; + +// The default type of blob payloads is a wide union type including multiple possible +// request handlers. +const body1: StreamingBlobPayloadOutputTypes = (await s3Default.send(new GetObjectCommand({ Key: "", Bucket: "" }))) + .Body!; + +// This is of the narrower type SdkStream representing +// blob payload responses using specifically the node:http request handler. 
+const body2: SdkStream = (await s3NarrowType.send(new GetObjectCommand({ Key: "", Bucket: "" }))) + .Body!; +``` diff --git a/amplify/functions/downloadDocument/node_modules/@smithy/types/dist-cjs/abort-handler.js b/amplify/functions/downloadDocument/node_modules/@smithy/types/dist-cjs/abort-handler.js new file mode 100644 index 0000000..532e610 --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@smithy/types/dist-cjs/abort-handler.js @@ -0,0 +1 @@ +module.exports = require("./index.js"); \ No newline at end of file diff --git a/amplify/functions/downloadDocument/node_modules/@smithy/types/dist-cjs/abort.js b/amplify/functions/downloadDocument/node_modules/@smithy/types/dist-cjs/abort.js new file mode 100644 index 0000000..532e610 --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@smithy/types/dist-cjs/abort.js @@ -0,0 +1 @@ +module.exports = require("./index.js"); \ No newline at end of file diff --git a/amplify/functions/downloadDocument/node_modules/@smithy/types/dist-cjs/auth/HttpApiKeyAuth.js b/amplify/functions/downloadDocument/node_modules/@smithy/types/dist-cjs/auth/HttpApiKeyAuth.js new file mode 100644 index 0000000..0440577 --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@smithy/types/dist-cjs/auth/HttpApiKeyAuth.js @@ -0,0 +1 @@ +module.exports = require("../index.js"); \ No newline at end of file diff --git a/amplify/functions/downloadDocument/node_modules/@smithy/types/dist-cjs/auth/HttpAuthScheme.js b/amplify/functions/downloadDocument/node_modules/@smithy/types/dist-cjs/auth/HttpAuthScheme.js new file mode 100644 index 0000000..0440577 --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@smithy/types/dist-cjs/auth/HttpAuthScheme.js @@ -0,0 +1 @@ +module.exports = require("../index.js"); \ No newline at end of file diff --git a/amplify/functions/downloadDocument/node_modules/@smithy/types/dist-cjs/auth/HttpAuthSchemeProvider.js 
b/amplify/functions/downloadDocument/node_modules/@smithy/types/dist-cjs/auth/HttpAuthSchemeProvider.js new file mode 100644 index 0000000..0440577 --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@smithy/types/dist-cjs/auth/HttpAuthSchemeProvider.js @@ -0,0 +1 @@ +module.exports = require("../index.js"); \ No newline at end of file diff --git a/amplify/functions/downloadDocument/node_modules/@smithy/types/dist-cjs/auth/HttpSigner.js b/amplify/functions/downloadDocument/node_modules/@smithy/types/dist-cjs/auth/HttpSigner.js new file mode 100644 index 0000000..0440577 --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@smithy/types/dist-cjs/auth/HttpSigner.js @@ -0,0 +1 @@ +module.exports = require("../index.js"); \ No newline at end of file diff --git a/amplify/functions/downloadDocument/node_modules/@smithy/types/dist-cjs/auth/IdentityProviderConfig.js b/amplify/functions/downloadDocument/node_modules/@smithy/types/dist-cjs/auth/IdentityProviderConfig.js new file mode 100644 index 0000000..0440577 --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@smithy/types/dist-cjs/auth/IdentityProviderConfig.js @@ -0,0 +1 @@ +module.exports = require("../index.js"); \ No newline at end of file diff --git a/amplify/functions/downloadDocument/node_modules/@smithy/types/dist-cjs/auth/auth.js b/amplify/functions/downloadDocument/node_modules/@smithy/types/dist-cjs/auth/auth.js new file mode 100644 index 0000000..0440577 --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@smithy/types/dist-cjs/auth/auth.js @@ -0,0 +1 @@ +module.exports = require("../index.js"); \ No newline at end of file diff --git a/amplify/functions/downloadDocument/node_modules/@smithy/types/dist-cjs/auth/index.js b/amplify/functions/downloadDocument/node_modules/@smithy/types/dist-cjs/auth/index.js new file mode 100644 index 0000000..0440577 --- /dev/null +++ 
b/amplify/functions/downloadDocument/node_modules/@smithy/types/dist-cjs/auth/index.js @@ -0,0 +1 @@ +module.exports = require("../index.js"); \ No newline at end of file diff --git a/amplify/functions/downloadDocument/node_modules/@smithy/types/dist-cjs/blob/blob-payload-input-types.js b/amplify/functions/downloadDocument/node_modules/@smithy/types/dist-cjs/blob/blob-payload-input-types.js new file mode 100644 index 0000000..0440577 --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@smithy/types/dist-cjs/blob/blob-payload-input-types.js @@ -0,0 +1 @@ +module.exports = require("../index.js"); \ No newline at end of file diff --git a/amplify/functions/downloadDocument/node_modules/@smithy/types/dist-cjs/checksum.js b/amplify/functions/downloadDocument/node_modules/@smithy/types/dist-cjs/checksum.js new file mode 100644 index 0000000..532e610 --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@smithy/types/dist-cjs/checksum.js @@ -0,0 +1 @@ +module.exports = require("./index.js"); \ No newline at end of file diff --git a/amplify/functions/downloadDocument/node_modules/@smithy/types/dist-cjs/client.js b/amplify/functions/downloadDocument/node_modules/@smithy/types/dist-cjs/client.js new file mode 100644 index 0000000..532e610 --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@smithy/types/dist-cjs/client.js @@ -0,0 +1 @@ +module.exports = require("./index.js"); \ No newline at end of file diff --git a/amplify/functions/downloadDocument/node_modules/@smithy/types/dist-cjs/command.js b/amplify/functions/downloadDocument/node_modules/@smithy/types/dist-cjs/command.js new file mode 100644 index 0000000..532e610 --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@smithy/types/dist-cjs/command.js @@ -0,0 +1 @@ +module.exports = require("./index.js"); \ No newline at end of file diff --git a/amplify/functions/downloadDocument/node_modules/@smithy/types/dist-cjs/connection/config.js 
b/amplify/functions/downloadDocument/node_modules/@smithy/types/dist-cjs/connection/config.js new file mode 100644 index 0000000..0440577 --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@smithy/types/dist-cjs/connection/config.js @@ -0,0 +1 @@ +module.exports = require("../index.js"); \ No newline at end of file diff --git a/amplify/functions/downloadDocument/node_modules/@smithy/types/dist-cjs/connection/index.js b/amplify/functions/downloadDocument/node_modules/@smithy/types/dist-cjs/connection/index.js new file mode 100644 index 0000000..0440577 --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@smithy/types/dist-cjs/connection/index.js @@ -0,0 +1 @@ +module.exports = require("../index.js"); \ No newline at end of file diff --git a/amplify/functions/downloadDocument/node_modules/@smithy/types/dist-cjs/connection/manager.js b/amplify/functions/downloadDocument/node_modules/@smithy/types/dist-cjs/connection/manager.js new file mode 100644 index 0000000..0440577 --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@smithy/types/dist-cjs/connection/manager.js @@ -0,0 +1 @@ +module.exports = require("../index.js"); \ No newline at end of file diff --git a/amplify/functions/downloadDocument/node_modules/@smithy/types/dist-cjs/connection/pool.js b/amplify/functions/downloadDocument/node_modules/@smithy/types/dist-cjs/connection/pool.js new file mode 100644 index 0000000..0440577 --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@smithy/types/dist-cjs/connection/pool.js @@ -0,0 +1 @@ +module.exports = require("../index.js"); \ No newline at end of file diff --git a/amplify/functions/downloadDocument/node_modules/@smithy/types/dist-cjs/crypto.js b/amplify/functions/downloadDocument/node_modules/@smithy/types/dist-cjs/crypto.js new file mode 100644 index 0000000..532e610 --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@smithy/types/dist-cjs/crypto.js @@ -0,0 +1 @@ +module.exports = 
require("./index.js"); \ No newline at end of file diff --git a/amplify/functions/downloadDocument/node_modules/@smithy/types/dist-cjs/downlevel-ts3.4/transform/type-transform.js b/amplify/functions/downloadDocument/node_modules/@smithy/types/dist-cjs/downlevel-ts3.4/transform/type-transform.js new file mode 100644 index 0000000..8817412 --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@smithy/types/dist-cjs/downlevel-ts3.4/transform/type-transform.js @@ -0,0 +1 @@ +module.exports = require("../../index.js"); \ No newline at end of file diff --git a/amplify/functions/downloadDocument/node_modules/@smithy/types/dist-cjs/encode.js b/amplify/functions/downloadDocument/node_modules/@smithy/types/dist-cjs/encode.js new file mode 100644 index 0000000..532e610 --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@smithy/types/dist-cjs/encode.js @@ -0,0 +1 @@ +module.exports = require("./index.js"); \ No newline at end of file diff --git a/amplify/functions/downloadDocument/node_modules/@smithy/types/dist-cjs/endpoint.js b/amplify/functions/downloadDocument/node_modules/@smithy/types/dist-cjs/endpoint.js new file mode 100644 index 0000000..532e610 --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@smithy/types/dist-cjs/endpoint.js @@ -0,0 +1 @@ +module.exports = require("./index.js"); \ No newline at end of file diff --git a/amplify/functions/downloadDocument/node_modules/@smithy/types/dist-cjs/endpoints/EndpointRuleObject.js b/amplify/functions/downloadDocument/node_modules/@smithy/types/dist-cjs/endpoints/EndpointRuleObject.js new file mode 100644 index 0000000..0440577 --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@smithy/types/dist-cjs/endpoints/EndpointRuleObject.js @@ -0,0 +1 @@ +module.exports = require("../index.js"); \ No newline at end of file diff --git a/amplify/functions/downloadDocument/node_modules/@smithy/types/dist-cjs/endpoints/ErrorRuleObject.js 
b/amplify/functions/downloadDocument/node_modules/@smithy/types/dist-cjs/endpoints/ErrorRuleObject.js new file mode 100644 index 0000000..0440577 --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@smithy/types/dist-cjs/endpoints/ErrorRuleObject.js @@ -0,0 +1 @@ +module.exports = require("../index.js"); \ No newline at end of file diff --git a/amplify/functions/downloadDocument/node_modules/@smithy/types/dist-cjs/endpoints/RuleSetObject.js b/amplify/functions/downloadDocument/node_modules/@smithy/types/dist-cjs/endpoints/RuleSetObject.js new file mode 100644 index 0000000..0440577 --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@smithy/types/dist-cjs/endpoints/RuleSetObject.js @@ -0,0 +1 @@ +module.exports = require("../index.js"); \ No newline at end of file diff --git a/amplify/functions/downloadDocument/node_modules/@smithy/types/dist-cjs/endpoints/TreeRuleObject.js b/amplify/functions/downloadDocument/node_modules/@smithy/types/dist-cjs/endpoints/TreeRuleObject.js new file mode 100644 index 0000000..0440577 --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@smithy/types/dist-cjs/endpoints/TreeRuleObject.js @@ -0,0 +1 @@ +module.exports = require("../index.js"); \ No newline at end of file diff --git a/amplify/functions/downloadDocument/node_modules/@smithy/types/dist-cjs/endpoints/index.js b/amplify/functions/downloadDocument/node_modules/@smithy/types/dist-cjs/endpoints/index.js new file mode 100644 index 0000000..0440577 --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@smithy/types/dist-cjs/endpoints/index.js @@ -0,0 +1 @@ +module.exports = require("../index.js"); \ No newline at end of file diff --git a/amplify/functions/downloadDocument/node_modules/@smithy/types/dist-cjs/endpoints/shared.js b/amplify/functions/downloadDocument/node_modules/@smithy/types/dist-cjs/endpoints/shared.js new file mode 100644 index 0000000..0440577 --- /dev/null +++ 
b/amplify/functions/downloadDocument/node_modules/@smithy/types/dist-cjs/endpoints/shared.js @@ -0,0 +1 @@ +module.exports = require("../index.js"); \ No newline at end of file diff --git a/amplify/functions/downloadDocument/node_modules/@smithy/types/dist-cjs/eventStream.js b/amplify/functions/downloadDocument/node_modules/@smithy/types/dist-cjs/eventStream.js new file mode 100644 index 0000000..532e610 --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@smithy/types/dist-cjs/eventStream.js @@ -0,0 +1 @@ +module.exports = require("./index.js"); \ No newline at end of file diff --git a/amplify/functions/downloadDocument/node_modules/@smithy/types/dist-cjs/extensions/checksum.js b/amplify/functions/downloadDocument/node_modules/@smithy/types/dist-cjs/extensions/checksum.js new file mode 100644 index 0000000..0440577 --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@smithy/types/dist-cjs/extensions/checksum.js @@ -0,0 +1 @@ +module.exports = require("../index.js"); \ No newline at end of file diff --git a/amplify/functions/downloadDocument/node_modules/@smithy/types/dist-cjs/extensions/defaultClientConfiguration.js b/amplify/functions/downloadDocument/node_modules/@smithy/types/dist-cjs/extensions/defaultClientConfiguration.js new file mode 100644 index 0000000..0440577 --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@smithy/types/dist-cjs/extensions/defaultClientConfiguration.js @@ -0,0 +1 @@ +module.exports = require("../index.js"); \ No newline at end of file diff --git a/amplify/functions/downloadDocument/node_modules/@smithy/types/dist-cjs/extensions/defaultExtensionConfiguration.js b/amplify/functions/downloadDocument/node_modules/@smithy/types/dist-cjs/extensions/defaultExtensionConfiguration.js new file mode 100644 index 0000000..0440577 --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@smithy/types/dist-cjs/extensions/defaultExtensionConfiguration.js @@ -0,0 +1 @@ +module.exports = 
require("../index.js"); \ No newline at end of file diff --git a/amplify/functions/downloadDocument/node_modules/@smithy/types/dist-cjs/extensions/index.js b/amplify/functions/downloadDocument/node_modules/@smithy/types/dist-cjs/extensions/index.js new file mode 100644 index 0000000..0440577 --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@smithy/types/dist-cjs/extensions/index.js @@ -0,0 +1 @@ +module.exports = require("../index.js"); \ No newline at end of file diff --git a/amplify/functions/downloadDocument/node_modules/@smithy/types/dist-cjs/extensions/retry.js b/amplify/functions/downloadDocument/node_modules/@smithy/types/dist-cjs/extensions/retry.js new file mode 100644 index 0000000..0440577 --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@smithy/types/dist-cjs/extensions/retry.js @@ -0,0 +1 @@ +module.exports = require("../index.js"); \ No newline at end of file diff --git a/amplify/functions/downloadDocument/node_modules/@smithy/types/dist-cjs/externals-check/browser-externals-check.js b/amplify/functions/downloadDocument/node_modules/@smithy/types/dist-cjs/externals-check/browser-externals-check.js new file mode 100644 index 0000000..0440577 --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@smithy/types/dist-cjs/externals-check/browser-externals-check.js @@ -0,0 +1 @@ +module.exports = require("../index.js"); \ No newline at end of file diff --git a/amplify/functions/downloadDocument/node_modules/@smithy/types/dist-cjs/feature-ids.js b/amplify/functions/downloadDocument/node_modules/@smithy/types/dist-cjs/feature-ids.js new file mode 100644 index 0000000..532e610 --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@smithy/types/dist-cjs/feature-ids.js @@ -0,0 +1 @@ +module.exports = require("./index.js"); \ No newline at end of file diff --git a/amplify/functions/downloadDocument/node_modules/@smithy/types/dist-cjs/http.js 
b/amplify/functions/downloadDocument/node_modules/@smithy/types/dist-cjs/http.js new file mode 100644 index 0000000..532e610 --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@smithy/types/dist-cjs/http.js @@ -0,0 +1 @@ +module.exports = require("./index.js"); \ No newline at end of file diff --git a/amplify/functions/downloadDocument/node_modules/@smithy/types/dist-cjs/http/httpHandlerInitialization.js b/amplify/functions/downloadDocument/node_modules/@smithy/types/dist-cjs/http/httpHandlerInitialization.js new file mode 100644 index 0000000..0440577 --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@smithy/types/dist-cjs/http/httpHandlerInitialization.js @@ -0,0 +1 @@ +module.exports = require("../index.js"); \ No newline at end of file diff --git a/amplify/functions/downloadDocument/node_modules/@smithy/types/dist-cjs/identity/apiKeyIdentity.js b/amplify/functions/downloadDocument/node_modules/@smithy/types/dist-cjs/identity/apiKeyIdentity.js new file mode 100644 index 0000000..0440577 --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@smithy/types/dist-cjs/identity/apiKeyIdentity.js @@ -0,0 +1 @@ +module.exports = require("../index.js"); \ No newline at end of file diff --git a/amplify/functions/downloadDocument/node_modules/@smithy/types/dist-cjs/identity/awsCredentialIdentity.js b/amplify/functions/downloadDocument/node_modules/@smithy/types/dist-cjs/identity/awsCredentialIdentity.js new file mode 100644 index 0000000..0440577 --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@smithy/types/dist-cjs/identity/awsCredentialIdentity.js @@ -0,0 +1 @@ +module.exports = require("../index.js"); \ No newline at end of file diff --git a/amplify/functions/downloadDocument/node_modules/@smithy/types/dist-cjs/identity/identity.js b/amplify/functions/downloadDocument/node_modules/@smithy/types/dist-cjs/identity/identity.js new file mode 100644 index 0000000..0440577 --- /dev/null +++ 
b/amplify/functions/downloadDocument/node_modules/@smithy/types/dist-cjs/identity/identity.js @@ -0,0 +1 @@ +module.exports = require("../index.js"); \ No newline at end of file diff --git a/amplify/functions/downloadDocument/node_modules/@smithy/types/dist-cjs/identity/index.js b/amplify/functions/downloadDocument/node_modules/@smithy/types/dist-cjs/identity/index.js new file mode 100644 index 0000000..0440577 --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@smithy/types/dist-cjs/identity/index.js @@ -0,0 +1 @@ +module.exports = require("../index.js"); \ No newline at end of file diff --git a/amplify/functions/downloadDocument/node_modules/@smithy/types/dist-cjs/identity/tokenIdentity.js b/amplify/functions/downloadDocument/node_modules/@smithy/types/dist-cjs/identity/tokenIdentity.js new file mode 100644 index 0000000..0440577 --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@smithy/types/dist-cjs/identity/tokenIdentity.js @@ -0,0 +1 @@ +module.exports = require("../index.js"); \ No newline at end of file diff --git a/amplify/functions/downloadDocument/node_modules/@smithy/types/dist-cjs/index.js b/amplify/functions/downloadDocument/node_modules/@smithy/types/dist-cjs/index.js new file mode 100644 index 0000000..0849f2b --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@smithy/types/dist-cjs/index.js @@ -0,0 +1,144 @@ +var __defProp = Object.defineProperty; +var __getOwnPropDesc = Object.getOwnPropertyDescriptor; +var __getOwnPropNames = Object.getOwnPropertyNames; +var __hasOwnProp = Object.prototype.hasOwnProperty; +var __name = (target, value) => __defProp(target, "name", { value, configurable: true }); +var __export = (target, all) => { + for (var name in all) + __defProp(target, name, { get: all[name], enumerable: true }); +}; +var __copyProps = (to, from, except, desc) => { + if (from && typeof from === "object" || typeof from === "function") { + for (let key of __getOwnPropNames(from)) + if 
(!__hasOwnProp.call(to, key) && key !== except) + __defProp(to, key, { get: () => from[key], enumerable: !(desc = __getOwnPropDesc(from, key)) || desc.enumerable }); + } + return to; +}; +var __toCommonJS = (mod) => __copyProps(__defProp({}, "__esModule", { value: true }), mod); + +// src/index.ts +var src_exports = {}; +__export(src_exports, { + AlgorithmId: () => AlgorithmId, + EndpointURLScheme: () => EndpointURLScheme, + FieldPosition: () => FieldPosition, + HttpApiKeyAuthLocation: () => HttpApiKeyAuthLocation, + HttpAuthLocation: () => HttpAuthLocation, + IniSectionType: () => IniSectionType, + RequestHandlerProtocol: () => RequestHandlerProtocol, + SMITHY_CONTEXT_KEY: () => SMITHY_CONTEXT_KEY, + getDefaultClientConfiguration: () => getDefaultClientConfiguration, + resolveDefaultRuntimeConfig: () => resolveDefaultRuntimeConfig +}); +module.exports = __toCommonJS(src_exports); + +// src/auth/auth.ts +var HttpAuthLocation = /* @__PURE__ */ ((HttpAuthLocation2) => { + HttpAuthLocation2["HEADER"] = "header"; + HttpAuthLocation2["QUERY"] = "query"; + return HttpAuthLocation2; +})(HttpAuthLocation || {}); + +// src/auth/HttpApiKeyAuth.ts +var HttpApiKeyAuthLocation = /* @__PURE__ */ ((HttpApiKeyAuthLocation2) => { + HttpApiKeyAuthLocation2["HEADER"] = "header"; + HttpApiKeyAuthLocation2["QUERY"] = "query"; + return HttpApiKeyAuthLocation2; +})(HttpApiKeyAuthLocation || {}); + +// src/endpoint.ts +var EndpointURLScheme = /* @__PURE__ */ ((EndpointURLScheme2) => { + EndpointURLScheme2["HTTP"] = "http"; + EndpointURLScheme2["HTTPS"] = "https"; + return EndpointURLScheme2; +})(EndpointURLScheme || {}); + +// src/extensions/checksum.ts +var AlgorithmId = /* @__PURE__ */ ((AlgorithmId2) => { + AlgorithmId2["MD5"] = "md5"; + AlgorithmId2["CRC32"] = "crc32"; + AlgorithmId2["CRC32C"] = "crc32c"; + AlgorithmId2["SHA1"] = "sha1"; + AlgorithmId2["SHA256"] = "sha256"; + return AlgorithmId2; +})(AlgorithmId || {}); +var getChecksumConfiguration = /* @__PURE__ */ 
__name((runtimeConfig) => { + const checksumAlgorithms = []; + if (runtimeConfig.sha256 !== void 0) { + checksumAlgorithms.push({ + algorithmId: () => "sha256" /* SHA256 */, + checksumConstructor: () => runtimeConfig.sha256 + }); + } + if (runtimeConfig.md5 != void 0) { + checksumAlgorithms.push({ + algorithmId: () => "md5" /* MD5 */, + checksumConstructor: () => runtimeConfig.md5 + }); + } + return { + addChecksumAlgorithm(algo) { + checksumAlgorithms.push(algo); + }, + checksumAlgorithms() { + return checksumAlgorithms; + } + }; +}, "getChecksumConfiguration"); +var resolveChecksumRuntimeConfig = /* @__PURE__ */ __name((clientConfig) => { + const runtimeConfig = {}; + clientConfig.checksumAlgorithms().forEach((checksumAlgorithm) => { + runtimeConfig[checksumAlgorithm.algorithmId()] = checksumAlgorithm.checksumConstructor(); + }); + return runtimeConfig; +}, "resolveChecksumRuntimeConfig"); + +// src/extensions/defaultClientConfiguration.ts +var getDefaultClientConfiguration = /* @__PURE__ */ __name((runtimeConfig) => { + return getChecksumConfiguration(runtimeConfig); +}, "getDefaultClientConfiguration"); +var resolveDefaultRuntimeConfig = /* @__PURE__ */ __name((config) => { + return resolveChecksumRuntimeConfig(config); +}, "resolveDefaultRuntimeConfig"); + +// src/http.ts +var FieldPosition = /* @__PURE__ */ ((FieldPosition2) => { + FieldPosition2[FieldPosition2["HEADER"] = 0] = "HEADER"; + FieldPosition2[FieldPosition2["TRAILER"] = 1] = "TRAILER"; + return FieldPosition2; +})(FieldPosition || {}); + +// src/middleware.ts +var SMITHY_CONTEXT_KEY = "__smithy_context"; + +// src/profile.ts +var IniSectionType = /* @__PURE__ */ ((IniSectionType2) => { + IniSectionType2["PROFILE"] = "profile"; + IniSectionType2["SSO_SESSION"] = "sso-session"; + IniSectionType2["SERVICES"] = "services"; + return IniSectionType2; +})(IniSectionType || {}); + +// src/transfer.ts +var RequestHandlerProtocol = /* @__PURE__ */ ((RequestHandlerProtocol2) => { + 
RequestHandlerProtocol2["HTTP_0_9"] = "http/0.9"; + RequestHandlerProtocol2["HTTP_1_0"] = "http/1.0"; + RequestHandlerProtocol2["TDS_8_0"] = "tds/8.0"; + return RequestHandlerProtocol2; +})(RequestHandlerProtocol || {}); +// Annotate the CommonJS export names for ESM import in node: + +0 && (module.exports = { + HttpAuthLocation, + HttpApiKeyAuthLocation, + EndpointURLScheme, + AlgorithmId, + getDefaultClientConfiguration, + resolveDefaultRuntimeConfig, + FieldPosition, + SMITHY_CONTEXT_KEY, + IniSectionType, + RequestHandlerProtocol +}); + diff --git a/amplify/functions/downloadDocument/node_modules/@smithy/types/dist-cjs/logger.js b/amplify/functions/downloadDocument/node_modules/@smithy/types/dist-cjs/logger.js new file mode 100644 index 0000000..532e610 --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@smithy/types/dist-cjs/logger.js @@ -0,0 +1 @@ +module.exports = require("./index.js"); \ No newline at end of file diff --git a/amplify/functions/downloadDocument/node_modules/@smithy/types/dist-cjs/middleware.js b/amplify/functions/downloadDocument/node_modules/@smithy/types/dist-cjs/middleware.js new file mode 100644 index 0000000..532e610 --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@smithy/types/dist-cjs/middleware.js @@ -0,0 +1 @@ +module.exports = require("./index.js"); \ No newline at end of file diff --git a/amplify/functions/downloadDocument/node_modules/@smithy/types/dist-cjs/pagination.js b/amplify/functions/downloadDocument/node_modules/@smithy/types/dist-cjs/pagination.js new file mode 100644 index 0000000..532e610 --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@smithy/types/dist-cjs/pagination.js @@ -0,0 +1 @@ +module.exports = require("./index.js"); \ No newline at end of file diff --git a/amplify/functions/downloadDocument/node_modules/@smithy/types/dist-cjs/profile.js b/amplify/functions/downloadDocument/node_modules/@smithy/types/dist-cjs/profile.js new file mode 100644 index 
0000000..532e610 --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@smithy/types/dist-cjs/profile.js @@ -0,0 +1 @@ +module.exports = require("./index.js"); \ No newline at end of file diff --git a/amplify/functions/downloadDocument/node_modules/@smithy/types/dist-cjs/response.js b/amplify/functions/downloadDocument/node_modules/@smithy/types/dist-cjs/response.js new file mode 100644 index 0000000..532e610 --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@smithy/types/dist-cjs/response.js @@ -0,0 +1 @@ +module.exports = require("./index.js"); \ No newline at end of file diff --git a/amplify/functions/downloadDocument/node_modules/@smithy/types/dist-cjs/retry.js b/amplify/functions/downloadDocument/node_modules/@smithy/types/dist-cjs/retry.js new file mode 100644 index 0000000..532e610 --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@smithy/types/dist-cjs/retry.js @@ -0,0 +1 @@ +module.exports = require("./index.js"); \ No newline at end of file diff --git a/amplify/functions/downloadDocument/node_modules/@smithy/types/dist-cjs/serde.js b/amplify/functions/downloadDocument/node_modules/@smithy/types/dist-cjs/serde.js new file mode 100644 index 0000000..532e610 --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@smithy/types/dist-cjs/serde.js @@ -0,0 +1 @@ +module.exports = require("./index.js"); \ No newline at end of file diff --git a/amplify/functions/downloadDocument/node_modules/@smithy/types/dist-cjs/shapes.js b/amplify/functions/downloadDocument/node_modules/@smithy/types/dist-cjs/shapes.js new file mode 100644 index 0000000..532e610 --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@smithy/types/dist-cjs/shapes.js @@ -0,0 +1 @@ +module.exports = require("./index.js"); \ No newline at end of file diff --git a/amplify/functions/downloadDocument/node_modules/@smithy/types/dist-cjs/signature.js 
b/amplify/functions/downloadDocument/node_modules/@smithy/types/dist-cjs/signature.js new file mode 100644 index 0000000..532e610 --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@smithy/types/dist-cjs/signature.js @@ -0,0 +1 @@ +module.exports = require("./index.js"); \ No newline at end of file diff --git a/amplify/functions/downloadDocument/node_modules/@smithy/types/dist-cjs/stream.js b/amplify/functions/downloadDocument/node_modules/@smithy/types/dist-cjs/stream.js new file mode 100644 index 0000000..532e610 --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@smithy/types/dist-cjs/stream.js @@ -0,0 +1 @@ +module.exports = require("./index.js"); \ No newline at end of file diff --git a/amplify/functions/downloadDocument/node_modules/@smithy/types/dist-cjs/streaming-payload/streaming-blob-common-types.js b/amplify/functions/downloadDocument/node_modules/@smithy/types/dist-cjs/streaming-payload/streaming-blob-common-types.js new file mode 100644 index 0000000..0440577 --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@smithy/types/dist-cjs/streaming-payload/streaming-blob-common-types.js @@ -0,0 +1 @@ +module.exports = require("../index.js"); \ No newline at end of file diff --git a/amplify/functions/downloadDocument/node_modules/@smithy/types/dist-cjs/streaming-payload/streaming-blob-payload-input-types.js b/amplify/functions/downloadDocument/node_modules/@smithy/types/dist-cjs/streaming-payload/streaming-blob-payload-input-types.js new file mode 100644 index 0000000..0440577 --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@smithy/types/dist-cjs/streaming-payload/streaming-blob-payload-input-types.js @@ -0,0 +1 @@ +module.exports = require("../index.js"); \ No newline at end of file diff --git a/amplify/functions/downloadDocument/node_modules/@smithy/types/dist-cjs/streaming-payload/streaming-blob-payload-output-types.js 
b/amplify/functions/downloadDocument/node_modules/@smithy/types/dist-cjs/streaming-payload/streaming-blob-payload-output-types.js new file mode 100644 index 0000000..0440577 --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@smithy/types/dist-cjs/streaming-payload/streaming-blob-payload-output-types.js @@ -0,0 +1 @@ +module.exports = require("../index.js"); \ No newline at end of file diff --git a/amplify/functions/downloadDocument/node_modules/@smithy/types/dist-cjs/transfer.js b/amplify/functions/downloadDocument/node_modules/@smithy/types/dist-cjs/transfer.js new file mode 100644 index 0000000..532e610 --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@smithy/types/dist-cjs/transfer.js @@ -0,0 +1 @@ +module.exports = require("./index.js"); \ No newline at end of file diff --git a/amplify/functions/downloadDocument/node_modules/@smithy/types/dist-cjs/transform/client-method-transforms.js b/amplify/functions/downloadDocument/node_modules/@smithy/types/dist-cjs/transform/client-method-transforms.js new file mode 100644 index 0000000..0440577 --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@smithy/types/dist-cjs/transform/client-method-transforms.js @@ -0,0 +1 @@ +module.exports = require("../index.js"); \ No newline at end of file diff --git a/amplify/functions/downloadDocument/node_modules/@smithy/types/dist-cjs/transform/client-payload-blob-type-narrow.js b/amplify/functions/downloadDocument/node_modules/@smithy/types/dist-cjs/transform/client-payload-blob-type-narrow.js new file mode 100644 index 0000000..0440577 --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@smithy/types/dist-cjs/transform/client-payload-blob-type-narrow.js @@ -0,0 +1 @@ +module.exports = require("../index.js"); \ No newline at end of file diff --git a/amplify/functions/downloadDocument/node_modules/@smithy/types/dist-cjs/transform/exact.js 
b/amplify/functions/downloadDocument/node_modules/@smithy/types/dist-cjs/transform/exact.js new file mode 100644 index 0000000..0440577 --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@smithy/types/dist-cjs/transform/exact.js @@ -0,0 +1 @@ +module.exports = require("../index.js"); \ No newline at end of file diff --git a/amplify/functions/downloadDocument/node_modules/@smithy/types/dist-cjs/transform/no-undefined.js b/amplify/functions/downloadDocument/node_modules/@smithy/types/dist-cjs/transform/no-undefined.js new file mode 100644 index 0000000..0440577 --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@smithy/types/dist-cjs/transform/no-undefined.js @@ -0,0 +1 @@ +module.exports = require("../index.js"); \ No newline at end of file diff --git a/amplify/functions/downloadDocument/node_modules/@smithy/types/dist-cjs/transform/type-transform.js b/amplify/functions/downloadDocument/node_modules/@smithy/types/dist-cjs/transform/type-transform.js new file mode 100644 index 0000000..0440577 --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@smithy/types/dist-cjs/transform/type-transform.js @@ -0,0 +1 @@ +module.exports = require("../index.js"); \ No newline at end of file diff --git a/amplify/functions/downloadDocument/node_modules/@smithy/types/dist-cjs/uri.js b/amplify/functions/downloadDocument/node_modules/@smithy/types/dist-cjs/uri.js new file mode 100644 index 0000000..532e610 --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@smithy/types/dist-cjs/uri.js @@ -0,0 +1 @@ +module.exports = require("./index.js"); \ No newline at end of file diff --git a/amplify/functions/downloadDocument/node_modules/@smithy/types/dist-cjs/util.js b/amplify/functions/downloadDocument/node_modules/@smithy/types/dist-cjs/util.js new file mode 100644 index 0000000..532e610 --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@smithy/types/dist-cjs/util.js @@ -0,0 +1 @@ +module.exports = 
require("./index.js"); \ No newline at end of file diff --git a/amplify/functions/downloadDocument/node_modules/@smithy/types/dist-cjs/waiter.js b/amplify/functions/downloadDocument/node_modules/@smithy/types/dist-cjs/waiter.js new file mode 100644 index 0000000..532e610 --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@smithy/types/dist-cjs/waiter.js @@ -0,0 +1 @@ +module.exports = require("./index.js"); \ No newline at end of file diff --git a/amplify/functions/downloadDocument/node_modules/@smithy/types/dist-es/abort-handler.js b/amplify/functions/downloadDocument/node_modules/@smithy/types/dist-es/abort-handler.js new file mode 100644 index 0000000..cb0ff5c --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@smithy/types/dist-es/abort-handler.js @@ -0,0 +1 @@ +export {}; diff --git a/amplify/functions/downloadDocument/node_modules/@smithy/types/dist-es/abort.js b/amplify/functions/downloadDocument/node_modules/@smithy/types/dist-es/abort.js new file mode 100644 index 0000000..cb0ff5c --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@smithy/types/dist-es/abort.js @@ -0,0 +1 @@ +export {}; diff --git a/amplify/functions/downloadDocument/node_modules/@smithy/types/dist-es/auth/HttpApiKeyAuth.js b/amplify/functions/downloadDocument/node_modules/@smithy/types/dist-es/auth/HttpApiKeyAuth.js new file mode 100644 index 0000000..4c02f24 --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@smithy/types/dist-es/auth/HttpApiKeyAuth.js @@ -0,0 +1,5 @@ +export var HttpApiKeyAuthLocation; +(function (HttpApiKeyAuthLocation) { + HttpApiKeyAuthLocation["HEADER"] = "header"; + HttpApiKeyAuthLocation["QUERY"] = "query"; +})(HttpApiKeyAuthLocation || (HttpApiKeyAuthLocation = {})); diff --git a/amplify/functions/downloadDocument/node_modules/@smithy/types/dist-es/auth/HttpAuthScheme.js b/amplify/functions/downloadDocument/node_modules/@smithy/types/dist-es/auth/HttpAuthScheme.js new file mode 100644 index 
0000000..cb0ff5c --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@smithy/types/dist-es/auth/HttpAuthScheme.js @@ -0,0 +1 @@ +export {}; diff --git a/amplify/functions/downloadDocument/node_modules/@smithy/types/dist-es/auth/HttpAuthSchemeProvider.js b/amplify/functions/downloadDocument/node_modules/@smithy/types/dist-es/auth/HttpAuthSchemeProvider.js new file mode 100644 index 0000000..cb0ff5c --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@smithy/types/dist-es/auth/HttpAuthSchemeProvider.js @@ -0,0 +1 @@ +export {}; diff --git a/amplify/functions/downloadDocument/node_modules/@smithy/types/dist-es/auth/HttpSigner.js b/amplify/functions/downloadDocument/node_modules/@smithy/types/dist-es/auth/HttpSigner.js new file mode 100644 index 0000000..cb0ff5c --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@smithy/types/dist-es/auth/HttpSigner.js @@ -0,0 +1 @@ +export {}; diff --git a/amplify/functions/downloadDocument/node_modules/@smithy/types/dist-es/auth/IdentityProviderConfig.js b/amplify/functions/downloadDocument/node_modules/@smithy/types/dist-es/auth/IdentityProviderConfig.js new file mode 100644 index 0000000..cb0ff5c --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@smithy/types/dist-es/auth/IdentityProviderConfig.js @@ -0,0 +1 @@ +export {}; diff --git a/amplify/functions/downloadDocument/node_modules/@smithy/types/dist-es/auth/auth.js b/amplify/functions/downloadDocument/node_modules/@smithy/types/dist-es/auth/auth.js new file mode 100644 index 0000000..bd3b2df --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@smithy/types/dist-es/auth/auth.js @@ -0,0 +1,5 @@ +export var HttpAuthLocation; +(function (HttpAuthLocation) { + HttpAuthLocation["HEADER"] = "header"; + HttpAuthLocation["QUERY"] = "query"; +})(HttpAuthLocation || (HttpAuthLocation = {})); diff --git a/amplify/functions/downloadDocument/node_modules/@smithy/types/dist-es/auth/index.js 
b/amplify/functions/downloadDocument/node_modules/@smithy/types/dist-es/auth/index.js new file mode 100644 index 0000000..7436030 --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@smithy/types/dist-es/auth/index.js @@ -0,0 +1,6 @@ +export * from "./auth"; +export * from "./HttpApiKeyAuth"; +export * from "./HttpAuthScheme"; +export * from "./HttpAuthSchemeProvider"; +export * from "./HttpSigner"; +export * from "./IdentityProviderConfig"; diff --git a/amplify/functions/downloadDocument/node_modules/@smithy/types/dist-es/blob/blob-payload-input-types.js b/amplify/functions/downloadDocument/node_modules/@smithy/types/dist-es/blob/blob-payload-input-types.js new file mode 100644 index 0000000..cb0ff5c --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@smithy/types/dist-es/blob/blob-payload-input-types.js @@ -0,0 +1 @@ +export {}; diff --git a/amplify/functions/downloadDocument/node_modules/@smithy/types/dist-es/checksum.js b/amplify/functions/downloadDocument/node_modules/@smithy/types/dist-es/checksum.js new file mode 100644 index 0000000..cb0ff5c --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@smithy/types/dist-es/checksum.js @@ -0,0 +1 @@ +export {}; diff --git a/amplify/functions/downloadDocument/node_modules/@smithy/types/dist-es/client.js b/amplify/functions/downloadDocument/node_modules/@smithy/types/dist-es/client.js new file mode 100644 index 0000000..cb0ff5c --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@smithy/types/dist-es/client.js @@ -0,0 +1 @@ +export {}; diff --git a/amplify/functions/downloadDocument/node_modules/@smithy/types/dist-es/command.js b/amplify/functions/downloadDocument/node_modules/@smithy/types/dist-es/command.js new file mode 100644 index 0000000..cb0ff5c --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@smithy/types/dist-es/command.js @@ -0,0 +1 @@ +export {}; diff --git 
a/amplify/functions/downloadDocument/node_modules/@smithy/types/dist-es/connection/config.js b/amplify/functions/downloadDocument/node_modules/@smithy/types/dist-es/connection/config.js new file mode 100644 index 0000000..cb0ff5c --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@smithy/types/dist-es/connection/config.js @@ -0,0 +1 @@ +export {}; diff --git a/amplify/functions/downloadDocument/node_modules/@smithy/types/dist-es/connection/index.js b/amplify/functions/downloadDocument/node_modules/@smithy/types/dist-es/connection/index.js new file mode 100644 index 0000000..c6c3ea8 --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@smithy/types/dist-es/connection/index.js @@ -0,0 +1,3 @@ +export * from "./config"; +export * from "./manager"; +export * from "./pool"; diff --git a/amplify/functions/downloadDocument/node_modules/@smithy/types/dist-es/connection/manager.js b/amplify/functions/downloadDocument/node_modules/@smithy/types/dist-es/connection/manager.js new file mode 100644 index 0000000..cb0ff5c --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@smithy/types/dist-es/connection/manager.js @@ -0,0 +1 @@ +export {}; diff --git a/amplify/functions/downloadDocument/node_modules/@smithy/types/dist-es/connection/pool.js b/amplify/functions/downloadDocument/node_modules/@smithy/types/dist-es/connection/pool.js new file mode 100644 index 0000000..cb0ff5c --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@smithy/types/dist-es/connection/pool.js @@ -0,0 +1 @@ +export {}; diff --git a/amplify/functions/downloadDocument/node_modules/@smithy/types/dist-es/crypto.js b/amplify/functions/downloadDocument/node_modules/@smithy/types/dist-es/crypto.js new file mode 100644 index 0000000..cb0ff5c --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@smithy/types/dist-es/crypto.js @@ -0,0 +1 @@ +export {}; diff --git 
a/amplify/functions/downloadDocument/node_modules/@smithy/types/dist-es/downlevel-ts3.4/transform/type-transform.js b/amplify/functions/downloadDocument/node_modules/@smithy/types/dist-es/downlevel-ts3.4/transform/type-transform.js new file mode 100644 index 0000000..cb0ff5c --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@smithy/types/dist-es/downlevel-ts3.4/transform/type-transform.js @@ -0,0 +1 @@ +export {}; diff --git a/amplify/functions/downloadDocument/node_modules/@smithy/types/dist-es/encode.js b/amplify/functions/downloadDocument/node_modules/@smithy/types/dist-es/encode.js new file mode 100644 index 0000000..cb0ff5c --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@smithy/types/dist-es/encode.js @@ -0,0 +1 @@ +export {}; diff --git a/amplify/functions/downloadDocument/node_modules/@smithy/types/dist-es/endpoint.js b/amplify/functions/downloadDocument/node_modules/@smithy/types/dist-es/endpoint.js new file mode 100644 index 0000000..4ae601f --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@smithy/types/dist-es/endpoint.js @@ -0,0 +1,5 @@ +export var EndpointURLScheme; +(function (EndpointURLScheme) { + EndpointURLScheme["HTTP"] = "http"; + EndpointURLScheme["HTTPS"] = "https"; +})(EndpointURLScheme || (EndpointURLScheme = {})); diff --git a/amplify/functions/downloadDocument/node_modules/@smithy/types/dist-es/endpoints/EndpointRuleObject.js b/amplify/functions/downloadDocument/node_modules/@smithy/types/dist-es/endpoints/EndpointRuleObject.js new file mode 100644 index 0000000..cb0ff5c --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@smithy/types/dist-es/endpoints/EndpointRuleObject.js @@ -0,0 +1 @@ +export {}; diff --git a/amplify/functions/downloadDocument/node_modules/@smithy/types/dist-es/endpoints/ErrorRuleObject.js b/amplify/functions/downloadDocument/node_modules/@smithy/types/dist-es/endpoints/ErrorRuleObject.js new file mode 100644 index 0000000..cb0ff5c --- /dev/null 
+++ b/amplify/functions/downloadDocument/node_modules/@smithy/types/dist-es/endpoints/ErrorRuleObject.js @@ -0,0 +1 @@ +export {}; diff --git a/amplify/functions/downloadDocument/node_modules/@smithy/types/dist-es/endpoints/RuleSetObject.js b/amplify/functions/downloadDocument/node_modules/@smithy/types/dist-es/endpoints/RuleSetObject.js new file mode 100644 index 0000000..cb0ff5c --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@smithy/types/dist-es/endpoints/RuleSetObject.js @@ -0,0 +1 @@ +export {}; diff --git a/amplify/functions/downloadDocument/node_modules/@smithy/types/dist-es/endpoints/TreeRuleObject.js b/amplify/functions/downloadDocument/node_modules/@smithy/types/dist-es/endpoints/TreeRuleObject.js new file mode 100644 index 0000000..cb0ff5c --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@smithy/types/dist-es/endpoints/TreeRuleObject.js @@ -0,0 +1 @@ +export {}; diff --git a/amplify/functions/downloadDocument/node_modules/@smithy/types/dist-es/endpoints/index.js b/amplify/functions/downloadDocument/node_modules/@smithy/types/dist-es/endpoints/index.js new file mode 100644 index 0000000..64d85cf --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@smithy/types/dist-es/endpoints/index.js @@ -0,0 +1,5 @@ +export * from "./EndpointRuleObject"; +export * from "./ErrorRuleObject"; +export * from "./RuleSetObject"; +export * from "./shared"; +export * from "./TreeRuleObject"; diff --git a/amplify/functions/downloadDocument/node_modules/@smithy/types/dist-es/endpoints/shared.js b/amplify/functions/downloadDocument/node_modules/@smithy/types/dist-es/endpoints/shared.js new file mode 100644 index 0000000..cb0ff5c --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@smithy/types/dist-es/endpoints/shared.js @@ -0,0 +1 @@ +export {}; diff --git a/amplify/functions/downloadDocument/node_modules/@smithy/types/dist-es/eventStream.js 
b/amplify/functions/downloadDocument/node_modules/@smithy/types/dist-es/eventStream.js new file mode 100644 index 0000000..cb0ff5c --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@smithy/types/dist-es/eventStream.js @@ -0,0 +1 @@ +export {}; diff --git a/amplify/functions/downloadDocument/node_modules/@smithy/types/dist-es/extensions/checksum.js b/amplify/functions/downloadDocument/node_modules/@smithy/types/dist-es/extensions/checksum.js new file mode 100644 index 0000000..5a7939e --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@smithy/types/dist-es/extensions/checksum.js @@ -0,0 +1,38 @@ +export var AlgorithmId; +(function (AlgorithmId) { + AlgorithmId["MD5"] = "md5"; + AlgorithmId["CRC32"] = "crc32"; + AlgorithmId["CRC32C"] = "crc32c"; + AlgorithmId["SHA1"] = "sha1"; + AlgorithmId["SHA256"] = "sha256"; +})(AlgorithmId || (AlgorithmId = {})); +export const getChecksumConfiguration = (runtimeConfig) => { + const checksumAlgorithms = []; + if (runtimeConfig.sha256 !== undefined) { + checksumAlgorithms.push({ + algorithmId: () => AlgorithmId.SHA256, + checksumConstructor: () => runtimeConfig.sha256, + }); + } + if (runtimeConfig.md5 != undefined) { + checksumAlgorithms.push({ + algorithmId: () => AlgorithmId.MD5, + checksumConstructor: () => runtimeConfig.md5, + }); + } + return { + addChecksumAlgorithm(algo) { + checksumAlgorithms.push(algo); + }, + checksumAlgorithms() { + return checksumAlgorithms; + }, + }; +}; +export const resolveChecksumRuntimeConfig = (clientConfig) => { + const runtimeConfig = {}; + clientConfig.checksumAlgorithms().forEach((checksumAlgorithm) => { + runtimeConfig[checksumAlgorithm.algorithmId()] = checksumAlgorithm.checksumConstructor(); + }); + return runtimeConfig; +}; diff --git a/amplify/functions/downloadDocument/node_modules/@smithy/types/dist-es/extensions/defaultClientConfiguration.js 
b/amplify/functions/downloadDocument/node_modules/@smithy/types/dist-es/extensions/defaultClientConfiguration.js new file mode 100644 index 0000000..4e3eb91 --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@smithy/types/dist-es/extensions/defaultClientConfiguration.js @@ -0,0 +1,7 @@ +import { getChecksumConfiguration, resolveChecksumRuntimeConfig } from "./checksum"; +export const getDefaultClientConfiguration = (runtimeConfig) => { + return getChecksumConfiguration(runtimeConfig); +}; +export const resolveDefaultRuntimeConfig = (config) => { + return resolveChecksumRuntimeConfig(config); +}; diff --git a/amplify/functions/downloadDocument/node_modules/@smithy/types/dist-es/extensions/defaultExtensionConfiguration.js b/amplify/functions/downloadDocument/node_modules/@smithy/types/dist-es/extensions/defaultExtensionConfiguration.js new file mode 100644 index 0000000..cb0ff5c --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@smithy/types/dist-es/extensions/defaultExtensionConfiguration.js @@ -0,0 +1 @@ +export {}; diff --git a/amplify/functions/downloadDocument/node_modules/@smithy/types/dist-es/extensions/index.js b/amplify/functions/downloadDocument/node_modules/@smithy/types/dist-es/extensions/index.js new file mode 100644 index 0000000..0fa92d9 --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@smithy/types/dist-es/extensions/index.js @@ -0,0 +1,3 @@ +export * from "./defaultClientConfiguration"; +export * from "./defaultExtensionConfiguration"; +export { AlgorithmId } from "./checksum"; diff --git a/amplify/functions/downloadDocument/node_modules/@smithy/types/dist-es/extensions/retry.js b/amplify/functions/downloadDocument/node_modules/@smithy/types/dist-es/extensions/retry.js new file mode 100644 index 0000000..cb0ff5c --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@smithy/types/dist-es/extensions/retry.js @@ -0,0 +1 @@ +export {}; diff --git 
a/amplify/functions/downloadDocument/node_modules/@smithy/types/dist-es/externals-check/browser-externals-check.js b/amplify/functions/downloadDocument/node_modules/@smithy/types/dist-es/externals-check/browser-externals-check.js new file mode 100644 index 0000000..cb0ff5c --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@smithy/types/dist-es/externals-check/browser-externals-check.js @@ -0,0 +1 @@ +export {}; diff --git a/amplify/functions/downloadDocument/node_modules/@smithy/types/dist-es/feature-ids.js b/amplify/functions/downloadDocument/node_modules/@smithy/types/dist-es/feature-ids.js new file mode 100644 index 0000000..cb0ff5c --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@smithy/types/dist-es/feature-ids.js @@ -0,0 +1 @@ +export {}; diff --git a/amplify/functions/downloadDocument/node_modules/@smithy/types/dist-es/http.js b/amplify/functions/downloadDocument/node_modules/@smithy/types/dist-es/http.js new file mode 100644 index 0000000..27b22f0 --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@smithy/types/dist-es/http.js @@ -0,0 +1,5 @@ +export var FieldPosition; +(function (FieldPosition) { + FieldPosition[FieldPosition["HEADER"] = 0] = "HEADER"; + FieldPosition[FieldPosition["TRAILER"] = 1] = "TRAILER"; +})(FieldPosition || (FieldPosition = {})); diff --git a/amplify/functions/downloadDocument/node_modules/@smithy/types/dist-es/http/httpHandlerInitialization.js b/amplify/functions/downloadDocument/node_modules/@smithy/types/dist-es/http/httpHandlerInitialization.js new file mode 100644 index 0000000..cb0ff5c --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@smithy/types/dist-es/http/httpHandlerInitialization.js @@ -0,0 +1 @@ +export {}; diff --git a/amplify/functions/downloadDocument/node_modules/@smithy/types/dist-es/identity/apiKeyIdentity.js b/amplify/functions/downloadDocument/node_modules/@smithy/types/dist-es/identity/apiKeyIdentity.js new file mode 100644 index 
0000000..cb0ff5c --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@smithy/types/dist-es/identity/apiKeyIdentity.js @@ -0,0 +1 @@ +export {}; diff --git a/amplify/functions/downloadDocument/node_modules/@smithy/types/dist-es/identity/awsCredentialIdentity.js b/amplify/functions/downloadDocument/node_modules/@smithy/types/dist-es/identity/awsCredentialIdentity.js new file mode 100644 index 0000000..cb0ff5c --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@smithy/types/dist-es/identity/awsCredentialIdentity.js @@ -0,0 +1 @@ +export {}; diff --git a/amplify/functions/downloadDocument/node_modules/@smithy/types/dist-es/identity/identity.js b/amplify/functions/downloadDocument/node_modules/@smithy/types/dist-es/identity/identity.js new file mode 100644 index 0000000..cb0ff5c --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@smithy/types/dist-es/identity/identity.js @@ -0,0 +1 @@ +export {}; diff --git a/amplify/functions/downloadDocument/node_modules/@smithy/types/dist-es/identity/index.js b/amplify/functions/downloadDocument/node_modules/@smithy/types/dist-es/identity/index.js new file mode 100644 index 0000000..3360320 --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@smithy/types/dist-es/identity/index.js @@ -0,0 +1,4 @@ +export * from "./apiKeyIdentity"; +export * from "./awsCredentialIdentity"; +export * from "./identity"; +export * from "./tokenIdentity"; diff --git a/amplify/functions/downloadDocument/node_modules/@smithy/types/dist-es/identity/tokenIdentity.js b/amplify/functions/downloadDocument/node_modules/@smithy/types/dist-es/identity/tokenIdentity.js new file mode 100644 index 0000000..cb0ff5c --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@smithy/types/dist-es/identity/tokenIdentity.js @@ -0,0 +1 @@ +export {}; diff --git a/amplify/functions/downloadDocument/node_modules/@smithy/types/dist-es/index.js 
b/amplify/functions/downloadDocument/node_modules/@smithy/types/dist-es/index.js new file mode 100644 index 0000000..c370335 --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@smithy/types/dist-es/index.js @@ -0,0 +1,37 @@ +export * from "./abort"; +export * from "./auth"; +export * from "./blob/blob-payload-input-types"; +export * from "./checksum"; +export * from "./client"; +export * from "./command"; +export * from "./connection"; +export * from "./crypto"; +export * from "./encode"; +export * from "./endpoint"; +export * from "./endpoints"; +export * from "./eventStream"; +export * from "./extensions"; +export * from "./feature-ids"; +export * from "./http"; +export * from "./http/httpHandlerInitialization"; +export * from "./identity"; +export * from "./logger"; +export * from "./middleware"; +export * from "./pagination"; +export * from "./profile"; +export * from "./response"; +export * from "./retry"; +export * from "./serde"; +export * from "./shapes"; +export * from "./signature"; +export * from "./stream"; +export * from "./streaming-payload/streaming-blob-common-types"; +export * from "./streaming-payload/streaming-blob-payload-input-types"; +export * from "./streaming-payload/streaming-blob-payload-output-types"; +export * from "./transfer"; +export * from "./transform/client-payload-blob-type-narrow"; +export * from "./transform/no-undefined"; +export * from "./transform/type-transform"; +export * from "./uri"; +export * from "./util"; +export * from "./waiter"; diff --git a/amplify/functions/downloadDocument/node_modules/@smithy/types/dist-es/logger.js b/amplify/functions/downloadDocument/node_modules/@smithy/types/dist-es/logger.js new file mode 100644 index 0000000..cb0ff5c --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@smithy/types/dist-es/logger.js @@ -0,0 +1 @@ +export {}; diff --git a/amplify/functions/downloadDocument/node_modules/@smithy/types/dist-es/middleware.js 
b/amplify/functions/downloadDocument/node_modules/@smithy/types/dist-es/middleware.js new file mode 100644 index 0000000..7d0d050 --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@smithy/types/dist-es/middleware.js @@ -0,0 +1 @@ +export const SMITHY_CONTEXT_KEY = "__smithy_context"; diff --git a/amplify/functions/downloadDocument/node_modules/@smithy/types/dist-es/pagination.js b/amplify/functions/downloadDocument/node_modules/@smithy/types/dist-es/pagination.js new file mode 100644 index 0000000..cb0ff5c --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@smithy/types/dist-es/pagination.js @@ -0,0 +1 @@ +export {}; diff --git a/amplify/functions/downloadDocument/node_modules/@smithy/types/dist-es/profile.js b/amplify/functions/downloadDocument/node_modules/@smithy/types/dist-es/profile.js new file mode 100644 index 0000000..9d56c8d --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@smithy/types/dist-es/profile.js @@ -0,0 +1,6 @@ +export var IniSectionType; +(function (IniSectionType) { + IniSectionType["PROFILE"] = "profile"; + IniSectionType["SSO_SESSION"] = "sso-session"; + IniSectionType["SERVICES"] = "services"; +})(IniSectionType || (IniSectionType = {})); diff --git a/amplify/functions/downloadDocument/node_modules/@smithy/types/dist-es/response.js b/amplify/functions/downloadDocument/node_modules/@smithy/types/dist-es/response.js new file mode 100644 index 0000000..cb0ff5c --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@smithy/types/dist-es/response.js @@ -0,0 +1 @@ +export {}; diff --git a/amplify/functions/downloadDocument/node_modules/@smithy/types/dist-es/retry.js b/amplify/functions/downloadDocument/node_modules/@smithy/types/dist-es/retry.js new file mode 100644 index 0000000..cb0ff5c --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@smithy/types/dist-es/retry.js @@ -0,0 +1 @@ +export {}; diff --git 
a/amplify/functions/downloadDocument/node_modules/@smithy/types/dist-es/serde.js b/amplify/functions/downloadDocument/node_modules/@smithy/types/dist-es/serde.js new file mode 100644 index 0000000..cb0ff5c --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@smithy/types/dist-es/serde.js @@ -0,0 +1 @@ +export {}; diff --git a/amplify/functions/downloadDocument/node_modules/@smithy/types/dist-es/shapes.js b/amplify/functions/downloadDocument/node_modules/@smithy/types/dist-es/shapes.js new file mode 100644 index 0000000..cb0ff5c --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@smithy/types/dist-es/shapes.js @@ -0,0 +1 @@ +export {}; diff --git a/amplify/functions/downloadDocument/node_modules/@smithy/types/dist-es/signature.js b/amplify/functions/downloadDocument/node_modules/@smithy/types/dist-es/signature.js new file mode 100644 index 0000000..cb0ff5c --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@smithy/types/dist-es/signature.js @@ -0,0 +1 @@ +export {}; diff --git a/amplify/functions/downloadDocument/node_modules/@smithy/types/dist-es/stream.js b/amplify/functions/downloadDocument/node_modules/@smithy/types/dist-es/stream.js new file mode 100644 index 0000000..cb0ff5c --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@smithy/types/dist-es/stream.js @@ -0,0 +1 @@ +export {}; diff --git a/amplify/functions/downloadDocument/node_modules/@smithy/types/dist-es/streaming-payload/streaming-blob-common-types.js b/amplify/functions/downloadDocument/node_modules/@smithy/types/dist-es/streaming-payload/streaming-blob-common-types.js new file mode 100644 index 0000000..cb0ff5c --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@smithy/types/dist-es/streaming-payload/streaming-blob-common-types.js @@ -0,0 +1 @@ +export {}; diff --git a/amplify/functions/downloadDocument/node_modules/@smithy/types/dist-es/streaming-payload/streaming-blob-payload-input-types.js 
b/amplify/functions/downloadDocument/node_modules/@smithy/types/dist-es/streaming-payload/streaming-blob-payload-input-types.js new file mode 100644 index 0000000..cb0ff5c --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@smithy/types/dist-es/streaming-payload/streaming-blob-payload-input-types.js @@ -0,0 +1 @@ +export {}; diff --git a/amplify/functions/downloadDocument/node_modules/@smithy/types/dist-es/streaming-payload/streaming-blob-payload-output-types.js b/amplify/functions/downloadDocument/node_modules/@smithy/types/dist-es/streaming-payload/streaming-blob-payload-output-types.js new file mode 100644 index 0000000..cb0ff5c --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@smithy/types/dist-es/streaming-payload/streaming-blob-payload-output-types.js @@ -0,0 +1 @@ +export {}; diff --git a/amplify/functions/downloadDocument/node_modules/@smithy/types/dist-es/transfer.js b/amplify/functions/downloadDocument/node_modules/@smithy/types/dist-es/transfer.js new file mode 100644 index 0000000..f776151 --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@smithy/types/dist-es/transfer.js @@ -0,0 +1,6 @@ +export var RequestHandlerProtocol; +(function (RequestHandlerProtocol) { + RequestHandlerProtocol["HTTP_0_9"] = "http/0.9"; + RequestHandlerProtocol["HTTP_1_0"] = "http/1.0"; + RequestHandlerProtocol["TDS_8_0"] = "tds/8.0"; +})(RequestHandlerProtocol || (RequestHandlerProtocol = {})); diff --git a/amplify/functions/downloadDocument/node_modules/@smithy/types/dist-es/transform/client-method-transforms.js b/amplify/functions/downloadDocument/node_modules/@smithy/types/dist-es/transform/client-method-transforms.js new file mode 100644 index 0000000..cb0ff5c --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@smithy/types/dist-es/transform/client-method-transforms.js @@ -0,0 +1 @@ +export {}; diff --git 
a/amplify/functions/downloadDocument/node_modules/@smithy/types/dist-es/transform/client-payload-blob-type-narrow.js b/amplify/functions/downloadDocument/node_modules/@smithy/types/dist-es/transform/client-payload-blob-type-narrow.js new file mode 100644 index 0000000..cb0ff5c --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@smithy/types/dist-es/transform/client-payload-blob-type-narrow.js @@ -0,0 +1 @@ +export {}; diff --git a/amplify/functions/downloadDocument/node_modules/@smithy/types/dist-es/transform/exact.js b/amplify/functions/downloadDocument/node_modules/@smithy/types/dist-es/transform/exact.js new file mode 100644 index 0000000..cb0ff5c --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@smithy/types/dist-es/transform/exact.js @@ -0,0 +1 @@ +export {}; diff --git a/amplify/functions/downloadDocument/node_modules/@smithy/types/dist-es/transform/no-undefined.js b/amplify/functions/downloadDocument/node_modules/@smithy/types/dist-es/transform/no-undefined.js new file mode 100644 index 0000000..cb0ff5c --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@smithy/types/dist-es/transform/no-undefined.js @@ -0,0 +1 @@ +export {}; diff --git a/amplify/functions/downloadDocument/node_modules/@smithy/types/dist-es/transform/type-transform.js b/amplify/functions/downloadDocument/node_modules/@smithy/types/dist-es/transform/type-transform.js new file mode 100644 index 0000000..cb0ff5c --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@smithy/types/dist-es/transform/type-transform.js @@ -0,0 +1 @@ +export {}; diff --git a/amplify/functions/downloadDocument/node_modules/@smithy/types/dist-es/uri.js b/amplify/functions/downloadDocument/node_modules/@smithy/types/dist-es/uri.js new file mode 100644 index 0000000..cb0ff5c --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@smithy/types/dist-es/uri.js @@ -0,0 +1 @@ +export {}; diff --git 
a/amplify/functions/downloadDocument/node_modules/@smithy/types/dist-es/util.js b/amplify/functions/downloadDocument/node_modules/@smithy/types/dist-es/util.js new file mode 100644 index 0000000..cb0ff5c --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@smithy/types/dist-es/util.js @@ -0,0 +1 @@ +export {}; diff --git a/amplify/functions/downloadDocument/node_modules/@smithy/types/dist-es/waiter.js b/amplify/functions/downloadDocument/node_modules/@smithy/types/dist-es/waiter.js new file mode 100644 index 0000000..cb0ff5c --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@smithy/types/dist-es/waiter.js @@ -0,0 +1 @@ +export {}; diff --git a/amplify/functions/downloadDocument/node_modules/@smithy/types/dist-types/abort-handler.d.ts b/amplify/functions/downloadDocument/node_modules/@smithy/types/dist-types/abort-handler.d.ts new file mode 100644 index 0000000..09a0544 --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@smithy/types/dist-types/abort-handler.d.ts @@ -0,0 +1,7 @@ +import type { AbortSignal as DeprecatedAbortSignal } from "./abort"; +/** + * @public + */ +export interface AbortHandler { + (this: AbortSignal | DeprecatedAbortSignal, ev: any): any; +} diff --git a/amplify/functions/downloadDocument/node_modules/@smithy/types/dist-types/abort.d.ts b/amplify/functions/downloadDocument/node_modules/@smithy/types/dist-types/abort.d.ts new file mode 100644 index 0000000..80fc87f --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@smithy/types/dist-types/abort.d.ts @@ -0,0 +1,50 @@ +import type { AbortHandler } from "./abort-handler"; +/** + * @public + */ +export { AbortHandler }; +/** + * @public + * @deprecated use platform (global) type for AbortSignal. + * + * Holders of an AbortSignal object may query if the associated operation has + * been aborted and register an onabort handler. 
+ * + * @see https://developer.mozilla.org/en-US/docs/Web/API/AbortSignal + */ +export interface AbortSignal { + /** + * Whether the action represented by this signal has been cancelled. + */ + readonly aborted: boolean; + /** + * A function to be invoked when the action represented by this signal has + * been cancelled. + */ + onabort: AbortHandler | Function | null; +} +/** + * @public + * @deprecated use platform (global) type for AbortController. + * + * The AWS SDK uses a Controller/Signal model to allow for cooperative + * cancellation of asynchronous operations. When initiating such an operation, + * the caller can create an AbortController and then provide linked signal to + * subtasks. This allows a single source to communicate to multiple consumers + * that an action has been aborted without dictating how that cancellation + * should be handled. + * + * @see https://developer.mozilla.org/en-US/docs/Web/API/AbortController + */ +export interface AbortController { + /** + * An object that reports whether the action associated with this + * `AbortController` has been cancelled. + */ + readonly signal: AbortSignal; + /** + * Declares the operation associated with this AbortController to have been + * cancelled. 
+ */ + abort(): void; +} diff --git a/amplify/functions/downloadDocument/node_modules/@smithy/types/dist-types/auth/HttpApiKeyAuth.d.ts b/amplify/functions/downloadDocument/node_modules/@smithy/types/dist-types/auth/HttpApiKeyAuth.d.ts new file mode 100644 index 0000000..5d74340 --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@smithy/types/dist-types/auth/HttpApiKeyAuth.d.ts @@ -0,0 +1,7 @@ +/** + * @internal + */ +export declare enum HttpApiKeyAuthLocation { + HEADER = "header", + QUERY = "query" +} diff --git a/amplify/functions/downloadDocument/node_modules/@smithy/types/dist-types/auth/HttpAuthScheme.d.ts b/amplify/functions/downloadDocument/node_modules/@smithy/types/dist-types/auth/HttpAuthScheme.d.ts new file mode 100644 index 0000000..c5be532 --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@smithy/types/dist-types/auth/HttpAuthScheme.d.ts @@ -0,0 +1,49 @@ +import { Identity, IdentityProvider } from "../identity/identity"; +import { HandlerExecutionContext } from "../middleware"; +import { HttpSigner } from "./HttpSigner"; +import { IdentityProviderConfig } from "./IdentityProviderConfig"; +/** + * ID for {@link HttpAuthScheme} + * @internal + */ +export type HttpAuthSchemeId = string; +/** + * Interface that defines an HttpAuthScheme + * @internal + */ +export interface HttpAuthScheme { + /** + * ID for an HttpAuthScheme, typically the absolute shape ID of a Smithy auth trait. + */ + schemeId: HttpAuthSchemeId; + /** + * Gets the IdentityProvider corresponding to an HttpAuthScheme. + */ + identityProvider(config: IdentityProviderConfig): IdentityProvider | undefined; + /** + * HttpSigner corresponding to an HttpAuthScheme. + */ + signer: HttpSigner; +} +/** + * Interface that defines the identity and signing properties when selecting + * an HttpAuthScheme. 
+ * @internal + */ +export interface HttpAuthOption { + schemeId: HttpAuthSchemeId; + identityProperties?: Record; + signingProperties?: Record; + propertiesExtractor?: (config: TConfig, context: TContext) => { + identityProperties?: Record; + signingProperties?: Record; + }; +} +/** + * @internal + */ +export interface SelectedHttpAuthScheme { + httpAuthOption: HttpAuthOption; + identity: Identity; + signer: HttpSigner; +} diff --git a/amplify/functions/downloadDocument/node_modules/@smithy/types/dist-types/auth/HttpAuthSchemeProvider.d.ts b/amplify/functions/downloadDocument/node_modules/@smithy/types/dist-types/auth/HttpAuthSchemeProvider.d.ts new file mode 100644 index 0000000..710dc8f --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@smithy/types/dist-types/auth/HttpAuthSchemeProvider.d.ts @@ -0,0 +1,20 @@ +import { HandlerExecutionContext } from "../middleware"; +import { HttpAuthOption } from "./HttpAuthScheme"; +/** + * @internal + */ +export interface HttpAuthSchemeParameters { + operation?: string; +} +/** + * @internal + */ +export interface HttpAuthSchemeProvider { + (authParameters: TParameters): HttpAuthOption[]; +} +/** + * @internal + */ +export interface HttpAuthSchemeParametersProvider { + (config: TConfig, context: TContext, input: TInput): Promise; +} diff --git a/amplify/functions/downloadDocument/node_modules/@smithy/types/dist-types/auth/HttpSigner.d.ts b/amplify/functions/downloadDocument/node_modules/@smithy/types/dist-types/auth/HttpSigner.d.ts new file mode 100644 index 0000000..ea2969c --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@smithy/types/dist-types/auth/HttpSigner.d.ts @@ -0,0 +1,41 @@ +import { HttpRequest, HttpResponse } from "../http"; +import { Identity } from "../identity/identity"; +/** + * @internal + */ +export interface ErrorHandler { + (signingProperties: Record): (error: E) => never; +} +/** + * @internal + */ +export interface SuccessHandler { + (httpResponse: HttpResponse | 
unknown, signingProperties: Record): void; +} +/** + * Interface to sign identity and signing properties. + * @internal + */ +export interface HttpSigner { + /** + * Signs an HttpRequest with an identity and signing properties. + * @param httpRequest request to sign + * @param identity identity to sing the request with + * @param signingProperties property bag for signing + * @returns signed request in a promise + */ + sign(httpRequest: HttpRequest, identity: Identity, signingProperties: Record): Promise; + /** + * Handler that executes after the {@link HttpSigner.sign} invocation and corresponding + * middleware throws an error. + * The error handler is expected to throw the error it receives, so the return type of the error handler is `never`. + * @internal + */ + errorHandler?: ErrorHandler; + /** + * Handler that executes after the {@link HttpSigner.sign} invocation and corresponding + * middleware succeeds. + * @internal + */ + successHandler?: SuccessHandler; +} diff --git a/amplify/functions/downloadDocument/node_modules/@smithy/types/dist-types/auth/IdentityProviderConfig.d.ts b/amplify/functions/downloadDocument/node_modules/@smithy/types/dist-types/auth/IdentityProviderConfig.d.ts new file mode 100644 index 0000000..663d2ec --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@smithy/types/dist-types/auth/IdentityProviderConfig.d.ts @@ -0,0 +1,14 @@ +import { Identity, IdentityProvider } from "../identity/identity"; +import { HttpAuthSchemeId } from "./HttpAuthScheme"; +/** + * Interface to get an IdentityProvider for a specified HttpAuthScheme + * @internal + */ +export interface IdentityProviderConfig { + /** + * Get the IdentityProvider for a specified HttpAuthScheme. 
+ * @param schemeId schemeId of the HttpAuthScheme + * @returns IdentityProvider or undefined if HttpAuthScheme is not found + */ + getIdentityProvider(schemeId: HttpAuthSchemeId): IdentityProvider | undefined; +} diff --git a/amplify/functions/downloadDocument/node_modules/@smithy/types/dist-types/auth/auth.d.ts b/amplify/functions/downloadDocument/node_modules/@smithy/types/dist-types/auth/auth.d.ts new file mode 100644 index 0000000..2aaabbc --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@smithy/types/dist-types/auth/auth.d.ts @@ -0,0 +1,57 @@ +/** + * @internal + * + * Authentication schemes represent a way that the service will authenticate the customer’s identity. + */ +export interface AuthScheme { + /** + * @example "sigv4a" or "sigv4" + */ + name: "sigv4" | "sigv4a" | string; + /** + * @example "s3" + */ + signingName: string; + /** + * @example "us-east-1" + */ + signingRegion: string; + /** + * @example ["*"] + * @example ["us-west-2", "us-east-1"] + */ + signingRegionSet?: string[]; + /** + * @deprecated this field was renamed to signingRegion. + */ + signingScope?: never; + properties: Record; +} +/** + * @internal + * @deprecated + */ +export interface HttpAuthDefinition { + /** + * Defines the location of where the Auth is serialized. + */ + in: HttpAuthLocation; + /** + * Defines the name of the HTTP header or query string parameter + * that contains the Auth. + */ + name: string; + /** + * Defines the security scheme to use on the `Authorization` header value. + * This can only be set if the "in" property is set to {@link HttpAuthLocation.HEADER}. 
+ */ + scheme?: string; +} +/** + * @internal + * @deprecated + */ +export declare enum HttpAuthLocation { + HEADER = "header", + QUERY = "query" +} diff --git a/amplify/functions/downloadDocument/node_modules/@smithy/types/dist-types/auth/index.d.ts b/amplify/functions/downloadDocument/node_modules/@smithy/types/dist-types/auth/index.d.ts new file mode 100644 index 0000000..7436030 --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@smithy/types/dist-types/auth/index.d.ts @@ -0,0 +1,6 @@ +export * from "./auth"; +export * from "./HttpApiKeyAuth"; +export * from "./HttpAuthScheme"; +export * from "./HttpAuthSchemeProvider"; +export * from "./HttpSigner"; +export * from "./IdentityProviderConfig"; diff --git a/amplify/functions/downloadDocument/node_modules/@smithy/types/dist-types/blob/blob-payload-input-types.d.ts b/amplify/functions/downloadDocument/node_modules/@smithy/types/dist-types/blob/blob-payload-input-types.d.ts new file mode 100644 index 0000000..e468bae --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@smithy/types/dist-types/blob/blob-payload-input-types.d.ts @@ -0,0 +1,43 @@ +/// +/// +/// +import { Readable } from "stream"; +import type { BlobOptionalType, ReadableStreamOptionalType } from "../externals-check/browser-externals-check"; +/** + * @public + * + * A union of types that can be used as inputs for the service model + * "blob" type when it represents the request's entire payload or body. + * + * For example, in Lambda::invoke, the payload is modeled as a blob type + * and this union applies to it. + * In contrast, in Lambda::createFunction the Zip file option is a blob type, + * but is not the (entire) payload and this union does not apply. + * + * Note: not all types are signable by the standard SignatureV4 signer when + * used as the request body. For example, in Node.js a Readable stream + * is not signable by the default signer. 
+ * They are included in the union because it may work in some cases, + * but the expected types are primarily string and Uint8Array. + * + * Additional details may be found in the internal + * function "getPayloadHash" in the SignatureV4 module. + */ +export type BlobPayloadInputTypes = string | ArrayBuffer | ArrayBufferView | Uint8Array | NodeJsRuntimeBlobTypes | BrowserRuntimeBlobTypes; +/** + * @public + * + * Additional blob types for the Node.js environment. + */ +export type NodeJsRuntimeBlobTypes = Readable | Buffer; +/** + * @public + * + * Additional blob types for the browser environment. + */ +export type BrowserRuntimeBlobTypes = BlobOptionalType | ReadableStreamOptionalType; +/** + * @internal + * @deprecated renamed to BlobPayloadInputTypes. + */ +export type BlobTypes = BlobPayloadInputTypes; diff --git a/amplify/functions/downloadDocument/node_modules/@smithy/types/dist-types/checksum.d.ts b/amplify/functions/downloadDocument/node_modules/@smithy/types/dist-types/checksum.d.ts new file mode 100644 index 0000000..1906009 --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@smithy/types/dist-types/checksum.d.ts @@ -0,0 +1,63 @@ +import { SourceData } from "./crypto"; +/** + * @public + * + * An object that provides a checksum of data provided in chunks to `update`. + * The checksum may be performed incrementally as chunks are received or all + * at once when the checksum is finalized, depending on the underlying + * implementation. + * + * It's recommended to compute checksum incrementally to avoid reading the + * entire payload in memory. + * + * A class that implements this interface may accept an optional secret key in its + * constructor while computing checksum value, when using HMAC. If provided, + * this secret key would be used when computing checksum. + */ +export interface Checksum { + /** + * Constant length of the digest created by the algorithm in bytes. 
+ */ + digestLength?: number; + /** + * Creates a new checksum object that contains a deep copy of the internal + * state of the current `Checksum` object. + */ + copy?(): Checksum; + /** + * Returns the digest of all of the data passed. + */ + digest(): Promise; + /** + * Allows marking a checksum for checksums that support the ability + * to mark and reset. + * + * @param readLimit - The maximum limit of bytes that can be read + * before the mark position becomes invalid. + */ + mark?(readLimit: number): void; + /** + * Resets the checksum to its initial value. + */ + reset(): void; + /** + * Adds a chunk of data for which checksum needs to be computed. + * This can be called many times with new data as it is streamed. + * + * Implementations may override this method which passes second param + * which makes Checksum object stateless. + * + * @param chunk - The buffer to update checksum with. + */ + update(chunk: Uint8Array): void; +} +/** + * @public + * + * A constructor for a Checksum that may be used to calculate an HMAC. Implementing + * classes should not directly hold the provided key in memory beyond the + * lexical scope of the constructor. + */ +export interface ChecksumConstructor { + new (secret?: SourceData): Checksum; +} diff --git a/amplify/functions/downloadDocument/node_modules/@smithy/types/dist-types/client.d.ts b/amplify/functions/downloadDocument/node_modules/@smithy/types/dist-types/client.d.ts new file mode 100644 index 0000000..8bd8f7e --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@smithy/types/dist-types/client.d.ts @@ -0,0 +1,57 @@ +import { Command } from "./command"; +import { MiddlewareStack } from "./middleware"; +import { MetadataBearer } from "./response"; +import { OptionalParameter } from "./util"; +/** + * @public + * + * A type which checks if the client configuration is optional. + * If all entries of the client configuration are optional, it allows client creation without passing any config. 
+ */ +export type CheckOptionalClientConfig = OptionalParameter; +/** + * @public + * + * function definition for different overrides of client's 'send' function. + */ +export interface InvokeFunction { + (command: Command, options?: any): Promise; + (command: Command, cb: (err: any, data?: OutputType) => void): void; + (command: Command, options: any, cb: (err: any, data?: OutputType) => void): void; + (command: Command, options?: any, cb?: (err: any, data?: OutputType) => void): Promise | void; +} +/** + * @public + * + * Signature that appears on aggregated clients' methods. + */ +export interface InvokeMethod { + (input: InputType, options?: any): Promise; + (input: InputType, cb: (err: any, data?: OutputType) => void): void; + (input: InputType, options: any, cb: (err: any, data?: OutputType) => void): void; + (input: InputType, options?: any, cb?: (err: any, data?: OutputType) => void): Promise | void; +} +/** + * @public + * + * Signature that appears on aggregated clients' methods when argument is optional. + */ +export interface InvokeMethodOptionalArgs { + (): Promise; + (input: InputType, options?: any): Promise; + (input: InputType, cb: (err: any, data?: OutputType) => void): void; + (input: InputType, options: any, cb: (err: any, data?: OutputType) => void): void; + (input: InputType, options?: any, cb?: (err: any, data?: OutputType) => void): Promise | void; +} +/** + * A general interface for service clients, idempotent to browser or node clients + * This type corresponds to SmithyClient(https://github.com/aws/aws-sdk-js-v3/blob/main/packages/smithy-client/src/client.ts). + * It's provided for using without importing the SmithyClient class. 
+ * @internal + */ +export interface Client { + readonly config: ResolvedClientConfiguration; + middlewareStack: MiddlewareStack; + send: InvokeFunction; + destroy: () => void; +} diff --git a/amplify/functions/downloadDocument/node_modules/@smithy/types/dist-types/command.d.ts b/amplify/functions/downloadDocument/node_modules/@smithy/types/dist-types/command.d.ts new file mode 100644 index 0000000..3a71ee7 --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@smithy/types/dist-types/command.d.ts @@ -0,0 +1,23 @@ +import { Handler, MiddlewareStack } from "./middleware"; +import { MetadataBearer } from "./response"; +/** + * @public + */ +export interface Command extends CommandIO { + readonly input: InputType; + readonly middlewareStack: MiddlewareStack; + resolveMiddleware(stack: MiddlewareStack, configuration: ResolvedConfiguration, options: any): Handler; +} +/** + * @internal + * + * This is a subset of the Command type used only to detect the i/o types. + */ +export interface CommandIO { + readonly input: InputType; + resolveMiddleware(stack: any, configuration: any, options: any): Handler; +} +/** + * @internal + */ +export type GetOutputType = Command extends CommandIO ? O : never; diff --git a/amplify/functions/downloadDocument/node_modules/@smithy/types/dist-types/connection/config.d.ts b/amplify/functions/downloadDocument/node_modules/@smithy/types/dist-types/connection/config.d.ts new file mode 100644 index 0000000..f9d4632 --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@smithy/types/dist-types/connection/config.d.ts @@ -0,0 +1,10 @@ +/** + * @public + */ +export interface ConnectConfiguration { + /** + * The maximum time in milliseconds that the connection phase of a request + * may take before the connection attempt is abandoned. 
+ */ + requestTimeout?: number; +} diff --git a/amplify/functions/downloadDocument/node_modules/@smithy/types/dist-types/connection/index.d.ts b/amplify/functions/downloadDocument/node_modules/@smithy/types/dist-types/connection/index.d.ts new file mode 100644 index 0000000..c6c3ea8 --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@smithy/types/dist-types/connection/index.d.ts @@ -0,0 +1,3 @@ +export * from "./config"; +export * from "./manager"; +export * from "./pool"; diff --git a/amplify/functions/downloadDocument/node_modules/@smithy/types/dist-types/connection/manager.d.ts b/amplify/functions/downloadDocument/node_modules/@smithy/types/dist-types/connection/manager.d.ts new file mode 100644 index 0000000..5b1a837 --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@smithy/types/dist-types/connection/manager.d.ts @@ -0,0 +1,34 @@ +import { RequestContext } from "../transfer"; +import { ConnectConfiguration } from "./config"; +/** + * @public + */ +export interface ConnectionManagerConfiguration { + /** + * Maximum number of allowed concurrent requests per connection. + */ + maxConcurrency?: number; + /** + * Disables concurrent requests per connection. + */ + disableConcurrency?: boolean; +} +/** + * @public + */ +export interface ConnectionManager { + /** + * Retrieves a connection from the connection pool if available, + * otherwise establish a new connection + */ + lease(requestContext: RequestContext, connectionConfiguration: ConnectConfiguration): T; + /** + * Releases the connection back to the pool making it potentially + * re-usable by other requests. + */ + release(requestContext: RequestContext, connection: T): void; + /** + * Destroys the connection manager. All connections will be closed. 
+ */ + destroy(): void; +} diff --git a/amplify/functions/downloadDocument/node_modules/@smithy/types/dist-types/connection/pool.d.ts b/amplify/functions/downloadDocument/node_modules/@smithy/types/dist-types/connection/pool.d.ts new file mode 100644 index 0000000..d43530a --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@smithy/types/dist-types/connection/pool.d.ts @@ -0,0 +1,32 @@ +/** + * @public + */ +export interface ConnectionPool { + /** + * Retrieve the first connection in the pool + */ + poll(): T | void; + /** + * Release the connection back to the pool making it potentially + * re-usable by other requests. + */ + offerLast(connection: T): void; + /** + * Removes the connection from the pool, and destroys it. + */ + destroy(connection: T): void; + /** + * Implements the iterable protocol and allows arrays to be consumed + * by most syntaxes expecting iterables, such as the spread syntax + * and for...of loops + */ + [Symbol.iterator](): Iterator; +} +/** + * Unused. + * @internal + * @deprecated + */ +export interface CacheKey { + destination: string; +} diff --git a/amplify/functions/downloadDocument/node_modules/@smithy/types/dist-types/crypto.d.ts b/amplify/functions/downloadDocument/node_modules/@smithy/types/dist-types/crypto.d.ts new file mode 100644 index 0000000..874320e --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@smithy/types/dist-types/crypto.d.ts @@ -0,0 +1,60 @@ +/** + * @public + */ +export type SourceData = string | ArrayBuffer | ArrayBufferView; +/** + * @public + * + * An object that provides a hash of data provided in chunks to `update`. The + * hash may be performed incrementally as chunks are received or all at once + * when the hash is finalized, depending on the underlying implementation. + * + * @deprecated use {@link Checksum} + */ +export interface Hash { + /** + * Adds a chunk of data to the hash. If a buffer is provided, the `encoding` + * argument will be ignored. 
If a string is provided without a specified + * encoding, implementations must assume UTF-8 encoding. + * + * Not all encodings are supported on all platforms, though all must support + * UTF-8. + */ + update(toHash: SourceData, encoding?: "utf8" | "ascii" | "latin1"): void; + /** + * Finalizes the hash and provides a promise that will be fulfilled with the + * raw bytes of the calculated hash. + */ + digest(): Promise; +} +/** + * @public + * + * A constructor for a hash that may be used to calculate an HMAC. Implementing + * classes should not directly hold the provided key in memory beyond the + * lexical scope of the constructor. + * + * @deprecated use {@link ChecksumConstructor} + */ +export interface HashConstructor { + new (secret?: SourceData): Hash; +} +/** + * @public + * + * A function that calculates the hash of a data stream. Determining the hash + * will consume the stream, so only replayable streams should be provided to an + * implementation of this interface. + */ +export interface StreamHasher { + (hashCtor: HashConstructor, stream: StreamType): Promise; +} +/** + * @public + * + * A function that returns a promise fulfilled with bytes from a + * cryptographically secure pseudorandom number generator. + */ +export interface randomValues { + (byteLength: number): Promise; +} diff --git a/amplify/functions/downloadDocument/node_modules/@smithy/types/dist-types/downlevel-ts3.4/transform/type-transform.d.ts b/amplify/functions/downloadDocument/node_modules/@smithy/types/dist-types/downlevel-ts3.4/transform/type-transform.d.ts new file mode 100644 index 0000000..312ae6e --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@smithy/types/dist-types/downlevel-ts3.4/transform/type-transform.d.ts @@ -0,0 +1,25 @@ +/** + * @public + * + * Transforms any members of the object T having type FromType + * to ToType. This applies only to exact type matches. 
+ * + * This is for the case where FromType is a union and only those fields + * matching the same union should be transformed. + */ +export type Transform = RecursiveTransformExact; +/** + * @internal + * + * Returns ToType if T matches exactly with FromType. + */ +type TransformExact = [T] extends [FromType] ? ([FromType] extends [T] ? ToType : T) : T; +/** + * @internal + * + * Applies TransformExact to members of an object recursively. + */ +type RecursiveTransformExact = T extends Function ? T : T extends object ? { + [key in keyof T]: [T[key]] extends [FromType] ? [FromType] extends [T[key]] ? ToType : RecursiveTransformExact : RecursiveTransformExact; +} : TransformExact; +export {}; diff --git a/amplify/functions/downloadDocument/node_modules/@smithy/types/dist-types/encode.d.ts b/amplify/functions/downloadDocument/node_modules/@smithy/types/dist-types/encode.d.ts new file mode 100644 index 0000000..27d3a18 --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@smithy/types/dist-types/encode.d.ts @@ -0,0 +1,31 @@ +import { Message } from "./eventStream"; +/** + * @public + */ +export interface MessageEncoder { + encode(message: Message): Uint8Array; +} +/** + * @public + */ +export interface MessageDecoder { + decode(message: ArrayBufferView): Message; + feed(message: ArrayBufferView): void; + endOfStream(): void; + getMessage(): AvailableMessage; + getAvailableMessages(): AvailableMessages; +} +/** + * @public + */ +export interface AvailableMessage { + getMessage(): Message | undefined; + isEndOfStream(): boolean; +} +/** + * @public + */ +export interface AvailableMessages { + getMessages(): Message[]; + isEndOfStream(): boolean; +} diff --git a/amplify/functions/downloadDocument/node_modules/@smithy/types/dist-types/endpoint.d.ts b/amplify/functions/downloadDocument/node_modules/@smithy/types/dist-types/endpoint.d.ts new file mode 100644 index 0000000..4e93733 --- /dev/null +++ 
b/amplify/functions/downloadDocument/node_modules/@smithy/types/dist-types/endpoint.d.ts @@ -0,0 +1,77 @@ +import { AuthScheme } from "./auth/auth"; +/** + * @public + */ +export interface EndpointPartition { + name: string; + dnsSuffix: string; + dualStackDnsSuffix: string; + supportsFIPS: boolean; + supportsDualStack: boolean; +} +/** + * @public + */ +export interface EndpointARN { + partition: string; + service: string; + region: string; + accountId: string; + resourceId: Array; +} +/** + * @public + */ +export declare enum EndpointURLScheme { + HTTP = "http", + HTTPS = "https" +} +/** + * @public + */ +export interface EndpointURL { + /** + * The URL scheme such as http or https. + */ + scheme: EndpointURLScheme; + /** + * The authority is the host and optional port component of the URL. + */ + authority: string; + /** + * The parsed path segment of the URL. + * This value is as-is as provided by the user. + */ + path: string; + /** + * The parsed path segment of the URL. + * This value is guranteed to start and end with a "/". + */ + normalizedPath: string; + /** + * A boolean indicating whether the authority is an IP address. 
+ */ + isIp: boolean; +} +/** + * @public + */ +export type EndpointObjectProperty = string | boolean | { + [key: string]: EndpointObjectProperty; +} | EndpointObjectProperty[]; +/** + * @public + */ +export interface EndpointV2 { + url: URL; + properties?: { + authSchemes?: AuthScheme[]; + } & Record; + headers?: Record; +} +/** + * @public + */ +export type EndpointParameters = { + [name: string]: undefined | boolean | string | string[]; +}; diff --git a/amplify/functions/downloadDocument/node_modules/@smithy/types/dist-types/endpoints/EndpointRuleObject.d.ts b/amplify/functions/downloadDocument/node_modules/@smithy/types/dist-types/endpoints/EndpointRuleObject.d.ts new file mode 100644 index 0000000..349558e --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@smithy/types/dist-types/endpoints/EndpointRuleObject.d.ts @@ -0,0 +1,27 @@ +import { EndpointObjectProperty } from "../endpoint"; +import { ConditionObject, Expression } from "./shared"; +/** + * @public + */ +export type EndpointObjectProperties = Record; +/** + * @public + */ +export type EndpointObjectHeaders = Record; +/** + * @public + */ +export type EndpointObject = { + url: Expression; + properties?: EndpointObjectProperties; + headers?: EndpointObjectHeaders; +}; +/** + * @public + */ +export type EndpointRuleObject = { + type: "endpoint"; + conditions?: ConditionObject[]; + endpoint: EndpointObject; + documentation?: string; +}; diff --git a/amplify/functions/downloadDocument/node_modules/@smithy/types/dist-types/endpoints/ErrorRuleObject.d.ts b/amplify/functions/downloadDocument/node_modules/@smithy/types/dist-types/endpoints/ErrorRuleObject.d.ts new file mode 100644 index 0000000..9ce0733 --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@smithy/types/dist-types/endpoints/ErrorRuleObject.d.ts @@ -0,0 +1,10 @@ +import { ConditionObject, Expression } from "./shared"; +/** + * @public + */ +export type ErrorRuleObject = { + type: "error"; + conditions?: 
ConditionObject[]; + error: Expression; + documentation?: string; +}; diff --git a/amplify/functions/downloadDocument/node_modules/@smithy/types/dist-types/endpoints/RuleSetObject.d.ts b/amplify/functions/downloadDocument/node_modules/@smithy/types/dist-types/endpoints/RuleSetObject.d.ts new file mode 100644 index 0000000..669b591 --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@smithy/types/dist-types/endpoints/RuleSetObject.d.ts @@ -0,0 +1,28 @@ +import { RuleSetRules } from "./TreeRuleObject"; +/** + * @public + */ +export type DeprecatedObject = { + message?: string; + since?: string; +}; +/** + * @public + */ +export type ParameterObject = { + type: "String" | "string" | "Boolean" | "boolean"; + default?: string | boolean; + required?: boolean; + documentation?: string; + builtIn?: string; + deprecated?: DeprecatedObject; +}; +/** + * @public + */ +export type RuleSetObject = { + version: string; + serviceId?: string; + parameters: Record; + rules: RuleSetRules; +}; diff --git a/amplify/functions/downloadDocument/node_modules/@smithy/types/dist-types/endpoints/TreeRuleObject.d.ts b/amplify/functions/downloadDocument/node_modules/@smithy/types/dist-types/endpoints/TreeRuleObject.d.ts new file mode 100644 index 0000000..180d306 --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@smithy/types/dist-types/endpoints/TreeRuleObject.d.ts @@ -0,0 +1,16 @@ +import { EndpointRuleObject } from "./EndpointRuleObject"; +import { ErrorRuleObject } from "./ErrorRuleObject"; +import { ConditionObject } from "./shared"; +/** + * @public + */ +export type RuleSetRules = Array; +/** + * @public + */ +export type TreeRuleObject = { + type: "tree"; + conditions?: ConditionObject[]; + rules: RuleSetRules; + documentation?: string; +}; diff --git a/amplify/functions/downloadDocument/node_modules/@smithy/types/dist-types/endpoints/index.d.ts b/amplify/functions/downloadDocument/node_modules/@smithy/types/dist-types/endpoints/index.d.ts new file 
mode 100644 index 0000000..64d85cf --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@smithy/types/dist-types/endpoints/index.d.ts @@ -0,0 +1,5 @@ +export * from "./EndpointRuleObject"; +export * from "./ErrorRuleObject"; +export * from "./RuleSetObject"; +export * from "./shared"; +export * from "./TreeRuleObject"; diff --git a/amplify/functions/downloadDocument/node_modules/@smithy/types/dist-types/endpoints/shared.d.ts b/amplify/functions/downloadDocument/node_modules/@smithy/types/dist-types/endpoints/shared.d.ts new file mode 100644 index 0000000..bd11393 --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@smithy/types/dist-types/endpoints/shared.d.ts @@ -0,0 +1,55 @@ +import { Logger } from "../logger"; +/** + * @public + */ +export type ReferenceObject = { + ref: string; +}; +/** + * @public + */ +export type FunctionObject = { + fn: string; + argv: FunctionArgv; +}; +/** + * @public + */ +export type FunctionArgv = Array; +/** + * @public + */ +export type FunctionReturn = string | boolean | number | { + [key: string]: FunctionReturn; +}; +/** + * @public + */ +export type ConditionObject = FunctionObject & { + assign?: string; +}; +/** + * @public + */ +export type Expression = string | ReferenceObject | FunctionObject; +/** + * @public + */ +export type EndpointParams = Record; +/** + * @public + */ +export type EndpointResolverOptions = { + endpointParams: EndpointParams; + logger?: Logger; +}; +/** + * @public + */ +export type ReferenceRecord = Record; +/** + * @public + */ +export type EvaluateOptions = EndpointResolverOptions & { + referenceRecord: ReferenceRecord; +}; diff --git a/amplify/functions/downloadDocument/node_modules/@smithy/types/dist-types/eventStream.d.ts b/amplify/functions/downloadDocument/node_modules/@smithy/types/dist-types/eventStream.d.ts new file mode 100644 index 0000000..7b9af6c --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@smithy/types/dist-types/eventStream.d.ts @@ 
-0,0 +1,137 @@ +import { HttpRequest } from "./http"; +import { FinalizeHandler, FinalizeHandlerArguments, FinalizeHandlerOutput, HandlerExecutionContext } from "./middleware"; +import { MetadataBearer } from "./response"; +/** + * @public + * + * An event stream message. The headers and body properties will always be + * defined, with empty headers represented as an object with no keys and an + * empty body represented as a zero-length Uint8Array. + */ +export interface Message { + headers: MessageHeaders; + body: Uint8Array; +} +/** + * @public + */ +export type MessageHeaders = Record; +/** + * @public + */ +export type HeaderValue = { + type: K; + value: V; +}; +/** + * @public + */ +export type BooleanHeaderValue = HeaderValue<"boolean", boolean>; +/** + * @public + */ +export type ByteHeaderValue = HeaderValue<"byte", number>; +/** + * @public + */ +export type ShortHeaderValue = HeaderValue<"short", number>; +/** + * @public + */ +export type IntegerHeaderValue = HeaderValue<"integer", number>; +/** + * @public + */ +export type LongHeaderValue = HeaderValue<"long", Int64>; +/** + * @public + */ +export type BinaryHeaderValue = HeaderValue<"binary", Uint8Array>; +/** + * @public + */ +export type StringHeaderValue = HeaderValue<"string", string>; +/** + * @public + */ +export type TimestampHeaderValue = HeaderValue<"timestamp", Date>; +/** + * @public + */ +export type UuidHeaderValue = HeaderValue<"uuid", string>; +/** + * @public + */ +export type MessageHeaderValue = BooleanHeaderValue | ByteHeaderValue | ShortHeaderValue | IntegerHeaderValue | LongHeaderValue | BinaryHeaderValue | StringHeaderValue | TimestampHeaderValue | UuidHeaderValue; +/** + * @public + */ +export interface Int64 { + readonly bytes: Uint8Array; + valueOf: () => number; + toString: () => string; +} +/** + * @public + * + * Util functions for serializing or deserializing event stream + */ +export interface EventStreamSerdeContext { + eventStreamMarshaller: EventStreamMarshaller; +} 
+/** + * @public + * + * A function which deserializes binary event stream message into modeled shape. + */ +export interface EventStreamMarshallerDeserFn { + (body: StreamType, deserializer: (input: Record) => Promise): AsyncIterable; +} +/** + * @public + * + * A function that serializes modeled shape into binary stream message. + */ +export interface EventStreamMarshallerSerFn { + (input: AsyncIterable, serializer: (event: T) => Message): StreamType; +} +/** + * @public + * + * An interface which provides functions for serializing and deserializing binary event stream + * to/from corresponsing modeled shape. + */ +export interface EventStreamMarshaller { + deserialize: EventStreamMarshallerDeserFn; + serialize: EventStreamMarshallerSerFn; +} +/** + * @public + */ +export interface EventStreamRequestSigner { + sign(request: HttpRequest): Promise; +} +/** + * @public + */ +export interface EventStreamPayloadHandler { + handle: (next: FinalizeHandler, args: FinalizeHandlerArguments, context?: HandlerExecutionContext) => Promise>; +} +/** + * @public + */ +export interface EventStreamPayloadHandlerProvider { + (options: any): EventStreamPayloadHandler; +} +/** + * @public + */ +export interface EventStreamSerdeProvider { + (options: any): EventStreamMarshaller; +} +/** + * @public + */ +export interface EventStreamSignerProvider { + (options: any): EventStreamRequestSigner; +} diff --git a/amplify/functions/downloadDocument/node_modules/@smithy/types/dist-types/extensions/checksum.d.ts b/amplify/functions/downloadDocument/node_modules/@smithy/types/dist-types/extensions/checksum.d.ts new file mode 100644 index 0000000..88995b9 --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@smithy/types/dist-types/extensions/checksum.d.ts @@ -0,0 +1,58 @@ +import { ChecksumConstructor } from "../checksum"; +import { HashConstructor } from "../crypto"; +/** + * @internal + */ +export declare enum AlgorithmId { + MD5 = "md5", + CRC32 = "crc32", + CRC32C = 
"crc32c", + SHA1 = "sha1", + SHA256 = "sha256" +} +/** + * @internal + */ +export interface ChecksumAlgorithm { + algorithmId(): AlgorithmId; + checksumConstructor(): ChecksumConstructor | HashConstructor; +} +/** + * @deprecated unused. + * @internal + */ +type ChecksumConfigurationLegacy = { + [other in string | number]: any; +}; +/** + * @internal + */ +export interface ChecksumConfiguration extends ChecksumConfigurationLegacy { + addChecksumAlgorithm(algo: ChecksumAlgorithm): void; + checksumAlgorithms(): ChecksumAlgorithm[]; +} +/** + * @deprecated will be removed for implicit type. + * @internal + */ +type GetChecksumConfigurationType = (runtimeConfig: Partial<{ + sha256: ChecksumConstructor | HashConstructor; + md5: ChecksumConstructor | HashConstructor; +}>) => ChecksumConfiguration; +/** + * @internal + * @deprecated will be moved to smithy-client. + */ +export declare const getChecksumConfiguration: GetChecksumConfigurationType; +/** + * @internal + * @deprecated will be removed for implicit type. + */ +type ResolveChecksumRuntimeConfigType = (clientConfig: ChecksumConfiguration) => any; +/** + * @internal + * + * @deprecated will be moved to smithy-client. + */ +export declare const resolveChecksumRuntimeConfig: ResolveChecksumRuntimeConfigType; +export {}; diff --git a/amplify/functions/downloadDocument/node_modules/@smithy/types/dist-types/extensions/defaultClientConfiguration.d.ts b/amplify/functions/downloadDocument/node_modules/@smithy/types/dist-types/extensions/defaultClientConfiguration.d.ts new file mode 100644 index 0000000..12eb924 --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@smithy/types/dist-types/extensions/defaultClientConfiguration.d.ts @@ -0,0 +1,33 @@ +import { ChecksumConfiguration } from "./checksum"; +/** + * @deprecated will be replaced by DefaultExtensionConfiguration. 
+ * @internal + * + * Default client configuration consisting various configurations for modifying a service client + */ +export interface DefaultClientConfiguration extends ChecksumConfiguration { +} +/** + * @deprecated will be removed for implicit type. + */ +type GetDefaultConfigurationType = (runtimeConfig: any) => DefaultClientConfiguration; +/** + * @deprecated moving to @smithy/smithy-client. + * @internal + * + * Helper function to resolve default client configuration from runtime config + * + */ +export declare const getDefaultClientConfiguration: GetDefaultConfigurationType; +/** + * @deprecated will be removed for implicit type. + */ +type ResolveDefaultRuntimeConfigType = (clientConfig: DefaultClientConfiguration) => any; +/** + * @deprecated moving to @smithy/smithy-client. + * @internal + * + * Helper function to resolve runtime config from default client configuration + */ +export declare const resolveDefaultRuntimeConfig: ResolveDefaultRuntimeConfigType; +export {}; diff --git a/amplify/functions/downloadDocument/node_modules/@smithy/types/dist-types/extensions/defaultExtensionConfiguration.d.ts b/amplify/functions/downloadDocument/node_modules/@smithy/types/dist-types/extensions/defaultExtensionConfiguration.d.ts new file mode 100644 index 0000000..0e6fa0d --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@smithy/types/dist-types/extensions/defaultExtensionConfiguration.d.ts @@ -0,0 +1,9 @@ +import { ChecksumConfiguration } from "./checksum"; +import { RetryStrategyConfiguration } from "./retry"; +/** + * @internal + * + * Default extension configuration consisting various configurations for modifying a service client + */ +export interface DefaultExtensionConfiguration extends ChecksumConfiguration, RetryStrategyConfiguration { +} diff --git a/amplify/functions/downloadDocument/node_modules/@smithy/types/dist-types/extensions/index.d.ts 
b/amplify/functions/downloadDocument/node_modules/@smithy/types/dist-types/extensions/index.d.ts new file mode 100644 index 0000000..cce65a1 --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@smithy/types/dist-types/extensions/index.d.ts @@ -0,0 +1,4 @@ +export * from "./defaultClientConfiguration"; +export * from "./defaultExtensionConfiguration"; +export { AlgorithmId, ChecksumAlgorithm, ChecksumConfiguration } from "./checksum"; +export { RetryStrategyConfiguration } from "./retry"; diff --git a/amplify/functions/downloadDocument/node_modules/@smithy/types/dist-types/extensions/retry.d.ts b/amplify/functions/downloadDocument/node_modules/@smithy/types/dist-types/extensions/retry.d.ts new file mode 100644 index 0000000..8b91f1c --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@smithy/types/dist-types/extensions/retry.d.ts @@ -0,0 +1,18 @@ +import { RetryStrategyV2 } from "../retry"; +import { Provider, RetryStrategy } from "../util"; +/** + * A configuration interface with methods called by runtime extension + * @internal + */ +export interface RetryStrategyConfiguration { + /** + * Set retry strategy used for all http requests + * @param retryStrategy + */ + setRetryStrategy(retryStrategy: Provider): void; + /** + * Get retry strategy used for all http requests + * @param retryStrategy + */ + retryStrategy(): Provider; +} diff --git a/amplify/functions/downloadDocument/node_modules/@smithy/types/dist-types/externals-check/browser-externals-check.d.ts b/amplify/functions/downloadDocument/node_modules/@smithy/types/dist-types/externals-check/browser-externals-check.d.ts new file mode 100644 index 0000000..0de7f8f --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@smithy/types/dist-types/externals-check/browser-externals-check.d.ts @@ -0,0 +1,35 @@ +import type { Exact } from "../transform/exact"; +/** + * @public + * + * A checked type that resolves to Blob if it is defined as more than a stub, otherwise + * 
resolves to 'never' so as not to widen the type of unions containing Blob + * excessively. + */ +export type BlobOptionalType = BlobDefined extends true ? Blob : Unavailable; +/** + * @public + * + * A checked type that resolves to ReadableStream if it is defined as more than a stub, otherwise + * resolves to 'never' so as not to widen the type of unions containing ReadableStream + * excessively. + */ +export type ReadableStreamOptionalType = ReadableStreamDefined extends true ? ReadableStream : Unavailable; +/** + * @public + * + * Indicates a type is unavailable if it resolves to this. + */ +export type Unavailable = never; +/** + * @internal + * + * Whether the global types define more than a stub for ReadableStream. + */ +export type ReadableStreamDefined = Exact extends true ? false : true; +/** + * @internal + * + * Whether the global types define more than a stub for Blob. + */ +export type BlobDefined = Exact extends true ? false : true; diff --git a/amplify/functions/downloadDocument/node_modules/@smithy/types/dist-types/feature-ids.d.ts b/amplify/functions/downloadDocument/node_modules/@smithy/types/dist-types/feature-ids.d.ts new file mode 100644 index 0000000..19e4bd2 --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@smithy/types/dist-types/feature-ids.d.ts @@ -0,0 +1,16 @@ +/** + * @internal + */ +export type SmithyFeatures = Partial<{ + RESOURCE_MODEL: "A"; + WAITER: "B"; + PAGINATOR: "C"; + RETRY_MODE_LEGACY: "D"; + RETRY_MODE_STANDARD: "E"; + RETRY_MODE_ADAPTIVE: "F"; + GZIP_REQUEST_COMPRESSION: "L"; + PROTOCOL_RPC_V2_CBOR: "M"; + ENDPOINT_OVERRIDE: "N"; + SIGV4A_SIGNING: "S"; + CREDENTIALS_CODE: "e"; +}>; diff --git a/amplify/functions/downloadDocument/node_modules/@smithy/types/dist-types/http.d.ts b/amplify/functions/downloadDocument/node_modules/@smithy/types/dist-types/http.d.ts new file mode 100644 index 0000000..76c6cb2 --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@smithy/types/dist-types/http.d.ts @@ 
-0,0 +1,112 @@ +import { AbortSignal as DeprecatedAbortSignal } from "./abort"; +import { URI } from "./uri"; +/** + * @public + * + * @deprecated use {@link EndpointV2} from `@smithy/types`. + */ +export interface Endpoint { + protocol: string; + hostname: string; + port?: number; + path: string; + query?: QueryParameterBag; +} +/** + * @public + * + * Interface an HTTP request class. Contains + * addressing information in addition to standard message properties. + */ +export interface HttpRequest extends HttpMessage, URI { + method: string; +} +/** + * @public + * + * Represents an HTTP message as received in reply to a request. Contains a + * numeric status code in addition to standard message properties. + */ +export interface HttpResponse extends HttpMessage { + statusCode: number; + reason?: string; +} +/** + * @public + * + * Represents an HTTP message with headers and an optional static or streaming + * body. body: ArrayBuffer | ArrayBufferView | string | Uint8Array | Readable | ReadableStream; + */ +export interface HttpMessage { + headers: HeaderBag; + body?: any; +} +/** + * @public + * + * A mapping of query parameter names to strings or arrays of strings, with the + * second being used when a parameter contains a list of values. Value can be set + * to null when query is not in key-value pairs shape + */ +export type QueryParameterBag = Record | null>; +/** + * @public + */ +export type FieldOptions = { + name: string; + kind?: FieldPosition; + values?: string[]; +}; +/** + * @public + */ +export declare enum FieldPosition { + HEADER = 0, + TRAILER = 1 +} +/** + * @public + * + * A mapping of header names to string values. Multiple values for the same + * header should be represented as a single string with values separated by + * `, `. + * + * Keys should be considered case insensitive, even if this is not enforced by a + * particular implementation. 
For example, given the following HeaderBag, where + * keys differ only in case: + * + * ```json + * { + * 'x-request-date': '2000-01-01T00:00:00Z', + * 'X-Request-Date': '2001-01-01T00:00:00Z' + * } + * ``` + * + * The SDK may at any point during processing remove one of the object + * properties in favor of the other. The headers may or may not be combined, and + * the SDK will not deterministically select which header candidate to use. + */ +export type HeaderBag = Record; +/** + * @public + * + * Represents an HTTP message with headers and an optional static or streaming + * body. bode: ArrayBuffer | ArrayBufferView | string | Uint8Array | Readable | ReadableStream; + */ +export interface HttpMessage { + headers: HeaderBag; + body?: any; +} +/** + * @public + * + * Represents the options that may be passed to an Http Handler. + */ +export interface HttpHandlerOptions { + abortSignal?: AbortSignal | DeprecatedAbortSignal; + /** + * The maximum time in milliseconds that the connection phase of a request + * may take before the connection attempt is abandoned. + */ + requestTimeout?: number; +} diff --git a/amplify/functions/downloadDocument/node_modules/@smithy/types/dist-types/http/httpHandlerInitialization.d.ts b/amplify/functions/downloadDocument/node_modules/@smithy/types/dist-types/http/httpHandlerInitialization.d.ts new file mode 100644 index 0000000..bca0851 --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@smithy/types/dist-types/http/httpHandlerInitialization.d.ts @@ -0,0 +1,123 @@ +/// +/// +import type { Agent as hAgent, AgentOptions as hAgentOptions } from "http"; +import type { Agent as hsAgent, AgentOptions as hsAgentOptions } from "https"; +import { HttpRequest as IHttpRequest } from "../http"; +import { Logger } from "../logger"; +/** + * + * This type represents an alternate client constructor option for the entry + * "requestHandler". 
Instead of providing an instance of a requestHandler, the user + * may provide the requestHandler's constructor options for either the + * NodeHttpHandler or FetchHttpHandler. + * + * For other RequestHandlers like HTTP2 or WebSocket, + * constructor parameter passthrough is not currently available. + * + * @public + */ +export type RequestHandlerParams = NodeHttpHandlerOptions | FetchHttpHandlerOptions; +/** + * Represents the http options that can be passed to a node http client. + * @public + */ +export interface NodeHttpHandlerOptions { + /** + * The maximum time in milliseconds that the connection phase of a request + * may take before the connection attempt is abandoned. + * + * Defaults to 0, which disables the timeout. + */ + connectionTimeout?: number; + /** + * The number of milliseconds a request can take before automatically being terminated. + * Defaults to 0, which disables the timeout. + */ + requestTimeout?: number; + /** + * Delay before the NodeHttpHandler checks for socket exhaustion, + * and emits a warning if the active sockets and enqueued request count is greater than + * 2x the maxSockets count. + * + * Defaults to connectionTimeout + requestTimeout or 3000ms if those are not set. + */ + socketAcquisitionWarningTimeout?: number; + /** + * This field is deprecated, and requestTimeout should be used instead. + * The maximum time in milliseconds that a socket may remain idle before it + * is closed. + * + * @deprecated Use {@link requestTimeout} + */ + socketTimeout?: number; + /** + * You can pass http.Agent or its constructor options. + */ + httpAgent?: hAgent | hAgentOptions; + /** + * You can pass https.Agent or its constructor options. + */ + httpsAgent?: hsAgent | hsAgentOptions; + /** + * Optional logger. + */ + logger?: Logger; +} +/** + * Represents the http options that can be passed to a browser http client. 
+ * @public + */ +export interface FetchHttpHandlerOptions { + /** + * The number of milliseconds a request can take before being automatically + * terminated. + */ + requestTimeout?: number; + /** + * Whether to allow the request to outlive the page. Default value is false. + * + * There may be limitations to the payload size, number of concurrent requests, + * request duration etc. when using keepalive in browsers. + * + * These may change over time, so look for up to date information about + * these limitations before enabling keepalive. + */ + keepAlive?: boolean; + /** + * A string indicating whether credentials will be sent with the request always, never, or + * only when sent to a same-origin URL. + * @see https://developer.mozilla.org/en-US/docs/Web/API/Request/credentials + */ + credentials?: "include" | "omit" | "same-origin" | undefined | string; + /** + * Cache settings for fetch. + * @see https://developer.mozilla.org/en-US/docs/Web/API/Request/cache + */ + cache?: "default" | "force-cache" | "no-cache" | "no-store" | "only-if-cached" | "reload"; + /** + * An optional function that produces additional RequestInit + * parameters for each httpRequest. + * + * This is applied last via merging with Object.assign() and overwrites other values + * set from other sources. + * + * @example + * ```js + * new Client({ + * requestHandler: { + * requestInit(httpRequest) { + * return { cache: "no-store" }; + * } + * } + * }); + * ``` + */ + requestInit?: (httpRequest: IHttpRequest) => RequestInit; +} +declare global { + /** + * interface merging stub. 
+ */ + interface RequestInit { + } +} diff --git a/amplify/functions/downloadDocument/node_modules/@smithy/types/dist-types/identity/apiKeyIdentity.d.ts b/amplify/functions/downloadDocument/node_modules/@smithy/types/dist-types/identity/apiKeyIdentity.d.ts new file mode 100644 index 0000000..27750d4 --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@smithy/types/dist-types/identity/apiKeyIdentity.d.ts @@ -0,0 +1,14 @@ +import { Identity, IdentityProvider } from "../identity/identity"; +/** + * @public + */ +export interface ApiKeyIdentity extends Identity { + /** + * The literal API Key + */ + readonly apiKey: string; +} +/** + * @public + */ +export type ApiKeyIdentityProvider = IdentityProvider; diff --git a/amplify/functions/downloadDocument/node_modules/@smithy/types/dist-types/identity/awsCredentialIdentity.d.ts b/amplify/functions/downloadDocument/node_modules/@smithy/types/dist-types/identity/awsCredentialIdentity.d.ts new file mode 100644 index 0000000..7aa5a4b --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@smithy/types/dist-types/identity/awsCredentialIdentity.d.ts @@ -0,0 +1,31 @@ +import { Identity, IdentityProvider } from "./identity"; +/** + * @public + */ +export interface AwsCredentialIdentity extends Identity { + /** + * AWS access key ID + */ + readonly accessKeyId: string; + /** + * AWS secret access key + */ + readonly secretAccessKey: string; + /** + * A security or session token to use with these credentials. Usually + * present for temporary credentials. + */ + readonly sessionToken?: string; + /** + * AWS credential scope for this set of credentials. + */ + readonly credentialScope?: string; + /** + * AWS accountId. 
+ */ + readonly accountId?: string; +} +/** + * @public + */ +export type AwsCredentialIdentityProvider = IdentityProvider; diff --git a/amplify/functions/downloadDocument/node_modules/@smithy/types/dist-types/identity/identity.d.ts b/amplify/functions/downloadDocument/node_modules/@smithy/types/dist-types/identity/identity.d.ts new file mode 100644 index 0000000..c6fd0d1 --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@smithy/types/dist-types/identity/identity.d.ts @@ -0,0 +1,15 @@ +/** + * @public + */ +export interface Identity { + /** + * A `Date` when the identity or credential will no longer be accepted. + */ + readonly expiration?: Date; +} +/** + * @public + */ +export interface IdentityProvider { + (identityProperties?: Record): Promise; +} diff --git a/amplify/functions/downloadDocument/node_modules/@smithy/types/dist-types/identity/index.d.ts b/amplify/functions/downloadDocument/node_modules/@smithy/types/dist-types/identity/index.d.ts new file mode 100644 index 0000000..3360320 --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@smithy/types/dist-types/identity/index.d.ts @@ -0,0 +1,4 @@ +export * from "./apiKeyIdentity"; +export * from "./awsCredentialIdentity"; +export * from "./identity"; +export * from "./tokenIdentity"; diff --git a/amplify/functions/downloadDocument/node_modules/@smithy/types/dist-types/identity/tokenIdentity.d.ts b/amplify/functions/downloadDocument/node_modules/@smithy/types/dist-types/identity/tokenIdentity.d.ts new file mode 100644 index 0000000..84a74ff --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@smithy/types/dist-types/identity/tokenIdentity.d.ts @@ -0,0 +1,14 @@ +import { Identity, IdentityProvider } from "../identity/identity"; +/** + * @internal + */ +export interface TokenIdentity extends Identity { + /** + * The literal token string + */ + readonly token: string; +} +/** + * @internal + */ +export type TokenIdentityProvider = IdentityProvider; diff --git 
a/amplify/functions/downloadDocument/node_modules/@smithy/types/dist-types/index.d.ts b/amplify/functions/downloadDocument/node_modules/@smithy/types/dist-types/index.d.ts new file mode 100644 index 0000000..c370335 --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@smithy/types/dist-types/index.d.ts @@ -0,0 +1,37 @@ +export * from "./abort"; +export * from "./auth"; +export * from "./blob/blob-payload-input-types"; +export * from "./checksum"; +export * from "./client"; +export * from "./command"; +export * from "./connection"; +export * from "./crypto"; +export * from "./encode"; +export * from "./endpoint"; +export * from "./endpoints"; +export * from "./eventStream"; +export * from "./extensions"; +export * from "./feature-ids"; +export * from "./http"; +export * from "./http/httpHandlerInitialization"; +export * from "./identity"; +export * from "./logger"; +export * from "./middleware"; +export * from "./pagination"; +export * from "./profile"; +export * from "./response"; +export * from "./retry"; +export * from "./serde"; +export * from "./shapes"; +export * from "./signature"; +export * from "./stream"; +export * from "./streaming-payload/streaming-blob-common-types"; +export * from "./streaming-payload/streaming-blob-payload-input-types"; +export * from "./streaming-payload/streaming-blob-payload-output-types"; +export * from "./transfer"; +export * from "./transform/client-payload-blob-type-narrow"; +export * from "./transform/no-undefined"; +export * from "./transform/type-transform"; +export * from "./uri"; +export * from "./util"; +export * from "./waiter"; diff --git a/amplify/functions/downloadDocument/node_modules/@smithy/types/dist-types/logger.d.ts b/amplify/functions/downloadDocument/node_modules/@smithy/types/dist-types/logger.d.ts new file mode 100644 index 0000000..f66a664 --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@smithy/types/dist-types/logger.d.ts @@ -0,0 +1,13 @@ +/** + * @public + * + * 
Represents a logger object that is available in HandlerExecutionContext + * throughout the middleware stack. + */ +export interface Logger { + trace?: (...content: any[]) => void; + debug: (...content: any[]) => void; + info: (...content: any[]) => void; + warn: (...content: any[]) => void; + error: (...content: any[]) => void; +} diff --git a/amplify/functions/downloadDocument/node_modules/@smithy/types/dist-types/middleware.d.ts b/amplify/functions/downloadDocument/node_modules/@smithy/types/dist-types/middleware.d.ts new file mode 100644 index 0000000..cc20098 --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@smithy/types/dist-types/middleware.d.ts @@ -0,0 +1,534 @@ +import type { AuthScheme, HttpAuthDefinition } from "./auth/auth"; +import type { SelectedHttpAuthScheme } from "./auth/HttpAuthScheme"; +import type { Command } from "./command"; +import type { EndpointV2 } from "./endpoint"; +import type { SmithyFeatures } from "./feature-ids"; +import type { Logger } from "./logger"; +import type { UserAgent } from "./util"; +/** + * @public + */ +export interface InitializeHandlerArguments { + /** + * User input to a command. Reflects the userland representation of the + * union of data types the command can effectively handle. + */ + input: Input; +} +/** + * @public + */ +export interface InitializeHandlerOutput extends DeserializeHandlerOutput { + output: Output; +} +/** + * @public + */ +export interface SerializeHandlerArguments extends InitializeHandlerArguments { + /** + * The user input serialized as a request object. The request object is unknown, + * so you cannot modify it directly. When work with request, you need to guard its + * type to e.g. HttpRequest with 'instanceof' operand + * + * During the build phase of the execution of a middleware stack, a built + * request may or may not be available. 
+ */ + request?: unknown; +} +/** + * @public + */ +export interface SerializeHandlerOutput extends InitializeHandlerOutput { +} +/** + * @public + */ +export interface BuildHandlerArguments extends FinalizeHandlerArguments { +} +/** + * @public + */ +export interface BuildHandlerOutput extends InitializeHandlerOutput { +} +/** + * @public + */ +export interface FinalizeHandlerArguments extends SerializeHandlerArguments { + /** + * The user input serialized as a request. + */ + request: unknown; +} +/** + * @public + */ +export interface FinalizeHandlerOutput extends InitializeHandlerOutput { +} +/** + * @public + */ +export interface DeserializeHandlerArguments extends FinalizeHandlerArguments { +} +/** + * @public + */ +export interface DeserializeHandlerOutput { + /** + * The raw response object from runtime is deserialized to structured output object. + * The response object is unknown so you cannot modify it directly. When work with + * response, you need to guard its type to e.g. HttpResponse with 'instanceof' operand. + * + * During the deserialize phase of the execution of a middleware stack, a deserialized + * response may or may not be available + */ + response: unknown; + output?: Output; +} +/** + * @public + */ +export interface InitializeHandler { + /** + * Asynchronously converts an input object into an output object. + * + * @param args - An object containing a input to the command as well as any + * associated or previously generated execution artifacts. + */ + (args: InitializeHandlerArguments): Promise>; +} +/** + * @public + */ +export type Handler = InitializeHandler; +/** + * @public + */ +export interface SerializeHandler { + /** + * Asynchronously converts an input object into an output object. + * + * @param args - An object containing a input to the command as well as any + * associated or previously generated execution artifacts. 
+ */ + (args: SerializeHandlerArguments): Promise>; +} +/** + * @public + */ +export interface FinalizeHandler { + /** + * Asynchronously converts an input object into an output object. + * + * @param args - An object containing a input to the command as well as any + * associated or previously generated execution artifacts. + */ + (args: FinalizeHandlerArguments): Promise>; +} +/** + * @public + */ +export interface BuildHandler { + (args: BuildHandlerArguments): Promise>; +} +/** + * @public + */ +export interface DeserializeHandler { + (args: DeserializeHandlerArguments): Promise>; +} +/** + * @public + * + * A factory function that creates functions implementing the `Handler` + * interface. + */ +export interface InitializeMiddleware { + /** + * @param next - The handler to invoke after this middleware has operated on + * the user input and before this middleware operates on the output. + * + * @param context - Invariant data and functions for use by the handler. + */ + (next: InitializeHandler, context: HandlerExecutionContext): InitializeHandler; +} +/** + * @public + * + * A factory function that creates functions implementing the `BuildHandler` + * interface. + */ +export interface SerializeMiddleware { + /** + * @param next - The handler to invoke after this middleware has operated on + * the user input and before this middleware operates on the output. + * + * @param context - Invariant data and functions for use by the handler. + */ + (next: SerializeHandler, context: HandlerExecutionContext): SerializeHandler; +} +/** + * @public + * + * A factory function that creates functions implementing the `FinalizeHandler` + * interface. + */ +export interface FinalizeRequestMiddleware { + /** + * @param next - The handler to invoke after this middleware has operated on + * the user input and before this middleware operates on the output. + * + * @param context - Invariant data and functions for use by the handler. 
+ */ + (next: FinalizeHandler, context: HandlerExecutionContext): FinalizeHandler; +} +/** + * @public + */ +export interface BuildMiddleware { + (next: BuildHandler, context: HandlerExecutionContext): BuildHandler; +} +/** + * @public + */ +export interface DeserializeMiddleware { + (next: DeserializeHandler, context: HandlerExecutionContext): DeserializeHandler; +} +/** + * @public + */ +export type MiddlewareType = InitializeMiddleware | SerializeMiddleware | BuildMiddleware | FinalizeRequestMiddleware | DeserializeMiddleware; +/** + * @public + * + * A factory function that creates the terminal handler atop which a middleware + * stack sits. + */ +export interface Terminalware { + (context: HandlerExecutionContext): DeserializeHandler; +} +/** + * @public + */ +export type Step = "initialize" | "serialize" | "build" | "finalizeRequest" | "deserialize"; +/** + * @public + */ +export type Priority = "high" | "normal" | "low"; +/** + * @public + */ +export interface HandlerOptions { + /** + * Handlers are ordered using a "step" that describes the stage of command + * execution at which the handler will be executed. The available steps are: + * + * - initialize: The input is being prepared. Examples of typical + * initialization tasks include injecting default options computing + * derived parameters. + * - serialize: The input is complete and ready to be serialized. Examples + * of typical serialization tasks include input validation and building + * an HTTP request from user input. + * - build: The input has been serialized into an HTTP request, but that + * request may require further modification. Any request alterations + * will be applied to all retries. Examples of typical build tasks + * include injecting HTTP headers that describe a stable aspect of the + * request, such as `Content-Length` or a body checksum. + * - finalizeRequest: The request is being prepared to be sent over the wire. 
The + * request in this stage should already be semantically complete and + * should therefore only be altered as match the recipient's + * expectations. Examples of typical finalization tasks include request + * signing and injecting hop-by-hop headers. + * - deserialize: The response has arrived, the middleware here will deserialize + * the raw response object to structured response + * + * Unlike initialization and build handlers, which are executed once + * per operation execution, finalization and deserialize handlers will be + * executed foreach HTTP request sent. + * + * @defaultValue 'initialize' + */ + step?: Step; + /** + * A list of strings to any that identify the general purpose or important + * characteristics of a given handler. + */ + tags?: Array; + /** + * A unique name to refer to a middleware + */ + name?: string; + /** + * @internal + * Aliases allows for middleware to be found by multiple names besides {@link HandlerOptions.name}. + * This allows for references to replaced middleware to continue working, e.g. replacing + * multiple auth-specific middleware with a single generic auth middleware. + */ + aliases?: Array; + /** + * A flag to override the existing middleware with the same name. Without + * setting it, adding middleware with duplicated name will throw an exception. + * @internal + */ + override?: boolean; +} +/** + * @public + */ +export interface AbsoluteLocation { + /** + * By default middleware will be added to individual step in un-guaranteed order. + * In the case that + * + * @defaultValue 'normal' + */ + priority?: Priority; +} +/** + * @public + */ +export type Relation = "before" | "after"; +/** + * @public + */ +export interface RelativeLocation { + /** + * Specify the relation to be before or after a know middleware. + */ + relation: Relation; + /** + * A known middleware name to indicate inserting middleware's location. 
+ */ + toMiddleware: string; +} +/** + * @public + */ +export type RelativeMiddlewareOptions = RelativeLocation & Omit; +/** + * @public + */ +export interface InitializeHandlerOptions extends HandlerOptions { + step?: "initialize"; +} +/** + * @public + */ +export interface SerializeHandlerOptions extends HandlerOptions { + step: "serialize"; +} +/** + * @public + */ +export interface BuildHandlerOptions extends HandlerOptions { + step: "build"; +} +/** + * @public + */ +export interface FinalizeRequestHandlerOptions extends HandlerOptions { + step: "finalizeRequest"; +} +/** + * @public + */ +export interface DeserializeHandlerOptions extends HandlerOptions { + step: "deserialize"; +} +/** + * @public + * + * A stack storing middleware. It can be resolved into a handler. It supports 2 + * approaches for adding middleware: + * 1. Adding middleware to specific step with `add()`. The order of middleware + * added into same step is determined by order of adding them. If one middleware + * needs to be executed at the front of the step or at the end of step, set + * `priority` options to `high` or `low`. + * 2. Adding middleware to location relative to known middleware with `addRelativeTo()`. + * This is useful when given middleware must be executed before or after specific + * middleware(`toMiddleware`). You can add a middleware relatively to another + * middleware which also added relatively. But eventually, this relative middleware + * chain **must** be 'anchored' by a middleware that added using `add()` API + * with absolute `step` and `priority`. This mothod will throw if specified + * `toMiddleware` is not found. 
+ */ +export interface MiddlewareStack extends Pluggable { + /** + * Add middleware to the stack to be executed during the "initialize" step, + * optionally specifying a priority, tags and name + */ + add(middleware: InitializeMiddleware, options?: InitializeHandlerOptions & AbsoluteLocation): void; + /** + * Add middleware to the stack to be executed during the "serialize" step, + * optionally specifying a priority, tags and name + */ + add(middleware: SerializeMiddleware, options: SerializeHandlerOptions & AbsoluteLocation): void; + /** + * Add middleware to the stack to be executed during the "build" step, + * optionally specifying a priority, tags and name + */ + add(middleware: BuildMiddleware, options: BuildHandlerOptions & AbsoluteLocation): void; + /** + * Add middleware to the stack to be executed during the "finalizeRequest" step, + * optionally specifying a priority, tags and name + */ + add(middleware: FinalizeRequestMiddleware, options: FinalizeRequestHandlerOptions & AbsoluteLocation): void; + /** + * Add middleware to the stack to be executed during the "deserialize" step, + * optionally specifying a priority, tags and name + */ + add(middleware: DeserializeMiddleware, options: DeserializeHandlerOptions & AbsoluteLocation): void; + /** + * Add middleware to a stack position before or after a known middleware,optionally + * specifying name and tags. + */ + addRelativeTo(middleware: MiddlewareType, options: RelativeMiddlewareOptions): void; + /** + * Apply a customization function to mutate the middleware stack, often + * used for customizations that requires mutating multiple middleware. + */ + use(pluggable: Pluggable): void; + /** + * Create a shallow clone of this stack. Step bindings and handler priorities + * and tags are preserved in the copy. + */ + clone(): MiddlewareStack; + /** + * Removes middleware from the stack. + * + * If a string is provided, it will be treated as middleware name. 
If a middleware + * is inserted with the given name, it will be removed. + * + * If a middleware class is provided, all usages thereof will be removed. + */ + remove(toRemove: MiddlewareType | string): boolean; + /** + * Removes middleware that contains given tag + * + * Multiple middleware will potentially be removed + */ + removeByTag(toRemove: string): boolean; + /** + * Create a stack containing the middlewares in this stack as well as the + * middlewares in the `from` stack. Neither source is modified, and step + * bindings and handler priorities and tags are preserved in the copy. + */ + concat(from: MiddlewareStack): MiddlewareStack; + /** + * Returns a list of the current order of middleware in the stack. + * This does not execute the middleware functions, nor does it + * provide a reference to the stack itself. + */ + identify(): string[]; + /** + * @internal + * + * When an operation is called using this stack, + * it will log its list of middleware to the console using + * the identify function. + * + * @param toggle - set whether to log on resolve. + * If no argument given, returns the current value. + */ + identifyOnResolve(toggle?: boolean): boolean; + /** + * Builds a single handler function from zero or more middleware classes and + * a core handler. The core handler is meant to send command objects to AWS + * services and return promises that will resolve with the operation result + * or be rejected with an error. + * + * When a composed handler is invoked, the arguments will pass through all + * middleware in a defined order, and the return from the innermost handler + * will pass through all middleware in the reverse of that order. 
+ */ + resolve(handler: DeserializeHandler, context: HandlerExecutionContext): InitializeHandler; +} +/** + * @internal + */ +export declare const SMITHY_CONTEXT_KEY = "__smithy_context"; +/** + * @public + * + * Data and helper objects that are not expected to change from one execution of + * a composed handler to another. + */ +export interface HandlerExecutionContext { + /** + * A logger that may be invoked by any handler during execution of an + * operation. + */ + logger?: Logger; + /** + * Name of the service the operation is being sent to. + */ + clientName?: string; + /** + * Name of the operation being executed. + */ + commandName?: string; + /** + * Additional user agent that inferred by middleware. It can be used to save + * the internal user agent sections without overriding the `customUserAgent` + * config in clients. + */ + userAgent?: UserAgent; + /** + * Resolved by the endpointMiddleware function of `@smithy/middleware-endpoint` + * in the serialization stage. + */ + endpointV2?: EndpointV2; + /** + * Set at the same time as endpointV2. + */ + authSchemes?: AuthScheme[]; + /** + * The current auth configuration that has been set by any auth middleware and + * that will prevent from being set more than once. + */ + currentAuthConfig?: HttpAuthDefinition; + /** + * @deprecated do not extend this field, it is a carryover from AWS SDKs. + * Used by DynamoDbDocumentClient. + */ + dynamoDbDocumentClientOptions?: Partial<{ + overrideInputFilterSensitiveLog(...args: any[]): string | void; + overrideOutputFilterSensitiveLog(...args: any[]): string | void; + }>; + /** + * @internal + * Context for Smithy properties. + */ + [SMITHY_CONTEXT_KEY]?: { + service?: string; + operation?: string; + commandInstance?: Command; + selectedHttpAuthScheme?: SelectedHttpAuthScheme; + features?: SmithyFeatures; + /** + * @deprecated + * Do not assign arbitrary members to the Smithy Context, + * fields should be explicitly declared here to avoid collisions. 
+ */ + [key: string]: unknown; + }; + /** + * @deprecated + * Do not assign arbitrary members to the context, since + * they can interfere with existing functionality. + * + * Additional members should instead be declared on the SMITHY_CONTEXT_KEY + * or other reserved keys. + */ + [key: string]: any; +} +/** + * @public + */ +export interface Pluggable { + /** + * A function that mutate the passed in middleware stack. Functions implementing + * this interface can add, remove, modify existing middleware stack from clients + * or commands + */ + applyToStack: (stack: MiddlewareStack) => void; +} diff --git a/amplify/functions/downloadDocument/node_modules/@smithy/types/dist-types/pagination.d.ts b/amplify/functions/downloadDocument/node_modules/@smithy/types/dist-types/pagination.d.ts new file mode 100644 index 0000000..e10fdda --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@smithy/types/dist-types/pagination.d.ts @@ -0,0 +1,33 @@ +import type { Client } from "./client"; +import type { Command } from "./command"; +/** + * @public + * + * Expected type definition of a paginator. + */ +export type Paginator = AsyncGenerator; +/** + * @public + * + * Expected paginator configuration passed to an operation. Services will extend + * this interface definition and may type client further. + */ +export interface PaginationConfiguration { + client: Client; + pageSize?: number; + startingToken?: any; + /** + * For some APIs, such as CloudWatchLogs events, the next page token will always + * be present. + * + * When true, this config field will have the paginator stop when the token doesn't change + * instead of when it is not present. + */ + stopOnSameToken?: boolean; + /** + * @param command - reference to the instantiated command. This callback is executed + * prior to sending the command with the paginator's client. + * @returns the original command or a replacement, defaulting to the original command object. 
+ */ + withCommand?: (command: Command) => typeof command | undefined; +} diff --git a/amplify/functions/downloadDocument/node_modules/@smithy/types/dist-types/profile.d.ts b/amplify/functions/downloadDocument/node_modules/@smithy/types/dist-types/profile.d.ts new file mode 100644 index 0000000..b7885d9 --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@smithy/types/dist-types/profile.d.ts @@ -0,0 +1,30 @@ +/** + * @public + */ +export declare enum IniSectionType { + PROFILE = "profile", + SSO_SESSION = "sso-session", + SERVICES = "services" +} +/** + * @public + */ +export type IniSection = Record; +/** + * @public + * + * @deprecated Please use {@link IniSection} + */ +export interface Profile extends IniSection { +} +/** + * @public + */ +export type ParsedIniData = Record; +/** + * @public + */ +export interface SharedConfigFiles { + credentialsFile: ParsedIniData; + configFile: ParsedIniData; +} diff --git a/amplify/functions/downloadDocument/node_modules/@smithy/types/dist-types/response.d.ts b/amplify/functions/downloadDocument/node_modules/@smithy/types/dist-types/response.d.ts new file mode 100644 index 0000000..afcfe8f --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@smithy/types/dist-types/response.d.ts @@ -0,0 +1,40 @@ +/** + * @public + */ +export interface ResponseMetadata { + /** + * The status code of the last HTTP response received for this operation. + */ + httpStatusCode?: number; + /** + * A unique identifier for the last request sent for this operation. Often + * requested by AWS service teams to aid in debugging. + */ + requestId?: string; + /** + * A secondary identifier for the last request sent. Used for debugging. + */ + extendedRequestId?: string; + /** + * A tertiary identifier for the last request sent. Used for debugging. + */ + cfId?: string; + /** + * The number of times this operation was attempted. 
+ */ + attempts?: number; + /** + * The total amount of time (in milliseconds) that was spent waiting between + * retry attempts. + */ + totalRetryDelay?: number; +} +/** + * @public + */ +export interface MetadataBearer { + /** + * Metadata pertaining to this request. + */ + $metadata: ResponseMetadata; +} diff --git a/amplify/functions/downloadDocument/node_modules/@smithy/types/dist-types/retry.d.ts b/amplify/functions/downloadDocument/node_modules/@smithy/types/dist-types/retry.d.ts new file mode 100644 index 0000000..7bb5881 --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@smithy/types/dist-types/retry.d.ts @@ -0,0 +1,133 @@ +import { SdkError } from "./shapes"; +/** + * @public + */ +export type RetryErrorType = +/** + * This is a connection level error such as a socket timeout, socket connect + * error, tls negotiation timeout etc... + * Typically these should never be applied for non-idempotent request types + * since in this scenario, it's impossible to know whether the operation had + * a side effect on the server. + */ +"TRANSIENT" +/** + * This is an error where the server explicitly told the client to back off, + * such as a 429 or 503 Http error. + */ + | "THROTTLING" +/** + * This is a server error that isn't explicitly throttling but is considered + * by the client to be something that should be retried. + */ + | "SERVER_ERROR" +/** + * Doesn't count against any budgets. This could be something like a 401 + * challenge in Http. + */ + | "CLIENT_ERROR"; +/** + * @public + */ +export interface RetryErrorInfo { + /** + * The error thrown during the initial request, if available. + */ + error?: SdkError; + errorType: RetryErrorType; + /** + * Protocol hint. This could come from Http's 'retry-after' header or + * something from MQTT or any other protocol that has the ability to convey + * retry info from a peer. + * + * The Date after which a retry should be attempted. 
+ */ + retryAfterHint?: Date; +} +/** + * @public + */ +export interface RetryBackoffStrategy { + /** + * @returns the number of milliseconds to wait before retrying an action. + */ + computeNextBackoffDelay(retryAttempt: number): number; +} +/** + * @public + */ +export interface StandardRetryBackoffStrategy extends RetryBackoffStrategy { + /** + * Sets the delayBase used to compute backoff delays. + * @param delayBase - + */ + setDelayBase(delayBase: number): void; +} +/** + * @public + */ +export interface RetryStrategyOptions { + backoffStrategy: RetryBackoffStrategy; + maxRetriesBase: number; +} +/** + * @public + */ +export interface RetryToken { + /** + * @returns the current count of retry. + */ + getRetryCount(): number; + /** + * @returns the number of milliseconds to wait before retrying an action. + */ + getRetryDelay(): number; +} +/** + * @public + */ +export interface StandardRetryToken extends RetryToken { + /** + * @returns the cost of the last retry attempt. + */ + getRetryCost(): number | undefined; +} +/** + * @public + */ +export interface RetryStrategyV2 { + /** + * Called before any retries (for the first call to the operation). It either + * returns a retry token or an error upon the failure to acquire a token prior. + * + * tokenScope is arbitrary and out of scope for this component. However, + * adding it here offers us a lot of future flexibility for outage detection. + * For example, it could be "us-east-1" on a shared retry strategy, or + * "us-west-2-c:dynamodb". + */ + acquireInitialRetryToken(retryTokenScope: string): Promise; + /** + * After a failed operation call, this function is invoked to refresh the + * retryToken returned by acquireInitialRetryToken(). This function can + * either choose to allow another retry and send a new or updated token, + * or reject the retry attempt and report the error either in an exception + * or returning an error. 
+ */ + refreshRetryTokenForRetry(tokenToRenew: RetryToken, errorInfo: RetryErrorInfo): Promise; + /** + * Upon successful completion of the operation, this function is called + * to record that the operation was successful. + */ + recordSuccess(token: RetryToken): void; +} +/** + * @public + */ +export type ExponentialBackoffJitterType = "DEFAULT" | "NONE" | "FULL" | "DECORRELATED"; +/** + * @public + */ +export interface ExponentialBackoffStrategyOptions { + jitterType: ExponentialBackoffJitterType; + backoffScaleValue?: number; +} diff --git a/amplify/functions/downloadDocument/node_modules/@smithy/types/dist-types/serde.d.ts b/amplify/functions/downloadDocument/node_modules/@smithy/types/dist-types/serde.d.ts new file mode 100644 index 0000000..a81314f --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@smithy/types/dist-types/serde.d.ts @@ -0,0 +1,112 @@ +import { Endpoint } from "./http"; +import { RequestHandler } from "./transfer"; +import { Decoder, Encoder, Provider } from "./util"; +/** + * @public + * + * Interface for object requires an Endpoint set. + */ +export interface EndpointBearer { + endpoint: Provider; +} +/** + * @public + */ +export interface StreamCollector { + /** + * A function that converts a stream into an array of bytes. + * + * @param stream - The low-level native stream from browser or Nodejs runtime + */ + (stream: any): Promise; +} +/** + * @public + * + * Request and Response serde util functions and settings for AWS services + */ +export interface SerdeContext extends SerdeFunctions, EndpointBearer { + requestHandler: RequestHandler; + disableHostPrefix: boolean; +} +/** + * @public + * + * Serde functions from the client config. 
+ */ +export interface SerdeFunctions { + base64Encoder: Encoder; + base64Decoder: Decoder; + utf8Encoder: Encoder; + utf8Decoder: Decoder; + streamCollector: StreamCollector; +} +/** + * @public + */ +export interface RequestSerializer { + /** + * Converts the provided `input` into a request object + * + * @param input - The user input to serialize. + * + * @param context - Context containing runtime-specific util functions. + */ + (input: any, context: Context): Promise; +} +/** + * @public + */ +export interface ResponseDeserializer { + /** + * Converts the output of an operation into JavaScript types. + * + * @param output - The HTTP response received from the service + * + * @param context - context containing runtime-specific util functions. + */ + (output: ResponseType, context: Context): Promise; +} +/** + * The interface contains mix-in utility functions to transfer the runtime-specific + * stream implementation to specified format. Each stream can ONLY be transformed + * once. + * @public + */ +export interface SdkStreamMixin { + transformToByteArray: () => Promise; + transformToString: (encoding?: string) => Promise; + transformToWebStream: () => ReadableStream; +} +/** + * @public + * + * The type describing a runtime-specific stream implementation with mix-in + * utility functions. + */ +export type SdkStream = BaseStream & SdkStreamMixin; +/** + * @public + * + * Indicates that the member of type T with + * key StreamKey have been extended + * with the SdkStreamMixin helper methods. + */ +export type WithSdkStreamMixin = { + [key in keyof T]: key extends StreamKey ? 
SdkStream : T[key]; +}; +/** + * Interface for internal function to inject stream utility functions + * implementation + * + * @internal + */ +export interface SdkStreamMixinInjector { + (stream: unknown): SdkStreamMixin; +} +/** + * @internal + */ +export interface SdkStreamSerdeContext { + sdkStreamMixin: SdkStreamMixinInjector; +} diff --git a/amplify/functions/downloadDocument/node_modules/@smithy/types/dist-types/shapes.d.ts b/amplify/functions/downloadDocument/node_modules/@smithy/types/dist-types/shapes.d.ts new file mode 100644 index 0000000..a4812fb --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@smithy/types/dist-types/shapes.d.ts @@ -0,0 +1,82 @@ +import { HttpResponse } from "./http"; +import { MetadataBearer } from "./response"; +/** + * @public + * + * A document type represents an untyped JSON-like value. + * + * Not all protocols support document types, and the serialization format of a + * document type is protocol specific. All JSON protocols SHOULD support + * document types and they SHOULD serialize document types inline as normal + * JSON values. + */ +export type DocumentType = null | boolean | number | string | DocumentType[] | { + [prop: string]: DocumentType; +}; +/** + * @public + * + * A structure shape with the error trait. + * https://smithy.io/2.0/spec/behavior-traits.html#smithy-api-retryable-trait + */ +export interface RetryableTrait { + /** + * Indicates that the error is a retryable throttling error. + */ + readonly throttling?: boolean; +} +/** + * @public + * + * Type that is implemented by all Smithy shapes marked with the + * error trait. + * @deprecated + */ +export interface SmithyException { + /** + * The shape ID name of the exception. + */ + readonly name: string; + /** + * Whether the client or server are at fault. + */ + readonly $fault: "client" | "server"; + /** + * The service that encountered the exception. 
+ */ + readonly $service?: string; + /** + * Indicates that an error MAY be retried by the client. + */ + readonly $retryable?: RetryableTrait; + /** + * Reference to low-level HTTP response object. + */ + readonly $response?: HttpResponse; +} +/** + * @public + * + * @deprecated See {@link https://aws.amazon.com/blogs/developer/service-error-handling-modular-aws-sdk-js/} + * + * This type should not be used in your application. + * Users of the AWS SDK for JavaScript v3 service clients should prefer to + * use the specific Exception classes corresponding to each operation. + * These can be found as code in the deserializer for the operation's Command class, + * or as declarations in the service model file in codegen/sdk-codegen/aws-models. + * + * If no exceptions are enumerated by a particular Command operation, + * the base exception for the service should be used. Each client exports + * a base ServiceException prefixed with the service name. + */ +export type SdkError = Error & Partial & Partial & { + $metadata?: Partial["$metadata"] & { + /** + * If present, will have value of true and indicates that the error resulted in a + * correction of the clock skew, a.k.a. config.systemClockOffset. + * This is specific to AWS SDK and sigv4. + */ + readonly clockSkewCorrected?: true; + }; + cause?: Error; +}; diff --git a/amplify/functions/downloadDocument/node_modules/@smithy/types/dist-types/signature.d.ts b/amplify/functions/downloadDocument/node_modules/@smithy/types/dist-types/signature.d.ts new file mode 100644 index 0000000..db0039d --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@smithy/types/dist-types/signature.d.ts @@ -0,0 +1,155 @@ +import { Message } from "./eventStream"; +import { HttpRequest } from "./http"; +/** + * @public + * + * A `Date` object, a unix (epoch) timestamp in seconds, or a string that can be + * understood by the JavaScript `Date` constructor. 
+ */ +export type DateInput = number | string | Date; +/** + * @public + */ +export interface SigningArguments { + /** + * The date and time to be used as signature metadata. This value should be + * a Date object, a unix (epoch) timestamp, or a string that can be + * understood by the JavaScript `Date` constructor.If not supplied, the + * value returned by `new Date()` will be used. + */ + signingDate?: DateInput; + /** + * The service signing name. It will override the service name of the signer + * in current invocation + */ + signingService?: string; + /** + * The region name to sign the request. It will override the signing region of the + * signer in current invocation + */ + signingRegion?: string; +} +/** + * @public + */ +export interface RequestSigningArguments extends SigningArguments { + /** + * A set of strings whose members represents headers that cannot be signed. + * All headers in the provided request will have their names converted to + * lower case and then checked for existence in the unsignableHeaders set. + */ + unsignableHeaders?: Set; + /** + * A set of strings whose members represents headers that should be signed. + * Any values passed here will override those provided via unsignableHeaders, + * allowing them to be signed. + * + * All headers in the provided request will have their names converted to + * lower case before signing. + */ + signableHeaders?: Set; +} +/** + * @public + */ +export interface RequestPresigningArguments extends RequestSigningArguments { + /** + * The number of seconds before the presigned URL expires + */ + expiresIn?: number; + /** + * A set of strings whose representing headers that should not be hoisted + * to presigned request's query string. If not supplied, the presigner + * moves all the AWS-specific headers (starting with `x-amz-`) to the request + * query string. If supplied, these headers remain in the presigned request's + * header. 
+ * All headers in the provided request will have their names converted to + * lower case and then checked for existence in the unhoistableHeaders set. + */ + unhoistableHeaders?: Set; + /** + * This overrides any headers with the same name(s) set by unhoistableHeaders. + * These headers will be hoisted into the query string and signed. + */ + hoistableHeaders?: Set; +} +/** + * @public + */ +export interface EventSigningArguments extends SigningArguments { + priorSignature: string; +} +/** + * @public + */ +export interface RequestPresigner { + /** + * Signs a request for future use. + * + * The request will be valid until either the provided `expiration` time has + * passed or the underlying credentials have expired. + * + * @param requestToSign - The request that should be signed. + * @param options - Additional signing options. + */ + presign(requestToSign: HttpRequest, options?: RequestPresigningArguments): Promise; +} +/** + * @public + * + * An object that signs request objects with AWS credentials using one of the + * AWS authentication protocols. + */ +export interface RequestSigner { + /** + * Sign the provided request for immediate dispatch. + */ + sign(requestToSign: HttpRequest, options?: RequestSigningArguments): Promise; +} +/** + * @public + */ +export interface StringSigner { + /** + * Sign the provided `stringToSign` for use outside of the context of + * request signing. Typical uses include signed policy generation. + */ + sign(stringToSign: string, options?: SigningArguments): Promise; +} +/** + * @public + */ +export interface FormattedEvent { + headers: Uint8Array; + payload: Uint8Array; +} +/** + * @public + */ +export interface EventSigner { + /** + * Sign the individual event of the event stream. 
+ */ + sign(event: FormattedEvent, options: EventSigningArguments): Promise; +} +/** + * @public + */ +export interface SignableMessage { + message: Message; + priorSignature: string; +} +/** + * @public + */ +export interface SignedMessage { + message: Message; + signature: string; +} +/** + * @public + */ +export interface MessageSigner { + signMessage(message: SignableMessage, args: SigningArguments): Promise; + sign(event: SignableMessage, options: SigningArguments): Promise; +} diff --git a/amplify/functions/downloadDocument/node_modules/@smithy/types/dist-types/stream.d.ts b/amplify/functions/downloadDocument/node_modules/@smithy/types/dist-types/stream.d.ts new file mode 100644 index 0000000..f305dd9 --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@smithy/types/dist-types/stream.d.ts @@ -0,0 +1,22 @@ +import { ChecksumConstructor } from "./checksum"; +import { HashConstructor, StreamHasher } from "./crypto"; +import { BodyLengthCalculator, Encoder } from "./util"; +/** + * @public + */ +export interface GetAwsChunkedEncodingStreamOptions { + base64Encoder?: Encoder; + bodyLengthChecker: BodyLengthCalculator; + checksumAlgorithmFn?: ChecksumConstructor | HashConstructor; + checksumLocationName?: string; + streamHasher?: StreamHasher; +} +/** + * @public + * + * A function that returns Readable Stream which follows aws-chunked encoding stream. + * It optionally adds checksum if options are provided. 
+ */ +export interface GetAwsChunkedEncodingStream { + (readableStream: StreamType, options: GetAwsChunkedEncodingStreamOptions): StreamType; +} diff --git a/amplify/functions/downloadDocument/node_modules/@smithy/types/dist-types/streaming-payload/streaming-blob-common-types.d.ts b/amplify/functions/downloadDocument/node_modules/@smithy/types/dist-types/streaming-payload/streaming-blob-common-types.d.ts new file mode 100644 index 0000000..92c52da --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@smithy/types/dist-types/streaming-payload/streaming-blob-common-types.d.ts @@ -0,0 +1,33 @@ +/// +import type { Readable } from "stream"; +import type { BlobOptionalType, ReadableStreamOptionalType } from "../externals-check/browser-externals-check"; +/** + * @public + * + * This is the union representing the modeled blob type with streaming trait + * in a generic format that does not relate to HTTP input or output payloads. + * + * Note: the non-streaming blob type is represented by Uint8Array, but because + * the streaming blob type is always in the request/response paylod, it has + * historically been handled with different types. + * + * @see https://smithy.io/2.0/spec/simple-types.html#blob + * + * For compatibility with its historical representation, it must contain at least + * Readble (Node.js), Blob (browser), and ReadableStream (browser). + * + * @see StreamingPayloadInputTypes for FAQ about mixing types from multiple environments. + */ +export type StreamingBlobTypes = NodeJsRuntimeStreamingBlobTypes | BrowserRuntimeStreamingBlobTypes; +/** + * @public + * + * Node.js streaming blob type. + */ +export type NodeJsRuntimeStreamingBlobTypes = Readable; +/** + * @public + * + * Browser streaming blob types. 
+ */ +export type BrowserRuntimeStreamingBlobTypes = ReadableStreamOptionalType | BlobOptionalType; diff --git a/amplify/functions/downloadDocument/node_modules/@smithy/types/dist-types/streaming-payload/streaming-blob-payload-input-types.d.ts b/amplify/functions/downloadDocument/node_modules/@smithy/types/dist-types/streaming-payload/streaming-blob-payload-input-types.d.ts new file mode 100644 index 0000000..9bcc164 --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@smithy/types/dist-types/streaming-payload/streaming-blob-payload-input-types.d.ts @@ -0,0 +1,63 @@ +/// +/// +/// +import type { Readable } from "stream"; +import type { BlobOptionalType, ReadableStreamOptionalType } from "../externals-check/browser-externals-check"; +/** + * @public + * + * This union represents a superset of the compatible types you + * can use for streaming payload inputs. + * + * FAQ: + * Why does the type union mix mutually exclusive runtime types, namely + * Node.js and browser types? + * + * There are several reasons: + * 1. For backwards compatibility. + * 2. As a convenient compromise solution so that users in either environment may use the types + * without customization. + * 3. The SDK does not have static type information about the exact implementation + * of the HTTP RequestHandler being used in your client(s) (e.g. fetch, XHR, node:http, or node:http2), + * given that it is chosen at runtime. There are multiple possible request handlers + * in both the Node.js and browser runtime environments. + * + * Rather than restricting the type to a known common format (Uint8Array, for example) + * which doesn't include a universal streaming format in the currently supported Node.js versions, + * the type declaration is widened to multiple possible formats. + * It is up to the user to ultimately select a compatible format with the + * runtime and HTTP handler implementation they are using. 
+ * + * Usage: + * The typical solution we expect users to have is to manually narrow the + * type when needed, picking the appropriate one out of the union according to the + * runtime environment and specific request handler. + * There is also the type utility "NodeJsClient", "BrowserClient" and more + * exported from this package. These can be applied at the client level + * to pre-narrow these streaming payload blobs. For usage see the readme.md + * in the root of the \@smithy/types NPM package. + */ +export type StreamingBlobPayloadInputTypes = NodeJsRuntimeStreamingBlobPayloadInputTypes | BrowserRuntimeStreamingBlobPayloadInputTypes; +/** + * @public + * + * Streaming payload input types in the Node.js environment. + * These are derived from the types compatible with the request body used by node:http. + * + * Note: not all types are signable by the standard SignatureV4 signer when + * used as the request body. For example, in Node.js a Readable stream + * is not signable by the default signer. + * They are included in the union because it may be intended in some cases, + * but the expected types are primarily string, Uint8Array, and Buffer. + * + * Additional details may be found in the internal + * function "getPayloadHash" in the SignatureV4 module. + */ +export type NodeJsRuntimeStreamingBlobPayloadInputTypes = string | Uint8Array | Buffer | Readable; +/** + * @public + * + * Streaming payload input types in the browser environment. + * These are derived from the types compatible with fetch's Request.body. 
+ */ +export type BrowserRuntimeStreamingBlobPayloadInputTypes = string | Uint8Array | ReadableStreamOptionalType | BlobOptionalType; diff --git a/amplify/functions/downloadDocument/node_modules/@smithy/types/dist-types/streaming-payload/streaming-blob-payload-output-types.d.ts b/amplify/functions/downloadDocument/node_modules/@smithy/types/dist-types/streaming-payload/streaming-blob-payload-output-types.d.ts new file mode 100644 index 0000000..b64a878 --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@smithy/types/dist-types/streaming-payload/streaming-blob-payload-output-types.d.ts @@ -0,0 +1,53 @@ +/// +/// +import type { IncomingMessage } from "http"; +import type { Readable } from "stream"; +import type { BlobOptionalType, ReadableStreamOptionalType } from "../externals-check/browser-externals-check"; +import type { SdkStream } from "../serde"; +/** + * @public + * + * This union represents a superset of the types you may receive + * in streaming payload outputs. + * + * @see StreamingPayloadInputTypes for FAQ about mixing types from multiple environments. + * + * To highlight the upstream docs about the SdkStream mixin: + * + * The interface contains mix-in (via Object.assign) methods to transform the runtime-specific + * stream implementation to specified format. Each stream can ONLY be transformed + * once. + * + * The available methods are described on the SdkStream type via SdkStreamMixin. + */ +export type StreamingBlobPayloadOutputTypes = NodeJsRuntimeStreamingBlobPayloadOutputTypes | BrowserRuntimeStreamingBlobPayloadOutputTypes; +/** + * @public + * + * Streaming payload output types in the Node.js environment. + * + * This is by default the IncomingMessage type from node:http responses when + * using the default node-http-handler in Node.js environments. + * + * It can be other Readable types like node:http2's ClientHttp2Stream + * such as when using the node-http2-handler. 
+ * + * The SdkStreamMixin adds methods on this type to help transform (collect) it to + * other formats. + */ +export type NodeJsRuntimeStreamingBlobPayloadOutputTypes = SdkStream; +/** + * @public + * + * Streaming payload output types in the browser environment. + * + * This is by default fetch's Response.body type (ReadableStream) when using + * the default fetch-http-handler in browser-like environments. + * + * It may be a Blob, such as when using the XMLHttpRequest handler + * and receiving an arraybuffer response body. + * + * The SdkStreamMixin adds methods on this type to help transform (collect) it to + * other formats. + */ +export type BrowserRuntimeStreamingBlobPayloadOutputTypes = SdkStream; diff --git a/amplify/functions/downloadDocument/node_modules/@smithy/types/dist-types/transfer.d.ts b/amplify/functions/downloadDocument/node_modules/@smithy/types/dist-types/transfer.d.ts new file mode 100644 index 0000000..462ee23 --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@smithy/types/dist-types/transfer.d.ts @@ -0,0 +1,41 @@ +/** + * @public + */ +export type RequestHandlerOutput = { + response: ResponseType; +}; +/** + * @public + */ +export interface RequestHandler { + /** + * metadata contains information of a handler. For example + * 'h2' refers this handler is for handling HTTP/2 requests, + * whereas 'h1' refers handling HTTP1 requests + */ + metadata?: RequestHandlerMetadata; + destroy?: () => void; + handle: (request: RequestType, handlerOptions?: HandlerOptions) => Promise>; +} +/** + * @public + */ +export interface RequestHandlerMetadata { + handlerProtocol: RequestHandlerProtocol | string; +} +/** + * @public + * Values from ALPN Protocol IDs. 
+ * @see https://www.iana.org/assignments/tls-extensiontype-values/tls-extensiontype-values.xhtml#alpn-protocol-ids + */ +export declare enum RequestHandlerProtocol { + HTTP_0_9 = "http/0.9", + HTTP_1_0 = "http/1.0", + TDS_8_0 = "tds/8.0" +} +/** + * @public + */ +export interface RequestContext { + destination: URL; +} diff --git a/amplify/functions/downloadDocument/node_modules/@smithy/types/dist-types/transform/client-method-transforms.d.ts b/amplify/functions/downloadDocument/node_modules/@smithy/types/dist-types/transform/client-method-transforms.d.ts new file mode 100644 index 0000000..f9424c4 --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@smithy/types/dist-types/transform/client-method-transforms.d.ts @@ -0,0 +1,26 @@ +import type { CommandIO } from "../command"; +import type { MetadataBearer } from "../response"; +import type { StreamingBlobPayloadOutputTypes } from "../streaming-payload/streaming-blob-payload-output-types"; +import type { Transform } from "./type-transform"; +/** + * @internal + * + * Narrowed version of InvokeFunction used in Client::send. + */ +export interface NarrowedInvokeFunction { + (command: CommandIO, options?: HttpHandlerOptions): Promise>; + (command: CommandIO, cb: (err: unknown, data?: Transform) => void): void; + (command: CommandIO, options: HttpHandlerOptions, cb: (err: unknown, data?: Transform) => void): void; + (command: CommandIO, options?: HttpHandlerOptions, cb?: (err: unknown, data?: Transform) => void): Promise> | void; +} +/** + * @internal + * + * Narrowed version of InvokeMethod used in aggregated Client methods. 
+ */ +export interface NarrowedInvokeMethod { + (input: InputType, options?: HttpHandlerOptions): Promise>; + (input: InputType, cb: (err: unknown, data?: Transform) => void): void; + (input: InputType, options: HttpHandlerOptions, cb: (err: unknown, data?: Transform) => void): void; + (input: InputType, options?: HttpHandlerOptions, cb?: (err: unknown, data?: OutputType) => void): Promise> | void; +} diff --git a/amplify/functions/downloadDocument/node_modules/@smithy/types/dist-types/transform/client-payload-blob-type-narrow.d.ts b/amplify/functions/downloadDocument/node_modules/@smithy/types/dist-types/transform/client-payload-blob-type-narrow.d.ts new file mode 100644 index 0000000..243a40f --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@smithy/types/dist-types/transform/client-payload-blob-type-narrow.d.ts @@ -0,0 +1,79 @@ +/// +/// +import type { IncomingMessage } from "http"; +import type { ClientHttp2Stream } from "http2"; +import type { InvokeMethod } from "../client"; +import type { GetOutputType } from "../command"; +import type { HttpHandlerOptions } from "../http"; +import type { SdkStream } from "../serde"; +import type { BrowserRuntimeStreamingBlobPayloadInputTypes, NodeJsRuntimeStreamingBlobPayloadInputTypes, StreamingBlobPayloadInputTypes } from "../streaming-payload/streaming-blob-payload-input-types"; +import type { StreamingBlobPayloadOutputTypes } from "../streaming-payload/streaming-blob-payload-output-types"; +import type { NarrowedInvokeMethod } from "./client-method-transforms"; +import type { Transform } from "./type-transform"; +/** + * @public + * + * Creates a type with a given client type that narrows payload blob output + * types to SdkStream. + * + * This can be used for clients with the NodeHttpHandler requestHandler, + * the default in Node.js when not using HTTP2. 
+ * + * Usage example: + * ```typescript + * const client = new YourClient({}) as NodeJsClient; + * ``` + */ +export type NodeJsClient = NarrowPayloadBlobTypes, ClientType>; +/** + * @public + * Variant of NodeJsClient for node:http2. + */ +export type NodeJsHttp2Client = NarrowPayloadBlobTypes, ClientType>; +/** + * @public + * + * Creates a type with a given client type that narrows payload blob output + * types to SdkStream. + * + * This can be used for clients with the FetchHttpHandler requestHandler, + * which is the default in browser environments. + * + * Usage example: + * ```typescript + * const client = new YourClient({}) as BrowserClient; + * ``` + */ +export type BrowserClient = NarrowPayloadBlobTypes, ClientType>; +/** + * @public + * + * Variant of BrowserClient for XMLHttpRequest. + */ +export type BrowserXhrClient = NarrowPayloadBlobTypes, ClientType>; +/** + * @public + * + * @deprecated use NarrowPayloadBlobTypes. + * + * Narrow a given Client's blob payload outputs to the given type T. + */ +export type NarrowPayloadBlobOutputType = { + [key in keyof ClientType]: [ClientType[key]] extends [ + InvokeMethod + ] ? NarrowedInvokeMethod : ClientType[key]; +} & { + send(command: Command, options?: any): Promise, StreamingBlobPayloadOutputTypes | undefined, T>>; +}; +/** + * @public + * + * Narrow a Client's blob payload input and output types to I and O. + */ +export type NarrowPayloadBlobTypes = { + [key in keyof ClientType]: [ClientType[key]] extends [ + InvokeMethod + ] ? 
NarrowedInvokeMethod, FunctionOutputTypes> : ClientType[key]; +} & { + send(command: Command, options?: any): Promise, StreamingBlobPayloadOutputTypes | undefined, O>>; +}; diff --git a/amplify/functions/downloadDocument/node_modules/@smithy/types/dist-types/transform/exact.d.ts b/amplify/functions/downloadDocument/node_modules/@smithy/types/dist-types/transform/exact.d.ts new file mode 100644 index 0000000..c8a15d8 --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@smithy/types/dist-types/transform/exact.d.ts @@ -0,0 +1,6 @@ +/** + * @internal + * + * Checks that A and B extend each other. + */ +export type Exact = [A] extends [B] ? ([B] extends [A] ? true : false) : false; diff --git a/amplify/functions/downloadDocument/node_modules/@smithy/types/dist-types/transform/no-undefined.d.ts b/amplify/functions/downloadDocument/node_modules/@smithy/types/dist-types/transform/no-undefined.d.ts new file mode 100644 index 0000000..a0ec72e --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@smithy/types/dist-types/transform/no-undefined.d.ts @@ -0,0 +1,68 @@ +import type { InvokeMethod, InvokeMethodOptionalArgs } from "../client"; +import type { GetOutputType } from "../command"; +import type { DocumentType } from "../shapes"; +/** + * @public + * + * This type is intended as a type helper for generated clients. + * When initializing client, cast it to this type by passing + * the client constructor type as the type parameter. + * + * It will then recursively remove "undefined" as a union type from all + * input and output shapes' members. Note, this does not affect + * any member that is optional (?) such as outputs with no required members. 
+ * + * @example + * ```ts + * const client = new Client({}) as AssertiveClient; + * ``` + */ +export type AssertiveClient = NarrowClientIOTypes; +/** + * @public + * + * This is similar to AssertiveClient but additionally changes all + * output types to (recursive) Required so as to bypass all output nullability guards. + */ +export type UncheckedClient = UncheckedClientOutputTypes; +/** + * @internal + * + * Excludes undefined recursively. + */ +export type NoUndefined = T extends Function ? T : T extends DocumentType ? T : [T] extends [object] ? { + [key in keyof T]: NoUndefined; +} : Exclude; +/** + * @internal + * + * Excludes undefined and optional recursively. + */ +export type RecursiveRequired = T extends Function ? T : T extends DocumentType ? T : [T] extends [object] ? { + [key in keyof T]-?: RecursiveRequired; +} : Exclude; +/** + * @internal + * + * Removes undefined from unions. + */ +type NarrowClientIOTypes = { + [key in keyof ClientType]: [ClientType[key]] extends [ + InvokeMethodOptionalArgs + ] ? InvokeMethodOptionalArgs, NoUndefined> : [ClientType[key]] extends [InvokeMethod] ? InvokeMethod, NoUndefined> : ClientType[key]; +} & { + send(command: Command, options?: any): Promise>>; +}; +/** + * @internal + * + * Removes undefined from unions and adds yolo output types. + */ +type UncheckedClientOutputTypes = { + [key in keyof ClientType]: [ClientType[key]] extends [ + InvokeMethodOptionalArgs + ] ? InvokeMethodOptionalArgs, RecursiveRequired> : [ClientType[key]] extends [InvokeMethod] ? 
InvokeMethod, RecursiveRequired> : ClientType[key]; +} & { + send(command: Command, options?: any): Promise>>>; +}; +export {}; diff --git a/amplify/functions/downloadDocument/node_modules/@smithy/types/dist-types/transform/type-transform.d.ts b/amplify/functions/downloadDocument/node_modules/@smithy/types/dist-types/transform/type-transform.d.ts new file mode 100644 index 0000000..90373fb --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@smithy/types/dist-types/transform/type-transform.d.ts @@ -0,0 +1,34 @@ +/** + * @public + * + * Transforms any members of the object T having type FromType + * to ToType. This applies only to exact type matches. + * + * This is for the case where FromType is a union and only those fields + * matching the same union should be transformed. + */ +export type Transform = ConditionalRecursiveTransformExact; +/** + * @internal + * + * Returns ToType if T matches exactly with FromType. + */ +type TransformExact = [T] extends [FromType] ? ([FromType] extends [T] ? ToType : T) : T; +/** + * @internal + * + * Applies TransformExact to members of an object recursively. + */ +type RecursiveTransformExact = T extends Function ? T : T extends object ? { + [key in keyof T]: [T[key]] extends [FromType] ? [FromType] extends [T[key]] ? ToType : ConditionalRecursiveTransformExact : ConditionalRecursiveTransformExact; +} : TransformExact; +/** + * @internal + * + * Same as RecursiveTransformExact but does not assign to an object + * unless there is a matching transformed member. + */ +type ConditionalRecursiveTransformExact = [T] extends [ + RecursiveTransformExact +] ? [RecursiveTransformExact] extends [T] ? 
T : RecursiveTransformExact : RecursiveTransformExact; +export {}; diff --git a/amplify/functions/downloadDocument/node_modules/@smithy/types/dist-types/ts3.4/abort-handler.d.ts b/amplify/functions/downloadDocument/node_modules/@smithy/types/dist-types/ts3.4/abort-handler.d.ts new file mode 100644 index 0000000..26c068c --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@smithy/types/dist-types/ts3.4/abort-handler.d.ts @@ -0,0 +1,7 @@ +import { AbortSignal as DeprecatedAbortSignal } from "./abort"; +/** + * @public + */ +export interface AbortHandler { + (this: AbortSignal | DeprecatedAbortSignal, ev: any): any; +} diff --git a/amplify/functions/downloadDocument/node_modules/@smithy/types/dist-types/ts3.4/abort.d.ts b/amplify/functions/downloadDocument/node_modules/@smithy/types/dist-types/ts3.4/abort.d.ts new file mode 100644 index 0000000..00741af --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@smithy/types/dist-types/ts3.4/abort.d.ts @@ -0,0 +1,50 @@ +import { AbortHandler } from "./abort-handler"; +/** + * @public + */ +export { AbortHandler }; +/** + * @public + * @deprecated use platform (global) type for AbortSignal. + * + * Holders of an AbortSignal object may query if the associated operation has + * been aborted and register an onabort handler. + * + * @see https://developer.mozilla.org/en-US/docs/Web/API/AbortSignal + */ +export interface AbortSignal { + /** + * Whether the action represented by this signal has been cancelled. + */ + readonly aborted: boolean; + /** + * A function to be invoked when the action represented by this signal has + * been cancelled. + */ + onabort: AbortHandler | Function | null; +} +/** + * @public + * @deprecated use platform (global) type for AbortController. + * + * The AWS SDK uses a Controller/Signal model to allow for cooperative + * cancellation of asynchronous operations. 
When initiating such an operation, + * the caller can create an AbortController and then provide linked signal to + * subtasks. This allows a single source to communicate to multiple consumers + * that an action has been aborted without dictating how that cancellation + * should be handled. + * + * @see https://developer.mozilla.org/en-US/docs/Web/API/AbortController + */ +export interface AbortController { + /** + * An object that reports whether the action associated with this + * `AbortController` has been cancelled. + */ + readonly signal: AbortSignal; + /** + * Declares the operation associated with this AbortController to have been + * cancelled. + */ + abort(): void; +} diff --git a/amplify/functions/downloadDocument/node_modules/@smithy/types/dist-types/ts3.4/auth/HttpApiKeyAuth.d.ts b/amplify/functions/downloadDocument/node_modules/@smithy/types/dist-types/ts3.4/auth/HttpApiKeyAuth.d.ts new file mode 100644 index 0000000..380c8fc --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@smithy/types/dist-types/ts3.4/auth/HttpApiKeyAuth.d.ts @@ -0,0 +1,7 @@ +/** + * @internal + */ +export declare enum HttpApiKeyAuthLocation { + HEADER = "header", + QUERY = "query" +} diff --git a/amplify/functions/downloadDocument/node_modules/@smithy/types/dist-types/ts3.4/auth/HttpAuthScheme.d.ts b/amplify/functions/downloadDocument/node_modules/@smithy/types/dist-types/ts3.4/auth/HttpAuthScheme.d.ts new file mode 100644 index 0000000..e0d939e --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@smithy/types/dist-types/ts3.4/auth/HttpAuthScheme.d.ts @@ -0,0 +1,49 @@ +import { Identity, IdentityProvider } from "../identity/identity"; +import { HandlerExecutionContext } from "../middleware"; +import { HttpSigner } from "./HttpSigner"; +import { IdentityProviderConfig } from "./IdentityProviderConfig"; +/** + * ID for {@link HttpAuthScheme} + * @internal + */ +export type HttpAuthSchemeId = string; +/** + * Interface that defines an HttpAuthScheme 
+ * @internal + */ +export interface HttpAuthScheme { + /** + * ID for an HttpAuthScheme, typically the absolute shape ID of a Smithy auth trait. + */ + schemeId: HttpAuthSchemeId; + /** + * Gets the IdentityProvider corresponding to an HttpAuthScheme. + */ + identityProvider(config: IdentityProviderConfig): IdentityProvider | undefined; + /** + * HttpSigner corresponding to an HttpAuthScheme. + */ + signer: HttpSigner; +} +/** + * Interface that defines the identity and signing properties when selecting + * an HttpAuthScheme. + * @internal + */ +export interface HttpAuthOption { + schemeId: HttpAuthSchemeId; + identityProperties?: Record; + signingProperties?: Record; + propertiesExtractor?: (config: TConfig, context: TContext) => { + identityProperties?: Record; + signingProperties?: Record; + }; +} +/** + * @internal + */ +export interface SelectedHttpAuthScheme { + httpAuthOption: HttpAuthOption; + identity: Identity; + signer: HttpSigner; +} diff --git a/amplify/functions/downloadDocument/node_modules/@smithy/types/dist-types/ts3.4/auth/HttpAuthSchemeProvider.d.ts b/amplify/functions/downloadDocument/node_modules/@smithy/types/dist-types/ts3.4/auth/HttpAuthSchemeProvider.d.ts new file mode 100644 index 0000000..d417aaf --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@smithy/types/dist-types/ts3.4/auth/HttpAuthSchemeProvider.d.ts @@ -0,0 +1,20 @@ +import { HandlerExecutionContext } from "../middleware"; +import { HttpAuthOption } from "./HttpAuthScheme"; +/** + * @internal + */ +export interface HttpAuthSchemeParameters { + operation?: string; +} +/** + * @internal + */ +export interface HttpAuthSchemeProvider { + (authParameters: TParameters): HttpAuthOption[]; +} +/** + * @internal + */ +export interface HttpAuthSchemeParametersProvider { + (config: TConfig, context: TContext, input: TInput): Promise; +} diff --git a/amplify/functions/downloadDocument/node_modules/@smithy/types/dist-types/ts3.4/auth/HttpSigner.d.ts 
b/amplify/functions/downloadDocument/node_modules/@smithy/types/dist-types/ts3.4/auth/HttpSigner.d.ts new file mode 100644 index 0000000..7abcf84 --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@smithy/types/dist-types/ts3.4/auth/HttpSigner.d.ts @@ -0,0 +1,41 @@ +import { HttpRequest, HttpResponse } from "../http"; +import { Identity } from "../identity/identity"; +/** + * @internal + */ +export interface ErrorHandler { + (signingProperties: Record): (error: E) => never; +} +/** + * @internal + */ +export interface SuccessHandler { + (httpResponse: HttpResponse | unknown, signingProperties: Record): void; +} +/** + * Interface to sign identity and signing properties. + * @internal + */ +export interface HttpSigner { + /** + * Signs an HttpRequest with an identity and signing properties. + * @param httpRequest request to sign + * @param identity identity to sing the request with + * @param signingProperties property bag for signing + * @returns signed request in a promise + */ + sign(httpRequest: HttpRequest, identity: Identity, signingProperties: Record): Promise; + /** + * Handler that executes after the {@link HttpSigner.sign} invocation and corresponding + * middleware throws an error. + * The error handler is expected to throw the error it receives, so the return type of the error handler is `never`. + * @internal + */ + errorHandler?: ErrorHandler; + /** + * Handler that executes after the {@link HttpSigner.sign} invocation and corresponding + * middleware succeeds. 
+ * @internal + */ + successHandler?: SuccessHandler; +} diff --git a/amplify/functions/downloadDocument/node_modules/@smithy/types/dist-types/ts3.4/auth/IdentityProviderConfig.d.ts b/amplify/functions/downloadDocument/node_modules/@smithy/types/dist-types/ts3.4/auth/IdentityProviderConfig.d.ts new file mode 100644 index 0000000..6a50f65 --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@smithy/types/dist-types/ts3.4/auth/IdentityProviderConfig.d.ts @@ -0,0 +1,14 @@ +import { Identity, IdentityProvider } from "../identity/identity"; +import { HttpAuthSchemeId } from "./HttpAuthScheme"; +/** + * Interface to get an IdentityProvider for a specified HttpAuthScheme + * @internal + */ +export interface IdentityProviderConfig { + /** + * Get the IdentityProvider for a specified HttpAuthScheme. + * @param schemeId schemeId of the HttpAuthScheme + * @returns IdentityProvider or undefined if HttpAuthScheme is not found + */ + getIdentityProvider(schemeId: HttpAuthSchemeId): IdentityProvider | undefined; +} diff --git a/amplify/functions/downloadDocument/node_modules/@smithy/types/dist-types/ts3.4/auth/auth.d.ts b/amplify/functions/downloadDocument/node_modules/@smithy/types/dist-types/ts3.4/auth/auth.d.ts new file mode 100644 index 0000000..8241fe3 --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@smithy/types/dist-types/ts3.4/auth/auth.d.ts @@ -0,0 +1,57 @@ +/** + * @internal + * + * Authentication schemes represent a way that the service will authenticate the customer’s identity. + */ +export interface AuthScheme { + /** + * @example "sigv4a" or "sigv4" + */ + name: "sigv4" | "sigv4a" | string; + /** + * @example "s3" + */ + signingName: string; + /** + * @example "us-east-1" + */ + signingRegion: string; + /** + * @example ["*"] + * @example ["us-west-2", "us-east-1"] + */ + signingRegionSet?: string[]; + /** + * @deprecated this field was renamed to signingRegion. 
+ */ + signingScope?: never; + properties: Record; +} +/** + * @internal + * @deprecated + */ +export interface HttpAuthDefinition { + /** + * Defines the location of where the Auth is serialized. + */ + in: HttpAuthLocation; + /** + * Defines the name of the HTTP header or query string parameter + * that contains the Auth. + */ + name: string; + /** + * Defines the security scheme to use on the `Authorization` header value. + * This can only be set if the "in" property is set to {@link HttpAuthLocation.HEADER}. + */ + scheme?: string; +} +/** + * @internal + * @deprecated + */ +export declare enum HttpAuthLocation { + HEADER = "header", + QUERY = "query" +} diff --git a/amplify/functions/downloadDocument/node_modules/@smithy/types/dist-types/ts3.4/auth/index.d.ts b/amplify/functions/downloadDocument/node_modules/@smithy/types/dist-types/ts3.4/auth/index.d.ts new file mode 100644 index 0000000..fbb845d --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@smithy/types/dist-types/ts3.4/auth/index.d.ts @@ -0,0 +1,6 @@ +export * from "./auth"; +export * from "./HttpApiKeyAuth"; +export * from "./HttpAuthScheme"; +export * from "./HttpAuthSchemeProvider"; +export * from "./HttpSigner"; +export * from "./IdentityProviderConfig"; diff --git a/amplify/functions/downloadDocument/node_modules/@smithy/types/dist-types/ts3.4/blob/blob-payload-input-types.d.ts b/amplify/functions/downloadDocument/node_modules/@smithy/types/dist-types/ts3.4/blob/blob-payload-input-types.d.ts new file mode 100644 index 0000000..465c9a3 --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@smithy/types/dist-types/ts3.4/blob/blob-payload-input-types.d.ts @@ -0,0 +1,41 @@ +/// +import { Readable } from "stream"; +import { BlobOptionalType, ReadableStreamOptionalType } from "../externals-check/browser-externals-check"; +/** + * @public + * + * A union of types that can be used as inputs for the service model + * "blob" type when it represents the request's entire 
payload or body. + * + * For example, in Lambda::invoke, the payload is modeled as a blob type + * and this union applies to it. + * In contrast, in Lambda::createFunction the Zip file option is a blob type, + * but is not the (entire) payload and this union does not apply. + * + * Note: not all types are signable by the standard SignatureV4 signer when + * used as the request body. For example, in Node.js a Readable stream + * is not signable by the default signer. + * They are included in the union because it may work in some cases, + * but the expected types are primarily string and Uint8Array. + * + * Additional details may be found in the internal + * function "getPayloadHash" in the SignatureV4 module. + */ +export type BlobPayloadInputTypes = string | ArrayBuffer | ArrayBufferView | Uint8Array | NodeJsRuntimeBlobTypes | BrowserRuntimeBlobTypes; +/** + * @public + * + * Additional blob types for the Node.js environment. + */ +export type NodeJsRuntimeBlobTypes = Readable | Buffer; +/** + * @public + * + * Additional blob types for the browser environment. + */ +export type BrowserRuntimeBlobTypes = BlobOptionalType | ReadableStreamOptionalType; +/** + * @internal + * @deprecated renamed to BlobPayloadInputTypes. + */ +export type BlobTypes = BlobPayloadInputTypes; diff --git a/amplify/functions/downloadDocument/node_modules/@smithy/types/dist-types/ts3.4/checksum.d.ts b/amplify/functions/downloadDocument/node_modules/@smithy/types/dist-types/ts3.4/checksum.d.ts new file mode 100644 index 0000000..dbfff0c --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@smithy/types/dist-types/ts3.4/checksum.d.ts @@ -0,0 +1,63 @@ +import { SourceData } from "./crypto"; +/** + * @public + * + * An object that provides a checksum of data provided in chunks to `update`. + * The checksum may be performed incrementally as chunks are received or all + * at once when the checksum is finalized, depending on the underlying + * implementation. 
+ * + * It's recommended to compute checksum incrementally to avoid reading the + * entire payload in memory. + * + * A class that implements this interface may accept an optional secret key in its + * constructor while computing checksum value, when using HMAC. If provided, + * this secret key would be used when computing checksum. + */ +export interface Checksum { + /** + * Constant length of the digest created by the algorithm in bytes. + */ + digestLength?: number; + /** + * Creates a new checksum object that contains a deep copy of the internal + * state of the current `Checksum` object. + */ + copy?(): Checksum; + /** + * Returns the digest of all of the data passed. + */ + digest(): Promise; + /** + * Allows marking a checksum for checksums that support the ability + * to mark and reset. + * + * @param readLimit - The maximum limit of bytes that can be read + * before the mark position becomes invalid. + */ + mark?(readLimit: number): void; + /** + * Resets the checksum to its initial value. + */ + reset(): void; + /** + * Adds a chunk of data for which checksum needs to be computed. + * This can be called many times with new data as it is streamed. + * + * Implementations may override this method which passes second param + * which makes Checksum object stateless. + * + * @param chunk - The buffer to update checksum with. + */ + update(chunk: Uint8Array): void; +} +/** + * @public + * + * A constructor for a Checksum that may be used to calculate an HMAC. Implementing + * classes should not directly hold the provided key in memory beyond the + * lexical scope of the constructor. 
+ */ +export interface ChecksumConstructor { + new (secret?: SourceData): Checksum; +} diff --git a/amplify/functions/downloadDocument/node_modules/@smithy/types/dist-types/ts3.4/client.d.ts b/amplify/functions/downloadDocument/node_modules/@smithy/types/dist-types/ts3.4/client.d.ts new file mode 100644 index 0000000..1d05c04 --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@smithy/types/dist-types/ts3.4/client.d.ts @@ -0,0 +1,57 @@ +import { Command } from "./command"; +import { MiddlewareStack } from "./middleware"; +import { MetadataBearer } from "./response"; +import { OptionalParameter } from "./util"; +/** + * @public + * + * A type which checks if the client configuration is optional. + * If all entries of the client configuration are optional, it allows client creation without passing any config. + */ +export type CheckOptionalClientConfig = OptionalParameter; +/** + * @public + * + * function definition for different overrides of client's 'send' function. + */ +export interface InvokeFunction { + (command: Command, options?: any): Promise; + (command: Command, cb: (err: any, data?: OutputType) => void): void; + (command: Command, options: any, cb: (err: any, data?: OutputType) => void): void; + (command: Command, options?: any, cb?: (err: any, data?: OutputType) => void): Promise | void; +} +/** + * @public + * + * Signature that appears on aggregated clients' methods. + */ +export interface InvokeMethod { + (input: InputType, options?: any): Promise; + (input: InputType, cb: (err: any, data?: OutputType) => void): void; + (input: InputType, options: any, cb: (err: any, data?: OutputType) => void): void; + (input: InputType, options?: any, cb?: (err: any, data?: OutputType) => void): Promise | void; +} +/** + * @public + * + * Signature that appears on aggregated clients' methods when argument is optional. 
+ */ +export interface InvokeMethodOptionalArgs { + (): Promise; + (input: InputType, options?: any): Promise; + (input: InputType, cb: (err: any, data?: OutputType) => void): void; + (input: InputType, options: any, cb: (err: any, data?: OutputType) => void): void; + (input: InputType, options?: any, cb?: (err: any, data?: OutputType) => void): Promise | void; +} +/** + * A general interface for service clients, idempotent to browser or node clients + * This type corresponds to SmithyClient(https://github.com/aws/aws-sdk-js-v3/blob/main/packages/smithy-client/src/client.ts). + * It's provided for using without importing the SmithyClient class. + * @internal + */ +export interface Client { + readonly config: ResolvedClientConfiguration; + middlewareStack: MiddlewareStack; + send: InvokeFunction; + destroy: () => void; +} diff --git a/amplify/functions/downloadDocument/node_modules/@smithy/types/dist-types/ts3.4/command.d.ts b/amplify/functions/downloadDocument/node_modules/@smithy/types/dist-types/ts3.4/command.d.ts new file mode 100644 index 0000000..fb7c5b6 --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@smithy/types/dist-types/ts3.4/command.d.ts @@ -0,0 +1,23 @@ +import { Handler, MiddlewareStack } from "./middleware"; +import { MetadataBearer } from "./response"; +/** + * @public + */ +export interface Command extends CommandIO { + readonly input: InputType; + readonly middlewareStack: MiddlewareStack; + resolveMiddleware(stack: MiddlewareStack, configuration: ResolvedConfiguration, options: any): Handler; +} +/** + * @internal + * + * This is a subset of the Command type used only to detect the i/o types. + */ +export interface CommandIO { + readonly input: InputType; + resolveMiddleware(stack: any, configuration: any, options: any): Handler; +} +/** + * @internal + */ +export type GetOutputType = Command extends CommandIO ? 
O : never; diff --git a/amplify/functions/downloadDocument/node_modules/@smithy/types/dist-types/ts3.4/connection/config.d.ts b/amplify/functions/downloadDocument/node_modules/@smithy/types/dist-types/ts3.4/connection/config.d.ts new file mode 100644 index 0000000..09ed18b --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@smithy/types/dist-types/ts3.4/connection/config.d.ts @@ -0,0 +1,10 @@ +/** + * @public + */ +export interface ConnectConfiguration { + /** + * The maximum time in milliseconds that the connection phase of a request + * may take before the connection attempt is abandoned. + */ + requestTimeout?: number; +} diff --git a/amplify/functions/downloadDocument/node_modules/@smithy/types/dist-types/ts3.4/connection/index.d.ts b/amplify/functions/downloadDocument/node_modules/@smithy/types/dist-types/ts3.4/connection/index.d.ts new file mode 100644 index 0000000..eaacf8b --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@smithy/types/dist-types/ts3.4/connection/index.d.ts @@ -0,0 +1,3 @@ +export * from "./config"; +export * from "./manager"; +export * from "./pool"; diff --git a/amplify/functions/downloadDocument/node_modules/@smithy/types/dist-types/ts3.4/connection/manager.d.ts b/amplify/functions/downloadDocument/node_modules/@smithy/types/dist-types/ts3.4/connection/manager.d.ts new file mode 100644 index 0000000..7245028 --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@smithy/types/dist-types/ts3.4/connection/manager.d.ts @@ -0,0 +1,34 @@ +import { RequestContext } from "../transfer"; +import { ConnectConfiguration } from "./config"; +/** + * @public + */ +export interface ConnectionManagerConfiguration { + /** + * Maximum number of allowed concurrent requests per connection. + */ + maxConcurrency?: number; + /** + * Disables concurrent requests per connection. 
+ */ + disableConcurrency?: boolean; +} +/** + * @public + */ +export interface ConnectionManager { + /** + * Retrieves a connection from the connection pool if available, + * otherwise establish a new connection + */ + lease(requestContext: RequestContext, connectionConfiguration: ConnectConfiguration): T; + /** + * Releases the connection back to the pool making it potentially + * re-usable by other requests. + */ + release(requestContext: RequestContext, connection: T): void; + /** + * Destroys the connection manager. All connections will be closed. + */ + destroy(): void; +} diff --git a/amplify/functions/downloadDocument/node_modules/@smithy/types/dist-types/ts3.4/connection/pool.d.ts b/amplify/functions/downloadDocument/node_modules/@smithy/types/dist-types/ts3.4/connection/pool.d.ts new file mode 100644 index 0000000..161094f --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@smithy/types/dist-types/ts3.4/connection/pool.d.ts @@ -0,0 +1,32 @@ +/** + * @public + */ +export interface ConnectionPool { + /** + * Retrieve the first connection in the pool + */ + poll(): T | void; + /** + * Release the connection back to the pool making it potentially + * re-usable by other requests. + */ + offerLast(connection: T): void; + /** + * Removes the connection from the pool, and destroys it. + */ + destroy(connection: T): void; + /** + * Implements the iterable protocol and allows arrays to be consumed + * by most syntaxes expecting iterables, such as the spread syntax + * and for...of loops + */ + [Symbol.iterator](): Iterator; +} +/** + * Unused. 
+ * @internal + * @deprecated + */ +export interface CacheKey { + destination: string; +} diff --git a/amplify/functions/downloadDocument/node_modules/@smithy/types/dist-types/ts3.4/crypto.d.ts b/amplify/functions/downloadDocument/node_modules/@smithy/types/dist-types/ts3.4/crypto.d.ts new file mode 100644 index 0000000..467ec86 --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@smithy/types/dist-types/ts3.4/crypto.d.ts @@ -0,0 +1,60 @@ +/** + * @public + */ +export type SourceData = string | ArrayBuffer | ArrayBufferView; +/** + * @public + * + * An object that provides a hash of data provided in chunks to `update`. The + * hash may be performed incrementally as chunks are received or all at once + * when the hash is finalized, depending on the underlying implementation. + * + * @deprecated use {@link Checksum} + */ +export interface Hash { + /** + * Adds a chunk of data to the hash. If a buffer is provided, the `encoding` + * argument will be ignored. If a string is provided without a specified + * encoding, implementations must assume UTF-8 encoding. + * + * Not all encodings are supported on all platforms, though all must support + * UTF-8. + */ + update(toHash: SourceData, encoding?: "utf8" | "ascii" | "latin1"): void; + /** + * Finalizes the hash and provides a promise that will be fulfilled with the + * raw bytes of the calculated hash. + */ + digest(): Promise; +} +/** + * @public + * + * A constructor for a hash that may be used to calculate an HMAC. Implementing + * classes should not directly hold the provided key in memory beyond the + * lexical scope of the constructor. + * + * @deprecated use {@link ChecksumConstructor} + */ +export interface HashConstructor { + new (secret?: SourceData): Hash; +} +/** + * @public + * + * A function that calculates the hash of a data stream. Determining the hash + * will consume the stream, so only replayable streams should be provided to an + * implementation of this interface. 
+ */ +export interface StreamHasher { + (hashCtor: HashConstructor, stream: StreamType): Promise; +} +/** + * @public + * + * A function that returns a promise fulfilled with bytes from a + * cryptographically secure pseudorandom number generator. + */ +export interface randomValues { + (byteLength: number): Promise; +} diff --git a/amplify/functions/downloadDocument/node_modules/@smithy/types/dist-types/ts3.4/downlevel-ts3.4/transform/type-transform.d.ts b/amplify/functions/downloadDocument/node_modules/@smithy/types/dist-types/ts3.4/downlevel-ts3.4/transform/type-transform.d.ts new file mode 100644 index 0000000..547303f --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@smithy/types/dist-types/ts3.4/downlevel-ts3.4/transform/type-transform.d.ts @@ -0,0 +1,41 @@ +/** + * @public + * + * Transforms any members of the object T having type FromType + * to ToType. This applies only to exact type matches. + * + * This is for the case where FromType is a union and only those fields + * matching the same union should be transformed. + */ +export type Transform = RecursiveTransformExact; +/** + * @internal + * + * Returns ToType if T matches exactly with FromType. + */ +type TransformExact = [ + T +] extends [ + FromType +] ? ([ + FromType +] extends [ + T +] ? ToType : T) : T; +/** + * @internal + * + * Applies TransformExact to members of an object recursively. + */ +type RecursiveTransformExact = T extends Function ? T : T extends object ? { + [key in keyof T]: [ + T[key] + ] extends [ + FromType + ] ? [ + FromType + ] extends [ + T[key] + ] ? 
ToType : RecursiveTransformExact : RecursiveTransformExact; +} : TransformExact; +export {}; diff --git a/amplify/functions/downloadDocument/node_modules/@smithy/types/dist-types/ts3.4/encode.d.ts b/amplify/functions/downloadDocument/node_modules/@smithy/types/dist-types/ts3.4/encode.d.ts new file mode 100644 index 0000000..4714bf9 --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@smithy/types/dist-types/ts3.4/encode.d.ts @@ -0,0 +1,31 @@ +import { Message } from "./eventStream"; +/** + * @public + */ +export interface MessageEncoder { + encode(message: Message): Uint8Array; +} +/** + * @public + */ +export interface MessageDecoder { + decode(message: ArrayBufferView): Message; + feed(message: ArrayBufferView): void; + endOfStream(): void; + getMessage(): AvailableMessage; + getAvailableMessages(): AvailableMessages; +} +/** + * @public + */ +export interface AvailableMessage { + getMessage(): Message | undefined; + isEndOfStream(): boolean; +} +/** + * @public + */ +export interface AvailableMessages { + getMessages(): Message[]; + isEndOfStream(): boolean; +} diff --git a/amplify/functions/downloadDocument/node_modules/@smithy/types/dist-types/ts3.4/endpoint.d.ts b/amplify/functions/downloadDocument/node_modules/@smithy/types/dist-types/ts3.4/endpoint.d.ts new file mode 100644 index 0000000..a1221ee --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@smithy/types/dist-types/ts3.4/endpoint.d.ts @@ -0,0 +1,77 @@ +import { AuthScheme } from "./auth/auth"; +/** + * @public + */ +export interface EndpointPartition { + name: string; + dnsSuffix: string; + dualStackDnsSuffix: string; + supportsFIPS: boolean; + supportsDualStack: boolean; +} +/** + * @public + */ +export interface EndpointARN { + partition: string; + service: string; + region: string; + accountId: string; + resourceId: Array; +} +/** + * @public + */ +export declare enum EndpointURLScheme { + HTTP = "http", + HTTPS = "https" +} +/** + * @public + */ +export interface 
EndpointURL { + /** + * The URL scheme such as http or https. + */ + scheme: EndpointURLScheme; + /** + * The authority is the host and optional port component of the URL. + */ + authority: string; + /** + * The parsed path segment of the URL. + * This value is as-is as provided by the user. + */ + path: string; + /** + * The parsed path segment of the URL. + * This value is guranteed to start and end with a "/". + */ + normalizedPath: string; + /** + * A boolean indicating whether the authority is an IP address. + */ + isIp: boolean; +} +/** + * @public + */ +export type EndpointObjectProperty = string | boolean | { + [key: string]: EndpointObjectProperty; +} | EndpointObjectProperty[]; +/** + * @public + */ +export interface EndpointV2 { + url: URL; + properties?: { + authSchemes?: AuthScheme[]; + } & Record; + headers?: Record; +} +/** + * @public + */ +export type EndpointParameters = { + [name: string]: undefined | boolean | string | string[]; +}; diff --git a/amplify/functions/downloadDocument/node_modules/@smithy/types/dist-types/ts3.4/endpoints/EndpointRuleObject.d.ts b/amplify/functions/downloadDocument/node_modules/@smithy/types/dist-types/ts3.4/endpoints/EndpointRuleObject.d.ts new file mode 100644 index 0000000..2c8026b --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@smithy/types/dist-types/ts3.4/endpoints/EndpointRuleObject.d.ts @@ -0,0 +1,27 @@ +import { EndpointObjectProperty } from "../endpoint"; +import { ConditionObject, Expression } from "./shared"; +/** + * @public + */ +export type EndpointObjectProperties = Record; +/** + * @public + */ +export type EndpointObjectHeaders = Record; +/** + * @public + */ +export type EndpointObject = { + url: Expression; + properties?: EndpointObjectProperties; + headers?: EndpointObjectHeaders; +}; +/** + * @public + */ +export type EndpointRuleObject = { + type: "endpoint"; + conditions?: ConditionObject[]; + endpoint: EndpointObject; + documentation?: string; +}; diff --git 
a/amplify/functions/downloadDocument/node_modules/@smithy/types/dist-types/ts3.4/endpoints/ErrorRuleObject.d.ts b/amplify/functions/downloadDocument/node_modules/@smithy/types/dist-types/ts3.4/endpoints/ErrorRuleObject.d.ts new file mode 100644 index 0000000..98fc7a8 --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@smithy/types/dist-types/ts3.4/endpoints/ErrorRuleObject.d.ts @@ -0,0 +1,10 @@ +import { ConditionObject, Expression } from "./shared"; +/** + * @public + */ +export type ErrorRuleObject = { + type: "error"; + conditions?: ConditionObject[]; + error: Expression; + documentation?: string; +}; diff --git a/amplify/functions/downloadDocument/node_modules/@smithy/types/dist-types/ts3.4/endpoints/RuleSetObject.d.ts b/amplify/functions/downloadDocument/node_modules/@smithy/types/dist-types/ts3.4/endpoints/RuleSetObject.d.ts new file mode 100644 index 0000000..e749fba --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@smithy/types/dist-types/ts3.4/endpoints/RuleSetObject.d.ts @@ -0,0 +1,28 @@ +import { RuleSetRules } from "./TreeRuleObject"; +/** + * @public + */ +export type DeprecatedObject = { + message?: string; + since?: string; +}; +/** + * @public + */ +export type ParameterObject = { + type: "String" | "string" | "Boolean" | "boolean"; + default?: string | boolean; + required?: boolean; + documentation?: string; + builtIn?: string; + deprecated?: DeprecatedObject; +}; +/** + * @public + */ +export type RuleSetObject = { + version: string; + serviceId?: string; + parameters: Record; + rules: RuleSetRules; +}; diff --git a/amplify/functions/downloadDocument/node_modules/@smithy/types/dist-types/ts3.4/endpoints/TreeRuleObject.d.ts b/amplify/functions/downloadDocument/node_modules/@smithy/types/dist-types/ts3.4/endpoints/TreeRuleObject.d.ts new file mode 100644 index 0000000..c203eed --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@smithy/types/dist-types/ts3.4/endpoints/TreeRuleObject.d.ts @@ -0,0 
+1,16 @@ +import { EndpointRuleObject } from "./EndpointRuleObject"; +import { ErrorRuleObject } from "./ErrorRuleObject"; +import { ConditionObject } from "./shared"; +/** + * @public + */ +export type RuleSetRules = Array; +/** + * @public + */ +export type TreeRuleObject = { + type: "tree"; + conditions?: ConditionObject[]; + rules: RuleSetRules; + documentation?: string; +}; diff --git a/amplify/functions/downloadDocument/node_modules/@smithy/types/dist-types/ts3.4/endpoints/index.d.ts b/amplify/functions/downloadDocument/node_modules/@smithy/types/dist-types/ts3.4/endpoints/index.d.ts new file mode 100644 index 0000000..8a29789 --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@smithy/types/dist-types/ts3.4/endpoints/index.d.ts @@ -0,0 +1,5 @@ +export * from "./EndpointRuleObject"; +export * from "./ErrorRuleObject"; +export * from "./RuleSetObject"; +export * from "./shared"; +export * from "./TreeRuleObject"; diff --git a/amplify/functions/downloadDocument/node_modules/@smithy/types/dist-types/ts3.4/endpoints/shared.d.ts b/amplify/functions/downloadDocument/node_modules/@smithy/types/dist-types/ts3.4/endpoints/shared.d.ts new file mode 100644 index 0000000..1c5d4b6 --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@smithy/types/dist-types/ts3.4/endpoints/shared.d.ts @@ -0,0 +1,55 @@ +import { Logger } from "../logger"; +/** + * @public + */ +export type ReferenceObject = { + ref: string; +}; +/** + * @public + */ +export type FunctionObject = { + fn: string; + argv: FunctionArgv; +}; +/** + * @public + */ +export type FunctionArgv = Array; +/** + * @public + */ +export type FunctionReturn = string | boolean | number | { + [key: string]: FunctionReturn; +}; +/** + * @public + */ +export type ConditionObject = FunctionObject & { + assign?: string; +}; +/** + * @public + */ +export type Expression = string | ReferenceObject | FunctionObject; +/** + * @public + */ +export type EndpointParams = Record; +/** + * @public + */ 
+export type EndpointResolverOptions = { + endpointParams: EndpointParams; + logger?: Logger; +}; +/** + * @public + */ +export type ReferenceRecord = Record; +/** + * @public + */ +export type EvaluateOptions = EndpointResolverOptions & { + referenceRecord: ReferenceRecord; +}; diff --git a/amplify/functions/downloadDocument/node_modules/@smithy/types/dist-types/ts3.4/eventStream.d.ts b/amplify/functions/downloadDocument/node_modules/@smithy/types/dist-types/ts3.4/eventStream.d.ts new file mode 100644 index 0000000..49c37c7 --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@smithy/types/dist-types/ts3.4/eventStream.d.ts @@ -0,0 +1,137 @@ +import { HttpRequest } from "./http"; +import { FinalizeHandler, FinalizeHandlerArguments, FinalizeHandlerOutput, HandlerExecutionContext } from "./middleware"; +import { MetadataBearer } from "./response"; +/** + * @public + * + * An event stream message. The headers and body properties will always be + * defined, with empty headers represented as an object with no keys and an + * empty body represented as a zero-length Uint8Array. 
+ */ +export interface Message { + headers: MessageHeaders; + body: Uint8Array; +} +/** + * @public + */ +export type MessageHeaders = Record; +/** + * @public + */ +export type HeaderValue = { + type: K; + value: V; +}; +/** + * @public + */ +export type BooleanHeaderValue = HeaderValue<"boolean", boolean>; +/** + * @public + */ +export type ByteHeaderValue = HeaderValue<"byte", number>; +/** + * @public + */ +export type ShortHeaderValue = HeaderValue<"short", number>; +/** + * @public + */ +export type IntegerHeaderValue = HeaderValue<"integer", number>; +/** + * @public + */ +export type LongHeaderValue = HeaderValue<"long", Int64>; +/** + * @public + */ +export type BinaryHeaderValue = HeaderValue<"binary", Uint8Array>; +/** + * @public + */ +export type StringHeaderValue = HeaderValue<"string", string>; +/** + * @public + */ +export type TimestampHeaderValue = HeaderValue<"timestamp", Date>; +/** + * @public + */ +export type UuidHeaderValue = HeaderValue<"uuid", string>; +/** + * @public + */ +export type MessageHeaderValue = BooleanHeaderValue | ByteHeaderValue | ShortHeaderValue | IntegerHeaderValue | LongHeaderValue | BinaryHeaderValue | StringHeaderValue | TimestampHeaderValue | UuidHeaderValue; +/** + * @public + */ +export interface Int64 { + readonly bytes: Uint8Array; + valueOf: () => number; + toString: () => string; +} +/** + * @public + * + * Util functions for serializing or deserializing event stream + */ +export interface EventStreamSerdeContext { + eventStreamMarshaller: EventStreamMarshaller; +} +/** + * @public + * + * A function which deserializes binary event stream message into modeled shape. + */ +export interface EventStreamMarshallerDeserFn { + (body: StreamType, deserializer: (input: Record) => Promise): AsyncIterable; +} +/** + * @public + * + * A function that serializes modeled shape into binary stream message. 
+ */ +export interface EventStreamMarshallerSerFn { + (input: AsyncIterable, serializer: (event: T) => Message): StreamType; +} +/** + * @public + * + * An interface which provides functions for serializing and deserializing binary event stream + * to/from corresponsing modeled shape. + */ +export interface EventStreamMarshaller { + deserialize: EventStreamMarshallerDeserFn; + serialize: EventStreamMarshallerSerFn; +} +/** + * @public + */ +export interface EventStreamRequestSigner { + sign(request: HttpRequest): Promise; +} +/** + * @public + */ +export interface EventStreamPayloadHandler { + handle: (next: FinalizeHandler, args: FinalizeHandlerArguments, context?: HandlerExecutionContext) => Promise>; +} +/** + * @public + */ +export interface EventStreamPayloadHandlerProvider { + (options: any): EventStreamPayloadHandler; +} +/** + * @public + */ +export interface EventStreamSerdeProvider { + (options: any): EventStreamMarshaller; +} +/** + * @public + */ +export interface EventStreamSignerProvider { + (options: any): EventStreamRequestSigner; +} diff --git a/amplify/functions/downloadDocument/node_modules/@smithy/types/dist-types/ts3.4/extensions/checksum.d.ts b/amplify/functions/downloadDocument/node_modules/@smithy/types/dist-types/ts3.4/extensions/checksum.d.ts new file mode 100644 index 0000000..8ebbf00 --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@smithy/types/dist-types/ts3.4/extensions/checksum.d.ts @@ -0,0 +1,58 @@ +import { ChecksumConstructor } from "../checksum"; +import { HashConstructor } from "../crypto"; +/** + * @internal + */ +export declare enum AlgorithmId { + MD5 = "md5", + CRC32 = "crc32", + CRC32C = "crc32c", + SHA1 = "sha1", + SHA256 = "sha256" +} +/** + * @internal + */ +export interface ChecksumAlgorithm { + algorithmId(): AlgorithmId; + checksumConstructor(): ChecksumConstructor | HashConstructor; +} +/** + * @deprecated unused. 
+ * @internal + */ +type ChecksumConfigurationLegacy = { + [other in string | number]: any; +}; +/** + * @internal + */ +export interface ChecksumConfiguration extends ChecksumConfigurationLegacy { + addChecksumAlgorithm(algo: ChecksumAlgorithm): void; + checksumAlgorithms(): ChecksumAlgorithm[]; +} +/** + * @deprecated will be removed for implicit type. + * @internal + */ +type GetChecksumConfigurationType = (runtimeConfig: Partial<{ + sha256: ChecksumConstructor | HashConstructor; + md5: ChecksumConstructor | HashConstructor; +}>) => ChecksumConfiguration; +/** + * @internal + * @deprecated will be moved to smithy-client. + */ +export declare const getChecksumConfiguration: GetChecksumConfigurationType; +/** + * @internal + * @deprecated will be removed for implicit type. + */ +type ResolveChecksumRuntimeConfigType = (clientConfig: ChecksumConfiguration) => any; +/** + * @internal + * + * @deprecated will be moved to smithy-client. + */ +export declare const resolveChecksumRuntimeConfig: ResolveChecksumRuntimeConfigType; +export {}; diff --git a/amplify/functions/downloadDocument/node_modules/@smithy/types/dist-types/ts3.4/extensions/defaultClientConfiguration.d.ts b/amplify/functions/downloadDocument/node_modules/@smithy/types/dist-types/ts3.4/extensions/defaultClientConfiguration.d.ts new file mode 100644 index 0000000..40458b4 --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@smithy/types/dist-types/ts3.4/extensions/defaultClientConfiguration.d.ts @@ -0,0 +1,33 @@ +import { ChecksumConfiguration } from "./checksum"; +/** + * @deprecated will be replaced by DefaultExtensionConfiguration. + * @internal + * + * Default client configuration consisting various configurations for modifying a service client + */ +export interface DefaultClientConfiguration extends ChecksumConfiguration { +} +/** + * @deprecated will be removed for implicit type. 
+ */ +type GetDefaultConfigurationType = (runtimeConfig: any) => DefaultClientConfiguration; +/** + * @deprecated moving to @smithy/smithy-client. + * @internal + * + * Helper function to resolve default client configuration from runtime config + * + */ +export declare const getDefaultClientConfiguration: GetDefaultConfigurationType; +/** + * @deprecated will be removed for implicit type. + */ +type ResolveDefaultRuntimeConfigType = (clientConfig: DefaultClientConfiguration) => any; +/** + * @deprecated moving to @smithy/smithy-client. + * @internal + * + * Helper function to resolve runtime config from default client configuration + */ +export declare const resolveDefaultRuntimeConfig: ResolveDefaultRuntimeConfigType; +export {}; diff --git a/amplify/functions/downloadDocument/node_modules/@smithy/types/dist-types/ts3.4/extensions/defaultExtensionConfiguration.d.ts b/amplify/functions/downloadDocument/node_modules/@smithy/types/dist-types/ts3.4/extensions/defaultExtensionConfiguration.d.ts new file mode 100644 index 0000000..55f5137 --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@smithy/types/dist-types/ts3.4/extensions/defaultExtensionConfiguration.d.ts @@ -0,0 +1,9 @@ +import { ChecksumConfiguration } from "./checksum"; +import { RetryStrategyConfiguration } from "./retry"; +/** + * @internal + * + * Default extension configuration consisting various configurations for modifying a service client + */ +export interface DefaultExtensionConfiguration extends ChecksumConfiguration, RetryStrategyConfiguration { +} diff --git a/amplify/functions/downloadDocument/node_modules/@smithy/types/dist-types/ts3.4/extensions/index.d.ts b/amplify/functions/downloadDocument/node_modules/@smithy/types/dist-types/ts3.4/extensions/index.d.ts new file mode 100644 index 0000000..55edb16 --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@smithy/types/dist-types/ts3.4/extensions/index.d.ts @@ -0,0 +1,4 @@ +export * from 
"./defaultClientConfiguration"; +export * from "./defaultExtensionConfiguration"; +export { AlgorithmId, ChecksumAlgorithm, ChecksumConfiguration } from "./checksum"; +export { RetryStrategyConfiguration } from "./retry"; diff --git a/amplify/functions/downloadDocument/node_modules/@smithy/types/dist-types/ts3.4/extensions/retry.d.ts b/amplify/functions/downloadDocument/node_modules/@smithy/types/dist-types/ts3.4/extensions/retry.d.ts new file mode 100644 index 0000000..3471d08 --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@smithy/types/dist-types/ts3.4/extensions/retry.d.ts @@ -0,0 +1,18 @@ +import { RetryStrategyV2 } from "../retry"; +import { Provider, RetryStrategy } from "../util"; +/** + * A configuration interface with methods called by runtime extension + * @internal + */ +export interface RetryStrategyConfiguration { + /** + * Set retry strategy used for all http requests + * @param retryStrategy + */ + setRetryStrategy(retryStrategy: Provider): void; + /** + * Get retry strategy used for all http requests + * @param retryStrategy + */ + retryStrategy(): Provider; +} diff --git a/amplify/functions/downloadDocument/node_modules/@smithy/types/dist-types/ts3.4/externals-check/browser-externals-check.d.ts b/amplify/functions/downloadDocument/node_modules/@smithy/types/dist-types/ts3.4/externals-check/browser-externals-check.d.ts new file mode 100644 index 0000000..b709d7f --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@smithy/types/dist-types/ts3.4/externals-check/browser-externals-check.d.ts @@ -0,0 +1,35 @@ +import { Exact } from "../transform/exact"; +/** + * @public + * + * A checked type that resolves to Blob if it is defined as more than a stub, otherwise + * resolves to 'never' so as not to widen the type of unions containing Blob + * excessively. + */ +export type BlobOptionalType = BlobDefined extends true ? 
Blob : Unavailable; +/** + * @public + * + * A checked type that resolves to ReadableStream if it is defined as more than a stub, otherwise + * resolves to 'never' so as not to widen the type of unions containing ReadableStream + * excessively. + */ +export type ReadableStreamOptionalType = ReadableStreamDefined extends true ? ReadableStream : Unavailable; +/** + * @public + * + * Indicates a type is unavailable if it resolves to this. + */ +export type Unavailable = never; +/** + * @internal + * + * Whether the global types define more than a stub for ReadableStream. + */ +export type ReadableStreamDefined = Exact extends true ? false : true; +/** + * @internal + * + * Whether the global types define more than a stub for Blob. + */ +export type BlobDefined = Exact extends true ? false : true; diff --git a/amplify/functions/downloadDocument/node_modules/@smithy/types/dist-types/ts3.4/feature-ids.d.ts b/amplify/functions/downloadDocument/node_modules/@smithy/types/dist-types/ts3.4/feature-ids.d.ts new file mode 100644 index 0000000..1a2c157 --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@smithy/types/dist-types/ts3.4/feature-ids.d.ts @@ -0,0 +1,16 @@ +/** + * @internal + */ +export type SmithyFeatures = Partial<{ + RESOURCE_MODEL: "A"; + WAITER: "B"; + PAGINATOR: "C"; + RETRY_MODE_LEGACY: "D"; + RETRY_MODE_STANDARD: "E"; + RETRY_MODE_ADAPTIVE: "F"; + GZIP_REQUEST_COMPRESSION: "L"; + PROTOCOL_RPC_V2_CBOR: "M"; + ENDPOINT_OVERRIDE: "N"; + SIGV4A_SIGNING: "S"; + CREDENTIALS_CODE: "e"; +}>; diff --git a/amplify/functions/downloadDocument/node_modules/@smithy/types/dist-types/ts3.4/http.d.ts b/amplify/functions/downloadDocument/node_modules/@smithy/types/dist-types/ts3.4/http.d.ts new file mode 100644 index 0000000..1e47e4e --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@smithy/types/dist-types/ts3.4/http.d.ts @@ -0,0 +1,112 @@ +import { AbortSignal as DeprecatedAbortSignal } from "./abort"; +import { URI } from "./uri"; +/** + * 
@public + * + * @deprecated use {@link EndpointV2} from `@smithy/types`. + */ +export interface Endpoint { + protocol: string; + hostname: string; + port?: number; + path: string; + query?: QueryParameterBag; +} +/** + * @public + * + * Interface an HTTP request class. Contains + * addressing information in addition to standard message properties. + */ +export interface HttpRequest extends HttpMessage, URI { + method: string; +} +/** + * @public + * + * Represents an HTTP message as received in reply to a request. Contains a + * numeric status code in addition to standard message properties. + */ +export interface HttpResponse extends HttpMessage { + statusCode: number; + reason?: string; +} +/** + * @public + * + * Represents an HTTP message with headers and an optional static or streaming + * body. body: ArrayBuffer | ArrayBufferView | string | Uint8Array | Readable | ReadableStream; + */ +export interface HttpMessage { + headers: HeaderBag; + body?: any; +} +/** + * @public + * + * A mapping of query parameter names to strings or arrays of strings, with the + * second being used when a parameter contains a list of values. Value can be set + * to null when query is not in key-value pairs shape + */ +export type QueryParameterBag = Record | null>; +/** + * @public + */ +export type FieldOptions = { + name: string; + kind?: FieldPosition; + values?: string[]; +}; +/** + * @public + */ +export declare enum FieldPosition { + HEADER = 0, + TRAILER = 1 +} +/** + * @public + * + * A mapping of header names to string values. Multiple values for the same + * header should be represented as a single string with values separated by + * `, `. + * + * Keys should be considered case insensitive, even if this is not enforced by a + * particular implementation. 
For example, given the following HeaderBag, where + * keys differ only in case: + * + * ```json + * { + * 'x-request-date': '2000-01-01T00:00:00Z', + * 'X-Request-Date': '2001-01-01T00:00:00Z' + * } + * ``` + * + * The SDK may at any point during processing remove one of the object + * properties in favor of the other. The headers may or may not be combined, and + * the SDK will not deterministically select which header candidate to use. + */ +export type HeaderBag = Record; +/** + * @public + * + * Represents an HTTP message with headers and an optional static or streaming + * body. bode: ArrayBuffer | ArrayBufferView | string | Uint8Array | Readable | ReadableStream; + */ +export interface HttpMessage { + headers: HeaderBag; + body?: any; +} +/** + * @public + * + * Represents the options that may be passed to an Http Handler. + */ +export interface HttpHandlerOptions { + abortSignal?: AbortSignal | DeprecatedAbortSignal; + /** + * The maximum time in milliseconds that the connection phase of a request + * may take before the connection attempt is abandoned. + */ + requestTimeout?: number; +} diff --git a/amplify/functions/downloadDocument/node_modules/@smithy/types/dist-types/ts3.4/http/httpHandlerInitialization.d.ts b/amplify/functions/downloadDocument/node_modules/@smithy/types/dist-types/ts3.4/http/httpHandlerInitialization.d.ts new file mode 100644 index 0000000..0ee18e4 --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@smithy/types/dist-types/ts3.4/http/httpHandlerInitialization.d.ts @@ -0,0 +1,122 @@ +/// +import { Agent as hAgent, AgentOptions as hAgentOptions } from "http"; +import { Agent as hsAgent, AgentOptions as hsAgentOptions } from "https"; +import { HttpRequest as IHttpRequest } from "../http"; +import { Logger } from "../logger"; +/** + * + * This type represents an alternate client constructor option for the entry + * "requestHandler". 
Instead of providing an instance of a requestHandler, the user + * may provide the requestHandler's constructor options for either the + * NodeHttpHandler or FetchHttpHandler. + * + * For other RequestHandlers like HTTP2 or WebSocket, + * constructor parameter passthrough is not currently available. + * + * @public + */ +export type RequestHandlerParams = NodeHttpHandlerOptions | FetchHttpHandlerOptions; +/** + * Represents the http options that can be passed to a node http client. + * @public + */ +export interface NodeHttpHandlerOptions { + /** + * The maximum time in milliseconds that the connection phase of a request + * may take before the connection attempt is abandoned. + * + * Defaults to 0, which disables the timeout. + */ + connectionTimeout?: number; + /** + * The number of milliseconds a request can take before automatically being terminated. + * Defaults to 0, which disables the timeout. + */ + requestTimeout?: number; + /** + * Delay before the NodeHttpHandler checks for socket exhaustion, + * and emits a warning if the active sockets and enqueued request count is greater than + * 2x the maxSockets count. + * + * Defaults to connectionTimeout + requestTimeout or 3000ms if those are not set. + */ + socketAcquisitionWarningTimeout?: number; + /** + * This field is deprecated, and requestTimeout should be used instead. + * The maximum time in milliseconds that a socket may remain idle before it + * is closed. + * + * @deprecated Use {@link requestTimeout} + */ + socketTimeout?: number; + /** + * You can pass http.Agent or its constructor options. + */ + httpAgent?: hAgent | hAgentOptions; + /** + * You can pass https.Agent or its constructor options. + */ + httpsAgent?: hsAgent | hsAgentOptions; + /** + * Optional logger. + */ + logger?: Logger; +} +/** + * Represents the http options that can be passed to a browser http client. 
+ * @public + */ +export interface FetchHttpHandlerOptions { + /** + * The number of milliseconds a request can take before being automatically + * terminated. + */ + requestTimeout?: number; + /** + * Whether to allow the request to outlive the page. Default value is false. + * + * There may be limitations to the payload size, number of concurrent requests, + * request duration etc. when using keepalive in browsers. + * + * These may change over time, so look for up to date information about + * these limitations before enabling keepalive. + */ + keepAlive?: boolean; + /** + * A string indicating whether credentials will be sent with the request always, never, or + * only when sent to a same-origin URL. + * @see https://developer.mozilla.org/en-US/docs/Web/API/Request/credentials + */ + credentials?: "include" | "omit" | "same-origin" | undefined | string; + /** + * Cache settings for fetch. + * @see https://developer.mozilla.org/en-US/docs/Web/API/Request/cache + */ + cache?: "default" | "force-cache" | "no-cache" | "no-store" | "only-if-cached" | "reload"; + /** + * An optional function that produces additional RequestInit + * parameters for each httpRequest. + * + * This is applied last via merging with Object.assign() and overwrites other values + * set from other sources. + * + * @example + * ```js + * new Client({ + * requestHandler: { + * requestInit(httpRequest) { + * return { cache: "no-store" }; + * } + * } + * }); + * ``` + */ + requestInit?: (httpRequest: IHttpRequest) => RequestInit; +} +declare global { + /** + * interface merging stub. 
+ */ + interface RequestInit { + } +} diff --git a/amplify/functions/downloadDocument/node_modules/@smithy/types/dist-types/ts3.4/identity/apiKeyIdentity.d.ts b/amplify/functions/downloadDocument/node_modules/@smithy/types/dist-types/ts3.4/identity/apiKeyIdentity.d.ts new file mode 100644 index 0000000..4aee7a2 --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@smithy/types/dist-types/ts3.4/identity/apiKeyIdentity.d.ts @@ -0,0 +1,14 @@ +import { Identity, IdentityProvider } from "../identity/identity"; +/** + * @public + */ +export interface ApiKeyIdentity extends Identity { + /** + * The literal API Key + */ + readonly apiKey: string; +} +/** + * @public + */ +export type ApiKeyIdentityProvider = IdentityProvider; diff --git a/amplify/functions/downloadDocument/node_modules/@smithy/types/dist-types/ts3.4/identity/awsCredentialIdentity.d.ts b/amplify/functions/downloadDocument/node_modules/@smithy/types/dist-types/ts3.4/identity/awsCredentialIdentity.d.ts new file mode 100644 index 0000000..9605e4d --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@smithy/types/dist-types/ts3.4/identity/awsCredentialIdentity.d.ts @@ -0,0 +1,31 @@ +import { Identity, IdentityProvider } from "./identity"; +/** + * @public + */ +export interface AwsCredentialIdentity extends Identity { + /** + * AWS access key ID + */ + readonly accessKeyId: string; + /** + * AWS secret access key + */ + readonly secretAccessKey: string; + /** + * A security or session token to use with these credentials. Usually + * present for temporary credentials. + */ + readonly sessionToken?: string; + /** + * AWS credential scope for this set of credentials. + */ + readonly credentialScope?: string; + /** + * AWS accountId. 
+ */ + readonly accountId?: string; +} +/** + * @public + */ +export type AwsCredentialIdentityProvider = IdentityProvider; diff --git a/amplify/functions/downloadDocument/node_modules/@smithy/types/dist-types/ts3.4/identity/identity.d.ts b/amplify/functions/downloadDocument/node_modules/@smithy/types/dist-types/ts3.4/identity/identity.d.ts new file mode 100644 index 0000000..eaa7e5d --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@smithy/types/dist-types/ts3.4/identity/identity.d.ts @@ -0,0 +1,15 @@ +/** + * @public + */ +export interface Identity { + /** + * A `Date` when the identity or credential will no longer be accepted. + */ + readonly expiration?: Date; +} +/** + * @public + */ +export interface IdentityProvider { + (identityProperties?: Record): Promise; +} diff --git a/amplify/functions/downloadDocument/node_modules/@smithy/types/dist-types/ts3.4/identity/index.d.ts b/amplify/functions/downloadDocument/node_modules/@smithy/types/dist-types/ts3.4/identity/index.d.ts new file mode 100644 index 0000000..031a0fe --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@smithy/types/dist-types/ts3.4/identity/index.d.ts @@ -0,0 +1,4 @@ +export * from "./apiKeyIdentity"; +export * from "./awsCredentialIdentity"; +export * from "./identity"; +export * from "./tokenIdentity"; diff --git a/amplify/functions/downloadDocument/node_modules/@smithy/types/dist-types/ts3.4/identity/tokenIdentity.d.ts b/amplify/functions/downloadDocument/node_modules/@smithy/types/dist-types/ts3.4/identity/tokenIdentity.d.ts new file mode 100644 index 0000000..33783eb --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@smithy/types/dist-types/ts3.4/identity/tokenIdentity.d.ts @@ -0,0 +1,14 @@ +import { Identity, IdentityProvider } from "../identity/identity"; +/** + * @internal + */ +export interface TokenIdentity extends Identity { + /** + * The literal token string + */ + readonly token: string; +} +/** + * @internal + */ +export type 
TokenIdentityProvider = IdentityProvider; diff --git a/amplify/functions/downloadDocument/node_modules/@smithy/types/dist-types/ts3.4/index.d.ts b/amplify/functions/downloadDocument/node_modules/@smithy/types/dist-types/ts3.4/index.d.ts new file mode 100644 index 0000000..85b4e44 --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@smithy/types/dist-types/ts3.4/index.d.ts @@ -0,0 +1,37 @@ +export * from "./abort"; +export * from "./auth"; +export * from "./blob/blob-payload-input-types"; +export * from "./checksum"; +export * from "./client"; +export * from "./command"; +export * from "./connection"; +export * from "./crypto"; +export * from "./encode"; +export * from "./endpoint"; +export * from "./endpoints"; +export * from "./eventStream"; +export * from "./extensions"; +export * from "./feature-ids"; +export * from "./http"; +export * from "./http/httpHandlerInitialization"; +export * from "./identity"; +export * from "./logger"; +export * from "./middleware"; +export * from "./pagination"; +export * from "./profile"; +export * from "./response"; +export * from "./retry"; +export * from "./serde"; +export * from "./shapes"; +export * from "./signature"; +export * from "./stream"; +export * from "./streaming-payload/streaming-blob-common-types"; +export * from "./streaming-payload/streaming-blob-payload-input-types"; +export * from "./streaming-payload/streaming-blob-payload-output-types"; +export * from "./transfer"; +export * from "./transform/client-payload-blob-type-narrow"; +export * from "./transform/no-undefined"; +export * from "./transform/type-transform"; +export * from "./uri"; +export * from "./util"; +export * from "./waiter"; diff --git a/amplify/functions/downloadDocument/node_modules/@smithy/types/dist-types/ts3.4/logger.d.ts b/amplify/functions/downloadDocument/node_modules/@smithy/types/dist-types/ts3.4/logger.d.ts new file mode 100644 index 0000000..cc69a11 --- /dev/null +++ 
b/amplify/functions/downloadDocument/node_modules/@smithy/types/dist-types/ts3.4/logger.d.ts @@ -0,0 +1,13 @@ +/** + * @public + * + * Represents a logger object that is available in HandlerExecutionContext + * throughout the middleware stack. + */ +export interface Logger { + trace?: (...content: any[]) => void; + debug: (...content: any[]) => void; + info: (...content: any[]) => void; + warn: (...content: any[]) => void; + error: (...content: any[]) => void; +} diff --git a/amplify/functions/downloadDocument/node_modules/@smithy/types/dist-types/ts3.4/middleware.d.ts b/amplify/functions/downloadDocument/node_modules/@smithy/types/dist-types/ts3.4/middleware.d.ts new file mode 100644 index 0000000..8b35bbe --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@smithy/types/dist-types/ts3.4/middleware.d.ts @@ -0,0 +1,534 @@ +import { AuthScheme, HttpAuthDefinition } from "./auth/auth"; +import { SelectedHttpAuthScheme } from "./auth/HttpAuthScheme"; +import { Command } from "./command"; +import { EndpointV2 } from "./endpoint"; +import { SmithyFeatures } from "./feature-ids"; +import { Logger } from "./logger"; +import { UserAgent } from "./util"; +/** + * @public + */ +export interface InitializeHandlerArguments { + /** + * User input to a command. Reflects the userland representation of the + * union of data types the command can effectively handle. + */ + input: Input; +} +/** + * @public + */ +export interface InitializeHandlerOutput extends DeserializeHandlerOutput { + output: Output; +} +/** + * @public + */ +export interface SerializeHandlerArguments extends InitializeHandlerArguments { + /** + * The user input serialized as a request object. The request object is unknown, + * so you cannot modify it directly. When work with request, you need to guard its + * type to e.g. HttpRequest with 'instanceof' operand + * + * During the build phase of the execution of a middleware stack, a built + * request may or may not be available. 
+ */ + request?: unknown; +} +/** + * @public + */ +export interface SerializeHandlerOutput extends InitializeHandlerOutput { +} +/** + * @public + */ +export interface BuildHandlerArguments extends FinalizeHandlerArguments { +} +/** + * @public + */ +export interface BuildHandlerOutput extends InitializeHandlerOutput { +} +/** + * @public + */ +export interface FinalizeHandlerArguments extends SerializeHandlerArguments { + /** + * The user input serialized as a request. + */ + request: unknown; +} +/** + * @public + */ +export interface FinalizeHandlerOutput extends InitializeHandlerOutput { +} +/** + * @public + */ +export interface DeserializeHandlerArguments extends FinalizeHandlerArguments { +} +/** + * @public + */ +export interface DeserializeHandlerOutput { + /** + * The raw response object from runtime is deserialized to structured output object. + * The response object is unknown so you cannot modify it directly. When work with + * response, you need to guard its type to e.g. HttpResponse with 'instanceof' operand. + * + * During the deserialize phase of the execution of a middleware stack, a deserialized + * response may or may not be available + */ + response: unknown; + output?: Output; +} +/** + * @public + */ +export interface InitializeHandler { + /** + * Asynchronously converts an input object into an output object. + * + * @param args - An object containing a input to the command as well as any + * associated or previously generated execution artifacts. + */ + (args: InitializeHandlerArguments): Promise>; +} +/** + * @public + */ +export type Handler = InitializeHandler; +/** + * @public + */ +export interface SerializeHandler { + /** + * Asynchronously converts an input object into an output object. + * + * @param args - An object containing a input to the command as well as any + * associated or previously generated execution artifacts. 
+ */ + (args: SerializeHandlerArguments): Promise>; +} +/** + * @public + */ +export interface FinalizeHandler { + /** + * Asynchronously converts an input object into an output object. + * + * @param args - An object containing a input to the command as well as any + * associated or previously generated execution artifacts. + */ + (args: FinalizeHandlerArguments): Promise>; +} +/** + * @public + */ +export interface BuildHandler { + (args: BuildHandlerArguments): Promise>; +} +/** + * @public + */ +export interface DeserializeHandler { + (args: DeserializeHandlerArguments): Promise>; +} +/** + * @public + * + * A factory function that creates functions implementing the `Handler` + * interface. + */ +export interface InitializeMiddleware { + /** + * @param next - The handler to invoke after this middleware has operated on + * the user input and before this middleware operates on the output. + * + * @param context - Invariant data and functions for use by the handler. + */ + (next: InitializeHandler, context: HandlerExecutionContext): InitializeHandler; +} +/** + * @public + * + * A factory function that creates functions implementing the `BuildHandler` + * interface. + */ +export interface SerializeMiddleware { + /** + * @param next - The handler to invoke after this middleware has operated on + * the user input and before this middleware operates on the output. + * + * @param context - Invariant data and functions for use by the handler. + */ + (next: SerializeHandler, context: HandlerExecutionContext): SerializeHandler; +} +/** + * @public + * + * A factory function that creates functions implementing the `FinalizeHandler` + * interface. + */ +export interface FinalizeRequestMiddleware { + /** + * @param next - The handler to invoke after this middleware has operated on + * the user input and before this middleware operates on the output. + * + * @param context - Invariant data and functions for use by the handler. 
+ */ + (next: FinalizeHandler, context: HandlerExecutionContext): FinalizeHandler; +} +/** + * @public + */ +export interface BuildMiddleware { + (next: BuildHandler, context: HandlerExecutionContext): BuildHandler; +} +/** + * @public + */ +export interface DeserializeMiddleware { + (next: DeserializeHandler, context: HandlerExecutionContext): DeserializeHandler; +} +/** + * @public + */ +export type MiddlewareType = InitializeMiddleware | SerializeMiddleware | BuildMiddleware | FinalizeRequestMiddleware | DeserializeMiddleware; +/** + * @public + * + * A factory function that creates the terminal handler atop which a middleware + * stack sits. + */ +export interface Terminalware { + (context: HandlerExecutionContext): DeserializeHandler; +} +/** + * @public + */ +export type Step = "initialize" | "serialize" | "build" | "finalizeRequest" | "deserialize"; +/** + * @public + */ +export type Priority = "high" | "normal" | "low"; +/** + * @public + */ +export interface HandlerOptions { + /** + * Handlers are ordered using a "step" that describes the stage of command + * execution at which the handler will be executed. The available steps are: + * + * - initialize: The input is being prepared. Examples of typical + * initialization tasks include injecting default options computing + * derived parameters. + * - serialize: The input is complete and ready to be serialized. Examples + * of typical serialization tasks include input validation and building + * an HTTP request from user input. + * - build: The input has been serialized into an HTTP request, but that + * request may require further modification. Any request alterations + * will be applied to all retries. Examples of typical build tasks + * include injecting HTTP headers that describe a stable aspect of the + * request, such as `Content-Length` or a body checksum. + * - finalizeRequest: The request is being prepared to be sent over the wire. 
The + * request in this stage should already be semantically complete and + * should therefore only be altered as match the recipient's + * expectations. Examples of typical finalization tasks include request + * signing and injecting hop-by-hop headers. + * - deserialize: The response has arrived, the middleware here will deserialize + * the raw response object to structured response + * + * Unlike initialization and build handlers, which are executed once + * per operation execution, finalization and deserialize handlers will be + * executed foreach HTTP request sent. + * + * @defaultValue 'initialize' + */ + step?: Step; + /** + * A list of strings to any that identify the general purpose or important + * characteristics of a given handler. + */ + tags?: Array; + /** + * A unique name to refer to a middleware + */ + name?: string; + /** + * @internal + * Aliases allows for middleware to be found by multiple names besides {@link HandlerOptions.name}. + * This allows for references to replaced middleware to continue working, e.g. replacing + * multiple auth-specific middleware with a single generic auth middleware. + */ + aliases?: Array; + /** + * A flag to override the existing middleware with the same name. Without + * setting it, adding middleware with duplicated name will throw an exception. + * @internal + */ + override?: boolean; +} +/** + * @public + */ +export interface AbsoluteLocation { + /** + * By default middleware will be added to individual step in un-guaranteed order. + * In the case that + * + * @defaultValue 'normal' + */ + priority?: Priority; +} +/** + * @public + */ +export type Relation = "before" | "after"; +/** + * @public + */ +export interface RelativeLocation { + /** + * Specify the relation to be before or after a know middleware. + */ + relation: Relation; + /** + * A known middleware name to indicate inserting middleware's location. 
+ */ + toMiddleware: string; +} +/** + * @public + */ +export type RelativeMiddlewareOptions = RelativeLocation & Pick>; +/** + * @public + */ +export interface InitializeHandlerOptions extends HandlerOptions { + step?: "initialize"; +} +/** + * @public + */ +export interface SerializeHandlerOptions extends HandlerOptions { + step: "serialize"; +} +/** + * @public + */ +export interface BuildHandlerOptions extends HandlerOptions { + step: "build"; +} +/** + * @public + */ +export interface FinalizeRequestHandlerOptions extends HandlerOptions { + step: "finalizeRequest"; +} +/** + * @public + */ +export interface DeserializeHandlerOptions extends HandlerOptions { + step: "deserialize"; +} +/** + * @public + * + * A stack storing middleware. It can be resolved into a handler. It supports 2 + * approaches for adding middleware: + * 1. Adding middleware to specific step with `add()`. The order of middleware + * added into same step is determined by order of adding them. If one middleware + * needs to be executed at the front of the step or at the end of step, set + * `priority` options to `high` or `low`. + * 2. Adding middleware to location relative to known middleware with `addRelativeTo()`. + * This is useful when given middleware must be executed before or after specific + * middleware(`toMiddleware`). You can add a middleware relatively to another + * middleware which also added relatively. But eventually, this relative middleware + * chain **must** be 'anchored' by a middleware that added using `add()` API + * with absolute `step` and `priority`. This mothod will throw if specified + * `toMiddleware` is not found. 
+ */ +export interface MiddlewareStack extends Pluggable { + /** + * Add middleware to the stack to be executed during the "initialize" step, + * optionally specifying a priority, tags and name + */ + add(middleware: InitializeMiddleware, options?: InitializeHandlerOptions & AbsoluteLocation): void; + /** + * Add middleware to the stack to be executed during the "serialize" step, + * optionally specifying a priority, tags and name + */ + add(middleware: SerializeMiddleware, options: SerializeHandlerOptions & AbsoluteLocation): void; + /** + * Add middleware to the stack to be executed during the "build" step, + * optionally specifying a priority, tags and name + */ + add(middleware: BuildMiddleware, options: BuildHandlerOptions & AbsoluteLocation): void; + /** + * Add middleware to the stack to be executed during the "finalizeRequest" step, + * optionally specifying a priority, tags and name + */ + add(middleware: FinalizeRequestMiddleware, options: FinalizeRequestHandlerOptions & AbsoluteLocation): void; + /** + * Add middleware to the stack to be executed during the "deserialize" step, + * optionally specifying a priority, tags and name + */ + add(middleware: DeserializeMiddleware, options: DeserializeHandlerOptions & AbsoluteLocation): void; + /** + * Add middleware to a stack position before or after a known middleware,optionally + * specifying name and tags. + */ + addRelativeTo(middleware: MiddlewareType, options: RelativeMiddlewareOptions): void; + /** + * Apply a customization function to mutate the middleware stack, often + * used for customizations that requires mutating multiple middleware. + */ + use(pluggable: Pluggable): void; + /** + * Create a shallow clone of this stack. Step bindings and handler priorities + * and tags are preserved in the copy. + */ + clone(): MiddlewareStack; + /** + * Removes middleware from the stack. + * + * If a string is provided, it will be treated as middleware name. 
If a middleware + * is inserted with the given name, it will be removed. + * + * If a middleware class is provided, all usages thereof will be removed. + */ + remove(toRemove: MiddlewareType | string): boolean; + /** + * Removes middleware that contains given tag + * + * Multiple middleware will potentially be removed + */ + removeByTag(toRemove: string): boolean; + /** + * Create a stack containing the middlewares in this stack as well as the + * middlewares in the `from` stack. Neither source is modified, and step + * bindings and handler priorities and tags are preserved in the copy. + */ + concat(from: MiddlewareStack): MiddlewareStack; + /** + * Returns a list of the current order of middleware in the stack. + * This does not execute the middleware functions, nor does it + * provide a reference to the stack itself. + */ + identify(): string[]; + /** + * @internal + * + * When an operation is called using this stack, + * it will log its list of middleware to the console using + * the identify function. + * + * @param toggle - set whether to log on resolve. + * If no argument given, returns the current value. + */ + identifyOnResolve(toggle?: boolean): boolean; + /** + * Builds a single handler function from zero or more middleware classes and + * a core handler. The core handler is meant to send command objects to AWS + * services and return promises that will resolve with the operation result + * or be rejected with an error. + * + * When a composed handler is invoked, the arguments will pass through all + * middleware in a defined order, and the return from the innermost handler + * will pass through all middleware in the reverse of that order. 
+ */ + resolve(handler: DeserializeHandler, context: HandlerExecutionContext): InitializeHandler; +} +/** + * @internal + */ +export declare const SMITHY_CONTEXT_KEY = "__smithy_context"; +/** + * @public + * + * Data and helper objects that are not expected to change from one execution of + * a composed handler to another. + */ +export interface HandlerExecutionContext { + /** + * A logger that may be invoked by any handler during execution of an + * operation. + */ + logger?: Logger; + /** + * Name of the service the operation is being sent to. + */ + clientName?: string; + /** + * Name of the operation being executed. + */ + commandName?: string; + /** + * Additional user agent that inferred by middleware. It can be used to save + * the internal user agent sections without overriding the `customUserAgent` + * config in clients. + */ + userAgent?: UserAgent; + /** + * Resolved by the endpointMiddleware function of `@smithy/middleware-endpoint` + * in the serialization stage. + */ + endpointV2?: EndpointV2; + /** + * Set at the same time as endpointV2. + */ + authSchemes?: AuthScheme[]; + /** + * The current auth configuration that has been set by any auth middleware and + * that will prevent from being set more than once. + */ + currentAuthConfig?: HttpAuthDefinition; + /** + * @deprecated do not extend this field, it is a carryover from AWS SDKs. + * Used by DynamoDbDocumentClient. + */ + dynamoDbDocumentClientOptions?: Partial<{ + overrideInputFilterSensitiveLog(...args: any[]): string | void; + overrideOutputFilterSensitiveLog(...args: any[]): string | void; + }>; + /** + * @internal + * Context for Smithy properties. + */ + [SMITHY_CONTEXT_KEY]?: { + service?: string; + operation?: string; + commandInstance?: Command; + selectedHttpAuthScheme?: SelectedHttpAuthScheme; + features?: SmithyFeatures; + /** + * @deprecated + * Do not assign arbitrary members to the Smithy Context, + * fields should be explicitly declared here to avoid collisions. 
+ */ + [key: string]: unknown; + }; + /** + * @deprecated + * Do not assign arbitrary members to the context, since + * they can interfere with existing functionality. + * + * Additional members should instead be declared on the SMITHY_CONTEXT_KEY + * or other reserved keys. + */ + [key: string]: any; +} +/** + * @public + */ +export interface Pluggable { + /** + * A function that mutate the passed in middleware stack. Functions implementing + * this interface can add, remove, modify existing middleware stack from clients + * or commands + */ + applyToStack: (stack: MiddlewareStack) => void; +} diff --git a/amplify/functions/downloadDocument/node_modules/@smithy/types/dist-types/ts3.4/pagination.d.ts b/amplify/functions/downloadDocument/node_modules/@smithy/types/dist-types/ts3.4/pagination.d.ts new file mode 100644 index 0000000..c9d1c92 --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@smithy/types/dist-types/ts3.4/pagination.d.ts @@ -0,0 +1,33 @@ +import { Client } from "./client"; +import { Command } from "./command"; +/** + * @public + * + * Expected type definition of a paginator. + */ +export type Paginator = AsyncGenerator; +/** + * @public + * + * Expected paginator configuration passed to an operation. Services will extend + * this interface definition and may type client further. + */ +export interface PaginationConfiguration { + client: Client; + pageSize?: number; + startingToken?: any; + /** + * For some APIs, such as CloudWatchLogs events, the next page token will always + * be present. + * + * When true, this config field will have the paginator stop when the token doesn't change + * instead of when it is not present. + */ + stopOnSameToken?: boolean; + /** + * @param command - reference to the instantiated command. This callback is executed + * prior to sending the command with the paginator's client. + * @returns the original command or a replacement, defaulting to the original command object. 
+ */ + withCommand?: (command: Command) => typeof command | undefined; +} diff --git a/amplify/functions/downloadDocument/node_modules/@smithy/types/dist-types/ts3.4/profile.d.ts b/amplify/functions/downloadDocument/node_modules/@smithy/types/dist-types/ts3.4/profile.d.ts new file mode 100644 index 0000000..1b3dba7 --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@smithy/types/dist-types/ts3.4/profile.d.ts @@ -0,0 +1,30 @@ +/** + * @public + */ +export declare enum IniSectionType { + PROFILE = "profile", + SSO_SESSION = "sso-session", + SERVICES = "services" +} +/** + * @public + */ +export type IniSection = Record; +/** + * @public + * + * @deprecated Please use {@link IniSection} + */ +export interface Profile extends IniSection { +} +/** + * @public + */ +export type ParsedIniData = Record; +/** + * @public + */ +export interface SharedConfigFiles { + credentialsFile: ParsedIniData; + configFile: ParsedIniData; +} diff --git a/amplify/functions/downloadDocument/node_modules/@smithy/types/dist-types/ts3.4/response.d.ts b/amplify/functions/downloadDocument/node_modules/@smithy/types/dist-types/ts3.4/response.d.ts new file mode 100644 index 0000000..3d8a45a --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@smithy/types/dist-types/ts3.4/response.d.ts @@ -0,0 +1,40 @@ +/** + * @public + */ +export interface ResponseMetadata { + /** + * The status code of the last HTTP response received for this operation. + */ + httpStatusCode?: number; + /** + * A unique identifier for the last request sent for this operation. Often + * requested by AWS service teams to aid in debugging. + */ + requestId?: string; + /** + * A secondary identifier for the last request sent. Used for debugging. + */ + extendedRequestId?: string; + /** + * A tertiary identifier for the last request sent. Used for debugging. + */ + cfId?: string; + /** + * The number of times this operation was attempted. 
+ */ + attempts?: number; + /** + * The total amount of time (in milliseconds) that was spent waiting between + * retry attempts. + */ + totalRetryDelay?: number; +} +/** + * @public + */ +export interface MetadataBearer { + /** + * Metadata pertaining to this request. + */ + $metadata: ResponseMetadata; +} diff --git a/amplify/functions/downloadDocument/node_modules/@smithy/types/dist-types/ts3.4/retry.d.ts b/amplify/functions/downloadDocument/node_modules/@smithy/types/dist-types/ts3.4/retry.d.ts new file mode 100644 index 0000000..8436c9a --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@smithy/types/dist-types/ts3.4/retry.d.ts @@ -0,0 +1,133 @@ +import { SdkError } from "./shapes"; +/** + * @public + */ +export type RetryErrorType = +/** + * This is a connection level error such as a socket timeout, socket connect + * error, tls negotiation timeout etc... + * Typically these should never be applied for non-idempotent request types + * since in this scenario, it's impossible to know whether the operation had + * a side effect on the server. + */ +"TRANSIENT" +/** + * This is an error where the server explicitly told the client to back off, + * such as a 429 or 503 Http error. + */ + | "THROTTLING" +/** + * This is a server error that isn't explicitly throttling but is considered + * by the client to be something that should be retried. + */ + | "SERVER_ERROR" +/** + * Doesn't count against any budgets. This could be something like a 401 + * challenge in Http. + */ + | "CLIENT_ERROR"; +/** + * @public + */ +export interface RetryErrorInfo { + /** + * The error thrown during the initial request, if available. + */ + error?: SdkError; + errorType: RetryErrorType; + /** + * Protocol hint. This could come from Http's 'retry-after' header or + * something from MQTT or any other protocol that has the ability to convey + * retry info from a peer. + * + * The Date after which a retry should be attempted. 
+ */ + retryAfterHint?: Date; +} +/** + * @public + */ +export interface RetryBackoffStrategy { + /** + * @returns the number of milliseconds to wait before retrying an action. + */ + computeNextBackoffDelay(retryAttempt: number): number; +} +/** + * @public + */ +export interface StandardRetryBackoffStrategy extends RetryBackoffStrategy { + /** + * Sets the delayBase used to compute backoff delays. + * @param delayBase - + */ + setDelayBase(delayBase: number): void; +} +/** + * @public + */ +export interface RetryStrategyOptions { + backoffStrategy: RetryBackoffStrategy; + maxRetriesBase: number; +} +/** + * @public + */ +export interface RetryToken { + /** + * @returns the current count of retry. + */ + getRetryCount(): number; + /** + * @returns the number of milliseconds to wait before retrying an action. + */ + getRetryDelay(): number; +} +/** + * @public + */ +export interface StandardRetryToken extends RetryToken { + /** + * @returns the cost of the last retry attempt. + */ + getRetryCost(): number | undefined; +} +/** + * @public + */ +export interface RetryStrategyV2 { + /** + * Called before any retries (for the first call to the operation). It either + * returns a retry token or an error upon the failure to acquire a token prior. + * + * tokenScope is arbitrary and out of scope for this component. However, + * adding it here offers us a lot of future flexibility for outage detection. + * For example, it could be "us-east-1" on a shared retry strategy, or + * "us-west-2-c:dynamodb". + */ + acquireInitialRetryToken(retryTokenScope: string): Promise; + /** + * After a failed operation call, this function is invoked to refresh the + * retryToken returned by acquireInitialRetryToken(). This function can + * either choose to allow another retry and send a new or updated token, + * or reject the retry attempt and report the error either in an exception + * or returning an error. 
+ */ + refreshRetryTokenForRetry(tokenToRenew: RetryToken, errorInfo: RetryErrorInfo): Promise; + /** + * Upon successful completion of the operation, this function is called + * to record that the operation was successful. + */ + recordSuccess(token: RetryToken): void; +} +/** + * @public + */ +export type ExponentialBackoffJitterType = "DEFAULT" | "NONE" | "FULL" | "DECORRELATED"; +/** + * @public + */ +export interface ExponentialBackoffStrategyOptions { + jitterType: ExponentialBackoffJitterType; + backoffScaleValue?: number; +} diff --git a/amplify/functions/downloadDocument/node_modules/@smithy/types/dist-types/ts3.4/serde.d.ts b/amplify/functions/downloadDocument/node_modules/@smithy/types/dist-types/ts3.4/serde.d.ts new file mode 100644 index 0000000..d2d7ea9 --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@smithy/types/dist-types/ts3.4/serde.d.ts @@ -0,0 +1,112 @@ +import { Endpoint } from "./http"; +import { RequestHandler } from "./transfer"; +import { Decoder, Encoder, Provider } from "./util"; +/** + * @public + * + * Interface for object requires an Endpoint set. + */ +export interface EndpointBearer { + endpoint: Provider; +} +/** + * @public + */ +export interface StreamCollector { + /** + * A function that converts a stream into an array of bytes. + * + * @param stream - The low-level native stream from browser or Nodejs runtime + */ + (stream: any): Promise; +} +/** + * @public + * + * Request and Response serde util functions and settings for AWS services + */ +export interface SerdeContext extends SerdeFunctions, EndpointBearer { + requestHandler: RequestHandler; + disableHostPrefix: boolean; +} +/** + * @public + * + * Serde functions from the client config. 
+ */ +export interface SerdeFunctions { + base64Encoder: Encoder; + base64Decoder: Decoder; + utf8Encoder: Encoder; + utf8Decoder: Decoder; + streamCollector: StreamCollector; +} +/** + * @public + */ +export interface RequestSerializer { + /** + * Converts the provided `input` into a request object + * + * @param input - The user input to serialize. + * + * @param context - Context containing runtime-specific util functions. + */ + (input: any, context: Context): Promise; +} +/** + * @public + */ +export interface ResponseDeserializer { + /** + * Converts the output of an operation into JavaScript types. + * + * @param output - The HTTP response received from the service + * + * @param context - context containing runtime-specific util functions. + */ + (output: ResponseType, context: Context): Promise; +} +/** + * The interface contains mix-in utility functions to transfer the runtime-specific + * stream implementation to specified format. Each stream can ONLY be transformed + * once. + * @public + */ +export interface SdkStreamMixin { + transformToByteArray: () => Promise; + transformToString: (encoding?: string) => Promise; + transformToWebStream: () => ReadableStream; +} +/** + * @public + * + * The type describing a runtime-specific stream implementation with mix-in + * utility functions. + */ +export type SdkStream = BaseStream & SdkStreamMixin; +/** + * @public + * + * Indicates that the member of type T with + * key StreamKey have been extended + * with the SdkStreamMixin helper methods. + */ +export type WithSdkStreamMixin = { + [key in keyof T]: key extends StreamKey ? 
SdkStream : T[key]; +}; +/** + * Interface for internal function to inject stream utility functions + * implementation + * + * @internal + */ +export interface SdkStreamMixinInjector { + (stream: unknown): SdkStreamMixin; +} +/** + * @internal + */ +export interface SdkStreamSerdeContext { + sdkStreamMixin: SdkStreamMixinInjector; +} diff --git a/amplify/functions/downloadDocument/node_modules/@smithy/types/dist-types/ts3.4/shapes.d.ts b/amplify/functions/downloadDocument/node_modules/@smithy/types/dist-types/ts3.4/shapes.d.ts new file mode 100644 index 0000000..a81cbf1 --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@smithy/types/dist-types/ts3.4/shapes.d.ts @@ -0,0 +1,82 @@ +import { HttpResponse } from "./http"; +import { MetadataBearer } from "./response"; +/** + * @public + * + * A document type represents an untyped JSON-like value. + * + * Not all protocols support document types, and the serialization format of a + * document type is protocol specific. All JSON protocols SHOULD support + * document types and they SHOULD serialize document types inline as normal + * JSON values. + */ +export type DocumentType = null | boolean | number | string | DocumentType[] | { + [prop: string]: DocumentType; +}; +/** + * @public + * + * A structure shape with the error trait. + * https://smithy.io/2.0/spec/behavior-traits.html#smithy-api-retryable-trait + */ +export interface RetryableTrait { + /** + * Indicates that the error is a retryable throttling error. + */ + readonly throttling?: boolean; +} +/** + * @public + * + * Type that is implemented by all Smithy shapes marked with the + * error trait. + * @deprecated + */ +export interface SmithyException { + /** + * The shape ID name of the exception. + */ + readonly name: string; + /** + * Whether the client or server are at fault. + */ + readonly $fault: "client" | "server"; + /** + * The service that encountered the exception. 
+ */ + readonly $service?: string; + /** + * Indicates that an error MAY be retried by the client. + */ + readonly $retryable?: RetryableTrait; + /** + * Reference to low-level HTTP response object. + */ + readonly $response?: HttpResponse; +} +/** + * @public + * + * @deprecated See {@link https://aws.amazon.com/blogs/developer/service-error-handling-modular-aws-sdk-js/} + * + * This type should not be used in your application. + * Users of the AWS SDK for JavaScript v3 service clients should prefer to + * use the specific Exception classes corresponding to each operation. + * These can be found as code in the deserializer for the operation's Command class, + * or as declarations in the service model file in codegen/sdk-codegen/aws-models. + * + * If no exceptions are enumerated by a particular Command operation, + * the base exception for the service should be used. Each client exports + * a base ServiceException prefixed with the service name. + */ +export type SdkError = Error & Partial & Partial & { + $metadata?: Partial["$metadata"] & { + /** + * If present, will have value of true and indicates that the error resulted in a + * correction of the clock skew, a.k.a. config.systemClockOffset. + * This is specific to AWS SDK and sigv4. + */ + readonly clockSkewCorrected?: true; + }; + cause?: Error; +}; diff --git a/amplify/functions/downloadDocument/node_modules/@smithy/types/dist-types/ts3.4/signature.d.ts b/amplify/functions/downloadDocument/node_modules/@smithy/types/dist-types/ts3.4/signature.d.ts new file mode 100644 index 0000000..bbaecde --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@smithy/types/dist-types/ts3.4/signature.d.ts @@ -0,0 +1,155 @@ +import { Message } from "./eventStream"; +import { HttpRequest } from "./http"; +/** + * @public + * + * A `Date` object, a unix (epoch) timestamp in seconds, or a string that can be + * understood by the JavaScript `Date` constructor. 
+ */ +export type DateInput = number | string | Date; +/** + * @public + */ +export interface SigningArguments { + /** + * The date and time to be used as signature metadata. This value should be + * a Date object, a unix (epoch) timestamp, or a string that can be + * understood by the JavaScript `Date` constructor.If not supplied, the + * value returned by `new Date()` will be used. + */ + signingDate?: DateInput; + /** + * The service signing name. It will override the service name of the signer + * in current invocation + */ + signingService?: string; + /** + * The region name to sign the request. It will override the signing region of the + * signer in current invocation + */ + signingRegion?: string; +} +/** + * @public + */ +export interface RequestSigningArguments extends SigningArguments { + /** + * A set of strings whose members represents headers that cannot be signed. + * All headers in the provided request will have their names converted to + * lower case and then checked for existence in the unsignableHeaders set. + */ + unsignableHeaders?: Set; + /** + * A set of strings whose members represents headers that should be signed. + * Any values passed here will override those provided via unsignableHeaders, + * allowing them to be signed. + * + * All headers in the provided request will have their names converted to + * lower case before signing. + */ + signableHeaders?: Set; +} +/** + * @public + */ +export interface RequestPresigningArguments extends RequestSigningArguments { + /** + * The number of seconds before the presigned URL expires + */ + expiresIn?: number; + /** + * A set of strings whose representing headers that should not be hoisted + * to presigned request's query string. If not supplied, the presigner + * moves all the AWS-specific headers (starting with `x-amz-`) to the request + * query string. If supplied, these headers remain in the presigned request's + * header. 
+ * All headers in the provided request will have their names converted to + * lower case and then checked for existence in the unhoistableHeaders set. + */ + unhoistableHeaders?: Set; + /** + * This overrides any headers with the same name(s) set by unhoistableHeaders. + * These headers will be hoisted into the query string and signed. + */ + hoistableHeaders?: Set; +} +/** + * @public + */ +export interface EventSigningArguments extends SigningArguments { + priorSignature: string; +} +/** + * @public + */ +export interface RequestPresigner { + /** + * Signs a request for future use. + * + * The request will be valid until either the provided `expiration` time has + * passed or the underlying credentials have expired. + * + * @param requestToSign - The request that should be signed. + * @param options - Additional signing options. + */ + presign(requestToSign: HttpRequest, options?: RequestPresigningArguments): Promise; +} +/** + * @public + * + * An object that signs request objects with AWS credentials using one of the + * AWS authentication protocols. + */ +export interface RequestSigner { + /** + * Sign the provided request for immediate dispatch. + */ + sign(requestToSign: HttpRequest, options?: RequestSigningArguments): Promise; +} +/** + * @public + */ +export interface StringSigner { + /** + * Sign the provided `stringToSign` for use outside of the context of + * request signing. Typical uses include signed policy generation. + */ + sign(stringToSign: string, options?: SigningArguments): Promise; +} +/** + * @public + */ +export interface FormattedEvent { + headers: Uint8Array; + payload: Uint8Array; +} +/** + * @public + */ +export interface EventSigner { + /** + * Sign the individual event of the event stream. 
+ */ + sign(event: FormattedEvent, options: EventSigningArguments): Promise; +} +/** + * @public + */ +export interface SignableMessage { + message: Message; + priorSignature: string; +} +/** + * @public + */ +export interface SignedMessage { + message: Message; + signature: string; +} +/** + * @public + */ +export interface MessageSigner { + signMessage(message: SignableMessage, args: SigningArguments): Promise; + sign(event: SignableMessage, options: SigningArguments): Promise; +} diff --git a/amplify/functions/downloadDocument/node_modules/@smithy/types/dist-types/ts3.4/stream.d.ts b/amplify/functions/downloadDocument/node_modules/@smithy/types/dist-types/ts3.4/stream.d.ts new file mode 100644 index 0000000..1e2b85d --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@smithy/types/dist-types/ts3.4/stream.d.ts @@ -0,0 +1,22 @@ +import { ChecksumConstructor } from "./checksum"; +import { HashConstructor, StreamHasher } from "./crypto"; +import { BodyLengthCalculator, Encoder } from "./util"; +/** + * @public + */ +export interface GetAwsChunkedEncodingStreamOptions { + base64Encoder?: Encoder; + bodyLengthChecker: BodyLengthCalculator; + checksumAlgorithmFn?: ChecksumConstructor | HashConstructor; + checksumLocationName?: string; + streamHasher?: StreamHasher; +} +/** + * @public + * + * A function that returns Readable Stream which follows aws-chunked encoding stream. + * It optionally adds checksum if options are provided. 
+ */ +export interface GetAwsChunkedEncodingStream { + (readableStream: StreamType, options: GetAwsChunkedEncodingStreamOptions): StreamType; +} diff --git a/amplify/functions/downloadDocument/node_modules/@smithy/types/dist-types/ts3.4/streaming-payload/streaming-blob-common-types.d.ts b/amplify/functions/downloadDocument/node_modules/@smithy/types/dist-types/ts3.4/streaming-payload/streaming-blob-common-types.d.ts new file mode 100644 index 0000000..27088db --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@smithy/types/dist-types/ts3.4/streaming-payload/streaming-blob-common-types.d.ts @@ -0,0 +1,33 @@ +/// +import { Readable } from "stream"; +import { BlobOptionalType, ReadableStreamOptionalType } from "../externals-check/browser-externals-check"; +/** + * @public + * + * This is the union representing the modeled blob type with streaming trait + * in a generic format that does not relate to HTTP input or output payloads. + * + * Note: the non-streaming blob type is represented by Uint8Array, but because + * the streaming blob type is always in the request/response paylod, it has + * historically been handled with different types. + * + * @see https://smithy.io/2.0/spec/simple-types.html#blob + * + * For compatibility with its historical representation, it must contain at least + * Readble (Node.js), Blob (browser), and ReadableStream (browser). + * + * @see StreamingPayloadInputTypes for FAQ about mixing types from multiple environments. + */ +export type StreamingBlobTypes = NodeJsRuntimeStreamingBlobTypes | BrowserRuntimeStreamingBlobTypes; +/** + * @public + * + * Node.js streaming blob type. + */ +export type NodeJsRuntimeStreamingBlobTypes = Readable; +/** + * @public + * + * Browser streaming blob types. 
+ */ +export type BrowserRuntimeStreamingBlobTypes = ReadableStreamOptionalType | BlobOptionalType; diff --git a/amplify/functions/downloadDocument/node_modules/@smithy/types/dist-types/ts3.4/streaming-payload/streaming-blob-payload-input-types.d.ts b/amplify/functions/downloadDocument/node_modules/@smithy/types/dist-types/ts3.4/streaming-payload/streaming-blob-payload-input-types.d.ts new file mode 100644 index 0000000..1a86dea --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@smithy/types/dist-types/ts3.4/streaming-payload/streaming-blob-payload-input-types.d.ts @@ -0,0 +1,61 @@ +/// +import { Readable } from "stream"; +import { BlobOptionalType, ReadableStreamOptionalType } from "../externals-check/browser-externals-check"; +/** + * @public + * + * This union represents a superset of the compatible types you + * can use for streaming payload inputs. + * + * FAQ: + * Why does the type union mix mutually exclusive runtime types, namely + * Node.js and browser types? + * + * There are several reasons: + * 1. For backwards compatibility. + * 2. As a convenient compromise solution so that users in either environment may use the types + * without customization. + * 3. The SDK does not have static type information about the exact implementation + * of the HTTP RequestHandler being used in your client(s) (e.g. fetch, XHR, node:http, or node:http2), + * given that it is chosen at runtime. There are multiple possible request handlers + * in both the Node.js and browser runtime environments. + * + * Rather than restricting the type to a known common format (Uint8Array, for example) + * which doesn't include a universal streaming format in the currently supported Node.js versions, + * the type declaration is widened to multiple possible formats. + * It is up to the user to ultimately select a compatible format with the + * runtime and HTTP handler implementation they are using. 
+ * + * Usage: + * The typical solution we expect users to have is to manually narrow the + * type when needed, picking the appropriate one out of the union according to the + * runtime environment and specific request handler. + * There is also the type utility "NodeJsClient", "BrowserClient" and more + * exported from this package. These can be applied at the client level + * to pre-narrow these streaming payload blobs. For usage see the readme.md + * in the root of the \@smithy/types NPM package. + */ +export type StreamingBlobPayloadInputTypes = NodeJsRuntimeStreamingBlobPayloadInputTypes | BrowserRuntimeStreamingBlobPayloadInputTypes; +/** + * @public + * + * Streaming payload input types in the Node.js environment. + * These are derived from the types compatible with the request body used by node:http. + * + * Note: not all types are signable by the standard SignatureV4 signer when + * used as the request body. For example, in Node.js a Readable stream + * is not signable by the default signer. + * They are included in the union because it may be intended in some cases, + * but the expected types are primarily string, Uint8Array, and Buffer. + * + * Additional details may be found in the internal + * function "getPayloadHash" in the SignatureV4 module. + */ +export type NodeJsRuntimeStreamingBlobPayloadInputTypes = string | Uint8Array | Buffer | Readable; +/** + * @public + * + * Streaming payload input types in the browser environment. + * These are derived from the types compatible with fetch's Request.body. 
+ */ +export type BrowserRuntimeStreamingBlobPayloadInputTypes = string | Uint8Array | ReadableStreamOptionalType | BlobOptionalType; diff --git a/amplify/functions/downloadDocument/node_modules/@smithy/types/dist-types/ts3.4/streaming-payload/streaming-blob-payload-output-types.d.ts b/amplify/functions/downloadDocument/node_modules/@smithy/types/dist-types/ts3.4/streaming-payload/streaming-blob-payload-output-types.d.ts new file mode 100644 index 0000000..e344a46 --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@smithy/types/dist-types/ts3.4/streaming-payload/streaming-blob-payload-output-types.d.ts @@ -0,0 +1,52 @@ +/// +import { IncomingMessage } from "http"; +import { Readable } from "stream"; +import { BlobOptionalType, ReadableStreamOptionalType } from "../externals-check/browser-externals-check"; +import { SdkStream } from "../serde"; +/** + * @public + * + * This union represents a superset of the types you may receive + * in streaming payload outputs. + * + * @see StreamingPayloadInputTypes for FAQ about mixing types from multiple environments. + * + * To highlight the upstream docs about the SdkStream mixin: + * + * The interface contains mix-in (via Object.assign) methods to transform the runtime-specific + * stream implementation to specified format. Each stream can ONLY be transformed + * once. + * + * The available methods are described on the SdkStream type via SdkStreamMixin. + */ +export type StreamingBlobPayloadOutputTypes = NodeJsRuntimeStreamingBlobPayloadOutputTypes | BrowserRuntimeStreamingBlobPayloadOutputTypes; +/** + * @public + * + * Streaming payload output types in the Node.js environment. + * + * This is by default the IncomingMessage type from node:http responses when + * using the default node-http-handler in Node.js environments. + * + * It can be other Readable types like node:http2's ClientHttp2Stream + * such as when using the node-http2-handler. 
+ * + * The SdkStreamMixin adds methods on this type to help transform (collect) it to + * other formats. + */ +export type NodeJsRuntimeStreamingBlobPayloadOutputTypes = SdkStream; +/** + * @public + * + * Streaming payload output types in the browser environment. + * + * This is by default fetch's Response.body type (ReadableStream) when using + * the default fetch-http-handler in browser-like environments. + * + * It may be a Blob, such as when using the XMLHttpRequest handler + * and receiving an arraybuffer response body. + * + * The SdkStreamMixin adds methods on this type to help transform (collect) it to + * other formats. + */ +export type BrowserRuntimeStreamingBlobPayloadOutputTypes = SdkStream; diff --git a/amplify/functions/downloadDocument/node_modules/@smithy/types/dist-types/ts3.4/transfer.d.ts b/amplify/functions/downloadDocument/node_modules/@smithy/types/dist-types/ts3.4/transfer.d.ts new file mode 100644 index 0000000..f37ddb7 --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@smithy/types/dist-types/ts3.4/transfer.d.ts @@ -0,0 +1,41 @@ +/** + * @public + */ +export type RequestHandlerOutput = { + response: ResponseType; +}; +/** + * @public + */ +export interface RequestHandler { + /** + * metadata contains information of a handler. For example + * 'h2' refers this handler is for handling HTTP/2 requests, + * whereas 'h1' refers handling HTTP1 requests + */ + metadata?: RequestHandlerMetadata; + destroy?: () => void; + handle: (request: RequestType, handlerOptions?: HandlerOptions) => Promise>; +} +/** + * @public + */ +export interface RequestHandlerMetadata { + handlerProtocol: RequestHandlerProtocol | string; +} +/** + * @public + * Values from ALPN Protocol IDs. 
+ * @see https://www.iana.org/assignments/tls-extensiontype-values/tls-extensiontype-values.xhtml#alpn-protocol-ids + */ +export declare enum RequestHandlerProtocol { + HTTP_0_9 = "http/0.9", + HTTP_1_0 = "http/1.0", + TDS_8_0 = "tds/8.0" +} +/** + * @public + */ +export interface RequestContext { + destination: URL; +} diff --git a/amplify/functions/downloadDocument/node_modules/@smithy/types/dist-types/ts3.4/transform/client-method-transforms.d.ts b/amplify/functions/downloadDocument/node_modules/@smithy/types/dist-types/ts3.4/transform/client-method-transforms.d.ts new file mode 100644 index 0000000..f1aecf3 --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@smithy/types/dist-types/ts3.4/transform/client-method-transforms.d.ts @@ -0,0 +1,26 @@ +import { CommandIO } from "../command"; +import { MetadataBearer } from "../response"; +import { StreamingBlobPayloadOutputTypes } from "../streaming-payload/streaming-blob-payload-output-types"; +import { Transform } from "./type-transform"; +/** + * @internal + * + * Narrowed version of InvokeFunction used in Client::send. + */ +export interface NarrowedInvokeFunction { + (command: CommandIO, options?: HttpHandlerOptions): Promise>; + (command: CommandIO, cb: (err: unknown, data?: Transform) => void): void; + (command: CommandIO, options: HttpHandlerOptions, cb: (err: unknown, data?: Transform) => void): void; + (command: CommandIO, options?: HttpHandlerOptions, cb?: (err: unknown, data?: Transform) => void): Promise> | void; +} +/** + * @internal + * + * Narrowed version of InvokeMethod used in aggregated Client methods. 
+ */ +export interface NarrowedInvokeMethod { + (input: InputType, options?: HttpHandlerOptions): Promise>; + (input: InputType, cb: (err: unknown, data?: Transform) => void): void; + (input: InputType, options: HttpHandlerOptions, cb: (err: unknown, data?: Transform) => void): void; + (input: InputType, options?: HttpHandlerOptions, cb?: (err: unknown, data?: OutputType) => void): Promise> | void; +} diff --git a/amplify/functions/downloadDocument/node_modules/@smithy/types/dist-types/ts3.4/transform/client-payload-blob-type-narrow.d.ts b/amplify/functions/downloadDocument/node_modules/@smithy/types/dist-types/ts3.4/transform/client-payload-blob-type-narrow.d.ts new file mode 100644 index 0000000..e9516e2 --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@smithy/types/dist-types/ts3.4/transform/client-payload-blob-type-narrow.d.ts @@ -0,0 +1,82 @@ +/// +import { IncomingMessage } from "http"; +import { ClientHttp2Stream } from "http2"; +import { InvokeMethod } from "../client"; +import { GetOutputType } from "../command"; +import { HttpHandlerOptions } from "../http"; +import { SdkStream } from "../serde"; +import { BrowserRuntimeStreamingBlobPayloadInputTypes, NodeJsRuntimeStreamingBlobPayloadInputTypes, StreamingBlobPayloadInputTypes } from "../streaming-payload/streaming-blob-payload-input-types"; +import { StreamingBlobPayloadOutputTypes } from "../streaming-payload/streaming-blob-payload-output-types"; +import { NarrowedInvokeMethod } from "./client-method-transforms"; +import { Transform } from "./type-transform"; +/** + * @public + * + * Creates a type with a given client type that narrows payload blob output + * types to SdkStream. + * + * This can be used for clients with the NodeHttpHandler requestHandler, + * the default in Node.js when not using HTTP2. 
+ * + * Usage example: + * ```typescript + * const client = new YourClient({}) as NodeJsClient; + * ``` + */ +export type NodeJsClient = NarrowPayloadBlobTypes, ClientType>; +/** + * @public + * Variant of NodeJsClient for node:http2. + */ +export type NodeJsHttp2Client = NarrowPayloadBlobTypes, ClientType>; +/** + * @public + * + * Creates a type with a given client type that narrows payload blob output + * types to SdkStream. + * + * This can be used for clients with the FetchHttpHandler requestHandler, + * which is the default in browser environments. + * + * Usage example: + * ```typescript + * const client = new YourClient({}) as BrowserClient; + * ``` + */ +export type BrowserClient = NarrowPayloadBlobTypes, ClientType>; +/** + * @public + * + * Variant of BrowserClient for XMLHttpRequest. + */ +export type BrowserXhrClient = NarrowPayloadBlobTypes, ClientType>; +/** + * @public + * + * @deprecated use NarrowPayloadBlobTypes. + * + * Narrow a given Client's blob payload outputs to the given type T. + */ +export type NarrowPayloadBlobOutputType = { + [key in keyof ClientType]: [ + ClientType[key] + ] extends [ + InvokeMethod + ] ? NarrowedInvokeMethod : ClientType[key]; +} & { + send(command: Command, options?: any): Promise, StreamingBlobPayloadOutputTypes | undefined, T>>; +}; +/** + * @public + * + * Narrow a Client's blob payload input and output types to I and O. + */ +export type NarrowPayloadBlobTypes = { + [key in keyof ClientType]: [ + ClientType[key] + ] extends [ + InvokeMethod + ] ? 
NarrowedInvokeMethod, FunctionOutputTypes> : ClientType[key]; +} & { + send(command: Command, options?: any): Promise, StreamingBlobPayloadOutputTypes | undefined, O>>; +}; diff --git a/amplify/functions/downloadDocument/node_modules/@smithy/types/dist-types/ts3.4/transform/exact.d.ts b/amplify/functions/downloadDocument/node_modules/@smithy/types/dist-types/ts3.4/transform/exact.d.ts new file mode 100644 index 0000000..3a812df --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@smithy/types/dist-types/ts3.4/transform/exact.d.ts @@ -0,0 +1,14 @@ +/** + * @internal + * + * Checks that A and B extend each other. + */ +export type Exact = [ + A +] extends [ + B +] ? ([ + B +] extends [ + A +] ? true : false) : false; diff --git a/amplify/functions/downloadDocument/node_modules/@smithy/types/dist-types/ts3.4/transform/no-undefined.d.ts b/amplify/functions/downloadDocument/node_modules/@smithy/types/dist-types/ts3.4/transform/no-undefined.d.ts new file mode 100644 index 0000000..6a7f6d8 --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@smithy/types/dist-types/ts3.4/transform/no-undefined.d.ts @@ -0,0 +1,88 @@ +import { InvokeMethod, InvokeMethodOptionalArgs } from "../client"; +import { GetOutputType } from "../command"; +import { DocumentType } from "../shapes"; +/** + * @public + * + * This type is intended as a type helper for generated clients. + * When initializing client, cast it to this type by passing + * the client constructor type as the type parameter. + * + * It will then recursively remove "undefined" as a union type from all + * input and output shapes' members. Note, this does not affect + * any member that is optional (?) such as outputs with no required members. 
+ * + * @example + * ```ts + * const client = new Client({}) as AssertiveClient; + * ``` + */ +export type AssertiveClient = NarrowClientIOTypes; +/** + * @public + * + * This is similar to AssertiveClient but additionally changes all + * output types to (recursive) Required so as to bypass all output nullability guards. + */ +export type UncheckedClient = UncheckedClientOutputTypes; +/** + * @internal + * + * Excludes undefined recursively. + */ +export type NoUndefined = T extends Function ? T : T extends DocumentType ? T : [ + T +] extends [ + object +] ? { + [key in keyof T]: NoUndefined; +} : Exclude; +/** + * @internal + * + * Excludes undefined and optional recursively. + */ +export type RecursiveRequired = T extends Function ? T : T extends DocumentType ? T : [ + T +] extends [ + object +] ? { + [key in keyof T]-?: RecursiveRequired; +} : Exclude; +/** + * @internal + * + * Removes undefined from unions. + */ +type NarrowClientIOTypes = { + [key in keyof ClientType]: [ + ClientType[key] + ] extends [ + InvokeMethodOptionalArgs + ] ? InvokeMethodOptionalArgs, NoUndefined> : [ + ClientType[key] + ] extends [ + InvokeMethod + ] ? InvokeMethod, NoUndefined> : ClientType[key]; +} & { + send(command: Command, options?: any): Promise>>; +}; +/** + * @internal + * + * Removes undefined from unions and adds yolo output types. + */ +type UncheckedClientOutputTypes = { + [key in keyof ClientType]: [ + ClientType[key] + ] extends [ + InvokeMethodOptionalArgs + ] ? InvokeMethodOptionalArgs, RecursiveRequired> : [ + ClientType[key] + ] extends [ + InvokeMethod + ] ? 
InvokeMethod, RecursiveRequired> : ClientType[key]; +} & { + send(command: Command, options?: any): Promise>>>; +}; +export {}; diff --git a/amplify/functions/downloadDocument/node_modules/@smithy/types/dist-types/ts3.4/transform/type-transform.d.ts b/amplify/functions/downloadDocument/node_modules/@smithy/types/dist-types/ts3.4/transform/type-transform.d.ts new file mode 100644 index 0000000..547303f --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@smithy/types/dist-types/ts3.4/transform/type-transform.d.ts @@ -0,0 +1,41 @@ +/** + * @public + * + * Transforms any members of the object T having type FromType + * to ToType. This applies only to exact type matches. + * + * This is for the case where FromType is a union and only those fields + * matching the same union should be transformed. + */ +export type Transform = RecursiveTransformExact; +/** + * @internal + * + * Returns ToType if T matches exactly with FromType. + */ +type TransformExact = [ + T +] extends [ + FromType +] ? ([ + FromType +] extends [ + T +] ? ToType : T) : T; +/** + * @internal + * + * Applies TransformExact to members of an object recursively. + */ +type RecursiveTransformExact = T extends Function ? T : T extends object ? { + [key in keyof T]: [ + T[key] + ] extends [ + FromType + ] ? [ + FromType + ] extends [ + T[key] + ] ? ToType : RecursiveTransformExact : RecursiveTransformExact; +} : TransformExact; +export {}; diff --git a/amplify/functions/downloadDocument/node_modules/@smithy/types/dist-types/ts3.4/uri.d.ts b/amplify/functions/downloadDocument/node_modules/@smithy/types/dist-types/ts3.4/uri.d.ts new file mode 100644 index 0000000..4e7adb4 --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@smithy/types/dist-types/ts3.4/uri.d.ts @@ -0,0 +1,17 @@ +import { QueryParameterBag } from "./http"; +/** + * @internal + * + * Represents the components parts of a Uniform Resource Identifier used to + * construct the target location of a Request. 
+ */ +export type URI = { + protocol: string; + hostname: string; + port?: number; + path: string; + query?: QueryParameterBag; + username?: string; + password?: string; + fragment?: string; +}; diff --git a/amplify/functions/downloadDocument/node_modules/@smithy/types/dist-types/ts3.4/util.d.ts b/amplify/functions/downloadDocument/node_modules/@smithy/types/dist-types/ts3.4/util.d.ts new file mode 100644 index 0000000..7c700af --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@smithy/types/dist-types/ts3.4/util.d.ts @@ -0,0 +1,192 @@ +import { Endpoint } from "./http"; +import { FinalizeHandler, FinalizeHandlerArguments, FinalizeHandlerOutput } from "./middleware"; +import { MetadataBearer } from "./response"; +/** + * @public + * + * A generic which checks if Type1 is exactly same as Type2. + */ +export type Exact = [ + Type1 +] extends [ + Type2 +] ? ([ + Type2 +] extends [ + Type1 +] ? true : false) : false; +/** + * @public + * + * A function that, given a Uint8Array of bytes, can produce a string + * representation thereof. The function may optionally attempt to + * convert other input types to Uint8Array before encoding. + * + * @example An encoder function that converts bytes to hexadecimal + * representation would return `'hello'` when given + * `new Uint8Array([104, 101, 108, 108, 111])`. + */ +export interface Encoder { + /** + * Caution: the `any` type on the input is for backwards compatibility. + * Runtime support is limited to Uint8Array and string by default. + * + * You may choose to support more encoder input types if overriding the default + * implementations. + */ + (input: Uint8Array | string | any): string; +} +/** + * @public + * + * A function that, given a string, can derive the bytes represented by that + * string. + * + * @example A decoder function that converts bytes to hexadecimal + * representation would return `new Uint8Array([104, 101, 108, 108, 111])` when + * given the string `'hello'`. 
+ */ +export interface Decoder { + (input: string): Uint8Array; +} +/** + * @public + * + * A function that, when invoked, returns a promise that will be fulfilled with + * a value of type T. + * + * @example A function that reads credentials from shared SDK configuration + * files, assuming roles and collecting MFA tokens as necessary. + */ +export interface Provider { + (): Promise; +} +/** + * @public + * + * A tuple that represents an API name and optional version + * of a library built using the AWS SDK. + */ +export type UserAgentPair = [ + /*name*/ string, + /*version*/ string +]; +/** + * @public + * + * User agent data that to be put into the request's user + * agent. + */ +export type UserAgent = UserAgentPair[]; +/** + * @public + * + * Parses a URL in string form into an Endpoint object. + */ +export interface UrlParser { + (url: string | URL): Endpoint; +} +/** + * @public + * + * A function that, when invoked, returns a promise that will be fulfilled with + * a value of type T. It memoizes the result from the previous invocation + * instead of calling the underlying resources every time. + * + * You can force the provider to refresh the memoized value by invoke the + * function with optional parameter hash with `forceRefresh` boolean key and + * value `true`. + * + * @example A function that reads credentials from IMDS service that could + * return expired credentials. The SDK will keep using the expired credentials + * until an unretryable service error requiring a force refresh of the + * credentials. + */ +export interface MemoizedProvider { + (options?: { + forceRefresh?: boolean; + }): Promise; +} +/** + * @public + * + * A function that, given a request body, determines the + * length of the body. This is used to determine the Content-Length + * that should be sent with a request. + * + * @example A function that reads a file stream and calculates + * the size of the file. 
+ */ +export interface BodyLengthCalculator { + (body: any): number | undefined; +} +/** + * @public + * + * Object containing regionalization information of + * AWS services. + */ +export interface RegionInfo { + hostname: string; + partition: string; + path?: string; + signingService?: string; + signingRegion?: string; +} +/** + * @public + * + * Options to pass when calling {@link RegionInfoProvider} + */ +export interface RegionInfoProviderOptions { + /** + * Enables IPv6/IPv4 dualstack endpoint. + * @defaultValue false + */ + useDualstackEndpoint: boolean; + /** + * Enables FIPS compatible endpoints. + * @defaultValue false + */ + useFipsEndpoint: boolean; +} +/** + * @public + * + * Function returns designated service's regionalization + * information from given region. Each service client + * comes with its regionalization provider. it serves + * to provide the default values of related configurations + */ +export interface RegionInfoProvider { + (region: string, options?: RegionInfoProviderOptions): Promise; +} +/** + * @public + * + * Interface that specifies the retry behavior + */ +export interface RetryStrategy { + /** + * The retry mode describing how the retry strategy control the traffic flow. + */ + mode?: string; + /** + * the retry behavior the will invoke the next handler and handle the retry accordingly. + * This function should also update the $metadata from the response accordingly. + * @see {@link ResponseMetadata} + */ + retry: (next: FinalizeHandler, args: FinalizeHandlerArguments) => Promise>; +} +/** + * @public + * + * Indicates the parameter may be omitted if the parameter object T + * is equivalent to a Partial, i.e. all properties optional. + */ +export type OptionalParameter = Exact, T> extends true ? 
[ +] | [ + T +] : [ + T +]; diff --git a/amplify/functions/downloadDocument/node_modules/@smithy/types/dist-types/ts3.4/waiter.d.ts b/amplify/functions/downloadDocument/node_modules/@smithy/types/dist-types/ts3.4/waiter.d.ts new file mode 100644 index 0000000..2cc2fff --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@smithy/types/dist-types/ts3.4/waiter.d.ts @@ -0,0 +1,35 @@ +import { AbortController as DeprecatedAbortController } from "./abort"; +/** + * @public + */ +export interface WaiterConfiguration { + /** + * Required service client + */ + client: Client; + /** + * The amount of time in seconds a user is willing to wait for a waiter to complete. + */ + maxWaitTime: number; + /** + * @deprecated Use abortSignal + * Abort controller. Used for ending the waiter early. + */ + abortController?: AbortController | DeprecatedAbortController; + /** + * Abort Signal. Used for ending the waiter early. + */ + abortSignal?: AbortController["signal"] | DeprecatedAbortController["signal"]; + /** + * The minimum amount of time to delay between retries in seconds. This is the + * floor of the exponential backoff. This value defaults to service default + * if not specified. This value MUST be less than or equal to maxDelay and greater than 0. + */ + minDelay?: number; + /** + * The maximum amount of time to delay between retries in seconds. This is the + * ceiling of the exponential backoff. This value defaults to service default + * if not specified. If specified, this value MUST be greater than or equal to 1. 
+ */ + maxDelay?: number; +} diff --git a/amplify/functions/downloadDocument/node_modules/@smithy/types/dist-types/uri.d.ts b/amplify/functions/downloadDocument/node_modules/@smithy/types/dist-types/uri.d.ts new file mode 100644 index 0000000..d7b874c --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@smithy/types/dist-types/uri.d.ts @@ -0,0 +1,17 @@ +import { QueryParameterBag } from "./http"; +/** + * @internal + * + * Represents the components parts of a Uniform Resource Identifier used to + * construct the target location of a Request. + */ +export type URI = { + protocol: string; + hostname: string; + port?: number; + path: string; + query?: QueryParameterBag; + username?: string; + password?: string; + fragment?: string; +}; diff --git a/amplify/functions/downloadDocument/node_modules/@smithy/types/dist-types/util.d.ts b/amplify/functions/downloadDocument/node_modules/@smithy/types/dist-types/util.d.ts new file mode 100644 index 0000000..b15045c --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@smithy/types/dist-types/util.d.ts @@ -0,0 +1,176 @@ +import { Endpoint } from "./http"; +import { FinalizeHandler, FinalizeHandlerArguments, FinalizeHandlerOutput } from "./middleware"; +import { MetadataBearer } from "./response"; +/** + * @public + * + * A generic which checks if Type1 is exactly same as Type2. + */ +export type Exact = [Type1] extends [Type2] ? ([Type2] extends [Type1] ? true : false) : false; +/** + * @public + * + * A function that, given a Uint8Array of bytes, can produce a string + * representation thereof. The function may optionally attempt to + * convert other input types to Uint8Array before encoding. + * + * @example An encoder function that converts bytes to hexadecimal + * representation would return `'hello'` when given + * `new Uint8Array([104, 101, 108, 108, 111])`. + */ +export interface Encoder { + /** + * Caution: the `any` type on the input is for backwards compatibility. 
+ * Runtime support is limited to Uint8Array and string by default. + * + * You may choose to support more encoder input types if overriding the default + * implementations. + */ + (input: Uint8Array | string | any): string; +} +/** + * @public + * + * A function that, given a string, can derive the bytes represented by that + * string. + * + * @example A decoder function that converts bytes to hexadecimal + * representation would return `new Uint8Array([104, 101, 108, 108, 111])` when + * given the string `'hello'`. + */ +export interface Decoder { + (input: string): Uint8Array; +} +/** + * @public + * + * A function that, when invoked, returns a promise that will be fulfilled with + * a value of type T. + * + * @example A function that reads credentials from shared SDK configuration + * files, assuming roles and collecting MFA tokens as necessary. + */ +export interface Provider { + (): Promise; +} +/** + * @public + * + * A tuple that represents an API name and optional version + * of a library built using the AWS SDK. + */ +export type UserAgentPair = [name: string, version?: string]; +/** + * @public + * + * User agent data that to be put into the request's user + * agent. + */ +export type UserAgent = UserAgentPair[]; +/** + * @public + * + * Parses a URL in string form into an Endpoint object. + */ +export interface UrlParser { + (url: string | URL): Endpoint; +} +/** + * @public + * + * A function that, when invoked, returns a promise that will be fulfilled with + * a value of type T. It memoizes the result from the previous invocation + * instead of calling the underlying resources every time. + * + * You can force the provider to refresh the memoized value by invoke the + * function with optional parameter hash with `forceRefresh` boolean key and + * value `true`. + * + * @example A function that reads credentials from IMDS service that could + * return expired credentials. 
The SDK will keep using the expired credentials + * until an unretryable service error requiring a force refresh of the + * credentials. + */ +export interface MemoizedProvider { + (options?: { + forceRefresh?: boolean; + }): Promise; +} +/** + * @public + * + * A function that, given a request body, determines the + * length of the body. This is used to determine the Content-Length + * that should be sent with a request. + * + * @example A function that reads a file stream and calculates + * the size of the file. + */ +export interface BodyLengthCalculator { + (body: any): number | undefined; +} +/** + * @public + * + * Object containing regionalization information of + * AWS services. + */ +export interface RegionInfo { + hostname: string; + partition: string; + path?: string; + signingService?: string; + signingRegion?: string; +} +/** + * @public + * + * Options to pass when calling {@link RegionInfoProvider} + */ +export interface RegionInfoProviderOptions { + /** + * Enables IPv6/IPv4 dualstack endpoint. + * @defaultValue false + */ + useDualstackEndpoint: boolean; + /** + * Enables FIPS compatible endpoints. + * @defaultValue false + */ + useFipsEndpoint: boolean; +} +/** + * @public + * + * Function returns designated service's regionalization + * information from given region. Each service client + * comes with its regionalization provider. it serves + * to provide the default values of related configurations + */ +export interface RegionInfoProvider { + (region: string, options?: RegionInfoProviderOptions): Promise; +} +/** + * @public + * + * Interface that specifies the retry behavior + */ +export interface RetryStrategy { + /** + * The retry mode describing how the retry strategy control the traffic flow. + */ + mode?: string; + /** + * the retry behavior the will invoke the next handler and handle the retry accordingly. + * This function should also update the $metadata from the response accordingly. 
+ * @see {@link ResponseMetadata} + */ + retry: (next: FinalizeHandler, args: FinalizeHandlerArguments) => Promise>; +} +/** + * @public + * + * Indicates the parameter may be omitted if the parameter object T + * is equivalent to a Partial, i.e. all properties optional. + */ +export type OptionalParameter = Exact, T> extends true ? [] | [T] : [T]; diff --git a/amplify/functions/downloadDocument/node_modules/@smithy/types/dist-types/waiter.d.ts b/amplify/functions/downloadDocument/node_modules/@smithy/types/dist-types/waiter.d.ts new file mode 100644 index 0000000..5941832 --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@smithy/types/dist-types/waiter.d.ts @@ -0,0 +1,35 @@ +import { AbortController as DeprecatedAbortController } from "./abort"; +/** + * @public + */ +export interface WaiterConfiguration { + /** + * Required service client + */ + client: Client; + /** + * The amount of time in seconds a user is willing to wait for a waiter to complete. + */ + maxWaitTime: number; + /** + * @deprecated Use abortSignal + * Abort controller. Used for ending the waiter early. + */ + abortController?: AbortController | DeprecatedAbortController; + /** + * Abort Signal. Used for ending the waiter early. + */ + abortSignal?: AbortController["signal"] | DeprecatedAbortController["signal"]; + /** + * The minimum amount of time to delay between retries in seconds. This is the + * floor of the exponential backoff. This value defaults to service default + * if not specified. This value MUST be less than or equal to maxDelay and greater than 0. + */ + minDelay?: number; + /** + * The maximum amount of time to delay between retries in seconds. This is the + * ceiling of the exponential backoff. This value defaults to service default + * if not specified. If specified, this value MUST be greater than or equal to 1. 
+ */ + maxDelay?: number; +} diff --git a/amplify/functions/downloadDocument/node_modules/@smithy/types/package.json b/amplify/functions/downloadDocument/node_modules/@smithy/types/package.json new file mode 100644 index 0000000..87c5ad0 --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@smithy/types/package.json @@ -0,0 +1,60 @@ +{ + "name": "@smithy/types", + "version": "4.2.0", + "scripts": { + "build": "concurrently 'yarn:build:cjs' 'yarn:build:es' 'yarn:build:types && yarn build:types:downlevel'", + "build:cjs": "node ../../scripts/inline types", + "build:es": "yarn g:tsc -p tsconfig.es.json", + "build:types": "yarn g:tsc -p tsconfig.types.json", + "build:types:downlevel": "rimraf dist-types/ts3.4 && downlevel-dts dist-types dist-types/ts3.4 && node scripts/downlevel", + "stage-release": "rimraf ./.release && yarn pack && mkdir ./.release && tar zxvf ./package.tgz --directory ./.release && rm ./package.tgz", + "clean": "rimraf ./dist-* && rimraf *.tsbuildinfo || exit 0", + "lint": "eslint -c ../../.eslintrc.js \"src/**/*.ts\"", + "format": "prettier --config ../../prettier.config.js --ignore-path ../../.prettierignore --write \"**/*.{ts,md,json}\"", + "test": "yarn g:tsc -p tsconfig.test.json", + "extract:docs": "api-extractor run --local" + }, + "main": "./dist-cjs/index.js", + "module": "./dist-es/index.js", + "types": "./dist-types/index.d.ts", + "author": { + "name": "AWS Smithy Team", + "email": "", + "url": "https://smithy.io" + }, + "license": "Apache-2.0", + "dependencies": { + "tslib": "^2.6.2" + }, + "engines": { + "node": ">=18.0.0" + }, + "typesVersions": { + "<=4.0": { + "dist-types/*": [ + "dist-types/ts3.4/*" + ] + } + }, + "files": [ + "dist-*/**" + ], + "homepage": "https://github.com/smithy-lang/smithy-typescript/tree/main/packages/types", + "repository": { + "type": "git", + "url": "https://github.com/smithy-lang/smithy-typescript.git", + "directory": "packages/types" + }, + "devDependencies": { + "concurrently": "7.0.0", + 
"downlevel-dts": "0.10.1", + "rimraf": "3.0.2", + "typedoc": "0.23.23" + }, + "typedoc": { + "entryPoint": "src/index.ts" + }, + "publishConfig": { + "directory": ".release/package" + } +} \ No newline at end of file diff --git a/amplify/functions/downloadDocument/node_modules/@smithy/url-parser/LICENSE b/amplify/functions/downloadDocument/node_modules/@smithy/url-parser/LICENSE new file mode 100644 index 0000000..dd65ae0 --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@smithy/url-parser/LICENSE @@ -0,0 +1,201 @@ + Apache License + Version 2.0, January 2004 + http://www.apache.org/licenses/ + + TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION + + 1. Definitions. + + "License" shall mean the terms and conditions for use, reproduction, + and distribution as defined by Sections 1 through 9 of this document. + + "Licensor" shall mean the copyright owner or entity authorized by + the copyright owner that is granting the License. + + "Legal Entity" shall mean the union of the acting entity and all + other entities that control, are controlled by, or are under common + control with that entity. For the purposes of this definition, + "control" means (i) the power, direct or indirect, to cause the + direction or management of such entity, whether by contract or + otherwise, or (ii) ownership of fifty percent (50%) or more of the + outstanding shares, or (iii) beneficial ownership of such entity. + + "You" (or "Your") shall mean an individual or Legal Entity + exercising permissions granted by this License. + + "Source" form shall mean the preferred form for making modifications, + including but not limited to software source code, documentation + source, and configuration files. + + "Object" form shall mean any form resulting from mechanical + transformation or translation of a Source form, including but + not limited to compiled object code, generated documentation, + and conversions to other media types. 
+ + "Work" shall mean the work of authorship, whether in Source or + Object form, made available under the License, as indicated by a + copyright notice that is included in or attached to the work + (an example is provided in the Appendix below). + + "Derivative Works" shall mean any work, whether in Source or Object + form, that is based on (or derived from) the Work and for which the + editorial revisions, annotations, elaborations, or other modifications + represent, as a whole, an original work of authorship. For the purposes + of this License, Derivative Works shall not include works that remain + separable from, or merely link (or bind by name) to the interfaces of, + the Work and Derivative Works thereof. + + "Contribution" shall mean any work of authorship, including + the original version of the Work and any modifications or additions + to that Work or Derivative Works thereof, that is intentionally + submitted to Licensor for inclusion in the Work by the copyright owner + or by an individual or Legal Entity authorized to submit on behalf of + the copyright owner. For the purposes of this definition, "submitted" + means any form of electronic, verbal, or written communication sent + to the Licensor or its representatives, including but not limited to + communication on electronic mailing lists, source code control systems, + and issue tracking systems that are managed by, or on behalf of, the + Licensor for the purpose of discussing and improving the Work, but + excluding communication that is conspicuously marked or otherwise + designated in writing by the copyright owner as "Not a Contribution." + + "Contributor" shall mean Licensor and any individual or Legal Entity + on behalf of whom a Contribution has been received by Licensor and + subsequently incorporated within the Work. + + 2. Grant of Copyright License. 
Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + copyright license to reproduce, prepare Derivative Works of, + publicly display, publicly perform, sublicense, and distribute the + Work and such Derivative Works in Source or Object form. + + 3. Grant of Patent License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + (except as stated in this section) patent license to make, have made, + use, offer to sell, sell, import, and otherwise transfer the Work, + where such license applies only to those patent claims licensable + by such Contributor that are necessarily infringed by their + Contribution(s) alone or by combination of their Contribution(s) + with the Work to which such Contribution(s) was submitted. If You + institute patent litigation against any entity (including a + cross-claim or counterclaim in a lawsuit) alleging that the Work + or a Contribution incorporated within the Work constitutes direct + or contributory patent infringement, then any patent licenses + granted to You under this License for that Work shall terminate + as of the date such litigation is filed. + + 4. Redistribution. 
You may reproduce and distribute copies of the + Work or Derivative Works thereof in any medium, with or without + modifications, and in Source or Object form, provided that You + meet the following conditions: + + (a) You must give any other recipients of the Work or + Derivative Works a copy of this License; and + + (b) You must cause any modified files to carry prominent notices + stating that You changed the files; and + + (c) You must retain, in the Source form of any Derivative Works + that You distribute, all copyright, patent, trademark, and + attribution notices from the Source form of the Work, + excluding those notices that do not pertain to any part of + the Derivative Works; and + + (d) If the Work includes a "NOTICE" text file as part of its + distribution, then any Derivative Works that You distribute must + include a readable copy of the attribution notices contained + within such NOTICE file, excluding those notices that do not + pertain to any part of the Derivative Works, in at least one + of the following places: within a NOTICE text file distributed + as part of the Derivative Works; within the Source form or + documentation, if provided along with the Derivative Works; or, + within a display generated by the Derivative Works, if and + wherever such third-party notices normally appear. The contents + of the NOTICE file are for informational purposes only and + do not modify the License. You may add Your own attribution + notices within Derivative Works that You distribute, alongside + or as an addendum to the NOTICE text from the Work, provided + that such additional attribution notices cannot be construed + as modifying the License. 
+ + You may add Your own copyright statement to Your modifications and + may provide additional or different license terms and conditions + for use, reproduction, or distribution of Your modifications, or + for any such Derivative Works as a whole, provided Your use, + reproduction, and distribution of the Work otherwise complies with + the conditions stated in this License. + + 5. Submission of Contributions. Unless You explicitly state otherwise, + any Contribution intentionally submitted for inclusion in the Work + by You to the Licensor shall be under the terms and conditions of + this License, without any additional terms or conditions. + Notwithstanding the above, nothing herein shall supersede or modify + the terms of any separate license agreement you may have executed + with Licensor regarding such Contributions. + + 6. Trademarks. This License does not grant permission to use the trade + names, trademarks, service marks, or product names of the Licensor, + except as required for reasonable and customary use in describing the + origin of the Work and reproducing the content of the NOTICE file. + + 7. Disclaimer of Warranty. Unless required by applicable law or + agreed to in writing, Licensor provides the Work (and each + Contributor provides its Contributions) on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or + implied, including, without limitation, any warranties or conditions + of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A + PARTICULAR PURPOSE. You are solely responsible for determining the + appropriateness of using or redistributing the Work and assume any + risks associated with Your exercise of permissions under this License. + + 8. Limitation of Liability. 
In no event and under no legal theory, + whether in tort (including negligence), contract, or otherwise, + unless required by applicable law (such as deliberate and grossly + negligent acts) or agreed to in writing, shall any Contributor be + liable to You for damages, including any direct, indirect, special, + incidental, or consequential damages of any character arising as a + result of this License or out of the use or inability to use the + Work (including but not limited to damages for loss of goodwill, + work stoppage, computer failure or malfunction, or any and all + other commercial damages or losses), even if such Contributor + has been advised of the possibility of such damages. + + 9. Accepting Warranty or Additional Liability. While redistributing + the Work or Derivative Works thereof, You may choose to offer, + and charge a fee for, acceptance of support, warranty, indemnity, + or other liability obligations and/or rights consistent with this + License. However, in accepting such obligations, You may act only + on Your own behalf and on Your sole responsibility, not on behalf + of any other Contributor, and only if You agree to indemnify, + defend, and hold each Contributor harmless for any liability + incurred by, or claims asserted against, such Contributor by reason + of your accepting any such warranty or additional liability. + + END OF TERMS AND CONDITIONS + + APPENDIX: How to apply the Apache License to your work. + + To apply the Apache License to your work, attach the following + boilerplate notice, with the fields enclosed by brackets "{}" + replaced with your own identifying information. (Don't include + the brackets!) The text should be enclosed in the appropriate + comment syntax for the file format. We also recommend that a + file or class name and description of purpose be included on the + same "printed page" as the copyright notice for easier + identification within third-party archives. + + Copyright 2018-2020 Amazon.com, Inc. 
or its affiliates. All Rights Reserved. + + Licensed under the Apache License, Version 2.0 (the "License"); + you may not use this file except in compliance with the License. + You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + + Unless required by applicable law or agreed to in writing, software + distributed under the License is distributed on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + See the License for the specific language governing permissions and + limitations under the License. diff --git a/amplify/functions/downloadDocument/node_modules/@smithy/url-parser/README.md b/amplify/functions/downloadDocument/node_modules/@smithy/url-parser/README.md new file mode 100644 index 0000000..0d8d61e --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@smithy/url-parser/README.md @@ -0,0 +1,10 @@ +# @smithy/url-parser + +[![NPM version](https://img.shields.io/npm/v/@smithy/url-parser/latest.svg)](https://www.npmjs.com/package/@smithy/url-parser) +[![NPM downloads](https://img.shields.io/npm/dm/@smithy/url-parser.svg)](https://www.npmjs.com/package/@smithy/url-parser) + +> An internal package + +## Usage + +You probably shouldn't, at least directly. 
diff --git a/amplify/functions/downloadDocument/node_modules/@smithy/url-parser/dist-cjs/index.js b/amplify/functions/downloadDocument/node_modules/@smithy/url-parser/dist-cjs/index.js new file mode 100644 index 0000000..ab81787 --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@smithy/url-parser/dist-cjs/index.js @@ -0,0 +1,49 @@ +var __defProp = Object.defineProperty; +var __getOwnPropDesc = Object.getOwnPropertyDescriptor; +var __getOwnPropNames = Object.getOwnPropertyNames; +var __hasOwnProp = Object.prototype.hasOwnProperty; +var __name = (target, value) => __defProp(target, "name", { value, configurable: true }); +var __export = (target, all) => { + for (var name in all) + __defProp(target, name, { get: all[name], enumerable: true }); +}; +var __copyProps = (to, from, except, desc) => { + if (from && typeof from === "object" || typeof from === "function") { + for (let key of __getOwnPropNames(from)) + if (!__hasOwnProp.call(to, key) && key !== except) + __defProp(to, key, { get: () => from[key], enumerable: !(desc = __getOwnPropDesc(from, key)) || desc.enumerable }); + } + return to; +}; +var __toCommonJS = (mod) => __copyProps(__defProp({}, "__esModule", { value: true }), mod); + +// src/index.ts +var src_exports = {}; +__export(src_exports, { + parseUrl: () => parseUrl +}); +module.exports = __toCommonJS(src_exports); +var import_querystring_parser = require("@smithy/querystring-parser"); +var parseUrl = /* @__PURE__ */ __name((url) => { + if (typeof url === "string") { + return parseUrl(new URL(url)); + } + const { hostname, pathname, port, protocol, search } = url; + let query; + if (search) { + query = (0, import_querystring_parser.parseQueryString)(search); + } + return { + hostname, + port: port ? 
parseInt(port) : void 0, + protocol, + path: pathname, + query + }; +}, "parseUrl"); +// Annotate the CommonJS export names for ESM import in node: + +0 && (module.exports = { + parseUrl +}); + diff --git a/amplify/functions/downloadDocument/node_modules/@smithy/url-parser/dist-es/index.js b/amplify/functions/downloadDocument/node_modules/@smithy/url-parser/dist-es/index.js new file mode 100644 index 0000000..811f8bf --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@smithy/url-parser/dist-es/index.js @@ -0,0 +1,18 @@ +import { parseQueryString } from "@smithy/querystring-parser"; +export const parseUrl = (url) => { + if (typeof url === "string") { + return parseUrl(new URL(url)); + } + const { hostname, pathname, port, protocol, search } = url; + let query; + if (search) { + query = parseQueryString(search); + } + return { + hostname, + port: port ? parseInt(port) : undefined, + protocol, + path: pathname, + query, + }; +}; diff --git a/amplify/functions/downloadDocument/node_modules/@smithy/url-parser/dist-types/index.d.ts b/amplify/functions/downloadDocument/node_modules/@smithy/url-parser/dist-types/index.d.ts new file mode 100644 index 0000000..b0d91c9 --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@smithy/url-parser/dist-types/index.d.ts @@ -0,0 +1,5 @@ +import { UrlParser } from "@smithy/types"; +/** + * @internal + */ +export declare const parseUrl: UrlParser; diff --git a/amplify/functions/downloadDocument/node_modules/@smithy/url-parser/dist-types/ts3.4/index.d.ts b/amplify/functions/downloadDocument/node_modules/@smithy/url-parser/dist-types/ts3.4/index.d.ts new file mode 100644 index 0000000..d6f0ec5 --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@smithy/url-parser/dist-types/ts3.4/index.d.ts @@ -0,0 +1,5 @@ +import { UrlParser } from "@smithy/types"; +/** + * @internal + */ +export declare const parseUrl: UrlParser; diff --git 
a/amplify/functions/downloadDocument/node_modules/@smithy/url-parser/package.json b/amplify/functions/downloadDocument/node_modules/@smithy/url-parser/package.json new file mode 100644 index 0000000..10aebb8 --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@smithy/url-parser/package.json @@ -0,0 +1,61 @@ +{ + "name": "@smithy/url-parser", + "version": "4.0.2", + "scripts": { + "build": "concurrently 'yarn:build:cjs' 'yarn:build:es' 'yarn:build:types && yarn build:types:downlevel'", + "build:cjs": "node ../../scripts/inline url-parser", + "build:es": "yarn g:tsc -p tsconfig.es.json", + "build:types": "yarn g:tsc -p tsconfig.types.json", + "build:types:downlevel": "rimraf dist-types/ts3.4 && downlevel-dts dist-types dist-types/ts3.4", + "stage-release": "rimraf ./.release && yarn pack && mkdir ./.release && tar zxvf ./package.tgz --directory ./.release && rm ./package.tgz", + "clean": "rimraf ./dist-* && rimraf *.tsbuildinfo || exit 0", + "lint": "eslint -c ../../.eslintrc.js \"src/**/*.ts\"", + "format": "prettier --config ../../prettier.config.js --ignore-path ../../.prettierignore --write \"**/*.{ts,md,json}\"", + "test": "yarn g:vitest run", + "test:watch": "yarn g:vitest watch" + }, + "main": "./dist-cjs/index.js", + "module": "./dist-es/index.js", + "types": "./dist-types/index.d.ts", + "author": { + "name": "AWS SDK for JavaScript Team", + "url": "https://aws.amazon.com/javascript/" + }, + "license": "Apache-2.0", + "dependencies": { + "@smithy/querystring-parser": "^4.0.2", + "@smithy/types": "^4.2.0", + "tslib": "^2.6.2" + }, + "typesVersions": { + "<4.0": { + "dist-types/*": [ + "dist-types/ts3.4/*" + ] + } + }, + "files": [ + "dist-*/**" + ], + "homepage": "https://github.com/smithy-lang/smithy-typescript/tree/main/packages/url-parser", + "repository": { + "type": "git", + "url": "https://github.com/smithy-lang/smithy-typescript.git", + "directory": "packages/url-parser" + }, + "devDependencies": { + "concurrently": "7.0.0", + 
"downlevel-dts": "0.10.1", + "rimraf": "3.0.2", + "typedoc": "0.23.23" + }, + "typedoc": { + "entryPoint": "src/index.ts" + }, + "publishConfig": { + "directory": ".release/package" + }, + "engines": { + "node": ">=18.0.0" + } +} \ No newline at end of file diff --git a/amplify/functions/downloadDocument/node_modules/@smithy/util-base64/LICENSE b/amplify/functions/downloadDocument/node_modules/@smithy/util-base64/LICENSE new file mode 100644 index 0000000..7b6491b --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@smithy/util-base64/LICENSE @@ -0,0 +1,201 @@ +Apache License + Version 2.0, January 2004 + http://www.apache.org/licenses/ + + TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION + + 1. Definitions. + + "License" shall mean the terms and conditions for use, reproduction, + and distribution as defined by Sections 1 through 9 of this document. + + "Licensor" shall mean the copyright owner or entity authorized by + the copyright owner that is granting the License. + + "Legal Entity" shall mean the union of the acting entity and all + other entities that control, are controlled by, or are under common + control with that entity. For the purposes of this definition, + "control" means (i) the power, direct or indirect, to cause the + direction or management of such entity, whether by contract or + otherwise, or (ii) ownership of fifty percent (50%) or more of the + outstanding shares, or (iii) beneficial ownership of such entity. + + "You" (or "Your") shall mean an individual or Legal Entity + exercising permissions granted by this License. + + "Source" form shall mean the preferred form for making modifications, + including but not limited to software source code, documentation + source, and configuration files. 
+ + "Object" form shall mean any form resulting from mechanical + transformation or translation of a Source form, including but + not limited to compiled object code, generated documentation, + and conversions to other media types. + + "Work" shall mean the work of authorship, whether in Source or + Object form, made available under the License, as indicated by a + copyright notice that is included in or attached to the work + (an example is provided in the Appendix below). + + "Derivative Works" shall mean any work, whether in Source or Object + form, that is based on (or derived from) the Work and for which the + editorial revisions, annotations, elaborations, or other modifications + represent, as a whole, an original work of authorship. For the purposes + of this License, Derivative Works shall not include works that remain + separable from, or merely link (or bind by name) to the interfaces of, + the Work and Derivative Works thereof. + + "Contribution" shall mean any work of authorship, including + the original version of the Work and any modifications or additions + to that Work or Derivative Works thereof, that is intentionally + submitted to Licensor for inclusion in the Work by the copyright owner + or by an individual or Legal Entity authorized to submit on behalf of + the copyright owner. For the purposes of this definition, "submitted" + means any form of electronic, verbal, or written communication sent + to the Licensor or its representatives, including but not limited to + communication on electronic mailing lists, source code control systems, + and issue tracking systems that are managed by, or on behalf of, the + Licensor for the purpose of discussing and improving the Work, but + excluding communication that is conspicuously marked or otherwise + designated in writing by the copyright owner as "Not a Contribution." 
+ + "Contributor" shall mean Licensor and any individual or Legal Entity + on behalf of whom a Contribution has been received by Licensor and + subsequently incorporated within the Work. + + 2. Grant of Copyright License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + copyright license to reproduce, prepare Derivative Works of, + publicly display, publicly perform, sublicense, and distribute the + Work and such Derivative Works in Source or Object form. + + 3. Grant of Patent License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + (except as stated in this section) patent license to make, have made, + use, offer to sell, sell, import, and otherwise transfer the Work, + where such license applies only to those patent claims licensable + by such Contributor that are necessarily infringed by their + Contribution(s) alone or by combination of their Contribution(s) + with the Work to which such Contribution(s) was submitted. If You + institute patent litigation against any entity (including a + cross-claim or counterclaim in a lawsuit) alleging that the Work + or a Contribution incorporated within the Work constitutes direct + or contributory patent infringement, then any patent licenses + granted to You under this License for that Work shall terminate + as of the date such litigation is filed. + + 4. Redistribution. 
You may reproduce and distribute copies of the + Work or Derivative Works thereof in any medium, with or without + modifications, and in Source or Object form, provided that You + meet the following conditions: + + (a) You must give any other recipients of the Work or + Derivative Works a copy of this License; and + + (b) You must cause any modified files to carry prominent notices + stating that You changed the files; and + + (c) You must retain, in the Source form of any Derivative Works + that You distribute, all copyright, patent, trademark, and + attribution notices from the Source form of the Work, + excluding those notices that do not pertain to any part of + the Derivative Works; and + + (d) If the Work includes a "NOTICE" text file as part of its + distribution, then any Derivative Works that You distribute must + include a readable copy of the attribution notices contained + within such NOTICE file, excluding those notices that do not + pertain to any part of the Derivative Works, in at least one + of the following places: within a NOTICE text file distributed + as part of the Derivative Works; within the Source form or + documentation, if provided along with the Derivative Works; or, + within a display generated by the Derivative Works, if and + wherever such third-party notices normally appear. The contents + of the NOTICE file are for informational purposes only and + do not modify the License. You may add Your own attribution + notices within Derivative Works that You distribute, alongside + or as an addendum to the NOTICE text from the Work, provided + that such additional attribution notices cannot be construed + as modifying the License. 
+ + You may add Your own copyright statement to Your modifications and + may provide additional or different license terms and conditions + for use, reproduction, or distribution of Your modifications, or + for any such Derivative Works as a whole, provided Your use, + reproduction, and distribution of the Work otherwise complies with + the conditions stated in this License. + + 5. Submission of Contributions. Unless You explicitly state otherwise, + any Contribution intentionally submitted for inclusion in the Work + by You to the Licensor shall be under the terms and conditions of + this License, without any additional terms or conditions. + Notwithstanding the above, nothing herein shall supersede or modify + the terms of any separate license agreement you may have executed + with Licensor regarding such Contributions. + + 6. Trademarks. This License does not grant permission to use the trade + names, trademarks, service marks, or product names of the Licensor, + except as required for reasonable and customary use in describing the + origin of the Work and reproducing the content of the NOTICE file. + + 7. Disclaimer of Warranty. Unless required by applicable law or + agreed to in writing, Licensor provides the Work (and each + Contributor provides its Contributions) on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or + implied, including, without limitation, any warranties or conditions + of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A + PARTICULAR PURPOSE. You are solely responsible for determining the + appropriateness of using or redistributing the Work and assume any + risks associated with Your exercise of permissions under this License. + + 8. Limitation of Liability. 
In no event and under no legal theory, + whether in tort (including negligence), contract, or otherwise, + unless required by applicable law (such as deliberate and grossly + negligent acts) or agreed to in writing, shall any Contributor be + liable to You for damages, including any direct, indirect, special, + incidental, or consequential damages of any character arising as a + result of this License or out of the use or inability to use the + Work (including but not limited to damages for loss of goodwill, + work stoppage, computer failure or malfunction, or any and all + other commercial damages or losses), even if such Contributor + has been advised of the possibility of such damages. + + 9. Accepting Warranty or Additional Liability. While redistributing + the Work or Derivative Works thereof, You may choose to offer, + and charge a fee for, acceptance of support, warranty, indemnity, + or other liability obligations and/or rights consistent with this + License. However, in accepting such obligations, You may act only + on Your own behalf and on Your sole responsibility, not on behalf + of any other Contributor, and only if You agree to indemnify, + defend, and hold each Contributor harmless for any liability + incurred by, or claims asserted against, such Contributor by reason + of your accepting any such warranty or additional liability. + + END OF TERMS AND CONDITIONS + + APPENDIX: How to apply the Apache License to your work. + + To apply the Apache License to your work, attach the following + boilerplate notice, with the fields enclosed by brackets "{}" + replaced with your own identifying information. (Don't include + the brackets!) The text should be enclosed in the appropriate + comment syntax for the file format. We also recommend that a + file or class name and description of purpose be included on the + same "printed page" as the copyright notice for easier + identification within third-party archives. + + Copyright 2018-2020 Amazon.com, Inc. 
or its affiliates. All Rights Reserved. + + Licensed under the Apache License, Version 2.0 (the "License"); + you may not use this file except in compliance with the License. + You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + + Unless required by applicable law or agreed to in writing, software + distributed under the License is distributed on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + See the License for the specific language governing permissions and + limitations under the License. \ No newline at end of file diff --git a/amplify/functions/downloadDocument/node_modules/@smithy/util-base64/README.md b/amplify/functions/downloadDocument/node_modules/@smithy/util-base64/README.md new file mode 100644 index 0000000..c9b6c87 --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@smithy/util-base64/README.md @@ -0,0 +1,4 @@ +# @smithy/util-base64 + +[![NPM version](https://img.shields.io/npm/v/@smithy/util-base64/latest.svg)](https://www.npmjs.com/package/@smithy/util-base64) +[![NPM downloads](https://img.shields.io/npm/dm/@smithy/util-base64.svg)](https://www.npmjs.com/package/@smithy/util-base64) diff --git a/amplify/functions/downloadDocument/node_modules/@smithy/util-base64/dist-cjs/constants.browser.js b/amplify/functions/downloadDocument/node_modules/@smithy/util-base64/dist-cjs/constants.browser.js new file mode 100644 index 0000000..d35d09f --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@smithy/util-base64/dist-cjs/constants.browser.js @@ -0,0 +1,35 @@ +"use strict"; +Object.defineProperty(exports, "__esModule", { value: true }); +exports.maxLetterValue = exports.bitsPerByte = exports.bitsPerLetter = exports.alphabetByValue = exports.alphabetByEncoding = void 0; +const alphabetByEncoding = {}; +exports.alphabetByEncoding = alphabetByEncoding; +const alphabetByValue = new Array(64); +exports.alphabetByValue = alphabetByValue; +for (let 
i = 0, start = "A".charCodeAt(0), limit = "Z".charCodeAt(0); i + start <= limit; i++) { + const char = String.fromCharCode(i + start); + alphabetByEncoding[char] = i; + alphabetByValue[i] = char; +} +for (let i = 0, start = "a".charCodeAt(0), limit = "z".charCodeAt(0); i + start <= limit; i++) { + const char = String.fromCharCode(i + start); + const index = i + 26; + alphabetByEncoding[char] = index; + alphabetByValue[index] = char; +} +for (let i = 0; i < 10; i++) { + alphabetByEncoding[i.toString(10)] = i + 52; + const char = i.toString(10); + const index = i + 52; + alphabetByEncoding[char] = index; + alphabetByValue[index] = char; +} +alphabetByEncoding["+"] = 62; +alphabetByValue[62] = "+"; +alphabetByEncoding["/"] = 63; +alphabetByValue[63] = "/"; +const bitsPerLetter = 6; +exports.bitsPerLetter = bitsPerLetter; +const bitsPerByte = 8; +exports.bitsPerByte = bitsPerByte; +const maxLetterValue = 0b111111; +exports.maxLetterValue = maxLetterValue; diff --git a/amplify/functions/downloadDocument/node_modules/@smithy/util-base64/dist-cjs/fromBase64.browser.js b/amplify/functions/downloadDocument/node_modules/@smithy/util-base64/dist-cjs/fromBase64.browser.js new file mode 100644 index 0000000..a5baffd --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@smithy/util-base64/dist-cjs/fromBase64.browser.js @@ -0,0 +1,40 @@ +"use strict"; +Object.defineProperty(exports, "__esModule", { value: true }); +exports.fromBase64 = void 0; +const constants_browser_1 = require("./constants.browser"); +const fromBase64 = (input) => { + let totalByteLength = (input.length / 4) * 3; + if (input.slice(-2) === "==") { + totalByteLength -= 2; + } + else if (input.slice(-1) === "=") { + totalByteLength--; + } + const out = new ArrayBuffer(totalByteLength); + const dataView = new DataView(out); + for (let i = 0; i < input.length; i += 4) { + let bits = 0; + let bitLength = 0; + for (let j = i, limit = i + 3; j <= limit; j++) { + if (input[j] !== "=") { + if (!(input[j] 
in constants_browser_1.alphabetByEncoding)) { + throw new TypeError(`Invalid character ${input[j]} in base64 string.`); + } + bits |= constants_browser_1.alphabetByEncoding[input[j]] << ((limit - j) * constants_browser_1.bitsPerLetter); + bitLength += constants_browser_1.bitsPerLetter; + } + else { + bits >>= constants_browser_1.bitsPerLetter; + } + } + const chunkOffset = (i / 4) * 3; + bits >>= bitLength % constants_browser_1.bitsPerByte; + const byteLength = Math.floor(bitLength / constants_browser_1.bitsPerByte); + for (let k = 0; k < byteLength; k++) { + const offset = (byteLength - k - 1) * constants_browser_1.bitsPerByte; + dataView.setUint8(chunkOffset + k, (bits & (255 << offset)) >> offset); + } + } + return new Uint8Array(out); +}; +exports.fromBase64 = fromBase64; diff --git a/amplify/functions/downloadDocument/node_modules/@smithy/util-base64/dist-cjs/fromBase64.js b/amplify/functions/downloadDocument/node_modules/@smithy/util-base64/dist-cjs/fromBase64.js new file mode 100644 index 0000000..b06a7b8 --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@smithy/util-base64/dist-cjs/fromBase64.js @@ -0,0 +1,16 @@ +"use strict"; +Object.defineProperty(exports, "__esModule", { value: true }); +exports.fromBase64 = void 0; +const util_buffer_from_1 = require("@smithy/util-buffer-from"); +const BASE64_REGEX = /^[A-Za-z0-9+/]*={0,2}$/; +const fromBase64 = (input) => { + if ((input.length * 3) % 4 !== 0) { + throw new TypeError(`Incorrect padding on base64 string.`); + } + if (!BASE64_REGEX.exec(input)) { + throw new TypeError(`Invalid base64 string.`); + } + const buffer = (0, util_buffer_from_1.fromString)(input, "base64"); + return new Uint8Array(buffer.buffer, buffer.byteOffset, buffer.byteLength); +}; +exports.fromBase64 = fromBase64; diff --git a/amplify/functions/downloadDocument/node_modules/@smithy/util-base64/dist-cjs/index.js b/amplify/functions/downloadDocument/node_modules/@smithy/util-base64/dist-cjs/index.js new file mode 100644 
index 0000000..02848d0 --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@smithy/util-base64/dist-cjs/index.js @@ -0,0 +1,27 @@ +var __defProp = Object.defineProperty; +var __getOwnPropDesc = Object.getOwnPropertyDescriptor; +var __getOwnPropNames = Object.getOwnPropertyNames; +var __hasOwnProp = Object.prototype.hasOwnProperty; +var __copyProps = (to, from, except, desc) => { + if (from && typeof from === "object" || typeof from === "function") { + for (let key of __getOwnPropNames(from)) + if (!__hasOwnProp.call(to, key) && key !== except) + __defProp(to, key, { get: () => from[key], enumerable: !(desc = __getOwnPropDesc(from, key)) || desc.enumerable }); + } + return to; +}; +var __reExport = (target, mod, secondTarget) => (__copyProps(target, mod, "default"), secondTarget && __copyProps(secondTarget, mod, "default")); +var __toCommonJS = (mod) => __copyProps(__defProp({}, "__esModule", { value: true }), mod); + +// src/index.ts +var src_exports = {}; +module.exports = __toCommonJS(src_exports); +__reExport(src_exports, require("././fromBase64"), module.exports); +__reExport(src_exports, require("././toBase64"), module.exports); +// Annotate the CommonJS export names for ESM import in node: + +0 && (module.exports = { + fromBase64, + toBase64 +}); + diff --git a/amplify/functions/downloadDocument/node_modules/@smithy/util-base64/dist-cjs/toBase64.browser.js b/amplify/functions/downloadDocument/node_modules/@smithy/util-base64/dist-cjs/toBase64.browser.js new file mode 100644 index 0000000..e294f3f --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@smithy/util-base64/dist-cjs/toBase64.browser.js @@ -0,0 +1,39 @@ +"use strict"; +Object.defineProperty(exports, "__esModule", { value: true }); +exports.toBase64 = void 0; +const util_utf8_1 = require("@smithy/util-utf8"); +const constants_browser_1 = require("./constants.browser"); +function toBase64(_input) { + let input; + if (typeof _input === "string") { + input = (0, 
util_utf8_1.fromUtf8)(_input); + } + else { + input = _input; + } + const isArrayLike = typeof input === "object" && typeof input.length === "number"; + const isUint8Array = typeof input === "object" && + typeof input.byteOffset === "number" && + typeof input.byteLength === "number"; + if (!isArrayLike && !isUint8Array) { + throw new Error("@smithy/util-base64: toBase64 encoder function only accepts string | Uint8Array."); + } + let str = ""; + for (let i = 0; i < input.length; i += 3) { + let bits = 0; + let bitLength = 0; + for (let j = i, limit = Math.min(i + 3, input.length); j < limit; j++) { + bits |= input[j] << ((limit - j - 1) * constants_browser_1.bitsPerByte); + bitLength += constants_browser_1.bitsPerByte; + } + const bitClusterCount = Math.ceil(bitLength / constants_browser_1.bitsPerLetter); + bits <<= bitClusterCount * constants_browser_1.bitsPerLetter - bitLength; + for (let k = 1; k <= bitClusterCount; k++) { + const offset = (bitClusterCount - k) * constants_browser_1.bitsPerLetter; + str += constants_browser_1.alphabetByValue[(bits & (constants_browser_1.maxLetterValue << offset)) >> offset]; + } + str += "==".slice(0, 4 - bitClusterCount); + } + return str; +} +exports.toBase64 = toBase64; diff --git a/amplify/functions/downloadDocument/node_modules/@smithy/util-base64/dist-cjs/toBase64.js b/amplify/functions/downloadDocument/node_modules/@smithy/util-base64/dist-cjs/toBase64.js new file mode 100644 index 0000000..0590ce3 --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@smithy/util-base64/dist-cjs/toBase64.js @@ -0,0 +1,19 @@ +"use strict"; +Object.defineProperty(exports, "__esModule", { value: true }); +exports.toBase64 = void 0; +const util_buffer_from_1 = require("@smithy/util-buffer-from"); +const util_utf8_1 = require("@smithy/util-utf8"); +const toBase64 = (_input) => { + let input; + if (typeof _input === "string") { + input = (0, util_utf8_1.fromUtf8)(_input); + } + else { + input = _input; + } + if (typeof input !== 
"object" || typeof input.byteOffset !== "number" || typeof input.byteLength !== "number") { + throw new Error("@smithy/util-base64: toBase64 encoder function only accepts string | Uint8Array."); + } + return (0, util_buffer_from_1.fromArrayBuffer)(input.buffer, input.byteOffset, input.byteLength).toString("base64"); +}; +exports.toBase64 = toBase64; diff --git a/amplify/functions/downloadDocument/node_modules/@smithy/util-base64/dist-es/constants.browser.js b/amplify/functions/downloadDocument/node_modules/@smithy/util-base64/dist-es/constants.browser.js new file mode 100644 index 0000000..fd4df4d --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@smithy/util-base64/dist-es/constants.browser.js @@ -0,0 +1,28 @@ +const alphabetByEncoding = {}; +const alphabetByValue = new Array(64); +for (let i = 0, start = "A".charCodeAt(0), limit = "Z".charCodeAt(0); i + start <= limit; i++) { + const char = String.fromCharCode(i + start); + alphabetByEncoding[char] = i; + alphabetByValue[i] = char; +} +for (let i = 0, start = "a".charCodeAt(0), limit = "z".charCodeAt(0); i + start <= limit; i++) { + const char = String.fromCharCode(i + start); + const index = i + 26; + alphabetByEncoding[char] = index; + alphabetByValue[index] = char; +} +for (let i = 0; i < 10; i++) { + alphabetByEncoding[i.toString(10)] = i + 52; + const char = i.toString(10); + const index = i + 52; + alphabetByEncoding[char] = index; + alphabetByValue[index] = char; +} +alphabetByEncoding["+"] = 62; +alphabetByValue[62] = "+"; +alphabetByEncoding["/"] = 63; +alphabetByValue[63] = "/"; +const bitsPerLetter = 6; +const bitsPerByte = 8; +const maxLetterValue = 0b111111; +export { alphabetByEncoding, alphabetByValue, bitsPerLetter, bitsPerByte, maxLetterValue }; diff --git a/amplify/functions/downloadDocument/node_modules/@smithy/util-base64/dist-es/fromBase64.browser.js b/amplify/functions/downloadDocument/node_modules/@smithy/util-base64/dist-es/fromBase64.browser.js new file mode 100644 index 
0000000..c2c6a66 --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@smithy/util-base64/dist-es/fromBase64.browser.js @@ -0,0 +1,36 @@ +import { alphabetByEncoding, bitsPerByte, bitsPerLetter } from "./constants.browser"; +export const fromBase64 = (input) => { + let totalByteLength = (input.length / 4) * 3; + if (input.slice(-2) === "==") { + totalByteLength -= 2; + } + else if (input.slice(-1) === "=") { + totalByteLength--; + } + const out = new ArrayBuffer(totalByteLength); + const dataView = new DataView(out); + for (let i = 0; i < input.length; i += 4) { + let bits = 0; + let bitLength = 0; + for (let j = i, limit = i + 3; j <= limit; j++) { + if (input[j] !== "=") { + if (!(input[j] in alphabetByEncoding)) { + throw new TypeError(`Invalid character ${input[j]} in base64 string.`); + } + bits |= alphabetByEncoding[input[j]] << ((limit - j) * bitsPerLetter); + bitLength += bitsPerLetter; + } + else { + bits >>= bitsPerLetter; + } + } + const chunkOffset = (i / 4) * 3; + bits >>= bitLength % bitsPerByte; + const byteLength = Math.floor(bitLength / bitsPerByte); + for (let k = 0; k < byteLength; k++) { + const offset = (byteLength - k - 1) * bitsPerByte; + dataView.setUint8(chunkOffset + k, (bits & (255 << offset)) >> offset); + } + } + return new Uint8Array(out); +}; diff --git a/amplify/functions/downloadDocument/node_modules/@smithy/util-base64/dist-es/fromBase64.js b/amplify/functions/downloadDocument/node_modules/@smithy/util-base64/dist-es/fromBase64.js new file mode 100644 index 0000000..5197e93 --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@smithy/util-base64/dist-es/fromBase64.js @@ -0,0 +1,12 @@ +import { fromString } from "@smithy/util-buffer-from"; +const BASE64_REGEX = /^[A-Za-z0-9+/]*={0,2}$/; +export const fromBase64 = (input) => { + if ((input.length * 3) % 4 !== 0) { + throw new TypeError(`Incorrect padding on base64 string.`); + } + if (!BASE64_REGEX.exec(input)) { + throw new TypeError(`Invalid base64 
string.`); + } + const buffer = fromString(input, "base64"); + return new Uint8Array(buffer.buffer, buffer.byteOffset, buffer.byteLength); +}; diff --git a/amplify/functions/downloadDocument/node_modules/@smithy/util-base64/dist-es/index.js b/amplify/functions/downloadDocument/node_modules/@smithy/util-base64/dist-es/index.js new file mode 100644 index 0000000..594bd43 --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@smithy/util-base64/dist-es/index.js @@ -0,0 +1,2 @@ +export * from "./fromBase64"; +export * from "./toBase64"; diff --git a/amplify/functions/downloadDocument/node_modules/@smithy/util-base64/dist-es/toBase64.browser.js b/amplify/functions/downloadDocument/node_modules/@smithy/util-base64/dist-es/toBase64.browser.js new file mode 100644 index 0000000..2a03a9d --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@smithy/util-base64/dist-es/toBase64.browser.js @@ -0,0 +1,35 @@ +import { fromUtf8 } from "@smithy/util-utf8"; +import { alphabetByValue, bitsPerByte, bitsPerLetter, maxLetterValue } from "./constants.browser"; +export function toBase64(_input) { + let input; + if (typeof _input === "string") { + input = fromUtf8(_input); + } + else { + input = _input; + } + const isArrayLike = typeof input === "object" && typeof input.length === "number"; + const isUint8Array = typeof input === "object" && + typeof input.byteOffset === "number" && + typeof input.byteLength === "number"; + if (!isArrayLike && !isUint8Array) { + throw new Error("@smithy/util-base64: toBase64 encoder function only accepts string | Uint8Array."); + } + let str = ""; + for (let i = 0; i < input.length; i += 3) { + let bits = 0; + let bitLength = 0; + for (let j = i, limit = Math.min(i + 3, input.length); j < limit; j++) { + bits |= input[j] << ((limit - j - 1) * bitsPerByte); + bitLength += bitsPerByte; + } + const bitClusterCount = Math.ceil(bitLength / bitsPerLetter); + bits <<= bitClusterCount * bitsPerLetter - bitLength; + for (let k = 1; k 
<= bitClusterCount; k++) { + const offset = (bitClusterCount - k) * bitsPerLetter; + str += alphabetByValue[(bits & (maxLetterValue << offset)) >> offset]; + } + str += "==".slice(0, 4 - bitClusterCount); + } + return str; +} diff --git a/amplify/functions/downloadDocument/node_modules/@smithy/util-base64/dist-es/toBase64.js b/amplify/functions/downloadDocument/node_modules/@smithy/util-base64/dist-es/toBase64.js new file mode 100644 index 0000000..61f03ce --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@smithy/util-base64/dist-es/toBase64.js @@ -0,0 +1,15 @@ +import { fromArrayBuffer } from "@smithy/util-buffer-from"; +import { fromUtf8 } from "@smithy/util-utf8"; +export const toBase64 = (_input) => { + let input; + if (typeof _input === "string") { + input = fromUtf8(_input); + } + else { + input = _input; + } + if (typeof input !== "object" || typeof input.byteOffset !== "number" || typeof input.byteLength !== "number") { + throw new Error("@smithy/util-base64: toBase64 encoder function only accepts string | Uint8Array."); + } + return fromArrayBuffer(input.buffer, input.byteOffset, input.byteLength).toString("base64"); +}; diff --git a/amplify/functions/downloadDocument/node_modules/@smithy/util-base64/dist-types/constants.browser.d.ts b/amplify/functions/downloadDocument/node_modules/@smithy/util-base64/dist-types/constants.browser.d.ts new file mode 100644 index 0000000..eb750ea --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@smithy/util-base64/dist-types/constants.browser.d.ts @@ -0,0 +1,6 @@ +declare const alphabetByEncoding: Record; +declare const alphabetByValue: Array; +declare const bitsPerLetter = 6; +declare const bitsPerByte = 8; +declare const maxLetterValue = 63; +export { alphabetByEncoding, alphabetByValue, bitsPerLetter, bitsPerByte, maxLetterValue }; diff --git a/amplify/functions/downloadDocument/node_modules/@smithy/util-base64/dist-types/fromBase64.browser.d.ts 
b/amplify/functions/downloadDocument/node_modules/@smithy/util-base64/dist-types/fromBase64.browser.d.ts new file mode 100644 index 0000000..6a640f1 --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@smithy/util-base64/dist-types/fromBase64.browser.d.ts @@ -0,0 +1,8 @@ +/** + * Converts a base-64 encoded string to a Uint8Array of bytes. + * + * @param input The base-64 encoded string + * + * @see https://tools.ietf.org/html/rfc4648#section-4 + */ +export declare const fromBase64: (input: string) => Uint8Array; diff --git a/amplify/functions/downloadDocument/node_modules/@smithy/util-base64/dist-types/fromBase64.d.ts b/amplify/functions/downloadDocument/node_modules/@smithy/util-base64/dist-types/fromBase64.d.ts new file mode 100644 index 0000000..1878a89 --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@smithy/util-base64/dist-types/fromBase64.d.ts @@ -0,0 +1,7 @@ +/** + * Converts a base-64 encoded string to a Uint8Array of bytes using Node.JS's + * `buffer` module. 
+ * + * @param input The base-64 encoded string + */ +export declare const fromBase64: (input: string) => Uint8Array; diff --git a/amplify/functions/downloadDocument/node_modules/@smithy/util-base64/dist-types/index.d.ts b/amplify/functions/downloadDocument/node_modules/@smithy/util-base64/dist-types/index.d.ts new file mode 100644 index 0000000..594bd43 --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@smithy/util-base64/dist-types/index.d.ts @@ -0,0 +1,2 @@ +export * from "./fromBase64"; +export * from "./toBase64"; diff --git a/amplify/functions/downloadDocument/node_modules/@smithy/util-base64/dist-types/toBase64.browser.d.ts b/amplify/functions/downloadDocument/node_modules/@smithy/util-base64/dist-types/toBase64.browser.d.ts new file mode 100644 index 0000000..5f5615e --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@smithy/util-base64/dist-types/toBase64.browser.d.ts @@ -0,0 +1,9 @@ +/** + * Converts a Uint8Array of binary data or a utf-8 string to a base-64 encoded string. + * + * @param _input - the binary data or string to encode. + * @returns base64 string. + * + * @see https://tools.ietf.org/html/rfc4648#section-4 + */ +export declare function toBase64(_input: Uint8Array | string): string; diff --git a/amplify/functions/downloadDocument/node_modules/@smithy/util-base64/dist-types/toBase64.d.ts b/amplify/functions/downloadDocument/node_modules/@smithy/util-base64/dist-types/toBase64.d.ts new file mode 100644 index 0000000..96bd0ed --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@smithy/util-base64/dist-types/toBase64.d.ts @@ -0,0 +1,8 @@ +/** + * Converts a Uint8Array of binary data or a utf-8 string to a base-64 encoded string using + * Node.JS's `buffer` module. + * + * @param _input - the binary data or string to encode. + * @returns base64 string. 
+ */ +export declare const toBase64: (_input: Uint8Array | string) => string; diff --git a/amplify/functions/downloadDocument/node_modules/@smithy/util-base64/dist-types/ts3.4/constants.browser.d.ts b/amplify/functions/downloadDocument/node_modules/@smithy/util-base64/dist-types/ts3.4/constants.browser.d.ts new file mode 100644 index 0000000..61c36c8 --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@smithy/util-base64/dist-types/ts3.4/constants.browser.d.ts @@ -0,0 +1,6 @@ +declare const alphabetByEncoding: Record; +declare const alphabetByValue: Array; +declare const bitsPerLetter = 6; +declare const bitsPerByte = 8; +declare const maxLetterValue = 63; +export { alphabetByEncoding, alphabetByValue, bitsPerLetter, bitsPerByte, maxLetterValue }; diff --git a/amplify/functions/downloadDocument/node_modules/@smithy/util-base64/dist-types/ts3.4/fromBase64.browser.d.ts b/amplify/functions/downloadDocument/node_modules/@smithy/util-base64/dist-types/ts3.4/fromBase64.browser.d.ts new file mode 100644 index 0000000..3a50006 --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@smithy/util-base64/dist-types/ts3.4/fromBase64.browser.d.ts @@ -0,0 +1,8 @@ +/** + * Converts a base-64 encoded string to a Uint8Array of bytes. + * + * @param input The base-64 encoded string + * + * @see https://tools.ietf.org/html/rfc4648#section-4 + */ +export declare const fromBase64: (input: string) => Uint8Array; diff --git a/amplify/functions/downloadDocument/node_modules/@smithy/util-base64/dist-types/ts3.4/fromBase64.d.ts b/amplify/functions/downloadDocument/node_modules/@smithy/util-base64/dist-types/ts3.4/fromBase64.d.ts new file mode 100644 index 0000000..f84c7c6 --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@smithy/util-base64/dist-types/ts3.4/fromBase64.d.ts @@ -0,0 +1,7 @@ +/** + * Converts a base-64 encoded string to a Uint8Array of bytes using Node.JS's + * `buffer` module. 
+ * + * @param input The base-64 encoded string + */ +export declare const fromBase64: (input: string) => Uint8Array; diff --git a/amplify/functions/downloadDocument/node_modules/@smithy/util-base64/dist-types/ts3.4/index.d.ts b/amplify/functions/downloadDocument/node_modules/@smithy/util-base64/dist-types/ts3.4/index.d.ts new file mode 100644 index 0000000..c4e1d03 --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@smithy/util-base64/dist-types/ts3.4/index.d.ts @@ -0,0 +1,2 @@ +export * from "./fromBase64"; +export * from "./toBase64"; diff --git a/amplify/functions/downloadDocument/node_modules/@smithy/util-base64/dist-types/ts3.4/toBase64.browser.d.ts b/amplify/functions/downloadDocument/node_modules/@smithy/util-base64/dist-types/ts3.4/toBase64.browser.d.ts new file mode 100644 index 0000000..260f696 --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@smithy/util-base64/dist-types/ts3.4/toBase64.browser.d.ts @@ -0,0 +1,9 @@ +/** + * Converts a Uint8Array of binary data or a utf-8 string to a base-64 encoded string. + * + * @param _input - the binary data or string to encode. + * @returns base64 string. + * + * @see https://tools.ietf.org/html/rfc4648#section-4 + */ +export declare function toBase64(_input: Uint8Array | string): string; diff --git a/amplify/functions/downloadDocument/node_modules/@smithy/util-base64/dist-types/ts3.4/toBase64.d.ts b/amplify/functions/downloadDocument/node_modules/@smithy/util-base64/dist-types/ts3.4/toBase64.d.ts new file mode 100644 index 0000000..7e8bb70 --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@smithy/util-base64/dist-types/ts3.4/toBase64.d.ts @@ -0,0 +1,8 @@ +/** + * Converts a Uint8Array of binary data or a utf-8 string to a base-64 encoded string using + * Node.JS's `buffer` module. + * + * @param _input - the binary data or string to encode. + * @returns base64 string. 
+ */ +export declare const toBase64: (_input: Uint8Array | string) => string; diff --git a/amplify/functions/downloadDocument/node_modules/@smithy/util-base64/package.json b/amplify/functions/downloadDocument/node_modules/@smithy/util-base64/package.json new file mode 100644 index 0000000..e122233 --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@smithy/util-base64/package.json @@ -0,0 +1,73 @@ +{ + "name": "@smithy/util-base64", + "version": "4.0.0", + "description": "A Base64 <-> UInt8Array converter", + "main": "./dist-cjs/index.js", + "module": "./dist-es/index.js", + "scripts": { + "build": "concurrently 'yarn:build:cjs' 'yarn:build:es' 'yarn:build:types && yarn build:types:downlevel'", + "build:cjs": "node ../../scripts/inline util-base64", + "build:es": "yarn g:tsc -p tsconfig.es.json", + "build:types": "yarn g:tsc -p tsconfig.types.json", + "build:types:downlevel": "rimraf dist-types/ts3.4 && downlevel-dts dist-types dist-types/ts3.4", + "stage-release": "rimraf ./.release && yarn pack && mkdir ./.release && tar zxvf ./package.tgz --directory ./.release && rm ./package.tgz", + "clean": "rimraf ./dist-* && rimraf *.tsbuildinfo || exit 0", + "lint": "eslint -c ../../.eslintrc.js \"src/**/*.ts\"", + "format": "prettier --config ../../prettier.config.js --ignore-path ../.prettierignore --write \"**/*.{ts,md,json}\"", + "test": "yarn g:vitest run", + "test:watch": "yarn g:vitest watch" + }, + "author": { + "name": "AWS SDK for JavaScript Team", + "url": "https://aws.amazon.com/javascript/" + }, + "license": "Apache-2.0", + "dependencies": { + "@smithy/util-buffer-from": "^4.0.0", + "@smithy/util-utf8": "^4.0.0", + "tslib": "^2.6.2" + }, + "devDependencies": { + "@types/node": "^18.11.9", + "concurrently": "7.0.0", + "downlevel-dts": "0.10.1", + "rimraf": "3.0.2", + "typedoc": "0.23.23" + }, + "types": "./dist-types/index.d.ts", + "engines": { + "node": ">=18.0.0" + }, + "typesVersions": { + "<4.0": { + "dist-types/*": [ + "dist-types/ts3.4/*" 
+ ] + } + }, + "files": [ + "dist-*/**" + ], + "browser": { + "./dist-es/fromBase64": "./dist-es/fromBase64.browser", + "./dist-es/toBase64": "./dist-es/toBase64.browser" + }, + "react-native": { + "./dist-es/fromBase64": "./dist-es/fromBase64.browser", + "./dist-es/toBase64": "./dist-es/toBase64.browser", + "./dist-cjs/fromBase64": "./dist-cjs/fromBase64.browser", + "./dist-cjs/toBase64": "./dist-cjs/toBase64.browser" + }, + "homepage": "https://github.com/awslabs/smithy-typescript/tree/main/packages/util-base64", + "repository": { + "type": "git", + "url": "https://github.com/awslabs/smithy-typescript.git", + "directory": "packages/util-base64" + }, + "typedoc": { + "entryPoint": "src/index.ts" + }, + "publishConfig": { + "directory": ".release/package" + } +} \ No newline at end of file diff --git a/amplify/functions/downloadDocument/node_modules/@smithy/util-body-length-browser/LICENSE b/amplify/functions/downloadDocument/node_modules/@smithy/util-body-length-browser/LICENSE new file mode 100644 index 0000000..7b6491b --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@smithy/util-body-length-browser/LICENSE @@ -0,0 +1,201 @@ +Apache License + Version 2.0, January 2004 + http://www.apache.org/licenses/ + + TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION + + 1. Definitions. + + "License" shall mean the terms and conditions for use, reproduction, + and distribution as defined by Sections 1 through 9 of this document. + + "Licensor" shall mean the copyright owner or entity authorized by + the copyright owner that is granting the License. + + "Legal Entity" shall mean the union of the acting entity and all + other entities that control, are controlled by, or are under common + control with that entity. 
For the purposes of this definition, + "control" means (i) the power, direct or indirect, to cause the + direction or management of such entity, whether by contract or + otherwise, or (ii) ownership of fifty percent (50%) or more of the + outstanding shares, or (iii) beneficial ownership of such entity. + + "You" (or "Your") shall mean an individual or Legal Entity + exercising permissions granted by this License. + + "Source" form shall mean the preferred form for making modifications, + including but not limited to software source code, documentation + source, and configuration files. + + "Object" form shall mean any form resulting from mechanical + transformation or translation of a Source form, including but + not limited to compiled object code, generated documentation, + and conversions to other media types. + + "Work" shall mean the work of authorship, whether in Source or + Object form, made available under the License, as indicated by a + copyright notice that is included in or attached to the work + (an example is provided in the Appendix below). + + "Derivative Works" shall mean any work, whether in Source or Object + form, that is based on (or derived from) the Work and for which the + editorial revisions, annotations, elaborations, or other modifications + represent, as a whole, an original work of authorship. For the purposes + of this License, Derivative Works shall not include works that remain + separable from, or merely link (or bind by name) to the interfaces of, + the Work and Derivative Works thereof. + + "Contribution" shall mean any work of authorship, including + the original version of the Work and any modifications or additions + to that Work or Derivative Works thereof, that is intentionally + submitted to Licensor for inclusion in the Work by the copyright owner + or by an individual or Legal Entity authorized to submit on behalf of + the copyright owner. 
For the purposes of this definition, "submitted" + means any form of electronic, verbal, or written communication sent + to the Licensor or its representatives, including but not limited to + communication on electronic mailing lists, source code control systems, + and issue tracking systems that are managed by, or on behalf of, the + Licensor for the purpose of discussing and improving the Work, but + excluding communication that is conspicuously marked or otherwise + designated in writing by the copyright owner as "Not a Contribution." + + "Contributor" shall mean Licensor and any individual or Legal Entity + on behalf of whom a Contribution has been received by Licensor and + subsequently incorporated within the Work. + + 2. Grant of Copyright License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + copyright license to reproduce, prepare Derivative Works of, + publicly display, publicly perform, sublicense, and distribute the + Work and such Derivative Works in Source or Object form. + + 3. Grant of Patent License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + (except as stated in this section) patent license to make, have made, + use, offer to sell, sell, import, and otherwise transfer the Work, + where such license applies only to those patent claims licensable + by such Contributor that are necessarily infringed by their + Contribution(s) alone or by combination of their Contribution(s) + with the Work to which such Contribution(s) was submitted. 
If You + institute patent litigation against any entity (including a + cross-claim or counterclaim in a lawsuit) alleging that the Work + or a Contribution incorporated within the Work constitutes direct + or contributory patent infringement, then any patent licenses + granted to You under this License for that Work shall terminate + as of the date such litigation is filed. + + 4. Redistribution. You may reproduce and distribute copies of the + Work or Derivative Works thereof in any medium, with or without + modifications, and in Source or Object form, provided that You + meet the following conditions: + + (a) You must give any other recipients of the Work or + Derivative Works a copy of this License; and + + (b) You must cause any modified files to carry prominent notices + stating that You changed the files; and + + (c) You must retain, in the Source form of any Derivative Works + that You distribute, all copyright, patent, trademark, and + attribution notices from the Source form of the Work, + excluding those notices that do not pertain to any part of + the Derivative Works; and + + (d) If the Work includes a "NOTICE" text file as part of its + distribution, then any Derivative Works that You distribute must + include a readable copy of the attribution notices contained + within such NOTICE file, excluding those notices that do not + pertain to any part of the Derivative Works, in at least one + of the following places: within a NOTICE text file distributed + as part of the Derivative Works; within the Source form or + documentation, if provided along with the Derivative Works; or, + within a display generated by the Derivative Works, if and + wherever such third-party notices normally appear. The contents + of the NOTICE file are for informational purposes only and + do not modify the License. 
You may add Your own attribution + notices within Derivative Works that You distribute, alongside + or as an addendum to the NOTICE text from the Work, provided + that such additional attribution notices cannot be construed + as modifying the License. + + You may add Your own copyright statement to Your modifications and + may provide additional or different license terms and conditions + for use, reproduction, or distribution of Your modifications, or + for any such Derivative Works as a whole, provided Your use, + reproduction, and distribution of the Work otherwise complies with + the conditions stated in this License. + + 5. Submission of Contributions. Unless You explicitly state otherwise, + any Contribution intentionally submitted for inclusion in the Work + by You to the Licensor shall be under the terms and conditions of + this License, without any additional terms or conditions. + Notwithstanding the above, nothing herein shall supersede or modify + the terms of any separate license agreement you may have executed + with Licensor regarding such Contributions. + + 6. Trademarks. This License does not grant permission to use the trade + names, trademarks, service marks, or product names of the Licensor, + except as required for reasonable and customary use in describing the + origin of the Work and reproducing the content of the NOTICE file. + + 7. Disclaimer of Warranty. Unless required by applicable law or + agreed to in writing, Licensor provides the Work (and each + Contributor provides its Contributions) on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or + implied, including, without limitation, any warranties or conditions + of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A + PARTICULAR PURPOSE. You are solely responsible for determining the + appropriateness of using or redistributing the Work and assume any + risks associated with Your exercise of permissions under this License. + + 8. 
Limitation of Liability. In no event and under no legal theory, + whether in tort (including negligence), contract, or otherwise, + unless required by applicable law (such as deliberate and grossly + negligent acts) or agreed to in writing, shall any Contributor be + liable to You for damages, including any direct, indirect, special, + incidental, or consequential damages of any character arising as a + result of this License or out of the use or inability to use the + Work (including but not limited to damages for loss of goodwill, + work stoppage, computer failure or malfunction, or any and all + other commercial damages or losses), even if such Contributor + has been advised of the possibility of such damages. + + 9. Accepting Warranty or Additional Liability. While redistributing + the Work or Derivative Works thereof, You may choose to offer, + and charge a fee for, acceptance of support, warranty, indemnity, + or other liability obligations and/or rights consistent with this + License. However, in accepting such obligations, You may act only + on Your own behalf and on Your sole responsibility, not on behalf + of any other Contributor, and only if You agree to indemnify, + defend, and hold each Contributor harmless for any liability + incurred by, or claims asserted against, such Contributor by reason + of your accepting any such warranty or additional liability. + + END OF TERMS AND CONDITIONS + + APPENDIX: How to apply the Apache License to your work. + + To apply the Apache License to your work, attach the following + boilerplate notice, with the fields enclosed by brackets "{}" + replaced with your own identifying information. (Don't include + the brackets!) The text should be enclosed in the appropriate + comment syntax for the file format. We also recommend that a + file or class name and description of purpose be included on the + same "printed page" as the copyright notice for easier + identification within third-party archives. 
+ + Copyright 2018-2020 Amazon.com, Inc. or its affiliates. All Rights Reserved. + + Licensed under the Apache License, Version 2.0 (the "License"); + you may not use this file except in compliance with the License. + You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + + Unless required by applicable law or agreed to in writing, software + distributed under the License is distributed on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + See the License for the specific language governing permissions and + limitations under the License. \ No newline at end of file diff --git a/amplify/functions/downloadDocument/node_modules/@smithy/util-body-length-browser/README.md b/amplify/functions/downloadDocument/node_modules/@smithy/util-body-length-browser/README.md new file mode 100644 index 0000000..460d092 --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@smithy/util-body-length-browser/README.md @@ -0,0 +1,12 @@ +# @smithy/util-body-length-browser + +[![NPM version](https://img.shields.io/npm/v/@smithy/util-body-length-browser/latest.svg)](https://www.npmjs.com/package/@smithy/util-body-length-browser) +[![NPM downloads](https://img.shields.io/npm/dm/@smithy/util-body-length-browser.svg)](https://www.npmjs.com/package/@smithy/util-body-length-browser) + +Determines the length of a request body in browsers + +> An internal package + +## Usage + +You probably shouldn't, at least directly. 
diff --git a/amplify/functions/downloadDocument/node_modules/@smithy/util-body-length-browser/dist-cjs/calculateBodyLength.js b/amplify/functions/downloadDocument/node_modules/@smithy/util-body-length-browser/dist-cjs/calculateBodyLength.js new file mode 100644 index 0000000..532e610 --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@smithy/util-body-length-browser/dist-cjs/calculateBodyLength.js @@ -0,0 +1 @@ +module.exports = require("./index.js"); \ No newline at end of file diff --git a/amplify/functions/downloadDocument/node_modules/@smithy/util-body-length-browser/dist-cjs/index.js b/amplify/functions/downloadDocument/node_modules/@smithy/util-body-length-browser/dist-cjs/index.js new file mode 100644 index 0000000..9e872bc --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@smithy/util-body-length-browser/dist-cjs/index.js @@ -0,0 +1,57 @@ +var __defProp = Object.defineProperty; +var __getOwnPropDesc = Object.getOwnPropertyDescriptor; +var __getOwnPropNames = Object.getOwnPropertyNames; +var __hasOwnProp = Object.prototype.hasOwnProperty; +var __name = (target, value) => __defProp(target, "name", { value, configurable: true }); +var __export = (target, all) => { + for (var name in all) + __defProp(target, name, { get: all[name], enumerable: true }); +}; +var __copyProps = (to, from, except, desc) => { + if (from && typeof from === "object" || typeof from === "function") { + for (let key of __getOwnPropNames(from)) + if (!__hasOwnProp.call(to, key) && key !== except) + __defProp(to, key, { get: () => from[key], enumerable: !(desc = __getOwnPropDesc(from, key)) || desc.enumerable }); + } + return to; +}; +var __toCommonJS = (mod) => __copyProps(__defProp({}, "__esModule", { value: true }), mod); + +// src/index.ts +var src_exports = {}; +__export(src_exports, { + calculateBodyLength: () => calculateBodyLength +}); +module.exports = __toCommonJS(src_exports); + +// src/calculateBodyLength.ts +var TEXT_ENCODER = typeof 
TextEncoder == "function" ? new TextEncoder() : null; +var calculateBodyLength = /* @__PURE__ */ __name((body) => { + if (typeof body === "string") { + if (TEXT_ENCODER) { + return TEXT_ENCODER.encode(body).byteLength; + } + let len = body.length; + for (let i = len - 1; i >= 0; i--) { + const code = body.charCodeAt(i); + if (code > 127 && code <= 2047) + len++; + else if (code > 2047 && code <= 65535) + len += 2; + if (code >= 56320 && code <= 57343) + i--; + } + return len; + } else if (typeof body.byteLength === "number") { + return body.byteLength; + } else if (typeof body.size === "number") { + return body.size; + } + throw new Error(`Body Length computation failed for ${body}`); +}, "calculateBodyLength"); +// Annotate the CommonJS export names for ESM import in node: + +0 && (module.exports = { + calculateBodyLength +}); + diff --git a/amplify/functions/downloadDocument/node_modules/@smithy/util-body-length-browser/dist-es/calculateBodyLength.js b/amplify/functions/downloadDocument/node_modules/@smithy/util-body-length-browser/dist-es/calculateBodyLength.js new file mode 100644 index 0000000..6b994ca --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@smithy/util-body-length-browser/dist-es/calculateBodyLength.js @@ -0,0 +1,26 @@ +const TEXT_ENCODER = typeof TextEncoder == "function" ? 
new TextEncoder() : null; +export const calculateBodyLength = (body) => { + if (typeof body === "string") { + if (TEXT_ENCODER) { + return TEXT_ENCODER.encode(body).byteLength; + } + let len = body.length; + for (let i = len - 1; i >= 0; i--) { + const code = body.charCodeAt(i); + if (code > 0x7f && code <= 0x7ff) + len++; + else if (code > 0x7ff && code <= 0xffff) + len += 2; + if (code >= 0xdc00 && code <= 0xdfff) + i--; + } + return len; + } + else if (typeof body.byteLength === "number") { + return body.byteLength; + } + else if (typeof body.size === "number") { + return body.size; + } + throw new Error(`Body Length computation failed for ${body}`); +}; diff --git a/amplify/functions/downloadDocument/node_modules/@smithy/util-body-length-browser/dist-es/index.js b/amplify/functions/downloadDocument/node_modules/@smithy/util-body-length-browser/dist-es/index.js new file mode 100644 index 0000000..16ba478 --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@smithy/util-body-length-browser/dist-es/index.js @@ -0,0 +1 @@ +export * from "./calculateBodyLength"; diff --git a/amplify/functions/downloadDocument/node_modules/@smithy/util-body-length-browser/dist-types/calculateBodyLength.d.ts b/amplify/functions/downloadDocument/node_modules/@smithy/util-body-length-browser/dist-types/calculateBodyLength.d.ts new file mode 100644 index 0000000..8e1bdb0 --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@smithy/util-body-length-browser/dist-types/calculateBodyLength.d.ts @@ -0,0 +1,4 @@ +/** + * @internal + */ +export declare const calculateBodyLength: (body: any) => number | undefined; diff --git a/amplify/functions/downloadDocument/node_modules/@smithy/util-body-length-browser/dist-types/index.d.ts b/amplify/functions/downloadDocument/node_modules/@smithy/util-body-length-browser/dist-types/index.d.ts new file mode 100644 index 0000000..7b4a0d7 --- /dev/null +++ 
b/amplify/functions/downloadDocument/node_modules/@smithy/util-body-length-browser/dist-types/index.d.ts @@ -0,0 +1,4 @@ +/** + * @internal + */ +export * from "./calculateBodyLength"; diff --git a/amplify/functions/downloadDocument/node_modules/@smithy/util-body-length-browser/dist-types/ts3.4/calculateBodyLength.d.ts b/amplify/functions/downloadDocument/node_modules/@smithy/util-body-length-browser/dist-types/ts3.4/calculateBodyLength.d.ts new file mode 100644 index 0000000..3260536 --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@smithy/util-body-length-browser/dist-types/ts3.4/calculateBodyLength.d.ts @@ -0,0 +1,4 @@ +/** + * @internal + */ +export declare const calculateBodyLength: (body: any) => number | undefined; diff --git a/amplify/functions/downloadDocument/node_modules/@smithy/util-body-length-browser/dist-types/ts3.4/index.d.ts b/amplify/functions/downloadDocument/node_modules/@smithy/util-body-length-browser/dist-types/ts3.4/index.d.ts new file mode 100644 index 0000000..ab6cb83 --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@smithy/util-body-length-browser/dist-types/ts3.4/index.d.ts @@ -0,0 +1,4 @@ +/** + * @internal + */ +export * from "./calculateBodyLength"; diff --git a/amplify/functions/downloadDocument/node_modules/@smithy/util-body-length-browser/package.json b/amplify/functions/downloadDocument/node_modules/@smithy/util-body-length-browser/package.json new file mode 100644 index 0000000..b571489 --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@smithy/util-body-length-browser/package.json @@ -0,0 +1,60 @@ +{ + "name": "@smithy/util-body-length-browser", + "description": "Determines the length of a request body in browsers", + "version": "4.0.0", + "scripts": { + "build": "concurrently 'yarn:build:cjs' 'yarn:build:es' 'yarn:build:types && yarn build:types:downlevel'", + "build:cjs": "node ../../scripts/inline util-body-length-browser", + "build:es": "yarn g:tsc -p tsconfig.es.json", 
+ "build:types": "yarn g:tsc -p tsconfig.types.json", + "build:types:downlevel": "rimraf dist-types/ts3.4 && downlevel-dts dist-types dist-types/ts3.4", + "stage-release": "rimraf ./.release && yarn pack && mkdir ./.release && tar zxvf ./package.tgz --directory ./.release && rm ./package.tgz", + "clean": "rimraf ./dist-* && rimraf *.tsbuildinfo || exit 0", + "lint": "eslint -c ../../.eslintrc.js \"src/**/*.ts\"", + "format": "prettier --config ../../prettier.config.js --ignore-path ../.prettierignore --write \"**/*.{ts,md,json}\"", + "test": "yarn g:vitest run", + "test:watch": "yarn g:vitest watch" + }, + "main": "./dist-cjs/index.js", + "module": "./dist-es/index.js", + "types": "./dist-types/index.d.ts", + "author": { + "name": "AWS SDK for JavaScript Team", + "url": "https://aws.amazon.com/javascript/" + }, + "license": "Apache-2.0", + "dependencies": { + "tslib": "^2.6.2" + }, + "typesVersions": { + "<4.0": { + "dist-types/*": [ + "dist-types/ts3.4/*" + ] + } + }, + "files": [ + "dist-*/**" + ], + "homepage": "https://github.com/awslabs/smithy-typescript/tree/main/packages/util-body-length-browser", + "repository": { + "type": "git", + "url": "https://github.com/awslabs/smithy-typescript.git", + "directory": "packages/util-body-length-browser" + }, + "devDependencies": { + "concurrently": "7.0.0", + "downlevel-dts": "0.10.1", + "rimraf": "3.0.2", + "typedoc": "0.23.23" + }, + "typedoc": { + "entryPoint": "src/index.ts" + }, + "publishConfig": { + "directory": ".release/package" + }, + "engines": { + "node": ">=18.0.0" + } +} \ No newline at end of file diff --git a/amplify/functions/downloadDocument/node_modules/@smithy/util-body-length-node/LICENSE b/amplify/functions/downloadDocument/node_modules/@smithy/util-body-length-node/LICENSE new file mode 100644 index 0000000..7b6491b --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@smithy/util-body-length-node/LICENSE @@ -0,0 +1,201 @@ +Apache License + Version 2.0, January 2004 + 
http://www.apache.org/licenses/ + + TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION + + 1. Definitions. + + "License" shall mean the terms and conditions for use, reproduction, + and distribution as defined by Sections 1 through 9 of this document. + + "Licensor" shall mean the copyright owner or entity authorized by + the copyright owner that is granting the License. + + "Legal Entity" shall mean the union of the acting entity and all + other entities that control, are controlled by, or are under common + control with that entity. For the purposes of this definition, + "control" means (i) the power, direct or indirect, to cause the + direction or management of such entity, whether by contract or + otherwise, or (ii) ownership of fifty percent (50%) or more of the + outstanding shares, or (iii) beneficial ownership of such entity. + + "You" (or "Your") shall mean an individual or Legal Entity + exercising permissions granted by this License. + + "Source" form shall mean the preferred form for making modifications, + including but not limited to software source code, documentation + source, and configuration files. + + "Object" form shall mean any form resulting from mechanical + transformation or translation of a Source form, including but + not limited to compiled object code, generated documentation, + and conversions to other media types. + + "Work" shall mean the work of authorship, whether in Source or + Object form, made available under the License, as indicated by a + copyright notice that is included in or attached to the work + (an example is provided in the Appendix below). + + "Derivative Works" shall mean any work, whether in Source or Object + form, that is based on (or derived from) the Work and for which the + editorial revisions, annotations, elaborations, or other modifications + represent, as a whole, an original work of authorship. 
For the purposes + of this License, Derivative Works shall not include works that remain + separable from, or merely link (or bind by name) to the interfaces of, + the Work and Derivative Works thereof. + + "Contribution" shall mean any work of authorship, including + the original version of the Work and any modifications or additions + to that Work or Derivative Works thereof, that is intentionally + submitted to Licensor for inclusion in the Work by the copyright owner + or by an individual or Legal Entity authorized to submit on behalf of + the copyright owner. For the purposes of this definition, "submitted" + means any form of electronic, verbal, or written communication sent + to the Licensor or its representatives, including but not limited to + communication on electronic mailing lists, source code control systems, + and issue tracking systems that are managed by, or on behalf of, the + Licensor for the purpose of discussing and improving the Work, but + excluding communication that is conspicuously marked or otherwise + designated in writing by the copyright owner as "Not a Contribution." + + "Contributor" shall mean Licensor and any individual or Legal Entity + on behalf of whom a Contribution has been received by Licensor and + subsequently incorporated within the Work. + + 2. Grant of Copyright License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + copyright license to reproduce, prepare Derivative Works of, + publicly display, publicly perform, sublicense, and distribute the + Work and such Derivative Works in Source or Object form. + + 3. Grant of Patent License. 
Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + (except as stated in this section) patent license to make, have made, + use, offer to sell, sell, import, and otherwise transfer the Work, + where such license applies only to those patent claims licensable + by such Contributor that are necessarily infringed by their + Contribution(s) alone or by combination of their Contribution(s) + with the Work to which such Contribution(s) was submitted. If You + institute patent litigation against any entity (including a + cross-claim or counterclaim in a lawsuit) alleging that the Work + or a Contribution incorporated within the Work constitutes direct + or contributory patent infringement, then any patent licenses + granted to You under this License for that Work shall terminate + as of the date such litigation is filed. + + 4. Redistribution. You may reproduce and distribute copies of the + Work or Derivative Works thereof in any medium, with or without + modifications, and in Source or Object form, provided that You + meet the following conditions: + + (a) You must give any other recipients of the Work or + Derivative Works a copy of this License; and + + (b) You must cause any modified files to carry prominent notices + stating that You changed the files; and + + (c) You must retain, in the Source form of any Derivative Works + that You distribute, all copyright, patent, trademark, and + attribution notices from the Source form of the Work, + excluding those notices that do not pertain to any part of + the Derivative Works; and + + (d) If the Work includes a "NOTICE" text file as part of its + distribution, then any Derivative Works that You distribute must + include a readable copy of the attribution notices contained + within such NOTICE file, excluding those notices that do not + pertain to any part of the Derivative Works, in at least one + of 
the following places: within a NOTICE text file distributed + as part of the Derivative Works; within the Source form or + documentation, if provided along with the Derivative Works; or, + within a display generated by the Derivative Works, if and + wherever such third-party notices normally appear. The contents + of the NOTICE file are for informational purposes only and + do not modify the License. You may add Your own attribution + notices within Derivative Works that You distribute, alongside + or as an addendum to the NOTICE text from the Work, provided + that such additional attribution notices cannot be construed + as modifying the License. + + You may add Your own copyright statement to Your modifications and + may provide additional or different license terms and conditions + for use, reproduction, or distribution of Your modifications, or + for any such Derivative Works as a whole, provided Your use, + reproduction, and distribution of the Work otherwise complies with + the conditions stated in this License. + + 5. Submission of Contributions. Unless You explicitly state otherwise, + any Contribution intentionally submitted for inclusion in the Work + by You to the Licensor shall be under the terms and conditions of + this License, without any additional terms or conditions. + Notwithstanding the above, nothing herein shall supersede or modify + the terms of any separate license agreement you may have executed + with Licensor regarding such Contributions. + + 6. Trademarks. This License does not grant permission to use the trade + names, trademarks, service marks, or product names of the Licensor, + except as required for reasonable and customary use in describing the + origin of the Work and reproducing the content of the NOTICE file. + + 7. Disclaimer of Warranty. 
Unless required by applicable law or + agreed to in writing, Licensor provides the Work (and each + Contributor provides its Contributions) on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or + implied, including, without limitation, any warranties or conditions + of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A + PARTICULAR PURPOSE. You are solely responsible for determining the + appropriateness of using or redistributing the Work and assume any + risks associated with Your exercise of permissions under this License. + + 8. Limitation of Liability. In no event and under no legal theory, + whether in tort (including negligence), contract, or otherwise, + unless required by applicable law (such as deliberate and grossly + negligent acts) or agreed to in writing, shall any Contributor be + liable to You for damages, including any direct, indirect, special, + incidental, or consequential damages of any character arising as a + result of this License or out of the use or inability to use the + Work (including but not limited to damages for loss of goodwill, + work stoppage, computer failure or malfunction, or any and all + other commercial damages or losses), even if such Contributor + has been advised of the possibility of such damages. + + 9. Accepting Warranty or Additional Liability. While redistributing + the Work or Derivative Works thereof, You may choose to offer, + and charge a fee for, acceptance of support, warranty, indemnity, + or other liability obligations and/or rights consistent with this + License. However, in accepting such obligations, You may act only + on Your own behalf and on Your sole responsibility, not on behalf + of any other Contributor, and only if You agree to indemnify, + defend, and hold each Contributor harmless for any liability + incurred by, or claims asserted against, such Contributor by reason + of your accepting any such warranty or additional liability. 
+ + END OF TERMS AND CONDITIONS + + APPENDIX: How to apply the Apache License to your work. + + To apply the Apache License to your work, attach the following + boilerplate notice, with the fields enclosed by brackets "{}" + replaced with your own identifying information. (Don't include + the brackets!) The text should be enclosed in the appropriate + comment syntax for the file format. We also recommend that a + file or class name and description of purpose be included on the + same "printed page" as the copyright notice for easier + identification within third-party archives. + + Copyright 2018-2020 Amazon.com, Inc. or its affiliates. All Rights Reserved. + + Licensed under the Apache License, Version 2.0 (the "License"); + you may not use this file except in compliance with the License. + You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + + Unless required by applicable law or agreed to in writing, software + distributed under the License is distributed on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + See the License for the specific language governing permissions and + limitations under the License. 
\ No newline at end of file diff --git a/amplify/functions/downloadDocument/node_modules/@smithy/util-body-length-node/README.md b/amplify/functions/downloadDocument/node_modules/@smithy/util-body-length-node/README.md new file mode 100644 index 0000000..9a80efe --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@smithy/util-body-length-node/README.md @@ -0,0 +1,12 @@ +# @smithy/util-body-length-node + +[![NPM version](https://img.shields.io/npm/v/@smithy/util-body-length-node/latest.svg)](https://www.npmjs.com/package/@smithy/util-body-length-node) +[![NPM downloads](https://img.shields.io/npm/dm/@smithy/util-body-length-node.svg)](https://www.npmjs.com/package/@smithy/util-body-length-node) + +Determines the length of a request body in node.js + +> An internal package + +## Usage + +You probably shouldn't, at least directly. diff --git a/amplify/functions/downloadDocument/node_modules/@smithy/util-body-length-node/dist-cjs/calculateBodyLength.js b/amplify/functions/downloadDocument/node_modules/@smithy/util-body-length-node/dist-cjs/calculateBodyLength.js new file mode 100644 index 0000000..532e610 --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@smithy/util-body-length-node/dist-cjs/calculateBodyLength.js @@ -0,0 +1 @@ +module.exports = require("./index.js"); \ No newline at end of file diff --git a/amplify/functions/downloadDocument/node_modules/@smithy/util-body-length-node/dist-cjs/index.js b/amplify/functions/downloadDocument/node_modules/@smithy/util-body-length-node/dist-cjs/index.js new file mode 100644 index 0000000..1ecdc79 --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@smithy/util-body-length-node/dist-cjs/index.js @@ -0,0 +1,53 @@ +var __defProp = Object.defineProperty; +var __getOwnPropDesc = Object.getOwnPropertyDescriptor; +var __getOwnPropNames = Object.getOwnPropertyNames; +var __hasOwnProp = Object.prototype.hasOwnProperty; +var __name = (target, value) => __defProp(target, "name", { 
value, configurable: true }); +var __export = (target, all) => { + for (var name in all) + __defProp(target, name, { get: all[name], enumerable: true }); +}; +var __copyProps = (to, from, except, desc) => { + if (from && typeof from === "object" || typeof from === "function") { + for (let key of __getOwnPropNames(from)) + if (!__hasOwnProp.call(to, key) && key !== except) + __defProp(to, key, { get: () => from[key], enumerable: !(desc = __getOwnPropDesc(from, key)) || desc.enumerable }); + } + return to; +}; +var __toCommonJS = (mod) => __copyProps(__defProp({}, "__esModule", { value: true }), mod); + +// src/index.ts +var src_exports = {}; +__export(src_exports, { + calculateBodyLength: () => calculateBodyLength +}); +module.exports = __toCommonJS(src_exports); + +// src/calculateBodyLength.ts +var import_fs = require("fs"); +var calculateBodyLength = /* @__PURE__ */ __name((body) => { + if (!body) { + return 0; + } + if (typeof body === "string") { + return Buffer.byteLength(body); + } else if (typeof body.byteLength === "number") { + return body.byteLength; + } else if (typeof body.size === "number") { + return body.size; + } else if (typeof body.start === "number" && typeof body.end === "number") { + return body.end + 1 - body.start; + } else if (typeof body.path === "string" || Buffer.isBuffer(body.path)) { + return (0, import_fs.lstatSync)(body.path).size; + } else if (typeof body.fd === "number") { + return (0, import_fs.fstatSync)(body.fd).size; + } + throw new Error(`Body Length computation failed for ${body}`); +}, "calculateBodyLength"); +// Annotate the CommonJS export names for ESM import in node: + +0 && (module.exports = { + calculateBodyLength +}); + diff --git a/amplify/functions/downloadDocument/node_modules/@smithy/util-body-length-node/dist-es/calculateBodyLength.js b/amplify/functions/downloadDocument/node_modules/@smithy/util-body-length-node/dist-es/calculateBodyLength.js new file mode 100644 index 0000000..857cff5 --- /dev/null +++ 
b/amplify/functions/downloadDocument/node_modules/@smithy/util-body-length-node/dist-es/calculateBodyLength.js @@ -0,0 +1,25 @@ +import { fstatSync, lstatSync } from "fs"; +export const calculateBodyLength = (body) => { + if (!body) { + return 0; + } + if (typeof body === "string") { + return Buffer.byteLength(body); + } + else if (typeof body.byteLength === "number") { + return body.byteLength; + } + else if (typeof body.size === "number") { + return body.size; + } + else if (typeof body.start === "number" && typeof body.end === "number") { + return body.end + 1 - body.start; + } + else if (typeof body.path === "string" || Buffer.isBuffer(body.path)) { + return lstatSync(body.path).size; + } + else if (typeof body.fd === "number") { + return fstatSync(body.fd).size; + } + throw new Error(`Body Length computation failed for ${body}`); +}; diff --git a/amplify/functions/downloadDocument/node_modules/@smithy/util-body-length-node/dist-es/index.js b/amplify/functions/downloadDocument/node_modules/@smithy/util-body-length-node/dist-es/index.js new file mode 100644 index 0000000..16ba478 --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@smithy/util-body-length-node/dist-es/index.js @@ -0,0 +1 @@ +export * from "./calculateBodyLength"; diff --git a/amplify/functions/downloadDocument/node_modules/@smithy/util-body-length-node/dist-types/calculateBodyLength.d.ts b/amplify/functions/downloadDocument/node_modules/@smithy/util-body-length-node/dist-types/calculateBodyLength.d.ts new file mode 100644 index 0000000..8e1bdb0 --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@smithy/util-body-length-node/dist-types/calculateBodyLength.d.ts @@ -0,0 +1,4 @@ +/** + * @internal + */ +export declare const calculateBodyLength: (body: any) => number | undefined; diff --git a/amplify/functions/downloadDocument/node_modules/@smithy/util-body-length-node/dist-types/index.d.ts 
b/amplify/functions/downloadDocument/node_modules/@smithy/util-body-length-node/dist-types/index.d.ts new file mode 100644 index 0000000..7b4a0d7 --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@smithy/util-body-length-node/dist-types/index.d.ts @@ -0,0 +1,4 @@ +/** + * @internal + */ +export * from "./calculateBodyLength"; diff --git a/amplify/functions/downloadDocument/node_modules/@smithy/util-body-length-node/dist-types/ts3.4/calculateBodyLength.d.ts b/amplify/functions/downloadDocument/node_modules/@smithy/util-body-length-node/dist-types/ts3.4/calculateBodyLength.d.ts new file mode 100644 index 0000000..3260536 --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@smithy/util-body-length-node/dist-types/ts3.4/calculateBodyLength.d.ts @@ -0,0 +1,4 @@ +/** + * @internal + */ +export declare const calculateBodyLength: (body: any) => number | undefined; diff --git a/amplify/functions/downloadDocument/node_modules/@smithy/util-body-length-node/dist-types/ts3.4/index.d.ts b/amplify/functions/downloadDocument/node_modules/@smithy/util-body-length-node/dist-types/ts3.4/index.d.ts new file mode 100644 index 0000000..ab6cb83 --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@smithy/util-body-length-node/dist-types/ts3.4/index.d.ts @@ -0,0 +1,4 @@ +/** + * @internal + */ +export * from "./calculateBodyLength"; diff --git a/amplify/functions/downloadDocument/node_modules/@smithy/util-body-length-node/package.json b/amplify/functions/downloadDocument/node_modules/@smithy/util-body-length-node/package.json new file mode 100644 index 0000000..25b0f7a --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@smithy/util-body-length-node/package.json @@ -0,0 +1,61 @@ +{ + "name": "@smithy/util-body-length-node", + "description": "Determines the length of a request body in node.js", + "version": "4.0.0", + "scripts": { + "build": "concurrently 'yarn:build:cjs' 'yarn:build:es' 'yarn:build:types && yarn 
build:types:downlevel'", + "build:cjs": "node ../../scripts/inline util-body-length-node", + "build:es": "yarn g:tsc -p tsconfig.es.json", + "build:types": "yarn g:tsc -p tsconfig.types.json", + "build:types:downlevel": "rimraf dist-types/ts3.4 && downlevel-dts dist-types dist-types/ts3.4", + "stage-release": "rimraf ./.release && yarn pack && mkdir ./.release && tar zxvf ./package.tgz --directory ./.release && rm ./package.tgz", + "clean": "rimraf ./dist-* && rimraf *.tsbuildinfo || exit 0", + "lint": "eslint -c ../../.eslintrc.js \"src/**/*.ts\"", + "format": "prettier --config ../../prettier.config.js --ignore-path ../.prettierignore --write \"**/*.{ts,md,json}\"", + "test": "yarn g:vitest run", + "test:watch": "yarn g:vitest watch" + }, + "devDependencies": { + "@types/node": "^18.11.9", + "concurrently": "7.0.0", + "downlevel-dts": "0.10.1", + "rimraf": "3.0.2", + "typedoc": "0.23.23" + }, + "main": "./dist-cjs/index.js", + "module": "./dist-es/index.js", + "types": "./dist-types/index.d.ts", + "author": { + "name": "AWS SDK for JavaScript Team", + "url": "https://aws.amazon.com/javascript/" + }, + "license": "Apache-2.0", + "dependencies": { + "tslib": "^2.6.2" + }, + "engines": { + "node": ">=18.0.0" + }, + "typesVersions": { + "<4.0": { + "dist-types/*": [ + "dist-types/ts3.4/*" + ] + } + }, + "files": [ + "dist-*/**" + ], + "homepage": "https://github.com/awslabs/smithy-typescript/tree/main/packages/util-body-length-node", + "repository": { + "type": "git", + "url": "https://github.com/awslabs/smithy-typescript.git", + "directory": "packages/util-body-length-node" + }, + "typedoc": { + "entryPoint": "src/index.ts" + }, + "publishConfig": { + "directory": ".release/package" + } +} \ No newline at end of file diff --git a/amplify/functions/downloadDocument/node_modules/@smithy/util-buffer-from/LICENSE b/amplify/functions/downloadDocument/node_modules/@smithy/util-buffer-from/LICENSE new file mode 100644 index 0000000..7b6491b --- /dev/null +++ 
b/amplify/functions/downloadDocument/node_modules/@smithy/util-buffer-from/LICENSE @@ -0,0 +1,201 @@ +Apache License + Version 2.0, January 2004 + http://www.apache.org/licenses/ + + TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION + + 1. Definitions. + + "License" shall mean the terms and conditions for use, reproduction, + and distribution as defined by Sections 1 through 9 of this document. + + "Licensor" shall mean the copyright owner or entity authorized by + the copyright owner that is granting the License. + + "Legal Entity" shall mean the union of the acting entity and all + other entities that control, are controlled by, or are under common + control with that entity. For the purposes of this definition, + "control" means (i) the power, direct or indirect, to cause the + direction or management of such entity, whether by contract or + otherwise, or (ii) ownership of fifty percent (50%) or more of the + outstanding shares, or (iii) beneficial ownership of such entity. + + "You" (or "Your") shall mean an individual or Legal Entity + exercising permissions granted by this License. + + "Source" form shall mean the preferred form for making modifications, + including but not limited to software source code, documentation + source, and configuration files. + + "Object" form shall mean any form resulting from mechanical + transformation or translation of a Source form, including but + not limited to compiled object code, generated documentation, + and conversions to other media types. + + "Work" shall mean the work of authorship, whether in Source or + Object form, made available under the License, as indicated by a + copyright notice that is included in or attached to the work + (an example is provided in the Appendix below). 
+ + "Derivative Works" shall mean any work, whether in Source or Object + form, that is based on (or derived from) the Work and for which the + editorial revisions, annotations, elaborations, or other modifications + represent, as a whole, an original work of authorship. For the purposes + of this License, Derivative Works shall not include works that remain + separable from, or merely link (or bind by name) to the interfaces of, + the Work and Derivative Works thereof. + + "Contribution" shall mean any work of authorship, including + the original version of the Work and any modifications or additions + to that Work or Derivative Works thereof, that is intentionally + submitted to Licensor for inclusion in the Work by the copyright owner + or by an individual or Legal Entity authorized to submit on behalf of + the copyright owner. For the purposes of this definition, "submitted" + means any form of electronic, verbal, or written communication sent + to the Licensor or its representatives, including but not limited to + communication on electronic mailing lists, source code control systems, + and issue tracking systems that are managed by, or on behalf of, the + Licensor for the purpose of discussing and improving the Work, but + excluding communication that is conspicuously marked or otherwise + designated in writing by the copyright owner as "Not a Contribution." + + "Contributor" shall mean Licensor and any individual or Legal Entity + on behalf of whom a Contribution has been received by Licensor and + subsequently incorporated within the Work. + + 2. Grant of Copyright License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + copyright license to reproduce, prepare Derivative Works of, + publicly display, publicly perform, sublicense, and distribute the + Work and such Derivative Works in Source or Object form. + + 3. 
Grant of Patent License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + (except as stated in this section) patent license to make, have made, + use, offer to sell, sell, import, and otherwise transfer the Work, + where such license applies only to those patent claims licensable + by such Contributor that are necessarily infringed by their + Contribution(s) alone or by combination of their Contribution(s) + with the Work to which such Contribution(s) was submitted. If You + institute patent litigation against any entity (including a + cross-claim or counterclaim in a lawsuit) alleging that the Work + or a Contribution incorporated within the Work constitutes direct + or contributory patent infringement, then any patent licenses + granted to You under this License for that Work shall terminate + as of the date such litigation is filed. + + 4. Redistribution. You may reproduce and distribute copies of the + Work or Derivative Works thereof in any medium, with or without + modifications, and in Source or Object form, provided that You + meet the following conditions: + + (a) You must give any other recipients of the Work or + Derivative Works a copy of this License; and + + (b) You must cause any modified files to carry prominent notices + stating that You changed the files; and + + (c) You must retain, in the Source form of any Derivative Works + that You distribute, all copyright, patent, trademark, and + attribution notices from the Source form of the Work, + excluding those notices that do not pertain to any part of + the Derivative Works; and + + (d) If the Work includes a "NOTICE" text file as part of its + distribution, then any Derivative Works that You distribute must + include a readable copy of the attribution notices contained + within such NOTICE file, excluding those notices that do not + pertain to any part of the Derivative 
Works, in at least one + of the following places: within a NOTICE text file distributed + as part of the Derivative Works; within the Source form or + documentation, if provided along with the Derivative Works; or, + within a display generated by the Derivative Works, if and + wherever such third-party notices normally appear. The contents + of the NOTICE file are for informational purposes only and + do not modify the License. You may add Your own attribution + notices within Derivative Works that You distribute, alongside + or as an addendum to the NOTICE text from the Work, provided + that such additional attribution notices cannot be construed + as modifying the License. + + You may add Your own copyright statement to Your modifications and + may provide additional or different license terms and conditions + for use, reproduction, or distribution of Your modifications, or + for any such Derivative Works as a whole, provided Your use, + reproduction, and distribution of the Work otherwise complies with + the conditions stated in this License. + + 5. Submission of Contributions. Unless You explicitly state otherwise, + any Contribution intentionally submitted for inclusion in the Work + by You to the Licensor shall be under the terms and conditions of + this License, without any additional terms or conditions. + Notwithstanding the above, nothing herein shall supersede or modify + the terms of any separate license agreement you may have executed + with Licensor regarding such Contributions. + + 6. Trademarks. This License does not grant permission to use the trade + names, trademarks, service marks, or product names of the Licensor, + except as required for reasonable and customary use in describing the + origin of the Work and reproducing the content of the NOTICE file. + + 7. Disclaimer of Warranty. 
Unless required by applicable law or + agreed to in writing, Licensor provides the Work (and each + Contributor provides its Contributions) on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or + implied, including, without limitation, any warranties or conditions + of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A + PARTICULAR PURPOSE. You are solely responsible for determining the + appropriateness of using or redistributing the Work and assume any + risks associated with Your exercise of permissions under this License. + + 8. Limitation of Liability. In no event and under no legal theory, + whether in tort (including negligence), contract, or otherwise, + unless required by applicable law (such as deliberate and grossly + negligent acts) or agreed to in writing, shall any Contributor be + liable to You for damages, including any direct, indirect, special, + incidental, or consequential damages of any character arising as a + result of this License or out of the use or inability to use the + Work (including but not limited to damages for loss of goodwill, + work stoppage, computer failure or malfunction, or any and all + other commercial damages or losses), even if such Contributor + has been advised of the possibility of such damages. + + 9. Accepting Warranty or Additional Liability. While redistributing + the Work or Derivative Works thereof, You may choose to offer, + and charge a fee for, acceptance of support, warranty, indemnity, + or other liability obligations and/or rights consistent with this + License. However, in accepting such obligations, You may act only + on Your own behalf and on Your sole responsibility, not on behalf + of any other Contributor, and only if You agree to indemnify, + defend, and hold each Contributor harmless for any liability + incurred by, or claims asserted against, such Contributor by reason + of your accepting any such warranty or additional liability. 
+ + END OF TERMS AND CONDITIONS + + APPENDIX: How to apply the Apache License to your work. + + To apply the Apache License to your work, attach the following + boilerplate notice, with the fields enclosed by brackets "{}" + replaced with your own identifying information. (Don't include + the brackets!) The text should be enclosed in the appropriate + comment syntax for the file format. We also recommend that a + file or class name and description of purpose be included on the + same "printed page" as the copyright notice for easier + identification within third-party archives. + + Copyright 2018-2020 Amazon.com, Inc. or its affiliates. All Rights Reserved. + + Licensed under the Apache License, Version 2.0 (the "License"); + you may not use this file except in compliance with the License. + You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + + Unless required by applicable law or agreed to in writing, software + distributed under the License is distributed on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + See the License for the specific language governing permissions and + limitations under the License. \ No newline at end of file diff --git a/amplify/functions/downloadDocument/node_modules/@smithy/util-buffer-from/README.md b/amplify/functions/downloadDocument/node_modules/@smithy/util-buffer-from/README.md new file mode 100644 index 0000000..c896b04 --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@smithy/util-buffer-from/README.md @@ -0,0 +1,10 @@ +# @smithy/util-buffer-from + +[![NPM version](https://img.shields.io/npm/v/@smithy/util-buffer-from/latest.svg)](https://www.npmjs.com/package/@smithy/util-buffer-from) +[![NPM downloads](https://img.shields.io/npm/dm/@smithy/util-buffer-from.svg)](https://www.npmjs.com/package/@smithy/util-buffer-from) + +> An internal package + +## Usage + +You probably shouldn't, at least directly. 
diff --git a/amplify/functions/downloadDocument/node_modules/@smithy/util-buffer-from/dist-cjs/index.js b/amplify/functions/downloadDocument/node_modules/@smithy/util-buffer-from/dist-cjs/index.js new file mode 100644 index 0000000..c6738d9 --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@smithy/util-buffer-from/dist-cjs/index.js @@ -0,0 +1,47 @@ +var __defProp = Object.defineProperty; +var __getOwnPropDesc = Object.getOwnPropertyDescriptor; +var __getOwnPropNames = Object.getOwnPropertyNames; +var __hasOwnProp = Object.prototype.hasOwnProperty; +var __name = (target, value) => __defProp(target, "name", { value, configurable: true }); +var __export = (target, all) => { + for (var name in all) + __defProp(target, name, { get: all[name], enumerable: true }); +}; +var __copyProps = (to, from, except, desc) => { + if (from && typeof from === "object" || typeof from === "function") { + for (let key of __getOwnPropNames(from)) + if (!__hasOwnProp.call(to, key) && key !== except) + __defProp(to, key, { get: () => from[key], enumerable: !(desc = __getOwnPropDesc(from, key)) || desc.enumerable }); + } + return to; +}; +var __toCommonJS = (mod) => __copyProps(__defProp({}, "__esModule", { value: true }), mod); + +// src/index.ts +var src_exports = {}; +__export(src_exports, { + fromArrayBuffer: () => fromArrayBuffer, + fromString: () => fromString +}); +module.exports = __toCommonJS(src_exports); +var import_is_array_buffer = require("@smithy/is-array-buffer"); +var import_buffer = require("buffer"); +var fromArrayBuffer = /* @__PURE__ */ __name((input, offset = 0, length = input.byteLength - offset) => { + if (!(0, import_is_array_buffer.isArrayBuffer)(input)) { + throw new TypeError(`The "input" argument must be ArrayBuffer. 
Received type ${typeof input} (${input})`); + } + return import_buffer.Buffer.from(input, offset, length); +}, "fromArrayBuffer"); +var fromString = /* @__PURE__ */ __name((input, encoding) => { + if (typeof input !== "string") { + throw new TypeError(`The "input" argument must be of type string. Received type ${typeof input} (${input})`); + } + return encoding ? import_buffer.Buffer.from(input, encoding) : import_buffer.Buffer.from(input); +}, "fromString"); +// Annotate the CommonJS export names for ESM import in node: + +0 && (module.exports = { + fromArrayBuffer, + fromString +}); + diff --git a/amplify/functions/downloadDocument/node_modules/@smithy/util-buffer-from/dist-es/index.js b/amplify/functions/downloadDocument/node_modules/@smithy/util-buffer-from/dist-es/index.js new file mode 100644 index 0000000..718f831 --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@smithy/util-buffer-from/dist-es/index.js @@ -0,0 +1,14 @@ +import { isArrayBuffer } from "@smithy/is-array-buffer"; +import { Buffer } from "buffer"; +export const fromArrayBuffer = (input, offset = 0, length = input.byteLength - offset) => { + if (!isArrayBuffer(input)) { + throw new TypeError(`The "input" argument must be ArrayBuffer. Received type ${typeof input} (${input})`); + } + return Buffer.from(input, offset, length); +}; +export const fromString = (input, encoding) => { + if (typeof input !== "string") { + throw new TypeError(`The "input" argument must be of type string. Received type ${typeof input} (${input})`); + } + return encoding ? 
Buffer.from(input, encoding) : Buffer.from(input); +}; diff --git a/amplify/functions/downloadDocument/node_modules/@smithy/util-buffer-from/dist-types/index.d.ts b/amplify/functions/downloadDocument/node_modules/@smithy/util-buffer-from/dist-types/index.d.ts new file mode 100644 index 0000000..a523134 --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@smithy/util-buffer-from/dist-types/index.d.ts @@ -0,0 +1,13 @@ +import { Buffer } from "buffer"; +/** + * @internal + */ +export declare const fromArrayBuffer: (input: ArrayBuffer, offset?: number, length?: number) => Buffer; +/** + * @internal + */ +export type StringEncoding = "ascii" | "utf8" | "utf16le" | "ucs2" | "base64" | "latin1" | "binary" | "hex"; +/** + * @internal + */ +export declare const fromString: (input: string, encoding?: StringEncoding) => Buffer; diff --git a/amplify/functions/downloadDocument/node_modules/@smithy/util-buffer-from/dist-types/ts3.4/index.d.ts b/amplify/functions/downloadDocument/node_modules/@smithy/util-buffer-from/dist-types/ts3.4/index.d.ts new file mode 100644 index 0000000..f9173f7 --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@smithy/util-buffer-from/dist-types/ts3.4/index.d.ts @@ -0,0 +1,13 @@ +import { Buffer } from "buffer"; +/** + * @internal + */ +export declare const fromArrayBuffer: (input: ArrayBuffer, offset?: number, length?: number) => Buffer; +/** + * @internal + */ +export type StringEncoding = "ascii" | "utf8" | "utf16le" | "ucs2" | "base64" | "latin1" | "binary" | "hex"; +/** + * @internal + */ +export declare const fromString: (input: string, encoding?: StringEncoding) => Buffer; diff --git a/amplify/functions/downloadDocument/node_modules/@smithy/util-buffer-from/package.json b/amplify/functions/downloadDocument/node_modules/@smithy/util-buffer-from/package.json new file mode 100644 index 0000000..0869899 --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@smithy/util-buffer-from/package.json @@ -0,0 
+1,61 @@ +{ + "name": "@smithy/util-buffer-from", + "version": "4.0.0", + "scripts": { + "build": "concurrently 'yarn:build:cjs' 'yarn:build:es' 'yarn:build:types && yarn build:types:downlevel'", + "build:cjs": "node ../../scripts/inline util-buffer-from", + "build:es": "yarn g:tsc -p tsconfig.es.json", + "build:types": "yarn g:tsc -p tsconfig.types.json", + "build:types:downlevel": "rimraf dist-types/ts3.4 && downlevel-dts dist-types dist-types/ts3.4", + "stage-release": "rimraf ./.release && yarn pack && mkdir ./.release && tar zxvf ./package.tgz --directory ./.release && rm ./package.tgz", + "clean": "rimraf ./dist-* && rimraf *.tsbuildinfo || exit 0", + "lint": "eslint -c ../../.eslintrc.js \"src/**/*.ts\"", + "format": "prettier --config ../../prettier.config.js --ignore-path ../.prettierignore --write \"**/*.{ts,md,json}\"", + "test": "yarn g:vitest run", + "test:watch": "yarn g:vitest watch" + }, + "author": { + "name": "AWS SDK for JavaScript Team", + "url": "https://aws.amazon.com/javascript/" + }, + "license": "Apache-2.0", + "dependencies": { + "@smithy/is-array-buffer": "^4.0.0", + "tslib": "^2.6.2" + }, + "devDependencies": { + "@types/node": "^18.11.9", + "concurrently": "7.0.0", + "downlevel-dts": "0.10.1", + "rimraf": "3.0.2", + "typedoc": "0.23.23" + }, + "main": "./dist-cjs/index.js", + "module": "./dist-es/index.js", + "types": "./dist-types/index.d.ts", + "engines": { + "node": ">=18.0.0" + }, + "typesVersions": { + "<4.0": { + "dist-types/*": [ + "dist-types/ts3.4/*" + ] + } + }, + "files": [ + "dist-*/**" + ], + "homepage": "https://github.com/awslabs/smithy-typescript/tree/main/packages/util-buffer-from", + "repository": { + "type": "git", + "url": "https://github.com/awslabs/smithy-typescript.git", + "directory": "packages/util-buffer-from" + }, + "typedoc": { + "entryPoint": "src/index.ts" + }, + "publishConfig": { + "directory": ".release/package" + } +} \ No newline at end of file diff --git 
a/amplify/functions/downloadDocument/node_modules/@smithy/util-config-provider/LICENSE b/amplify/functions/downloadDocument/node_modules/@smithy/util-config-provider/LICENSE new file mode 100644 index 0000000..74d4e5c --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@smithy/util-config-provider/LICENSE @@ -0,0 +1,201 @@ +Apache License + Version 2.0, January 2004 + http://www.apache.org/licenses/ + + TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION + + 1. Definitions. + + "License" shall mean the terms and conditions for use, reproduction, + and distribution as defined by Sections 1 through 9 of this document. + + "Licensor" shall mean the copyright owner or entity authorized by + the copyright owner that is granting the License. + + "Legal Entity" shall mean the union of the acting entity and all + other entities that control, are controlled by, or are under common + control with that entity. For the purposes of this definition, + "control" means (i) the power, direct or indirect, to cause the + direction or management of such entity, whether by contract or + otherwise, or (ii) ownership of fifty percent (50%) or more of the + outstanding shares, or (iii) beneficial ownership of such entity. + + "You" (or "Your") shall mean an individual or Legal Entity + exercising permissions granted by this License. + + "Source" form shall mean the preferred form for making modifications, + including but not limited to software source code, documentation + source, and configuration files. + + "Object" form shall mean any form resulting from mechanical + transformation or translation of a Source form, including but + not limited to compiled object code, generated documentation, + and conversions to other media types. 
+ + "Work" shall mean the work of authorship, whether in Source or + Object form, made available under the License, as indicated by a + copyright notice that is included in or attached to the work + (an example is provided in the Appendix below). + + "Derivative Works" shall mean any work, whether in Source or Object + form, that is based on (or derived from) the Work and for which the + editorial revisions, annotations, elaborations, or other modifications + represent, as a whole, an original work of authorship. For the purposes + of this License, Derivative Works shall not include works that remain + separable from, or merely link (or bind by name) to the interfaces of, + the Work and Derivative Works thereof. + + "Contribution" shall mean any work of authorship, including + the original version of the Work and any modifications or additions + to that Work or Derivative Works thereof, that is intentionally + submitted to Licensor for inclusion in the Work by the copyright owner + or by an individual or Legal Entity authorized to submit on behalf of + the copyright owner. For the purposes of this definition, "submitted" + means any form of electronic, verbal, or written communication sent + to the Licensor or its representatives, including but not limited to + communication on electronic mailing lists, source code control systems, + and issue tracking systems that are managed by, or on behalf of, the + Licensor for the purpose of discussing and improving the Work, but + excluding communication that is conspicuously marked or otherwise + designated in writing by the copyright owner as "Not a Contribution." + + "Contributor" shall mean Licensor and any individual or Legal Entity + on behalf of whom a Contribution has been received by Licensor and + subsequently incorporated within the Work. + + 2. Grant of Copyright License. 
Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + copyright license to reproduce, prepare Derivative Works of, + publicly display, publicly perform, sublicense, and distribute the + Work and such Derivative Works in Source or Object form. + + 3. Grant of Patent License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + (except as stated in this section) patent license to make, have made, + use, offer to sell, sell, import, and otherwise transfer the Work, + where such license applies only to those patent claims licensable + by such Contributor that are necessarily infringed by their + Contribution(s) alone or by combination of their Contribution(s) + with the Work to which such Contribution(s) was submitted. If You + institute patent litigation against any entity (including a + cross-claim or counterclaim in a lawsuit) alleging that the Work + or a Contribution incorporated within the Work constitutes direct + or contributory patent infringement, then any patent licenses + granted to You under this License for that Work shall terminate + as of the date such litigation is filed. + + 4. Redistribution. 
You may reproduce and distribute copies of the + Work or Derivative Works thereof in any medium, with or without + modifications, and in Source or Object form, provided that You + meet the following conditions: + + (a) You must give any other recipients of the Work or + Derivative Works a copy of this License; and + + (b) You must cause any modified files to carry prominent notices + stating that You changed the files; and + + (c) You must retain, in the Source form of any Derivative Works + that You distribute, all copyright, patent, trademark, and + attribution notices from the Source form of the Work, + excluding those notices that do not pertain to any part of + the Derivative Works; and + + (d) If the Work includes a "NOTICE" text file as part of its + distribution, then any Derivative Works that You distribute must + include a readable copy of the attribution notices contained + within such NOTICE file, excluding those notices that do not + pertain to any part of the Derivative Works, in at least one + of the following places: within a NOTICE text file distributed + as part of the Derivative Works; within the Source form or + documentation, if provided along with the Derivative Works; or, + within a display generated by the Derivative Works, if and + wherever such third-party notices normally appear. The contents + of the NOTICE file are for informational purposes only and + do not modify the License. You may add Your own attribution + notices within Derivative Works that You distribute, alongside + or as an addendum to the NOTICE text from the Work, provided + that such additional attribution notices cannot be construed + as modifying the License. 
+ + You may add Your own copyright statement to Your modifications and + may provide additional or different license terms and conditions + for use, reproduction, or distribution of Your modifications, or + for any such Derivative Works as a whole, provided Your use, + reproduction, and distribution of the Work otherwise complies with + the conditions stated in this License. + + 5. Submission of Contributions. Unless You explicitly state otherwise, + any Contribution intentionally submitted for inclusion in the Work + by You to the Licensor shall be under the terms and conditions of + this License, without any additional terms or conditions. + Notwithstanding the above, nothing herein shall supersede or modify + the terms of any separate license agreement you may have executed + with Licensor regarding such Contributions. + + 6. Trademarks. This License does not grant permission to use the trade + names, trademarks, service marks, or product names of the Licensor, + except as required for reasonable and customary use in describing the + origin of the Work and reproducing the content of the NOTICE file. + + 7. Disclaimer of Warranty. Unless required by applicable law or + agreed to in writing, Licensor provides the Work (and each + Contributor provides its Contributions) on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or + implied, including, without limitation, any warranties or conditions + of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A + PARTICULAR PURPOSE. You are solely responsible for determining the + appropriateness of using or redistributing the Work and assume any + risks associated with Your exercise of permissions under this License. + + 8. Limitation of Liability. 
In no event and under no legal theory, + whether in tort (including negligence), contract, or otherwise, + unless required by applicable law (such as deliberate and grossly + negligent acts) or agreed to in writing, shall any Contributor be + liable to You for damages, including any direct, indirect, special, + incidental, or consequential damages of any character arising as a + result of this License or out of the use or inability to use the + Work (including but not limited to damages for loss of goodwill, + work stoppage, computer failure or malfunction, or any and all + other commercial damages or losses), even if such Contributor + has been advised of the possibility of such damages. + + 9. Accepting Warranty or Additional Liability. While redistributing + the Work or Derivative Works thereof, You may choose to offer, + and charge a fee for, acceptance of support, warranty, indemnity, + or other liability obligations and/or rights consistent with this + License. However, in accepting such obligations, You may act only + on Your own behalf and on Your sole responsibility, not on behalf + of any other Contributor, and only if You agree to indemnify, + defend, and hold each Contributor harmless for any liability + incurred by, or claims asserted against, such Contributor by reason + of your accepting any such warranty or additional liability. + + END OF TERMS AND CONDITIONS + + APPENDIX: How to apply the Apache License to your work. + + To apply the Apache License to your work, attach the following + boilerplate notice, with the fields enclosed by brackets "{}" + replaced with your own identifying information. (Don't include + the brackets!) The text should be enclosed in the appropriate + comment syntax for the file format. We also recommend that a + file or class name and description of purpose be included on the + same "printed page" as the copyright notice for easier + identification within third-party archives. + + Copyright 2020 Amazon.com, Inc. 
or its affiliates. All Rights Reserved. + + Licensed under the Apache License, Version 2.0 (the "License"); + you may not use this file except in compliance with the License. + You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + + Unless required by applicable law or agreed to in writing, software + distributed under the License is distributed on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + See the License for the specific language governing permissions and + limitations under the License. \ No newline at end of file diff --git a/amplify/functions/downloadDocument/node_modules/@smithy/util-config-provider/README.md b/amplify/functions/downloadDocument/node_modules/@smithy/util-config-provider/README.md new file mode 100644 index 0000000..5b0341d --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@smithy/util-config-provider/README.md @@ -0,0 +1,4 @@ +# @smithy/util-config-provider + +[![NPM version](https://img.shields.io/npm/v/@smithy/util-config-provider/latest.svg)](https://www.npmjs.com/package/@smithy/util-config-provider) +[![NPM downloads](https://img.shields.io/npm/dm/@smithy/util-config-provider.svg)](https://www.npmjs.com/package/@smithy/util-config-provider) diff --git a/amplify/functions/downloadDocument/node_modules/@smithy/util-config-provider/dist-cjs/booleanSelector.js b/amplify/functions/downloadDocument/node_modules/@smithy/util-config-provider/dist-cjs/booleanSelector.js new file mode 100644 index 0000000..532e610 --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@smithy/util-config-provider/dist-cjs/booleanSelector.js @@ -0,0 +1 @@ +module.exports = require("./index.js"); \ No newline at end of file diff --git a/amplify/functions/downloadDocument/node_modules/@smithy/util-config-provider/dist-cjs/index.js b/amplify/functions/downloadDocument/node_modules/@smithy/util-config-provider/dist-cjs/index.js new file mode 100644 
index 0000000..210d40d --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@smithy/util-config-provider/dist-cjs/index.js @@ -0,0 +1,64 @@ +var __defProp = Object.defineProperty; +var __getOwnPropDesc = Object.getOwnPropertyDescriptor; +var __getOwnPropNames = Object.getOwnPropertyNames; +var __hasOwnProp = Object.prototype.hasOwnProperty; +var __name = (target, value) => __defProp(target, "name", { value, configurable: true }); +var __export = (target, all) => { + for (var name in all) + __defProp(target, name, { get: all[name], enumerable: true }); +}; +var __copyProps = (to, from, except, desc) => { + if (from && typeof from === "object" || typeof from === "function") { + for (let key of __getOwnPropNames(from)) + if (!__hasOwnProp.call(to, key) && key !== except) + __defProp(to, key, { get: () => from[key], enumerable: !(desc = __getOwnPropDesc(from, key)) || desc.enumerable }); + } + return to; +}; +var __toCommonJS = (mod) => __copyProps(__defProp({}, "__esModule", { value: true }), mod); + +// src/index.ts +var src_exports = {}; +__export(src_exports, { + SelectorType: () => SelectorType, + booleanSelector: () => booleanSelector, + numberSelector: () => numberSelector +}); +module.exports = __toCommonJS(src_exports); + +// src/booleanSelector.ts +var booleanSelector = /* @__PURE__ */ __name((obj, key, type) => { + if (!(key in obj)) + return void 0; + if (obj[key] === "true") + return true; + if (obj[key] === "false") + return false; + throw new Error(`Cannot load ${type} "${key}". Expected "true" or "false", got ${obj[key]}.`); +}, "booleanSelector"); + +// src/numberSelector.ts +var numberSelector = /* @__PURE__ */ __name((obj, key, type) => { + if (!(key in obj)) + return void 0; + const numberValue = parseInt(obj[key], 10); + if (Number.isNaN(numberValue)) { + throw new TypeError(`Cannot load ${type} '${key}'. 
Expected number, got '${obj[key]}'.`); + } + return numberValue; +}, "numberSelector"); + +// src/types.ts +var SelectorType = /* @__PURE__ */ ((SelectorType2) => { + SelectorType2["ENV"] = "env"; + SelectorType2["CONFIG"] = "shared config entry"; + return SelectorType2; +})(SelectorType || {}); +// Annotate the CommonJS export names for ESM import in node: + +0 && (module.exports = { + booleanSelector, + numberSelector, + SelectorType +}); + diff --git a/amplify/functions/downloadDocument/node_modules/@smithy/util-config-provider/dist-cjs/numberSelector.js b/amplify/functions/downloadDocument/node_modules/@smithy/util-config-provider/dist-cjs/numberSelector.js new file mode 100644 index 0000000..532e610 --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@smithy/util-config-provider/dist-cjs/numberSelector.js @@ -0,0 +1 @@ +module.exports = require("./index.js"); \ No newline at end of file diff --git a/amplify/functions/downloadDocument/node_modules/@smithy/util-config-provider/dist-cjs/types.js b/amplify/functions/downloadDocument/node_modules/@smithy/util-config-provider/dist-cjs/types.js new file mode 100644 index 0000000..532e610 --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@smithy/util-config-provider/dist-cjs/types.js @@ -0,0 +1 @@ +module.exports = require("./index.js"); \ No newline at end of file diff --git a/amplify/functions/downloadDocument/node_modules/@smithy/util-config-provider/dist-es/booleanSelector.js b/amplify/functions/downloadDocument/node_modules/@smithy/util-config-provider/dist-es/booleanSelector.js new file mode 100644 index 0000000..6ba2261 --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@smithy/util-config-provider/dist-es/booleanSelector.js @@ -0,0 +1,9 @@ +export const booleanSelector = (obj, key, type) => { + if (!(key in obj)) + return undefined; + if (obj[key] === "true") + return true; + if (obj[key] === "false") + return false; + throw new Error(`Cannot load ${type} 
"${key}". Expected "true" or "false", got ${obj[key]}.`); +}; diff --git a/amplify/functions/downloadDocument/node_modules/@smithy/util-config-provider/dist-es/index.js b/amplify/functions/downloadDocument/node_modules/@smithy/util-config-provider/dist-es/index.js new file mode 100644 index 0000000..a926de8 --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@smithy/util-config-provider/dist-es/index.js @@ -0,0 +1,3 @@ +export * from "./booleanSelector"; +export * from "./numberSelector"; +export * from "./types"; diff --git a/amplify/functions/downloadDocument/node_modules/@smithy/util-config-provider/dist-es/numberSelector.js b/amplify/functions/downloadDocument/node_modules/@smithy/util-config-provider/dist-es/numberSelector.js new file mode 100644 index 0000000..81cfe40 --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@smithy/util-config-provider/dist-es/numberSelector.js @@ -0,0 +1,9 @@ +export const numberSelector = (obj, key, type) => { + if (!(key in obj)) + return undefined; + const numberValue = parseInt(obj[key], 10); + if (Number.isNaN(numberValue)) { + throw new TypeError(`Cannot load ${type} '${key}'. 
Expected number, got '${obj[key]}'.`); + } + return numberValue; +}; diff --git a/amplify/functions/downloadDocument/node_modules/@smithy/util-config-provider/dist-es/types.js b/amplify/functions/downloadDocument/node_modules/@smithy/util-config-provider/dist-es/types.js new file mode 100644 index 0000000..5b10fb5 --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@smithy/util-config-provider/dist-es/types.js @@ -0,0 +1,5 @@ +export var SelectorType; +(function (SelectorType) { + SelectorType["ENV"] = "env"; + SelectorType["CONFIG"] = "shared config entry"; +})(SelectorType || (SelectorType = {})); diff --git a/amplify/functions/downloadDocument/node_modules/@smithy/util-config-provider/dist-types/booleanSelector.d.ts b/amplify/functions/downloadDocument/node_modules/@smithy/util-config-provider/dist-types/booleanSelector.d.ts new file mode 100644 index 0000000..d4977cb --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@smithy/util-config-provider/dist-types/booleanSelector.d.ts @@ -0,0 +1,10 @@ +import { SelectorType } from "./types"; +/** + * Returns boolean value true/false for string value "true"/"false", + * if the string is defined in obj[key] + * Returns undefined, if obj[key] is not defined. + * Throws error for all other cases. 
+ * + * @internal + */ +export declare const booleanSelector: (obj: Record, key: string, type: SelectorType) => boolean | undefined; diff --git a/amplify/functions/downloadDocument/node_modules/@smithy/util-config-provider/dist-types/index.d.ts b/amplify/functions/downloadDocument/node_modules/@smithy/util-config-provider/dist-types/index.d.ts new file mode 100644 index 0000000..a926de8 --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@smithy/util-config-provider/dist-types/index.d.ts @@ -0,0 +1,3 @@ +export * from "./booleanSelector"; +export * from "./numberSelector"; +export * from "./types"; diff --git a/amplify/functions/downloadDocument/node_modules/@smithy/util-config-provider/dist-types/numberSelector.d.ts b/amplify/functions/downloadDocument/node_modules/@smithy/util-config-provider/dist-types/numberSelector.d.ts new file mode 100644 index 0000000..9e0cbf9 --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@smithy/util-config-provider/dist-types/numberSelector.d.ts @@ -0,0 +1,9 @@ +import { SelectorType } from "./types"; +/** + * Returns number value for string value, if the string is defined in obj[key]. + * Returns undefined, if obj[key] is not defined. + * Throws error for all other cases. 
+ * + * @internal + */ +export declare const numberSelector: (obj: Record, key: string, type: SelectorType) => number | undefined; diff --git a/amplify/functions/downloadDocument/node_modules/@smithy/util-config-provider/dist-types/ts3.4/booleanSelector.d.ts b/amplify/functions/downloadDocument/node_modules/@smithy/util-config-provider/dist-types/ts3.4/booleanSelector.d.ts new file mode 100644 index 0000000..0b85452 --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@smithy/util-config-provider/dist-types/ts3.4/booleanSelector.d.ts @@ -0,0 +1,10 @@ +import { SelectorType } from "./types"; +/** + * Returns boolean value true/false for string value "true"/"false", + * if the string is defined in obj[key] + * Returns undefined, if obj[key] is not defined. + * Throws error for all other cases. + * + * @internal + */ +export declare const booleanSelector: (obj: Record, key: string, type: SelectorType) => boolean | undefined; diff --git a/amplify/functions/downloadDocument/node_modules/@smithy/util-config-provider/dist-types/ts3.4/index.d.ts b/amplify/functions/downloadDocument/node_modules/@smithy/util-config-provider/dist-types/ts3.4/index.d.ts new file mode 100644 index 0000000..02fd81d --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@smithy/util-config-provider/dist-types/ts3.4/index.d.ts @@ -0,0 +1,3 @@ +export * from "./booleanSelector"; +export * from "./numberSelector"; +export * from "./types"; diff --git a/amplify/functions/downloadDocument/node_modules/@smithy/util-config-provider/dist-types/ts3.4/numberSelector.d.ts b/amplify/functions/downloadDocument/node_modules/@smithy/util-config-provider/dist-types/ts3.4/numberSelector.d.ts new file mode 100644 index 0000000..3a34671 --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@smithy/util-config-provider/dist-types/ts3.4/numberSelector.d.ts @@ -0,0 +1,9 @@ +import { SelectorType } from "./types"; +/** + * Returns number value for string value, if the string 
is defined in obj[key]. + * Returns undefined, if obj[key] is not defined. + * Throws error for all other cases. + * + * @internal + */ +export declare const numberSelector: (obj: Record, key: string, type: SelectorType) => number | undefined; diff --git a/amplify/functions/downloadDocument/node_modules/@smithy/util-config-provider/dist-types/ts3.4/types.d.ts b/amplify/functions/downloadDocument/node_modules/@smithy/util-config-provider/dist-types/ts3.4/types.d.ts new file mode 100644 index 0000000..e01c128 --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@smithy/util-config-provider/dist-types/ts3.4/types.d.ts @@ -0,0 +1,4 @@ +export declare enum SelectorType { + ENV = "env", + CONFIG = "shared config entry" +} diff --git a/amplify/functions/downloadDocument/node_modules/@smithy/util-config-provider/dist-types/types.d.ts b/amplify/functions/downloadDocument/node_modules/@smithy/util-config-provider/dist-types/types.d.ts new file mode 100644 index 0000000..caa65d7 --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@smithy/util-config-provider/dist-types/types.d.ts @@ -0,0 +1,4 @@ +export declare enum SelectorType { + ENV = "env", + CONFIG = "shared config entry" +} diff --git a/amplify/functions/downloadDocument/node_modules/@smithy/util-config-provider/package.json b/amplify/functions/downloadDocument/node_modules/@smithy/util-config-provider/package.json new file mode 100644 index 0000000..50796be --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@smithy/util-config-provider/package.json @@ -0,0 +1,62 @@ +{ + "name": "@smithy/util-config-provider", + "version": "4.0.0", + "description": "Utilities package for configuration providers", + "scripts": { + "build": "concurrently 'yarn:build:cjs' 'yarn:build:es' 'yarn:build:types && yarn build:types:downlevel'", + "build:cjs": "node ../../scripts/inline util-config-provider", + "build:es": "yarn g:tsc -p tsconfig.es.json", + "build:types": "yarn g:tsc -p 
tsconfig.types.json", + "build:types:downlevel": "rimraf dist-types/ts3.4 && downlevel-dts dist-types dist-types/ts3.4", + "stage-release": "rimraf ./.release && yarn pack && mkdir ./.release && tar zxvf ./package.tgz --directory ./.release && rm ./package.tgz", + "clean": "rimraf ./dist-* && rimraf *.tsbuildinfo || exit 0", + "lint": "eslint -c ../../.eslintrc.js \"src/**/*.ts\"", + "format": "prettier --config ../../prettier.config.js --ignore-path ../.prettierignore --write \"**/*.{ts,md,json}\"", + "test": "yarn g:vitest run", + "test:watch": "yarn g:vitest watch" + }, + "author": { + "name": "AWS SDK for JavaScript Team", + "email": "", + "url": "https://aws.amazon.com/javascript/" + }, + "license": "Apache-2.0", + "main": "./dist-cjs/index.js", + "module": "./dist-es/index.js", + "types": "./dist-types/index.d.ts", + "dependencies": { + "tslib": "^2.6.2" + }, + "devDependencies": { + "@types/node": "^18.11.9", + "concurrently": "7.0.0", + "downlevel-dts": "0.10.1", + "rimraf": "3.0.2", + "typedoc": "0.23.23" + }, + "engines": { + "node": ">=18.0.0" + }, + "typesVersions": { + "<4.0": { + "dist-types/*": [ + "dist-types/ts3.4/*" + ] + } + }, + "files": [ + "dist-*/**" + ], + "homepage": "https://github.com/awslabs/smithy-typescript/tree/main/packages/util-config-provider", + "repository": { + "type": "git", + "url": "https://github.com/awslabs/smithy-typescript.git", + "directory": "packages/util-config-provider" + }, + "typedoc": { + "entryPoint": "src/index.ts" + }, + "publishConfig": { + "directory": ".release/package" + } +} \ No newline at end of file diff --git a/amplify/functions/downloadDocument/node_modules/@smithy/util-defaults-mode-browser/LICENSE b/amplify/functions/downloadDocument/node_modules/@smithy/util-defaults-mode-browser/LICENSE new file mode 100644 index 0000000..dd65ae0 --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@smithy/util-defaults-mode-browser/LICENSE @@ -0,0 +1,201 @@ + Apache License + Version 2.0, January 
2004 + http://www.apache.org/licenses/ + + TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION + + 1. Definitions. + + "License" shall mean the terms and conditions for use, reproduction, + and distribution as defined by Sections 1 through 9 of this document. + + "Licensor" shall mean the copyright owner or entity authorized by + the copyright owner that is granting the License. + + "Legal Entity" shall mean the union of the acting entity and all + other entities that control, are controlled by, or are under common + control with that entity. For the purposes of this definition, + "control" means (i) the power, direct or indirect, to cause the + direction or management of such entity, whether by contract or + otherwise, or (ii) ownership of fifty percent (50%) or more of the + outstanding shares, or (iii) beneficial ownership of such entity. + + "You" (or "Your") shall mean an individual or Legal Entity + exercising permissions granted by this License. + + "Source" form shall mean the preferred form for making modifications, + including but not limited to software source code, documentation + source, and configuration files. + + "Object" form shall mean any form resulting from mechanical + transformation or translation of a Source form, including but + not limited to compiled object code, generated documentation, + and conversions to other media types. + + "Work" shall mean the work of authorship, whether in Source or + Object form, made available under the License, as indicated by a + copyright notice that is included in or attached to the work + (an example is provided in the Appendix below). + + "Derivative Works" shall mean any work, whether in Source or Object + form, that is based on (or derived from) the Work and for which the + editorial revisions, annotations, elaborations, or other modifications + represent, as a whole, an original work of authorship. 
For the purposes + of this License, Derivative Works shall not include works that remain + separable from, or merely link (or bind by name) to the interfaces of, + the Work and Derivative Works thereof. + + "Contribution" shall mean any work of authorship, including + the original version of the Work and any modifications or additions + to that Work or Derivative Works thereof, that is intentionally + submitted to Licensor for inclusion in the Work by the copyright owner + or by an individual or Legal Entity authorized to submit on behalf of + the copyright owner. For the purposes of this definition, "submitted" + means any form of electronic, verbal, or written communication sent + to the Licensor or its representatives, including but not limited to + communication on electronic mailing lists, source code control systems, + and issue tracking systems that are managed by, or on behalf of, the + Licensor for the purpose of discussing and improving the Work, but + excluding communication that is conspicuously marked or otherwise + designated in writing by the copyright owner as "Not a Contribution." + + "Contributor" shall mean Licensor and any individual or Legal Entity + on behalf of whom a Contribution has been received by Licensor and + subsequently incorporated within the Work. + + 2. Grant of Copyright License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + copyright license to reproduce, prepare Derivative Works of, + publicly display, publicly perform, sublicense, and distribute the + Work and such Derivative Works in Source or Object form. + + 3. Grant of Patent License. 
Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + (except as stated in this section) patent license to make, have made, + use, offer to sell, sell, import, and otherwise transfer the Work, + where such license applies only to those patent claims licensable + by such Contributor that are necessarily infringed by their + Contribution(s) alone or by combination of their Contribution(s) + with the Work to which such Contribution(s) was submitted. If You + institute patent litigation against any entity (including a + cross-claim or counterclaim in a lawsuit) alleging that the Work + or a Contribution incorporated within the Work constitutes direct + or contributory patent infringement, then any patent licenses + granted to You under this License for that Work shall terminate + as of the date such litigation is filed. + + 4. Redistribution. You may reproduce and distribute copies of the + Work or Derivative Works thereof in any medium, with or without + modifications, and in Source or Object form, provided that You + meet the following conditions: + + (a) You must give any other recipients of the Work or + Derivative Works a copy of this License; and + + (b) You must cause any modified files to carry prominent notices + stating that You changed the files; and + + (c) You must retain, in the Source form of any Derivative Works + that You distribute, all copyright, patent, trademark, and + attribution notices from the Source form of the Work, + excluding those notices that do not pertain to any part of + the Derivative Works; and + + (d) If the Work includes a "NOTICE" text file as part of its + distribution, then any Derivative Works that You distribute must + include a readable copy of the attribution notices contained + within such NOTICE file, excluding those notices that do not + pertain to any part of the Derivative Works, in at least one + of 
the following places: within a NOTICE text file distributed + as part of the Derivative Works; within the Source form or + documentation, if provided along with the Derivative Works; or, + within a display generated by the Derivative Works, if and + wherever such third-party notices normally appear. The contents + of the NOTICE file are for informational purposes only and + do not modify the License. You may add Your own attribution + notices within Derivative Works that You distribute, alongside + or as an addendum to the NOTICE text from the Work, provided + that such additional attribution notices cannot be construed + as modifying the License. + + You may add Your own copyright statement to Your modifications and + may provide additional or different license terms and conditions + for use, reproduction, or distribution of Your modifications, or + for any such Derivative Works as a whole, provided Your use, + reproduction, and distribution of the Work otherwise complies with + the conditions stated in this License. + + 5. Submission of Contributions. Unless You explicitly state otherwise, + any Contribution intentionally submitted for inclusion in the Work + by You to the Licensor shall be under the terms and conditions of + this License, without any additional terms or conditions. + Notwithstanding the above, nothing herein shall supersede or modify + the terms of any separate license agreement you may have executed + with Licensor regarding such Contributions. + + 6. Trademarks. This License does not grant permission to use the trade + names, trademarks, service marks, or product names of the Licensor, + except as required for reasonable and customary use in describing the + origin of the Work and reproducing the content of the NOTICE file. + + 7. Disclaimer of Warranty. 
Unless required by applicable law or + agreed to in writing, Licensor provides the Work (and each + Contributor provides its Contributions) on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or + implied, including, without limitation, any warranties or conditions + of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A + PARTICULAR PURPOSE. You are solely responsible for determining the + appropriateness of using or redistributing the Work and assume any + risks associated with Your exercise of permissions under this License. + + 8. Limitation of Liability. In no event and under no legal theory, + whether in tort (including negligence), contract, or otherwise, + unless required by applicable law (such as deliberate and grossly + negligent acts) or agreed to in writing, shall any Contributor be + liable to You for damages, including any direct, indirect, special, + incidental, or consequential damages of any character arising as a + result of this License or out of the use or inability to use the + Work (including but not limited to damages for loss of goodwill, + work stoppage, computer failure or malfunction, or any and all + other commercial damages or losses), even if such Contributor + has been advised of the possibility of such damages. + + 9. Accepting Warranty or Additional Liability. While redistributing + the Work or Derivative Works thereof, You may choose to offer, + and charge a fee for, acceptance of support, warranty, indemnity, + or other liability obligations and/or rights consistent with this + License. However, in accepting such obligations, You may act only + on Your own behalf and on Your sole responsibility, not on behalf + of any other Contributor, and only if You agree to indemnify, + defend, and hold each Contributor harmless for any liability + incurred by, or claims asserted against, such Contributor by reason + of your accepting any such warranty or additional liability. 
+ + END OF TERMS AND CONDITIONS + + APPENDIX: How to apply the Apache License to your work. + + To apply the Apache License to your work, attach the following + boilerplate notice, with the fields enclosed by brackets "{}" + replaced with your own identifying information. (Don't include + the brackets!) The text should be enclosed in the appropriate + comment syntax for the file format. We also recommend that a + file or class name and description of purpose be included on the + same "printed page" as the copyright notice for easier + identification within third-party archives. + + Copyright 2018-2020 Amazon.com, Inc. or its affiliates. All Rights Reserved. + + Licensed under the Apache License, Version 2.0 (the "License"); + you may not use this file except in compliance with the License. + You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + + Unless required by applicable law or agreed to in writing, software + distributed under the License is distributed on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + See the License for the specific language governing permissions and + limitations under the License. 
diff --git a/amplify/functions/downloadDocument/node_modules/@smithy/util-defaults-mode-browser/README.md b/amplify/functions/downloadDocument/node_modules/@smithy/util-defaults-mode-browser/README.md new file mode 100644 index 0000000..f2f1cc0 --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@smithy/util-defaults-mode-browser/README.md @@ -0,0 +1,10 @@ +# @smithy/util-defaults-mode-browser + +[![NPM version](https://img.shields.io/npm/v/@smithy/util-defaults-mode-browser/latest.svg)](https://www.npmjs.com/package/@smithy/util-defaults-mode-browser) +[![NPM downloads](https://img.shields.io/npm/dm/@smithy/util-defaults-mode-browser.svg)](https://www.npmjs.com/package/@smithy/util-defaults-mode-browser) + +> An internal package + +## Usage + +You probably shouldn't, at least directly. diff --git a/amplify/functions/downloadDocument/node_modules/@smithy/util-defaults-mode-browser/dist-cjs/constants.js b/amplify/functions/downloadDocument/node_modules/@smithy/util-defaults-mode-browser/dist-cjs/constants.js new file mode 100644 index 0000000..3733506 --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@smithy/util-defaults-mode-browser/dist-cjs/constants.js @@ -0,0 +1,4 @@ +"use strict"; +Object.defineProperty(exports, "__esModule", { value: true }); +exports.DEFAULTS_MODE_OPTIONS = void 0; +exports.DEFAULTS_MODE_OPTIONS = ["in-region", "cross-region", "mobile", "standard", "legacy"]; diff --git a/amplify/functions/downloadDocument/node_modules/@smithy/util-defaults-mode-browser/dist-cjs/index.js b/amplify/functions/downloadDocument/node_modules/@smithy/util-defaults-mode-browser/dist-cjs/index.js new file mode 100644 index 0000000..4624ef1 --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@smithy/util-defaults-mode-browser/dist-cjs/index.js @@ -0,0 +1,25 @@ +var __defProp = Object.defineProperty; +var __getOwnPropDesc = Object.getOwnPropertyDescriptor; +var __getOwnPropNames = Object.getOwnPropertyNames; +var 
__hasOwnProp = Object.prototype.hasOwnProperty; +var __copyProps = (to, from, except, desc) => { + if (from && typeof from === "object" || typeof from === "function") { + for (let key of __getOwnPropNames(from)) + if (!__hasOwnProp.call(to, key) && key !== except) + __defProp(to, key, { get: () => from[key], enumerable: !(desc = __getOwnPropDesc(from, key)) || desc.enumerable }); + } + return to; +}; +var __reExport = (target, mod, secondTarget) => (__copyProps(target, mod, "default"), secondTarget && __copyProps(secondTarget, mod, "default")); +var __toCommonJS = (mod) => __copyProps(__defProp({}, "__esModule", { value: true }), mod); + +// src/index.ts +var src_exports = {}; +module.exports = __toCommonJS(src_exports); +__reExport(src_exports, require("././resolveDefaultsModeConfig"), module.exports); +// Annotate the CommonJS export names for ESM import in node: + +0 && (module.exports = { + resolveDefaultsModeConfig +}); + diff --git a/amplify/functions/downloadDocument/node_modules/@smithy/util-defaults-mode-browser/dist-cjs/resolveDefaultsModeConfig.js b/amplify/functions/downloadDocument/node_modules/@smithy/util-defaults-mode-browser/dist-cjs/resolveDefaultsModeConfig.js new file mode 100644 index 0000000..f23368c --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@smithy/util-defaults-mode-browser/dist-cjs/resolveDefaultsModeConfig.js @@ -0,0 +1,33 @@ +"use strict"; +Object.defineProperty(exports, "__esModule", { value: true }); +exports.resolveDefaultsModeConfig = void 0; +const tslib_1 = require("tslib"); +const property_provider_1 = require("@smithy/property-provider"); +const bowser_1 = tslib_1.__importDefault(require("bowser")); +const constants_1 = require("./constants"); +const resolveDefaultsModeConfig = ({ defaultsMode, } = {}) => (0, property_provider_1.memoize)(async () => { + const mode = typeof defaultsMode === "function" ? await defaultsMode() : defaultsMode; + switch (mode === null || mode === void 0 ? 
void 0 : mode.toLowerCase()) { + case "auto": + return Promise.resolve(isMobileBrowser() ? "mobile" : "standard"); + case "mobile": + case "in-region": + case "cross-region": + case "standard": + case "legacy": + return Promise.resolve(mode === null || mode === void 0 ? void 0 : mode.toLocaleLowerCase()); + case undefined: + return Promise.resolve("legacy"); + default: + throw new Error(`Invalid parameter for "defaultsMode", expect ${constants_1.DEFAULTS_MODE_OPTIONS.join(", ")}, got ${mode}`); + } +}); +exports.resolveDefaultsModeConfig = resolveDefaultsModeConfig; +const isMobileBrowser = () => { + var _a, _b; + const parsedUA = typeof window !== "undefined" && ((_a = window === null || window === void 0 ? void 0 : window.navigator) === null || _a === void 0 ? void 0 : _a.userAgent) + ? bowser_1.default.parse(window.navigator.userAgent) + : undefined; + const platform = (_b = parsedUA === null || parsedUA === void 0 ? void 0 : parsedUA.platform) === null || _b === void 0 ? void 0 : _b.type; + return platform === "tablet" || platform === "mobile"; +}; diff --git a/amplify/functions/downloadDocument/node_modules/@smithy/util-defaults-mode-browser/dist-cjs/resolveDefaultsModeConfig.native.js b/amplify/functions/downloadDocument/node_modules/@smithy/util-defaults-mode-browser/dist-cjs/resolveDefaultsModeConfig.native.js new file mode 100644 index 0000000..fc6be33 --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@smithy/util-defaults-mode-browser/dist-cjs/resolveDefaultsModeConfig.native.js @@ -0,0 +1,23 @@ +"use strict"; +Object.defineProperty(exports, "__esModule", { value: true }); +exports.resolveDefaultsModeConfig = void 0; +const property_provider_1 = require("@smithy/property-provider"); +const constants_1 = require("./constants"); +const resolveDefaultsModeConfig = ({ defaultsMode, } = {}) => (0, property_provider_1.memoize)(async () => { + const mode = typeof defaultsMode === "function" ? 
await defaultsMode() : defaultsMode; + switch (mode === null || mode === void 0 ? void 0 : mode.toLowerCase()) { + case "auto": + return Promise.resolve("mobile"); + case "mobile": + case "in-region": + case "cross-region": + case "standard": + case "legacy": + return Promise.resolve(mode === null || mode === void 0 ? void 0 : mode.toLocaleLowerCase()); + case undefined: + return Promise.resolve("legacy"); + default: + throw new Error(`Invalid parameter for "defaultsMode", expect ${constants_1.DEFAULTS_MODE_OPTIONS.join(", ")}, got ${mode}`); + } +}); +exports.resolveDefaultsModeConfig = resolveDefaultsModeConfig; diff --git a/amplify/functions/downloadDocument/node_modules/@smithy/util-defaults-mode-browser/dist-es/constants.js b/amplify/functions/downloadDocument/node_modules/@smithy/util-defaults-mode-browser/dist-es/constants.js new file mode 100644 index 0000000..d58e11f --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@smithy/util-defaults-mode-browser/dist-es/constants.js @@ -0,0 +1 @@ +export const DEFAULTS_MODE_OPTIONS = ["in-region", "cross-region", "mobile", "standard", "legacy"]; diff --git a/amplify/functions/downloadDocument/node_modules/@smithy/util-defaults-mode-browser/dist-es/index.js b/amplify/functions/downloadDocument/node_modules/@smithy/util-defaults-mode-browser/dist-es/index.js new file mode 100644 index 0000000..05aa818 --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@smithy/util-defaults-mode-browser/dist-es/index.js @@ -0,0 +1 @@ +export * from "./resolveDefaultsModeConfig"; diff --git a/amplify/functions/downloadDocument/node_modules/@smithy/util-defaults-mode-browser/dist-es/resolveDefaultsModeConfig.js b/amplify/functions/downloadDocument/node_modules/@smithy/util-defaults-mode-browser/dist-es/resolveDefaultsModeConfig.js new file mode 100644 index 0000000..940ab63 --- /dev/null +++ 
b/amplify/functions/downloadDocument/node_modules/@smithy/util-defaults-mode-browser/dist-es/resolveDefaultsModeConfig.js @@ -0,0 +1,27 @@ +import { memoize } from "@smithy/property-provider"; +import bowser from "bowser"; +import { DEFAULTS_MODE_OPTIONS } from "./constants"; +export const resolveDefaultsModeConfig = ({ defaultsMode, } = {}) => memoize(async () => { + const mode = typeof defaultsMode === "function" ? await defaultsMode() : defaultsMode; + switch (mode?.toLowerCase()) { + case "auto": + return Promise.resolve(isMobileBrowser() ? "mobile" : "standard"); + case "mobile": + case "in-region": + case "cross-region": + case "standard": + case "legacy": + return Promise.resolve(mode?.toLocaleLowerCase()); + case undefined: + return Promise.resolve("legacy"); + default: + throw new Error(`Invalid parameter for "defaultsMode", expect ${DEFAULTS_MODE_OPTIONS.join(", ")}, got ${mode}`); + } +}); +const isMobileBrowser = () => { + const parsedUA = typeof window !== "undefined" && window?.navigator?.userAgent + ? bowser.parse(window.navigator.userAgent) + : undefined; + const platform = parsedUA?.platform?.type; + return platform === "tablet" || platform === "mobile"; +}; diff --git a/amplify/functions/downloadDocument/node_modules/@smithy/util-defaults-mode-browser/dist-es/resolveDefaultsModeConfig.native.js b/amplify/functions/downloadDocument/node_modules/@smithy/util-defaults-mode-browser/dist-es/resolveDefaultsModeConfig.native.js new file mode 100644 index 0000000..3164191 --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@smithy/util-defaults-mode-browser/dist-es/resolveDefaultsModeConfig.native.js @@ -0,0 +1,19 @@ +import { memoize } from "@smithy/property-provider"; +import { DEFAULTS_MODE_OPTIONS } from "./constants"; +export const resolveDefaultsModeConfig = ({ defaultsMode, } = {}) => memoize(async () => { + const mode = typeof defaultsMode === "function" ? 
await defaultsMode() : defaultsMode; + switch (mode?.toLowerCase()) { + case "auto": + return Promise.resolve("mobile"); + case "mobile": + case "in-region": + case "cross-region": + case "standard": + case "legacy": + return Promise.resolve(mode?.toLocaleLowerCase()); + case undefined: + return Promise.resolve("legacy"); + default: + throw new Error(`Invalid parameter for "defaultsMode", expect ${DEFAULTS_MODE_OPTIONS.join(", ")}, got ${mode}`); + } +}); diff --git a/amplify/functions/downloadDocument/node_modules/@smithy/util-defaults-mode-browser/dist-types/constants.d.ts b/amplify/functions/downloadDocument/node_modules/@smithy/util-defaults-mode-browser/dist-types/constants.d.ts new file mode 100644 index 0000000..18dbe6c --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@smithy/util-defaults-mode-browser/dist-types/constants.d.ts @@ -0,0 +1,12 @@ +import type { DefaultsMode } from "@smithy/smithy-client"; +import type { Provider } from "@smithy/types"; +/** + * @internal + */ +export declare const DEFAULTS_MODE_OPTIONS: string[]; +/** + * @internal + */ +export interface ResolveDefaultsModeConfigOptions { + defaultsMode?: DefaultsMode | Provider; +} diff --git a/amplify/functions/downloadDocument/node_modules/@smithy/util-defaults-mode-browser/dist-types/index.d.ts b/amplify/functions/downloadDocument/node_modules/@smithy/util-defaults-mode-browser/dist-types/index.d.ts new file mode 100644 index 0000000..003de26 --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@smithy/util-defaults-mode-browser/dist-types/index.d.ts @@ -0,0 +1,4 @@ +/** + * @internal + */ +export * from "./resolveDefaultsModeConfig"; diff --git a/amplify/functions/downloadDocument/node_modules/@smithy/util-defaults-mode-browser/dist-types/resolveDefaultsModeConfig.d.ts b/amplify/functions/downloadDocument/node_modules/@smithy/util-defaults-mode-browser/dist-types/resolveDefaultsModeConfig.d.ts new file mode 100644 index 0000000..e4cc1b7 --- /dev/null +++ 
b/amplify/functions/downloadDocument/node_modules/@smithy/util-defaults-mode-browser/dist-types/resolveDefaultsModeConfig.d.ts @@ -0,0 +1,17 @@ +import type { DefaultsMode, ResolvedDefaultsMode } from "@smithy/smithy-client"; +import type { Provider } from "@smithy/types"; +/** + * @internal + */ +export interface ResolveDefaultsModeConfigOptions { + defaultsMode?: DefaultsMode | Provider; +} +/** + * Validate the defaultsMode configuration. If the value is set to "auto", it + * resolves the value to "mobile" if the app is running in a mobile browser, + * otherwise it resolves to "standard". + * + * @default "legacy" + * @internal + */ +export declare const resolveDefaultsModeConfig: ({ defaultsMode, }?: ResolveDefaultsModeConfigOptions) => Provider; diff --git a/amplify/functions/downloadDocument/node_modules/@smithy/util-defaults-mode-browser/dist-types/resolveDefaultsModeConfig.native.d.ts b/amplify/functions/downloadDocument/node_modules/@smithy/util-defaults-mode-browser/dist-types/resolveDefaultsModeConfig.native.d.ts new file mode 100644 index 0000000..6c48ad8 --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@smithy/util-defaults-mode-browser/dist-types/resolveDefaultsModeConfig.native.d.ts @@ -0,0 +1,16 @@ +import type { DefaultsMode, ResolvedDefaultsMode } from "@smithy/smithy-client"; +import type { Provider } from "@smithy/types"; +/** + * @internal + */ +export interface ResolveDefaultsModeConfigOptions { + defaultsMode?: DefaultsMode | Provider; +} +/** + * Validate the defaultsMode configuration. If the value is set to "auto", it + * resolves the value to "mobile". 
+ * + * @default "legacy" + * @internal + */ +export declare const resolveDefaultsModeConfig: ({ defaultsMode, }?: ResolveDefaultsModeConfigOptions) => Provider; diff --git a/amplify/functions/downloadDocument/node_modules/@smithy/util-defaults-mode-browser/dist-types/ts3.4/constants.d.ts b/amplify/functions/downloadDocument/node_modules/@smithy/util-defaults-mode-browser/dist-types/ts3.4/constants.d.ts new file mode 100644 index 0000000..fc88602 --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@smithy/util-defaults-mode-browser/dist-types/ts3.4/constants.d.ts @@ -0,0 +1,12 @@ +import { DefaultsMode } from "@smithy/smithy-client"; +import { Provider } from "@smithy/types"; +/** + * @internal + */ +export declare const DEFAULTS_MODE_OPTIONS: string[]; +/** + * @internal + */ +export interface ResolveDefaultsModeConfigOptions { + defaultsMode?: DefaultsMode | Provider; +} diff --git a/amplify/functions/downloadDocument/node_modules/@smithy/util-defaults-mode-browser/dist-types/ts3.4/index.d.ts b/amplify/functions/downloadDocument/node_modules/@smithy/util-defaults-mode-browser/dist-types/ts3.4/index.d.ts new file mode 100644 index 0000000..4ab48b4 --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@smithy/util-defaults-mode-browser/dist-types/ts3.4/index.d.ts @@ -0,0 +1,4 @@ +/** + * @internal + */ +export * from "./resolveDefaultsModeConfig"; diff --git a/amplify/functions/downloadDocument/node_modules/@smithy/util-defaults-mode-browser/dist-types/ts3.4/resolveDefaultsModeConfig.d.ts b/amplify/functions/downloadDocument/node_modules/@smithy/util-defaults-mode-browser/dist-types/ts3.4/resolveDefaultsModeConfig.d.ts new file mode 100644 index 0000000..d468478 --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@smithy/util-defaults-mode-browser/dist-types/ts3.4/resolveDefaultsModeConfig.d.ts @@ -0,0 +1,17 @@ +import { DefaultsMode, ResolvedDefaultsMode } from "@smithy/smithy-client"; +import { Provider } from 
"@smithy/types"; +/** + * @internal + */ +export interface ResolveDefaultsModeConfigOptions { + defaultsMode?: DefaultsMode | Provider; +} +/** + * Validate the defaultsMode configuration. If the value is set to "auto", it + * resolves the value to "mobile" if the app is running in a mobile browser, + * otherwise it resolves to "standard". + * + * @default "legacy" + * @internal + */ +export declare const resolveDefaultsModeConfig: ({ defaultsMode, }?: ResolveDefaultsModeConfigOptions) => Provider; diff --git a/amplify/functions/downloadDocument/node_modules/@smithy/util-defaults-mode-browser/dist-types/ts3.4/resolveDefaultsModeConfig.native.d.ts b/amplify/functions/downloadDocument/node_modules/@smithy/util-defaults-mode-browser/dist-types/ts3.4/resolveDefaultsModeConfig.native.d.ts new file mode 100644 index 0000000..86fe4b7 --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@smithy/util-defaults-mode-browser/dist-types/ts3.4/resolveDefaultsModeConfig.native.d.ts @@ -0,0 +1,16 @@ +import { DefaultsMode, ResolvedDefaultsMode } from "@smithy/smithy-client"; +import { Provider } from "@smithy/types"; +/** + * @internal + */ +export interface ResolveDefaultsModeConfigOptions { + defaultsMode?: DefaultsMode | Provider; +} +/** + * Validate the defaultsMode configuration. If the value is set to "auto", it + * resolves the value to "mobile". 
+ * + * @default "legacy" + * @internal + */ +export declare const resolveDefaultsModeConfig: ({ defaultsMode, }?: ResolveDefaultsModeConfigOptions) => Provider; diff --git a/amplify/functions/downloadDocument/node_modules/@smithy/util-defaults-mode-browser/package.json b/amplify/functions/downloadDocument/node_modules/@smithy/util-defaults-mode-browser/package.json new file mode 100644 index 0000000..f06f246 --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@smithy/util-defaults-mode-browser/package.json @@ -0,0 +1,66 @@ +{ + "name": "@smithy/util-defaults-mode-browser", + "version": "4.0.10", + "scripts": { + "build": "concurrently 'yarn:build:cjs' 'yarn:build:es' 'yarn:build:types && yarn build:types:downlevel'", + "build:cjs": "node ../../scripts/inline util-defaults-mode-browser", + "build:es": "yarn g:tsc -p tsconfig.es.json", + "build:types": "yarn g:tsc -p tsconfig.types.json", + "build:types:downlevel": "rimraf dist-types/ts3.4 && downlevel-dts dist-types dist-types/ts3.4", + "stage-release": "rimraf ./.release && yarn pack && mkdir ./.release && tar zxvf ./package.tgz --directory ./.release && rm ./package.tgz", + "clean": "rimraf ./dist-* && rimraf *.tsbuildinfo || exit 0", + "lint": "eslint -c ../../.eslintrc.js \"src/**/*.ts\"", + "format": "prettier --config ../../prettier.config.js --ignore-path ../../.prettierignore --write \"**/*.{ts,md,json}\"", + "test": "yarn g:vitest run", + "test:watch": "yarn g:vitest watch" + }, + "main": "./dist-cjs/index.js", + "module": "./dist-es/index.js", + "types": "./dist-types/index.d.ts", + "author": { + "name": "AWS SDK for JavaScript Team", + "url": "https://aws.amazon.com/javascript/" + }, + "license": "Apache-2.0", + "dependencies": { + "@smithy/property-provider": "^4.0.2", + "@smithy/smithy-client": "^4.2.2", + "@smithy/types": "^4.2.0", + "bowser": "^2.11.0", + "tslib": "^2.6.2" + }, + "devDependencies": { + "@types/node": "^18.11.9", + "concurrently": "7.0.0", + "downlevel-dts": "0.10.1", 
+ "rimraf": "3.0.2", + "typedoc": "0.23.23" + }, + "engines": { + "node": ">=18.0.0" + }, + "typesVersions": { + "<4.0": { + "dist-types/*": [ + "dist-types/ts3.4/*" + ] + } + }, + "files": [ + "dist-*/**" + ], + "react-native": {}, + "browser": {}, + "homepage": "https://github.com/smithy-lang/smithy-typescript/tree/main/packages/util-defaults-mode-node", + "repository": { + "type": "git", + "url": "https://github.com/smithy-lang/smithy-typescript.git", + "directory": "packages/util-defaults-mode-node" + }, + "typedoc": { + "entryPoint": "src/index.ts" + }, + "publishConfig": { + "directory": ".release/package" + } +} \ No newline at end of file diff --git a/amplify/functions/downloadDocument/node_modules/@smithy/util-defaults-mode-node/LICENSE b/amplify/functions/downloadDocument/node_modules/@smithy/util-defaults-mode-node/LICENSE new file mode 100644 index 0000000..dd65ae0 --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@smithy/util-defaults-mode-node/LICENSE @@ -0,0 +1,201 @@ + Apache License + Version 2.0, January 2004 + http://www.apache.org/licenses/ + + TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION + + 1. Definitions. + + "License" shall mean the terms and conditions for use, reproduction, + and distribution as defined by Sections 1 through 9 of this document. + + "Licensor" shall mean the copyright owner or entity authorized by + the copyright owner that is granting the License. + + "Legal Entity" shall mean the union of the acting entity and all + other entities that control, are controlled by, or are under common + control with that entity. For the purposes of this definition, + "control" means (i) the power, direct or indirect, to cause the + direction or management of such entity, whether by contract or + otherwise, or (ii) ownership of fifty percent (50%) or more of the + outstanding shares, or (iii) beneficial ownership of such entity. 
+ + "You" (or "Your") shall mean an individual or Legal Entity + exercising permissions granted by this License. + + "Source" form shall mean the preferred form for making modifications, + including but not limited to software source code, documentation + source, and configuration files. + + "Object" form shall mean any form resulting from mechanical + transformation or translation of a Source form, including but + not limited to compiled object code, generated documentation, + and conversions to other media types. + + "Work" shall mean the work of authorship, whether in Source or + Object form, made available under the License, as indicated by a + copyright notice that is included in or attached to the work + (an example is provided in the Appendix below). + + "Derivative Works" shall mean any work, whether in Source or Object + form, that is based on (or derived from) the Work and for which the + editorial revisions, annotations, elaborations, or other modifications + represent, as a whole, an original work of authorship. For the purposes + of this License, Derivative Works shall not include works that remain + separable from, or merely link (or bind by name) to the interfaces of, + the Work and Derivative Works thereof. + + "Contribution" shall mean any work of authorship, including + the original version of the Work and any modifications or additions + to that Work or Derivative Works thereof, that is intentionally + submitted to Licensor for inclusion in the Work by the copyright owner + or by an individual or Legal Entity authorized to submit on behalf of + the copyright owner. 
For the purposes of this definition, "submitted" + means any form of electronic, verbal, or written communication sent + to the Licensor or its representatives, including but not limited to + communication on electronic mailing lists, source code control systems, + and issue tracking systems that are managed by, or on behalf of, the + Licensor for the purpose of discussing and improving the Work, but + excluding communication that is conspicuously marked or otherwise + designated in writing by the copyright owner as "Not a Contribution." + + "Contributor" shall mean Licensor and any individual or Legal Entity + on behalf of whom a Contribution has been received by Licensor and + subsequently incorporated within the Work. + + 2. Grant of Copyright License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + copyright license to reproduce, prepare Derivative Works of, + publicly display, publicly perform, sublicense, and distribute the + Work and such Derivative Works in Source or Object form. + + 3. Grant of Patent License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + (except as stated in this section) patent license to make, have made, + use, offer to sell, sell, import, and otherwise transfer the Work, + where such license applies only to those patent claims licensable + by such Contributor that are necessarily infringed by their + Contribution(s) alone or by combination of their Contribution(s) + with the Work to which such Contribution(s) was submitted. 
If You + institute patent litigation against any entity (including a + cross-claim or counterclaim in a lawsuit) alleging that the Work + or a Contribution incorporated within the Work constitutes direct + or contributory patent infringement, then any patent licenses + granted to You under this License for that Work shall terminate + as of the date such litigation is filed. + + 4. Redistribution. You may reproduce and distribute copies of the + Work or Derivative Works thereof in any medium, with or without + modifications, and in Source or Object form, provided that You + meet the following conditions: + + (a) You must give any other recipients of the Work or + Derivative Works a copy of this License; and + + (b) You must cause any modified files to carry prominent notices + stating that You changed the files; and + + (c) You must retain, in the Source form of any Derivative Works + that You distribute, all copyright, patent, trademark, and + attribution notices from the Source form of the Work, + excluding those notices that do not pertain to any part of + the Derivative Works; and + + (d) If the Work includes a "NOTICE" text file as part of its + distribution, then any Derivative Works that You distribute must + include a readable copy of the attribution notices contained + within such NOTICE file, excluding those notices that do not + pertain to any part of the Derivative Works, in at least one + of the following places: within a NOTICE text file distributed + as part of the Derivative Works; within the Source form or + documentation, if provided along with the Derivative Works; or, + within a display generated by the Derivative Works, if and + wherever such third-party notices normally appear. The contents + of the NOTICE file are for informational purposes only and + do not modify the License. 
You may add Your own attribution + notices within Derivative Works that You distribute, alongside + or as an addendum to the NOTICE text from the Work, provided + that such additional attribution notices cannot be construed + as modifying the License. + + You may add Your own copyright statement to Your modifications and + may provide additional or different license terms and conditions + for use, reproduction, or distribution of Your modifications, or + for any such Derivative Works as a whole, provided Your use, + reproduction, and distribution of the Work otherwise complies with + the conditions stated in this License. + + 5. Submission of Contributions. Unless You explicitly state otherwise, + any Contribution intentionally submitted for inclusion in the Work + by You to the Licensor shall be under the terms and conditions of + this License, without any additional terms or conditions. + Notwithstanding the above, nothing herein shall supersede or modify + the terms of any separate license agreement you may have executed + with Licensor regarding such Contributions. + + 6. Trademarks. This License does not grant permission to use the trade + names, trademarks, service marks, or product names of the Licensor, + except as required for reasonable and customary use in describing the + origin of the Work and reproducing the content of the NOTICE file. + + 7. Disclaimer of Warranty. Unless required by applicable law or + agreed to in writing, Licensor provides the Work (and each + Contributor provides its Contributions) on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or + implied, including, without limitation, any warranties or conditions + of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A + PARTICULAR PURPOSE. You are solely responsible for determining the + appropriateness of using or redistributing the Work and assume any + risks associated with Your exercise of permissions under this License. + + 8. 
Limitation of Liability. In no event and under no legal theory, + whether in tort (including negligence), contract, or otherwise, + unless required by applicable law (such as deliberate and grossly + negligent acts) or agreed to in writing, shall any Contributor be + liable to You for damages, including any direct, indirect, special, + incidental, or consequential damages of any character arising as a + result of this License or out of the use or inability to use the + Work (including but not limited to damages for loss of goodwill, + work stoppage, computer failure or malfunction, or any and all + other commercial damages or losses), even if such Contributor + has been advised of the possibility of such damages. + + 9. Accepting Warranty or Additional Liability. While redistributing + the Work or Derivative Works thereof, You may choose to offer, + and charge a fee for, acceptance of support, warranty, indemnity, + or other liability obligations and/or rights consistent with this + License. However, in accepting such obligations, You may act only + on Your own behalf and on Your sole responsibility, not on behalf + of any other Contributor, and only if You agree to indemnify, + defend, and hold each Contributor harmless for any liability + incurred by, or claims asserted against, such Contributor by reason + of your accepting any such warranty or additional liability. + + END OF TERMS AND CONDITIONS + + APPENDIX: How to apply the Apache License to your work. + + To apply the Apache License to your work, attach the following + boilerplate notice, with the fields enclosed by brackets "{}" + replaced with your own identifying information. (Don't include + the brackets!) The text should be enclosed in the appropriate + comment syntax for the file format. We also recommend that a + file or class name and description of purpose be included on the + same "printed page" as the copyright notice for easier + identification within third-party archives. 
+ + Copyright 2018-2020 Amazon.com, Inc. or its affiliates. All Rights Reserved. + + Licensed under the Apache License, Version 2.0 (the "License"); + you may not use this file except in compliance with the License. + You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + + Unless required by applicable law or agreed to in writing, software + distributed under the License is distributed on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + See the License for the specific language governing permissions and + limitations under the License. diff --git a/amplify/functions/downloadDocument/node_modules/@smithy/util-defaults-mode-node/README.md b/amplify/functions/downloadDocument/node_modules/@smithy/util-defaults-mode-node/README.md new file mode 100644 index 0000000..bfae0bd --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@smithy/util-defaults-mode-node/README.md @@ -0,0 +1,10 @@ +# @smithy/util-defaults-mode-node + +[![NPM version](https://img.shields.io/npm/v/@smithy/util-defaults-mode-node/latest.svg)](https://www.npmjs.com/package/@smithy/util-defaults-mode-node) +[![NPM downloads](https://img.shields.io/npm/dm/@smithy/util-defaults-mode-node.svg)](https://www.npmjs.com/package/@smithy/util-defaults-mode-node) + +> An internal package + +## Usage + +You probably shouldn't, at least directly. 
diff --git a/amplify/functions/downloadDocument/node_modules/@smithy/util-defaults-mode-node/dist-cjs/constants.js b/amplify/functions/downloadDocument/node_modules/@smithy/util-defaults-mode-node/dist-cjs/constants.js new file mode 100644 index 0000000..532e610 --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@smithy/util-defaults-mode-node/dist-cjs/constants.js @@ -0,0 +1 @@ +module.exports = require("./index.js"); \ No newline at end of file diff --git a/amplify/functions/downloadDocument/node_modules/@smithy/util-defaults-mode-node/dist-cjs/defaultsModeConfig.js b/amplify/functions/downloadDocument/node_modules/@smithy/util-defaults-mode-node/dist-cjs/defaultsModeConfig.js new file mode 100644 index 0000000..532e610 --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@smithy/util-defaults-mode-node/dist-cjs/defaultsModeConfig.js @@ -0,0 +1 @@ +module.exports = require("./index.js"); \ No newline at end of file diff --git a/amplify/functions/downloadDocument/node_modules/@smithy/util-defaults-mode-node/dist-cjs/index.js b/amplify/functions/downloadDocument/node_modules/@smithy/util-defaults-mode-node/dist-cjs/index.js new file mode 100644 index 0000000..ddd0684 --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@smithy/util-defaults-mode-node/dist-cjs/index.js @@ -0,0 +1,119 @@ +var __create = Object.create; +var __defProp = Object.defineProperty; +var __getOwnPropDesc = Object.getOwnPropertyDescriptor; +var __getOwnPropNames = Object.getOwnPropertyNames; +var __getProtoOf = Object.getPrototypeOf; +var __hasOwnProp = Object.prototype.hasOwnProperty; +var __name = (target, value) => __defProp(target, "name", { value, configurable: true }); +var __export = (target, all) => { + for (var name in all) + __defProp(target, name, { get: all[name], enumerable: true }); +}; +var __copyProps = (to, from, except, desc) => { + if (from && typeof from === "object" || typeof from === "function") { + for (let key of 
__getOwnPropNames(from)) + if (!__hasOwnProp.call(to, key) && key !== except) + __defProp(to, key, { get: () => from[key], enumerable: !(desc = __getOwnPropDesc(from, key)) || desc.enumerable }); + } + return to; +}; +var __toESM = (mod, isNodeMode, target) => (target = mod != null ? __create(__getProtoOf(mod)) : {}, __copyProps( + // If the importer is in node compatibility mode or this is not an ESM + // file that has been converted to a CommonJS file using a Babel- + // compatible transform (i.e. "__esModule" has not been set), then set + // "default" to the CommonJS "module.exports" for node compatibility. + isNodeMode || !mod || !mod.__esModule ? __defProp(target, "default", { value: mod, enumerable: true }) : target, + mod +)); +var __toCommonJS = (mod) => __copyProps(__defProp({}, "__esModule", { value: true }), mod); + +// src/index.ts +var src_exports = {}; +__export(src_exports, { + resolveDefaultsModeConfig: () => resolveDefaultsModeConfig +}); +module.exports = __toCommonJS(src_exports); + +// src/resolveDefaultsModeConfig.ts +var import_config_resolver = require("@smithy/config-resolver"); +var import_node_config_provider = require("@smithy/node-config-provider"); +var import_property_provider = require("@smithy/property-provider"); + +// src/constants.ts +var AWS_EXECUTION_ENV = "AWS_EXECUTION_ENV"; +var AWS_REGION_ENV = "AWS_REGION"; +var AWS_DEFAULT_REGION_ENV = "AWS_DEFAULT_REGION"; +var ENV_IMDS_DISABLED = "AWS_EC2_METADATA_DISABLED"; +var DEFAULTS_MODE_OPTIONS = ["in-region", "cross-region", "mobile", "standard", "legacy"]; +var IMDS_REGION_PATH = "/latest/meta-data/placement/region"; + +// src/defaultsModeConfig.ts +var AWS_DEFAULTS_MODE_ENV = "AWS_DEFAULTS_MODE"; +var AWS_DEFAULTS_MODE_CONFIG = "defaults_mode"; +var NODE_DEFAULTS_MODE_CONFIG_OPTIONS = { + environmentVariableSelector: (env) => { + return env[AWS_DEFAULTS_MODE_ENV]; + }, + configFileSelector: (profile) => { + return profile[AWS_DEFAULTS_MODE_CONFIG]; + }, + default: "legacy" +}; 
+ +// src/resolveDefaultsModeConfig.ts +var resolveDefaultsModeConfig = /* @__PURE__ */ __name(({ + region = (0, import_node_config_provider.loadConfig)(import_config_resolver.NODE_REGION_CONFIG_OPTIONS), + defaultsMode = (0, import_node_config_provider.loadConfig)(NODE_DEFAULTS_MODE_CONFIG_OPTIONS) +} = {}) => (0, import_property_provider.memoize)(async () => { + const mode = typeof defaultsMode === "function" ? await defaultsMode() : defaultsMode; + switch (mode?.toLowerCase()) { + case "auto": + return resolveNodeDefaultsModeAuto(region); + case "in-region": + case "cross-region": + case "mobile": + case "standard": + case "legacy": + return Promise.resolve(mode?.toLocaleLowerCase()); + case void 0: + return Promise.resolve("legacy"); + default: + throw new Error( + `Invalid parameter for "defaultsMode", expect ${DEFAULTS_MODE_OPTIONS.join(", ")}, got ${mode}` + ); + } +}), "resolveDefaultsModeConfig"); +var resolveNodeDefaultsModeAuto = /* @__PURE__ */ __name(async (clientRegion) => { + if (clientRegion) { + const resolvedRegion = typeof clientRegion === "function" ? await clientRegion() : clientRegion; + const inferredRegion = await inferPhysicalRegion(); + if (!inferredRegion) { + return "standard"; + } + if (resolvedRegion === inferredRegion) { + return "in-region"; + } else { + return "cross-region"; + } + } + return "standard"; +}, "resolveNodeDefaultsModeAuto"); +var inferPhysicalRegion = /* @__PURE__ */ __name(async () => { + if (process.env[AWS_EXECUTION_ENV] && (process.env[AWS_REGION_ENV] || process.env[AWS_DEFAULT_REGION_ENV])) { + return process.env[AWS_REGION_ENV] ?? 
process.env[AWS_DEFAULT_REGION_ENV]; + } + if (!process.env[ENV_IMDS_DISABLED]) { + try { + const { getInstanceMetadataEndpoint, httpRequest } = await Promise.resolve().then(() => __toESM(require("@smithy/credential-provider-imds"))); + const endpoint = await getInstanceMetadataEndpoint(); + return (await httpRequest({ ...endpoint, path: IMDS_REGION_PATH })).toString(); + } catch (e) { + } + } +}, "inferPhysicalRegion"); +// Annotate the CommonJS export names for ESM import in node: + +0 && (module.exports = { + resolveDefaultsModeConfig +}); + diff --git a/amplify/functions/downloadDocument/node_modules/@smithy/util-defaults-mode-node/dist-cjs/resolveDefaultsModeConfig.js b/amplify/functions/downloadDocument/node_modules/@smithy/util-defaults-mode-node/dist-cjs/resolveDefaultsModeConfig.js new file mode 100644 index 0000000..532e610 --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@smithy/util-defaults-mode-node/dist-cjs/resolveDefaultsModeConfig.js @@ -0,0 +1 @@ +module.exports = require("./index.js"); \ No newline at end of file diff --git a/amplify/functions/downloadDocument/node_modules/@smithy/util-defaults-mode-node/dist-es/constants.js b/amplify/functions/downloadDocument/node_modules/@smithy/util-defaults-mode-node/dist-es/constants.js new file mode 100644 index 0000000..69361a3 --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@smithy/util-defaults-mode-node/dist-es/constants.js @@ -0,0 +1,6 @@ +export const AWS_EXECUTION_ENV = "AWS_EXECUTION_ENV"; +export const AWS_REGION_ENV = "AWS_REGION"; +export const AWS_DEFAULT_REGION_ENV = "AWS_DEFAULT_REGION"; +export const ENV_IMDS_DISABLED = "AWS_EC2_METADATA_DISABLED"; +export const DEFAULTS_MODE_OPTIONS = ["in-region", "cross-region", "mobile", "standard", "legacy"]; +export const IMDS_REGION_PATH = "/latest/meta-data/placement/region"; diff --git a/amplify/functions/downloadDocument/node_modules/@smithy/util-defaults-mode-node/dist-es/defaultsModeConfig.js 
b/amplify/functions/downloadDocument/node_modules/@smithy/util-defaults-mode-node/dist-es/defaultsModeConfig.js new file mode 100644 index 0000000..f43b570 --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@smithy/util-defaults-mode-node/dist-es/defaultsModeConfig.js @@ -0,0 +1,11 @@ +const AWS_DEFAULTS_MODE_ENV = "AWS_DEFAULTS_MODE"; +const AWS_DEFAULTS_MODE_CONFIG = "defaults_mode"; +export const NODE_DEFAULTS_MODE_CONFIG_OPTIONS = { + environmentVariableSelector: (env) => { + return env[AWS_DEFAULTS_MODE_ENV]; + }, + configFileSelector: (profile) => { + return profile[AWS_DEFAULTS_MODE_CONFIG]; + }, + default: "legacy", +}; diff --git a/amplify/functions/downloadDocument/node_modules/@smithy/util-defaults-mode-node/dist-es/index.js b/amplify/functions/downloadDocument/node_modules/@smithy/util-defaults-mode-node/dist-es/index.js new file mode 100644 index 0000000..05aa818 --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@smithy/util-defaults-mode-node/dist-es/index.js @@ -0,0 +1 @@ +export * from "./resolveDefaultsModeConfig"; diff --git a/amplify/functions/downloadDocument/node_modules/@smithy/util-defaults-mode-node/dist-es/resolveDefaultsModeConfig.js b/amplify/functions/downloadDocument/node_modules/@smithy/util-defaults-mode-node/dist-es/resolveDefaultsModeConfig.js new file mode 100644 index 0000000..8c9d050 --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@smithy/util-defaults-mode-node/dist-es/resolveDefaultsModeConfig.js @@ -0,0 +1,52 @@ +import { NODE_REGION_CONFIG_OPTIONS } from "@smithy/config-resolver"; +import { loadConfig } from "@smithy/node-config-provider"; +import { memoize } from "@smithy/property-provider"; +import { AWS_DEFAULT_REGION_ENV, AWS_EXECUTION_ENV, AWS_REGION_ENV, DEFAULTS_MODE_OPTIONS, ENV_IMDS_DISABLED, IMDS_REGION_PATH, } from "./constants"; +import { NODE_DEFAULTS_MODE_CONFIG_OPTIONS } from "./defaultsModeConfig"; +export const resolveDefaultsModeConfig = ({ region = 
loadConfig(NODE_REGION_CONFIG_OPTIONS), defaultsMode = loadConfig(NODE_DEFAULTS_MODE_CONFIG_OPTIONS), } = {}) => memoize(async () => { + const mode = typeof defaultsMode === "function" ? await defaultsMode() : defaultsMode; + switch (mode?.toLowerCase()) { + case "auto": + return resolveNodeDefaultsModeAuto(region); + case "in-region": + case "cross-region": + case "mobile": + case "standard": + case "legacy": + return Promise.resolve(mode?.toLocaleLowerCase()); + case undefined: + return Promise.resolve("legacy"); + default: + throw new Error(`Invalid parameter for "defaultsMode", expect ${DEFAULTS_MODE_OPTIONS.join(", ")}, got ${mode}`); + } +}); +const resolveNodeDefaultsModeAuto = async (clientRegion) => { + if (clientRegion) { + const resolvedRegion = typeof clientRegion === "function" ? await clientRegion() : clientRegion; + const inferredRegion = await inferPhysicalRegion(); + if (!inferredRegion) { + return "standard"; + } + if (resolvedRegion === inferredRegion) { + return "in-region"; + } + else { + return "cross-region"; + } + } + return "standard"; +}; +const inferPhysicalRegion = async () => { + if (process.env[AWS_EXECUTION_ENV] && (process.env[AWS_REGION_ENV] || process.env[AWS_DEFAULT_REGION_ENV])) { + return process.env[AWS_REGION_ENV] ?? 
process.env[AWS_DEFAULT_REGION_ENV]; + } + if (!process.env[ENV_IMDS_DISABLED]) { + try { + const { getInstanceMetadataEndpoint, httpRequest } = await import("@smithy/credential-provider-imds"); + const endpoint = await getInstanceMetadataEndpoint(); + return (await httpRequest({ ...endpoint, path: IMDS_REGION_PATH })).toString(); + } + catch (e) { + } + } +}; diff --git a/amplify/functions/downloadDocument/node_modules/@smithy/util-defaults-mode-node/dist-types/constants.d.ts b/amplify/functions/downloadDocument/node_modules/@smithy/util-defaults-mode-node/dist-types/constants.d.ts new file mode 100644 index 0000000..a2db283 --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@smithy/util-defaults-mode-node/dist-types/constants.d.ts @@ -0,0 +1,24 @@ +/** + * @internal + */ +export declare const AWS_EXECUTION_ENV = "AWS_EXECUTION_ENV"; +/** + * @internal + */ +export declare const AWS_REGION_ENV = "AWS_REGION"; +/** + * @internal + */ +export declare const AWS_DEFAULT_REGION_ENV = "AWS_DEFAULT_REGION"; +/** + * @internal + */ +export declare const ENV_IMDS_DISABLED = "AWS_EC2_METADATA_DISABLED"; +/** + * @internal + */ +export declare const DEFAULTS_MODE_OPTIONS: string[]; +/** + * @internal + */ +export declare const IMDS_REGION_PATH = "/latest/meta-data/placement/region"; diff --git a/amplify/functions/downloadDocument/node_modules/@smithy/util-defaults-mode-node/dist-types/defaultsModeConfig.d.ts b/amplify/functions/downloadDocument/node_modules/@smithy/util-defaults-mode-node/dist-types/defaultsModeConfig.d.ts new file mode 100644 index 0000000..12f4dae --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@smithy/util-defaults-mode-node/dist-types/defaultsModeConfig.d.ts @@ -0,0 +1,6 @@ +import { LoadedConfigSelectors } from "@smithy/node-config-provider"; +import type { DefaultsMode } from "@smithy/smithy-client"; +/** + * @internal + */ +export declare const NODE_DEFAULTS_MODE_CONFIG_OPTIONS: LoadedConfigSelectors; diff --git 
a/amplify/functions/downloadDocument/node_modules/@smithy/util-defaults-mode-node/dist-types/index.d.ts b/amplify/functions/downloadDocument/node_modules/@smithy/util-defaults-mode-node/dist-types/index.d.ts new file mode 100644 index 0000000..003de26 --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@smithy/util-defaults-mode-node/dist-types/index.d.ts @@ -0,0 +1,4 @@ +/** + * @internal + */ +export * from "./resolveDefaultsModeConfig"; diff --git a/amplify/functions/downloadDocument/node_modules/@smithy/util-defaults-mode-node/dist-types/resolveDefaultsModeConfig.d.ts b/amplify/functions/downloadDocument/node_modules/@smithy/util-defaults-mode-node/dist-types/resolveDefaultsModeConfig.d.ts new file mode 100644 index 0000000..8f34371 --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@smithy/util-defaults-mode-node/dist-types/resolveDefaultsModeConfig.d.ts @@ -0,0 +1,17 @@ +import type { DefaultsMode, ResolvedDefaultsMode } from "@smithy/smithy-client"; +import type { Provider } from "@smithy/types"; +/** + * @internal + */ +export interface ResolveDefaultsModeConfigOptions { + defaultsMode?: DefaultsMode | Provider; + region?: string | Provider; +} +/** + * Validate the defaultsMode configuration. If the value is set to "auto", it + * resolves the value to "in-region", "cross-region", or "standard". 
+ * + * @default "legacy" + * @internal + */ +export declare const resolveDefaultsModeConfig: ({ region, defaultsMode, }?: ResolveDefaultsModeConfigOptions) => Provider; diff --git a/amplify/functions/downloadDocument/node_modules/@smithy/util-defaults-mode-node/dist-types/ts3.4/constants.d.ts b/amplify/functions/downloadDocument/node_modules/@smithy/util-defaults-mode-node/dist-types/ts3.4/constants.d.ts new file mode 100644 index 0000000..b847dc2 --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@smithy/util-defaults-mode-node/dist-types/ts3.4/constants.d.ts @@ -0,0 +1,24 @@ +/** + * @internal + */ +export declare const AWS_EXECUTION_ENV = "AWS_EXECUTION_ENV"; +/** + * @internal + */ +export declare const AWS_REGION_ENV = "AWS_REGION"; +/** + * @internal + */ +export declare const AWS_DEFAULT_REGION_ENV = "AWS_DEFAULT_REGION"; +/** + * @internal + */ +export declare const ENV_IMDS_DISABLED = "AWS_EC2_METADATA_DISABLED"; +/** + * @internal + */ +export declare const DEFAULTS_MODE_OPTIONS: string[]; +/** + * @internal + */ +export declare const IMDS_REGION_PATH = "/latest/meta-data/placement/region"; diff --git a/amplify/functions/downloadDocument/node_modules/@smithy/util-defaults-mode-node/dist-types/ts3.4/defaultsModeConfig.d.ts b/amplify/functions/downloadDocument/node_modules/@smithy/util-defaults-mode-node/dist-types/ts3.4/defaultsModeConfig.d.ts new file mode 100644 index 0000000..76c3d0d --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@smithy/util-defaults-mode-node/dist-types/ts3.4/defaultsModeConfig.d.ts @@ -0,0 +1,6 @@ +import { LoadedConfigSelectors } from "@smithy/node-config-provider"; +import { DefaultsMode } from "@smithy/smithy-client"; +/** + * @internal + */ +export declare const NODE_DEFAULTS_MODE_CONFIG_OPTIONS: LoadedConfigSelectors; diff --git a/amplify/functions/downloadDocument/node_modules/@smithy/util-defaults-mode-node/dist-types/ts3.4/index.d.ts 
b/amplify/functions/downloadDocument/node_modules/@smithy/util-defaults-mode-node/dist-types/ts3.4/index.d.ts new file mode 100644 index 0000000..4ab48b4 --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@smithy/util-defaults-mode-node/dist-types/ts3.4/index.d.ts @@ -0,0 +1,4 @@ +/** + * @internal + */ +export * from "./resolveDefaultsModeConfig"; diff --git a/amplify/functions/downloadDocument/node_modules/@smithy/util-defaults-mode-node/dist-types/ts3.4/resolveDefaultsModeConfig.d.ts b/amplify/functions/downloadDocument/node_modules/@smithy/util-defaults-mode-node/dist-types/ts3.4/resolveDefaultsModeConfig.d.ts new file mode 100644 index 0000000..4daa927 --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@smithy/util-defaults-mode-node/dist-types/ts3.4/resolveDefaultsModeConfig.d.ts @@ -0,0 +1,17 @@ +import { DefaultsMode, ResolvedDefaultsMode } from "@smithy/smithy-client"; +import { Provider } from "@smithy/types"; +/** + * @internal + */ +export interface ResolveDefaultsModeConfigOptions { + defaultsMode?: DefaultsMode | Provider; + region?: string | Provider; +} +/** + * Validate the defaultsMode configuration. If the value is set to "auto", it + * resolves the value to "in-region", "cross-region", or "standard". 
+ * + * @default "legacy" + * @internal + */ +export declare const resolveDefaultsModeConfig: ({ region, defaultsMode, }?: ResolveDefaultsModeConfigOptions) => Provider; diff --git a/amplify/functions/downloadDocument/node_modules/@smithy/util-defaults-mode-node/package.json b/amplify/functions/downloadDocument/node_modules/@smithy/util-defaults-mode-node/package.json new file mode 100644 index 0000000..0a69079 --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@smithy/util-defaults-mode-node/package.json @@ -0,0 +1,66 @@ +{ + "name": "@smithy/util-defaults-mode-node", + "version": "4.0.10", + "scripts": { + "build": "concurrently 'yarn:build:cjs' 'yarn:build:es' 'yarn:build:types && yarn build:types:downlevel'", + "build:cjs": "node ../../scripts/inline util-defaults-mode-node", + "build:es": "yarn g:tsc -p tsconfig.es.json", + "build:types": "yarn g:tsc -p tsconfig.types.json", + "build:types:downlevel": "rimraf dist-types/ts3.4 && downlevel-dts dist-types dist-types/ts3.4", + "stage-release": "rimraf ./.release && yarn pack && mkdir ./.release && tar zxvf ./package.tgz --directory ./.release && rm ./package.tgz", + "clean": "rimraf ./dist-* && rimraf *.tsbuildinfo || exit 0", + "lint": "eslint -c ../../.eslintrc.js \"src/**/*.ts\"", + "format": "prettier --config ../../prettier.config.js --ignore-path ../../.prettierignore --write \"**/*.{ts,md,json}\"", + "test": "yarn g:vitest run", + "test:watch": "yarn g:vitest watch" + }, + "main": "./dist-cjs/index.js", + "module": "./dist-es/index.js", + "types": "./dist-types/index.d.ts", + "author": { + "name": "AWS SDK for JavaScript Team", + "url": "https://aws.amazon.com/javascript/" + }, + "license": "Apache-2.0", + "dependencies": { + "@smithy/config-resolver": "^4.1.0", + "@smithy/credential-provider-imds": "^4.0.2", + "@smithy/node-config-provider": "^4.0.2", + "@smithy/property-provider": "^4.0.2", + "@smithy/smithy-client": "^4.2.2", + "@smithy/types": "^4.2.0", + "tslib": "^2.6.2" + }, + 
"devDependencies": { + "@types/node": "^18.11.9", + "concurrently": "7.0.0", + "downlevel-dts": "0.10.1", + "rimraf": "3.0.2", + "typedoc": "0.23.23" + }, + "engines": { + "node": ">=18.0.0" + }, + "typesVersions": { + "<4.0": { + "dist-types/*": [ + "dist-types/ts3.4/*" + ] + } + }, + "files": [ + "dist-*/**" + ], + "homepage": "https://github.com/smithy-lang/smithy-typescript/tree/main/packages/util-defaults-mode-node", + "repository": { + "type": "git", + "url": "https://github.com/smithy-lang/smithy-typescript.git", + "directory": "packages/util-defaults-mode-node" + }, + "typedoc": { + "entryPoint": "src/index.ts" + }, + "publishConfig": { + "directory": ".release/package" + } +} \ No newline at end of file diff --git a/amplify/functions/downloadDocument/node_modules/@smithy/util-endpoints/LICENSE b/amplify/functions/downloadDocument/node_modules/@smithy/util-endpoints/LICENSE new file mode 100644 index 0000000..a1895fa --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@smithy/util-endpoints/LICENSE @@ -0,0 +1,201 @@ +Apache License + Version 2.0, January 2004 + http://www.apache.org/licenses/ + + TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION + + 1. Definitions. + + "License" shall mean the terms and conditions for use, reproduction, + and distribution as defined by Sections 1 through 9 of this document. + + "Licensor" shall mean the copyright owner or entity authorized by + the copyright owner that is granting the License. + + "Legal Entity" shall mean the union of the acting entity and all + other entities that control, are controlled by, or are under common + control with that entity. For the purposes of this definition, + "control" means (i) the power, direct or indirect, to cause the + direction or management of such entity, whether by contract or + otherwise, or (ii) ownership of fifty percent (50%) or more of the + outstanding shares, or (iii) beneficial ownership of such entity. 
+ + "You" (or "Your") shall mean an individual or Legal Entity + exercising permissions granted by this License. + + "Source" form shall mean the preferred form for making modifications, + including but not limited to software source code, documentation + source, and configuration files. + + "Object" form shall mean any form resulting from mechanical + transformation or translation of a Source form, including but + not limited to compiled object code, generated documentation, + and conversions to other media types. + + "Work" shall mean the work of authorship, whether in Source or + Object form, made available under the License, as indicated by a + copyright notice that is included in or attached to the work + (an example is provided in the Appendix below). + + "Derivative Works" shall mean any work, whether in Source or Object + form, that is based on (or derived from) the Work and for which the + editorial revisions, annotations, elaborations, or other modifications + represent, as a whole, an original work of authorship. For the purposes + of this License, Derivative Works shall not include works that remain + separable from, or merely link (or bind by name) to the interfaces of, + the Work and Derivative Works thereof. + + "Contribution" shall mean any work of authorship, including + the original version of the Work and any modifications or additions + to that Work or Derivative Works thereof, that is intentionally + submitted to Licensor for inclusion in the Work by the copyright owner + or by an individual or Legal Entity authorized to submit on behalf of + the copyright owner. 
For the purposes of this definition, "submitted" + means any form of electronic, verbal, or written communication sent + to the Licensor or its representatives, including but not limited to + communication on electronic mailing lists, source code control systems, + and issue tracking systems that are managed by, or on behalf of, the + Licensor for the purpose of discussing and improving the Work, but + excluding communication that is conspicuously marked or otherwise + designated in writing by the copyright owner as "Not a Contribution." + + "Contributor" shall mean Licensor and any individual or Legal Entity + on behalf of whom a Contribution has been received by Licensor and + subsequently incorporated within the Work. + + 2. Grant of Copyright License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + copyright license to reproduce, prepare Derivative Works of, + publicly display, publicly perform, sublicense, and distribute the + Work and such Derivative Works in Source or Object form. + + 3. Grant of Patent License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + (except as stated in this section) patent license to make, have made, + use, offer to sell, sell, import, and otherwise transfer the Work, + where such license applies only to those patent claims licensable + by such Contributor that are necessarily infringed by their + Contribution(s) alone or by combination of their Contribution(s) + with the Work to which such Contribution(s) was submitted. 
If You + institute patent litigation against any entity (including a + cross-claim or counterclaim in a lawsuit) alleging that the Work + or a Contribution incorporated within the Work constitutes direct + or contributory patent infringement, then any patent licenses + granted to You under this License for that Work shall terminate + as of the date such litigation is filed. + + 4. Redistribution. You may reproduce and distribute copies of the + Work or Derivative Works thereof in any medium, with or without + modifications, and in Source or Object form, provided that You + meet the following conditions: + + (a) You must give any other recipients of the Work or + Derivative Works a copy of this License; and + + (b) You must cause any modified files to carry prominent notices + stating that You changed the files; and + + (c) You must retain, in the Source form of any Derivative Works + that You distribute, all copyright, patent, trademark, and + attribution notices from the Source form of the Work, + excluding those notices that do not pertain to any part of + the Derivative Works; and + + (d) If the Work includes a "NOTICE" text file as part of its + distribution, then any Derivative Works that You distribute must + include a readable copy of the attribution notices contained + within such NOTICE file, excluding those notices that do not + pertain to any part of the Derivative Works, in at least one + of the following places: within a NOTICE text file distributed + as part of the Derivative Works; within the Source form or + documentation, if provided along with the Derivative Works; or, + within a display generated by the Derivative Works, if and + wherever such third-party notices normally appear. The contents + of the NOTICE file are for informational purposes only and + do not modify the License. 
You may add Your own attribution + notices within Derivative Works that You distribute, alongside + or as an addendum to the NOTICE text from the Work, provided + that such additional attribution notices cannot be construed + as modifying the License. + + You may add Your own copyright statement to Your modifications and + may provide additional or different license terms and conditions + for use, reproduction, or distribution of Your modifications, or + for any such Derivative Works as a whole, provided Your use, + reproduction, and distribution of the Work otherwise complies with + the conditions stated in this License. + + 5. Submission of Contributions. Unless You explicitly state otherwise, + any Contribution intentionally submitted for inclusion in the Work + by You to the Licensor shall be under the terms and conditions of + this License, without any additional terms or conditions. + Notwithstanding the above, nothing herein shall supersede or modify + the terms of any separate license agreement you may have executed + with Licensor regarding such Contributions. + + 6. Trademarks. This License does not grant permission to use the trade + names, trademarks, service marks, or product names of the Licensor, + except as required for reasonable and customary use in describing the + origin of the Work and reproducing the content of the NOTICE file. + + 7. Disclaimer of Warranty. Unless required by applicable law or + agreed to in writing, Licensor provides the Work (and each + Contributor provides its Contributions) on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or + implied, including, without limitation, any warranties or conditions + of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A + PARTICULAR PURPOSE. You are solely responsible for determining the + appropriateness of using or redistributing the Work and assume any + risks associated with Your exercise of permissions under this License. + + 8. 
Limitation of Liability. In no event and under no legal theory, + whether in tort (including negligence), contract, or otherwise, + unless required by applicable law (such as deliberate and grossly + negligent acts) or agreed to in writing, shall any Contributor be + liable to You for damages, including any direct, indirect, special, + incidental, or consequential damages of any character arising as a + result of this License or out of the use or inability to use the + Work (including but not limited to damages for loss of goodwill, + work stoppage, computer failure or malfunction, or any and all + other commercial damages or losses), even if such Contributor + has been advised of the possibility of such damages. + + 9. Accepting Warranty or Additional Liability. While redistributing + the Work or Derivative Works thereof, You may choose to offer, + and charge a fee for, acceptance of support, warranty, indemnity, + or other liability obligations and/or rights consistent with this + License. However, in accepting such obligations, You may act only + on Your own behalf and on Your sole responsibility, not on behalf + of any other Contributor, and only if You agree to indemnify, + defend, and hold each Contributor harmless for any liability + incurred by, or claims asserted against, such Contributor by reason + of your accepting any such warranty or additional liability. + + END OF TERMS AND CONDITIONS + + APPENDIX: How to apply the Apache License to your work. + + To apply the Apache License to your work, attach the following + boilerplate notice, with the fields enclosed by brackets "{}" + replaced with your own identifying information. (Don't include + the brackets!) The text should be enclosed in the appropriate + comment syntax for the file format. We also recommend that a + file or class name and description of purpose be included on the + same "printed page" as the copyright notice for easier + identification within third-party archives. 
+ + Copyright 2021 Amazon.com, Inc. or its affiliates. All Rights Reserved. + + Licensed under the Apache License, Version 2.0 (the "License"); + you may not use this file except in compliance with the License. + You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + + Unless required by applicable law or agreed to in writing, software + distributed under the License is distributed on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + See the License for the specific language governing permissions and + limitations under the License. \ No newline at end of file diff --git a/amplify/functions/downloadDocument/node_modules/@smithy/util-endpoints/README.md b/amplify/functions/downloadDocument/node_modules/@smithy/util-endpoints/README.md new file mode 100644 index 0000000..85d60b3 --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@smithy/util-endpoints/README.md @@ -0,0 +1,10 @@ +# @smithy/util-endpoints + +[![NPM version](https://img.shields.io/npm/v/@smithy/util-endpoints/latest.svg)](https://www.npmjs.com/package/@smithy/util-endpoints) +[![NPM downloads](https://img.shields.io/npm/dm/@smithy/util-endpoints.svg)](https://www.npmjs.com/package/@smithy/util-endpoints) + +> An internal package + +## Usage + +You probably shouldn't, at least directly. 
diff --git a/amplify/functions/downloadDocument/node_modules/@smithy/util-endpoints/dist-cjs/cache/EndpointCache.js b/amplify/functions/downloadDocument/node_modules/@smithy/util-endpoints/dist-cjs/cache/EndpointCache.js new file mode 100644 index 0000000..0440577 --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@smithy/util-endpoints/dist-cjs/cache/EndpointCache.js @@ -0,0 +1 @@ +module.exports = require("../index.js"); \ No newline at end of file diff --git a/amplify/functions/downloadDocument/node_modules/@smithy/util-endpoints/dist-cjs/debug/debugId.js b/amplify/functions/downloadDocument/node_modules/@smithy/util-endpoints/dist-cjs/debug/debugId.js new file mode 100644 index 0000000..0440577 --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@smithy/util-endpoints/dist-cjs/debug/debugId.js @@ -0,0 +1 @@ +module.exports = require("../index.js"); \ No newline at end of file diff --git a/amplify/functions/downloadDocument/node_modules/@smithy/util-endpoints/dist-cjs/debug/index.js b/amplify/functions/downloadDocument/node_modules/@smithy/util-endpoints/dist-cjs/debug/index.js new file mode 100644 index 0000000..0440577 --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@smithy/util-endpoints/dist-cjs/debug/index.js @@ -0,0 +1 @@ +module.exports = require("../index.js"); \ No newline at end of file diff --git a/amplify/functions/downloadDocument/node_modules/@smithy/util-endpoints/dist-cjs/debug/toDebugString.js b/amplify/functions/downloadDocument/node_modules/@smithy/util-endpoints/dist-cjs/debug/toDebugString.js new file mode 100644 index 0000000..0440577 --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@smithy/util-endpoints/dist-cjs/debug/toDebugString.js @@ -0,0 +1 @@ +module.exports = require("../index.js"); \ No newline at end of file diff --git a/amplify/functions/downloadDocument/node_modules/@smithy/util-endpoints/dist-cjs/getEndpointUrlConfig.js 
b/amplify/functions/downloadDocument/node_modules/@smithy/util-endpoints/dist-cjs/getEndpointUrlConfig.js new file mode 100644 index 0000000..532e610 --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@smithy/util-endpoints/dist-cjs/getEndpointUrlConfig.js @@ -0,0 +1 @@ +module.exports = require("./index.js"); \ No newline at end of file diff --git a/amplify/functions/downloadDocument/node_modules/@smithy/util-endpoints/dist-cjs/index.js b/amplify/functions/downloadDocument/node_modules/@smithy/util-endpoints/dist-cjs/index.js new file mode 100644 index 0000000..3bc5a7d --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@smithy/util-endpoints/dist-cjs/index.js @@ -0,0 +1,544 @@ +var __defProp = Object.defineProperty; +var __getOwnPropDesc = Object.getOwnPropertyDescriptor; +var __getOwnPropNames = Object.getOwnPropertyNames; +var __hasOwnProp = Object.prototype.hasOwnProperty; +var __name = (target, value) => __defProp(target, "name", { value, configurable: true }); +var __export = (target, all) => { + for (var name in all) + __defProp(target, name, { get: all[name], enumerable: true }); +}; +var __copyProps = (to, from, except, desc) => { + if (from && typeof from === "object" || typeof from === "function") { + for (let key of __getOwnPropNames(from)) + if (!__hasOwnProp.call(to, key) && key !== except) + __defProp(to, key, { get: () => from[key], enumerable: !(desc = __getOwnPropDesc(from, key)) || desc.enumerable }); + } + return to; +}; +var __toCommonJS = (mod) => __copyProps(__defProp({}, "__esModule", { value: true }), mod); + +// src/index.ts +var src_exports = {}; +__export(src_exports, { + EndpointCache: () => EndpointCache, + EndpointError: () => EndpointError, + customEndpointFunctions: () => customEndpointFunctions, + isIpAddress: () => isIpAddress, + isValidHostLabel: () => isValidHostLabel, + resolveEndpoint: () => resolveEndpoint +}); +module.exports = __toCommonJS(src_exports); + +// src/cache/EndpointCache.ts +var 
EndpointCache = class { + /** + * @param [size] - desired average maximum capacity. A buffer of 10 additional keys will be allowed + * before keys are dropped. + * @param [params] - list of params to consider as part of the cache key. + * + * If the params list is not populated, no caching will happen. + * This may be out of order depending on how the object is created and arrives to this class. + */ + constructor({ size, params }) { + this.data = /* @__PURE__ */ new Map(); + this.parameters = []; + this.capacity = size ?? 50; + if (params) { + this.parameters = params; + } + } + static { + __name(this, "EndpointCache"); + } + /** + * @param endpointParams - query for endpoint. + * @param resolver - provider of the value if not present. + * @returns endpoint corresponding to the query. + */ + get(endpointParams, resolver) { + const key = this.hash(endpointParams); + if (key === false) { + return resolver(); + } + if (!this.data.has(key)) { + if (this.data.size > this.capacity + 10) { + const keys = this.data.keys(); + let i = 0; + while (true) { + const { value, done } = keys.next(); + this.data.delete(value); + if (done || ++i > 10) { + break; + } + } + } + this.data.set(key, resolver()); + } + return this.data.get(key); + } + size() { + return this.data.size; + } + /** + * @returns cache key or false if not cachable. + */ + hash(endpointParams) { + let buffer = ""; + const { parameters } = this; + if (parameters.length === 0) { + return false; + } + for (const param of parameters) { + const val = String(endpointParams[param] ?? 
""); + if (val.includes("|;")) { + return false; + } + buffer += val + "|;"; + } + return buffer; + } +}; + +// src/lib/isIpAddress.ts +var IP_V4_REGEX = new RegExp( + `^(?:25[0-5]|2[0-4]\\d|1\\d\\d|[1-9]\\d|\\d)(?:\\.(?:25[0-5]|2[0-4]\\d|1\\d\\d|[1-9]\\d|\\d)){3}$` +); +var isIpAddress = /* @__PURE__ */ __name((value) => IP_V4_REGEX.test(value) || value.startsWith("[") && value.endsWith("]"), "isIpAddress"); + +// src/lib/isValidHostLabel.ts +var VALID_HOST_LABEL_REGEX = new RegExp(`^(?!.*-$)(?!-)[a-zA-Z0-9-]{1,63}$`); +var isValidHostLabel = /* @__PURE__ */ __name((value, allowSubDomains = false) => { + if (!allowSubDomains) { + return VALID_HOST_LABEL_REGEX.test(value); + } + const labels = value.split("."); + for (const label of labels) { + if (!isValidHostLabel(label)) { + return false; + } + } + return true; +}, "isValidHostLabel"); + +// src/utils/customEndpointFunctions.ts +var customEndpointFunctions = {}; + +// src/debug/debugId.ts +var debugId = "endpoints"; + +// src/debug/toDebugString.ts +function toDebugString(input) { + if (typeof input !== "object" || input == null) { + return input; + } + if ("ref" in input) { + return `$${toDebugString(input.ref)}`; + } + if ("fn" in input) { + return `${input.fn}(${(input.argv || []).map(toDebugString).join(", ")})`; + } + return JSON.stringify(input, null, 2); +} +__name(toDebugString, "toDebugString"); + +// src/types/EndpointError.ts +var EndpointError = class extends Error { + static { + __name(this, "EndpointError"); + } + constructor(message) { + super(message); + this.name = "EndpointError"; + } +}; + +// src/lib/booleanEquals.ts +var booleanEquals = /* @__PURE__ */ __name((value1, value2) => value1 === value2, "booleanEquals"); + +// src/lib/getAttrPathList.ts +var getAttrPathList = /* @__PURE__ */ __name((path) => { + const parts = path.split("."); + const pathList = []; + for (const part of parts) { + const squareBracketIndex = part.indexOf("["); + if (squareBracketIndex !== -1) { + if 
(part.indexOf("]") !== part.length - 1) { + throw new EndpointError(`Path: '${path}' does not end with ']'`); + } + const arrayIndex = part.slice(squareBracketIndex + 1, -1); + if (Number.isNaN(parseInt(arrayIndex))) { + throw new EndpointError(`Invalid array index: '${arrayIndex}' in path: '${path}'`); + } + if (squareBracketIndex !== 0) { + pathList.push(part.slice(0, squareBracketIndex)); + } + pathList.push(arrayIndex); + } else { + pathList.push(part); + } + } + return pathList; +}, "getAttrPathList"); + +// src/lib/getAttr.ts +var getAttr = /* @__PURE__ */ __name((value, path) => getAttrPathList(path).reduce((acc, index) => { + if (typeof acc !== "object") { + throw new EndpointError(`Index '${index}' in '${path}' not found in '${JSON.stringify(value)}'`); + } else if (Array.isArray(acc)) { + return acc[parseInt(index)]; + } + return acc[index]; +}, value), "getAttr"); + +// src/lib/isSet.ts +var isSet = /* @__PURE__ */ __name((value) => value != null, "isSet"); + +// src/lib/not.ts +var not = /* @__PURE__ */ __name((value) => !value, "not"); + +// src/lib/parseURL.ts +var import_types3 = require("@smithy/types"); +var DEFAULT_PORTS = { + [import_types3.EndpointURLScheme.HTTP]: 80, + [import_types3.EndpointURLScheme.HTTPS]: 443 +}; +var parseURL = /* @__PURE__ */ __name((value) => { + const whatwgURL = (() => { + try { + if (value instanceof URL) { + return value; + } + if (typeof value === "object" && "hostname" in value) { + const { hostname: hostname2, port, protocol: protocol2 = "", path = "", query = {} } = value; + const url = new URL(`${protocol2}//${hostname2}${port ? 
`:${port}` : ""}${path}`); + url.search = Object.entries(query).map(([k, v]) => `${k}=${v}`).join("&"); + return url; + } + return new URL(value); + } catch (error) { + return null; + } + })(); + if (!whatwgURL) { + console.error(`Unable to parse ${JSON.stringify(value)} as a whatwg URL.`); + return null; + } + const urlString = whatwgURL.href; + const { host, hostname, pathname, protocol, search } = whatwgURL; + if (search) { + return null; + } + const scheme = protocol.slice(0, -1); + if (!Object.values(import_types3.EndpointURLScheme).includes(scheme)) { + return null; + } + const isIp = isIpAddress(hostname); + const inputContainsDefaultPort = urlString.includes(`${host}:${DEFAULT_PORTS[scheme]}`) || typeof value === "string" && value.includes(`${host}:${DEFAULT_PORTS[scheme]}`); + const authority = `${host}${inputContainsDefaultPort ? `:${DEFAULT_PORTS[scheme]}` : ``}`; + return { + scheme, + authority, + path: pathname, + normalizedPath: pathname.endsWith("/") ? pathname : `${pathname}/`, + isIp + }; +}, "parseURL"); + +// src/lib/stringEquals.ts +var stringEquals = /* @__PURE__ */ __name((value1, value2) => value1 === value2, "stringEquals"); + +// src/lib/substring.ts +var substring = /* @__PURE__ */ __name((input, start, stop, reverse) => { + if (start >= stop || input.length < stop) { + return null; + } + if (!reverse) { + return input.substring(start, stop); + } + return input.substring(input.length - stop, input.length - start); +}, "substring"); + +// src/lib/uriEncode.ts +var uriEncode = /* @__PURE__ */ __name((value) => encodeURIComponent(value).replace(/[!*'()]/g, (c) => `%${c.charCodeAt(0).toString(16).toUpperCase()}`), "uriEncode"); + +// src/utils/endpointFunctions.ts +var endpointFunctions = { + booleanEquals, + getAttr, + isSet, + isValidHostLabel, + not, + parseURL, + stringEquals, + substring, + uriEncode +}; + +// src/utils/evaluateTemplate.ts +var evaluateTemplate = /* @__PURE__ */ __name((template, options) => { + const 
evaluatedTemplateArr = []; + const templateContext = { + ...options.endpointParams, + ...options.referenceRecord + }; + let currentIndex = 0; + while (currentIndex < template.length) { + const openingBraceIndex = template.indexOf("{", currentIndex); + if (openingBraceIndex === -1) { + evaluatedTemplateArr.push(template.slice(currentIndex)); + break; + } + evaluatedTemplateArr.push(template.slice(currentIndex, openingBraceIndex)); + const closingBraceIndex = template.indexOf("}", openingBraceIndex); + if (closingBraceIndex === -1) { + evaluatedTemplateArr.push(template.slice(openingBraceIndex)); + break; + } + if (template[openingBraceIndex + 1] === "{" && template[closingBraceIndex + 1] === "}") { + evaluatedTemplateArr.push(template.slice(openingBraceIndex + 1, closingBraceIndex)); + currentIndex = closingBraceIndex + 2; + } + const parameterName = template.substring(openingBraceIndex + 1, closingBraceIndex); + if (parameterName.includes("#")) { + const [refName, attrName] = parameterName.split("#"); + evaluatedTemplateArr.push(getAttr(templateContext[refName], attrName)); + } else { + evaluatedTemplateArr.push(templateContext[parameterName]); + } + currentIndex = closingBraceIndex + 1; + } + return evaluatedTemplateArr.join(""); +}, "evaluateTemplate"); + +// src/utils/getReferenceValue.ts +var getReferenceValue = /* @__PURE__ */ __name(({ ref }, options) => { + const referenceRecord = { + ...options.endpointParams, + ...options.referenceRecord + }; + return referenceRecord[ref]; +}, "getReferenceValue"); + +// src/utils/evaluateExpression.ts +var evaluateExpression = /* @__PURE__ */ __name((obj, keyName, options) => { + if (typeof obj === "string") { + return evaluateTemplate(obj, options); + } else if (obj["fn"]) { + return callFunction(obj, options); + } else if (obj["ref"]) { + return getReferenceValue(obj, options); + } + throw new EndpointError(`'${keyName}': ${String(obj)} is not a string, function or reference.`); +}, "evaluateExpression"); + +// 
src/utils/callFunction.ts +var callFunction = /* @__PURE__ */ __name(({ fn, argv }, options) => { + const evaluatedArgs = argv.map( + (arg) => ["boolean", "number"].includes(typeof arg) ? arg : evaluateExpression(arg, "arg", options) + ); + const fnSegments = fn.split("."); + if (fnSegments[0] in customEndpointFunctions && fnSegments[1] != null) { + return customEndpointFunctions[fnSegments[0]][fnSegments[1]](...evaluatedArgs); + } + return endpointFunctions[fn](...evaluatedArgs); +}, "callFunction"); + +// src/utils/evaluateCondition.ts +var evaluateCondition = /* @__PURE__ */ __name(({ assign, ...fnArgs }, options) => { + if (assign && assign in options.referenceRecord) { + throw new EndpointError(`'${assign}' is already defined in Reference Record.`); + } + const value = callFunction(fnArgs, options); + options.logger?.debug?.(`${debugId} evaluateCondition: ${toDebugString(fnArgs)} = ${toDebugString(value)}`); + return { + result: value === "" ? true : !!value, + ...assign != null && { toAssign: { name: assign, value } } + }; +}, "evaluateCondition"); + +// src/utils/evaluateConditions.ts +var evaluateConditions = /* @__PURE__ */ __name((conditions = [], options) => { + const conditionsReferenceRecord = {}; + for (const condition of conditions) { + const { result, toAssign } = evaluateCondition(condition, { + ...options, + referenceRecord: { + ...options.referenceRecord, + ...conditionsReferenceRecord + } + }); + if (!result) { + return { result }; + } + if (toAssign) { + conditionsReferenceRecord[toAssign.name] = toAssign.value; + options.logger?.debug?.(`${debugId} assign: ${toAssign.name} := ${toDebugString(toAssign.value)}`); + } + } + return { result: true, referenceRecord: conditionsReferenceRecord }; +}, "evaluateConditions"); + +// src/utils/getEndpointHeaders.ts +var getEndpointHeaders = /* @__PURE__ */ __name((headers, options) => Object.entries(headers).reduce( + (acc, [headerKey, headerVal]) => ({ + ...acc, + [headerKey]: 
headerVal.map((headerValEntry) => { + const processedExpr = evaluateExpression(headerValEntry, "Header value entry", options); + if (typeof processedExpr !== "string") { + throw new EndpointError(`Header '${headerKey}' value '${processedExpr}' is not a string`); + } + return processedExpr; + }) + }), + {} +), "getEndpointHeaders"); + +// src/utils/getEndpointProperty.ts +var getEndpointProperty = /* @__PURE__ */ __name((property, options) => { + if (Array.isArray(property)) { + return property.map((propertyEntry) => getEndpointProperty(propertyEntry, options)); + } + switch (typeof property) { + case "string": + return evaluateTemplate(property, options); + case "object": + if (property === null) { + throw new EndpointError(`Unexpected endpoint property: ${property}`); + } + return getEndpointProperties(property, options); + case "boolean": + return property; + default: + throw new EndpointError(`Unexpected endpoint property type: ${typeof property}`); + } +}, "getEndpointProperty"); + +// src/utils/getEndpointProperties.ts +var getEndpointProperties = /* @__PURE__ */ __name((properties, options) => Object.entries(properties).reduce( + (acc, [propertyKey, propertyVal]) => ({ + ...acc, + [propertyKey]: getEndpointProperty(propertyVal, options) + }), + {} +), "getEndpointProperties"); + +// src/utils/getEndpointUrl.ts +var getEndpointUrl = /* @__PURE__ */ __name((endpointUrl, options) => { + const expression = evaluateExpression(endpointUrl, "Endpoint URL", options); + if (typeof expression === "string") { + try { + return new URL(expression); + } catch (error) { + console.error(`Failed to construct URL with ${expression}`, error); + throw error; + } + } + throw new EndpointError(`Endpoint URL must be a string, got ${typeof expression}`); +}, "getEndpointUrl"); + +// src/utils/evaluateEndpointRule.ts +var evaluateEndpointRule = /* @__PURE__ */ __name((endpointRule, options) => { + const { conditions, endpoint } = endpointRule; + const { result, referenceRecord } = 
evaluateConditions(conditions, options); + if (!result) { + return; + } + const endpointRuleOptions = { + ...options, + referenceRecord: { ...options.referenceRecord, ...referenceRecord } + }; + const { url, properties, headers } = endpoint; + options.logger?.debug?.(`${debugId} Resolving endpoint from template: ${toDebugString(endpoint)}`); + return { + ...headers != void 0 && { + headers: getEndpointHeaders(headers, endpointRuleOptions) + }, + ...properties != void 0 && { + properties: getEndpointProperties(properties, endpointRuleOptions) + }, + url: getEndpointUrl(url, endpointRuleOptions) + }; +}, "evaluateEndpointRule"); + +// src/utils/evaluateErrorRule.ts +var evaluateErrorRule = /* @__PURE__ */ __name((errorRule, options) => { + const { conditions, error } = errorRule; + const { result, referenceRecord } = evaluateConditions(conditions, options); + if (!result) { + return; + } + throw new EndpointError( + evaluateExpression(error, "Error", { + ...options, + referenceRecord: { ...options.referenceRecord, ...referenceRecord } + }) + ); +}, "evaluateErrorRule"); + +// src/utils/evaluateTreeRule.ts +var evaluateTreeRule = /* @__PURE__ */ __name((treeRule, options) => { + const { conditions, rules } = treeRule; + const { result, referenceRecord } = evaluateConditions(conditions, options); + if (!result) { + return; + } + return evaluateRules(rules, { + ...options, + referenceRecord: { ...options.referenceRecord, ...referenceRecord } + }); +}, "evaluateTreeRule"); + +// src/utils/evaluateRules.ts +var evaluateRules = /* @__PURE__ */ __name((rules, options) => { + for (const rule of rules) { + if (rule.type === "endpoint") { + const endpointOrUndefined = evaluateEndpointRule(rule, options); + if (endpointOrUndefined) { + return endpointOrUndefined; + } + } else if (rule.type === "error") { + evaluateErrorRule(rule, options); + } else if (rule.type === "tree") { + const endpointOrUndefined = evaluateTreeRule(rule, options); + if (endpointOrUndefined) { + return 
endpointOrUndefined; + } + } else { + throw new EndpointError(`Unknown endpoint rule: ${rule}`); + } + } + throw new EndpointError(`Rules evaluation failed`); +}, "evaluateRules"); + +// src/resolveEndpoint.ts +var resolveEndpoint = /* @__PURE__ */ __name((ruleSetObject, options) => { + const { endpointParams, logger } = options; + const { parameters, rules } = ruleSetObject; + options.logger?.debug?.(`${debugId} Initial EndpointParams: ${toDebugString(endpointParams)}`); + const paramsWithDefault = Object.entries(parameters).filter(([, v]) => v.default != null).map(([k, v]) => [k, v.default]); + if (paramsWithDefault.length > 0) { + for (const [paramKey, paramDefaultValue] of paramsWithDefault) { + endpointParams[paramKey] = endpointParams[paramKey] ?? paramDefaultValue; + } + } + const requiredParams = Object.entries(parameters).filter(([, v]) => v.required).map(([k]) => k); + for (const requiredParam of requiredParams) { + if (endpointParams[requiredParam] == null) { + throw new EndpointError(`Missing required parameter: '${requiredParam}'`); + } + } + const endpoint = evaluateRules(rules, { endpointParams, logger, referenceRecord: {} }); + options.logger?.debug?.(`${debugId} Resolved endpoint: ${toDebugString(endpoint)}`); + return endpoint; +}, "resolveEndpoint"); +// Annotate the CommonJS export names for ESM import in node: + +0 && (module.exports = { + EndpointCache, + isIpAddress, + isValidHostLabel, + customEndpointFunctions, + resolveEndpoint, + EndpointError +}); + diff --git a/amplify/functions/downloadDocument/node_modules/@smithy/util-endpoints/dist-cjs/lib/booleanEquals.js b/amplify/functions/downloadDocument/node_modules/@smithy/util-endpoints/dist-cjs/lib/booleanEquals.js new file mode 100644 index 0000000..0440577 --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@smithy/util-endpoints/dist-cjs/lib/booleanEquals.js @@ -0,0 +1 @@ +module.exports = require("../index.js"); \ No newline at end of file diff --git 
a/amplify/functions/downloadDocument/node_modules/@smithy/util-endpoints/dist-cjs/lib/getAttr.js b/amplify/functions/downloadDocument/node_modules/@smithy/util-endpoints/dist-cjs/lib/getAttr.js new file mode 100644 index 0000000..0440577 --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@smithy/util-endpoints/dist-cjs/lib/getAttr.js @@ -0,0 +1 @@ +module.exports = require("../index.js"); \ No newline at end of file diff --git a/amplify/functions/downloadDocument/node_modules/@smithy/util-endpoints/dist-cjs/lib/getAttrPathList.js b/amplify/functions/downloadDocument/node_modules/@smithy/util-endpoints/dist-cjs/lib/getAttrPathList.js new file mode 100644 index 0000000..0440577 --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@smithy/util-endpoints/dist-cjs/lib/getAttrPathList.js @@ -0,0 +1 @@ +module.exports = require("../index.js"); \ No newline at end of file diff --git a/amplify/functions/downloadDocument/node_modules/@smithy/util-endpoints/dist-cjs/lib/index.js b/amplify/functions/downloadDocument/node_modules/@smithy/util-endpoints/dist-cjs/lib/index.js new file mode 100644 index 0000000..0440577 --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@smithy/util-endpoints/dist-cjs/lib/index.js @@ -0,0 +1 @@ +module.exports = require("../index.js"); \ No newline at end of file diff --git a/amplify/functions/downloadDocument/node_modules/@smithy/util-endpoints/dist-cjs/lib/isIpAddress.js b/amplify/functions/downloadDocument/node_modules/@smithy/util-endpoints/dist-cjs/lib/isIpAddress.js new file mode 100644 index 0000000..0440577 --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@smithy/util-endpoints/dist-cjs/lib/isIpAddress.js @@ -0,0 +1 @@ +module.exports = require("../index.js"); \ No newline at end of file diff --git a/amplify/functions/downloadDocument/node_modules/@smithy/util-endpoints/dist-cjs/lib/isSet.js 
b/amplify/functions/downloadDocument/node_modules/@smithy/util-endpoints/dist-cjs/lib/isSet.js new file mode 100644 index 0000000..0440577 --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@smithy/util-endpoints/dist-cjs/lib/isSet.js @@ -0,0 +1 @@ +module.exports = require("../index.js"); \ No newline at end of file diff --git a/amplify/functions/downloadDocument/node_modules/@smithy/util-endpoints/dist-cjs/lib/isValidHostLabel.js b/amplify/functions/downloadDocument/node_modules/@smithy/util-endpoints/dist-cjs/lib/isValidHostLabel.js new file mode 100644 index 0000000..0440577 --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@smithy/util-endpoints/dist-cjs/lib/isValidHostLabel.js @@ -0,0 +1 @@ +module.exports = require("../index.js"); \ No newline at end of file diff --git a/amplify/functions/downloadDocument/node_modules/@smithy/util-endpoints/dist-cjs/lib/not.js b/amplify/functions/downloadDocument/node_modules/@smithy/util-endpoints/dist-cjs/lib/not.js new file mode 100644 index 0000000..0440577 --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@smithy/util-endpoints/dist-cjs/lib/not.js @@ -0,0 +1 @@ +module.exports = require("../index.js"); \ No newline at end of file diff --git a/amplify/functions/downloadDocument/node_modules/@smithy/util-endpoints/dist-cjs/lib/parseURL.js b/amplify/functions/downloadDocument/node_modules/@smithy/util-endpoints/dist-cjs/lib/parseURL.js new file mode 100644 index 0000000..0440577 --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@smithy/util-endpoints/dist-cjs/lib/parseURL.js @@ -0,0 +1 @@ +module.exports = require("../index.js"); \ No newline at end of file diff --git a/amplify/functions/downloadDocument/node_modules/@smithy/util-endpoints/dist-cjs/lib/stringEquals.js b/amplify/functions/downloadDocument/node_modules/@smithy/util-endpoints/dist-cjs/lib/stringEquals.js new file mode 100644 index 0000000..0440577 --- /dev/null +++ 
b/amplify/functions/downloadDocument/node_modules/@smithy/util-endpoints/dist-cjs/lib/stringEquals.js @@ -0,0 +1 @@ +module.exports = require("../index.js"); \ No newline at end of file diff --git a/amplify/functions/downloadDocument/node_modules/@smithy/util-endpoints/dist-cjs/lib/substring.js b/amplify/functions/downloadDocument/node_modules/@smithy/util-endpoints/dist-cjs/lib/substring.js new file mode 100644 index 0000000..0440577 --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@smithy/util-endpoints/dist-cjs/lib/substring.js @@ -0,0 +1 @@ +module.exports = require("../index.js"); \ No newline at end of file diff --git a/amplify/functions/downloadDocument/node_modules/@smithy/util-endpoints/dist-cjs/lib/uriEncode.js b/amplify/functions/downloadDocument/node_modules/@smithy/util-endpoints/dist-cjs/lib/uriEncode.js new file mode 100644 index 0000000..0440577 --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@smithy/util-endpoints/dist-cjs/lib/uriEncode.js @@ -0,0 +1 @@ +module.exports = require("../index.js"); \ No newline at end of file diff --git a/amplify/functions/downloadDocument/node_modules/@smithy/util-endpoints/dist-cjs/resolveEndpoint.js b/amplify/functions/downloadDocument/node_modules/@smithy/util-endpoints/dist-cjs/resolveEndpoint.js new file mode 100644 index 0000000..532e610 --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@smithy/util-endpoints/dist-cjs/resolveEndpoint.js @@ -0,0 +1 @@ +module.exports = require("./index.js"); \ No newline at end of file diff --git a/amplify/functions/downloadDocument/node_modules/@smithy/util-endpoints/dist-cjs/types/EndpointError.js b/amplify/functions/downloadDocument/node_modules/@smithy/util-endpoints/dist-cjs/types/EndpointError.js new file mode 100644 index 0000000..0440577 --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@smithy/util-endpoints/dist-cjs/types/EndpointError.js @@ -0,0 +1 @@ +module.exports = require("../index.js"); \ 
No newline at end of file diff --git a/amplify/functions/downloadDocument/node_modules/@smithy/util-endpoints/dist-cjs/types/EndpointFunctions.js b/amplify/functions/downloadDocument/node_modules/@smithy/util-endpoints/dist-cjs/types/EndpointFunctions.js new file mode 100644 index 0000000..0440577 --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@smithy/util-endpoints/dist-cjs/types/EndpointFunctions.js @@ -0,0 +1 @@ +module.exports = require("../index.js"); \ No newline at end of file diff --git a/amplify/functions/downloadDocument/node_modules/@smithy/util-endpoints/dist-cjs/types/EndpointRuleObject.js b/amplify/functions/downloadDocument/node_modules/@smithy/util-endpoints/dist-cjs/types/EndpointRuleObject.js new file mode 100644 index 0000000..0440577 --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@smithy/util-endpoints/dist-cjs/types/EndpointRuleObject.js @@ -0,0 +1 @@ +module.exports = require("../index.js"); \ No newline at end of file diff --git a/amplify/functions/downloadDocument/node_modules/@smithy/util-endpoints/dist-cjs/types/ErrorRuleObject.js b/amplify/functions/downloadDocument/node_modules/@smithy/util-endpoints/dist-cjs/types/ErrorRuleObject.js new file mode 100644 index 0000000..0440577 --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@smithy/util-endpoints/dist-cjs/types/ErrorRuleObject.js @@ -0,0 +1 @@ +module.exports = require("../index.js"); \ No newline at end of file diff --git a/amplify/functions/downloadDocument/node_modules/@smithy/util-endpoints/dist-cjs/types/RuleSetObject.js b/amplify/functions/downloadDocument/node_modules/@smithy/util-endpoints/dist-cjs/types/RuleSetObject.js new file mode 100644 index 0000000..0440577 --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@smithy/util-endpoints/dist-cjs/types/RuleSetObject.js @@ -0,0 +1 @@ +module.exports = require("../index.js"); \ No newline at end of file diff --git 
a/amplify/functions/downloadDocument/node_modules/@smithy/util-endpoints/dist-cjs/types/TreeRuleObject.js b/amplify/functions/downloadDocument/node_modules/@smithy/util-endpoints/dist-cjs/types/TreeRuleObject.js new file mode 100644 index 0000000..0440577 --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@smithy/util-endpoints/dist-cjs/types/TreeRuleObject.js @@ -0,0 +1 @@ +module.exports = require("../index.js"); \ No newline at end of file diff --git a/amplify/functions/downloadDocument/node_modules/@smithy/util-endpoints/dist-cjs/types/index.js b/amplify/functions/downloadDocument/node_modules/@smithy/util-endpoints/dist-cjs/types/index.js new file mode 100644 index 0000000..0440577 --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@smithy/util-endpoints/dist-cjs/types/index.js @@ -0,0 +1 @@ +module.exports = require("../index.js"); \ No newline at end of file diff --git a/amplify/functions/downloadDocument/node_modules/@smithy/util-endpoints/dist-cjs/types/shared.js b/amplify/functions/downloadDocument/node_modules/@smithy/util-endpoints/dist-cjs/types/shared.js new file mode 100644 index 0000000..0440577 --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@smithy/util-endpoints/dist-cjs/types/shared.js @@ -0,0 +1 @@ +module.exports = require("../index.js"); \ No newline at end of file diff --git a/amplify/functions/downloadDocument/node_modules/@smithy/util-endpoints/dist-cjs/utils/callFunction.js b/amplify/functions/downloadDocument/node_modules/@smithy/util-endpoints/dist-cjs/utils/callFunction.js new file mode 100644 index 0000000..0440577 --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@smithy/util-endpoints/dist-cjs/utils/callFunction.js @@ -0,0 +1 @@ +module.exports = require("../index.js"); \ No newline at end of file diff --git a/amplify/functions/downloadDocument/node_modules/@smithy/util-endpoints/dist-cjs/utils/customEndpointFunctions.js 
b/amplify/functions/downloadDocument/node_modules/@smithy/util-endpoints/dist-cjs/utils/customEndpointFunctions.js new file mode 100644 index 0000000..0440577 --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@smithy/util-endpoints/dist-cjs/utils/customEndpointFunctions.js @@ -0,0 +1 @@ +module.exports = require("../index.js"); \ No newline at end of file diff --git a/amplify/functions/downloadDocument/node_modules/@smithy/util-endpoints/dist-cjs/utils/endpointFunctions.js b/amplify/functions/downloadDocument/node_modules/@smithy/util-endpoints/dist-cjs/utils/endpointFunctions.js new file mode 100644 index 0000000..0440577 --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@smithy/util-endpoints/dist-cjs/utils/endpointFunctions.js @@ -0,0 +1 @@ +module.exports = require("../index.js"); \ No newline at end of file diff --git a/amplify/functions/downloadDocument/node_modules/@smithy/util-endpoints/dist-cjs/utils/evaluateCondition.js b/amplify/functions/downloadDocument/node_modules/@smithy/util-endpoints/dist-cjs/utils/evaluateCondition.js new file mode 100644 index 0000000..0440577 --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@smithy/util-endpoints/dist-cjs/utils/evaluateCondition.js @@ -0,0 +1 @@ +module.exports = require("../index.js"); \ No newline at end of file diff --git a/amplify/functions/downloadDocument/node_modules/@smithy/util-endpoints/dist-cjs/utils/evaluateConditions.js b/amplify/functions/downloadDocument/node_modules/@smithy/util-endpoints/dist-cjs/utils/evaluateConditions.js new file mode 100644 index 0000000..0440577 --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@smithy/util-endpoints/dist-cjs/utils/evaluateConditions.js @@ -0,0 +1 @@ +module.exports = require("../index.js"); \ No newline at end of file diff --git a/amplify/functions/downloadDocument/node_modules/@smithy/util-endpoints/dist-cjs/utils/evaluateEndpointRule.js 
b/amplify/functions/downloadDocument/node_modules/@smithy/util-endpoints/dist-cjs/utils/evaluateEndpointRule.js new file mode 100644 index 0000000..0440577 --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@smithy/util-endpoints/dist-cjs/utils/evaluateEndpointRule.js @@ -0,0 +1 @@ +module.exports = require("../index.js"); \ No newline at end of file diff --git a/amplify/functions/downloadDocument/node_modules/@smithy/util-endpoints/dist-cjs/utils/evaluateErrorRule.js b/amplify/functions/downloadDocument/node_modules/@smithy/util-endpoints/dist-cjs/utils/evaluateErrorRule.js new file mode 100644 index 0000000..0440577 --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@smithy/util-endpoints/dist-cjs/utils/evaluateErrorRule.js @@ -0,0 +1 @@ +module.exports = require("../index.js"); \ No newline at end of file diff --git a/amplify/functions/downloadDocument/node_modules/@smithy/util-endpoints/dist-cjs/utils/evaluateExpression.js b/amplify/functions/downloadDocument/node_modules/@smithy/util-endpoints/dist-cjs/utils/evaluateExpression.js new file mode 100644 index 0000000..0440577 --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@smithy/util-endpoints/dist-cjs/utils/evaluateExpression.js @@ -0,0 +1 @@ +module.exports = require("../index.js"); \ No newline at end of file diff --git a/amplify/functions/downloadDocument/node_modules/@smithy/util-endpoints/dist-cjs/utils/evaluateRules.js b/amplify/functions/downloadDocument/node_modules/@smithy/util-endpoints/dist-cjs/utils/evaluateRules.js new file mode 100644 index 0000000..0440577 --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@smithy/util-endpoints/dist-cjs/utils/evaluateRules.js @@ -0,0 +1 @@ +module.exports = require("../index.js"); \ No newline at end of file diff --git a/amplify/functions/downloadDocument/node_modules/@smithy/util-endpoints/dist-cjs/utils/evaluateTemplate.js 
b/amplify/functions/downloadDocument/node_modules/@smithy/util-endpoints/dist-cjs/utils/evaluateTemplate.js new file mode 100644 index 0000000..0440577 --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@smithy/util-endpoints/dist-cjs/utils/evaluateTemplate.js @@ -0,0 +1 @@ +module.exports = require("../index.js"); \ No newline at end of file diff --git a/amplify/functions/downloadDocument/node_modules/@smithy/util-endpoints/dist-cjs/utils/evaluateTreeRule.js b/amplify/functions/downloadDocument/node_modules/@smithy/util-endpoints/dist-cjs/utils/evaluateTreeRule.js new file mode 100644 index 0000000..0440577 --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@smithy/util-endpoints/dist-cjs/utils/evaluateTreeRule.js @@ -0,0 +1 @@ +module.exports = require("../index.js"); \ No newline at end of file diff --git a/amplify/functions/downloadDocument/node_modules/@smithy/util-endpoints/dist-cjs/utils/getEndpointHeaders.js b/amplify/functions/downloadDocument/node_modules/@smithy/util-endpoints/dist-cjs/utils/getEndpointHeaders.js new file mode 100644 index 0000000..0440577 --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@smithy/util-endpoints/dist-cjs/utils/getEndpointHeaders.js @@ -0,0 +1 @@ +module.exports = require("../index.js"); \ No newline at end of file diff --git a/amplify/functions/downloadDocument/node_modules/@smithy/util-endpoints/dist-cjs/utils/getEndpointProperties.js b/amplify/functions/downloadDocument/node_modules/@smithy/util-endpoints/dist-cjs/utils/getEndpointProperties.js new file mode 100644 index 0000000..0440577 --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@smithy/util-endpoints/dist-cjs/utils/getEndpointProperties.js @@ -0,0 +1 @@ +module.exports = require("../index.js"); \ No newline at end of file diff --git a/amplify/functions/downloadDocument/node_modules/@smithy/util-endpoints/dist-cjs/utils/getEndpointProperty.js 
b/amplify/functions/downloadDocument/node_modules/@smithy/util-endpoints/dist-cjs/utils/getEndpointProperty.js new file mode 100644 index 0000000..0440577 --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@smithy/util-endpoints/dist-cjs/utils/getEndpointProperty.js @@ -0,0 +1 @@ +module.exports = require("../index.js"); \ No newline at end of file diff --git a/amplify/functions/downloadDocument/node_modules/@smithy/util-endpoints/dist-cjs/utils/getEndpointUrl.js b/amplify/functions/downloadDocument/node_modules/@smithy/util-endpoints/dist-cjs/utils/getEndpointUrl.js new file mode 100644 index 0000000..0440577 --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@smithy/util-endpoints/dist-cjs/utils/getEndpointUrl.js @@ -0,0 +1 @@ +module.exports = require("../index.js"); \ No newline at end of file diff --git a/amplify/functions/downloadDocument/node_modules/@smithy/util-endpoints/dist-cjs/utils/getReferenceValue.js b/amplify/functions/downloadDocument/node_modules/@smithy/util-endpoints/dist-cjs/utils/getReferenceValue.js new file mode 100644 index 0000000..0440577 --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@smithy/util-endpoints/dist-cjs/utils/getReferenceValue.js @@ -0,0 +1 @@ +module.exports = require("../index.js"); \ No newline at end of file diff --git a/amplify/functions/downloadDocument/node_modules/@smithy/util-endpoints/dist-cjs/utils/index.js b/amplify/functions/downloadDocument/node_modules/@smithy/util-endpoints/dist-cjs/utils/index.js new file mode 100644 index 0000000..0440577 --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@smithy/util-endpoints/dist-cjs/utils/index.js @@ -0,0 +1 @@ +module.exports = require("../index.js"); \ No newline at end of file diff --git a/amplify/functions/downloadDocument/node_modules/@smithy/util-endpoints/dist-es/cache/EndpointCache.js b/amplify/functions/downloadDocument/node_modules/@smithy/util-endpoints/dist-es/cache/EndpointCache.js new 
file mode 100644 index 0000000..ddc7b0d --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@smithy/util-endpoints/dist-es/cache/EndpointCache.js @@ -0,0 +1,49 @@ +export class EndpointCache { + constructor({ size, params }) { + this.data = new Map(); + this.parameters = []; + this.capacity = size ?? 50; + if (params) { + this.parameters = params; + } + } + get(endpointParams, resolver) { + const key = this.hash(endpointParams); + if (key === false) { + return resolver(); + } + if (!this.data.has(key)) { + if (this.data.size > this.capacity + 10) { + const keys = this.data.keys(); + let i = 0; + while (true) { + const { value, done } = keys.next(); + this.data.delete(value); + if (done || ++i > 10) { + break; + } + } + } + this.data.set(key, resolver()); + } + return this.data.get(key); + } + size() { + return this.data.size; + } + hash(endpointParams) { + let buffer = ""; + const { parameters } = this; + if (parameters.length === 0) { + return false; + } + for (const param of parameters) { + const val = String(endpointParams[param] ?? 
""); + if (val.includes("|;")) { + return false; + } + buffer += val + "|;"; + } + return buffer; + } +} diff --git a/amplify/functions/downloadDocument/node_modules/@smithy/util-endpoints/dist-es/debug/debugId.js b/amplify/functions/downloadDocument/node_modules/@smithy/util-endpoints/dist-es/debug/debugId.js new file mode 100644 index 0000000..0d4e27e --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@smithy/util-endpoints/dist-es/debug/debugId.js @@ -0,0 +1 @@ +export const debugId = "endpoints"; diff --git a/amplify/functions/downloadDocument/node_modules/@smithy/util-endpoints/dist-es/debug/index.js b/amplify/functions/downloadDocument/node_modules/@smithy/util-endpoints/dist-es/debug/index.js new file mode 100644 index 0000000..70d3b15 --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@smithy/util-endpoints/dist-es/debug/index.js @@ -0,0 +1,2 @@ +export * from "./debugId"; +export * from "./toDebugString"; diff --git a/amplify/functions/downloadDocument/node_modules/@smithy/util-endpoints/dist-es/debug/toDebugString.js b/amplify/functions/downloadDocument/node_modules/@smithy/util-endpoints/dist-es/debug/toDebugString.js new file mode 100644 index 0000000..33c8fcb --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@smithy/util-endpoints/dist-es/debug/toDebugString.js @@ -0,0 +1,12 @@ +export function toDebugString(input) { + if (typeof input !== "object" || input == null) { + return input; + } + if ("ref" in input) { + return `$${toDebugString(input.ref)}`; + } + if ("fn" in input) { + return `${input.fn}(${(input.argv || []).map(toDebugString).join(", ")})`; + } + return JSON.stringify(input, null, 2); +} diff --git a/amplify/functions/downloadDocument/node_modules/@smithy/util-endpoints/dist-es/getEndpointUrlConfig.js b/amplify/functions/downloadDocument/node_modules/@smithy/util-endpoints/dist-es/getEndpointUrlConfig.js new file mode 100644 index 0000000..5069030 --- /dev/null +++ 
b/amplify/functions/downloadDocument/node_modules/@smithy/util-endpoints/dist-es/getEndpointUrlConfig.js @@ -0,0 +1,21 @@ +const ENV_ENDPOINT_URL = "AWS_ENDPOINT_URL"; +const CONFIG_ENDPOINT_URL = "endpoint_url"; +export const getEndpointUrlConfig = (serviceId) => ({ + environmentVariableSelector: (env) => { + const serviceEndpointUrlSections = [ENV_ENDPOINT_URL, ...serviceId.split(" ").map((w) => w.toUpperCase())]; + const serviceEndpointUrl = env[serviceEndpointUrlSections.join("_")]; + if (serviceEndpointUrl) + return serviceEndpointUrl; + const endpointUrl = env[ENV_ENDPOINT_URL]; + if (endpointUrl) + return endpointUrl; + return undefined; + }, + configFileSelector: (profile) => { + const endpointUrl = profile[CONFIG_ENDPOINT_URL]; + if (endpointUrl) + return endpointUrl; + return undefined; + }, + default: undefined, +}); diff --git a/amplify/functions/downloadDocument/node_modules/@smithy/util-endpoints/dist-es/index.js b/amplify/functions/downloadDocument/node_modules/@smithy/util-endpoints/dist-es/index.js new file mode 100644 index 0000000..c39ed2b --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@smithy/util-endpoints/dist-es/index.js @@ -0,0 +1,6 @@ +export * from "./cache/EndpointCache"; +export * from "./lib/isIpAddress"; +export * from "./lib/isValidHostLabel"; +export * from "./utils/customEndpointFunctions"; +export * from "./resolveEndpoint"; +export * from "./types"; diff --git a/amplify/functions/downloadDocument/node_modules/@smithy/util-endpoints/dist-es/lib/booleanEquals.js b/amplify/functions/downloadDocument/node_modules/@smithy/util-endpoints/dist-es/lib/booleanEquals.js new file mode 100644 index 0000000..730cbd3 --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@smithy/util-endpoints/dist-es/lib/booleanEquals.js @@ -0,0 +1 @@ +export const booleanEquals = (value1, value2) => value1 === value2; diff --git a/amplify/functions/downloadDocument/node_modules/@smithy/util-endpoints/dist-es/lib/getAttr.js 
b/amplify/functions/downloadDocument/node_modules/@smithy/util-endpoints/dist-es/lib/getAttr.js new file mode 100644 index 0000000..d77f165 --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@smithy/util-endpoints/dist-es/lib/getAttr.js @@ -0,0 +1,11 @@ +import { EndpointError } from "../types"; +import { getAttrPathList } from "./getAttrPathList"; +export const getAttr = (value, path) => getAttrPathList(path).reduce((acc, index) => { + if (typeof acc !== "object") { + throw new EndpointError(`Index '${index}' in '${path}' not found in '${JSON.stringify(value)}'`); + } + else if (Array.isArray(acc)) { + return acc[parseInt(index)]; + } + return acc[index]; +}, value); diff --git a/amplify/functions/downloadDocument/node_modules/@smithy/util-endpoints/dist-es/lib/getAttrPathList.js b/amplify/functions/downloadDocument/node_modules/@smithy/util-endpoints/dist-es/lib/getAttrPathList.js new file mode 100644 index 0000000..5817a2d --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@smithy/util-endpoints/dist-es/lib/getAttrPathList.js @@ -0,0 +1,25 @@ +import { EndpointError } from "../types"; +export const getAttrPathList = (path) => { + const parts = path.split("."); + const pathList = []; + for (const part of parts) { + const squareBracketIndex = part.indexOf("["); + if (squareBracketIndex !== -1) { + if (part.indexOf("]") !== part.length - 1) { + throw new EndpointError(`Path: '${path}' does not end with ']'`); + } + const arrayIndex = part.slice(squareBracketIndex + 1, -1); + if (Number.isNaN(parseInt(arrayIndex))) { + throw new EndpointError(`Invalid array index: '${arrayIndex}' in path: '${path}'`); + } + if (squareBracketIndex !== 0) { + pathList.push(part.slice(0, squareBracketIndex)); + } + pathList.push(arrayIndex); + } + else { + pathList.push(part); + } + } + return pathList; +}; diff --git a/amplify/functions/downloadDocument/node_modules/@smithy/util-endpoints/dist-es/lib/index.js 
b/amplify/functions/downloadDocument/node_modules/@smithy/util-endpoints/dist-es/lib/index.js new file mode 100644 index 0000000..99a0844 --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@smithy/util-endpoints/dist-es/lib/index.js @@ -0,0 +1,9 @@ +export * from "./booleanEquals"; +export * from "./getAttr"; +export * from "./isSet"; +export * from "./isValidHostLabel"; +export * from "./not"; +export * from "./parseURL"; +export * from "./stringEquals"; +export * from "./substring"; +export * from "./uriEncode"; diff --git a/amplify/functions/downloadDocument/node_modules/@smithy/util-endpoints/dist-es/lib/isIpAddress.js b/amplify/functions/downloadDocument/node_modules/@smithy/util-endpoints/dist-es/lib/isIpAddress.js new file mode 100644 index 0000000..20be5a3 --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@smithy/util-endpoints/dist-es/lib/isIpAddress.js @@ -0,0 +1,2 @@ +const IP_V4_REGEX = new RegExp(`^(?:25[0-5]|2[0-4]\\d|1\\d\\d|[1-9]\\d|\\d)(?:\\.(?:25[0-5]|2[0-4]\\d|1\\d\\d|[1-9]\\d|\\d)){3}$`); +export const isIpAddress = (value) => IP_V4_REGEX.test(value) || (value.startsWith("[") && value.endsWith("]")); diff --git a/amplify/functions/downloadDocument/node_modules/@smithy/util-endpoints/dist-es/lib/isSet.js b/amplify/functions/downloadDocument/node_modules/@smithy/util-endpoints/dist-es/lib/isSet.js new file mode 100644 index 0000000..83ccc7a --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@smithy/util-endpoints/dist-es/lib/isSet.js @@ -0,0 +1 @@ +export const isSet = (value) => value != null; diff --git a/amplify/functions/downloadDocument/node_modules/@smithy/util-endpoints/dist-es/lib/isValidHostLabel.js b/amplify/functions/downloadDocument/node_modules/@smithy/util-endpoints/dist-es/lib/isValidHostLabel.js new file mode 100644 index 0000000..7858598 --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@smithy/util-endpoints/dist-es/lib/isValidHostLabel.js @@ -0,0 +1,13 @@ 
+const VALID_HOST_LABEL_REGEX = new RegExp(`^(?!.*-$)(?!-)[a-zA-Z0-9-]{1,63}$`); +export const isValidHostLabel = (value, allowSubDomains = false) => { + if (!allowSubDomains) { + return VALID_HOST_LABEL_REGEX.test(value); + } + const labels = value.split("."); + for (const label of labels) { + if (!isValidHostLabel(label)) { + return false; + } + } + return true; +}; diff --git a/amplify/functions/downloadDocument/node_modules/@smithy/util-endpoints/dist-es/lib/not.js b/amplify/functions/downloadDocument/node_modules/@smithy/util-endpoints/dist-es/lib/not.js new file mode 100644 index 0000000..180e5dd --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@smithy/util-endpoints/dist-es/lib/not.js @@ -0,0 +1 @@ +export const not = (value) => !value; diff --git a/amplify/functions/downloadDocument/node_modules/@smithy/util-endpoints/dist-es/lib/parseURL.js b/amplify/functions/downloadDocument/node_modules/@smithy/util-endpoints/dist-es/lib/parseURL.js new file mode 100644 index 0000000..79f9b24 --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@smithy/util-endpoints/dist-es/lib/parseURL.js @@ -0,0 +1,51 @@ +import { EndpointURLScheme } from "@smithy/types"; +import { isIpAddress } from "./isIpAddress"; +const DEFAULT_PORTS = { + [EndpointURLScheme.HTTP]: 80, + [EndpointURLScheme.HTTPS]: 443, +}; +export const parseURL = (value) => { + const whatwgURL = (() => { + try { + if (value instanceof URL) { + return value; + } + if (typeof value === "object" && "hostname" in value) { + const { hostname, port, protocol = "", path = "", query = {} } = value; + const url = new URL(`${protocol}//${hostname}${port ? 
`:${port}` : ""}${path}`); + url.search = Object.entries(query) + .map(([k, v]) => `${k}=${v}`) + .join("&"); + return url; + } + return new URL(value); + } + catch (error) { + return null; + } + })(); + if (!whatwgURL) { + console.error(`Unable to parse ${JSON.stringify(value)} as a whatwg URL.`); + return null; + } + const urlString = whatwgURL.href; + const { host, hostname, pathname, protocol, search } = whatwgURL; + if (search) { + return null; + } + const scheme = protocol.slice(0, -1); + if (!Object.values(EndpointURLScheme).includes(scheme)) { + return null; + } + const isIp = isIpAddress(hostname); + const inputContainsDefaultPort = urlString.includes(`${host}:${DEFAULT_PORTS[scheme]}`) || + (typeof value === "string" && value.includes(`${host}:${DEFAULT_PORTS[scheme]}`)); + const authority = `${host}${inputContainsDefaultPort ? `:${DEFAULT_PORTS[scheme]}` : ``}`; + return { + scheme, + authority, + path: pathname, + normalizedPath: pathname.endsWith("/") ? pathname : `${pathname}/`, + isIp, + }; +}; diff --git a/amplify/functions/downloadDocument/node_modules/@smithy/util-endpoints/dist-es/lib/stringEquals.js b/amplify/functions/downloadDocument/node_modules/@smithy/util-endpoints/dist-es/lib/stringEquals.js new file mode 100644 index 0000000..ee41426 --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@smithy/util-endpoints/dist-es/lib/stringEquals.js @@ -0,0 +1 @@ +export const stringEquals = (value1, value2) => value1 === value2; diff --git a/amplify/functions/downloadDocument/node_modules/@smithy/util-endpoints/dist-es/lib/substring.js b/amplify/functions/downloadDocument/node_modules/@smithy/util-endpoints/dist-es/lib/substring.js new file mode 100644 index 0000000..942dde4 --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@smithy/util-endpoints/dist-es/lib/substring.js @@ -0,0 +1,9 @@ +export const substring = (input, start, stop, reverse) => { + if (start >= stop || input.length < stop) { + return null; + } + if 
(!reverse) { + return input.substring(start, stop); + } + return input.substring(input.length - stop, input.length - start); +}; diff --git a/amplify/functions/downloadDocument/node_modules/@smithy/util-endpoints/dist-es/lib/uriEncode.js b/amplify/functions/downloadDocument/node_modules/@smithy/util-endpoints/dist-es/lib/uriEncode.js new file mode 100644 index 0000000..ae226dc --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@smithy/util-endpoints/dist-es/lib/uriEncode.js @@ -0,0 +1 @@ +export const uriEncode = (value) => encodeURIComponent(value).replace(/[!*'()]/g, (c) => `%${c.charCodeAt(0).toString(16).toUpperCase()}`); diff --git a/amplify/functions/downloadDocument/node_modules/@smithy/util-endpoints/dist-es/resolveEndpoint.js b/amplify/functions/downloadDocument/node_modules/@smithy/util-endpoints/dist-es/resolveEndpoint.js new file mode 100644 index 0000000..ac12096 --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@smithy/util-endpoints/dist-es/resolveEndpoint.js @@ -0,0 +1,27 @@ +import { debugId, toDebugString } from "./debug"; +import { EndpointError } from "./types"; +import { evaluateRules } from "./utils"; +export const resolveEndpoint = (ruleSetObject, options) => { + const { endpointParams, logger } = options; + const { parameters, rules } = ruleSetObject; + options.logger?.debug?.(`${debugId} Initial EndpointParams: ${toDebugString(endpointParams)}`); + const paramsWithDefault = Object.entries(parameters) + .filter(([, v]) => v.default != null) + .map(([k, v]) => [k, v.default]); + if (paramsWithDefault.length > 0) { + for (const [paramKey, paramDefaultValue] of paramsWithDefault) { + endpointParams[paramKey] = endpointParams[paramKey] ?? 
paramDefaultValue; + } + } + const requiredParams = Object.entries(parameters) + .filter(([, v]) => v.required) + .map(([k]) => k); + for (const requiredParam of requiredParams) { + if (endpointParams[requiredParam] == null) { + throw new EndpointError(`Missing required parameter: '${requiredParam}'`); + } + } + const endpoint = evaluateRules(rules, { endpointParams, logger, referenceRecord: {} }); + options.logger?.debug?.(`${debugId} Resolved endpoint: ${toDebugString(endpoint)}`); + return endpoint; +}; diff --git a/amplify/functions/downloadDocument/node_modules/@smithy/util-endpoints/dist-es/types/EndpointError.js b/amplify/functions/downloadDocument/node_modules/@smithy/util-endpoints/dist-es/types/EndpointError.js new file mode 100644 index 0000000..1ce597d --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@smithy/util-endpoints/dist-es/types/EndpointError.js @@ -0,0 +1,6 @@ +export class EndpointError extends Error { + constructor(message) { + super(message); + this.name = "EndpointError"; + } +} diff --git a/amplify/functions/downloadDocument/node_modules/@smithy/util-endpoints/dist-es/types/EndpointFunctions.js b/amplify/functions/downloadDocument/node_modules/@smithy/util-endpoints/dist-es/types/EndpointFunctions.js new file mode 100644 index 0000000..cb0ff5c --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@smithy/util-endpoints/dist-es/types/EndpointFunctions.js @@ -0,0 +1 @@ +export {}; diff --git a/amplify/functions/downloadDocument/node_modules/@smithy/util-endpoints/dist-es/types/EndpointRuleObject.js b/amplify/functions/downloadDocument/node_modules/@smithy/util-endpoints/dist-es/types/EndpointRuleObject.js new file mode 100644 index 0000000..cb0ff5c --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@smithy/util-endpoints/dist-es/types/EndpointRuleObject.js @@ -0,0 +1 @@ +export {}; diff --git 
a/amplify/functions/downloadDocument/node_modules/@smithy/util-endpoints/dist-es/types/ErrorRuleObject.js b/amplify/functions/downloadDocument/node_modules/@smithy/util-endpoints/dist-es/types/ErrorRuleObject.js new file mode 100644 index 0000000..cb0ff5c --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@smithy/util-endpoints/dist-es/types/ErrorRuleObject.js @@ -0,0 +1 @@ +export {}; diff --git a/amplify/functions/downloadDocument/node_modules/@smithy/util-endpoints/dist-es/types/RuleSetObject.js b/amplify/functions/downloadDocument/node_modules/@smithy/util-endpoints/dist-es/types/RuleSetObject.js new file mode 100644 index 0000000..cb0ff5c --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@smithy/util-endpoints/dist-es/types/RuleSetObject.js @@ -0,0 +1 @@ +export {}; diff --git a/amplify/functions/downloadDocument/node_modules/@smithy/util-endpoints/dist-es/types/TreeRuleObject.js b/amplify/functions/downloadDocument/node_modules/@smithy/util-endpoints/dist-es/types/TreeRuleObject.js new file mode 100644 index 0000000..cb0ff5c --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@smithy/util-endpoints/dist-es/types/TreeRuleObject.js @@ -0,0 +1 @@ +export {}; diff --git a/amplify/functions/downloadDocument/node_modules/@smithy/util-endpoints/dist-es/types/index.js b/amplify/functions/downloadDocument/node_modules/@smithy/util-endpoints/dist-es/types/index.js new file mode 100644 index 0000000..a49f984 --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@smithy/util-endpoints/dist-es/types/index.js @@ -0,0 +1,7 @@ +export * from "./EndpointError"; +export * from "./EndpointFunctions"; +export * from "./EndpointRuleObject"; +export * from "./ErrorRuleObject"; +export * from "./RuleSetObject"; +export * from "./TreeRuleObject"; +export * from "./shared"; diff --git a/amplify/functions/downloadDocument/node_modules/@smithy/util-endpoints/dist-es/types/shared.js 
b/amplify/functions/downloadDocument/node_modules/@smithy/util-endpoints/dist-es/types/shared.js new file mode 100644 index 0000000..cb0ff5c --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@smithy/util-endpoints/dist-es/types/shared.js @@ -0,0 +1 @@ +export {}; diff --git a/amplify/functions/downloadDocument/node_modules/@smithy/util-endpoints/dist-es/utils/callFunction.js b/amplify/functions/downloadDocument/node_modules/@smithy/util-endpoints/dist-es/utils/callFunction.js new file mode 100644 index 0000000..bf0747a --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@smithy/util-endpoints/dist-es/utils/callFunction.js @@ -0,0 +1,11 @@ +import { customEndpointFunctions } from "./customEndpointFunctions"; +import { endpointFunctions } from "./endpointFunctions"; +import { evaluateExpression } from "./evaluateExpression"; +export const callFunction = ({ fn, argv }, options) => { + const evaluatedArgs = argv.map((arg) => ["boolean", "number"].includes(typeof arg) ? 
arg : evaluateExpression(arg, "arg", options)); + const fnSegments = fn.split("."); + if (fnSegments[0] in customEndpointFunctions && fnSegments[1] != null) { + return customEndpointFunctions[fnSegments[0]][fnSegments[1]](...evaluatedArgs); + } + return endpointFunctions[fn](...evaluatedArgs); +}; diff --git a/amplify/functions/downloadDocument/node_modules/@smithy/util-endpoints/dist-es/utils/customEndpointFunctions.js b/amplify/functions/downloadDocument/node_modules/@smithy/util-endpoints/dist-es/utils/customEndpointFunctions.js new file mode 100644 index 0000000..0c26493 --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@smithy/util-endpoints/dist-es/utils/customEndpointFunctions.js @@ -0,0 +1 @@ +export const customEndpointFunctions = {}; diff --git a/amplify/functions/downloadDocument/node_modules/@smithy/util-endpoints/dist-es/utils/endpointFunctions.js b/amplify/functions/downloadDocument/node_modules/@smithy/util-endpoints/dist-es/utils/endpointFunctions.js new file mode 100644 index 0000000..e2215ff --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@smithy/util-endpoints/dist-es/utils/endpointFunctions.js @@ -0,0 +1,12 @@ +import { booleanEquals, getAttr, isSet, isValidHostLabel, not, parseURL, stringEquals, substring, uriEncode, } from "../lib"; +export const endpointFunctions = { + booleanEquals, + getAttr, + isSet, + isValidHostLabel, + not, + parseURL, + stringEquals, + substring, + uriEncode, +}; diff --git a/amplify/functions/downloadDocument/node_modules/@smithy/util-endpoints/dist-es/utils/evaluateCondition.js b/amplify/functions/downloadDocument/node_modules/@smithy/util-endpoints/dist-es/utils/evaluateCondition.js new file mode 100644 index 0000000..8e84f08 --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@smithy/util-endpoints/dist-es/utils/evaluateCondition.js @@ -0,0 +1,14 @@ +import { debugId, toDebugString } from "../debug"; +import { EndpointError } from "../types"; +import { 
callFunction } from "./callFunction"; +export const evaluateCondition = ({ assign, ...fnArgs }, options) => { + if (assign && assign in options.referenceRecord) { + throw new EndpointError(`'${assign}' is already defined in Reference Record.`); + } + const value = callFunction(fnArgs, options); + options.logger?.debug?.(`${debugId} evaluateCondition: ${toDebugString(fnArgs)} = ${toDebugString(value)}`); + return { + result: value === "" ? true : !!value, + ...(assign != null && { toAssign: { name: assign, value } }), + }; +}; diff --git a/amplify/functions/downloadDocument/node_modules/@smithy/util-endpoints/dist-es/utils/evaluateConditions.js b/amplify/functions/downloadDocument/node_modules/@smithy/util-endpoints/dist-es/utils/evaluateConditions.js new file mode 100644 index 0000000..5542076 --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@smithy/util-endpoints/dist-es/utils/evaluateConditions.js @@ -0,0 +1,22 @@ +import { debugId, toDebugString } from "../debug"; +import { evaluateCondition } from "./evaluateCondition"; +export const evaluateConditions = (conditions = [], options) => { + const conditionsReferenceRecord = {}; + for (const condition of conditions) { + const { result, toAssign } = evaluateCondition(condition, { + ...options, + referenceRecord: { + ...options.referenceRecord, + ...conditionsReferenceRecord, + }, + }); + if (!result) { + return { result }; + } + if (toAssign) { + conditionsReferenceRecord[toAssign.name] = toAssign.value; + options.logger?.debug?.(`${debugId} assign: ${toAssign.name} := ${toDebugString(toAssign.value)}`); + } + } + return { result: true, referenceRecord: conditionsReferenceRecord }; +}; diff --git a/amplify/functions/downloadDocument/node_modules/@smithy/util-endpoints/dist-es/utils/evaluateEndpointRule.js b/amplify/functions/downloadDocument/node_modules/@smithy/util-endpoints/dist-es/utils/evaluateEndpointRule.js new file mode 100644 index 0000000..ba6307b --- /dev/null +++ 
b/amplify/functions/downloadDocument/node_modules/@smithy/util-endpoints/dist-es/utils/evaluateEndpointRule.js @@ -0,0 +1,27 @@ +import { debugId, toDebugString } from "../debug"; +import { evaluateConditions } from "./evaluateConditions"; +import { getEndpointHeaders } from "./getEndpointHeaders"; +import { getEndpointProperties } from "./getEndpointProperties"; +import { getEndpointUrl } from "./getEndpointUrl"; +export const evaluateEndpointRule = (endpointRule, options) => { + const { conditions, endpoint } = endpointRule; + const { result, referenceRecord } = evaluateConditions(conditions, options); + if (!result) { + return; + } + const endpointRuleOptions = { + ...options, + referenceRecord: { ...options.referenceRecord, ...referenceRecord }, + }; + const { url, properties, headers } = endpoint; + options.logger?.debug?.(`${debugId} Resolving endpoint from template: ${toDebugString(endpoint)}`); + return { + ...(headers != undefined && { + headers: getEndpointHeaders(headers, endpointRuleOptions), + }), + ...(properties != undefined && { + properties: getEndpointProperties(properties, endpointRuleOptions), + }), + url: getEndpointUrl(url, endpointRuleOptions), + }; +}; diff --git a/amplify/functions/downloadDocument/node_modules/@smithy/util-endpoints/dist-es/utils/evaluateErrorRule.js b/amplify/functions/downloadDocument/node_modules/@smithy/util-endpoints/dist-es/utils/evaluateErrorRule.js new file mode 100644 index 0000000..1a57860 --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@smithy/util-endpoints/dist-es/utils/evaluateErrorRule.js @@ -0,0 +1,14 @@ +import { EndpointError } from "../types"; +import { evaluateConditions } from "./evaluateConditions"; +import { evaluateExpression } from "./evaluateExpression"; +export const evaluateErrorRule = (errorRule, options) => { + const { conditions, error } = errorRule; + const { result, referenceRecord } = evaluateConditions(conditions, options); + if (!result) { + return; + } + throw new 
EndpointError(evaluateExpression(error, "Error", { + ...options, + referenceRecord: { ...options.referenceRecord, ...referenceRecord }, + })); +}; diff --git a/amplify/functions/downloadDocument/node_modules/@smithy/util-endpoints/dist-es/utils/evaluateExpression.js b/amplify/functions/downloadDocument/node_modules/@smithy/util-endpoints/dist-es/utils/evaluateExpression.js new file mode 100644 index 0000000..7f69658 --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@smithy/util-endpoints/dist-es/utils/evaluateExpression.js @@ -0,0 +1,16 @@ +import { EndpointError } from "../types"; +import { callFunction } from "./callFunction"; +import { evaluateTemplate } from "./evaluateTemplate"; +import { getReferenceValue } from "./getReferenceValue"; +export const evaluateExpression = (obj, keyName, options) => { + if (typeof obj === "string") { + return evaluateTemplate(obj, options); + } + else if (obj["fn"]) { + return callFunction(obj, options); + } + else if (obj["ref"]) { + return getReferenceValue(obj, options); + } + throw new EndpointError(`'${keyName}': ${String(obj)} is not a string, function or reference.`); +}; diff --git a/amplify/functions/downloadDocument/node_modules/@smithy/util-endpoints/dist-es/utils/evaluateRules.js b/amplify/functions/downloadDocument/node_modules/@smithy/util-endpoints/dist-es/utils/evaluateRules.js new file mode 100644 index 0000000..58a40a0 --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@smithy/util-endpoints/dist-es/utils/evaluateRules.js @@ -0,0 +1,27 @@ +import { EndpointError } from "../types"; +import { evaluateEndpointRule } from "./evaluateEndpointRule"; +import { evaluateErrorRule } from "./evaluateErrorRule"; +import { evaluateTreeRule } from "./evaluateTreeRule"; +export const evaluateRules = (rules, options) => { + for (const rule of rules) { + if (rule.type === "endpoint") { + const endpointOrUndefined = evaluateEndpointRule(rule, options); + if (endpointOrUndefined) { + return 
endpointOrUndefined; + } + } + else if (rule.type === "error") { + evaluateErrorRule(rule, options); + } + else if (rule.type === "tree") { + const endpointOrUndefined = evaluateTreeRule(rule, options); + if (endpointOrUndefined) { + return endpointOrUndefined; + } + } + else { + throw new EndpointError(`Unknown endpoint rule: ${rule}`); + } + } + throw new EndpointError(`Rules evaluation failed`); +}; diff --git a/amplify/functions/downloadDocument/node_modules/@smithy/util-endpoints/dist-es/utils/evaluateTemplate.js b/amplify/functions/downloadDocument/node_modules/@smithy/util-endpoints/dist-es/utils/evaluateTemplate.js new file mode 100644 index 0000000..7005809 --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@smithy/util-endpoints/dist-es/utils/evaluateTemplate.js @@ -0,0 +1,36 @@ +import { getAttr } from "../lib"; +export const evaluateTemplate = (template, options) => { + const evaluatedTemplateArr = []; + const templateContext = { + ...options.endpointParams, + ...options.referenceRecord, + }; + let currentIndex = 0; + while (currentIndex < template.length) { + const openingBraceIndex = template.indexOf("{", currentIndex); + if (openingBraceIndex === -1) { + evaluatedTemplateArr.push(template.slice(currentIndex)); + break; + } + evaluatedTemplateArr.push(template.slice(currentIndex, openingBraceIndex)); + const closingBraceIndex = template.indexOf("}", openingBraceIndex); + if (closingBraceIndex === -1) { + evaluatedTemplateArr.push(template.slice(openingBraceIndex)); + break; + } + if (template[openingBraceIndex + 1] === "{" && template[closingBraceIndex + 1] === "}") { + evaluatedTemplateArr.push(template.slice(openingBraceIndex + 1, closingBraceIndex)); + currentIndex = closingBraceIndex + 2; + } + const parameterName = template.substring(openingBraceIndex + 1, closingBraceIndex); + if (parameterName.includes("#")) { + const [refName, attrName] = parameterName.split("#"); + evaluatedTemplateArr.push(getAttr(templateContext[refName], 
attrName)); + } + else { + evaluatedTemplateArr.push(templateContext[parameterName]); + } + currentIndex = closingBraceIndex + 1; + } + return evaluatedTemplateArr.join(""); +}; diff --git a/amplify/functions/downloadDocument/node_modules/@smithy/util-endpoints/dist-es/utils/evaluateTreeRule.js b/amplify/functions/downloadDocument/node_modules/@smithy/util-endpoints/dist-es/utils/evaluateTreeRule.js new file mode 100644 index 0000000..427c1fa --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@smithy/util-endpoints/dist-es/utils/evaluateTreeRule.js @@ -0,0 +1,13 @@ +import { evaluateConditions } from "./evaluateConditions"; +import { evaluateRules } from "./evaluateRules"; +export const evaluateTreeRule = (treeRule, options) => { + const { conditions, rules } = treeRule; + const { result, referenceRecord } = evaluateConditions(conditions, options); + if (!result) { + return; + } + return evaluateRules(rules, { + ...options, + referenceRecord: { ...options.referenceRecord, ...referenceRecord }, + }); +}; diff --git a/amplify/functions/downloadDocument/node_modules/@smithy/util-endpoints/dist-es/utils/getEndpointHeaders.js b/amplify/functions/downloadDocument/node_modules/@smithy/util-endpoints/dist-es/utils/getEndpointHeaders.js new file mode 100644 index 0000000..f94cf55 --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@smithy/util-endpoints/dist-es/utils/getEndpointHeaders.js @@ -0,0 +1,12 @@ +import { EndpointError } from "../types"; +import { evaluateExpression } from "./evaluateExpression"; +export const getEndpointHeaders = (headers, options) => Object.entries(headers).reduce((acc, [headerKey, headerVal]) => ({ + ...acc, + [headerKey]: headerVal.map((headerValEntry) => { + const processedExpr = evaluateExpression(headerValEntry, "Header value entry", options); + if (typeof processedExpr !== "string") { + throw new EndpointError(`Header '${headerKey}' value '${processedExpr}' is not a string`); + } + return processedExpr; + 
}), +}), {}); diff --git a/amplify/functions/downloadDocument/node_modules/@smithy/util-endpoints/dist-es/utils/getEndpointProperties.js b/amplify/functions/downloadDocument/node_modules/@smithy/util-endpoints/dist-es/utils/getEndpointProperties.js new file mode 100644 index 0000000..e7afe88 --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@smithy/util-endpoints/dist-es/utils/getEndpointProperties.js @@ -0,0 +1,5 @@ +import { getEndpointProperty } from "./getEndpointProperty"; +export const getEndpointProperties = (properties, options) => Object.entries(properties).reduce((acc, [propertyKey, propertyVal]) => ({ + ...acc, + [propertyKey]: getEndpointProperty(propertyVal, options), +}), {}); diff --git a/amplify/functions/downloadDocument/node_modules/@smithy/util-endpoints/dist-es/utils/getEndpointProperty.js b/amplify/functions/downloadDocument/node_modules/@smithy/util-endpoints/dist-es/utils/getEndpointProperty.js new file mode 100644 index 0000000..0600969 --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@smithy/util-endpoints/dist-es/utils/getEndpointProperty.js @@ -0,0 +1,21 @@ +import { EndpointError } from "../types"; +import { evaluateTemplate } from "./evaluateTemplate"; +import { getEndpointProperties } from "./getEndpointProperties"; +export const getEndpointProperty = (property, options) => { + if (Array.isArray(property)) { + return property.map((propertyEntry) => getEndpointProperty(propertyEntry, options)); + } + switch (typeof property) { + case "string": + return evaluateTemplate(property, options); + case "object": + if (property === null) { + throw new EndpointError(`Unexpected endpoint property: ${property}`); + } + return getEndpointProperties(property, options); + case "boolean": + return property; + default: + throw new EndpointError(`Unexpected endpoint property type: ${typeof property}`); + } +}; diff --git 
a/amplify/functions/downloadDocument/node_modules/@smithy/util-endpoints/dist-es/utils/getEndpointUrl.js b/amplify/functions/downloadDocument/node_modules/@smithy/util-endpoints/dist-es/utils/getEndpointUrl.js new file mode 100644 index 0000000..8f1301e --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@smithy/util-endpoints/dist-es/utils/getEndpointUrl.js @@ -0,0 +1,15 @@ +import { EndpointError } from "../types"; +import { evaluateExpression } from "./evaluateExpression"; +export const getEndpointUrl = (endpointUrl, options) => { + const expression = evaluateExpression(endpointUrl, "Endpoint URL", options); + if (typeof expression === "string") { + try { + return new URL(expression); + } + catch (error) { + console.error(`Failed to construct URL with ${expression}`, error); + throw error; + } + } + throw new EndpointError(`Endpoint URL must be a string, got ${typeof expression}`); +}; diff --git a/amplify/functions/downloadDocument/node_modules/@smithy/util-endpoints/dist-es/utils/getReferenceValue.js b/amplify/functions/downloadDocument/node_modules/@smithy/util-endpoints/dist-es/utils/getReferenceValue.js new file mode 100644 index 0000000..759f4d4 --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@smithy/util-endpoints/dist-es/utils/getReferenceValue.js @@ -0,0 +1,7 @@ +export const getReferenceValue = ({ ref }, options) => { + const referenceRecord = { + ...options.endpointParams, + ...options.referenceRecord, + }; + return referenceRecord[ref]; +}; diff --git a/amplify/functions/downloadDocument/node_modules/@smithy/util-endpoints/dist-es/utils/index.js b/amplify/functions/downloadDocument/node_modules/@smithy/util-endpoints/dist-es/utils/index.js new file mode 100644 index 0000000..b571d02 --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@smithy/util-endpoints/dist-es/utils/index.js @@ -0,0 +1,2 @@ +export * from "./customEndpointFunctions"; +export * from "./evaluateRules"; diff --git 
a/amplify/functions/downloadDocument/node_modules/@smithy/util-endpoints/dist-types/cache/EndpointCache.d.ts b/amplify/functions/downloadDocument/node_modules/@smithy/util-endpoints/dist-types/cache/EndpointCache.d.ts new file mode 100644 index 0000000..19a338f --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@smithy/util-endpoints/dist-types/cache/EndpointCache.d.ts @@ -0,0 +1,34 @@ +import type { EndpointParams, EndpointV2 } from "@smithy/types"; +/** + * @internal + * + * Cache for endpoint ruleSet resolution. + */ +export declare class EndpointCache { + private capacity; + private data; + private parameters; + /** + * @param [size] - desired average maximum capacity. A buffer of 10 additional keys will be allowed + * before keys are dropped. + * @param [params] - list of params to consider as part of the cache key. + * + * If the params list is not populated, no caching will happen. + * This may be out of order depending on how the object is created and arrives to this class. + */ + constructor({ size, params }: { + size?: number; + params?: string[]; + }); + /** + * @param endpointParams - query for endpoint. + * @param resolver - provider of the value if not present. + * @returns endpoint corresponding to the query. + */ + get(endpointParams: EndpointParams, resolver: () => EndpointV2): EndpointV2; + size(): number; + /** + * @returns cache key or false if not cachable. 
+ */ + private hash; +} diff --git a/amplify/functions/downloadDocument/node_modules/@smithy/util-endpoints/dist-types/debug/debugId.d.ts b/amplify/functions/downloadDocument/node_modules/@smithy/util-endpoints/dist-types/debug/debugId.d.ts new file mode 100644 index 0000000..d39f408 --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@smithy/util-endpoints/dist-types/debug/debugId.d.ts @@ -0,0 +1 @@ +export declare const debugId = "endpoints"; diff --git a/amplify/functions/downloadDocument/node_modules/@smithy/util-endpoints/dist-types/debug/index.d.ts b/amplify/functions/downloadDocument/node_modules/@smithy/util-endpoints/dist-types/debug/index.d.ts new file mode 100644 index 0000000..70d3b15 --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@smithy/util-endpoints/dist-types/debug/index.d.ts @@ -0,0 +1,2 @@ +export * from "./debugId"; +export * from "./toDebugString"; diff --git a/amplify/functions/downloadDocument/node_modules/@smithy/util-endpoints/dist-types/debug/toDebugString.d.ts b/amplify/functions/downloadDocument/node_modules/@smithy/util-endpoints/dist-types/debug/toDebugString.d.ts new file mode 100644 index 0000000..6bf1d3a --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@smithy/util-endpoints/dist-types/debug/toDebugString.d.ts @@ -0,0 +1,9 @@ +import { EndpointParameters, EndpointV2 } from "@smithy/types"; +import { GetAttrValue } from "../lib"; +import { EndpointObject, FunctionObject, FunctionReturn } from "../types"; +export declare function toDebugString(input: EndpointParameters): string; +export declare function toDebugString(input: EndpointV2): string; +export declare function toDebugString(input: GetAttrValue): string; +export declare function toDebugString(input: FunctionObject): string; +export declare function toDebugString(input: FunctionReturn): string; +export declare function toDebugString(input: EndpointObject): string; diff --git 
a/amplify/functions/downloadDocument/node_modules/@smithy/util-endpoints/dist-types/getEndpointUrlConfig.d.ts b/amplify/functions/downloadDocument/node_modules/@smithy/util-endpoints/dist-types/getEndpointUrlConfig.d.ts new file mode 100644 index 0000000..0971010 --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@smithy/util-endpoints/dist-types/getEndpointUrlConfig.d.ts @@ -0,0 +1,2 @@ +import { LoadedConfigSelectors } from "@smithy/node-config-provider"; +export declare const getEndpointUrlConfig: (serviceId: string) => LoadedConfigSelectors; diff --git a/amplify/functions/downloadDocument/node_modules/@smithy/util-endpoints/dist-types/index.d.ts b/amplify/functions/downloadDocument/node_modules/@smithy/util-endpoints/dist-types/index.d.ts new file mode 100644 index 0000000..c39ed2b --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@smithy/util-endpoints/dist-types/index.d.ts @@ -0,0 +1,6 @@ +export * from "./cache/EndpointCache"; +export * from "./lib/isIpAddress"; +export * from "./lib/isValidHostLabel"; +export * from "./utils/customEndpointFunctions"; +export * from "./resolveEndpoint"; +export * from "./types"; diff --git a/amplify/functions/downloadDocument/node_modules/@smithy/util-endpoints/dist-types/lib/booleanEquals.d.ts b/amplify/functions/downloadDocument/node_modules/@smithy/util-endpoints/dist-types/lib/booleanEquals.d.ts new file mode 100644 index 0000000..7eac561 --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@smithy/util-endpoints/dist-types/lib/booleanEquals.d.ts @@ -0,0 +1,5 @@ +/** + * Evaluates two boolean values value1 and value2 for equality and returns + * true if both values match. 
+ */ +export declare const booleanEquals: (value1: boolean, value2: boolean) => boolean; diff --git a/amplify/functions/downloadDocument/node_modules/@smithy/util-endpoints/dist-types/lib/getAttr.d.ts b/amplify/functions/downloadDocument/node_modules/@smithy/util-endpoints/dist-types/lib/getAttr.d.ts new file mode 100644 index 0000000..a8088c5 --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@smithy/util-endpoints/dist-types/lib/getAttr.d.ts @@ -0,0 +1,7 @@ +export type GetAttrValue = string | boolean | { + [key: string]: GetAttrValue; +} | Array; +/** + * Returns value corresponding to pathing string for an array or object. + */ +export declare const getAttr: (value: GetAttrValue, path: string) => GetAttrValue; diff --git a/amplify/functions/downloadDocument/node_modules/@smithy/util-endpoints/dist-types/lib/getAttrPathList.d.ts b/amplify/functions/downloadDocument/node_modules/@smithy/util-endpoints/dist-types/lib/getAttrPathList.d.ts new file mode 100644 index 0000000..e6c4979 --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@smithy/util-endpoints/dist-types/lib/getAttrPathList.d.ts @@ -0,0 +1,4 @@ +/** + * Parses path as a getAttr expression, returning a list of strings. 
+ */ +export declare const getAttrPathList: (path: string) => Array; diff --git a/amplify/functions/downloadDocument/node_modules/@smithy/util-endpoints/dist-types/lib/index.d.ts b/amplify/functions/downloadDocument/node_modules/@smithy/util-endpoints/dist-types/lib/index.d.ts new file mode 100644 index 0000000..99a0844 --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@smithy/util-endpoints/dist-types/lib/index.d.ts @@ -0,0 +1,9 @@ +export * from "./booleanEquals"; +export * from "./getAttr"; +export * from "./isSet"; +export * from "./isValidHostLabel"; +export * from "./not"; +export * from "./parseURL"; +export * from "./stringEquals"; +export * from "./substring"; +export * from "./uriEncode"; diff --git a/amplify/functions/downloadDocument/node_modules/@smithy/util-endpoints/dist-types/lib/isIpAddress.d.ts b/amplify/functions/downloadDocument/node_modules/@smithy/util-endpoints/dist-types/lib/isIpAddress.d.ts new file mode 100644 index 0000000..28aba97 --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@smithy/util-endpoints/dist-types/lib/isIpAddress.d.ts @@ -0,0 +1,4 @@ +/** + * Validates if the provided value is an IP address. + */ +export declare const isIpAddress: (value: string) => boolean; diff --git a/amplify/functions/downloadDocument/node_modules/@smithy/util-endpoints/dist-types/lib/isSet.d.ts b/amplify/functions/downloadDocument/node_modules/@smithy/util-endpoints/dist-types/lib/isSet.d.ts new file mode 100644 index 0000000..7c74ec5 --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@smithy/util-endpoints/dist-types/lib/isSet.d.ts @@ -0,0 +1,5 @@ +/** + * Evaluates whether a value is set (aka not null or undefined). + * Returns true if the value is set, otherwise returns false. 
+ */ +export declare const isSet: (value: unknown) => boolean; diff --git a/amplify/functions/downloadDocument/node_modules/@smithy/util-endpoints/dist-types/lib/isValidHostLabel.d.ts b/amplify/functions/downloadDocument/node_modules/@smithy/util-endpoints/dist-types/lib/isValidHostLabel.d.ts new file mode 100644 index 0000000..c05f9e9 --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@smithy/util-endpoints/dist-types/lib/isValidHostLabel.d.ts @@ -0,0 +1,7 @@ +/** + * Evaluates whether one or more string values are valid host labels per RFC 1123. + * + * If allowSubDomains is true, then the provided value may be zero or more dotted + * subdomains which are each validated per RFC 1123. + */ +export declare const isValidHostLabel: (value: string, allowSubDomains?: boolean) => boolean; diff --git a/amplify/functions/downloadDocument/node_modules/@smithy/util-endpoints/dist-types/lib/not.d.ts b/amplify/functions/downloadDocument/node_modules/@smithy/util-endpoints/dist-types/lib/not.d.ts new file mode 100644 index 0000000..1e8e728 --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@smithy/util-endpoints/dist-types/lib/not.d.ts @@ -0,0 +1,5 @@ +/** + * Performs logical negation on the provided boolean value, + * returning the negated value. + */ +export declare const not: (value: boolean) => boolean; diff --git a/amplify/functions/downloadDocument/node_modules/@smithy/util-endpoints/dist-types/lib/parseURL.d.ts b/amplify/functions/downloadDocument/node_modules/@smithy/util-endpoints/dist-types/lib/parseURL.d.ts new file mode 100644 index 0000000..3e0dce3 --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@smithy/util-endpoints/dist-types/lib/parseURL.d.ts @@ -0,0 +1,5 @@ +import { Endpoint, EndpointURL } from "@smithy/types"; +/** + * Parses a string, URL, or Endpoint into it’s Endpoint URL components. 
+ */ +export declare const parseURL: (value: string | URL | Endpoint) => EndpointURL | null; diff --git a/amplify/functions/downloadDocument/node_modules/@smithy/util-endpoints/dist-types/lib/stringEquals.d.ts b/amplify/functions/downloadDocument/node_modules/@smithy/util-endpoints/dist-types/lib/stringEquals.d.ts new file mode 100644 index 0000000..bdfc98d --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@smithy/util-endpoints/dist-types/lib/stringEquals.d.ts @@ -0,0 +1,5 @@ +/** + * Evaluates two string values value1 and value2 for equality and returns + * true if both values match. + */ +export declare const stringEquals: (value1: string, value2: string) => boolean; diff --git a/amplify/functions/downloadDocument/node_modules/@smithy/util-endpoints/dist-types/lib/substring.d.ts b/amplify/functions/downloadDocument/node_modules/@smithy/util-endpoints/dist-types/lib/substring.d.ts new file mode 100644 index 0000000..5d70035 --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@smithy/util-endpoints/dist-types/lib/substring.d.ts @@ -0,0 +1,7 @@ +/** + * Computes the substring of a given string, conditionally indexing from the end of the string. + * When the string is long enough to fully include the substring, return the substring. + * Otherwise, return None. The start index is inclusive and the stop index is exclusive. + * The length of the returned string will always be stop-start. 
+ */ +export declare const substring: (input: string, start: number, stop: number, reverse: boolean) => string | null; diff --git a/amplify/functions/downloadDocument/node_modules/@smithy/util-endpoints/dist-types/lib/uriEncode.d.ts b/amplify/functions/downloadDocument/node_modules/@smithy/util-endpoints/dist-types/lib/uriEncode.d.ts new file mode 100644 index 0000000..c2a720c --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@smithy/util-endpoints/dist-types/lib/uriEncode.d.ts @@ -0,0 +1,4 @@ +/** + * Performs percent-encoding per RFC3986 section 2.1 + */ +export declare const uriEncode: (value: string) => string; diff --git a/amplify/functions/downloadDocument/node_modules/@smithy/util-endpoints/dist-types/resolveEndpoint.d.ts b/amplify/functions/downloadDocument/node_modules/@smithy/util-endpoints/dist-types/resolveEndpoint.d.ts new file mode 100644 index 0000000..b02188b --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@smithy/util-endpoints/dist-types/resolveEndpoint.d.ts @@ -0,0 +1,6 @@ +import { EndpointV2 } from "@smithy/types"; +import { EndpointResolverOptions, RuleSetObject } from "./types"; +/** + * Resolves an endpoint URL by processing the endpoints ruleset and options. + */ +export declare const resolveEndpoint: (ruleSetObject: RuleSetObject, options: EndpointResolverOptions) => EndpointV2; diff --git a/amplify/functions/downloadDocument/node_modules/@smithy/util-endpoints/dist-types/ts3.4/cache/EndpointCache.d.ts b/amplify/functions/downloadDocument/node_modules/@smithy/util-endpoints/dist-types/ts3.4/cache/EndpointCache.d.ts new file mode 100644 index 0000000..9d622ae --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@smithy/util-endpoints/dist-types/ts3.4/cache/EndpointCache.d.ts @@ -0,0 +1,34 @@ +import { EndpointParams, EndpointV2 } from "@smithy/types"; +/** + * @internal + * + * Cache for endpoint ruleSet resolution. 
+ */ +export declare class EndpointCache { + private capacity; + private data; + private parameters; + /** + * @param [size] - desired average maximum capacity. A buffer of 10 additional keys will be allowed + * before keys are dropped. + * @param [params] - list of params to consider as part of the cache key. + * + * If the params list is not populated, no caching will happen. + * This may be out of order depending on how the object is created and arrives to this class. + */ + constructor({ size, params }: { + size?: number; + params?: string[]; + }); + /** + * @param endpointParams - query for endpoint. + * @param resolver - provider of the value if not present. + * @returns endpoint corresponding to the query. + */ + get(endpointParams: EndpointParams, resolver: () => EndpointV2): EndpointV2; + size(): number; + /** + * @returns cache key or false if not cachable. + */ + private hash; +} diff --git a/amplify/functions/downloadDocument/node_modules/@smithy/util-endpoints/dist-types/ts3.4/debug/debugId.d.ts b/amplify/functions/downloadDocument/node_modules/@smithy/util-endpoints/dist-types/ts3.4/debug/debugId.d.ts new file mode 100644 index 0000000..f674b8a --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@smithy/util-endpoints/dist-types/ts3.4/debug/debugId.d.ts @@ -0,0 +1 @@ +export declare const debugId = "endpoints"; diff --git a/amplify/functions/downloadDocument/node_modules/@smithy/util-endpoints/dist-types/ts3.4/debug/index.d.ts b/amplify/functions/downloadDocument/node_modules/@smithy/util-endpoints/dist-types/ts3.4/debug/index.d.ts new file mode 100644 index 0000000..1eb0bf4 --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@smithy/util-endpoints/dist-types/ts3.4/debug/index.d.ts @@ -0,0 +1,2 @@ +export * from "./debugId"; +export * from "./toDebugString"; diff --git a/amplify/functions/downloadDocument/node_modules/@smithy/util-endpoints/dist-types/ts3.4/debug/toDebugString.d.ts 
b/amplify/functions/downloadDocument/node_modules/@smithy/util-endpoints/dist-types/ts3.4/debug/toDebugString.d.ts new file mode 100644 index 0000000..e295ca0 --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@smithy/util-endpoints/dist-types/ts3.4/debug/toDebugString.d.ts @@ -0,0 +1,9 @@ +import { EndpointParameters, EndpointV2 } from "@smithy/types"; +import { GetAttrValue } from "../lib"; +import { EndpointObject, FunctionObject, FunctionReturn } from "../types"; +export declare function toDebugString(input: EndpointParameters): string; +export declare function toDebugString(input: EndpointV2): string; +export declare function toDebugString(input: GetAttrValue): string; +export declare function toDebugString(input: FunctionObject): string; +export declare function toDebugString(input: FunctionReturn): string; +export declare function toDebugString(input: EndpointObject): string; diff --git a/amplify/functions/downloadDocument/node_modules/@smithy/util-endpoints/dist-types/ts3.4/getEndpointUrlConfig.d.ts b/amplify/functions/downloadDocument/node_modules/@smithy/util-endpoints/dist-types/ts3.4/getEndpointUrlConfig.d.ts new file mode 100644 index 0000000..7b9d068 --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@smithy/util-endpoints/dist-types/ts3.4/getEndpointUrlConfig.d.ts @@ -0,0 +1,2 @@ +import { LoadedConfigSelectors } from "@smithy/node-config-provider"; +export declare const getEndpointUrlConfig: (serviceId: string) => LoadedConfigSelectors; diff --git a/amplify/functions/downloadDocument/node_modules/@smithy/util-endpoints/dist-types/ts3.4/index.d.ts b/amplify/functions/downloadDocument/node_modules/@smithy/util-endpoints/dist-types/ts3.4/index.d.ts new file mode 100644 index 0000000..7b367cf --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@smithy/util-endpoints/dist-types/ts3.4/index.d.ts @@ -0,0 +1,6 @@ +export * from "./cache/EndpointCache"; +export * from "./lib/isIpAddress"; +export * from 
"./lib/isValidHostLabel"; +export * from "./utils/customEndpointFunctions"; +export * from "./resolveEndpoint"; +export * from "./types"; diff --git a/amplify/functions/downloadDocument/node_modules/@smithy/util-endpoints/dist-types/ts3.4/lib/booleanEquals.d.ts b/amplify/functions/downloadDocument/node_modules/@smithy/util-endpoints/dist-types/ts3.4/lib/booleanEquals.d.ts new file mode 100644 index 0000000..7aec001 --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@smithy/util-endpoints/dist-types/ts3.4/lib/booleanEquals.d.ts @@ -0,0 +1,5 @@ +/** + * Evaluates two boolean values value1 and value2 for equality and returns + * true if both values match. + */ +export declare const booleanEquals: (value1: boolean, value2: boolean) => boolean; diff --git a/amplify/functions/downloadDocument/node_modules/@smithy/util-endpoints/dist-types/ts3.4/lib/getAttr.d.ts b/amplify/functions/downloadDocument/node_modules/@smithy/util-endpoints/dist-types/ts3.4/lib/getAttr.d.ts new file mode 100644 index 0000000..e2f5b43 --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@smithy/util-endpoints/dist-types/ts3.4/lib/getAttr.d.ts @@ -0,0 +1,7 @@ +export type GetAttrValue = string | boolean | { + [key: string]: GetAttrValue; +} | Array; +/** + * Returns value corresponding to pathing string for an array or object. + */ +export declare const getAttr: (value: GetAttrValue, path: string) => GetAttrValue; diff --git a/amplify/functions/downloadDocument/node_modules/@smithy/util-endpoints/dist-types/ts3.4/lib/getAttrPathList.d.ts b/amplify/functions/downloadDocument/node_modules/@smithy/util-endpoints/dist-types/ts3.4/lib/getAttrPathList.d.ts new file mode 100644 index 0000000..93bbf31 --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@smithy/util-endpoints/dist-types/ts3.4/lib/getAttrPathList.d.ts @@ -0,0 +1,4 @@ +/** + * Parses path as a getAttr expression, returning a list of strings. 
+ */ +export declare const getAttrPathList: (path: string) => Array; diff --git a/amplify/functions/downloadDocument/node_modules/@smithy/util-endpoints/dist-types/ts3.4/lib/index.d.ts b/amplify/functions/downloadDocument/node_modules/@smithy/util-endpoints/dist-types/ts3.4/lib/index.d.ts new file mode 100644 index 0000000..a28ecaa --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@smithy/util-endpoints/dist-types/ts3.4/lib/index.d.ts @@ -0,0 +1,9 @@ +export * from "./booleanEquals"; +export * from "./getAttr"; +export * from "./isSet"; +export * from "./isValidHostLabel"; +export * from "./not"; +export * from "./parseURL"; +export * from "./stringEquals"; +export * from "./substring"; +export * from "./uriEncode"; diff --git a/amplify/functions/downloadDocument/node_modules/@smithy/util-endpoints/dist-types/ts3.4/lib/isIpAddress.d.ts b/amplify/functions/downloadDocument/node_modules/@smithy/util-endpoints/dist-types/ts3.4/lib/isIpAddress.d.ts new file mode 100644 index 0000000..9f37893 --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@smithy/util-endpoints/dist-types/ts3.4/lib/isIpAddress.d.ts @@ -0,0 +1,4 @@ +/** + * Validates if the provided value is an IP address. + */ +export declare const isIpAddress: (value: string) => boolean; diff --git a/amplify/functions/downloadDocument/node_modules/@smithy/util-endpoints/dist-types/ts3.4/lib/isSet.d.ts b/amplify/functions/downloadDocument/node_modules/@smithy/util-endpoints/dist-types/ts3.4/lib/isSet.d.ts new file mode 100644 index 0000000..6b102dd --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@smithy/util-endpoints/dist-types/ts3.4/lib/isSet.d.ts @@ -0,0 +1,5 @@ +/** + * Evaluates whether a value is set (aka not null or undefined). + * Returns true if the value is set, otherwise returns false. 
+ */ +export declare const isSet: (value: unknown) => boolean; diff --git a/amplify/functions/downloadDocument/node_modules/@smithy/util-endpoints/dist-types/ts3.4/lib/isValidHostLabel.d.ts b/amplify/functions/downloadDocument/node_modules/@smithy/util-endpoints/dist-types/ts3.4/lib/isValidHostLabel.d.ts new file mode 100644 index 0000000..01f7eb9 --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@smithy/util-endpoints/dist-types/ts3.4/lib/isValidHostLabel.d.ts @@ -0,0 +1,7 @@ +/** + * Evaluates whether one or more string values are valid host labels per RFC 1123. + * + * If allowSubDomains is true, then the provided value may be zero or more dotted + * subdomains which are each validated per RFC 1123. + */ +export declare const isValidHostLabel: (value: string, allowSubDomains?: boolean) => boolean; diff --git a/amplify/functions/downloadDocument/node_modules/@smithy/util-endpoints/dist-types/ts3.4/lib/not.d.ts b/amplify/functions/downloadDocument/node_modules/@smithy/util-endpoints/dist-types/ts3.4/lib/not.d.ts new file mode 100644 index 0000000..b4e84ac --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@smithy/util-endpoints/dist-types/ts3.4/lib/not.d.ts @@ -0,0 +1,5 @@ +/** + * Performs logical negation on the provided boolean value, + * returning the negated value. + */ +export declare const not: (value: boolean) => boolean; diff --git a/amplify/functions/downloadDocument/node_modules/@smithy/util-endpoints/dist-types/ts3.4/lib/parseURL.d.ts b/amplify/functions/downloadDocument/node_modules/@smithy/util-endpoints/dist-types/ts3.4/lib/parseURL.d.ts new file mode 100644 index 0000000..0f54066 --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@smithy/util-endpoints/dist-types/ts3.4/lib/parseURL.d.ts @@ -0,0 +1,5 @@ +import { Endpoint, EndpointURL } from "@smithy/types"; +/** + * Parses a string, URL, or Endpoint into it’s Endpoint URL components. 
+ */ +export declare const parseURL: (value: string | URL | Endpoint) => EndpointURL | null; diff --git a/amplify/functions/downloadDocument/node_modules/@smithy/util-endpoints/dist-types/ts3.4/lib/stringEquals.d.ts b/amplify/functions/downloadDocument/node_modules/@smithy/util-endpoints/dist-types/ts3.4/lib/stringEquals.d.ts new file mode 100644 index 0000000..9acb10c --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@smithy/util-endpoints/dist-types/ts3.4/lib/stringEquals.d.ts @@ -0,0 +1,5 @@ +/** + * Evaluates two string values value1 and value2 for equality and returns + * true if both values match. + */ +export declare const stringEquals: (value1: string, value2: string) => boolean; diff --git a/amplify/functions/downloadDocument/node_modules/@smithy/util-endpoints/dist-types/ts3.4/lib/substring.d.ts b/amplify/functions/downloadDocument/node_modules/@smithy/util-endpoints/dist-types/ts3.4/lib/substring.d.ts new file mode 100644 index 0000000..a99025c --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@smithy/util-endpoints/dist-types/ts3.4/lib/substring.d.ts @@ -0,0 +1,7 @@ +/** + * Computes the substring of a given string, conditionally indexing from the end of the string. + * When the string is long enough to fully include the substring, return the substring. + * Otherwise, return None. The start index is inclusive and the stop index is exclusive. + * The length of the returned string will always be stop-start. 
+ */ +export declare const substring: (input: string, start: number, stop: number, reverse: boolean) => string | null; diff --git a/amplify/functions/downloadDocument/node_modules/@smithy/util-endpoints/dist-types/ts3.4/lib/uriEncode.d.ts b/amplify/functions/downloadDocument/node_modules/@smithy/util-endpoints/dist-types/ts3.4/lib/uriEncode.d.ts new file mode 100644 index 0000000..acb75bb --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@smithy/util-endpoints/dist-types/ts3.4/lib/uriEncode.d.ts @@ -0,0 +1,4 @@ +/** + * Performs percent-encoding per RFC3986 section 2.1 + */ +export declare const uriEncode: (value: string) => string; diff --git a/amplify/functions/downloadDocument/node_modules/@smithy/util-endpoints/dist-types/ts3.4/resolveEndpoint.d.ts b/amplify/functions/downloadDocument/node_modules/@smithy/util-endpoints/dist-types/ts3.4/resolveEndpoint.d.ts new file mode 100644 index 0000000..5469fa2 --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@smithy/util-endpoints/dist-types/ts3.4/resolveEndpoint.d.ts @@ -0,0 +1,6 @@ +import { EndpointV2 } from "@smithy/types"; +import { EndpointResolverOptions, RuleSetObject } from "./types"; +/** + * Resolves an endpoint URL by processing the endpoints ruleset and options. 
+ */ +export declare const resolveEndpoint: (ruleSetObject: RuleSetObject, options: EndpointResolverOptions) => EndpointV2; diff --git a/amplify/functions/downloadDocument/node_modules/@smithy/util-endpoints/dist-types/ts3.4/types/EndpointError.d.ts b/amplify/functions/downloadDocument/node_modules/@smithy/util-endpoints/dist-types/ts3.4/types/EndpointError.d.ts new file mode 100644 index 0000000..4f3c538 --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@smithy/util-endpoints/dist-types/ts3.4/types/EndpointError.d.ts @@ -0,0 +1,3 @@ +export declare class EndpointError extends Error { + constructor(message: string); +} diff --git a/amplify/functions/downloadDocument/node_modules/@smithy/util-endpoints/dist-types/ts3.4/types/EndpointFunctions.d.ts b/amplify/functions/downloadDocument/node_modules/@smithy/util-endpoints/dist-types/ts3.4/types/EndpointFunctions.d.ts new file mode 100644 index 0000000..7b3cf42 --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@smithy/util-endpoints/dist-types/ts3.4/types/EndpointFunctions.d.ts @@ -0,0 +1,2 @@ +import { FunctionReturn } from "./shared"; +export type EndpointFunctions = Record FunctionReturn>; diff --git a/amplify/functions/downloadDocument/node_modules/@smithy/util-endpoints/dist-types/ts3.4/types/EndpointRuleObject.d.ts b/amplify/functions/downloadDocument/node_modules/@smithy/util-endpoints/dist-types/ts3.4/types/EndpointRuleObject.d.ts new file mode 100644 index 0000000..436001e --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@smithy/util-endpoints/dist-types/ts3.4/types/EndpointRuleObject.d.ts @@ -0,0 +1,5 @@ +import { EndpointObject as __EndpointObject, EndpointObjectHeaders as __EndpointObjectHeaders, EndpointObjectProperties as __EndpointObjectProperties, EndpointRuleObject as __EndpointRuleObject } from "@smithy/types"; +export type EndpointObjectProperties = __EndpointObjectProperties; +export type EndpointObjectHeaders = __EndpointObjectHeaders; +export 
type EndpointObject = __EndpointObject; +export type EndpointRuleObject = __EndpointRuleObject; diff --git a/amplify/functions/downloadDocument/node_modules/@smithy/util-endpoints/dist-types/ts3.4/types/ErrorRuleObject.d.ts b/amplify/functions/downloadDocument/node_modules/@smithy/util-endpoints/dist-types/ts3.4/types/ErrorRuleObject.d.ts new file mode 100644 index 0000000..1540835 --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@smithy/util-endpoints/dist-types/ts3.4/types/ErrorRuleObject.d.ts @@ -0,0 +1,2 @@ +import { ErrorRuleObject as __ErrorRuleObject } from "@smithy/types"; +export type ErrorRuleObject = __ErrorRuleObject; diff --git a/amplify/functions/downloadDocument/node_modules/@smithy/util-endpoints/dist-types/ts3.4/types/RuleSetObject.d.ts b/amplify/functions/downloadDocument/node_modules/@smithy/util-endpoints/dist-types/ts3.4/types/RuleSetObject.d.ts new file mode 100644 index 0000000..227b269 --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@smithy/util-endpoints/dist-types/ts3.4/types/RuleSetObject.d.ts @@ -0,0 +1,4 @@ +import { DeprecatedObject as __DeprecatedObject, ParameterObject as __ParameterObject, RuleSetObject as __RuleSetObject } from "@smithy/types"; +export type DeprecatedObject = __DeprecatedObject; +export type ParameterObject = __ParameterObject; +export type RuleSetObject = __RuleSetObject; diff --git a/amplify/functions/downloadDocument/node_modules/@smithy/util-endpoints/dist-types/ts3.4/types/TreeRuleObject.d.ts b/amplify/functions/downloadDocument/node_modules/@smithy/util-endpoints/dist-types/ts3.4/types/TreeRuleObject.d.ts new file mode 100644 index 0000000..ecdb6b4 --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@smithy/util-endpoints/dist-types/ts3.4/types/TreeRuleObject.d.ts @@ -0,0 +1,3 @@ +import { RuleSetRules as __RuleSetRules, TreeRuleObject as __TreeRuleObject } from "@smithy/types"; +export type RuleSetRules = __RuleSetRules; +export type TreeRuleObject = 
__TreeRuleObject; diff --git a/amplify/functions/downloadDocument/node_modules/@smithy/util-endpoints/dist-types/ts3.4/types/index.d.ts b/amplify/functions/downloadDocument/node_modules/@smithy/util-endpoints/dist-types/ts3.4/types/index.d.ts new file mode 100644 index 0000000..f89fb63 --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@smithy/util-endpoints/dist-types/ts3.4/types/index.d.ts @@ -0,0 +1,7 @@ +export * from "./EndpointError"; +export * from "./EndpointFunctions"; +export * from "./EndpointRuleObject"; +export * from "./ErrorRuleObject"; +export * from "./RuleSetObject"; +export * from "./TreeRuleObject"; +export * from "./shared"; diff --git a/amplify/functions/downloadDocument/node_modules/@smithy/util-endpoints/dist-types/ts3.4/types/shared.d.ts b/amplify/functions/downloadDocument/node_modules/@smithy/util-endpoints/dist-types/ts3.4/types/shared.d.ts new file mode 100644 index 0000000..052dcf3 --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@smithy/util-endpoints/dist-types/ts3.4/types/shared.d.ts @@ -0,0 +1,25 @@ +import { EndpointARN, EndpointPartition, Logger } from "@smithy/types"; +export type ReferenceObject = { + ref: string; +}; +export type FunctionObject = { + fn: string; + argv: FunctionArgv; +}; +export type FunctionArgv = Array; +export type FunctionReturn = string | boolean | number | EndpointARN | EndpointPartition | { + [key: string]: FunctionReturn; +} | null; +export type ConditionObject = FunctionObject & { + assign?: string; +}; +export type Expression = string | ReferenceObject | FunctionObject; +export type EndpointParams = Record; +export type EndpointResolverOptions = { + endpointParams: EndpointParams; + logger?: Logger; +}; +export type ReferenceRecord = Record; +export type EvaluateOptions = EndpointResolverOptions & { + referenceRecord: ReferenceRecord; +}; diff --git a/amplify/functions/downloadDocument/node_modules/@smithy/util-endpoints/dist-types/ts3.4/utils/callFunction.d.ts 
b/amplify/functions/downloadDocument/node_modules/@smithy/util-endpoints/dist-types/ts3.4/utils/callFunction.d.ts new file mode 100644 index 0000000..bfdf543 --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@smithy/util-endpoints/dist-types/ts3.4/utils/callFunction.d.ts @@ -0,0 +1,2 @@ +import { EvaluateOptions, FunctionObject, FunctionReturn } from "../types"; +export declare const callFunction: ({ fn, argv }: FunctionObject, options: EvaluateOptions) => FunctionReturn; diff --git a/amplify/functions/downloadDocument/node_modules/@smithy/util-endpoints/dist-types/ts3.4/utils/customEndpointFunctions.d.ts b/amplify/functions/downloadDocument/node_modules/@smithy/util-endpoints/dist-types/ts3.4/utils/customEndpointFunctions.d.ts new file mode 100644 index 0000000..1cd2240 --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@smithy/util-endpoints/dist-types/ts3.4/utils/customEndpointFunctions.d.ts @@ -0,0 +1,4 @@ +import { EndpointFunctions } from "../types/EndpointFunctions"; +export declare const customEndpointFunctions: { + [key: string]: EndpointFunctions; +}; diff --git a/amplify/functions/downloadDocument/node_modules/@smithy/util-endpoints/dist-types/ts3.4/utils/endpointFunctions.d.ts b/amplify/functions/downloadDocument/node_modules/@smithy/util-endpoints/dist-types/ts3.4/utils/endpointFunctions.d.ts new file mode 100644 index 0000000..cde57d1 --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@smithy/util-endpoints/dist-types/ts3.4/utils/endpointFunctions.d.ts @@ -0,0 +1,11 @@ +export declare const endpointFunctions: { + booleanEquals: (value1: boolean, value2: boolean) => boolean; + getAttr: (value: import("../lib").GetAttrValue, path: string) => import("../lib").GetAttrValue; + isSet: (value: unknown) => boolean; + isValidHostLabel: (value: string, allowSubDomains?: boolean) => boolean; + not: (value: boolean) => boolean; + parseURL: (value: string | URL | import("@smithy/types").Endpoint) => 
import("@smithy/types").EndpointURL | null; + stringEquals: (value1: string, value2: string) => boolean; + substring: (input: string, start: number, stop: number, reverse: boolean) => string | null; + uriEncode: (value: string) => string; +}; diff --git a/amplify/functions/downloadDocument/node_modules/@smithy/util-endpoints/dist-types/ts3.4/utils/evaluateCondition.d.ts b/amplify/functions/downloadDocument/node_modules/@smithy/util-endpoints/dist-types/ts3.4/utils/evaluateCondition.d.ts new file mode 100644 index 0000000..ba2c0be --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@smithy/util-endpoints/dist-types/ts3.4/utils/evaluateCondition.d.ts @@ -0,0 +1,8 @@ +import { ConditionObject, EvaluateOptions } from "../types"; +export declare const evaluateCondition: ({ assign, ...fnArgs }: ConditionObject, options: EvaluateOptions) => { + toAssign?: { + name: string; + value: import("../types").FunctionReturn; + } | undefined; + result: boolean; +}; diff --git a/amplify/functions/downloadDocument/node_modules/@smithy/util-endpoints/dist-types/ts3.4/utils/evaluateConditions.d.ts b/amplify/functions/downloadDocument/node_modules/@smithy/util-endpoints/dist-types/ts3.4/utils/evaluateConditions.d.ts new file mode 100644 index 0000000..a7fbc5f --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@smithy/util-endpoints/dist-types/ts3.4/utils/evaluateConditions.d.ts @@ -0,0 +1,8 @@ +import { ConditionObject, EvaluateOptions, FunctionReturn } from "../types"; +export declare const evaluateConditions: (conditions: ConditionObject[] | undefined, options: EvaluateOptions) => { + result: false; + referenceRecord?: undefined; +} | { + result: boolean; + referenceRecord: Record; +}; diff --git a/amplify/functions/downloadDocument/node_modules/@smithy/util-endpoints/dist-types/ts3.4/utils/evaluateEndpointRule.d.ts b/amplify/functions/downloadDocument/node_modules/@smithy/util-endpoints/dist-types/ts3.4/utils/evaluateEndpointRule.d.ts new file mode 
100644 index 0000000..32f23ff --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@smithy/util-endpoints/dist-types/ts3.4/utils/evaluateEndpointRule.d.ts @@ -0,0 +1,3 @@ +import { EndpointV2 } from "@smithy/types"; +import { EndpointRuleObject, EvaluateOptions } from "../types"; +export declare const evaluateEndpointRule: (endpointRule: EndpointRuleObject, options: EvaluateOptions) => EndpointV2 | undefined; diff --git a/amplify/functions/downloadDocument/node_modules/@smithy/util-endpoints/dist-types/ts3.4/utils/evaluateErrorRule.d.ts b/amplify/functions/downloadDocument/node_modules/@smithy/util-endpoints/dist-types/ts3.4/utils/evaluateErrorRule.d.ts new file mode 100644 index 0000000..eef15e3 --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@smithy/util-endpoints/dist-types/ts3.4/utils/evaluateErrorRule.d.ts @@ -0,0 +1,2 @@ +import { ErrorRuleObject, EvaluateOptions } from "../types"; +export declare const evaluateErrorRule: (errorRule: ErrorRuleObject, options: EvaluateOptions) => void; diff --git a/amplify/functions/downloadDocument/node_modules/@smithy/util-endpoints/dist-types/ts3.4/utils/evaluateExpression.d.ts b/amplify/functions/downloadDocument/node_modules/@smithy/util-endpoints/dist-types/ts3.4/utils/evaluateExpression.d.ts new file mode 100644 index 0000000..8bbd358 --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@smithy/util-endpoints/dist-types/ts3.4/utils/evaluateExpression.d.ts @@ -0,0 +1,2 @@ +import { EvaluateOptions, Expression } from "../types"; +export declare const evaluateExpression: (obj: Expression, keyName: string, options: EvaluateOptions) => import("../types").FunctionReturn; diff --git a/amplify/functions/downloadDocument/node_modules/@smithy/util-endpoints/dist-types/ts3.4/utils/evaluateRules.d.ts b/amplify/functions/downloadDocument/node_modules/@smithy/util-endpoints/dist-types/ts3.4/utils/evaluateRules.d.ts new file mode 100644 index 0000000..a37fe07 --- /dev/null +++ 
b/amplify/functions/downloadDocument/node_modules/@smithy/util-endpoints/dist-types/ts3.4/utils/evaluateRules.d.ts @@ -0,0 +1,3 @@ +import { EndpointV2 } from "@smithy/types"; +import { EvaluateOptions } from "../types"; +export declare const evaluateRules: (rules: import("@smithy/types").RuleSetRules, options: EvaluateOptions) => EndpointV2; diff --git a/amplify/functions/downloadDocument/node_modules/@smithy/util-endpoints/dist-types/ts3.4/utils/evaluateTemplate.d.ts b/amplify/functions/downloadDocument/node_modules/@smithy/util-endpoints/dist-types/ts3.4/utils/evaluateTemplate.d.ts new file mode 100644 index 0000000..e6ae9c3 --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@smithy/util-endpoints/dist-types/ts3.4/utils/evaluateTemplate.d.ts @@ -0,0 +1,2 @@ +import { EvaluateOptions } from "../types"; +export declare const evaluateTemplate: (template: string, options: EvaluateOptions) => string; diff --git a/amplify/functions/downloadDocument/node_modules/@smithy/util-endpoints/dist-types/ts3.4/utils/evaluateTreeRule.d.ts b/amplify/functions/downloadDocument/node_modules/@smithy/util-endpoints/dist-types/ts3.4/utils/evaluateTreeRule.d.ts new file mode 100644 index 0000000..8518f7b --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@smithy/util-endpoints/dist-types/ts3.4/utils/evaluateTreeRule.d.ts @@ -0,0 +1,3 @@ +import { EndpointV2 } from "@smithy/types"; +import { EvaluateOptions, TreeRuleObject } from "../types"; +export declare const evaluateTreeRule: (treeRule: TreeRuleObject, options: EvaluateOptions) => EndpointV2 | undefined; diff --git a/amplify/functions/downloadDocument/node_modules/@smithy/util-endpoints/dist-types/ts3.4/utils/getEndpointHeaders.d.ts b/amplify/functions/downloadDocument/node_modules/@smithy/util-endpoints/dist-types/ts3.4/utils/getEndpointHeaders.d.ts new file mode 100644 index 0000000..2775159 --- /dev/null +++ 
b/amplify/functions/downloadDocument/node_modules/@smithy/util-endpoints/dist-types/ts3.4/utils/getEndpointHeaders.d.ts @@ -0,0 +1,2 @@ +import { EndpointObjectHeaders, EvaluateOptions } from "../types"; +export declare const getEndpointHeaders: (headers: EndpointObjectHeaders, options: EvaluateOptions) => {}; diff --git a/amplify/functions/downloadDocument/node_modules/@smithy/util-endpoints/dist-types/ts3.4/utils/getEndpointProperties.d.ts b/amplify/functions/downloadDocument/node_modules/@smithy/util-endpoints/dist-types/ts3.4/utils/getEndpointProperties.d.ts new file mode 100644 index 0000000..944b39d --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@smithy/util-endpoints/dist-types/ts3.4/utils/getEndpointProperties.d.ts @@ -0,0 +1,2 @@ +import { EndpointObjectProperties, EvaluateOptions } from "../types"; +export declare const getEndpointProperties: (properties: EndpointObjectProperties, options: EvaluateOptions) => {}; diff --git a/amplify/functions/downloadDocument/node_modules/@smithy/util-endpoints/dist-types/ts3.4/utils/getEndpointProperty.d.ts b/amplify/functions/downloadDocument/node_modules/@smithy/util-endpoints/dist-types/ts3.4/utils/getEndpointProperty.d.ts new file mode 100644 index 0000000..5002377 --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@smithy/util-endpoints/dist-types/ts3.4/utils/getEndpointProperty.d.ts @@ -0,0 +1,3 @@ +import { EndpointObjectProperty } from "@smithy/types"; +import { EvaluateOptions } from "../types"; +export declare const getEndpointProperty: (property: EndpointObjectProperty, options: EvaluateOptions) => EndpointObjectProperty; diff --git a/amplify/functions/downloadDocument/node_modules/@smithy/util-endpoints/dist-types/ts3.4/utils/getEndpointUrl.d.ts b/amplify/functions/downloadDocument/node_modules/@smithy/util-endpoints/dist-types/ts3.4/utils/getEndpointUrl.d.ts new file mode 100644 index 0000000..9c93422 --- /dev/null +++ 
b/amplify/functions/downloadDocument/node_modules/@smithy/util-endpoints/dist-types/ts3.4/utils/getEndpointUrl.d.ts @@ -0,0 +1,2 @@ +import { EvaluateOptions, Expression } from "../types"; +export declare const getEndpointUrl: (endpointUrl: Expression, options: EvaluateOptions) => URL; diff --git a/amplify/functions/downloadDocument/node_modules/@smithy/util-endpoints/dist-types/ts3.4/utils/getReferenceValue.d.ts b/amplify/functions/downloadDocument/node_modules/@smithy/util-endpoints/dist-types/ts3.4/utils/getReferenceValue.d.ts new file mode 100644 index 0000000..2ebfda3 --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@smithy/util-endpoints/dist-types/ts3.4/utils/getReferenceValue.d.ts @@ -0,0 +1,2 @@ +import { EvaluateOptions, ReferenceObject } from "../types"; +export declare const getReferenceValue: ({ ref }: ReferenceObject, options: EvaluateOptions) => import("../types").FunctionReturn; diff --git a/amplify/functions/downloadDocument/node_modules/@smithy/util-endpoints/dist-types/ts3.4/utils/index.d.ts b/amplify/functions/downloadDocument/node_modules/@smithy/util-endpoints/dist-types/ts3.4/utils/index.d.ts new file mode 100644 index 0000000..bd481df --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@smithy/util-endpoints/dist-types/ts3.4/utils/index.d.ts @@ -0,0 +1,2 @@ +export * from "./customEndpointFunctions"; +export * from "./evaluateRules"; diff --git a/amplify/functions/downloadDocument/node_modules/@smithy/util-endpoints/dist-types/types/EndpointError.d.ts b/amplify/functions/downloadDocument/node_modules/@smithy/util-endpoints/dist-types/types/EndpointError.d.ts new file mode 100644 index 0000000..89132f2 --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@smithy/util-endpoints/dist-types/types/EndpointError.d.ts @@ -0,0 +1,3 @@ +export declare class EndpointError extends Error { + constructor(message: string); +} diff --git 
a/amplify/functions/downloadDocument/node_modules/@smithy/util-endpoints/dist-types/types/EndpointFunctions.d.ts b/amplify/functions/downloadDocument/node_modules/@smithy/util-endpoints/dist-types/types/EndpointFunctions.d.ts new file mode 100644 index 0000000..33b1a0b --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@smithy/util-endpoints/dist-types/types/EndpointFunctions.d.ts @@ -0,0 +1,2 @@ +import { FunctionReturn } from "./shared"; +export type EndpointFunctions = Record FunctionReturn>; diff --git a/amplify/functions/downloadDocument/node_modules/@smithy/util-endpoints/dist-types/types/EndpointRuleObject.d.ts b/amplify/functions/downloadDocument/node_modules/@smithy/util-endpoints/dist-types/types/EndpointRuleObject.d.ts new file mode 100644 index 0000000..d24545f --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@smithy/util-endpoints/dist-types/types/EndpointRuleObject.d.ts @@ -0,0 +1,5 @@ +import { EndpointObject as __EndpointObject, EndpointObjectHeaders as __EndpointObjectHeaders, EndpointObjectProperties as __EndpointObjectProperties, EndpointRuleObject as __EndpointRuleObject } from "@smithy/types"; +export type EndpointObjectProperties = __EndpointObjectProperties; +export type EndpointObjectHeaders = __EndpointObjectHeaders; +export type EndpointObject = __EndpointObject; +export type EndpointRuleObject = __EndpointRuleObject; diff --git a/amplify/functions/downloadDocument/node_modules/@smithy/util-endpoints/dist-types/types/ErrorRuleObject.d.ts b/amplify/functions/downloadDocument/node_modules/@smithy/util-endpoints/dist-types/types/ErrorRuleObject.d.ts new file mode 100644 index 0000000..51fe138 --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@smithy/util-endpoints/dist-types/types/ErrorRuleObject.d.ts @@ -0,0 +1,2 @@ +import { ErrorRuleObject as __ErrorRuleObject } from "@smithy/types"; +export type ErrorRuleObject = __ErrorRuleObject; diff --git 
a/amplify/functions/downloadDocument/node_modules/@smithy/util-endpoints/dist-types/types/RuleSetObject.d.ts b/amplify/functions/downloadDocument/node_modules/@smithy/util-endpoints/dist-types/types/RuleSetObject.d.ts new file mode 100644 index 0000000..3335b80 --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@smithy/util-endpoints/dist-types/types/RuleSetObject.d.ts @@ -0,0 +1,4 @@ +import { DeprecatedObject as __DeprecatedObject, ParameterObject as __ParameterObject, RuleSetObject as __RuleSetObject } from "@smithy/types"; +export type DeprecatedObject = __DeprecatedObject; +export type ParameterObject = __ParameterObject; +export type RuleSetObject = __RuleSetObject; diff --git a/amplify/functions/downloadDocument/node_modules/@smithy/util-endpoints/dist-types/types/TreeRuleObject.d.ts b/amplify/functions/downloadDocument/node_modules/@smithy/util-endpoints/dist-types/types/TreeRuleObject.d.ts new file mode 100644 index 0000000..3d902d0 --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@smithy/util-endpoints/dist-types/types/TreeRuleObject.d.ts @@ -0,0 +1,3 @@ +import { RuleSetRules as __RuleSetRules, TreeRuleObject as __TreeRuleObject } from "@smithy/types"; +export type RuleSetRules = __RuleSetRules; +export type TreeRuleObject = __TreeRuleObject; diff --git a/amplify/functions/downloadDocument/node_modules/@smithy/util-endpoints/dist-types/types/index.d.ts b/amplify/functions/downloadDocument/node_modules/@smithy/util-endpoints/dist-types/types/index.d.ts new file mode 100644 index 0000000..a49f984 --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@smithy/util-endpoints/dist-types/types/index.d.ts @@ -0,0 +1,7 @@ +export * from "./EndpointError"; +export * from "./EndpointFunctions"; +export * from "./EndpointRuleObject"; +export * from "./ErrorRuleObject"; +export * from "./RuleSetObject"; +export * from "./TreeRuleObject"; +export * from "./shared"; diff --git 
a/amplify/functions/downloadDocument/node_modules/@smithy/util-endpoints/dist-types/types/shared.d.ts b/amplify/functions/downloadDocument/node_modules/@smithy/util-endpoints/dist-types/types/shared.d.ts new file mode 100644 index 0000000..8351a92 --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@smithy/util-endpoints/dist-types/types/shared.d.ts @@ -0,0 +1,25 @@ +import { EndpointARN, EndpointPartition, Logger } from "@smithy/types"; +export type ReferenceObject = { + ref: string; +}; +export type FunctionObject = { + fn: string; + argv: FunctionArgv; +}; +export type FunctionArgv = Array; +export type FunctionReturn = string | boolean | number | EndpointARN | EndpointPartition | { + [key: string]: FunctionReturn; +} | null; +export type ConditionObject = FunctionObject & { + assign?: string; +}; +export type Expression = string | ReferenceObject | FunctionObject; +export type EndpointParams = Record; +export type EndpointResolverOptions = { + endpointParams: EndpointParams; + logger?: Logger; +}; +export type ReferenceRecord = Record; +export type EvaluateOptions = EndpointResolverOptions & { + referenceRecord: ReferenceRecord; +}; diff --git a/amplify/functions/downloadDocument/node_modules/@smithy/util-endpoints/dist-types/utils/callFunction.d.ts b/amplify/functions/downloadDocument/node_modules/@smithy/util-endpoints/dist-types/utils/callFunction.d.ts new file mode 100644 index 0000000..729a206 --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@smithy/util-endpoints/dist-types/utils/callFunction.d.ts @@ -0,0 +1,2 @@ +import { EvaluateOptions, FunctionObject, FunctionReturn } from "../types"; +export declare const callFunction: ({ fn, argv }: FunctionObject, options: EvaluateOptions) => FunctionReturn; diff --git a/amplify/functions/downloadDocument/node_modules/@smithy/util-endpoints/dist-types/utils/customEndpointFunctions.d.ts 
b/amplify/functions/downloadDocument/node_modules/@smithy/util-endpoints/dist-types/utils/customEndpointFunctions.d.ts new file mode 100644 index 0000000..d8971d0 --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@smithy/util-endpoints/dist-types/utils/customEndpointFunctions.d.ts @@ -0,0 +1,4 @@ +import { EndpointFunctions } from "../types/EndpointFunctions"; +export declare const customEndpointFunctions: { + [key: string]: EndpointFunctions; +}; diff --git a/amplify/functions/downloadDocument/node_modules/@smithy/util-endpoints/dist-types/utils/endpointFunctions.d.ts b/amplify/functions/downloadDocument/node_modules/@smithy/util-endpoints/dist-types/utils/endpointFunctions.d.ts new file mode 100644 index 0000000..12d75b9 --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@smithy/util-endpoints/dist-types/utils/endpointFunctions.d.ts @@ -0,0 +1,11 @@ +export declare const endpointFunctions: { + booleanEquals: (value1: boolean, value2: boolean) => boolean; + getAttr: (value: import("../lib").GetAttrValue, path: string) => import("../lib").GetAttrValue; + isSet: (value: unknown) => boolean; + isValidHostLabel: (value: string, allowSubDomains?: boolean) => boolean; + not: (value: boolean) => boolean; + parseURL: (value: string | URL | import("@smithy/types").Endpoint) => import("@smithy/types").EndpointURL | null; + stringEquals: (value1: string, value2: string) => boolean; + substring: (input: string, start: number, stop: number, reverse: boolean) => string | null; + uriEncode: (value: string) => string; +}; diff --git a/amplify/functions/downloadDocument/node_modules/@smithy/util-endpoints/dist-types/utils/evaluateCondition.d.ts b/amplify/functions/downloadDocument/node_modules/@smithy/util-endpoints/dist-types/utils/evaluateCondition.d.ts new file mode 100644 index 0000000..5fbe59f --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@smithy/util-endpoints/dist-types/utils/evaluateCondition.d.ts @@ -0,0 +1,8 @@ 
+import { ConditionObject, EvaluateOptions } from "../types"; +export declare const evaluateCondition: ({ assign, ...fnArgs }: ConditionObject, options: EvaluateOptions) => { + toAssign?: { + name: string; + value: import("../types").FunctionReturn; + } | undefined; + result: boolean; +}; diff --git a/amplify/functions/downloadDocument/node_modules/@smithy/util-endpoints/dist-types/utils/evaluateConditions.d.ts b/amplify/functions/downloadDocument/node_modules/@smithy/util-endpoints/dist-types/utils/evaluateConditions.d.ts new file mode 100644 index 0000000..4131beb --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@smithy/util-endpoints/dist-types/utils/evaluateConditions.d.ts @@ -0,0 +1,8 @@ +import { ConditionObject, EvaluateOptions, FunctionReturn } from "../types"; +export declare const evaluateConditions: (conditions: ConditionObject[] | undefined, options: EvaluateOptions) => { + result: false; + referenceRecord?: undefined; +} | { + result: boolean; + referenceRecord: Record; +}; diff --git a/amplify/functions/downloadDocument/node_modules/@smithy/util-endpoints/dist-types/utils/evaluateEndpointRule.d.ts b/amplify/functions/downloadDocument/node_modules/@smithy/util-endpoints/dist-types/utils/evaluateEndpointRule.d.ts new file mode 100644 index 0000000..da9496e --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@smithy/util-endpoints/dist-types/utils/evaluateEndpointRule.d.ts @@ -0,0 +1,3 @@ +import { EndpointV2 } from "@smithy/types"; +import { EndpointRuleObject, EvaluateOptions } from "../types"; +export declare const evaluateEndpointRule: (endpointRule: EndpointRuleObject, options: EvaluateOptions) => EndpointV2 | undefined; diff --git a/amplify/functions/downloadDocument/node_modules/@smithy/util-endpoints/dist-types/utils/evaluateErrorRule.d.ts b/amplify/functions/downloadDocument/node_modules/@smithy/util-endpoints/dist-types/utils/evaluateErrorRule.d.ts new file mode 100644 index 0000000..df4973d --- /dev/null +++ 
b/amplify/functions/downloadDocument/node_modules/@smithy/util-endpoints/dist-types/utils/evaluateErrorRule.d.ts @@ -0,0 +1,2 @@ +import { ErrorRuleObject, EvaluateOptions } from "../types"; +export declare const evaluateErrorRule: (errorRule: ErrorRuleObject, options: EvaluateOptions) => void; diff --git a/amplify/functions/downloadDocument/node_modules/@smithy/util-endpoints/dist-types/utils/evaluateExpression.d.ts b/amplify/functions/downloadDocument/node_modules/@smithy/util-endpoints/dist-types/utils/evaluateExpression.d.ts new file mode 100644 index 0000000..2541960 --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@smithy/util-endpoints/dist-types/utils/evaluateExpression.d.ts @@ -0,0 +1,2 @@ +import { EvaluateOptions, Expression } from "../types"; +export declare const evaluateExpression: (obj: Expression, keyName: string, options: EvaluateOptions) => import("../types").FunctionReturn; diff --git a/amplify/functions/downloadDocument/node_modules/@smithy/util-endpoints/dist-types/utils/evaluateRules.d.ts b/amplify/functions/downloadDocument/node_modules/@smithy/util-endpoints/dist-types/utils/evaluateRules.d.ts new file mode 100644 index 0000000..d38c8be --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@smithy/util-endpoints/dist-types/utils/evaluateRules.d.ts @@ -0,0 +1,3 @@ +import { EndpointV2 } from "@smithy/types"; +import { EvaluateOptions } from "../types"; +export declare const evaluateRules: (rules: import("@smithy/types").RuleSetRules, options: EvaluateOptions) => EndpointV2; diff --git a/amplify/functions/downloadDocument/node_modules/@smithy/util-endpoints/dist-types/utils/evaluateTemplate.d.ts b/amplify/functions/downloadDocument/node_modules/@smithy/util-endpoints/dist-types/utils/evaluateTemplate.d.ts new file mode 100644 index 0000000..9b0b9ad --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@smithy/util-endpoints/dist-types/utils/evaluateTemplate.d.ts @@ -0,0 +1,2 @@ +import { 
EvaluateOptions } from "../types"; +export declare const evaluateTemplate: (template: string, options: EvaluateOptions) => string; diff --git a/amplify/functions/downloadDocument/node_modules/@smithy/util-endpoints/dist-types/utils/evaluateTreeRule.d.ts b/amplify/functions/downloadDocument/node_modules/@smithy/util-endpoints/dist-types/utils/evaluateTreeRule.d.ts new file mode 100644 index 0000000..2564388 --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@smithy/util-endpoints/dist-types/utils/evaluateTreeRule.d.ts @@ -0,0 +1,3 @@ +import { EndpointV2 } from "@smithy/types"; +import { EvaluateOptions, TreeRuleObject } from "../types"; +export declare const evaluateTreeRule: (treeRule: TreeRuleObject, options: EvaluateOptions) => EndpointV2 | undefined; diff --git a/amplify/functions/downloadDocument/node_modules/@smithy/util-endpoints/dist-types/utils/getEndpointHeaders.d.ts b/amplify/functions/downloadDocument/node_modules/@smithy/util-endpoints/dist-types/utils/getEndpointHeaders.d.ts new file mode 100644 index 0000000..a802565 --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@smithy/util-endpoints/dist-types/utils/getEndpointHeaders.d.ts @@ -0,0 +1,2 @@ +import { EndpointObjectHeaders, EvaluateOptions } from "../types"; +export declare const getEndpointHeaders: (headers: EndpointObjectHeaders, options: EvaluateOptions) => {}; diff --git a/amplify/functions/downloadDocument/node_modules/@smithy/util-endpoints/dist-types/utils/getEndpointProperties.d.ts b/amplify/functions/downloadDocument/node_modules/@smithy/util-endpoints/dist-types/utils/getEndpointProperties.d.ts new file mode 100644 index 0000000..9c83bb0 --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@smithy/util-endpoints/dist-types/utils/getEndpointProperties.d.ts @@ -0,0 +1,2 @@ +import { EndpointObjectProperties, EvaluateOptions } from "../types"; +export declare const getEndpointProperties: (properties: EndpointObjectProperties, options: 
EvaluateOptions) => {}; diff --git a/amplify/functions/downloadDocument/node_modules/@smithy/util-endpoints/dist-types/utils/getEndpointProperty.d.ts b/amplify/functions/downloadDocument/node_modules/@smithy/util-endpoints/dist-types/utils/getEndpointProperty.d.ts new file mode 100644 index 0000000..7bc5b82 --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@smithy/util-endpoints/dist-types/utils/getEndpointProperty.d.ts @@ -0,0 +1,3 @@ +import { EndpointObjectProperty } from "@smithy/types"; +import { EvaluateOptions } from "../types"; +export declare const getEndpointProperty: (property: EndpointObjectProperty, options: EvaluateOptions) => EndpointObjectProperty; diff --git a/amplify/functions/downloadDocument/node_modules/@smithy/util-endpoints/dist-types/utils/getEndpointUrl.d.ts b/amplify/functions/downloadDocument/node_modules/@smithy/util-endpoints/dist-types/utils/getEndpointUrl.d.ts new file mode 100644 index 0000000..4ab2289 --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@smithy/util-endpoints/dist-types/utils/getEndpointUrl.d.ts @@ -0,0 +1,2 @@ +import { EvaluateOptions, Expression } from "../types"; +export declare const getEndpointUrl: (endpointUrl: Expression, options: EvaluateOptions) => URL; diff --git a/amplify/functions/downloadDocument/node_modules/@smithy/util-endpoints/dist-types/utils/getReferenceValue.d.ts b/amplify/functions/downloadDocument/node_modules/@smithy/util-endpoints/dist-types/utils/getReferenceValue.d.ts new file mode 100644 index 0000000..3699ec1 --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@smithy/util-endpoints/dist-types/utils/getReferenceValue.d.ts @@ -0,0 +1,2 @@ +import { EvaluateOptions, ReferenceObject } from "../types"; +export declare const getReferenceValue: ({ ref }: ReferenceObject, options: EvaluateOptions) => import("../types").FunctionReturn; diff --git a/amplify/functions/downloadDocument/node_modules/@smithy/util-endpoints/dist-types/utils/index.d.ts 
b/amplify/functions/downloadDocument/node_modules/@smithy/util-endpoints/dist-types/utils/index.d.ts new file mode 100644 index 0000000..b571d02 --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@smithy/util-endpoints/dist-types/utils/index.d.ts @@ -0,0 +1,2 @@ +export * from "./customEndpointFunctions"; +export * from "./evaluateRules"; diff --git a/amplify/functions/downloadDocument/node_modules/@smithy/util-endpoints/package.json b/amplify/functions/downloadDocument/node_modules/@smithy/util-endpoints/package.json new file mode 100644 index 0000000..7c6f9b1 --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@smithy/util-endpoints/package.json @@ -0,0 +1,68 @@ +{ + "name": "@smithy/util-endpoints", + "version": "3.0.2", + "description": "Utilities to help with endpoint resolution.", + "main": "./dist-cjs/index.js", + "module": "./dist-es/index.js", + "scripts": { + "build": "concurrently 'yarn:build:cjs' 'yarn:build:es' 'yarn:build:types && yarn build:types:downlevel'", + "build:cjs": "node ../../scripts/inline util-endpoints", + "build:es": "yarn g:tsc -p tsconfig.es.json", + "build:types": "yarn g:tsc -p tsconfig.types.json", + "build:types:downlevel": "rimraf dist-types/ts3.4 && downlevel-dts dist-types dist-types/ts3.4", + "stage-release": "rimraf ./.release && yarn pack && mkdir ./.release && tar zxvf ./package.tgz --directory ./.release && rm ./package.tgz", + "clean": "rimraf ./dist-* && rimraf *.tsbuildinfo || exit 0", + "lint": "eslint -c ../../.eslintrc.js \"src/**/*.ts\"", + "format": "prettier --config ../../prettier.config.js --ignore-path ../../.prettierignore --write \"**/*.{ts,md,json}\"", + "test": "yarn g:vitest run", + "test:integration": "yarn g:vitest run -c vitest.config.integ.ts", + "test:watch": "yarn g:vitest watch", + "test:integration:watch": "yarn g:vitest watch -c vitest.config.integ.ts" + }, + "keywords": [ + "endpoint" + ], + "author": { + "name": "AWS SDK for JavaScript Team", + "url": 
"https://aws.amazon.com/javascript/" + }, + "license": "Apache-2.0", + "dependencies": { + "@smithy/node-config-provider": "^4.0.2", + "@smithy/types": "^4.2.0", + "tslib": "^2.6.2" + }, + "devDependencies": { + "@types/node": "^18.11.9", + "concurrently": "7.0.0", + "downlevel-dts": "0.10.1", + "rimraf": "3.0.2", + "typedoc": "0.23.23" + }, + "types": "./dist-types/index.d.ts", + "engines": { + "node": ">=18.0.0" + }, + "typesVersions": { + "<4.0": { + "types/*": [ + "types/ts3.4/*" + ] + } + }, + "files": [ + "dist-*/**" + ], + "homepage": "https://github.com/smithy-lang/smithy-typescript/tree/master/packages/util-endpoints", + "repository": { + "type": "git", + "url": "https://github.com/smithy-lang/smithy-typescript.git", + "directory": "packages/util-endpoints" + }, + "typedoc": { + "entryPoint": "src/index.ts" + }, + "publishConfig": { + "directory": ".release/package" + } +} \ No newline at end of file diff --git a/amplify/functions/downloadDocument/node_modules/@smithy/util-hex-encoding/LICENSE b/amplify/functions/downloadDocument/node_modules/@smithy/util-hex-encoding/LICENSE new file mode 100644 index 0000000..7b6491b --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@smithy/util-hex-encoding/LICENSE @@ -0,0 +1,201 @@ +Apache License + Version 2.0, January 2004 + http://www.apache.org/licenses/ + + TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION + + 1. Definitions. + + "License" shall mean the terms and conditions for use, reproduction, + and distribution as defined by Sections 1 through 9 of this document. + + "Licensor" shall mean the copyright owner or entity authorized by + the copyright owner that is granting the License. + + "Legal Entity" shall mean the union of the acting entity and all + other entities that control, are controlled by, or are under common + control with that entity. 
For the purposes of this definition, + "control" means (i) the power, direct or indirect, to cause the + direction or management of such entity, whether by contract or + otherwise, or (ii) ownership of fifty percent (50%) or more of the + outstanding shares, or (iii) beneficial ownership of such entity. + + "You" (or "Your") shall mean an individual or Legal Entity + exercising permissions granted by this License. + + "Source" form shall mean the preferred form for making modifications, + including but not limited to software source code, documentation + source, and configuration files. + + "Object" form shall mean any form resulting from mechanical + transformation or translation of a Source form, including but + not limited to compiled object code, generated documentation, + and conversions to other media types. + + "Work" shall mean the work of authorship, whether in Source or + Object form, made available under the License, as indicated by a + copyright notice that is included in or attached to the work + (an example is provided in the Appendix below). + + "Derivative Works" shall mean any work, whether in Source or Object + form, that is based on (or derived from) the Work and for which the + editorial revisions, annotations, elaborations, or other modifications + represent, as a whole, an original work of authorship. For the purposes + of this License, Derivative Works shall not include works that remain + separable from, or merely link (or bind by name) to the interfaces of, + the Work and Derivative Works thereof. + + "Contribution" shall mean any work of authorship, including + the original version of the Work and any modifications or additions + to that Work or Derivative Works thereof, that is intentionally + submitted to Licensor for inclusion in the Work by the copyright owner + or by an individual or Legal Entity authorized to submit on behalf of + the copyright owner. 
For the purposes of this definition, "submitted" + means any form of electronic, verbal, or written communication sent + to the Licensor or its representatives, including but not limited to + communication on electronic mailing lists, source code control systems, + and issue tracking systems that are managed by, or on behalf of, the + Licensor for the purpose of discussing and improving the Work, but + excluding communication that is conspicuously marked or otherwise + designated in writing by the copyright owner as "Not a Contribution." + + "Contributor" shall mean Licensor and any individual or Legal Entity + on behalf of whom a Contribution has been received by Licensor and + subsequently incorporated within the Work. + + 2. Grant of Copyright License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + copyright license to reproduce, prepare Derivative Works of, + publicly display, publicly perform, sublicense, and distribute the + Work and such Derivative Works in Source or Object form. + + 3. Grant of Patent License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + (except as stated in this section) patent license to make, have made, + use, offer to sell, sell, import, and otherwise transfer the Work, + where such license applies only to those patent claims licensable + by such Contributor that are necessarily infringed by their + Contribution(s) alone or by combination of their Contribution(s) + with the Work to which such Contribution(s) was submitted. 
If You + institute patent litigation against any entity (including a + cross-claim or counterclaim in a lawsuit) alleging that the Work + or a Contribution incorporated within the Work constitutes direct + or contributory patent infringement, then any patent licenses + granted to You under this License for that Work shall terminate + as of the date such litigation is filed. + + 4. Redistribution. You may reproduce and distribute copies of the + Work or Derivative Works thereof in any medium, with or without + modifications, and in Source or Object form, provided that You + meet the following conditions: + + (a) You must give any other recipients of the Work or + Derivative Works a copy of this License; and + + (b) You must cause any modified files to carry prominent notices + stating that You changed the files; and + + (c) You must retain, in the Source form of any Derivative Works + that You distribute, all copyright, patent, trademark, and + attribution notices from the Source form of the Work, + excluding those notices that do not pertain to any part of + the Derivative Works; and + + (d) If the Work includes a "NOTICE" text file as part of its + distribution, then any Derivative Works that You distribute must + include a readable copy of the attribution notices contained + within such NOTICE file, excluding those notices that do not + pertain to any part of the Derivative Works, in at least one + of the following places: within a NOTICE text file distributed + as part of the Derivative Works; within the Source form or + documentation, if provided along with the Derivative Works; or, + within a display generated by the Derivative Works, if and + wherever such third-party notices normally appear. The contents + of the NOTICE file are for informational purposes only and + do not modify the License. 
You may add Your own attribution + notices within Derivative Works that You distribute, alongside + or as an addendum to the NOTICE text from the Work, provided + that such additional attribution notices cannot be construed + as modifying the License. + + You may add Your own copyright statement to Your modifications and + may provide additional or different license terms and conditions + for use, reproduction, or distribution of Your modifications, or + for any such Derivative Works as a whole, provided Your use, + reproduction, and distribution of the Work otherwise complies with + the conditions stated in this License. + + 5. Submission of Contributions. Unless You explicitly state otherwise, + any Contribution intentionally submitted for inclusion in the Work + by You to the Licensor shall be under the terms and conditions of + this License, without any additional terms or conditions. + Notwithstanding the above, nothing herein shall supersede or modify + the terms of any separate license agreement you may have executed + with Licensor regarding such Contributions. + + 6. Trademarks. This License does not grant permission to use the trade + names, trademarks, service marks, or product names of the Licensor, + except as required for reasonable and customary use in describing the + origin of the Work and reproducing the content of the NOTICE file. + + 7. Disclaimer of Warranty. Unless required by applicable law or + agreed to in writing, Licensor provides the Work (and each + Contributor provides its Contributions) on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or + implied, including, without limitation, any warranties or conditions + of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A + PARTICULAR PURPOSE. You are solely responsible for determining the + appropriateness of using or redistributing the Work and assume any + risks associated with Your exercise of permissions under this License. + + 8. 
Limitation of Liability. In no event and under no legal theory, + whether in tort (including negligence), contract, or otherwise, + unless required by applicable law (such as deliberate and grossly + negligent acts) or agreed to in writing, shall any Contributor be + liable to You for damages, including any direct, indirect, special, + incidental, or consequential damages of any character arising as a + result of this License or out of the use or inability to use the + Work (including but not limited to damages for loss of goodwill, + work stoppage, computer failure or malfunction, or any and all + other commercial damages or losses), even if such Contributor + has been advised of the possibility of such damages. + + 9. Accepting Warranty or Additional Liability. While redistributing + the Work or Derivative Works thereof, You may choose to offer, + and charge a fee for, acceptance of support, warranty, indemnity, + or other liability obligations and/or rights consistent with this + License. However, in accepting such obligations, You may act only + on Your own behalf and on Your sole responsibility, not on behalf + of any other Contributor, and only if You agree to indemnify, + defend, and hold each Contributor harmless for any liability + incurred by, or claims asserted against, such Contributor by reason + of your accepting any such warranty or additional liability. + + END OF TERMS AND CONDITIONS + + APPENDIX: How to apply the Apache License to your work. + + To apply the Apache License to your work, attach the following + boilerplate notice, with the fields enclosed by brackets "{}" + replaced with your own identifying information. (Don't include + the brackets!) The text should be enclosed in the appropriate + comment syntax for the file format. We also recommend that a + file or class name and description of purpose be included on the + same "printed page" as the copyright notice for easier + identification within third-party archives. 
+ + Copyright 2018-2020 Amazon.com, Inc. or its affiliates. All Rights Reserved. + + Licensed under the Apache License, Version 2.0 (the "License"); + you may not use this file except in compliance with the License. + You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + + Unless required by applicable law or agreed to in writing, software + distributed under the License is distributed on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + See the License for the specific language governing permissions and + limitations under the License. \ No newline at end of file diff --git a/amplify/functions/downloadDocument/node_modules/@smithy/util-hex-encoding/README.md b/amplify/functions/downloadDocument/node_modules/@smithy/util-hex-encoding/README.md new file mode 100644 index 0000000..67e4499 --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@smithy/util-hex-encoding/README.md @@ -0,0 +1,4 @@ +# @smithy/util-hex-encoding + +[![NPM version](https://img.shields.io/npm/v/@smithy/util-hex-encoding/latest.svg)](https://www.npmjs.com/package/@smithy/util-hex-encoding) +[![NPM downloads](https://img.shields.io/npm/dm/@smithy/util-hex-encoding.svg)](https://www.npmjs.com/package/@smithy/util-hex-encoding) diff --git a/amplify/functions/downloadDocument/node_modules/@smithy/util-hex-encoding/dist-cjs/index.js b/amplify/functions/downloadDocument/node_modules/@smithy/util-hex-encoding/dist-cjs/index.js new file mode 100644 index 0000000..78a59ea --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@smithy/util-hex-encoding/dist-cjs/index.js @@ -0,0 +1,67 @@ +var __defProp = Object.defineProperty; +var __getOwnPropDesc = Object.getOwnPropertyDescriptor; +var __getOwnPropNames = Object.getOwnPropertyNames; +var __hasOwnProp = Object.prototype.hasOwnProperty; +var __name = (target, value) => __defProp(target, "name", { value, configurable: true }); +var __export = (target, 
all) => { + for (var name in all) + __defProp(target, name, { get: all[name], enumerable: true }); +}; +var __copyProps = (to, from, except, desc) => { + if (from && typeof from === "object" || typeof from === "function") { + for (let key of __getOwnPropNames(from)) + if (!__hasOwnProp.call(to, key) && key !== except) + __defProp(to, key, { get: () => from[key], enumerable: !(desc = __getOwnPropDesc(from, key)) || desc.enumerable }); + } + return to; +}; +var __toCommonJS = (mod) => __copyProps(__defProp({}, "__esModule", { value: true }), mod); + +// src/index.ts +var src_exports = {}; +__export(src_exports, { + fromHex: () => fromHex, + toHex: () => toHex +}); +module.exports = __toCommonJS(src_exports); +var SHORT_TO_HEX = {}; +var HEX_TO_SHORT = {}; +for (let i = 0; i < 256; i++) { + let encodedByte = i.toString(16).toLowerCase(); + if (encodedByte.length === 1) { + encodedByte = `0${encodedByte}`; + } + SHORT_TO_HEX[i] = encodedByte; + HEX_TO_SHORT[encodedByte] = i; +} +function fromHex(encoded) { + if (encoded.length % 2 !== 0) { + throw new Error("Hex encoded strings must have an even number length"); + } + const out = new Uint8Array(encoded.length / 2); + for (let i = 0; i < encoded.length; i += 2) { + const encodedByte = encoded.slice(i, i + 2).toLowerCase(); + if (encodedByte in HEX_TO_SHORT) { + out[i / 2] = HEX_TO_SHORT[encodedByte]; + } else { + throw new Error(`Cannot decode unrecognized sequence ${encodedByte} as hexadecimal`); + } + } + return out; +} +__name(fromHex, "fromHex"); +function toHex(bytes) { + let out = ""; + for (let i = 0; i < bytes.byteLength; i++) { + out += SHORT_TO_HEX[bytes[i]]; + } + return out; +} +__name(toHex, "toHex"); +// Annotate the CommonJS export names for ESM import in node: + +0 && (module.exports = { + fromHex, + toHex +}); + diff --git a/amplify/functions/downloadDocument/node_modules/@smithy/util-hex-encoding/dist-es/index.js 
b/amplify/functions/downloadDocument/node_modules/@smithy/util-hex-encoding/dist-es/index.js new file mode 100644 index 0000000..e47b3aa --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@smithy/util-hex-encoding/dist-es/index.js @@ -0,0 +1,33 @@ +const SHORT_TO_HEX = {}; +const HEX_TO_SHORT = {}; +for (let i = 0; i < 256; i++) { + let encodedByte = i.toString(16).toLowerCase(); + if (encodedByte.length === 1) { + encodedByte = `0${encodedByte}`; + } + SHORT_TO_HEX[i] = encodedByte; + HEX_TO_SHORT[encodedByte] = i; +} +export function fromHex(encoded) { + if (encoded.length % 2 !== 0) { + throw new Error("Hex encoded strings must have an even number length"); + } + const out = new Uint8Array(encoded.length / 2); + for (let i = 0; i < encoded.length; i += 2) { + const encodedByte = encoded.slice(i, i + 2).toLowerCase(); + if (encodedByte in HEX_TO_SHORT) { + out[i / 2] = HEX_TO_SHORT[encodedByte]; + } + else { + throw new Error(`Cannot decode unrecognized sequence ${encodedByte} as hexadecimal`); + } + } + return out; +} +export function toHex(bytes) { + let out = ""; + for (let i = 0; i < bytes.byteLength; i++) { + out += SHORT_TO_HEX[bytes[i]]; + } + return out; +} diff --git a/amplify/functions/downloadDocument/node_modules/@smithy/util-hex-encoding/dist-types/index.d.ts b/amplify/functions/downloadDocument/node_modules/@smithy/util-hex-encoding/dist-types/index.d.ts new file mode 100644 index 0000000..9d4307a --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@smithy/util-hex-encoding/dist-types/index.d.ts @@ -0,0 +1,12 @@ +/** + * Converts a hexadecimal encoded string to a Uint8Array of bytes. + * + * @param encoded The hexadecimal encoded string + */ +export declare function fromHex(encoded: string): Uint8Array; +/** + * Converts a Uint8Array of binary data to a hexadecimal encoded string. 
+ * + * @param bytes The binary data to encode + */ +export declare function toHex(bytes: Uint8Array): string; diff --git a/amplify/functions/downloadDocument/node_modules/@smithy/util-hex-encoding/dist-types/ts3.4/index.d.ts b/amplify/functions/downloadDocument/node_modules/@smithy/util-hex-encoding/dist-types/ts3.4/index.d.ts new file mode 100644 index 0000000..02a8848 --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@smithy/util-hex-encoding/dist-types/ts3.4/index.d.ts @@ -0,0 +1,12 @@ +/** + * Converts a hexadecimal encoded string to a Uint8Array of bytes. + * + * @param encoded The hexadecimal encoded string + */ +export declare function fromHex(encoded: string): Uint8Array; +/** + * Converts a Uint8Array of binary data to a hexadecimal encoded string. + * + * @param bytes The binary data to encode + */ +export declare function toHex(bytes: Uint8Array): string; diff --git a/amplify/functions/downloadDocument/node_modules/@smithy/util-hex-encoding/package.json b/amplify/functions/downloadDocument/node_modules/@smithy/util-hex-encoding/package.json new file mode 100644 index 0000000..2c1ba3d --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@smithy/util-hex-encoding/package.json @@ -0,0 +1,60 @@ +{ + "name": "@smithy/util-hex-encoding", + "version": "4.0.0", + "description": "Converts binary buffers to and from lowercase hexadecimal encoding", + "scripts": { + "build": "concurrently 'yarn:build:cjs' 'yarn:build:es' 'yarn:build:types && yarn build:types:downlevel'", + "build:cjs": "node ../../scripts/inline util-hex-encoding", + "build:es": "yarn g:tsc -p tsconfig.es.json", + "build:types": "yarn g:tsc -p tsconfig.types.json", + "build:types:downlevel": "rimraf dist-types/ts3.4 && downlevel-dts dist-types dist-types/ts3.4", + "stage-release": "rimraf ./.release && yarn pack && mkdir ./.release && tar zxvf ./package.tgz --directory ./.release && rm ./package.tgz", + "clean": "rimraf ./dist-* && rimraf *.tsbuildinfo || exit 0", 
+ "lint": "eslint -c ../../.eslintrc.js \"src/**/*.ts\"", + "format": "prettier --config ../../prettier.config.js --ignore-path ../.prettierignore --write \"**/*.{ts,md,json}\"", + "test": "yarn g:vitest run", + "test:watch": "yarn g:vitest watch" + }, + "author": { + "name": "AWS SDK for JavaScript Team", + "url": "https://aws.amazon.com/javascript/" + }, + "license": "Apache-2.0", + "main": "./dist-cjs/index.js", + "module": "./dist-es/index.js", + "dependencies": { + "tslib": "^2.6.2" + }, + "types": "./dist-types/index.d.ts", + "engines": { + "node": ">=18.0.0" + }, + "typesVersions": { + "<4.0": { + "dist-types/*": [ + "dist-types/ts3.4/*" + ] + } + }, + "files": [ + "dist-*/**" + ], + "homepage": "https://github.com/awslabs/smithy-typescript/tree/main/packages/util-hex-encoding", + "repository": { + "type": "git", + "url": "https://github.com/awslabs/smithy-typescript.git", + "directory": "packages/util-hex-encoding" + }, + "devDependencies": { + "concurrently": "7.0.0", + "downlevel-dts": "0.10.1", + "rimraf": "3.0.2", + "typedoc": "0.23.23" + }, + "typedoc": { + "entryPoint": "src/index.ts" + }, + "publishConfig": { + "directory": ".release/package" + } +} \ No newline at end of file diff --git a/amplify/functions/downloadDocument/node_modules/@smithy/util-middleware/LICENSE b/amplify/functions/downloadDocument/node_modules/@smithy/util-middleware/LICENSE new file mode 100644 index 0000000..a1895fa --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@smithy/util-middleware/LICENSE @@ -0,0 +1,201 @@ +Apache License + Version 2.0, January 2004 + http://www.apache.org/licenses/ + + TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION + + 1. Definitions. + + "License" shall mean the terms and conditions for use, reproduction, + and distribution as defined by Sections 1 through 9 of this document. + + "Licensor" shall mean the copyright owner or entity authorized by + the copyright owner that is granting the License. 
+ + "Legal Entity" shall mean the union of the acting entity and all + other entities that control, are controlled by, or are under common + control with that entity. For the purposes of this definition, + "control" means (i) the power, direct or indirect, to cause the + direction or management of such entity, whether by contract or + otherwise, or (ii) ownership of fifty percent (50%) or more of the + outstanding shares, or (iii) beneficial ownership of such entity. + + "You" (or "Your") shall mean an individual or Legal Entity + exercising permissions granted by this License. + + "Source" form shall mean the preferred form for making modifications, + including but not limited to software source code, documentation + source, and configuration files. + + "Object" form shall mean any form resulting from mechanical + transformation or translation of a Source form, including but + not limited to compiled object code, generated documentation, + and conversions to other media types. + + "Work" shall mean the work of authorship, whether in Source or + Object form, made available under the License, as indicated by a + copyright notice that is included in or attached to the work + (an example is provided in the Appendix below). + + "Derivative Works" shall mean any work, whether in Source or Object + form, that is based on (or derived from) the Work and for which the + editorial revisions, annotations, elaborations, or other modifications + represent, as a whole, an original work of authorship. For the purposes + of this License, Derivative Works shall not include works that remain + separable from, or merely link (or bind by name) to the interfaces of, + the Work and Derivative Works thereof. 
+ + "Contribution" shall mean any work of authorship, including + the original version of the Work and any modifications or additions + to that Work or Derivative Works thereof, that is intentionally + submitted to Licensor for inclusion in the Work by the copyright owner + or by an individual or Legal Entity authorized to submit on behalf of + the copyright owner. For the purposes of this definition, "submitted" + means any form of electronic, verbal, or written communication sent + to the Licensor or its representatives, including but not limited to + communication on electronic mailing lists, source code control systems, + and issue tracking systems that are managed by, or on behalf of, the + Licensor for the purpose of discussing and improving the Work, but + excluding communication that is conspicuously marked or otherwise + designated in writing by the copyright owner as "Not a Contribution." + + "Contributor" shall mean Licensor and any individual or Legal Entity + on behalf of whom a Contribution has been received by Licensor and + subsequently incorporated within the Work. + + 2. Grant of Copyright License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + copyright license to reproduce, prepare Derivative Works of, + publicly display, publicly perform, sublicense, and distribute the + Work and such Derivative Works in Source or Object form. + + 3. Grant of Patent License. 
Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + (except as stated in this section) patent license to make, have made, + use, offer to sell, sell, import, and otherwise transfer the Work, + where such license applies only to those patent claims licensable + by such Contributor that are necessarily infringed by their + Contribution(s) alone or by combination of their Contribution(s) + with the Work to which such Contribution(s) was submitted. If You + institute patent litigation against any entity (including a + cross-claim or counterclaim in a lawsuit) alleging that the Work + or a Contribution incorporated within the Work constitutes direct + or contributory patent infringement, then any patent licenses + granted to You under this License for that Work shall terminate + as of the date such litigation is filed. + + 4. Redistribution. You may reproduce and distribute copies of the + Work or Derivative Works thereof in any medium, with or without + modifications, and in Source or Object form, provided that You + meet the following conditions: + + (a) You must give any other recipients of the Work or + Derivative Works a copy of this License; and + + (b) You must cause any modified files to carry prominent notices + stating that You changed the files; and + + (c) You must retain, in the Source form of any Derivative Works + that You distribute, all copyright, patent, trademark, and + attribution notices from the Source form of the Work, + excluding those notices that do not pertain to any part of + the Derivative Works; and + + (d) If the Work includes a "NOTICE" text file as part of its + distribution, then any Derivative Works that You distribute must + include a readable copy of the attribution notices contained + within such NOTICE file, excluding those notices that do not + pertain to any part of the Derivative Works, in at least one + of 
the following places: within a NOTICE text file distributed + as part of the Derivative Works; within the Source form or + documentation, if provided along with the Derivative Works; or, + within a display generated by the Derivative Works, if and + wherever such third-party notices normally appear. The contents + of the NOTICE file are for informational purposes only and + do not modify the License. You may add Your own attribution + notices within Derivative Works that You distribute, alongside + or as an addendum to the NOTICE text from the Work, provided + that such additional attribution notices cannot be construed + as modifying the License. + + You may add Your own copyright statement to Your modifications and + may provide additional or different license terms and conditions + for use, reproduction, or distribution of Your modifications, or + for any such Derivative Works as a whole, provided Your use, + reproduction, and distribution of the Work otherwise complies with + the conditions stated in this License. + + 5. Submission of Contributions. Unless You explicitly state otherwise, + any Contribution intentionally submitted for inclusion in the Work + by You to the Licensor shall be under the terms and conditions of + this License, without any additional terms or conditions. + Notwithstanding the above, nothing herein shall supersede or modify + the terms of any separate license agreement you may have executed + with Licensor regarding such Contributions. + + 6. Trademarks. This License does not grant permission to use the trade + names, trademarks, service marks, or product names of the Licensor, + except as required for reasonable and customary use in describing the + origin of the Work and reproducing the content of the NOTICE file. + + 7. Disclaimer of Warranty. 
Unless required by applicable law or + agreed to in writing, Licensor provides the Work (and each + Contributor provides its Contributions) on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or + implied, including, without limitation, any warranties or conditions + of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A + PARTICULAR PURPOSE. You are solely responsible for determining the + appropriateness of using or redistributing the Work and assume any + risks associated with Your exercise of permissions under this License. + + 8. Limitation of Liability. In no event and under no legal theory, + whether in tort (including negligence), contract, or otherwise, + unless required by applicable law (such as deliberate and grossly + negligent acts) or agreed to in writing, shall any Contributor be + liable to You for damages, including any direct, indirect, special, + incidental, or consequential damages of any character arising as a + result of this License or out of the use or inability to use the + Work (including but not limited to damages for loss of goodwill, + work stoppage, computer failure or malfunction, or any and all + other commercial damages or losses), even if such Contributor + has been advised of the possibility of such damages. + + 9. Accepting Warranty or Additional Liability. While redistributing + the Work or Derivative Works thereof, You may choose to offer, + and charge a fee for, acceptance of support, warranty, indemnity, + or other liability obligations and/or rights consistent with this + License. However, in accepting such obligations, You may act only + on Your own behalf and on Your sole responsibility, not on behalf + of any other Contributor, and only if You agree to indemnify, + defend, and hold each Contributor harmless for any liability + incurred by, or claims asserted against, such Contributor by reason + of your accepting any such warranty or additional liability. 
+ + END OF TERMS AND CONDITIONS + + APPENDIX: How to apply the Apache License to your work. + + To apply the Apache License to your work, attach the following + boilerplate notice, with the fields enclosed by brackets "{}" + replaced with your own identifying information. (Don't include + the brackets!) The text should be enclosed in the appropriate + comment syntax for the file format. We also recommend that a + file or class name and description of purpose be included on the + same "printed page" as the copyright notice for easier + identification within third-party archives. + + Copyright 2021 Amazon.com, Inc. or its affiliates. All Rights Reserved. + + Licensed under the Apache License, Version 2.0 (the "License"); + you may not use this file except in compliance with the License. + You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + + Unless required by applicable law or agreed to in writing, software + distributed under the License is distributed on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + See the License for the specific language governing permissions and + limitations under the License. \ No newline at end of file diff --git a/amplify/functions/downloadDocument/node_modules/@smithy/util-middleware/README.md b/amplify/functions/downloadDocument/node_modules/@smithy/util-middleware/README.md new file mode 100644 index 0000000..f043cfa --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@smithy/util-middleware/README.md @@ -0,0 +1,12 @@ +# @smithy/util-middleware + +[![NPM version](https://img.shields.io/npm/v/@smithy/util-middleware/latest.svg)](https://www.npmjs.com/package/@smithy/util-middleware) +[![NPM downloads](https://img.shields.io/npm/dm/@smithy/util-middleware.svg)](https://www.npmjs.com/package/@smithy/util-middleware) + +> An internal package + +This package provides shared utilities for middleware. 
+ +## Usage + +You probably shouldn't, at least directly. diff --git a/amplify/functions/downloadDocument/node_modules/@smithy/util-middleware/dist-cjs/getSmithyContext.js b/amplify/functions/downloadDocument/node_modules/@smithy/util-middleware/dist-cjs/getSmithyContext.js new file mode 100644 index 0000000..532e610 --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@smithy/util-middleware/dist-cjs/getSmithyContext.js @@ -0,0 +1 @@ +module.exports = require("./index.js"); \ No newline at end of file diff --git a/amplify/functions/downloadDocument/node_modules/@smithy/util-middleware/dist-cjs/index.js b/amplify/functions/downloadDocument/node_modules/@smithy/util-middleware/dist-cjs/index.js new file mode 100644 index 0000000..dfccf17 --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@smithy/util-middleware/dist-cjs/index.js @@ -0,0 +1,45 @@ +var __defProp = Object.defineProperty; +var __getOwnPropDesc = Object.getOwnPropertyDescriptor; +var __getOwnPropNames = Object.getOwnPropertyNames; +var __hasOwnProp = Object.prototype.hasOwnProperty; +var __name = (target, value) => __defProp(target, "name", { value, configurable: true }); +var __export = (target, all) => { + for (var name in all) + __defProp(target, name, { get: all[name], enumerable: true }); +}; +var __copyProps = (to, from, except, desc) => { + if (from && typeof from === "object" || typeof from === "function") { + for (let key of __getOwnPropNames(from)) + if (!__hasOwnProp.call(to, key) && key !== except) + __defProp(to, key, { get: () => from[key], enumerable: !(desc = __getOwnPropDesc(from, key)) || desc.enumerable }); + } + return to; +}; +var __toCommonJS = (mod) => __copyProps(__defProp({}, "__esModule", { value: true }), mod); + +// src/index.ts +var src_exports = {}; +__export(src_exports, { + getSmithyContext: () => getSmithyContext, + normalizeProvider: () => normalizeProvider +}); +module.exports = __toCommonJS(src_exports); + +// src/getSmithyContext.ts 
+var import_types = require("@smithy/types"); +var getSmithyContext = /* @__PURE__ */ __name((context) => context[import_types.SMITHY_CONTEXT_KEY] || (context[import_types.SMITHY_CONTEXT_KEY] = {}), "getSmithyContext"); + +// src/normalizeProvider.ts +var normalizeProvider = /* @__PURE__ */ __name((input) => { + if (typeof input === "function") + return input; + const promisified = Promise.resolve(input); + return () => promisified; +}, "normalizeProvider"); +// Annotate the CommonJS export names for ESM import in node: + +0 && (module.exports = { + getSmithyContext, + normalizeProvider +}); + diff --git a/amplify/functions/downloadDocument/node_modules/@smithy/util-middleware/dist-cjs/normalizeProvider.js b/amplify/functions/downloadDocument/node_modules/@smithy/util-middleware/dist-cjs/normalizeProvider.js new file mode 100644 index 0000000..532e610 --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@smithy/util-middleware/dist-cjs/normalizeProvider.js @@ -0,0 +1 @@ +module.exports = require("./index.js"); \ No newline at end of file diff --git a/amplify/functions/downloadDocument/node_modules/@smithy/util-middleware/dist-es/getSmithyContext.js b/amplify/functions/downloadDocument/node_modules/@smithy/util-middleware/dist-es/getSmithyContext.js new file mode 100644 index 0000000..3848a0c --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@smithy/util-middleware/dist-es/getSmithyContext.js @@ -0,0 +1,2 @@ +import { SMITHY_CONTEXT_KEY } from "@smithy/types"; +export const getSmithyContext = (context) => context[SMITHY_CONTEXT_KEY] || (context[SMITHY_CONTEXT_KEY] = {}); diff --git a/amplify/functions/downloadDocument/node_modules/@smithy/util-middleware/dist-es/index.js b/amplify/functions/downloadDocument/node_modules/@smithy/util-middleware/dist-es/index.js new file mode 100644 index 0000000..484290d --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@smithy/util-middleware/dist-es/index.js @@ -0,0 +1,2 @@ +export 
* from "./getSmithyContext"; +export * from "./normalizeProvider"; diff --git a/amplify/functions/downloadDocument/node_modules/@smithy/util-middleware/dist-es/normalizeProvider.js b/amplify/functions/downloadDocument/node_modules/@smithy/util-middleware/dist-es/normalizeProvider.js new file mode 100644 index 0000000..a83ea99 --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@smithy/util-middleware/dist-es/normalizeProvider.js @@ -0,0 +1,6 @@ +export const normalizeProvider = (input) => { + if (typeof input === "function") + return input; + const promisified = Promise.resolve(input); + return () => promisified; +}; diff --git a/amplify/functions/downloadDocument/node_modules/@smithy/util-middleware/dist-types/getSmithyContext.d.ts b/amplify/functions/downloadDocument/node_modules/@smithy/util-middleware/dist-types/getSmithyContext.d.ts new file mode 100644 index 0000000..523ee47 --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@smithy/util-middleware/dist-types/getSmithyContext.d.ts @@ -0,0 +1,5 @@ +import { HandlerExecutionContext } from "@smithy/types"; +/** + * @internal + */ +export declare const getSmithyContext: (context: HandlerExecutionContext) => Record; diff --git a/amplify/functions/downloadDocument/node_modules/@smithy/util-middleware/dist-types/index.d.ts b/amplify/functions/downloadDocument/node_modules/@smithy/util-middleware/dist-types/index.d.ts new file mode 100644 index 0000000..3869284 --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@smithy/util-middleware/dist-types/index.d.ts @@ -0,0 +1,8 @@ +/** + * @internal + */ +export * from "./getSmithyContext"; +/** + * @internal + */ +export * from "./normalizeProvider"; diff --git a/amplify/functions/downloadDocument/node_modules/@smithy/util-middleware/dist-types/normalizeProvider.d.ts b/amplify/functions/downloadDocument/node_modules/@smithy/util-middleware/dist-types/normalizeProvider.d.ts new file mode 100644 index 0000000..4fe2d9a --- 
/dev/null +++ b/amplify/functions/downloadDocument/node_modules/@smithy/util-middleware/dist-types/normalizeProvider.d.ts @@ -0,0 +1,7 @@ +import { Provider } from "@smithy/types"; +/** + * @internal + * + * @returns a provider function for the input value if it isn't already one. + */ +export declare const normalizeProvider: (input: T | Provider) => Provider; diff --git a/amplify/functions/downloadDocument/node_modules/@smithy/util-middleware/dist-types/ts3.4/getSmithyContext.d.ts b/amplify/functions/downloadDocument/node_modules/@smithy/util-middleware/dist-types/ts3.4/getSmithyContext.d.ts new file mode 100644 index 0000000..14cd7c4 --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@smithy/util-middleware/dist-types/ts3.4/getSmithyContext.d.ts @@ -0,0 +1,5 @@ +import { HandlerExecutionContext } from "@smithy/types"; +/** + * @internal + */ +export declare const getSmithyContext: (context: HandlerExecutionContext) => Record; diff --git a/amplify/functions/downloadDocument/node_modules/@smithy/util-middleware/dist-types/ts3.4/index.d.ts b/amplify/functions/downloadDocument/node_modules/@smithy/util-middleware/dist-types/ts3.4/index.d.ts new file mode 100644 index 0000000..ab07159 --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@smithy/util-middleware/dist-types/ts3.4/index.d.ts @@ -0,0 +1,8 @@ +/** + * @internal + */ +export * from "./getSmithyContext"; +/** + * @internal + */ +export * from "./normalizeProvider"; diff --git a/amplify/functions/downloadDocument/node_modules/@smithy/util-middleware/dist-types/ts3.4/normalizeProvider.d.ts b/amplify/functions/downloadDocument/node_modules/@smithy/util-middleware/dist-types/ts3.4/normalizeProvider.d.ts new file mode 100644 index 0000000..594e8fa --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@smithy/util-middleware/dist-types/ts3.4/normalizeProvider.d.ts @@ -0,0 +1,7 @@ +import { Provider } from "@smithy/types"; +/** + * @internal + * + * @returns a provider 
function for the input value if it isn't already one. + */ +export declare const normalizeProvider: (input: T | Provider) => Provider; diff --git a/amplify/functions/downloadDocument/node_modules/@smithy/util-middleware/package.json b/amplify/functions/downloadDocument/node_modules/@smithy/util-middleware/package.json new file mode 100644 index 0000000..b9aa172 --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@smithy/util-middleware/package.json @@ -0,0 +1,66 @@ +{ + "name": "@smithy/util-middleware", + "version": "4.0.2", + "description": "Shared utilities for to be used in middleware packages.", + "main": "./dist-cjs/index.js", + "module": "./dist-es/index.js", + "scripts": { + "build": "concurrently 'yarn:build:cjs' 'yarn:build:es' 'yarn:build:types && yarn build:types:downlevel'", + "build:cjs": "node ../../scripts/inline util-middleware", + "build:es": "yarn g:tsc -p tsconfig.es.json", + "build:types": "yarn g:tsc -p tsconfig.types.json", + "build:types:downlevel": "rimraf dist-types/ts3.4 && downlevel-dts dist-types dist-types/ts3.4", + "stage-release": "rimraf ./.release && yarn pack && mkdir ./.release && tar zxvf ./package.tgz --directory ./.release && rm ./package.tgz", + "clean": "rimraf ./dist-* && rimraf *.tsbuildinfo || exit 0", + "lint": "eslint -c ../../.eslintrc.js \"src/**/*.ts\"", + "format": "prettier --config ../../prettier.config.js --ignore-path ../../.prettierignore --write \"**/*.{ts,md,json}\"", + "test": "yarn g:vitest run", + "test:watch": "yarn g:vitest watch" + }, + "keywords": [ + "aws", + "middleware" + ], + "author": { + "name": "AWS SDK for JavaScript Team", + "url": "https://aws.amazon.com/javascript/" + }, + "license": "Apache-2.0", + "dependencies": { + "@smithy/types": "^4.2.0", + "tslib": "^2.6.2" + }, + "devDependencies": { + "@types/node": "^18.11.9", + "concurrently": "7.0.0", + "downlevel-dts": "0.10.1", + "rimraf": "3.0.2", + "typedoc": "0.23.23" + }, + "types": "./dist-types/index.d.ts", + "engines": { 
+ "node": ">=18.0.0" + }, + "typesVersions": { + "<4.0": { + "types/*": [ + "types/ts3.4/*" + ] + } + }, + "files": [ + "dist-*/**" + ], + "homepage": "https://github.com/smithy-lang/smithy-typescript/tree/master/packages/util-middleware", + "repository": { + "type": "git", + "url": "https://github.com/smithy-lang/smithy-typescript.git", + "directory": "packages/util-middleware" + }, + "typedoc": { + "entryPoint": "src/index.ts" + }, + "publishConfig": { + "directory": ".release/package" + } +} \ No newline at end of file diff --git a/amplify/functions/downloadDocument/node_modules/@smithy/util-retry/LICENSE b/amplify/functions/downloadDocument/node_modules/@smithy/util-retry/LICENSE new file mode 100644 index 0000000..a1895fa --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@smithy/util-retry/LICENSE @@ -0,0 +1,201 @@ +Apache License + Version 2.0, January 2004 + http://www.apache.org/licenses/ + + TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION + + 1. Definitions. + + "License" shall mean the terms and conditions for use, reproduction, + and distribution as defined by Sections 1 through 9 of this document. + + "Licensor" shall mean the copyright owner or entity authorized by + the copyright owner that is granting the License. + + "Legal Entity" shall mean the union of the acting entity and all + other entities that control, are controlled by, or are under common + control with that entity. For the purposes of this definition, + "control" means (i) the power, direct or indirect, to cause the + direction or management of such entity, whether by contract or + otherwise, or (ii) ownership of fifty percent (50%) or more of the + outstanding shares, or (iii) beneficial ownership of such entity. + + "You" (or "Your") shall mean an individual or Legal Entity + exercising permissions granted by this License. 
+ + "Source" form shall mean the preferred form for making modifications, + including but not limited to software source code, documentation + source, and configuration files. + + "Object" form shall mean any form resulting from mechanical + transformation or translation of a Source form, including but + not limited to compiled object code, generated documentation, + and conversions to other media types. + + "Work" shall mean the work of authorship, whether in Source or + Object form, made available under the License, as indicated by a + copyright notice that is included in or attached to the work + (an example is provided in the Appendix below). + + "Derivative Works" shall mean any work, whether in Source or Object + form, that is based on (or derived from) the Work and for which the + editorial revisions, annotations, elaborations, or other modifications + represent, as a whole, an original work of authorship. For the purposes + of this License, Derivative Works shall not include works that remain + separable from, or merely link (or bind by name) to the interfaces of, + the Work and Derivative Works thereof. + + "Contribution" shall mean any work of authorship, including + the original version of the Work and any modifications or additions + to that Work or Derivative Works thereof, that is intentionally + submitted to Licensor for inclusion in the Work by the copyright owner + or by an individual or Legal Entity authorized to submit on behalf of + the copyright owner. 
For the purposes of this definition, "submitted" + means any form of electronic, verbal, or written communication sent + to the Licensor or its representatives, including but not limited to + communication on electronic mailing lists, source code control systems, + and issue tracking systems that are managed by, or on behalf of, the + Licensor for the purpose of discussing and improving the Work, but + excluding communication that is conspicuously marked or otherwise + designated in writing by the copyright owner as "Not a Contribution." + + "Contributor" shall mean Licensor and any individual or Legal Entity + on behalf of whom a Contribution has been received by Licensor and + subsequently incorporated within the Work. + + 2. Grant of Copyright License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + copyright license to reproduce, prepare Derivative Works of, + publicly display, publicly perform, sublicense, and distribute the + Work and such Derivative Works in Source or Object form. + + 3. Grant of Patent License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + (except as stated in this section) patent license to make, have made, + use, offer to sell, sell, import, and otherwise transfer the Work, + where such license applies only to those patent claims licensable + by such Contributor that are necessarily infringed by their + Contribution(s) alone or by combination of their Contribution(s) + with the Work to which such Contribution(s) was submitted. 
If You + institute patent litigation against any entity (including a + cross-claim or counterclaim in a lawsuit) alleging that the Work + or a Contribution incorporated within the Work constitutes direct + or contributory patent infringement, then any patent licenses + granted to You under this License for that Work shall terminate + as of the date such litigation is filed. + + 4. Redistribution. You may reproduce and distribute copies of the + Work or Derivative Works thereof in any medium, with or without + modifications, and in Source or Object form, provided that You + meet the following conditions: + + (a) You must give any other recipients of the Work or + Derivative Works a copy of this License; and + + (b) You must cause any modified files to carry prominent notices + stating that You changed the files; and + + (c) You must retain, in the Source form of any Derivative Works + that You distribute, all copyright, patent, trademark, and + attribution notices from the Source form of the Work, + excluding those notices that do not pertain to any part of + the Derivative Works; and + + (d) If the Work includes a "NOTICE" text file as part of its + distribution, then any Derivative Works that You distribute must + include a readable copy of the attribution notices contained + within such NOTICE file, excluding those notices that do not + pertain to any part of the Derivative Works, in at least one + of the following places: within a NOTICE text file distributed + as part of the Derivative Works; within the Source form or + documentation, if provided along with the Derivative Works; or, + within a display generated by the Derivative Works, if and + wherever such third-party notices normally appear. The contents + of the NOTICE file are for informational purposes only and + do not modify the License. 
You may add Your own attribution + notices within Derivative Works that You distribute, alongside + or as an addendum to the NOTICE text from the Work, provided + that such additional attribution notices cannot be construed + as modifying the License. + + You may add Your own copyright statement to Your modifications and + may provide additional or different license terms and conditions + for use, reproduction, or distribution of Your modifications, or + for any such Derivative Works as a whole, provided Your use, + reproduction, and distribution of the Work otherwise complies with + the conditions stated in this License. + + 5. Submission of Contributions. Unless You explicitly state otherwise, + any Contribution intentionally submitted for inclusion in the Work + by You to the Licensor shall be under the terms and conditions of + this License, without any additional terms or conditions. + Notwithstanding the above, nothing herein shall supersede or modify + the terms of any separate license agreement you may have executed + with Licensor regarding such Contributions. + + 6. Trademarks. This License does not grant permission to use the trade + names, trademarks, service marks, or product names of the Licensor, + except as required for reasonable and customary use in describing the + origin of the Work and reproducing the content of the NOTICE file. + + 7. Disclaimer of Warranty. Unless required by applicable law or + agreed to in writing, Licensor provides the Work (and each + Contributor provides its Contributions) on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or + implied, including, without limitation, any warranties or conditions + of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A + PARTICULAR PURPOSE. You are solely responsible for determining the + appropriateness of using or redistributing the Work and assume any + risks associated with Your exercise of permissions under this License. + + 8. 
Limitation of Liability. In no event and under no legal theory, + whether in tort (including negligence), contract, or otherwise, + unless required by applicable law (such as deliberate and grossly + negligent acts) or agreed to in writing, shall any Contributor be + liable to You for damages, including any direct, indirect, special, + incidental, or consequential damages of any character arising as a + result of this License or out of the use or inability to use the + Work (including but not limited to damages for loss of goodwill, + work stoppage, computer failure or malfunction, or any and all + other commercial damages or losses), even if such Contributor + has been advised of the possibility of such damages. + + 9. Accepting Warranty or Additional Liability. While redistributing + the Work or Derivative Works thereof, You may choose to offer, + and charge a fee for, acceptance of support, warranty, indemnity, + or other liability obligations and/or rights consistent with this + License. However, in accepting such obligations, You may act only + on Your own behalf and on Your sole responsibility, not on behalf + of any other Contributor, and only if You agree to indemnify, + defend, and hold each Contributor harmless for any liability + incurred by, or claims asserted against, such Contributor by reason + of your accepting any such warranty or additional liability. + + END OF TERMS AND CONDITIONS + + APPENDIX: How to apply the Apache License to your work. + + To apply the Apache License to your work, attach the following + boilerplate notice, with the fields enclosed by brackets "{}" + replaced with your own identifying information. (Don't include + the brackets!) The text should be enclosed in the appropriate + comment syntax for the file format. We also recommend that a + file or class name and description of purpose be included on the + same "printed page" as the copyright notice for easier + identification within third-party archives. 
+ + Copyright 2021 Amazon.com, Inc. or its affiliates. All Rights Reserved. + + Licensed under the Apache License, Version 2.0 (the "License"); + you may not use this file except in compliance with the License. + You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + + Unless required by applicable law or agreed to in writing, software + distributed under the License is distributed on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + See the License for the specific language governing permissions and + limitations under the License. \ No newline at end of file diff --git a/amplify/functions/downloadDocument/node_modules/@smithy/util-retry/README.md b/amplify/functions/downloadDocument/node_modules/@smithy/util-retry/README.md new file mode 100644 index 0000000..bcf11a9 --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@smithy/util-retry/README.md @@ -0,0 +1,78 @@ +# @smithy/util-retry + +[![NPM version](https://img.shields.io/npm/v/@smithy/util-retry/latest.svg)](https://www.npmjs.com/package/@smithy/util-retry) +[![NPM downloads](https://img.shields.io/npm/dm/@smithy/util-retry.svg)](https://www.npmjs.com/package/@smithy/util-retry) + +This package provides shared utilities for retries. + +## Usage + +### Default + +By default, each client already has a default retry strategy. The default retry count is 3, and +only retryable errors will be retried. + +[AWS Documentation: Retry behavior](https://docs.aws.amazon.com/sdkref/latest/guide/feature-retry-behavior.html). + +```js +import { S3Client } from "@aws-sdk/client-s3"; + +const client = new S3Client({}); // default retry strategy included. +``` + +### MaxAttempts + +If you want to change the number of attempts, you can provide `maxAttempts` configuration during client creation. 
+ +```js +import { S3Client } from "@aws-sdk/client-s3"; + +const client = new S3Client({ maxAttempts: 4 }); +``` + +This is recommended because the `StandardRetryStrategy` includes backoff calculation, +deciding whether an error should be retried, and a retry token counter. + +### MaxAttempts and BackoffComputation + +If you want to change the number of attempts and use a custom delay +computation, you can use the `ConfiguredRetryStrategy` from `@smithy/util-retry`. + +```js +import { S3Client } from "@aws-sdk/client-s3"; +import { ConfiguredRetryStrategy } from "@smithy/util-retry"; + +const client = new S3Client({ + retryStrategy: new ConfiguredRetryStrategy( + 4, // max attempts. + (attempt: number) => 100 + attempt * 1000 // backoff function. + ), +}); +``` + +This example sets the backoff at 100ms plus 1s per attempt. + +### MaxAttempts and RetryStrategy + +If you provide both `maxAttempts` and `retryStrategy`, the `retryStrategy` will +get precedence as it's more specific. + +```js +import { S3Client } from "@aws-sdk/client-s3"; +import { ConfiguredRetryStrategy } from "@smithy/util-retry"; + +const client = new S3Client({ + maxAttempts: 2, // ignored. + retryStrategy: new ConfiguredRetryStrategy( + 4, // used. + (attempt: number) => 100 + attempt * 1000 // backoff function. + ), +}); +``` + +### Further customization + +You can implement the `RetryStrategyV2` interface. 
+ +Source: https://github.com/smithy-lang/smithy-typescript/blob/main/packages/types/src/retry.ts +API Docs: https://docs.aws.amazon.com/AWSJavaScriptSDK/v3/latest/Package/-smithy-types/Interface/RetryStrategyV2/ diff --git a/amplify/functions/downloadDocument/node_modules/@smithy/util-retry/dist-cjs/AdaptiveRetryStrategy.js b/amplify/functions/downloadDocument/node_modules/@smithy/util-retry/dist-cjs/AdaptiveRetryStrategy.js new file mode 100644 index 0000000..532e610 --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@smithy/util-retry/dist-cjs/AdaptiveRetryStrategy.js @@ -0,0 +1 @@ +module.exports = require("./index.js"); \ No newline at end of file diff --git a/amplify/functions/downloadDocument/node_modules/@smithy/util-retry/dist-cjs/ConfiguredRetryStrategy.js b/amplify/functions/downloadDocument/node_modules/@smithy/util-retry/dist-cjs/ConfiguredRetryStrategy.js new file mode 100644 index 0000000..532e610 --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@smithy/util-retry/dist-cjs/ConfiguredRetryStrategy.js @@ -0,0 +1 @@ +module.exports = require("./index.js"); \ No newline at end of file diff --git a/amplify/functions/downloadDocument/node_modules/@smithy/util-retry/dist-cjs/DefaultRateLimiter.js b/amplify/functions/downloadDocument/node_modules/@smithy/util-retry/dist-cjs/DefaultRateLimiter.js new file mode 100644 index 0000000..532e610 --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@smithy/util-retry/dist-cjs/DefaultRateLimiter.js @@ -0,0 +1 @@ +module.exports = require("./index.js"); \ No newline at end of file diff --git a/amplify/functions/downloadDocument/node_modules/@smithy/util-retry/dist-cjs/StandardRetryStrategy.js b/amplify/functions/downloadDocument/node_modules/@smithy/util-retry/dist-cjs/StandardRetryStrategy.js new file mode 100644 index 0000000..532e610 --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@smithy/util-retry/dist-cjs/StandardRetryStrategy.js @@ -0,0 +1 
@@ +module.exports = require("./index.js"); \ No newline at end of file diff --git a/amplify/functions/downloadDocument/node_modules/@smithy/util-retry/dist-cjs/config.js b/amplify/functions/downloadDocument/node_modules/@smithy/util-retry/dist-cjs/config.js new file mode 100644 index 0000000..532e610 --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@smithy/util-retry/dist-cjs/config.js @@ -0,0 +1 @@ +module.exports = require("./index.js"); \ No newline at end of file diff --git a/amplify/functions/downloadDocument/node_modules/@smithy/util-retry/dist-cjs/constants.js b/amplify/functions/downloadDocument/node_modules/@smithy/util-retry/dist-cjs/constants.js new file mode 100644 index 0000000..532e610 --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@smithy/util-retry/dist-cjs/constants.js @@ -0,0 +1 @@ +module.exports = require("./index.js"); \ No newline at end of file diff --git a/amplify/functions/downloadDocument/node_modules/@smithy/util-retry/dist-cjs/defaultRetryBackoffStrategy.js b/amplify/functions/downloadDocument/node_modules/@smithy/util-retry/dist-cjs/defaultRetryBackoffStrategy.js new file mode 100644 index 0000000..532e610 --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@smithy/util-retry/dist-cjs/defaultRetryBackoffStrategy.js @@ -0,0 +1 @@ +module.exports = require("./index.js"); \ No newline at end of file diff --git a/amplify/functions/downloadDocument/node_modules/@smithy/util-retry/dist-cjs/defaultRetryToken.js b/amplify/functions/downloadDocument/node_modules/@smithy/util-retry/dist-cjs/defaultRetryToken.js new file mode 100644 index 0000000..532e610 --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@smithy/util-retry/dist-cjs/defaultRetryToken.js @@ -0,0 +1 @@ +module.exports = require("./index.js"); \ No newline at end of file diff --git a/amplify/functions/downloadDocument/node_modules/@smithy/util-retry/dist-cjs/index.js 
b/amplify/functions/downloadDocument/node_modules/@smithy/util-retry/dist-cjs/index.js new file mode 100644 index 0000000..699447a --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@smithy/util-retry/dist-cjs/index.js @@ -0,0 +1,358 @@ +var __defProp = Object.defineProperty; +var __getOwnPropDesc = Object.getOwnPropertyDescriptor; +var __getOwnPropNames = Object.getOwnPropertyNames; +var __hasOwnProp = Object.prototype.hasOwnProperty; +var __name = (target, value) => __defProp(target, "name", { value, configurable: true }); +var __export = (target, all) => { + for (var name in all) + __defProp(target, name, { get: all[name], enumerable: true }); +}; +var __copyProps = (to, from, except, desc) => { + if (from && typeof from === "object" || typeof from === "function") { + for (let key of __getOwnPropNames(from)) + if (!__hasOwnProp.call(to, key) && key !== except) + __defProp(to, key, { get: () => from[key], enumerable: !(desc = __getOwnPropDesc(from, key)) || desc.enumerable }); + } + return to; +}; +var __toCommonJS = (mod) => __copyProps(__defProp({}, "__esModule", { value: true }), mod); + +// src/index.ts +var src_exports = {}; +__export(src_exports, { + AdaptiveRetryStrategy: () => AdaptiveRetryStrategy, + ConfiguredRetryStrategy: () => ConfiguredRetryStrategy, + DEFAULT_MAX_ATTEMPTS: () => DEFAULT_MAX_ATTEMPTS, + DEFAULT_RETRY_DELAY_BASE: () => DEFAULT_RETRY_DELAY_BASE, + DEFAULT_RETRY_MODE: () => DEFAULT_RETRY_MODE, + DefaultRateLimiter: () => DefaultRateLimiter, + INITIAL_RETRY_TOKENS: () => INITIAL_RETRY_TOKENS, + INVOCATION_ID_HEADER: () => INVOCATION_ID_HEADER, + MAXIMUM_RETRY_DELAY: () => MAXIMUM_RETRY_DELAY, + NO_RETRY_INCREMENT: () => NO_RETRY_INCREMENT, + REQUEST_HEADER: () => REQUEST_HEADER, + RETRY_COST: () => RETRY_COST, + RETRY_MODES: () => RETRY_MODES, + StandardRetryStrategy: () => StandardRetryStrategy, + THROTTLING_RETRY_DELAY_BASE: () => THROTTLING_RETRY_DELAY_BASE, + TIMEOUT_RETRY_COST: () => TIMEOUT_RETRY_COST +}); 
+module.exports = __toCommonJS(src_exports); + +// src/config.ts +var RETRY_MODES = /* @__PURE__ */ ((RETRY_MODES2) => { + RETRY_MODES2["STANDARD"] = "standard"; + RETRY_MODES2["ADAPTIVE"] = "adaptive"; + return RETRY_MODES2; +})(RETRY_MODES || {}); +var DEFAULT_MAX_ATTEMPTS = 3; +var DEFAULT_RETRY_MODE = "standard" /* STANDARD */; + +// src/DefaultRateLimiter.ts +var import_service_error_classification = require("@smithy/service-error-classification"); +var DefaultRateLimiter = class _DefaultRateLimiter { + constructor(options) { + // Pre-set state variables + this.currentCapacity = 0; + this.enabled = false; + this.lastMaxRate = 0; + this.measuredTxRate = 0; + this.requestCount = 0; + this.lastTimestamp = 0; + this.timeWindow = 0; + this.beta = options?.beta ?? 0.7; + this.minCapacity = options?.minCapacity ?? 1; + this.minFillRate = options?.minFillRate ?? 0.5; + this.scaleConstant = options?.scaleConstant ?? 0.4; + this.smooth = options?.smooth ?? 0.8; + const currentTimeInSeconds = this.getCurrentTimeInSeconds(); + this.lastThrottleTime = currentTimeInSeconds; + this.lastTxRateBucket = Math.floor(this.getCurrentTimeInSeconds()); + this.fillRate = this.minFillRate; + this.maxCapacity = this.minCapacity; + } + static { + __name(this, "DefaultRateLimiter"); + } + static { + /** + * Only used in testing. 
+ */ + this.setTimeoutFn = setTimeout; + } + getCurrentTimeInSeconds() { + return Date.now() / 1e3; + } + async getSendToken() { + return this.acquireTokenBucket(1); + } + async acquireTokenBucket(amount) { + if (!this.enabled) { + return; + } + this.refillTokenBucket(); + if (amount > this.currentCapacity) { + const delay = (amount - this.currentCapacity) / this.fillRate * 1e3; + await new Promise((resolve) => _DefaultRateLimiter.setTimeoutFn(resolve, delay)); + } + this.currentCapacity = this.currentCapacity - amount; + } + refillTokenBucket() { + const timestamp = this.getCurrentTimeInSeconds(); + if (!this.lastTimestamp) { + this.lastTimestamp = timestamp; + return; + } + const fillAmount = (timestamp - this.lastTimestamp) * this.fillRate; + this.currentCapacity = Math.min(this.maxCapacity, this.currentCapacity + fillAmount); + this.lastTimestamp = timestamp; + } + updateClientSendingRate(response) { + let calculatedRate; + this.updateMeasuredRate(); + if ((0, import_service_error_classification.isThrottlingError)(response)) { + const rateToUse = !this.enabled ? 
this.measuredTxRate : Math.min(this.measuredTxRate, this.fillRate); + this.lastMaxRate = rateToUse; + this.calculateTimeWindow(); + this.lastThrottleTime = this.getCurrentTimeInSeconds(); + calculatedRate = this.cubicThrottle(rateToUse); + this.enableTokenBucket(); + } else { + this.calculateTimeWindow(); + calculatedRate = this.cubicSuccess(this.getCurrentTimeInSeconds()); + } + const newRate = Math.min(calculatedRate, 2 * this.measuredTxRate); + this.updateTokenBucketRate(newRate); + } + calculateTimeWindow() { + this.timeWindow = this.getPrecise(Math.pow(this.lastMaxRate * (1 - this.beta) / this.scaleConstant, 1 / 3)); + } + cubicThrottle(rateToUse) { + return this.getPrecise(rateToUse * this.beta); + } + cubicSuccess(timestamp) { + return this.getPrecise( + this.scaleConstant * Math.pow(timestamp - this.lastThrottleTime - this.timeWindow, 3) + this.lastMaxRate + ); + } + enableTokenBucket() { + this.enabled = true; + } + updateTokenBucketRate(newRate) { + this.refillTokenBucket(); + this.fillRate = Math.max(newRate, this.minFillRate); + this.maxCapacity = Math.max(newRate, this.minCapacity); + this.currentCapacity = Math.min(this.currentCapacity, this.maxCapacity); + } + updateMeasuredRate() { + const t = this.getCurrentTimeInSeconds(); + const timeBucket = Math.floor(t * 2) / 2; + this.requestCount++; + if (timeBucket > this.lastTxRateBucket) { + const currentRate = this.requestCount / (timeBucket - this.lastTxRateBucket); + this.measuredTxRate = this.getPrecise(currentRate * this.smooth + this.measuredTxRate * (1 - this.smooth)); + this.requestCount = 0; + this.lastTxRateBucket = timeBucket; + } + } + getPrecise(num) { + return parseFloat(num.toFixed(8)); + } +}; + +// src/constants.ts +var DEFAULT_RETRY_DELAY_BASE = 100; +var MAXIMUM_RETRY_DELAY = 20 * 1e3; +var THROTTLING_RETRY_DELAY_BASE = 500; +var INITIAL_RETRY_TOKENS = 500; +var RETRY_COST = 5; +var TIMEOUT_RETRY_COST = 10; +var NO_RETRY_INCREMENT = 1; +var INVOCATION_ID_HEADER = 
"amz-sdk-invocation-id"; +var REQUEST_HEADER = "amz-sdk-request"; + +// src/defaultRetryBackoffStrategy.ts +var getDefaultRetryBackoffStrategy = /* @__PURE__ */ __name(() => { + let delayBase = DEFAULT_RETRY_DELAY_BASE; + const computeNextBackoffDelay = /* @__PURE__ */ __name((attempts) => { + return Math.floor(Math.min(MAXIMUM_RETRY_DELAY, Math.random() * 2 ** attempts * delayBase)); + }, "computeNextBackoffDelay"); + const setDelayBase = /* @__PURE__ */ __name((delay) => { + delayBase = delay; + }, "setDelayBase"); + return { + computeNextBackoffDelay, + setDelayBase + }; +}, "getDefaultRetryBackoffStrategy"); + +// src/defaultRetryToken.ts +var createDefaultRetryToken = /* @__PURE__ */ __name(({ + retryDelay, + retryCount, + retryCost +}) => { + const getRetryCount = /* @__PURE__ */ __name(() => retryCount, "getRetryCount"); + const getRetryDelay = /* @__PURE__ */ __name(() => Math.min(MAXIMUM_RETRY_DELAY, retryDelay), "getRetryDelay"); + const getRetryCost = /* @__PURE__ */ __name(() => retryCost, "getRetryCost"); + return { + getRetryCount, + getRetryDelay, + getRetryCost + }; +}, "createDefaultRetryToken"); + +// src/StandardRetryStrategy.ts +var StandardRetryStrategy = class { + constructor(maxAttempts) { + this.maxAttempts = maxAttempts; + this.mode = "standard" /* STANDARD */; + this.capacity = INITIAL_RETRY_TOKENS; + this.retryBackoffStrategy = getDefaultRetryBackoffStrategy(); + this.maxAttemptsProvider = typeof maxAttempts === "function" ? 
maxAttempts : async () => maxAttempts; + } + static { + __name(this, "StandardRetryStrategy"); + } + // eslint-disable-next-line @typescript-eslint/no-unused-vars + async acquireInitialRetryToken(retryTokenScope) { + return createDefaultRetryToken({ + retryDelay: DEFAULT_RETRY_DELAY_BASE, + retryCount: 0 + }); + } + async refreshRetryTokenForRetry(token, errorInfo) { + const maxAttempts = await this.getMaxAttempts(); + if (this.shouldRetry(token, errorInfo, maxAttempts)) { + const errorType = errorInfo.errorType; + this.retryBackoffStrategy.setDelayBase( + errorType === "THROTTLING" ? THROTTLING_RETRY_DELAY_BASE : DEFAULT_RETRY_DELAY_BASE + ); + const delayFromErrorType = this.retryBackoffStrategy.computeNextBackoffDelay(token.getRetryCount()); + const retryDelay = errorInfo.retryAfterHint ? Math.max(errorInfo.retryAfterHint.getTime() - Date.now() || 0, delayFromErrorType) : delayFromErrorType; + const capacityCost = this.getCapacityCost(errorType); + this.capacity -= capacityCost; + return createDefaultRetryToken({ + retryDelay, + retryCount: token.getRetryCount() + 1, + retryCost: capacityCost + }); + } + throw new Error("No retry token available"); + } + recordSuccess(token) { + this.capacity = Math.max(INITIAL_RETRY_TOKENS, this.capacity + (token.getRetryCost() ?? NO_RETRY_INCREMENT)); + } + /** + * @returns the current available retry capacity. + * + * This number decreases when retries are executed and refills when requests or retries succeed. + */ + getCapacity() { + return this.capacity; + } + async getMaxAttempts() { + try { + return await this.maxAttemptsProvider(); + } catch (error) { + console.warn(`Max attempts provider could not resolve. 
Using default of ${DEFAULT_MAX_ATTEMPTS}`); + return DEFAULT_MAX_ATTEMPTS; + } + } + shouldRetry(tokenToRenew, errorInfo, maxAttempts) { + const attempts = tokenToRenew.getRetryCount() + 1; + return attempts < maxAttempts && this.capacity >= this.getCapacityCost(errorInfo.errorType) && this.isRetryableError(errorInfo.errorType); + } + getCapacityCost(errorType) { + return errorType === "TRANSIENT" ? TIMEOUT_RETRY_COST : RETRY_COST; + } + isRetryableError(errorType) { + return errorType === "THROTTLING" || errorType === "TRANSIENT"; + } +}; + +// src/AdaptiveRetryStrategy.ts +var AdaptiveRetryStrategy = class { + constructor(maxAttemptsProvider, options) { + this.maxAttemptsProvider = maxAttemptsProvider; + this.mode = "adaptive" /* ADAPTIVE */; + const { rateLimiter } = options ?? {}; + this.rateLimiter = rateLimiter ?? new DefaultRateLimiter(); + this.standardRetryStrategy = new StandardRetryStrategy(maxAttemptsProvider); + } + static { + __name(this, "AdaptiveRetryStrategy"); + } + async acquireInitialRetryToken(retryTokenScope) { + await this.rateLimiter.getSendToken(); + return this.standardRetryStrategy.acquireInitialRetryToken(retryTokenScope); + } + async refreshRetryTokenForRetry(tokenToRenew, errorInfo) { + this.rateLimiter.updateClientSendingRate(errorInfo); + return this.standardRetryStrategy.refreshRetryTokenForRetry(tokenToRenew, errorInfo); + } + recordSuccess(token) { + this.rateLimiter.updateClientSendingRate({}); + this.standardRetryStrategy.recordSuccess(token); + } +}; + +// src/ConfiguredRetryStrategy.ts +var ConfiguredRetryStrategy = class extends StandardRetryStrategy { + static { + __name(this, "ConfiguredRetryStrategy"); + } + /** + * @param maxAttempts - the maximum number of retry attempts allowed. + * e.g., if set to 3, then 4 total requests are possible. + * @param computeNextBackoffDelay - a millisecond delay for each retry or a function that takes the retry attempt + * and returns the delay. + * + * @example exponential backoff. 
+ * ```js + * new Client({ + * retryStrategy: new ConfiguredRetryStrategy(3, (attempt) => attempt ** 2) + * }); + * ``` + * @example constant delay. + * ```js + * new Client({ + * retryStrategy: new ConfiguredRetryStrategy(3, 2000) + * }); + * ``` + */ + constructor(maxAttempts, computeNextBackoffDelay = DEFAULT_RETRY_DELAY_BASE) { + super(typeof maxAttempts === "function" ? maxAttempts : async () => maxAttempts); + if (typeof computeNextBackoffDelay === "number") { + this.computeNextBackoffDelay = () => computeNextBackoffDelay; + } else { + this.computeNextBackoffDelay = computeNextBackoffDelay; + } + } + async refreshRetryTokenForRetry(tokenToRenew, errorInfo) { + const token = await super.refreshRetryTokenForRetry(tokenToRenew, errorInfo); + token.getRetryDelay = () => this.computeNextBackoffDelay(token.getRetryCount()); + return token; + } +}; +// Annotate the CommonJS export names for ESM import in node: + +0 && (module.exports = { + AdaptiveRetryStrategy, + ConfiguredRetryStrategy, + DefaultRateLimiter, + StandardRetryStrategy, + RETRY_MODES, + DEFAULT_MAX_ATTEMPTS, + DEFAULT_RETRY_MODE, + DEFAULT_RETRY_DELAY_BASE, + MAXIMUM_RETRY_DELAY, + THROTTLING_RETRY_DELAY_BASE, + INITIAL_RETRY_TOKENS, + RETRY_COST, + TIMEOUT_RETRY_COST, + NO_RETRY_INCREMENT, + INVOCATION_ID_HEADER, + REQUEST_HEADER +}); + diff --git a/amplify/functions/downloadDocument/node_modules/@smithy/util-retry/dist-cjs/types.js b/amplify/functions/downloadDocument/node_modules/@smithy/util-retry/dist-cjs/types.js new file mode 100644 index 0000000..532e610 --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@smithy/util-retry/dist-cjs/types.js @@ -0,0 +1 @@ +module.exports = require("./index.js"); \ No newline at end of file diff --git a/amplify/functions/downloadDocument/node_modules/@smithy/util-retry/dist-es/AdaptiveRetryStrategy.js b/amplify/functions/downloadDocument/node_modules/@smithy/util-retry/dist-es/AdaptiveRetryStrategy.js new file mode 100644 index 0000000..e20cf0f 
--- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@smithy/util-retry/dist-es/AdaptiveRetryStrategy.js @@ -0,0 +1,24 @@ +import { RETRY_MODES } from "./config"; +import { DefaultRateLimiter } from "./DefaultRateLimiter"; +import { StandardRetryStrategy } from "./StandardRetryStrategy"; +export class AdaptiveRetryStrategy { + constructor(maxAttemptsProvider, options) { + this.maxAttemptsProvider = maxAttemptsProvider; + this.mode = RETRY_MODES.ADAPTIVE; + const { rateLimiter } = options ?? {}; + this.rateLimiter = rateLimiter ?? new DefaultRateLimiter(); + this.standardRetryStrategy = new StandardRetryStrategy(maxAttemptsProvider); + } + async acquireInitialRetryToken(retryTokenScope) { + await this.rateLimiter.getSendToken(); + return this.standardRetryStrategy.acquireInitialRetryToken(retryTokenScope); + } + async refreshRetryTokenForRetry(tokenToRenew, errorInfo) { + this.rateLimiter.updateClientSendingRate(errorInfo); + return this.standardRetryStrategy.refreshRetryTokenForRetry(tokenToRenew, errorInfo); + } + recordSuccess(token) { + this.rateLimiter.updateClientSendingRate({}); + this.standardRetryStrategy.recordSuccess(token); + } +} diff --git a/amplify/functions/downloadDocument/node_modules/@smithy/util-retry/dist-es/ConfiguredRetryStrategy.js b/amplify/functions/downloadDocument/node_modules/@smithy/util-retry/dist-es/ConfiguredRetryStrategy.js new file mode 100644 index 0000000..541bdb2 --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@smithy/util-retry/dist-es/ConfiguredRetryStrategy.js @@ -0,0 +1,18 @@ +import { DEFAULT_RETRY_DELAY_BASE } from "./constants"; +import { StandardRetryStrategy } from "./StandardRetryStrategy"; +export class ConfiguredRetryStrategy extends StandardRetryStrategy { + constructor(maxAttempts, computeNextBackoffDelay = DEFAULT_RETRY_DELAY_BASE) { + super(typeof maxAttempts === "function" ? 
maxAttempts : async () => maxAttempts); + if (typeof computeNextBackoffDelay === "number") { + this.computeNextBackoffDelay = () => computeNextBackoffDelay; + } + else { + this.computeNextBackoffDelay = computeNextBackoffDelay; + } + } + async refreshRetryTokenForRetry(tokenToRenew, errorInfo) { + const token = await super.refreshRetryTokenForRetry(tokenToRenew, errorInfo); + token.getRetryDelay = () => this.computeNextBackoffDelay(token.getRetryCount()); + return token; + } +} diff --git a/amplify/functions/downloadDocument/node_modules/@smithy/util-retry/dist-es/DefaultRateLimiter.js b/amplify/functions/downloadDocument/node_modules/@smithy/util-retry/dist-es/DefaultRateLimiter.js new file mode 100644 index 0000000..15240c8 --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@smithy/util-retry/dist-es/DefaultRateLimiter.js @@ -0,0 +1,100 @@ +import { isThrottlingError } from "@smithy/service-error-classification"; +export class DefaultRateLimiter { + constructor(options) { + this.currentCapacity = 0; + this.enabled = false; + this.lastMaxRate = 0; + this.measuredTxRate = 0; + this.requestCount = 0; + this.lastTimestamp = 0; + this.timeWindow = 0; + this.beta = options?.beta ?? 0.7; + this.minCapacity = options?.minCapacity ?? 1; + this.minFillRate = options?.minFillRate ?? 0.5; + this.scaleConstant = options?.scaleConstant ?? 0.4; + this.smooth = options?.smooth ?? 
0.8; + const currentTimeInSeconds = this.getCurrentTimeInSeconds(); + this.lastThrottleTime = currentTimeInSeconds; + this.lastTxRateBucket = Math.floor(this.getCurrentTimeInSeconds()); + this.fillRate = this.minFillRate; + this.maxCapacity = this.minCapacity; + } + getCurrentTimeInSeconds() { + return Date.now() / 1000; + } + async getSendToken() { + return this.acquireTokenBucket(1); + } + async acquireTokenBucket(amount) { + if (!this.enabled) { + return; + } + this.refillTokenBucket(); + if (amount > this.currentCapacity) { + const delay = ((amount - this.currentCapacity) / this.fillRate) * 1000; + await new Promise((resolve) => DefaultRateLimiter.setTimeoutFn(resolve, delay)); + } + this.currentCapacity = this.currentCapacity - amount; + } + refillTokenBucket() { + const timestamp = this.getCurrentTimeInSeconds(); + if (!this.lastTimestamp) { + this.lastTimestamp = timestamp; + return; + } + const fillAmount = (timestamp - this.lastTimestamp) * this.fillRate; + this.currentCapacity = Math.min(this.maxCapacity, this.currentCapacity + fillAmount); + this.lastTimestamp = timestamp; + } + updateClientSendingRate(response) { + let calculatedRate; + this.updateMeasuredRate(); + if (isThrottlingError(response)) { + const rateToUse = !this.enabled ? 
this.measuredTxRate : Math.min(this.measuredTxRate, this.fillRate); + this.lastMaxRate = rateToUse; + this.calculateTimeWindow(); + this.lastThrottleTime = this.getCurrentTimeInSeconds(); + calculatedRate = this.cubicThrottle(rateToUse); + this.enableTokenBucket(); + } + else { + this.calculateTimeWindow(); + calculatedRate = this.cubicSuccess(this.getCurrentTimeInSeconds()); + } + const newRate = Math.min(calculatedRate, 2 * this.measuredTxRate); + this.updateTokenBucketRate(newRate); + } + calculateTimeWindow() { + this.timeWindow = this.getPrecise(Math.pow((this.lastMaxRate * (1 - this.beta)) / this.scaleConstant, 1 / 3)); + } + cubicThrottle(rateToUse) { + return this.getPrecise(rateToUse * this.beta); + } + cubicSuccess(timestamp) { + return this.getPrecise(this.scaleConstant * Math.pow(timestamp - this.lastThrottleTime - this.timeWindow, 3) + this.lastMaxRate); + } + enableTokenBucket() { + this.enabled = true; + } + updateTokenBucketRate(newRate) { + this.refillTokenBucket(); + this.fillRate = Math.max(newRate, this.minFillRate); + this.maxCapacity = Math.max(newRate, this.minCapacity); + this.currentCapacity = Math.min(this.currentCapacity, this.maxCapacity); + } + updateMeasuredRate() { + const t = this.getCurrentTimeInSeconds(); + const timeBucket = Math.floor(t * 2) / 2; + this.requestCount++; + if (timeBucket > this.lastTxRateBucket) { + const currentRate = this.requestCount / (timeBucket - this.lastTxRateBucket); + this.measuredTxRate = this.getPrecise(currentRate * this.smooth + this.measuredTxRate * (1 - this.smooth)); + this.requestCount = 0; + this.lastTxRateBucket = timeBucket; + } + } + getPrecise(num) { + return parseFloat(num.toFixed(8)); + } +} +DefaultRateLimiter.setTimeoutFn = setTimeout; diff --git a/amplify/functions/downloadDocument/node_modules/@smithy/util-retry/dist-es/StandardRetryStrategy.js b/amplify/functions/downloadDocument/node_modules/@smithy/util-retry/dist-es/StandardRetryStrategy.js new file mode 100644 index 
0000000..07adde0 --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@smithy/util-retry/dist-es/StandardRetryStrategy.js @@ -0,0 +1,65 @@ +import { DEFAULT_MAX_ATTEMPTS, RETRY_MODES } from "./config"; +import { DEFAULT_RETRY_DELAY_BASE, INITIAL_RETRY_TOKENS, NO_RETRY_INCREMENT, RETRY_COST, THROTTLING_RETRY_DELAY_BASE, TIMEOUT_RETRY_COST, } from "./constants"; +import { getDefaultRetryBackoffStrategy } from "./defaultRetryBackoffStrategy"; +import { createDefaultRetryToken } from "./defaultRetryToken"; +export class StandardRetryStrategy { + constructor(maxAttempts) { + this.maxAttempts = maxAttempts; + this.mode = RETRY_MODES.STANDARD; + this.capacity = INITIAL_RETRY_TOKENS; + this.retryBackoffStrategy = getDefaultRetryBackoffStrategy(); + this.maxAttemptsProvider = typeof maxAttempts === "function" ? maxAttempts : async () => maxAttempts; + } + async acquireInitialRetryToken(retryTokenScope) { + return createDefaultRetryToken({ + retryDelay: DEFAULT_RETRY_DELAY_BASE, + retryCount: 0, + }); + } + async refreshRetryTokenForRetry(token, errorInfo) { + const maxAttempts = await this.getMaxAttempts(); + if (this.shouldRetry(token, errorInfo, maxAttempts)) { + const errorType = errorInfo.errorType; + this.retryBackoffStrategy.setDelayBase(errorType === "THROTTLING" ? THROTTLING_RETRY_DELAY_BASE : DEFAULT_RETRY_DELAY_BASE); + const delayFromErrorType = this.retryBackoffStrategy.computeNextBackoffDelay(token.getRetryCount()); + const retryDelay = errorInfo.retryAfterHint + ? Math.max(errorInfo.retryAfterHint.getTime() - Date.now() || 0, delayFromErrorType) + : delayFromErrorType; + const capacityCost = this.getCapacityCost(errorType); + this.capacity -= capacityCost; + return createDefaultRetryToken({ + retryDelay, + retryCount: token.getRetryCount() + 1, + retryCost: capacityCost, + }); + } + throw new Error("No retry token available"); + } + recordSuccess(token) { + this.capacity = Math.max(INITIAL_RETRY_TOKENS, this.capacity + (token.getRetryCost() ?? 
NO_RETRY_INCREMENT)); + } + getCapacity() { + return this.capacity; + } + async getMaxAttempts() { + try { + return await this.maxAttemptsProvider(); + } + catch (error) { + console.warn(`Max attempts provider could not resolve. Using default of ${DEFAULT_MAX_ATTEMPTS}`); + return DEFAULT_MAX_ATTEMPTS; + } + } + shouldRetry(tokenToRenew, errorInfo, maxAttempts) { + const attempts = tokenToRenew.getRetryCount() + 1; + return (attempts < maxAttempts && + this.capacity >= this.getCapacityCost(errorInfo.errorType) && + this.isRetryableError(errorInfo.errorType)); + } + getCapacityCost(errorType) { + return errorType === "TRANSIENT" ? TIMEOUT_RETRY_COST : RETRY_COST; + } + isRetryableError(errorType) { + return errorType === "THROTTLING" || errorType === "TRANSIENT"; + } +} diff --git a/amplify/functions/downloadDocument/node_modules/@smithy/util-retry/dist-es/config.js b/amplify/functions/downloadDocument/node_modules/@smithy/util-retry/dist-es/config.js new file mode 100644 index 0000000..438d42d --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@smithy/util-retry/dist-es/config.js @@ -0,0 +1,7 @@ +export var RETRY_MODES; +(function (RETRY_MODES) { + RETRY_MODES["STANDARD"] = "standard"; + RETRY_MODES["ADAPTIVE"] = "adaptive"; +})(RETRY_MODES || (RETRY_MODES = {})); +export const DEFAULT_MAX_ATTEMPTS = 3; +export const DEFAULT_RETRY_MODE = RETRY_MODES.STANDARD; diff --git a/amplify/functions/downloadDocument/node_modules/@smithy/util-retry/dist-es/constants.js b/amplify/functions/downloadDocument/node_modules/@smithy/util-retry/dist-es/constants.js new file mode 100644 index 0000000..0876f8e --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@smithy/util-retry/dist-es/constants.js @@ -0,0 +1,9 @@ +export const DEFAULT_RETRY_DELAY_BASE = 100; +export const MAXIMUM_RETRY_DELAY = 20 * 1000; +export const THROTTLING_RETRY_DELAY_BASE = 500; +export const INITIAL_RETRY_TOKENS = 500; +export const RETRY_COST = 5; +export const 
TIMEOUT_RETRY_COST = 10; +export const NO_RETRY_INCREMENT = 1; +export const INVOCATION_ID_HEADER = "amz-sdk-invocation-id"; +export const REQUEST_HEADER = "amz-sdk-request"; diff --git a/amplify/functions/downloadDocument/node_modules/@smithy/util-retry/dist-es/defaultRetryBackoffStrategy.js b/amplify/functions/downloadDocument/node_modules/@smithy/util-retry/dist-es/defaultRetryBackoffStrategy.js new file mode 100644 index 0000000..ce04bc5 --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@smithy/util-retry/dist-es/defaultRetryBackoffStrategy.js @@ -0,0 +1,14 @@ +import { DEFAULT_RETRY_DELAY_BASE, MAXIMUM_RETRY_DELAY } from "./constants"; +export const getDefaultRetryBackoffStrategy = () => { + let delayBase = DEFAULT_RETRY_DELAY_BASE; + const computeNextBackoffDelay = (attempts) => { + return Math.floor(Math.min(MAXIMUM_RETRY_DELAY, Math.random() * 2 ** attempts * delayBase)); + }; + const setDelayBase = (delay) => { + delayBase = delay; + }; + return { + computeNextBackoffDelay, + setDelayBase, + }; +}; diff --git a/amplify/functions/downloadDocument/node_modules/@smithy/util-retry/dist-es/defaultRetryToken.js b/amplify/functions/downloadDocument/node_modules/@smithy/util-retry/dist-es/defaultRetryToken.js new file mode 100644 index 0000000..203bb66 --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@smithy/util-retry/dist-es/defaultRetryToken.js @@ -0,0 +1,11 @@ +import { MAXIMUM_RETRY_DELAY } from "./constants"; +export const createDefaultRetryToken = ({ retryDelay, retryCount, retryCost, }) => { + const getRetryCount = () => retryCount; + const getRetryDelay = () => Math.min(MAXIMUM_RETRY_DELAY, retryDelay); + const getRetryCost = () => retryCost; + return { + getRetryCount, + getRetryDelay, + getRetryCost, + }; +}; diff --git a/amplify/functions/downloadDocument/node_modules/@smithy/util-retry/dist-es/index.js b/amplify/functions/downloadDocument/node_modules/@smithy/util-retry/dist-es/index.js new file mode 100644 index 
0000000..8637ced --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@smithy/util-retry/dist-es/index.js @@ -0,0 +1,7 @@ +export * from "./AdaptiveRetryStrategy"; +export * from "./ConfiguredRetryStrategy"; +export * from "./DefaultRateLimiter"; +export * from "./StandardRetryStrategy"; +export * from "./config"; +export * from "./constants"; +export * from "./types"; diff --git a/amplify/functions/downloadDocument/node_modules/@smithy/util-retry/dist-es/types.js b/amplify/functions/downloadDocument/node_modules/@smithy/util-retry/dist-es/types.js new file mode 100644 index 0000000..cb0ff5c --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@smithy/util-retry/dist-es/types.js @@ -0,0 +1 @@ +export {}; diff --git a/amplify/functions/downloadDocument/node_modules/@smithy/util-retry/dist-types/AdaptiveRetryStrategy.d.ts b/amplify/functions/downloadDocument/node_modules/@smithy/util-retry/dist-types/AdaptiveRetryStrategy.d.ts new file mode 100644 index 0000000..8092519 --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@smithy/util-retry/dist-types/AdaptiveRetryStrategy.d.ts @@ -0,0 +1,33 @@ +import { Provider, RetryErrorInfo, RetryStrategyV2, RetryToken, StandardRetryToken } from "@smithy/types"; +import { RateLimiter } from "./types"; +/** + * @public + * + * Strategy options to be passed to AdaptiveRetryStrategy + */ +export interface AdaptiveRetryStrategyOptions { + rateLimiter?: RateLimiter; +} +/** + * @public + * + * The AdaptiveRetryStrategy is a retry strategy for executing against a very + * resource constrained set of resources. Care should be taken when using this + * retry strategy. By default, it uses a dynamic backoff delay based on load + * currently perceived against the downstream resource and performs circuit + * breaking to disable retries in the event of high downstream failures using + * the DefaultRateLimiter. 
+ * + * @see {@link StandardRetryStrategy} + * @see {@link DefaultRateLimiter } + */ +export declare class AdaptiveRetryStrategy implements RetryStrategyV2 { + private readonly maxAttemptsProvider; + private rateLimiter; + private standardRetryStrategy; + readonly mode: string; + constructor(maxAttemptsProvider: Provider, options?: AdaptiveRetryStrategyOptions); + acquireInitialRetryToken(retryTokenScope: string): Promise; + refreshRetryTokenForRetry(tokenToRenew: StandardRetryToken, errorInfo: RetryErrorInfo): Promise; + recordSuccess(token: StandardRetryToken): void; +} diff --git a/amplify/functions/downloadDocument/node_modules/@smithy/util-retry/dist-types/ConfiguredRetryStrategy.d.ts b/amplify/functions/downloadDocument/node_modules/@smithy/util-retry/dist-types/ConfiguredRetryStrategy.d.ts new file mode 100644 index 0000000..3250c6d --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@smithy/util-retry/dist-types/ConfiguredRetryStrategy.d.ts @@ -0,0 +1,32 @@ +import type { Provider, RetryBackoffStrategy, RetryErrorInfo, RetryStrategyV2, StandardRetryToken } from "@smithy/types"; +import { StandardRetryStrategy } from "./StandardRetryStrategy"; +/** + * @public + * + * This extension of the StandardRetryStrategy allows customizing the + * backoff computation. + */ +export declare class ConfiguredRetryStrategy extends StandardRetryStrategy implements RetryStrategyV2 { + private readonly computeNextBackoffDelay; + /** + * @param maxAttempts - the maximum number of retry attempts allowed. + * e.g., if set to 3, then 4 total requests are possible. + * @param computeNextBackoffDelay - a millisecond delay for each retry or a function that takes the retry attempt + * and returns the delay. + * + * @example exponential backoff. + * ```js + * new Client({ + * retryStrategy: new ConfiguredRetryStrategy(3, (attempt) => attempt ** 2) + * }); + * ``` + * @example constant delay. 
+ * ```js + * new Client({ + * retryStrategy: new ConfiguredRetryStrategy(3, 2000) + * }); + * ``` + */ + constructor(maxAttempts: number | Provider, computeNextBackoffDelay?: number | RetryBackoffStrategy["computeNextBackoffDelay"]); + refreshRetryTokenForRetry(tokenToRenew: StandardRetryToken, errorInfo: RetryErrorInfo): Promise; +} diff --git a/amplify/functions/downloadDocument/node_modules/@smithy/util-retry/dist-types/DefaultRateLimiter.d.ts b/amplify/functions/downloadDocument/node_modules/@smithy/util-retry/dist-types/DefaultRateLimiter.d.ts new file mode 100644 index 0000000..9d689fc --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@smithy/util-retry/dist-types/DefaultRateLimiter.d.ts @@ -0,0 +1,49 @@ +import { RateLimiter } from "./types"; +/** + * @public + */ +export interface DefaultRateLimiterOptions { + beta?: number; + minCapacity?: number; + minFillRate?: number; + scaleConstant?: number; + smooth?: number; +} +/** + * @public + */ +export declare class DefaultRateLimiter implements RateLimiter { + /** + * Only used in testing. 
+ */ + private static setTimeoutFn; + private beta; + private minCapacity; + private minFillRate; + private scaleConstant; + private smooth; + private currentCapacity; + private enabled; + private lastMaxRate; + private measuredTxRate; + private requestCount; + private fillRate; + private lastThrottleTime; + private lastTimestamp; + private lastTxRateBucket; + private maxCapacity; + private timeWindow; + constructor(options?: DefaultRateLimiterOptions); + private getCurrentTimeInSeconds; + getSendToken(): Promise; + private acquireTokenBucket; + private refillTokenBucket; + updateClientSendingRate(response: any): void; + private calculateTimeWindow; + private cubicThrottle; + private cubicSuccess; + private enableTokenBucket; + private updateTokenBucketRate; + private updateMeasuredRate; + private getPrecise; +} diff --git a/amplify/functions/downloadDocument/node_modules/@smithy/util-retry/dist-types/StandardRetryStrategy.d.ts b/amplify/functions/downloadDocument/node_modules/@smithy/util-retry/dist-types/StandardRetryStrategy.d.ts new file mode 100644 index 0000000..c100ebc --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@smithy/util-retry/dist-types/StandardRetryStrategy.d.ts @@ -0,0 +1,26 @@ +import { Provider, RetryErrorInfo, RetryStrategyV2, StandardRetryToken } from "@smithy/types"; +/** + * @public + */ +export declare class StandardRetryStrategy implements RetryStrategyV2 { + private readonly maxAttempts; + readonly mode: string; + private capacity; + private readonly retryBackoffStrategy; + private readonly maxAttemptsProvider; + constructor(maxAttempts: number); + constructor(maxAttemptsProvider: Provider); + acquireInitialRetryToken(retryTokenScope: string): Promise; + refreshRetryTokenForRetry(token: StandardRetryToken, errorInfo: RetryErrorInfo): Promise; + recordSuccess(token: StandardRetryToken): void; + /** + * @returns the current available retry capacity. 
+ * + * This number decreases when retries are executed and refills when requests or retries succeed. + */ + getCapacity(): number; + private getMaxAttempts; + private shouldRetry; + private getCapacityCost; + private isRetryableError; +} diff --git a/amplify/functions/downloadDocument/node_modules/@smithy/util-retry/dist-types/config.d.ts b/amplify/functions/downloadDocument/node_modules/@smithy/util-retry/dist-types/config.d.ts new file mode 100644 index 0000000..e4e74b3 --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@smithy/util-retry/dist-types/config.d.ts @@ -0,0 +1,20 @@ +/** + * @public + */ +export declare enum RETRY_MODES { + STANDARD = "standard", + ADAPTIVE = "adaptive" +} +/** + * @public + * + * The default value for how many HTTP requests an SDK should make for a + * single SDK operation invocation before giving up + */ +export declare const DEFAULT_MAX_ATTEMPTS = 3; +/** + * @public + * + * The default retry algorithm to use. + */ +export declare const DEFAULT_RETRY_MODE = RETRY_MODES.STANDARD; diff --git a/amplify/functions/downloadDocument/node_modules/@smithy/util-retry/dist-types/constants.d.ts b/amplify/functions/downloadDocument/node_modules/@smithy/util-retry/dist-types/constants.d.ts new file mode 100644 index 0000000..bc7fec8 --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@smithy/util-retry/dist-types/constants.d.ts @@ -0,0 +1,59 @@ +/** + * @public + * + * The base number of milliseconds to use in calculating a suitable cool-down + * time when a retryable error is encountered. + */ +export declare const DEFAULT_RETRY_DELAY_BASE = 100; +/** + * @public + * + * The maximum amount of time (in milliseconds) that will be used as a delay + * between retry attempts. + */ +export declare const MAXIMUM_RETRY_DELAY: number; +/** + * @public + * + * The retry delay base (in milliseconds) to use when a throttling error is + * encountered. 
+ */ +export declare const THROTTLING_RETRY_DELAY_BASE = 500; +/** + * @public + * + * Initial number of retry tokens in Retry Quota + */ +export declare const INITIAL_RETRY_TOKENS = 500; +/** + * @public + * + * The total amount of retry tokens to be decremented from retry token balance. + */ +export declare const RETRY_COST = 5; +/** + * @public + * + * The total amount of retry tokens to be decremented from retry token balance + * when a throttling error is encountered. + */ +export declare const TIMEOUT_RETRY_COST = 10; +/** + * @public + * + * The total amount of retry token to be incremented from retry token balance + * if an SDK operation invocation succeeds without requiring a retry request. + */ +export declare const NO_RETRY_INCREMENT = 1; +/** + * @public + * + * Header name for SDK invocation ID + */ +export declare const INVOCATION_ID_HEADER = "amz-sdk-invocation-id"; +/** + * @public + * + * Header name for request retry information. + */ +export declare const REQUEST_HEADER = "amz-sdk-request"; diff --git a/amplify/functions/downloadDocument/node_modules/@smithy/util-retry/dist-types/defaultRetryBackoffStrategy.d.ts b/amplify/functions/downloadDocument/node_modules/@smithy/util-retry/dist-types/defaultRetryBackoffStrategy.d.ts new file mode 100644 index 0000000..b70eb2d --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@smithy/util-retry/dist-types/defaultRetryBackoffStrategy.d.ts @@ -0,0 +1,5 @@ +import { StandardRetryBackoffStrategy } from "@smithy/types"; +/** + * @internal + */ +export declare const getDefaultRetryBackoffStrategy: () => StandardRetryBackoffStrategy; diff --git a/amplify/functions/downloadDocument/node_modules/@smithy/util-retry/dist-types/defaultRetryToken.d.ts b/amplify/functions/downloadDocument/node_modules/@smithy/util-retry/dist-types/defaultRetryToken.d.ts new file mode 100644 index 0000000..947b68f --- /dev/null +++ 
b/amplify/functions/downloadDocument/node_modules/@smithy/util-retry/dist-types/defaultRetryToken.d.ts @@ -0,0 +1,9 @@ +import { StandardRetryToken } from "@smithy/types"; +/** + * @internal + */ +export declare const createDefaultRetryToken: ({ retryDelay, retryCount, retryCost, }: { + retryDelay: number; + retryCount: number; + retryCost?: number | undefined; +}) => StandardRetryToken; diff --git a/amplify/functions/downloadDocument/node_modules/@smithy/util-retry/dist-types/index.d.ts b/amplify/functions/downloadDocument/node_modules/@smithy/util-retry/dist-types/index.d.ts new file mode 100644 index 0000000..8637ced --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@smithy/util-retry/dist-types/index.d.ts @@ -0,0 +1,7 @@ +export * from "./AdaptiveRetryStrategy"; +export * from "./ConfiguredRetryStrategy"; +export * from "./DefaultRateLimiter"; +export * from "./StandardRetryStrategy"; +export * from "./config"; +export * from "./constants"; +export * from "./types"; diff --git a/amplify/functions/downloadDocument/node_modules/@smithy/util-retry/dist-types/ts3.4/AdaptiveRetryStrategy.d.ts b/amplify/functions/downloadDocument/node_modules/@smithy/util-retry/dist-types/ts3.4/AdaptiveRetryStrategy.d.ts new file mode 100644 index 0000000..f6b0ef4 --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@smithy/util-retry/dist-types/ts3.4/AdaptiveRetryStrategy.d.ts @@ -0,0 +1,33 @@ +import { Provider, RetryErrorInfo, RetryStrategyV2, RetryToken, StandardRetryToken } from "@smithy/types"; +import { RateLimiter } from "./types"; +/** + * @public + * + * Strategy options to be passed to AdaptiveRetryStrategy + */ +export interface AdaptiveRetryStrategyOptions { + rateLimiter?: RateLimiter; +} +/** + * @public + * + * The AdaptiveRetryStrategy is a retry strategy for executing against a very + * resource constrained set of resources. Care should be taken when using this + * retry strategy. 
By default, it uses a dynamic backoff delay based on load + * currently perceived against the downstream resource and performs circuit + * breaking to disable retries in the event of high downstream failures using + * the DefaultRateLimiter. + * + * @see {@link StandardRetryStrategy} + * @see {@link DefaultRateLimiter } + */ +export declare class AdaptiveRetryStrategy implements RetryStrategyV2 { + private readonly maxAttemptsProvider; + private rateLimiter; + private standardRetryStrategy; + readonly mode: string; + constructor(maxAttemptsProvider: Provider, options?: AdaptiveRetryStrategyOptions); + acquireInitialRetryToken(retryTokenScope: string): Promise; + refreshRetryTokenForRetry(tokenToRenew: StandardRetryToken, errorInfo: RetryErrorInfo): Promise; + recordSuccess(token: StandardRetryToken): void; +} diff --git a/amplify/functions/downloadDocument/node_modules/@smithy/util-retry/dist-types/ts3.4/ConfiguredRetryStrategy.d.ts b/amplify/functions/downloadDocument/node_modules/@smithy/util-retry/dist-types/ts3.4/ConfiguredRetryStrategy.d.ts new file mode 100644 index 0000000..7df2983 --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@smithy/util-retry/dist-types/ts3.4/ConfiguredRetryStrategy.d.ts @@ -0,0 +1,32 @@ +import { Provider, RetryBackoffStrategy, RetryErrorInfo, RetryStrategyV2, StandardRetryToken } from "@smithy/types"; +import { StandardRetryStrategy } from "./StandardRetryStrategy"; +/** + * @public + * + * This extension of the StandardRetryStrategy allows customizing the + * backoff computation. + */ +export declare class ConfiguredRetryStrategy extends StandardRetryStrategy implements RetryStrategyV2 { + private readonly computeNextBackoffDelay; + /** + * @param maxAttempts - the maximum number of retry attempts allowed. + * e.g., if set to 3, then 4 total requests are possible. + * @param computeNextBackoffDelay - a millisecond delay for each retry or a function that takes the retry attempt + * and returns the delay. 
+ * + * @example exponential backoff. + * ```js + * new Client({ + * retryStrategy: new ConfiguredRetryStrategy(3, (attempt) => attempt ** 2) + * }); + * ``` + * @example constant delay. + * ```js + * new Client({ + * retryStrategy: new ConfiguredRetryStrategy(3, 2000) + * }); + * ``` + */ + constructor(maxAttempts: number | Provider, computeNextBackoffDelay?: number | RetryBackoffStrategy["computeNextBackoffDelay"]); + refreshRetryTokenForRetry(tokenToRenew: StandardRetryToken, errorInfo: RetryErrorInfo): Promise; +} diff --git a/amplify/functions/downloadDocument/node_modules/@smithy/util-retry/dist-types/ts3.4/DefaultRateLimiter.d.ts b/amplify/functions/downloadDocument/node_modules/@smithy/util-retry/dist-types/ts3.4/DefaultRateLimiter.d.ts new file mode 100644 index 0000000..9c239d6 --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@smithy/util-retry/dist-types/ts3.4/DefaultRateLimiter.d.ts @@ -0,0 +1,49 @@ +import { RateLimiter } from "./types"; +/** + * @public + */ +export interface DefaultRateLimiterOptions { + beta?: number; + minCapacity?: number; + minFillRate?: number; + scaleConstant?: number; + smooth?: number; +} +/** + * @public + */ +export declare class DefaultRateLimiter implements RateLimiter { + /** + * Only used in testing. 
+ */ + private static setTimeoutFn; + private beta; + private minCapacity; + private minFillRate; + private scaleConstant; + private smooth; + private currentCapacity; + private enabled; + private lastMaxRate; + private measuredTxRate; + private requestCount; + private fillRate; + private lastThrottleTime; + private lastTimestamp; + private lastTxRateBucket; + private maxCapacity; + private timeWindow; + constructor(options?: DefaultRateLimiterOptions); + private getCurrentTimeInSeconds; + getSendToken(): Promise; + private acquireTokenBucket; + private refillTokenBucket; + updateClientSendingRate(response: any): void; + private calculateTimeWindow; + private cubicThrottle; + private cubicSuccess; + private enableTokenBucket; + private updateTokenBucketRate; + private updateMeasuredRate; + private getPrecise; +} diff --git a/amplify/functions/downloadDocument/node_modules/@smithy/util-retry/dist-types/ts3.4/StandardRetryStrategy.d.ts b/amplify/functions/downloadDocument/node_modules/@smithy/util-retry/dist-types/ts3.4/StandardRetryStrategy.d.ts new file mode 100644 index 0000000..c22f8b8 --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@smithy/util-retry/dist-types/ts3.4/StandardRetryStrategy.d.ts @@ -0,0 +1,26 @@ +import { Provider, RetryErrorInfo, RetryStrategyV2, StandardRetryToken } from "@smithy/types"; +/** + * @public + */ +export declare class StandardRetryStrategy implements RetryStrategyV2 { + private readonly maxAttempts; + readonly mode: string; + private capacity; + private readonly retryBackoffStrategy; + private readonly maxAttemptsProvider; + constructor(maxAttempts: number); + constructor(maxAttemptsProvider: Provider); + acquireInitialRetryToken(retryTokenScope: string): Promise; + refreshRetryTokenForRetry(token: StandardRetryToken, errorInfo: RetryErrorInfo): Promise; + recordSuccess(token: StandardRetryToken): void; + /** + * @returns the current available retry capacity. 
+ * + * This number decreases when retries are executed and refills when requests or retries succeed. + */ + getCapacity(): number; + private getMaxAttempts; + private shouldRetry; + private getCapacityCost; + private isRetryableError; +} diff --git a/amplify/functions/downloadDocument/node_modules/@smithy/util-retry/dist-types/ts3.4/config.d.ts b/amplify/functions/downloadDocument/node_modules/@smithy/util-retry/dist-types/ts3.4/config.d.ts new file mode 100644 index 0000000..6727a38 --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@smithy/util-retry/dist-types/ts3.4/config.d.ts @@ -0,0 +1,20 @@ +/** + * @public + */ +export declare enum RETRY_MODES { + STANDARD = "standard", + ADAPTIVE = "adaptive" +} +/** + * @public + * + * The default value for how many HTTP requests an SDK should make for a + * single SDK operation invocation before giving up + */ +export declare const DEFAULT_MAX_ATTEMPTS = 3; +/** + * @public + * + * The default retry algorithm to use. + */ +export declare const DEFAULT_RETRY_MODE = RETRY_MODES.STANDARD; diff --git a/amplify/functions/downloadDocument/node_modules/@smithy/util-retry/dist-types/ts3.4/constants.d.ts b/amplify/functions/downloadDocument/node_modules/@smithy/util-retry/dist-types/ts3.4/constants.d.ts new file mode 100644 index 0000000..5c1a5ce --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@smithy/util-retry/dist-types/ts3.4/constants.d.ts @@ -0,0 +1,59 @@ +/** + * @public + * + * The base number of milliseconds to use in calculating a suitable cool-down + * time when a retryable error is encountered. + */ +export declare const DEFAULT_RETRY_DELAY_BASE = 100; +/** + * @public + * + * The maximum amount of time (in milliseconds) that will be used as a delay + * between retry attempts. + */ +export declare const MAXIMUM_RETRY_DELAY: number; +/** + * @public + * + * The retry delay base (in milliseconds) to use when a throttling error is + * encountered. 
+ */ +export declare const THROTTLING_RETRY_DELAY_BASE = 500; +/** + * @public + * + * Initial number of retry tokens in Retry Quota + */ +export declare const INITIAL_RETRY_TOKENS = 500; +/** + * @public + * + * The total amount of retry tokens to be decremented from retry token balance. + */ +export declare const RETRY_COST = 5; +/** + * @public + * + * The total amount of retry tokens to be decremented from retry token balance + * when a throttling error is encountered. + */ +export declare const TIMEOUT_RETRY_COST = 10; +/** + * @public + * + * The total amount of retry token to be incremented from retry token balance + * if an SDK operation invocation succeeds without requiring a retry request. + */ +export declare const NO_RETRY_INCREMENT = 1; +/** + * @public + * + * Header name for SDK invocation ID + */ +export declare const INVOCATION_ID_HEADER = "amz-sdk-invocation-id"; +/** + * @public + * + * Header name for request retry information. + */ +export declare const REQUEST_HEADER = "amz-sdk-request"; diff --git a/amplify/functions/downloadDocument/node_modules/@smithy/util-retry/dist-types/ts3.4/defaultRetryBackoffStrategy.d.ts b/amplify/functions/downloadDocument/node_modules/@smithy/util-retry/dist-types/ts3.4/defaultRetryBackoffStrategy.d.ts new file mode 100644 index 0000000..1d632ca --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@smithy/util-retry/dist-types/ts3.4/defaultRetryBackoffStrategy.d.ts @@ -0,0 +1,5 @@ +import { StandardRetryBackoffStrategy } from "@smithy/types"; +/** + * @internal + */ +export declare const getDefaultRetryBackoffStrategy: () => StandardRetryBackoffStrategy; diff --git a/amplify/functions/downloadDocument/node_modules/@smithy/util-retry/dist-types/ts3.4/defaultRetryToken.d.ts b/amplify/functions/downloadDocument/node_modules/@smithy/util-retry/dist-types/ts3.4/defaultRetryToken.d.ts new file mode 100644 index 0000000..fd4b75e --- /dev/null +++ 
b/amplify/functions/downloadDocument/node_modules/@smithy/util-retry/dist-types/ts3.4/defaultRetryToken.d.ts @@ -0,0 +1,9 @@ +import { StandardRetryToken } from "@smithy/types"; +/** + * @internal + */ +export declare const createDefaultRetryToken: ({ retryDelay, retryCount, retryCost, }: { + retryDelay: number; + retryCount: number; + retryCost?: number | undefined; +}) => StandardRetryToken; diff --git a/amplify/functions/downloadDocument/node_modules/@smithy/util-retry/dist-types/ts3.4/index.d.ts b/amplify/functions/downloadDocument/node_modules/@smithy/util-retry/dist-types/ts3.4/index.d.ts new file mode 100644 index 0000000..de9af3d --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@smithy/util-retry/dist-types/ts3.4/index.d.ts @@ -0,0 +1,7 @@ +export * from "./AdaptiveRetryStrategy"; +export * from "./ConfiguredRetryStrategy"; +export * from "./DefaultRateLimiter"; +export * from "./StandardRetryStrategy"; +export * from "./config"; +export * from "./constants"; +export * from "./types"; diff --git a/amplify/functions/downloadDocument/node_modules/@smithy/util-retry/dist-types/ts3.4/types.d.ts b/amplify/functions/downloadDocument/node_modules/@smithy/util-retry/dist-types/ts3.4/types.d.ts new file mode 100644 index 0000000..5a20c01 --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@smithy/util-retry/dist-types/ts3.4/types.d.ts @@ -0,0 +1,19 @@ +/** + * @internal + */ +export interface RateLimiter { + /** + * If there is sufficient capacity (tokens) available, it immediately returns. + * If there is not sufficient capacity, it will either sleep a certain amount + * of time until the rate limiter can retrieve a token from its token bucket + * or raise an exception indicating there is insufficient capacity. + */ + getSendToken: () => Promise; + /** + * Updates the client sending rate based on response. + * If the response was successful, the capacity and fill rate are increased. 
+ * If the response was a throttling response, the capacity and fill rate are + * decreased. Transient errors do not affect the rate limiter. + */ + updateClientSendingRate: (response: any) => void; +} diff --git a/amplify/functions/downloadDocument/node_modules/@smithy/util-retry/dist-types/types.d.ts b/amplify/functions/downloadDocument/node_modules/@smithy/util-retry/dist-types/types.d.ts new file mode 100644 index 0000000..b3f2bd1 --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@smithy/util-retry/dist-types/types.d.ts @@ -0,0 +1,19 @@ +/** + * @internal + */ +export interface RateLimiter { + /** + * If there is sufficient capacity (tokens) available, it immediately returns. + * If there is not sufficient capacity, it will either sleep a certain amount + * of time until the rate limiter can retrieve a token from its token bucket + * or raise an exception indicating there is insufficient capacity. + */ + getSendToken: () => Promise; + /** + * Updates the client sending rate based on response. + * If the response was successful, the capacity and fill rate are increased. + * If the response was a throttling response, the capacity and fill rate are + * decreased. Transient errors do not affect the rate limiter. 
+ */ + updateClientSendingRate: (response: any) => void; +} diff --git a/amplify/functions/downloadDocument/node_modules/@smithy/util-retry/package.json b/amplify/functions/downloadDocument/node_modules/@smithy/util-retry/package.json new file mode 100644 index 0000000..6379727 --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@smithy/util-retry/package.json @@ -0,0 +1,68 @@ +{ + "name": "@smithy/util-retry", + "version": "4.0.3", + "description": "Shared retry utilities to be used in middleware packages.", + "main": "./dist-cjs/index.js", + "module": "./dist-es/index.js", + "scripts": { + "build": "concurrently 'yarn:build:cjs' 'yarn:build:es' 'yarn:build:types && yarn build:types:downlevel'", + "build:cjs": "node ../../scripts/inline util-retry", + "build:es": "yarn g:tsc -p tsconfig.es.json", + "build:types": "yarn g:tsc -p tsconfig.types.json", + "build:types:downlevel": "rimraf dist-types/ts3.4 && downlevel-dts dist-types dist-types/ts3.4", + "stage-release": "rimraf ./.release && yarn pack && mkdir ./.release && tar zxvf ./package.tgz --directory ./.release && rm ./package.tgz", + "clean": "rimraf ./dist-* && rimraf *.tsbuildinfo || exit 0", + "lint": "eslint -c ../../.eslintrc.js \"src/**/*.ts\"", + "format": "prettier --config ../../prettier.config.js --ignore-path ../../.prettierignore --write \"**/*.{ts,md,json}\"", + "extract:docs": "api-extractor run --local", + "test": "yarn g:vitest run", + "test:watch": "yarn g:vitest watch" + }, + "keywords": [ + "aws", + "retry" + ], + "author": { + "name": "AWS SDK for JavaScript Team", + "url": "https://aws.amazon.com/javascript/" + }, + "license": "Apache-2.0", + "dependencies": { + "@smithy/service-error-classification": "^4.0.3", + "@smithy/types": "^4.2.0", + "tslib": "^2.6.2" + }, + "devDependencies": { + "@types/node": "^18.11.9", + "concurrently": "7.0.0", + "downlevel-dts": "0.10.1", + "rimraf": "3.0.2", + "typedoc": "0.23.23" + }, + "types": "./dist-types/index.d.ts", + "engines": { + 
"node": ">=18.0.0" + }, + "typesVersions": { + "<4.0": { + "types/*": [ + "types/ts3.4/*" + ] + } + }, + "files": [ + "dist-*/**" + ], + "homepage": "https://github.com/smithy-lang/smithy-typescript/tree/master/packages/util-retry", + "repository": { + "type": "git", + "url": "https://github.com/smithy-lang/smithy-typescript.git", + "directory": "packages/util-retry" + }, + "typedoc": { + "entryPoint": "src/index.ts" + }, + "publishConfig": { + "directory": ".release/package" + } +} \ No newline at end of file diff --git a/amplify/functions/downloadDocument/node_modules/@smithy/util-stream/LICENSE b/amplify/functions/downloadDocument/node_modules/@smithy/util-stream/LICENSE new file mode 100644 index 0000000..7b6491b --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@smithy/util-stream/LICENSE @@ -0,0 +1,201 @@ +Apache License + Version 2.0, January 2004 + http://www.apache.org/licenses/ + + TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION + + 1. Definitions. + + "License" shall mean the terms and conditions for use, reproduction, + and distribution as defined by Sections 1 through 9 of this document. + + "Licensor" shall mean the copyright owner or entity authorized by + the copyright owner that is granting the License. + + "Legal Entity" shall mean the union of the acting entity and all + other entities that control, are controlled by, or are under common + control with that entity. For the purposes of this definition, + "control" means (i) the power, direct or indirect, to cause the + direction or management of such entity, whether by contract or + otherwise, or (ii) ownership of fifty percent (50%) or more of the + outstanding shares, or (iii) beneficial ownership of such entity. + + "You" (or "Your") shall mean an individual or Legal Entity + exercising permissions granted by this License. 
+ + "Source" form shall mean the preferred form for making modifications, + including but not limited to software source code, documentation + source, and configuration files. + + "Object" form shall mean any form resulting from mechanical + transformation or translation of a Source form, including but + not limited to compiled object code, generated documentation, + and conversions to other media types. + + "Work" shall mean the work of authorship, whether in Source or + Object form, made available under the License, as indicated by a + copyright notice that is included in or attached to the work + (an example is provided in the Appendix below). + + "Derivative Works" shall mean any work, whether in Source or Object + form, that is based on (or derived from) the Work and for which the + editorial revisions, annotations, elaborations, or other modifications + represent, as a whole, an original work of authorship. For the purposes + of this License, Derivative Works shall not include works that remain + separable from, or merely link (or bind by name) to the interfaces of, + the Work and Derivative Works thereof. + + "Contribution" shall mean any work of authorship, including + the original version of the Work and any modifications or additions + to that Work or Derivative Works thereof, that is intentionally + submitted to Licensor for inclusion in the Work by the copyright owner + or by an individual or Legal Entity authorized to submit on behalf of + the copyright owner. 
For the purposes of this definition, "submitted" + means any form of electronic, verbal, or written communication sent + to the Licensor or its representatives, including but not limited to + communication on electronic mailing lists, source code control systems, + and issue tracking systems that are managed by, or on behalf of, the + Licensor for the purpose of discussing and improving the Work, but + excluding communication that is conspicuously marked or otherwise + designated in writing by the copyright owner as "Not a Contribution." + + "Contributor" shall mean Licensor and any individual or Legal Entity + on behalf of whom a Contribution has been received by Licensor and + subsequently incorporated within the Work. + + 2. Grant of Copyright License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + copyright license to reproduce, prepare Derivative Works of, + publicly display, publicly perform, sublicense, and distribute the + Work and such Derivative Works in Source or Object form. + + 3. Grant of Patent License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + (except as stated in this section) patent license to make, have made, + use, offer to sell, sell, import, and otherwise transfer the Work, + where such license applies only to those patent claims licensable + by such Contributor that are necessarily infringed by their + Contribution(s) alone or by combination of their Contribution(s) + with the Work to which such Contribution(s) was submitted. 
If You + institute patent litigation against any entity (including a + cross-claim or counterclaim in a lawsuit) alleging that the Work + or a Contribution incorporated within the Work constitutes direct + or contributory patent infringement, then any patent licenses + granted to You under this License for that Work shall terminate + as of the date such litigation is filed. + + 4. Redistribution. You may reproduce and distribute copies of the + Work or Derivative Works thereof in any medium, with or without + modifications, and in Source or Object form, provided that You + meet the following conditions: + + (a) You must give any other recipients of the Work or + Derivative Works a copy of this License; and + + (b) You must cause any modified files to carry prominent notices + stating that You changed the files; and + + (c) You must retain, in the Source form of any Derivative Works + that You distribute, all copyright, patent, trademark, and + attribution notices from the Source form of the Work, + excluding those notices that do not pertain to any part of + the Derivative Works; and + + (d) If the Work includes a "NOTICE" text file as part of its + distribution, then any Derivative Works that You distribute must + include a readable copy of the attribution notices contained + within such NOTICE file, excluding those notices that do not + pertain to any part of the Derivative Works, in at least one + of the following places: within a NOTICE text file distributed + as part of the Derivative Works; within the Source form or + documentation, if provided along with the Derivative Works; or, + within a display generated by the Derivative Works, if and + wherever such third-party notices normally appear. The contents + of the NOTICE file are for informational purposes only and + do not modify the License. 
You may add Your own attribution + notices within Derivative Works that You distribute, alongside + or as an addendum to the NOTICE text from the Work, provided + that such additional attribution notices cannot be construed + as modifying the License. + + You may add Your own copyright statement to Your modifications and + may provide additional or different license terms and conditions + for use, reproduction, or distribution of Your modifications, or + for any such Derivative Works as a whole, provided Your use, + reproduction, and distribution of the Work otherwise complies with + the conditions stated in this License. + + 5. Submission of Contributions. Unless You explicitly state otherwise, + any Contribution intentionally submitted for inclusion in the Work + by You to the Licensor shall be under the terms and conditions of + this License, without any additional terms or conditions. + Notwithstanding the above, nothing herein shall supersede or modify + the terms of any separate license agreement you may have executed + with Licensor regarding such Contributions. + + 6. Trademarks. This License does not grant permission to use the trade + names, trademarks, service marks, or product names of the Licensor, + except as required for reasonable and customary use in describing the + origin of the Work and reproducing the content of the NOTICE file. + + 7. Disclaimer of Warranty. Unless required by applicable law or + agreed to in writing, Licensor provides the Work (and each + Contributor provides its Contributions) on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or + implied, including, without limitation, any warranties or conditions + of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A + PARTICULAR PURPOSE. You are solely responsible for determining the + appropriateness of using or redistributing the Work and assume any + risks associated with Your exercise of permissions under this License. + + 8. 
Limitation of Liability. In no event and under no legal theory, + whether in tort (including negligence), contract, or otherwise, + unless required by applicable law (such as deliberate and grossly + negligent acts) or agreed to in writing, shall any Contributor be + liable to You for damages, including any direct, indirect, special, + incidental, or consequential damages of any character arising as a + result of this License or out of the use or inability to use the + Work (including but not limited to damages for loss of goodwill, + work stoppage, computer failure or malfunction, or any and all + other commercial damages or losses), even if such Contributor + has been advised of the possibility of such damages. + + 9. Accepting Warranty or Additional Liability. While redistributing + the Work or Derivative Works thereof, You may choose to offer, + and charge a fee for, acceptance of support, warranty, indemnity, + or other liability obligations and/or rights consistent with this + License. However, in accepting such obligations, You may act only + on Your own behalf and on Your sole responsibility, not on behalf + of any other Contributor, and only if You agree to indemnify, + defend, and hold each Contributor harmless for any liability + incurred by, or claims asserted against, such Contributor by reason + of your accepting any such warranty or additional liability. + + END OF TERMS AND CONDITIONS + + APPENDIX: How to apply the Apache License to your work. + + To apply the Apache License to your work, attach the following + boilerplate notice, with the fields enclosed by brackets "{}" + replaced with your own identifying information. (Don't include + the brackets!) The text should be enclosed in the appropriate + comment syntax for the file format. We also recommend that a + file or class name and description of purpose be included on the + same "printed page" as the copyright notice for easier + identification within third-party archives. 
+ + Copyright 2018-2020 Amazon.com, Inc. or its affiliates. All Rights Reserved. + + Licensed under the Apache License, Version 2.0 (the "License"); + you may not use this file except in compliance with the License. + You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + + Unless required by applicable law or agreed to in writing, software + distributed under the License is distributed on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + See the License for the specific language governing permissions and + limitations under the License. \ No newline at end of file diff --git a/amplify/functions/downloadDocument/node_modules/@smithy/util-stream/README.md b/amplify/functions/downloadDocument/node_modules/@smithy/util-stream/README.md new file mode 100644 index 0000000..6fcd9f6 --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@smithy/util-stream/README.md @@ -0,0 +1,6 @@ +# @smithy/util-stream + +[![NPM version](https://img.shields.io/npm/v/@smithy/util-stream/latest.svg)](https://www.npmjs.com/package/@smithy/util-stream) +[![NPM downloads](https://img.shields.io/npm/dm/@smithy/util-stream.svg)](https://www.npmjs.com/package/@smithy/util-stream) + +Package with utilities to operate on streams. 
diff --git a/amplify/functions/downloadDocument/node_modules/@smithy/util-stream/dist-cjs/ByteArrayCollector.js b/amplify/functions/downloadDocument/node_modules/@smithy/util-stream/dist-cjs/ByteArrayCollector.js new file mode 100644 index 0000000..ea8baac --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@smithy/util-stream/dist-cjs/ByteArrayCollector.js @@ -0,0 +1,35 @@ +"use strict"; +Object.defineProperty(exports, "__esModule", { value: true }); +exports.ByteArrayCollector = void 0; +class ByteArrayCollector { + constructor(allocByteArray) { + this.allocByteArray = allocByteArray; + this.byteLength = 0; + this.byteArrays = []; + } + push(byteArray) { + this.byteArrays.push(byteArray); + this.byteLength += byteArray.byteLength; + } + flush() { + if (this.byteArrays.length === 1) { + const bytes = this.byteArrays[0]; + this.reset(); + return bytes; + } + const aggregation = this.allocByteArray(this.byteLength); + let cursor = 0; + for (let i = 0; i < this.byteArrays.length; ++i) { + const bytes = this.byteArrays[i]; + aggregation.set(bytes, cursor); + cursor += bytes.byteLength; + } + this.reset(); + return aggregation; + } + reset() { + this.byteArrays = []; + this.byteLength = 0; + } +} +exports.ByteArrayCollector = ByteArrayCollector; diff --git a/amplify/functions/downloadDocument/node_modules/@smithy/util-stream/dist-cjs/blob/Uint8ArrayBlobAdapter.js b/amplify/functions/downloadDocument/node_modules/@smithy/util-stream/dist-cjs/blob/Uint8ArrayBlobAdapter.js new file mode 100644 index 0000000..0440577 --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@smithy/util-stream/dist-cjs/blob/Uint8ArrayBlobAdapter.js @@ -0,0 +1 @@ +module.exports = require("../index.js"); \ No newline at end of file diff --git a/amplify/functions/downloadDocument/node_modules/@smithy/util-stream/dist-cjs/blob/transforms.js b/amplify/functions/downloadDocument/node_modules/@smithy/util-stream/dist-cjs/blob/transforms.js new file mode 100644 index 
0000000..0440577 --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@smithy/util-stream/dist-cjs/blob/transforms.js @@ -0,0 +1 @@ +module.exports = require("../index.js"); \ No newline at end of file diff --git a/amplify/functions/downloadDocument/node_modules/@smithy/util-stream/dist-cjs/checksum/ChecksumStream.browser.js b/amplify/functions/downloadDocument/node_modules/@smithy/util-stream/dist-cjs/checksum/ChecksumStream.browser.js new file mode 100644 index 0000000..b73363a --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@smithy/util-stream/dist-cjs/checksum/ChecksumStream.browser.js @@ -0,0 +1,7 @@ +"use strict"; +Object.defineProperty(exports, "__esModule", { value: true }); +exports.ChecksumStream = void 0; +const ReadableStreamRef = typeof ReadableStream === "function" ? ReadableStream : function () { }; +class ChecksumStream extends ReadableStreamRef { +} +exports.ChecksumStream = ChecksumStream; diff --git a/amplify/functions/downloadDocument/node_modules/@smithy/util-stream/dist-cjs/checksum/ChecksumStream.js b/amplify/functions/downloadDocument/node_modules/@smithy/util-stream/dist-cjs/checksum/ChecksumStream.js new file mode 100644 index 0000000..92d0bc0 --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@smithy/util-stream/dist-cjs/checksum/ChecksumStream.js @@ -0,0 +1,49 @@ +"use strict"; +Object.defineProperty(exports, "__esModule", { value: true }); +exports.ChecksumStream = void 0; +const util_base64_1 = require("@smithy/util-base64"); +const stream_1 = require("stream"); +class ChecksumStream extends stream_1.Duplex { + constructor({ expectedChecksum, checksum, source, checksumSourceLocation, base64Encoder, }) { + var _a, _b; + super(); + if (typeof source.pipe === "function") { + this.source = source; + } + else { + throw new Error(`@smithy/util-stream: unsupported source type ${(_b = (_a = source === null || source === void 0 ? void 0 : source.constructor) === null || _a === void 0 ? 
void 0 : _a.name) !== null && _b !== void 0 ? _b : source} in ChecksumStream.`); + } + this.base64Encoder = base64Encoder !== null && base64Encoder !== void 0 ? base64Encoder : util_base64_1.toBase64; + this.expectedChecksum = expectedChecksum; + this.checksum = checksum; + this.checksumSourceLocation = checksumSourceLocation; + this.source.pipe(this); + } + _read(size) { } + _write(chunk, encoding, callback) { + try { + this.checksum.update(chunk); + this.push(chunk); + } + catch (e) { + return callback(e); + } + return callback(); + } + async _final(callback) { + try { + const digest = await this.checksum.digest(); + const received = this.base64Encoder(digest); + if (this.expectedChecksum !== received) { + return callback(new Error(`Checksum mismatch: expected "${this.expectedChecksum}" but received "${received}"` + + ` in response header "${this.checksumSourceLocation}".`)); + } + } + catch (e) { + return callback(e); + } + this.push(null); + return callback(); + } +} +exports.ChecksumStream = ChecksumStream; diff --git a/amplify/functions/downloadDocument/node_modules/@smithy/util-stream/dist-cjs/checksum/createChecksumStream.browser.js b/amplify/functions/downloadDocument/node_modules/@smithy/util-stream/dist-cjs/checksum/createChecksumStream.browser.js new file mode 100644 index 0000000..2f6cf12 --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@smithy/util-stream/dist-cjs/checksum/createChecksumStream.browser.js @@ -0,0 +1,40 @@ +"use strict"; +Object.defineProperty(exports, "__esModule", { value: true }); +exports.createChecksumStream = void 0; +const util_base64_1 = require("@smithy/util-base64"); +const stream_type_check_1 = require("../stream-type-check"); +const ChecksumStream_browser_1 = require("./ChecksumStream.browser"); +const createChecksumStream = ({ expectedChecksum, checksum, source, checksumSourceLocation, base64Encoder, }) => { + var _a, _b; + if (!(0, stream_type_check_1.isReadableStream)(source)) { + throw new 
Error(`@smithy/util-stream: unsupported source type ${(_b = (_a = source === null || source === void 0 ? void 0 : source.constructor) === null || _a === void 0 ? void 0 : _a.name) !== null && _b !== void 0 ? _b : source} in ChecksumStream.`); + } + const encoder = base64Encoder !== null && base64Encoder !== void 0 ? base64Encoder : util_base64_1.toBase64; + if (typeof TransformStream !== "function") { + throw new Error("@smithy/util-stream: unable to instantiate ChecksumStream because API unavailable: ReadableStream/TransformStream."); + } + const transform = new TransformStream({ + start() { }, + async transform(chunk, controller) { + checksum.update(chunk); + controller.enqueue(chunk); + }, + async flush(controller) { + const digest = await checksum.digest(); + const received = encoder(digest); + if (expectedChecksum !== received) { + const error = new Error(`Checksum mismatch: expected "${expectedChecksum}" but received "${received}"` + + ` in response header "${checksumSourceLocation}".`); + controller.error(error); + } + else { + controller.terminate(); + } + }, + }); + source.pipeThrough(transform); + const readable = transform.readable; + Object.setPrototypeOf(readable, ChecksumStream_browser_1.ChecksumStream.prototype); + return readable; +}; +exports.createChecksumStream = createChecksumStream; diff --git a/amplify/functions/downloadDocument/node_modules/@smithy/util-stream/dist-cjs/checksum/createChecksumStream.js b/amplify/functions/downloadDocument/node_modules/@smithy/util-stream/dist-cjs/checksum/createChecksumStream.js new file mode 100644 index 0000000..57e2a2f --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@smithy/util-stream/dist-cjs/checksum/createChecksumStream.js @@ -0,0 +1,13 @@ +"use strict"; +Object.defineProperty(exports, "__esModule", { value: true }); +exports.createChecksumStream = void 0; +const stream_type_check_1 = require("../stream-type-check"); +const ChecksumStream_1 = require("./ChecksumStream"); +const 
createChecksumStream_browser_1 = require("./createChecksumStream.browser"); +function createChecksumStream(init) { + if (typeof ReadableStream === "function" && (0, stream_type_check_1.isReadableStream)(init.source)) { + return (0, createChecksumStream_browser_1.createChecksumStream)(init); + } + return new ChecksumStream_1.ChecksumStream(init); +} +exports.createChecksumStream = createChecksumStream; diff --git a/amplify/functions/downloadDocument/node_modules/@smithy/util-stream/dist-cjs/createBufferedReadable.js b/amplify/functions/downloadDocument/node_modules/@smithy/util-stream/dist-cjs/createBufferedReadable.js new file mode 100644 index 0000000..4c10847 --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@smithy/util-stream/dist-cjs/createBufferedReadable.js @@ -0,0 +1,61 @@ +"use strict"; +Object.defineProperty(exports, "__esModule", { value: true }); +exports.createBufferedReadable = void 0; +const node_stream_1 = require("node:stream"); +const ByteArrayCollector_1 = require("./ByteArrayCollector"); +const createBufferedReadableStream_1 = require("./createBufferedReadableStream"); +const stream_type_check_1 = require("./stream-type-check"); +function createBufferedReadable(upstream, size, logger) { + if ((0, stream_type_check_1.isReadableStream)(upstream)) { + return (0, createBufferedReadableStream_1.createBufferedReadableStream)(upstream, size, logger); + } + const downstream = new node_stream_1.Readable({ read() { } }); + let streamBufferingLoggedWarning = false; + let bytesSeen = 0; + const buffers = [ + "", + new ByteArrayCollector_1.ByteArrayCollector((size) => new Uint8Array(size)), + new ByteArrayCollector_1.ByteArrayCollector((size) => Buffer.from(new Uint8Array(size))), + ]; + let mode = -1; + upstream.on("data", (chunk) => { + const chunkMode = (0, createBufferedReadableStream_1.modeOf)(chunk, true); + if (mode !== chunkMode) { + if (mode >= 0) { + downstream.push((0, createBufferedReadableStream_1.flush)(buffers, mode)); + } + 
mode = chunkMode; + } + if (mode === -1) { + downstream.push(chunk); + return; + } + const chunkSize = (0, createBufferedReadableStream_1.sizeOf)(chunk); + bytesSeen += chunkSize; + const bufferSize = (0, createBufferedReadableStream_1.sizeOf)(buffers[mode]); + if (chunkSize >= size && bufferSize === 0) { + downstream.push(chunk); + } + else { + const newSize = (0, createBufferedReadableStream_1.merge)(buffers, mode, chunk); + if (!streamBufferingLoggedWarning && bytesSeen > size * 2) { + streamBufferingLoggedWarning = true; + logger === null || logger === void 0 ? void 0 : logger.warn(`@smithy/util-stream - stream chunk size ${chunkSize} is below threshold of ${size}, automatically buffering.`); + } + if (newSize >= size) { + downstream.push((0, createBufferedReadableStream_1.flush)(buffers, mode)); + } + } + }); + upstream.on("end", () => { + if (mode !== -1) { + const remainder = (0, createBufferedReadableStream_1.flush)(buffers, mode); + if ((0, createBufferedReadableStream_1.sizeOf)(remainder) > 0) { + downstream.push(remainder); + } + } + downstream.push(null); + }); + return downstream; +} +exports.createBufferedReadable = createBufferedReadable; diff --git a/amplify/functions/downloadDocument/node_modules/@smithy/util-stream/dist-cjs/createBufferedReadableStream.js b/amplify/functions/downloadDocument/node_modules/@smithy/util-stream/dist-cjs/createBufferedReadableStream.js new file mode 100644 index 0000000..2cd72aa --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@smithy/util-stream/dist-cjs/createBufferedReadableStream.js @@ -0,0 +1,104 @@ +"use strict"; +Object.defineProperty(exports, "__esModule", { value: true }); +exports.modeOf = exports.sizeOf = exports.flush = exports.merge = exports.createBufferedReadable = exports.createBufferedReadableStream = void 0; +const ByteArrayCollector_1 = require("./ByteArrayCollector"); +function createBufferedReadableStream(upstream, size, logger) { + const reader = upstream.getReader(); + let 
streamBufferingLoggedWarning = false; + let bytesSeen = 0; + const buffers = ["", new ByteArrayCollector_1.ByteArrayCollector((size) => new Uint8Array(size))]; + let mode = -1; + const pull = async (controller) => { + const { value, done } = await reader.read(); + const chunk = value; + if (done) { + if (mode !== -1) { + const remainder = flush(buffers, mode); + if (sizeOf(remainder) > 0) { + controller.enqueue(remainder); + } + } + controller.close(); + } + else { + const chunkMode = modeOf(chunk, false); + if (mode !== chunkMode) { + if (mode >= 0) { + controller.enqueue(flush(buffers, mode)); + } + mode = chunkMode; + } + if (mode === -1) { + controller.enqueue(chunk); + return; + } + const chunkSize = sizeOf(chunk); + bytesSeen += chunkSize; + const bufferSize = sizeOf(buffers[mode]); + if (chunkSize >= size && bufferSize === 0) { + controller.enqueue(chunk); + } + else { + const newSize = merge(buffers, mode, chunk); + if (!streamBufferingLoggedWarning && bytesSeen > size * 2) { + streamBufferingLoggedWarning = true; + logger === null || logger === void 0 ? 
void 0 : logger.warn(`@smithy/util-stream - stream chunk size ${chunkSize} is below threshold of ${size}, automatically buffering.`); + } + if (newSize >= size) { + controller.enqueue(flush(buffers, mode)); + } + else { + await pull(controller); + } + } + } + }; + return new ReadableStream({ + pull, + }); +} +exports.createBufferedReadableStream = createBufferedReadableStream; +exports.createBufferedReadable = createBufferedReadableStream; +function merge(buffers, mode, chunk) { + switch (mode) { + case 0: + buffers[0] += chunk; + return sizeOf(buffers[0]); + case 1: + case 2: + buffers[mode].push(chunk); + return sizeOf(buffers[mode]); + } +} +exports.merge = merge; +function flush(buffers, mode) { + switch (mode) { + case 0: + const s = buffers[0]; + buffers[0] = ""; + return s; + case 1: + case 2: + return buffers[mode].flush(); + } + throw new Error(`@smithy/util-stream - invalid index ${mode} given to flush()`); +} +exports.flush = flush; +function sizeOf(chunk) { + var _a, _b; + return (_b = (_a = chunk === null || chunk === void 0 ? void 0 : chunk.byteLength) !== null && _a !== void 0 ? _a : chunk === null || chunk === void 0 ? void 0 : chunk.length) !== null && _b !== void 0 ? 
_b : 0; +} +exports.sizeOf = sizeOf; +function modeOf(chunk, allowBuffer = true) { + if (allowBuffer && typeof Buffer !== "undefined" && chunk instanceof Buffer) { + return 2; + } + if (chunk instanceof Uint8Array) { + return 1; + } + if (typeof chunk === "string") { + return 0; + } + return -1; +} +exports.modeOf = modeOf; diff --git a/amplify/functions/downloadDocument/node_modules/@smithy/util-stream/dist-cjs/getAwsChunkedEncodingStream.browser.js b/amplify/functions/downloadDocument/node_modules/@smithy/util-stream/dist-cjs/getAwsChunkedEncodingStream.browser.js new file mode 100644 index 0000000..d8e540c --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@smithy/util-stream/dist-cjs/getAwsChunkedEncodingStream.browser.js @@ -0,0 +1,31 @@ +"use strict"; +Object.defineProperty(exports, "__esModule", { value: true }); +exports.getAwsChunkedEncodingStream = void 0; +const getAwsChunkedEncodingStream = (readableStream, options) => { + const { base64Encoder, bodyLengthChecker, checksumAlgorithmFn, checksumLocationName, streamHasher } = options; + const checksumRequired = base64Encoder !== undefined && + bodyLengthChecker !== undefined && + checksumAlgorithmFn !== undefined && + checksumLocationName !== undefined && + streamHasher !== undefined; + const digest = checksumRequired ? 
streamHasher(checksumAlgorithmFn, readableStream) : undefined; + const reader = readableStream.getReader(); + return new ReadableStream({ + async pull(controller) { + const { value, done } = await reader.read(); + if (done) { + controller.enqueue(`0\r\n`); + if (checksumRequired) { + const checksum = base64Encoder(await digest); + controller.enqueue(`${checksumLocationName}:${checksum}\r\n`); + controller.enqueue(`\r\n`); + } + controller.close(); + } + else { + controller.enqueue(`${(bodyLengthChecker(value) || 0).toString(16)}\r\n${value}\r\n`); + } + }, + }); +}; +exports.getAwsChunkedEncodingStream = getAwsChunkedEncodingStream; diff --git a/amplify/functions/downloadDocument/node_modules/@smithy/util-stream/dist-cjs/getAwsChunkedEncodingStream.js b/amplify/functions/downloadDocument/node_modules/@smithy/util-stream/dist-cjs/getAwsChunkedEncodingStream.js new file mode 100644 index 0000000..4f3f9e7 --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@smithy/util-stream/dist-cjs/getAwsChunkedEncodingStream.js @@ -0,0 +1,30 @@ +"use strict"; +Object.defineProperty(exports, "__esModule", { value: true }); +exports.getAwsChunkedEncodingStream = void 0; +const stream_1 = require("stream"); +const getAwsChunkedEncodingStream = (readableStream, options) => { + const { base64Encoder, bodyLengthChecker, checksumAlgorithmFn, checksumLocationName, streamHasher } = options; + const checksumRequired = base64Encoder !== undefined && + checksumAlgorithmFn !== undefined && + checksumLocationName !== undefined && + streamHasher !== undefined; + const digest = checksumRequired ? 
streamHasher(checksumAlgorithmFn, readableStream) : undefined; + const awsChunkedEncodingStream = new stream_1.Readable({ read: () => { } }); + readableStream.on("data", (data) => { + const length = bodyLengthChecker(data) || 0; + awsChunkedEncodingStream.push(`${length.toString(16)}\r\n`); + awsChunkedEncodingStream.push(data); + awsChunkedEncodingStream.push("\r\n"); + }); + readableStream.on("end", async () => { + awsChunkedEncodingStream.push(`0\r\n`); + if (checksumRequired) { + const checksum = base64Encoder(await digest); + awsChunkedEncodingStream.push(`${checksumLocationName}:${checksum}\r\n`); + awsChunkedEncodingStream.push(`\r\n`); + } + awsChunkedEncodingStream.push(null); + }); + return awsChunkedEncodingStream; +}; +exports.getAwsChunkedEncodingStream = getAwsChunkedEncodingStream; diff --git a/amplify/functions/downloadDocument/node_modules/@smithy/util-stream/dist-cjs/headStream.browser.js b/amplify/functions/downloadDocument/node_modules/@smithy/util-stream/dist-cjs/headStream.browser.js new file mode 100644 index 0000000..38512c1 --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@smithy/util-stream/dist-cjs/headStream.browser.js @@ -0,0 +1,36 @@ +"use strict"; +Object.defineProperty(exports, "__esModule", { value: true }); +exports.headStream = void 0; +async function headStream(stream, bytes) { + var _a; + let byteLengthCounter = 0; + const chunks = []; + const reader = stream.getReader(); + let isDone = false; + while (!isDone) { + const { done, value } = await reader.read(); + if (value) { + chunks.push(value); + byteLengthCounter += (_a = value === null || value === void 0 ? void 0 : value.byteLength) !== null && _a !== void 0 ? 
_a : 0; + } + if (byteLengthCounter >= bytes) { + break; + } + isDone = done; + } + reader.releaseLock(); + const collected = new Uint8Array(Math.min(bytes, byteLengthCounter)); + let offset = 0; + for (const chunk of chunks) { + if (chunk.byteLength > collected.byteLength - offset) { + collected.set(chunk.subarray(0, collected.byteLength - offset), offset); + break; + } + else { + collected.set(chunk, offset); + } + offset += chunk.length; + } + return collected; +} +exports.headStream = headStream; diff --git a/amplify/functions/downloadDocument/node_modules/@smithy/util-stream/dist-cjs/headStream.js b/amplify/functions/downloadDocument/node_modules/@smithy/util-stream/dist-cjs/headStream.js new file mode 100644 index 0000000..86103b3 --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@smithy/util-stream/dist-cjs/headStream.js @@ -0,0 +1,46 @@ +"use strict"; +Object.defineProperty(exports, "__esModule", { value: true }); +exports.headStream = void 0; +const stream_1 = require("stream"); +const headStream_browser_1 = require("./headStream.browser"); +const stream_type_check_1 = require("./stream-type-check"); +const headStream = (stream, bytes) => { + if ((0, stream_type_check_1.isReadableStream)(stream)) { + return (0, headStream_browser_1.headStream)(stream, bytes); + } + return new Promise((resolve, reject) => { + const collector = new Collector(); + collector.limit = bytes; + stream.pipe(collector); + stream.on("error", (err) => { + collector.end(); + reject(err); + }); + collector.on("error", reject); + collector.on("finish", function () { + const bytes = new Uint8Array(Buffer.concat(this.buffers)); + resolve(bytes); + }); + }); +}; +exports.headStream = headStream; +class Collector extends stream_1.Writable { + constructor() { + super(...arguments); + this.buffers = []; + this.limit = Infinity; + this.bytesBuffered = 0; + } + _write(chunk, encoding, callback) { + var _a; + this.buffers.push(chunk); + this.bytesBuffered += (_a = 
chunk.byteLength) !== null && _a !== void 0 ? _a : 0; + if (this.bytesBuffered >= this.limit) { + const excess = this.bytesBuffered - this.limit; + const tailBuffer = this.buffers[this.buffers.length - 1]; + this.buffers[this.buffers.length - 1] = tailBuffer.subarray(0, tailBuffer.byteLength - excess); + this.emit("finish"); + } + callback(); + } +} diff --git a/amplify/functions/downloadDocument/node_modules/@smithy/util-stream/dist-cjs/index.js b/amplify/functions/downloadDocument/node_modules/@smithy/util-stream/dist-cjs/index.js new file mode 100644 index 0000000..d42fe10 --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@smithy/util-stream/dist-cjs/index.js @@ -0,0 +1,103 @@ +var __defProp = Object.defineProperty; +var __getOwnPropDesc = Object.getOwnPropertyDescriptor; +var __getOwnPropNames = Object.getOwnPropertyNames; +var __hasOwnProp = Object.prototype.hasOwnProperty; +var __name = (target, value) => __defProp(target, "name", { value, configurable: true }); +var __export = (target, all) => { + for (var name in all) + __defProp(target, name, { get: all[name], enumerable: true }); +}; +var __copyProps = (to, from, except, desc) => { + if (from && typeof from === "object" || typeof from === "function") { + for (let key of __getOwnPropNames(from)) + if (!__hasOwnProp.call(to, key) && key !== except) + __defProp(to, key, { get: () => from[key], enumerable: !(desc = __getOwnPropDesc(from, key)) || desc.enumerable }); + } + return to; +}; +var __reExport = (target, mod, secondTarget) => (__copyProps(target, mod, "default"), secondTarget && __copyProps(secondTarget, mod, "default")); +var __toCommonJS = (mod) => __copyProps(__defProp({}, "__esModule", { value: true }), mod); + +// src/index.ts +var src_exports = {}; +__export(src_exports, { + Uint8ArrayBlobAdapter: () => Uint8ArrayBlobAdapter +}); +module.exports = __toCommonJS(src_exports); + +// src/blob/transforms.ts +var import_util_base64 = require("@smithy/util-base64"); +var 
import_util_utf8 = require("@smithy/util-utf8"); +function transformToString(payload, encoding = "utf-8") { + if (encoding === "base64") { + return (0, import_util_base64.toBase64)(payload); + } + return (0, import_util_utf8.toUtf8)(payload); +} +__name(transformToString, "transformToString"); +function transformFromString(str, encoding) { + if (encoding === "base64") { + return Uint8ArrayBlobAdapter.mutate((0, import_util_base64.fromBase64)(str)); + } + return Uint8ArrayBlobAdapter.mutate((0, import_util_utf8.fromUtf8)(str)); +} +__name(transformFromString, "transformFromString"); + +// src/blob/Uint8ArrayBlobAdapter.ts +var Uint8ArrayBlobAdapter = class _Uint8ArrayBlobAdapter extends Uint8Array { + static { + __name(this, "Uint8ArrayBlobAdapter"); + } + /** + * @param source - such as a string or Stream. + * @returns a new Uint8ArrayBlobAdapter extending Uint8Array. + */ + static fromString(source, encoding = "utf-8") { + switch (typeof source) { + case "string": + return transformFromString(source, encoding); + default: + throw new Error(`Unsupported conversion from ${typeof source} to Uint8ArrayBlobAdapter.`); + } + } + /** + * @param source - Uint8Array to be mutated. + * @returns the same Uint8Array but with prototype switched to Uint8ArrayBlobAdapter. + */ + static mutate(source) { + Object.setPrototypeOf(source, _Uint8ArrayBlobAdapter.prototype); + return source; + } + /** + * @param encoding - default 'utf-8'. + * @returns the blob as string. 
+ */ + transformToString(encoding = "utf-8") { + return transformToString(this, encoding); + } +}; + +// src/index.ts +__reExport(src_exports, require("./checksum/ChecksumStream"), module.exports); +__reExport(src_exports, require("./checksum/createChecksumStream"), module.exports); +__reExport(src_exports, require("././createBufferedReadable"), module.exports); +__reExport(src_exports, require("././getAwsChunkedEncodingStream"), module.exports); +__reExport(src_exports, require("././headStream"), module.exports); +__reExport(src_exports, require("././sdk-stream-mixin"), module.exports); +__reExport(src_exports, require("././splitStream"), module.exports); +__reExport(src_exports, require("././stream-type-check"), module.exports); +// Annotate the CommonJS export names for ESM import in node: + +0 && (module.exports = { + Uint8ArrayBlobAdapter, + ChecksumStream, + createChecksumStream, + createBufferedReadable, + getAwsChunkedEncodingStream, + headStream, + sdkStreamMixin, + splitStream, + isReadableStream, + isBlob +}); + diff --git a/amplify/functions/downloadDocument/node_modules/@smithy/util-stream/dist-cjs/sdk-stream-mixin.browser.js b/amplify/functions/downloadDocument/node_modules/@smithy/util-stream/dist-cjs/sdk-stream-mixin.browser.js new file mode 100644 index 0000000..9309af1 --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@smithy/util-stream/dist-cjs/sdk-stream-mixin.browser.js @@ -0,0 +1,69 @@ +"use strict"; +Object.defineProperty(exports, "__esModule", { value: true }); +exports.sdkStreamMixin = void 0; +const fetch_http_handler_1 = require("@smithy/fetch-http-handler"); +const util_base64_1 = require("@smithy/util-base64"); +const util_hex_encoding_1 = require("@smithy/util-hex-encoding"); +const util_utf8_1 = require("@smithy/util-utf8"); +const stream_type_check_1 = require("./stream-type-check"); +const ERR_MSG_STREAM_HAS_BEEN_TRANSFORMED = "The stream has already been transformed."; +const sdkStreamMixin = (stream) => { + var 
_a, _b; + if (!isBlobInstance(stream) && !(0, stream_type_check_1.isReadableStream)(stream)) { + const name = ((_b = (_a = stream === null || stream === void 0 ? void 0 : stream.__proto__) === null || _a === void 0 ? void 0 : _a.constructor) === null || _b === void 0 ? void 0 : _b.name) || stream; + throw new Error(`Unexpected stream implementation, expect Blob or ReadableStream, got ${name}`); + } + let transformed = false; + const transformToByteArray = async () => { + if (transformed) { + throw new Error(ERR_MSG_STREAM_HAS_BEEN_TRANSFORMED); + } + transformed = true; + return await (0, fetch_http_handler_1.streamCollector)(stream); + }; + const blobToWebStream = (blob) => { + if (typeof blob.stream !== "function") { + throw new Error("Cannot transform payload Blob to web stream. Please make sure the Blob.stream() is polyfilled.\n" + + "If you are using React Native, this API is not yet supported, see: https://react-native.canny.io/feature-requests/p/fetch-streaming-body"); + } + return blob.stream(); + }; + return Object.assign(stream, { + transformToByteArray: transformToByteArray, + transformToString: async (encoding) => { + const buf = await transformToByteArray(); + if (encoding === "base64") { + return (0, util_base64_1.toBase64)(buf); + } + else if (encoding === "hex") { + return (0, util_hex_encoding_1.toHex)(buf); + } + else if (encoding === undefined || encoding === "utf8" || encoding === "utf-8") { + return (0, util_utf8_1.toUtf8)(buf); + } + else if (typeof TextDecoder === "function") { + return new TextDecoder(encoding).decode(buf); + } + else { + throw new Error("TextDecoder is not available, please make sure polyfill is provided."); + } + }, + transformToWebStream: () => { + if (transformed) { + throw new Error(ERR_MSG_STREAM_HAS_BEEN_TRANSFORMED); + } + transformed = true; + if (isBlobInstance(stream)) { + return blobToWebStream(stream); + } + else if ((0, stream_type_check_1.isReadableStream)(stream)) { + return stream; + } + else { + throw new 
Error(`Cannot transform payload to web stream, got ${stream}`); + } + }, + }); +}; +exports.sdkStreamMixin = sdkStreamMixin; +const isBlobInstance = (stream) => typeof Blob === "function" && stream instanceof Blob; diff --git a/amplify/functions/downloadDocument/node_modules/@smithy/util-stream/dist-cjs/sdk-stream-mixin.js b/amplify/functions/downloadDocument/node_modules/@smithy/util-stream/dist-cjs/sdk-stream-mixin.js new file mode 100644 index 0000000..0817eac --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@smithy/util-stream/dist-cjs/sdk-stream-mixin.js @@ -0,0 +1,55 @@ +"use strict"; +Object.defineProperty(exports, "__esModule", { value: true }); +exports.sdkStreamMixin = void 0; +const node_http_handler_1 = require("@smithy/node-http-handler"); +const util_buffer_from_1 = require("@smithy/util-buffer-from"); +const stream_1 = require("stream"); +const sdk_stream_mixin_browser_1 = require("./sdk-stream-mixin.browser"); +const ERR_MSG_STREAM_HAS_BEEN_TRANSFORMED = "The stream has already been transformed."; +const sdkStreamMixin = (stream) => { + var _a, _b; + if (!(stream instanceof stream_1.Readable)) { + try { + return (0, sdk_stream_mixin_browser_1.sdkStreamMixin)(stream); + } + catch (e) { + const name = ((_b = (_a = stream === null || stream === void 0 ? void 0 : stream.__proto__) === null || _a === void 0 ? void 0 : _a.constructor) === null || _b === void 0 ? 
void 0 : _b.name) || stream; + throw new Error(`Unexpected stream implementation, expect Stream.Readable instance, got ${name}`); + } + } + let transformed = false; + const transformToByteArray = async () => { + if (transformed) { + throw new Error(ERR_MSG_STREAM_HAS_BEEN_TRANSFORMED); + } + transformed = true; + return await (0, node_http_handler_1.streamCollector)(stream); + }; + return Object.assign(stream, { + transformToByteArray, + transformToString: async (encoding) => { + const buf = await transformToByteArray(); + if (encoding === undefined || Buffer.isEncoding(encoding)) { + return (0, util_buffer_from_1.fromArrayBuffer)(buf.buffer, buf.byteOffset, buf.byteLength).toString(encoding); + } + else { + const decoder = new TextDecoder(encoding); + return decoder.decode(buf); + } + }, + transformToWebStream: () => { + if (transformed) { + throw new Error(ERR_MSG_STREAM_HAS_BEEN_TRANSFORMED); + } + if (stream.readableFlowing !== null) { + throw new Error("The stream has been consumed by other callbacks."); + } + if (typeof stream_1.Readable.toWeb !== "function") { + throw new Error("Readable.toWeb() is not supported. 
Please ensure a polyfill is available."); + } + transformed = true; + return stream_1.Readable.toWeb(stream); + }, + }); +}; +exports.sdkStreamMixin = sdkStreamMixin; diff --git a/amplify/functions/downloadDocument/node_modules/@smithy/util-stream/dist-cjs/splitStream.browser.js b/amplify/functions/downloadDocument/node_modules/@smithy/util-stream/dist-cjs/splitStream.browser.js new file mode 100644 index 0000000..eb890cc --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@smithy/util-stream/dist-cjs/splitStream.browser.js @@ -0,0 +1,11 @@ +"use strict"; +Object.defineProperty(exports, "__esModule", { value: true }); +exports.splitStream = void 0; +async function splitStream(stream) { + if (typeof stream.stream === "function") { + stream = stream.stream(); + } + const readableStream = stream; + return readableStream.tee(); +} +exports.splitStream = splitStream; diff --git a/amplify/functions/downloadDocument/node_modules/@smithy/util-stream/dist-cjs/splitStream.js b/amplify/functions/downloadDocument/node_modules/@smithy/util-stream/dist-cjs/splitStream.js new file mode 100644 index 0000000..c55b628 --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@smithy/util-stream/dist-cjs/splitStream.js @@ -0,0 +1,17 @@ +"use strict"; +Object.defineProperty(exports, "__esModule", { value: true }); +exports.splitStream = void 0; +const stream_1 = require("stream"); +const splitStream_browser_1 = require("./splitStream.browser"); +const stream_type_check_1 = require("./stream-type-check"); +async function splitStream(stream) { + if ((0, stream_type_check_1.isReadableStream)(stream) || (0, stream_type_check_1.isBlob)(stream)) { + return (0, splitStream_browser_1.splitStream)(stream); + } + const stream1 = new stream_1.PassThrough(); + const stream2 = new stream_1.PassThrough(); + stream.pipe(stream1); + stream.pipe(stream2); + return [stream1, stream2]; +} +exports.splitStream = splitStream; diff --git 
a/amplify/functions/downloadDocument/node_modules/@smithy/util-stream/dist-cjs/stream-type-check.js b/amplify/functions/downloadDocument/node_modules/@smithy/util-stream/dist-cjs/stream-type-check.js new file mode 100644 index 0000000..a4a6138 --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@smithy/util-stream/dist-cjs/stream-type-check.js @@ -0,0 +1,14 @@ +"use strict"; +Object.defineProperty(exports, "__esModule", { value: true }); +exports.isBlob = exports.isReadableStream = void 0; +const isReadableStream = (stream) => { + var _a; + return typeof ReadableStream === "function" && + (((_a = stream === null || stream === void 0 ? void 0 : stream.constructor) === null || _a === void 0 ? void 0 : _a.name) === ReadableStream.name || stream instanceof ReadableStream); +}; +exports.isReadableStream = isReadableStream; +const isBlob = (blob) => { + var _a; + return typeof Blob === "function" && (((_a = blob === null || blob === void 0 ? void 0 : blob.constructor) === null || _a === void 0 ? 
void 0 : _a.name) === Blob.name || blob instanceof Blob); +}; +exports.isBlob = isBlob; diff --git a/amplify/functions/downloadDocument/node_modules/@smithy/util-stream/dist-es/ByteArrayCollector.js b/amplify/functions/downloadDocument/node_modules/@smithy/util-stream/dist-es/ByteArrayCollector.js new file mode 100644 index 0000000..39af48f --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@smithy/util-stream/dist-es/ByteArrayCollector.js @@ -0,0 +1,31 @@ +export class ByteArrayCollector { + constructor(allocByteArray) { + this.allocByteArray = allocByteArray; + this.byteLength = 0; + this.byteArrays = []; + } + push(byteArray) { + this.byteArrays.push(byteArray); + this.byteLength += byteArray.byteLength; + } + flush() { + if (this.byteArrays.length === 1) { + const bytes = this.byteArrays[0]; + this.reset(); + return bytes; + } + const aggregation = this.allocByteArray(this.byteLength); + let cursor = 0; + for (let i = 0; i < this.byteArrays.length; ++i) { + const bytes = this.byteArrays[i]; + aggregation.set(bytes, cursor); + cursor += bytes.byteLength; + } + this.reset(); + return aggregation; + } + reset() { + this.byteArrays = []; + this.byteLength = 0; + } +} diff --git a/amplify/functions/downloadDocument/node_modules/@smithy/util-stream/dist-es/blob/Uint8ArrayBlobAdapter.js b/amplify/functions/downloadDocument/node_modules/@smithy/util-stream/dist-es/blob/Uint8ArrayBlobAdapter.js new file mode 100644 index 0000000..41746b1 --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@smithy/util-stream/dist-es/blob/Uint8ArrayBlobAdapter.js @@ -0,0 +1,18 @@ +import { transformFromString, transformToString } from "./transforms"; +export class Uint8ArrayBlobAdapter extends Uint8Array { + static fromString(source, encoding = "utf-8") { + switch (typeof source) { + case "string": + return transformFromString(source, encoding); + default: + throw new Error(`Unsupported conversion from ${typeof source} to Uint8ArrayBlobAdapter.`); + } + } 
+ static mutate(source) { + Object.setPrototypeOf(source, Uint8ArrayBlobAdapter.prototype); + return source; + } + transformToString(encoding = "utf-8") { + return transformToString(this, encoding); + } +} diff --git a/amplify/functions/downloadDocument/node_modules/@smithy/util-stream/dist-es/blob/transforms.js b/amplify/functions/downloadDocument/node_modules/@smithy/util-stream/dist-es/blob/transforms.js new file mode 100644 index 0000000..0d1f74a --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@smithy/util-stream/dist-es/blob/transforms.js @@ -0,0 +1,15 @@ +import { fromBase64, toBase64 } from "@smithy/util-base64"; +import { fromUtf8, toUtf8 } from "@smithy/util-utf8"; +import { Uint8ArrayBlobAdapter } from "./Uint8ArrayBlobAdapter"; +export function transformToString(payload, encoding = "utf-8") { + if (encoding === "base64") { + return toBase64(payload); + } + return toUtf8(payload); +} +export function transformFromString(str, encoding) { + if (encoding === "base64") { + return Uint8ArrayBlobAdapter.mutate(fromBase64(str)); + } + return Uint8ArrayBlobAdapter.mutate(fromUtf8(str)); +} diff --git a/amplify/functions/downloadDocument/node_modules/@smithy/util-stream/dist-es/checksum/ChecksumStream.browser.js b/amplify/functions/downloadDocument/node_modules/@smithy/util-stream/dist-es/checksum/ChecksumStream.browser.js new file mode 100644 index 0000000..afcf529 --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@smithy/util-stream/dist-es/checksum/ChecksumStream.browser.js @@ -0,0 +1,3 @@ +const ReadableStreamRef = typeof ReadableStream === "function" ? 
ReadableStream : function () { }; +export class ChecksumStream extends ReadableStreamRef { +} diff --git a/amplify/functions/downloadDocument/node_modules/@smithy/util-stream/dist-es/checksum/ChecksumStream.js b/amplify/functions/downloadDocument/node_modules/@smithy/util-stream/dist-es/checksum/ChecksumStream.js new file mode 100644 index 0000000..e623a09 --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@smithy/util-stream/dist-es/checksum/ChecksumStream.js @@ -0,0 +1,44 @@ +import { toBase64 } from "@smithy/util-base64"; +import { Duplex } from "stream"; +export class ChecksumStream extends Duplex { + constructor({ expectedChecksum, checksum, source, checksumSourceLocation, base64Encoder, }) { + super(); + if (typeof source.pipe === "function") { + this.source = source; + } + else { + throw new Error(`@smithy/util-stream: unsupported source type ${source?.constructor?.name ?? source} in ChecksumStream.`); + } + this.base64Encoder = base64Encoder ?? toBase64; + this.expectedChecksum = expectedChecksum; + this.checksum = checksum; + this.checksumSourceLocation = checksumSourceLocation; + this.source.pipe(this); + } + _read(size) { } + _write(chunk, encoding, callback) { + try { + this.checksum.update(chunk); + this.push(chunk); + } + catch (e) { + return callback(e); + } + return callback(); + } + async _final(callback) { + try { + const digest = await this.checksum.digest(); + const received = this.base64Encoder(digest); + if (this.expectedChecksum !== received) { + return callback(new Error(`Checksum mismatch: expected "${this.expectedChecksum}" but received "${received}"` + + ` in response header "${this.checksumSourceLocation}".`)); + } + } + catch (e) { + return callback(e); + } + this.push(null); + return callback(); + } +} diff --git a/amplify/functions/downloadDocument/node_modules/@smithy/util-stream/dist-es/checksum/createChecksumStream.browser.js 
b/amplify/functions/downloadDocument/node_modules/@smithy/util-stream/dist-es/checksum/createChecksumStream.browser.js new file mode 100644 index 0000000..6a41c12 --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@smithy/util-stream/dist-es/checksum/createChecksumStream.browser.js @@ -0,0 +1,35 @@ +import { toBase64 } from "@smithy/util-base64"; +import { isReadableStream } from "../stream-type-check"; +import { ChecksumStream } from "./ChecksumStream.browser"; +export const createChecksumStream = ({ expectedChecksum, checksum, source, checksumSourceLocation, base64Encoder, }) => { + if (!isReadableStream(source)) { + throw new Error(`@smithy/util-stream: unsupported source type ${source?.constructor?.name ?? source} in ChecksumStream.`); + } + const encoder = base64Encoder ?? toBase64; + if (typeof TransformStream !== "function") { + throw new Error("@smithy/util-stream: unable to instantiate ChecksumStream because API unavailable: ReadableStream/TransformStream."); + } + const transform = new TransformStream({ + start() { }, + async transform(chunk, controller) { + checksum.update(chunk); + controller.enqueue(chunk); + }, + async flush(controller) { + const digest = await checksum.digest(); + const received = encoder(digest); + if (expectedChecksum !== received) { + const error = new Error(`Checksum mismatch: expected "${expectedChecksum}" but received "${received}"` + + ` in response header "${checksumSourceLocation}".`); + controller.error(error); + } + else { + controller.terminate(); + } + }, + }); + source.pipeThrough(transform); + const readable = transform.readable; + Object.setPrototypeOf(readable, ChecksumStream.prototype); + return readable; +}; diff --git a/amplify/functions/downloadDocument/node_modules/@smithy/util-stream/dist-es/checksum/createChecksumStream.js b/amplify/functions/downloadDocument/node_modules/@smithy/util-stream/dist-es/checksum/createChecksumStream.js new file mode 100644 index 0000000..d205b82 --- /dev/null +++ 
b/amplify/functions/downloadDocument/node_modules/@smithy/util-stream/dist-es/checksum/createChecksumStream.js @@ -0,0 +1,9 @@ +import { isReadableStream } from "../stream-type-check"; +import { ChecksumStream } from "./ChecksumStream"; +import { createChecksumStream as createChecksumStreamWeb } from "./createChecksumStream.browser"; +export function createChecksumStream(init) { + if (typeof ReadableStream === "function" && isReadableStream(init.source)) { + return createChecksumStreamWeb(init); + } + return new ChecksumStream(init); +} diff --git a/amplify/functions/downloadDocument/node_modules/@smithy/util-stream/dist-es/createBufferedReadable.js b/amplify/functions/downloadDocument/node_modules/@smithy/util-stream/dist-es/createBufferedReadable.js new file mode 100644 index 0000000..0e3bbce --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@smithy/util-stream/dist-es/createBufferedReadable.js @@ -0,0 +1,57 @@ +import { Readable } from "node:stream"; +import { ByteArrayCollector } from "./ByteArrayCollector"; +import { createBufferedReadableStream, flush, merge, modeOf, sizeOf } from "./createBufferedReadableStream"; +import { isReadableStream } from "./stream-type-check"; +export function createBufferedReadable(upstream, size, logger) { + if (isReadableStream(upstream)) { + return createBufferedReadableStream(upstream, size, logger); + } + const downstream = new Readable({ read() { } }); + let streamBufferingLoggedWarning = false; + let bytesSeen = 0; + const buffers = [ + "", + new ByteArrayCollector((size) => new Uint8Array(size)), + new ByteArrayCollector((size) => Buffer.from(new Uint8Array(size))), + ]; + let mode = -1; + upstream.on("data", (chunk) => { + const chunkMode = modeOf(chunk, true); + if (mode !== chunkMode) { + if (mode >= 0) { + downstream.push(flush(buffers, mode)); + } + mode = chunkMode; + } + if (mode === -1) { + downstream.push(chunk); + return; + } + const chunkSize = sizeOf(chunk); + bytesSeen += chunkSize; + const 
bufferSize = sizeOf(buffers[mode]); + if (chunkSize >= size && bufferSize === 0) { + downstream.push(chunk); + } + else { + const newSize = merge(buffers, mode, chunk); + if (!streamBufferingLoggedWarning && bytesSeen > size * 2) { + streamBufferingLoggedWarning = true; + logger?.warn(`@smithy/util-stream - stream chunk size ${chunkSize} is below threshold of ${size}, automatically buffering.`); + } + if (newSize >= size) { + downstream.push(flush(buffers, mode)); + } + } + }); + upstream.on("end", () => { + if (mode !== -1) { + const remainder = flush(buffers, mode); + if (sizeOf(remainder) > 0) { + downstream.push(remainder); + } + } + downstream.push(null); + }); + return downstream; +} diff --git a/amplify/functions/downloadDocument/node_modules/@smithy/util-stream/dist-es/createBufferedReadableStream.js b/amplify/functions/downloadDocument/node_modules/@smithy/util-stream/dist-es/createBufferedReadableStream.js new file mode 100644 index 0000000..698a757 --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@smithy/util-stream/dist-es/createBufferedReadableStream.js @@ -0,0 +1,95 @@ +import { ByteArrayCollector } from "./ByteArrayCollector"; +export function createBufferedReadableStream(upstream, size, logger) { + const reader = upstream.getReader(); + let streamBufferingLoggedWarning = false; + let bytesSeen = 0; + const buffers = ["", new ByteArrayCollector((size) => new Uint8Array(size))]; + let mode = -1; + const pull = async (controller) => { + const { value, done } = await reader.read(); + const chunk = value; + if (done) { + if (mode !== -1) { + const remainder = flush(buffers, mode); + if (sizeOf(remainder) > 0) { + controller.enqueue(remainder); + } + } + controller.close(); + } + else { + const chunkMode = modeOf(chunk, false); + if (mode !== chunkMode) { + if (mode >= 0) { + controller.enqueue(flush(buffers, mode)); + } + mode = chunkMode; + } + if (mode === -1) { + controller.enqueue(chunk); + return; + } + const chunkSize = 
sizeOf(chunk); + bytesSeen += chunkSize; + const bufferSize = sizeOf(buffers[mode]); + if (chunkSize >= size && bufferSize === 0) { + controller.enqueue(chunk); + } + else { + const newSize = merge(buffers, mode, chunk); + if (!streamBufferingLoggedWarning && bytesSeen > size * 2) { + streamBufferingLoggedWarning = true; + logger?.warn(`@smithy/util-stream - stream chunk size ${chunkSize} is below threshold of ${size}, automatically buffering.`); + } + if (newSize >= size) { + controller.enqueue(flush(buffers, mode)); + } + else { + await pull(controller); + } + } + } + }; + return new ReadableStream({ + pull, + }); +} +export const createBufferedReadable = createBufferedReadableStream; +export function merge(buffers, mode, chunk) { + switch (mode) { + case 0: + buffers[0] += chunk; + return sizeOf(buffers[0]); + case 1: + case 2: + buffers[mode].push(chunk); + return sizeOf(buffers[mode]); + } +} +export function flush(buffers, mode) { + switch (mode) { + case 0: + const s = buffers[0]; + buffers[0] = ""; + return s; + case 1: + case 2: + return buffers[mode].flush(); + } + throw new Error(`@smithy/util-stream - invalid index ${mode} given to flush()`); +} +export function sizeOf(chunk) { + return chunk?.byteLength ?? chunk?.length ?? 
0; +} +export function modeOf(chunk, allowBuffer = true) { + if (allowBuffer && typeof Buffer !== "undefined" && chunk instanceof Buffer) { + return 2; + } + if (chunk instanceof Uint8Array) { + return 1; + } + if (typeof chunk === "string") { + return 0; + } + return -1; +} diff --git a/amplify/functions/downloadDocument/node_modules/@smithy/util-stream/dist-es/getAwsChunkedEncodingStream.browser.js b/amplify/functions/downloadDocument/node_modules/@smithy/util-stream/dist-es/getAwsChunkedEncodingStream.browser.js new file mode 100644 index 0000000..b5d5fa4 --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@smithy/util-stream/dist-es/getAwsChunkedEncodingStream.browser.js @@ -0,0 +1,27 @@ +export const getAwsChunkedEncodingStream = (readableStream, options) => { + const { base64Encoder, bodyLengthChecker, checksumAlgorithmFn, checksumLocationName, streamHasher } = options; + const checksumRequired = base64Encoder !== undefined && + bodyLengthChecker !== undefined && + checksumAlgorithmFn !== undefined && + checksumLocationName !== undefined && + streamHasher !== undefined; + const digest = checksumRequired ? 
streamHasher(checksumAlgorithmFn, readableStream) : undefined; + const reader = readableStream.getReader(); + return new ReadableStream({ + async pull(controller) { + const { value, done } = await reader.read(); + if (done) { + controller.enqueue(`0\r\n`); + if (checksumRequired) { + const checksum = base64Encoder(await digest); + controller.enqueue(`${checksumLocationName}:${checksum}\r\n`); + controller.enqueue(`\r\n`); + } + controller.close(); + } + else { + controller.enqueue(`${(bodyLengthChecker(value) || 0).toString(16)}\r\n${value}\r\n`); + } + }, + }); +}; diff --git a/amplify/functions/downloadDocument/node_modules/@smithy/util-stream/dist-es/getAwsChunkedEncodingStream.js b/amplify/functions/downloadDocument/node_modules/@smithy/util-stream/dist-es/getAwsChunkedEncodingStream.js new file mode 100644 index 0000000..7c55116 --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@smithy/util-stream/dist-es/getAwsChunkedEncodingStream.js @@ -0,0 +1,26 @@ +import { Readable } from "stream"; +export const getAwsChunkedEncodingStream = (readableStream, options) => { + const { base64Encoder, bodyLengthChecker, checksumAlgorithmFn, checksumLocationName, streamHasher } = options; + const checksumRequired = base64Encoder !== undefined && + checksumAlgorithmFn !== undefined && + checksumLocationName !== undefined && + streamHasher !== undefined; + const digest = checksumRequired ? 
streamHasher(checksumAlgorithmFn, readableStream) : undefined; + const awsChunkedEncodingStream = new Readable({ read: () => { } }); + readableStream.on("data", (data) => { + const length = bodyLengthChecker(data) || 0; + awsChunkedEncodingStream.push(`${length.toString(16)}\r\n`); + awsChunkedEncodingStream.push(data); + awsChunkedEncodingStream.push("\r\n"); + }); + readableStream.on("end", async () => { + awsChunkedEncodingStream.push(`0\r\n`); + if (checksumRequired) { + const checksum = base64Encoder(await digest); + awsChunkedEncodingStream.push(`${checksumLocationName}:${checksum}\r\n`); + awsChunkedEncodingStream.push(`\r\n`); + } + awsChunkedEncodingStream.push(null); + }); + return awsChunkedEncodingStream; +}; diff --git a/amplify/functions/downloadDocument/node_modules/@smithy/util-stream/dist-es/headStream.browser.js b/amplify/functions/downloadDocument/node_modules/@smithy/util-stream/dist-es/headStream.browser.js new file mode 100644 index 0000000..4e7f864 --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@smithy/util-stream/dist-es/headStream.browser.js @@ -0,0 +1,31 @@ +export async function headStream(stream, bytes) { + let byteLengthCounter = 0; + const chunks = []; + const reader = stream.getReader(); + let isDone = false; + while (!isDone) { + const { done, value } = await reader.read(); + if (value) { + chunks.push(value); + byteLengthCounter += value?.byteLength ?? 
0; + } + if (byteLengthCounter >= bytes) { + break; + } + isDone = done; + } + reader.releaseLock(); + const collected = new Uint8Array(Math.min(bytes, byteLengthCounter)); + let offset = 0; + for (const chunk of chunks) { + if (chunk.byteLength > collected.byteLength - offset) { + collected.set(chunk.subarray(0, collected.byteLength - offset), offset); + break; + } + else { + collected.set(chunk, offset); + } + offset += chunk.length; + } + return collected; +} diff --git a/amplify/functions/downloadDocument/node_modules/@smithy/util-stream/dist-es/headStream.js b/amplify/functions/downloadDocument/node_modules/@smithy/util-stream/dist-es/headStream.js new file mode 100644 index 0000000..27b28ea --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@smithy/util-stream/dist-es/headStream.js @@ -0,0 +1,41 @@ +import { Writable } from "stream"; +import { headStream as headWebStream } from "./headStream.browser"; +import { isReadableStream } from "./stream-type-check"; +export const headStream = (stream, bytes) => { + if (isReadableStream(stream)) { + return headWebStream(stream, bytes); + } + return new Promise((resolve, reject) => { + const collector = new Collector(); + collector.limit = bytes; + stream.pipe(collector); + stream.on("error", (err) => { + collector.end(); + reject(err); + }); + collector.on("error", reject); + collector.on("finish", function () { + const bytes = new Uint8Array(Buffer.concat(this.buffers)); + resolve(bytes); + }); + }); +}; +class Collector extends Writable { + constructor() { + super(...arguments); + this.buffers = []; + this.limit = Infinity; + this.bytesBuffered = 0; + } + _write(chunk, encoding, callback) { + this.buffers.push(chunk); + this.bytesBuffered += chunk.byteLength ?? 
0; + if (this.bytesBuffered >= this.limit) { + const excess = this.bytesBuffered - this.limit; + const tailBuffer = this.buffers[this.buffers.length - 1]; + this.buffers[this.buffers.length - 1] = tailBuffer.subarray(0, tailBuffer.byteLength - excess); + this.emit("finish"); + } + callback(); + } +} diff --git a/amplify/functions/downloadDocument/node_modules/@smithy/util-stream/dist-es/index.js b/amplify/functions/downloadDocument/node_modules/@smithy/util-stream/dist-es/index.js new file mode 100644 index 0000000..1b5b599 --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@smithy/util-stream/dist-es/index.js @@ -0,0 +1,9 @@ +export * from "./blob/Uint8ArrayBlobAdapter"; +export * from "./checksum/ChecksumStream"; +export * from "./checksum/createChecksumStream"; +export * from "./createBufferedReadable"; +export * from "./getAwsChunkedEncodingStream"; +export * from "./headStream"; +export * from "./sdk-stream-mixin"; +export * from "./splitStream"; +export * from "./stream-type-check"; diff --git a/amplify/functions/downloadDocument/node_modules/@smithy/util-stream/dist-es/sdk-stream-mixin.browser.js b/amplify/functions/downloadDocument/node_modules/@smithy/util-stream/dist-es/sdk-stream-mixin.browser.js new file mode 100644 index 0000000..f21ff66 --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@smithy/util-stream/dist-es/sdk-stream-mixin.browser.js @@ -0,0 +1,64 @@ +import { streamCollector } from "@smithy/fetch-http-handler"; +import { toBase64 } from "@smithy/util-base64"; +import { toHex } from "@smithy/util-hex-encoding"; +import { toUtf8 } from "@smithy/util-utf8"; +import { isReadableStream } from "./stream-type-check"; +const ERR_MSG_STREAM_HAS_BEEN_TRANSFORMED = "The stream has already been transformed."; +export const sdkStreamMixin = (stream) => { + if (!isBlobInstance(stream) && !isReadableStream(stream)) { + const name = stream?.__proto__?.constructor?.name || stream; + throw new Error(`Unexpected stream 
implementation, expect Blob or ReadableStream, got ${name}`); + } + let transformed = false; + const transformToByteArray = async () => { + if (transformed) { + throw new Error(ERR_MSG_STREAM_HAS_BEEN_TRANSFORMED); + } + transformed = true; + return await streamCollector(stream); + }; + const blobToWebStream = (blob) => { + if (typeof blob.stream !== "function") { + throw new Error("Cannot transform payload Blob to web stream. Please make sure the Blob.stream() is polyfilled.\n" + + "If you are using React Native, this API is not yet supported, see: https://react-native.canny.io/feature-requests/p/fetch-streaming-body"); + } + return blob.stream(); + }; + return Object.assign(stream, { + transformToByteArray: transformToByteArray, + transformToString: async (encoding) => { + const buf = await transformToByteArray(); + if (encoding === "base64") { + return toBase64(buf); + } + else if (encoding === "hex") { + return toHex(buf); + } + else if (encoding === undefined || encoding === "utf8" || encoding === "utf-8") { + return toUtf8(buf); + } + else if (typeof TextDecoder === "function") { + return new TextDecoder(encoding).decode(buf); + } + else { + throw new Error("TextDecoder is not available, please make sure polyfill is provided."); + } + }, + transformToWebStream: () => { + if (transformed) { + throw new Error(ERR_MSG_STREAM_HAS_BEEN_TRANSFORMED); + } + transformed = true; + if (isBlobInstance(stream)) { + return blobToWebStream(stream); + } + else if (isReadableStream(stream)) { + return stream; + } + else { + throw new Error(`Cannot transform payload to web stream, got ${stream}`); + } + }, + }); +}; +const isBlobInstance = (stream) => typeof Blob === "function" && stream instanceof Blob; diff --git a/amplify/functions/downloadDocument/node_modules/@smithy/util-stream/dist-es/sdk-stream-mixin.js b/amplify/functions/downloadDocument/node_modules/@smithy/util-stream/dist-es/sdk-stream-mixin.js new file mode 100644 index 0000000..4731333 --- /dev/null +++ 
b/amplify/functions/downloadDocument/node_modules/@smithy/util-stream/dist-es/sdk-stream-mixin.js @@ -0,0 +1,50 @@ +import { streamCollector } from "@smithy/node-http-handler"; +import { fromArrayBuffer } from "@smithy/util-buffer-from"; +import { Readable } from "stream"; +import { sdkStreamMixin as sdkStreamMixinReadableStream } from "./sdk-stream-mixin.browser"; +const ERR_MSG_STREAM_HAS_BEEN_TRANSFORMED = "The stream has already been transformed."; +export const sdkStreamMixin = (stream) => { + if (!(stream instanceof Readable)) { + try { + return sdkStreamMixinReadableStream(stream); + } + catch (e) { + const name = stream?.__proto__?.constructor?.name || stream; + throw new Error(`Unexpected stream implementation, expect Stream.Readable instance, got ${name}`); + } + } + let transformed = false; + const transformToByteArray = async () => { + if (transformed) { + throw new Error(ERR_MSG_STREAM_HAS_BEEN_TRANSFORMED); + } + transformed = true; + return await streamCollector(stream); + }; + return Object.assign(stream, { + transformToByteArray, + transformToString: async (encoding) => { + const buf = await transformToByteArray(); + if (encoding === undefined || Buffer.isEncoding(encoding)) { + return fromArrayBuffer(buf.buffer, buf.byteOffset, buf.byteLength).toString(encoding); + } + else { + const decoder = new TextDecoder(encoding); + return decoder.decode(buf); + } + }, + transformToWebStream: () => { + if (transformed) { + throw new Error(ERR_MSG_STREAM_HAS_BEEN_TRANSFORMED); + } + if (stream.readableFlowing !== null) { + throw new Error("The stream has been consumed by other callbacks."); + } + if (typeof Readable.toWeb !== "function") { + throw new Error("Readable.toWeb() is not supported. 
Please ensure a polyfill is available."); + } + transformed = true; + return Readable.toWeb(stream); + }, + }); +}; diff --git a/amplify/functions/downloadDocument/node_modules/@smithy/util-stream/dist-es/splitStream.browser.js b/amplify/functions/downloadDocument/node_modules/@smithy/util-stream/dist-es/splitStream.browser.js new file mode 100644 index 0000000..6f06b0e --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@smithy/util-stream/dist-es/splitStream.browser.js @@ -0,0 +1,7 @@ +export async function splitStream(stream) { + if (typeof stream.stream === "function") { + stream = stream.stream(); + } + const readableStream = stream; + return readableStream.tee(); +} diff --git a/amplify/functions/downloadDocument/node_modules/@smithy/util-stream/dist-es/splitStream.js b/amplify/functions/downloadDocument/node_modules/@smithy/util-stream/dist-es/splitStream.js new file mode 100644 index 0000000..1a8c032 --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@smithy/util-stream/dist-es/splitStream.js @@ -0,0 +1,13 @@ +import { PassThrough } from "stream"; +import { splitStream as splitWebStream } from "./splitStream.browser"; +import { isBlob, isReadableStream } from "./stream-type-check"; +export async function splitStream(stream) { + if (isReadableStream(stream) || isBlob(stream)) { + return splitWebStream(stream); + } + const stream1 = new PassThrough(); + const stream2 = new PassThrough(); + stream.pipe(stream1); + stream.pipe(stream2); + return [stream1, stream2]; +} diff --git a/amplify/functions/downloadDocument/node_modules/@smithy/util-stream/dist-es/stream-type-check.js b/amplify/functions/downloadDocument/node_modules/@smithy/util-stream/dist-es/stream-type-check.js new file mode 100644 index 0000000..6ee93a3 --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@smithy/util-stream/dist-es/stream-type-check.js @@ -0,0 +1,5 @@ +export const isReadableStream = (stream) => typeof ReadableStream === "function" 
&& + (stream?.constructor?.name === ReadableStream.name || stream instanceof ReadableStream); +export const isBlob = (blob) => { + return typeof Blob === "function" && (blob?.constructor?.name === Blob.name || blob instanceof Blob); +}; diff --git a/amplify/functions/downloadDocument/node_modules/@smithy/util-stream/dist-types/ByteArrayCollector.d.ts b/amplify/functions/downloadDocument/node_modules/@smithy/util-stream/dist-types/ByteArrayCollector.d.ts new file mode 100644 index 0000000..a1bbd53 --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@smithy/util-stream/dist-types/ByteArrayCollector.d.ts @@ -0,0 +1,13 @@ +/** + * Aggregates byteArrays on demand. + * @internal + */ +export declare class ByteArrayCollector { + readonly allocByteArray: (size: number) => Uint8Array; + byteLength: number; + private byteArrays; + constructor(allocByteArray: (size: number) => Uint8Array); + push(byteArray: Uint8Array): void; + flush(): Uint8Array; + private reset; +} diff --git a/amplify/functions/downloadDocument/node_modules/@smithy/util-stream/dist-types/blob/Uint8ArrayBlobAdapter.d.ts b/amplify/functions/downloadDocument/node_modules/@smithy/util-stream/dist-types/blob/Uint8ArrayBlobAdapter.d.ts new file mode 100644 index 0000000..c3d994d --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@smithy/util-stream/dist-types/blob/Uint8ArrayBlobAdapter.d.ts @@ -0,0 +1,21 @@ +/** + * Adapter for conversions of the native Uint8Array type. + * @public + */ +export declare class Uint8ArrayBlobAdapter extends Uint8Array { + /** + * @param source - such as a string or Stream. + * @returns a new Uint8ArrayBlobAdapter extending Uint8Array. + */ + static fromString(source: string, encoding?: string): Uint8ArrayBlobAdapter; + /** + * @param source - Uint8Array to be mutated. + * @returns the same Uint8Array but with prototype switched to Uint8ArrayBlobAdapter. 
+ */ + static mutate(source: Uint8Array): Uint8ArrayBlobAdapter; + /** + * @param encoding - default 'utf-8'. + * @returns the blob as string. + */ + transformToString(encoding?: string): string; +} diff --git a/amplify/functions/downloadDocument/node_modules/@smithy/util-stream/dist-types/blob/transforms.d.ts b/amplify/functions/downloadDocument/node_modules/@smithy/util-stream/dist-types/blob/transforms.d.ts new file mode 100644 index 0000000..c54a18b --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@smithy/util-stream/dist-types/blob/transforms.d.ts @@ -0,0 +1,9 @@ +import { Uint8ArrayBlobAdapter } from "./Uint8ArrayBlobAdapter"; +/** + * @internal + */ +export declare function transformToString(payload: Uint8Array, encoding?: string): string; +/** + * @internal + */ +export declare function transformFromString(str: string, encoding?: string): Uint8ArrayBlobAdapter; diff --git a/amplify/functions/downloadDocument/node_modules/@smithy/util-stream/dist-types/checksum/ChecksumStream.browser.d.ts b/amplify/functions/downloadDocument/node_modules/@smithy/util-stream/dist-types/checksum/ChecksumStream.browser.d.ts new file mode 100644 index 0000000..0c5fbd4 --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@smithy/util-stream/dist-types/checksum/ChecksumStream.browser.d.ts @@ -0,0 +1,37 @@ +import { Checksum, Encoder } from "@smithy/types"; +/** + * @internal + */ +export interface ChecksumStreamInit { + /** + * Base64 value of the expected checksum. + */ + expectedChecksum: string; + /** + * For error messaging, the location from which the checksum value was read. + */ + checksumSourceLocation: string; + /** + * The checksum calculator. + */ + checksum: Checksum; + /** + * The stream to be checked. + */ + source: ReadableStream; + /** + * Optional base 64 encoder if calling from a request context. 
+ */ + base64Encoder?: Encoder; +} +declare const ChecksumStream_base: any; +/** + * This stub exists so that the readable returned by createChecksumStream + * identifies as "ChecksumStream" in alignment with the Node.js + * implementation. + * + * @extends ReadableStream + */ +export declare class ChecksumStream extends ChecksumStream_base { +} +export {}; diff --git a/amplify/functions/downloadDocument/node_modules/@smithy/util-stream/dist-types/checksum/ChecksumStream.d.ts b/amplify/functions/downloadDocument/node_modules/@smithy/util-stream/dist-types/checksum/ChecksumStream.d.ts new file mode 100644 index 0000000..6893e55 --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@smithy/util-stream/dist-types/checksum/ChecksumStream.d.ts @@ -0,0 +1,62 @@ +/// +/// +/// +import { Checksum, Encoder } from "@smithy/types"; +import { Duplex, Readable } from "stream"; +/** + * @internal + */ +export interface ChecksumStreamInit { + /** + * Base64 value of the expected checksum. + */ + expectedChecksum: string; + /** + * For error messaging, the location from which the checksum value was read. + */ + checksumSourceLocation: string; + /** + * The checksum calculator. + */ + checksum: Checksum; + /** + * The stream to be checked. + */ + source: T; + /** + * Optional base 64 encoder if calling from a request context. + */ + base64Encoder?: Encoder; +} +/** + * @internal + * + * Wrapper for throwing checksum errors for streams without + * buffering the stream. + * + */ +export declare class ChecksumStream extends Duplex { + private expectedChecksum; + private checksumSourceLocation; + private checksum; + private source?; + private base64Encoder; + constructor({ expectedChecksum, checksum, source, checksumSourceLocation, base64Encoder, }: ChecksumStreamInit); + /** + * @internal do not call this directly. + */ + _read(size: number): void; + /** + * @internal do not call this directly. 
+ * + * When the upstream source flows data to this stream, + * calculate a step update of the checksum. + */ + _write(chunk: Buffer, encoding: string, callback: (err?: Error) => void): void; + /** + * @internal do not call this directly. + * + * When the upstream source finishes, perform the checksum comparison. + */ + _final(callback: (err?: Error) => void): Promise; +} diff --git a/amplify/functions/downloadDocument/node_modules/@smithy/util-stream/dist-types/checksum/createChecksumStream.browser.d.ts b/amplify/functions/downloadDocument/node_modules/@smithy/util-stream/dist-types/checksum/createChecksumStream.browser.d.ts new file mode 100644 index 0000000..1874987 --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@smithy/util-stream/dist-types/checksum/createChecksumStream.browser.d.ts @@ -0,0 +1,15 @@ +import { ChecksumStreamInit } from "./ChecksumStream.browser"; +/** + * @internal + * Alias prevents compiler from turning + * ReadableStream into ReadableStream, which is incompatible + * with the NodeJS.ReadableStream global type. + */ +export type ReadableStreamType = ReadableStream; +/** + * @internal + * + * Creates a stream adapter for throwing checksum errors for streams without + * buffering the stream. 
+ */ +export declare const createChecksumStream: ({ expectedChecksum, checksum, source, checksumSourceLocation, base64Encoder, }: ChecksumStreamInit) => ReadableStreamType; diff --git a/amplify/functions/downloadDocument/node_modules/@smithy/util-stream/dist-types/checksum/createChecksumStream.d.ts b/amplify/functions/downloadDocument/node_modules/@smithy/util-stream/dist-types/checksum/createChecksumStream.d.ts new file mode 100644 index 0000000..db09f80 --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@smithy/util-stream/dist-types/checksum/createChecksumStream.d.ts @@ -0,0 +1,12 @@ +/// +import { Readable } from "stream"; +import { ChecksumStreamInit } from "./ChecksumStream"; +import { ReadableStreamType } from "./createChecksumStream.browser"; +/** + * @internal + * + * Creates a stream mirroring the input stream's interface, but + * performs checksumming when reading to the end of the stream. + */ +export declare function createChecksumStream(init: ChecksumStreamInit): ReadableStreamType; +export declare function createChecksumStream(init: ChecksumStreamInit): Readable; diff --git a/amplify/functions/downloadDocument/node_modules/@smithy/util-stream/dist-types/createBufferedReadable.d.ts b/amplify/functions/downloadDocument/node_modules/@smithy/util-stream/dist-types/createBufferedReadable.d.ts new file mode 100644 index 0000000..b173636 --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@smithy/util-stream/dist-types/createBufferedReadable.d.ts @@ -0,0 +1,13 @@ +/// +import type { Logger } from "@smithy/types"; +import { Readable } from "node:stream"; +/** + * @internal + * @param upstream - any Readable or ReadableStream. + * @param size - byte or character length minimum. Buffering occurs when a chunk fails to meet this value. + * @param onBuffer - for emitting warnings when buffering occurs. 
+ * @returns another stream of the same data and stream class, but buffers chunks until + * the minimum size is met, except for the last chunk. + */ +export declare function createBufferedReadable(upstream: Readable, size: number, logger?: Logger): Readable; +export declare function createBufferedReadable(upstream: ReadableStream, size: number, logger?: Logger): ReadableStream; diff --git a/amplify/functions/downloadDocument/node_modules/@smithy/util-stream/dist-types/createBufferedReadableStream.d.ts b/amplify/functions/downloadDocument/node_modules/@smithy/util-stream/dist-types/createBufferedReadableStream.d.ts new file mode 100644 index 0000000..9f6cdbd --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@smithy/util-stream/dist-types/createBufferedReadableStream.d.ts @@ -0,0 +1,50 @@ +import type { Logger } from "@smithy/types"; +import { ByteArrayCollector } from "./ByteArrayCollector"; +export type BufferStore = [string, ByteArrayCollector, ByteArrayCollector?]; +export type BufferUnion = string | Uint8Array; +export type Modes = 0 | 1 | 2; +/** + * @internal + * @param upstream - any ReadableStream. + * @param size - byte or character length minimum. Buffering occurs when a chunk fails to meet this value. + * @param logger - for emitting warnings when buffering occurs. + * @returns another stream of the same data, but buffers chunks until + * the minimum size is met, except for the last chunk. + */ +export declare function createBufferedReadableStream(upstream: ReadableStream, size: number, logger?: Logger): ReadableStream; +/** + * Replaces R/RS polymorphic implementation in environments with only ReadableStream. + * @internal + */ +export declare const createBufferedReadable: typeof createBufferedReadableStream; +/** + * @internal + * @param buffers + * @param mode + * @param chunk + * @returns the new buffer size after merging the chunk with its appropriate buffer. 
+ */ +export declare function merge(buffers: BufferStore, mode: Modes, chunk: string | Uint8Array): number; +/** + * @internal + * @param buffers + * @param mode + * @returns the buffer matching the mode. + */ +export declare function flush(buffers: BufferStore, mode: Modes | -1): BufferUnion; +/** + * @internal + * @param chunk + * @returns size of the chunk in bytes or characters. + */ +export declare function sizeOf(chunk?: { + byteLength?: number; + length?: number; +}): number; +/** + * @internal + * @param chunk - from upstream Readable. + * @param allowBuffer - allow mode 2 (Buffer), otherwise Buffer will return mode 1. + * @returns type index of the chunk. + */ +export declare function modeOf(chunk: BufferUnion, allowBuffer?: boolean): Modes | -1; diff --git a/amplify/functions/downloadDocument/node_modules/@smithy/util-stream/dist-types/getAwsChunkedEncodingStream.browser.d.ts b/amplify/functions/downloadDocument/node_modules/@smithy/util-stream/dist-types/getAwsChunkedEncodingStream.browser.d.ts new file mode 100644 index 0000000..f767f77 --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@smithy/util-stream/dist-types/getAwsChunkedEncodingStream.browser.d.ts @@ -0,0 +1,5 @@ +import { GetAwsChunkedEncodingStream } from "@smithy/types"; +/** + * @internal + */ +export declare const getAwsChunkedEncodingStream: GetAwsChunkedEncodingStream; diff --git a/amplify/functions/downloadDocument/node_modules/@smithy/util-stream/dist-types/getAwsChunkedEncodingStream.d.ts b/amplify/functions/downloadDocument/node_modules/@smithy/util-stream/dist-types/getAwsChunkedEncodingStream.d.ts new file mode 100644 index 0000000..d3997d0 --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@smithy/util-stream/dist-types/getAwsChunkedEncodingStream.d.ts @@ -0,0 +1,7 @@ +/// +import { GetAwsChunkedEncodingStream } from "@smithy/types"; +import { Readable } from "stream"; +/** + * @internal + */ +export declare const getAwsChunkedEncodingStream: 
GetAwsChunkedEncodingStream; diff --git a/amplify/functions/downloadDocument/node_modules/@smithy/util-stream/dist-types/headStream.browser.d.ts b/amplify/functions/downloadDocument/node_modules/@smithy/util-stream/dist-types/headStream.browser.d.ts new file mode 100644 index 0000000..80ad267 --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@smithy/util-stream/dist-types/headStream.browser.d.ts @@ -0,0 +1,8 @@ +/** + * @internal + * @param stream + * @param bytes - read head bytes from the stream and discard the rest of it. + * + * Caution: the input stream must be destroyed separately, this function does not do so. + */ +export declare function headStream(stream: ReadableStream, bytes: number): Promise; diff --git a/amplify/functions/downloadDocument/node_modules/@smithy/util-stream/dist-types/headStream.d.ts b/amplify/functions/downloadDocument/node_modules/@smithy/util-stream/dist-types/headStream.d.ts new file mode 100644 index 0000000..7ab9714 --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@smithy/util-stream/dist-types/headStream.d.ts @@ -0,0 +1,9 @@ +import { Readable } from "stream"; +/** + * @internal + * @param stream - to be read. + * @param bytes - read head bytes from the stream and discard the rest of it. + * + * Caution: the input stream must be destroyed separately, this function does not do so. 
+ */ +export declare const headStream: (stream: Readable | ReadableStream, bytes: number) => Promise; diff --git a/amplify/functions/downloadDocument/node_modules/@smithy/util-stream/dist-types/index.d.ts b/amplify/functions/downloadDocument/node_modules/@smithy/util-stream/dist-types/index.d.ts new file mode 100644 index 0000000..1b5b599 --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@smithy/util-stream/dist-types/index.d.ts @@ -0,0 +1,9 @@ +export * from "./blob/Uint8ArrayBlobAdapter"; +export * from "./checksum/ChecksumStream"; +export * from "./checksum/createChecksumStream"; +export * from "./createBufferedReadable"; +export * from "./getAwsChunkedEncodingStream"; +export * from "./headStream"; +export * from "./sdk-stream-mixin"; +export * from "./splitStream"; +export * from "./stream-type-check"; diff --git a/amplify/functions/downloadDocument/node_modules/@smithy/util-stream/dist-types/sdk-stream-mixin.browser.d.ts b/amplify/functions/downloadDocument/node_modules/@smithy/util-stream/dist-types/sdk-stream-mixin.browser.d.ts new file mode 100644 index 0000000..400c0b2 --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@smithy/util-stream/dist-types/sdk-stream-mixin.browser.d.ts @@ -0,0 +1,7 @@ +import { SdkStream } from "@smithy/types"; +/** + * The stream handling utility functions for browsers and React Native + * + * @internal + */ +export declare const sdkStreamMixin: (stream: unknown) => SdkStream; diff --git a/amplify/functions/downloadDocument/node_modules/@smithy/util-stream/dist-types/sdk-stream-mixin.d.ts b/amplify/functions/downloadDocument/node_modules/@smithy/util-stream/dist-types/sdk-stream-mixin.d.ts new file mode 100644 index 0000000..34fcb6f --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@smithy/util-stream/dist-types/sdk-stream-mixin.d.ts @@ -0,0 +1,8 @@ +import { SdkStream } from "@smithy/types"; +import { Readable } from "stream"; +/** + * The function that mixes in the utility 
functions to help consuming runtime-specific payload stream. + * + * @internal + */ +export declare const sdkStreamMixin: (stream: unknown) => SdkStream | SdkStream; diff --git a/amplify/functions/downloadDocument/node_modules/@smithy/util-stream/dist-types/splitStream.browser.d.ts b/amplify/functions/downloadDocument/node_modules/@smithy/util-stream/dist-types/splitStream.browser.d.ts new file mode 100644 index 0000000..506c23a --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@smithy/util-stream/dist-types/splitStream.browser.d.ts @@ -0,0 +1,5 @@ +/** + * @param stream + * @returns stream split into two identical streams. + */ +export declare function splitStream(stream: ReadableStream | Blob): Promise<[ReadableStream, ReadableStream]>; diff --git a/amplify/functions/downloadDocument/node_modules/@smithy/util-stream/dist-types/splitStream.d.ts b/amplify/functions/downloadDocument/node_modules/@smithy/util-stream/dist-types/splitStream.d.ts new file mode 100644 index 0000000..8a8a48c --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@smithy/util-stream/dist-types/splitStream.d.ts @@ -0,0 +1,9 @@ +/// +import type { Readable } from "stream"; +/** + * @internal + * @param stream - to be split. + * @returns stream split into two identical streams. 
+ */ +export declare function splitStream(stream: Readable): Promise<[Readable, Readable]>; +export declare function splitStream(stream: ReadableStream): Promise<[ReadableStream, ReadableStream]>; diff --git a/amplify/functions/downloadDocument/node_modules/@smithy/util-stream/dist-types/stream-type-check.d.ts b/amplify/functions/downloadDocument/node_modules/@smithy/util-stream/dist-types/stream-type-check.d.ts new file mode 100644 index 0000000..5607088 --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@smithy/util-stream/dist-types/stream-type-check.d.ts @@ -0,0 +1,16 @@ +/** + * @internal + * Alias prevents compiler from turning + * ReadableStream into ReadableStream, which is incompatible + * with the NodeJS.ReadableStream global type. + */ +type ReadableStreamType = ReadableStream; +/** + * @internal + */ +export declare const isReadableStream: (stream: unknown) => stream is ReadableStreamType; +/** + * @internal + */ +export declare const isBlob: (blob: unknown) => blob is Blob; +export {}; diff --git a/amplify/functions/downloadDocument/node_modules/@smithy/util-stream/dist-types/ts3.4/ByteArrayCollector.d.ts b/amplify/functions/downloadDocument/node_modules/@smithy/util-stream/dist-types/ts3.4/ByteArrayCollector.d.ts new file mode 100644 index 0000000..c309a6c --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@smithy/util-stream/dist-types/ts3.4/ByteArrayCollector.d.ts @@ -0,0 +1,13 @@ +/** + * Aggregates byteArrays on demand. 
+ * @internal + */ +export declare class ByteArrayCollector { + readonly allocByteArray: (size: number) => Uint8Array; + byteLength: number; + private byteArrays; + constructor(allocByteArray: (size: number) => Uint8Array); + push(byteArray: Uint8Array): void; + flush(): Uint8Array; + private reset; +} diff --git a/amplify/functions/downloadDocument/node_modules/@smithy/util-stream/dist-types/ts3.4/blob/Uint8ArrayBlobAdapter.d.ts b/amplify/functions/downloadDocument/node_modules/@smithy/util-stream/dist-types/ts3.4/blob/Uint8ArrayBlobAdapter.d.ts new file mode 100644 index 0000000..e0338a2 --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@smithy/util-stream/dist-types/ts3.4/blob/Uint8ArrayBlobAdapter.d.ts @@ -0,0 +1,21 @@ +/** + * Adapter for conversions of the native Uint8Array type. + * @public + */ +export declare class Uint8ArrayBlobAdapter extends Uint8Array { + /** + * @param source - such as a string or Stream. + * @returns a new Uint8ArrayBlobAdapter extending Uint8Array. + */ + static fromString(source: string, encoding?: string): Uint8ArrayBlobAdapter; + /** + * @param source - Uint8Array to be mutated. + * @returns the same Uint8Array but with prototype switched to Uint8ArrayBlobAdapter. + */ + static mutate(source: Uint8Array): Uint8ArrayBlobAdapter; + /** + * @param encoding - default 'utf-8'. + * @returns the blob as string. 
+ */ + transformToString(encoding?: string): string; +} diff --git a/amplify/functions/downloadDocument/node_modules/@smithy/util-stream/dist-types/ts3.4/blob/transforms.d.ts b/amplify/functions/downloadDocument/node_modules/@smithy/util-stream/dist-types/ts3.4/blob/transforms.d.ts new file mode 100644 index 0000000..6e3ee0a --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@smithy/util-stream/dist-types/ts3.4/blob/transforms.d.ts @@ -0,0 +1,9 @@ +import { Uint8ArrayBlobAdapter } from "./Uint8ArrayBlobAdapter"; +/** + * @internal + */ +export declare function transformToString(payload: Uint8Array, encoding?: string): string; +/** + * @internal + */ +export declare function transformFromString(str: string, encoding?: string): Uint8ArrayBlobAdapter; diff --git a/amplify/functions/downloadDocument/node_modules/@smithy/util-stream/dist-types/ts3.4/checksum/ChecksumStream.browser.d.ts b/amplify/functions/downloadDocument/node_modules/@smithy/util-stream/dist-types/ts3.4/checksum/ChecksumStream.browser.d.ts new file mode 100644 index 0000000..902a9b2 --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@smithy/util-stream/dist-types/ts3.4/checksum/ChecksumStream.browser.d.ts @@ -0,0 +1,37 @@ +import { Checksum, Encoder } from "@smithy/types"; +/** + * @internal + */ +export interface ChecksumStreamInit { + /** + * Base64 value of the expected checksum. + */ + expectedChecksum: string; + /** + * For error messaging, the location from which the checksum value was read. + */ + checksumSourceLocation: string; + /** + * The checksum calculator. + */ + checksum: Checksum; + /** + * The stream to be checked. + */ + source: ReadableStream; + /** + * Optional base 64 encoder if calling from a request context. + */ + base64Encoder?: Encoder; +} +declare const ChecksumStream_base: any; +/** + * This stub exists so that the readable returned by createChecksumStream + * identifies as "ChecksumStream" in alignment with the Node.js + * implementation. 
+ * + * @extends ReadableStream + */ +export declare class ChecksumStream extends ChecksumStream_base { +} +export {}; diff --git a/amplify/functions/downloadDocument/node_modules/@smithy/util-stream/dist-types/ts3.4/checksum/ChecksumStream.d.ts b/amplify/functions/downloadDocument/node_modules/@smithy/util-stream/dist-types/ts3.4/checksum/ChecksumStream.d.ts new file mode 100644 index 0000000..7151034 --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@smithy/util-stream/dist-types/ts3.4/checksum/ChecksumStream.d.ts @@ -0,0 +1,60 @@ +/// +import { Checksum, Encoder } from "@smithy/types"; +import { Duplex, Readable } from "stream"; +/** + * @internal + */ +export interface ChecksumStreamInit { + /** + * Base64 value of the expected checksum. + */ + expectedChecksum: string; + /** + * For error messaging, the location from which the checksum value was read. + */ + checksumSourceLocation: string; + /** + * The checksum calculator. + */ + checksum: Checksum; + /** + * The stream to be checked. + */ + source: T; + /** + * Optional base 64 encoder if calling from a request context. + */ + base64Encoder?: Encoder; +} +/** + * @internal + * + * Wrapper for throwing checksum errors for streams without + * buffering the stream. + * + */ +export declare class ChecksumStream extends Duplex { + private expectedChecksum; + private checksumSourceLocation; + private checksum; + private source?; + private base64Encoder; + constructor({ expectedChecksum, checksum, source, checksumSourceLocation, base64Encoder, }: ChecksumStreamInit); + /** + * @internal do not call this directly. + */ + _read(size: number): void; + /** + * @internal do not call this directly. + * + * When the upstream source flows data to this stream, + * calculate a step update of the checksum. + */ + _write(chunk: Buffer, encoding: string, callback: (err?: Error) => void): void; + /** + * @internal do not call this directly. 
+ * + * When the upstream source finishes, perform the checksum comparison. + */ + _final(callback: (err?: Error) => void): Promise; +} diff --git a/amplify/functions/downloadDocument/node_modules/@smithy/util-stream/dist-types/ts3.4/checksum/createChecksumStream.browser.d.ts b/amplify/functions/downloadDocument/node_modules/@smithy/util-stream/dist-types/ts3.4/checksum/createChecksumStream.browser.d.ts new file mode 100644 index 0000000..bd3c004 --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@smithy/util-stream/dist-types/ts3.4/checksum/createChecksumStream.browser.d.ts @@ -0,0 +1,15 @@ +import { ChecksumStreamInit } from "./ChecksumStream.browser"; +/** + * @internal + * Alias prevents compiler from turning + * ReadableStream into ReadableStream, which is incompatible + * with the NodeJS.ReadableStream global type. + */ +export type ReadableStreamType = ReadableStream; +/** + * @internal + * + * Creates a stream adapter for throwing checksum errors for streams without + * buffering the stream. + */ +export declare const createChecksumStream: ({ expectedChecksum, checksum, source, checksumSourceLocation, base64Encoder, }: ChecksumStreamInit) => ReadableStreamType; diff --git a/amplify/functions/downloadDocument/node_modules/@smithy/util-stream/dist-types/ts3.4/checksum/createChecksumStream.d.ts b/amplify/functions/downloadDocument/node_modules/@smithy/util-stream/dist-types/ts3.4/checksum/createChecksumStream.d.ts new file mode 100644 index 0000000..dc36418 --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@smithy/util-stream/dist-types/ts3.4/checksum/createChecksumStream.d.ts @@ -0,0 +1,12 @@ +/// +import { Readable } from "stream"; +import { ChecksumStreamInit } from "./ChecksumStream"; +import { ReadableStreamType } from "./createChecksumStream.browser"; +/** + * @internal + * + * Creates a stream mirroring the input stream's interface, but + * performs checksumming when reading to the end of the stream. 
+ */ +export declare function createChecksumStream(init: ChecksumStreamInit): ReadableStreamType; +export declare function createChecksumStream(init: ChecksumStreamInit): Readable; diff --git a/amplify/functions/downloadDocument/node_modules/@smithy/util-stream/dist-types/ts3.4/createBufferedReadable.d.ts b/amplify/functions/downloadDocument/node_modules/@smithy/util-stream/dist-types/ts3.4/createBufferedReadable.d.ts new file mode 100644 index 0000000..f62c741 --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@smithy/util-stream/dist-types/ts3.4/createBufferedReadable.d.ts @@ -0,0 +1,13 @@ +/// +import { Logger } from "@smithy/types"; +import { Readable } from "node:stream"; +/** + * @internal + * @param upstream - any Readable or ReadableStream. + * @param size - byte or character length minimum. Buffering occurs when a chunk fails to meet this value. + * @param onBuffer - for emitting warnings when buffering occurs. + * @returns another stream of the same data and stream class, but buffers chunks until + * the minimum size is met, except for the last chunk. + */ +export declare function createBufferedReadable(upstream: Readable, size: number, logger?: Logger): Readable; +export declare function createBufferedReadable(upstream: ReadableStream, size: number, logger?: Logger): ReadableStream; diff --git a/amplify/functions/downloadDocument/node_modules/@smithy/util-stream/dist-types/ts3.4/createBufferedReadableStream.d.ts b/amplify/functions/downloadDocument/node_modules/@smithy/util-stream/dist-types/ts3.4/createBufferedReadableStream.d.ts new file mode 100644 index 0000000..7b4effd --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@smithy/util-stream/dist-types/ts3.4/createBufferedReadableStream.d.ts @@ -0,0 +1,54 @@ +import { Logger } from "@smithy/types"; +import { ByteArrayCollector } from "./ByteArrayCollector"; +export type BufferStore = [ + string, + ByteArrayCollector, + ByteArrayCollector? 
+]; +export type BufferUnion = string | Uint8Array; +export type Modes = 0 | 1 | 2; +/** + * @internal + * @param upstream - any ReadableStream. + * @param size - byte or character length minimum. Buffering occurs when a chunk fails to meet this value. + * @param logger - for emitting warnings when buffering occurs. + * @returns another stream of the same data, but buffers chunks until + * the minimum size is met, except for the last chunk. + */ +export declare function createBufferedReadableStream(upstream: ReadableStream, size: number, logger?: Logger): ReadableStream; +/** + * Replaces R/RS polymorphic implementation in environments with only ReadableStream. + * @internal + */ +export declare const createBufferedReadable: typeof createBufferedReadableStream; +/** + * @internal + * @param buffers + * @param mode + * @param chunk + * @returns the new buffer size after merging the chunk with its appropriate buffer. + */ +export declare function merge(buffers: BufferStore, mode: Modes, chunk: string | Uint8Array): number; +/** + * @internal + * @param buffers + * @param mode + * @returns the buffer matching the mode. + */ +export declare function flush(buffers: BufferStore, mode: Modes | -1): BufferUnion; +/** + * @internal + * @param chunk + * @returns size of the chunk in bytes or characters. + */ +export declare function sizeOf(chunk?: { + byteLength?: number; + length?: number; +}): number; +/** + * @internal + * @param chunk - from upstream Readable. + * @param allowBuffer - allow mode 2 (Buffer), otherwise Buffer will return mode 1. + * @returns type index of the chunk. 
+ */ +export declare function modeOf(chunk: BufferUnion, allowBuffer?: boolean): Modes | -1; diff --git a/amplify/functions/downloadDocument/node_modules/@smithy/util-stream/dist-types/ts3.4/getAwsChunkedEncodingStream.browser.d.ts b/amplify/functions/downloadDocument/node_modules/@smithy/util-stream/dist-types/ts3.4/getAwsChunkedEncodingStream.browser.d.ts new file mode 100644 index 0000000..5979078 --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@smithy/util-stream/dist-types/ts3.4/getAwsChunkedEncodingStream.browser.d.ts @@ -0,0 +1,5 @@ +import { GetAwsChunkedEncodingStream } from "@smithy/types"; +/** + * @internal + */ +export declare const getAwsChunkedEncodingStream: GetAwsChunkedEncodingStream; diff --git a/amplify/functions/downloadDocument/node_modules/@smithy/util-stream/dist-types/ts3.4/getAwsChunkedEncodingStream.d.ts b/amplify/functions/downloadDocument/node_modules/@smithy/util-stream/dist-types/ts3.4/getAwsChunkedEncodingStream.d.ts new file mode 100644 index 0000000..a100381 --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@smithy/util-stream/dist-types/ts3.4/getAwsChunkedEncodingStream.d.ts @@ -0,0 +1,7 @@ +/// +import { GetAwsChunkedEncodingStream } from "@smithy/types"; +import { Readable } from "stream"; +/** + * @internal + */ +export declare const getAwsChunkedEncodingStream: GetAwsChunkedEncodingStream; diff --git a/amplify/functions/downloadDocument/node_modules/@smithy/util-stream/dist-types/ts3.4/headStream.browser.d.ts b/amplify/functions/downloadDocument/node_modules/@smithy/util-stream/dist-types/ts3.4/headStream.browser.d.ts new file mode 100644 index 0000000..d8654c3 --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@smithy/util-stream/dist-types/ts3.4/headStream.browser.d.ts @@ -0,0 +1,8 @@ +/** + * @internal + * @param stream + * @param bytes - read head bytes from the stream and discard the rest of it. 
+ * + * Caution: the input stream must be destroyed separately, this function does not do so. + */ +export declare function headStream(stream: ReadableStream, bytes: number): Promise; diff --git a/amplify/functions/downloadDocument/node_modules/@smithy/util-stream/dist-types/ts3.4/headStream.d.ts b/amplify/functions/downloadDocument/node_modules/@smithy/util-stream/dist-types/ts3.4/headStream.d.ts new file mode 100644 index 0000000..7037715 --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@smithy/util-stream/dist-types/ts3.4/headStream.d.ts @@ -0,0 +1,9 @@ +import { Readable } from "stream"; +/** + * @internal + * @param stream - to be read. + * @param bytes - read head bytes from the stream and discard the rest of it. + * + * Caution: the input stream must be destroyed separately, this function does not do so. + */ +export declare const headStream: (stream: Readable | ReadableStream, bytes: number) => Promise; diff --git a/amplify/functions/downloadDocument/node_modules/@smithy/util-stream/dist-types/ts3.4/index.d.ts b/amplify/functions/downloadDocument/node_modules/@smithy/util-stream/dist-types/ts3.4/index.d.ts new file mode 100644 index 0000000..c7c4c3f --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@smithy/util-stream/dist-types/ts3.4/index.d.ts @@ -0,0 +1,9 @@ +export * from "./blob/Uint8ArrayBlobAdapter"; +export * from "./checksum/ChecksumStream"; +export * from "./checksum/createChecksumStream"; +export * from "./createBufferedReadable"; +export * from "./getAwsChunkedEncodingStream"; +export * from "./headStream"; +export * from "./sdk-stream-mixin"; +export * from "./splitStream"; +export * from "./stream-type-check"; diff --git a/amplify/functions/downloadDocument/node_modules/@smithy/util-stream/dist-types/ts3.4/sdk-stream-mixin.browser.d.ts b/amplify/functions/downloadDocument/node_modules/@smithy/util-stream/dist-types/ts3.4/sdk-stream-mixin.browser.d.ts new file mode 100644 index 0000000..99dea40 --- /dev/null 
+++ b/amplify/functions/downloadDocument/node_modules/@smithy/util-stream/dist-types/ts3.4/sdk-stream-mixin.browser.d.ts @@ -0,0 +1,7 @@ +import { SdkStream } from "@smithy/types"; +/** + * The stream handling utility functions for browsers and React Native + * + * @internal + */ +export declare const sdkStreamMixin: (stream: unknown) => SdkStream; diff --git a/amplify/functions/downloadDocument/node_modules/@smithy/util-stream/dist-types/ts3.4/sdk-stream-mixin.d.ts b/amplify/functions/downloadDocument/node_modules/@smithy/util-stream/dist-types/ts3.4/sdk-stream-mixin.d.ts new file mode 100644 index 0000000..c05518a --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@smithy/util-stream/dist-types/ts3.4/sdk-stream-mixin.d.ts @@ -0,0 +1,8 @@ +import { SdkStream } from "@smithy/types"; +import { Readable } from "stream"; +/** + * The function that mixes in the utility functions to help consuming runtime-specific payload stream. + * + * @internal + */ +export declare const sdkStreamMixin: (stream: unknown) => SdkStream | SdkStream; diff --git a/amplify/functions/downloadDocument/node_modules/@smithy/util-stream/dist-types/ts3.4/splitStream.browser.d.ts b/amplify/functions/downloadDocument/node_modules/@smithy/util-stream/dist-types/ts3.4/splitStream.browser.d.ts new file mode 100644 index 0000000..25c8549 --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@smithy/util-stream/dist-types/ts3.4/splitStream.browser.d.ts @@ -0,0 +1,8 @@ +/** + * @param stream + * @returns stream split into two identical streams. 
+ */ +export declare function splitStream(stream: ReadableStream | Blob): Promise<[ + ReadableStream, + ReadableStream +]>; diff --git a/amplify/functions/downloadDocument/node_modules/@smithy/util-stream/dist-types/ts3.4/splitStream.d.ts b/amplify/functions/downloadDocument/node_modules/@smithy/util-stream/dist-types/ts3.4/splitStream.d.ts new file mode 100644 index 0000000..61a7620 --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@smithy/util-stream/dist-types/ts3.4/splitStream.d.ts @@ -0,0 +1,15 @@ +/// +import { Readable } from "stream"; +/** + * @internal + * @param stream - to be split. + * @returns stream split into two identical streams. + */ +export declare function splitStream(stream: Readable): Promise<[ + Readable, + Readable +]>; +export declare function splitStream(stream: ReadableStream): Promise<[ + ReadableStream, + ReadableStream +]>; diff --git a/amplify/functions/downloadDocument/node_modules/@smithy/util-stream/dist-types/ts3.4/stream-type-check.d.ts b/amplify/functions/downloadDocument/node_modules/@smithy/util-stream/dist-types/ts3.4/stream-type-check.d.ts new file mode 100644 index 0000000..11be8f1 --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@smithy/util-stream/dist-types/ts3.4/stream-type-check.d.ts @@ -0,0 +1,16 @@ +/** + * @internal + * Alias prevents compiler from turning + * ReadableStream into ReadableStream, which is incompatible + * with the NodeJS.ReadableStream global type. 
+ */ +type ReadableStreamType = ReadableStream; +/** + * @internal + */ +export declare const isReadableStream: (stream: unknown) => stream is ReadableStreamType; +/** + * @internal + */ +export declare const isBlob: (blob: unknown) => blob is Blob; +export {}; diff --git a/amplify/functions/downloadDocument/node_modules/@smithy/util-stream/package.json b/amplify/functions/downloadDocument/node_modules/@smithy/util-stream/package.json new file mode 100644 index 0000000..769bfc1 --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@smithy/util-stream/package.json @@ -0,0 +1,98 @@ +{ + "name": "@smithy/util-stream", + "version": "4.2.0", + "scripts": { + "build": "concurrently 'yarn:build:cjs' 'yarn:build:es' 'yarn:build:types && yarn build:types:downlevel'", + "build:cjs": "node ../../scripts/inline util-stream", + "build:es": "yarn g:tsc -p tsconfig.es.json", + "build:types": "yarn g:tsc -p tsconfig.types.json", + "build:types:downlevel": "rimraf dist-types/ts3.4 && downlevel-dts dist-types dist-types/ts3.4", + "stage-release": "rimraf ./.release && yarn pack && mkdir ./.release && tar zxvf ./package.tgz --directory ./.release && rm ./package.tgz", + "clean": "rimraf ./dist-* && rimraf *.tsbuildinfo || exit 0", + "lint": "eslint -c ../../.eslintrc.js \"src/**/*.ts\"", + "format": "prettier --config ../../prettier.config.js --ignore-path ../../.prettierignore --write \"**/*.{ts,md,json}\"", + "extract:docs": "api-extractor run --local", + "test": "yarn g:vitest run && yarn test:browser", + "test:integration": "yarn g:vitest run -c vitest.config.integ.ts", + "test:watch": "yarn g:vitest watch", + "test:integration:watch": "yarn g:vitest watch -c vitest.config.integ.ts", + "test:browser": "yarn g:vitest run -c vitest.config.browser.ts", + "test:browser:watch": "yarn g:vitest watch -c vitest.config.browser.ts" + }, + "main": "./dist-cjs/index.js", + "module": "./dist-es/index.js", + "types": "./dist-types/index.d.ts", + "author": { + "name": "AWS SDK for 
JavaScript Team", + "url": "https://aws.amazon.com/javascript/" + }, + "license": "Apache-2.0", + "dependencies": { + "@smithy/fetch-http-handler": "^5.0.2", + "@smithy/node-http-handler": "^4.0.4", + "@smithy/types": "^4.2.0", + "@smithy/util-base64": "^4.0.0", + "@smithy/util-buffer-from": "^4.0.0", + "@smithy/util-hex-encoding": "^4.0.0", + "@smithy/util-utf8": "^4.0.0", + "tslib": "^2.6.2" + }, + "devDependencies": { + "@smithy/util-test": "^0.2.8", + "@types/node": "^18.11.9", + "concurrently": "7.0.0", + "downlevel-dts": "0.10.1", + "rimraf": "3.0.2", + "typedoc": "0.23.23" + }, + "engines": { + "node": ">=18.0.0" + }, + "typesVersions": { + "<4.0": { + "dist-types/*": [ + "dist-types/ts3.4/*" + ] + } + }, + "files": [ + "dist-*/**" + ], + "browser": { + "./dist-es/checksum/ChecksumStream": "./dist-es/checksum/ChecksumStream.browser", + "./dist-es/checksum/createChecksumStream": "./dist-es/checksum/createChecksumStream.browser", + "./dist-es/createBufferedReadable": "./dist-es/createBufferedReadableStream", + "./dist-es/getAwsChunkedEncodingStream": "./dist-es/getAwsChunkedEncodingStream.browser", + "./dist-es/headStream": "./dist-es/headStream.browser", + "./dist-es/sdk-stream-mixin": "./dist-es/sdk-stream-mixin.browser", + "./dist-es/splitStream": "./dist-es/splitStream.browser" + }, + "react-native": { + "./dist-es/checksum/createChecksumStream": "./dist-es/checksum/createChecksumStream.browser", + "./dist-es/checksum/ChecksumStream": "./dist-es/checksum/ChecksumStream.browser", + "./dist-es/getAwsChunkedEncodingStream": "./dist-es/getAwsChunkedEncodingStream.browser", + "./dist-es/sdk-stream-mixin": "./dist-es/sdk-stream-mixin.browser", + "./dist-es/headStream": "./dist-es/headStream.browser", + "./dist-es/splitStream": "./dist-es/splitStream.browser", + "./dist-es/createBufferedReadable": "./dist-es/createBufferedReadableStream", + "./dist-cjs/checksum/createChecksumStream": "./dist-cjs/checksum/createChecksumStream.browser", + 
"./dist-cjs/checksum/ChecksumStream": "./dist-cjs/checksum/ChecksumStream.browser", + "./dist-cjs/getAwsChunkedEncodingStream": "./dist-cjs/getAwsChunkedEncodingStream.browser", + "./dist-cjs/sdk-stream-mixin": "./dist-cjs/sdk-stream-mixin.browser", + "./dist-cjs/headStream": "./dist-cjs/headStream.browser", + "./dist-cjs/splitStream": "./dist-cjs/splitStream.browser", + "./dist-cjs/createBufferedReadable": "./dist-cjs/createBufferedReadableStream" + }, + "homepage": "https://github.com/smithy-lang/smithy-typescript/tree/main/packages/util-stream", + "repository": { + "type": "git", + "url": "https://github.com/smithy-lang/smithy-typescript.git", + "directory": "packages/util-stream" + }, + "typedoc": { + "entryPoint": "src/index.ts" + }, + "publishConfig": { + "directory": ".release/package" + } +} \ No newline at end of file diff --git a/amplify/functions/downloadDocument/node_modules/@smithy/util-uri-escape/LICENSE b/amplify/functions/downloadDocument/node_modules/@smithy/util-uri-escape/LICENSE new file mode 100644 index 0000000..7b6491b --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@smithy/util-uri-escape/LICENSE @@ -0,0 +1,201 @@ +Apache License + Version 2.0, January 2004 + http://www.apache.org/licenses/ + + TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION + + 1. Definitions. + + "License" shall mean the terms and conditions for use, reproduction, + and distribution as defined by Sections 1 through 9 of this document. + + "Licensor" shall mean the copyright owner or entity authorized by + the copyright owner that is granting the License. + + "Legal Entity" shall mean the union of the acting entity and all + other entities that control, are controlled by, or are under common + control with that entity. 
For the purposes of this definition, + "control" means (i) the power, direct or indirect, to cause the + direction or management of such entity, whether by contract or + otherwise, or (ii) ownership of fifty percent (50%) or more of the + outstanding shares, or (iii) beneficial ownership of such entity. + + "You" (or "Your") shall mean an individual or Legal Entity + exercising permissions granted by this License. + + "Source" form shall mean the preferred form for making modifications, + including but not limited to software source code, documentation + source, and configuration files. + + "Object" form shall mean any form resulting from mechanical + transformation or translation of a Source form, including but + not limited to compiled object code, generated documentation, + and conversions to other media types. + + "Work" shall mean the work of authorship, whether in Source or + Object form, made available under the License, as indicated by a + copyright notice that is included in or attached to the work + (an example is provided in the Appendix below). + + "Derivative Works" shall mean any work, whether in Source or Object + form, that is based on (or derived from) the Work and for which the + editorial revisions, annotations, elaborations, or other modifications + represent, as a whole, an original work of authorship. For the purposes + of this License, Derivative Works shall not include works that remain + separable from, or merely link (or bind by name) to the interfaces of, + the Work and Derivative Works thereof. + + "Contribution" shall mean any work of authorship, including + the original version of the Work and any modifications or additions + to that Work or Derivative Works thereof, that is intentionally + submitted to Licensor for inclusion in the Work by the copyright owner + or by an individual or Legal Entity authorized to submit on behalf of + the copyright owner. 
For the purposes of this definition, "submitted" + means any form of electronic, verbal, or written communication sent + to the Licensor or its representatives, including but not limited to + communication on electronic mailing lists, source code control systems, + and issue tracking systems that are managed by, or on behalf of, the + Licensor for the purpose of discussing and improving the Work, but + excluding communication that is conspicuously marked or otherwise + designated in writing by the copyright owner as "Not a Contribution." + + "Contributor" shall mean Licensor and any individual or Legal Entity + on behalf of whom a Contribution has been received by Licensor and + subsequently incorporated within the Work. + + 2. Grant of Copyright License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + copyright license to reproduce, prepare Derivative Works of, + publicly display, publicly perform, sublicense, and distribute the + Work and such Derivative Works in Source or Object form. + + 3. Grant of Patent License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + (except as stated in this section) patent license to make, have made, + use, offer to sell, sell, import, and otherwise transfer the Work, + where such license applies only to those patent claims licensable + by such Contributor that are necessarily infringed by their + Contribution(s) alone or by combination of their Contribution(s) + with the Work to which such Contribution(s) was submitted. 
If You + institute patent litigation against any entity (including a + cross-claim or counterclaim in a lawsuit) alleging that the Work + or a Contribution incorporated within the Work constitutes direct + or contributory patent infringement, then any patent licenses + granted to You under this License for that Work shall terminate + as of the date such litigation is filed. + + 4. Redistribution. You may reproduce and distribute copies of the + Work or Derivative Works thereof in any medium, with or without + modifications, and in Source or Object form, provided that You + meet the following conditions: + + (a) You must give any other recipients of the Work or + Derivative Works a copy of this License; and + + (b) You must cause any modified files to carry prominent notices + stating that You changed the files; and + + (c) You must retain, in the Source form of any Derivative Works + that You distribute, all copyright, patent, trademark, and + attribution notices from the Source form of the Work, + excluding those notices that do not pertain to any part of + the Derivative Works; and + + (d) If the Work includes a "NOTICE" text file as part of its + distribution, then any Derivative Works that You distribute must + include a readable copy of the attribution notices contained + within such NOTICE file, excluding those notices that do not + pertain to any part of the Derivative Works, in at least one + of the following places: within a NOTICE text file distributed + as part of the Derivative Works; within the Source form or + documentation, if provided along with the Derivative Works; or, + within a display generated by the Derivative Works, if and + wherever such third-party notices normally appear. The contents + of the NOTICE file are for informational purposes only and + do not modify the License. 
You may add Your own attribution + notices within Derivative Works that You distribute, alongside + or as an addendum to the NOTICE text from the Work, provided + that such additional attribution notices cannot be construed + as modifying the License. + + You may add Your own copyright statement to Your modifications and + may provide additional or different license terms and conditions + for use, reproduction, or distribution of Your modifications, or + for any such Derivative Works as a whole, provided Your use, + reproduction, and distribution of the Work otherwise complies with + the conditions stated in this License. + + 5. Submission of Contributions. Unless You explicitly state otherwise, + any Contribution intentionally submitted for inclusion in the Work + by You to the Licensor shall be under the terms and conditions of + this License, without any additional terms or conditions. + Notwithstanding the above, nothing herein shall supersede or modify + the terms of any separate license agreement you may have executed + with Licensor regarding such Contributions. + + 6. Trademarks. This License does not grant permission to use the trade + names, trademarks, service marks, or product names of the Licensor, + except as required for reasonable and customary use in describing the + origin of the Work and reproducing the content of the NOTICE file. + + 7. Disclaimer of Warranty. Unless required by applicable law or + agreed to in writing, Licensor provides the Work (and each + Contributor provides its Contributions) on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or + implied, including, without limitation, any warranties or conditions + of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A + PARTICULAR PURPOSE. You are solely responsible for determining the + appropriateness of using or redistributing the Work and assume any + risks associated with Your exercise of permissions under this License. + + 8. 
Limitation of Liability. In no event and under no legal theory, + whether in tort (including negligence), contract, or otherwise, + unless required by applicable law (such as deliberate and grossly + negligent acts) or agreed to in writing, shall any Contributor be + liable to You for damages, including any direct, indirect, special, + incidental, or consequential damages of any character arising as a + result of this License or out of the use or inability to use the + Work (including but not limited to damages for loss of goodwill, + work stoppage, computer failure or malfunction, or any and all + other commercial damages or losses), even if such Contributor + has been advised of the possibility of such damages. + + 9. Accepting Warranty or Additional Liability. While redistributing + the Work or Derivative Works thereof, You may choose to offer, + and charge a fee for, acceptance of support, warranty, indemnity, + or other liability obligations and/or rights consistent with this + License. However, in accepting such obligations, You may act only + on Your own behalf and on Your sole responsibility, not on behalf + of any other Contributor, and only if You agree to indemnify, + defend, and hold each Contributor harmless for any liability + incurred by, or claims asserted against, such Contributor by reason + of your accepting any such warranty or additional liability. + + END OF TERMS AND CONDITIONS + + APPENDIX: How to apply the Apache License to your work. + + To apply the Apache License to your work, attach the following + boilerplate notice, with the fields enclosed by brackets "{}" + replaced with your own identifying information. (Don't include + the brackets!) The text should be enclosed in the appropriate + comment syntax for the file format. We also recommend that a + file or class name and description of purpose be included on the + same "printed page" as the copyright notice for easier + identification within third-party archives. 
+ + Copyright 2018-2020 Amazon.com, Inc. or its affiliates. All Rights Reserved. + + Licensed under the Apache License, Version 2.0 (the "License"); + you may not use this file except in compliance with the License. + You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + + Unless required by applicable law or agreed to in writing, software + distributed under the License is distributed on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + See the License for the specific language governing permissions and + limitations under the License. \ No newline at end of file diff --git a/amplify/functions/downloadDocument/node_modules/@smithy/util-uri-escape/README.md b/amplify/functions/downloadDocument/node_modules/@smithy/util-uri-escape/README.md new file mode 100644 index 0000000..22e939a --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@smithy/util-uri-escape/README.md @@ -0,0 +1,10 @@ +# @smithy/util-uri-escape + +[![NPM version](https://img.shields.io/npm/v/@smithy/util-uri-escape/latest.svg)](https://www.npmjs.com/package/@smithy/util-uri-escape) +[![NPM downloads](https://img.shields.io/npm/dm/@smithy/util-uri-escape.svg)](https://www.npmjs.com/package/@smithy/util-uri-escape) + +> An internal package + +## Usage + +You probably shouldn't, at least directly. 
diff --git a/amplify/functions/downloadDocument/node_modules/@smithy/util-uri-escape/dist-cjs/escape-uri-path.js b/amplify/functions/downloadDocument/node_modules/@smithy/util-uri-escape/dist-cjs/escape-uri-path.js new file mode 100644 index 0000000..532e610 --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@smithy/util-uri-escape/dist-cjs/escape-uri-path.js @@ -0,0 +1 @@ +module.exports = require("./index.js"); \ No newline at end of file diff --git a/amplify/functions/downloadDocument/node_modules/@smithy/util-uri-escape/dist-cjs/escape-uri.js b/amplify/functions/downloadDocument/node_modules/@smithy/util-uri-escape/dist-cjs/escape-uri.js new file mode 100644 index 0000000..532e610 --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@smithy/util-uri-escape/dist-cjs/escape-uri.js @@ -0,0 +1 @@ +module.exports = require("./index.js"); \ No newline at end of file diff --git a/amplify/functions/downloadDocument/node_modules/@smithy/util-uri-escape/dist-cjs/index.js b/amplify/functions/downloadDocument/node_modules/@smithy/util-uri-escape/dist-cjs/index.js new file mode 100644 index 0000000..51001ef --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@smithy/util-uri-escape/dist-cjs/index.js @@ -0,0 +1,43 @@ +var __defProp = Object.defineProperty; +var __getOwnPropDesc = Object.getOwnPropertyDescriptor; +var __getOwnPropNames = Object.getOwnPropertyNames; +var __hasOwnProp = Object.prototype.hasOwnProperty; +var __name = (target, value) => __defProp(target, "name", { value, configurable: true }); +var __export = (target, all) => { + for (var name in all) + __defProp(target, name, { get: all[name], enumerable: true }); +}; +var __copyProps = (to, from, except, desc) => { + if (from && typeof from === "object" || typeof from === "function") { + for (let key of __getOwnPropNames(from)) + if (!__hasOwnProp.call(to, key) && key !== except) + __defProp(to, key, { get: () => from[key], enumerable: !(desc = 
__getOwnPropDesc(from, key)) || desc.enumerable }); + } + return to; +}; +var __toCommonJS = (mod) => __copyProps(__defProp({}, "__esModule", { value: true }), mod); + +// src/index.ts +var src_exports = {}; +__export(src_exports, { + escapeUri: () => escapeUri, + escapeUriPath: () => escapeUriPath +}); +module.exports = __toCommonJS(src_exports); + +// src/escape-uri.ts +var escapeUri = /* @__PURE__ */ __name((uri) => ( + // AWS percent-encodes some extra non-standard characters in a URI + encodeURIComponent(uri).replace(/[!'()*]/g, hexEncode) +), "escapeUri"); +var hexEncode = /* @__PURE__ */ __name((c) => `%${c.charCodeAt(0).toString(16).toUpperCase()}`, "hexEncode"); + +// src/escape-uri-path.ts +var escapeUriPath = /* @__PURE__ */ __name((uri) => uri.split("/").map(escapeUri).join("/"), "escapeUriPath"); +// Annotate the CommonJS export names for ESM import in node: + +0 && (module.exports = { + escapeUri, + escapeUriPath +}); + diff --git a/amplify/functions/downloadDocument/node_modules/@smithy/util-uri-escape/dist-es/escape-uri-path.js b/amplify/functions/downloadDocument/node_modules/@smithy/util-uri-escape/dist-es/escape-uri-path.js new file mode 100644 index 0000000..81b3fe3 --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@smithy/util-uri-escape/dist-es/escape-uri-path.js @@ -0,0 +1,2 @@ +import { escapeUri } from "./escape-uri"; +export const escapeUriPath = (uri) => uri.split("/").map(escapeUri).join("/"); diff --git a/amplify/functions/downloadDocument/node_modules/@smithy/util-uri-escape/dist-es/escape-uri.js b/amplify/functions/downloadDocument/node_modules/@smithy/util-uri-escape/dist-es/escape-uri.js new file mode 100644 index 0000000..8990be1 --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@smithy/util-uri-escape/dist-es/escape-uri.js @@ -0,0 +1,2 @@ +export const escapeUri = (uri) => encodeURIComponent(uri).replace(/[!'()*]/g, hexEncode); +const hexEncode = (c) => 
`%${c.charCodeAt(0).toString(16).toUpperCase()}`; diff --git a/amplify/functions/downloadDocument/node_modules/@smithy/util-uri-escape/dist-es/index.js b/amplify/functions/downloadDocument/node_modules/@smithy/util-uri-escape/dist-es/index.js new file mode 100644 index 0000000..ed402e1 --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@smithy/util-uri-escape/dist-es/index.js @@ -0,0 +1,2 @@ +export * from "./escape-uri"; +export * from "./escape-uri-path"; diff --git a/amplify/functions/downloadDocument/node_modules/@smithy/util-uri-escape/dist-types/escape-uri-path.d.ts b/amplify/functions/downloadDocument/node_modules/@smithy/util-uri-escape/dist-types/escape-uri-path.d.ts new file mode 100644 index 0000000..b547ff9 --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@smithy/util-uri-escape/dist-types/escape-uri-path.d.ts @@ -0,0 +1,4 @@ +/** + * @internal + */ +export declare const escapeUriPath: (uri: string) => string; diff --git a/amplify/functions/downloadDocument/node_modules/@smithy/util-uri-escape/dist-types/escape-uri.d.ts b/amplify/functions/downloadDocument/node_modules/@smithy/util-uri-escape/dist-types/escape-uri.d.ts new file mode 100644 index 0000000..3f14d2c --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@smithy/util-uri-escape/dist-types/escape-uri.d.ts @@ -0,0 +1,4 @@ +/** + * @internal + */ +export declare const escapeUri: (uri: string) => string; diff --git a/amplify/functions/downloadDocument/node_modules/@smithy/util-uri-escape/dist-types/index.d.ts b/amplify/functions/downloadDocument/node_modules/@smithy/util-uri-escape/dist-types/index.d.ts new file mode 100644 index 0000000..1913825 --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@smithy/util-uri-escape/dist-types/index.d.ts @@ -0,0 +1,8 @@ +/** + * @internal + */ +export * from "./escape-uri"; +/** + * @internal + */ +export * from "./escape-uri-path"; diff --git 
a/amplify/functions/downloadDocument/node_modules/@smithy/util-uri-escape/dist-types/ts3.4/escape-uri-path.d.ts b/amplify/functions/downloadDocument/node_modules/@smithy/util-uri-escape/dist-types/ts3.4/escape-uri-path.d.ts new file mode 100644 index 0000000..a7e19ca --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@smithy/util-uri-escape/dist-types/ts3.4/escape-uri-path.d.ts @@ -0,0 +1,4 @@ +/** + * @internal + */ +export declare const escapeUriPath: (uri: string) => string; diff --git a/amplify/functions/downloadDocument/node_modules/@smithy/util-uri-escape/dist-types/ts3.4/escape-uri.d.ts b/amplify/functions/downloadDocument/node_modules/@smithy/util-uri-escape/dist-types/ts3.4/escape-uri.d.ts new file mode 100644 index 0000000..13cc372 --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@smithy/util-uri-escape/dist-types/ts3.4/escape-uri.d.ts @@ -0,0 +1,4 @@ +/** + * @internal + */ +export declare const escapeUri: (uri: string) => string; diff --git a/amplify/functions/downloadDocument/node_modules/@smithy/util-uri-escape/dist-types/ts3.4/index.d.ts b/amplify/functions/downloadDocument/node_modules/@smithy/util-uri-escape/dist-types/ts3.4/index.d.ts new file mode 100644 index 0000000..ad719fe --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@smithy/util-uri-escape/dist-types/ts3.4/index.d.ts @@ -0,0 +1,8 @@ +/** + * @internal + */ +export * from "./escape-uri"; +/** + * @internal + */ +export * from "./escape-uri-path"; diff --git a/amplify/functions/downloadDocument/node_modules/@smithy/util-uri-escape/package.json b/amplify/functions/downloadDocument/node_modules/@smithy/util-uri-escape/package.json new file mode 100644 index 0000000..4ca6fd9 --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@smithy/util-uri-escape/package.json @@ -0,0 +1,59 @@ +{ + "name": "@smithy/util-uri-escape", + "version": "4.0.0", + "scripts": { + "build": "concurrently 'yarn:build:cjs' 'yarn:build:es' 
'yarn:build:types && yarn build:types:downlevel'", + "build:cjs": "node ../../scripts/inline util-uri-escape", + "build:es": "yarn g:tsc -p tsconfig.es.json", + "build:types": "yarn g:tsc -p tsconfig.types.json", + "build:types:downlevel": "rimraf dist-types/ts3.4 && downlevel-dts dist-types dist-types/ts3.4", + "stage-release": "rimraf ./.release && yarn pack && mkdir ./.release && tar zxvf ./package.tgz --directory ./.release && rm ./package.tgz", + "clean": "rimraf ./dist-* && rimraf *.tsbuildinfo || exit 0", + "lint": "eslint -c ../../.eslintrc.js \"src/**/*.ts\"", + "format": "prettier --config ../../prettier.config.js --ignore-path ../.prettierignore --write \"**/*.{ts,md,json}\"", + "test": "yarn g:vitest run", + "test:watch": "yarn g:vitest watch" + }, + "main": "./dist-cjs/index.js", + "module": "./dist-es/index.js", + "types": "./dist-types/index.d.ts", + "author": { + "name": "AWS SDK for JavaScript Team", + "url": "https://aws.amazon.com/javascript/" + }, + "license": "Apache-2.0", + "dependencies": { + "tslib": "^2.6.2" + }, + "engines": { + "node": ">=18.0.0" + }, + "typesVersions": { + "<4.0": { + "dist-types/*": [ + "dist-types/ts3.4/*" + ] + } + }, + "files": [ + "dist-*/**" + ], + "homepage": "https://github.com/awslabs/smithy-typescript/tree/main/packages/util-uri-escape", + "repository": { + "type": "git", + "url": "https://github.com/awslabs/smithy-typescript.git", + "directory": "packages/util-uri-escape" + }, + "devDependencies": { + "concurrently": "7.0.0", + "downlevel-dts": "0.10.1", + "rimraf": "3.0.2", + "typedoc": "0.23.23" + }, + "typedoc": { + "entryPoint": "src/index.ts" + }, + "publishConfig": { + "directory": ".release/package" + } +} \ No newline at end of file diff --git a/amplify/functions/downloadDocument/node_modules/@smithy/util-utf8/LICENSE b/amplify/functions/downloadDocument/node_modules/@smithy/util-utf8/LICENSE new file mode 100644 index 0000000..7b6491b --- /dev/null +++ 
b/amplify/functions/downloadDocument/node_modules/@smithy/util-utf8/LICENSE @@ -0,0 +1,201 @@ +Apache License + Version 2.0, January 2004 + http://www.apache.org/licenses/ + + TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION + + 1. Definitions. + + "License" shall mean the terms and conditions for use, reproduction, + and distribution as defined by Sections 1 through 9 of this document. + + "Licensor" shall mean the copyright owner or entity authorized by + the copyright owner that is granting the License. + + "Legal Entity" shall mean the union of the acting entity and all + other entities that control, are controlled by, or are under common + control with that entity. For the purposes of this definition, + "control" means (i) the power, direct or indirect, to cause the + direction or management of such entity, whether by contract or + otherwise, or (ii) ownership of fifty percent (50%) or more of the + outstanding shares, or (iii) beneficial ownership of such entity. + + "You" (or "Your") shall mean an individual or Legal Entity + exercising permissions granted by this License. + + "Source" form shall mean the preferred form for making modifications, + including but not limited to software source code, documentation + source, and configuration files. + + "Object" form shall mean any form resulting from mechanical + transformation or translation of a Source form, including but + not limited to compiled object code, generated documentation, + and conversions to other media types. + + "Work" shall mean the work of authorship, whether in Source or + Object form, made available under the License, as indicated by a + copyright notice that is included in or attached to the work + (an example is provided in the Appendix below). 
+ + "Derivative Works" shall mean any work, whether in Source or Object + form, that is based on (or derived from) the Work and for which the + editorial revisions, annotations, elaborations, or other modifications + represent, as a whole, an original work of authorship. For the purposes + of this License, Derivative Works shall not include works that remain + separable from, or merely link (or bind by name) to the interfaces of, + the Work and Derivative Works thereof. + + "Contribution" shall mean any work of authorship, including + the original version of the Work and any modifications or additions + to that Work or Derivative Works thereof, that is intentionally + submitted to Licensor for inclusion in the Work by the copyright owner + or by an individual or Legal Entity authorized to submit on behalf of + the copyright owner. For the purposes of this definition, "submitted" + means any form of electronic, verbal, or written communication sent + to the Licensor or its representatives, including but not limited to + communication on electronic mailing lists, source code control systems, + and issue tracking systems that are managed by, or on behalf of, the + Licensor for the purpose of discussing and improving the Work, but + excluding communication that is conspicuously marked or otherwise + designated in writing by the copyright owner as "Not a Contribution." + + "Contributor" shall mean Licensor and any individual or Legal Entity + on behalf of whom a Contribution has been received by Licensor and + subsequently incorporated within the Work. + + 2. Grant of Copyright License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + copyright license to reproduce, prepare Derivative Works of, + publicly display, publicly perform, sublicense, and distribute the + Work and such Derivative Works in Source or Object form. + + 3. 
Grant of Patent License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + (except as stated in this section) patent license to make, have made, + use, offer to sell, sell, import, and otherwise transfer the Work, + where such license applies only to those patent claims licensable + by such Contributor that are necessarily infringed by their + Contribution(s) alone or by combination of their Contribution(s) + with the Work to which such Contribution(s) was submitted. If You + institute patent litigation against any entity (including a + cross-claim or counterclaim in a lawsuit) alleging that the Work + or a Contribution incorporated within the Work constitutes direct + or contributory patent infringement, then any patent licenses + granted to You under this License for that Work shall terminate + as of the date such litigation is filed. + + 4. Redistribution. You may reproduce and distribute copies of the + Work or Derivative Works thereof in any medium, with or without + modifications, and in Source or Object form, provided that You + meet the following conditions: + + (a) You must give any other recipients of the Work or + Derivative Works a copy of this License; and + + (b) You must cause any modified files to carry prominent notices + stating that You changed the files; and + + (c) You must retain, in the Source form of any Derivative Works + that You distribute, all copyright, patent, trademark, and + attribution notices from the Source form of the Work, + excluding those notices that do not pertain to any part of + the Derivative Works; and + + (d) If the Work includes a "NOTICE" text file as part of its + distribution, then any Derivative Works that You distribute must + include a readable copy of the attribution notices contained + within such NOTICE file, excluding those notices that do not + pertain to any part of the Derivative 
Works, in at least one + of the following places: within a NOTICE text file distributed + as part of the Derivative Works; within the Source form or + documentation, if provided along with the Derivative Works; or, + within a display generated by the Derivative Works, if and + wherever such third-party notices normally appear. The contents + of the NOTICE file are for informational purposes only and + do not modify the License. You may add Your own attribution + notices within Derivative Works that You distribute, alongside + or as an addendum to the NOTICE text from the Work, provided + that such additional attribution notices cannot be construed + as modifying the License. + + You may add Your own copyright statement to Your modifications and + may provide additional or different license terms and conditions + for use, reproduction, or distribution of Your modifications, or + for any such Derivative Works as a whole, provided Your use, + reproduction, and distribution of the Work otherwise complies with + the conditions stated in this License. + + 5. Submission of Contributions. Unless You explicitly state otherwise, + any Contribution intentionally submitted for inclusion in the Work + by You to the Licensor shall be under the terms and conditions of + this License, without any additional terms or conditions. + Notwithstanding the above, nothing herein shall supersede or modify + the terms of any separate license agreement you may have executed + with Licensor regarding such Contributions. + + 6. Trademarks. This License does not grant permission to use the trade + names, trademarks, service marks, or product names of the Licensor, + except as required for reasonable and customary use in describing the + origin of the Work and reproducing the content of the NOTICE file. + + 7. Disclaimer of Warranty. 
Unless required by applicable law or + agreed to in writing, Licensor provides the Work (and each + Contributor provides its Contributions) on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or + implied, including, without limitation, any warranties or conditions + of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A + PARTICULAR PURPOSE. You are solely responsible for determining the + appropriateness of using or redistributing the Work and assume any + risks associated with Your exercise of permissions under this License. + + 8. Limitation of Liability. In no event and under no legal theory, + whether in tort (including negligence), contract, or otherwise, + unless required by applicable law (such as deliberate and grossly + negligent acts) or agreed to in writing, shall any Contributor be + liable to You for damages, including any direct, indirect, special, + incidental, or consequential damages of any character arising as a + result of this License or out of the use or inability to use the + Work (including but not limited to damages for loss of goodwill, + work stoppage, computer failure or malfunction, or any and all + other commercial damages or losses), even if such Contributor + has been advised of the possibility of such damages. + + 9. Accepting Warranty or Additional Liability. While redistributing + the Work or Derivative Works thereof, You may choose to offer, + and charge a fee for, acceptance of support, warranty, indemnity, + or other liability obligations and/or rights consistent with this + License. However, in accepting such obligations, You may act only + on Your own behalf and on Your sole responsibility, not on behalf + of any other Contributor, and only if You agree to indemnify, + defend, and hold each Contributor harmless for any liability + incurred by, or claims asserted against, such Contributor by reason + of your accepting any such warranty or additional liability. 
+ + END OF TERMS AND CONDITIONS + + APPENDIX: How to apply the Apache License to your work. + + To apply the Apache License to your work, attach the following + boilerplate notice, with the fields enclosed by brackets "{}" + replaced with your own identifying information. (Don't include + the brackets!) The text should be enclosed in the appropriate + comment syntax for the file format. We also recommend that a + file or class name and description of purpose be included on the + same "printed page" as the copyright notice for easier + identification within third-party archives. + + Copyright 2018-2020 Amazon.com, Inc. or its affiliates. All Rights Reserved. + + Licensed under the Apache License, Version 2.0 (the "License"); + you may not use this file except in compliance with the License. + You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + + Unless required by applicable law or agreed to in writing, software + distributed under the License is distributed on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + See the License for the specific language governing permissions and + limitations under the License. 
\ No newline at end of file diff --git a/amplify/functions/downloadDocument/node_modules/@smithy/util-utf8/README.md b/amplify/functions/downloadDocument/node_modules/@smithy/util-utf8/README.md new file mode 100644 index 0000000..fc5db6d --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@smithy/util-utf8/README.md @@ -0,0 +1,4 @@ +# @smithy/util-utf8 + +[![NPM version](https://img.shields.io/npm/v/@smithy/util-utf8/latest.svg)](https://www.npmjs.com/package/@smithy/util-utf8) +[![NPM downloads](https://img.shields.io/npm/dm/@smithy/util-utf8.svg)](https://www.npmjs.com/package/@smithy/util-utf8) diff --git a/amplify/functions/downloadDocument/node_modules/@smithy/util-utf8/dist-cjs/fromUtf8.browser.js b/amplify/functions/downloadDocument/node_modules/@smithy/util-utf8/dist-cjs/fromUtf8.browser.js new file mode 100644 index 0000000..532e610 --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@smithy/util-utf8/dist-cjs/fromUtf8.browser.js @@ -0,0 +1 @@ +module.exports = require("./index.js"); \ No newline at end of file diff --git a/amplify/functions/downloadDocument/node_modules/@smithy/util-utf8/dist-cjs/fromUtf8.js b/amplify/functions/downloadDocument/node_modules/@smithy/util-utf8/dist-cjs/fromUtf8.js new file mode 100644 index 0000000..532e610 --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@smithy/util-utf8/dist-cjs/fromUtf8.js @@ -0,0 +1 @@ +module.exports = require("./index.js"); \ No newline at end of file diff --git a/amplify/functions/downloadDocument/node_modules/@smithy/util-utf8/dist-cjs/index.js b/amplify/functions/downloadDocument/node_modules/@smithy/util-utf8/dist-cjs/index.js new file mode 100644 index 0000000..0b22680 --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@smithy/util-utf8/dist-cjs/index.js @@ -0,0 +1,65 @@ +var __defProp = Object.defineProperty; +var __getOwnPropDesc = Object.getOwnPropertyDescriptor; +var __getOwnPropNames = Object.getOwnPropertyNames; +var 
__hasOwnProp = Object.prototype.hasOwnProperty; +var __name = (target, value) => __defProp(target, "name", { value, configurable: true }); +var __export = (target, all) => { + for (var name in all) + __defProp(target, name, { get: all[name], enumerable: true }); +}; +var __copyProps = (to, from, except, desc) => { + if (from && typeof from === "object" || typeof from === "function") { + for (let key of __getOwnPropNames(from)) + if (!__hasOwnProp.call(to, key) && key !== except) + __defProp(to, key, { get: () => from[key], enumerable: !(desc = __getOwnPropDesc(from, key)) || desc.enumerable }); + } + return to; +}; +var __toCommonJS = (mod) => __copyProps(__defProp({}, "__esModule", { value: true }), mod); + +// src/index.ts +var src_exports = {}; +__export(src_exports, { + fromUtf8: () => fromUtf8, + toUint8Array: () => toUint8Array, + toUtf8: () => toUtf8 +}); +module.exports = __toCommonJS(src_exports); + +// src/fromUtf8.ts +var import_util_buffer_from = require("@smithy/util-buffer-from"); +var fromUtf8 = /* @__PURE__ */ __name((input) => { + const buf = (0, import_util_buffer_from.fromString)(input, "utf8"); + return new Uint8Array(buf.buffer, buf.byteOffset, buf.byteLength / Uint8Array.BYTES_PER_ELEMENT); +}, "fromUtf8"); + +// src/toUint8Array.ts +var toUint8Array = /* @__PURE__ */ __name((data) => { + if (typeof data === "string") { + return fromUtf8(data); + } + if (ArrayBuffer.isView(data)) { + return new Uint8Array(data.buffer, data.byteOffset, data.byteLength / Uint8Array.BYTES_PER_ELEMENT); + } + return new Uint8Array(data); +}, "toUint8Array"); + +// src/toUtf8.ts + +var toUtf8 = /* @__PURE__ */ __name((input) => { + if (typeof input === "string") { + return input; + } + if (typeof input !== "object" || typeof input.byteOffset !== "number" || typeof input.byteLength !== "number") { + throw new Error("@smithy/util-utf8: toUtf8 encoder function only accepts string | Uint8Array."); + } + return (0, import_util_buffer_from.fromArrayBuffer)(input.buffer, 
input.byteOffset, input.byteLength).toString("utf8"); +}, "toUtf8"); +// Annotate the CommonJS export names for ESM import in node: + +0 && (module.exports = { + fromUtf8, + toUint8Array, + toUtf8 +}); + diff --git a/amplify/functions/downloadDocument/node_modules/@smithy/util-utf8/dist-cjs/toUint8Array.js b/amplify/functions/downloadDocument/node_modules/@smithy/util-utf8/dist-cjs/toUint8Array.js new file mode 100644 index 0000000..532e610 --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@smithy/util-utf8/dist-cjs/toUint8Array.js @@ -0,0 +1 @@ +module.exports = require("./index.js"); \ No newline at end of file diff --git a/amplify/functions/downloadDocument/node_modules/@smithy/util-utf8/dist-cjs/toUtf8.browser.js b/amplify/functions/downloadDocument/node_modules/@smithy/util-utf8/dist-cjs/toUtf8.browser.js new file mode 100644 index 0000000..532e610 --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@smithy/util-utf8/dist-cjs/toUtf8.browser.js @@ -0,0 +1 @@ +module.exports = require("./index.js"); \ No newline at end of file diff --git a/amplify/functions/downloadDocument/node_modules/@smithy/util-utf8/dist-cjs/toUtf8.js b/amplify/functions/downloadDocument/node_modules/@smithy/util-utf8/dist-cjs/toUtf8.js new file mode 100644 index 0000000..532e610 --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@smithy/util-utf8/dist-cjs/toUtf8.js @@ -0,0 +1 @@ +module.exports = require("./index.js"); \ No newline at end of file diff --git a/amplify/functions/downloadDocument/node_modules/@smithy/util-utf8/dist-es/fromUtf8.browser.js b/amplify/functions/downloadDocument/node_modules/@smithy/util-utf8/dist-es/fromUtf8.browser.js new file mode 100644 index 0000000..7344190 --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@smithy/util-utf8/dist-es/fromUtf8.browser.js @@ -0,0 +1 @@ +export const fromUtf8 = (input) => new TextEncoder().encode(input); diff --git 
a/amplify/functions/downloadDocument/node_modules/@smithy/util-utf8/dist-es/fromUtf8.js b/amplify/functions/downloadDocument/node_modules/@smithy/util-utf8/dist-es/fromUtf8.js new file mode 100644 index 0000000..6dc438b --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@smithy/util-utf8/dist-es/fromUtf8.js @@ -0,0 +1,5 @@ +import { fromString } from "@smithy/util-buffer-from"; +export const fromUtf8 = (input) => { + const buf = fromString(input, "utf8"); + return new Uint8Array(buf.buffer, buf.byteOffset, buf.byteLength / Uint8Array.BYTES_PER_ELEMENT); +}; diff --git a/amplify/functions/downloadDocument/node_modules/@smithy/util-utf8/dist-es/index.js b/amplify/functions/downloadDocument/node_modules/@smithy/util-utf8/dist-es/index.js new file mode 100644 index 0000000..00ba465 --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@smithy/util-utf8/dist-es/index.js @@ -0,0 +1,3 @@ +export * from "./fromUtf8"; +export * from "./toUint8Array"; +export * from "./toUtf8"; diff --git a/amplify/functions/downloadDocument/node_modules/@smithy/util-utf8/dist-es/toUint8Array.js b/amplify/functions/downloadDocument/node_modules/@smithy/util-utf8/dist-es/toUint8Array.js new file mode 100644 index 0000000..2cd36f7 --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@smithy/util-utf8/dist-es/toUint8Array.js @@ -0,0 +1,10 @@ +import { fromUtf8 } from "./fromUtf8"; +export const toUint8Array = (data) => { + if (typeof data === "string") { + return fromUtf8(data); + } + if (ArrayBuffer.isView(data)) { + return new Uint8Array(data.buffer, data.byteOffset, data.byteLength / Uint8Array.BYTES_PER_ELEMENT); + } + return new Uint8Array(data); +}; diff --git a/amplify/functions/downloadDocument/node_modules/@smithy/util-utf8/dist-es/toUtf8.browser.js b/amplify/functions/downloadDocument/node_modules/@smithy/util-utf8/dist-es/toUtf8.browser.js new file mode 100644 index 0000000..c292127 --- /dev/null +++ 
b/amplify/functions/downloadDocument/node_modules/@smithy/util-utf8/dist-es/toUtf8.browser.js @@ -0,0 +1,9 @@ +export const toUtf8 = (input) => { + if (typeof input === "string") { + return input; + } + if (typeof input !== "object" || typeof input.byteOffset !== "number" || typeof input.byteLength !== "number") { + throw new Error("@smithy/util-utf8: toUtf8 encoder function only accepts string | Uint8Array."); + } + return new TextDecoder("utf-8").decode(input); +}; diff --git a/amplify/functions/downloadDocument/node_modules/@smithy/util-utf8/dist-es/toUtf8.js b/amplify/functions/downloadDocument/node_modules/@smithy/util-utf8/dist-es/toUtf8.js new file mode 100644 index 0000000..7be8745 --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@smithy/util-utf8/dist-es/toUtf8.js @@ -0,0 +1,10 @@ +import { fromArrayBuffer } from "@smithy/util-buffer-from"; +export const toUtf8 = (input) => { + if (typeof input === "string") { + return input; + } + if (typeof input !== "object" || typeof input.byteOffset !== "number" || typeof input.byteLength !== "number") { + throw new Error("@smithy/util-utf8: toUtf8 encoder function only accepts string | Uint8Array."); + } + return fromArrayBuffer(input.buffer, input.byteOffset, input.byteLength).toString("utf8"); +}; diff --git a/amplify/functions/downloadDocument/node_modules/@smithy/util-utf8/dist-types/fromUtf8.browser.d.ts b/amplify/functions/downloadDocument/node_modules/@smithy/util-utf8/dist-types/fromUtf8.browser.d.ts new file mode 100644 index 0000000..dd91981 --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@smithy/util-utf8/dist-types/fromUtf8.browser.d.ts @@ -0,0 +1 @@ +export declare const fromUtf8: (input: string) => Uint8Array; diff --git a/amplify/functions/downloadDocument/node_modules/@smithy/util-utf8/dist-types/fromUtf8.d.ts b/amplify/functions/downloadDocument/node_modules/@smithy/util-utf8/dist-types/fromUtf8.d.ts new file mode 100644 index 0000000..dd91981 --- /dev/null +++ 
b/amplify/functions/downloadDocument/node_modules/@smithy/util-utf8/dist-types/fromUtf8.d.ts @@ -0,0 +1 @@ +export declare const fromUtf8: (input: string) => Uint8Array; diff --git a/amplify/functions/downloadDocument/node_modules/@smithy/util-utf8/dist-types/index.d.ts b/amplify/functions/downloadDocument/node_modules/@smithy/util-utf8/dist-types/index.d.ts new file mode 100644 index 0000000..00ba465 --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@smithy/util-utf8/dist-types/index.d.ts @@ -0,0 +1,3 @@ +export * from "./fromUtf8"; +export * from "./toUint8Array"; +export * from "./toUtf8"; diff --git a/amplify/functions/downloadDocument/node_modules/@smithy/util-utf8/dist-types/toUint8Array.d.ts b/amplify/functions/downloadDocument/node_modules/@smithy/util-utf8/dist-types/toUint8Array.d.ts new file mode 100644 index 0000000..11b6342 --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@smithy/util-utf8/dist-types/toUint8Array.d.ts @@ -0,0 +1 @@ +export declare const toUint8Array: (data: string | ArrayBuffer | ArrayBufferView) => Uint8Array; diff --git a/amplify/functions/downloadDocument/node_modules/@smithy/util-utf8/dist-types/toUtf8.browser.d.ts b/amplify/functions/downloadDocument/node_modules/@smithy/util-utf8/dist-types/toUtf8.browser.d.ts new file mode 100644 index 0000000..8494acd --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@smithy/util-utf8/dist-types/toUtf8.browser.d.ts @@ -0,0 +1,7 @@ +/** + * + * This does not convert non-utf8 strings to utf8, it only passes through strings if + * a string is received instead of a Uint8Array. 
+ * + */ +export declare const toUtf8: (input: Uint8Array | string) => string; diff --git a/amplify/functions/downloadDocument/node_modules/@smithy/util-utf8/dist-types/toUtf8.d.ts b/amplify/functions/downloadDocument/node_modules/@smithy/util-utf8/dist-types/toUtf8.d.ts new file mode 100644 index 0000000..8494acd --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@smithy/util-utf8/dist-types/toUtf8.d.ts @@ -0,0 +1,7 @@ +/** + * + * This does not convert non-utf8 strings to utf8, it only passes through strings if + * a string is received instead of a Uint8Array. + * + */ +export declare const toUtf8: (input: Uint8Array | string) => string; diff --git a/amplify/functions/downloadDocument/node_modules/@smithy/util-utf8/dist-types/ts3.4/fromUtf8.browser.d.ts b/amplify/functions/downloadDocument/node_modules/@smithy/util-utf8/dist-types/ts3.4/fromUtf8.browser.d.ts new file mode 100644 index 0000000..39f3d6d --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@smithy/util-utf8/dist-types/ts3.4/fromUtf8.browser.d.ts @@ -0,0 +1 @@ +export declare const fromUtf8: (input: string) => Uint8Array; diff --git a/amplify/functions/downloadDocument/node_modules/@smithy/util-utf8/dist-types/ts3.4/fromUtf8.d.ts b/amplify/functions/downloadDocument/node_modules/@smithy/util-utf8/dist-types/ts3.4/fromUtf8.d.ts new file mode 100644 index 0000000..39f3d6d --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@smithy/util-utf8/dist-types/ts3.4/fromUtf8.d.ts @@ -0,0 +1 @@ +export declare const fromUtf8: (input: string) => Uint8Array; diff --git a/amplify/functions/downloadDocument/node_modules/@smithy/util-utf8/dist-types/ts3.4/index.d.ts b/amplify/functions/downloadDocument/node_modules/@smithy/util-utf8/dist-types/ts3.4/index.d.ts new file mode 100644 index 0000000..ef9761d --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@smithy/util-utf8/dist-types/ts3.4/index.d.ts @@ -0,0 +1,3 @@ +export * from "./fromUtf8"; +export * 
from "./toUint8Array"; +export * from "./toUtf8"; diff --git a/amplify/functions/downloadDocument/node_modules/@smithy/util-utf8/dist-types/ts3.4/toUint8Array.d.ts b/amplify/functions/downloadDocument/node_modules/@smithy/util-utf8/dist-types/ts3.4/toUint8Array.d.ts new file mode 100644 index 0000000..562fe10 --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@smithy/util-utf8/dist-types/ts3.4/toUint8Array.d.ts @@ -0,0 +1 @@ +export declare const toUint8Array: (data: string | ArrayBuffer | ArrayBufferView) => Uint8Array; diff --git a/amplify/functions/downloadDocument/node_modules/@smithy/util-utf8/dist-types/ts3.4/toUtf8.browser.d.ts b/amplify/functions/downloadDocument/node_modules/@smithy/util-utf8/dist-types/ts3.4/toUtf8.browser.d.ts new file mode 100644 index 0000000..33511ad --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@smithy/util-utf8/dist-types/ts3.4/toUtf8.browser.d.ts @@ -0,0 +1,7 @@ +/** + * + * This does not convert non-utf8 strings to utf8, it only passes through strings if + * a string is received instead of a Uint8Array. + * + */ +export declare const toUtf8: (input: Uint8Array | string) => string; diff --git a/amplify/functions/downloadDocument/node_modules/@smithy/util-utf8/dist-types/ts3.4/toUtf8.d.ts b/amplify/functions/downloadDocument/node_modules/@smithy/util-utf8/dist-types/ts3.4/toUtf8.d.ts new file mode 100644 index 0000000..33511ad --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@smithy/util-utf8/dist-types/ts3.4/toUtf8.d.ts @@ -0,0 +1,7 @@ +/** + * + * This does not convert non-utf8 strings to utf8, it only passes through strings if + * a string is received instead of a Uint8Array. 
+ * + */ +export declare const toUtf8: (input: Uint8Array | string) => string; diff --git a/amplify/functions/downloadDocument/node_modules/@smithy/util-utf8/package.json b/amplify/functions/downloadDocument/node_modules/@smithy/util-utf8/package.json new file mode 100644 index 0000000..e33060d --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@smithy/util-utf8/package.json @@ -0,0 +1,66 @@ +{ + "name": "@smithy/util-utf8", + "version": "4.0.0", + "description": "A UTF-8 string <-> UInt8Array converter", + "main": "./dist-cjs/index.js", + "module": "./dist-es/index.js", + "scripts": { + "build": "concurrently 'yarn:build:cjs' 'yarn:build:es' 'yarn:build:types && yarn build:types:downlevel'", + "build:cjs": "node ../../scripts/inline util-utf8", + "build:es": "yarn g:tsc -p tsconfig.es.json", + "build:types": "yarn g:tsc -p tsconfig.types.json", + "build:types:downlevel": "rimraf dist-types/ts3.4 && downlevel-dts dist-types dist-types/ts3.4", + "stage-release": "rimraf ./.release && yarn pack && mkdir ./.release && tar zxvf ./package.tgz --directory ./.release && rm ./package.tgz", + "clean": "rimraf ./dist-* && rimraf *.tsbuildinfo || exit 0", + "lint": "eslint -c ../../.eslintrc.js \"src/**/*.ts\"", + "format": "prettier --config ../../prettier.config.js --ignore-path ../.prettierignore --write \"**/*.{ts,md,json}\"", + "test": "yarn g:vitest run", + "test:watch": "yarn g:vitest watch" + }, + "author": { + "name": "AWS SDK for JavaScript Team", + "url": "https://aws.amazon.com/javascript/" + }, + "license": "Apache-2.0", + "dependencies": { + "@smithy/util-buffer-from": "^4.0.0", + "tslib": "^2.6.2" + }, + "devDependencies": { + "concurrently": "7.0.0", + "downlevel-dts": "0.10.1", + "rimraf": "3.0.2", + "typedoc": "0.23.23" + }, + "types": "./dist-types/index.d.ts", + "engines": { + "node": ">=18.0.0" + }, + "typesVersions": { + "<4.0": { + "dist-types/*": [ + "dist-types/ts3.4/*" + ] + } + }, + "files": [ + "dist-*/**" + ], + "browser": { + 
"./dist-es/fromUtf8": "./dist-es/fromUtf8.browser", + "./dist-es/toUtf8": "./dist-es/toUtf8.browser" + }, + "react-native": {}, + "homepage": "https://github.com/awslabs/smithy-typescript/tree/main/packages/util-utf8", + "repository": { + "type": "git", + "url": "https://github.com/awslabs/smithy-typescript.git", + "directory": "packages/util-utf8" + }, + "typedoc": { + "entryPoint": "src/index.ts" + }, + "publishConfig": { + "directory": ".release/package" + } +} \ No newline at end of file diff --git a/amplify/functions/downloadDocument/node_modules/@smithy/util-waiter/LICENSE b/amplify/functions/downloadDocument/node_modules/@smithy/util-waiter/LICENSE new file mode 100644 index 0000000..7b6491b --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@smithy/util-waiter/LICENSE @@ -0,0 +1,201 @@ +Apache License + Version 2.0, January 2004 + http://www.apache.org/licenses/ + + TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION + + 1. Definitions. + + "License" shall mean the terms and conditions for use, reproduction, + and distribution as defined by Sections 1 through 9 of this document. + + "Licensor" shall mean the copyright owner or entity authorized by + the copyright owner that is granting the License. + + "Legal Entity" shall mean the union of the acting entity and all + other entities that control, are controlled by, or are under common + control with that entity. For the purposes of this definition, + "control" means (i) the power, direct or indirect, to cause the + direction or management of such entity, whether by contract or + otherwise, or (ii) ownership of fifty percent (50%) or more of the + outstanding shares, or (iii) beneficial ownership of such entity. + + "You" (or "Your") shall mean an individual or Legal Entity + exercising permissions granted by this License. 
+ + "Source" form shall mean the preferred form for making modifications, + including but not limited to software source code, documentation + source, and configuration files. + + "Object" form shall mean any form resulting from mechanical + transformation or translation of a Source form, including but + not limited to compiled object code, generated documentation, + and conversions to other media types. + + "Work" shall mean the work of authorship, whether in Source or + Object form, made available under the License, as indicated by a + copyright notice that is included in or attached to the work + (an example is provided in the Appendix below). + + "Derivative Works" shall mean any work, whether in Source or Object + form, that is based on (or derived from) the Work and for which the + editorial revisions, annotations, elaborations, or other modifications + represent, as a whole, an original work of authorship. For the purposes + of this License, Derivative Works shall not include works that remain + separable from, or merely link (or bind by name) to the interfaces of, + the Work and Derivative Works thereof. + + "Contribution" shall mean any work of authorship, including + the original version of the Work and any modifications or additions + to that Work or Derivative Works thereof, that is intentionally + submitted to Licensor for inclusion in the Work by the copyright owner + or by an individual or Legal Entity authorized to submit on behalf of + the copyright owner. 
For the purposes of this definition, "submitted" + means any form of electronic, verbal, or written communication sent + to the Licensor or its representatives, including but not limited to + communication on electronic mailing lists, source code control systems, + and issue tracking systems that are managed by, or on behalf of, the + Licensor for the purpose of discussing and improving the Work, but + excluding communication that is conspicuously marked or otherwise + designated in writing by the copyright owner as "Not a Contribution." + + "Contributor" shall mean Licensor and any individual or Legal Entity + on behalf of whom a Contribution has been received by Licensor and + subsequently incorporated within the Work. + + 2. Grant of Copyright License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + copyright license to reproduce, prepare Derivative Works of, + publicly display, publicly perform, sublicense, and distribute the + Work and such Derivative Works in Source or Object form. + + 3. Grant of Patent License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + (except as stated in this section) patent license to make, have made, + use, offer to sell, sell, import, and otherwise transfer the Work, + where such license applies only to those patent claims licensable + by such Contributor that are necessarily infringed by their + Contribution(s) alone or by combination of their Contribution(s) + with the Work to which such Contribution(s) was submitted. 
If You + institute patent litigation against any entity (including a + cross-claim or counterclaim in a lawsuit) alleging that the Work + or a Contribution incorporated within the Work constitutes direct + or contributory patent infringement, then any patent licenses + granted to You under this License for that Work shall terminate + as of the date such litigation is filed. + + 4. Redistribution. You may reproduce and distribute copies of the + Work or Derivative Works thereof in any medium, with or without + modifications, and in Source or Object form, provided that You + meet the following conditions: + + (a) You must give any other recipients of the Work or + Derivative Works a copy of this License; and + + (b) You must cause any modified files to carry prominent notices + stating that You changed the files; and + + (c) You must retain, in the Source form of any Derivative Works + that You distribute, all copyright, patent, trademark, and + attribution notices from the Source form of the Work, + excluding those notices that do not pertain to any part of + the Derivative Works; and + + (d) If the Work includes a "NOTICE" text file as part of its + distribution, then any Derivative Works that You distribute must + include a readable copy of the attribution notices contained + within such NOTICE file, excluding those notices that do not + pertain to any part of the Derivative Works, in at least one + of the following places: within a NOTICE text file distributed + as part of the Derivative Works; within the Source form or + documentation, if provided along with the Derivative Works; or, + within a display generated by the Derivative Works, if and + wherever such third-party notices normally appear. The contents + of the NOTICE file are for informational purposes only and + do not modify the License. 
You may add Your own attribution + notices within Derivative Works that You distribute, alongside + or as an addendum to the NOTICE text from the Work, provided + that such additional attribution notices cannot be construed + as modifying the License. + + You may add Your own copyright statement to Your modifications and + may provide additional or different license terms and conditions + for use, reproduction, or distribution of Your modifications, or + for any such Derivative Works as a whole, provided Your use, + reproduction, and distribution of the Work otherwise complies with + the conditions stated in this License. + + 5. Submission of Contributions. Unless You explicitly state otherwise, + any Contribution intentionally submitted for inclusion in the Work + by You to the Licensor shall be under the terms and conditions of + this License, without any additional terms or conditions. + Notwithstanding the above, nothing herein shall supersede or modify + the terms of any separate license agreement you may have executed + with Licensor regarding such Contributions. + + 6. Trademarks. This License does not grant permission to use the trade + names, trademarks, service marks, or product names of the Licensor, + except as required for reasonable and customary use in describing the + origin of the Work and reproducing the content of the NOTICE file. + + 7. Disclaimer of Warranty. Unless required by applicable law or + agreed to in writing, Licensor provides the Work (and each + Contributor provides its Contributions) on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or + implied, including, without limitation, any warranties or conditions + of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A + PARTICULAR PURPOSE. You are solely responsible for determining the + appropriateness of using or redistributing the Work and assume any + risks associated with Your exercise of permissions under this License. + + 8. 
Limitation of Liability. In no event and under no legal theory, + whether in tort (including negligence), contract, or otherwise, + unless required by applicable law (such as deliberate and grossly + negligent acts) or agreed to in writing, shall any Contributor be + liable to You for damages, including any direct, indirect, special, + incidental, or consequential damages of any character arising as a + result of this License or out of the use or inability to use the + Work (including but not limited to damages for loss of goodwill, + work stoppage, computer failure or malfunction, or any and all + other commercial damages or losses), even if such Contributor + has been advised of the possibility of such damages. + + 9. Accepting Warranty or Additional Liability. While redistributing + the Work or Derivative Works thereof, You may choose to offer, + and charge a fee for, acceptance of support, warranty, indemnity, + or other liability obligations and/or rights consistent with this + License. However, in accepting such obligations, You may act only + on Your own behalf and on Your sole responsibility, not on behalf + of any other Contributor, and only if You agree to indemnify, + defend, and hold each Contributor harmless for any liability + incurred by, or claims asserted against, such Contributor by reason + of your accepting any such warranty or additional liability. + + END OF TERMS AND CONDITIONS + + APPENDIX: How to apply the Apache License to your work. + + To apply the Apache License to your work, attach the following + boilerplate notice, with the fields enclosed by brackets "{}" + replaced with your own identifying information. (Don't include + the brackets!) The text should be enclosed in the appropriate + comment syntax for the file format. We also recommend that a + file or class name and description of purpose be included on the + same "printed page" as the copyright notice for easier + identification within third-party archives. 
+ + Copyright 2018-2020 Amazon.com, Inc. or its affiliates. All Rights Reserved. + + Licensed under the Apache License, Version 2.0 (the "License"); + you may not use this file except in compliance with the License. + You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + + Unless required by applicable law or agreed to in writing, software + distributed under the License is distributed on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + See the License for the specific language governing permissions and + limitations under the License. \ No newline at end of file diff --git a/amplify/functions/downloadDocument/node_modules/@smithy/util-waiter/README.md b/amplify/functions/downloadDocument/node_modules/@smithy/util-waiter/README.md new file mode 100644 index 0000000..17169a8 --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@smithy/util-waiter/README.md @@ -0,0 +1,10 @@ +# @smithy/util-waiter + +[![NPM version](https://img.shields.io/npm/v/@smithy/util-waiter/latest.svg)](https://www.npmjs.com/package/@smithy/util-waiter) +[![NPM downloads](https://img.shields.io/npm/dm/@smithy/util-waiter.svg)](https://www.npmjs.com/package/@smithy/util-waiter) + +> An internal package + +## Usage + +You probably shouldn't, at least directly. 
diff --git a/amplify/functions/downloadDocument/node_modules/@smithy/util-waiter/dist-cjs/createWaiter.js b/amplify/functions/downloadDocument/node_modules/@smithy/util-waiter/dist-cjs/createWaiter.js new file mode 100644 index 0000000..532e610 --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@smithy/util-waiter/dist-cjs/createWaiter.js @@ -0,0 +1 @@ +module.exports = require("./index.js"); \ No newline at end of file diff --git a/amplify/functions/downloadDocument/node_modules/@smithy/util-waiter/dist-cjs/index.js b/amplify/functions/downloadDocument/node_modules/@smithy/util-waiter/dist-cjs/index.js new file mode 100644 index 0000000..c038e3b --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@smithy/util-waiter/dist-cjs/index.js @@ -0,0 +1,185 @@ +var __defProp = Object.defineProperty; +var __getOwnPropDesc = Object.getOwnPropertyDescriptor; +var __getOwnPropNames = Object.getOwnPropertyNames; +var __hasOwnProp = Object.prototype.hasOwnProperty; +var __name = (target, value) => __defProp(target, "name", { value, configurable: true }); +var __export = (target, all) => { + for (var name in all) + __defProp(target, name, { get: all[name], enumerable: true }); +}; +var __copyProps = (to, from, except, desc) => { + if (from && typeof from === "object" || typeof from === "function") { + for (let key of __getOwnPropNames(from)) + if (!__hasOwnProp.call(to, key) && key !== except) + __defProp(to, key, { get: () => from[key], enumerable: !(desc = __getOwnPropDesc(from, key)) || desc.enumerable }); + } + return to; +}; +var __toCommonJS = (mod) => __copyProps(__defProp({}, "__esModule", { value: true }), mod); + +// src/index.ts +var src_exports = {}; +__export(src_exports, { + WaiterState: () => WaiterState, + checkExceptions: () => checkExceptions, + createWaiter: () => createWaiter, + waiterServiceDefaults: () => waiterServiceDefaults +}); +module.exports = __toCommonJS(src_exports); + +// src/utils/sleep.ts +var sleep = /* @__PURE__ 
*/ __name((seconds) => { + return new Promise((resolve) => setTimeout(resolve, seconds * 1e3)); +}, "sleep"); + +// src/waiter.ts +var waiterServiceDefaults = { + minDelay: 2, + maxDelay: 120 +}; +var WaiterState = /* @__PURE__ */ ((WaiterState2) => { + WaiterState2["ABORTED"] = "ABORTED"; + WaiterState2["FAILURE"] = "FAILURE"; + WaiterState2["SUCCESS"] = "SUCCESS"; + WaiterState2["RETRY"] = "RETRY"; + WaiterState2["TIMEOUT"] = "TIMEOUT"; + return WaiterState2; +})(WaiterState || {}); +var checkExceptions = /* @__PURE__ */ __name((result) => { + if (result.state === "ABORTED" /* ABORTED */) { + const abortError = new Error( + `${JSON.stringify({ + ...result, + reason: "Request was aborted" + })}` + ); + abortError.name = "AbortError"; + throw abortError; + } else if (result.state === "TIMEOUT" /* TIMEOUT */) { + const timeoutError = new Error( + `${JSON.stringify({ + ...result, + reason: "Waiter has timed out" + })}` + ); + timeoutError.name = "TimeoutError"; + throw timeoutError; + } else if (result.state !== "SUCCESS" /* SUCCESS */) { + throw new Error(`${JSON.stringify(result)}`); + } + return result; +}, "checkExceptions"); + +// src/poller.ts +var exponentialBackoffWithJitter = /* @__PURE__ */ __name((minDelay, maxDelay, attemptCeiling, attempt) => { + if (attempt > attemptCeiling) + return maxDelay; + const delay = minDelay * 2 ** (attempt - 1); + return randomInRange(minDelay, delay); +}, "exponentialBackoffWithJitter"); +var randomInRange = /* @__PURE__ */ __name((min, max) => min + Math.random() * (max - min), "randomInRange"); +var runPolling = /* @__PURE__ */ __name(async ({ minDelay, maxDelay, maxWaitTime, abortController, client, abortSignal }, input, acceptorChecks) => { + const observedResponses = {}; + const { state, reason } = await acceptorChecks(client, input); + if (reason) { + const message = createMessageFromResponse(reason); + observedResponses[message] |= 0; + observedResponses[message] += 1; + } + if (state !== "RETRY" /* RETRY */) { + 
return { state, reason, observedResponses }; + } + let currentAttempt = 1; + const waitUntil = Date.now() + maxWaitTime * 1e3; + const attemptCeiling = Math.log(maxDelay / minDelay) / Math.log(2) + 1; + while (true) { + if (abortController?.signal?.aborted || abortSignal?.aborted) { + const message = "AbortController signal aborted."; + observedResponses[message] |= 0; + observedResponses[message] += 1; + return { state: "ABORTED" /* ABORTED */, observedResponses }; + } + const delay = exponentialBackoffWithJitter(minDelay, maxDelay, attemptCeiling, currentAttempt); + if (Date.now() + delay * 1e3 > waitUntil) { + return { state: "TIMEOUT" /* TIMEOUT */, observedResponses }; + } + await sleep(delay); + const { state: state2, reason: reason2 } = await acceptorChecks(client, input); + if (reason2) { + const message = createMessageFromResponse(reason2); + observedResponses[message] |= 0; + observedResponses[message] += 1; + } + if (state2 !== "RETRY" /* RETRY */) { + return { state: state2, reason: reason2, observedResponses }; + } + currentAttempt += 1; + } +}, "runPolling"); +var createMessageFromResponse = /* @__PURE__ */ __name((reason) => { + if (reason?.$responseBodyText) { + return `Deserialization error for body: ${reason.$responseBodyText}`; + } + if (reason?.$metadata?.httpStatusCode) { + if (reason.$response || reason.message) { + return `${reason.$response.statusCode ?? reason.$metadata.httpStatusCode ?? "Unknown"}: ${reason.message}`; + } + return `${reason.$metadata.httpStatusCode}: OK`; + } + return String(reason?.message ?? JSON.stringify(reason) ?? 
"Unknown"); +}, "createMessageFromResponse"); + +// src/utils/validate.ts +var validateWaiterOptions = /* @__PURE__ */ __name((options) => { + if (options.maxWaitTime <= 0) { + throw new Error(`WaiterConfiguration.maxWaitTime must be greater than 0`); + } else if (options.minDelay <= 0) { + throw new Error(`WaiterConfiguration.minDelay must be greater than 0`); + } else if (options.maxDelay <= 0) { + throw new Error(`WaiterConfiguration.maxDelay must be greater than 0`); + } else if (options.maxWaitTime <= options.minDelay) { + throw new Error( + `WaiterConfiguration.maxWaitTime [${options.maxWaitTime}] must be greater than WaiterConfiguration.minDelay [${options.minDelay}] for this waiter` + ); + } else if (options.maxDelay < options.minDelay) { + throw new Error( + `WaiterConfiguration.maxDelay [${options.maxDelay}] must be greater than WaiterConfiguration.minDelay [${options.minDelay}] for this waiter` + ); + } +}, "validateWaiterOptions"); + +// src/createWaiter.ts +var abortTimeout = /* @__PURE__ */ __name(async (abortSignal) => { + return new Promise((resolve) => { + const onAbort = /* @__PURE__ */ __name(() => resolve({ state: "ABORTED" /* ABORTED */ }), "onAbort"); + if (typeof abortSignal.addEventListener === "function") { + abortSignal.addEventListener("abort", onAbort); + } else { + abortSignal.onabort = onAbort; + } + }); +}, "abortTimeout"); +var createWaiter = /* @__PURE__ */ __name(async (options, input, acceptorChecks) => { + const params = { + ...waiterServiceDefaults, + ...options + }; + validateWaiterOptions(params); + const exitConditions = [runPolling(params, input, acceptorChecks)]; + if (options.abortController) { + exitConditions.push(abortTimeout(options.abortController.signal)); + } + if (options.abortSignal) { + exitConditions.push(abortTimeout(options.abortSignal)); + } + return Promise.race(exitConditions); +}, "createWaiter"); +// Annotate the CommonJS export names for ESM import in node: + +0 && (module.exports = { + createWaiter, + 
waiterServiceDefaults, + WaiterState, + checkExceptions +}); + diff --git a/amplify/functions/downloadDocument/node_modules/@smithy/util-waiter/dist-cjs/poller.js b/amplify/functions/downloadDocument/node_modules/@smithy/util-waiter/dist-cjs/poller.js new file mode 100644 index 0000000..532e610 --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@smithy/util-waiter/dist-cjs/poller.js @@ -0,0 +1 @@ +module.exports = require("./index.js"); \ No newline at end of file diff --git a/amplify/functions/downloadDocument/node_modules/@smithy/util-waiter/dist-cjs/utils/index.js b/amplify/functions/downloadDocument/node_modules/@smithy/util-waiter/dist-cjs/utils/index.js new file mode 100644 index 0000000..0440577 --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@smithy/util-waiter/dist-cjs/utils/index.js @@ -0,0 +1 @@ +module.exports = require("../index.js"); \ No newline at end of file diff --git a/amplify/functions/downloadDocument/node_modules/@smithy/util-waiter/dist-cjs/utils/sleep.js b/amplify/functions/downloadDocument/node_modules/@smithy/util-waiter/dist-cjs/utils/sleep.js new file mode 100644 index 0000000..0440577 --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@smithy/util-waiter/dist-cjs/utils/sleep.js @@ -0,0 +1 @@ +module.exports = require("../index.js"); \ No newline at end of file diff --git a/amplify/functions/downloadDocument/node_modules/@smithy/util-waiter/dist-cjs/utils/validate.js b/amplify/functions/downloadDocument/node_modules/@smithy/util-waiter/dist-cjs/utils/validate.js new file mode 100644 index 0000000..0440577 --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@smithy/util-waiter/dist-cjs/utils/validate.js @@ -0,0 +1 @@ +module.exports = require("../index.js"); \ No newline at end of file diff --git a/amplify/functions/downloadDocument/node_modules/@smithy/util-waiter/dist-cjs/waiter.js 
b/amplify/functions/downloadDocument/node_modules/@smithy/util-waiter/dist-cjs/waiter.js new file mode 100644 index 0000000..532e610 --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@smithy/util-waiter/dist-cjs/waiter.js @@ -0,0 +1 @@ +module.exports = require("./index.js"); \ No newline at end of file diff --git a/amplify/functions/downloadDocument/node_modules/@smithy/util-waiter/dist-es/createWaiter.js b/amplify/functions/downloadDocument/node_modules/@smithy/util-waiter/dist-es/createWaiter.js new file mode 100644 index 0000000..59bfdb9 --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@smithy/util-waiter/dist-es/createWaiter.js @@ -0,0 +1,29 @@ +import { runPolling } from "./poller"; +import { validateWaiterOptions } from "./utils"; +import { waiterServiceDefaults, WaiterState } from "./waiter"; +const abortTimeout = async (abortSignal) => { + return new Promise((resolve) => { + const onAbort = () => resolve({ state: WaiterState.ABORTED }); + if (typeof abortSignal.addEventListener === "function") { + abortSignal.addEventListener("abort", onAbort); + } + else { + abortSignal.onabort = onAbort; + } + }); +}; +export const createWaiter = async (options, input, acceptorChecks) => { + const params = { + ...waiterServiceDefaults, + ...options, + }; + validateWaiterOptions(params); + const exitConditions = [runPolling(params, input, acceptorChecks)]; + if (options.abortController) { + exitConditions.push(abortTimeout(options.abortController.signal)); + } + if (options.abortSignal) { + exitConditions.push(abortTimeout(options.abortSignal)); + } + return Promise.race(exitConditions); +}; diff --git a/amplify/functions/downloadDocument/node_modules/@smithy/util-waiter/dist-es/index.js b/amplify/functions/downloadDocument/node_modules/@smithy/util-waiter/dist-es/index.js new file mode 100644 index 0000000..d77f139 --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@smithy/util-waiter/dist-es/index.js @@ -0,0 +1,2 @@ 
+export * from "./createWaiter"; +export * from "./waiter"; diff --git a/amplify/functions/downloadDocument/node_modules/@smithy/util-waiter/dist-es/poller.js b/amplify/functions/downloadDocument/node_modules/@smithy/util-waiter/dist-es/poller.js new file mode 100644 index 0000000..d1a0ec0 --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@smithy/util-waiter/dist-es/poller.js @@ -0,0 +1,59 @@ +import { sleep } from "./utils/sleep"; +import { WaiterState } from "./waiter"; +const exponentialBackoffWithJitter = (minDelay, maxDelay, attemptCeiling, attempt) => { + if (attempt > attemptCeiling) + return maxDelay; + const delay = minDelay * 2 ** (attempt - 1); + return randomInRange(minDelay, delay); +}; +const randomInRange = (min, max) => min + Math.random() * (max - min); +export const runPolling = async ({ minDelay, maxDelay, maxWaitTime, abortController, client, abortSignal }, input, acceptorChecks) => { + const observedResponses = {}; + const { state, reason } = await acceptorChecks(client, input); + if (reason) { + const message = createMessageFromResponse(reason); + observedResponses[message] |= 0; + observedResponses[message] += 1; + } + if (state !== WaiterState.RETRY) { + return { state, reason, observedResponses }; + } + let currentAttempt = 1; + const waitUntil = Date.now() + maxWaitTime * 1000; + const attemptCeiling = Math.log(maxDelay / minDelay) / Math.log(2) + 1; + while (true) { + if (abortController?.signal?.aborted || abortSignal?.aborted) { + const message = "AbortController signal aborted."; + observedResponses[message] |= 0; + observedResponses[message] += 1; + return { state: WaiterState.ABORTED, observedResponses }; + } + const delay = exponentialBackoffWithJitter(minDelay, maxDelay, attemptCeiling, currentAttempt); + if (Date.now() + delay * 1000 > waitUntil) { + return { state: WaiterState.TIMEOUT, observedResponses }; + } + await sleep(delay); + const { state, reason } = await acceptorChecks(client, input); + if (reason) { 
+ const message = createMessageFromResponse(reason); + observedResponses[message] |= 0; + observedResponses[message] += 1; + } + if (state !== WaiterState.RETRY) { + return { state, reason, observedResponses }; + } + currentAttempt += 1; + } +}; +const createMessageFromResponse = (reason) => { + if (reason?.$responseBodyText) { + return `Deserialization error for body: ${reason.$responseBodyText}`; + } + if (reason?.$metadata?.httpStatusCode) { + if (reason.$response || reason.message) { + return `${reason.$response.statusCode ?? reason.$metadata.httpStatusCode ?? "Unknown"}: ${reason.message}`; + } + return `${reason.$metadata.httpStatusCode}: OK`; + } + return String(reason?.message ?? JSON.stringify(reason) ?? "Unknown"); +}; diff --git a/amplify/functions/downloadDocument/node_modules/@smithy/util-waiter/dist-es/utils/index.js b/amplify/functions/downloadDocument/node_modules/@smithy/util-waiter/dist-es/utils/index.js new file mode 100644 index 0000000..e15a156 --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@smithy/util-waiter/dist-es/utils/index.js @@ -0,0 +1,2 @@ +export * from "./sleep"; +export * from "./validate"; diff --git a/amplify/functions/downloadDocument/node_modules/@smithy/util-waiter/dist-es/utils/sleep.js b/amplify/functions/downloadDocument/node_modules/@smithy/util-waiter/dist-es/utils/sleep.js new file mode 100644 index 0000000..789205d --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@smithy/util-waiter/dist-es/utils/sleep.js @@ -0,0 +1,3 @@ +export const sleep = (seconds) => { + return new Promise((resolve) => setTimeout(resolve, seconds * 1000)); +}; diff --git a/amplify/functions/downloadDocument/node_modules/@smithy/util-waiter/dist-es/utils/validate.js b/amplify/functions/downloadDocument/node_modules/@smithy/util-waiter/dist-es/utils/validate.js new file mode 100644 index 0000000..e094ea7 --- /dev/null +++ 
b/amplify/functions/downloadDocument/node_modules/@smithy/util-waiter/dist-es/utils/validate.js @@ -0,0 +1,17 @@ +export const validateWaiterOptions = (options) => { + if (options.maxWaitTime <= 0) { + throw new Error(`WaiterConfiguration.maxWaitTime must be greater than 0`); + } + else if (options.minDelay <= 0) { + throw new Error(`WaiterConfiguration.minDelay must be greater than 0`); + } + else if (options.maxDelay <= 0) { + throw new Error(`WaiterConfiguration.maxDelay must be greater than 0`); + } + else if (options.maxWaitTime <= options.minDelay) { + throw new Error(`WaiterConfiguration.maxWaitTime [${options.maxWaitTime}] must be greater than WaiterConfiguration.minDelay [${options.minDelay}] for this waiter`); + } + else if (options.maxDelay < options.minDelay) { + throw new Error(`WaiterConfiguration.maxDelay [${options.maxDelay}] must be greater than WaiterConfiguration.minDelay [${options.minDelay}] for this waiter`); + } +}; diff --git a/amplify/functions/downloadDocument/node_modules/@smithy/util-waiter/dist-es/waiter.js b/amplify/functions/downloadDocument/node_modules/@smithy/util-waiter/dist-es/waiter.js new file mode 100644 index 0000000..158c46a --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@smithy/util-waiter/dist-es/waiter.js @@ -0,0 +1,34 @@ +export const waiterServiceDefaults = { + minDelay: 2, + maxDelay: 120, +}; +export var WaiterState; +(function (WaiterState) { + WaiterState["ABORTED"] = "ABORTED"; + WaiterState["FAILURE"] = "FAILURE"; + WaiterState["SUCCESS"] = "SUCCESS"; + WaiterState["RETRY"] = "RETRY"; + WaiterState["TIMEOUT"] = "TIMEOUT"; +})(WaiterState || (WaiterState = {})); +export const checkExceptions = (result) => { + if (result.state === WaiterState.ABORTED) { + const abortError = new Error(`${JSON.stringify({ + ...result, + reason: "Request was aborted", + })}`); + abortError.name = "AbortError"; + throw abortError; + } + else if (result.state === WaiterState.TIMEOUT) { + const timeoutError = new 
Error(`${JSON.stringify({ + ...result, + reason: "Waiter has timed out", + })}`); + timeoutError.name = "TimeoutError"; + throw timeoutError; + } + else if (result.state !== WaiterState.SUCCESS) { + throw new Error(`${JSON.stringify(result)}`); + } + return result; +}; diff --git a/amplify/functions/downloadDocument/node_modules/@smithy/util-waiter/dist-types/createWaiter.d.ts b/amplify/functions/downloadDocument/node_modules/@smithy/util-waiter/dist-types/createWaiter.d.ts new file mode 100644 index 0000000..1695802 --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@smithy/util-waiter/dist-types/createWaiter.d.ts @@ -0,0 +1,11 @@ +import { WaiterOptions, WaiterResult } from "./waiter"; +/** + * Create a waiter promise that only resolves when: + * 1. Abort controller is signaled + * 2. Max wait time is reached + * 3. `acceptorChecks` succeeds, or fails + * Otherwise, it invokes `acceptorChecks` with exponential-backoff delay. + * + * @internal + */ +export declare const createWaiter: (options: WaiterOptions, input: Input, acceptorChecks: (client: Client, input: Input) => Promise) => Promise; diff --git a/amplify/functions/downloadDocument/node_modules/@smithy/util-waiter/dist-types/index.d.ts b/amplify/functions/downloadDocument/node_modules/@smithy/util-waiter/dist-types/index.d.ts new file mode 100644 index 0000000..d77f139 --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@smithy/util-waiter/dist-types/index.d.ts @@ -0,0 +1,2 @@ +export * from "./createWaiter"; +export * from "./waiter"; diff --git a/amplify/functions/downloadDocument/node_modules/@smithy/util-waiter/dist-types/poller.d.ts b/amplify/functions/downloadDocument/node_modules/@smithy/util-waiter/dist-types/poller.d.ts new file mode 100644 index 0000000..4008957 --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@smithy/util-waiter/dist-types/poller.d.ts @@ -0,0 +1,10 @@ +import { WaiterOptions, WaiterResult } from "./waiter"; +/** + * Function 
that runs polling as part of waiters. This will make one inital attempt and then + * subsequent attempts with an increasing delay. + * @param params - options passed to the waiter. + * @param client - AWS SDK Client + * @param input - client input + * @param stateChecker - function that checks the acceptor states on each poll. + */ +export declare const runPolling: ({ minDelay, maxDelay, maxWaitTime, abortController, client, abortSignal }: WaiterOptions, input: Input, acceptorChecks: (client: Client, input: Input) => Promise) => Promise; diff --git a/amplify/functions/downloadDocument/node_modules/@smithy/util-waiter/dist-types/ts3.4/createWaiter.d.ts b/amplify/functions/downloadDocument/node_modules/@smithy/util-waiter/dist-types/ts3.4/createWaiter.d.ts new file mode 100644 index 0000000..f9b3242 --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@smithy/util-waiter/dist-types/ts3.4/createWaiter.d.ts @@ -0,0 +1,11 @@ +import { WaiterOptions, WaiterResult } from "./waiter"; +/** + * Create a waiter promise that only resolves when: + * 1. Abort controller is signaled + * 2. Max wait time is reached + * 3. `acceptorChecks` succeeds, or fails + * Otherwise, it invokes `acceptorChecks` with exponential-backoff delay. 
+ * + * @internal + */ +export declare const createWaiter: (options: WaiterOptions, input: Input, acceptorChecks: (client: Client, input: Input) => Promise) => Promise; diff --git a/amplify/functions/downloadDocument/node_modules/@smithy/util-waiter/dist-types/ts3.4/index.d.ts b/amplify/functions/downloadDocument/node_modules/@smithy/util-waiter/dist-types/ts3.4/index.d.ts new file mode 100644 index 0000000..be143d5 --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@smithy/util-waiter/dist-types/ts3.4/index.d.ts @@ -0,0 +1,2 @@ +export * from "./createWaiter"; +export * from "./waiter"; diff --git a/amplify/functions/downloadDocument/node_modules/@smithy/util-waiter/dist-types/ts3.4/poller.d.ts b/amplify/functions/downloadDocument/node_modules/@smithy/util-waiter/dist-types/ts3.4/poller.d.ts new file mode 100644 index 0000000..8b33c94 --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@smithy/util-waiter/dist-types/ts3.4/poller.d.ts @@ -0,0 +1,10 @@ +import { WaiterOptions, WaiterResult } from "./waiter"; +/** + * Function that runs polling as part of waiters. This will make one inital attempt and then + * subsequent attempts with an increasing delay. + * @param params - options passed to the waiter. + * @param client - AWS SDK Client + * @param input - client input + * @param stateChecker - function that checks the acceptor states on each poll. 
+ */ +export declare const runPolling: ({ minDelay, maxDelay, maxWaitTime, abortController, client, abortSignal }: WaiterOptions, input: Input, acceptorChecks: (client: Client, input: Input) => Promise) => Promise; diff --git a/amplify/functions/downloadDocument/node_modules/@smithy/util-waiter/dist-types/ts3.4/utils/index.d.ts b/amplify/functions/downloadDocument/node_modules/@smithy/util-waiter/dist-types/ts3.4/utils/index.d.ts new file mode 100644 index 0000000..974384c --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@smithy/util-waiter/dist-types/ts3.4/utils/index.d.ts @@ -0,0 +1,8 @@ +/** + * @internal + */ +export * from "./sleep"; +/** + * @internal + */ +export * from "./validate"; diff --git a/amplify/functions/downloadDocument/node_modules/@smithy/util-waiter/dist-types/ts3.4/utils/sleep.d.ts b/amplify/functions/downloadDocument/node_modules/@smithy/util-waiter/dist-types/ts3.4/utils/sleep.d.ts new file mode 100644 index 0000000..f53553b --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@smithy/util-waiter/dist-types/ts3.4/utils/sleep.d.ts @@ -0,0 +1,4 @@ +/** + * @internal + */ +export declare const sleep: (seconds: number) => Promise; diff --git a/amplify/functions/downloadDocument/node_modules/@smithy/util-waiter/dist-types/ts3.4/utils/validate.d.ts b/amplify/functions/downloadDocument/node_modules/@smithy/util-waiter/dist-types/ts3.4/utils/validate.d.ts new file mode 100644 index 0000000..73d79b0 --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@smithy/util-waiter/dist-types/ts3.4/utils/validate.d.ts @@ -0,0 +1,8 @@ +import { WaiterOptions } from "../waiter"; +/** + * @internal + * + * Validates that waiter options are passed correctly + * @param options - a waiter configuration object + */ +export declare const validateWaiterOptions: (options: WaiterOptions) => void; diff --git a/amplify/functions/downloadDocument/node_modules/@smithy/util-waiter/dist-types/ts3.4/waiter.d.ts 
b/amplify/functions/downloadDocument/node_modules/@smithy/util-waiter/dist-types/ts3.4/waiter.d.ts new file mode 100644 index 0000000..f685ce4 --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@smithy/util-waiter/dist-types/ts3.4/waiter.d.ts @@ -0,0 +1,49 @@ +import { WaiterConfiguration as WaiterConfiguration__ } from "@smithy/types"; +/** + * @internal + */ +export interface WaiterConfiguration extends WaiterConfiguration__ { +} +/** + * @internal + */ +export declare const waiterServiceDefaults: { + minDelay: number; + maxDelay: number; +}; +/** + * @internal + */ +export type WaiterOptions = WaiterConfiguration & Required, "minDelay" | "maxDelay">>; +/** + * @internal + */ +export declare enum WaiterState { + ABORTED = "ABORTED", + FAILURE = "FAILURE", + SUCCESS = "SUCCESS", + RETRY = "RETRY", + TIMEOUT = "TIMEOUT" +} +/** + * @internal + */ +export type WaiterResult = { + state: WaiterState; + /** + * (optional) Indicates a reason for why a waiter has reached its state. + */ + reason?: any; + /** + * Responses observed by the waiter during its polling, where the value + * is the count. 
+ */ + observedResponses?: Record; +}; +/** + * @internal + * + * Handles and throws exceptions resulting from the waiterResult + * @param result - WaiterResult + */ +export declare const checkExceptions: (result: WaiterResult) => WaiterResult; diff --git a/amplify/functions/downloadDocument/node_modules/@smithy/util-waiter/dist-types/utils/index.d.ts b/amplify/functions/downloadDocument/node_modules/@smithy/util-waiter/dist-types/utils/index.d.ts new file mode 100644 index 0000000..b9a3205 --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@smithy/util-waiter/dist-types/utils/index.d.ts @@ -0,0 +1,8 @@ +/** + * @internal + */ +export * from "./sleep"; +/** + * @internal + */ +export * from "./validate"; diff --git a/amplify/functions/downloadDocument/node_modules/@smithy/util-waiter/dist-types/utils/sleep.d.ts b/amplify/functions/downloadDocument/node_modules/@smithy/util-waiter/dist-types/utils/sleep.d.ts new file mode 100644 index 0000000..e5d9f73 --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@smithy/util-waiter/dist-types/utils/sleep.d.ts @@ -0,0 +1,4 @@ +/** + * @internal + */ +export declare const sleep: (seconds: number) => Promise; diff --git a/amplify/functions/downloadDocument/node_modules/@smithy/util-waiter/dist-types/utils/validate.d.ts b/amplify/functions/downloadDocument/node_modules/@smithy/util-waiter/dist-types/utils/validate.d.ts new file mode 100644 index 0000000..a847eee --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@smithy/util-waiter/dist-types/utils/validate.d.ts @@ -0,0 +1,8 @@ +import { WaiterOptions } from "../waiter"; +/** + * @internal + * + * Validates that waiter options are passed correctly + * @param options - a waiter configuration object + */ +export declare const validateWaiterOptions: (options: WaiterOptions) => void; diff --git a/amplify/functions/downloadDocument/node_modules/@smithy/util-waiter/dist-types/waiter.d.ts 
b/amplify/functions/downloadDocument/node_modules/@smithy/util-waiter/dist-types/waiter.d.ts new file mode 100644 index 0000000..e0c690f --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@smithy/util-waiter/dist-types/waiter.d.ts @@ -0,0 +1,49 @@ +import { WaiterConfiguration as WaiterConfiguration__ } from "@smithy/types"; +/** + * @internal + */ +export interface WaiterConfiguration extends WaiterConfiguration__ { +} +/** + * @internal + */ +export declare const waiterServiceDefaults: { + minDelay: number; + maxDelay: number; +}; +/** + * @internal + */ +export type WaiterOptions = WaiterConfiguration & Required, "minDelay" | "maxDelay">>; +/** + * @internal + */ +export declare enum WaiterState { + ABORTED = "ABORTED", + FAILURE = "FAILURE", + SUCCESS = "SUCCESS", + RETRY = "RETRY", + TIMEOUT = "TIMEOUT" +} +/** + * @internal + */ +export type WaiterResult = { + state: WaiterState; + /** + * (optional) Indicates a reason for why a waiter has reached its state. + */ + reason?: any; + /** + * Responses observed by the waiter during its polling, where the value + * is the count. 
+ */ + observedResponses?: Record; +}; +/** + * @internal + * + * Handles and throws exceptions resulting from the waiterResult + * @param result - WaiterResult + */ +export declare const checkExceptions: (result: WaiterResult) => WaiterResult; diff --git a/amplify/functions/downloadDocument/node_modules/@smithy/util-waiter/package.json b/amplify/functions/downloadDocument/node_modules/@smithy/util-waiter/package.json new file mode 100644 index 0000000..2706fd7 --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@smithy/util-waiter/package.json @@ -0,0 +1,62 @@ +{ + "name": "@smithy/util-waiter", + "version": "4.0.3", + "description": "Shared utilities for client waiters for the AWS SDK", + "dependencies": { + "@smithy/abort-controller": "^4.0.2", + "@smithy/types": "^4.2.0", + "tslib": "^2.6.2" + }, + "scripts": { + "build": "concurrently 'yarn:build:cjs' 'yarn:build:es' 'yarn:build:types && yarn build:types:downlevel'", + "build:cjs": "node ../../scripts/inline util-waiter", + "build:es": "yarn g:tsc -p tsconfig.es.json", + "build:types": "yarn g:tsc -p tsconfig.types.json", + "build:types:downlevel": "rimraf dist-types/ts3.4 && downlevel-dts dist-types dist-types/ts3.4", + "stage-release": "rimraf ./.release && yarn pack && mkdir ./.release && tar zxvf ./package.tgz --directory ./.release && rm ./package.tgz", + "clean": "rimraf ./dist-* && rimraf *.tsbuildinfo || exit 0", + "lint": "eslint -c ../../.eslintrc.js \"src/**/*.ts\"", + "format": "prettier --config ../../prettier.config.js --ignore-path ../../.prettierignore --write \"**/*.{ts,md,json}\"", + "test": "yarn g:vitest run", + "test:watch": "yarn g:vitest watch" + }, + "author": { + "name": "AWS SDK for JavaScript Team", + "url": "https://aws.amazon.com/javascript/" + }, + "license": "Apache-2.0", + "main": "./dist-cjs/index.js", + "module": "./dist-es/index.js", + "types": "./dist-types/index.d.ts", + "engines": { + "node": ">=18.0.0" + }, + "typesVersions": { + "<4.0": { + 
"dist-types/*": [ + "dist-types/ts3.4/*" + ] + } + }, + "files": [ + "dist-*/**" + ], + "homepage": "https://github.com/smithy-lang/smithy-typescript/tree/main/packages/util-waiter", + "repository": { + "type": "git", + "url": "https://github.com/smithy-lang/smithy-typescript.git", + "directory": "packages/util-waiter" + }, + "devDependencies": { + "concurrently": "7.0.0", + "downlevel-dts": "0.10.1", + "rimraf": "3.0.2", + "typedoc": "0.23.23" + }, + "typedoc": { + "entryPoint": "src/index.ts" + }, + "publishConfig": { + "directory": ".release/package" + } +} \ No newline at end of file diff --git a/amplify/functions/downloadDocument/node_modules/@types/uuid/LICENSE b/amplify/functions/downloadDocument/node_modules/@types/uuid/LICENSE new file mode 100644 index 0000000..9e841e7 --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@types/uuid/LICENSE @@ -0,0 +1,21 @@ + MIT License + + Copyright (c) Microsoft Corporation. + + Permission is hereby granted, free of charge, to any person obtaining a copy + of this software and associated documentation files (the "Software"), to deal + in the Software without restriction, including without limitation the rights + to use, copy, modify, merge, publish, distribute, sublicense, and/or sell + copies of the Software, and to permit persons to whom the Software is + furnished to do so, subject to the following conditions: + + The above copyright notice and this permission notice shall be included in all + copies or substantial portions of the Software. + + THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR + IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, + FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. 
IN NO EVENT SHALL THE + AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER + LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, + OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE + SOFTWARE diff --git a/amplify/functions/downloadDocument/node_modules/@types/uuid/README.md b/amplify/functions/downloadDocument/node_modules/@types/uuid/README.md new file mode 100644 index 0000000..4cd2a58 --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@types/uuid/README.md @@ -0,0 +1,15 @@ +# Installation +> `npm install --save @types/uuid` + +# Summary +This package contains type definitions for uuid (https://github.com/uuidjs/uuid). + +# Details +Files were exported from https://github.com/DefinitelyTyped/DefinitelyTyped/tree/master/types/uuid. + +### Additional Details + * Last updated: Thu, 25 Jan 2024 23:07:19 GMT + * Dependencies: none + +# Credits +These definitions were written by [Oliver Hoffmann](https://github.com/iamolivinius), [Felipe Ochoa](https://github.com/felipeochoa), [Chris Barth](https://github.com/cjbarth), [Linus Unnebäck](https://github.com/LinusU), and [Christoph Tavan](https://github.com/ctavan). 
diff --git a/amplify/functions/downloadDocument/node_modules/@types/uuid/index.d.mts b/amplify/functions/downloadDocument/node_modules/@types/uuid/index.d.mts new file mode 100644 index 0000000..47a6599 --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@types/uuid/index.d.mts @@ -0,0 +1,12 @@ +import uuid from "./index.js"; +export import v1 = uuid.v1; +export import v3 = uuid.v3; +export import v4 = uuid.v4; +export import v5 = uuid.v5; +export import NIL = uuid.NIL; +export import version = uuid.version; +export import validate = uuid.validate; +export import stringify = uuid.stringify; +export import parse = uuid.parse; +export import V1Options = uuid.V1Options; +export import V4Options = uuid.V4Options; diff --git a/amplify/functions/downloadDocument/node_modules/@types/uuid/index.d.ts b/amplify/functions/downloadDocument/node_modules/@types/uuid/index.d.ts new file mode 100644 index 0000000..2f7d813 --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@types/uuid/index.d.ts @@ -0,0 +1,86 @@ +// disable automatic export +export {}; + +// Uses ArrayLike to admit Uint8 and co. 
+type OutputBuffer = ArrayLike; +type InputBuffer = ArrayLike; + +interface RandomOptions { + /** `Array` of 16 random bytes (0-255) */ + random?: InputBuffer | undefined; +} +interface RngOptions { + /** Alternative to `options.random`, a `Function` that returns an `Array` of 16 random bytes (0-255) */ + rng?: (() => InputBuffer) | undefined; +} + +interface V1BaseOptions { + /** RFC "node" field as an `Array[6]` of byte values (per 4.1.6) */ + node?: InputBuffer | undefined; + /** RFC "clock sequence" as a `Number` between 0 - 0x3fff */ + clockseq?: number | undefined; + /** RFC "timestamp" field (`Number` of milliseconds, unix epoch) */ + msecs?: number | Date | undefined; + /** RFC "timestamp" field (`Number` of nanoseconds to add to msecs, should be 0-10,000) */ + nsecs?: number | undefined; +} +interface V1RandomOptions extends V1BaseOptions, RandomOptions {} +interface V1RngOptions extends V1BaseOptions, RngOptions {} + +export type V1Options = V1RandomOptions | V1RngOptions; +export type V4Options = RandomOptions | RngOptions; + +type v1String = (options?: V1Options) => string; +type v1Buffer = (options: V1Options | null | undefined, buffer: T, offset?: number) => T; +type v1 = v1Buffer & v1String; + +type v4String = (options?: V4Options) => string; +type v4Buffer = (options: V4Options | null | undefined, buffer: T, offset?: number) => T; +type v4 = v4Buffer & v4String; + +type v3String = (name: string | InputBuffer, namespace: string | InputBuffer) => string; +type v3Buffer = ( + name: string | InputBuffer, + namespace: string | InputBuffer, + buffer: T, + offset?: number, +) => T; +interface v3Static { + // https://github.com/uuidjs/uuid/blob/master/src/v35.js#L16 + DNS: string; + // https://github.com/uuidjs/uuid/blob/master/src/v35.js#L17 + URL: string; +} +type v3 = v3Buffer & v3String & v3Static; + +type v5String = (name: string | InputBuffer, namespace: string | InputBuffer) => string; +type v5Buffer = ( + name: string | InputBuffer, + namespace: 
string | InputBuffer, + buffer: T, + offset?: number, +) => T; +interface v5Static { + // https://github.com/uuidjs/uuid/blob/master/src/v35.js#L16 + DNS: string; + // https://github.com/uuidjs/uuid/blob/master/src/v35.js#L17 + URL: string; +} +type v5 = v5Buffer & v5String & v5Static; + +type NIL = string; + +type parse = (uuid: string) => Uint8Array; +type stringify = (buffer: InputBuffer, offset?: number) => string; +type validate = (uuid: string) => boolean; +type version = (uuid: string) => number; + +export const NIL: NIL; +export const parse: parse; +export const stringify: stringify; +export const v1: v1; +export const v3: v3; +export const v4: v4; +export const v5: v5; +export const validate: validate; +export const version: version; diff --git a/amplify/functions/downloadDocument/node_modules/@types/uuid/package.json b/amplify/functions/downloadDocument/node_modules/@types/uuid/package.json new file mode 100644 index 0000000..09959ce --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/@types/uuid/package.json @@ -0,0 +1,54 @@ +{ + "name": "@types/uuid", + "version": "9.0.8", + "description": "TypeScript definitions for uuid", + "homepage": "https://github.com/DefinitelyTyped/DefinitelyTyped/tree/master/types/uuid", + "license": "MIT", + "contributors": [ + { + "name": "Oliver Hoffmann", + "githubUsername": "iamolivinius", + "url": "https://github.com/iamolivinius" + }, + { + "name": "Felipe Ochoa", + "githubUsername": "felipeochoa", + "url": "https://github.com/felipeochoa" + }, + { + "name": "Chris Barth", + "githubUsername": "cjbarth", + "url": "https://github.com/cjbarth" + }, + { + "name": "Linus Unnebäck", + "githubUsername": "LinusU", + "url": "https://github.com/LinusU" + }, + { + "name": "Christoph Tavan", + "githubUsername": "ctavan", + "url": "https://github.com/ctavan" + } + ], + "main": "", + "types": "index.d.ts", + "exports": { + "./package.json": "./package.json", + ".": { + "types": { + "import": "./index.d.mts", + 
"default": "./index.d.ts" + } + } + }, + "repository": { + "type": "git", + "url": "https://github.com/DefinitelyTyped/DefinitelyTyped.git", + "directory": "types/uuid" + }, + "scripts": {}, + "dependencies": {}, + "typesPublisherContentHash": "ee6ba7ad17fbbead7a508faf213a9ad0f49c12929e8c6b0f05fb35129bc72d61", + "typeScriptVersion": "4.6" +} \ No newline at end of file diff --git a/amplify/functions/downloadDocument/node_modules/bowser/CHANGELOG.md b/amplify/functions/downloadDocument/node_modules/bowser/CHANGELOG.md new file mode 100644 index 0000000..260a03d --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/bowser/CHANGELOG.md @@ -0,0 +1,218 @@ +# Bowser Changelog + +### 2.11.0 (Sep 12, 2020) +- [ADD] Added support for aliases in `Parser#is` method (#437) +- [ADD] Added more typings (#438, #427) +- [ADD] Added support for MIUI Browserr (#436) + +### 2.10.0 (Jul 9, 2020) +- [FIX] Fix for Firefox detection on iOS 13 [#415] +- [FIX] Fixes for typings.d.ts [#409] +- [FIX] Updated development dependencies + +### 2.9.0 (Jan 28, 2020) +- [ADD] Export more methods and constants via .d.ts [#388], [#390] + +### 2.8.1 (Dec 26, 2019) +- [FIX] Reverted [#382] as it broke build + +### 2.8.0 (Dec 26, 2019) +- [ADD] Add polyfills for Array.find & Object.assign [#383] +- [ADD] Export constants with types.d.ts [#382] +- [FIX] Add support for WeChat on Windows [#381] +- [FIX] Fix detection of Firefox on iPad [#379] +- [FIX] Add detection of Electron [#375] +- [FIX] Updated dev-dependencies + +### 2.7.0 (Oct 2, 2019) +- [FIX] Add support for QQ Browser [#362] +- [FIX] Add support for GSA [#364] +- [FIX] Updated dependencies + +### 2.6.0 (Sep 6, 2019) +- [ADD] Define "module" export in package.json [#354] +- [FIX] Fix Tablet PC detection [#334] + +### 2.5.4 (Sep 2, 2019) +- [FIX] Exclude docs from the npm package [#349] + +### 2.5.3 (Aug 4, 2019) +- [FIX] Add MacOS names support [#338] +- [FIX] Point typings.d.ts from package.json [#341] +- [FIX] Upgrade 
dependencies + +### 2.5.2 (July 17, 2019) +- [FIX] Fixes the bug undefined method because of failed build (#335) + +### 2.5.1 (July 17, 2019) +- [FIX] Fixes the bug with a custom Error class (#335) +- [FIX] Fixes the settings for Babel to reduce the bundle size (#259) + +### 2.5.0 (July 16, 2019) +- [ADD] Add constant output so that users can quickly get all types (#325) +- [FIX] Add support for Roku OS (#332) +- [FIX] Update devDependencies +- [FIX] Fix docs, README and added funding information + +### 2.4.0 (May 3, 2019) +- [FIX] Update regexp for generic browsers (#310) +- [FIX] Fix issues with module.exports (#318) +- [FIX] Update devDependencies (#316, #321, #322) +- [FIX] Fix docs (#320) + +### 2.3.0 (April 14, 2019) +- [ADD] Add support for Blink-based MS Edge (#311) +- [ADD] Add more types for TS (#289) +- [FIX] Update dev-dependencies +- [FIX] Update docs + +### 2.2.1 (April 12, 2019) +- [ADD] Add an alias for Samsung Internet +- [FIX] Fix browser name detection for browsers without an alias (#313) + +### 2.2.0 (April 7, 2019) +- [ADD] Add short aliases for browser names (#295) +- [FIX] Fix Yandex Browser version detection (#308) + +### 2.1.2 (March 6, 2019) +- [FIX] Fix buggy `getFirstMatch` reference + +### 2.1.1 (March 6, 2019) +- [ADD] Add detection of PlayStation 4 (#291) +- [ADD] Deploy docs on GH Pages (#293) +- [FIX] Fix files extensions for importing (#294) +- [FIX] Fix docs (#295) + +### 2.1.0 (January 24, 2019) +- [ADD] Add new `Parser.getEngineName()` method (#288) +- [ADD] Add detection of ChromeOS (#287) +- [FIX] Fix README + +### 2.0.0 (January 19, 2019) +- [ADD] Support a non strict equality in `Parser.satisfies()` (#275) +- [ADD] Add Android versions names (#276) +- [ADD] Add a typings file (#277) +- [ADD] Added support for Googlebot recognition (#278) +- [FIX] Update building tools, avoid security issues + +### 2.0.0-beta.3 (September 15, 2018) +- [FIX] Fix Chrome Mobile detection (#253) +- [FIX] Use built bowser for CI (#252) +- [FIX] 
Update babel-plugin-add-module-exports (#251) + +### 2.0.0-beta.2 (September 9, 2018) +- [FIX] Fix failing comparing version through `Parser.satisfies` (#243) +- [FIX] Fix travis testing, include eslint into CI testing +- [FIX] Add support for Maxthon desktop browser (#246) +- [FIX] Add support for Swing browser (#248) +- [DOCS] Regenerate docs + +### 2.0.0-beta.1 (August 18, 2018) +- [ADD] Add loose version comparison to `Parser.compareVersion()` and `Parser.satisfies()` +- [CHORE] Add CONTRIBUTING.md +- [DOCS] Regenerate docs + +### 2.0.0-alpha.4 (August 2, 2018) +- [DOCS] Fix usage docs (#238) +- [CHANGE] Make `./es5.js` the main file of the package (#239) + +### 2.0.0-alpha.3 (July 22, 2018) +- [CHANGE] Rename split and rename `compiled.js` to `es5.js` and `bundled.js` (#231, #236, #237) +- [ADD] Add `Parser.some` (#235) + +### 2.0.0-alpha.2 (July 17, 2018) +- [CHANGE] Make `src/bowser` main file instead of the bundled one +- [CHANGE] Move the bundled file to the root of the package to make it possible to `require('bowser/compiled')` (#231) +- [REMOVE] Remove `typings.d.ts` before stable release (#232) +- [FIX] Improve Nexus devices detection (#233) + +### 2.0.0-alpha.1 (July 9, 2018) +- [ADD] `Bowser.getParser()` +- [ADD] `Bowser.parse` +- [ADD] `Parser` class which describes parsing process +- [CHANGE] Change bowser's returning object +- [REMOVE] Remove bower support + +### 1.9.4 (June 28, 2018) +- [FIX] Fix NAVER Whale browser detection (#220) +- [FIX] Fix MZ Browser browser detection (#219) +- [FIX] Fix Firefox Focus browser detection (#191) +- [FIX] Fix webOS browser detection (#186) + +### 1.9.3 (March 12, 2018) +- [FIX] Fix `typings.d.ts` — add `ipad`, `iphone`, `ipod` flags to the interface + +### 1.9.2 (February 5, 2018) +- [FIX] Fix `typings.d.ts` — add `osname` flag to the interface + +### 1.9.1 (December 22, 2017) +- [FIX] Fix `typings.d.ts` — add `chromium` flag to the interface + +### 1.9.0 (December 20, 2017) +- [ADD] Add a public method 
`.detect()` (#205) +- [DOCS] Fix description of `chromium` flag in docs (#206) + +### 1.8.1 (October 7, 2017) +- [FIX] Fix detection of MS Edge on Android and iOS (#201) + +### 1.8.0 (October 7, 2017) +- [ADD] Add `osname` into result object (#200) + +### 1.7.3 (August 30, 2017) +- [FIX] Fix detection of Chrome on Android 8 OPR6 (#193) + +### 1.7.2 (August 17, 2017) +- [FIX] Fix typings.d.ts according to #185 + +### 1.7.1 (July 13, 2017) +- [ADD] Fix detecting of Tablet PC as tablet (#183) + +### 1.7.0 (May 18, 2017) +- [ADD] Add OS version support for Windows and macOS (#178) + +### 1.6.0 (December 5, 2016) +- [ADD] Add some tests for Windows devices (#89) +- [ADD] Add `root` to initialization process (#170) +- [FIX] Upgrade .travis.yml config + +### 1.5.0 (October 31, 2016) +- [ADD] Throw an error when `minVersion` map has not a string as a version and fix readme (#165) +- [FIX] Fix truly detection of Windows Phones (#167) + +### 1.4.6 (September 19, 2016) +- [FIX] Fix mobile Opera's version detection on Android +- [FIX] Fix typescript typings — add `mobile` and `tablet` flags +- [DOC] Fix description of `bowser.check` + +### 1.4.5 (August 30, 2016) + +- [FIX] Add support of Samsung Internet for Android +- [FIX] Fix case when `navigator.userAgent` is `undefined` +- [DOC] Add information about `strictMode` in `check` function +- [DOC] Consistent use of `bowser` variable in the README + +### 1.4.4 (August 10, 2016) + +- [FIX] Fix AMD `define` call — pass name to the function + +### 1.4.3 (July 27, 2016) + +- [FIX] Fix error `Object doesn't support this property or method` on IE8 + +### 1.4.2 (July 26, 2016) + +- [FIX] Fix missing `isUnsupportedBrowser` in typings description +- [DOC] Fix `check`'s declaration in README + +### 1.4.1 (July 7, 2016) + +- [FIX] Fix `strictMode` logic for `isUnsupportedBrowser` + +### 1.4.0 (June 28, 2016) + +- [FEATURE] Add `bowser.compareVersions` method +- [FEATURE] Add `bowser.isUnsupportedBrowser` method +- [FEATURE] Add 
`bowser.check` method +- [DOC] Changelog started +- [DOC] Add API section to README +- [FIX] Fix detection of browser type (A/C/X) for Chromium diff --git a/amplify/functions/downloadDocument/node_modules/bowser/LICENSE b/amplify/functions/downloadDocument/node_modules/bowser/LICENSE new file mode 100644 index 0000000..94085f0 --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/bowser/LICENSE @@ -0,0 +1,39 @@ +Copyright 2015, Dustin Diaz (the "Original Author") +All rights reserved. + +MIT License + +Permission is hereby granted, free of charge, to any person +obtaining a copy of this software and associated documentation +files (the "Software"), to deal in the Software without +restriction, including without limitation the rights to use, +copy, modify, merge, publish, distribute, sublicense, and/or sell +copies of the Software, and to permit persons to whom the +Software is furnished to do so, subject to the following +conditions: + +The above copyright notice and this permission notice shall be +included in all copies or substantial portions of the Software. + +Distributions of all or part of the Software intended to be used +by the recipients as they would use the unmodified Software, +containing modifications that substantially alter, remove, or +disable functionality of the Software, outside of the documented +configuration mechanisms provided by the Software, shall be +modified such that the Original Author's bug reporting email +addresses and urls are either replaced with the contact information +of the parties responsible for the changes, or removed entirely. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, +EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES +OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND +NONINFRINGEMENT. 
IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT +HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, +WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING +FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR +OTHER DEALINGS IN THE SOFTWARE. + + +Except where noted, this license applies to any and all software +programs and associated documentation files created by the +Original Author, when distributed with the Software. diff --git a/amplify/functions/downloadDocument/node_modules/bowser/README.md b/amplify/functions/downloadDocument/node_modules/bowser/README.md new file mode 100644 index 0000000..8f5f915 --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/bowser/README.md @@ -0,0 +1,179 @@ +## Bowser +A small, fast and rich-API browser/platform/engine detector for both browser and node. +- **Small.** Use plain ES5-version which is ~4.8kB gzipped. +- **Optimized.** Use only those parsers you need — it doesn't do useless work. +- **Multi-platform.** It's browser- and node-ready, so you can use it in any environment. + +Don't hesitate to support the project on Github or [OpenCollective](https://opencollective.com/bowser) if you like it ❤️ Also, contributors are always welcome! 
+ +[![Financial Contributors on Open Collective](https://opencollective.com/bowser/all/badge.svg?label=financial+contributors)](https://opencollective.com/bowser) [![Build Status](https://travis-ci.org/lancedikson/bowser.svg?branch=master)](https://travis-ci.org/lancedikson/bowser/) [![Greenkeeper badge](https://badges.greenkeeper.io/lancedikson/bowser.svg)](https://greenkeeper.io/) [![Coverage Status](https://coveralls.io/repos/github/lancedikson/bowser/badge.svg?branch=master)](https://coveralls.io/github/lancedikson/bowser?branch=master) ![Downloads](https://img.shields.io/npm/dm/bowser) + +# Contents +- [Overview](#overview) +- [Use cases](#use-cases) +- [Advanced usage](#advanced-usage) +- [How can I help?](#contributing) + +# Overview + +The library is made to help to detect what browser your user has and gives you a convenient API to filter the users somehow depending on their browsers. Check it out on this page: https://bowser-js.github.io/bowser-online/. + +### ⚠️ Version 2.0 breaking changes ⚠️ + +Version 2.0 has drastically changed the API. All available methods are on the [docs page](https://lancedikson.github.io/bowser/docs). + +_For legacy code, check out the [1.x](https://github.com/lancedikson/bowser/tree/v1.x) branch and install it through `npm install bowser@1.9.4`._ + +# Use cases + +First of all, require the library. This is a UMD Module, so it will work for AMD, TypeScript, ES6, and CommonJS module systems. + +```javascript +const Bowser = require("bowser"); // CommonJS + +import * as Bowser from "bowser"; // TypeScript + +import Bowser from "bowser"; // ES6 (and TypeScript with --esModuleInterop enabled) +``` + +By default, the exported version is the *ES5 transpiled version*, which **do not** include any polyfills. + +In case you don't use your own `babel-polyfill` you may need to have pre-built bundle with all needed polyfills. +So, for you it's suitable to require bowser like this: `require('bowser/bundled')`. 
+As the result, you get a ES5 version of bowser with `babel-polyfill` bundled together. + +You may need to use the source files, so they will be available in the package as well. + +## Browser props detection + +Often we need to pick users' browser properties such as the name, the version, the rendering engine and so on. Here is an example how to do it with Bowser: + +```javascript +const browser = Bowser.getParser(window.navigator.userAgent); + +console.log(`The current browser name is "${browser.getBrowserName()}"`); +// The current browser name is "Internet Explorer" +``` + +or + +```javascript +const browser = Bowser.getParser(window.navigator.userAgent); +console.log(browser.getBrowser()); + +// outputs +{ + name: "Internet Explorer" + version: "11.0" +} +``` + +or + +```javascript +console.log(Bowser.parse(window.navigator.userAgent)); + +// outputs +{ + browser: { + name: "Internet Explorer" + version: "11.0" + }, + os: { + name: "Windows" + version: "NT 6.3" + versionName: "8.1" + }, + platform: { + type: "desktop" + }, + engine: { + name: "Trident" + version: "7.0" + } +} +``` + + +## Filtering browsers + +You could want to filter some particular browsers to provide any special support for them or make any workarounds. 
+It could look like this: + +```javascript +const browser = Bowser.getParser(window.navigator.userAgent); +const isValidBrowser = browser.satisfies({ + // declare browsers per OS + windows: { + "internet explorer": ">10", + }, + macos: { + safari: ">10.1" + }, + + // per platform (mobile, desktop or tablet) + mobile: { + safari: '>=9', + 'android browser': '>3.10' + }, + + // or in general + chrome: "~20.1.1432", + firefox: ">31", + opera: ">=22", + + // also supports equality operator + chrome: "=20.1.1432", // will match particular build only + + // and loose-equality operator + chrome: "~20", // will match any 20.* sub-version + chrome: "~20.1" // will match any 20.1.* sub-version (20.1.19 as well as 20.1.12.42-alpha.1) +}); +``` + +Settings for any particular OS or platform has more priority and redefines settings of standalone browsers. +Thus, you can define OS or platform specific rules and they will have more priority in the end. + +More of API and possibilities you will find in the `docs` folder. + +### Browser names for `.satisfies()` + +By default you are supposed to use the full browser name for `.satisfies`. +But, there's a short way to define a browser using short aliases. The full +list of aliases can be found in [the file](src/constants.js). + +## Similar Projects +* [Kong](https://github.com/BigBadBleuCheese/Kong) - A C# port of Bowser. + +## Contributors + +### Code Contributors + +This project exists thanks to all the people who contribute. [[Contribute](CONTRIBUTING.md)]. + + +### Financial Contributors + +Become a financial contributor and help us sustain our community. [[Contribute](https://opencollective.com/bowser/contribute)] + +#### Individuals + + + +#### Organizations + +Support this project with your organization. Your logo will show up here with a link to your website. [[Contribute](https://opencollective.com/bowser/contribute)] + + + + + + + + + + + + +## License +Licensed as MIT. 
All rights not explicitly granted in the MIT license are reserved. See the included LICENSE file for more details. diff --git a/amplify/functions/downloadDocument/node_modules/bowser/bundled.js b/amplify/functions/downloadDocument/node_modules/bowser/bundled.js new file mode 100644 index 0000000..066ac40 --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/bowser/bundled.js @@ -0,0 +1 @@ +!function(t,n){"object"==typeof exports&&"object"==typeof module?module.exports=n():"function"==typeof define&&define.amd?define([],n):"object"==typeof exports?exports.bowser=n():t.bowser=n()}(this,(function(){return function(t){var n={};function e(r){if(n[r])return n[r].exports;var i=n[r]={i:r,l:!1,exports:{}};return t[r].call(i.exports,i,i.exports,e),i.l=!0,i.exports}return e.m=t,e.c=n,e.d=function(t,n,r){e.o(t,n)||Object.defineProperty(t,n,{enumerable:!0,get:r})},e.r=function(t){"undefined"!=typeof Symbol&&Symbol.toStringTag&&Object.defineProperty(t,Symbol.toStringTag,{value:"Module"}),Object.defineProperty(t,"__esModule",{value:!0})},e.t=function(t,n){if(1&n&&(t=e(t)),8&n)return t;if(4&n&&"object"==typeof t&&t&&t.__esModule)return t;var r=Object.create(null);if(e.r(r),Object.defineProperty(r,"default",{enumerable:!0,value:t}),2&n&&"string"!=typeof t)for(var i in t)e.d(r,i,function(n){return t[n]}.bind(null,i));return r},e.n=function(t){var n=t&&t.__esModule?function(){return t.default}:function(){return t};return e.d(n,"a",n),n},e.o=function(t,n){return Object.prototype.hasOwnProperty.call(t,n)},e.p="",e(e.s=129)}([function(t,n,e){var r=e(1),i=e(7),o=e(14),u=e(11),a=e(19),c=function(t,n,e){var s,f,l,h,d=t&c.F,p=t&c.G,v=t&c.S,g=t&c.P,y=t&c.B,m=p?r:v?r[n]||(r[n]={}):(r[n]||{}).prototype,b=p?i:i[n]||(i[n]={}),S=b.prototype||(b.prototype={});for(s in p&&(e=n),e)l=((f=!d&&m&&void 0!==m[s])?m:e)[s],h=y&&f?a(l,r):g&&"function"==typeof 
l?a(Function.call,l):l,m&&u(m,s,l,t&c.U),b[s]!=l&&o(b,s,h),g&&S[s]!=l&&(S[s]=l)};r.core=i,c.F=1,c.G=2,c.S=4,c.P=8,c.B=16,c.W=32,c.U=64,c.R=128,t.exports=c},function(t,n){var e=t.exports="undefined"!=typeof window&&window.Math==Math?window:"undefined"!=typeof self&&self.Math==Math?self:Function("return this")();"number"==typeof __g&&(__g=e)},function(t,n){t.exports=function(t){try{return!!t()}catch(t){return!0}}},function(t,n,e){var r=e(4);t.exports=function(t){if(!r(t))throw TypeError(t+" is not an object!");return t}},function(t,n){t.exports=function(t){return"object"==typeof t?null!==t:"function"==typeof t}},function(t,n,e){var r=e(50)("wks"),i=e(31),o=e(1).Symbol,u="function"==typeof o;(t.exports=function(t){return r[t]||(r[t]=u&&o[t]||(u?o:i)("Symbol."+t))}).store=r},function(t,n,e){var r=e(21),i=Math.min;t.exports=function(t){return t>0?i(r(t),9007199254740991):0}},function(t,n){var e=t.exports={version:"2.6.9"};"number"==typeof __e&&(__e=e)},function(t,n,e){t.exports=!e(2)((function(){return 7!=Object.defineProperty({},"a",{get:function(){return 7}}).a}))},function(t,n,e){var r=e(3),i=e(96),o=e(28),u=Object.defineProperty;n.f=e(8)?Object.defineProperty:function(t,n,e){if(r(t),n=o(n,!0),r(e),i)try{return u(t,n,e)}catch(t){}if("get"in e||"set"in e)throw TypeError("Accessors not supported!");return"value"in e&&(t[n]=e.value),t}},function(t,n,e){var r=e(26);t.exports=function(t){return Object(r(t))}},function(t,n,e){var r=e(1),i=e(14),o=e(13),u=e(31)("src"),a=e(134),c=(""+a).split("toString");e(7).inspectSource=function(t){return a.call(t)},(t.exports=function(t,n,e,a){var s="function"==typeof e;s&&(o(e,"name")||i(e,"name",n)),t[n]!==e&&(s&&(o(e,u)||i(e,u,t[n]?""+t[n]:c.join(String(n)))),t===r?t[n]=e:a?t[n]?t[n]=e:i(t,n,e):(delete t[n],i(t,n,e)))})(Function.prototype,"toString",(function(){return"function"==typeof this&&this[u]||a.call(this)}))},function(t,n,e){var r=e(0),i=e(2),o=e(26),u=/"/g,a=function(t,n,e,r){var i=String(o(t)),a="<"+n;return""!==e&&(a+=" 
"+e+'="'+String(r).replace(u,""")+'"'),a+">"+i+""};t.exports=function(t,n){var e={};e[t]=n(a),r(r.P+r.F*i((function(){var n=""[t]('"');return n!==n.toLowerCase()||n.split('"').length>3})),"String",e)}},function(t,n){var e={}.hasOwnProperty;t.exports=function(t,n){return e.call(t,n)}},function(t,n,e){var r=e(9),i=e(30);t.exports=e(8)?function(t,n,e){return r.f(t,n,i(1,e))}:function(t,n,e){return t[n]=e,t}},function(t,n,e){var r=e(46),i=e(26);t.exports=function(t){return r(i(t))}},function(t,n,e){"use strict";var r=e(2);t.exports=function(t,n){return!!t&&r((function(){n?t.call(null,(function(){}),1):t.call(null)}))}},function(t,n,e){"use strict";n.__esModule=!0,n.default=void 0;var r=e(18),i=function(){function t(){}return t.getFirstMatch=function(t,n){var e=n.match(t);return e&&e.length>0&&e[1]||""},t.getSecondMatch=function(t,n){var e=n.match(t);return e&&e.length>1&&e[2]||""},t.matchAndReturnConst=function(t,n,e){if(t.test(n))return e},t.getWindowsVersionName=function(t){switch(t){case"NT":return"NT";case"XP":return"XP";case"NT 5.0":return"2000";case"NT 5.1":return"XP";case"NT 5.2":return"2003";case"NT 6.0":return"Vista";case"NT 6.1":return"7";case"NT 6.2":return"8";case"NT 6.3":return"8.1";case"NT 10.0":return"10";default:return}},t.getMacOSVersionName=function(t){var n=t.split(".").splice(0,2).map((function(t){return parseInt(t,10)||0}));if(n.push(0),10===n[0])switch(n[1]){case 5:return"Leopard";case 6:return"Snow Leopard";case 7:return"Lion";case 8:return"Mountain Lion";case 9:return"Mavericks";case 10:return"Yosemite";case 11:return"El Capitan";case 12:return"Sierra";case 13:return"High Sierra";case 14:return"Mojave";case 15:return"Catalina";default:return}},t.getAndroidVersionName=function(t){var n=t.split(".").splice(0,2).map((function(t){return parseInt(t,10)||0}));if(n.push(0),!(1===n[0]&&n[1]<5))return 
1===n[0]&&n[1]<6?"Cupcake":1===n[0]&&n[1]>=6?"Donut":2===n[0]&&n[1]<2?"Eclair":2===n[0]&&2===n[1]?"Froyo":2===n[0]&&n[1]>2?"Gingerbread":3===n[0]?"Honeycomb":4===n[0]&&n[1]<1?"Ice Cream Sandwich":4===n[0]&&n[1]<4?"Jelly Bean":4===n[0]&&n[1]>=4?"KitKat":5===n[0]?"Lollipop":6===n[0]?"Marshmallow":7===n[0]?"Nougat":8===n[0]?"Oreo":9===n[0]?"Pie":void 0},t.getVersionPrecision=function(t){return t.split(".").length},t.compareVersions=function(n,e,r){void 0===r&&(r=!1);var i=t.getVersionPrecision(n),o=t.getVersionPrecision(e),u=Math.max(i,o),a=0,c=t.map([n,e],(function(n){var e=u-t.getVersionPrecision(n),r=n+new Array(e+1).join(".0");return t.map(r.split("."),(function(t){return new Array(20-t.length).join("0")+t})).reverse()}));for(r&&(a=u-Math.min(i,o)),u-=1;u>=a;){if(c[0][u]>c[1][u])return 1;if(c[0][u]===c[1][u]){if(u===a)return 0;u-=1}else if(c[0][u]1?i-1:0),u=1;u0?r:e)(t)}},function(t,n,e){var r=e(47),i=e(30),o=e(15),u=e(28),a=e(13),c=e(96),s=Object.getOwnPropertyDescriptor;n.f=e(8)?s:function(t,n){if(t=o(t),n=u(n,!0),c)try{return s(t,n)}catch(t){}if(a(t,n))return i(!r.f.call(t,n),t[n])}},function(t,n,e){var r=e(0),i=e(7),o=e(2);t.exports=function(t,n){var e=(i.Object||{})[t]||Object[t],u={};u[t]=n(e),r(r.S+r.F*o((function(){e(1)})),"Object",u)}},function(t,n,e){var r=e(19),i=e(46),o=e(10),u=e(6),a=e(112);t.exports=function(t,n){var e=1==t,c=2==t,s=3==t,f=4==t,l=6==t,h=5==t||l,d=n||a;return function(n,a,p){for(var v,g,y=o(n),m=i(y),b=r(a,p,3),S=u(m.length),w=0,_=e?d(n,S):c?d(n,0):void 0;S>w;w++)if((h||w in m)&&(g=b(v=m[w],w,y),t))if(e)_[w]=g;else if(g)switch(t){case 3:return!0;case 5:return v;case 6:return w;case 2:_.push(v)}else if(f)return!1;return l?-1:s||f?f:_}}},function(t,n){var e={}.toString;t.exports=function(t){return e.call(t).slice(8,-1)}},function(t,n){t.exports=function(t){if(null==t)throw TypeError("Can't call method on "+t);return t}},function(t,n,e){"use strict";if(e(8)){var 
r=e(32),i=e(1),o=e(2),u=e(0),a=e(61),c=e(86),s=e(19),f=e(44),l=e(30),h=e(14),d=e(45),p=e(21),v=e(6),g=e(123),y=e(34),m=e(28),b=e(13),S=e(48),w=e(4),_=e(10),M=e(78),x=e(35),P=e(37),O=e(36).f,F=e(80),A=e(31),E=e(5),N=e(24),R=e(51),k=e(49),T=e(82),I=e(42),j=e(54),L=e(43),B=e(81),C=e(114),W=e(9),V=e(22),G=W.f,D=V.f,U=i.RangeError,z=i.TypeError,q=i.Uint8Array,K=Array.prototype,Y=c.ArrayBuffer,Q=c.DataView,H=N(0),J=N(2),X=N(3),Z=N(4),$=N(5),tt=N(6),nt=R(!0),et=R(!1),rt=T.values,it=T.keys,ot=T.entries,ut=K.lastIndexOf,at=K.reduce,ct=K.reduceRight,st=K.join,ft=K.sort,lt=K.slice,ht=K.toString,dt=K.toLocaleString,pt=E("iterator"),vt=E("toStringTag"),gt=A("typed_constructor"),yt=A("def_constructor"),mt=a.CONSTR,bt=a.TYPED,St=a.VIEW,wt=N(1,(function(t,n){return Ot(k(t,t[yt]),n)})),_t=o((function(){return 1===new q(new Uint16Array([1]).buffer)[0]})),Mt=!!q&&!!q.prototype.set&&o((function(){new q(1).set({})})),xt=function(t,n){var e=p(t);if(e<0||e%n)throw U("Wrong offset!");return e},Pt=function(t){if(w(t)&&bt in t)return t;throw z(t+" is not a typed array!")},Ot=function(t,n){if(!(w(t)&> in t))throw z("It is not a typed array constructor!");return new t(n)},Ft=function(t,n){return At(k(t,t[yt]),n)},At=function(t,n){for(var e=0,r=n.length,i=Ot(t,r);r>e;)i[e]=n[e++];return i},Et=function(t,n,e){G(t,n,{get:function(){return this._d[e]}})},Nt=function(t){var n,e,r,i,o,u,a=_(t),c=arguments.length,f=c>1?arguments[1]:void 0,l=void 0!==f,h=F(a);if(null!=h&&!M(h)){for(u=h.call(a),r=[],n=0;!(o=u.next()).done;n++)r.push(o.value);a=r}for(l&&c>2&&(f=s(f,arguments[2],2)),n=0,e=v(a.length),i=Ot(this,e);e>n;n++)i[n]=l?f(a[n],n):a[n];return i},Rt=function(){for(var t=0,n=arguments.length,e=Ot(this,n);n>t;)e[t]=arguments[t++];return e},kt=!!q&&o((function(){dt.call(new q(1))})),Tt=function(){return dt.apply(kt?lt.call(Pt(this)):Pt(this),arguments)},It={copyWithin:function(t,n){return C.call(Pt(this),t,n,arguments.length>2?arguments[2]:void 0)},every:function(t){return 
Z(Pt(this),t,arguments.length>1?arguments[1]:void 0)},fill:function(t){return B.apply(Pt(this),arguments)},filter:function(t){return Ft(this,J(Pt(this),t,arguments.length>1?arguments[1]:void 0))},find:function(t){return $(Pt(this),t,arguments.length>1?arguments[1]:void 0)},findIndex:function(t){return tt(Pt(this),t,arguments.length>1?arguments[1]:void 0)},forEach:function(t){H(Pt(this),t,arguments.length>1?arguments[1]:void 0)},indexOf:function(t){return et(Pt(this),t,arguments.length>1?arguments[1]:void 0)},includes:function(t){return nt(Pt(this),t,arguments.length>1?arguments[1]:void 0)},join:function(t){return st.apply(Pt(this),arguments)},lastIndexOf:function(t){return ut.apply(Pt(this),arguments)},map:function(t){return wt(Pt(this),t,arguments.length>1?arguments[1]:void 0)},reduce:function(t){return at.apply(Pt(this),arguments)},reduceRight:function(t){return ct.apply(Pt(this),arguments)},reverse:function(){for(var t,n=Pt(this).length,e=Math.floor(n/2),r=0;r1?arguments[1]:void 0)},sort:function(t){return ft.call(Pt(this),t)},subarray:function(t,n){var e=Pt(this),r=e.length,i=y(t,r);return new(k(e,e[yt]))(e.buffer,e.byteOffset+i*e.BYTES_PER_ELEMENT,v((void 0===n?r:y(n,r))-i))}},jt=function(t,n){return Ft(this,lt.call(Pt(this),t,n))},Lt=function(t){Pt(this);var n=xt(arguments[1],1),e=this.length,r=_(t),i=v(r.length),o=0;if(i+n>e)throw U("Wrong length!");for(;o255?255:255&r),i.v[d](e*n+i.o,r,_t)}(this,e,t)},enumerable:!0})};b?(p=e((function(t,e,r,i){f(t,p,s,"_d");var o,u,a,c,l=0,d=0;if(w(e)){if(!(e instanceof Y||"ArrayBuffer"==(c=S(e))||"SharedArrayBuffer"==c))return bt in e?At(p,e):Nt.call(p,e);o=e,d=xt(r,n);var y=e.byteLength;if(void 0===i){if(y%n)throw U("Wrong length!");if((u=y-d)<0)throw U("Wrong length!")}else if((u=v(i)*n)+d>y)throw U("Wrong length!");a=u/n}else a=g(e),o=new Y(u=a*n);for(h(t,"_d",{b:o,o:d,l:u,e:a,v:new Q(o)});ldocument.F=Object<\/script>"),t.close(),c=t.F;r--;)delete c.prototype[o[r]];return c()};t.exports=Object.create||function(t,n){var 
e;return null!==t?(a.prototype=r(t),e=new a,a.prototype=null,e[u]=t):e=c(),void 0===n?e:i(e,n)}},function(t,n,e){var r=e(98),i=e(65).concat("length","prototype");n.f=Object.getOwnPropertyNames||function(t){return r(t,i)}},function(t,n,e){var r=e(13),i=e(10),o=e(64)("IE_PROTO"),u=Object.prototype;t.exports=Object.getPrototypeOf||function(t){return t=i(t),r(t,o)?t[o]:"function"==typeof t.constructor&&t instanceof t.constructor?t.constructor.prototype:t instanceof Object?u:null}},function(t,n,e){var r=e(5)("unscopables"),i=Array.prototype;null==i[r]&&e(14)(i,r,{}),t.exports=function(t){i[r][t]=!0}},function(t,n,e){var r=e(4);t.exports=function(t,n){if(!r(t)||t._t!==n)throw TypeError("Incompatible receiver, "+n+" required!");return t}},function(t,n,e){var r=e(9).f,i=e(13),o=e(5)("toStringTag");t.exports=function(t,n,e){t&&!i(t=e?t:t.prototype,o)&&r(t,o,{configurable:!0,value:n})}},function(t,n,e){var r=e(0),i=e(26),o=e(2),u=e(68),a="["+u+"]",c=RegExp("^"+a+a+"*"),s=RegExp(a+a+"*$"),f=function(t,n,e){var i={},a=o((function(){return!!u[t]()||"​…"!="​…"[t]()})),c=i[t]=a?n(l):u[t];e&&(i[e]=c),r(r.P+r.F*a,"String",i)},l=f.trim=function(t,n){return t=String(i(t)),1&n&&(t=t.replace(c,"")),2&n&&(t=t.replace(s,"")),t};t.exports=f},function(t,n){t.exports={}},function(t,n,e){"use strict";var r=e(1),i=e(9),o=e(8),u=e(5)("species");t.exports=function(t){var n=r[t];o&&n&&!n[u]&&i.f(n,u,{configurable:!0,get:function(){return this}})}},function(t,n){t.exports=function(t,n,e,r){if(!(t instanceof n)||void 0!==r&&r in t)throw TypeError(e+": incorrect invocation!");return t}},function(t,n,e){var r=e(11);t.exports=function(t,n,e){for(var i in n)r(t,i,n[i],e);return t}},function(t,n,e){var r=e(25);t.exports=Object("z").propertyIsEnumerable(0)?Object:function(t){return"String"==r(t)?t.split(""):Object(t)}},function(t,n){n.f={}.propertyIsEnumerable},function(t,n,e){var r=e(25),i=e(5)("toStringTag"),o="Arguments"==r(function(){return arguments}());t.exports=function(t){var n,e,u;return void 
0===t?"Undefined":null===t?"Null":"string"==typeof(e=function(t,n){try{return t[n]}catch(t){}}(n=Object(t),i))?e:o?r(n):"Object"==(u=r(n))&&"function"==typeof n.callee?"Arguments":u}},function(t,n,e){var r=e(3),i=e(20),o=e(5)("species");t.exports=function(t,n){var e,u=r(t).constructor;return void 0===u||null==(e=r(u)[o])?n:i(e)}},function(t,n,e){var r=e(7),i=e(1),o=i["__core-js_shared__"]||(i["__core-js_shared__"]={});(t.exports=function(t,n){return o[t]||(o[t]=void 0!==n?n:{})})("versions",[]).push({version:r.version,mode:e(32)?"pure":"global",copyright:"© 2019 Denis Pushkarev (zloirock.ru)"})},function(t,n,e){var r=e(15),i=e(6),o=e(34);t.exports=function(t){return function(n,e,u){var a,c=r(n),s=i(c.length),f=o(u,s);if(t&&e!=e){for(;s>f;)if((a=c[f++])!=a)return!0}else for(;s>f;f++)if((t||f in c)&&c[f]===e)return t||f||0;return!t&&-1}}},function(t,n){n.f=Object.getOwnPropertySymbols},function(t,n,e){var r=e(25);t.exports=Array.isArray||function(t){return"Array"==r(t)}},function(t,n,e){var r=e(5)("iterator"),i=!1;try{var o=[7][r]();o.return=function(){i=!0},Array.from(o,(function(){throw 2}))}catch(t){}t.exports=function(t,n){if(!n&&!i)return!1;var e=!1;try{var o=[7],u=o[r]();u.next=function(){return{done:e=!0}},o[r]=function(){return u},t(o)}catch(t){}return e}},function(t,n,e){"use strict";var r=e(3);t.exports=function(){var t=r(this),n="";return t.global&&(n+="g"),t.ignoreCase&&(n+="i"),t.multiline&&(n+="m"),t.unicode&&(n+="u"),t.sticky&&(n+="y"),n}},function(t,n,e){"use strict";var r=e(48),i=RegExp.prototype.exec;t.exports=function(t,n){var e=t.exec;if("function"==typeof e){var o=e.call(t,n);if("object"!=typeof o)throw new TypeError("RegExp exec method returned something other than an Object or null");return o}if("RegExp"!==r(t))throw new TypeError("RegExp#exec called on incompatible receiver");return i.call(t,n)}},function(t,n,e){"use strict";e(116);var r=e(11),i=e(14),o=e(2),u=e(26),a=e(5),c=e(83),s=a("species"),f=!o((function(){var t=/./;return 
t.exec=function(){var t=[];return t.groups={a:"7"},t},"7"!=="".replace(t,"$")})),l=function(){var t=/(?:)/,n=t.exec;t.exec=function(){return n.apply(this,arguments)};var e="ab".split(t);return 2===e.length&&"a"===e[0]&&"b"===e[1]}();t.exports=function(t,n,e){var h=a(t),d=!o((function(){var n={};return n[h]=function(){return 7},7!=""[t](n)})),p=d?!o((function(){var n=!1,e=/a/;return e.exec=function(){return n=!0,null},"split"===t&&(e.constructor={},e.constructor[s]=function(){return e}),e[h](""),!n})):void 0;if(!d||!p||"replace"===t&&!f||"split"===t&&!l){var v=/./[h],g=e(u,h,""[t],(function(t,n,e,r,i){return n.exec===c?d&&!i?{done:!0,value:v.call(n,e,r)}:{done:!0,value:t.call(e,n,r)}:{done:!1}})),y=g[0],m=g[1];r(String.prototype,t,y),i(RegExp.prototype,h,2==n?function(t,n){return m.call(t,this,n)}:function(t){return m.call(t,this)})}}},function(t,n,e){var r=e(19),i=e(111),o=e(78),u=e(3),a=e(6),c=e(80),s={},f={};(n=t.exports=function(t,n,e,l,h){var d,p,v,g,y=h?function(){return t}:c(t),m=r(e,l,n?2:1),b=0;if("function"!=typeof y)throw TypeError(t+" is not iterable!");if(o(y)){for(d=a(t.length);d>b;b++)if((g=n?m(u(p=t[b])[0],p[1]):m(t[b]))===s||g===f)return g}else for(v=y.call(t);!(p=v.next()).done;)if((g=i(v,m,p.value,n))===s||g===f)return g}).BREAK=s,n.RETURN=f},function(t,n,e){var r=e(1).navigator;t.exports=r&&r.userAgent||""},function(t,n,e){"use strict";var r=e(1),i=e(0),o=e(11),u=e(45),a=e(29),c=e(58),s=e(44),f=e(4),l=e(2),h=e(54),d=e(40),p=e(69);t.exports=function(t,n,e,v,g,y){var m=r[t],b=m,S=g?"set":"add",w=b&&b.prototype,_={},M=function(t){var n=w[t];o(w,t,"delete"==t?function(t){return!(y&&!f(t))&&n.call(this,0===t?0:t)}:"has"==t?function(t){return!(y&&!f(t))&&n.call(this,0===t?0:t)}:"get"==t?function(t){return y&&!f(t)?void 0:n.call(this,0===t?0:t)}:"add"==t?function(t){return n.call(this,0===t?0:t),this}:function(t,e){return n.call(this,0===t?0:t,e),this})};if("function"==typeof b&&(y||w.forEach&&!l((function(){(new b).entries().next()})))){var x=new 
b,P=x[S](y?{}:-0,1)!=x,O=l((function(){x.has(1)})),F=h((function(t){new b(t)})),A=!y&&l((function(){for(var t=new b,n=5;n--;)t[S](n,n);return!t.has(-0)}));F||((b=n((function(n,e){s(n,b,t);var r=p(new m,n,b);return null!=e&&c(e,g,r[S],r),r}))).prototype=w,w.constructor=b),(O||A)&&(M("delete"),M("has"),g&&M("get")),(A||P)&&M(S),y&&w.clear&&delete w.clear}else b=v.getConstructor(n,t,g,S),u(b.prototype,e),a.NEED=!0;return d(b,t),_[t]=b,i(i.G+i.W+i.F*(b!=m),_),y||v.setStrong(b,t,g),b}},function(t,n,e){for(var r,i=e(1),o=e(14),u=e(31),a=u("typed_array"),c=u("view"),s=!(!i.ArrayBuffer||!i.DataView),f=s,l=0,h="Int8Array,Uint8Array,Uint8ClampedArray,Int16Array,Uint16Array,Int32Array,Uint32Array,Float32Array,Float64Array".split(",");l<9;)(r=i[h[l++]])?(o(r.prototype,a,!0),o(r.prototype,c,!0)):f=!1;t.exports={ABV:s,CONSTR:f,TYPED:a,VIEW:c}},function(t,n,e){var r=e(4),i=e(1).document,o=r(i)&&r(i.createElement);t.exports=function(t){return o?i.createElement(t):{}}},function(t,n,e){n.f=e(5)},function(t,n,e){var r=e(50)("keys"),i=e(31);t.exports=function(t){return r[t]||(r[t]=i(t))}},function(t,n){t.exports="constructor,hasOwnProperty,isPrototypeOf,propertyIsEnumerable,toLocaleString,toString,valueOf".split(",")},function(t,n,e){var r=e(1).document;t.exports=r&&r.documentElement},function(t,n,e){var r=e(4),i=e(3),o=function(t,n){if(i(t),!r(n)&&null!==n)throw TypeError(n+": can't set as prototype!")};t.exports={set:Object.setPrototypeOf||("__proto__"in{}?function(t,n,r){try{(r=e(19)(Function.call,e(22).f(Object.prototype,"__proto__").set,2))(t,[]),n=!(t instanceof Array)}catch(t){n=!0}return function(t,e){return o(t,e),n?t.__proto__=e:r(t,e),t}}({},!1):void 0),check:o}},function(t,n){t.exports="\t\n\v\f\r   ᠎              \u2028\u2029\ufeff"},function(t,n,e){var r=e(4),i=e(67).set;t.exports=function(t,n,e){var o,u=n.constructor;return u!==e&&"function"==typeof u&&(o=u.prototype)!==e.prototype&&r(o)&&i&&i(t,o),t}},function(t,n,e){"use strict";var 
r=e(21),i=e(26);t.exports=function(t){var n=String(i(this)),e="",o=r(t);if(o<0||o==1/0)throw RangeError("Count can't be negative");for(;o>0;(o>>>=1)&&(n+=n))1&o&&(e+=n);return e}},function(t,n){t.exports=Math.sign||function(t){return 0==(t=+t)||t!=t?t:t<0?-1:1}},function(t,n){var e=Math.expm1;t.exports=!e||e(10)>22025.465794806718||e(10)<22025.465794806718||-2e-17!=e(-2e-17)?function(t){return 0==(t=+t)?t:t>-1e-6&&t<1e-6?t+t*t/2:Math.exp(t)-1}:e},function(t,n,e){var r=e(21),i=e(26);t.exports=function(t){return function(n,e){var o,u,a=String(i(n)),c=r(e),s=a.length;return c<0||c>=s?t?"":void 0:(o=a.charCodeAt(c))<55296||o>56319||c+1===s||(u=a.charCodeAt(c+1))<56320||u>57343?t?a.charAt(c):o:t?a.slice(c,c+2):u-56320+(o-55296<<10)+65536}}},function(t,n,e){"use strict";var r=e(32),i=e(0),o=e(11),u=e(14),a=e(42),c=e(110),s=e(40),f=e(37),l=e(5)("iterator"),h=!([].keys&&"next"in[].keys()),d=function(){return this};t.exports=function(t,n,e,p,v,g,y){c(e,n,p);var m,b,S,w=function(t){if(!h&&t in P)return P[t];switch(t){case"keys":case"values":return function(){return new e(this,t)}}return function(){return new e(this,t)}},_=n+" Iterator",M="values"==v,x=!1,P=t.prototype,O=P[l]||P["@@iterator"]||v&&P[v],F=O||w(v),A=v?M?w("entries"):F:void 0,E="Array"==n&&P.entries||O;if(E&&(S=f(E.call(new t)))!==Object.prototype&&S.next&&(s(S,_,!0),r||"function"==typeof S[l]||u(S,l,d)),M&&O&&"values"!==O.name&&(x=!0,F=function(){return O.call(this)}),r&&!y||!h&&!x&&P[l]||u(P,l,F),a[n]=F,a[_]=d,v)if(m={values:M?F:w("values"),keys:g?F:w("keys"),entries:A},y)for(b in m)b in P||o(P,b,m[b]);else i(i.P+i.F*(h||x),n,m);return m}},function(t,n,e){var r=e(76),i=e(26);t.exports=function(t,n,e){if(r(n))throw TypeError("String#"+e+" doesn't accept regex!");return String(i(t))}},function(t,n,e){var r=e(4),i=e(25),o=e(5)("match");t.exports=function(t){var n;return r(t)&&(void 0!==(n=t[o])?!!n:"RegExp"==i(t))}},function(t,n,e){var r=e(5)("match");t.exports=function(t){var 
n=/./;try{"/./"[t](n)}catch(e){try{return n[r]=!1,!"/./"[t](n)}catch(t){}}return!0}},function(t,n,e){var r=e(42),i=e(5)("iterator"),o=Array.prototype;t.exports=function(t){return void 0!==t&&(r.Array===t||o[i]===t)}},function(t,n,e){"use strict";var r=e(9),i=e(30);t.exports=function(t,n,e){n in t?r.f(t,n,i(0,e)):t[n]=e}},function(t,n,e){var r=e(48),i=e(5)("iterator"),o=e(42);t.exports=e(7).getIteratorMethod=function(t){if(null!=t)return t[i]||t["@@iterator"]||o[r(t)]}},function(t,n,e){"use strict";var r=e(10),i=e(34),o=e(6);t.exports=function(t){for(var n=r(this),e=o(n.length),u=arguments.length,a=i(u>1?arguments[1]:void 0,e),c=u>2?arguments[2]:void 0,s=void 0===c?e:i(c,e);s>a;)n[a++]=t;return n}},function(t,n,e){"use strict";var r=e(38),i=e(115),o=e(42),u=e(15);t.exports=e(74)(Array,"Array",(function(t,n){this._t=u(t),this._i=0,this._k=n}),(function(){var t=this._t,n=this._k,e=this._i++;return!t||e>=t.length?(this._t=void 0,i(1)):i(0,"keys"==n?e:"values"==n?t[e]:[e,t[e]])}),"values"),o.Arguments=o.Array,r("keys"),r("values"),r("entries")},function(t,n,e){"use strict";var r,i,o=e(55),u=RegExp.prototype.exec,a=String.prototype.replace,c=u,s=(r=/a/,i=/b*/g,u.call(r,"a"),u.call(i,"a"),0!==r.lastIndex||0!==i.lastIndex),f=void 0!==/()??/.exec("")[1];(s||f)&&(c=function(t){var n,e,r,i,c=this;return f&&(e=new RegExp("^"+c.source+"$(?!\\s)",o.call(c))),s&&(n=c.lastIndex),r=u.call(c,t),s&&r&&(c.lastIndex=c.global?r.index+r[0].length:n),f&&r&&r.length>1&&a.call(r[0],e,(function(){for(i=1;ie;)n.push(arguments[e++]);return y[++g]=function(){a("function"==typeof t?t:Function(t),n)},r(g),g},d=function(t){delete y[t]},"process"==e(25)(l)?r=function(t){l.nextTick(u(m,t,1))}:v&&v.now?r=function(t){v.now(u(m,t,1))}:p?(o=(i=new p).port2,i.port1.onmessage=b,r=u(o.postMessage,o,1)):f.addEventListener&&"function"==typeof postMessage&&!f.importScripts?(r=function(t){f.postMessage(t+"","*")},f.addEventListener("message",b,!1)):r="onreadystatechange"in 
s("script")?function(t){c.appendChild(s("script")).onreadystatechange=function(){c.removeChild(this),m.call(t)}}:function(t){setTimeout(u(m,t,1),0)}),t.exports={set:h,clear:d}},function(t,n,e){"use strict";var r=e(1),i=e(8),o=e(32),u=e(61),a=e(14),c=e(45),s=e(2),f=e(44),l=e(21),h=e(6),d=e(123),p=e(36).f,v=e(9).f,g=e(81),y=e(40),m="prototype",b="Wrong index!",S=r.ArrayBuffer,w=r.DataView,_=r.Math,M=r.RangeError,x=r.Infinity,P=S,O=_.abs,F=_.pow,A=_.floor,E=_.log,N=_.LN2,R=i?"_b":"buffer",k=i?"_l":"byteLength",T=i?"_o":"byteOffset";function I(t,n,e){var r,i,o,u=new Array(e),a=8*e-n-1,c=(1<>1,f=23===n?F(2,-24)-F(2,-77):0,l=0,h=t<0||0===t&&1/t<0?1:0;for((t=O(t))!=t||t===x?(i=t!=t?1:0,r=c):(r=A(E(t)/N),t*(o=F(2,-r))<1&&(r--,o*=2),(t+=r+s>=1?f/o:f*F(2,1-s))*o>=2&&(r++,o/=2),r+s>=c?(i=0,r=c):r+s>=1?(i=(t*o-1)*F(2,n),r+=s):(i=t*F(2,s-1)*F(2,n),r=0));n>=8;u[l++]=255&i,i/=256,n-=8);for(r=r<0;u[l++]=255&r,r/=256,a-=8);return u[--l]|=128*h,u}function j(t,n,e){var r,i=8*e-n-1,o=(1<>1,a=i-7,c=e-1,s=t[c--],f=127&s;for(s>>=7;a>0;f=256*f+t[c],c--,a-=8);for(r=f&(1<<-a)-1,f>>=-a,a+=n;a>0;r=256*r+t[c],c--,a-=8);if(0===f)f=1-u;else{if(f===o)return r?NaN:s?-x:x;r+=F(2,n),f-=u}return(s?-1:1)*r*F(2,f-n)}function L(t){return t[3]<<24|t[2]<<16|t[1]<<8|t[0]}function B(t){return[255&t]}function C(t){return[255&t,t>>8&255]}function W(t){return[255&t,t>>8&255,t>>16&255,t>>24&255]}function V(t){return I(t,52,8)}function G(t){return I(t,23,4)}function D(t,n,e){v(t[m],n,{get:function(){return this[e]}})}function U(t,n,e,r){var i=d(+e);if(i+n>t[k])throw M(b);var o=t[R]._b,u=i+t[T],a=o.slice(u,u+n);return r?a:a.reverse()}function z(t,n,e,r,i,o){var u=d(+e);if(u+n>t[k])throw M(b);for(var a=t[R]._b,c=u+t[T],s=r(+i),f=0;fQ;)(q=Y[Q++])in S||a(S,q,P[q]);o||(K.constructor=S)}var H=new w(new S(2)),J=w[m].setInt8;H.setInt8(0,2147483648),H.setInt8(1,2147483649),!H.getInt8(0)&&H.getInt8(1)||c(w[m],{setInt8:function(t,n){J.call(this,t,n<<24>>24)},setUint8:function(t,n){J.call(this,t,n<<24>>24)}},!0)}else 
S=function(t){f(this,S,"ArrayBuffer");var n=d(t);this._b=g.call(new Array(n),0),this[k]=n},w=function(t,n,e){f(this,w,"DataView"),f(t,S,"DataView");var r=t[k],i=l(n);if(i<0||i>r)throw M("Wrong offset!");if(i+(e=void 0===e?r-i:h(e))>r)throw M("Wrong length!");this[R]=t,this[T]=i,this[k]=e},i&&(D(S,"byteLength","_l"),D(w,"buffer","_b"),D(w,"byteLength","_l"),D(w,"byteOffset","_o")),c(w[m],{getInt8:function(t){return U(this,1,t)[0]<<24>>24},getUint8:function(t){return U(this,1,t)[0]},getInt16:function(t){var n=U(this,2,t,arguments[1]);return(n[1]<<8|n[0])<<16>>16},getUint16:function(t){var n=U(this,2,t,arguments[1]);return n[1]<<8|n[0]},getInt32:function(t){return L(U(this,4,t,arguments[1]))},getUint32:function(t){return L(U(this,4,t,arguments[1]))>>>0},getFloat32:function(t){return j(U(this,4,t,arguments[1]),23,4)},getFloat64:function(t){return j(U(this,8,t,arguments[1]),52,8)},setInt8:function(t,n){z(this,1,t,B,n)},setUint8:function(t,n){z(this,1,t,B,n)},setInt16:function(t,n){z(this,2,t,C,n,arguments[2])},setUint16:function(t,n){z(this,2,t,C,n,arguments[2])},setInt32:function(t,n){z(this,4,t,W,n,arguments[2])},setUint32:function(t,n){z(this,4,t,W,n,arguments[2])},setFloat32:function(t,n){z(this,4,t,G,n,arguments[2])},setFloat64:function(t,n){z(this,8,t,V,n,arguments[2])}});y(S,"ArrayBuffer"),y(w,"DataView"),a(w[m],u.VIEW,!0),n.ArrayBuffer=S,n.DataView=w},function(t,n){var e=t.exports="undefined"!=typeof window&&window.Math==Math?window:"undefined"!=typeof self&&self.Math==Math?self:Function("return this")();"number"==typeof __g&&(__g=e)},function(t,n){t.exports=function(t){return"object"==typeof t?null!==t:"function"==typeof t}},function(t,n,e){t.exports=!e(128)((function(){return 7!=Object.defineProperty({},"a",{get:function(){return 7}}).a}))},function(t,n,e){"use strict";n.__esModule=!0,n.default=void 0;var r,i=(r=e(91))&&r.__esModule?r:{default:r},o=e(18);function u(t,n){for(var e=0;e0){var u=Object.keys(e),c=a.default.find(u,(function(t){return 
n.isOS(t)}));if(c){var s=this.satisfies(e[c]);if(void 0!==s)return s}var f=a.default.find(u,(function(t){return n.isPlatform(t)}));if(f){var l=this.satisfies(e[f]);if(void 0!==l)return l}}if(o>0){var h=Object.keys(i),d=a.default.find(h,(function(t){return n.isBrowser(t,!0)}));if(void 0!==d)return this.compareVersion(i[d])}},n.isBrowser=function(t,n){void 0===n&&(n=!1);var e=this.getBrowserName().toLowerCase(),r=t.toLowerCase(),i=a.default.getBrowserTypeByAlias(r);return n&&i&&(r=i.toLowerCase()),r===e},n.compareVersion=function(t){var n=[0],e=t,r=!1,i=this.getBrowserVersion();if("string"==typeof i)return">"===t[0]||"<"===t[0]?(e=t.substr(1),"="===t[1]?(r=!0,e=t.substr(2)):n=[],">"===t[0]?n.push(1):n.push(-1)):"="===t[0]?e=t.substr(1):"~"===t[0]&&(r=!0,e=t.substr(1)),n.indexOf(a.default.compareVersions(i,e,r))>-1},n.isOS=function(t){return this.getOSName(!0)===String(t).toLowerCase()},n.isPlatform=function(t){return this.getPlatformType(!0)===String(t).toLowerCase()},n.isEngine=function(t){return this.getEngineName(!0)===String(t).toLowerCase()},n.is=function(t,n){return void 0===n&&(n=!1),this.isBrowser(t,n)||this.isOS(t)||this.isPlatform(t)},n.some=function(t){var n=this;return void 0===t&&(t=[]),t.some((function(t){return n.is(t)}))},t}();n.default=s,t.exports=n.default},function(t,n,e){"use strict";n.__esModule=!0,n.default=void 0;var r,i=(r=e(17))&&r.__esModule?r:{default:r};var o=/version\/(\d+(\.?_?\d+)+)/i,u=[{test:[/googlebot/i],describe:function(t){var n={name:"Googlebot"},e=i.default.getFirstMatch(/googlebot\/(\d+(\.\d+))/i,t)||i.default.getFirstMatch(o,t);return e&&(n.version=e),n}},{test:[/opera/i],describe:function(t){var n={name:"Opera"},e=i.default.getFirstMatch(o,t)||i.default.getFirstMatch(/(?:opera)[\s/](\d+(\.?_?\d+)+)/i,t);return e&&(n.version=e),n}},{test:[/opr\/|opios/i],describe:function(t){var n={name:"Opera"},e=i.default.getFirstMatch(/(?:opr|opios)[\s/](\S+)/i,t)||i.default.getFirstMatch(o,t);return 
e&&(n.version=e),n}},{test:[/SamsungBrowser/i],describe:function(t){var n={name:"Samsung Internet for Android"},e=i.default.getFirstMatch(o,t)||i.default.getFirstMatch(/(?:SamsungBrowser)[\s/](\d+(\.?_?\d+)+)/i,t);return e&&(n.version=e),n}},{test:[/Whale/i],describe:function(t){var n={name:"NAVER Whale Browser"},e=i.default.getFirstMatch(o,t)||i.default.getFirstMatch(/(?:whale)[\s/](\d+(?:\.\d+)+)/i,t);return e&&(n.version=e),n}},{test:[/MZBrowser/i],describe:function(t){var n={name:"MZ Browser"},e=i.default.getFirstMatch(/(?:MZBrowser)[\s/](\d+(?:\.\d+)+)/i,t)||i.default.getFirstMatch(o,t);return e&&(n.version=e),n}},{test:[/focus/i],describe:function(t){var n={name:"Focus"},e=i.default.getFirstMatch(/(?:focus)[\s/](\d+(?:\.\d+)+)/i,t)||i.default.getFirstMatch(o,t);return e&&(n.version=e),n}},{test:[/swing/i],describe:function(t){var n={name:"Swing"},e=i.default.getFirstMatch(/(?:swing)[\s/](\d+(?:\.\d+)+)/i,t)||i.default.getFirstMatch(o,t);return e&&(n.version=e),n}},{test:[/coast/i],describe:function(t){var n={name:"Opera Coast"},e=i.default.getFirstMatch(o,t)||i.default.getFirstMatch(/(?:coast)[\s/](\d+(\.?_?\d+)+)/i,t);return e&&(n.version=e),n}},{test:[/opt\/\d+(?:.?_?\d+)+/i],describe:function(t){var n={name:"Opera Touch"},e=i.default.getFirstMatch(/(?:opt)[\s/](\d+(\.?_?\d+)+)/i,t)||i.default.getFirstMatch(o,t);return e&&(n.version=e),n}},{test:[/yabrowser/i],describe:function(t){var n={name:"Yandex Browser"},e=i.default.getFirstMatch(/(?:yabrowser)[\s/](\d+(\.?_?\d+)+)/i,t)||i.default.getFirstMatch(o,t);return e&&(n.version=e),n}},{test:[/ucbrowser/i],describe:function(t){var n={name:"UC Browser"},e=i.default.getFirstMatch(o,t)||i.default.getFirstMatch(/(?:ucbrowser)[\s/](\d+(\.?_?\d+)+)/i,t);return e&&(n.version=e),n}},{test:[/Maxthon|mxios/i],describe:function(t){var n={name:"Maxthon"},e=i.default.getFirstMatch(o,t)||i.default.getFirstMatch(/(?:Maxthon|mxios)[\s/](\d+(\.?_?\d+)+)/i,t);return 
e&&(n.version=e),n}},{test:[/epiphany/i],describe:function(t){var n={name:"Epiphany"},e=i.default.getFirstMatch(o,t)||i.default.getFirstMatch(/(?:epiphany)[\s/](\d+(\.?_?\d+)+)/i,t);return e&&(n.version=e),n}},{test:[/puffin/i],describe:function(t){var n={name:"Puffin"},e=i.default.getFirstMatch(o,t)||i.default.getFirstMatch(/(?:puffin)[\s/](\d+(\.?_?\d+)+)/i,t);return e&&(n.version=e),n}},{test:[/sleipnir/i],describe:function(t){var n={name:"Sleipnir"},e=i.default.getFirstMatch(o,t)||i.default.getFirstMatch(/(?:sleipnir)[\s/](\d+(\.?_?\d+)+)/i,t);return e&&(n.version=e),n}},{test:[/k-meleon/i],describe:function(t){var n={name:"K-Meleon"},e=i.default.getFirstMatch(o,t)||i.default.getFirstMatch(/(?:k-meleon)[\s/](\d+(\.?_?\d+)+)/i,t);return e&&(n.version=e),n}},{test:[/micromessenger/i],describe:function(t){var n={name:"WeChat"},e=i.default.getFirstMatch(/(?:micromessenger)[\s/](\d+(\.?_?\d+)+)/i,t)||i.default.getFirstMatch(o,t);return e&&(n.version=e),n}},{test:[/qqbrowser/i],describe:function(t){var n={name:/qqbrowserlite/i.test(t)?"QQ Browser Lite":"QQ Browser"},e=i.default.getFirstMatch(/(?:qqbrowserlite|qqbrowser)[/](\d+(\.?_?\d+)+)/i,t)||i.default.getFirstMatch(o,t);return e&&(n.version=e),n}},{test:[/msie|trident/i],describe:function(t){var n={name:"Internet Explorer"},e=i.default.getFirstMatch(/(?:msie |rv:)(\d+(\.?_?\d+)+)/i,t);return e&&(n.version=e),n}},{test:[/\sedg\//i],describe:function(t){var n={name:"Microsoft Edge"},e=i.default.getFirstMatch(/\sedg\/(\d+(\.?_?\d+)+)/i,t);return e&&(n.version=e),n}},{test:[/edg([ea]|ios)/i],describe:function(t){var n={name:"Microsoft Edge"},e=i.default.getSecondMatch(/edg([ea]|ios)\/(\d+(\.?_?\d+)+)/i,t);return e&&(n.version=e),n}},{test:[/vivaldi/i],describe:function(t){var n={name:"Vivaldi"},e=i.default.getFirstMatch(/vivaldi\/(\d+(\.?_?\d+)+)/i,t);return e&&(n.version=e),n}},{test:[/seamonkey/i],describe:function(t){var n={name:"SeaMonkey"},e=i.default.getFirstMatch(/seamonkey\/(\d+(\.?_?\d+)+)/i,t);return 
e&&(n.version=e),n}},{test:[/sailfish/i],describe:function(t){var n={name:"Sailfish"},e=i.default.getFirstMatch(/sailfish\s?browser\/(\d+(\.\d+)?)/i,t);return e&&(n.version=e),n}},{test:[/silk/i],describe:function(t){var n={name:"Amazon Silk"},e=i.default.getFirstMatch(/silk\/(\d+(\.?_?\d+)+)/i,t);return e&&(n.version=e),n}},{test:[/phantom/i],describe:function(t){var n={name:"PhantomJS"},e=i.default.getFirstMatch(/phantomjs\/(\d+(\.?_?\d+)+)/i,t);return e&&(n.version=e),n}},{test:[/slimerjs/i],describe:function(t){var n={name:"SlimerJS"},e=i.default.getFirstMatch(/slimerjs\/(\d+(\.?_?\d+)+)/i,t);return e&&(n.version=e),n}},{test:[/blackberry|\bbb\d+/i,/rim\stablet/i],describe:function(t){var n={name:"BlackBerry"},e=i.default.getFirstMatch(o,t)||i.default.getFirstMatch(/blackberry[\d]+\/(\d+(\.?_?\d+)+)/i,t);return e&&(n.version=e),n}},{test:[/(web|hpw)[o0]s/i],describe:function(t){var n={name:"WebOS Browser"},e=i.default.getFirstMatch(o,t)||i.default.getFirstMatch(/w(?:eb)?[o0]sbrowser\/(\d+(\.?_?\d+)+)/i,t);return e&&(n.version=e),n}},{test:[/bada/i],describe:function(t){var n={name:"Bada"},e=i.default.getFirstMatch(/dolfin\/(\d+(\.?_?\d+)+)/i,t);return e&&(n.version=e),n}},{test:[/tizen/i],describe:function(t){var n={name:"Tizen"},e=i.default.getFirstMatch(/(?:tizen\s?)?browser\/(\d+(\.?_?\d+)+)/i,t)||i.default.getFirstMatch(o,t);return e&&(n.version=e),n}},{test:[/qupzilla/i],describe:function(t){var n={name:"QupZilla"},e=i.default.getFirstMatch(/(?:qupzilla)[\s/](\d+(\.?_?\d+)+)/i,t)||i.default.getFirstMatch(o,t);return e&&(n.version=e),n}},{test:[/firefox|iceweasel|fxios/i],describe:function(t){var n={name:"Firefox"},e=i.default.getFirstMatch(/(?:firefox|iceweasel|fxios)[\s/](\d+(\.?_?\d+)+)/i,t);return e&&(n.version=e),n}},{test:[/electron/i],describe:function(t){var n={name:"Electron"},e=i.default.getFirstMatch(/(?:electron)\/(\d+(\.?_?\d+)+)/i,t);return e&&(n.version=e),n}},{test:[/MiuiBrowser/i],describe:function(t){var 
n={name:"Miui"},e=i.default.getFirstMatch(/(?:MiuiBrowser)[\s/](\d+(\.?_?\d+)+)/i,t);return e&&(n.version=e),n}},{test:[/chromium/i],describe:function(t){var n={name:"Chromium"},e=i.default.getFirstMatch(/(?:chromium)[\s/](\d+(\.?_?\d+)+)/i,t)||i.default.getFirstMatch(o,t);return e&&(n.version=e),n}},{test:[/chrome|crios|crmo/i],describe:function(t){var n={name:"Chrome"},e=i.default.getFirstMatch(/(?:chrome|crios|crmo)\/(\d+(\.?_?\d+)+)/i,t);return e&&(n.version=e),n}},{test:[/GSA/i],describe:function(t){var n={name:"Google Search"},e=i.default.getFirstMatch(/(?:GSA)\/(\d+(\.?_?\d+)+)/i,t);return e&&(n.version=e),n}},{test:function(t){var n=!t.test(/like android/i),e=t.test(/android/i);return n&&e},describe:function(t){var n={name:"Android Browser"},e=i.default.getFirstMatch(o,t);return e&&(n.version=e),n}},{test:[/playstation 4/i],describe:function(t){var n={name:"PlayStation 4"},e=i.default.getFirstMatch(o,t);return e&&(n.version=e),n}},{test:[/safari|applewebkit/i],describe:function(t){var n={name:"Safari"},e=i.default.getFirstMatch(o,t);return e&&(n.version=e),n}},{test:[/.*/i],describe:function(t){var n=-1!==t.search("\\(")?/^(.*)\/(.*)[ \t]\((.*)/:/^(.*)\/(.*) /;return{name:i.default.getFirstMatch(n,t),version:i.default.getSecondMatch(n,t)}}}];n.default=u,t.exports=n.default},function(t,n,e){"use strict";n.__esModule=!0,n.default=void 0;var r,i=(r=e(17))&&r.__esModule?r:{default:r},o=e(18);var u=[{test:[/Roku\/DVP/],describe:function(t){var n=i.default.getFirstMatch(/Roku\/DVP-(\d+\.\d+)/i,t);return{name:o.OS_MAP.Roku,version:n}}},{test:[/windows phone/i],describe:function(t){var n=i.default.getFirstMatch(/windows phone (?:os)?\s?(\d+(\.\d+)*)/i,t);return{name:o.OS_MAP.WindowsPhone,version:n}}},{test:[/windows /i],describe:function(t){var n=i.default.getFirstMatch(/Windows ((NT|XP)( \d\d?.\d)?)/i,t),e=i.default.getWindowsVersionName(n);return{name:o.OS_MAP.Windows,version:n,versionName:e}}},{test:[/Macintosh(.*?) 
FxiOS(.*?)\//],describe:function(t){var n={name:o.OS_MAP.iOS},e=i.default.getSecondMatch(/(Version\/)(\d[\d.]+)/,t);return e&&(n.version=e),n}},{test:[/macintosh/i],describe:function(t){var n=i.default.getFirstMatch(/mac os x (\d+(\.?_?\d+)+)/i,t).replace(/[_\s]/g,"."),e=i.default.getMacOSVersionName(n),r={name:o.OS_MAP.MacOS,version:n};return e&&(r.versionName=e),r}},{test:[/(ipod|iphone|ipad)/i],describe:function(t){var n=i.default.getFirstMatch(/os (\d+([_\s]\d+)*) like mac os x/i,t).replace(/[_\s]/g,".");return{name:o.OS_MAP.iOS,version:n}}},{test:function(t){var n=!t.test(/like android/i),e=t.test(/android/i);return n&&e},describe:function(t){var n=i.default.getFirstMatch(/android[\s/-](\d+(\.\d+)*)/i,t),e=i.default.getAndroidVersionName(n),r={name:o.OS_MAP.Android,version:n};return e&&(r.versionName=e),r}},{test:[/(web|hpw)[o0]s/i],describe:function(t){var n=i.default.getFirstMatch(/(?:web|hpw)[o0]s\/(\d+(\.\d+)*)/i,t),e={name:o.OS_MAP.WebOS};return n&&n.length&&(e.version=n),e}},{test:[/blackberry|\bbb\d+/i,/rim\stablet/i],describe:function(t){var n=i.default.getFirstMatch(/rim\stablet\sos\s(\d+(\.\d+)*)/i,t)||i.default.getFirstMatch(/blackberry\d+\/(\d+([_\s]\d+)*)/i,t)||i.default.getFirstMatch(/\bbb(\d+)/i,t);return{name:o.OS_MAP.BlackBerry,version:n}}},{test:[/bada/i],describe:function(t){var n=i.default.getFirstMatch(/bada\/(\d+(\.\d+)*)/i,t);return{name:o.OS_MAP.Bada,version:n}}},{test:[/tizen/i],describe:function(t){var n=i.default.getFirstMatch(/tizen[/\s](\d+(\.\d+)*)/i,t);return{name:o.OS_MAP.Tizen,version:n}}},{test:[/linux/i],describe:function(){return{name:o.OS_MAP.Linux}}},{test:[/CrOS/],describe:function(){return{name:o.OS_MAP.ChromeOS}}},{test:[/PlayStation 4/],describe:function(t){var n=i.default.getFirstMatch(/PlayStation 4[/\s](\d+(\.\d+)*)/i,t);return{name:o.OS_MAP.PlayStation4,version:n}}}];n.default=u,t.exports=n.default},function(t,n,e){"use strict";n.__esModule=!0,n.default=void 0;var 
r,i=(r=e(17))&&r.__esModule?r:{default:r},o=e(18);var u=[{test:[/googlebot/i],describe:function(){return{type:"bot",vendor:"Google"}}},{test:[/huawei/i],describe:function(t){var n=i.default.getFirstMatch(/(can-l01)/i,t)&&"Nova",e={type:o.PLATFORMS_MAP.mobile,vendor:"Huawei"};return n&&(e.model=n),e}},{test:[/nexus\s*(?:7|8|9|10).*/i],describe:function(){return{type:o.PLATFORMS_MAP.tablet,vendor:"Nexus"}}},{test:[/ipad/i],describe:function(){return{type:o.PLATFORMS_MAP.tablet,vendor:"Apple",model:"iPad"}}},{test:[/Macintosh(.*?) FxiOS(.*?)\//],describe:function(){return{type:o.PLATFORMS_MAP.tablet,vendor:"Apple",model:"iPad"}}},{test:[/kftt build/i],describe:function(){return{type:o.PLATFORMS_MAP.tablet,vendor:"Amazon",model:"Kindle Fire HD 7"}}},{test:[/silk/i],describe:function(){return{type:o.PLATFORMS_MAP.tablet,vendor:"Amazon"}}},{test:[/tablet(?! pc)/i],describe:function(){return{type:o.PLATFORMS_MAP.tablet}}},{test:function(t){var n=t.test(/ipod|iphone/i),e=t.test(/like (ipod|iphone)/i);return n&&!e},describe:function(t){var n=i.default.getFirstMatch(/(ipod|iphone)/i,t);return{type:o.PLATFORMS_MAP.mobile,vendor:"Apple",model:n}}},{test:[/nexus\s*[0-6].*/i,/galaxy nexus/i],describe:function(){return{type:o.PLATFORMS_MAP.mobile,vendor:"Nexus"}}},{test:[/[^-]mobi/i],describe:function(){return{type:o.PLATFORMS_MAP.mobile}}},{test:function(t){return"blackberry"===t.getBrowserName(!0)},describe:function(){return{type:o.PLATFORMS_MAP.mobile,vendor:"BlackBerry"}}},{test:function(t){return"bada"===t.getBrowserName(!0)},describe:function(){return{type:o.PLATFORMS_MAP.mobile}}},{test:function(t){return"windows phone"===t.getBrowserName()},describe:function(){return{type:o.PLATFORMS_MAP.mobile,vendor:"Microsoft"}}},{test:function(t){var 
n=Number(String(t.getOSVersion()).split(".")[0]);return"android"===t.getOSName(!0)&&n>=3},describe:function(){return{type:o.PLATFORMS_MAP.tablet}}},{test:function(t){return"android"===t.getOSName(!0)},describe:function(){return{type:o.PLATFORMS_MAP.mobile}}},{test:function(t){return"macos"===t.getOSName(!0)},describe:function(){return{type:o.PLATFORMS_MAP.desktop,vendor:"Apple"}}},{test:function(t){return"windows"===t.getOSName(!0)},describe:function(){return{type:o.PLATFORMS_MAP.desktop}}},{test:function(t){return"linux"===t.getOSName(!0)},describe:function(){return{type:o.PLATFORMS_MAP.desktop}}},{test:function(t){return"playstation 4"===t.getOSName(!0)},describe:function(){return{type:o.PLATFORMS_MAP.tv}}},{test:function(t){return"roku"===t.getOSName(!0)},describe:function(){return{type:o.PLATFORMS_MAP.tv}}}];n.default=u,t.exports=n.default},function(t,n,e){"use strict";n.__esModule=!0,n.default=void 0;var r,i=(r=e(17))&&r.__esModule?r:{default:r},o=e(18);var u=[{test:function(t){return"microsoft edge"===t.getBrowserName(!0)},describe:function(t){if(/\sedg\//i.test(t))return{name:o.ENGINE_MAP.Blink};var n=i.default.getFirstMatch(/edge\/(\d+(\.?_?\d+)+)/i,t);return{name:o.ENGINE_MAP.EdgeHTML,version:n}}},{test:[/trident/i],describe:function(t){var n={name:o.ENGINE_MAP.Trident},e=i.default.getFirstMatch(/trident\/(\d+(\.?_?\d+)+)/i,t);return e&&(n.version=e),n}},{test:function(t){return t.test(/presto/i)},describe:function(t){var n={name:o.ENGINE_MAP.Presto},e=i.default.getFirstMatch(/presto\/(\d+(\.?_?\d+)+)/i,t);return e&&(n.version=e),n}},{test:function(t){var n=t.test(/gecko/i),e=t.test(/like gecko/i);return n&&!e},describe:function(t){var n={name:o.ENGINE_MAP.Gecko},e=i.default.getFirstMatch(/gecko\/(\d+(\.?_?\d+)+)/i,t);return e&&(n.version=e),n}},{test:[/(apple)?webkit\/537\.36/i],describe:function(){return{name:o.ENGINE_MAP.Blink}}},{test:[/(apple)?webkit/i],describe:function(t){var 
n={name:o.ENGINE_MAP.WebKit},e=i.default.getFirstMatch(/webkit\/(\d+(\.?_?\d+)+)/i,t);return e&&(n.version=e),n}}];n.default=u,t.exports=n.default},function(t,n,e){t.exports=!e(8)&&!e(2)((function(){return 7!=Object.defineProperty(e(62)("div"),"a",{get:function(){return 7}}).a}))},function(t,n,e){var r=e(1),i=e(7),o=e(32),u=e(63),a=e(9).f;t.exports=function(t){var n=i.Symbol||(i.Symbol=o?{}:r.Symbol||{});"_"==t.charAt(0)||t in n||a(n,t,{value:u.f(t)})}},function(t,n,e){var r=e(13),i=e(15),o=e(51)(!1),u=e(64)("IE_PROTO");t.exports=function(t,n){var e,a=i(t),c=0,s=[];for(e in a)e!=u&&r(a,e)&&s.push(e);for(;n.length>c;)r(a,e=n[c++])&&(~o(s,e)||s.push(e));return s}},function(t,n,e){var r=e(9),i=e(3),o=e(33);t.exports=e(8)?Object.defineProperties:function(t,n){i(t);for(var e,u=o(n),a=u.length,c=0;a>c;)r.f(t,e=u[c++],n[e]);return t}},function(t,n,e){var r=e(15),i=e(36).f,o={}.toString,u="object"==typeof window&&window&&Object.getOwnPropertyNames?Object.getOwnPropertyNames(window):[];t.exports.f=function(t){return u&&"[object Window]"==o.call(t)?function(t){try{return i(t)}catch(t){return u.slice()}}(t):i(r(t))}},function(t,n,e){"use strict";var r=e(8),i=e(33),o=e(52),u=e(47),a=e(10),c=e(46),s=Object.assign;t.exports=!s||e(2)((function(){var t={},n={},e=Symbol(),r="abcdefghijklmnopqrst";return t[e]=7,r.split("").forEach((function(t){n[t]=t})),7!=s({},t)[e]||Object.keys(s({},n)).join("")!=r}))?function(t,n){for(var e=a(t),s=arguments.length,f=1,l=o.f,h=u.f;s>f;)for(var d,p=c(arguments[f++]),v=l?i(p).concat(l(p)):i(p),g=v.length,y=0;g>y;)d=v[y++],r&&!h.call(p,d)||(e[d]=p[d]);return e}:s},function(t,n){t.exports=Object.is||function(t,n){return t===n?0!==t||1/t==1/n:t!=t&&n!=n}},function(t,n,e){"use strict";var r=e(20),i=e(4),o=e(104),u=[].slice,a={},c=function(t,n,e){if(!(n in a)){for(var r=[],i=0;i>>0||(u.test(e)?16:10))}:r},function(t,n,e){var r=e(1).parseFloat,i=e(41).trim;t.exports=1/r(e(68)+"-0")!=-1/0?function(t){var n=i(String(t),3),e=r(n);return 
0===e&&"-"==n.charAt(0)?-0:e}:r},function(t,n,e){var r=e(25);t.exports=function(t,n){if("number"!=typeof t&&"Number"!=r(t))throw TypeError(n);return+t}},function(t,n,e){var r=e(4),i=Math.floor;t.exports=function(t){return!r(t)&&isFinite(t)&&i(t)===t}},function(t,n){t.exports=Math.log1p||function(t){return(t=+t)>-1e-8&&t<1e-8?t-t*t/2:Math.log(1+t)}},function(t,n,e){"use strict";var r=e(35),i=e(30),o=e(40),u={};e(14)(u,e(5)("iterator"),(function(){return this})),t.exports=function(t,n,e){t.prototype=r(u,{next:i(1,e)}),o(t,n+" Iterator")}},function(t,n,e){var r=e(3);t.exports=function(t,n,e,i){try{return i?n(r(e)[0],e[1]):n(e)}catch(n){var o=t.return;throw void 0!==o&&r(o.call(t)),n}}},function(t,n,e){var r=e(224);t.exports=function(t,n){return new(r(t))(n)}},function(t,n,e){var r=e(20),i=e(10),o=e(46),u=e(6);t.exports=function(t,n,e,a,c){r(n);var s=i(t),f=o(s),l=u(s.length),h=c?l-1:0,d=c?-1:1;if(e<2)for(;;){if(h in f){a=f[h],h+=d;break}if(h+=d,c?h<0:l<=h)throw TypeError("Reduce of empty array with no initial value")}for(;c?h>=0:l>h;h+=d)h in f&&(a=n(a,f[h],h,s));return a}},function(t,n,e){"use strict";var r=e(10),i=e(34),o=e(6);t.exports=[].copyWithin||function(t,n){var e=r(this),u=o(e.length),a=i(t,u),c=i(n,u),s=arguments.length>2?arguments[2]:void 0,f=Math.min((void 0===s?u:i(s,u))-c,u-a),l=1;for(c0;)c in e?e[a]=e[c]:delete e[a],a+=l,c+=l;return e}},function(t,n){t.exports=function(t,n){return{value:n,done:!!t}}},function(t,n,e){"use strict";var r=e(83);e(0)({target:"RegExp",proto:!0,forced:r!==/./.exec},{exec:r})},function(t,n,e){e(8)&&"g"!=/./g.flags&&e(9).f(RegExp.prototype,"flags",{configurable:!0,get:e(55)})},function(t,n,e){"use strict";var r,i,o,u,a=e(32),c=e(1),s=e(19),f=e(48),l=e(0),h=e(4),d=e(20),p=e(44),v=e(58),g=e(49),y=e(85).set,m=e(244)(),b=e(119),S=e(245),w=e(59),_=e(120),M=c.TypeError,x=c.process,P=x&&x.versions,O=P&&P.v8||"",F=c.Promise,A="process"==f(x),E=function(){},N=i=b.f,R=!!function(){try{var 
t=F.resolve(1),n=(t.constructor={})[e(5)("species")]=function(t){t(E,E)};return(A||"function"==typeof PromiseRejectionEvent)&&t.then(E)instanceof n&&0!==O.indexOf("6.6")&&-1===w.indexOf("Chrome/66")}catch(t){}}(),k=function(t){var n;return!(!h(t)||"function"!=typeof(n=t.then))&&n},T=function(t,n){if(!t._n){t._n=!0;var e=t._c;m((function(){for(var r=t._v,i=1==t._s,o=0,u=function(n){var e,o,u,a=i?n.ok:n.fail,c=n.resolve,s=n.reject,f=n.domain;try{a?(i||(2==t._h&&L(t),t._h=1),!0===a?e=r:(f&&f.enter(),e=a(r),f&&(f.exit(),u=!0)),e===n.promise?s(M("Promise-chain cycle")):(o=k(e))?o.call(e,c,s):c(e)):s(r)}catch(t){f&&!u&&f.exit(),s(t)}};e.length>o;)u(e[o++]);t._c=[],t._n=!1,n&&!t._h&&I(t)}))}},I=function(t){y.call(c,(function(){var n,e,r,i=t._v,o=j(t);if(o&&(n=S((function(){A?x.emit("unhandledRejection",i,t):(e=c.onunhandledrejection)?e({promise:t,reason:i}):(r=c.console)&&r.error&&r.error("Unhandled promise rejection",i)})),t._h=A||j(t)?2:1),t._a=void 0,o&&n.e)throw n.v}))},j=function(t){return 1!==t._h&&0===(t._a||t._c).length},L=function(t){y.call(c,(function(){var n;A?x.emit("rejectionHandled",t):(n=c.onrejectionhandled)&&n({promise:t,reason:t._v})}))},B=function(t){var n=this;n._d||(n._d=!0,(n=n._w||n)._v=t,n._s=2,n._a||(n._a=n._c.slice()),T(n,!0))},C=function(t){var n,e=this;if(!e._d){e._d=!0,e=e._w||e;try{if(e===t)throw M("Promise can't be resolved itself");(n=k(t))?m((function(){var r={_w:e,_d:!1};try{n.call(t,s(C,r,1),s(B,r,1))}catch(t){B.call(r,t)}})):(e._v=t,e._s=1,T(e,!1))}catch(t){B.call({_w:e,_d:!1},t)}}};R||(F=function(t){p(this,F,"Promise","_h"),d(t),r.call(this);try{t(s(C,this,1),s(B,this,1))}catch(t){B.call(this,t)}},(r=function(t){this._c=[],this._a=void 0,this._s=0,this._d=!1,this._v=void 0,this._h=0,this._n=!1}).prototype=e(45)(F.prototype,{then:function(t,n){var e=N(g(this,F));return e.ok="function"!=typeof t||t,e.fail="function"==typeof n&&n,e.domain=A?x.domain:void 
0,this._c.push(e),this._a&&this._a.push(e),this._s&&T(this,!1),e.promise},catch:function(t){return this.then(void 0,t)}}),o=function(){var t=new r;this.promise=t,this.resolve=s(C,t,1),this.reject=s(B,t,1)},b.f=N=function(t){return t===F||t===u?new o(t):i(t)}),l(l.G+l.W+l.F*!R,{Promise:F}),e(40)(F,"Promise"),e(43)("Promise"),u=e(7).Promise,l(l.S+l.F*!R,"Promise",{reject:function(t){var n=N(this);return(0,n.reject)(t),n.promise}}),l(l.S+l.F*(a||!R),"Promise",{resolve:function(t){return _(a&&this===u?F:this,t)}}),l(l.S+l.F*!(R&&e(54)((function(t){F.all(t).catch(E)}))),"Promise",{all:function(t){var n=this,e=N(n),r=e.resolve,i=e.reject,o=S((function(){var e=[],o=0,u=1;v(t,!1,(function(t){var a=o++,c=!1;e.push(void 0),u++,n.resolve(t).then((function(t){c||(c=!0,e[a]=t,--u||r(e))}),i)})),--u||r(e)}));return o.e&&i(o.v),e.promise},race:function(t){var n=this,e=N(n),r=e.reject,i=S((function(){v(t,!1,(function(t){n.resolve(t).then(e.resolve,r)}))}));return i.e&&r(i.v),e.promise}})},function(t,n,e){"use strict";var r=e(20);function i(t){var n,e;this.promise=new t((function(t,r){if(void 0!==n||void 0!==e)throw TypeError("Bad Promise constructor");n=t,e=r})),this.resolve=r(n),this.reject=r(e)}t.exports.f=function(t){return new i(t)}},function(t,n,e){var r=e(3),i=e(4),o=e(119);t.exports=function(t,n){if(r(t),i(n)&&n.constructor===t)return n;var e=o.f(t);return(0,e.resolve)(n),e.promise}},function(t,n,e){"use strict";var r=e(9).f,i=e(35),o=e(45),u=e(19),a=e(44),c=e(58),s=e(74),f=e(115),l=e(43),h=e(8),d=e(29).fastKey,p=e(39),v=h?"_s":"size",g=function(t,n){var e,r=d(n);if("F"!==r)return t._i[r];for(e=t._f;e;e=e.n)if(e.k==n)return e};t.exports={getConstructor:function(t,n,e,s){var f=t((function(t,r){a(t,f,n,"_i"),t._t=n,t._i=i(null),t._f=void 0,t._l=void 0,t[v]=0,null!=r&&c(r,e,t[s],t)}));return o(f.prototype,{clear:function(){for(var t=p(this,n),e=t._i,r=t._f;r;r=r.n)r.r=!0,r.p&&(r.p=r.p.n=void 0),delete e[r.i];t._f=t._l=void 0,t[v]=0},delete:function(t){var 
e=p(this,n),r=g(e,t);if(r){var i=r.n,o=r.p;delete e._i[r.i],r.r=!0,o&&(o.n=i),i&&(i.p=o),e._f==r&&(e._f=i),e._l==r&&(e._l=o),e[v]--}return!!r},forEach:function(t){p(this,n);for(var e,r=u(t,arguments.length>1?arguments[1]:void 0,3);e=e?e.n:this._f;)for(r(e.v,e.k,this);e&&e.r;)e=e.p},has:function(t){return!!g(p(this,n),t)}}),h&&r(f.prototype,"size",{get:function(){return p(this,n)[v]}}),f},def:function(t,n,e){var r,i,o=g(t,n);return o?o.v=e:(t._l=o={i:i=d(n,!0),k:n,v:e,p:r=t._l,n:void 0,r:!1},t._f||(t._f=o),r&&(r.n=o),t[v]++,"F"!==i&&(t._i[i]=o)),t},getEntry:g,setStrong:function(t,n,e){s(t,n,(function(t,e){this._t=p(t,n),this._k=e,this._l=void 0}),(function(){for(var t=this._k,n=this._l;n&&n.r;)n=n.p;return this._t&&(this._l=n=n?n.n:this._t._f)?f(0,"keys"==t?n.k:"values"==t?n.v:[n.k,n.v]):(this._t=void 0,f(1))}),e?"entries":"values",!e,!0),l(n)}}},function(t,n,e){"use strict";var r=e(45),i=e(29).getWeak,o=e(3),u=e(4),a=e(44),c=e(58),s=e(24),f=e(13),l=e(39),h=s(5),d=s(6),p=0,v=function(t){return t._l||(t._l=new g)},g=function(){this.a=[]},y=function(t,n){return h(t.a,(function(t){return t[0]===n}))};g.prototype={get:function(t){var n=y(this,t);if(n)return n[1]},has:function(t){return!!y(this,t)},set:function(t,n){var e=y(this,t);e?e[1]=n:this.a.push([t,n])},delete:function(t){var n=d(this.a,(function(n){return n[0]===t}));return~n&&this.a.splice(n,1),!!~n}},t.exports={getConstructor:function(t,n,e,o){var s=t((function(t,r){a(t,s,n,"_i"),t._t=n,t._i=p++,t._l=void 0,null!=r&&c(r,e,t[o],t)}));return r(s.prototype,{delete:function(t){if(!u(t))return!1;var e=i(t);return!0===e?v(l(this,n)).delete(t):e&&f(e,this._i)&&delete e[this._i]},has:function(t){if(!u(t))return!1;var e=i(t);return!0===e?v(l(this,n)).has(t):e&&f(e,this._i)}}),s},def:function(t,n,e){var r=i(o(n),!0);return!0===r?v(t).set(n,e):r[t._i]=e,t},ufstore:v}},function(t,n,e){var r=e(21),i=e(6);t.exports=function(t){if(void 0===t)return 0;var n=r(t),e=i(n);if(n!==e)throw RangeError("Wrong length!");return 
e}},function(t,n,e){var r=e(36),i=e(52),o=e(3),u=e(1).Reflect;t.exports=u&&u.ownKeys||function(t){var n=r.f(o(t)),e=i.f;return e?n.concat(e(t)):n}},function(t,n,e){var r=e(6),i=e(70),o=e(26);t.exports=function(t,n,e,u){var a=String(o(t)),c=a.length,s=void 0===e?" ":String(e),f=r(n);if(f<=c||""==s)return a;var l=f-c,h=i.call(s,Math.ceil(l/s.length));return h.length>l&&(h=h.slice(0,l)),u?h+a:a+h}},function(t,n,e){var r=e(8),i=e(33),o=e(15),u=e(47).f;t.exports=function(t){return function(n){for(var e,a=o(n),c=i(a),s=c.length,f=0,l=[];s>f;)e=c[f++],r&&!u.call(a,e)||l.push(t?[e,a[e]]:a[e]);return l}}},function(t,n){var e=t.exports={version:"2.6.9"};"number"==typeof __e&&(__e=e)},function(t,n){t.exports=function(t){try{return!!t()}catch(t){return!0}}},function(t,n,e){e(130),t.exports=e(90)},function(t,n,e){"use strict";e(131);var r,i=(r=e(303))&&r.__esModule?r:{default:r};i.default._babelPolyfill&&"undefined"!=typeof console&&console.warn&&console.warn("@babel/polyfill is loaded more than once on this page. This is probably not desirable/intended and may have consequences if different versions of the polyfills are applied sequentially. 
If you do need to load the polyfill more than once, use @babel/polyfill/noConflict instead to bypass the warning."),i.default._babelPolyfill=!0},function(t,n,e){"use strict";e(132),e(275),e(277),e(280),e(282),e(284),e(286),e(288),e(290),e(292),e(294),e(296),e(298),e(302)},function(t,n,e){e(133),e(136),e(137),e(138),e(139),e(140),e(141),e(142),e(143),e(144),e(145),e(146),e(147),e(148),e(149),e(150),e(151),e(152),e(153),e(154),e(155),e(156),e(157),e(158),e(159),e(160),e(161),e(162),e(163),e(164),e(165),e(166),e(167),e(168),e(169),e(170),e(171),e(172),e(173),e(174),e(175),e(176),e(177),e(179),e(180),e(181),e(182),e(183),e(184),e(185),e(186),e(187),e(188),e(189),e(190),e(191),e(192),e(193),e(194),e(195),e(196),e(197),e(198),e(199),e(200),e(201),e(202),e(203),e(204),e(205),e(206),e(207),e(208),e(209),e(210),e(211),e(212),e(214),e(215),e(217),e(218),e(219),e(220),e(221),e(222),e(223),e(225),e(226),e(227),e(228),e(229),e(230),e(231),e(232),e(233),e(234),e(235),e(236),e(237),e(82),e(238),e(116),e(239),e(117),e(240),e(241),e(242),e(243),e(118),e(246),e(247),e(248),e(249),e(250),e(251),e(252),e(253),e(254),e(255),e(256),e(257),e(258),e(259),e(260),e(261),e(262),e(263),e(264),e(265),e(266),e(267),e(268),e(269),e(270),e(271),e(272),e(273),e(274),t.exports=e(7)},function(t,n,e){"use strict";var r=e(1),i=e(13),o=e(8),u=e(0),a=e(11),c=e(29).KEY,s=e(2),f=e(50),l=e(40),h=e(31),d=e(5),p=e(63),v=e(97),g=e(135),y=e(53),m=e(3),b=e(4),S=e(10),w=e(15),_=e(28),M=e(30),x=e(35),P=e(100),O=e(22),F=e(52),A=e(9),E=e(33),N=O.f,R=A.f,k=P.f,T=r.Symbol,I=r.JSON,j=I&&I.stringify,L=d("_hidden"),B=d("toPrimitive"),C={}.propertyIsEnumerable,W=f("symbol-registry"),V=f("symbols"),G=f("op-symbols"),D=Object.prototype,U="function"==typeof T&&!!F.f,z=r.QObject,q=!z||!z.prototype||!z.prototype.findChild,K=o&&s((function(){return 7!=x(R({},"a",{get:function(){return R(this,"a",{value:7}).a}})).a}))?function(t,n,e){var r=N(D,n);r&&delete D[n],R(t,n,e),r&&t!==D&&R(D,n,r)}:R,Y=function(t){var 
n=V[t]=x(T.prototype);return n._k=t,n},Q=U&&"symbol"==typeof T.iterator?function(t){return"symbol"==typeof t}:function(t){return t instanceof T},H=function(t,n,e){return t===D&&H(G,n,e),m(t),n=_(n,!0),m(e),i(V,n)?(e.enumerable?(i(t,L)&&t[L][n]&&(t[L][n]=!1),e=x(e,{enumerable:M(0,!1)})):(i(t,L)||R(t,L,M(1,{})),t[L][n]=!0),K(t,n,e)):R(t,n,e)},J=function(t,n){m(t);for(var e,r=g(n=w(n)),i=0,o=r.length;o>i;)H(t,e=r[i++],n[e]);return t},X=function(t){var n=C.call(this,t=_(t,!0));return!(this===D&&i(V,t)&&!i(G,t))&&(!(n||!i(this,t)||!i(V,t)||i(this,L)&&this[L][t])||n)},Z=function(t,n){if(t=w(t),n=_(n,!0),t!==D||!i(V,n)||i(G,n)){var e=N(t,n);return!e||!i(V,n)||i(t,L)&&t[L][n]||(e.enumerable=!0),e}},$=function(t){for(var n,e=k(w(t)),r=[],o=0;e.length>o;)i(V,n=e[o++])||n==L||n==c||r.push(n);return r},tt=function(t){for(var n,e=t===D,r=k(e?G:w(t)),o=[],u=0;r.length>u;)!i(V,n=r[u++])||e&&!i(D,n)||o.push(V[n]);return o};U||(a((T=function(){if(this instanceof T)throw TypeError("Symbol is not a constructor!");var t=h(arguments.length>0?arguments[0]:void 0),n=function(e){this===D&&n.call(G,e),i(this,L)&&i(this[L],t)&&(this[L][t]=!1),K(this,t,M(1,e))};return o&&q&&K(D,t,{configurable:!0,set:n}),Y(t)}).prototype,"toString",(function(){return this._k})),O.f=Z,A.f=H,e(36).f=P.f=$,e(47).f=X,F.f=tt,o&&!e(32)&&a(D,"propertyIsEnumerable",X,!0),p.f=function(t){return Y(d(t))}),u(u.G+u.W+u.F*!U,{Symbol:T});for(var nt="hasInstance,isConcatSpreadable,iterator,match,replace,search,species,split,toPrimitive,toStringTag,unscopables".split(","),et=0;nt.length>et;)d(nt[et++]);for(var rt=E(d.store),it=0;rt.length>it;)v(rt[it++]);u(u.S+u.F*!U,"Symbol",{for:function(t){return i(W,t+="")?W[t]:W[t]=T(t)},keyFor:function(t){if(!Q(t))throw TypeError(t+" is not a symbol!");for(var n in W)if(W[n]===t)return n},useSetter:function(){q=!0},useSimple:function(){q=!1}}),u(u.S+u.F*!U,"Object",{create:function(t,n){return void 
0===n?x(t):J(x(t),n)},defineProperty:H,defineProperties:J,getOwnPropertyDescriptor:Z,getOwnPropertyNames:$,getOwnPropertySymbols:tt});var ot=s((function(){F.f(1)}));u(u.S+u.F*ot,"Object",{getOwnPropertySymbols:function(t){return F.f(S(t))}}),I&&u(u.S+u.F*(!U||s((function(){var t=T();return"[null]"!=j([t])||"{}"!=j({a:t})||"{}"!=j(Object(t))}))),"JSON",{stringify:function(t){for(var n,e,r=[t],i=1;arguments.length>i;)r.push(arguments[i++]);if(e=n=r[1],(b(n)||void 0!==t)&&!Q(t))return y(n)||(n=function(t,n){if("function"==typeof e&&(n=e.call(this,t,n)),!Q(n))return n}),r[1]=n,j.apply(I,r)}}),T.prototype[B]||e(14)(T.prototype,B,T.prototype.valueOf),l(T,"Symbol"),l(Math,"Math",!0),l(r.JSON,"JSON",!0)},function(t,n,e){t.exports=e(50)("native-function-to-string",Function.toString)},function(t,n,e){var r=e(33),i=e(52),o=e(47);t.exports=function(t){var n=r(t),e=i.f;if(e)for(var u,a=e(t),c=o.f,s=0;a.length>s;)c.call(t,u=a[s++])&&n.push(u);return n}},function(t,n,e){var r=e(0);r(r.S,"Object",{create:e(35)})},function(t,n,e){var r=e(0);r(r.S+r.F*!e(8),"Object",{defineProperty:e(9).f})},function(t,n,e){var r=e(0);r(r.S+r.F*!e(8),"Object",{defineProperties:e(99)})},function(t,n,e){var r=e(15),i=e(22).f;e(23)("getOwnPropertyDescriptor",(function(){return function(t,n){return i(r(t),n)}}))},function(t,n,e){var r=e(10),i=e(37);e(23)("getPrototypeOf",(function(){return function(t){return i(r(t))}}))},function(t,n,e){var r=e(10),i=e(33);e(23)("keys",(function(){return function(t){return i(r(t))}}))},function(t,n,e){e(23)("getOwnPropertyNames",(function(){return e(100).f}))},function(t,n,e){var r=e(4),i=e(29).onFreeze;e(23)("freeze",(function(t){return function(n){return t&&r(n)?t(i(n)):n}}))},function(t,n,e){var r=e(4),i=e(29).onFreeze;e(23)("seal",(function(t){return function(n){return t&&r(n)?t(i(n)):n}}))},function(t,n,e){var r=e(4),i=e(29).onFreeze;e(23)("preventExtensions",(function(t){return function(n){return t&&r(n)?t(i(n)):n}}))},function(t,n,e){var 
r=e(4);e(23)("isFrozen",(function(t){return function(n){return!r(n)||!!t&&t(n)}}))},function(t,n,e){var r=e(4);e(23)("isSealed",(function(t){return function(n){return!r(n)||!!t&&t(n)}}))},function(t,n,e){var r=e(4);e(23)("isExtensible",(function(t){return function(n){return!!r(n)&&(!t||t(n))}}))},function(t,n,e){var r=e(0);r(r.S+r.F,"Object",{assign:e(101)})},function(t,n,e){var r=e(0);r(r.S,"Object",{is:e(102)})},function(t,n,e){var r=e(0);r(r.S,"Object",{setPrototypeOf:e(67).set})},function(t,n,e){"use strict";var r=e(48),i={};i[e(5)("toStringTag")]="z",i+""!="[object z]"&&e(11)(Object.prototype,"toString",(function(){return"[object "+r(this)+"]"}),!0)},function(t,n,e){var r=e(0);r(r.P,"Function",{bind:e(103)})},function(t,n,e){var r=e(9).f,i=Function.prototype,o=/^\s*function ([^ (]*)/;"name"in i||e(8)&&r(i,"name",{configurable:!0,get:function(){try{return(""+this).match(o)[1]}catch(t){return""}}})},function(t,n,e){"use strict";var r=e(4),i=e(37),o=e(5)("hasInstance"),u=Function.prototype;o in u||e(9).f(u,o,{value:function(t){if("function"!=typeof this||!r(t))return!1;if(!r(this.prototype))return t instanceof this;for(;t=i(t);)if(this.prototype===t)return!0;return!1}})},function(t,n,e){var r=e(0),i=e(105);r(r.G+r.F*(parseInt!=i),{parseInt:i})},function(t,n,e){var r=e(0),i=e(106);r(r.G+r.F*(parseFloat!=i),{parseFloat:i})},function(t,n,e){"use strict";var r=e(1),i=e(13),o=e(25),u=e(69),a=e(28),c=e(2),s=e(36).f,f=e(22).f,l=e(9).f,h=e(41).trim,d=r.Number,p=d,v=d.prototype,g="Number"==o(e(35)(v)),y="trim"in String.prototype,m=function(t){var n=a(t,!1);if("string"==typeof n&&n.length>2){var e,r,i,o=(n=y?n.trim():h(n,3)).charCodeAt(0);if(43===o||45===o){if(88===(e=n.charCodeAt(2))||120===e)return NaN}else if(48===o){switch(n.charCodeAt(1)){case 66:case 98:r=2,i=49;break;case 79:case 111:r=8,i=55;break;default:return+n}for(var u,c=n.slice(2),s=0,f=c.length;si)return NaN;return parseInt(c,r)}}return+n};if(!d(" 0o1")||!d("0b1")||d("+0x1")){d=function(t){var 
n=arguments.length<1?0:t,e=this;return e instanceof d&&(g?c((function(){v.valueOf.call(e)})):"Number"!=o(e))?u(new p(m(n)),e,d):m(n)};for(var b,S=e(8)?s(p):"MAX_VALUE,MIN_VALUE,NaN,NEGATIVE_INFINITY,POSITIVE_INFINITY,EPSILON,isFinite,isInteger,isNaN,isSafeInteger,MAX_SAFE_INTEGER,MIN_SAFE_INTEGER,parseFloat,parseInt,isInteger".split(","),w=0;S.length>w;w++)i(p,b=S[w])&&!i(d,b)&&l(d,b,f(p,b));d.prototype=v,v.constructor=d,e(11)(r,"Number",d)}},function(t,n,e){"use strict";var r=e(0),i=e(21),o=e(107),u=e(70),a=1..toFixed,c=Math.floor,s=[0,0,0,0,0,0],f="Number.toFixed: incorrect invocation!",l=function(t,n){for(var e=-1,r=n;++e<6;)r+=t*s[e],s[e]=r%1e7,r=c(r/1e7)},h=function(t){for(var n=6,e=0;--n>=0;)e+=s[n],s[n]=c(e/t),e=e%t*1e7},d=function(){for(var t=6,n="";--t>=0;)if(""!==n||0===t||0!==s[t]){var e=String(s[t]);n=""===n?e:n+u.call("0",7-e.length)+e}return n},p=function(t,n,e){return 0===n?e:n%2==1?p(t,n-1,e*t):p(t*t,n/2,e)};r(r.P+r.F*(!!a&&("0.000"!==8e-5.toFixed(3)||"1"!==.9.toFixed(0)||"1.25"!==1.255.toFixed(2)||"1000000000000000128"!==(0xde0b6b3a7640080).toFixed(0))||!e(2)((function(){a.call({})}))),"Number",{toFixed:function(t){var n,e,r,a,c=o(this,f),s=i(t),v="",g="0";if(s<0||s>20)throw RangeError(f);if(c!=c)return"NaN";if(c<=-1e21||c>=1e21)return String(c);if(c<0&&(v="-",c=-c),c>1e-21)if(e=(n=function(t){for(var n=0,e=t;e>=4096;)n+=12,e/=4096;for(;e>=2;)n+=1,e/=2;return n}(c*p(2,69,1))-69)<0?c*p(2,-n,1):c/p(2,n,1),e*=4503599627370496,(n=52-n)>0){for(l(0,e),r=s;r>=7;)l(1e7,0),r-=7;for(l(p(10,r,1),0),r=n-1;r>=23;)h(1<<23),r-=23;h(1<0?v+((a=g.length)<=s?"0."+u.call("0",s-a)+g:g.slice(0,a-s)+"."+g.slice(a-s)):v+g}})},function(t,n,e){"use strict";var r=e(0),i=e(2),o=e(107),u=1..toPrecision;r(r.P+r.F*(i((function(){return"1"!==u.call(1,void 0)}))||!i((function(){u.call({})}))),"Number",{toPrecision:function(t){var n=o(this,"Number#toPrecision: incorrect invocation!");return void 0===t?u.call(n):u.call(n,t)}})},function(t,n,e){var 
r=e(0);r(r.S,"Number",{EPSILON:Math.pow(2,-52)})},function(t,n,e){var r=e(0),i=e(1).isFinite;r(r.S,"Number",{isFinite:function(t){return"number"==typeof t&&i(t)}})},function(t,n,e){var r=e(0);r(r.S,"Number",{isInteger:e(108)})},function(t,n,e){var r=e(0);r(r.S,"Number",{isNaN:function(t){return t!=t}})},function(t,n,e){var r=e(0),i=e(108),o=Math.abs;r(r.S,"Number",{isSafeInteger:function(t){return i(t)&&o(t)<=9007199254740991}})},function(t,n,e){var r=e(0);r(r.S,"Number",{MAX_SAFE_INTEGER:9007199254740991})},function(t,n,e){var r=e(0);r(r.S,"Number",{MIN_SAFE_INTEGER:-9007199254740991})},function(t,n,e){var r=e(0),i=e(106);r(r.S+r.F*(Number.parseFloat!=i),"Number",{parseFloat:i})},function(t,n,e){var r=e(0),i=e(105);r(r.S+r.F*(Number.parseInt!=i),"Number",{parseInt:i})},function(t,n,e){var r=e(0),i=e(109),o=Math.sqrt,u=Math.acosh;r(r.S+r.F*!(u&&710==Math.floor(u(Number.MAX_VALUE))&&u(1/0)==1/0),"Math",{acosh:function(t){return(t=+t)<1?NaN:t>94906265.62425156?Math.log(t)+Math.LN2:i(t-1+o(t-1)*o(t+1))}})},function(t,n,e){var r=e(0),i=Math.asinh;r(r.S+r.F*!(i&&1/i(0)>0),"Math",{asinh:function t(n){return isFinite(n=+n)&&0!=n?n<0?-t(-n):Math.log(n+Math.sqrt(n*n+1)):n}})},function(t,n,e){var r=e(0),i=Math.atanh;r(r.S+r.F*!(i&&1/i(-0)<0),"Math",{atanh:function(t){return 0==(t=+t)?t:Math.log((1+t)/(1-t))/2}})},function(t,n,e){var r=e(0),i=e(71);r(r.S,"Math",{cbrt:function(t){return i(t=+t)*Math.pow(Math.abs(t),1/3)}})},function(t,n,e){var r=e(0);r(r.S,"Math",{clz32:function(t){return(t>>>=0)?31-Math.floor(Math.log(t+.5)*Math.LOG2E):32}})},function(t,n,e){var r=e(0),i=Math.exp;r(r.S,"Math",{cosh:function(t){return(i(t=+t)+i(-t))/2}})},function(t,n,e){var r=e(0),i=e(72);r(r.S+r.F*(i!=Math.expm1),"Math",{expm1:i})},function(t,n,e){var r=e(0);r(r.S,"Math",{fround:e(178)})},function(t,n,e){var r=e(71),i=Math.pow,o=i(2,-52),u=i(2,-23),a=i(2,127)*(2-u),c=i(2,-126);t.exports=Math.fround||function(t){var n,e,i=Math.abs(t),s=r(t);return ia||e!=e?s*(1/0):s*e}},function(t,n,e){var 
r=e(0),i=Math.abs;r(r.S,"Math",{hypot:function(t,n){for(var e,r,o=0,u=0,a=arguments.length,c=0;u0?(r=e/c)*r:e;return c===1/0?1/0:c*Math.sqrt(o)}})},function(t,n,e){var r=e(0),i=Math.imul;r(r.S+r.F*e(2)((function(){return-5!=i(4294967295,5)||2!=i.length})),"Math",{imul:function(t,n){var e=+t,r=+n,i=65535&e,o=65535&r;return 0|i*o+((65535&e>>>16)*o+i*(65535&r>>>16)<<16>>>0)}})},function(t,n,e){var r=e(0);r(r.S,"Math",{log10:function(t){return Math.log(t)*Math.LOG10E}})},function(t,n,e){var r=e(0);r(r.S,"Math",{log1p:e(109)})},function(t,n,e){var r=e(0);r(r.S,"Math",{log2:function(t){return Math.log(t)/Math.LN2}})},function(t,n,e){var r=e(0);r(r.S,"Math",{sign:e(71)})},function(t,n,e){var r=e(0),i=e(72),o=Math.exp;r(r.S+r.F*e(2)((function(){return-2e-17!=!Math.sinh(-2e-17)})),"Math",{sinh:function(t){return Math.abs(t=+t)<1?(i(t)-i(-t))/2:(o(t-1)-o(-t-1))*(Math.E/2)}})},function(t,n,e){var r=e(0),i=e(72),o=Math.exp;r(r.S,"Math",{tanh:function(t){var n=i(t=+t),e=i(-t);return n==1/0?1:e==1/0?-1:(n-e)/(o(t)+o(-t))}})},function(t,n,e){var r=e(0);r(r.S,"Math",{trunc:function(t){return(t>0?Math.floor:Math.ceil)(t)}})},function(t,n,e){var r=e(0),i=e(34),o=String.fromCharCode,u=String.fromCodePoint;r(r.S+r.F*(!!u&&1!=u.length),"String",{fromCodePoint:function(t){for(var n,e=[],r=arguments.length,u=0;r>u;){if(n=+arguments[u++],i(n,1114111)!==n)throw RangeError(n+" is not a valid code point");e.push(n<65536?o(n):o(55296+((n-=65536)>>10),n%1024+56320))}return e.join("")}})},function(t,n,e){var r=e(0),i=e(15),o=e(6);r(r.S,"String",{raw:function(t){for(var n=i(t.raw),e=o(n.length),r=arguments.length,u=[],a=0;e>a;)u.push(String(n[a++])),a=n.length?{value:void 0,done:!0}:(t=r(n,e),this._i+=t.length,{value:t,done:!1})}))},function(t,n,e){"use strict";var r=e(0),i=e(73)(!1);r(r.P,"String",{codePointAt:function(t){return i(this,t)}})},function(t,n,e){"use strict";var r=e(0),i=e(6),o=e(75),u="".endsWith;r(r.P+r.F*e(77)("endsWith"),"String",{endsWith:function(t){var 
n=o(this,t,"endsWith"),e=arguments.length>1?arguments[1]:void 0,r=i(n.length),a=void 0===e?r:Math.min(i(e),r),c=String(t);return u?u.call(n,c,a):n.slice(a-c.length,a)===c}})},function(t,n,e){"use strict";var r=e(0),i=e(75);r(r.P+r.F*e(77)("includes"),"String",{includes:function(t){return!!~i(this,t,"includes").indexOf(t,arguments.length>1?arguments[1]:void 0)}})},function(t,n,e){var r=e(0);r(r.P,"String",{repeat:e(70)})},function(t,n,e){"use strict";var r=e(0),i=e(6),o=e(75),u="".startsWith;r(r.P+r.F*e(77)("startsWith"),"String",{startsWith:function(t){var n=o(this,t,"startsWith"),e=i(Math.min(arguments.length>1?arguments[1]:void 0,n.length)),r=String(t);return u?u.call(n,r,e):n.slice(e,e+r.length)===r}})},function(t,n,e){"use strict";e(12)("anchor",(function(t){return function(n){return t(this,"a","name",n)}}))},function(t,n,e){"use strict";e(12)("big",(function(t){return function(){return t(this,"big","","")}}))},function(t,n,e){"use strict";e(12)("blink",(function(t){return function(){return t(this,"blink","","")}}))},function(t,n,e){"use strict";e(12)("bold",(function(t){return function(){return t(this,"b","","")}}))},function(t,n,e){"use strict";e(12)("fixed",(function(t){return function(){return t(this,"tt","","")}}))},function(t,n,e){"use strict";e(12)("fontcolor",(function(t){return function(n){return t(this,"font","color",n)}}))},function(t,n,e){"use strict";e(12)("fontsize",(function(t){return function(n){return t(this,"font","size",n)}}))},function(t,n,e){"use strict";e(12)("italics",(function(t){return function(){return t(this,"i","","")}}))},function(t,n,e){"use strict";e(12)("link",(function(t){return function(n){return t(this,"a","href",n)}}))},function(t,n,e){"use strict";e(12)("small",(function(t){return function(){return t(this,"small","","")}}))},function(t,n,e){"use strict";e(12)("strike",(function(t){return function(){return t(this,"strike","","")}}))},function(t,n,e){"use strict";e(12)("sub",(function(t){return function(){return 
t(this,"sub","","")}}))},function(t,n,e){"use strict";e(12)("sup",(function(t){return function(){return t(this,"sup","","")}}))},function(t,n,e){var r=e(0);r(r.S,"Date",{now:function(){return(new Date).getTime()}})},function(t,n,e){"use strict";var r=e(0),i=e(10),o=e(28);r(r.P+r.F*e(2)((function(){return null!==new Date(NaN).toJSON()||1!==Date.prototype.toJSON.call({toISOString:function(){return 1}})})),"Date",{toJSON:function(t){var n=i(this),e=o(n);return"number"!=typeof e||isFinite(e)?n.toISOString():null}})},function(t,n,e){var r=e(0),i=e(213);r(r.P+r.F*(Date.prototype.toISOString!==i),"Date",{toISOString:i})},function(t,n,e){"use strict";var r=e(2),i=Date.prototype.getTime,o=Date.prototype.toISOString,u=function(t){return t>9?t:"0"+t};t.exports=r((function(){return"0385-07-25T07:06:39.999Z"!=o.call(new Date(-5e13-1))}))||!r((function(){o.call(new Date(NaN))}))?function(){if(!isFinite(i.call(this)))throw RangeError("Invalid time value");var t=this,n=t.getUTCFullYear(),e=t.getUTCMilliseconds(),r=n<0?"-":n>9999?"+":"";return r+("00000"+Math.abs(n)).slice(r?-6:-4)+"-"+u(t.getUTCMonth()+1)+"-"+u(t.getUTCDate())+"T"+u(t.getUTCHours())+":"+u(t.getUTCMinutes())+":"+u(t.getUTCSeconds())+"."+(e>99?e:"0"+u(e))+"Z"}:o},function(t,n,e){var r=Date.prototype,i=r.toString,o=r.getTime;new Date(NaN)+""!="Invalid Date"&&e(11)(r,"toString",(function(){var t=o.call(this);return t==t?i.call(this):"Invalid Date"}))},function(t,n,e){var r=e(5)("toPrimitive"),i=Date.prototype;r in i||e(14)(i,r,e(216))},function(t,n,e){"use strict";var r=e(3),i=e(28);t.exports=function(t){if("string"!==t&&"number"!==t&&"default"!==t)throw TypeError("Incorrect hint");return i(r(this),"number"!=t)}},function(t,n,e){var r=e(0);r(r.S,"Array",{isArray:e(53)})},function(t,n,e){"use strict";var r=e(19),i=e(0),o=e(10),u=e(111),a=e(78),c=e(6),s=e(79),f=e(80);i(i.S+i.F*!e(54)((function(t){Array.from(t)})),"Array",{from:function(t){var n,e,i,l,h=o(t),d="function"==typeof 
this?this:Array,p=arguments.length,v=p>1?arguments[1]:void 0,g=void 0!==v,y=0,m=f(h);if(g&&(v=r(v,p>2?arguments[2]:void 0,2)),null==m||d==Array&&a(m))for(e=new d(n=c(h.length));n>y;y++)s(e,y,g?v(h[y],y):h[y]);else for(l=m.call(h),e=new d;!(i=l.next()).done;y++)s(e,y,g?u(l,v,[i.value,y],!0):i.value);return e.length=y,e}})},function(t,n,e){"use strict";var r=e(0),i=e(79);r(r.S+r.F*e(2)((function(){function t(){}return!(Array.of.call(t)instanceof t)})),"Array",{of:function(){for(var t=0,n=arguments.length,e=new("function"==typeof this?this:Array)(n);n>t;)i(e,t,arguments[t++]);return e.length=n,e}})},function(t,n,e){"use strict";var r=e(0),i=e(15),o=[].join;r(r.P+r.F*(e(46)!=Object||!e(16)(o)),"Array",{join:function(t){return o.call(i(this),void 0===t?",":t)}})},function(t,n,e){"use strict";var r=e(0),i=e(66),o=e(25),u=e(34),a=e(6),c=[].slice;r(r.P+r.F*e(2)((function(){i&&c.call(i)})),"Array",{slice:function(t,n){var e=a(this.length),r=o(this);if(n=void 0===n?e:n,"Array"==r)return c.call(this,t,n);for(var i=u(t,e),s=u(n,e),f=a(s-i),l=new Array(f),h=0;h1&&(r=Math.min(r,o(arguments[1]))),r<0&&(r=e+r);r>=0;r--)if(r in n&&n[r]===t)return r||0;return-1}})},function(t,n,e){var r=e(0);r(r.P,"Array",{copyWithin:e(114)}),e(38)("copyWithin")},function(t,n,e){var r=e(0);r(r.P,"Array",{fill:e(81)}),e(38)("fill")},function(t,n,e){"use strict";var r=e(0),i=e(24)(5),o=!0;"find"in[]&&Array(1).find((function(){o=!1})),r(r.P+r.F*o,"Array",{find:function(t){return i(this,t,arguments.length>1?arguments[1]:void 0)}}),e(38)("find")},function(t,n,e){"use strict";var r=e(0),i=e(24)(6),o="findIndex",u=!0;o in[]&&Array(1)[o]((function(){u=!1})),r(r.P+r.F*u,"Array",{findIndex:function(t){return i(this,t,arguments.length>1?arguments[1]:void 0)}}),e(38)(o)},function(t,n,e){e(43)("Array")},function(t,n,e){var r=e(1),i=e(69),o=e(9).f,u=e(36).f,a=e(76),c=e(55),s=r.RegExp,f=s,l=s.prototype,h=/a/g,d=/a/g,p=new s(h)!==h;if(e(8)&&(!p||e(2)((function(){return 
d[e(5)("match")]=!1,s(h)!=h||s(d)==d||"/a/i"!=s(h,"i")})))){s=function(t,n){var e=this instanceof s,r=a(t),o=void 0===n;return!e&&r&&t.constructor===s&&o?t:i(p?new f(r&&!o?t.source:t,n):f((r=t instanceof s)?t.source:t,r&&o?c.call(t):n),e?this:l,s)};for(var v=function(t){t in s||o(s,t,{configurable:!0,get:function(){return f[t]},set:function(n){f[t]=n}})},g=u(f),y=0;g.length>y;)v(g[y++]);l.constructor=s,s.prototype=l,e(11)(r,"RegExp",s)}e(43)("RegExp")},function(t,n,e){"use strict";e(117);var r=e(3),i=e(55),o=e(8),u=/./.toString,a=function(t){e(11)(RegExp.prototype,"toString",t,!0)};e(2)((function(){return"/a/b"!=u.call({source:"a",flags:"b"})}))?a((function(){var t=r(this);return"/".concat(t.source,"/","flags"in t?t.flags:!o&&t instanceof RegExp?i.call(t):void 0)})):"toString"!=u.name&&a((function(){return u.call(this)}))},function(t,n,e){"use strict";var r=e(3),i=e(6),o=e(84),u=e(56);e(57)("match",1,(function(t,n,e,a){return[function(e){var r=t(this),i=null==e?void 0:e[n];return void 0!==i?i.call(e,r):new RegExp(e)[n](String(r))},function(t){var n=a(e,t,this);if(n.done)return n.value;var c=r(t),s=String(this);if(!c.global)return u(c,s);var f=c.unicode;c.lastIndex=0;for(var l,h=[],d=0;null!==(l=u(c,s));){var p=String(l[0]);h[d]=p,""===p&&(c.lastIndex=o(s,i(c.lastIndex),f)),d++}return 0===d?null:h}]}))},function(t,n,e){"use strict";var r=e(3),i=e(10),o=e(6),u=e(21),a=e(84),c=e(56),s=Math.max,f=Math.min,l=Math.floor,h=/\$([$&`']|\d\d?|<[^>]*>)/g,d=/\$([$&`']|\d\d?)/g;e(57)("replace",2,(function(t,n,e,p){return[function(r,i){var o=t(this),u=null==r?void 0:r[n];return void 0!==u?u.call(r,o,i):e.call(String(o),r,i)},function(t,n){var i=p(e,t,this,n);if(i.done)return i.value;var l=r(t),h=String(this),d="function"==typeof n;d||(n=String(n));var g=l.global;if(g){var y=l.unicode;l.lastIndex=0}for(var m=[];;){var b=c(l,h);if(null===b)break;if(m.push(b),!g)break;""===String(b[0])&&(l.lastIndex=a(h,o(l.lastIndex),y))}for(var 
S,w="",_=0,M=0;M=_&&(w+=h.slice(_,P)+N,_=P+x.length)}return w+h.slice(_)}];function v(t,n,r,o,u,a){var c=r+t.length,s=o.length,f=d;return void 0!==u&&(u=i(u),f=h),e.call(a,f,(function(e,i){var a;switch(i.charAt(0)){case"$":return"$";case"&":return t;case"`":return n.slice(0,r);case"'":return n.slice(c);case"<":a=u[i.slice(1,-1)];break;default:var f=+i;if(0===f)return e;if(f>s){var h=l(f/10);return 0===h?e:h<=s?void 0===o[h-1]?i.charAt(1):o[h-1]+i.charAt(1):e}a=o[f-1]}return void 0===a?"":a}))}}))},function(t,n,e){"use strict";var r=e(3),i=e(102),o=e(56);e(57)("search",1,(function(t,n,e,u){return[function(e){var r=t(this),i=null==e?void 0:e[n];return void 0!==i?i.call(e,r):new RegExp(e)[n](String(r))},function(t){var n=u(e,t,this);if(n.done)return n.value;var a=r(t),c=String(this),s=a.lastIndex;i(s,0)||(a.lastIndex=0);var f=o(a,c);return i(a.lastIndex,s)||(a.lastIndex=s),null===f?-1:f.index}]}))},function(t,n,e){"use strict";var r=e(76),i=e(3),o=e(49),u=e(84),a=e(6),c=e(56),s=e(83),f=e(2),l=Math.min,h=[].push,d=!f((function(){RegExp(4294967295,"y")}));e(57)("split",2,(function(t,n,e,f){var p;return p="c"=="abbc".split(/(b)*/)[1]||4!="test".split(/(?:)/,-1).length||2!="ab".split(/(?:ab)*/).length||4!=".".split(/(.?)(.?)/).length||".".split(/()()/).length>1||"".split(/.?/).length?function(t,n){var i=String(this);if(void 0===t&&0===n)return[];if(!r(t))return e.call(i,t,n);for(var o,u,a,c=[],f=(t.ignoreCase?"i":"")+(t.multiline?"m":"")+(t.unicode?"u":"")+(t.sticky?"y":""),l=0,d=void 0===n?4294967295:n>>>0,p=new RegExp(t.source,f+"g");(o=s.call(p,i))&&!((u=p.lastIndex)>l&&(c.push(i.slice(l,o.index)),o.length>1&&o.index=d));)p.lastIndex===o.index&&p.lastIndex++;return l===i.length?!a&&p.test("")||c.push(""):c.push(i.slice(l)),c.length>d?c.slice(0,d):c}:"0".split(void 0,0).length?function(t,n){return void 0===t&&0===n?[]:e.call(this,t,n)}:e,[function(e,r){var i=t(this),o=null==e?void 0:e[n];return void 0!==o?o.call(e,i,r):p.call(String(i),e,r)},function(t,n){var 
r=f(p,t,this,n,p!==e);if(r.done)return r.value;var s=i(t),h=String(this),v=o(s,RegExp),g=s.unicode,y=(s.ignoreCase?"i":"")+(s.multiline?"m":"")+(s.unicode?"u":"")+(d?"y":"g"),m=new v(d?s:"^(?:"+s.source+")",y),b=void 0===n?4294967295:n>>>0;if(0===b)return[];if(0===h.length)return null===c(m,h)?[h]:[];for(var S=0,w=0,_=[];w0?arguments[0]:void 0)}}),{get:function(t){var n=r.getEntry(i(this,"Map"),t);return n&&n.v},set:function(t,n){return r.def(i(this,"Map"),0===t?0:t,n)}},r,!0)},function(t,n,e){"use strict";var r=e(121),i=e(39);t.exports=e(60)("Set",(function(t){return function(){return t(this,arguments.length>0?arguments[0]:void 0)}}),{add:function(t){return r.def(i(this,"Set"),t=0===t?0:t,t)}},r)},function(t,n,e){"use strict";var r,i=e(1),o=e(24)(0),u=e(11),a=e(29),c=e(101),s=e(122),f=e(4),l=e(39),h=e(39),d=!i.ActiveXObject&&"ActiveXObject"in i,p=a.getWeak,v=Object.isExtensible,g=s.ufstore,y=function(t){return function(){return t(this,arguments.length>0?arguments[0]:void 0)}},m={get:function(t){if(f(t)){var n=p(t);return!0===n?g(l(this,"WeakMap")).get(t):n?n[this._i]:void 0}},set:function(t,n){return s.def(l(this,"WeakMap"),t,n)}},b=t.exports=e(60)("WeakMap",y,m,s,!0,!0);h&&d&&(c((r=s.getConstructor(y,"WeakMap")).prototype,m),a.NEED=!0,o(["delete","has","get","set"],(function(t){var n=b.prototype,e=n[t];u(n,t,(function(n,i){if(f(n)&&!v(n)){this._f||(this._f=new r);var o=this._f[t](n,i);return"set"==t?this:o}return e.call(this,n,i)}))})))},function(t,n,e){"use strict";var r=e(122),i=e(39);e(60)("WeakSet",(function(t){return function(){return t(this,arguments.length>0?arguments[0]:void 0)}}),{add:function(t){return r.def(i(this,"WeakSet"),t,!0)}},r,!1,!0)},function(t,n,e){"use strict";var r=e(0),i=e(61),o=e(86),u=e(3),a=e(34),c=e(6),s=e(4),f=e(1).ArrayBuffer,l=e(49),h=o.ArrayBuffer,d=o.DataView,p=i.ABV&&f.isView,v=h.prototype.slice,g=i.VIEW;r(r.G+r.W+r.F*(f!==h),{ArrayBuffer:h}),r(r.S+r.F*!i.CONSTR,"ArrayBuffer",{isView:function(t){return p&&p(t)||s(t)&&g in 
t}}),r(r.P+r.U+r.F*e(2)((function(){return!new h(2).slice(1,void 0).byteLength})),"ArrayBuffer",{slice:function(t,n){if(void 0!==v&&void 0===n)return v.call(u(this),t);for(var e=u(this).byteLength,r=a(t,e),i=a(void 0===n?e:n,e),o=new(l(this,h))(c(i-r)),s=new d(this),f=new d(o),p=0;r=n.length)return{value:void 0,done:!0}}while(!((t=n[this._i++])in this._t));return{value:t,done:!1}})),r(r.S,"Reflect",{enumerate:function(t){return new o(t)}})},function(t,n,e){var r=e(22),i=e(37),o=e(13),u=e(0),a=e(4),c=e(3);u(u.S,"Reflect",{get:function t(n,e){var u,s,f=arguments.length<3?n:arguments[2];return c(n)===f?n[e]:(u=r.f(n,e))?o(u,"value")?u.value:void 0!==u.get?u.get.call(f):void 0:a(s=i(n))?t(s,e,f):void 0}})},function(t,n,e){var r=e(22),i=e(0),o=e(3);i(i.S,"Reflect",{getOwnPropertyDescriptor:function(t,n){return r.f(o(t),n)}})},function(t,n,e){var r=e(0),i=e(37),o=e(3);r(r.S,"Reflect",{getPrototypeOf:function(t){return i(o(t))}})},function(t,n,e){var r=e(0);r(r.S,"Reflect",{has:function(t,n){return n in t}})},function(t,n,e){var r=e(0),i=e(3),o=Object.isExtensible;r(r.S,"Reflect",{isExtensible:function(t){return i(t),!o||o(t)}})},function(t,n,e){var r=e(0);r(r.S,"Reflect",{ownKeys:e(124)})},function(t,n,e){var r=e(0),i=e(3),o=Object.preventExtensions;r(r.S,"Reflect",{preventExtensions:function(t){i(t);try{return o&&o(t),!0}catch(t){return!1}}})},function(t,n,e){var r=e(9),i=e(22),o=e(37),u=e(13),a=e(0),c=e(30),s=e(3),f=e(4);a(a.S,"Reflect",{set:function t(n,e,a){var l,h,d=arguments.length<4?n:arguments[3],p=i.f(s(n),e);if(!p){if(f(h=o(n)))return t(h,e,a,d);p=c(0)}if(u(p,"value")){if(!1===p.writable||!f(d))return!1;if(l=i.f(d,e)){if(l.get||l.set||!1===l.writable)return!1;l.value=a,r.f(d,e,l)}else r.f(d,e,c(0,a));return!0}return void 0!==p.set&&(p.set.call(d,a),!0)}})},function(t,n,e){var r=e(0),i=e(67);i&&r(r.S,"Reflect",{setPrototypeOf:function(t,n){i.check(t,n);try{return 
i.set(t,n),!0}catch(t){return!1}}})},function(t,n,e){e(276),t.exports=e(7).Array.includes},function(t,n,e){"use strict";var r=e(0),i=e(51)(!0);r(r.P,"Array",{includes:function(t){return i(this,t,arguments.length>1?arguments[1]:void 0)}}),e(38)("includes")},function(t,n,e){e(278),t.exports=e(7).Array.flatMap},function(t,n,e){"use strict";var r=e(0),i=e(279),o=e(10),u=e(6),a=e(20),c=e(112);r(r.P,"Array",{flatMap:function(t){var n,e,r=o(this);return a(t),n=u(r.length),e=c(r,0),i(e,r,r,n,0,1,t,arguments[1]),e}}),e(38)("flatMap")},function(t,n,e){"use strict";var r=e(53),i=e(4),o=e(6),u=e(19),a=e(5)("isConcatSpreadable");t.exports=function t(n,e,c,s,f,l,h,d){for(var p,v,g=f,y=0,m=!!h&&u(h,d,3);y0)g=t(n,e,p,o(p.length),g,l-1)-1;else{if(g>=9007199254740991)throw TypeError();n[g]=p}g++}y++}return g}},function(t,n,e){e(281),t.exports=e(7).String.padStart},function(t,n,e){"use strict";var r=e(0),i=e(125),o=e(59),u=/Version\/10\.\d+(\.\d+)?( Mobile\/\w+)? Safari\//.test(o);r(r.P+r.F*u,"String",{padStart:function(t){return i(this,t,arguments.length>1?arguments[1]:void 0,!0)}})},function(t,n,e){e(283),t.exports=e(7).String.padEnd},function(t,n,e){"use strict";var r=e(0),i=e(125),o=e(59),u=/Version\/10\.\d+(\.\d+)?( Mobile\/\w+)? 
Safari\//.test(o);r(r.P+r.F*u,"String",{padEnd:function(t){return i(this,t,arguments.length>1?arguments[1]:void 0,!1)}})},function(t,n,e){e(285),t.exports=e(7).String.trimLeft},function(t,n,e){"use strict";e(41)("trimLeft",(function(t){return function(){return t(this,1)}}),"trimStart")},function(t,n,e){e(287),t.exports=e(7).String.trimRight},function(t,n,e){"use strict";e(41)("trimRight",(function(t){return function(){return t(this,2)}}),"trimEnd")},function(t,n,e){e(289),t.exports=e(63).f("asyncIterator")},function(t,n,e){e(97)("asyncIterator")},function(t,n,e){e(291),t.exports=e(7).Object.getOwnPropertyDescriptors},function(t,n,e){var r=e(0),i=e(124),o=e(15),u=e(22),a=e(79);r(r.S,"Object",{getOwnPropertyDescriptors:function(t){for(var n,e,r=o(t),c=u.f,s=i(r),f={},l=0;s.length>l;)void 0!==(e=c(r,n=s[l++]))&&a(f,n,e);return f}})},function(t,n,e){e(293),t.exports=e(7).Object.values},function(t,n,e){var r=e(0),i=e(126)(!1);r(r.S,"Object",{values:function(t){return i(t)}})},function(t,n,e){e(295),t.exports=e(7).Object.entries},function(t,n,e){var r=e(0),i=e(126)(!0);r(r.S,"Object",{entries:function(t){return i(t)}})},function(t,n,e){"use strict";e(118),e(297),t.exports=e(7).Promise.finally},function(t,n,e){"use strict";var r=e(0),i=e(7),o=e(1),u=e(49),a=e(120);r(r.P+r.R,"Promise",{finally:function(t){var n=u(this,i.Promise||o.Promise),e="function"==typeof t;return this.then(e?function(e){return a(n,t()).then((function(){return e}))}:t,e?function(e){return a(n,t()).then((function(){throw e}))}:t)}})},function(t,n,e){e(299),e(300),e(301),t.exports=e(7)},function(t,n,e){var r=e(1),i=e(0),o=e(59),u=[].slice,a=/MSIE .\./.test(o),c=function(t){return function(n,e){var r=arguments.length>2,i=!!r&&u.call(arguments,2);return t(r?function(){("function"==typeof n?n:Function(n)).apply(this,i)}:n,e)}};i(i.G+i.B+i.F*a,{setTimeout:c(r.setTimeout),setInterval:c(r.setInterval)})},function(t,n,e){var 
r=e(0),i=e(85);r(r.G+r.B,{setImmediate:i.set,clearImmediate:i.clear})},function(t,n,e){for(var r=e(82),i=e(33),o=e(11),u=e(1),a=e(14),c=e(42),s=e(5),f=s("iterator"),l=s("toStringTag"),h=c.Array,d={CSSRuleList:!0,CSSStyleDeclaration:!1,CSSValueList:!1,ClientRectList:!1,DOMRectList:!1,DOMStringList:!1,DOMTokenList:!0,DataTransferItemList:!1,FileList:!1,HTMLAllCollection:!1,HTMLCollection:!1,HTMLFormElement:!1,HTMLSelectElement:!1,MediaList:!0,MimeTypeArray:!1,NamedNodeMap:!1,NodeList:!0,PaintRequestList:!1,Plugin:!1,PluginArray:!1,SVGLengthList:!1,SVGNumberList:!1,SVGPathSegList:!1,SVGPointList:!1,SVGStringList:!1,SVGTransformList:!1,SourceBufferList:!1,StyleSheetList:!0,TextTrackCueList:!1,TextTrackList:!1,TouchList:!1},p=i(d),v=0;v=0;--o){var u=this.tryEntries[o],a=u.completion;if("root"===u.tryLoc)return i("end");if(u.tryLoc<=this.prev){var c=r.call(u,"catchLoc"),s=r.call(u,"finallyLoc");if(c&&s){if(this.prev=0;--e){var i=this.tryEntries[e];if(i.tryLoc<=this.prev&&r.call(i,"finallyLoc")&&this.prev=0;--n){var e=this.tryEntries[n];if(e.finallyLoc===t)return this.complete(e.completion,e.afterLoc),O(e),p}},catch:function(t){for(var n=this.tryEntries.length-1;n>=0;--n){var e=this.tryEntries[n];if(e.tryLoc===t){var r=e.completion;if("throw"===r.type){var i=r.arg;O(e)}return i}}throw new Error("illegal catch attempt")},delegateYield:function(t,e,r){return this.delegate={iterator:A(t),resultName:e,nextLoc:r},"next"===this.method&&(this.arg=n),p}},t}(t.exports);try{regeneratorRuntime=r}catch(t){Function("r","regeneratorRuntime = r")(r)}},function(t,n,e){e(304),t.exports=e(127).global},function(t,n,e){var r=e(305);r(r.G,{global:e(87)})},function(t,n,e){var r=e(87),i=e(127),o=e(306),u=e(308),a=e(315),c=function(t,n,e){var s,f,l,h=t&c.F,d=t&c.G,p=t&c.S,v=t&c.P,g=t&c.B,y=t&c.W,m=d?i:i[n]||(i[n]={}),b=m.prototype,S=d?r:p?r[n]:(r[n]||{}).prototype;for(s in d&&(e=n),e)(f=!h&&S&&void 0!==S[s])&&a(m,s)||(l=f?S[s]:e[s],m[s]=d&&"function"!=typeof 
S[s]?e[s]:g&&f?o(l,r):y&&S[s]==l?function(t){var n=function(n,e,r){if(this instanceof t){switch(arguments.length){case 0:return new t;case 1:return new t(n);case 2:return new t(n,e)}return new t(n,e,r)}return t.apply(this,arguments)};return n.prototype=t.prototype,n}(l):v&&"function"==typeof l?o(Function.call,l):l,v&&((m.virtual||(m.virtual={}))[s]=l,t&c.R&&b&&!b[s]&&u(b,s,l)))};c.F=1,c.G=2,c.S=4,c.P=8,c.B=16,c.W=32,c.U=64,c.R=128,t.exports=c},function(t,n,e){var r=e(307);t.exports=function(t,n,e){if(r(t),void 0===n)return t;switch(e){case 1:return function(e){return t.call(n,e)};case 2:return function(e,r){return t.call(n,e,r)};case 3:return function(e,r,i){return t.call(n,e,r,i)}}return function(){return t.apply(n,arguments)}}},function(t,n){t.exports=function(t){if("function"!=typeof t)throw TypeError(t+" is not a function!");return t}},function(t,n,e){var r=e(309),i=e(314);t.exports=e(89)?function(t,n,e){return r.f(t,n,i(1,e))}:function(t,n,e){return t[n]=e,t}},function(t,n,e){var r=e(310),i=e(311),o=e(313),u=Object.defineProperty;n.f=e(89)?Object.defineProperty:function(t,n,e){if(r(t),n=o(n,!0),r(e),i)try{return u(t,n,e)}catch(t){}if("get"in e||"set"in e)throw TypeError("Accessors not supported!");return"value"in e&&(t[n]=e.value),t}},function(t,n,e){var r=e(88);t.exports=function(t){if(!r(t))throw TypeError(t+" is not an object!");return t}},function(t,n,e){t.exports=!e(89)&&!e(128)((function(){return 7!=Object.defineProperty(e(312)("div"),"a",{get:function(){return 7}}).a}))},function(t,n,e){var r=e(88),i=e(87).document,o=r(i)&&r(i.createElement);t.exports=function(t){return o?i.createElement(t):{}}},function(t,n,e){var r=e(88);t.exports=function(t,n){if(!r(t))return t;var e,i;if(n&&"function"==typeof(e=t.toString)&&!r(i=e.call(t)))return i;if("function"==typeof(e=t.valueOf)&&!r(i=e.call(t)))return i;if(!n&&"function"==typeof(e=t.toString)&&!r(i=e.call(t)))return i;throw TypeError("Can't convert object to primitive 
value")}},function(t,n){t.exports=function(t,n){return{enumerable:!(1&t),configurable:!(2&t),writable:!(4&t),value:n}}},function(t,n){var e={}.hasOwnProperty;t.exports=function(t,n){return e.call(t,n)}}])})); \ No newline at end of file diff --git a/amplify/functions/downloadDocument/node_modules/bowser/es5.js b/amplify/functions/downloadDocument/node_modules/bowser/es5.js new file mode 100644 index 0000000..bb8ec3d --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/bowser/es5.js @@ -0,0 +1 @@ +!function(e,t){"object"==typeof exports&&"object"==typeof module?module.exports=t():"function"==typeof define&&define.amd?define([],t):"object"==typeof exports?exports.bowser=t():e.bowser=t()}(this,(function(){return function(e){var t={};function r(n){if(t[n])return t[n].exports;var i=t[n]={i:n,l:!1,exports:{}};return e[n].call(i.exports,i,i.exports,r),i.l=!0,i.exports}return r.m=e,r.c=t,r.d=function(e,t,n){r.o(e,t)||Object.defineProperty(e,t,{enumerable:!0,get:n})},r.r=function(e){"undefined"!=typeof Symbol&&Symbol.toStringTag&&Object.defineProperty(e,Symbol.toStringTag,{value:"Module"}),Object.defineProperty(e,"__esModule",{value:!0})},r.t=function(e,t){if(1&t&&(e=r(e)),8&t)return e;if(4&t&&"object"==typeof e&&e&&e.__esModule)return e;var n=Object.create(null);if(r.r(n),Object.defineProperty(n,"default",{enumerable:!0,value:e}),2&t&&"string"!=typeof e)for(var i in e)r.d(n,i,function(t){return e[t]}.bind(null,i));return n},r.n=function(e){var t=e&&e.__esModule?function(){return e.default}:function(){return e};return r.d(t,"a",t),t},r.o=function(e,t){return Object.prototype.hasOwnProperty.call(e,t)},r.p="",r(r.s=90)}({17:function(e,t,r){"use strict";t.__esModule=!0,t.default=void 0;var n=r(18),i=function(){function e(){}return e.getFirstMatch=function(e,t){var r=t.match(e);return r&&r.length>0&&r[1]||""},e.getSecondMatch=function(e,t){var r=t.match(e);return r&&r.length>1&&r[2]||""},e.matchAndReturnConst=function(e,t,r){if(e.test(t))return 
r},e.getWindowsVersionName=function(e){switch(e){case"NT":return"NT";case"XP":return"XP";case"NT 5.0":return"2000";case"NT 5.1":return"XP";case"NT 5.2":return"2003";case"NT 6.0":return"Vista";case"NT 6.1":return"7";case"NT 6.2":return"8";case"NT 6.3":return"8.1";case"NT 10.0":return"10";default:return}},e.getMacOSVersionName=function(e){var t=e.split(".").splice(0,2).map((function(e){return parseInt(e,10)||0}));if(t.push(0),10===t[0])switch(t[1]){case 5:return"Leopard";case 6:return"Snow Leopard";case 7:return"Lion";case 8:return"Mountain Lion";case 9:return"Mavericks";case 10:return"Yosemite";case 11:return"El Capitan";case 12:return"Sierra";case 13:return"High Sierra";case 14:return"Mojave";case 15:return"Catalina";default:return}},e.getAndroidVersionName=function(e){var t=e.split(".").splice(0,2).map((function(e){return parseInt(e,10)||0}));if(t.push(0),!(1===t[0]&&t[1]<5))return 1===t[0]&&t[1]<6?"Cupcake":1===t[0]&&t[1]>=6?"Donut":2===t[0]&&t[1]<2?"Eclair":2===t[0]&&2===t[1]?"Froyo":2===t[0]&&t[1]>2?"Gingerbread":3===t[0]?"Honeycomb":4===t[0]&&t[1]<1?"Ice Cream Sandwich":4===t[0]&&t[1]<4?"Jelly Bean":4===t[0]&&t[1]>=4?"KitKat":5===t[0]?"Lollipop":6===t[0]?"Marshmallow":7===t[0]?"Nougat":8===t[0]?"Oreo":9===t[0]?"Pie":void 0},e.getVersionPrecision=function(e){return e.split(".").length},e.compareVersions=function(t,r,n){void 0===n&&(n=!1);var i=e.getVersionPrecision(t),s=e.getVersionPrecision(r),a=Math.max(i,s),o=0,u=e.map([t,r],(function(t){var r=a-e.getVersionPrecision(t),n=t+new Array(r+1).join(".0");return e.map(n.split("."),(function(e){return new Array(20-e.length).join("0")+e})).reverse()}));for(n&&(o=a-Math.min(i,s)),a-=1;a>=o;){if(u[0][a]>u[1][a])return 1;if(u[0][a]===u[1][a]){if(a===o)return 0;a-=1}else if(u[0][a]1?i-1:0),a=1;a0){var a=Object.keys(r),u=o.default.find(a,(function(e){return t.isOS(e)}));if(u){var d=this.satisfies(r[u]);if(void 0!==d)return d}var c=o.default.find(a,(function(e){return t.isPlatform(e)}));if(c){var 
f=this.satisfies(r[c]);if(void 0!==f)return f}}if(s>0){var l=Object.keys(i),h=o.default.find(l,(function(e){return t.isBrowser(e,!0)}));if(void 0!==h)return this.compareVersion(i[h])}},t.isBrowser=function(e,t){void 0===t&&(t=!1);var r=this.getBrowserName().toLowerCase(),n=e.toLowerCase(),i=o.default.getBrowserTypeByAlias(n);return t&&i&&(n=i.toLowerCase()),n===r},t.compareVersion=function(e){var t=[0],r=e,n=!1,i=this.getBrowserVersion();if("string"==typeof i)return">"===e[0]||"<"===e[0]?(r=e.substr(1),"="===e[1]?(n=!0,r=e.substr(2)):t=[],">"===e[0]?t.push(1):t.push(-1)):"="===e[0]?r=e.substr(1):"~"===e[0]&&(n=!0,r=e.substr(1)),t.indexOf(o.default.compareVersions(i,r,n))>-1},t.isOS=function(e){return this.getOSName(!0)===String(e).toLowerCase()},t.isPlatform=function(e){return this.getPlatformType(!0)===String(e).toLowerCase()},t.isEngine=function(e){return this.getEngineName(!0)===String(e).toLowerCase()},t.is=function(e,t){return void 0===t&&(t=!1),this.isBrowser(e,t)||this.isOS(e)||this.isPlatform(e)},t.some=function(e){var t=this;return void 0===e&&(e=[]),e.some((function(e){return t.is(e)}))},e}();t.default=d,e.exports=t.default},92:function(e,t,r){"use strict";t.__esModule=!0,t.default=void 0;var n,i=(n=r(17))&&n.__esModule?n:{default:n};var s=/version\/(\d+(\.?_?\d+)+)/i,a=[{test:[/googlebot/i],describe:function(e){var t={name:"Googlebot"},r=i.default.getFirstMatch(/googlebot\/(\d+(\.\d+))/i,e)||i.default.getFirstMatch(s,e);return r&&(t.version=r),t}},{test:[/opera/i],describe:function(e){var t={name:"Opera"},r=i.default.getFirstMatch(s,e)||i.default.getFirstMatch(/(?:opera)[\s/](\d+(\.?_?\d+)+)/i,e);return r&&(t.version=r),t}},{test:[/opr\/|opios/i],describe:function(e){var t={name:"Opera"},r=i.default.getFirstMatch(/(?:opr|opios)[\s/](\S+)/i,e)||i.default.getFirstMatch(s,e);return r&&(t.version=r),t}},{test:[/SamsungBrowser/i],describe:function(e){var t={name:"Samsung Internet for 
Android"},r=i.default.getFirstMatch(s,e)||i.default.getFirstMatch(/(?:SamsungBrowser)[\s/](\d+(\.?_?\d+)+)/i,e);return r&&(t.version=r),t}},{test:[/Whale/i],describe:function(e){var t={name:"NAVER Whale Browser"},r=i.default.getFirstMatch(s,e)||i.default.getFirstMatch(/(?:whale)[\s/](\d+(?:\.\d+)+)/i,e);return r&&(t.version=r),t}},{test:[/MZBrowser/i],describe:function(e){var t={name:"MZ Browser"},r=i.default.getFirstMatch(/(?:MZBrowser)[\s/](\d+(?:\.\d+)+)/i,e)||i.default.getFirstMatch(s,e);return r&&(t.version=r),t}},{test:[/focus/i],describe:function(e){var t={name:"Focus"},r=i.default.getFirstMatch(/(?:focus)[\s/](\d+(?:\.\d+)+)/i,e)||i.default.getFirstMatch(s,e);return r&&(t.version=r),t}},{test:[/swing/i],describe:function(e){var t={name:"Swing"},r=i.default.getFirstMatch(/(?:swing)[\s/](\d+(?:\.\d+)+)/i,e)||i.default.getFirstMatch(s,e);return r&&(t.version=r),t}},{test:[/coast/i],describe:function(e){var t={name:"Opera Coast"},r=i.default.getFirstMatch(s,e)||i.default.getFirstMatch(/(?:coast)[\s/](\d+(\.?_?\d+)+)/i,e);return r&&(t.version=r),t}},{test:[/opt\/\d+(?:.?_?\d+)+/i],describe:function(e){var t={name:"Opera Touch"},r=i.default.getFirstMatch(/(?:opt)[\s/](\d+(\.?_?\d+)+)/i,e)||i.default.getFirstMatch(s,e);return r&&(t.version=r),t}},{test:[/yabrowser/i],describe:function(e){var t={name:"Yandex Browser"},r=i.default.getFirstMatch(/(?:yabrowser)[\s/](\d+(\.?_?\d+)+)/i,e)||i.default.getFirstMatch(s,e);return r&&(t.version=r),t}},{test:[/ucbrowser/i],describe:function(e){var t={name:"UC Browser"},r=i.default.getFirstMatch(s,e)||i.default.getFirstMatch(/(?:ucbrowser)[\s/](\d+(\.?_?\d+)+)/i,e);return r&&(t.version=r),t}},{test:[/Maxthon|mxios/i],describe:function(e){var t={name:"Maxthon"},r=i.default.getFirstMatch(s,e)||i.default.getFirstMatch(/(?:Maxthon|mxios)[\s/](\d+(\.?_?\d+)+)/i,e);return r&&(t.version=r),t}},{test:[/epiphany/i],describe:function(e){var 
t={name:"Epiphany"},r=i.default.getFirstMatch(s,e)||i.default.getFirstMatch(/(?:epiphany)[\s/](\d+(\.?_?\d+)+)/i,e);return r&&(t.version=r),t}},{test:[/puffin/i],describe:function(e){var t={name:"Puffin"},r=i.default.getFirstMatch(s,e)||i.default.getFirstMatch(/(?:puffin)[\s/](\d+(\.?_?\d+)+)/i,e);return r&&(t.version=r),t}},{test:[/sleipnir/i],describe:function(e){var t={name:"Sleipnir"},r=i.default.getFirstMatch(s,e)||i.default.getFirstMatch(/(?:sleipnir)[\s/](\d+(\.?_?\d+)+)/i,e);return r&&(t.version=r),t}},{test:[/k-meleon/i],describe:function(e){var t={name:"K-Meleon"},r=i.default.getFirstMatch(s,e)||i.default.getFirstMatch(/(?:k-meleon)[\s/](\d+(\.?_?\d+)+)/i,e);return r&&(t.version=r),t}},{test:[/micromessenger/i],describe:function(e){var t={name:"WeChat"},r=i.default.getFirstMatch(/(?:micromessenger)[\s/](\d+(\.?_?\d+)+)/i,e)||i.default.getFirstMatch(s,e);return r&&(t.version=r),t}},{test:[/qqbrowser/i],describe:function(e){var t={name:/qqbrowserlite/i.test(e)?"QQ Browser Lite":"QQ Browser"},r=i.default.getFirstMatch(/(?:qqbrowserlite|qqbrowser)[/](\d+(\.?_?\d+)+)/i,e)||i.default.getFirstMatch(s,e);return r&&(t.version=r),t}},{test:[/msie|trident/i],describe:function(e){var t={name:"Internet Explorer"},r=i.default.getFirstMatch(/(?:msie |rv:)(\d+(\.?_?\d+)+)/i,e);return r&&(t.version=r),t}},{test:[/\sedg\//i],describe:function(e){var t={name:"Microsoft Edge"},r=i.default.getFirstMatch(/\sedg\/(\d+(\.?_?\d+)+)/i,e);return r&&(t.version=r),t}},{test:[/edg([ea]|ios)/i],describe:function(e){var t={name:"Microsoft Edge"},r=i.default.getSecondMatch(/edg([ea]|ios)\/(\d+(\.?_?\d+)+)/i,e);return r&&(t.version=r),t}},{test:[/vivaldi/i],describe:function(e){var t={name:"Vivaldi"},r=i.default.getFirstMatch(/vivaldi\/(\d+(\.?_?\d+)+)/i,e);return r&&(t.version=r),t}},{test:[/seamonkey/i],describe:function(e){var t={name:"SeaMonkey"},r=i.default.getFirstMatch(/seamonkey\/(\d+(\.?_?\d+)+)/i,e);return r&&(t.version=r),t}},{test:[/sailfish/i],describe:function(e){var 
t={name:"Sailfish"},r=i.default.getFirstMatch(/sailfish\s?browser\/(\d+(\.\d+)?)/i,e);return r&&(t.version=r),t}},{test:[/silk/i],describe:function(e){var t={name:"Amazon Silk"},r=i.default.getFirstMatch(/silk\/(\d+(\.?_?\d+)+)/i,e);return r&&(t.version=r),t}},{test:[/phantom/i],describe:function(e){var t={name:"PhantomJS"},r=i.default.getFirstMatch(/phantomjs\/(\d+(\.?_?\d+)+)/i,e);return r&&(t.version=r),t}},{test:[/slimerjs/i],describe:function(e){var t={name:"SlimerJS"},r=i.default.getFirstMatch(/slimerjs\/(\d+(\.?_?\d+)+)/i,e);return r&&(t.version=r),t}},{test:[/blackberry|\bbb\d+/i,/rim\stablet/i],describe:function(e){var t={name:"BlackBerry"},r=i.default.getFirstMatch(s,e)||i.default.getFirstMatch(/blackberry[\d]+\/(\d+(\.?_?\d+)+)/i,e);return r&&(t.version=r),t}},{test:[/(web|hpw)[o0]s/i],describe:function(e){var t={name:"WebOS Browser"},r=i.default.getFirstMatch(s,e)||i.default.getFirstMatch(/w(?:eb)?[o0]sbrowser\/(\d+(\.?_?\d+)+)/i,e);return r&&(t.version=r),t}},{test:[/bada/i],describe:function(e){var t={name:"Bada"},r=i.default.getFirstMatch(/dolfin\/(\d+(\.?_?\d+)+)/i,e);return r&&(t.version=r),t}},{test:[/tizen/i],describe:function(e){var t={name:"Tizen"},r=i.default.getFirstMatch(/(?:tizen\s?)?browser\/(\d+(\.?_?\d+)+)/i,e)||i.default.getFirstMatch(s,e);return r&&(t.version=r),t}},{test:[/qupzilla/i],describe:function(e){var t={name:"QupZilla"},r=i.default.getFirstMatch(/(?:qupzilla)[\s/](\d+(\.?_?\d+)+)/i,e)||i.default.getFirstMatch(s,e);return r&&(t.version=r),t}},{test:[/firefox|iceweasel|fxios/i],describe:function(e){var t={name:"Firefox"},r=i.default.getFirstMatch(/(?:firefox|iceweasel|fxios)[\s/](\d+(\.?_?\d+)+)/i,e);return r&&(t.version=r),t}},{test:[/electron/i],describe:function(e){var t={name:"Electron"},r=i.default.getFirstMatch(/(?:electron)\/(\d+(\.?_?\d+)+)/i,e);return r&&(t.version=r),t}},{test:[/MiuiBrowser/i],describe:function(e){var t={name:"Miui"},r=i.default.getFirstMatch(/(?:MiuiBrowser)[\s/](\d+(\.?_?\d+)+)/i,e);return 
r&&(t.version=r),t}},{test:[/chromium/i],describe:function(e){var t={name:"Chromium"},r=i.default.getFirstMatch(/(?:chromium)[\s/](\d+(\.?_?\d+)+)/i,e)||i.default.getFirstMatch(s,e);return r&&(t.version=r),t}},{test:[/chrome|crios|crmo/i],describe:function(e){var t={name:"Chrome"},r=i.default.getFirstMatch(/(?:chrome|crios|crmo)\/(\d+(\.?_?\d+)+)/i,e);return r&&(t.version=r),t}},{test:[/GSA/i],describe:function(e){var t={name:"Google Search"},r=i.default.getFirstMatch(/(?:GSA)\/(\d+(\.?_?\d+)+)/i,e);return r&&(t.version=r),t}},{test:function(e){var t=!e.test(/like android/i),r=e.test(/android/i);return t&&r},describe:function(e){var t={name:"Android Browser"},r=i.default.getFirstMatch(s,e);return r&&(t.version=r),t}},{test:[/playstation 4/i],describe:function(e){var t={name:"PlayStation 4"},r=i.default.getFirstMatch(s,e);return r&&(t.version=r),t}},{test:[/safari|applewebkit/i],describe:function(e){var t={name:"Safari"},r=i.default.getFirstMatch(s,e);return r&&(t.version=r),t}},{test:[/.*/i],describe:function(e){var t=-1!==e.search("\\(")?/^(.*)\/(.*)[ \t]\((.*)/:/^(.*)\/(.*) /;return{name:i.default.getFirstMatch(t,e),version:i.default.getSecondMatch(t,e)}}}];t.default=a,e.exports=t.default},93:function(e,t,r){"use strict";t.__esModule=!0,t.default=void 0;var n,i=(n=r(17))&&n.__esModule?n:{default:n},s=r(18);var a=[{test:[/Roku\/DVP/],describe:function(e){var t=i.default.getFirstMatch(/Roku\/DVP-(\d+\.\d+)/i,e);return{name:s.OS_MAP.Roku,version:t}}},{test:[/windows phone/i],describe:function(e){var t=i.default.getFirstMatch(/windows phone (?:os)?\s?(\d+(\.\d+)*)/i,e);return{name:s.OS_MAP.WindowsPhone,version:t}}},{test:[/windows /i],describe:function(e){var t=i.default.getFirstMatch(/Windows ((NT|XP)( \d\d?.\d)?)/i,e),r=i.default.getWindowsVersionName(t);return{name:s.OS_MAP.Windows,version:t,versionName:r}}},{test:[/Macintosh(.*?) 
FxiOS(.*?)\//],describe:function(e){var t={name:s.OS_MAP.iOS},r=i.default.getSecondMatch(/(Version\/)(\d[\d.]+)/,e);return r&&(t.version=r),t}},{test:[/macintosh/i],describe:function(e){var t=i.default.getFirstMatch(/mac os x (\d+(\.?_?\d+)+)/i,e).replace(/[_\s]/g,"."),r=i.default.getMacOSVersionName(t),n={name:s.OS_MAP.MacOS,version:t};return r&&(n.versionName=r),n}},{test:[/(ipod|iphone|ipad)/i],describe:function(e){var t=i.default.getFirstMatch(/os (\d+([_\s]\d+)*) like mac os x/i,e).replace(/[_\s]/g,".");return{name:s.OS_MAP.iOS,version:t}}},{test:function(e){var t=!e.test(/like android/i),r=e.test(/android/i);return t&&r},describe:function(e){var t=i.default.getFirstMatch(/android[\s/-](\d+(\.\d+)*)/i,e),r=i.default.getAndroidVersionName(t),n={name:s.OS_MAP.Android,version:t};return r&&(n.versionName=r),n}},{test:[/(web|hpw)[o0]s/i],describe:function(e){var t=i.default.getFirstMatch(/(?:web|hpw)[o0]s\/(\d+(\.\d+)*)/i,e),r={name:s.OS_MAP.WebOS};return t&&t.length&&(r.version=t),r}},{test:[/blackberry|\bbb\d+/i,/rim\stablet/i],describe:function(e){var t=i.default.getFirstMatch(/rim\stablet\sos\s(\d+(\.\d+)*)/i,e)||i.default.getFirstMatch(/blackberry\d+\/(\d+([_\s]\d+)*)/i,e)||i.default.getFirstMatch(/\bbb(\d+)/i,e);return{name:s.OS_MAP.BlackBerry,version:t}}},{test:[/bada/i],describe:function(e){var t=i.default.getFirstMatch(/bada\/(\d+(\.\d+)*)/i,e);return{name:s.OS_MAP.Bada,version:t}}},{test:[/tizen/i],describe:function(e){var t=i.default.getFirstMatch(/tizen[/\s](\d+(\.\d+)*)/i,e);return{name:s.OS_MAP.Tizen,version:t}}},{test:[/linux/i],describe:function(){return{name:s.OS_MAP.Linux}}},{test:[/CrOS/],describe:function(){return{name:s.OS_MAP.ChromeOS}}},{test:[/PlayStation 4/],describe:function(e){var t=i.default.getFirstMatch(/PlayStation 4[/\s](\d+(\.\d+)*)/i,e);return{name:s.OS_MAP.PlayStation4,version:t}}}];t.default=a,e.exports=t.default},94:function(e,t,r){"use strict";t.__esModule=!0,t.default=void 0;var 
n,i=(n=r(17))&&n.__esModule?n:{default:n},s=r(18);var a=[{test:[/googlebot/i],describe:function(){return{type:"bot",vendor:"Google"}}},{test:[/huawei/i],describe:function(e){var t=i.default.getFirstMatch(/(can-l01)/i,e)&&"Nova",r={type:s.PLATFORMS_MAP.mobile,vendor:"Huawei"};return t&&(r.model=t),r}},{test:[/nexus\s*(?:7|8|9|10).*/i],describe:function(){return{type:s.PLATFORMS_MAP.tablet,vendor:"Nexus"}}},{test:[/ipad/i],describe:function(){return{type:s.PLATFORMS_MAP.tablet,vendor:"Apple",model:"iPad"}}},{test:[/Macintosh(.*?) FxiOS(.*?)\//],describe:function(){return{type:s.PLATFORMS_MAP.tablet,vendor:"Apple",model:"iPad"}}},{test:[/kftt build/i],describe:function(){return{type:s.PLATFORMS_MAP.tablet,vendor:"Amazon",model:"Kindle Fire HD 7"}}},{test:[/silk/i],describe:function(){return{type:s.PLATFORMS_MAP.tablet,vendor:"Amazon"}}},{test:[/tablet(?! pc)/i],describe:function(){return{type:s.PLATFORMS_MAP.tablet}}},{test:function(e){var t=e.test(/ipod|iphone/i),r=e.test(/like (ipod|iphone)/i);return t&&!r},describe:function(e){var t=i.default.getFirstMatch(/(ipod|iphone)/i,e);return{type:s.PLATFORMS_MAP.mobile,vendor:"Apple",model:t}}},{test:[/nexus\s*[0-6].*/i,/galaxy nexus/i],describe:function(){return{type:s.PLATFORMS_MAP.mobile,vendor:"Nexus"}}},{test:[/[^-]mobi/i],describe:function(){return{type:s.PLATFORMS_MAP.mobile}}},{test:function(e){return"blackberry"===e.getBrowserName(!0)},describe:function(){return{type:s.PLATFORMS_MAP.mobile,vendor:"BlackBerry"}}},{test:function(e){return"bada"===e.getBrowserName(!0)},describe:function(){return{type:s.PLATFORMS_MAP.mobile}}},{test:function(e){return"windows phone"===e.getBrowserName()},describe:function(){return{type:s.PLATFORMS_MAP.mobile,vendor:"Microsoft"}}},{test:function(e){var 
t=Number(String(e.getOSVersion()).split(".")[0]);return"android"===e.getOSName(!0)&&t>=3},describe:function(){return{type:s.PLATFORMS_MAP.tablet}}},{test:function(e){return"android"===e.getOSName(!0)},describe:function(){return{type:s.PLATFORMS_MAP.mobile}}},{test:function(e){return"macos"===e.getOSName(!0)},describe:function(){return{type:s.PLATFORMS_MAP.desktop,vendor:"Apple"}}},{test:function(e){return"windows"===e.getOSName(!0)},describe:function(){return{type:s.PLATFORMS_MAP.desktop}}},{test:function(e){return"linux"===e.getOSName(!0)},describe:function(){return{type:s.PLATFORMS_MAP.desktop}}},{test:function(e){return"playstation 4"===e.getOSName(!0)},describe:function(){return{type:s.PLATFORMS_MAP.tv}}},{test:function(e){return"roku"===e.getOSName(!0)},describe:function(){return{type:s.PLATFORMS_MAP.tv}}}];t.default=a,e.exports=t.default},95:function(e,t,r){"use strict";t.__esModule=!0,t.default=void 0;var n,i=(n=r(17))&&n.__esModule?n:{default:n},s=r(18);var a=[{test:function(e){return"microsoft edge"===e.getBrowserName(!0)},describe:function(e){if(/\sedg\//i.test(e))return{name:s.ENGINE_MAP.Blink};var t=i.default.getFirstMatch(/edge\/(\d+(\.?_?\d+)+)/i,e);return{name:s.ENGINE_MAP.EdgeHTML,version:t}}},{test:[/trident/i],describe:function(e){var t={name:s.ENGINE_MAP.Trident},r=i.default.getFirstMatch(/trident\/(\d+(\.?_?\d+)+)/i,e);return r&&(t.version=r),t}},{test:function(e){return e.test(/presto/i)},describe:function(e){var t={name:s.ENGINE_MAP.Presto},r=i.default.getFirstMatch(/presto\/(\d+(\.?_?\d+)+)/i,e);return r&&(t.version=r),t}},{test:function(e){var t=e.test(/gecko/i),r=e.test(/like gecko/i);return t&&!r},describe:function(e){var t={name:s.ENGINE_MAP.Gecko},r=i.default.getFirstMatch(/gecko\/(\d+(\.?_?\d+)+)/i,e);return r&&(t.version=r),t}},{test:[/(apple)?webkit\/537\.36/i],describe:function(){return{name:s.ENGINE_MAP.Blink}}},{test:[/(apple)?webkit/i],describe:function(e){var 
t={name:s.ENGINE_MAP.WebKit},r=i.default.getFirstMatch(/webkit\/(\d+(\.?_?\d+)+)/i,e);return r&&(t.version=r),t}}];t.default=a,e.exports=t.default}})})); \ No newline at end of file diff --git a/amplify/functions/downloadDocument/node_modules/bowser/index.d.ts b/amplify/functions/downloadDocument/node_modules/bowser/index.d.ts new file mode 100644 index 0000000..d95656a --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/bowser/index.d.ts @@ -0,0 +1,250 @@ +// Type definitions for Bowser v2 +// Project: https://github.com/lancedikson/bowser +// Definitions by: Alexander P. Cerutti , + +export = Bowser; +export as namespace Bowser; + +declare namespace Bowser { + /** + * Creates a Parser instance + * @param {string} UA - User agent string + * @param {boolean} skipParsing + */ + + function getParser(UA: string, skipParsing?: boolean): Parser.Parser; + + /** + * Creates a Parser instance and runs Parser.getResult immediately + * @param UA - User agent string + * @returns {Parser.ParsedResult} + */ + + function parse(UA: string): Parser.ParsedResult; + + /** + * Constants exposed via bowser getters + */ + const BROWSER_MAP: Record; + const ENGINE_MAP: Record; + const OS_MAP: Record; + const PLATFORMS_MAP: Record; + + namespace Parser { + interface Parser { + constructor(UA: string, skipParsing?: boolean): Parser.Parser; + + /** + * Get parsed browser object + * @return {BrowserDetails} Browser's details + */ + + getBrowser(): BrowserDetails; + + /** + * Get browser's name + * @param {Boolean} [toLowerCase] return lower-cased value + * @return {String} Browser's name or an empty string + */ + + getBrowserName(toLowerCase?: boolean): string; + + /** + * Get browser's version + * @return {String} version of browser + */ + + getBrowserVersion(): string; + + /** + * Get OS + * @return {OSDetails} - OS Details + * + * @example + * this.getOS(); // { + * // name: 'macOS', + * // version: '10.11.12', + * // } + */ + + getOS(): OSDetails; + + /** + * Get OS name 
+ * @param {Boolean} [toLowerCase] return lower-cased value + * @return {String} name of the OS — macOS, Windows, Linux, etc. + */ + + getOSName(toLowerCase?: boolean): string; + + /** + * Get OS version + * @return {String} full version with dots ('10.11.12', '5.6', etc) + */ + + getOSVersion(): string; + + /** + * Get parsed platform + * @returns {PlatformDetails} + */ + + getPlatform(): PlatformDetails; + + /** + * Get platform name + * @param {boolean} toLowerCase + */ + + getPlatformType(toLowerCase?: boolean): string; + + /** + * Get parsed engine + * @returns {EngineDetails} + */ + + getEngine(): EngineDetails; + + /** + * Get parsed engine's name + * @returns {String} Engine's name or an empty string + */ + + getEngineName(): string; + + /** + * Get parsed result + * @return {ParsedResult} + */ + + getResult(): ParsedResult; + + /** + * Get UserAgent string of current Parser instance + * @return {String} User-Agent String of the current object + */ + + getUA(): string; + + /** + * Is anything? 
Check if the browser is called "anything", + * the OS called "anything" or the platform called "anything" + * @param {String} anything + * @returns {Boolean} + */ + + is(anything: any): boolean; + + /** + * Parse full information about the browser + * @returns {Parser.Parser} + */ + + parse(): Parser.Parser; + + /** + * Get parsed browser object + * @returns {BrowserDetails} + */ + + parseBrowser(): BrowserDetails; + + /** + * Get parsed engine + * @returns {EngineDetails} + */ + + parseEngine(): EngineDetails; + + /** + * Parse OS and save it to this.parsedResult.os + * @returns {OSDetails} + */ + + parseOS(): OSDetails; + + /** + * Get parsed platform + * @returns {PlatformDetails} + */ + + parsePlatform(): PlatformDetails; + + /** + * Check if parsed browser matches certain conditions + * + * @param {checkTree} checkTree It's one or two layered object, + * which can include a platform or an OS on the first layer + * and should have browsers specs on the bottom-laying layer + * + * @returns {Boolean|undefined} Whether the browser satisfies the set conditions or not. + * Returns `undefined` when the browser is no described in the checkTree object. 
+ * + * @example + * const browser = new Bowser(UA); + * if (browser.check({chrome: '>118.01.1322' })) + * // or with os + * if (browser.check({windows: { chrome: '>118.01.1322' } })) + * // or with platforms + * if (browser.check({desktop: { chrome: '>118.01.1322' } })) + */ + + satisfies(checkTree: checkTree): boolean | undefined; + + /** + * Check if the browser name equals the passed string + * @param browserName The string to compare with the browser name + * @param [includingAlias=false] The flag showing whether alias will be included into comparison + * @returns {boolean} + */ + + + isBrowser(browserName: string, includingAlias?: boolean): boolean; + + /** + * Check if any of the given values satifies `.is(anything)` + * @param {string[]} anythings + * @returns {boolean} true if at least one condition is satisfied, false otherwise. + */ + + some(anythings: string[]): boolean | undefined; + + /** + * Test a UA string for a regexp + * @param regex + * @returns {boolean} true if the regex matches the UA, false otherwise. 
+ */ + + test(regex: RegExp): boolean; + } + + interface ParsedResult { + browser: BrowserDetails; + os: OSDetails; + platform: PlatformDetails; + engine: EngineDetails; + } + + interface Details { + name?: string; + version?: string; + } + + interface OSDetails extends Details { + versionName?: string; + } + + interface PlatformDetails { + type?: string; + vendor?: string; + model?: string; + } + + type BrowserDetails = Details; + type EngineDetails = Details; + + interface checkTree { + [key: string]: any; + } + } +} diff --git a/amplify/functions/downloadDocument/node_modules/bowser/package.json b/amplify/functions/downloadDocument/node_modules/bowser/package.json new file mode 100644 index 0000000..3fb7c83 --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/bowser/package.json @@ -0,0 +1,83 @@ +{ + "name": "bowser", + "version": "2.11.0", + "description": "Lightweight browser detector", + "keywords": [ + "browser", + "useragent", + "user-agent", + "parser", + "ua", + "detection", + "ender", + "sniff" + ], + "homepage": "https://github.com/lancedikson/bowser", + "author": "Dustin Diaz (http://dustindiaz.com)", + "contributors": [ + { + "name": "Denis Demchenko", + "url": "http://twitter.com/lancedikson" + } + ], + "main": "es5.js", + "browser": "es5.js", + "module": "src/bowser.js", + "types": "index.d.ts", + "repository": { + "type": "git", + "url": "git+https://github.com/lancedikson/bowser.git" + }, + "devDependencies": { + "@babel/cli": "^7.11.6", + "@babel/core": "^7.8.0", + "@babel/polyfill": "^7.8.3", + "@babel/preset-env": "^7.8.2", + "@babel/register": "^7.8.3", + "ava": "^3.0.0", + "babel-eslint": "^10.0.3", + "babel-loader": "^8.0.6", + "babel-plugin-add-module-exports": "^1.0.2", + "babel-plugin-istanbul": "^6.0.0", + "compression-webpack-plugin": "^4.0.0", + "coveralls": "^3.0.6", + "docdash": "^1.1.1", + "eslint": "^6.5.1", + "eslint-config-airbnb-base": "^13.2.0", + "eslint-plugin-ava": "^10.0.0", + "eslint-plugin-import": 
"^2.18.2", + "gh-pages": "^3.0.0", + "jsdoc": "^3.6.3", + "nyc": "^15.0.0", + "sinon": "^9.0.0", + "testem": "^3.0.0", + "webpack": "^4.41.0", + "webpack-bundle-analyzer": "^3.5.2", + "webpack-cli": "^3.3.9", + "yamljs": "^0.3.0" + }, + "ava": { + "require": [ + "@babel/register" + ] + }, + "bugs": { + "url": "https://github.com/lancedikson/bowser/issues" + }, + "directories": { + "test": "test" + }, + "scripts": { + "build": "webpack --config webpack.config.js", + "generate-and-deploy-docs": "npm run generate-docs && gh-pages --dist docs --dest docs", + "watch": "webpack --watch --config webpack.config.js", + "prepublishOnly": "npm run build", + "lint": "eslint ./src", + "testem": "testem", + "test": "nyc --reporter=html --reporter=text ava", + "test:watch": "ava --watch", + "coverage": "nyc report --reporter=text-lcov | coveralls", + "generate-docs": "jsdoc -c jsdoc.json" + }, + "license": "MIT" +} diff --git a/amplify/functions/downloadDocument/node_modules/bowser/src/bowser.js b/amplify/functions/downloadDocument/node_modules/bowser/src/bowser.js new file mode 100644 index 0000000..f79e6e0 --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/bowser/src/bowser.js @@ -0,0 +1,77 @@ +/*! + * Bowser - a browser detector + * https://github.com/lancedikson/bowser + * MIT License | (c) Dustin Diaz 2012-2015 + * MIT License | (c) Denis Demchenko 2015-2019 + */ +import Parser from './parser.js'; +import { + BROWSER_MAP, + ENGINE_MAP, + OS_MAP, + PLATFORMS_MAP, +} from './constants.js'; + +/** + * Bowser class. + * Keep it simple as much as it can be. + * It's supposed to work with collections of {@link Parser} instances + * rather then solve one-instance problems. + * All the one-instance stuff is located in Parser class. 
+ * + * @class + * @classdesc Bowser is a static object, that provides an API to the Parsers + * @hideconstructor + */ +class Bowser { + /** + * Creates a {@link Parser} instance + * + * @param {String} UA UserAgent string + * @param {Boolean} [skipParsing=false] Will make the Parser postpone parsing until you ask it + * explicitly. Same as `skipParsing` for {@link Parser}. + * @returns {Parser} + * @throws {Error} when UA is not a String + * + * @example + * const parser = Bowser.getParser(window.navigator.userAgent); + * const result = parser.getResult(); + */ + static getParser(UA, skipParsing = false) { + if (typeof UA !== 'string') { + throw new Error('UserAgent should be a string'); + } + return new Parser(UA, skipParsing); + } + + /** + * Creates a {@link Parser} instance and runs {@link Parser.getResult} immediately + * + * @param UA + * @return {ParsedResult} + * + * @example + * const result = Bowser.parse(window.navigator.userAgent); + */ + static parse(UA) { + return (new Parser(UA)).getResult(); + } + + static get BROWSER_MAP() { + return BROWSER_MAP; + } + + static get ENGINE_MAP() { + return ENGINE_MAP; + } + + static get OS_MAP() { + return OS_MAP; + } + + static get PLATFORMS_MAP() { + return PLATFORMS_MAP; + } +} + +export default Bowser; diff --git a/amplify/functions/downloadDocument/node_modules/bowser/src/constants.js b/amplify/functions/downloadDocument/node_modules/bowser/src/constants.js new file mode 100644 index 0000000..f335032 --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/bowser/src/constants.js @@ -0,0 +1,116 @@ +// NOTE: this list must be up-to-date with browsers listed in +// test/acceptance/useragentstrings.yml +export const BROWSER_ALIASES_MAP = { + 'Amazon Silk': 'amazon_silk', + 'Android Browser': 'android', + Bada: 'bada', + BlackBerry: 'blackberry', + Chrome: 'chrome', + Chromium: 'chromium', + Electron: 'electron', + Epiphany: 'epiphany', + Firefox: 'firefox', + Focus: 'focus', + Generic: 'generic', + 
'Google Search': 'google_search', + Googlebot: 'googlebot', + 'Internet Explorer': 'ie', + 'K-Meleon': 'k_meleon', + Maxthon: 'maxthon', + 'Microsoft Edge': 'edge', + 'MZ Browser': 'mz', + 'NAVER Whale Browser': 'naver', + Opera: 'opera', + 'Opera Coast': 'opera_coast', + PhantomJS: 'phantomjs', + Puffin: 'puffin', + QupZilla: 'qupzilla', + QQ: 'qq', + QQLite: 'qqlite', + Safari: 'safari', + Sailfish: 'sailfish', + 'Samsung Internet for Android': 'samsung_internet', + SeaMonkey: 'seamonkey', + Sleipnir: 'sleipnir', + Swing: 'swing', + Tizen: 'tizen', + 'UC Browser': 'uc', + Vivaldi: 'vivaldi', + 'WebOS Browser': 'webos', + WeChat: 'wechat', + 'Yandex Browser': 'yandex', + Roku: 'roku', +}; + +export const BROWSER_MAP = { + amazon_silk: 'Amazon Silk', + android: 'Android Browser', + bada: 'Bada', + blackberry: 'BlackBerry', + chrome: 'Chrome', + chromium: 'Chromium', + electron: 'Electron', + epiphany: 'Epiphany', + firefox: 'Firefox', + focus: 'Focus', + generic: 'Generic', + googlebot: 'Googlebot', + google_search: 'Google Search', + ie: 'Internet Explorer', + k_meleon: 'K-Meleon', + maxthon: 'Maxthon', + edge: 'Microsoft Edge', + mz: 'MZ Browser', + naver: 'NAVER Whale Browser', + opera: 'Opera', + opera_coast: 'Opera Coast', + phantomjs: 'PhantomJS', + puffin: 'Puffin', + qupzilla: 'QupZilla', + qq: 'QQ Browser', + qqlite: 'QQ Browser Lite', + safari: 'Safari', + sailfish: 'Sailfish', + samsung_internet: 'Samsung Internet for Android', + seamonkey: 'SeaMonkey', + sleipnir: 'Sleipnir', + swing: 'Swing', + tizen: 'Tizen', + uc: 'UC Browser', + vivaldi: 'Vivaldi', + webos: 'WebOS Browser', + wechat: 'WeChat', + yandex: 'Yandex Browser', +}; + +export const PLATFORMS_MAP = { + tablet: 'tablet', + mobile: 'mobile', + desktop: 'desktop', + tv: 'tv', +}; + +export const OS_MAP = { + WindowsPhone: 'Windows Phone', + Windows: 'Windows', + MacOS: 'macOS', + iOS: 'iOS', + Android: 'Android', + WebOS: 'WebOS', + BlackBerry: 'BlackBerry', + Bada: 'Bada', + Tizen: 'Tizen', + 
Linux: 'Linux', + ChromeOS: 'Chrome OS', + PlayStation4: 'PlayStation 4', + Roku: 'Roku', +}; + +export const ENGINE_MAP = { + EdgeHTML: 'EdgeHTML', + Blink: 'Blink', + Trident: 'Trident', + Presto: 'Presto', + Gecko: 'Gecko', + WebKit: 'WebKit', +}; diff --git a/amplify/functions/downloadDocument/node_modules/bowser/src/parser-browsers.js b/amplify/functions/downloadDocument/node_modules/bowser/src/parser-browsers.js new file mode 100644 index 0000000..ee7840c --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/bowser/src/parser-browsers.js @@ -0,0 +1,700 @@ +/** + * Browsers' descriptors + * + * The idea of descriptors is simple. You should know about them two simple things: + * 1. Every descriptor has a method or property called `test` and a `describe` method. + * 2. Order of descriptors is important. + * + * More details: + * 1. Method or property `test` serves as a way to detect whether the UA string + * matches some certain browser or not. The `describe` method helps to make a result + * object with params that show some browser-specific things: name, version, etc. + * 2. Order of descriptors is important because a Parser goes through them one by one + * in course. For example, if you insert Chrome's descriptor as the first one, + * more then a half of browsers will be described as Chrome, because they will pass + * the Chrome descriptor's test. + * + * Descriptor's `test` could be a property with an array of RegExps, where every RegExp + * will be applied to a UA string to test it whether it matches or not. + * If a descriptor has two or more regexps in the `test` array it tests them one by one + * with a logical sum operation. Parser stops if it has found any RegExp that matches the UA. + * + * Or `test` could be a method. In that case it gets a Parser instance and should + * return true/false to get the Parser know if this browser descriptor matches the UA or not. 
+ */ + +import Utils from './utils.js'; + +const commonVersionIdentifier = /version\/(\d+(\.?_?\d+)+)/i; + +const browsersList = [ + /* Googlebot */ + { + test: [/googlebot/i], + describe(ua) { + const browser = { + name: 'Googlebot', + }; + const version = Utils.getFirstMatch(/googlebot\/(\d+(\.\d+))/i, ua) || Utils.getFirstMatch(commonVersionIdentifier, ua); + + if (version) { + browser.version = version; + } + + return browser; + }, + }, + + /* Opera < 13.0 */ + { + test: [/opera/i], + describe(ua) { + const browser = { + name: 'Opera', + }; + const version = Utils.getFirstMatch(commonVersionIdentifier, ua) || Utils.getFirstMatch(/(?:opera)[\s/](\d+(\.?_?\d+)+)/i, ua); + + if (version) { + browser.version = version; + } + + return browser; + }, + }, + + /* Opera > 13.0 */ + { + test: [/opr\/|opios/i], + describe(ua) { + const browser = { + name: 'Opera', + }; + const version = Utils.getFirstMatch(/(?:opr|opios)[\s/](\S+)/i, ua) || Utils.getFirstMatch(commonVersionIdentifier, ua); + + if (version) { + browser.version = version; + } + + return browser; + }, + }, + { + test: [/SamsungBrowser/i], + describe(ua) { + const browser = { + name: 'Samsung Internet for Android', + }; + const version = Utils.getFirstMatch(commonVersionIdentifier, ua) || Utils.getFirstMatch(/(?:SamsungBrowser)[\s/](\d+(\.?_?\d+)+)/i, ua); + + if (version) { + browser.version = version; + } + + return browser; + }, + }, + { + test: [/Whale/i], + describe(ua) { + const browser = { + name: 'NAVER Whale Browser', + }; + const version = Utils.getFirstMatch(commonVersionIdentifier, ua) || Utils.getFirstMatch(/(?:whale)[\s/](\d+(?:\.\d+)+)/i, ua); + + if (version) { + browser.version = version; + } + + return browser; + }, + }, + { + test: [/MZBrowser/i], + describe(ua) { + const browser = { + name: 'MZ Browser', + }; + const version = Utils.getFirstMatch(/(?:MZBrowser)[\s/](\d+(?:\.\d+)+)/i, ua) || Utils.getFirstMatch(commonVersionIdentifier, ua); + + if (version) { + browser.version = version; + 
} + + return browser; + }, + }, + { + test: [/focus/i], + describe(ua) { + const browser = { + name: 'Focus', + }; + const version = Utils.getFirstMatch(/(?:focus)[\s/](\d+(?:\.\d+)+)/i, ua) || Utils.getFirstMatch(commonVersionIdentifier, ua); + + if (version) { + browser.version = version; + } + + return browser; + }, + }, + { + test: [/swing/i], + describe(ua) { + const browser = { + name: 'Swing', + }; + const version = Utils.getFirstMatch(/(?:swing)[\s/](\d+(?:\.\d+)+)/i, ua) || Utils.getFirstMatch(commonVersionIdentifier, ua); + + if (version) { + browser.version = version; + } + + return browser; + }, + }, + { + test: [/coast/i], + describe(ua) { + const browser = { + name: 'Opera Coast', + }; + const version = Utils.getFirstMatch(commonVersionIdentifier, ua) || Utils.getFirstMatch(/(?:coast)[\s/](\d+(\.?_?\d+)+)/i, ua); + + if (version) { + browser.version = version; + } + + return browser; + }, + }, + { + test: [/opt\/\d+(?:.?_?\d+)+/i], + describe(ua) { + const browser = { + name: 'Opera Touch', + }; + const version = Utils.getFirstMatch(/(?:opt)[\s/](\d+(\.?_?\d+)+)/i, ua) || Utils.getFirstMatch(commonVersionIdentifier, ua); + + if (version) { + browser.version = version; + } + + return browser; + }, + }, + { + test: [/yabrowser/i], + describe(ua) { + const browser = { + name: 'Yandex Browser', + }; + const version = Utils.getFirstMatch(/(?:yabrowser)[\s/](\d+(\.?_?\d+)+)/i, ua) || Utils.getFirstMatch(commonVersionIdentifier, ua); + + if (version) { + browser.version = version; + } + + return browser; + }, + }, + { + test: [/ucbrowser/i], + describe(ua) { + const browser = { + name: 'UC Browser', + }; + const version = Utils.getFirstMatch(commonVersionIdentifier, ua) || Utils.getFirstMatch(/(?:ucbrowser)[\s/](\d+(\.?_?\d+)+)/i, ua); + + if (version) { + browser.version = version; + } + + return browser; + }, + }, + { + test: [/Maxthon|mxios/i], + describe(ua) { + const browser = { + name: 'Maxthon', + }; + const version = 
Utils.getFirstMatch(commonVersionIdentifier, ua) || Utils.getFirstMatch(/(?:Maxthon|mxios)[\s/](\d+(\.?_?\d+)+)/i, ua); + + if (version) { + browser.version = version; + } + + return browser; + }, + }, + { + test: [/epiphany/i], + describe(ua) { + const browser = { + name: 'Epiphany', + }; + const version = Utils.getFirstMatch(commonVersionIdentifier, ua) || Utils.getFirstMatch(/(?:epiphany)[\s/](\d+(\.?_?\d+)+)/i, ua); + + if (version) { + browser.version = version; + } + + return browser; + }, + }, + { + test: [/puffin/i], + describe(ua) { + const browser = { + name: 'Puffin', + }; + const version = Utils.getFirstMatch(commonVersionIdentifier, ua) || Utils.getFirstMatch(/(?:puffin)[\s/](\d+(\.?_?\d+)+)/i, ua); + + if (version) { + browser.version = version; + } + + return browser; + }, + }, + { + test: [/sleipnir/i], + describe(ua) { + const browser = { + name: 'Sleipnir', + }; + const version = Utils.getFirstMatch(commonVersionIdentifier, ua) || Utils.getFirstMatch(/(?:sleipnir)[\s/](\d+(\.?_?\d+)+)/i, ua); + + if (version) { + browser.version = version; + } + + return browser; + }, + }, + { + test: [/k-meleon/i], + describe(ua) { + const browser = { + name: 'K-Meleon', + }; + const version = Utils.getFirstMatch(commonVersionIdentifier, ua) || Utils.getFirstMatch(/(?:k-meleon)[\s/](\d+(\.?_?\d+)+)/i, ua); + + if (version) { + browser.version = version; + } + + return browser; + }, + }, + { + test: [/micromessenger/i], + describe(ua) { + const browser = { + name: 'WeChat', + }; + const version = Utils.getFirstMatch(/(?:micromessenger)[\s/](\d+(\.?_?\d+)+)/i, ua) || Utils.getFirstMatch(commonVersionIdentifier, ua); + + if (version) { + browser.version = version; + } + + return browser; + }, + }, + { + test: [/qqbrowser/i], + describe(ua) { + const browser = { + name: (/qqbrowserlite/i).test(ua) ? 
'QQ Browser Lite' : 'QQ Browser', + }; + const version = Utils.getFirstMatch(/(?:qqbrowserlite|qqbrowser)[/](\d+(\.?_?\d+)+)/i, ua) || Utils.getFirstMatch(commonVersionIdentifier, ua); + + if (version) { + browser.version = version; + } + + return browser; + }, + }, + { + test: [/msie|trident/i], + describe(ua) { + const browser = { + name: 'Internet Explorer', + }; + const version = Utils.getFirstMatch(/(?:msie |rv:)(\d+(\.?_?\d+)+)/i, ua); + + if (version) { + browser.version = version; + } + + return browser; + }, + }, + { + test: [/\sedg\//i], + describe(ua) { + const browser = { + name: 'Microsoft Edge', + }; + + const version = Utils.getFirstMatch(/\sedg\/(\d+(\.?_?\d+)+)/i, ua); + + if (version) { + browser.version = version; + } + + return browser; + }, + }, + { + test: [/edg([ea]|ios)/i], + describe(ua) { + const browser = { + name: 'Microsoft Edge', + }; + + const version = Utils.getSecondMatch(/edg([ea]|ios)\/(\d+(\.?_?\d+)+)/i, ua); + + if (version) { + browser.version = version; + } + + return browser; + }, + }, + { + test: [/vivaldi/i], + describe(ua) { + const browser = { + name: 'Vivaldi', + }; + const version = Utils.getFirstMatch(/vivaldi\/(\d+(\.?_?\d+)+)/i, ua); + + if (version) { + browser.version = version; + } + + return browser; + }, + }, + { + test: [/seamonkey/i], + describe(ua) { + const browser = { + name: 'SeaMonkey', + }; + const version = Utils.getFirstMatch(/seamonkey\/(\d+(\.?_?\d+)+)/i, ua); + + if (version) { + browser.version = version; + } + + return browser; + }, + }, + { + test: [/sailfish/i], + describe(ua) { + const browser = { + name: 'Sailfish', + }; + + const version = Utils.getFirstMatch(/sailfish\s?browser\/(\d+(\.\d+)?)/i, ua); + + if (version) { + browser.version = version; + } + + return browser; + }, + }, + { + test: [/silk/i], + describe(ua) { + const browser = { + name: 'Amazon Silk', + }; + const version = Utils.getFirstMatch(/silk\/(\d+(\.?_?\d+)+)/i, ua); + + if (version) { + browser.version = version; + } + + 
return browser; + }, + }, + { + test: [/phantom/i], + describe(ua) { + const browser = { + name: 'PhantomJS', + }; + const version = Utils.getFirstMatch(/phantomjs\/(\d+(\.?_?\d+)+)/i, ua); + + if (version) { + browser.version = version; + } + + return browser; + }, + }, + { + test: [/slimerjs/i], + describe(ua) { + const browser = { + name: 'SlimerJS', + }; + const version = Utils.getFirstMatch(/slimerjs\/(\d+(\.?_?\d+)+)/i, ua); + + if (version) { + browser.version = version; + } + + return browser; + }, + }, + { + test: [/blackberry|\bbb\d+/i, /rim\stablet/i], + describe(ua) { + const browser = { + name: 'BlackBerry', + }; + const version = Utils.getFirstMatch(commonVersionIdentifier, ua) || Utils.getFirstMatch(/blackberry[\d]+\/(\d+(\.?_?\d+)+)/i, ua); + + if (version) { + browser.version = version; + } + + return browser; + }, + }, + { + test: [/(web|hpw)[o0]s/i], + describe(ua) { + const browser = { + name: 'WebOS Browser', + }; + const version = Utils.getFirstMatch(commonVersionIdentifier, ua) || Utils.getFirstMatch(/w(?:eb)?[o0]sbrowser\/(\d+(\.?_?\d+)+)/i, ua); + + if (version) { + browser.version = version; + } + + return browser; + }, + }, + { + test: [/bada/i], + describe(ua) { + const browser = { + name: 'Bada', + }; + const version = Utils.getFirstMatch(/dolfin\/(\d+(\.?_?\d+)+)/i, ua); + + if (version) { + browser.version = version; + } + + return browser; + }, + }, + { + test: [/tizen/i], + describe(ua) { + const browser = { + name: 'Tizen', + }; + const version = Utils.getFirstMatch(/(?:tizen\s?)?browser\/(\d+(\.?_?\d+)+)/i, ua) || Utils.getFirstMatch(commonVersionIdentifier, ua); + + if (version) { + browser.version = version; + } + + return browser; + }, + }, + { + test: [/qupzilla/i], + describe(ua) { + const browser = { + name: 'QupZilla', + }; + const version = Utils.getFirstMatch(/(?:qupzilla)[\s/](\d+(\.?_?\d+)+)/i, ua) || Utils.getFirstMatch(commonVersionIdentifier, ua); + + if (version) { + browser.version = version; + } + + return 
browser; + }, + }, + { + test: [/firefox|iceweasel|fxios/i], + describe(ua) { + const browser = { + name: 'Firefox', + }; + const version = Utils.getFirstMatch(/(?:firefox|iceweasel|fxios)[\s/](\d+(\.?_?\d+)+)/i, ua); + + if (version) { + browser.version = version; + } + + return browser; + }, + }, + { + test: [/electron/i], + describe(ua) { + const browser = { + name: 'Electron', + }; + const version = Utils.getFirstMatch(/(?:electron)\/(\d+(\.?_?\d+)+)/i, ua); + + if (version) { + browser.version = version; + } + + return browser; + }, + }, + { + test: [/MiuiBrowser/i], + describe(ua) { + const browser = { + name: 'Miui', + }; + const version = Utils.getFirstMatch(/(?:MiuiBrowser)[\s/](\d+(\.?_?\d+)+)/i, ua); + + if (version) { + browser.version = version; + } + + return browser; + }, + }, + { + test: [/chromium/i], + describe(ua) { + const browser = { + name: 'Chromium', + }; + const version = Utils.getFirstMatch(/(?:chromium)[\s/](\d+(\.?_?\d+)+)/i, ua) || Utils.getFirstMatch(commonVersionIdentifier, ua); + + if (version) { + browser.version = version; + } + + return browser; + }, + }, + { + test: [/chrome|crios|crmo/i], + describe(ua) { + const browser = { + name: 'Chrome', + }; + const version = Utils.getFirstMatch(/(?:chrome|crios|crmo)\/(\d+(\.?_?\d+)+)/i, ua); + + if (version) { + browser.version = version; + } + + return browser; + }, + }, + { + test: [/GSA/i], + describe(ua) { + const browser = { + name: 'Google Search', + }; + const version = Utils.getFirstMatch(/(?:GSA)\/(\d+(\.?_?\d+)+)/i, ua); + + if (version) { + browser.version = version; + } + + return browser; + }, + }, + + /* Android Browser */ + { + test(parser) { + const notLikeAndroid = !parser.test(/like android/i); + const butAndroid = parser.test(/android/i); + return notLikeAndroid && butAndroid; + }, + describe(ua) { + const browser = { + name: 'Android Browser', + }; + const version = Utils.getFirstMatch(commonVersionIdentifier, ua); + + if (version) { + browser.version = version; + } + 
+ return browser; + }, + }, + + /* PlayStation 4 */ + { + test: [/playstation 4/i], + describe(ua) { + const browser = { + name: 'PlayStation 4', + }; + const version = Utils.getFirstMatch(commonVersionIdentifier, ua); + + if (version) { + browser.version = version; + } + + return browser; + }, + }, + + /* Safari */ + { + test: [/safari|applewebkit/i], + describe(ua) { + const browser = { + name: 'Safari', + }; + const version = Utils.getFirstMatch(commonVersionIdentifier, ua); + + if (version) { + browser.version = version; + } + + return browser; + }, + }, + + /* Something else */ + { + test: [/.*/i], + describe(ua) { + /* Here we try to make sure that there are explicit details about the device + * in order to decide what regexp exactly we want to apply + * (as there is a specific decision based on that conclusion) + */ + const regexpWithoutDeviceSpec = /^(.*)\/(.*) /; + const regexpWithDeviceSpec = /^(.*)\/(.*)[ \t]\((.*)/; + const hasDeviceSpec = ua.search('\\(') !== -1; + const regexp = hasDeviceSpec ? 
regexpWithDeviceSpec : regexpWithoutDeviceSpec; + return { + name: Utils.getFirstMatch(regexp, ua), + version: Utils.getSecondMatch(regexp, ua), + }; + }, + }, +]; + +export default browsersList; diff --git a/amplify/functions/downloadDocument/node_modules/bowser/src/parser-engines.js b/amplify/functions/downloadDocument/node_modules/bowser/src/parser-engines.js new file mode 100644 index 0000000..d46d0e5 --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/bowser/src/parser-engines.js @@ -0,0 +1,120 @@ +import Utils from './utils.js'; +import { ENGINE_MAP } from './constants.js'; + +/* + * More specific goes first + */ +export default [ + /* EdgeHTML */ + { + test(parser) { + return parser.getBrowserName(true) === 'microsoft edge'; + }, + describe(ua) { + const isBlinkBased = /\sedg\//i.test(ua); + + // return blink if it's blink-based one + if (isBlinkBased) { + return { + name: ENGINE_MAP.Blink, + }; + } + + // otherwise match the version and return EdgeHTML + const version = Utils.getFirstMatch(/edge\/(\d+(\.?_?\d+)+)/i, ua); + + return { + name: ENGINE_MAP.EdgeHTML, + version, + }; + }, + }, + + /* Trident */ + { + test: [/trident/i], + describe(ua) { + const engine = { + name: ENGINE_MAP.Trident, + }; + + const version = Utils.getFirstMatch(/trident\/(\d+(\.?_?\d+)+)/i, ua); + + if (version) { + engine.version = version; + } + + return engine; + }, + }, + + /* Presto */ + { + test(parser) { + return parser.test(/presto/i); + }, + describe(ua) { + const engine = { + name: ENGINE_MAP.Presto, + }; + + const version = Utils.getFirstMatch(/presto\/(\d+(\.?_?\d+)+)/i, ua); + + if (version) { + engine.version = version; + } + + return engine; + }, + }, + + /* Gecko */ + { + test(parser) { + const isGecko = parser.test(/gecko/i); + const likeGecko = parser.test(/like gecko/i); + return isGecko && !likeGecko; + }, + describe(ua) { + const engine = { + name: ENGINE_MAP.Gecko, + }; + + const version = Utils.getFirstMatch(/gecko\/(\d+(\.?_?\d+)+)/i, ua); + 
+ if (version) { + engine.version = version; + } + + return engine; + }, + }, + + /* Blink */ + { + test: [/(apple)?webkit\/537\.36/i], + describe() { + return { + name: ENGINE_MAP.Blink, + }; + }, + }, + + /* WebKit */ + { + test: [/(apple)?webkit/i], + describe(ua) { + const engine = { + name: ENGINE_MAP.WebKit, + }; + + const version = Utils.getFirstMatch(/webkit\/(\d+(\.?_?\d+)+)/i, ua); + + if (version) { + engine.version = version; + } + + return engine; + }, + }, +]; diff --git a/amplify/functions/downloadDocument/node_modules/bowser/src/parser-os.js b/amplify/functions/downloadDocument/node_modules/bowser/src/parser-os.js new file mode 100644 index 0000000..4c516dd --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/bowser/src/parser-os.js @@ -0,0 +1,199 @@ +import Utils from './utils.js'; +import { OS_MAP } from './constants.js'; + +export default [ + /* Roku */ + { + test: [/Roku\/DVP/], + describe(ua) { + const version = Utils.getFirstMatch(/Roku\/DVP-(\d+\.\d+)/i, ua); + return { + name: OS_MAP.Roku, + version, + }; + }, + }, + + /* Windows Phone */ + { + test: [/windows phone/i], + describe(ua) { + const version = Utils.getFirstMatch(/windows phone (?:os)?\s?(\d+(\.\d+)*)/i, ua); + return { + name: OS_MAP.WindowsPhone, + version, + }; + }, + }, + + /* Windows */ + { + test: [/windows /i], + describe(ua) { + const version = Utils.getFirstMatch(/Windows ((NT|XP)( \d\d?.\d)?)/i, ua); + const versionName = Utils.getWindowsVersionName(version); + + return { + name: OS_MAP.Windows, + version, + versionName, + }; + }, + }, + + /* Firefox on iPad */ + { + test: [/Macintosh(.*?) 
FxiOS(.*?)\//], + describe(ua) { + const result = { + name: OS_MAP.iOS, + }; + const version = Utils.getSecondMatch(/(Version\/)(\d[\d.]+)/, ua); + if (version) { + result.version = version; + } + return result; + }, + }, + + /* macOS */ + { + test: [/macintosh/i], + describe(ua) { + const version = Utils.getFirstMatch(/mac os x (\d+(\.?_?\d+)+)/i, ua).replace(/[_\s]/g, '.'); + const versionName = Utils.getMacOSVersionName(version); + + const os = { + name: OS_MAP.MacOS, + version, + }; + if (versionName) { + os.versionName = versionName; + } + return os; + }, + }, + + /* iOS */ + { + test: [/(ipod|iphone|ipad)/i], + describe(ua) { + const version = Utils.getFirstMatch(/os (\d+([_\s]\d+)*) like mac os x/i, ua).replace(/[_\s]/g, '.'); + + return { + name: OS_MAP.iOS, + version, + }; + }, + }, + + /* Android */ + { + test(parser) { + const notLikeAndroid = !parser.test(/like android/i); + const butAndroid = parser.test(/android/i); + return notLikeAndroid && butAndroid; + }, + describe(ua) { + const version = Utils.getFirstMatch(/android[\s/-](\d+(\.\d+)*)/i, ua); + const versionName = Utils.getAndroidVersionName(version); + const os = { + name: OS_MAP.Android, + version, + }; + if (versionName) { + os.versionName = versionName; + } + return os; + }, + }, + + /* WebOS */ + { + test: [/(web|hpw)[o0]s/i], + describe(ua) { + const version = Utils.getFirstMatch(/(?:web|hpw)[o0]s\/(\d+(\.\d+)*)/i, ua); + const os = { + name: OS_MAP.WebOS, + }; + + if (version && version.length) { + os.version = version; + } + return os; + }, + }, + + /* BlackBerry */ + { + test: [/blackberry|\bbb\d+/i, /rim\stablet/i], + describe(ua) { + const version = Utils.getFirstMatch(/rim\stablet\sos\s(\d+(\.\d+)*)/i, ua) + || Utils.getFirstMatch(/blackberry\d+\/(\d+([_\s]\d+)*)/i, ua) + || Utils.getFirstMatch(/\bbb(\d+)/i, ua); + + return { + name: OS_MAP.BlackBerry, + version, + }; + }, + }, + + /* Bada */ + { + test: [/bada/i], + describe(ua) { + const version = 
Utils.getFirstMatch(/bada\/(\d+(\.\d+)*)/i, ua); + + return { + name: OS_MAP.Bada, + version, + }; + }, + }, + + /* Tizen */ + { + test: [/tizen/i], + describe(ua) { + const version = Utils.getFirstMatch(/tizen[/\s](\d+(\.\d+)*)/i, ua); + + return { + name: OS_MAP.Tizen, + version, + }; + }, + }, + + /* Linux */ + { + test: [/linux/i], + describe() { + return { + name: OS_MAP.Linux, + }; + }, + }, + + /* Chrome OS */ + { + test: [/CrOS/], + describe() { + return { + name: OS_MAP.ChromeOS, + }; + }, + }, + + /* Playstation 4 */ + { + test: [/PlayStation 4/], + describe(ua) { + const version = Utils.getFirstMatch(/PlayStation 4[/\s](\d+(\.\d+)*)/i, ua); + return { + name: OS_MAP.PlayStation4, + version, + }; + }, + }, +]; diff --git a/amplify/functions/downloadDocument/node_modules/bowser/src/parser-platforms.js b/amplify/functions/downloadDocument/node_modules/bowser/src/parser-platforms.js new file mode 100644 index 0000000..48b1eb1 --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/bowser/src/parser-platforms.js @@ -0,0 +1,266 @@ +import Utils from './utils.js'; +import { PLATFORMS_MAP } from './constants.js'; + +/* + * Tablets go first since usually they have more specific + * signs to detect. 
+ */ + +export default [ + /* Googlebot */ + { + test: [/googlebot/i], + describe() { + return { + type: 'bot', + vendor: 'Google', + }; + }, + }, + + /* Huawei */ + { + test: [/huawei/i], + describe(ua) { + const model = Utils.getFirstMatch(/(can-l01)/i, ua) && 'Nova'; + const platform = { + type: PLATFORMS_MAP.mobile, + vendor: 'Huawei', + }; + if (model) { + platform.model = model; + } + return platform; + }, + }, + + /* Nexus Tablet */ + { + test: [/nexus\s*(?:7|8|9|10).*/i], + describe() { + return { + type: PLATFORMS_MAP.tablet, + vendor: 'Nexus', + }; + }, + }, + + /* iPad */ + { + test: [/ipad/i], + describe() { + return { + type: PLATFORMS_MAP.tablet, + vendor: 'Apple', + model: 'iPad', + }; + }, + }, + + /* Firefox on iPad */ + { + test: [/Macintosh(.*?) FxiOS(.*?)\//], + describe() { + return { + type: PLATFORMS_MAP.tablet, + vendor: 'Apple', + model: 'iPad', + }; + }, + }, + + /* Amazon Kindle Fire */ + { + test: [/kftt build/i], + describe() { + return { + type: PLATFORMS_MAP.tablet, + vendor: 'Amazon', + model: 'Kindle Fire HD 7', + }; + }, + }, + + /* Another Amazon Tablet with Silk */ + { + test: [/silk/i], + describe() { + return { + type: PLATFORMS_MAP.tablet, + vendor: 'Amazon', + }; + }, + }, + + /* Tablet */ + { + test: [/tablet(?! 
pc)/i], + describe() { + return { + type: PLATFORMS_MAP.tablet, + }; + }, + }, + + /* iPod/iPhone */ + { + test(parser) { + const iDevice = parser.test(/ipod|iphone/i); + const likeIDevice = parser.test(/like (ipod|iphone)/i); + return iDevice && !likeIDevice; + }, + describe(ua) { + const model = Utils.getFirstMatch(/(ipod|iphone)/i, ua); + return { + type: PLATFORMS_MAP.mobile, + vendor: 'Apple', + model, + }; + }, + }, + + /* Nexus Mobile */ + { + test: [/nexus\s*[0-6].*/i, /galaxy nexus/i], + describe() { + return { + type: PLATFORMS_MAP.mobile, + vendor: 'Nexus', + }; + }, + }, + + /* Mobile */ + { + test: [/[^-]mobi/i], + describe() { + return { + type: PLATFORMS_MAP.mobile, + }; + }, + }, + + /* BlackBerry */ + { + test(parser) { + return parser.getBrowserName(true) === 'blackberry'; + }, + describe() { + return { + type: PLATFORMS_MAP.mobile, + vendor: 'BlackBerry', + }; + }, + }, + + /* Bada */ + { + test(parser) { + return parser.getBrowserName(true) === 'bada'; + }, + describe() { + return { + type: PLATFORMS_MAP.mobile, + }; + }, + }, + + /* Windows Phone */ + { + test(parser) { + return parser.getBrowserName() === 'windows phone'; + }, + describe() { + return { + type: PLATFORMS_MAP.mobile, + vendor: 'Microsoft', + }; + }, + }, + + /* Android Tablet */ + { + test(parser) { + const osMajorVersion = Number(String(parser.getOSVersion()).split('.')[0]); + return parser.getOSName(true) === 'android' && (osMajorVersion >= 3); + }, + describe() { + return { + type: PLATFORMS_MAP.tablet, + }; + }, + }, + + /* Android Mobile */ + { + test(parser) { + return parser.getOSName(true) === 'android'; + }, + describe() { + return { + type: PLATFORMS_MAP.mobile, + }; + }, + }, + + /* desktop */ + { + test(parser) { + return parser.getOSName(true) === 'macos'; + }, + describe() { + return { + type: PLATFORMS_MAP.desktop, + vendor: 'Apple', + }; + }, + }, + + /* Windows */ + { + test(parser) { + return parser.getOSName(true) === 'windows'; + }, + describe() { + return { 
+ type: PLATFORMS_MAP.desktop, + }; + }, + }, + + /* Linux */ + { + test(parser) { + return parser.getOSName(true) === 'linux'; + }, + describe() { + return { + type: PLATFORMS_MAP.desktop, + }; + }, + }, + + /* PlayStation 4 */ + { + test(parser) { + return parser.getOSName(true) === 'playstation 4'; + }, + describe() { + return { + type: PLATFORMS_MAP.tv, + }; + }, + }, + + /* Roku */ + { + test(parser) { + return parser.getOSName(true) === 'roku'; + }, + describe() { + return { + type: PLATFORMS_MAP.tv, + }; + }, + }, +]; diff --git a/amplify/functions/downloadDocument/node_modules/bowser/src/parser.js b/amplify/functions/downloadDocument/node_modules/bowser/src/parser.js new file mode 100644 index 0000000..2f9f39f --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/bowser/src/parser.js @@ -0,0 +1,496 @@ +import browserParsersList from './parser-browsers.js'; +import osParsersList from './parser-os.js'; +import platformParsersList from './parser-platforms.js'; +import enginesParsersList from './parser-engines.js'; +import Utils from './utils.js'; + +/** + * The main class that arranges the whole parsing process. 
+ */ +class Parser { + /** + * Create instance of Parser + * + * @param {String} UA User-Agent string + * @param {Boolean} [skipParsing=false] parser can skip parsing in purpose of performance + * improvements if you need to make a more particular parsing + * like {@link Parser#parseBrowser} or {@link Parser#parsePlatform} + * + * @throw {Error} in case of empty UA String + * + * @constructor + */ + constructor(UA, skipParsing = false) { + if (UA === void (0) || UA === null || UA === '') { + throw new Error("UserAgent parameter can't be empty"); + } + + this._ua = UA; + + /** + * @typedef ParsedResult + * @property {Object} browser + * @property {String|undefined} [browser.name] + * Browser name, like `"Chrome"` or `"Internet Explorer"` + * @property {String|undefined} [browser.version] Browser version as a String `"12.01.45334.10"` + * @property {Object} os + * @property {String|undefined} [os.name] OS name, like `"Windows"` or `"macOS"` + * @property {String|undefined} [os.version] OS version, like `"NT 5.1"` or `"10.11.1"` + * @property {String|undefined} [os.versionName] OS name, like `"XP"` or `"High Sierra"` + * @property {Object} platform + * @property {String|undefined} [platform.type] + * platform type, can be either `"desktop"`, `"tablet"` or `"mobile"` + * @property {String|undefined} [platform.vendor] Vendor of the device, + * like `"Apple"` or `"Samsung"` + * @property {String|undefined} [platform.model] Device model, + * like `"iPhone"` or `"Kindle Fire HD 7"` + * @property {Object} engine + * @property {String|undefined} [engine.name] + * Can be any of this: `WebKit`, `Blink`, `Gecko`, `Trident`, `Presto`, `EdgeHTML` + * @property {String|undefined} [engine.version] String version of the engine + */ + this.parsedResult = {}; + + if (skipParsing !== true) { + this.parse(); + } + } + + /** + * Get UserAgent string of current Parser instance + * @return {String} User-Agent String of the current object + * + * @public + */ + getUA() { + return this._ua; 
+ } + + /** + * Test a UA string for a regexp + * @param {RegExp} regex + * @return {Boolean} + */ + test(regex) { + return regex.test(this._ua); + } + + /** + * Get parsed browser object + * @return {Object} + */ + parseBrowser() { + this.parsedResult.browser = {}; + + const browserDescriptor = Utils.find(browserParsersList, (_browser) => { + if (typeof _browser.test === 'function') { + return _browser.test(this); + } + + if (_browser.test instanceof Array) { + return _browser.test.some(condition => this.test(condition)); + } + + throw new Error("Browser's test function is not valid"); + }); + + if (browserDescriptor) { + this.parsedResult.browser = browserDescriptor.describe(this.getUA()); + } + + return this.parsedResult.browser; + } + + /** + * Get parsed browser object + * @return {Object} + * + * @public + */ + getBrowser() { + if (this.parsedResult.browser) { + return this.parsedResult.browser; + } + + return this.parseBrowser(); + } + + /** + * Get browser's name + * @return {String} Browser's name or an empty string + * + * @public + */ + getBrowserName(toLowerCase) { + if (toLowerCase) { + return String(this.getBrowser().name).toLowerCase() || ''; + } + return this.getBrowser().name || ''; + } + + + /** + * Get browser's version + * @return {String} version of browser + * + * @public + */ + getBrowserVersion() { + return this.getBrowser().version; + } + + /** + * Get OS + * @return {Object} + * + * @example + * this.getOS(); + * { + * name: 'macOS', + * version: '10.11.12' + * } + */ + getOS() { + if (this.parsedResult.os) { + return this.parsedResult.os; + } + + return this.parseOS(); + } + + /** + * Parse OS and save it to this.parsedResult.os + * @return {*|{}} + */ + parseOS() { + this.parsedResult.os = {}; + + const os = Utils.find(osParsersList, (_os) => { + if (typeof _os.test === 'function') { + return _os.test(this); + } + + if (_os.test instanceof Array) { + return _os.test.some(condition => this.test(condition)); + } + + throw new 
Error("Browser's test function is not valid"); + }); + + if (os) { + this.parsedResult.os = os.describe(this.getUA()); + } + + return this.parsedResult.os; + } + + /** + * Get OS name + * @param {Boolean} [toLowerCase] return lower-cased value + * @return {String} name of the OS — macOS, Windows, Linux, etc. + */ + getOSName(toLowerCase) { + const { name } = this.getOS(); + + if (toLowerCase) { + return String(name).toLowerCase() || ''; + } + + return name || ''; + } + + /** + * Get OS version + * @return {String} full version with dots ('10.11.12', '5.6', etc) + */ + getOSVersion() { + return this.getOS().version; + } + + /** + * Get parsed platform + * @return {{}} + */ + getPlatform() { + if (this.parsedResult.platform) { + return this.parsedResult.platform; + } + + return this.parsePlatform(); + } + + /** + * Get platform name + * @param {Boolean} [toLowerCase=false] + * @return {*} + */ + getPlatformType(toLowerCase = false) { + const { type } = this.getPlatform(); + + if (toLowerCase) { + return String(type).toLowerCase() || ''; + } + + return type || ''; + } + + /** + * Get parsed platform + * @return {{}} + */ + parsePlatform() { + this.parsedResult.platform = {}; + + const platform = Utils.find(platformParsersList, (_platform) => { + if (typeof _platform.test === 'function') { + return _platform.test(this); + } + + if (_platform.test instanceof Array) { + return _platform.test.some(condition => this.test(condition)); + } + + throw new Error("Browser's test function is not valid"); + }); + + if (platform) { + this.parsedResult.platform = platform.describe(this.getUA()); + } + + return this.parsedResult.platform; + } + + /** + * Get parsed engine + * @return {{}} + */ + getEngine() { + if (this.parsedResult.engine) { + return this.parsedResult.engine; + } + + return this.parseEngine(); + } + + /** + * Get engines's name + * @return {String} Engines's name or an empty string + * + * @public + */ + getEngineName(toLowerCase) { + if (toLowerCase) { + return 
String(this.getEngine().name).toLowerCase() || ''; + } + return this.getEngine().name || ''; + } + + /** + * Get parsed platform + * @return {{}} + */ + parseEngine() { + this.parsedResult.engine = {}; + + const engine = Utils.find(enginesParsersList, (_engine) => { + if (typeof _engine.test === 'function') { + return _engine.test(this); + } + + if (_engine.test instanceof Array) { + return _engine.test.some(condition => this.test(condition)); + } + + throw new Error("Browser's test function is not valid"); + }); + + if (engine) { + this.parsedResult.engine = engine.describe(this.getUA()); + } + + return this.parsedResult.engine; + } + + /** + * Parse full information about the browser + * @returns {Parser} + */ + parse() { + this.parseBrowser(); + this.parseOS(); + this.parsePlatform(); + this.parseEngine(); + + return this; + } + + /** + * Get parsed result + * @return {ParsedResult} + */ + getResult() { + return Utils.assign({}, this.parsedResult); + } + + /** + * Check if parsed browser matches certain conditions + * + * @param {Object} checkTree It's one or two layered object, + * which can include a platform or an OS on the first layer + * and should have browsers specs on the bottom-laying layer + * + * @returns {Boolean|undefined} Whether the browser satisfies the set conditions or not. + * Returns `undefined` when the browser is no described in the checkTree object. 
+ * + * @example + * const browser = Bowser.getParser(window.navigator.userAgent); + * if (browser.satisfies({chrome: '>118.01.1322' })) + * // or with os + * if (browser.satisfies({windows: { chrome: '>118.01.1322' } })) + * // or with platforms + * if (browser.satisfies({desktop: { chrome: '>118.01.1322' } })) + */ + satisfies(checkTree) { + const platformsAndOSes = {}; + let platformsAndOSCounter = 0; + const browsers = {}; + let browsersCounter = 0; + + const allDefinitions = Object.keys(checkTree); + + allDefinitions.forEach((key) => { + const currentDefinition = checkTree[key]; + if (typeof currentDefinition === 'string') { + browsers[key] = currentDefinition; + browsersCounter += 1; + } else if (typeof currentDefinition === 'object') { + platformsAndOSes[key] = currentDefinition; + platformsAndOSCounter += 1; + } + }); + + if (platformsAndOSCounter > 0) { + const platformsAndOSNames = Object.keys(platformsAndOSes); + const OSMatchingDefinition = Utils.find(platformsAndOSNames, name => (this.isOS(name))); + + if (OSMatchingDefinition) { + const osResult = this.satisfies(platformsAndOSes[OSMatchingDefinition]); + + if (osResult !== void 0) { + return osResult; + } + } + + const platformMatchingDefinition = Utils.find( + platformsAndOSNames, + name => (this.isPlatform(name)), + ); + if (platformMatchingDefinition) { + const platformResult = this.satisfies(platformsAndOSes[platformMatchingDefinition]); + + if (platformResult !== void 0) { + return platformResult; + } + } + } + + if (browsersCounter > 0) { + const browserNames = Object.keys(browsers); + const matchingDefinition = Utils.find(browserNames, name => (this.isBrowser(name, true))); + + if (matchingDefinition !== void 0) { + return this.compareVersion(browsers[matchingDefinition]); + } + } + + return undefined; + } + + /** + * Check if the browser name equals the passed string + * @param browserName The string to compare with the browser name + * @param [includingAlias=false] The flag showing whether 
alias will be included into comparison + * @returns {boolean} + */ + isBrowser(browserName, includingAlias = false) { + const defaultBrowserName = this.getBrowserName().toLowerCase(); + let browserNameLower = browserName.toLowerCase(); + const alias = Utils.getBrowserTypeByAlias(browserNameLower); + + if (includingAlias && alias) { + browserNameLower = alias.toLowerCase(); + } + return browserNameLower === defaultBrowserName; + } + + compareVersion(version) { + let expectedResults = [0]; + let comparableVersion = version; + let isLoose = false; + + const currentBrowserVersion = this.getBrowserVersion(); + + if (typeof currentBrowserVersion !== 'string') { + return void 0; + } + + if (version[0] === '>' || version[0] === '<') { + comparableVersion = version.substr(1); + if (version[1] === '=') { + isLoose = true; + comparableVersion = version.substr(2); + } else { + expectedResults = []; + } + if (version[0] === '>') { + expectedResults.push(1); + } else { + expectedResults.push(-1); + } + } else if (version[0] === '=') { + comparableVersion = version.substr(1); + } else if (version[0] === '~') { + isLoose = true; + comparableVersion = version.substr(1); + } + + return expectedResults.indexOf( + Utils.compareVersions(currentBrowserVersion, comparableVersion, isLoose), + ) > -1; + } + + isOS(osName) { + return this.getOSName(true) === String(osName).toLowerCase(); + } + + isPlatform(platformType) { + return this.getPlatformType(true) === String(platformType).toLowerCase(); + } + + isEngine(engineName) { + return this.getEngineName(true) === String(engineName).toLowerCase(); + } + + /** + * Is anything? 
Check if the browser is called "anything", + * the OS called "anything" or the platform called "anything" + * @param {String} anything + * @param [includingAlias=false] The flag showing whether alias will be included into comparison + * @returns {Boolean} + */ + is(anything, includingAlias = false) { + return this.isBrowser(anything, includingAlias) || this.isOS(anything) + || this.isPlatform(anything); + } + + /** + * Check if any of the given values satisfies this.is(anything) + * @param {String[]} anythings + * @returns {Boolean} + */ + some(anythings = []) { + return anythings.some(anything => this.is(anything)); + } +} + +export default Parser; diff --git a/amplify/functions/downloadDocument/node_modules/bowser/src/utils.js b/amplify/functions/downloadDocument/node_modules/bowser/src/utils.js new file mode 100644 index 0000000..d1174bf --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/bowser/src/utils.js @@ -0,0 +1,309 @@ +import { BROWSER_MAP, BROWSER_ALIASES_MAP } from './constants.js'; + +export default class Utils { + /** + * Get first matched item for a string + * @param {RegExp} regexp + * @param {String} ua + * @return {Array|{index: number, input: string}|*|boolean|string} + */ + static getFirstMatch(regexp, ua) { + const match = ua.match(regexp); + return (match && match.length > 0 && match[1]) || ''; + } + + /** + * Get second matched item for a string + * @param regexp + * @param {String} ua + * @return {Array|{index: number, input: string}|*|boolean|string} + */ + static getSecondMatch(regexp, ua) { + const match = ua.match(regexp); + return (match && match.length > 1 && match[2]) || ''; + } + + /** + * Match a regexp and return a constant or undefined + * @param {RegExp} regexp + * @param {String} ua + * @param {*} _const Any const that will be returned if regexp matches the string + * @return {*} + */ + static matchAndReturnConst(regexp, ua, _const) { + if (regexp.test(ua)) { + return _const; + } + return void (0); + } + + 
static getWindowsVersionName(version) { + switch (version) { + case 'NT': return 'NT'; + case 'XP': return 'XP'; + case 'NT 5.0': return '2000'; + case 'NT 5.1': return 'XP'; + case 'NT 5.2': return '2003'; + case 'NT 6.0': return 'Vista'; + case 'NT 6.1': return '7'; + case 'NT 6.2': return '8'; + case 'NT 6.3': return '8.1'; + case 'NT 10.0': return '10'; + default: return undefined; + } + } + + /** + * Get macOS version name + * 10.5 - Leopard + * 10.6 - Snow Leopard + * 10.7 - Lion + * 10.8 - Mountain Lion + * 10.9 - Mavericks + * 10.10 - Yosemite + * 10.11 - El Capitan + * 10.12 - Sierra + * 10.13 - High Sierra + * 10.14 - Mojave + * 10.15 - Catalina + * + * @example + * getMacOSVersionName("10.14") // 'Mojave' + * + * @param {string} version + * @return {string} versionName + */ + static getMacOSVersionName(version) { + const v = version.split('.').splice(0, 2).map(s => parseInt(s, 10) || 0); + v.push(0); + if (v[0] !== 10) return undefined; + switch (v[1]) { + case 5: return 'Leopard'; + case 6: return 'Snow Leopard'; + case 7: return 'Lion'; + case 8: return 'Mountain Lion'; + case 9: return 'Mavericks'; + case 10: return 'Yosemite'; + case 11: return 'El Capitan'; + case 12: return 'Sierra'; + case 13: return 'High Sierra'; + case 14: return 'Mojave'; + case 15: return 'Catalina'; + default: return undefined; + } + } + + /** + * Get Android version name + * 1.5 - Cupcake + * 1.6 - Donut + * 2.0 - Eclair + * 2.1 - Eclair + * 2.2 - Froyo + * 2.x - Gingerbread + * 3.x - Honeycomb + * 4.0 - Ice Cream Sandwich + * 4.1 - Jelly Bean + * 4.4 - KitKat + * 5.x - Lollipop + * 6.x - Marshmallow + * 7.x - Nougat + * 8.x - Oreo + * 9.x - Pie + * + * @example + * getAndroidVersionName("7.0") // 'Nougat' + * + * @param {string} version + * @return {string} versionName + */ + static getAndroidVersionName(version) { + const v = version.split('.').splice(0, 2).map(s => parseInt(s, 10) || 0); + v.push(0); + if (v[0] === 1 && v[1] < 5) return undefined; + if (v[0] === 1 && 
v[1] < 6) return 'Cupcake'; + if (v[0] === 1 && v[1] >= 6) return 'Donut'; + if (v[0] === 2 && v[1] < 2) return 'Eclair'; + if (v[0] === 2 && v[1] === 2) return 'Froyo'; + if (v[0] === 2 && v[1] > 2) return 'Gingerbread'; + if (v[0] === 3) return 'Honeycomb'; + if (v[0] === 4 && v[1] < 1) return 'Ice Cream Sandwich'; + if (v[0] === 4 && v[1] < 4) return 'Jelly Bean'; + if (v[0] === 4 && v[1] >= 4) return 'KitKat'; + if (v[0] === 5) return 'Lollipop'; + if (v[0] === 6) return 'Marshmallow'; + if (v[0] === 7) return 'Nougat'; + if (v[0] === 8) return 'Oreo'; + if (v[0] === 9) return 'Pie'; + return undefined; + } + + /** + * Get version precisions count + * + * @example + * getVersionPrecision("1.10.3") // 3 + * + * @param {string} version + * @return {number} + */ + static getVersionPrecision(version) { + return version.split('.').length; + } + + /** + * Calculate browser version weight + * + * @example + * compareVersions('1.10.2.1', '1.8.2.1.90') // 1 + * compareVersions('1.010.2.1', '1.09.2.1.90'); // 1 + * compareVersions('1.10.2.1', '1.10.2.1'); // 0 + * compareVersions('1.10.2.1', '1.0800.2'); // -1 + * compareVersions('1.10.2.1', '1.10', true); // 0 + * + * @param {String} versionA versions versions to compare + * @param {String} versionB versions versions to compare + * @param {boolean} [isLoose] enable loose comparison + * @return {Number} comparison result: -1 when versionA is lower, + * 1 when versionA is bigger, 0 when both equal + */ + /* eslint consistent-return: 1 */ + static compareVersions(versionA, versionB, isLoose = false) { + // 1) get common precision for both versions, for example for "10.0" and "9" it should be 2 + const versionAPrecision = Utils.getVersionPrecision(versionA); + const versionBPrecision = Utils.getVersionPrecision(versionB); + + let precision = Math.max(versionAPrecision, versionBPrecision); + let lastPrecision = 0; + + const chunks = Utils.map([versionA, versionB], (version) => { + const delta = precision - 
Utils.getVersionPrecision(version); + + // 2) "9" -> "9.0" (for precision = 2) + const _version = version + new Array(delta + 1).join('.0'); + + // 3) "9.0" -> ["000000000"", "000000009"] + return Utils.map(_version.split('.'), chunk => new Array(20 - chunk.length).join('0') + chunk).reverse(); + }); + + // adjust precision for loose comparison + if (isLoose) { + lastPrecision = precision - Math.min(versionAPrecision, versionBPrecision); + } + + // iterate in reverse order by reversed chunks array + precision -= 1; + while (precision >= lastPrecision) { + // 4) compare: "000000009" > "000000010" = false (but "9" > "10" = true) + if (chunks[0][precision] > chunks[1][precision]) { + return 1; + } + + if (chunks[0][precision] === chunks[1][precision]) { + if (precision === lastPrecision) { + // all version chunks are same + return 0; + } + + precision -= 1; + } else if (chunks[0][precision] < chunks[1][precision]) { + return -1; + } + } + + return undefined; + } + + /** + * Array::map polyfill + * + * @param {Array} arr + * @param {Function} iterator + * @return {Array} + */ + static map(arr, iterator) { + const result = []; + let i; + if (Array.prototype.map) { + return Array.prototype.map.call(arr, iterator); + } + for (i = 0; i < arr.length; i += 1) { + result.push(iterator(arr[i])); + } + return result; + } + + /** + * Array::find polyfill + * + * @param {Array} arr + * @param {Function} predicate + * @return {Array} + */ + static find(arr, predicate) { + let i; + let l; + if (Array.prototype.find) { + return Array.prototype.find.call(arr, predicate); + } + for (i = 0, l = arr.length; i < l; i += 1) { + const value = arr[i]; + if (predicate(value, i)) { + return value; + } + } + return undefined; + } + + /** + * Object::assign polyfill + * + * @param {Object} obj + * @param {Object} ...objs + * @return {Object} + */ + static assign(obj, ...assigners) { + const result = obj; + let i; + let l; + if (Object.assign) { + return Object.assign(obj, ...assigners); + } + 
for (i = 0, l = assigners.length; i < l; i += 1) { + const assigner = assigners[i]; + if (typeof assigner === 'object' && assigner !== null) { + const keys = Object.keys(assigner); + keys.forEach((key) => { + result[key] = assigner[key]; + }); + } + } + return obj; + } + + /** + * Get short version/alias for a browser name + * + * @example + * getBrowserAlias('Microsoft Edge') // edge + * + * @param {string} browserName + * @return {string} + */ + static getBrowserAlias(browserName) { + return BROWSER_ALIASES_MAP[browserName]; + } + + /** + * Get short version/alias for a browser name + * + * @example + * getBrowserAlias('edge') // Microsoft Edge + * + * @param {string} browserAlias + * @return {string} + */ + static getBrowserTypeByAlias(browserAlias) { + return BROWSER_MAP[browserAlias] || ''; + } +} diff --git a/amplify/functions/downloadDocument/node_modules/fast-xml-parser/CHANGELOG.md b/amplify/functions/downloadDocument/node_modules/fast-xml-parser/CHANGELOG.md new file mode 100644 index 0000000..021eab6 --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/fast-xml-parser/CHANGELOG.md @@ -0,0 +1,594 @@ +Note: If you find missing information about particular minor version, that version must have been changed without any functional change in this library. + +**4.4.1 / 2024-07-28** +- v5 fix: maximum length limit to currency value +- fix #634: build attributes with oneListGroup and attributesGroupName (#653)(By [Andreas Naziris](https://github.com/a-rasin)) +- fix: get oneListGroup to work as expected for array of strings (#662)(By [Andreas Naziris](https://github.com/a-rasin)) + +**4.4.0 / 2024-05-18** +- fix #654: parse attribute list correctly for self closing stop node. +- fix: validator bug when closing tag is not opened. 
(#647) (By [Ryosuke Fukatani](https://github.com/RyosukeFukatani)) +- fix #581: typings; return type of `tagValueProcessor` & `attributeValueProcessor` (#582) (By [monholm]()) + +**4.3.6 / 2024-03-16** +- Add support for parsing HTML numeric entities (#645) (By [Jonas Schade ](https://github.com/DerZade)) + +**4.3.5 / 2024-02-24** +- code for v5 is added for experimental use + +**4.3.4 / 2024-01-10** +- fix: Don't escape entities in CDATA sections (#633) (By [wackbyte](https://github.com/wackbyte)) + +**4.3.3 / 2024-01-10** +- Remove unnecessary regex + +**4.3.2 / 2023-10-02** +- fix `jObj.hasOwnProperty` when give input is null (By [Arda TANRIKULU](https://github.com/ardatan)) + +**4.3.1 / 2023-09-24** +- revert back "Fix typings for builder and parser to make return type generic" to avoid failure of existing projects. Need to decide a common approach. + +**4.3.0 / 2023-09-20** +- Fix stopNodes to work with removeNSPrefix (#607) (#608) (By [Craig Andrews]https://github.com/candrews)) +- Fix #610 ignore properties set to Object.prototype +- Fix typings for builder and parser to make return type generic (By [Sarah Dayan](https://github.com/sarahdayan)) + +**4.2.7 / 2023-07-30** +- Fix: builder should set text node correctly when only textnode is present (#589) (By [qianqing](https://github.com/joneqian)) +- Fix: Fix for null and undefined attributes when building xml (#585) (#598). A null or undefined value should be ignored. (By [Eugenio Ceschia](https://github.com/cecia234)) + +**4.2.6 / 2023-07-17** +- Fix: Remove trailing slash from jPath for self-closing tags (#595) (By [Maciej Radzikowski](https://github.com/m-radzikowski)) + +**4.2.5 / 2023-06-22** +- change code implementation + +**4.2.4 / 2023-06-06** +- fix security bug + +**4.2.3 / 2023-06-05** +- fix security bug + +**4.2.2 / 2023-04-18** +- fix #562: fix unpaired tag when it comes in last of a nested tag. 
Also throw error when unpaired tag is used as closing tag + +**4.2.1 / 2023-04-18** +- fix: jpath after unpaired tags + +**4.2.0 / 2023-04-09** +- support `updateTag` parser property + +**4.1.4 / 2023-04-08** +- update typings to let user create XMLBuilder instance without options (#556) (By [Patrick](https://github.com/omggga)) +- fix: IsArray option isn't parsing tags with 0 as value correctly #490 (#557) (By [Aleksandr Murashkin](https://github.com/p-kuen)) +- feature: support `oneListGroup` to group repeated children tags udder single group + +**4.1.3 / 2023-02-26** +- fix #546: Support complex entity value + +**4.1.2 / 2023-02-12** +- Security Fix + +**4.1.1 / 2023-02-03** +- Fix #540: ignoreAttributes breaks unpairedTags +- Refactor XML builder code + +**4.1.0 / 2023-02-02** +- Fix '<' or '>' in DTD comment throwing an error. (#533) (By [Adam Baker](https://github.com/Cwazywierdo)) +- Set "eNotation" to 'true' as default + +**4.0.15 / 2023-01-25** +- make "eNotation" optional + +**4.0.14 / 2023-01-22** +- fixed: add missed typing "eNotation" to parse values + +**4.0.13 / 2023-01-07** +- preserveorder formatting (By [mdeknowis](https://github.com/mdeknowis)) +- support `transformAttributeName` (By [Erik Rothoff Andersson](https://github.com/erkie)) + +**4.0.12 / 2022-11-19** +- fix typescript + +**4.0.11 / 2022-10-05** +- fix #501: parse for entities only once + +**4.0.10 / 2022-09-14** +- fix broken links in demo site (By [Yannick Lang](https://github.com/layaxx)) +- fix #491: tagValueProcessor type definition (By [Andrea Francesco Speziale](https://github.com/andreafspeziale)) +- Add jsdocs for tagValueProcessor + + +**4.0.9 / 2022-07-10** +- fix #470: stop-tag can have self-closing tag with same name +- fix #472: stopNode can have any special tag inside +- Allow !ATTLIST and !NOTATION with DOCTYPE +- Add transformTagName option to transform tag names when parsing (#469) (By [Erik Rothoff Andersson](https://github.com/erkie)) + +**4.0.8 / 2022-05-28** +- Fix 
CDATA parsing returning empty string when value = 0 (#451) (By [ndelanou](https://github.com/ndelanou)) +- Fix stopNodes when same tag appears inside node (#456) (By [patrickshipe](https://github.com/patrickshipe)) +- fix #468: prettify own properties only + +**4.0.7 / 2022-03-18** +- support CDATA even if tag order is not preserved +- support Comments even if tag order is not preserved +- fix #446: XMLbuilder should not indent XML declaration + +**4.0.6 / 2022-03-08** +- fix: call tagValueProcessor only once for array items +- fix: missing changed for #437 + +**4.0.5 / 2022-03-06** +- fix #437: call tagValueProcessor from XML builder + +**4.0.4 / 2022-03-03** +- fix #435: should skip unpaired and self-closing nodes when set as stopnodes + +**4.0.3 / 2022-02-15** +- fix: ReferenceError when Bundled with Strict (#431) (By [Andreas Heissenberger](https://github.com/aheissenberger)) + + +**4.0.2 / 2022-02-04** +- builder supports `suppressUnpairedNode` +- parser supports `ignoreDeclaration` and `ignorePiTags` +- fix: when comment is parsed as text value if given as ` ...` #423 +- builder supports decoding `&` + +**4.0.1 / 2022-01-08** +- fix builder for pi tag +- fix: support suppressBooleanAttrs by builder + +**4.0.0 / 2022-01-06** +- Generating different combined, parser only, builder only, validator only browser bundles +- Keeping cjs modules as they can be imported in cjs and esm modules both. Otherwise refer `esm` branch. + +**4.0.0-beta.8 / 2021-12-13** +- call tagValueProcessor for stop nodes + +**4.0.0-beta.7 / 2021-12-09** +- fix Validator bug when an attribute has no value but '=' only +- XML Builder should suppress unpaired tags by default. +- documents update for missing features +- refactoring to use Object.assign +- refactoring to remove repeated code + +**4.0.0-beta.6 / 2021-12-05** +- Support PI Tags processing +- Support `suppressBooleanAttributes` by XML Builder for attributes with value `true`. 
+ +**4.0.0-beta.5 / 2021-12-04** +- fix: when a tag with name "attributes" + +**4.0.0-beta.4 / 2021-12-02** +- Support HTML document parsing +- skip stop nodes parsing when building the XML from JS object +- Support external entites without DOCTYPE +- update dev dependency: strnum v1.0.5 to fix long number issue + +**4.0.0-beta.3 / 2021-11-30** +- support global stopNodes expression like "*.stop" +- support self-closing and paired unpaired tags +- fix: CDATA should not be parsed. +- Fix typings for XMLBuilder (#396)(By [Anders Emil Salvesen](https://github.com/andersem)) +- supports XML entities, HTML entities, DOCTYPE entities + +**⚠️ 4.0.0-beta.2 / 2021-11-19** +- rename `attrMap` to `attibutes` in parser output when `preserveOrder:true` +- supports unpairedTags + +**⚠️ 4.0.0-beta.1 / 2021-11-18** +- Parser returns an array now + - to make the structure common + - and to return root level detail +- renamed `cdataTagName` to `cdataPropName` +- Added `commentPropName` +- fix typings + +**⚠️ 4.0.0-beta.0 / 2021-11-16** +- Name change of many configuration properties. + - `attrNodeName` to `attributesGroupName` + - `attrValueProcessor` to `attributeValueProcessor` + - `parseNodeValue` to `parseTagValue` + - `ignoreNameSpace` to `removeNSPrefix` + - `numParseOptions` to `numberParseOptions` + - spelling correction for `suppressEmptyNode` +- Name change of cli and browser bundle to **fxparser** +- `isArray` option is added to parse a tag into array +- `preserveOrder` option is added to render XML in such a way that the result js Object maintains the order of properties same as in XML. +- Processing behaviour of `tagValueProcessor` and `attributeValueProcessor` are changes with extra input parameters +- j2xparser is renamed to XMLBuilder. +- You need to build XML parser instance for given options first before parsing XML. 
+- fix #327, #336: throw error when extra text after XML content +- fix #330: attribute value can have '\n', +- fix #350: attrbiutes can be separated by '\n' from tagname + +3.21.1 / 2021-10-31 +- Correctly format JSON elements with a text prop but no attribute props ( By [haddadnj](https://github.com/haddadnj) ) + +3.21.0 / 2021-10-25 + - feat: added option `rootNodeName` to set tag name for array input when converting js object to XML. + - feat: added option `alwaysCreateTextNode` to force text node creation (by: *@massimo-ua*) + - ⚠️ feat: Better error location for unclosed tags. (by *@Gei0r*) + - Some error messages would be changed when validating XML. Eg + - `{ InvalidXml: "Invalid '[ \"rootNode\"]' found." }` → `{InvalidTag: "Unclosed tag 'rootNode'."}` + - `{ InvalidTag: "Closing tag 'rootNode' is expected inplace of 'rootnode'." }` → `{ InvalidTag: "Expected closing tag 'rootNode' (opened in line 1) instead of closing tag 'rootnode'."}` + - ⚠️ feat: Column in error response when validating XML +```js +{ + "code": "InvalidAttr", + "msg": "Attribute 'abc' is repeated.", + "line": 1, + "col": 22 +} +``` + +3.20.1 / 2021-09-25 + - update strnum package + +3.20.0 / 2021-09-10 + - Use strnum npm package to parse string to number + - breaking change: long number will be parsed to scientific notation. 
+ +3.19.0 / 2021-03-14 + - License changed to MIT original + - Fix #321 : namespace tag parsing + +3.18.0 / 2021-02-05 + - Support RegEx and function in arrayMode option + - Fix #317 : validate nested PI tags + +3.17.4 / 2020-06-07 + - Refactor some code to support IE11 + - Fix: `` space as attribute string + +3.17.3 / 2020-05-23 + - Fix: tag name separated by \n \t + - Fix: throw error for unclosed tags + +3.17.2 / 2020-05-23 + - Fixed an issue in processing doctype tag + - Fixed tagName where it should not have whitespace chars + +3.17.1 / 2020-05-19 + - Fixed an issue in checking opening tag + +3.17.0 / 2020-05-18 + - parser: fix '<' issue when it comes in aatr value + - parser: refactoring to remove dependency from regex + - validator: fix IE 11 issue for error messages + - updated dev dependencies + - separated benchmark module to sub-module + - breaking change: comments will not be removed from CDATA data + +3.16.0 / 2020-01-12 + - validaor: fix for ampersand characters (#215) + - refactoring to support unicode chars in tag name + - update typing for validator error + +3.15.1 / 2019-12-09 + - validaor: fix multiple roots are not allowed + +3.15.0 / 2019-11-23 + - validaor: improve error messaging + - validator: add line number in case of error + - validator: add more error scenarios to make it more descriptive + +3.14.0 / 2019-10-25 + - arrayMode for XML to JS obj parsing + +3.13.0 / 2019-10-02 + - pass tag/attr name to tag/attr value processor + - inbuilt optional validation with XML parser + +3.12.21 / 2019-10-02 + - Fix validator for unclosed XMLs + - move nimnjs dependency to dev dependency + - update dependencies + +3.12.20 / 2019-08-16 + - Revert: Fix #167: '>' in attribute value as it is causing high performance degrade. + +3.12.19 / 2019-07-28 + - Fix js to xml parser should work for date values. 
(broken: `tagValueProcessor` will receive the original value instead of string always) (breaking change) + +3.12.18 / 2019-07-27 + - remove configstore dependency + +3.12.17 / 2019-07-14 + - Fix #167: '>' in attribute value + +3.12.16 / 2019-03-23 + - Support a new option "stopNodes". (#150) +Accept the list of tags which are not required to be parsed. Instead, all the nested tag and data will be assigned as string. + - Don't show post-install message + +3.12.12 / 2019-01-11 + - fix : IE parseInt, parseFloat error + +3.12.11 / 2018-12-24 + - fix #132: "/" should not be parsed as boolean attr in case of self closing tags + +3.12.9 / 2018-11-23 + - fix #129 : validator should not fail when an atrribute name is 'length' + +3.12.8 / 2018-11-22 + - fix #128 : use 'attrValueProcessor' to process attribute value in json2xml parser + +3.12.6 / 2018-11-10 + - Fix #126: check for type + +3.12.4 / 2018-09-12 + - Fix: include tasks in npm package + +3.12.3 / 2018-09-12 + - Fix CLI issue raised in last PR + +3.12.2 / 2018-09-11 + - Fix formatting for JSON to XML output + - Migrate to webpack (PR merged) + - fix cli (PR merged) + +3.12.0 / 2018-08-06 + - Support hexadecimal values + - Support true number parsing + +3.11.2 / 2018-07-23 + - Update Demo for more options + - Update license information + - Update readme for formatting, users, and spelling mistakes + - Add missing typescript definition for j2xParser + - refactoring: change filenames + +3.11.1 / 2018-06-05 + - fix #93: read the text after self closing tag + +3.11.0 / 2018-05-20 + - return defaultOptions if there are not options in buildOptions function + - added localeRange declaration in parser.d.ts + - Added support of cyrillic characters in validator XML + - fixed bug in validator work when XML data with byte order marker + +3.10.0 / 2018-05-13 + - Added support of cyrillic characters in parsing XML to JSON + +3.9.11 / 2018-05-09 + - fix https://github.com/NaturalIntelligence/fast-xml-parser/issues/80 fix nimn chars 
+ - update package information + - fix https://github.com/NaturalIntelligence/fast-xml-parser/issues/86: json 2 xml parser : property with null value should be parsed to self closing tag. + - update online demo + - revert zombiejs to old version to support old version of node + - update dependencies + +3.3.10 / 2018-04-23 + - fix #77 : parse even if closing tag has space before '>' + - include all css & js lib in demo app + - remove babel dependencies until needed + +3.3.9 / 2018-04-18 + - fix #74 : TS2314 TypeScript compiler error + +3.3.8 / 2018-04-17 + - fix #73 : IE doesn't support Object.assign + +3.3.7 / 2018-04-14 + - fix: use let insted of const in for loop of validator + - Merge pull request + https://github.com/NaturalIntelligence/fast-xml-parser/issues/71 from bb/master + first draft of typings for typescript + https://github.com/NaturalIntelligence/fast-xml-parser/issues/69 + - Merge pull request + https://github.com/NaturalIntelligence/fast-xml-parser/issues/70 from bb/patch-1 + fix some typos in readme + +3.3.6 / 2018-03-21 + - change arrow functions to full notation for IE compatibility + +3.3.5 / 2018-03-15 + - fix https://github.com/NaturalIntelligence/fast-xml-parser/issues/67 : attrNodeName invalid behavior + - fix: remove decodeHTML char condition + +3.3.4 / 2018-03-14 + - remove dependency on "he" package + - refactor code to separate methods in separate files. + - draft code for transforming XML to json string. It is not officially documented due to performance issue. + +3.3.0 / 2018-03-05 + - use common default options for XML parsing for consistency. And add `parseToNimn` method. 
+ - update nexttodo + - update README about XML to Nimn transformation and remove special notes about 3.x release + - update CONTRIBUTING.ms mentioning nexttodo + - add negative case for XML PIs + - validate xml processing instruction tags https://github.com/NaturalIntelligence/fast-xml-parser/issues/62 + - nimndata: handle array with object + - nimndata: node with nested node and text node + - nimndata: handle attributes and text node + - nimndata: add options, handle array + - add xml to nimn data converter + - x2j: direct access property with tagname + - update changelog + - fix validator when single quote presents in value enclosed with double quotes or vice versa + - Revert "remove unneded nimnjs dependency, move opencollective to devDependencies and replace it + with more light opencollective-postinstall" + This reverts commit d47aa7181075d82db4fee97fd8ea32b056fe3f46. + - Merge pull request: https://github.com/NaturalIntelligence/fast-xml-parser/issues/63 from HaroldPutman/suppress-undefined + Keep undefined nodes out of the XML output : This is useful when you are deleting nodes from the JSON and rewriting XML. 
+ +3.2.4 / 2018-03-01 + - fix #59 fix in validator when open quote presents in attribute value + - Create nexttodo.md + - exclude static from bitHound tests + - add package lock + +3.2.3 / 2018-02-28 + - Merge pull request from Delagen/master: fix namespaces can contain the same characters as xml names + +3.2.2 / 2018-02-22 + - fix: attribute xmlns should not be removed if ignoreNameSpace is false + - create CONTRIBUTING.md + +3.2.1 / 2018-02-17 + - fix: empty attribute should be parsed + +3.2.0 / 2018-02-16 + - Merge pull request : Dev to Master + - Update README and version + - j2x:add performance test + - j2x: Remove extra empty line before closing tag + - j2x: suppress empty nodes to self closing node if configured + - j2x: provide option to give indentation depth + - j2x: make optional formatting + - j2x: encodeHTMLchat + - j2x: handle cdata tag + - j2x: handle grouped attributes + - convert json to xml + - nested object + - array + - attributes + - text value + - small refactoring + - Merge pull request: Update cli.js to let user validate XML file or data + - Add option for rendering CDATA as separate property + +3.0.1 / 2018-02-09 + - fix CRLF: replace it with single space in attributes value only. 
+ +3.0.0 / 2018-02-08 + - change online tool with new changes + - update info about new options + - separate tag value processing to separate function + - make HTML decoding optional + - give an option to allow boolean attributes + - change cli options as per v3 + - Correct comparison table format on README + - update v3 information + - some performance improvement changes + - Make regex object local to the method and move some common methods to util + - Change parser to + - handle multiple instances of CDATA + - make triming of value optionals + - HTML decode attribute and text value + - refactor code to separate files + - Ignore newline chars without RE (in validator) + - validate for XML prolog + - Validate DOCTYPE without RE + - Update validator to return error response + - Update README to add detail about V3 + - Separate xmlNode model class + - include vscode debug config + - fix for repeated object + - fix attribute regex for boolean attributes + - Fix validator for invalid attributes +2.9.4 / 2018-02-02 + - Merge pull request: Decode HTML characters + - refactor source folder name + - ignore bundle / browser js to be published to npm +2.9.3 / 2018-01-26 + - Merge pull request: Correctly remove CRLF line breaks + - Enable to parse attribute in online editor + - Fix testing demo app test + - Describe parsing options + - Add options for online demo +2.9.2 / 2018-01-18 + - Remove check if tag starting with "XML" + - Fix: when there are spaces before / after CDATA + +2.9.1 / 2018-01-16 + - Fix: newline should be replaced with single space + - Fix: for single and multiline comments + - validate xml with CDATA + - Fix: the issue when there is no space between 2 attributes + - Fix: https://github.com/NaturalIntelligence/fast-xml-parser/issues/33: when there is newline char in attr val, it doesn't parse + - Merge pull request: fix ignoreNamespace + - fix: don't wrap attributes if only namespace attrs + - fix: use portfinder for run tests, update deps + - fix: don't 
treat namespaces as attributes when ignoreNamespace enabled + +2.9.0 / 2018-01-10 + - Rewrite the validator to handle large files. + Ignore DOCTYPE validation. + - Fix: When attribute value has equal sign + +2.8.3 / 2017-12-15 + - Fix: when a tag has value along with subtags + +2.8.2 / 2017-12-04 + - Fix value parsing for IE + +2.8.1 / 2017-12-01 + - fix: validator should return false instead of err when invalid XML + +2.8.0 / 2017-11-29 + - Add CLI option to ignore value conversion + - Fix variable name when filename is given on CLI + - Update CLI help text + - Merge pull request: xml2js: Accept standard input + - Test Node 8 + - Update dependencies + - Bundle readToEnd + - Add ability to read from standard input + +2.7.4 / 2017-09-22 + - Merge pull request: Allow wrap attributes with subobject to compatible with other parsers output + +2.7.3 / 2017-08-02 + - fix: handle CDATA with regx + +2.7.2 / 2017-07-30 + - Change travis config for yarn caching + - fix validator: when tag property is same as array property + - Merge pull request: Failing test case in validator for valid SVG + +2.7.1 / 2017-07-26 + - Fix: Handle val 0 + +2.7.0 / 2017-07-25 + - Fix test for arrayMode + - Merge pull request: Add arrayMode option to parse any nodes as arrays + +2.6.0 / 2017-07-14 + - code improvement + - Add unit tests for value conversion for attr + - Merge pull request: option of an attribute value conversion to a number (textAttrConversion) the same way as the textNodeConversion option does. Default value is false. 
+ +2.5.1 / 2017-07-01 + - Fix XML element name pattern + - Fix XML element name pattern while parsing + - Fix validation for xml tag element + +2.5.0 / 2017-06-25 + - Improve Validator performance + - update attr matching regex + - Add perf tests + - Improve atrr regex to handle all cases + +2.4.4 / 2017-06-08 + - Bug fix: when an attribute has single or double quote in value + +2.4.3 / 2017-06-05 + - Bug fix: when multiple CDATA tags are given + - Merge pull request: add option "textNodeConversion" + - add option "textNodeConversion" + +2.4.1 / 2017-04-14 + - fix tests + - Bug fix: preserve initial space of node value + - Handle CDATA + +2.3.1 / 2017-03-15 + - Bug fix: when single self closing tag + - Merge pull request: fix .codeclimate.yml + - Update .codeclimate.yml - Fixed config so it does not error anymore. + - Update .codeclimate.yml + +2.3.0 / 2017-02-26 + - Code improvement + - add bithound config + - Update usage + - Update travis to generate bundle js before running tests + - 1.Browserify, 2. 
add more tests for validator + - Add validator + - Fix CLI default parameter bug + +2.2.1 / 2017-02-05 + - Bug fix: CLI default option diff --git a/amplify/functions/downloadDocument/node_modules/fast-xml-parser/LICENSE b/amplify/functions/downloadDocument/node_modules/fast-xml-parser/LICENSE new file mode 100644 index 0000000..d7da622 --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/fast-xml-parser/LICENSE @@ -0,0 +1,21 @@ +MIT License + +Copyright (c) 2017 Amit Kumar Gupta + +Permission is hereby granted, free of charge, to any person obtaining a copy +of this software and associated documentation files (the "Software"), to deal +in the Software without restriction, including without limitation the rights +to use, copy, modify, merge, publish, distribute, sublicense, and/or sell +copies of the Software, and to permit persons to whom the Software is +furnished to do so, subject to the following conditions: + +The above copyright notice and this permission notice shall be included in all +copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, +OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE +SOFTWARE. 
diff --git a/amplify/functions/downloadDocument/node_modules/fast-xml-parser/README.md b/amplify/functions/downloadDocument/node_modules/fast-xml-parser/README.md new file mode 100644 index 0000000..1891838 --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/fast-xml-parser/README.md @@ -0,0 +1,236 @@ +# [fast-xml-parser](https://www.npmjs.com/package/fast-xml-parser) +[![NPM quality][quality-image]][quality-url] +[![Coverage Status](https://coveralls.io/repos/github/NaturalIntelligence/fast-xml-parser/badge.svg?branch=master)](https://coveralls.io/github/NaturalIntelligence/fast-xml-parser?branch=master) +[Try me](https://naturalintelligence.github.io/fast-xml-parser/) +[![NPM total downloads](https://img.shields.io/npm/dt/fast-xml-parser.svg)](https://npm.im/fast-xml-parser) + + +Validate XML, Parse XML to JS Object, or Build XML from JS Object without C/C++ based libraries and no callback. + +--- + +ads-thePowerGlassesBook +I had recently published a book, The Power Glasses. Please have a look. Your feedback would be helpful. You can [mail](githubissues@proton.me) me for a free copy. +
+ +Sponsor this project + + + + + + + + Stubmatic donate button +
+
+
+ + + +![fxp_sponsors](https://github.com/NaturalIntelligence/fast-xml-parser/assets/7692328/c9367497-d67e-410a-90a6-66e3808be929) + +## Users + + + + + + + + + + + + + + + + + + + +[more](./USERs.md) + +The list of users are mostly published by Github or communicated directly. Feel free to contact if you find any information wrong. + +--- + +## Main Features + +FXP logo + +* Validate XML data syntactically +* Parse XML to JS Object +* Build XML from JS Object +* Compatible to node js packages, in browser, and in CLI (click try me button above for demo) +* Faster than any other pure JS implementation. +* It can handle big files (tested up to 100mb). +* Controlled parsing using various options +* XML Entities, HTML entities, and DOCTYPE entites are supported. +* unpaired tags (Eg `
` in HTML), stop nodes (Eg ` +: + +``` + +Bundle size + +| Bundle Name | Size | +| ------------------ | ---- | +| fxbuilder.min.js | 6.5K | +| fxparser.min.js | 20K | +| fxp.min.js | 26K | +| fxvalidator.min.js | 5.7K | + +### Documents + + + + + + + +
v3v4v5
+ documents +
    +
  1. Getting Started
  2. +
  3. XML Parser
  4. +
  5. XML Builder
  6. +
  7. XML Validator
  8. +
  9. Entities
  10. +
  11. HTML Document Parsing
  12. +
  13. PI Tag processing
  14. +
    +
  1. Getting Started +
  2. Features
  3. +
  4. Options
  5. +
  6. Output Builders
  7. +
  8. Value Parsers
  9. +
+ +**note**: version 5 is released with version 4 tfor experimental use. Based on it's demand, it'll be developed and the features can be different in final release. + +## Performance +negative means error + +### XML Parser + + + + +* Y-axis: requests per second +* X-axis: File size + +### XML Builder + + +* Y-axis: requests per second + + + + + + +## Usage Trend + +[Usage Trend of fast-xml-parser](https://npm-compare.com/fast-xml-parser#timeRange=THREE_YEARS) + + + NPM Usage Trend of fast-xml-parser + + +## Supporters +### Contributors + +This project exists thanks to [all](graphs/contributors) the people who contribute. [[Contribute](docs/CONTRIBUTING.md)]. + + + + +### Backers + +Thank you to all our backers! 🙏 [[Become a backer](https://opencollective.com/fast-xml-parser#backer)] + + + + + +# License +* MIT License + +![Donate $5](static/img/donation_quote.png) diff --git a/amplify/functions/downloadDocument/node_modules/fast-xml-parser/package.json b/amplify/functions/downloadDocument/node_modules/fast-xml-parser/package.json new file mode 100644 index 0000000..1fd52c1 --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/fast-xml-parser/package.json @@ -0,0 +1,74 @@ +{ + "name": "fast-xml-parser", + "version": "4.4.1", + "description": "Validate XML, Parse XML, Build XML without C/C++ based libraries", + "main": "./src/fxp.js", + "scripts": { + "test": "nyc --reporter=lcov --reporter=text jasmine spec/*spec.js", + "test-types": "tsc --noEmit spec/typings/typings-test.ts", + "unit": "jasmine", + "coverage": "nyc report --reporter html --reporter text -t .nyc_output --report-dir .nyc_output/summary", + "perf": "node ./benchmark/perfTest3.js", + "lint": "eslint src/*.js spec/*.js", + "bundle": "webpack --config webpack-prod.config.js", + "prettier": "prettier --write src/**/*.js", + "publish-please": "publish-please", + "checkReadiness": "publish-please --dry-run" + }, + "bin": { + "fxparser": "./src/cli/cli.js" + }, + "repository": { + "type": 
"git", + "url": "https://github.com/NaturalIntelligence/fast-xml-parser" + }, + "keywords": [ + "fast", + "xml", + "json", + "parser", + "xml2js", + "x2js", + "xml2json", + "js", + "cli", + "validator", + "validate", + "transformer", + "assert", + "js2xml", + "json2xml", + "html" + ], + "author": "Amit Gupta (https://solothought.com)", + "license": "MIT", + "devDependencies": { + "@babel/core": "^7.13.10", + "@babel/plugin-transform-runtime": "^7.13.10", + "@babel/preset-env": "^7.13.10", + "@babel/register": "^7.13.8", + "@types/node": "20", + "babel-loader": "^8.2.2", + "cytorus": "^0.2.9", + "eslint": "^8.3.0", + "he": "^1.2.0", + "jasmine": "^3.6.4", + "nyc": "^15.1.0", + "prettier": "^1.19.1", + "publish-please": "^5.5.2", + "typescript": "5", + "webpack": "^5.64.4", + "webpack-cli": "^4.9.1" + }, + "typings": "src/fxp.d.ts", + "funding": [{ + "type": "github", + "url": "https://github.com/sponsors/NaturalIntelligence" + },{ + "type": "paypal", + "url": "https://paypal.me/naturalintelligence" + }], + "dependencies": { + "strnum": "^1.0.5" + } +} diff --git a/amplify/functions/downloadDocument/node_modules/fast-xml-parser/src/cli/cli.js b/amplify/functions/downloadDocument/node_modules/fast-xml-parser/src/cli/cli.js new file mode 100755 index 0000000..984534c --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/fast-xml-parser/src/cli/cli.js @@ -0,0 +1,93 @@ +#!/usr/bin/env node +'use strict'; +/*eslint-disable no-console*/ +const fs = require('fs'); +const path = require('path'); +const {XMLParser, XMLValidator} = require("../fxp"); +const readToEnd = require('./read').readToEnd; + +const version = require('./../../package.json').version; +if (process.argv[2] === '--help' || process.argv[2] === '-h') { + console.log(require("./man")); +} else if (process.argv[2] === '--version') { + console.log(version); +} else { + const options = { + removeNSPrefix: true, + ignoreAttributes: false, + parseTagValue: true, + parseAttributeValue: true, + }; + let 
fileName = ''; + let outputFileName; + let validate = false; + let validateOnly = false; + for (let i = 2; i < process.argv.length; i++) { + if (process.argv[i] === '-ns') { + options.removeNSPrefix = false; + } else if (process.argv[i] === '-a') { + options.ignoreAttributes = true; + } else if (process.argv[i] === '-c') { + options.parseTagValue = false; + options.parseAttributeValue = false; + } else if (process.argv[i] === '-o') { + outputFileName = process.argv[++i]; + } else if (process.argv[i] === '-v') { + validate = true; + } else if (process.argv[i] === '-V') { + validateOnly = true; + } else { + //filename + fileName = process.argv[i]; + } + } + + const callback = function(xmlData) { + let output = ''; + if (validate) { + const parser = new XMLParser(options); + output = parser.parse(xmlData,validate); + } else if (validateOnly) { + output = XMLValidator.validate(xmlData); + process.exitCode = output === true ? 0 : 1; + } else { + const parser = new XMLParser(options); + output = JSON.stringify(parser.parse(xmlData,validate), null, 4); + } + if (outputFileName) { + writeToFile(outputFileName, output); + } else { + console.log(output); + } + }; + + try { + + if (!fileName) { + readToEnd(process.stdin, function(err, data) { + if (err) { + throw err; + } + callback(data.toString()); + }); + } else { + fs.readFile(fileName, function(err, data) { + if (err) { + throw err; + } + callback(data.toString()); + }); + } + } catch (e) { + console.log('Seems an invalid file or stream.' 
+ e); + } +} + +function writeToFile(fileName, data) { + fs.writeFile(fileName, data, function(err) { + if (err) { + throw err; + } + console.log('JSON output has been written to ' + fileName); + }); +} diff --git a/amplify/functions/downloadDocument/node_modules/fast-xml-parser/src/cli/man.js b/amplify/functions/downloadDocument/node_modules/fast-xml-parser/src/cli/man.js new file mode 100644 index 0000000..89947cc --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/fast-xml-parser/src/cli/man.js @@ -0,0 +1,12 @@ +module.exports = `Fast XML Parser 4.0.0 +---------------- +$ fxparser [-ns|-a|-c|-v|-V] [-o outputfile.json] +$ cat xmlfile.xml | fxparser [-ns|-a|-c|-v|-V] [-o outputfile.json] + +Options +---------------- +-ns: remove namespace from tag and atrribute name. +-a: don't parse attributes. +-c: parse values to premitive type. +-v: validate before parsing. +-V: validate only.` \ No newline at end of file diff --git a/amplify/functions/downloadDocument/node_modules/fast-xml-parser/src/cli/read.js b/amplify/functions/downloadDocument/node_modules/fast-xml-parser/src/cli/read.js new file mode 100644 index 0000000..642da52 --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/fast-xml-parser/src/cli/read.js @@ -0,0 +1,92 @@ +'use strict'; + +// Copyright 2013 Timothy J Fontaine +// +// Permission is hereby granted, free of charge, to any person obtaining a copy +// of this software and associated documentation files (the 'Software'), to deal +// in the Software without restriction, including without limitation the rights +// to use, copy, modify, merge, publish, distribute, sublicense, and/or sell +// copies of the Software, and to permit persons to whom the Software is +// furnished to do so, subject to the following conditions: +// +// The above copyright notice and this permission notice shall be included in +// all copies or substantial portions of the Software. 
+// +// THE SOFTWARE IS PROVIDED 'AS IS', WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +// IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +// FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +// AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +// LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, +// OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN +// THE SOFTWARE + +/* + +Read any stream all the way to the end and trigger a single cb + +const http = require('http'); + +const rte = require('readtoend'); + +http.get('http://nodejs.org', function(response) { + rte.readToEnd(response, function(err, body) { + console.log(body); + }); +}); + +*/ + +let stream = require('stream'); +const util = require('util'); + +if (!stream.Transform) { + stream = require('readable-stream'); +} + +function ReadToEnd(opts) { + if (!(this instanceof ReadToEnd)) { + return new ReadToEnd(opts); + } + + stream.Transform.call(this, opts); + + this._rte_encoding = opts.encoding || 'utf8'; + + this._buff = ''; +} + +module.exports = ReadToEnd; +util.inherits(ReadToEnd, stream.Transform); + +ReadToEnd.prototype._transform = function(chunk, encoding, done) { + this._buff += chunk.toString(this._rte_encoding); + this.push(chunk); + done(); +}; + +ReadToEnd.prototype._flush = function(done) { + this.emit('complete', undefined, this._buff); + done(); +}; + +ReadToEnd.readToEnd = function(stream, options, cb) { + if (!cb) { + cb = options; + options = {}; + } + + const dest = new ReadToEnd(options); + + stream.pipe(dest); + + stream.on('error', function(err) { + stream.unpipe(dest); + cb(err); + }); + + dest.on('complete', cb); + + dest.resume(); + + return dest; +}; diff --git a/amplify/functions/downloadDocument/node_modules/fast-xml-parser/src/fxp.d.ts b/amplify/functions/downloadDocument/node_modules/fast-xml-parser/src/fxp.d.ts new file mode 100644 index 0000000..bddcfef 
--- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/fast-xml-parser/src/fxp.d.ts @@ -0,0 +1,402 @@ +type X2jOptions = { + /** + * Preserve the order of tags in resulting JS object + * + * Defaults to `false` + */ + preserveOrder?: boolean; + + /** + * Give a prefix to the attribute name in the resulting JS object + * + * Defaults to '@_' + */ + attributeNamePrefix?: string; + + /** + * A name to group all attributes of a tag under, or `false` to disable + * + * Defaults to `false` + */ + attributesGroupName?: false | string; + + /** + * The name of the next node in the resulting JS + * + * Defaults to `#text` + */ + textNodeName?: string; + + /** + * Whether to ignore attributes when parsing + * + * Defaults to `true` + */ + ignoreAttributes?: boolean; + + /** + * Whether to remove namespace string from tag and attribute names + * + * Defaults to `false` + */ + removeNSPrefix?: boolean; + + /** + * Whether to allow attributes without value + * + * Defaults to `false` + */ + allowBooleanAttributes?: boolean; + + /** + * Whether to parse tag value with `strnum` package + * + * Defaults to `true` + */ + parseTagValue?: boolean; + + /** + * Whether to parse tag value with `strnum` package + * + * Defaults to `false` + */ + parseAttributeValue?: boolean; + + /** + * Whether to remove surrounding whitespace from tag or attribute value + * + * Defaults to `true` + */ + trimValues?: boolean; + + /** + * Give a property name to set CDATA values to instead of merging to tag's text value + * + * Defaults to `false` + */ + cdataPropName?: false | string; + + /** + * If set, parse comments and set as this property + * + * Defaults to `false` + */ + commentPropName?: false | string; + + /** + * Control how tag value should be parsed. Called only if tag value is not empty + * + * @returns {undefined|null} `undefined` or `null` to set original value. + * @returns {unknown} + * + * 1. Different value or value with different data type to set new value. + * 2. 
Same value to set parsed value if `parseTagValue: true`. + * + * Defaults to `(tagName, val, jPath, hasAttributes, isLeafNode) => val` + */ + tagValueProcessor?: (tagName: string, tagValue: string, jPath: string, hasAttributes: boolean, isLeafNode: boolean) => unknown; + + /** + * Control how attribute value should be parsed + * + * @param attrName + * @param attrValue + * @param jPath + * @returns {undefined|null} `undefined` or `null` to set original value + * @returns {unknown} + * + * Defaults to `(attrName, val, jPath) => val` + */ + attributeValueProcessor?: (attrName: string, attrValue: string, jPath: string) => unknown; + + /** + * Options to pass to `strnum` for parsing numbers + * + * Defaults to `{ hex: true, leadingZeros: true, eNotation: true }` + */ + numberParseOptions?: strnumOptions; + + /** + * Nodes to stop parsing at + * + * Defaults to `[]` + */ + stopNodes?: string[]; + + /** + * List of tags without closing tags + * + * Defaults to `[]` + */ + unpairedTags?: string[]; + + /** + * Whether to always create a text node + * + * Defaults to `false` + */ + alwaysCreateTextNode?: boolean; + + /** + * Determine whether a tag should be parsed as an array + * + * @param tagName + * @param jPath + * @param isLeafNode + * @param isAttribute + * @returns {boolean} + * + * Defaults to `() => false` + */ + isArray?: (tagName: string, jPath: string, isLeafNode: boolean, isAttribute: boolean) => boolean; + + /** + * Whether to process default and DOCTYPE entities + * + * Defaults to `true` + */ + processEntities?: boolean; + + /** + * Whether to process HTML entities + * + * Defaults to `false` + */ + htmlEntities?: boolean; + + /** + * Whether to ignore the declaration tag from output + * + * Defaults to `false` + */ + ignoreDeclaration?: boolean; + + /** + * Whether to ignore Pi tags + * + * Defaults to `false` + */ + ignorePiTags?: boolean; + + /** + * Transform tag names + * + * Defaults to `false` + */ + transformTagName?: ((tagName: string) => string) | 
false; + + /** + * Transform attribute names + * + * Defaults to `false` + */ + transformAttributeName?: ((attributeName: string) => string) | false; + + /** + * Change the tag name when a different name is returned. Skip the tag from parsed result when false is returned. + * Modify `attrs` object to control attributes for the given tag. + * + * @returns {string} new tag name. + * @returns false to skip the tag + * + * Defaults to `(tagName, jPath, attrs) => tagName` + */ + updateTag?: (tagName: string, jPath: string, attrs: {[k: string]: string}) => string | boolean; +}; + +type strnumOptions = { + hex: boolean; + leadingZeros: boolean, + skipLike?: RegExp, + eNotation?: boolean +} + +type validationOptions = { + /** + * Whether to allow attributes without value + * + * Defaults to `false` + */ + allowBooleanAttributes?: boolean; + + /** + * List of tags without closing tags + * + * Defaults to `[]` + */ + unpairedTags?: string[]; +}; + +type XmlBuilderOptions = { + /** + * Give a prefix to the attribute name in the resulting JS object + * + * Defaults to '@_' + */ + attributeNamePrefix?: string; + + /** + * A name to group all attributes of a tag under, or `false` to disable + * + * Defaults to `false` + */ + attributesGroupName?: false | string; + + /** + * The name of the next node in the resulting JS + * + * Defaults to `#text` + */ + textNodeName?: string; + + /** + * Whether to ignore attributes when parsing + * + * Defaults to `true` + */ + ignoreAttributes?: boolean; + + /** + * Give a property name to set CDATA values to instead of merging to tag's text value + * + * Defaults to `false` + */ + cdataPropName?: false | string; + + /** + * If set, parse comments and set as this property + * + * Defaults to `false` + */ + commentPropName?: false | string; + + /** + * Whether to make output pretty instead of single line + * + * Defaults to `false` + */ + format?: boolean; + + + /** + * If `format` is set to `true`, sets the indent string + * + * Defaults to ` 
` + */ + indentBy?: string; + + /** + * Give a name to a top-level array + * + * Defaults to `undefined` + */ + arrayNodeName?: string; + + /** + * Create empty tags for tags with no text value + * + * Defaults to `false` + */ + suppressEmptyNode?: boolean; + + /** + * Suppress an unpaired tag + * + * Defaults to `true` + */ + suppressUnpairedNode?: boolean; + + /** + * Don't put a value for boolean attributes + * + * Defaults to `true` + */ + suppressBooleanAttributes?: boolean; + + /** + * Preserve the order of tags in resulting JS object + * + * Defaults to `false` + */ + preserveOrder?: boolean; + + /** + * List of tags without closing tags + * + * Defaults to `[]` + */ + unpairedTags?: string[]; + + /** + * Nodes to stop parsing at + * + * Defaults to `[]` + */ + stopNodes?: string[]; + + /** + * Control how tag value should be parsed. Called only if tag value is not empty + * + * @returns {undefined|null} `undefined` or `null` to set original value. + * @returns {unknown} + * + * 1. Different value or value with different data type to set new value. + * 2. Same value to set parsed value if `parseTagValue: true`. 
+ * + * Defaults to `(tagName, val, jPath, hasAttributes, isLeafNode) => val` + */ + tagValueProcessor?: (name: string, value: unknown) => unknown; + + /** + * Control how attribute value should be parsed + * + * @param attrName + * @param attrValue + * @param jPath + * @returns {undefined|null} `undefined` or `null` to set original value + * @returns {unknown} + * + * Defaults to `(attrName, val, jPath) => val` + */ + attributeValueProcessor?: (name: string, value: unknown) => unknown; + + /** + * Whether to process default and DOCTYPE entities + * + * Defaults to `true` + */ + processEntities?: boolean; + + + oneListGroup?: boolean; +}; + +type ESchema = string | object | Array; + +type ValidationError = { + err: { + code: string; + msg: string, + line: number, + col: number + }; +}; + +export class XMLParser { + constructor(options?: X2jOptions); + parse(xmlData: string | Buffer ,validationOptions?: validationOptions | boolean): any; + /** + * Add Entity which is not by default supported by this library + * @param entityIdentifier {string} Eg: 'ent' for &ent; + * @param entityValue {string} Eg: '\r' + */ + addEntity(entityIdentifier: string, entityValue: string): void; +} + +export class XMLValidator{ + static validate( xmlData: string, options?: validationOptions): true | ValidationError; +} +export class XMLBuilder { + constructor(options?: XmlBuilderOptions); + build(jObj: any): any; +} diff --git a/amplify/functions/downloadDocument/node_modules/fast-xml-parser/src/fxp.js b/amplify/functions/downloadDocument/node_modules/fast-xml-parser/src/fxp.js new file mode 100644 index 0000000..9cfa0ac --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/fast-xml-parser/src/fxp.js @@ -0,0 +1,11 @@ +'use strict'; + +const validator = require('./validator'); +const XMLParser = require('./xmlparser/XMLParser'); +const XMLBuilder = require('./xmlbuilder/json2xml'); + +module.exports = { + XMLParser: XMLParser, + XMLValidator: validator, + XMLBuilder: 
XMLBuilder +} \ No newline at end of file diff --git a/amplify/functions/downloadDocument/node_modules/fast-xml-parser/src/util.js b/amplify/functions/downloadDocument/node_modules/fast-xml-parser/src/util.js new file mode 100644 index 0000000..df0a60d --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/fast-xml-parser/src/util.js @@ -0,0 +1,72 @@ +'use strict'; + +const nameStartChar = ':A-Za-z_\\u00C0-\\u00D6\\u00D8-\\u00F6\\u00F8-\\u02FF\\u0370-\\u037D\\u037F-\\u1FFF\\u200C-\\u200D\\u2070-\\u218F\\u2C00-\\u2FEF\\u3001-\\uD7FF\\uF900-\\uFDCF\\uFDF0-\\uFFFD'; +const nameChar = nameStartChar + '\\-.\\d\\u00B7\\u0300-\\u036F\\u203F-\\u2040'; +const nameRegexp = '[' + nameStartChar + '][' + nameChar + ']*' +const regexName = new RegExp('^' + nameRegexp + '$'); + +const getAllMatches = function(string, regex) { + const matches = []; + let match = regex.exec(string); + while (match) { + const allmatches = []; + allmatches.startIndex = regex.lastIndex - match[0].length; + const len = match.length; + for (let index = 0; index < len; index++) { + allmatches.push(match[index]); + } + matches.push(allmatches); + match = regex.exec(string); + } + return matches; +}; + +const isName = function(string) { + const match = regexName.exec(string); + return !(match === null || typeof match === 'undefined'); +}; + +exports.isExist = function(v) { + return typeof v !== 'undefined'; +}; + +exports.isEmptyObject = function(obj) { + return Object.keys(obj).length === 0; +}; + +/** + * Copy all the properties of a into b. 
+ * @param {*} target + * @param {*} a + */ +exports.merge = function(target, a, arrayMode) { + if (a) { + const keys = Object.keys(a); // will return an array of own properties + const len = keys.length; //don't make it inline + for (let i = 0; i < len; i++) { + if (arrayMode === 'strict') { + target[keys[i]] = [ a[keys[i]] ]; + } else { + target[keys[i]] = a[keys[i]]; + } + } + } +}; +/* exports.merge =function (b,a){ + return Object.assign(b,a); +} */ + +exports.getValue = function(v) { + if (exports.isExist(v)) { + return v; + } else { + return ''; + } +}; + +// const fakeCall = function(a) {return a;}; +// const fakeCallNoReturn = function() {}; + +exports.isName = isName; +exports.getAllMatches = getAllMatches; +exports.nameRegexp = nameRegexp; diff --git a/amplify/functions/downloadDocument/node_modules/fast-xml-parser/src/v5/CharsSymbol.js b/amplify/functions/downloadDocument/node_modules/fast-xml-parser/src/v5/CharsSymbol.js new file mode 100644 index 0000000..fa5ce9e --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/fast-xml-parser/src/v5/CharsSymbol.js @@ -0,0 +1,16 @@ +modules.export = { + "<" : "<", //tag start + ">" : ">", //tag end + "/" : "/", //close tag + "!" : "!", //comment or docttype + "!--" : "!--", //comment + "-->" : "-->", //comment end + "?" 
: "?", //pi + "?>" : "?>", //pi end + "?xml" : "?xml", //pi end + "![" : "![", //cdata + "]]>" : "]]>", //cdata end + "[" : "[", + "-" : "-", + "D" : "D", +} \ No newline at end of file diff --git a/amplify/functions/downloadDocument/node_modules/fast-xml-parser/src/v5/EntitiesParser.js b/amplify/functions/downloadDocument/node_modules/fast-xml-parser/src/v5/EntitiesParser.js new file mode 100644 index 0000000..62cc02f --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/fast-xml-parser/src/v5/EntitiesParser.js @@ -0,0 +1,107 @@ +const ampEntity = { regex: /&(amp|#38|#x26);/g, val : "&"}; +const htmlEntities = { + "space": { regex: /&(nbsp|#160);/g, val: " " }, + // "lt" : { regex: /&(lt|#60);/g, val: "<" }, + // "gt" : { regex: /&(gt|#62);/g, val: ">" }, + // "amp" : { regex: /&(amp|#38);/g, val: "&" }, + // "quot" : { regex: /&(quot|#34);/g, val: "\"" }, + // "apos" : { regex: /&(apos|#39);/g, val: "'" }, + "cent" : { regex: /&(cent|#162);/g, val: "¢" }, + "pound" : { regex: /&(pound|#163);/g, val: "£" }, + "yen" : { regex: /&(yen|#165);/g, val: "¥" }, + "euro" : { regex: /&(euro|#8364);/g, val: "€" }, + "copyright" : { regex: /&(copy|#169);/g, val: "©" }, + "reg" : { regex: /&(reg|#174);/g, val: "®" }, + "inr" : { regex: /&(inr|#8377);/g, val: "₹" }, + "num_dec": { regex: /&#([0-9]{1,7});/g, val : (_, str) => String.fromCharCode(Number.parseInt(str, 10)) }, + "num_hex": { regex: /&#x([0-9a-fA-F]{1,6});/g, val : (_, str) => String.fromCharCode(Number.parseInt(str, 16)) }, +}; + +class EntitiesParser{ + constructor(replaceHtmlEntities) { + this.replaceHtmlEntities = replaceHtmlEntities; + this.docTypeEntities = {}; + this.lastEntities = { + "apos" : { regex: /&(apos|#39|#x27);/g, val : "'"}, + "gt" : { regex: /&(gt|#62|#x3E);/g, val : ">"}, + "lt" : { regex: /&(lt|#60|#x3C);/g, val : "<"}, + "quot" : { regex: /&(quot|#34|#x22);/g, val : "\""}, + }; + } + + addExternalEntities(externalEntities){ + const entKeys = Object.keys(externalEntities); + for 
(let i = 0; i < entKeys.length; i++) { + const ent = entKeys[i]; + this.addExternalEntity(ent,externalEntities[ent]) + } + } + addExternalEntity(key,val){ + validateEntityName(key); + if(val.indexOf("&") !== -1) { + reportWarning(`Entity ${key} is not added as '&' is found in value;`) + return; + }else{ + this.lastEntities[ent] = { + regex: new RegExp("&"+key+";","g"), + val : val + } + } + } + + addDocTypeEntities(entities){ + const entKeys = Object.keys(entities); + for (let i = 0; i < entKeys.length; i++) { + const ent = entKeys[i]; + this.docTypeEntities[ent] = { + regex: new RegExp("&"+ent+";","g"), + val : entities[ent] + } + } + } + + parse(val){ + return this.replaceEntitiesValue(val) + } + + /** + * 1. Replace DOCTYPE entities + * 2. Replace external entities + * 3. Replace HTML entities if asked + * @param {string} val + */ + replaceEntitiesValue(val){ + if(typeof val === "string" && val.length > 0){ + for(let entityName in this.docTypeEntities){ + const entity = this.docTypeEntities[entityName]; + val = val.replace( entity.regx, entity.val); + } + for(let entityName in this.lastEntities){ + const entity = this.lastEntities[entityName]; + val = val.replace( entity.regex, entity.val); + } + if(this.replaceHtmlEntities){ + for(let entityName in htmlEntities){ + const entity = htmlEntities[entityName]; + val = val.replace( entity.regex, entity.val); + } + } + val = val.replace( ampEntity.regex, ampEntity.val); + } + return val; + } +}; + +//an entity name should not contains special characters that may be used in regex +//Eg !?\\\/[]$%{}^&*()<> +const specialChar = "!?\\\/[]$%{}^&*()<>|+"; + +function validateEntityName(name){ + for (let i = 0; i < specialChar.length; i++) { + const ch = specialChar[i]; + if(name.indexOf(ch) !== -1) throw new Error(`Invalid character ${ch} in entity name`); + } + return name; +} + +module.exports = EntitiesParser; \ No newline at end of file diff --git 
a/amplify/functions/downloadDocument/node_modules/fast-xml-parser/src/v5/OptionsBuilder.js b/amplify/functions/downloadDocument/node_modules/fast-xml-parser/src/v5/OptionsBuilder.js new file mode 100755 index 0000000..be1f1d4 --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/fast-xml-parser/src/v5/OptionsBuilder.js @@ -0,0 +1,64 @@ + +const JsObjOutputBuilder = require("./OutputBuilders/JsObjBuilder"); + +const defaultOptions = { + preserveOrder: false, + removeNSPrefix: false, // remove NS from tag name or attribute name if true + //ignoreRootElement : false, + stopNodes: [], //nested tags will not be parsed even for errors + // isArray: () => false, //User will set it + htmlEntities: false, + // skipEmptyListItem: false + tags:{ + unpaired: [], + nameFor:{ + cdata: false, + comment: false, + text: '#text' + }, + separateTextProperty: false, + }, + attributes:{ + ignore: false, + booleanType: true, + entities: true, + }, + + // select: ["img[src]"], + // stop: ["anim", "[ads]"] + only: [], // rest tags will be skipped. It will result in flat array + hierarchy: false, //will be used when a particular tag is set to be parsed. + skip: [], // will be skipped from parse result. on('skip') will be triggered + + select: [], // on('select', tag => tag ) will be called if match + stop: [], //given tagPath will not be parsed. innerXML will be set as string value + OutputBuilder: new JsObjOutputBuilder(), +}; + +const buildOptions = function(options) { + const finalOptions = { ... 
defaultOptions}; + copyProperties(finalOptions,options) + return finalOptions; +}; + +function copyProperties(target, source) { + for (let key in source) { + if (source.hasOwnProperty(key)) { + if (key === 'OutputBuilder') { + target[key] = source[key]; + }else if (typeof source[key] === 'object' && !Array.isArray(source[key])) { + // Recursively copy nested properties + if (typeof target[key] === 'undefined') { + target[key] = {}; + } + copyProperties(target[key], source[key]); + } else { + // Copy non-nested properties + target[key] = source[key]; + } + } + } +} + +exports.buildOptions = buildOptions; +exports.defaultOptions = defaultOptions; \ No newline at end of file diff --git a/amplify/functions/downloadDocument/node_modules/fast-xml-parser/src/v5/OutputBuilders/BaseOutputBuilder.js b/amplify/functions/downloadDocument/node_modules/fast-xml-parser/src/v5/OutputBuilders/BaseOutputBuilder.js new file mode 100644 index 0000000..be2d478 --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/fast-xml-parser/src/v5/OutputBuilders/BaseOutputBuilder.js @@ -0,0 +1,71 @@ +class BaseOutputBuilder{ + constructor(){ + // this.attributes = {}; + } + + addAttribute(name, value){ + if(this.options.onAttribute){ + //TODO: better to pass tag path + const v = this.options.onAttribute(name, value, this.tagName); + if(v) this.attributes[v.name] = v.value; + }else{ + name = this.options.attributes.prefix + name + this.options.attributes.suffix; + this.attributes[name] = this.parseValue(value, this.options.attributes.valueParsers); + } + } + + /** + * parse value by chain of parsers + * @param {string} val + * @returns {any} parsed value if matching parser found + */ + parseValue = function(val, valParsers){ + for (let i = 0; i < valParsers.length; i++) { + let valParser = valParsers[i]; + if(typeof valParser === "string"){ + valParser = this.registeredParsers[valParser]; + } + if(valParser){ + val = valParser.parse(val); + } + } + return val; + } + + /** + * To add a 
nested empty tag. + * @param {string} key + * @param {any} val + */ + _addChild(key, val){} + + /** + * skip the comment if property is not set + */ + addComment(text){ + if(this.options.nameFor.comment) + this._addChild(this.options.nameFor.comment, text); + } + + //store CDATA separately if property is set + //otherwise add to tag's value + addCdata(text){ + if (this.options.nameFor.cdata) { + this._addChild(this.options.nameFor.cdata, text); + } else { + this.addRawValue(text || ""); + } + } + + addRawValue = text => this.addValue(text); + + addDeclaration(){ + if(!this.options.declaration){ + }else{ + this.addPi("?xml"); + } + this.attributes = {} + } +} + +module.exports = BaseOutputBuilder; \ No newline at end of file diff --git a/amplify/functions/downloadDocument/node_modules/fast-xml-parser/src/v5/OutputBuilders/JsArrBuilder.js b/amplify/functions/downloadDocument/node_modules/fast-xml-parser/src/v5/OutputBuilders/JsArrBuilder.js new file mode 100644 index 0000000..c63f627 --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/fast-xml-parser/src/v5/OutputBuilders/JsArrBuilder.js @@ -0,0 +1,103 @@ +const {buildOptions,registerCommonValueParsers} = require("./ParserOptionsBuilder"); + +class OutputBuilder{ + constructor(options){ + this.options = buildOptions(options); + this.registeredParsers = registerCommonValueParsers(this.options); + } + + registerValueParser(name,parserInstance){//existing name will override the parser without warning + this.registeredParsers[name] = parserInstance; + } + + getInstance(parserOptions){ + return new JsArrBuilder(parserOptions, this.options, this.registeredParsers); + } +} + +const rootName = '!js_arr'; +const BaseOutputBuilder = require("./BaseOutputBuilder"); + +class JsArrBuilder extends BaseOutputBuilder{ + + constructor(parserOptions, options,registeredParsers) { + super(); + this.tagsStack = []; + this.parserOptions = parserOptions; + this.options = options; + this.registeredParsers = 
registeredParsers; + + this.root = new Node(rootName); + this.currentNode = this.root; + this.attributes = {}; + } + + addTag(tag){ + //when a new tag is added, it should be added as child of current node + //TODO: shift this check to the parser + if(tag.name === "__proto__") tag.name = "#__proto__"; + + this.tagsStack.push(this.currentNode); + this.currentNode = new Node(tag.name, this.attributes); + this.attributes = {}; + } + + /** + * Check if the node should be added by checking user's preference + * @param {Node} node + * @returns boolean: true if the node should not be added + */ + closeTag(){ + const node = this.currentNode; + this.currentNode = this.tagsStack.pop(); //set parent node in scope + if(this.options.onClose !== undefined){ + //TODO TagPathMatcher + const resultTag = this.options.onClose(node, + new TagPathMatcher(this.tagsStack,node)); + + if(resultTag) return; + } + this.currentNode.child.push(node); //to parent node + } + + //Called by parent class methods + _addChild(key, val){ + // if(key === "__proto__") tagName = "#__proto__"; + this.currentNode.child.push( {[key]: val }); + // this.currentNode.leafType = false; + } + + /** + * Add text value child node + * @param {string} text + */ + addValue(text){ + this.currentNode.child.push( {[this.options.nameFor.text]: this.parseValue(text, this.options.tags.valueParsers) }); + } + + addPi(name){ + //TODO: set pi flag + if(!this.options.ignorePiTags){ + const node = new Node(name, this.attributes); + this.currentNode[":@"] = this.attributes; + this.currentNode.child.push(node); + } + this.attributes = {}; + } + getOutput(){ + return this.root.child[0]; + } +} + + + +class Node{ + constructor(tagname, attributes){ + this.tagname = tagname; + this.child = []; //nested tags, text, cdata, comments + if(attributes && Object.keys(attributes).length > 0) + this[":@"] = attributes; + } +} + +module.exports = OutputBuilder; \ No newline at end of file diff --git 
a/amplify/functions/downloadDocument/node_modules/fast-xml-parser/src/v5/OutputBuilders/JsMinArrBuilder.js b/amplify/functions/downloadDocument/node_modules/fast-xml-parser/src/v5/OutputBuilders/JsMinArrBuilder.js new file mode 100644 index 0000000..e0dc1e9 --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/fast-xml-parser/src/v5/OutputBuilders/JsMinArrBuilder.js @@ -0,0 +1,102 @@ +const {buildOptions,registerCommonValueParsers} = require("./ParserOptionsBuilder"); + +class OutputBuilder{ + constructor(options){ + this.options = buildOptions(options); + this.registeredParsers = registerCommonValueParsers(this.options); + } + + registerValueParser(name,parserInstance){//existing name will override the parser without warning + this.registeredParsers[name] = parserInstance; + } + + getInstance(parserOptions){ + return new JsMinArrBuilder(parserOptions, this.options, this.registeredParsers); + } +} + +const BaseOutputBuilder = require("./BaseOutputBuilder"); +const rootName = '^'; + +class JsMinArrBuilder extends BaseOutputBuilder{ + + constructor(parserOptions, options,registeredParsers) { + super(); + this.tagsStack = []; + this.parserOptions = parserOptions; + this.options = options; + this.registeredParsers = registeredParsers; + + this.root = {[rootName]: []}; + this.currentNode = this.root; + this.currentNodeTagName = rootName; + this.attributes = {}; + } + + addTag(tag){ + //when a new tag is added, it should be added as child of current node + //TODO: shift this check to the parser + if(tag.name === "__proto__") tag.name = "#__proto__"; + + this.tagsStack.push([this.currentNodeTagName,this.currentNode]); //this.currentNode is parent node here + this.currentNodeTagName = tag.name; + this.currentNode = { [tag.name]:[]} + if(Object.keys(this.attributes).length > 0){ + this.currentNode[":@"] = this.attributes; + this.attributes = {}; + } + } + + /** + * Check if the node should be added by checking user's preference + * @param {Node} node + * 
@returns boolean: true if the node should not be added + */ + closeTag(){ + const node = this.currentNode; + const nodeName = this.currentNodeTagName; + const arr = this.tagsStack.pop(); //set parent node in scope + this.currentNodeTagName = arr[0]; + this.currentNode = arr[1]; + + if(this.options.onClose !== undefined){ + //TODO TagPathMatcher + const resultTag = this.options.onClose(node, + new TagPathMatcher(this.tagsStack,node)); + + if(resultTag) return; + } + this.currentNode[this.currentNodeTagName].push(node); //to parent node + } + + //Called by parent class methods + _addChild(key, val){ + // if(key === "__proto__") tagName = "#__proto__"; + this.currentNode.push( {[key]: val }); + // this.currentNode.leafType = false; + } + + /** + * Add text value child node + * @param {string} text + */ + addValue(text){ + this.currentNode[this.currentNodeTagName].push( {[this.options.nameFor.text]: this.parseValue(text, this.options.tags.valueParsers) }); + } + + addPi(name){ + if(!this.options.ignorePiTags){ + const node = { [name]:[]} + if(this.attributes){ + node[":@"] = this.attributes; + } + this.currentNode.push(node); + } + this.attributes = {}; + } + getOutput(){ + return this.root[rootName]; + } +} + +module.exports = OutputBuilder; \ No newline at end of file diff --git a/amplify/functions/downloadDocument/node_modules/fast-xml-parser/src/v5/OutputBuilders/JsObjBuilder.js b/amplify/functions/downloadDocument/node_modules/fast-xml-parser/src/v5/OutputBuilders/JsObjBuilder.js new file mode 100644 index 0000000..37036c5 --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/fast-xml-parser/src/v5/OutputBuilders/JsObjBuilder.js @@ -0,0 +1,156 @@ + + +const {buildOptions,registerCommonValueParsers} = require("./ParserOptionsBuilder"); + +class OutputBuilder{ + constructor(builderOptions){ + this.options = buildOptions(builderOptions); + this.registeredParsers = registerCommonValueParsers(this.options); + } + + 
registerValueParser(name,parserInstance){//existing name will override the parser without warning + this.registeredParsers[name] = parserInstance; + } + + getInstance(parserOptions){ + return new JsObjBuilder(parserOptions, this.options, this.registeredParsers); + } +} + +const BaseOutputBuilder = require("./BaseOutputBuilder"); +const rootName = '^'; + +class JsObjBuilder extends BaseOutputBuilder{ + + constructor(parserOptions, builderOptions,registeredParsers) { + super(); + //hold the raw detail of a tag and sequence with reference to the output + this.tagsStack = []; + this.parserOptions = parserOptions; + this.options = builderOptions; + this.registeredParsers = registeredParsers; + + this.root = {}; + this.parent = this.root; + this.tagName = rootName; + this.value = {}; + this.textValue = ""; + this.attributes = {}; + } + + addTag(tag){ + + let value = ""; + if( !isEmpty(this.attributes)){ + value = {}; + if(this.options.attributes.groupBy){ + value[this.options.attributes.groupBy] = this.attributes; + }else{ + value = this.attributes; + } + } + + this.tagsStack.push([this.tagName, this.textValue, this.value]); //parent tag, parent text value, parent tag value (jsobj) + this.tagName = tag.name; + this.value = value; + this.textValue = ""; + this.attributes = {}; + } + + /** + * Check if the node should be added by checking user's preference + * @param {Node} node + * @returns boolean: true if the node should not be added + */ + closeTag(){ + const tagName = this.tagName; + let value = this.value; + let textValue = this.textValue; + + //update tag text value + if(typeof value !== "object" && !Array.isArray(value)){ + value = this.parseValue(textValue.trim(), this.options.tags.valueParsers); + }else if(textValue.length > 0){ + value[this.options.nameFor.text] = this.parseValue(textValue.trim(), this.options.tags.valueParsers); + } + + + let resultTag= { + tagName: tagName, + value: value + }; + + if(this.options.onTagClose !== undefined){ + //TODO 
TagPathMatcher + resultTag = this.options.onClose(tagName, value, this.textValue, new TagPathMatcher(this.tagsStack,node)); + + if(!resultTag) return; + } + + //set parent node in scope + let arr = this.tagsStack.pop(); + let parentTag = arr[2]; + parentTag=this._addChildTo(resultTag.tagName, resultTag.value, parentTag); + + this.tagName = arr[0]; + this.textValue = arr[1]; + this.value = parentTag; + } + + _addChild(key, val){ + if(typeof this.value === "string"){ + this.value = { [this.options.nameFor.text] : this.value }; + } + + this._addChildTo(key, val, this.value); + // this.currentNode.leafType = false; + this.attributes = {}; + } + + _addChildTo(key, val, node){ + if(typeof node === 'string') node = {}; + if(!node[key]){ + node[key] = val; + }else{ //Repeated + if(!Array.isArray(node[key])){ //but not stored as array + node[key] = [node[key]]; + } + node[key].push(val); + } + return node; + } + + + /** + * Add text value child node + * @param {string} text + */ + addValue(text){ + //TODO: use bytes join + if(this.textValue.length > 0) this.textValue += " " + text; + else this.textValue = text; + } + + addPi(name){ + let value = ""; + if( !isEmpty(this.attributes)){ + value = {}; + if(this.options.attributes.groupBy){ + value[this.options.attributes.groupBy] = this.attributes; + }else{ + value = this.attributes; + } + } + this._addChild(name, value); + + } + getOutput(){ + return this.value; + } +} + +function isEmpty(obj) { + return Object.keys(obj).length === 0; +} + +module.exports = OutputBuilder; \ No newline at end of file diff --git a/amplify/functions/downloadDocument/node_modules/fast-xml-parser/src/v5/OutputBuilders/ParserOptionsBuilder.js b/amplify/functions/downloadDocument/node_modules/fast-xml-parser/src/v5/OutputBuilders/ParserOptionsBuilder.js new file mode 100644 index 0000000..c71ea94 --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/fast-xml-parser/src/v5/OutputBuilders/ParserOptionsBuilder.js @@ -0,0 +1,99 @@ +const 
trimParser = require("../valueParsers/trim") +const booleanParser = require("../valueParsers/booleanParser") +const currencyParser = require("../valueParsers/currency") +const numberParser = require("../valueParsers/number") + +const defaultOptions={ + nameFor:{ + text: "#text", + comment: "", + cdata: "", + }, + // onTagClose: () => {}, + // onAttribute: () => {}, + piTag: false, + declaration: false, //"?xml" + tags: { + valueParsers: [ + // "trim", + // "boolean", + // "number", + // "currency", + // "date", + ] + }, + attributes:{ + prefix: "@_", + suffix: "", + groupBy: "", + + valueParsers: [ + // "trim", + // "boolean", + // "number", + // "currency", + // "date", + ] + }, + dataType:{ + + } +} + +//TODO +const withJoin = ["trim","join", /*"entities",*/"number","boolean","currency"/*, "date"*/] +const withoutJoin = ["trim", /*"entities",*/"number","boolean","currency"/*, "date"*/] + +function buildOptions(options){ + //clone + const finalOptions = { ... defaultOptions}; + + //add config missed in cloning + finalOptions.tags.valueParsers.push(...withJoin) + if(!this.preserveOrder) + finalOptions.tags.valueParsers.push(...withoutJoin); + + //add config missed in cloning + finalOptions.attributes.valueParsers.push(...withJoin) + + //override configuration + copyProperties(finalOptions,options); + return finalOptions; +} + +function copyProperties(target, source) { + for (let key in source) { + if (source.hasOwnProperty(key)) { + if (typeof source[key] === 'object' && !Array.isArray(source[key])) { + // Recursively copy nested properties + if (typeof target[key] === 'undefined') { + target[key] = {}; + } + copyProperties(target[key], source[key]); + } else { + // Copy non-nested properties + target[key] = source[key]; + } + } + } +} + +function registerCommonValueParsers(options){ + return { + "trim": new trimParser(), + // "join": this.entityParser.parse, + "boolean": new booleanParser(), + "number": new numberParser({ + hex: true, + leadingZeros: true, + 
eNotation: true + }), + "currency": new currencyParser(), + // "date": this.entityParser.parse, + } +} + +module.exports = { + buildOptions : buildOptions, + registerCommonValueParsers: registerCommonValueParsers +} \ No newline at end of file diff --git a/amplify/functions/downloadDocument/node_modules/fast-xml-parser/src/v5/Report.js b/amplify/functions/downloadDocument/node_modules/fast-xml-parser/src/v5/Report.js new file mode 100644 index 0000000..e69de29 diff --git a/amplify/functions/downloadDocument/node_modules/fast-xml-parser/src/v5/TagPath.js b/amplify/functions/downloadDocument/node_modules/fast-xml-parser/src/v5/TagPath.js new file mode 100644 index 0000000..d901cc3 --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/fast-xml-parser/src/v5/TagPath.js @@ -0,0 +1,81 @@ +class TagPath{ + constructor(pathStr){ + let text = ""; + let tName = ""; + let pos; + let aName = ""; + let aVal = ""; + this.stack = [] + + for (let i = 0; i < pathStr.length; i++) { + let ch = pathStr[i]; + if(ch === " ") { + if(text.length === 0) continue; + tName = text; text = ""; + }else if(ch === "["){ + if(tName.length === 0){ + tName = text; text = ""; + } + i++; + for (; i < pathStr.length; i++) { + ch = pathStr[i]; + if(ch=== "=") continue; + else if(ch=== "]") {aName = text.trim(); text=""; break; i--;} + else if(ch === "'" || ch === '"'){ + let attrEnd = pathStr.indexOf(ch,i+1); + aVal = pathStr.substring(i+1, attrEnd); + i = attrEnd; + }else{ + text +=ch; + } + } + }else if(ch !== " " && text.length === 0 && tName.length > 0){//reading tagName + //save previous tag + this.stack.push(new TagPathNode(tName,pos,aName,aVal)); + text = ch; tName = ""; aName = ""; aVal = ""; + }else{ + text+=ch; + } + } + + //last tag in the path + if(tName.length >0 || text.length>0){ + this.stack.push(new TagPathNode(text||tName,pos,aName,aVal)); + } + } + + match(tagStack,node){ + if(this.stack[0].name !== "*"){ + if(this.stack.length !== tagStack.length +1) return false; + + 
//loop through tagPath and tagStack and match + for (let i = 0; i < this.tagStack.length; i++) { + if(!this.stack[i].match(tagStack[i])) return false; + } + } + if(!this.stack[this.stack.length - 1].match(node)) return false; + return true; + } +} + +class TagPathNode{ + constructor(name,position,attrName,attrVal){ + this.name = name; + this.position = position; + this.attrName = attrName, + this.attrVal = attrVal; + } + + match(node){ + let matching = true; + matching = node.name === this.name; + if(this.position) matching = node.position === this.position; + if(this.attrName) matching = node.attrs[this.attrName !== undefined]; + if(this.attrVal) matching = node.attrs[this.attrName !== this.attrVal]; + return matching; + } +} + +// console.log((new TagPath("* b[b]")).stack); +// console.log((new TagPath("a[a] b[b] c")).stack); +// console.log((new TagPath(" b [ b= 'cf sdadwa' ] a ")).stack); \ No newline at end of file diff --git a/amplify/functions/downloadDocument/node_modules/fast-xml-parser/src/v5/TagPathMatcher.js b/amplify/functions/downloadDocument/node_modules/fast-xml-parser/src/v5/TagPathMatcher.js new file mode 100644 index 0000000..af23607 --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/fast-xml-parser/src/v5/TagPathMatcher.js @@ -0,0 +1,15 @@ +const TagPath = require("./TagPath"); + +class TagPathMatcher{ + constructor(stack,node){ + this.stack = stack; + this.node= node; + } + + match(path){ + const tagPath = new TagPath(path); + return tagPath.match(this.stack, this.node); + } +} + +module.exports = TagPathMatcher; \ No newline at end of file diff --git a/amplify/functions/downloadDocument/node_modules/fast-xml-parser/src/v5/XMLParser.js b/amplify/functions/downloadDocument/node_modules/fast-xml-parser/src/v5/XMLParser.js new file mode 100755 index 0000000..6de58ed --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/fast-xml-parser/src/v5/XMLParser.js @@ -0,0 +1,85 @@ +const { buildOptions} = 
require("./OptionsBuilder"); +const Xml2JsParser = require("./Xml2JsParser"); + +class XMLParser{ + + constructor(options){ + this.externalEntities = {}; + this.options = buildOptions(options); + // console.log(this.options) + } + /** + * Parse XML data string to JS object + * @param {string|Buffer} xmlData + * @param {boolean|Object} validationOption + */ + parse(xmlData){ + if(Array.isArray(xmlData) && xmlData.byteLength !== undefined){ + return this.parse(xmlData); + }else if( xmlData.toString){ + xmlData = xmlData.toString(); + }else{ + throw new Error("XML data is accepted in String or Bytes[] form.") + } + // if( validationOption){ + // if(validationOption === true) validationOption = {}; //validate with default options + + // const result = validator.validate(xmlData, validationOption); + // if (result !== true) { + // throw Error( `${result.err.msg}:${result.err.line}:${result.err.col}` ) + // } + // } + const parser = new Xml2JsParser(this.options); + parser.entityParser.addExternalEntities(this.externalEntities); + return parser.parse(xmlData); + } + /** + * Parse XML data buffer to JS object + * @param {string|Buffer} xmlData + * @param {boolean|Object} validationOption + */ + parseBytesArr(xmlData){ + if(Array.isArray(xmlData) && xmlData.byteLength !== undefined){ + }else{ + throw new Error("XML data is accepted in Bytes[] form.") + } + const parser = new Xml2JsParser(this.options); + parser.entityParser.addExternalEntities(this.externalEntities); + return parser.parseBytesArr(xmlData); + } + /** + * Parse XML data stream to JS object + * @param {fs.ReadableStream} xmlDataStream + */ + parseStream(xmlDataStream){ + if(!isStream(xmlDataStream)) throw new Error("FXP: Invalid stream input"); + + const orderedObjParser = new Xml2JsParser(this.options); + orderedObjParser.entityParser.addExternalEntities(this.externalEntities); + return orderedObjParser.parseStream(xmlDataStream); + } + + /** + * Add Entity which is not by default supported by this library + 
* @param {string} key + * @param {string} value + */ + addEntity(key, value){ + if(value.indexOf("&") !== -1){ + throw new Error("Entity value can't have '&'") + }else if(key.indexOf("&") !== -1 || key.indexOf(";") !== -1){ + throw new Error("An entity must be set without '&' and ';'. Eg. use '#xD' for ' '") + }else if(value === "&"){ + throw new Error("An entity with value '&' is not permitted"); + }else{ + this.externalEntities[key] = value; + } + } +} + +function isStream(stream){ + if(stream && typeof stream.read === "function" && typeof stream.on === "function" && typeof stream.readableEnded === "boolean") return true; + return false; +} + +module.exports = XMLParser; \ No newline at end of file diff --git a/amplify/functions/downloadDocument/node_modules/fast-xml-parser/src/v5/Xml2JsParser.js b/amplify/functions/downloadDocument/node_modules/fast-xml-parser/src/v5/Xml2JsParser.js new file mode 100644 index 0000000..c4baab4 --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/fast-xml-parser/src/v5/Xml2JsParser.js @@ -0,0 +1,237 @@ +const StringSource = require("./inputSource/StringSource"); +const BufferSource = require("./inputSource/BufferSource"); +const {readTagExp,readClosingTagName} = require("./XmlPartReader"); +const {readComment, readCdata,readDocType,readPiTag} = require("./XmlSpecialTagsReader"); +const TagPath = require("./TagPath"); +const TagPathMatcher = require("./TagPathMatcher"); +const EntitiesParser = require('./EntitiesParser'); + +//To hold the data of current tag +//This is usually used to compare jpath expression against current tag +class TagDetail{ + constructor(name){ + this.name = name; + this.position = 0; + // this.attributes = {}; + } +} + +class Xml2JsParser { + constructor(options) { + this.options = options; + + this.currentTagDetail = null; + this.tagTextData = ""; + this.tagsStack = []; + this.entityParser = new EntitiesParser(options.htmlEntities); + this.stopNodes = []; + for (let i = 0; i < 
this.options.stopNodes.length; i++) { + this.stopNodes.push(new TagPath(this.options.stopNodes[i])); + } + } + + parse(strData) { + this.source = new StringSource(strData); + this.parseXml(); + return this.outputBuilder.getOutput(); + } + parseBytesArr(data) { + this.source = new BufferSource(data ); + this.parseXml(); + return this.outputBuilder.getOutput(); + } + + parseXml() { + //TODO: Separate TagValueParser as separate class. So no scope issue in node builder class + + //OutputBuilder should be set in XML Parser + this.outputBuilder = this.options.OutputBuilder.getInstance(this.options); + this.root = { root: true}; + this.currentTagDetail = this.root; + + while(this.source.canRead()){ + let ch = this.source.readCh(); + if (ch === "") break; + + if(ch === "<"){//tagStart + let nextChar = this.source.readChAt(0); + if (nextChar === "" ) throw new Error("Unexpected end of source"); + + + if(nextChar === "!" || nextChar === "?"){ + this.source.updateBufferBoundary(); + //previously collected text should be added to current node + this.addTextNode(); + + this.readSpecialTag(nextChar);// Read DOCTYPE, comment, CDATA, PI tag + }else if(nextChar === "/"){ + this.source.updateBufferBoundary(); + this.readClosingTag(); + // console.log(this.source.buffer.length, this.source.readable); + // console.log(this.tagsStack.length); + }else{//opening tag + this.readOpeningTag(); + } + }else{ + this.tagTextData += ch; + } + }//End While loop + if(this.tagsStack.length > 0 || ( this.tagTextData !== "undefined" && this.tagTextData.trimEnd().length > 0) ) throw new Error("Unexpected data in the end of document"); + } + + /** + * read closing paired tag. Set parent tag in scope. + * skip a node on user's choice + */ + readClosingTag(){ + const tagName = this.processTagName(readClosingTagName(this.source)); + // console.log(tagName, this.tagsStack.length); + this.validateClosingTag(tagName); + // All the text data collected, belongs to current tag. 
+ if(!this.currentTagDetail.root) this.addTextNode(); + this.outputBuilder.closeTag(); + // Since the tag is closed now, parent tag comes in scope + this.currentTagDetail = this.tagsStack.pop(); + } + + validateClosingTag(tagName){ + // This can't be unpaired tag, or a stop tag. + if(this.isUnpaired(tagName) || this.isStopNode(tagName)) throw new Error(`Unexpected closing tag '${tagName}'`); + // This must match with last opening tag + else if(tagName !== this.currentTagDetail.name) + throw new Error(`Unexpected closing tag '${tagName}' expecting '${this.currentTagDetail.name}'`) + } + + /** + * Read paired, unpaired, self-closing, stop and special tags. + * Create a new node + * Push paired tag in stack. + */ + readOpeningTag(){ + //save previously collected text data to current node + this.addTextNode(); + + //create new tag + let tagExp = readTagExp(this, ">" ); + + // process and skip from tagsStack For unpaired tag, self closing tag, and stop node + const tagDetail = new TagDetail(tagExp.tagName); + if(this.isUnpaired(tagExp.tagName)) { + //TODO: this will lead 2 extra stack operation + this.outputBuilder.addTag(tagDetail); + this.outputBuilder.closeTag(); + } else if(tagExp.selfClosing){ + this.outputBuilder.addTag(tagDetail); + this.outputBuilder.closeTag(); + } else if(this.isStopNode(this.currentTagDetail)){ + // TODO: let's user set a stop node boundary detector for complex contents like script tag + //TODO: pass tag name only to avoid string operations + const content = source.readUptoCloseTag(` 0){ + //TODO: shift parsing to output builder + + this.outputBuilder.addValue(this.replaceEntities(this.tagTextData)); + } + this.tagTextData = ""; + } + // } + } + + processAttrName(name){ + if(name === "__proto__") name = "#__proto__"; + name = resolveNameSpace(name, this.removeNSPrefix); + return name; + } + + processTagName(name){ + if(name === "__proto__") name = "#__proto__"; + name = resolveNameSpace(name, this.removeNSPrefix); + return name; + } + + /** + 
* Generate tags path from tagsStack + */ + tagsPath(tagName){ + //TODO: return TagPath Object. User can call match method with path + return ""; + } + + isUnpaired(tagName){ + return this.options.tags.unpaired.indexOf(tagName) !== -1; + } + + /** + * valid expressions are + * tag nested + * * nested + * tag nested[attribute] + * tag nested[attribute=""] + * tag nested[attribute!=""] + * tag nested:0 //for future + * @param {string} tagName + * @returns + */ + isStopNode(node){ + for (let i = 0; i < this.stopNodes.length; i++) { + const givenPath = this.stopNodes[i]; + if(givenPath.match(this.tagsStack, node)) return true; + } + return false + } + + replaceEntities(text){ + //TODO: if option is set then replace entities + return this.entityParser.parse(text) + } +} + +function resolveNameSpace(name, removeNSPrefix) { + if (removeNSPrefix) { + const parts = name.split(':'); + if(parts.length === 2){ + if (parts[0] === 'xmlns') return ''; + else return parts[1]; + }else reportError(`Multiple namespaces ${name}`) + } + return name; +} + +module.exports = Xml2JsParser; \ No newline at end of file diff --git a/amplify/functions/downloadDocument/node_modules/fast-xml-parser/src/v5/XmlPartReader.js b/amplify/functions/downloadDocument/node_modules/fast-xml-parser/src/v5/XmlPartReader.js new file mode 100644 index 0000000..56b180e --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/fast-xml-parser/src/v5/XmlPartReader.js @@ -0,0 +1,212 @@ +'use strict'; + +/** + * find paired tag for a stop node + * @param {string} xmlDoc + * @param {string} tagName + * @param {number} i : start index + */ +function readStopNode(xmlDoc, tagName, i){ + const startIndex = i; + // Starting at 1 since we already have an open tag + let openTagCount = 1; + + for (; i < xmlDoc.length; i++) { + if( xmlDoc[i] === "<"){ + if (xmlDoc[i+1] === "/") {//close tag + const closeIndex = findSubStrIndex(xmlDoc, ">", i, `${tagName} is not closed`); + let closeTagName = 
xmlDoc.substring(i+2,closeIndex).trim(); + if(closeTagName === tagName){ + openTagCount--; + if (openTagCount === 0) { + return { + tagContent: xmlDoc.substring(startIndex, i), + i : closeIndex + } + } + } + i=closeIndex; + } else if(xmlDoc[i+1] === '?') { + const closeIndex = findSubStrIndex(xmlDoc, "?>", i+1, "StopNode is not closed.") + i=closeIndex; + } else if(xmlDoc.substr(i + 1, 3) === '!--') { + const closeIndex = findSubStrIndex(xmlDoc, "-->", i+3, "StopNode is not closed.") + i=closeIndex; + } else if(xmlDoc.substr(i + 1, 2) === '![') { + const closeIndex = findSubStrIndex(xmlDoc, "]]>", i, "StopNode is not closed.") - 2; + i=closeIndex; + } else { + const tagData = readTagExp(xmlDoc, i, '>') + + if (tagData) { + const openTagName = tagData && tagData.tagName; + if (openTagName === tagName && tagData.tagExp[tagData.tagExp.length-1] !== "/") { + openTagCount++; + } + i=tagData.closeIndex; + } + } + } + }//end for loop +} + +/** + * Read closing tag name + * @param {Source} source + * @returns tag name + */ +function readClosingTagName(source){ + let text = ""; //temporary data + while(source.canRead()){ + let ch = source.readCh(); + // if (ch === null || ch === undefined) break; + // source.updateBuffer(); + + if (ch === ">") return text.trimEnd(); + else text += ch; + } + throw new Error(`Unexpected end of source. Reading '${substr}'`); +} + +/** + * Read XML tag and build attributes map + * This function can be used to read normal tag, pi tag. + * This function can't be used to read comment, CDATA, DOCTYPE. 
+ * Eg + * @param {string} xmlDoc + * @param {number} startIndex starting index + * @returns tag expression includes tag name & attribute string + */ +function readTagExp(parser) { + let inSingleQuotes = false; + let inDoubleQuotes = false; + let i; + let EOE = false; + + for (i = 0; parser.source.canRead(i); i++) { + const char = parser.source.readChAt(i); + + if (char === "'" && !inDoubleQuotes) { + inSingleQuotes = !inSingleQuotes; + } else if (char === '"' && !inSingleQuotes) { + inDoubleQuotes = !inDoubleQuotes; + } else if (char === '>' && !inSingleQuotes && !inDoubleQuotes) { + // If not inside quotes, stop reading at '>' + EOE = true; + break; + } + + } + if(inSingleQuotes || inDoubleQuotes){ + throw new Error("Invalid attribute expression. Quote is not properly closed"); + }else if(!EOE) throw new Error("Unexpected closing of source. Waiting for '>'"); + + + const exp = parser.source.readStr(i); + parser.source.updateBufferBoundary(i + 1); + return buildTagExpObj(exp, parser) +} + +function readPiExp(parser) { + let inSingleQuotes = false; + let inDoubleQuotes = false; + let i; + let EOE = false; + + for (i = 0; parser.source.canRead(i) ; i++) { + const currentChar = parser.source.readChAt(i); + const nextChar = parser.source.readChAt(i+1); + + if (currentChar === "'" && !inDoubleQuotes) { + inSingleQuotes = !inSingleQuotes; + } else if (currentChar === '"' && !inSingleQuotes) { + inDoubleQuotes = !inDoubleQuotes; + } + + if (!inSingleQuotes && !inDoubleQuotes) { + if (currentChar === '?' && nextChar === '>') { + EOE = true; + break; // Exit the loop when '?>' is found + } + } + } + if(inSingleQuotes || inDoubleQuotes){ + throw new Error("Invalid attribute expression. Quote is not properly closed in PI tag expression"); + }else if(!EOE) throw new Error("Unexpected closing of source. 
Waiting for '?>'"); + + if(!parser.options.attributes.ignore){ + //TODO: use regex to verify attributes if not set to ignore + } + + const exp = parser.source.readStr(i); + parser.source.updateBufferBoundary(i + 1); + return buildTagExpObj(exp, parser) +} + +function buildTagExpObj(exp, parser){ + const tagExp = { + tagName: "", + selfClosing: false + }; + let attrsExp = ""; + + if(exp[exp.length -1] === "/") tagExp.selfClosing = true; + + //separate tag name + let i = 0; + for (; i < exp.length; i++) { + const char = exp[i]; + if(char === " "){ + tagExp.tagName = exp.substring(0, i); + attrsExp = exp.substring(i + 1); + break; + } + } + //only tag + if(tagExp.tagName.length === 0 && i === exp.length)tagExp.tagName = exp; + + tagExp.tagName = tagExp.tagName.trimEnd(); + + if(!parser.options.attributes.ignore && attrsExp.length > 0){ + parseAttributesExp(attrsExp,parser) + } + + return tagExp; +} + +const attrsRegx = new RegExp('([^\\s=]+)\\s*(=\\s*([\'"])([\\s\\S]*?)\\3)?', 'gm'); + +function parseAttributesExp(attrStr, parser) { + const matches = getAllMatches(attrStr, attrsRegx); + const len = matches.length; //don't make it inline + for (let i = 0; i < len; i++) { + let attrName = parser.processAttrName(matches[i][1]); + let attrVal = parser.replaceEntities(matches[i][4] || true); + + parser.outputBuilder.addAttribute(attrName, attrVal); + } +} + + +const getAllMatches = function(string, regex) { + const matches = []; + let match = regex.exec(string); + while (match) { + const allmatches = []; + allmatches.startIndex = regex.lastIndex - match[0].length; + const len = match.length; + for (let index = 0; index < len; index++) { + allmatches.push(match[index]); + } + matches.push(allmatches); + match = regex.exec(string); + } + return matches; +}; + +module.exports = { + readStopNode: readStopNode, + readClosingTagName: readClosingTagName, + readTagExp: readTagExp, + readPiExp: readPiExp, +} \ No newline at end of file diff --git 
a/amplify/functions/downloadDocument/node_modules/fast-xml-parser/src/v5/XmlSpecialTagsReader.js b/amplify/functions/downloadDocument/node_modules/fast-xml-parser/src/v5/XmlSpecialTagsReader.js new file mode 100644 index 0000000..0fba196 --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/fast-xml-parser/src/v5/XmlSpecialTagsReader.js @@ -0,0 +1,118 @@ +const {readPiExp} = require("./XmlPartReader"); + +function readCdata(parser){ + //"); + parser.outputBuilder.addCdata(text); +} +function readPiTag(parser){ + //"); + if(!tagExp) throw new Error("Invalid Pi Tag expression."); + + if (tagExp.tagName === "?xml") {//TODO: test if tagName is just xml + parser.outputBuilder.addDeclaration(); + } else { + parser.outputBuilder.addPi("?"+tagExp.tagName); + } +} + +function readComment(parser){ + //"); + parser.outputBuilder.addComment(text); +} + +const DOCTYPE_tags = { + "EL":/^EMENT\s+([^\s>]+)\s+(ANY|EMPTY|\(.+\)\s*$)/m, + "AT":/^TLIST\s+[^\s]+\s+[^\s]+\s+[^\s]+\s+[^\s]+\s+$/m, + "NO":/^TATION.+$/m +} +function readDocType(parser){ + //"); + const regx = DOCTYPE_tags[str]; + if(regx){ + const match = dTagExp.match(regx); + if(!match) throw new Error("Invalid DOCTYPE"); + }else throw new Error("Invalid DOCTYPE"); + } + }else if( ch === '>' && lastch === "]"){//end of doctype + return; + } + }else if( ch === '>'){//end of doctype + return; + }else if( ch === '['){ + hasBody = true; + }else{ + lastch = ch; + } + }//End While loop + +} + +function registerEntity(parser){ + //read Entity + let attrBoundary=""; + let name ="", val =""; + while(source.canRead()){ + let ch = source.readCh(); + + if(attrBoundary){ + if (ch === attrBoundary){ + val = text; + text = "" + } + }else if(ch === " " || ch === "\t"){ + if(!name){ + name = text.trimStart(); + text = ""; + } + }else if (ch === '"' || ch === "'") {//start of attrBoundary + attrBoundary = ch; + }else if(ch === ">"){ + parser.entityParser.addExternalEntity(name,val); + return; + }else{ + text+=ch; + } + } +} 
+ +module.exports = { + readCdata: readCdata, + readComment:readComment, + readDocType:readDocType, + readPiTag:readPiTag +} \ No newline at end of file diff --git a/amplify/functions/downloadDocument/node_modules/fast-xml-parser/src/v5/inputSource/BufferSource.js b/amplify/functions/downloadDocument/node_modules/fast-xml-parser/src/v5/inputSource/BufferSource.js new file mode 100644 index 0000000..b83ce46 --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/fast-xml-parser/src/v5/inputSource/BufferSource.js @@ -0,0 +1,118 @@ +const Constants = { + space: 32, + tab: 9 +} +class BufferSource{ + constructor(bytesArr){ + this.line = 1; + this.cols = 0; + this.buffer = bytesArr; + this.startIndex = 0; + } + + + + readCh() { + return String.fromCharCode(this.buffer[this.startIndex++]); + } + + readChAt(index) { + return String.fromCharCode(this.buffer[this.startIndex+index]); + } + + readStr(n,from){ + if(typeof from === "undefined") from = this.startIndex; + return this.buffer.slice(from, from + n).toString(); + } + + readUpto(stopStr) { + const inputLength = this.buffer.length; + const stopLength = stopStr.length; + const stopBuffer = Buffer.from(stopStr); + + for (let i = this.startIndex; i < inputLength; i++) { + let match = true; + for (let j = 0; j < stopLength; j++) { + if (this.buffer[i + j] !== stopBuffer[j]) { + match = false; + break; + } + } + + if (match) { + const result = this.buffer.slice(this.startIndex, i).toString(); + this.startIndex = i + stopLength; + return result; + } + } + + throw new Error(`Unexpected end of source. 
Reading '${stopStr}'`); +} + +readUptoCloseTag(stopStr) { //stopStr: "'){ //TODO: if it should be equivalent ASCII + match = 2; + //tag boundary found + // this.startIndex + } + }else{ + match = 1; + for (let j = 0; j < stopLength; j++) { + if (this.buffer[i + j] !== stopBuffer[j]) { + match = 0; + break; + } + } + } + if (match === 2) {//matched closing part + const result = this.buffer.slice(this.startIndex, stopIndex - 1 ).toString(); + this.startIndex = i + 1; + return result; + } + } + + throw new Error(`Unexpected end of source. Reading '${stopStr}'`); +} + + readFromBuffer(n, shouldUpdate) { + let ch; + if (n === 1) { + ch = this.buffer[this.startIndex]; + if (ch === 10) { + this.line++; + this.cols = 1; + } else { + this.cols++; + } + ch = String.fromCharCode(ch); + } else { + this.cols += n; + ch = this.buffer.slice(this.startIndex, this.startIndex + n).toString(); + } + if (shouldUpdate) this.updateBuffer(n); + return ch; + } + + updateBufferBoundary(n = 1) { //n: number of characters read + this.startIndex += n; + } + + canRead(n){ + n = n || this.startIndex; + return this.buffer.length - n + 1 > 0; + } + +} + +module.exports = BufferSource; \ No newline at end of file diff --git a/amplify/functions/downloadDocument/node_modules/fast-xml-parser/src/v5/inputSource/StringSource.js b/amplify/functions/downloadDocument/node_modules/fast-xml-parser/src/v5/inputSource/StringSource.js new file mode 100644 index 0000000..a996528 --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/fast-xml-parser/src/v5/inputSource/StringSource.js @@ -0,0 +1,123 @@ +const whiteSpaces = [" ", "\n", "\t"]; + + +class StringSource{ + constructor(str){ + this.line = 1; + this.cols = 0; + this.buffer = str; + //a boundary pointer to indicate where from the buffer dat should be read + // data before this pointer can be deleted to free the memory + this.startIndex = 0; + } + + readCh() { + return this.buffer[this.startIndex++]; + } + + readChAt(index) { + return 
this.buffer[this.startIndex+index]; + } + + readStr(n,from){ + if(typeof from === "undefined") from = this.startIndex; + return this.buffer.substring(from, from + n); + } + + readUpto(stopStr) { + const inputLength = this.buffer.length; + const stopLength = stopStr.length; + + for (let i = this.startIndex; i < inputLength; i++) { + let match = true; + for (let j = 0; j < stopLength; j++) { + if (this.buffer[i + j] !== stopStr[j]) { + match = false; + break; + } + } + + if (match) { + const result = this.buffer.substring(this.startIndex, i); + this.startIndex = i + stopLength; + return result; + } + } + + throw new Error(`Unexpected end of source. Reading '${stopStr}'`); + } + + readUptoCloseTag(stopStr) { //stopStr: "'){ + match = 2; + //tag boundary found + // this.startIndex + } + }else{ + match = 1; + for (let j = 0; j < stopLength; j++) { + if (this.buffer[i + j] !== stopStr[j]) { + match = 0; + break; + } + } + } + if (match === 2) {//matched closing part + const result = this.buffer.substring(this.startIndex, stopIndex - 1 ); + this.startIndex = i + 1; + return result; + } + } + + throw new Error(`Unexpected end of source. 
Reading '${stopStr}'`); + } + + readFromBuffer(n, updateIndex){ + let ch; + if(n===1){ + ch = this.buffer[this.startIndex]; + // if(ch === "\n") { + // this.line++; + // this.cols = 1; + // }else{ + // this.cols++; + // } + }else{ + ch = this.buffer.substring(this.startIndex, this.startIndex + n); + // if("".indexOf("\n") !== -1){ + // //TODO: handle the scenario when there are multiple lines + // //TODO: col should be set to number of chars after last '\n' + // // this.cols = 1; + // }else{ + // this.cols += n; + + // } + } + if(updateIndex) this.updateBufferBoundary(n); + return ch; + } + + //TODO: rename to updateBufferReadIndex + + updateBufferBoundary(n = 1) { //n: number of characters read + this.startIndex += n; + } + + canRead(n){ + n = n || this.startIndex; + return this.buffer.length - n + 1 > 0; + } + +} + +module.exports = StringSource; \ No newline at end of file diff --git a/amplify/functions/downloadDocument/node_modules/fast-xml-parser/src/v5/valueParsers/EntitiesParser.js b/amplify/functions/downloadDocument/node_modules/fast-xml-parser/src/v5/valueParsers/EntitiesParser.js new file mode 100644 index 0000000..62cc02f --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/fast-xml-parser/src/v5/valueParsers/EntitiesParser.js @@ -0,0 +1,107 @@ +const ampEntity = { regex: /&(amp|#38|#x26);/g, val : "&"}; +const htmlEntities = { + "space": { regex: /&(nbsp|#160);/g, val: " " }, + // "lt" : { regex: /&(lt|#60);/g, val: "<" }, + // "gt" : { regex: /&(gt|#62);/g, val: ">" }, + // "amp" : { regex: /&(amp|#38);/g, val: "&" }, + // "quot" : { regex: /&(quot|#34);/g, val: "\"" }, + // "apos" : { regex: /&(apos|#39);/g, val: "'" }, + "cent" : { regex: /&(cent|#162);/g, val: "¢" }, + "pound" : { regex: /&(pound|#163);/g, val: "£" }, + "yen" : { regex: /&(yen|#165);/g, val: "¥" }, + "euro" : { regex: /&(euro|#8364);/g, val: "€" }, + "copyright" : { regex: /&(copy|#169);/g, val: "©" }, + "reg" : { regex: /&(reg|#174);/g, val: "®" }, + "inr" : { 
regex: /&(inr|#8377);/g, val: "₹" }, + "num_dec": { regex: /&#([0-9]{1,7});/g, val : (_, str) => String.fromCharCode(Number.parseInt(str, 10)) }, + "num_hex": { regex: /&#x([0-9a-fA-F]{1,6});/g, val : (_, str) => String.fromCharCode(Number.parseInt(str, 16)) }, +}; + +class EntitiesParser{ + constructor(replaceHtmlEntities) { + this.replaceHtmlEntities = replaceHtmlEntities; + this.docTypeEntities = {}; + this.lastEntities = { + "apos" : { regex: /&(apos|#39|#x27);/g, val : "'"}, + "gt" : { regex: /&(gt|#62|#x3E);/g, val : ">"}, + "lt" : { regex: /&(lt|#60|#x3C);/g, val : "<"}, + "quot" : { regex: /&(quot|#34|#x22);/g, val : "\""}, + }; + } + + addExternalEntities(externalEntities){ + const entKeys = Object.keys(externalEntities); + for (let i = 0; i < entKeys.length; i++) { + const ent = entKeys[i]; + this.addExternalEntity(ent,externalEntities[ent]) + } + } + addExternalEntity(key,val){ + validateEntityName(key); + if(val.indexOf("&") !== -1) { + reportWarning(`Entity ${key} is not added as '&' is found in value;`) + return; + }else{ + this.lastEntities[ent] = { + regex: new RegExp("&"+key+";","g"), + val : val + } + } + } + + addDocTypeEntities(entities){ + const entKeys = Object.keys(entities); + for (let i = 0; i < entKeys.length; i++) { + const ent = entKeys[i]; + this.docTypeEntities[ent] = { + regex: new RegExp("&"+ent+";","g"), + val : entities[ent] + } + } + } + + parse(val){ + return this.replaceEntitiesValue(val) + } + + /** + * 1. Replace DOCTYPE entities + * 2. Replace external entities + * 3. 
Replace HTML entities if asked + * @param {string} val + */ + replaceEntitiesValue(val){ + if(typeof val === "string" && val.length > 0){ + for(let entityName in this.docTypeEntities){ + const entity = this.docTypeEntities[entityName]; + val = val.replace( entity.regx, entity.val); + } + for(let entityName in this.lastEntities){ + const entity = this.lastEntities[entityName]; + val = val.replace( entity.regex, entity.val); + } + if(this.replaceHtmlEntities){ + for(let entityName in htmlEntities){ + const entity = htmlEntities[entityName]; + val = val.replace( entity.regex, entity.val); + } + } + val = val.replace( ampEntity.regex, ampEntity.val); + } + return val; + } +}; + +//an entity name should not contains special characters that may be used in regex +//Eg !?\\\/[]$%{}^&*()<> +const specialChar = "!?\\\/[]$%{}^&*()<>|+"; + +function validateEntityName(name){ + for (let i = 0; i < specialChar.length; i++) { + const ch = specialChar[i]; + if(name.indexOf(ch) !== -1) throw new Error(`Invalid character ${ch} in entity name`); + } + return name; +} + +module.exports = EntitiesParser; \ No newline at end of file diff --git a/amplify/functions/downloadDocument/node_modules/fast-xml-parser/src/v5/valueParsers/booleanParser.js b/amplify/functions/downloadDocument/node_modules/fast-xml-parser/src/v5/valueParsers/booleanParser.js new file mode 100644 index 0000000..f8f5d12 --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/fast-xml-parser/src/v5/valueParsers/booleanParser.js @@ -0,0 +1,23 @@ +class boolParser{ + constructor(trueList, falseList){ + if(trueList) + this.trueList = trueList; + else + this.trueList = ["true"]; + + if(falseList) + this.falseList = falseList; + else + this.falseList = ["false"]; + } + parse(val){ + if (typeof val === 'string') { + //TODO: performance: don't convert + const temp = val.toLowerCase(); + if(this.trueList.indexOf(temp) !== -1) return true; + else if(this.falseList.indexOf(temp) !== -1 ) return false; + } + return 
val; + } +} +module.exports = boolParser; \ No newline at end of file diff --git a/amplify/functions/downloadDocument/node_modules/fast-xml-parser/src/v5/valueParsers/booleanParserExt.js b/amplify/functions/downloadDocument/node_modules/fast-xml-parser/src/v5/valueParsers/booleanParserExt.js new file mode 100644 index 0000000..21b8050 --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/fast-xml-parser/src/v5/valueParsers/booleanParserExt.js @@ -0,0 +1,20 @@ +function boolParserExt(val){ + if(isArray(val)){ + for (let i = 0; i < val.length; i++) { + val[i] = parse(val[i]) + } + }else{ + val = parse(val) + } + return val; +} + +function parse(val){ + if (typeof val === 'string') { + const temp = val.toLowerCase(); + if(temp === 'true' || temp ==="yes" || temp==="1") return true; + else if(temp === 'false' || temp ==="no" || temp==="0") return false; + } + return val; +} +module.exports = boolParserExt; \ No newline at end of file diff --git a/amplify/functions/downloadDocument/node_modules/fast-xml-parser/src/v5/valueParsers/currency.js b/amplify/functions/downloadDocument/node_modules/fast-xml-parser/src/v5/valueParsers/currency.js new file mode 100644 index 0000000..82e21e7 --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/fast-xml-parser/src/v5/valueParsers/currency.js @@ -0,0 +1,40 @@ +const defaultOptions = { + maxLength: 200, + // locale: "en-IN" +} +const localeMap = { + "$":"en-US", + "€":"de-DE", + "£":"en-GB", + "¥":"ja-JP", + "₹":"en-IN", +} +const sign = "(?:-|\+)?"; +const digitsAndSeparator = "(?:\d+|\d{1,3}(?:,\d{3})+)"; +const decimalPart = "(?:\.\d{1,2})?"; +const symbol = "(?:\$|€|¥|₹)?"; + +const currencyCheckRegex = /^\s*(?:-|\+)?(?:\d+|\d{1,3}(?:,\d{3})+)?(?:\.\d{1,2})?\s*(?:\$|€|¥|₹)?\s*$/u; + +class CurrencyParser{ + constructor(options){ + this.options = options || defaultOptions; + } + parse(val){ + if (typeof val === 'string' && val.length <= this.options.maxLength) { + if(val.indexOf(",,") !== -1 && 
val.indexOf(".." !== -1)){ + const match = val.match(currencyCheckRegex); + if(match){ + const locale = this.options.locale || localeMap[match[2]||match[5]||"₹"]; + const formatter = new Intl.NumberFormat(locale) + val = val.replace(/[^0-9,.]/g, '').trim(); + val = Number(val.replace(formatter.format(1000)[1], '')); + } + } + } + return val; + } +} +CurrencyParser.defaultOptions = defaultOptions; + +module.exports = CurrencyParser; \ No newline at end of file diff --git a/amplify/functions/downloadDocument/node_modules/fast-xml-parser/src/v5/valueParsers/join.js b/amplify/functions/downloadDocument/node_modules/fast-xml-parser/src/v5/valueParsers/join.js new file mode 100644 index 0000000..d7f2027 --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/fast-xml-parser/src/v5/valueParsers/join.js @@ -0,0 +1,14 @@ +/** + * + * @param {array} val + * @param {string} by + * @returns + */ +function join(val, by=" "){ + if(isArray(val)){ + val.join(by) + } + return val; +} + +module.exports = join; \ No newline at end of file diff --git a/amplify/functions/downloadDocument/node_modules/fast-xml-parser/src/v5/valueParsers/number.js b/amplify/functions/downloadDocument/node_modules/fast-xml-parser/src/v5/valueParsers/number.js new file mode 100644 index 0000000..bef3803 --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/fast-xml-parser/src/v5/valueParsers/number.js @@ -0,0 +1,16 @@ +const toNumber = require("strnum"); + + +class numParser{ + constructor(options){ + this.options = options; + } + parse(val){ + if (typeof val === 'string') { + val = toNumber(val,this.options); + } + return val; + } +} + +module.exports = numParser; \ No newline at end of file diff --git a/amplify/functions/downloadDocument/node_modules/fast-xml-parser/src/v5/valueParsers/trim.js b/amplify/functions/downloadDocument/node_modules/fast-xml-parser/src/v5/valueParsers/trim.js new file mode 100644 index 0000000..ecce49a --- /dev/null +++ 
b/amplify/functions/downloadDocument/node_modules/fast-xml-parser/src/v5/valueParsers/trim.js @@ -0,0 +1,8 @@ +class trimmer{ + parse(val){ + if(typeof val === "string") return val.trim(); + else return val; + } +} + +module.exports = trimmer; \ No newline at end of file diff --git a/amplify/functions/downloadDocument/node_modules/fast-xml-parser/src/validator.js b/amplify/functions/downloadDocument/node_modules/fast-xml-parser/src/validator.js new file mode 100644 index 0000000..3b1b2ef --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/fast-xml-parser/src/validator.js @@ -0,0 +1,425 @@ +'use strict'; + +const util = require('./util'); + +const defaultOptions = { + allowBooleanAttributes: false, //A tag can have attributes without any value + unpairedTags: [] +}; + +//const tagsPattern = new RegExp("<\\/?([\\w:\\-_\.]+)\\s*\/?>","g"); +exports.validate = function (xmlData, options) { + options = Object.assign({}, defaultOptions, options); + + //xmlData = xmlData.replace(/(\r\n|\n|\r)/gm,"");//make it single line + //xmlData = xmlData.replace(/(^\s*<\?xml.*?\?>)/g,"");//Remove XML starting tag + //xmlData = xmlData.replace(/()/g,"");//Remove DOCTYPE + const tags = []; + let tagFound = false; + + //indicates that the root tag has been closed (aka. 
depth 0 has been reached) + let reachedRoot = false; + + if (xmlData[0] === '\ufeff') { + // check for byte order mark (BOM) + xmlData = xmlData.substr(1); + } + + for (let i = 0; i < xmlData.length; i++) { + + if (xmlData[i] === '<' && xmlData[i+1] === '?') { + i+=2; + i = readPI(xmlData,i); + if (i.err) return i; + }else if (xmlData[i] === '<') { + //starting of tag + //read until you reach to '>' avoiding any '>' in attribute value + let tagStartPos = i; + i++; + + if (xmlData[i] === '!') { + i = readCommentAndCDATA(xmlData, i); + continue; + } else { + let closingTag = false; + if (xmlData[i] === '/') { + //closing tag + closingTag = true; + i++; + } + //read tagname + let tagName = ''; + for (; i < xmlData.length && + xmlData[i] !== '>' && + xmlData[i] !== ' ' && + xmlData[i] !== '\t' && + xmlData[i] !== '\n' && + xmlData[i] !== '\r'; i++ + ) { + tagName += xmlData[i]; + } + tagName = tagName.trim(); + //console.log(tagName); + + if (tagName[tagName.length - 1] === '/') { + //self closing tag without attributes + tagName = tagName.substring(0, tagName.length - 1); + //continue; + i--; + } + if (!validateTagName(tagName)) { + let msg; + if (tagName.trim().length === 0) { + msg = "Invalid space after '<'."; + } else { + msg = "Tag '"+tagName+"' is an invalid name."; + } + return getErrorObject('InvalidTag', msg, getLineNumberForPosition(xmlData, i)); + } + + const result = readAttributeStr(xmlData, i); + if (result === false) { + return getErrorObject('InvalidAttr', "Attributes for '"+tagName+"' have open quote.", getLineNumberForPosition(xmlData, i)); + } + let attrStr = result.value; + i = result.index; + + if (attrStr[attrStr.length - 1] === '/') { + //self closing tag + const attrStrStart = i - attrStr.length; + attrStr = attrStr.substring(0, attrStr.length - 1); + const isValid = validateAttributeString(attrStr, options); + if (isValid === true) { + tagFound = true; + //continue; //text may presents after self closing tag + } else { + //the result from the 
nested function returns the position of the error within the attribute + //in order to get the 'true' error line, we need to calculate the position where the attribute begins (i - attrStr.length) and then add the position within the attribute + //this gives us the absolute index in the entire xml, which we can use to find the line at last + return getErrorObject(isValid.err.code, isValid.err.msg, getLineNumberForPosition(xmlData, attrStrStart + isValid.err.line)); + } + } else if (closingTag) { + if (!result.tagClosed) { + return getErrorObject('InvalidTag', "Closing tag '"+tagName+"' doesn't have proper closing.", getLineNumberForPosition(xmlData, i)); + } else if (attrStr.trim().length > 0) { + return getErrorObject('InvalidTag', "Closing tag '"+tagName+"' can't have attributes or invalid starting.", getLineNumberForPosition(xmlData, tagStartPos)); + } else if (tags.length === 0) { + return getErrorObject('InvalidTag', "Closing tag '"+tagName+"' has not been opened.", getLineNumberForPosition(xmlData, tagStartPos)); + } else { + const otg = tags.pop(); + if (tagName !== otg.tagName) { + let openPos = getLineNumberForPosition(xmlData, otg.tagStartPos); + return getErrorObject('InvalidTag', + "Expected closing tag '"+otg.tagName+"' (opened in line "+openPos.line+", col "+openPos.col+") instead of closing tag '"+tagName+"'.", + getLineNumberForPosition(xmlData, tagStartPos)); + } + + //when there are no more tags, we reached the root level. 
+ if (tags.length == 0) { + reachedRoot = true; + } + } + } else { + const isValid = validateAttributeString(attrStr, options); + if (isValid !== true) { + //the result from the nested function returns the position of the error within the attribute + //in order to get the 'true' error line, we need to calculate the position where the attribute begins (i - attrStr.length) and then add the position within the attribute + //this gives us the absolute index in the entire xml, which we can use to find the line at last + return getErrorObject(isValid.err.code, isValid.err.msg, getLineNumberForPosition(xmlData, i - attrStr.length + isValid.err.line)); + } + + //if the root level has been reached before ... + if (reachedRoot === true) { + return getErrorObject('InvalidXml', 'Multiple possible root nodes found.', getLineNumberForPosition(xmlData, i)); + } else if(options.unpairedTags.indexOf(tagName) !== -1){ + //don't push into stack + } else { + tags.push({tagName, tagStartPos}); + } + tagFound = true; + } + + //skip tag text value + //It may include comments and CDATA value + for (i++; i < xmlData.length; i++) { + if (xmlData[i] === '<') { + if (xmlData[i + 1] === '!') { + //comment or CADATA + i++; + i = readCommentAndCDATA(xmlData, i); + continue; + } else if (xmlData[i+1] === '?') { + i = readPI(xmlData, ++i); + if (i.err) return i; + } else{ + break; + } + } else if (xmlData[i] === '&') { + const afterAmp = validateAmpersand(xmlData, i); + if (afterAmp == -1) + return getErrorObject('InvalidChar', "char '&' is not expected.", getLineNumberForPosition(xmlData, i)); + i = afterAmp; + }else{ + if (reachedRoot === true && !isWhiteSpace(xmlData[i])) { + return getErrorObject('InvalidXml', "Extra text at the end", getLineNumberForPosition(xmlData, i)); + } + } + } //end of reading tag text value + if (xmlData[i] === '<') { + i--; + } + } + } else { + if ( isWhiteSpace(xmlData[i])) { + continue; + } + return getErrorObject('InvalidChar', "char '"+xmlData[i]+"' is not 
expected.", getLineNumberForPosition(xmlData, i)); + } + } + + if (!tagFound) { + return getErrorObject('InvalidXml', 'Start tag expected.', 1); + }else if (tags.length == 1) { + return getErrorObject('InvalidTag', "Unclosed tag '"+tags[0].tagName+"'.", getLineNumberForPosition(xmlData, tags[0].tagStartPos)); + }else if (tags.length > 0) { + return getErrorObject('InvalidXml', "Invalid '"+ + JSON.stringify(tags.map(t => t.tagName), null, 4).replace(/\r?\n/g, '')+ + "' found.", {line: 1, col: 1}); + } + + return true; +}; + +function isWhiteSpace(char){ + return char === ' ' || char === '\t' || char === '\n' || char === '\r'; +} +/** + * Read Processing insstructions and skip + * @param {*} xmlData + * @param {*} i + */ +function readPI(xmlData, i) { + const start = i; + for (; i < xmlData.length; i++) { + if (xmlData[i] == '?' || xmlData[i] == ' ') { + //tagname + const tagname = xmlData.substr(start, i - start); + if (i > 5 && tagname === 'xml') { + return getErrorObject('InvalidXml', 'XML declaration allowed only at the start of the document.', getLineNumberForPosition(xmlData, i)); + } else if (xmlData[i] == '?' 
&& xmlData[i + 1] == '>') { + //check if valid attribut string + i++; + break; + } else { + continue; + } + } + } + return i; +} + +function readCommentAndCDATA(xmlData, i) { + if (xmlData.length > i + 5 && xmlData[i + 1] === '-' && xmlData[i + 2] === '-') { + //comment + for (i += 3; i < xmlData.length; i++) { + if (xmlData[i] === '-' && xmlData[i + 1] === '-' && xmlData[i + 2] === '>') { + i += 2; + break; + } + } + } else if ( + xmlData.length > i + 8 && + xmlData[i + 1] === 'D' && + xmlData[i + 2] === 'O' && + xmlData[i + 3] === 'C' && + xmlData[i + 4] === 'T' && + xmlData[i + 5] === 'Y' && + xmlData[i + 6] === 'P' && + xmlData[i + 7] === 'E' + ) { + let angleBracketsCount = 1; + for (i += 8; i < xmlData.length; i++) { + if (xmlData[i] === '<') { + angleBracketsCount++; + } else if (xmlData[i] === '>') { + angleBracketsCount--; + if (angleBracketsCount === 0) { + break; + } + } + } + } else if ( + xmlData.length > i + 9 && + xmlData[i + 1] === '[' && + xmlData[i + 2] === 'C' && + xmlData[i + 3] === 'D' && + xmlData[i + 4] === 'A' && + xmlData[i + 5] === 'T' && + xmlData[i + 6] === 'A' && + xmlData[i + 7] === '[' + ) { + for (i += 8; i < xmlData.length; i++) { + if (xmlData[i] === ']' && xmlData[i + 1] === ']' && xmlData[i + 2] === '>') { + i += 2; + break; + } + } + } + + return i; +} + +const doubleQuote = '"'; +const singleQuote = "'"; + +/** + * Keep reading xmlData until '<' is found outside the attribute value. 
+ * @param {string} xmlData + * @param {number} i + */ +function readAttributeStr(xmlData, i) { + let attrStr = ''; + let startChar = ''; + let tagClosed = false; + for (; i < xmlData.length; i++) { + if (xmlData[i] === doubleQuote || xmlData[i] === singleQuote) { + if (startChar === '') { + startChar = xmlData[i]; + } else if (startChar !== xmlData[i]) { + //if vaue is enclosed with double quote then single quotes are allowed inside the value and vice versa + } else { + startChar = ''; + } + } else if (xmlData[i] === '>') { + if (startChar === '') { + tagClosed = true; + break; + } + } + attrStr += xmlData[i]; + } + if (startChar !== '') { + return false; + } + + return { + value: attrStr, + index: i, + tagClosed: tagClosed + }; +} + +/** + * Select all the attributes whether valid or invalid. + */ +const validAttrStrRegxp = new RegExp('(\\s*)([^\\s=]+)(\\s*=)?(\\s*([\'"])(([\\s\\S])*?)\\5)?', 'g'); + +//attr, ="sd", a="amit's", a="sd"b="saf", ab cd="" + +function validateAttributeString(attrStr, options) { + //console.log("start:"+attrStr+":end"); + + //if(attrStr.trim().length === 0) return true; //empty string + + const matches = util.getAllMatches(attrStr, validAttrStrRegxp); + const attrNames = {}; + + for (let i = 0; i < matches.length; i++) { + if (matches[i][1].length === 0) { + //nospace before attribute name: a="sd"b="saf" + return getErrorObject('InvalidAttr', "Attribute '"+matches[i][2]+"' has no space in starting.", getPositionFromMatch(matches[i])) + } else if (matches[i][3] !== undefined && matches[i][4] === undefined) { + return getErrorObject('InvalidAttr', "Attribute '"+matches[i][2]+"' is without value.", getPositionFromMatch(matches[i])); + } else if (matches[i][3] === undefined && !options.allowBooleanAttributes) { + //independent attribute: ab + return getErrorObject('InvalidAttr', "boolean attribute '"+matches[i][2]+"' is not allowed.", getPositionFromMatch(matches[i])); + } + /* else if(matches[i][6] === undefined){//attribute without 
value: ab= + return { err: { code:"InvalidAttr",msg:"attribute " + matches[i][2] + " has no value assigned."}}; + } */ + const attrName = matches[i][2]; + if (!validateAttrName(attrName)) { + return getErrorObject('InvalidAttr', "Attribute '"+attrName+"' is an invalid name.", getPositionFromMatch(matches[i])); + } + if (!attrNames.hasOwnProperty(attrName)) { + //check for duplicate attribute. + attrNames[attrName] = 1; + } else { + return getErrorObject('InvalidAttr', "Attribute '"+attrName+"' is repeated.", getPositionFromMatch(matches[i])); + } + } + + return true; +} + +function validateNumberAmpersand(xmlData, i) { + let re = /\d/; + if (xmlData[i] === 'x') { + i++; + re = /[\da-fA-F]/; + } + for (; i < xmlData.length; i++) { + if (xmlData[i] === ';') + return i; + if (!xmlData[i].match(re)) + break; + } + return -1; +} + +function validateAmpersand(xmlData, i) { + // https://www.w3.org/TR/xml/#dt-charref + i++; + if (xmlData[i] === ';') + return -1; + if (xmlData[i] === '#') { + i++; + return validateNumberAmpersand(xmlData, i); + } + let count = 0; + for (; i < xmlData.length; i++, count++) { + if (xmlData[i].match(/\w/) && count < 20) + continue; + if (xmlData[i] === ';') + break; + return -1; + } + return i; +} + +function getErrorObject(code, message, lineNumber) { + return { + err: { + code: code, + msg: message, + line: lineNumber.line || lineNumber, + col: lineNumber.col, + }, + }; +} + +function validateAttrName(attrName) { + return util.isName(attrName); +} + +// const startsWithXML = /^xml/i; + +function validateTagName(tagname) { + return util.isName(tagname) /* && !tagname.match(startsWithXML) */; +} + +//this function returns the line number for the character at the given index +function getLineNumberForPosition(xmlData, index) { + const lines = xmlData.substring(0, index).split(/\r?\n/); + return { + line: lines.length, + + // column number is last line's length + 1, because column numbering starts at 1: + col: lines[lines.length - 1].length + 1 
+ }; +} + +//this function returns the position of the first character of match within attrStr +function getPositionFromMatch(match) { + return match.startIndex + match[1].length; +} diff --git a/amplify/functions/downloadDocument/node_modules/fast-xml-parser/src/xmlbuilder/json2xml.js b/amplify/functions/downloadDocument/node_modules/fast-xml-parser/src/xmlbuilder/json2xml.js new file mode 100644 index 0000000..f30604a --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/fast-xml-parser/src/xmlbuilder/json2xml.js @@ -0,0 +1,281 @@ +'use strict'; +//parse Empty Node as self closing node +const buildFromOrderedJs = require('./orderedJs2Xml'); + +const defaultOptions = { + attributeNamePrefix: '@_', + attributesGroupName: false, + textNodeName: '#text', + ignoreAttributes: true, + cdataPropName: false, + format: false, + indentBy: ' ', + suppressEmptyNode: false, + suppressUnpairedNode: true, + suppressBooleanAttributes: true, + tagValueProcessor: function(key, a) { + return a; + }, + attributeValueProcessor: function(attrName, a) { + return a; + }, + preserveOrder: false, + commentPropName: false, + unpairedTags: [], + entities: [ + { regex: new RegExp("&", "g"), val: "&" },//it must be on top + { regex: new RegExp(">", "g"), val: ">" }, + { regex: new RegExp("<", "g"), val: "<" }, + { regex: new RegExp("\'", "g"), val: "'" }, + { regex: new RegExp("\"", "g"), val: """ } + ], + processEntities: true, + stopNodes: [], + // transformTagName: false, + // transformAttributeName: false, + oneListGroup: false +}; + +function Builder(options) { + this.options = Object.assign({}, defaultOptions, options); + if (this.options.ignoreAttributes || this.options.attributesGroupName) { + this.isAttribute = function(/*a*/) { + return false; + }; + } else { + this.attrPrefixLen = this.options.attributeNamePrefix.length; + this.isAttribute = isAttribute; + } + + this.processTextOrObjNode = processTextOrObjNode + + if (this.options.format) { + this.indentate = 
indentate; + this.tagEndChar = '>\n'; + this.newLine = '\n'; + } else { + this.indentate = function() { + return ''; + }; + this.tagEndChar = '>'; + this.newLine = ''; + } +} + +Builder.prototype.build = function(jObj) { + if(this.options.preserveOrder){ + return buildFromOrderedJs(jObj, this.options); + }else { + if(Array.isArray(jObj) && this.options.arrayNodeName && this.options.arrayNodeName.length > 1){ + jObj = { + [this.options.arrayNodeName] : jObj + } + } + return this.j2x(jObj, 0).val; + } +}; + +Builder.prototype.j2x = function(jObj, level) { + let attrStr = ''; + let val = ''; + for (let key in jObj) { + if(!Object.prototype.hasOwnProperty.call(jObj, key)) continue; + if (typeof jObj[key] === 'undefined') { + // supress undefined node only if it is not an attribute + if (this.isAttribute(key)) { + val += ''; + } + } else if (jObj[key] === null) { + // null attribute should be ignored by the attribute list, but should not cause the tag closing + if (this.isAttribute(key)) { + val += ''; + } else if (key[0] === '?') { + val += this.indentate(level) + '<' + key + '?' 
+ this.tagEndChar; + } else { + val += this.indentate(level) + '<' + key + '/' + this.tagEndChar; + } + // val += this.indentate(level) + '<' + key + '/' + this.tagEndChar; + } else if (jObj[key] instanceof Date) { + val += this.buildTextValNode(jObj[key], key, '', level); + } else if (typeof jObj[key] !== 'object') { + //premitive type + const attr = this.isAttribute(key); + if (attr) { + attrStr += this.buildAttrPairStr(attr, '' + jObj[key]); + }else { + //tag value + if (key === this.options.textNodeName) { + let newval = this.options.tagValueProcessor(key, '' + jObj[key]); + val += this.replaceEntitiesValue(newval); + } else { + val += this.buildTextValNode(jObj[key], key, '', level); + } + } + } else if (Array.isArray(jObj[key])) { + //repeated nodes + const arrLen = jObj[key].length; + let listTagVal = ""; + let listTagAttr = ""; + for (let j = 0; j < arrLen; j++) { + const item = jObj[key][j]; + if (typeof item === 'undefined') { + // supress undefined node + } else if (item === null) { + if(key[0] === "?") val += this.indentate(level) + '<' + key + '?' 
+ this.tagEndChar; + else val += this.indentate(level) + '<' + key + '/' + this.tagEndChar; + // val += this.indentate(level) + '<' + key + '/' + this.tagEndChar; + } else if (typeof item === 'object') { + if(this.options.oneListGroup){ + const result = this.j2x(item, level + 1); + listTagVal += result.val; + if (this.options.attributesGroupName && item.hasOwnProperty(this.options.attributesGroupName)) { + listTagAttr += result.attrStr + } + }else{ + listTagVal += this.processTextOrObjNode(item, key, level) + } + } else { + if (this.options.oneListGroup) { + let textValue = this.options.tagValueProcessor(key, item); + textValue = this.replaceEntitiesValue(textValue); + listTagVal += textValue; + } else { + listTagVal += this.buildTextValNode(item, key, '', level); + } + } + } + if(this.options.oneListGroup){ + listTagVal = this.buildObjectNode(listTagVal, key, listTagAttr, level); + } + val += listTagVal; + } else { + //nested node + if (this.options.attributesGroupName && key === this.options.attributesGroupName) { + const Ks = Object.keys(jObj[key]); + const L = Ks.length; + for (let j = 0; j < L; j++) { + attrStr += this.buildAttrPairStr(Ks[j], '' + jObj[key][Ks[j]]); + } + } else { + val += this.processTextOrObjNode(jObj[key], key, level) + } + } + } + return {attrStr: attrStr, val: val}; +}; + +Builder.prototype.buildAttrPairStr = function(attrName, val){ + val = this.options.attributeValueProcessor(attrName, '' + val); + val = this.replaceEntitiesValue(val); + if (this.options.suppressBooleanAttributes && val === "true") { + return ' ' + attrName; + } else return ' ' + attrName + '="' + val + '"'; +} + +function processTextOrObjNode (object, key, level) { + const result = this.j2x(object, level + 1); + if (object[this.options.textNodeName] !== undefined && Object.keys(object).length === 1) { + return this.buildTextValNode(object[this.options.textNodeName], key, result.attrStr, level); + } else { + return this.buildObjectNode(result.val, key, result.attrStr, 
level); + } +} + +Builder.prototype.buildObjectNode = function(val, key, attrStr, level) { + if(val === ""){ + if(key[0] === "?") return this.indentate(level) + '<' + key + attrStr+ '?' + this.tagEndChar; + else { + return this.indentate(level) + '<' + key + attrStr + this.closeTag(key) + this.tagEndChar; + } + }else{ + + let tagEndExp = '' + val + tagEndExp ); + } else if (this.options.commentPropName !== false && key === this.options.commentPropName && piClosingChar.length === 0) { + return this.indentate(level) + `` + this.newLine; + }else { + return ( + this.indentate(level) + '<' + key + attrStr + piClosingChar + this.tagEndChar + + val + + this.indentate(level) + tagEndExp ); + } + } +} + +Builder.prototype.closeTag = function(key){ + let closeTag = ""; + if(this.options.unpairedTags.indexOf(key) !== -1){ //unpaired + if(!this.options.suppressUnpairedNode) closeTag = "/" + }else if(this.options.suppressEmptyNode){ //empty + closeTag = "/"; + }else{ + closeTag = `>` + this.newLine; + }else if (this.options.commentPropName !== false && key === this.options.commentPropName) { + return this.indentate(level) + `` + this.newLine; + }else if(key[0] === "?") {//PI tag + return this.indentate(level) + '<' + key + attrStr+ '?' 
+ this.tagEndChar; + }else{ + let textValue = this.options.tagValueProcessor(key, val); + textValue = this.replaceEntitiesValue(textValue); + + if( textValue === ''){ + return this.indentate(level) + '<' + key + attrStr + this.closeTag(key) + this.tagEndChar; + }else{ + return this.indentate(level) + '<' + key + attrStr + '>' + + textValue + + ' 0 && this.options.processEntities){ + for (let i=0; i 0) { + indentation = EOL; + } + return arrToStr(jArray, options, "", indentation); +} + +function arrToStr(arr, options, jPath, indentation) { + let xmlStr = ""; + let isPreviousElementTag = false; + + for (let i = 0; i < arr.length; i++) { + const tagObj = arr[i]; + const tagName = propName(tagObj); + if(tagName === undefined) continue; + + let newJPath = ""; + if (jPath.length === 0) newJPath = tagName + else newJPath = `${jPath}.${tagName}`; + + if (tagName === options.textNodeName) { + let tagText = tagObj[tagName]; + if (!isStopNode(newJPath, options)) { + tagText = options.tagValueProcessor(tagName, tagText); + tagText = replaceEntitiesValue(tagText, options); + } + if (isPreviousElementTag) { + xmlStr += indentation; + } + xmlStr += tagText; + isPreviousElementTag = false; + continue; + } else if (tagName === options.cdataPropName) { + if (isPreviousElementTag) { + xmlStr += indentation; + } + xmlStr += ``; + isPreviousElementTag = false; + continue; + } else if (tagName === options.commentPropName) { + xmlStr += indentation + ``; + isPreviousElementTag = true; + continue; + } else if (tagName[0] === "?") { + const attStr = attr_to_str(tagObj[":@"], options); + const tempInd = tagName === "?xml" ? "" : indentation; + let piTextNodeName = tagObj[tagName][0][options.textNodeName]; + piTextNodeName = piTextNodeName.length !== 0 ? 
" " + piTextNodeName : ""; //remove extra spacing + xmlStr += tempInd + `<${tagName}${piTextNodeName}${attStr}?>`; + isPreviousElementTag = true; + continue; + } + let newIdentation = indentation; + if (newIdentation !== "") { + newIdentation += options.indentBy; + } + const attStr = attr_to_str(tagObj[":@"], options); + const tagStart = indentation + `<${tagName}${attStr}`; + const tagValue = arrToStr(tagObj[tagName], options, newJPath, newIdentation); + if (options.unpairedTags.indexOf(tagName) !== -1) { + if (options.suppressUnpairedNode) xmlStr += tagStart + ">"; + else xmlStr += tagStart + "/>"; + } else if ((!tagValue || tagValue.length === 0) && options.suppressEmptyNode) { + xmlStr += tagStart + "/>"; + } else if (tagValue && tagValue.endsWith(">")) { + xmlStr += tagStart + `>${tagValue}${indentation}`; + } else { + xmlStr += tagStart + ">"; + if (tagValue && indentation !== "" && (tagValue.includes("/>") || tagValue.includes("`; + } + isPreviousElementTag = true; + } + + return xmlStr; +} + +function propName(obj) { + const keys = Object.keys(obj); + for (let i = 0; i < keys.length; i++) { + const key = keys[i]; + if(!obj.hasOwnProperty(key)) continue; + if (key !== ":@") return key; + } +} + +function attr_to_str(attrMap, options) { + let attrStr = ""; + if (attrMap && !options.ignoreAttributes) { + for (let attr in attrMap) { + if(!attrMap.hasOwnProperty(attr)) continue; + let attrVal = options.attributeValueProcessor(attr, attrMap[attr]); + attrVal = replaceEntitiesValue(attrVal, options); + if (attrVal === true && options.suppressBooleanAttributes) { + attrStr += ` ${attr.substr(options.attributeNamePrefix.length)}`; + } else { + attrStr += ` ${attr.substr(options.attributeNamePrefix.length)}="${attrVal}"`; + } + } + } + return attrStr; +} + +function isStopNode(jPath, options) { + jPath = jPath.substr(0, jPath.length - options.textNodeName.length - 1); + let tagName = jPath.substr(jPath.lastIndexOf(".") + 1); + for (let index in options.stopNodes) { + 
if (options.stopNodes[index] === jPath || options.stopNodes[index] === "*." + tagName) return true; + } + return false; +} + +function replaceEntitiesValue(textValue, options) { + if (textValue && textValue.length > 0 && options.processEntities) { + for (let i = 0; i < options.entities.length; i++) { + const entity = options.entities[i]; + textValue = textValue.replace(entity.regex, entity.val); + } + } + return textValue; +} +module.exports = toXml; diff --git a/amplify/functions/downloadDocument/node_modules/fast-xml-parser/src/xmlbuilder/prettifyJs2Xml.js b/amplify/functions/downloadDocument/node_modules/fast-xml-parser/src/xmlbuilder/prettifyJs2Xml.js new file mode 100644 index 0000000..e69de29 diff --git a/amplify/functions/downloadDocument/node_modules/fast-xml-parser/src/xmlparser/DocTypeReader.js b/amplify/functions/downloadDocument/node_modules/fast-xml-parser/src/xmlparser/DocTypeReader.js new file mode 100644 index 0000000..bcf9dee --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/fast-xml-parser/src/xmlparser/DocTypeReader.js @@ -0,0 +1,152 @@ +const util = require('../util'); + +//TODO: handle comments +function readDocType(xmlData, i){ + + const entities = {}; + if( xmlData[i + 3] === 'O' && + xmlData[i + 4] === 'C' && + xmlData[i + 5] === 'T' && + xmlData[i + 6] === 'Y' && + xmlData[i + 7] === 'P' && + xmlData[i + 8] === 'E') + { + i = i+9; + let angleBracketsCount = 1; + let hasBody = false, comment = false; + let exp = ""; + for(;i') { //Read tag content + if(comment){ + if( xmlData[i - 1] === "-" && xmlData[i - 2] === "-"){ + comment = false; + angleBracketsCount--; + } + }else{ + angleBracketsCount--; + } + if (angleBracketsCount === 0) { + break; + } + }else if( xmlData[i] === '['){ + hasBody = true; + }else{ + exp += xmlData[i]; + } + } + if(angleBracketsCount !== 0){ + throw new Error(`Unclosed DOCTYPE`); + } + }else{ + throw new Error(`Invalid Tag instead of DOCTYPE`); + } + return {entities, i}; +} + +function 
readEntityExp(xmlData,i){ + //External entities are not supported + // + + //Parameter entities are not supported + // + + //Internal entities are supported + // + + //read EntityName + let entityName = ""; + for (; i < xmlData.length && (xmlData[i] !== "'" && xmlData[i] !== '"' ); i++) { + // if(xmlData[i] === " ") continue; + // else + entityName += xmlData[i]; + } + entityName = entityName.trim(); + if(entityName.indexOf(" ") !== -1) throw new Error("External entites are not supported"); + + //read Entity Value + const startChar = xmlData[i++]; + let val = "" + for (; i < xmlData.length && xmlData[i] !== startChar ; i++) { + val += xmlData[i]; + } + return [entityName, val, i]; +} + +function isComment(xmlData, i){ + if(xmlData[i+1] === '!' && + xmlData[i+2] === '-' && + xmlData[i+3] === '-') return true + return false +} +function isEntity(xmlData, i){ + if(xmlData[i+1] === '!' && + xmlData[i+2] === 'E' && + xmlData[i+3] === 'N' && + xmlData[i+4] === 'T' && + xmlData[i+5] === 'I' && + xmlData[i+6] === 'T' && + xmlData[i+7] === 'Y') return true + return false +} +function isElement(xmlData, i){ + if(xmlData[i+1] === '!' && + xmlData[i+2] === 'E' && + xmlData[i+3] === 'L' && + xmlData[i+4] === 'E' && + xmlData[i+5] === 'M' && + xmlData[i+6] === 'E' && + xmlData[i+7] === 'N' && + xmlData[i+8] === 'T') return true + return false +} + +function isAttlist(xmlData, i){ + if(xmlData[i+1] === '!' && + xmlData[i+2] === 'A' && + xmlData[i+3] === 'T' && + xmlData[i+4] === 'T' && + xmlData[i+5] === 'L' && + xmlData[i+6] === 'I' && + xmlData[i+7] === 'S' && + xmlData[i+8] === 'T') return true + return false +} +function isNotation(xmlData, i){ + if(xmlData[i+1] === '!' 
&& + xmlData[i+2] === 'N' && + xmlData[i+3] === 'O' && + xmlData[i+4] === 'T' && + xmlData[i+5] === 'A' && + xmlData[i+6] === 'T' && + xmlData[i+7] === 'I' && + xmlData[i+8] === 'O' && + xmlData[i+9] === 'N') return true + return false +} + +function validateEntityName(name){ + if (util.isName(name)) + return name; + else + throw new Error(`Invalid entity name ${name}`); +} + +module.exports = readDocType; diff --git a/amplify/functions/downloadDocument/node_modules/fast-xml-parser/src/xmlparser/OptionsBuilder.js b/amplify/functions/downloadDocument/node_modules/fast-xml-parser/src/xmlparser/OptionsBuilder.js new file mode 100644 index 0000000..bca3776 --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/fast-xml-parser/src/xmlparser/OptionsBuilder.js @@ -0,0 +1,48 @@ + +const defaultOptions = { + preserveOrder: false, + attributeNamePrefix: '@_', + attributesGroupName: false, + textNodeName: '#text', + ignoreAttributes: true, + removeNSPrefix: false, // remove NS from tag name or attribute name if true + allowBooleanAttributes: false, //a tag can have attributes without any value + //ignoreRootElement : false, + parseTagValue: true, + parseAttributeValue: false, + trimValues: true, //Trim string values of tag and attributes + cdataPropName: false, + numberParseOptions: { + hex: true, + leadingZeros: true, + eNotation: true + }, + tagValueProcessor: function(tagName, val) { + return val; + }, + attributeValueProcessor: function(attrName, val) { + return val; + }, + stopNodes: [], //nested tags will not be parsed even for errors + alwaysCreateTextNode: false, + isArray: () => false, + commentPropName: false, + unpairedTags: [], + processEntities: true, + htmlEntities: false, + ignoreDeclaration: false, + ignorePiTags: false, + transformTagName: false, + transformAttributeName: false, + updateTag: function(tagName, jPath, attrs){ + return tagName + }, + // skipEmptyListItem: false +}; + +const buildOptions = function(options) { + return 
Object.assign({}, defaultOptions, options); +}; + +exports.buildOptions = buildOptions; +exports.defaultOptions = defaultOptions; \ No newline at end of file diff --git a/amplify/functions/downloadDocument/node_modules/fast-xml-parser/src/xmlparser/OrderedObjParser.js b/amplify/functions/downloadDocument/node_modules/fast-xml-parser/src/xmlparser/OrderedObjParser.js new file mode 100644 index 0000000..ffd3f24 --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/fast-xml-parser/src/xmlparser/OrderedObjParser.js @@ -0,0 +1,601 @@ +'use strict'; +///@ts-check + +const util = require('../util'); +const xmlNode = require('./xmlNode'); +const readDocType = require("./DocTypeReader"); +const toNumber = require("strnum"); + +// const regx = +// '<((!\\[CDATA\\[([\\s\\S]*?)(]]>))|((NAME:)?(NAME))([^>]*)>|((\\/)(NAME)\\s*>))([^<]*)' +// .replace(/NAME/g, util.nameRegexp); + +//const tagsRegx = new RegExp("<(\\/?[\\w:\\-\._]+)([^>]*)>(\\s*"+cdataRegx+")*([^<]+)?","g"); +//const tagsRegx = new RegExp("<(\\/?)((\\w*:)?([\\w:\\-\._]+))([^>]*)>([^<]*)("+cdataRegx+"([^<]*))*([^<]+)?","g"); + +class OrderedObjParser{ + constructor(options){ + this.options = options; + this.currentNode = null; + this.tagsNodeStack = []; + this.docTypeEntities = {}; + this.lastEntities = { + "apos" : { regex: /&(apos|#39|#x27);/g, val : "'"}, + "gt" : { regex: /&(gt|#62|#x3E);/g, val : ">"}, + "lt" : { regex: /&(lt|#60|#x3C);/g, val : "<"}, + "quot" : { regex: /&(quot|#34|#x22);/g, val : "\""}, + }; + this.ampEntity = { regex: /&(amp|#38|#x26);/g, val : "&"}; + this.htmlEntities = { + "space": { regex: /&(nbsp|#160);/g, val: " " }, + // "lt" : { regex: /&(lt|#60);/g, val: "<" }, + // "gt" : { regex: /&(gt|#62);/g, val: ">" }, + // "amp" : { regex: /&(amp|#38);/g, val: "&" }, + // "quot" : { regex: /&(quot|#34);/g, val: "\"" }, + // "apos" : { regex: /&(apos|#39);/g, val: "'" }, + "cent" : { regex: /&(cent|#162);/g, val: "¢" }, + "pound" : { regex: /&(pound|#163);/g, val: "£" }, + "yen" 
: { regex: /&(yen|#165);/g, val: "¥" }, + "euro" : { regex: /&(euro|#8364);/g, val: "€" }, + "copyright" : { regex: /&(copy|#169);/g, val: "©" }, + "reg" : { regex: /&(reg|#174);/g, val: "®" }, + "inr" : { regex: /&(inr|#8377);/g, val: "₹" }, + "num_dec": { regex: /&#([0-9]{1,7});/g, val : (_, str) => String.fromCharCode(Number.parseInt(str, 10)) }, + "num_hex": { regex: /&#x([0-9a-fA-F]{1,6});/g, val : (_, str) => String.fromCharCode(Number.parseInt(str, 16)) }, + }; + this.addExternalEntities = addExternalEntities; + this.parseXml = parseXml; + this.parseTextData = parseTextData; + this.resolveNameSpace = resolveNameSpace; + this.buildAttributesMap = buildAttributesMap; + this.isItStopNode = isItStopNode; + this.replaceEntitiesValue = replaceEntitiesValue; + this.readStopNodeData = readStopNodeData; + this.saveTextToParentTag = saveTextToParentTag; + this.addChild = addChild; + } + +} + +function addExternalEntities(externalEntities){ + const entKeys = Object.keys(externalEntities); + for (let i = 0; i < entKeys.length; i++) { + const ent = entKeys[i]; + this.lastEntities[ent] = { + regex: new RegExp("&"+ent+";","g"), + val : externalEntities[ent] + } + } +} + +/** + * @param {string} val + * @param {string} tagName + * @param {string} jPath + * @param {boolean} dontTrim + * @param {boolean} hasAttributes + * @param {boolean} isLeafNode + * @param {boolean} escapeEntities + */ +function parseTextData(val, tagName, jPath, dontTrim, hasAttributes, isLeafNode, escapeEntities) { + if (val !== undefined) { + if (this.options.trimValues && !dontTrim) { + val = val.trim(); + } + if(val.length > 0){ + if(!escapeEntities) val = this.replaceEntitiesValue(val); + + const newval = this.options.tagValueProcessor(tagName, val, jPath, hasAttributes, isLeafNode); + if(newval === null || newval === undefined){ + //don't parse + return val; + }else if(typeof newval !== typeof val || newval !== val){ + //overwrite + return newval; + }else if(this.options.trimValues){ + return 
parseValue(val, this.options.parseTagValue, this.options.numberParseOptions); + }else{ + const trimmedVal = val.trim(); + if(trimmedVal === val){ + return parseValue(val, this.options.parseTagValue, this.options.numberParseOptions); + }else{ + return val; + } + } + } + } +} + +function resolveNameSpace(tagname) { + if (this.options.removeNSPrefix) { + const tags = tagname.split(':'); + const prefix = tagname.charAt(0) === '/' ? '/' : ''; + if (tags[0] === 'xmlns') { + return ''; + } + if (tags.length === 2) { + tagname = prefix + tags[1]; + } + } + return tagname; +} + +//TODO: change regex to capture NS +//const attrsRegx = new RegExp("([\\w\\-\\.\\:]+)\\s*=\\s*(['\"])((.|\n)*?)\\2","gm"); +const attrsRegx = new RegExp('([^\\s=]+)\\s*(=\\s*([\'"])([\\s\\S]*?)\\3)?', 'gm'); + +function buildAttributesMap(attrStr, jPath, tagName) { + if (!this.options.ignoreAttributes && typeof attrStr === 'string') { + // attrStr = attrStr.replace(/\r?\n/g, ' '); + //attrStr = attrStr || attrStr.trim(); + + const matches = util.getAllMatches(attrStr, attrsRegx); + const len = matches.length; //don't make it inline + const attrs = {}; + for (let i = 0; i < len; i++) { + const attrName = this.resolveNameSpace(matches[i][1]); + let oldVal = matches[i][4]; + let aName = this.options.attributeNamePrefix + attrName; + if (attrName.length) { + if (this.options.transformAttributeName) { + aName = this.options.transformAttributeName(aName); + } + if(aName === "__proto__") aName = "#__proto__"; + if (oldVal !== undefined) { + if (this.options.trimValues) { + oldVal = oldVal.trim(); + } + oldVal = this.replaceEntitiesValue(oldVal); + const newVal = this.options.attributeValueProcessor(attrName, oldVal, jPath); + if(newVal === null || newVal === undefined){ + //don't parse + attrs[aName] = oldVal; + }else if(typeof newVal !== typeof oldVal || newVal !== oldVal){ + //overwrite + attrs[aName] = newVal; + }else{ + //parse + attrs[aName] = parseValue( + oldVal, + this.options.parseAttributeValue, 
+ this.options.numberParseOptions + ); + } + } else if (this.options.allowBooleanAttributes) { + attrs[aName] = true; + } + } + } + if (!Object.keys(attrs).length) { + return; + } + if (this.options.attributesGroupName) { + const attrCollection = {}; + attrCollection[this.options.attributesGroupName] = attrs; + return attrCollection; + } + return attrs + } +} + +const parseXml = function(xmlData) { + xmlData = xmlData.replace(/\r\n?/g, "\n"); //TODO: remove this line + const xmlObj = new xmlNode('!xml'); + let currentNode = xmlObj; + let textData = ""; + let jPath = ""; + for(let i=0; i< xmlData.length; i++){//for each char in XML data + const ch = xmlData[i]; + if(ch === '<'){ + // const nextIndex = i+1; + // const _2ndChar = xmlData[nextIndex]; + if( xmlData[i+1] === '/') {//Closing Tag + const closeIndex = findClosingIndex(xmlData, ">", i, "Closing Tag is not closed.") + let tagName = xmlData.substring(i+2,closeIndex).trim(); + + if(this.options.removeNSPrefix){ + const colonIndex = tagName.indexOf(":"); + if(colonIndex !== -1){ + tagName = tagName.substr(colonIndex+1); + } + } + + if(this.options.transformTagName) { + tagName = this.options.transformTagName(tagName); + } + + if(currentNode){ + textData = this.saveTextToParentTag(textData, currentNode, jPath); + } + + //check if last tag of nested tag was unpaired tag + const lastTagName = jPath.substring(jPath.lastIndexOf(".")+1); + if(tagName && this.options.unpairedTags.indexOf(tagName) !== -1 ){ + throw new Error(`Unpaired tag can not be used as closing tag: `); + } + let propIndex = 0 + if(lastTagName && this.options.unpairedTags.indexOf(lastTagName) !== -1 ){ + propIndex = jPath.lastIndexOf('.', jPath.lastIndexOf('.')-1) + this.tagsNodeStack.pop(); + }else{ + propIndex = jPath.lastIndexOf("."); + } + jPath = jPath.substring(0, propIndex); + + currentNode = this.tagsNodeStack.pop();//avoid recursion, set the parent tag scope + textData = ""; + i = closeIndex; + } else if( xmlData[i+1] === '?') { + + let 
tagData = readTagExp(xmlData,i, false, "?>"); + if(!tagData) throw new Error("Pi Tag is not closed."); + + textData = this.saveTextToParentTag(textData, currentNode, jPath); + if( (this.options.ignoreDeclaration && tagData.tagName === "?xml") || this.options.ignorePiTags){ + + }else{ + + const childNode = new xmlNode(tagData.tagName); + childNode.add(this.options.textNodeName, ""); + + if(tagData.tagName !== tagData.tagExp && tagData.attrExpPresent){ + childNode[":@"] = this.buildAttributesMap(tagData.tagExp, jPath, tagData.tagName); + } + this.addChild(currentNode, childNode, jPath) + + } + + + i = tagData.closeIndex + 1; + } else if(xmlData.substr(i + 1, 3) === '!--') { + const endIndex = findClosingIndex(xmlData, "-->", i+4, "Comment is not closed.") + if(this.options.commentPropName){ + const comment = xmlData.substring(i + 4, endIndex - 2); + + textData = this.saveTextToParentTag(textData, currentNode, jPath); + + currentNode.add(this.options.commentPropName, [ { [this.options.textNodeName] : comment } ]); + } + i = endIndex; + } else if( xmlData.substr(i + 1, 2) === '!D') { + const result = readDocType(xmlData, i); + this.docTypeEntities = result.entities; + i = result.i; + }else if(xmlData.substr(i + 1, 2) === '![') { + const closeIndex = findClosingIndex(xmlData, "]]>", i, "CDATA is not closed.") - 2; + const tagExp = xmlData.substring(i + 9,closeIndex); + + textData = this.saveTextToParentTag(textData, currentNode, jPath); + + let val = this.parseTextData(tagExp, currentNode.tagname, jPath, true, false, true, true); + if(val == undefined) val = ""; + + //cdata should be set even if it is 0 length string + if(this.options.cdataPropName){ + currentNode.add(this.options.cdataPropName, [ { [this.options.textNodeName] : tagExp } ]); + }else{ + currentNode.add(this.options.textNodeName, val); + } + + i = closeIndex + 2; + }else {//Opening tag + let result = readTagExp(xmlData,i, this.options.removeNSPrefix); + let tagName= result.tagName; + const rawTagName = 
result.rawTagName; + let tagExp = result.tagExp; + let attrExpPresent = result.attrExpPresent; + let closeIndex = result.closeIndex; + + if (this.options.transformTagName) { + tagName = this.options.transformTagName(tagName); + } + + //save text as child node + if (currentNode && textData) { + if(currentNode.tagname !== '!xml'){ + //when nested tag is found + textData = this.saveTextToParentTag(textData, currentNode, jPath, false); + } + } + + //check if last tag was unpaired tag + const lastTag = currentNode; + if(lastTag && this.options.unpairedTags.indexOf(lastTag.tagname) !== -1 ){ + currentNode = this.tagsNodeStack.pop(); + jPath = jPath.substring(0, jPath.lastIndexOf(".")); + } + if(tagName !== xmlObj.tagname){ + jPath += jPath ? "." + tagName : tagName; + } + if (this.isItStopNode(this.options.stopNodes, jPath, tagName)) { + let tagContent = ""; + //self-closing tag + if(tagExp.length > 0 && tagExp.lastIndexOf("/") === tagExp.length - 1){ + if(tagName[tagName.length - 1] === "/"){ //remove trailing '/' + tagName = tagName.substr(0, tagName.length - 1); + jPath = jPath.substr(0, jPath.length - 1); + tagExp = tagName; + }else{ + tagExp = tagExp.substr(0, tagExp.length - 1); + } + i = result.closeIndex; + } + //unpaired tag + else if(this.options.unpairedTags.indexOf(tagName) !== -1){ + + i = result.closeIndex; + } + //normal tag + else{ + //read until closing tag is found + const result = this.readStopNodeData(xmlData, rawTagName, closeIndex + 1); + if(!result) throw new Error(`Unexpected end of ${rawTagName}`); + i = result.i; + tagContent = result.tagContent; + } + + const childNode = new xmlNode(tagName); + if(tagName !== tagExp && attrExpPresent){ + childNode[":@"] = this.buildAttributesMap(tagExp, jPath, tagName); + } + if(tagContent) { + tagContent = this.parseTextData(tagContent, tagName, jPath, true, attrExpPresent, true, true); + } + + jPath = jPath.substr(0, jPath.lastIndexOf(".")); + childNode.add(this.options.textNodeName, tagContent); + + 
this.addChild(currentNode, childNode, jPath) + }else{ + //selfClosing tag + if(tagExp.length > 0 && tagExp.lastIndexOf("/") === tagExp.length - 1){ + if(tagName[tagName.length - 1] === "/"){ //remove trailing '/' + tagName = tagName.substr(0, tagName.length - 1); + jPath = jPath.substr(0, jPath.length - 1); + tagExp = tagName; + }else{ + tagExp = tagExp.substr(0, tagExp.length - 1); + } + + if(this.options.transformTagName) { + tagName = this.options.transformTagName(tagName); + } + + const childNode = new xmlNode(tagName); + if(tagName !== tagExp && attrExpPresent){ + childNode[":@"] = this.buildAttributesMap(tagExp, jPath, tagName); + } + this.addChild(currentNode, childNode, jPath) + jPath = jPath.substr(0, jPath.lastIndexOf(".")); + } + //opening tag + else{ + const childNode = new xmlNode( tagName); + this.tagsNodeStack.push(currentNode); + + if(tagName !== tagExp && attrExpPresent){ + childNode[":@"] = this.buildAttributesMap(tagExp, jPath, tagName); + } + this.addChild(currentNode, childNode, jPath) + currentNode = childNode; + } + textData = ""; + i = closeIndex; + } + } + }else{ + textData += xmlData[i]; + } + } + return xmlObj.child; +} + +function addChild(currentNode, childNode, jPath){ + const result = this.options.updateTag(childNode.tagname, jPath, childNode[":@"]) + if(result === false){ + }else if(typeof result === "string"){ + childNode.tagname = result + currentNode.addChild(childNode); + }else{ + currentNode.addChild(childNode); + } +} + +const replaceEntitiesValue = function(val){ + + if(this.options.processEntities){ + for(let entityName in this.docTypeEntities){ + const entity = this.docTypeEntities[entityName]; + val = val.replace( entity.regx, entity.val); + } + for(let entityName in this.lastEntities){ + const entity = this.lastEntities[entityName]; + val = val.replace( entity.regex, entity.val); + } + if(this.options.htmlEntities){ + for(let entityName in this.htmlEntities){ + const entity = this.htmlEntities[entityName]; + val = 
val.replace( entity.regex, entity.val); + } + } + val = val.replace( this.ampEntity.regex, this.ampEntity.val); + } + return val; +} +function saveTextToParentTag(textData, currentNode, jPath, isLeafNode) { + if (textData) { //store previously collected data as textNode + if(isLeafNode === undefined) isLeafNode = Object.keys(currentNode.child).length === 0 + + textData = this.parseTextData(textData, + currentNode.tagname, + jPath, + false, + currentNode[":@"] ? Object.keys(currentNode[":@"]).length !== 0 : false, + isLeafNode); + + if (textData !== undefined && textData !== "") + currentNode.add(this.options.textNodeName, textData); + textData = ""; + } + return textData; +} + +//TODO: use jPath to simplify the logic +/** + * + * @param {string[]} stopNodes + * @param {string} jPath + * @param {string} currentTagName + */ +function isItStopNode(stopNodes, jPath, currentTagName){ + const allNodesExp = "*." + currentTagName; + for (const stopNodePath in stopNodes) { + const stopNodeExp = stopNodes[stopNodePath]; + if( allNodesExp === stopNodeExp || jPath === stopNodeExp ) return true; + } + return false; +} + +/** + * Returns the tag Expression and where it is ending handling single-double quotes situation + * @param {string} xmlData + * @param {number} i starting index + * @returns + */ +function tagExpWithClosingIndex(xmlData, i, closingChar = ">"){ + let attrBoundary; + let tagExp = ""; + for (let index = i; index < xmlData.length; index++) { + let ch = xmlData[index]; + if (attrBoundary) { + if (ch === attrBoundary) attrBoundary = "";//reset + } else if (ch === '"' || ch === "'") { + attrBoundary = ch; + } else if (ch === closingChar[0]) { + if(closingChar[1]){ + if(xmlData[index + 1] === closingChar[1]){ + return { + data: tagExp, + index: index + } + } + }else{ + return { + data: tagExp, + index: index + } + } + } else if (ch === '\t') { + ch = " " + } + tagExp += ch; + } +} + +function findClosingIndex(xmlData, str, i, errMsg){ + const closingIndex = 
xmlData.indexOf(str, i); + if(closingIndex === -1){ + throw new Error(errMsg) + }else{ + return closingIndex + str.length - 1; + } +} + +function readTagExp(xmlData,i, removeNSPrefix, closingChar = ">"){ + const result = tagExpWithClosingIndex(xmlData, i+1, closingChar); + if(!result) return; + let tagExp = result.data; + const closeIndex = result.index; + const separatorIndex = tagExp.search(/\s/); + let tagName = tagExp; + let attrExpPresent = true; + if(separatorIndex !== -1){//separate tag name and attributes expression + tagName = tagExp.substring(0, separatorIndex); + tagExp = tagExp.substring(separatorIndex + 1).trimStart(); + } + + const rawTagName = tagName; + if(removeNSPrefix){ + const colonIndex = tagName.indexOf(":"); + if(colonIndex !== -1){ + tagName = tagName.substr(colonIndex+1); + attrExpPresent = tagName !== result.data.substr(colonIndex + 1); + } + } + + return { + tagName: tagName, + tagExp: tagExp, + closeIndex: closeIndex, + attrExpPresent: attrExpPresent, + rawTagName: rawTagName, + } +} +/** + * find paired tag for a stop node + * @param {string} xmlData + * @param {string} tagName + * @param {number} i + */ +function readStopNodeData(xmlData, tagName, i){ + const startIndex = i; + // Starting at 1 since we already have an open tag + let openTagCount = 1; + + for (; i < xmlData.length; i++) { + if( xmlData[i] === "<"){ + if (xmlData[i+1] === "/") {//close tag + const closeIndex = findClosingIndex(xmlData, ">", i, `${tagName} is not closed`); + let closeTagName = xmlData.substring(i+2,closeIndex).trim(); + if(closeTagName === tagName){ + openTagCount--; + if (openTagCount === 0) { + return { + tagContent: xmlData.substring(startIndex, i), + i : closeIndex + } + } + } + i=closeIndex; + } else if(xmlData[i+1] === '?') { + const closeIndex = findClosingIndex(xmlData, "?>", i+1, "StopNode is not closed.") + i=closeIndex; + } else if(xmlData.substr(i + 1, 3) === '!--') { + const closeIndex = findClosingIndex(xmlData, "-->", i+3, "StopNode is not 
closed.") + i=closeIndex; + } else if(xmlData.substr(i + 1, 2) === '![') { + const closeIndex = findClosingIndex(xmlData, "]]>", i, "StopNode is not closed.") - 2; + i=closeIndex; + } else { + const tagData = readTagExp(xmlData, i, '>') + + if (tagData) { + const openTagName = tagData && tagData.tagName; + if (openTagName === tagName && tagData.tagExp[tagData.tagExp.length-1] !== "/") { + openTagCount++; + } + i=tagData.closeIndex; + } + } + } + }//end for loop +} + +function parseValue(val, shouldParse, options) { + if (shouldParse && typeof val === 'string') { + //console.log(options) + const newval = val.trim(); + if(newval === 'true' ) return true; + else if(newval === 'false' ) return false; + else return toNumber(val, options); + } else { + if (util.isExist(val)) { + return val; + } else { + return ''; + } + } +} + + +module.exports = OrderedObjParser; diff --git a/amplify/functions/downloadDocument/node_modules/fast-xml-parser/src/xmlparser/XMLParser.js b/amplify/functions/downloadDocument/node_modules/fast-xml-parser/src/xmlparser/XMLParser.js new file mode 100644 index 0000000..ffaf59b --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/fast-xml-parser/src/xmlparser/XMLParser.js @@ -0,0 +1,58 @@ +const { buildOptions} = require("./OptionsBuilder"); +const OrderedObjParser = require("./OrderedObjParser"); +const { prettify} = require("./node2json"); +const validator = require('../validator'); + +class XMLParser{ + + constructor(options){ + this.externalEntities = {}; + this.options = buildOptions(options); + + } + /** + * Parse XML dats to JS object + * @param {string|Buffer} xmlData + * @param {boolean|Object} validationOption + */ + parse(xmlData,validationOption){ + if(typeof xmlData === "string"){ + }else if( xmlData.toString){ + xmlData = xmlData.toString(); + }else{ + throw new Error("XML data is accepted in String or Bytes[] form.") + } + if( validationOption){ + if(validationOption === true) validationOption = {}; //validate with 
default options + + const result = validator.validate(xmlData, validationOption); + if (result !== true) { + throw Error( `${result.err.msg}:${result.err.line}:${result.err.col}` ) + } + } + const orderedObjParser = new OrderedObjParser(this.options); + orderedObjParser.addExternalEntities(this.externalEntities); + const orderedResult = orderedObjParser.parseXml(xmlData); + if(this.options.preserveOrder || orderedResult === undefined) return orderedResult; + else return prettify(orderedResult, this.options); + } + + /** + * Add Entity which is not by default supported by this library + * @param {string} key + * @param {string} value + */ + addEntity(key, value){ + if(value.indexOf("&") !== -1){ + throw new Error("Entity value can't have '&'") + }else if(key.indexOf("&") !== -1 || key.indexOf(";") !== -1){ + throw new Error("An entity must be set without '&' and ';'. Eg. use '#xD' for ' '") + }else if(value === "&"){ + throw new Error("An entity with value '&' is not permitted"); + }else{ + this.externalEntities[key] = value; + } + } +} + +module.exports = XMLParser; \ No newline at end of file diff --git a/amplify/functions/downloadDocument/node_modules/fast-xml-parser/src/xmlparser/node2json.js b/amplify/functions/downloadDocument/node_modules/fast-xml-parser/src/xmlparser/node2json.js new file mode 100644 index 0000000..3045573 --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/fast-xml-parser/src/xmlparser/node2json.js @@ -0,0 +1,113 @@ +'use strict'; + +/** + * + * @param {array} node + * @param {any} options + * @returns + */ +function prettify(node, options){ + return compress( node, options); +} + +/** + * + * @param {array} arr + * @param {object} options + * @param {string} jPath + * @returns object + */ +function compress(arr, options, jPath){ + let text; + const compressedObj = {}; + for (let i = 0; i < arr.length; i++) { + const tagObj = arr[i]; + const property = propName(tagObj); + let newJpath = ""; + if(jPath === undefined) newJpath 
= property; + else newJpath = jPath + "." + property; + + if(property === options.textNodeName){ + if(text === undefined) text = tagObj[property]; + else text += "" + tagObj[property]; + }else if(property === undefined){ + continue; + }else if(tagObj[property]){ + + let val = compress(tagObj[property], options, newJpath); + const isLeaf = isLeafTag(val, options); + + if(tagObj[":@"]){ + assignAttributes( val, tagObj[":@"], newJpath, options); + }else if(Object.keys(val).length === 1 && val[options.textNodeName] !== undefined && !options.alwaysCreateTextNode){ + val = val[options.textNodeName]; + }else if(Object.keys(val).length === 0){ + if(options.alwaysCreateTextNode) val[options.textNodeName] = ""; + else val = ""; + } + + if(compressedObj[property] !== undefined && compressedObj.hasOwnProperty(property)) { + if(!Array.isArray(compressedObj[property])) { + compressedObj[property] = [ compressedObj[property] ]; + } + compressedObj[property].push(val); + }else{ + //TODO: if a node is not an array, then check if it should be an array + //also determine if it is a leaf node + if (options.isArray(property, newJpath, isLeaf )) { + compressedObj[property] = [val]; + }else{ + compressedObj[property] = val; + } + } + } + + } + // if(text && text.length > 0) compressedObj[options.textNodeName] = text; + if(typeof text === "string"){ + if(text.length > 0) compressedObj[options.textNodeName] = text; + }else if(text !== undefined) compressedObj[options.textNodeName] = text; + return compressedObj; +} + +function propName(obj){ + const keys = Object.keys(obj); + for (let i = 0; i < keys.length; i++) { + const key = keys[i]; + if(key !== ":@") return key; + } +} + +function assignAttributes(obj, attrMap, jpath, options){ + if (attrMap) { + const keys = Object.keys(attrMap); + const len = keys.length; //don't make it inline + for (let i = 0; i < len; i++) { + const atrrName = keys[i]; + if (options.isArray(atrrName, jpath + "." 
+ atrrName, true, true)) { + obj[atrrName] = [ attrMap[atrrName] ]; + } else { + obj[atrrName] = attrMap[atrrName]; + } + } + } +} + +function isLeafTag(obj, options){ + const { textNodeName } = options; + const propCount = Object.keys(obj).length; + + if (propCount === 0) { + return true; + } + + if ( + propCount === 1 && + (obj[textNodeName] || typeof obj[textNodeName] === "boolean" || obj[textNodeName] === 0) + ) { + return true; + } + + return false; +} +exports.prettify = prettify; diff --git a/amplify/functions/downloadDocument/node_modules/fast-xml-parser/src/xmlparser/xmlNode.js b/amplify/functions/downloadDocument/node_modules/fast-xml-parser/src/xmlparser/xmlNode.js new file mode 100644 index 0000000..9319524 --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/fast-xml-parser/src/xmlparser/xmlNode.js @@ -0,0 +1,25 @@ +'use strict'; + +class XmlNode{ + constructor(tagname) { + this.tagname = tagname; + this.child = []; //nested tags, text, cdata, comments in order + this[":@"] = {}; //attributes map + } + add(key,val){ + // this.child.push( {name : key, val: val, isCdata: isCdata }); + if(key === "__proto__") key = "#__proto__"; + this.child.push( {[key]: val }); + } + addChild(node) { + if(node.tagname === "__proto__") node.tagname = "#__proto__"; + if(node[":@"] && Object.keys(node[":@"]).length > 0){ + this.child.push( { [node.tagname]: node.child, [":@"]: node[":@"] }); + }else{ + this.child.push( { [node.tagname]: node.child }); + } + }; +}; + + +module.exports = XmlNode; \ No newline at end of file diff --git a/amplify/functions/downloadDocument/node_modules/mnemonist/CHANGELOG.md b/amplify/functions/downloadDocument/node_modules/mnemonist/CHANGELOG.md new file mode 100644 index 0000000..286c2fe --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/mnemonist/CHANGELOG.md @@ -0,0 +1,305 @@ +# Changelog + +## 0.38.3 + +* Refactoring `VPTree` memory layout. +* Fixing `VPTree.nearestNeighbors` edge case. 
+* Various `VPTree` optimizations. + +## 0.38.2 + +* Fixing `Heap.replace` & `Heap.pusphpop` types (@wholenews). + +## 0.38.1 + +* Fixing `SparseQueueSet` deopt. + +## 0.38.0 + +* Adding `TrieMap.update` (@wholenews). + +## 0.37.0 + +* Adding `DefaultWeakMap` (@yoursunny). + +## 0.36.1 + +* Improved typings for iteration methods (@yoursunny). + +## 0.36.0 + +* Adding `SparseQueueSet`. + +## 0.35.0 + +* Adding `SparseMap`. +* Enhancing `SparseSet` performance. + +## 0.34.0 + +* Adding `set.overlap`. + +## 0.33.1 + +* Fixing build by including missing `sort` folder. + +## 0.33.0 + +* Adding `KDTree`. +* Adding `set.intersectionSize`. +* Adding `set.unionSize`. +* Adding `set.jaccard`. +* Adding `FixedReverseHeap.peek`. + +## 0.32.0 + +* Adding `PassjoinIndex`. + +## 0.31.3 + +* Fixing `Heap.nsmallest` & `Heap.nlargest` docs & typings. +* Fixing `Heap.nsmallest` & `Heap.nlargest` not using custom comparator function when `n = 1`. + +## 0.31.2 + +* Fixing `BitSet` & `BitVector` iteration methods edge case. +* Fixing `BitSet` & `BitVector` `#.select` method. + +## 0.31.1 + +* Fixing `BitSet` & `BitVector` `#.size` caching edge case. + +## 0.31.0 + +* Adding `DefaultMap.peek`. +* Fixing some error messages. +* Fixing `BitSet` & `BitVector` `#.size` caching. + +## 0.30.0 + +* Stricter TS definitions (`--noImplicitAny`, `--noImplicitReturns`) (@pbadenski). + +## 0.29.0 + +* Adding `LRUCache.setpop` and `LRUMap.setpop` (@veggiesaurus). + +## 0.28.0 + +* Adding `LRUCache.peek` and `LRUMap.peek` (@veggiesaurus). + +## 0.27.2 + +* Fixing usage with TypeScript. + +## 0.27.1 + +* Fixing `CircularBuffer` and `FixedDeque` types. + +## 0.27.0 + +* Adding `FixedDeque`. +* Adding `CircularBuffer.unshift`. +* Changing `CircularBuffer` semantics to now overwrite values when wrapping around. + +## 0.26.0 + +* Adding the `DefaultMap.autoIncrement` factory. +* Removing the `IncrementalMap`. +* Fixing `Vector` typings. +* Fixing `BitVector` typings. 
+ +## 0.25.1 + +* Fixing custom inspect methods for node >= 10. + +## 0.25.0 + +* Adding `LRUCache`. +* Adding `LRUMap`. + +## 0.24.0 + +* Adding `#.forEachMultiplicity` to `MultiSet`. +* Adding `#.forEachAssociation` to `MultiMap`. +* Adding `DefaultMap`. + +## 0.23.0 + +* Adding `FixedReverseHeap`. +* Adding `Heap.nsmallest` & `Heap.nlargest`. +* Adding `MultiSet.isSubset` & `MultiSet.isSuperset`. +* Adding `#.top` to `MultiSet`. +* Adding missing `Heap` types. +* Renaming `FiniteStack` to `FixedStack`. + +## 0.22.0 + +* Adding `FuzzyMultiMap.dimension`. +* Adding `#.consume` to `Heap`. +* Adding `#.replace` to `Heap`. +* Adding `#.pushpop` to `Heap`. +* Improving `BitSet` and `BitVector` `#.toJSON`. +* Improving `FiniteStack.from` & `CircularBuffer.from` performance when handling arrays. +* `Heap.from` is now linear time. +* Refactoring `Heap` inner logic. +* Fixing `CircularBuffer`'s `#.unshift` to `#.shift`. +* Fixing `SparseSet.delete` return consistency. + +## 0.21.0 + +* Library is now fully typed. +* Adding `CircularBuffer`. +* Adding `#.toArray` to `Heap`. + +## 0.20.0 + +* Adding `TrieMap`. +* Reworking the `Trie` considerably. + +## 0.19.0 + +* Adding `StaticIntervalTree`. +* Adding `PointerVector`. +* Adding `Queue.of`. +* Adding `Stack.of`. +* Improving `Vector` & `BitVector` reallocation performance. +* Improving `InvertedIndex` performance. + +## 0.18.O + +* Adding `FiniteStack`. +* Adding `#.keys` to `MultiSet`. +* Adding `#.count` alias to `MultiSet`. +* Adding `#.count` alias to `MultiMap`. +* Adding `#.remove` to `MultiMap`. +* Adding `Vector.from`. +* Adding `#.values` to `Vector`. +* Adding `#.entries` to `Vector`. +* Fixing bug when feeding invalid values to a `MultiSet`. +* Fixing `.from` static methods not taking byte arrays into account. +* Fixing bugs related to `Stack.pop` edge cases. +* Optimizing `Stack` performance. + +## 0.17.0 + +* Adding `HashedArrayTree`. +* Adding `BitVector`. +* Adding `#.frequency` to `MultiSet`. 
+* Adding `#.grow` to `DynamicArray`. +* Adding `#.reallocate` to `DynamicArray`. +* Adding `#.resize` to `DynamicArray`. +* Fixing several `MultiSet` issues. +* Renaming `DynamicArray` to `Vector`. +* Renaming the `DynamicArray.initialLength` option to `initialCapacity`. +* Renaming `DynamicArray.allocated` to `capacity`. +* Optimizing `MultiSet` performance. +* Optimizing `SparseSet` memory consumption. + +## 0.16.0 + +* Adding `#.has` to `FuzzyMap`. +* Adding `#.has` to `FuzzyMultiMap`. +* Adding `#.multiplicity` to `MultiMap`. +* Renaming `RangeMap` to `IncrementalMap`. +* Renaming `Index` to `FuzzyMap`. +* Renaming `MultiIndex` to `FuzzyMultiMap`. +* Renaming `DynamicArray` `initialSize` option to `initialLength`. +* Improving `MultiMap.set` performance. +* Improving `BitSet.reset` performance. +* Improving `Set.isSubset` & `Set.isSuperset` performance. + +## 0.15.0 + +* Adding `RangeMap`. +* Improving `MultiSet`. +* Out-of-bound `DynamicArray.set` will now correctly grow the array. +* Fixing `StaticDisjointSet.find` complexity. + +## O.14.0 + +* Adding `DynamicArray`. +* Adding `SparseSet`. +* Adding `StaticDisjointSet`. +* Adding iterator methods to `BitSet`. +* Adding `#.rank` & `#.select` to `BitSet`. +* `BitSet` now relies on `Uint32Array` rather than `Uint8Array`. +* Improving `BitSet` performances. +* Using `obliterator` to handle iterators. + +## 0.13.0 + +* Adding `BiMap`. +* Adding `BitSet`. +* Fixing universal iterator. + +## 0.12.0 + +* Adding `InvertedIndex`. + +## 0.11.0 + +* Adding bunch of set functions. + +## 0.10.2 + +* Fixing error in `Trie.get`. +* Fixing error related to `Trie.size`. + +## 0.10.1 + +* Fixing an error in `VPTree.neighbors`. + +## 0.10.0 + +* Adding `Index`. +* Adding `MultiIndex`. +* Adding `MultiMap`. +* Adding `MultiSet`. +* Adding `SymSpell`. + +## 0.9.0 + +* Adding `VPTree`. + +## 0.8.0 + +* Adding `BKTree`. + +## 0.7.0 + +* Adding `BloomFilter`. +* Adding static `#.from` method to all relevant structures. 
+* Adding iterators to all relevant structures. +* Removing the `MultiSet` until proper API is found. + +## 0.6.0 + +* Adding `MultiSet`. + +## 0.5.0 + +* Adding `SuffixArray` & `GeneralizedSuffixArray`. +* Better `Trie` sentinel. + +## 0.4.0 + +* Adding `Queue`. +* Adding possibility to pass custom comparator to `Heap` & `FibonacciHeap`. + +## 0.3.0 + +* Adding `FibonacciHeap`. +* Fixing bug related to `Heap`. + +## 0.2.0 + +* Adding `Trie`. + +## 0.1.0 + +* Adding `Heap`. + +## 0.0.1 + +* Adding `LinkedList`. +* Adding `Stack`. diff --git a/amplify/functions/downloadDocument/node_modules/mnemonist/LICENSE.txt b/amplify/functions/downloadDocument/node_modules/mnemonist/LICENSE.txt new file mode 100644 index 0000000..2d8d205 --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/mnemonist/LICENSE.txt @@ -0,0 +1,21 @@ +The MIT License (MIT) + +Copyright (c) 2016 Guillaume Plique (Yomguithereal) + +Permission is hereby granted, free of charge, to any person obtaining a copy +of this software and associated documentation files (the "Software"), to deal +in the Software without restriction, including without limitation the rights +to use, copy, modify, merge, publish, distribute, sublicense, and/or sell +copies of the Software, and to permit persons to whom the Software is +furnished to do so, subject to the following conditions: + +The above copyright notice and this permission notice shall be included in +all copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, +OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN +THE SOFTWARE. 
diff --git a/amplify/functions/downloadDocument/node_modules/mnemonist/README.md b/amplify/functions/downloadDocument/node_modules/mnemonist/README.md new file mode 100644 index 0000000..dffc9ef --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/mnemonist/README.md @@ -0,0 +1,112 @@ +[![Build Status](https://travis-ci.org/Yomguithereal/mnemonist.svg)](https://travis-ci.org/Yomguithereal/mnemonist) + +# Mnemonist + +Mnemonist is a curated collection of data structures for the JavaScript language. + +It gathers classic data structures (think heap, trie etc.) as well as more exotic ones such as Buckhard-Keller trees etc. + +It strives at being: + +* As performant as possible for a high-level language. +* Completely modular (don't need to import the whole library just to use a simple heap). +* Simple & straightforward to use and consistent with JavaScript standard objects' API. +* Completely typed and comfortably usable with Typescript. + +## Installation + +``` +npm install --save mnemonist +``` + +## Documentation + +Full documentation for the library can be found [here](https://yomguithereal.github.io/mnemonist). 
+ +**Classics** + +* [Heap](https://yomguithereal.github.io/mnemonist/heap) +* [Linked List](https://yomguithereal.github.io/mnemonist/linked-list) +* [LRUCache](https://yomguithereal.github.io/mnemonist/lru-cache), [LRUMap](https://yomguithereal.github.io/mnemonist/lru-map) +* [MultiMap](https://yomguithereal.github.io/mnemonist/multi-map) +* [MultiSet](https://yomguithereal.github.io/mnemonist/multi-set) +* [Queue](https://yomguithereal.github.io/mnemonist/queue) +* [Set (helpers)](https://yomguithereal.github.io/mnemonist/set) +* [Stack](https://yomguithereal.github.io/mnemonist/stack) +* [Trie](https://yomguithereal.github.io/mnemonist/trie) +* [TrieMap](https://yomguithereal.github.io/mnemonist/trie-map) + +**Low-level & structures for very specific use cases** + +* [Circular Buffer](https://yomguithereal.github.io/mnemonist/circular-buffer) +* [Fixed Deque](https://yomguithereal.github.io/mnemonist/fixed-deque) +* [Fibonacci Heap](https://yomguithereal.github.io/mnemonist/fibonacci-heap) +* [Fixed Reverse Heap](https://yomguithereal.github.io/mnemonist/fixed-reverse-heap) +* [Fixed Stack](https://yomguithereal.github.io/mnemonist/fixed-stack) +* [Hashed Array Tree](https://yomguithereal.github.io/mnemonist/hashed-array-tree) +* [Static DisjointSet](https://yomguithereal.github.io/mnemonist/static-disjoint-set) +* [SparseQueueSet](https://yomguithereal.github.io/mnemonist/sparse-queue-set) +* [SparseMap](https://yomguithereal.github.io/mnemonist/sparse-map) +* [SparseSet](https://yomguithereal.github.io/mnemonist/sparse-set) +* [Suffix Array](https://yomguithereal.github.io/mnemonist/suffix-array) +* [Generalized Suffix Array](https://yomguithereal.github.io/mnemonist/generalized-suffix-array) +* [Vector](https://yomguithereal.github.io/mnemonist/vector) + +**Information retrieval & Natural language processing** + +* [Fuzzy Map](https://yomguithereal.github.io/mnemonist/fuzzy-map) +* [Fuzzy MultiMap](https://yomguithereal.github.io/mnemonist/fuzzy-multi-map) 
+* [Inverted Index](https://yomguithereal.github.io/mnemonist/inverted-index) +* [Passjoin Index](https://yomguithereal.github.io/mnemonist/passjoin-index) +* [SymSpell](https://yomguithereal.github.io/mnemonist/symspell) + +**Space & time indexation** + +* [Static IntervalTree](https://yomguithereal.github.io/mnemonist/static-interval-tree) +* [KD-Tree](https://yomguithereal.github.io/mnemonist/kd-tree) + +**Metric space indexation** + +* [Burkhard-Keller Tree](https://yomguithereal.github.io/mnemonist/bk-tree) +* [Vantage Point Tree](https://yomguithereal.github.io/mnemonist/vp-tree) + +**Probabilistic & succinct data structures** + +* [BitSet](https://yomguithereal.github.io/mnemonist/bit-set) +* [BitVector](https://yomguithereal.github.io/mnemonist/bit-vector) +* [Bloom Filter](https://yomguithereal.github.io/mnemonist/bloom-filter) + +**Utility classes** + +* [BiMap](https://yomguithereal.github.io/mnemonist/bi-map) +* [DefaultMap](https://yomguithereal.github.io/mnemonist/default-map) +* [DefaultWeakMap](https://yomguithereal.github.io/mnemonist/default-weak-map) + +--- + +Note that this list does not include a `Graph` data structure, whose implementation is usually far too complex for the scope of this library. + +However, we advise the reader to take a look at the [`graphology`](https://graphology.github.io/) library instead. + +Don't find the data structure you need? Maybe we can work it out [together](https://github.com/Yomguithereal/mnemonist/issues). + +## Contribution + +Contributions are obviously welcome. Be sure to lint the code & add relevant unit tests. 
+ +``` +# Installing +git clone git@github.com:Yomguithereal/mnemonist.git +cd mnemonist +npm install + +# Linting +npm run lint + +# Running the unit tests +npm test +``` + +## License + +[MIT](LICENSE.txt) diff --git a/amplify/functions/downloadDocument/node_modules/mnemonist/bi-map.d.ts b/amplify/functions/downloadDocument/node_modules/mnemonist/bi-map.d.ts new file mode 100644 index 0000000..d0c2f76 --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/mnemonist/bi-map.d.ts @@ -0,0 +1,46 @@ +/** + * Mnemonist BiMap Typings + * ======================== + */ +export class InverseMap implements Iterable<[K, V]> { + + // Members + size: number; + inverse: BiMap; + + // Methods + clear(): void; + set(key: K, value: V): this; + delete(key: K): boolean; + has(key: K): boolean; + get(key: K): V; + forEach(callback: (value: V, key: K, map: this) => void, scope?: any): void; + keys(): IterableIterator; + values(): IterableIterator; + entries(): IterableIterator<[K, V]>; + [Symbol.iterator](): IterableIterator<[K, V]>; + inspect(): any; +} + +export default class BiMap implements Iterable<[K, V]> { + + // Members + size: number; + inverse: InverseMap; + + // Methods + clear(): void; + set(key: K, value: V): this; + delete(key: K): boolean; + has(key: K): boolean; + get(key: K): V; + forEach(callback: (value: V, key: K, map: this) => void, scope?: any): void; + keys(): IterableIterator; + values(): IterableIterator; + entries(): IterableIterator<[K, V]>; + [Symbol.iterator](): IterableIterator<[K, V]>; + inspect(): any; + + // Statics + static from(iterable: Iterable<[I, J]> | {[key: string]: J}): BiMap; +} diff --git a/amplify/functions/downloadDocument/node_modules/mnemonist/bi-map.js b/amplify/functions/downloadDocument/node_modules/mnemonist/bi-map.js new file mode 100644 index 0000000..3d5d03f --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/mnemonist/bi-map.js @@ -0,0 +1,195 @@ +/** + * Mnemonist BiMap + * ================ + * + * 
JavaScript implementation of a BiMap. + */ +var forEach = require('obliterator/foreach'); + +/** + * Inverse Map. + * + * @constructor + */ +function InverseMap(original) { + + this.size = 0; + this.items = new Map(); + this.inverse = original; +} + +/** + * BiMap. + * + * @constructor + */ +function BiMap() { + + this.size = 0; + this.items = new Map(); + this.inverse = new InverseMap(this); +} + +/** + * Method used to clear the map. + * + * @return {undefined} + */ +function clear() { + this.size = 0; + this.items.clear(); + this.inverse.items.clear(); +} + +BiMap.prototype.clear = clear; +InverseMap.prototype.clear = clear; + +/** + * Method used to set a relation. + * + * @param {any} key - Key. + * @param {any} value - Value. + * @return {BiMap|InverseMap} + */ +function set(key, value) { + + // First we need to attempt to see if the relation is not flawed + if (this.items.has(key)) { + var currentValue = this.items.get(key); + + // The relation already exists, we do nothing + if (currentValue === value) + return this; + else + this.inverse.items.delete(currentValue); + } + + if (this.inverse.items.has(value)) { + var currentKey = this.inverse.items.get(value); + + if (currentKey === key) + return this; + else + this.items.delete(currentKey); + } + + // Here we actually add the relation + this.items.set(key, value); + this.inverse.items.set(value, key); + + // Size + this.size = this.items.size; + this.inverse.size = this.inverse.items.size; + + return this; +} + +BiMap.prototype.set = set; +InverseMap.prototype.set = set; + +/** + * Method used to delete a relation. + * + * @param {any} key - Key. 
+ * @return {boolean} + */ +function del(key) { + if (this.items.has(key)) { + var currentValue = this.items.get(key); + + this.items.delete(key); + this.inverse.items.delete(currentValue); + + // Size + this.size = this.items.size; + this.inverse.size = this.inverse.items.size; + + return true; + } + + return false; +} + +BiMap.prototype.delete = del; +InverseMap.prototype.delete = del; + +/** + * Mapping some Map prototype function unto our two classes. + */ +var METHODS = ['has', 'get', 'forEach', 'keys', 'values', 'entries']; + +METHODS.forEach(function(name) { + BiMap.prototype[name] = InverseMap.prototype[name] = function() { + return Map.prototype[name].apply(this.items, arguments); + }; +}); + +/** + * Attaching the #.values method to Symbol.iterator if possible. + */ +if (typeof Symbol !== 'undefined') { + BiMap.prototype[Symbol.iterator] = BiMap.prototype.entries; + InverseMap.prototype[Symbol.iterator] = InverseMap.prototype.entries; +} + +/** + * Convenience known methods. + */ +BiMap.prototype.inspect = function() { + var dummy = { + left: this.items, + right: this.inverse.items + }; + + // Trick so that node displays the name of the constructor + Object.defineProperty(dummy, 'constructor', { + value: BiMap, + enumerable: false + }); + + return dummy; +}; + +if (typeof Symbol !== 'undefined') + BiMap.prototype[Symbol.for('nodejs.util.inspect.custom')] = BiMap.prototype.inspect; + +InverseMap.prototype.inspect = function() { + var dummy = { + left: this.inverse.items, + right: this.items + }; + + // Trick so that node displays the name of the constructor + Object.defineProperty(dummy, 'constructor', { + value: InverseMap, + enumerable: false + }); + + return dummy; +}; + +if (typeof Symbol !== 'undefined') + InverseMap.prototype[Symbol.for('nodejs.util.inspect.custom')] = InverseMap.prototype.inspect; + + +/** + * Static @.from function taking an arbitrary iterable & converting it into + * a bimap. + * + * @param {Iterable} iterable - Target iterable. 
+ * @return {BiMap} + */ +BiMap.from = function(iterable) { + var bimap = new BiMap(); + + forEach(iterable, function(value, key) { + bimap.set(key, value); + }); + + return bimap; +}; + +/** + * Exporting. + */ +module.exports = BiMap; diff --git a/amplify/functions/downloadDocument/node_modules/mnemonist/bit-set.d.ts b/amplify/functions/downloadDocument/node_modules/mnemonist/bit-set.d.ts new file mode 100644 index 0000000..cfeb0d1 --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/mnemonist/bit-set.d.ts @@ -0,0 +1,29 @@ +/** + * Mnemonist BitSet Typings + * ========================= + */ +export default class BitSet implements Iterable { + + // Members + length: number; + size: number; + + // Constructor + constructor(length: number); + + // Methods + clear(): void; + set(index: number, value?: boolean | number): void; + reset(index: number, value: boolean | number): void; + flip(index: number, value: boolean | number): void; + get(index: number): number; + test(index: number): boolean; + rank(r: number): number; + select(r: number): number; + forEach(callback: (index: number, value: number, set: this) => void, scope?: any): void; + values(): IterableIterator; + entries(): IterableIterator<[number, number]>; + [Symbol.iterator](): IterableIterator; + inspect(): any; + toJSON(): Array; +} diff --git a/amplify/functions/downloadDocument/node_modules/mnemonist/bit-set.js b/amplify/functions/downloadDocument/node_modules/mnemonist/bit-set.js new file mode 100644 index 0000000..f2445a0 --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/mnemonist/bit-set.js @@ -0,0 +1,379 @@ +/** + * Mnemonist BitSet + * ================= + * + * JavaScript implementation of a fixed-size BitSet based upon a Uint32Array. 
+ * + * Notes: + * - (i >> 5) is the same as ((i / 32) | 0) + * - (i & 0x0000001f) is the same as (i % 32) + * - I could use a Float64Array to store more in less blocks but I would lose + * the benefits of byte comparison to keep track of size without popcounts. + */ +var Iterator = require('obliterator/iterator'), + bitwise = require('./utils/bitwise.js'); + +/** + * BitSet. + * + * @constructor + */ +function BitSet(length) { + + // Properties + this.length = length; + this.clear(); + + // Methods + + // Statics +} + +/** + * Method used to clear the bit set. + * + * @return {undefined} + */ +BitSet.prototype.clear = function() { + + // Properties + this.size = 0; + this.array = new Uint32Array(Math.ceil(this.length / 32)); +}; + +/** + * Method used to set the given bit's value. + * + * @param {number} index - Target bit index. + * @param {number} value - Value to set. + * @return {BitSet} + */ +BitSet.prototype.set = function(index, value) { + var byteIndex = index >> 5, + pos = index & 0x0000001f, + oldBytes = this.array[byteIndex], + newBytes; + + if (value === 0 || value === false) + newBytes = this.array[byteIndex] &= ~(1 << pos); + else + newBytes = this.array[byteIndex] |= (1 << pos); + + // The operands of all bitwise operators are converted to *signed* 32-bit integers. + // Source: https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Operators/Bitwise_Operators#Signed_32-bit_integers + // Shifting by 31 changes the sign (i.e. 1 << 31 = -2147483648). + // Therefore, get unsigned representation by applying '>>> 0'. + newBytes = newBytes >>> 0; + + // Updating size + if (newBytes > oldBytes) + this.size++; + else if (newBytes < oldBytes) + this.size--; + + return this; +}; + +/** +* Method used to reset the given bit's value. +* +* @param {number} index - Target bit index. 
+* @return {BitSet} +*/ +BitSet.prototype.reset = function(index) { + var byteIndex = index >> 5, + pos = index & 0x0000001f, + oldBytes = this.array[byteIndex], + newBytes; + + newBytes = this.array[byteIndex] &= ~(1 << pos); + + // Updating size + if (newBytes < oldBytes) + this.size--; + + return this; +}; + +/** + * Method used to flip the value of the given bit. + * + * @param {number} index - Target bit index. + * @return {BitSet} + */ +BitSet.prototype.flip = function(index) { + var byteIndex = index >> 5, + pos = index & 0x0000001f, + oldBytes = this.array[byteIndex]; + + var newBytes = this.array[byteIndex] ^= (1 << pos); + + // Get unsigned representation. + newBytes = newBytes >>> 0; + + // Updating size + if (newBytes > oldBytes) + this.size++; + else if (newBytes < oldBytes) + this.size--; + + return this; +}; + +/** + * Method used to get the given bit's value. + * + * @param {number} index - Target bit index. + * @return {number} + */ +BitSet.prototype.get = function(index) { + var byteIndex = index >> 5, + pos = index & 0x0000001f; + + return (this.array[byteIndex] >> pos) & 1; +}; + +/** + * Method used to test the given bit's value. + * + * @param {number} index - Target bit index. + * @return {BitSet} + */ +BitSet.prototype.test = function(index) { + return Boolean(this.get(index)); +}; + +/** + * Method used to return the number of 1 from the beginning of the set up to + * the ith index. + * + * @param {number} i - Ith index (cannot be > length). 
+ * @return {number} + */ +BitSet.prototype.rank = function(i) { + if (this.size === 0) + return 0; + + var byteIndex = i >> 5, + pos = i & 0x0000001f, + r = 0; + + // Accessing the bytes before the last one + for (var j = 0; j < byteIndex; j++) + r += bitwise.table8Popcount(this.array[j]); + + // Handling masked last byte + var maskedByte = this.array[byteIndex] & ((1 << pos) - 1); + + r += bitwise.table8Popcount(maskedByte); + + return r; +}; + +/** + * Method used to return the position of the rth 1 in the set or -1 if the + * set is empty. + * + * Note: usually select is implemented using binary search over rank but I + * tend to think the following linear implementation is faster since here + * rank is O(n) anyway. + * + * @param {number} r - Rth 1 to select (should be < length). + * @return {number} + */ +BitSet.prototype.select = function(r) { + if (this.size === 0) + return -1; + + // TODO: throw? + if (r >= this.length) + return -1; + + var byte, + b = 32, + p = 0, + c = 0; + + for (var i = 0, l = this.array.length; i < l; i++) { + byte = this.array[i]; + + // The byte is empty, let's continue + if (byte === 0) + continue; + + // TODO: This branching might not be useful here + if (i === l - 1) + b = this.length % 32 || 32; + + // TODO: popcount should speed things up here + + for (var j = 0; j < b; j++, p++) { + c += (byte >> j) & 1; + + if (c === r) + return p; + } + } +}; + +/** + * Method used to iterate over the bit set's values. + * + * @param {function} callback - Function to call for each item. + * @param {object} scope - Optional scope. + * @return {undefined} + */ +BitSet.prototype.forEach = function(callback, scope) { + scope = arguments.length > 1 ? 
scope : this; + + var length = this.length, + byte, + bit, + b = 32; + + for (var i = 0, l = this.array.length; i < l; i++) { + byte = this.array[i]; + + if (i === l - 1) + b = length % 32 || 32; + + for (var j = 0; j < b; j++) { + bit = (byte >> j) & 1; + + callback.call(scope, bit, i * 32 + j); + } + } +}; + +/** + * Method used to create an iterator over a set's values. + * + * @return {Iterator} + */ +BitSet.prototype.values = function() { + var length = this.length, + inner = false, + byte, + bit, + array = this.array, + l = array.length, + i = 0, + j = -1, + b = 32; + + return new Iterator(function next() { + if (!inner) { + + if (i >= l) + return { + done: true + }; + + if (i === l - 1) + b = length % 32 || 32; + + byte = array[i++]; + inner = true; + j = -1; + } + + j++; + + if (j >= b) { + inner = false; + return next(); + } + + bit = (byte >> j) & 1; + + return { + value: bit + }; + }); +}; + +/** + * Method used to create an iterator over a set's entries. + * + * @return {Iterator} + */ +BitSet.prototype.entries = function() { + var length = this.length, + inner = false, + byte, + bit, + array = this.array, + index, + l = array.length, + i = 0, + j = -1, + b = 32; + + return new Iterator(function next() { + if (!inner) { + + if (i >= l) + return { + done: true + }; + + if (i === l - 1) + b = length % 32 || 32; + + byte = array[i++]; + inner = true; + j = -1; + } + + j++; + index = (~-i) * 32 + j; + + if (j >= b) { + inner = false; + return next(); + } + + bit = (byte >> j) & 1; + + return { + value: [index, bit] + }; + }); +}; + +/** + * Attaching the #.values method to Symbol.iterator if possible. + */ +if (typeof Symbol !== 'undefined') + BitSet.prototype[Symbol.iterator] = BitSet.prototype.values; + +/** + * Convenience known methods. 
+ */ +BitSet.prototype.inspect = function() { + var proxy = new Uint8Array(this.length); + + this.forEach(function(bit, i) { + proxy[i] = bit; + }); + + // Trick so that node displays the name of the constructor + Object.defineProperty(proxy, 'constructor', { + value: BitSet, + enumerable: false + }); + + return proxy; +}; + +if (typeof Symbol !== 'undefined') + BitSet.prototype[Symbol.for('nodejs.util.inspect.custom')] = BitSet.prototype.inspect; + +BitSet.prototype.toJSON = function() { + return Array.from(this.array); +}; + +/** + * Exporting. + */ +module.exports = BitSet; diff --git a/amplify/functions/downloadDocument/node_modules/mnemonist/bit-vector.d.ts b/amplify/functions/downloadDocument/node_modules/mnemonist/bit-vector.d.ts new file mode 100644 index 0000000..4005d3c --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/mnemonist/bit-vector.d.ts @@ -0,0 +1,42 @@ +/** + * Mnemonist BitVector Typings + * ============================ + */ +type BitVectorOptions = { + initialLength?: number; + initialCapacity?: number; + policy?: (capacity: number) => number; +} + +export default class BitVector implements Iterable { + + // Members + capacity: number; + length: number; + size: number; + + // Constructor + constructor(length: number); + constructor(options: BitVectorOptions); + + // Methods + clear(): void; + set(index: number, value?: boolean | number): this; + reset(index: number, value: boolean | number): void; + flip(index: number, value: boolean | number): void; + reallocate(capacity: number): this; + grow(capacity?: number): this; + resize(length: number): this; + push(value: boolean | number): number; + pop(): number | undefined; + get(index: number): number; + test(index: number): boolean; + rank(r: number): number; + select(r: number): number; + forEach(callback: (index: number, value: number, set: this) => void, scope?: any): void; + values(): IterableIterator; + entries(): IterableIterator<[number, number]>; + [Symbol.iterator](): 
IterableIterator; + inspect(): any; + toJSON(): Array; +} diff --git a/amplify/functions/downloadDocument/node_modules/mnemonist/bit-vector.js b/amplify/functions/downloadDocument/node_modules/mnemonist/bit-vector.js new file mode 100644 index 0000000..5ee01e6 --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/mnemonist/bit-vector.js @@ -0,0 +1,550 @@ +/** + * Mnemonist BitVector + * ==================== + * + * JavaScript implementation of a dynamic BitSet based upon a Uint32Array. + * + * Notes: + * - (i >> 5) is the same as ((i / 32) | 0) + * - (i & 0x0000001f) is the same as (i % 32) + * - I could use a Float64Array to store more in less blocks but I would lose + * the benefits of byte comparison to keep track of size without popcounts. + */ +var Iterator = require('obliterator/iterator'), + bitwise = require('./utils/bitwise.js'); + +/** + * Constants. + */ +var DEFAULT_GROWING_POLICY = function(capacity) { + return Math.max(1, Math.ceil(capacity * 1.5)); +}; + +/** + * Helpers. + */ +function createByteArray(capacity) { + return new Uint32Array(Math.ceil(capacity / 32)); +} + +/** + * BitVector. + * + * @constructor + */ +function BitVector(initialLengthOrOptions) { + var initialLength = initialLengthOrOptions || 0, + policy = DEFAULT_GROWING_POLICY; + + if (typeof initialLengthOrOptions === 'object') { + initialLength = ( + initialLengthOrOptions.initialLength || + initialLengthOrOptions.initialCapacity || + 0 + ); + policy = initialLengthOrOptions.policy || policy; + } + + this.size = 0; + this.length = initialLength; + this.capacity = Math.ceil(this.length / 32) * 32; + this.policy = policy; + this.array = createByteArray(this.capacity); +} + +/** + * Method used to set the given bit's value. + * + * @param {number} index - Target bit index. + * @param {number|boolean} value - Value to set. + * @return {BitVector} + */ +BitVector.prototype.set = function(index, value) { + + // Out of bounds? 
+ if (this.length < index) + throw new Error('BitVector.set: index out of bounds.'); + + var byteIndex = index >> 5, + pos = index & 0x0000001f, + oldBytes = this.array[byteIndex], + newBytes; + + if (value === 0 || value === false) + newBytes = this.array[byteIndex] &= ~(1 << pos); + else + newBytes = this.array[byteIndex] |= (1 << pos); + + // Get unsigned representation. + newBytes = newBytes >>> 0; + + // Updating size + if (newBytes > oldBytes) + this.size++; + else if (newBytes < oldBytes) + this.size--; + + return this; +}; + +/** +* Method used to reset the given bit's value. +* +* @param {number} index - Target bit index. +* @return {BitVector} +*/ +BitVector.prototype.reset = function(index) { + var byteIndex = index >> 5, + pos = index & 0x0000001f, + oldBytes = this.array[byteIndex], + newBytes; + + newBytes = this.array[byteIndex] &= ~(1 << pos); + + // Updating size + if (newBytes < oldBytes) + this.size--; + + return this; +}; + +/** + * Method used to flip the value of the given bit. + * + * @param {number} index - Target bit index. + * @return {BitVector} + */ +BitVector.prototype.flip = function(index) { + var byteIndex = index >> 5, + pos = index & 0x0000001f, + oldBytes = this.array[byteIndex]; + + var newBytes = this.array[byteIndex] ^= (1 << pos); + + // Get unsigned representation. + newBytes = newBytes >>> 0; + + // Updating size + if (newBytes > oldBytes) + this.size++; + else if (newBytes < oldBytes) + this.size--; + + return this; +}; + +/** + * Method used to apply the growing policy. + * + * @param {number} [override] - Override capacity. 
+ * @return {number} + */ +BitVector.prototype.applyPolicy = function(override) { + var newCapacity = this.policy(override || this.capacity); + + if (typeof newCapacity !== 'number' || newCapacity < 0) + throw new Error('mnemonist/bit-vector.applyPolicy: policy returned an invalid value (expecting a positive integer).'); + + if (newCapacity <= this.capacity) + throw new Error('mnemonist/bit-vector.applyPolicy: policy returned a less or equal capacity to allocate.'); + + // TODO: we should probably check that the returned number is an integer + + // Ceil to nearest 32 + return Math.ceil(newCapacity / 32) * 32; +}; + +/** + * Method used to reallocate the underlying array. + * + * @param {number} capacity - Target capacity. + * @return {BitVector} + */ +BitVector.prototype.reallocate = function(capacity) { + var virtualCapacity = capacity; + + capacity = Math.ceil(capacity / 32) * 32; + + if (virtualCapacity < this.length) + this.length = virtualCapacity; + + if (capacity === this.capacity) + return this; + + var oldArray = this.array; + + var storageLength = capacity / 32; + + if (storageLength === this.array.length) + return this; + + if (storageLength > this.array.length) { + this.array = new Uint32Array(storageLength); + this.array.set(oldArray, 0); + } + else { + this.array = oldArray.slice(0, storageLength); + } + + this.capacity = capacity; + + return this; +}; + +/** + * Method used to grow the array. + * + * @param {number} [capacity] - Optional capacity to match. 
+ * @return {BitVector} + */ +BitVector.prototype.grow = function(capacity) { + var newCapacity; + + if (typeof capacity === 'number') { + + if (this.capacity >= capacity) + return this; + + // We need to match the given capacity + newCapacity = this.capacity; + + while (newCapacity < capacity) + newCapacity = this.applyPolicy(newCapacity); + + this.reallocate(newCapacity); + + return this; + } + + // We need to run the policy once + newCapacity = this.applyPolicy(); + this.reallocate(newCapacity); + + return this; +}; + +/** + * Method used to resize the array. Won't deallocate. + * + * @param {number} length - Target length. + * @return {BitVector} + */ +BitVector.prototype.resize = function(length) { + if (length === this.length) + return this; + + if (length < this.length) { + this.length = length; + return this; + } + + this.length = length; + this.reallocate(length); + + return this; +}; + +/** + * Method used to push a value in the set. + * + * @param {number|boolean} value + * @return {BitVector} + */ +BitVector.prototype.push = function(value) { + if (this.capacity === this.length) + this.grow(); + + if (value === 0 || value === false) + return ++this.length; + + this.size++; + + var index = this.length++, + byteIndex = index >> 5, + pos = index & 0x0000001f; + + this.array[byteIndex] |= (1 << pos); + + return this.length; +}; + +/** + * Method used to pop the last value of the set. + * + * @return {number} - The popped value. + */ +BitVector.prototype.pop = function() { + if (this.length === 0) + return; + + var index = --this.length; + + var byteIndex = index >> 5, + pos = index & 0x0000001f; + + return (this.array[byteIndex] >> pos) & 1; +}; + +/** + * Method used to get the given bit's value. + * + * @param {number} index - Target bit index. 
+ * @return {number} + */ +BitVector.prototype.get = function(index) { + if (this.length < index) + return undefined; + + var byteIndex = index >> 5, + pos = index & 0x0000001f; + + return (this.array[byteIndex] >> pos) & 1; +}; + +/** + * Method used to test the given bit's value. + * + * @param {number} index - Target bit index. + * @return {BitVector} + */ +BitVector.prototype.test = function(index) { + if (this.length < index) + return false; + + return Boolean(this.get(index)); +}; + +/** + * Method used to return the number of 1 from the beginning of the set up to + * the ith index. + * + * @param {number} i - Ith index (cannot be > length). + * @return {number} + */ +BitVector.prototype.rank = function(i) { + if (this.size === 0) + return 0; + + var byteIndex = i >> 5, + pos = i & 0x0000001f, + r = 0; + + // Accessing the bytes before the last one + for (var j = 0; j < byteIndex; j++) + r += bitwise.table8Popcount(this.array[j]); + + // Handling masked last byte + var maskedByte = this.array[byteIndex] & ((1 << pos) - 1); + + r += bitwise.table8Popcount(maskedByte); + + return r; +}; + +/** + * Method used to return the position of the rth 1 in the set or -1 if the + * set is empty. + * + * Note: usually select is implemented using binary search over rank but I + * tend to think the following linear implementation is faster since here + * rank is O(n) anyway. + * + * @param {number} r - Rth 1 to select (should be < length). + * @return {number} + */ +BitVector.prototype.select = function(r) { + if (this.size === 0) + return -1; + + // TODO: throw? 
+ if (r >= this.length) + return -1; + + var byte, + b = 32, + p = 0, + c = 0; + + for (var i = 0, l = this.array.length; i < l; i++) { + byte = this.array[i]; + + // The byte is empty, let's continue + if (byte === 0) + continue; + + // TODO: This branching might not be useful here + if (i === l - 1) + b = this.length % 32 || 32; + + // TODO: popcount should speed things up here + + for (var j = 0; j < b; j++, p++) { + c += (byte >> j) & 1; + + if (c === r) + return p; + } + } +}; + +/** + * Method used to iterate over the bit set's values. + * + * @param {function} callback - Function to call for each item. + * @param {object} scope - Optional scope. + * @return {undefined} + */ +BitVector.prototype.forEach = function(callback, scope) { + scope = arguments.length > 1 ? scope : this; + + var length = this.length, + byte, + bit, + b = 32; + + for (var i = 0, l = this.array.length; i < l; i++) { + byte = this.array[i]; + + if (i === l - 1) + b = length % 32 || 32; + + for (var j = 0; j < b; j++) { + bit = (byte >> j) & 1; + + callback.call(scope, bit, i * 32 + j); + } + } +}; + +/** + * Method used to create an iterator over a set's values. + * + * @return {Iterator} + */ +BitVector.prototype.values = function() { + var length = this.length, + inner = false, + byte, + bit, + array = this.array, + l = array.length, + i = 0, + j = -1, + b = 32; + + return new Iterator(function next() { + if (!inner) { + + if (i >= l) + return { + done: true + }; + + if (i === l - 1) + b = length % 32 || 32; + + byte = array[i++]; + inner = true; + j = -1; + } + + j++; + + if (j >= b) { + inner = false; + return next(); + } + + bit = (byte >> j) & 1; + + return { + value: bit + }; + }); +}; + +/** + * Method used to create an iterator over a set's entries. 
+ * + * @return {Iterator} + */ +BitVector.prototype.entries = function() { + var length = this.length, + inner = false, + byte, + bit, + array = this.array, + index, + l = array.length, + i = 0, + j = -1, + b = 32; + + return new Iterator(function next() { + if (!inner) { + + if (i >= l) + return { + done: true + }; + + if (i === l - 1) + b = length % 32 || 32; + + byte = array[i++]; + inner = true; + j = -1; + } + + j++; + index = (~-i) * 32 + j; + + if (j >= b) { + inner = false; + return next(); + } + + bit = (byte >> j) & 1; + + return { + value: [index, bit] + }; + }); +}; + +/** + * Attaching the #.values method to Symbol.iterator if possible. + */ +if (typeof Symbol !== 'undefined') + BitVector.prototype[Symbol.iterator] = BitVector.prototype.values; + +/** + * Convenience known methods. + */ +BitVector.prototype.inspect = function() { + var proxy = new Uint8Array(this.length); + + this.forEach(function(bit, i) { + proxy[i] = bit; + }); + + // Trick so that node displays the name of the constructor + Object.defineProperty(proxy, 'constructor', { + value: BitVector, + enumerable: false + }); + + return proxy; +}; + +if (typeof Symbol !== 'undefined') + BitVector.prototype[Symbol.for('nodejs.util.inspect.custom')] = BitVector.prototype.inspect; + +BitVector.prototype.toJSON = function() { + return Array.from(this.array.slice(0, (this.length >> 5) + 1)); +}; + +/** + * Exporting. 
+ */ +module.exports = BitVector; diff --git a/amplify/functions/downloadDocument/node_modules/mnemonist/bk-tree.d.ts b/amplify/functions/downloadDocument/node_modules/mnemonist/bk-tree.d.ts new file mode 100644 index 0000000..f158dfd --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/mnemonist/bk-tree.d.ts @@ -0,0 +1,24 @@ +/** + * Mnemonist BKTree Typings + * ========================= + */ +type DistanceFunction = (a: T, b: T) => number; + +export default class BKTree { + + // Members + distance: DistanceFunction; + size: number; + + // Constructor + constructor(distance: DistanceFunction); + + // Methods + add(item: T): this; + search(n: number, query: T): Array<{item: T, distance: number}>; + toJSON(): object; + inspect(): any; + + // Statics + static from(iterable: Iterable | {[key: string] : I}, distance: DistanceFunction): BKTree; +} \ No newline at end of file diff --git a/amplify/functions/downloadDocument/node_modules/mnemonist/bk-tree.js b/amplify/functions/downloadDocument/node_modules/mnemonist/bk-tree.js new file mode 100644 index 0000000..9c9792d --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/mnemonist/bk-tree.js @@ -0,0 +1,180 @@ +/* eslint no-constant-condition: 0 */ +/** + * Mnemonist BK Tree + * ================== + * + * Implementation of a Burkhard-Keller tree, allowing fast lookups of words + * that lie within a specified distance of the query word. + * + * [Reference]: + * https://en.wikipedia.org/wiki/BK-tree + * + * [Article]: + * W. Burkhard and R. Keller. Some approaches to best-match file searching, + * CACM, 1973 + */ +var forEach = require('obliterator/foreach'); + +/** + * BK Tree. + * + * @constructor + * @param {function} distance - Distance function to use. 
+ */ +function BKTree(distance) { + + if (typeof distance !== 'function') + throw new Error('mnemonist/BKTree.constructor: given `distance` should be a function.'); + + this.distance = distance; + this.clear(); +} + +/** + * Method used to add an item to the tree. + * + * @param {any} item - Item to add. + * @return {BKTree} + */ +BKTree.prototype.add = function(item) { + + // Initializing the tree with the first given word + if (!this.root) { + this.root = { + item: item, + children: {} + }; + + this.size++; + return this; + } + + var node = this.root, + d; + + while (true) { + d = this.distance(item, node.item); + + if (!node.children[d]) + break; + + node = node.children[d]; + } + + node.children[d] = { + item: item, + children: {} + }; + + this.size++; + return this; +}; + +/** + * Method used to query the tree. + * + * @param {number} n - Maximum distance between query & item. + * @param {any} query - Query + * @return {BKTree} + */ +BKTree.prototype.search = function(n, query) { + if (!this.root) + return []; + + var found = [], + stack = [this.root], + node, + child, + d, + i, + l; + + while (stack.length) { + node = stack.pop(); + d = this.distance(query, node.item); + + if (d <= n) + found.push({item: node.item, distance: d}); + + for (i = d - n, l = d + n + 1; i < l; i++) { + child = node.children[i]; + + if (child) + stack.push(child); + } + } + + return found; +}; + +/** + * Method used to clear the tree. + * + * @return {undefined} + */ +BKTree.prototype.clear = function() { + + // Properties + this.size = 0; + this.root = null; +}; + +/** + * Convenience known methods. 
+ */ +BKTree.prototype.toJSON = function() { + return this.root; +}; + +BKTree.prototype.inspect = function() { + var array = [], + stack = [this.root], + node, + d; + + while (stack.length) { + node = stack.pop(); + + if (!node) + continue; + + array.push(node.item); + + for (d in node.children) + stack.push(node.children[d]); + } + + // Trick so that node displays the name of the constructor + Object.defineProperty(array, 'constructor', { + value: BKTree, + enumerable: false + }); + + return array; +}; + +if (typeof Symbol !== 'undefined') + BKTree.prototype[Symbol.for('nodejs.util.inspect.custom')] = BKTree.prototype.inspect; + +/** + * Static @.from function taking an arbitrary iterable & converting it into + * a tree. + * + * @param {Iterable} iterable - Target iterable. + * @param {function} distance - Distance function. + * @return {Heap} + */ +BKTree.from = function(iterable, distance) { + var tree = new BKTree(distance); + + forEach(iterable, function(value) { + tree.add(value); + }); + + return tree; +}; + +/** + * Exporting. 
+ */ +module.exports = BKTree; diff --git a/amplify/functions/downloadDocument/node_modules/mnemonist/bloom-filter.d.ts b/amplify/functions/downloadDocument/node_modules/mnemonist/bloom-filter.d.ts new file mode 100644 index 0000000..dc9b2fa --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/mnemonist/bloom-filter.d.ts @@ -0,0 +1,29 @@ +/** + * Mnemonist BloomFilter Typings + * ============================== + */ +type BloomFilterOptions = { + capacity: number; + errorRate?: number; +} + +export default class BloomFilter { + + // Members + capacity: number; + errorRate: number; + hashFunctions: number; + + // Constructor + constructor(capacity: number); + constructor(options: BloomFilterOptions); + + // Methods + clear(): void; + add(string: string): this; + test(string: string): boolean; + toJSON(): Uint8Array; + + // Statics + from(iterable: Iterable, options?: number | BloomFilterOptions): BloomFilter; +} diff --git a/amplify/functions/downloadDocument/node_modules/mnemonist/bloom-filter.js b/amplify/functions/downloadDocument/node_modules/mnemonist/bloom-filter.js new file mode 100644 index 0000000..ba3ee76 --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/mnemonist/bloom-filter.js @@ -0,0 +1,186 @@ +/** + * Mnemonist Bloom Filter + * ======================= + * + * Bloom Filter implementation relying on MurmurHash3. + */ +var murmurhash3 = require('./utils/murmurhash3.js'), + forEach = require('obliterator/foreach'); + +/** + * Constants. + */ +var LN2_SQUARED = Math.LN2 * Math.LN2; + +/** + * Defaults. + */ +var DEFAULTS = { + errorRate: 0.005 +}; + +/** + * Function used to convert a string into a Uint16 byte array. + * + * @param {string} string - Target string. 
+ * @return {Uint16Array} + */ +function stringToByteArray(string) { + var array = new Uint16Array(string.length), + i, + l; + + for (i = 0, l = string.length; i < l; i++) + array[i] = string.charCodeAt(i); + + return array; +} + +/** + * Function used to hash the given byte array. + * + * @param {number} length - Length of the filter's byte array. + * @param {number} seed - Seed to use for the hash function. + * @param {Uint16Array} - Byte array representing the string. + * @return {number} - The hash. + * + * @note length * 8 should probably already be computed as well as seeds. + */ +function hashArray(length, seed, array) { + var hash = murmurhash3((seed * 0xFBA4C795) & 0xFFFFFFFF, array); + + return hash % (length * 8); +} + +/** + * Bloom Filter. + * + * @constructor + * @param {number|object} capacityOrOptions - Capacity or options. + */ +function BloomFilter(capacityOrOptions) { + var options = {}; + + if (!capacityOrOptions) + throw new Error('mnemonist/BloomFilter.constructor: a BloomFilter must be created with a capacity.'); + + if (typeof capacityOrOptions === 'object') + options = capacityOrOptions; + else + options.capacity = capacityOrOptions; + + // Handling capacity + if (typeof options.capacity !== 'number' || options.capacity <= 0) + throw new Error('mnemonist/BloomFilter.constructor: `capacity` option should be a positive integer.'); + + this.capacity = options.capacity; + + // Handling error rate + this.errorRate = options.errorRate || DEFAULTS.errorRate; + + if (typeof this.errorRate !== 'number' || options.errorRate <= 0) + throw new Error('mnemonist/BloomFilter.constructor: `errorRate` option should be a positive float.'); + + this.clear(); +} + +/** + * Method used to clear the filter. 
+ * + * @return {undefined} + */ +BloomFilter.prototype.clear = function() { + + // Optimizing number of bits & number of hash functions + var bits = -1 / LN2_SQUARED * this.capacity * Math.log(this.errorRate), + length = (bits / 8) | 0; + + this.hashFunctions = (length * 8 / this.capacity * Math.LN2) | 0; + + // Creating the data array + this.data = new Uint8Array(length); + + return; +}; + +/** + * Method used to add an string to the filter. + * + * @param {string} string - Item to add. + * @return {BloomFilter} + * + * @note Should probably create a hash function working directly on a string. + */ +BloomFilter.prototype.add = function(string) { + + // Converting the string to a byte array + var array = stringToByteArray(string); + + // Applying the n hash functions + for (var i = 0, l = this.hashFunctions; i < l; i++) { + var index = hashArray(this.data.length, i, array), + position = (1 << (7 & index)); + + this.data[index >> 3] |= position; + } + + return this; +}; + +/** + * Method used to test the given string. + * + * @param {string} string - Item to test. + * @return {boolean} + */ +BloomFilter.prototype.test = function(string) { + + // Converting the string to a byte array + var array = stringToByteArray(string); + + // Applying the n hash functions + for (var i = 0, l = this.hashFunctions; i < l; i++) { + var index = hashArray(this.data.length, i, array); + + if (!(this.data[index >> 3] & (1 << (7 & index)))) + return false; + } + + return true; +}; + +/** + * Convenience known methods. + */ +BloomFilter.prototype.toJSON = function() { + return this.data; +}; + +/** + * Static @.from function taking an arbitrary iterable & converting it into + * a filter. + * + * @param {Iterable} iterable - Target iterable. 
+ * @return {BloomFilter} + */ +BloomFilter.from = function(iterable, options) { + if (!options) { + options = iterable.length || iterable.size; + + if (typeof options !== 'number') + throw new Error('BloomFilter.from: could not infer the filter\'s capacity. Try passing it as second argument.'); + } + + var filter = new BloomFilter(options); + + forEach(iterable, function(value) { + filter.add(value); + }); + + return filter; +}; + +/** + * Exporting. + */ +module.exports = BloomFilter; diff --git a/amplify/functions/downloadDocument/node_modules/mnemonist/circular-buffer.d.ts b/amplify/functions/downloadDocument/node_modules/mnemonist/circular-buffer.d.ts new file mode 100644 index 0000000..ec1fa4c --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/mnemonist/circular-buffer.d.ts @@ -0,0 +1,34 @@ +/** + * Mnemonist CircularBuffer Typings + * ================================= + */ +import {IArrayLikeConstructor} from './utils/types'; + +export default class CircularBuffer implements Iterable { + + // Members + capacity: number; + size: number; + + // Constructor + constructor(ArrayClass: IArrayLikeConstructor, capacity: number); + + // Methods + clear(): void; + push(item: T): number; + unshift(item: T): number; + pop(): T | undefined; + shift(): T | undefined; + peekFirst(): T | undefined; + peekLast(): T | undefined; + get(index: number): T | undefined; + forEach(callback: (item: T, index: number, buffer: this) => void, scope?: any): void; + toArray(): Iterable; + values(): IterableIterator; + entries(): IterableIterator<[number, T]>; + [Symbol.iterator](): IterableIterator; + inspect(): any; + + // Statics + static from(iterable: Iterable | {[key: string] : I}, ArrayClass: IArrayLikeConstructor, capacity?: number): CircularBuffer; +} diff --git a/amplify/functions/downloadDocument/node_modules/mnemonist/circular-buffer.js b/amplify/functions/downloadDocument/node_modules/mnemonist/circular-buffer.js new file mode 100644 index 0000000..d3ef950 --- 
/dev/null +++ b/amplify/functions/downloadDocument/node_modules/mnemonist/circular-buffer.js @@ -0,0 +1,131 @@ +/** + * Mnemonist CircularBuffer + * ========================= + * + * Circular buffer implementation fit to use as a finite deque. + */ +var iterables = require('./utils/iterables.js'), + FixedDeque = require('./fixed-deque'); + +/** + * CircularBuffer. + * + * @constructor + */ +function CircularBuffer(ArrayClass, capacity) { + + if (arguments.length < 2) + throw new Error('mnemonist/circular-buffer: expecting an Array class and a capacity.'); + + if (typeof capacity !== 'number' || capacity <= 0) + throw new Error('mnemonist/circular-buffer: `capacity` should be a positive number.'); + + this.ArrayClass = ArrayClass; + this.capacity = capacity; + this.items = new ArrayClass(this.capacity); + this.clear(); +} + +/** + * Pasting most of the prototype from FixedDeque. + */ +function paste(name) { + CircularBuffer.prototype[name] = FixedDeque.prototype[name]; +} + +Object.keys(FixedDeque.prototype).forEach(paste); + +if (typeof Symbol !== 'undefined') + Object.getOwnPropertySymbols(FixedDeque.prototype).forEach(paste); + +/** + * Method used to append a value to the buffer. + * + * @param {any} item - Item to append. + * @return {number} - Returns the new size of the buffer. + */ +CircularBuffer.prototype.push = function(item) { + var index = (this.start + this.size) % this.capacity; + + this.items[index] = item; + + // Overwriting? + if (this.size === this.capacity) { + + // If start is at the end, we wrap around the buffer + this.start = (index + 1) % this.capacity; + + return this.size; + } + + return ++this.size; +}; + +/** + * Method used to prepend a value to the buffer. + * + * @param {any} item - Item to prepend. + * @return {number} - Returns the new size of the buffer. 
+ */ +CircularBuffer.prototype.unshift = function(item) { + var index = this.start - 1; + + if (this.start === 0) + index = this.capacity - 1; + + this.items[index] = item; + + // Overwriting + if (this.size === this.capacity) { + + this.start = index; + + return this.size; + } + + this.start = index; + + return ++this.size; +}; + +/** + * Static @.from function taking an arbitrary iterable & converting it into + * a circular buffer. + * + * @param {Iterable} iterable - Target iterable. + * @param {function} ArrayClass - Array class to use. + * @param {number} capacity - Desired capacity. + * @return {FiniteStack} + */ +CircularBuffer.from = function(iterable, ArrayClass, capacity) { + if (arguments.length < 3) { + capacity = iterables.guessLength(iterable); + + if (typeof capacity !== 'number') + throw new Error('mnemonist/circular-buffer.from: could not guess iterable length. Please provide desired capacity as last argument.'); + } + + var buffer = new CircularBuffer(ArrayClass, capacity); + + if (iterables.isArrayLike(iterable)) { + var i, l; + + for (i = 0, l = iterable.length; i < l; i++) + buffer.items[i] = iterable[i]; + + buffer.size = l; + + return buffer; + } + + iterables.forEach(iterable, function(value) { + buffer.push(value); + }); + + return buffer; +}; + +/** + * Exporting. + */ +module.exports = CircularBuffer; diff --git a/amplify/functions/downloadDocument/node_modules/mnemonist/critbit-tree-map.js b/amplify/functions/downloadDocument/node_modules/mnemonist/critbit-tree-map.js new file mode 100644 index 0000000..1c41a9a --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/mnemonist/critbit-tree-map.js @@ -0,0 +1,515 @@ +/* eslint no-constant-condition: 0 */ +/** + * Mnemonist CritBitTreeMap + * ========================= + * + * JavaScript implementation of a crit-bit tree, also called PATRICIA tree. + * This tree is a basically a bitwise radix tree and is supposedly much more + * efficient than a standard Trie. 
+ * + * [References]: + * https://cr.yp.to/critbit.html + * https://www.imperialviolet.org/binary/critbit.pdf + */ +var bitwise = require('./utils/bitwise.js'); + +/** + * Helpers. + */ + +/** + * Helper returning the direction we need to take given a key and an + * encoded critbit. + * + * @param {string} key - Target key. + * @param {number} critbit - Packed address of byte + mask. + * @return {number} - 0, left or 1, right. + */ +function getDirection(key, critbit) { + var byteIndex = critbit >> 8; + + if (byteIndex > key.length - 1) + return 0; + + var byte = key.charCodeAt(byteIndex), + mask = critbit & 0xff; + + return (1 + (byte | mask)) >> 8; +} + +/** + * Helper returning the packed address of byte + mask or -1 if strings + * are identical. + * + * @param {string} a - First key. + * @param {string} b - Second key. + * @return {number} - Packed address of byte + mask. + */ +function findCriticalBit(a, b) { + var i = 0, + tmp; + + // Swapping so a is the shortest + if (a.length > b.length) { + tmp = b; + b = a; + a = tmp; + } + + var l = a.length, + mask; + + while (i < l) { + if (a[i] !== b[i]) { + mask = bitwise.criticalBit8Mask( + a.charCodeAt(i), + b.charCodeAt(i) + ); + + return (i << 8) | mask; + } + + i++; + } + + // Strings are identical + if (a.length === b.length) + return -1; + + // NOTE: x ^ 0 is the same as x + mask = bitwise.criticalBit8Mask(b.charCodeAt(i)); + + return (i << 8) | mask; +} + +/** + * Class representing a crit-bit tree's internal node. + * + * @constructor + * @param {number} critbit - Packed address of byte + mask. + */ +function InternalNode(critbit) { + this.critbit = critbit; + this.left = null; + this.right = null; +} + +/** + * Class representing a crit-bit tree's external node. + * Note that it is possible to replace those nodes by flat arrays. + * + * @constructor + * @param {string} key - Node's key. + * @param {any} value - Arbitrary value. 
+ */ +function ExternalNode(key, value) { + this.key = key; + this.value = value; +} + +/** + * CritBitTreeMap. + * + * @constructor + */ +function CritBitTreeMap() { + + // Properties + this.root = null; + this.size = 0; + + this.clear(); +} + +/** + * Method used to clear the CritBitTreeMap. + * + * @return {undefined} + */ +CritBitTreeMap.prototype.clear = function() { + + // Properties + this.root = null; + this.size = 0; +}; + +/** + * Method used to set the value of the given key in the trie. + * + * @param {string} key - Key to set. + * @param {any} value - Arbitrary value. + * @return {CritBitTreeMap} + */ +CritBitTreeMap.prototype.set = function(key, value) { + + // Tree is empty + if (this.size === 0) { + this.root = new ExternalNode(key, value); + this.size++; + + return this; + } + + // Walk state + var node = this.root, + ancestors = [], + path = [], + ancestor, + parent, + child, + critbit, + internal, + left, + leftPath, + best, + dir, + i, + l; + + // Walking the tree + while (true) { + + // Traversing an internal node + if (node instanceof InternalNode) { + dir = getDirection(key, node.critbit); + + // Going left & creating key if not yet there + if (dir === 0) { + if (!node.left) { + node.left = new ExternalNode(key, value); + return this; + } + + ancestors.push(node); + path.push(true); + + node = node.left; + } + + // Going right & creating key if not yet there + else { + if (!node.right) { + node.right = new ExternalNode(key, value); + return this; + } + + ancestors.push(node); + path.push(false); + + node = node.right; + } + } + + // Reaching an external node + else { + + // 1. 
Creating a new external node + critbit = findCriticalBit(key, node.key); + + // Key is identical, we just replace the value + if (critbit === -1) { + node.value = value; + return this; + } + + this.size++; + + internal = new InternalNode(critbit); + + left = getDirection(key, critbit) === 0; + + // TODO: maybe setting opposite pointer is not necessary + if (left) { + internal.left = new ExternalNode(key, value); + internal.right = node; + } + else { + internal.left = node; + internal.right = new ExternalNode(key, value); + } + + // 2. Bubbling up + best = -1; + l = ancestors.length; + + for (i = l - 1; i >= 0; i--) { + ancestor = ancestors[i]; + + if (ancestor.critbit > critbit) + continue; + + best = i; + break; + } + + // Do we need to attach to the root? + if (best < 0) { + this.root = internal; + + // Need to rewire parent as child? + if (l > 0) { + parent = ancestors[0]; + + if (left) + internal.right = parent; + else + internal.left = parent; + } + } + + // Simple case without rotation + else if (best === l - 1) { + parent = ancestors[best]; + leftPath = path[best]; + + if (leftPath) + parent.left = internal; + else + parent.right = internal; + } + + // Full rotation + else { + parent = ancestors[best]; + leftPath = path[best]; + child = ancestors[best + 1]; + + if (leftPath) + parent.left = internal; + else + parent.right = internal; + + if (left) + internal.right = child; + else + internal.left = child; + } + + return this; + } + } +}; + +/** + * Method used to get the value attached to the given key in the tree or + * undefined if not found. + * + * @param {string} key - Key to get. + * @return {any} + */ +CritBitTreeMap.prototype.get = function(key) { + + // Walk state + var node = this.root, + dir; + + // Walking the tree + while (true) { + + // Dead end + if (node === null) + return; + + // Traversing an internal node + if (node instanceof InternalNode) { + dir = getDirection(key, node.critbit); + + node = dir ? 
node.right : node.left; + } + + // Reaching an external node + else { + if (node.key !== key) + return; + + return node.value; + } + } +}; + +/** + * Method used to return whether the given key exists in the tree. + * + * @param {string} key - Key to test. + * @return {boolean} + */ +CritBitTreeMap.prototype.has = function(key) { + + // Walk state + var node = this.root, + dir; + + // Walking the tree + while (true) { + + // Dead end + if (node === null) + return false; + + // Traversing an internal node + if (node instanceof InternalNode) { + dir = getDirection(key, node.critbit); + + node = dir ? node.right : node.left; + } + + // Reaching an external node + else { + return node.key === key; + } + } +}; + +/** + * Method used to delete the given key from the tree and return whether the + * key did exist or not. + * + * @param {string} key - Key to delete. + * @return {boolean} + */ +CritBitTreeMap.prototype.delete = function(key) { + + // Walk state + var node = this.root, + dir; + + var parent = null, + grandParent = null, + wentLeftForParent = false, + wentLeftForGrandparent = false; + + // Walking the tree + while (true) { + + // Dead end + if (node === null) + return false; + + // Traversing an internal node + if (node instanceof InternalNode) { + dir = getDirection(key, node.critbit); + + if (dir === 0) { + grandParent = parent; + wentLeftForGrandparent = wentLeftForParent; + parent = node; + wentLeftForParent = true; + + node = node.left; + } + else { + grandParent = parent; + wentLeftForGrandparent = wentLeftForParent; + parent = node; + wentLeftForParent = false; + + node = node.right; + } + } + + // Reaching an external node + else { + if (key !== node.key) + return false; + + this.size--; + + // Rewiring + if (parent === null) { + this.root = null; + } + + else if (grandParent === null) { + if (wentLeftForParent) + this.root = parent.right; + else + this.root = parent.left; + } + + else { + if (wentLeftForGrandparent) { + if (wentLeftForParent) { + 
grandParent.left = parent.right; + } + else { + grandParent.left = parent.left; + } + } + else { + if (wentLeftForParent) { + grandParent.right = parent.right; + } + else { + grandParent.right = parent.left; + } + } + } + + return true; + } + } +}; + +/** + * Method used to iterate over the tree in key order. + * + * @param {function} callback - Function to call for each item. + * @param {object} scope - Optional scope. + * @return {undefined} + */ +CritBitTreeMap.prototype.forEach = function(callback, scope) { + scope = arguments.length > 1 ? scope : this; + + // Inorder traversal of the tree + var current = this.root, + stack = []; + + while (true) { + + if (current !== null) { + stack.push(current); + + current = current instanceof InternalNode ? current.left : null; + } + + else { + if (stack.length > 0) { + current = stack.pop(); + + if (current instanceof ExternalNode) + callback.call(scope, current.value, current.key); + + current = current instanceof InternalNode ? current.right : null; + } + else { + break; + } + } + } +}; + +/** + * Convenience known methods. + */ +CritBitTreeMap.prototype.inspect = function() { + return this; +}; + +if (typeof Symbol !== 'undefined') + CritBitTreeMap.prototype[Symbol.for('nodejs.util.inspect.custom')] = CritBitTreeMap.prototype.inspect; + +/** + * Static @.from function taking an arbitrary iterable & converting it into + * a CritBitTreeMap. + * + * @param {Iterable} iterable - Target iterable. + * @return {CritBitTreeMap} + */ +// CritBitTreeMap.from = function(iterable) { + +// }; + +/** + * Exporting. 
+ */ +module.exports = CritBitTreeMap; diff --git a/amplify/functions/downloadDocument/node_modules/mnemonist/default-map.d.ts b/amplify/functions/downloadDocument/node_modules/mnemonist/default-map.d.ts new file mode 100644 index 0000000..186878c --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/mnemonist/default-map.d.ts @@ -0,0 +1,29 @@ +/** + * Mnemonist DefaultMap Typings + * ============================= + */ +export default class DefaultMap implements Iterable<[K, V]> { + + // Members + size: number; + + // Constructor + constructor(factory: (key: K, index: number) => V); + + // Methods + clear(): void; + set(key: K, value: V): this; + delete(key: K): boolean; + has(key: K): boolean; + get(key: K): V; + peek(key: K): V | undefined; + forEach(callback: (value: V, key: K, map: this) => void, scope?: any): void; + keys(): IterableIterator; + values(): IterableIterator; + entries(): IterableIterator<[K, V]>; + [Symbol.iterator](): IterableIterator<[K, V]>; + inspect(): any; + + // Statics + static autoIncrement(): number; +} diff --git a/amplify/functions/downloadDocument/node_modules/mnemonist/default-map.js b/amplify/functions/downloadDocument/node_modules/mnemonist/default-map.js new file mode 100644 index 0000000..dbe41d7 --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/mnemonist/default-map.js @@ -0,0 +1,162 @@ +/** + * Mnemonist DefaultMap + * ===================== + * + * JavaScript implementation of a default map that will return a constructed + * value any time one tries to access an inexisting key. It's quite similar + * to python's defaultdict. + */ + +/** + * DefaultMap. + * + * @constructor + */ +function DefaultMap(factory) { + if (typeof factory !== 'function') + throw new Error('mnemonist/DefaultMap.constructor: expecting a function.'); + + this.items = new Map(); + this.factory = factory; + this.size = 0; +} + +/** + * Method used to clear the structure. 
+ * + * @return {undefined} + */ +DefaultMap.prototype.clear = function() { + + // Properties + this.items.clear(); + this.size = 0; +}; + +/** + * Method used to get the value set for given key. If the key does not exist, + * the value will be created using the provided factory. + * + * @param {any} key - Target key. + * @return {any} + */ +DefaultMap.prototype.get = function(key) { + var value = this.items.get(key); + + if (typeof value === 'undefined') { + value = this.factory(key, this.size); + this.items.set(key, value); + this.size++; + } + + return value; +}; + +/** + * Method used to get the value set for given key. If the key does not exist, + * a value won't be created. + * + * @param {any} key - Target key. + * @return {any} + */ +DefaultMap.prototype.peek = function(key) { + return this.items.get(key); +}; + +/** + * Method used to set a value for given key. + * + * @param {any} key - Target key. + * @param {any} value - Value. + * @return {DefaultMap} + */ +DefaultMap.prototype.set = function(key, value) { + this.items.set(key, value); + this.size = this.items.size; + + return this; +}; + +/** + * Method used to test the existence of a key in the map. + * + * @param {any} key - Target key. + * @return {boolean} + */ +DefaultMap.prototype.has = function(key) { + return this.items.has(key); +}; + +/** + * Method used to delete target key. + * + * @param {any} key - Target key. + * @return {boolean} + */ +DefaultMap.prototype.delete = function(key) { + var deleted = this.items.delete(key); + + this.size = this.items.size; + + return deleted; +}; + +/** + * Method used to iterate over each of the key/value pairs. + * + * @param {function} callback - Function to call for each item. + * @param {object} scope - Optional scope. + * @return {undefined} + */ +DefaultMap.prototype.forEach = function(callback, scope) { + scope = arguments.length > 1 ? scope : this; + + this.items.forEach(callback, scope); +}; + +/** + * Iterators. 
+ */ +DefaultMap.prototype.entries = function() { + return this.items.entries(); +}; + +DefaultMap.prototype.keys = function() { + return this.items.keys(); +}; + +DefaultMap.prototype.values = function() { + return this.items.values(); +}; + +/** + * Attaching the #.entries method to Symbol.iterator if possible. + */ +if (typeof Symbol !== 'undefined') + DefaultMap.prototype[Symbol.iterator] = DefaultMap.prototype.entries; + +/** + * Convenience known methods. + */ +DefaultMap.prototype.inspect = function() { + return this.items; +}; + +if (typeof Symbol !== 'undefined') + DefaultMap.prototype[Symbol.for('nodejs.util.inspect.custom')] = DefaultMap.prototype.inspect; + +/** + * Typical factories. + */ +DefaultMap.autoIncrement = function() { + var i = 0; + + return function() { + return i++; + }; +}; + +/** + * Exporting. + */ +module.exports = DefaultMap; diff --git a/amplify/functions/downloadDocument/node_modules/mnemonist/default-weak-map.d.ts b/amplify/functions/downloadDocument/node_modules/mnemonist/default-weak-map.d.ts new file mode 100644 index 0000000..579a883 --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/mnemonist/default-weak-map.d.ts @@ -0,0 +1,18 @@ +/** + * Mnemonist DefaultWeakMap Typings + * ================================ + */ +export default class DefaultWeakMap { + + // Constructor + constructor(factory: (key: K) => V); + + // Methods + clear(): void; + set(key: K, value: V): this; + delete(key: K): boolean; + has(key: K): boolean; + get(key: K): V; + peek(key: K): V | undefined; + inspect(): any; +} diff --git a/amplify/functions/downloadDocument/node_modules/mnemonist/default-weak-map.js b/amplify/functions/downloadDocument/node_modules/mnemonist/default-weak-map.js new file mode 100644 index 0000000..aa8931c --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/mnemonist/default-weak-map.js @@ -0,0 +1,108 @@ +/** + * Mnemonist DefaultWeakMap + * ========================= + * + * JavaScript implementation 
of a default weak map that will return a constructed + * value any time one tries to access an non-existing key. It is similar to + * DefaultMap but uses ES6 WeakMap that only holds weak reference to keys. + */ + +/** + * DefaultWeakMap. + * + * @constructor + */ +function DefaultWeakMap(factory) { + if (typeof factory !== 'function') + throw new Error('mnemonist/DefaultWeakMap.constructor: expecting a function.'); + + this.items = new WeakMap(); + this.factory = factory; +} + +/** + * Method used to clear the structure. + * + * @return {undefined} + */ +DefaultWeakMap.prototype.clear = function() { + + // Properties + this.items = new WeakMap(); +}; + +/** + * Method used to get the value set for given key. If the key does not exist, + * the value will be created using the provided factory. + * + * @param {any} key - Target key. + * @return {any} + */ +DefaultWeakMap.prototype.get = function(key) { + var value = this.items.get(key); + + if (typeof value === 'undefined') { + value = this.factory(key); + this.items.set(key, value); + } + + return value; +}; + +/** + * Method used to get the value set for given key. If the key does not exist, + * a value won't be created. + * + * @param {any} key - Target key. + * @return {any} + */ +DefaultWeakMap.prototype.peek = function(key) { + return this.items.get(key); +}; + +/** + * Method used to set a value for given key. + * + * @param {any} key - Target key. + * @param {any} value - Value. + * @return {DefaultMap} + */ +DefaultWeakMap.prototype.set = function(key, value) { + this.items.set(key, value); + return this; +}; + +/** + * Method used to test the existence of a key in the map. + * + * @param {any} key - Target key. + * @return {boolean} + */ +DefaultWeakMap.prototype.has = function(key) { + return this.items.has(key); +}; + +/** + * Method used to delete target key. + * + * @param {any} key - Target key. 
+ * @return {boolean} + */ +DefaultWeakMap.prototype.delete = function(key) { + return this.items.delete(key); +}; + +/** + * Convenience known methods. + */ +DefaultWeakMap.prototype.inspect = function() { + return this.items; +}; + +if (typeof Symbol !== 'undefined') + DefaultWeakMap.prototype[Symbol.for('nodejs.util.inspect.custom')] = DefaultWeakMap.prototype.inspect; + +/** + * Exporting. + */ +module.exports = DefaultWeakMap; diff --git a/amplify/functions/downloadDocument/node_modules/mnemonist/fibonacci-heap.d.ts b/amplify/functions/downloadDocument/node_modules/mnemonist/fibonacci-heap.d.ts new file mode 100644 index 0000000..cb15ab0 --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/mnemonist/fibonacci-heap.d.ts @@ -0,0 +1,65 @@ +/** + * Mnemonist FibonacciHeap Typings + * ================================ + */ +type FibonacciHeapComparator = (a: T, b: T) => number; + +export default class FibonacciHeap { + + // Members + size: number; + + // Constructor + constructor(comparator?: FibonacciHeapComparator); + + // Methods + clear(): void; + push(item: T): number; + peek(): T | undefined; + pop(): T | undefined; + inspect(): any; + + // Statics + static from( + iterable: Iterable | {[key: string] : I}, + comparator?: FibonacciHeapComparator + ): FibonacciHeap; +} + +export class MinFibonacciHeap { + + // Members + size: number; + + // Constructor + constructor(comparator?: FibonacciHeapComparator); + + // Methods + clear(): void; + push(item: T): number; + peek(): T | undefined; + pop(): T | undefined; + inspect(): any; + + // Statics + static from(iterable: Iterable | {[key: string] : I}): FibonacciHeap; +} + +export class MaxFibonacciHeap { + + // Members + size: number; + + // Constructor + constructor(comparator?: FibonacciHeapComparator); + + // Methods + clear(): void; + push(item: T): number; + peek(): T | undefined; + pop(): T | undefined; + inspect(): any; + + // Statics + static from(iterable: Iterable | {[key: string] : I}): 
FibonacciHeap; +} diff --git a/amplify/functions/downloadDocument/node_modules/mnemonist/fibonacci-heap.js b/amplify/functions/downloadDocument/node_modules/mnemonist/fibonacci-heap.js new file mode 100644 index 0000000..f41334f --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/mnemonist/fibonacci-heap.js @@ -0,0 +1,320 @@ +/* eslint no-constant-condition: 0 */ +/** + * Mnemonist Fibonacci Heap + * ========================= + * + * Fibonacci heap implementation. + */ +var comparators = require('./utils/comparators.js'), + forEach = require('obliterator/foreach'); + +var DEFAULT_COMPARATOR = comparators.DEFAULT_COMPARATOR, + reverseComparator = comparators.reverseComparator; + +/** + * Fibonacci Heap. + * + * @constructor + */ +function FibonacciHeap(comparator) { + this.clear(); + this.comparator = comparator || DEFAULT_COMPARATOR; + + if (typeof this.comparator !== 'function') + throw new Error('mnemonist/FibonacciHeap.constructor: given comparator should be a function.'); +} + +/** + * Method used to clear the heap. + * + * @return {undefined} + */ +FibonacciHeap.prototype.clear = function() { + + // Properties + this.root = null; + this.min = null; + this.size = 0; +}; + +/** + * Function used to create a node. + * + * @param {any} item - Target item. + * @return {object} + */ +function createNode(item) { + return { + item: item, + degree: 0 + }; +} + +/** + * Function used to merge the given node with the root list. + * + * @param {FibonacciHeap} heap - Target heap. + * @param {Node} node - Target node. + */ +function mergeWithRoot(heap, node) { + if (!heap.root) { + heap.root = node; + } + else { + node.right = heap.root.right; + node.left = heap.root; + heap.root.right.left = node; + heap.root.right = node; + } +} + +/** + * Method used to push an item into the heap. + * + * @param {any} item - Item to push. 
+ * @return {number} + */ +FibonacciHeap.prototype.push = function(item) { + var node = createNode(item); + node.left = node; + node.right = node; + mergeWithRoot(this, node); + + if (!this.min || this.comparator(node.item, this.min.item) <= 0) + this.min = node; + + return ++this.size; +}; + +/** + * Method used to get the "first" item of the heap. + * + * @return {any} + */ +FibonacciHeap.prototype.peek = function() { + return this.min ? this.min.item : undefined; +}; + +/** + * Function used to consume the given linked list. + * + * @param {Node} head - Head node. + * @param {array} + */ +function consumeLinkedList(head) { + var nodes = [], + node = head, + flag = false; + + while (true) { + if (node === head && flag) + break; + else if (node === head) + flag = true; + + nodes.push(node); + node = node.right; + } + + return nodes; +} + +/** + * Function used to remove the target node from the root list. + * + * @param {FibonacciHeap} heap - Target heap. + * @param {Node} node - Target node. + */ +function removeFromRoot(heap, node) { + if (heap.root === node) + heap.root = node.right; + node.left.right = node.right; + node.right.left = node.left; +} + +/** + * Function used to merge the given node with the child list of a root node. + * + * @param {Node} parent - Parent node. + * @param {Node} node - Target node. + */ +function mergeWithChild(parent, node) { + if (!parent.child) { + parent.child = node; + } + else { + node.right = parent.child.right; + node.left = parent.child; + parent.child.right.left = node; + parent.child.right = node; + } +} + +/** + * Function used to link one node to another in the root list. + * + * @param {FibonacciHeap} heap - Target heap. + * @param {Node} y - Y node. + * @param {Node} x - X node. + */ +function link(heap, y, x) { + removeFromRoot(heap, y); + y.left = y; + y.right = y; + mergeWithChild(x, y); + x.degree++; + y.parent = x; +} + +/** + * Function used to consolidate the heap. 
+ * + * @param {FibonacciHeap} heap - Target heap. + */ +function consolidate(heap) { + var A = new Array(heap.size), + nodes = consumeLinkedList(heap.root), + i, l, x, y, d, t; + + for (i = 0, l = nodes.length; i < l; i++) { + x = nodes[i]; + d = x.degree; + + while (A[d]) { + y = A[d]; + + if (heap.comparator(x.item, y.item) > 0) { + t = x; + x = y; + y = t; + } + + link(heap, y, x); + A[d] = null; + d++; + } + + A[d] = x; + } + + for (i = 0; i < heap.size; i++) { + if (A[i] && heap.comparator(A[i].item, heap.min.item) <= 0) + heap.min = A[i]; + } +} + +/** + * Method used to retrieve & remove the "first" item of the heap. + * + * @return {any} + */ +FibonacciHeap.prototype.pop = function() { + if (!this.size) + return undefined; + + var z = this.min; + + if (z.child) { + var nodes = consumeLinkedList(z.child), + node, + i, + l; + + for (i = 0, l = nodes.length; i < l; i++) { + node = nodes[i]; + + mergeWithRoot(this, node); + delete node.parent; + } + } + + removeFromRoot(this, z); + + if (z === z.right) { + this.min = null; + this.root = null; + } + else { + this.min = z.right; + consolidate(this); + } + + this.size--; + + return z.item; +}; + +/** + * Convenience known methods. + */ +FibonacciHeap.prototype.inspect = function() { + var proxy = { + size: this.size + }; + + if (this.min && 'item' in this.min) + proxy.top = this.min.item; + + // Trick so that node displays the name of the constructor + Object.defineProperty(proxy, 'constructor', { + value: FibonacciHeap, + enumerable: false + }); + + return proxy; +}; + +if (typeof Symbol !== 'undefined') + FibonacciHeap.prototype[Symbol.for('nodejs.util.inspect.custom')] = FibonacciHeap.prototype.inspect; + +/** + * Fibonacci Maximum Heap. 
+ * + * @constructor + */ +function MaxFibonacciHeap(comparator) { + this.clear(); + this.comparator = comparator || DEFAULT_COMPARATOR; + + if (typeof this.comparator !== 'function') + throw new Error('mnemonist/FibonacciHeap.constructor: given comparator should be a function.'); + + this.comparator = reverseComparator(this.comparator); +} + +MaxFibonacciHeap.prototype = FibonacciHeap.prototype; + +/** + * Static @.from function taking an arbitrary iterable & converting it into + * a heap. + * + * @param {Iterable} iterable - Target iterable. + * @param {function} comparator - Custom comparator function. + * @return {FibonacciHeap} + */ +FibonacciHeap.from = function(iterable, comparator) { + var heap = new FibonacciHeap(comparator); + + forEach(iterable, function(value) { + heap.push(value); + }); + + return heap; +}; + +MaxFibonacciHeap.from = function(iterable, comparator) { + var heap = new MaxFibonacciHeap(comparator); + + forEach(iterable, function(value) { + heap.push(value); + }); + + return heap; +}; + +/** + * Exporting. + */ +FibonacciHeap.MinFibonacciHeap = FibonacciHeap; +FibonacciHeap.MaxFibonacciHeap = MaxFibonacciHeap; +module.exports = FibonacciHeap; diff --git a/amplify/functions/downloadDocument/node_modules/mnemonist/fixed-critbit-tree-map.js b/amplify/functions/downloadDocument/node_modules/mnemonist/fixed-critbit-tree-map.js new file mode 100644 index 0000000..9658fee --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/mnemonist/fixed-critbit-tree-map.js @@ -0,0 +1,427 @@ +/* eslint no-constant-condition: 0 */ + +/* eslint-disable */ + +/** + * Mnemonist FixedFixedCritBitTreeMap + * =================================== + * + * TODO... + * + * [References]: + * https://cr.yp.to/critbit.html + * https://www.imperialviolet.org/binary/critbit.pdf + */ +var bitwise = require('./utils/bitwise.js'), + typed = require('./utils/typed-arrays.js'); + +/** + * Helpers. 
+ */ + +/** + * Helper returning the direction we need to take given a key and an + * encoded critbit. + * + * @param {string} key - Target key. + * @param {number} critbit - Packed address of byte + mask. + * @return {number} - 0, left or 1, right. + */ +function getDirection(key, critbit) { + var byteIndex = critbit >> 8; + + if (byteIndex > key.length - 1) + return 0; + + var byte = key.charCodeAt(byteIndex), + mask = critbit & 0xff; + + return byte & mask; +} + +/** + * Helper returning the packed address of byte + mask or -1 if strings + * are identical. + * + * @param {string} a - First key. + * @param {string} b - Second key. + * @return {number} - Packed address of byte + mask. + */ +function findCriticalBit(a, b) { + var i = 0, + tmp; + + // Swapping so a is the shortest + if (a.length > b.length) { + tmp = b; + b = a; + a = tmp; + } + + var l = a.length, + mask; + + while (i < l) { + if (a[i] !== b[i]) { + mask = bitwise.msb8( + a.charCodeAt(i) ^ b.charCodeAt(i) + ); + + return (i << 8) | mask; + } + + i++; + } + + // Strings are identical + if (a.length === b.length) + return -1; + + // NOTE: x ^ 0 is the same as x + mask = bitwise.msb8(b.charCodeAt(i)); + + return (i << 8) | mask; +} + +/** + * FixedCritBitTreeMap. + * + * @constructor + */ +function FixedCritBitTreeMap(capacity) { + + if (typeof capacity !== 'number' || capacity <= 0) + throw new Error('mnemonist/fixed-critbit-tree-map: `capacity` should be a positive number.'); + + // Properties + this.capacity = capacity; + this.offset = 0; + this.root = 0; + this.size = 0; + + var PointerArray = typed.getSignedPointerArray(capacity + 1); + + this.keys = new Array(capacity); + this.values = new Array(capacity); + this.lefts = new PointerArray(capacity - 1); + this.rights = new PointerArray(capacity - 1); + this.critbits = new Uint32Array(capacity); +} + +/** + * Method used to clear the FixedCritBitTreeMap. 
+ * + * @return {undefined} + */ +FixedCritBitTreeMap.prototype.clear = function() { + + // Properties + // TODO... + this.root = null; + this.size = 0; +}; + +/** + * Method used to set the value of the given key in the trie. + * + * @param {string} key - Key to set. + * @param {any} value - Arbitrary value. + * @return {FixedCritBitTreeMap} + */ +FixedCritBitTreeMap.prototype.set = function(key, value) { + var pointer; + + // TODO: yell if capacity is already full! + + // Tree is empty + if (this.size === 0) { + this.keys[0] = key; + this.values[0] = value; + + this.size++; + + this.root = -1; + + return this; + } + + // Walk state + var pointer = this.root, + newPointer, + leftOrRight, + opposite, + ancestors = [], + path = [], + ancestor, + parent, + child, + critbit, + internal, + best, + dir, + i, + l; + + // Walking the tree + while (true) { + + // Traversing an internal node + if (pointer > 0) { + pointer -= 1; + + // Choosing the correct direction + dir = getDirection(key, this.critbits[pointer]); + + leftOrRight = dir === 0 ? this.lefts : this.rights; + newPointer = leftOrRight[pointer]; + + if (newPointer === 0) { + + // Creating a fitting external node + pointer = this.size++; + leftOrRight[newPointer] = -(pointer + 1); + this.keys[pointer] = key; + this.values[pointer] = value; + return this; + } + + ancestors.push(pointer); + path.push(dir); + pointer = newPointer; + } + + // Reaching an external node + else { + pointer = -pointer; + pointer -= 1; + + // 1. Creating a new external node + critbit = findCriticalBit(key, this.keys[pointer]); + + // Key is identical, we just replace the value + if (critbit === -1) { + this.values[pointer] = value; + return this; + } + + internal = this.offset++; + newPointer = this.size++; + + this.keys[newPointer] = key; + this.values[newPointer] = value; + + this.critbits[internal] = critbit; + + dir = getDirection(key, critbit); + leftOrRight = dir === 0 ? this.lefts : this.rights; + opposite = dir === 0 ? 
this.rights : this.lefts; + + leftOrRight[internal] = -(newPointer + 1); + opposite[internal] = -(pointer + 1); + + // 2. Bubbling up + best = -1; + l = ancestors.length; + + for (i = l - 1; i >= 0; i--) { + ancestor = ancestors[i]; + + // TODO: this can be made faster + if ((this.critbits[ancestor] >> 8) > (critbit >> 8)) { + continue; + } + else if ((this.critbits[ancestor] >> 8) === (critbit >> 8)) { + if ((this.critbits[ancestor] & 0xff) < (critbit & 0xff)) + continue; + } + + best = i; + break; + } + + // Do we need to attach to the root? + if (best < 0) { + this.root = internal + 1; + + // Need to rewire parent as child? + if (l > 0) { + parent = ancestors[0]; + + opposite[internal] = parent + 1; + } + } + + // Simple case without rotation + else if (best === l - 1) { + parent = ancestors[best]; + dir = path[best]; + + leftOrRight = dir === 0 ? this.lefts : this.rights; + + leftOrRight[parent] = internal + 1; + } + + // Full rotation + else { + parent = ancestors[best]; + dir = path[best]; + child = ancestors[best + 1]; + + opposite[internal] = child + 1; + + leftOrRight = dir === 0 ? this.lefts : this.rights; + + leftOrRight[parent] = internal + 1; + } + + return this; + } + } +}; + +/** + * Method used to get the value attached to the given key in the tree or + * undefined if not found. + * + * @param {string} key - Key to get. + * @return {any} + */ +FixedCritBitTreeMap.prototype.get = function(key) { + + // Walk state + var pointer = this.root, + dir; + + // Walking the tree + while (true) { + + // Dead end + if (pointer === 0) + return; + + // Traversing an internal node + if (pointer > 0) { + pointer -= 1; + dir = getDirection(key, this.critbits[pointer]); + + pointer = dir === 0 ? 
this.lefts[pointer] : this.rights[pointer]; + } + + // Reaching an external node + else { + pointer = -pointer; + pointer -= 1; + + if (this.keys[pointer] !== key) + return; + + return this.values[pointer]; + } + } +}; + +/** + * Method used to return whether the given key exists in the tree. + * + * @param {string} key - Key to test. + * @return {boolean} + */ +FixedCritBitTreeMap.prototype.has = function(key) { + + // Walk state + var pointer = this.root, + dir; + + // Walking the tree + while (true) { + + // Dead end + if (pointer === 0) + return false; + + // Traversing an internal node + if (pointer > 0) { + pointer -= 1; + dir = getDirection(key, this.critbits[pointer]); + + pointer = dir === 0 ? this.lefts[pointer] : this.rights[pointer]; + } + + // Reaching an external node + else { + pointer = -pointer; + pointer -= 1; + + return this.keys[pointer] === key; + } + } +}; + +/** + * Method used to iterate over the tree in key order. + * + * @param {function} callback - Function to call for each item. + * @param {object} scope - Optional scope. + * @return {undefined} + */ +FixedCritBitTreeMap.prototype.forEach = function(callback, scope) { + scope = arguments.length > 1 ? scope : this; + + // Inorder traversal of the tree + var current = this.root, + stack = [], + p; + + while (true) { + + if (current !== 0) { + stack.push(current); + + current = current > 0 ? this.lefts[current - 1] : 0; + } + + else { + if (stack.length > 0) { + current = stack.pop(); + + if (current < 0) { + p = -current; + p -= 1; + + callback.call(scope, this.values[p], this.keys[p]); + } + + current = current > 0 ? this.rights[current - 1] : 0; + } + else { + break; + } + } + } +}; + +/** + * Convenience known methods. 
+ */ +FixedCritBitTreeMap.prototype.inspect = function() { + return this; +}; + +if (typeof Symbol !== 'undefined') + FixedCritBitTreeMap.prototype[Symbol.for('nodejs.util.inspect.custom')] = FixedCritBitTreeMap.prototype.inspect; + +/** + * Static @.from function taking an arbitrary iterable & converting it into + * a FixedCritBitTreeMap. + * + * @param {Iterable} iterable - Target iterable. + * @return {FixedCritBitTreeMap} + */ +// FixedCritBitTreeMap.from = function(iterable) { + +// }; + +/** + * Exporting. + */ +module.exports = FixedCritBitTreeMap; diff --git a/amplify/functions/downloadDocument/node_modules/mnemonist/fixed-deque.d.ts b/amplify/functions/downloadDocument/node_modules/mnemonist/fixed-deque.d.ts new file mode 100644 index 0000000..6e6b908 --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/mnemonist/fixed-deque.d.ts @@ -0,0 +1,34 @@ +/** + * Mnemonist FixedDeque Typings + * ============================= + */ +import {IArrayLikeConstructor} from './utils/types'; + +export default class FixedDeque implements Iterable { + + // Members + capacity: number; + size: number; + + // Constructor + constructor(ArrayClass: IArrayLikeConstructor, capacity: number); + + // Methods + clear(): void; + push(item: T): number; + unshift(item: T): number; + pop(): T | undefined; + shift(): T | undefined; + peekFirst(): T | undefined; + peekLast(): T | undefined; + get(index: number): T | undefined; + forEach(callback: (item: T, index: number, buffer: this) => void, scope?: any): void; + toArray(): Iterable; + values(): IterableIterator; + entries(): IterableIterator<[number, T]>; + [Symbol.iterator](): IterableIterator; + inspect(): any; + + // Statics + static from(iterable: Iterable | {[key: string] : I}, ArrayClass: IArrayLikeConstructor, capacity?: number): FixedDeque; +} diff --git a/amplify/functions/downloadDocument/node_modules/mnemonist/fixed-deque.js b/amplify/functions/downloadDocument/node_modules/mnemonist/fixed-deque.js new file mode 
100644 index 0000000..7b29858 --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/mnemonist/fixed-deque.js @@ -0,0 +1,351 @@ +/** + * Mnemonist FixedDeque + * ===================== + * + * Fixed capacity double-ended queue implemented as ring deque. + */ +var iterables = require('./utils/iterables.js'), + Iterator = require('obliterator/iterator'); + +/** + * FixedDeque. + * + * @constructor + */ +function FixedDeque(ArrayClass, capacity) { + + if (arguments.length < 2) + throw new Error('mnemonist/fixed-deque: expecting an Array class and a capacity.'); + + if (typeof capacity !== 'number' || capacity <= 0) + throw new Error('mnemonist/fixed-deque: `capacity` should be a positive number.'); + + this.ArrayClass = ArrayClass; + this.capacity = capacity; + this.items = new ArrayClass(this.capacity); + this.clear(); +} + +/** + * Method used to clear the structure. + * + * @return {undefined} + */ +FixedDeque.prototype.clear = function() { + + // Properties + this.start = 0; + this.size = 0; +}; + +/** + * Method used to append a value to the deque. + * + * @param {any} item - Item to append. + * @return {number} - Returns the new size of the deque. + */ +FixedDeque.prototype.push = function(item) { + if (this.size === this.capacity) + throw new Error('mnemonist/fixed-deque.push: deque capacity (' + this.capacity + ') exceeded!'); + + var index = (this.start + this.size) % this.capacity; + + this.items[index] = item; + + return ++this.size; +}; + +/** + * Method used to prepend a value to the deque. + * + * @param {any} item - Item to prepend. + * @return {number} - Returns the new size of the deque. 
+ */ +FixedDeque.prototype.unshift = function(item) { + if (this.size === this.capacity) + throw new Error('mnemonist/fixed-deque.unshift: deque capacity (' + this.capacity + ') exceeded!'); + + var index = this.start - 1; + + if (this.start === 0) + index = this.capacity - 1; + + this.items[index] = item; + this.start = index; + + return ++this.size; +}; + +/** + * Method used to pop the deque. + * + * @return {any} - Returns the popped item. + */ +FixedDeque.prototype.pop = function() { + if (this.size === 0) + return; + + const index = (this.start + this.size - 1) % this.capacity; + + this.size--; + + return this.items[index]; +}; + +/** + * Method used to shift the deque. + * + * @return {any} - Returns the shifted item. + */ +FixedDeque.prototype.shift = function() { + if (this.size === 0) + return; + + var index = this.start; + + this.size--; + this.start++; + + if (this.start === this.capacity) + this.start = 0; + + return this.items[index]; +}; + +/** + * Method used to peek the first value of the deque. + * + * @return {any} + */ +FixedDeque.prototype.peekFirst = function() { + if (this.size === 0) + return; + + return this.items[this.start]; +}; + +/** + * Method used to peek the last value of the deque. + * + * @return {any} + */ +FixedDeque.prototype.peekLast = function() { + if (this.size === 0) + return; + + var index = this.start + this.size - 1; + + if (index > this.capacity) + index -= this.capacity; + + return this.items[index]; +}; + +/** + * Method used to get the desired value of the deque. + * + * @param {number} index + * @return {any} + */ +FixedDeque.prototype.get = function(index) { + if (this.size === 0) + return; + + index = this.start + index; + + if (index > this.capacity) + index -= this.capacity; + + return this.items[index]; +}; + +/** + * Method used to iterate over the deque. + * + * @param {function} callback - Function to call for each item. + * @param {object} scope - Optional scope. 
+ * @return {undefined} + */ +FixedDeque.prototype.forEach = function(callback, scope) { + scope = arguments.length > 1 ? scope : this; + + var c = this.capacity, + l = this.size, + i = this.start, + j = 0; + + while (j < l) { + callback.call(scope, this.items[i], j, this); + i++; + j++; + + if (i === c) + i = 0; + } +}; + +/** + * Method used to convert the deque to a JavaScript array. + * + * @return {array} + */ +// TODO: optional array class as argument? +FixedDeque.prototype.toArray = function() { + + // Optimization + var offset = this.start + this.size; + + if (offset < this.capacity) + return this.items.slice(this.start, offset); + + var array = new this.ArrayClass(this.size), + c = this.capacity, + l = this.size, + i = this.start, + j = 0; + + while (j < l) { + array[j] = this.items[i]; + i++; + j++; + + if (i === c) + i = 0; + } + + return array; +}; + +/** + * Method used to create an iterator over the deque's values. + * + * @return {Iterator} + */ +FixedDeque.prototype.values = function() { + var items = this.items, + c = this.capacity, + l = this.size, + i = this.start, + j = 0; + + return new Iterator(function() { + if (j >= l) + return { + done: true + }; + + var value = items[i]; + + i++; + j++; + + if (i === c) + i = 0; + + return { + value: value, + done: false + }; + }); +}; + +/** + * Method used to create an iterator over the deque's entries. + * + * @return {Iterator} + */ +FixedDeque.prototype.entries = function() { + var items = this.items, + c = this.capacity, + l = this.size, + i = this.start, + j = 0; + + return new Iterator(function() { + if (j >= l) + return { + done: true + }; + + var value = items[i]; + + i++; + + if (i === c) + i = 0; + + return { + value: [j++, value], + done: false + }; + }); +}; + +/** + * Attaching the #.values method to Symbol.iterator if possible. + */ +if (typeof Symbol !== 'undefined') + FixedDeque.prototype[Symbol.iterator] = FixedDeque.prototype.values; + +/** + * Convenience known methods. 
+ */ +FixedDeque.prototype.inspect = function() { + var array = this.toArray(); + + array.type = this.ArrayClass.name; + array.capacity = this.capacity; + + // Trick so that node displays the name of the constructor + Object.defineProperty(array, 'constructor', { + value: FixedDeque, + enumerable: false + }); + + return array; +}; + +if (typeof Symbol !== 'undefined') + FixedDeque.prototype[Symbol.for('nodejs.util.inspect.custom')] = FixedDeque.prototype.inspect; + +/** + * Static @.from function taking an arbitrary iterable & converting it into + * a deque. + * + * @param {Iterable} iterable - Target iterable. + * @param {function} ArrayClass - Array class to use. + * @param {number} capacity - Desired capacity. + * @return {FiniteStack} + */ +FixedDeque.from = function(iterable, ArrayClass, capacity) { + if (arguments.length < 3) { + capacity = iterables.guessLength(iterable); + + if (typeof capacity !== 'number') + throw new Error('mnemonist/fixed-deque.from: could not guess iterable length. Please provide desired capacity as last argument.'); + } + + var deque = new FixedDeque(ArrayClass, capacity); + + if (iterables.isArrayLike(iterable)) { + var i, l; + + for (i = 0, l = iterable.length; i < l; i++) + deque.items[i] = iterable[i]; + + deque.size = l; + + return deque; + } + + iterables.forEach(iterable, function(value) { + deque.push(value); + }); + + return deque; +}; + +/** + * Exporting. 
+ */ +module.exports = FixedDeque; diff --git a/amplify/functions/downloadDocument/node_modules/mnemonist/fixed-reverse-heap.d.ts b/amplify/functions/downloadDocument/node_modules/mnemonist/fixed-reverse-heap.d.ts new file mode 100644 index 0000000..668c556 --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/mnemonist/fixed-reverse-heap.d.ts @@ -0,0 +1,25 @@ +/** + * Mnemonist FixedReverseHeap Typings + * =================================== + */ +import {IArrayLikeConstructor} from './utils/types'; + +type HeapComparator = (a: T, b: T) => number; + +export default class FixedReverseHeap { + + // Members + capacity: number; + size: number; + + // Constructor + constructor(ArrayClass: IArrayLikeConstructor, comparator: HeapComparator, capacity: number); + constructor(ArrayClass: IArrayLikeConstructor, capacity: number); + + // Methods + clear(): void; + push(item: T): number; + consume(): Iterable; + toArray(): Iterable; + inspect(): any; +} diff --git a/amplify/functions/downloadDocument/node_modules/mnemonist/fixed-reverse-heap.js b/amplify/functions/downloadDocument/node_modules/mnemonist/fixed-reverse-heap.js new file mode 100644 index 0000000..197aac4 --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/mnemonist/fixed-reverse-heap.js @@ -0,0 +1,209 @@ +/** + * Mnemonist Fixed Reverse Heap + * ============================= + * + * Static heap implementation with fixed capacity. It's a "reverse" heap + * because it stores the elements in reverse so we can replace the worst + * item in logarithmic time. As such, one cannot pop this heap but can only + * consume it at the end. This structure is very efficient when trying to + * find the n smallest/largest items from a larger query (k nearest neigbors + * for instance). 
+ */ +var comparators = require('./utils/comparators.js'), + Heap = require('./heap.js'); + +var DEFAULT_COMPARATOR = comparators.DEFAULT_COMPARATOR, + reverseComparator = comparators.reverseComparator; + +/** + * Helper functions. + */ + +/** + * Function used to sift up. + * + * @param {function} compare - Comparison function. + * @param {array} heap - Array storing the heap's data. + * @param {number} size - Heap's true size. + * @param {number} i - Index. + */ +function siftUp(compare, heap, size, i) { + var endIndex = size, + startIndex = i, + item = heap[i], + childIndex = 2 * i + 1, + rightIndex; + + while (childIndex < endIndex) { + rightIndex = childIndex + 1; + + if ( + rightIndex < endIndex && + compare(heap[childIndex], heap[rightIndex]) >= 0 + ) { + childIndex = rightIndex; + } + + heap[i] = heap[childIndex]; + i = childIndex; + childIndex = 2 * i + 1; + } + + heap[i] = item; + Heap.siftDown(compare, heap, startIndex, i); +} + +/** + * Fully consumes the given heap. + * + * @param {function} ArrayClass - Array class to use. + * @param {function} compare - Comparison function. + * @param {array} heap - Array storing the heap's data. + * @param {number} size - True size of the heap. + * @return {array} + */ +function consume(ArrayClass, compare, heap, size) { + var l = size, + i = l; + + var array = new ArrayClass(size), + lastItem, + item; + + while (i > 0) { + lastItem = heap[--i]; + + if (i !== 0) { + item = heap[0]; + heap[0] = lastItem; + siftUp(compare, heap, --size, 0); + lastItem = item; + } + + array[i] = lastItem; + } + + return array; +} + +/** + * Binary Minimum FixedReverseHeap. + * + * @constructor + * @param {function} ArrayClass - The class of array to use. + * @param {function} comparator - Comparator function. + * @param {number} capacity - Maximum number of items to keep. 
+ */ +function FixedReverseHeap(ArrayClass, comparator, capacity) { + + // Comparator can be omitted + if (arguments.length === 2) { + capacity = comparator; + comparator = null; + } + + this.ArrayClass = ArrayClass; + this.capacity = capacity; + + this.items = new ArrayClass(capacity); + this.clear(); + this.comparator = comparator || DEFAULT_COMPARATOR; + + if (typeof capacity !== 'number' && capacity <= 0) + throw new Error('mnemonist/FixedReverseHeap.constructor: capacity should be a number > 0.'); + + if (typeof this.comparator !== 'function') + throw new Error('mnemonist/FixedReverseHeap.constructor: given comparator should be a function.'); + + this.comparator = reverseComparator(this.comparator); +} + +/** + * Method used to clear the heap. + * + * @return {undefined} + */ +FixedReverseHeap.prototype.clear = function() { + + // Properties + this.size = 0; +}; + +/** + * Method used to push an item into the heap. + * + * @param {any} item - Item to push. + * @return {number} + */ +FixedReverseHeap.prototype.push = function(item) { + + // Still some place + if (this.size < this.capacity) { + this.items[this.size] = item; + Heap.siftDown(this.comparator, this.items, 0, this.size); + this.size++; + } + + // Heap is full, we need to replace worst item + else { + + if (this.comparator(item, this.items[0]) > 0) + Heap.replace(this.comparator, this.items, item); + } + + return this.size; +}; + +/** + * Method used to peek the worst item in the heap. + * + * @return {any} + */ +FixedReverseHeap.prototype.peek = function() { + return this.items[0]; +}; + +/** + * Method used to consume the heap fully and return its items as a sorted array. + * + * @return {array} + */ +FixedReverseHeap.prototype.consume = function() { + var items = consume(this.ArrayClass, this.comparator, this.items, this.size); + this.size = 0; + + return items; +}; + +/** + * Method used to convert the heap to an array. Note that it basically clone + * the heap and consumes it completely. 
This is hardly performant. + * + * @return {array} + */ +FixedReverseHeap.prototype.toArray = function() { + return consume(this.ArrayClass, this.comparator, this.items.slice(0, this.size), this.size); +}; + +/** + * Convenience known methods. + */ +FixedReverseHeap.prototype.inspect = function() { + var proxy = this.toArray(); + + // Trick so that node displays the name of the constructor + Object.defineProperty(proxy, 'constructor', { + value: FixedReverseHeap, + enumerable: false + }); + + return proxy; +}; + +if (typeof Symbol !== 'undefined') + FixedReverseHeap.prototype[Symbol.for('nodejs.util.inspect.custom')] = FixedReverseHeap.prototype.inspect; + +/** + * Exporting. + */ +module.exports = FixedReverseHeap; diff --git a/amplify/functions/downloadDocument/node_modules/mnemonist/fixed-stack.d.ts b/amplify/functions/downloadDocument/node_modules/mnemonist/fixed-stack.d.ts new file mode 100644 index 0000000..9965853 --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/mnemonist/fixed-stack.d.ts @@ -0,0 +1,36 @@ +/** + * Mnemonist FixedStack Typings + * ============================= + */ +import {IArrayLikeConstructor} from './utils/types'; + +export default class FixedStack implements Iterable { + + // Members + capacity: number; + size: number; + + // Constructor + constructor(ArrayClass: IArrayLikeConstructor, capacity: number); + + // Methods + clear(): void; + push(item: T): number; + pop(): T | undefined; + peek(): T | undefined; + forEach(callback: (item: T, index: number, stack: this) => void, scope?: any): void; + toArray(): Iterable; + values(): IterableIterator; + entries(): IterableIterator<[number, T]>; + [Symbol.iterator](): IterableIterator; + toString(): string; + toJSON(): Iterable; + inspect(): any; + + // Statics + static from( + iterable: Iterable | {[key: string] : I}, + ArrayClass: IArrayLikeConstructor, + capacity?: number + ): FixedStack; +} diff --git 
a/amplify/functions/downloadDocument/node_modules/mnemonist/fixed-stack.js b/amplify/functions/downloadDocument/node_modules/mnemonist/fixed-stack.js new file mode 100644 index 0000000..c5b5f48 --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/mnemonist/fixed-stack.js @@ -0,0 +1,242 @@ +/** + * Mnemonist FixedStack + * ===================== + * + * The fixed stack is a stack whose capacity is defined beforehand and that + * cannot be exceeded. This class is really useful when combined with + * byte arrays to save up some memory and avoid memory re-allocation, hence + * speeding up computations. + * + * This has however a downside: you need to know the maximum size you stack + * can have during your iteration (which is not too difficult to compute when + * performing, say, a DFS on a balanced binary tree). + */ +var Iterator = require('obliterator/iterator'), + iterables = require('./utils/iterables.js'); + +/** + * FixedStack + * + * @constructor + * @param {function} ArrayClass - Array class to use. + * @param {number} capacity - Desired capacity. + */ +function FixedStack(ArrayClass, capacity) { + + if (arguments.length < 2) + throw new Error('mnemonist/fixed-stack: expecting an Array class and a capacity.'); + + if (typeof capacity !== 'number' || capacity <= 0) + throw new Error('mnemonist/fixed-stack: `capacity` should be a positive number.'); + + this.capacity = capacity; + this.ArrayClass = ArrayClass; + this.items = new this.ArrayClass(this.capacity); + this.clear(); +} + +/** + * Method used to clear the stack. + * + * @return {undefined} + */ +FixedStack.prototype.clear = function() { + + // Properties + this.size = 0; +}; + +/** + * Method used to add an item to the stack. + * + * @param {any} item - Item to add. 
+ * @return {number} + */ +FixedStack.prototype.push = function(item) { + if (this.size === this.capacity) + throw new Error('mnemonist/fixed-stack.push: stack capacity (' + this.capacity + ') exceeded!'); + + this.items[this.size++] = item; + return this.size; +}; + +/** + * Method used to retrieve & remove the last item of the stack. + * + * @return {any} + */ +FixedStack.prototype.pop = function() { + if (this.size === 0) + return; + + return this.items[--this.size]; +}; + +/** + * Method used to get the last item of the stack. + * + * @return {any} + */ +FixedStack.prototype.peek = function() { + return this.items[this.size - 1]; +}; + +/** + * Method used to iterate over the stack. + * + * @param {function} callback - Function to call for each item. + * @param {object} scope - Optional scope. + * @return {undefined} + */ +FixedStack.prototype.forEach = function(callback, scope) { + scope = arguments.length > 1 ? scope : this; + + for (var i = 0, l = this.items.length; i < l; i++) + callback.call(scope, this.items[l - i - 1], i, this); +}; + +/** + * Method used to convert the stack to a JavaScript array. + * + * @return {array} + */ +FixedStack.prototype.toArray = function() { + var array = new this.ArrayClass(this.size), + l = this.size - 1, + i = this.size; + + while (i--) + array[i] = this.items[l - i]; + + return array; +}; + +/** + * Method used to create an iterator over a stack's values. + * + * @return {Iterator} + */ +FixedStack.prototype.values = function() { + var items = this.items, + l = this.size, + i = 0; + + return new Iterator(function() { + if (i >= l) + return { + done: true + }; + + var value = items[l - i - 1]; + i++; + + return { + value: value, + done: false + }; + }); +}; + +/** + * Method used to create an iterator over a stack's entries. 
+ * + * @return {Iterator} + */ +FixedStack.prototype.entries = function() { + var items = this.items, + l = this.size, + i = 0; + + return new Iterator(function() { + if (i >= l) + return { + done: true + }; + + var value = items[l - i - 1]; + + return { + value: [i++, value], + done: false + }; + }); +}; + +/** + * Attaching the #.values method to Symbol.iterator if possible. + */ +if (typeof Symbol !== 'undefined') + FixedStack.prototype[Symbol.iterator] = FixedStack.prototype.values; + + +/** + * Convenience known methods. + */ +FixedStack.prototype.toString = function() { + return this.toArray().join(','); +}; + +FixedStack.prototype.toJSON = function() { + return this.toArray(); +}; + +FixedStack.prototype.inspect = function() { + var array = this.toArray(); + + array.type = this.ArrayClass.name; + array.capacity = this.capacity; + + // Trick so that node displays the name of the constructor + Object.defineProperty(array, 'constructor', { + value: FixedStack, + enumerable: false + }); + + return array; +}; + +if (typeof Symbol !== 'undefined') + FixedStack.prototype[Symbol.for('nodejs.util.inspect.custom')] = FixedStack.prototype.inspect; + +/** + * Static @.from function taking an arbitrary iterable & converting it into + * a stack. + * + * @param {Iterable} iterable - Target iterable. + * @param {function} ArrayClass - Array class to use. + * @param {number} capacity - Desired capacity. + * @return {FixedStack} + */ +FixedStack.from = function(iterable, ArrayClass, capacity) { + + if (arguments.length < 3) { + capacity = iterables.guessLength(iterable); + + if (typeof capacity !== 'number') + throw new Error('mnemonist/fixed-stack.from: could not guess iterable length. 
Please provide desired capacity as last argument.'); + } + + var stack = new FixedStack(ArrayClass, capacity); + + if (iterables.isArrayLike(iterable)) { + var i, l; + + for (i = 0, l = iterable.length; i < l; i++) + stack.items[i] = iterable[i]; + + stack.size = l; + + return stack; + } + + iterables.forEach(iterable, function(value) { + stack.push(value); + }); + + return stack; +}; + +/** + * Exporting. + */ +module.exports = FixedStack; diff --git a/amplify/functions/downloadDocument/node_modules/mnemonist/fuzzy-map.d.ts b/amplify/functions/downloadDocument/node_modules/mnemonist/fuzzy-map.d.ts new file mode 100644 index 0000000..7a1644d --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/mnemonist/fuzzy-map.d.ts @@ -0,0 +1,33 @@ +/** + * Mnemonist FuzzyMap Typings + * ========================== + */ +type HashFunction = (key: any) => K; +type HashFunctionsTuple = [HashFunction, HashFunction]; + +export default class FuzzyMap implements Iterable { + + // Members + size: number; + + // Constructor + constructor(hashFunction: HashFunction); + constructor(hashFunctionsTuple: HashFunctionsTuple); + + // Methods + clear(): void; + add(key: V): this; + set(key: K, value: V): this; + get(key: any): V | undefined; + has(key: any): boolean; + forEach(callback: (value: V, key: V) => void, scope?: this): void; + values(): IterableIterator; + [Symbol.iterator](): IterableIterator; + inspect(): any; + + // Statics + static from( + iterable: Iterable<[I, J]> | {[key: string]: J}, + hashFunction: HashFunction | HashFunctionsTuple, + ): FuzzyMap; +} \ No newline at end of file diff --git a/amplify/functions/downloadDocument/node_modules/mnemonist/fuzzy-map.js b/amplify/functions/downloadDocument/node_modules/mnemonist/fuzzy-map.js new file mode 100644 index 0000000..b0d52e1 --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/mnemonist/fuzzy-map.js @@ -0,0 +1,185 @@ +/** + * Mnemonist Fuzzy Map + * ==================== + * + * The fuzzy map is a 
map whose keys are processed by a function before + * read/write operations. This can often result in multiple keys accessing + * the same resource (example: a map with lowercased keys). + */ +var forEach = require('obliterator/foreach'); + +var identity = function(x) { + return x; +}; + +/** + * FuzzyMap. + * + * @constructor + * @param {array|function} descriptor - Hash functions descriptor. + */ +function FuzzyMap(descriptor) { + this.items = new Map(); + this.clear(); + + if (Array.isArray(descriptor)) { + this.writeHashFunction = descriptor[0]; + this.readHashFunction = descriptor[1]; + } + else { + this.writeHashFunction = descriptor; + this.readHashFunction = descriptor; + } + + if (!this.writeHashFunction) + this.writeHashFunction = identity; + if (!this.readHashFunction) + this.readHashFunction = identity; + + if (typeof this.writeHashFunction !== 'function') + throw new Error('mnemonist/FuzzyMap.constructor: invalid hash function given.'); + + if (typeof this.readHashFunction !== 'function') + throw new Error('mnemonist/FuzzyMap.constructor: invalid hash function given.'); +} + +/** + * Method used to clear the structure. + * + * @return {undefined} + */ +FuzzyMap.prototype.clear = function() { + this.items.clear(); + + // Properties + this.size = 0; +}; + +/** + * Method used to add an item to the FuzzyMap. + * + * @param {any} item - Item to add. + * @return {FuzzyMap} + */ +FuzzyMap.prototype.add = function(item) { + var key = this.writeHashFunction(item); + + this.items.set(key, item); + this.size = this.items.size; + + return this; +}; + +/** + * Method used to set an item in the FuzzyMap using the given key. + * + * @param {any} key - Key to use. + * @param {any} item - Item to add. + * @return {FuzzyMap} + */ +FuzzyMap.prototype.set = function(key, item) { + key = this.writeHashFunction(key); + + this.items.set(key, item); + this.size = this.items.size; + + return this; +}; + +/** + * Method used to retrieve an item from the FuzzyMap. 
+ * + * @param {any} key - Key to use. + * @return {any} + */ +FuzzyMap.prototype.get = function(key) { + key = this.readHashFunction(key); + + return this.items.get(key); +}; + +/** + * Method used to test the existence of an item in the map. + * + * @param {any} key - Key to check. + * @return {boolean} + */ +FuzzyMap.prototype.has = function(key) { + key = this.readHashFunction(key); + + return this.items.has(key); +}; + +/** + * Method used to iterate over each of the FuzzyMap's values. + * + * @param {function} callback - Function to call for each item. + * @param {object} scope - Optional scope. + * @return {undefined} + */ +FuzzyMap.prototype.forEach = function(callback, scope) { + scope = arguments.length > 1 ? scope : this; + + this.items.forEach(function(value) { + callback.call(scope, value, value); + }); +}; + +/** + * Method returning an iterator over the FuzzyMap's values. + * + * @return {FuzzyMapIterator} + */ +FuzzyMap.prototype.values = function() { + return this.items.values(); +}; + +/** + * Attaching the #.values method to Symbol.iterator if possible. + */ +if (typeof Symbol !== 'undefined') + FuzzyMap.prototype[Symbol.iterator] = FuzzyMap.prototype.values; + +/** + * Convenience known method. + */ +FuzzyMap.prototype.inspect = function() { + var array = Array.from(this.items.values()); + + Object.defineProperty(array, 'constructor', { + value: FuzzyMap, + enumerable: false + }); + + return array; +}; + +if (typeof Symbol !== 'undefined') + FuzzyMap.prototype[Symbol.for('nodejs.util.inspect.custom')] = FuzzyMap.prototype.inspect; + +/** + * Static @.from function taking an arbitrary iterable & converting it into + * a structure. + * + * @param {Iterable} iterable - Target iterable. + * @param {array|function} descriptor - Hash functions descriptor. 
+ * @param {boolean} useSet - Whether to use #.set or #.add + * @return {FuzzyMap} + */ +FuzzyMap.from = function(iterable, descriptor, useSet) { + var map = new FuzzyMap(descriptor); + + forEach(iterable, function(value, key) { + if (useSet) + map.set(key, value); + else + map.add(value); + }); + + return map; +}; + +/** + * Exporting. + */ +module.exports = FuzzyMap; diff --git a/amplify/functions/downloadDocument/node_modules/mnemonist/fuzzy-multi-map.d.ts b/amplify/functions/downloadDocument/node_modules/mnemonist/fuzzy-multi-map.d.ts new file mode 100644 index 0000000..62b8250 --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/mnemonist/fuzzy-multi-map.d.ts @@ -0,0 +1,36 @@ +/** + * Mnemonist FuzzyMultiMap Typings + * ================================ + */ +type HashFunction = (key: any) => K; +type HashFunctionsTuple = [HashFunction, HashFunction]; +type FuzzyMultiMapContainer = ArrayConstructor | SetConstructor; + +export default class FuzzyMultiMap implements Iterable { + + // Members + dimension: number; + size: number; + + // Constructor + constructor(hashFunction: HashFunction, Container?: FuzzyMultiMapContainer); + constructor(hashFunctions: HashFunctionsTuple, Container?: FuzzyMultiMapContainer); + + // Methods + clear(): void; + add(value: V): this; + set(key: K, value: V): this; + get(key: any): Array | Set | undefined; + has(key: any): boolean; + forEach(callback: (value: V, key: V) => void, scope?: any): void; + values(): IterableIterator; + [Symbol.iterator](): IterableIterator; + inspect(): any; + + // Statics + static from( + iterable: Iterable<[I, J]> | {[key: string]: J}, + hashFunction: HashFunction | HashFunctionsTuple, + Container?: FuzzyMultiMapContainer + ): FuzzyMultiMap; +} diff --git a/amplify/functions/downloadDocument/node_modules/mnemonist/fuzzy-multi-map.js b/amplify/functions/downloadDocument/node_modules/mnemonist/fuzzy-multi-map.js new file mode 100644 index 0000000..78b2b08 --- /dev/null +++ 
b/amplify/functions/downloadDocument/node_modules/mnemonist/fuzzy-multi-map.js @@ -0,0 +1,196 @@ +/** + * Mnemonist FuzzyMultiMap + * ======================== + * + * Same as the fuzzy map but relying on a MultiMap rather than a Map. + */ +var MultiMap = require('./multi-map.js'), + forEach = require('obliterator/foreach'); + +var identity = function(x) { + return x; +}; + +/** + * FuzzyMultiMap. + * + * @constructor + * @param {array|function} descriptor - Hash functions descriptor. + * @param {function} Container - Container to use. + */ +function FuzzyMultiMap(descriptor, Container) { + this.items = new MultiMap(Container); + this.clear(); + + if (Array.isArray(descriptor)) { + this.writeHashFunction = descriptor[0]; + this.readHashFunction = descriptor[1]; + } + else { + this.writeHashFunction = descriptor; + this.readHashFunction = descriptor; + } + + if (!this.writeHashFunction) + this.writeHashFunction = identity; + if (!this.readHashFunction) + this.readHashFunction = identity; + + if (typeof this.writeHashFunction !== 'function') + throw new Error('mnemonist/FuzzyMultiMap.constructor: invalid hash function given.'); + + if (typeof this.readHashFunction !== 'function') + throw new Error('mnemonist/FuzzyMultiMap.constructor: invalid hash function given.'); +} + +/** + * Method used to clear the structure. + * + * @return {undefined} + */ +FuzzyMultiMap.prototype.clear = function() { + this.items.clear(); + + // Properties + this.size = 0; + this.dimension = 0; +}; + +/** + * Method used to add an item to the index. + * + * @param {any} item - Item to add. + * @return {FuzzyMultiMap} + */ +FuzzyMultiMap.prototype.add = function(item) { + var key = this.writeHashFunction(item); + + this.items.set(key, item); + this.size = this.items.size; + this.dimension = this.items.dimension; + + return this; +}; + +/** + * Method used to set an item in the index using the given key. + * + * @param {any} key - Key to use. + * @param {any} item - Item to add. 
+ * @return {FuzzyMultiMap} + */ +FuzzyMultiMap.prototype.set = function(key, item) { + key = this.writeHashFunction(key); + + this.items.set(key, item); + this.size = this.items.size; + this.dimension = this.items.dimension; + + return this; +}; + +/** + * Method used to retrieve an item from the index. + * + * @param {any} key - Key to use. + * @return {any} + */ +FuzzyMultiMap.prototype.get = function(key) { + key = this.readHashFunction(key); + + return this.items.get(key); +}; + +/** + * Method used to test the existence of an item in the map. + * + * @param {any} key - Key to check. + * @return {boolean} + */ +FuzzyMultiMap.prototype.has = function(key) { + key = this.readHashFunction(key); + + return this.items.has(key); +}; + +/** + * Method used to iterate over each of the index's values. + * + * @param {function} callback - Function to call for each item. + * @param {object} scope - Optional scope. + * @return {undefined} + */ +FuzzyMultiMap.prototype.forEach = function(callback, scope) { + scope = arguments.length > 1 ? scope : this; + + this.items.forEach(function(value) { + callback.call(scope, value, value); + }); +}; + +/** + * Method returning an iterator over the index's values. + * + * @return {FuzzyMultiMapIterator} + */ +FuzzyMultiMap.prototype.values = function() { + return this.items.values(); +}; + +/** + * Attaching the #.values method to Symbol.iterator if possible. + */ +if (typeof Symbol !== 'undefined') + FuzzyMultiMap.prototype[Symbol.iterator] = FuzzyMultiMap.prototype.values; + +/** + * Convenience known method. 
+ */ +FuzzyMultiMap.prototype.inspect = function() { + var array = Array.from(this); + + Object.defineProperty(array, 'constructor', { + value: FuzzyMultiMap, + enumerable: false + }); + + return array; +}; + +if (typeof Symbol !== 'undefined') + FuzzyMultiMap.prototype[Symbol.for('nodejs.util.inspect.custom')] = FuzzyMultiMap.prototype.inspect; + +/** + * Static @.from function taking an arbitrary iterable & converting it into + * a structure. + * + * @param {Iterable} iterable - Target iterable. + * @param {array|function} descriptor - Hash functions descriptor. + * @param {function} Container - Container to use. + * @param {boolean} useSet - Whether to use #.set or #.add + * @return {FuzzyMultiMap} + */ +FuzzyMultiMap.from = function(iterable, descriptor, Container, useSet) { + if (arguments.length === 3) { + if (typeof Container === 'boolean') { + useSet = Container; + Container = Array; + } + } + + var map = new FuzzyMultiMap(descriptor, Container); + + forEach(iterable, function(value, key) { + if (useSet) + map.set(key, value); + else + map.add(value); + }); + + return map; +}; + +/** + * Exporting. 
+ */ +module.exports = FuzzyMultiMap; diff --git a/amplify/functions/downloadDocument/node_modules/mnemonist/hashed-array-tree.d.ts b/amplify/functions/downloadDocument/node_modules/mnemonist/hashed-array-tree.d.ts new file mode 100644 index 0000000..eb56f7c --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/mnemonist/hashed-array-tree.d.ts @@ -0,0 +1,32 @@ +/** + * Mnemonist HashedArrayTree Typings + * ================================== + */ +import {IArrayLikeConstructor} from './utils/types'; + +type HashedArrayTreeOptions = { + initialCapacity?: number; + initialLength?: number; + blockSize?: number; +} + +export default class HashedArrayTree { + + // Members + blockSize: number; + capacity: number; + length: number; + + // Constructor + constructor(ArrayClass: IArrayLikeConstructor, capacity: number); + constructor(ArrayClass: IArrayLikeConstructor, options: HashedArrayTreeOptions); + + // Methods + set(index: number, value: T): this; + get(index: number): T | undefined; + grow(capacity: number): this; + resize(length: number): this; + push(value: T): number; + pop(): T | undefined; + inspect(): any; +} diff --git a/amplify/functions/downloadDocument/node_modules/mnemonist/hashed-array-tree.js b/amplify/functions/downloadDocument/node_modules/mnemonist/hashed-array-tree.js new file mode 100644 index 0000000..a51667c --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/mnemonist/hashed-array-tree.js @@ -0,0 +1,209 @@ +/** + * Mnemonist HashedArrayTree + * ========================== + * + * Abstract implementation of a hashed array tree representing arrays growing + * dynamically. + */ + +/** + * Defaults. + */ +var DEFAULT_BLOCK_SIZE = 1024; + +/** + * Helpers. + */ +function powerOfTwo(x) { + return (x & (x - 1)) === 0; +} + +/** + * HashedArrayTree. + * + * @constructor + * @param {function} ArrayClass - An array constructor. + * @param {number|object} initialCapacityOrOptions - Self-explanatory. 
+ */ +function HashedArrayTree(ArrayClass, initialCapacityOrOptions) { + if (arguments.length < 1) + throw new Error('mnemonist/hashed-array-tree: expecting at least a byte array constructor.'); + + var initialCapacity = initialCapacityOrOptions || 0, + blockSize = DEFAULT_BLOCK_SIZE, + initialLength = 0; + + if (typeof initialCapacityOrOptions === 'object') { + initialCapacity = initialCapacityOrOptions.initialCapacity || 0; + initialLength = initialCapacityOrOptions.initialLength || 0; + blockSize = initialCapacityOrOptions.blockSize || DEFAULT_BLOCK_SIZE; + } + + if (!blockSize || !powerOfTwo(blockSize)) + throw new Error('mnemonist/hashed-array-tree: block size should be a power of two.'); + + var capacity = Math.max(initialLength, initialCapacity), + initialBlocks = Math.ceil(capacity / blockSize); + + this.ArrayClass = ArrayClass; + this.length = initialLength; + this.capacity = initialBlocks * blockSize; + this.blockSize = blockSize; + this.offsetMask = blockSize - 1; + this.blockMask = Math.log2(blockSize); + + // Allocating initial blocks + this.blocks = new Array(initialBlocks); + + for (var i = 0; i < initialBlocks; i++) + this.blocks[i] = new this.ArrayClass(this.blockSize); +} + +/** + * Method used to set a value. + * + * @param {number} index - Index to edit. + * @param {any} value - Value. + * @return {HashedArrayTree} + */ +HashedArrayTree.prototype.set = function(index, value) { + + // Out of bounds? + if (this.length < index) + throw new Error('HashedArrayTree(' + this.ArrayClass.name + ').set: index out of bounds.'); + + var block = index >> this.blockMask, + i = index & this.offsetMask; + + this.blocks[block][i] = value; + + return this; +}; + +/** + * Method used to get a value. + * + * @param {number} index - Index to retrieve. 
+ * @return {any} + */ +HashedArrayTree.prototype.get = function(index) { + if (this.length < index) + return; + + var block = index >> this.blockMask, + i = index & this.offsetMask; + + return this.blocks[block][i]; +}; + +/** + * Method used to grow the array. + * + * @param {number} capacity - Optional capacity to accomodate. + * @return {HashedArrayTree} + */ +HashedArrayTree.prototype.grow = function(capacity) { + if (typeof capacity !== 'number') + capacity = this.capacity + this.blockSize; + + if (this.capacity >= capacity) + return this; + + while (this.capacity < capacity) { + this.blocks.push(new this.ArrayClass(this.blockSize)); + this.capacity += this.blockSize; + } + + return this; +}; + +/** + * Method used to resize the array. Won't deallocate. + * + * @param {number} length - Target length. + * @return {HashedArrayTree} + */ +HashedArrayTree.prototype.resize = function(length) { + if (length === this.length) + return this; + + if (length < this.length) { + this.length = length; + return this; + } + + this.length = length; + this.grow(length); + + return this; +}; + +/** + * Method used to push a value into the array. + * + * @param {any} value - Value to push. + * @return {number} - Length of the array. + */ +HashedArrayTree.prototype.push = function(value) { + if (this.capacity === this.length) + this.grow(); + + var index = this.length; + + var block = index >> this.blockMask, + i = index & this.offsetMask; + + this.blocks[block][i] = value; + + return ++this.length; +}; + +/** + * Method used to pop the last value of the array. + * + * @return {number} - The popped value. + */ +HashedArrayTree.prototype.pop = function() { + if (this.length === 0) + return; + + var lastBlock = this.blocks[this.blocks.length - 1]; + + var i = (--this.length) & this.offsetMask; + + return lastBlock[i]; +}; + +/** + * Convenience known methods. 
+ */ +HashedArrayTree.prototype.inspect = function() { + var proxy = new this.ArrayClass(this.length), + block; + + for (var i = 0, l = this.length; i < l; i++) { + block = i >> this.blockMask; + proxy[i] = this.blocks[block][i & this.offsetMask]; + } + + proxy.type = this.ArrayClass.name; + proxy.items = this.length; + proxy.capacity = this.capacity; + proxy.blockSize = this.blockSize; + + // Trick so that node displays the name of the constructor + Object.defineProperty(proxy, 'constructor', { + value: HashedArrayTree, + enumerable: false + }); + + return proxy; +}; + +if (typeof Symbol !== 'undefined') + HashedArrayTree.prototype[Symbol.for('nodejs.util.inspect.custom')] = HashedArrayTree.prototype.inspect; + +/** + * Exporting. + */ +module.exports = HashedArrayTree; diff --git a/amplify/functions/downloadDocument/node_modules/mnemonist/heap.d.ts b/amplify/functions/downloadDocument/node_modules/mnemonist/heap.d.ts new file mode 100644 index 0000000..c6aa219 --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/mnemonist/heap.d.ts @@ -0,0 +1,84 @@ +/** + * Mnemonist Heap Typings + * ======================= + */ +type HeapComparator = (a: T, b: T) => number; + +export default class Heap { + + // Members + size: number; + + // Constructor + constructor(comparator?: HeapComparator); + + // Methods + clear(): void; + push(item: T): number; + peek(): T | undefined; + pop(): T | undefined; + replace(item: T): T | undefined; + pushpop(item: T): T | undefined; + toArray(): Array; + consume(): Array; + inspect(): any; + + // Statics + static from( + iterable: Iterable | {[key: string] : I}, + comparator?: HeapComparator + ): Heap; +} + +export class MinHeap { + + // Members + size: number; + + // Constructor + constructor(comparator?: HeapComparator); + + // Methods + clear(): void; + push(item: T): number; + peek(): T | undefined; + pop(): T | undefined; + replace(item: T): T | undefined; + pushpop(item: T): T | undefined; + toArray(): Array; + consume(): 
Array; + inspect(): any; +} + +export class MaxHeap { + + // Members + size: number; + + // Constructor + constructor(comparator?: HeapComparator); + + // Methods + clear(): void; + push(item: T): number; + peek(): T | undefined; + pop(): T | undefined; + replace(item: T): T | undefined; + pushpop(item: T): T | undefined; + toArray(): Array; + consume(): Array; + inspect(): any; +} + +// Static helpers +export function push(comparator: HeapComparator, heap: Array, item: T): void; +export function pop(comparator: HeapComparator, heap: Array): T; +export function replace(comparator: HeapComparator, heap: Array, item: T): T; +export function pushpop(comparator: HeapComparator, heap: Array, item: T): T; +export function heapify(comparator: HeapComparator, array: Array): void; +export function consume(comparator: HeapComparator, heap: Array): Array; + +export function nsmallest(comparator: HeapComparator, n: number, values: Iterable): Array; +export function nsmallest(n: number, values: Iterable): Array; +export function nlargest(comparator: HeapComparator, n: number, values: Iterable): Array; +export function nlargest(n: number, values: Iterable): Array; diff --git a/amplify/functions/downloadDocument/node_modules/mnemonist/heap.js b/amplify/functions/downloadDocument/node_modules/mnemonist/heap.js new file mode 100644 index 0000000..90eb971 --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/mnemonist/heap.js @@ -0,0 +1,576 @@ +/** + * Mnemonist Binary Heap + * ====================== + * + * Binary heap implementation. + */ +var forEach = require('obliterator/foreach'), + comparators = require('./utils/comparators.js'), + iterables = require('./utils/iterables.js'); + +var DEFAULT_COMPARATOR = comparators.DEFAULT_COMPARATOR, + reverseComparator = comparators.reverseComparator; + +/** + * Heap helper functions. + */ + +/** + * Function used to sift down. + * + * @param {function} compare - Comparison function. 
+ * @param {array} heap - Array storing the heap's data. + * @param {number} startIndex - Starting index. + * @param {number} i - Index. + */ +function siftDown(compare, heap, startIndex, i) { + var item = heap[i], + parentIndex, + parent; + + while (i > startIndex) { + parentIndex = (i - 1) >> 1; + parent = heap[parentIndex]; + + if (compare(item, parent) < 0) { + heap[i] = parent; + i = parentIndex; + continue; + } + + break; + } + + heap[i] = item; +} + +/** + * Function used to sift up. + * + * @param {function} compare - Comparison function. + * @param {array} heap - Array storing the heap's data. + * @param {number} i - Index. + */ +function siftUp(compare, heap, i) { + var endIndex = heap.length, + startIndex = i, + item = heap[i], + childIndex = 2 * i + 1, + rightIndex; + + while (childIndex < endIndex) { + rightIndex = childIndex + 1; + + if ( + rightIndex < endIndex && + compare(heap[childIndex], heap[rightIndex]) >= 0 + ) { + childIndex = rightIndex; + } + + heap[i] = heap[childIndex]; + i = childIndex; + childIndex = 2 * i + 1; + } + + heap[i] = item; + siftDown(compare, heap, startIndex, i); +} + +/** + * Function used to push an item into a heap represented by a raw array. + * + * @param {function} compare - Comparison function. + * @param {array} heap - Array storing the heap's data. + * @param {any} item - Item to push. + */ +function push(compare, heap, item) { + heap.push(item); + siftDown(compare, heap, 0, heap.length - 1); +} + +/** + * Function used to pop an item from a heap represented by a raw array. + * + * @param {function} compare - Comparison function. + * @param {array} heap - Array storing the heap's data. + * @return {any} + */ +function pop(compare, heap) { + var lastItem = heap.pop(); + + if (heap.length !== 0) { + var item = heap[0]; + heap[0] = lastItem; + siftUp(compare, heap, 0); + + return item; + } + + return lastItem; +} + +/** + * Function used to pop the heap then push a new value into it, thus "replacing" + * it. 
+ * + * @param {function} compare - Comparison function. + * @param {array} heap - Array storing the heap's data. + * @param {any} item - The item to push. + * @return {any} + */ +function replace(compare, heap, item) { + if (heap.length === 0) + throw new Error('mnemonist/heap.replace: cannot pop an empty heap.'); + + var popped = heap[0]; + heap[0] = item; + siftUp(compare, heap, 0); + + return popped; +} + +/** + * Function used to push an item in the heap then pop the heap and return the + * popped value. + * + * @param {function} compare - Comparison function. + * @param {array} heap - Array storing the heap's data. + * @param {any} item - The item to push. + * @return {any} + */ +function pushpop(compare, heap, item) { + var tmp; + + if (heap.length !== 0 && compare(heap[0], item) < 0) { + tmp = heap[0]; + heap[0] = item; + item = tmp; + siftUp(compare, heap, 0); + } + + return item; +} + +/** + * Converts and array into an abstract heap in linear time. + * + * @param {function} compare - Comparison function. + * @param {array} array - Target array. + */ +function heapify(compare, array) { + var n = array.length, + l = n >> 1, + i = l; + + while (--i >= 0) + siftUp(compare, array, i); +} + +/** + * Fully consumes the given heap. + * + * @param {function} compare - Comparison function. + * @param {array} heap - Array storing the heap's data. + * @return {array} + */ +function consume(compare, heap) { + var l = heap.length, + i = 0; + + var array = new Array(l); + + while (i < l) + array[i++] = pop(compare, heap); + + return array; +} + +/** + * Function used to retrieve the n smallest items from the given iterable. + * + * @param {function} compare - Comparison function. + * @param {number} n - Number of top items to retrieve. + * @param {any} iterable - Arbitrary iterable. 
+ * @param {array} + */ +function nsmallest(compare, n, iterable) { + if (arguments.length === 2) { + iterable = n; + n = compare; + compare = DEFAULT_COMPARATOR; + } + + var reverseCompare = reverseComparator(compare); + + var i, l, v; + + var min = Infinity; + + var result; + + // If n is equal to 1, it's just a matter of finding the minimum + if (n === 1) { + if (iterables.isArrayLike(iterable)) { + for (i = 0, l = iterable.length; i < l; i++) { + v = iterable[i]; + + if (min === Infinity || compare(v, min) < 0) + min = v; + } + + result = new iterable.constructor(1); + result[0] = min; + + return result; + } + + forEach(iterable, function(value) { + if (min === Infinity || compare(value, min) < 0) + min = value; + }); + + return [min]; + } + + if (iterables.isArrayLike(iterable)) { + + // If n > iterable length, we just clone and sort + if (n >= iterable.length) + return iterable.slice().sort(compare); + + result = iterable.slice(0, n); + heapify(reverseCompare, result); + + for (i = n, l = iterable.length; i < l; i++) + if (reverseCompare(iterable[i], result[0]) > 0) + replace(reverseCompare, result, iterable[i]); + + // NOTE: if n is over some number, it becomes faster to consume the heap + return result.sort(compare); + } + + // Correct for size + var size = iterables.guessLength(iterable); + + if (size !== null && size < n) + n = size; + + result = new Array(n); + i = 0; + + forEach(iterable, function(value) { + if (i < n) { + result[i] = value; + } + else { + if (i === n) + heapify(reverseCompare, result); + + if (reverseCompare(value, result[0]) > 0) + replace(reverseCompare, result, value); + } + + i++; + }); + + if (result.length > i) + result.length = i; + + // NOTE: if n is over some number, it becomes faster to consume the heap + return result.sort(compare); +} + +/** + * Function used to retrieve the n largest items from the given iterable. + * + * @param {function} compare - Comparison function. 
+ * @param {number} n - Number of top items to retrieve. + * @param {any} iterable - Arbitrary iterable. + * @param {array} + */ +function nlargest(compare, n, iterable) { + if (arguments.length === 2) { + iterable = n; + n = compare; + compare = DEFAULT_COMPARATOR; + } + + var reverseCompare = reverseComparator(compare); + + var i, l, v; + + var max = -Infinity; + + var result; + + // If n is equal to 1, it's just a matter of finding the maximum + if (n === 1) { + if (iterables.isArrayLike(iterable)) { + for (i = 0, l = iterable.length; i < l; i++) { + v = iterable[i]; + + if (max === -Infinity || compare(v, max) > 0) + max = v; + } + + result = new iterable.constructor(1); + result[0] = max; + + return result; + } + + forEach(iterable, function(value) { + if (max === -Infinity || compare(value, max) > 0) + max = value; + }); + + return [max]; + } + + if (iterables.isArrayLike(iterable)) { + + // If n > iterable length, we just clone and sort + if (n >= iterable.length) + return iterable.slice().sort(reverseCompare); + + result = iterable.slice(0, n); + heapify(compare, result); + + for (i = n, l = iterable.length; i < l; i++) + if (compare(iterable[i], result[0]) > 0) + replace(compare, result, iterable[i]); + + // NOTE: if n is over some number, it becomes faster to consume the heap + return result.sort(reverseCompare); + } + + // Correct for size + var size = iterables.guessLength(iterable); + + if (size !== null && size < n) + n = size; + + result = new Array(n); + i = 0; + + forEach(iterable, function(value) { + if (i < n) { + result[i] = value; + } + else { + if (i === n) + heapify(compare, result); + + if (compare(value, result[0]) > 0) + replace(compare, result, value); + } + + i++; + }); + + if (result.length > i) + result.length = i; + + // NOTE: if n is over some number, it becomes faster to consume the heap + return result.sort(reverseCompare); +} + +/** + * Binary Minimum Heap. 
+ * + * @constructor + * @param {function} comparator - Comparator function to use. + */ +function Heap(comparator) { + this.clear(); + this.comparator = comparator || DEFAULT_COMPARATOR; + + if (typeof this.comparator !== 'function') + throw new Error('mnemonist/Heap.constructor: given comparator should be a function.'); +} + +/** + * Method used to clear the heap. + * + * @return {undefined} + */ +Heap.prototype.clear = function() { + + // Properties + this.items = []; + this.size = 0; +}; + +/** + * Method used to push an item into the heap. + * + * @param {any} item - Item to push. + * @return {number} + */ +Heap.prototype.push = function(item) { + push(this.comparator, this.items, item); + return ++this.size; +}; + +/** + * Method used to retrieve the "first" item of the heap. + * + * @return {any} + */ +Heap.prototype.peek = function() { + return this.items[0]; +}; + +/** + * Method used to retrieve & remove the "first" item of the heap. + * + * @return {any} + */ +Heap.prototype.pop = function() { + if (this.size !== 0) + this.size--; + + return pop(this.comparator, this.items); +}; + +/** + * Method used to pop the heap, then push an item and return the popped + * item. + * + * @param {any} item - Item to push into the heap. + * @return {any} + */ +Heap.prototype.replace = function(item) { + return replace(this.comparator, this.items, item); +}; + +/** + * Method used to push the heap, the pop it and return the pooped item. + * + * @param {any} item - Item to push into the heap. + * @return {any} + */ +Heap.prototype.pushpop = function(item) { + return pushpop(this.comparator, this.items, item); +}; + +/** + * Method used to consume the heap fully and return its items as a sorted array. + * + * @return {array} + */ +Heap.prototype.consume = function() { + this.size = 0; + return consume(this.comparator, this.items); +}; + +/** + * Method used to convert the heap to an array. Note that it basically clone + * the heap and consumes it completely. 
This is hardly performant. + * + * @return {array} + */ +Heap.prototype.toArray = function() { + return consume(this.comparator, this.items.slice()); +}; + +/** + * Convenience known methods. + */ +Heap.prototype.inspect = function() { + var proxy = this.toArray(); + + // Trick so that node displays the name of the constructor + Object.defineProperty(proxy, 'constructor', { + value: Heap, + enumerable: false + }); + + return proxy; +}; + +if (typeof Symbol !== 'undefined') + Heap.prototype[Symbol.for('nodejs.util.inspect.custom')] = Heap.prototype.inspect; + +/** + * Binary Maximum Heap. + * + * @constructor + * @param {function} comparator - Comparator function to use. + */ +function MaxHeap(comparator) { + this.clear(); + this.comparator = comparator || DEFAULT_COMPARATOR; + + if (typeof this.comparator !== 'function') + throw new Error('mnemonist/MaxHeap.constructor: given comparator should be a function.'); + + this.comparator = reverseComparator(this.comparator); +} + +MaxHeap.prototype = Heap.prototype; + +/** + * Static @.from function taking an arbitrary iterable & converting it into + * a heap. + * + * @param {Iterable} iterable - Target iterable. + * @param {function} comparator - Custom comparator function. 
+ * @return {Heap} + */ +Heap.from = function(iterable, comparator) { + var heap = new Heap(comparator); + + var items; + + // If iterable is an array, we can be clever about it + if (iterables.isArrayLike(iterable)) + items = iterable.slice(); + else + items = iterables.toArray(iterable); + + heapify(heap.comparator, items); + heap.items = items; + heap.size = items.length; + + return heap; +}; + +MaxHeap.from = function(iterable, comparator) { + var heap = new MaxHeap(comparator); + + var items; + + // If iterable is an array, we can be clever about it + if (iterables.isArrayLike(iterable)) + items = iterable.slice(); + else + items = iterables.toArray(iterable); + + heapify(heap.comparator, items); + heap.items = items; + heap.size = items.length; + + return heap; +}; + +/** + * Exporting. + */ +Heap.siftUp = siftUp; +Heap.siftDown = siftDown; +Heap.push = push; +Heap.pop = pop; +Heap.replace = replace; +Heap.pushpop = pushpop; +Heap.heapify = heapify; +Heap.consume = consume; + +Heap.nsmallest = nsmallest; +Heap.nlargest = nlargest; + +Heap.MinHeap = Heap; +Heap.MaxHeap = MaxHeap; + +module.exports = Heap; diff --git a/amplify/functions/downloadDocument/node_modules/mnemonist/index.d.ts b/amplify/functions/downloadDocument/node_modules/mnemonist/index.d.ts new file mode 100644 index 0000000..cbdc86c --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/mnemonist/index.d.ts @@ -0,0 +1,46 @@ +/** + * Mnemonist Typings + * ================== + * + * Gathering the library's typings. 
+ */ +import * as set from './set'; + +export {default as BiMap, InverseMap} from './bi-map'; +export {default as BitSet} from './bit-set'; +export {default as BitVector} from './bit-vector'; +export {default as BKTree} from './bk-tree'; +export {default as BloomFilter} from './bloom-filter'; +export {default as CircularBuffer} from './circular-buffer'; +export {default as DefaultMap} from './default-map'; +export {default as DefaultWeakMap} from './default-weak-map'; +export {default as FixedDeque} from './fixed-deque'; +export {default as FibonacciHeap, MinFibonacciHeap, MaxFibonacciHeap} from './fibonacci-heap'; +export {default as FixedReverseHeap} from './fixed-reverse-heap'; +export {default as FixedStack} from './fixed-stack'; +export {default as FuzzyMap} from './fuzzy-map'; +export {default as FuzzyMultiMap} from './fuzzy-multi-map'; +export {default as HashedArrayTree} from './hashed-array-tree'; +export {default as Heap, MinHeap, MaxHeap} from './heap'; +export {default as InvertedIndex} from './inverted-index'; +export {default as KDTree} from './kd-tree'; +export {default as LinkedList} from './linked-list'; +export {default as LRUCache} from './lru-cache'; +export {default as LRUMap} from './lru-map'; +export {default as MultiMap} from './multi-map'; +export {default as MultiSet} from './multi-set'; +export {default as PassjoinIndex} from './passjoin-index'; +export {default as Queue} from './queue'; +export {set}; +export {default as SparseQueueSet} from './sparse-queue-set'; +export {default as SparseMap} from './sparse-map'; +export {default as SparseSet} from './sparse-set'; +export {default as Stack} from './stack'; +export {default as StaticDisjointSet} from './static-disjoint-set'; +export {default as StaticIntervalTree} from './static-interval-tree'; +export {default as SuffixArray, GeneralizedSuffixArray} from './suffix-array'; +export {default as SymSpell} from './symspell'; +export {default as Trie} from './trie'; +export {default as 
TrieMap} from './trie-map'; +export {default as Vector, Uint8Vector, Uint8ClampedVector, Int8Vector, Uint16Vector, Int16Vector, Uint32Vector, Int32Vector, Float32Vector, Float64Array} from './vector'; +export {default as VPTree} from './vp-tree'; diff --git a/amplify/functions/downloadDocument/node_modules/mnemonist/index.js b/amplify/functions/downloadDocument/node_modules/mnemonist/index.js new file mode 100644 index 0000000..80c18d4 --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/mnemonist/index.js @@ -0,0 +1,56 @@ +/** + * Mnemonist Library Endpoint + * =========================== + * + * Exporting every data structure through a unified endpoint. Consumers + * of this library should prefer the modular access though. + */ +var Heap = require('./heap.js'), + FibonacciHeap = require('./fibonacci-heap.js'), + SuffixArray = require('./suffix-array.js'); + +module.exports = { + BiMap: require('./bi-map.js'), + BitSet: require('./bit-set.js'), + BitVector: require('./bit-vector.js'), + BloomFilter: require('./bloom-filter.js'), + BKTree: require('./bk-tree.js'), + CircularBuffer: require('./circular-buffer.js'), + DefaultMap: require('./default-map.js'), + DefaultWeakMap: require('./default-weak-map.js'), + FixedDeque: require('./fixed-deque.js'), + StaticDisjointSet: require('./static-disjoint-set.js'), + FibonacciHeap: FibonacciHeap, + MinFibonacciHeap: FibonacciHeap.MinFibonacciHeap, + MaxFibonacciHeap: FibonacciHeap.MaxFibonacciHeap, + FixedReverseHeap: require('./fixed-reverse-heap.js'), + FuzzyMap: require('./fuzzy-map.js'), + FuzzyMultiMap: require('./fuzzy-multi-map.js'), + HashedArrayTree: require('./hashed-array-tree.js'), + Heap: Heap, + MinHeap: Heap.MinHeap, + MaxHeap: Heap.MaxHeap, + StaticIntervalTree: require('./static-interval-tree.js'), + InvertedIndex: require('./inverted-index.js'), + KDTree: require('./kd-tree.js'), + LinkedList: require('./linked-list.js'), + LRUCache: require('./lru-cache.js'), + LRUMap: 
require('./lru-map.js'), + MultiMap: require('./multi-map.js'), + MultiSet: require('./multi-set.js'), + PassjoinIndex: require('./passjoin-index.js'), + Queue: require('./queue.js'), + FixedStack: require('./fixed-stack.js'), + Stack: require('./stack.js'), + SuffixArray: SuffixArray, + GeneralizedSuffixArray: SuffixArray.GeneralizedSuffixArray, + Set: require('./set.js'), + SparseQueueSet: require('./sparse-queue-set.js'), + SparseMap: require('./sparse-map.js'), + SparseSet: require('./sparse-set.js'), + SymSpell: require('./symspell.js'), + Trie: require('./trie.js'), + TrieMap: require('./trie-map.js'), + Vector: require('./vector.js'), + VPTree: require('./vp-tree.js') +}; diff --git a/amplify/functions/downloadDocument/node_modules/mnemonist/inverted-index.d.ts b/amplify/functions/downloadDocument/node_modules/mnemonist/inverted-index.d.ts new file mode 100644 index 0000000..4596ff8 --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/mnemonist/inverted-index.d.ts @@ -0,0 +1,33 @@ +/** + * Mnemonist InvertedIndex Typings + * ================================ + */ +type Tokenizer = (key: any) => Array; +type TokenizersTuple = [Tokenizer, Tokenizer]; + +export default class InvertedIndex implements Iterable { + + // Members + dimension: number; + size: number; + + // Constructor + constructor(tokenizer?: Tokenizer); + constructor(tokenizers?: TokenizersTuple); + + // Methods + clear(): void; + add(document: D): this; + get(query: any): Array; + forEach(callback: (document: D, index: number, invertedIndex: this) => void, scope?: any): void; + documents(): IterableIterator; + tokens(): IterableIterator; + [Symbol.iterator](): IterableIterator; + inspect(): any; + + // Statics + static from( + iterable: Iterable | {[key: string] : I}, + tokenizer?: Tokenizer | TokenizersTuple + ): InvertedIndex; +} \ No newline at end of file diff --git a/amplify/functions/downloadDocument/node_modules/mnemonist/inverted-index.js 
b/amplify/functions/downloadDocument/node_modules/mnemonist/inverted-index.js new file mode 100644 index 0000000..a352d19 --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/mnemonist/inverted-index.js @@ -0,0 +1,249 @@ +/** + * Mnemonist Inverted Index + * ========================= + * + * JavaScript implementation of an inverted index. + */ +var Iterator = require('obliterator/iterator'), + forEach = require('obliterator/foreach'), + helpers = require('./utils/merge.js'); + +function identity(x) { + return x; +} + +/** + * InvertedIndex. + * + * @constructor + * @param {function} tokenizer - Tokenizer function. + */ +function InvertedIndex(descriptor) { + this.clear(); + + if (Array.isArray(descriptor)) { + this.documentTokenizer = descriptor[0]; + this.queryTokenizer = descriptor[1]; + } + else { + this.documentTokenizer = descriptor; + this.queryTokenizer = descriptor; + } + + if (!this.documentTokenizer) + this.documentTokenizer = identity; + if (!this.queryTokenizer) + this.queryTokenizer = identity; + + if (typeof this.documentTokenizer !== 'function') + throw new Error('mnemonist/InvertedIndex.constructor: document tokenizer is not a function.'); + + if (typeof this.queryTokenizer !== 'function') + throw new Error('mnemonist/InvertedIndex.constructor: query tokenizer is not a function.'); +} + +/** + * Method used to clear the InvertedIndex. + * + * @return {undefined} + */ +InvertedIndex.prototype.clear = function() { + + // Properties + this.items = []; + this.mapping = new Map(); + this.size = 0; + this.dimension = 0; +}; + +/** + * Method used to add a document to the index. + * + * @param {any} doc - Item to add. 
+ * @return {InvertedIndex} + */ +InvertedIndex.prototype.add = function(doc) { + + // Increasing size + this.size++; + + // Storing document + var key = this.items.length; + this.items.push(doc); + + // Tokenizing the document + var tokens = this.documentTokenizer(doc); + + if (!Array.isArray(tokens)) + throw new Error('mnemonist/InvertedIndex.add: tokenizer function should return an array of tokens.'); + + // Indexing + var done = new Set(), + token, + container; + + for (var i = 0, l = tokens.length; i < l; i++) { + token = tokens[i]; + + if (done.has(token)) + continue; + + done.add(token); + + container = this.mapping.get(token); + + if (!container) { + container = []; + this.mapping.set(token, container); + } + + container.push(key); + } + + this.dimension = this.mapping.size; + + return this; +}; + +/** + * Method used to query the index in a AND fashion. + * + * @param {any} query - Query + * @return {Set} - Intersection of documents matching the query. + */ +InvertedIndex.prototype.get = function(query) { + + // Early termination + if (!this.size) + return []; + + // First we need to tokenize the query + var tokens = this.queryTokenizer(query); + + if (!Array.isArray(tokens)) + throw new Error('mnemonist/InvertedIndex.query: tokenizer function should return an array of tokens.'); + + if (!tokens.length) + return []; + + var results = this.mapping.get(tokens[0]), + c, + i, + l; + + if (typeof results === 'undefined' || results.length === 0) + return []; + + if (tokens.length > 1) { + for (i = 1, l = tokens.length; i < l; i++) { + c = this.mapping.get(tokens[i]); + + if (typeof c === 'undefined' || c.length === 0) + return []; + + results = helpers.intersectionUniqueArrays(results, c); + } + } + + var docs = new Array(results.length); + + for (i = 0, l = docs.length; i < l; i++) + docs[i] = this.items[results[i]]; + + return docs; +}; + +/** + * Method used to iterate over each of the documents. 
+ * + * @param {function} callback - Function to call for each item. + * @param {object} scope - Optional scope. + * @return {undefined} + */ +InvertedIndex.prototype.forEach = function(callback, scope) { + scope = arguments.length > 1 ? scope : this; + + for (var i = 0, l = this.documents.length; i < l; i++) + callback.call(scope, this.documents[i], i, this); +}; + +/** + * Method returning an iterator over the index's documents. + * + * @return {Iterator} + */ +InvertedIndex.prototype.documents = function() { + var documents = this.items, + l = documents.length, + i = 0; + + return new Iterator(function() { + if (i >= l) + return { + done: true + }; + + var value = documents[i++]; + + return { + value: value, + done: false + }; + }); +}; + +/** + * Method returning an iterator over the index's tokens. + * + * @return {Iterator} + */ +InvertedIndex.prototype.tokens = function() { + return this.mapping.keys(); +}; + +/** + * Attaching the #.values method to Symbol.iterator if possible. + */ +if (typeof Symbol !== 'undefined') + InvertedIndex.prototype[Symbol.iterator] = InvertedIndex.prototype.documents; + +/** + * Convenience known methods. + */ +InvertedIndex.prototype.inspect = function() { + var array = this.items.slice(); + + // Trick so that node displays the name of the constructor + Object.defineProperty(array, 'constructor', { + value: InvertedIndex, + enumerable: false + }); + + return array; +}; + +if (typeof Symbol !== 'undefined') + InvertedIndex.prototype[Symbol.for('nodejs.util.inspect.custom')] = InvertedIndex.prototype.inspect; + +/** + * Static @.from function taking an arbitrary iterable & converting it into + * a InvertedIndex. + * + * @param {Iterable} iterable - Target iterable. + * @param {function} tokenizer - Tokenizer function. 
+ * @return {InvertedIndex} + */ +InvertedIndex.from = function(iterable, descriptor) { + var index = new InvertedIndex(descriptor); + + forEach(iterable, function(doc) { + index.add(doc); + }); + + return index; +}; + +/** + * Exporting. + */ +module.exports = InvertedIndex; diff --git a/amplify/functions/downloadDocument/node_modules/mnemonist/kd-tree.d.ts b/amplify/functions/downloadDocument/node_modules/mnemonist/kd-tree.d.ts new file mode 100644 index 0000000..10294f3 --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/mnemonist/kd-tree.d.ts @@ -0,0 +1,25 @@ +/** + * Mnemonist KDTree Typings + * ========================= + */ +import {IArrayLike} from './utils/types'; + +export default class KDTree { + + // Members + dimensions: number; + size: number; + visited: number; + + // Methods + nearestNeighbor(point: Array): V; + kNearestNeighbors(k: number, point: Array): Array; + linearKNearestNeighbors(k: number, point: Array): Array; + inspect(): any; + + // Statics + static from(iterable: Iterable<[I, Array]>, dimensions: number): KDTree; + static from(axes: IArrayLike): KDTree; + static from(axes: IArrayLike, labels: Array): KDTree; +} + diff --git a/amplify/functions/downloadDocument/node_modules/mnemonist/kd-tree.js b/amplify/functions/downloadDocument/node_modules/mnemonist/kd-tree.js new file mode 100644 index 0000000..fe5d1ca --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/mnemonist/kd-tree.js @@ -0,0 +1,447 @@ +/** + * Mnemonist KDTree + * ================= + * + * Low-level JavaScript implementation of a k-dimensional tree. 
+ */ +var iterables = require('./utils/iterables.js'); +var typed = require('./utils/typed-arrays.js'); +var createTupleComparator = require('./utils/comparators.js').createTupleComparator; +var FixedReverseHeap = require('./fixed-reverse-heap.js'); +var inplaceQuickSortIndices = require('./sort/quick.js').inplaceQuickSortIndices; + +/** + * Helper function used to compute the squared distance between a query point + * and an indexed points whose values are stored in a tree's axes. + * + * Note that squared distance is used instead of euclidean to avoid + * costly sqrt computations. + * + * @param {number} dimensions - Number of dimensions. + * @param {array} axes - Axes data. + * @param {number} pivot - Pivot. + * @param {array} point - Query point. + * @return {number} + */ +function squaredDistanceAxes(dimensions, axes, pivot, b) { + var d; + + var dist = 0, + step; + + for (d = 0; d < dimensions; d++) { + step = axes[d][pivot] - b[d]; + dist += step * step; + } + + return dist; +} + +/** + * Helper function used to reshape input data into low-level axes data. + * + * @param {number} dimensions - Number of dimensions. + * @param {array} data - Data in the shape [label, [x, y, z...]] + * @return {object} + */ +function reshapeIntoAxes(dimensions, data) { + var l = data.length; + + var axes = new Array(dimensions), + labels = new Array(l), + axis; + + var PointerArray = typed.getPointerArray(l); + + var ids = new PointerArray(l); + + var d, i, row; + + var f = true; + + for (d = 0; d < dimensions; d++) { + axis = new Float64Array(l); + + for (i = 0; i < l; i++) { + row = data[i]; + axis[i] = row[1][d]; + + if (f) { + labels[i] = row[0]; + ids[i] = i; + } + } + + f = false; + axes[d] = axis; + } + + return {axes: axes, ids: ids, labels: labels}; +} + +/** + * Helper function used to build a kd-tree from axes data. + * + * @param {number} dimensions - Number of dimensions. + * @param {array} axes - Axes. + * @param {array} ids - Indices to sort. 
+ * @param {array} labels - Point labels. + * @return {object} + */ +function buildTree(dimensions, axes, ids, labels) { + var l = labels.length; + + // NOTE: +1 because we need to keep 0 as null pointer + var PointerArray = typed.getPointerArray(l + 1); + + // Building the tree + var pivots = new PointerArray(l), + lefts = new PointerArray(l), + rights = new PointerArray(l); + + var stack = [[0, 0, ids.length, -1, 0]], + step, + parent, + direction, + median, + pivot, + lo, + hi; + + var d, i = 0; + + while (stack.length !== 0) { + step = stack.pop(); + + d = step[0]; + lo = step[1]; + hi = step[2]; + parent = step[3]; + direction = step[4]; + + inplaceQuickSortIndices(axes[d], ids, lo, hi); + + l = hi - lo; + median = lo + (l >>> 1); // Fancy floor(l / 2) + pivot = ids[median]; + pivots[i] = pivot; + + if (parent > -1) { + if (direction === 0) + lefts[parent] = i + 1; + else + rights[parent] = i + 1; + } + + d = (d + 1) % dimensions; + + // Right + if (median !== lo && median !== hi - 1) { + stack.push([d, median + 1, hi, i, 1]); + } + + // Left + if (median !== lo) { + stack.push([d, lo, median, i, 0]); + } + + i++; + } + + return { + axes: axes, + labels: labels, + pivots: pivots, + lefts: lefts, + rights: rights + }; +} + +/** + * KDTree. + * + * @constructor + */ +function KDTree(dimensions, build) { + this.dimensions = dimensions; + this.visited = 0; + + this.axes = build.axes; + this.labels = build.labels; + + this.pivots = build.pivots; + this.lefts = build.lefts; + this.rights = build.rights; + + this.size = this.labels.length; +} + +/** + * Method returning the query's nearest neighbor. + * + * @param {array} query - Query point. 
+ * @return {any} + */ +KDTree.prototype.nearestNeighbor = function(query) { + var bestDistance = Infinity, + best = null; + + var dimensions = this.dimensions, + axes = this.axes, + pivots = this.pivots, + lefts = this.lefts, + rights = this.rights; + + var visited = 0; + + function recurse(d, node) { + visited++; + + var left = lefts[node], + right = rights[node], + pivot = pivots[node]; + + var dist = squaredDistanceAxes( + dimensions, + axes, + pivot, + query + ); + + if (dist < bestDistance) { + best = pivot; + bestDistance = dist; + + if (dist === 0) + return; + } + + var dx = axes[d][pivot] - query[d]; + + d = (d + 1) % dimensions; + + // Going the correct way? + if (dx > 0) { + if (left !== 0) + recurse(d, left - 1); + } + else { + if (right !== 0) + recurse(d, right - 1); + } + + // Going the other way? + if (dx * dx < bestDistance) { + if (dx > 0) { + if (right !== 0) + recurse(d, right - 1); + } + else { + if (left !== 0) + recurse(d, left - 1); + } + } + } + + recurse(0, 0); + + this.visited = visited; + return this.labels[best]; +}; + +var KNN_HEAP_COMPARATOR_3 = createTupleComparator(3); +var KNN_HEAP_COMPARATOR_2 = createTupleComparator(2); + +/** + * Method returning the query's k nearest neighbors. + * + * @param {number} k - Number of nearest neighbor to retrieve. + * @param {array} query - Query point. 
+ * @return {array} + */ + +// TODO: can do better by improving upon static-kdtree here +KDTree.prototype.kNearestNeighbors = function(k, query) { + if (k <= 0) + throw new Error('mnemonist/kd-tree.kNearestNeighbors: k should be a positive number.'); + + k = Math.min(k, this.size); + + if (k === 1) + return [this.nearestNeighbor(query)]; + + var heap = new FixedReverseHeap(Array, KNN_HEAP_COMPARATOR_3, k); + + var dimensions = this.dimensions, + axes = this.axes, + pivots = this.pivots, + lefts = this.lefts, + rights = this.rights; + + var visited = 0; + + function recurse(d, node) { + var left = lefts[node], + right = rights[node], + pivot = pivots[node]; + + var dist = squaredDistanceAxes( + dimensions, + axes, + pivot, + query + ); + + heap.push([dist, visited++, pivot]); + + var point = query[d], + split = axes[d][pivot], + dx = point - split; + + d = (d + 1) % dimensions; + + // Going the correct way? + if (point < split) { + if (left !== 0) { + recurse(d, left - 1); + } + } + else { + if (right !== 0) { + recurse(d, right - 1); + } + } + + // Going the other way? + if (dx * dx < heap.peek()[0] || heap.size < k) { + if (point < split) { + if (right !== 0) { + recurse(d, right - 1); + } + } + else { + if (left !== 0) { + recurse(d, left - 1); + } + } + } + } + + recurse(0, 0); + + this.visited = visited; + + var best = heap.consume(); + + for (var i = 0; i < best.length; i++) + best[i] = this.labels[best[i][2]]; + + return best; +}; + +/** + * Method returning the query's k nearest neighbors by linear search. + * + * @param {number} k - Number of nearest neighbor to retrieve. + * @param {array} query - Query point. 
+ * @return {array} + */ +KDTree.prototype.linearKNearestNeighbors = function(k, query) { + if (k <= 0) + throw new Error('mnemonist/kd-tree.kNearestNeighbors: k should be a positive number.'); + + k = Math.min(k, this.size); + + var heap = new FixedReverseHeap(Array, KNN_HEAP_COMPARATOR_2, k); + + var i, l, dist; + + for (i = 0, l = this.size; i < l; i++) { + dist = squaredDistanceAxes( + this.dimensions, + this.axes, + this.pivots[i], + query + ); + + heap.push([dist, i]); + } + + var best = heap.consume(); + + for (i = 0; i < best.length; i++) + best[i] = this.labels[this.pivots[best[i][1]]]; + + return best; +}; + +/** + * Convenience known methods. + */ +KDTree.prototype.inspect = function() { + var dummy = new Map(); + + dummy.dimensions = this.dimensions; + + Object.defineProperty(dummy, 'constructor', { + value: KDTree, + enumerable: false + }); + + var i, j, point; + + for (i = 0; i < this.size; i++) { + point = new Array(this.dimensions); + + for (j = 0; j < this.dimensions; j++) + point[j] = this.axes[j][i]; + + dummy.set(this.labels[i], point); + } + + return dummy; +}; + +if (typeof Symbol !== 'undefined') + KDTree.prototype[Symbol.for('nodejs.util.inspect.custom')] = KDTree.prototype.inspect; + +/** + * Static @.from function taking an arbitrary iterable & converting it into + * a structure. + * + * @param {Iterable} iterable - Target iterable. + * @param {number} dimensions - Space dimensions. + * @return {KDTree} + */ +KDTree.from = function(iterable, dimensions) { + var data = iterables.toArray(iterable); + + var reshaped = reshapeIntoAxes(dimensions, data); + + var result = buildTree(dimensions, reshaped.axes, reshaped.ids, reshaped.labels); + + return new KDTree(dimensions, result); +}; + +/** + * Static @.from function building a KDTree from given axes. + * + * @param {Iterable} iterable - Target iterable. + * @param {number} dimensions - Space dimensions. 
+ * @return {KDTree} + */ +KDTree.fromAxes = function(axes, labels) { + if (!labels) + labels = typed.indices(axes[0].length); + + var dimensions = axes.length; + + var result = buildTree(axes.length, axes, typed.indices(labels.length), labels); + + return new KDTree(dimensions, result); +}; + +/** + * Exporting. + */ +module.exports = KDTree; diff --git a/amplify/functions/downloadDocument/node_modules/mnemonist/linked-list.d.ts b/amplify/functions/downloadDocument/node_modules/mnemonist/linked-list.d.ts new file mode 100644 index 0000000..4eec48c --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/mnemonist/linked-list.d.ts @@ -0,0 +1,29 @@ +/** + * Mnemonist LinkedList Typings + * ============================= + */ +export default class LinkedList implements Iterable { + + // Members + size: number; + + // Methods + clear(): void; + first(): T | undefined; + last(): T | undefined; + peek(): T | undefined; + push(value: T): number; + shift(): T | undefined; + unshift(value: T): number; + forEach(callback: (value: T, index: number, list: this) => void, scope?: any): void; + toArray(): Array; + values(): IterableIterator; + entries(): IterableIterator<[number, T]>; + [Symbol.iterator](): IterableIterator; + toString(): string; + toJSON(): Array; + inspect(): any; + + // Statics + static from(iterable: Iterable | {[key: string] : I}): LinkedList; +} \ No newline at end of file diff --git a/amplify/functions/downloadDocument/node_modules/mnemonist/linked-list.js b/amplify/functions/downloadDocument/node_modules/mnemonist/linked-list.js new file mode 100644 index 0000000..17dca06 --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/mnemonist/linked-list.js @@ -0,0 +1,261 @@ +/** + * Mnemonist Linked List + * ====================== + * + * Singly linked list implementation. Uses raw JavaScript objects as nodes + * as benchmarks proved it was the fastest thing to do. 
+ */ +var Iterator = require('obliterator/iterator'), + forEach = require('obliterator/foreach'); + +/** + * Linked List. + * + * @constructor + */ +function LinkedList() { + this.clear(); +} + +/** + * Method used to clear the list. + * + * @return {undefined} + */ +LinkedList.prototype.clear = function() { + + // Properties + this.head = null; + this.tail = null; + this.size = 0; +}; + +/** + * Method used to get the first item of the list. + * + * @return {any} + */ +LinkedList.prototype.first = function() { + return this.head ? this.head.item : undefined; +}; +LinkedList.prototype.peek = LinkedList.prototype.first; + +/** + * Method used to get the last item of the list. + * + * @return {any} + */ +LinkedList.prototype.last = function() { + return this.tail ? this.tail.item : undefined; +}; + +/** + * Method used to add an item at the end of the list. + * + * @param {any} item - The item to add. + * @return {number} + */ +LinkedList.prototype.push = function(item) { + var node = {item: item, next: null}; + + if (!this.head) { + this.head = node; + this.tail = node; + } + else { + this.tail.next = node; + this.tail = node; + } + + this.size++; + + return this.size; +}; + +/** + * Method used to add an item at the beginning of the list. + * + * @param {any} item - The item to add. + * @return {number} + */ +LinkedList.prototype.unshift = function(item) { + var node = {item: item, next: null}; + + if (!this.head) { + this.head = node; + this.tail = node; + } + else { + if (!this.head.next) + this.tail = this.head; + node.next = this.head; + this.head = node; + } + + this.size++; + + return this.size; +}; + +/** + * Method used to retrieve & remove the first item of the list. + * + * @return {any} + */ +LinkedList.prototype.shift = function() { + if (!this.size) + return undefined; + + var node = this.head; + + this.head = node.next; + this.size--; + + return node.item; +}; + +/** + * Method used to iterate over the list. 
+ * + * @param {function} callback - Function to call for each item. + * @param {object} scope - Optional scope. + * @return {undefined} + */ +LinkedList.prototype.forEach = function(callback, scope) { + if (!this.size) + return; + + scope = arguments.length > 1 ? scope : this; + + var n = this.head, + i = 0; + + while (n) { + callback.call(scope, n.item, i, this); + n = n.next; + i++; + } +}; + +/** + * Method used to convert the list into an array. + * + * @return {array} + */ +LinkedList.prototype.toArray = function() { + if (!this.size) + return []; + + var array = new Array(this.size); + + for (var i = 0, l = this.size, n = this.head; i < l; i++) { + array[i] = n.item; + n = n.next; + } + + return array; +}; + +/** + * Method used to create an iterator over a list's values. + * + * @return {Iterator} + */ +LinkedList.prototype.values = function() { + var n = this.head; + + return new Iterator(function() { + if (!n) + return { + done: true + }; + + var value = n.item; + n = n.next; + + return { + value: value, + done: false + }; + }); +}; + +/** + * Method used to create an iterator over a list's entries. + * + * @return {Iterator} + */ +LinkedList.prototype.entries = function() { + var n = this.head, + i = 0; + + return new Iterator(function() { + if (!n) + return { + done: true + }; + + var value = n.item; + n = n.next; + i++; + + return { + value: [i - 1, value], + done: false + }; + }); +}; + +/** + * Attaching the #.values method to Symbol.iterator if possible. + */ +if (typeof Symbol !== 'undefined') + LinkedList.prototype[Symbol.iterator] = LinkedList.prototype.values; + +/** + * Convenience known methods. 
+ */ +LinkedList.prototype.toString = function() { + return this.toArray().join(','); +}; + +LinkedList.prototype.toJSON = function() { + return this.toArray(); +}; + +LinkedList.prototype.inspect = function() { + var array = this.toArray(); + + // Trick so that node displays the name of the constructor + Object.defineProperty(array, 'constructor', { + value: LinkedList, + enumerable: false + }); + + return array; +}; + +if (typeof Symbol !== 'undefined') + LinkedList.prototype[Symbol.for('nodejs.util.inspect.custom')] = LinkedList.prototype.inspect; + +/** + * Static @.from function taking an arbitrary iterable & converting it into + * a list. + * + * @param {Iterable} iterable - Target iterable. + * @return {LinkedList} + */ +LinkedList.from = function(iterable) { + var list = new LinkedList(); + + forEach(iterable, function(value) { + list.push(value); + }); + + return list; +}; + +/** + * Exporting. + */ +module.exports = LinkedList; diff --git a/amplify/functions/downloadDocument/node_modules/mnemonist/lru-cache.d.ts b/amplify/functions/downloadDocument/node_modules/mnemonist/lru-cache.d.ts new file mode 100644 index 0000000..45b61e0 --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/mnemonist/lru-cache.d.ts @@ -0,0 +1,43 @@ +/** + * Mnemonist LRUCache Typings + * =========================== + */ +import {IArrayLikeConstructor} from './utils/types'; + +export default class LRUCache implements Iterable<[K, V]> { + + // Members + capacity: number; + size: number; + + // Constructor + constructor(capacity: number); + constructor(KeyArrayClass: IArrayLikeConstructor, ValueArrayClass: IArrayLikeConstructor, capacity: number); + + // Methods + clear(): void; + set(key: K, value: V): this; + setpop(key: K, value: V): {evicted: boolean, key: K, value: V}; + get(key: K): V | undefined; + peek(key: K): V | undefined; + has(key: K): boolean; + forEach(callback: (value: V, key: K, cache: this) => void, scope?: any): void; + keys(): IterableIterator; + 
values(): IterableIterator; + entries(): IterableIterator<[K, V]>; + [Symbol.iterator](): IterableIterator<[K, V]>; + inspect(): any; + + // Statics + static from( + iterable: Iterable<[I, J]> | {[key: string]: J}, + KeyArrayClass: IArrayLikeConstructor, + ValueArrayClass: IArrayLikeConstructor, + capacity?: number + ): LRUCache; + + static from( + iterable: Iterable<[I, J]> | {[key: string]: J}, + capacity?: number + ): LRUCache; +} diff --git a/amplify/functions/downloadDocument/node_modules/mnemonist/lru-cache.js b/amplify/functions/downloadDocument/node_modules/mnemonist/lru-cache.js new file mode 100644 index 0000000..9cab8bc --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/mnemonist/lru-cache.js @@ -0,0 +1,433 @@ +/** + * Mnemonist LRUCache + * =================== + * + * JavaScript implementation of the LRU Cache data structure. To save up + * memory and allocations this implementation represents its underlying + * doubly-linked list as static arrays and pointers. Thus, memory is allocated + * only once at instantiation and JS objects are never created to serve as + * pointers. This also means this implementation does not trigger too many + * garbage collections. + * + * Note that to save up memory, a LRU Cache can be implemented using a singly + * linked list by storing predecessors' pointers as hashmap values. + * However, this means more hashmap lookups and would probably slow the whole + * thing down. What's more, pointers are not the things taking most space in + * memory. + */ +var Iterator = require('obliterator/iterator'), + forEach = require('obliterator/foreach'), + typed = require('./utils/typed-arrays.js'), + iterables = require('./utils/iterables.js'); + +/** + * LRUCache. + * + * @constructor + * @param {function} Keys - Array class for storing keys. + * @param {function} Values - Array class for storing values. + * @param {number} capacity - Desired capacity. 
+ */ +function LRUCache(Keys, Values, capacity) { + if (arguments.length < 2) { + capacity = Keys; + Keys = null; + Values = null; + } + + this.capacity = capacity; + + if (typeof this.capacity !== 'number' || this.capacity <= 0) + throw new Error('mnemonist/lru-cache: capacity should be positive number.'); + + var PointerArray = typed.getPointerArray(capacity); + + this.forward = new PointerArray(capacity); + this.backward = new PointerArray(capacity); + this.K = typeof Keys === 'function' ? new Keys(capacity) : new Array(capacity); + this.V = typeof Values === 'function' ? new Values(capacity) : new Array(capacity); + + // Properties + this.size = 0; + this.head = 0; + this.tail = 0; + this.items = {}; +} + +/** + * Method used to clear the structure. + * + * @return {undefined} + */ +LRUCache.prototype.clear = function() { + this.size = 0; + this.head = 0; + this.tail = 0; + this.items = {}; +}; + +/** + * Method used to splay a value on top. + * + * @param {number} pointer - Pointer of the value to splay on top. + * @return {LRUCache} + */ +LRUCache.prototype.splayOnTop = function(pointer) { + var oldHead = this.head; + + if (this.head === pointer) + return this; + + var previous = this.backward[pointer], + next = this.forward[pointer]; + + if (this.tail === pointer) { + this.tail = previous; + } + else { + this.backward[next] = previous; + } + + this.forward[previous] = next; + + this.backward[oldHead] = pointer; + this.head = pointer; + this.forward[pointer] = oldHead; + + return this; +}; + +/** + * Method used to set the value for the given key in the cache. + * + * @param {any} key - Key. + * @param {any} value - Value. 
+ * @return {undefined} + */ +LRUCache.prototype.set = function(key, value) { + + // The key already exists, we just need to update the value and splay on top + var pointer = this.items[key]; + + if (typeof pointer !== 'undefined') { + this.splayOnTop(pointer); + this.V[pointer] = value; + + return; + } + + // The cache is not yet full + if (this.size < this.capacity) { + pointer = this.size++; + } + + // Cache is full, we need to drop the last value + else { + pointer = this.tail; + this.tail = this.backward[pointer]; + delete this.items[this.K[pointer]]; + } + + // Storing key & value + this.items[key] = pointer; + this.K[pointer] = key; + this.V[pointer] = value; + + // Moving the item at the front of the list + this.forward[pointer] = this.head; + this.backward[this.head] = pointer; + this.head = pointer; +}; + +/** + * Method used to set the value for the given key in the cache + * + * @param {any} key - Key. + * @param {any} value - Value. + * @return {{evicted: boolean, key: any, value: any}} An object containing the + * key and value of an item that was overwritten or evicted in the set + * operation, as well as a boolean indicating whether it was evicted due to + * limited capacity. Return value is null if nothing was evicted or overwritten + * during the set operation. 
+ */ +LRUCache.prototype.setpop = function(key, value) { + var oldValue = null; + var oldKey = null; + // The key already exists, we just need to update the value and splay on top + var pointer = this.items[key]; + + if (typeof pointer !== 'undefined') { + this.splayOnTop(pointer); + oldValue = this.V[pointer]; + this.V[pointer] = value; + return {evicted: false, key: key, value: oldValue}; + } + + // The cache is not yet full + if (this.size < this.capacity) { + pointer = this.size++; + } + + // Cache is full, we need to drop the last value + else { + pointer = this.tail; + this.tail = this.backward[pointer]; + oldValue = this.V[pointer]; + oldKey = this.K[pointer]; + delete this.items[this.K[pointer]]; + } + + // Storing key & value + this.items[key] = pointer; + this.K[pointer] = key; + this.V[pointer] = value; + + // Moving the item at the front of the list + this.forward[pointer] = this.head; + this.backward[this.head] = pointer; + this.head = pointer; + + // Return object if eviction took place, otherwise return null + if (oldKey) { + return {evicted: true, key: oldKey, value: oldValue}; + } + else { + return null; + } +}; + +/** + * Method used to check whether the key exists in the cache. + * + * @param {any} key - Key. + * @return {boolean} + */ +LRUCache.prototype.has = function(key) { + return key in this.items; +}; + +/** + * Method used to get the value attached to the given key. Will move the + * related key to the front of the underlying linked list. + * + * @param {any} key - Key. + * @return {any} + */ +LRUCache.prototype.get = function(key) { + var pointer = this.items[key]; + + if (typeof pointer === 'undefined') + return; + + this.splayOnTop(pointer); + + return this.V[pointer]; +}; + +/** + * Method used to get the value attached to the given key. Does not modify + * the ordering of the underlying linked list. + * + * @param {any} key - Key. 
+ * @return {any} + */ +LRUCache.prototype.peek = function(key) { + var pointer = this.items[key]; + + if (typeof pointer === 'undefined') + return; + + return this.V[pointer]; +}; + +/** + * Method used to iterate over the cache's entries using a callback. + * + * @param {function} callback - Function to call for each item. + * @param {object} scope - Optional scope. + * @return {undefined} + */ +LRUCache.prototype.forEach = function(callback, scope) { + scope = arguments.length > 1 ? scope : this; + + var i = 0, + l = this.size; + + var pointer = this.head, + keys = this.K, + values = this.V, + forward = this.forward; + + while (i < l) { + + callback.call(scope, values[pointer], keys[pointer], this); + pointer = forward[pointer]; + + i++; + } +}; + +/** + * Method used to create an iterator over the cache's keys from most + * recently used to least recently used. + * + * @return {Iterator} + */ +LRUCache.prototype.keys = function() { + var i = 0, + l = this.size; + + var pointer = this.head, + keys = this.K, + forward = this.forward; + + return new Iterator(function() { + if (i >= l) + return {done: true}; + + var key = keys[pointer]; + + i++; + + if (i < l) + pointer = forward[pointer]; + + return { + done: false, + value: key + }; + }); +}; + +/** + * Method used to create an iterator over the cache's values from most + * recently used to least recently used. + * + * @return {Iterator} + */ +LRUCache.prototype.values = function() { + var i = 0, + l = this.size; + + var pointer = this.head, + values = this.V, + forward = this.forward; + + return new Iterator(function() { + if (i >= l) + return {done: true}; + + var value = values[pointer]; + + i++; + + if (i < l) + pointer = forward[pointer]; + + return { + done: false, + value: value + }; + }); +}; + +/** + * Method used to create an iterator over the cache's entries from most + * recently used to least recently used. 
+ * + * @return {Iterator} + */ +LRUCache.prototype.entries = function() { + var i = 0, + l = this.size; + + var pointer = this.head, + keys = this.K, + values = this.V, + forward = this.forward; + + return new Iterator(function() { + if (i >= l) + return {done: true}; + + var key = keys[pointer], + value = values[pointer]; + + i++; + + if (i < l) + pointer = forward[pointer]; + + return { + done: false, + value: [key, value] + }; + }); +}; + +/** + * Attaching the #.entries method to Symbol.iterator if possible. + */ +if (typeof Symbol !== 'undefined') + LRUCache.prototype[Symbol.iterator] = LRUCache.prototype.entries; + +/** + * Convenience known methods. + */ +LRUCache.prototype.inspect = function() { + var proxy = new Map(); + + var iterator = this.entries(), + step; + + while ((step = iterator.next(), !step.done)) + proxy.set(step.value[0], step.value[1]); + + // Trick so that node displays the name of the constructor + Object.defineProperty(proxy, 'constructor', { + value: LRUCache, + enumerable: false + }); + + return proxy; +}; + +if (typeof Symbol !== 'undefined') + LRUCache.prototype[Symbol.for('nodejs.util.inspect.custom')] = LRUCache.prototype.inspect; + +/** + * Static @.from function taking an arbitrary iterable & converting it into + * a structure. + * + * @param {Iterable} iterable - Target iterable. + * @param {function} Keys - Array class for storing keys. + * @param {function} Values - Array class for storing values. + * @param {number} capacity - Cache's capacity. + * @return {LRUCache} + */ +LRUCache.from = function(iterable, Keys, Values, capacity) { + if (arguments.length < 2) { + capacity = iterables.guessLength(iterable); + + if (typeof capacity !== 'number') + throw new Error('mnemonist/lru-cache.from: could not guess iterable length. 
Please provide desired capacity as last argument.'); + } + else if (arguments.length === 2) { + capacity = Keys; + Keys = null; + Values = null; + } + + var cache = new LRUCache(Keys, Values, capacity); + + forEach(iterable, function(value, key) { + cache.set(key, value); + }); + + return cache; +}; + +/** + * Exporting. + */ +module.exports = LRUCache; diff --git a/amplify/functions/downloadDocument/node_modules/mnemonist/lru-map.d.ts b/amplify/functions/downloadDocument/node_modules/mnemonist/lru-map.d.ts new file mode 100644 index 0000000..0943543 --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/mnemonist/lru-map.d.ts @@ -0,0 +1,43 @@ +/** + * Mnemonist LRUMap Typings + * ========================= + */ +import {IArrayLikeConstructor} from './utils/types'; + +export default class LRUMap implements Iterable<[K, V]> { + + // Members + capacity: number; + size: number; + + // Constructor + constructor(capacity: number); + constructor(KeyArrayClass: IArrayLikeConstructor, ValueArrayClass: IArrayLikeConstructor, capacity: number); + + // Methods + clear(): void; + set(key: K, value: V): this; + setpop(key: K, value: V): {evicted: boolean, key: K, value: V}; + get(key: K): V | undefined; + peek(key: K): V | undefined; + has(key: K): boolean; + forEach(callback: (value: V, key: K, cache: this) => void, scope?: any): void; + keys(): IterableIterator; + values(): IterableIterator; + entries(): IterableIterator<[K, V]>; + [Symbol.iterator](): IterableIterator<[K, V]>; + inspect(): any; + + // Statics + static from( + iterable: Iterable<[I, J]> | {[key: string]: J}, + KeyArrayClass: IArrayLikeConstructor, + ValueArrayClass: IArrayLikeConstructor, + capacity?: number + ): LRUMap; + + static from( + iterable: Iterable<[I, J]> | {[key: string]: J}, + capacity?: number + ): LRUMap; +} diff --git a/amplify/functions/downloadDocument/node_modules/mnemonist/lru-map.js b/amplify/functions/downloadDocument/node_modules/mnemonist/lru-map.js new file mode 100644 
index 0000000..26afe27 --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/mnemonist/lru-map.js @@ -0,0 +1,258 @@ +/** + * Mnemonist LRUMap + * ================= + * + * Variant of the LRUCache class that leverages an ES6 Map instead of an object. + * It might be faster for some use case but it is still hard to understand + * when a Map can outperform an object in v8. + */ +var LRUCache = require('./lru-cache.js'), + forEach = require('obliterator/foreach'), + typed = require('./utils/typed-arrays.js'), + iterables = require('./utils/iterables.js'); + +/** + * LRUMap. + * + * @constructor + * @param {function} Keys - Array class for storing keys. + * @param {function} Values - Array class for storing values. + * @param {number} capacity - Desired capacity. + */ +function LRUMap(Keys, Values, capacity) { + if (arguments.length < 2) { + capacity = Keys; + Keys = null; + Values = null; + } + + this.capacity = capacity; + + if (typeof this.capacity !== 'number' || this.capacity <= 0) + throw new Error('mnemonist/lru-map: capacity should be positive number.'); + + var PointerArray = typed.getPointerArray(capacity); + + this.forward = new PointerArray(capacity); + this.backward = new PointerArray(capacity); + this.K = typeof Keys === 'function' ? new Keys(capacity) : new Array(capacity); + this.V = typeof Values === 'function' ? new Values(capacity) : new Array(capacity); + + // Properties + this.size = 0; + this.head = 0; + this.tail = 0; + this.items = new Map(); +} + +/** + * Method used to clear the structure. + * + * @return {undefined} + */ +LRUMap.prototype.clear = function() { + this.size = 0; + this.head = 0; + this.tail = 0; + this.items.clear(); +}; + +/** + * Method used to set the value for the given key in the cache. + * + * @param {any} key - Key. + * @param {any} value - Value. 
+ * @return {undefined} + */ +LRUMap.prototype.set = function(key, value) { + + // The key already exists, we just need to update the value and splay on top + var pointer = this.items.get(key); + + if (typeof pointer !== 'undefined') { + this.splayOnTop(pointer); + this.V[pointer] = value; + + return; + } + + // The cache is not yet full + if (this.size < this.capacity) { + pointer = this.size++; + } + + // Cache is full, we need to drop the last value + else { + pointer = this.tail; + this.tail = this.backward[pointer]; + this.items.delete(this.K[pointer]); + } + + // Storing key & value + this.items.set(key, pointer); + this.K[pointer] = key; + this.V[pointer] = value; + + // Moving the item at the front of the list + this.forward[pointer] = this.head; + this.backward[this.head] = pointer; + this.head = pointer; +}; + +/** + * Method used to set the value for the given key in the cache. + * + * @param {any} key - Key. + * @param {any} value - Value. + * @return {{evicted: boolean, key: any, value: any}} An object containing the + * key and value of an item that was overwritten or evicted in the set + * operation, as well as a boolean indicating whether it was evicted due to + * limited capacity. Return value is null if nothing was evicted or overwritten + * during the set operation. 
+ */ +LRUMap.prototype.setpop = function(key, value) { + var oldValue = null; + var oldKey = null; + // The key already exists, we just need to update the value and splay on top + var pointer = this.items.get(key); + + if (typeof pointer !== 'undefined') { + this.splayOnTop(pointer); + oldValue = this.V[pointer]; + this.V[pointer] = value; + return {evicted: false, key: key, value: oldValue}; + } + + // The cache is not yet full + if (this.size < this.capacity) { + pointer = this.size++; + } + + // Cache is full, we need to drop the last value + else { + pointer = this.tail; + this.tail = this.backward[pointer]; + oldValue = this.V[pointer]; + oldKey = this.K[pointer]; + this.items.delete(this.K[pointer]); + } + + // Storing key & value + this.items.set(key, pointer); + this.K[pointer] = key; + this.V[pointer] = value; + + // Moving the item at the front of the list + this.forward[pointer] = this.head; + this.backward[this.head] = pointer; + this.head = pointer; + + // Return object if eviction took place, otherwise return null + if (oldKey) { + return {evicted: true, key: oldKey, value: oldValue}; + } + else { + return null; + } +}; + +/** + * Method used to check whether the key exists in the cache. + * + * @param {any} key - Key. + * @return {boolean} + */ +LRUMap.prototype.has = function(key) { + return this.items.has(key); +}; + +/** + * Method used to get the value attached to the given key. Will move the + * related key to the front of the underlying linked list. + * + * @param {any} key - Key. + * @return {any} + */ +LRUMap.prototype.get = function(key) { + var pointer = this.items.get(key); + + if (typeof pointer === 'undefined') + return; + + this.splayOnTop(pointer); + + return this.V[pointer]; +}; + +/** + * Method used to get the value attached to the given key. Does not modify + * the ordering of the underlying linked list. + * + * @param {any} key - Key. 
+ * @return {any} + */ +LRUMap.prototype.peek = function(key) { + var pointer = this.items.get(key); + + if (typeof pointer === 'undefined') + return; + + return this.V[pointer]; +}; + +/** + * Methods that can be reused as-is from LRUCache. + */ +LRUMap.prototype.splayOnTop = LRUCache.prototype.splayOnTop; +LRUMap.prototype.forEach = LRUCache.prototype.forEach; +LRUMap.prototype.keys = LRUCache.prototype.keys; +LRUMap.prototype.values = LRUCache.prototype.values; +LRUMap.prototype.entries = LRUCache.prototype.entries; + +/** + * Attaching the #.entries method to Symbol.iterator if possible. + */ +if (typeof Symbol !== 'undefined') + LRUMap.prototype[Symbol.iterator] = LRUMap.prototype.entries; + +/** + * Convenience known methods. + */ +LRUMap.prototype.inspect = LRUCache.prototype.inspect; + +/** + * Static @.from function taking an arbitrary iterable & converting it into + * a structure. + * + * @param {Iterable} iterable - Target iterable. + * @param {function} Keys - Array class for storing keys. + * @param {function} Values - Array class for storing values. + * @param {number} capacity - Cache's capacity. + * @return {LRUMap} + */ +LRUMap.from = function(iterable, Keys, Values, capacity) { + if (arguments.length < 2) { + capacity = iterables.guessLength(iterable); + + if (typeof capacity !== 'number') + throw new Error('mnemonist/lru-cache.from: could not guess iterable length. Please provide desired capacity as last argument.'); + } + else if (arguments.length === 2) { + capacity = Keys; + Keys = null; + Values = null; + } + + var cache = new LRUMap(Keys, Values, capacity); + + forEach(iterable, function(value, key) { + cache.set(key, value); + }); + + return cache; +}; + +/** + * Exporting. 
+ */ +module.exports = LRUMap; diff --git a/amplify/functions/downloadDocument/node_modules/mnemonist/multi-array.js b/amplify/functions/downloadDocument/node_modules/mnemonist/multi-array.js new file mode 100644 index 0000000..c165b55 --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/mnemonist/multi-array.js @@ -0,0 +1,447 @@ +/** + * Mnemonist MultiArray + * ===================== + * + * Memory-efficient representation of an array of arrays. In JavaScript and + * most high-level languages, creating objects has a cost. This implementation + * is therefore able to represent nested containers without needing to create + * objects. This works by storing singly linked lists in a single flat array. + * However, this means that this structure comes with some read/write + * overhead but consume very few memory. + * + * This structure should be particularly suited to indices that will need to + * merge arrays anyway when queried and that are quite heavily hit (such as + * an inverted index or a quad tree). + * + * Note: the implementation does not require to keep track of head pointers + * but this comes with some advantages such as not needing to offset pointers + * by 1 and being able to perform in-order iteration. This remains quite lean + * in memory and does not hinder performance whatsoever. + */ +var typed = require('./utils/typed-arrays.js'), + Vector = require('./vector.js'), + Iterator = require('obliterator/iterator'); + +var PointerVector = Vector.PointerVector; + +/** + * MultiArray. + * + * @constructor + */ +function MultiArray(Container, capacity) { + this.capacity = capacity || null; + this.Container = Container || Array; + this.hasFixedCapacity = this.capacity !== null; + + if (typeof this.Container !== 'function') + throw new Error('mnemonist/multi-array.constructor: container should be a function.'); + + this.clear(); +} + +/** + * Method used to clear the structure. 
+ * + * @return {undefined} + */ +MultiArray.prototype.clear = function() { + + // Properties + this.size = 0; + this.dimension = 0; + + // NOTE: #.heads, #.tails & #.lengths have a length equal to the dimension of + // the array, while #.pointers has a length equal to its size. + + // Storage + if (this.hasFixedCapacity) { + var capacity = this.capacity; + + var PointerArray = typed.getPointerArray(capacity); + + var policy = function(currentCapacity) { + var newCapacity = Math.max(1, Math.ceil(currentCapacity * 1.5)); + + // Clamping max allocation + return Math.min(newCapacity, capacity); + }; + + var initialCapacity = Math.max(8, capacity); + + this.tails = new Vector(PointerArray, {policy: policy, initialCapacity: initialCapacity}); + this.lengths = new Vector(PointerArray, {policy: policy, initialCapacity: initialCapacity}); + this.pointers = new PointerArray(capacity); + + this.items = new this.Container(capacity); + } + else { + + this.tails = new PointerVector(); + this.lengths = new PointerVector(); + this.pointers = new PointerVector(); + + this.items = new this.Container(); + } +}; + +/** + * Method used to add an item to the container at the given index. + * + * @param {number} index - Index of the container. + * @param {any} item - Item to add. + * @return {MultiArray} + */ +MultiArray.prototype.set = function(index, item) { + var pointer = this.size; + + // TODO: this can be factorized! + + if (this.hasFixedCapacity) { + + if (index >= this.capacity || this.size === this.capacity) + throw new Error('mnemonist/multi-array: attempting to allocate further than capacity.'); + + // This linked list does not exist yet. 
Let's create it + if (index >= this.dimension) { + + // We may be required to grow the vectors + this.dimension = index + 1; + this.tails.grow(this.dimension); + this.lengths.grow(this.dimension); + + this.tails.resize(this.dimension); + this.lengths.resize(this.dimension); + + this.lengths.array[index] = 1; + } + + // Appending to the list + else { + this.pointers[pointer] = this.tails.array[index]; + this.lengths.array[index]++; + } + + this.tails.array[index] = pointer; + this.items[pointer] = item; + } + else { + + // This linked list does not exist yet. Let's create it + if (index >= this.dimension) { + + // We may be required to grow the vectors + this.dimension = index + 1; + this.tails.grow(this.dimension); + this.lengths.grow(this.dimension); + + this.tails.resize(this.dimension); + this.lengths.resize(this.dimension); + + this.pointers.push(0); + this.lengths.array[index] = 1; + } + + // Appending to the list + else { + this.pointers.push(this.tails.array[index]); + this.lengths.array[index]++; + } + + this.tails.array[index] = pointer; + this.items.push(item); + } + + this.size++; + + return this; +}; + +/** + * Method used to push a new container holding the given value. + * Note: it might be useful to make this function able to take an iterable + * or variadic someday. For the time being it's just a convenience for + * implementing compact multi maps and such. + * + * @param {any} item - Item to add. 
+ * @return {MultiArray} + */ +MultiArray.prototype.push = function(item) { + var pointer = this.size, + index = this.dimension; + + if (this.hasFixedCapacity) { + + if (index >= this.capacity || this.size === this.capacity) + throw new Error('mnemonist/multi-array: attempting to allocate further than capacity.'); + + this.items[pointer] = item; + } + else { + this.items.push(item); + this.pointers.push(0); + } + + this.lengths.push(1); + this.tails.push(pointer); + + this.dimension++; + this.size++; + + return this; +}; + +/** + * Method used to get the desired container. + * + * @param {number} index - Index of the container. + * @return {array} + */ +MultiArray.prototype.get = function(index) { + if (index >= this.dimension) + return; + + var pointers = this.hasFixedCapacity ? this.pointers : this.pointers.array; + + var pointer = this.tails.array[index], + length = this.lengths.array[index], + i = length; + + var array = new this.Container(length); + + while (i !== 0) { + array[--i] = this.items[pointer]; + pointer = pointers[pointer]; + } + + return array; +}; + +/** + * Method used to check if a container exists at the given index. + * + * @param {number} index - Index of the container. + * @return {boolean} + */ +MultiArray.prototype.has = function(index) { + return index < this.dimension; +}; + +/** + * Method used to get the size of the container stored at given index. + * + * @param {number} index - Index of the container. + * @return {number} + */ +MultiArray.prototype.multiplicity = function(index) { + if (index >= this.dimension) + return 0; + + return this.lengths.array[index]; +}; +MultiArray.prototype.count = MultiArray.prototype.multiplicity; + +/** + * Method used to iterate over the structure's containers. 
+ * + * @return {Iterator} + */ +MultiArray.prototype.containers = function() { + var self = this, + l = this.dimension, + i = 0; + + return new Iterator(function() { + if (i >= l) + return {done: true}; + + return {value: self.get(i++)}; + }); +}; + +/** + * Method used to iterate over the structure's associations. + * + * @return {Iterator} + */ +MultiArray.prototype.associations = function() { + var self = this, + l = this.dimension, + i = 0; + + return new Iterator(function() { + if (i >= l) + return {done: true}; + + var data = {value: [i, self.get(i)]}; + + i++; + + return data; + }); +}; + +/** + * Method used to iterate over the structure's values in the global insertion + * order. + * + * @param {number} [index] - Optionally, iterate over the values of a single + * container at index. + * @return {Iterator} + */ +MultiArray.prototype.values = function(index) { + var items = this.items, + length, + i = 0; + + if (typeof index === 'number') { + if (index >= this.dimension) + return Iterator.empty(); + + length = this.lengths.array[index]; + items = this.items; + + var pointers = this.hasFixedCapacity ? this.pointers : this.pointers.array; + + if (length === 0) + return Iterator.empty(); + + var pointer = this.tails.array[index], + v; + + return new Iterator(function() { + if (i === length) + return {done: true}; + + i++; + v = items[pointer]; + pointer = pointers[pointer]; + + return {done: false, value: v}; + }); + } + + length = this.size; + + return new Iterator(function() { + if (i >= length) + return {done: true}; + + return {done: false, value: items[i++]}; + }); +}; + +/** + * Method used to iterate over the structure's entries. + * + * @return {Iterator} + */ +MultiArray.prototype.entries = function() { + if (this.size === 0) + return Iterator.empty(); + + var inContainer = false, + pointer, + length, + i = 0, + j = 0, + l = this.dimension, + v; + + var pointers = this.hasFixedCapacity ? 
this.pointers : this.pointers.array, + items = this.items, + tails = this.tails.array, + lengths = this.lengths.array; + + var iterator = new Iterator(function next() { + if (!inContainer) { + + if (i >= l) + return {done: true}; + + length = lengths[i]; + pointer = tails[i]; + i++; + + if (length === 0) + return next(); + + j = 0; + inContainer = true; + } + + if (j === length) { + inContainer = false; + return next(); + } + + v = items[pointer]; + + // TODO: guard for out-of-bounds + pointer = pointers[pointer]; + + j++; + + return { + done: false, + value: [i - 1, v] + }; + }); + + return iterator; +}; + +/** + * Method used to iterate over the structure's keys. + * + * @return {Iterator} + */ +MultiArray.prototype.keys = function() { + var i = 0, + l = this.dimension; + + return new Iterator(function() { + if (i >= l) + return {done: true}; + + return {done: false, value: i++}; + }); +}; + +/** + * Convenience known methods. + */ +MultiArray.prototype.inspect = function() { + var proxy = new Array(this.dimension), + i, + l; + + for (i = 0, l = this.dimension; i < l; i++) + proxy[i] = Array.from(this.get(i)); + + if (this.hasFixedCapacity) { + proxy.type = this.Container.name; + proxy.capacity = this.capacity; + } + + proxy.size = this.size; + proxy.dimension = this.dimension; + + // Trick so that node displays the name of the constructor + Object.defineProperty(proxy, 'constructor', { + value: MultiArray, + enumerable: false + }); + + return proxy; +}; + +if (typeof Symbol !== 'undefined') + MultiArray.prototype[Symbol.for('nodejs.util.inspect.custom')] = MultiArray.prototype.inspect; + +// TODO: .from + +/** + * Exporting. 
+ */ +module.exports = MultiArray; diff --git a/amplify/functions/downloadDocument/node_modules/mnemonist/multi-map.d.ts b/amplify/functions/downloadDocument/node_modules/mnemonist/multi-map.d.ts new file mode 100644 index 0000000..e4c8543 --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/mnemonist/multi-map.d.ts @@ -0,0 +1,47 @@ +/** + * Mnemonist MultiMap Typings + * =========================== + */ + +interface MultiMap = V[]> extends Iterable<[K, V]> { + + // Members + dimension: number; + size: number; + + // Methods + clear(): void; + set(key: K, value: V): this; + delete(key: K): boolean; + remove(key: K, value: V): boolean; + has(key: K): boolean; + get(key: K): C | undefined; + multiplicity(key: K): number; + forEach(callback: (value: V, key: K, map: this) => void, scope?: any): void; + forEachAssociation(callback: (value: C, key: K, map: this) => void, scope?: any): void; + keys(): IterableIterator; + values(): IterableIterator; + entries(): IterableIterator<[K, V]>; + containers(): IterableIterator; + associations(): IterableIterator<[K, C]>; + [Symbol.iterator](): IterableIterator<[K, V]>; + inspect(): any; + toJSON(): any; +} + +interface MultiMapConstructor { + new (container: SetConstructor): MultiMap>; + new (container?: ArrayConstructor): MultiMap; + + from( + iterable: Iterable<[K, V]> | {[key: string]: V}, + Container: SetConstructor + ): MultiMap>; + from( + iterable: Iterable<[K, V]> | {[key: string]: V}, + Container?: ArrayConstructor + ): MultiMap; +} + +declare const MultiMap: MultiMapConstructor; +export default MultiMap; \ No newline at end of file diff --git a/amplify/functions/downloadDocument/node_modules/mnemonist/multi-map.js b/amplify/functions/downloadDocument/node_modules/mnemonist/multi-map.js new file mode 100644 index 0000000..0b36e15 --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/mnemonist/multi-map.js @@ -0,0 +1,408 @@ +/** + * Mnemonist MultiMap + * =================== + * + * 
Implementation of a MultiMap with custom container. + */ +var Iterator = require('obliterator/iterator'), + forEach = require('obliterator/foreach'); + +/** + * MultiMap. + * + * @constructor + */ +function MultiMap(Container) { + + this.Container = Container || Array; + this.items = new Map(); + this.clear(); + + Object.defineProperty(this.items, 'constructor', { + value: MultiMap, + enumerable: false + }); +} + +/** + * Method used to clear the structure. + * + * @return {undefined} + */ +MultiMap.prototype.clear = function() { + + // Properties + this.size = 0; + this.dimension = 0; + this.items.clear(); +}; + +/** + * Method used to set a value. + * + * @param {any} key - Key. + * @param {any} value - Value to add. + * @return {MultiMap} + */ +MultiMap.prototype.set = function(key, value) { + var container = this.items.get(key), + sizeBefore; + + if (!container) { + this.dimension++; + container = new this.Container(); + this.items.set(key, container); + } + + if (this.Container === Set) { + sizeBefore = container.size; + container.add(value); + + if (sizeBefore < container.size) + this.size++; + } + else { + container.push(value); + this.size++; + } + + return this; +}; + +/** + * Method used to delete the given key. + * + * @param {any} key - Key to delete. + * @return {boolean} + */ +MultiMap.prototype.delete = function(key) { + var container = this.items.get(key); + + if (!container) + return false; + + this.size -= (this.Container === Set ? container.size : container.length); + this.dimension--; + this.items.delete(key); + + return true; +}; + +/** + * Method used to delete the remove an item in the container stored at the + * given key. + * + * @param {any} key - Key to delete. 
+ * @return {boolean} + */ +MultiMap.prototype.remove = function(key, value) { + var container = this.items.get(key), + wasDeleted, + index; + + if (!container) + return false; + + if (this.Container === Set) { + wasDeleted = container.delete(value); + + if (wasDeleted) + this.size--; + + if (container.size === 0) { + this.items.delete(key); + this.dimension--; + } + + return wasDeleted; + } + else { + index = container.indexOf(value); + + if (index === -1) + return false; + + this.size--; + + if (container.length === 1) { + this.items.delete(key); + this.dimension--; + + return true; + } + + container.splice(index, 1); + + return true; + } +}; + +/** + * Method used to return whether the given keys exists in the map. + * + * @param {any} key - Key to check. + * @return {boolean} + */ +MultiMap.prototype.has = function(key) { + return this.items.has(key); +}; + +/** + * Method used to return the container stored at the given key or `undefined`. + * + * @param {any} key - Key to get. + * @return {boolean} + */ +MultiMap.prototype.get = function(key) { + return this.items.get(key); +}; + +/** + * Method used to return the multiplicity of the given key, meaning the number + * of times it is set, or, more trivially, the size of the attached container. + * + * @param {any} key - Key to check. + * @return {number} + */ +MultiMap.prototype.multiplicity = function(key) { + var container = this.items.get(key); + + if (typeof container === 'undefined') + return 0; + + return this.Container === Set ? container.size : container.length; +}; +MultiMap.prototype.count = MultiMap.prototype.multiplicity; + +/** + * Method used to iterate over each of the key/value pairs. + * + * @param {function} callback - Function to call for each item. + * @param {object} scope - Optional scope. + * @return {undefined} + */ +MultiMap.prototype.forEach = function(callback, scope) { + scope = arguments.length > 1 ? 
scope : this; + + // Inner iteration function is created here to avoid creating it in the loop + var key; + function inner(value) { + callback.call(scope, value, key); + } + + this.items.forEach(function(container, k) { + key = k; + container.forEach(inner); + }); +}; + +/** + * Method used to iterate over each of the associations. + * + * @param {function} callback - Function to call for each item. + * @param {object} scope - Optional scope. + * @return {undefined} + */ +MultiMap.prototype.forEachAssociation = function(callback, scope) { + scope = arguments.length > 1 ? scope : this; + + this.items.forEach(callback, scope); +}; + +/** + * Method returning an iterator over the map's keys. + * + * @return {Iterator} + */ +MultiMap.prototype.keys = function() { + return this.items.keys(); +}; + +/** + * Method returning an iterator over the map's keys. + * + * @return {Iterator} + */ +MultiMap.prototype.values = function() { + var iterator = this.items.values(), + inContainer = false, + countainer, + step, + i, + l; + + if (this.Container === Set) + return new Iterator(function next() { + if (!inContainer) { + step = iterator.next(); + + if (step.done) + return {done: true}; + + inContainer = true; + countainer = step.value.values(); + } + + step = countainer.next(); + + if (step.done) { + inContainer = false; + return next(); + } + + return { + done: false, + value: step.value + }; + }); + + return new Iterator(function next() { + if (!inContainer) { + step = iterator.next(); + + if (step.done) + return {done: true}; + + inContainer = true; + countainer = step.value; + i = 0; + l = countainer.length; + } + + if (i >= l) { + inContainer = false; + return next(); + } + + return { + done: false, + value: countainer[i++] + }; + }); +}; + +/** + * Method returning an iterator over the map's entries. 
+ * + * @return {Iterator} + */ +MultiMap.prototype.entries = function() { + var iterator = this.items.entries(), + inContainer = false, + countainer, + step, + key, + i, + l; + + if (this.Container === Set) + return new Iterator(function next() { + if (!inContainer) { + step = iterator.next(); + + if (step.done) + return {done: true}; + + inContainer = true; + key = step.value[0]; + countainer = step.value[1].values(); + } + + step = countainer.next(); + + if (step.done) { + inContainer = false; + return next(); + } + + return { + done: false, + value: [key, step.value] + }; + }); + + return new Iterator(function next() { + if (!inContainer) { + step = iterator.next(); + + if (step.done) + return {done: true}; + + inContainer = true; + key = step.value[0]; + countainer = step.value[1]; + i = 0; + l = countainer.length; + } + + if (i >= l) { + inContainer = false; + return next(); + } + + return { + done: false, + value: [key, countainer[i++]] + }; + }); +}; + +/** + * Method returning an iterator over the map's containers. + * + * @return {Iterator} + */ +MultiMap.prototype.containers = function() { + return this.items.values(); +}; + +/** + * Method returning an iterator over the map's associations. + * + * @return {Iterator} + */ +MultiMap.prototype.associations = function() { + return this.items.entries(); +}; + +/** + * Attaching the #.entries method to Symbol.iterator if possible. + */ +if (typeof Symbol !== 'undefined') + MultiMap.prototype[Symbol.iterator] = MultiMap.prototype.entries; + +/** + * Convenience known methods. + */ +MultiMap.prototype.inspect = function() { + return this.items; +}; + +if (typeof Symbol !== 'undefined') + MultiMap.prototype[Symbol.for('nodejs.util.inspect.custom')] = MultiMap.prototype.inspect; +MultiMap.prototype.toJSON = function() { + return this.items; +}; + +/** + * Static @.from function taking an arbitrary iterable & converting it into + * a structure. + * + * @param {Iterable} iterable - Target iterable. 
+ * @param {Class} Container - Container. + * @return {MultiMap} + */ +MultiMap.from = function(iterable, Container) { + var map = new MultiMap(Container); + + forEach(iterable, function(value, key) { + map.set(key, value); + }); + + return map; +}; + +/** + * Exporting. + */ +module.exports = MultiMap; diff --git a/amplify/functions/downloadDocument/node_modules/mnemonist/multi-set.d.ts b/amplify/functions/downloadDocument/node_modules/mnemonist/multi-set.d.ts new file mode 100644 index 0000000..0e40bc4 --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/mnemonist/multi-set.d.ts @@ -0,0 +1,37 @@ +/** + * Mnemonist MultiSet Typings + * =========================== + */ +export default class MultiSet implements Iterable { + + // Members + dimension: number; + size: number; + + // Methods + clear(): void; + add(key: K, count?: number): this; + set(key: K, count: number): this; + has(key: K): boolean; + delete(key: K): boolean; + remove(key: K, count?: number): void; + edit(a: K, b: K): this; + multiplicity(key: K): number; + count(key: K): number; + get(key: K): number; + frequency(key: K): number; + top(n: number): Array<[K, number]>; + forEach(callback: (value: K, key: K, set: this) => void, scope?: any): void; + forEachMultiplicity(callback: (value: number, key: K, set: this) => void, scope?: any): void; + keys(): IterableIterator; + values(): IterableIterator; + multiplicities(): IterableIterator<[K, number]>; + [Symbol.iterator](): IterableIterator; + inspect(): any; + toJSON(): any; + + // Statics + static from(iterable: Iterable | {[key: string]: I}): MultiSet; + static isSubset(a: MultiSet, b: MultiSet): boolean; + static isSuperset(a: MultiSet, b: MultiSet): boolean; +} diff --git a/amplify/functions/downloadDocument/node_modules/mnemonist/multi-set.js b/amplify/functions/downloadDocument/node_modules/mnemonist/multi-set.js new file mode 100644 index 0000000..3206af7 --- /dev/null +++ 
b/amplify/functions/downloadDocument/node_modules/mnemonist/multi-set.js @@ -0,0 +1,440 @@ +/** + * Mnemonist MultiSet + * ==================== + * + * JavaScript implementation of a MultiSet. + */ +var Iterator = require('obliterator/iterator'), + forEach = require('obliterator/foreach'), + FixedReverseHeap = require('./fixed-reverse-heap.js'); + +/** + * Helpers. + */ +var MULTISET_ITEM_COMPARATOR = function(a, b) { + if (a[1] > b[1]) + return -1; + if (a[1] < b[1]) + return 1; + + return 0; +}; + +// TODO: helper functions: union, intersection, sum, difference, subtract + +/** + * MultiSet. + * + * @constructor + */ +function MultiSet() { + this.items = new Map(); + + Object.defineProperty(this.items, 'constructor', { + value: MultiSet, + enumerable: false + }); + + this.clear(); +} + +/** + * Method used to clear the structure. + * + * @return {undefined} + */ +MultiSet.prototype.clear = function() { + + // Properties + this.size = 0; + this.dimension = 0; + this.items.clear(); +}; + +/** + * Method used to add an item to the set. + * + * @param {any} item - Item to add. + * @param {number} count - Optional count. + * @return {MultiSet} + */ +MultiSet.prototype.add = function(item, count) { + if (count === 0) + return this; + + if (count < 0) + return this.remove(item, -count); + + count = count || 1; + + if (typeof count !== 'number') + throw new Error('mnemonist/multi-set.add: given count should be a number.'); + + this.size += count; + + const currentCount = this.items.get(item); + + if (currentCount === undefined) + this.dimension++; + else + count += currentCount; + + this.items.set(item, count); + + return this; +}; + +/** + * Method used to set the multiplicity of an item in the set. + * + * @param {any} item - Target item. + * @param {number} count - Desired multiplicity. 
+ * @return {MultiSet} + */ +MultiSet.prototype.set = function(item, count) { + var currentCount; + + if (typeof count !== 'number') + throw new Error('mnemonist/multi-set.set: given count should be a number.'); + + // Setting an item to 0 or to a negative number means deleting it from the set + if (count <= 0) { + currentCount = this.items.get(item); + + if (typeof currentCount !== 'undefined') { + this.size -= currentCount; + this.dimension--; + } + + this.items.delete(item); + return this; + } + + count = count || 1; + + currentCount = this.items.get(item); + + if (typeof currentCount === 'number') { + this.items.set(item, currentCount + count); + } + else { + this.dimension++; + this.items.set(item, count); + } + + this.size += count; + + return this; +}; + +/** + * Method used to return whether the item exists in the set. + * + * @param {any} item - Item to check. + * @return {boolan} + */ +MultiSet.prototype.has = function(item) { + return this.items.has(item); +}; + +/** + * Method used to delete an item from the set. + * + * @param {any} item - Item to delete. + * @return {boolan} + */ +MultiSet.prototype.delete = function(item) { + var count = this.items.get(item); + + if (count === 0) + return false; + + this.size -= count; + this.dimension--; + this.items.delete(item); + + return true; +}; + +/** + * Method used to remove an item from the set. + * + * @param {any} item - Item to delete. + * @param {number} count - Optional count. 
+ * @return {undefined} + */ +MultiSet.prototype.remove = function(item, count) { + if (count === 0) + return; + + if (count < 0) + return this.add(item, -count); + + count = count || 1; + + if (typeof count !== 'number') + throw new Error('mnemonist/multi-set.remove: given count should be a number.'); + + var currentCount = this.multiplicity(item), + newCount = Math.max(0, currentCount - count); + + if (newCount === 0) { + this.delete(item); + } + else { + this.items.set(item, newCount); + this.size -= (currentCount - newCount); + } + + return; +}; + +/** + * Method used to change a key into another one, merging counts if the target + * key already exists. + * + * @param {any} a - From key. + * @param {any} b - To key. + * @return {MultiSet} + */ +MultiSet.prototype.edit = function(a, b) { + var am = this.multiplicity(a); + + // If a does not exist in the set, we can stop right there + if (am === 0) + return; + + var bm = this.multiplicity(b); + + this.items.set(b, am + bm); + this.items.delete(a); + + return this; +}; + +/** + * Method used to return the multiplicity of the given item. + * + * @param {any} item - Item to get. + * @return {number} + */ +MultiSet.prototype.multiplicity = function(item) { + var count = this.items.get(item); + + if (typeof count === 'undefined') + return 0; + + return count; +}; +MultiSet.prototype.get = MultiSet.prototype.multiplicity; +MultiSet.prototype.count = MultiSet.prototype.multiplicity; + +/** + * Method used to return the frequency of the given item in the set. + * + * @param {any} item - Item to get. + * @return {number} + */ +MultiSet.prototype.frequency = function(item) { + if (this.size === 0) + return 0; + + var count = this.multiplicity(item); + + return count / this.size; +}; + +/** + * Method used to return the n most common items from the set. + * + * @param {number} n - Number of items to retrieve. 
+ * @return {array} + */ +MultiSet.prototype.top = function(n) { + if (typeof n !== 'number' || n <= 0) + throw new Error('mnemonist/multi-set.top: n must be a number > 0.'); + + var heap = new FixedReverseHeap(Array, MULTISET_ITEM_COMPARATOR, n); + + var iterator = this.items.entries(), + step; + + while ((step = iterator.next(), !step.done)) + heap.push(step.value); + + return heap.consume(); +}; + +/** + * Method used to iterate over the set's values. + * + * @param {function} callback - Function to call for each item. + * @param {object} scope - Optional scope. + * @return {undefined} + */ +MultiSet.prototype.forEach = function(callback, scope) { + scope = arguments.length > 1 ? scope : this; + + var i; + + this.items.forEach(function(multiplicity, value) { + + for (i = 0; i < multiplicity; i++) + callback.call(scope, value, value); + }); +}; + +/** + * Method used to iterate over the set's multiplicities. + * + * @param {function} callback - Function to call for each multiplicity. + * @param {object} scope - Optional scope. + * @return {undefined} + */ +MultiSet.prototype.forEachMultiplicity = function(callback, scope) { + scope = arguments.length > 1 ? scope : this; + + this.items.forEach(callback, scope); +}; + +/** + * Method returning an iterator over the set's keys. I.e. its unique values, + * in a sense. + * + * @return {Iterator} + */ +MultiSet.prototype.keys = function() { + return this.items.keys(); +}; + +/** + * Method returning an iterator over the set's values. 
+ * + * @return {Iterator} + */ +MultiSet.prototype.values = function() { + var iterator = this.items.entries(), + inContainer = false, + step, + value, + multiplicity, + i; + + return new Iterator(function next() { + if (!inContainer) { + step = iterator.next(); + + if (step.done) + return {done: true}; + + inContainer = true; + value = step.value[0]; + multiplicity = step.value[1]; + i = 0; + } + + if (i >= multiplicity) { + inContainer = false; + return next(); + } + + i++; + + return { + done: false, + value: value + }; + }); +}; + +/** + * Method returning an iterator over the set's multiplicities. + * + * @return {Iterator} + */ +MultiSet.prototype.multiplicities = function() { + return this.items.entries(); +}; + +/** + * Attaching the #.entries method to Symbol.iterator if possible. + */ +if (typeof Symbol !== 'undefined') + MultiSet.prototype[Symbol.iterator] = MultiSet.prototype.values; + +/** + * Convenience known methods. + */ +MultiSet.prototype.inspect = function() { + return this.items; +}; + +if (typeof Symbol !== 'undefined') + MultiSet.prototype[Symbol.for('nodejs.util.inspect.custom')] = MultiSet.prototype.inspect; +MultiSet.prototype.toJSON = function() { + return this.items; +}; + +/** + * Static @.from function taking an arbitrary iterable & converting it into + * a structure. + * + * @param {Iterable} iterable - Target iterable. + * @return {MultiSet} + */ +MultiSet.from = function(iterable) { + var set = new MultiSet(); + + forEach(iterable, function(value) { + set.add(value); + }); + + return set; +}; + +/** + * Function returning whether the multiset A is a subset of the multiset B. + * + * @param {MultiSet} A - First set. + * @param {MultiSet} B - Second set. 
+ * @return {boolean} + */ +MultiSet.isSubset = function(A, B) { + var iterator = A.multiplicities(), + step, + key, + mA; + + // Shortcuts + if (A === B) + return true; + + if (A.dimension > B.dimension) + return false; + + while ((step = iterator.next(), !step.done)) { + key = step.value[0]; + mA = step.value[1]; + + if (B.multiplicity(key) < mA) + return false; + } + + return true; +}; + +/** + * Function returning whether the multiset A is a superset of the multiset B. + * + * @param {MultiSet} A - First set. + * @param {MultiSet} B - Second set. + * @return {boolean} + */ +MultiSet.isSuperset = function(A, B) { + return MultiSet.isSubset(B, A); +}; + +/** + * Exporting. + */ +module.exports = MultiSet; diff --git a/amplify/functions/downloadDocument/node_modules/mnemonist/package.json b/amplify/functions/downloadDocument/node_modules/mnemonist/package.json new file mode 100644 index 0000000..79e8f19 --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/mnemonist/package.json @@ -0,0 +1,119 @@ +{ + "name": "mnemonist", + "version": "0.38.3", + "description": "Curated collection of data structures for the JavaScript language.", + "scripts": { + "lint": "eslint ./*.js ./utils ./test", + "prepublish": "npm run lint && npm test && npm run test:types", + "test": "mocha", + "test:types": "tsc --target es2015 --noEmit --noImplicitAny --noImplicitReturns ./test/types.ts" + }, + "main": "./index.js", + "types": "./index.d.ts", + "files": [ + "sort", + "utils", + "*.d.ts", + "*.js" + ], + "repository": { + "type": "git", + "url": "git+https://github.com/yomguithereal/mnemonist.git" + }, + "keywords": [ + "bag", + "bimap", + "bit array", + "bit set", + "bit vector", + "bitset", + "bk tree", + "burkhard-keller tree", + "cache", + "circular buffer", + "counter", + "data structures", + "default map", + "deque", + "disjoint set", + "fibonacci heap", + "fuzzy map", + "hashed array tree", + "heap", + "interval tree", + "inverted index", + "kd tree", + "linked 
list", + "lru", + "lru cache", + "multimap", + "multiset", + "passjoin", + "queue", + "sparse map", + "sparse set", + "stack", + "structures", + "suffix tree", + "symspell", + "trie", + "union find", + "vantage point tree", + "vector", + "vp tree" + ], + "author": { + "name": "Guillaume Plique", + "url": "http://github.com/Yomguithereal" + }, + "license": "MIT", + "bugs": { + "url": "https://github.com/yomguithereal/mnemonist/issues" + }, + "homepage": "https://github.com/yomguithereal/mnemonist#readme", + "dependencies": { + "obliterator": "^1.6.1" + }, + "devDependencies": { + "@yomguithereal/eslint-config": "^4.0.0", + "asciitree": "^1.0.2", + "damerau-levenshtein": "^1.0.6", + "eslint": "^7.21.0", + "leven": "^3.1.0", + "lodash": "^4.17.21", + "matcha": "^0.7.0", + "mocha": "^8.3.0", + "pandemonium": "^2.0.0", + "seedrandom": "^3.0.5", + "static-kdtree": "^1.0.2", + "typescript": "^4.2.2" + }, + "eslintConfig": { + "extends": "@yomguithereal/eslint-config", + "globals": { + "Set": true, + "Map": true, + "WeakMap": true, + "Symbol": true, + "ArrayBuffer": true, + "Uint8Array": true, + "Uint8ClampedArray": true, + "Uint16Array": true, + "Uint32Array": true, + "Int8Array": true, + "Int16Array": true, + "Int32Array": true, + "Float32Array": true, + "Float64Array": true + }, + "parserOptions": { + "ecmaVersion": 6, + "ecmaFeatures": { + "forOf": true + } + }, + "rules": { + "no-new": 0 + } + } +} diff --git a/amplify/functions/downloadDocument/node_modules/mnemonist/passjoin-index.d.ts b/amplify/functions/downloadDocument/node_modules/mnemonist/passjoin-index.d.ts new file mode 100644 index 0000000..4d91746 --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/mnemonist/passjoin-index.d.ts @@ -0,0 +1,54 @@ +/** + * Mnemonist PassjoinIndex Typings + * ================================ + */ +type LevenshteinDistanceFunction = (a: T, b: T) => number; + +export default class PassjoinIndex implements Iterable { + + // Members + size: number; + + // 
Constructor + constructor(levenshtein: LevenshteinDistanceFunction, k: number); + + // Methods + add(value: T): this; + search(query: T): Set; + clear(): void; + forEach(callback: (value: T, index: number, self: this) => void, scope?: any): void; + values(): IterableIterator; + [Symbol.iterator](): IterableIterator; + inspect(): any; + + // Statics + static from( + iterable: Iterable | {[key: string] : I}, + levenshtein: LevenshteinDistanceFunction, + k: number + ): PassjoinIndex; +} + +export function countKeys(k: number, s: number): number; +export function comparator(a: T, b: T): number; +export function partition(k: number, l: number): Array<[number, number]>; +export function segments(k: number, string: T): Array; +export function segmentPos(k: number, i: number, string: T): number; + +export function multiMatchAwareInterval( + k: number, + delta: number, + i: number, + s: number, + pi: number, + li: number +): [number, number]; + +export function multiMatchAwareSubstrings( + k: number, + string: T, + l: number, + i: number, + pi: number, + li: number +): Array; diff --git a/amplify/functions/downloadDocument/node_modules/mnemonist/passjoin-index.js b/amplify/functions/downloadDocument/node_modules/mnemonist/passjoin-index.js new file mode 100644 index 0000000..652d614 --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/mnemonist/passjoin-index.js @@ -0,0 +1,518 @@ +/** + * Mnemonist PassjoinIndex + * ======================== + * + * The PassjoinIndex is an index leveraging the "passjoin" algorithm as a mean + * to index strings for Levenshtein distance queries. It features a complexity + * related to the Levenshtein query threshold k rather than the number of + * strings to test (roughly O(k^3)). + * + * [References]: + * Jiang, Yu, Dong Deng, Jiannan Wang, Guoliang Li, et Jianhua Feng. + * « Efficient Parallel Partition-Based Algorithms for Similarity Search and Join + * with Edit Distance Constraints ». 
In Proceedings of the Joint EDBT/ICDT 2013 + * Workshops on - EDBT ’13, 341. Genoa, Italy: ACM Press, 2013. + * https://doi.org/10.1145/2457317.2457382. + * + * Li, Guoliang, Dong Deng, et Jianhua Feng. « A Partition-Based Method for + * String Similarity Joins with Edit-Distance Constraints ». ACM Transactions on + * Database Systems 38, no 2 (1 juin 2013): 1‑33. + * https://doi.org/10.1145/2487259.2487261. + * + * [Urls]: + * http://people.csail.mit.edu/dongdeng/projects/passjoin/index.html + */ +var Iterator = require('obliterator/iterator'), + forEach = require('obliterator/foreach'); + +// TODO: leveraging BagDistance as an upper bound of Levenshtein +// TODO: leverage n-grams recursive indexing +// TODO: try the MultiArray as a memory backend +// TODO: what about damerau levenshtein + +/** + * Helpers. + */ + +/** + * Function returning the number of substrings that will be selected by the + * multi-match-aware selection scheme for theshold `k`, for a string of length + * `s` to match strings of length `l`. + * + * @param {number} k - Levenshtein distance threshold. + * @param {number} s - Length of target strings. + * @param {number} l - Length of strings to match. + * @returns {number} - The number of selected substrings. + */ +function countSubstringsL(k, s, l) { + return (((Math.pow(k, 2) - Math.pow(Math.abs(s - l), 2)) / 2) | 0) + k + 1; +} + +/** + * Function returning the minimum number of substrings that will be selected by + * the multi-match-aware selection scheme for theshold `k`, for a string of + * length `s` to match any string of relevant length. + * + * @param {number} k - Levenshtein distance threshold. + * @param {number} s - Length of target strings. + * @returns {number} - The number of selected substrings. 
+ */ +function countKeys(k, s) { + var c = 0; + + for (var l = 0, m = s + 1; l < m; l++) + c += countSubstringsL(k, s, l); + + return c; +} + +/** + * Function used to compare two keys in order to sort them first by decreasing + * length and then alphabetically as per the "4.2 Effective Indexing Strategy" + * point of the paper. + * + * @param {number} k - Levenshtein distance threshold. + * @param {number} s - Length of target strings. + * @returns {number} - The number of selected substrings. + */ +function comparator(a, b) { + if (a.length > b.length) + return -1; + if (a.length < b.length) + return 1; + + if (a < b) + return -1; + if (a > b) + return 1; + + return 0; +} + +/** + * Function partitioning a string into k + 1 uneven segments, the shorter + * ones, then the longer ones. + * + * @param {number} k - Levenshtein distance threshold. + * @param {number} l - Length of the string. + * @returns {Array} - The partition tuples (start, length). + */ +function partition(k, l) { + var m = k + 1, + a = (l / m) | 0, + b = a + 1, + i, + j; + + var largeSegments = l - a * m, + smallSegments = m - largeSegments; + + var tuples = new Array(k + 1); + + for (i = 0; i < smallSegments; i++) + tuples[i] = [i * a, a]; + + var offset = (i - 1) * a + a; + + for (j = 0; j < largeSegments; j++) + tuples[i + j] = [offset + j * b, b]; + + return tuples; +} + +/** + * Function yielding a string's k + 1 passjoin segments to index. + * + * @param {number} k - Levenshtein distance threshold. + * @param {string} string - Target string. + * @returns {Array} - The string's segments. 
+ */ +function segments(k, string) { + var l = string.length, + m = k + 1, + a = (l / m) | 0, + b = a + 1, + o, + i, + j; + + var largeSegments = l - a * m, + smallSegments = m - largeSegments; + + var S = new Array(k + 1); + + for (i = 0; i < smallSegments; i++) { + o = i * a; + S[i] = string.slice(o, o + a); + } + + var offset = (i - 1) * a + a; + + for (j = 0; j < largeSegments; j++) { + o = offset + j * b; + S[i + j] = string.slice(o, o + b); + } + + return S; +} + +// TODO: jsdocs +function segmentPos(k, i, string) { + if (i === 0) + return 0; + + var l = string.length; + + var m = k + 1, + a = (l / m) | 0, + b = a + 1; + + var largeSegments = l - a * m, + smallSegments = m - largeSegments; + + if (i <= smallSegments - 1) + return i * a; + + var offset = i - smallSegments; + + return smallSegments * a + offset * b; +} + +/** + * Function returning the interval of relevant substrings to lookup using the + * multi-match-aware substring selection scheme described in the paper. + * + * @param {number} k - Levenshtein distance threshold. + * @param {number} delta - Signed length difference between both considered strings. + * @param {number} i - k + 1 segment index. + * @param {number} s - String's length. + * @param {number} pi - k + 1 segment position in target string. + * @param {number} li - k + 1 segment length. + * @returns {Array} - The interval (start, stop). + */ +function multiMatchAwareInterval(k, delta, i, s, pi, li) { + var start1 = pi - i, + end1 = pi + i; + + var o = k - i; + + var start2 = pi + delta - o, + end2 = pi + delta + o; + + var end3 = s - li; + + return [Math.max(0, start1, start2), Math.min(end1, end2, end3)]; +} + +/** + * Function yielding relevant substrings to lookup using the multi-match-aware + * substring selection scheme described in the paper. + * + * @param {number} k - Levenshtein distance threshold. + * @param {string} string - Target string. + * @param {number} l - Length of strings to match. 
+ * @param {number} i - k + 1 segment index. + * @param {number} pi - k + 1 segment position in target string. + * @param {number} li - k + 1 segment length. + * @returns {Array} - The contiguous substrings. + */ +function multiMatchAwareSubstrings(k, string, l, i, pi, li) { + var s = string.length; + + // Note that we need to keep the non-absolute delta for this function + // to work in both directions, up & down + var delta = s - l; + + var interval = multiMatchAwareInterval(k, delta, i, s, pi, li); + + var start = interval[0], + stop = interval[1]; + + var currentSubstring = ''; + + var substrings = []; + + var substring, j, m; + + for (j = start, m = stop + 1; j < m; j++) { + substring = string.slice(j, j + li); + + // We skip identical consecutive substrings (to avoid repetition in case + // of contiguous letter duplication) + if (substring === currentSubstring) + continue; + + substrings.push(substring); + + currentSubstring = substring; + } + + return substrings; +} + +/** + * PassjoinIndex. + * + * @note I tried to apply the paper's optimizations regarding Levenshtein + * distance computations but it did not provide a performance boost, quite + * the contrary. This is because since we are mostly using the index for small k + * here, most of the strings we work on are quite small and the bookkeeping + * induced by Ukkonen's method and the paper's one are slowing us down more than + * they actually help us go faster. + * + * @note This implementation does not try to ensure that you add the same string + * more than once. + * + * @constructor + * @param {function} levenshtein - Levenshtein distance function. + * @param {number} k - Levenshtein distance threshold. 
+ */ +function PassjoinIndex(levenshtein, k) { + if (typeof levenshtein !== 'function') + throw new Error('mnemonist/passjoin-index: `levenshtein` should be a function returning edit distance between two strings.'); + + if (typeof k !== 'number' || k < 1) + throw new Error('mnemonist/passjoin-index: `k` should be a number > 0'); + + this.levenshtein = levenshtein; + this.k = k; + this.clear(); +} + +/** + * Method used to clear the structure. + * + * @return {undefined} + */ +PassjoinIndex.prototype.clear = function() { + + // Properties + this.size = 0; + this.strings = []; + this.invertedIndices = {}; +}; + +/** + * Method used to add a new value to the index. + * + * @param {string|Array} value - Value to add. + * @return {PassjoinIndex} + */ +PassjoinIndex.prototype.add = function(value) { + var l = value.length; + + var stringIndex = this.size; + + this.strings.push(value); + this.size++; + + var S = segments(this.k, value); + + var Ll = this.invertedIndices[l]; + + if (typeof Ll === 'undefined') { + Ll = {}; + this.invertedIndices[l] = Ll; + } + + var segment, + matches, + key, + i, + m; + + for (i = 0, m = S.length; i < m; i++) { + segment = S[i]; + key = segment + i; + matches = Ll[key]; + + if (typeof matches === 'undefined') { + matches = [stringIndex]; + Ll[key] = matches; + } + else { + matches.push(stringIndex); + } + } + + return this; +}; + +/** + * Method used to search for string matching the given query. + * + * @param {string|Array} query - Query string. 
+ * @return {Array} + */ +PassjoinIndex.prototype.search = function(query) { + var s = query.length, + k = this.k; + + var M = new Set(); + + var candidates, + candidate, + queryPos, + querySegmentLength, + key, + S, + P, + l, + m, + i, + n1, + j, + n2, + y, + n3; + + for (l = Math.max(0, s - k), m = s + k + 1; l < m; l++) { + var Ll = this.invertedIndices[l]; + + if (typeof Ll === 'undefined') + continue; + + P = partition(k, l); + + for (i = 0, n1 = P.length; i < n1; i++) { + queryPos = P[i][0]; + querySegmentLength = P[i][1]; + + S = multiMatchAwareSubstrings( + k, + query, + l, + i, + queryPos, + querySegmentLength + ); + + // Empty string edge case + if (!S.length) + S = ['']; + + for (j = 0, n2 = S.length; j < n2; j++) { + key = S[j] + i; + candidates = Ll[key]; + + if (typeof candidates === 'undefined') + continue; + + for (y = 0, n3 = candidates.length; y < n3; y++) { + candidate = this.strings[candidates[y]]; + + // NOTE: first condition is here not to compute Levenshtein + // distance for tiny strings + + // NOTE: maintaining a Set of rejected candidate is not really useful + // because it consumes more memory and because non-matches are + // less likely to be candidates agains + if ( + s <= k && l <= k || + ( + !M.has(candidate) && + this.levenshtein(query, candidate) <= k + ) + ) + M.add(candidate); + } + } + } + } + + return M; +}; + +/** + * Method used to iterate over the index. + * + * @param {function} callback - Function to call for each item. + * @param {object} scope - Optional scope. + * @return {undefined} + */ +PassjoinIndex.prototype.forEach = function(callback, scope) { + scope = arguments.length > 1 ? scope : this; + + for (var i = 0, l = this.strings.length; i < l; i++) + callback.call(scope, this.strings[i], i, this); +}; + +/** + * Method used to create an iterator over a index's values. 
+ * + * @return {Iterator} + */ +PassjoinIndex.prototype.values = function() { + var strings = this.strings, + l = strings.length, + i = 0; + + return new Iterator(function() { + if (i >= l) + return { + done: true + }; + + var value = strings[i]; + i++; + + return { + value: value, + done: false + }; + }); +}; + +/** + * Attaching the #.values method to Symbol.iterator if possible. + */ +if (typeof Symbol !== 'undefined') + PassjoinIndex.prototype[Symbol.iterator] = PassjoinIndex.prototype.values; + +/** + * Convenience known methods. + */ +PassjoinIndex.prototype.inspect = function() { + var array = this.strings.slice(); + + // Trick so that node displays the name of the constructor + Object.defineProperty(array, 'constructor', { + value: PassjoinIndex, + enumerable: false + }); + + return array; +}; + +if (typeof Symbol !== 'undefined') + PassjoinIndex.prototype[Symbol.for('nodejs.util.inspect.custom')] = PassjoinIndex.prototype.inspect; + +/** + * Static @.from function taking an arbitrary iterable & converting it into + * a structure. + * + * @param {Iterable} iterable - Target iterable. + * @return {PassjoinIndex} + */ +PassjoinIndex.from = function(iterable, levenshtein, k) { + var index = new PassjoinIndex(levenshtein, k); + + forEach(iterable, function(string) { + index.add(string); + }); + + return index; +}; + +/** + * Exporting. 
+ */ +PassjoinIndex.countKeys = countKeys; +PassjoinIndex.comparator = comparator; +PassjoinIndex.partition = partition; +PassjoinIndex.segments = segments; +PassjoinIndex.segmentPos = segmentPos; +PassjoinIndex.multiMatchAwareInterval = multiMatchAwareInterval; +PassjoinIndex.multiMatchAwareSubstrings = multiMatchAwareSubstrings; + +module.exports = PassjoinIndex; diff --git a/amplify/functions/downloadDocument/node_modules/mnemonist/queue.d.ts b/amplify/functions/downloadDocument/node_modules/mnemonist/queue.d.ts new file mode 100644 index 0000000..2d3e434 --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/mnemonist/queue.d.ts @@ -0,0 +1,27 @@ +/** + * Mnemonist Queue Typings + * ======================== + */ +export default class Queue implements Iterable { + + // Members + size: number; + + // Methods + clear(): void; + enqueue(item: T): number; + dequeue(): T | undefined; + peek(): T | undefined; + forEach(callback: (item: T, index: number, queue: this) => void, scope?: any): void; + toArray(): Array; + values(): IterableIterator; + entries(): IterableIterator<[number, T]>; + [Symbol.iterator](): IterableIterator; + toString(): string; + toJSON(): Array; + inspect(): any; + + // Statics + static from(iterable: Iterable | {[key: string] : I}): Queue; + static of(...items: Array): Queue; +} diff --git a/amplify/functions/downloadDocument/node_modules/mnemonist/queue.js b/amplify/functions/downloadDocument/node_modules/mnemonist/queue.js new file mode 100644 index 0000000..aa554b6 --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/mnemonist/queue.js @@ -0,0 +1,215 @@ +/** + * Mnemonist Queue + * ================ + * + * Queue implementation based on the ideas of Queue.js that seems to beat + * a LinkedList one in performance. 
+ */ +var Iterator = require('obliterator/iterator'), + forEach = require('obliterator/foreach'); + +/** + * Queue + * + * @constructor + */ +function Queue() { + this.clear(); +} + +/** + * Method used to clear the queue. + * + * @return {undefined} + */ +Queue.prototype.clear = function() { + + // Properties + this.items = []; + this.offset = 0; + this.size = 0; +}; + +/** + * Method used to add an item to the queue. + * + * @param {any} item - Item to enqueue. + * @return {number} + */ +Queue.prototype.enqueue = function(item) { + + this.items.push(item); + return ++this.size; +}; + +/** + * Method used to retrieve & remove the first item of the queue. + * + * @return {any} + */ +Queue.prototype.dequeue = function() { + if (!this.size) + return; + + var item = this.items[this.offset]; + + if (++this.offset * 2 >= this.items.length) { + this.items = this.items.slice(this.offset); + this.offset = 0; + } + + this.size--; + + return item; +}; + +/** + * Method used to retrieve the first item of the queue. + * + * @return {any} + */ +Queue.prototype.peek = function() { + if (!this.size) + return; + + return this.items[this.offset]; +}; + +/** + * Method used to iterate over the queue. + * + * @param {function} callback - Function to call for each item. + * @param {object} scope - Optional scope. + * @return {undefined} + */ +Queue.prototype.forEach = function(callback, scope) { + scope = arguments.length > 1 ? scope : this; + + for (var i = this.offset, j = 0, l = this.items.length; i < l; i++, j++) + callback.call(scope, this.items[i], j, this); +}; + +/* + * Method used to convert the queue to a JavaScript array. + * + * @return {array} + */ +Queue.prototype.toArray = function() { + return this.items.slice(this.offset); +}; + +/** + * Method used to create an iterator over a queue's values. 
+ * + * @return {Iterator} + */ +Queue.prototype.values = function() { + var items = this.items, + i = this.offset; + + return new Iterator(function() { + if (i >= items.length) + return { + done: true + }; + + var value = items[i]; + i++; + + return { + value: value, + done: false + }; + }); +}; + +/** + * Method used to create an iterator over a queue's entries. + * + * @return {Iterator} + */ +Queue.prototype.entries = function() { + var items = this.items, + i = this.offset, + j = 0; + + return new Iterator(function() { + if (i >= items.length) + return { + done: true + }; + + var value = items[i]; + i++; + + return { + value: [j++, value], + done: false + }; + }); +}; + +/** + * Attaching the #.values method to Symbol.iterator if possible. + */ +if (typeof Symbol !== 'undefined') + Queue.prototype[Symbol.iterator] = Queue.prototype.values; + +/** + * Convenience known methods. + */ +Queue.prototype.toString = function() { + return this.toArray().join(','); +}; + +Queue.prototype.toJSON = function() { + return this.toArray(); +}; + +Queue.prototype.inspect = function() { + var array = this.toArray(); + + // Trick so that node displays the name of the constructor + Object.defineProperty(array, 'constructor', { + value: Queue, + enumerable: false + }); + + return array; +}; + +if (typeof Symbol !== 'undefined') + Queue.prototype[Symbol.for('nodejs.util.inspect.custom')] = Queue.prototype.inspect; + +/** + * Static @.from function taking an arbitrary iterable & converting it into + * a queue. + * + * @param {Iterable} iterable - Target iterable. + * @return {Queue} + */ +Queue.from = function(iterable) { + var queue = new Queue(); + + forEach(iterable, function(value) { + queue.enqueue(value); + }); + + return queue; +}; + +/** + * Static @.of function taking an arbitrary number of arguments & converting it + * into a queue. + * + * @param {...any} args + * @return {Queue} + */ +Queue.of = function() { + return Queue.from(arguments); +}; + +/** + * Exporting. 
+ */ +module.exports = Queue; diff --git a/amplify/functions/downloadDocument/node_modules/mnemonist/semi-dynamic-trie.js b/amplify/functions/downloadDocument/node_modules/mnemonist/semi-dynamic-trie.js new file mode 100644 index 0000000..6627d34 --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/mnemonist/semi-dynamic-trie.js @@ -0,0 +1,251 @@ +/* eslint no-constant-condition: 0 */ +/** + * Mnemonist SemiDynamicTrie + * ========================== + * + * Lowlevel Trie working at character level, storing information in typed + * array and organizing its children in linked lists. + * + * This implementation also uses a "fat node" strategy to boost access to some + * bloated node's children when the number of children rises above a certain + * threshold. + */ +var Vector = require('./vector.js'); + +// TODO: rename => ternary search tree + +/** + * Constants. + */ +const MAX_LINKED = 7; + +/** + * SemiDynamicTrie. + * + * @constructor + */ +function SemiDynamicTrie() { + + // Properties + + // TODO: make it 16 bits + this.characters = new Vector.Uint8Vector(256); + this.nextPointers = new Vector.Int32Vector(256); + this.childPointers = new Vector.Uint32Vector(256); + this.maps = new Vector.Uint32Vector(256); +} + +/** + * Method used to clear the structure. + * + * @return {undefined} + */ +SemiDynamicTrie.prototype.clear = function() { + + // Properties +}; + +SemiDynamicTrie.prototype.ensureSibling = function(block, character) { + var nextCharacter, + nextBlock, + newBlock; + + // Do we have a root? + if (this.characters.length === 0) { + + this.nextPointers.push(0); + this.childPointers.push(0); + this.characters.push(character); + + return block; + } + + // Are we traversing a fat node? 
+ var fatNode = this.nextPointers.array[block]; + + if (fatNode < 0) { + var mapIndex = -fatNode + character; + + nextBlock = this.maps.array[mapIndex]; + + if (nextBlock !== 0) + return nextBlock; + + newBlock = this.characters.length; + + this.nextPointers.push(0); + this.childPointers.push(0); + this.characters.push(character); + + this.maps.set(mapIndex, newBlock); + + return newBlock; + } + + var listLength = 1, + startingBlock = block; + + while (true) { + nextCharacter = this.characters.array[block]; + + if (nextCharacter === character) + return block; + + nextBlock = this.nextPointers.array[block]; + + if (nextBlock === 0) + break; + + listLength++; + block = nextBlock; + } + + // If the list is too long, we create a fat node + if (listLength > MAX_LINKED) { + block = startingBlock; + + var offset = this.maps.length; + + this.maps.resize(offset + 255); + this.maps.set(offset + 255, 0); + + while (true) { + nextBlock = this.nextPointers.array[block]; + + if (nextBlock === 0) + break; + + nextCharacter = this.characters.array[nextBlock]; + this.maps.set(offset + nextCharacter, nextBlock); + + block = nextBlock; + } + + this.nextPointers.set(startingBlock, -offset); + + newBlock = this.characters.length; + + this.nextPointers.push(0); + this.childPointers.push(0); + this.characters.push(character); + + this.maps.set(offset + character, newBlock); + + return newBlock; + } + + // Else, we append the character to the list + newBlock = this.characters.length; + + this.nextPointers.push(0); + this.childPointers.push(0); + this.nextPointers.set(block, newBlock); + this.characters.push(character); + + return newBlock; +}; + +SemiDynamicTrie.prototype.findSibling = function(block, character) { + var nextCharacter; + + // Do we have a fat node? 
+ var fatNode = this.nextPointers.array[block]; + + if (fatNode < 0) { + var mapIndex = -fatNode + character; + + var nextBlock = this.maps.array[mapIndex]; + + if (nextBlock === 0) + return -1; + + return nextBlock; + } + + while (true) { + nextCharacter = this.characters.array[block]; + + if (nextCharacter === character) + return block; + + block = this.nextPointers.array[block]; + + if (block === 0) + return -1; + } +}; + +SemiDynamicTrie.prototype.add = function(key) { + var keyCharacter, + childBlock, + block = 0; + + var i = 0, l = key.length; + + // Going as far as possible + while (i < l) { + keyCharacter = key.charCodeAt(i); + + // Ensuring a correct sibling exists + block = this.ensureSibling(block, keyCharacter); + + i++; + + if (i < l) { + + // Descending + childBlock = this.childPointers.array[block]; + + if (childBlock === 0) + break; + + block = childBlock; + } + } + + // Adding as many blocks as necessary + while (i < l) { + + childBlock = this.characters.length; + this.characters.push(key.charCodeAt(i)); + + this.childPointers.push(0); + this.nextPointers.push(0); + this.childPointers.set(block, childBlock); + + block = childBlock; + + i++; + } +}; + +SemiDynamicTrie.prototype.has = function(key) { + var i, l; + + var block = 0, + siblingBlock; + + for (i = 0, l = key.length; i < l; i++) { + siblingBlock = this.findSibling(block, key.charCodeAt(i)); + + if (siblingBlock === -1) + return false; + + // TODO: be sure + if (i === l - 1) + return true; + + block = this.childPointers.array[siblingBlock]; + + if (block === 0) + return false; + } + + // TODO: fix, should have a leaf pointer somehow + return true; +}; + +/** + * Exporting. 
+ */ +module.exports = SemiDynamicTrie; diff --git a/amplify/functions/downloadDocument/node_modules/mnemonist/set.d.ts b/amplify/functions/downloadDocument/node_modules/mnemonist/set.d.ts new file mode 100644 index 0000000..fc8dae8 --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/mnemonist/set.d.ts @@ -0,0 +1,18 @@ +/** + * Mnemonist Set Typings + * ====================== + */ +export function intersection(...set: Array>): Set; +export function union(...set: Array>): Set; +export function difference(a: Set, b: Set): Set; +export function symmetricDifference(a: Set, b: Set): Set; +export function isSubset(a: Set, b: Set): boolean; +export function isSuperset(a: Set, b: Set): boolean; +export function add(a: Set, b: Set): void; +export function subtract(a: Set, b: Set): void; +export function intersect(a: Set, b: Set): void; +export function disjunct(a: Set, b: Set): void; +export function intersectionSize(a: Set, b:Set): number; +export function unionSize(a: Set, b:Set): number; +export function jaccard(a: Set, b:Set): number; +export function overlap(a: Set, b: Set): number; diff --git a/amplify/functions/downloadDocument/node_modules/mnemonist/set.js b/amplify/functions/downloadDocument/node_modules/mnemonist/set.js new file mode 100644 index 0000000..e0d020b --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/mnemonist/set.js @@ -0,0 +1,356 @@ +/** + * Mnemonist Set + * ============== + * + * Useful function related to sets such as union, intersection and so on... + */ + +// TODO: optimize versions for less variadicities + +/** + * Variadic function computing the intersection of multiple sets. + * + * @param {...Set} sets - Sets to intersect. + * @return {Set} - The intesection. 
+ */ +exports.intersection = function() { + if (arguments.length < 2) + throw new Error('mnemonist/Set.intersection: needs at least two arguments.'); + + var I = new Set(); + + // First we need to find the smallest set + var smallestSize = Infinity, + smallestSet = null; + + var s, i, l = arguments.length; + + for (i = 0; i < l; i++) { + s = arguments[i]; + + // If one of the set has no items, we can stop right there + if (s.size === 0) + return I; + + if (s.size < smallestSize) { + smallestSize = s.size; + smallestSet = s; + } + } + + // Now we need to intersect this set with the others + var iterator = smallestSet.values(), + step, + item, + add, + set; + + // TODO: we can optimize by iterating each next time over the current intersection + // but this probably means more RAM to consume since we'll create n-1 sets rather than + // only the one. + while ((step = iterator.next(), !step.done)) { + item = step.value; + add = true; + + for (i = 0; i < l; i++) { + set = arguments[i]; + + if (set === smallestSet) + continue; + + if (!set.has(item)) { + add = false; + break; + } + } + + if (add) + I.add(item); + } + + return I; +}; + +/** + * Variadic function computing the union of multiple sets. + * + * @param {...Set} sets - Sets to unite. + * @return {Set} - The union. + */ +exports.union = function() { + if (arguments.length < 2) + throw new Error('mnemonist/Set.union: needs at least two arguments.'); + + var U = new Set(); + + var i, l = arguments.length; + + var iterator, + step; + + for (i = 0; i < l; i++) { + iterator = arguments[i].values(); + + while ((step = iterator.next(), !step.done)) + U.add(step.value); + } + + return U; +}; + +/** + * Function computing the difference between two sets. + * + * @param {Set} A - First set. + * @param {Set} B - Second set. + * @return {Set} - The difference. 
+ */ +exports.difference = function(A, B) { + + // If first set is empty + if (!A.size) + return new Set(); + + if (!B.size) + return new Set(A); + + var D = new Set(); + + var iterator = A.values(), + step; + + while ((step = iterator.next(), !step.done)) { + if (!B.has(step.value)) + D.add(step.value); + } + + return D; +}; + +/** + * Function computing the symmetric difference between two sets. + * + * @param {Set} A - First set. + * @param {Set} B - Second set. + * @return {Set} - The symmetric difference. + */ +exports.symmetricDifference = function(A, B) { + var S = new Set(); + + var iterator = A.values(), + step; + + while ((step = iterator.next(), !step.done)) { + if (!B.has(step.value)) + S.add(step.value); + } + + iterator = B.values(); + + while ((step = iterator.next(), !step.done)) { + if (!A.has(step.value)) + S.add(step.value); + } + + return S; +}; + +/** + * Function returning whether A is a subset of B. + * + * @param {Set} A - First set. + * @param {Set} B - Second set. + * @return {boolean} + */ +exports.isSubset = function(A, B) { + var iterator = A.values(), + step; + + // Shortcuts + if (A === B) + return true; + + if (A.size > B.size) + return false; + + while ((step = iterator.next(), !step.done)) { + if (!B.has(step.value)) + return false; + } + + return true; +}; + +/** + * Function returning whether A is a superset of B. + * + * @param {Set} A - First set. + * @param {Set} B - Second set. + * @return {boolean} + */ +exports.isSuperset = function(A, B) { + return exports.isSubset(B, A); +}; + +/** + * Function adding the items of set B to the set A. + * + * @param {Set} A - First set. + * @param {Set} B - Second set. + */ +exports.add = function(A, B) { + var iterator = B.values(), + step; + + while ((step = iterator.next(), !step.done)) + A.add(step.value); + + return; +}; + +/** + * Function subtracting the items of set B from the set A. + * + * @param {Set} A - First set. + * @param {Set} B - Second set. 
+ */ +exports.subtract = function(A, B) { + var iterator = B.values(), + step; + + while ((step = iterator.next(), !step.done)) + A.delete(step.value); + + return; +}; + +/** + * Function intersecting the items of A & B. + * + * @param {Set} A - First set. + * @param {Set} B - Second set. + */ +exports.intersect = function(A, B) { + var iterator = A.values(), + step; + + while ((step = iterator.next(), !step.done)) { + if (!B.has(step.value)) + A.delete(step.value); + } + + return; +}; + +/** + * Function disjuncting the items of A & B. + * + * @param {Set} A - First set. + * @param {Set} B - Second set. + */ +exports.disjunct = function(A, B) { + var iterator = A.values(), + step; + + var toRemove = []; + + while ((step = iterator.next(), !step.done)) { + if (B.has(step.value)) + toRemove.push(step.value); + } + + iterator = B.values(); + + while ((step = iterator.next(), !step.done)) { + if (!A.has(step.value)) + A.add(step.value); + } + + for (var i = 0, l = toRemove.length; i < l; i++) + A.delete(toRemove[i]); + + return; +}; + +/** + * Function returning the size of the intersection of A & B. + * + * @param {Set} A - First set. + * @param {Set} B - Second set. + * @return {number} + */ +exports.intersectionSize = function(A, B) { + var tmp; + + // We need to know the smallest set + if (A.size > B.size) { + tmp = A; + A = B; + B = tmp; + } + + if (A.size === 0) + return 0; + + if (A === B) + return A.size; + + var iterator = A.values(), + step; + + var I = 0; + + while ((step = iterator.next(), !step.done)) { + if (B.has(step.value)) + I++; + } + + return I; +}; + +/** + * Function returning the size of the union of A & B. + * + * @param {Set} A - First set. + * @param {Set} B - Second set. + * @return {number} + */ +exports.unionSize = function(A, B) { + var I = exports.intersectionSize(A, B); + + return A.size + B.size - I; +}; + +/** + * Function returning the Jaccard similarity between A & B. + * + * @param {Set} A - First set. 
+ * @param {Set} B - Second set. + * @return {number} + */ +exports.jaccard = function(A, B) { + var I = exports.intersectionSize(A, B); + + if (I === 0) + return 0; + + var U = A.size + B.size - I; + + return I / U; +}; + +/** + * Function returning the overlap coefficient between A & B. + * + * @param {Set} A - First set. + * @param {Set} B - Second set. + * @return {number} + */ +exports.overlap = function(A, B) { + var I = exports.intersectionSize(A, B); + + if (I === 0) + return 0; + + return I / Math.min(A.size, B.size); +}; diff --git a/amplify/functions/downloadDocument/node_modules/mnemonist/sort/insertion.d.ts b/amplify/functions/downloadDocument/node_modules/mnemonist/sort/insertion.d.ts new file mode 100644 index 0000000..db22f9b --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/mnemonist/sort/insertion.d.ts @@ -0,0 +1,4 @@ +import {IArrayLike} from '../utils/types'; + +export function inplaceInsertionSort(array: IArrayLike, lo: number, hi: number): IArrayLike; +export function inplaceInsertionSortIndices(array: IArrayLike, indices: IArrayLike, lo: number, hi: number): IArrayLike; diff --git a/amplify/functions/downloadDocument/node_modules/mnemonist/sort/insertion.js b/amplify/functions/downloadDocument/node_modules/mnemonist/sort/insertion.js new file mode 100644 index 0000000..aebd1ad --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/mnemonist/sort/insertion.js @@ -0,0 +1,50 @@ +/** + * Mnemonist Insertion Sort + * ========================= + * + * Insertion sort related functions. 
+ */ +function inplaceInsertionSort(array, lo, hi) { + i = lo + 1; + + var j, k; + + for (; i < hi; i++) { + k = array[i]; + j = i - 1; + + while (j >= lo && array[j] > k) { + array[j + 1] = array[j]; + j--; + } + + array[j + 1] = k; + } + + return array; +} + +exports.inplaceInsertionSort = inplaceInsertionSort; + +function inplaceInsertionSortIndices(array, indices, lo, hi) { + i = lo + 1; + + var j, k, t; + + for (; i < hi; i++) { + t = indices[i]; + k = array[t]; + j = i - 1; + + while (j >= lo && array[indices[j]] > k) { + indices[j + 1] = indices[j]; + j--; + } + + indices[j + 1] = t; + } + + return indices; +} + +exports.inplaceInsertionSortIndices = inplaceInsertionSortIndices; diff --git a/amplify/functions/downloadDocument/node_modules/mnemonist/sort/quick.d.ts b/amplify/functions/downloadDocument/node_modules/mnemonist/sort/quick.d.ts new file mode 100644 index 0000000..5e6c90d --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/mnemonist/sort/quick.d.ts @@ -0,0 +1,4 @@ +import {IArrayLike} from '../utils/types'; + +export function inplaceQuickSort(array: IArrayLike, lo: number, hi: number): IArrayLike; +export function inplaceQuickSortIndices(array: IArrayLike, indices: IArrayLike, lo: number, hi: number): IArrayLike; diff --git a/amplify/functions/downloadDocument/node_modules/mnemonist/sort/quick.js b/amplify/functions/downloadDocument/node_modules/mnemonist/sort/quick.js new file mode 100644 index 0000000..008d0fd --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/mnemonist/sort/quick.js @@ -0,0 +1,116 @@ +/** + * Mnemonist Quick Sort + * ===================== + * + * Quick sort related functions. 
+ * Adapted from: https://alienryderflex.com/quicksort/ + */ +var LOS = new Float64Array(64), + HIS = new Float64Array(64); + +function inplaceQuickSort(array, lo, hi) { + var p, i, l, r, swap; + + LOS[0] = lo; + HIS[0] = hi; + i = 0; + + while (i >= 0) { + l = LOS[i]; + r = HIS[i] - 1; + + if (l < r) { + p = array[l]; + + while (l < r) { + while (array[r] >= p && l < r) + r--; + + if (l < r) + array[l++] = array[r]; + + while (array[l] <= p && l < r) + l++; + + if (l < r) + array[r--] = array[l]; + } + + array[l] = p; + LOS[i + 1] = l + 1; + HIS[i + 1] = HIS[i]; + HIS[i++] = l; + + if (HIS[i] - LOS[i] > HIS[i - 1] - LOS[i - 1]) { + swap = LOS[i]; + LOS[i] = LOS[i - 1]; + LOS[i - 1] = swap; + + swap = HIS[i]; + HIS[i] = HIS[i - 1]; + HIS[i - 1] = swap; + } + } + else { + i--; + } + } + + return array; +} + +exports.inplaceQuickSort = inplaceQuickSort; + +function inplaceQuickSortIndices(array, indices, lo, hi) { + var p, i, l, r, t, swap; + + LOS[0] = lo; + HIS[0] = hi; + i = 0; + + while (i >= 0) { + l = LOS[i]; + r = HIS[i] - 1; + + if (l < r) { + t = indices[l]; + p = array[t]; + + while (l < r) { + while (array[indices[r]] >= p && l < r) + r--; + + if (l < r) + indices[l++] = indices[r]; + + while (array[indices[l]] <= p && l < r) + l++; + + if (l < r) + indices[r--] = indices[l]; + } + + indices[l] = t; + LOS[i + 1] = l + 1; + HIS[i + 1] = HIS[i]; + HIS[i++] = l; + + if (HIS[i] - LOS[i] > HIS[i - 1] - LOS[i - 1]) { + swap = LOS[i]; + LOS[i] = LOS[i - 1]; + LOS[i - 1] = swap; + + swap = HIS[i]; + HIS[i] = HIS[i - 1]; + HIS[i - 1] = swap; + } + } + else { + i--; + } + } + + return indices; +} + +exports.inplaceQuickSortIndices = inplaceQuickSortIndices; diff --git a/amplify/functions/downloadDocument/node_modules/mnemonist/sparse-map.d.ts b/amplify/functions/downloadDocument/node_modules/mnemonist/sparse-map.d.ts new file mode 100644 index 0000000..0b22f90 --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/mnemonist/sparse-map.d.ts @@ -0,0 +1,26 
@@ +/** + * Mnemonist SparseMap Typings + * ============================ + */ +export default class SparseMap implements Iterable<[number, V]> { + + // Members + length: number; + size: number; + + // Constructor + constructor(length: number); + + // Methods + clear(): void; + has(key: number): boolean; + get(key: number): V | undefined; + set(key: number, value: V): this; + delete(key: number): boolean; + forEach(callback: (value: V, key: number, set: this) => void, scope?: any): void; + keys(): IterableIterator; + values(): IterableIterator; + entries(): IterableIterator<[number, V]>; + [Symbol.iterator](): IterableIterator<[number, V]>; + inspect(): any; +} diff --git a/amplify/functions/downloadDocument/node_modules/mnemonist/sparse-map.js b/amplify/functions/downloadDocument/node_modules/mnemonist/sparse-map.js new file mode 100644 index 0000000..d5cf20d --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/mnemonist/sparse-map.js @@ -0,0 +1,243 @@ +/** + * Mnemonist SparseMap + * ==================== + * + * JavaScript sparse map implemented on top of byte arrays. + * + * [Reference]: https://research.swtch.com/sparse + */ +var Iterator = require('obliterator/iterator'), + getPointerArray = require('./utils/typed-arrays.js').getPointerArray; + +/** + * SparseMap. + * + * @constructor + */ +function SparseMap(Values, length) { + if (arguments.length < 2) { + length = Values; + Values = Array; + } + + var ByteArray = getPointerArray(length); + + // Properties + this.size = 0; + this.length = length; + this.dense = new ByteArray(length); + this.sparse = new ByteArray(length); + this.vals = new Values(length); +} + +/** + * Method used to clear the structure. + * + * @return {undefined} + */ +SparseMap.prototype.clear = function() { + this.size = 0; +}; + +/** + * Method used to check the existence of a member in the set. + * + * @param {number} member - Member to test. 
+ * @return {SparseMap} + */ +SparseMap.prototype.has = function(member) { + var index = this.sparse[member]; + + return ( + index < this.size && + this.dense[index] === member + ); +}; + +/** + * Method used to get the value associated to a member in the set. + * + * @param {number} member - Member to test. + * @return {any} + */ +SparseMap.prototype.get = function(member) { + var index = this.sparse[member]; + + if (index < this.size && this.dense[index] === member) + return this.vals[index]; + + return; +}; + +/** + * Method used to set a value into the map. + * + * @param {number} member - Member to set. + * @param {any} value - Associated value. + * @return {SparseMap} + */ +SparseMap.prototype.set = function(member, value) { + var index = this.sparse[member]; + + if (index < this.size && this.dense[index] === member) { + this.vals[index] = value; + return this; + } + + this.dense[this.size] = member; + this.sparse[member] = this.size; + this.vals[this.size] = value; + this.size++; + + return this; +}; + +/** + * Method used to remove a member from the set. + * + * @param {number} member - Member to delete. + * @return {boolean} + */ +SparseMap.prototype.delete = function(member) { + var index = this.sparse[member]; + + if (index >= this.size || this.dense[index] !== member) + return false; + + index = this.dense[this.size - 1]; + this.dense[this.sparse[member]] = index; + this.sparse[index] = this.sparse[member]; + this.size--; + + return true; +}; + +/** + * Method used to iterate over the set's values. + * + * @param {function} callback - Function to call for each item. + * @param {object} scope - Optional scope. + * @return {undefined} + */ +SparseMap.prototype.forEach = function(callback, scope) { + scope = arguments.length > 1 ? scope : this; + + for (var i = 0; i < this.size; i++) + callback.call(scope, this.vals[i], this.dense[i]); +}; + +/** + * Method used to create an iterator over a set's members. 
+ * + * @return {Iterator} + */ +SparseMap.prototype.keys = function() { + var size = this.size, + dense = this.dense, + i = 0; + + return new Iterator(function() { + if (i < size) { + var item = dense[i]; + i++; + + return { + value: item + }; + } + + return { + done: true + }; + }); +}; + +/** + * Method used to create an iterator over a set's values. + * + * @return {Iterator} + */ +SparseMap.prototype.values = function() { + var size = this.size, + values = this.vals, + i = 0; + + return new Iterator(function() { + if (i < size) { + var item = values[i]; + i++; + + return { + value: item + }; + } + + return { + done: true + }; + }); +}; + +/** + * Method used to create an iterator over a set's entries. + * + * @return {Iterator} + */ +SparseMap.prototype.entries = function() { + var size = this.size, + dense = this.dense, + values = this.vals, + i = 0; + + return new Iterator(function() { + if (i < size) { + var item = [dense[i], values[i]]; + i++; + + return { + value: item + }; + } + + return { + done: true + }; + }); +}; + +/** + * Attaching the #.entries method to Symbol.iterator if possible. + */ +if (typeof Symbol !== 'undefined') + SparseMap.prototype[Symbol.iterator] = SparseMap.prototype.entries; + +/** + * Convenience known methods. + */ +SparseMap.prototype.inspect = function() { + var proxy = new Map(); + + for (var i = 0; i < this.size; i++) + proxy.set(this.dense[i], this.vals[i]); + + // Trick so that node displays the name of the constructor + Object.defineProperty(proxy, 'constructor', { + value: SparseMap, + enumerable: false + }); + + proxy.length = this.length; + + if (this.vals.constructor !== Array) + proxy.type = this.vals.constructor.name; + + return proxy; +}; + +if (typeof Symbol !== 'undefined') + SparseMap.prototype[Symbol.for('nodejs.util.inspect.custom')] = SparseMap.prototype.inspect; + +/** + * Exporting. 
+ */ +module.exports = SparseMap; diff --git a/amplify/functions/downloadDocument/node_modules/mnemonist/sparse-queue-set.d.ts b/amplify/functions/downloadDocument/node_modules/mnemonist/sparse-queue-set.d.ts new file mode 100644 index 0000000..e7463bf --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/mnemonist/sparse-queue-set.d.ts @@ -0,0 +1,24 @@ +/** + * Mnemonist SparseQueueSet Typings + * ================================= + */ +export default class SparseQueueSet implements Iterable { + + // Members + capacity: number; + start: number; + size: number; + + // Constructor + constructor(length: number); + + // Methods + clear(): void; + has(value: number): boolean; + enqueue(value: number): this; + dequeue(): number | undefined; + forEach(callback: (value: number, key: number, set: this) => void, scope?: any): void; + values(): IterableIterator; + [Symbol.iterator](): IterableIterator; + inspect(): any; +} diff --git a/amplify/functions/downloadDocument/node_modules/mnemonist/sparse-queue-set.js b/amplify/functions/downloadDocument/node_modules/mnemonist/sparse-queue-set.js new file mode 100644 index 0000000..b5f42b3 --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/mnemonist/sparse-queue-set.js @@ -0,0 +1,218 @@ +/** + * Mnemonist SparseQueueSet + * ========================= + * + * JavaScript sparse queue set implemented on top of byte arrays. + * + * [Reference]: https://research.swtch.com/sparse + */ +var Iterator = require('obliterator/iterator'), + getPointerArray = require('./utils/typed-arrays.js').getPointerArray; + +/** + * SparseQueueSet. + * + * @constructor + */ +function SparseQueueSet(capacity) { + + var ByteArray = getPointerArray(capacity); + + // Properties + this.start = 0; + this.size = 0; + this.capacity = capacity; + this.dense = new ByteArray(capacity); + this.sparse = new ByteArray(capacity); +} + +/** + * Method used to clear the structure. 
+ * + * @return {undefined} + */ +SparseQueueSet.prototype.clear = function() { + this.start = 0; + this.size = 0; +}; + +/** + * Method used to check the existence of a member in the queue. + * + * @param {number} member - Member to test. + * @return {SparseQueueSet} + */ +SparseQueueSet.prototype.has = function(member) { + if (this.size === 0) + return false; + + var index = this.sparse[member]; + + var inBounds = ( + index < this.capacity && + ( + index >= this.start && + index < this.start + this.size + ) || + ( + index < ((this.start + this.size) % this.capacity) + ) + ); + + return ( + inBounds && + this.dense[index] === member + ); +}; + +/** + * Method used to add a member to the queue. + * + * @param {number} member - Member to add. + * @return {SparseQueueSet} + */ +SparseQueueSet.prototype.enqueue = function(member) { + var index = this.sparse[member]; + + if (this.size !== 0) { + var inBounds = ( + index < this.capacity && + ( + index >= this.start && + index < this.start + this.size + ) || + ( + index < ((this.start + this.size) % this.capacity) + ) + ); + + if (inBounds && this.dense[index] === member) + return this; + } + + index = (this.start + this.size) % this.capacity; + + this.dense[index] = member; + this.sparse[member] = index; + this.size++; + + return this; +}; + +/** + * Method used to remove the next member from the queue. + * + * @param {number} member - Member to delete. + * @return {boolean} + */ +SparseQueueSet.prototype.dequeue = function() { + if (this.size === 0) + return; + + var index = this.start; + + this.size--; + this.start++; + + if (this.start === this.capacity) + this.start = 0; + + var member = this.dense[index]; + + this.sparse[member] = this.capacity; + + return member; +}; + +/** + * Method used to iterate over the queue's values. + * + * @param {function} callback - Function to call for each item. + * @param {object} scope - Optional scope. 
+ * @return {undefined} + */ +SparseQueueSet.prototype.forEach = function(callback, scope) { + scope = arguments.length > 1 ? scope : this; + + var c = this.capacity, + l = this.size, + i = this.start, + j = 0; + + while (j < l) { + callback.call(scope, this.dense[i], j, this); + i++; + j++; + + if (i === c) + i = 0; + } +}; + +/** + * Method used to create an iterator over a set's values. + * + * @return {Iterator} + */ +SparseQueueSet.prototype.values = function() { + var dense = this.dense, + c = this.capacity, + l = this.size, + i = this.start, + j = 0; + + return new Iterator(function() { + if (j >= l) + return { + done: true + }; + + var value = dense[i]; + + i++; + j++; + + if (i === c) + i = 0; + + return { + value: value, + done: false + }; + }); +}; + +/** + * Attaching the #.values method to Symbol.iterator if possible. + */ +if (typeof Symbol !== 'undefined') + SparseQueueSet.prototype[Symbol.iterator] = SparseQueueSet.prototype.values; + +/** + * Convenience known methods. + */ +SparseQueueSet.prototype.inspect = function() { + var proxy = []; + + this.forEach(function(member) { + proxy.push(member); + }); + + // Trick so that node displays the name of the constructor + Object.defineProperty(proxy, 'constructor', { + value: SparseQueueSet, + enumerable: false + }); + + proxy.capacity = this.capacity; + + return proxy; +}; + +if (typeof Symbol !== 'undefined') + SparseQueueSet.prototype[Symbol.for('nodejs.util.inspect.custom')] = SparseQueueSet.prototype.inspect; + +/** + * Exporting. 
+ */ +module.exports = SparseQueueSet; diff --git a/amplify/functions/downloadDocument/node_modules/mnemonist/sparse-set.d.ts b/amplify/functions/downloadDocument/node_modules/mnemonist/sparse-set.d.ts new file mode 100644 index 0000000..99fe655 --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/mnemonist/sparse-set.d.ts @@ -0,0 +1,23 @@ +/** + * Mnemonist SparseSet Typings + * ============================ + */ +export default class SparseSet implements Iterable { + + // Members + length: number; + size: number; + + // Constructor + constructor(length: number); + + // Methods + clear(): void; + has(value: number): boolean; + add(value: number): this; + delete(value: number): boolean; + forEach(callback: (value: number, key: number, set: this) => void, scope?: any): void; + values(): IterableIterator; + [Symbol.iterator](): IterableIterator; + inspect(): any; +} diff --git a/amplify/functions/downloadDocument/node_modules/mnemonist/sparse-set.js b/amplify/functions/downloadDocument/node_modules/mnemonist/sparse-set.js new file mode 100644 index 0000000..7498f33 --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/mnemonist/sparse-set.js @@ -0,0 +1,168 @@ +/** + * Mnemonist SparseSet + * ==================== + * + * JavaScript sparse set implemented on top of byte arrays. + * + * [Reference]: https://research.swtch.com/sparse + */ +var Iterator = require('obliterator/iterator'), + getPointerArray = require('./utils/typed-arrays.js').getPointerArray; + +/** + * SparseSet. + * + * @constructor + */ +function SparseSet(length) { + + var ByteArray = getPointerArray(length); + + // Properties + this.size = 0; + this.length = length; + this.dense = new ByteArray(length); + this.sparse = new ByteArray(length); +} + +/** + * Method used to clear the structure. + * + * @return {undefined} + */ +SparseSet.prototype.clear = function() { + this.size = 0; +}; + +/** + * Method used to check the existence of a member in the set. 
+ * + * @param {number} member - Member to test. + * @return {SparseSet} + */ +SparseSet.prototype.has = function(member) { + var index = this.sparse[member]; + + return ( + index < this.size && + this.dense[index] === member + ); +}; + +/** + * Method used to add a member to the set. + * + * @param {number} member - Member to add. + * @return {SparseSet} + */ +SparseSet.prototype.add = function(member) { + var index = this.sparse[member]; + + if (index < this.size && this.dense[index] === member) + return this; + + this.dense[this.size] = member; + this.sparse[member] = this.size; + this.size++; + + return this; +}; + +/** + * Method used to remove a member from the set. + * + * @param {number} member - Member to delete. + * @return {boolean} + */ +SparseSet.prototype.delete = function(member) { + var index = this.sparse[member]; + + if (index >= this.size || this.dense[index] !== member) + return false; + + index = this.dense[this.size - 1]; + this.dense[this.sparse[member]] = index; + this.sparse[index] = this.sparse[member]; + this.size--; + + return true; +}; + +/** + * Method used to iterate over the set's values. + * + * @param {function} callback - Function to call for each item. + * @param {object} scope - Optional scope. + * @return {undefined} + */ +SparseSet.prototype.forEach = function(callback, scope) { + scope = arguments.length > 1 ? scope : this; + + var item; + + for (var i = 0; i < this.size; i++) { + item = this.dense[i]; + + callback.call(scope, item, item); + } +}; + +/** + * Method used to create an iterator over a set's values. + * + * @return {Iterator} + */ +SparseSet.prototype.values = function() { + var size = this.size, + dense = this.dense, + i = 0; + + return new Iterator(function() { + if (i < size) { + var item = dense[i]; + i++; + + return { + value: item + }; + } + + return { + done: true + }; + }); +}; + +/** + * Attaching the #.values method to Symbol.iterator if possible. 
+ */ +if (typeof Symbol !== 'undefined') + SparseSet.prototype[Symbol.iterator] = SparseSet.prototype.values; + +/** + * Convenience known methods. + */ +SparseSet.prototype.inspect = function() { + var proxy = new Set(); + + for (var i = 0; i < this.size; i++) + proxy.add(this.dense[i]); + + // Trick so that node displays the name of the constructor + Object.defineProperty(proxy, 'constructor', { + value: SparseSet, + enumerable: false + }); + + proxy.length = this.length; + + return proxy; +}; + +if (typeof Symbol !== 'undefined') + SparseSet.prototype[Symbol.for('nodejs.util.inspect.custom')] = SparseSet.prototype.inspect; + +/** + * Exporting. + */ +module.exports = SparseSet; diff --git a/amplify/functions/downloadDocument/node_modules/mnemonist/stack.d.ts b/amplify/functions/downloadDocument/node_modules/mnemonist/stack.d.ts new file mode 100644 index 0000000..fa6998b --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/mnemonist/stack.d.ts @@ -0,0 +1,27 @@ +/** + * Mnemonist Stack Typings + * ======================== + */ +export default class Stack implements Iterable { + + // Members + size: number; + + // Methods + clear(): void; + push(item: T): number; + pop(): T | undefined; + peek(): T | undefined; + forEach(callback: (item: T, index: number, stack: this) => void, scope?: any): void; + toArray(): Array; + values(): IterableIterator; + entries(): IterableIterator<[number, T]>; + [Symbol.iterator](): IterableIterator; + toString(): string; + toJSON(): Array; + inspect(): any; + + // Statics + static from(iterable: Iterable | {[key: string] : I}): Stack; + static of(...items: Array): Stack; +} diff --git a/amplify/functions/downloadDocument/node_modules/mnemonist/stack.js b/amplify/functions/downloadDocument/node_modules/mnemonist/stack.js new file mode 100644 index 0000000..9e83519 --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/mnemonist/stack.js @@ -0,0 +1,210 @@ +/** + * Mnemonist Stack + * ================ + * + * 
Stack implementation relying on JavaScript arrays, which are fast enough & + * correctly optimized for this kind of work. + */ +var Iterator = require('obliterator/iterator'), + forEach = require('obliterator/foreach'); + +/** + * Stack + * + * @constructor + */ +function Stack() { + this.clear(); +} + +/** + * Method used to clear the stack. + * + * @return {undefined} + */ +Stack.prototype.clear = function() { + + // Properties + this.items = []; + this.size = 0; +}; + +/** + * Method used to add an item to the stack. + * + * @param {any} item - Item to add. + * @return {number} + */ +Stack.prototype.push = function(item) { + this.items.push(item); + return ++this.size; +}; + +/** + * Method used to retrieve & remove the last item of the stack. + * + * @return {any} + */ +Stack.prototype.pop = function() { + if (this.size === 0) + return; + + this.size--; + return this.items.pop(); +}; + +/** + * Method used to get the last item of the stack. + * + * @return {any} + */ +Stack.prototype.peek = function() { + return this.items[this.size - 1]; +}; + +/** + * Method used to iterate over the stack. + * + * @param {function} callback - Function to call for each item. + * @param {object} scope - Optional scope. + * @return {undefined} + */ +Stack.prototype.forEach = function(callback, scope) { + scope = arguments.length > 1 ? scope : this; + + for (var i = 0, l = this.items.length; i < l; i++) + callback.call(scope, this.items[l - i - 1], i, this); +}; + +/** + * Method used to convert the stack to a JavaScript array. + * + * @return {array} + */ +Stack.prototype.toArray = function() { + var array = new Array(this.size), + l = this.size - 1, + i = this.size; + + while (i--) + array[i] = this.items[l - i]; + + return array; +}; + +/** + * Method used to create an iterator over a stack's values. 
+ * + * @return {Iterator} + */ +Stack.prototype.values = function() { + var items = this.items, + l = items.length, + i = 0; + + return new Iterator(function() { + if (i >= l) + return { + done: true + }; + + var value = items[l - i - 1]; + i++; + + return { + value: value, + done: false + }; + }); +}; + +/** + * Method used to create an iterator over a stack's entries. + * + * @return {Iterator} + */ +Stack.prototype.entries = function() { + var items = this.items, + l = items.length, + i = 0; + + return new Iterator(function() { + if (i >= l) + return { + done: true + }; + + var value = items[l - i - 1]; + + return { + value: [i++, value], + done: false + }; + }); +}; + +/** + * Attaching the #.values method to Symbol.iterator if possible. + */ +if (typeof Symbol !== 'undefined') + Stack.prototype[Symbol.iterator] = Stack.prototype.values; + + +/** + * Convenience known methods. + */ +Stack.prototype.toString = function() { + return this.toArray().join(','); +}; + +Stack.prototype.toJSON = function() { + return this.toArray(); +}; + +Stack.prototype.inspect = function() { + var array = this.toArray(); + + // Trick so that node displays the name of the constructor + Object.defineProperty(array, 'constructor', { + value: Stack, + enumerable: false + }); + + return array; +}; + +if (typeof Symbol !== 'undefined') + Stack.prototype[Symbol.for('nodejs.util.inspect.custom')] = Stack.prototype.inspect; + +/** + * Static @.from function taking an arbitrary iterable & converting it into + * a stack. + * + * @param {Iterable} iterable - Target iterable. + * @return {Stack} + */ +Stack.from = function(iterable) { + var stack = new Stack(); + + forEach(iterable, function(value) { + stack.push(value); + }); + + return stack; +}; + +/** + * Static @.of function taking an arbitrary number of arguments & converting it + * into a stack. + * + * @param {...any} args + * @return {Stack} + */ +Stack.of = function() { + return Stack.from(arguments); +}; + +/** + * Exporting. 
+ */ +module.exports = Stack; diff --git a/amplify/functions/downloadDocument/node_modules/mnemonist/static-disjoint-set.d.ts b/amplify/functions/downloadDocument/node_modules/mnemonist/static-disjoint-set.d.ts new file mode 100644 index 0000000..3e808da --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/mnemonist/static-disjoint-set.d.ts @@ -0,0 +1,23 @@ +/** + * Mnemonist StaticDisjointSet Typings + * ==================================== + */ +import {ArrayLike} from './utils/types'; + +export default class StaticDisjointSet { + + // Members + dimension: number; + size: number; + + // Constructor + constructor(size: number); + + // Methods + find(x: number): number; + union(x: number, y: number): this; + connected(x: number, y: number): boolean; + mapping(): ArrayLike; + compile(): Array>; + inspect(): any; +} \ No newline at end of file diff --git a/amplify/functions/downloadDocument/node_modules/mnemonist/static-disjoint-set.js b/amplify/functions/downloadDocument/node_modules/mnemonist/static-disjoint-set.js new file mode 100644 index 0000000..7a84b93 --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/mnemonist/static-disjoint-set.js @@ -0,0 +1,195 @@ +/* eslint no-constant-condition: 0 */ +/** + * Mnemonist StaticDisjointSet + * ============================ + * + * JavaScript implementation of a static disjoint set (union-find). + * + * Note that to remain performant, this implementation needs to know a size + * beforehand. + */ +var helpers = require('./utils/typed-arrays.js'); + +/** + * StaticDisjointSet. 
+ * + * @constructor + */ +function StaticDisjointSet(size) { + + // Optimizing the typed array types + var ParentsTypedArray = helpers.getPointerArray(size), + RanksTypedArray = helpers.getPointerArray(Math.log2(size)); + + // Properties + this.size = size; + this.dimension = size; + this.parents = new ParentsTypedArray(size); + this.ranks = new RanksTypedArray(size); + + // Initializing parents + for (var i = 0; i < size; i++) + this.parents[i] = i; +} + +/** + * Method used to find the root of the given item. + * + * @param {number} x - Target item. + * @return {number} + */ +StaticDisjointSet.prototype.find = function(x) { + var y = x; + + var c, p; + + while (true) { + c = this.parents[y]; + + if (y === c) + break; + + y = c; + } + + // Path compression + while (true) { + p = this.parents[x]; + + if (p === y) + break; + + this.parents[x] = y; + x = p; + } + + return y; +}; + +/** + * Method used to perform the union of two items. + * + * @param {number} x - First item. + * @param {number} y - Second item. + * @return {StaticDisjointSet} + */ +StaticDisjointSet.prototype.union = function(x, y) { + var xRoot = this.find(x), + yRoot = this.find(y); + + // x and y are already in the same set + if (xRoot === yRoot) + return this; + + this.dimension--; + + // x and y are not in the same set, we merge them + var xRank = this.ranks[x], + yRank = this.ranks[y]; + + if (xRank < yRank) { + this.parents[xRoot] = yRoot; + } + else if (xRank > yRank) { + this.parents[yRoot] = xRoot; + } + else { + this.parents[yRoot] = xRoot; + this.ranks[xRoot]++; + } + + return this; +}; + +/** + * Method returning whether two items are connected. + * + * @param {number} x - First item. + * @param {number} y - Second item. + * @return {boolean} + */ +StaticDisjointSet.prototype.connected = function(x, y) { + var xRoot = this.find(x); + + return xRoot === this.find(y); +}; + +/** + * Method returning the set mapping. 
+ * + * @return {TypedArray} + */ +StaticDisjointSet.prototype.mapping = function() { + var MappingClass = helpers.getPointerArray(this.dimension); + + var ids = {}, + mapping = new MappingClass(this.size), + c = 0; + + var r; + + for (var i = 0, l = this.parents.length; i < l; i++) { + r = this.find(i); + + if (typeof ids[r] === 'undefined') { + mapping[i] = c; + ids[r] = c++; + } + else { + mapping[i] = ids[r]; + } + } + + return mapping; +}; + +/** + * Method used to compile the disjoint set into an array of arrays. + * + * @return {array} + */ +StaticDisjointSet.prototype.compile = function() { + var ids = {}, + result = new Array(this.dimension), + c = 0; + + var r; + + for (var i = 0, l = this.parents.length; i < l; i++) { + r = this.find(i); + + if (typeof ids[r] === 'undefined') { + result[c] = [i]; + ids[r] = c++; + } + else { + result[ids[r]].push(i); + } + } + + return result; +}; + +/** + * Convenience known methods. + */ +StaticDisjointSet.prototype.inspect = function() { + var array = this.compile(); + + // Trick so that node displays the name of the constructor + Object.defineProperty(array, 'constructor', { + value: StaticDisjointSet, + enumerable: false + }); + + return array; +}; + +if (typeof Symbol !== 'undefined') + StaticDisjointSet.prototype[Symbol.for('nodejs.util.inspect.custom')] = StaticDisjointSet.prototype.inspect; + + +/** + * Exporting. 
+ */ +module.exports = StaticDisjointSet; diff --git a/amplify/functions/downloadDocument/node_modules/mnemonist/static-interval-tree.d.ts b/amplify/functions/downloadDocument/node_modules/mnemonist/static-interval-tree.d.ts new file mode 100644 index 0000000..5302f1e --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/mnemonist/static-interval-tree.d.ts @@ -0,0 +1,24 @@ +/** + * Mnemonist StaticIntervalTree Typings + * ===================================== + */ +type StaticIntervalTreeGetter = (item: T) => number; +type StaticIntervalTreeGettersTuple = [StaticIntervalTreeGetter, StaticIntervalTreeGetter]; + +export default class StaticIntervalTree { + + // Members + height: number; + size: number; + + // Constructor + constructor(intervals: Array, getters?: StaticIntervalTreeGettersTuple); + + // Methods + intervalsContainingPoint(point: number): Array; + intervalsOverlappingInterval(interval: T): Array; + inspect(): any; + + // Statics + static from(iterable: Iterable | {[key: string] : I}): StaticIntervalTree; +} \ No newline at end of file diff --git a/amplify/functions/downloadDocument/node_modules/mnemonist/static-interval-tree.js b/amplify/functions/downloadDocument/node_modules/mnemonist/static-interval-tree.js new file mode 100644 index 0000000..41452f8 --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/mnemonist/static-interval-tree.js @@ -0,0 +1,387 @@ +/* + * Mnemonist StaticIntervalTree + * ============================= + * + * JavaScript implementation of a static interval tree. This tree is static in + * that you are required to know all its items beforehand and to built it + * from an iterable. + * + * This implementation represents the interval tree as an augmented balanced + * binary search tree. It works by sorting the intervals by startpoint first + * then proceeds building the augmented balanced BST bottom-up from the + * sorted list. 
+ * + * Note that this implementation considers every given intervals as closed for + * simplicity's sake. + * + * For more information: https://en.wikipedia.org/wiki/Interval_tree + */ +var iterables = require('./utils/iterables.js'), + typed = require('./utils/typed-arrays.js'); + +var FixedStack = require('./fixed-stack.js'); + + +// TODO: pass index to getters +// TODO: custom comparison +// TODO: possibility to pass offset buffer + +// TODO: intervals() => Symbol.iterator +// TODO: dfs() + +/** + * Helpers. + */ + +/** + * Recursive function building the BST from the sorted list of interval + * indices. + * + * @param {array} intervals - Array of intervals to index. + * @param {function} endGetter - Getter function for end of intervals. + * @param {array} sortedIndices - Sorted indices of the intervals. + * @param {array} tree - BST memory. + * @param {array} augmentations - Array of node augmentations. + * @param {number} i - BST index of current node. + * @param {number} low - Dichotomy low index. + * @param {number} high - Dichotomy high index. + * @return {number} - Created node augmentation value. + */ +function buildBST( + intervals, + endGetter, + sortedIndices, + tree, + augmentations, + i, + low, + high +) { + var mid = (low + (high - low) / 2) | 0, + midMinusOne = ~-mid, + midPlusOne = -~mid; + + var current = sortedIndices[mid]; + tree[i] = current + 1; + + var end = endGetter ? 
endGetter(intervals[current]) : intervals[current][1]; + + var left = i * 2 + 1, + right = i * 2 + 2; + + var leftEnd = -Infinity, + rightEnd = -Infinity; + + if (low <= midMinusOne) { + leftEnd = buildBST( + intervals, + endGetter, + sortedIndices, + tree, + augmentations, + left, + low, + midMinusOne + ); + } + + if (midPlusOne <= high) { + rightEnd = buildBST( + intervals, + endGetter, + sortedIndices, + tree, + augmentations, + right, + midPlusOne, + high + ); + } + + var augmentation = Math.max(end, leftEnd, rightEnd); + + var augmentationPointer = current; + + if (augmentation === leftEnd) + augmentationPointer = augmentations[tree[left] - 1]; + else if (augmentation === rightEnd) + augmentationPointer = augmentations[tree[right] - 1]; + + augmentations[current] = augmentationPointer; + + return augmentation; +} + +/** + * StaticIntervalTree. + * + * @constructor + * @param {array} intervals - Array of intervals to index. + * @param {array} getters - Optional getters. + */ +function StaticIntervalTree(intervals, getters) { + + // Properties + this.size = intervals.length; + this.intervals = intervals; + + var startGetter = null, + endGetter = null; + + if (Array.isArray(getters)) { + startGetter = getters[0]; + endGetter = getters[1]; + } + + // Building the indices array + var length = intervals.length; + + var IndicesArray = typed.getPointerArray(length + 1); + + var indices = new IndicesArray(length); + + var i; + + for (i = 1; i < length; i++) + indices[i] = i; + + // Sorting indices array + // TODO: check if some version of radix sort can outperform this part + indices.sort(function(a, b) { + a = intervals[a]; + b = intervals[b]; + + if (startGetter) { + a = startGetter(a); + b = startGetter(b); + } + else { + a = a[0]; + b = b[0]; + } + + if (a < b) + return -1; + + if (a > b) + return 1; + + // TODO: use getters + // TODO: this ordering has the following invariant: if query interval + // contains [nodeStart, max], then whole right subtree can be 
collected + // a = a[1]; + // b = b[1]; + + // if (a < b) + // return 1; + + // if (a > b) + // return -1; + + return 0; + }); + + // Building the binary tree + var height = Math.ceil(Math.log2(length + 1)), + treeSize = Math.pow(2, height) - 1; + + var tree = new IndicesArray(treeSize); + + var augmentations = new IndicesArray(length); + + buildBST( + intervals, + endGetter, + indices, + tree, + augmentations, + 0, + 0, + length - 1 + ); + + // Dropping indices + indices = null; + + // Storing necessary information + this.height = height; + this.tree = tree; + this.augmentations = augmentations; + this.startGetter = startGetter; + this.endGetter = endGetter; + + // Initializing DFS stack + this.stack = new FixedStack(IndicesArray, this.height); +} + +/** + * Method returning a list of intervals containing the given point. + * + * @param {any} point - Target point. + * @return {array} + */ +StaticIntervalTree.prototype.intervalsContainingPoint = function(point) { + var matches = []; + + var stack = this.stack; + + stack.clear(); + stack.push(0); + + var l = this.tree.length; + + var bstIndex, + intervalIndex, + interval, + maxInterval, + start, + end, + max, + left, + right; + + while (stack.size) { + bstIndex = stack.pop(); + intervalIndex = this.tree[bstIndex] - 1; + interval = this.intervals[intervalIndex]; + maxInterval = this.intervals[this.augmentations[intervalIndex]]; + + max = this.endGetter ? this.endGetter(maxInterval) : maxInterval[1]; + + // No possible match, point is farther right than the max end value + if (point > max) + continue; + + // Searching left + left = bstIndex * 2 + 1; + + if (left < l && this.tree[left] !== 0) + stack.push(left); + + start = this.startGetter ? this.startGetter(interval) : interval[0]; + end = this.endGetter ? 
this.endGetter(interval) : interval[1]; + + // Checking current node + if (point >= start && point <= end) + matches.push(interval); + + // If the point is to the left of the start of the current interval, + // then it cannot be in the right child + if (point < start) + continue; + + // Searching right + right = bstIndex * 2 + 2; + + if (right < l && this.tree[right] !== 0) + stack.push(right); + } + + return matches; +}; + +/** + * Method returning a list of intervals overlapping the given interval. + * + * @param {any} interval - Target interval. + * @return {array} + */ +StaticIntervalTree.prototype.intervalsOverlappingInterval = function(interval) { + var intervalStart = this.startGetter ? this.startGetter(interval) : interval[0], + intervalEnd = this.endGetter ? this.endGetter(interval) : interval[1]; + + var matches = []; + + var stack = this.stack; + + stack.clear(); + stack.push(0); + + var l = this.tree.length; + + var bstIndex, + intervalIndex, + currentInterval, + maxInterval, + start, + end, + max, + left, + right; + + while (stack.size) { + bstIndex = stack.pop(); + intervalIndex = this.tree[bstIndex] - 1; + currentInterval = this.intervals[intervalIndex]; + maxInterval = this.intervals[this.augmentations[intervalIndex]]; + + max = this.endGetter ? this.endGetter(maxInterval) : maxInterval[1]; + + // No possible match, start is farther right than the max end value + if (intervalStart > max) + continue; + + // Searching left + left = bstIndex * 2 + 1; + + if (left < l && this.tree[left] !== 0) + stack.push(left); + + start = this.startGetter ? this.startGetter(currentInterval) : currentInterval[0]; + end = this.endGetter ? 
this.endGetter(currentInterval) : currentInterval[1]; + + // Checking current node + if (intervalEnd >= start && intervalStart <= end) + matches.push(currentInterval); + + // If the end is to the left of the start of the current interval, + // then it cannot be in the right child + if (intervalEnd < start) + continue; + + // Searching right + right = bstIndex * 2 + 2; + + if (right < l && this.tree[right] !== 0) + stack.push(right); + } + + return matches; +}; + +/** + * Convenience known methods. + */ +StaticIntervalTree.prototype.inspect = function() { + var proxy = this.intervals.slice(); + + // Trick so that node displays the name of the constructor + Object.defineProperty(proxy, 'constructor', { + value: StaticIntervalTree, + enumerable: false + }); + + return proxy; +}; + +if (typeof Symbol !== 'undefined') + StaticIntervalTree.prototype[Symbol.for('nodejs.util.inspect.custom')] = StaticIntervalTree.prototype.inspect; + +/** + * Static @.from function taking an arbitrary iterable & converting it into + * a structure. + * + * @param {Iterable} iterable - Target iterable. + * @return {StaticIntervalTree} + */ +StaticIntervalTree.from = function(iterable, getters) { + if (iterables.isArrayLike(iterable)) + return new StaticIntervalTree(iterable, getters); + + return new StaticIntervalTree(Array.from(iterable), getters); +}; + +/** + * Exporting. 
+ */ +module.exports = StaticIntervalTree; diff --git a/amplify/functions/downloadDocument/node_modules/mnemonist/suffix-array.d.ts b/amplify/functions/downloadDocument/node_modules/mnemonist/suffix-array.d.ts new file mode 100644 index 0000000..b959403 --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/mnemonist/suffix-array.d.ts @@ -0,0 +1,37 @@ +/** + * Mnemonist SuffixArray Typings + * ============================== + */ +export default class SuffixArray { + + // Members + array: Array; + length: number; + string: string | Array; + + // Constructor + constructor(string: string | Array); + + // Methods + toString(): string; + toJSON(): Array; + inspect(): any; +} + +export class GeneralizedSuffixArray { + + // Members + array: Array; + length: number; + size: number; + text: string | Array; + + // Constructor + constructor(strings: Array | Array>); + + // Methods + longestCommonSubsequence(): string | Array; + toString(): string; + toJSON(): Array; + inspect(): any; +} \ No newline at end of file diff --git a/amplify/functions/downloadDocument/node_modules/mnemonist/suffix-array.js b/amplify/functions/downloadDocument/node_modules/mnemonist/suffix-array.js new file mode 100644 index 0000000..14990f4 --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/mnemonist/suffix-array.js @@ -0,0 +1,352 @@ +/** + * Mnemonist Suffix Array + * ======================= + * + * Linear time implementation of a suffix array using the recursive + * method by Karkkainen and Sanders. + * + * [References]: + * https://www.cs.helsinki.fi/u/tpkarkka/publications/jacm05-revised.pdf + * http://people.mpi-inf.mpg.de/~sanders/programs/suffix/ + * http://citeseerx.ist.psu.edu/viewdoc/download?doi=10.1.1.184.442&rep=rep1&type=pdf + * + * [Article]: + * "Simple Linear Work Suffix Array Construction", Karkkainen and Sanders. + * + * [Note]: + * A paper by Simon J. Puglisi, William F. 
Smyth & Andrew Turpin named + * "The Performance of Linear Time Suffix Sorting Algorithms" seems to + * prove that supralinear algorithm are in fact better faring for + * "real" world use cases. It would be nice to check this out in JavaScript + * because the high level of the language could change a lot to the fact. + * + * The current code is largely inspired by the following: + * https://github.com/tixxit/suffixarray/blob/master/suffixarray.js + */ + +/** + * Constants. + */ +var SEPARATOR = '\u0001'; + +/** + * Function used to sort the triples. + * + * @param {string|array} string - Padded sequence. + * @param {array} array - Array to sort (will be mutated). + * @param {number} offset - Index offset. + */ +function sort(string, array, offset) { + var l = array.length, + buckets = [], + i = l, + j = -1, + b, + d = 0, + bits; + + while (i--) + j = Math.max(string[array[i] + offset], j); + + bits = j >> 24 && 32 || j >> 16 && 24 || j >> 8 && 16 || 8; + + for (; d < bits; d += 4) { + for (i = 16; i--;) + buckets[i] = []; + for (i = l; i--;) + buckets[((string[array[i] + offset]) >> d) & 15].push(array[i]); + for (b = 0; b < 16; b++) { + for (j = buckets[b].length; j--;) + array[++i] = buckets[b][j]; + } + } +} + +/** + * Comparison helper. + */ +function compare(string, lookup, m, n) { + return ( + (string[m] - string[n]) || + (m % 3 === 2 ? + (string[m + 1] - string[n + 1]) || (lookup[m + 2] - lookup[n + 2]) : + (lookup[m + 1] - lookup[n + 1])) + ); +} + +/** + * Recursive function used to build the suffix tree in linear time. + * + * @param {string|array} string - Padded sequence. + * @param {number} l - True length of sequence (unpadded). 
+ * @return {array} + */ +function build(string, l) { + var a = [], + b = [], + al = (2 * l / 3) | 0, + bl = l - al, + r = (al + 1) >> 1, + i = al, + j = 0, + k, + lookup = [], + result = []; + + if (l === 1) + return [0]; + + while (i--) + a[i] = ((i * 3) >> 1) + 1; + + for (i = 3; i--;) + sort(string, a, i); + + j = b[((a[0] / 3) | 0) + (a[0] % 3 === 1 ? 0 : r)] = 1; + + for (i = 1; i < al; i++) { + if (string[a[i]] !== string[a[i - 1]] || + string[a[i] + 1] !== string[a[i - 1] + 1] || + string[a[i] + 2] !== string[a[i - 1] + 2]) + j++; + + b[((a[i] / 3) | 0) + (a[i] % 3 === 1 ? 0 : r)] = j; + } + + if (j < al) { + b = build(b, al); + + for (i = al; i--;) + a[i] = b[i] < r ? b[i] * 3 + 1 : ((b[i] - r) * 3 + 2); + } + + for (i = al; i--;) + lookup[a[i]] = i; + lookup[l] = -1; + lookup[l + 1] = -2; + + b = l % 3 === 1 ? [l - 1] : []; + + for (i = 0; i < al; i++) { + if (a[i] % 3 === 1) + b.push(a[i] - 1); + } + + sort(string, b, 0); + + for (i = 0, j = 0, k = 0; i < al && j < bl;) + result[k++] = ( + compare(string, lookup, a[i], b[j]) < 0 ? + a[i++] : + b[j++] + ); + + while (i < al) + result[k++] = a[i++]; + + while (j < bl) + result[k++] = b[j++]; + + return result; +} + +/** + * Function used to create the array we are going to work on. + * + * @param {string|array} target - Target sequence. 
+ * @return {array} + */ +function convert(target) { + + // Creating the alphabet array + var length = target.length, + paddingOffset = length % 3, + array = new Array(length + paddingOffset), + l, + i; + + // If we have an arbitrary sequence, we need to transform it + if (typeof target !== 'string') { + var uniqueTokens = Object.create(null); + + for (i = 0; i < length; i++) { + if (!uniqueTokens[target[i]]) + uniqueTokens[target[i]] = true; + } + + var alphabet = Object.create(null), + sortedUniqueTokens = Object.keys(uniqueTokens).sort(); + + for (i = 0, l = sortedUniqueTokens.length; i < l; i++) + alphabet[sortedUniqueTokens[i]] = i + 1; + + for (i = 0; i < length; i++) { + array[i] = alphabet[target[i]]; + } + } + else { + for (i = 0; i < length; i++) + array[i] = target.charCodeAt(i); + } + + // Padding the array + for (; i < paddingOffset; i++) + array[i] = 0; + + return array; +} + +/** + * Suffix Array. + * + * @constructor + * @param {string|array} string - Sequence for which to build the suffix array. + */ +function SuffixArray(string) { + + // Properties + this.hasArbitrarySequence = typeof string !== 'string'; + this.string = string; + this.length = string.length; + + // Building the array + this.array = build(convert(string), this.length); +} + +/** + * Convenience known methods. 
+ */ +SuffixArray.prototype.toString = function() { + return this.array.join(','); +}; + +SuffixArray.prototype.toJSON = function() { + return this.array; +}; + +SuffixArray.prototype.inspect = function() { + var array = new Array(this.length); + + for (var i = 0; i < this.length; i++) + array[i] = this.string.slice(this.array[i]); + + // Trick so that node displays the name of the constructor + Object.defineProperty(array, 'constructor', { + value: SuffixArray, + enumerable: false + }); + + return array; +}; + +if (typeof Symbol !== 'undefined') + SuffixArray.prototype[Symbol.for('nodejs.util.inspect.custom')] = SuffixArray.prototype.inspect; + +/** + * Generalized Suffix Array. + * + * @constructor + */ +function GeneralizedSuffixArray(strings) { + + // Properties + this.hasArbitrarySequence = typeof strings[0] !== 'string'; + this.size = strings.length; + + if (this.hasArbitrarySequence) { + this.text = []; + + for (var i = 0, l = this.size; i < l; i++) { + this.text.push.apply(this.text, strings[i]); + + if (i < l - 1) + this.text.push(SEPARATOR); + } + } + else { + this.text = strings.join(SEPARATOR); + } + + this.firstLength = strings[0].length; + this.length = this.text.length; + + // Building the array + this.array = build(convert(this.text), this.length); +} + +/** + * Method used to retrieve the longest common subsequence of the generalized + * suffix array. + * + * @return {string|array} + */ +GeneralizedSuffixArray.prototype.longestCommonSubsequence = function() { + var lcs = this.hasArbitrarySequence ? 
[] : '', + lcp, + i, + j, + s, + t; + + for (i = 1; i < this.length; i++) { + s = this.array[i]; + t = this.array[i - 1]; + + if (s < this.firstLength && + t < this.firstLength) + continue; + + if (s > this.firstLength && + t > this.firstLength) + continue; + + lcp = Math.min(this.length - s, this.length - t); + + for (j = 0; j < lcp; j++) { + if (this.text[s + j] !== this.text[t + j]) { + lcp = j; + break; + } + } + + if (lcp > lcs.length) + lcs = this.text.slice(s, s + lcp); + } + + return lcs; +}; + +/** + * Convenience known methods. + */ +GeneralizedSuffixArray.prototype.toString = function() { + return this.array.join(','); +}; + +GeneralizedSuffixArray.prototype.toJSON = function() { + return this.array; +}; + +GeneralizedSuffixArray.prototype.inspect = function() { + var array = new Array(this.length); + + for (var i = 0; i < this.length; i++) + array[i] = this.text.slice(this.array[i]); + + // Trick so that node displays the name of the constructor + Object.defineProperty(array, 'constructor', { + value: GeneralizedSuffixArray, + enumerable: false + }); + + return array; +}; + +if (typeof Symbol !== 'undefined') + GeneralizedSuffixArray.prototype[Symbol.for('nodejs.util.inspect.custom')] = GeneralizedSuffixArray.prototype.inspect; + +/** + * Exporting. 
+ */ +SuffixArray.GeneralizedSuffixArray = GeneralizedSuffixArray; +module.exports = SuffixArray; diff --git a/amplify/functions/downloadDocument/node_modules/mnemonist/symspell.d.ts b/amplify/functions/downloadDocument/node_modules/mnemonist/symspell.d.ts new file mode 100644 index 0000000..0e926d0 --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/mnemonist/symspell.d.ts @@ -0,0 +1,33 @@ +/** + * Mnemonist SymSpell Typings + * =========================== + */ +type SymSpellVerbosity = 0 | 1 | 2; + +type SymSpellOptions = { + maxDistance?: number; + verbosity?: SymSpellVerbosity +}; + +type SymSpellMatch = { + term: string; + distance: number; + count: number; +} + +export default class SymSpell { + + // Members + size: number; + + // Constructor + constructor(options?: SymSpellOptions); + + // Methods + clear(): void; + add(string: string): this; + search(query: string): Array; + + // Statics + static from(strings: Iterable | {[key: string]: string}, options?: SymSpellOptions): SymSpell; +} diff --git a/amplify/functions/downloadDocument/node_modules/mnemonist/symspell.js b/amplify/functions/downloadDocument/node_modules/mnemonist/symspell.js new file mode 100644 index 0000000..365ee43 --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/mnemonist/symspell.js @@ -0,0 +1,547 @@ +/* eslint no-loop-func: 0 */ +/** + * Mnemonist SymSpell + * =================== + * + * JavaScript implementation of the Symmetric Delete Spelling dictionary to + * efficiently index & query expression based on edit distance. + * Note that the current implementation target the v3.0 of the algorithm. + * + * [Reference]: + * http://blog.faroo.com/2012/06/07/improved-edit-distance-based-spelling-correction/ + * https://github.com/wolfgarbe/symspell + * + * [Author]: + * Wolf Garbe + */ +var forEach = require('obliterator/foreach'); + +/** + * Constants. 
+ */ +var DEFAULT_MAX_DISTANCE = 2, + DEFAULT_VERBOSITY = 2; + +var VERBOSITY = new Set([ + // Returns only the top suggestion + 0, + // Returns suggestions with the smallest edit distance + 1, + // Returns every suggestion (no early termination) + 2 +]); + +var VERBOSITY_EXPLANATIONS = { + 0: 'Returns only the top suggestion', + 1: 'Returns suggestions with the smallest edit distance', + 2: 'Returns every suggestion (no early termination)' +}; + +/** + * Functions. + */ + +/** + * Function creating a dictionary item. + * + * @param {number} [value] - An optional suggestion. + * @return {object} - The created item. + */ +function createDictionaryItem(value) { + var suggestions = new Set(); + + if (typeof value === 'number') + suggestions.add(value); + + return { + suggestions, + count: 0 + }; +} + +/** + * Function creating a suggestion item. + * + * @return {object} - The created item. + */ +function createSuggestionItem(term, distance, count) { + return { + term: term || '', + distance: distance || 0, + count: count || 0 + }; +} + +/** + * Simplified edit function. + * + * @param {string} word - Target word. + * @param {number} distance - Distance. + * @param {number} max - Max distance. + * @param {Set} [deletes] - Set mutated to store deletes. + */ +function edits(word, distance, max, deletes) { + deletes = deletes || new Set(); + distance++; + + var deletedItem, + l = word.length, + i; + + if (l > 1) { + for (i = 0; i < l; i++) { + deletedItem = word.substring(0, i) + word.substring(i + 1); + + if (!deletes.has(deletedItem)) { + deletes.add(deletedItem); + + if (distance < max) + edits(deletedItem, distance, max, deletes); + } + } + } + + return deletes; +} + +/** + * Function used to conditionally add suggestions. + * + * @param {array} words - Words list. + * @param {number} verbosity - Verbosity level. + * @param {object} item - The target item. + * @param {string} suggestion - The target suggestion. + * @param {number} int - Integer key of the word. 
+ * @param {object} deletedItem - Considered deleted item. + * @param {SymSpell} + */ +function addLowestDistance(words, verbosity, item, suggestion, int, deletedItem) { + var first = item.suggestions.values().next().value; + + if (verbosity < 2 && + item.suggestions.size > 0 && + words[first].length - deletedItem.length > suggestion.length - deletedItem.length) { + item.suggestions = new Set(); + item.count = 0; + } + + if (verbosity === 2 || + !item.suggestions.size || + words[first].length - deletedItem.length >= suggestion.length - deletedItem.length) { + item.suggestions.add(int); + } +} + +/** + * Custom Damerau-Levenshtein used by the algorithm. + * + * @param {string} source - First string. + * @param {string} target - Second string. + * @return {number} - The distance. + */ +function damerauLevenshtein(source, target) { + var m = source.length, + n = target.length, + H = [[]], + INF = m + n, + sd = new Map(), + i, + l, + j; + + H[0][0] = INF; + + for (i = 0; i <= m; i++) { + if (!H[i + 1]) + H[i + 1] = []; + H[i + 1][1] = i; + H[i + 1][0] = INF; + } + + for (j = 0; j <= n; j++) { + H[1][j + 1] = j; + H[0][j + 1] = INF; + } + + var st = source + target, + letter; + + for (i = 0, l = st.length; i < l; i++) { + letter = st[i]; + + if (!sd.has(letter)) + sd.set(letter, 0); + } + + // Iterating + for (i = 1; i <= m; i++) { + var DB = 0; + + for (j = 1; j <= n; j++) { + var i1 = sd.get(target[j - 1]), + j1 = DB; + + if (source[i - 1] === target[j - 1]) { + H[i + 1][j + 1] = H[i][j]; + DB = j; + } + else { + H[i + 1][j + 1] = Math.min( + H[i][j], + H[i + 1][j], + H[i][j + 1] + ) + 1; + } + + H[i + 1][j + 1] = Math.min( + H[i + 1][j + 1], + H[i1][j1] + (i - i1 - 1) + 1 + (j - j1 - 1) + ); + } + + sd.set(source[i - 1], i); + } + + return H[m + 1][n + 1]; +} + +/** + * Lookup function. + * + * @param {object} dictionary - A SymSpell dictionary. + * @param {array} words - Unique words list. + * @param {number} verbosity - Verbosity level. 
+ * @param {number} maxDistance - Maximum distance. + * @param {number} maxLength - Maximum word length in the dictionary. + * @param {string} input - Input string. + * @return {array} - The list of suggestions. + */ +function lookup(dictionary, words, verbosity, maxDistance, maxLength, input) { + var length = input.length; + + if (length - maxDistance > maxLength) + return []; + + var candidates = [input], + candidateSet = new Set(), + suggestionSet = new Set(); + + var suggestions = [], + candidate, + item; + + // Exhausting every candidates + while (candidates.length > 0) { + candidate = candidates.shift(); + + // Early termination + if ( + verbosity < 2 && + suggestions.length > 0 && + length - candidate.length > suggestions[0].distance + ) + break; + + item = dictionary[candidate]; + + if (item !== undefined) { + if (typeof item === 'number') + item = createDictionaryItem(item); + + if (item.count > 0 && !suggestionSet.has(candidate)) { + suggestionSet.add(candidate); + + var suggestItem = createSuggestionItem( + candidate, + length - candidate.length, + item.count + ); + + suggestions.push(suggestItem); + + // Another early termination + if (verbosity < 2 && length - candidate.length === 0) + break; + } + + // Iterating over the item's suggestions + item.suggestions.forEach(index => { + var suggestion = words[index]; + + // Do we already have this suggestion? 
+ if (suggestionSet.has(suggestion)) + return; + + suggestionSet.add(suggestion); + + // Computing distance between candidate & suggestion + var distance = 0; + + if (input !== suggestion) { + if (suggestion.length === candidate.length) { + distance = length - candidate.length; + } + else if (length === candidate.length) { + distance = suggestion.length - candidate.length; + } + else { + var ii = 0, + jj = 0; + + var l = suggestion.length; + + while ( + ii < l && + ii < length && + suggestion[ii] === input[ii] + ) { + ii++; + } + + while ( + jj < l - ii && + jj < length && + suggestion[l - jj - 1] === input[length - jj - 1] + ) { + jj++; + } + + if (ii > 0 || jj > 0) { + distance = damerauLevenshtein( + suggestion.substr(ii, l - ii - jj), + input.substr(ii, length - ii - jj) + ); + } + else { + distance = damerauLevenshtein(suggestion, input); + } + } + } + + // Removing suggestions of higher distance + if (verbosity < 2 && + suggestions.length > 0 && + suggestions[0].distance > distance) { + suggestions = []; + } + + if (verbosity < 2 && + suggestions.length > 0 && + distance > suggestions[0].distance) { + return; + } + + if (distance <= maxDistance) { + var target = dictionary[suggestion]; + + if (target !== undefined) { + suggestions.push(createSuggestionItem( + suggestion, + distance, + target.count + )); + } + } + }); + } + + // Adding edits + if (length - candidate.length < maxDistance) { + + if (verbosity < 2 && + suggestions.length > 0 && + length - candidate.length >= suggestions[0].distance) + continue; + + for (var i = 0, l = candidate.length; i < l; i++) { + var deletedItem = ( + candidate.substring(0, i) + + candidate.substring(i + 1) + ); + + if (!candidateSet.has(deletedItem)) { + candidateSet.add(deletedItem); + candidates.push(deletedItem); + } + } + } + } + + if (verbosity === 0) + return suggestions.slice(0, 1); + + return suggestions; +} + +/** + * SymSpell. 
+ * + * @constructor + */ +function SymSpell(options) { + options = options || {}; + + this.clear(); + + // Properties + this.maxDistance = typeof options.maxDistance === 'number' ? + options.maxDistance : + DEFAULT_MAX_DISTANCE; + this.verbosity = typeof options.verbosity === 'number' ? + options.verbosity : + DEFAULT_VERBOSITY; + + // Sanity checks + if (typeof this.maxDistance !== 'number' || this.maxDistance <= 0) + throw Error('mnemonist/SymSpell.constructor: invalid `maxDistance` option. Should be a integer greater than 0.'); + + if (!VERBOSITY.has(this.verbosity)) + throw Error('mnemonist/SymSpell.constructor: invalid `verbosity` option. Should be either 0, 1 or 2.'); +} + +/** + * Method used to clear the structure. + * + * @return {undefined} + */ +SymSpell.prototype.clear = function() { + + // Properties + this.size = 0; + this.dictionary = Object.create(null); + this.maxLength = 0; + this.words = []; +}; + +/** + * Method used to add a word to the index. + * + * @param {string} word - Word to add. 
+ * @param {SymSpell} + */ +SymSpell.prototype.add = function(word) { + var item = this.dictionary[word]; + + if (item !== undefined) { + if (typeof item === 'number') { + item = createDictionaryItem(item); + this.dictionary[word] = item; + } + + item.count++; + } + + else { + item = createDictionaryItem(); + item.count++; + + this.dictionary[word] = item; + + if (word.length > this.maxLength) + this.maxLength = word.length; + } + + if (item.count === 1) { + var number = this.words.length; + this.words.push(word); + + var deletes = edits(word, 0, this.maxDistance); + + deletes.forEach(deletedItem => { + var target = this.dictionary[deletedItem]; + + if (target !== undefined) { + if (typeof target === 'number') { + target = createDictionaryItem(target); + + this.dictionary[deletedItem] = target; + } + + if (!target.suggestions.has(number)) { + addLowestDistance( + this.words, + this.verbosity, + target, + word, + number, + deletedItem + ); + } + } + else { + this.dictionary[deletedItem] = number; + } + }); + } + + this.size++; + + return this; +}; + +/** + * Method used to search the index. + * + * @param {string} input - Input query. + * @return {array} - The found suggestions. + */ +SymSpell.prototype.search = function(input) { + return lookup( + this.dictionary, + this.words, + this.verbosity, + this.maxDistance, + this.maxLength, + input + ); +}; + +/** + * Convenience known methods. 
+ */ +SymSpell.prototype.inspect = function() { + var array = []; + + array.size = this.size; + array.maxDistance = this.maxDistance; + array.verbosity = this.verbosity; + array.behavior = VERBOSITY_EXPLANATIONS[this.verbosity]; + + for (var k in this.dictionary) { + if (typeof this.dictionary[k] === 'object' && this.dictionary[k].count) + array.push([k, this.dictionary[k].count]); + } + + // Trick so that node displays the name of the constructor + Object.defineProperty(array, 'constructor', { + value: SymSpell, + enumerable: false + }); + + return array; +}; + +if (typeof Symbol !== 'undefined') + SymSpell.prototype[Symbol.for('nodejs.util.inspect.custom')] = SymSpell.prototype.inspect; + +/** + * Static @.from function taking an arbitrary iterable & converting it into + * a structure. + * + * @param {Iterable} iterable - Target iterable. + * @return {SymSpell} + */ +SymSpell.from = function(iterable, options) { + var index = new SymSpell(options); + + forEach(iterable, function(value) { + index.add(value); + }); + + return index; +}; + +/** + * Exporting. 
+ */ +module.exports = SymSpell; diff --git a/amplify/functions/downloadDocument/node_modules/mnemonist/trie-map.d.ts b/amplify/functions/downloadDocument/node_modules/mnemonist/trie-map.d.ts new file mode 100644 index 0000000..b083304 --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/mnemonist/trie-map.d.ts @@ -0,0 +1,30 @@ +/** + * Mnemonist TrieMap Typings + * ========================== + */ +export default class TrieMap implements Iterable<[K, V]> { + + // Members + size: number; + + // Constructor + constructor(Token?: new () => K); + + // Methods + clear(): void; + set(prefix: K, value: V): this; + update(prefix: K, updateFunction: (oldValue: V | undefined) => V): this + get(prefix: K): V; + delete(prefix: K): boolean; + has(prefix: K): boolean; + find(prefix: K): Array<[K, V]>; + values(): IterableIterator; + prefixes(): IterableIterator; + keys(): IterableIterator; + entries(): IterableIterator<[K, V]>; + [Symbol.iterator](): IterableIterator<[K, V]>; + inspect(): any; + + // Statics + static from(iterable: Iterable<[I, J]> | {[key: string]: J}): TrieMap; +} diff --git a/amplify/functions/downloadDocument/node_modules/mnemonist/trie-map.js b/amplify/functions/downloadDocument/node_modules/mnemonist/trie-map.js new file mode 100644 index 0000000..d601448 --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/mnemonist/trie-map.js @@ -0,0 +1,477 @@ +/** + * Mnemonist TrieMap + * ================== + * + * JavaScript TrieMap implementation based upon plain objects. As such this + * structure is more a convenience building upon the trie's advantages than + * a real performant alternative to already existing structures. + * + * Note that the Trie is based upon the TrieMap since the underlying machine + * is the very same. The Trie just does not let you set values and only + * considers the existence of the given prefixes. 
+ */ +var forEach = require('obliterator/foreach'), + Iterator = require('obliterator/iterator'); + +/** + * Constants. + */ +var SENTINEL = String.fromCharCode(0); + +/** + * TrieMap. + * + * @constructor + */ +function TrieMap(Token) { + this.mode = Token === Array ? 'array' : 'string'; + this.clear(); +} + +/** + * Method used to clear the trie. + * + * @return {undefined} + */ +TrieMap.prototype.clear = function() { + + // Properties + this.root = {}; + this.size = 0; +}; + +/** + * Method used to set the value of the given prefix in the trie. + * + * @param {string|array} prefix - Prefix to follow. + * @param {any} value - Value for the prefix. + * @return {TrieMap} + */ +TrieMap.prototype.set = function(prefix, value) { + var node = this.root, + token; + + for (var i = 0, l = prefix.length; i < l; i++) { + token = prefix[i]; + + node = node[token] || (node[token] = {}); + } + + // Do we need to increase size? + if (!(SENTINEL in node)) + this.size++; + + node[SENTINEL] = value; + + return this; +}; + +/** + * Method used to update the value of the given prefix in the trie. + * + * @param {string|array} prefix - Prefix to follow. + * @param {(oldValue: any | undefined) => any} updateFunction - Update value visitor callback. + * @return {TrieMap} + */ +TrieMap.prototype.update = function(prefix, updateFunction) { + var node = this.root, + token; + + for (var i = 0, l = prefix.length; i < l; i++) { + token = prefix[i]; + + node = node[token] || (node[token] = {}); + } + + // Do we need to increase size? + if (!(SENTINEL in node)) + this.size++; + + node[SENTINEL] = updateFunction(node[SENTINEL]); + + return this; +}; + +/** + * Method used to return the value sitting at the end of the given prefix or + * undefined if none exist. + * + * @param {string|array} prefix - Prefix to follow. 
+ * @return {any|undefined} + */ +TrieMap.prototype.get = function(prefix) { + var node = this.root, + token, + i, + l; + + for (i = 0, l = prefix.length; i < l; i++) { + token = prefix[i]; + node = node[token]; + + // Prefix does not exist + if (typeof node === 'undefined') + return; + } + + if (!(SENTINEL in node)) + return; + + return node[SENTINEL]; +}; + +/** + * Method used to delete a prefix from the trie. + * + * @param {string|array} prefix - Prefix to delete. + * @return {boolean} + */ +TrieMap.prototype.delete = function(prefix) { + var node = this.root, + toPrune = null, + tokenToPrune = null, + parent, + token, + i, + l; + + for (i = 0, l = prefix.length; i < l; i++) { + token = prefix[i]; + parent = node; + node = node[token]; + + // Prefix does not exist + if (typeof node === 'undefined') + return false; + + // Keeping track of a potential branch to prune + if (toPrune !== null) { + if (Object.keys(node).length > 1) { + toPrune = null; + tokenToPrune = null; + } + } + else { + if (Object.keys(node).length < 2) { + toPrune = parent; + tokenToPrune = token; + } + } + } + + if (!(SENTINEL in node)) + return false; + + this.size--; + + if (toPrune) + delete toPrune[tokenToPrune]; + else + delete node[SENTINEL]; + + return true; +}; + +// TODO: add #.prune? + +/** + * Method used to assert whether the given prefix exists in the TrieMap. + * + * @param {string|array} prefix - Prefix to check. + * @return {boolean} + */ +TrieMap.prototype.has = function(prefix) { + var node = this.root, + token; + + for (var i = 0, l = prefix.length; i < l; i++) { + token = prefix[i]; + node = node[token]; + + if (typeof node === 'undefined') + return false; + } + + return SENTINEL in node; +}; + +/** + * Method used to retrieve every item in the trie with the given prefix. + * + * @param {string|array} prefix - Prefix to query. 
+ * @return {array} + */ +TrieMap.prototype.find = function(prefix) { + var isString = typeof prefix === 'string'; + + var node = this.root, + matches = [], + token, + i, + l; + + for (i = 0, l = prefix.length; i < l; i++) { + token = prefix[i]; + node = node[token]; + + if (typeof node === 'undefined') + return matches; + } + + // Performing DFS from prefix + var nodeStack = [node], + prefixStack = [prefix], + k; + + while (nodeStack.length) { + prefix = prefixStack.pop(); + node = nodeStack.pop(); + + for (k in node) { + if (k === SENTINEL) { + matches.push([prefix, node[SENTINEL]]); + continue; + } + + nodeStack.push(node[k]); + prefixStack.push(isString ? prefix + k : prefix.concat(k)); + } + } + + return matches; +}; + +/** + * Method returning an iterator over the trie's values. + * + * @param {string|array} [prefix] - Optional starting prefix. + * @return {Iterator} + */ +TrieMap.prototype.values = function(prefix) { + var node = this.root, + nodeStack = [], + token, + i, + l; + + // Resolving initial prefix + if (prefix) { + for (i = 0, l = prefix.length; i < l; i++) { + token = prefix[i]; + node = node[token]; + + // If the prefix does not exist, we return an empty iterator + if (typeof node === 'undefined') + return Iterator.empty(); + } + } + + nodeStack.push(node); + + return new Iterator(function() { + var currentNode, + hasValue = false, + k; + + while (nodeStack.length) { + currentNode = nodeStack.pop(); + + for (k in currentNode) { + if (k === SENTINEL) { + hasValue = true; + continue; + } + + nodeStack.push(currentNode[k]); + } + + if (hasValue) + return {done: false, value: currentNode[SENTINEL]}; + } + + return {done: true}; + }); +}; + +/** + * Method returning an iterator over the trie's prefixes. + * + * @param {string|array} [prefix] - Optional starting prefix. 
+ * @return {Iterator} + */ +TrieMap.prototype.prefixes = function(prefix) { + var node = this.root, + nodeStack = [], + prefixStack = [], + token, + i, + l; + + var isString = this.mode === 'string'; + + // Resolving initial prefix + if (prefix) { + for (i = 0, l = prefix.length; i < l; i++) { + token = prefix[i]; + node = node[token]; + + // If the prefix does not exist, we return an empty iterator + if (typeof node === 'undefined') + return Iterator.empty(); + } + } + else { + prefix = isString ? '' : []; + } + + nodeStack.push(node); + prefixStack.push(prefix); + + return new Iterator(function() { + var currentNode, + currentPrefix, + hasValue = false, + k; + + while (nodeStack.length) { + currentNode = nodeStack.pop(); + currentPrefix = prefixStack.pop(); + + for (k in currentNode) { + if (k === SENTINEL) { + hasValue = true; + continue; + } + + nodeStack.push(currentNode[k]); + prefixStack.push(isString ? currentPrefix + k : currentPrefix.concat(k)); + } + + if (hasValue) + return {done: false, value: currentPrefix}; + } + + return {done: true}; + }); +}; +TrieMap.prototype.keys = TrieMap.prototype.prefixes; + +/** + * Method returning an iterator over the trie's entries. + * + * @param {string|array} [prefix] - Optional starting prefix. + * @return {Iterator} + */ +TrieMap.prototype.entries = function(prefix) { + var node = this.root, + nodeStack = [], + prefixStack = [], + token, + i, + l; + + var isString = this.mode === 'string'; + + // Resolving initial prefix + if (prefix) { + for (i = 0, l = prefix.length; i < l; i++) { + token = prefix[i]; + node = node[token]; + + // If the prefix does not exist, we return an empty iterator + if (typeof node === 'undefined') + return Iterator.empty(); + } + } + else { + prefix = isString ? 
'' : []; + } + + nodeStack.push(node); + prefixStack.push(prefix); + + return new Iterator(function() { + var currentNode, + currentPrefix, + hasValue = false, + k; + + while (nodeStack.length) { + currentNode = nodeStack.pop(); + currentPrefix = prefixStack.pop(); + + for (k in currentNode) { + if (k === SENTINEL) { + hasValue = true; + continue; + } + + nodeStack.push(currentNode[k]); + prefixStack.push(isString ? currentPrefix + k : currentPrefix.concat(k)); + } + + if (hasValue) + return {done: false, value: [currentPrefix, currentNode[SENTINEL]]}; + } + + return {done: true}; + }); +}; + +/** + * Attaching the #.entries method to Symbol.iterator if possible. + */ +if (typeof Symbol !== 'undefined') + TrieMap.prototype[Symbol.iterator] = TrieMap.prototype.entries; + +/** + * Convenience known methods. + */ +TrieMap.prototype.inspect = function() { + var proxy = new Array(this.size); + + var iterator = this.entries(), + step, + i = 0; + + while ((step = iterator.next(), !step.done)) + proxy[i++] = step.value; + + // Trick so that node displays the name of the constructor + Object.defineProperty(proxy, 'constructor', { + value: TrieMap, + enumerable: false + }); + + return proxy; +}; + +if (typeof Symbol !== 'undefined') + TrieMap.prototype[Symbol.for('nodejs.util.inspect.custom')] = TrieMap.prototype.inspect; + +TrieMap.prototype.toJSON = function() { + return this.root; +}; + +/** + * Static @.from function taking an arbitrary iterable & converting it into + * a trie. + * + * @param {Iterable} iterable - Target iterable. + * @return {TrieMap} + */ +TrieMap.from = function(iterable) { + var trie = new TrieMap(); + + forEach(iterable, function(value, key) { + trie.set(key, value); + }); + + return trie; +}; + +/** + * Exporting. 
+ */ +TrieMap.SENTINEL = SENTINEL; +module.exports = TrieMap; diff --git a/amplify/functions/downloadDocument/node_modules/mnemonist/trie.d.ts b/amplify/functions/downloadDocument/node_modules/mnemonist/trie.d.ts new file mode 100644 index 0000000..4b2a202 --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/mnemonist/trie.d.ts @@ -0,0 +1,26 @@ +/** + * Mnemonist Trie Typings + * ======================= + */ +export default class Trie implements Iterable { + + // Members + size: number; + + // Constructor + constructor(Token?: new () => T); + + // Methods + clear(): void; + add(prefix: T): this; + delete(prefix: T): boolean; + has(prefix: T): boolean; + find(prefix: T): Array; + prefixes(): IterableIterator; + keys(): IterableIterator; + [Symbol.iterator](): IterableIterator; + inspect(): any; + + // Statics + static from(iterable: Iterable | {[key: string]: I}): Trie; +} diff --git a/amplify/functions/downloadDocument/node_modules/mnemonist/trie.js b/amplify/functions/downloadDocument/node_modules/mnemonist/trie.js new file mode 100644 index 0000000..9562aef --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/mnemonist/trie.js @@ -0,0 +1,167 @@ +/** + * Mnemonist Trie + * =============== + * + * JavaScript Trie implementation based upon plain objects. As such this + * structure is more a convenience building upon the trie's advantages than + * a real performant alternative to already existing structures. + * + * Note that the Trie is based upon the TrieMap since the underlying machine + * is the very same. The Trie just does not let you set values and only + * considers the existence of the given prefixes. + */ +var forEach = require('obliterator/foreach'), + TrieMap = require('./trie-map.js'); + +/** + * Constants. + */ +var SENTINEL = String.fromCharCode(0); + +/** + * Trie. + * + * @constructor + */ +function Trie(Token) { + this.mode = Token === Array ? 
'array' : 'string'; + this.clear(); +} + +// Re-using TrieMap's prototype +for (var methodName in TrieMap.prototype) + Trie.prototype[methodName] = TrieMap.prototype[methodName]; + +// Dropping irrelevant methods +delete Trie.prototype.set; +delete Trie.prototype.get; +delete Trie.prototype.values; +delete Trie.prototype.entries; + +/** + * Method used to add the given prefix to the trie. + * + * @param {string|array} prefix - Prefix to follow. + * @return {TrieMap} + */ +Trie.prototype.add = function(prefix) { + var node = this.root, + token; + + for (var i = 0, l = prefix.length; i < l; i++) { + token = prefix[i]; + + node = node[token] || (node[token] = {}); + } + + // Do we need to increase size? + if (!(SENTINEL in node)) + this.size++; + + node[SENTINEL] = true; + + return this; +}; + +/** + * Method used to retrieve every item in the trie with the given prefix. + * + * @param {string|array} prefix - Prefix to query. + * @return {array} + */ +Trie.prototype.find = function(prefix) { + var isString = typeof prefix === 'string'; + + var node = this.root, + matches = [], + token, + i, + l; + + for (i = 0, l = prefix.length; i < l; i++) { + token = prefix[i]; + node = node[token]; + + if (typeof node === 'undefined') + return matches; + } + + // Performing DFS from prefix + var nodeStack = [node], + prefixStack = [prefix], + k; + + while (nodeStack.length) { + prefix = prefixStack.pop(); + node = nodeStack.pop(); + + for (k in node) { + if (k === SENTINEL) { + matches.push(prefix); + continue; + } + + nodeStack.push(node[k]); + prefixStack.push(isString ? prefix + k : prefix.concat(k)); + } + } + + return matches; +}; + +/** + * Attaching the #.keys method to Symbol.iterator if possible. + */ +if (typeof Symbol !== 'undefined') + Trie.prototype[Symbol.iterator] = Trie.prototype.keys; + +/** + * Convenience known methods. 
+ */ +Trie.prototype.inspect = function() { + var proxy = new Set(); + + var iterator = this.keys(), + step; + + while ((step = iterator.next(), !step.done)) + proxy.add(step.value); + + // Trick so that node displays the name of the constructor + Object.defineProperty(proxy, 'constructor', { + value: Trie, + enumerable: false + }); + + return proxy; +}; + +if (typeof Symbol !== 'undefined') + Trie.prototype[Symbol.for('nodejs.util.inspect.custom')] = Trie.prototype.inspect; + +Trie.prototype.toJSON = function() { + return this.root; +}; + +/** + * Static @.from function taking an arbitrary iterable & converting it into + * a trie. + * + * @param {Iterable} iterable - Target iterable. + * @return {Trie} + */ +Trie.from = function(iterable) { + var trie = new Trie(); + + forEach(iterable, function(value) { + trie.add(value); + }); + + return trie; +}; + +/** + * Exporting. + */ +Trie.SENTINEL = SENTINEL; +module.exports = Trie; diff --git a/amplify/functions/downloadDocument/node_modules/mnemonist/utils/binary-search.js b/amplify/functions/downloadDocument/node_modules/mnemonist/utils/binary-search.js new file mode 100644 index 0000000..0666c82 --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/mnemonist/utils/binary-search.js @@ -0,0 +1,216 @@ +/** + * Mnemonist Binary Search Helpers + * ================================ + * + * Typical binary search functions. + */ + +/** + * Function returning the index of the search value in the array or `-1` if + * not found. + * + * @param {array} array - Haystack. + * @param {any} value - Needle. + * @return {number} + */ +exports.search = function(array, value, lo, hi) { + var mid = 0; + + lo = typeof lo !== 'undefined' ? lo : 0; + hi = typeof hi !== 'undefined' ? 
hi : array.length; + + hi--; + + var current; + + while (lo <= hi) { + mid = (lo + hi) >>> 1; + + current = array[mid]; + + if (current > value) { + hi = ~-mid; + } + else if (current < value) { + lo = -~mid; + } + else { + return mid; + } + } + + return -1; +}; + +/** + * Same as above, but can use a custom comparator function. + * + * @param {function} comparator - Custom comparator function. + * @param {array} array - Haystack. + * @param {any} value - Needle. + * @return {number} + */ +exports.searchWithComparator = function(comparator, array, value) { + var mid = 0, + lo = 0, + hi = ~-array.length, + comparison; + + while (lo <= hi) { + mid = (lo + hi) >>> 1; + + comparison = comparator(array[mid], value); + + if (comparison > 0) { + hi = ~-mid; + } + else if (comparison < 0) { + lo = -~mid; + } + else { + return mid; + } + } + + return -1; +}; + +/** + * Function returning the lower bound of the given value in the array. + * + * @param {array} array - Haystack. + * @param {any} value - Needle. + * @param {number} [lo] - Start index. + * @param {numner} [hi] - End index. + * @return {number} + */ +exports.lowerBound = function(array, value, lo, hi) { + var mid = 0; + + lo = typeof lo !== 'undefined' ? lo : 0; + hi = typeof hi !== 'undefined' ? hi : array.length; + + while (lo < hi) { + mid = (lo + hi) >>> 1; + + if (value <= array[mid]) { + hi = mid; + } + else { + lo = -~mid; + } + } + + return lo; +}; + +/** + * Same as above, but can use a custom comparator function. + * + * @param {function} comparator - Custom comparator function. + * @param {array} array - Haystack. + * @param {any} value - Needle. + * @return {number} + */ +exports.lowerBoundWithComparator = function(comparator, array, value) { + var mid = 0, + lo = 0, + hi = array.length; + + while (lo < hi) { + mid = (lo + hi) >>> 1; + + if (comparator(value, array[mid]) <= 0) { + hi = mid; + } + else { + lo = -~mid; + } + } + + return lo; +}; + +/** + * Same as above, but can work on sorted indices. 
+ * + * @param {array} array - Haystack. + * @param {array} array - Indices. + * @param {any} value - Needle. + * @return {number} + */ +exports.lowerBoundIndices = function(array, indices, value, lo, hi) { + var mid = 0; + + lo = typeof lo !== 'undefined' ? lo : 0; + hi = typeof hi !== 'undefined' ? hi : array.length; + + while (lo < hi) { + mid = (lo + hi) >>> 1; + + if (value <= array[indices[mid]]) { + hi = mid; + } + else { + lo = -~mid; + } + } + + return lo; +}; + +/** + * Function returning the upper bound of the given value in the array. + * + * @param {array} array - Haystack. + * @param {any} value - Needle. + * @param {number} [lo] - Start index. + * @param {numner} [hi] - End index. + * @return {number} + */ +exports.upperBound = function(array, value, lo, hi) { + var mid = 0; + + lo = typeof lo !== 'undefined' ? lo : 0; + hi = typeof hi !== 'undefined' ? hi : array.length; + + while (lo < hi) { + mid = (lo + hi) >>> 1; + + if (value >= array[mid]) { + lo = -~mid; + } + else { + hi = mid; + } + } + + return lo; +}; + +/** + * Same as above, but can use a custom comparator function. + * + * @param {function} comparator - Custom comparator function. + * @param {array} array - Haystack. + * @param {any} value - Needle. 
+ * @return {number} + */ +exports.upperBoundWithComparator = function(comparator, array, value) { + var mid = 0, + lo = 0, + hi = array.length; + + while (lo < hi) { + mid = (lo + hi) >>> 1; + + if (comparator(value, array[mid]) >= 0) { + lo = -~mid; + } + else { + hi = mid; + } + } + + return lo; +}; diff --git a/amplify/functions/downloadDocument/node_modules/mnemonist/utils/bitwise.js b/amplify/functions/downloadDocument/node_modules/mnemonist/utils/bitwise.js new file mode 100644 index 0000000..191dfc2 --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/mnemonist/utils/bitwise.js @@ -0,0 +1,109 @@ +/** + * Mnemonist Bitwise Helpers + * ========================== + * + * Miscellaneous helpers helping with bitwise operations. + */ + +/** + * Takes a 32 bits integer and returns its MSB using SWAR strategy. + * + * @param {number} x - Target number. + * @return {number} + */ +function msb32(x) { + x |= (x >> 1); + x |= (x >> 2); + x |= (x >> 4); + x |= (x >> 8); + x |= (x >> 16); + + return (x & ~(x >> 1)); +} +exports.msb32 = msb32; + +/** + * Takes a byte and returns its MSB using SWAR strategy. + * + * @param {number} x - Target number. + * @return {number} + */ +function msb8(x) { + x |= (x >> 1); + x |= (x >> 2); + x |= (x >> 4); + + return (x & ~(x >> 1)); +} +exports.msb8 = msb8; + +/** + * Takes a number and return bit at position. + * + * @param {number} x - Target number. + * @param {number} pos - Position. + * @return {number} + */ +exports.test = function(x, pos) { + return (x >> pos) & 1; +}; + +/** + * Compare two bytes and return their critical bit. + * + * @param {number} a - First byte. + * @param {number} b - Second byte. 
+ * @return {number} + */ +exports.criticalBit8 = function(a, b) { + return msb8(a ^ b); +}; + +exports.criticalBit8Mask = function(a, b) { + return (~msb8(a ^ b) >>> 0) & 0xff; +}; + +exports.testCriticalBit8 = function(x, mask) { + return (1 + (x | mask)) >> 8; +}; + +exports.criticalBit32Mask = function(a, b) { + return (~msb32(a ^ b) >>> 0) & 0xffffffff; +}; + +/** + * Takes a 32 bits integer and returns its population count (number of 1 of + * the binary representation). + * + * @param {number} x - Target number. + * @return {number} + */ +exports.popcount = function(x) { + x -= x >> 1 & 0x55555555; + x = (x & 0x33333333) + (x >> 2 & 0x33333333); + x = x + (x >> 4) & 0x0f0f0f0f; + x += x >> 8; + x += x >> 16; + return x & 0x7f; +}; + +/** + * Slightly faster popcount function based on a precomputed table of 8bits + * words. + * + * @param {number} x - Target number. + * @return {number} + */ +var TABLE8 = new Uint8Array(Math.pow(2, 8)); + +for (var i = 0, l = TABLE8.length; i < l; i++) + TABLE8[i] = exports.popcount(i); + +exports.table8Popcount = function(x) { + return ( + TABLE8[x & 0xff] + + TABLE8[(x >> 8) & 0xff] + + TABLE8[(x >> 16) & 0xff] + + TABLE8[(x >> 24) & 0xff] + ); +}; diff --git a/amplify/functions/downloadDocument/node_modules/mnemonist/utils/comparators.js b/amplify/functions/downloadDocument/node_modules/mnemonist/utils/comparators.js new file mode 100644 index 0000000..498b4a6 --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/mnemonist/utils/comparators.js @@ -0,0 +1,79 @@ +/** + * Mnemonist Heap Comparators + * =========================== + * + * Default comparators & functions dealing with comparators reversing etc. + */ +var DEFAULT_COMPARATOR = function(a, b) { + if (a < b) + return -1; + if (a > b) + return 1; + + return 0; +}; + +var DEFAULT_REVERSE_COMPARATOR = function(a, b) { + if (a < b) + return 1; + if (a > b) + return -1; + + return 0; +}; + +/** + * Function used to reverse a comparator. 
+ */ +function reverseComparator(comparator) { + return function(a, b) { + return comparator(b, a); + }; +} + +/** + * Function returning a tuple comparator. + */ +function createTupleComparator(size) { + if (size === 2) { + return function(a, b) { + if (a[0] < b[0]) + return -1; + + if (a[0] > b[0]) + return 1; + + if (a[1] < b[1]) + return -1; + + if (a[1] > b[1]) + return 1; + + return 0; + }; + } + + return function(a, b) { + var i = 0; + + while (i < size) { + if (a[i] < b[i]) + return -1; + + if (a[i] > b[i]) + return 1; + + i++; + } + + return 0; + }; +} + +/** + * Exporting. + */ +exports.DEFAULT_COMPARATOR = DEFAULT_COMPARATOR; +exports.DEFAULT_REVERSE_COMPARATOR = DEFAULT_REVERSE_COMPARATOR; +exports.reverseComparator = reverseComparator; +exports.createTupleComparator = createTupleComparator; diff --git a/amplify/functions/downloadDocument/node_modules/mnemonist/utils/hash-tables.js b/amplify/functions/downloadDocument/node_modules/mnemonist/utils/hash-tables.js new file mode 100644 index 0000000..dfed95e --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/mnemonist/utils/hash-tables.js @@ -0,0 +1,107 @@ +/* eslint no-constant-condition: 0 */ +/** + * Mnemonist Hashtable Helpers + * ============================ + * + * Miscellaneous helpers helper function dealing with hashtables. 
+ */ +function jenkinsInt32(a) { + + a = (a + 0x7ed55d16) + (a << 12); + a = (a ^ 0xc761c23c) ^ (a >> 19); + a = (a + 0x165667b1) + (a << 5); + a = (a + 0xd3a2646c) ^ (a << 9); + a = (a + 0xfd7046c5) + (a << 3); + a = (a ^ 0xb55a4f09) ^ (a >> 16); + + return a; +} + +function linearProbingGet(hash, keys, values, key) { + var n = keys.length, + j = hash(key) & (n - 1), + i = j; + + var c; + + while (true) { + c = keys[i]; + + if (c === key) + return values[i]; + + else if (c === 0) + return; + + // Handling wrapping around + i += 1; + i %= n; + + // Full turn + if (i === j) + return; + } +} + +function linearProbingHas(hash, keys, key) { + var n = keys.length, + j = hash(key) & (n - 1), + i = j; + + var c; + + while (true) { + c = keys[i]; + + if (c === key) + return true; + + else if (c === 0) + return false; + + // Handling wrapping around + i += 1; + i %= n; + + // Full turn + if (i === j) + return false; + } +} + +function linearProbingSet(hash, keys, values, key, value) { + var n = keys.length, + j = hash(key) & (n - 1), + i = j; + + var c; + + while (true) { + c = keys[i]; + + if (c === 0 || c === key) + break; + + // Handling wrapping around + i += 1; + i %= n; + + // Full turn + if (i === j) + throw new Error('mnemonist/utils/hash-tables.linearProbingSet: table is full.'); + } + + keys[i] = key; + values[i] = value; +} + +module.exports = { + hashes: { + jenkinsInt32: jenkinsInt32 + }, + linearProbing: { + get: linearProbingGet, + has: linearProbingHas, + set: linearProbingSet + } +}; diff --git a/amplify/functions/downloadDocument/node_modules/mnemonist/utils/iterables.js b/amplify/functions/downloadDocument/node_modules/mnemonist/utils/iterables.js new file mode 100644 index 0000000..d95f701 --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/mnemonist/utils/iterables.js @@ -0,0 +1,93 @@ +/** + * Mnemonist Iterable Function + * ============================ + * + * Harmonized iteration helpers over mixed iterable targets. 
+ */ +var forEach = require('obliterator/foreach'); + +var typed = require('./typed-arrays.js'); + +/** + * Function used to determine whether the given object supports array-like + * random access. + * + * @param {any} target - Target object. + * @return {boolean} + */ +function isArrayLike(target) { + return Array.isArray(target) || typed.isTypedArray(target); +} + +/** + * Function used to guess the length of the structure over which we are going + * to iterate. + * + * @param {any} target - Target object. + * @return {number|undefined} + */ +function guessLength(target) { + if (typeof target.length === 'number') + return target.length; + + if (typeof target.size === 'number') + return target.size; + + return; +} + +/** + * Function used to convert an iterable to an array. + * + * @param {any} target - Iteration target. + * @return {array} + */ +function toArray(target) { + var l = guessLength(target); + + var array = typeof l === 'number' ? new Array(l) : []; + + var i = 0; + + // TODO: we could optimize when given target is array like + forEach(target, function(value) { + array[i++] = value; + }); + + return array; +} + +/** + * Same as above but returns a supplementary indices array. + * + * @param {any} target - Iteration target. + * @return {array} + */ +function toArrayWithIndices(target) { + var l = guessLength(target); + + var IndexArray = typeof l === 'number' ? + typed.getPointerArray(l) : + Array; + + var array = typeof l === 'number' ? new Array(l) : []; + var indices = typeof l === 'number' ? new IndexArray(l) : []; + + var i = 0; + + // TODO: we could optimize when given target is array like + forEach(target, function(value) { + array[i] = value; + indices[i] = i++; + }); + + return [array, indices]; +} + +/** + * Exporting. 
+ */ +exports.isArrayLike = isArrayLike; +exports.guessLength = guessLength; +exports.toArray = toArray; +exports.toArrayWithIndices = toArrayWithIndices; diff --git a/amplify/functions/downloadDocument/node_modules/mnemonist/utils/merge.js b/amplify/functions/downloadDocument/node_modules/mnemonist/utils/merge.js new file mode 100644 index 0000000..bf40d45 --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/mnemonist/utils/merge.js @@ -0,0 +1,563 @@ +/* eslint no-constant-condition: 0 */ +/** + * Mnemonist Merge Helpers + * ======================== + * + * Various merge algorithms used to handle sorted lists. Note that the given + * functions are optimized and won't accept mixed arguments. + * + * Note: maybe this piece of code belong to sortilege, along with binary-search. + */ +var typed = require('./typed-arrays.js'), + isArrayLike = require('./iterables.js').isArrayLike, + binarySearch = require('./binary-search.js'), + FibonacciHeap = require('../fibonacci-heap.js'); + +// TODO: update to use exponential search +// TODO: when not knowing final length => should use plain arrays rather than +// same type as input + +/** + * Merge two sorted array-like structures into one. + * + * @param {array} a - First array. + * @param {array} b - Second array. 
+ * @return {array} + */ +function mergeArrays(a, b) { + + // One of the arrays is empty + if (a.length === 0) + return b.slice(); + if (b.length === 0) + return a.slice(); + + // Finding min array + var tmp; + + if (a[0] > b[0]) { + tmp = a; + a = b; + b = tmp; + } + + // If array have non overlapping ranges, we can just concatenate them + var aEnd = a[a.length - 1], + bStart = b[0]; + + if (aEnd <= bStart) { + if (typed.isTypedArray(a)) + return typed.concat(a, b); + return a.concat(b); + } + + // Initializing target + var array = new a.constructor(a.length + b.length); + + // Iterating until we overlap + var i, l, v; + + for (i = 0, l = a.length; i < l; i++) { + v = a[i]; + + if (v <= bStart) + array[i] = v; + else + break; + } + + // Handling overlap + var aPointer = i, + aLength = a.length, + bPointer = 0, + bLength = b.length, + aHead, + bHead; + + while (aPointer < aLength && bPointer < bLength) { + aHead = a[aPointer]; + bHead = b[bPointer]; + + if (aHead <= bHead) { + array[i++] = aHead; + aPointer++; + } + else { + array[i++] = bHead; + bPointer++; + } + } + + // Filling + while (aPointer < aLength) + array[i++] = a[aPointer++]; + while (bPointer < bLength) + array[i++] = b[bPointer++]; + + return array; +} + +/** + * Perform the union of two already unique sorted array-like structures into one. + * + * @param {array} a - First array. + * @param {array} b - Second array. 
+ * @return {array} + */ +function unionUniqueArrays(a, b) { + + // One of the arrays is empty + if (a.length === 0) + return b.slice(); + if (b.length === 0) + return a.slice(); + + // Finding min array + var tmp; + + if (a[0] > b[0]) { + tmp = a; + a = b; + b = tmp; + } + + // If array have non overlapping ranges, we can just concatenate them + var aEnd = a[a.length - 1], + bStart = b[0]; + + if (aEnd < bStart) { + if (typed.isTypedArray(a)) + return typed.concat(a, b); + return a.concat(b); + } + + // Initializing target + var array = new a.constructor(); + + // Iterating until we overlap + var i, l, v; + + for (i = 0, l = a.length; i < l; i++) { + v = a[i]; + + if (v < bStart) + array.push(v); + else + break; + } + + // Handling overlap + var aPointer = i, + aLength = a.length, + bPointer = 0, + bLength = b.length, + aHead, + bHead; + + while (aPointer < aLength && bPointer < bLength) { + aHead = a[aPointer]; + bHead = b[bPointer]; + + if (aHead <= bHead) { + + if (array.length === 0 || array[array.length - 1] !== aHead) + array.push(aHead); + + aPointer++; + } + else { + if (array.length === 0 || array[array.length - 1] !== bHead) + array.push(bHead); + + bPointer++; + } + } + + // Filling + // TODO: it's possible to optimize a bit here, since the condition is only + // relevant the first time + while (aPointer < aLength) { + aHead = a[aPointer++]; + + if (array.length === 0 || array[array.length - 1] !== aHead) + array.push(aHead); + } + while (bPointer < bLength) { + bHead = b[bPointer++]; + + if (array.length === 0 || array[array.length - 1] !== bHead) + array.push(bHead); + } + + return array; +} + +/** + * Perform the intersection of two already unique sorted array-like structures into one. + * + * @param {array} a - First array. + * @param {array} b - Second array. 
+ * @return {array} + */ +exports.intersectionUniqueArrays = function(a, b) { + + // One of the arrays is empty + if (a.length === 0 || b.length === 0) + return new a.constructor(0); + + // Finding min array + var tmp; + + if (a[0] > b[0]) { + tmp = a; + a = b; + b = tmp; + } + + // If array have non overlapping ranges, there is no intersection + var aEnd = a[a.length - 1], + bStart = b[0]; + + if (aEnd < bStart) + return new a.constructor(0); + + // Initializing target + var array = new a.constructor(); + + // Handling overlap + var aPointer = binarySearch.lowerBound(a, bStart), + aLength = a.length, + bPointer = 0, + bLength = binarySearch.upperBound(b, aEnd), + aHead, + bHead; + + while (aPointer < aLength && bPointer < bLength) { + aHead = a[aPointer]; + bHead = b[bPointer]; + + if (aHead < bHead) { + aPointer = binarySearch.lowerBound(a, bHead, aPointer + 1); + } + else if (aHead > bHead) { + bPointer = binarySearch.lowerBound(b, aHead, bPointer + 1); + } + else { + array.push(aHead); + aPointer++; + bPointer++; + } + } + + return array; +}; + +/** + * Merge k sorted array-like structures into one. + * + * @param {array} arrays - Arrays to merge. + * @return {array} + */ +function kWayMergeArrays(arrays) { + var length = 0, + max = -Infinity, + al, + i, + l; + + var filtered = []; + + for (i = 0, l = arrays.length; i < l; i++) { + al = arrays[i].length; + + if (al === 0) + continue; + + filtered.push(arrays[i]); + + length += al; + + if (al > max) + max = al; + } + + if (filtered.length === 0) + return new arrays[0].constructor(0); + + if (filtered.length === 1) + return filtered[0].slice(); + + if (filtered.length === 2) + return mergeArrays(filtered[0], filtered[1]); + + arrays = filtered; + + var array = new arrays[0].constructor(length); + + var PointerArray = typed.getPointerArray(max); + + var pointers = new PointerArray(arrays.length); + + // TODO: benchmark vs. 
a binomial heap + var heap = new FibonacciHeap(function(a, b) { + a = arrays[a][pointers[a]]; + b = arrays[b][pointers[b]]; + + if (a < b) + return -1; + + if (a > b) + return 1; + + return 0; + }); + + for (i = 0; i < l; i++) + heap.push(i); + + i = 0; + + var p, + v; + + while (heap.size) { + p = heap.pop(); + v = arrays[p][pointers[p]++]; + array[i++] = v; + + if (pointers[p] < arrays[p].length) + heap.push(p); + } + + return array; +} + +/** + * Perform the union of k sorted unique array-like structures into one. + * + * @param {array} arrays - Arrays to merge. + * @return {array} + */ +function kWayUnionUniqueArrays(arrays) { + var max = -Infinity, + al, + i, + l; + + var filtered = []; + + for (i = 0, l = arrays.length; i < l; i++) { + al = arrays[i].length; + + if (al === 0) + continue; + + filtered.push(arrays[i]); + + if (al > max) + max = al; + } + + if (filtered.length === 0) + return new arrays[0].constructor(0); + + if (filtered.length === 1) + return filtered[0].slice(); + + if (filtered.length === 2) + return unionUniqueArrays(filtered[0], filtered[1]); + + arrays = filtered; + + var array = new arrays[0].constructor(); + + var PointerArray = typed.getPointerArray(max); + + var pointers = new PointerArray(arrays.length); + + // TODO: benchmark vs. a binomial heap + var heap = new FibonacciHeap(function(a, b) { + a = arrays[a][pointers[a]]; + b = arrays[b][pointers[b]]; + + if (a < b) + return -1; + + if (a > b) + return 1; + + return 0; + }); + + for (i = 0; i < l; i++) + heap.push(i); + + var p, + v; + + while (heap.size) { + p = heap.pop(); + v = arrays[p][pointers[p]++]; + + if (array.length === 0 || array[array.length - 1] !== v) + array.push(v); + + if (pointers[p] < arrays[p].length) + heap.push(p); + } + + return array; +} + +/** + * Perform the intersection of k sorted array-like structures into one. + * + * @param {array} arrays - Arrays to merge. 
+ * @return {array} + */ +exports.kWayIntersectionUniqueArrays = function(arrays) { + var max = -Infinity, + maxStart = -Infinity, + minEnd = Infinity, + first, + last, + al, + i, + l; + + for (i = 0, l = arrays.length; i < l; i++) { + al = arrays[i].length; + + // If one of the arrays is empty, so is the intersection + if (al === 0) + return []; + + if (al > max) + max = al; + + first = arrays[i][0]; + last = arrays[i][al - 1]; + + if (first > maxStart) + maxStart = first; + + if (last < minEnd) + minEnd = last; + } + + // Full overlap is impossible + if (maxStart > minEnd) + return []; + + // Only one value + if (maxStart === minEnd) + return [maxStart]; + + // NOTE: trying to outsmart I(D,I(C,I(A,B))) is pointless unfortunately... + // NOTE: I tried to be very clever about bounds but it does not seem + // to improve the performance of the algorithm. + var a, b, + array = arrays[0], + aPointer, + bPointer, + aLimit, + bLimit, + aHead, + bHead, + start = maxStart; + + for (i = 1; i < l; i++) { + a = array; + b = arrays[i]; + + // Change that to `[]` and observe some perf drops on V8... + array = new Array(); + + aPointer = 0; + bPointer = binarySearch.lowerBound(b, start); + + aLimit = a.length; + bLimit = b.length; + + while (aPointer < aLimit && bPointer < bLimit) { + aHead = a[aPointer]; + bHead = b[bPointer]; + + if (aHead < bHead) { + aPointer = binarySearch.lowerBound(a, bHead, aPointer + 1); + } + else if (aHead > bHead) { + bPointer = binarySearch.lowerBound(b, aHead, bPointer + 1); + } + else { + array.push(aHead); + aPointer++; + bPointer++; + } + } + + if (array.length === 0) + return array; + + start = array[0]; + } + + return array; +}; + +/** + * Variadic merging all of the given arrays. 
+ * + * @param {...array} + * @return {array} + */ +exports.merge = function() { + if (arguments.length === 2) { + if (isArrayLike(arguments[0])) + return mergeArrays(arguments[0], arguments[1]); + } + else { + if (isArrayLike(arguments[0])) + return kWayMergeArrays(arguments); + } + + return null; +}; + +/** + * Variadic function performing the union of all the given unique arrays. + * + * @param {...array} + * @return {array} + */ +exports.unionUnique = function() { + if (arguments.length === 2) { + if (isArrayLike(arguments[0])) + return unionUniqueArrays(arguments[0], arguments[1]); + } + else { + if (isArrayLike(arguments[0])) + return kWayUnionUniqueArrays(arguments); + } + + return null; +}; + +/** + * Variadic function performing the intersection of all the given unique arrays. + * + * @param {...array} + * @return {array} + */ +exports.intersectionUnique = function() { + if (arguments.length === 2) { + if (isArrayLike(arguments[0])) + return exports.intersectionUniqueArrays(arguments[0], arguments[1]); + } + else { + if (isArrayLike(arguments[0])) + return exports.kWayIntersectionUniqueArrays(arguments); + } + + return null; +}; diff --git a/amplify/functions/downloadDocument/node_modules/mnemonist/utils/murmurhash3.js b/amplify/functions/downloadDocument/node_modules/mnemonist/utils/murmurhash3.js new file mode 100644 index 0000000..c09ec8a --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/mnemonist/utils/murmurhash3.js @@ -0,0 +1,87 @@ +/* eslint no-fallthrough: 0 */ +/** + * Mnemonist MurmurHash 3 + * ======================= + * + * Straightforward implementation of the third version of MurmurHash. + * + * Note: this piece of code belong to haschisch. + */ + +/** + * Various helpers. 
+ */ +function mul32(a, b) { + return (a & 0xffff) * b + (((a >>> 16) * b & 0xffff) << 16) & 0xffffffff; +} + +function sum32(a, b) { + return (a & 0xffff) + (b >>> 16) + (((a >>> 16) + b & 0xffff) << 16) & 0xffffffff; +} + +function rotl32(a, b) { + return (a << b) | (a >>> (32 - b)); +} + +/** + * MumurHash3 function. + * + * @param {number} seed - Seed. + * @param {ByteArray} data - Data. + */ +module.exports = function murmurhash3(seed, data) { + var c1 = 0xcc9e2d51, + c2 = 0x1b873593, + r1 = 15, + r2 = 13, + m = 5, + n = 0x6b64e654; + + var hash = seed, + k1, + i, + l; + + for (i = 0, l = data.length - 4; i <= l; i += 4) { + k1 = ( + data[i] | + (data[i + 1] << 8) | + (data[i + 2] << 16) | + (data[i + 3] << 24) + ); + + k1 = mul32(k1, c1); + k1 = rotl32(k1, r1); + k1 = mul32(k1, c2); + + hash ^= k1; + hash = rotl32(hash, r2); + hash = mul32(hash, m); + hash = sum32(hash, n); + } + + k1 = 0; + + switch (data.length & 3) { + case 3: + k1 ^= data[i + 2] << 16; + case 2: + k1 ^= data[i + 1] << 8; + case 1: + k1 ^= data[i]; + k1 = mul32(k1, c1); + k1 = rotl32(k1, r1); + k1 = mul32(k1, c2); + hash ^= k1; + default: + } + + hash ^= data.length; + hash ^= hash >>> 16; + hash = mul32(hash, 0x85ebca6b); + hash ^= hash >>> 13; + hash = mul32(hash, 0xc2b2ae35); + hash ^= hash >>> 16; + + return hash >>> 0; +}; diff --git a/amplify/functions/downloadDocument/node_modules/mnemonist/utils/typed-arrays.js b/amplify/functions/downloadDocument/node_modules/mnemonist/utils/typed-arrays.js new file mode 100644 index 0000000..474a2cb --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/mnemonist/utils/typed-arrays.js @@ -0,0 +1,187 @@ +/** + * Mnemonist Typed Array Helpers + * ============================== + * + * Miscellaneous helpers related to typed arrays. + */ + +/** + * When using an unsigned integer array to store pointers, one might want to + * choose the optimal word size in regards to the actual numbers of pointers + * to store. 
+ * + * This helpers does just that. + * + * @param {number} size - Expected size of the array to map. + * @return {TypedArray} + */ +var MAX_8BIT_INTEGER = Math.pow(2, 8) - 1, + MAX_16BIT_INTEGER = Math.pow(2, 16) - 1, + MAX_32BIT_INTEGER = Math.pow(2, 32) - 1; + +var MAX_SIGNED_8BIT_INTEGER = Math.pow(2, 7) - 1, + MAX_SIGNED_16BIT_INTEGER = Math.pow(2, 15) - 1, + MAX_SIGNED_32BIT_INTEGER = Math.pow(2, 31) - 1; + +exports.getPointerArray = function(size) { + var maxIndex = size - 1; + + if (maxIndex <= MAX_8BIT_INTEGER) + return Uint8Array; + + if (maxIndex <= MAX_16BIT_INTEGER) + return Uint16Array; + + if (maxIndex <= MAX_32BIT_INTEGER) + return Uint32Array; + + return Float64Array; +}; + +exports.getSignedPointerArray = function(size) { + var maxIndex = size - 1; + + if (maxIndex <= MAX_SIGNED_8BIT_INTEGER) + return Int8Array; + + if (maxIndex <= MAX_SIGNED_16BIT_INTEGER) + return Int16Array; + + if (maxIndex <= MAX_SIGNED_32BIT_INTEGER) + return Int32Array; + + return Float64Array; +}; + +/** + * Function returning the minimal type able to represent the given number. + * + * @param {number} value - Value to test. + * @return {TypedArrayClass} + */ +exports.getNumberType = function(value) { + + // <= 32 bits itnteger? + if (value === (value | 0)) { + + // Negative + if (Math.sign(value) === -1) { + if (value <= 127 && value >= -128) + return Int8Array; + + if (value <= 32767 && value >= -32768) + return Int16Array; + + return Int32Array; + } + else { + + if (value <= 255) + return Uint8Array; + + if (value <= 65535) + return Uint16Array; + + return Uint32Array; + } + } + + // 53 bits integer & floats + // NOTE: it's kinda hard to tell whether we could use 32bits or not... + return Float64Array; +}; + +/** + * Function returning the minimal type able to represent the given array + * of JavaScript numbers. + * + * @param {array} array - Array to represent. + * @param {function} getter - Optional getter. 
+ * @return {TypedArrayClass} + */ +var TYPE_PRIORITY = { + Uint8Array: 1, + Int8Array: 2, + Uint16Array: 3, + Int16Array: 4, + Uint32Array: 5, + Int32Array: 6, + Float32Array: 7, + Float64Array: 8 +}; + +// TODO: make this a one-shot for one value +exports.getMinimalRepresentation = function(array, getter) { + var maxType = null, + maxPriority = 0, + p, + t, + v, + i, + l; + + for (i = 0, l = array.length; i < l; i++) { + v = getter ? getter(array[i]) : array[i]; + t = exports.getNumberType(v); + p = TYPE_PRIORITY[t.name]; + + if (p > maxPriority) { + maxPriority = p; + maxType = t; + } + } + + return maxType; +}; + +/** + * Function returning whether the given value is a typed array. + * + * @param {any} value - Value to test. + * @return {boolean} + */ +exports.isTypedArray = function(value) { + return typeof ArrayBuffer !== 'undefined' && ArrayBuffer.isView(value); +}; + +/** + * Function used to concat byte arrays. + * + * @param {...ByteArray} + * @return {ByteArray} + */ +exports.concat = function() { + var length = 0, + i, + o, + l; + + for (i = 0, l = arguments.length; i < l; i++) + length += arguments[i].length; + + var array = new (arguments[0].constructor)(length); + + for (i = 0, o = 0; i < l; i++) { + array.set(arguments[i], o); + o += arguments[i].length; + } + + return array; +}; + +/** + * Function used to initialize a byte array of indices. + * + * @param {number} length - Length of target. 
+ * @return {ByteArray} + */ +exports.indices = function(length) { + var PointerArray = exports.getPointerArray(length); + + var array = new PointerArray(length); + + for (var i = 0; i < length; i++) + array[i] = i; + + return array; +}; diff --git a/amplify/functions/downloadDocument/node_modules/mnemonist/utils/types.d.ts b/amplify/functions/downloadDocument/node_modules/mnemonist/utils/types.d.ts new file mode 100644 index 0000000..1a199d6 --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/mnemonist/utils/types.d.ts @@ -0,0 +1,16 @@ +/** + * Mnemonist Generic Types + * ======================== + * + * Collection of types used throughout the library. + */ +export interface IArrayLike { + length: number; + slice(from: number, to?: number): IArrayLike; +} + +export type ArrayLike = IArrayLike | ArrayBuffer; + +export interface IArrayLikeConstructor { + new(...args: any[]): ArrayLike; +} diff --git a/amplify/functions/downloadDocument/node_modules/mnemonist/vector.d.ts b/amplify/functions/downloadDocument/node_modules/mnemonist/vector.d.ts new file mode 100644 index 0000000..414f969 --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/mnemonist/vector.d.ts @@ -0,0 +1,81 @@ +/** + * Mnemonist Vector Typings + * ========================= + */ +import {IArrayLikeConstructor} from './utils/types'; + +type VectorOptions = { + initialLength?: number; + initialCapacity?: number; + policy?: (capacity: number) => number; +} + +export default class Vector implements Iterable { + + // Members + capacity: number; + length: number; + size: number; + + // Constructor + constructor(ArrayClass: IArrayLikeConstructor, length: number | VectorOptions); + + // Methods + clear(): void; + set(index: number, value: number): this; + reallocate(capacity: number): this; + grow(capacity?: number): this; + resize(length: number): this; + push(value: number): number; + pop(): number | undefined; + get(index: number): number; + forEach(callback: (index: number, 
value: number, set: this) => void, scope?: any): void; + values(): IterableIterator; + entries(): IterableIterator<[number, number]>; + [Symbol.iterator](): IterableIterator; + inspect(): any; + toJSON(): any; + + // Statics + static from(iterable: Iterable | {[key: string] : I}, ArrayClass: IArrayLikeConstructor, capacity?: number): Vector; +} + +declare class TypedVector implements Iterable { + + // Members + capacity: number; + length: number; + size: number; + + // Constructor + constructor(length: number | VectorOptions); + + // Methods + clear(): void; + set(index: number, value: number): this; + reallocate(capacity: number): this; + grow(capacity?: number): this; + resize(length: number): this; + push(value: number): number; + pop(): number | undefined; + get(index: number): number; + forEach(callback: (index: number, value: number, set: this) => void, scope?: any): void; + values(): IterableIterator; + entries(): IterableIterator<[number, number]>; + [Symbol.iterator](): IterableIterator; + inspect(): any; + toJSON(): any; + + // Statics + static from(iterable: Iterable | {[key: string] : I}, capacity?: number): TypedVector; +} + +export class Int8Vector extends TypedVector {} +export class Uint8Vector extends TypedVector {} +export class Uint8ClampedVector extends TypedVector {} +export class Int16Vector extends TypedVector {} +export class Uint16Vector extends TypedVector {} +export class Int32Vector extends TypedVector {} +export class Uint32Vector extends TypedVector {} +export class Float32Vector extends TypedVector {} +export class Float64Array extends TypedVector {} diff --git a/amplify/functions/downloadDocument/node_modules/mnemonist/vector.js b/amplify/functions/downloadDocument/node_modules/mnemonist/vector.js new file mode 100644 index 0000000..467bf20 --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/mnemonist/vector.js @@ -0,0 +1,373 @@ +/** + * Mnemonist Vector + * ================= + * + * Abstract implementation of a 
growing array that can be used with JavaScript + * typed arrays and other array-like structures. + * + * Note: should try and use ArrayBuffer.transfer when it will be available. + */ +var Iterator = require('obliterator/iterator'), + forEach = require('obliterator/foreach'), + iterables = require('./utils/iterables.js'), + typed = require('./utils/typed-arrays.js'); + +/** + * Defaults. + */ +var DEFAULT_GROWING_POLICY = function(currentCapacity) { + return Math.max(1, Math.ceil(currentCapacity * 1.5)); +}; + +var pointerArrayFactory = function(capacity) { + var PointerArray = typed.getPointerArray(capacity); + + return new PointerArray(capacity); +}; + +/** + * Vector. + * + * @constructor + * @param {function} ArrayClass - An array constructor. + * @param {number|object} initialCapacityOrOptions - Self-explanatory: + * @param {number} initialCapacity - Initial capacity. + * @param {number} initialLength - Initial length. + * @param {function} policy - Allocation policy. + */ +function Vector(ArrayClass, initialCapacityOrOptions) { + if (arguments.length < 1) + throw new Error('mnemonist/vector: expecting at least a byte array constructor.'); + + var initialCapacity = initialCapacityOrOptions || 0, + policy = DEFAULT_GROWING_POLICY, + initialLength = 0, + factory = false; + + if (typeof initialCapacityOrOptions === 'object') { + initialCapacity = initialCapacityOrOptions.initialCapacity || 0; + initialLength = initialCapacityOrOptions.initialLength || 0; + policy = initialCapacityOrOptions.policy || policy; + factory = initialCapacityOrOptions.factory === true; + } + + this.factory = factory ? ArrayClass : null; + this.ArrayClass = ArrayClass; + this.length = initialLength; + this.capacity = Math.max(initialLength, initialCapacity); + this.policy = policy; + this.array = new ArrayClass(this.capacity); +} + +/** + * Method used to set a value. + * + * @param {number} index - Index to edit. + * @param {any} value - Value. 
+ * @return {Vector} + */ +Vector.prototype.set = function(index, value) { + + // Out of bounds? + if (this.length < index) + throw new Error('Vector(' + this.ArrayClass.name + ').set: index out of bounds.'); + + // Updating value + this.array[index] = value; + + return this; +}; + +/** + * Method used to get a value. + * + * @param {number} index - Index to retrieve. + * @return {any} + */ +Vector.prototype.get = function(index) { + if (this.length < index) + return undefined; + + return this.array[index]; +}; + +/** + * Method used to apply the growing policy. + * + * @param {number} [override] - Override capacity. + * @return {number} + */ +Vector.prototype.applyPolicy = function(override) { + var newCapacity = this.policy(override || this.capacity); + + if (typeof newCapacity !== 'number' || newCapacity < 0) + throw new Error('mnemonist/vector.applyPolicy: policy returned an invalid value (expecting a positive integer).'); + + if (newCapacity <= this.capacity) + throw new Error('mnemonist/vector.applyPolicy: policy returned a less or equal capacity to allocate.'); + + // TODO: we should probably check that the returned number is an integer + return newCapacity; +}; + +/** + * Method used to reallocate the underlying array. + * + * @param {number} capacity - Target capacity. + * @return {Vector} + */ +Vector.prototype.reallocate = function(capacity) { + if (capacity === this.capacity) + return this; + + var oldArray = this.array; + + if (capacity < this.length) + this.length = capacity; + + if (capacity > this.capacity) { + if (this.factory === null) + this.array = new this.ArrayClass(capacity); + else + this.array = this.factory(capacity); + + if (typed.isTypedArray(this.array)) { + this.array.set(oldArray, 0); + } + else { + for (var i = 0, l = this.length; i < l; i++) + this.array[i] = oldArray[i]; + } + } + else { + this.array = oldArray.slice(0, capacity); + } + + this.capacity = capacity; + + return this; +}; + +/** + * Method used to grow the array. 
+ * + * @param {number} [capacity] - Optional capacity to match. + * @return {Vector} + */ +Vector.prototype.grow = function(capacity) { + var newCapacity; + + if (typeof capacity === 'number') { + + if (this.capacity >= capacity) + return this; + + // We need to match the given capacity + newCapacity = this.capacity; + + while (newCapacity < capacity) + newCapacity = this.applyPolicy(newCapacity); + + this.reallocate(newCapacity); + + return this; + } + + // We need to run the policy once + newCapacity = this.applyPolicy(); + this.reallocate(newCapacity); + + return this; +}; + +/** + * Method used to resize the array. Won't deallocate. + * + * @param {number} length - Target length. + * @return {Vector} + */ +Vector.prototype.resize = function(length) { + if (length === this.length) + return this; + + if (length < this.length) { + this.length = length; + return this; + } + + this.length = length; + this.reallocate(length); + + return this; +}; + +/** + * Method used to push a value into the array. + * + * @param {any} value - Value to push. + * @return {number} - Length of the array. + */ +Vector.prototype.push = function(value) { + if (this.capacity === this.length) + this.grow(); + + this.array[this.length++] = value; + + return this.length; +}; + +/** + * Method used to pop the last value of the array. + * + * @return {number} - The popped value. + */ +Vector.prototype.pop = function() { + if (this.length === 0) + return; + + return this.array[--this.length]; +}; + +/** + * Method used to create an iterator over a vector's values. + * + * @return {Iterator} + */ +Vector.prototype.values = function() { + var items = this.array, + l = this.length, + i = 0; + + return new Iterator(function() { + if (i >= l) + return { + done: true + }; + + var value = items[i]; + i++; + + return { + value: value, + done: false + }; + }); +}; + +/** + * Method used to create an iterator over a vector's entries. 
+ * + * @return {Iterator} + */ +Vector.prototype.entries = function() { + var items = this.array, + l = this.length, + i = 0; + + return new Iterator(function() { + if (i >= l) + return { + done: true + }; + + var value = items[i]; + + return { + value: [i++, value], + done: false + }; + }); +}; + +/** + * Attaching the #.values method to Symbol.iterator if possible. + */ +if (typeof Symbol !== 'undefined') + Vector.prototype[Symbol.iterator] = Vector.prototype.values; + +/** + * Convenience known methods. + */ +Vector.prototype.inspect = function() { + var proxy = this.array.slice(0, this.length); + + proxy.type = this.array.constructor.name; + proxy.items = this.length; + proxy.capacity = this.capacity; + + // Trick so that node displays the name of the constructor + Object.defineProperty(proxy, 'constructor', { + value: Vector, + enumerable: false + }); + + return proxy; +}; + +if (typeof Symbol !== 'undefined') + Vector.prototype[Symbol.for('nodejs.util.inspect.custom')] = Vector.prototype.inspect; + +/** + * Static @.from function taking an arbitrary iterable & converting it into + * a vector. + * + * @param {Iterable} iterable - Target iterable. + * @param {function} ArrayClass - Byte array class. + * @param {number} capacity - Desired capacity. + * @return {Vector} + */ +Vector.from = function(iterable, ArrayClass, capacity) { + + if (arguments.length < 3) { + + // Attempting to guess the needed capacity + capacity = iterables.guessLength(iterable); + + if (typeof capacity !== 'number') + throw new Error('mnemonist/vector.from: could not guess iterable length. Please provide desired capacity as last argument.'); + } + + var vector = new Vector(ArrayClass, capacity); + + forEach(iterable, function(value) { + vector.push(value); + }); + + return vector; +}; + +/** + * Exporting. 
+ */ +function subClass(ArrayClass) { + var SubClass = function(initialCapacityOrOptions) { + Vector.call(this, ArrayClass, initialCapacityOrOptions); + }; + + for (var k in Vector.prototype) { + if (Vector.prototype.hasOwnProperty(k)) + SubClass.prototype[k] = Vector.prototype[k]; + } + + SubClass.from = function(iterable, capacity) { + return Vector.from(iterable, ArrayClass, capacity); + }; + + if (typeof Symbol !== 'undefined') + SubClass.prototype[Symbol.iterator] = SubClass.prototype.values; + + return SubClass; +} + +Vector.Int8Vector = subClass(Int8Array); +Vector.Uint8Vector = subClass(Uint8Array); +Vector.Uint8ClampedVector = subClass(Uint8ClampedArray); +Vector.Int16Vector = subClass(Int16Array); +Vector.Uint16Vector = subClass(Uint16Array); +Vector.Int32Vector = subClass(Int32Array); +Vector.Uint32Vector = subClass(Uint32Array); +Vector.Float32Vector = subClass(Float32Array); +Vector.Float64Vector = subClass(Float64Array); +Vector.PointerVector = subClass(pointerArrayFactory); + +module.exports = Vector; diff --git a/amplify/functions/downloadDocument/node_modules/mnemonist/vp-tree.d.ts b/amplify/functions/downloadDocument/node_modules/mnemonist/vp-tree.d.ts new file mode 100644 index 0000000..2c03354 --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/mnemonist/vp-tree.d.ts @@ -0,0 +1,27 @@ +/** + * Mnemonist VPTree Typings + * ========================= + */ +type DistanceFunction = (a: T, b: T) => number; +type QueryMatch = {distance: number, item: T}; + +export default class VPTree { + + // Members + distance: DistanceFunction; + size: number; + D: number; + + // Constructor + constructor(distance: DistanceFunction, items: Iterable); + + // Methods + nearestNeighbors(k: number, query: T): Array>; + neighbors(radius: number, query: T): Array>; + + // Statics + static from( + iterable: Iterable | {[key: string] : I}, + distance: DistanceFunction + ): VPTree; +} diff --git 
a/amplify/functions/downloadDocument/node_modules/mnemonist/vp-tree.js b/amplify/functions/downloadDocument/node_modules/mnemonist/vp-tree.js new file mode 100644 index 0000000..2acd01e --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/mnemonist/vp-tree.js @@ -0,0 +1,367 @@ +/** + * Mnemonist Vantage Point Tree + * ============================= + * + * JavaScript implementation of the Vantage Point Tree storing the binary + * tree as a flat byte array. + * + * Note that a VPTree has worst cases and is likely not to be perfectly + * balanced because of median ambiguity. It is therefore not suitable + * for hairballs and tiny datasets. + * + * [Reference]: + * https://en.wikipedia.org/wiki/Vantage-point_tree + */ +var iterables = require('./utils/iterables.js'), + typed = require('./utils/typed-arrays.js'), + inplaceQuickSortIndices = require('./sort/quick.js').inplaceQuickSortIndices, + lowerBoundIndices = require('./utils/binary-search.js').lowerBoundIndices, + Heap = require('./heap.js'); + +var getPointerArray = typed.getPointerArray; + +// TODO: implement vantage point selection techniques (by swapping with last) +// TODO: is this required to implement early termination for k <= size? + +/** + * Heap comparator used by the #.nearestNeighbors method. + */ +function comparator(a, b) { + if (a.distance < b.distance) + return 1; + + if (a.distance > b.distance) + return -1; + + return 0; +} + +/** + * Function used to create the binary tree. + * + * @param {function} distance - Distance function to use. + * @param {array} items - Items to index (will be mutated). + * @param {array} indices - Indexes of the items. + * @return {Float64Array} - The flat binary tree. 
+ */ +function createBinaryTree(distance, items, indices) { + var N = indices.length; + + var PointerArray = getPointerArray(N); + + var C = 0, + nodes = new PointerArray(N), + lefts = new PointerArray(N), + rights = new PointerArray(N), + mus = new Float64Array(N), + stack = [0, 0, N], + distances = new Float64Array(N), + nodeIndex, + vantagePoint, + medianIndex, + lo, + hi, + mid, + mu, + i, + l; + + while (stack.length) { + hi = stack.pop(); + lo = stack.pop(); + nodeIndex = stack.pop(); + + // Getting our vantage point + vantagePoint = indices[hi - 1]; + hi--; + + l = hi - lo; + + // Storing vantage point + nodes[nodeIndex] = vantagePoint; + + // We are in a leaf + if (l === 0) + continue; + + // We only have two elements, the second one has to go right + if (l === 1) { + + // We put remaining item to the right + mu = distance(items[vantagePoint], items[indices[lo]]); + + mus[nodeIndex] = mu; + + // Right + C++; + rights[nodeIndex] = C; + nodes[C] = indices[lo]; + + continue; + } + + // Computing distance from vantage point to other points + for (i = lo; i < hi; i++) + distances[indices[i]] = distance(items[vantagePoint], items[indices[i]]); + + inplaceQuickSortIndices(distances, indices, lo, hi); + + // Finding median of distances + medianIndex = lo + (l / 2) - 1; + + // Need to interpolate? 
+ if (medianIndex === (medianIndex | 0)) { + mu = ( + distances[indices[medianIndex]] + + distances[indices[medianIndex + 1]] + ) / 2; + } + else { + mu = distances[indices[Math.ceil(medianIndex)]]; + } + + // Storing mu + mus[nodeIndex] = mu; + + mid = lowerBoundIndices(distances, indices, mu, lo, hi); + + // console.log('Vantage point', items[vantagePoint], vantagePoint); + // console.log('mu =', mu); + // console.log('lo =', lo); + // console.log('hi =', hi); + // console.log('mid =', mid); + + // console.log('need to split', Array.from(indices).slice(lo, hi).map(i => { + // return [distances[i], distance(items[vantagePoint], items[i]), items[i]]; + // })); + + // Right + if (hi - mid > 0) { + C++; + rights[nodeIndex] = C; + stack.push(C, mid, hi); + // console.log('Went right with ', Array.from(indices).slice(mid, hi).map(i => { + // return [distances[i], distance(items[vantagePoint], items[i]), items[i]]; + // })); + } + + // Left + if (mid - lo > 0) { + C++; + lefts[nodeIndex] = C; + stack.push(C, lo, mid); + // console.log('Went left with', Array.from(indices).slice(lo, mid).map(i => { + // return [distances[i], distance(items[vantagePoint], items[i]), items[i]]; + // })); + } + + // console.log(); + } + + return { + nodes: nodes, + lefts: lefts, + rights: rights, + mus: mus + }; +} + +/** + * VPTree. + * + * @constructor + * @param {function} distance - Distance function to use. + * @param {Iterable} items - Items to store. + */ +function VPTree(distance, items) { + if (typeof distance !== 'function') + throw new Error('mnemonist/VPTree.constructor: given `distance` must be a function.'); + + if (!items) + throw new Error('mnemonist/VPTree.constructor: you must provide items to the tree. 
A VPTree cannot be updated after its creation.'); + + // Properties + this.distance = distance; + this.heap = new Heap(comparator); + this.D = 0; + + var arrays = iterables.toArrayWithIndices(items); + this.items = arrays[0]; + var indices = arrays[1]; + + // Creating the binary tree + this.size = indices.length; + + var result = createBinaryTree(distance, this.items, indices); + + this.nodes = result.nodes; + this.lefts = result.lefts; + this.rights = result.rights; + this.mus = result.mus; +} + +/** + * Function used to retrieve the k nearest neighbors of the query. + * + * @param {number} k - Number of neighbors to retrieve. + * @param {any} query - The query. + * @return {array} + */ +VPTree.prototype.nearestNeighbors = function(k, query) { + var neighbors = this.heap, + stack = [0], + tau = Infinity, + nodeIndex, + itemIndex, + vantagePoint, + leftIndex, + rightIndex, + mu, + d; + + this.D = 0; + + while (stack.length) { + nodeIndex = stack.pop(); + itemIndex = this.nodes[nodeIndex]; + vantagePoint = this.items[itemIndex]; + + // Distance between query & the current vantage point + d = this.distance(vantagePoint, query); + this.D++; + + if (d < tau) { + neighbors.push({distance: d, item: vantagePoint}); + + // Trimming + if (neighbors.size > k) + neighbors.pop(); + + // Adjusting tau (only if we already have k items, else it stays Infinity) + if (neighbors.size >= k) + tau = neighbors.peek().distance; + } + + leftIndex = this.lefts[nodeIndex]; + rightIndex = this.rights[nodeIndex]; + + // We are a leaf + if (!leftIndex && !rightIndex) + continue; + + mu = this.mus[nodeIndex]; + + if (d < mu) { + if (leftIndex && d < mu + tau) + stack.push(leftIndex); + if (rightIndex && d >= mu - tau) // Might not be necessary to test d + stack.push(rightIndex); + } + else { + if (rightIndex && d >= mu - tau) + stack.push(rightIndex); + if (leftIndex && d < mu + tau) // Might not be necessary to test d + stack.push(leftIndex); + } + } + + var array = new Array(neighbors.size); 
+ + for (var i = neighbors.size - 1; i >= 0; i--) + array[i] = neighbors.pop(); + + return array; +}; + +/** + * Function used to retrieve every neighbors of query in the given radius. + * + * @param {number} radius - Radius. + * @param {any} query - The query. + * @return {array} + */ +VPTree.prototype.neighbors = function(radius, query) { + var neighbors = [], + stack = [0], + nodeIndex, + itemIndex, + vantagePoint, + leftIndex, + rightIndex, + mu, + d; + + this.D = 0; + + while (stack.length) { + nodeIndex = stack.pop(); + itemIndex = this.nodes[nodeIndex]; + vantagePoint = this.items[itemIndex]; + + // Distance between query & the current vantage point + d = this.distance(vantagePoint, query); + this.D++; + + if (d <= radius) + neighbors.push({distance: d, item: vantagePoint}); + + leftIndex = this.lefts[nodeIndex]; + rightIndex = this.rights[nodeIndex]; + + // We are a leaf + if (!leftIndex && !rightIndex) + continue; + + mu = this.mus[nodeIndex]; + + if (d < mu) { + if (leftIndex && d < mu + radius) + stack.push(leftIndex); + if (rightIndex && d >= mu - radius) // Might not be necessary to test d + stack.push(rightIndex); + } + else { + if (rightIndex && d >= mu - radius) + stack.push(rightIndex); + if (leftIndex && d < mu + radius) // Might not be necessary to test d + stack.push(leftIndex); + } + } + + return neighbors; +}; + +/** + * Convenience known methods. + */ +VPTree.prototype.inspect = function() { + var array = this.items.slice(); + + // Trick so that node displays the name of the constructor + Object.defineProperty(array, 'constructor', { + value: VPTree, + enumerable: false + }); + + return array; +}; + +if (typeof Symbol !== 'undefined') + VPTree.prototype[Symbol.for('nodejs.util.inspect.custom')] = VPTree.prototype.inspect; + +/** + * Static @.from function taking an arbitrary iterable & converting it into + * a tree. + * + * @param {Iterable} iterable - Target iterable. + * @param {function} distance - Distance function to use. 
+ * @return {VPTree} + */ +VPTree.from = function(iterable, distance) { + return new VPTree(distance, iterable); +}; + +/** + * Exporting. + */ +module.exports = VPTree; diff --git a/amplify/functions/downloadDocument/node_modules/obliterator/LICENSE.txt b/amplify/functions/downloadDocument/node_modules/obliterator/LICENSE.txt new file mode 100644 index 0000000..ca37c96 --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/obliterator/LICENSE.txt @@ -0,0 +1,21 @@ +The MIT License (MIT) + +Copyright (c) 2017 Guillaume Plique (Yomguithereal) + +Permission is hereby granted, free of charge, to any person obtaining a copy +of this software and associated documentation files (the "Software"), to deal +in the Software without restriction, including without limitation the rights +to use, copy, modify, merge, publish, distribute, sublicense, and/or sell +copies of the Software, and to permit persons to whom the Software is +furnished to do so, subject to the following conditions: + +The above copyright notice and this permission notice shall be included in +all copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, +OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN +THE SOFTWARE. 
diff --git a/amplify/functions/downloadDocument/node_modules/obliterator/README.md b/amplify/functions/downloadDocument/node_modules/obliterator/README.md new file mode 100644 index 0000000..f611e39 --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/obliterator/README.md @@ -0,0 +1,321 @@ +[![Build Status](https://travis-ci.org/Yomguithereal/obliterator.svg)](https://travis-ci.org/Yomguithereal/obliterator) + +# Obliterator + +Obliterator is a dead simple JavaScript/TypeScript library providing miscellaneous higher-order iterator functions such as combining two or more iterators into a single one. + +# Installation + +``` +npm install --save obliterator +``` + +Note `obliterator` comes along with its TypeScript declarations. + +# Usage + +## Summary + +*Classes* + +* [Iterator](#iterator) + +*Functions* + +* [chain](#chain) +* [combinations](#combinations) +* [consume](#consume) +* [filter](#filter) +* [forEach](#foreach) +* [map](#map) +* [match](#match) +* [permutations](#permutations) +* [powerSet](#powerSet) +* [split](#split) +* [take](#take) + +## Iterator + +A handy Iterator class with safeguards and usable with ES2015's `for ... of` loop constructs & spread operator. + +```js +import Iterator from 'obliterator/iterator'; +// Or +import {Iterator} from 'obliterator'; + +const iterator = new Iterator(function() { + // Define what the `next` function does +}); + +// Checking that the given value is an iterator (native or else) +Iterator.is(value); + +// Creating an empty iterator +const emptyIterator = Iterator.empty(); + +// Creating a simple iterator from a single value +const simpleIterator = Iterator.of(34); + +// Creating a simple iterator from multiple values +const multipleIterator = Iterator.of(1, 2, 3); +``` + +## chain + +Variadic function chaining all the given iterators. 
+ +```js +import chain from 'obliterator/chain'; +// Or +import {chain} from 'obliterator'; + +const set1 = new Set('a'); +const set2 = new Set('bc'); + +const chained = chain(set1.values(), set2.values()); + +chained.next(); +>>> {done: false, value: 'a'} +chained.next(); +>>> {done: false, value: 'b'} +``` + +## combinations + +Returns an iterator of combinations of the given array and of the given size. + +Note that for performance reasons, the yielded combination is always the same object. + +```js +import combinations from 'obliterator/combinations'; +// Or +import {combinations} from 'obliterator'; + +const iterator = combinations(['A', 'B', 'C', 'D'], 2); + +iterator.next().value; +>>> ['A', 'B'] +iterator.next().value; +>>> ['A', 'C'] +``` + +## consume + +Function consuming the given iterator fully or for n steps. + +```js +import consume from 'obliterator/consume'; +// Or +import {consume} from 'obliterator'; + +const set = new Set([1, 2, 3]); + +// Consuming the whole iterator +let iterator = set.values(); +consume(iterator); +iterator.next().done +>>> true + +// Consuming n steps +let iterator = set.values(); +consume(iterator, 2); +iterator.next().value +>>> 3 +``` + +## filter + +Function returning an iterator filtering another one's values using the given predicate. + +```js +import filter from 'obliterator/filter'; +// Or +import {filter} from 'obliterator'; + +const set = new Set([1, 2, 3, 4, 5]); + +const even = x => x % 2 === 0; + +const iterator = filter(even, set.values()); + +iterator.next().value +>>> 2 +iterator.next().value +>>> 4 +``` + +## forEach + +Function able to iterate over almost any JavaScript iterable value using a callback. + +Supported values range from arrays, typed arrays, sets, maps, objects, strings, arguments, iterators, arbitrary iterables etc. 
+ +```js +import forEach from 'obliterator/foreach'; +// Or +import {forEach} from 'obliterator'; + +const set = new Set(['apple', 'banana']); + +forEach(set.values(), (value, i) => { + console.log(i, value); +}); + +// Iterating over a string +forEach('abc', (char, i) => ...); + +// Iterating over a map +forEach(map, (value, key) => ...); +``` + +Optionally, one can use the `forEachWithNullKeys` function to iterate over mixed values but with the twist that iterables without proper keys (lists, sets etc.), will yield `null` instead of an index key. + +```js +import {forEachWithNullKeys} from 'obliterator/foreach'; + +const set = new Set(['apple', 'banana']); + +forEach(set, (value, key) => { + console.log(key, value); +}); +>>> null, 'apple' +>>> null, 'banana' +``` + +## map + +Function returning an iterator mapping another one's values using the given function. + +```js +import map from 'obliterator/map'; +// Or +import {map} from 'obliterator'; + +const set = new Set([1, 2, 3, 4, 5]); + +const triple = x => x * 3; + +const iterator = map(triple, set.values()); + +iterator.next().value +>>> 3 +iterator.next().value +>>> 6 +``` + +## match + +Function returning an iterator over the matches of a given regex applied to the target string. + +```js +import match from 'obliterator/match'; +// Or +import {match} from 'obliterator'; + +const iterator = match(/t/, 'test'); + +iterator.next().value.index +>>> 0 +iterator.next().value.index +>>> 3 +``` + +## permutations + +Returns an iterator of permutations of the given array and of the given size. + +Note that for performance reasons, the yielded permutation is always the same object. 
+ +```js +import permutations from 'obliterator/permutations'; +// Or +import {permutations} from 'obliterator'; + +let iterator = permutations([1, 2, 3]); + +iterator.next().value +>>> [1, 2, 3] +iterator.next().value +>>> [1, 3, 2] + +iterator = permutations(['A', 'B', 'C', 'D'], 2); + +iterator.next().value; +>>> ['A', 'B'] +iterator.next().value; +>>> ['A', 'C'] +``` + +## powerSet + +Returns an iterator of sets composing the power set of the given array. + +```js +import powerSet from 'obliterator/power-set'; +// Or +import {powerSet} from 'obliterator'; + +const iterator = powerSet(['A', 'B', 'C']); + +iterator.next().value; +>>> [] +iterator.next().value; +>>> ['A'] +``` + +## split + +Returns an iterator over the splits of the target string, according to the given RegExp pattern. + +```js +import split from 'obliterator/split'; +// Or +import {split} from 'obliterator'; + +const iterator = split(/;/g, 'hello;world;super'); + +iterator.next().value; +>>> 'hello' +iterator.next().value; +>>> 'world' +``` + +## take + +Function taking values from given iterator and returning them in an array. + +```js +import take from 'obliterator/take'; +// Or +import {take} from 'obliterator'; + +const set = new Set([1, 2, 3]); + +// To take n values from the iterator +take(set.values(), 2); +>>> [1, 2] + +// To convert the full iterator into an array +take(set.values()); +>>> [1, 2, 3] +``` + +# Contribution + +Contributions are obviously welcome. Please be sure to lint the code & add the relevant unit tests before submitting any PR. 
+ +``` +git clone git@github.com:Yomguithereal/obliterator.git +cd obliterator +npm install + +# To lint the code +npm run lint + +# To run the unit tests +npm test +``` + +# License + +[MIT](LICENSE.txt) diff --git a/amplify/functions/downloadDocument/node_modules/obliterator/chain.d.ts b/amplify/functions/downloadDocument/node_modules/obliterator/chain.d.ts new file mode 100644 index 0000000..298e592 --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/obliterator/chain.d.ts @@ -0,0 +1,3 @@ +import {default as ObliteratorIterator} from './iterator.js'; + +export default function chain(...iterators: Iterator[]): ObliteratorIterator; diff --git a/amplify/functions/downloadDocument/node_modules/obliterator/chain.js b/amplify/functions/downloadDocument/node_modules/obliterator/chain.js new file mode 100644 index 0000000..1e4e25d --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/obliterator/chain.js @@ -0,0 +1,39 @@ +/** + * Obliterator Chain Function + * =========================== + * + * Variadic function combining the given iterators. + */ +var Iterator = require('./iterator.js'); + +/** + * Chain. + * + * @param {...Iterator} iterators - Target iterators. 
+ * @return {Iterator} + */ +module.exports = function chain() { + var iterators = arguments, + current, + i = -1; + + return new Iterator(function iterate() { + if (!current) { + i++; + + if (i >= iterators.length) + return {done: true}; + + current = iterators[i]; + } + + var step = current.next(); + + if (step.done) { + current = null; + return iterate(); + } + + return step; + }); +}; diff --git a/amplify/functions/downloadDocument/node_modules/obliterator/combinations.d.ts b/amplify/functions/downloadDocument/node_modules/obliterator/combinations.d.ts new file mode 100644 index 0000000..206dea9 --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/obliterator/combinations.d.ts @@ -0,0 +1,3 @@ +import {default as ObliteratorIterator} from './iterator.js'; + +export default function combinations(array: Array, r: number): ObliteratorIterator>; diff --git a/amplify/functions/downloadDocument/node_modules/obliterator/combinations.js b/amplify/functions/downloadDocument/node_modules/obliterator/combinations.js new file mode 100644 index 0000000..d1cf456 --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/obliterator/combinations.js @@ -0,0 +1,76 @@ +/** + * Obliterator Combinations Function + * ================================== + * + * Iterator returning combinations of the given array. + */ +var Iterator = require('./iterator.js'); + +/** + * Helper mapping indices to items. + */ +function indicesToItems(target, items, indices, r) { + for (var i = 0; i < r; i++) + target[i] = items[indices[i]]; +} + +/** + * Combinations. + * + * @param {array} array - Target array. + * @param {number} r - Size of the subsequences. 
+ * @return {Iterator} + */ +module.exports = function combinations(array, r) { + if (!Array.isArray(array)) + throw new Error('obliterator/combinations: first argument should be an array.'); + + var n = array.length; + + if (typeof r !== 'number') + throw new Error('obliterator/combinations: second argument should be omitted or a number.'); + + if (r > n) + throw new Error('obliterator/combinations: the size of the subsequences should not exceed the length of the array.'); + + if (r === n) + return Iterator.of(array.slice()); + + var indices = new Array(r), + subsequence = new Array(r), + first = true, + i; + + for (i = 0; i < r; i++) + indices[i] = i; + + return new Iterator(function next() { + if (first) { + first = false; + + indicesToItems(subsequence, array, indices, r); + return {value: subsequence}; + } + + if (indices[r - 1]++ < n - 1) { + indicesToItems(subsequence, array, indices, r); + return {value: subsequence}; + } + + i = r - 2; + + while (i >= 0 && indices[i] >= (n - (r - i))) + --i; + + if (i < 0) + return {done: true}; + + indices[i]++; + + while (++i < r) + indices[i] = indices[i - 1] + 1; + + indicesToItems(subsequence, array, indices, r); + return {value: subsequence}; + }); +}; diff --git a/amplify/functions/downloadDocument/node_modules/obliterator/consume.d.ts b/amplify/functions/downloadDocument/node_modules/obliterator/consume.d.ts new file mode 100644 index 0000000..16812ee --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/obliterator/consume.d.ts @@ -0,0 +1 @@ +export default function consume(iterator: Iterator, steps?: number): void; diff --git a/amplify/functions/downloadDocument/node_modules/obliterator/consume.js b/amplify/functions/downloadDocument/node_modules/obliterator/consume.js new file mode 100644 index 0000000..455fea5 --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/obliterator/consume.js @@ -0,0 +1,32 @@ +/* eslint no-constant-condition: 0 */ +/** + * Obliterator Consume Function + * 
============================= + * + * Function consuming the given iterator for n or every steps. + */ + +/** + * Consume. + * + * @param {Iterator} iterator - Target iterator. + * @param {number} [steps] - Optional steps. + */ +module.exports = function consume(iterator, steps) { + var step, + l = arguments.length > 1 ? steps : Infinity, + i = 0; + + while (true) { + + if (i === l) + return; + + step = iterator.next(); + + if (step.done) + return; + + i++; + } +}; diff --git a/amplify/functions/downloadDocument/node_modules/obliterator/filter.d.ts b/amplify/functions/downloadDocument/node_modules/obliterator/filter.d.ts new file mode 100644 index 0000000..18b17f9 --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/obliterator/filter.d.ts @@ -0,0 +1,5 @@ +import {default as ObliteratorIterator} from './iterator.js'; + +type PredicateFunction = (item: T) => boolean; + +export default function filter(predicate: PredicateFunction, iterator: Iterator): ObliteratorIterator; diff --git a/amplify/functions/downloadDocument/node_modules/obliterator/filter.js b/amplify/functions/downloadDocument/node_modules/obliterator/filter.js new file mode 100644 index 0000000..25a519b --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/obliterator/filter.js @@ -0,0 +1,28 @@ +/** + * Obliterator Filter Function + * =========================== + * + * Function returning a iterator filtering the given iterator. + */ +var Iterator = require('./iterator.js'); + +/** + * Filter. + * + * @param {function} predicate - Predicate function. + * @param {Iterator} target - Target iterator. 
+ * @return {Iterator} + */ +module.exports = function filter(predicate, target) { + return new Iterator(function next() { + var step = target.next(); + + if (step.done) + return step; + + if (!predicate(step.value)) + return next(); + + return step; + }); +}; diff --git a/amplify/functions/downloadDocument/node_modules/obliterator/foreach.d.ts b/amplify/functions/downloadDocument/node_modules/obliterator/foreach.d.ts new file mode 100644 index 0000000..61fb9ea --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/obliterator/foreach.d.ts @@ -0,0 +1 @@ +export default function forEach(iterable: any, callback: (item: any, key: any) => void): void; diff --git a/amplify/functions/downloadDocument/node_modules/obliterator/foreach.js b/amplify/functions/downloadDocument/node_modules/obliterator/foreach.js new file mode 100644 index 0000000..ef90051 --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/obliterator/foreach.js @@ -0,0 +1,156 @@ +/** + * Obliterator ForEach Function + * ============================= + * + * Helper function used to easily iterate over mixed values. + */ + +/** + * Constants. + */ +var ARRAY_BUFFER_SUPPORT = typeof ArrayBuffer !== 'undefined', + SYMBOL_SUPPORT = typeof Symbol !== 'undefined'; + +/** + * Function able to iterate over almost any iterable JS value. + * + * @param {any} iterable - Iterable value. + * @param {function} callback - Callback function. 
+ */ +function forEach(iterable, callback) { + var iterator, k, i, l, s; + + if (!iterable) + throw new Error('obliterator/forEach: invalid iterable.'); + + if (typeof callback !== 'function') + throw new Error('obliterator/forEach: expecting a callback.'); + + // The target is an array or a string or function arguments + if ( + Array.isArray(iterable) || + (ARRAY_BUFFER_SUPPORT && ArrayBuffer.isView(iterable)) || + typeof iterable === 'string' || + iterable.toString() === '[object Arguments]' + ) { + for (i = 0, l = iterable.length; i < l; i++) + callback(iterable[i], i); + return; + } + + // The target has a #.forEach method + if (typeof iterable.forEach === 'function') { + iterable.forEach(callback); + return; + } + + // The target is iterable + if ( + SYMBOL_SUPPORT && + Symbol.iterator in iterable && + typeof iterable.next !== 'function' + ) { + iterable = iterable[Symbol.iterator](); + } + + // The target is an iterator + if (typeof iterable.next === 'function') { + iterator = iterable; + i = 0; + + while ((s = iterator.next(), s.done !== true)) { + callback(s.value, i); + i++; + } + + return; + } + + // The target is a plain object + for (k in iterable) { + if (iterable.hasOwnProperty(k)) { + callback(iterable[k], k); + } + } + + return; +} + +/** + * Same function as the above `forEach` but will yield `null` when the target + * does not have keys. + * + * @param {any} iterable - Iterable value. + * @param {function} callback - Callback function. 
+ */ +forEach.forEachWithNullKeys = function(iterable, callback) { + var iterator, k, i, l, s; + + if (!iterable) + throw new Error('obliterator/forEachWithNullKeys: invalid iterable.'); + + if (typeof callback !== 'function') + throw new Error('obliterator/forEachWithNullKeys: expecting a callback.'); + + // The target is an array or a string or function arguments + if ( + Array.isArray(iterable) || + (ARRAY_BUFFER_SUPPORT && ArrayBuffer.isView(iterable)) || + typeof iterable === 'string' || + iterable.toString() === '[object Arguments]' + ) { + for (i = 0, l = iterable.length; i < l; i++) + callback(iterable[i], null); + return; + } + + // The target is a Set + if (iterable instanceof Set) { + iterable.forEach(function(value) { + callback(value, null); + }); + return; + } + + // The target has a #.forEach method + if (typeof iterable.forEach === 'function') { + iterable.forEach(callback); + return; + } + + // The target is iterable + if ( + SYMBOL_SUPPORT && + Symbol.iterator in iterable && + typeof iterable.next !== 'function' + ) { + iterable = iterable[Symbol.iterator](); + } + + // The target is an iterator + if (typeof iterable.next === 'function') { + iterator = iterable; + i = 0; + + while ((s = iterator.next(), s.done !== true)) { + callback(s.value, null); + i++; + } + + return; + } + + // The target is a plain object + for (k in iterable) { + if (iterable.hasOwnProperty(k)) { + callback(iterable[k], k); + } + } + + return; +}; + +/** + * Exporting. 
+ */ +module.exports = forEach; diff --git a/amplify/functions/downloadDocument/node_modules/obliterator/index.d.ts b/amplify/functions/downloadDocument/node_modules/obliterator/index.d.ts new file mode 100644 index 0000000..9aa15bb --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/obliterator/index.d.ts @@ -0,0 +1,14 @@ +export {default as Iterator} from './iterator'; +export {default as chain} from './chain'; +export {default as combinations} from './combinations'; +export {default as consume} from './consume'; +export {default as filter} from './filter'; +export {default as forEach} from './foreach'; +export {default as map} from './map'; +export {default as match} from './match'; +export {default as permutations} from './permutations'; +export {default as powerSet} from './power-set'; +export {default as range} from './range'; +export {default as split} from './split'; +export {default as take} from './take'; +export {default as takeInto} from './take-into'; diff --git a/amplify/functions/downloadDocument/node_modules/obliterator/index.js b/amplify/functions/downloadDocument/node_modules/obliterator/index.js new file mode 100644 index 0000000..d84da62 --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/obliterator/index.js @@ -0,0 +1,22 @@ +/** + * Obliterator Library Endpoint + * ============================= + * + * Exporting the library's functions. 
+ */ +module.exports = { + Iterator: require('./iterator.js'), + chain: require('./chain.js'), + combinations: require('./combinations.js'), + consume: require('./consume.js'), + filter: require('./filter.js'), + forEach: require('./foreach.js'), + map: require('./map.js'), + match: require('./match.js'), + permutations: require('./permutations.js'), + powerSet: require('./power-set.js'), + range: require('./range.js'), + split: require('./split.js'), + take: require('./take.js'), + takeInto: require('./take-into.js') +}; diff --git a/amplify/functions/downloadDocument/node_modules/obliterator/iterator.d.ts b/amplify/functions/downloadDocument/node_modules/obliterator/iterator.d.ts new file mode 100644 index 0000000..a8ea6a8 --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/obliterator/iterator.d.ts @@ -0,0 +1,19 @@ +type NextFunction = () => IteratorResult; + +export default class Iterator implements IterableIterator { + + // Constructor + constructor(next: NextFunction); + + // Members + done: boolean; + + // Well-known methods + next(): IteratorResult; + [Symbol.iterator](): IterableIterator; + + // Static methods + static of(...args: T[]): Iterator; + static empty(): Iterator; + static is(value: any): boolean; +} diff --git a/amplify/functions/downloadDocument/node_modules/obliterator/iterator.js b/amplify/functions/downloadDocument/node_modules/obliterator/iterator.js new file mode 100644 index 0000000..67652ab --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/obliterator/iterator.js @@ -0,0 +1,104 @@ +/** + * Obliterator Iterator Class + * =========================== + * + * Simple class representing the library's iterators. + */ + +/** + * Iterator class. + * + * @constructor + * @param {function} next - Next function. + */ +function Iterator(next) { + + // Hiding the given function + Object.defineProperty(this, '_next', { + writable: false, + enumerable: false, + value: next + }); + + // Is the iterator complete? 
+ this.done = false; +} + +/** + * Next function. + * + * @return {object} + */ +// NOTE: maybe this should dropped for performance? +Iterator.prototype.next = function() { + if (this.done) + return {done: true}; + + var step = this._next(); + + if (step.done) + this.done = true; + + return step; +}; + +/** + * If symbols are supported, we add `next` to `Symbol.iterator`. + */ +if (typeof Symbol !== 'undefined') + Iterator.prototype[Symbol.iterator] = function() { + return this; + }; + +/** + * Returning an iterator of the given values. + * + * @param {any...} values - Values. + * @return {Iterator} + */ +Iterator.of = function() { + var args = arguments, + l = args.length, + i = 0; + + return new Iterator(function() { + if (i >= l) + return {done: true}; + + return {done: false, value: args[i++]}; + }); +}; + +/** + * Returning an empty iterator. + * + * @return {Iterator} + */ +Iterator.empty = function() { + var iterator = new Iterator(null); + iterator.done = true; + + return iterator; +}; + +/** + * Returning whether the given value is an iterator. + * + * @param {any} value - Value. + * @return {boolean} + */ +Iterator.is = function(value) { + if (value instanceof Iterator) + return true; + + return ( + typeof value === 'object' && + value !== null && + typeof value.next === 'function' + ); +}; + +/** + * Exporting. 
+ */ +module.exports = Iterator; diff --git a/amplify/functions/downloadDocument/node_modules/obliterator/map.d.ts b/amplify/functions/downloadDocument/node_modules/obliterator/map.d.ts new file mode 100644 index 0000000..389591b --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/obliterator/map.d.ts @@ -0,0 +1,5 @@ +import {default as ObliteratorIterator} from './iterator.js'; + +type MapFunction = (item: S) => T; + +export default function map(predicate: MapFunction, iterator: Iterator): ObliteratorIterator; diff --git a/amplify/functions/downloadDocument/node_modules/obliterator/map.js b/amplify/functions/downloadDocument/node_modules/obliterator/map.js new file mode 100644 index 0000000..fd6dd17 --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/obliterator/map.js @@ -0,0 +1,27 @@ +/** + * Obliterator Map Function + * =========================== + * + * Function returning a iterator mapping the given iterator's values. + */ +var Iterator = require('./iterator.js'); + +/** + * Map. + * + * @param {function} mapper - Map function. + * @param {Iterator} target - Target iterator. 
+ * @return {Iterator} + */ +module.exports = function map(mapper, target) { + return new Iterator(function next() { + var step = target.next(); + + if (step.done) + return step; + + return { + value: mapper(step.value) + }; + }); +}; diff --git a/amplify/functions/downloadDocument/node_modules/obliterator/match.d.ts b/amplify/functions/downloadDocument/node_modules/obliterator/match.d.ts new file mode 100644 index 0000000..9a42616 --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/obliterator/match.d.ts @@ -0,0 +1,3 @@ +import {default as ObliteratorIterator} from './iterator.js'; + +export default function match(pattern: RegExp, string: string): ObliteratorIterator; diff --git a/amplify/functions/downloadDocument/node_modules/obliterator/match.js b/amplify/functions/downloadDocument/node_modules/obliterator/match.js new file mode 100644 index 0000000..82edf41 --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/obliterator/match.js @@ -0,0 +1,42 @@ +/** + * Obliterator Match Function + * =========================== + * + * Function returning an iterator over the matches of the given regex on the + * target string. + */ +var Iterator = require('./iterator.js'); + +/** + * Match. + * + * @param {RegExp} pattern - Regular expression to use. + * @param {string} string - Target string. + * @return {Iterator} + */ +module.exports = function match(pattern, string) { + var executed = false; + + if (!(pattern instanceof RegExp)) + throw new Error('obliterator/match: invalid pattern. Expecting a regular expression.'); + + if (typeof string !== 'string') + throw new Error('obliterator/match: invalid target. 
Expecting a string.'); + + return new Iterator(function() { + if (executed && !pattern.global) { + pattern.lastIndex = 0; + return {done: true}; + } + + executed = true; + + var m = pattern.exec(string); + + if (m) + return {value: m}; + + pattern.lastIndex = 0; + return {done: true}; + }); +}; diff --git a/amplify/functions/downloadDocument/node_modules/obliterator/package.json b/amplify/functions/downloadDocument/node_modules/obliterator/package.json new file mode 100644 index 0000000..ddfaead --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/obliterator/package.json @@ -0,0 +1,45 @@ +{ + "name": "obliterator", + "version": "1.6.1", + "description": "Higher order iterator library for JavaScript.", + "main": "index.js", + "scripts": { + "lint": "eslint *.js", + "prepublish": "npm run lint && npm test", + "test": "mocha test.js && npm run test:types", + "test:types": "tsc --lib es2015,dom --noEmit --noImplicitAny --noImplicitReturns ./test-types.ts" + }, + "repository": { + "type": "git", + "url": "git+https://github.com/yomguithereal/obliterator.git" + }, + "keywords": [ + "iterator" + ], + "author": { + "name": "Guillaume Plique", + "url": "http://github.com/Yomguithereal" + }, + "license": "MIT", + "bugs": { + "url": "https://github.com/yomguithereal/obliterator/issues" + }, + "homepage": "https://github.com/yomguithereal/obliterator#readme", + "devDependencies": { + "@yomguithereal/eslint-config": "^4.0.0", + "eslint": "^6.8.0", + "mocha": "^7.0.0", + "typescript": "^3.7.5" + }, + "eslintConfig": { + "extends": "@yomguithereal/eslint-config", + "globals": { + "ArrayBuffer": true, + "Map": true, + "Set": true, + "Symbol": true, + "Uint8Array": true, + "Uint32Array": true + } + } +} diff --git a/amplify/functions/downloadDocument/node_modules/obliterator/permutations.d.ts b/amplify/functions/downloadDocument/node_modules/obliterator/permutations.d.ts new file mode 100644 index 0000000..d48dffd --- /dev/null +++ 
b/amplify/functions/downloadDocument/node_modules/obliterator/permutations.d.ts @@ -0,0 +1,3 @@ +import {default as ObliteratorIterator} from './iterator.js'; + +export default function permutations(array: Array, r: number): ObliteratorIterator>; diff --git a/amplify/functions/downloadDocument/node_modules/obliterator/permutations.js b/amplify/functions/downloadDocument/node_modules/obliterator/permutations.js new file mode 100644 index 0000000..4f4752f --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/obliterator/permutations.js @@ -0,0 +1,96 @@ +/** + * Obliterator Permutations Function + * ================================== + * + * Iterator returning permutations of the given array. + */ +var Iterator = require('./iterator.js'); + +/** + * Helper mapping indices to items. + */ +function indicesToItems(target, items, indices, r) { + for (var i = 0; i < r; i++) + target[i] = items[indices[i]]; +} + +/** + * Permutations. + * + * @param {array} array - Target array. + * @param {number} r - Size of the subsequences. 
+ * @return {Iterator} + */ +module.exports = function permutations(array, r) { + if (!Array.isArray(array)) + throw new Error('obliterator/permutations: first argument should be an array.'); + + var n = array.length; + + if (arguments.length < 2) + r = n; + + if (typeof r !== 'number') + throw new Error('obliterator/permutations: second argument should be omitted or a number.'); + + if (r > n) + throw new Error('obliterator/permutations: the size of the subsequences should not exceed the length of the array.'); + + var indices = new Uint32Array(n), + subsequence = new Array(r), + cycles = new Uint32Array(r), + first = true, + i; + + for (i = 0; i < n; i++) { + indices[i] = i; + + if (i < r) + cycles[i] = n - i; + } + + i = r; + + return new Iterator(function next() { + if (first) { + first = false; + indicesToItems(subsequence, array, indices, r); + return {value: subsequence}; + } + + var tmp, + j; + + i--; + + if (i < 0) + return {done: true}; + + cycles[i]--; + + if (cycles[i] === 0) { + + tmp = indices[i]; + + for (j = i; j < n - 1; j++) + indices[j] = indices[j + 1]; + + indices[n - 1] = tmp; + + cycles[i] = n - i; + return next(); + } + else { + j = cycles[i]; + tmp = indices[i]; + + indices[i] = indices[n - j]; + indices[n - j] = tmp; + + i = r; + + indicesToItems(subsequence, array, indices, r); + return {value: subsequence}; + } + }); +}; diff --git a/amplify/functions/downloadDocument/node_modules/obliterator/power-set.d.ts b/amplify/functions/downloadDocument/node_modules/obliterator/power-set.d.ts new file mode 100644 index 0000000..1f238d0 --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/obliterator/power-set.d.ts @@ -0,0 +1,3 @@ +import {default as ObliteratorIterator} from './iterator.js'; + +export default function powerSet(array: Array): ObliteratorIterator>; diff --git a/amplify/functions/downloadDocument/node_modules/obliterator/power-set.js b/amplify/functions/downloadDocument/node_modules/obliterator/power-set.js new file 
mode 100644 index 0000000..a9d1850 --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/obliterator/power-set.js @@ -0,0 +1,28 @@ +/** + * Obliterator Power Set Function + * =============================== + * + * Iterator returning the power set of the given array. + */ +var Iterator = require('./iterator.js'), + combinations = require('./combinations.js'), + chain = require('./chain.js'); + +/** + * Power set. + * + * @param {array} array - Target array. + * @return {Iterator} + */ +module.exports = function powerSet(array) { + var n = array.length; + + var iterators = new Array(n + 1); + + iterators[0] = Iterator.of([]); + + for (var i = 1; i < n + 1; i++) + iterators[i] = combinations(array, i); + + return chain.apply(null, iterators); +}; diff --git a/amplify/functions/downloadDocument/node_modules/obliterator/range.d.ts b/amplify/functions/downloadDocument/node_modules/obliterator/range.d.ts new file mode 100644 index 0000000..498229e --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/obliterator/range.d.ts @@ -0,0 +1,5 @@ +import {default as ObliteratorIterator} from './iterator.js'; + +export default function range(end: number): ObliteratorIterator; +export default function range(start: number, end: number): ObliteratorIterator; +export default function range(start: number, end: number, step: number): ObliteratorIterator; diff --git a/amplify/functions/downloadDocument/node_modules/obliterator/range.js b/amplify/functions/downloadDocument/node_modules/obliterator/range.js new file mode 100644 index 0000000..eea7e4d --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/obliterator/range.js @@ -0,0 +1,45 @@ +/** + * Obliterator Range Function + * =========================== + * + * Function returning a range iterator. + */ +var Iterator = require('./iterator.js'); + +/** + * Range. + * + * @param {number} start - Start. + * @param {number} end - End. + * @param {number} step - Step. 
+ * @return {Iterator} + */ +module.exports = function range(start, end, step) { + if (arguments.length === 1) { + end = start; + start = 0; + } + + if (arguments.length < 3) + step = 1; + + var i = start; + + var iterator = new Iterator(function() { + if (i < end) { + var value = i; + + i += step; + + return {value: value}; + } + + return {done: true}; + }); + + iterator.start = start; + iterator.end = end; + iterator.step = step; + + return iterator; +}; diff --git a/amplify/functions/downloadDocument/node_modules/obliterator/split.d.ts b/amplify/functions/downloadDocument/node_modules/obliterator/split.d.ts new file mode 100644 index 0000000..e9124ab --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/obliterator/split.d.ts @@ -0,0 +1,3 @@ +import {default as ObliteratorIterator} from './iterator.js'; + +export default function split(pattern: RegExp, string: string): ObliteratorIterator; diff --git a/amplify/functions/downloadDocument/node_modules/obliterator/split.js b/amplify/functions/downloadDocument/node_modules/obliterator/split.js new file mode 100644 index 0000000..09abf83 --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/obliterator/split.js @@ -0,0 +1,68 @@ +/** + * Obliterator Split Function + * =========================== + * + * Function returning an iterator over the pieces of a regex split. + */ +var Iterator = require('./iterator.js'); + +/** + * Function used to make the given pattern global. + * + * @param {RegExp} pattern - Regular expression to make global. + * @return {RegExp} + */ +function makeGlobal(pattern) { + var flags = 'g'; + + if (pattern.multiline) flags += 'm'; + if (pattern.ignoreCase) flags += 'i'; + if (pattern.sticky) flags += 'y'; + if (pattern.unicode) flags += 'u'; + + return new RegExp(pattern.source, flags); +} + +/** + * Split. + * + * @param {RegExp} pattern - Regular expression to use. + * @param {string} string - Target string. 
+ * @return {Iterator} + */ +module.exports = function split(pattern, string) { + if (!(pattern instanceof RegExp)) + throw new Error('obliterator/split: invalid pattern. Expecting a regular expression.'); + + if (typeof string !== 'string') + throw new Error('obliterator/split: invalid target. Expecting a string.'); + + // NOTE: cloning the pattern has a performance cost but side effects for not + // doing so might be worse. + pattern = makeGlobal(pattern); + + var consumed = false, + current = 0; + + return new Iterator(function() { + if (consumed) + return {done: true}; + + var match = pattern.exec(string), + value, + length; + + if (match) { + length = match.index + match[0].length; + + value = string.slice(current, match.index); + current = length; + } + else { + consumed = true; + value = string.slice(current); + } + + return {value: value}; + }); +}; diff --git a/amplify/functions/downloadDocument/node_modules/obliterator/take-into.d.ts b/amplify/functions/downloadDocument/node_modules/obliterator/take-into.d.ts new file mode 100644 index 0000000..d2bf004 --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/obliterator/take-into.d.ts @@ -0,0 +1,5 @@ +import {default as ObliteratorIterator} from './iterator.js'; + +// Requires a resolution of https://github.com/microsoft/TypeScript/issues/1213 +// export default function takeInto, T>(ArrayClass: new (n: number) => C, iterator: Iterator, n: number): C; +export default function takeInto(ArrayClass: new (arrayLength: number) => T[], iterator: Iterator, n: number): T[]; diff --git a/amplify/functions/downloadDocument/node_modules/obliterator/take-into.js b/amplify/functions/downloadDocument/node_modules/obliterator/take-into.js new file mode 100644 index 0000000..6b814f8 --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/obliterator/take-into.js @@ -0,0 +1,40 @@ +/* eslint no-constant-condition: 0 */ +/** + * Obliterator Take Into Function + * =============================== + * + 
* Same as the take function but enables the user to select an array class + * in which to insert the retrieved values. + */ + +/** + * Take Into. + * + * @param {function} ArrayClass - Array class to use. + * @param {Iterator} iterator - Target iterator. + * @param {number} n - Number of items to take. + * @return {array} + */ +module.exports = function takeInto(ArrayClass, iterator, n) { + var array = new ArrayClass(n), + step, + i = 0; + + while (true) { + + if (i === n) + return array; + + step = iterator.next(); + + if (step.done) { + + if (i !== n) + return array.slice(0, i); + + return array; + } + + array[i++] = step.value; + } +}; diff --git a/amplify/functions/downloadDocument/node_modules/obliterator/take.d.ts b/amplify/functions/downloadDocument/node_modules/obliterator/take.d.ts new file mode 100644 index 0000000..c9b5026 --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/obliterator/take.d.ts @@ -0,0 +1,3 @@ +import {default as ObliteratorIterator} from './iterator.js'; + +export default function take(iterator: Iterator, n: number): Array; diff --git a/amplify/functions/downloadDocument/node_modules/obliterator/take.js b/amplify/functions/downloadDocument/node_modules/obliterator/take.js new file mode 100644 index 0000000..d7c5e96 --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/obliterator/take.js @@ -0,0 +1,40 @@ +/* eslint no-constant-condition: 0 */ +/** + * Obliterator Take Function + * ========================== + * + * Function taking n or every value of the given iterator and returns them + * into an array. + */ + +/** + * Take. + * + * @param {Iterator} iterator - Target iterator. + * @param {number} [n] - Optional number of items to take. + * @return {array} + */ +module.exports = function take(iterator, n) { + var l = arguments.length > 1 ? n : Infinity, + array = l !== Infinity ? 
new Array(l) : [], + step, + i = 0; + + while (true) { + + if (i === l) + return array; + + step = iterator.next(); + + if (step.done) { + + if (i !== n) + return array.slice(0, i); + + return array; + } + + array[i++] = step.value; + } +}; diff --git a/amplify/functions/downloadDocument/node_modules/strnum/.vscode/launch.json b/amplify/functions/downloadDocument/node_modules/strnum/.vscode/launch.json new file mode 100644 index 0000000..b87b349 --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/strnum/.vscode/launch.json @@ -0,0 +1,25 @@ +{ + "version": "0.2.0", + "configurations": [ + { + "type": "node", + "request": "launch", + "name": "Jasmine Tests", + "program": "${workspaceFolder}/node_modules/jasmine/bin/jasmine.js", + "args": [ + "${workspaceFolder}/spec/attr_spec.js" + ], + "internalConsoleOptions": "openOnSessionStart" + },{ + "type": "node", + "request": "launch", + "name": "Jasmine Tests current test file", + "program": "${workspaceFolder}/node_modules/jasmine/bin/jasmine.js", + "args": [ + "${file}" + ], + "internalConsoleOptions": "openOnSessionStart" + } + ] + +} \ No newline at end of file diff --git a/amplify/functions/downloadDocument/node_modules/strnum/CHANGELOG.md b/amplify/functions/downloadDocument/node_modules/strnum/CHANGELOG.md new file mode 100644 index 0000000..582e460 --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/strnum/CHANGELOG.md @@ -0,0 +1,22 @@ + +**1.1.2 / 2025-02-27** +- fix skiplike for 0 + +**1.1.1 / 2025-02-21** +- All recent fixes of version 2 + +**2.0.4 / 2025-02-20** +- remove console log + +**2.0.3 / 2025-02-20** +- fix for string which are falsly identified as e-notation + +**2.0.1 / 2025-02-20** +- fix: handle only zeros +- fix: return original string when NaN + +**2.0.0 / 2025-02-20** +- Migrating to ESM modules. 
No functional change + +**1.1.0 / 2025-02-20** +- fix (#9): support missing floating point and e notations \ No newline at end of file diff --git a/amplify/functions/downloadDocument/node_modules/strnum/LICENSE b/amplify/functions/downloadDocument/node_modules/strnum/LICENSE new file mode 100644 index 0000000..6450554 --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/strnum/LICENSE @@ -0,0 +1,21 @@ +MIT License + +Copyright (c) 2021 Natural Intelligence + +Permission is hereby granted, free of charge, to any person obtaining a copy +of this software and associated documentation files (the "Software"), to deal +in the Software without restriction, including without limitation the rights +to use, copy, modify, merge, publish, distribute, sublicense, and/or sell +copies of the Software, and to permit persons to whom the Software is +furnished to do so, subject to the following conditions: + +The above copyright notice and this permission notice shall be included in all +copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, +OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE +SOFTWARE. 
diff --git a/amplify/functions/downloadDocument/node_modules/strnum/README.md b/amplify/functions/downloadDocument/node_modules/strnum/README.md new file mode 100644 index 0000000..419e8ef --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/strnum/README.md @@ -0,0 +1,97 @@ +# strnum +Parse string into Number based on configuration + +## Users + + + + + +Many React Native projects and plugins + +## Usage + +```bash +npm install strnum +``` +```js +const toNumber = require("strnum"); + +toNumber(undefined) // undefined +toNumber(null)) //null +toNumber("")) // "" +toNumber("string"); //"string") +toNumber("12,12"); //"12,12") +toNumber("12 12"); //"12 12") +toNumber("12-12"); //"12-12") +toNumber("12.12.12"); //"12.12.12") +toNumber("0x2f"); //47) +toNumber("-0x2f"); //-47) +toNumber("0x2f", { hex : true}); //47) +toNumber("-0x2f", { hex : true}); //-47) +toNumber("0x2f", { hex : false}); //"0x2f") +toNumber("-0x2f", { hex : false}); //"-0x2f") +toNumber("06"); //6) +toNumber("06", { leadingZeros : true}); //6) +toNumber("06", { leadingZeros : false}); //"06") + +toNumber("006"); //6) +toNumber("006", { leadingZeros : true}); //6) +toNumber("006", { leadingZeros : false}); //"006") +toNumber("0.0"); //0) +toNumber("00.00"); //0) +toNumber("0.06"); //0.06) +toNumber("00.6"); //0.6) +toNumber(".006"); //0.006) +toNumber("6.0"); //6) +toNumber("06.0"); //6) + +toNumber("0.0", { leadingZeros : false}); //0) +toNumber("00.00", { leadingZeros : false}); //"00.00") +toNumber("0.06", { leadingZeros : false}); //0.06) +toNumber("00.6", { leadingZeros : false}); //"00.6") +toNumber(".006", { leadingZeros : false}); //0.006) +toNumber("6.0" , { leadingZeros : false}); //6) +toNumber("06.0" , { leadingZeros : false}); //"06.0") +toNumber("-06"); //-6) +toNumber("-06", { leadingZeros : true}); //-6) +toNumber("-06", { leadingZeros : false}); //"-06") + +toNumber("-0.0"); //-0) +toNumber("-00.00"); //-0) +toNumber("-0.06"); //-0.06) +toNumber("-00.6"); //-0.6) 
+toNumber("-.006"); //-0.006) +toNumber("-6.0"); //-6) +toNumber("-06.0"); //-6) + +toNumber("-0.0" , { leadingZeros : false}); //-0) +toNumber("-00.00", { leadingZeros : false}); //"-00.00") +toNumber("-0.06", { leadingZeros : false}); //-0.06) +toNumber("-00.6", { leadingZeros : false}); //"-00.6") +toNumber("-.006", {leadingZeros : false}); //-0.006) +toNumber("-6.0" , { leadingZeros : false}); //-6) +toNumber("-06.0" , { leadingZeros : false}); //"-06.0") +toNumber("420926189200190257681175017717") ; //4.209261892001902e+29) +toNumber("000000000000000000000000017717" , { leadingZeros : false}); //"000000000000000000000000017717") +toNumber("000000000000000000000000017717" , { leadingZeros : true}); //17717) +toNumber("01.0e2" , { leadingZeros : false}); //"01.0e2") +toNumber("-01.0e2" , { leadingZeros : false}); //"-01.0e2") +toNumber("01.0e2") ; //100) +toNumber("-01.0e2") ; //-100) +toNumber("1.0e2") ; //100) + +toNumber("-1.0e2") ; //-100) +toNumber("1.0e-2"); //0.01) + +toNumber("+1212121212"); // 1212121212 +toNumber("+1212121212", { skipLike: /\+[0-9]{10}/} )); //"+1212121212" +``` + +Supported Options +```js +hex: true, //when hexadecimal string should be parsed +leadingZeros: true, //when number with leading zeros like 08 should be parsed. 
0.0 is not impacted +eNotation: true, //when number with eNotation or number parsed in eNotation should be considered +skipLike: /regex/ //when string should not be parsed when it matches the specified regular expression +``` diff --git a/amplify/functions/downloadDocument/node_modules/strnum/package.json b/amplify/functions/downloadDocument/node_modules/strnum/package.json new file mode 100644 index 0000000..90a1b96 --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/strnum/package.json @@ -0,0 +1,30 @@ +{ + "name": "strnum", + "version": "1.1.2", + "description": "Parse String to Number based on configuration", + "main": "strnum.js", + "scripts": { + "test": "jasmine strnum.test.js" + }, + "keywords": [ + "string", + "number", + "parse", + "convert" + ], + "repository": { + "type": "git", + "url": "https://github.com/NaturalIntelligence/strnum" + }, + "author": "Amit Gupta (https://amitkumargupta.work/)", + "license": "MIT", + "funding": [ + { + "type": "github", + "url": "https://github.com/sponsors/NaturalIntelligence" + } + ], + "devDependencies": { + "jasmine": "^5.6.0" + } +} diff --git a/amplify/functions/downloadDocument/node_modules/strnum/strnum.js b/amplify/functions/downloadDocument/node_modules/strnum/strnum.js new file mode 100644 index 0000000..c3bd08e --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/strnum/strnum.js @@ -0,0 +1,111 @@ +const hexRegex = /^[-+]?0x[a-fA-F0-9]+$/; +const numRegex = /^([\-\+])?(0*)([0-9]*(\.[0-9]*)?)$/; +// const octRegex = /^0x[a-z0-9]+/; +// const binRegex = /0x[a-z0-9]+/; + + +const consider = { + hex : true, + // oct: false, + leadingZeros: true, + decimalPoint: "\.", + eNotation: true, + //skipLike: /regex/ +}; + +function toNumber(str, options = {}){ + options = Object.assign({}, consider, options ); + if(!str || typeof str !== "string" ) return str; + + let trimmedStr = str.trim(); + + if(options.skipLike !== undefined && options.skipLike.test(trimmedStr)) return str; + else 
if(str==="0") return 0; + else if (options.hex && hexRegex.test(trimmedStr)) { + return parse_int(trimmedStr, 16); + // }else if (options.oct && octRegex.test(str)) { + // return Number.parseInt(val, 8); + }else if (trimmedStr.search(/[eE]/)!== -1) { //eNotation + const notation = trimmedStr.match(/^([-\+])?(0*)([0-9]*(\.[0-9]*)?[eE][-\+]?[0-9]+)$/); + // +00.123 => [ , '+', '00', '.123', .. + if(notation){ + // console.log(notation) + if(options.leadingZeros){ //accept with leading zeros + trimmedStr = (notation[1] || "") + notation[3]; + }else{ + if(notation[2] === "0" && notation[3][0]=== "."){ //valid number + }else{ + return str; + } + } + return options.eNotation ? Number(trimmedStr) : str; + }else{ + return str; + } + // }else if (options.parseBin && binRegex.test(str)) { + // return Number.parseInt(val, 2); + }else{ + //separate negative sign, leading zeros, and rest number + const match = numRegex.exec(trimmedStr); + // +00.123 => [ , '+', '00', '.123', .. + if(match){ + const sign = match[1]; + const leadingZeros = match[2]; + let numTrimmedByZeros = trimZeros(match[3]); //complete num without leading zeros + //trim ending zeros for floating number + + if(!options.leadingZeros && leadingZeros.length > 0 && sign && trimmedStr[2] !== ".") return str; //-0123 + else if(!options.leadingZeros && leadingZeros.length > 0 && !sign && trimmedStr[1] !== ".") return str; //0123 + else if(options.leadingZeros && leadingZeros===str) return 0; //00 + + else{//no leading zeros or leading zeros are allowed + const num = Number(trimmedStr); + const numStr = "" + num; + + if(numStr.search(/[eE]/) !== -1){ //given number is long and parsed to eNotation + if(options.eNotation) return num; + else return str; + }else if(trimmedStr.indexOf(".") !== -1){ //floating number + if(numStr === "0" && (numTrimmedByZeros === "") ) return num; //0.0 + else if(numStr === numTrimmedByZeros) return num; //0.456. 
0.79000 + else if( sign && numStr === "-"+numTrimmedByZeros) return num; + else return str; + } + + if(leadingZeros){ + return (numTrimmedByZeros === numStr) || (sign+numTrimmedByZeros === numStr) ? num : str + }else { + return (trimmedStr === numStr) || (trimmedStr === sign+numStr) ? num : str + } + } + }else{ //non-numeric string + return str; + } + } +} + +/** + * + * @param {string} numStr without leading zeros + * @returns + */ +function trimZeros(numStr){ + if(numStr && numStr.indexOf(".") !== -1){//float + numStr = numStr.replace(/0+$/, ""); //remove ending zeros + if(numStr === ".") numStr = "0"; + else if(numStr[0] === ".") numStr = "0"+numStr; + else if(numStr[numStr.length-1] === ".") numStr = numStr.substr(0,numStr.length-1); + return numStr; + } + return numStr; +} + +function parse_int(numStr, base){ + //polyfill + if(parseInt) return parseInt(numStr, base); + else if(Number.parseInt) return Number.parseInt(numStr, base); + else if(window && window.parseInt) return window.parseInt(numStr, base); + else throw new Error("parseInt, Number.parseInt, window.parseInt are not supported") +} + +module.exports = toNumber; \ No newline at end of file diff --git a/amplify/functions/downloadDocument/node_modules/strnum/strnum.test.js b/amplify/functions/downloadDocument/node_modules/strnum/strnum.test.js new file mode 100644 index 0000000..c476614 --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/strnum/strnum.test.js @@ -0,0 +1,165 @@ +const toNumber = require("./strnum.js"); + +describe("Should convert all the valid numeric strings to number", () => { + it("should return undefined, null, empty string, or non-numeric as it is", () => { + expect(toNumber(undefined)).not.toBeDefined(); + expect(toNumber(null)).toEqual(null); + expect(toNumber("")).toEqual(""); + expect(toNumber("string")).toEqual("string"); + expect(toNumber("e89794659669cb7bb967db73a7ea6889c3891727")).toEqual("e89794659669cb7bb967db73a7ea6889c3891727"); + + }); + it("should not 
parse number with spaces or comma", () => { + expect(toNumber("12,12")).toEqual("12,12"); + expect(toNumber("12 12")).toEqual("12 12"); + expect(toNumber("12-12")).toEqual("12-12"); + expect(toNumber("12.12.12")).toEqual("12.12.12"); + }) + it("should consider + sign", () => { + expect(toNumber("+12")).toEqual(12); + expect(toNumber("+ 12")).toEqual("+ 12"); + expect(toNumber("12+12")).toEqual("12+12"); + expect(toNumber("1212+")).toEqual("1212+"); + }) + it("should parse hexadecimal values", () => { + expect(toNumber("0x2f")).toEqual(47); + expect(toNumber("-0x2f")).toEqual(-47); + expect(toNumber("0x2f", { hex : true})).toEqual(47); + expect(toNumber("-0x2f", { hex : true})).toEqual(-47); + expect(toNumber("0x2f", { hex : false})).toEqual("0x2f"); + expect(toNumber("-0x2f", { hex : false})).toEqual("-0x2f"); + }) + it("should not parse strings with 0x embedded", () => { + expect(toNumber("0xzz")).toEqual("0xzz"); + expect(toNumber("iweraf0x123qwerqwer")).toEqual("iweraf0x123qwerqwer"); + expect(toNumber("1230x55")).toEqual("1230x55"); + expect(toNumber("JVBERi0xLjMNCiXi48")).toEqual("JVBERi0xLjMNCiXi48"); + }) + it("leading zeros", () => { + expect(toNumber("0")).toEqual(0); + expect(toNumber("00")).toEqual(0); + expect(toNumber("00.0")).toEqual(0); + + expect(toNumber("0",{ leadingZeros : false})).toEqual(0); + expect(toNumber("00",{ leadingZeros : false})).toEqual("00"); + expect(toNumber("00.0",{ leadingZeros : false})).toEqual("00.0"); + + expect(toNumber("06")).toEqual(6); + expect(toNumber("06", { leadingZeros : true})).toEqual(6); + expect(toNumber("06", { leadingZeros : false})).toEqual("06"); + + expect(toNumber("006")).toEqual(6); + expect(toNumber("006", { leadingZeros : true})).toEqual(6); + expect(toNumber("006", { leadingZeros : false})).toEqual("006"); + + expect(toNumber("000000000000000000000000017717" , { leadingZeros : false})).toEqual("000000000000000000000000017717"); + expect(toNumber("000000000000000000000000017717" , { leadingZeros : 
true})).toEqual(17717); + expect(toNumber("020211201030005811824") ).toEqual("020211201030005811824"); + expect(toNumber("0420926189200190257681175017717") ).toEqual(4.209261892001902e+29); + }) + it("invalid floating number", () => { + expect(toNumber("20.21.030") ).toEqual("20.21.030"); + expect(toNumber("0.21.030") ).toEqual("0.21.030"); + expect(toNumber("0.21.") ).toEqual("0.21."); + }); + it("floating point and leading zeros", () => { + expect(toNumber("0.")).toEqual(0); + expect(toNumber("+0.")).toEqual(0); + expect(toNumber("-0.")).toEqual(-0); + expect(toNumber("1.") ).toEqual(1); + expect(toNumber("00.00")).toEqual(0); + expect(toNumber("0.06")).toEqual(0.06); + expect(toNumber("00.6")).toEqual(0.6); + expect(toNumber(".006")).toEqual(0.006); + expect(toNumber("6.0")).toEqual(6); + expect(toNumber("06.0")).toEqual(6); + + expect(toNumber("0.0", { leadingZeros : false})).toEqual(0); + expect(toNumber("00.00", { leadingZeros : false})).toEqual("00.00"); + expect(toNumber("0.06", { leadingZeros : false})).toEqual(0.06); + expect(toNumber("00.6", { leadingZeros : false})).toEqual("00.6"); + expect(toNumber(".006", { leadingZeros : false})).toEqual(0.006); + expect(toNumber("6.0" , { leadingZeros : false})).toEqual(6); + expect(toNumber("06.0" , { leadingZeros : false})).toEqual("06.0"); + }) + it("negative number leading zeros", () => { + expect(toNumber("+06")).toEqual(6); + expect(toNumber("-06")).toEqual(-6); + expect(toNumber("-06", { leadingZeros : true})).toEqual(-6); + expect(toNumber("-06", { leadingZeros : false})).toEqual("-06"); + + expect(toNumber("-0.0")).toEqual(-0); + expect(toNumber("-00.00")).toEqual(-0); + expect(toNumber("-0.06")).toEqual(-0.06); + expect(toNumber("-00.6")).toEqual(-0.6); + expect(toNumber("-.006")).toEqual(-0.006); + expect(toNumber("-6.0")).toEqual(-6); + expect(toNumber("-06.0")).toEqual(-6); + + expect(toNumber("-0.0" , { leadingZeros : false})).toEqual(-0); + expect(toNumber("-00.00", { leadingZeros : 
false})).toEqual("-00.00"); + expect(toNumber("-0.06", { leadingZeros : false})).toEqual(-0.06); + expect(toNumber("-00.6", { leadingZeros : false})).toEqual("-00.6"); + expect(toNumber("-.006", {leadingZeros : false})).toEqual(-0.006); + expect(toNumber("-6.0" , { leadingZeros : false})).toEqual(-6); + expect(toNumber("-06.0" , { leadingZeros : false})).toEqual("-06.0"); + }) + it("long number", () => { + expect(toNumber("020211201030005811824") ).toEqual("020211201030005811824"); + expect(toNumber("20211201030005811824") ).toEqual("20211201030005811824"); + expect(toNumber("20.211201030005811824") ).toEqual("20.211201030005811824"); + expect(toNumber("0.211201030005811824") ).toEqual("0.211201030005811824"); + }); + it("scientific notation", () => { + expect(toNumber("01.0e2" , { leadingZeros : false})).toEqual("01.0e2"); + expect(toNumber("-01.0e2" , { leadingZeros : false})).toEqual("-01.0e2"); + expect(toNumber("01.0e2") ).toEqual(100); + expect(toNumber("-01.0e2") ).toEqual(-100); + expect(toNumber("1.0e2") ).toEqual(100); + + expect(toNumber("-1.0e2") ).toEqual(-100); + expect(toNumber("1.0e-2")).toEqual(0.01); + + expect(toNumber("420926189200190257681175017717") ).toEqual(4.209261892001902e+29); + expect(toNumber("420926189200190257681175017717" , { eNotation: false} )).toEqual("420926189200190257681175017717"); + + expect(toNumber("1e-2")).toEqual(0.01); + expect(toNumber("1e+2")).toEqual(100); + expect(toNumber("1.e+2")).toEqual(100); + }); + + it("scientific notation with upper E", () => { + expect(toNumber("01.0E2" , { leadingZeros : false})).toEqual("01.0E2"); + expect(toNumber("-01.0E2" , { leadingZeros : false})).toEqual("-01.0E2"); + expect(toNumber("01.0E2") ).toEqual(100); + expect(toNumber("-01.0E2") ).toEqual(-100); + expect(toNumber("1.0E2") ).toEqual(100); + + expect(toNumber("-1.0E2") ).toEqual(-100); + expect(toNumber("1.0E-2")).toEqual(0.01); + }); + + it("should skip matching pattern", () => { + expect(toNumber("0", { skipLike: /.*/ 
})).toEqual("0"); + expect(toNumber("+12", { skipLike: /\+[0-9]{10}/} )).toEqual(12); + expect(toNumber("12+12", { skipLike: /\+[0-9]{10}/} )).toEqual("12+12"); + expect(toNumber("12+1212121212", { skipLike: /\+[0-9]{10}/} )).toEqual("12+1212121212"); + expect(toNumber("+1212121212") ).toEqual(1212121212); + expect(toNumber("+1212121212", { skipLike: /\+[0-9]{10}/} )).toEqual("+1212121212"); + }) + it("should not change string if not number", () => { + expect(toNumber("+12 12")).toEqual("+12 12"); + expect(toNumber(" +12 12 ")).toEqual(" +12 12 "); + }) + it("should ignore sorrounded spaces ", () => { + expect(toNumber(" +1212 ")).toEqual(1212); + }) + + it("negative numbers", () => { + expect(toNumber("+1212")).toEqual(1212); + expect(toNumber("+12.12")).toEqual(12.12); + expect(toNumber("-12.12")).toEqual(-12.12); + expect(toNumber("-012.12")).toEqual(-12.12); + expect(toNumber("-012.12")).toEqual(-12.12); + }) +}); diff --git a/amplify/functions/downloadDocument/node_modules/tslib/CopyrightNotice.txt b/amplify/functions/downloadDocument/node_modules/tslib/CopyrightNotice.txt new file mode 100644 index 0000000..0e42542 --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/tslib/CopyrightNotice.txt @@ -0,0 +1,15 @@ +/****************************************************************************** +Copyright (c) Microsoft Corporation. + +Permission to use, copy, modify, and/or distribute this software for any +purpose with or without fee is hereby granted. + +THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES WITH +REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF MERCHANTABILITY +AND FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR ANY SPECIAL, DIRECT, +INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES WHATSOEVER RESULTING FROM +LOSS OF USE, DATA OR PROFITS, WHETHER IN AN ACTION OF CONTRACT, NEGLIGENCE OR +OTHER TORTIOUS ACTION, ARISING OUT OF OR IN CONNECTION WITH THE USE OR +PERFORMANCE OF THIS SOFTWARE. 
+***************************************************************************** */ + diff --git a/amplify/functions/downloadDocument/node_modules/tslib/LICENSE.txt b/amplify/functions/downloadDocument/node_modules/tslib/LICENSE.txt new file mode 100644 index 0000000..bfe6430 --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/tslib/LICENSE.txt @@ -0,0 +1,12 @@ +Copyright (c) Microsoft Corporation. + +Permission to use, copy, modify, and/or distribute this software for any +purpose with or without fee is hereby granted. + +THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES WITH +REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF MERCHANTABILITY +AND FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR ANY SPECIAL, DIRECT, +INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES WHATSOEVER RESULTING FROM +LOSS OF USE, DATA OR PROFITS, WHETHER IN AN ACTION OF CONTRACT, NEGLIGENCE OR +OTHER TORTIOUS ACTION, ARISING OUT OF OR IN CONNECTION WITH THE USE OR +PERFORMANCE OF THIS SOFTWARE. \ No newline at end of file diff --git a/amplify/functions/downloadDocument/node_modules/tslib/README.md b/amplify/functions/downloadDocument/node_modules/tslib/README.md new file mode 100644 index 0000000..290cc61 --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/tslib/README.md @@ -0,0 +1,164 @@ +# tslib + +This is a runtime library for [TypeScript](https://www.typescriptlang.org/) that contains all of the TypeScript helper functions. + +This library is primarily used by the `--importHelpers` flag in TypeScript. 
+When using `--importHelpers`, a module that uses helper functions like `__extends` and `__assign` in the following emitted file: + +```ts +var __assign = (this && this.__assign) || Object.assign || function(t) { + for (var s, i = 1, n = arguments.length; i < n; i++) { + s = arguments[i]; + for (var p in s) if (Object.prototype.hasOwnProperty.call(s, p)) + t[p] = s[p]; + } + return t; +}; +exports.x = {}; +exports.y = __assign({}, exports.x); + +``` + +will instead be emitted as something like the following: + +```ts +var tslib_1 = require("tslib"); +exports.x = {}; +exports.y = tslib_1.__assign({}, exports.x); +``` + +Because this can avoid duplicate declarations of things like `__extends`, `__assign`, etc., this means delivering users smaller files on average, as well as less runtime overhead. +For optimized bundles with TypeScript, you should absolutely consider using `tslib` and `--importHelpers`. + +# Installing + +For the latest stable version, run: + +## npm + +```sh +# TypeScript 3.9.2 or later +npm install tslib + +# TypeScript 3.8.4 or earlier +npm install tslib@^1 + +# TypeScript 2.3.2 or earlier +npm install tslib@1.6.1 +``` + +## yarn + +```sh +# TypeScript 3.9.2 or later +yarn add tslib + +# TypeScript 3.8.4 or earlier +yarn add tslib@^1 + +# TypeScript 2.3.2 or earlier +yarn add tslib@1.6.1 +``` + +## bower + +```sh +# TypeScript 3.9.2 or later +bower install tslib + +# TypeScript 3.8.4 or earlier +bower install tslib@^1 + +# TypeScript 2.3.2 or earlier +bower install tslib@1.6.1 +``` + +## JSPM + +```sh +# TypeScript 3.9.2 or later +jspm install tslib + +# TypeScript 3.8.4 or earlier +jspm install tslib@^1 + +# TypeScript 2.3.2 or earlier +jspm install tslib@1.6.1 +``` + +# Usage + +Set the `importHelpers` compiler option on the command line: + +``` +tsc --importHelpers file.ts +``` + +or in your tsconfig.json: + +```json +{ + "compilerOptions": { + "importHelpers": true + } +} +``` + +#### For bower and JSPM users + +You will need to add a `paths` 
mapping for `tslib`, e.g. For Bower users: + +```json +{ + "compilerOptions": { + "module": "amd", + "importHelpers": true, + "baseUrl": "./", + "paths": { + "tslib" : ["bower_components/tslib/tslib.d.ts"] + } + } +} +``` + +For JSPM users: + +```json +{ + "compilerOptions": { + "module": "system", + "importHelpers": true, + "baseUrl": "./", + "paths": { + "tslib" : ["jspm_packages/npm/tslib@2.x.y/tslib.d.ts"] + } + } +} +``` + +## Deployment + +- Choose your new version number +- Set it in `package.json` and `bower.json` +- Create a tag: `git tag [version]` +- Push the tag: `git push --tags` +- Create a [release in GitHub](https://github.com/microsoft/tslib/releases) +- Run the [publish to npm](https://github.com/microsoft/tslib/actions?query=workflow%3A%22Publish+to+NPM%22) workflow + +Done. + +# Contribute + +There are many ways to [contribute](https://github.com/Microsoft/TypeScript/blob/master/CONTRIBUTING.md) to TypeScript. + +* [Submit bugs](https://github.com/Microsoft/TypeScript/issues) and help us verify fixes as they are checked in. +* Review the [source code changes](https://github.com/Microsoft/TypeScript/pulls). +* Engage with other TypeScript users and developers on [StackOverflow](http://stackoverflow.com/questions/tagged/typescript). +* Join the [#typescript](http://twitter.com/#!/search/realtime/%23typescript) discussion on Twitter. +* [Contribute bug fixes](https://github.com/Microsoft/TypeScript/blob/master/CONTRIBUTING.md). 
+ +# Documentation + +* [Quick tutorial](http://www.typescriptlang.org/Tutorial) +* [Programming handbook](http://www.typescriptlang.org/Handbook) +* [Homepage](http://www.typescriptlang.org/) diff --git a/amplify/functions/downloadDocument/node_modules/tslib/SECURITY.md b/amplify/functions/downloadDocument/node_modules/tslib/SECURITY.md new file mode 100644 index 0000000..869fdfe --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/tslib/SECURITY.md @@ -0,0 +1,41 @@ + + +## Security + +Microsoft takes the security of our software products and services seriously, which includes all source code repositories managed through our GitHub organizations, which include [Microsoft](https://github.com/Microsoft), [Azure](https://github.com/Azure), [DotNet](https://github.com/dotnet), [AspNet](https://github.com/aspnet), [Xamarin](https://github.com/xamarin), and [our GitHub organizations](https://opensource.microsoft.com/). + +If you believe you have found a security vulnerability in any Microsoft-owned repository that meets [Microsoft's definition of a security vulnerability](https://aka.ms/opensource/security/definition), please report it to us as described below. + +## Reporting Security Issues + +**Please do not report security vulnerabilities through public GitHub issues.** + +Instead, please report them to the Microsoft Security Response Center (MSRC) at [https://msrc.microsoft.com/create-report](https://aka.ms/opensource/security/create-report). + +If you prefer to submit without logging in, send email to [secure@microsoft.com](mailto:secure@microsoft.com). If possible, encrypt your message with our PGP key; please download it from the [Microsoft Security Response Center PGP Key page](https://aka.ms/opensource/security/pgpkey). + +You should receive a response within 24 hours. If for some reason you do not, please follow up via email to ensure we received your original message. 
Additional information can be found at [microsoft.com/msrc](https://aka.ms/opensource/security/msrc). + +Please include the requested information listed below (as much as you can provide) to help us better understand the nature and scope of the possible issue: + + * Type of issue (e.g. buffer overflow, SQL injection, cross-site scripting, etc.) + * Full paths of source file(s) related to the manifestation of the issue + * The location of the affected source code (tag/branch/commit or direct URL) + * Any special configuration required to reproduce the issue + * Step-by-step instructions to reproduce the issue + * Proof-of-concept or exploit code (if possible) + * Impact of the issue, including how an attacker might exploit the issue + +This information will help us triage your report more quickly. + +If you are reporting for a bug bounty, more complete reports can contribute to a higher bounty award. Please visit our [Microsoft Bug Bounty Program](https://aka.ms/opensource/security/bounty) page for more details about our active programs. + +## Preferred Languages + +We prefer all communications to be in English. + +## Policy + +Microsoft follows the principle of [Coordinated Vulnerability Disclosure](https://aka.ms/opensource/security/cvd). + + diff --git a/amplify/functions/downloadDocument/node_modules/tslib/modules/index.d.ts b/amplify/functions/downloadDocument/node_modules/tslib/modules/index.d.ts new file mode 100644 index 0000000..3244fab --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/tslib/modules/index.d.ts @@ -0,0 +1,38 @@ +// Note: named reexports are used instead of `export *` because +// TypeScript itself doesn't resolve the `export *` when checking +// if a particular helper exists. 
+export { + __extends, + __assign, + __rest, + __decorate, + __param, + __esDecorate, + __runInitializers, + __propKey, + __setFunctionName, + __metadata, + __awaiter, + __generator, + __exportStar, + __values, + __read, + __spread, + __spreadArrays, + __spreadArray, + __await, + __asyncGenerator, + __asyncDelegator, + __asyncValues, + __makeTemplateObject, + __importStar, + __importDefault, + __classPrivateFieldGet, + __classPrivateFieldSet, + __classPrivateFieldIn, + __createBinding, + __addDisposableResource, + __disposeResources, + __rewriteRelativeImportExtension, +} from '../tslib.js'; +export * as default from '../tslib.js'; diff --git a/amplify/functions/downloadDocument/node_modules/tslib/modules/index.js b/amplify/functions/downloadDocument/node_modules/tslib/modules/index.js new file mode 100644 index 0000000..c91f618 --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/tslib/modules/index.js @@ -0,0 +1,70 @@ +import tslib from '../tslib.js'; +const { + __extends, + __assign, + __rest, + __decorate, + __param, + __esDecorate, + __runInitializers, + __propKey, + __setFunctionName, + __metadata, + __awaiter, + __generator, + __exportStar, + __createBinding, + __values, + __read, + __spread, + __spreadArrays, + __spreadArray, + __await, + __asyncGenerator, + __asyncDelegator, + __asyncValues, + __makeTemplateObject, + __importStar, + __importDefault, + __classPrivateFieldGet, + __classPrivateFieldSet, + __classPrivateFieldIn, + __addDisposableResource, + __disposeResources, + __rewriteRelativeImportExtension, +} = tslib; +export { + __extends, + __assign, + __rest, + __decorate, + __param, + __esDecorate, + __runInitializers, + __propKey, + __setFunctionName, + __metadata, + __awaiter, + __generator, + __exportStar, + __createBinding, + __values, + __read, + __spread, + __spreadArrays, + __spreadArray, + __await, + __asyncGenerator, + __asyncDelegator, + __asyncValues, + __makeTemplateObject, + __importStar, + __importDefault, + 
__classPrivateFieldGet, + __classPrivateFieldSet, + __classPrivateFieldIn, + __addDisposableResource, + __disposeResources, + __rewriteRelativeImportExtension, +}; +export default tslib; diff --git a/amplify/functions/downloadDocument/node_modules/tslib/modules/package.json b/amplify/functions/downloadDocument/node_modules/tslib/modules/package.json new file mode 100644 index 0000000..aafa0e4 --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/tslib/modules/package.json @@ -0,0 +1,3 @@ +{ + "type": "module" +} \ No newline at end of file diff --git a/amplify/functions/downloadDocument/node_modules/tslib/package.json b/amplify/functions/downloadDocument/node_modules/tslib/package.json new file mode 100644 index 0000000..57d0578 --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/tslib/package.json @@ -0,0 +1,47 @@ +{ + "name": "tslib", + "author": "Microsoft Corp.", + "homepage": "https://www.typescriptlang.org/", + "version": "2.8.1", + "license": "0BSD", + "description": "Runtime library for TypeScript helper functions", + "keywords": [ + "TypeScript", + "Microsoft", + "compiler", + "language", + "javascript", + "tslib", + "runtime" + ], + "bugs": { + "url": "https://github.com/Microsoft/TypeScript/issues" + }, + "repository": { + "type": "git", + "url": "https://github.com/Microsoft/tslib.git" + }, + "main": "tslib.js", + "module": "tslib.es6.js", + "jsnext:main": "tslib.es6.js", + "typings": "tslib.d.ts", + "sideEffects": false, + "exports": { + ".": { + "module": { + "types": "./modules/index.d.ts", + "default": "./tslib.es6.mjs" + }, + "import": { + "node": "./modules/index.js", + "default": { + "types": "./modules/index.d.ts", + "default": "./tslib.es6.mjs" + } + }, + "default": "./tslib.js" + }, + "./*": "./*", + "./": "./" + } +} diff --git a/amplify/functions/downloadDocument/node_modules/tslib/tslib.d.ts b/amplify/functions/downloadDocument/node_modules/tslib/tslib.d.ts new file mode 100644 index 0000000..f23df55 --- 
/dev/null +++ b/amplify/functions/downloadDocument/node_modules/tslib/tslib.d.ts @@ -0,0 +1,460 @@ +/****************************************************************************** +Copyright (c) Microsoft Corporation. + +Permission to use, copy, modify, and/or distribute this software for any +purpose with or without fee is hereby granted. + +THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES WITH +REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF MERCHANTABILITY +AND FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR ANY SPECIAL, DIRECT, +INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES WHATSOEVER RESULTING FROM +LOSS OF USE, DATA OR PROFITS, WHETHER IN AN ACTION OF CONTRACT, NEGLIGENCE OR +OTHER TORTIOUS ACTION, ARISING OUT OF OR IN CONNECTION WITH THE USE OR +PERFORMANCE OF THIS SOFTWARE. +***************************************************************************** */ + +/** + * Used to shim class extends. + * + * @param d The derived class. + * @param b The base class. + */ +export declare function __extends(d: Function, b: Function): void; + +/** + * Copy the values of all of the enumerable own properties from one or more source objects to a + * target object. Returns the target object. + * + * @param t The target object to copy to. + * @param sources One or more source objects from which to copy properties + */ +export declare function __assign(t: any, ...sources: any[]): any; + +/** + * Performs a rest spread on an object. + * + * @param t The source value. + * @param propertyNames The property names excluded from the rest spread. + */ +export declare function __rest(t: any, propertyNames: (string | symbol)[]): any; + +/** + * Applies decorators to a target object + * + * @param decorators The set of decorators to apply. + * @param target The target object. + * @param key If specified, the own property to apply the decorators to. 
+ * @param desc The property descriptor, defaults to fetching the descriptor from the target object. + * @experimental + */ +export declare function __decorate(decorators: Function[], target: any, key?: string | symbol, desc?: any): any; + +/** + * Creates an observing function decorator from a parameter decorator. + * + * @param paramIndex The parameter index to apply the decorator to. + * @param decorator The parameter decorator to apply. Note that the return value is ignored. + * @experimental + */ +export declare function __param(paramIndex: number, decorator: Function): Function; + +/** + * Applies decorators to a class or class member, following the native ECMAScript decorator specification. + * @param ctor For non-field class members, the class constructor. Otherwise, `null`. + * @param descriptorIn The `PropertyDescriptor` to use when unable to look up the property from `ctor`. + * @param decorators The decorators to apply + * @param contextIn The `DecoratorContext` to clone for each decorator application. + * @param initializers An array of field initializer mutation functions into which new initializers are written. + * @param extraInitializers An array of extra initializer functions into which new initializers are written. + */ +export declare function __esDecorate(ctor: Function | null, descriptorIn: object | null, decorators: Function[], contextIn: object, initializers: Function[] | null, extraInitializers: Function[]): void; + +/** + * Runs field initializers or extra initializers generated by `__esDecorate`. + * @param thisArg The `this` argument to use. + * @param initializers The array of initializers to evaluate. + * @param value The initial value to pass to the initializers. + */ +export declare function __runInitializers(thisArg: unknown, initializers: Function[], value?: any): any; + +/** + * Converts a computed property name into a `string` or `symbol` value. 
+ */ +export declare function __propKey(x: any): string | symbol; + +/** + * Assigns the name of a function derived from the left-hand side of an assignment. + * @param f The function to rename. + * @param name The new name for the function. + * @param prefix A prefix (such as `"get"` or `"set"`) to insert before the name. + */ +export declare function __setFunctionName(f: Function, name: string | symbol, prefix?: string): Function; + +/** + * Creates a decorator that sets metadata. + * + * @param metadataKey The metadata key + * @param metadataValue The metadata value + * @experimental + */ +export declare function __metadata(metadataKey: any, metadataValue: any): Function; + +/** + * Converts a generator function into a pseudo-async function, by treating each `yield` as an `await`. + * + * @param thisArg The reference to use as the `this` value in the generator function + * @param _arguments The optional arguments array + * @param P The optional promise constructor argument, defaults to the `Promise` property of the global object. + * @param generator The generator function + */ +export declare function __awaiter(thisArg: any, _arguments: any, P: Function, generator: Function): any; + +/** + * Creates an Iterator object using the body as the implementation. + * + * @param thisArg The reference to use as the `this` value in the function + * @param body The generator state-machine based implementation. + * + * @see [./docs/generator.md] + */ +export declare function __generator(thisArg: any, body: Function): any; + +/** + * Creates bindings for all enumerable properties of `m` on `exports` + * + * @param m The source object + * @param o The `exports` object. + */ +export declare function __exportStar(m: any, o: any): void; + +/** + * Creates a value iterator from an `Iterable` or `ArrayLike` object. + * + * @param o The object. + * @throws {TypeError} If `o` is neither `Iterable`, nor an `ArrayLike`. 
+ */ +export declare function __values(o: any): any; + +/** + * Reads values from an `Iterable` or `ArrayLike` object and returns the resulting array. + * + * @param o The object to read from. + * @param n The maximum number of arguments to read, defaults to `Infinity`. + */ +export declare function __read(o: any, n?: number): any[]; + +/** + * Creates an array from iterable spread. + * + * @param args The Iterable objects to spread. + * @deprecated since TypeScript 4.2 - Use `__spreadArray` + */ +export declare function __spread(...args: any[][]): any[]; + +/** + * Creates an array from array spread. + * + * @param args The ArrayLikes to spread into the resulting array. + * @deprecated since TypeScript 4.2 - Use `__spreadArray` + */ +export declare function __spreadArrays(...args: any[][]): any[]; + +/** + * Spreads the `from` array into the `to` array. + * + * @param pack Replace empty elements with `undefined`. + */ +export declare function __spreadArray(to: any[], from: any[], pack?: boolean): any[]; + +/** + * Creates an object that signals to `__asyncGenerator` that it shouldn't be yielded, + * and instead should be awaited and the resulting value passed back to the generator. + * + * @param v The value to await. + */ +export declare function __await(v: any): any; + +/** + * Converts a generator function into an async generator function, by using `yield __await` + * in place of normal `await`. + * + * @param thisArg The reference to use as the `this` value in the generator function + * @param _arguments The optional arguments array + * @param generator The generator function + */ +export declare function __asyncGenerator(thisArg: any, _arguments: any, generator: Function): any; + +/** + * Used to wrap a potentially async iterator in such a way so that it wraps the result + * of calling iterator methods of `o` in `__await` instances, and then yields the awaited values. + * + * @param o The potentially async iterator. 
+ * @returns A synchronous iterator yielding `__await` instances on every odd invocation + * and returning the awaited `IteratorResult` passed to `next` every even invocation. + */ +export declare function __asyncDelegator(o: any): any; + +/** + * Creates a value async iterator from an `AsyncIterable`, `Iterable` or `ArrayLike` object. + * + * @param o The object. + * @throws {TypeError} If `o` is neither `AsyncIterable`, `Iterable`, nor an `ArrayLike`. + */ +export declare function __asyncValues(o: any): any; + +/** + * Creates a `TemplateStringsArray` frozen object from the `cooked` and `raw` arrays. + * + * @param cooked The cooked possibly-sparse array. + * @param raw The raw string content. + */ +export declare function __makeTemplateObject(cooked: string[], raw: string[]): TemplateStringsArray; + +/** + * Used to shim default and named imports in ECMAScript Modules transpiled to CommonJS. + * + * ```js + * import Default, { Named, Other } from "mod"; + * // or + * import { default as Default, Named, Other } from "mod"; + * ``` + * + * @param mod The CommonJS module exports object. + */ +export declare function __importStar(mod: T): T; + +/** + * Used to shim default imports in ECMAScript Modules transpiled to CommonJS. + * + * ```js + * import Default from "mod"; + * ``` + * + * @param mod The CommonJS module exports object. + */ +export declare function __importDefault(mod: T): T | { default: T }; + +/** + * Emulates reading a private instance field. + * + * @param receiver The instance from which to read the private field. + * @param state A WeakMap containing the private field value for an instance. + * @param kind Either `"f"` for a field, `"a"` for an accessor, or `"m"` for a method. + * + * @throws {TypeError} If `state` doesn't have an entry for `receiver`. 
+ */ +export declare function __classPrivateFieldGet( + receiver: T, + state: { has(o: T): boolean, get(o: T): V | undefined }, + kind?: "f" +): V; + +/** + * Emulates reading a private static field. + * + * @param receiver The object from which to read the private static field. + * @param state The class constructor containing the definition of the static field. + * @param kind Either `"f"` for a field, `"a"` for an accessor, or `"m"` for a method. + * @param f The descriptor that holds the static field value. + * + * @throws {TypeError} If `receiver` is not `state`. + */ +export declare function __classPrivateFieldGet unknown, V>( + receiver: T, + state: T, + kind: "f", + f: { value: V } +): V; + +/** + * Emulates evaluating a private instance "get" accessor. + * + * @param receiver The instance on which to evaluate the private "get" accessor. + * @param state A WeakSet used to verify an instance supports the private "get" accessor. + * @param kind Either `"f"` for a field, `"a"` for an accessor, or `"m"` for a method. + * @param f The "get" accessor function to evaluate. + * + * @throws {TypeError} If `state` doesn't have an entry for `receiver`. + */ +export declare function __classPrivateFieldGet( + receiver: T, + state: { has(o: T): boolean }, + kind: "a", + f: () => V +): V; + +/** + * Emulates evaluating a private static "get" accessor. + * + * @param receiver The object on which to evaluate the private static "get" accessor. + * @param state The class constructor containing the definition of the static "get" accessor. + * @param kind Either `"f"` for a field, `"a"` for an accessor, or `"m"` for a method. + * @param f The "get" accessor function to evaluate. + * + * @throws {TypeError} If `receiver` is not `state`. + */ +export declare function __classPrivateFieldGet unknown, V>( + receiver: T, + state: T, + kind: "a", + f: () => V +): V; + +/** + * Emulates reading a private instance method. 
+ * + * @param receiver The instance from which to read a private method. + * @param state A WeakSet used to verify an instance supports the private method. + * @param kind Either `"f"` for a field, `"a"` for an accessor, or `"m"` for a method. + * @param f The function to return as the private instance method. + * + * @throws {TypeError} If `state` doesn't have an entry for `receiver`. + */ +export declare function __classPrivateFieldGet unknown>( + receiver: T, + state: { has(o: T): boolean }, + kind: "m", + f: V +): V; + +/** + * Emulates reading a private static method. + * + * @param receiver The object from which to read the private static method. + * @param state The class constructor containing the definition of the static method. + * @param kind Either `"f"` for a field, `"a"` for an accessor, or `"m"` for a method. + * @param f The function to return as the private static method. + * + * @throws {TypeError} If `receiver` is not `state`. + */ +export declare function __classPrivateFieldGet unknown, V extends (...args: any[]) => unknown>( + receiver: T, + state: T, + kind: "m", + f: V +): V; + +/** + * Emulates writing to a private instance field. + * + * @param receiver The instance on which to set a private field value. + * @param state A WeakMap used to store the private field value for an instance. + * @param value The value to store in the private field. + * @param kind Either `"f"` for a field, `"a"` for an accessor, or `"m"` for a method. + * + * @throws {TypeError} If `state` doesn't have an entry for `receiver`. + */ +export declare function __classPrivateFieldSet( + receiver: T, + state: { has(o: T): boolean, set(o: T, value: V): unknown }, + value: V, + kind?: "f" +): V; + +/** + * Emulates writing to a private static field. + * + * @param receiver The object on which to set the private static field. + * @param state The class constructor containing the definition of the private static field. 
+ * @param value The value to store in the private field. + * @param kind Either `"f"` for a field, `"a"` for an accessor, or `"m"` for a method. + * @param f The descriptor that holds the static field value. + * + * @throws {TypeError} If `receiver` is not `state`. + */ +export declare function __classPrivateFieldSet unknown, V>( + receiver: T, + state: T, + value: V, + kind: "f", + f: { value: V } +): V; + +/** + * Emulates writing to a private instance "set" accessor. + * + * @param receiver The instance on which to evaluate the private instance "set" accessor. + * @param state A WeakSet used to verify an instance supports the private "set" accessor. + * @param value The value to store in the private accessor. + * @param kind Either `"f"` for a field, `"a"` for an accessor, or `"m"` for a method. + * @param f The "set" accessor function to evaluate. + * + * @throws {TypeError} If `state` doesn't have an entry for `receiver`. + */ +export declare function __classPrivateFieldSet( + receiver: T, + state: { has(o: T): boolean }, + value: V, + kind: "a", + f: (v: V) => void +): V; + +/** + * Emulates writing to a private static "set" accessor. + * + * @param receiver The object on which to evaluate the private static "set" accessor. + * @param state The class constructor containing the definition of the static "set" accessor. + * @param value The value to store in the private field. + * @param kind Either `"f"` for a field, `"a"` for an accessor, or `"m"` for a method. + * @param f The "set" accessor function to evaluate. + * + * @throws {TypeError} If `receiver` is not `state`. + */ +export declare function __classPrivateFieldSet unknown, V>( + receiver: T, + state: T, + value: V, + kind: "a", + f: (v: V) => void +): V; + +/** + * Checks for the existence of a private field/method/accessor. + * + * @param state The class constructor containing the static member, or the WeakMap or WeakSet associated with a private instance member. 
+ * @param receiver The object for which to test the presence of the private member. + */ +export declare function __classPrivateFieldIn( + state: (new (...args: any[]) => unknown) | { has(o: any): boolean }, + receiver: unknown, +): boolean; + +/** + * Creates a re-export binding on `object` with key `objectKey` that references `target[key]`. + * + * @param object The local `exports` object. + * @param target The object to re-export from. + * @param key The property key of `target` to re-export. + * @param objectKey The property key to re-export as. Defaults to `key`. + */ +export declare function __createBinding(object: object, target: object, key: PropertyKey, objectKey?: PropertyKey): void; + +/** + * Adds a disposable resource to a resource-tracking environment object. + * @param env A resource-tracking environment object. + * @param value Either a Disposable or AsyncDisposable object, `null`, or `undefined`. + * @param async When `true`, `AsyncDisposable` resources can be added. When `false`, `AsyncDisposable` resources cannot be added. + * @returns The {@link value} argument. + * + * @throws {TypeError} If {@link value} is not an object, or if either `Symbol.dispose` or `Symbol.asyncDispose` are not + * defined, or if {@link value} does not have an appropriate `Symbol.dispose` or `Symbol.asyncDispose` method. + */ +export declare function __addDisposableResource(env: { stack: { value?: unknown, dispose?: Function, async: boolean }[]; error: unknown; hasError: boolean; }, value: T, async: boolean): T; + +/** + * Disposes all resources in a resource-tracking environment object. + * @param env A resource-tracking environment object. + * @returns A {@link Promise} if any resources in the environment were marked as `async` when added; otherwise, `void`. + * + * @throws {SuppressedError} if an error thrown during disposal would have suppressed a prior error from disposal or the + * error recorded in the resource-tracking environment object. 
+ * @seealso {@link __addDisposableResource} + */ +export declare function __disposeResources(env: { stack: { value?: unknown, dispose?: Function, async: boolean }[]; error: unknown; hasError: boolean; }): any; + +/** + * Transforms a relative import specifier ending in a non-declaration TypeScript file extension to its JavaScript file extension counterpart. + * @param path The import specifier. + * @param preserveJsx Causes '*.tsx' to transform to '*.jsx' instead of '*.js'. Should be true when `--jsx` is set to `preserve`. + */ +export declare function __rewriteRelativeImportExtension(path: string, preserveJsx?: boolean): string; \ No newline at end of file diff --git a/amplify/functions/downloadDocument/node_modules/tslib/tslib.es6.html b/amplify/functions/downloadDocument/node_modules/tslib/tslib.es6.html new file mode 100644 index 0000000..b122e41 --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/tslib/tslib.es6.html @@ -0,0 +1 @@ + \ No newline at end of file diff --git a/amplify/functions/downloadDocument/node_modules/tslib/tslib.es6.js b/amplify/functions/downloadDocument/node_modules/tslib/tslib.es6.js new file mode 100644 index 0000000..6c1739b --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/tslib/tslib.es6.js @@ -0,0 +1,402 @@ +/****************************************************************************** +Copyright (c) Microsoft Corporation. + +Permission to use, copy, modify, and/or distribute this software for any +purpose with or without fee is hereby granted. + +THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES WITH +REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF MERCHANTABILITY +AND FITNESS. 
IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR ANY SPECIAL, DIRECT, +INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES WHATSOEVER RESULTING FROM +LOSS OF USE, DATA OR PROFITS, WHETHER IN AN ACTION OF CONTRACT, NEGLIGENCE OR +OTHER TORTIOUS ACTION, ARISING OUT OF OR IN CONNECTION WITH THE USE OR +PERFORMANCE OF THIS SOFTWARE. +***************************************************************************** */ +/* global Reflect, Promise, SuppressedError, Symbol, Iterator */ + +var extendStatics = function(d, b) { + extendStatics = Object.setPrototypeOf || + ({ __proto__: [] } instanceof Array && function (d, b) { d.__proto__ = b; }) || + function (d, b) { for (var p in b) if (Object.prototype.hasOwnProperty.call(b, p)) d[p] = b[p]; }; + return extendStatics(d, b); +}; + +export function __extends(d, b) { + if (typeof b !== "function" && b !== null) + throw new TypeError("Class extends value " + String(b) + " is not a constructor or null"); + extendStatics(d, b); + function __() { this.constructor = d; } + d.prototype = b === null ? Object.create(b) : (__.prototype = b.prototype, new __()); +} + +export var __assign = function() { + __assign = Object.assign || function __assign(t) { + for (var s, i = 1, n = arguments.length; i < n; i++) { + s = arguments[i]; + for (var p in s) if (Object.prototype.hasOwnProperty.call(s, p)) t[p] = s[p]; + } + return t; + } + return __assign.apply(this, arguments); +} + +export function __rest(s, e) { + var t = {}; + for (var p in s) if (Object.prototype.hasOwnProperty.call(s, p) && e.indexOf(p) < 0) + t[p] = s[p]; + if (s != null && typeof Object.getOwnPropertySymbols === "function") + for (var i = 0, p = Object.getOwnPropertySymbols(s); i < p.length; i++) { + if (e.indexOf(p[i]) < 0 && Object.prototype.propertyIsEnumerable.call(s, p[i])) + t[p[i]] = s[p[i]]; + } + return t; +} + +export function __decorate(decorators, target, key, desc) { + var c = arguments.length, r = c < 3 ? target : desc === null ? 
desc = Object.getOwnPropertyDescriptor(target, key) : desc, d; + if (typeof Reflect === "object" && typeof Reflect.decorate === "function") r = Reflect.decorate(decorators, target, key, desc); + else for (var i = decorators.length - 1; i >= 0; i--) if (d = decorators[i]) r = (c < 3 ? d(r) : c > 3 ? d(target, key, r) : d(target, key)) || r; + return c > 3 && r && Object.defineProperty(target, key, r), r; +} + +export function __param(paramIndex, decorator) { + return function (target, key) { decorator(target, key, paramIndex); } +} + +export function __esDecorate(ctor, descriptorIn, decorators, contextIn, initializers, extraInitializers) { + function accept(f) { if (f !== void 0 && typeof f !== "function") throw new TypeError("Function expected"); return f; } + var kind = contextIn.kind, key = kind === "getter" ? "get" : kind === "setter" ? "set" : "value"; + var target = !descriptorIn && ctor ? contextIn["static"] ? ctor : ctor.prototype : null; + var descriptor = descriptorIn || (target ? Object.getOwnPropertyDescriptor(target, contextIn.name) : {}); + var _, done = false; + for (var i = decorators.length - 1; i >= 0; i--) { + var context = {}; + for (var p in contextIn) context[p] = p === "access" ? {} : contextIn[p]; + for (var p in contextIn.access) context.access[p] = contextIn.access[p]; + context.addInitializer = function (f) { if (done) throw new TypeError("Cannot add initializers after decoration has completed"); extraInitializers.push(accept(f || null)); }; + var result = (0, decorators[i])(kind === "accessor" ? 
{ get: descriptor.get, set: descriptor.set } : descriptor[key], context); + if (kind === "accessor") { + if (result === void 0) continue; + if (result === null || typeof result !== "object") throw new TypeError("Object expected"); + if (_ = accept(result.get)) descriptor.get = _; + if (_ = accept(result.set)) descriptor.set = _; + if (_ = accept(result.init)) initializers.unshift(_); + } + else if (_ = accept(result)) { + if (kind === "field") initializers.unshift(_); + else descriptor[key] = _; + } + } + if (target) Object.defineProperty(target, contextIn.name, descriptor); + done = true; +}; + +export function __runInitializers(thisArg, initializers, value) { + var useValue = arguments.length > 2; + for (var i = 0; i < initializers.length; i++) { + value = useValue ? initializers[i].call(thisArg, value) : initializers[i].call(thisArg); + } + return useValue ? value : void 0; +}; + +export function __propKey(x) { + return typeof x === "symbol" ? x : "".concat(x); +}; + +export function __setFunctionName(f, name, prefix) { + if (typeof name === "symbol") name = name.description ? "[".concat(name.description, "]") : ""; + return Object.defineProperty(f, "name", { configurable: true, value: prefix ? "".concat(prefix, " ", name) : name }); +}; + +export function __metadata(metadataKey, metadataValue) { + if (typeof Reflect === "object" && typeof Reflect.metadata === "function") return Reflect.metadata(metadataKey, metadataValue); +} + +export function __awaiter(thisArg, _arguments, P, generator) { + function adopt(value) { return value instanceof P ? value : new P(function (resolve) { resolve(value); }); } + return new (P || (P = Promise))(function (resolve, reject) { + function fulfilled(value) { try { step(generator.next(value)); } catch (e) { reject(e); } } + function rejected(value) { try { step(generator["throw"](value)); } catch (e) { reject(e); } } + function step(result) { result.done ? 
resolve(result.value) : adopt(result.value).then(fulfilled, rejected); } + step((generator = generator.apply(thisArg, _arguments || [])).next()); + }); +} + +export function __generator(thisArg, body) { + var _ = { label: 0, sent: function() { if (t[0] & 1) throw t[1]; return t[1]; }, trys: [], ops: [] }, f, y, t, g = Object.create((typeof Iterator === "function" ? Iterator : Object).prototype); + return g.next = verb(0), g["throw"] = verb(1), g["return"] = verb(2), typeof Symbol === "function" && (g[Symbol.iterator] = function() { return this; }), g; + function verb(n) { return function (v) { return step([n, v]); }; } + function step(op) { + if (f) throw new TypeError("Generator is already executing."); + while (g && (g = 0, op[0] && (_ = 0)), _) try { + if (f = 1, y && (t = op[0] & 2 ? y["return"] : op[0] ? y["throw"] || ((t = y["return"]) && t.call(y), 0) : y.next) && !(t = t.call(y, op[1])).done) return t; + if (y = 0, t) op = [op[0] & 2, t.value]; + switch (op[0]) { + case 0: case 1: t = op; break; + case 4: _.label++; return { value: op[1], done: false }; + case 5: _.label++; y = op[1]; op = [0]; continue; + case 7: op = _.ops.pop(); _.trys.pop(); continue; + default: + if (!(t = _.trys, t = t.length > 0 && t[t.length - 1]) && (op[0] === 6 || op[0] === 2)) { _ = 0; continue; } + if (op[0] === 3 && (!t || (op[1] > t[0] && op[1] < t[3]))) { _.label = op[1]; break; } + if (op[0] === 6 && _.label < t[1]) { _.label = t[1]; t = op; break; } + if (t && _.label < t[2]) { _.label = t[2]; _.ops.push(op); break; } + if (t[2]) _.ops.pop(); + _.trys.pop(); continue; + } + op = body.call(thisArg, _); + } catch (e) { op = [6, e]; y = 0; } finally { f = t = 0; } + if (op[0] & 5) throw op[1]; return { value: op[0] ? op[1] : void 0, done: true }; + } +} + +export var __createBinding = Object.create ? (function(o, m, k, k2) { + if (k2 === undefined) k2 = k; + var desc = Object.getOwnPropertyDescriptor(m, k); + if (!desc || ("get" in desc ? 
!m.__esModule : desc.writable || desc.configurable)) { + desc = { enumerable: true, get: function() { return m[k]; } }; + } + Object.defineProperty(o, k2, desc); +}) : (function(o, m, k, k2) { + if (k2 === undefined) k2 = k; + o[k2] = m[k]; +}); + +export function __exportStar(m, o) { + for (var p in m) if (p !== "default" && !Object.prototype.hasOwnProperty.call(o, p)) __createBinding(o, m, p); +} + +export function __values(o) { + var s = typeof Symbol === "function" && Symbol.iterator, m = s && o[s], i = 0; + if (m) return m.call(o); + if (o && typeof o.length === "number") return { + next: function () { + if (o && i >= o.length) o = void 0; + return { value: o && o[i++], done: !o }; + } + }; + throw new TypeError(s ? "Object is not iterable." : "Symbol.iterator is not defined."); +} + +export function __read(o, n) { + var m = typeof Symbol === "function" && o[Symbol.iterator]; + if (!m) return o; + var i = m.call(o), r, ar = [], e; + try { + while ((n === void 0 || n-- > 0) && !(r = i.next()).done) ar.push(r.value); + } + catch (error) { e = { error: error }; } + finally { + try { + if (r && !r.done && (m = i["return"])) m.call(i); + } + finally { if (e) throw e.error; } + } + return ar; +} + +/** @deprecated */ +export function __spread() { + for (var ar = [], i = 0; i < arguments.length; i++) + ar = ar.concat(__read(arguments[i])); + return ar; +} + +/** @deprecated */ +export function __spreadArrays() { + for (var s = 0, i = 0, il = arguments.length; i < il; i++) s += arguments[i].length; + for (var r = Array(s), k = 0, i = 0; i < il; i++) + for (var a = arguments[i], j = 0, jl = a.length; j < jl; j++, k++) + r[k] = a[j]; + return r; +} + +export function __spreadArray(to, from, pack) { + if (pack || arguments.length === 2) for (var i = 0, l = from.length, ar; i < l; i++) { + if (ar || !(i in from)) { + if (!ar) ar = Array.prototype.slice.call(from, 0, i); + ar[i] = from[i]; + } + } + return to.concat(ar || Array.prototype.slice.call(from)); +} + +export 
function __await(v) { + return this instanceof __await ? (this.v = v, this) : new __await(v); +} + +export function __asyncGenerator(thisArg, _arguments, generator) { + if (!Symbol.asyncIterator) throw new TypeError("Symbol.asyncIterator is not defined."); + var g = generator.apply(thisArg, _arguments || []), i, q = []; + return i = Object.create((typeof AsyncIterator === "function" ? AsyncIterator : Object).prototype), verb("next"), verb("throw"), verb("return", awaitReturn), i[Symbol.asyncIterator] = function () { return this; }, i; + function awaitReturn(f) { return function (v) { return Promise.resolve(v).then(f, reject); }; } + function verb(n, f) { if (g[n]) { i[n] = function (v) { return new Promise(function (a, b) { q.push([n, v, a, b]) > 1 || resume(n, v); }); }; if (f) i[n] = f(i[n]); } } + function resume(n, v) { try { step(g[n](v)); } catch (e) { settle(q[0][3], e); } } + function step(r) { r.value instanceof __await ? Promise.resolve(r.value.v).then(fulfill, reject) : settle(q[0][2], r); } + function fulfill(value) { resume("next", value); } + function reject(value) { resume("throw", value); } + function settle(f, v) { if (f(v), q.shift(), q.length) resume(q[0][0], q[0][1]); } +} + +export function __asyncDelegator(o) { + var i, p; + return i = {}, verb("next"), verb("throw", function (e) { throw e; }), verb("return"), i[Symbol.iterator] = function () { return this; }, i; + function verb(n, f) { i[n] = o[n] ? function (v) { return (p = !p) ? { value: __await(o[n](v)), done: false } : f ? f(v) : v; } : f; } +} + +export function __asyncValues(o) { + if (!Symbol.asyncIterator) throw new TypeError("Symbol.asyncIterator is not defined."); + var m = o[Symbol.asyncIterator], i; + return m ? m.call(o) : (o = typeof __values === "function" ? 
__values(o) : o[Symbol.iterator](), i = {}, verb("next"), verb("throw"), verb("return"), i[Symbol.asyncIterator] = function () { return this; }, i); + function verb(n) { i[n] = o[n] && function (v) { return new Promise(function (resolve, reject) { v = o[n](v), settle(resolve, reject, v.done, v.value); }); }; } + function settle(resolve, reject, d, v) { Promise.resolve(v).then(function(v) { resolve({ value: v, done: d }); }, reject); } +} + +export function __makeTemplateObject(cooked, raw) { + if (Object.defineProperty) { Object.defineProperty(cooked, "raw", { value: raw }); } else { cooked.raw = raw; } + return cooked; +}; + +var __setModuleDefault = Object.create ? (function(o, v) { + Object.defineProperty(o, "default", { enumerable: true, value: v }); +}) : function(o, v) { + o["default"] = v; +}; + +var ownKeys = function(o) { + ownKeys = Object.getOwnPropertyNames || function (o) { + var ar = []; + for (var k in o) if (Object.prototype.hasOwnProperty.call(o, k)) ar[ar.length] = k; + return ar; + }; + return ownKeys(o); +}; + +export function __importStar(mod) { + if (mod && mod.__esModule) return mod; + var result = {}; + if (mod != null) for (var k = ownKeys(mod), i = 0; i < k.length; i++) if (k[i] !== "default") __createBinding(result, mod, k[i]); + __setModuleDefault(result, mod); + return result; +} + +export function __importDefault(mod) { + return (mod && mod.__esModule) ? mod : { default: mod }; +} + +export function __classPrivateFieldGet(receiver, state, kind, f) { + if (kind === "a" && !f) throw new TypeError("Private accessor was defined without a getter"); + if (typeof state === "function" ? receiver !== state || !f : !state.has(receiver)) throw new TypeError("Cannot read private member from an object whose class did not declare it"); + return kind === "m" ? f : kind === "a" ? f.call(receiver) : f ? 
f.value : state.get(receiver); +} + +export function __classPrivateFieldSet(receiver, state, value, kind, f) { + if (kind === "m") throw new TypeError("Private method is not writable"); + if (kind === "a" && !f) throw new TypeError("Private accessor was defined without a setter"); + if (typeof state === "function" ? receiver !== state || !f : !state.has(receiver)) throw new TypeError("Cannot write private member to an object whose class did not declare it"); + return (kind === "a" ? f.call(receiver, value) : f ? f.value = value : state.set(receiver, value)), value; +} + +export function __classPrivateFieldIn(state, receiver) { + if (receiver === null || (typeof receiver !== "object" && typeof receiver !== "function")) throw new TypeError("Cannot use 'in' operator on non-object"); + return typeof state === "function" ? receiver === state : state.has(receiver); +} + +export function __addDisposableResource(env, value, async) { + if (value !== null && value !== void 0) { + if (typeof value !== "object" && typeof value !== "function") throw new TypeError("Object expected."); + var dispose, inner; + if (async) { + if (!Symbol.asyncDispose) throw new TypeError("Symbol.asyncDispose is not defined."); + dispose = value[Symbol.asyncDispose]; + } + if (dispose === void 0) { + if (!Symbol.dispose) throw new TypeError("Symbol.dispose is not defined."); + dispose = value[Symbol.dispose]; + if (async) inner = dispose; + } + if (typeof dispose !== "function") throw new TypeError("Object not disposable."); + if (inner) dispose = function() { try { inner.call(this); } catch (e) { return Promise.reject(e); } }; + env.stack.push({ value: value, dispose: dispose, async: async }); + } + else if (async) { + env.stack.push({ async: true }); + } + return value; + +} + +var _SuppressedError = typeof SuppressedError === "function" ? 
SuppressedError : function (error, suppressed, message) { + var e = new Error(message); + return e.name = "SuppressedError", e.error = error, e.suppressed = suppressed, e; +}; + +export function __disposeResources(env) { + function fail(e) { + env.error = env.hasError ? new _SuppressedError(e, env.error, "An error was suppressed during disposal.") : e; + env.hasError = true; + } + var r, s = 0; + function next() { + while (r = env.stack.pop()) { + try { + if (!r.async && s === 1) return s = 0, env.stack.push(r), Promise.resolve().then(next); + if (r.dispose) { + var result = r.dispose.call(r.value); + if (r.async) return s |= 2, Promise.resolve(result).then(next, function(e) { fail(e); return next(); }); + } + else s |= 1; + } + catch (e) { + fail(e); + } + } + if (s === 1) return env.hasError ? Promise.reject(env.error) : Promise.resolve(); + if (env.hasError) throw env.error; + } + return next(); +} + +export function __rewriteRelativeImportExtension(path, preserveJsx) { + if (typeof path === "string" && /^\.\.?\//.test(path)) { + return path.replace(/\.(tsx)$|((?:\.d)?)((?:\.[^./]+?)?)\.([cm]?)ts$/i, function (m, tsx, d, ext, cm) { + return tsx ? preserveJsx ? ".jsx" : ".js" : d && (!ext || !cm) ? m : (d + ext + "." 
+ cm.toLowerCase() + "js"); + }); + } + return path; +} + +export default { + __extends: __extends, + __assign: __assign, + __rest: __rest, + __decorate: __decorate, + __param: __param, + __esDecorate: __esDecorate, + __runInitializers: __runInitializers, + __propKey: __propKey, + __setFunctionName: __setFunctionName, + __metadata: __metadata, + __awaiter: __awaiter, + __generator: __generator, + __createBinding: __createBinding, + __exportStar: __exportStar, + __values: __values, + __read: __read, + __spread: __spread, + __spreadArrays: __spreadArrays, + __spreadArray: __spreadArray, + __await: __await, + __asyncGenerator: __asyncGenerator, + __asyncDelegator: __asyncDelegator, + __asyncValues: __asyncValues, + __makeTemplateObject: __makeTemplateObject, + __importStar: __importStar, + __importDefault: __importDefault, + __classPrivateFieldGet: __classPrivateFieldGet, + __classPrivateFieldSet: __classPrivateFieldSet, + __classPrivateFieldIn: __classPrivateFieldIn, + __addDisposableResource: __addDisposableResource, + __disposeResources: __disposeResources, + __rewriteRelativeImportExtension: __rewriteRelativeImportExtension, +}; diff --git a/amplify/functions/downloadDocument/node_modules/tslib/tslib.es6.mjs b/amplify/functions/downloadDocument/node_modules/tslib/tslib.es6.mjs new file mode 100644 index 0000000..c17990a --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/tslib/tslib.es6.mjs @@ -0,0 +1,401 @@ +/****************************************************************************** +Copyright (c) Microsoft Corporation. + +Permission to use, copy, modify, and/or distribute this software for any +purpose with or without fee is hereby granted. + +THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES WITH +REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF MERCHANTABILITY +AND FITNESS. 
IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR ANY SPECIAL, DIRECT, +INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES WHATSOEVER RESULTING FROM +LOSS OF USE, DATA OR PROFITS, WHETHER IN AN ACTION OF CONTRACT, NEGLIGENCE OR +OTHER TORTIOUS ACTION, ARISING OUT OF OR IN CONNECTION WITH THE USE OR +PERFORMANCE OF THIS SOFTWARE. +***************************************************************************** */ +/* global Reflect, Promise, SuppressedError, Symbol, Iterator */ + +var extendStatics = function(d, b) { + extendStatics = Object.setPrototypeOf || + ({ __proto__: [] } instanceof Array && function (d, b) { d.__proto__ = b; }) || + function (d, b) { for (var p in b) if (Object.prototype.hasOwnProperty.call(b, p)) d[p] = b[p]; }; + return extendStatics(d, b); +}; + +export function __extends(d, b) { + if (typeof b !== "function" && b !== null) + throw new TypeError("Class extends value " + String(b) + " is not a constructor or null"); + extendStatics(d, b); + function __() { this.constructor = d; } + d.prototype = b === null ? Object.create(b) : (__.prototype = b.prototype, new __()); +} + +export var __assign = function() { + __assign = Object.assign || function __assign(t) { + for (var s, i = 1, n = arguments.length; i < n; i++) { + s = arguments[i]; + for (var p in s) if (Object.prototype.hasOwnProperty.call(s, p)) t[p] = s[p]; + } + return t; + } + return __assign.apply(this, arguments); +} + +export function __rest(s, e) { + var t = {}; + for (var p in s) if (Object.prototype.hasOwnProperty.call(s, p) && e.indexOf(p) < 0) + t[p] = s[p]; + if (s != null && typeof Object.getOwnPropertySymbols === "function") + for (var i = 0, p = Object.getOwnPropertySymbols(s); i < p.length; i++) { + if (e.indexOf(p[i]) < 0 && Object.prototype.propertyIsEnumerable.call(s, p[i])) + t[p[i]] = s[p[i]]; + } + return t; +} + +export function __decorate(decorators, target, key, desc) { + var c = arguments.length, r = c < 3 ? target : desc === null ? 
desc = Object.getOwnPropertyDescriptor(target, key) : desc, d; + if (typeof Reflect === "object" && typeof Reflect.decorate === "function") r = Reflect.decorate(decorators, target, key, desc); + else for (var i = decorators.length - 1; i >= 0; i--) if (d = decorators[i]) r = (c < 3 ? d(r) : c > 3 ? d(target, key, r) : d(target, key)) || r; + return c > 3 && r && Object.defineProperty(target, key, r), r; +} + +export function __param(paramIndex, decorator) { + return function (target, key) { decorator(target, key, paramIndex); } +} + +export function __esDecorate(ctor, descriptorIn, decorators, contextIn, initializers, extraInitializers) { + function accept(f) { if (f !== void 0 && typeof f !== "function") throw new TypeError("Function expected"); return f; } + var kind = contextIn.kind, key = kind === "getter" ? "get" : kind === "setter" ? "set" : "value"; + var target = !descriptorIn && ctor ? contextIn["static"] ? ctor : ctor.prototype : null; + var descriptor = descriptorIn || (target ? Object.getOwnPropertyDescriptor(target, contextIn.name) : {}); + var _, done = false; + for (var i = decorators.length - 1; i >= 0; i--) { + var context = {}; + for (var p in contextIn) context[p] = p === "access" ? {} : contextIn[p]; + for (var p in contextIn.access) context.access[p] = contextIn.access[p]; + context.addInitializer = function (f) { if (done) throw new TypeError("Cannot add initializers after decoration has completed"); extraInitializers.push(accept(f || null)); }; + var result = (0, decorators[i])(kind === "accessor" ? 
{ get: descriptor.get, set: descriptor.set } : descriptor[key], context); + if (kind === "accessor") { + if (result === void 0) continue; + if (result === null || typeof result !== "object") throw new TypeError("Object expected"); + if (_ = accept(result.get)) descriptor.get = _; + if (_ = accept(result.set)) descriptor.set = _; + if (_ = accept(result.init)) initializers.unshift(_); + } + else if (_ = accept(result)) { + if (kind === "field") initializers.unshift(_); + else descriptor[key] = _; + } + } + if (target) Object.defineProperty(target, contextIn.name, descriptor); + done = true; +}; + +export function __runInitializers(thisArg, initializers, value) { + var useValue = arguments.length > 2; + for (var i = 0; i < initializers.length; i++) { + value = useValue ? initializers[i].call(thisArg, value) : initializers[i].call(thisArg); + } + return useValue ? value : void 0; +}; + +export function __propKey(x) { + return typeof x === "symbol" ? x : "".concat(x); +}; + +export function __setFunctionName(f, name, prefix) { + if (typeof name === "symbol") name = name.description ? "[".concat(name.description, "]") : ""; + return Object.defineProperty(f, "name", { configurable: true, value: prefix ? "".concat(prefix, " ", name) : name }); +}; + +export function __metadata(metadataKey, metadataValue) { + if (typeof Reflect === "object" && typeof Reflect.metadata === "function") return Reflect.metadata(metadataKey, metadataValue); +} + +export function __awaiter(thisArg, _arguments, P, generator) { + function adopt(value) { return value instanceof P ? value : new P(function (resolve) { resolve(value); }); } + return new (P || (P = Promise))(function (resolve, reject) { + function fulfilled(value) { try { step(generator.next(value)); } catch (e) { reject(e); } } + function rejected(value) { try { step(generator["throw"](value)); } catch (e) { reject(e); } } + function step(result) { result.done ? 
resolve(result.value) : adopt(result.value).then(fulfilled, rejected); } + step((generator = generator.apply(thisArg, _arguments || [])).next()); + }); +} + +export function __generator(thisArg, body) { + var _ = { label: 0, sent: function() { if (t[0] & 1) throw t[1]; return t[1]; }, trys: [], ops: [] }, f, y, t, g = Object.create((typeof Iterator === "function" ? Iterator : Object).prototype); + return g.next = verb(0), g["throw"] = verb(1), g["return"] = verb(2), typeof Symbol === "function" && (g[Symbol.iterator] = function() { return this; }), g; + function verb(n) { return function (v) { return step([n, v]); }; } + function step(op) { + if (f) throw new TypeError("Generator is already executing."); + while (g && (g = 0, op[0] && (_ = 0)), _) try { + if (f = 1, y && (t = op[0] & 2 ? y["return"] : op[0] ? y["throw"] || ((t = y["return"]) && t.call(y), 0) : y.next) && !(t = t.call(y, op[1])).done) return t; + if (y = 0, t) op = [op[0] & 2, t.value]; + switch (op[0]) { + case 0: case 1: t = op; break; + case 4: _.label++; return { value: op[1], done: false }; + case 5: _.label++; y = op[1]; op = [0]; continue; + case 7: op = _.ops.pop(); _.trys.pop(); continue; + default: + if (!(t = _.trys, t = t.length > 0 && t[t.length - 1]) && (op[0] === 6 || op[0] === 2)) { _ = 0; continue; } + if (op[0] === 3 && (!t || (op[1] > t[0] && op[1] < t[3]))) { _.label = op[1]; break; } + if (op[0] === 6 && _.label < t[1]) { _.label = t[1]; t = op; break; } + if (t && _.label < t[2]) { _.label = t[2]; _.ops.push(op); break; } + if (t[2]) _.ops.pop(); + _.trys.pop(); continue; + } + op = body.call(thisArg, _); + } catch (e) { op = [6, e]; y = 0; } finally { f = t = 0; } + if (op[0] & 5) throw op[1]; return { value: op[0] ? op[1] : void 0, done: true }; + } +} + +export var __createBinding = Object.create ? (function(o, m, k, k2) { + if (k2 === undefined) k2 = k; + var desc = Object.getOwnPropertyDescriptor(m, k); + if (!desc || ("get" in desc ? 
!m.__esModule : desc.writable || desc.configurable)) { + desc = { enumerable: true, get: function() { return m[k]; } }; + } + Object.defineProperty(o, k2, desc); +}) : (function(o, m, k, k2) { + if (k2 === undefined) k2 = k; + o[k2] = m[k]; +}); + +export function __exportStar(m, o) { + for (var p in m) if (p !== "default" && !Object.prototype.hasOwnProperty.call(o, p)) __createBinding(o, m, p); +} + +export function __values(o) { + var s = typeof Symbol === "function" && Symbol.iterator, m = s && o[s], i = 0; + if (m) return m.call(o); + if (o && typeof o.length === "number") return { + next: function () { + if (o && i >= o.length) o = void 0; + return { value: o && o[i++], done: !o }; + } + }; + throw new TypeError(s ? "Object is not iterable." : "Symbol.iterator is not defined."); +} + +export function __read(o, n) { + var m = typeof Symbol === "function" && o[Symbol.iterator]; + if (!m) return o; + var i = m.call(o), r, ar = [], e; + try { + while ((n === void 0 || n-- > 0) && !(r = i.next()).done) ar.push(r.value); + } + catch (error) { e = { error: error }; } + finally { + try { + if (r && !r.done && (m = i["return"])) m.call(i); + } + finally { if (e) throw e.error; } + } + return ar; +} + +/** @deprecated */ +export function __spread() { + for (var ar = [], i = 0; i < arguments.length; i++) + ar = ar.concat(__read(arguments[i])); + return ar; +} + +/** @deprecated */ +export function __spreadArrays() { + for (var s = 0, i = 0, il = arguments.length; i < il; i++) s += arguments[i].length; + for (var r = Array(s), k = 0, i = 0; i < il; i++) + for (var a = arguments[i], j = 0, jl = a.length; j < jl; j++, k++) + r[k] = a[j]; + return r; +} + +export function __spreadArray(to, from, pack) { + if (pack || arguments.length === 2) for (var i = 0, l = from.length, ar; i < l; i++) { + if (ar || !(i in from)) { + if (!ar) ar = Array.prototype.slice.call(from, 0, i); + ar[i] = from[i]; + } + } + return to.concat(ar || Array.prototype.slice.call(from)); +} + +export 
function __await(v) { + return this instanceof __await ? (this.v = v, this) : new __await(v); +} + +export function __asyncGenerator(thisArg, _arguments, generator) { + if (!Symbol.asyncIterator) throw new TypeError("Symbol.asyncIterator is not defined."); + var g = generator.apply(thisArg, _arguments || []), i, q = []; + return i = Object.create((typeof AsyncIterator === "function" ? AsyncIterator : Object).prototype), verb("next"), verb("throw"), verb("return", awaitReturn), i[Symbol.asyncIterator] = function () { return this; }, i; + function awaitReturn(f) { return function (v) { return Promise.resolve(v).then(f, reject); }; } + function verb(n, f) { if (g[n]) { i[n] = function (v) { return new Promise(function (a, b) { q.push([n, v, a, b]) > 1 || resume(n, v); }); }; if (f) i[n] = f(i[n]); } } + function resume(n, v) { try { step(g[n](v)); } catch (e) { settle(q[0][3], e); } } + function step(r) { r.value instanceof __await ? Promise.resolve(r.value.v).then(fulfill, reject) : settle(q[0][2], r); } + function fulfill(value) { resume("next", value); } + function reject(value) { resume("throw", value); } + function settle(f, v) { if (f(v), q.shift(), q.length) resume(q[0][0], q[0][1]); } +} + +export function __asyncDelegator(o) { + var i, p; + return i = {}, verb("next"), verb("throw", function (e) { throw e; }), verb("return"), i[Symbol.iterator] = function () { return this; }, i; + function verb(n, f) { i[n] = o[n] ? function (v) { return (p = !p) ? { value: __await(o[n](v)), done: false } : f ? f(v) : v; } : f; } +} + +export function __asyncValues(o) { + if (!Symbol.asyncIterator) throw new TypeError("Symbol.asyncIterator is not defined."); + var m = o[Symbol.asyncIterator], i; + return m ? m.call(o) : (o = typeof __values === "function" ? 
__values(o) : o[Symbol.iterator](), i = {}, verb("next"), verb("throw"), verb("return"), i[Symbol.asyncIterator] = function () { return this; }, i); + function verb(n) { i[n] = o[n] && function (v) { return new Promise(function (resolve, reject) { v = o[n](v), settle(resolve, reject, v.done, v.value); }); }; } + function settle(resolve, reject, d, v) { Promise.resolve(v).then(function(v) { resolve({ value: v, done: d }); }, reject); } +} + +export function __makeTemplateObject(cooked, raw) { + if (Object.defineProperty) { Object.defineProperty(cooked, "raw", { value: raw }); } else { cooked.raw = raw; } + return cooked; +}; + +var __setModuleDefault = Object.create ? (function(o, v) { + Object.defineProperty(o, "default", { enumerable: true, value: v }); +}) : function(o, v) { + o["default"] = v; +}; + +var ownKeys = function(o) { + ownKeys = Object.getOwnPropertyNames || function (o) { + var ar = []; + for (var k in o) if (Object.prototype.hasOwnProperty.call(o, k)) ar[ar.length] = k; + return ar; + }; + return ownKeys(o); +}; + +export function __importStar(mod) { + if (mod && mod.__esModule) return mod; + var result = {}; + if (mod != null) for (var k = ownKeys(mod), i = 0; i < k.length; i++) if (k[i] !== "default") __createBinding(result, mod, k[i]); + __setModuleDefault(result, mod); + return result; +} + +export function __importDefault(mod) { + return (mod && mod.__esModule) ? mod : { default: mod }; +} + +export function __classPrivateFieldGet(receiver, state, kind, f) { + if (kind === "a" && !f) throw new TypeError("Private accessor was defined without a getter"); + if (typeof state === "function" ? receiver !== state || !f : !state.has(receiver)) throw new TypeError("Cannot read private member from an object whose class did not declare it"); + return kind === "m" ? f : kind === "a" ? f.call(receiver) : f ? 
f.value : state.get(receiver); +} + +export function __classPrivateFieldSet(receiver, state, value, kind, f) { + if (kind === "m") throw new TypeError("Private method is not writable"); + if (kind === "a" && !f) throw new TypeError("Private accessor was defined without a setter"); + if (typeof state === "function" ? receiver !== state || !f : !state.has(receiver)) throw new TypeError("Cannot write private member to an object whose class did not declare it"); + return (kind === "a" ? f.call(receiver, value) : f ? f.value = value : state.set(receiver, value)), value; +} + +export function __classPrivateFieldIn(state, receiver) { + if (receiver === null || (typeof receiver !== "object" && typeof receiver !== "function")) throw new TypeError("Cannot use 'in' operator on non-object"); + return typeof state === "function" ? receiver === state : state.has(receiver); +} + +export function __addDisposableResource(env, value, async) { + if (value !== null && value !== void 0) { + if (typeof value !== "object" && typeof value !== "function") throw new TypeError("Object expected."); + var dispose, inner; + if (async) { + if (!Symbol.asyncDispose) throw new TypeError("Symbol.asyncDispose is not defined."); + dispose = value[Symbol.asyncDispose]; + } + if (dispose === void 0) { + if (!Symbol.dispose) throw new TypeError("Symbol.dispose is not defined."); + dispose = value[Symbol.dispose]; + if (async) inner = dispose; + } + if (typeof dispose !== "function") throw new TypeError("Object not disposable."); + if (inner) dispose = function() { try { inner.call(this); } catch (e) { return Promise.reject(e); } }; + env.stack.push({ value: value, dispose: dispose, async: async }); + } + else if (async) { + env.stack.push({ async: true }); + } + return value; +} + +var _SuppressedError = typeof SuppressedError === "function" ? 
SuppressedError : function (error, suppressed, message) { + var e = new Error(message); + return e.name = "SuppressedError", e.error = error, e.suppressed = suppressed, e; +}; + +export function __disposeResources(env) { + function fail(e) { + env.error = env.hasError ? new _SuppressedError(e, env.error, "An error was suppressed during disposal.") : e; + env.hasError = true; + } + var r, s = 0; + function next() { + while (r = env.stack.pop()) { + try { + if (!r.async && s === 1) return s = 0, env.stack.push(r), Promise.resolve().then(next); + if (r.dispose) { + var result = r.dispose.call(r.value); + if (r.async) return s |= 2, Promise.resolve(result).then(next, function(e) { fail(e); return next(); }); + } + else s |= 1; + } + catch (e) { + fail(e); + } + } + if (s === 1) return env.hasError ? Promise.reject(env.error) : Promise.resolve(); + if (env.hasError) throw env.error; + } + return next(); +} + +export function __rewriteRelativeImportExtension(path, preserveJsx) { + if (typeof path === "string" && /^\.\.?\//.test(path)) { + return path.replace(/\.(tsx)$|((?:\.d)?)((?:\.[^./]+?)?)\.([cm]?)ts$/i, function (m, tsx, d, ext, cm) { + return tsx ? preserveJsx ? ".jsx" : ".js" : d && (!ext || !cm) ? m : (d + ext + "." 
+ cm.toLowerCase() + "js"); + }); + } + return path; +} + +export default { + __extends, + __assign, + __rest, + __decorate, + __param, + __esDecorate, + __runInitializers, + __propKey, + __setFunctionName, + __metadata, + __awaiter, + __generator, + __createBinding, + __exportStar, + __values, + __read, + __spread, + __spreadArrays, + __spreadArray, + __await, + __asyncGenerator, + __asyncDelegator, + __asyncValues, + __makeTemplateObject, + __importStar, + __importDefault, + __classPrivateFieldGet, + __classPrivateFieldSet, + __classPrivateFieldIn, + __addDisposableResource, + __disposeResources, + __rewriteRelativeImportExtension, +}; diff --git a/amplify/functions/downloadDocument/node_modules/tslib/tslib.html b/amplify/functions/downloadDocument/node_modules/tslib/tslib.html new file mode 100644 index 0000000..44c9ba5 --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/tslib/tslib.html @@ -0,0 +1 @@ + \ No newline at end of file diff --git a/amplify/functions/downloadDocument/node_modules/tslib/tslib.js b/amplify/functions/downloadDocument/node_modules/tslib/tslib.js new file mode 100644 index 0000000..5e12ace --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/tslib/tslib.js @@ -0,0 +1,484 @@ +/****************************************************************************** +Copyright (c) Microsoft Corporation. + +Permission to use, copy, modify, and/or distribute this software for any +purpose with or without fee is hereby granted. + +THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES WITH +REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF MERCHANTABILITY +AND FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR ANY SPECIAL, DIRECT, +INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES WHATSOEVER RESULTING FROM +LOSS OF USE, DATA OR PROFITS, WHETHER IN AN ACTION OF CONTRACT, NEGLIGENCE OR +OTHER TORTIOUS ACTION, ARISING OUT OF OR IN CONNECTION WITH THE USE OR +PERFORMANCE OF THIS SOFTWARE. 
+***************************************************************************** */ +/* global global, define, Symbol, Reflect, Promise, SuppressedError, Iterator */ +var __extends; +var __assign; +var __rest; +var __decorate; +var __param; +var __esDecorate; +var __runInitializers; +var __propKey; +var __setFunctionName; +var __metadata; +var __awaiter; +var __generator; +var __exportStar; +var __values; +var __read; +var __spread; +var __spreadArrays; +var __spreadArray; +var __await; +var __asyncGenerator; +var __asyncDelegator; +var __asyncValues; +var __makeTemplateObject; +var __importStar; +var __importDefault; +var __classPrivateFieldGet; +var __classPrivateFieldSet; +var __classPrivateFieldIn; +var __createBinding; +var __addDisposableResource; +var __disposeResources; +var __rewriteRelativeImportExtension; +(function (factory) { + var root = typeof global === "object" ? global : typeof self === "object" ? self : typeof this === "object" ? this : {}; + if (typeof define === "function" && define.amd) { + define("tslib", ["exports"], function (exports) { factory(createExporter(root, createExporter(exports))); }); + } + else if (typeof module === "object" && typeof module.exports === "object") { + factory(createExporter(root, createExporter(module.exports))); + } + else { + factory(createExporter(root)); + } + function createExporter(exports, previous) { + if (exports !== root) { + if (typeof Object.create === "function") { + Object.defineProperty(exports, "__esModule", { value: true }); + } + else { + exports.__esModule = true; + } + } + return function (id, v) { return exports[id] = previous ? 
previous(id, v) : v; }; + } +}) +(function (exporter) { + var extendStatics = Object.setPrototypeOf || + ({ __proto__: [] } instanceof Array && function (d, b) { d.__proto__ = b; }) || + function (d, b) { for (var p in b) if (Object.prototype.hasOwnProperty.call(b, p)) d[p] = b[p]; }; + + __extends = function (d, b) { + if (typeof b !== "function" && b !== null) + throw new TypeError("Class extends value " + String(b) + " is not a constructor or null"); + extendStatics(d, b); + function __() { this.constructor = d; } + d.prototype = b === null ? Object.create(b) : (__.prototype = b.prototype, new __()); + }; + + __assign = Object.assign || function (t) { + for (var s, i = 1, n = arguments.length; i < n; i++) { + s = arguments[i]; + for (var p in s) if (Object.prototype.hasOwnProperty.call(s, p)) t[p] = s[p]; + } + return t; + }; + + __rest = function (s, e) { + var t = {}; + for (var p in s) if (Object.prototype.hasOwnProperty.call(s, p) && e.indexOf(p) < 0) + t[p] = s[p]; + if (s != null && typeof Object.getOwnPropertySymbols === "function") + for (var i = 0, p = Object.getOwnPropertySymbols(s); i < p.length; i++) { + if (e.indexOf(p[i]) < 0 && Object.prototype.propertyIsEnumerable.call(s, p[i])) + t[p[i]] = s[p[i]]; + } + return t; + }; + + __decorate = function (decorators, target, key, desc) { + var c = arguments.length, r = c < 3 ? target : desc === null ? desc = Object.getOwnPropertyDescriptor(target, key) : desc, d; + if (typeof Reflect === "object" && typeof Reflect.decorate === "function") r = Reflect.decorate(decorators, target, key, desc); + else for (var i = decorators.length - 1; i >= 0; i--) if (d = decorators[i]) r = (c < 3 ? d(r) : c > 3 ? 
d(target, key, r) : d(target, key)) || r; + return c > 3 && r && Object.defineProperty(target, key, r), r; + }; + + __param = function (paramIndex, decorator) { + return function (target, key) { decorator(target, key, paramIndex); } + }; + + __esDecorate = function (ctor, descriptorIn, decorators, contextIn, initializers, extraInitializers) { + function accept(f) { if (f !== void 0 && typeof f !== "function") throw new TypeError("Function expected"); return f; } + var kind = contextIn.kind, key = kind === "getter" ? "get" : kind === "setter" ? "set" : "value"; + var target = !descriptorIn && ctor ? contextIn["static"] ? ctor : ctor.prototype : null; + var descriptor = descriptorIn || (target ? Object.getOwnPropertyDescriptor(target, contextIn.name) : {}); + var _, done = false; + for (var i = decorators.length - 1; i >= 0; i--) { + var context = {}; + for (var p in contextIn) context[p] = p === "access" ? {} : contextIn[p]; + for (var p in contextIn.access) context.access[p] = contextIn.access[p]; + context.addInitializer = function (f) { if (done) throw new TypeError("Cannot add initializers after decoration has completed"); extraInitializers.push(accept(f || null)); }; + var result = (0, decorators[i])(kind === "accessor" ? 
{ get: descriptor.get, set: descriptor.set } : descriptor[key], context); + if (kind === "accessor") { + if (result === void 0) continue; + if (result === null || typeof result !== "object") throw new TypeError("Object expected"); + if (_ = accept(result.get)) descriptor.get = _; + if (_ = accept(result.set)) descriptor.set = _; + if (_ = accept(result.init)) initializers.unshift(_); + } + else if (_ = accept(result)) { + if (kind === "field") initializers.unshift(_); + else descriptor[key] = _; + } + } + if (target) Object.defineProperty(target, contextIn.name, descriptor); + done = true; + }; + + __runInitializers = function (thisArg, initializers, value) { + var useValue = arguments.length > 2; + for (var i = 0; i < initializers.length; i++) { + value = useValue ? initializers[i].call(thisArg, value) : initializers[i].call(thisArg); + } + return useValue ? value : void 0; + }; + + __propKey = function (x) { + return typeof x === "symbol" ? x : "".concat(x); + }; + + __setFunctionName = function (f, name, prefix) { + if (typeof name === "symbol") name = name.description ? "[".concat(name.description, "]") : ""; + return Object.defineProperty(f, "name", { configurable: true, value: prefix ? "".concat(prefix, " ", name) : name }); + }; + + __metadata = function (metadataKey, metadataValue) { + if (typeof Reflect === "object" && typeof Reflect.metadata === "function") return Reflect.metadata(metadataKey, metadataValue); + }; + + __awaiter = function (thisArg, _arguments, P, generator) { + function adopt(value) { return value instanceof P ? value : new P(function (resolve) { resolve(value); }); } + return new (P || (P = Promise))(function (resolve, reject) { + function fulfilled(value) { try { step(generator.next(value)); } catch (e) { reject(e); } } + function rejected(value) { try { step(generator["throw"](value)); } catch (e) { reject(e); } } + function step(result) { result.done ? 
resolve(result.value) : adopt(result.value).then(fulfilled, rejected); } + step((generator = generator.apply(thisArg, _arguments || [])).next()); + }); + }; + + __generator = function (thisArg, body) { + var _ = { label: 0, sent: function() { if (t[0] & 1) throw t[1]; return t[1]; }, trys: [], ops: [] }, f, y, t, g = Object.create((typeof Iterator === "function" ? Iterator : Object).prototype); + return g.next = verb(0), g["throw"] = verb(1), g["return"] = verb(2), typeof Symbol === "function" && (g[Symbol.iterator] = function() { return this; }), g; + function verb(n) { return function (v) { return step([n, v]); }; } + function step(op) { + if (f) throw new TypeError("Generator is already executing."); + while (g && (g = 0, op[0] && (_ = 0)), _) try { + if (f = 1, y && (t = op[0] & 2 ? y["return"] : op[0] ? y["throw"] || ((t = y["return"]) && t.call(y), 0) : y.next) && !(t = t.call(y, op[1])).done) return t; + if (y = 0, t) op = [op[0] & 2, t.value]; + switch (op[0]) { + case 0: case 1: t = op; break; + case 4: _.label++; return { value: op[1], done: false }; + case 5: _.label++; y = op[1]; op = [0]; continue; + case 7: op = _.ops.pop(); _.trys.pop(); continue; + default: + if (!(t = _.trys, t = t.length > 0 && t[t.length - 1]) && (op[0] === 6 || op[0] === 2)) { _ = 0; continue; } + if (op[0] === 3 && (!t || (op[1] > t[0] && op[1] < t[3]))) { _.label = op[1]; break; } + if (op[0] === 6 && _.label < t[1]) { _.label = t[1]; t = op; break; } + if (t && _.label < t[2]) { _.label = t[2]; _.ops.push(op); break; } + if (t[2]) _.ops.pop(); + _.trys.pop(); continue; + } + op = body.call(thisArg, _); + } catch (e) { op = [6, e]; y = 0; } finally { f = t = 0; } + if (op[0] & 5) throw op[1]; return { value: op[0] ? op[1] : void 0, done: true }; + } + }; + + __exportStar = function(m, o) { + for (var p in m) if (p !== "default" && !Object.prototype.hasOwnProperty.call(o, p)) __createBinding(o, m, p); + }; + + __createBinding = Object.create ? 
(function(o, m, k, k2) { + if (k2 === undefined) k2 = k; + var desc = Object.getOwnPropertyDescriptor(m, k); + if (!desc || ("get" in desc ? !m.__esModule : desc.writable || desc.configurable)) { + desc = { enumerable: true, get: function() { return m[k]; } }; + } + Object.defineProperty(o, k2, desc); + }) : (function(o, m, k, k2) { + if (k2 === undefined) k2 = k; + o[k2] = m[k]; + }); + + __values = function (o) { + var s = typeof Symbol === "function" && Symbol.iterator, m = s && o[s], i = 0; + if (m) return m.call(o); + if (o && typeof o.length === "number") return { + next: function () { + if (o && i >= o.length) o = void 0; + return { value: o && o[i++], done: !o }; + } + }; + throw new TypeError(s ? "Object is not iterable." : "Symbol.iterator is not defined."); + }; + + __read = function (o, n) { + var m = typeof Symbol === "function" && o[Symbol.iterator]; + if (!m) return o; + var i = m.call(o), r, ar = [], e; + try { + while ((n === void 0 || n-- > 0) && !(r = i.next()).done) ar.push(r.value); + } + catch (error) { e = { error: error }; } + finally { + try { + if (r && !r.done && (m = i["return"])) m.call(i); + } + finally { if (e) throw e.error; } + } + return ar; + }; + + /** @deprecated */ + __spread = function () { + for (var ar = [], i = 0; i < arguments.length; i++) + ar = ar.concat(__read(arguments[i])); + return ar; + }; + + /** @deprecated */ + __spreadArrays = function () { + for (var s = 0, i = 0, il = arguments.length; i < il; i++) s += arguments[i].length; + for (var r = Array(s), k = 0, i = 0; i < il; i++) + for (var a = arguments[i], j = 0, jl = a.length; j < jl; j++, k++) + r[k] = a[j]; + return r; + }; + + __spreadArray = function (to, from, pack) { + if (pack || arguments.length === 2) for (var i = 0, l = from.length, ar; i < l; i++) { + if (ar || !(i in from)) { + if (!ar) ar = Array.prototype.slice.call(from, 0, i); + ar[i] = from[i]; + } + } + return to.concat(ar || Array.prototype.slice.call(from)); + }; + + __await = function (v) { 
+ return this instanceof __await ? (this.v = v, this) : new __await(v); + }; + + __asyncGenerator = function (thisArg, _arguments, generator) { + if (!Symbol.asyncIterator) throw new TypeError("Symbol.asyncIterator is not defined."); + var g = generator.apply(thisArg, _arguments || []), i, q = []; + return i = Object.create((typeof AsyncIterator === "function" ? AsyncIterator : Object).prototype), verb("next"), verb("throw"), verb("return", awaitReturn), i[Symbol.asyncIterator] = function () { return this; }, i; + function awaitReturn(f) { return function (v) { return Promise.resolve(v).then(f, reject); }; } + function verb(n, f) { if (g[n]) { i[n] = function (v) { return new Promise(function (a, b) { q.push([n, v, a, b]) > 1 || resume(n, v); }); }; if (f) i[n] = f(i[n]); } } + function resume(n, v) { try { step(g[n](v)); } catch (e) { settle(q[0][3], e); } } + function step(r) { r.value instanceof __await ? Promise.resolve(r.value.v).then(fulfill, reject) : settle(q[0][2], r); } + function fulfill(value) { resume("next", value); } + function reject(value) { resume("throw", value); } + function settle(f, v) { if (f(v), q.shift(), q.length) resume(q[0][0], q[0][1]); } + }; + + __asyncDelegator = function (o) { + var i, p; + return i = {}, verb("next"), verb("throw", function (e) { throw e; }), verb("return"), i[Symbol.iterator] = function () { return this; }, i; + function verb(n, f) { i[n] = o[n] ? function (v) { return (p = !p) ? { value: __await(o[n](v)), done: false } : f ? f(v) : v; } : f; } + }; + + __asyncValues = function (o) { + if (!Symbol.asyncIterator) throw new TypeError("Symbol.asyncIterator is not defined."); + var m = o[Symbol.asyncIterator], i; + return m ? m.call(o) : (o = typeof __values === "function" ? 
__values(o) : o[Symbol.iterator](), i = {}, verb("next"), verb("throw"), verb("return"), i[Symbol.asyncIterator] = function () { return this; }, i); + function verb(n) { i[n] = o[n] && function (v) { return new Promise(function (resolve, reject) { v = o[n](v), settle(resolve, reject, v.done, v.value); }); }; } + function settle(resolve, reject, d, v) { Promise.resolve(v).then(function(v) { resolve({ value: v, done: d }); }, reject); } + }; + + __makeTemplateObject = function (cooked, raw) { + if (Object.defineProperty) { Object.defineProperty(cooked, "raw", { value: raw }); } else { cooked.raw = raw; } + return cooked; + }; + + var __setModuleDefault = Object.create ? (function(o, v) { + Object.defineProperty(o, "default", { enumerable: true, value: v }); + }) : function(o, v) { + o["default"] = v; + }; + + var ownKeys = function(o) { + ownKeys = Object.getOwnPropertyNames || function (o) { + var ar = []; + for (var k in o) if (Object.prototype.hasOwnProperty.call(o, k)) ar[ar.length] = k; + return ar; + }; + return ownKeys(o); + }; + + __importStar = function (mod) { + if (mod && mod.__esModule) return mod; + var result = {}; + if (mod != null) for (var k = ownKeys(mod), i = 0; i < k.length; i++) if (k[i] !== "default") __createBinding(result, mod, k[i]); + __setModuleDefault(result, mod); + return result; + }; + + __importDefault = function (mod) { + return (mod && mod.__esModule) ? mod : { "default": mod }; + }; + + __classPrivateFieldGet = function (receiver, state, kind, f) { + if (kind === "a" && !f) throw new TypeError("Private accessor was defined without a getter"); + if (typeof state === "function" ? receiver !== state || !f : !state.has(receiver)) throw new TypeError("Cannot read private member from an object whose class did not declare it"); + return kind === "m" ? f : kind === "a" ? f.call(receiver) : f ? 
f.value : state.get(receiver); + }; + + __classPrivateFieldSet = function (receiver, state, value, kind, f) { + if (kind === "m") throw new TypeError("Private method is not writable"); + if (kind === "a" && !f) throw new TypeError("Private accessor was defined without a setter"); + if (typeof state === "function" ? receiver !== state || !f : !state.has(receiver)) throw new TypeError("Cannot write private member to an object whose class did not declare it"); + return (kind === "a" ? f.call(receiver, value) : f ? f.value = value : state.set(receiver, value)), value; + }; + + __classPrivateFieldIn = function (state, receiver) { + if (receiver === null || (typeof receiver !== "object" && typeof receiver !== "function")) throw new TypeError("Cannot use 'in' operator on non-object"); + return typeof state === "function" ? receiver === state : state.has(receiver); + }; + + __addDisposableResource = function (env, value, async) { + if (value !== null && value !== void 0) { + if (typeof value !== "object" && typeof value !== "function") throw new TypeError("Object expected."); + var dispose, inner; + if (async) { + if (!Symbol.asyncDispose) throw new TypeError("Symbol.asyncDispose is not defined."); + dispose = value[Symbol.asyncDispose]; + } + if (dispose === void 0) { + if (!Symbol.dispose) throw new TypeError("Symbol.dispose is not defined."); + dispose = value[Symbol.dispose]; + if (async) inner = dispose; + } + if (typeof dispose !== "function") throw new TypeError("Object not disposable."); + if (inner) dispose = function() { try { inner.call(this); } catch (e) { return Promise.reject(e); } }; + env.stack.push({ value: value, dispose: dispose, async: async }); + } + else if (async) { + env.stack.push({ async: true }); + } + return value; + }; + + var _SuppressedError = typeof SuppressedError === "function" ? 
SuppressedError : function (error, suppressed, message) { + var e = new Error(message); + return e.name = "SuppressedError", e.error = error, e.suppressed = suppressed, e; + }; + + __disposeResources = function (env) { + function fail(e) { + env.error = env.hasError ? new _SuppressedError(e, env.error, "An error was suppressed during disposal.") : e; + env.hasError = true; + } + var r, s = 0; + function next() { + while (r = env.stack.pop()) { + try { + if (!r.async && s === 1) return s = 0, env.stack.push(r), Promise.resolve().then(next); + if (r.dispose) { + var result = r.dispose.call(r.value); + if (r.async) return s |= 2, Promise.resolve(result).then(next, function(e) { fail(e); return next(); }); + } + else s |= 1; + } + catch (e) { + fail(e); + } + } + if (s === 1) return env.hasError ? Promise.reject(env.error) : Promise.resolve(); + if (env.hasError) throw env.error; + } + return next(); + }; + + __rewriteRelativeImportExtension = function (path, preserveJsx) { + if (typeof path === "string" && /^\.\.?\//.test(path)) { + return path.replace(/\.(tsx)$|((?:\.d)?)((?:\.[^./]+?)?)\.([cm]?)ts$/i, function (m, tsx, d, ext, cm) { + return tsx ? preserveJsx ? ".jsx" : ".js" : d && (!ext || !cm) ? m : (d + ext + "." 
+ cm.toLowerCase() + "js"); + }); + } + return path; + }; + + exporter("__extends", __extends); + exporter("__assign", __assign); + exporter("__rest", __rest); + exporter("__decorate", __decorate); + exporter("__param", __param); + exporter("__esDecorate", __esDecorate); + exporter("__runInitializers", __runInitializers); + exporter("__propKey", __propKey); + exporter("__setFunctionName", __setFunctionName); + exporter("__metadata", __metadata); + exporter("__awaiter", __awaiter); + exporter("__generator", __generator); + exporter("__exportStar", __exportStar); + exporter("__createBinding", __createBinding); + exporter("__values", __values); + exporter("__read", __read); + exporter("__spread", __spread); + exporter("__spreadArrays", __spreadArrays); + exporter("__spreadArray", __spreadArray); + exporter("__await", __await); + exporter("__asyncGenerator", __asyncGenerator); + exporter("__asyncDelegator", __asyncDelegator); + exporter("__asyncValues", __asyncValues); + exporter("__makeTemplateObject", __makeTemplateObject); + exporter("__importStar", __importStar); + exporter("__importDefault", __importDefault); + exporter("__classPrivateFieldGet", __classPrivateFieldGet); + exporter("__classPrivateFieldSet", __classPrivateFieldSet); + exporter("__classPrivateFieldIn", __classPrivateFieldIn); + exporter("__addDisposableResource", __addDisposableResource); + exporter("__disposeResources", __disposeResources); + exporter("__rewriteRelativeImportExtension", __rewriteRelativeImportExtension); +}); + +0 && (module.exports = { + __extends: __extends, + __assign: __assign, + __rest: __rest, + __decorate: __decorate, + __param: __param, + __esDecorate: __esDecorate, + __runInitializers: __runInitializers, + __propKey: __propKey, + __setFunctionName: __setFunctionName, + __metadata: __metadata, + __awaiter: __awaiter, + __generator: __generator, + __exportStar: __exportStar, + __createBinding: __createBinding, + __values: __values, + __read: __read, + __spread: __spread, + 
__spreadArrays: __spreadArrays, + __spreadArray: __spreadArray, + __await: __await, + __asyncGenerator: __asyncGenerator, + __asyncDelegator: __asyncDelegator, + __asyncValues: __asyncValues, + __makeTemplateObject: __makeTemplateObject, + __importStar: __importStar, + __importDefault: __importDefault, + __classPrivateFieldGet: __classPrivateFieldGet, + __classPrivateFieldSet: __classPrivateFieldSet, + __classPrivateFieldIn: __classPrivateFieldIn, + __addDisposableResource: __addDisposableResource, + __disposeResources: __disposeResources, + __rewriteRelativeImportExtension: __rewriteRelativeImportExtension, +}); diff --git a/amplify/functions/downloadDocument/node_modules/uuid/CHANGELOG.md b/amplify/functions/downloadDocument/node_modules/uuid/CHANGELOG.md new file mode 100644 index 0000000..0412ad8 --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/uuid/CHANGELOG.md @@ -0,0 +1,274 @@ +# Changelog + +All notable changes to this project will be documented in this file. See [standard-version](https://github.com/conventional-changelog/standard-version) for commit guidelines. + +## [9.0.1](https://github.com/uuidjs/uuid/compare/v9.0.0...v9.0.1) (2023-09-12) + +### build + +- Fix CI to work with Node.js 20.x + +## [9.0.0](https://github.com/uuidjs/uuid/compare/v8.3.2...v9.0.0) (2022-09-05) + +### ⚠ BREAKING CHANGES + +- Drop Node.js 10.x support. This library always aims at supporting one EOLed LTS release which by this time now is 12.x which has reached EOL 30 Apr 2022. + +- Remove the minified UMD build from the package. + + Minified code is hard to audit and since this is a widely used library it seems more appropriate nowadays to optimize for auditability than to ship a legacy module format that, at best, serves educational purposes nowadays. + + For production browser use cases, users should be using a bundler. 
For educational purposes, today's online sandboxes like replit.com offer convenient ways to load npm modules, so the use case for UMD through repos like UNPKG or jsDelivr has largely vanished. + +- Drop IE 11 and Safari 10 support. Drop support for browsers that don't correctly implement const/let and default arguments, and no longer transpile the browser build to ES2015. + + This also removes the fallback on msCrypto instead of the crypto API. + + Browser tests are run in the first supported version of each supported browser and in the latest (as of this commit) version available on Browserstack. + +### Features + +- optimize uuid.v1 by 1.3x uuid.v4 by 4.3x (430%) ([#597](https://github.com/uuidjs/uuid/issues/597)) ([3a033f6](https://github.com/uuidjs/uuid/commit/3a033f6bab6bb3780ece6d645b902548043280bc)) +- remove UMD build ([#645](https://github.com/uuidjs/uuid/issues/645)) ([e948a0f](https://github.com/uuidjs/uuid/commit/e948a0f22bf22f4619b27bd913885e478e20fe6f)), closes [#620](https://github.com/uuidjs/uuid/issues/620) +- use native crypto.randomUUID when available ([#600](https://github.com/uuidjs/uuid/issues/600)) ([c9e076c](https://github.com/uuidjs/uuid/commit/c9e076c852edad7e9a06baaa1d148cf4eda6c6c4)) + +### Bug Fixes + +- add Jest/jsdom compatibility ([#642](https://github.com/uuidjs/uuid/issues/642)) ([16f9c46](https://github.com/uuidjs/uuid/commit/16f9c469edf46f0786164cdf4dc980743984a6fd)) +- change default export to named function ([#545](https://github.com/uuidjs/uuid/issues/545)) ([c57bc5a](https://github.com/uuidjs/uuid/commit/c57bc5a9a0653273aa639cda9177ce52efabe42a)) +- handle error when parameter is not set in v3 and v5 ([#622](https://github.com/uuidjs/uuid/issues/622)) ([fcd7388](https://github.com/uuidjs/uuid/commit/fcd73881692d9fabb63872576ba28e30ff852091)) +- run npm audit fix ([#644](https://github.com/uuidjs/uuid/issues/644)) ([04686f5](https://github.com/uuidjs/uuid/commit/04686f54c5fed2cfffc1b619f4970c4bb8532353)) +- upgrading from 
uuid3 broken link ([#568](https://github.com/uuidjs/uuid/issues/568)) ([1c849da](https://github.com/uuidjs/uuid/commit/1c849da6e164259e72e18636726345b13a7eddd6)) + +### build + +- drop Node.js 8.x from babel transpile target ([#603](https://github.com/uuidjs/uuid/issues/603)) ([aa11485](https://github.com/uuidjs/uuid/commit/aa114858260402107ec8a1e1a825dea0a259bcb5)) +- drop support for legacy browsers (IE11, Safari 10) ([#604](https://github.com/uuidjs/uuid/issues/604)) ([0f433e5](https://github.com/uuidjs/uuid/commit/0f433e5ec444edacd53016de67db021102f36148)) + +- drop node 10.x to upgrade dev dependencies ([#653](https://github.com/uuidjs/uuid/issues/653)) ([28a5712](https://github.com/uuidjs/uuid/commit/28a571283f8abda6b9d85e689f95b7d3ee9e282e)), closes [#643](https://github.com/uuidjs/uuid/issues/643) + +### [8.3.2](https://github.com/uuidjs/uuid/compare/v8.3.1...v8.3.2) (2020-12-08) + +### Bug Fixes + +- lazy load getRandomValues ([#537](https://github.com/uuidjs/uuid/issues/537)) ([16c8f6d](https://github.com/uuidjs/uuid/commit/16c8f6df2f6b09b4d6235602d6a591188320a82e)), closes [#536](https://github.com/uuidjs/uuid/issues/536) + +### [8.3.1](https://github.com/uuidjs/uuid/compare/v8.3.0...v8.3.1) (2020-10-04) + +### Bug Fixes + +- support expo>=39.0.0 ([#515](https://github.com/uuidjs/uuid/issues/515)) ([c65a0f3](https://github.com/uuidjs/uuid/commit/c65a0f3fa73b901959d638d1e3591dfacdbed867)), closes [#375](https://github.com/uuidjs/uuid/issues/375) + +## [8.3.0](https://github.com/uuidjs/uuid/compare/v8.2.0...v8.3.0) (2020-07-27) + +### Features + +- add parse/stringify/validate/version/NIL APIs ([#479](https://github.com/uuidjs/uuid/issues/479)) ([0e6c10b](https://github.com/uuidjs/uuid/commit/0e6c10ba1bf9517796ff23c052fc0468eedfd5f4)), closes [#475](https://github.com/uuidjs/uuid/issues/475) [#478](https://github.com/uuidjs/uuid/issues/478) [#480](https://github.com/uuidjs/uuid/issues/480) [#481](https://github.com/uuidjs/uuid/issues/481) 
[#180](https://github.com/uuidjs/uuid/issues/180) + +## [8.2.0](https://github.com/uuidjs/uuid/compare/v8.1.0...v8.2.0) (2020-06-23) + +### Features + +- improve performance of v1 string representation ([#453](https://github.com/uuidjs/uuid/issues/453)) ([0ee0b67](https://github.com/uuidjs/uuid/commit/0ee0b67c37846529c66089880414d29f3ae132d5)) +- remove deprecated v4 string parameter ([#454](https://github.com/uuidjs/uuid/issues/454)) ([88ce3ca](https://github.com/uuidjs/uuid/commit/88ce3ca0ba046f60856de62c7ce03f7ba98ba46c)), closes [#437](https://github.com/uuidjs/uuid/issues/437) +- support jspm ([#473](https://github.com/uuidjs/uuid/issues/473)) ([e9f2587](https://github.com/uuidjs/uuid/commit/e9f2587a92575cac31bc1d4ae944e17c09756659)) + +### Bug Fixes + +- prepare package exports for webpack 5 ([#468](https://github.com/uuidjs/uuid/issues/468)) ([8d6e6a5](https://github.com/uuidjs/uuid/commit/8d6e6a5f8965ca9575eb4d92e99a43435f4a58a8)) + +## [8.1.0](https://github.com/uuidjs/uuid/compare/v8.0.0...v8.1.0) (2020-05-20) + +### Features + +- improve v4 performance by reusing random number array ([#435](https://github.com/uuidjs/uuid/issues/435)) ([bf4af0d](https://github.com/uuidjs/uuid/commit/bf4af0d711b4d2ed03d1f74fd12ad0baa87dc79d)) +- optimize V8 performance of bytesToUuid ([#434](https://github.com/uuidjs/uuid/issues/434)) ([e156415](https://github.com/uuidjs/uuid/commit/e156415448ec1af2351fa0b6660cfb22581971f2)) + +### Bug Fixes + +- export package.json required by react-native and bundlers ([#449](https://github.com/uuidjs/uuid/issues/449)) ([be1c8fe](https://github.com/uuidjs/uuid/commit/be1c8fe9a3206c358e0059b52fafd7213aa48a52)), closes [ai/nanoevents#44](https://github.com/ai/nanoevents/issues/44#issuecomment-602010343) [#444](https://github.com/uuidjs/uuid/issues/444) + +## [8.0.0](https://github.com/uuidjs/uuid/compare/v7.0.3...v8.0.0) (2020-04-29) + +### ⚠ BREAKING CHANGES + +- For native ECMAScript Module (ESM) usage in Node.js only named exports are 
exposed, there is no more default export. + + ```diff + -import uuid from 'uuid'; + -console.log(uuid.v4()); // -> 'cd6c3b08-0adc-4f4b-a6ef-36087a1c9869' + +import { v4 as uuidv4 } from 'uuid'; + +uuidv4(); // ⇨ '9b1deb4d-3b7d-4bad-9bdd-2b0d7b3dcb6d' + ``` + +- Deep requiring specific algorithms of this library like `require('uuid/v4')`, which has been deprecated in `uuid@7`, is no longer supported. + + Instead use the named exports that this module exports. + + For ECMAScript Modules (ESM): + + ```diff + -import uuidv4 from 'uuid/v4'; + +import { v4 as uuidv4 } from 'uuid'; + uuidv4(); + ``` + + For CommonJS: + + ```diff + -const uuidv4 = require('uuid/v4'); + +const { v4: uuidv4 } = require('uuid'); + uuidv4(); + ``` + +### Features + +- native Node.js ES Modules (wrapper approach) ([#423](https://github.com/uuidjs/uuid/issues/423)) ([2d9f590](https://github.com/uuidjs/uuid/commit/2d9f590ad9701d692625c07ed62f0a0f91227991)), closes [#245](https://github.com/uuidjs/uuid/issues/245) [#419](https://github.com/uuidjs/uuid/issues/419) [#342](https://github.com/uuidjs/uuid/issues/342) +- remove deep requires ([#426](https://github.com/uuidjs/uuid/issues/426)) ([daf72b8](https://github.com/uuidjs/uuid/commit/daf72b84ceb20272a81bb5fbddb05dd95922cbba)) + +### Bug Fixes + +- add CommonJS syntax example to README quickstart section ([#417](https://github.com/uuidjs/uuid/issues/417)) ([e0ec840](https://github.com/uuidjs/uuid/commit/e0ec8402c7ad44b7ef0453036c612f5db513fda0)) + +### [7.0.3](https://github.com/uuidjs/uuid/compare/v7.0.2...v7.0.3) (2020-03-31) + +### Bug Fixes + +- make deep require deprecation warning work in browsers ([#409](https://github.com/uuidjs/uuid/issues/409)) ([4b71107](https://github.com/uuidjs/uuid/commit/4b71107d8c0d2ef56861ede6403fc9dc35a1e6bf)), closes [#408](https://github.com/uuidjs/uuid/issues/408) + +### [7.0.2](https://github.com/uuidjs/uuid/compare/v7.0.1...v7.0.2) (2020-03-04) + +### Bug Fixes + +- make access to msCrypto consistent 
([#393](https://github.com/uuidjs/uuid/issues/393)) ([8bf2a20](https://github.com/uuidjs/uuid/commit/8bf2a20f3565df743da7215eebdbada9d2df118c)) +- simplify link in deprecation warning ([#391](https://github.com/uuidjs/uuid/issues/391)) ([bb2c8e4](https://github.com/uuidjs/uuid/commit/bb2c8e4e9f4c5f9c1eaaf3ea59710c633cd90cb7)) +- update links to match content in readme ([#386](https://github.com/uuidjs/uuid/issues/386)) ([44f2f86](https://github.com/uuidjs/uuid/commit/44f2f86e9d2bbf14ee5f0f00f72a3db1292666d4)) + +### [7.0.1](https://github.com/uuidjs/uuid/compare/v7.0.0...v7.0.1) (2020-02-25) + +### Bug Fixes + +- clean up esm builds for node and browser ([#383](https://github.com/uuidjs/uuid/issues/383)) ([59e6a49](https://github.com/uuidjs/uuid/commit/59e6a49e7ce7b3e8fb0f3ee52b9daae72af467dc)) +- provide browser versions independent from module system ([#380](https://github.com/uuidjs/uuid/issues/380)) ([4344a22](https://github.com/uuidjs/uuid/commit/4344a22e7aed33be8627eeaaf05360f256a21753)), closes [#378](https://github.com/uuidjs/uuid/issues/378) + +## [7.0.0](https://github.com/uuidjs/uuid/compare/v3.4.0...v7.0.0) (2020-02-24) + +### ⚠ BREAKING CHANGES + +- The default export, which used to be the v4() method but which was already discouraged in v3.x of this library, has been removed. +- Explicitly note that deep imports of the different uuid version functions are deprecated and no longer encouraged and that ECMAScript module named imports should be used instead. Emit a deprecation warning for people who deep-require the different algorithm variants. +- Remove builtin support for insecure random number generators in the browser. Users who want that will have to supply their own random number generator function. +- Remove support for generating v3 and v5 UUIDs in Node.js<4.x +- Convert code base to ECMAScript Modules (ESM) and release CommonJS build for node and ESM build for browser bundlers. 
+ +### Features + +- add UMD build to npm package ([#357](https://github.com/uuidjs/uuid/issues/357)) ([4e75adf](https://github.com/uuidjs/uuid/commit/4e75adf435196f28e3fbbe0185d654b5ded7ca2c)), closes [#345](https://github.com/uuidjs/uuid/issues/345) +- add various es module and CommonJS examples ([b238510](https://github.com/uuidjs/uuid/commit/b238510bf352463521f74bab175a3af9b7a42555)) +- ensure that docs are up-to-date in CI ([ee5e77d](https://github.com/uuidjs/uuid/commit/ee5e77db547474f5a8f23d6c857a6d399209986b)) +- hybrid CommonJS & ECMAScript modules build ([a3f078f](https://github.com/uuidjs/uuid/commit/a3f078faa0baff69ab41aed08e041f8f9c8993d0)) +- remove insecure fallback random number generator ([3a5842b](https://github.com/uuidjs/uuid/commit/3a5842b141a6e5de0ae338f391661e6b84b167c9)), closes [#173](https://github.com/uuidjs/uuid/issues/173) +- remove support for pre Node.js v4 Buffer API ([#356](https://github.com/uuidjs/uuid/issues/356)) ([b59b5c5](https://github.com/uuidjs/uuid/commit/b59b5c5ecad271c5453f1a156f011671f6d35627)) +- rename repository to github:uuidjs/uuid ([#351](https://github.com/uuidjs/uuid/issues/351)) ([c37a518](https://github.com/uuidjs/uuid/commit/c37a518e367ac4b6d0aa62dba1bc6ce9e85020f7)), closes [#338](https://github.com/uuidjs/uuid/issues/338) + +### Bug Fixes + +- add deep-require proxies for local testing and adjust tests ([#365](https://github.com/uuidjs/uuid/issues/365)) ([7fedc79](https://github.com/uuidjs/uuid/commit/7fedc79ac8fda4bfd1c566c7f05ef4ac13b2db48)) +- add note about removal of default export ([#372](https://github.com/uuidjs/uuid/issues/372)) ([12749b7](https://github.com/uuidjs/uuid/commit/12749b700eb49db8a9759fd306d8be05dbfbd58c)), closes [#370](https://github.com/uuidjs/uuid/issues/370) +- deprecated deep requiring of the different algorithm versions ([#361](https://github.com/uuidjs/uuid/issues/361)) ([c0bdf15](https://github.com/uuidjs/uuid/commit/c0bdf15e417639b1aeb0b247b2fb11f7a0a26b23)) + +## 
[3.4.0](https://github.com/uuidjs/uuid/compare/v3.3.3...v3.4.0) (2020-01-16) + +### Features + +- rename repository to github:uuidjs/uuid ([#351](https://github.com/uuidjs/uuid/issues/351)) ([e2d7314](https://github.com/uuidjs/uuid/commit/e2d7314)), closes [#338](https://github.com/uuidjs/uuid/issues/338) + +## [3.3.3](https://github.com/uuidjs/uuid/compare/v3.3.2...v3.3.3) (2019-08-19) + +### Bug Fixes + +- no longer run ci tests on node v4 +- upgrade dependencies + +## [3.3.2](https://github.com/uuidjs/uuid/compare/v3.3.1...v3.3.2) (2018-06-28) + +### Bug Fixes + +- typo ([305d877](https://github.com/uuidjs/uuid/commit/305d877)) + +## [3.3.1](https://github.com/uuidjs/uuid/compare/v3.3.0...v3.3.1) (2018-06-28) + +### Bug Fixes + +- fix [#284](https://github.com/uuidjs/uuid/issues/284) by setting function name in try-catch ([f2a60f2](https://github.com/uuidjs/uuid/commit/f2a60f2)) + +# [3.3.0](https://github.com/uuidjs/uuid/compare/v3.2.1...v3.3.0) (2018-06-22) + +### Bug Fixes + +- assignment to readonly property to allow running in strict mode ([#270](https://github.com/uuidjs/uuid/issues/270)) ([d062fdc](https://github.com/uuidjs/uuid/commit/d062fdc)) +- fix [#229](https://github.com/uuidjs/uuid/issues/229) ([c9684d4](https://github.com/uuidjs/uuid/commit/c9684d4)) +- Get correct version of IE11 crypto ([#274](https://github.com/uuidjs/uuid/issues/274)) ([153d331](https://github.com/uuidjs/uuid/commit/153d331)) +- mem issue when generating uuid ([#267](https://github.com/uuidjs/uuid/issues/267)) ([c47702c](https://github.com/uuidjs/uuid/commit/c47702c)) + +### Features + +- enforce Conventional Commit style commit messages ([#282](https://github.com/uuidjs/uuid/issues/282)) ([cc9a182](https://github.com/uuidjs/uuid/commit/cc9a182)) + +## [3.2.1](https://github.com/uuidjs/uuid/compare/v3.2.0...v3.2.1) (2018-01-16) + +### Bug Fixes + +- use msCrypto if available. 
Fixes [#241](https://github.com/uuidjs/uuid/issues/241) ([#247](https://github.com/uuidjs/uuid/issues/247)) ([1fef18b](https://github.com/uuidjs/uuid/commit/1fef18b)) + +# [3.2.0](https://github.com/uuidjs/uuid/compare/v3.1.0...v3.2.0) (2018-01-16) + +### Bug Fixes + +- remove mistakenly added typescript dependency, rollback version (standard-version will auto-increment) ([09fa824](https://github.com/uuidjs/uuid/commit/09fa824)) +- use msCrypto if available. Fixes [#241](https://github.com/uuidjs/uuid/issues/241) ([#247](https://github.com/uuidjs/uuid/issues/247)) ([1fef18b](https://github.com/uuidjs/uuid/commit/1fef18b)) + +### Features + +- Add v3 Support ([#217](https://github.com/uuidjs/uuid/issues/217)) ([d94f726](https://github.com/uuidjs/uuid/commit/d94f726)) + +# [3.1.0](https://github.com/uuidjs/uuid/compare/v3.1.0...v3.0.1) (2017-06-17) + +### Bug Fixes + +- (fix) Add .npmignore file to exclude test/ and other non-essential files from packing. (#183) +- Fix typo (#178) +- Simple typo fix (#165) + +### Features + +- v5 support in CLI (#197) +- V5 support (#188) + +# 3.0.1 (2016-11-28) + +- split uuid versions into separate files + +# 3.0.0 (2016-11-17) + +- remove .parse and .unparse + +# 2.0.0 + +- Removed uuid.BufferClass + +# 1.4.0 + +- Improved module context detection +- Removed public RNG functions + +# 1.3.2 + +- Improve tests and handling of v1() options (Issue #24) +- Expose RNG option to allow for perf testing with different generators + +# 1.3.0 + +- Support for version 1 ids, thanks to [@ctavan](https://github.com/ctavan)! 
+- Support for node.js crypto API +- De-emphasizing performance in favor of a) cryptographic quality PRNGs where available and b) more manageable code diff --git a/amplify/functions/downloadDocument/node_modules/uuid/CONTRIBUTING.md b/amplify/functions/downloadDocument/node_modules/uuid/CONTRIBUTING.md new file mode 100644 index 0000000..4a4503d --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/uuid/CONTRIBUTING.md @@ -0,0 +1,18 @@ +# Contributing + +Please feel free to file GitHub Issues or propose Pull Requests. We're always happy to discuss improvements to this library! + +## Testing + +```shell +npm test +``` + +## Releasing + +Releases are supposed to be done from master, version bumping is automated through [`standard-version`](https://github.com/conventional-changelog/standard-version): + +```shell +npm run release -- --dry-run # verify output manually +npm run release # follow the instructions from the output of this command +``` diff --git a/amplify/functions/downloadDocument/node_modules/uuid/LICENSE.md b/amplify/functions/downloadDocument/node_modules/uuid/LICENSE.md new file mode 100644 index 0000000..3934168 --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/uuid/LICENSE.md @@ -0,0 +1,9 @@ +The MIT License (MIT) + +Copyright (c) 2010-2020 Robert Kieffer and other contributors + +Permission is hereby granted, free of charge, to any person obtaining a copy of this software and associated documentation files (the "Software"), to deal in the Software without restriction, including without limitation the rights to use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of the Software, and to permit persons to whom the Software is furnished to do so, subject to the following conditions: + +The above copyright notice and this permission notice shall be included in all copies or substantial portions of the Software. 
+ +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. diff --git a/amplify/functions/downloadDocument/node_modules/uuid/README.md b/amplify/functions/downloadDocument/node_modules/uuid/README.md new file mode 100644 index 0000000..4f51e09 --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/uuid/README.md @@ -0,0 +1,466 @@ + + + +# uuid [![CI](https://github.com/uuidjs/uuid/workflows/CI/badge.svg)](https://github.com/uuidjs/uuid/actions?query=workflow%3ACI) [![Browser](https://github.com/uuidjs/uuid/workflows/Browser/badge.svg)](https://github.com/uuidjs/uuid/actions?query=workflow%3ABrowser) + +For the creation of [RFC4122](https://www.ietf.org/rfc/rfc4122.txt) UUIDs + +- **Complete** - Support for RFC4122 version 1, 3, 4, and 5 UUIDs +- **Cross-platform** - Support for ... + - CommonJS, [ECMAScript Modules](#ecmascript-modules) and [CDN builds](#cdn-builds) + - NodeJS 12+ ([LTS releases](https://github.com/nodejs/Release)) + - Chrome, Safari, Firefox, Edge browsers + - Webpack and rollup.js module bundlers + - [React Native / Expo](#react-native--expo) +- **Secure** - Cryptographically-strong random values +- **Small** - Zero-dependency, small footprint, plays nice with "tree shaking" packagers +- **CLI** - Includes the [`uuid` command line](#command-line) utility + +> **Note** Upgrading from `uuid@3`? Your code is probably okay, but check out [Upgrading From `uuid@3`](#upgrading-from-uuid3) for details. + +> **Note** Only interested in creating a version 4 UUID? 
You might be able to use [`crypto.randomUUID()`](https://developer.mozilla.org/en-US/docs/Web/API/Crypto/randomUUID), eliminating the need to install this library. + +## Quickstart + +To create a random UUID... + +**1. Install** + +```shell +npm install uuid +``` + +**2. Create a UUID** (ES6 module syntax) + +```javascript +import { v4 as uuidv4 } from 'uuid'; +uuidv4(); // ⇨ '9b1deb4d-3b7d-4bad-9bdd-2b0d7b3dcb6d' +``` + +... or using CommonJS syntax: + +```javascript +const { v4: uuidv4 } = require('uuid'); +uuidv4(); // ⇨ '1b9d6bcd-bbfd-4b2d-9b5d-ab8dfbbd4bed' +``` + +For timestamp UUIDs, namespace UUIDs, and other options read on ... + +## API Summary + +| | | | +| --- | --- | --- | +| [`uuid.NIL`](#uuidnil) | The nil UUID string (all zeros) | New in `uuid@8.3` | +| [`uuid.parse()`](#uuidparsestr) | Convert UUID string to array of bytes | New in `uuid@8.3` | +| [`uuid.stringify()`](#uuidstringifyarr-offset) | Convert array of bytes to UUID string | New in `uuid@8.3` | +| [`uuid.v1()`](#uuidv1options-buffer-offset) | Create a version 1 (timestamp) UUID | | +| [`uuid.v3()`](#uuidv3name-namespace-buffer-offset) | Create a version 3 (namespace w/ MD5) UUID | | +| [`uuid.v4()`](#uuidv4options-buffer-offset) | Create a version 4 (random) UUID | | +| [`uuid.v5()`](#uuidv5name-namespace-buffer-offset) | Create a version 5 (namespace w/ SHA-1) UUID | | +| [`uuid.validate()`](#uuidvalidatestr) | Test a string to see if it is a valid UUID | New in `uuid@8.3` | +| [`uuid.version()`](#uuidversionstr) | Detect RFC version of a UUID | New in `uuid@8.3` | + +## API + +### uuid.NIL + +The nil UUID string (all zeros). 
+ +Example: + +```javascript +import { NIL as NIL_UUID } from 'uuid'; + +NIL_UUID; // ⇨ '00000000-0000-0000-0000-000000000000' +``` + +### uuid.parse(str) + +Convert UUID string to array of bytes + +| | | +| --------- | ---------------------------------------- | +| `str` | A valid UUID `String` | +| _returns_ | `Uint8Array[16]` | +| _throws_ | `TypeError` if `str` is not a valid UUID | + +Note: Ordering of values in the byte arrays used by `parse()` and `stringify()` follows the left ↠ right order of hex-pairs in UUID strings. As shown in the example below. + +Example: + +```javascript +import { parse as uuidParse } from 'uuid'; + +// Parse a UUID +const bytes = uuidParse('6ec0bd7f-11c0-43da-975e-2a8ad9ebae0b'); + +// Convert to hex strings to show byte order (for documentation purposes) +[...bytes].map((v) => v.toString(16).padStart(2, '0')); // ⇨ + // [ + // '6e', 'c0', 'bd', '7f', + // '11', 'c0', '43', 'da', + // '97', '5e', '2a', '8a', + // 'd9', 'eb', 'ae', '0b' + // ] +``` + +### uuid.stringify(arr[, offset]) + +Convert array of bytes to UUID string + +| | | +| -------------- | ---------------------------------------------------------------------------- | +| `arr` | `Array`-like collection of 16 values (starting from `offset`) between 0-255. | +| [`offset` = 0] | `Number` Starting index in the Array | +| _returns_ | `String` | +| _throws_ | `TypeError` if a valid UUID string cannot be generated | + +Note: Ordering of values in the byte arrays used by `parse()` and `stringify()` follows the left ↠ right order of hex-pairs in UUID strings. As shown in the example below. 
+ +Example: + +```javascript +import { stringify as uuidStringify } from 'uuid'; + +const uuidBytes = [ + 0x6e, 0xc0, 0xbd, 0x7f, 0x11, 0xc0, 0x43, 0xda, 0x97, 0x5e, 0x2a, 0x8a, 0xd9, 0xeb, 0xae, 0x0b, +]; + +uuidStringify(uuidBytes); // ⇨ '6ec0bd7f-11c0-43da-975e-2a8ad9ebae0b' +``` + +### uuid.v1([options[, buffer[, offset]]]) + +Create an RFC version 1 (timestamp) UUID + +| | | +| --- | --- | +| [`options`] | `Object` with one or more of the following properties: | +| [`options.node` ] | RFC "node" field as an `Array[6]` of byte values (per 4.1.6) | +| [`options.clockseq`] | RFC "clock sequence" as a `Number` between 0 - 0x3fff | +| [`options.msecs`] | RFC "timestamp" field (`Number` of milliseconds, unix epoch) | +| [`options.nsecs`] | RFC "timestamp" field (`Number` of nanoseconds to add to `msecs`, should be 0-10,000) | +| [`options.random`] | `Array` of 16 random bytes (0-255) | +| [`options.rng`] | Alternative to `options.random`, a `Function` that returns an `Array` of 16 random bytes (0-255) | +| [`buffer`] | `Array \| Buffer` If specified, uuid will be written here in byte-form, starting at `offset` | +| [`offset` = 0] | `Number` Index to start writing UUID bytes in `buffer` | +| _returns_ | UUID `String` if no `buffer` is specified, otherwise returns `buffer` | +| _throws_ | `Error` if more than 10M UUIDs/sec are requested | + +Note: The default [node id](https://tools.ietf.org/html/rfc4122#section-4.1.6) (the last 12 digits in the UUID) is generated once, randomly, on process startup, and then remains unchanged for the duration of the process. + +Note: `options.random` and `options.rng` are only meaningful on the very first call to `v1()`, where they may be passed to initialize the internal `node` and `clockseq` fields. 
+ +Example: + +```javascript +import { v1 as uuidv1 } from 'uuid'; + +uuidv1(); // ⇨ '2c5ea4c0-4067-11e9-8bad-9b1deb4d3b7d' +``` + +Example using `options`: + +```javascript +import { v1 as uuidv1 } from 'uuid'; + +const v1options = { + node: [0x01, 0x23, 0x45, 0x67, 0x89, 0xab], + clockseq: 0x1234, + msecs: new Date('2011-11-01').getTime(), + nsecs: 5678, +}; +uuidv1(v1options); // ⇨ '710b962e-041c-11e1-9234-0123456789ab' +``` + +### uuid.v3(name, namespace[, buffer[, offset]]) + +Create an RFC version 3 (namespace w/ MD5) UUID + +API is identical to `v5()`, but uses "v3" instead. + +⚠️ Note: Per the RFC, "_If backward compatibility is not an issue, SHA-1 [Version 5] is preferred_." + +### uuid.v4([options[, buffer[, offset]]]) + +Create an RFC version 4 (random) UUID + +| | | +| --- | --- | +| [`options`] | `Object` with one or more of the following properties: | +| [`options.random`] | `Array` of 16 random bytes (0-255) | +| [`options.rng`] | Alternative to `options.random`, a `Function` that returns an `Array` of 16 random bytes (0-255) | +| [`buffer`] | `Array \| Buffer` If specified, uuid will be written here in byte-form, starting at `offset` | +| [`offset` = 0] | `Number` Index to start writing UUID bytes in `buffer` | +| _returns_ | UUID `String` if no `buffer` is specified, otherwise returns `buffer` | + +Example: + +```javascript +import { v4 as uuidv4 } from 'uuid'; + +uuidv4(); // ⇨ '1b9d6bcd-bbfd-4b2d-9b5d-ab8dfbbd4bed' +``` + +Example using predefined `random` values: + +```javascript +import { v4 as uuidv4 } from 'uuid'; + +const v4options = { + random: [ + 0x10, 0x91, 0x56, 0xbe, 0xc4, 0xfb, 0xc1, 0xea, 0x71, 0xb4, 0xef, 0xe1, 0x67, 0x1c, 0x58, 0x36, + ], +}; +uuidv4(v4options); // ⇨ '109156be-c4fb-41ea-b1b4-efe1671c5836' +``` + +### uuid.v5(name, namespace[, buffer[, offset]]) + +Create an RFC version 5 (namespace w/ SHA-1) UUID + +| | | +| --- | --- | +| `name` | `String \| Array` | +| `namespace` | `String \| Array[16]` Namespace UUID | +| 
[`buffer`] | `Array \| Buffer` If specified, uuid will be written here in byte-form, starting at `offset` | +| [`offset` = 0] | `Number` Index to start writing UUID bytes in `buffer` | +| _returns_ | UUID `String` if no `buffer` is specified, otherwise returns `buffer` | + +Note: The RFC `DNS` and `URL` namespaces are available as `v5.DNS` and `v5.URL`. + +Example with custom namespace: + +```javascript +import { v5 as uuidv5 } from 'uuid'; + +// Define a custom namespace. Readers, create your own using something like +// https://www.uuidgenerator.net/ +const MY_NAMESPACE = '1b671a64-40d5-491e-99b0-da01ff1f3341'; + +uuidv5('Hello, World!', MY_NAMESPACE); // ⇨ '630eb68f-e0fa-5ecc-887a-7c7a62614681' +``` + +Example with RFC `URL` namespace: + +```javascript +import { v5 as uuidv5 } from 'uuid'; + +uuidv5('https://www.w3.org/', uuidv5.URL); // ⇨ 'c106a26a-21bb-5538-8bf2-57095d1976c1' +``` + +### uuid.validate(str) + +Test a string to see if it is a valid UUID + +| | | +| --------- | --------------------------------------------------- | +| `str` | `String` to validate | +| _returns_ | `true` if string is a valid UUID, `false` otherwise | + +Example: + +```javascript +import { validate as uuidValidate } from 'uuid'; + +uuidValidate('not a UUID'); // ⇨ false +uuidValidate('6ec0bd7f-11c0-43da-975e-2a8ad9ebae0b'); // ⇨ true +``` + +Using `validate` and `version` together it is possible to do per-version validation, e.g. validate for only v4 UUIds. 
+ +```javascript +import { version as uuidVersion } from 'uuid'; +import { validate as uuidValidate } from 'uuid'; + +function uuidValidateV4(uuid) { + return uuidValidate(uuid) && uuidVersion(uuid) === 4; +} + +const v1Uuid = 'd9428888-122b-11e1-b85c-61cd3cbb3210'; +const v4Uuid = '109156be-c4fb-41ea-b1b4-efe1671c5836'; + +uuidValidateV4(v4Uuid); // ⇨ true +uuidValidateV4(v1Uuid); // ⇨ false +``` + +### uuid.version(str) + +Detect RFC version of a UUID + +| | | +| --------- | ---------------------------------------- | +| `str` | A valid UUID `String` | +| _returns_ | `Number` The RFC version of the UUID | +| _throws_ | `TypeError` if `str` is not a valid UUID | + +Example: + +```javascript +import { version as uuidVersion } from 'uuid'; + +uuidVersion('45637ec4-c85f-11ea-87d0-0242ac130003'); // ⇨ 1 +uuidVersion('6ec0bd7f-11c0-43da-975e-2a8ad9ebae0b'); // ⇨ 4 +``` + +## Command Line + +UUIDs can be generated from the command line using `uuid`. + +```shell +$ npx uuid +ddeb27fb-d9a0-4624-be4d-4615062daed4 +``` + +The default is to generate version 4 UUIDS, however the other versions are supported. Type `uuid --help` for details: + +```shell +$ npx uuid --help + +Usage: + uuid + uuid v1 + uuid v3 + uuid v4 + uuid v5 + uuid --help + +Note: may be "URL" or "DNS" to use the corresponding UUIDs +defined by RFC4122 +``` + +## ECMAScript Modules + +This library comes with [ECMAScript Modules](https://www.ecma-international.org/ecma-262/6.0/#sec-modules) (ESM) support for Node.js versions that support it ([example](./examples/node-esmodules/)) as well as bundlers like [rollup.js](https://rollupjs.org/guide/en/#tree-shaking) ([example](./examples/browser-rollup/)) and [webpack](https://webpack.js.org/guides/tree-shaking/) ([example](./examples/browser-webpack/)) (targeting both, Node.js and browser environments). 
+ +```javascript +import { v4 as uuidv4 } from 'uuid'; +uuidv4(); // ⇨ '1b9d6bcd-bbfd-4b2d-9b5d-ab8dfbbd4bed' +``` + +To run the examples you must first create a dist build of this library in the module root: + +```shell +npm run build +``` + +## CDN Builds + +### ECMAScript Modules + +To load this module directly into modern browsers that [support loading ECMAScript Modules](https://caniuse.com/#feat=es6-module) you can make use of [jspm](https://jspm.org/): + +```html + +``` + +### UMD + +As of `uuid@9` [UMD (Universal Module Definition)](https://github.com/umdjs/umd) builds are no longer shipped with this library. + +If you need a UMD build of this library, use a bundler like Webpack or Rollup. Alternatively, refer to the documentation of [`uuid@8.3.2`](https://github.com/uuidjs/uuid/blob/v8.3.2/README.md#umd) which was the last version that shipped UMD builds. + +## Known issues + +### Duplicate UUIDs (Googlebot) + +This module may generate duplicate UUIDs when run in clients with _deterministic_ random number generators, such as [Googlebot crawlers](https://developers.google.com/search/docs/advanced/crawling/overview-google-crawlers). This can cause problems for apps that expect client-generated UUIDs to always be unique. Developers should be prepared for this and have a strategy for dealing with possible collisions, such as: + +- Check for duplicate UUIDs, fail gracefully +- Disable write operations for Googlebot clients + +### "getRandomValues() not supported" + +This error occurs in environments where the standard [`crypto.getRandomValues()`](https://developer.mozilla.org/en-US/docs/Web/API/Crypto/getRandomValues) API is not supported. This issue can be resolved by adding an appropriate polyfill: + +### React Native / Expo + +1. Install [`react-native-get-random-values`](https://github.com/LinusU/react-native-get-random-values#readme) +1. Import it _before_ `uuid`. 
Since `uuid` might also appear as a transitive dependency of some other imports it's safest to just import `react-native-get-random-values` as the very first thing in your entry point: + +```javascript +import 'react-native-get-random-values'; +import { v4 as uuidv4 } from 'uuid'; +``` + +Note: If you are using Expo, you must be using at least `react-native-get-random-values@1.5.0` and `expo@39.0.0`. + +### Web Workers / Service Workers (Edge <= 18) + +[In Edge <= 18, Web Crypto is not supported in Web Workers or Service Workers](https://caniuse.com/#feat=cryptography) and we are not aware of a polyfill (let us know if you find one, please). + +### IE 11 (Internet Explorer) + +Support for IE11 and other legacy browsers has been dropped as of `uuid@9`. If you need to support legacy browsers, you can always transpile the uuid module source yourself (e.g. using [Babel](https://babeljs.io/)). + +## Upgrading From `uuid@7` + +### Only Named Exports Supported When Using with Node.js ESM + +`uuid@7` did not come with native ECMAScript Module (ESM) support for Node.js. Importing it in Node.js ESM consequently imported the CommonJS source with a default export. This library now comes with true Node.js ESM support and only provides named exports. + +Instead of doing: + +```javascript +import uuid from 'uuid'; +uuid.v4(); +``` + +you will now have to use the named exports: + +```javascript +import { v4 as uuidv4 } from 'uuid'; +uuidv4(); +``` + +### Deep Requires No Longer Supported + +Deep requires like `require('uuid/v4')` [which have been deprecated in `uuid@7`](#deep-requires-now-deprecated) are no longer supported. + +## Upgrading From `uuid@3` + +"_Wait... what happened to `uuid@4` thru `uuid@6`?!?_" + +In order to avoid confusion with RFC [version 4](#uuidv4options-buffer-offset) and [version 5](#uuidv5name-namespace-buffer-offset) UUIDs, and a possible [version 6](http://gh.peabody.io/uuidv6/), releases 4 thru 6 of this module have been skipped. 
+ +### Deep Requires Now Deprecated + +`uuid@3` encouraged the use of deep requires to minimize the bundle size of browser builds: + +```javascript +const uuidv4 = require('uuid/v4'); // <== NOW DEPRECATED! +uuidv4(); +``` + +As of `uuid@7` this library now provides ECMAScript modules builds, which allow packagers like Webpack and Rollup to do "tree-shaking" to remove dead code. Instead, use the `import` syntax: + +```javascript +import { v4 as uuidv4 } from 'uuid'; +uuidv4(); +``` + +... or for CommonJS: + +```javascript +const { v4: uuidv4 } = require('uuid'); +uuidv4(); +``` + +### Default Export Removed + +`uuid@3` was exporting the Version 4 UUID method as a default export: + +```javascript +const uuid = require('uuid'); // <== REMOVED! +``` + +This usage pattern was already discouraged in `uuid@3` and has been removed in `uuid@7`. + +--- + +Markdown generated from [README_js.md](README_js.md) by diff --git a/amplify/functions/downloadDocument/node_modules/uuid/dist/bin/uuid b/amplify/functions/downloadDocument/node_modules/uuid/dist/bin/uuid new file mode 100755 index 0000000..f38d2ee --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/uuid/dist/bin/uuid @@ -0,0 +1,2 @@ +#!/usr/bin/env node +require('../uuid-bin'); diff --git a/amplify/functions/downloadDocument/node_modules/uuid/dist/commonjs-browser/index.js b/amplify/functions/downloadDocument/node_modules/uuid/dist/commonjs-browser/index.js new file mode 100644 index 0000000..5586dd3 --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/uuid/dist/commonjs-browser/index.js @@ -0,0 +1,79 @@ +"use strict"; + +Object.defineProperty(exports, "__esModule", { + value: true +}); +Object.defineProperty(exports, "NIL", { + enumerable: true, + get: function get() { + return _nil.default; + } +}); +Object.defineProperty(exports, "parse", { + enumerable: true, + get: function get() { + return _parse.default; + } +}); +Object.defineProperty(exports, "stringify", { + enumerable: true, + 
get: function get() { + return _stringify.default; + } +}); +Object.defineProperty(exports, "v1", { + enumerable: true, + get: function get() { + return _v.default; + } +}); +Object.defineProperty(exports, "v3", { + enumerable: true, + get: function get() { + return _v2.default; + } +}); +Object.defineProperty(exports, "v4", { + enumerable: true, + get: function get() { + return _v3.default; + } +}); +Object.defineProperty(exports, "v5", { + enumerable: true, + get: function get() { + return _v4.default; + } +}); +Object.defineProperty(exports, "validate", { + enumerable: true, + get: function get() { + return _validate.default; + } +}); +Object.defineProperty(exports, "version", { + enumerable: true, + get: function get() { + return _version.default; + } +}); + +var _v = _interopRequireDefault(require("./v1.js")); + +var _v2 = _interopRequireDefault(require("./v3.js")); + +var _v3 = _interopRequireDefault(require("./v4.js")); + +var _v4 = _interopRequireDefault(require("./v5.js")); + +var _nil = _interopRequireDefault(require("./nil.js")); + +var _version = _interopRequireDefault(require("./version.js")); + +var _validate = _interopRequireDefault(require("./validate.js")); + +var _stringify = _interopRequireDefault(require("./stringify.js")); + +var _parse = _interopRequireDefault(require("./parse.js")); + +function _interopRequireDefault(obj) { return obj && obj.__esModule ? 
obj : { default: obj }; } \ No newline at end of file diff --git a/amplify/functions/downloadDocument/node_modules/uuid/dist/commonjs-browser/md5.js b/amplify/functions/downloadDocument/node_modules/uuid/dist/commonjs-browser/md5.js new file mode 100644 index 0000000..7a4582a --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/uuid/dist/commonjs-browser/md5.js @@ -0,0 +1,223 @@ +"use strict"; + +Object.defineProperty(exports, "__esModule", { + value: true +}); +exports.default = void 0; + +/* + * Browser-compatible JavaScript MD5 + * + * Modification of JavaScript MD5 + * https://github.com/blueimp/JavaScript-MD5 + * + * Copyright 2011, Sebastian Tschan + * https://blueimp.net + * + * Licensed under the MIT license: + * https://opensource.org/licenses/MIT + * + * Based on + * A JavaScript implementation of the RSA Data Security, Inc. MD5 Message + * Digest Algorithm, as defined in RFC 1321. + * Version 2.2 Copyright (C) Paul Johnston 1999 - 2009 + * Other contributors: Greg Holt, Andrew Kepert, Ydnar, Lostinet + * Distributed under the BSD License + * See http://pajhome.org.uk/crypt/md5 for more info. 
+ */ +function md5(bytes) { + if (typeof bytes === 'string') { + const msg = unescape(encodeURIComponent(bytes)); // UTF8 escape + + bytes = new Uint8Array(msg.length); + + for (let i = 0; i < msg.length; ++i) { + bytes[i] = msg.charCodeAt(i); + } + } + + return md5ToHexEncodedArray(wordsToMd5(bytesToWords(bytes), bytes.length * 8)); +} +/* + * Convert an array of little-endian words to an array of bytes + */ + + +function md5ToHexEncodedArray(input) { + const output = []; + const length32 = input.length * 32; + const hexTab = '0123456789abcdef'; + + for (let i = 0; i < length32; i += 8) { + const x = input[i >> 5] >>> i % 32 & 0xff; + const hex = parseInt(hexTab.charAt(x >>> 4 & 0x0f) + hexTab.charAt(x & 0x0f), 16); + output.push(hex); + } + + return output; +} +/** + * Calculate output length with padding and bit length + */ + + +function getOutputLength(inputLength8) { + return (inputLength8 + 64 >>> 9 << 4) + 14 + 1; +} +/* + * Calculate the MD5 of an array of little-endian words, and a bit length. 
+ */ + + +function wordsToMd5(x, len) { + /* append padding */ + x[len >> 5] |= 0x80 << len % 32; + x[getOutputLength(len) - 1] = len; + let a = 1732584193; + let b = -271733879; + let c = -1732584194; + let d = 271733878; + + for (let i = 0; i < x.length; i += 16) { + const olda = a; + const oldb = b; + const oldc = c; + const oldd = d; + a = md5ff(a, b, c, d, x[i], 7, -680876936); + d = md5ff(d, a, b, c, x[i + 1], 12, -389564586); + c = md5ff(c, d, a, b, x[i + 2], 17, 606105819); + b = md5ff(b, c, d, a, x[i + 3], 22, -1044525330); + a = md5ff(a, b, c, d, x[i + 4], 7, -176418897); + d = md5ff(d, a, b, c, x[i + 5], 12, 1200080426); + c = md5ff(c, d, a, b, x[i + 6], 17, -1473231341); + b = md5ff(b, c, d, a, x[i + 7], 22, -45705983); + a = md5ff(a, b, c, d, x[i + 8], 7, 1770035416); + d = md5ff(d, a, b, c, x[i + 9], 12, -1958414417); + c = md5ff(c, d, a, b, x[i + 10], 17, -42063); + b = md5ff(b, c, d, a, x[i + 11], 22, -1990404162); + a = md5ff(a, b, c, d, x[i + 12], 7, 1804603682); + d = md5ff(d, a, b, c, x[i + 13], 12, -40341101); + c = md5ff(c, d, a, b, x[i + 14], 17, -1502002290); + b = md5ff(b, c, d, a, x[i + 15], 22, 1236535329); + a = md5gg(a, b, c, d, x[i + 1], 5, -165796510); + d = md5gg(d, a, b, c, x[i + 6], 9, -1069501632); + c = md5gg(c, d, a, b, x[i + 11], 14, 643717713); + b = md5gg(b, c, d, a, x[i], 20, -373897302); + a = md5gg(a, b, c, d, x[i + 5], 5, -701558691); + d = md5gg(d, a, b, c, x[i + 10], 9, 38016083); + c = md5gg(c, d, a, b, x[i + 15], 14, -660478335); + b = md5gg(b, c, d, a, x[i + 4], 20, -405537848); + a = md5gg(a, b, c, d, x[i + 9], 5, 568446438); + d = md5gg(d, a, b, c, x[i + 14], 9, -1019803690); + c = md5gg(c, d, a, b, x[i + 3], 14, -187363961); + b = md5gg(b, c, d, a, x[i + 8], 20, 1163531501); + a = md5gg(a, b, c, d, x[i + 13], 5, -1444681467); + d = md5gg(d, a, b, c, x[i + 2], 9, -51403784); + c = md5gg(c, d, a, b, x[i + 7], 14, 1735328473); + b = md5gg(b, c, d, a, x[i + 12], 20, -1926607734); + a = md5hh(a, b, c, d, x[i + 5], 4, 
-378558); + d = md5hh(d, a, b, c, x[i + 8], 11, -2022574463); + c = md5hh(c, d, a, b, x[i + 11], 16, 1839030562); + b = md5hh(b, c, d, a, x[i + 14], 23, -35309556); + a = md5hh(a, b, c, d, x[i + 1], 4, -1530992060); + d = md5hh(d, a, b, c, x[i + 4], 11, 1272893353); + c = md5hh(c, d, a, b, x[i + 7], 16, -155497632); + b = md5hh(b, c, d, a, x[i + 10], 23, -1094730640); + a = md5hh(a, b, c, d, x[i + 13], 4, 681279174); + d = md5hh(d, a, b, c, x[i], 11, -358537222); + c = md5hh(c, d, a, b, x[i + 3], 16, -722521979); + b = md5hh(b, c, d, a, x[i + 6], 23, 76029189); + a = md5hh(a, b, c, d, x[i + 9], 4, -640364487); + d = md5hh(d, a, b, c, x[i + 12], 11, -421815835); + c = md5hh(c, d, a, b, x[i + 15], 16, 530742520); + b = md5hh(b, c, d, a, x[i + 2], 23, -995338651); + a = md5ii(a, b, c, d, x[i], 6, -198630844); + d = md5ii(d, a, b, c, x[i + 7], 10, 1126891415); + c = md5ii(c, d, a, b, x[i + 14], 15, -1416354905); + b = md5ii(b, c, d, a, x[i + 5], 21, -57434055); + a = md5ii(a, b, c, d, x[i + 12], 6, 1700485571); + d = md5ii(d, a, b, c, x[i + 3], 10, -1894986606); + c = md5ii(c, d, a, b, x[i + 10], 15, -1051523); + b = md5ii(b, c, d, a, x[i + 1], 21, -2054922799); + a = md5ii(a, b, c, d, x[i + 8], 6, 1873313359); + d = md5ii(d, a, b, c, x[i + 15], 10, -30611744); + c = md5ii(c, d, a, b, x[i + 6], 15, -1560198380); + b = md5ii(b, c, d, a, x[i + 13], 21, 1309151649); + a = md5ii(a, b, c, d, x[i + 4], 6, -145523070); + d = md5ii(d, a, b, c, x[i + 11], 10, -1120210379); + c = md5ii(c, d, a, b, x[i + 2], 15, 718787259); + b = md5ii(b, c, d, a, x[i + 9], 21, -343485551); + a = safeAdd(a, olda); + b = safeAdd(b, oldb); + c = safeAdd(c, oldc); + d = safeAdd(d, oldd); + } + + return [a, b, c, d]; +} +/* + * Convert an array bytes to an array of little-endian words + * Characters >255 have their high-byte silently ignored. 
+ */ + + +function bytesToWords(input) { + if (input.length === 0) { + return []; + } + + const length8 = input.length * 8; + const output = new Uint32Array(getOutputLength(length8)); + + for (let i = 0; i < length8; i += 8) { + output[i >> 5] |= (input[i / 8] & 0xff) << i % 32; + } + + return output; +} +/* + * Add integers, wrapping at 2^32. This uses 16-bit operations internally + * to work around bugs in some JS interpreters. + */ + + +function safeAdd(x, y) { + const lsw = (x & 0xffff) + (y & 0xffff); + const msw = (x >> 16) + (y >> 16) + (lsw >> 16); + return msw << 16 | lsw & 0xffff; +} +/* + * Bitwise rotate a 32-bit number to the left. + */ + + +function bitRotateLeft(num, cnt) { + return num << cnt | num >>> 32 - cnt; +} +/* + * These functions implement the four basic operations the algorithm uses. + */ + + +function md5cmn(q, a, b, x, s, t) { + return safeAdd(bitRotateLeft(safeAdd(safeAdd(a, q), safeAdd(x, t)), s), b); +} + +function md5ff(a, b, c, d, x, s, t) { + return md5cmn(b & c | ~b & d, a, b, x, s, t); +} + +function md5gg(a, b, c, d, x, s, t) { + return md5cmn(b & d | c & ~d, a, b, x, s, t); +} + +function md5hh(a, b, c, d, x, s, t) { + return md5cmn(b ^ c ^ d, a, b, x, s, t); +} + +function md5ii(a, b, c, d, x, s, t) { + return md5cmn(c ^ (b | ~d), a, b, x, s, t); +} + +var _default = md5; +exports.default = _default; \ No newline at end of file diff --git a/amplify/functions/downloadDocument/node_modules/uuid/dist/commonjs-browser/native.js b/amplify/functions/downloadDocument/node_modules/uuid/dist/commonjs-browser/native.js new file mode 100644 index 0000000..c2eea59 --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/uuid/dist/commonjs-browser/native.js @@ -0,0 +1,11 @@ +"use strict"; + +Object.defineProperty(exports, "__esModule", { + value: true +}); +exports.default = void 0; +const randomUUID = typeof crypto !== 'undefined' && crypto.randomUUID && crypto.randomUUID.bind(crypto); +var _default = { + randomUUID +}; 
+exports.default = _default; \ No newline at end of file diff --git a/amplify/functions/downloadDocument/node_modules/uuid/dist/commonjs-browser/nil.js b/amplify/functions/downloadDocument/node_modules/uuid/dist/commonjs-browser/nil.js new file mode 100644 index 0000000..7ade577 --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/uuid/dist/commonjs-browser/nil.js @@ -0,0 +1,8 @@ +"use strict"; + +Object.defineProperty(exports, "__esModule", { + value: true +}); +exports.default = void 0; +var _default = '00000000-0000-0000-0000-000000000000'; +exports.default = _default; \ No newline at end of file diff --git a/amplify/functions/downloadDocument/node_modules/uuid/dist/commonjs-browser/parse.js b/amplify/functions/downloadDocument/node_modules/uuid/dist/commonjs-browser/parse.js new file mode 100644 index 0000000..4c69fc3 --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/uuid/dist/commonjs-browser/parse.js @@ -0,0 +1,45 @@ +"use strict"; + +Object.defineProperty(exports, "__esModule", { + value: true +}); +exports.default = void 0; + +var _validate = _interopRequireDefault(require("./validate.js")); + +function _interopRequireDefault(obj) { return obj && obj.__esModule ? obj : { default: obj }; } + +function parse(uuid) { + if (!(0, _validate.default)(uuid)) { + throw TypeError('Invalid UUID'); + } + + let v; + const arr = new Uint8Array(16); // Parse ########-....-....-....-............ + + arr[0] = (v = parseInt(uuid.slice(0, 8), 16)) >>> 24; + arr[1] = v >>> 16 & 0xff; + arr[2] = v >>> 8 & 0xff; + arr[3] = v & 0xff; // Parse ........-####-....-....-............ + + arr[4] = (v = parseInt(uuid.slice(9, 13), 16)) >>> 8; + arr[5] = v & 0xff; // Parse ........-....-####-....-............ + + arr[6] = (v = parseInt(uuid.slice(14, 18), 16)) >>> 8; + arr[7] = v & 0xff; // Parse ........-....-....-####-............ 
+ + arr[8] = (v = parseInt(uuid.slice(19, 23), 16)) >>> 8; + arr[9] = v & 0xff; // Parse ........-....-....-....-############ + // (Use "/" to avoid 32-bit truncation when bit-shifting high-order bytes) + + arr[10] = (v = parseInt(uuid.slice(24, 36), 16)) / 0x10000000000 & 0xff; + arr[11] = v / 0x100000000 & 0xff; + arr[12] = v >>> 24 & 0xff; + arr[13] = v >>> 16 & 0xff; + arr[14] = v >>> 8 & 0xff; + arr[15] = v & 0xff; + return arr; +} + +var _default = parse; +exports.default = _default; \ No newline at end of file diff --git a/amplify/functions/downloadDocument/node_modules/uuid/dist/commonjs-browser/regex.js b/amplify/functions/downloadDocument/node_modules/uuid/dist/commonjs-browser/regex.js new file mode 100644 index 0000000..1ef91d6 --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/uuid/dist/commonjs-browser/regex.js @@ -0,0 +1,8 @@ +"use strict"; + +Object.defineProperty(exports, "__esModule", { + value: true +}); +exports.default = void 0; +var _default = /^(?:[0-9a-f]{8}-[0-9a-f]{4}-[1-5][0-9a-f]{3}-[89ab][0-9a-f]{3}-[0-9a-f]{12}|00000000-0000-0000-0000-000000000000)$/i; +exports.default = _default; \ No newline at end of file diff --git a/amplify/functions/downloadDocument/node_modules/uuid/dist/commonjs-browser/rng.js b/amplify/functions/downloadDocument/node_modules/uuid/dist/commonjs-browser/rng.js new file mode 100644 index 0000000..d067cdb --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/uuid/dist/commonjs-browser/rng.js @@ -0,0 +1,25 @@ +"use strict"; + +Object.defineProperty(exports, "__esModule", { + value: true +}); +exports.default = rng; +// Unique ID creation requires a high quality random # generator. In the browser we therefore +// require the crypto API and do not support built-in fallback to lower quality random number +// generators (like Math.random()). 
+let getRandomValues; +const rnds8 = new Uint8Array(16); + +function rng() { + // lazy load so that environments that need to polyfill have a chance to do so + if (!getRandomValues) { + // getRandomValues needs to be invoked in a context where "this" is a Crypto implementation. + getRandomValues = typeof crypto !== 'undefined' && crypto.getRandomValues && crypto.getRandomValues.bind(crypto); + + if (!getRandomValues) { + throw new Error('crypto.getRandomValues() not supported. See https://github.com/uuidjs/uuid#getrandomvalues-not-supported'); + } + } + + return getRandomValues(rnds8); +} \ No newline at end of file diff --git a/amplify/functions/downloadDocument/node_modules/uuid/dist/commonjs-browser/sha1.js b/amplify/functions/downloadDocument/node_modules/uuid/dist/commonjs-browser/sha1.js new file mode 100644 index 0000000..24cbced --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/uuid/dist/commonjs-browser/sha1.js @@ -0,0 +1,104 @@ +"use strict"; + +Object.defineProperty(exports, "__esModule", { + value: true +}); +exports.default = void 0; + +// Adapted from Chris Veness' SHA1 code at +// http://www.movable-type.co.uk/scripts/sha1.html +function f(s, x, y, z) { + switch (s) { + case 0: + return x & y ^ ~x & z; + + case 1: + return x ^ y ^ z; + + case 2: + return x & y ^ x & z ^ y & z; + + case 3: + return x ^ y ^ z; + } +} + +function ROTL(x, n) { + return x << n | x >>> 32 - n; +} + +function sha1(bytes) { + const K = [0x5a827999, 0x6ed9eba1, 0x8f1bbcdc, 0xca62c1d6]; + const H = [0x67452301, 0xefcdab89, 0x98badcfe, 0x10325476, 0xc3d2e1f0]; + + if (typeof bytes === 'string') { + const msg = unescape(encodeURIComponent(bytes)); // UTF8 escape + + bytes = []; + + for (let i = 0; i < msg.length; ++i) { + bytes.push(msg.charCodeAt(i)); + } + } else if (!Array.isArray(bytes)) { + // Convert Array-like to Array + bytes = Array.prototype.slice.call(bytes); + } + + bytes.push(0x80); + const l = bytes.length / 4 + 2; + const N = Math.ceil(l / 16); + 
const M = new Array(N); + + for (let i = 0; i < N; ++i) { + const arr = new Uint32Array(16); + + for (let j = 0; j < 16; ++j) { + arr[j] = bytes[i * 64 + j * 4] << 24 | bytes[i * 64 + j * 4 + 1] << 16 | bytes[i * 64 + j * 4 + 2] << 8 | bytes[i * 64 + j * 4 + 3]; + } + + M[i] = arr; + } + + M[N - 1][14] = (bytes.length - 1) * 8 / Math.pow(2, 32); + M[N - 1][14] = Math.floor(M[N - 1][14]); + M[N - 1][15] = (bytes.length - 1) * 8 & 0xffffffff; + + for (let i = 0; i < N; ++i) { + const W = new Uint32Array(80); + + for (let t = 0; t < 16; ++t) { + W[t] = M[i][t]; + } + + for (let t = 16; t < 80; ++t) { + W[t] = ROTL(W[t - 3] ^ W[t - 8] ^ W[t - 14] ^ W[t - 16], 1); + } + + let a = H[0]; + let b = H[1]; + let c = H[2]; + let d = H[3]; + let e = H[4]; + + for (let t = 0; t < 80; ++t) { + const s = Math.floor(t / 20); + const T = ROTL(a, 5) + f(s, b, c, d) + e + K[s] + W[t] >>> 0; + e = d; + d = c; + c = ROTL(b, 30) >>> 0; + b = a; + a = T; + } + + H[0] = H[0] + a >>> 0; + H[1] = H[1] + b >>> 0; + H[2] = H[2] + c >>> 0; + H[3] = H[3] + d >>> 0; + H[4] = H[4] + e >>> 0; + } + + return [H[0] >> 24 & 0xff, H[0] >> 16 & 0xff, H[0] >> 8 & 0xff, H[0] & 0xff, H[1] >> 24 & 0xff, H[1] >> 16 & 0xff, H[1] >> 8 & 0xff, H[1] & 0xff, H[2] >> 24 & 0xff, H[2] >> 16 & 0xff, H[2] >> 8 & 0xff, H[2] & 0xff, H[3] >> 24 & 0xff, H[3] >> 16 & 0xff, H[3] >> 8 & 0xff, H[3] & 0xff, H[4] >> 24 & 0xff, H[4] >> 16 & 0xff, H[4] >> 8 & 0xff, H[4] & 0xff]; +} + +var _default = sha1; +exports.default = _default; \ No newline at end of file diff --git a/amplify/functions/downloadDocument/node_modules/uuid/dist/commonjs-browser/stringify.js b/amplify/functions/downloadDocument/node_modules/uuid/dist/commonjs-browser/stringify.js new file mode 100644 index 0000000..390bf89 --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/uuid/dist/commonjs-browser/stringify.js @@ -0,0 +1,44 @@ +"use strict"; + +Object.defineProperty(exports, "__esModule", { + value: true +}); +exports.default = void 0; 
+exports.unsafeStringify = unsafeStringify; + +var _validate = _interopRequireDefault(require("./validate.js")); + +function _interopRequireDefault(obj) { return obj && obj.__esModule ? obj : { default: obj }; } + +/** + * Convert array of 16 byte values to UUID string format of the form: + * XXXXXXXX-XXXX-XXXX-XXXX-XXXXXXXXXXXX + */ +const byteToHex = []; + +for (let i = 0; i < 256; ++i) { + byteToHex.push((i + 0x100).toString(16).slice(1)); +} + +function unsafeStringify(arr, offset = 0) { + // Note: Be careful editing this code! It's been tuned for performance + // and works in ways you may not expect. See https://github.com/uuidjs/uuid/pull/434 + return byteToHex[arr[offset + 0]] + byteToHex[arr[offset + 1]] + byteToHex[arr[offset + 2]] + byteToHex[arr[offset + 3]] + '-' + byteToHex[arr[offset + 4]] + byteToHex[arr[offset + 5]] + '-' + byteToHex[arr[offset + 6]] + byteToHex[arr[offset + 7]] + '-' + byteToHex[arr[offset + 8]] + byteToHex[arr[offset + 9]] + '-' + byteToHex[arr[offset + 10]] + byteToHex[arr[offset + 11]] + byteToHex[arr[offset + 12]] + byteToHex[arr[offset + 13]] + byteToHex[arr[offset + 14]] + byteToHex[arr[offset + 15]]; +} + +function stringify(arr, offset = 0) { + const uuid = unsafeStringify(arr, offset); // Consistency check for valid UUID. 
If this throws, it's likely due to one + // of the following: + // - One or more input array values don't map to a hex octet (leading to + // "undefined" in the uuid) + // - Invalid input values for the RFC `version` or `variant` fields + + if (!(0, _validate.default)(uuid)) { + throw TypeError('Stringified UUID is invalid'); + } + + return uuid; +} + +var _default = stringify; +exports.default = _default; \ No newline at end of file diff --git a/amplify/functions/downloadDocument/node_modules/uuid/dist/commonjs-browser/v1.js b/amplify/functions/downloadDocument/node_modules/uuid/dist/commonjs-browser/v1.js new file mode 100644 index 0000000..125bc58 --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/uuid/dist/commonjs-browser/v1.js @@ -0,0 +1,107 @@ +"use strict"; + +Object.defineProperty(exports, "__esModule", { + value: true +}); +exports.default = void 0; + +var _rng = _interopRequireDefault(require("./rng.js")); + +var _stringify = require("./stringify.js"); + +function _interopRequireDefault(obj) { return obj && obj.__esModule ? obj : { default: obj }; } + +// **`v1()` - Generate time-based UUID** +// +// Inspired by https://github.com/LiosK/UUID.js +// and http://docs.python.org/library/uuid.html +let _nodeId; + +let _clockseq; // Previous uuid creation time + + +let _lastMSecs = 0; +let _lastNSecs = 0; // See https://github.com/uuidjs/uuid for API details + +function v1(options, buf, offset) { + let i = buf && offset || 0; + const b = buf || new Array(16); + options = options || {}; + let node = options.node || _nodeId; + let clockseq = options.clockseq !== undefined ? options.clockseq : _clockseq; // node and clockseq need to be initialized to random values if they're not + // specified. We do this lazily to minimize issues related to insufficient + // system entropy. 
See #189 + + if (node == null || clockseq == null) { + const seedBytes = options.random || (options.rng || _rng.default)(); + + if (node == null) { + // Per 4.5, create and 48-bit node id, (47 random bits + multicast bit = 1) + node = _nodeId = [seedBytes[0] | 0x01, seedBytes[1], seedBytes[2], seedBytes[3], seedBytes[4], seedBytes[5]]; + } + + if (clockseq == null) { + // Per 4.2.2, randomize (14 bit) clockseq + clockseq = _clockseq = (seedBytes[6] << 8 | seedBytes[7]) & 0x3fff; + } + } // UUID timestamps are 100 nano-second units since the Gregorian epoch, + // (1582-10-15 00:00). JSNumbers aren't precise enough for this, so + // time is handled internally as 'msecs' (integer milliseconds) and 'nsecs' + // (100-nanoseconds offset from msecs) since unix epoch, 1970-01-01 00:00. + + + let msecs = options.msecs !== undefined ? options.msecs : Date.now(); // Per 4.2.1.2, use count of uuid's generated during the current clock + // cycle to simulate higher resolution clock + + let nsecs = options.nsecs !== undefined ? 
options.nsecs : _lastNSecs + 1; // Time since last uuid creation (in msecs) + + const dt = msecs - _lastMSecs + (nsecs - _lastNSecs) / 10000; // Per 4.2.1.2, Bump clockseq on clock regression + + if (dt < 0 && options.clockseq === undefined) { + clockseq = clockseq + 1 & 0x3fff; + } // Reset nsecs if clock regresses (new clockseq) or we've moved onto a new + // time interval + + + if ((dt < 0 || msecs > _lastMSecs) && options.nsecs === undefined) { + nsecs = 0; + } // Per 4.2.1.2 Throw error if too many uuids are requested + + + if (nsecs >= 10000) { + throw new Error("uuid.v1(): Can't create more than 10M uuids/sec"); + } + + _lastMSecs = msecs; + _lastNSecs = nsecs; + _clockseq = clockseq; // Per 4.1.4 - Convert from unix epoch to Gregorian epoch + + msecs += 12219292800000; // `time_low` + + const tl = ((msecs & 0xfffffff) * 10000 + nsecs) % 0x100000000; + b[i++] = tl >>> 24 & 0xff; + b[i++] = tl >>> 16 & 0xff; + b[i++] = tl >>> 8 & 0xff; + b[i++] = tl & 0xff; // `time_mid` + + const tmh = msecs / 0x100000000 * 10000 & 0xfffffff; + b[i++] = tmh >>> 8 & 0xff; + b[i++] = tmh & 0xff; // `time_high_and_version` + + b[i++] = tmh >>> 24 & 0xf | 0x10; // include version + + b[i++] = tmh >>> 16 & 0xff; // `clock_seq_hi_and_reserved` (Per 4.2.2 - include variant) + + b[i++] = clockseq >>> 8 | 0x80; // `clock_seq_low` + + b[i++] = clockseq & 0xff; // `node` + + for (let n = 0; n < 6; ++n) { + b[i + n] = node[n]; + } + + return buf || (0, _stringify.unsafeStringify)(b); +} + +var _default = v1; +exports.default = _default; \ No newline at end of file diff --git a/amplify/functions/downloadDocument/node_modules/uuid/dist/commonjs-browser/v3.js b/amplify/functions/downloadDocument/node_modules/uuid/dist/commonjs-browser/v3.js new file mode 100644 index 0000000..6b47ff5 --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/uuid/dist/commonjs-browser/v3.js @@ -0,0 +1,16 @@ +"use strict"; + +Object.defineProperty(exports, "__esModule", { + value: true +}); 
+exports.default = void 0; + +var _v = _interopRequireDefault(require("./v35.js")); + +var _md = _interopRequireDefault(require("./md5.js")); + +function _interopRequireDefault(obj) { return obj && obj.__esModule ? obj : { default: obj }; } + +const v3 = (0, _v.default)('v3', 0x30, _md.default); +var _default = v3; +exports.default = _default; \ No newline at end of file diff --git a/amplify/functions/downloadDocument/node_modules/uuid/dist/commonjs-browser/v35.js b/amplify/functions/downloadDocument/node_modules/uuid/dist/commonjs-browser/v35.js new file mode 100644 index 0000000..7c522d9 --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/uuid/dist/commonjs-browser/v35.js @@ -0,0 +1,80 @@ +"use strict"; + +Object.defineProperty(exports, "__esModule", { + value: true +}); +exports.URL = exports.DNS = void 0; +exports.default = v35; + +var _stringify = require("./stringify.js"); + +var _parse = _interopRequireDefault(require("./parse.js")); + +function _interopRequireDefault(obj) { return obj && obj.__esModule ? obj : { default: obj }; } + +function stringToBytes(str) { + str = unescape(encodeURIComponent(str)); // UTF8 escape + + const bytes = []; + + for (let i = 0; i < str.length; ++i) { + bytes.push(str.charCodeAt(i)); + } + + return bytes; +} + +const DNS = '6ba7b810-9dad-11d1-80b4-00c04fd430c8'; +exports.DNS = DNS; +const URL = '6ba7b811-9dad-11d1-80b4-00c04fd430c8'; +exports.URL = URL; + +function v35(name, version, hashfunc) { + function generateUUID(value, namespace, buf, offset) { + var _namespace; + + if (typeof value === 'string') { + value = stringToBytes(value); + } + + if (typeof namespace === 'string') { + namespace = (0, _parse.default)(namespace); + } + + if (((_namespace = namespace) === null || _namespace === void 0 ? 
void 0 : _namespace.length) !== 16) { + throw TypeError('Namespace must be array-like (16 iterable integer values, 0-255)'); + } // Compute hash of namespace and value, Per 4.3 + // Future: Use spread syntax when supported on all platforms, e.g. `bytes = + // hashfunc([...namespace, ... value])` + + + let bytes = new Uint8Array(16 + value.length); + bytes.set(namespace); + bytes.set(value, namespace.length); + bytes = hashfunc(bytes); + bytes[6] = bytes[6] & 0x0f | version; + bytes[8] = bytes[8] & 0x3f | 0x80; + + if (buf) { + offset = offset || 0; + + for (let i = 0; i < 16; ++i) { + buf[offset + i] = bytes[i]; + } + + return buf; + } + + return (0, _stringify.unsafeStringify)(bytes); + } // Function#name is not settable on some platforms (#270) + + + try { + generateUUID.name = name; // eslint-disable-next-line no-empty + } catch (err) {} // For CommonJS default export support + + + generateUUID.DNS = DNS; + generateUUID.URL = URL; + return generateUUID; +} \ No newline at end of file diff --git a/amplify/functions/downloadDocument/node_modules/uuid/dist/commonjs-browser/v4.js b/amplify/functions/downloadDocument/node_modules/uuid/dist/commonjs-browser/v4.js new file mode 100644 index 0000000..959d698 --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/uuid/dist/commonjs-browser/v4.js @@ -0,0 +1,43 @@ +"use strict"; + +Object.defineProperty(exports, "__esModule", { + value: true +}); +exports.default = void 0; + +var _native = _interopRequireDefault(require("./native.js")); + +var _rng = _interopRequireDefault(require("./rng.js")); + +var _stringify = require("./stringify.js"); + +function _interopRequireDefault(obj) { return obj && obj.__esModule ? 
obj : { default: obj }; } + +function v4(options, buf, offset) { + if (_native.default.randomUUID && !buf && !options) { + return _native.default.randomUUID(); + } + + options = options || {}; + + const rnds = options.random || (options.rng || _rng.default)(); // Per 4.4, set bits for version and `clock_seq_hi_and_reserved` + + + rnds[6] = rnds[6] & 0x0f | 0x40; + rnds[8] = rnds[8] & 0x3f | 0x80; // Copy bytes to buffer, if provided + + if (buf) { + offset = offset || 0; + + for (let i = 0; i < 16; ++i) { + buf[offset + i] = rnds[i]; + } + + return buf; + } + + return (0, _stringify.unsafeStringify)(rnds); +} + +var _default = v4; +exports.default = _default; \ No newline at end of file diff --git a/amplify/functions/downloadDocument/node_modules/uuid/dist/commonjs-browser/v5.js b/amplify/functions/downloadDocument/node_modules/uuid/dist/commonjs-browser/v5.js new file mode 100644 index 0000000..99d615e --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/uuid/dist/commonjs-browser/v5.js @@ -0,0 +1,16 @@ +"use strict"; + +Object.defineProperty(exports, "__esModule", { + value: true +}); +exports.default = void 0; + +var _v = _interopRequireDefault(require("./v35.js")); + +var _sha = _interopRequireDefault(require("./sha1.js")); + +function _interopRequireDefault(obj) { return obj && obj.__esModule ? 
obj : { default: obj }; } + +const v5 = (0, _v.default)('v5', 0x50, _sha.default); +var _default = v5; +exports.default = _default; \ No newline at end of file diff --git a/amplify/functions/downloadDocument/node_modules/uuid/dist/commonjs-browser/validate.js b/amplify/functions/downloadDocument/node_modules/uuid/dist/commonjs-browser/validate.js new file mode 100644 index 0000000..fd05215 --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/uuid/dist/commonjs-browser/validate.js @@ -0,0 +1,17 @@ +"use strict"; + +Object.defineProperty(exports, "__esModule", { + value: true +}); +exports.default = void 0; + +var _regex = _interopRequireDefault(require("./regex.js")); + +function _interopRequireDefault(obj) { return obj && obj.__esModule ? obj : { default: obj }; } + +function validate(uuid) { + return typeof uuid === 'string' && _regex.default.test(uuid); +} + +var _default = validate; +exports.default = _default; \ No newline at end of file diff --git a/amplify/functions/downloadDocument/node_modules/uuid/dist/commonjs-browser/version.js b/amplify/functions/downloadDocument/node_modules/uuid/dist/commonjs-browser/version.js new file mode 100644 index 0000000..f63af01 --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/uuid/dist/commonjs-browser/version.js @@ -0,0 +1,21 @@ +"use strict"; + +Object.defineProperty(exports, "__esModule", { + value: true +}); +exports.default = void 0; + +var _validate = _interopRequireDefault(require("./validate.js")); + +function _interopRequireDefault(obj) { return obj && obj.__esModule ? 
obj : { default: obj }; } + +function version(uuid) { + if (!(0, _validate.default)(uuid)) { + throw TypeError('Invalid UUID'); + } + + return parseInt(uuid.slice(14, 15), 16); +} + +var _default = version; +exports.default = _default; \ No newline at end of file diff --git a/amplify/functions/downloadDocument/node_modules/uuid/dist/esm-browser/index.js b/amplify/functions/downloadDocument/node_modules/uuid/dist/esm-browser/index.js new file mode 100644 index 0000000..1db6f6d --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/uuid/dist/esm-browser/index.js @@ -0,0 +1,9 @@ +export { default as v1 } from './v1.js'; +export { default as v3 } from './v3.js'; +export { default as v4 } from './v4.js'; +export { default as v5 } from './v5.js'; +export { default as NIL } from './nil.js'; +export { default as version } from './version.js'; +export { default as validate } from './validate.js'; +export { default as stringify } from './stringify.js'; +export { default as parse } from './parse.js'; \ No newline at end of file diff --git a/amplify/functions/downloadDocument/node_modules/uuid/dist/esm-browser/md5.js b/amplify/functions/downloadDocument/node_modules/uuid/dist/esm-browser/md5.js new file mode 100644 index 0000000..f12212e --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/uuid/dist/esm-browser/md5.js @@ -0,0 +1,215 @@ +/* + * Browser-compatible JavaScript MD5 + * + * Modification of JavaScript MD5 + * https://github.com/blueimp/JavaScript-MD5 + * + * Copyright 2011, Sebastian Tschan + * https://blueimp.net + * + * Licensed under the MIT license: + * https://opensource.org/licenses/MIT + * + * Based on + * A JavaScript implementation of the RSA Data Security, Inc. MD5 Message + * Digest Algorithm, as defined in RFC 1321. + * Version 2.2 Copyright (C) Paul Johnston 1999 - 2009 + * Other contributors: Greg Holt, Andrew Kepert, Ydnar, Lostinet + * Distributed under the BSD License + * See http://pajhome.org.uk/crypt/md5 for more info. 
+ */ +function md5(bytes) { + if (typeof bytes === 'string') { + const msg = unescape(encodeURIComponent(bytes)); // UTF8 escape + + bytes = new Uint8Array(msg.length); + + for (let i = 0; i < msg.length; ++i) { + bytes[i] = msg.charCodeAt(i); + } + } + + return md5ToHexEncodedArray(wordsToMd5(bytesToWords(bytes), bytes.length * 8)); +} +/* + * Convert an array of little-endian words to an array of bytes + */ + + +function md5ToHexEncodedArray(input) { + const output = []; + const length32 = input.length * 32; + const hexTab = '0123456789abcdef'; + + for (let i = 0; i < length32; i += 8) { + const x = input[i >> 5] >>> i % 32 & 0xff; + const hex = parseInt(hexTab.charAt(x >>> 4 & 0x0f) + hexTab.charAt(x & 0x0f), 16); + output.push(hex); + } + + return output; +} +/** + * Calculate output length with padding and bit length + */ + + +function getOutputLength(inputLength8) { + return (inputLength8 + 64 >>> 9 << 4) + 14 + 1; +} +/* + * Calculate the MD5 of an array of little-endian words, and a bit length. 
+ */ + + +function wordsToMd5(x, len) { + /* append padding */ + x[len >> 5] |= 0x80 << len % 32; + x[getOutputLength(len) - 1] = len; + let a = 1732584193; + let b = -271733879; + let c = -1732584194; + let d = 271733878; + + for (let i = 0; i < x.length; i += 16) { + const olda = a; + const oldb = b; + const oldc = c; + const oldd = d; + a = md5ff(a, b, c, d, x[i], 7, -680876936); + d = md5ff(d, a, b, c, x[i + 1], 12, -389564586); + c = md5ff(c, d, a, b, x[i + 2], 17, 606105819); + b = md5ff(b, c, d, a, x[i + 3], 22, -1044525330); + a = md5ff(a, b, c, d, x[i + 4], 7, -176418897); + d = md5ff(d, a, b, c, x[i + 5], 12, 1200080426); + c = md5ff(c, d, a, b, x[i + 6], 17, -1473231341); + b = md5ff(b, c, d, a, x[i + 7], 22, -45705983); + a = md5ff(a, b, c, d, x[i + 8], 7, 1770035416); + d = md5ff(d, a, b, c, x[i + 9], 12, -1958414417); + c = md5ff(c, d, a, b, x[i + 10], 17, -42063); + b = md5ff(b, c, d, a, x[i + 11], 22, -1990404162); + a = md5ff(a, b, c, d, x[i + 12], 7, 1804603682); + d = md5ff(d, a, b, c, x[i + 13], 12, -40341101); + c = md5ff(c, d, a, b, x[i + 14], 17, -1502002290); + b = md5ff(b, c, d, a, x[i + 15], 22, 1236535329); + a = md5gg(a, b, c, d, x[i + 1], 5, -165796510); + d = md5gg(d, a, b, c, x[i + 6], 9, -1069501632); + c = md5gg(c, d, a, b, x[i + 11], 14, 643717713); + b = md5gg(b, c, d, a, x[i], 20, -373897302); + a = md5gg(a, b, c, d, x[i + 5], 5, -701558691); + d = md5gg(d, a, b, c, x[i + 10], 9, 38016083); + c = md5gg(c, d, a, b, x[i + 15], 14, -660478335); + b = md5gg(b, c, d, a, x[i + 4], 20, -405537848); + a = md5gg(a, b, c, d, x[i + 9], 5, 568446438); + d = md5gg(d, a, b, c, x[i + 14], 9, -1019803690); + c = md5gg(c, d, a, b, x[i + 3], 14, -187363961); + b = md5gg(b, c, d, a, x[i + 8], 20, 1163531501); + a = md5gg(a, b, c, d, x[i + 13], 5, -1444681467); + d = md5gg(d, a, b, c, x[i + 2], 9, -51403784); + c = md5gg(c, d, a, b, x[i + 7], 14, 1735328473); + b = md5gg(b, c, d, a, x[i + 12], 20, -1926607734); + a = md5hh(a, b, c, d, x[i + 5], 4, 
-378558); + d = md5hh(d, a, b, c, x[i + 8], 11, -2022574463); + c = md5hh(c, d, a, b, x[i + 11], 16, 1839030562); + b = md5hh(b, c, d, a, x[i + 14], 23, -35309556); + a = md5hh(a, b, c, d, x[i + 1], 4, -1530992060); + d = md5hh(d, a, b, c, x[i + 4], 11, 1272893353); + c = md5hh(c, d, a, b, x[i + 7], 16, -155497632); + b = md5hh(b, c, d, a, x[i + 10], 23, -1094730640); + a = md5hh(a, b, c, d, x[i + 13], 4, 681279174); + d = md5hh(d, a, b, c, x[i], 11, -358537222); + c = md5hh(c, d, a, b, x[i + 3], 16, -722521979); + b = md5hh(b, c, d, a, x[i + 6], 23, 76029189); + a = md5hh(a, b, c, d, x[i + 9], 4, -640364487); + d = md5hh(d, a, b, c, x[i + 12], 11, -421815835); + c = md5hh(c, d, a, b, x[i + 15], 16, 530742520); + b = md5hh(b, c, d, a, x[i + 2], 23, -995338651); + a = md5ii(a, b, c, d, x[i], 6, -198630844); + d = md5ii(d, a, b, c, x[i + 7], 10, 1126891415); + c = md5ii(c, d, a, b, x[i + 14], 15, -1416354905); + b = md5ii(b, c, d, a, x[i + 5], 21, -57434055); + a = md5ii(a, b, c, d, x[i + 12], 6, 1700485571); + d = md5ii(d, a, b, c, x[i + 3], 10, -1894986606); + c = md5ii(c, d, a, b, x[i + 10], 15, -1051523); + b = md5ii(b, c, d, a, x[i + 1], 21, -2054922799); + a = md5ii(a, b, c, d, x[i + 8], 6, 1873313359); + d = md5ii(d, a, b, c, x[i + 15], 10, -30611744); + c = md5ii(c, d, a, b, x[i + 6], 15, -1560198380); + b = md5ii(b, c, d, a, x[i + 13], 21, 1309151649); + a = md5ii(a, b, c, d, x[i + 4], 6, -145523070); + d = md5ii(d, a, b, c, x[i + 11], 10, -1120210379); + c = md5ii(c, d, a, b, x[i + 2], 15, 718787259); + b = md5ii(b, c, d, a, x[i + 9], 21, -343485551); + a = safeAdd(a, olda); + b = safeAdd(b, oldb); + c = safeAdd(c, oldc); + d = safeAdd(d, oldd); + } + + return [a, b, c, d]; +} +/* + * Convert an array bytes to an array of little-endian words + * Characters >255 have their high-byte silently ignored. 
+ */ + + +function bytesToWords(input) { + if (input.length === 0) { + return []; + } + + const length8 = input.length * 8; + const output = new Uint32Array(getOutputLength(length8)); + + for (let i = 0; i < length8; i += 8) { + output[i >> 5] |= (input[i / 8] & 0xff) << i % 32; + } + + return output; +} +/* + * Add integers, wrapping at 2^32. This uses 16-bit operations internally + * to work around bugs in some JS interpreters. + */ + + +function safeAdd(x, y) { + const lsw = (x & 0xffff) + (y & 0xffff); + const msw = (x >> 16) + (y >> 16) + (lsw >> 16); + return msw << 16 | lsw & 0xffff; +} +/* + * Bitwise rotate a 32-bit number to the left. + */ + + +function bitRotateLeft(num, cnt) { + return num << cnt | num >>> 32 - cnt; +} +/* + * These functions implement the four basic operations the algorithm uses. + */ + + +function md5cmn(q, a, b, x, s, t) { + return safeAdd(bitRotateLeft(safeAdd(safeAdd(a, q), safeAdd(x, t)), s), b); +} + +function md5ff(a, b, c, d, x, s, t) { + return md5cmn(b & c | ~b & d, a, b, x, s, t); +} + +function md5gg(a, b, c, d, x, s, t) { + return md5cmn(b & d | c & ~d, a, b, x, s, t); +} + +function md5hh(a, b, c, d, x, s, t) { + return md5cmn(b ^ c ^ d, a, b, x, s, t); +} + +function md5ii(a, b, c, d, x, s, t) { + return md5cmn(c ^ (b | ~d), a, b, x, s, t); +} + +export default md5; \ No newline at end of file diff --git a/amplify/functions/downloadDocument/node_modules/uuid/dist/esm-browser/native.js b/amplify/functions/downloadDocument/node_modules/uuid/dist/esm-browser/native.js new file mode 100644 index 0000000..b22292c --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/uuid/dist/esm-browser/native.js @@ -0,0 +1,4 @@ +const randomUUID = typeof crypto !== 'undefined' && crypto.randomUUID && crypto.randomUUID.bind(crypto); +export default { + randomUUID +}; \ No newline at end of file diff --git a/amplify/functions/downloadDocument/node_modules/uuid/dist/esm-browser/nil.js 
b/amplify/functions/downloadDocument/node_modules/uuid/dist/esm-browser/nil.js new file mode 100644 index 0000000..b36324c --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/uuid/dist/esm-browser/nil.js @@ -0,0 +1 @@ +export default '00000000-0000-0000-0000-000000000000'; \ No newline at end of file diff --git a/amplify/functions/downloadDocument/node_modules/uuid/dist/esm-browser/parse.js b/amplify/functions/downloadDocument/node_modules/uuid/dist/esm-browser/parse.js new file mode 100644 index 0000000..6421c5d --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/uuid/dist/esm-browser/parse.js @@ -0,0 +1,35 @@ +import validate from './validate.js'; + +function parse(uuid) { + if (!validate(uuid)) { + throw TypeError('Invalid UUID'); + } + + let v; + const arr = new Uint8Array(16); // Parse ########-....-....-....-............ + + arr[0] = (v = parseInt(uuid.slice(0, 8), 16)) >>> 24; + arr[1] = v >>> 16 & 0xff; + arr[2] = v >>> 8 & 0xff; + arr[3] = v & 0xff; // Parse ........-####-....-....-............ + + arr[4] = (v = parseInt(uuid.slice(9, 13), 16)) >>> 8; + arr[5] = v & 0xff; // Parse ........-....-####-....-............ + + arr[6] = (v = parseInt(uuid.slice(14, 18), 16)) >>> 8; + arr[7] = v & 0xff; // Parse ........-....-....-####-............ 
+ + arr[8] = (v = parseInt(uuid.slice(19, 23), 16)) >>> 8; + arr[9] = v & 0xff; // Parse ........-....-....-....-############ + // (Use "/" to avoid 32-bit truncation when bit-shifting high-order bytes) + + arr[10] = (v = parseInt(uuid.slice(24, 36), 16)) / 0x10000000000 & 0xff; + arr[11] = v / 0x100000000 & 0xff; + arr[12] = v >>> 24 & 0xff; + arr[13] = v >>> 16 & 0xff; + arr[14] = v >>> 8 & 0xff; + arr[15] = v & 0xff; + return arr; +} + +export default parse; \ No newline at end of file diff --git a/amplify/functions/downloadDocument/node_modules/uuid/dist/esm-browser/regex.js b/amplify/functions/downloadDocument/node_modules/uuid/dist/esm-browser/regex.js new file mode 100644 index 0000000..3da8673 --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/uuid/dist/esm-browser/regex.js @@ -0,0 +1 @@ +export default /^(?:[0-9a-f]{8}-[0-9a-f]{4}-[1-5][0-9a-f]{3}-[89ab][0-9a-f]{3}-[0-9a-f]{12}|00000000-0000-0000-0000-000000000000)$/i; \ No newline at end of file diff --git a/amplify/functions/downloadDocument/node_modules/uuid/dist/esm-browser/rng.js b/amplify/functions/downloadDocument/node_modules/uuid/dist/esm-browser/rng.js new file mode 100644 index 0000000..6e65234 --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/uuid/dist/esm-browser/rng.js @@ -0,0 +1,18 @@ +// Unique ID creation requires a high quality random # generator. In the browser we therefore +// require the crypto API and do not support built-in fallback to lower quality random number +// generators (like Math.random()). +let getRandomValues; +const rnds8 = new Uint8Array(16); +export default function rng() { + // lazy load so that environments that need to polyfill have a chance to do so + if (!getRandomValues) { + // getRandomValues needs to be invoked in a context where "this" is a Crypto implementation. 
+ getRandomValues = typeof crypto !== 'undefined' && crypto.getRandomValues && crypto.getRandomValues.bind(crypto); + + if (!getRandomValues) { + throw new Error('crypto.getRandomValues() not supported. See https://github.com/uuidjs/uuid#getrandomvalues-not-supported'); + } + } + + return getRandomValues(rnds8); +} \ No newline at end of file diff --git a/amplify/functions/downloadDocument/node_modules/uuid/dist/esm-browser/sha1.js b/amplify/functions/downloadDocument/node_modules/uuid/dist/esm-browser/sha1.js new file mode 100644 index 0000000..d3c2565 --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/uuid/dist/esm-browser/sha1.js @@ -0,0 +1,96 @@ +// Adapted from Chris Veness' SHA1 code at +// http://www.movable-type.co.uk/scripts/sha1.html +function f(s, x, y, z) { + switch (s) { + case 0: + return x & y ^ ~x & z; + + case 1: + return x ^ y ^ z; + + case 2: + return x & y ^ x & z ^ y & z; + + case 3: + return x ^ y ^ z; + } +} + +function ROTL(x, n) { + return x << n | x >>> 32 - n; +} + +function sha1(bytes) { + const K = [0x5a827999, 0x6ed9eba1, 0x8f1bbcdc, 0xca62c1d6]; + const H = [0x67452301, 0xefcdab89, 0x98badcfe, 0x10325476, 0xc3d2e1f0]; + + if (typeof bytes === 'string') { + const msg = unescape(encodeURIComponent(bytes)); // UTF8 escape + + bytes = []; + + for (let i = 0; i < msg.length; ++i) { + bytes.push(msg.charCodeAt(i)); + } + } else if (!Array.isArray(bytes)) { + // Convert Array-like to Array + bytes = Array.prototype.slice.call(bytes); + } + + bytes.push(0x80); + const l = bytes.length / 4 + 2; + const N = Math.ceil(l / 16); + const M = new Array(N); + + for (let i = 0; i < N; ++i) { + const arr = new Uint32Array(16); + + for (let j = 0; j < 16; ++j) { + arr[j] = bytes[i * 64 + j * 4] << 24 | bytes[i * 64 + j * 4 + 1] << 16 | bytes[i * 64 + j * 4 + 2] << 8 | bytes[i * 64 + j * 4 + 3]; + } + + M[i] = arr; + } + + M[N - 1][14] = (bytes.length - 1) * 8 / Math.pow(2, 32); + M[N - 1][14] = Math.floor(M[N - 1][14]); + M[N - 1][15] = 
(bytes.length - 1) * 8 & 0xffffffff; + + for (let i = 0; i < N; ++i) { + const W = new Uint32Array(80); + + for (let t = 0; t < 16; ++t) { + W[t] = M[i][t]; + } + + for (let t = 16; t < 80; ++t) { + W[t] = ROTL(W[t - 3] ^ W[t - 8] ^ W[t - 14] ^ W[t - 16], 1); + } + + let a = H[0]; + let b = H[1]; + let c = H[2]; + let d = H[3]; + let e = H[4]; + + for (let t = 0; t < 80; ++t) { + const s = Math.floor(t / 20); + const T = ROTL(a, 5) + f(s, b, c, d) + e + K[s] + W[t] >>> 0; + e = d; + d = c; + c = ROTL(b, 30) >>> 0; + b = a; + a = T; + } + + H[0] = H[0] + a >>> 0; + H[1] = H[1] + b >>> 0; + H[2] = H[2] + c >>> 0; + H[3] = H[3] + d >>> 0; + H[4] = H[4] + e >>> 0; + } + + return [H[0] >> 24 & 0xff, H[0] >> 16 & 0xff, H[0] >> 8 & 0xff, H[0] & 0xff, H[1] >> 24 & 0xff, H[1] >> 16 & 0xff, H[1] >> 8 & 0xff, H[1] & 0xff, H[2] >> 24 & 0xff, H[2] >> 16 & 0xff, H[2] >> 8 & 0xff, H[2] & 0xff, H[3] >> 24 & 0xff, H[3] >> 16 & 0xff, H[3] >> 8 & 0xff, H[3] & 0xff, H[4] >> 24 & 0xff, H[4] >> 16 & 0xff, H[4] >> 8 & 0xff, H[4] & 0xff]; +} + +export default sha1; \ No newline at end of file diff --git a/amplify/functions/downloadDocument/node_modules/uuid/dist/esm-browser/stringify.js b/amplify/functions/downloadDocument/node_modules/uuid/dist/esm-browser/stringify.js new file mode 100644 index 0000000..a6e4c88 --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/uuid/dist/esm-browser/stringify.js @@ -0,0 +1,33 @@ +import validate from './validate.js'; +/** + * Convert array of 16 byte values to UUID string format of the form: + * XXXXXXXX-XXXX-XXXX-XXXX-XXXXXXXXXXXX + */ + +const byteToHex = []; + +for (let i = 0; i < 256; ++i) { + byteToHex.push((i + 0x100).toString(16).slice(1)); +} + +export function unsafeStringify(arr, offset = 0) { + // Note: Be careful editing this code! It's been tuned for performance + // and works in ways you may not expect. 
See https://github.com/uuidjs/uuid/pull/434 + return byteToHex[arr[offset + 0]] + byteToHex[arr[offset + 1]] + byteToHex[arr[offset + 2]] + byteToHex[arr[offset + 3]] + '-' + byteToHex[arr[offset + 4]] + byteToHex[arr[offset + 5]] + '-' + byteToHex[arr[offset + 6]] + byteToHex[arr[offset + 7]] + '-' + byteToHex[arr[offset + 8]] + byteToHex[arr[offset + 9]] + '-' + byteToHex[arr[offset + 10]] + byteToHex[arr[offset + 11]] + byteToHex[arr[offset + 12]] + byteToHex[arr[offset + 13]] + byteToHex[arr[offset + 14]] + byteToHex[arr[offset + 15]]; +} + +function stringify(arr, offset = 0) { + const uuid = unsafeStringify(arr, offset); // Consistency check for valid UUID. If this throws, it's likely due to one + // of the following: + // - One or more input array values don't map to a hex octet (leading to + // "undefined" in the uuid) + // - Invalid input values for the RFC `version` or `variant` fields + + if (!validate(uuid)) { + throw TypeError('Stringified UUID is invalid'); + } + + return uuid; +} + +export default stringify; \ No newline at end of file diff --git a/amplify/functions/downloadDocument/node_modules/uuid/dist/esm-browser/v1.js b/amplify/functions/downloadDocument/node_modules/uuid/dist/esm-browser/v1.js new file mode 100644 index 0000000..382e5d7 --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/uuid/dist/esm-browser/v1.js @@ -0,0 +1,95 @@ +import rng from './rng.js'; +import { unsafeStringify } from './stringify.js'; // **`v1()` - Generate time-based UUID** +// +// Inspired by https://github.com/LiosK/UUID.js +// and http://docs.python.org/library/uuid.html + +let _nodeId; + +let _clockseq; // Previous uuid creation time + + +let _lastMSecs = 0; +let _lastNSecs = 0; // See https://github.com/uuidjs/uuid for API details + +function v1(options, buf, offset) { + let i = buf && offset || 0; + const b = buf || new Array(16); + options = options || {}; + let node = options.node || _nodeId; + let clockseq = options.clockseq !== undefined ? 
options.clockseq : _clockseq; // node and clockseq need to be initialized to random values if they're not + // specified. We do this lazily to minimize issues related to insufficient + // system entropy. See #189 + + if (node == null || clockseq == null) { + const seedBytes = options.random || (options.rng || rng)(); + + if (node == null) { + // Per 4.5, create and 48-bit node id, (47 random bits + multicast bit = 1) + node = _nodeId = [seedBytes[0] | 0x01, seedBytes[1], seedBytes[2], seedBytes[3], seedBytes[4], seedBytes[5]]; + } + + if (clockseq == null) { + // Per 4.2.2, randomize (14 bit) clockseq + clockseq = _clockseq = (seedBytes[6] << 8 | seedBytes[7]) & 0x3fff; + } + } // UUID timestamps are 100 nano-second units since the Gregorian epoch, + // (1582-10-15 00:00). JSNumbers aren't precise enough for this, so + // time is handled internally as 'msecs' (integer milliseconds) and 'nsecs' + // (100-nanoseconds offset from msecs) since unix epoch, 1970-01-01 00:00. + + + let msecs = options.msecs !== undefined ? options.msecs : Date.now(); // Per 4.2.1.2, use count of uuid's generated during the current clock + // cycle to simulate higher resolution clock + + let nsecs = options.nsecs !== undefined ? 
options.nsecs : _lastNSecs + 1; // Time since last uuid creation (in msecs) + + const dt = msecs - _lastMSecs + (nsecs - _lastNSecs) / 10000; // Per 4.2.1.2, Bump clockseq on clock regression + + if (dt < 0 && options.clockseq === undefined) { + clockseq = clockseq + 1 & 0x3fff; + } // Reset nsecs if clock regresses (new clockseq) or we've moved onto a new + // time interval + + + if ((dt < 0 || msecs > _lastMSecs) && options.nsecs === undefined) { + nsecs = 0; + } // Per 4.2.1.2 Throw error if too many uuids are requested + + + if (nsecs >= 10000) { + throw new Error("uuid.v1(): Can't create more than 10M uuids/sec"); + } + + _lastMSecs = msecs; + _lastNSecs = nsecs; + _clockseq = clockseq; // Per 4.1.4 - Convert from unix epoch to Gregorian epoch + + msecs += 12219292800000; // `time_low` + + const tl = ((msecs & 0xfffffff) * 10000 + nsecs) % 0x100000000; + b[i++] = tl >>> 24 & 0xff; + b[i++] = tl >>> 16 & 0xff; + b[i++] = tl >>> 8 & 0xff; + b[i++] = tl & 0xff; // `time_mid` + + const tmh = msecs / 0x100000000 * 10000 & 0xfffffff; + b[i++] = tmh >>> 8 & 0xff; + b[i++] = tmh & 0xff; // `time_high_and_version` + + b[i++] = tmh >>> 24 & 0xf | 0x10; // include version + + b[i++] = tmh >>> 16 & 0xff; // `clock_seq_hi_and_reserved` (Per 4.2.2 - include variant) + + b[i++] = clockseq >>> 8 | 0x80; // `clock_seq_low` + + b[i++] = clockseq & 0xff; // `node` + + for (let n = 0; n < 6; ++n) { + b[i + n] = node[n]; + } + + return buf || unsafeStringify(b); +} + +export default v1; \ No newline at end of file diff --git a/amplify/functions/downloadDocument/node_modules/uuid/dist/esm-browser/v3.js b/amplify/functions/downloadDocument/node_modules/uuid/dist/esm-browser/v3.js new file mode 100644 index 0000000..09063b8 --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/uuid/dist/esm-browser/v3.js @@ -0,0 +1,4 @@ +import v35 from './v35.js'; +import md5 from './md5.js'; +const v3 = v35('v3', 0x30, md5); +export default v3; \ No newline at end of file diff --git 
a/amplify/functions/downloadDocument/node_modules/uuid/dist/esm-browser/v35.js b/amplify/functions/downloadDocument/node_modules/uuid/dist/esm-browser/v35.js new file mode 100644 index 0000000..3355e1f --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/uuid/dist/esm-browser/v35.js @@ -0,0 +1,66 @@ +import { unsafeStringify } from './stringify.js'; +import parse from './parse.js'; + +function stringToBytes(str) { + str = unescape(encodeURIComponent(str)); // UTF8 escape + + const bytes = []; + + for (let i = 0; i < str.length; ++i) { + bytes.push(str.charCodeAt(i)); + } + + return bytes; +} + +export const DNS = '6ba7b810-9dad-11d1-80b4-00c04fd430c8'; +export const URL = '6ba7b811-9dad-11d1-80b4-00c04fd430c8'; +export default function v35(name, version, hashfunc) { + function generateUUID(value, namespace, buf, offset) { + var _namespace; + + if (typeof value === 'string') { + value = stringToBytes(value); + } + + if (typeof namespace === 'string') { + namespace = parse(namespace); + } + + if (((_namespace = namespace) === null || _namespace === void 0 ? void 0 : _namespace.length) !== 16) { + throw TypeError('Namespace must be array-like (16 iterable integer values, 0-255)'); + } // Compute hash of namespace and value, Per 4.3 + // Future: Use spread syntax when supported on all platforms, e.g. `bytes = + // hashfunc([...namespace, ... 
value])` + + + let bytes = new Uint8Array(16 + value.length); + bytes.set(namespace); + bytes.set(value, namespace.length); + bytes = hashfunc(bytes); + bytes[6] = bytes[6] & 0x0f | version; + bytes[8] = bytes[8] & 0x3f | 0x80; + + if (buf) { + offset = offset || 0; + + for (let i = 0; i < 16; ++i) { + buf[offset + i] = bytes[i]; + } + + return buf; + } + + return unsafeStringify(bytes); + } // Function#name is not settable on some platforms (#270) + + + try { + generateUUID.name = name; // eslint-disable-next-line no-empty + } catch (err) {} // For CommonJS default export support + + + generateUUID.DNS = DNS; + generateUUID.URL = URL; + return generateUUID; +} \ No newline at end of file diff --git a/amplify/functions/downloadDocument/node_modules/uuid/dist/esm-browser/v4.js b/amplify/functions/downloadDocument/node_modules/uuid/dist/esm-browser/v4.js new file mode 100644 index 0000000..95ea879 --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/uuid/dist/esm-browser/v4.js @@ -0,0 +1,29 @@ +import native from './native.js'; +import rng from './rng.js'; +import { unsafeStringify } from './stringify.js'; + +function v4(options, buf, offset) { + if (native.randomUUID && !buf && !options) { + return native.randomUUID(); + } + + options = options || {}; + const rnds = options.random || (options.rng || rng)(); // Per 4.4, set bits for version and `clock_seq_hi_and_reserved` + + rnds[6] = rnds[6] & 0x0f | 0x40; + rnds[8] = rnds[8] & 0x3f | 0x80; // Copy bytes to buffer, if provided + + if (buf) { + offset = offset || 0; + + for (let i = 0; i < 16; ++i) { + buf[offset + i] = rnds[i]; + } + + return buf; + } + + return unsafeStringify(rnds); +} + +export default v4; \ No newline at end of file diff --git a/amplify/functions/downloadDocument/node_modules/uuid/dist/esm-browser/v5.js b/amplify/functions/downloadDocument/node_modules/uuid/dist/esm-browser/v5.js new file mode 100644 index 0000000..e87fe31 --- /dev/null +++ 
b/amplify/functions/downloadDocument/node_modules/uuid/dist/esm-browser/v5.js @@ -0,0 +1,4 @@ +import v35 from './v35.js'; +import sha1 from './sha1.js'; +const v5 = v35('v5', 0x50, sha1); +export default v5; \ No newline at end of file diff --git a/amplify/functions/downloadDocument/node_modules/uuid/dist/esm-browser/validate.js b/amplify/functions/downloadDocument/node_modules/uuid/dist/esm-browser/validate.js new file mode 100644 index 0000000..f1cdc7a --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/uuid/dist/esm-browser/validate.js @@ -0,0 +1,7 @@ +import REGEX from './regex.js'; + +function validate(uuid) { + return typeof uuid === 'string' && REGEX.test(uuid); +} + +export default validate; \ No newline at end of file diff --git a/amplify/functions/downloadDocument/node_modules/uuid/dist/esm-browser/version.js b/amplify/functions/downloadDocument/node_modules/uuid/dist/esm-browser/version.js new file mode 100644 index 0000000..9363076 --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/uuid/dist/esm-browser/version.js @@ -0,0 +1,11 @@ +import validate from './validate.js'; + +function version(uuid) { + if (!validate(uuid)) { + throw TypeError('Invalid UUID'); + } + + return parseInt(uuid.slice(14, 15), 16); +} + +export default version; \ No newline at end of file diff --git a/amplify/functions/downloadDocument/node_modules/uuid/dist/esm-node/index.js b/amplify/functions/downloadDocument/node_modules/uuid/dist/esm-node/index.js new file mode 100644 index 0000000..1db6f6d --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/uuid/dist/esm-node/index.js @@ -0,0 +1,9 @@ +export { default as v1 } from './v1.js'; +export { default as v3 } from './v3.js'; +export { default as v4 } from './v4.js'; +export { default as v5 } from './v5.js'; +export { default as NIL } from './nil.js'; +export { default as version } from './version.js'; +export { default as validate } from './validate.js'; +export { default as stringify 
} from './stringify.js'; +export { default as parse } from './parse.js'; \ No newline at end of file diff --git a/amplify/functions/downloadDocument/node_modules/uuid/dist/esm-node/md5.js b/amplify/functions/downloadDocument/node_modules/uuid/dist/esm-node/md5.js new file mode 100644 index 0000000..4d68b04 --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/uuid/dist/esm-node/md5.js @@ -0,0 +1,13 @@ +import crypto from 'crypto'; + +function md5(bytes) { + if (Array.isArray(bytes)) { + bytes = Buffer.from(bytes); + } else if (typeof bytes === 'string') { + bytes = Buffer.from(bytes, 'utf8'); + } + + return crypto.createHash('md5').update(bytes).digest(); +} + +export default md5; \ No newline at end of file diff --git a/amplify/functions/downloadDocument/node_modules/uuid/dist/esm-node/native.js b/amplify/functions/downloadDocument/node_modules/uuid/dist/esm-node/native.js new file mode 100644 index 0000000..f0d1992 --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/uuid/dist/esm-node/native.js @@ -0,0 +1,4 @@ +import crypto from 'crypto'; +export default { + randomUUID: crypto.randomUUID +}; \ No newline at end of file diff --git a/amplify/functions/downloadDocument/node_modules/uuid/dist/esm-node/nil.js b/amplify/functions/downloadDocument/node_modules/uuid/dist/esm-node/nil.js new file mode 100644 index 0000000..b36324c --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/uuid/dist/esm-node/nil.js @@ -0,0 +1 @@ +export default '00000000-0000-0000-0000-000000000000'; \ No newline at end of file diff --git a/amplify/functions/downloadDocument/node_modules/uuid/dist/esm-node/parse.js b/amplify/functions/downloadDocument/node_modules/uuid/dist/esm-node/parse.js new file mode 100644 index 0000000..6421c5d --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/uuid/dist/esm-node/parse.js @@ -0,0 +1,35 @@ +import validate from './validate.js'; + +function parse(uuid) { + if (!validate(uuid)) { + throw 
TypeError('Invalid UUID'); + } + + let v; + const arr = new Uint8Array(16); // Parse ########-....-....-....-............ + + arr[0] = (v = parseInt(uuid.slice(0, 8), 16)) >>> 24; + arr[1] = v >>> 16 & 0xff; + arr[2] = v >>> 8 & 0xff; + arr[3] = v & 0xff; // Parse ........-####-....-....-............ + + arr[4] = (v = parseInt(uuid.slice(9, 13), 16)) >>> 8; + arr[5] = v & 0xff; // Parse ........-....-####-....-............ + + arr[6] = (v = parseInt(uuid.slice(14, 18), 16)) >>> 8; + arr[7] = v & 0xff; // Parse ........-....-....-####-............ + + arr[8] = (v = parseInt(uuid.slice(19, 23), 16)) >>> 8; + arr[9] = v & 0xff; // Parse ........-....-....-....-############ + // (Use "/" to avoid 32-bit truncation when bit-shifting high-order bytes) + + arr[10] = (v = parseInt(uuid.slice(24, 36), 16)) / 0x10000000000 & 0xff; + arr[11] = v / 0x100000000 & 0xff; + arr[12] = v >>> 24 & 0xff; + arr[13] = v >>> 16 & 0xff; + arr[14] = v >>> 8 & 0xff; + arr[15] = v & 0xff; + return arr; +} + +export default parse; \ No newline at end of file diff --git a/amplify/functions/downloadDocument/node_modules/uuid/dist/esm-node/regex.js b/amplify/functions/downloadDocument/node_modules/uuid/dist/esm-node/regex.js new file mode 100644 index 0000000..3da8673 --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/uuid/dist/esm-node/regex.js @@ -0,0 +1 @@ +export default /^(?:[0-9a-f]{8}-[0-9a-f]{4}-[1-5][0-9a-f]{3}-[89ab][0-9a-f]{3}-[0-9a-f]{12}|00000000-0000-0000-0000-000000000000)$/i; \ No newline at end of file diff --git a/amplify/functions/downloadDocument/node_modules/uuid/dist/esm-node/rng.js b/amplify/functions/downloadDocument/node_modules/uuid/dist/esm-node/rng.js new file mode 100644 index 0000000..8006244 --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/uuid/dist/esm-node/rng.js @@ -0,0 +1,12 @@ +import crypto from 'crypto'; +const rnds8Pool = new Uint8Array(256); // # of random values to pre-allocate + +let poolPtr = rnds8Pool.length; +export 
default function rng() { + if (poolPtr > rnds8Pool.length - 16) { + crypto.randomFillSync(rnds8Pool); + poolPtr = 0; + } + + return rnds8Pool.slice(poolPtr, poolPtr += 16); +} \ No newline at end of file diff --git a/amplify/functions/downloadDocument/node_modules/uuid/dist/esm-node/sha1.js b/amplify/functions/downloadDocument/node_modules/uuid/dist/esm-node/sha1.js new file mode 100644 index 0000000..e23850b --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/uuid/dist/esm-node/sha1.js @@ -0,0 +1,13 @@ +import crypto from 'crypto'; + +function sha1(bytes) { + if (Array.isArray(bytes)) { + bytes = Buffer.from(bytes); + } else if (typeof bytes === 'string') { + bytes = Buffer.from(bytes, 'utf8'); + } + + return crypto.createHash('sha1').update(bytes).digest(); +} + +export default sha1; \ No newline at end of file diff --git a/amplify/functions/downloadDocument/node_modules/uuid/dist/esm-node/stringify.js b/amplify/functions/downloadDocument/node_modules/uuid/dist/esm-node/stringify.js new file mode 100644 index 0000000..a6e4c88 --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/uuid/dist/esm-node/stringify.js @@ -0,0 +1,33 @@ +import validate from './validate.js'; +/** + * Convert array of 16 byte values to UUID string format of the form: + * XXXXXXXX-XXXX-XXXX-XXXX-XXXXXXXXXXXX + */ + +const byteToHex = []; + +for (let i = 0; i < 256; ++i) { + byteToHex.push((i + 0x100).toString(16).slice(1)); +} + +export function unsafeStringify(arr, offset = 0) { + // Note: Be careful editing this code! It's been tuned for performance + // and works in ways you may not expect. 
See https://github.com/uuidjs/uuid/pull/434 + return byteToHex[arr[offset + 0]] + byteToHex[arr[offset + 1]] + byteToHex[arr[offset + 2]] + byteToHex[arr[offset + 3]] + '-' + byteToHex[arr[offset + 4]] + byteToHex[arr[offset + 5]] + '-' + byteToHex[arr[offset + 6]] + byteToHex[arr[offset + 7]] + '-' + byteToHex[arr[offset + 8]] + byteToHex[arr[offset + 9]] + '-' + byteToHex[arr[offset + 10]] + byteToHex[arr[offset + 11]] + byteToHex[arr[offset + 12]] + byteToHex[arr[offset + 13]] + byteToHex[arr[offset + 14]] + byteToHex[arr[offset + 15]]; +} + +function stringify(arr, offset = 0) { + const uuid = unsafeStringify(arr, offset); // Consistency check for valid UUID. If this throws, it's likely due to one + // of the following: + // - One or more input array values don't map to a hex octet (leading to + // "undefined" in the uuid) + // - Invalid input values for the RFC `version` or `variant` fields + + if (!validate(uuid)) { + throw TypeError('Stringified UUID is invalid'); + } + + return uuid; +} + +export default stringify; \ No newline at end of file diff --git a/amplify/functions/downloadDocument/node_modules/uuid/dist/esm-node/v1.js b/amplify/functions/downloadDocument/node_modules/uuid/dist/esm-node/v1.js new file mode 100644 index 0000000..382e5d7 --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/uuid/dist/esm-node/v1.js @@ -0,0 +1,95 @@ +import rng from './rng.js'; +import { unsafeStringify } from './stringify.js'; // **`v1()` - Generate time-based UUID** +// +// Inspired by https://github.com/LiosK/UUID.js +// and http://docs.python.org/library/uuid.html + +let _nodeId; + +let _clockseq; // Previous uuid creation time + + +let _lastMSecs = 0; +let _lastNSecs = 0; // See https://github.com/uuidjs/uuid for API details + +function v1(options, buf, offset) { + let i = buf && offset || 0; + const b = buf || new Array(16); + options = options || {}; + let node = options.node || _nodeId; + let clockseq = options.clockseq !== undefined ? 
options.clockseq : _clockseq; // node and clockseq need to be initialized to random values if they're not + // specified. We do this lazily to minimize issues related to insufficient + // system entropy. See #189 + + if (node == null || clockseq == null) { + const seedBytes = options.random || (options.rng || rng)(); + + if (node == null) { + // Per 4.5, create and 48-bit node id, (47 random bits + multicast bit = 1) + node = _nodeId = [seedBytes[0] | 0x01, seedBytes[1], seedBytes[2], seedBytes[3], seedBytes[4], seedBytes[5]]; + } + + if (clockseq == null) { + // Per 4.2.2, randomize (14 bit) clockseq + clockseq = _clockseq = (seedBytes[6] << 8 | seedBytes[7]) & 0x3fff; + } + } // UUID timestamps are 100 nano-second units since the Gregorian epoch, + // (1582-10-15 00:00). JSNumbers aren't precise enough for this, so + // time is handled internally as 'msecs' (integer milliseconds) and 'nsecs' + // (100-nanoseconds offset from msecs) since unix epoch, 1970-01-01 00:00. + + + let msecs = options.msecs !== undefined ? options.msecs : Date.now(); // Per 4.2.1.2, use count of uuid's generated during the current clock + // cycle to simulate higher resolution clock + + let nsecs = options.nsecs !== undefined ? 
options.nsecs : _lastNSecs + 1; // Time since last uuid creation (in msecs) + + const dt = msecs - _lastMSecs + (nsecs - _lastNSecs) / 10000; // Per 4.2.1.2, Bump clockseq on clock regression + + if (dt < 0 && options.clockseq === undefined) { + clockseq = clockseq + 1 & 0x3fff; + } // Reset nsecs if clock regresses (new clockseq) or we've moved onto a new + // time interval + + + if ((dt < 0 || msecs > _lastMSecs) && options.nsecs === undefined) { + nsecs = 0; + } // Per 4.2.1.2 Throw error if too many uuids are requested + + + if (nsecs >= 10000) { + throw new Error("uuid.v1(): Can't create more than 10M uuids/sec"); + } + + _lastMSecs = msecs; + _lastNSecs = nsecs; + _clockseq = clockseq; // Per 4.1.4 - Convert from unix epoch to Gregorian epoch + + msecs += 12219292800000; // `time_low` + + const tl = ((msecs & 0xfffffff) * 10000 + nsecs) % 0x100000000; + b[i++] = tl >>> 24 & 0xff; + b[i++] = tl >>> 16 & 0xff; + b[i++] = tl >>> 8 & 0xff; + b[i++] = tl & 0xff; // `time_mid` + + const tmh = msecs / 0x100000000 * 10000 & 0xfffffff; + b[i++] = tmh >>> 8 & 0xff; + b[i++] = tmh & 0xff; // `time_high_and_version` + + b[i++] = tmh >>> 24 & 0xf | 0x10; // include version + + b[i++] = tmh >>> 16 & 0xff; // `clock_seq_hi_and_reserved` (Per 4.2.2 - include variant) + + b[i++] = clockseq >>> 8 | 0x80; // `clock_seq_low` + + b[i++] = clockseq & 0xff; // `node` + + for (let n = 0; n < 6; ++n) { + b[i + n] = node[n]; + } + + return buf || unsafeStringify(b); +} + +export default v1; \ No newline at end of file diff --git a/amplify/functions/downloadDocument/node_modules/uuid/dist/esm-node/v3.js b/amplify/functions/downloadDocument/node_modules/uuid/dist/esm-node/v3.js new file mode 100644 index 0000000..09063b8 --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/uuid/dist/esm-node/v3.js @@ -0,0 +1,4 @@ +import v35 from './v35.js'; +import md5 from './md5.js'; +const v3 = v35('v3', 0x30, md5); +export default v3; \ No newline at end of file diff --git 
a/amplify/functions/downloadDocument/node_modules/uuid/dist/esm-node/v35.js b/amplify/functions/downloadDocument/node_modules/uuid/dist/esm-node/v35.js new file mode 100644 index 0000000..3355e1f --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/uuid/dist/esm-node/v35.js @@ -0,0 +1,66 @@ +import { unsafeStringify } from './stringify.js'; +import parse from './parse.js'; + +function stringToBytes(str) { + str = unescape(encodeURIComponent(str)); // UTF8 escape + + const bytes = []; + + for (let i = 0; i < str.length; ++i) { + bytes.push(str.charCodeAt(i)); + } + + return bytes; +} + +export const DNS = '6ba7b810-9dad-11d1-80b4-00c04fd430c8'; +export const URL = '6ba7b811-9dad-11d1-80b4-00c04fd430c8'; +export default function v35(name, version, hashfunc) { + function generateUUID(value, namespace, buf, offset) { + var _namespace; + + if (typeof value === 'string') { + value = stringToBytes(value); + } + + if (typeof namespace === 'string') { + namespace = parse(namespace); + } + + if (((_namespace = namespace) === null || _namespace === void 0 ? void 0 : _namespace.length) !== 16) { + throw TypeError('Namespace must be array-like (16 iterable integer values, 0-255)'); + } // Compute hash of namespace and value, Per 4.3 + // Future: Use spread syntax when supported on all platforms, e.g. `bytes = + // hashfunc([...namespace, ... 
value])` + + + let bytes = new Uint8Array(16 + value.length); + bytes.set(namespace); + bytes.set(value, namespace.length); + bytes = hashfunc(bytes); + bytes[6] = bytes[6] & 0x0f | version; + bytes[8] = bytes[8] & 0x3f | 0x80; + + if (buf) { + offset = offset || 0; + + for (let i = 0; i < 16; ++i) { + buf[offset + i] = bytes[i]; + } + + return buf; + } + + return unsafeStringify(bytes); + } // Function#name is not settable on some platforms (#270) + + + try { + generateUUID.name = name; // eslint-disable-next-line no-empty + } catch (err) {} // For CommonJS default export support + + + generateUUID.DNS = DNS; + generateUUID.URL = URL; + return generateUUID; +} \ No newline at end of file diff --git a/amplify/functions/downloadDocument/node_modules/uuid/dist/esm-node/v4.js b/amplify/functions/downloadDocument/node_modules/uuid/dist/esm-node/v4.js new file mode 100644 index 0000000..95ea879 --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/uuid/dist/esm-node/v4.js @@ -0,0 +1,29 @@ +import native from './native.js'; +import rng from './rng.js'; +import { unsafeStringify } from './stringify.js'; + +function v4(options, buf, offset) { + if (native.randomUUID && !buf && !options) { + return native.randomUUID(); + } + + options = options || {}; + const rnds = options.random || (options.rng || rng)(); // Per 4.4, set bits for version and `clock_seq_hi_and_reserved` + + rnds[6] = rnds[6] & 0x0f | 0x40; + rnds[8] = rnds[8] & 0x3f | 0x80; // Copy bytes to buffer, if provided + + if (buf) { + offset = offset || 0; + + for (let i = 0; i < 16; ++i) { + buf[offset + i] = rnds[i]; + } + + return buf; + } + + return unsafeStringify(rnds); +} + +export default v4; \ No newline at end of file diff --git a/amplify/functions/downloadDocument/node_modules/uuid/dist/esm-node/v5.js b/amplify/functions/downloadDocument/node_modules/uuid/dist/esm-node/v5.js new file mode 100644 index 0000000..e87fe31 --- /dev/null +++ 
b/amplify/functions/downloadDocument/node_modules/uuid/dist/esm-node/v5.js @@ -0,0 +1,4 @@ +import v35 from './v35.js'; +import sha1 from './sha1.js'; +const v5 = v35('v5', 0x50, sha1); +export default v5; \ No newline at end of file diff --git a/amplify/functions/downloadDocument/node_modules/uuid/dist/esm-node/validate.js b/amplify/functions/downloadDocument/node_modules/uuid/dist/esm-node/validate.js new file mode 100644 index 0000000..f1cdc7a --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/uuid/dist/esm-node/validate.js @@ -0,0 +1,7 @@ +import REGEX from './regex.js'; + +function validate(uuid) { + return typeof uuid === 'string' && REGEX.test(uuid); +} + +export default validate; \ No newline at end of file diff --git a/amplify/functions/downloadDocument/node_modules/uuid/dist/esm-node/version.js b/amplify/functions/downloadDocument/node_modules/uuid/dist/esm-node/version.js new file mode 100644 index 0000000..9363076 --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/uuid/dist/esm-node/version.js @@ -0,0 +1,11 @@ +import validate from './validate.js'; + +function version(uuid) { + if (!validate(uuid)) { + throw TypeError('Invalid UUID'); + } + + return parseInt(uuid.slice(14, 15), 16); +} + +export default version; \ No newline at end of file diff --git a/amplify/functions/downloadDocument/node_modules/uuid/dist/index.js b/amplify/functions/downloadDocument/node_modules/uuid/dist/index.js new file mode 100644 index 0000000..88d676a --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/uuid/dist/index.js @@ -0,0 +1,79 @@ +"use strict"; + +Object.defineProperty(exports, "__esModule", { + value: true +}); +Object.defineProperty(exports, "NIL", { + enumerable: true, + get: function () { + return _nil.default; + } +}); +Object.defineProperty(exports, "parse", { + enumerable: true, + get: function () { + return _parse.default; + } +}); +Object.defineProperty(exports, "stringify", { + enumerable: true, + get: 
function () { + return _stringify.default; + } +}); +Object.defineProperty(exports, "v1", { + enumerable: true, + get: function () { + return _v.default; + } +}); +Object.defineProperty(exports, "v3", { + enumerable: true, + get: function () { + return _v2.default; + } +}); +Object.defineProperty(exports, "v4", { + enumerable: true, + get: function () { + return _v3.default; + } +}); +Object.defineProperty(exports, "v5", { + enumerable: true, + get: function () { + return _v4.default; + } +}); +Object.defineProperty(exports, "validate", { + enumerable: true, + get: function () { + return _validate.default; + } +}); +Object.defineProperty(exports, "version", { + enumerable: true, + get: function () { + return _version.default; + } +}); + +var _v = _interopRequireDefault(require("./v1.js")); + +var _v2 = _interopRequireDefault(require("./v3.js")); + +var _v3 = _interopRequireDefault(require("./v4.js")); + +var _v4 = _interopRequireDefault(require("./v5.js")); + +var _nil = _interopRequireDefault(require("./nil.js")); + +var _version = _interopRequireDefault(require("./version.js")); + +var _validate = _interopRequireDefault(require("./validate.js")); + +var _stringify = _interopRequireDefault(require("./stringify.js")); + +var _parse = _interopRequireDefault(require("./parse.js")); + +function _interopRequireDefault(obj) { return obj && obj.__esModule ? 
obj : { default: obj }; } \ No newline at end of file diff --git a/amplify/functions/downloadDocument/node_modules/uuid/dist/md5-browser.js b/amplify/functions/downloadDocument/node_modules/uuid/dist/md5-browser.js new file mode 100644 index 0000000..7a4582a --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/uuid/dist/md5-browser.js @@ -0,0 +1,223 @@ +"use strict"; + +Object.defineProperty(exports, "__esModule", { + value: true +}); +exports.default = void 0; + +/* + * Browser-compatible JavaScript MD5 + * + * Modification of JavaScript MD5 + * https://github.com/blueimp/JavaScript-MD5 + * + * Copyright 2011, Sebastian Tschan + * https://blueimp.net + * + * Licensed under the MIT license: + * https://opensource.org/licenses/MIT + * + * Based on + * A JavaScript implementation of the RSA Data Security, Inc. MD5 Message + * Digest Algorithm, as defined in RFC 1321. + * Version 2.2 Copyright (C) Paul Johnston 1999 - 2009 + * Other contributors: Greg Holt, Andrew Kepert, Ydnar, Lostinet + * Distributed under the BSD License + * See http://pajhome.org.uk/crypt/md5 for more info. 
+ */ +function md5(bytes) { + if (typeof bytes === 'string') { + const msg = unescape(encodeURIComponent(bytes)); // UTF8 escape + + bytes = new Uint8Array(msg.length); + + for (let i = 0; i < msg.length; ++i) { + bytes[i] = msg.charCodeAt(i); + } + } + + return md5ToHexEncodedArray(wordsToMd5(bytesToWords(bytes), bytes.length * 8)); +} +/* + * Convert an array of little-endian words to an array of bytes + */ + + +function md5ToHexEncodedArray(input) { + const output = []; + const length32 = input.length * 32; + const hexTab = '0123456789abcdef'; + + for (let i = 0; i < length32; i += 8) { + const x = input[i >> 5] >>> i % 32 & 0xff; + const hex = parseInt(hexTab.charAt(x >>> 4 & 0x0f) + hexTab.charAt(x & 0x0f), 16); + output.push(hex); + } + + return output; +} +/** + * Calculate output length with padding and bit length + */ + + +function getOutputLength(inputLength8) { + return (inputLength8 + 64 >>> 9 << 4) + 14 + 1; +} +/* + * Calculate the MD5 of an array of little-endian words, and a bit length. 
+ */ + + +function wordsToMd5(x, len) { + /* append padding */ + x[len >> 5] |= 0x80 << len % 32; + x[getOutputLength(len) - 1] = len; + let a = 1732584193; + let b = -271733879; + let c = -1732584194; + let d = 271733878; + + for (let i = 0; i < x.length; i += 16) { + const olda = a; + const oldb = b; + const oldc = c; + const oldd = d; + a = md5ff(a, b, c, d, x[i], 7, -680876936); + d = md5ff(d, a, b, c, x[i + 1], 12, -389564586); + c = md5ff(c, d, a, b, x[i + 2], 17, 606105819); + b = md5ff(b, c, d, a, x[i + 3], 22, -1044525330); + a = md5ff(a, b, c, d, x[i + 4], 7, -176418897); + d = md5ff(d, a, b, c, x[i + 5], 12, 1200080426); + c = md5ff(c, d, a, b, x[i + 6], 17, -1473231341); + b = md5ff(b, c, d, a, x[i + 7], 22, -45705983); + a = md5ff(a, b, c, d, x[i + 8], 7, 1770035416); + d = md5ff(d, a, b, c, x[i + 9], 12, -1958414417); + c = md5ff(c, d, a, b, x[i + 10], 17, -42063); + b = md5ff(b, c, d, a, x[i + 11], 22, -1990404162); + a = md5ff(a, b, c, d, x[i + 12], 7, 1804603682); + d = md5ff(d, a, b, c, x[i + 13], 12, -40341101); + c = md5ff(c, d, a, b, x[i + 14], 17, -1502002290); + b = md5ff(b, c, d, a, x[i + 15], 22, 1236535329); + a = md5gg(a, b, c, d, x[i + 1], 5, -165796510); + d = md5gg(d, a, b, c, x[i + 6], 9, -1069501632); + c = md5gg(c, d, a, b, x[i + 11], 14, 643717713); + b = md5gg(b, c, d, a, x[i], 20, -373897302); + a = md5gg(a, b, c, d, x[i + 5], 5, -701558691); + d = md5gg(d, a, b, c, x[i + 10], 9, 38016083); + c = md5gg(c, d, a, b, x[i + 15], 14, -660478335); + b = md5gg(b, c, d, a, x[i + 4], 20, -405537848); + a = md5gg(a, b, c, d, x[i + 9], 5, 568446438); + d = md5gg(d, a, b, c, x[i + 14], 9, -1019803690); + c = md5gg(c, d, a, b, x[i + 3], 14, -187363961); + b = md5gg(b, c, d, a, x[i + 8], 20, 1163531501); + a = md5gg(a, b, c, d, x[i + 13], 5, -1444681467); + d = md5gg(d, a, b, c, x[i + 2], 9, -51403784); + c = md5gg(c, d, a, b, x[i + 7], 14, 1735328473); + b = md5gg(b, c, d, a, x[i + 12], 20, -1926607734); + a = md5hh(a, b, c, d, x[i + 5], 4, 
-378558); + d = md5hh(d, a, b, c, x[i + 8], 11, -2022574463); + c = md5hh(c, d, a, b, x[i + 11], 16, 1839030562); + b = md5hh(b, c, d, a, x[i + 14], 23, -35309556); + a = md5hh(a, b, c, d, x[i + 1], 4, -1530992060); + d = md5hh(d, a, b, c, x[i + 4], 11, 1272893353); + c = md5hh(c, d, a, b, x[i + 7], 16, -155497632); + b = md5hh(b, c, d, a, x[i + 10], 23, -1094730640); + a = md5hh(a, b, c, d, x[i + 13], 4, 681279174); + d = md5hh(d, a, b, c, x[i], 11, -358537222); + c = md5hh(c, d, a, b, x[i + 3], 16, -722521979); + b = md5hh(b, c, d, a, x[i + 6], 23, 76029189); + a = md5hh(a, b, c, d, x[i + 9], 4, -640364487); + d = md5hh(d, a, b, c, x[i + 12], 11, -421815835); + c = md5hh(c, d, a, b, x[i + 15], 16, 530742520); + b = md5hh(b, c, d, a, x[i + 2], 23, -995338651); + a = md5ii(a, b, c, d, x[i], 6, -198630844); + d = md5ii(d, a, b, c, x[i + 7], 10, 1126891415); + c = md5ii(c, d, a, b, x[i + 14], 15, -1416354905); + b = md5ii(b, c, d, a, x[i + 5], 21, -57434055); + a = md5ii(a, b, c, d, x[i + 12], 6, 1700485571); + d = md5ii(d, a, b, c, x[i + 3], 10, -1894986606); + c = md5ii(c, d, a, b, x[i + 10], 15, -1051523); + b = md5ii(b, c, d, a, x[i + 1], 21, -2054922799); + a = md5ii(a, b, c, d, x[i + 8], 6, 1873313359); + d = md5ii(d, a, b, c, x[i + 15], 10, -30611744); + c = md5ii(c, d, a, b, x[i + 6], 15, -1560198380); + b = md5ii(b, c, d, a, x[i + 13], 21, 1309151649); + a = md5ii(a, b, c, d, x[i + 4], 6, -145523070); + d = md5ii(d, a, b, c, x[i + 11], 10, -1120210379); + c = md5ii(c, d, a, b, x[i + 2], 15, 718787259); + b = md5ii(b, c, d, a, x[i + 9], 21, -343485551); + a = safeAdd(a, olda); + b = safeAdd(b, oldb); + c = safeAdd(c, oldc); + d = safeAdd(d, oldd); + } + + return [a, b, c, d]; +} +/* + * Convert an array bytes to an array of little-endian words + * Characters >255 have their high-byte silently ignored. 
+ */ + + +function bytesToWords(input) { + if (input.length === 0) { + return []; + } + + const length8 = input.length * 8; + const output = new Uint32Array(getOutputLength(length8)); + + for (let i = 0; i < length8; i += 8) { + output[i >> 5] |= (input[i / 8] & 0xff) << i % 32; + } + + return output; +} +/* + * Add integers, wrapping at 2^32. This uses 16-bit operations internally + * to work around bugs in some JS interpreters. + */ + + +function safeAdd(x, y) { + const lsw = (x & 0xffff) + (y & 0xffff); + const msw = (x >> 16) + (y >> 16) + (lsw >> 16); + return msw << 16 | lsw & 0xffff; +} +/* + * Bitwise rotate a 32-bit number to the left. + */ + + +function bitRotateLeft(num, cnt) { + return num << cnt | num >>> 32 - cnt; +} +/* + * These functions implement the four basic operations the algorithm uses. + */ + + +function md5cmn(q, a, b, x, s, t) { + return safeAdd(bitRotateLeft(safeAdd(safeAdd(a, q), safeAdd(x, t)), s), b); +} + +function md5ff(a, b, c, d, x, s, t) { + return md5cmn(b & c | ~b & d, a, b, x, s, t); +} + +function md5gg(a, b, c, d, x, s, t) { + return md5cmn(b & d | c & ~d, a, b, x, s, t); +} + +function md5hh(a, b, c, d, x, s, t) { + return md5cmn(b ^ c ^ d, a, b, x, s, t); +} + +function md5ii(a, b, c, d, x, s, t) { + return md5cmn(c ^ (b | ~d), a, b, x, s, t); +} + +var _default = md5; +exports.default = _default; \ No newline at end of file diff --git a/amplify/functions/downloadDocument/node_modules/uuid/dist/md5.js b/amplify/functions/downloadDocument/node_modules/uuid/dist/md5.js new file mode 100644 index 0000000..824d481 --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/uuid/dist/md5.js @@ -0,0 +1,23 @@ +"use strict"; + +Object.defineProperty(exports, "__esModule", { + value: true +}); +exports.default = void 0; + +var _crypto = _interopRequireDefault(require("crypto")); + +function _interopRequireDefault(obj) { return obj && obj.__esModule ? 
obj : { default: obj }; } + +function md5(bytes) { + if (Array.isArray(bytes)) { + bytes = Buffer.from(bytes); + } else if (typeof bytes === 'string') { + bytes = Buffer.from(bytes, 'utf8'); + } + + return _crypto.default.createHash('md5').update(bytes).digest(); +} + +var _default = md5; +exports.default = _default; \ No newline at end of file diff --git a/amplify/functions/downloadDocument/node_modules/uuid/dist/native-browser.js b/amplify/functions/downloadDocument/node_modules/uuid/dist/native-browser.js new file mode 100644 index 0000000..c2eea59 --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/uuid/dist/native-browser.js @@ -0,0 +1,11 @@ +"use strict"; + +Object.defineProperty(exports, "__esModule", { + value: true +}); +exports.default = void 0; +const randomUUID = typeof crypto !== 'undefined' && crypto.randomUUID && crypto.randomUUID.bind(crypto); +var _default = { + randomUUID +}; +exports.default = _default; \ No newline at end of file diff --git a/amplify/functions/downloadDocument/node_modules/uuid/dist/native.js b/amplify/functions/downloadDocument/node_modules/uuid/dist/native.js new file mode 100644 index 0000000..de80469 --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/uuid/dist/native.js @@ -0,0 +1,15 @@ +"use strict"; + +Object.defineProperty(exports, "__esModule", { + value: true +}); +exports.default = void 0; + +var _crypto = _interopRequireDefault(require("crypto")); + +function _interopRequireDefault(obj) { return obj && obj.__esModule ? 
obj : { default: obj }; } + +var _default = { + randomUUID: _crypto.default.randomUUID +}; +exports.default = _default; \ No newline at end of file diff --git a/amplify/functions/downloadDocument/node_modules/uuid/dist/nil.js b/amplify/functions/downloadDocument/node_modules/uuid/dist/nil.js new file mode 100644 index 0000000..7ade577 --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/uuid/dist/nil.js @@ -0,0 +1,8 @@ +"use strict"; + +Object.defineProperty(exports, "__esModule", { + value: true +}); +exports.default = void 0; +var _default = '00000000-0000-0000-0000-000000000000'; +exports.default = _default; \ No newline at end of file diff --git a/amplify/functions/downloadDocument/node_modules/uuid/dist/parse.js b/amplify/functions/downloadDocument/node_modules/uuid/dist/parse.js new file mode 100644 index 0000000..4c69fc3 --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/uuid/dist/parse.js @@ -0,0 +1,45 @@ +"use strict"; + +Object.defineProperty(exports, "__esModule", { + value: true +}); +exports.default = void 0; + +var _validate = _interopRequireDefault(require("./validate.js")); + +function _interopRequireDefault(obj) { return obj && obj.__esModule ? obj : { default: obj }; } + +function parse(uuid) { + if (!(0, _validate.default)(uuid)) { + throw TypeError('Invalid UUID'); + } + + let v; + const arr = new Uint8Array(16); // Parse ########-....-....-....-............ + + arr[0] = (v = parseInt(uuid.slice(0, 8), 16)) >>> 24; + arr[1] = v >>> 16 & 0xff; + arr[2] = v >>> 8 & 0xff; + arr[3] = v & 0xff; // Parse ........-####-....-....-............ + + arr[4] = (v = parseInt(uuid.slice(9, 13), 16)) >>> 8; + arr[5] = v & 0xff; // Parse ........-....-####-....-............ + + arr[6] = (v = parseInt(uuid.slice(14, 18), 16)) >>> 8; + arr[7] = v & 0xff; // Parse ........-....-....-####-............ 
+ + arr[8] = (v = parseInt(uuid.slice(19, 23), 16)) >>> 8; + arr[9] = v & 0xff; // Parse ........-....-....-....-############ + // (Use "/" to avoid 32-bit truncation when bit-shifting high-order bytes) + + arr[10] = (v = parseInt(uuid.slice(24, 36), 16)) / 0x10000000000 & 0xff; + arr[11] = v / 0x100000000 & 0xff; + arr[12] = v >>> 24 & 0xff; + arr[13] = v >>> 16 & 0xff; + arr[14] = v >>> 8 & 0xff; + arr[15] = v & 0xff; + return arr; +} + +var _default = parse; +exports.default = _default; \ No newline at end of file diff --git a/amplify/functions/downloadDocument/node_modules/uuid/dist/regex.js b/amplify/functions/downloadDocument/node_modules/uuid/dist/regex.js new file mode 100644 index 0000000..1ef91d6 --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/uuid/dist/regex.js @@ -0,0 +1,8 @@ +"use strict"; + +Object.defineProperty(exports, "__esModule", { + value: true +}); +exports.default = void 0; +var _default = /^(?:[0-9a-f]{8}-[0-9a-f]{4}-[1-5][0-9a-f]{3}-[89ab][0-9a-f]{3}-[0-9a-f]{12}|00000000-0000-0000-0000-000000000000)$/i; +exports.default = _default; \ No newline at end of file diff --git a/amplify/functions/downloadDocument/node_modules/uuid/dist/rng-browser.js b/amplify/functions/downloadDocument/node_modules/uuid/dist/rng-browser.js new file mode 100644 index 0000000..d067cdb --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/uuid/dist/rng-browser.js @@ -0,0 +1,25 @@ +"use strict"; + +Object.defineProperty(exports, "__esModule", { + value: true +}); +exports.default = rng; +// Unique ID creation requires a high quality random # generator. In the browser we therefore +// require the crypto API and do not support built-in fallback to lower quality random number +// generators (like Math.random()). 
+let getRandomValues; +const rnds8 = new Uint8Array(16); + +function rng() { + // lazy load so that environments that need to polyfill have a chance to do so + if (!getRandomValues) { + // getRandomValues needs to be invoked in a context where "this" is a Crypto implementation. + getRandomValues = typeof crypto !== 'undefined' && crypto.getRandomValues && crypto.getRandomValues.bind(crypto); + + if (!getRandomValues) { + throw new Error('crypto.getRandomValues() not supported. See https://github.com/uuidjs/uuid#getrandomvalues-not-supported'); + } + } + + return getRandomValues(rnds8); +} \ No newline at end of file diff --git a/amplify/functions/downloadDocument/node_modules/uuid/dist/rng.js b/amplify/functions/downloadDocument/node_modules/uuid/dist/rng.js new file mode 100644 index 0000000..3507f93 --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/uuid/dist/rng.js @@ -0,0 +1,24 @@ +"use strict"; + +Object.defineProperty(exports, "__esModule", { + value: true +}); +exports.default = rng; + +var _crypto = _interopRequireDefault(require("crypto")); + +function _interopRequireDefault(obj) { return obj && obj.__esModule ? 
obj : { default: obj }; } + +const rnds8Pool = new Uint8Array(256); // # of random values to pre-allocate + +let poolPtr = rnds8Pool.length; + +function rng() { + if (poolPtr > rnds8Pool.length - 16) { + _crypto.default.randomFillSync(rnds8Pool); + + poolPtr = 0; + } + + return rnds8Pool.slice(poolPtr, poolPtr += 16); +} \ No newline at end of file diff --git a/amplify/functions/downloadDocument/node_modules/uuid/dist/sha1-browser.js b/amplify/functions/downloadDocument/node_modules/uuid/dist/sha1-browser.js new file mode 100644 index 0000000..24cbced --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/uuid/dist/sha1-browser.js @@ -0,0 +1,104 @@ +"use strict"; + +Object.defineProperty(exports, "__esModule", { + value: true +}); +exports.default = void 0; + +// Adapted from Chris Veness' SHA1 code at +// http://www.movable-type.co.uk/scripts/sha1.html +function f(s, x, y, z) { + switch (s) { + case 0: + return x & y ^ ~x & z; + + case 1: + return x ^ y ^ z; + + case 2: + return x & y ^ x & z ^ y & z; + + case 3: + return x ^ y ^ z; + } +} + +function ROTL(x, n) { + return x << n | x >>> 32 - n; +} + +function sha1(bytes) { + const K = [0x5a827999, 0x6ed9eba1, 0x8f1bbcdc, 0xca62c1d6]; + const H = [0x67452301, 0xefcdab89, 0x98badcfe, 0x10325476, 0xc3d2e1f0]; + + if (typeof bytes === 'string') { + const msg = unescape(encodeURIComponent(bytes)); // UTF8 escape + + bytes = []; + + for (let i = 0; i < msg.length; ++i) { + bytes.push(msg.charCodeAt(i)); + } + } else if (!Array.isArray(bytes)) { + // Convert Array-like to Array + bytes = Array.prototype.slice.call(bytes); + } + + bytes.push(0x80); + const l = bytes.length / 4 + 2; + const N = Math.ceil(l / 16); + const M = new Array(N); + + for (let i = 0; i < N; ++i) { + const arr = new Uint32Array(16); + + for (let j = 0; j < 16; ++j) { + arr[j] = bytes[i * 64 + j * 4] << 24 | bytes[i * 64 + j * 4 + 1] << 16 | bytes[i * 64 + j * 4 + 2] << 8 | bytes[i * 64 + j * 4 + 3]; + } + + M[i] = arr; + } + + M[N - 
1][14] = (bytes.length - 1) * 8 / Math.pow(2, 32); + M[N - 1][14] = Math.floor(M[N - 1][14]); + M[N - 1][15] = (bytes.length - 1) * 8 & 0xffffffff; + + for (let i = 0; i < N; ++i) { + const W = new Uint32Array(80); + + for (let t = 0; t < 16; ++t) { + W[t] = M[i][t]; + } + + for (let t = 16; t < 80; ++t) { + W[t] = ROTL(W[t - 3] ^ W[t - 8] ^ W[t - 14] ^ W[t - 16], 1); + } + + let a = H[0]; + let b = H[1]; + let c = H[2]; + let d = H[3]; + let e = H[4]; + + for (let t = 0; t < 80; ++t) { + const s = Math.floor(t / 20); + const T = ROTL(a, 5) + f(s, b, c, d) + e + K[s] + W[t] >>> 0; + e = d; + d = c; + c = ROTL(b, 30) >>> 0; + b = a; + a = T; + } + + H[0] = H[0] + a >>> 0; + H[1] = H[1] + b >>> 0; + H[2] = H[2] + c >>> 0; + H[3] = H[3] + d >>> 0; + H[4] = H[4] + e >>> 0; + } + + return [H[0] >> 24 & 0xff, H[0] >> 16 & 0xff, H[0] >> 8 & 0xff, H[0] & 0xff, H[1] >> 24 & 0xff, H[1] >> 16 & 0xff, H[1] >> 8 & 0xff, H[1] & 0xff, H[2] >> 24 & 0xff, H[2] >> 16 & 0xff, H[2] >> 8 & 0xff, H[2] & 0xff, H[3] >> 24 & 0xff, H[3] >> 16 & 0xff, H[3] >> 8 & 0xff, H[3] & 0xff, H[4] >> 24 & 0xff, H[4] >> 16 & 0xff, H[4] >> 8 & 0xff, H[4] & 0xff]; +} + +var _default = sha1; +exports.default = _default; \ No newline at end of file diff --git a/amplify/functions/downloadDocument/node_modules/uuid/dist/sha1.js b/amplify/functions/downloadDocument/node_modules/uuid/dist/sha1.js new file mode 100644 index 0000000..03bdd63 --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/uuid/dist/sha1.js @@ -0,0 +1,23 @@ +"use strict"; + +Object.defineProperty(exports, "__esModule", { + value: true +}); +exports.default = void 0; + +var _crypto = _interopRequireDefault(require("crypto")); + +function _interopRequireDefault(obj) { return obj && obj.__esModule ? 
obj : { default: obj }; } + +function sha1(bytes) { + if (Array.isArray(bytes)) { + bytes = Buffer.from(bytes); + } else if (typeof bytes === 'string') { + bytes = Buffer.from(bytes, 'utf8'); + } + + return _crypto.default.createHash('sha1').update(bytes).digest(); +} + +var _default = sha1; +exports.default = _default; \ No newline at end of file diff --git a/amplify/functions/downloadDocument/node_modules/uuid/dist/stringify.js b/amplify/functions/downloadDocument/node_modules/uuid/dist/stringify.js new file mode 100644 index 0000000..390bf89 --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/uuid/dist/stringify.js @@ -0,0 +1,44 @@ +"use strict"; + +Object.defineProperty(exports, "__esModule", { + value: true +}); +exports.default = void 0; +exports.unsafeStringify = unsafeStringify; + +var _validate = _interopRequireDefault(require("./validate.js")); + +function _interopRequireDefault(obj) { return obj && obj.__esModule ? obj : { default: obj }; } + +/** + * Convert array of 16 byte values to UUID string format of the form: + * XXXXXXXX-XXXX-XXXX-XXXX-XXXXXXXXXXXX + */ +const byteToHex = []; + +for (let i = 0; i < 256; ++i) { + byteToHex.push((i + 0x100).toString(16).slice(1)); +} + +function unsafeStringify(arr, offset = 0) { + // Note: Be careful editing this code! It's been tuned for performance + // and works in ways you may not expect. 
See https://github.com/uuidjs/uuid/pull/434 + return byteToHex[arr[offset + 0]] + byteToHex[arr[offset + 1]] + byteToHex[arr[offset + 2]] + byteToHex[arr[offset + 3]] + '-' + byteToHex[arr[offset + 4]] + byteToHex[arr[offset + 5]] + '-' + byteToHex[arr[offset + 6]] + byteToHex[arr[offset + 7]] + '-' + byteToHex[arr[offset + 8]] + byteToHex[arr[offset + 9]] + '-' + byteToHex[arr[offset + 10]] + byteToHex[arr[offset + 11]] + byteToHex[arr[offset + 12]] + byteToHex[arr[offset + 13]] + byteToHex[arr[offset + 14]] + byteToHex[arr[offset + 15]]; +} + +function stringify(arr, offset = 0) { + const uuid = unsafeStringify(arr, offset); // Consistency check for valid UUID. If this throws, it's likely due to one + // of the following: + // - One or more input array values don't map to a hex octet (leading to + // "undefined" in the uuid) + // - Invalid input values for the RFC `version` or `variant` fields + + if (!(0, _validate.default)(uuid)) { + throw TypeError('Stringified UUID is invalid'); + } + + return uuid; +} + +var _default = stringify; +exports.default = _default; \ No newline at end of file diff --git a/amplify/functions/downloadDocument/node_modules/uuid/dist/uuid-bin.js b/amplify/functions/downloadDocument/node_modules/uuid/dist/uuid-bin.js new file mode 100644 index 0000000..50a7a9f --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/uuid/dist/uuid-bin.js @@ -0,0 +1,85 @@ +"use strict"; + +var _assert = _interopRequireDefault(require("assert")); + +var _v = _interopRequireDefault(require("./v1.js")); + +var _v2 = _interopRequireDefault(require("./v3.js")); + +var _v3 = _interopRequireDefault(require("./v4.js")); + +var _v4 = _interopRequireDefault(require("./v5.js")); + +function _interopRequireDefault(obj) { return obj && obj.__esModule ? 
obj : { default: obj }; } + +function usage() { + console.log('Usage:'); + console.log(' uuid'); + console.log(' uuid v1'); + console.log(' uuid v3 '); + console.log(' uuid v4'); + console.log(' uuid v5 '); + console.log(' uuid --help'); + console.log('\nNote: may be "URL" or "DNS" to use the corresponding UUIDs defined by RFC4122'); +} + +const args = process.argv.slice(2); + +if (args.indexOf('--help') >= 0) { + usage(); + process.exit(0); +} + +const version = args.shift() || 'v4'; + +switch (version) { + case 'v1': + console.log((0, _v.default)()); + break; + + case 'v3': + { + const name = args.shift(); + let namespace = args.shift(); + (0, _assert.default)(name != null, 'v3 name not specified'); + (0, _assert.default)(namespace != null, 'v3 namespace not specified'); + + if (namespace === 'URL') { + namespace = _v2.default.URL; + } + + if (namespace === 'DNS') { + namespace = _v2.default.DNS; + } + + console.log((0, _v2.default)(name, namespace)); + break; + } + + case 'v4': + console.log((0, _v3.default)()); + break; + + case 'v5': + { + const name = args.shift(); + let namespace = args.shift(); + (0, _assert.default)(name != null, 'v5 name not specified'); + (0, _assert.default)(namespace != null, 'v5 namespace not specified'); + + if (namespace === 'URL') { + namespace = _v4.default.URL; + } + + if (namespace === 'DNS') { + namespace = _v4.default.DNS; + } + + console.log((0, _v4.default)(name, namespace)); + break; + } + + default: + usage(); + process.exit(1); +} \ No newline at end of file diff --git a/amplify/functions/downloadDocument/node_modules/uuid/dist/v1.js b/amplify/functions/downloadDocument/node_modules/uuid/dist/v1.js new file mode 100644 index 0000000..125bc58 --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/uuid/dist/v1.js @@ -0,0 +1,107 @@ +"use strict"; + +Object.defineProperty(exports, "__esModule", { + value: true +}); +exports.default = void 0; + +var _rng = _interopRequireDefault(require("./rng.js")); + +var 
_stringify = require("./stringify.js"); + +function _interopRequireDefault(obj) { return obj && obj.__esModule ? obj : { default: obj }; } + +// **`v1()` - Generate time-based UUID** +// +// Inspired by https://github.com/LiosK/UUID.js +// and http://docs.python.org/library/uuid.html +let _nodeId; + +let _clockseq; // Previous uuid creation time + + +let _lastMSecs = 0; +let _lastNSecs = 0; // See https://github.com/uuidjs/uuid for API details + +function v1(options, buf, offset) { + let i = buf && offset || 0; + const b = buf || new Array(16); + options = options || {}; + let node = options.node || _nodeId; + let clockseq = options.clockseq !== undefined ? options.clockseq : _clockseq; // node and clockseq need to be initialized to random values if they're not + // specified. We do this lazily to minimize issues related to insufficient + // system entropy. See #189 + + if (node == null || clockseq == null) { + const seedBytes = options.random || (options.rng || _rng.default)(); + + if (node == null) { + // Per 4.5, create and 48-bit node id, (47 random bits + multicast bit = 1) + node = _nodeId = [seedBytes[0] | 0x01, seedBytes[1], seedBytes[2], seedBytes[3], seedBytes[4], seedBytes[5]]; + } + + if (clockseq == null) { + // Per 4.2.2, randomize (14 bit) clockseq + clockseq = _clockseq = (seedBytes[6] << 8 | seedBytes[7]) & 0x3fff; + } + } // UUID timestamps are 100 nano-second units since the Gregorian epoch, + // (1582-10-15 00:00). JSNumbers aren't precise enough for this, so + // time is handled internally as 'msecs' (integer milliseconds) and 'nsecs' + // (100-nanoseconds offset from msecs) since unix epoch, 1970-01-01 00:00. + + + let msecs = options.msecs !== undefined ? options.msecs : Date.now(); // Per 4.2.1.2, use count of uuid's generated during the current clock + // cycle to simulate higher resolution clock + + let nsecs = options.nsecs !== undefined ? 
options.nsecs : _lastNSecs + 1; // Time since last uuid creation (in msecs) + + const dt = msecs - _lastMSecs + (nsecs - _lastNSecs) / 10000; // Per 4.2.1.2, Bump clockseq on clock regression + + if (dt < 0 && options.clockseq === undefined) { + clockseq = clockseq + 1 & 0x3fff; + } // Reset nsecs if clock regresses (new clockseq) or we've moved onto a new + // time interval + + + if ((dt < 0 || msecs > _lastMSecs) && options.nsecs === undefined) { + nsecs = 0; + } // Per 4.2.1.2 Throw error if too many uuids are requested + + + if (nsecs >= 10000) { + throw new Error("uuid.v1(): Can't create more than 10M uuids/sec"); + } + + _lastMSecs = msecs; + _lastNSecs = nsecs; + _clockseq = clockseq; // Per 4.1.4 - Convert from unix epoch to Gregorian epoch + + msecs += 12219292800000; // `time_low` + + const tl = ((msecs & 0xfffffff) * 10000 + nsecs) % 0x100000000; + b[i++] = tl >>> 24 & 0xff; + b[i++] = tl >>> 16 & 0xff; + b[i++] = tl >>> 8 & 0xff; + b[i++] = tl & 0xff; // `time_mid` + + const tmh = msecs / 0x100000000 * 10000 & 0xfffffff; + b[i++] = tmh >>> 8 & 0xff; + b[i++] = tmh & 0xff; // `time_high_and_version` + + b[i++] = tmh >>> 24 & 0xf | 0x10; // include version + + b[i++] = tmh >>> 16 & 0xff; // `clock_seq_hi_and_reserved` (Per 4.2.2 - include variant) + + b[i++] = clockseq >>> 8 | 0x80; // `clock_seq_low` + + b[i++] = clockseq & 0xff; // `node` + + for (let n = 0; n < 6; ++n) { + b[i + n] = node[n]; + } + + return buf || (0, _stringify.unsafeStringify)(b); +} + +var _default = v1; +exports.default = _default; \ No newline at end of file diff --git a/amplify/functions/downloadDocument/node_modules/uuid/dist/v3.js b/amplify/functions/downloadDocument/node_modules/uuid/dist/v3.js new file mode 100644 index 0000000..6b47ff5 --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/uuid/dist/v3.js @@ -0,0 +1,16 @@ +"use strict"; + +Object.defineProperty(exports, "__esModule", { + value: true +}); +exports.default = void 0; + +var _v = 
_interopRequireDefault(require("./v35.js")); + +var _md = _interopRequireDefault(require("./md5.js")); + +function _interopRequireDefault(obj) { return obj && obj.__esModule ? obj : { default: obj }; } + +const v3 = (0, _v.default)('v3', 0x30, _md.default); +var _default = v3; +exports.default = _default; \ No newline at end of file diff --git a/amplify/functions/downloadDocument/node_modules/uuid/dist/v35.js b/amplify/functions/downloadDocument/node_modules/uuid/dist/v35.js new file mode 100644 index 0000000..7c522d9 --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/uuid/dist/v35.js @@ -0,0 +1,80 @@ +"use strict"; + +Object.defineProperty(exports, "__esModule", { + value: true +}); +exports.URL = exports.DNS = void 0; +exports.default = v35; + +var _stringify = require("./stringify.js"); + +var _parse = _interopRequireDefault(require("./parse.js")); + +function _interopRequireDefault(obj) { return obj && obj.__esModule ? obj : { default: obj }; } + +function stringToBytes(str) { + str = unescape(encodeURIComponent(str)); // UTF8 escape + + const bytes = []; + + for (let i = 0; i < str.length; ++i) { + bytes.push(str.charCodeAt(i)); + } + + return bytes; +} + +const DNS = '6ba7b810-9dad-11d1-80b4-00c04fd430c8'; +exports.DNS = DNS; +const URL = '6ba7b811-9dad-11d1-80b4-00c04fd430c8'; +exports.URL = URL; + +function v35(name, version, hashfunc) { + function generateUUID(value, namespace, buf, offset) { + var _namespace; + + if (typeof value === 'string') { + value = stringToBytes(value); + } + + if (typeof namespace === 'string') { + namespace = (0, _parse.default)(namespace); + } + + if (((_namespace = namespace) === null || _namespace === void 0 ? void 0 : _namespace.length) !== 16) { + throw TypeError('Namespace must be array-like (16 iterable integer values, 0-255)'); + } // Compute hash of namespace and value, Per 4.3 + // Future: Use spread syntax when supported on all platforms, e.g. `bytes = + // hashfunc([...namespace, ... 
value])` + + + let bytes = new Uint8Array(16 + value.length); + bytes.set(namespace); + bytes.set(value, namespace.length); + bytes = hashfunc(bytes); + bytes[6] = bytes[6] & 0x0f | version; + bytes[8] = bytes[8] & 0x3f | 0x80; + + if (buf) { + offset = offset || 0; + + for (let i = 0; i < 16; ++i) { + buf[offset + i] = bytes[i]; + } + + return buf; + } + + return (0, _stringify.unsafeStringify)(bytes); + } // Function#name is not settable on some platforms (#270) + + + try { + generateUUID.name = name; // eslint-disable-next-line no-empty + } catch (err) {} // For CommonJS default export support + + + generateUUID.DNS = DNS; + generateUUID.URL = URL; + return generateUUID; +} \ No newline at end of file diff --git a/amplify/functions/downloadDocument/node_modules/uuid/dist/v4.js b/amplify/functions/downloadDocument/node_modules/uuid/dist/v4.js new file mode 100644 index 0000000..959d698 --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/uuid/dist/v4.js @@ -0,0 +1,43 @@ +"use strict"; + +Object.defineProperty(exports, "__esModule", { + value: true +}); +exports.default = void 0; + +var _native = _interopRequireDefault(require("./native.js")); + +var _rng = _interopRequireDefault(require("./rng.js")); + +var _stringify = require("./stringify.js"); + +function _interopRequireDefault(obj) { return obj && obj.__esModule ? 
obj : { default: obj }; } + +function v4(options, buf, offset) { + if (_native.default.randomUUID && !buf && !options) { + return _native.default.randomUUID(); + } + + options = options || {}; + + const rnds = options.random || (options.rng || _rng.default)(); // Per 4.4, set bits for version and `clock_seq_hi_and_reserved` + + + rnds[6] = rnds[6] & 0x0f | 0x40; + rnds[8] = rnds[8] & 0x3f | 0x80; // Copy bytes to buffer, if provided + + if (buf) { + offset = offset || 0; + + for (let i = 0; i < 16; ++i) { + buf[offset + i] = rnds[i]; + } + + return buf; + } + + return (0, _stringify.unsafeStringify)(rnds); +} + +var _default = v4; +exports.default = _default; \ No newline at end of file diff --git a/amplify/functions/downloadDocument/node_modules/uuid/dist/v5.js b/amplify/functions/downloadDocument/node_modules/uuid/dist/v5.js new file mode 100644 index 0000000..99d615e --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/uuid/dist/v5.js @@ -0,0 +1,16 @@ +"use strict"; + +Object.defineProperty(exports, "__esModule", { + value: true +}); +exports.default = void 0; + +var _v = _interopRequireDefault(require("./v35.js")); + +var _sha = _interopRequireDefault(require("./sha1.js")); + +function _interopRequireDefault(obj) { return obj && obj.__esModule ? obj : { default: obj }; } + +const v5 = (0, _v.default)('v5', 0x50, _sha.default); +var _default = v5; +exports.default = _default; \ No newline at end of file diff --git a/amplify/functions/downloadDocument/node_modules/uuid/dist/validate.js b/amplify/functions/downloadDocument/node_modules/uuid/dist/validate.js new file mode 100644 index 0000000..fd05215 --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/uuid/dist/validate.js @@ -0,0 +1,17 @@ +"use strict"; + +Object.defineProperty(exports, "__esModule", { + value: true +}); +exports.default = void 0; + +var _regex = _interopRequireDefault(require("./regex.js")); + +function _interopRequireDefault(obj) { return obj && obj.__esModule ? 
obj : { default: obj }; } + +function validate(uuid) { + return typeof uuid === 'string' && _regex.default.test(uuid); +} + +var _default = validate; +exports.default = _default; \ No newline at end of file diff --git a/amplify/functions/downloadDocument/node_modules/uuid/dist/version.js b/amplify/functions/downloadDocument/node_modules/uuid/dist/version.js new file mode 100644 index 0000000..f63af01 --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/uuid/dist/version.js @@ -0,0 +1,21 @@ +"use strict"; + +Object.defineProperty(exports, "__esModule", { + value: true +}); +exports.default = void 0; + +var _validate = _interopRequireDefault(require("./validate.js")); + +function _interopRequireDefault(obj) { return obj && obj.__esModule ? obj : { default: obj }; } + +function version(uuid) { + if (!(0, _validate.default)(uuid)) { + throw TypeError('Invalid UUID'); + } + + return parseInt(uuid.slice(14, 15), 16); +} + +var _default = version; +exports.default = _default; \ No newline at end of file diff --git a/amplify/functions/downloadDocument/node_modules/uuid/package.json b/amplify/functions/downloadDocument/node_modules/uuid/package.json new file mode 100644 index 0000000..6cc3361 --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/uuid/package.json @@ -0,0 +1,135 @@ +{ + "name": "uuid", + "version": "9.0.1", + "description": "RFC4122 (v1, v4, and v5) UUIDs", + "funding": [ + "https://github.com/sponsors/broofa", + "https://github.com/sponsors/ctavan" + ], + "commitlint": { + "extends": [ + "@commitlint/config-conventional" + ] + }, + "keywords": [ + "uuid", + "guid", + "rfc4122" + ], + "license": "MIT", + "bin": { + "uuid": "./dist/bin/uuid" + }, + "sideEffects": false, + "main": "./dist/index.js", + "exports": { + ".": { + "node": { + "module": "./dist/esm-node/index.js", + "require": "./dist/index.js", + "import": "./wrapper.mjs" + }, + "browser": { + "import": "./dist/esm-browser/index.js", + "require": 
"./dist/commonjs-browser/index.js" + }, + "default": "./dist/esm-browser/index.js" + }, + "./package.json": "./package.json" + }, + "module": "./dist/esm-node/index.js", + "browser": { + "./dist/md5.js": "./dist/md5-browser.js", + "./dist/native.js": "./dist/native-browser.js", + "./dist/rng.js": "./dist/rng-browser.js", + "./dist/sha1.js": "./dist/sha1-browser.js", + "./dist/esm-node/index.js": "./dist/esm-browser/index.js" + }, + "files": [ + "CHANGELOG.md", + "CONTRIBUTING.md", + "LICENSE.md", + "README.md", + "dist", + "wrapper.mjs" + ], + "devDependencies": { + "@babel/cli": "7.18.10", + "@babel/core": "7.18.10", + "@babel/eslint-parser": "7.18.9", + "@babel/preset-env": "7.18.10", + "@commitlint/cli": "17.0.3", + "@commitlint/config-conventional": "17.0.3", + "bundlewatch": "0.3.3", + "eslint": "8.21.0", + "eslint-config-prettier": "8.5.0", + "eslint-config-standard": "17.0.0", + "eslint-plugin-import": "2.26.0", + "eslint-plugin-node": "11.1.0", + "eslint-plugin-prettier": "4.2.1", + "eslint-plugin-promise": "6.0.0", + "husky": "8.0.1", + "jest": "28.1.3", + "lint-staged": "13.0.3", + "npm-run-all": "4.1.5", + "optional-dev-dependency": "2.0.1", + "prettier": "2.7.1", + "random-seed": "0.3.0", + "runmd": "1.3.9", + "standard-version": "9.5.0" + }, + "optionalDevDependencies": { + "@wdio/browserstack-service": "7.16.10", + "@wdio/cli": "7.16.10", + "@wdio/jasmine-framework": "7.16.6", + "@wdio/local-runner": "7.16.10", + "@wdio/spec-reporter": "7.16.9", + "@wdio/static-server-service": "7.16.6" + }, + "scripts": { + "examples:browser:webpack:build": "cd examples/browser-webpack && npm install && npm run build", + "examples:browser:rollup:build": "cd examples/browser-rollup && npm install && npm run build", + "examples:node:commonjs:test": "cd examples/node-commonjs && npm install && npm test", + "examples:node:esmodules:test": "cd examples/node-esmodules && npm install && npm test", + "examples:node:jest:test": "cd examples/node-jest && npm install && npm 
test", + "prepare": "cd $( git rev-parse --show-toplevel ) && husky install", + "lint": "npm run eslint:check && npm run prettier:check", + "eslint:check": "eslint src/ test/ examples/ *.js", + "eslint:fix": "eslint --fix src/ test/ examples/ *.js", + "pretest": "[ -n $CI ] || npm run build", + "test": "BABEL_ENV=commonjsNode node --throw-deprecation node_modules/.bin/jest test/unit/", + "pretest:browser": "optional-dev-dependency && npm run build && npm-run-all --parallel examples:browser:**", + "test:browser": "wdio run ./wdio.conf.js", + "pretest:node": "npm run build", + "test:node": "npm-run-all --parallel examples:node:**", + "test:pack": "./scripts/testpack.sh", + "pretest:benchmark": "npm run build", + "test:benchmark": "cd examples/benchmark && npm install && npm test", + "prettier:check": "prettier --check '**/*.{js,jsx,json,md}'", + "prettier:fix": "prettier --write '**/*.{js,jsx,json,md}'", + "bundlewatch": "npm run pretest:browser && bundlewatch --config bundlewatch.config.json", + "md": "runmd --watch --output=README.md README_js.md", + "docs": "( node --version | grep -q 'v18' ) && ( npm run build && npx runmd --output=README.md README_js.md )", + "docs:diff": "npm run docs && git diff --quiet README.md", + "build": "./scripts/build.sh", + "prepack": "npm run build", + "release": "standard-version --no-verify" + }, + "repository": { + "type": "git", + "url": "https://github.com/uuidjs/uuid.git" + }, + "lint-staged": { + "*.{js,jsx,json,md}": [ + "prettier --write" + ], + "*.{js,jsx}": [ + "eslint --fix" + ] + }, + "standard-version": { + "scripts": { + "postchangelog": "prettier --write CHANGELOG.md" + } + } +} diff --git a/amplify/functions/downloadDocument/node_modules/uuid/wrapper.mjs b/amplify/functions/downloadDocument/node_modules/uuid/wrapper.mjs new file mode 100644 index 0000000..c31e9ce --- /dev/null +++ b/amplify/functions/downloadDocument/node_modules/uuid/wrapper.mjs @@ -0,0 +1,10 @@ +import uuid from './dist/index.js'; +export const v1 
= uuid.v1; +export const v3 = uuid.v3; +export const v4 = uuid.v4; +export const v5 = uuid.v5; +export const NIL = uuid.NIL; +export const version = uuid.version; +export const validate = uuid.validate; +export const stringify = uuid.stringify; +export const parse = uuid.parse; diff --git a/amplify/functions/downloadDocument/package-lock.json b/amplify/functions/downloadDocument/package-lock.json new file mode 100644 index 0000000..e699eab --- /dev/null +++ b/amplify/functions/downloadDocument/package-lock.json @@ -0,0 +1,1349 @@ +{ + "name": "downloaddocument", + "version": "1.0.0", + "lockfileVersion": 3, + "requires": true, + "packages": { + "": { + "name": "downloaddocument", + "version": "1.0.0", + "license": "ISC", + "dependencies": { + "@aws-sdk/client-dynamodb": "^3.803.0", + "@aws-sdk/lib-dynamodb": "^3.803.0" + } + }, + "node_modules/@aws-crypto/sha256-browser": { + "version": "5.2.0", + "resolved": "https://registry.npmjs.org/@aws-crypto/sha256-browser/-/sha256-browser-5.2.0.tgz", + "integrity": "sha512-AXfN/lGotSQwu6HNcEsIASo7kWXZ5HYWvfOmSNKDsEqC4OashTp8alTmaz+F7TC2L083SFv5RdB+qU3Vs1kZqw==", + "license": "Apache-2.0", + "dependencies": { + "@aws-crypto/sha256-js": "^5.2.0", + "@aws-crypto/supports-web-crypto": "^5.2.0", + "@aws-crypto/util": "^5.2.0", + "@aws-sdk/types": "^3.222.0", + "@aws-sdk/util-locate-window": "^3.0.0", + "@smithy/util-utf8": "^2.0.0", + "tslib": "^2.6.2" + } + }, + "node_modules/@aws-crypto/sha256-browser/node_modules/@smithy/is-array-buffer": { + "version": "2.2.0", + "resolved": "https://registry.npmjs.org/@smithy/is-array-buffer/-/is-array-buffer-2.2.0.tgz", + "integrity": "sha512-GGP3O9QFD24uGeAXYUjwSTXARoqpZykHadOmA8G5vfJPK0/DC67qa//0qvqrJzL1xc8WQWX7/yc7fwudjPHPhA==", + "license": "Apache-2.0", + "dependencies": { + "tslib": "^2.6.2" + }, + "engines": { + "node": ">=14.0.0" + } + }, + "node_modules/@aws-crypto/sha256-browser/node_modules/@smithy/util-buffer-from": { + "version": "2.2.0", + "resolved": 
"https://registry.npmjs.org/@smithy/util-buffer-from/-/util-buffer-from-2.2.0.tgz", + "integrity": "sha512-IJdWBbTcMQ6DA0gdNhh/BwrLkDR+ADW5Kr1aZmd4k3DIF6ezMV4R2NIAmT08wQJ3yUK82thHWmC/TnK/wpMMIA==", + "license": "Apache-2.0", + "dependencies": { + "@smithy/is-array-buffer": "^2.2.0", + "tslib": "^2.6.2" + }, + "engines": { + "node": ">=14.0.0" + } + }, + "node_modules/@aws-crypto/sha256-browser/node_modules/@smithy/util-utf8": { + "version": "2.3.0", + "resolved": "https://registry.npmjs.org/@smithy/util-utf8/-/util-utf8-2.3.0.tgz", + "integrity": "sha512-R8Rdn8Hy72KKcebgLiv8jQcQkXoLMOGGv5uI1/k0l+snqkOzQ1R0ChUBCxWMlBsFMekWjq0wRudIweFs7sKT5A==", + "license": "Apache-2.0", + "dependencies": { + "@smithy/util-buffer-from": "^2.2.0", + "tslib": "^2.6.2" + }, + "engines": { + "node": ">=14.0.0" + } + }, + "node_modules/@aws-crypto/sha256-js": { + "version": "5.2.0", + "resolved": "https://registry.npmjs.org/@aws-crypto/sha256-js/-/sha256-js-5.2.0.tgz", + "integrity": "sha512-FFQQyu7edu4ufvIZ+OadFpHHOt+eSTBaYaki44c+akjg7qZg9oOQeLlk77F6tSYqjDAFClrHJk9tMf0HdVyOvA==", + "license": "Apache-2.0", + "dependencies": { + "@aws-crypto/util": "^5.2.0", + "@aws-sdk/types": "^3.222.0", + "tslib": "^2.6.2" + }, + "engines": { + "node": ">=16.0.0" + } + }, + "node_modules/@aws-crypto/supports-web-crypto": { + "version": "5.2.0", + "resolved": "https://registry.npmjs.org/@aws-crypto/supports-web-crypto/-/supports-web-crypto-5.2.0.tgz", + "integrity": "sha512-iAvUotm021kM33eCdNfwIN//F77/IADDSs58i+MDaOqFrVjZo9bAal0NK7HurRuWLLpF1iLX7gbWrjHjeo+YFg==", + "license": "Apache-2.0", + "dependencies": { + "tslib": "^2.6.2" + } + }, + "node_modules/@aws-crypto/util": { + "version": "5.2.0", + "resolved": "https://registry.npmjs.org/@aws-crypto/util/-/util-5.2.0.tgz", + "integrity": "sha512-4RkU9EsI6ZpBve5fseQlGNUWKMa1RLPQ1dnjnQoe07ldfIzcsGb5hC5W0Dm7u423KWzawlrpbjXBrXCEv9zazQ==", + "license": "Apache-2.0", + "dependencies": { + "@aws-sdk/types": "^3.222.0", + "@smithy/util-utf8": "^2.0.0", + 
"tslib": "^2.6.2" + } + }, + "node_modules/@aws-crypto/util/node_modules/@smithy/is-array-buffer": { + "version": "2.2.0", + "resolved": "https://registry.npmjs.org/@smithy/is-array-buffer/-/is-array-buffer-2.2.0.tgz", + "integrity": "sha512-GGP3O9QFD24uGeAXYUjwSTXARoqpZykHadOmA8G5vfJPK0/DC67qa//0qvqrJzL1xc8WQWX7/yc7fwudjPHPhA==", + "license": "Apache-2.0", + "dependencies": { + "tslib": "^2.6.2" + }, + "engines": { + "node": ">=14.0.0" + } + }, + "node_modules/@aws-crypto/util/node_modules/@smithy/util-buffer-from": { + "version": "2.2.0", + "resolved": "https://registry.npmjs.org/@smithy/util-buffer-from/-/util-buffer-from-2.2.0.tgz", + "integrity": "sha512-IJdWBbTcMQ6DA0gdNhh/BwrLkDR+ADW5Kr1aZmd4k3DIF6ezMV4R2NIAmT08wQJ3yUK82thHWmC/TnK/wpMMIA==", + "license": "Apache-2.0", + "dependencies": { + "@smithy/is-array-buffer": "^2.2.0", + "tslib": "^2.6.2" + }, + "engines": { + "node": ">=14.0.0" + } + }, + "node_modules/@aws-crypto/util/node_modules/@smithy/util-utf8": { + "version": "2.3.0", + "resolved": "https://registry.npmjs.org/@smithy/util-utf8/-/util-utf8-2.3.0.tgz", + "integrity": "sha512-R8Rdn8Hy72KKcebgLiv8jQcQkXoLMOGGv5uI1/k0l+snqkOzQ1R0ChUBCxWMlBsFMekWjq0wRudIweFs7sKT5A==", + "license": "Apache-2.0", + "dependencies": { + "@smithy/util-buffer-from": "^2.2.0", + "tslib": "^2.6.2" + }, + "engines": { + "node": ">=14.0.0" + } + }, + "node_modules/@aws-sdk/client-dynamodb": { + "version": "3.803.0", + "resolved": "https://registry.npmjs.org/@aws-sdk/client-dynamodb/-/client-dynamodb-3.803.0.tgz", + "integrity": "sha512-rJPidxfyTQHz/1Naq3FukSoIt40GwXfv3npVR15bCBFpqx9TXEt7GoIUbiqm+Ftx8sx9hqJ6XNhf80FIa243gw==", + "license": "Apache-2.0", + "dependencies": { + "@aws-crypto/sha256-browser": "5.2.0", + "@aws-crypto/sha256-js": "5.2.0", + "@aws-sdk/core": "3.799.0", + "@aws-sdk/credential-provider-node": "3.803.0", + "@aws-sdk/middleware-endpoint-discovery": "3.775.0", + "@aws-sdk/middleware-host-header": "3.775.0", + "@aws-sdk/middleware-logger": "3.775.0", + 
"@aws-sdk/middleware-recursion-detection": "3.775.0", + "@aws-sdk/middleware-user-agent": "3.799.0", + "@aws-sdk/region-config-resolver": "3.775.0", + "@aws-sdk/types": "3.775.0", + "@aws-sdk/util-endpoints": "3.787.0", + "@aws-sdk/util-user-agent-browser": "3.775.0", + "@aws-sdk/util-user-agent-node": "3.799.0", + "@smithy/config-resolver": "^4.1.0", + "@smithy/core": "^3.3.0", + "@smithy/fetch-http-handler": "^5.0.2", + "@smithy/hash-node": "^4.0.2", + "@smithy/invalid-dependency": "^4.0.2", + "@smithy/middleware-content-length": "^4.0.2", + "@smithy/middleware-endpoint": "^4.1.1", + "@smithy/middleware-retry": "^4.1.2", + "@smithy/middleware-serde": "^4.0.3", + "@smithy/middleware-stack": "^4.0.2", + "@smithy/node-config-provider": "^4.0.2", + "@smithy/node-http-handler": "^4.0.4", + "@smithy/protocol-http": "^5.1.0", + "@smithy/smithy-client": "^4.2.1", + "@smithy/types": "^4.2.0", + "@smithy/url-parser": "^4.0.2", + "@smithy/util-base64": "^4.0.0", + "@smithy/util-body-length-browser": "^4.0.0", + "@smithy/util-body-length-node": "^4.0.0", + "@smithy/util-defaults-mode-browser": "^4.0.9", + "@smithy/util-defaults-mode-node": "^4.0.9", + "@smithy/util-endpoints": "^3.0.2", + "@smithy/util-middleware": "^4.0.2", + "@smithy/util-retry": "^4.0.3", + "@smithy/util-utf8": "^4.0.0", + "@smithy/util-waiter": "^4.0.3", + "@types/uuid": "^9.0.1", + "tslib": "^2.6.2", + "uuid": "^9.0.1" + }, + "engines": { + "node": ">=18.0.0" + } + }, + "node_modules/@aws-sdk/client-sso": { + "version": "3.803.0", + "resolved": "https://registry.npmjs.org/@aws-sdk/client-sso/-/client-sso-3.803.0.tgz", + "integrity": "sha512-TT3BRD1yiL3IGXBKfq560vvEdyOJtJr8bp+R82dD6P0IoS8aFcNtF822BOJy7CqvxksOc3hQKLaPVzE82gE8Ow==", + "license": "Apache-2.0", + "dependencies": { + "@aws-crypto/sha256-browser": "5.2.0", + "@aws-crypto/sha256-js": "5.2.0", + "@aws-sdk/core": "3.799.0", + "@aws-sdk/middleware-host-header": "3.775.0", + "@aws-sdk/middleware-logger": "3.775.0", + 
"@aws-sdk/middleware-recursion-detection": "3.775.0", + "@aws-sdk/middleware-user-agent": "3.799.0", + "@aws-sdk/region-config-resolver": "3.775.0", + "@aws-sdk/types": "3.775.0", + "@aws-sdk/util-endpoints": "3.787.0", + "@aws-sdk/util-user-agent-browser": "3.775.0", + "@aws-sdk/util-user-agent-node": "3.799.0", + "@smithy/config-resolver": "^4.1.0", + "@smithy/core": "^3.3.0", + "@smithy/fetch-http-handler": "^5.0.2", + "@smithy/hash-node": "^4.0.2", + "@smithy/invalid-dependency": "^4.0.2", + "@smithy/middleware-content-length": "^4.0.2", + "@smithy/middleware-endpoint": "^4.1.1", + "@smithy/middleware-retry": "^4.1.2", + "@smithy/middleware-serde": "^4.0.3", + "@smithy/middleware-stack": "^4.0.2", + "@smithy/node-config-provider": "^4.0.2", + "@smithy/node-http-handler": "^4.0.4", + "@smithy/protocol-http": "^5.1.0", + "@smithy/smithy-client": "^4.2.1", + "@smithy/types": "^4.2.0", + "@smithy/url-parser": "^4.0.2", + "@smithy/util-base64": "^4.0.0", + "@smithy/util-body-length-browser": "^4.0.0", + "@smithy/util-body-length-node": "^4.0.0", + "@smithy/util-defaults-mode-browser": "^4.0.9", + "@smithy/util-defaults-mode-node": "^4.0.9", + "@smithy/util-endpoints": "^3.0.2", + "@smithy/util-middleware": "^4.0.2", + "@smithy/util-retry": "^4.0.3", + "@smithy/util-utf8": "^4.0.0", + "tslib": "^2.6.2" + }, + "engines": { + "node": ">=18.0.0" + } + }, + "node_modules/@aws-sdk/core": { + "version": "3.799.0", + "resolved": "https://registry.npmjs.org/@aws-sdk/core/-/core-3.799.0.tgz", + "integrity": "sha512-hkKF3Zpc6+H8GI1rlttYVRh9uEE77cqAzLmLpY3iu7sql8cZgPERRBfaFct8p1SaDyrksLNiboD1vKW58mbsYg==", + "license": "Apache-2.0", + "dependencies": { + "@aws-sdk/types": "3.775.0", + "@smithy/core": "^3.3.0", + "@smithy/node-config-provider": "^4.0.2", + "@smithy/property-provider": "^4.0.2", + "@smithy/protocol-http": "^5.1.0", + "@smithy/signature-v4": "^5.1.0", + "@smithy/smithy-client": "^4.2.1", + "@smithy/types": "^4.2.0", + "@smithy/util-middleware": "^4.0.2", + 
"fast-xml-parser": "4.4.1", + "tslib": "^2.6.2" + }, + "engines": { + "node": ">=18.0.0" + } + }, + "node_modules/@aws-sdk/credential-provider-env": { + "version": "3.799.0", + "resolved": "https://registry.npmjs.org/@aws-sdk/credential-provider-env/-/credential-provider-env-3.799.0.tgz", + "integrity": "sha512-vT/SSWtbUIOW/U21qgEySmmO44SFWIA7WeQPX1OrI8WJ5n7OEI23JWLHjLvHTkYmuZK6z1rPcv7HzRgmuGRibA==", + "license": "Apache-2.0", + "dependencies": { + "@aws-sdk/core": "3.799.0", + "@aws-sdk/types": "3.775.0", + "@smithy/property-provider": "^4.0.2", + "@smithy/types": "^4.2.0", + "tslib": "^2.6.2" + }, + "engines": { + "node": ">=18.0.0" + } + }, + "node_modules/@aws-sdk/credential-provider-http": { + "version": "3.799.0", + "resolved": "https://registry.npmjs.org/@aws-sdk/credential-provider-http/-/credential-provider-http-3.799.0.tgz", + "integrity": "sha512-2CjBpOWmhaPAExOgHnIB5nOkS5ef+mfRlJ1JC4nsnjAx0nrK4tk0XRE0LYz11P3+ue+a86cU8WTmBo+qjnGxPQ==", + "license": "Apache-2.0", + "dependencies": { + "@aws-sdk/core": "3.799.0", + "@aws-sdk/types": "3.775.0", + "@smithy/fetch-http-handler": "^5.0.2", + "@smithy/node-http-handler": "^4.0.4", + "@smithy/property-provider": "^4.0.2", + "@smithy/protocol-http": "^5.1.0", + "@smithy/smithy-client": "^4.2.1", + "@smithy/types": "^4.2.0", + "@smithy/util-stream": "^4.2.0", + "tslib": "^2.6.2" + }, + "engines": { + "node": ">=18.0.0" + } + }, + "node_modules/@aws-sdk/credential-provider-ini": { + "version": "3.803.0", + "resolved": "https://registry.npmjs.org/@aws-sdk/credential-provider-ini/-/credential-provider-ini-3.803.0.tgz", + "integrity": "sha512-XtbFftJex18GobpRWJxg5V7stVwvmV2gdBYW+zRM0YW6NZAR4NP/4vcc9ktM3++BWW5OF4Kvl7Nu7N4mAzRHmw==", + "license": "Apache-2.0", + "dependencies": { + "@aws-sdk/core": "3.799.0", + "@aws-sdk/credential-provider-env": "3.799.0", + "@aws-sdk/credential-provider-http": "3.799.0", + "@aws-sdk/credential-provider-process": "3.799.0", + "@aws-sdk/credential-provider-sso": "3.803.0", + 
"@aws-sdk/credential-provider-web-identity": "3.803.0", + "@aws-sdk/nested-clients": "3.803.0", + "@aws-sdk/types": "3.775.0", + "@smithy/credential-provider-imds": "^4.0.2", + "@smithy/property-provider": "^4.0.2", + "@smithy/shared-ini-file-loader": "^4.0.2", + "@smithy/types": "^4.2.0", + "tslib": "^2.6.2" + }, + "engines": { + "node": ">=18.0.0" + } + }, + "node_modules/@aws-sdk/credential-provider-node": { + "version": "3.803.0", + "resolved": "https://registry.npmjs.org/@aws-sdk/credential-provider-node/-/credential-provider-node-3.803.0.tgz", + "integrity": "sha512-lPdRYbjxwmv7gRqbaEe1Y1Yl5fD4c43AuK3P31eKjf1j41hZEQ0dg9a9KLk7i6ehEoVsxewnJrvbC2pVoYrCmQ==", + "license": "Apache-2.0", + "dependencies": { + "@aws-sdk/credential-provider-env": "3.799.0", + "@aws-sdk/credential-provider-http": "3.799.0", + "@aws-sdk/credential-provider-ini": "3.803.0", + "@aws-sdk/credential-provider-process": "3.799.0", + "@aws-sdk/credential-provider-sso": "3.803.0", + "@aws-sdk/credential-provider-web-identity": "3.803.0", + "@aws-sdk/types": "3.775.0", + "@smithy/credential-provider-imds": "^4.0.2", + "@smithy/property-provider": "^4.0.2", + "@smithy/shared-ini-file-loader": "^4.0.2", + "@smithy/types": "^4.2.0", + "tslib": "^2.6.2" + }, + "engines": { + "node": ">=18.0.0" + } + }, + "node_modules/@aws-sdk/credential-provider-process": { + "version": "3.799.0", + "resolved": "https://registry.npmjs.org/@aws-sdk/credential-provider-process/-/credential-provider-process-3.799.0.tgz", + "integrity": "sha512-g8jmNs2k98WNHMYcea1YKA+7ao2Ma4w0P42Dz4YpcI155pQHxHx25RwbOG+rsAKuo3bKwkW53HVE/ZTKhcWFgw==", + "license": "Apache-2.0", + "dependencies": { + "@aws-sdk/core": "3.799.0", + "@aws-sdk/types": "3.775.0", + "@smithy/property-provider": "^4.0.2", + "@smithy/shared-ini-file-loader": "^4.0.2", + "@smithy/types": "^4.2.0", + "tslib": "^2.6.2" + }, + "engines": { + "node": ">=18.0.0" + } + }, + "node_modules/@aws-sdk/credential-provider-sso": { + "version": "3.803.0", + "resolved": 
"https://registry.npmjs.org/@aws-sdk/credential-provider-sso/-/credential-provider-sso-3.803.0.tgz", + "integrity": "sha512-HEAcxSHrHxVekGnZqjFrkqdYAf4jFiZIMhuh0jqiqY6A4udEyXy1V623HVcTz/XXj6UBRnyD+zmOmlbzBvkfQg==", + "license": "Apache-2.0", + "dependencies": { + "@aws-sdk/client-sso": "3.803.0", + "@aws-sdk/core": "3.799.0", + "@aws-sdk/token-providers": "3.803.0", + "@aws-sdk/types": "3.775.0", + "@smithy/property-provider": "^4.0.2", + "@smithy/shared-ini-file-loader": "^4.0.2", + "@smithy/types": "^4.2.0", + "tslib": "^2.6.2" + }, + "engines": { + "node": ">=18.0.0" + } + }, + "node_modules/@aws-sdk/credential-provider-web-identity": { + "version": "3.803.0", + "resolved": "https://registry.npmjs.org/@aws-sdk/credential-provider-web-identity/-/credential-provider-web-identity-3.803.0.tgz", + "integrity": "sha512-oChnEpwI25OW4GPvhI1VnXM3IQEkDhESGFZd5JHzJDHyvSF2NU58V86jkJyaa4H4X25IbGaThuulNI5xCOngjw==", + "license": "Apache-2.0", + "dependencies": { + "@aws-sdk/core": "3.799.0", + "@aws-sdk/nested-clients": "3.803.0", + "@aws-sdk/types": "3.775.0", + "@smithy/property-provider": "^4.0.2", + "@smithy/types": "^4.2.0", + "tslib": "^2.6.2" + }, + "engines": { + "node": ">=18.0.0" + } + }, + "node_modules/@aws-sdk/endpoint-cache": { + "version": "3.723.0", + "resolved": "https://registry.npmjs.org/@aws-sdk/endpoint-cache/-/endpoint-cache-3.723.0.tgz", + "integrity": "sha512-2+a4WXRc+07uiPR+zJiPGKSOWaNJQNqitkks+6Hhm/haTLJqNVTgY2OWDh2PXvwMNpKB+AlGdhE65Oy6NzUgXg==", + "license": "Apache-2.0", + "dependencies": { + "mnemonist": "0.38.3", + "tslib": "^2.6.2" + }, + "engines": { + "node": ">=18.0.0" + } + }, + "node_modules/@aws-sdk/lib-dynamodb": { + "version": "3.803.0", + "resolved": "https://registry.npmjs.org/@aws-sdk/lib-dynamodb/-/lib-dynamodb-3.803.0.tgz", + "integrity": "sha512-J9oeaKnF0vfw1ixUc0Bu1GTcYwp/riiGCst/MSLPHeGqoFiYzyox/im1Pbuv2Ipx7/0QI7w5PxYdxOpwvqMCFg==", + "license": "Apache-2.0", + "dependencies": { + "@aws-sdk/core": "3.799.0", + 
"@aws-sdk/util-dynamodb": "3.803.0", + "@smithy/core": "^3.3.0", + "@smithy/smithy-client": "^4.2.1", + "@smithy/types": "^4.2.0", + "tslib": "^2.6.2" + }, + "engines": { + "node": ">=18.0.0" + }, + "peerDependencies": { + "@aws-sdk/client-dynamodb": "^3.803.0" + } + }, + "node_modules/@aws-sdk/middleware-endpoint-discovery": { + "version": "3.775.0", + "resolved": "https://registry.npmjs.org/@aws-sdk/middleware-endpoint-discovery/-/middleware-endpoint-discovery-3.775.0.tgz", + "integrity": "sha512-L0PmjSg7t+wovRo/Lin1kpei3e7wBhrENWb1Bbccu3PWUIfxolGeWplOmNhSlXjuQe9GXjf3z8kJRYOGBMFOvw==", + "license": "Apache-2.0", + "dependencies": { + "@aws-sdk/endpoint-cache": "3.723.0", + "@aws-sdk/types": "3.775.0", + "@smithy/node-config-provider": "^4.0.2", + "@smithy/protocol-http": "^5.1.0", + "@smithy/types": "^4.2.0", + "tslib": "^2.6.2" + }, + "engines": { + "node": ">=18.0.0" + } + }, + "node_modules/@aws-sdk/middleware-host-header": { + "version": "3.775.0", + "resolved": "https://registry.npmjs.org/@aws-sdk/middleware-host-header/-/middleware-host-header-3.775.0.tgz", + "integrity": "sha512-tkSegM0Z6WMXpLB8oPys/d+umYIocvO298mGvcMCncpRl77L9XkvSLJIFzaHes+o7djAgIduYw8wKIMStFss2w==", + "license": "Apache-2.0", + "dependencies": { + "@aws-sdk/types": "3.775.0", + "@smithy/protocol-http": "^5.1.0", + "@smithy/types": "^4.2.0", + "tslib": "^2.6.2" + }, + "engines": { + "node": ">=18.0.0" + } + }, + "node_modules/@aws-sdk/middleware-logger": { + "version": "3.775.0", + "resolved": "https://registry.npmjs.org/@aws-sdk/middleware-logger/-/middleware-logger-3.775.0.tgz", + "integrity": "sha512-FaxO1xom4MAoUJsldmR92nT1G6uZxTdNYOFYtdHfd6N2wcNaTuxgjIvqzg5y7QIH9kn58XX/dzf1iTjgqUStZw==", + "license": "Apache-2.0", + "dependencies": { + "@aws-sdk/types": "3.775.0", + "@smithy/types": "^4.2.0", + "tslib": "^2.6.2" + }, + "engines": { + "node": ">=18.0.0" + } + }, + "node_modules/@aws-sdk/middleware-recursion-detection": { + "version": "3.775.0", + "resolved": 
"https://registry.npmjs.org/@aws-sdk/middleware-recursion-detection/-/middleware-recursion-detection-3.775.0.tgz", + "integrity": "sha512-GLCzC8D0A0YDG5u3F5U03Vb9j5tcOEFhr8oc6PDk0k0vm5VwtZOE6LvK7hcCSoAB4HXyOUM0sQuXrbaAh9OwXA==", + "license": "Apache-2.0", + "dependencies": { + "@aws-sdk/types": "3.775.0", + "@smithy/protocol-http": "^5.1.0", + "@smithy/types": "^4.2.0", + "tslib": "^2.6.2" + }, + "engines": { + "node": ">=18.0.0" + } + }, + "node_modules/@aws-sdk/middleware-user-agent": { + "version": "3.799.0", + "resolved": "https://registry.npmjs.org/@aws-sdk/middleware-user-agent/-/middleware-user-agent-3.799.0.tgz", + "integrity": "sha512-TropQZanbOTxa+p+Nl4fWkzlRhgFwDfW+Wb6TR3jZN7IXHNlPpgGFpdrgvBExhW/RBhqr+94OsR8Ou58lp3hhA==", + "license": "Apache-2.0", + "dependencies": { + "@aws-sdk/core": "3.799.0", + "@aws-sdk/types": "3.775.0", + "@aws-sdk/util-endpoints": "3.787.0", + "@smithy/core": "^3.3.0", + "@smithy/protocol-http": "^5.1.0", + "@smithy/types": "^4.2.0", + "tslib": "^2.6.2" + }, + "engines": { + "node": ">=18.0.0" + } + }, + "node_modules/@aws-sdk/nested-clients": { + "version": "3.803.0", + "resolved": "https://registry.npmjs.org/@aws-sdk/nested-clients/-/nested-clients-3.803.0.tgz", + "integrity": "sha512-wiWiYaFQxK2u37G9IOXuWkHelEbU8ulLxdHpoPf0TSu/1boqLW7fcofuZATAvFcvigQx3oebwO8G4c/mmixTTw==", + "license": "Apache-2.0", + "dependencies": { + "@aws-crypto/sha256-browser": "5.2.0", + "@aws-crypto/sha256-js": "5.2.0", + "@aws-sdk/core": "3.799.0", + "@aws-sdk/middleware-host-header": "3.775.0", + "@aws-sdk/middleware-logger": "3.775.0", + "@aws-sdk/middleware-recursion-detection": "3.775.0", + "@aws-sdk/middleware-user-agent": "3.799.0", + "@aws-sdk/region-config-resolver": "3.775.0", + "@aws-sdk/types": "3.775.0", + "@aws-sdk/util-endpoints": "3.787.0", + "@aws-sdk/util-user-agent-browser": "3.775.0", + "@aws-sdk/util-user-agent-node": "3.799.0", + "@smithy/config-resolver": "^4.1.0", + "@smithy/core": "^3.3.0", + "@smithy/fetch-http-handler": 
"^5.0.2", + "@smithy/hash-node": "^4.0.2", + "@smithy/invalid-dependency": "^4.0.2", + "@smithy/middleware-content-length": "^4.0.2", + "@smithy/middleware-endpoint": "^4.1.1", + "@smithy/middleware-retry": "^4.1.2", + "@smithy/middleware-serde": "^4.0.3", + "@smithy/middleware-stack": "^4.0.2", + "@smithy/node-config-provider": "^4.0.2", + "@smithy/node-http-handler": "^4.0.4", + "@smithy/protocol-http": "^5.1.0", + "@smithy/smithy-client": "^4.2.1", + "@smithy/types": "^4.2.0", + "@smithy/url-parser": "^4.0.2", + "@smithy/util-base64": "^4.0.0", + "@smithy/util-body-length-browser": "^4.0.0", + "@smithy/util-body-length-node": "^4.0.0", + "@smithy/util-defaults-mode-browser": "^4.0.9", + "@smithy/util-defaults-mode-node": "^4.0.9", + "@smithy/util-endpoints": "^3.0.2", + "@smithy/util-middleware": "^4.0.2", + "@smithy/util-retry": "^4.0.3", + "@smithy/util-utf8": "^4.0.0", + "tslib": "^2.6.2" + }, + "engines": { + "node": ">=18.0.0" + } + }, + "node_modules/@aws-sdk/region-config-resolver": { + "version": "3.775.0", + "resolved": "https://registry.npmjs.org/@aws-sdk/region-config-resolver/-/region-config-resolver-3.775.0.tgz", + "integrity": "sha512-40iH3LJjrQS3LKUJAl7Wj0bln7RFPEvUYKFxtP8a+oKFDO0F65F52xZxIJbPn6sHkxWDAnZlGgdjZXM3p2g5wQ==", + "license": "Apache-2.0", + "dependencies": { + "@aws-sdk/types": "3.775.0", + "@smithy/node-config-provider": "^4.0.2", + "@smithy/types": "^4.2.0", + "@smithy/util-config-provider": "^4.0.0", + "@smithy/util-middleware": "^4.0.2", + "tslib": "^2.6.2" + }, + "engines": { + "node": ">=18.0.0" + } + }, + "node_modules/@aws-sdk/token-providers": { + "version": "3.803.0", + "resolved": "https://registry.npmjs.org/@aws-sdk/token-providers/-/token-providers-3.803.0.tgz", + "integrity": "sha512-lDbMgVjWWEPT7a6lLaAEPPljwOeLTjPX2sJ7MoDICpQotg4Yd8cQfX3nqScSyLAGSc7Rq/21UPnPoij/E0K3lg==", + "license": "Apache-2.0", + "dependencies": { + "@aws-sdk/nested-clients": "3.803.0", + "@aws-sdk/types": "3.775.0", + "@smithy/property-provider": 
"^4.0.2", + "@smithy/shared-ini-file-loader": "^4.0.2", + "@smithy/types": "^4.2.0", + "tslib": "^2.6.2" + }, + "engines": { + "node": ">=18.0.0" + } + }, + "node_modules/@aws-sdk/types": { + "version": "3.775.0", + "resolved": "https://registry.npmjs.org/@aws-sdk/types/-/types-3.775.0.tgz", + "integrity": "sha512-ZoGKwa4C9fC9Av6bdfqcW6Ix5ot05F/S4VxWR2nHuMv7hzfmAjTOcUiWT7UR4hM/U0whf84VhDtXN/DWAk52KA==", + "license": "Apache-2.0", + "dependencies": { + "@smithy/types": "^4.2.0", + "tslib": "^2.6.2" + }, + "engines": { + "node": ">=18.0.0" + } + }, + "node_modules/@aws-sdk/util-dynamodb": { + "version": "3.803.0", + "resolved": "https://registry.npmjs.org/@aws-sdk/util-dynamodb/-/util-dynamodb-3.803.0.tgz", + "integrity": "sha512-QiXvurnve8xIm41Zf/jNXwcYotDX3KZbHcsTaJ7ILhyFomqCjJ6bjLcCRdfndG600N5ua6AEK2XGw1luyBQxig==", + "license": "Apache-2.0", + "dependencies": { + "tslib": "^2.6.2" + }, + "engines": { + "node": ">=18.0.0" + }, + "peerDependencies": { + "@aws-sdk/client-dynamodb": "^3.803.0" + } + }, + "node_modules/@aws-sdk/util-endpoints": { + "version": "3.787.0", + "resolved": "https://registry.npmjs.org/@aws-sdk/util-endpoints/-/util-endpoints-3.787.0.tgz", + "integrity": "sha512-fd3zkiOkwnbdbN0Xp9TsP5SWrmv0SpT70YEdbb8wAj2DWQwiCmFszaSs+YCvhoCdmlR3Wl9Spu0pGpSAGKeYvQ==", + "license": "Apache-2.0", + "dependencies": { + "@aws-sdk/types": "3.775.0", + "@smithy/types": "^4.2.0", + "@smithy/util-endpoints": "^3.0.2", + "tslib": "^2.6.2" + }, + "engines": { + "node": ">=18.0.0" + } + }, + "node_modules/@aws-sdk/util-locate-window": { + "version": "3.723.0", + "resolved": "https://registry.npmjs.org/@aws-sdk/util-locate-window/-/util-locate-window-3.723.0.tgz", + "integrity": "sha512-Yf2CS10BqK688DRsrKI/EO6B8ff5J86NXe4C+VCysK7UOgN0l1zOTeTukZ3H8Q9tYYX3oaF1961o8vRkFm7Nmw==", + "license": "Apache-2.0", + "dependencies": { + "tslib": "^2.6.2" + }, + "engines": { + "node": ">=18.0.0" + } + }, + "node_modules/@aws-sdk/util-user-agent-browser": { + "version": "3.775.0", + 
"resolved": "https://registry.npmjs.org/@aws-sdk/util-user-agent-browser/-/util-user-agent-browser-3.775.0.tgz", + "integrity": "sha512-txw2wkiJmZKVdDbscK7VBK+u+TJnRtlUjRTLei+elZg2ADhpQxfVAQl436FUeIv6AhB/oRHW6/K/EAGXUSWi0A==", + "license": "Apache-2.0", + "dependencies": { + "@aws-sdk/types": "3.775.0", + "@smithy/types": "^4.2.0", + "bowser": "^2.11.0", + "tslib": "^2.6.2" + } + }, + "node_modules/@aws-sdk/util-user-agent-node": { + "version": "3.799.0", + "resolved": "https://registry.npmjs.org/@aws-sdk/util-user-agent-node/-/util-user-agent-node-3.799.0.tgz", + "integrity": "sha512-iXBk38RbIWPF5Nq9O4AnktORAzXovSVqWYClvS1qbE7ILsnTLJbagU9HlU25O2iV5COVh1qZkwuP5NHQ2yTEyw==", + "license": "Apache-2.0", + "dependencies": { + "@aws-sdk/middleware-user-agent": "3.799.0", + "@aws-sdk/types": "3.775.0", + "@smithy/node-config-provider": "^4.0.2", + "@smithy/types": "^4.2.0", + "tslib": "^2.6.2" + }, + "engines": { + "node": ">=18.0.0" + }, + "peerDependencies": { + "aws-crt": ">=1.0.0" + }, + "peerDependenciesMeta": { + "aws-crt": { + "optional": true + } + } + }, + "node_modules/@smithy/abort-controller": { + "version": "4.0.2", + "resolved": "https://registry.npmjs.org/@smithy/abort-controller/-/abort-controller-4.0.2.tgz", + "integrity": "sha512-Sl/78VDtgqKxN2+1qduaVE140XF+Xg+TafkncspwM4jFP/LHr76ZHmIY/y3V1M0mMLNk+Je6IGbzxy23RSToMw==", + "license": "Apache-2.0", + "dependencies": { + "@smithy/types": "^4.2.0", + "tslib": "^2.6.2" + }, + "engines": { + "node": ">=18.0.0" + } + }, + "node_modules/@smithy/config-resolver": { + "version": "4.1.0", + "resolved": "https://registry.npmjs.org/@smithy/config-resolver/-/config-resolver-4.1.0.tgz", + "integrity": "sha512-8smPlwhga22pwl23fM5ew4T9vfLUCeFXlcqNOCD5M5h8VmNPNUE9j6bQSuRXpDSV11L/E/SwEBQuW8hr6+nS1A==", + "license": "Apache-2.0", + "dependencies": { + "@smithy/node-config-provider": "^4.0.2", + "@smithy/types": "^4.2.0", + "@smithy/util-config-provider": "^4.0.0", + "@smithy/util-middleware": "^4.0.2", + "tslib": "^2.6.2" + 
}, + "engines": { + "node": ">=18.0.0" + } + }, + "node_modules/@smithy/core": { + "version": "3.3.1", + "resolved": "https://registry.npmjs.org/@smithy/core/-/core-3.3.1.tgz", + "integrity": "sha512-W7AppgQD3fP1aBmo8wWo0id5zeR2/aYRy067vZsDVaa6v/mdhkg6DxXwEVuSPjZl+ZnvWAQbUMCd5ckw38+tHQ==", + "license": "Apache-2.0", + "dependencies": { + "@smithy/middleware-serde": "^4.0.3", + "@smithy/protocol-http": "^5.1.0", + "@smithy/types": "^4.2.0", + "@smithy/util-body-length-browser": "^4.0.0", + "@smithy/util-middleware": "^4.0.2", + "@smithy/util-stream": "^4.2.0", + "@smithy/util-utf8": "^4.0.0", + "tslib": "^2.6.2" + }, + "engines": { + "node": ">=18.0.0" + } + }, + "node_modules/@smithy/credential-provider-imds": { + "version": "4.0.2", + "resolved": "https://registry.npmjs.org/@smithy/credential-provider-imds/-/credential-provider-imds-4.0.2.tgz", + "integrity": "sha512-32lVig6jCaWBHnY+OEQ6e6Vnt5vDHaLiydGrwYMW9tPqO688hPGTYRamYJ1EptxEC2rAwJrHWmPoKRBl4iTa8w==", + "license": "Apache-2.0", + "dependencies": { + "@smithy/node-config-provider": "^4.0.2", + "@smithy/property-provider": "^4.0.2", + "@smithy/types": "^4.2.0", + "@smithy/url-parser": "^4.0.2", + "tslib": "^2.6.2" + }, + "engines": { + "node": ">=18.0.0" + } + }, + "node_modules/@smithy/fetch-http-handler": { + "version": "5.0.2", + "resolved": "https://registry.npmjs.org/@smithy/fetch-http-handler/-/fetch-http-handler-5.0.2.tgz", + "integrity": "sha512-+9Dz8sakS9pe7f2cBocpJXdeVjMopUDLgZs1yWeu7h++WqSbjUYv/JAJwKwXw1HV6gq1jyWjxuyn24E2GhoEcQ==", + "license": "Apache-2.0", + "dependencies": { + "@smithy/protocol-http": "^5.1.0", + "@smithy/querystring-builder": "^4.0.2", + "@smithy/types": "^4.2.0", + "@smithy/util-base64": "^4.0.0", + "tslib": "^2.6.2" + }, + "engines": { + "node": ">=18.0.0" + } + }, + "node_modules/@smithy/hash-node": { + "version": "4.0.2", + "resolved": "https://registry.npmjs.org/@smithy/hash-node/-/hash-node-4.0.2.tgz", + "integrity": 
"sha512-VnTpYPnRUE7yVhWozFdlxcYknv9UN7CeOqSrMH+V877v4oqtVYuoqhIhtSjmGPvYrYnAkaM61sLMKHvxL138yg==", + "license": "Apache-2.0", + "dependencies": { + "@smithy/types": "^4.2.0", + "@smithy/util-buffer-from": "^4.0.0", + "@smithy/util-utf8": "^4.0.0", + "tslib": "^2.6.2" + }, + "engines": { + "node": ">=18.0.0" + } + }, + "node_modules/@smithy/invalid-dependency": { + "version": "4.0.2", + "resolved": "https://registry.npmjs.org/@smithy/invalid-dependency/-/invalid-dependency-4.0.2.tgz", + "integrity": "sha512-GatB4+2DTpgWPday+mnUkoumP54u/MDM/5u44KF9hIu8jF0uafZtQLcdfIKkIcUNuF/fBojpLEHZS/56JqPeXQ==", + "license": "Apache-2.0", + "dependencies": { + "@smithy/types": "^4.2.0", + "tslib": "^2.6.2" + }, + "engines": { + "node": ">=18.0.0" + } + }, + "node_modules/@smithy/is-array-buffer": { + "version": "4.0.0", + "resolved": "https://registry.npmjs.org/@smithy/is-array-buffer/-/is-array-buffer-4.0.0.tgz", + "integrity": "sha512-saYhF8ZZNoJDTvJBEWgeBccCg+yvp1CX+ed12yORU3NilJScfc6gfch2oVb4QgxZrGUx3/ZJlb+c/dJbyupxlw==", + "license": "Apache-2.0", + "dependencies": { + "tslib": "^2.6.2" + }, + "engines": { + "node": ">=18.0.0" + } + }, + "node_modules/@smithy/middleware-content-length": { + "version": "4.0.2", + "resolved": "https://registry.npmjs.org/@smithy/middleware-content-length/-/middleware-content-length-4.0.2.tgz", + "integrity": "sha512-hAfEXm1zU+ELvucxqQ7I8SszwQ4znWMbNv6PLMndN83JJN41EPuS93AIyh2N+gJ6x8QFhzSO6b7q2e6oClDI8A==", + "license": "Apache-2.0", + "dependencies": { + "@smithy/protocol-http": "^5.1.0", + "@smithy/types": "^4.2.0", + "tslib": "^2.6.2" + }, + "engines": { + "node": ">=18.0.0" + } + }, + "node_modules/@smithy/middleware-endpoint": { + "version": "4.1.2", + "resolved": "https://registry.npmjs.org/@smithy/middleware-endpoint/-/middleware-endpoint-4.1.2.tgz", + "integrity": "sha512-EqOy3xaEGQpsKxLlzYstDRJ8eY90CbyBP4cl+w7r45mE60S8YliyL9AgWsdWcyNiB95E2PMqHBEv67nNl1zLfg==", + "license": "Apache-2.0", + "dependencies": { + "@smithy/core": "^3.3.1", + 
"@smithy/middleware-serde": "^4.0.3", + "@smithy/node-config-provider": "^4.0.2", + "@smithy/shared-ini-file-loader": "^4.0.2", + "@smithy/types": "^4.2.0", + "@smithy/url-parser": "^4.0.2", + "@smithy/util-middleware": "^4.0.2", + "tslib": "^2.6.2" + }, + "engines": { + "node": ">=18.0.0" + } + }, + "node_modules/@smithy/middleware-retry": { + "version": "4.1.3", + "resolved": "https://registry.npmjs.org/@smithy/middleware-retry/-/middleware-retry-4.1.3.tgz", + "integrity": "sha512-AsJtI9KiFoEGAhcEKZyzzPfrszAQGcf4HSYKmenz0WGx/6YNvoPPv4OSGfZTCsDmgPHv4pXzxE+7QV7jcGWNKw==", + "license": "Apache-2.0", + "dependencies": { + "@smithy/node-config-provider": "^4.0.2", + "@smithy/protocol-http": "^5.1.0", + "@smithy/service-error-classification": "^4.0.3", + "@smithy/smithy-client": "^4.2.2", + "@smithy/types": "^4.2.0", + "@smithy/util-middleware": "^4.0.2", + "@smithy/util-retry": "^4.0.3", + "tslib": "^2.6.2", + "uuid": "^9.0.1" + }, + "engines": { + "node": ">=18.0.0" + } + }, + "node_modules/@smithy/middleware-serde": { + "version": "4.0.3", + "resolved": "https://registry.npmjs.org/@smithy/middleware-serde/-/middleware-serde-4.0.3.tgz", + "integrity": "sha512-rfgDVrgLEVMmMn0BI8O+8OVr6vXzjV7HZj57l0QxslhzbvVfikZbVfBVthjLHqib4BW44QhcIgJpvebHlRaC9A==", + "license": "Apache-2.0", + "dependencies": { + "@smithy/types": "^4.2.0", + "tslib": "^2.6.2" + }, + "engines": { + "node": ">=18.0.0" + } + }, + "node_modules/@smithy/middleware-stack": { + "version": "4.0.2", + "resolved": "https://registry.npmjs.org/@smithy/middleware-stack/-/middleware-stack-4.0.2.tgz", + "integrity": "sha512-eSPVcuJJGVYrFYu2hEq8g8WWdJav3sdrI4o2c6z/rjnYDd3xH9j9E7deZQCzFn4QvGPouLngH3dQ+QVTxv5bOQ==", + "license": "Apache-2.0", + "dependencies": { + "@smithy/types": "^4.2.0", + "tslib": "^2.6.2" + }, + "engines": { + "node": ">=18.0.0" + } + }, + "node_modules/@smithy/node-config-provider": { + "version": "4.0.2", + "resolved": 
"https://registry.npmjs.org/@smithy/node-config-provider/-/node-config-provider-4.0.2.tgz", + "integrity": "sha512-WgCkILRZfJwJ4Da92a6t3ozN/zcvYyJGUTmfGbgS/FkCcoCjl7G4FJaCDN1ySdvLvemnQeo25FdkyMSTSwulsw==", + "license": "Apache-2.0", + "dependencies": { + "@smithy/property-provider": "^4.0.2", + "@smithy/shared-ini-file-loader": "^4.0.2", + "@smithy/types": "^4.2.0", + "tslib": "^2.6.2" + }, + "engines": { + "node": ">=18.0.0" + } + }, + "node_modules/@smithy/node-http-handler": { + "version": "4.0.4", + "resolved": "https://registry.npmjs.org/@smithy/node-http-handler/-/node-http-handler-4.0.4.tgz", + "integrity": "sha512-/mdqabuAT3o/ihBGjL94PUbTSPSRJ0eeVTdgADzow0wRJ0rN4A27EOrtlK56MYiO1fDvlO3jVTCxQtQmK9dZ1g==", + "license": "Apache-2.0", + "dependencies": { + "@smithy/abort-controller": "^4.0.2", + "@smithy/protocol-http": "^5.1.0", + "@smithy/querystring-builder": "^4.0.2", + "@smithy/types": "^4.2.0", + "tslib": "^2.6.2" + }, + "engines": { + "node": ">=18.0.0" + } + }, + "node_modules/@smithy/property-provider": { + "version": "4.0.2", + "resolved": "https://registry.npmjs.org/@smithy/property-provider/-/property-provider-4.0.2.tgz", + "integrity": "sha512-wNRoQC1uISOuNc2s4hkOYwYllmiyrvVXWMtq+TysNRVQaHm4yoafYQyjN/goYZS+QbYlPIbb/QRjaUZMuzwQ7A==", + "license": "Apache-2.0", + "dependencies": { + "@smithy/types": "^4.2.0", + "tslib": "^2.6.2" + }, + "engines": { + "node": ">=18.0.0" + } + }, + "node_modules/@smithy/protocol-http": { + "version": "5.1.0", + "resolved": "https://registry.npmjs.org/@smithy/protocol-http/-/protocol-http-5.1.0.tgz", + "integrity": "sha512-KxAOL1nUNw2JTYrtviRRjEnykIDhxc84qMBzxvu1MUfQfHTuBlCG7PA6EdVwqpJjH7glw7FqQoFxUJSyBQgu7g==", + "license": "Apache-2.0", + "dependencies": { + "@smithy/types": "^4.2.0", + "tslib": "^2.6.2" + }, + "engines": { + "node": ">=18.0.0" + } + }, + "node_modules/@smithy/querystring-builder": { + "version": "4.0.2", + "resolved": 
"https://registry.npmjs.org/@smithy/querystring-builder/-/querystring-builder-4.0.2.tgz", + "integrity": "sha512-NTOs0FwHw1vimmQM4ebh+wFQvOwkEf/kQL6bSM1Lock+Bv4I89B3hGYoUEPkmvYPkDKyp5UdXJYu+PoTQ3T31Q==", + "license": "Apache-2.0", + "dependencies": { + "@smithy/types": "^4.2.0", + "@smithy/util-uri-escape": "^4.0.0", + "tslib": "^2.6.2" + }, + "engines": { + "node": ">=18.0.0" + } + }, + "node_modules/@smithy/querystring-parser": { + "version": "4.0.2", + "resolved": "https://registry.npmjs.org/@smithy/querystring-parser/-/querystring-parser-4.0.2.tgz", + "integrity": "sha512-v6w8wnmZcVXjfVLjxw8qF7OwESD9wnpjp0Dqry/Pod0/5vcEA3qxCr+BhbOHlxS8O+29eLpT3aagxXGwIoEk7Q==", + "license": "Apache-2.0", + "dependencies": { + "@smithy/types": "^4.2.0", + "tslib": "^2.6.2" + }, + "engines": { + "node": ">=18.0.0" + } + }, + "node_modules/@smithy/service-error-classification": { + "version": "4.0.3", + "resolved": "https://registry.npmjs.org/@smithy/service-error-classification/-/service-error-classification-4.0.3.tgz", + "integrity": "sha512-FTbcajmltovWMjj3tksDQdD23b2w6gH+A0DYA1Yz3iSpjDj8fmkwy62UnXcWMy4d5YoMoSyLFHMfkEVEzbiN8Q==", + "license": "Apache-2.0", + "dependencies": { + "@smithy/types": "^4.2.0" + }, + "engines": { + "node": ">=18.0.0" + } + }, + "node_modules/@smithy/shared-ini-file-loader": { + "version": "4.0.2", + "resolved": "https://registry.npmjs.org/@smithy/shared-ini-file-loader/-/shared-ini-file-loader-4.0.2.tgz", + "integrity": "sha512-J9/gTWBGVuFZ01oVA6vdb4DAjf1XbDhK6sLsu3OS9qmLrS6KB5ygpeHiM3miIbj1qgSJ96GYszXFWv6ErJ8QEw==", + "license": "Apache-2.0", + "dependencies": { + "@smithy/types": "^4.2.0", + "tslib": "^2.6.2" + }, + "engines": { + "node": ">=18.0.0" + } + }, + "node_modules/@smithy/signature-v4": { + "version": "5.1.0", + "resolved": "https://registry.npmjs.org/@smithy/signature-v4/-/signature-v4-5.1.0.tgz", + "integrity": "sha512-4t5WX60sL3zGJF/CtZsUQTs3UrZEDO2P7pEaElrekbLqkWPYkgqNW1oeiNYC6xXifBnT9dVBOnNQRvOE9riU9w==", + "license": "Apache-2.0", + 
"dependencies": { + "@smithy/is-array-buffer": "^4.0.0", + "@smithy/protocol-http": "^5.1.0", + "@smithy/types": "^4.2.0", + "@smithy/util-hex-encoding": "^4.0.0", + "@smithy/util-middleware": "^4.0.2", + "@smithy/util-uri-escape": "^4.0.0", + "@smithy/util-utf8": "^4.0.0", + "tslib": "^2.6.2" + }, + "engines": { + "node": ">=18.0.0" + } + }, + "node_modules/@smithy/smithy-client": { + "version": "4.2.2", + "resolved": "https://registry.npmjs.org/@smithy/smithy-client/-/smithy-client-4.2.2.tgz", + "integrity": "sha512-3AnHfsMdq9Wg7+3BeR1HuLWI9+DMA/SoHVpCWq6xSsa52ikNd6nlF/wFzdpHyGtVa+Aji6lMgvwOF4sGcVA7SA==", + "license": "Apache-2.0", + "dependencies": { + "@smithy/core": "^3.3.1", + "@smithy/middleware-endpoint": "^4.1.2", + "@smithy/middleware-stack": "^4.0.2", + "@smithy/protocol-http": "^5.1.0", + "@smithy/types": "^4.2.0", + "@smithy/util-stream": "^4.2.0", + "tslib": "^2.6.2" + }, + "engines": { + "node": ">=18.0.0" + } + }, + "node_modules/@smithy/types": { + "version": "4.2.0", + "resolved": "https://registry.npmjs.org/@smithy/types/-/types-4.2.0.tgz", + "integrity": "sha512-7eMk09zQKCO+E/ivsjQv+fDlOupcFUCSC/L2YUPgwhvowVGWbPQHjEFcmjt7QQ4ra5lyowS92SV53Zc6XD4+fg==", + "license": "Apache-2.0", + "dependencies": { + "tslib": "^2.6.2" + }, + "engines": { + "node": ">=18.0.0" + } + }, + "node_modules/@smithy/url-parser": { + "version": "4.0.2", + "resolved": "https://registry.npmjs.org/@smithy/url-parser/-/url-parser-4.0.2.tgz", + "integrity": "sha512-Bm8n3j2ScqnT+kJaClSVCMeiSenK6jVAzZCNewsYWuZtnBehEz4r2qP0riZySZVfzB+03XZHJeqfmJDkeeSLiQ==", + "license": "Apache-2.0", + "dependencies": { + "@smithy/querystring-parser": "^4.0.2", + "@smithy/types": "^4.2.0", + "tslib": "^2.6.2" + }, + "engines": { + "node": ">=18.0.0" + } + }, + "node_modules/@smithy/util-base64": { + "version": "4.0.0", + "resolved": "https://registry.npmjs.org/@smithy/util-base64/-/util-base64-4.0.0.tgz", + "integrity": 
"sha512-CvHfCmO2mchox9kjrtzoHkWHxjHZzaFojLc8quxXY7WAAMAg43nuxwv95tATVgQFNDwd4M9S1qFzj40Ul41Kmg==", + "license": "Apache-2.0", + "dependencies": { + "@smithy/util-buffer-from": "^4.0.0", + "@smithy/util-utf8": "^4.0.0", + "tslib": "^2.6.2" + }, + "engines": { + "node": ">=18.0.0" + } + }, + "node_modules/@smithy/util-body-length-browser": { + "version": "4.0.0", + "resolved": "https://registry.npmjs.org/@smithy/util-body-length-browser/-/util-body-length-browser-4.0.0.tgz", + "integrity": "sha512-sNi3DL0/k64/LO3A256M+m3CDdG6V7WKWHdAiBBMUN8S3hK3aMPhwnPik2A/a2ONN+9doY9UxaLfgqsIRg69QA==", + "license": "Apache-2.0", + "dependencies": { + "tslib": "^2.6.2" + }, + "engines": { + "node": ">=18.0.0" + } + }, + "node_modules/@smithy/util-body-length-node": { + "version": "4.0.0", + "resolved": "https://registry.npmjs.org/@smithy/util-body-length-node/-/util-body-length-node-4.0.0.tgz", + "integrity": "sha512-q0iDP3VsZzqJyje8xJWEJCNIu3lktUGVoSy1KB0UWym2CL1siV3artm+u1DFYTLejpsrdGyCSWBdGNjJzfDPjg==", + "license": "Apache-2.0", + "dependencies": { + "tslib": "^2.6.2" + }, + "engines": { + "node": ">=18.0.0" + } + }, + "node_modules/@smithy/util-buffer-from": { + "version": "4.0.0", + "resolved": "https://registry.npmjs.org/@smithy/util-buffer-from/-/util-buffer-from-4.0.0.tgz", + "integrity": "sha512-9TOQ7781sZvddgO8nxueKi3+yGvkY35kotA0Y6BWRajAv8jjmigQ1sBwz0UX47pQMYXJPahSKEKYFgt+rXdcug==", + "license": "Apache-2.0", + "dependencies": { + "@smithy/is-array-buffer": "^4.0.0", + "tslib": "^2.6.2" + }, + "engines": { + "node": ">=18.0.0" + } + }, + "node_modules/@smithy/util-config-provider": { + "version": "4.0.0", + "resolved": "https://registry.npmjs.org/@smithy/util-config-provider/-/util-config-provider-4.0.0.tgz", + "integrity": "sha512-L1RBVzLyfE8OXH+1hsJ8p+acNUSirQnWQ6/EgpchV88G6zGBTDPdXiiExei6Z1wR2RxYvxY/XLw6AMNCCt8H3w==", + "license": "Apache-2.0", + "dependencies": { + "tslib": "^2.6.2" + }, + "engines": { + "node": ">=18.0.0" + } + }, + 
"node_modules/@smithy/util-defaults-mode-browser": { + "version": "4.0.10", + "resolved": "https://registry.npmjs.org/@smithy/util-defaults-mode-browser/-/util-defaults-mode-browser-4.0.10.tgz", + "integrity": "sha512-2k6fgUNOZ1Rn0gEjvGPGrDEINLG8qSBHsN7xlkkbO+fnHJ36BQPDzhFfMmYSDS8AgzoygqQiDOQ+6Hp2vBTUdA==", + "license": "Apache-2.0", + "dependencies": { + "@smithy/property-provider": "^4.0.2", + "@smithy/smithy-client": "^4.2.2", + "@smithy/types": "^4.2.0", + "bowser": "^2.11.0", + "tslib": "^2.6.2" + }, + "engines": { + "node": ">=18.0.0" + } + }, + "node_modules/@smithy/util-defaults-mode-node": { + "version": "4.0.10", + "resolved": "https://registry.npmjs.org/@smithy/util-defaults-mode-node/-/util-defaults-mode-node-4.0.10.tgz", + "integrity": "sha512-2XR1WRglLVmoIFts7bODUTgBdVyvkfKNkydHrlsI5VxW9q3s1hnJCuY+f1OHzvj5ue23q4vydM2fjrMjf2HSdQ==", + "license": "Apache-2.0", + "dependencies": { + "@smithy/config-resolver": "^4.1.0", + "@smithy/credential-provider-imds": "^4.0.2", + "@smithy/node-config-provider": "^4.0.2", + "@smithy/property-provider": "^4.0.2", + "@smithy/smithy-client": "^4.2.2", + "@smithy/types": "^4.2.0", + "tslib": "^2.6.2" + }, + "engines": { + "node": ">=18.0.0" + } + }, + "node_modules/@smithy/util-endpoints": { + "version": "3.0.2", + "resolved": "https://registry.npmjs.org/@smithy/util-endpoints/-/util-endpoints-3.0.2.tgz", + "integrity": "sha512-6QSutU5ZyrpNbnd51zRTL7goojlcnuOB55+F9VBD+j8JpRY50IGamsjlycrmpn8PQkmJucFW8A0LSfXj7jjtLQ==", + "license": "Apache-2.0", + "dependencies": { + "@smithy/node-config-provider": "^4.0.2", + "@smithy/types": "^4.2.0", + "tslib": "^2.6.2" + }, + "engines": { + "node": ">=18.0.0" + } + }, + "node_modules/@smithy/util-hex-encoding": { + "version": "4.0.0", + "resolved": "https://registry.npmjs.org/@smithy/util-hex-encoding/-/util-hex-encoding-4.0.0.tgz", + "integrity": "sha512-Yk5mLhHtfIgW2W2WQZWSg5kuMZCVbvhFmC7rV4IO2QqnZdbEFPmQnCcGMAX2z/8Qj3B9hYYNjZOhWym+RwhePw==", + "license": "Apache-2.0", + 
"dependencies": { + "tslib": "^2.6.2" + }, + "engines": { + "node": ">=18.0.0" + } + }, + "node_modules/@smithy/util-middleware": { + "version": "4.0.2", + "resolved": "https://registry.npmjs.org/@smithy/util-middleware/-/util-middleware-4.0.2.tgz", + "integrity": "sha512-6GDamTGLuBQVAEuQ4yDQ+ti/YINf/MEmIegrEeg7DdB/sld8BX1lqt9RRuIcABOhAGTA50bRbPzErez7SlDtDQ==", + "license": "Apache-2.0", + "dependencies": { + "@smithy/types": "^4.2.0", + "tslib": "^2.6.2" + }, + "engines": { + "node": ">=18.0.0" + } + }, + "node_modules/@smithy/util-retry": { + "version": "4.0.3", + "resolved": "https://registry.npmjs.org/@smithy/util-retry/-/util-retry-4.0.3.tgz", + "integrity": "sha512-DPuYjZQDXmKr/sNvy9Spu8R/ESa2e22wXZzSAY6NkjOLj6spbIje/Aq8rT97iUMdDj0qHMRIe+bTxvlU74d9Ng==", + "license": "Apache-2.0", + "dependencies": { + "@smithy/service-error-classification": "^4.0.3", + "@smithy/types": "^4.2.0", + "tslib": "^2.6.2" + }, + "engines": { + "node": ">=18.0.0" + } + }, + "node_modules/@smithy/util-stream": { + "version": "4.2.0", + "resolved": "https://registry.npmjs.org/@smithy/util-stream/-/util-stream-4.2.0.tgz", + "integrity": "sha512-Vj1TtwWnuWqdgQI6YTUF5hQ/0jmFiOYsc51CSMgj7QfyO+RF4EnT2HNjoviNlOOmgzgvf3f5yno+EiC4vrnaWQ==", + "license": "Apache-2.0", + "dependencies": { + "@smithy/fetch-http-handler": "^5.0.2", + "@smithy/node-http-handler": "^4.0.4", + "@smithy/types": "^4.2.0", + "@smithy/util-base64": "^4.0.0", + "@smithy/util-buffer-from": "^4.0.0", + "@smithy/util-hex-encoding": "^4.0.0", + "@smithy/util-utf8": "^4.0.0", + "tslib": "^2.6.2" + }, + "engines": { + "node": ">=18.0.0" + } + }, + "node_modules/@smithy/util-uri-escape": { + "version": "4.0.0", + "resolved": "https://registry.npmjs.org/@smithy/util-uri-escape/-/util-uri-escape-4.0.0.tgz", + "integrity": "sha512-77yfbCbQMtgtTylO9itEAdpPXSog3ZxMe09AEhm0dU0NLTalV70ghDZFR+Nfi1C60jnJoh/Re4090/DuZh2Omg==", + "license": "Apache-2.0", + "dependencies": { + "tslib": "^2.6.2" + }, + "engines": { + "node": ">=18.0.0" + } 
+ }, + "node_modules/@smithy/util-utf8": { + "version": "4.0.0", + "resolved": "https://registry.npmjs.org/@smithy/util-utf8/-/util-utf8-4.0.0.tgz", + "integrity": "sha512-b+zebfKCfRdgNJDknHCob3O7FpeYQN6ZG6YLExMcasDHsCXlsXCEuiPZeLnJLpwa5dvPetGlnGCiMHuLwGvFow==", + "license": "Apache-2.0", + "dependencies": { + "@smithy/util-buffer-from": "^4.0.0", + "tslib": "^2.6.2" + }, + "engines": { + "node": ">=18.0.0" + } + }, + "node_modules/@smithy/util-waiter": { + "version": "4.0.3", + "resolved": "https://registry.npmjs.org/@smithy/util-waiter/-/util-waiter-4.0.3.tgz", + "integrity": "sha512-JtaY3FxmD+te+KSI2FJuEcfNC9T/DGGVf551babM7fAaXhjJUt7oSYurH1Devxd2+BOSUACCgt3buinx4UnmEA==", + "license": "Apache-2.0", + "dependencies": { + "@smithy/abort-controller": "^4.0.2", + "@smithy/types": "^4.2.0", + "tslib": "^2.6.2" + }, + "engines": { + "node": ">=18.0.0" + } + }, + "node_modules/@types/uuid": { + "version": "9.0.8", + "resolved": "https://registry.npmjs.org/@types/uuid/-/uuid-9.0.8.tgz", + "integrity": "sha512-jg+97EGIcY9AGHJJRaaPVgetKDsrTgbRjQ5Msgjh/DQKEFl0DtyRr/VCOyD1T2R1MNeWPK/u7JoGhlDZnKBAfA==", + "license": "MIT" + }, + "node_modules/bowser": { + "version": "2.11.0", + "resolved": "https://registry.npmjs.org/bowser/-/bowser-2.11.0.tgz", + "integrity": "sha512-AlcaJBi/pqqJBIQ8U9Mcpc9i8Aqxn88Skv5d+xBX006BY5u8N3mGLHa5Lgppa7L/HfwgwLgZ6NYs+Ag6uUmJRA==", + "license": "MIT" + }, + "node_modules/fast-xml-parser": { + "version": "4.4.1", + "resolved": "https://registry.npmjs.org/fast-xml-parser/-/fast-xml-parser-4.4.1.tgz", + "integrity": "sha512-xkjOecfnKGkSsOwtZ5Pz7Us/T6mrbPQrq0nh+aCO5V9nk5NLWmasAHumTKjiPJPWANe+kAZ84Jc8ooJkzZ88Sw==", + "funding": [ + { + "type": "github", + "url": "https://github.com/sponsors/NaturalIntelligence" + }, + { + "type": "paypal", + "url": "https://paypal.me/naturalintelligence" + } + ], + "license": "MIT", + "dependencies": { + "strnum": "^1.0.5" + }, + "bin": { + "fxparser": "src/cli/cli.js" + } + }, + "node_modules/mnemonist": { + "version": 
"0.38.3", + "resolved": "https://registry.npmjs.org/mnemonist/-/mnemonist-0.38.3.tgz", + "integrity": "sha512-2K9QYubXx/NAjv4VLq1d1Ly8pWNC5L3BrixtdkyTegXWJIqY+zLNDhhX/A+ZwWt70tB1S8H4BE8FLYEFyNoOBw==", + "license": "MIT", + "dependencies": { + "obliterator": "^1.6.1" + } + }, + "node_modules/obliterator": { + "version": "1.6.1", + "resolved": "https://registry.npmjs.org/obliterator/-/obliterator-1.6.1.tgz", + "integrity": "sha512-9WXswnqINnnhOG/5SLimUlzuU1hFJUc8zkwyD59Sd+dPOMf05PmnYG/d6Q7HZ+KmgkZJa1PxRso6QdM3sTNHig==", + "license": "MIT" + }, + "node_modules/strnum": { + "version": "1.1.2", + "resolved": "https://registry.npmjs.org/strnum/-/strnum-1.1.2.tgz", + "integrity": "sha512-vrN+B7DBIoTTZjnPNewwhx6cBA/H+IS7rfW68n7XxC1y7uoiGQBxaKzqucGUgavX15dJgiGztLJ8vxuEzwqBdA==", + "funding": [ + { + "type": "github", + "url": "https://github.com/sponsors/NaturalIntelligence" + } + ], + "license": "MIT" + }, + "node_modules/tslib": { + "version": "2.8.1", + "resolved": "https://registry.npmjs.org/tslib/-/tslib-2.8.1.tgz", + "integrity": "sha512-oJFu94HQb+KVduSUQL7wnpmqnfmLsOA/nAh6b6EH0wCEoK0/mPeXU6c3wKDV83MkOuHPRHtSXKKU99IBazS/2w==", + "license": "0BSD" + }, + "node_modules/uuid": { + "version": "9.0.1", + "resolved": "https://registry.npmjs.org/uuid/-/uuid-9.0.1.tgz", + "integrity": "sha512-b+1eJOlsR9K8HJpow9Ok3fiWOWSIcIzXodvv0rQjVoOVNpWMpxf1wZNpt4y9h10odCNrqnYp1OBzRktckBe3sA==", + "funding": [ + "https://github.com/sponsors/broofa", + "https://github.com/sponsors/ctavan" + ], + "license": "MIT", + "bin": { + "uuid": "dist/bin/uuid" + } + } + } +} diff --git a/amplify/functions/downloadDocument/package.json b/amplify/functions/downloadDocument/package.json index e69de29..517dd29 100644 --- a/amplify/functions/downloadDocument/package.json +++ b/amplify/functions/downloadDocument/package.json @@ -0,0 +1,16 @@ +{ + "name": "downloaddocument", + "version": "1.0.0", + "main": "index.js", + "scripts": { + "test": "echo \"Error: no test specified\" && exit 1" + }, + "keywords": 
[], + "author": "", + "license": "ISC", + "description": "", + "dependencies": { + "@aws-sdk/client-dynamodb": "^3.803.0", + "@aws-sdk/lib-dynamodb": "^3.803.0" + } +} diff --git a/amplify/functions/fetchDocuments/index.mjs b/amplify/functions/fetchDocuments/index.mjs index fe59eeb..f2e65f5 100644 --- a/amplify/functions/fetchDocuments/index.mjs +++ b/amplify/functions/fetchDocuments/index.mjs @@ -7,7 +7,7 @@ const client = new DynamoDBClient({}); const docClient = DynamoDBDocumentClient.from(client); // Get the DynamoDB table name from environment variables -const tableName = process.env.DOCUMENT_TABLE_NAME; +const tableName = "Document-nu434abnqjhf3kcbgxbcibzamu-NONE"; export const handler = async (event) => { console.log("Event received:", JSON.stringify(event, null, 2)); diff --git a/amplify/functions/fetchDocuments/node_modules/.bin/fxparser b/amplify/functions/fetchDocuments/node_modules/.bin/fxparser new file mode 120000 index 0000000..75327ed --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/.bin/fxparser @@ -0,0 +1 @@ +../fast-xml-parser/src/cli/cli.js \ No newline at end of file diff --git a/amplify/functions/fetchDocuments/node_modules/.bin/uuid b/amplify/functions/fetchDocuments/node_modules/.bin/uuid new file mode 120000 index 0000000..588f70e --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/.bin/uuid @@ -0,0 +1 @@ +../uuid/dist/bin/uuid \ No newline at end of file diff --git a/amplify/functions/fetchDocuments/node_modules/.package-lock.json b/amplify/functions/fetchDocuments/node_modules/.package-lock.json new file mode 100644 index 0000000..9c7cfce --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/.package-lock.json @@ -0,0 +1,1340 @@ +{ + "name": "fetchdocuments", + "version": "1.0.0", + "lockfileVersion": 3, + "requires": true, + "packages": { + "node_modules/@aws-crypto/sha256-browser": { + "version": "5.2.0", + "resolved": 
"https://registry.npmjs.org/@aws-crypto/sha256-browser/-/sha256-browser-5.2.0.tgz", + "integrity": "sha512-AXfN/lGotSQwu6HNcEsIASo7kWXZ5HYWvfOmSNKDsEqC4OashTp8alTmaz+F7TC2L083SFv5RdB+qU3Vs1kZqw==", + "license": "Apache-2.0", + "dependencies": { + "@aws-crypto/sha256-js": "^5.2.0", + "@aws-crypto/supports-web-crypto": "^5.2.0", + "@aws-crypto/util": "^5.2.0", + "@aws-sdk/types": "^3.222.0", + "@aws-sdk/util-locate-window": "^3.0.0", + "@smithy/util-utf8": "^2.0.0", + "tslib": "^2.6.2" + } + }, + "node_modules/@aws-crypto/sha256-browser/node_modules/@smithy/is-array-buffer": { + "version": "2.2.0", + "resolved": "https://registry.npmjs.org/@smithy/is-array-buffer/-/is-array-buffer-2.2.0.tgz", + "integrity": "sha512-GGP3O9QFD24uGeAXYUjwSTXARoqpZykHadOmA8G5vfJPK0/DC67qa//0qvqrJzL1xc8WQWX7/yc7fwudjPHPhA==", + "license": "Apache-2.0", + "dependencies": { + "tslib": "^2.6.2" + }, + "engines": { + "node": ">=14.0.0" + } + }, + "node_modules/@aws-crypto/sha256-browser/node_modules/@smithy/util-buffer-from": { + "version": "2.2.0", + "resolved": "https://registry.npmjs.org/@smithy/util-buffer-from/-/util-buffer-from-2.2.0.tgz", + "integrity": "sha512-IJdWBbTcMQ6DA0gdNhh/BwrLkDR+ADW5Kr1aZmd4k3DIF6ezMV4R2NIAmT08wQJ3yUK82thHWmC/TnK/wpMMIA==", + "license": "Apache-2.0", + "dependencies": { + "@smithy/is-array-buffer": "^2.2.0", + "tslib": "^2.6.2" + }, + "engines": { + "node": ">=14.0.0" + } + }, + "node_modules/@aws-crypto/sha256-browser/node_modules/@smithy/util-utf8": { + "version": "2.3.0", + "resolved": "https://registry.npmjs.org/@smithy/util-utf8/-/util-utf8-2.3.0.tgz", + "integrity": "sha512-R8Rdn8Hy72KKcebgLiv8jQcQkXoLMOGGv5uI1/k0l+snqkOzQ1R0ChUBCxWMlBsFMekWjq0wRudIweFs7sKT5A==", + "license": "Apache-2.0", + "dependencies": { + "@smithy/util-buffer-from": "^2.2.0", + "tslib": "^2.6.2" + }, + "engines": { + "node": ">=14.0.0" + } + }, + "node_modules/@aws-crypto/sha256-js": { + "version": "5.2.0", + "resolved": 
"https://registry.npmjs.org/@aws-crypto/sha256-js/-/sha256-js-5.2.0.tgz", + "integrity": "sha512-FFQQyu7edu4ufvIZ+OadFpHHOt+eSTBaYaki44c+akjg7qZg9oOQeLlk77F6tSYqjDAFClrHJk9tMf0HdVyOvA==", + "license": "Apache-2.0", + "dependencies": { + "@aws-crypto/util": "^5.2.0", + "@aws-sdk/types": "^3.222.0", + "tslib": "^2.6.2" + }, + "engines": { + "node": ">=16.0.0" + } + }, + "node_modules/@aws-crypto/supports-web-crypto": { + "version": "5.2.0", + "resolved": "https://registry.npmjs.org/@aws-crypto/supports-web-crypto/-/supports-web-crypto-5.2.0.tgz", + "integrity": "sha512-iAvUotm021kM33eCdNfwIN//F77/IADDSs58i+MDaOqFrVjZo9bAal0NK7HurRuWLLpF1iLX7gbWrjHjeo+YFg==", + "license": "Apache-2.0", + "dependencies": { + "tslib": "^2.6.2" + } + }, + "node_modules/@aws-crypto/util": { + "version": "5.2.0", + "resolved": "https://registry.npmjs.org/@aws-crypto/util/-/util-5.2.0.tgz", + "integrity": "sha512-4RkU9EsI6ZpBve5fseQlGNUWKMa1RLPQ1dnjnQoe07ldfIzcsGb5hC5W0Dm7u423KWzawlrpbjXBrXCEv9zazQ==", + "license": "Apache-2.0", + "dependencies": { + "@aws-sdk/types": "^3.222.0", + "@smithy/util-utf8": "^2.0.0", + "tslib": "^2.6.2" + } + }, + "node_modules/@aws-crypto/util/node_modules/@smithy/is-array-buffer": { + "version": "2.2.0", + "resolved": "https://registry.npmjs.org/@smithy/is-array-buffer/-/is-array-buffer-2.2.0.tgz", + "integrity": "sha512-GGP3O9QFD24uGeAXYUjwSTXARoqpZykHadOmA8G5vfJPK0/DC67qa//0qvqrJzL1xc8WQWX7/yc7fwudjPHPhA==", + "license": "Apache-2.0", + "dependencies": { + "tslib": "^2.6.2" + }, + "engines": { + "node": ">=14.0.0" + } + }, + "node_modules/@aws-crypto/util/node_modules/@smithy/util-buffer-from": { + "version": "2.2.0", + "resolved": "https://registry.npmjs.org/@smithy/util-buffer-from/-/util-buffer-from-2.2.0.tgz", + "integrity": "sha512-IJdWBbTcMQ6DA0gdNhh/BwrLkDR+ADW5Kr1aZmd4k3DIF6ezMV4R2NIAmT08wQJ3yUK82thHWmC/TnK/wpMMIA==", + "license": "Apache-2.0", + "dependencies": { + "@smithy/is-array-buffer": "^2.2.0", + "tslib": "^2.6.2" + }, + "engines": { + 
"node": ">=14.0.0" + } + }, + "node_modules/@aws-crypto/util/node_modules/@smithy/util-utf8": { + "version": "2.3.0", + "resolved": "https://registry.npmjs.org/@smithy/util-utf8/-/util-utf8-2.3.0.tgz", + "integrity": "sha512-R8Rdn8Hy72KKcebgLiv8jQcQkXoLMOGGv5uI1/k0l+snqkOzQ1R0ChUBCxWMlBsFMekWjq0wRudIweFs7sKT5A==", + "license": "Apache-2.0", + "dependencies": { + "@smithy/util-buffer-from": "^2.2.0", + "tslib": "^2.6.2" + }, + "engines": { + "node": ">=14.0.0" + } + }, + "node_modules/@aws-sdk/client-dynamodb": { + "version": "3.803.0", + "resolved": "https://registry.npmjs.org/@aws-sdk/client-dynamodb/-/client-dynamodb-3.803.0.tgz", + "integrity": "sha512-rJPidxfyTQHz/1Naq3FukSoIt40GwXfv3npVR15bCBFpqx9TXEt7GoIUbiqm+Ftx8sx9hqJ6XNhf80FIa243gw==", + "license": "Apache-2.0", + "dependencies": { + "@aws-crypto/sha256-browser": "5.2.0", + "@aws-crypto/sha256-js": "5.2.0", + "@aws-sdk/core": "3.799.0", + "@aws-sdk/credential-provider-node": "3.803.0", + "@aws-sdk/middleware-endpoint-discovery": "3.775.0", + "@aws-sdk/middleware-host-header": "3.775.0", + "@aws-sdk/middleware-logger": "3.775.0", + "@aws-sdk/middleware-recursion-detection": "3.775.0", + "@aws-sdk/middleware-user-agent": "3.799.0", + "@aws-sdk/region-config-resolver": "3.775.0", + "@aws-sdk/types": "3.775.0", + "@aws-sdk/util-endpoints": "3.787.0", + "@aws-sdk/util-user-agent-browser": "3.775.0", + "@aws-sdk/util-user-agent-node": "3.799.0", + "@smithy/config-resolver": "^4.1.0", + "@smithy/core": "^3.3.0", + "@smithy/fetch-http-handler": "^5.0.2", + "@smithy/hash-node": "^4.0.2", + "@smithy/invalid-dependency": "^4.0.2", + "@smithy/middleware-content-length": "^4.0.2", + "@smithy/middleware-endpoint": "^4.1.1", + "@smithy/middleware-retry": "^4.1.2", + "@smithy/middleware-serde": "^4.0.3", + "@smithy/middleware-stack": "^4.0.2", + "@smithy/node-config-provider": "^4.0.2", + "@smithy/node-http-handler": "^4.0.4", + "@smithy/protocol-http": "^5.1.0", + "@smithy/smithy-client": "^4.2.1", + "@smithy/types": 
"^4.2.0", + "@smithy/url-parser": "^4.0.2", + "@smithy/util-base64": "^4.0.0", + "@smithy/util-body-length-browser": "^4.0.0", + "@smithy/util-body-length-node": "^4.0.0", + "@smithy/util-defaults-mode-browser": "^4.0.9", + "@smithy/util-defaults-mode-node": "^4.0.9", + "@smithy/util-endpoints": "^3.0.2", + "@smithy/util-middleware": "^4.0.2", + "@smithy/util-retry": "^4.0.3", + "@smithy/util-utf8": "^4.0.0", + "@smithy/util-waiter": "^4.0.3", + "@types/uuid": "^9.0.1", + "tslib": "^2.6.2", + "uuid": "^9.0.1" + }, + "engines": { + "node": ">=18.0.0" + } + }, + "node_modules/@aws-sdk/client-sso": { + "version": "3.803.0", + "resolved": "https://registry.npmjs.org/@aws-sdk/client-sso/-/client-sso-3.803.0.tgz", + "integrity": "sha512-TT3BRD1yiL3IGXBKfq560vvEdyOJtJr8bp+R82dD6P0IoS8aFcNtF822BOJy7CqvxksOc3hQKLaPVzE82gE8Ow==", + "license": "Apache-2.0", + "dependencies": { + "@aws-crypto/sha256-browser": "5.2.0", + "@aws-crypto/sha256-js": "5.2.0", + "@aws-sdk/core": "3.799.0", + "@aws-sdk/middleware-host-header": "3.775.0", + "@aws-sdk/middleware-logger": "3.775.0", + "@aws-sdk/middleware-recursion-detection": "3.775.0", + "@aws-sdk/middleware-user-agent": "3.799.0", + "@aws-sdk/region-config-resolver": "3.775.0", + "@aws-sdk/types": "3.775.0", + "@aws-sdk/util-endpoints": "3.787.0", + "@aws-sdk/util-user-agent-browser": "3.775.0", + "@aws-sdk/util-user-agent-node": "3.799.0", + "@smithy/config-resolver": "^4.1.0", + "@smithy/core": "^3.3.0", + "@smithy/fetch-http-handler": "^5.0.2", + "@smithy/hash-node": "^4.0.2", + "@smithy/invalid-dependency": "^4.0.2", + "@smithy/middleware-content-length": "^4.0.2", + "@smithy/middleware-endpoint": "^4.1.1", + "@smithy/middleware-retry": "^4.1.2", + "@smithy/middleware-serde": "^4.0.3", + "@smithy/middleware-stack": "^4.0.2", + "@smithy/node-config-provider": "^4.0.2", + "@smithy/node-http-handler": "^4.0.4", + "@smithy/protocol-http": "^5.1.0", + "@smithy/smithy-client": "^4.2.1", + "@smithy/types": "^4.2.0", + 
"@smithy/url-parser": "^4.0.2", + "@smithy/util-base64": "^4.0.0", + "@smithy/util-body-length-browser": "^4.0.0", + "@smithy/util-body-length-node": "^4.0.0", + "@smithy/util-defaults-mode-browser": "^4.0.9", + "@smithy/util-defaults-mode-node": "^4.0.9", + "@smithy/util-endpoints": "^3.0.2", + "@smithy/util-middleware": "^4.0.2", + "@smithy/util-retry": "^4.0.3", + "@smithy/util-utf8": "^4.0.0", + "tslib": "^2.6.2" + }, + "engines": { + "node": ">=18.0.0" + } + }, + "node_modules/@aws-sdk/core": { + "version": "3.799.0", + "resolved": "https://registry.npmjs.org/@aws-sdk/core/-/core-3.799.0.tgz", + "integrity": "sha512-hkKF3Zpc6+H8GI1rlttYVRh9uEE77cqAzLmLpY3iu7sql8cZgPERRBfaFct8p1SaDyrksLNiboD1vKW58mbsYg==", + "license": "Apache-2.0", + "dependencies": { + "@aws-sdk/types": "3.775.0", + "@smithy/core": "^3.3.0", + "@smithy/node-config-provider": "^4.0.2", + "@smithy/property-provider": "^4.0.2", + "@smithy/protocol-http": "^5.1.0", + "@smithy/signature-v4": "^5.1.0", + "@smithy/smithy-client": "^4.2.1", + "@smithy/types": "^4.2.0", + "@smithy/util-middleware": "^4.0.2", + "fast-xml-parser": "4.4.1", + "tslib": "^2.6.2" + }, + "engines": { + "node": ">=18.0.0" + } + }, + "node_modules/@aws-sdk/credential-provider-env": { + "version": "3.799.0", + "resolved": "https://registry.npmjs.org/@aws-sdk/credential-provider-env/-/credential-provider-env-3.799.0.tgz", + "integrity": "sha512-vT/SSWtbUIOW/U21qgEySmmO44SFWIA7WeQPX1OrI8WJ5n7OEI23JWLHjLvHTkYmuZK6z1rPcv7HzRgmuGRibA==", + "license": "Apache-2.0", + "dependencies": { + "@aws-sdk/core": "3.799.0", + "@aws-sdk/types": "3.775.0", + "@smithy/property-provider": "^4.0.2", + "@smithy/types": "^4.2.0", + "tslib": "^2.6.2" + }, + "engines": { + "node": ">=18.0.0" + } + }, + "node_modules/@aws-sdk/credential-provider-http": { + "version": "3.799.0", + "resolved": "https://registry.npmjs.org/@aws-sdk/credential-provider-http/-/credential-provider-http-3.799.0.tgz", + "integrity": 
"sha512-2CjBpOWmhaPAExOgHnIB5nOkS5ef+mfRlJ1JC4nsnjAx0nrK4tk0XRE0LYz11P3+ue+a86cU8WTmBo+qjnGxPQ==", + "license": "Apache-2.0", + "dependencies": { + "@aws-sdk/core": "3.799.0", + "@aws-sdk/types": "3.775.0", + "@smithy/fetch-http-handler": "^5.0.2", + "@smithy/node-http-handler": "^4.0.4", + "@smithy/property-provider": "^4.0.2", + "@smithy/protocol-http": "^5.1.0", + "@smithy/smithy-client": "^4.2.1", + "@smithy/types": "^4.2.0", + "@smithy/util-stream": "^4.2.0", + "tslib": "^2.6.2" + }, + "engines": { + "node": ">=18.0.0" + } + }, + "node_modules/@aws-sdk/credential-provider-ini": { + "version": "3.803.0", + "resolved": "https://registry.npmjs.org/@aws-sdk/credential-provider-ini/-/credential-provider-ini-3.803.0.tgz", + "integrity": "sha512-XtbFftJex18GobpRWJxg5V7stVwvmV2gdBYW+zRM0YW6NZAR4NP/4vcc9ktM3++BWW5OF4Kvl7Nu7N4mAzRHmw==", + "license": "Apache-2.0", + "dependencies": { + "@aws-sdk/core": "3.799.0", + "@aws-sdk/credential-provider-env": "3.799.0", + "@aws-sdk/credential-provider-http": "3.799.0", + "@aws-sdk/credential-provider-process": "3.799.0", + "@aws-sdk/credential-provider-sso": "3.803.0", + "@aws-sdk/credential-provider-web-identity": "3.803.0", + "@aws-sdk/nested-clients": "3.803.0", + "@aws-sdk/types": "3.775.0", + "@smithy/credential-provider-imds": "^4.0.2", + "@smithy/property-provider": "^4.0.2", + "@smithy/shared-ini-file-loader": "^4.0.2", + "@smithy/types": "^4.2.0", + "tslib": "^2.6.2" + }, + "engines": { + "node": ">=18.0.0" + } + }, + "node_modules/@aws-sdk/credential-provider-node": { + "version": "3.803.0", + "resolved": "https://registry.npmjs.org/@aws-sdk/credential-provider-node/-/credential-provider-node-3.803.0.tgz", + "integrity": "sha512-lPdRYbjxwmv7gRqbaEe1Y1Yl5fD4c43AuK3P31eKjf1j41hZEQ0dg9a9KLk7i6ehEoVsxewnJrvbC2pVoYrCmQ==", + "license": "Apache-2.0", + "dependencies": { + "@aws-sdk/credential-provider-env": "3.799.0", + "@aws-sdk/credential-provider-http": "3.799.0", + "@aws-sdk/credential-provider-ini": "3.803.0", + 
"@aws-sdk/credential-provider-process": "3.799.0", + "@aws-sdk/credential-provider-sso": "3.803.0", + "@aws-sdk/credential-provider-web-identity": "3.803.0", + "@aws-sdk/types": "3.775.0", + "@smithy/credential-provider-imds": "^4.0.2", + "@smithy/property-provider": "^4.0.2", + "@smithy/shared-ini-file-loader": "^4.0.2", + "@smithy/types": "^4.2.0", + "tslib": "^2.6.2" + }, + "engines": { + "node": ">=18.0.0" + } + }, + "node_modules/@aws-sdk/credential-provider-process": { + "version": "3.799.0", + "resolved": "https://registry.npmjs.org/@aws-sdk/credential-provider-process/-/credential-provider-process-3.799.0.tgz", + "integrity": "sha512-g8jmNs2k98WNHMYcea1YKA+7ao2Ma4w0P42Dz4YpcI155pQHxHx25RwbOG+rsAKuo3bKwkW53HVE/ZTKhcWFgw==", + "license": "Apache-2.0", + "dependencies": { + "@aws-sdk/core": "3.799.0", + "@aws-sdk/types": "3.775.0", + "@smithy/property-provider": "^4.0.2", + "@smithy/shared-ini-file-loader": "^4.0.2", + "@smithy/types": "^4.2.0", + "tslib": "^2.6.2" + }, + "engines": { + "node": ">=18.0.0" + } + }, + "node_modules/@aws-sdk/credential-provider-sso": { + "version": "3.803.0", + "resolved": "https://registry.npmjs.org/@aws-sdk/credential-provider-sso/-/credential-provider-sso-3.803.0.tgz", + "integrity": "sha512-HEAcxSHrHxVekGnZqjFrkqdYAf4jFiZIMhuh0jqiqY6A4udEyXy1V623HVcTz/XXj6UBRnyD+zmOmlbzBvkfQg==", + "license": "Apache-2.0", + "dependencies": { + "@aws-sdk/client-sso": "3.803.0", + "@aws-sdk/core": "3.799.0", + "@aws-sdk/token-providers": "3.803.0", + "@aws-sdk/types": "3.775.0", + "@smithy/property-provider": "^4.0.2", + "@smithy/shared-ini-file-loader": "^4.0.2", + "@smithy/types": "^4.2.0", + "tslib": "^2.6.2" + }, + "engines": { + "node": ">=18.0.0" + } + }, + "node_modules/@aws-sdk/credential-provider-web-identity": { + "version": "3.803.0", + "resolved": "https://registry.npmjs.org/@aws-sdk/credential-provider-web-identity/-/credential-provider-web-identity-3.803.0.tgz", + "integrity": 
"sha512-oChnEpwI25OW4GPvhI1VnXM3IQEkDhESGFZd5JHzJDHyvSF2NU58V86jkJyaa4H4X25IbGaThuulNI5xCOngjw==", + "license": "Apache-2.0", + "dependencies": { + "@aws-sdk/core": "3.799.0", + "@aws-sdk/nested-clients": "3.803.0", + "@aws-sdk/types": "3.775.0", + "@smithy/property-provider": "^4.0.2", + "@smithy/types": "^4.2.0", + "tslib": "^2.6.2" + }, + "engines": { + "node": ">=18.0.0" + } + }, + "node_modules/@aws-sdk/endpoint-cache": { + "version": "3.723.0", + "resolved": "https://registry.npmjs.org/@aws-sdk/endpoint-cache/-/endpoint-cache-3.723.0.tgz", + "integrity": "sha512-2+a4WXRc+07uiPR+zJiPGKSOWaNJQNqitkks+6Hhm/haTLJqNVTgY2OWDh2PXvwMNpKB+AlGdhE65Oy6NzUgXg==", + "license": "Apache-2.0", + "dependencies": { + "mnemonist": "0.38.3", + "tslib": "^2.6.2" + }, + "engines": { + "node": ">=18.0.0" + } + }, + "node_modules/@aws-sdk/lib-dynamodb": { + "version": "3.803.0", + "resolved": "https://registry.npmjs.org/@aws-sdk/lib-dynamodb/-/lib-dynamodb-3.803.0.tgz", + "integrity": "sha512-J9oeaKnF0vfw1ixUc0Bu1GTcYwp/riiGCst/MSLPHeGqoFiYzyox/im1Pbuv2Ipx7/0QI7w5PxYdxOpwvqMCFg==", + "license": "Apache-2.0", + "dependencies": { + "@aws-sdk/core": "3.799.0", + "@aws-sdk/util-dynamodb": "3.803.0", + "@smithy/core": "^3.3.0", + "@smithy/smithy-client": "^4.2.1", + "@smithy/types": "^4.2.0", + "tslib": "^2.6.2" + }, + "engines": { + "node": ">=18.0.0" + }, + "peerDependencies": { + "@aws-sdk/client-dynamodb": "^3.803.0" + } + }, + "node_modules/@aws-sdk/middleware-endpoint-discovery": { + "version": "3.775.0", + "resolved": "https://registry.npmjs.org/@aws-sdk/middleware-endpoint-discovery/-/middleware-endpoint-discovery-3.775.0.tgz", + "integrity": "sha512-L0PmjSg7t+wovRo/Lin1kpei3e7wBhrENWb1Bbccu3PWUIfxolGeWplOmNhSlXjuQe9GXjf3z8kJRYOGBMFOvw==", + "license": "Apache-2.0", + "dependencies": { + "@aws-sdk/endpoint-cache": "3.723.0", + "@aws-sdk/types": "3.775.0", + "@smithy/node-config-provider": "^4.0.2", + "@smithy/protocol-http": "^5.1.0", + "@smithy/types": "^4.2.0", + "tslib": 
"^2.6.2" + }, + "engines": { + "node": ">=18.0.0" + } + }, + "node_modules/@aws-sdk/middleware-host-header": { + "version": "3.775.0", + "resolved": "https://registry.npmjs.org/@aws-sdk/middleware-host-header/-/middleware-host-header-3.775.0.tgz", + "integrity": "sha512-tkSegM0Z6WMXpLB8oPys/d+umYIocvO298mGvcMCncpRl77L9XkvSLJIFzaHes+o7djAgIduYw8wKIMStFss2w==", + "license": "Apache-2.0", + "dependencies": { + "@aws-sdk/types": "3.775.0", + "@smithy/protocol-http": "^5.1.0", + "@smithy/types": "^4.2.0", + "tslib": "^2.6.2" + }, + "engines": { + "node": ">=18.0.0" + } + }, + "node_modules/@aws-sdk/middleware-logger": { + "version": "3.775.0", + "resolved": "https://registry.npmjs.org/@aws-sdk/middleware-logger/-/middleware-logger-3.775.0.tgz", + "integrity": "sha512-FaxO1xom4MAoUJsldmR92nT1G6uZxTdNYOFYtdHfd6N2wcNaTuxgjIvqzg5y7QIH9kn58XX/dzf1iTjgqUStZw==", + "license": "Apache-2.0", + "dependencies": { + "@aws-sdk/types": "3.775.0", + "@smithy/types": "^4.2.0", + "tslib": "^2.6.2" + }, + "engines": { + "node": ">=18.0.0" + } + }, + "node_modules/@aws-sdk/middleware-recursion-detection": { + "version": "3.775.0", + "resolved": "https://registry.npmjs.org/@aws-sdk/middleware-recursion-detection/-/middleware-recursion-detection-3.775.0.tgz", + "integrity": "sha512-GLCzC8D0A0YDG5u3F5U03Vb9j5tcOEFhr8oc6PDk0k0vm5VwtZOE6LvK7hcCSoAB4HXyOUM0sQuXrbaAh9OwXA==", + "license": "Apache-2.0", + "dependencies": { + "@aws-sdk/types": "3.775.0", + "@smithy/protocol-http": "^5.1.0", + "@smithy/types": "^4.2.0", + "tslib": "^2.6.2" + }, + "engines": { + "node": ">=18.0.0" + } + }, + "node_modules/@aws-sdk/middleware-user-agent": { + "version": "3.799.0", + "resolved": "https://registry.npmjs.org/@aws-sdk/middleware-user-agent/-/middleware-user-agent-3.799.0.tgz", + "integrity": "sha512-TropQZanbOTxa+p+Nl4fWkzlRhgFwDfW+Wb6TR3jZN7IXHNlPpgGFpdrgvBExhW/RBhqr+94OsR8Ou58lp3hhA==", + "license": "Apache-2.0", + "dependencies": { + "@aws-sdk/core": "3.799.0", + "@aws-sdk/types": "3.775.0", + 
"@aws-sdk/util-endpoints": "3.787.0", + "@smithy/core": "^3.3.0", + "@smithy/protocol-http": "^5.1.0", + "@smithy/types": "^4.2.0", + "tslib": "^2.6.2" + }, + "engines": { + "node": ">=18.0.0" + } + }, + "node_modules/@aws-sdk/nested-clients": { + "version": "3.803.0", + "resolved": "https://registry.npmjs.org/@aws-sdk/nested-clients/-/nested-clients-3.803.0.tgz", + "integrity": "sha512-wiWiYaFQxK2u37G9IOXuWkHelEbU8ulLxdHpoPf0TSu/1boqLW7fcofuZATAvFcvigQx3oebwO8G4c/mmixTTw==", + "license": "Apache-2.0", + "dependencies": { + "@aws-crypto/sha256-browser": "5.2.0", + "@aws-crypto/sha256-js": "5.2.0", + "@aws-sdk/core": "3.799.0", + "@aws-sdk/middleware-host-header": "3.775.0", + "@aws-sdk/middleware-logger": "3.775.0", + "@aws-sdk/middleware-recursion-detection": "3.775.0", + "@aws-sdk/middleware-user-agent": "3.799.0", + "@aws-sdk/region-config-resolver": "3.775.0", + "@aws-sdk/types": "3.775.0", + "@aws-sdk/util-endpoints": "3.787.0", + "@aws-sdk/util-user-agent-browser": "3.775.0", + "@aws-sdk/util-user-agent-node": "3.799.0", + "@smithy/config-resolver": "^4.1.0", + "@smithy/core": "^3.3.0", + "@smithy/fetch-http-handler": "^5.0.2", + "@smithy/hash-node": "^4.0.2", + "@smithy/invalid-dependency": "^4.0.2", + "@smithy/middleware-content-length": "^4.0.2", + "@smithy/middleware-endpoint": "^4.1.1", + "@smithy/middleware-retry": "^4.1.2", + "@smithy/middleware-serde": "^4.0.3", + "@smithy/middleware-stack": "^4.0.2", + "@smithy/node-config-provider": "^4.0.2", + "@smithy/node-http-handler": "^4.0.4", + "@smithy/protocol-http": "^5.1.0", + "@smithy/smithy-client": "^4.2.1", + "@smithy/types": "^4.2.0", + "@smithy/url-parser": "^4.0.2", + "@smithy/util-base64": "^4.0.0", + "@smithy/util-body-length-browser": "^4.0.0", + "@smithy/util-body-length-node": "^4.0.0", + "@smithy/util-defaults-mode-browser": "^4.0.9", + "@smithy/util-defaults-mode-node": "^4.0.9", + "@smithy/util-endpoints": "^3.0.2", + "@smithy/util-middleware": "^4.0.2", + "@smithy/util-retry": "^4.0.3", + 
"@smithy/util-utf8": "^4.0.0", + "tslib": "^2.6.2" + }, + "engines": { + "node": ">=18.0.0" + } + }, + "node_modules/@aws-sdk/region-config-resolver": { + "version": "3.775.0", + "resolved": "https://registry.npmjs.org/@aws-sdk/region-config-resolver/-/region-config-resolver-3.775.0.tgz", + "integrity": "sha512-40iH3LJjrQS3LKUJAl7Wj0bln7RFPEvUYKFxtP8a+oKFDO0F65F52xZxIJbPn6sHkxWDAnZlGgdjZXM3p2g5wQ==", + "license": "Apache-2.0", + "dependencies": { + "@aws-sdk/types": "3.775.0", + "@smithy/node-config-provider": "^4.0.2", + "@smithy/types": "^4.2.0", + "@smithy/util-config-provider": "^4.0.0", + "@smithy/util-middleware": "^4.0.2", + "tslib": "^2.6.2" + }, + "engines": { + "node": ">=18.0.0" + } + }, + "node_modules/@aws-sdk/token-providers": { + "version": "3.803.0", + "resolved": "https://registry.npmjs.org/@aws-sdk/token-providers/-/token-providers-3.803.0.tgz", + "integrity": "sha512-lDbMgVjWWEPT7a6lLaAEPPljwOeLTjPX2sJ7MoDICpQotg4Yd8cQfX3nqScSyLAGSc7Rq/21UPnPoij/E0K3lg==", + "license": "Apache-2.0", + "dependencies": { + "@aws-sdk/nested-clients": "3.803.0", + "@aws-sdk/types": "3.775.0", + "@smithy/property-provider": "^4.0.2", + "@smithy/shared-ini-file-loader": "^4.0.2", + "@smithy/types": "^4.2.0", + "tslib": "^2.6.2" + }, + "engines": { + "node": ">=18.0.0" + } + }, + "node_modules/@aws-sdk/types": { + "version": "3.775.0", + "resolved": "https://registry.npmjs.org/@aws-sdk/types/-/types-3.775.0.tgz", + "integrity": "sha512-ZoGKwa4C9fC9Av6bdfqcW6Ix5ot05F/S4VxWR2nHuMv7hzfmAjTOcUiWT7UR4hM/U0whf84VhDtXN/DWAk52KA==", + "license": "Apache-2.0", + "dependencies": { + "@smithy/types": "^4.2.0", + "tslib": "^2.6.2" + }, + "engines": { + "node": ">=18.0.0" + } + }, + "node_modules/@aws-sdk/util-dynamodb": { + "version": "3.803.0", + "resolved": "https://registry.npmjs.org/@aws-sdk/util-dynamodb/-/util-dynamodb-3.803.0.tgz", + "integrity": "sha512-QiXvurnve8xIm41Zf/jNXwcYotDX3KZbHcsTaJ7ILhyFomqCjJ6bjLcCRdfndG600N5ua6AEK2XGw1luyBQxig==", + "license": "Apache-2.0", + 
"dependencies": { + "tslib": "^2.6.2" + }, + "engines": { + "node": ">=18.0.0" + }, + "peerDependencies": { + "@aws-sdk/client-dynamodb": "^3.803.0" + } + }, + "node_modules/@aws-sdk/util-endpoints": { + "version": "3.787.0", + "resolved": "https://registry.npmjs.org/@aws-sdk/util-endpoints/-/util-endpoints-3.787.0.tgz", + "integrity": "sha512-fd3zkiOkwnbdbN0Xp9TsP5SWrmv0SpT70YEdbb8wAj2DWQwiCmFszaSs+YCvhoCdmlR3Wl9Spu0pGpSAGKeYvQ==", + "license": "Apache-2.0", + "dependencies": { + "@aws-sdk/types": "3.775.0", + "@smithy/types": "^4.2.0", + "@smithy/util-endpoints": "^3.0.2", + "tslib": "^2.6.2" + }, + "engines": { + "node": ">=18.0.0" + } + }, + "node_modules/@aws-sdk/util-locate-window": { + "version": "3.723.0", + "resolved": "https://registry.npmjs.org/@aws-sdk/util-locate-window/-/util-locate-window-3.723.0.tgz", + "integrity": "sha512-Yf2CS10BqK688DRsrKI/EO6B8ff5J86NXe4C+VCysK7UOgN0l1zOTeTukZ3H8Q9tYYX3oaF1961o8vRkFm7Nmw==", + "license": "Apache-2.0", + "dependencies": { + "tslib": "^2.6.2" + }, + "engines": { + "node": ">=18.0.0" + } + }, + "node_modules/@aws-sdk/util-user-agent-browser": { + "version": "3.775.0", + "resolved": "https://registry.npmjs.org/@aws-sdk/util-user-agent-browser/-/util-user-agent-browser-3.775.0.tgz", + "integrity": "sha512-txw2wkiJmZKVdDbscK7VBK+u+TJnRtlUjRTLei+elZg2ADhpQxfVAQl436FUeIv6AhB/oRHW6/K/EAGXUSWi0A==", + "license": "Apache-2.0", + "dependencies": { + "@aws-sdk/types": "3.775.0", + "@smithy/types": "^4.2.0", + "bowser": "^2.11.0", + "tslib": "^2.6.2" + } + }, + "node_modules/@aws-sdk/util-user-agent-node": { + "version": "3.799.0", + "resolved": "https://registry.npmjs.org/@aws-sdk/util-user-agent-node/-/util-user-agent-node-3.799.0.tgz", + "integrity": "sha512-iXBk38RbIWPF5Nq9O4AnktORAzXovSVqWYClvS1qbE7ILsnTLJbagU9HlU25O2iV5COVh1qZkwuP5NHQ2yTEyw==", + "license": "Apache-2.0", + "dependencies": { + "@aws-sdk/middleware-user-agent": "3.799.0", + "@aws-sdk/types": "3.775.0", + "@smithy/node-config-provider": "^4.0.2", + 
"@smithy/types": "^4.2.0", + "tslib": "^2.6.2" + }, + "engines": { + "node": ">=18.0.0" + }, + "peerDependencies": { + "aws-crt": ">=1.0.0" + }, + "peerDependenciesMeta": { + "aws-crt": { + "optional": true + } + } + }, + "node_modules/@smithy/abort-controller": { + "version": "4.0.2", + "resolved": "https://registry.npmjs.org/@smithy/abort-controller/-/abort-controller-4.0.2.tgz", + "integrity": "sha512-Sl/78VDtgqKxN2+1qduaVE140XF+Xg+TafkncspwM4jFP/LHr76ZHmIY/y3V1M0mMLNk+Je6IGbzxy23RSToMw==", + "license": "Apache-2.0", + "dependencies": { + "@smithy/types": "^4.2.0", + "tslib": "^2.6.2" + }, + "engines": { + "node": ">=18.0.0" + } + }, + "node_modules/@smithy/config-resolver": { + "version": "4.1.0", + "resolved": "https://registry.npmjs.org/@smithy/config-resolver/-/config-resolver-4.1.0.tgz", + "integrity": "sha512-8smPlwhga22pwl23fM5ew4T9vfLUCeFXlcqNOCD5M5h8VmNPNUE9j6bQSuRXpDSV11L/E/SwEBQuW8hr6+nS1A==", + "license": "Apache-2.0", + "dependencies": { + "@smithy/node-config-provider": "^4.0.2", + "@smithy/types": "^4.2.0", + "@smithy/util-config-provider": "^4.0.0", + "@smithy/util-middleware": "^4.0.2", + "tslib": "^2.6.2" + }, + "engines": { + "node": ">=18.0.0" + } + }, + "node_modules/@smithy/core": { + "version": "3.3.1", + "resolved": "https://registry.npmjs.org/@smithy/core/-/core-3.3.1.tgz", + "integrity": "sha512-W7AppgQD3fP1aBmo8wWo0id5zeR2/aYRy067vZsDVaa6v/mdhkg6DxXwEVuSPjZl+ZnvWAQbUMCd5ckw38+tHQ==", + "license": "Apache-2.0", + "dependencies": { + "@smithy/middleware-serde": "^4.0.3", + "@smithy/protocol-http": "^5.1.0", + "@smithy/types": "^4.2.0", + "@smithy/util-body-length-browser": "^4.0.0", + "@smithy/util-middleware": "^4.0.2", + "@smithy/util-stream": "^4.2.0", + "@smithy/util-utf8": "^4.0.0", + "tslib": "^2.6.2" + }, + "engines": { + "node": ">=18.0.0" + } + }, + "node_modules/@smithy/credential-provider-imds": { + "version": "4.0.2", + "resolved": 
"https://registry.npmjs.org/@smithy/credential-provider-imds/-/credential-provider-imds-4.0.2.tgz", + "integrity": "sha512-32lVig6jCaWBHnY+OEQ6e6Vnt5vDHaLiydGrwYMW9tPqO688hPGTYRamYJ1EptxEC2rAwJrHWmPoKRBl4iTa8w==", + "license": "Apache-2.0", + "dependencies": { + "@smithy/node-config-provider": "^4.0.2", + "@smithy/property-provider": "^4.0.2", + "@smithy/types": "^4.2.0", + "@smithy/url-parser": "^4.0.2", + "tslib": "^2.6.2" + }, + "engines": { + "node": ">=18.0.0" + } + }, + "node_modules/@smithy/fetch-http-handler": { + "version": "5.0.2", + "resolved": "https://registry.npmjs.org/@smithy/fetch-http-handler/-/fetch-http-handler-5.0.2.tgz", + "integrity": "sha512-+9Dz8sakS9pe7f2cBocpJXdeVjMopUDLgZs1yWeu7h++WqSbjUYv/JAJwKwXw1HV6gq1jyWjxuyn24E2GhoEcQ==", + "license": "Apache-2.0", + "dependencies": { + "@smithy/protocol-http": "^5.1.0", + "@smithy/querystring-builder": "^4.0.2", + "@smithy/types": "^4.2.0", + "@smithy/util-base64": "^4.0.0", + "tslib": "^2.6.2" + }, + "engines": { + "node": ">=18.0.0" + } + }, + "node_modules/@smithy/hash-node": { + "version": "4.0.2", + "resolved": "https://registry.npmjs.org/@smithy/hash-node/-/hash-node-4.0.2.tgz", + "integrity": "sha512-VnTpYPnRUE7yVhWozFdlxcYknv9UN7CeOqSrMH+V877v4oqtVYuoqhIhtSjmGPvYrYnAkaM61sLMKHvxL138yg==", + "license": "Apache-2.0", + "dependencies": { + "@smithy/types": "^4.2.0", + "@smithy/util-buffer-from": "^4.0.0", + "@smithy/util-utf8": "^4.0.0", + "tslib": "^2.6.2" + }, + "engines": { + "node": ">=18.0.0" + } + }, + "node_modules/@smithy/invalid-dependency": { + "version": "4.0.2", + "resolved": "https://registry.npmjs.org/@smithy/invalid-dependency/-/invalid-dependency-4.0.2.tgz", + "integrity": "sha512-GatB4+2DTpgWPday+mnUkoumP54u/MDM/5u44KF9hIu8jF0uafZtQLcdfIKkIcUNuF/fBojpLEHZS/56JqPeXQ==", + "license": "Apache-2.0", + "dependencies": { + "@smithy/types": "^4.2.0", + "tslib": "^2.6.2" + }, + "engines": { + "node": ">=18.0.0" + } + }, + "node_modules/@smithy/is-array-buffer": { + "version": "4.0.0", 
+ "resolved": "https://registry.npmjs.org/@smithy/is-array-buffer/-/is-array-buffer-4.0.0.tgz", + "integrity": "sha512-saYhF8ZZNoJDTvJBEWgeBccCg+yvp1CX+ed12yORU3NilJScfc6gfch2oVb4QgxZrGUx3/ZJlb+c/dJbyupxlw==", + "license": "Apache-2.0", + "dependencies": { + "tslib": "^2.6.2" + }, + "engines": { + "node": ">=18.0.0" + } + }, + "node_modules/@smithy/middleware-content-length": { + "version": "4.0.2", + "resolved": "https://registry.npmjs.org/@smithy/middleware-content-length/-/middleware-content-length-4.0.2.tgz", + "integrity": "sha512-hAfEXm1zU+ELvucxqQ7I8SszwQ4znWMbNv6PLMndN83JJN41EPuS93AIyh2N+gJ6x8QFhzSO6b7q2e6oClDI8A==", + "license": "Apache-2.0", + "dependencies": { + "@smithy/protocol-http": "^5.1.0", + "@smithy/types": "^4.2.0", + "tslib": "^2.6.2" + }, + "engines": { + "node": ">=18.0.0" + } + }, + "node_modules/@smithy/middleware-endpoint": { + "version": "4.1.2", + "resolved": "https://registry.npmjs.org/@smithy/middleware-endpoint/-/middleware-endpoint-4.1.2.tgz", + "integrity": "sha512-EqOy3xaEGQpsKxLlzYstDRJ8eY90CbyBP4cl+w7r45mE60S8YliyL9AgWsdWcyNiB95E2PMqHBEv67nNl1zLfg==", + "license": "Apache-2.0", + "dependencies": { + "@smithy/core": "^3.3.1", + "@smithy/middleware-serde": "^4.0.3", + "@smithy/node-config-provider": "^4.0.2", + "@smithy/shared-ini-file-loader": "^4.0.2", + "@smithy/types": "^4.2.0", + "@smithy/url-parser": "^4.0.2", + "@smithy/util-middleware": "^4.0.2", + "tslib": "^2.6.2" + }, + "engines": { + "node": ">=18.0.0" + } + }, + "node_modules/@smithy/middleware-retry": { + "version": "4.1.3", + "resolved": "https://registry.npmjs.org/@smithy/middleware-retry/-/middleware-retry-4.1.3.tgz", + "integrity": "sha512-AsJtI9KiFoEGAhcEKZyzzPfrszAQGcf4HSYKmenz0WGx/6YNvoPPv4OSGfZTCsDmgPHv4pXzxE+7QV7jcGWNKw==", + "license": "Apache-2.0", + "dependencies": { + "@smithy/node-config-provider": "^4.0.2", + "@smithy/protocol-http": "^5.1.0", + "@smithy/service-error-classification": "^4.0.3", + "@smithy/smithy-client": "^4.2.2", + "@smithy/types": 
"^4.2.0", + "@smithy/util-middleware": "^4.0.2", + "@smithy/util-retry": "^4.0.3", + "tslib": "^2.6.2", + "uuid": "^9.0.1" + }, + "engines": { + "node": ">=18.0.0" + } + }, + "node_modules/@smithy/middleware-serde": { + "version": "4.0.3", + "resolved": "https://registry.npmjs.org/@smithy/middleware-serde/-/middleware-serde-4.0.3.tgz", + "integrity": "sha512-rfgDVrgLEVMmMn0BI8O+8OVr6vXzjV7HZj57l0QxslhzbvVfikZbVfBVthjLHqib4BW44QhcIgJpvebHlRaC9A==", + "license": "Apache-2.0", + "dependencies": { + "@smithy/types": "^4.2.0", + "tslib": "^2.6.2" + }, + "engines": { + "node": ">=18.0.0" + } + }, + "node_modules/@smithy/middleware-stack": { + "version": "4.0.2", + "resolved": "https://registry.npmjs.org/@smithy/middleware-stack/-/middleware-stack-4.0.2.tgz", + "integrity": "sha512-eSPVcuJJGVYrFYu2hEq8g8WWdJav3sdrI4o2c6z/rjnYDd3xH9j9E7deZQCzFn4QvGPouLngH3dQ+QVTxv5bOQ==", + "license": "Apache-2.0", + "dependencies": { + "@smithy/types": "^4.2.0", + "tslib": "^2.6.2" + }, + "engines": { + "node": ">=18.0.0" + } + }, + "node_modules/@smithy/node-config-provider": { + "version": "4.0.2", + "resolved": "https://registry.npmjs.org/@smithy/node-config-provider/-/node-config-provider-4.0.2.tgz", + "integrity": "sha512-WgCkILRZfJwJ4Da92a6t3ozN/zcvYyJGUTmfGbgS/FkCcoCjl7G4FJaCDN1ySdvLvemnQeo25FdkyMSTSwulsw==", + "license": "Apache-2.0", + "dependencies": { + "@smithy/property-provider": "^4.0.2", + "@smithy/shared-ini-file-loader": "^4.0.2", + "@smithy/types": "^4.2.0", + "tslib": "^2.6.2" + }, + "engines": { + "node": ">=18.0.0" + } + }, + "node_modules/@smithy/node-http-handler": { + "version": "4.0.4", + "resolved": "https://registry.npmjs.org/@smithy/node-http-handler/-/node-http-handler-4.0.4.tgz", + "integrity": "sha512-/mdqabuAT3o/ihBGjL94PUbTSPSRJ0eeVTdgADzow0wRJ0rN4A27EOrtlK56MYiO1fDvlO3jVTCxQtQmK9dZ1g==", + "license": "Apache-2.0", + "dependencies": { + "@smithy/abort-controller": "^4.0.2", + "@smithy/protocol-http": "^5.1.0", + "@smithy/querystring-builder": "^4.0.2", + 
"@smithy/types": "^4.2.0", + "tslib": "^2.6.2" + }, + "engines": { + "node": ">=18.0.0" + } + }, + "node_modules/@smithy/property-provider": { + "version": "4.0.2", + "resolved": "https://registry.npmjs.org/@smithy/property-provider/-/property-provider-4.0.2.tgz", + "integrity": "sha512-wNRoQC1uISOuNc2s4hkOYwYllmiyrvVXWMtq+TysNRVQaHm4yoafYQyjN/goYZS+QbYlPIbb/QRjaUZMuzwQ7A==", + "license": "Apache-2.0", + "dependencies": { + "@smithy/types": "^4.2.0", + "tslib": "^2.6.2" + }, + "engines": { + "node": ">=18.0.0" + } + }, + "node_modules/@smithy/protocol-http": { + "version": "5.1.0", + "resolved": "https://registry.npmjs.org/@smithy/protocol-http/-/protocol-http-5.1.0.tgz", + "integrity": "sha512-KxAOL1nUNw2JTYrtviRRjEnykIDhxc84qMBzxvu1MUfQfHTuBlCG7PA6EdVwqpJjH7glw7FqQoFxUJSyBQgu7g==", + "license": "Apache-2.0", + "dependencies": { + "@smithy/types": "^4.2.0", + "tslib": "^2.6.2" + }, + "engines": { + "node": ">=18.0.0" + } + }, + "node_modules/@smithy/querystring-builder": { + "version": "4.0.2", + "resolved": "https://registry.npmjs.org/@smithy/querystring-builder/-/querystring-builder-4.0.2.tgz", + "integrity": "sha512-NTOs0FwHw1vimmQM4ebh+wFQvOwkEf/kQL6bSM1Lock+Bv4I89B3hGYoUEPkmvYPkDKyp5UdXJYu+PoTQ3T31Q==", + "license": "Apache-2.0", + "dependencies": { + "@smithy/types": "^4.2.0", + "@smithy/util-uri-escape": "^4.0.0", + "tslib": "^2.6.2" + }, + "engines": { + "node": ">=18.0.0" + } + }, + "node_modules/@smithy/querystring-parser": { + "version": "4.0.2", + "resolved": "https://registry.npmjs.org/@smithy/querystring-parser/-/querystring-parser-4.0.2.tgz", + "integrity": "sha512-v6w8wnmZcVXjfVLjxw8qF7OwESD9wnpjp0Dqry/Pod0/5vcEA3qxCr+BhbOHlxS8O+29eLpT3aagxXGwIoEk7Q==", + "license": "Apache-2.0", + "dependencies": { + "@smithy/types": "^4.2.0", + "tslib": "^2.6.2" + }, + "engines": { + "node": ">=18.0.0" + } + }, + "node_modules/@smithy/service-error-classification": { + "version": "4.0.3", + "resolved": 
"https://registry.npmjs.org/@smithy/service-error-classification/-/service-error-classification-4.0.3.tgz", + "integrity": "sha512-FTbcajmltovWMjj3tksDQdD23b2w6gH+A0DYA1Yz3iSpjDj8fmkwy62UnXcWMy4d5YoMoSyLFHMfkEVEzbiN8Q==", + "license": "Apache-2.0", + "dependencies": { + "@smithy/types": "^4.2.0" + }, + "engines": { + "node": ">=18.0.0" + } + }, + "node_modules/@smithy/shared-ini-file-loader": { + "version": "4.0.2", + "resolved": "https://registry.npmjs.org/@smithy/shared-ini-file-loader/-/shared-ini-file-loader-4.0.2.tgz", + "integrity": "sha512-J9/gTWBGVuFZ01oVA6vdb4DAjf1XbDhK6sLsu3OS9qmLrS6KB5ygpeHiM3miIbj1qgSJ96GYszXFWv6ErJ8QEw==", + "license": "Apache-2.0", + "dependencies": { + "@smithy/types": "^4.2.0", + "tslib": "^2.6.2" + }, + "engines": { + "node": ">=18.0.0" + } + }, + "node_modules/@smithy/signature-v4": { + "version": "5.1.0", + "resolved": "https://registry.npmjs.org/@smithy/signature-v4/-/signature-v4-5.1.0.tgz", + "integrity": "sha512-4t5WX60sL3zGJF/CtZsUQTs3UrZEDO2P7pEaElrekbLqkWPYkgqNW1oeiNYC6xXifBnT9dVBOnNQRvOE9riU9w==", + "license": "Apache-2.0", + "dependencies": { + "@smithy/is-array-buffer": "^4.0.0", + "@smithy/protocol-http": "^5.1.0", + "@smithy/types": "^4.2.0", + "@smithy/util-hex-encoding": "^4.0.0", + "@smithy/util-middleware": "^4.0.2", + "@smithy/util-uri-escape": "^4.0.0", + "@smithy/util-utf8": "^4.0.0", + "tslib": "^2.6.2" + }, + "engines": { + "node": ">=18.0.0" + } + }, + "node_modules/@smithy/smithy-client": { + "version": "4.2.2", + "resolved": "https://registry.npmjs.org/@smithy/smithy-client/-/smithy-client-4.2.2.tgz", + "integrity": "sha512-3AnHfsMdq9Wg7+3BeR1HuLWI9+DMA/SoHVpCWq6xSsa52ikNd6nlF/wFzdpHyGtVa+Aji6lMgvwOF4sGcVA7SA==", + "license": "Apache-2.0", + "dependencies": { + "@smithy/core": "^3.3.1", + "@smithy/middleware-endpoint": "^4.1.2", + "@smithy/middleware-stack": "^4.0.2", + "@smithy/protocol-http": "^5.1.0", + "@smithy/types": "^4.2.0", + "@smithy/util-stream": "^4.2.0", + "tslib": "^2.6.2" + }, + "engines": { 
+ "node": ">=18.0.0" + } + }, + "node_modules/@smithy/types": { + "version": "4.2.0", + "resolved": "https://registry.npmjs.org/@smithy/types/-/types-4.2.0.tgz", + "integrity": "sha512-7eMk09zQKCO+E/ivsjQv+fDlOupcFUCSC/L2YUPgwhvowVGWbPQHjEFcmjt7QQ4ra5lyowS92SV53Zc6XD4+fg==", + "license": "Apache-2.0", + "dependencies": { + "tslib": "^2.6.2" + }, + "engines": { + "node": ">=18.0.0" + } + }, + "node_modules/@smithy/url-parser": { + "version": "4.0.2", + "resolved": "https://registry.npmjs.org/@smithy/url-parser/-/url-parser-4.0.2.tgz", + "integrity": "sha512-Bm8n3j2ScqnT+kJaClSVCMeiSenK6jVAzZCNewsYWuZtnBehEz4r2qP0riZySZVfzB+03XZHJeqfmJDkeeSLiQ==", + "license": "Apache-2.0", + "dependencies": { + "@smithy/querystring-parser": "^4.0.2", + "@smithy/types": "^4.2.0", + "tslib": "^2.6.2" + }, + "engines": { + "node": ">=18.0.0" + } + }, + "node_modules/@smithy/util-base64": { + "version": "4.0.0", + "resolved": "https://registry.npmjs.org/@smithy/util-base64/-/util-base64-4.0.0.tgz", + "integrity": "sha512-CvHfCmO2mchox9kjrtzoHkWHxjHZzaFojLc8quxXY7WAAMAg43nuxwv95tATVgQFNDwd4M9S1qFzj40Ul41Kmg==", + "license": "Apache-2.0", + "dependencies": { + "@smithy/util-buffer-from": "^4.0.0", + "@smithy/util-utf8": "^4.0.0", + "tslib": "^2.6.2" + }, + "engines": { + "node": ">=18.0.0" + } + }, + "node_modules/@smithy/util-body-length-browser": { + "version": "4.0.0", + "resolved": "https://registry.npmjs.org/@smithy/util-body-length-browser/-/util-body-length-browser-4.0.0.tgz", + "integrity": "sha512-sNi3DL0/k64/LO3A256M+m3CDdG6V7WKWHdAiBBMUN8S3hK3aMPhwnPik2A/a2ONN+9doY9UxaLfgqsIRg69QA==", + "license": "Apache-2.0", + "dependencies": { + "tslib": "^2.6.2" + }, + "engines": { + "node": ">=18.0.0" + } + }, + "node_modules/@smithy/util-body-length-node": { + "version": "4.0.0", + "resolved": "https://registry.npmjs.org/@smithy/util-body-length-node/-/util-body-length-node-4.0.0.tgz", + "integrity": 
"sha512-q0iDP3VsZzqJyje8xJWEJCNIu3lktUGVoSy1KB0UWym2CL1siV3artm+u1DFYTLejpsrdGyCSWBdGNjJzfDPjg==", + "license": "Apache-2.0", + "dependencies": { + "tslib": "^2.6.2" + }, + "engines": { + "node": ">=18.0.0" + } + }, + "node_modules/@smithy/util-buffer-from": { + "version": "4.0.0", + "resolved": "https://registry.npmjs.org/@smithy/util-buffer-from/-/util-buffer-from-4.0.0.tgz", + "integrity": "sha512-9TOQ7781sZvddgO8nxueKi3+yGvkY35kotA0Y6BWRajAv8jjmigQ1sBwz0UX47pQMYXJPahSKEKYFgt+rXdcug==", + "license": "Apache-2.0", + "dependencies": { + "@smithy/is-array-buffer": "^4.0.0", + "tslib": "^2.6.2" + }, + "engines": { + "node": ">=18.0.0" + } + }, + "node_modules/@smithy/util-config-provider": { + "version": "4.0.0", + "resolved": "https://registry.npmjs.org/@smithy/util-config-provider/-/util-config-provider-4.0.0.tgz", + "integrity": "sha512-L1RBVzLyfE8OXH+1hsJ8p+acNUSirQnWQ6/EgpchV88G6zGBTDPdXiiExei6Z1wR2RxYvxY/XLw6AMNCCt8H3w==", + "license": "Apache-2.0", + "dependencies": { + "tslib": "^2.6.2" + }, + "engines": { + "node": ">=18.0.0" + } + }, + "node_modules/@smithy/util-defaults-mode-browser": { + "version": "4.0.10", + "resolved": "https://registry.npmjs.org/@smithy/util-defaults-mode-browser/-/util-defaults-mode-browser-4.0.10.tgz", + "integrity": "sha512-2k6fgUNOZ1Rn0gEjvGPGrDEINLG8qSBHsN7xlkkbO+fnHJ36BQPDzhFfMmYSDS8AgzoygqQiDOQ+6Hp2vBTUdA==", + "license": "Apache-2.0", + "dependencies": { + "@smithy/property-provider": "^4.0.2", + "@smithy/smithy-client": "^4.2.2", + "@smithy/types": "^4.2.0", + "bowser": "^2.11.0", + "tslib": "^2.6.2" + }, + "engines": { + "node": ">=18.0.0" + } + }, + "node_modules/@smithy/util-defaults-mode-node": { + "version": "4.0.10", + "resolved": "https://registry.npmjs.org/@smithy/util-defaults-mode-node/-/util-defaults-mode-node-4.0.10.tgz", + "integrity": "sha512-2XR1WRglLVmoIFts7bODUTgBdVyvkfKNkydHrlsI5VxW9q3s1hnJCuY+f1OHzvj5ue23q4vydM2fjrMjf2HSdQ==", + "license": "Apache-2.0", + "dependencies": { + "@smithy/config-resolver": 
"^4.1.0", + "@smithy/credential-provider-imds": "^4.0.2", + "@smithy/node-config-provider": "^4.0.2", + "@smithy/property-provider": "^4.0.2", + "@smithy/smithy-client": "^4.2.2", + "@smithy/types": "^4.2.0", + "tslib": "^2.6.2" + }, + "engines": { + "node": ">=18.0.0" + } + }, + "node_modules/@smithy/util-endpoints": { + "version": "3.0.2", + "resolved": "https://registry.npmjs.org/@smithy/util-endpoints/-/util-endpoints-3.0.2.tgz", + "integrity": "sha512-6QSutU5ZyrpNbnd51zRTL7goojlcnuOB55+F9VBD+j8JpRY50IGamsjlycrmpn8PQkmJucFW8A0LSfXj7jjtLQ==", + "license": "Apache-2.0", + "dependencies": { + "@smithy/node-config-provider": "^4.0.2", + "@smithy/types": "^4.2.0", + "tslib": "^2.6.2" + }, + "engines": { + "node": ">=18.0.0" + } + }, + "node_modules/@smithy/util-hex-encoding": { + "version": "4.0.0", + "resolved": "https://registry.npmjs.org/@smithy/util-hex-encoding/-/util-hex-encoding-4.0.0.tgz", + "integrity": "sha512-Yk5mLhHtfIgW2W2WQZWSg5kuMZCVbvhFmC7rV4IO2QqnZdbEFPmQnCcGMAX2z/8Qj3B9hYYNjZOhWym+RwhePw==", + "license": "Apache-2.0", + "dependencies": { + "tslib": "^2.6.2" + }, + "engines": { + "node": ">=18.0.0" + } + }, + "node_modules/@smithy/util-middleware": { + "version": "4.0.2", + "resolved": "https://registry.npmjs.org/@smithy/util-middleware/-/util-middleware-4.0.2.tgz", + "integrity": "sha512-6GDamTGLuBQVAEuQ4yDQ+ti/YINf/MEmIegrEeg7DdB/sld8BX1lqt9RRuIcABOhAGTA50bRbPzErez7SlDtDQ==", + "license": "Apache-2.0", + "dependencies": { + "@smithy/types": "^4.2.0", + "tslib": "^2.6.2" + }, + "engines": { + "node": ">=18.0.0" + } + }, + "node_modules/@smithy/util-retry": { + "version": "4.0.3", + "resolved": "https://registry.npmjs.org/@smithy/util-retry/-/util-retry-4.0.3.tgz", + "integrity": "sha512-DPuYjZQDXmKr/sNvy9Spu8R/ESa2e22wXZzSAY6NkjOLj6spbIje/Aq8rT97iUMdDj0qHMRIe+bTxvlU74d9Ng==", + "license": "Apache-2.0", + "dependencies": { + "@smithy/service-error-classification": "^4.0.3", + "@smithy/types": "^4.2.0", + "tslib": "^2.6.2" + }, + "engines": { + 
"node": ">=18.0.0" + } + }, + "node_modules/@smithy/util-stream": { + "version": "4.2.0", + "resolved": "https://registry.npmjs.org/@smithy/util-stream/-/util-stream-4.2.0.tgz", + "integrity": "sha512-Vj1TtwWnuWqdgQI6YTUF5hQ/0jmFiOYsc51CSMgj7QfyO+RF4EnT2HNjoviNlOOmgzgvf3f5yno+EiC4vrnaWQ==", + "license": "Apache-2.0", + "dependencies": { + "@smithy/fetch-http-handler": "^5.0.2", + "@smithy/node-http-handler": "^4.0.4", + "@smithy/types": "^4.2.0", + "@smithy/util-base64": "^4.0.0", + "@smithy/util-buffer-from": "^4.0.0", + "@smithy/util-hex-encoding": "^4.0.0", + "@smithy/util-utf8": "^4.0.0", + "tslib": "^2.6.2" + }, + "engines": { + "node": ">=18.0.0" + } + }, + "node_modules/@smithy/util-uri-escape": { + "version": "4.0.0", + "resolved": "https://registry.npmjs.org/@smithy/util-uri-escape/-/util-uri-escape-4.0.0.tgz", + "integrity": "sha512-77yfbCbQMtgtTylO9itEAdpPXSog3ZxMe09AEhm0dU0NLTalV70ghDZFR+Nfi1C60jnJoh/Re4090/DuZh2Omg==", + "license": "Apache-2.0", + "dependencies": { + "tslib": "^2.6.2" + }, + "engines": { + "node": ">=18.0.0" + } + }, + "node_modules/@smithy/util-utf8": { + "version": "4.0.0", + "resolved": "https://registry.npmjs.org/@smithy/util-utf8/-/util-utf8-4.0.0.tgz", + "integrity": "sha512-b+zebfKCfRdgNJDknHCob3O7FpeYQN6ZG6YLExMcasDHsCXlsXCEuiPZeLnJLpwa5dvPetGlnGCiMHuLwGvFow==", + "license": "Apache-2.0", + "dependencies": { + "@smithy/util-buffer-from": "^4.0.0", + "tslib": "^2.6.2" + }, + "engines": { + "node": ">=18.0.0" + } + }, + "node_modules/@smithy/util-waiter": { + "version": "4.0.3", + "resolved": "https://registry.npmjs.org/@smithy/util-waiter/-/util-waiter-4.0.3.tgz", + "integrity": "sha512-JtaY3FxmD+te+KSI2FJuEcfNC9T/DGGVf551babM7fAaXhjJUt7oSYurH1Devxd2+BOSUACCgt3buinx4UnmEA==", + "license": "Apache-2.0", + "dependencies": { + "@smithy/abort-controller": "^4.0.2", + "@smithy/types": "^4.2.0", + "tslib": "^2.6.2" + }, + "engines": { + "node": ">=18.0.0" + } + }, + "node_modules/@types/uuid": { + "version": "9.0.8", + "resolved": 
"https://registry.npmjs.org/@types/uuid/-/uuid-9.0.8.tgz", + "integrity": "sha512-jg+97EGIcY9AGHJJRaaPVgetKDsrTgbRjQ5Msgjh/DQKEFl0DtyRr/VCOyD1T2R1MNeWPK/u7JoGhlDZnKBAfA==", + "license": "MIT" + }, + "node_modules/bowser": { + "version": "2.11.0", + "resolved": "https://registry.npmjs.org/bowser/-/bowser-2.11.0.tgz", + "integrity": "sha512-AlcaJBi/pqqJBIQ8U9Mcpc9i8Aqxn88Skv5d+xBX006BY5u8N3mGLHa5Lgppa7L/HfwgwLgZ6NYs+Ag6uUmJRA==", + "license": "MIT" + }, + "node_modules/fast-xml-parser": { + "version": "4.4.1", + "resolved": "https://registry.npmjs.org/fast-xml-parser/-/fast-xml-parser-4.4.1.tgz", + "integrity": "sha512-xkjOecfnKGkSsOwtZ5Pz7Us/T6mrbPQrq0nh+aCO5V9nk5NLWmasAHumTKjiPJPWANe+kAZ84Jc8ooJkzZ88Sw==", + "funding": [ + { + "type": "github", + "url": "https://github.com/sponsors/NaturalIntelligence" + }, + { + "type": "paypal", + "url": "https://paypal.me/naturalintelligence" + } + ], + "license": "MIT", + "dependencies": { + "strnum": "^1.0.5" + }, + "bin": { + "fxparser": "src/cli/cli.js" + } + }, + "node_modules/mnemonist": { + "version": "0.38.3", + "resolved": "https://registry.npmjs.org/mnemonist/-/mnemonist-0.38.3.tgz", + "integrity": "sha512-2K9QYubXx/NAjv4VLq1d1Ly8pWNC5L3BrixtdkyTegXWJIqY+zLNDhhX/A+ZwWt70tB1S8H4BE8FLYEFyNoOBw==", + "license": "MIT", + "dependencies": { + "obliterator": "^1.6.1" + } + }, + "node_modules/obliterator": { + "version": "1.6.1", + "resolved": "https://registry.npmjs.org/obliterator/-/obliterator-1.6.1.tgz", + "integrity": "sha512-9WXswnqINnnhOG/5SLimUlzuU1hFJUc8zkwyD59Sd+dPOMf05PmnYG/d6Q7HZ+KmgkZJa1PxRso6QdM3sTNHig==", + "license": "MIT" + }, + "node_modules/strnum": { + "version": "1.1.2", + "resolved": "https://registry.npmjs.org/strnum/-/strnum-1.1.2.tgz", + "integrity": "sha512-vrN+B7DBIoTTZjnPNewwhx6cBA/H+IS7rfW68n7XxC1y7uoiGQBxaKzqucGUgavX15dJgiGztLJ8vxuEzwqBdA==", + "funding": [ + { + "type": "github", + "url": "https://github.com/sponsors/NaturalIntelligence" + } + ], + "license": "MIT" + }, + "node_modules/tslib": { 
+ "version": "2.8.1", + "resolved": "https://registry.npmjs.org/tslib/-/tslib-2.8.1.tgz", + "integrity": "sha512-oJFu94HQb+KVduSUQL7wnpmqnfmLsOA/nAh6b6EH0wCEoK0/mPeXU6c3wKDV83MkOuHPRHtSXKKU99IBazS/2w==", + "license": "0BSD" + }, + "node_modules/uuid": { + "version": "9.0.1", + "resolved": "https://registry.npmjs.org/uuid/-/uuid-9.0.1.tgz", + "integrity": "sha512-b+1eJOlsR9K8HJpow9Ok3fiWOWSIcIzXodvv0rQjVoOVNpWMpxf1wZNpt4y9h10odCNrqnYp1OBzRktckBe3sA==", + "funding": [ + "https://github.com/sponsors/broofa", + "https://github.com/sponsors/ctavan" + ], + "license": "MIT", + "bin": { + "uuid": "dist/bin/uuid" + } + } + } +} diff --git a/amplify/functions/fetchDocuments/node_modules/@aws-crypto/sha256-browser/CHANGELOG.md b/amplify/functions/fetchDocuments/node_modules/@aws-crypto/sha256-browser/CHANGELOG.md new file mode 100644 index 0000000..e6036f8 --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@aws-crypto/sha256-browser/CHANGELOG.md @@ -0,0 +1,118 @@ +# Change Log + +All notable changes to this project will be documented in this file. +See [Conventional Commits](https://conventionalcommits.org) for commit guidelines. 
+ +# [5.2.0](https://github.com/aws/aws-sdk-js-crypto-helpers/compare/v5.1.0...v5.2.0) (2023-10-16) + +### Features + +- support ESM artifacts in all packages ([#752](https://github.com/aws/aws-sdk-js-crypto-helpers/issues/752)) ([e930ffb](https://github.com/aws/aws-sdk-js-crypto-helpers/commit/e930ffba5cfef66dd242049e7d514ced232c1e3b)) + +# [5.1.0](https://github.com/aws/aws-sdk-js-crypto-helpers/compare/v5.0.0...v5.1.0) (2023-09-22) + +### Bug Fixes + +- Update tsc to 2.x ([#735](https://github.com/aws/aws-sdk-js-crypto-helpers/issues/735)) ([782e0de](https://github.com/aws/aws-sdk-js-crypto-helpers/commit/782e0de9f5fef41f694130580a69d940894b6b8c)) + +### Features + +- Use @smithy/util-utf8 ([#730](https://github.com/aws/aws-sdk-js-crypto-helpers/issues/730)) ([00fb851](https://github.com/aws/aws-sdk-js-crypto-helpers/commit/00fb851ca3559d5a1f370f9256814de1210826b8)), closes [#699](https://github.com/aws/aws-sdk-js-crypto-helpers/issues/699) + +# [5.0.0](https://github.com/aws/aws-sdk-js-crypto-helpers/compare/v4.0.1...v5.0.0) (2023-07-13) + +- feat!: drop support for IE 11 (#629) ([6c49fb6](https://github.com/aws/aws-sdk-js-crypto-helpers/commit/6c49fb6c1b1f18bbff02dbd77a37a21bdb40c959)), closes [#629](https://github.com/aws/aws-sdk-js-crypto-helpers/issues/629) + +### BREAKING CHANGES + +- Remove support for IE11 + +Co-authored-by: texastony <5892063+texastony@users.noreply.github.com> + +# [4.0.0](https://github.com/aws/aws-sdk-js-crypto-helpers/compare/v3.0.0...v4.0.0) (2023-02-20) + +**Note:** Version bump only for package @aws-crypto/sha256-browser + +# [3.0.0](https://github.com/aws/aws-sdk-js-crypto-helpers/compare/v2.0.2...v3.0.0) (2023-01-12) + +### Bug Fixes + +- **docs:** sha256 packages, clarify hmac support ([#455](https://github.com/aws/aws-sdk-js-crypto-helpers/issues/455)) ([1be5043](https://github.com/aws/aws-sdk-js-crypto-helpers/commit/1be5043325991f3f5ccb52a8dd928f004b4d442e)) + +- feat!: replace Hash implementations with Checksum interface 
(#492) ([da43dc0](https://github.com/aws/aws-sdk-js-crypto-helpers/commit/da43dc0fdf669d9ebb5bfb1b1f7c79e46c4aaae1)), closes [#492](https://github.com/aws/aws-sdk-js-crypto-helpers/issues/492) + +### BREAKING CHANGES + +- All classes that implemented `Hash` now implement `Checksum`. + +## [2.0.2](https://github.com/aws/aws-sdk-js-crypto-helpers/compare/v2.0.1...v2.0.2) (2022-09-07) + +### Bug Fixes + +- **#337:** update @aws-sdk/types ([#373](https://github.com/aws/aws-sdk-js-crypto-helpers/issues/373)) ([b26a811](https://github.com/aws/aws-sdk-js-crypto-helpers/commit/b26a811a392f5209c7ec7e57251500d4d78f97ff)), closes [#337](https://github.com/aws/aws-sdk-js-crypto-helpers/issues/337) + +## [2.0.1](https://github.com/aws/aws-sdk-js-crypto-helpers/compare/v2.0.0...v2.0.1) (2021-12-09) + +**Note:** Version bump only for package @aws-crypto/sha256-browser + +# [2.0.0](https://github.com/aws/aws-sdk-js-crypto-helpers/compare/v1.2.2...v2.0.0) (2021-10-25) + +**Note:** Version bump only for package @aws-crypto/sha256-browser + +## [1.2.2](https://github.com/aws/aws-sdk-js-crypto-helpers/compare/v1.2.1...v1.2.2) (2021-10-12) + +**Note:** Version bump only for package @aws-crypto/sha256-browser + +## [1.2.1](https://github.com/aws/aws-sdk-js-crypto-helpers/compare/v1.2.0...v1.2.1) (2021-09-17) + +**Note:** Version bump only for package @aws-crypto/sha256-browser + +# [1.2.0](https://github.com/aws/aws-sdk-js-crypto-helpers/compare/v1.1.1...v1.2.0) (2021-09-17) + +### Features + +- add @aws-crypto/util ([8f489cb](https://github.com/aws/aws-sdk-js-crypto-helpers/commit/8f489cbe4c0e134f826bac66f1bf5172597048b9)) + +## [1.1.1](https://github.com/aws/aws-sdk-js-crypto-helpers/compare/@aws-crypto/sha256-browser@1.1.0...@aws-crypto/sha256-browser@1.1.1) (2021-07-13) + +### Bug Fixes + +- **sha256-browser:** throw errors not string ([#194](https://github.com/aws/aws-sdk-js-crypto-helpers/issues/194)) 
([7fa7ac4](https://github.com/aws/aws-sdk-js-crypto-helpers/commit/7fa7ac445ef7a04dfb1ff479e7114aba045b2b2c)) + +# [1.1.0](https://github.com/aws/aws-sdk-js-crypto-helpers/compare/@aws-crypto/sha256-browser@1.0.0...@aws-crypto/sha256-browser@1.1.0) (2021-01-13) + +### Bug Fixes + +- remove package lock ([6002a5a](https://github.com/aws/aws-sdk-js-crypto-helpers/commit/6002a5ab9218dc8798c19dc205d3eebd3bec5b43)) +- **aws-crypto:** export explicit dependencies on [@aws-types](https://github.com/aws-types) ([6a1873a](https://github.com/aws/aws-sdk-js-crypto-helpers/commit/6a1873a4dcc2aaa4a1338595703cfa7099f17b8c)) +- **deps-dev:** move @aws-sdk/types to devDependencies ([#188](https://github.com/aws/aws-sdk-js-crypto-helpers/issues/188)) ([08efdf4](https://github.com/aws/aws-sdk-js-crypto-helpers/commit/08efdf46dcc612d88c441e29945d787f253ee77d)) + +# [1.0.0](https://github.com/aws/aws-sdk-js-crypto-helpers/compare/@aws-crypto/sha256-browser@1.0.0-alpha.0...@aws-crypto/sha256-browser@1.0.0) (2020-10-22) + +**Note:** Version bump only for package @aws-crypto/sha256-browser + +# [1.0.0-alpha.0](https://github.com/aws/aws-sdk-js-crypto-helpers/compare/@aws-crypto/sha256-browser@0.1.0-preview.4...@aws-crypto/sha256-browser@1.0.0-alpha.0) (2020-02-07) + +**Note:** Version bump only for package @aws-crypto/sha256-browser + +# [0.1.0-preview.4](https://github.com/aws/aws-sdk-js-crypto-helpers/compare/@aws-crypto/sha256-browser@0.1.0-preview.2...@aws-crypto/sha256-browser@0.1.0-preview.4) (2020-01-16) + +### Bug Fixes + +- Changed package.json files to point to the right Git repo ([#9](https://github.com/aws/aws-sdk-js-crypto-helpers/issues/9)) ([028245d](https://github.com/aws/aws-sdk-js-crypto-helpers/commit/028245d72e642ca98d82226afb300eb154503c4a)), closes [#8](https://github.com/aws/aws-sdk-js-crypto-helpers/issues/8) +- es2015.iterable required ([#10](https://github.com/aws/aws-sdk-js-crypto-helpers/issues/10)) 
([6e08d83](https://github.com/aws/aws-sdk-js-crypto-helpers/commit/6e08d83c33667ad8cbeeaaa7cedf1bbe05f79ed8)) +- lerna version maintains package-lock ([#14](https://github.com/aws/aws-sdk-js-crypto-helpers/issues/14)) ([2ef29e1](https://github.com/aws/aws-sdk-js-crypto-helpers/commit/2ef29e13779703a5c9b32e93d18918fcb33b7272)), closes [#13](https://github.com/aws/aws-sdk-js-crypto-helpers/issues/13) + +# [0.1.0-preview.3](https://github.com/aws/aws-sdk-js-crypto-helpers/compare/@aws-crypto/sha256-browser@0.1.0-preview.2...@aws-crypto/sha256-browser@0.1.0-preview.3) (2019-11-15) + +### Bug Fixes + +- Changed package.json files to point to the right Git repo ([#9](https://github.com/aws/aws-sdk-js-crypto-helpers/issues/9)) ([028245d](https://github.com/aws/aws-sdk-js-crypto-helpers/commit/028245d72e642ca98d82226afb300eb154503c4a)), closes [#8](https://github.com/aws/aws-sdk-js-crypto-helpers/issues/8) +- es2015.iterable required ([#10](https://github.com/aws/aws-sdk-js-crypto-helpers/issues/10)) ([6e08d83](https://github.com/aws/aws-sdk-js-crypto-helpers/commit/6e08d83c33667ad8cbeeaaa7cedf1bbe05f79ed8)) +- lerna version maintains package-lock ([#14](https://github.com/aws/aws-sdk-js-crypto-helpers/issues/14)) ([2ef29e1](https://github.com/aws/aws-sdk-js-crypto-helpers/commit/2ef29e13779703a5c9b32e93d18918fcb33b7272)), closes [#13](https://github.com/aws/aws-sdk-js-crypto-helpers/issues/13) + +# [0.1.0-preview.2](https://github.com/aws/aws-javascript-crypto-helpers/compare/@aws-crypto/sha256-browser@0.1.0-preview.1...@aws-crypto/sha256-browser@0.1.0-preview.2) (2019-10-30) + +### Bug Fixes + +- remove /src/ from .npmignore (for sourcemaps) ([#5](https://github.com/aws/aws-javascript-crypto-helpers/issues/5)) ([ec52056](https://github.com/aws/aws-javascript-crypto-helpers/commit/ec52056)) diff --git a/amplify/functions/fetchDocuments/node_modules/@aws-crypto/sha256-browser/LICENSE b/amplify/functions/fetchDocuments/node_modules/@aws-crypto/sha256-browser/LICENSE new 
file mode 100644 index 0000000..d645695 --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@aws-crypto/sha256-browser/LICENSE @@ -0,0 +1,202 @@ + + Apache License + Version 2.0, January 2004 + http://www.apache.org/licenses/ + + TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION + + 1. Definitions. + + "License" shall mean the terms and conditions for use, reproduction, + and distribution as defined by Sections 1 through 9 of this document. + + "Licensor" shall mean the copyright owner or entity authorized by + the copyright owner that is granting the License. + + "Legal Entity" shall mean the union of the acting entity and all + other entities that control, are controlled by, or are under common + control with that entity. For the purposes of this definition, + "control" means (i) the power, direct or indirect, to cause the + direction or management of such entity, whether by contract or + otherwise, or (ii) ownership of fifty percent (50%) or more of the + outstanding shares, or (iii) beneficial ownership of such entity. + + "You" (or "Your") shall mean an individual or Legal Entity + exercising permissions granted by this License. + + "Source" form shall mean the preferred form for making modifications, + including but not limited to software source code, documentation + source, and configuration files. + + "Object" form shall mean any form resulting from mechanical + transformation or translation of a Source form, including but + not limited to compiled object code, generated documentation, + and conversions to other media types. + + "Work" shall mean the work of authorship, whether in Source or + Object form, made available under the License, as indicated by a + copyright notice that is included in or attached to the work + (an example is provided in the Appendix below). 
+ + "Derivative Works" shall mean any work, whether in Source or Object + form, that is based on (or derived from) the Work and for which the + editorial revisions, annotations, elaborations, or other modifications + represent, as a whole, an original work of authorship. For the purposes + of this License, Derivative Works shall not include works that remain + separable from, or merely link (or bind by name) to the interfaces of, + the Work and Derivative Works thereof. + + "Contribution" shall mean any work of authorship, including + the original version of the Work and any modifications or additions + to that Work or Derivative Works thereof, that is intentionally + submitted to Licensor for inclusion in the Work by the copyright owner + or by an individual or Legal Entity authorized to submit on behalf of + the copyright owner. For the purposes of this definition, "submitted" + means any form of electronic, verbal, or written communication sent + to the Licensor or its representatives, including but not limited to + communication on electronic mailing lists, source code control systems, + and issue tracking systems that are managed by, or on behalf of, the + Licensor for the purpose of discussing and improving the Work, but + excluding communication that is conspicuously marked or otherwise + designated in writing by the copyright owner as "Not a Contribution." + + "Contributor" shall mean Licensor and any individual or Legal Entity + on behalf of whom a Contribution has been received by Licensor and + subsequently incorporated within the Work. + + 2. Grant of Copyright License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + copyright license to reproduce, prepare Derivative Works of, + publicly display, publicly perform, sublicense, and distribute the + Work and such Derivative Works in Source or Object form. + + 3. 
Grant of Patent License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + (except as stated in this section) patent license to make, have made, + use, offer to sell, sell, import, and otherwise transfer the Work, + where such license applies only to those patent claims licensable + by such Contributor that are necessarily infringed by their + Contribution(s) alone or by combination of their Contribution(s) + with the Work to which such Contribution(s) was submitted. If You + institute patent litigation against any entity (including a + cross-claim or counterclaim in a lawsuit) alleging that the Work + or a Contribution incorporated within the Work constitutes direct + or contributory patent infringement, then any patent licenses + granted to You under this License for that Work shall terminate + as of the date such litigation is filed. + + 4. Redistribution. You may reproduce and distribute copies of the + Work or Derivative Works thereof in any medium, with or without + modifications, and in Source or Object form, provided that You + meet the following conditions: + + (a) You must give any other recipients of the Work or + Derivative Works a copy of this License; and + + (b) You must cause any modified files to carry prominent notices + stating that You changed the files; and + + (c) You must retain, in the Source form of any Derivative Works + that You distribute, all copyright, patent, trademark, and + attribution notices from the Source form of the Work, + excluding those notices that do not pertain to any part of + the Derivative Works; and + + (d) If the Work includes a "NOTICE" text file as part of its + distribution, then any Derivative Works that You distribute must + include a readable copy of the attribution notices contained + within such NOTICE file, excluding those notices that do not + pertain to any part of the Derivative 
Works, in at least one + of the following places: within a NOTICE text file distributed + as part of the Derivative Works; within the Source form or + documentation, if provided along with the Derivative Works; or, + within a display generated by the Derivative Works, if and + wherever such third-party notices normally appear. The contents + of the NOTICE file are for informational purposes only and + do not modify the License. You may add Your own attribution + notices within Derivative Works that You distribute, alongside + or as an addendum to the NOTICE text from the Work, provided + that such additional attribution notices cannot be construed + as modifying the License. + + You may add Your own copyright statement to Your modifications and + may provide additional or different license terms and conditions + for use, reproduction, or distribution of Your modifications, or + for any such Derivative Works as a whole, provided Your use, + reproduction, and distribution of the Work otherwise complies with + the conditions stated in this License. + + 5. Submission of Contributions. Unless You explicitly state otherwise, + any Contribution intentionally submitted for inclusion in the Work + by You to the Licensor shall be under the terms and conditions of + this License, without any additional terms or conditions. + Notwithstanding the above, nothing herein shall supersede or modify + the terms of any separate license agreement you may have executed + with Licensor regarding such Contributions. + + 6. Trademarks. This License does not grant permission to use the trade + names, trademarks, service marks, or product names of the Licensor, + except as required for reasonable and customary use in describing the + origin of the Work and reproducing the content of the NOTICE file. + + 7. Disclaimer of Warranty. 
Unless required by applicable law or + agreed to in writing, Licensor provides the Work (and each + Contributor provides its Contributions) on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or + implied, including, without limitation, any warranties or conditions + of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A + PARTICULAR PURPOSE. You are solely responsible for determining the + appropriateness of using or redistributing the Work and assume any + risks associated with Your exercise of permissions under this License. + + 8. Limitation of Liability. In no event and under no legal theory, + whether in tort (including negligence), contract, or otherwise, + unless required by applicable law (such as deliberate and grossly + negligent acts) or agreed to in writing, shall any Contributor be + liable to You for damages, including any direct, indirect, special, + incidental, or consequential damages of any character arising as a + result of this License or out of the use or inability to use the + Work (including but not limited to damages for loss of goodwill, + work stoppage, computer failure or malfunction, or any and all + other commercial damages or losses), even if such Contributor + has been advised of the possibility of such damages. + + 9. Accepting Warranty or Additional Liability. While redistributing + the Work or Derivative Works thereof, You may choose to offer, + and charge a fee for, acceptance of support, warranty, indemnity, + or other liability obligations and/or rights consistent with this + License. However, in accepting such obligations, You may act only + on Your own behalf and on Your sole responsibility, not on behalf + of any other Contributor, and only if You agree to indemnify, + defend, and hold each Contributor harmless for any liability + incurred by, or claims asserted against, such Contributor by reason + of your accepting any such warranty or additional liability. 
+ + END OF TERMS AND CONDITIONS + + APPENDIX: How to apply the Apache License to your work. + + To apply the Apache License to your work, attach the following + boilerplate notice, with the fields enclosed by brackets "[]" + replaced with your own identifying information. (Don't include + the brackets!) The text should be enclosed in the appropriate + comment syntax for the file format. We also recommend that a + file or class name and description of purpose be included on the + same "printed page" as the copyright notice for easier + identification within third-party archives. + + Copyright [yyyy] [name of copyright owner] + + Licensed under the Apache License, Version 2.0 (the "License"); + you may not use this file except in compliance with the License. + You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + + Unless required by applicable law or agreed to in writing, software + distributed under the License is distributed on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + See the License for the specific language governing permissions and + limitations under the License. diff --git a/amplify/functions/fetchDocuments/node_modules/@aws-crypto/sha256-browser/README.md b/amplify/functions/fetchDocuments/node_modules/@aws-crypto/sha256-browser/README.md new file mode 100644 index 0000000..75bf105 --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@aws-crypto/sha256-browser/README.md @@ -0,0 +1,31 @@ +# @aws-crypto/sha256-browser + +SHA256 wrapper for browsers that prefers `window.crypto.subtle` but will +fall back to a pure JS implementation in @aws-crypto/sha256-js +to provide a consistent interface for SHA256. 
+ +## Usage + +- To hash "some data" +``` +import {Sha256} from '@aws-crypto/sha256-browser' + +const hash = new Sha256(); +hash.update('some data'); +const result = await hash.digest(); + +``` + +- To hmac "some data" with "a key" +``` +import {Sha256} from '@aws-crypto/sha256-browser' + +const hash = new Sha256('a key'); +hash.update('some data'); +const result = await hash.digest(); + +``` + +## Test + +`npm test` diff --git a/amplify/functions/fetchDocuments/node_modules/@aws-crypto/sha256-browser/build/main/constants.d.ts b/amplify/functions/fetchDocuments/node_modules/@aws-crypto/sha256-browser/build/main/constants.d.ts new file mode 100644 index 0000000..fe8def7 --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@aws-crypto/sha256-browser/build/main/constants.d.ts @@ -0,0 +1,10 @@ +export declare const SHA_256_HASH: { + name: "SHA-256"; +}; +export declare const SHA_256_HMAC_ALGO: { + name: "HMAC"; + hash: { + name: "SHA-256"; + }; +}; +export declare const EMPTY_DATA_SHA_256: Uint8Array; diff --git a/amplify/functions/fetchDocuments/node_modules/@aws-crypto/sha256-browser/build/main/constants.js b/amplify/functions/fetchDocuments/node_modules/@aws-crypto/sha256-browser/build/main/constants.js new file mode 100644 index 0000000..acb5c55 --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@aws-crypto/sha256-browser/build/main/constants.js @@ -0,0 +1,43 @@ +"use strict"; +Object.defineProperty(exports, "__esModule", { value: true }); +exports.EMPTY_DATA_SHA_256 = exports.SHA_256_HMAC_ALGO = exports.SHA_256_HASH = void 0; +exports.SHA_256_HASH = { name: "SHA-256" }; +exports.SHA_256_HMAC_ALGO = { + name: "HMAC", + hash: exports.SHA_256_HASH +}; +exports.EMPTY_DATA_SHA_256 = new Uint8Array([ + 227, + 176, + 196, + 66, + 152, + 252, + 28, + 20, + 154, + 251, + 244, + 200, + 153, + 111, + 185, + 36, + 39, + 174, + 65, + 228, + 100, + 155, + 147, + 76, + 164, + 149, + 153, + 27, + 120, + 82, + 184, + 85 +]); +//# 
sourceMappingURL=constants.js.map \ No newline at end of file diff --git a/amplify/functions/fetchDocuments/node_modules/@aws-crypto/sha256-browser/build/main/constants.js.map b/amplify/functions/fetchDocuments/node_modules/@aws-crypto/sha256-browser/build/main/constants.js.map new file mode 100644 index 0000000..217561a --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@aws-crypto/sha256-browser/build/main/constants.js.map @@ -0,0 +1 @@ +{"version":3,"file":"constants.js","sourceRoot":"","sources":["../../src/constants.ts"],"names":[],"mappings":";;;AAAa,QAAA,YAAY,GAAwB,EAAE,IAAI,EAAE,SAAS,EAAE,CAAC;AAExD,QAAA,iBAAiB,GAAgD;IAC5E,IAAI,EAAE,MAAM;IACZ,IAAI,EAAE,oBAAY;CACnB,CAAC;AAEW,QAAA,kBAAkB,GAAG,IAAI,UAAU,CAAC;IAC/C,GAAG;IACH,GAAG;IACH,GAAG;IACH,EAAE;IACF,GAAG;IACH,GAAG;IACH,EAAE;IACF,EAAE;IACF,GAAG;IACH,GAAG;IACH,GAAG;IACH,GAAG;IACH,GAAG;IACH,GAAG;IACH,GAAG;IACH,EAAE;IACF,EAAE;IACF,GAAG;IACH,EAAE;IACF,GAAG;IACH,GAAG;IACH,GAAG;IACH,GAAG;IACH,EAAE;IACF,GAAG;IACH,GAAG;IACH,GAAG;IACH,EAAE;IACF,GAAG;IACH,EAAE;IACF,GAAG;IACH,EAAE;CACH,CAAC,CAAC"} \ No newline at end of file diff --git a/amplify/functions/fetchDocuments/node_modules/@aws-crypto/sha256-browser/build/main/crossPlatformSha256.d.ts b/amplify/functions/fetchDocuments/node_modules/@aws-crypto/sha256-browser/build/main/crossPlatformSha256.d.ts new file mode 100644 index 0000000..055d3ef --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@aws-crypto/sha256-browser/build/main/crossPlatformSha256.d.ts @@ -0,0 +1,8 @@ +import { Checksum, SourceData } from "@aws-sdk/types"; +export declare class Sha256 implements Checksum { + private hash; + constructor(secret?: SourceData); + update(data: SourceData, encoding?: "utf8" | "ascii" | "latin1"): void; + digest(): Promise; + reset(): void; +} diff --git a/amplify/functions/fetchDocuments/node_modules/@aws-crypto/sha256-browser/build/main/crossPlatformSha256.js 
b/amplify/functions/fetchDocuments/node_modules/@aws-crypto/sha256-browser/build/main/crossPlatformSha256.js new file mode 100644 index 0000000..cde2a42 --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@aws-crypto/sha256-browser/build/main/crossPlatformSha256.js @@ -0,0 +1,30 @@ +"use strict"; +Object.defineProperty(exports, "__esModule", { value: true }); +exports.Sha256 = void 0; +var webCryptoSha256_1 = require("./webCryptoSha256"); +var sha256_js_1 = require("@aws-crypto/sha256-js"); +var supports_web_crypto_1 = require("@aws-crypto/supports-web-crypto"); +var util_locate_window_1 = require("@aws-sdk/util-locate-window"); +var util_1 = require("@aws-crypto/util"); +var Sha256 = /** @class */ (function () { + function Sha256(secret) { + if ((0, supports_web_crypto_1.supportsWebCrypto)((0, util_locate_window_1.locateWindow)())) { + this.hash = new webCryptoSha256_1.Sha256(secret); + } + else { + this.hash = new sha256_js_1.Sha256(secret); + } + } + Sha256.prototype.update = function (data, encoding) { + this.hash.update((0, util_1.convertToBuffer)(data)); + }; + Sha256.prototype.digest = function () { + return this.hash.digest(); + }; + Sha256.prototype.reset = function () { + this.hash.reset(); + }; + return Sha256; +}()); +exports.Sha256 = Sha256; +//# sourceMappingURL=crossPlatformSha256.js.map \ No newline at end of file diff --git a/amplify/functions/fetchDocuments/node_modules/@aws-crypto/sha256-browser/build/main/crossPlatformSha256.js.map b/amplify/functions/fetchDocuments/node_modules/@aws-crypto/sha256-browser/build/main/crossPlatformSha256.js.map new file mode 100644 index 0000000..9a177dc --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@aws-crypto/sha256-browser/build/main/crossPlatformSha256.js.map @@ -0,0 +1 @@ 
+{"version":3,"file":"crossPlatformSha256.js","sourceRoot":"","sources":["../../src/crossPlatformSha256.ts"],"names":[],"mappings":";;;AAAA,qDAA8D;AAC9D,mDAA2D;AAE3D,uEAAoE;AACpE,kEAA2D;AAC3D,yCAAmD;AAEnD;IAGE,gBAAY,MAAmB;QAC7B,IAAI,IAAA,uCAAiB,EAAC,IAAA,iCAAY,GAAE,CAAC,EAAE;YACrC,IAAI,CAAC,IAAI,GAAG,IAAI,wBAAe,CAAC,MAAM,CAAC,CAAC;SACzC;aAAM;YACL,IAAI,CAAC,IAAI,GAAG,IAAI,kBAAQ,CAAC,MAAM,CAAC,CAAC;SAClC;IACH,CAAC;IAED,uBAAM,GAAN,UAAO,IAAgB,EAAE,QAAsC;QAC7D,IAAI,CAAC,IAAI,CAAC,MAAM,CAAC,IAAA,sBAAe,EAAC,IAAI,CAAC,CAAC,CAAC;IAC1C,CAAC;IAED,uBAAM,GAAN;QACE,OAAO,IAAI,CAAC,IAAI,CAAC,MAAM,EAAE,CAAC;IAC5B,CAAC;IAED,sBAAK,GAAL;QACE,IAAI,CAAC,IAAI,CAAC,KAAK,EAAE,CAAC;IACpB,CAAC;IACH,aAAC;AAAD,CAAC,AAtBD,IAsBC;AAtBY,wBAAM"} \ No newline at end of file diff --git a/amplify/functions/fetchDocuments/node_modules/@aws-crypto/sha256-browser/build/main/index.d.ts b/amplify/functions/fetchDocuments/node_modules/@aws-crypto/sha256-browser/build/main/index.d.ts new file mode 100644 index 0000000..60ab397 --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@aws-crypto/sha256-browser/build/main/index.d.ts @@ -0,0 +1,2 @@ +export * from "./crossPlatformSha256"; +export { Sha256 as WebCryptoSha256 } from "./webCryptoSha256"; diff --git a/amplify/functions/fetchDocuments/node_modules/@aws-crypto/sha256-browser/build/main/index.js b/amplify/functions/fetchDocuments/node_modules/@aws-crypto/sha256-browser/build/main/index.js new file mode 100644 index 0000000..a270349 --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@aws-crypto/sha256-browser/build/main/index.js @@ -0,0 +1,8 @@ +"use strict"; +Object.defineProperty(exports, "__esModule", { value: true }); +exports.WebCryptoSha256 = void 0; +var tslib_1 = require("tslib"); +tslib_1.__exportStar(require("./crossPlatformSha256"), exports); +var webCryptoSha256_1 = require("./webCryptoSha256"); +Object.defineProperty(exports, "WebCryptoSha256", { enumerable: true, get: function () { return webCryptoSha256_1.Sha256; 
} }); +//# sourceMappingURL=index.js.map \ No newline at end of file diff --git a/amplify/functions/fetchDocuments/node_modules/@aws-crypto/sha256-browser/build/main/index.js.map b/amplify/functions/fetchDocuments/node_modules/@aws-crypto/sha256-browser/build/main/index.js.map new file mode 100644 index 0000000..64b19eb --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@aws-crypto/sha256-browser/build/main/index.js.map @@ -0,0 +1 @@ +{"version":3,"file":"index.js","sourceRoot":"","sources":["../../src/index.ts"],"names":[],"mappings":";;;;AAAA,gEAAsC;AACtC,qDAA8D;AAArD,kHAAA,MAAM,OAAmB"} \ No newline at end of file diff --git a/amplify/functions/fetchDocuments/node_modules/@aws-crypto/sha256-browser/build/main/isEmptyData.d.ts b/amplify/functions/fetchDocuments/node_modules/@aws-crypto/sha256-browser/build/main/isEmptyData.d.ts new file mode 100644 index 0000000..43ae4a7 --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@aws-crypto/sha256-browser/build/main/isEmptyData.d.ts @@ -0,0 +1,2 @@ +import { SourceData } from "@aws-sdk/types"; +export declare function isEmptyData(data: SourceData): boolean; diff --git a/amplify/functions/fetchDocuments/node_modules/@aws-crypto/sha256-browser/build/main/isEmptyData.js b/amplify/functions/fetchDocuments/node_modules/@aws-crypto/sha256-browser/build/main/isEmptyData.js new file mode 100644 index 0000000..fe91548 --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@aws-crypto/sha256-browser/build/main/isEmptyData.js @@ -0,0 +1,11 @@ +"use strict"; +Object.defineProperty(exports, "__esModule", { value: true }); +exports.isEmptyData = void 0; +function isEmptyData(data) { + if (typeof data === "string") { + return data.length === 0; + } + return data.byteLength === 0; +} +exports.isEmptyData = isEmptyData; +//# sourceMappingURL=isEmptyData.js.map \ No newline at end of file diff --git 
a/amplify/functions/fetchDocuments/node_modules/@aws-crypto/sha256-browser/build/main/isEmptyData.js.map b/amplify/functions/fetchDocuments/node_modules/@aws-crypto/sha256-browser/build/main/isEmptyData.js.map new file mode 100644 index 0000000..20ccfd6 --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@aws-crypto/sha256-browser/build/main/isEmptyData.js.map @@ -0,0 +1 @@ +{"version":3,"file":"isEmptyData.js","sourceRoot":"","sources":["../../src/isEmptyData.ts"],"names":[],"mappings":";;;AAEA,SAAgB,WAAW,CAAC,IAAgB;IAC1C,IAAI,OAAO,IAAI,KAAK,QAAQ,EAAE;QAC5B,OAAO,IAAI,CAAC,MAAM,KAAK,CAAC,CAAC;KAC1B;IAED,OAAO,IAAI,CAAC,UAAU,KAAK,CAAC,CAAC;AAC/B,CAAC;AAND,kCAMC"} \ No newline at end of file diff --git a/amplify/functions/fetchDocuments/node_modules/@aws-crypto/sha256-browser/build/main/webCryptoSha256.d.ts b/amplify/functions/fetchDocuments/node_modules/@aws-crypto/sha256-browser/build/main/webCryptoSha256.d.ts new file mode 100644 index 0000000..ec0e214 --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@aws-crypto/sha256-browser/build/main/webCryptoSha256.d.ts @@ -0,0 +1,10 @@ +import { Checksum, SourceData } from "@aws-sdk/types"; +export declare class Sha256 implements Checksum { + private readonly secret?; + private key; + private toHash; + constructor(secret?: SourceData); + update(data: SourceData): void; + digest(): Promise; + reset(): void; +} diff --git a/amplify/functions/fetchDocuments/node_modules/@aws-crypto/sha256-browser/build/main/webCryptoSha256.js b/amplify/functions/fetchDocuments/node_modules/@aws-crypto/sha256-browser/build/main/webCryptoSha256.js new file mode 100644 index 0000000..778fdd9 --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@aws-crypto/sha256-browser/build/main/webCryptoSha256.js @@ -0,0 +1,56 @@ +"use strict"; +Object.defineProperty(exports, "__esModule", { value: true }); +exports.Sha256 = void 0; +var util_1 = require("@aws-crypto/util"); +var constants_1 = require("./constants"); 
+var util_locate_window_1 = require("@aws-sdk/util-locate-window"); +var Sha256 = /** @class */ (function () { + function Sha256(secret) { + this.toHash = new Uint8Array(0); + this.secret = secret; + this.reset(); + } + Sha256.prototype.update = function (data) { + if ((0, util_1.isEmptyData)(data)) { + return; + } + var update = (0, util_1.convertToBuffer)(data); + var typedArray = new Uint8Array(this.toHash.byteLength + update.byteLength); + typedArray.set(this.toHash, 0); + typedArray.set(update, this.toHash.byteLength); + this.toHash = typedArray; + }; + Sha256.prototype.digest = function () { + var _this = this; + if (this.key) { + return this.key.then(function (key) { + return (0, util_locate_window_1.locateWindow)() + .crypto.subtle.sign(constants_1.SHA_256_HMAC_ALGO, key, _this.toHash) + .then(function (data) { return new Uint8Array(data); }); + }); + } + if ((0, util_1.isEmptyData)(this.toHash)) { + return Promise.resolve(constants_1.EMPTY_DATA_SHA_256); + } + return Promise.resolve() + .then(function () { + return (0, util_locate_window_1.locateWindow)().crypto.subtle.digest(constants_1.SHA_256_HASH, _this.toHash); + }) + .then(function (data) { return Promise.resolve(new Uint8Array(data)); }); + }; + Sha256.prototype.reset = function () { + var _this = this; + this.toHash = new Uint8Array(0); + if (this.secret && this.secret !== void 0) { + this.key = new Promise(function (resolve, reject) { + (0, util_locate_window_1.locateWindow)() + .crypto.subtle.importKey("raw", (0, util_1.convertToBuffer)(_this.secret), constants_1.SHA_256_HMAC_ALGO, false, ["sign"]) + .then(resolve, reject); + }); + this.key.catch(function () { }); + } + }; + return Sha256; +}()); +exports.Sha256 = Sha256; +//# sourceMappingURL=webCryptoSha256.js.map \ No newline at end of file diff --git a/amplify/functions/fetchDocuments/node_modules/@aws-crypto/sha256-browser/build/main/webCryptoSha256.js.map 
b/amplify/functions/fetchDocuments/node_modules/@aws-crypto/sha256-browser/build/main/webCryptoSha256.js.map new file mode 100644 index 0000000..7b55a07 --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@aws-crypto/sha256-browser/build/main/webCryptoSha256.js.map @@ -0,0 +1 @@ +{"version":3,"file":"webCryptoSha256.js","sourceRoot":"","sources":["../../src/webCryptoSha256.ts"],"names":[],"mappings":";;;AACA,yCAAgE;AAChE,yCAIqB;AACrB,kEAA2D;AAE3D;IAKE,gBAAY,MAAmB;QAFvB,WAAM,GAAe,IAAI,UAAU,CAAC,CAAC,CAAC,CAAC;QAG7C,IAAI,CAAC,MAAM,GAAG,MAAM,CAAC;QACrB,IAAI,CAAC,KAAK,EAAE,CAAC;IACf,CAAC;IAED,uBAAM,GAAN,UAAO,IAAgB;QACrB,IAAI,IAAA,kBAAW,EAAC,IAAI,CAAC,EAAE;YACrB,OAAO;SACR;QAED,IAAM,MAAM,GAAG,IAAA,sBAAe,EAAC,IAAI,CAAC,CAAC;QACrC,IAAM,UAAU,GAAG,IAAI,UAAU,CAC/B,IAAI,CAAC,MAAM,CAAC,UAAU,GAAG,MAAM,CAAC,UAAU,CAC3C,CAAC;QACF,UAAU,CAAC,GAAG,CAAC,IAAI,CAAC,MAAM,EAAE,CAAC,CAAC,CAAC;QAC/B,UAAU,CAAC,GAAG,CAAC,MAAM,EAAE,IAAI,CAAC,MAAM,CAAC,UAAU,CAAC,CAAC;QAC/C,IAAI,CAAC,MAAM,GAAG,UAAU,CAAC;IAC3B,CAAC;IAED,uBAAM,GAAN;QAAA,iBAkBC;QAjBC,IAAI,IAAI,CAAC,GAAG,EAAE;YACZ,OAAO,IAAI,CAAC,GAAG,CAAC,IAAI,CAAC,UAAC,GAAG;gBACvB,OAAA,IAAA,iCAAY,GAAE;qBACX,MAAM,CAAC,MAAM,CAAC,IAAI,CAAC,6BAAiB,EAAE,GAAG,EAAE,KAAI,CAAC,MAAM,CAAC;qBACvD,IAAI,CAAC,UAAC,IAAI,IAAK,OAAA,IAAI,UAAU,CAAC,IAAI,CAAC,EAApB,CAAoB,CAAC;YAFvC,CAEuC,CACxC,CAAC;SACH;QAED,IAAI,IAAA,kBAAW,EAAC,IAAI,CAAC,MAAM,CAAC,EAAE;YAC5B,OAAO,OAAO,CAAC,OAAO,CAAC,8BAAkB,CAAC,CAAC;SAC5C;QAED,OAAO,OAAO,CAAC,OAAO,EAAE;aACrB,IAAI,CAAC;YACJ,OAAA,IAAA,iCAAY,GAAE,CAAC,MAAM,CAAC,MAAM,CAAC,MAAM,CAAC,wBAAY,EAAE,KAAI,CAAC,MAAM,CAAC;QAA9D,CAA8D,CAC/D;aACA,IAAI,CAAC,UAAC,IAAI,IAAK,OAAA,OAAO,CAAC,OAAO,CAAC,IAAI,UAAU,CAAC,IAAI,CAAC,CAAC,EAArC,CAAqC,CAAC,CAAC;IAC3D,CAAC;IAED,sBAAK,GAAL;QAAA,iBAgBC;QAfC,IAAI,CAAC,MAAM,GAAG,IAAI,UAAU,CAAC,CAAC,CAAC,CAAC;QAChC,IAAI,IAAI,CAAC,MAAM,IAAI,IAAI,CAAC,MAAM,KAAK,KAAK,CAAC,EAAE;YACzC,IAAI,CAAC,GAAG,GAAG,IAAI,OAAO,CAAC,UAAC,OAAO,EAAE,MAAM;gBACrC,IAAA,iCAAY,GAAE;qBACT,MAAM,CAAC,MAAM,CAAC,SAAS,CACxB,KAAK,EACL,IAAA,sB
AAe,EAAC,KAAI,CAAC,MAAoB,CAAC,EAC1C,6BAAiB,EACjB,KAAK,EACL,CAAC,MAAM,CAAC,CACX;qBACI,IAAI,CAAC,OAAO,EAAE,MAAM,CAAC,CAAC;YAC7B,CAAC,CAAC,CAAC;YACH,IAAI,CAAC,GAAG,CAAC,KAAK,CAAC,cAAO,CAAC,CAAC,CAAC;SAC1B;IACH,CAAC;IACH,aAAC;AAAD,CAAC,AA7DD,IA6DC;AA7DY,wBAAM"} \ No newline at end of file diff --git a/amplify/functions/fetchDocuments/node_modules/@aws-crypto/sha256-browser/build/module/constants.d.ts b/amplify/functions/fetchDocuments/node_modules/@aws-crypto/sha256-browser/build/module/constants.d.ts new file mode 100644 index 0000000..fe8def7 --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@aws-crypto/sha256-browser/build/module/constants.d.ts @@ -0,0 +1,10 @@ +export declare const SHA_256_HASH: { + name: "SHA-256"; +}; +export declare const SHA_256_HMAC_ALGO: { + name: "HMAC"; + hash: { + name: "SHA-256"; + }; +}; +export declare const EMPTY_DATA_SHA_256: Uint8Array; diff --git a/amplify/functions/fetchDocuments/node_modules/@aws-crypto/sha256-browser/build/module/constants.js b/amplify/functions/fetchDocuments/node_modules/@aws-crypto/sha256-browser/build/module/constants.js new file mode 100644 index 0000000..7fb1613 --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@aws-crypto/sha256-browser/build/module/constants.js @@ -0,0 +1,40 @@ +export var SHA_256_HASH = { name: "SHA-256" }; +export var SHA_256_HMAC_ALGO = { + name: "HMAC", + hash: SHA_256_HASH +}; +export var EMPTY_DATA_SHA_256 = new Uint8Array([ + 227, + 176, + 196, + 66, + 152, + 252, + 28, + 20, + 154, + 251, + 244, + 200, + 153, + 111, + 185, + 36, + 39, + 174, + 65, + 228, + 100, + 155, + 147, + 76, + 164, + 149, + 153, + 27, + 120, + 82, + 184, + 85 +]); +//# sourceMappingURL=constants.js.map \ No newline at end of file diff --git a/amplify/functions/fetchDocuments/node_modules/@aws-crypto/sha256-browser/build/module/constants.js.map b/amplify/functions/fetchDocuments/node_modules/@aws-crypto/sha256-browser/build/module/constants.js.map new file mode 100644 index 
0000000..09ed9a3 --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@aws-crypto/sha256-browser/build/module/constants.js.map @@ -0,0 +1 @@ +{"version":3,"file":"constants.js","sourceRoot":"","sources":["../../src/constants.ts"],"names":[],"mappings":"AAAA,MAAM,CAAC,IAAM,YAAY,GAAwB,EAAE,IAAI,EAAE,SAAS,EAAE,CAAC;AAErE,MAAM,CAAC,IAAM,iBAAiB,GAAgD;IAC5E,IAAI,EAAE,MAAM;IACZ,IAAI,EAAE,YAAY;CACnB,CAAC;AAEF,MAAM,CAAC,IAAM,kBAAkB,GAAG,IAAI,UAAU,CAAC;IAC/C,GAAG;IACH,GAAG;IACH,GAAG;IACH,EAAE;IACF,GAAG;IACH,GAAG;IACH,EAAE;IACF,EAAE;IACF,GAAG;IACH,GAAG;IACH,GAAG;IACH,GAAG;IACH,GAAG;IACH,GAAG;IACH,GAAG;IACH,EAAE;IACF,EAAE;IACF,GAAG;IACH,EAAE;IACF,GAAG;IACH,GAAG;IACH,GAAG;IACH,GAAG;IACH,EAAE;IACF,GAAG;IACH,GAAG;IACH,GAAG;IACH,EAAE;IACF,GAAG;IACH,EAAE;IACF,GAAG;IACH,EAAE;CACH,CAAC,CAAC"} \ No newline at end of file diff --git a/amplify/functions/fetchDocuments/node_modules/@aws-crypto/sha256-browser/build/module/crossPlatformSha256.d.ts b/amplify/functions/fetchDocuments/node_modules/@aws-crypto/sha256-browser/build/module/crossPlatformSha256.d.ts new file mode 100644 index 0000000..055d3ef --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@aws-crypto/sha256-browser/build/module/crossPlatformSha256.d.ts @@ -0,0 +1,8 @@ +import { Checksum, SourceData } from "@aws-sdk/types"; +export declare class Sha256 implements Checksum { + private hash; + constructor(secret?: SourceData); + update(data: SourceData, encoding?: "utf8" | "ascii" | "latin1"): void; + digest(): Promise; + reset(): void; +} diff --git a/amplify/functions/fetchDocuments/node_modules/@aws-crypto/sha256-browser/build/module/crossPlatformSha256.js b/amplify/functions/fetchDocuments/node_modules/@aws-crypto/sha256-browser/build/module/crossPlatformSha256.js new file mode 100644 index 0000000..5ae82ea --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@aws-crypto/sha256-browser/build/module/crossPlatformSha256.js @@ -0,0 +1,27 @@ +import { Sha256 as WebCryptoSha256 } from 
"./webCryptoSha256"; +import { Sha256 as JsSha256 } from "@aws-crypto/sha256-js"; +import { supportsWebCrypto } from "@aws-crypto/supports-web-crypto"; +import { locateWindow } from "@aws-sdk/util-locate-window"; +import { convertToBuffer } from "@aws-crypto/util"; +var Sha256 = /** @class */ (function () { + function Sha256(secret) { + if (supportsWebCrypto(locateWindow())) { + this.hash = new WebCryptoSha256(secret); + } + else { + this.hash = new JsSha256(secret); + } + } + Sha256.prototype.update = function (data, encoding) { + this.hash.update(convertToBuffer(data)); + }; + Sha256.prototype.digest = function () { + return this.hash.digest(); + }; + Sha256.prototype.reset = function () { + this.hash.reset(); + }; + return Sha256; +}()); +export { Sha256 }; +//# sourceMappingURL=crossPlatformSha256.js.map \ No newline at end of file diff --git a/amplify/functions/fetchDocuments/node_modules/@aws-crypto/sha256-browser/build/module/crossPlatformSha256.js.map b/amplify/functions/fetchDocuments/node_modules/@aws-crypto/sha256-browser/build/module/crossPlatformSha256.js.map new file mode 100644 index 0000000..4a83c57 --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@aws-crypto/sha256-browser/build/module/crossPlatformSha256.js.map @@ -0,0 +1 @@ 
+{"version":3,"file":"crossPlatformSha256.js","sourceRoot":"","sources":["../../src/crossPlatformSha256.ts"],"names":[],"mappings":"AAAA,OAAO,EAAE,MAAM,IAAI,eAAe,EAAE,MAAM,mBAAmB,CAAC;AAC9D,OAAO,EAAE,MAAM,IAAI,QAAQ,EAAE,MAAM,uBAAuB,CAAC;AAE3D,OAAO,EAAE,iBAAiB,EAAE,MAAM,iCAAiC,CAAC;AACpE,OAAO,EAAE,YAAY,EAAE,MAAM,6BAA6B,CAAC;AAC3D,OAAO,EAAE,eAAe,EAAE,MAAM,kBAAkB,CAAC;AAEnD;IAGE,gBAAY,MAAmB;QAC7B,IAAI,iBAAiB,CAAC,YAAY,EAAE,CAAC,EAAE;YACrC,IAAI,CAAC,IAAI,GAAG,IAAI,eAAe,CAAC,MAAM,CAAC,CAAC;SACzC;aAAM;YACL,IAAI,CAAC,IAAI,GAAG,IAAI,QAAQ,CAAC,MAAM,CAAC,CAAC;SAClC;IACH,CAAC;IAED,uBAAM,GAAN,UAAO,IAAgB,EAAE,QAAsC;QAC7D,IAAI,CAAC,IAAI,CAAC,MAAM,CAAC,eAAe,CAAC,IAAI,CAAC,CAAC,CAAC;IAC1C,CAAC;IAED,uBAAM,GAAN;QACE,OAAO,IAAI,CAAC,IAAI,CAAC,MAAM,EAAE,CAAC;IAC5B,CAAC;IAED,sBAAK,GAAL;QACE,IAAI,CAAC,IAAI,CAAC,KAAK,EAAE,CAAC;IACpB,CAAC;IACH,aAAC;AAAD,CAAC,AAtBD,IAsBC"} \ No newline at end of file diff --git a/amplify/functions/fetchDocuments/node_modules/@aws-crypto/sha256-browser/build/module/index.d.ts b/amplify/functions/fetchDocuments/node_modules/@aws-crypto/sha256-browser/build/module/index.d.ts new file mode 100644 index 0000000..60ab397 --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@aws-crypto/sha256-browser/build/module/index.d.ts @@ -0,0 +1,2 @@ +export * from "./crossPlatformSha256"; +export { Sha256 as WebCryptoSha256 } from "./webCryptoSha256"; diff --git a/amplify/functions/fetchDocuments/node_modules/@aws-crypto/sha256-browser/build/module/index.js b/amplify/functions/fetchDocuments/node_modules/@aws-crypto/sha256-browser/build/module/index.js new file mode 100644 index 0000000..94ffb63 --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@aws-crypto/sha256-browser/build/module/index.js @@ -0,0 +1,3 @@ +export * from "./crossPlatformSha256"; +export { Sha256 as WebCryptoSha256 } from "./webCryptoSha256"; +//# sourceMappingURL=index.js.map \ No newline at end of file diff --git 
a/amplify/functions/fetchDocuments/node_modules/@aws-crypto/sha256-browser/build/module/index.js.map b/amplify/functions/fetchDocuments/node_modules/@aws-crypto/sha256-browser/build/module/index.js.map new file mode 100644 index 0000000..01d20bc --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@aws-crypto/sha256-browser/build/module/index.js.map @@ -0,0 +1 @@ +{"version":3,"file":"index.js","sourceRoot":"","sources":["../../src/index.ts"],"names":[],"mappings":"AAAA,cAAc,uBAAuB,CAAC;AACtC,OAAO,EAAE,MAAM,IAAI,eAAe,EAAE,MAAM,mBAAmB,CAAC"} \ No newline at end of file diff --git a/amplify/functions/fetchDocuments/node_modules/@aws-crypto/sha256-browser/build/module/isEmptyData.d.ts b/amplify/functions/fetchDocuments/node_modules/@aws-crypto/sha256-browser/build/module/isEmptyData.d.ts new file mode 100644 index 0000000..43ae4a7 --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@aws-crypto/sha256-browser/build/module/isEmptyData.d.ts @@ -0,0 +1,2 @@ +import { SourceData } from "@aws-sdk/types"; +export declare function isEmptyData(data: SourceData): boolean; diff --git a/amplify/functions/fetchDocuments/node_modules/@aws-crypto/sha256-browser/build/module/isEmptyData.js b/amplify/functions/fetchDocuments/node_modules/@aws-crypto/sha256-browser/build/module/isEmptyData.js new file mode 100644 index 0000000..4f31a61 --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@aws-crypto/sha256-browser/build/module/isEmptyData.js @@ -0,0 +1,7 @@ +export function isEmptyData(data) { + if (typeof data === "string") { + return data.length === 0; + } + return data.byteLength === 0; +} +//# sourceMappingURL=isEmptyData.js.map \ No newline at end of file diff --git a/amplify/functions/fetchDocuments/node_modules/@aws-crypto/sha256-browser/build/module/isEmptyData.js.map b/amplify/functions/fetchDocuments/node_modules/@aws-crypto/sha256-browser/build/module/isEmptyData.js.map new file mode 100644 index 0000000..776ce2b --- /dev/null +++ 
b/amplify/functions/fetchDocuments/node_modules/@aws-crypto/sha256-browser/build/module/isEmptyData.js.map @@ -0,0 +1 @@ +{"version":3,"file":"isEmptyData.js","sourceRoot":"","sources":["../../src/isEmptyData.ts"],"names":[],"mappings":"AAEA,MAAM,UAAU,WAAW,CAAC,IAAgB;IAC1C,IAAI,OAAO,IAAI,KAAK,QAAQ,EAAE;QAC5B,OAAO,IAAI,CAAC,MAAM,KAAK,CAAC,CAAC;KAC1B;IAED,OAAO,IAAI,CAAC,UAAU,KAAK,CAAC,CAAC;AAC/B,CAAC"} \ No newline at end of file diff --git a/amplify/functions/fetchDocuments/node_modules/@aws-crypto/sha256-browser/build/module/webCryptoSha256.d.ts b/amplify/functions/fetchDocuments/node_modules/@aws-crypto/sha256-browser/build/module/webCryptoSha256.d.ts new file mode 100644 index 0000000..ec0e214 --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@aws-crypto/sha256-browser/build/module/webCryptoSha256.d.ts @@ -0,0 +1,10 @@ +import { Checksum, SourceData } from "@aws-sdk/types"; +export declare class Sha256 implements Checksum { + private readonly secret?; + private key; + private toHash; + constructor(secret?: SourceData); + update(data: SourceData): void; + digest(): Promise; + reset(): void; +} diff --git a/amplify/functions/fetchDocuments/node_modules/@aws-crypto/sha256-browser/build/module/webCryptoSha256.js b/amplify/functions/fetchDocuments/node_modules/@aws-crypto/sha256-browser/build/module/webCryptoSha256.js new file mode 100644 index 0000000..d12acd0 --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@aws-crypto/sha256-browser/build/module/webCryptoSha256.js @@ -0,0 +1,53 @@ +import { isEmptyData, convertToBuffer } from "@aws-crypto/util"; +import { EMPTY_DATA_SHA_256, SHA_256_HASH, SHA_256_HMAC_ALGO, } from "./constants"; +import { locateWindow } from "@aws-sdk/util-locate-window"; +var Sha256 = /** @class */ (function () { + function Sha256(secret) { + this.toHash = new Uint8Array(0); + this.secret = secret; + this.reset(); + } + Sha256.prototype.update = function (data) { + if (isEmptyData(data)) { + return; + } + var 
update = convertToBuffer(data); + var typedArray = new Uint8Array(this.toHash.byteLength + update.byteLength); + typedArray.set(this.toHash, 0); + typedArray.set(update, this.toHash.byteLength); + this.toHash = typedArray; + }; + Sha256.prototype.digest = function () { + var _this = this; + if (this.key) { + return this.key.then(function (key) { + return locateWindow() + .crypto.subtle.sign(SHA_256_HMAC_ALGO, key, _this.toHash) + .then(function (data) { return new Uint8Array(data); }); + }); + } + if (isEmptyData(this.toHash)) { + return Promise.resolve(EMPTY_DATA_SHA_256); + } + return Promise.resolve() + .then(function () { + return locateWindow().crypto.subtle.digest(SHA_256_HASH, _this.toHash); + }) + .then(function (data) { return Promise.resolve(new Uint8Array(data)); }); + }; + Sha256.prototype.reset = function () { + var _this = this; + this.toHash = new Uint8Array(0); + if (this.secret && this.secret !== void 0) { + this.key = new Promise(function (resolve, reject) { + locateWindow() + .crypto.subtle.importKey("raw", convertToBuffer(_this.secret), SHA_256_HMAC_ALGO, false, ["sign"]) + .then(resolve, reject); + }); + this.key.catch(function () { }); + } + }; + return Sha256; +}()); +export { Sha256 }; +//# sourceMappingURL=webCryptoSha256.js.map \ No newline at end of file diff --git a/amplify/functions/fetchDocuments/node_modules/@aws-crypto/sha256-browser/build/module/webCryptoSha256.js.map b/amplify/functions/fetchDocuments/node_modules/@aws-crypto/sha256-browser/build/module/webCryptoSha256.js.map new file mode 100644 index 0000000..c7b31c0 --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@aws-crypto/sha256-browser/build/module/webCryptoSha256.js.map @@ -0,0 +1 @@ 
+{"version":3,"file":"webCryptoSha256.js","sourceRoot":"","sources":["../../src/webCryptoSha256.ts"],"names":[],"mappings":"AACA,OAAO,EAAE,WAAW,EAAE,eAAe,EAAE,MAAM,kBAAkB,CAAC;AAChE,OAAO,EACL,kBAAkB,EAClB,YAAY,EACZ,iBAAiB,GAClB,MAAM,aAAa,CAAC;AACrB,OAAO,EAAE,YAAY,EAAE,MAAM,6BAA6B,CAAC;AAE3D;IAKE,gBAAY,MAAmB;QAFvB,WAAM,GAAe,IAAI,UAAU,CAAC,CAAC,CAAC,CAAC;QAG7C,IAAI,CAAC,MAAM,GAAG,MAAM,CAAC;QACrB,IAAI,CAAC,KAAK,EAAE,CAAC;IACf,CAAC;IAED,uBAAM,GAAN,UAAO,IAAgB;QACrB,IAAI,WAAW,CAAC,IAAI,CAAC,EAAE;YACrB,OAAO;SACR;QAED,IAAM,MAAM,GAAG,eAAe,CAAC,IAAI,CAAC,CAAC;QACrC,IAAM,UAAU,GAAG,IAAI,UAAU,CAC/B,IAAI,CAAC,MAAM,CAAC,UAAU,GAAG,MAAM,CAAC,UAAU,CAC3C,CAAC;QACF,UAAU,CAAC,GAAG,CAAC,IAAI,CAAC,MAAM,EAAE,CAAC,CAAC,CAAC;QAC/B,UAAU,CAAC,GAAG,CAAC,MAAM,EAAE,IAAI,CAAC,MAAM,CAAC,UAAU,CAAC,CAAC;QAC/C,IAAI,CAAC,MAAM,GAAG,UAAU,CAAC;IAC3B,CAAC;IAED,uBAAM,GAAN;QAAA,iBAkBC;QAjBC,IAAI,IAAI,CAAC,GAAG,EAAE;YACZ,OAAO,IAAI,CAAC,GAAG,CAAC,IAAI,CAAC,UAAC,GAAG;gBACvB,OAAA,YAAY,EAAE;qBACX,MAAM,CAAC,MAAM,CAAC,IAAI,CAAC,iBAAiB,EAAE,GAAG,EAAE,KAAI,CAAC,MAAM,CAAC;qBACvD,IAAI,CAAC,UAAC,IAAI,IAAK,OAAA,IAAI,UAAU,CAAC,IAAI,CAAC,EAApB,CAAoB,CAAC;YAFvC,CAEuC,CACxC,CAAC;SACH;QAED,IAAI,WAAW,CAAC,IAAI,CAAC,MAAM,CAAC,EAAE;YAC5B,OAAO,OAAO,CAAC,OAAO,CAAC,kBAAkB,CAAC,CAAC;SAC5C;QAED,OAAO,OAAO,CAAC,OAAO,EAAE;aACrB,IAAI,CAAC;YACJ,OAAA,YAAY,EAAE,CAAC,MAAM,CAAC,MAAM,CAAC,MAAM,CAAC,YAAY,EAAE,KAAI,CAAC,MAAM,CAAC;QAA9D,CAA8D,CAC/D;aACA,IAAI,CAAC,UAAC,IAAI,IAAK,OAAA,OAAO,CAAC,OAAO,CAAC,IAAI,UAAU,CAAC,IAAI,CAAC,CAAC,EAArC,CAAqC,CAAC,CAAC;IAC3D,CAAC;IAED,sBAAK,GAAL;QAAA,iBAgBC;QAfC,IAAI,CAAC,MAAM,GAAG,IAAI,UAAU,CAAC,CAAC,CAAC,CAAC;QAChC,IAAI,IAAI,CAAC,MAAM,IAAI,IAAI,CAAC,MAAM,KAAK,KAAK,CAAC,EAAE;YACzC,IAAI,CAAC,GAAG,GAAG,IAAI,OAAO,CAAC,UAAC,OAAO,EAAE,MAAM;gBACrC,YAAY,EAAE;qBACT,MAAM,CAAC,MAAM,CAAC,SAAS,CACxB,KAAK,EACL,eAAe,CAAC,KAAI,CAAC,MAAoB,CAAC,EAC1C,iBAAiB,EACjB,KAAK,EACL,CAAC,MAAM,CAAC,CACX;qBACI,IAAI,CAAC,OAAO,EAAE,MAAM,CAAC,CAAC;YAC7B,CAAC,CAAC,CAAC;YACH,IAAI,CAAC,GAAG,CAAC,KAAK,CAAC,cAAO,CAAC,CAAC,CAAC;SAC1B;IACH,CAAC;IACH
,aAAC;AAAD,CAAC,AA7DD,IA6DC"} \ No newline at end of file diff --git a/amplify/functions/fetchDocuments/node_modules/@aws-crypto/sha256-browser/node_modules/@smithy/is-array-buffer/LICENSE b/amplify/functions/fetchDocuments/node_modules/@aws-crypto/sha256-browser/node_modules/@smithy/is-array-buffer/LICENSE new file mode 100644 index 0000000..7b6491b --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@aws-crypto/sha256-browser/node_modules/@smithy/is-array-buffer/LICENSE @@ -0,0 +1,201 @@ +Apache License + Version 2.0, January 2004 + http://www.apache.org/licenses/ + + TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION + + 1. Definitions. + + "License" shall mean the terms and conditions for use, reproduction, + and distribution as defined by Sections 1 through 9 of this document. + + "Licensor" shall mean the copyright owner or entity authorized by + the copyright owner that is granting the License. + + "Legal Entity" shall mean the union of the acting entity and all + other entities that control, are controlled by, or are under common + control with that entity. For the purposes of this definition, + "control" means (i) the power, direct or indirect, to cause the + direction or management of such entity, whether by contract or + otherwise, or (ii) ownership of fifty percent (50%) or more of the + outstanding shares, or (iii) beneficial ownership of such entity. + + "You" (or "Your") shall mean an individual or Legal Entity + exercising permissions granted by this License. + + "Source" form shall mean the preferred form for making modifications, + including but not limited to software source code, documentation + source, and configuration files. + + "Object" form shall mean any form resulting from mechanical + transformation or translation of a Source form, including but + not limited to compiled object code, generated documentation, + and conversions to other media types. 
+ + "Work" shall mean the work of authorship, whether in Source or + Object form, made available under the License, as indicated by a + copyright notice that is included in or attached to the work + (an example is provided in the Appendix below). + + "Derivative Works" shall mean any work, whether in Source or Object + form, that is based on (or derived from) the Work and for which the + editorial revisions, annotations, elaborations, or other modifications + represent, as a whole, an original work of authorship. For the purposes + of this License, Derivative Works shall not include works that remain + separable from, or merely link (or bind by name) to the interfaces of, + the Work and Derivative Works thereof. + + "Contribution" shall mean any work of authorship, including + the original version of the Work and any modifications or additions + to that Work or Derivative Works thereof, that is intentionally + submitted to Licensor for inclusion in the Work by the copyright owner + or by an individual or Legal Entity authorized to submit on behalf of + the copyright owner. For the purposes of this definition, "submitted" + means any form of electronic, verbal, or written communication sent + to the Licensor or its representatives, including but not limited to + communication on electronic mailing lists, source code control systems, + and issue tracking systems that are managed by, or on behalf of, the + Licensor for the purpose of discussing and improving the Work, but + excluding communication that is conspicuously marked or otherwise + designated in writing by the copyright owner as "Not a Contribution." + + "Contributor" shall mean Licensor and any individual or Legal Entity + on behalf of whom a Contribution has been received by Licensor and + subsequently incorporated within the Work. + + 2. Grant of Copyright License. 
Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + copyright license to reproduce, prepare Derivative Works of, + publicly display, publicly perform, sublicense, and distribute the + Work and such Derivative Works in Source or Object form. + + 3. Grant of Patent License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + (except as stated in this section) patent license to make, have made, + use, offer to sell, sell, import, and otherwise transfer the Work, + where such license applies only to those patent claims licensable + by such Contributor that are necessarily infringed by their + Contribution(s) alone or by combination of their Contribution(s) + with the Work to which such Contribution(s) was submitted. If You + institute patent litigation against any entity (including a + cross-claim or counterclaim in a lawsuit) alleging that the Work + or a Contribution incorporated within the Work constitutes direct + or contributory patent infringement, then any patent licenses + granted to You under this License for that Work shall terminate + as of the date such litigation is filed. + + 4. Redistribution. 
You may reproduce and distribute copies of the + Work or Derivative Works thereof in any medium, with or without + modifications, and in Source or Object form, provided that You + meet the following conditions: + + (a) You must give any other recipients of the Work or + Derivative Works a copy of this License; and + + (b) You must cause any modified files to carry prominent notices + stating that You changed the files; and + + (c) You must retain, in the Source form of any Derivative Works + that You distribute, all copyright, patent, trademark, and + attribution notices from the Source form of the Work, + excluding those notices that do not pertain to any part of + the Derivative Works; and + + (d) If the Work includes a "NOTICE" text file as part of its + distribution, then any Derivative Works that You distribute must + include a readable copy of the attribution notices contained + within such NOTICE file, excluding those notices that do not + pertain to any part of the Derivative Works, in at least one + of the following places: within a NOTICE text file distributed + as part of the Derivative Works; within the Source form or + documentation, if provided along with the Derivative Works; or, + within a display generated by the Derivative Works, if and + wherever such third-party notices normally appear. The contents + of the NOTICE file are for informational purposes only and + do not modify the License. You may add Your own attribution + notices within Derivative Works that You distribute, alongside + or as an addendum to the NOTICE text from the Work, provided + that such additional attribution notices cannot be construed + as modifying the License. 
+ + You may add Your own copyright statement to Your modifications and + may provide additional or different license terms and conditions + for use, reproduction, or distribution of Your modifications, or + for any such Derivative Works as a whole, provided Your use, + reproduction, and distribution of the Work otherwise complies with + the conditions stated in this License. + + 5. Submission of Contributions. Unless You explicitly state otherwise, + any Contribution intentionally submitted for inclusion in the Work + by You to the Licensor shall be under the terms and conditions of + this License, without any additional terms or conditions. + Notwithstanding the above, nothing herein shall supersede or modify + the terms of any separate license agreement you may have executed + with Licensor regarding such Contributions. + + 6. Trademarks. This License does not grant permission to use the trade + names, trademarks, service marks, or product names of the Licensor, + except as required for reasonable and customary use in describing the + origin of the Work and reproducing the content of the NOTICE file. + + 7. Disclaimer of Warranty. Unless required by applicable law or + agreed to in writing, Licensor provides the Work (and each + Contributor provides its Contributions) on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or + implied, including, without limitation, any warranties or conditions + of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A + PARTICULAR PURPOSE. You are solely responsible for determining the + appropriateness of using or redistributing the Work and assume any + risks associated with Your exercise of permissions under this License. + + 8. Limitation of Liability. 
In no event and under no legal theory, + whether in tort (including negligence), contract, or otherwise, + unless required by applicable law (such as deliberate and grossly + negligent acts) or agreed to in writing, shall any Contributor be + liable to You for damages, including any direct, indirect, special, + incidental, or consequential damages of any character arising as a + result of this License or out of the use or inability to use the + Work (including but not limited to damages for loss of goodwill, + work stoppage, computer failure or malfunction, or any and all + other commercial damages or losses), even if such Contributor + has been advised of the possibility of such damages. + + 9. Accepting Warranty or Additional Liability. While redistributing + the Work or Derivative Works thereof, You may choose to offer, + and charge a fee for, acceptance of support, warranty, indemnity, + or other liability obligations and/or rights consistent with this + License. However, in accepting such obligations, You may act only + on Your own behalf and on Your sole responsibility, not on behalf + of any other Contributor, and only if You agree to indemnify, + defend, and hold each Contributor harmless for any liability + incurred by, or claims asserted against, such Contributor by reason + of your accepting any such warranty or additional liability. + + END OF TERMS AND CONDITIONS + + APPENDIX: How to apply the Apache License to your work. + + To apply the Apache License to your work, attach the following + boilerplate notice, with the fields enclosed by brackets "{}" + replaced with your own identifying information. (Don't include + the brackets!) The text should be enclosed in the appropriate + comment syntax for the file format. We also recommend that a + file or class name and description of purpose be included on the + same "printed page" as the copyright notice for easier + identification within third-party archives. + + Copyright 2018-2020 Amazon.com, Inc. 
or its affiliates. All Rights Reserved. + + Licensed under the Apache License, Version 2.0 (the "License"); + you may not use this file except in compliance with the License. + You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + + Unless required by applicable law or agreed to in writing, software + distributed under the License is distributed on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + See the License for the specific language governing permissions and + limitations under the License. \ No newline at end of file diff --git a/amplify/functions/fetchDocuments/node_modules/@aws-crypto/sha256-browser/node_modules/@smithy/is-array-buffer/README.md b/amplify/functions/fetchDocuments/node_modules/@aws-crypto/sha256-browser/node_modules/@smithy/is-array-buffer/README.md new file mode 100644 index 0000000..31853f2 --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@aws-crypto/sha256-browser/node_modules/@smithy/is-array-buffer/README.md @@ -0,0 +1,10 @@ +# @smithy/is-array-buffer + +[![NPM version](https://img.shields.io/npm/v/@smithy/is-array-buffer/latest.svg)](https://www.npmjs.com/package/@smithy/is-array-buffer) +[![NPM downloads](https://img.shields.io/npm/dm/@smithy/is-array-buffer.svg)](https://www.npmjs.com/package/@smithy/is-array-buffer) + +> An internal package + +## Usage + +You probably shouldn't, at least directly. 
diff --git a/amplify/functions/fetchDocuments/node_modules/@aws-crypto/sha256-browser/node_modules/@smithy/is-array-buffer/dist-cjs/index.js b/amplify/functions/fetchDocuments/node_modules/@aws-crypto/sha256-browser/node_modules/@smithy/is-array-buffer/dist-cjs/index.js new file mode 100644 index 0000000..5d792e7 --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@aws-crypto/sha256-browser/node_modules/@smithy/is-array-buffer/dist-cjs/index.js @@ -0,0 +1,32 @@ +var __defProp = Object.defineProperty; +var __getOwnPropDesc = Object.getOwnPropertyDescriptor; +var __getOwnPropNames = Object.getOwnPropertyNames; +var __hasOwnProp = Object.prototype.hasOwnProperty; +var __name = (target, value) => __defProp(target, "name", { value, configurable: true }); +var __export = (target, all) => { + for (var name in all) + __defProp(target, name, { get: all[name], enumerable: true }); +}; +var __copyProps = (to, from, except, desc) => { + if (from && typeof from === "object" || typeof from === "function") { + for (let key of __getOwnPropNames(from)) + if (!__hasOwnProp.call(to, key) && key !== except) + __defProp(to, key, { get: () => from[key], enumerable: !(desc = __getOwnPropDesc(from, key)) || desc.enumerable }); + } + return to; +}; +var __toCommonJS = (mod) => __copyProps(__defProp({}, "__esModule", { value: true }), mod); + +// src/index.ts +var src_exports = {}; +__export(src_exports, { + isArrayBuffer: () => isArrayBuffer +}); +module.exports = __toCommonJS(src_exports); +var isArrayBuffer = /* @__PURE__ */ __name((arg) => typeof ArrayBuffer === "function" && arg instanceof ArrayBuffer || Object.prototype.toString.call(arg) === "[object ArrayBuffer]", "isArrayBuffer"); +// Annotate the CommonJS export names for ESM import in node: + +0 && (module.exports = { + isArrayBuffer +}); + diff --git a/amplify/functions/fetchDocuments/node_modules/@aws-crypto/sha256-browser/node_modules/@smithy/is-array-buffer/dist-es/index.js 
b/amplify/functions/fetchDocuments/node_modules/@aws-crypto/sha256-browser/node_modules/@smithy/is-array-buffer/dist-es/index.js new file mode 100644 index 0000000..8096cca --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@aws-crypto/sha256-browser/node_modules/@smithy/is-array-buffer/dist-es/index.js @@ -0,0 +1,2 @@ +export const isArrayBuffer = (arg) => (typeof ArrayBuffer === "function" && arg instanceof ArrayBuffer) || + Object.prototype.toString.call(arg) === "[object ArrayBuffer]"; diff --git a/amplify/functions/fetchDocuments/node_modules/@aws-crypto/sha256-browser/node_modules/@smithy/is-array-buffer/dist-types/index.d.ts b/amplify/functions/fetchDocuments/node_modules/@aws-crypto/sha256-browser/node_modules/@smithy/is-array-buffer/dist-types/index.d.ts new file mode 100644 index 0000000..64f452e --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@aws-crypto/sha256-browser/node_modules/@smithy/is-array-buffer/dist-types/index.d.ts @@ -0,0 +1,4 @@ +/** + * @internal + */ +export declare const isArrayBuffer: (arg: any) => arg is ArrayBuffer; diff --git a/amplify/functions/fetchDocuments/node_modules/@aws-crypto/sha256-browser/node_modules/@smithy/is-array-buffer/dist-types/ts3.4/index.d.ts b/amplify/functions/fetchDocuments/node_modules/@aws-crypto/sha256-browser/node_modules/@smithy/is-array-buffer/dist-types/ts3.4/index.d.ts new file mode 100644 index 0000000..ca8fd6b --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@aws-crypto/sha256-browser/node_modules/@smithy/is-array-buffer/dist-types/ts3.4/index.d.ts @@ -0,0 +1,4 @@ +/** + * @internal + */ +export declare const isArrayBuffer: (arg: any) => arg is ArrayBuffer; diff --git a/amplify/functions/fetchDocuments/node_modules/@aws-crypto/sha256-browser/node_modules/@smithy/is-array-buffer/package.json b/amplify/functions/fetchDocuments/node_modules/@aws-crypto/sha256-browser/node_modules/@smithy/is-array-buffer/package.json new file mode 100644 index 
0000000..ed8affc --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@aws-crypto/sha256-browser/node_modules/@smithy/is-array-buffer/package.json @@ -0,0 +1,60 @@ +{ + "name": "@smithy/is-array-buffer", + "version": "2.2.0", + "description": "Provides a function for detecting if an argument is an ArrayBuffer", + "scripts": { + "build": "concurrently 'yarn:build:cjs' 'yarn:build:es' 'yarn:build:types && yarn build:types:downlevel'", + "build:cjs": "node ../../scripts/inline is-array-buffer", + "build:es": "yarn g:tsc -p tsconfig.es.json", + "build:types": "yarn g:tsc -p tsconfig.types.json", + "build:types:downlevel": "downlevel-dts dist-types dist-types/ts3.4", + "stage-release": "rimraf ./.release && yarn pack && mkdir ./.release && tar zxvf ./package.tgz --directory ./.release && rm ./package.tgz", + "clean": "rimraf ./dist-* && rimraf *.tsbuildinfo || exit 0", + "lint": "eslint -c ../../.eslintrc.js \"src/**/*.ts\"", + "format": "prettier --config ../../prettier.config.js --ignore-path ../.prettierignore --write \"**/*.{ts,md,json}\"", + "test": "yarn g:jest" + }, + "author": { + "name": "AWS SDK for JavaScript Team", + "url": "https://aws.amazon.com/javascript/" + }, + "license": "Apache-2.0", + "main": "./dist-cjs/index.js", + "module": "./dist-es/index.js", + "types": "./dist-types/index.d.ts", + "dependencies": { + "tslib": "^2.6.2" + }, + "engines": { + "node": ">=14.0.0" + }, + "typesVersions": { + "<4.0": { + "dist-types/*": [ + "dist-types/ts3.4/*" + ] + } + }, + "files": [ + "dist-*/**" + ], + "homepage": "https://github.com/awslabs/smithy-typescript/tree/main/packages/is-array-buffer", + "repository": { + "type": "git", + "url": "https://github.com/awslabs/smithy-typescript.git", + "directory": "packages/is-array-buffer" + }, + "devDependencies": { + "@tsconfig/recommended": "1.0.1", + "concurrently": "7.0.0", + "downlevel-dts": "0.10.1", + "rimraf": "3.0.2", + "typedoc": "0.23.23" + }, + "typedoc": { + "entryPoint": "src/index.ts" + }, + 
"publishConfig": { + "directory": ".release/package" + } +} \ No newline at end of file diff --git a/amplify/functions/fetchDocuments/node_modules/@aws-crypto/sha256-browser/node_modules/@smithy/util-buffer-from/LICENSE b/amplify/functions/fetchDocuments/node_modules/@aws-crypto/sha256-browser/node_modules/@smithy/util-buffer-from/LICENSE new file mode 100644 index 0000000..7b6491b --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@aws-crypto/sha256-browser/node_modules/@smithy/util-buffer-from/LICENSE @@ -0,0 +1,201 @@ +Apache License + Version 2.0, January 2004 + http://www.apache.org/licenses/ + + TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION + + 1. Definitions. + + "License" shall mean the terms and conditions for use, reproduction, + and distribution as defined by Sections 1 through 9 of this document. + + "Licensor" shall mean the copyright owner or entity authorized by + the copyright owner that is granting the License. + + "Legal Entity" shall mean the union of the acting entity and all + other entities that control, are controlled by, or are under common + control with that entity. For the purposes of this definition, + "control" means (i) the power, direct or indirect, to cause the + direction or management of such entity, whether by contract or + otherwise, or (ii) ownership of fifty percent (50%) or more of the + outstanding shares, or (iii) beneficial ownership of such entity. + + "You" (or "Your") shall mean an individual or Legal Entity + exercising permissions granted by this License. + + "Source" form shall mean the preferred form for making modifications, + including but not limited to software source code, documentation + source, and configuration files. + + "Object" form shall mean any form resulting from mechanical + transformation or translation of a Source form, including but + not limited to compiled object code, generated documentation, + and conversions to other media types. 
+ + "Work" shall mean the work of authorship, whether in Source or + Object form, made available under the License, as indicated by a + copyright notice that is included in or attached to the work + (an example is provided in the Appendix below). + + "Derivative Works" shall mean any work, whether in Source or Object + form, that is based on (or derived from) the Work and for which the + editorial revisions, annotations, elaborations, or other modifications + represent, as a whole, an original work of authorship. For the purposes + of this License, Derivative Works shall not include works that remain + separable from, or merely link (or bind by name) to the interfaces of, + the Work and Derivative Works thereof. + + "Contribution" shall mean any work of authorship, including + the original version of the Work and any modifications or additions + to that Work or Derivative Works thereof, that is intentionally + submitted to Licensor for inclusion in the Work by the copyright owner + or by an individual or Legal Entity authorized to submit on behalf of + the copyright owner. For the purposes of this definition, "submitted" + means any form of electronic, verbal, or written communication sent + to the Licensor or its representatives, including but not limited to + communication on electronic mailing lists, source code control systems, + and issue tracking systems that are managed by, or on behalf of, the + Licensor for the purpose of discussing and improving the Work, but + excluding communication that is conspicuously marked or otherwise + designated in writing by the copyright owner as "Not a Contribution." + + "Contributor" shall mean Licensor and any individual or Legal Entity + on behalf of whom a Contribution has been received by Licensor and + subsequently incorporated within the Work. + + 2. Grant of Copyright License. 
Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + copyright license to reproduce, prepare Derivative Works of, + publicly display, publicly perform, sublicense, and distribute the + Work and such Derivative Works in Source or Object form. + + 3. Grant of Patent License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + (except as stated in this section) patent license to make, have made, + use, offer to sell, sell, import, and otherwise transfer the Work, + where such license applies only to those patent claims licensable + by such Contributor that are necessarily infringed by their + Contribution(s) alone or by combination of their Contribution(s) + with the Work to which such Contribution(s) was submitted. If You + institute patent litigation against any entity (including a + cross-claim or counterclaim in a lawsuit) alleging that the Work + or a Contribution incorporated within the Work constitutes direct + or contributory patent infringement, then any patent licenses + granted to You under this License for that Work shall terminate + as of the date such litigation is filed. + + 4. Redistribution. 
You may reproduce and distribute copies of the + Work or Derivative Works thereof in any medium, with or without + modifications, and in Source or Object form, provided that You + meet the following conditions: + + (a) You must give any other recipients of the Work or + Derivative Works a copy of this License; and + + (b) You must cause any modified files to carry prominent notices + stating that You changed the files; and + + (c) You must retain, in the Source form of any Derivative Works + that You distribute, all copyright, patent, trademark, and + attribution notices from the Source form of the Work, + excluding those notices that do not pertain to any part of + the Derivative Works; and + + (d) If the Work includes a "NOTICE" text file as part of its + distribution, then any Derivative Works that You distribute must + include a readable copy of the attribution notices contained + within such NOTICE file, excluding those notices that do not + pertain to any part of the Derivative Works, in at least one + of the following places: within a NOTICE text file distributed + as part of the Derivative Works; within the Source form or + documentation, if provided along with the Derivative Works; or, + within a display generated by the Derivative Works, if and + wherever such third-party notices normally appear. The contents + of the NOTICE file are for informational purposes only and + do not modify the License. You may add Your own attribution + notices within Derivative Works that You distribute, alongside + or as an addendum to the NOTICE text from the Work, provided + that such additional attribution notices cannot be construed + as modifying the License. 
+ + You may add Your own copyright statement to Your modifications and + may provide additional or different license terms and conditions + for use, reproduction, or distribution of Your modifications, or + for any such Derivative Works as a whole, provided Your use, + reproduction, and distribution of the Work otherwise complies with + the conditions stated in this License. + + 5. Submission of Contributions. Unless You explicitly state otherwise, + any Contribution intentionally submitted for inclusion in the Work + by You to the Licensor shall be under the terms and conditions of + this License, without any additional terms or conditions. + Notwithstanding the above, nothing herein shall supersede or modify + the terms of any separate license agreement you may have executed + with Licensor regarding such Contributions. + + 6. Trademarks. This License does not grant permission to use the trade + names, trademarks, service marks, or product names of the Licensor, + except as required for reasonable and customary use in describing the + origin of the Work and reproducing the content of the NOTICE file. + + 7. Disclaimer of Warranty. Unless required by applicable law or + agreed to in writing, Licensor provides the Work (and each + Contributor provides its Contributions) on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or + implied, including, without limitation, any warranties or conditions + of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A + PARTICULAR PURPOSE. You are solely responsible for determining the + appropriateness of using or redistributing the Work and assume any + risks associated with Your exercise of permissions under this License. + + 8. Limitation of Liability. 
In no event and under no legal theory, + whether in tort (including negligence), contract, or otherwise, + unless required by applicable law (such as deliberate and grossly + negligent acts) or agreed to in writing, shall any Contributor be + liable to You for damages, including any direct, indirect, special, + incidental, or consequential damages of any character arising as a + result of this License or out of the use or inability to use the + Work (including but not limited to damages for loss of goodwill, + work stoppage, computer failure or malfunction, or any and all + other commercial damages or losses), even if such Contributor + has been advised of the possibility of such damages. + + 9. Accepting Warranty or Additional Liability. While redistributing + the Work or Derivative Works thereof, You may choose to offer, + and charge a fee for, acceptance of support, warranty, indemnity, + or other liability obligations and/or rights consistent with this + License. However, in accepting such obligations, You may act only + on Your own behalf and on Your sole responsibility, not on behalf + of any other Contributor, and only if You agree to indemnify, + defend, and hold each Contributor harmless for any liability + incurred by, or claims asserted against, such Contributor by reason + of your accepting any such warranty or additional liability. + + END OF TERMS AND CONDITIONS + + APPENDIX: How to apply the Apache License to your work. + + To apply the Apache License to your work, attach the following + boilerplate notice, with the fields enclosed by brackets "{}" + replaced with your own identifying information. (Don't include + the brackets!) The text should be enclosed in the appropriate + comment syntax for the file format. We also recommend that a + file or class name and description of purpose be included on the + same "printed page" as the copyright notice for easier + identification within third-party archives. + + Copyright 2018-2020 Amazon.com, Inc. 
or its affiliates. All Rights Reserved. + + Licensed under the Apache License, Version 2.0 (the "License"); + you may not use this file except in compliance with the License. + You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + + Unless required by applicable law or agreed to in writing, software + distributed under the License is distributed on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + See the License for the specific language governing permissions and + limitations under the License. \ No newline at end of file diff --git a/amplify/functions/fetchDocuments/node_modules/@aws-crypto/sha256-browser/node_modules/@smithy/util-buffer-from/README.md b/amplify/functions/fetchDocuments/node_modules/@aws-crypto/sha256-browser/node_modules/@smithy/util-buffer-from/README.md new file mode 100644 index 0000000..c896b04 --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@aws-crypto/sha256-browser/node_modules/@smithy/util-buffer-from/README.md @@ -0,0 +1,10 @@ +# @smithy/util-buffer-from + +[![NPM version](https://img.shields.io/npm/v/@smithy/util-buffer-from/latest.svg)](https://www.npmjs.com/package/@smithy/util-buffer-from) +[![NPM downloads](https://img.shields.io/npm/dm/@smithy/util-buffer-from.svg)](https://www.npmjs.com/package/@smithy/util-buffer-from) + +> An internal package + +## Usage + +You probably shouldn't, at least directly. 
diff --git a/amplify/functions/fetchDocuments/node_modules/@aws-crypto/sha256-browser/node_modules/@smithy/util-buffer-from/dist-cjs/index.js b/amplify/functions/fetchDocuments/node_modules/@aws-crypto/sha256-browser/node_modules/@smithy/util-buffer-from/dist-cjs/index.js new file mode 100644 index 0000000..c6738d9 --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@aws-crypto/sha256-browser/node_modules/@smithy/util-buffer-from/dist-cjs/index.js @@ -0,0 +1,47 @@ +var __defProp = Object.defineProperty; +var __getOwnPropDesc = Object.getOwnPropertyDescriptor; +var __getOwnPropNames = Object.getOwnPropertyNames; +var __hasOwnProp = Object.prototype.hasOwnProperty; +var __name = (target, value) => __defProp(target, "name", { value, configurable: true }); +var __export = (target, all) => { + for (var name in all) + __defProp(target, name, { get: all[name], enumerable: true }); +}; +var __copyProps = (to, from, except, desc) => { + if (from && typeof from === "object" || typeof from === "function") { + for (let key of __getOwnPropNames(from)) + if (!__hasOwnProp.call(to, key) && key !== except) + __defProp(to, key, { get: () => from[key], enumerable: !(desc = __getOwnPropDesc(from, key)) || desc.enumerable }); + } + return to; +}; +var __toCommonJS = (mod) => __copyProps(__defProp({}, "__esModule", { value: true }), mod); + +// src/index.ts +var src_exports = {}; +__export(src_exports, { + fromArrayBuffer: () => fromArrayBuffer, + fromString: () => fromString +}); +module.exports = __toCommonJS(src_exports); +var import_is_array_buffer = require("@smithy/is-array-buffer"); +var import_buffer = require("buffer"); +var fromArrayBuffer = /* @__PURE__ */ __name((input, offset = 0, length = input.byteLength - offset) => { + if (!(0, import_is_array_buffer.isArrayBuffer)(input)) { + throw new TypeError(`The "input" argument must be ArrayBuffer. 
Received type ${typeof input} (${input})`); + } + return import_buffer.Buffer.from(input, offset, length); +}, "fromArrayBuffer"); +var fromString = /* @__PURE__ */ __name((input, encoding) => { + if (typeof input !== "string") { + throw new TypeError(`The "input" argument must be of type string. Received type ${typeof input} (${input})`); + } + return encoding ? import_buffer.Buffer.from(input, encoding) : import_buffer.Buffer.from(input); +}, "fromString"); +// Annotate the CommonJS export names for ESM import in node: + +0 && (module.exports = { + fromArrayBuffer, + fromString +}); + diff --git a/amplify/functions/fetchDocuments/node_modules/@aws-crypto/sha256-browser/node_modules/@smithy/util-buffer-from/dist-es/index.js b/amplify/functions/fetchDocuments/node_modules/@aws-crypto/sha256-browser/node_modules/@smithy/util-buffer-from/dist-es/index.js new file mode 100644 index 0000000..718f831 --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@aws-crypto/sha256-browser/node_modules/@smithy/util-buffer-from/dist-es/index.js @@ -0,0 +1,14 @@ +import { isArrayBuffer } from "@smithy/is-array-buffer"; +import { Buffer } from "buffer"; +export const fromArrayBuffer = (input, offset = 0, length = input.byteLength - offset) => { + if (!isArrayBuffer(input)) { + throw new TypeError(`The "input" argument must be ArrayBuffer. Received type ${typeof input} (${input})`); + } + return Buffer.from(input, offset, length); +}; +export const fromString = (input, encoding) => { + if (typeof input !== "string") { + throw new TypeError(`The "input" argument must be of type string. Received type ${typeof input} (${input})`); + } + return encoding ? 
Buffer.from(input, encoding) : Buffer.from(input); +}; diff --git a/amplify/functions/fetchDocuments/node_modules/@aws-crypto/sha256-browser/node_modules/@smithy/util-buffer-from/dist-types/index.d.ts b/amplify/functions/fetchDocuments/node_modules/@aws-crypto/sha256-browser/node_modules/@smithy/util-buffer-from/dist-types/index.d.ts new file mode 100644 index 0000000..a523134 --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@aws-crypto/sha256-browser/node_modules/@smithy/util-buffer-from/dist-types/index.d.ts @@ -0,0 +1,13 @@ +import { Buffer } from "buffer"; +/** + * @internal + */ +export declare const fromArrayBuffer: (input: ArrayBuffer, offset?: number, length?: number) => Buffer; +/** + * @internal + */ +export type StringEncoding = "ascii" | "utf8" | "utf16le" | "ucs2" | "base64" | "latin1" | "binary" | "hex"; +/** + * @internal + */ +export declare const fromString: (input: string, encoding?: StringEncoding) => Buffer; diff --git a/amplify/functions/fetchDocuments/node_modules/@aws-crypto/sha256-browser/node_modules/@smithy/util-buffer-from/dist-types/ts3.4/index.d.ts b/amplify/functions/fetchDocuments/node_modules/@aws-crypto/sha256-browser/node_modules/@smithy/util-buffer-from/dist-types/ts3.4/index.d.ts new file mode 100644 index 0000000..f9173f7 --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@aws-crypto/sha256-browser/node_modules/@smithy/util-buffer-from/dist-types/ts3.4/index.d.ts @@ -0,0 +1,13 @@ +import { Buffer } from "buffer"; +/** + * @internal + */ +export declare const fromArrayBuffer: (input: ArrayBuffer, offset?: number, length?: number) => Buffer; +/** + * @internal + */ +export type StringEncoding = "ascii" | "utf8" | "utf16le" | "ucs2" | "base64" | "latin1" | "binary" | "hex"; +/** + * @internal + */ +export declare const fromString: (input: string, encoding?: StringEncoding) => Buffer; diff --git 
a/amplify/functions/fetchDocuments/node_modules/@aws-crypto/sha256-browser/node_modules/@smithy/util-buffer-from/package.json b/amplify/functions/fetchDocuments/node_modules/@aws-crypto/sha256-browser/node_modules/@smithy/util-buffer-from/package.json new file mode 100644 index 0000000..a12e51c --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@aws-crypto/sha256-browser/node_modules/@smithy/util-buffer-from/package.json @@ -0,0 +1,61 @@ +{ + "name": "@smithy/util-buffer-from", + "version": "2.2.0", + "scripts": { + "build": "concurrently 'yarn:build:cjs' 'yarn:build:es' 'yarn:build:types && yarn build:types:downlevel'", + "build:cjs": "node ../../scripts/inline util-buffer-from", + "build:es": "yarn g:tsc -p tsconfig.es.json", + "build:types": "yarn g:tsc -p tsconfig.types.json", + "build:types:downlevel": "downlevel-dts dist-types dist-types/ts3.4", + "stage-release": "rimraf ./.release && yarn pack && mkdir ./.release && tar zxvf ./package.tgz --directory ./.release && rm ./package.tgz", + "clean": "rimraf ./dist-* && rimraf *.tsbuildinfo || exit 0", + "lint": "eslint -c ../../.eslintrc.js \"src/**/*.ts\"", + "format": "prettier --config ../../prettier.config.js --ignore-path ../.prettierignore --write \"**/*.{ts,md,json}\"", + "test": "yarn g:jest" + }, + "author": { + "name": "AWS SDK for JavaScript Team", + "url": "https://aws.amazon.com/javascript/" + }, + "license": "Apache-2.0", + "dependencies": { + "@smithy/is-array-buffer": "^2.2.0", + "tslib": "^2.6.2" + }, + "devDependencies": { + "@tsconfig/recommended": "1.0.1", + "@types/node": "^14.14.31", + "concurrently": "7.0.0", + "downlevel-dts": "0.10.1", + "rimraf": "3.0.2", + "typedoc": "0.23.23" + }, + "main": "./dist-cjs/index.js", + "module": "./dist-es/index.js", + "types": "./dist-types/index.d.ts", + "engines": { + "node": ">=14.0.0" + }, + "typesVersions": { + "<4.0": { + "dist-types/*": [ + "dist-types/ts3.4/*" + ] + } + }, + "files": [ + "dist-*/**" + ], + "homepage": 
"https://github.com/awslabs/smithy-typescript/tree/main/packages/util-buffer-from", + "repository": { + "type": "git", + "url": "https://github.com/awslabs/smithy-typescript.git", + "directory": "packages/util-buffer-from" + }, + "typedoc": { + "entryPoint": "src/index.ts" + }, + "publishConfig": { + "directory": ".release/package" + } +} \ No newline at end of file diff --git a/amplify/functions/fetchDocuments/node_modules/@aws-crypto/sha256-browser/node_modules/@smithy/util-utf8/LICENSE b/amplify/functions/fetchDocuments/node_modules/@aws-crypto/sha256-browser/node_modules/@smithy/util-utf8/LICENSE new file mode 100644 index 0000000..7b6491b --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@aws-crypto/sha256-browser/node_modules/@smithy/util-utf8/LICENSE @@ -0,0 +1,201 @@ +Apache License + Version 2.0, January 2004 + http://www.apache.org/licenses/ + + TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION + + 1. Definitions. + + "License" shall mean the terms and conditions for use, reproduction, + and distribution as defined by Sections 1 through 9 of this document. + + "Licensor" shall mean the copyright owner or entity authorized by + the copyright owner that is granting the License. + + "Legal Entity" shall mean the union of the acting entity and all + other entities that control, are controlled by, or are under common + control with that entity. For the purposes of this definition, + "control" means (i) the power, direct or indirect, to cause the + direction or management of such entity, whether by contract or + otherwise, or (ii) ownership of fifty percent (50%) or more of the + outstanding shares, or (iii) beneficial ownership of such entity. + + "You" (or "Your") shall mean an individual or Legal Entity + exercising permissions granted by this License. + + "Source" form shall mean the preferred form for making modifications, + including but not limited to software source code, documentation + source, and configuration files. 
+ + "Object" form shall mean any form resulting from mechanical + transformation or translation of a Source form, including but + not limited to compiled object code, generated documentation, + and conversions to other media types. + + "Work" shall mean the work of authorship, whether in Source or + Object form, made available under the License, as indicated by a + copyright notice that is included in or attached to the work + (an example is provided in the Appendix below). + + "Derivative Works" shall mean any work, whether in Source or Object + form, that is based on (or derived from) the Work and for which the + editorial revisions, annotations, elaborations, or other modifications + represent, as a whole, an original work of authorship. For the purposes + of this License, Derivative Works shall not include works that remain + separable from, or merely link (or bind by name) to the interfaces of, + the Work and Derivative Works thereof. + + "Contribution" shall mean any work of authorship, including + the original version of the Work and any modifications or additions + to that Work or Derivative Works thereof, that is intentionally + submitted to Licensor for inclusion in the Work by the copyright owner + or by an individual or Legal Entity authorized to submit on behalf of + the copyright owner. For the purposes of this definition, "submitted" + means any form of electronic, verbal, or written communication sent + to the Licensor or its representatives, including but not limited to + communication on electronic mailing lists, source code control systems, + and issue tracking systems that are managed by, or on behalf of, the + Licensor for the purpose of discussing and improving the Work, but + excluding communication that is conspicuously marked or otherwise + designated in writing by the copyright owner as "Not a Contribution." 
+ + "Contributor" shall mean Licensor and any individual or Legal Entity + on behalf of whom a Contribution has been received by Licensor and + subsequently incorporated within the Work. + + 2. Grant of Copyright License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + copyright license to reproduce, prepare Derivative Works of, + publicly display, publicly perform, sublicense, and distribute the + Work and such Derivative Works in Source or Object form. + + 3. Grant of Patent License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + (except as stated in this section) patent license to make, have made, + use, offer to sell, sell, import, and otherwise transfer the Work, + where such license applies only to those patent claims licensable + by such Contributor that are necessarily infringed by their + Contribution(s) alone or by combination of their Contribution(s) + with the Work to which such Contribution(s) was submitted. If You + institute patent litigation against any entity (including a + cross-claim or counterclaim in a lawsuit) alleging that the Work + or a Contribution incorporated within the Work constitutes direct + or contributory patent infringement, then any patent licenses + granted to You under this License for that Work shall terminate + as of the date such litigation is filed. + + 4. Redistribution. 
You may reproduce and distribute copies of the + Work or Derivative Works thereof in any medium, with or without + modifications, and in Source or Object form, provided that You + meet the following conditions: + + (a) You must give any other recipients of the Work or + Derivative Works a copy of this License; and + + (b) You must cause any modified files to carry prominent notices + stating that You changed the files; and + + (c) You must retain, in the Source form of any Derivative Works + that You distribute, all copyright, patent, trademark, and + attribution notices from the Source form of the Work, + excluding those notices that do not pertain to any part of + the Derivative Works; and + + (d) If the Work includes a "NOTICE" text file as part of its + distribution, then any Derivative Works that You distribute must + include a readable copy of the attribution notices contained + within such NOTICE file, excluding those notices that do not + pertain to any part of the Derivative Works, in at least one + of the following places: within a NOTICE text file distributed + as part of the Derivative Works; within the Source form or + documentation, if provided along with the Derivative Works; or, + within a display generated by the Derivative Works, if and + wherever such third-party notices normally appear. The contents + of the NOTICE file are for informational purposes only and + do not modify the License. You may add Your own attribution + notices within Derivative Works that You distribute, alongside + or as an addendum to the NOTICE text from the Work, provided + that such additional attribution notices cannot be construed + as modifying the License. 
+ + You may add Your own copyright statement to Your modifications and + may provide additional or different license terms and conditions + for use, reproduction, or distribution of Your modifications, or + for any such Derivative Works as a whole, provided Your use, + reproduction, and distribution of the Work otherwise complies with + the conditions stated in this License. + + 5. Submission of Contributions. Unless You explicitly state otherwise, + any Contribution intentionally submitted for inclusion in the Work + by You to the Licensor shall be under the terms and conditions of + this License, without any additional terms or conditions. + Notwithstanding the above, nothing herein shall supersede or modify + the terms of any separate license agreement you may have executed + with Licensor regarding such Contributions. + + 6. Trademarks. This License does not grant permission to use the trade + names, trademarks, service marks, or product names of the Licensor, + except as required for reasonable and customary use in describing the + origin of the Work and reproducing the content of the NOTICE file. + + 7. Disclaimer of Warranty. Unless required by applicable law or + agreed to in writing, Licensor provides the Work (and each + Contributor provides its Contributions) on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or + implied, including, without limitation, any warranties or conditions + of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A + PARTICULAR PURPOSE. You are solely responsible for determining the + appropriateness of using or redistributing the Work and assume any + risks associated with Your exercise of permissions under this License. + + 8. Limitation of Liability. 
In no event and under no legal theory, + whether in tort (including negligence), contract, or otherwise, + unless required by applicable law (such as deliberate and grossly + negligent acts) or agreed to in writing, shall any Contributor be + liable to You for damages, including any direct, indirect, special, + incidental, or consequential damages of any character arising as a + result of this License or out of the use or inability to use the + Work (including but not limited to damages for loss of goodwill, + work stoppage, computer failure or malfunction, or any and all + other commercial damages or losses), even if such Contributor + has been advised of the possibility of such damages. + + 9. Accepting Warranty or Additional Liability. While redistributing + the Work or Derivative Works thereof, You may choose to offer, + and charge a fee for, acceptance of support, warranty, indemnity, + or other liability obligations and/or rights consistent with this + License. However, in accepting such obligations, You may act only + on Your own behalf and on Your sole responsibility, not on behalf + of any other Contributor, and only if You agree to indemnify, + defend, and hold each Contributor harmless for any liability + incurred by, or claims asserted against, such Contributor by reason + of your accepting any such warranty or additional liability. + + END OF TERMS AND CONDITIONS + + APPENDIX: How to apply the Apache License to your work. + + To apply the Apache License to your work, attach the following + boilerplate notice, with the fields enclosed by brackets "{}" + replaced with your own identifying information. (Don't include + the brackets!) The text should be enclosed in the appropriate + comment syntax for the file format. We also recommend that a + file or class name and description of purpose be included on the + same "printed page" as the copyright notice for easier + identification within third-party archives. + + Copyright 2018-2020 Amazon.com, Inc. 
or its affiliates. All Rights Reserved. + + Licensed under the Apache License, Version 2.0 (the "License"); + you may not use this file except in compliance with the License. + You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + + Unless required by applicable law or agreed to in writing, software + distributed under the License is distributed on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + See the License for the specific language governing permissions and + limitations under the License. \ No newline at end of file diff --git a/amplify/functions/fetchDocuments/node_modules/@aws-crypto/sha256-browser/node_modules/@smithy/util-utf8/README.md b/amplify/functions/fetchDocuments/node_modules/@aws-crypto/sha256-browser/node_modules/@smithy/util-utf8/README.md new file mode 100644 index 0000000..fc5db6d --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@aws-crypto/sha256-browser/node_modules/@smithy/util-utf8/README.md @@ -0,0 +1,4 @@ +# @smithy/util-utf8 + +[![NPM version](https://img.shields.io/npm/v/@smithy/util-utf8/latest.svg)](https://www.npmjs.com/package/@smithy/util-utf8) +[![NPM downloads](https://img.shields.io/npm/dm/@smithy/util-utf8.svg)](https://www.npmjs.com/package/@smithy/util-utf8) diff --git a/amplify/functions/fetchDocuments/node_modules/@aws-crypto/sha256-browser/node_modules/@smithy/util-utf8/dist-cjs/fromUtf8.browser.js b/amplify/functions/fetchDocuments/node_modules/@aws-crypto/sha256-browser/node_modules/@smithy/util-utf8/dist-cjs/fromUtf8.browser.js new file mode 100644 index 0000000..532e610 --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@aws-crypto/sha256-browser/node_modules/@smithy/util-utf8/dist-cjs/fromUtf8.browser.js @@ -0,0 +1 @@ +module.exports = require("./index.js"); \ No newline at end of file diff --git 
a/amplify/functions/fetchDocuments/node_modules/@aws-crypto/sha256-browser/node_modules/@smithy/util-utf8/dist-cjs/fromUtf8.js b/amplify/functions/fetchDocuments/node_modules/@aws-crypto/sha256-browser/node_modules/@smithy/util-utf8/dist-cjs/fromUtf8.js new file mode 100644 index 0000000..532e610 --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@aws-crypto/sha256-browser/node_modules/@smithy/util-utf8/dist-cjs/fromUtf8.js @@ -0,0 +1 @@ +module.exports = require("./index.js"); \ No newline at end of file diff --git a/amplify/functions/fetchDocuments/node_modules/@aws-crypto/sha256-browser/node_modules/@smithy/util-utf8/dist-cjs/index.js b/amplify/functions/fetchDocuments/node_modules/@aws-crypto/sha256-browser/node_modules/@smithy/util-utf8/dist-cjs/index.js new file mode 100644 index 0000000..0b22680 --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@aws-crypto/sha256-browser/node_modules/@smithy/util-utf8/dist-cjs/index.js @@ -0,0 +1,65 @@ +var __defProp = Object.defineProperty; +var __getOwnPropDesc = Object.getOwnPropertyDescriptor; +var __getOwnPropNames = Object.getOwnPropertyNames; +var __hasOwnProp = Object.prototype.hasOwnProperty; +var __name = (target, value) => __defProp(target, "name", { value, configurable: true }); +var __export = (target, all) => { + for (var name in all) + __defProp(target, name, { get: all[name], enumerable: true }); +}; +var __copyProps = (to, from, except, desc) => { + if (from && typeof from === "object" || typeof from === "function") { + for (let key of __getOwnPropNames(from)) + if (!__hasOwnProp.call(to, key) && key !== except) + __defProp(to, key, { get: () => from[key], enumerable: !(desc = __getOwnPropDesc(from, key)) || desc.enumerable }); + } + return to; +}; +var __toCommonJS = (mod) => __copyProps(__defProp({}, "__esModule", { value: true }), mod); + +// src/index.ts +var src_exports = {}; +__export(src_exports, { + fromUtf8: () => fromUtf8, + toUint8Array: () => toUint8Array, + toUtf8: 
() => toUtf8 +}); +module.exports = __toCommonJS(src_exports); + +// src/fromUtf8.ts +var import_util_buffer_from = require("@smithy/util-buffer-from"); +var fromUtf8 = /* @__PURE__ */ __name((input) => { + const buf = (0, import_util_buffer_from.fromString)(input, "utf8"); + return new Uint8Array(buf.buffer, buf.byteOffset, buf.byteLength / Uint8Array.BYTES_PER_ELEMENT); +}, "fromUtf8"); + +// src/toUint8Array.ts +var toUint8Array = /* @__PURE__ */ __name((data) => { + if (typeof data === "string") { + return fromUtf8(data); + } + if (ArrayBuffer.isView(data)) { + return new Uint8Array(data.buffer, data.byteOffset, data.byteLength / Uint8Array.BYTES_PER_ELEMENT); + } + return new Uint8Array(data); +}, "toUint8Array"); + +// src/toUtf8.ts + +var toUtf8 = /* @__PURE__ */ __name((input) => { + if (typeof input === "string") { + return input; + } + if (typeof input !== "object" || typeof input.byteOffset !== "number" || typeof input.byteLength !== "number") { + throw new Error("@smithy/util-utf8: toUtf8 encoder function only accepts string | Uint8Array."); + } + return (0, import_util_buffer_from.fromArrayBuffer)(input.buffer, input.byteOffset, input.byteLength).toString("utf8"); +}, "toUtf8"); +// Annotate the CommonJS export names for ESM import in node: + +0 && (module.exports = { + fromUtf8, + toUint8Array, + toUtf8 +}); + diff --git a/amplify/functions/fetchDocuments/node_modules/@aws-crypto/sha256-browser/node_modules/@smithy/util-utf8/dist-cjs/toUint8Array.js b/amplify/functions/fetchDocuments/node_modules/@aws-crypto/sha256-browser/node_modules/@smithy/util-utf8/dist-cjs/toUint8Array.js new file mode 100644 index 0000000..532e610 --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@aws-crypto/sha256-browser/node_modules/@smithy/util-utf8/dist-cjs/toUint8Array.js @@ -0,0 +1 @@ +module.exports = require("./index.js"); \ No newline at end of file diff --git 
a/amplify/functions/fetchDocuments/node_modules/@aws-crypto/sha256-browser/node_modules/@smithy/util-utf8/dist-cjs/toUtf8.browser.js b/amplify/functions/fetchDocuments/node_modules/@aws-crypto/sha256-browser/node_modules/@smithy/util-utf8/dist-cjs/toUtf8.browser.js new file mode 100644 index 0000000..532e610 --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@aws-crypto/sha256-browser/node_modules/@smithy/util-utf8/dist-cjs/toUtf8.browser.js @@ -0,0 +1 @@ +module.exports = require("./index.js"); \ No newline at end of file diff --git a/amplify/functions/fetchDocuments/node_modules/@aws-crypto/sha256-browser/node_modules/@smithy/util-utf8/dist-cjs/toUtf8.js b/amplify/functions/fetchDocuments/node_modules/@aws-crypto/sha256-browser/node_modules/@smithy/util-utf8/dist-cjs/toUtf8.js new file mode 100644 index 0000000..532e610 --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@aws-crypto/sha256-browser/node_modules/@smithy/util-utf8/dist-cjs/toUtf8.js @@ -0,0 +1 @@ +module.exports = require("./index.js"); \ No newline at end of file diff --git a/amplify/functions/fetchDocuments/node_modules/@aws-crypto/sha256-browser/node_modules/@smithy/util-utf8/dist-es/fromUtf8.browser.js b/amplify/functions/fetchDocuments/node_modules/@aws-crypto/sha256-browser/node_modules/@smithy/util-utf8/dist-es/fromUtf8.browser.js new file mode 100644 index 0000000..7344190 --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@aws-crypto/sha256-browser/node_modules/@smithy/util-utf8/dist-es/fromUtf8.browser.js @@ -0,0 +1 @@ +export const fromUtf8 = (input) => new TextEncoder().encode(input); diff --git a/amplify/functions/fetchDocuments/node_modules/@aws-crypto/sha256-browser/node_modules/@smithy/util-utf8/dist-es/fromUtf8.js b/amplify/functions/fetchDocuments/node_modules/@aws-crypto/sha256-browser/node_modules/@smithy/util-utf8/dist-es/fromUtf8.js new file mode 100644 index 0000000..6dc438b --- /dev/null +++ 
b/amplify/functions/fetchDocuments/node_modules/@aws-crypto/sha256-browser/node_modules/@smithy/util-utf8/dist-es/fromUtf8.js @@ -0,0 +1,5 @@ +import { fromString } from "@smithy/util-buffer-from"; +export const fromUtf8 = (input) => { + const buf = fromString(input, "utf8"); + return new Uint8Array(buf.buffer, buf.byteOffset, buf.byteLength / Uint8Array.BYTES_PER_ELEMENT); +}; diff --git a/amplify/functions/fetchDocuments/node_modules/@aws-crypto/sha256-browser/node_modules/@smithy/util-utf8/dist-es/index.js b/amplify/functions/fetchDocuments/node_modules/@aws-crypto/sha256-browser/node_modules/@smithy/util-utf8/dist-es/index.js new file mode 100644 index 0000000..00ba465 --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@aws-crypto/sha256-browser/node_modules/@smithy/util-utf8/dist-es/index.js @@ -0,0 +1,3 @@ +export * from "./fromUtf8"; +export * from "./toUint8Array"; +export * from "./toUtf8"; diff --git a/amplify/functions/fetchDocuments/node_modules/@aws-crypto/sha256-browser/node_modules/@smithy/util-utf8/dist-es/toUint8Array.js b/amplify/functions/fetchDocuments/node_modules/@aws-crypto/sha256-browser/node_modules/@smithy/util-utf8/dist-es/toUint8Array.js new file mode 100644 index 0000000..2cd36f7 --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@aws-crypto/sha256-browser/node_modules/@smithy/util-utf8/dist-es/toUint8Array.js @@ -0,0 +1,10 @@ +import { fromUtf8 } from "./fromUtf8"; +export const toUint8Array = (data) => { + if (typeof data === "string") { + return fromUtf8(data); + } + if (ArrayBuffer.isView(data)) { + return new Uint8Array(data.buffer, data.byteOffset, data.byteLength / Uint8Array.BYTES_PER_ELEMENT); + } + return new Uint8Array(data); +}; diff --git a/amplify/functions/fetchDocuments/node_modules/@aws-crypto/sha256-browser/node_modules/@smithy/util-utf8/dist-es/toUtf8.browser.js 
b/amplify/functions/fetchDocuments/node_modules/@aws-crypto/sha256-browser/node_modules/@smithy/util-utf8/dist-es/toUtf8.browser.js new file mode 100644 index 0000000..c292127 --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@aws-crypto/sha256-browser/node_modules/@smithy/util-utf8/dist-es/toUtf8.browser.js @@ -0,0 +1,9 @@ +export const toUtf8 = (input) => { + if (typeof input === "string") { + return input; + } + if (typeof input !== "object" || typeof input.byteOffset !== "number" || typeof input.byteLength !== "number") { + throw new Error("@smithy/util-utf8: toUtf8 encoder function only accepts string | Uint8Array."); + } + return new TextDecoder("utf-8").decode(input); +}; diff --git a/amplify/functions/fetchDocuments/node_modules/@aws-crypto/sha256-browser/node_modules/@smithy/util-utf8/dist-es/toUtf8.js b/amplify/functions/fetchDocuments/node_modules/@aws-crypto/sha256-browser/node_modules/@smithy/util-utf8/dist-es/toUtf8.js new file mode 100644 index 0000000..7be8745 --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@aws-crypto/sha256-browser/node_modules/@smithy/util-utf8/dist-es/toUtf8.js @@ -0,0 +1,10 @@ +import { fromArrayBuffer } from "@smithy/util-buffer-from"; +export const toUtf8 = (input) => { + if (typeof input === "string") { + return input; + } + if (typeof input !== "object" || typeof input.byteOffset !== "number" || typeof input.byteLength !== "number") { + throw new Error("@smithy/util-utf8: toUtf8 encoder function only accepts string | Uint8Array."); + } + return fromArrayBuffer(input.buffer, input.byteOffset, input.byteLength).toString("utf8"); +}; diff --git a/amplify/functions/fetchDocuments/node_modules/@aws-crypto/sha256-browser/node_modules/@smithy/util-utf8/dist-types/fromUtf8.browser.d.ts b/amplify/functions/fetchDocuments/node_modules/@aws-crypto/sha256-browser/node_modules/@smithy/util-utf8/dist-types/fromUtf8.browser.d.ts new file mode 100644 index 0000000..dd91981 --- /dev/null +++ 
b/amplify/functions/fetchDocuments/node_modules/@aws-crypto/sha256-browser/node_modules/@smithy/util-utf8/dist-types/fromUtf8.browser.d.ts @@ -0,0 +1 @@ +export declare const fromUtf8: (input: string) => Uint8Array; diff --git a/amplify/functions/fetchDocuments/node_modules/@aws-crypto/sha256-browser/node_modules/@smithy/util-utf8/dist-types/fromUtf8.d.ts b/amplify/functions/fetchDocuments/node_modules/@aws-crypto/sha256-browser/node_modules/@smithy/util-utf8/dist-types/fromUtf8.d.ts new file mode 100644 index 0000000..dd91981 --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@aws-crypto/sha256-browser/node_modules/@smithy/util-utf8/dist-types/fromUtf8.d.ts @@ -0,0 +1 @@ +export declare const fromUtf8: (input: string) => Uint8Array; diff --git a/amplify/functions/fetchDocuments/node_modules/@aws-crypto/sha256-browser/node_modules/@smithy/util-utf8/dist-types/index.d.ts b/amplify/functions/fetchDocuments/node_modules/@aws-crypto/sha256-browser/node_modules/@smithy/util-utf8/dist-types/index.d.ts new file mode 100644 index 0000000..00ba465 --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@aws-crypto/sha256-browser/node_modules/@smithy/util-utf8/dist-types/index.d.ts @@ -0,0 +1,3 @@ +export * from "./fromUtf8"; +export * from "./toUint8Array"; +export * from "./toUtf8"; diff --git a/amplify/functions/fetchDocuments/node_modules/@aws-crypto/sha256-browser/node_modules/@smithy/util-utf8/dist-types/toUint8Array.d.ts b/amplify/functions/fetchDocuments/node_modules/@aws-crypto/sha256-browser/node_modules/@smithy/util-utf8/dist-types/toUint8Array.d.ts new file mode 100644 index 0000000..11b6342 --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@aws-crypto/sha256-browser/node_modules/@smithy/util-utf8/dist-types/toUint8Array.d.ts @@ -0,0 +1 @@ +export declare const toUint8Array: (data: string | ArrayBuffer | ArrayBufferView) => Uint8Array; diff --git 
a/amplify/functions/fetchDocuments/node_modules/@aws-crypto/sha256-browser/node_modules/@smithy/util-utf8/dist-types/toUtf8.browser.d.ts b/amplify/functions/fetchDocuments/node_modules/@aws-crypto/sha256-browser/node_modules/@smithy/util-utf8/dist-types/toUtf8.browser.d.ts new file mode 100644 index 0000000..8494acd --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@aws-crypto/sha256-browser/node_modules/@smithy/util-utf8/dist-types/toUtf8.browser.d.ts @@ -0,0 +1,7 @@ +/** + * + * This does not convert non-utf8 strings to utf8, it only passes through strings if + * a string is received instead of a Uint8Array. + * + */ +export declare const toUtf8: (input: Uint8Array | string) => string; diff --git a/amplify/functions/fetchDocuments/node_modules/@aws-crypto/sha256-browser/node_modules/@smithy/util-utf8/dist-types/toUtf8.d.ts b/amplify/functions/fetchDocuments/node_modules/@aws-crypto/sha256-browser/node_modules/@smithy/util-utf8/dist-types/toUtf8.d.ts new file mode 100644 index 0000000..8494acd --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@aws-crypto/sha256-browser/node_modules/@smithy/util-utf8/dist-types/toUtf8.d.ts @@ -0,0 +1,7 @@ +/** + * + * This does not convert non-utf8 strings to utf8, it only passes through strings if + * a string is received instead of a Uint8Array. 
+ * + */ +export declare const toUtf8: (input: Uint8Array | string) => string; diff --git a/amplify/functions/fetchDocuments/node_modules/@aws-crypto/sha256-browser/node_modules/@smithy/util-utf8/dist-types/ts3.4/fromUtf8.browser.d.ts b/amplify/functions/fetchDocuments/node_modules/@aws-crypto/sha256-browser/node_modules/@smithy/util-utf8/dist-types/ts3.4/fromUtf8.browser.d.ts new file mode 100644 index 0000000..39f3d6d --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@aws-crypto/sha256-browser/node_modules/@smithy/util-utf8/dist-types/ts3.4/fromUtf8.browser.d.ts @@ -0,0 +1 @@ +export declare const fromUtf8: (input: string) => Uint8Array; diff --git a/amplify/functions/fetchDocuments/node_modules/@aws-crypto/sha256-browser/node_modules/@smithy/util-utf8/dist-types/ts3.4/fromUtf8.d.ts b/amplify/functions/fetchDocuments/node_modules/@aws-crypto/sha256-browser/node_modules/@smithy/util-utf8/dist-types/ts3.4/fromUtf8.d.ts new file mode 100644 index 0000000..39f3d6d --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@aws-crypto/sha256-browser/node_modules/@smithy/util-utf8/dist-types/ts3.4/fromUtf8.d.ts @@ -0,0 +1 @@ +export declare const fromUtf8: (input: string) => Uint8Array; diff --git a/amplify/functions/fetchDocuments/node_modules/@aws-crypto/sha256-browser/node_modules/@smithy/util-utf8/dist-types/ts3.4/index.d.ts b/amplify/functions/fetchDocuments/node_modules/@aws-crypto/sha256-browser/node_modules/@smithy/util-utf8/dist-types/ts3.4/index.d.ts new file mode 100644 index 0000000..ef9761d --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@aws-crypto/sha256-browser/node_modules/@smithy/util-utf8/dist-types/ts3.4/index.d.ts @@ -0,0 +1,3 @@ +export * from "./fromUtf8"; +export * from "./toUint8Array"; +export * from "./toUtf8"; diff --git a/amplify/functions/fetchDocuments/node_modules/@aws-crypto/sha256-browser/node_modules/@smithy/util-utf8/dist-types/ts3.4/toUint8Array.d.ts 
b/amplify/functions/fetchDocuments/node_modules/@aws-crypto/sha256-browser/node_modules/@smithy/util-utf8/dist-types/ts3.4/toUint8Array.d.ts new file mode 100644 index 0000000..562fe10 --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@aws-crypto/sha256-browser/node_modules/@smithy/util-utf8/dist-types/ts3.4/toUint8Array.d.ts @@ -0,0 +1 @@ +export declare const toUint8Array: (data: string | ArrayBuffer | ArrayBufferView) => Uint8Array; diff --git a/amplify/functions/fetchDocuments/node_modules/@aws-crypto/sha256-browser/node_modules/@smithy/util-utf8/dist-types/ts3.4/toUtf8.browser.d.ts b/amplify/functions/fetchDocuments/node_modules/@aws-crypto/sha256-browser/node_modules/@smithy/util-utf8/dist-types/ts3.4/toUtf8.browser.d.ts new file mode 100644 index 0000000..33511ad --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@aws-crypto/sha256-browser/node_modules/@smithy/util-utf8/dist-types/ts3.4/toUtf8.browser.d.ts @@ -0,0 +1,7 @@ +/** + * + * This does not convert non-utf8 strings to utf8, it only passes through strings if + * a string is received instead of a Uint8Array. + * + */ +export declare const toUtf8: (input: Uint8Array | string) => string; diff --git a/amplify/functions/fetchDocuments/node_modules/@aws-crypto/sha256-browser/node_modules/@smithy/util-utf8/dist-types/ts3.4/toUtf8.d.ts b/amplify/functions/fetchDocuments/node_modules/@aws-crypto/sha256-browser/node_modules/@smithy/util-utf8/dist-types/ts3.4/toUtf8.d.ts new file mode 100644 index 0000000..33511ad --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@aws-crypto/sha256-browser/node_modules/@smithy/util-utf8/dist-types/ts3.4/toUtf8.d.ts @@ -0,0 +1,7 @@ +/** + * + * This does not convert non-utf8 strings to utf8, it only passes through strings if + * a string is received instead of a Uint8Array. 
+ * + */ +export declare const toUtf8: (input: Uint8Array | string) => string; diff --git a/amplify/functions/fetchDocuments/node_modules/@aws-crypto/sha256-browser/node_modules/@smithy/util-utf8/package.json b/amplify/functions/fetchDocuments/node_modules/@aws-crypto/sha256-browser/node_modules/@smithy/util-utf8/package.json new file mode 100644 index 0000000..78bfb4d --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@aws-crypto/sha256-browser/node_modules/@smithy/util-utf8/package.json @@ -0,0 +1,66 @@ +{ + "name": "@smithy/util-utf8", + "version": "2.3.0", + "description": "A UTF-8 string <-> UInt8Array converter", + "main": "./dist-cjs/index.js", + "module": "./dist-es/index.js", + "scripts": { + "build": "concurrently 'yarn:build:cjs' 'yarn:build:es' 'yarn:build:types && yarn build:types:downlevel'", + "build:cjs": "node ../../scripts/inline util-utf8", + "build:es": "yarn g:tsc -p tsconfig.es.json", + "build:types": "yarn g:tsc -p tsconfig.types.json", + "build:types:downlevel": "downlevel-dts dist-types dist-types/ts3.4", + "stage-release": "rimraf ./.release && yarn pack && mkdir ./.release && tar zxvf ./package.tgz --directory ./.release && rm ./package.tgz", + "clean": "rimraf ./dist-* && rimraf *.tsbuildinfo || exit 0", + "lint": "eslint -c ../../.eslintrc.js \"src/**/*.ts\"", + "format": "prettier --config ../../prettier.config.js --ignore-path ../.prettierignore --write \"**/*.{ts,md,json}\"", + "test": "yarn g:jest" + }, + "author": { + "name": "AWS SDK for JavaScript Team", + "url": "https://aws.amazon.com/javascript/" + }, + "license": "Apache-2.0", + "dependencies": { + "@smithy/util-buffer-from": "^2.2.0", + "tslib": "^2.6.2" + }, + "devDependencies": { + "@tsconfig/recommended": "1.0.1", + "concurrently": "7.0.0", + "downlevel-dts": "0.10.1", + "rimraf": "3.0.2", + "typedoc": "0.23.23" + }, + "types": "./dist-types/index.d.ts", + "engines": { + "node": ">=14.0.0" + }, + "typesVersions": { + "<4.0": { + "dist-types/*": [ + 
"dist-types/ts3.4/*" + ] + } + }, + "files": [ + "dist-*/**" + ], + "browser": { + "./dist-es/fromUtf8": "./dist-es/fromUtf8.browser", + "./dist-es/toUtf8": "./dist-es/toUtf8.browser" + }, + "react-native": {}, + "homepage": "https://github.com/awslabs/smithy-typescript/tree/main/packages/util-utf8", + "repository": { + "type": "git", + "url": "https://github.com/awslabs/smithy-typescript.git", + "directory": "packages/util-utf8" + }, + "typedoc": { + "entryPoint": "src/index.ts" + }, + "publishConfig": { + "directory": ".release/package" + } +} \ No newline at end of file diff --git a/amplify/functions/fetchDocuments/node_modules/@aws-crypto/sha256-browser/package.json b/amplify/functions/fetchDocuments/node_modules/@aws-crypto/sha256-browser/package.json new file mode 100644 index 0000000..2688ecf --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@aws-crypto/sha256-browser/package.json @@ -0,0 +1,33 @@ +{ + "name": "@aws-crypto/sha256-browser", + "version": "5.2.0", + "scripts": { + "prepublishOnly": "tsc -p tsconfig.json && tsc -p tsconfig.module.json", + "pretest": "tsc -p tsconfig.test.json", + "test": "mocha --require ts-node/register test/**/*test.ts" + }, + "repository": { + "type": "git", + "url": "git@github.com:aws/aws-sdk-js-crypto-helpers.git" + }, + "author": { + "name": "AWS Crypto Tools Team", + "email": "aws-cryptools@amazon.com", + "url": "https://docs.aws.amazon.com/aws-crypto-tools/index.html?id=docs_gateway#lang/en_us" + }, + "homepage": "https://github.com/aws/aws-sdk-js-crypto-helpers/tree/master/packages/sha256-browser", + "license": "Apache-2.0", + "dependencies": { + "@aws-crypto/sha256-js": "^5.2.0", + "@aws-crypto/supports-web-crypto": "^5.2.0", + "@aws-crypto/util": "^5.2.0", + "@aws-sdk/types": "^3.222.0", + "@aws-sdk/util-locate-window": "^3.0.0", + "@smithy/util-utf8": "^2.0.0", + "tslib": "^2.6.2" + }, + "main": "./build/main/index.js", + "module": "./build/module/index.js", + "types": "./build/main/index.d.ts", + 
"gitHead": "c11b171b35ec5c093364f0e0d8dc4ab1af68e748" +} diff --git a/amplify/functions/fetchDocuments/node_modules/@aws-crypto/sha256-browser/src/constants.ts b/amplify/functions/fetchDocuments/node_modules/@aws-crypto/sha256-browser/src/constants.ts new file mode 100644 index 0000000..7f68e2a --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@aws-crypto/sha256-browser/src/constants.ts @@ -0,0 +1,41 @@ +export const SHA_256_HASH: { name: "SHA-256" } = { name: "SHA-256" }; + +export const SHA_256_HMAC_ALGO: { name: "HMAC"; hash: { name: "SHA-256" } } = { + name: "HMAC", + hash: SHA_256_HASH +}; + +export const EMPTY_DATA_SHA_256 = new Uint8Array([ + 227, + 176, + 196, + 66, + 152, + 252, + 28, + 20, + 154, + 251, + 244, + 200, + 153, + 111, + 185, + 36, + 39, + 174, + 65, + 228, + 100, + 155, + 147, + 76, + 164, + 149, + 153, + 27, + 120, + 82, + 184, + 85 +]); diff --git a/amplify/functions/fetchDocuments/node_modules/@aws-crypto/sha256-browser/src/crossPlatformSha256.ts b/amplify/functions/fetchDocuments/node_modules/@aws-crypto/sha256-browser/src/crossPlatformSha256.ts new file mode 100644 index 0000000..8cb9ff0 --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@aws-crypto/sha256-browser/src/crossPlatformSha256.ts @@ -0,0 +1,30 @@ +import { Sha256 as WebCryptoSha256 } from "./webCryptoSha256"; +import { Sha256 as JsSha256 } from "@aws-crypto/sha256-js"; +import { Checksum, SourceData } from "@aws-sdk/types"; +import { supportsWebCrypto } from "@aws-crypto/supports-web-crypto"; +import { locateWindow } from "@aws-sdk/util-locate-window"; +import { convertToBuffer } from "@aws-crypto/util"; + +export class Sha256 implements Checksum { + private hash: Checksum; + + constructor(secret?: SourceData) { + if (supportsWebCrypto(locateWindow())) { + this.hash = new WebCryptoSha256(secret); + } else { + this.hash = new JsSha256(secret); + } + } + + update(data: SourceData, encoding?: "utf8" | "ascii" | "latin1"): void { + 
this.hash.update(convertToBuffer(data)); + } + + digest(): Promise { + return this.hash.digest(); + } + + reset(): void { + this.hash.reset(); + } +} diff --git a/amplify/functions/fetchDocuments/node_modules/@aws-crypto/sha256-browser/src/index.ts b/amplify/functions/fetchDocuments/node_modules/@aws-crypto/sha256-browser/src/index.ts new file mode 100644 index 0000000..60ab397 --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@aws-crypto/sha256-browser/src/index.ts @@ -0,0 +1,2 @@ +export * from "./crossPlatformSha256"; +export { Sha256 as WebCryptoSha256 } from "./webCryptoSha256"; diff --git a/amplify/functions/fetchDocuments/node_modules/@aws-crypto/sha256-browser/src/isEmptyData.ts b/amplify/functions/fetchDocuments/node_modules/@aws-crypto/sha256-browser/src/isEmptyData.ts new file mode 100644 index 0000000..538971f --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@aws-crypto/sha256-browser/src/isEmptyData.ts @@ -0,0 +1,9 @@ +import { SourceData } from "@aws-sdk/types"; + +export function isEmptyData(data: SourceData): boolean { + if (typeof data === "string") { + return data.length === 0; + } + + return data.byteLength === 0; +} diff --git a/amplify/functions/fetchDocuments/node_modules/@aws-crypto/sha256-browser/src/webCryptoSha256.ts b/amplify/functions/fetchDocuments/node_modules/@aws-crypto/sha256-browser/src/webCryptoSha256.ts new file mode 100644 index 0000000..fe4db57 --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@aws-crypto/sha256-browser/src/webCryptoSha256.ts @@ -0,0 +1,71 @@ +import { Checksum, SourceData } from "@aws-sdk/types"; +import { isEmptyData, convertToBuffer } from "@aws-crypto/util"; +import { + EMPTY_DATA_SHA_256, + SHA_256_HASH, + SHA_256_HMAC_ALGO, +} from "./constants"; +import { locateWindow } from "@aws-sdk/util-locate-window"; + +export class Sha256 implements Checksum { + private readonly secret?: SourceData; + private key: Promise | undefined; + private toHash: Uint8Array = 
new Uint8Array(0); + + constructor(secret?: SourceData) { + this.secret = secret; + this.reset(); + } + + update(data: SourceData): void { + if (isEmptyData(data)) { + return; + } + + const update = convertToBuffer(data); + const typedArray = new Uint8Array( + this.toHash.byteLength + update.byteLength + ); + typedArray.set(this.toHash, 0); + typedArray.set(update, this.toHash.byteLength); + this.toHash = typedArray; + } + + digest(): Promise { + if (this.key) { + return this.key.then((key) => + locateWindow() + .crypto.subtle.sign(SHA_256_HMAC_ALGO, key, this.toHash) + .then((data) => new Uint8Array(data)) + ); + } + + if (isEmptyData(this.toHash)) { + return Promise.resolve(EMPTY_DATA_SHA_256); + } + + return Promise.resolve() + .then(() => + locateWindow().crypto.subtle.digest(SHA_256_HASH, this.toHash) + ) + .then((data) => Promise.resolve(new Uint8Array(data))); + } + + reset(): void { + this.toHash = new Uint8Array(0); + if (this.secret && this.secret !== void 0) { + this.key = new Promise((resolve, reject) => { + locateWindow() + .crypto.subtle.importKey( + "raw", + convertToBuffer(this.secret as SourceData), + SHA_256_HMAC_ALGO, + false, + ["sign"] + ) + .then(resolve, reject); + }); + this.key.catch(() => {}); + } + } +} diff --git a/amplify/functions/fetchDocuments/node_modules/@aws-crypto/sha256-browser/tsconfig.json b/amplify/functions/fetchDocuments/node_modules/@aws-crypto/sha256-browser/tsconfig.json new file mode 100644 index 0000000..fb9aa95 --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@aws-crypto/sha256-browser/tsconfig.json @@ -0,0 +1,10 @@ +{ + "extends": "../tsconfig.json", + "compilerOptions": { + "rootDir": "./src", + "outDir": "./build/main", + "lib": ["dom"], + }, + "include": ["src/**/*.ts"], + "exclude": ["node_modules/**"] +} diff --git a/amplify/functions/fetchDocuments/node_modules/@aws-crypto/sha256-browser/tsconfig.module.json 
b/amplify/functions/fetchDocuments/node_modules/@aws-crypto/sha256-browser/tsconfig.module.json new file mode 100644 index 0000000..7d0cfdd --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@aws-crypto/sha256-browser/tsconfig.module.json @@ -0,0 +1,7 @@ +{ + "extends": "./tsconfig", + "compilerOptions": { + "outDir": "build/module", + "module": "esnext", + } +} diff --git a/amplify/functions/fetchDocuments/node_modules/@aws-crypto/sha256-js/CHANGELOG.md b/amplify/functions/fetchDocuments/node_modules/@aws-crypto/sha256-js/CHANGELOG.md new file mode 100644 index 0000000..97c1f60 --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@aws-crypto/sha256-js/CHANGELOG.md @@ -0,0 +1,106 @@ +# Change Log + +All notable changes to this project will be documented in this file. +See [Conventional Commits](https://conventionalcommits.org) for commit guidelines. + +# [5.2.0](https://github.com/aws/aws-sdk-js-crypto-helpers/compare/v5.1.0...v5.2.0) (2023-10-16) + +### Features + +- support ESM artifacts in all packages ([#752](https://github.com/aws/aws-sdk-js-crypto-helpers/issues/752)) ([e930ffb](https://github.com/aws/aws-sdk-js-crypto-helpers/commit/e930ffba5cfef66dd242049e7d514ced232c1e3b)) + +# [5.1.0](https://github.com/aws/aws-sdk-js-crypto-helpers/compare/v5.0.0...v5.1.0) (2023-09-22) + +### Bug Fixes + +- Update tsc to 2.x ([#735](https://github.com/aws/aws-sdk-js-crypto-helpers/issues/735)) ([782e0de](https://github.com/aws/aws-sdk-js-crypto-helpers/commit/782e0de9f5fef41f694130580a69d940894b6b8c)) + +# [5.0.0](https://github.com/aws/aws-sdk-js-crypto-helpers/compare/v4.0.1...v5.0.0) (2023-07-13) + +**Note:** Version bump only for package @aws-crypto/sha256-js + +# [4.0.0](https://github.com/aws/aws-sdk-js-crypto-helpers/compare/v3.0.0...v4.0.0) (2023-02-20) + +**Note:** Version bump only for package @aws-crypto/sha256-js + +# [3.0.0](https://github.com/aws/aws-sdk-js-crypto-helpers/compare/v2.0.2...v3.0.0) (2023-01-12) + +### Bug Fixes + 
+- **docs:** sha256 packages, clarify hmac support ([#455](https://github.com/aws/aws-sdk-js-crypto-helpers/issues/455)) ([1be5043](https://github.com/aws/aws-sdk-js-crypto-helpers/commit/1be5043325991f3f5ccb52a8dd928f004b4d442e)) + +- feat!: replace Hash implementations with Checksum interface (#492) ([da43dc0](https://github.com/aws/aws-sdk-js-crypto-helpers/commit/da43dc0fdf669d9ebb5bfb1b1f7c79e46c4aaae1)), closes [#492](https://github.com/aws/aws-sdk-js-crypto-helpers/issues/492) + +### BREAKING CHANGES + +- All classes that implemented `Hash` now implement `Checksum`. + +## [2.0.2](https://github.com/aws/aws-sdk-js-crypto-helpers/compare/v2.0.1...v2.0.2) (2022-09-07) + +### Bug Fixes + +- **#337:** update @aws-sdk/types ([#373](https://github.com/aws/aws-sdk-js-crypto-helpers/issues/373)) ([b26a811](https://github.com/aws/aws-sdk-js-crypto-helpers/commit/b26a811a392f5209c7ec7e57251500d4d78f97ff)), closes [#337](https://github.com/aws/aws-sdk-js-crypto-helpers/issues/337) + +## [2.0.1](https://github.com/aws/aws-sdk-js-crypto-helpers/compare/v2.0.0...v2.0.1) (2021-12-09) + +**Note:** Version bump only for package @aws-crypto/sha256-js + +# [2.0.0](https://github.com/aws/aws-sdk-js-crypto-helpers/compare/v1.2.2...v2.0.0) (2021-10-25) + +**Note:** Version bump only for package @aws-crypto/sha256-js + +## [1.2.2](https://github.com/aws/aws-sdk-js-crypto-helpers/compare/v1.2.1...v1.2.2) (2021-10-12) + +**Note:** Version bump only for package @aws-crypto/sha256-js + +## [1.2.1](https://github.com/aws/aws-sdk-js-crypto-helpers/compare/v1.2.0...v1.2.1) (2021-09-17) + +**Note:** Version bump only for package @aws-crypto/sha256-js + +# [1.2.0](https://github.com/aws/aws-sdk-js-crypto-helpers/compare/v1.1.1...v1.2.0) (2021-09-17) + +### Features + +- add @aws-crypto/util ([8f489cb](https://github.com/aws/aws-sdk-js-crypto-helpers/commit/8f489cbe4c0e134f826bac66f1bf5172597048b9)) + +# 
[1.1.0](https://github.com/aws/aws-sdk-js-crypto-helpers/compare/@aws-crypto/sha256-js@1.0.0...@aws-crypto/sha256-js@1.1.0) (2021-01-13) + +### Bug Fixes + +- remove package lock ([6002a5a](https://github.com/aws/aws-sdk-js-crypto-helpers/commit/6002a5ab9218dc8798c19dc205d3eebd3bec5b43)) +- **aws-crypto:** export explicit dependencies on [@aws-types](https://github.com/aws-types) ([6a1873a](https://github.com/aws/aws-sdk-js-crypto-helpers/commit/6a1873a4dcc2aaa4a1338595703cfa7099f17b8c)) +- **deps-dev:** move @aws-sdk/types to devDependencies ([#188](https://github.com/aws/aws-sdk-js-crypto-helpers/issues/188)) ([08efdf4](https://github.com/aws/aws-sdk-js-crypto-helpers/commit/08efdf46dcc612d88c441e29945d787f253ee77d)) + +# [1.0.0](https://github.com/aws/aws-sdk-js-crypto-helpers/compare/@aws-crypto/sha256-js@1.0.0-alpha.0...@aws-crypto/sha256-js@1.0.0) (2020-10-22) + +**Note:** Version bump only for package @aws-crypto/sha256-js + +# [1.0.0-alpha.0](https://github.com/aws/aws-sdk-js-crypto-helpers/compare/@aws-crypto/sha256-js@0.1.0-preview.4...@aws-crypto/sha256-js@1.0.0-alpha.0) (2020-02-07) + +**Note:** Version bump only for package @aws-crypto/sha256-js + +# [0.1.0-preview.4](https://github.com/aws/aws-sdk-js-crypto-helpers/compare/@aws-crypto/sha256-js@0.1.0-preview.2...@aws-crypto/sha256-js@0.1.0-preview.4) (2020-01-16) + +### Bug Fixes + +- Changed package.json files to point to the right Git repo ([#9](https://github.com/aws/aws-sdk-js-crypto-helpers/issues/9)) ([028245d](https://github.com/aws/aws-sdk-js-crypto-helpers/commit/028245d72e642ca98d82226afb300eb154503c4a)), closes [#8](https://github.com/aws/aws-sdk-js-crypto-helpers/issues/8) +- es2015.iterable required ([#10](https://github.com/aws/aws-sdk-js-crypto-helpers/issues/10)) ([6e08d83](https://github.com/aws/aws-sdk-js-crypto-helpers/commit/6e08d83c33667ad8cbeeaaa7cedf1bbe05f79ed8)) +- lerna version maintains package-lock ([#14](https://github.com/aws/aws-sdk-js-crypto-helpers/issues/14)) 
([2ef29e1](https://github.com/aws/aws-sdk-js-crypto-helpers/commit/2ef29e13779703a5c9b32e93d18918fcb33b7272)), closes [#13](https://github.com/aws/aws-sdk-js-crypto-helpers/issues/13) + +# [0.1.0-preview.3](https://github.com/aws/aws-sdk-js-crypto-helpers/compare/@aws-crypto/sha256-js@0.1.0-preview.2...@aws-crypto/sha256-js@0.1.0-preview.3) (2019-11-15) + +### Bug Fixes + +- Changed package.json files to point to the right Git repo ([#9](https://github.com/aws/aws-sdk-js-crypto-helpers/issues/9)) ([028245d](https://github.com/aws/aws-sdk-js-crypto-helpers/commit/028245d72e642ca98d82226afb300eb154503c4a)), closes [#8](https://github.com/aws/aws-sdk-js-crypto-helpers/issues/8) +- es2015.iterable required ([#10](https://github.com/aws/aws-sdk-js-crypto-helpers/issues/10)) ([6e08d83](https://github.com/aws/aws-sdk-js-crypto-helpers/commit/6e08d83c33667ad8cbeeaaa7cedf1bbe05f79ed8)) +- lerna version maintains package-lock ([#14](https://github.com/aws/aws-sdk-js-crypto-helpers/issues/14)) ([2ef29e1](https://github.com/aws/aws-sdk-js-crypto-helpers/commit/2ef29e13779703a5c9b32e93d18918fcb33b7272)), closes [#13](https://github.com/aws/aws-sdk-js-crypto-helpers/issues/13) + +# [0.1.0-preview.2](https://github.com/aws/aws-javascript-crypto-helpers/compare/@aws-crypto/sha256-js@0.1.0-preview.1...@aws-crypto/sha256-js@0.1.0-preview.2) (2019-10-30) + +### Bug Fixes + +- remove /src/ from .npmignore (for sourcemaps) ([#5](https://github.com/aws/aws-javascript-crypto-helpers/issues/5)) ([ec52056](https://github.com/aws/aws-javascript-crypto-helpers/commit/ec52056)) + +### Features + +- **sha256-js:** expose synchronous digest ([#7](https://github.com/aws/aws-javascript-crypto-helpers/issues/7)) ([9edaef7](https://github.com/aws/aws-javascript-crypto-helpers/commit/9edaef7)), closes [#6](https://github.com/aws/aws-javascript-crypto-helpers/issues/6) diff --git a/amplify/functions/fetchDocuments/node_modules/@aws-crypto/sha256-js/LICENSE 
b/amplify/functions/fetchDocuments/node_modules/@aws-crypto/sha256-js/LICENSE new file mode 100644 index 0000000..ad410e1 --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@aws-crypto/sha256-js/LICENSE @@ -0,0 +1,201 @@ +Apache License + Version 2.0, January 2004 + http://www.apache.org/licenses/ + + TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION + + 1. Definitions. + + "License" shall mean the terms and conditions for use, reproduction, + and distribution as defined by Sections 1 through 9 of this document. + + "Licensor" shall mean the copyright owner or entity authorized by + the copyright owner that is granting the License. + + "Legal Entity" shall mean the union of the acting entity and all + other entities that control, are controlled by, or are under common + control with that entity. For the purposes of this definition, + "control" means (i) the power, direct or indirect, to cause the + direction or management of such entity, whether by contract or + otherwise, or (ii) ownership of fifty percent (50%) or more of the + outstanding shares, or (iii) beneficial ownership of such entity. + + "You" (or "Your") shall mean an individual or Legal Entity + exercising permissions granted by this License. + + "Source" form shall mean the preferred form for making modifications, + including but not limited to software source code, documentation + source, and configuration files. + + "Object" form shall mean any form resulting from mechanical + transformation or translation of a Source form, including but + not limited to compiled object code, generated documentation, + and conversions to other media types. + + "Work" shall mean the work of authorship, whether in Source or + Object form, made available under the License, as indicated by a + copyright notice that is included in or attached to the work + (an example is provided in the Appendix below). 
+ + "Derivative Works" shall mean any work, whether in Source or Object + form, that is based on (or derived from) the Work and for which the + editorial revisions, annotations, elaborations, or other modifications + represent, as a whole, an original work of authorship. For the purposes + of this License, Derivative Works shall not include works that remain + separable from, or merely link (or bind by name) to the interfaces of, + the Work and Derivative Works thereof. + + "Contribution" shall mean any work of authorship, including + the original version of the Work and any modifications or additions + to that Work or Derivative Works thereof, that is intentionally + submitted to Licensor for inclusion in the Work by the copyright owner + or by an individual or Legal Entity authorized to submit on behalf of + the copyright owner. For the purposes of this definition, "submitted" + means any form of electronic, verbal, or written communication sent + to the Licensor or its representatives, including but not limited to + communication on electronic mailing lists, source code control systems, + and issue tracking systems that are managed by, or on behalf of, the + Licensor for the purpose of discussing and improving the Work, but + excluding communication that is conspicuously marked or otherwise + designated in writing by the copyright owner as "Not a Contribution." + + "Contributor" shall mean Licensor and any individual or Legal Entity + on behalf of whom a Contribution has been received by Licensor and + subsequently incorporated within the Work. + + 2. Grant of Copyright License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + copyright license to reproduce, prepare Derivative Works of, + publicly display, publicly perform, sublicense, and distribute the + Work and such Derivative Works in Source or Object form. + + 3. 
Grant of Patent License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + (except as stated in this section) patent license to make, have made, + use, offer to sell, sell, import, and otherwise transfer the Work, + where such license applies only to those patent claims licensable + by such Contributor that are necessarily infringed by their + Contribution(s) alone or by combination of their Contribution(s) + with the Work to which such Contribution(s) was submitted. If You + institute patent litigation against any entity (including a + cross-claim or counterclaim in a lawsuit) alleging that the Work + or a Contribution incorporated within the Work constitutes direct + or contributory patent infringement, then any patent licenses + granted to You under this License for that Work shall terminate + as of the date such litigation is filed. + + 4. Redistribution. You may reproduce and distribute copies of the + Work or Derivative Works thereof in any medium, with or without + modifications, and in Source or Object form, provided that You + meet the following conditions: + + (a) You must give any other recipients of the Work or + Derivative Works a copy of this License; and + + (b) You must cause any modified files to carry prominent notices + stating that You changed the files; and + + (c) You must retain, in the Source form of any Derivative Works + that You distribute, all copyright, patent, trademark, and + attribution notices from the Source form of the Work, + excluding those notices that do not pertain to any part of + the Derivative Works; and + + (d) If the Work includes a "NOTICE" text file as part of its + distribution, then any Derivative Works that You distribute must + include a readable copy of the attribution notices contained + within such NOTICE file, excluding those notices that do not + pertain to any part of the Derivative 
Works, in at least one + of the following places: within a NOTICE text file distributed + as part of the Derivative Works; within the Source form or + documentation, if provided along with the Derivative Works; or, + within a display generated by the Derivative Works, if and + wherever such third-party notices normally appear. The contents + of the NOTICE file are for informational purposes only and + do not modify the License. You may add Your own attribution + notices within Derivative Works that You distribute, alongside + or as an addendum to the NOTICE text from the Work, provided + that such additional attribution notices cannot be construed + as modifying the License. + + You may add Your own copyright statement to Your modifications and + may provide additional or different license terms and conditions + for use, reproduction, or distribution of Your modifications, or + for any such Derivative Works as a whole, provided Your use, + reproduction, and distribution of the Work otherwise complies with + the conditions stated in this License. + + 5. Submission of Contributions. Unless You explicitly state otherwise, + any Contribution intentionally submitted for inclusion in the Work + by You to the Licensor shall be under the terms and conditions of + this License, without any additional terms or conditions. + Notwithstanding the above, nothing herein shall supersede or modify + the terms of any separate license agreement you may have executed + with Licensor regarding such Contributions. + + 6. Trademarks. This License does not grant permission to use the trade + names, trademarks, service marks, or product names of the Licensor, + except as required for reasonable and customary use in describing the + origin of the Work and reproducing the content of the NOTICE file. + + 7. Disclaimer of Warranty. 
Unless required by applicable law or + agreed to in writing, Licensor provides the Work (and each + Contributor provides its Contributions) on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or + implied, including, without limitation, any warranties or conditions + of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A + PARTICULAR PURPOSE. You are solely responsible for determining the + appropriateness of using or redistributing the Work and assume any + risks associated with Your exercise of permissions under this License. + + 8. Limitation of Liability. In no event and under no legal theory, + whether in tort (including negligence), contract, or otherwise, + unless required by applicable law (such as deliberate and grossly + negligent acts) or agreed to in writing, shall any Contributor be + liable to You for damages, including any direct, indirect, special, + incidental, or consequential damages of any character arising as a + result of this License or out of the use or inability to use the + Work (including but not limited to damages for loss of goodwill, + work stoppage, computer failure or malfunction, or any and all + other commercial damages or losses), even if such Contributor + has been advised of the possibility of such damages. + + 9. Accepting Warranty or Additional Liability. While redistributing + the Work or Derivative Works thereof, You may choose to offer, + and charge a fee for, acceptance of support, warranty, indemnity, + or other liability obligations and/or rights consistent with this + License. However, in accepting such obligations, You may act only + on Your own behalf and on Your sole responsibility, not on behalf + of any other Contributor, and only if You agree to indemnify, + defend, and hold each Contributor harmless for any liability + incurred by, or claims asserted against, such Contributor by reason + of your accepting any such warranty or additional liability. 
+ + END OF TERMS AND CONDITIONS + + APPENDIX: How to apply the Apache License to your work. + + To apply the Apache License to your work, attach the following + boilerplate notice, with the fields enclosed by brackets "{}" + replaced with your own identifying information. (Don't include + the brackets!) The text should be enclosed in the appropriate + comment syntax for the file format. We also recommend that a + file or class name and description of purpose be included on the + same "printed page" as the copyright notice for easier + identification within third-party archives. + + Copyright {yyyy} {name of copyright owner} + + Licensed under the Apache License, Version 2.0 (the "License"); + you may not use this file except in compliance with the License. + You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + + Unless required by applicable law or agreed to in writing, software + distributed under the License is distributed on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + See the License for the specific language governing permissions and + limitations under the License. \ No newline at end of file diff --git a/amplify/functions/fetchDocuments/node_modules/@aws-crypto/sha256-js/README.md b/amplify/functions/fetchDocuments/node_modules/@aws-crypto/sha256-js/README.md new file mode 100644 index 0000000..f769f5b --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@aws-crypto/sha256-js/README.md @@ -0,0 +1,29 @@ +# crypto-sha256-js + +A pure JS implementation SHA256. 
+ +## Usage + +- To hash "some data" +``` +import {Sha256} from '@aws-crypto/sha256-js'; + +const hash = new Sha256(); +hash.update('some data'); +const result = await hash.digest(); + +``` + +- To hmac "some data" with "a key" +``` +import {Sha256} from '@aws-crypto/sha256-js'; + +const hash = new Sha256('a key'); +hash.update('some data'); +const result = await hash.digest(); + +``` + +## Test + +`npm test` diff --git a/amplify/functions/fetchDocuments/node_modules/@aws-crypto/sha256-js/build/main/RawSha256.d.ts b/amplify/functions/fetchDocuments/node_modules/@aws-crypto/sha256-js/build/main/RawSha256.d.ts new file mode 100644 index 0000000..1f580b2 --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@aws-crypto/sha256-js/build/main/RawSha256.d.ts @@ -0,0 +1,17 @@ +/** + * @internal + */ +export declare class RawSha256 { + private state; + private temp; + private buffer; + private bufferLength; + private bytesHashed; + /** + * @internal + */ + finished: boolean; + update(data: Uint8Array): void; + digest(): Uint8Array; + private hashBuffer; +} diff --git a/amplify/functions/fetchDocuments/node_modules/@aws-crypto/sha256-js/build/main/RawSha256.js b/amplify/functions/fetchDocuments/node_modules/@aws-crypto/sha256-js/build/main/RawSha256.js new file mode 100644 index 0000000..68ceacc --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@aws-crypto/sha256-js/build/main/RawSha256.js @@ -0,0 +1,124 @@ +"use strict"; +Object.defineProperty(exports, "__esModule", { value: true }); +exports.RawSha256 = void 0; +var constants_1 = require("./constants"); +/** + * @internal + */ +var RawSha256 = /** @class */ (function () { + function RawSha256() { + this.state = Int32Array.from(constants_1.INIT); + this.temp = new Int32Array(64); + this.buffer = new Uint8Array(64); + this.bufferLength = 0; + this.bytesHashed = 0; + /** + * @internal + */ + this.finished = false; + } + RawSha256.prototype.update = function (data) { + if (this.finished) { + throw 
new Error("Attempted to update an already finished hash."); + } + var position = 0; + var byteLength = data.byteLength; + this.bytesHashed += byteLength; + if (this.bytesHashed * 8 > constants_1.MAX_HASHABLE_LENGTH) { + throw new Error("Cannot hash more than 2^53 - 1 bits"); + } + while (byteLength > 0) { + this.buffer[this.bufferLength++] = data[position++]; + byteLength--; + if (this.bufferLength === constants_1.BLOCK_SIZE) { + this.hashBuffer(); + this.bufferLength = 0; + } + } + }; + RawSha256.prototype.digest = function () { + if (!this.finished) { + var bitsHashed = this.bytesHashed * 8; + var bufferView = new DataView(this.buffer.buffer, this.buffer.byteOffset, this.buffer.byteLength); + var undecoratedLength = this.bufferLength; + bufferView.setUint8(this.bufferLength++, 0x80); + // Ensure the final block has enough room for the hashed length + if (undecoratedLength % constants_1.BLOCK_SIZE >= constants_1.BLOCK_SIZE - 8) { + for (var i = this.bufferLength; i < constants_1.BLOCK_SIZE; i++) { + bufferView.setUint8(i, 0); + } + this.hashBuffer(); + this.bufferLength = 0; + } + for (var i = this.bufferLength; i < constants_1.BLOCK_SIZE - 8; i++) { + bufferView.setUint8(i, 0); + } + bufferView.setUint32(constants_1.BLOCK_SIZE - 8, Math.floor(bitsHashed / 0x100000000), true); + bufferView.setUint32(constants_1.BLOCK_SIZE - 4, bitsHashed); + this.hashBuffer(); + this.finished = true; + } + // The value in state is little-endian rather than big-endian, so flip + // each word into a new Uint8Array + var out = new Uint8Array(constants_1.DIGEST_LENGTH); + for (var i = 0; i < 8; i++) { + out[i * 4] = (this.state[i] >>> 24) & 0xff; + out[i * 4 + 1] = (this.state[i] >>> 16) & 0xff; + out[i * 4 + 2] = (this.state[i] >>> 8) & 0xff; + out[i * 4 + 3] = (this.state[i] >>> 0) & 0xff; + } + return out; + }; + RawSha256.prototype.hashBuffer = function () { + var _a = this, buffer = _a.buffer, state = _a.state; + var state0 = state[0], state1 = state[1], state2 = state[2], state3 
= state[3], state4 = state[4], state5 = state[5], state6 = state[6], state7 = state[7]; + for (var i = 0; i < constants_1.BLOCK_SIZE; i++) { + if (i < 16) { + this.temp[i] = + ((buffer[i * 4] & 0xff) << 24) | + ((buffer[i * 4 + 1] & 0xff) << 16) | + ((buffer[i * 4 + 2] & 0xff) << 8) | + (buffer[i * 4 + 3] & 0xff); + } + else { + var u = this.temp[i - 2]; + var t1_1 = ((u >>> 17) | (u << 15)) ^ ((u >>> 19) | (u << 13)) ^ (u >>> 10); + u = this.temp[i - 15]; + var t2_1 = ((u >>> 7) | (u << 25)) ^ ((u >>> 18) | (u << 14)) ^ (u >>> 3); + this.temp[i] = + ((t1_1 + this.temp[i - 7]) | 0) + ((t2_1 + this.temp[i - 16]) | 0); + } + var t1 = ((((((state4 >>> 6) | (state4 << 26)) ^ + ((state4 >>> 11) | (state4 << 21)) ^ + ((state4 >>> 25) | (state4 << 7))) + + ((state4 & state5) ^ (~state4 & state6))) | + 0) + + ((state7 + ((constants_1.KEY[i] + this.temp[i]) | 0)) | 0)) | + 0; + var t2 = ((((state0 >>> 2) | (state0 << 30)) ^ + ((state0 >>> 13) | (state0 << 19)) ^ + ((state0 >>> 22) | (state0 << 10))) + + ((state0 & state1) ^ (state0 & state2) ^ (state1 & state2))) | + 0; + state7 = state6; + state6 = state5; + state5 = state4; + state4 = (state3 + t1) | 0; + state3 = state2; + state2 = state1; + state1 = state0; + state0 = (t1 + t2) | 0; + } + state[0] += state0; + state[1] += state1; + state[2] += state2; + state[3] += state3; + state[4] += state4; + state[5] += state5; + state[6] += state6; + state[7] += state7; + }; + return RawSha256; +}()); +exports.RawSha256 = RawSha256; +//# sourceMappingURL=RawSha256.js.map \ No newline at end of file diff --git a/amplify/functions/fetchDocuments/node_modules/@aws-crypto/sha256-js/build/main/RawSha256.js.map b/amplify/functions/fetchDocuments/node_modules/@aws-crypto/sha256-js/build/main/RawSha256.js.map new file mode 100644 index 0000000..81659f5 --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@aws-crypto/sha256-js/build/main/RawSha256.js.map @@ -0,0 +1 @@ 
+{"version":3,"file":"RawSha256.js","sourceRoot":"","sources":["../../src/RawSha256.ts"],"names":[],"mappings":";;;AAAA,yCAMqB;AAErB;;GAEG;AACH;IAAA;QACU,UAAK,GAAe,UAAU,CAAC,IAAI,CAAC,gBAAI,CAAC,CAAC;QAC1C,SAAI,GAAe,IAAI,UAAU,CAAC,EAAE,CAAC,CAAC;QACtC,WAAM,GAAe,IAAI,UAAU,CAAC,EAAE,CAAC,CAAC;QACxC,iBAAY,GAAW,CAAC,CAAC;QACzB,gBAAW,GAAW,CAAC,CAAC;QAEhC;;WAEG;QACH,aAAQ,GAAY,KAAK,CAAC;IA8I5B,CAAC;IA5IC,0BAAM,GAAN,UAAO,IAAgB;QACrB,IAAI,IAAI,CAAC,QAAQ,EAAE;YACjB,MAAM,IAAI,KAAK,CAAC,+CAA+C,CAAC,CAAC;SAClE;QAED,IAAI,QAAQ,GAAG,CAAC,CAAC;QACX,IAAA,UAAU,GAAK,IAAI,WAAT,CAAU;QAC1B,IAAI,CAAC,WAAW,IAAI,UAAU,CAAC;QAE/B,IAAI,IAAI,CAAC,WAAW,GAAG,CAAC,GAAG,+BAAmB,EAAE;YAC9C,MAAM,IAAI,KAAK,CAAC,qCAAqC,CAAC,CAAC;SACxD;QAED,OAAO,UAAU,GAAG,CAAC,EAAE;YACrB,IAAI,CAAC,MAAM,CAAC,IAAI,CAAC,YAAY,EAAE,CAAC,GAAG,IAAI,CAAC,QAAQ,EAAE,CAAC,CAAC;YACpD,UAAU,EAAE,CAAC;YAEb,IAAI,IAAI,CAAC,YAAY,KAAK,sBAAU,EAAE;gBACpC,IAAI,CAAC,UAAU,EAAE,CAAC;gBAClB,IAAI,CAAC,YAAY,GAAG,CAAC,CAAC;aACvB;SACF;IACH,CAAC;IAED,0BAAM,GAAN;QACE,IAAI,CAAC,IAAI,CAAC,QAAQ,EAAE;YAClB,IAAM,UAAU,GAAG,IAAI,CAAC,WAAW,GAAG,CAAC,CAAC;YACxC,IAAM,UAAU,GAAG,IAAI,QAAQ,CAC7B,IAAI,CAAC,MAAM,CAAC,MAAM,EAClB,IAAI,CAAC,MAAM,CAAC,UAAU,EACtB,IAAI,CAAC,MAAM,CAAC,UAAU,CACvB,CAAC;YAEF,IAAM,iBAAiB,GAAG,IAAI,CAAC,YAAY,CAAC;YAC5C,UAAU,CAAC,QAAQ,CAAC,IAAI,CAAC,YAAY,EAAE,EAAE,IAAI,CAAC,CAAC;YAE/C,+DAA+D;YAC/D,IAAI,iBAAiB,GAAG,sBAAU,IAAI,sBAAU,GAAG,CAAC,EAAE;gBACpD,KAAK,IAAI,CAAC,GAAG,IAAI,CAAC,YAAY,EAAE,CAAC,GAAG,sBAAU,EAAE,CAAC,EAAE,EAAE;oBACnD,UAAU,CAAC,QAAQ,CAAC,CAAC,EAAE,CAAC,CAAC,CAAC;iBAC3B;gBACD,IAAI,CAAC,UAAU,EAAE,CAAC;gBAClB,IAAI,CAAC,YAAY,GAAG,CAAC,CAAC;aACvB;YAED,KAAK,IAAI,CAAC,GAAG,IAAI,CAAC,YAAY,EAAE,CAAC,GAAG,sBAAU,GAAG,CAAC,EAAE,CAAC,EAAE,EAAE;gBACvD,UAAU,CAAC,QAAQ,CAAC,CAAC,EAAE,CAAC,CAAC,CAAC;aAC3B;YACD,UAAU,CAAC,SAAS,CAClB,sBAAU,GAAG,CAAC,EACd,IAAI,CAAC,KAAK,CAAC,UAAU,GAAG,WAAW,CAAC,EACpC,IAAI,CACL,CAAC;YACF,UAAU,CAAC,SAAS,CAAC,sBAAU,GAAG,CAAC,EAAE,UAAU,CAAC,CAAC;YAEjD,IAAI,CAAC,UAAU,EAAE,CAAC;YAElB,IAAI,CAAC,QAAQ,GAAG,IAAI,CAAC;SACtB;QAED,s
EAAsE;QACtE,kCAAkC;QAClC,IAAM,GAAG,GAAG,IAAI,UAAU,CAAC,yBAAa,CAAC,CAAC;QAC1C,KAAK,IAAI,CAAC,GAAG,CAAC,EAAE,CAAC,GAAG,CAAC,EAAE,CAAC,EAAE,EAAE;YAC1B,GAAG,CAAC,CAAC,GAAG,CAAC,CAAC,GAAG,CAAC,IAAI,CAAC,KAAK,CAAC,CAAC,CAAC,KAAK,EAAE,CAAC,GAAG,IAAI,CAAC;YAC3C,GAAG,CAAC,CAAC,GAAG,CAAC,GAAG,CAAC,CAAC,GAAG,CAAC,IAAI,CAAC,KAAK,CAAC,CAAC,CAAC,KAAK,EAAE,CAAC,GAAG,IAAI,CAAC;YAC/C,GAAG,CAAC,CAAC,GAAG,CAAC,GAAG,CAAC,CAAC,GAAG,CAAC,IAAI,CAAC,KAAK,CAAC,CAAC,CAAC,KAAK,CAAC,CAAC,GAAG,IAAI,CAAC;YAC9C,GAAG,CAAC,CAAC,GAAG,CAAC,GAAG,CAAC,CAAC,GAAG,CAAC,IAAI,CAAC,KAAK,CAAC,CAAC,CAAC,KAAK,CAAC,CAAC,GAAG,IAAI,CAAC;SAC/C;QAED,OAAO,GAAG,CAAC;IACb,CAAC;IAEO,8BAAU,GAAlB;QACQ,IAAA,KAAoB,IAAI,EAAtB,MAAM,YAAA,EAAE,KAAK,WAAS,CAAC;QAE/B,IAAI,MAAM,GAAG,KAAK,CAAC,CAAC,CAAC,EACnB,MAAM,GAAG,KAAK,CAAC,CAAC,CAAC,EACjB,MAAM,GAAG,KAAK,CAAC,CAAC,CAAC,EACjB,MAAM,GAAG,KAAK,CAAC,CAAC,CAAC,EACjB,MAAM,GAAG,KAAK,CAAC,CAAC,CAAC,EACjB,MAAM,GAAG,KAAK,CAAC,CAAC,CAAC,EACjB,MAAM,GAAG,KAAK,CAAC,CAAC,CAAC,EACjB,MAAM,GAAG,KAAK,CAAC,CAAC,CAAC,CAAC;QAEpB,KAAK,IAAI,CAAC,GAAG,CAAC,EAAE,CAAC,GAAG,sBAAU,EAAE,CAAC,EAAE,EAAE;YACnC,IAAI,CAAC,GAAG,EAAE,EAAE;gBACV,IAAI,CAAC,IAAI,CAAC,CAAC,CAAC;oBACV,CAAC,CAAC,MAAM,CAAC,CAAC,GAAG,CAAC,CAAC,GAAG,IAAI,CAAC,IAAI,EAAE,CAAC;wBAC9B,CAAC,CAAC,MAAM,CAAC,CAAC,GAAG,CAAC,GAAG,CAAC,CAAC,GAAG,IAAI,CAAC,IAAI,EAAE,CAAC;wBAClC,CAAC,CAAC,MAAM,CAAC,CAAC,GAAG,CAAC,GAAG,CAAC,CAAC,GAAG,IAAI,CAAC,IAAI,CAAC,CAAC;wBACjC,CAAC,MAAM,CAAC,CAAC,GAAG,CAAC,GAAG,CAAC,CAAC,GAAG,IAAI,CAAC,CAAC;aAC9B;iBAAM;gBACL,IAAI,CAAC,GAAG,IAAI,CAAC,IAAI,CAAC,CAAC,GAAG,CAAC,CAAC,CAAC;gBACzB,IAAM,IAAE,GACN,CAAC,CAAC,CAAC,KAAK,EAAE,CAAC,GAAG,CAAC,CAAC,IAAI,EAAE,CAAC,CAAC,GAAG,CAAC,CAAC,CAAC,KAAK,EAAE,CAAC,GAAG,CAAC,CAAC,IAAI,EAAE,CAAC,CAAC,GAAG,CAAC,CAAC,KAAK,EAAE,CAAC,CAAC;gBAEnE,CAAC,GAAG,IAAI,CAAC,IAAI,CAAC,CAAC,GAAG,EAAE,CAAC,CAAC;gBACtB,IAAM,IAAE,GACN,CAAC,CAAC,CAAC,KAAK,CAAC,CAAC,GAAG,CAAC,CAAC,IAAI,EAAE,CAAC,CAAC,GAAG,CAAC,CAAC,CAAC,KAAK,EAAE,CAAC,GAAG,CAAC,CAAC,IAAI,EAAE,CAAC,CAAC,GAAG,CAAC,CAAC,KAAK,CAAC,CAAC,CAAC;gBAEjE,IAAI
,CAAC,IAAI,CAAC,CAAC,CAAC;oBACV,CAAC,CAAC,IAAE,GAAG,IAAI,CAAC,IAAI,CAAC,CAAC,GAAG,CAAC,CAAC,CAAC,GAAG,CAAC,CAAC,GAAG,CAAC,CAAC,IAAE,GAAG,IAAI,CAAC,IAAI,CAAC,CAAC,GAAG,EAAE,CAAC,CAAC,GAAG,CAAC,CAAC,CAAC;aAClE;YAED,IAAM,EAAE,GACN,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,MAAM,KAAK,CAAC,CAAC,GAAG,CAAC,MAAM,IAAI,EAAE,CAAC,CAAC;gBACnC,CAAC,CAAC,MAAM,KAAK,EAAE,CAAC,GAAG,CAAC,MAAM,IAAI,EAAE,CAAC,CAAC;gBAClC,CAAC,CAAC,MAAM,KAAK,EAAE,CAAC,GAAG,CAAC,MAAM,IAAI,CAAC,CAAC,CAAC,CAAC;gBAClC,CAAC,CAAC,MAAM,GAAG,MAAM,CAAC,GAAG,CAAC,CAAC,MAAM,GAAG,MAAM,CAAC,CAAC,CAAC;gBACzC,CAAC,CAAC;gBACF,CAAC,CAAC,MAAM,GAAG,CAAC,CAAC,eAAG,CAAC,CAAC,CAAC,GAAG,IAAI,CAAC,IAAI,CAAC,CAAC,CAAC,CAAC,GAAG,CAAC,CAAC,CAAC,GAAG,CAAC,CAAC,CAAC;gBACjD,CAAC,CAAC;YAEJ,IAAM,EAAE,GACN,CAAC,CAAC,CAAC,CAAC,MAAM,KAAK,CAAC,CAAC,GAAG,CAAC,MAAM,IAAI,EAAE,CAAC,CAAC;gBACjC,CAAC,CAAC,MAAM,KAAK,EAAE,CAAC,GAAG,CAAC,MAAM,IAAI,EAAE,CAAC,CAAC;gBAClC,CAAC,CAAC,MAAM,KAAK,EAAE,CAAC,GAAG,CAAC,MAAM,IAAI,EAAE,CAAC,CAAC,CAAC;gBACnC,CAAC,CAAC,MAAM,GAAG,MAAM,CAAC,GAAG,CAAC,MAAM,GAAG,MAAM,CAAC,GAAG,CAAC,MAAM,GAAG,MAAM,CAAC,CAAC,CAAC;gBAC9D,CAAC,CAAC;YAEJ,MAAM,GAAG,MAAM,CAAC;YAChB,MAAM,GAAG,MAAM,CAAC;YAChB,MAAM,GAAG,MAAM,CAAC;YAChB,MAAM,GAAG,CAAC,MAAM,GAAG,EAAE,CAAC,GAAG,CAAC,CAAC;YAC3B,MAAM,GAAG,MAAM,CAAC;YAChB,MAAM,GAAG,MAAM,CAAC;YAChB,MAAM,GAAG,MAAM,CAAC;YAChB,MAAM,GAAG,CAAC,EAAE,GAAG,EAAE,CAAC,GAAG,CAAC,CAAC;SACxB;QAED,KAAK,CAAC,CAAC,CAAC,IAAI,MAAM,CAAC;QACnB,KAAK,CAAC,CAAC,CAAC,IAAI,MAAM,CAAC;QACnB,KAAK,CAAC,CAAC,CAAC,IAAI,MAAM,CAAC;QACnB,KAAK,CAAC,CAAC,CAAC,IAAI,MAAM,CAAC;QACnB,KAAK,CAAC,CAAC,CAAC,IAAI,MAAM,CAAC;QACnB,KAAK,CAAC,CAAC,CAAC,IAAI,MAAM,CAAC;QACnB,KAAK,CAAC,CAAC,CAAC,IAAI,MAAM,CAAC;QACnB,KAAK,CAAC,CAAC,CAAC,IAAI,MAAM,CAAC;IACrB,CAAC;IACH,gBAAC;AAAD,CAAC,AAxJD,IAwJC;AAxJY,8BAAS"} \ No newline at end of file diff --git a/amplify/functions/fetchDocuments/node_modules/@aws-crypto/sha256-js/build/main/constants.d.ts b/amplify/functions/fetchDocuments/node_modules/@aws-crypto/sha256-js/build/main/constants.d.ts new file mode 100644 index 
0000000..63bd764 --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@aws-crypto/sha256-js/build/main/constants.d.ts @@ -0,0 +1,20 @@ +/** + * @internal + */ +export declare const BLOCK_SIZE: number; +/** + * @internal + */ +export declare const DIGEST_LENGTH: number; +/** + * @internal + */ +export declare const KEY: Uint32Array; +/** + * @internal + */ +export declare const INIT: number[]; +/** + * @internal + */ +export declare const MAX_HASHABLE_LENGTH: number; diff --git a/amplify/functions/fetchDocuments/node_modules/@aws-crypto/sha256-js/build/main/constants.js b/amplify/functions/fetchDocuments/node_modules/@aws-crypto/sha256-js/build/main/constants.js new file mode 100644 index 0000000..c83aa09 --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@aws-crypto/sha256-js/build/main/constants.js @@ -0,0 +1,98 @@ +"use strict"; +Object.defineProperty(exports, "__esModule", { value: true }); +exports.MAX_HASHABLE_LENGTH = exports.INIT = exports.KEY = exports.DIGEST_LENGTH = exports.BLOCK_SIZE = void 0; +/** + * @internal + */ +exports.BLOCK_SIZE = 64; +/** + * @internal + */ +exports.DIGEST_LENGTH = 32; +/** + * @internal + */ +exports.KEY = new Uint32Array([ + 0x428a2f98, + 0x71374491, + 0xb5c0fbcf, + 0xe9b5dba5, + 0x3956c25b, + 0x59f111f1, + 0x923f82a4, + 0xab1c5ed5, + 0xd807aa98, + 0x12835b01, + 0x243185be, + 0x550c7dc3, + 0x72be5d74, + 0x80deb1fe, + 0x9bdc06a7, + 0xc19bf174, + 0xe49b69c1, + 0xefbe4786, + 0x0fc19dc6, + 0x240ca1cc, + 0x2de92c6f, + 0x4a7484aa, + 0x5cb0a9dc, + 0x76f988da, + 0x983e5152, + 0xa831c66d, + 0xb00327c8, + 0xbf597fc7, + 0xc6e00bf3, + 0xd5a79147, + 0x06ca6351, + 0x14292967, + 0x27b70a85, + 0x2e1b2138, + 0x4d2c6dfc, + 0x53380d13, + 0x650a7354, + 0x766a0abb, + 0x81c2c92e, + 0x92722c85, + 0xa2bfe8a1, + 0xa81a664b, + 0xc24b8b70, + 0xc76c51a3, + 0xd192e819, + 0xd6990624, + 0xf40e3585, + 0x106aa070, + 0x19a4c116, + 0x1e376c08, + 0x2748774c, + 0x34b0bcb5, + 0x391c0cb3, + 0x4ed8aa4a, + 0x5b9cca4f, + 0x682e6ff3, + 
0x748f82ee, + 0x78a5636f, + 0x84c87814, + 0x8cc70208, + 0x90befffa, + 0xa4506ceb, + 0xbef9a3f7, + 0xc67178f2 +]); +/** + * @internal + */ +exports.INIT = [ + 0x6a09e667, + 0xbb67ae85, + 0x3c6ef372, + 0xa54ff53a, + 0x510e527f, + 0x9b05688c, + 0x1f83d9ab, + 0x5be0cd19 +]; +/** + * @internal + */ +exports.MAX_HASHABLE_LENGTH = Math.pow(2, 53) - 1; +//# sourceMappingURL=constants.js.map \ No newline at end of file diff --git a/amplify/functions/fetchDocuments/node_modules/@aws-crypto/sha256-js/build/main/constants.js.map b/amplify/functions/fetchDocuments/node_modules/@aws-crypto/sha256-js/build/main/constants.js.map new file mode 100644 index 0000000..1132c12 --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@aws-crypto/sha256-js/build/main/constants.js.map @@ -0,0 +1 @@ +{"version":3,"file":"constants.js","sourceRoot":"","sources":["../../src/constants.ts"],"names":[],"mappings":";;;AAAA;;GAEG;AACU,QAAA,UAAU,GAAW,EAAE,CAAC;AAErC;;GAEG;AACU,QAAA,aAAa,GAAW,EAAE,CAAC;AAExC;;GAEG;AACU,QAAA,GAAG,GAAG,IAAI,WAAW,CAAC;IACjC,UAAU;IACV,UAAU;IACV,UAAU;IACV,UAAU;IACV,UAAU;IACV,UAAU;IACV,UAAU;IACV,UAAU;IACV,UAAU;IACV,UAAU;IACV,UAAU;IACV,UAAU;IACV,UAAU;IACV,UAAU;IACV,UAAU;IACV,UAAU;IACV,UAAU;IACV,UAAU;IACV,UAAU;IACV,UAAU;IACV,UAAU;IACV,UAAU;IACV,UAAU;IACV,UAAU;IACV,UAAU;IACV,UAAU;IACV,UAAU;IACV,UAAU;IACV,UAAU;IACV,UAAU;IACV,UAAU;IACV,UAAU;IACV,UAAU;IACV,UAAU;IACV,UAAU;IACV,UAAU;IACV,UAAU;IACV,UAAU;IACV,UAAU;IACV,UAAU;IACV,UAAU;IACV,UAAU;IACV,UAAU;IACV,UAAU;IACV,UAAU;IACV,UAAU;IACV,UAAU;IACV,UAAU;IACV,UAAU;IACV,UAAU;IACV,UAAU;IACV,UAAU;IACV,UAAU;IACV,UAAU;IACV,UAAU;IACV,UAAU;IACV,UAAU;IACV,UAAU;IACV,UAAU;IACV,UAAU;IACV,UAAU;IACV,UAAU;IACV,UAAU;IACV,UAAU;CACX,CAAC,CAAC;AAEH;;GAEG;AACU,QAAA,IAAI,GAAG;IAClB,UAAU;IACV,UAAU;IACV,UAAU;IACV,UAAU;IACV,UAAU;IACV,UAAU;IACV,UAAU;IACV,UAAU;CACX,CAAC;AAEF;;GAEG;AACU,QAAA,mBAAmB,GAAG,SAAA,CAAC,EAAI,EAAE,CAAA,GAAG,CAAC,CAAC"} \ No newline at end of file diff --git 
a/amplify/functions/fetchDocuments/node_modules/@aws-crypto/sha256-js/build/main/index.d.ts b/amplify/functions/fetchDocuments/node_modules/@aws-crypto/sha256-js/build/main/index.d.ts new file mode 100644 index 0000000..4554d8a --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@aws-crypto/sha256-js/build/main/index.d.ts @@ -0,0 +1 @@ +export * from "./jsSha256"; diff --git a/amplify/functions/fetchDocuments/node_modules/@aws-crypto/sha256-js/build/main/index.js b/amplify/functions/fetchDocuments/node_modules/@aws-crypto/sha256-js/build/main/index.js new file mode 100644 index 0000000..4329f10 --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@aws-crypto/sha256-js/build/main/index.js @@ -0,0 +1,5 @@ +"use strict"; +Object.defineProperty(exports, "__esModule", { value: true }); +var tslib_1 = require("tslib"); +tslib_1.__exportStar(require("./jsSha256"), exports); +//# sourceMappingURL=index.js.map \ No newline at end of file diff --git a/amplify/functions/fetchDocuments/node_modules/@aws-crypto/sha256-js/build/main/index.js.map b/amplify/functions/fetchDocuments/node_modules/@aws-crypto/sha256-js/build/main/index.js.map new file mode 100644 index 0000000..9f97d54 --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@aws-crypto/sha256-js/build/main/index.js.map @@ -0,0 +1 @@ +{"version":3,"file":"index.js","sourceRoot":"","sources":["../../src/index.ts"],"names":[],"mappings":";;;AAAA,qDAA2B"} \ No newline at end of file diff --git a/amplify/functions/fetchDocuments/node_modules/@aws-crypto/sha256-js/build/main/jsSha256.d.ts b/amplify/functions/fetchDocuments/node_modules/@aws-crypto/sha256-js/build/main/jsSha256.d.ts new file mode 100644 index 0000000..d813b25 --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@aws-crypto/sha256-js/build/main/jsSha256.d.ts @@ -0,0 +1,12 @@ +import { Checksum, SourceData } from "@aws-sdk/types"; +export declare class Sha256 implements Checksum { + private readonly secret?; + 
private hash; + private outer?; + private error; + constructor(secret?: SourceData); + update(toHash: SourceData): void; + digestSync(): Uint8Array; + digest(): Promise; + reset(): void; +} diff --git a/amplify/functions/fetchDocuments/node_modules/@aws-crypto/sha256-js/build/main/jsSha256.js b/amplify/functions/fetchDocuments/node_modules/@aws-crypto/sha256-js/build/main/jsSha256.js new file mode 100644 index 0000000..2a4f2f1 --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@aws-crypto/sha256-js/build/main/jsSha256.js @@ -0,0 +1,85 @@ +"use strict"; +Object.defineProperty(exports, "__esModule", { value: true }); +exports.Sha256 = void 0; +var tslib_1 = require("tslib"); +var constants_1 = require("./constants"); +var RawSha256_1 = require("./RawSha256"); +var util_1 = require("@aws-crypto/util"); +var Sha256 = /** @class */ (function () { + function Sha256(secret) { + this.secret = secret; + this.hash = new RawSha256_1.RawSha256(); + this.reset(); + } + Sha256.prototype.update = function (toHash) { + if ((0, util_1.isEmptyData)(toHash) || this.error) { + return; + } + try { + this.hash.update((0, util_1.convertToBuffer)(toHash)); + } + catch (e) { + this.error = e; + } + }; + /* This synchronous method keeps compatibility + * with the v2 aws-sdk. + */ + Sha256.prototype.digestSync = function () { + if (this.error) { + throw this.error; + } + if (this.outer) { + if (!this.outer.finished) { + this.outer.update(this.hash.digest()); + } + return this.outer.digest(); + } + return this.hash.digest(); + }; + /* The underlying digest method here is synchronous. + * To keep the same interface with the other hash functions + * the default is to expose this as an async method. + * However, it can sometimes be useful to have a sync method. 
+ */ + Sha256.prototype.digest = function () { + return tslib_1.__awaiter(this, void 0, void 0, function () { + return tslib_1.__generator(this, function (_a) { + return [2 /*return*/, this.digestSync()]; + }); + }); + }; + Sha256.prototype.reset = function () { + this.hash = new RawSha256_1.RawSha256(); + if (this.secret) { + this.outer = new RawSha256_1.RawSha256(); + var inner = bufferFromSecret(this.secret); + var outer = new Uint8Array(constants_1.BLOCK_SIZE); + outer.set(inner); + for (var i = 0; i < constants_1.BLOCK_SIZE; i++) { + inner[i] ^= 0x36; + outer[i] ^= 0x5c; + } + this.hash.update(inner); + this.outer.update(outer); + // overwrite the copied key in memory + for (var i = 0; i < inner.byteLength; i++) { + inner[i] = 0; + } + } + }; + return Sha256; +}()); +exports.Sha256 = Sha256; +function bufferFromSecret(secret) { + var input = (0, util_1.convertToBuffer)(secret); + if (input.byteLength > constants_1.BLOCK_SIZE) { + var bufferHash = new RawSha256_1.RawSha256(); + bufferHash.update(input); + input = bufferHash.digest(); + } + var buffer = new Uint8Array(constants_1.BLOCK_SIZE); + buffer.set(input); + return buffer; +} +//# sourceMappingURL=jsSha256.js.map \ No newline at end of file diff --git a/amplify/functions/fetchDocuments/node_modules/@aws-crypto/sha256-js/build/main/jsSha256.js.map b/amplify/functions/fetchDocuments/node_modules/@aws-crypto/sha256-js/build/main/jsSha256.js.map new file mode 100644 index 0000000..c34eb36 --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@aws-crypto/sha256-js/build/main/jsSha256.js.map @@ -0,0 +1 @@ 
+{"version":3,"file":"jsSha256.js","sourceRoot":"","sources":["../../src/jsSha256.ts"],"names":[],"mappings":";;;;AAAA,yCAAyC;AACzC,yCAAwC;AAExC,yCAAgE;AAEhE;IAME,gBAAY,MAAmB;QAC7B,IAAI,CAAC,MAAM,GAAG,MAAM,CAAC;QACrB,IAAI,CAAC,IAAI,GAAG,IAAI,qBAAS,EAAE,CAAC;QAC5B,IAAI,CAAC,KAAK,EAAE,CAAC;IACf,CAAC;IAED,uBAAM,GAAN,UAAO,MAAkB;QACvB,IAAI,IAAA,kBAAW,EAAC,MAAM,CAAC,IAAI,IAAI,CAAC,KAAK,EAAE;YACrC,OAAO;SACR;QAED,IAAI;YACF,IAAI,CAAC,IAAI,CAAC,MAAM,CAAC,IAAA,sBAAe,EAAC,MAAM,CAAC,CAAC,CAAC;SAC3C;QAAC,OAAO,CAAC,EAAE;YACV,IAAI,CAAC,KAAK,GAAG,CAAC,CAAC;SAChB;IACH,CAAC;IAED;;OAEG;IACH,2BAAU,GAAV;QACE,IAAI,IAAI,CAAC,KAAK,EAAE;YACd,MAAM,IAAI,CAAC,KAAK,CAAC;SAClB;QAED,IAAI,IAAI,CAAC,KAAK,EAAE;YACd,IAAI,CAAC,IAAI,CAAC,KAAK,CAAC,QAAQ,EAAE;gBACxB,IAAI,CAAC,KAAK,CAAC,MAAM,CAAC,IAAI,CAAC,IAAI,CAAC,MAAM,EAAE,CAAC,CAAC;aACvC;YAED,OAAO,IAAI,CAAC,KAAK,CAAC,MAAM,EAAE,CAAC;SAC5B;QAED,OAAO,IAAI,CAAC,IAAI,CAAC,MAAM,EAAE,CAAC;IAC5B,CAAC;IAED;;;;OAIG;IACG,uBAAM,GAAZ;;;gBACE,sBAAO,IAAI,CAAC,UAAU,EAAE,EAAC;;;KAC1B;IAED,sBAAK,GAAL;QACE,IAAI,CAAC,IAAI,GAAG,IAAI,qBAAS,EAAE,CAAC;QAC5B,IAAI,IAAI,CAAC,MAAM,EAAE;YACf,IAAI,CAAC,KAAK,GAAG,IAAI,qBAAS,EAAE,CAAC;YAC7B,IAAM,KAAK,GAAG,gBAAgB,CAAC,IAAI,CAAC,MAAM,CAAC,CAAC;YAC5C,IAAM,KAAK,GAAG,IAAI,UAAU,CAAC,sBAAU,CAAC,CAAC;YACzC,KAAK,CAAC,GAAG,CAAC,KAAK,CAAC,CAAC;YAEjB,KAAK,IAAI,CAAC,GAAG,CAAC,EAAE,CAAC,GAAG,sBAAU,EAAE,CAAC,EAAE,EAAE;gBACnC,KAAK,CAAC,CAAC,CAAC,IAAI,IAAI,CAAC;gBACjB,KAAK,CAAC,CAAC,CAAC,IAAI,IAAI,CAAC;aAClB;YAED,IAAI,CAAC,IAAI,CAAC,MAAM,CAAC,KAAK,CAAC,CAAC;YACxB,IAAI,CAAC,KAAK,CAAC,MAAM,CAAC,KAAK,CAAC,CAAC;YAEzB,qCAAqC;YACrC,KAAK,IAAI,CAAC,GAAG,CAAC,EAAE,CAAC,GAAG,KAAK,CAAC,UAAU,EAAE,CAAC,EAAE,EAAE;gBACzC,KAAK,CAAC,CAAC,CAAC,GAAG,CAAC,CAAC;aACd;SACF;IACH,CAAC;IACH,aAAC;AAAD,CAAC,AA1ED,IA0EC;AA1EY,wBAAM;AA4EnB,SAAS,gBAAgB,CAAC,MAAkB;IAC1C,IAAI,KAAK,GAAG,IAAA,sBAAe,EAAC,MAAM,CAAC,CAAC;IAEpC,IAAI,KAAK,CAAC,UAAU,GAAG,sBAAU,EAAE;QACjC,IAAM,UAAU,GAAG,IAAI,qBAAS,EAAE,CAAC;QACnC,UAAU,CAAC,MAAM,CAAC,KAAK,CAAC,CAAC;QACzB,KAAK,GAAG,UAAU,CAAC,MAAM,EAAE,CAAC;KAC
7B;IAED,IAAM,MAAM,GAAG,IAAI,UAAU,CAAC,sBAAU,CAAC,CAAC;IAC1C,MAAM,CAAC,GAAG,CAAC,KAAK,CAAC,CAAC;IAClB,OAAO,MAAM,CAAC;AAChB,CAAC"} \ No newline at end of file diff --git a/amplify/functions/fetchDocuments/node_modules/@aws-crypto/sha256-js/build/main/knownHashes.fixture.d.ts b/amplify/functions/fetchDocuments/node_modules/@aws-crypto/sha256-js/build/main/knownHashes.fixture.d.ts new file mode 100644 index 0000000..d880343 --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@aws-crypto/sha256-js/build/main/knownHashes.fixture.d.ts @@ -0,0 +1,5 @@ +export declare const hashTestVectors: Array<[Uint8Array, Uint8Array]>; +/** + * @see https://tools.ietf.org/html/rfc4231 + */ +export declare const hmacTestVectors: Array<[Uint8Array, Uint8Array, Uint8Array]>; diff --git a/amplify/functions/fetchDocuments/node_modules/@aws-crypto/sha256-js/build/main/knownHashes.fixture.js b/amplify/functions/fetchDocuments/node_modules/@aws-crypto/sha256-js/build/main/knownHashes.fixture.js new file mode 100644 index 0000000..3f0dd2f --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@aws-crypto/sha256-js/build/main/knownHashes.fixture.js @@ -0,0 +1,322 @@ +"use strict"; +Object.defineProperty(exports, "__esModule", { value: true }); +exports.hmacTestVectors = exports.hashTestVectors = void 0; +var util_hex_encoding_1 = require("@aws-sdk/util-hex-encoding"); +var millionChars = new Uint8Array(1000000); +for (var i = 0; i < 1000000; i++) { + millionChars[i] = 97; +} +exports.hashTestVectors = [ + [ + Uint8Array.from([97, 98, 99]), + (0, util_hex_encoding_1.fromHex)("ba7816bf8f01cfea414140de5dae2223b00361a396177a9cb410ff61f20015ad") + ], + [ + new Uint8Array(0), + (0, util_hex_encoding_1.fromHex)("e3b0c44298fc1c149afbf4c8996fb92427ae41e4649b934ca495991b7852b855") + ], + [ + (0, util_hex_encoding_1.fromHex)("61"), + (0, util_hex_encoding_1.fromHex)("ca978112ca1bbdcafac231b39a23dc4da786eff8147c4e72b9807785afee48bb") + ], + [ + (0, 
util_hex_encoding_1.fromHex)("6161"), + (0, util_hex_encoding_1.fromHex)("961b6dd3ede3cb8ecbaacbd68de040cd78eb2ed5889130cceb4c49268ea4d506") + ], + [ + (0, util_hex_encoding_1.fromHex)("616161"), + (0, util_hex_encoding_1.fromHex)("9834876dcfb05cb167a5c24953eba58c4ac89b1adf57f28f2f9d09af107ee8f0") + ], + [ + (0, util_hex_encoding_1.fromHex)("61616161"), + (0, util_hex_encoding_1.fromHex)("61be55a8e2f6b4e172338bddf184d6dbee29c98853e0a0485ecee7f27b9af0b4") + ], + [ + (0, util_hex_encoding_1.fromHex)("6161616161"), + (0, util_hex_encoding_1.fromHex)("ed968e840d10d2d313a870bc131a4e2c311d7ad09bdf32b3418147221f51a6e2") + ], + [ + (0, util_hex_encoding_1.fromHex)("616161616161"), + (0, util_hex_encoding_1.fromHex)("ed02457b5c41d964dbd2f2a609d63fe1bb7528dbe55e1abf5b52c249cd735797") + ], + [ + (0, util_hex_encoding_1.fromHex)("61616161616161"), + (0, util_hex_encoding_1.fromHex)("e46240714b5db3a23eee60479a623efba4d633d27fe4f03c904b9e219a7fbe60") + ], + [ + (0, util_hex_encoding_1.fromHex)("6161616161616161"), + (0, util_hex_encoding_1.fromHex)("1f3ce40415a2081fa3eee75fc39fff8e56c22270d1a978a7249b592dcebd20b4") + ], + [ + (0, util_hex_encoding_1.fromHex)("616161616161616161"), + (0, util_hex_encoding_1.fromHex)("f2aca93b80cae681221f0445fa4e2cae8a1f9f8fa1e1741d9639caad222f537d") + ], + [ + (0, util_hex_encoding_1.fromHex)("61616161616161616161"), + (0, util_hex_encoding_1.fromHex)("bf2cb58a68f684d95a3b78ef8f661c9a4e5b09e82cc8f9cc88cce90528caeb27") + ], + [ + (0, util_hex_encoding_1.fromHex)("6161616161616161616161"), + (0, util_hex_encoding_1.fromHex)("28cb017dfc99073aa1b47c1b30f413e3ce774c4991eb4158de50f9dbb36d8043") + ], + [ + (0, util_hex_encoding_1.fromHex)("616161616161616161616161"), + (0, util_hex_encoding_1.fromHex)("f24abc34b13fade76e805799f71187da6cd90b9cac373ae65ed57f143bd664e5") + ], + [ + (0, util_hex_encoding_1.fromHex)("61616161616161616161616161"), + (0, util_hex_encoding_1.fromHex)("a689d786e81340e45511dec6c7ab2d978434e5db123362450fe10cfac70d19d0") + ], + [ + 
(0, util_hex_encoding_1.fromHex)("6161616161616161616161616161"), + (0, util_hex_encoding_1.fromHex)("82cab7df0abfb9d95dca4e5937ce2968c798c726fea48c016bf9763221efda13") + ], + [ + (0, util_hex_encoding_1.fromHex)("616161616161616161616161616161"), + (0, util_hex_encoding_1.fromHex)("ef2df0b539c6c23de0f4cbe42648c301ae0e22e887340a4599fb4ef4e2678e48") + ], + [ + (0, util_hex_encoding_1.fromHex)("61616161616161616161616161616161"), + (0, util_hex_encoding_1.fromHex)("0c0beacef8877bbf2416eb00f2b5dc96354e26dd1df5517320459b1236860f8c") + ], + [ + (0, util_hex_encoding_1.fromHex)("6161616161616161616161616161616161"), + (0, util_hex_encoding_1.fromHex)("b860666ee2966dd8f903be44ee605c6e1366f926d9f17a8f49937d11624eb99d") + ], + [ + (0, util_hex_encoding_1.fromHex)("616161616161616161616161616161616161"), + (0, util_hex_encoding_1.fromHex)("c926defaaa3d13eda2fc63a553bb7fb7326bece6e7cb67ca5296e4727d89bab4") + ], + [ + (0, util_hex_encoding_1.fromHex)("61616161616161616161616161616161616161"), + (0, util_hex_encoding_1.fromHex)("a0b4aaab8a966e2193ba172d68162c4656860197f256b5f45f0203397ff3f99c") + ], + [ + (0, util_hex_encoding_1.fromHex)("6161616161616161616161616161616161616161"), + (0, util_hex_encoding_1.fromHex)("42492da06234ad0ac76f5d5debdb6d1ae027cffbe746a1c13b89bb8bc0139137") + ], + [ + (0, util_hex_encoding_1.fromHex)("616161616161616161616161616161616161616161"), + (0, util_hex_encoding_1.fromHex)("7df8e299c834de198e264c3e374bc58ecd9382252a705c183beb02f275571e3b") + ], + [ + (0, util_hex_encoding_1.fromHex)("61616161616161616161616161616161616161616161"), + (0, util_hex_encoding_1.fromHex)("ec7c494df6d2a7ea36668d656e6b8979e33641bfea378c15038af3964db057a3") + ], + [ + (0, util_hex_encoding_1.fromHex)("6161616161616161616161616161616161616161616161"), + (0, util_hex_encoding_1.fromHex)("897d3e95b65f26676081f8b9f3a98b6ee4424566303e8d4e7c7522ebae219eab") + ], + [ + (0, util_hex_encoding_1.fromHex)("616161616161616161616161616161616161616161616161"), + (0, 
util_hex_encoding_1.fromHex)("09f61f8d9cd65e6a0c258087c485b6293541364e42bd97b2d7936580c8aa3c54") + ], + [ + (0, util_hex_encoding_1.fromHex)("61616161616161616161616161616161616161616161616161"), + (0, util_hex_encoding_1.fromHex)("2f521e2a7d0bd812cbc035f4ed6806eb8d851793b04ba147e8f66b72f5d1f20f") + ], + [ + (0, util_hex_encoding_1.fromHex)("6161616161616161616161616161616161616161616161616161"), + (0, util_hex_encoding_1.fromHex)("9976d549a25115dab4e36d0c1fb8f31cb07da87dd83275977360eb7dc09e88de") + ], + [ + (0, util_hex_encoding_1.fromHex)("616161616161616161616161616161616161616161616161616161"), + (0, util_hex_encoding_1.fromHex)("cc0616e61cbd6e8e5e34e9fb2d320f37de915820206f5696c31f1fbd24aa16de") + ], + [ + (0, util_hex_encoding_1.fromHex)("61616161616161616161616161616161616161616161616161616161"), + (0, util_hex_encoding_1.fromHex)("9c547cb8115a44883b9f70ba68f75117cd55359c92611875e386f8af98c172ab") + ], + [ + (0, util_hex_encoding_1.fromHex)("6161616161616161616161616161616161616161616161616161616161"), + (0, util_hex_encoding_1.fromHex)("6913c9c7fd42fe23df8b6bcd4dbaf1c17748948d97f2980b432319c39eddcf6c") + ], + [ + (0, util_hex_encoding_1.fromHex)("616161616161616161616161616161616161616161616161616161616161"), + (0, util_hex_encoding_1.fromHex)("3a54fc0cbc0b0ef48b6507b7788096235d10292dd3ae24e22f5aa062d4f9864a") + ], + [ + (0, util_hex_encoding_1.fromHex)("61616161616161616161616161616161616161616161616161616161616161"), + (0, util_hex_encoding_1.fromHex)("61c60b487d1a921e0bcc9bf853dda0fb159b30bf57b2e2d2c753b00be15b5a09") + ], + [ + (0, util_hex_encoding_1.fromHex)("6161616161616161616161616161616161616161616161616161616161616161"), + (0, util_hex_encoding_1.fromHex)("3ba3f5f43b92602683c19aee62a20342b084dd5971ddd33808d81a328879a547") + ], + [ + (0, util_hex_encoding_1.fromHex)("616161616161616161616161616161616161616161616161616161616161616161"), + (0, util_hex_encoding_1.fromHex)("852785c805c77e71a22340a54e9d95933ed49121e7d2bf3c2d358854bc1359ea") + ], + [ + 
(0, util_hex_encoding_1.fromHex)("61616161616161616161616161616161616161616161616161616161616161616161"), + (0, util_hex_encoding_1.fromHex)("a27c896c4859204843166af66f0e902b9c3b3ed6d2fd13d435abc020065c526f") + ], + [ + (0, util_hex_encoding_1.fromHex)("6161616161616161616161616161616161616161616161616161616161616161616161"), + (0, util_hex_encoding_1.fromHex)("629362afc62c74497caed2272e30f8125ecd0965f8d8d7cfc4e260f7f8dd319d") + ], + [ + (0, util_hex_encoding_1.fromHex)("616161616161616161616161616161616161616161616161616161616161616161616161"), + (0, util_hex_encoding_1.fromHex)("22c1d24bcd03e9aee9832efccd6da613fc702793178e5f12c945c7b67ddda933") + ], + [ + (0, util_hex_encoding_1.fromHex)("61616161616161616161616161616161616161616161616161616161616161616161616161"), + (0, util_hex_encoding_1.fromHex)("21ec055b38ce759cd4d0f477e9bdec2c5b8199945db4439bae334a964df6246c") + ], + [ + (0, util_hex_encoding_1.fromHex)("6161616161616161616161616161616161616161616161616161616161616161616161616161"), + (0, util_hex_encoding_1.fromHex)("365a9c3e2c2af0a56e47a9dac51c2c5381bf8f41273bad3175e0e619126ad087") + ], + [ + (0, util_hex_encoding_1.fromHex)("616161616161616161616161616161616161616161616161616161616161616161616161616161"), + (0, util_hex_encoding_1.fromHex)("b4d5e56e929ba4cda349e9274e3603d0be246b82016bca20f363963c5f2d6845") + ], + [ + (0, util_hex_encoding_1.fromHex)("61616161616161616161616161616161616161616161616161616161616161616161616161616161"), + (0, util_hex_encoding_1.fromHex)("e33cdf9c7f7120b98e8c78408953e07f2ecd183006b5606df349b4c212acf43e") + ], + [ + (0, util_hex_encoding_1.fromHex)("6161616161616161616161616161616161616161616161616161616161616161616161616161616161"), + (0, util_hex_encoding_1.fromHex)("c0f8bd4dbc2b0c03107c1c37913f2a7501f521467f45dd0fef6958e9a4692719") + ], + [ + (0, util_hex_encoding_1.fromHex)("616161616161616161616161616161616161616161616161616161616161616161616161616161616161"), + (0, 
util_hex_encoding_1.fromHex)("7a538607fdaab9296995929f451565bbb8142e1844117322aafd2b3d76b01aff") + ], + [ + (0, util_hex_encoding_1.fromHex)("61616161616161616161616161616161616161616161616161616161616161616161616161616161616161"), + (0, util_hex_encoding_1.fromHex)("66d34fba71f8f450f7e45598853e53bfc23bbd129027cbb131a2f4ffd7878cd0") + ], + [ + (0, util_hex_encoding_1.fromHex)("6161616161616161616161616161616161616161616161616161616161616161616161616161616161616161"), + (0, util_hex_encoding_1.fromHex)("16849877c6c21ef0bfa68e4f6747300ddb171b170b9f00e189edc4c2fc4db93e") + ], + [ + (0, util_hex_encoding_1.fromHex)("616161616161616161616161616161616161616161616161616161616161616161616161616161616161616161"), + (0, util_hex_encoding_1.fromHex)("52789e3423b72beeb898456a4f49662e46b0cbb960784c5ef4b1399d327e7c27") + ], + [ + (0, util_hex_encoding_1.fromHex)("61616161616161616161616161616161616161616161616161616161616161616161616161616161616161616161"), + (0, util_hex_encoding_1.fromHex)("6643110c5628fff59edf76d82d5bf573bf800f16a4d65dfb1e5d6f1a46296d0b") + ], + [ + (0, util_hex_encoding_1.fromHex)("6161616161616161616161616161616161616161616161616161616161616161616161616161616161616161616161"), + (0, util_hex_encoding_1.fromHex)("11eaed932c6c6fddfc2efc394e609facf4abe814fc6180d03b14fce13a07d0e5") + ], + [ + (0, util_hex_encoding_1.fromHex)("616161616161616161616161616161616161616161616161616161616161616161616161616161616161616161616161"), + (0, util_hex_encoding_1.fromHex)("97daac0ee9998dfcad6c9c0970da5ca411c86233a944c25b47566f6a7bc1ddd5") + ], + [ + (0, util_hex_encoding_1.fromHex)("61616161616161616161616161616161616161616161616161616161616161616161616161616161616161616161616161"), + (0, util_hex_encoding_1.fromHex)("8f9bec6a62dd28ebd36d1227745592de6658b36974a3bb98a4c582f683ea6c42") + ], + [ + (0, util_hex_encoding_1.fromHex)("6161616161616161616161616161616161616161616161616161616161616161616161616161616161616161616161616161"), + (0, 
util_hex_encoding_1.fromHex)("160b4e433e384e05e537dc59b467f7cb2403f0214db15c5db58862a3f1156d2e") + ], + [ + (0, util_hex_encoding_1.fromHex)("616161616161616161616161616161616161616161616161616161616161616161616161616161616161616161616161616161"), + (0, util_hex_encoding_1.fromHex)("bfc5fe0e360152ca98c50fab4ed7e3078c17debc2917740d5000913b686ca129") + ], + [ + (0, util_hex_encoding_1.fromHex)("61616161616161616161616161616161616161616161616161616161616161616161616161616161616161616161616161616161"), + (0, util_hex_encoding_1.fromHex)("6c1b3dc7a706b9dc81352a6716b9c666c608d8626272c64b914ab05572fc6e84") + ], + [ + (0, util_hex_encoding_1.fromHex)("6161616161616161616161616161616161616161616161616161616161616161616161616161616161616161616161616161616161"), + (0, util_hex_encoding_1.fromHex)("abe346a7259fc90b4c27185419628e5e6af6466b1ae9b5446cac4bfc26cf05c4") + ], + [ + (0, util_hex_encoding_1.fromHex)("616161616161616161616161616161616161616161616161616161616161616161616161616161616161616161616161616161616161"), + (0, util_hex_encoding_1.fromHex)("a3f01b6939256127582ac8ae9fb47a382a244680806a3f613a118851c1ca1d47") + ], + [ + (0, util_hex_encoding_1.fromHex)("61616161616161616161616161616161616161616161616161616161616161616161616161616161616161616161616161616161616161"), + (0, util_hex_encoding_1.fromHex)("9f4390f8d30c2dd92ec9f095b65e2b9ae9b0a925a5258e241c9f1e910f734318") + ], + [ + (0, util_hex_encoding_1.fromHex)("6161616161616161616161616161616161616161616161616161616161616161616161616161616161616161616161616161616161616161"), + (0, util_hex_encoding_1.fromHex)("b35439a4ac6f0948b6d6f9e3c6af0f5f590ce20f1bde7090ef7970686ec6738a") + ], + [ + (0, util_hex_encoding_1.fromHex)("616161616161616161616161616161616161616161616161616161616161616161616161616161616161616161616161616161616161616161"), + (0, util_hex_encoding_1.fromHex)("f13b2d724659eb3bf47f2dd6af1accc87b81f09f59f2b75e5c0bed6589dfe8c6") + ], + [ + (0, 
util_hex_encoding_1.fromHex)("61616161616161616161616161616161616161616161616161616161616161616161616161616161616161616161616161616161616161616161"), + (0, util_hex_encoding_1.fromHex)("d5c039b748aa64665782974ec3dc3025c042edf54dcdc2b5de31385b094cb678") + ], + [ + (0, util_hex_encoding_1.fromHex)("6161616161616161616161616161616161616161616161616161616161616161616161616161616161616161616161616161616161616161616161"), + (0, util_hex_encoding_1.fromHex)("111bb261277afd65f0744b247cd3e47d386d71563d0ed995517807d5ebd4fba3") + ], + [ + (0, util_hex_encoding_1.fromHex)("616161616161616161616161616161616161616161616161616161616161616161616161616161616161616161616161616161616161616161616161"), + (0, util_hex_encoding_1.fromHex)("11ee391211c6256460b6ed375957fadd8061cafbb31daf967db875aebd5aaad4") + ], + [ + (0, util_hex_encoding_1.fromHex)("61616161616161616161616161616161616161616161616161616161616161616161616161616161616161616161616161616161616161616161616161"), + (0, util_hex_encoding_1.fromHex)("35d5fc17cfbbadd00f5e710ada39f194c5ad7c766ad67072245f1fad45f0f530") + ], + [ + (0, util_hex_encoding_1.fromHex)("6161616161616161616161616161616161616161616161616161616161616161616161616161616161616161616161616161616161616161616161616161"), + (0, util_hex_encoding_1.fromHex)("f506898cc7c2e092f9eb9fadae7ba50383f5b46a2a4fe5597dbb553a78981268") + ], + [ + (0, util_hex_encoding_1.fromHex)("616161616161616161616161616161616161616161616161616161616161616161616161616161616161616161616161616161616161616161616161616161"), + (0, util_hex_encoding_1.fromHex)("7d3e74a05d7db15bce4ad9ec0658ea98e3f06eeecf16b4c6fff2da457ddc2f34") + ], + [ + (0, util_hex_encoding_1.fromHex)("61616161616161616161616161616161616161616161616161616161616161616161616161616161616161616161616161616161616161616161616161616161"), + (0, util_hex_encoding_1.fromHex)("ffe054fe7ae0cb6dc65c3af9b61d5209f439851db43d0ba5997337df154668eb") + ], + [ + (0, 
util_hex_encoding_1.fromHex)("de188941a3375d3a8a061e67576e926dc71a7fa3f0cceb97452b4d3227965f9ea8cc75076d9fb9c5417aa5cb30fc22198b34982dbb629e"), + (0, util_hex_encoding_1.fromHex)("038051e9c324393bd1ca1978dd0952c2aa3742ca4f1bd5cd4611cea83892d382") + ], + [ + millionChars, + (0, util_hex_encoding_1.fromHex)("cdc76e5c9914fb9281a1c7e284d73e67f1809a48a497200e046d39ccc7112cd0") + ], + [ + (0, util_hex_encoding_1.fromHex)("aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa"), + (0, util_hex_encoding_1.fromHex)("45ad4b37c6e2fc0a2cfcc1b5da524132ec707615c2cae1dbbc43c97aa521db81") + ] +]; +/** + * @see https://tools.ietf.org/html/rfc4231 + */ +exports.hmacTestVectors = [ + [ + (0, util_hex_encoding_1.fromHex)("0b0b0b0b0b0b0b0b0b0b0b0b0b0b0b0b0b0b0b0b"), + (0, util_hex_encoding_1.fromHex)("4869205468657265"), + (0, util_hex_encoding_1.fromHex)("b0344c61d8db38535ca8afceaf0bf12b881dc200c9833da726e9376c2e32cff7") + ], + [ + (0, util_hex_encoding_1.fromHex)("4a656665"), + (0, util_hex_encoding_1.fromHex)("7768617420646f2079612077616e7420666f72206e6f7468696e673f"), + (0, util_hex_encoding_1.fromHex)("5bdcc146bf60754e6a042426089575c75a003f089d2739839dec58b964ec3843") + ], + [ + (0, util_hex_encoding_1.fromHex)("aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa"), + (0, util_hex_encoding_1.fromHex)("dddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddd"), + (0, util_hex_encoding_1.fromHex)("773ea91e36800e46854db8ebd09181a72959098b3ef8c122d9635514ced565fe") + ], + [ + (0, util_hex_encoding_1.fromHex)("0102030405060708090a0b0c0d0e0f10111213141516171819"), + (0, util_hex_encoding_1.fromHex)("cdcdcdcdcdcdcdcdcdcdcdcdcdcdcdcdcdcdcdcdcdcdcdcdcdcdcdcdcdcdcdcdcdcdcdcdcdcdcdcdcdcdcdcdcdcdcdcdcdcd"), + (0, 
util_hex_encoding_1.fromHex)("82558a389a443c0ea4cc819899f2083a85f0faa3e578f8077a2e3ff46729665b") + ], + [ + (0, util_hex_encoding_1.fromHex)("aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa"), + (0, util_hex_encoding_1.fromHex)("54657374205573696e67204c6172676572205468616e20426c6f636b2d53697a65204b6579202d2048617368204b6579204669727374"), + (0, util_hex_encoding_1.fromHex)("60e431591ee0b67f0d8a26aacbf5b77f8e0bc6213728c5140546040f0ee37f54") + ], + [ + (0, util_hex_encoding_1.fromHex)("aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa"), + (0, util_hex_encoding_1.fromHex)("5468697320697320612074657374207573696e672061206c6172676572207468616e20626c6f636b2d73697a65206b657920616e642061206c6172676572207468616e20626c6f636b2d73697a6520646174612e20546865206b6579206e6565647320746f20626520686173686564206265666f7265206265696e6720757365642062792074686520484d414320616c676f726974686d2e"), + (0, util_hex_encoding_1.fromHex)("9b09ffa71b942fcb27635fbcd5b0e944bfdc63644f0713938a7f51535c3a35e2") + ] +]; +//# sourceMappingURL=knownHashes.fixture.js.map \ No newline at end of file diff --git a/amplify/functions/fetchDocuments/node_modules/@aws-crypto/sha256-js/build/main/knownHashes.fixture.js.map b/amplify/functions/fetchDocuments/node_modules/@aws-crypto/sha256-js/build/main/knownHashes.fixture.js.map new file mode 100644 index 0000000..8ffc02e --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@aws-crypto/sha256-js/build/main/knownHashes.fixture.js.map @@ -0,0 +1 @@ 
+{"version":3,"file":"knownHashes.fixture.js","sourceRoot":"","sources":["../../src/knownHashes.fixture.ts"],"names":[],"mappings":";;;AAAA,gEAAqD;AAErD,IAAM,YAAY,GAAG,IAAI,UAAU,CAAC,OAAO,CAAC,CAAC;AAC7C,KAAK,IAAI,CAAC,GAAG,CAAC,EAAE,CAAC,GAAG,OAAO,EAAE,CAAC,EAAE,EAAE;IAChC,YAAY,CAAC,CAAC,CAAC,GAAG,EAAE,CAAC;CACtB;AAEY,QAAA,eAAe,GAAoC;IAC9D;QACE,UAAU,CAAC,IAAI,CAAC,CAAC,EAAE,EAAE,EAAE,EAAE,EAAE,CAAC,CAAC;QAC7B,IAAA,2BAAO,EAAC,kEAAkE,CAAC;KAC5E;IACD;QACE,IAAI,UAAU,CAAC,CAAC,CAAC;QACjB,IAAA,2BAAO,EAAC,kEAAkE,CAAC;KAC5E;IACD;QACE,IAAA,2BAAO,EAAC,IAAI,CAAC;QACb,IAAA,2BAAO,EAAC,kEAAkE,CAAC;KAC5E;IACD;QACE,IAAA,2BAAO,EAAC,MAAM,CAAC;QACf,IAAA,2BAAO,EAAC,kEAAkE,CAAC;KAC5E;IACD;QACE,IAAA,2BAAO,EAAC,QAAQ,CAAC;QACjB,IAAA,2BAAO,EAAC,kEAAkE,CAAC;KAC5E;IACD;QACE,IAAA,2BAAO,EAAC,UAAU,CAAC;QACnB,IAAA,2BAAO,EAAC,kEAAkE,CAAC;KAC5E;IACD;QACE,IAAA,2BAAO,EAAC,YAAY,CAAC;QACrB,IAAA,2BAAO,EAAC,kEAAkE,CAAC;KAC5E;IACD;QACE,IAAA,2BAAO,EAAC,cAAc,CAAC;QACvB,IAAA,2BAAO,EAAC,kEAAkE,CAAC;KAC5E;IACD;QACE,IAAA,2BAAO,EAAC,gBAAgB,CAAC;QACzB,IAAA,2BAAO,EAAC,kEAAkE,CAAC;KAC5E;IACD;QACE,IAAA,2BAAO,EAAC,kBAAkB,CAAC;QAC3B,IAAA,2BAAO,EAAC,kEAAkE,CAAC;KAC5E;IACD;QACE,IAAA,2BAAO,EAAC,oBAAoB,CAAC;QAC7B,IAAA,2BAAO,EAAC,kEAAkE,CAAC;KAC5E;IACD;QACE,IAAA,2BAAO,EAAC,sBAAsB,CAAC;QAC/B,IAAA,2BAAO,EAAC,kEAAkE,CAAC;KAC5E;IACD;QACE,IAAA,2BAAO,EAAC,wBAAwB,CAAC;QACjC,IAAA,2BAAO,EAAC,kEAAkE,CAAC;KAC5E;IACD;QACE,IAAA,2BAAO,EAAC,0BAA0B,CAAC;QACnC,IAAA,2BAAO,EAAC,kEAAkE,CAAC;KAC5E;IACD;QACE,IAAA,2BAAO,EAAC,4BAA4B,CAAC;QACrC,IAAA,2BAAO,EAAC,kEAAkE,CAAC;KAC5E;IACD;QACE,IAAA,2BAAO,EAAC,8BAA8B,CAAC;QACvC,IAAA,2BAAO,EAAC,kEAAkE,CAAC;KAC5E;IACD;QACE,IAAA,2BAAO,EAAC,gCAAgC,CAAC;QACzC,IAAA,2BAAO,EAAC,kEAAkE,CAAC;KAC5E;IACD;QACE,IAAA,2BAAO,EAAC,kCAAkC,CAAC;QAC3C,IAAA,2BAAO,EAAC,kEAAkE,CAAC;KAC5E;IACD;QACE,IAAA,2BAAO,EAAC,oCAAoC,CAAC;QAC7C,IAAA,2BAAO,EAAC,kEAAkE,CAAC;KAC5E;IACD;QACE,IAAA,2BAAO,EAAC,sCAAsC,CAAC;QAC/C,IAAA,2BAAO,EAAC,kEAAkE,CAAC;KAC5E;IACD;QACE,IAAA,2BAAO,EAAC,wCAAwC,CAAC;QACjD,IAAA,2BAAO,EAAC,kEAAkE,CAAC;KAC5E;IACD;QACE
,IAAA,2BAAO,EAAC,0CAA0C,CAAC;QACnD,IAAA,2BAAO,EAAC,kEAAkE,CAAC;KAC5E;IACD;QACE,IAAA,2BAAO,EAAC,4CAA4C,CAAC;QACrD,IAAA,2BAAO,EAAC,kEAAkE,CAAC;KAC5E;IACD;QACE,IAAA,2BAAO,EAAC,8CAA8C,CAAC;QACvD,IAAA,2BAAO,EAAC,kEAAkE,CAAC;KAC5E;IACD;QACE,IAAA,2BAAO,EAAC,gDAAgD,CAAC;QACzD,IAAA,2BAAO,EAAC,kEAAkE,CAAC;KAC5E;IACD;QACE,IAAA,2BAAO,EAAC,kDAAkD,CAAC;QAC3D,IAAA,2BAAO,EAAC,kEAAkE,CAAC;KAC5E;IACD;QACE,IAAA,2BAAO,EAAC,oDAAoD,CAAC;QAC7D,IAAA,2BAAO,EAAC,kEAAkE,CAAC;KAC5E;IACD;QACE,IAAA,2BAAO,EAAC,sDAAsD,CAAC;QAC/D,IAAA,2BAAO,EAAC,kEAAkE,CAAC;KAC5E;IACD;QACE,IAAA,2BAAO,EAAC,wDAAwD,CAAC;QACjE,IAAA,2BAAO,EAAC,kEAAkE,CAAC;KAC5E;IACD;QACE,IAAA,2BAAO,EAAC,0DAA0D,CAAC;QACnE,IAAA,2BAAO,EAAC,kEAAkE,CAAC;KAC5E;IACD;QACE,IAAA,2BAAO,EAAC,4DAA4D,CAAC;QACrE,IAAA,2BAAO,EAAC,kEAAkE,CAAC;KAC5E;IACD;QACE,IAAA,2BAAO,EAAC,8DAA8D,CAAC;QACvE,IAAA,2BAAO,EAAC,kEAAkE,CAAC;KAC5E;IACD;QACE,IAAA,2BAAO,EAAC,gEAAgE,CAAC;QACzE,IAAA,2BAAO,EAAC,kEAAkE,CAAC;KAC5E;IACD;QACE,IAAA,2BAAO,EAAC,kEAAkE,CAAC;QAC3E,IAAA,2BAAO,EAAC,kEAAkE,CAAC;KAC5E;IACD;QACE,IAAA,2BAAO,EACL,oEAAoE,CACrE;QACD,IAAA,2BAAO,EAAC,kEAAkE,CAAC;KAC5E;IACD;QACE,IAAA,2BAAO,EACL,sEAAsE,CACvE;QACD,IAAA,2BAAO,EAAC,kEAAkE,CAAC;KAC5E;IACD;QACE,IAAA,2BAAO,EACL,wEAAwE,CACzE;QACD,IAAA,2BAAO,EAAC,kEAAkE,CAAC;KAC5E;IACD;QACE,IAAA,2BAAO,EACL,0EAA0E,CAC3E;QACD,IAAA,2BAAO,EAAC,kEAAkE,CAAC;KAC5E;IACD;QACE,IAAA,2BAAO,EACL,4EAA4E,CAC7E;QACD,IAAA,2BAAO,EAAC,kEAAkE,CAAC;KAC5E;IACD;QACE,IAAA,2BAAO,EACL,8EAA8E,CAC/E;QACD,IAAA,2BAAO,EAAC,kEAAkE,CAAC;KAC5E;IACD;QACE,IAAA,2BAAO,EACL,gFAAgF,CACjF;QACD,IAAA,2BAAO,EAAC,kEAAkE,CAAC;KAC5E;IACD;QACE,IAAA,2BAAO,EACL,kFAAkF,CACnF;QACD,IAAA,2BAAO,EAAC,kEAAkE,CAAC;KAC5E;IACD;QACE,IAAA,2BAAO,EACL,oFAAoF,CACrF;QACD,IAAA,2BAAO,EAAC,kEAAkE,CAAC;KAC5E;IACD;QACE,IAAA,2BAAO,EACL,sFAAsF,CACvF;QACD,IAAA,2BAAO,EAAC,kEAAkE,CAAC;KAC5E;IACD;QACE,IAAA,2BAAO,EACL,wFAAwF,CACzF;QACD,IAAA,2BAAO,EAAC,kEAAkE,CAAC;KAC5E;IACD;QACE,IAAA,2BAAO,EACL,0FAA0F,CAC3F;QACD,IAAA,2BAAO,EAAC,kEAAkE,CAAC;KAC5E;IACD;QACE,IAAA,2BAAO,EACL,4FAA4F,CAC7F;QACD,IAAA,2BAAO,EAAC
,kEAAkE,CAAC;KAC5E;IACD;QACE,IAAA,2BAAO,EACL,8FAA8F,CAC/F;QACD,IAAA,2BAAO,EAAC,kEAAkE,CAAC;KAC5E;IACD;QACE,IAAA,2BAAO,EACL,gGAAgG,CACjG;QACD,IAAA,2BAAO,EAAC,kEAAkE,CAAC;KAC5E;IACD;QACE,IAAA,2BAAO,EACL,kGAAkG,CACnG;QACD,IAAA,2BAAO,EAAC,kEAAkE,CAAC;KAC5E;IACD;QACE,IAAA,2BAAO,EACL,oGAAoG,CACrG;QACD,IAAA,2BAAO,EAAC,kEAAkE,CAAC;KAC5E;IACD;QACE,IAAA,2BAAO,EACL,sGAAsG,CACvG;QACD,IAAA,2BAAO,EAAC,kEAAkE,CAAC;KAC5E;IACD;QACE,IAAA,2BAAO,EACL,wGAAwG,CACzG;QACD,IAAA,2BAAO,EAAC,kEAAkE,CAAC;KAC5E;IACD;QACE,IAAA,2BAAO,EACL,0GAA0G,CAC3G;QACD,IAAA,2BAAO,EAAC,kEAAkE,CAAC;KAC5E;IACD;QACE,IAAA,2BAAO,EACL,4GAA4G,CAC7G;QACD,IAAA,2BAAO,EAAC,kEAAkE,CAAC;KAC5E;IACD;QACE,IAAA,2BAAO,EACL,8GAA8G,CAC/G;QACD,IAAA,2BAAO,EAAC,kEAAkE,CAAC;KAC5E;IACD;QACE,IAAA,2BAAO,EACL,gHAAgH,CACjH;QACD,IAAA,2BAAO,EAAC,kEAAkE,CAAC;KAC5E;IACD;QACE,IAAA,2BAAO,EACL,kHAAkH,CACnH;QACD,IAAA,2BAAO,EAAC,kEAAkE,CAAC;KAC5E;IACD;QACE,IAAA,2BAAO,EACL,oHAAoH,CACrH;QACD,IAAA,2BAAO,EAAC,kEAAkE,CAAC;KAC5E;IACD;QACE,IAAA,2BAAO,EACL,sHAAsH,CACvH;QACD,IAAA,2BAAO,EAAC,kEAAkE,CAAC;KAC5E;IACD;QACE,IAAA,2BAAO,EACL,wHAAwH,CACzH;QACD,IAAA,2BAAO,EAAC,kEAAkE,CAAC;KAC5E;IACD;QACE,IAAA,2BAAO,EACL,0HAA0H,CAC3H;QACD,IAAA,2BAAO,EAAC,kEAAkE,CAAC;KAC5E;IACD;QACE,IAAA,2BAAO,EACL,4HAA4H,CAC7H;QACD,IAAA,2BAAO,EAAC,kEAAkE,CAAC;KAC5E;IACD;QACE,IAAA,2BAAO,EACL,8HAA8H,CAC/H;QACD,IAAA,2BAAO,EAAC,kEAAkE,CAAC;KAC5E;IACD;QACE,IAAA,2BAAO,EACL,gIAAgI,CACjI;QACD,IAAA,2BAAO,EAAC,kEAAkE,CAAC;KAC5E;IACD;QACE,IAAA,2BAAO,EACL,kIAAkI,CACnI;QACD,IAAA,2BAAO,EAAC,kEAAkE,CAAC;KAC5E;IACD;QACE,IAAA,2BAAO,EACL,gHAAgH,CACjH;QACD,IAAA,2BAAO,EAAC,kEAAkE,CAAC;KAC5E;IACD;QACE,YAAY;QACZ,IAAA,2BAAO,EAAC,kEAAkE,CAAC;KAC5E;IACD;QACE,IAAA,2BAAO,EACL,wQAAwQ,CACzQ;QACD,IAAA,2BAAO,EAAC,kEAAkE,CAAC;KAC5E;CACF,CAAC;AAEF;;GAEG;AACU,QAAA,eAAe,GAAgD;IAC1E;QACE,IAAA,2BAAO,EAAC,0CAA0C,CAAC;QACnD,IAAA,2BAAO,EAAC,kBAAkB,CAAC;QAC3B,IAAA,2BAAO,EAAC,kEAAkE,CAAC;KAC5E;IACD;QACE,IAAA,2BAAO,EAAC,UAAU,CAAC;QACnB,IAAA,2BAAO,EAAC,0DAA0D,CAAC;QACnE,IAAA,2BAAO,EAAC,kEAAkE,CAAC;KAC5E;IACD;QACE,IAAA,2BAAO,EAA
C,0CAA0C,CAAC;QACnD,IAAA,2BAAO,EACL,sGAAsG,CACvG;QACD,IAAA,2BAAO,EAAC,kEAAkE,CAAC;KAC5E;IACD;QACE,IAAA,2BAAO,EAAC,oDAAoD,CAAC;QAC7D,IAAA,2BAAO,EACL,sGAAsG,CACvG;QACD,IAAA,2BAAO,EAAC,kEAAkE,CAAC;KAC5E;IACD;QACE,IAAA,2BAAO,EACL,wQAAwQ,CACzQ;QACD,IAAA,2BAAO,EACL,8GAA8G,CAC/G;QACD,IAAA,2BAAO,EAAC,kEAAkE,CAAC;KAC5E;IACD;QACE,IAAA,2BAAO,EACL,wQAAwQ,CACzQ;QACD,IAAA,2BAAO,EACL,kTAAkT,CACnT;QACD,IAAA,2BAAO,EAAC,kEAAkE,CAAC;KAC5E;CACF,CAAC"} \ No newline at end of file diff --git a/amplify/functions/fetchDocuments/node_modules/@aws-crypto/sha256-js/build/module/RawSha256.d.ts b/amplify/functions/fetchDocuments/node_modules/@aws-crypto/sha256-js/build/module/RawSha256.d.ts new file mode 100644 index 0000000..1f580b2 --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@aws-crypto/sha256-js/build/module/RawSha256.d.ts @@ -0,0 +1,17 @@ +/** + * @internal + */ +export declare class RawSha256 { + private state; + private temp; + private buffer; + private bufferLength; + private bytesHashed; + /** + * @internal + */ + finished: boolean; + update(data: Uint8Array): void; + digest(): Uint8Array; + private hashBuffer; +} diff --git a/amplify/functions/fetchDocuments/node_modules/@aws-crypto/sha256-js/build/module/RawSha256.js b/amplify/functions/fetchDocuments/node_modules/@aws-crypto/sha256-js/build/module/RawSha256.js new file mode 100644 index 0000000..f799acd --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@aws-crypto/sha256-js/build/module/RawSha256.js @@ -0,0 +1,121 @@ +import { BLOCK_SIZE, DIGEST_LENGTH, INIT, KEY, MAX_HASHABLE_LENGTH } from "./constants"; +/** + * @internal + */ +var RawSha256 = /** @class */ (function () { + function RawSha256() { + this.state = Int32Array.from(INIT); + this.temp = new Int32Array(64); + this.buffer = new Uint8Array(64); + this.bufferLength = 0; + this.bytesHashed = 0; + /** + * @internal + */ + this.finished = false; + } + RawSha256.prototype.update = function (data) { + if (this.finished) { + throw new 
Error("Attempted to update an already finished hash."); + } + var position = 0; + var byteLength = data.byteLength; + this.bytesHashed += byteLength; + if (this.bytesHashed * 8 > MAX_HASHABLE_LENGTH) { + throw new Error("Cannot hash more than 2^53 - 1 bits"); + } + while (byteLength > 0) { + this.buffer[this.bufferLength++] = data[position++]; + byteLength--; + if (this.bufferLength === BLOCK_SIZE) { + this.hashBuffer(); + this.bufferLength = 0; + } + } + }; + RawSha256.prototype.digest = function () { + if (!this.finished) { + var bitsHashed = this.bytesHashed * 8; + var bufferView = new DataView(this.buffer.buffer, this.buffer.byteOffset, this.buffer.byteLength); + var undecoratedLength = this.bufferLength; + bufferView.setUint8(this.bufferLength++, 0x80); + // Ensure the final block has enough room for the hashed length + if (undecoratedLength % BLOCK_SIZE >= BLOCK_SIZE - 8) { + for (var i = this.bufferLength; i < BLOCK_SIZE; i++) { + bufferView.setUint8(i, 0); + } + this.hashBuffer(); + this.bufferLength = 0; + } + for (var i = this.bufferLength; i < BLOCK_SIZE - 8; i++) { + bufferView.setUint8(i, 0); + } + bufferView.setUint32(BLOCK_SIZE - 8, Math.floor(bitsHashed / 0x100000000), true); + bufferView.setUint32(BLOCK_SIZE - 4, bitsHashed); + this.hashBuffer(); + this.finished = true; + } + // The value in state is little-endian rather than big-endian, so flip + // each word into a new Uint8Array + var out = new Uint8Array(DIGEST_LENGTH); + for (var i = 0; i < 8; i++) { + out[i * 4] = (this.state[i] >>> 24) & 0xff; + out[i * 4 + 1] = (this.state[i] >>> 16) & 0xff; + out[i * 4 + 2] = (this.state[i] >>> 8) & 0xff; + out[i * 4 + 3] = (this.state[i] >>> 0) & 0xff; + } + return out; + }; + RawSha256.prototype.hashBuffer = function () { + var _a = this, buffer = _a.buffer, state = _a.state; + var state0 = state[0], state1 = state[1], state2 = state[2], state3 = state[3], state4 = state[4], state5 = state[5], state6 = state[6], state7 = state[7]; + for (var i = 0; i < 
BLOCK_SIZE; i++) { + if (i < 16) { + this.temp[i] = + ((buffer[i * 4] & 0xff) << 24) | + ((buffer[i * 4 + 1] & 0xff) << 16) | + ((buffer[i * 4 + 2] & 0xff) << 8) | + (buffer[i * 4 + 3] & 0xff); + } + else { + var u = this.temp[i - 2]; + var t1_1 = ((u >>> 17) | (u << 15)) ^ ((u >>> 19) | (u << 13)) ^ (u >>> 10); + u = this.temp[i - 15]; + var t2_1 = ((u >>> 7) | (u << 25)) ^ ((u >>> 18) | (u << 14)) ^ (u >>> 3); + this.temp[i] = + ((t1_1 + this.temp[i - 7]) | 0) + ((t2_1 + this.temp[i - 16]) | 0); + } + var t1 = ((((((state4 >>> 6) | (state4 << 26)) ^ + ((state4 >>> 11) | (state4 << 21)) ^ + ((state4 >>> 25) | (state4 << 7))) + + ((state4 & state5) ^ (~state4 & state6))) | + 0) + + ((state7 + ((KEY[i] + this.temp[i]) | 0)) | 0)) | + 0; + var t2 = ((((state0 >>> 2) | (state0 << 30)) ^ + ((state0 >>> 13) | (state0 << 19)) ^ + ((state0 >>> 22) | (state0 << 10))) + + ((state0 & state1) ^ (state0 & state2) ^ (state1 & state2))) | + 0; + state7 = state6; + state6 = state5; + state5 = state4; + state4 = (state3 + t1) | 0; + state3 = state2; + state2 = state1; + state1 = state0; + state0 = (t1 + t2) | 0; + } + state[0] += state0; + state[1] += state1; + state[2] += state2; + state[3] += state3; + state[4] += state4; + state[5] += state5; + state[6] += state6; + state[7] += state7; + }; + return RawSha256; +}()); +export { RawSha256 }; +//# sourceMappingURL=RawSha256.js.map \ No newline at end of file diff --git a/amplify/functions/fetchDocuments/node_modules/@aws-crypto/sha256-js/build/module/RawSha256.js.map b/amplify/functions/fetchDocuments/node_modules/@aws-crypto/sha256-js/build/module/RawSha256.js.map new file mode 100644 index 0000000..c4d50a9 --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@aws-crypto/sha256-js/build/module/RawSha256.js.map @@ -0,0 +1 @@ 
+{"version":3,"file":"RawSha256.js","sourceRoot":"","sources":["../../src/RawSha256.ts"],"names":[],"mappings":"AAAA,OAAO,EACL,UAAU,EACV,aAAa,EACb,IAAI,EACJ,GAAG,EACH,mBAAmB,EACpB,MAAM,aAAa,CAAC;AAErB;;GAEG;AACH;IAAA;QACU,UAAK,GAAe,UAAU,CAAC,IAAI,CAAC,IAAI,CAAC,CAAC;QAC1C,SAAI,GAAe,IAAI,UAAU,CAAC,EAAE,CAAC,CAAC;QACtC,WAAM,GAAe,IAAI,UAAU,CAAC,EAAE,CAAC,CAAC;QACxC,iBAAY,GAAW,CAAC,CAAC;QACzB,gBAAW,GAAW,CAAC,CAAC;QAEhC;;WAEG;QACH,aAAQ,GAAY,KAAK,CAAC;IA8I5B,CAAC;IA5IC,0BAAM,GAAN,UAAO,IAAgB;QACrB,IAAI,IAAI,CAAC,QAAQ,EAAE;YACjB,MAAM,IAAI,KAAK,CAAC,+CAA+C,CAAC,CAAC;SAClE;QAED,IAAI,QAAQ,GAAG,CAAC,CAAC;QACX,IAAA,UAAU,GAAK,IAAI,WAAT,CAAU;QAC1B,IAAI,CAAC,WAAW,IAAI,UAAU,CAAC;QAE/B,IAAI,IAAI,CAAC,WAAW,GAAG,CAAC,GAAG,mBAAmB,EAAE;YAC9C,MAAM,IAAI,KAAK,CAAC,qCAAqC,CAAC,CAAC;SACxD;QAED,OAAO,UAAU,GAAG,CAAC,EAAE;YACrB,IAAI,CAAC,MAAM,CAAC,IAAI,CAAC,YAAY,EAAE,CAAC,GAAG,IAAI,CAAC,QAAQ,EAAE,CAAC,CAAC;YACpD,UAAU,EAAE,CAAC;YAEb,IAAI,IAAI,CAAC,YAAY,KAAK,UAAU,EAAE;gBACpC,IAAI,CAAC,UAAU,EAAE,CAAC;gBAClB,IAAI,CAAC,YAAY,GAAG,CAAC,CAAC;aACvB;SACF;IACH,CAAC;IAED,0BAAM,GAAN;QACE,IAAI,CAAC,IAAI,CAAC,QAAQ,EAAE;YAClB,IAAM,UAAU,GAAG,IAAI,CAAC,WAAW,GAAG,CAAC,CAAC;YACxC,IAAM,UAAU,GAAG,IAAI,QAAQ,CAC7B,IAAI,CAAC,MAAM,CAAC,MAAM,EAClB,IAAI,CAAC,MAAM,CAAC,UAAU,EACtB,IAAI,CAAC,MAAM,CAAC,UAAU,CACvB,CAAC;YAEF,IAAM,iBAAiB,GAAG,IAAI,CAAC,YAAY,CAAC;YAC5C,UAAU,CAAC,QAAQ,CAAC,IAAI,CAAC,YAAY,EAAE,EAAE,IAAI,CAAC,CAAC;YAE/C,+DAA+D;YAC/D,IAAI,iBAAiB,GAAG,UAAU,IAAI,UAAU,GAAG,CAAC,EAAE;gBACpD,KAAK,IAAI,CAAC,GAAG,IAAI,CAAC,YAAY,EAAE,CAAC,GAAG,UAAU,EAAE,CAAC,EAAE,EAAE;oBACnD,UAAU,CAAC,QAAQ,CAAC,CAAC,EAAE,CAAC,CAAC,CAAC;iBAC3B;gBACD,IAAI,CAAC,UAAU,EAAE,CAAC;gBAClB,IAAI,CAAC,YAAY,GAAG,CAAC,CAAC;aACvB;YAED,KAAK,IAAI,CAAC,GAAG,IAAI,CAAC,YAAY,EAAE,CAAC,GAAG,UAAU,GAAG,CAAC,EAAE,CAAC,EAAE,EAAE;gBACvD,UAAU,CAAC,QAAQ,CAAC,CAAC,EAAE,CAAC,CAAC,CAAC;aAC3B;YACD,UAAU,CAAC,SAAS,CAClB,UAAU,GAAG,CAAC,EACd,IAAI,CAAC,KAAK,CAAC,UAAU,GAAG,WAAW,CAAC,EACpC,IAAI,CACL,CAAC;YACF,UAAU,CAAC,SAAS,CAAC,UAAU,GAAG,CAAC,EAAE,UAAU,CAAC,CAAC;YAEjD,IAAI,CAAC,UAA
U,EAAE,CAAC;YAElB,IAAI,CAAC,QAAQ,GAAG,IAAI,CAAC;SACtB;QAED,sEAAsE;QACtE,kCAAkC;QAClC,IAAM,GAAG,GAAG,IAAI,UAAU,CAAC,aAAa,CAAC,CAAC;QAC1C,KAAK,IAAI,CAAC,GAAG,CAAC,EAAE,CAAC,GAAG,CAAC,EAAE,CAAC,EAAE,EAAE;YAC1B,GAAG,CAAC,CAAC,GAAG,CAAC,CAAC,GAAG,CAAC,IAAI,CAAC,KAAK,CAAC,CAAC,CAAC,KAAK,EAAE,CAAC,GAAG,IAAI,CAAC;YAC3C,GAAG,CAAC,CAAC,GAAG,CAAC,GAAG,CAAC,CAAC,GAAG,CAAC,IAAI,CAAC,KAAK,CAAC,CAAC,CAAC,KAAK,EAAE,CAAC,GAAG,IAAI,CAAC;YAC/C,GAAG,CAAC,CAAC,GAAG,CAAC,GAAG,CAAC,CAAC,GAAG,CAAC,IAAI,CAAC,KAAK,CAAC,CAAC,CAAC,KAAK,CAAC,CAAC,GAAG,IAAI,CAAC;YAC9C,GAAG,CAAC,CAAC,GAAG,CAAC,GAAG,CAAC,CAAC,GAAG,CAAC,IAAI,CAAC,KAAK,CAAC,CAAC,CAAC,KAAK,CAAC,CAAC,GAAG,IAAI,CAAC;SAC/C;QAED,OAAO,GAAG,CAAC;IACb,CAAC;IAEO,8BAAU,GAAlB;QACQ,IAAA,KAAoB,IAAI,EAAtB,MAAM,YAAA,EAAE,KAAK,WAAS,CAAC;QAE/B,IAAI,MAAM,GAAG,KAAK,CAAC,CAAC,CAAC,EACnB,MAAM,GAAG,KAAK,CAAC,CAAC,CAAC,EACjB,MAAM,GAAG,KAAK,CAAC,CAAC,CAAC,EACjB,MAAM,GAAG,KAAK,CAAC,CAAC,CAAC,EACjB,MAAM,GAAG,KAAK,CAAC,CAAC,CAAC,EACjB,MAAM,GAAG,KAAK,CAAC,CAAC,CAAC,EACjB,MAAM,GAAG,KAAK,CAAC,CAAC,CAAC,EACjB,MAAM,GAAG,KAAK,CAAC,CAAC,CAAC,CAAC;QAEpB,KAAK,IAAI,CAAC,GAAG,CAAC,EAAE,CAAC,GAAG,UAAU,EAAE,CAAC,EAAE,EAAE;YACnC,IAAI,CAAC,GAAG,EAAE,EAAE;gBACV,IAAI,CAAC,IAAI,CAAC,CAAC,CAAC;oBACV,CAAC,CAAC,MAAM,CAAC,CAAC,GAAG,CAAC,CAAC,GAAG,IAAI,CAAC,IAAI,EAAE,CAAC;wBAC9B,CAAC,CAAC,MAAM,CAAC,CAAC,GAAG,CAAC,GAAG,CAAC,CAAC,GAAG,IAAI,CAAC,IAAI,EAAE,CAAC;wBAClC,CAAC,CAAC,MAAM,CAAC,CAAC,GAAG,CAAC,GAAG,CAAC,CAAC,GAAG,IAAI,CAAC,IAAI,CAAC,CAAC;wBACjC,CAAC,MAAM,CAAC,CAAC,GAAG,CAAC,GAAG,CAAC,CAAC,GAAG,IAAI,CAAC,CAAC;aAC9B;iBAAM;gBACL,IAAI,CAAC,GAAG,IAAI,CAAC,IAAI,CAAC,CAAC,GAAG,CAAC,CAAC,CAAC;gBACzB,IAAM,IAAE,GACN,CAAC,CAAC,CAAC,KAAK,EAAE,CAAC,GAAG,CAAC,CAAC,IAAI,EAAE,CAAC,CAAC,GAAG,CAAC,CAAC,CAAC,KAAK,EAAE,CAAC,GAAG,CAAC,CAAC,IAAI,EAAE,CAAC,CAAC,GAAG,CAAC,CAAC,KAAK,EAAE,CAAC,CAAC;gBAEnE,CAAC,GAAG,IAAI,CAAC,IAAI,CAAC,CAAC,GAAG,EAAE,CAAC,CAAC;gBACtB,IAAM,IAAE,GACN,CAAC,CAAC,CAAC,KAAK,CAAC,CAAC,GAAG,CAAC,CAAC,IAAI,EAAE,CAAC,CAAC,GAAG,CAAC,CAAC,CAAC,KAAK,EAAE,CAAC,GAAG,CAAC,CAAC,IAAI,EAA
E,CAAC,CAAC,GAAG,CAAC,CAAC,KAAK,CAAC,CAAC,CAAC;gBAEjE,IAAI,CAAC,IAAI,CAAC,CAAC,CAAC;oBACV,CAAC,CAAC,IAAE,GAAG,IAAI,CAAC,IAAI,CAAC,CAAC,GAAG,CAAC,CAAC,CAAC,GAAG,CAAC,CAAC,GAAG,CAAC,CAAC,IAAE,GAAG,IAAI,CAAC,IAAI,CAAC,CAAC,GAAG,EAAE,CAAC,CAAC,GAAG,CAAC,CAAC,CAAC;aAClE;YAED,IAAM,EAAE,GACN,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,MAAM,KAAK,CAAC,CAAC,GAAG,CAAC,MAAM,IAAI,EAAE,CAAC,CAAC;gBACnC,CAAC,CAAC,MAAM,KAAK,EAAE,CAAC,GAAG,CAAC,MAAM,IAAI,EAAE,CAAC,CAAC;gBAClC,CAAC,CAAC,MAAM,KAAK,EAAE,CAAC,GAAG,CAAC,MAAM,IAAI,CAAC,CAAC,CAAC,CAAC;gBAClC,CAAC,CAAC,MAAM,GAAG,MAAM,CAAC,GAAG,CAAC,CAAC,MAAM,GAAG,MAAM,CAAC,CAAC,CAAC;gBACzC,CAAC,CAAC;gBACF,CAAC,CAAC,MAAM,GAAG,CAAC,CAAC,GAAG,CAAC,CAAC,CAAC,GAAG,IAAI,CAAC,IAAI,CAAC,CAAC,CAAC,CAAC,GAAG,CAAC,CAAC,CAAC,GAAG,CAAC,CAAC,CAAC;gBACjD,CAAC,CAAC;YAEJ,IAAM,EAAE,GACN,CAAC,CAAC,CAAC,CAAC,MAAM,KAAK,CAAC,CAAC,GAAG,CAAC,MAAM,IAAI,EAAE,CAAC,CAAC;gBACjC,CAAC,CAAC,MAAM,KAAK,EAAE,CAAC,GAAG,CAAC,MAAM,IAAI,EAAE,CAAC,CAAC;gBAClC,CAAC,CAAC,MAAM,KAAK,EAAE,CAAC,GAAG,CAAC,MAAM,IAAI,EAAE,CAAC,CAAC,CAAC;gBACnC,CAAC,CAAC,MAAM,GAAG,MAAM,CAAC,GAAG,CAAC,MAAM,GAAG,MAAM,CAAC,GAAG,CAAC,MAAM,GAAG,MAAM,CAAC,CAAC,CAAC;gBAC9D,CAAC,CAAC;YAEJ,MAAM,GAAG,MAAM,CAAC;YAChB,MAAM,GAAG,MAAM,CAAC;YAChB,MAAM,GAAG,MAAM,CAAC;YAChB,MAAM,GAAG,CAAC,MAAM,GAAG,EAAE,CAAC,GAAG,CAAC,CAAC;YAC3B,MAAM,GAAG,MAAM,CAAC;YAChB,MAAM,GAAG,MAAM,CAAC;YAChB,MAAM,GAAG,MAAM,CAAC;YAChB,MAAM,GAAG,CAAC,EAAE,GAAG,EAAE,CAAC,GAAG,CAAC,CAAC;SACxB;QAED,KAAK,CAAC,CAAC,CAAC,IAAI,MAAM,CAAC;QACnB,KAAK,CAAC,CAAC,CAAC,IAAI,MAAM,CAAC;QACnB,KAAK,CAAC,CAAC,CAAC,IAAI,MAAM,CAAC;QACnB,KAAK,CAAC,CAAC,CAAC,IAAI,MAAM,CAAC;QACnB,KAAK,CAAC,CAAC,CAAC,IAAI,MAAM,CAAC;QACnB,KAAK,CAAC,CAAC,CAAC,IAAI,MAAM,CAAC;QACnB,KAAK,CAAC,CAAC,CAAC,IAAI,MAAM,CAAC;QACnB,KAAK,CAAC,CAAC,CAAC,IAAI,MAAM,CAAC;IACrB,CAAC;IACH,gBAAC;AAAD,CAAC,AAxJD,IAwJC"} \ No newline at end of file diff --git a/amplify/functions/fetchDocuments/node_modules/@aws-crypto/sha256-js/build/module/constants.d.ts 
b/amplify/functions/fetchDocuments/node_modules/@aws-crypto/sha256-js/build/module/constants.d.ts new file mode 100644 index 0000000..63bd764 --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@aws-crypto/sha256-js/build/module/constants.d.ts @@ -0,0 +1,20 @@ +/** + * @internal + */ +export declare const BLOCK_SIZE: number; +/** + * @internal + */ +export declare const DIGEST_LENGTH: number; +/** + * @internal + */ +export declare const KEY: Uint32Array; +/** + * @internal + */ +export declare const INIT: number[]; +/** + * @internal + */ +export declare const MAX_HASHABLE_LENGTH: number; diff --git a/amplify/functions/fetchDocuments/node_modules/@aws-crypto/sha256-js/build/module/constants.js b/amplify/functions/fetchDocuments/node_modules/@aws-crypto/sha256-js/build/module/constants.js new file mode 100644 index 0000000..68037b3 --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@aws-crypto/sha256-js/build/module/constants.js @@ -0,0 +1,95 @@ +/** + * @internal + */ +export var BLOCK_SIZE = 64; +/** + * @internal + */ +export var DIGEST_LENGTH = 32; +/** + * @internal + */ +export var KEY = new Uint32Array([ + 0x428a2f98, + 0x71374491, + 0xb5c0fbcf, + 0xe9b5dba5, + 0x3956c25b, + 0x59f111f1, + 0x923f82a4, + 0xab1c5ed5, + 0xd807aa98, + 0x12835b01, + 0x243185be, + 0x550c7dc3, + 0x72be5d74, + 0x80deb1fe, + 0x9bdc06a7, + 0xc19bf174, + 0xe49b69c1, + 0xefbe4786, + 0x0fc19dc6, + 0x240ca1cc, + 0x2de92c6f, + 0x4a7484aa, + 0x5cb0a9dc, + 0x76f988da, + 0x983e5152, + 0xa831c66d, + 0xb00327c8, + 0xbf597fc7, + 0xc6e00bf3, + 0xd5a79147, + 0x06ca6351, + 0x14292967, + 0x27b70a85, + 0x2e1b2138, + 0x4d2c6dfc, + 0x53380d13, + 0x650a7354, + 0x766a0abb, + 0x81c2c92e, + 0x92722c85, + 0xa2bfe8a1, + 0xa81a664b, + 0xc24b8b70, + 0xc76c51a3, + 0xd192e819, + 0xd6990624, + 0xf40e3585, + 0x106aa070, + 0x19a4c116, + 0x1e376c08, + 0x2748774c, + 0x34b0bcb5, + 0x391c0cb3, + 0x4ed8aa4a, + 0x5b9cca4f, + 0x682e6ff3, + 0x748f82ee, + 0x78a5636f, + 0x84c87814, + 0x8cc70208, + 
0x90befffa, + 0xa4506ceb, + 0xbef9a3f7, + 0xc67178f2 +]); +/** + * @internal + */ +export var INIT = [ + 0x6a09e667, + 0xbb67ae85, + 0x3c6ef372, + 0xa54ff53a, + 0x510e527f, + 0x9b05688c, + 0x1f83d9ab, + 0x5be0cd19 +]; +/** + * @internal + */ +export var MAX_HASHABLE_LENGTH = Math.pow(2, 53) - 1; +//# sourceMappingURL=constants.js.map \ No newline at end of file diff --git a/amplify/functions/fetchDocuments/node_modules/@aws-crypto/sha256-js/build/module/constants.js.map b/amplify/functions/fetchDocuments/node_modules/@aws-crypto/sha256-js/build/module/constants.js.map new file mode 100644 index 0000000..6c93089 --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@aws-crypto/sha256-js/build/module/constants.js.map @@ -0,0 +1 @@ +{"version":3,"file":"constants.js","sourceRoot":"","sources":["../../src/constants.ts"],"names":[],"mappings":"AAAA;;GAEG;AACH,MAAM,CAAC,IAAM,UAAU,GAAW,EAAE,CAAC;AAErC;;GAEG;AACH,MAAM,CAAC,IAAM,aAAa,GAAW,EAAE,CAAC;AAExC;;GAEG;AACH,MAAM,CAAC,IAAM,GAAG,GAAG,IAAI,WAAW,CAAC;IACjC,UAAU;IACV,UAAU;IACV,UAAU;IACV,UAAU;IACV,UAAU;IACV,UAAU;IACV,UAAU;IACV,UAAU;IACV,UAAU;IACV,UAAU;IACV,UAAU;IACV,UAAU;IACV,UAAU;IACV,UAAU;IACV,UAAU;IACV,UAAU;IACV,UAAU;IACV,UAAU;IACV,UAAU;IACV,UAAU;IACV,UAAU;IACV,UAAU;IACV,UAAU;IACV,UAAU;IACV,UAAU;IACV,UAAU;IACV,UAAU;IACV,UAAU;IACV,UAAU;IACV,UAAU;IACV,UAAU;IACV,UAAU;IACV,UAAU;IACV,UAAU;IACV,UAAU;IACV,UAAU;IACV,UAAU;IACV,UAAU;IACV,UAAU;IACV,UAAU;IACV,UAAU;IACV,UAAU;IACV,UAAU;IACV,UAAU;IACV,UAAU;IACV,UAAU;IACV,UAAU;IACV,UAAU;IACV,UAAU;IACV,UAAU;IACV,UAAU;IACV,UAAU;IACV,UAAU;IACV,UAAU;IACV,UAAU;IACV,UAAU;IACV,UAAU;IACV,UAAU;IACV,UAAU;IACV,UAAU;IACV,UAAU;IACV,UAAU;IACV,UAAU;IACV,UAAU;CACX,CAAC,CAAC;AAEH;;GAEG;AACH,MAAM,CAAC,IAAM,IAAI,GAAG;IAClB,UAAU;IACV,UAAU;IACV,UAAU;IACV,UAAU;IACV,UAAU;IACV,UAAU;IACV,UAAU;IACV,UAAU;CACX,CAAC;AAEF;;GAEG;AACH,MAAM,CAAC,IAAM,mBAAmB,GAAG,SAAA,CAAC,EAAI,EAAE,CAAA,GAAG,CAAC,CAAC"} \ No newline at end of file diff --git 
a/amplify/functions/fetchDocuments/node_modules/@aws-crypto/sha256-js/build/module/index.d.ts b/amplify/functions/fetchDocuments/node_modules/@aws-crypto/sha256-js/build/module/index.d.ts new file mode 100644 index 0000000..4554d8a --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@aws-crypto/sha256-js/build/module/index.d.ts @@ -0,0 +1 @@ +export * from "./jsSha256"; diff --git a/amplify/functions/fetchDocuments/node_modules/@aws-crypto/sha256-js/build/module/index.js b/amplify/functions/fetchDocuments/node_modules/@aws-crypto/sha256-js/build/module/index.js new file mode 100644 index 0000000..a8f73a0 --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@aws-crypto/sha256-js/build/module/index.js @@ -0,0 +1,2 @@ +export * from "./jsSha256"; +//# sourceMappingURL=index.js.map \ No newline at end of file diff --git a/amplify/functions/fetchDocuments/node_modules/@aws-crypto/sha256-js/build/module/index.js.map b/amplify/functions/fetchDocuments/node_modules/@aws-crypto/sha256-js/build/module/index.js.map new file mode 100644 index 0000000..030d795 --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@aws-crypto/sha256-js/build/module/index.js.map @@ -0,0 +1 @@ +{"version":3,"file":"index.js","sourceRoot":"","sources":["../../src/index.ts"],"names":[],"mappings":"AAAA,cAAc,YAAY,CAAC"} \ No newline at end of file diff --git a/amplify/functions/fetchDocuments/node_modules/@aws-crypto/sha256-js/build/module/jsSha256.d.ts b/amplify/functions/fetchDocuments/node_modules/@aws-crypto/sha256-js/build/module/jsSha256.d.ts new file mode 100644 index 0000000..d813b25 --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@aws-crypto/sha256-js/build/module/jsSha256.d.ts @@ -0,0 +1,12 @@ +import { Checksum, SourceData } from "@aws-sdk/types"; +export declare class Sha256 implements Checksum { + private readonly secret?; + private hash; + private outer?; + private error; + constructor(secret?: SourceData); + update(toHash: 
SourceData): void; + digestSync(): Uint8Array; + digest(): Promise; + reset(): void; +} diff --git a/amplify/functions/fetchDocuments/node_modules/@aws-crypto/sha256-js/build/module/jsSha256.js b/amplify/functions/fetchDocuments/node_modules/@aws-crypto/sha256-js/build/module/jsSha256.js new file mode 100644 index 0000000..fa40899 --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@aws-crypto/sha256-js/build/module/jsSha256.js @@ -0,0 +1,82 @@ +import { __awaiter, __generator } from "tslib"; +import { BLOCK_SIZE } from "./constants"; +import { RawSha256 } from "./RawSha256"; +import { isEmptyData, convertToBuffer } from "@aws-crypto/util"; +var Sha256 = /** @class */ (function () { + function Sha256(secret) { + this.secret = secret; + this.hash = new RawSha256(); + this.reset(); + } + Sha256.prototype.update = function (toHash) { + if (isEmptyData(toHash) || this.error) { + return; + } + try { + this.hash.update(convertToBuffer(toHash)); + } + catch (e) { + this.error = e; + } + }; + /* This synchronous method keeps compatibility + * with the v2 aws-sdk. + */ + Sha256.prototype.digestSync = function () { + if (this.error) { + throw this.error; + } + if (this.outer) { + if (!this.outer.finished) { + this.outer.update(this.hash.digest()); + } + return this.outer.digest(); + } + return this.hash.digest(); + }; + /* The underlying digest method here is synchronous. + * To keep the same interface with the other hash functions + * the default is to expose this as an async method. + * However, it can sometimes be useful to have a sync method. 
+ */ + Sha256.prototype.digest = function () { + return __awaiter(this, void 0, void 0, function () { + return __generator(this, function (_a) { + return [2 /*return*/, this.digestSync()]; + }); + }); + }; + Sha256.prototype.reset = function () { + this.hash = new RawSha256(); + if (this.secret) { + this.outer = new RawSha256(); + var inner = bufferFromSecret(this.secret); + var outer = new Uint8Array(BLOCK_SIZE); + outer.set(inner); + for (var i = 0; i < BLOCK_SIZE; i++) { + inner[i] ^= 0x36; + outer[i] ^= 0x5c; + } + this.hash.update(inner); + this.outer.update(outer); + // overwrite the copied key in memory + for (var i = 0; i < inner.byteLength; i++) { + inner[i] = 0; + } + } + }; + return Sha256; +}()); +export { Sha256 }; +function bufferFromSecret(secret) { + var input = convertToBuffer(secret); + if (input.byteLength > BLOCK_SIZE) { + var bufferHash = new RawSha256(); + bufferHash.update(input); + input = bufferHash.digest(); + } + var buffer = new Uint8Array(BLOCK_SIZE); + buffer.set(input); + return buffer; +} +//# sourceMappingURL=jsSha256.js.map \ No newline at end of file diff --git a/amplify/functions/fetchDocuments/node_modules/@aws-crypto/sha256-js/build/module/jsSha256.js.map b/amplify/functions/fetchDocuments/node_modules/@aws-crypto/sha256-js/build/module/jsSha256.js.map new file mode 100644 index 0000000..94fa401 --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@aws-crypto/sha256-js/build/module/jsSha256.js.map @@ -0,0 +1 @@ 
+{"version":3,"file":"jsSha256.js","sourceRoot":"","sources":["../../src/jsSha256.ts"],"names":[],"mappings":";AAAA,OAAO,EAAE,UAAU,EAAE,MAAM,aAAa,CAAC;AACzC,OAAO,EAAE,SAAS,EAAE,MAAM,aAAa,CAAC;AAExC,OAAO,EAAE,WAAW,EAAE,eAAe,EAAE,MAAM,kBAAkB,CAAC;AAEhE;IAME,gBAAY,MAAmB;QAC7B,IAAI,CAAC,MAAM,GAAG,MAAM,CAAC;QACrB,IAAI,CAAC,IAAI,GAAG,IAAI,SAAS,EAAE,CAAC;QAC5B,IAAI,CAAC,KAAK,EAAE,CAAC;IACf,CAAC;IAED,uBAAM,GAAN,UAAO,MAAkB;QACvB,IAAI,WAAW,CAAC,MAAM,CAAC,IAAI,IAAI,CAAC,KAAK,EAAE;YACrC,OAAO;SACR;QAED,IAAI;YACF,IAAI,CAAC,IAAI,CAAC,MAAM,CAAC,eAAe,CAAC,MAAM,CAAC,CAAC,CAAC;SAC3C;QAAC,OAAO,CAAC,EAAE;YACV,IAAI,CAAC,KAAK,GAAG,CAAC,CAAC;SAChB;IACH,CAAC;IAED;;OAEG;IACH,2BAAU,GAAV;QACE,IAAI,IAAI,CAAC,KAAK,EAAE;YACd,MAAM,IAAI,CAAC,KAAK,CAAC;SAClB;QAED,IAAI,IAAI,CAAC,KAAK,EAAE;YACd,IAAI,CAAC,IAAI,CAAC,KAAK,CAAC,QAAQ,EAAE;gBACxB,IAAI,CAAC,KAAK,CAAC,MAAM,CAAC,IAAI,CAAC,IAAI,CAAC,MAAM,EAAE,CAAC,CAAC;aACvC;YAED,OAAO,IAAI,CAAC,KAAK,CAAC,MAAM,EAAE,CAAC;SAC5B;QAED,OAAO,IAAI,CAAC,IAAI,CAAC,MAAM,EAAE,CAAC;IAC5B,CAAC;IAED;;;;OAIG;IACG,uBAAM,GAAZ;;;gBACE,sBAAO,IAAI,CAAC,UAAU,EAAE,EAAC;;;KAC1B;IAED,sBAAK,GAAL;QACE,IAAI,CAAC,IAAI,GAAG,IAAI,SAAS,EAAE,CAAC;QAC5B,IAAI,IAAI,CAAC,MAAM,EAAE;YACf,IAAI,CAAC,KAAK,GAAG,IAAI,SAAS,EAAE,CAAC;YAC7B,IAAM,KAAK,GAAG,gBAAgB,CAAC,IAAI,CAAC,MAAM,CAAC,CAAC;YAC5C,IAAM,KAAK,GAAG,IAAI,UAAU,CAAC,UAAU,CAAC,CAAC;YACzC,KAAK,CAAC,GAAG,CAAC,KAAK,CAAC,CAAC;YAEjB,KAAK,IAAI,CAAC,GAAG,CAAC,EAAE,CAAC,GAAG,UAAU,EAAE,CAAC,EAAE,EAAE;gBACnC,KAAK,CAAC,CAAC,CAAC,IAAI,IAAI,CAAC;gBACjB,KAAK,CAAC,CAAC,CAAC,IAAI,IAAI,CAAC;aAClB;YAED,IAAI,CAAC,IAAI,CAAC,MAAM,CAAC,KAAK,CAAC,CAAC;YACxB,IAAI,CAAC,KAAK,CAAC,MAAM,CAAC,KAAK,CAAC,CAAC;YAEzB,qCAAqC;YACrC,KAAK,IAAI,CAAC,GAAG,CAAC,EAAE,CAAC,GAAG,KAAK,CAAC,UAAU,EAAE,CAAC,EAAE,EAAE;gBACzC,KAAK,CAAC,CAAC,CAAC,GAAG,CAAC,CAAC;aACd;SACF;IACH,CAAC;IACH,aAAC;AAAD,CAAC,AA1ED,IA0EC;;AAED,SAAS,gBAAgB,CAAC,MAAkB;IAC1C,IAAI,KAAK,GAAG,eAAe,CAAC,MAAM,CAAC,CAAC;IAEpC,IAAI,KAAK,CAAC,UAAU,GAAG,UAAU,EAAE;QACjC,IAAM,UAAU,GAAG,IAAI,SAAS,EAAE,CAAC;QACnC,UAAU,CAAC,MAAM,CAAC,KAAK
,CAAC,CAAC;QACzB,KAAK,GAAG,UAAU,CAAC,MAAM,EAAE,CAAC;KAC7B;IAED,IAAM,MAAM,GAAG,IAAI,UAAU,CAAC,UAAU,CAAC,CAAC;IAC1C,MAAM,CAAC,GAAG,CAAC,KAAK,CAAC,CAAC;IAClB,OAAO,MAAM,CAAC;AAChB,CAAC"} \ No newline at end of file diff --git a/amplify/functions/fetchDocuments/node_modules/@aws-crypto/sha256-js/build/module/knownHashes.fixture.d.ts b/amplify/functions/fetchDocuments/node_modules/@aws-crypto/sha256-js/build/module/knownHashes.fixture.d.ts new file mode 100644 index 0000000..d880343 --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@aws-crypto/sha256-js/build/module/knownHashes.fixture.d.ts @@ -0,0 +1,5 @@ +export declare const hashTestVectors: Array<[Uint8Array, Uint8Array]>; +/** + * @see https://tools.ietf.org/html/rfc4231 + */ +export declare const hmacTestVectors: Array<[Uint8Array, Uint8Array, Uint8Array]>; diff --git a/amplify/functions/fetchDocuments/node_modules/@aws-crypto/sha256-js/build/module/knownHashes.fixture.js b/amplify/functions/fetchDocuments/node_modules/@aws-crypto/sha256-js/build/module/knownHashes.fixture.js new file mode 100644 index 0000000..c2d2663 --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@aws-crypto/sha256-js/build/module/knownHashes.fixture.js @@ -0,0 +1,319 @@ +import { fromHex } from "@aws-sdk/util-hex-encoding"; +var millionChars = new Uint8Array(1000000); +for (var i = 0; i < 1000000; i++) { + millionChars[i] = 97; +} +export var hashTestVectors = [ + [ + Uint8Array.from([97, 98, 99]), + fromHex("ba7816bf8f01cfea414140de5dae2223b00361a396177a9cb410ff61f20015ad") + ], + [ + new Uint8Array(0), + fromHex("e3b0c44298fc1c149afbf4c8996fb92427ae41e4649b934ca495991b7852b855") + ], + [ + fromHex("61"), + fromHex("ca978112ca1bbdcafac231b39a23dc4da786eff8147c4e72b9807785afee48bb") + ], + [ + fromHex("6161"), + fromHex("961b6dd3ede3cb8ecbaacbd68de040cd78eb2ed5889130cceb4c49268ea4d506") + ], + [ + fromHex("616161"), + fromHex("9834876dcfb05cb167a5c24953eba58c4ac89b1adf57f28f2f9d09af107ee8f0") + ], + [ + 
fromHex("61616161"), + fromHex("61be55a8e2f6b4e172338bddf184d6dbee29c98853e0a0485ecee7f27b9af0b4") + ], + [ + fromHex("6161616161"), + fromHex("ed968e840d10d2d313a870bc131a4e2c311d7ad09bdf32b3418147221f51a6e2") + ], + [ + fromHex("616161616161"), + fromHex("ed02457b5c41d964dbd2f2a609d63fe1bb7528dbe55e1abf5b52c249cd735797") + ], + [ + fromHex("61616161616161"), + fromHex("e46240714b5db3a23eee60479a623efba4d633d27fe4f03c904b9e219a7fbe60") + ], + [ + fromHex("6161616161616161"), + fromHex("1f3ce40415a2081fa3eee75fc39fff8e56c22270d1a978a7249b592dcebd20b4") + ], + [ + fromHex("616161616161616161"), + fromHex("f2aca93b80cae681221f0445fa4e2cae8a1f9f8fa1e1741d9639caad222f537d") + ], + [ + fromHex("61616161616161616161"), + fromHex("bf2cb58a68f684d95a3b78ef8f661c9a4e5b09e82cc8f9cc88cce90528caeb27") + ], + [ + fromHex("6161616161616161616161"), + fromHex("28cb017dfc99073aa1b47c1b30f413e3ce774c4991eb4158de50f9dbb36d8043") + ], + [ + fromHex("616161616161616161616161"), + fromHex("f24abc34b13fade76e805799f71187da6cd90b9cac373ae65ed57f143bd664e5") + ], + [ + fromHex("61616161616161616161616161"), + fromHex("a689d786e81340e45511dec6c7ab2d978434e5db123362450fe10cfac70d19d0") + ], + [ + fromHex("6161616161616161616161616161"), + fromHex("82cab7df0abfb9d95dca4e5937ce2968c798c726fea48c016bf9763221efda13") + ], + [ + fromHex("616161616161616161616161616161"), + fromHex("ef2df0b539c6c23de0f4cbe42648c301ae0e22e887340a4599fb4ef4e2678e48") + ], + [ + fromHex("61616161616161616161616161616161"), + fromHex("0c0beacef8877bbf2416eb00f2b5dc96354e26dd1df5517320459b1236860f8c") + ], + [ + fromHex("6161616161616161616161616161616161"), + fromHex("b860666ee2966dd8f903be44ee605c6e1366f926d9f17a8f49937d11624eb99d") + ], + [ + fromHex("616161616161616161616161616161616161"), + fromHex("c926defaaa3d13eda2fc63a553bb7fb7326bece6e7cb67ca5296e4727d89bab4") + ], + [ + fromHex("61616161616161616161616161616161616161"), + fromHex("a0b4aaab8a966e2193ba172d68162c4656860197f256b5f45f0203397ff3f99c") + ], + [ + 
fromHex("6161616161616161616161616161616161616161"), + fromHex("42492da06234ad0ac76f5d5debdb6d1ae027cffbe746a1c13b89bb8bc0139137") + ], + [ + fromHex("616161616161616161616161616161616161616161"), + fromHex("7df8e299c834de198e264c3e374bc58ecd9382252a705c183beb02f275571e3b") + ], + [ + fromHex("61616161616161616161616161616161616161616161"), + fromHex("ec7c494df6d2a7ea36668d656e6b8979e33641bfea378c15038af3964db057a3") + ], + [ + fromHex("6161616161616161616161616161616161616161616161"), + fromHex("897d3e95b65f26676081f8b9f3a98b6ee4424566303e8d4e7c7522ebae219eab") + ], + [ + fromHex("616161616161616161616161616161616161616161616161"), + fromHex("09f61f8d9cd65e6a0c258087c485b6293541364e42bd97b2d7936580c8aa3c54") + ], + [ + fromHex("61616161616161616161616161616161616161616161616161"), + fromHex("2f521e2a7d0bd812cbc035f4ed6806eb8d851793b04ba147e8f66b72f5d1f20f") + ], + [ + fromHex("6161616161616161616161616161616161616161616161616161"), + fromHex("9976d549a25115dab4e36d0c1fb8f31cb07da87dd83275977360eb7dc09e88de") + ], + [ + fromHex("616161616161616161616161616161616161616161616161616161"), + fromHex("cc0616e61cbd6e8e5e34e9fb2d320f37de915820206f5696c31f1fbd24aa16de") + ], + [ + fromHex("61616161616161616161616161616161616161616161616161616161"), + fromHex("9c547cb8115a44883b9f70ba68f75117cd55359c92611875e386f8af98c172ab") + ], + [ + fromHex("6161616161616161616161616161616161616161616161616161616161"), + fromHex("6913c9c7fd42fe23df8b6bcd4dbaf1c17748948d97f2980b432319c39eddcf6c") + ], + [ + fromHex("616161616161616161616161616161616161616161616161616161616161"), + fromHex("3a54fc0cbc0b0ef48b6507b7788096235d10292dd3ae24e22f5aa062d4f9864a") + ], + [ + fromHex("61616161616161616161616161616161616161616161616161616161616161"), + fromHex("61c60b487d1a921e0bcc9bf853dda0fb159b30bf57b2e2d2c753b00be15b5a09") + ], + [ + fromHex("6161616161616161616161616161616161616161616161616161616161616161"), + fromHex("3ba3f5f43b92602683c19aee62a20342b084dd5971ddd33808d81a328879a547") + ], + [ 
+ fromHex("616161616161616161616161616161616161616161616161616161616161616161"), + fromHex("852785c805c77e71a22340a54e9d95933ed49121e7d2bf3c2d358854bc1359ea") + ], + [ + fromHex("61616161616161616161616161616161616161616161616161616161616161616161"), + fromHex("a27c896c4859204843166af66f0e902b9c3b3ed6d2fd13d435abc020065c526f") + ], + [ + fromHex("6161616161616161616161616161616161616161616161616161616161616161616161"), + fromHex("629362afc62c74497caed2272e30f8125ecd0965f8d8d7cfc4e260f7f8dd319d") + ], + [ + fromHex("616161616161616161616161616161616161616161616161616161616161616161616161"), + fromHex("22c1d24bcd03e9aee9832efccd6da613fc702793178e5f12c945c7b67ddda933") + ], + [ + fromHex("61616161616161616161616161616161616161616161616161616161616161616161616161"), + fromHex("21ec055b38ce759cd4d0f477e9bdec2c5b8199945db4439bae334a964df6246c") + ], + [ + fromHex("6161616161616161616161616161616161616161616161616161616161616161616161616161"), + fromHex("365a9c3e2c2af0a56e47a9dac51c2c5381bf8f41273bad3175e0e619126ad087") + ], + [ + fromHex("616161616161616161616161616161616161616161616161616161616161616161616161616161"), + fromHex("b4d5e56e929ba4cda349e9274e3603d0be246b82016bca20f363963c5f2d6845") + ], + [ + fromHex("61616161616161616161616161616161616161616161616161616161616161616161616161616161"), + fromHex("e33cdf9c7f7120b98e8c78408953e07f2ecd183006b5606df349b4c212acf43e") + ], + [ + fromHex("6161616161616161616161616161616161616161616161616161616161616161616161616161616161"), + fromHex("c0f8bd4dbc2b0c03107c1c37913f2a7501f521467f45dd0fef6958e9a4692719") + ], + [ + fromHex("616161616161616161616161616161616161616161616161616161616161616161616161616161616161"), + fromHex("7a538607fdaab9296995929f451565bbb8142e1844117322aafd2b3d76b01aff") + ], + [ + fromHex("61616161616161616161616161616161616161616161616161616161616161616161616161616161616161"), + fromHex("66d34fba71f8f450f7e45598853e53bfc23bbd129027cbb131a2f4ffd7878cd0") + ], + [ + 
fromHex("6161616161616161616161616161616161616161616161616161616161616161616161616161616161616161"), + fromHex("16849877c6c21ef0bfa68e4f6747300ddb171b170b9f00e189edc4c2fc4db93e") + ], + [ + fromHex("616161616161616161616161616161616161616161616161616161616161616161616161616161616161616161"), + fromHex("52789e3423b72beeb898456a4f49662e46b0cbb960784c5ef4b1399d327e7c27") + ], + [ + fromHex("61616161616161616161616161616161616161616161616161616161616161616161616161616161616161616161"), + fromHex("6643110c5628fff59edf76d82d5bf573bf800f16a4d65dfb1e5d6f1a46296d0b") + ], + [ + fromHex("6161616161616161616161616161616161616161616161616161616161616161616161616161616161616161616161"), + fromHex("11eaed932c6c6fddfc2efc394e609facf4abe814fc6180d03b14fce13a07d0e5") + ], + [ + fromHex("616161616161616161616161616161616161616161616161616161616161616161616161616161616161616161616161"), + fromHex("97daac0ee9998dfcad6c9c0970da5ca411c86233a944c25b47566f6a7bc1ddd5") + ], + [ + fromHex("61616161616161616161616161616161616161616161616161616161616161616161616161616161616161616161616161"), + fromHex("8f9bec6a62dd28ebd36d1227745592de6658b36974a3bb98a4c582f683ea6c42") + ], + [ + fromHex("6161616161616161616161616161616161616161616161616161616161616161616161616161616161616161616161616161"), + fromHex("160b4e433e384e05e537dc59b467f7cb2403f0214db15c5db58862a3f1156d2e") + ], + [ + fromHex("616161616161616161616161616161616161616161616161616161616161616161616161616161616161616161616161616161"), + fromHex("bfc5fe0e360152ca98c50fab4ed7e3078c17debc2917740d5000913b686ca129") + ], + [ + fromHex("61616161616161616161616161616161616161616161616161616161616161616161616161616161616161616161616161616161"), + fromHex("6c1b3dc7a706b9dc81352a6716b9c666c608d8626272c64b914ab05572fc6e84") + ], + [ + fromHex("6161616161616161616161616161616161616161616161616161616161616161616161616161616161616161616161616161616161"), + fromHex("abe346a7259fc90b4c27185419628e5e6af6466b1ae9b5446cac4bfc26cf05c4") + ], + [ + 
fromHex("616161616161616161616161616161616161616161616161616161616161616161616161616161616161616161616161616161616161"), + fromHex("a3f01b6939256127582ac8ae9fb47a382a244680806a3f613a118851c1ca1d47") + ], + [ + fromHex("61616161616161616161616161616161616161616161616161616161616161616161616161616161616161616161616161616161616161"), + fromHex("9f4390f8d30c2dd92ec9f095b65e2b9ae9b0a925a5258e241c9f1e910f734318") + ], + [ + fromHex("6161616161616161616161616161616161616161616161616161616161616161616161616161616161616161616161616161616161616161"), + fromHex("b35439a4ac6f0948b6d6f9e3c6af0f5f590ce20f1bde7090ef7970686ec6738a") + ], + [ + fromHex("616161616161616161616161616161616161616161616161616161616161616161616161616161616161616161616161616161616161616161"), + fromHex("f13b2d724659eb3bf47f2dd6af1accc87b81f09f59f2b75e5c0bed6589dfe8c6") + ], + [ + fromHex("61616161616161616161616161616161616161616161616161616161616161616161616161616161616161616161616161616161616161616161"), + fromHex("d5c039b748aa64665782974ec3dc3025c042edf54dcdc2b5de31385b094cb678") + ], + [ + fromHex("6161616161616161616161616161616161616161616161616161616161616161616161616161616161616161616161616161616161616161616161"), + fromHex("111bb261277afd65f0744b247cd3e47d386d71563d0ed995517807d5ebd4fba3") + ], + [ + fromHex("616161616161616161616161616161616161616161616161616161616161616161616161616161616161616161616161616161616161616161616161"), + fromHex("11ee391211c6256460b6ed375957fadd8061cafbb31daf967db875aebd5aaad4") + ], + [ + fromHex("61616161616161616161616161616161616161616161616161616161616161616161616161616161616161616161616161616161616161616161616161"), + fromHex("35d5fc17cfbbadd00f5e710ada39f194c5ad7c766ad67072245f1fad45f0f530") + ], + [ + fromHex("6161616161616161616161616161616161616161616161616161616161616161616161616161616161616161616161616161616161616161616161616161"), + fromHex("f506898cc7c2e092f9eb9fadae7ba50383f5b46a2a4fe5597dbb553a78981268") + ], + [ + 
fromHex("616161616161616161616161616161616161616161616161616161616161616161616161616161616161616161616161616161616161616161616161616161"), + fromHex("7d3e74a05d7db15bce4ad9ec0658ea98e3f06eeecf16b4c6fff2da457ddc2f34") + ], + [ + fromHex("61616161616161616161616161616161616161616161616161616161616161616161616161616161616161616161616161616161616161616161616161616161"), + fromHex("ffe054fe7ae0cb6dc65c3af9b61d5209f439851db43d0ba5997337df154668eb") + ], + [ + fromHex("de188941a3375d3a8a061e67576e926dc71a7fa3f0cceb97452b4d3227965f9ea8cc75076d9fb9c5417aa5cb30fc22198b34982dbb629e"), + fromHex("038051e9c324393bd1ca1978dd0952c2aa3742ca4f1bd5cd4611cea83892d382") + ], + [ + millionChars, + fromHex("cdc76e5c9914fb9281a1c7e284d73e67f1809a48a497200e046d39ccc7112cd0") + ], + [ + fromHex("aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa"), + fromHex("45ad4b37c6e2fc0a2cfcc1b5da524132ec707615c2cae1dbbc43c97aa521db81") + ] +]; +/** + * @see https://tools.ietf.org/html/rfc4231 + */ +export var hmacTestVectors = [ + [ + fromHex("0b0b0b0b0b0b0b0b0b0b0b0b0b0b0b0b0b0b0b0b"), + fromHex("4869205468657265"), + fromHex("b0344c61d8db38535ca8afceaf0bf12b881dc200c9833da726e9376c2e32cff7") + ], + [ + fromHex("4a656665"), + fromHex("7768617420646f2079612077616e7420666f72206e6f7468696e673f"), + fromHex("5bdcc146bf60754e6a042426089575c75a003f089d2739839dec58b964ec3843") + ], + [ + fromHex("aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa"), + fromHex("dddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddd"), + fromHex("773ea91e36800e46854db8ebd09181a72959098b3ef8c122d9635514ced565fe") + ], + [ + fromHex("0102030405060708090a0b0c0d0e0f10111213141516171819"), + 
fromHex("cdcdcdcdcdcdcdcdcdcdcdcdcdcdcdcdcdcdcdcdcdcdcdcdcdcdcdcdcdcdcdcdcdcdcdcdcdcdcdcdcdcdcdcdcdcdcdcdcdcd"), + fromHex("82558a389a443c0ea4cc819899f2083a85f0faa3e578f8077a2e3ff46729665b") + ], + [ + fromHex("aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa"), + fromHex("54657374205573696e67204c6172676572205468616e20426c6f636b2d53697a65204b6579202d2048617368204b6579204669727374"), + fromHex("60e431591ee0b67f0d8a26aacbf5b77f8e0bc6213728c5140546040f0ee37f54") + ], + [ + fromHex("aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa"), + fromHex("5468697320697320612074657374207573696e672061206c6172676572207468616e20626c6f636b2d73697a65206b657920616e642061206c6172676572207468616e20626c6f636b2d73697a6520646174612e20546865206b6579206e6565647320746f20626520686173686564206265666f7265206265696e6720757365642062792074686520484d414320616c676f726974686d2e"), + fromHex("9b09ffa71b942fcb27635fbcd5b0e944bfdc63644f0713938a7f51535c3a35e2") + ] +]; +//# sourceMappingURL=knownHashes.fixture.js.map \ No newline at end of file diff --git a/amplify/functions/fetchDocuments/node_modules/@aws-crypto/sha256-js/build/module/knownHashes.fixture.js.map b/amplify/functions/fetchDocuments/node_modules/@aws-crypto/sha256-js/build/module/knownHashes.fixture.js.map new file mode 100644 index 0000000..1232159 --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@aws-crypto/sha256-js/build/module/knownHashes.fixture.js.map @@ -0,0 +1 @@ 
+{"version":3,"file":"knownHashes.fixture.js","sourceRoot":"","sources":["../../src/knownHashes.fixture.ts"],"names":[],"mappings":"AAAA,OAAO,EAAE,OAAO,EAAE,MAAM,4BAA4B,CAAC;AAErD,IAAM,YAAY,GAAG,IAAI,UAAU,CAAC,OAAO,CAAC,CAAC;AAC7C,KAAK,IAAI,CAAC,GAAG,CAAC,EAAE,CAAC,GAAG,OAAO,EAAE,CAAC,EAAE,EAAE;IAChC,YAAY,CAAC,CAAC,CAAC,GAAG,EAAE,CAAC;CACtB;AAED,MAAM,CAAC,IAAM,eAAe,GAAoC;IAC9D;QACE,UAAU,CAAC,IAAI,CAAC,CAAC,EAAE,EAAE,EAAE,EAAE,EAAE,CAAC,CAAC;QAC7B,OAAO,CAAC,kEAAkE,CAAC;KAC5E;IACD;QACE,IAAI,UAAU,CAAC,CAAC,CAAC;QACjB,OAAO,CAAC,kEAAkE,CAAC;KAC5E;IACD;QACE,OAAO,CAAC,IAAI,CAAC;QACb,OAAO,CAAC,kEAAkE,CAAC;KAC5E;IACD;QACE,OAAO,CAAC,MAAM,CAAC;QACf,OAAO,CAAC,kEAAkE,CAAC;KAC5E;IACD;QACE,OAAO,CAAC,QAAQ,CAAC;QACjB,OAAO,CAAC,kEAAkE,CAAC;KAC5E;IACD;QACE,OAAO,CAAC,UAAU,CAAC;QACnB,OAAO,CAAC,kEAAkE,CAAC;KAC5E;IACD;QACE,OAAO,CAAC,YAAY,CAAC;QACrB,OAAO,CAAC,kEAAkE,CAAC;KAC5E;IACD;QACE,OAAO,CAAC,cAAc,CAAC;QACvB,OAAO,CAAC,kEAAkE,CAAC;KAC5E;IACD;QACE,OAAO,CAAC,gBAAgB,CAAC;QACzB,OAAO,CAAC,kEAAkE,CAAC;KAC5E;IACD;QACE,OAAO,CAAC,kBAAkB,CAAC;QAC3B,OAAO,CAAC,kEAAkE,CAAC;KAC5E;IACD;QACE,OAAO,CAAC,oBAAoB,CAAC;QAC7B,OAAO,CAAC,kEAAkE,CAAC;KAC5E;IACD;QACE,OAAO,CAAC,sBAAsB,CAAC;QAC/B,OAAO,CAAC,kEAAkE,CAAC;KAC5E;IACD;QACE,OAAO,CAAC,wBAAwB,CAAC;QACjC,OAAO,CAAC,kEAAkE,CAAC;KAC5E;IACD;QACE,OAAO,CAAC,0BAA0B,CAAC;QACnC,OAAO,CAAC,kEAAkE,CAAC;KAC5E;IACD;QACE,OAAO,CAAC,4BAA4B,CAAC;QACrC,OAAO,CAAC,kEAAkE,CAAC;KAC5E;IACD;QACE,OAAO,CAAC,8BAA8B,CAAC;QACvC,OAAO,CAAC,kEAAkE,CAAC;KAC5E;IACD;QACE,OAAO,CAAC,gCAAgC,CAAC;QACzC,OAAO,CAAC,kEAAkE,CAAC;KAC5E;IACD;QACE,OAAO,CAAC,kCAAkC,CAAC;QAC3C,OAAO,CAAC,kEAAkE,CAAC;KAC5E;IACD;QACE,OAAO,CAAC,oCAAoC,CAAC;QAC7C,OAAO,CAAC,kEAAkE,CAAC;KAC5E;IACD;QACE,OAAO,CAAC,sCAAsC,CAAC;QAC/C,OAAO,CAAC,kEAAkE,CAAC;KAC5E;IACD;QACE,OAAO,CAAC,wCAAwC,CAAC;QACjD,OAAO,CAAC,kEAAkE,CAAC;KAC5E;IACD;QACE,OAAO,CAAC,0CAA0C,CAAC;QACnD,OAAO,CAAC,kEAAkE,CAAC;KAC5E;IACD;QACE,OAAO,CAAC,4CAA4C,CAAC;QACrD,OAAO,CAAC,kEAAkE,CAAC;KAC5E;IACD;QACE,OAAO,CAAC,8CAA8C,CAAC;QACvD,OAAO,CAAC,kEAAkE,CAAC;KAC5E;IACD;QACE,OAAO
,CAAC,gDAAgD,CAAC;QACzD,OAAO,CAAC,kEAAkE,CAAC;KAC5E;IACD;QACE,OAAO,CAAC,kDAAkD,CAAC;QAC3D,OAAO,CAAC,kEAAkE,CAAC;KAC5E;IACD;QACE,OAAO,CAAC,oDAAoD,CAAC;QAC7D,OAAO,CAAC,kEAAkE,CAAC;KAC5E;IACD;QACE,OAAO,CAAC,sDAAsD,CAAC;QAC/D,OAAO,CAAC,kEAAkE,CAAC;KAC5E;IACD;QACE,OAAO,CAAC,wDAAwD,CAAC;QACjE,OAAO,CAAC,kEAAkE,CAAC;KAC5E;IACD;QACE,OAAO,CAAC,0DAA0D,CAAC;QACnE,OAAO,CAAC,kEAAkE,CAAC;KAC5E;IACD;QACE,OAAO,CAAC,4DAA4D,CAAC;QACrE,OAAO,CAAC,kEAAkE,CAAC;KAC5E;IACD;QACE,OAAO,CAAC,8DAA8D,CAAC;QACvE,OAAO,CAAC,kEAAkE,CAAC;KAC5E;IACD;QACE,OAAO,CAAC,gEAAgE,CAAC;QACzE,OAAO,CAAC,kEAAkE,CAAC;KAC5E;IACD;QACE,OAAO,CAAC,kEAAkE,CAAC;QAC3E,OAAO,CAAC,kEAAkE,CAAC;KAC5E;IACD;QACE,OAAO,CACL,oEAAoE,CACrE;QACD,OAAO,CAAC,kEAAkE,CAAC;KAC5E;IACD;QACE,OAAO,CACL,sEAAsE,CACvE;QACD,OAAO,CAAC,kEAAkE,CAAC;KAC5E;IACD;QACE,OAAO,CACL,wEAAwE,CACzE;QACD,OAAO,CAAC,kEAAkE,CAAC;KAC5E;IACD;QACE,OAAO,CACL,0EAA0E,CAC3E;QACD,OAAO,CAAC,kEAAkE,CAAC;KAC5E;IACD;QACE,OAAO,CACL,4EAA4E,CAC7E;QACD,OAAO,CAAC,kEAAkE,CAAC;KAC5E;IACD;QACE,OAAO,CACL,8EAA8E,CAC/E;QACD,OAAO,CAAC,kEAAkE,CAAC;KAC5E;IACD;QACE,OAAO,CACL,gFAAgF,CACjF;QACD,OAAO,CAAC,kEAAkE,CAAC;KAC5E;IACD;QACE,OAAO,CACL,kFAAkF,CACnF;QACD,OAAO,CAAC,kEAAkE,CAAC;KAC5E;IACD;QACE,OAAO,CACL,oFAAoF,CACrF;QACD,OAAO,CAAC,kEAAkE,CAAC;KAC5E;IACD;QACE,OAAO,CACL,sFAAsF,CACvF;QACD,OAAO,CAAC,kEAAkE,CAAC;KAC5E;IACD;QACE,OAAO,CACL,wFAAwF,CACzF;QACD,OAAO,CAAC,kEAAkE,CAAC;KAC5E;IACD;QACE,OAAO,CACL,0FAA0F,CAC3F;QACD,OAAO,CAAC,kEAAkE,CAAC;KAC5E;IACD;QACE,OAAO,CACL,4FAA4F,CAC7F;QACD,OAAO,CAAC,kEAAkE,CAAC;KAC5E;IACD;QACE,OAAO,CACL,8FAA8F,CAC/F;QACD,OAAO,CAAC,kEAAkE,CAAC;KAC5E;IACD;QACE,OAAO,CACL,gGAAgG,CACjG;QACD,OAAO,CAAC,kEAAkE,CAAC;KAC5E;IACD;QACE,OAAO,CACL,kGAAkG,CACnG;QACD,OAAO,CAAC,kEAAkE,CAAC;KAC5E;IACD;QACE,OAAO,CACL,oGAAoG,CACrG;QACD,OAAO,CAAC,kEAAkE,CAAC;KAC5E;IACD;QACE,OAAO,CACL,sGAAsG,CACvG;QACD,OAAO,CAAC,kEAAkE,CAAC;KAC5E;IACD;QACE,OAAO,CACL,wGAAwG,CACzG;QACD,OAAO,CAAC,kEAAkE,CAAC;KAC5E;IACD;QACE,OAAO,CACL,0GAA0G,CAC3G;QACD,OAAO,CAAC,kEAAkE,CAAC;KAC5E;IACD;QACE,OAAO,CACL,4GAA4G,CAC7G;Q
ACD,OAAO,CAAC,kEAAkE,CAAC;KAC5E;IACD;QACE,OAAO,CACL,8GAA8G,CAC/G;QACD,OAAO,CAAC,kEAAkE,CAAC;KAC5E;IACD;QACE,OAAO,CACL,gHAAgH,CACjH;QACD,OAAO,CAAC,kEAAkE,CAAC;KAC5E;IACD;QACE,OAAO,CACL,kHAAkH,CACnH;QACD,OAAO,CAAC,kEAAkE,CAAC;KAC5E;IACD;QACE,OAAO,CACL,oHAAoH,CACrH;QACD,OAAO,CAAC,kEAAkE,CAAC;KAC5E;IACD;QACE,OAAO,CACL,sHAAsH,CACvH;QACD,OAAO,CAAC,kEAAkE,CAAC;KAC5E;IACD;QACE,OAAO,CACL,wHAAwH,CACzH;QACD,OAAO,CAAC,kEAAkE,CAAC;KAC5E;IACD;QACE,OAAO,CACL,0HAA0H,CAC3H;QACD,OAAO,CAAC,kEAAkE,CAAC;KAC5E;IACD;QACE,OAAO,CACL,4HAA4H,CAC7H;QACD,OAAO,CAAC,kEAAkE,CAAC;KAC5E;IACD;QACE,OAAO,CACL,8HAA8H,CAC/H;QACD,OAAO,CAAC,kEAAkE,CAAC;KAC5E;IACD;QACE,OAAO,CACL,gIAAgI,CACjI;QACD,OAAO,CAAC,kEAAkE,CAAC;KAC5E;IACD;QACE,OAAO,CACL,kIAAkI,CACnI;QACD,OAAO,CAAC,kEAAkE,CAAC;KAC5E;IACD;QACE,OAAO,CACL,gHAAgH,CACjH;QACD,OAAO,CAAC,kEAAkE,CAAC;KAC5E;IACD;QACE,YAAY;QACZ,OAAO,CAAC,kEAAkE,CAAC;KAC5E;IACD;QACE,OAAO,CACL,wQAAwQ,CACzQ;QACD,OAAO,CAAC,kEAAkE,CAAC;KAC5E;CACF,CAAC;AAEF;;GAEG;AACH,MAAM,CAAC,IAAM,eAAe,GAAgD;IAC1E;QACE,OAAO,CAAC,0CAA0C,CAAC;QACnD,OAAO,CAAC,kBAAkB,CAAC;QAC3B,OAAO,CAAC,kEAAkE,CAAC;KAC5E;IACD;QACE,OAAO,CAAC,UAAU,CAAC;QACnB,OAAO,CAAC,0DAA0D,CAAC;QACnE,OAAO,CAAC,kEAAkE,CAAC;KAC5E;IACD;QACE,OAAO,CAAC,0CAA0C,CAAC;QACnD,OAAO,CACL,sGAAsG,CACvG;QACD,OAAO,CAAC,kEAAkE,CAAC;KAC5E;IACD;QACE,OAAO,CAAC,oDAAoD,CAAC;QAC7D,OAAO,CACL,sGAAsG,CACvG;QACD,OAAO,CAAC,kEAAkE,CAAC;KAC5E;IACD;QACE,OAAO,CACL,wQAAwQ,CACzQ;QACD,OAAO,CACL,8GAA8G,CAC/G;QACD,OAAO,CAAC,kEAAkE,CAAC;KAC5E;IACD;QACE,OAAO,CACL,wQAAwQ,CACzQ;QACD,OAAO,CACL,kTAAkT,CACnT;QACD,OAAO,CAAC,kEAAkE,CAAC;KAC5E;CACF,CAAC"} \ No newline at end of file diff --git a/amplify/functions/fetchDocuments/node_modules/@aws-crypto/sha256-js/package.json b/amplify/functions/fetchDocuments/node_modules/@aws-crypto/sha256-js/package.json new file mode 100644 index 0000000..e8ef52d --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@aws-crypto/sha256-js/package.json @@ -0,0 +1,32 @@ +{ + "name": "@aws-crypto/sha256-js", + "version": "5.2.0", + 
"scripts": { + "prepublishOnly": "tsc -p tsconfig.json && tsc -p tsconfig.module.json", + "pretest": "tsc -p tsconfig.test.json", + "test": "mocha --require ts-node/register test/**/*test.ts" + }, + "main": "./build/main/index.js", + "module": "./build/module/index.js", + "types": "./build/main/index.d.ts", + "repository": { + "type": "git", + "url": "git@github.com:aws/aws-sdk-js-crypto-helpers.git" + }, + "author": { + "name": "AWS Crypto Tools Team", + "email": "aws-cryptools@amazon.com", + "url": "https://docs.aws.amazon.com/aws-crypto-tools/index.html?id=docs_gateway#lang/en_us" + }, + "homepage": "https://github.com/aws/aws-sdk-js-crypto-helpers/tree/master/packages/sha256-js", + "license": "Apache-2.0", + "dependencies": { + "@aws-crypto/util": "^5.2.0", + "@aws-sdk/types": "^3.222.0", + "tslib": "^2.6.2" + }, + "engines": { + "node": ">=16.0.0" + }, + "gitHead": "c11b171b35ec5c093364f0e0d8dc4ab1af68e748" +} diff --git a/amplify/functions/fetchDocuments/node_modules/@aws-crypto/sha256-js/src/RawSha256.ts b/amplify/functions/fetchDocuments/node_modules/@aws-crypto/sha256-js/src/RawSha256.ts new file mode 100644 index 0000000..f4a385c --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@aws-crypto/sha256-js/src/RawSha256.ts @@ -0,0 +1,164 @@ +import { + BLOCK_SIZE, + DIGEST_LENGTH, + INIT, + KEY, + MAX_HASHABLE_LENGTH +} from "./constants"; + +/** + * @internal + */ +export class RawSha256 { + private state: Int32Array = Int32Array.from(INIT); + private temp: Int32Array = new Int32Array(64); + private buffer: Uint8Array = new Uint8Array(64); + private bufferLength: number = 0; + private bytesHashed: number = 0; + + /** + * @internal + */ + finished: boolean = false; + + update(data: Uint8Array): void { + if (this.finished) { + throw new Error("Attempted to update an already finished hash."); + } + + let position = 0; + let { byteLength } = data; + this.bytesHashed += byteLength; + + if (this.bytesHashed * 8 > MAX_HASHABLE_LENGTH) { + throw new 
Error("Cannot hash more than 2^53 - 1 bits"); + } + + while (byteLength > 0) { + this.buffer[this.bufferLength++] = data[position++]; + byteLength--; + + if (this.bufferLength === BLOCK_SIZE) { + this.hashBuffer(); + this.bufferLength = 0; + } + } + } + + digest(): Uint8Array { + if (!this.finished) { + const bitsHashed = this.bytesHashed * 8; + const bufferView = new DataView( + this.buffer.buffer, + this.buffer.byteOffset, + this.buffer.byteLength + ); + + const undecoratedLength = this.bufferLength; + bufferView.setUint8(this.bufferLength++, 0x80); + + // Ensure the final block has enough room for the hashed length + if (undecoratedLength % BLOCK_SIZE >= BLOCK_SIZE - 8) { + for (let i = this.bufferLength; i < BLOCK_SIZE; i++) { + bufferView.setUint8(i, 0); + } + this.hashBuffer(); + this.bufferLength = 0; + } + + for (let i = this.bufferLength; i < BLOCK_SIZE - 8; i++) { + bufferView.setUint8(i, 0); + } + bufferView.setUint32( + BLOCK_SIZE - 8, + Math.floor(bitsHashed / 0x100000000), + true + ); + bufferView.setUint32(BLOCK_SIZE - 4, bitsHashed); + + this.hashBuffer(); + + this.finished = true; + } + + // The value in state is little-endian rather than big-endian, so flip + // each word into a new Uint8Array + const out = new Uint8Array(DIGEST_LENGTH); + for (let i = 0; i < 8; i++) { + out[i * 4] = (this.state[i] >>> 24) & 0xff; + out[i * 4 + 1] = (this.state[i] >>> 16) & 0xff; + out[i * 4 + 2] = (this.state[i] >>> 8) & 0xff; + out[i * 4 + 3] = (this.state[i] >>> 0) & 0xff; + } + + return out; + } + + private hashBuffer(): void { + const { buffer, state } = this; + + let state0 = state[0], + state1 = state[1], + state2 = state[2], + state3 = state[3], + state4 = state[4], + state5 = state[5], + state6 = state[6], + state7 = state[7]; + + for (let i = 0; i < BLOCK_SIZE; i++) { + if (i < 16) { + this.temp[i] = + ((buffer[i * 4] & 0xff) << 24) | + ((buffer[i * 4 + 1] & 0xff) << 16) | + ((buffer[i * 4 + 2] & 0xff) << 8) | + (buffer[i * 4 + 3] & 0xff); + } else { + 
let u = this.temp[i - 2]; + const t1 = + ((u >>> 17) | (u << 15)) ^ ((u >>> 19) | (u << 13)) ^ (u >>> 10); + + u = this.temp[i - 15]; + const t2 = + ((u >>> 7) | (u << 25)) ^ ((u >>> 18) | (u << 14)) ^ (u >>> 3); + + this.temp[i] = + ((t1 + this.temp[i - 7]) | 0) + ((t2 + this.temp[i - 16]) | 0); + } + + const t1 = + ((((((state4 >>> 6) | (state4 << 26)) ^ + ((state4 >>> 11) | (state4 << 21)) ^ + ((state4 >>> 25) | (state4 << 7))) + + ((state4 & state5) ^ (~state4 & state6))) | + 0) + + ((state7 + ((KEY[i] + this.temp[i]) | 0)) | 0)) | + 0; + + const t2 = + ((((state0 >>> 2) | (state0 << 30)) ^ + ((state0 >>> 13) | (state0 << 19)) ^ + ((state0 >>> 22) | (state0 << 10))) + + ((state0 & state1) ^ (state0 & state2) ^ (state1 & state2))) | + 0; + + state7 = state6; + state6 = state5; + state5 = state4; + state4 = (state3 + t1) | 0; + state3 = state2; + state2 = state1; + state1 = state0; + state0 = (t1 + t2) | 0; + } + + state[0] += state0; + state[1] += state1; + state[2] += state2; + state[3] += state3; + state[4] += state4; + state[5] += state5; + state[6] += state6; + state[7] += state7; + } +} diff --git a/amplify/functions/fetchDocuments/node_modules/@aws-crypto/sha256-js/src/constants.ts b/amplify/functions/fetchDocuments/node_modules/@aws-crypto/sha256-js/src/constants.ts new file mode 100644 index 0000000..8cede57 --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@aws-crypto/sha256-js/src/constants.ts @@ -0,0 +1,98 @@ +/** + * @internal + */ +export const BLOCK_SIZE: number = 64; + +/** + * @internal + */ +export const DIGEST_LENGTH: number = 32; + +/** + * @internal + */ +export const KEY = new Uint32Array([ + 0x428a2f98, + 0x71374491, + 0xb5c0fbcf, + 0xe9b5dba5, + 0x3956c25b, + 0x59f111f1, + 0x923f82a4, + 0xab1c5ed5, + 0xd807aa98, + 0x12835b01, + 0x243185be, + 0x550c7dc3, + 0x72be5d74, + 0x80deb1fe, + 0x9bdc06a7, + 0xc19bf174, + 0xe49b69c1, + 0xefbe4786, + 0x0fc19dc6, + 0x240ca1cc, + 0x2de92c6f, + 0x4a7484aa, + 0x5cb0a9dc, + 0x76f988da, + 
0x983e5152, + 0xa831c66d, + 0xb00327c8, + 0xbf597fc7, + 0xc6e00bf3, + 0xd5a79147, + 0x06ca6351, + 0x14292967, + 0x27b70a85, + 0x2e1b2138, + 0x4d2c6dfc, + 0x53380d13, + 0x650a7354, + 0x766a0abb, + 0x81c2c92e, + 0x92722c85, + 0xa2bfe8a1, + 0xa81a664b, + 0xc24b8b70, + 0xc76c51a3, + 0xd192e819, + 0xd6990624, + 0xf40e3585, + 0x106aa070, + 0x19a4c116, + 0x1e376c08, + 0x2748774c, + 0x34b0bcb5, + 0x391c0cb3, + 0x4ed8aa4a, + 0x5b9cca4f, + 0x682e6ff3, + 0x748f82ee, + 0x78a5636f, + 0x84c87814, + 0x8cc70208, + 0x90befffa, + 0xa4506ceb, + 0xbef9a3f7, + 0xc67178f2 +]); + +/** + * @internal + */ +export const INIT = [ + 0x6a09e667, + 0xbb67ae85, + 0x3c6ef372, + 0xa54ff53a, + 0x510e527f, + 0x9b05688c, + 0x1f83d9ab, + 0x5be0cd19 +]; + +/** + * @internal + */ +export const MAX_HASHABLE_LENGTH = 2 ** 53 - 1; diff --git a/amplify/functions/fetchDocuments/node_modules/@aws-crypto/sha256-js/src/index.ts b/amplify/functions/fetchDocuments/node_modules/@aws-crypto/sha256-js/src/index.ts new file mode 100644 index 0000000..4554d8a --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@aws-crypto/sha256-js/src/index.ts @@ -0,0 +1 @@ +export * from "./jsSha256"; diff --git a/amplify/functions/fetchDocuments/node_modules/@aws-crypto/sha256-js/src/jsSha256.ts b/amplify/functions/fetchDocuments/node_modules/@aws-crypto/sha256-js/src/jsSha256.ts new file mode 100644 index 0000000..f7bd993 --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@aws-crypto/sha256-js/src/jsSha256.ts @@ -0,0 +1,94 @@ +import { BLOCK_SIZE } from "./constants"; +import { RawSha256 } from "./RawSha256"; +import { Checksum, SourceData } from "@aws-sdk/types"; +import { isEmptyData, convertToBuffer } from "@aws-crypto/util"; + +export class Sha256 implements Checksum { + private readonly secret?: SourceData; + private hash: RawSha256; + private outer?: RawSha256; + private error: any; + + constructor(secret?: SourceData) { + this.secret = secret; + this.hash = new RawSha256(); + this.reset(); + } + 
+ update(toHash: SourceData): void { + if (isEmptyData(toHash) || this.error) { + return; + } + + try { + this.hash.update(convertToBuffer(toHash)); + } catch (e) { + this.error = e; + } + } + + /* This synchronous method keeps compatibility + * with the v2 aws-sdk. + */ + digestSync(): Uint8Array { + if (this.error) { + throw this.error; + } + + if (this.outer) { + if (!this.outer.finished) { + this.outer.update(this.hash.digest()); + } + + return this.outer.digest(); + } + + return this.hash.digest(); + } + + /* The underlying digest method here is synchronous. + * To keep the same interface with the other hash functions + * the default is to expose this as an async method. + * However, it can sometimes be useful to have a sync method. + */ + async digest(): Promise { + return this.digestSync(); + } + + reset(): void { + this.hash = new RawSha256(); + if (this.secret) { + this.outer = new RawSha256(); + const inner = bufferFromSecret(this.secret); + const outer = new Uint8Array(BLOCK_SIZE); + outer.set(inner); + + for (let i = 0; i < BLOCK_SIZE; i++) { + inner[i] ^= 0x36; + outer[i] ^= 0x5c; + } + + this.hash.update(inner); + this.outer.update(outer); + + // overwrite the copied key in memory + for (let i = 0; i < inner.byteLength; i++) { + inner[i] = 0; + } + } + } +} + +function bufferFromSecret(secret: SourceData): Uint8Array { + let input = convertToBuffer(secret); + + if (input.byteLength > BLOCK_SIZE) { + const bufferHash = new RawSha256(); + bufferHash.update(input); + input = bufferHash.digest(); + } + + const buffer = new Uint8Array(BLOCK_SIZE); + buffer.set(input); + return buffer; +} diff --git a/amplify/functions/fetchDocuments/node_modules/@aws-crypto/sha256-js/src/knownHashes.fixture.ts b/amplify/functions/fetchDocuments/node_modules/@aws-crypto/sha256-js/src/knownHashes.fixture.ts new file mode 100644 index 0000000..c83dae2 --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@aws-crypto/sha256-js/src/knownHashes.fixture.ts @@ -0,0 
+1,401 @@ +import { fromHex } from "@aws-sdk/util-hex-encoding"; + +const millionChars = new Uint8Array(1000000); +for (let i = 0; i < 1000000; i++) { + millionChars[i] = 97; +} + +export const hashTestVectors: Array<[Uint8Array, Uint8Array]> = [ + [ + Uint8Array.from([97, 98, 99]), + fromHex("ba7816bf8f01cfea414140de5dae2223b00361a396177a9cb410ff61f20015ad") + ], + [ + new Uint8Array(0), + fromHex("e3b0c44298fc1c149afbf4c8996fb92427ae41e4649b934ca495991b7852b855") + ], + [ + fromHex("61"), + fromHex("ca978112ca1bbdcafac231b39a23dc4da786eff8147c4e72b9807785afee48bb") + ], + [ + fromHex("6161"), + fromHex("961b6dd3ede3cb8ecbaacbd68de040cd78eb2ed5889130cceb4c49268ea4d506") + ], + [ + fromHex("616161"), + fromHex("9834876dcfb05cb167a5c24953eba58c4ac89b1adf57f28f2f9d09af107ee8f0") + ], + [ + fromHex("61616161"), + fromHex("61be55a8e2f6b4e172338bddf184d6dbee29c98853e0a0485ecee7f27b9af0b4") + ], + [ + fromHex("6161616161"), + fromHex("ed968e840d10d2d313a870bc131a4e2c311d7ad09bdf32b3418147221f51a6e2") + ], + [ + fromHex("616161616161"), + fromHex("ed02457b5c41d964dbd2f2a609d63fe1bb7528dbe55e1abf5b52c249cd735797") + ], + [ + fromHex("61616161616161"), + fromHex("e46240714b5db3a23eee60479a623efba4d633d27fe4f03c904b9e219a7fbe60") + ], + [ + fromHex("6161616161616161"), + fromHex("1f3ce40415a2081fa3eee75fc39fff8e56c22270d1a978a7249b592dcebd20b4") + ], + [ + fromHex("616161616161616161"), + fromHex("f2aca93b80cae681221f0445fa4e2cae8a1f9f8fa1e1741d9639caad222f537d") + ], + [ + fromHex("61616161616161616161"), + fromHex("bf2cb58a68f684d95a3b78ef8f661c9a4e5b09e82cc8f9cc88cce90528caeb27") + ], + [ + fromHex("6161616161616161616161"), + fromHex("28cb017dfc99073aa1b47c1b30f413e3ce774c4991eb4158de50f9dbb36d8043") + ], + [ + fromHex("616161616161616161616161"), + fromHex("f24abc34b13fade76e805799f71187da6cd90b9cac373ae65ed57f143bd664e5") + ], + [ + fromHex("61616161616161616161616161"), + fromHex("a689d786e81340e45511dec6c7ab2d978434e5db123362450fe10cfac70d19d0") + ], + [ + 
fromHex("6161616161616161616161616161"), + fromHex("82cab7df0abfb9d95dca4e5937ce2968c798c726fea48c016bf9763221efda13") + ], + [ + fromHex("616161616161616161616161616161"), + fromHex("ef2df0b539c6c23de0f4cbe42648c301ae0e22e887340a4599fb4ef4e2678e48") + ], + [ + fromHex("61616161616161616161616161616161"), + fromHex("0c0beacef8877bbf2416eb00f2b5dc96354e26dd1df5517320459b1236860f8c") + ], + [ + fromHex("6161616161616161616161616161616161"), + fromHex("b860666ee2966dd8f903be44ee605c6e1366f926d9f17a8f49937d11624eb99d") + ], + [ + fromHex("616161616161616161616161616161616161"), + fromHex("c926defaaa3d13eda2fc63a553bb7fb7326bece6e7cb67ca5296e4727d89bab4") + ], + [ + fromHex("61616161616161616161616161616161616161"), + fromHex("a0b4aaab8a966e2193ba172d68162c4656860197f256b5f45f0203397ff3f99c") + ], + [ + fromHex("6161616161616161616161616161616161616161"), + fromHex("42492da06234ad0ac76f5d5debdb6d1ae027cffbe746a1c13b89bb8bc0139137") + ], + [ + fromHex("616161616161616161616161616161616161616161"), + fromHex("7df8e299c834de198e264c3e374bc58ecd9382252a705c183beb02f275571e3b") + ], + [ + fromHex("61616161616161616161616161616161616161616161"), + fromHex("ec7c494df6d2a7ea36668d656e6b8979e33641bfea378c15038af3964db057a3") + ], + [ + fromHex("6161616161616161616161616161616161616161616161"), + fromHex("897d3e95b65f26676081f8b9f3a98b6ee4424566303e8d4e7c7522ebae219eab") + ], + [ + fromHex("616161616161616161616161616161616161616161616161"), + fromHex("09f61f8d9cd65e6a0c258087c485b6293541364e42bd97b2d7936580c8aa3c54") + ], + [ + fromHex("61616161616161616161616161616161616161616161616161"), + fromHex("2f521e2a7d0bd812cbc035f4ed6806eb8d851793b04ba147e8f66b72f5d1f20f") + ], + [ + fromHex("6161616161616161616161616161616161616161616161616161"), + fromHex("9976d549a25115dab4e36d0c1fb8f31cb07da87dd83275977360eb7dc09e88de") + ], + [ + fromHex("616161616161616161616161616161616161616161616161616161"), + fromHex("cc0616e61cbd6e8e5e34e9fb2d320f37de915820206f5696c31f1fbd24aa16de") + ], + [ 
+ fromHex("61616161616161616161616161616161616161616161616161616161"), + fromHex("9c547cb8115a44883b9f70ba68f75117cd55359c92611875e386f8af98c172ab") + ], + [ + fromHex("6161616161616161616161616161616161616161616161616161616161"), + fromHex("6913c9c7fd42fe23df8b6bcd4dbaf1c17748948d97f2980b432319c39eddcf6c") + ], + [ + fromHex("616161616161616161616161616161616161616161616161616161616161"), + fromHex("3a54fc0cbc0b0ef48b6507b7788096235d10292dd3ae24e22f5aa062d4f9864a") + ], + [ + fromHex("61616161616161616161616161616161616161616161616161616161616161"), + fromHex("61c60b487d1a921e0bcc9bf853dda0fb159b30bf57b2e2d2c753b00be15b5a09") + ], + [ + fromHex("6161616161616161616161616161616161616161616161616161616161616161"), + fromHex("3ba3f5f43b92602683c19aee62a20342b084dd5971ddd33808d81a328879a547") + ], + [ + fromHex( + "616161616161616161616161616161616161616161616161616161616161616161" + ), + fromHex("852785c805c77e71a22340a54e9d95933ed49121e7d2bf3c2d358854bc1359ea") + ], + [ + fromHex( + "61616161616161616161616161616161616161616161616161616161616161616161" + ), + fromHex("a27c896c4859204843166af66f0e902b9c3b3ed6d2fd13d435abc020065c526f") + ], + [ + fromHex( + "6161616161616161616161616161616161616161616161616161616161616161616161" + ), + fromHex("629362afc62c74497caed2272e30f8125ecd0965f8d8d7cfc4e260f7f8dd319d") + ], + [ + fromHex( + "616161616161616161616161616161616161616161616161616161616161616161616161" + ), + fromHex("22c1d24bcd03e9aee9832efccd6da613fc702793178e5f12c945c7b67ddda933") + ], + [ + fromHex( + "61616161616161616161616161616161616161616161616161616161616161616161616161" + ), + fromHex("21ec055b38ce759cd4d0f477e9bdec2c5b8199945db4439bae334a964df6246c") + ], + [ + fromHex( + "6161616161616161616161616161616161616161616161616161616161616161616161616161" + ), + fromHex("365a9c3e2c2af0a56e47a9dac51c2c5381bf8f41273bad3175e0e619126ad087") + ], + [ + fromHex( + "616161616161616161616161616161616161616161616161616161616161616161616161616161" + ), + 
fromHex("b4d5e56e929ba4cda349e9274e3603d0be246b82016bca20f363963c5f2d6845") + ], + [ + fromHex( + "61616161616161616161616161616161616161616161616161616161616161616161616161616161" + ), + fromHex("e33cdf9c7f7120b98e8c78408953e07f2ecd183006b5606df349b4c212acf43e") + ], + [ + fromHex( + "6161616161616161616161616161616161616161616161616161616161616161616161616161616161" + ), + fromHex("c0f8bd4dbc2b0c03107c1c37913f2a7501f521467f45dd0fef6958e9a4692719") + ], + [ + fromHex( + "616161616161616161616161616161616161616161616161616161616161616161616161616161616161" + ), + fromHex("7a538607fdaab9296995929f451565bbb8142e1844117322aafd2b3d76b01aff") + ], + [ + fromHex( + "61616161616161616161616161616161616161616161616161616161616161616161616161616161616161" + ), + fromHex("66d34fba71f8f450f7e45598853e53bfc23bbd129027cbb131a2f4ffd7878cd0") + ], + [ + fromHex( + "6161616161616161616161616161616161616161616161616161616161616161616161616161616161616161" + ), + fromHex("16849877c6c21ef0bfa68e4f6747300ddb171b170b9f00e189edc4c2fc4db93e") + ], + [ + fromHex( + "616161616161616161616161616161616161616161616161616161616161616161616161616161616161616161" + ), + fromHex("52789e3423b72beeb898456a4f49662e46b0cbb960784c5ef4b1399d327e7c27") + ], + [ + fromHex( + "61616161616161616161616161616161616161616161616161616161616161616161616161616161616161616161" + ), + fromHex("6643110c5628fff59edf76d82d5bf573bf800f16a4d65dfb1e5d6f1a46296d0b") + ], + [ + fromHex( + "6161616161616161616161616161616161616161616161616161616161616161616161616161616161616161616161" + ), + fromHex("11eaed932c6c6fddfc2efc394e609facf4abe814fc6180d03b14fce13a07d0e5") + ], + [ + fromHex( + "616161616161616161616161616161616161616161616161616161616161616161616161616161616161616161616161" + ), + fromHex("97daac0ee9998dfcad6c9c0970da5ca411c86233a944c25b47566f6a7bc1ddd5") + ], + [ + fromHex( + "61616161616161616161616161616161616161616161616161616161616161616161616161616161616161616161616161" + ), + 
fromHex("8f9bec6a62dd28ebd36d1227745592de6658b36974a3bb98a4c582f683ea6c42") + ], + [ + fromHex( + "6161616161616161616161616161616161616161616161616161616161616161616161616161616161616161616161616161" + ), + fromHex("160b4e433e384e05e537dc59b467f7cb2403f0214db15c5db58862a3f1156d2e") + ], + [ + fromHex( + "616161616161616161616161616161616161616161616161616161616161616161616161616161616161616161616161616161" + ), + fromHex("bfc5fe0e360152ca98c50fab4ed7e3078c17debc2917740d5000913b686ca129") + ], + [ + fromHex( + "61616161616161616161616161616161616161616161616161616161616161616161616161616161616161616161616161616161" + ), + fromHex("6c1b3dc7a706b9dc81352a6716b9c666c608d8626272c64b914ab05572fc6e84") + ], + [ + fromHex( + "6161616161616161616161616161616161616161616161616161616161616161616161616161616161616161616161616161616161" + ), + fromHex("abe346a7259fc90b4c27185419628e5e6af6466b1ae9b5446cac4bfc26cf05c4") + ], + [ + fromHex( + "616161616161616161616161616161616161616161616161616161616161616161616161616161616161616161616161616161616161" + ), + fromHex("a3f01b6939256127582ac8ae9fb47a382a244680806a3f613a118851c1ca1d47") + ], + [ + fromHex( + "61616161616161616161616161616161616161616161616161616161616161616161616161616161616161616161616161616161616161" + ), + fromHex("9f4390f8d30c2dd92ec9f095b65e2b9ae9b0a925a5258e241c9f1e910f734318") + ], + [ + fromHex( + "6161616161616161616161616161616161616161616161616161616161616161616161616161616161616161616161616161616161616161" + ), + fromHex("b35439a4ac6f0948b6d6f9e3c6af0f5f590ce20f1bde7090ef7970686ec6738a") + ], + [ + fromHex( + "616161616161616161616161616161616161616161616161616161616161616161616161616161616161616161616161616161616161616161" + ), + fromHex("f13b2d724659eb3bf47f2dd6af1accc87b81f09f59f2b75e5c0bed6589dfe8c6") + ], + [ + fromHex( + "61616161616161616161616161616161616161616161616161616161616161616161616161616161616161616161616161616161616161616161" + ), + 
fromHex("d5c039b748aa64665782974ec3dc3025c042edf54dcdc2b5de31385b094cb678") + ], + [ + fromHex( + "6161616161616161616161616161616161616161616161616161616161616161616161616161616161616161616161616161616161616161616161" + ), + fromHex("111bb261277afd65f0744b247cd3e47d386d71563d0ed995517807d5ebd4fba3") + ], + [ + fromHex( + "616161616161616161616161616161616161616161616161616161616161616161616161616161616161616161616161616161616161616161616161" + ), + fromHex("11ee391211c6256460b6ed375957fadd8061cafbb31daf967db875aebd5aaad4") + ], + [ + fromHex( + "61616161616161616161616161616161616161616161616161616161616161616161616161616161616161616161616161616161616161616161616161" + ), + fromHex("35d5fc17cfbbadd00f5e710ada39f194c5ad7c766ad67072245f1fad45f0f530") + ], + [ + fromHex( + "6161616161616161616161616161616161616161616161616161616161616161616161616161616161616161616161616161616161616161616161616161" + ), + fromHex("f506898cc7c2e092f9eb9fadae7ba50383f5b46a2a4fe5597dbb553a78981268") + ], + [ + fromHex( + "616161616161616161616161616161616161616161616161616161616161616161616161616161616161616161616161616161616161616161616161616161" + ), + fromHex("7d3e74a05d7db15bce4ad9ec0658ea98e3f06eeecf16b4c6fff2da457ddc2f34") + ], + [ + fromHex( + "61616161616161616161616161616161616161616161616161616161616161616161616161616161616161616161616161616161616161616161616161616161" + ), + fromHex("ffe054fe7ae0cb6dc65c3af9b61d5209f439851db43d0ba5997337df154668eb") + ], + [ + fromHex( + "de188941a3375d3a8a061e67576e926dc71a7fa3f0cceb97452b4d3227965f9ea8cc75076d9fb9c5417aa5cb30fc22198b34982dbb629e" + ), + fromHex("038051e9c324393bd1ca1978dd0952c2aa3742ca4f1bd5cd4611cea83892d382") + ], + [ + millionChars, + fromHex("cdc76e5c9914fb9281a1c7e284d73e67f1809a48a497200e046d39ccc7112cd0") + ], + [ + fromHex( + 
"aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa" + ), + fromHex("45ad4b37c6e2fc0a2cfcc1b5da524132ec707615c2cae1dbbc43c97aa521db81") + ] +]; + +/** + * @see https://tools.ietf.org/html/rfc4231 + */ +export const hmacTestVectors: Array<[Uint8Array, Uint8Array, Uint8Array]> = [ + [ + fromHex("0b0b0b0b0b0b0b0b0b0b0b0b0b0b0b0b0b0b0b0b"), + fromHex("4869205468657265"), + fromHex("b0344c61d8db38535ca8afceaf0bf12b881dc200c9833da726e9376c2e32cff7") + ], + [ + fromHex("4a656665"), + fromHex("7768617420646f2079612077616e7420666f72206e6f7468696e673f"), + fromHex("5bdcc146bf60754e6a042426089575c75a003f089d2739839dec58b964ec3843") + ], + [ + fromHex("aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa"), + fromHex( + "dddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddd" + ), + fromHex("773ea91e36800e46854db8ebd09181a72959098b3ef8c122d9635514ced565fe") + ], + [ + fromHex("0102030405060708090a0b0c0d0e0f10111213141516171819"), + fromHex( + "cdcdcdcdcdcdcdcdcdcdcdcdcdcdcdcdcdcdcdcdcdcdcdcdcdcdcdcdcdcdcdcdcdcdcdcdcdcdcdcdcdcdcdcdcdcdcdcdcdcd" + ), + fromHex("82558a389a443c0ea4cc819899f2083a85f0faa3e578f8077a2e3ff46729665b") + ], + [ + fromHex( + "aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa" + ), + fromHex( + "54657374205573696e67204c6172676572205468616e20426c6f636b2d53697a65204b6579202d2048617368204b6579204669727374" + ), + fromHex("60e431591ee0b67f0d8a26aacbf5b77f8e0bc6213728c5140546040f0ee37f54") + ], + [ + fromHex( + 
"aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa" + ), + fromHex( + "5468697320697320612074657374207573696e672061206c6172676572207468616e20626c6f636b2d73697a65206b657920616e642061206c6172676572207468616e20626c6f636b2d73697a6520646174612e20546865206b6579206e6565647320746f20626520686173686564206265666f7265206265696e6720757365642062792074686520484d414320616c676f726974686d2e" + ), + fromHex("9b09ffa71b942fcb27635fbcd5b0e944bfdc63644f0713938a7f51535c3a35e2") + ] +]; diff --git a/amplify/functions/fetchDocuments/node_modules/@aws-crypto/sha256-js/tsconfig.json b/amplify/functions/fetchDocuments/node_modules/@aws-crypto/sha256-js/tsconfig.json new file mode 100644 index 0000000..fb9aa95 --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@aws-crypto/sha256-js/tsconfig.json @@ -0,0 +1,10 @@ +{ + "extends": "../tsconfig.json", + "compilerOptions": { + "rootDir": "./src", + "outDir": "./build/main", + "lib": ["dom"], + }, + "include": ["src/**/*.ts"], + "exclude": ["node_modules/**"] +} diff --git a/amplify/functions/fetchDocuments/node_modules/@aws-crypto/sha256-js/tsconfig.module.json b/amplify/functions/fetchDocuments/node_modules/@aws-crypto/sha256-js/tsconfig.module.json new file mode 100644 index 0000000..7d0cfdd --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@aws-crypto/sha256-js/tsconfig.module.json @@ -0,0 +1,7 @@ +{ + "extends": "./tsconfig", + "compilerOptions": { + "outDir": "build/module", + "module": "esnext", + } +} diff --git a/amplify/functions/fetchDocuments/node_modules/@aws-crypto/supports-web-crypto/CHANGELOG.md b/amplify/functions/fetchDocuments/node_modules/@aws-crypto/supports-web-crypto/CHANGELOG.md new file mode 100644 index 0000000..13023ad --- /dev/null +++ 
b/amplify/functions/fetchDocuments/node_modules/@aws-crypto/supports-web-crypto/CHANGELOG.md @@ -0,0 +1,66 @@ +# Change Log + +All notable changes to this project will be documented in this file. +See [Conventional Commits](https://conventionalcommits.org) for commit guidelines. + +# [5.2.0](https://github.com/aws/aws-sdk-js-crypto-helpers/compare/v5.1.0...v5.2.0) (2023-10-16) + +### Features + +- support ESM artifacts in all packages ([#752](https://github.com/aws/aws-sdk-js-crypto-helpers/issues/752)) ([e930ffb](https://github.com/aws/aws-sdk-js-crypto-helpers/commit/e930ffba5cfef66dd242049e7d514ced232c1e3b)) + +# [5.1.0](https://github.com/aws/aws-sdk-js-crypto-helpers/compare/v5.0.0...v5.1.0) (2023-09-22) + +### Bug Fixes + +- Update tsc to 2.x ([#735](https://github.com/aws/aws-sdk-js-crypto-helpers/issues/735)) ([782e0de](https://github.com/aws/aws-sdk-js-crypto-helpers/commit/782e0de9f5fef41f694130580a69d940894b6b8c)) + +# [5.0.0](https://github.com/aws/aws-sdk-js-crypto-helpers/compare/v4.0.1...v5.0.0) (2023-07-13) + +**Note:** Version bump only for package @aws-crypto/supports-web-crypto + +# [4.0.0](https://github.com/aws/aws-sdk-js-crypto-helpers/compare/v3.0.0...v4.0.0) (2023-02-20) + +**Note:** Version bump only for package @aws-crypto/supports-web-crypto + +# [3.0.0](https://github.com/aws/aws-sdk-js-crypto-helpers/compare/v2.0.2...v3.0.0) (2023-01-12) + +**Note:** Version bump only for package @aws-crypto/supports-web-crypto + +## [2.0.2](https://github.com/aws/aws-sdk-js-crypto-helpers/compare/v2.0.1...v2.0.2) (2022-09-07) + +**Note:** Version bump only for package @aws-crypto/supports-web-crypto + +# [2.0.0](https://github.com/aws/aws-sdk-js-crypto-helpers/compare/v1.2.2...v2.0.0) (2021-10-25) + +**Note:** Version bump only for package @aws-crypto/supports-web-crypto + +# [1.0.0](https://github.com/aws/aws-sdk-js-crypto-helpers/compare/@aws-crypto/supports-web-crypto@1.0.0-alpha.0...@aws-crypto/supports-web-crypto@1.0.0) (2020-10-22) + +### Bug 
Fixes + +- replace `sourceRoot` -> `rootDir` in tsconfig ([#169](https://github.com/aws/aws-sdk-js-crypto-helpers/issues/169)) ([d437167](https://github.com/aws/aws-sdk-js-crypto-helpers/commit/d437167b51d1c56a4fcc2bb8a446b74a7e3b7e06)) + +# [1.0.0-alpha.0](https://github.com/aws/aws-sdk-js-crypto-helpers/compare/@aws-crypto/supports-web-crypto@0.1.0-preview.4...@aws-crypto/supports-web-crypto@1.0.0-alpha.0) (2020-02-07) + +**Note:** Version bump only for package @aws-crypto/supports-web-crypto + +# [0.1.0-preview.4](https://github.com/aws/aws-sdk-js-crypto-helpers/compare/@aws-crypto/supports-web-crypto@0.1.0-preview.2...@aws-crypto/supports-web-crypto@0.1.0-preview.4) (2020-01-16) + +### Bug Fixes + +- Changed package.json files to point to the right Git repo ([#9](https://github.com/aws/aws-sdk-js-crypto-helpers/issues/9)) ([028245d](https://github.com/aws/aws-sdk-js-crypto-helpers/commit/028245d72e642ca98d82226afb300eb154503c4a)), closes [#8](https://github.com/aws/aws-sdk-js-crypto-helpers/issues/8) +- lerna version maintains package-lock ([#14](https://github.com/aws/aws-sdk-js-crypto-helpers/issues/14)) ([2ef29e1](https://github.com/aws/aws-sdk-js-crypto-helpers/commit/2ef29e13779703a5c9b32e93d18918fcb33b7272)), closes [#13](https://github.com/aws/aws-sdk-js-crypto-helpers/issues/13) + +# [0.1.0-preview.3](https://github.com/aws/aws-sdk-js-crypto-helpers/compare/@aws-crypto/supports-web-crypto@0.1.0-preview.2...@aws-crypto/supports-web-crypto@0.1.0-preview.3) (2019-11-15) + +### Bug Fixes + +- Changed package.json files to point to the right Git repo ([#9](https://github.com/aws/aws-sdk-js-crypto-helpers/issues/9)) ([028245d](https://github.com/aws/aws-sdk-js-crypto-helpers/commit/028245d72e642ca98d82226afb300eb154503c4a)), closes [#8](https://github.com/aws/aws-sdk-js-crypto-helpers/issues/8) +- lerna version maintains package-lock ([#14](https://github.com/aws/aws-sdk-js-crypto-helpers/issues/14)) 
([2ef29e1](https://github.com/aws/aws-sdk-js-crypto-helpers/commit/2ef29e13779703a5c9b32e93d18918fcb33b7272)), closes [#13](https://github.com/aws/aws-sdk-js-crypto-helpers/issues/13) + +# [0.1.0-preview.2](https://github.com/aws/aws-javascript-crypto-helpers/compare/@aws-crypto/supports-web-crypto@0.1.0-preview.1...@aws-crypto/supports-web-crypto@0.1.0-preview.2) (2019-10-30) + +### Bug Fixes + +- remove /src/ from .npmignore (for sourcemaps) ([#5](https://github.com/aws/aws-javascript-crypto-helpers/issues/5)) ([ec52056](https://github.com/aws/aws-javascript-crypto-helpers/commit/ec52056)) diff --git a/amplify/functions/fetchDocuments/node_modules/@aws-crypto/supports-web-crypto/LICENSE b/amplify/functions/fetchDocuments/node_modules/@aws-crypto/supports-web-crypto/LICENSE new file mode 100644 index 0000000..d645695 --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@aws-crypto/supports-web-crypto/LICENSE @@ -0,0 +1,202 @@ + + Apache License + Version 2.0, January 2004 + http://www.apache.org/licenses/ + + TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION + + 1. Definitions. + + "License" shall mean the terms and conditions for use, reproduction, + and distribution as defined by Sections 1 through 9 of this document. + + "Licensor" shall mean the copyright owner or entity authorized by + the copyright owner that is granting the License. + + "Legal Entity" shall mean the union of the acting entity and all + other entities that control, are controlled by, or are under common + control with that entity. For the purposes of this definition, + "control" means (i) the power, direct or indirect, to cause the + direction or management of such entity, whether by contract or + otherwise, or (ii) ownership of fifty percent (50%) or more of the + outstanding shares, or (iii) beneficial ownership of such entity. + + "You" (or "Your") shall mean an individual or Legal Entity + exercising permissions granted by this License. 
+ + "Source" form shall mean the preferred form for making modifications, + including but not limited to software source code, documentation + source, and configuration files. + + "Object" form shall mean any form resulting from mechanical + transformation or translation of a Source form, including but + not limited to compiled object code, generated documentation, + and conversions to other media types. + + "Work" shall mean the work of authorship, whether in Source or + Object form, made available under the License, as indicated by a + copyright notice that is included in or attached to the work + (an example is provided in the Appendix below). + + "Derivative Works" shall mean any work, whether in Source or Object + form, that is based on (or derived from) the Work and for which the + editorial revisions, annotations, elaborations, or other modifications + represent, as a whole, an original work of authorship. For the purposes + of this License, Derivative Works shall not include works that remain + separable from, or merely link (or bind by name) to the interfaces of, + the Work and Derivative Works thereof. + + "Contribution" shall mean any work of authorship, including + the original version of the Work and any modifications or additions + to that Work or Derivative Works thereof, that is intentionally + submitted to Licensor for inclusion in the Work by the copyright owner + or by an individual or Legal Entity authorized to submit on behalf of + the copyright owner. 
For the purposes of this definition, "submitted" + means any form of electronic, verbal, or written communication sent + to the Licensor or its representatives, including but not limited to + communication on electronic mailing lists, source code control systems, + and issue tracking systems that are managed by, or on behalf of, the + Licensor for the purpose of discussing and improving the Work, but + excluding communication that is conspicuously marked or otherwise + designated in writing by the copyright owner as "Not a Contribution." + + "Contributor" shall mean Licensor and any individual or Legal Entity + on behalf of whom a Contribution has been received by Licensor and + subsequently incorporated within the Work. + + 2. Grant of Copyright License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + copyright license to reproduce, prepare Derivative Works of, + publicly display, publicly perform, sublicense, and distribute the + Work and such Derivative Works in Source or Object form. + + 3. Grant of Patent License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + (except as stated in this section) patent license to make, have made, + use, offer to sell, sell, import, and otherwise transfer the Work, + where such license applies only to those patent claims licensable + by such Contributor that are necessarily infringed by their + Contribution(s) alone or by combination of their Contribution(s) + with the Work to which such Contribution(s) was submitted. 
If You + institute patent litigation against any entity (including a + cross-claim or counterclaim in a lawsuit) alleging that the Work + or a Contribution incorporated within the Work constitutes direct + or contributory patent infringement, then any patent licenses + granted to You under this License for that Work shall terminate + as of the date such litigation is filed. + + 4. Redistribution. You may reproduce and distribute copies of the + Work or Derivative Works thereof in any medium, with or without + modifications, and in Source or Object form, provided that You + meet the following conditions: + + (a) You must give any other recipients of the Work or + Derivative Works a copy of this License; and + + (b) You must cause any modified files to carry prominent notices + stating that You changed the files; and + + (c) You must retain, in the Source form of any Derivative Works + that You distribute, all copyright, patent, trademark, and + attribution notices from the Source form of the Work, + excluding those notices that do not pertain to any part of + the Derivative Works; and + + (d) If the Work includes a "NOTICE" text file as part of its + distribution, then any Derivative Works that You distribute must + include a readable copy of the attribution notices contained + within such NOTICE file, excluding those notices that do not + pertain to any part of the Derivative Works, in at least one + of the following places: within a NOTICE text file distributed + as part of the Derivative Works; within the Source form or + documentation, if provided along with the Derivative Works; or, + within a display generated by the Derivative Works, if and + wherever such third-party notices normally appear. The contents + of the NOTICE file are for informational purposes only and + do not modify the License. 
You may add Your own attribution + notices within Derivative Works that You distribute, alongside + or as an addendum to the NOTICE text from the Work, provided + that such additional attribution notices cannot be construed + as modifying the License. + + You may add Your own copyright statement to Your modifications and + may provide additional or different license terms and conditions + for use, reproduction, or distribution of Your modifications, or + for any such Derivative Works as a whole, provided Your use, + reproduction, and distribution of the Work otherwise complies with + the conditions stated in this License. + + 5. Submission of Contributions. Unless You explicitly state otherwise, + any Contribution intentionally submitted for inclusion in the Work + by You to the Licensor shall be under the terms and conditions of + this License, without any additional terms or conditions. + Notwithstanding the above, nothing herein shall supersede or modify + the terms of any separate license agreement you may have executed + with Licensor regarding such Contributions. + + 6. Trademarks. This License does not grant permission to use the trade + names, trademarks, service marks, or product names of the Licensor, + except as required for reasonable and customary use in describing the + origin of the Work and reproducing the content of the NOTICE file. + + 7. Disclaimer of Warranty. Unless required by applicable law or + agreed to in writing, Licensor provides the Work (and each + Contributor provides its Contributions) on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or + implied, including, without limitation, any warranties or conditions + of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A + PARTICULAR PURPOSE. You are solely responsible for determining the + appropriateness of using or redistributing the Work and assume any + risks associated with Your exercise of permissions under this License. + + 8. 
Limitation of Liability. In no event and under no legal theory, + whether in tort (including negligence), contract, or otherwise, + unless required by applicable law (such as deliberate and grossly + negligent acts) or agreed to in writing, shall any Contributor be + liable to You for damages, including any direct, indirect, special, + incidental, or consequential damages of any character arising as a + result of this License or out of the use or inability to use the + Work (including but not limited to damages for loss of goodwill, + work stoppage, computer failure or malfunction, or any and all + other commercial damages or losses), even if such Contributor + has been advised of the possibility of such damages. + + 9. Accepting Warranty or Additional Liability. While redistributing + the Work or Derivative Works thereof, You may choose to offer, + and charge a fee for, acceptance of support, warranty, indemnity, + or other liability obligations and/or rights consistent with this + License. However, in accepting such obligations, You may act only + on Your own behalf and on Your sole responsibility, not on behalf + of any other Contributor, and only if You agree to indemnify, + defend, and hold each Contributor harmless for any liability + incurred by, or claims asserted against, such Contributor by reason + of your accepting any such warranty or additional liability. + + END OF TERMS AND CONDITIONS + + APPENDIX: How to apply the Apache License to your work. + + To apply the Apache License to your work, attach the following + boilerplate notice, with the fields enclosed by brackets "[]" + replaced with your own identifying information. (Don't include + the brackets!) The text should be enclosed in the appropriate + comment syntax for the file format. We also recommend that a + file or class name and description of purpose be included on the + same "printed page" as the copyright notice for easier + identification within third-party archives. 
+ + Copyright [yyyy] [name of copyright owner] + + Licensed under the Apache License, Version 2.0 (the "License"); + you may not use this file except in compliance with the License. + You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + + Unless required by applicable law or agreed to in writing, software + distributed under the License is distributed on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + See the License for the specific language governing permissions and + limitations under the License. diff --git a/amplify/functions/fetchDocuments/node_modules/@aws-crypto/supports-web-crypto/README.md b/amplify/functions/fetchDocuments/node_modules/@aws-crypto/supports-web-crypto/README.md new file mode 100644 index 0000000..7891357 --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@aws-crypto/supports-web-crypto/README.md @@ -0,0 +1,32 @@ +# @aws-crypto/supports-web-crypto + +Functions to check web crypto support for browsers. + +## Usage + +``` +import {supportsWebCrypto} from '@aws-crypto/supports-web-crypto'; + +if (supportsWebCrypto(window)) { + // window.crypto.subtle.encrypt will exist +} + +``` + +## supportsWebCrypto + +Used to make sure `window.crypto.subtle` exists and implements crypto functions +as well as a cryptographic secure random source exists. + +## supportsSecureRandom + +Used to make sure that a cryptographic secure random source exists. +Does not check for `window.crypto.subtle`. 
+ +## supportsSubtleCrypto + +## supportsZeroByteGCM + +## Test + +`npm test` diff --git a/amplify/functions/fetchDocuments/node_modules/@aws-crypto/supports-web-crypto/build/main/index.d.ts b/amplify/functions/fetchDocuments/node_modules/@aws-crypto/supports-web-crypto/build/main/index.d.ts new file mode 100644 index 0000000..9725c9c --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@aws-crypto/supports-web-crypto/build/main/index.d.ts @@ -0,0 +1 @@ +export * from "./supportsWebCrypto"; diff --git a/amplify/functions/fetchDocuments/node_modules/@aws-crypto/supports-web-crypto/build/main/index.js b/amplify/functions/fetchDocuments/node_modules/@aws-crypto/supports-web-crypto/build/main/index.js new file mode 100644 index 0000000..cc4c93f --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@aws-crypto/supports-web-crypto/build/main/index.js @@ -0,0 +1,5 @@ +"use strict"; +Object.defineProperty(exports, "__esModule", { value: true }); +var tslib_1 = require("tslib"); +tslib_1.__exportStar(require("./supportsWebCrypto"), exports); +//# sourceMappingURL=index.js.map \ No newline at end of file diff --git a/amplify/functions/fetchDocuments/node_modules/@aws-crypto/supports-web-crypto/build/main/index.js.map b/amplify/functions/fetchDocuments/node_modules/@aws-crypto/supports-web-crypto/build/main/index.js.map new file mode 100644 index 0000000..df0dd2c --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@aws-crypto/supports-web-crypto/build/main/index.js.map @@ -0,0 +1 @@ +{"version":3,"file":"index.js","sourceRoot":"","sources":["../../src/index.ts"],"names":[],"mappings":";;;AAAA,8DAAoC"} \ No newline at end of file diff --git a/amplify/functions/fetchDocuments/node_modules/@aws-crypto/supports-web-crypto/build/main/supportsWebCrypto.d.ts b/amplify/functions/fetchDocuments/node_modules/@aws-crypto/supports-web-crypto/build/main/supportsWebCrypto.d.ts new file mode 100644 index 0000000..f2723dc --- /dev/null +++ 
b/amplify/functions/fetchDocuments/node_modules/@aws-crypto/supports-web-crypto/build/main/supportsWebCrypto.d.ts @@ -0,0 +1,4 @@ +export declare function supportsWebCrypto(window: Window): boolean; +export declare function supportsSecureRandom(window: Window): boolean; +export declare function supportsSubtleCrypto(subtle: SubtleCrypto): boolean; +export declare function supportsZeroByteGCM(subtle: SubtleCrypto): Promise; diff --git a/amplify/functions/fetchDocuments/node_modules/@aws-crypto/supports-web-crypto/build/main/supportsWebCrypto.js b/amplify/functions/fetchDocuments/node_modules/@aws-crypto/supports-web-crypto/build/main/supportsWebCrypto.js new file mode 100644 index 0000000..378f31e --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@aws-crypto/supports-web-crypto/build/main/supportsWebCrypto.js @@ -0,0 +1,69 @@ +"use strict"; +Object.defineProperty(exports, "__esModule", { value: true }); +exports.supportsZeroByteGCM = exports.supportsSubtleCrypto = exports.supportsSecureRandom = exports.supportsWebCrypto = void 0; +var tslib_1 = require("tslib"); +var subtleCryptoMethods = [ + "decrypt", + "digest", + "encrypt", + "exportKey", + "generateKey", + "importKey", + "sign", + "verify" +]; +function supportsWebCrypto(window) { + if (supportsSecureRandom(window) && + typeof window.crypto.subtle === "object") { + var subtle = window.crypto.subtle; + return supportsSubtleCrypto(subtle); + } + return false; +} +exports.supportsWebCrypto = supportsWebCrypto; +function supportsSecureRandom(window) { + if (typeof window === "object" && typeof window.crypto === "object") { + var getRandomValues = window.crypto.getRandomValues; + return typeof getRandomValues === "function"; + } + return false; +} +exports.supportsSecureRandom = supportsSecureRandom; +function supportsSubtleCrypto(subtle) { + return (subtle && + subtleCryptoMethods.every(function (methodName) { return typeof subtle[methodName] === "function"; })); +} +exports.supportsSubtleCrypto = 
supportsSubtleCrypto; +function supportsZeroByteGCM(subtle) { + return tslib_1.__awaiter(this, void 0, void 0, function () { + var key, zeroByteAuthTag, _a; + return tslib_1.__generator(this, function (_b) { + switch (_b.label) { + case 0: + if (!supportsSubtleCrypto(subtle)) + return [2 /*return*/, false]; + _b.label = 1; + case 1: + _b.trys.push([1, 4, , 5]); + return [4 /*yield*/, subtle.generateKey({ name: "AES-GCM", length: 128 }, false, ["encrypt"])]; + case 2: + key = _b.sent(); + return [4 /*yield*/, subtle.encrypt({ + name: "AES-GCM", + iv: new Uint8Array(Array(12)), + additionalData: new Uint8Array(Array(16)), + tagLength: 128 + }, key, new Uint8Array(0))]; + case 3: + zeroByteAuthTag = _b.sent(); + return [2 /*return*/, zeroByteAuthTag.byteLength === 16]; + case 4: + _a = _b.sent(); + return [2 /*return*/, false]; + case 5: return [2 /*return*/]; + } + }); + }); +} +exports.supportsZeroByteGCM = supportsZeroByteGCM; +//# sourceMappingURL=supportsWebCrypto.js.map \ No newline at end of file diff --git a/amplify/functions/fetchDocuments/node_modules/@aws-crypto/supports-web-crypto/build/main/supportsWebCrypto.js.map b/amplify/functions/fetchDocuments/node_modules/@aws-crypto/supports-web-crypto/build/main/supportsWebCrypto.js.map new file mode 100644 index 0000000..1cc0ea3 --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@aws-crypto/supports-web-crypto/build/main/supportsWebCrypto.js.map @@ -0,0 +1 @@ 
+{"version":3,"file":"supportsWebCrypto.js","sourceRoot":"","sources":["../../src/supportsWebCrypto.ts"],"names":[],"mappings":";;;;AAUA,IAAM,mBAAmB,GAA8B;IACrD,SAAS;IACT,QAAQ;IACR,SAAS;IACT,WAAW;IACX,aAAa;IACb,WAAW;IACX,MAAM;IACN,QAAQ;CACT,CAAC;AAEF,SAAgB,iBAAiB,CAAC,MAAc;IAC9C,IACE,oBAAoB,CAAC,MAAM,CAAC;QAC5B,OAAO,MAAM,CAAC,MAAM,CAAC,MAAM,KAAK,QAAQ,EACxC;QACQ,IAAA,MAAM,GAAK,MAAM,CAAC,MAAM,OAAlB,CAAmB;QAEjC,OAAO,oBAAoB,CAAC,MAAM,CAAC,CAAC;KACrC;IAED,OAAO,KAAK,CAAC;AACf,CAAC;AAXD,8CAWC;AAED,SAAgB,oBAAoB,CAAC,MAAc;IACjD,IAAI,OAAO,MAAM,KAAK,QAAQ,IAAI,OAAO,MAAM,CAAC,MAAM,KAAK,QAAQ,EAAE;QAC3D,IAAA,eAAe,GAAK,MAAM,CAAC,MAAM,gBAAlB,CAAmB;QAE1C,OAAO,OAAO,eAAe,KAAK,UAAU,CAAC;KAC9C;IAED,OAAO,KAAK,CAAC;AACf,CAAC;AARD,oDAQC;AAED,SAAgB,oBAAoB,CAAC,MAAoB;IACvD,OAAO,CACL,MAAM;QACN,mBAAmB,CAAC,KAAK,CACvB,UAAA,UAAU,IAAI,OAAA,OAAO,MAAM,CAAC,UAAU,CAAC,KAAK,UAAU,EAAxC,CAAwC,CACvD,CACF,CAAC;AACJ,CAAC;AAPD,oDAOC;AAED,SAAsB,mBAAmB,CAAC,MAAoB;;;;;;oBAC5D,IAAI,CAAC,oBAAoB,CAAC,MAAM,CAAC;wBAAE,sBAAO,KAAK,EAAC;;;;oBAElC,qBAAM,MAAM,CAAC,WAAW,CAClC,EAAE,IAAI,EAAE,SAAS,EAAE,MAAM,EAAE,GAAG,EAAE,EAChC,KAAK,EACL,CAAC,SAAS,CAAC,CACZ,EAAA;;oBAJK,GAAG,GAAG,SAIX;oBACuB,qBAAM,MAAM,CAAC,OAAO,CAC1C;4BACE,IAAI,EAAE,SAAS;4BACf,EAAE,EAAE,IAAI,UAAU,CAAC,KAAK,CAAC,EAAE,CAAC,CAAC;4BAC7B,cAAc,EAAE,IAAI,UAAU,CAAC,KAAK,CAAC,EAAE,CAAC,CAAC;4BACzC,SAAS,EAAE,GAAG;yBACf,EACD,GAAG,EACH,IAAI,UAAU,CAAC,CAAC,CAAC,CAClB,EAAA;;oBATK,eAAe,GAAG,SASvB;oBACD,sBAAO,eAAe,CAAC,UAAU,KAAK,EAAE,EAAC;;;oBAEzC,sBAAO,KAAK,EAAC;;;;;CAEhB;AAtBD,kDAsBC"} \ No newline at end of file diff --git a/amplify/functions/fetchDocuments/node_modules/@aws-crypto/supports-web-crypto/build/module/index.d.ts b/amplify/functions/fetchDocuments/node_modules/@aws-crypto/supports-web-crypto/build/module/index.d.ts new file mode 100644 index 0000000..9725c9c --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@aws-crypto/supports-web-crypto/build/module/index.d.ts @@ -0,0 +1 @@ +export * from "./supportsWebCrypto"; diff --git 
a/amplify/functions/fetchDocuments/node_modules/@aws-crypto/supports-web-crypto/build/module/index.js b/amplify/functions/fetchDocuments/node_modules/@aws-crypto/supports-web-crypto/build/module/index.js new file mode 100644 index 0000000..f5527ea --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@aws-crypto/supports-web-crypto/build/module/index.js @@ -0,0 +1,2 @@ +export * from "./supportsWebCrypto"; +//# sourceMappingURL=index.js.map \ No newline at end of file diff --git a/amplify/functions/fetchDocuments/node_modules/@aws-crypto/supports-web-crypto/build/module/index.js.map b/amplify/functions/fetchDocuments/node_modules/@aws-crypto/supports-web-crypto/build/module/index.js.map new file mode 100644 index 0000000..b2df430 --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@aws-crypto/supports-web-crypto/build/module/index.js.map @@ -0,0 +1 @@ +{"version":3,"file":"index.js","sourceRoot":"","sources":["../../src/index.ts"],"names":[],"mappings":"AAAA,cAAc,qBAAqB,CAAC"} \ No newline at end of file diff --git a/amplify/functions/fetchDocuments/node_modules/@aws-crypto/supports-web-crypto/build/module/supportsWebCrypto.d.ts b/amplify/functions/fetchDocuments/node_modules/@aws-crypto/supports-web-crypto/build/module/supportsWebCrypto.d.ts new file mode 100644 index 0000000..f2723dc --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@aws-crypto/supports-web-crypto/build/module/supportsWebCrypto.d.ts @@ -0,0 +1,4 @@ +export declare function supportsWebCrypto(window: Window): boolean; +export declare function supportsSecureRandom(window: Window): boolean; +export declare function supportsSubtleCrypto(subtle: SubtleCrypto): boolean; +export declare function supportsZeroByteGCM(subtle: SubtleCrypto): Promise; diff --git a/amplify/functions/fetchDocuments/node_modules/@aws-crypto/supports-web-crypto/build/module/supportsWebCrypto.js 
b/amplify/functions/fetchDocuments/node_modules/@aws-crypto/supports-web-crypto/build/module/supportsWebCrypto.js new file mode 100644 index 0000000..70b46e6 --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@aws-crypto/supports-web-crypto/build/module/supportsWebCrypto.js @@ -0,0 +1,62 @@ +import { __awaiter, __generator } from "tslib"; +var subtleCryptoMethods = [ + "decrypt", + "digest", + "encrypt", + "exportKey", + "generateKey", + "importKey", + "sign", + "verify" +]; +export function supportsWebCrypto(window) { + if (supportsSecureRandom(window) && + typeof window.crypto.subtle === "object") { + var subtle = window.crypto.subtle; + return supportsSubtleCrypto(subtle); + } + return false; +} +export function supportsSecureRandom(window) { + if (typeof window === "object" && typeof window.crypto === "object") { + var getRandomValues = window.crypto.getRandomValues; + return typeof getRandomValues === "function"; + } + return false; +} +export function supportsSubtleCrypto(subtle) { + return (subtle && + subtleCryptoMethods.every(function (methodName) { return typeof subtle[methodName] === "function"; })); +} +export function supportsZeroByteGCM(subtle) { + return __awaiter(this, void 0, void 0, function () { + var key, zeroByteAuthTag, _a; + return __generator(this, function (_b) { + switch (_b.label) { + case 0: + if (!supportsSubtleCrypto(subtle)) + return [2 /*return*/, false]; + _b.label = 1; + case 1: + _b.trys.push([1, 4, , 5]); + return [4 /*yield*/, subtle.generateKey({ name: "AES-GCM", length: 128 }, false, ["encrypt"])]; + case 2: + key = _b.sent(); + return [4 /*yield*/, subtle.encrypt({ + name: "AES-GCM", + iv: new Uint8Array(Array(12)), + additionalData: new Uint8Array(Array(16)), + tagLength: 128 + }, key, new Uint8Array(0))]; + case 3: + zeroByteAuthTag = _b.sent(); + return [2 /*return*/, zeroByteAuthTag.byteLength === 16]; + case 4: + _a = _b.sent(); + return [2 /*return*/, false]; + case 5: return [2 /*return*/]; + } + }); + 
}); +} +//# sourceMappingURL=supportsWebCrypto.js.map \ No newline at end of file diff --git a/amplify/functions/fetchDocuments/node_modules/@aws-crypto/supports-web-crypto/build/module/supportsWebCrypto.js.map b/amplify/functions/fetchDocuments/node_modules/@aws-crypto/supports-web-crypto/build/module/supportsWebCrypto.js.map new file mode 100644 index 0000000..967fc19 --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@aws-crypto/supports-web-crypto/build/module/supportsWebCrypto.js.map @@ -0,0 +1 @@ +{"version":3,"file":"supportsWebCrypto.js","sourceRoot":"","sources":["../../src/supportsWebCrypto.ts"],"names":[],"mappings":";AAUA,IAAM,mBAAmB,GAA8B;IACrD,SAAS;IACT,QAAQ;IACR,SAAS;IACT,WAAW;IACX,aAAa;IACb,WAAW;IACX,MAAM;IACN,QAAQ;CACT,CAAC;AAEF,MAAM,UAAU,iBAAiB,CAAC,MAAc;IAC9C,IACE,oBAAoB,CAAC,MAAM,CAAC;QAC5B,OAAO,MAAM,CAAC,MAAM,CAAC,MAAM,KAAK,QAAQ,EACxC;QACQ,IAAA,MAAM,GAAK,MAAM,CAAC,MAAM,OAAlB,CAAmB;QAEjC,OAAO,oBAAoB,CAAC,MAAM,CAAC,CAAC;KACrC;IAED,OAAO,KAAK,CAAC;AACf,CAAC;AAED,MAAM,UAAU,oBAAoB,CAAC,MAAc;IACjD,IAAI,OAAO,MAAM,KAAK,QAAQ,IAAI,OAAO,MAAM,CAAC,MAAM,KAAK,QAAQ,EAAE;QAC3D,IAAA,eAAe,GAAK,MAAM,CAAC,MAAM,gBAAlB,CAAmB;QAE1C,OAAO,OAAO,eAAe,KAAK,UAAU,CAAC;KAC9C;IAED,OAAO,KAAK,CAAC;AACf,CAAC;AAED,MAAM,UAAU,oBAAoB,CAAC,MAAoB;IACvD,OAAO,CACL,MAAM;QACN,mBAAmB,CAAC,KAAK,CACvB,UAAA,UAAU,IAAI,OAAA,OAAO,MAAM,CAAC,UAAU,CAAC,KAAK,UAAU,EAAxC,CAAwC,CACvD,CACF,CAAC;AACJ,CAAC;AAED,MAAM,UAAgB,mBAAmB,CAAC,MAAoB;;;;;;oBAC5D,IAAI,CAAC,oBAAoB,CAAC,MAAM,CAAC;wBAAE,sBAAO,KAAK,EAAC;;;;oBAElC,qBAAM,MAAM,CAAC,WAAW,CAClC,EAAE,IAAI,EAAE,SAAS,EAAE,MAAM,EAAE,GAAG,EAAE,EAChC,KAAK,EACL,CAAC,SAAS,CAAC,CACZ,EAAA;;oBAJK,GAAG,GAAG,SAIX;oBACuB,qBAAM,MAAM,CAAC,OAAO,CAC1C;4BACE,IAAI,EAAE,SAAS;4BACf,EAAE,EAAE,IAAI,UAAU,CAAC,KAAK,CAAC,EAAE,CAAC,CAAC;4BAC7B,cAAc,EAAE,IAAI,UAAU,CAAC,KAAK,CAAC,EAAE,CAAC,CAAC;4BACzC,SAAS,EAAE,GAAG;yBACf,EACD,GAAG,EACH,IAAI,UAAU,CAAC,CAAC,CAAC,CAClB,EAAA;;oBATK,eAAe,GAAG,SASvB;oBACD,sBAAO,eAAe,CAAC,UAAU,KAAK,EAAE,EAAC;;;oBAEzC,sBAAO,KAAK,EAAC;;;;;CAEhB"} \ 
No newline at end of file diff --git a/amplify/functions/fetchDocuments/node_modules/@aws-crypto/supports-web-crypto/package.json b/amplify/functions/fetchDocuments/node_modules/@aws-crypto/supports-web-crypto/package.json new file mode 100644 index 0000000..a97bf01 --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@aws-crypto/supports-web-crypto/package.json @@ -0,0 +1,28 @@ +{ + "name": "@aws-crypto/supports-web-crypto", + "version": "5.2.0", + "description": "Provides functions for detecting if the host environment supports the WebCrypto API", + "scripts": { + "prepublishOnly": "tsc -p tsconfig.json && tsc -p tsconfig.module.json", + "pretest": "tsc -p tsconfig.test.json", + "test": "mocha --require ts-node/register test/**/*test.ts" + }, + "repository": { + "type": "git", + "url": "git@github.com:aws/aws-sdk-js-crypto-helpers.git" + }, + "author": { + "name": "AWS Crypto Tools Team", + "email": "aws-cryptools@amazon.com", + "url": "https://docs.aws.amazon.com/aws-crypto-tools/index.html?id=docs_gateway#lang/en_us" + }, + "homepage": "https://github.com/aws/aws-sdk-js-crypto-helpers/tree/master/packages/supports-web-crypto", + "license": "Apache-2.0", + "main": "./build/main/index.js", + "module": "./build/module/index.js", + "types": "./build/main/index.d.ts", + "dependencies": { + "tslib": "^2.6.2" + }, + "gitHead": "c11b171b35ec5c093364f0e0d8dc4ab1af68e748" +} diff --git a/amplify/functions/fetchDocuments/node_modules/@aws-crypto/supports-web-crypto/src/index.ts b/amplify/functions/fetchDocuments/node_modules/@aws-crypto/supports-web-crypto/src/index.ts new file mode 100644 index 0000000..9725c9c --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@aws-crypto/supports-web-crypto/src/index.ts @@ -0,0 +1 @@ +export * from "./supportsWebCrypto"; diff --git a/amplify/functions/fetchDocuments/node_modules/@aws-crypto/supports-web-crypto/src/supportsWebCrypto.ts 
b/amplify/functions/fetchDocuments/node_modules/@aws-crypto/supports-web-crypto/src/supportsWebCrypto.ts new file mode 100644 index 0000000..7eef629 --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@aws-crypto/supports-web-crypto/src/supportsWebCrypto.ts @@ -0,0 +1,76 @@ +type SubtleCryptoMethod = + | "decrypt" + | "digest" + | "encrypt" + | "exportKey" + | "generateKey" + | "importKey" + | "sign" + | "verify"; + +const subtleCryptoMethods: Array = [ + "decrypt", + "digest", + "encrypt", + "exportKey", + "generateKey", + "importKey", + "sign", + "verify" +]; + +export function supportsWebCrypto(window: Window): boolean { + if ( + supportsSecureRandom(window) && + typeof window.crypto.subtle === "object" + ) { + const { subtle } = window.crypto; + + return supportsSubtleCrypto(subtle); + } + + return false; +} + +export function supportsSecureRandom(window: Window): boolean { + if (typeof window === "object" && typeof window.crypto === "object") { + const { getRandomValues } = window.crypto; + + return typeof getRandomValues === "function"; + } + + return false; +} + +export function supportsSubtleCrypto(subtle: SubtleCrypto) { + return ( + subtle && + subtleCryptoMethods.every( + methodName => typeof subtle[methodName] === "function" + ) + ); +} + +export async function supportsZeroByteGCM(subtle: SubtleCrypto) { + if (!supportsSubtleCrypto(subtle)) return false; + try { + const key = await subtle.generateKey( + { name: "AES-GCM", length: 128 }, + false, + ["encrypt"] + ); + const zeroByteAuthTag = await subtle.encrypt( + { + name: "AES-GCM", + iv: new Uint8Array(Array(12)), + additionalData: new Uint8Array(Array(16)), + tagLength: 128 + }, + key, + new Uint8Array(0) + ); + return zeroByteAuthTag.byteLength === 16; + } catch { + return false; + } +} diff --git a/amplify/functions/fetchDocuments/node_modules/@aws-crypto/supports-web-crypto/tsconfig.json b/amplify/functions/fetchDocuments/node_modules/@aws-crypto/supports-web-crypto/tsconfig.json new 
file mode 100644 index 0000000..efca6de --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@aws-crypto/supports-web-crypto/tsconfig.json @@ -0,0 +1,10 @@ +{ + "extends": "../tsconfig.json", + "compilerOptions": { + "lib": ["dom"], + "rootDir": "./src", + "outDir": "./build/main", + }, + "include": ["src/**/*.ts"], + "exclude": ["node_modules/**"] +} diff --git a/amplify/functions/fetchDocuments/node_modules/@aws-crypto/supports-web-crypto/tsconfig.module.json b/amplify/functions/fetchDocuments/node_modules/@aws-crypto/supports-web-crypto/tsconfig.module.json new file mode 100644 index 0000000..7d0cfdd --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@aws-crypto/supports-web-crypto/tsconfig.module.json @@ -0,0 +1,7 @@ +{ + "extends": "./tsconfig", + "compilerOptions": { + "outDir": "build/module", + "module": "esnext", + } +} diff --git a/amplify/functions/fetchDocuments/node_modules/@aws-crypto/util/CHANGELOG.md b/amplify/functions/fetchDocuments/node_modules/@aws-crypto/util/CHANGELOG.md new file mode 100644 index 0000000..df2cecb --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@aws-crypto/util/CHANGELOG.md @@ -0,0 +1,71 @@ +# Change Log + +All notable changes to this project will be documented in this file. +See [Conventional Commits](https://conventionalcommits.org) for commit guidelines. 
+ +# [5.2.0](https://github.com/aws/aws-sdk-js-crypto-helpers/compare/v5.1.0...v5.2.0) (2023-10-16) + +### Features + +- support ESM artifacts in all packages ([#752](https://github.com/aws/aws-sdk-js-crypto-helpers/issues/752)) ([e930ffb](https://github.com/aws/aws-sdk-js-crypto-helpers/commit/e930ffba5cfef66dd242049e7d514ced232c1e3b)) + +# [5.1.0](https://github.com/aws/aws-sdk-js-crypto-helpers/compare/v5.0.0...v5.1.0) (2023-09-22) + +### Bug Fixes + +- Update tsc to 2.x ([#735](https://github.com/aws/aws-sdk-js-crypto-helpers/issues/735)) ([782e0de](https://github.com/aws/aws-sdk-js-crypto-helpers/commit/782e0de9f5fef41f694130580a69d940894b6b8c)) + +### Features + +- Use @smithy/util-utf8 ([#730](https://github.com/aws/aws-sdk-js-crypto-helpers/issues/730)) ([00fb851](https://github.com/aws/aws-sdk-js-crypto-helpers/commit/00fb851ca3559d5a1f370f9256814de1210826b8)), closes [#699](https://github.com/aws/aws-sdk-js-crypto-helpers/issues/699) + +# [5.0.0](https://github.com/aws/aws-sdk-js-crypto-helpers/compare/v4.0.1...v5.0.0) (2023-07-13) + +**Note:** Version bump only for package @aws-crypto/util + +# [4.0.0](https://github.com/aws/aws-sdk-js-crypto-helpers/compare/v3.0.0...v4.0.0) (2023-02-20) + +**Note:** Version bump only for package @aws-crypto/util + +# [3.0.0](https://github.com/aws/aws-sdk-js-crypto-helpers/compare/v2.0.2...v3.0.0) (2023-01-12) + +- feat!: replace Hash implementations with Checksum interface (#492) ([da43dc0](https://github.com/aws/aws-sdk-js-crypto-helpers/commit/da43dc0fdf669d9ebb5bfb1b1f7c79e46c4aaae1)), closes [#492](https://github.com/aws/aws-sdk-js-crypto-helpers/issues/492) + +### BREAKING CHANGES + +- All classes that implemented `Hash` now implement `Checksum`. 
+ +## [2.0.2](https://github.com/aws/aws-sdk-js-crypto-helpers/compare/v2.0.1...v2.0.2) (2022-09-07) + +### Bug Fixes + +- **#337:** update @aws-sdk/types ([#373](https://github.com/aws/aws-sdk-js-crypto-helpers/issues/373)) ([b26a811](https://github.com/aws/aws-sdk-js-crypto-helpers/commit/b26a811a392f5209c7ec7e57251500d4d78f97ff)), closes [#337](https://github.com/aws/aws-sdk-js-crypto-helpers/issues/337) +- **docs:** update README for packages/util ([#382](https://github.com/aws/aws-sdk-js-crypto-helpers/issues/382)) ([f3e650e](https://github.com/aws/aws-sdk-js-crypto-helpers/commit/f3e650e1b4792ffbea2e8a1a015fd55fb951a3a4)) + +## [2.0.1](https://github.com/aws/aws-sdk-js-crypto-helpers/compare/v2.0.0...v2.0.1) (2021-12-09) + +### Bug Fixes + +- **uint32ArrayFrom:** increment index & polyfill for Uint32Array ([#270](https://github.com/aws/aws-sdk-js-crypto-helpers/issues/270)) ([a70d603](https://github.com/aws/aws-sdk-js-crypto-helpers/commit/a70d603f3ba7600d3c1213f297d4160a4b3793bd)) + +# [2.0.0](https://github.com/aws/aws-sdk-js-crypto-helpers/compare/v1.2.2...v2.0.0) (2021-10-25) + +**Note:** Version bump only for package @aws-crypto/util + +## [1.2.2](https://github.com/aws/aws-sdk-js-crypto-helpers/compare/v1.2.1...v1.2.2) (2021-10-12) + +### Bug Fixes + +- **crc32c:** ie11 does not support Array.from ([#221](https://github.com/aws/aws-sdk-js-crypto-helpers/issues/221)) ([5f49547](https://github.com/aws/aws-sdk-js-crypto-helpers/commit/5f495472ab8988cf203e0f2a70a51f7e1fcd7e60)) + +## [1.2.1](https://github.com/aws/aws-sdk-js-crypto-helpers/compare/v1.2.0...v1.2.1) (2021-09-17) + +### Bug Fixes + +- better pollyfill check for Buffer ([#217](https://github.com/aws/aws-sdk-js-crypto-helpers/issues/217)) ([bc97da2](https://github.com/aws/aws-sdk-js-crypto-helpers/commit/bc97da29aaf473943e4407c9a29cc30f74f15723)) + +# [1.2.0](https://github.com/aws/aws-sdk-js-crypto-helpers/compare/v1.1.1...v1.2.0) (2021-09-17) + +### Features + +- add @aws-crypto/util 
([8f489cb](https://github.com/aws/aws-sdk-js-crypto-helpers/commit/8f489cbe4c0e134f826bac66f1bf5172597048b9)) diff --git a/amplify/functions/fetchDocuments/node_modules/@aws-crypto/util/LICENSE b/amplify/functions/fetchDocuments/node_modules/@aws-crypto/util/LICENSE new file mode 100644 index 0000000..980a15a --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@aws-crypto/util/LICENSE @@ -0,0 +1,201 @@ + Apache License + Version 2.0, January 2004 + http://www.apache.org/licenses/ + + TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION + + 1. Definitions. + + "License" shall mean the terms and conditions for use, reproduction, + and distribution as defined by Sections 1 through 9 of this document. + + "Licensor" shall mean the copyright owner or entity authorized by + the copyright owner that is granting the License. + + "Legal Entity" shall mean the union of the acting entity and all + other entities that control, are controlled by, or are under common + control with that entity. For the purposes of this definition, + "control" means (i) the power, direct or indirect, to cause the + direction or management of such entity, whether by contract or + otherwise, or (ii) ownership of fifty percent (50%) or more of the + outstanding shares, or (iii) beneficial ownership of such entity. + + "You" (or "Your") shall mean an individual or Legal Entity + exercising permissions granted by this License. + + "Source" form shall mean the preferred form for making modifications, + including but not limited to software source code, documentation + source, and configuration files. + + "Object" form shall mean any form resulting from mechanical + transformation or translation of a Source form, including but + not limited to compiled object code, generated documentation, + and conversions to other media types. 
+ + "Work" shall mean the work of authorship, whether in Source or + Object form, made available under the License, as indicated by a + copyright notice that is included in or attached to the work + (an example is provided in the Appendix below). + + "Derivative Works" shall mean any work, whether in Source or Object + form, that is based on (or derived from) the Work and for which the + editorial revisions, annotations, elaborations, or other modifications + represent, as a whole, an original work of authorship. For the purposes + of this License, Derivative Works shall not include works that remain + separable from, or merely link (or bind by name) to the interfaces of, + the Work and Derivative Works thereof. + + "Contribution" shall mean any work of authorship, including + the original version of the Work and any modifications or additions + to that Work or Derivative Works thereof, that is intentionally + submitted to Licensor for inclusion in the Work by the copyright owner + or by an individual or Legal Entity authorized to submit on behalf of + the copyright owner. For the purposes of this definition, "submitted" + means any form of electronic, verbal, or written communication sent + to the Licensor or its representatives, including but not limited to + communication on electronic mailing lists, source code control systems, + and issue tracking systems that are managed by, or on behalf of, the + Licensor for the purpose of discussing and improving the Work, but + excluding communication that is conspicuously marked or otherwise + designated in writing by the copyright owner as "Not a Contribution." + + "Contributor" shall mean Licensor and any individual or Legal Entity + on behalf of whom a Contribution has been received by Licensor and + subsequently incorporated within the Work. + + 2. Grant of Copyright License. 
Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + copyright license to reproduce, prepare Derivative Works of, + publicly display, publicly perform, sublicense, and distribute the + Work and such Derivative Works in Source or Object form. + + 3. Grant of Patent License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + (except as stated in this section) patent license to make, have made, + use, offer to sell, sell, import, and otherwise transfer the Work, + where such license applies only to those patent claims licensable + by such Contributor that are necessarily infringed by their + Contribution(s) alone or by combination of their Contribution(s) + with the Work to which such Contribution(s) was submitted. If You + institute patent litigation against any entity (including a + cross-claim or counterclaim in a lawsuit) alleging that the Work + or a Contribution incorporated within the Work constitutes direct + or contributory patent infringement, then any patent licenses + granted to You under this License for that Work shall terminate + as of the date such litigation is filed. + + 4. Redistribution. 
You may reproduce and distribute copies of the + Work or Derivative Works thereof in any medium, with or without + modifications, and in Source or Object form, provided that You + meet the following conditions: + + (a) You must give any other recipients of the Work or + Derivative Works a copy of this License; and + + (b) You must cause any modified files to carry prominent notices + stating that You changed the files; and + + (c) You must retain, in the Source form of any Derivative Works + that You distribute, all copyright, patent, trademark, and + attribution notices from the Source form of the Work, + excluding those notices that do not pertain to any part of + the Derivative Works; and + + (d) If the Work includes a "NOTICE" text file as part of its + distribution, then any Derivative Works that You distribute must + include a readable copy of the attribution notices contained + within such NOTICE file, excluding those notices that do not + pertain to any part of the Derivative Works, in at least one + of the following places: within a NOTICE text file distributed + as part of the Derivative Works; within the Source form or + documentation, if provided along with the Derivative Works; or, + within a display generated by the Derivative Works, if and + wherever such third-party notices normally appear. The contents + of the NOTICE file are for informational purposes only and + do not modify the License. You may add Your own attribution + notices within Derivative Works that You distribute, alongside + or as an addendum to the NOTICE text from the Work, provided + that such additional attribution notices cannot be construed + as modifying the License. 
+ + You may add Your own copyright statement to Your modifications and + may provide additional or different license terms and conditions + for use, reproduction, or distribution of Your modifications, or + for any such Derivative Works as a whole, provided Your use, + reproduction, and distribution of the Work otherwise complies with + the conditions stated in this License. + + 5. Submission of Contributions. Unless You explicitly state otherwise, + any Contribution intentionally submitted for inclusion in the Work + by You to the Licensor shall be under the terms and conditions of + this License, without any additional terms or conditions. + Notwithstanding the above, nothing herein shall supersede or modify + the terms of any separate license agreement you may have executed + with Licensor regarding such Contributions. + + 6. Trademarks. This License does not grant permission to use the trade + names, trademarks, service marks, or product names of the Licensor, + except as required for reasonable and customary use in describing the + origin of the Work and reproducing the content of the NOTICE file. + + 7. Disclaimer of Warranty. Unless required by applicable law or + agreed to in writing, Licensor provides the Work (and each + Contributor provides its Contributions) on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or + implied, including, without limitation, any warranties or conditions + of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A + PARTICULAR PURPOSE. You are solely responsible for determining the + appropriateness of using or redistributing the Work and assume any + risks associated with Your exercise of permissions under this License. + + 8. Limitation of Liability. 
In no event and under no legal theory, + whether in tort (including negligence), contract, or otherwise, + unless required by applicable law (such as deliberate and grossly + negligent acts) or agreed to in writing, shall any Contributor be + liable to You for damages, including any direct, indirect, special, + incidental, or consequential damages of any character arising as a + result of this License or out of the use or inability to use the + Work (including but not limited to damages for loss of goodwill, + work stoppage, computer failure or malfunction, or any and all + other commercial damages or losses), even if such Contributor + has been advised of the possibility of such damages. + + 9. Accepting Warranty or Additional Liability. While redistributing + the Work or Derivative Works thereof, You may choose to offer, + and charge a fee for, acceptance of support, warranty, indemnity, + or other liability obligations and/or rights consistent with this + License. However, in accepting such obligations, You may act only + on Your own behalf and on Your sole responsibility, not on behalf + of any other Contributor, and only if You agree to indemnify, + defend, and hold each Contributor harmless for any liability + incurred by, or claims asserted against, such Contributor by reason + of your accepting any such warranty or additional liability. + + END OF TERMS AND CONDITIONS + + APPENDIX: How to apply the Apache License to your work. + + To apply the Apache License to your work, attach the following + boilerplate notice, with the fields enclosed by brackets "{}" + replaced with your own identifying information. (Don't include + the brackets!) The text should be enclosed in the appropriate + comment syntax for the file format. We also recommend that a + file or class name and description of purpose be included on the + same "printed page" as the copyright notice for easier + identification within third-party archives. 
+ + Copyright {yyyy} {name of copyright owner} + + Licensed under the Apache License, Version 2.0 (the "License"); + you may not use this file except in compliance with the License. + You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + + Unless required by applicable law or agreed to in writing, software + distributed under the License is distributed on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + See the License for the specific language governing permissions and + limitations under the License. diff --git a/amplify/functions/fetchDocuments/node_modules/@aws-crypto/util/README.md b/amplify/functions/fetchDocuments/node_modules/@aws-crypto/util/README.md new file mode 100644 index 0000000..4c1c8aa --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@aws-crypto/util/README.md @@ -0,0 +1,16 @@ +# @aws-crypto/util + +Helper functions + +## Usage + +``` +import { convertToBuffer } from '@aws-crypto/util'; + +const data = "asdf"; +const utf8EncodedUint8Array = convertToBuffer(data); +``` + +## Test + +`npm test` diff --git a/amplify/functions/fetchDocuments/node_modules/@aws-crypto/util/build/main/convertToBuffer.d.ts b/amplify/functions/fetchDocuments/node_modules/@aws-crypto/util/build/main/convertToBuffer.d.ts new file mode 100644 index 0000000..697a5cd --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@aws-crypto/util/build/main/convertToBuffer.d.ts @@ -0,0 +1,2 @@ +import { SourceData } from "@aws-sdk/types"; +export declare function convertToBuffer(data: SourceData): Uint8Array; diff --git a/amplify/functions/fetchDocuments/node_modules/@aws-crypto/util/build/main/convertToBuffer.js b/amplify/functions/fetchDocuments/node_modules/@aws-crypto/util/build/main/convertToBuffer.js new file mode 100644 index 0000000..85bc8af --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@aws-crypto/util/build/main/convertToBuffer.js @@ -0,0 +1,24 @@ 
+"use strict"; +// Copyright Amazon.com Inc. or its affiliates. All Rights Reserved. +// SPDX-License-Identifier: Apache-2.0 +Object.defineProperty(exports, "__esModule", { value: true }); +exports.convertToBuffer = void 0; +var util_utf8_1 = require("@smithy/util-utf8"); +// Quick polyfill +var fromUtf8 = typeof Buffer !== "undefined" && Buffer.from + ? function (input) { return Buffer.from(input, "utf8"); } + : util_utf8_1.fromUtf8; +function convertToBuffer(data) { + // Already a Uint8, do nothing + if (data instanceof Uint8Array) + return data; + if (typeof data === "string") { + return fromUtf8(data); + } + if (ArrayBuffer.isView(data)) { + return new Uint8Array(data.buffer, data.byteOffset, data.byteLength / Uint8Array.BYTES_PER_ELEMENT); + } + return new Uint8Array(data); +} +exports.convertToBuffer = convertToBuffer; +//# sourceMappingURL=convertToBuffer.js.map \ No newline at end of file diff --git a/amplify/functions/fetchDocuments/node_modules/@aws-crypto/util/build/main/convertToBuffer.js.map b/amplify/functions/fetchDocuments/node_modules/@aws-crypto/util/build/main/convertToBuffer.js.map new file mode 100644 index 0000000..916d787 --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@aws-crypto/util/build/main/convertToBuffer.js.map @@ -0,0 +1 @@ 
+{"version":3,"file":"convertToBuffer.js","sourceRoot":"","sources":["../../src/convertToBuffer.ts"],"names":[],"mappings":";AAAA,oEAAoE;AACpE,sCAAsC;;;AAGtC,+CAAgE;AAEhE,iBAAiB;AACjB,IAAM,QAAQ,GACZ,OAAO,MAAM,KAAK,WAAW,IAAI,MAAM,CAAC,IAAI;IAC1C,CAAC,CAAC,UAAC,KAAa,IAAK,OAAA,MAAM,CAAC,IAAI,CAAC,KAAK,EAAE,MAAM,CAAC,EAA1B,CAA0B;IAC/C,CAAC,CAAC,oBAAe,CAAC;AAEtB,SAAgB,eAAe,CAAC,IAAgB;IAC9C,8BAA8B;IAC9B,IAAI,IAAI,YAAY,UAAU;QAAE,OAAO,IAAI,CAAC;IAE5C,IAAI,OAAO,IAAI,KAAK,QAAQ,EAAE;QAC5B,OAAO,QAAQ,CAAC,IAAI,CAAC,CAAC;KACvB;IAED,IAAI,WAAW,CAAC,MAAM,CAAC,IAAI,CAAC,EAAE;QAC5B,OAAO,IAAI,UAAU,CACnB,IAAI,CAAC,MAAM,EACX,IAAI,CAAC,UAAU,EACf,IAAI,CAAC,UAAU,GAAG,UAAU,CAAC,iBAAiB,CAC/C,CAAC;KACH;IAED,OAAO,IAAI,UAAU,CAAC,IAAI,CAAC,CAAC;AAC9B,CAAC;AAjBD,0CAiBC"} \ No newline at end of file diff --git a/amplify/functions/fetchDocuments/node_modules/@aws-crypto/util/build/main/index.d.ts b/amplify/functions/fetchDocuments/node_modules/@aws-crypto/util/build/main/index.d.ts new file mode 100644 index 0000000..783c73c --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@aws-crypto/util/build/main/index.d.ts @@ -0,0 +1,4 @@ +export { convertToBuffer } from "./convertToBuffer"; +export { isEmptyData } from "./isEmptyData"; +export { numToUint8 } from "./numToUint8"; +export { uint32ArrayFrom } from './uint32ArrayFrom'; diff --git a/amplify/functions/fetchDocuments/node_modules/@aws-crypto/util/build/main/index.js b/amplify/functions/fetchDocuments/node_modules/@aws-crypto/util/build/main/index.js new file mode 100644 index 0000000..94e1ca9 --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@aws-crypto/util/build/main/index.js @@ -0,0 +1,14 @@ +"use strict"; +// Copyright Amazon.com Inc. or its affiliates. All Rights Reserved. 
+// SPDX-License-Identifier: Apache-2.0 +Object.defineProperty(exports, "__esModule", { value: true }); +exports.uint32ArrayFrom = exports.numToUint8 = exports.isEmptyData = exports.convertToBuffer = void 0; +var convertToBuffer_1 = require("./convertToBuffer"); +Object.defineProperty(exports, "convertToBuffer", { enumerable: true, get: function () { return convertToBuffer_1.convertToBuffer; } }); +var isEmptyData_1 = require("./isEmptyData"); +Object.defineProperty(exports, "isEmptyData", { enumerable: true, get: function () { return isEmptyData_1.isEmptyData; } }); +var numToUint8_1 = require("./numToUint8"); +Object.defineProperty(exports, "numToUint8", { enumerable: true, get: function () { return numToUint8_1.numToUint8; } }); +var uint32ArrayFrom_1 = require("./uint32ArrayFrom"); +Object.defineProperty(exports, "uint32ArrayFrom", { enumerable: true, get: function () { return uint32ArrayFrom_1.uint32ArrayFrom; } }); +//# sourceMappingURL=index.js.map \ No newline at end of file diff --git a/amplify/functions/fetchDocuments/node_modules/@aws-crypto/util/build/main/index.js.map b/amplify/functions/fetchDocuments/node_modules/@aws-crypto/util/build/main/index.js.map new file mode 100644 index 0000000..a170172 --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@aws-crypto/util/build/main/index.js.map @@ -0,0 +1 @@ +{"version":3,"file":"index.js","sourceRoot":"","sources":["../../src/index.ts"],"names":[],"mappings":";AAAA,oEAAoE;AACpE,sCAAsC;;;AAEtC,qDAAoD;AAA3C,kHAAA,eAAe,OAAA;AACxB,6CAA4C;AAAnC,0GAAA,WAAW,OAAA;AACpB,2CAA0C;AAAjC,wGAAA,UAAU,OAAA;AACnB,qDAAkD;AAA1C,kHAAA,eAAe,OAAA"} \ No newline at end of file diff --git a/amplify/functions/fetchDocuments/node_modules/@aws-crypto/util/build/main/isEmptyData.d.ts b/amplify/functions/fetchDocuments/node_modules/@aws-crypto/util/build/main/isEmptyData.d.ts new file mode 100644 index 0000000..43ae4a7 --- /dev/null +++ 
b/amplify/functions/fetchDocuments/node_modules/@aws-crypto/util/build/main/isEmptyData.d.ts @@ -0,0 +1,2 @@ +import { SourceData } from "@aws-sdk/types"; +export declare function isEmptyData(data: SourceData): boolean; diff --git a/amplify/functions/fetchDocuments/node_modules/@aws-crypto/util/build/main/isEmptyData.js b/amplify/functions/fetchDocuments/node_modules/@aws-crypto/util/build/main/isEmptyData.js new file mode 100644 index 0000000..6af1e89 --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@aws-crypto/util/build/main/isEmptyData.js @@ -0,0 +1,13 @@ +"use strict"; +// Copyright Amazon.com Inc. or its affiliates. All Rights Reserved. +// SPDX-License-Identifier: Apache-2.0 +Object.defineProperty(exports, "__esModule", { value: true }); +exports.isEmptyData = void 0; +function isEmptyData(data) { + if (typeof data === "string") { + return data.length === 0; + } + return data.byteLength === 0; +} +exports.isEmptyData = isEmptyData; +//# sourceMappingURL=isEmptyData.js.map \ No newline at end of file diff --git a/amplify/functions/fetchDocuments/node_modules/@aws-crypto/util/build/main/isEmptyData.js.map b/amplify/functions/fetchDocuments/node_modules/@aws-crypto/util/build/main/isEmptyData.js.map new file mode 100644 index 0000000..e1eaa02 --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@aws-crypto/util/build/main/isEmptyData.js.map @@ -0,0 +1 @@ +{"version":3,"file":"isEmptyData.js","sourceRoot":"","sources":["../../src/isEmptyData.ts"],"names":[],"mappings":";AAAA,oEAAoE;AACpE,sCAAsC;;;AAItC,SAAgB,WAAW,CAAC,IAAgB;IAC1C,IAAI,OAAO,IAAI,KAAK,QAAQ,EAAE;QAC5B,OAAO,IAAI,CAAC,MAAM,KAAK,CAAC,CAAC;KAC1B;IAED,OAAO,IAAI,CAAC,UAAU,KAAK,CAAC,CAAC;AAC/B,CAAC;AAND,kCAMC"} \ No newline at end of file diff --git a/amplify/functions/fetchDocuments/node_modules/@aws-crypto/util/build/main/numToUint8.d.ts b/amplify/functions/fetchDocuments/node_modules/@aws-crypto/util/build/main/numToUint8.d.ts new file mode 100644 index 0000000..5b702e8 
--- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@aws-crypto/util/build/main/numToUint8.d.ts @@ -0,0 +1 @@ +export declare function numToUint8(num: number): Uint8Array; diff --git a/amplify/functions/fetchDocuments/node_modules/@aws-crypto/util/build/main/numToUint8.js b/amplify/functions/fetchDocuments/node_modules/@aws-crypto/util/build/main/numToUint8.js new file mode 100644 index 0000000..2f070e1 --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@aws-crypto/util/build/main/numToUint8.js @@ -0,0 +1,15 @@ +"use strict"; +// Copyright Amazon.com Inc. or its affiliates. All Rights Reserved. +// SPDX-License-Identifier: Apache-2.0 +Object.defineProperty(exports, "__esModule", { value: true }); +exports.numToUint8 = void 0; +function numToUint8(num) { + return new Uint8Array([ + (num & 0xff000000) >> 24, + (num & 0x00ff0000) >> 16, + (num & 0x0000ff00) >> 8, + num & 0x000000ff, + ]); +} +exports.numToUint8 = numToUint8; +//# sourceMappingURL=numToUint8.js.map \ No newline at end of file diff --git a/amplify/functions/fetchDocuments/node_modules/@aws-crypto/util/build/main/numToUint8.js.map b/amplify/functions/fetchDocuments/node_modules/@aws-crypto/util/build/main/numToUint8.js.map new file mode 100644 index 0000000..fea3aca --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@aws-crypto/util/build/main/numToUint8.js.map @@ -0,0 +1 @@ +{"version":3,"file":"numToUint8.js","sourceRoot":"","sources":["../../src/numToUint8.ts"],"names":[],"mappings":";AAAA,oEAAoE;AACpE,sCAAsC;;;AAEtC,SAAgB,UAAU,CAAC,GAAW;IACpC,OAAO,IAAI,UAAU,CAAC;QACpB,CAAC,GAAG,GAAG,UAAU,CAAC,IAAI,EAAE;QACxB,CAAC,GAAG,GAAG,UAAU,CAAC,IAAI,EAAE;QACxB,CAAC,GAAG,GAAG,UAAU,CAAC,IAAI,CAAC;QACvB,GAAG,GAAG,UAAU;KACjB,CAAC,CAAC;AACL,CAAC;AAPD,gCAOC"} \ No newline at end of file diff --git a/amplify/functions/fetchDocuments/node_modules/@aws-crypto/util/build/main/uint32ArrayFrom.d.ts 
b/amplify/functions/fetchDocuments/node_modules/@aws-crypto/util/build/main/uint32ArrayFrom.d.ts new file mode 100644 index 0000000..fea6607 --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@aws-crypto/util/build/main/uint32ArrayFrom.d.ts @@ -0,0 +1 @@ +export declare function uint32ArrayFrom(a_lookUpTable: Array): Uint32Array; diff --git a/amplify/functions/fetchDocuments/node_modules/@aws-crypto/util/build/main/uint32ArrayFrom.js b/amplify/functions/fetchDocuments/node_modules/@aws-crypto/util/build/main/uint32ArrayFrom.js new file mode 100644 index 0000000..226cdc3 --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@aws-crypto/util/build/main/uint32ArrayFrom.js @@ -0,0 +1,20 @@ +"use strict"; +// Copyright Amazon.com Inc. or its affiliates. All Rights Reserved. +// SPDX-License-Identifier: Apache-2.0 +Object.defineProperty(exports, "__esModule", { value: true }); +exports.uint32ArrayFrom = void 0; +// IE 11 does not support Array.from, so we do it manually +function uint32ArrayFrom(a_lookUpTable) { + if (!Uint32Array.from) { + var return_array = new Uint32Array(a_lookUpTable.length); + var a_index = 0; + while (a_index < a_lookUpTable.length) { + return_array[a_index] = a_lookUpTable[a_index]; + a_index += 1; + } + return return_array; + } + return Uint32Array.from(a_lookUpTable); +} +exports.uint32ArrayFrom = uint32ArrayFrom; +//# sourceMappingURL=uint32ArrayFrom.js.map \ No newline at end of file diff --git a/amplify/functions/fetchDocuments/node_modules/@aws-crypto/util/build/main/uint32ArrayFrom.js.map b/amplify/functions/fetchDocuments/node_modules/@aws-crypto/util/build/main/uint32ArrayFrom.js.map new file mode 100644 index 0000000..fe016e1 --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@aws-crypto/util/build/main/uint32ArrayFrom.js.map @@ -0,0 +1 @@ 
+{"version":3,"file":"uint32ArrayFrom.js","sourceRoot":"","sources":["../../src/uint32ArrayFrom.ts"],"names":[],"mappings":";AAAA,oEAAoE;AACpE,sCAAsC;;;AAEtC,0DAA0D;AAC1D,SAAgB,eAAe,CAAC,aAA4B;IAC1D,IAAI,CAAC,WAAW,CAAC,IAAI,EAAE;QACrB,IAAM,YAAY,GAAG,IAAI,WAAW,CAAC,aAAa,CAAC,MAAM,CAAC,CAAA;QAC1D,IAAI,OAAO,GAAG,CAAC,CAAA;QACf,OAAO,OAAO,GAAG,aAAa,CAAC,MAAM,EAAE;YACrC,YAAY,CAAC,OAAO,CAAC,GAAG,aAAa,CAAC,OAAO,CAAC,CAAA;YAC9C,OAAO,IAAI,CAAC,CAAA;SACb;QACD,OAAO,YAAY,CAAA;KACpB;IACD,OAAO,WAAW,CAAC,IAAI,CAAC,aAAa,CAAC,CAAA;AACxC,CAAC;AAXD,0CAWC"} \ No newline at end of file diff --git a/amplify/functions/fetchDocuments/node_modules/@aws-crypto/util/build/module/convertToBuffer.d.ts b/amplify/functions/fetchDocuments/node_modules/@aws-crypto/util/build/module/convertToBuffer.d.ts new file mode 100644 index 0000000..697a5cd --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@aws-crypto/util/build/module/convertToBuffer.d.ts @@ -0,0 +1,2 @@ +import { SourceData } from "@aws-sdk/types"; +export declare function convertToBuffer(data: SourceData): Uint8Array; diff --git a/amplify/functions/fetchDocuments/node_modules/@aws-crypto/util/build/module/convertToBuffer.js b/amplify/functions/fetchDocuments/node_modules/@aws-crypto/util/build/module/convertToBuffer.js new file mode 100644 index 0000000..c700d1e --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@aws-crypto/util/build/module/convertToBuffer.js @@ -0,0 +1,20 @@ +// Copyright Amazon.com Inc. or its affiliates. All Rights Reserved. +// SPDX-License-Identifier: Apache-2.0 +import { fromUtf8 as fromUtf8Browser } from "@smithy/util-utf8"; +// Quick polyfill +var fromUtf8 = typeof Buffer !== "undefined" && Buffer.from + ? 
function (input) { return Buffer.from(input, "utf8"); } + : fromUtf8Browser; +export function convertToBuffer(data) { + // Already a Uint8, do nothing + if (data instanceof Uint8Array) + return data; + if (typeof data === "string") { + return fromUtf8(data); + } + if (ArrayBuffer.isView(data)) { + return new Uint8Array(data.buffer, data.byteOffset, data.byteLength / Uint8Array.BYTES_PER_ELEMENT); + } + return new Uint8Array(data); +} +//# sourceMappingURL=convertToBuffer.js.map \ No newline at end of file diff --git a/amplify/functions/fetchDocuments/node_modules/@aws-crypto/util/build/module/convertToBuffer.js.map b/amplify/functions/fetchDocuments/node_modules/@aws-crypto/util/build/module/convertToBuffer.js.map new file mode 100644 index 0000000..92694a4 --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@aws-crypto/util/build/module/convertToBuffer.js.map @@ -0,0 +1 @@ +{"version":3,"file":"convertToBuffer.js","sourceRoot":"","sources":["../../src/convertToBuffer.ts"],"names":[],"mappings":"AAAA,oEAAoE;AACpE,sCAAsC;AAGtC,OAAO,EAAE,QAAQ,IAAI,eAAe,EAAE,MAAM,mBAAmB,CAAC;AAEhE,iBAAiB;AACjB,IAAM,QAAQ,GACZ,OAAO,MAAM,KAAK,WAAW,IAAI,MAAM,CAAC,IAAI;IAC1C,CAAC,CAAC,UAAC,KAAa,IAAK,OAAA,MAAM,CAAC,IAAI,CAAC,KAAK,EAAE,MAAM,CAAC,EAA1B,CAA0B;IAC/C,CAAC,CAAC,eAAe,CAAC;AAEtB,MAAM,UAAU,eAAe,CAAC,IAAgB;IAC9C,8BAA8B;IAC9B,IAAI,IAAI,YAAY,UAAU;QAAE,OAAO,IAAI,CAAC;IAE5C,IAAI,OAAO,IAAI,KAAK,QAAQ,EAAE;QAC5B,OAAO,QAAQ,CAAC,IAAI,CAAC,CAAC;KACvB;IAED,IAAI,WAAW,CAAC,MAAM,CAAC,IAAI,CAAC,EAAE;QAC5B,OAAO,IAAI,UAAU,CACnB,IAAI,CAAC,MAAM,EACX,IAAI,CAAC,UAAU,EACf,IAAI,CAAC,UAAU,GAAG,UAAU,CAAC,iBAAiB,CAC/C,CAAC;KACH;IAED,OAAO,IAAI,UAAU,CAAC,IAAI,CAAC,CAAC;AAC9B,CAAC"} \ No newline at end of file diff --git a/amplify/functions/fetchDocuments/node_modules/@aws-crypto/util/build/module/index.d.ts b/amplify/functions/fetchDocuments/node_modules/@aws-crypto/util/build/module/index.d.ts new file mode 100644 index 0000000..783c73c --- /dev/null +++ 
b/amplify/functions/fetchDocuments/node_modules/@aws-crypto/util/build/module/index.d.ts @@ -0,0 +1,4 @@ +export { convertToBuffer } from "./convertToBuffer"; +export { isEmptyData } from "./isEmptyData"; +export { numToUint8 } from "./numToUint8"; +export { uint32ArrayFrom } from './uint32ArrayFrom'; diff --git a/amplify/functions/fetchDocuments/node_modules/@aws-crypto/util/build/module/index.js b/amplify/functions/fetchDocuments/node_modules/@aws-crypto/util/build/module/index.js new file mode 100644 index 0000000..077e8b6 --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@aws-crypto/util/build/module/index.js @@ -0,0 +1,7 @@ +// Copyright Amazon.com Inc. or its affiliates. All Rights Reserved. +// SPDX-License-Identifier: Apache-2.0 +export { convertToBuffer } from "./convertToBuffer"; +export { isEmptyData } from "./isEmptyData"; +export { numToUint8 } from "./numToUint8"; +export { uint32ArrayFrom } from './uint32ArrayFrom'; +//# sourceMappingURL=index.js.map \ No newline at end of file diff --git a/amplify/functions/fetchDocuments/node_modules/@aws-crypto/util/build/module/index.js.map b/amplify/functions/fetchDocuments/node_modules/@aws-crypto/util/build/module/index.js.map new file mode 100644 index 0000000..4ddb12d --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@aws-crypto/util/build/module/index.js.map @@ -0,0 +1 @@ +{"version":3,"file":"index.js","sourceRoot":"","sources":["../../src/index.ts"],"names":[],"mappings":"AAAA,oEAAoE;AACpE,sCAAsC;AAEtC,OAAO,EAAE,eAAe,EAAE,MAAM,mBAAmB,CAAC;AACpD,OAAO,EAAE,WAAW,EAAE,MAAM,eAAe,CAAC;AAC5C,OAAO,EAAE,UAAU,EAAE,MAAM,cAAc,CAAC;AAC1C,OAAO,EAAC,eAAe,EAAC,MAAM,mBAAmB,CAAC"} \ No newline at end of file diff --git a/amplify/functions/fetchDocuments/node_modules/@aws-crypto/util/build/module/isEmptyData.d.ts b/amplify/functions/fetchDocuments/node_modules/@aws-crypto/util/build/module/isEmptyData.d.ts new file mode 100644 index 0000000..43ae4a7 --- /dev/null +++ 
b/amplify/functions/fetchDocuments/node_modules/@aws-crypto/util/build/module/isEmptyData.d.ts @@ -0,0 +1,2 @@ +import { SourceData } from "@aws-sdk/types"; +export declare function isEmptyData(data: SourceData): boolean; diff --git a/amplify/functions/fetchDocuments/node_modules/@aws-crypto/util/build/module/isEmptyData.js b/amplify/functions/fetchDocuments/node_modules/@aws-crypto/util/build/module/isEmptyData.js new file mode 100644 index 0000000..13841c7 --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@aws-crypto/util/build/module/isEmptyData.js @@ -0,0 +1,9 @@ +// Copyright Amazon.com Inc. or its affiliates. All Rights Reserved. +// SPDX-License-Identifier: Apache-2.0 +export function isEmptyData(data) { + if (typeof data === "string") { + return data.length === 0; + } + return data.byteLength === 0; +} +//# sourceMappingURL=isEmptyData.js.map \ No newline at end of file diff --git a/amplify/functions/fetchDocuments/node_modules/@aws-crypto/util/build/module/isEmptyData.js.map b/amplify/functions/fetchDocuments/node_modules/@aws-crypto/util/build/module/isEmptyData.js.map new file mode 100644 index 0000000..fe0fa02 --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@aws-crypto/util/build/module/isEmptyData.js.map @@ -0,0 +1 @@ +{"version":3,"file":"isEmptyData.js","sourceRoot":"","sources":["../../src/isEmptyData.ts"],"names":[],"mappings":"AAAA,oEAAoE;AACpE,sCAAsC;AAItC,MAAM,UAAU,WAAW,CAAC,IAAgB;IAC1C,IAAI,OAAO,IAAI,KAAK,QAAQ,EAAE;QAC5B,OAAO,IAAI,CAAC,MAAM,KAAK,CAAC,CAAC;KAC1B;IAED,OAAO,IAAI,CAAC,UAAU,KAAK,CAAC,CAAC;AAC/B,CAAC"} \ No newline at end of file diff --git a/amplify/functions/fetchDocuments/node_modules/@aws-crypto/util/build/module/numToUint8.d.ts b/amplify/functions/fetchDocuments/node_modules/@aws-crypto/util/build/module/numToUint8.d.ts new file mode 100644 index 0000000..5b702e8 --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@aws-crypto/util/build/module/numToUint8.d.ts @@ -0,0 +1 @@ +export 
declare function numToUint8(num: number): Uint8Array; diff --git a/amplify/functions/fetchDocuments/node_modules/@aws-crypto/util/build/module/numToUint8.js b/amplify/functions/fetchDocuments/node_modules/@aws-crypto/util/build/module/numToUint8.js new file mode 100644 index 0000000..0ca6e47 --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@aws-crypto/util/build/module/numToUint8.js @@ -0,0 +1,11 @@ +// Copyright Amazon.com Inc. or its affiliates. All Rights Reserved. +// SPDX-License-Identifier: Apache-2.0 +export function numToUint8(num) { + return new Uint8Array([ + (num & 0xff000000) >> 24, + (num & 0x00ff0000) >> 16, + (num & 0x0000ff00) >> 8, + num & 0x000000ff, + ]); +} +//# sourceMappingURL=numToUint8.js.map \ No newline at end of file diff --git a/amplify/functions/fetchDocuments/node_modules/@aws-crypto/util/build/module/numToUint8.js.map b/amplify/functions/fetchDocuments/node_modules/@aws-crypto/util/build/module/numToUint8.js.map new file mode 100644 index 0000000..ac53e33 --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@aws-crypto/util/build/module/numToUint8.js.map @@ -0,0 +1 @@ +{"version":3,"file":"numToUint8.js","sourceRoot":"","sources":["../../src/numToUint8.ts"],"names":[],"mappings":"AAAA,oEAAoE;AACpE,sCAAsC;AAEtC,MAAM,UAAU,UAAU,CAAC,GAAW;IACpC,OAAO,IAAI,UAAU,CAAC;QACpB,CAAC,GAAG,GAAG,UAAU,CAAC,IAAI,EAAE;QACxB,CAAC,GAAG,GAAG,UAAU,CAAC,IAAI,EAAE;QACxB,CAAC,GAAG,GAAG,UAAU,CAAC,IAAI,CAAC;QACvB,GAAG,GAAG,UAAU;KACjB,CAAC,CAAC;AACL,CAAC"} \ No newline at end of file diff --git a/amplify/functions/fetchDocuments/node_modules/@aws-crypto/util/build/module/uint32ArrayFrom.d.ts b/amplify/functions/fetchDocuments/node_modules/@aws-crypto/util/build/module/uint32ArrayFrom.d.ts new file mode 100644 index 0000000..fea6607 --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@aws-crypto/util/build/module/uint32ArrayFrom.d.ts @@ -0,0 +1 @@ +export declare function uint32ArrayFrom(a_lookUpTable: Array): 
Uint32Array; diff --git a/amplify/functions/fetchDocuments/node_modules/@aws-crypto/util/build/module/uint32ArrayFrom.js b/amplify/functions/fetchDocuments/node_modules/@aws-crypto/util/build/module/uint32ArrayFrom.js new file mode 100644 index 0000000..c69435e --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@aws-crypto/util/build/module/uint32ArrayFrom.js @@ -0,0 +1,16 @@ +// Copyright Amazon.com Inc. or its affiliates. All Rights Reserved. +// SPDX-License-Identifier: Apache-2.0 +// IE 11 does not support Array.from, so we do it manually +export function uint32ArrayFrom(a_lookUpTable) { + if (!Uint32Array.from) { + var return_array = new Uint32Array(a_lookUpTable.length); + var a_index = 0; + while (a_index < a_lookUpTable.length) { + return_array[a_index] = a_lookUpTable[a_index]; + a_index += 1; + } + return return_array; + } + return Uint32Array.from(a_lookUpTable); +} +//# sourceMappingURL=uint32ArrayFrom.js.map \ No newline at end of file diff --git a/amplify/functions/fetchDocuments/node_modules/@aws-crypto/util/build/module/uint32ArrayFrom.js.map b/amplify/functions/fetchDocuments/node_modules/@aws-crypto/util/build/module/uint32ArrayFrom.js.map new file mode 100644 index 0000000..7384b0a --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@aws-crypto/util/build/module/uint32ArrayFrom.js.map @@ -0,0 +1 @@ +{"version":3,"file":"uint32ArrayFrom.js","sourceRoot":"","sources":["../../src/uint32ArrayFrom.ts"],"names":[],"mappings":"AAAA,oEAAoE;AACpE,sCAAsC;AAEtC,0DAA0D;AAC1D,MAAM,UAAU,eAAe,CAAC,aAA4B;IAC1D,IAAI,CAAC,WAAW,CAAC,IAAI,EAAE;QACrB,IAAM,YAAY,GAAG,IAAI,WAAW,CAAC,aAAa,CAAC,MAAM,CAAC,CAAA;QAC1D,IAAI,OAAO,GAAG,CAAC,CAAA;QACf,OAAO,OAAO,GAAG,aAAa,CAAC,MAAM,EAAE;YACrC,YAAY,CAAC,OAAO,CAAC,GAAG,aAAa,CAAC,OAAO,CAAC,CAAA;YAC9C,OAAO,IAAI,CAAC,CAAA;SACb;QACD,OAAO,YAAY,CAAA;KACpB;IACD,OAAO,WAAW,CAAC,IAAI,CAAC,aAAa,CAAC,CAAA;AACxC,CAAC"} \ No newline at end of file diff --git 
a/amplify/functions/fetchDocuments/node_modules/@aws-crypto/util/node_modules/@smithy/is-array-buffer/LICENSE b/amplify/functions/fetchDocuments/node_modules/@aws-crypto/util/node_modules/@smithy/is-array-buffer/LICENSE new file mode 100644 index 0000000..7b6491b --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@aws-crypto/util/node_modules/@smithy/is-array-buffer/LICENSE @@ -0,0 +1,201 @@ +Apache License + Version 2.0, January 2004 + http://www.apache.org/licenses/ + + TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION + + 1. Definitions. + + "License" shall mean the terms and conditions for use, reproduction, + and distribution as defined by Sections 1 through 9 of this document. + + "Licensor" shall mean the copyright owner or entity authorized by + the copyright owner that is granting the License. + + "Legal Entity" shall mean the union of the acting entity and all + other entities that control, are controlled by, or are under common + control with that entity. For the purposes of this definition, + "control" means (i) the power, direct or indirect, to cause the + direction or management of such entity, whether by contract or + otherwise, or (ii) ownership of fifty percent (50%) or more of the + outstanding shares, or (iii) beneficial ownership of such entity. + + "You" (or "Your") shall mean an individual or Legal Entity + exercising permissions granted by this License. + + "Source" form shall mean the preferred form for making modifications, + including but not limited to software source code, documentation + source, and configuration files. + + "Object" form shall mean any form resulting from mechanical + transformation or translation of a Source form, including but + not limited to compiled object code, generated documentation, + and conversions to other media types. 
+ + "Work" shall mean the work of authorship, whether in Source or + Object form, made available under the License, as indicated by a + copyright notice that is included in or attached to the work + (an example is provided in the Appendix below). + + "Derivative Works" shall mean any work, whether in Source or Object + form, that is based on (or derived from) the Work and for which the + editorial revisions, annotations, elaborations, or other modifications + represent, as a whole, an original work of authorship. For the purposes + of this License, Derivative Works shall not include works that remain + separable from, or merely link (or bind by name) to the interfaces of, + the Work and Derivative Works thereof. + + "Contribution" shall mean any work of authorship, including + the original version of the Work and any modifications or additions + to that Work or Derivative Works thereof, that is intentionally + submitted to Licensor for inclusion in the Work by the copyright owner + or by an individual or Legal Entity authorized to submit on behalf of + the copyright owner. For the purposes of this definition, "submitted" + means any form of electronic, verbal, or written communication sent + to the Licensor or its representatives, including but not limited to + communication on electronic mailing lists, source code control systems, + and issue tracking systems that are managed by, or on behalf of, the + Licensor for the purpose of discussing and improving the Work, but + excluding communication that is conspicuously marked or otherwise + designated in writing by the copyright owner as "Not a Contribution." + + "Contributor" shall mean Licensor and any individual or Legal Entity + on behalf of whom a Contribution has been received by Licensor and + subsequently incorporated within the Work. + + 2. Grant of Copyright License. 
Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + copyright license to reproduce, prepare Derivative Works of, + publicly display, publicly perform, sublicense, and distribute the + Work and such Derivative Works in Source or Object form. + + 3. Grant of Patent License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + (except as stated in this section) patent license to make, have made, + use, offer to sell, sell, import, and otherwise transfer the Work, + where such license applies only to those patent claims licensable + by such Contributor that are necessarily infringed by their + Contribution(s) alone or by combination of their Contribution(s) + with the Work to which such Contribution(s) was submitted. If You + institute patent litigation against any entity (including a + cross-claim or counterclaim in a lawsuit) alleging that the Work + or a Contribution incorporated within the Work constitutes direct + or contributory patent infringement, then any patent licenses + granted to You under this License for that Work shall terminate + as of the date such litigation is filed. + + 4. Redistribution. 
You may reproduce and distribute copies of the + Work or Derivative Works thereof in any medium, with or without + modifications, and in Source or Object form, provided that You + meet the following conditions: + + (a) You must give any other recipients of the Work or + Derivative Works a copy of this License; and + + (b) You must cause any modified files to carry prominent notices + stating that You changed the files; and + + (c) You must retain, in the Source form of any Derivative Works + that You distribute, all copyright, patent, trademark, and + attribution notices from the Source form of the Work, + excluding those notices that do not pertain to any part of + the Derivative Works; and + + (d) If the Work includes a "NOTICE" text file as part of its + distribution, then any Derivative Works that You distribute must + include a readable copy of the attribution notices contained + within such NOTICE file, excluding those notices that do not + pertain to any part of the Derivative Works, in at least one + of the following places: within a NOTICE text file distributed + as part of the Derivative Works; within the Source form or + documentation, if provided along with the Derivative Works; or, + within a display generated by the Derivative Works, if and + wherever such third-party notices normally appear. The contents + of the NOTICE file are for informational purposes only and + do not modify the License. You may add Your own attribution + notices within Derivative Works that You distribute, alongside + or as an addendum to the NOTICE text from the Work, provided + that such additional attribution notices cannot be construed + as modifying the License. 
+ + You may add Your own copyright statement to Your modifications and + may provide additional or different license terms and conditions + for use, reproduction, or distribution of Your modifications, or + for any such Derivative Works as a whole, provided Your use, + reproduction, and distribution of the Work otherwise complies with + the conditions stated in this License. + + 5. Submission of Contributions. Unless You explicitly state otherwise, + any Contribution intentionally submitted for inclusion in the Work + by You to the Licensor shall be under the terms and conditions of + this License, without any additional terms or conditions. + Notwithstanding the above, nothing herein shall supersede or modify + the terms of any separate license agreement you may have executed + with Licensor regarding such Contributions. + + 6. Trademarks. This License does not grant permission to use the trade + names, trademarks, service marks, or product names of the Licensor, + except as required for reasonable and customary use in describing the + origin of the Work and reproducing the content of the NOTICE file. + + 7. Disclaimer of Warranty. Unless required by applicable law or + agreed to in writing, Licensor provides the Work (and each + Contributor provides its Contributions) on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or + implied, including, without limitation, any warranties or conditions + of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A + PARTICULAR PURPOSE. You are solely responsible for determining the + appropriateness of using or redistributing the Work and assume any + risks associated with Your exercise of permissions under this License. + + 8. Limitation of Liability. 
In no event and under no legal theory, + whether in tort (including negligence), contract, or otherwise, + unless required by applicable law (such as deliberate and grossly + negligent acts) or agreed to in writing, shall any Contributor be + liable to You for damages, including any direct, indirect, special, + incidental, or consequential damages of any character arising as a + result of this License or out of the use or inability to use the + Work (including but not limited to damages for loss of goodwill, + work stoppage, computer failure or malfunction, or any and all + other commercial damages or losses), even if such Contributor + has been advised of the possibility of such damages. + + 9. Accepting Warranty or Additional Liability. While redistributing + the Work or Derivative Works thereof, You may choose to offer, + and charge a fee for, acceptance of support, warranty, indemnity, + or other liability obligations and/or rights consistent with this + License. However, in accepting such obligations, You may act only + on Your own behalf and on Your sole responsibility, not on behalf + of any other Contributor, and only if You agree to indemnify, + defend, and hold each Contributor harmless for any liability + incurred by, or claims asserted against, such Contributor by reason + of your accepting any such warranty or additional liability. + + END OF TERMS AND CONDITIONS + + APPENDIX: How to apply the Apache License to your work. + + To apply the Apache License to your work, attach the following + boilerplate notice, with the fields enclosed by brackets "{}" + replaced with your own identifying information. (Don't include + the brackets!) The text should be enclosed in the appropriate + comment syntax for the file format. We also recommend that a + file or class name and description of purpose be included on the + same "printed page" as the copyright notice for easier + identification within third-party archives. + + Copyright 2018-2020 Amazon.com, Inc. 
or its affiliates. All Rights Reserved. + + Licensed under the Apache License, Version 2.0 (the "License"); + you may not use this file except in compliance with the License. + You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + + Unless required by applicable law or agreed to in writing, software + distributed under the License is distributed on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + See the License for the specific language governing permissions and + limitations under the License. \ No newline at end of file diff --git a/amplify/functions/fetchDocuments/node_modules/@aws-crypto/util/node_modules/@smithy/is-array-buffer/README.md b/amplify/functions/fetchDocuments/node_modules/@aws-crypto/util/node_modules/@smithy/is-array-buffer/README.md new file mode 100644 index 0000000..31853f2 --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@aws-crypto/util/node_modules/@smithy/is-array-buffer/README.md @@ -0,0 +1,10 @@ +# @smithy/is-array-buffer + +[![NPM version](https://img.shields.io/npm/v/@smithy/is-array-buffer/latest.svg)](https://www.npmjs.com/package/@smithy/is-array-buffer) +[![NPM downloads](https://img.shields.io/npm/dm/@smithy/is-array-buffer.svg)](https://www.npmjs.com/package/@smithy/is-array-buffer) + +> An internal package + +## Usage + +You probably shouldn't, at least directly. 
diff --git a/amplify/functions/fetchDocuments/node_modules/@aws-crypto/util/node_modules/@smithy/is-array-buffer/dist-cjs/index.js b/amplify/functions/fetchDocuments/node_modules/@aws-crypto/util/node_modules/@smithy/is-array-buffer/dist-cjs/index.js new file mode 100644 index 0000000..5d792e7 --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@aws-crypto/util/node_modules/@smithy/is-array-buffer/dist-cjs/index.js @@ -0,0 +1,32 @@ +var __defProp = Object.defineProperty; +var __getOwnPropDesc = Object.getOwnPropertyDescriptor; +var __getOwnPropNames = Object.getOwnPropertyNames; +var __hasOwnProp = Object.prototype.hasOwnProperty; +var __name = (target, value) => __defProp(target, "name", { value, configurable: true }); +var __export = (target, all) => { + for (var name in all) + __defProp(target, name, { get: all[name], enumerable: true }); +}; +var __copyProps = (to, from, except, desc) => { + if (from && typeof from === "object" || typeof from === "function") { + for (let key of __getOwnPropNames(from)) + if (!__hasOwnProp.call(to, key) && key !== except) + __defProp(to, key, { get: () => from[key], enumerable: !(desc = __getOwnPropDesc(from, key)) || desc.enumerable }); + } + return to; +}; +var __toCommonJS = (mod) => __copyProps(__defProp({}, "__esModule", { value: true }), mod); + +// src/index.ts +var src_exports = {}; +__export(src_exports, { + isArrayBuffer: () => isArrayBuffer +}); +module.exports = __toCommonJS(src_exports); +var isArrayBuffer = /* @__PURE__ */ __name((arg) => typeof ArrayBuffer === "function" && arg instanceof ArrayBuffer || Object.prototype.toString.call(arg) === "[object ArrayBuffer]", "isArrayBuffer"); +// Annotate the CommonJS export names for ESM import in node: + +0 && (module.exports = { + isArrayBuffer +}); + diff --git a/amplify/functions/fetchDocuments/node_modules/@aws-crypto/util/node_modules/@smithy/is-array-buffer/dist-es/index.js 
b/amplify/functions/fetchDocuments/node_modules/@aws-crypto/util/node_modules/@smithy/is-array-buffer/dist-es/index.js new file mode 100644 index 0000000..8096cca --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@aws-crypto/util/node_modules/@smithy/is-array-buffer/dist-es/index.js @@ -0,0 +1,2 @@ +export const isArrayBuffer = (arg) => (typeof ArrayBuffer === "function" && arg instanceof ArrayBuffer) || + Object.prototype.toString.call(arg) === "[object ArrayBuffer]"; diff --git a/amplify/functions/fetchDocuments/node_modules/@aws-crypto/util/node_modules/@smithy/is-array-buffer/dist-types/index.d.ts b/amplify/functions/fetchDocuments/node_modules/@aws-crypto/util/node_modules/@smithy/is-array-buffer/dist-types/index.d.ts new file mode 100644 index 0000000..64f452e --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@aws-crypto/util/node_modules/@smithy/is-array-buffer/dist-types/index.d.ts @@ -0,0 +1,4 @@ +/** + * @internal + */ +export declare const isArrayBuffer: (arg: any) => arg is ArrayBuffer; diff --git a/amplify/functions/fetchDocuments/node_modules/@aws-crypto/util/node_modules/@smithy/is-array-buffer/dist-types/ts3.4/index.d.ts b/amplify/functions/fetchDocuments/node_modules/@aws-crypto/util/node_modules/@smithy/is-array-buffer/dist-types/ts3.4/index.d.ts new file mode 100644 index 0000000..ca8fd6b --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@aws-crypto/util/node_modules/@smithy/is-array-buffer/dist-types/ts3.4/index.d.ts @@ -0,0 +1,4 @@ +/** + * @internal + */ +export declare const isArrayBuffer: (arg: any) => arg is ArrayBuffer; diff --git a/amplify/functions/fetchDocuments/node_modules/@aws-crypto/util/node_modules/@smithy/is-array-buffer/package.json b/amplify/functions/fetchDocuments/node_modules/@aws-crypto/util/node_modules/@smithy/is-array-buffer/package.json new file mode 100644 index 0000000..ed8affc --- /dev/null +++ 
b/amplify/functions/fetchDocuments/node_modules/@aws-crypto/util/node_modules/@smithy/is-array-buffer/package.json @@ -0,0 +1,60 @@ +{ + "name": "@smithy/is-array-buffer", + "version": "2.2.0", + "description": "Provides a function for detecting if an argument is an ArrayBuffer", + "scripts": { + "build": "concurrently 'yarn:build:cjs' 'yarn:build:es' 'yarn:build:types && yarn build:types:downlevel'", + "build:cjs": "node ../../scripts/inline is-array-buffer", + "build:es": "yarn g:tsc -p tsconfig.es.json", + "build:types": "yarn g:tsc -p tsconfig.types.json", + "build:types:downlevel": "downlevel-dts dist-types dist-types/ts3.4", + "stage-release": "rimraf ./.release && yarn pack && mkdir ./.release && tar zxvf ./package.tgz --directory ./.release && rm ./package.tgz", + "clean": "rimraf ./dist-* && rimraf *.tsbuildinfo || exit 0", + "lint": "eslint -c ../../.eslintrc.js \"src/**/*.ts\"", + "format": "prettier --config ../../prettier.config.js --ignore-path ../.prettierignore --write \"**/*.{ts,md,json}\"", + "test": "yarn g:jest" + }, + "author": { + "name": "AWS SDK for JavaScript Team", + "url": "https://aws.amazon.com/javascript/" + }, + "license": "Apache-2.0", + "main": "./dist-cjs/index.js", + "module": "./dist-es/index.js", + "types": "./dist-types/index.d.ts", + "dependencies": { + "tslib": "^2.6.2" + }, + "engines": { + "node": ">=14.0.0" + }, + "typesVersions": { + "<4.0": { + "dist-types/*": [ + "dist-types/ts3.4/*" + ] + } + }, + "files": [ + "dist-*/**" + ], + "homepage": "https://github.com/awslabs/smithy-typescript/tree/main/packages/is-array-buffer", + "repository": { + "type": "git", + "url": "https://github.com/awslabs/smithy-typescript.git", + "directory": "packages/is-array-buffer" + }, + "devDependencies": { + "@tsconfig/recommended": "1.0.1", + "concurrently": "7.0.0", + "downlevel-dts": "0.10.1", + "rimraf": "3.0.2", + "typedoc": "0.23.23" + }, + "typedoc": { + "entryPoint": "src/index.ts" + }, + "publishConfig": { + "directory": 
".release/package" + } +} \ No newline at end of file diff --git a/amplify/functions/fetchDocuments/node_modules/@aws-crypto/util/node_modules/@smithy/util-buffer-from/LICENSE b/amplify/functions/fetchDocuments/node_modules/@aws-crypto/util/node_modules/@smithy/util-buffer-from/LICENSE new file mode 100644 index 0000000..7b6491b --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@aws-crypto/util/node_modules/@smithy/util-buffer-from/LICENSE @@ -0,0 +1,201 @@ +Apache License + Version 2.0, January 2004 + http://www.apache.org/licenses/ + + TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION + + 1. Definitions. + + "License" shall mean the terms and conditions for use, reproduction, + and distribution as defined by Sections 1 through 9 of this document. + + "Licensor" shall mean the copyright owner or entity authorized by + the copyright owner that is granting the License. + + "Legal Entity" shall mean the union of the acting entity and all + other entities that control, are controlled by, or are under common + control with that entity. For the purposes of this definition, + "control" means (i) the power, direct or indirect, to cause the + direction or management of such entity, whether by contract or + otherwise, or (ii) ownership of fifty percent (50%) or more of the + outstanding shares, or (iii) beneficial ownership of such entity. + + "You" (or "Your") shall mean an individual or Legal Entity + exercising permissions granted by this License. + + "Source" form shall mean the preferred form for making modifications, + including but not limited to software source code, documentation + source, and configuration files. + + "Object" form shall mean any form resulting from mechanical + transformation or translation of a Source form, including but + not limited to compiled object code, generated documentation, + and conversions to other media types. 
+ + "Work" shall mean the work of authorship, whether in Source or + Object form, made available under the License, as indicated by a + copyright notice that is included in or attached to the work + (an example is provided in the Appendix below). + + "Derivative Works" shall mean any work, whether in Source or Object + form, that is based on (or derived from) the Work and for which the + editorial revisions, annotations, elaborations, or other modifications + represent, as a whole, an original work of authorship. For the purposes + of this License, Derivative Works shall not include works that remain + separable from, or merely link (or bind by name) to the interfaces of, + the Work and Derivative Works thereof. + + "Contribution" shall mean any work of authorship, including + the original version of the Work and any modifications or additions + to that Work or Derivative Works thereof, that is intentionally + submitted to Licensor for inclusion in the Work by the copyright owner + or by an individual or Legal Entity authorized to submit on behalf of + the copyright owner. For the purposes of this definition, "submitted" + means any form of electronic, verbal, or written communication sent + to the Licensor or its representatives, including but not limited to + communication on electronic mailing lists, source code control systems, + and issue tracking systems that are managed by, or on behalf of, the + Licensor for the purpose of discussing and improving the Work, but + excluding communication that is conspicuously marked or otherwise + designated in writing by the copyright owner as "Not a Contribution." + + "Contributor" shall mean Licensor and any individual or Legal Entity + on behalf of whom a Contribution has been received by Licensor and + subsequently incorporated within the Work. + + 2. Grant of Copyright License. 
Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + copyright license to reproduce, prepare Derivative Works of, + publicly display, publicly perform, sublicense, and distribute the + Work and such Derivative Works in Source or Object form. + + 3. Grant of Patent License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + (except as stated in this section) patent license to make, have made, + use, offer to sell, sell, import, and otherwise transfer the Work, + where such license applies only to those patent claims licensable + by such Contributor that are necessarily infringed by their + Contribution(s) alone or by combination of their Contribution(s) + with the Work to which such Contribution(s) was submitted. If You + institute patent litigation against any entity (including a + cross-claim or counterclaim in a lawsuit) alleging that the Work + or a Contribution incorporated within the Work constitutes direct + or contributory patent infringement, then any patent licenses + granted to You under this License for that Work shall terminate + as of the date such litigation is filed. + + 4. Redistribution. 
You may reproduce and distribute copies of the + Work or Derivative Works thereof in any medium, with or without + modifications, and in Source or Object form, provided that You + meet the following conditions: + + (a) You must give any other recipients of the Work or + Derivative Works a copy of this License; and + + (b) You must cause any modified files to carry prominent notices + stating that You changed the files; and + + (c) You must retain, in the Source form of any Derivative Works + that You distribute, all copyright, patent, trademark, and + attribution notices from the Source form of the Work, + excluding those notices that do not pertain to any part of + the Derivative Works; and + + (d) If the Work includes a "NOTICE" text file as part of its + distribution, then any Derivative Works that You distribute must + include a readable copy of the attribution notices contained + within such NOTICE file, excluding those notices that do not + pertain to any part of the Derivative Works, in at least one + of the following places: within a NOTICE text file distributed + as part of the Derivative Works; within the Source form or + documentation, if provided along with the Derivative Works; or, + within a display generated by the Derivative Works, if and + wherever such third-party notices normally appear. The contents + of the NOTICE file are for informational purposes only and + do not modify the License. You may add Your own attribution + notices within Derivative Works that You distribute, alongside + or as an addendum to the NOTICE text from the Work, provided + that such additional attribution notices cannot be construed + as modifying the License. 
+ + You may add Your own copyright statement to Your modifications and + may provide additional or different license terms and conditions + for use, reproduction, or distribution of Your modifications, or + for any such Derivative Works as a whole, provided Your use, + reproduction, and distribution of the Work otherwise complies with + the conditions stated in this License. + + 5. Submission of Contributions. Unless You explicitly state otherwise, + any Contribution intentionally submitted for inclusion in the Work + by You to the Licensor shall be under the terms and conditions of + this License, without any additional terms or conditions. + Notwithstanding the above, nothing herein shall supersede or modify + the terms of any separate license agreement you may have executed + with Licensor regarding such Contributions. + + 6. Trademarks. This License does not grant permission to use the trade + names, trademarks, service marks, or product names of the Licensor, + except as required for reasonable and customary use in describing the + origin of the Work and reproducing the content of the NOTICE file. + + 7. Disclaimer of Warranty. Unless required by applicable law or + agreed to in writing, Licensor provides the Work (and each + Contributor provides its Contributions) on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or + implied, including, without limitation, any warranties or conditions + of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A + PARTICULAR PURPOSE. You are solely responsible for determining the + appropriateness of using or redistributing the Work and assume any + risks associated with Your exercise of permissions under this License. + + 8. Limitation of Liability. 
In no event and under no legal theory, + whether in tort (including negligence), contract, or otherwise, + unless required by applicable law (such as deliberate and grossly + negligent acts) or agreed to in writing, shall any Contributor be + liable to You for damages, including any direct, indirect, special, + incidental, or consequential damages of any character arising as a + result of this License or out of the use or inability to use the + Work (including but not limited to damages for loss of goodwill, + work stoppage, computer failure or malfunction, or any and all + other commercial damages or losses), even if such Contributor + has been advised of the possibility of such damages. + + 9. Accepting Warranty or Additional Liability. While redistributing + the Work or Derivative Works thereof, You may choose to offer, + and charge a fee for, acceptance of support, warranty, indemnity, + or other liability obligations and/or rights consistent with this + License. However, in accepting such obligations, You may act only + on Your own behalf and on Your sole responsibility, not on behalf + of any other Contributor, and only if You agree to indemnify, + defend, and hold each Contributor harmless for any liability + incurred by, or claims asserted against, such Contributor by reason + of your accepting any such warranty or additional liability. + + END OF TERMS AND CONDITIONS + + APPENDIX: How to apply the Apache License to your work. + + To apply the Apache License to your work, attach the following + boilerplate notice, with the fields enclosed by brackets "{}" + replaced with your own identifying information. (Don't include + the brackets!) The text should be enclosed in the appropriate + comment syntax for the file format. We also recommend that a + file or class name and description of purpose be included on the + same "printed page" as the copyright notice for easier + identification within third-party archives. + + Copyright 2018-2020 Amazon.com, Inc. 
or its affiliates. All Rights Reserved. + + Licensed under the Apache License, Version 2.0 (the "License"); + you may not use this file except in compliance with the License. + You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + + Unless required by applicable law or agreed to in writing, software + distributed under the License is distributed on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + See the License for the specific language governing permissions and + limitations under the License. \ No newline at end of file diff --git a/amplify/functions/fetchDocuments/node_modules/@aws-crypto/util/node_modules/@smithy/util-buffer-from/README.md b/amplify/functions/fetchDocuments/node_modules/@aws-crypto/util/node_modules/@smithy/util-buffer-from/README.md new file mode 100644 index 0000000..c896b04 --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@aws-crypto/util/node_modules/@smithy/util-buffer-from/README.md @@ -0,0 +1,10 @@ +# @smithy/util-buffer-from + +[![NPM version](https://img.shields.io/npm/v/@smithy/util-buffer-from/latest.svg)](https://www.npmjs.com/package/@smithy/util-buffer-from) +[![NPM downloads](https://img.shields.io/npm/dm/@smithy/util-buffer-from.svg)](https://www.npmjs.com/package/@smithy/util-buffer-from) + +> An internal package + +## Usage + +You probably shouldn't, at least directly. 
diff --git a/amplify/functions/fetchDocuments/node_modules/@aws-crypto/util/node_modules/@smithy/util-buffer-from/dist-cjs/index.js b/amplify/functions/fetchDocuments/node_modules/@aws-crypto/util/node_modules/@smithy/util-buffer-from/dist-cjs/index.js new file mode 100644 index 0000000..c6738d9 --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@aws-crypto/util/node_modules/@smithy/util-buffer-from/dist-cjs/index.js @@ -0,0 +1,47 @@ +var __defProp = Object.defineProperty; +var __getOwnPropDesc = Object.getOwnPropertyDescriptor; +var __getOwnPropNames = Object.getOwnPropertyNames; +var __hasOwnProp = Object.prototype.hasOwnProperty; +var __name = (target, value) => __defProp(target, "name", { value, configurable: true }); +var __export = (target, all) => { + for (var name in all) + __defProp(target, name, { get: all[name], enumerable: true }); +}; +var __copyProps = (to, from, except, desc) => { + if (from && typeof from === "object" || typeof from === "function") { + for (let key of __getOwnPropNames(from)) + if (!__hasOwnProp.call(to, key) && key !== except) + __defProp(to, key, { get: () => from[key], enumerable: !(desc = __getOwnPropDesc(from, key)) || desc.enumerable }); + } + return to; +}; +var __toCommonJS = (mod) => __copyProps(__defProp({}, "__esModule", { value: true }), mod); + +// src/index.ts +var src_exports = {}; +__export(src_exports, { + fromArrayBuffer: () => fromArrayBuffer, + fromString: () => fromString +}); +module.exports = __toCommonJS(src_exports); +var import_is_array_buffer = require("@smithy/is-array-buffer"); +var import_buffer = require("buffer"); +var fromArrayBuffer = /* @__PURE__ */ __name((input, offset = 0, length = input.byteLength - offset) => { + if (!(0, import_is_array_buffer.isArrayBuffer)(input)) { + throw new TypeError(`The "input" argument must be ArrayBuffer. 
Received type ${typeof input} (${input})`); + } + return import_buffer.Buffer.from(input, offset, length); +}, "fromArrayBuffer"); +var fromString = /* @__PURE__ */ __name((input, encoding) => { + if (typeof input !== "string") { + throw new TypeError(`The "input" argument must be of type string. Received type ${typeof input} (${input})`); + } + return encoding ? import_buffer.Buffer.from(input, encoding) : import_buffer.Buffer.from(input); +}, "fromString"); +// Annotate the CommonJS export names for ESM import in node: + +0 && (module.exports = { + fromArrayBuffer, + fromString +}); + diff --git a/amplify/functions/fetchDocuments/node_modules/@aws-crypto/util/node_modules/@smithy/util-buffer-from/dist-es/index.js b/amplify/functions/fetchDocuments/node_modules/@aws-crypto/util/node_modules/@smithy/util-buffer-from/dist-es/index.js new file mode 100644 index 0000000..718f831 --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@aws-crypto/util/node_modules/@smithy/util-buffer-from/dist-es/index.js @@ -0,0 +1,14 @@ +import { isArrayBuffer } from "@smithy/is-array-buffer"; +import { Buffer } from "buffer"; +export const fromArrayBuffer = (input, offset = 0, length = input.byteLength - offset) => { + if (!isArrayBuffer(input)) { + throw new TypeError(`The "input" argument must be ArrayBuffer. Received type ${typeof input} (${input})`); + } + return Buffer.from(input, offset, length); +}; +export const fromString = (input, encoding) => { + if (typeof input !== "string") { + throw new TypeError(`The "input" argument must be of type string. Received type ${typeof input} (${input})`); + } + return encoding ? 
Buffer.from(input, encoding) : Buffer.from(input); +}; diff --git a/amplify/functions/fetchDocuments/node_modules/@aws-crypto/util/node_modules/@smithy/util-buffer-from/dist-types/index.d.ts b/amplify/functions/fetchDocuments/node_modules/@aws-crypto/util/node_modules/@smithy/util-buffer-from/dist-types/index.d.ts new file mode 100644 index 0000000..a523134 --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@aws-crypto/util/node_modules/@smithy/util-buffer-from/dist-types/index.d.ts @@ -0,0 +1,13 @@ +import { Buffer } from "buffer"; +/** + * @internal + */ +export declare const fromArrayBuffer: (input: ArrayBuffer, offset?: number, length?: number) => Buffer; +/** + * @internal + */ +export type StringEncoding = "ascii" | "utf8" | "utf16le" | "ucs2" | "base64" | "latin1" | "binary" | "hex"; +/** + * @internal + */ +export declare const fromString: (input: string, encoding?: StringEncoding) => Buffer; diff --git a/amplify/functions/fetchDocuments/node_modules/@aws-crypto/util/node_modules/@smithy/util-buffer-from/dist-types/ts3.4/index.d.ts b/amplify/functions/fetchDocuments/node_modules/@aws-crypto/util/node_modules/@smithy/util-buffer-from/dist-types/ts3.4/index.d.ts new file mode 100644 index 0000000..f9173f7 --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@aws-crypto/util/node_modules/@smithy/util-buffer-from/dist-types/ts3.4/index.d.ts @@ -0,0 +1,13 @@ +import { Buffer } from "buffer"; +/** + * @internal + */ +export declare const fromArrayBuffer: (input: ArrayBuffer, offset?: number, length?: number) => Buffer; +/** + * @internal + */ +export type StringEncoding = "ascii" | "utf8" | "utf16le" | "ucs2" | "base64" | "latin1" | "binary" | "hex"; +/** + * @internal + */ +export declare const fromString: (input: string, encoding?: StringEncoding) => Buffer; diff --git a/amplify/functions/fetchDocuments/node_modules/@aws-crypto/util/node_modules/@smithy/util-buffer-from/package.json 
b/amplify/functions/fetchDocuments/node_modules/@aws-crypto/util/node_modules/@smithy/util-buffer-from/package.json new file mode 100644 index 0000000..a12e51c --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@aws-crypto/util/node_modules/@smithy/util-buffer-from/package.json @@ -0,0 +1,61 @@ +{ + "name": "@smithy/util-buffer-from", + "version": "2.2.0", + "scripts": { + "build": "concurrently 'yarn:build:cjs' 'yarn:build:es' 'yarn:build:types && yarn build:types:downlevel'", + "build:cjs": "node ../../scripts/inline util-buffer-from", + "build:es": "yarn g:tsc -p tsconfig.es.json", + "build:types": "yarn g:tsc -p tsconfig.types.json", + "build:types:downlevel": "downlevel-dts dist-types dist-types/ts3.4", + "stage-release": "rimraf ./.release && yarn pack && mkdir ./.release && tar zxvf ./package.tgz --directory ./.release && rm ./package.tgz", + "clean": "rimraf ./dist-* && rimraf *.tsbuildinfo || exit 0", + "lint": "eslint -c ../../.eslintrc.js \"src/**/*.ts\"", + "format": "prettier --config ../../prettier.config.js --ignore-path ../.prettierignore --write \"**/*.{ts,md,json}\"", + "test": "yarn g:jest" + }, + "author": { + "name": "AWS SDK for JavaScript Team", + "url": "https://aws.amazon.com/javascript/" + }, + "license": "Apache-2.0", + "dependencies": { + "@smithy/is-array-buffer": "^2.2.0", + "tslib": "^2.6.2" + }, + "devDependencies": { + "@tsconfig/recommended": "1.0.1", + "@types/node": "^14.14.31", + "concurrently": "7.0.0", + "downlevel-dts": "0.10.1", + "rimraf": "3.0.2", + "typedoc": "0.23.23" + }, + "main": "./dist-cjs/index.js", + "module": "./dist-es/index.js", + "types": "./dist-types/index.d.ts", + "engines": { + "node": ">=14.0.0" + }, + "typesVersions": { + "<4.0": { + "dist-types/*": [ + "dist-types/ts3.4/*" + ] + } + }, + "files": [ + "dist-*/**" + ], + "homepage": "https://github.com/awslabs/smithy-typescript/tree/main/packages/util-buffer-from", + "repository": { + "type": "git", + "url": 
"https://github.com/awslabs/smithy-typescript.git", + "directory": "packages/util-buffer-from" + }, + "typedoc": { + "entryPoint": "src/index.ts" + }, + "publishConfig": { + "directory": ".release/package" + } +} \ No newline at end of file diff --git a/amplify/functions/fetchDocuments/node_modules/@aws-crypto/util/node_modules/@smithy/util-utf8/LICENSE b/amplify/functions/fetchDocuments/node_modules/@aws-crypto/util/node_modules/@smithy/util-utf8/LICENSE new file mode 100644 index 0000000..7b6491b --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@aws-crypto/util/node_modules/@smithy/util-utf8/LICENSE @@ -0,0 +1,201 @@ +Apache License + Version 2.0, January 2004 + http://www.apache.org/licenses/ + + TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION + + 1. Definitions. + + "License" shall mean the terms and conditions for use, reproduction, + and distribution as defined by Sections 1 through 9 of this document. + + "Licensor" shall mean the copyright owner or entity authorized by + the copyright owner that is granting the License. + + "Legal Entity" shall mean the union of the acting entity and all + other entities that control, are controlled by, or are under common + control with that entity. For the purposes of this definition, + "control" means (i) the power, direct or indirect, to cause the + direction or management of such entity, whether by contract or + otherwise, or (ii) ownership of fifty percent (50%) or more of the + outstanding shares, or (iii) beneficial ownership of such entity. + + "You" (or "Your") shall mean an individual or Legal Entity + exercising permissions granted by this License. + + "Source" form shall mean the preferred form for making modifications, + including but not limited to software source code, documentation + source, and configuration files. 
+ + "Object" form shall mean any form resulting from mechanical + transformation or translation of a Source form, including but + not limited to compiled object code, generated documentation, + and conversions to other media types. + + "Work" shall mean the work of authorship, whether in Source or + Object form, made available under the License, as indicated by a + copyright notice that is included in or attached to the work + (an example is provided in the Appendix below). + + "Derivative Works" shall mean any work, whether in Source or Object + form, that is based on (or derived from) the Work and for which the + editorial revisions, annotations, elaborations, or other modifications + represent, as a whole, an original work of authorship. For the purposes + of this License, Derivative Works shall not include works that remain + separable from, or merely link (or bind by name) to the interfaces of, + the Work and Derivative Works thereof. + + "Contribution" shall mean any work of authorship, including + the original version of the Work and any modifications or additions + to that Work or Derivative Works thereof, that is intentionally + submitted to Licensor for inclusion in the Work by the copyright owner + or by an individual or Legal Entity authorized to submit on behalf of + the copyright owner. For the purposes of this definition, "submitted" + means any form of electronic, verbal, or written communication sent + to the Licensor or its representatives, including but not limited to + communication on electronic mailing lists, source code control systems, + and issue tracking systems that are managed by, or on behalf of, the + Licensor for the purpose of discussing and improving the Work, but + excluding communication that is conspicuously marked or otherwise + designated in writing by the copyright owner as "Not a Contribution." 
+ + "Contributor" shall mean Licensor and any individual or Legal Entity + on behalf of whom a Contribution has been received by Licensor and + subsequently incorporated within the Work. + + 2. Grant of Copyright License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + copyright license to reproduce, prepare Derivative Works of, + publicly display, publicly perform, sublicense, and distribute the + Work and such Derivative Works in Source or Object form. + + 3. Grant of Patent License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + (except as stated in this section) patent license to make, have made, + use, offer to sell, sell, import, and otherwise transfer the Work, + where such license applies only to those patent claims licensable + by such Contributor that are necessarily infringed by their + Contribution(s) alone or by combination of their Contribution(s) + with the Work to which such Contribution(s) was submitted. If You + institute patent litigation against any entity (including a + cross-claim or counterclaim in a lawsuit) alleging that the Work + or a Contribution incorporated within the Work constitutes direct + or contributory patent infringement, then any patent licenses + granted to You under this License for that Work shall terminate + as of the date such litigation is filed. + + 4. Redistribution. 
You may reproduce and distribute copies of the + Work or Derivative Works thereof in any medium, with or without + modifications, and in Source or Object form, provided that You + meet the following conditions: + + (a) You must give any other recipients of the Work or + Derivative Works a copy of this License; and + + (b) You must cause any modified files to carry prominent notices + stating that You changed the files; and + + (c) You must retain, in the Source form of any Derivative Works + that You distribute, all copyright, patent, trademark, and + attribution notices from the Source form of the Work, + excluding those notices that do not pertain to any part of + the Derivative Works; and + + (d) If the Work includes a "NOTICE" text file as part of its + distribution, then any Derivative Works that You distribute must + include a readable copy of the attribution notices contained + within such NOTICE file, excluding those notices that do not + pertain to any part of the Derivative Works, in at least one + of the following places: within a NOTICE text file distributed + as part of the Derivative Works; within the Source form or + documentation, if provided along with the Derivative Works; or, + within a display generated by the Derivative Works, if and + wherever such third-party notices normally appear. The contents + of the NOTICE file are for informational purposes only and + do not modify the License. You may add Your own attribution + notices within Derivative Works that You distribute, alongside + or as an addendum to the NOTICE text from the Work, provided + that such additional attribution notices cannot be construed + as modifying the License. 
+ + You may add Your own copyright statement to Your modifications and + may provide additional or different license terms and conditions + for use, reproduction, or distribution of Your modifications, or + for any such Derivative Works as a whole, provided Your use, + reproduction, and distribution of the Work otherwise complies with + the conditions stated in this License. + + 5. Submission of Contributions. Unless You explicitly state otherwise, + any Contribution intentionally submitted for inclusion in the Work + by You to the Licensor shall be under the terms and conditions of + this License, without any additional terms or conditions. + Notwithstanding the above, nothing herein shall supersede or modify + the terms of any separate license agreement you may have executed + with Licensor regarding such Contributions. + + 6. Trademarks. This License does not grant permission to use the trade + names, trademarks, service marks, or product names of the Licensor, + except as required for reasonable and customary use in describing the + origin of the Work and reproducing the content of the NOTICE file. + + 7. Disclaimer of Warranty. Unless required by applicable law or + agreed to in writing, Licensor provides the Work (and each + Contributor provides its Contributions) on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or + implied, including, without limitation, any warranties or conditions + of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A + PARTICULAR PURPOSE. You are solely responsible for determining the + appropriateness of using or redistributing the Work and assume any + risks associated with Your exercise of permissions under this License. + + 8. Limitation of Liability. 
In no event and under no legal theory, + whether in tort (including negligence), contract, or otherwise, + unless required by applicable law (such as deliberate and grossly + negligent acts) or agreed to in writing, shall any Contributor be + liable to You for damages, including any direct, indirect, special, + incidental, or consequential damages of any character arising as a + result of this License or out of the use or inability to use the + Work (including but not limited to damages for loss of goodwill, + work stoppage, computer failure or malfunction, or any and all + other commercial damages or losses), even if such Contributor + has been advised of the possibility of such damages. + + 9. Accepting Warranty or Additional Liability. While redistributing + the Work or Derivative Works thereof, You may choose to offer, + and charge a fee for, acceptance of support, warranty, indemnity, + or other liability obligations and/or rights consistent with this + License. However, in accepting such obligations, You may act only + on Your own behalf and on Your sole responsibility, not on behalf + of any other Contributor, and only if You agree to indemnify, + defend, and hold each Contributor harmless for any liability + incurred by, or claims asserted against, such Contributor by reason + of your accepting any such warranty or additional liability. + + END OF TERMS AND CONDITIONS + + APPENDIX: How to apply the Apache License to your work. + + To apply the Apache License to your work, attach the following + boilerplate notice, with the fields enclosed by brackets "{}" + replaced with your own identifying information. (Don't include + the brackets!) The text should be enclosed in the appropriate + comment syntax for the file format. We also recommend that a + file or class name and description of purpose be included on the + same "printed page" as the copyright notice for easier + identification within third-party archives. + + Copyright 2018-2020 Amazon.com, Inc. 
or its affiliates. All Rights Reserved. + + Licensed under the Apache License, Version 2.0 (the "License"); + you may not use this file except in compliance with the License. + You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + + Unless required by applicable law or agreed to in writing, software + distributed under the License is distributed on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + See the License for the specific language governing permissions and + limitations under the License. \ No newline at end of file diff --git a/amplify/functions/fetchDocuments/node_modules/@aws-crypto/util/node_modules/@smithy/util-utf8/README.md b/amplify/functions/fetchDocuments/node_modules/@aws-crypto/util/node_modules/@smithy/util-utf8/README.md new file mode 100644 index 0000000..fc5db6d --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@aws-crypto/util/node_modules/@smithy/util-utf8/README.md @@ -0,0 +1,4 @@ +# @smithy/util-utf8 + +[![NPM version](https://img.shields.io/npm/v/@smithy/util-utf8/latest.svg)](https://www.npmjs.com/package/@smithy/util-utf8) +[![NPM downloads](https://img.shields.io/npm/dm/@smithy/util-utf8.svg)](https://www.npmjs.com/package/@smithy/util-utf8) diff --git a/amplify/functions/fetchDocuments/node_modules/@aws-crypto/util/node_modules/@smithy/util-utf8/dist-cjs/fromUtf8.browser.js b/amplify/functions/fetchDocuments/node_modules/@aws-crypto/util/node_modules/@smithy/util-utf8/dist-cjs/fromUtf8.browser.js new file mode 100644 index 0000000..532e610 --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@aws-crypto/util/node_modules/@smithy/util-utf8/dist-cjs/fromUtf8.browser.js @@ -0,0 +1 @@ +module.exports = require("./index.js"); \ No newline at end of file diff --git a/amplify/functions/fetchDocuments/node_modules/@aws-crypto/util/node_modules/@smithy/util-utf8/dist-cjs/fromUtf8.js 
b/amplify/functions/fetchDocuments/node_modules/@aws-crypto/util/node_modules/@smithy/util-utf8/dist-cjs/fromUtf8.js new file mode 100644 index 0000000..532e610 --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@aws-crypto/util/node_modules/@smithy/util-utf8/dist-cjs/fromUtf8.js @@ -0,0 +1 @@ +module.exports = require("./index.js"); \ No newline at end of file diff --git a/amplify/functions/fetchDocuments/node_modules/@aws-crypto/util/node_modules/@smithy/util-utf8/dist-cjs/index.js b/amplify/functions/fetchDocuments/node_modules/@aws-crypto/util/node_modules/@smithy/util-utf8/dist-cjs/index.js new file mode 100644 index 0000000..0b22680 --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@aws-crypto/util/node_modules/@smithy/util-utf8/dist-cjs/index.js @@ -0,0 +1,65 @@ +var __defProp = Object.defineProperty; +var __getOwnPropDesc = Object.getOwnPropertyDescriptor; +var __getOwnPropNames = Object.getOwnPropertyNames; +var __hasOwnProp = Object.prototype.hasOwnProperty; +var __name = (target, value) => __defProp(target, "name", { value, configurable: true }); +var __export = (target, all) => { + for (var name in all) + __defProp(target, name, { get: all[name], enumerable: true }); +}; +var __copyProps = (to, from, except, desc) => { + if (from && typeof from === "object" || typeof from === "function") { + for (let key of __getOwnPropNames(from)) + if (!__hasOwnProp.call(to, key) && key !== except) + __defProp(to, key, { get: () => from[key], enumerable: !(desc = __getOwnPropDesc(from, key)) || desc.enumerable }); + } + return to; +}; +var __toCommonJS = (mod) => __copyProps(__defProp({}, "__esModule", { value: true }), mod); + +// src/index.ts +var src_exports = {}; +__export(src_exports, { + fromUtf8: () => fromUtf8, + toUint8Array: () => toUint8Array, + toUtf8: () => toUtf8 +}); +module.exports = __toCommonJS(src_exports); + +// src/fromUtf8.ts +var import_util_buffer_from = require("@smithy/util-buffer-from"); +var fromUtf8 = /* 
@__PURE__ */ __name((input) => { + const buf = (0, import_util_buffer_from.fromString)(input, "utf8"); + return new Uint8Array(buf.buffer, buf.byteOffset, buf.byteLength / Uint8Array.BYTES_PER_ELEMENT); +}, "fromUtf8"); + +// src/toUint8Array.ts +var toUint8Array = /* @__PURE__ */ __name((data) => { + if (typeof data === "string") { + return fromUtf8(data); + } + if (ArrayBuffer.isView(data)) { + return new Uint8Array(data.buffer, data.byteOffset, data.byteLength / Uint8Array.BYTES_PER_ELEMENT); + } + return new Uint8Array(data); +}, "toUint8Array"); + +// src/toUtf8.ts + +var toUtf8 = /* @__PURE__ */ __name((input) => { + if (typeof input === "string") { + return input; + } + if (typeof input !== "object" || typeof input.byteOffset !== "number" || typeof input.byteLength !== "number") { + throw new Error("@smithy/util-utf8: toUtf8 encoder function only accepts string | Uint8Array."); + } + return (0, import_util_buffer_from.fromArrayBuffer)(input.buffer, input.byteOffset, input.byteLength).toString("utf8"); +}, "toUtf8"); +// Annotate the CommonJS export names for ESM import in node: + +0 && (module.exports = { + fromUtf8, + toUint8Array, + toUtf8 +}); + diff --git a/amplify/functions/fetchDocuments/node_modules/@aws-crypto/util/node_modules/@smithy/util-utf8/dist-cjs/toUint8Array.js b/amplify/functions/fetchDocuments/node_modules/@aws-crypto/util/node_modules/@smithy/util-utf8/dist-cjs/toUint8Array.js new file mode 100644 index 0000000..532e610 --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@aws-crypto/util/node_modules/@smithy/util-utf8/dist-cjs/toUint8Array.js @@ -0,0 +1 @@ +module.exports = require("./index.js"); \ No newline at end of file diff --git a/amplify/functions/fetchDocuments/node_modules/@aws-crypto/util/node_modules/@smithy/util-utf8/dist-cjs/toUtf8.browser.js b/amplify/functions/fetchDocuments/node_modules/@aws-crypto/util/node_modules/@smithy/util-utf8/dist-cjs/toUtf8.browser.js new file mode 100644 index 0000000..532e610 --- 
/dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@aws-crypto/util/node_modules/@smithy/util-utf8/dist-cjs/toUtf8.browser.js @@ -0,0 +1 @@ +module.exports = require("./index.js"); \ No newline at end of file diff --git a/amplify/functions/fetchDocuments/node_modules/@aws-crypto/util/node_modules/@smithy/util-utf8/dist-cjs/toUtf8.js b/amplify/functions/fetchDocuments/node_modules/@aws-crypto/util/node_modules/@smithy/util-utf8/dist-cjs/toUtf8.js new file mode 100644 index 0000000..532e610 --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@aws-crypto/util/node_modules/@smithy/util-utf8/dist-cjs/toUtf8.js @@ -0,0 +1 @@ +module.exports = require("./index.js"); \ No newline at end of file diff --git a/amplify/functions/fetchDocuments/node_modules/@aws-crypto/util/node_modules/@smithy/util-utf8/dist-es/fromUtf8.browser.js b/amplify/functions/fetchDocuments/node_modules/@aws-crypto/util/node_modules/@smithy/util-utf8/dist-es/fromUtf8.browser.js new file mode 100644 index 0000000..7344190 --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@aws-crypto/util/node_modules/@smithy/util-utf8/dist-es/fromUtf8.browser.js @@ -0,0 +1 @@ +export const fromUtf8 = (input) => new TextEncoder().encode(input); diff --git a/amplify/functions/fetchDocuments/node_modules/@aws-crypto/util/node_modules/@smithy/util-utf8/dist-es/fromUtf8.js b/amplify/functions/fetchDocuments/node_modules/@aws-crypto/util/node_modules/@smithy/util-utf8/dist-es/fromUtf8.js new file mode 100644 index 0000000..6dc438b --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@aws-crypto/util/node_modules/@smithy/util-utf8/dist-es/fromUtf8.js @@ -0,0 +1,5 @@ +import { fromString } from "@smithy/util-buffer-from"; +export const fromUtf8 = (input) => { + const buf = fromString(input, "utf8"); + return new Uint8Array(buf.buffer, buf.byteOffset, buf.byteLength / Uint8Array.BYTES_PER_ELEMENT); +}; diff --git 
a/amplify/functions/fetchDocuments/node_modules/@aws-crypto/util/node_modules/@smithy/util-utf8/dist-es/index.js b/amplify/functions/fetchDocuments/node_modules/@aws-crypto/util/node_modules/@smithy/util-utf8/dist-es/index.js new file mode 100644 index 0000000..00ba465 --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@aws-crypto/util/node_modules/@smithy/util-utf8/dist-es/index.js @@ -0,0 +1,3 @@ +export * from "./fromUtf8"; +export * from "./toUint8Array"; +export * from "./toUtf8"; diff --git a/amplify/functions/fetchDocuments/node_modules/@aws-crypto/util/node_modules/@smithy/util-utf8/dist-es/toUint8Array.js b/amplify/functions/fetchDocuments/node_modules/@aws-crypto/util/node_modules/@smithy/util-utf8/dist-es/toUint8Array.js new file mode 100644 index 0000000..2cd36f7 --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@aws-crypto/util/node_modules/@smithy/util-utf8/dist-es/toUint8Array.js @@ -0,0 +1,10 @@ +import { fromUtf8 } from "./fromUtf8"; +export const toUint8Array = (data) => { + if (typeof data === "string") { + return fromUtf8(data); + } + if (ArrayBuffer.isView(data)) { + return new Uint8Array(data.buffer, data.byteOffset, data.byteLength / Uint8Array.BYTES_PER_ELEMENT); + } + return new Uint8Array(data); +}; diff --git a/amplify/functions/fetchDocuments/node_modules/@aws-crypto/util/node_modules/@smithy/util-utf8/dist-es/toUtf8.browser.js b/amplify/functions/fetchDocuments/node_modules/@aws-crypto/util/node_modules/@smithy/util-utf8/dist-es/toUtf8.browser.js new file mode 100644 index 0000000..c292127 --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@aws-crypto/util/node_modules/@smithy/util-utf8/dist-es/toUtf8.browser.js @@ -0,0 +1,9 @@ +export const toUtf8 = (input) => { + if (typeof input === "string") { + return input; + } + if (typeof input !== "object" || typeof input.byteOffset !== "number" || typeof input.byteLength !== "number") { + throw new Error("@smithy/util-utf8: toUtf8 encoder function 
only accepts string | Uint8Array."); + } + return new TextDecoder("utf-8").decode(input); +}; diff --git a/amplify/functions/fetchDocuments/node_modules/@aws-crypto/util/node_modules/@smithy/util-utf8/dist-es/toUtf8.js b/amplify/functions/fetchDocuments/node_modules/@aws-crypto/util/node_modules/@smithy/util-utf8/dist-es/toUtf8.js new file mode 100644 index 0000000..7be8745 --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@aws-crypto/util/node_modules/@smithy/util-utf8/dist-es/toUtf8.js @@ -0,0 +1,10 @@ +import { fromArrayBuffer } from "@smithy/util-buffer-from"; +export const toUtf8 = (input) => { + if (typeof input === "string") { + return input; + } + if (typeof input !== "object" || typeof input.byteOffset !== "number" || typeof input.byteLength !== "number") { + throw new Error("@smithy/util-utf8: toUtf8 encoder function only accepts string | Uint8Array."); + } + return fromArrayBuffer(input.buffer, input.byteOffset, input.byteLength).toString("utf8"); +}; diff --git a/amplify/functions/fetchDocuments/node_modules/@aws-crypto/util/node_modules/@smithy/util-utf8/dist-types/fromUtf8.browser.d.ts b/amplify/functions/fetchDocuments/node_modules/@aws-crypto/util/node_modules/@smithy/util-utf8/dist-types/fromUtf8.browser.d.ts new file mode 100644 index 0000000..dd91981 --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@aws-crypto/util/node_modules/@smithy/util-utf8/dist-types/fromUtf8.browser.d.ts @@ -0,0 +1 @@ +export declare const fromUtf8: (input: string) => Uint8Array; diff --git a/amplify/functions/fetchDocuments/node_modules/@aws-crypto/util/node_modules/@smithy/util-utf8/dist-types/fromUtf8.d.ts b/amplify/functions/fetchDocuments/node_modules/@aws-crypto/util/node_modules/@smithy/util-utf8/dist-types/fromUtf8.d.ts new file mode 100644 index 0000000..dd91981 --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@aws-crypto/util/node_modules/@smithy/util-utf8/dist-types/fromUtf8.d.ts @@ -0,0 +1 @@ +export declare 
const fromUtf8: (input: string) => Uint8Array; diff --git a/amplify/functions/fetchDocuments/node_modules/@aws-crypto/util/node_modules/@smithy/util-utf8/dist-types/index.d.ts b/amplify/functions/fetchDocuments/node_modules/@aws-crypto/util/node_modules/@smithy/util-utf8/dist-types/index.d.ts new file mode 100644 index 0000000..00ba465 --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@aws-crypto/util/node_modules/@smithy/util-utf8/dist-types/index.d.ts @@ -0,0 +1,3 @@ +export * from "./fromUtf8"; +export * from "./toUint8Array"; +export * from "./toUtf8"; diff --git a/amplify/functions/fetchDocuments/node_modules/@aws-crypto/util/node_modules/@smithy/util-utf8/dist-types/toUint8Array.d.ts b/amplify/functions/fetchDocuments/node_modules/@aws-crypto/util/node_modules/@smithy/util-utf8/dist-types/toUint8Array.d.ts new file mode 100644 index 0000000..11b6342 --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@aws-crypto/util/node_modules/@smithy/util-utf8/dist-types/toUint8Array.d.ts @@ -0,0 +1 @@ +export declare const toUint8Array: (data: string | ArrayBuffer | ArrayBufferView) => Uint8Array; diff --git a/amplify/functions/fetchDocuments/node_modules/@aws-crypto/util/node_modules/@smithy/util-utf8/dist-types/toUtf8.browser.d.ts b/amplify/functions/fetchDocuments/node_modules/@aws-crypto/util/node_modules/@smithy/util-utf8/dist-types/toUtf8.browser.d.ts new file mode 100644 index 0000000..8494acd --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@aws-crypto/util/node_modules/@smithy/util-utf8/dist-types/toUtf8.browser.d.ts @@ -0,0 +1,7 @@ +/** + * + * This does not convert non-utf8 strings to utf8, it only passes through strings if + * a string is received instead of a Uint8Array. 
+ * + */ +export declare const toUtf8: (input: Uint8Array | string) => string; diff --git a/amplify/functions/fetchDocuments/node_modules/@aws-crypto/util/node_modules/@smithy/util-utf8/dist-types/toUtf8.d.ts b/amplify/functions/fetchDocuments/node_modules/@aws-crypto/util/node_modules/@smithy/util-utf8/dist-types/toUtf8.d.ts new file mode 100644 index 0000000..8494acd --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@aws-crypto/util/node_modules/@smithy/util-utf8/dist-types/toUtf8.d.ts @@ -0,0 +1,7 @@ +/** + * + * This does not convert non-utf8 strings to utf8, it only passes through strings if + * a string is received instead of a Uint8Array. + * + */ +export declare const toUtf8: (input: Uint8Array | string) => string; diff --git a/amplify/functions/fetchDocuments/node_modules/@aws-crypto/util/node_modules/@smithy/util-utf8/dist-types/ts3.4/fromUtf8.browser.d.ts b/amplify/functions/fetchDocuments/node_modules/@aws-crypto/util/node_modules/@smithy/util-utf8/dist-types/ts3.4/fromUtf8.browser.d.ts new file mode 100644 index 0000000..39f3d6d --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@aws-crypto/util/node_modules/@smithy/util-utf8/dist-types/ts3.4/fromUtf8.browser.d.ts @@ -0,0 +1 @@ +export declare const fromUtf8: (input: string) => Uint8Array; diff --git a/amplify/functions/fetchDocuments/node_modules/@aws-crypto/util/node_modules/@smithy/util-utf8/dist-types/ts3.4/fromUtf8.d.ts b/amplify/functions/fetchDocuments/node_modules/@aws-crypto/util/node_modules/@smithy/util-utf8/dist-types/ts3.4/fromUtf8.d.ts new file mode 100644 index 0000000..39f3d6d --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@aws-crypto/util/node_modules/@smithy/util-utf8/dist-types/ts3.4/fromUtf8.d.ts @@ -0,0 +1 @@ +export declare const fromUtf8: (input: string) => Uint8Array; diff --git a/amplify/functions/fetchDocuments/node_modules/@aws-crypto/util/node_modules/@smithy/util-utf8/dist-types/ts3.4/index.d.ts 
b/amplify/functions/fetchDocuments/node_modules/@aws-crypto/util/node_modules/@smithy/util-utf8/dist-types/ts3.4/index.d.ts new file mode 100644 index 0000000..ef9761d --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@aws-crypto/util/node_modules/@smithy/util-utf8/dist-types/ts3.4/index.d.ts @@ -0,0 +1,3 @@ +export * from "./fromUtf8"; +export * from "./toUint8Array"; +export * from "./toUtf8"; diff --git a/amplify/functions/fetchDocuments/node_modules/@aws-crypto/util/node_modules/@smithy/util-utf8/dist-types/ts3.4/toUint8Array.d.ts b/amplify/functions/fetchDocuments/node_modules/@aws-crypto/util/node_modules/@smithy/util-utf8/dist-types/ts3.4/toUint8Array.d.ts new file mode 100644 index 0000000..562fe10 --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@aws-crypto/util/node_modules/@smithy/util-utf8/dist-types/ts3.4/toUint8Array.d.ts @@ -0,0 +1 @@ +export declare const toUint8Array: (data: string | ArrayBuffer | ArrayBufferView) => Uint8Array; diff --git a/amplify/functions/fetchDocuments/node_modules/@aws-crypto/util/node_modules/@smithy/util-utf8/dist-types/ts3.4/toUtf8.browser.d.ts b/amplify/functions/fetchDocuments/node_modules/@aws-crypto/util/node_modules/@smithy/util-utf8/dist-types/ts3.4/toUtf8.browser.d.ts new file mode 100644 index 0000000..33511ad --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@aws-crypto/util/node_modules/@smithy/util-utf8/dist-types/ts3.4/toUtf8.browser.d.ts @@ -0,0 +1,7 @@ +/** + * + * This does not convert non-utf8 strings to utf8, it only passes through strings if + * a string is received instead of a Uint8Array. 
+ * + */ +export declare const toUtf8: (input: Uint8Array | string) => string; diff --git a/amplify/functions/fetchDocuments/node_modules/@aws-crypto/util/node_modules/@smithy/util-utf8/dist-types/ts3.4/toUtf8.d.ts b/amplify/functions/fetchDocuments/node_modules/@aws-crypto/util/node_modules/@smithy/util-utf8/dist-types/ts3.4/toUtf8.d.ts new file mode 100644 index 0000000..33511ad --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@aws-crypto/util/node_modules/@smithy/util-utf8/dist-types/ts3.4/toUtf8.d.ts @@ -0,0 +1,7 @@ +/** + * + * This does not convert non-utf8 strings to utf8, it only passes through strings if + * a string is received instead of a Uint8Array. + * + */ +export declare const toUtf8: (input: Uint8Array | string) => string; diff --git a/amplify/functions/fetchDocuments/node_modules/@aws-crypto/util/node_modules/@smithy/util-utf8/package.json b/amplify/functions/fetchDocuments/node_modules/@aws-crypto/util/node_modules/@smithy/util-utf8/package.json new file mode 100644 index 0000000..78bfb4d --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@aws-crypto/util/node_modules/@smithy/util-utf8/package.json @@ -0,0 +1,66 @@ +{ + "name": "@smithy/util-utf8", + "version": "2.3.0", + "description": "A UTF-8 string <-> UInt8Array converter", + "main": "./dist-cjs/index.js", + "module": "./dist-es/index.js", + "scripts": { + "build": "concurrently 'yarn:build:cjs' 'yarn:build:es' 'yarn:build:types && yarn build:types:downlevel'", + "build:cjs": "node ../../scripts/inline util-utf8", + "build:es": "yarn g:tsc -p tsconfig.es.json", + "build:types": "yarn g:tsc -p tsconfig.types.json", + "build:types:downlevel": "downlevel-dts dist-types dist-types/ts3.4", + "stage-release": "rimraf ./.release && yarn pack && mkdir ./.release && tar zxvf ./package.tgz --directory ./.release && rm ./package.tgz", + "clean": "rimraf ./dist-* && rimraf *.tsbuildinfo || exit 0", + "lint": "eslint -c ../../.eslintrc.js \"src/**/*.ts\"", + "format": 
"prettier --config ../../prettier.config.js --ignore-path ../.prettierignore --write \"**/*.{ts,md,json}\"", + "test": "yarn g:jest" + }, + "author": { + "name": "AWS SDK for JavaScript Team", + "url": "https://aws.amazon.com/javascript/" + }, + "license": "Apache-2.0", + "dependencies": { + "@smithy/util-buffer-from": "^2.2.0", + "tslib": "^2.6.2" + }, + "devDependencies": { + "@tsconfig/recommended": "1.0.1", + "concurrently": "7.0.0", + "downlevel-dts": "0.10.1", + "rimraf": "3.0.2", + "typedoc": "0.23.23" + }, + "types": "./dist-types/index.d.ts", + "engines": { + "node": ">=14.0.0" + }, + "typesVersions": { + "<4.0": { + "dist-types/*": [ + "dist-types/ts3.4/*" + ] + } + }, + "files": [ + "dist-*/**" + ], + "browser": { + "./dist-es/fromUtf8": "./dist-es/fromUtf8.browser", + "./dist-es/toUtf8": "./dist-es/toUtf8.browser" + }, + "react-native": {}, + "homepage": "https://github.com/awslabs/smithy-typescript/tree/main/packages/util-utf8", + "repository": { + "type": "git", + "url": "https://github.com/awslabs/smithy-typescript.git", + "directory": "packages/util-utf8" + }, + "typedoc": { + "entryPoint": "src/index.ts" + }, + "publishConfig": { + "directory": ".release/package" + } +} \ No newline at end of file diff --git a/amplify/functions/fetchDocuments/node_modules/@aws-crypto/util/package.json b/amplify/functions/fetchDocuments/node_modules/@aws-crypto/util/package.json new file mode 100644 index 0000000..431107a --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@aws-crypto/util/package.json @@ -0,0 +1,32 @@ +{ + "name": "@aws-crypto/util", + "version": "5.2.0", + "scripts": { + "prepublishOnly": "tsc -p tsconfig.json && tsc -p tsconfig.module.json", + "pretest": "tsc -p tsconfig.test.json", + "test": "mocha --require ts-node/register test/**/*test.ts" + }, + "main": "./build/main/index.js", + "module": "./build/module/index.js", + "types": "./build/main/index.d.ts", + "repository": { + "type": "git", + "url": 
"git@github.com:aws/aws-sdk-js-crypto-helpers.git" + }, + "author": { + "name": "AWS Crypto Tools Team", + "email": "aws-cryptools@amazon.com", + "url": "https://docs.aws.amazon.com/aws-crypto-tools/index.html?id=docs_gateway#lang/en_us" + }, + "homepage": "https://github.com/aws/aws-sdk-js-crypto-helpers/tree/master/packages/util", + "license": "Apache-2.0", + "dependencies": { + "@aws-sdk/types": "^3.222.0", + "@smithy/util-utf8": "^2.0.0", + "tslib": "^2.6.2" + }, + "publishConfig": { + "access": "public" + }, + "gitHead": "c11b171b35ec5c093364f0e0d8dc4ab1af68e748" +} diff --git a/amplify/functions/fetchDocuments/node_modules/@aws-crypto/util/src/convertToBuffer.ts b/amplify/functions/fetchDocuments/node_modules/@aws-crypto/util/src/convertToBuffer.ts new file mode 100644 index 0000000..f9f163e --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@aws-crypto/util/src/convertToBuffer.ts @@ -0,0 +1,30 @@ +// Copyright Amazon.com Inc. or its affiliates. All Rights Reserved. +// SPDX-License-Identifier: Apache-2.0 + +import { SourceData } from "@aws-sdk/types"; +import { fromUtf8 as fromUtf8Browser } from "@smithy/util-utf8"; + +// Quick polyfill +const fromUtf8 = + typeof Buffer !== "undefined" && Buffer.from + ? 
(input: string) => Buffer.from(input, "utf8") + : fromUtf8Browser; + +export function convertToBuffer(data: SourceData): Uint8Array { + // Already a Uint8, do nothing + if (data instanceof Uint8Array) return data; + + if (typeof data === "string") { + return fromUtf8(data); + } + + if (ArrayBuffer.isView(data)) { + return new Uint8Array( + data.buffer, + data.byteOffset, + data.byteLength / Uint8Array.BYTES_PER_ELEMENT + ); + } + + return new Uint8Array(data); +} diff --git a/amplify/functions/fetchDocuments/node_modules/@aws-crypto/util/src/index.ts b/amplify/functions/fetchDocuments/node_modules/@aws-crypto/util/src/index.ts new file mode 100644 index 0000000..2f6c62a --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@aws-crypto/util/src/index.ts @@ -0,0 +1,7 @@ +// Copyright Amazon.com Inc. or its affiliates. All Rights Reserved. +// SPDX-License-Identifier: Apache-2.0 + +export { convertToBuffer } from "./convertToBuffer"; +export { isEmptyData } from "./isEmptyData"; +export { numToUint8 } from "./numToUint8"; +export {uint32ArrayFrom} from './uint32ArrayFrom'; diff --git a/amplify/functions/fetchDocuments/node_modules/@aws-crypto/util/src/isEmptyData.ts b/amplify/functions/fetchDocuments/node_modules/@aws-crypto/util/src/isEmptyData.ts new file mode 100644 index 0000000..089764d --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@aws-crypto/util/src/isEmptyData.ts @@ -0,0 +1,12 @@ +// Copyright Amazon.com Inc. or its affiliates. All Rights Reserved. 
+// SPDX-License-Identifier: Apache-2.0 + +import { SourceData } from "@aws-sdk/types"; + +export function isEmptyData(data: SourceData): boolean { + if (typeof data === "string") { + return data.length === 0; + } + + return data.byteLength === 0; +} diff --git a/amplify/functions/fetchDocuments/node_modules/@aws-crypto/util/src/numToUint8.ts b/amplify/functions/fetchDocuments/node_modules/@aws-crypto/util/src/numToUint8.ts new file mode 100644 index 0000000..2f40ace --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@aws-crypto/util/src/numToUint8.ts @@ -0,0 +1,11 @@ +// Copyright Amazon.com Inc. or its affiliates. All Rights Reserved. +// SPDX-License-Identifier: Apache-2.0 + +export function numToUint8(num: number) { + return new Uint8Array([ + (num & 0xff000000) >> 24, + (num & 0x00ff0000) >> 16, + (num & 0x0000ff00) >> 8, + num & 0x000000ff, + ]); +} diff --git a/amplify/functions/fetchDocuments/node_modules/@aws-crypto/util/src/uint32ArrayFrom.ts b/amplify/functions/fetchDocuments/node_modules/@aws-crypto/util/src/uint32ArrayFrom.ts new file mode 100644 index 0000000..b9b6d88 --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@aws-crypto/util/src/uint32ArrayFrom.ts @@ -0,0 +1,16 @@ +// Copyright Amazon.com Inc. or its affiliates. All Rights Reserved. 
+// SPDX-License-Identifier: Apache-2.0 + +// IE 11 does not support Array.from, so we do it manually +export function uint32ArrayFrom(a_lookUpTable: Array): Uint32Array { + if (!Uint32Array.from) { + const return_array = new Uint32Array(a_lookUpTable.length) + let a_index = 0 + while (a_index < a_lookUpTable.length) { + return_array[a_index] = a_lookUpTable[a_index] + a_index += 1 + } + return return_array + } + return Uint32Array.from(a_lookUpTable) +} diff --git a/amplify/functions/fetchDocuments/node_modules/@aws-crypto/util/tsconfig.json b/amplify/functions/fetchDocuments/node_modules/@aws-crypto/util/tsconfig.json new file mode 100644 index 0000000..2b996d0 --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@aws-crypto/util/tsconfig.json @@ -0,0 +1,9 @@ +{ + "extends": "../tsconfig.json", + "compilerOptions": { + "rootDir": "./src", + "outDir": "./build/main", + }, + "include": ["src/**/*.ts"], + "exclude": ["node_modules/**"] +} diff --git a/amplify/functions/fetchDocuments/node_modules/@aws-crypto/util/tsconfig.module.json b/amplify/functions/fetchDocuments/node_modules/@aws-crypto/util/tsconfig.module.json new file mode 100644 index 0000000..7d0cfdd --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@aws-crypto/util/tsconfig.module.json @@ -0,0 +1,7 @@ +{ + "extends": "./tsconfig", + "compilerOptions": { + "outDir": "build/module", + "module": "esnext", + } +} diff --git a/amplify/functions/fetchDocuments/node_modules/@aws-sdk/client-dynamodb/LICENSE b/amplify/functions/fetchDocuments/node_modules/@aws-sdk/client-dynamodb/LICENSE new file mode 100644 index 0000000..dd65ae0 --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@aws-sdk/client-dynamodb/LICENSE @@ -0,0 +1,201 @@ + Apache License + Version 2.0, January 2004 + http://www.apache.org/licenses/ + + TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION + + 1. Definitions. 
+ + "License" shall mean the terms and conditions for use, reproduction, + and distribution as defined by Sections 1 through 9 of this document. + + "Licensor" shall mean the copyright owner or entity authorized by + the copyright owner that is granting the License. + + "Legal Entity" shall mean the union of the acting entity and all + other entities that control, are controlled by, or are under common + control with that entity. For the purposes of this definition, + "control" means (i) the power, direct or indirect, to cause the + direction or management of such entity, whether by contract or + otherwise, or (ii) ownership of fifty percent (50%) or more of the + outstanding shares, or (iii) beneficial ownership of such entity. + + "You" (or "Your") shall mean an individual or Legal Entity + exercising permissions granted by this License. + + "Source" form shall mean the preferred form for making modifications, + including but not limited to software source code, documentation + source, and configuration files. + + "Object" form shall mean any form resulting from mechanical + transformation or translation of a Source form, including but + not limited to compiled object code, generated documentation, + and conversions to other media types. + + "Work" shall mean the work of authorship, whether in Source or + Object form, made available under the License, as indicated by a + copyright notice that is included in or attached to the work + (an example is provided in the Appendix below). + + "Derivative Works" shall mean any work, whether in Source or Object + form, that is based on (or derived from) the Work and for which the + editorial revisions, annotations, elaborations, or other modifications + represent, as a whole, an original work of authorship. For the purposes + of this License, Derivative Works shall not include works that remain + separable from, or merely link (or bind by name) to the interfaces of, + the Work and Derivative Works thereof. 
+ + "Contribution" shall mean any work of authorship, including + the original version of the Work and any modifications or additions + to that Work or Derivative Works thereof, that is intentionally + submitted to Licensor for inclusion in the Work by the copyright owner + or by an individual or Legal Entity authorized to submit on behalf of + the copyright owner. For the purposes of this definition, "submitted" + means any form of electronic, verbal, or written communication sent + to the Licensor or its representatives, including but not limited to + communication on electronic mailing lists, source code control systems, + and issue tracking systems that are managed by, or on behalf of, the + Licensor for the purpose of discussing and improving the Work, but + excluding communication that is conspicuously marked or otherwise + designated in writing by the copyright owner as "Not a Contribution." + + "Contributor" shall mean Licensor and any individual or Legal Entity + on behalf of whom a Contribution has been received by Licensor and + subsequently incorporated within the Work. + + 2. Grant of Copyright License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + copyright license to reproduce, prepare Derivative Works of, + publicly display, publicly perform, sublicense, and distribute the + Work and such Derivative Works in Source or Object form. + + 3. Grant of Patent License. 
Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + (except as stated in this section) patent license to make, have made, + use, offer to sell, sell, import, and otherwise transfer the Work, + where such license applies only to those patent claims licensable + by such Contributor that are necessarily infringed by their + Contribution(s) alone or by combination of their Contribution(s) + with the Work to which such Contribution(s) was submitted. If You + institute patent litigation against any entity (including a + cross-claim or counterclaim in a lawsuit) alleging that the Work + or a Contribution incorporated within the Work constitutes direct + or contributory patent infringement, then any patent licenses + granted to You under this License for that Work shall terminate + as of the date such litigation is filed. + + 4. Redistribution. You may reproduce and distribute copies of the + Work or Derivative Works thereof in any medium, with or without + modifications, and in Source or Object form, provided that You + meet the following conditions: + + (a) You must give any other recipients of the Work or + Derivative Works a copy of this License; and + + (b) You must cause any modified files to carry prominent notices + stating that You changed the files; and + + (c) You must retain, in the Source form of any Derivative Works + that You distribute, all copyright, patent, trademark, and + attribution notices from the Source form of the Work, + excluding those notices that do not pertain to any part of + the Derivative Works; and + + (d) If the Work includes a "NOTICE" text file as part of its + distribution, then any Derivative Works that You distribute must + include a readable copy of the attribution notices contained + within such NOTICE file, excluding those notices that do not + pertain to any part of the Derivative Works, in at least one + of 
the following places: within a NOTICE text file distributed + as part of the Derivative Works; within the Source form or + documentation, if provided along with the Derivative Works; or, + within a display generated by the Derivative Works, if and + wherever such third-party notices normally appear. The contents + of the NOTICE file are for informational purposes only and + do not modify the License. You may add Your own attribution + notices within Derivative Works that You distribute, alongside + or as an addendum to the NOTICE text from the Work, provided + that such additional attribution notices cannot be construed + as modifying the License. + + You may add Your own copyright statement to Your modifications and + may provide additional or different license terms and conditions + for use, reproduction, or distribution of Your modifications, or + for any such Derivative Works as a whole, provided Your use, + reproduction, and distribution of the Work otherwise complies with + the conditions stated in this License. + + 5. Submission of Contributions. Unless You explicitly state otherwise, + any Contribution intentionally submitted for inclusion in the Work + by You to the Licensor shall be under the terms and conditions of + this License, without any additional terms or conditions. + Notwithstanding the above, nothing herein shall supersede or modify + the terms of any separate license agreement you may have executed + with Licensor regarding such Contributions. + + 6. Trademarks. This License does not grant permission to use the trade + names, trademarks, service marks, or product names of the Licensor, + except as required for reasonable and customary use in describing the + origin of the Work and reproducing the content of the NOTICE file. + + 7. Disclaimer of Warranty. 
Unless required by applicable law or + agreed to in writing, Licensor provides the Work (and each + Contributor provides its Contributions) on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or + implied, including, without limitation, any warranties or conditions + of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A + PARTICULAR PURPOSE. You are solely responsible for determining the + appropriateness of using or redistributing the Work and assume any + risks associated with Your exercise of permissions under this License. + + 8. Limitation of Liability. In no event and under no legal theory, + whether in tort (including negligence), contract, or otherwise, + unless required by applicable law (such as deliberate and grossly + negligent acts) or agreed to in writing, shall any Contributor be + liable to You for damages, including any direct, indirect, special, + incidental, or consequential damages of any character arising as a + result of this License or out of the use or inability to use the + Work (including but not limited to damages for loss of goodwill, + work stoppage, computer failure or malfunction, or any and all + other commercial damages or losses), even if such Contributor + has been advised of the possibility of such damages. + + 9. Accepting Warranty or Additional Liability. While redistributing + the Work or Derivative Works thereof, You may choose to offer, + and charge a fee for, acceptance of support, warranty, indemnity, + or other liability obligations and/or rights consistent with this + License. However, in accepting such obligations, You may act only + on Your own behalf and on Your sole responsibility, not on behalf + of any other Contributor, and only if You agree to indemnify, + defend, and hold each Contributor harmless for any liability + incurred by, or claims asserted against, such Contributor by reason + of your accepting any such warranty or additional liability. 
+ + END OF TERMS AND CONDITIONS + + APPENDIX: How to apply the Apache License to your work. + + To apply the Apache License to your work, attach the following + boilerplate notice, with the fields enclosed by brackets "{}" + replaced with your own identifying information. (Don't include + the brackets!) The text should be enclosed in the appropriate + comment syntax for the file format. We also recommend that a + file or class name and description of purpose be included on the + same "printed page" as the copyright notice for easier + identification within third-party archives. + + Copyright 2018-2020 Amazon.com, Inc. or its affiliates. All Rights Reserved. + + Licensed under the Apache License, Version 2.0 (the "License"); + you may not use this file except in compliance with the License. + You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + + Unless required by applicable law or agreed to in writing, software + distributed under the License is distributed on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + See the License for the specific language governing permissions and + limitations under the License. diff --git a/amplify/functions/fetchDocuments/node_modules/@aws-sdk/client-dynamodb/README.md b/amplify/functions/fetchDocuments/node_modules/@aws-sdk/client-dynamodb/README.md new file mode 100644 index 0000000..c1da3dc --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@aws-sdk/client-dynamodb/README.md @@ -0,0 +1,678 @@ + + +# @aws-sdk/client-dynamodb + +## Description + +AWS SDK for JavaScript DynamoDB Client for Node.js, Browser and React Native. + +Amazon DynamoDB + +

Amazon DynamoDB is a fully managed NoSQL database service that provides fast +and predictable performance with seamless scalability. DynamoDB lets you +offload the administrative burdens of operating and scaling a distributed database, so +that you don't have to worry about hardware provisioning, setup and configuration, +replication, software patching, or cluster scaling.

+

With DynamoDB, you can create database tables that can store and retrieve +any amount of data, and serve any level of request traffic. You can scale up or scale +down your tables' throughput capacity without downtime or performance degradation, and +use the Amazon Web Services Management Console to monitor resource utilization and performance +metrics.

+

DynamoDB automatically spreads the data and traffic for your tables over +a sufficient number of servers to handle your throughput and storage requirements, while +maintaining consistent and fast performance. All of your data is stored on solid state +disks (SSDs) and automatically replicated across multiple Availability Zones in an +Amazon Web Services Region, providing built-in high availability and data +durability.

+ +## Installing + +To install this package, simply type add or install @aws-sdk/client-dynamodb +using your favorite package manager: + +- `npm install @aws-sdk/client-dynamodb` +- `yarn add @aws-sdk/client-dynamodb` +- `pnpm add @aws-sdk/client-dynamodb` + +## Getting Started + +### Import + +The AWS SDK is modulized by clients and commands. +To send a request, you only need to import the `DynamoDBClient` and +the commands you need, for example `ListBackupsCommand`: + +```js +// ES5 example +const { DynamoDBClient, ListBackupsCommand } = require("@aws-sdk/client-dynamodb"); +``` + +```ts +// ES6+ example +import { DynamoDBClient, ListBackupsCommand } from "@aws-sdk/client-dynamodb"; +``` + +### Usage + +To send a request, you: + +- Initiate client with configuration (e.g. credentials, region). +- Initiate command with input parameters. +- Call `send` operation on client with command object as input. +- If you are using a custom http handler, you may call `destroy()` to close open connections. + +```js +// a client can be shared by different commands. +const client = new DynamoDBClient({ region: "REGION" }); + +const params = { + /** input parameters */ +}; +const command = new ListBackupsCommand(params); +``` + +#### Async/await + +We recommend using [await](https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Operators/await) +operator to wait for the promise returned by send operation as follows: + +```js +// async/await. +try { + const data = await client.send(command); + // process data. +} catch (error) { + // error handling. +} finally { + // finally. +} +``` + +Async-await is clean, concise, intuitive, easy to debug and has better error handling +as compared to using Promise chains or callbacks. + +#### Promises + +You can also use [Promise chaining](https://developer.mozilla.org/en-US/docs/Web/JavaScript/Guide/Using_promises#chaining) +to execute send operation. + +```js +client.send(command).then( + (data) => { + // process data. 
+ }, + (error) => { + // error handling. + } +); +``` + +Promises can also be called using `.catch()` and `.finally()` as follows: + +```js +client + .send(command) + .then((data) => { + // process data. + }) + .catch((error) => { + // error handling. + }) + .finally(() => { + // finally. + }); +``` + +#### Callbacks + +We do not recommend using callbacks because of [callback hell](http://callbackhell.com/), +but they are supported by the send operation. + +```js +// callbacks. +client.send(command, (err, data) => { + // process err and data. +}); +``` + +#### v2 compatible style + +The client can also send requests using v2 compatible style. +However, it results in a bigger bundle size and may be dropped in next major version. More details in the blog post +on [modular packages in AWS SDK for JavaScript](https://aws.amazon.com/blogs/developer/modular-packages-in-aws-sdk-for-javascript/) + +```ts +import * as AWS from "@aws-sdk/client-dynamodb"; +const client = new AWS.DynamoDB({ region: "REGION" }); + +// async/await. +try { + const data = await client.listBackups(params); + // process data. +} catch (error) { + // error handling. +} + +// Promises. +client + .listBackups(params) + .then((data) => { + // process data. + }) + .catch((error) => { + // error handling. + }); + +// callbacks. +client.listBackups(params, (err, data) => { + // process err and data. +}); +``` + +### Troubleshooting + +When the service returns an exception, the error will include the exception information, +as well as response metadata (e.g. request id). + +```js +try { + const data = await client.send(command); + // process data. +} catch (error) { + const { requestId, cfId, extendedRequestId } = error.$metadata; + console.log({ requestId, cfId, extendedRequestId }); + /** + * The keys within exceptions are also parsed. 
+ * You can access them by specifying exception names: + * if (error.name === 'SomeServiceException') { + * const value = error.specialKeyInException; + * } + */ +} +``` + +## Getting Help + +Please use these community resources for getting help. +We use the GitHub issues for tracking bugs and feature requests, but have limited bandwidth to address them. + +- Visit [Developer Guide](https://docs.aws.amazon.com/sdk-for-javascript/v3/developer-guide/welcome.html) + or [API Reference](https://docs.aws.amazon.com/AWSJavaScriptSDK/v3/latest/index.html). +- Check out the blog posts tagged with [`aws-sdk-js`](https://aws.amazon.com/blogs/developer/tag/aws-sdk-js/) + on AWS Developer Blog. +- Ask a question on [StackOverflow](https://stackoverflow.com/questions/tagged/aws-sdk-js) and tag it with `aws-sdk-js`. +- Join the AWS JavaScript community on [gitter](https://gitter.im/aws/aws-sdk-js-v3). +- If it turns out that you may have found a bug, please [open an issue](https://github.com/aws/aws-sdk-js-v3/issues/new/choose). + +To test your universal JavaScript code in Node.js, browser and react-native environments, +visit our [code samples repo](https://github.com/aws-samples/aws-sdk-js-tests). + +## Contributing + +This client code is generated automatically. Any modifications will be overwritten the next time the `@aws-sdk/client-dynamodb` package is updated. +To contribute to client you can check our [generate clients scripts](https://github.com/aws/aws-sdk-js-v3/tree/main/scripts/generate-clients). + +## License + +This SDK is distributed under the +[Apache License, Version 2.0](http://www.apache.org/licenses/LICENSE-2.0), +see LICENSE for more information. + +## Client Commands (Operations List) + +
+ +BatchExecuteStatement + + +[Command API Reference](https://docs.aws.amazon.com/AWSJavaScriptSDK/v3/latest/client/dynamodb/command/BatchExecuteStatementCommand/) / [Input](https://docs.aws.amazon.com/AWSJavaScriptSDK/v3/latest/Package/-aws-sdk-client-dynamodb/Interface/BatchExecuteStatementCommandInput/) / [Output](https://docs.aws.amazon.com/AWSJavaScriptSDK/v3/latest/Package/-aws-sdk-client-dynamodb/Interface/BatchExecuteStatementCommandOutput/) + +
+
+ +BatchGetItem + + +[Command API Reference](https://docs.aws.amazon.com/AWSJavaScriptSDK/v3/latest/client/dynamodb/command/BatchGetItemCommand/) / [Input](https://docs.aws.amazon.com/AWSJavaScriptSDK/v3/latest/Package/-aws-sdk-client-dynamodb/Interface/BatchGetItemCommandInput/) / [Output](https://docs.aws.amazon.com/AWSJavaScriptSDK/v3/latest/Package/-aws-sdk-client-dynamodb/Interface/BatchGetItemCommandOutput/) + +
+
+ +BatchWriteItem + + +[Command API Reference](https://docs.aws.amazon.com/AWSJavaScriptSDK/v3/latest/client/dynamodb/command/BatchWriteItemCommand/) / [Input](https://docs.aws.amazon.com/AWSJavaScriptSDK/v3/latest/Package/-aws-sdk-client-dynamodb/Interface/BatchWriteItemCommandInput/) / [Output](https://docs.aws.amazon.com/AWSJavaScriptSDK/v3/latest/Package/-aws-sdk-client-dynamodb/Interface/BatchWriteItemCommandOutput/) + +
+
+ +CreateBackup + + +[Command API Reference](https://docs.aws.amazon.com/AWSJavaScriptSDK/v3/latest/client/dynamodb/command/CreateBackupCommand/) / [Input](https://docs.aws.amazon.com/AWSJavaScriptSDK/v3/latest/Package/-aws-sdk-client-dynamodb/Interface/CreateBackupCommandInput/) / [Output](https://docs.aws.amazon.com/AWSJavaScriptSDK/v3/latest/Package/-aws-sdk-client-dynamodb/Interface/CreateBackupCommandOutput/) + +
+
+ +CreateGlobalTable + + +[Command API Reference](https://docs.aws.amazon.com/AWSJavaScriptSDK/v3/latest/client/dynamodb/command/CreateGlobalTableCommand/) / [Input](https://docs.aws.amazon.com/AWSJavaScriptSDK/v3/latest/Package/-aws-sdk-client-dynamodb/Interface/CreateGlobalTableCommandInput/) / [Output](https://docs.aws.amazon.com/AWSJavaScriptSDK/v3/latest/Package/-aws-sdk-client-dynamodb/Interface/CreateGlobalTableCommandOutput/) + +
+
+ +CreateTable + + +[Command API Reference](https://docs.aws.amazon.com/AWSJavaScriptSDK/v3/latest/client/dynamodb/command/CreateTableCommand/) / [Input](https://docs.aws.amazon.com/AWSJavaScriptSDK/v3/latest/Package/-aws-sdk-client-dynamodb/Interface/CreateTableCommandInput/) / [Output](https://docs.aws.amazon.com/AWSJavaScriptSDK/v3/latest/Package/-aws-sdk-client-dynamodb/Interface/CreateTableCommandOutput/) + +
+
+ +DeleteBackup + + +[Command API Reference](https://docs.aws.amazon.com/AWSJavaScriptSDK/v3/latest/client/dynamodb/command/DeleteBackupCommand/) / [Input](https://docs.aws.amazon.com/AWSJavaScriptSDK/v3/latest/Package/-aws-sdk-client-dynamodb/Interface/DeleteBackupCommandInput/) / [Output](https://docs.aws.amazon.com/AWSJavaScriptSDK/v3/latest/Package/-aws-sdk-client-dynamodb/Interface/DeleteBackupCommandOutput/) + +
+
+ +DeleteItem + + +[Command API Reference](https://docs.aws.amazon.com/AWSJavaScriptSDK/v3/latest/client/dynamodb/command/DeleteItemCommand/) / [Input](https://docs.aws.amazon.com/AWSJavaScriptSDK/v3/latest/Package/-aws-sdk-client-dynamodb/Interface/DeleteItemCommandInput/) / [Output](https://docs.aws.amazon.com/AWSJavaScriptSDK/v3/latest/Package/-aws-sdk-client-dynamodb/Interface/DeleteItemCommandOutput/) + +
+
+ +DeleteResourcePolicy + + +[Command API Reference](https://docs.aws.amazon.com/AWSJavaScriptSDK/v3/latest/client/dynamodb/command/DeleteResourcePolicyCommand/) / [Input](https://docs.aws.amazon.com/AWSJavaScriptSDK/v3/latest/Package/-aws-sdk-client-dynamodb/Interface/DeleteResourcePolicyCommandInput/) / [Output](https://docs.aws.amazon.com/AWSJavaScriptSDK/v3/latest/Package/-aws-sdk-client-dynamodb/Interface/DeleteResourcePolicyCommandOutput/) + +
+
+ +DeleteTable + + +[Command API Reference](https://docs.aws.amazon.com/AWSJavaScriptSDK/v3/latest/client/dynamodb/command/DeleteTableCommand/) / [Input](https://docs.aws.amazon.com/AWSJavaScriptSDK/v3/latest/Package/-aws-sdk-client-dynamodb/Interface/DeleteTableCommandInput/) / [Output](https://docs.aws.amazon.com/AWSJavaScriptSDK/v3/latest/Package/-aws-sdk-client-dynamodb/Interface/DeleteTableCommandOutput/) + +
+
+ +DescribeBackup + + +[Command API Reference](https://docs.aws.amazon.com/AWSJavaScriptSDK/v3/latest/client/dynamodb/command/DescribeBackupCommand/) / [Input](https://docs.aws.amazon.com/AWSJavaScriptSDK/v3/latest/Package/-aws-sdk-client-dynamodb/Interface/DescribeBackupCommandInput/) / [Output](https://docs.aws.amazon.com/AWSJavaScriptSDK/v3/latest/Package/-aws-sdk-client-dynamodb/Interface/DescribeBackupCommandOutput/) + +
+
+ +DescribeContinuousBackups + + +[Command API Reference](https://docs.aws.amazon.com/AWSJavaScriptSDK/v3/latest/client/dynamodb/command/DescribeContinuousBackupsCommand/) / [Input](https://docs.aws.amazon.com/AWSJavaScriptSDK/v3/latest/Package/-aws-sdk-client-dynamodb/Interface/DescribeContinuousBackupsCommandInput/) / [Output](https://docs.aws.amazon.com/AWSJavaScriptSDK/v3/latest/Package/-aws-sdk-client-dynamodb/Interface/DescribeContinuousBackupsCommandOutput/) + +
+
+ +DescribeContributorInsights + + +[Command API Reference](https://docs.aws.amazon.com/AWSJavaScriptSDK/v3/latest/client/dynamodb/command/DescribeContributorInsightsCommand/) / [Input](https://docs.aws.amazon.com/AWSJavaScriptSDK/v3/latest/Package/-aws-sdk-client-dynamodb/Interface/DescribeContributorInsightsCommandInput/) / [Output](https://docs.aws.amazon.com/AWSJavaScriptSDK/v3/latest/Package/-aws-sdk-client-dynamodb/Interface/DescribeContributorInsightsCommandOutput/) + +
+
+ +DescribeEndpoints + + +[Command API Reference](https://docs.aws.amazon.com/AWSJavaScriptSDK/v3/latest/client/dynamodb/command/DescribeEndpointsCommand/) / [Input](https://docs.aws.amazon.com/AWSJavaScriptSDK/v3/latest/Package/-aws-sdk-client-dynamodb/Interface/DescribeEndpointsCommandInput/) / [Output](https://docs.aws.amazon.com/AWSJavaScriptSDK/v3/latest/Package/-aws-sdk-client-dynamodb/Interface/DescribeEndpointsCommandOutput/) + +
+
+ +DescribeExport + + +[Command API Reference](https://docs.aws.amazon.com/AWSJavaScriptSDK/v3/latest/client/dynamodb/command/DescribeExportCommand/) / [Input](https://docs.aws.amazon.com/AWSJavaScriptSDK/v3/latest/Package/-aws-sdk-client-dynamodb/Interface/DescribeExportCommandInput/) / [Output](https://docs.aws.amazon.com/AWSJavaScriptSDK/v3/latest/Package/-aws-sdk-client-dynamodb/Interface/DescribeExportCommandOutput/) + +
+
+ +DescribeGlobalTable + + +[Command API Reference](https://docs.aws.amazon.com/AWSJavaScriptSDK/v3/latest/client/dynamodb/command/DescribeGlobalTableCommand/) / [Input](https://docs.aws.amazon.com/AWSJavaScriptSDK/v3/latest/Package/-aws-sdk-client-dynamodb/Interface/DescribeGlobalTableCommandInput/) / [Output](https://docs.aws.amazon.com/AWSJavaScriptSDK/v3/latest/Package/-aws-sdk-client-dynamodb/Interface/DescribeGlobalTableCommandOutput/) + +
+
+ +DescribeGlobalTableSettings + + +[Command API Reference](https://docs.aws.amazon.com/AWSJavaScriptSDK/v3/latest/client/dynamodb/command/DescribeGlobalTableSettingsCommand/) / [Input](https://docs.aws.amazon.com/AWSJavaScriptSDK/v3/latest/Package/-aws-sdk-client-dynamodb/Interface/DescribeGlobalTableSettingsCommandInput/) / [Output](https://docs.aws.amazon.com/AWSJavaScriptSDK/v3/latest/Package/-aws-sdk-client-dynamodb/Interface/DescribeGlobalTableSettingsCommandOutput/) + +
+
+ +DescribeImport + + +[Command API Reference](https://docs.aws.amazon.com/AWSJavaScriptSDK/v3/latest/client/dynamodb/command/DescribeImportCommand/) / [Input](https://docs.aws.amazon.com/AWSJavaScriptSDK/v3/latest/Package/-aws-sdk-client-dynamodb/Interface/DescribeImportCommandInput/) / [Output](https://docs.aws.amazon.com/AWSJavaScriptSDK/v3/latest/Package/-aws-sdk-client-dynamodb/Interface/DescribeImportCommandOutput/) + +
+
+ +DescribeKinesisStreamingDestination + + +[Command API Reference](https://docs.aws.amazon.com/AWSJavaScriptSDK/v3/latest/client/dynamodb/command/DescribeKinesisStreamingDestinationCommand/) / [Input](https://docs.aws.amazon.com/AWSJavaScriptSDK/v3/latest/Package/-aws-sdk-client-dynamodb/Interface/DescribeKinesisStreamingDestinationCommandInput/) / [Output](https://docs.aws.amazon.com/AWSJavaScriptSDK/v3/latest/Package/-aws-sdk-client-dynamodb/Interface/DescribeKinesisStreamingDestinationCommandOutput/) + +
+
+ +DescribeLimits + + +[Command API Reference](https://docs.aws.amazon.com/AWSJavaScriptSDK/v3/latest/client/dynamodb/command/DescribeLimitsCommand/) / [Input](https://docs.aws.amazon.com/AWSJavaScriptSDK/v3/latest/Package/-aws-sdk-client-dynamodb/Interface/DescribeLimitsCommandInput/) / [Output](https://docs.aws.amazon.com/AWSJavaScriptSDK/v3/latest/Package/-aws-sdk-client-dynamodb/Interface/DescribeLimitsCommandOutput/) + +
+
+ +DescribeTable + + +[Command API Reference](https://docs.aws.amazon.com/AWSJavaScriptSDK/v3/latest/client/dynamodb/command/DescribeTableCommand/) / [Input](https://docs.aws.amazon.com/AWSJavaScriptSDK/v3/latest/Package/-aws-sdk-client-dynamodb/Interface/DescribeTableCommandInput/) / [Output](https://docs.aws.amazon.com/AWSJavaScriptSDK/v3/latest/Package/-aws-sdk-client-dynamodb/Interface/DescribeTableCommandOutput/) + +
+
+ +DescribeTableReplicaAutoScaling + + +[Command API Reference](https://docs.aws.amazon.com/AWSJavaScriptSDK/v3/latest/client/dynamodb/command/DescribeTableReplicaAutoScalingCommand/) / [Input](https://docs.aws.amazon.com/AWSJavaScriptSDK/v3/latest/Package/-aws-sdk-client-dynamodb/Interface/DescribeTableReplicaAutoScalingCommandInput/) / [Output](https://docs.aws.amazon.com/AWSJavaScriptSDK/v3/latest/Package/-aws-sdk-client-dynamodb/Interface/DescribeTableReplicaAutoScalingCommandOutput/) + +
+
+ +DescribeTimeToLive + + +[Command API Reference](https://docs.aws.amazon.com/AWSJavaScriptSDK/v3/latest/client/dynamodb/command/DescribeTimeToLiveCommand/) / [Input](https://docs.aws.amazon.com/AWSJavaScriptSDK/v3/latest/Package/-aws-sdk-client-dynamodb/Interface/DescribeTimeToLiveCommandInput/) / [Output](https://docs.aws.amazon.com/AWSJavaScriptSDK/v3/latest/Package/-aws-sdk-client-dynamodb/Interface/DescribeTimeToLiveCommandOutput/) + +
+
+ +DisableKinesisStreamingDestination + + +[Command API Reference](https://docs.aws.amazon.com/AWSJavaScriptSDK/v3/latest/client/dynamodb/command/DisableKinesisStreamingDestinationCommand/) / [Input](https://docs.aws.amazon.com/AWSJavaScriptSDK/v3/latest/Package/-aws-sdk-client-dynamodb/Interface/DisableKinesisStreamingDestinationCommandInput/) / [Output](https://docs.aws.amazon.com/AWSJavaScriptSDK/v3/latest/Package/-aws-sdk-client-dynamodb/Interface/DisableKinesisStreamingDestinationCommandOutput/) + +
+
+ +EnableKinesisStreamingDestination + + +[Command API Reference](https://docs.aws.amazon.com/AWSJavaScriptSDK/v3/latest/client/dynamodb/command/EnableKinesisStreamingDestinationCommand/) / [Input](https://docs.aws.amazon.com/AWSJavaScriptSDK/v3/latest/Package/-aws-sdk-client-dynamodb/Interface/EnableKinesisStreamingDestinationCommandInput/) / [Output](https://docs.aws.amazon.com/AWSJavaScriptSDK/v3/latest/Package/-aws-sdk-client-dynamodb/Interface/EnableKinesisStreamingDestinationCommandOutput/) + +
+
+ +ExecuteStatement + + +[Command API Reference](https://docs.aws.amazon.com/AWSJavaScriptSDK/v3/latest/client/dynamodb/command/ExecuteStatementCommand/) / [Input](https://docs.aws.amazon.com/AWSJavaScriptSDK/v3/latest/Package/-aws-sdk-client-dynamodb/Interface/ExecuteStatementCommandInput/) / [Output](https://docs.aws.amazon.com/AWSJavaScriptSDK/v3/latest/Package/-aws-sdk-client-dynamodb/Interface/ExecuteStatementCommandOutput/) + +
+
+ +ExecuteTransaction + + +[Command API Reference](https://docs.aws.amazon.com/AWSJavaScriptSDK/v3/latest/client/dynamodb/command/ExecuteTransactionCommand/) / [Input](https://docs.aws.amazon.com/AWSJavaScriptSDK/v3/latest/Package/-aws-sdk-client-dynamodb/Interface/ExecuteTransactionCommandInput/) / [Output](https://docs.aws.amazon.com/AWSJavaScriptSDK/v3/latest/Package/-aws-sdk-client-dynamodb/Interface/ExecuteTransactionCommandOutput/) + +
+
+ +ExportTableToPointInTime + + +[Command API Reference](https://docs.aws.amazon.com/AWSJavaScriptSDK/v3/latest/client/dynamodb/command/ExportTableToPointInTimeCommand/) / [Input](https://docs.aws.amazon.com/AWSJavaScriptSDK/v3/latest/Package/-aws-sdk-client-dynamodb/Interface/ExportTableToPointInTimeCommandInput/) / [Output](https://docs.aws.amazon.com/AWSJavaScriptSDK/v3/latest/Package/-aws-sdk-client-dynamodb/Interface/ExportTableToPointInTimeCommandOutput/) + +
+
+ +GetItem + + +[Command API Reference](https://docs.aws.amazon.com/AWSJavaScriptSDK/v3/latest/client/dynamodb/command/GetItemCommand/) / [Input](https://docs.aws.amazon.com/AWSJavaScriptSDK/v3/latest/Package/-aws-sdk-client-dynamodb/Interface/GetItemCommandInput/) / [Output](https://docs.aws.amazon.com/AWSJavaScriptSDK/v3/latest/Package/-aws-sdk-client-dynamodb/Interface/GetItemCommandOutput/) + +
+
+ +GetResourcePolicy + + +[Command API Reference](https://docs.aws.amazon.com/AWSJavaScriptSDK/v3/latest/client/dynamodb/command/GetResourcePolicyCommand/) / [Input](https://docs.aws.amazon.com/AWSJavaScriptSDK/v3/latest/Package/-aws-sdk-client-dynamodb/Interface/GetResourcePolicyCommandInput/) / [Output](https://docs.aws.amazon.com/AWSJavaScriptSDK/v3/latest/Package/-aws-sdk-client-dynamodb/Interface/GetResourcePolicyCommandOutput/) + +
+
+ +ImportTable + + +[Command API Reference](https://docs.aws.amazon.com/AWSJavaScriptSDK/v3/latest/client/dynamodb/command/ImportTableCommand/) / [Input](https://docs.aws.amazon.com/AWSJavaScriptSDK/v3/latest/Package/-aws-sdk-client-dynamodb/Interface/ImportTableCommandInput/) / [Output](https://docs.aws.amazon.com/AWSJavaScriptSDK/v3/latest/Package/-aws-sdk-client-dynamodb/Interface/ImportTableCommandOutput/) + +
+
+ +ListBackups + + +[Command API Reference](https://docs.aws.amazon.com/AWSJavaScriptSDK/v3/latest/client/dynamodb/command/ListBackupsCommand/) / [Input](https://docs.aws.amazon.com/AWSJavaScriptSDK/v3/latest/Package/-aws-sdk-client-dynamodb/Interface/ListBackupsCommandInput/) / [Output](https://docs.aws.amazon.com/AWSJavaScriptSDK/v3/latest/Package/-aws-sdk-client-dynamodb/Interface/ListBackupsCommandOutput/) + +
+
+ +ListContributorInsights + + +[Command API Reference](https://docs.aws.amazon.com/AWSJavaScriptSDK/v3/latest/client/dynamodb/command/ListContributorInsightsCommand/) / [Input](https://docs.aws.amazon.com/AWSJavaScriptSDK/v3/latest/Package/-aws-sdk-client-dynamodb/Interface/ListContributorInsightsCommandInput/) / [Output](https://docs.aws.amazon.com/AWSJavaScriptSDK/v3/latest/Package/-aws-sdk-client-dynamodb/Interface/ListContributorInsightsCommandOutput/) + +
+
+ +ListExports + + +[Command API Reference](https://docs.aws.amazon.com/AWSJavaScriptSDK/v3/latest/client/dynamodb/command/ListExportsCommand/) / [Input](https://docs.aws.amazon.com/AWSJavaScriptSDK/v3/latest/Package/-aws-sdk-client-dynamodb/Interface/ListExportsCommandInput/) / [Output](https://docs.aws.amazon.com/AWSJavaScriptSDK/v3/latest/Package/-aws-sdk-client-dynamodb/Interface/ListExportsCommandOutput/) + +
+
+ +ListGlobalTables + + +[Command API Reference](https://docs.aws.amazon.com/AWSJavaScriptSDK/v3/latest/client/dynamodb/command/ListGlobalTablesCommand/) / [Input](https://docs.aws.amazon.com/AWSJavaScriptSDK/v3/latest/Package/-aws-sdk-client-dynamodb/Interface/ListGlobalTablesCommandInput/) / [Output](https://docs.aws.amazon.com/AWSJavaScriptSDK/v3/latest/Package/-aws-sdk-client-dynamodb/Interface/ListGlobalTablesCommandOutput/) + +
+
+ +ListImports + + +[Command API Reference](https://docs.aws.amazon.com/AWSJavaScriptSDK/v3/latest/client/dynamodb/command/ListImportsCommand/) / [Input](https://docs.aws.amazon.com/AWSJavaScriptSDK/v3/latest/Package/-aws-sdk-client-dynamodb/Interface/ListImportsCommandInput/) / [Output](https://docs.aws.amazon.com/AWSJavaScriptSDK/v3/latest/Package/-aws-sdk-client-dynamodb/Interface/ListImportsCommandOutput/) + +
+
+ +ListTables + + +[Command API Reference](https://docs.aws.amazon.com/AWSJavaScriptSDK/v3/latest/client/dynamodb/command/ListTablesCommand/) / [Input](https://docs.aws.amazon.com/AWSJavaScriptSDK/v3/latest/Package/-aws-sdk-client-dynamodb/Interface/ListTablesCommandInput/) / [Output](https://docs.aws.amazon.com/AWSJavaScriptSDK/v3/latest/Package/-aws-sdk-client-dynamodb/Interface/ListTablesCommandOutput/) + +
+
+ +ListTagsOfResource + + +[Command API Reference](https://docs.aws.amazon.com/AWSJavaScriptSDK/v3/latest/client/dynamodb/command/ListTagsOfResourceCommand/) / [Input](https://docs.aws.amazon.com/AWSJavaScriptSDK/v3/latest/Package/-aws-sdk-client-dynamodb/Interface/ListTagsOfResourceCommandInput/) / [Output](https://docs.aws.amazon.com/AWSJavaScriptSDK/v3/latest/Package/-aws-sdk-client-dynamodb/Interface/ListTagsOfResourceCommandOutput/) + +
+
+ +PutItem + + +[Command API Reference](https://docs.aws.amazon.com/AWSJavaScriptSDK/v3/latest/client/dynamodb/command/PutItemCommand/) / [Input](https://docs.aws.amazon.com/AWSJavaScriptSDK/v3/latest/Package/-aws-sdk-client-dynamodb/Interface/PutItemCommandInput/) / [Output](https://docs.aws.amazon.com/AWSJavaScriptSDK/v3/latest/Package/-aws-sdk-client-dynamodb/Interface/PutItemCommandOutput/) + +
+
+ +PutResourcePolicy + + +[Command API Reference](https://docs.aws.amazon.com/AWSJavaScriptSDK/v3/latest/client/dynamodb/command/PutResourcePolicyCommand/) / [Input](https://docs.aws.amazon.com/AWSJavaScriptSDK/v3/latest/Package/-aws-sdk-client-dynamodb/Interface/PutResourcePolicyCommandInput/) / [Output](https://docs.aws.amazon.com/AWSJavaScriptSDK/v3/latest/Package/-aws-sdk-client-dynamodb/Interface/PutResourcePolicyCommandOutput/) + +
+
+ +Query + + +[Command API Reference](https://docs.aws.amazon.com/AWSJavaScriptSDK/v3/latest/client/dynamodb/command/QueryCommand/) / [Input](https://docs.aws.amazon.com/AWSJavaScriptSDK/v3/latest/Package/-aws-sdk-client-dynamodb/Interface/QueryCommandInput/) / [Output](https://docs.aws.amazon.com/AWSJavaScriptSDK/v3/latest/Package/-aws-sdk-client-dynamodb/Interface/QueryCommandOutput/) + +
+
+ +RestoreTableFromBackup + + +[Command API Reference](https://docs.aws.amazon.com/AWSJavaScriptSDK/v3/latest/client/dynamodb/command/RestoreTableFromBackupCommand/) / [Input](https://docs.aws.amazon.com/AWSJavaScriptSDK/v3/latest/Package/-aws-sdk-client-dynamodb/Interface/RestoreTableFromBackupCommandInput/) / [Output](https://docs.aws.amazon.com/AWSJavaScriptSDK/v3/latest/Package/-aws-sdk-client-dynamodb/Interface/RestoreTableFromBackupCommandOutput/) + +
+
+ +RestoreTableToPointInTime + + +[Command API Reference](https://docs.aws.amazon.com/AWSJavaScriptSDK/v3/latest/client/dynamodb/command/RestoreTableToPointInTimeCommand/) / [Input](https://docs.aws.amazon.com/AWSJavaScriptSDK/v3/latest/Package/-aws-sdk-client-dynamodb/Interface/RestoreTableToPointInTimeCommandInput/) / [Output](https://docs.aws.amazon.com/AWSJavaScriptSDK/v3/latest/Package/-aws-sdk-client-dynamodb/Interface/RestoreTableToPointInTimeCommandOutput/) + +
+
+ +Scan + + +[Command API Reference](https://docs.aws.amazon.com/AWSJavaScriptSDK/v3/latest/client/dynamodb/command/ScanCommand/) / [Input](https://docs.aws.amazon.com/AWSJavaScriptSDK/v3/latest/Package/-aws-sdk-client-dynamodb/Interface/ScanCommandInput/) / [Output](https://docs.aws.amazon.com/AWSJavaScriptSDK/v3/latest/Package/-aws-sdk-client-dynamodb/Interface/ScanCommandOutput/) + +
+
+ +TagResource + + +[Command API Reference](https://docs.aws.amazon.com/AWSJavaScriptSDK/v3/latest/client/dynamodb/command/TagResourceCommand/) / [Input](https://docs.aws.amazon.com/AWSJavaScriptSDK/v3/latest/Package/-aws-sdk-client-dynamodb/Interface/TagResourceCommandInput/) / [Output](https://docs.aws.amazon.com/AWSJavaScriptSDK/v3/latest/Package/-aws-sdk-client-dynamodb/Interface/TagResourceCommandOutput/) + +
+
+ +TransactGetItems + + +[Command API Reference](https://docs.aws.amazon.com/AWSJavaScriptSDK/v3/latest/client/dynamodb/command/TransactGetItemsCommand/) / [Input](https://docs.aws.amazon.com/AWSJavaScriptSDK/v3/latest/Package/-aws-sdk-client-dynamodb/Interface/TransactGetItemsCommandInput/) / [Output](https://docs.aws.amazon.com/AWSJavaScriptSDK/v3/latest/Package/-aws-sdk-client-dynamodb/Interface/TransactGetItemsCommandOutput/) + +
+
+ +TransactWriteItems + + +[Command API Reference](https://docs.aws.amazon.com/AWSJavaScriptSDK/v3/latest/client/dynamodb/command/TransactWriteItemsCommand/) / [Input](https://docs.aws.amazon.com/AWSJavaScriptSDK/v3/latest/Package/-aws-sdk-client-dynamodb/Interface/TransactWriteItemsCommandInput/) / [Output](https://docs.aws.amazon.com/AWSJavaScriptSDK/v3/latest/Package/-aws-sdk-client-dynamodb/Interface/TransactWriteItemsCommandOutput/) + +
+
+ +UntagResource + + +[Command API Reference](https://docs.aws.amazon.com/AWSJavaScriptSDK/v3/latest/client/dynamodb/command/UntagResourceCommand/) / [Input](https://docs.aws.amazon.com/AWSJavaScriptSDK/v3/latest/Package/-aws-sdk-client-dynamodb/Interface/UntagResourceCommandInput/) / [Output](https://docs.aws.amazon.com/AWSJavaScriptSDK/v3/latest/Package/-aws-sdk-client-dynamodb/Interface/UntagResourceCommandOutput/) + +
+
+ +UpdateContinuousBackups + + +[Command API Reference](https://docs.aws.amazon.com/AWSJavaScriptSDK/v3/latest/client/dynamodb/command/UpdateContinuousBackupsCommand/) / [Input](https://docs.aws.amazon.com/AWSJavaScriptSDK/v3/latest/Package/-aws-sdk-client-dynamodb/Interface/UpdateContinuousBackupsCommandInput/) / [Output](https://docs.aws.amazon.com/AWSJavaScriptSDK/v3/latest/Package/-aws-sdk-client-dynamodb/Interface/UpdateContinuousBackupsCommandOutput/) + +
+
+ +UpdateContributorInsights + + +[Command API Reference](https://docs.aws.amazon.com/AWSJavaScriptSDK/v3/latest/client/dynamodb/command/UpdateContributorInsightsCommand/) / [Input](https://docs.aws.amazon.com/AWSJavaScriptSDK/v3/latest/Package/-aws-sdk-client-dynamodb/Interface/UpdateContributorInsightsCommandInput/) / [Output](https://docs.aws.amazon.com/AWSJavaScriptSDK/v3/latest/Package/-aws-sdk-client-dynamodb/Interface/UpdateContributorInsightsCommandOutput/) + +
+
+ +UpdateGlobalTable + + +[Command API Reference](https://docs.aws.amazon.com/AWSJavaScriptSDK/v3/latest/client/dynamodb/command/UpdateGlobalTableCommand/) / [Input](https://docs.aws.amazon.com/AWSJavaScriptSDK/v3/latest/Package/-aws-sdk-client-dynamodb/Interface/UpdateGlobalTableCommandInput/) / [Output](https://docs.aws.amazon.com/AWSJavaScriptSDK/v3/latest/Package/-aws-sdk-client-dynamodb/Interface/UpdateGlobalTableCommandOutput/) + +
+
+ +UpdateGlobalTableSettings + + +[Command API Reference](https://docs.aws.amazon.com/AWSJavaScriptSDK/v3/latest/client/dynamodb/command/UpdateGlobalTableSettingsCommand/) / [Input](https://docs.aws.amazon.com/AWSJavaScriptSDK/v3/latest/Package/-aws-sdk-client-dynamodb/Interface/UpdateGlobalTableSettingsCommandInput/) / [Output](https://docs.aws.amazon.com/AWSJavaScriptSDK/v3/latest/Package/-aws-sdk-client-dynamodb/Interface/UpdateGlobalTableSettingsCommandOutput/) + +
+
+ +UpdateItem + + +[Command API Reference](https://docs.aws.amazon.com/AWSJavaScriptSDK/v3/latest/client/dynamodb/command/UpdateItemCommand/) / [Input](https://docs.aws.amazon.com/AWSJavaScriptSDK/v3/latest/Package/-aws-sdk-client-dynamodb/Interface/UpdateItemCommandInput/) / [Output](https://docs.aws.amazon.com/AWSJavaScriptSDK/v3/latest/Package/-aws-sdk-client-dynamodb/Interface/UpdateItemCommandOutput/) + +
+
+ +UpdateKinesisStreamingDestination + + +[Command API Reference](https://docs.aws.amazon.com/AWSJavaScriptSDK/v3/latest/client/dynamodb/command/UpdateKinesisStreamingDestinationCommand/) / [Input](https://docs.aws.amazon.com/AWSJavaScriptSDK/v3/latest/Package/-aws-sdk-client-dynamodb/Interface/UpdateKinesisStreamingDestinationCommandInput/) / [Output](https://docs.aws.amazon.com/AWSJavaScriptSDK/v3/latest/Package/-aws-sdk-client-dynamodb/Interface/UpdateKinesisStreamingDestinationCommandOutput/) + +
+
+ +UpdateTable + + +[Command API Reference](https://docs.aws.amazon.com/AWSJavaScriptSDK/v3/latest/client/dynamodb/command/UpdateTableCommand/) / [Input](https://docs.aws.amazon.com/AWSJavaScriptSDK/v3/latest/Package/-aws-sdk-client-dynamodb/Interface/UpdateTableCommandInput/) / [Output](https://docs.aws.amazon.com/AWSJavaScriptSDK/v3/latest/Package/-aws-sdk-client-dynamodb/Interface/UpdateTableCommandOutput/) + +
+
+ +UpdateTableReplicaAutoScaling + + +[Command API Reference](https://docs.aws.amazon.com/AWSJavaScriptSDK/v3/latest/client/dynamodb/command/UpdateTableReplicaAutoScalingCommand/) / [Input](https://docs.aws.amazon.com/AWSJavaScriptSDK/v3/latest/Package/-aws-sdk-client-dynamodb/Interface/UpdateTableReplicaAutoScalingCommandInput/) / [Output](https://docs.aws.amazon.com/AWSJavaScriptSDK/v3/latest/Package/-aws-sdk-client-dynamodb/Interface/UpdateTableReplicaAutoScalingCommandOutput/) + +
+
+ +UpdateTimeToLive + + +[Command API Reference](https://docs.aws.amazon.com/AWSJavaScriptSDK/v3/latest/client/dynamodb/command/UpdateTimeToLiveCommand/) / [Input](https://docs.aws.amazon.com/AWSJavaScriptSDK/v3/latest/Package/-aws-sdk-client-dynamodb/Interface/UpdateTimeToLiveCommandInput/) / [Output](https://docs.aws.amazon.com/AWSJavaScriptSDK/v3/latest/Package/-aws-sdk-client-dynamodb/Interface/UpdateTimeToLiveCommandOutput/) + +
diff --git a/amplify/functions/fetchDocuments/node_modules/@aws-sdk/client-dynamodb/dist-cjs/auth/httpAuthSchemeProvider.js b/amplify/functions/fetchDocuments/node_modules/@aws-sdk/client-dynamodb/dist-cjs/auth/httpAuthSchemeProvider.js new file mode 100644 index 0000000..db59164 --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@aws-sdk/client-dynamodb/dist-cjs/auth/httpAuthSchemeProvider.js @@ -0,0 +1,47 @@ +"use strict"; +Object.defineProperty(exports, "__esModule", { value: true }); +exports.resolveHttpAuthSchemeConfig = exports.defaultDynamoDBHttpAuthSchemeProvider = exports.defaultDynamoDBHttpAuthSchemeParametersProvider = void 0; +const core_1 = require("@aws-sdk/core"); +const util_middleware_1 = require("@smithy/util-middleware"); +const defaultDynamoDBHttpAuthSchemeParametersProvider = async (config, context, input) => { + return { + operation: (0, util_middleware_1.getSmithyContext)(context).operation, + region: (await (0, util_middleware_1.normalizeProvider)(config.region)()) || + (() => { + throw new Error("expected `region` to be configured for `aws.auth#sigv4`"); + })(), + }; +}; +exports.defaultDynamoDBHttpAuthSchemeParametersProvider = defaultDynamoDBHttpAuthSchemeParametersProvider; +function createAwsAuthSigv4HttpAuthOption(authParameters) { + return { + schemeId: "aws.auth#sigv4", + signingProperties: { + name: "dynamodb", + region: authParameters.region, + }, + propertiesExtractor: (config, context) => ({ + signingProperties: { + config, + context, + }, + }), + }; +} +const defaultDynamoDBHttpAuthSchemeProvider = (authParameters) => { + const options = []; + switch (authParameters.operation) { + default: { + options.push(createAwsAuthSigv4HttpAuthOption(authParameters)); + } + } + return options; +}; +exports.defaultDynamoDBHttpAuthSchemeProvider = defaultDynamoDBHttpAuthSchemeProvider; +const resolveHttpAuthSchemeConfig = (config) => { + const config_0 = (0, core_1.resolveAwsSdkSigV4Config)(config); + return 
Object.assign(config_0, { + authSchemePreference: (0, util_middleware_1.normalizeProvider)(config.authSchemePreference ?? []), + }); +}; +exports.resolveHttpAuthSchemeConfig = resolveHttpAuthSchemeConfig; diff --git a/amplify/functions/fetchDocuments/node_modules/@aws-sdk/client-dynamodb/dist-cjs/endpoint/endpointResolver.js b/amplify/functions/fetchDocuments/node_modules/@aws-sdk/client-dynamodb/dist-cjs/endpoint/endpointResolver.js new file mode 100644 index 0000000..1df276c --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@aws-sdk/client-dynamodb/dist-cjs/endpoint/endpointResolver.js @@ -0,0 +1,27 @@ +"use strict"; +Object.defineProperty(exports, "__esModule", { value: true }); +exports.defaultEndpointResolver = void 0; +const util_endpoints_1 = require("@aws-sdk/util-endpoints"); +const util_endpoints_2 = require("@smithy/util-endpoints"); +const ruleset_1 = require("./ruleset"); +const cache = new util_endpoints_2.EndpointCache({ + size: 50, + params: [ + "AccountId", + "AccountIdEndpointMode", + "Endpoint", + "Region", + "ResourceArn", + "ResourceArnList", + "UseDualStack", + "UseFIPS", + ], +}); +const defaultEndpointResolver = (endpointParams, context = {}) => { + return cache.get(endpointParams, () => (0, util_endpoints_2.resolveEndpoint)(ruleset_1.ruleSet, { + endpointParams: endpointParams, + logger: context.logger, + })); +}; +exports.defaultEndpointResolver = defaultEndpointResolver; +util_endpoints_2.customEndpointFunctions.aws = util_endpoints_1.awsEndpointFunctions; diff --git a/amplify/functions/fetchDocuments/node_modules/@aws-sdk/client-dynamodb/dist-cjs/endpoint/ruleset.js b/amplify/functions/fetchDocuments/node_modules/@aws-sdk/client-dynamodb/dist-cjs/endpoint/ruleset.js new file mode 100644 index 0000000..0a9f993 --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@aws-sdk/client-dynamodb/dist-cjs/endpoint/ruleset.js @@ -0,0 +1,7 @@ +"use strict"; +Object.defineProperty(exports, "__esModule", { value: true }); 
+exports.ruleSet = void 0; +const S = "required", T = "type", U = "fn", V = "argv", W = "ref", X = "properties", Y = "headers"; +const a = false, b = "isSet", c = "error", d = "endpoint", e = "tree", f = "PartitionResult", g = "stringEquals", h = "dynamodb", i = "getAttr", j = "aws.parseArn", k = "ParsedArn", l = "isValidHostLabel", m = "FirstArn", n = { [S]: false, [T]: "String" }, o = { [S]: true, "default": false, [T]: "Boolean" }, p = { [U]: "booleanEquals", [V]: [{ [W]: "UseFIPS" }, true] }, q = { [U]: "booleanEquals", [V]: [{ [W]: "UseDualStack" }, true] }, r = {}, s = { [W]: "Region" }, t = { [U]: "booleanEquals", [V]: [{ [U]: i, [V]: [{ [W]: f }, "supportsFIPS"] }, true] }, u = { [U]: "booleanEquals", [V]: [{ [U]: i, [V]: [{ [W]: f }, "supportsDualStack"] }, true] }, v = { "conditions": [{ [U]: b, [V]: [{ [W]: "AccountIdEndpointMode" }] }, { [U]: g, [V]: [{ [W]: "AccountIdEndpointMode" }, "required"] }], "rules": [{ [c]: "Invalid Configuration: AccountIdEndpointMode is required and FIPS is enabled, but FIPS account endpoints are not supported", [T]: c }], [T]: e }, w = { [U]: b, [V]: [{ [W]: "AccountIdEndpointMode" }] }, x = { [c]: "Invalid Configuration: AccountIdEndpointMode is required and FIPS is enabled, but FIPS account endpoints are not supported", [T]: c }, y = { [U]: i, [V]: [{ [W]: f }, "name"] }, z = { [d]: { "url": "https://dynamodb.{Region}.{PartitionResult#dnsSuffix}", [X]: {}, [Y]: {} }, [T]: d }, A = { [U]: "not", [V]: [p] }, B = { [c]: "Invalid Configuration: AccountIdEndpointMode is required and DualStack is enabled, but DualStack account endpoints are not supported", [T]: c }, C = { [U]: "not", [V]: [{ [U]: g, [V]: [{ [W]: "AccountIdEndpointMode" }, "disabled"] }] }, D = { [U]: g, [V]: [y, "aws"] }, E = { [U]: "not", [V]: [q] }, F = { [U]: g, [V]: [{ [U]: i, [V]: [{ [W]: k }, "service"] }, h] }, G = { [U]: l, [V]: [{ [U]: i, [V]: [{ [W]: k }, "region"] }, false] }, H = { [U]: g, [V]: [{ [U]: i, [V]: [{ [W]: k }, "region"] }, "{Region}"] 
}, I = { [U]: l, [V]: [{ [U]: i, [V]: [{ [W]: k }, "accountId"] }, false] }, J = { "url": "https://{ParsedArn#accountId}.ddb.{Region}.{PartitionResult#dnsSuffix}", [X]: {}, [Y]: {} }, K = { [W]: "ResourceArnList" }, L = { [W]: "AccountId" }, M = [p], N = [q], O = [s], P = [w, { [U]: g, [V]: [{ [W]: "AccountIdEndpointMode" }, "required"] }], Q = [A], R = [{ [W]: "ResourceArn" }]; +const _data = { version: "1.0", parameters: { Region: n, UseDualStack: o, UseFIPS: o, Endpoint: n, AccountId: n, AccountIdEndpointMode: n, ResourceArn: n, ResourceArnList: { [S]: a, [T]: "stringArray" } }, rules: [{ conditions: [{ [U]: b, [V]: [{ [W]: "Endpoint" }] }], rules: [{ conditions: M, error: "Invalid Configuration: FIPS and custom endpoint are not supported", [T]: c }, { conditions: N, error: "Invalid Configuration: Dualstack and custom endpoint are not supported", [T]: c }, { endpoint: { url: "{Endpoint}", [X]: r, [Y]: r }, [T]: d }], [T]: e }, { conditions: [{ [U]: b, [V]: O }], rules: [{ conditions: [{ [U]: "aws.partition", [V]: O, assign: f }], rules: [{ conditions: [{ [U]: g, [V]: [s, "local"] }], rules: [{ conditions: M, error: "Invalid Configuration: FIPS and local endpoint are not supported", [T]: c }, { conditions: N, error: "Invalid Configuration: Dualstack and local endpoint are not supported", [T]: c }, { endpoint: { url: "http://localhost:8000", [X]: { authSchemes: [{ signingRegion: "us-east-1", name: "sigv4", signingName: h }] }, [Y]: r }, [T]: d }], [T]: e }, { conditions: [p, q], rules: [{ conditions: [t, u], rules: [v, { endpoint: { url: "https://dynamodb-fips.{Region}.{PartitionResult#dualStackDnsSuffix}", [X]: r, [Y]: r }, [T]: d }], [T]: e }, { error: "FIPS and DualStack are enabled, but this partition does not support one or both", [T]: c }], [T]: e }, { conditions: M, rules: [{ conditions: [t], rules: [{ conditions: [{ [U]: g, [V]: [y, "aws-us-gov"] }], rules: [v, z], [T]: e }, v, { endpoint: { url: 
"https://dynamodb-fips.{Region}.{PartitionResult#dnsSuffix}", [X]: r, [Y]: r }, [T]: d }], [T]: e }, { error: "FIPS is enabled but this partition does not support FIPS", [T]: c }], [T]: e }, { conditions: N, rules: [{ conditions: [u], rules: [{ conditions: P, rules: [{ conditions: Q, rules: [B], [T]: e }, x], [T]: e }, { endpoint: { url: "https://dynamodb.{Region}.{PartitionResult#dualStackDnsSuffix}", [X]: r, [Y]: r }, [T]: d }], [T]: e }, { error: "DualStack is enabled but this partition does not support DualStack", [T]: c }], [T]: e }, { conditions: [w, C, D, A, E, { [U]: b, [V]: R }, { [U]: j, [V]: R, assign: k }, F, G, H, I], endpoint: J, [T]: d }, { conditions: [w, C, D, A, E, { [U]: b, [V]: [K] }, { [U]: i, [V]: [K, "[0]"], assign: m }, { [U]: j, [V]: [{ [W]: m }], assign: k }, F, G, H, I], endpoint: J, [T]: d }, { conditions: [w, C, D, A, E, { [U]: b, [V]: [L] }], rules: [{ conditions: [{ [U]: l, [V]: [L, a] }], rules: [{ endpoint: { url: "https://{AccountId}.ddb.{Region}.{PartitionResult#dnsSuffix}", [X]: r, [Y]: r }, [T]: d }], [T]: e }, { error: "Credentials-sourced account ID parameter is invalid", [T]: c }], [T]: e }, { conditions: P, rules: [{ conditions: Q, rules: [{ conditions: [E], rules: [{ conditions: [D], rules: [{ error: "AccountIdEndpointMode is required but no AccountID was provided or able to be loaded", [T]: c }], [T]: e }, { error: "Invalid Configuration: AccountIdEndpointMode is required but account endpoints are not supported in this partition", [T]: c }], [T]: e }, B], [T]: e }, x], [T]: e }, z], [T]: e }], [T]: e }, { error: "Invalid Configuration: Missing Region", [T]: c }] }; +exports.ruleSet = _data; diff --git a/amplify/functions/fetchDocuments/node_modules/@aws-sdk/client-dynamodb/dist-cjs/index.js b/amplify/functions/fetchDocuments/node_modules/@aws-sdk/client-dynamodb/dist-cjs/index.js new file mode 100644 index 0000000..b29fd11 --- /dev/null +++ 
b/amplify/functions/fetchDocuments/node_modules/@aws-sdk/client-dynamodb/dist-cjs/index.js @@ -0,0 +1,5561 @@ +"use strict"; +var __defProp = Object.defineProperty; +var __getOwnPropDesc = Object.getOwnPropertyDescriptor; +var __getOwnPropNames = Object.getOwnPropertyNames; +var __hasOwnProp = Object.prototype.hasOwnProperty; +var __name = (target, value) => __defProp(target, "name", { value, configurable: true }); +var __export = (target, all) => { + for (var name in all) + __defProp(target, name, { get: all[name], enumerable: true }); +}; +var __copyProps = (to, from, except, desc) => { + if (from && typeof from === "object" || typeof from === "function") { + for (let key of __getOwnPropNames(from)) + if (!__hasOwnProp.call(to, key) && key !== except) + __defProp(to, key, { get: () => from[key], enumerable: !(desc = __getOwnPropDesc(from, key)) || desc.enumerable }); + } + return to; +}; +var __toCommonJS = (mod) => __copyProps(__defProp({}, "__esModule", { value: true }), mod); + +// src/index.ts +var index_exports = {}; +__export(index_exports, { + ApproximateCreationDateTimePrecision: () => ApproximateCreationDateTimePrecision, + AttributeAction: () => AttributeAction, + AttributeValue: () => AttributeValue, + BackupInUseException: () => BackupInUseException, + BackupNotFoundException: () => BackupNotFoundException, + BackupStatus: () => BackupStatus, + BackupType: () => BackupType, + BackupTypeFilter: () => BackupTypeFilter, + BatchExecuteStatementCommand: () => BatchExecuteStatementCommand, + BatchGetItemCommand: () => BatchGetItemCommand, + BatchStatementErrorCodeEnum: () => BatchStatementErrorCodeEnum, + BatchWriteItemCommand: () => BatchWriteItemCommand, + BillingMode: () => BillingMode, + ComparisonOperator: () => ComparisonOperator, + ConditionalCheckFailedException: () => ConditionalCheckFailedException, + ConditionalOperator: () => ConditionalOperator, + ContinuousBackupsStatus: () => ContinuousBackupsStatus, + ContinuousBackupsUnavailableException: 
() => ContinuousBackupsUnavailableException, + ContributorInsightsAction: () => ContributorInsightsAction, + ContributorInsightsStatus: () => ContributorInsightsStatus, + CreateBackupCommand: () => CreateBackupCommand, + CreateGlobalTableCommand: () => CreateGlobalTableCommand, + CreateTableCommand: () => CreateTableCommand, + DeleteBackupCommand: () => DeleteBackupCommand, + DeleteItemCommand: () => DeleteItemCommand, + DeleteResourcePolicyCommand: () => DeleteResourcePolicyCommand, + DeleteTableCommand: () => DeleteTableCommand, + DescribeBackupCommand: () => DescribeBackupCommand, + DescribeContinuousBackupsCommand: () => DescribeContinuousBackupsCommand, + DescribeContributorInsightsCommand: () => DescribeContributorInsightsCommand, + DescribeEndpointsCommand: () => DescribeEndpointsCommand, + DescribeExportCommand: () => DescribeExportCommand, + DescribeGlobalTableCommand: () => DescribeGlobalTableCommand, + DescribeGlobalTableSettingsCommand: () => DescribeGlobalTableSettingsCommand, + DescribeImportCommand: () => DescribeImportCommand, + DescribeKinesisStreamingDestinationCommand: () => DescribeKinesisStreamingDestinationCommand, + DescribeLimitsCommand: () => DescribeLimitsCommand, + DescribeTableCommand: () => DescribeTableCommand, + DescribeTableReplicaAutoScalingCommand: () => DescribeTableReplicaAutoScalingCommand, + DescribeTimeToLiveCommand: () => DescribeTimeToLiveCommand, + DestinationStatus: () => DestinationStatus, + DisableKinesisStreamingDestinationCommand: () => DisableKinesisStreamingDestinationCommand, + DuplicateItemException: () => DuplicateItemException, + DynamoDB: () => DynamoDB, + DynamoDBClient: () => DynamoDBClient, + DynamoDBServiceException: () => DynamoDBServiceException, + EnableKinesisStreamingDestinationCommand: () => EnableKinesisStreamingDestinationCommand, + ExecuteStatementCommand: () => ExecuteStatementCommand, + ExecuteTransactionCommand: () => ExecuteTransactionCommand, + ExportConflictException: () => 
ExportConflictException, + ExportFormat: () => ExportFormat, + ExportNotFoundException: () => ExportNotFoundException, + ExportStatus: () => ExportStatus, + ExportTableToPointInTimeCommand: () => ExportTableToPointInTimeCommand, + ExportType: () => ExportType, + ExportViewType: () => ExportViewType, + GetItemCommand: () => GetItemCommand, + GetResourcePolicyCommand: () => GetResourcePolicyCommand, + GlobalTableAlreadyExistsException: () => GlobalTableAlreadyExistsException, + GlobalTableNotFoundException: () => GlobalTableNotFoundException, + GlobalTableStatus: () => GlobalTableStatus, + IdempotentParameterMismatchException: () => IdempotentParameterMismatchException, + ImportConflictException: () => ImportConflictException, + ImportNotFoundException: () => ImportNotFoundException, + ImportStatus: () => ImportStatus, + ImportTableCommand: () => ImportTableCommand, + IndexNotFoundException: () => IndexNotFoundException, + IndexStatus: () => IndexStatus, + InputCompressionType: () => InputCompressionType, + InputFormat: () => InputFormat, + InternalServerError: () => InternalServerError, + InvalidEndpointException: () => InvalidEndpointException, + InvalidExportTimeException: () => InvalidExportTimeException, + InvalidRestoreTimeException: () => InvalidRestoreTimeException, + ItemCollectionSizeLimitExceededException: () => ItemCollectionSizeLimitExceededException, + KeyType: () => KeyType, + LimitExceededException: () => LimitExceededException, + ListBackupsCommand: () => ListBackupsCommand, + ListContributorInsightsCommand: () => ListContributorInsightsCommand, + ListExportsCommand: () => ListExportsCommand, + ListGlobalTablesCommand: () => ListGlobalTablesCommand, + ListImportsCommand: () => ListImportsCommand, + ListTablesCommand: () => ListTablesCommand, + ListTagsOfResourceCommand: () => ListTagsOfResourceCommand, + MultiRegionConsistency: () => MultiRegionConsistency, + PointInTimeRecoveryStatus: () => PointInTimeRecoveryStatus, + 
PointInTimeRecoveryUnavailableException: () => PointInTimeRecoveryUnavailableException, + PolicyNotFoundException: () => PolicyNotFoundException, + ProjectionType: () => ProjectionType, + ProvisionedThroughputExceededException: () => ProvisionedThroughputExceededException, + PutItemCommand: () => PutItemCommand, + PutResourcePolicyCommand: () => PutResourcePolicyCommand, + QueryCommand: () => QueryCommand, + ReplicaAlreadyExistsException: () => ReplicaAlreadyExistsException, + ReplicaNotFoundException: () => ReplicaNotFoundException, + ReplicaStatus: () => ReplicaStatus, + ReplicatedWriteConflictException: () => ReplicatedWriteConflictException, + RequestLimitExceeded: () => RequestLimitExceeded, + ResourceInUseException: () => ResourceInUseException, + ResourceNotFoundException: () => ResourceNotFoundException, + RestoreTableFromBackupCommand: () => RestoreTableFromBackupCommand, + RestoreTableToPointInTimeCommand: () => RestoreTableToPointInTimeCommand, + ReturnConsumedCapacity: () => ReturnConsumedCapacity, + ReturnItemCollectionMetrics: () => ReturnItemCollectionMetrics, + ReturnValue: () => ReturnValue, + ReturnValuesOnConditionCheckFailure: () => ReturnValuesOnConditionCheckFailure, + S3SseAlgorithm: () => S3SseAlgorithm, + SSEStatus: () => SSEStatus, + SSEType: () => SSEType, + ScalarAttributeType: () => ScalarAttributeType, + ScanCommand: () => ScanCommand, + Select: () => Select, + StreamViewType: () => StreamViewType, + TableAlreadyExistsException: () => TableAlreadyExistsException, + TableClass: () => TableClass, + TableInUseException: () => TableInUseException, + TableNotFoundException: () => TableNotFoundException, + TableStatus: () => TableStatus, + TagResourceCommand: () => TagResourceCommand, + TimeToLiveStatus: () => TimeToLiveStatus, + TransactGetItemsCommand: () => TransactGetItemsCommand, + TransactWriteItemsCommand: () => TransactWriteItemsCommand, + TransactionCanceledException: () => TransactionCanceledException, + 
TransactionConflictException: () => TransactionConflictException, + TransactionInProgressException: () => TransactionInProgressException, + UntagResourceCommand: () => UntagResourceCommand, + UpdateContinuousBackupsCommand: () => UpdateContinuousBackupsCommand, + UpdateContributorInsightsCommand: () => UpdateContributorInsightsCommand, + UpdateGlobalTableCommand: () => UpdateGlobalTableCommand, + UpdateGlobalTableSettingsCommand: () => UpdateGlobalTableSettingsCommand, + UpdateItemCommand: () => UpdateItemCommand, + UpdateKinesisStreamingDestinationCommand: () => UpdateKinesisStreamingDestinationCommand, + UpdateTableCommand: () => UpdateTableCommand, + UpdateTableReplicaAutoScalingCommand: () => UpdateTableReplicaAutoScalingCommand, + UpdateTimeToLiveCommand: () => UpdateTimeToLiveCommand, + __Client: () => import_smithy_client.Client, + paginateListContributorInsights: () => paginateListContributorInsights, + paginateListExports: () => paginateListExports, + paginateListImports: () => paginateListImports, + paginateListTables: () => paginateListTables, + paginateQuery: () => paginateQuery, + paginateScan: () => paginateScan, + waitForTableExists: () => waitForTableExists, + waitForTableNotExists: () => waitForTableNotExists, + waitUntilTableExists: () => waitUntilTableExists, + waitUntilTableNotExists: () => waitUntilTableNotExists +}); +module.exports = __toCommonJS(index_exports); + +// src/DynamoDBClient.ts +var import_account_id_endpoint = require("@aws-sdk/core/account-id-endpoint"); +var import_middleware_endpoint_discovery = require("@aws-sdk/middleware-endpoint-discovery"); +var import_middleware_host_header = require("@aws-sdk/middleware-host-header"); +var import_middleware_logger = require("@aws-sdk/middleware-logger"); +var import_middleware_recursion_detection = require("@aws-sdk/middleware-recursion-detection"); +var import_middleware_user_agent = require("@aws-sdk/middleware-user-agent"); +var import_config_resolver = 
require("@smithy/config-resolver"); +var import_core2 = require("@smithy/core"); +var import_middleware_content_length = require("@smithy/middleware-content-length"); + +var import_middleware_retry = require("@smithy/middleware-retry"); + +var import_httpAuthSchemeProvider = require("./auth/httpAuthSchemeProvider"); + +// src/commands/DescribeEndpointsCommand.ts +var import_middleware_endpoint = require("@smithy/middleware-endpoint"); +var import_middleware_serde = require("@smithy/middleware-serde"); + + +// src/endpoint/EndpointParameters.ts +var resolveClientEndpointParameters = /* @__PURE__ */ __name((options) => { + return Object.assign(options, { + useDualstackEndpoint: options.useDualstackEndpoint ?? false, + useFipsEndpoint: options.useFipsEndpoint ?? false, + defaultSigningName: "dynamodb" + }); +}, "resolveClientEndpointParameters"); +var commonParams = { + UseFIPS: { type: "builtInParams", name: "useFipsEndpoint" }, + AccountId: { type: "builtInParams", name: "accountId" }, + Endpoint: { type: "builtInParams", name: "endpoint" }, + Region: { type: "builtInParams", name: "region" }, + UseDualStack: { type: "builtInParams", name: "useDualstackEndpoint" }, + AccountIdEndpointMode: { type: "builtInParams", name: "accountIdEndpointMode" } +}; + +// src/protocols/Aws_json1_0.ts +var import_core = require("@aws-sdk/core"); +var import_protocol_http = require("@smithy/protocol-http"); + +var import_uuid = require("uuid"); + +// src/models/DynamoDBServiceException.ts +var import_smithy_client = require("@smithy/smithy-client"); +var DynamoDBServiceException = class _DynamoDBServiceException extends import_smithy_client.ServiceException { + static { + __name(this, "DynamoDBServiceException"); + } + /** + * @internal + */ + constructor(options) { + super(options); + Object.setPrototypeOf(this, _DynamoDBServiceException.prototype); + } +}; + +// src/models/models_0.ts +var ApproximateCreationDateTimePrecision = { + MICROSECOND: "MICROSECOND", + MILLISECOND: 
"MILLISECOND" +}; +var AttributeAction = { + ADD: "ADD", + DELETE: "DELETE", + PUT: "PUT" +}; +var ScalarAttributeType = { + B: "B", + N: "N", + S: "S" +}; +var BackupStatus = { + AVAILABLE: "AVAILABLE", + CREATING: "CREATING", + DELETED: "DELETED" +}; +var BackupType = { + AWS_BACKUP: "AWS_BACKUP", + SYSTEM: "SYSTEM", + USER: "USER" +}; +var BillingMode = { + PAY_PER_REQUEST: "PAY_PER_REQUEST", + PROVISIONED: "PROVISIONED" +}; +var KeyType = { + HASH: "HASH", + RANGE: "RANGE" +}; +var ProjectionType = { + ALL: "ALL", + INCLUDE: "INCLUDE", + KEYS_ONLY: "KEYS_ONLY" +}; +var SSEType = { + AES256: "AES256", + KMS: "KMS" +}; +var SSEStatus = { + DISABLED: "DISABLED", + DISABLING: "DISABLING", + ENABLED: "ENABLED", + ENABLING: "ENABLING", + UPDATING: "UPDATING" +}; +var StreamViewType = { + KEYS_ONLY: "KEYS_ONLY", + NEW_AND_OLD_IMAGES: "NEW_AND_OLD_IMAGES", + NEW_IMAGE: "NEW_IMAGE", + OLD_IMAGE: "OLD_IMAGE" +}; +var TimeToLiveStatus = { + DISABLED: "DISABLED", + DISABLING: "DISABLING", + ENABLED: "ENABLED", + ENABLING: "ENABLING" +}; +var BackupInUseException = class _BackupInUseException extends DynamoDBServiceException { + static { + __name(this, "BackupInUseException"); + } + name = "BackupInUseException"; + $fault = "client"; + /** + * @internal + */ + constructor(opts) { + super({ + name: "BackupInUseException", + $fault: "client", + ...opts + }); + Object.setPrototypeOf(this, _BackupInUseException.prototype); + } +}; +var BackupNotFoundException = class _BackupNotFoundException extends DynamoDBServiceException { + static { + __name(this, "BackupNotFoundException"); + } + name = "BackupNotFoundException"; + $fault = "client"; + /** + * @internal + */ + constructor(opts) { + super({ + name: "BackupNotFoundException", + $fault: "client", + ...opts + }); + Object.setPrototypeOf(this, _BackupNotFoundException.prototype); + } +}; +var BackupTypeFilter = { + ALL: "ALL", + AWS_BACKUP: "AWS_BACKUP", + SYSTEM: "SYSTEM", + USER: "USER" +}; +var ReturnConsumedCapacity = { + 
INDEXES: "INDEXES", + NONE: "NONE", + TOTAL: "TOTAL" +}; +var ReturnValuesOnConditionCheckFailure = { + ALL_OLD: "ALL_OLD", + NONE: "NONE" +}; +var BatchStatementErrorCodeEnum = { + AccessDenied: "AccessDenied", + ConditionalCheckFailed: "ConditionalCheckFailed", + DuplicateItem: "DuplicateItem", + InternalServerError: "InternalServerError", + ItemCollectionSizeLimitExceeded: "ItemCollectionSizeLimitExceeded", + ProvisionedThroughputExceeded: "ProvisionedThroughputExceeded", + RequestLimitExceeded: "RequestLimitExceeded", + ResourceNotFound: "ResourceNotFound", + ThrottlingError: "ThrottlingError", + TransactionConflict: "TransactionConflict", + ValidationError: "ValidationError" +}; +var InternalServerError = class _InternalServerError extends DynamoDBServiceException { + static { + __name(this, "InternalServerError"); + } + name = "InternalServerError"; + $fault = "server"; + /** + * @internal + */ + constructor(opts) { + super({ + name: "InternalServerError", + $fault: "server", + ...opts + }); + Object.setPrototypeOf(this, _InternalServerError.prototype); + } +}; +var RequestLimitExceeded = class _RequestLimitExceeded extends DynamoDBServiceException { + static { + __name(this, "RequestLimitExceeded"); + } + name = "RequestLimitExceeded"; + $fault = "client"; + /** + * @internal + */ + constructor(opts) { + super({ + name: "RequestLimitExceeded", + $fault: "client", + ...opts + }); + Object.setPrototypeOf(this, _RequestLimitExceeded.prototype); + } +}; +var InvalidEndpointException = class _InvalidEndpointException extends DynamoDBServiceException { + static { + __name(this, "InvalidEndpointException"); + } + name = "InvalidEndpointException"; + $fault = "client"; + Message; + /** + * @internal + */ + constructor(opts) { + super({ + name: "InvalidEndpointException", + $fault: "client", + ...opts + }); + Object.setPrototypeOf(this, _InvalidEndpointException.prototype); + this.Message = opts.Message; + } +}; +var ProvisionedThroughputExceededException = class 
_ProvisionedThroughputExceededException extends DynamoDBServiceException { + static { + __name(this, "ProvisionedThroughputExceededException"); + } + name = "ProvisionedThroughputExceededException"; + $fault = "client"; + /** + * @internal + */ + constructor(opts) { + super({ + name: "ProvisionedThroughputExceededException", + $fault: "client", + ...opts + }); + Object.setPrototypeOf(this, _ProvisionedThroughputExceededException.prototype); + } +}; +var ResourceNotFoundException = class _ResourceNotFoundException extends DynamoDBServiceException { + static { + __name(this, "ResourceNotFoundException"); + } + name = "ResourceNotFoundException"; + $fault = "client"; + /** + * @internal + */ + constructor(opts) { + super({ + name: "ResourceNotFoundException", + $fault: "client", + ...opts + }); + Object.setPrototypeOf(this, _ResourceNotFoundException.prototype); + } +}; +var ReturnItemCollectionMetrics = { + NONE: "NONE", + SIZE: "SIZE" +}; +var ItemCollectionSizeLimitExceededException = class _ItemCollectionSizeLimitExceededException extends DynamoDBServiceException { + static { + __name(this, "ItemCollectionSizeLimitExceededException"); + } + name = "ItemCollectionSizeLimitExceededException"; + $fault = "client"; + /** + * @internal + */ + constructor(opts) { + super({ + name: "ItemCollectionSizeLimitExceededException", + $fault: "client", + ...opts + }); + Object.setPrototypeOf(this, _ItemCollectionSizeLimitExceededException.prototype); + } +}; +var ComparisonOperator = { + BEGINS_WITH: "BEGINS_WITH", + BETWEEN: "BETWEEN", + CONTAINS: "CONTAINS", + EQ: "EQ", + GE: "GE", + GT: "GT", + IN: "IN", + LE: "LE", + LT: "LT", + NE: "NE", + NOT_CONTAINS: "NOT_CONTAINS", + NOT_NULL: "NOT_NULL", + NULL: "NULL" +}; +var ConditionalOperator = { + AND: "AND", + OR: "OR" +}; +var ContinuousBackupsStatus = { + DISABLED: "DISABLED", + ENABLED: "ENABLED" +}; +var PointInTimeRecoveryStatus = { + DISABLED: "DISABLED", + ENABLED: "ENABLED" +}; +var ContinuousBackupsUnavailableException 
= class _ContinuousBackupsUnavailableException extends DynamoDBServiceException { + static { + __name(this, "ContinuousBackupsUnavailableException"); + } + name = "ContinuousBackupsUnavailableException"; + $fault = "client"; + /** + * @internal + */ + constructor(opts) { + super({ + name: "ContinuousBackupsUnavailableException", + $fault: "client", + ...opts + }); + Object.setPrototypeOf(this, _ContinuousBackupsUnavailableException.prototype); + } +}; +var ContributorInsightsAction = { + DISABLE: "DISABLE", + ENABLE: "ENABLE" +}; +var ContributorInsightsStatus = { + DISABLED: "DISABLED", + DISABLING: "DISABLING", + ENABLED: "ENABLED", + ENABLING: "ENABLING", + FAILED: "FAILED" +}; +var LimitExceededException = class _LimitExceededException extends DynamoDBServiceException { + static { + __name(this, "LimitExceededException"); + } + name = "LimitExceededException"; + $fault = "client"; + /** + * @internal + */ + constructor(opts) { + super({ + name: "LimitExceededException", + $fault: "client", + ...opts + }); + Object.setPrototypeOf(this, _LimitExceededException.prototype); + } +}; +var TableInUseException = class _TableInUseException extends DynamoDBServiceException { + static { + __name(this, "TableInUseException"); + } + name = "TableInUseException"; + $fault = "client"; + /** + * @internal + */ + constructor(opts) { + super({ + name: "TableInUseException", + $fault: "client", + ...opts + }); + Object.setPrototypeOf(this, _TableInUseException.prototype); + } +}; +var TableNotFoundException = class _TableNotFoundException extends DynamoDBServiceException { + static { + __name(this, "TableNotFoundException"); + } + name = "TableNotFoundException"; + $fault = "client"; + /** + * @internal + */ + constructor(opts) { + super({ + name: "TableNotFoundException", + $fault: "client", + ...opts + }); + Object.setPrototypeOf(this, _TableNotFoundException.prototype); + } +}; +var GlobalTableStatus = { + ACTIVE: "ACTIVE", + CREATING: "CREATING", + DELETING: "DELETING", + 
UPDATING: "UPDATING" +}; +var IndexStatus = { + ACTIVE: "ACTIVE", + CREATING: "CREATING", + DELETING: "DELETING", + UPDATING: "UPDATING" +}; +var ReplicaStatus = { + ACTIVE: "ACTIVE", + CREATING: "CREATING", + CREATION_FAILED: "CREATION_FAILED", + DELETING: "DELETING", + INACCESSIBLE_ENCRYPTION_CREDENTIALS: "INACCESSIBLE_ENCRYPTION_CREDENTIALS", + REGION_DISABLED: "REGION_DISABLED", + UPDATING: "UPDATING" +}; +var TableClass = { + STANDARD: "STANDARD", + STANDARD_INFREQUENT_ACCESS: "STANDARD_INFREQUENT_ACCESS" +}; +var TableStatus = { + ACTIVE: "ACTIVE", + ARCHIVED: "ARCHIVED", + ARCHIVING: "ARCHIVING", + CREATING: "CREATING", + DELETING: "DELETING", + INACCESSIBLE_ENCRYPTION_CREDENTIALS: "INACCESSIBLE_ENCRYPTION_CREDENTIALS", + UPDATING: "UPDATING" +}; +var GlobalTableAlreadyExistsException = class _GlobalTableAlreadyExistsException extends DynamoDBServiceException { + static { + __name(this, "GlobalTableAlreadyExistsException"); + } + name = "GlobalTableAlreadyExistsException"; + $fault = "client"; + /** + * @internal + */ + constructor(opts) { + super({ + name: "GlobalTableAlreadyExistsException", + $fault: "client", + ...opts + }); + Object.setPrototypeOf(this, _GlobalTableAlreadyExistsException.prototype); + } +}; +var MultiRegionConsistency = { + EVENTUAL: "EVENTUAL", + STRONG: "STRONG" +}; +var ResourceInUseException = class _ResourceInUseException extends DynamoDBServiceException { + static { + __name(this, "ResourceInUseException"); + } + name = "ResourceInUseException"; + $fault = "client"; + /** + * @internal + */ + constructor(opts) { + super({ + name: "ResourceInUseException", + $fault: "client", + ...opts + }); + Object.setPrototypeOf(this, _ResourceInUseException.prototype); + } +}; +var ReturnValue = { + ALL_NEW: "ALL_NEW", + ALL_OLD: "ALL_OLD", + NONE: "NONE", + UPDATED_NEW: "UPDATED_NEW", + UPDATED_OLD: "UPDATED_OLD" +}; +var ReplicatedWriteConflictException = class _ReplicatedWriteConflictException extends DynamoDBServiceException { + static { + 
__name(this, "ReplicatedWriteConflictException"); + } + name = "ReplicatedWriteConflictException"; + $fault = "client"; + /** + * @internal + */ + constructor(opts) { + super({ + name: "ReplicatedWriteConflictException", + $fault: "client", + ...opts + }); + Object.setPrototypeOf(this, _ReplicatedWriteConflictException.prototype); + } +}; +var TransactionConflictException = class _TransactionConflictException extends DynamoDBServiceException { + static { + __name(this, "TransactionConflictException"); + } + name = "TransactionConflictException"; + $fault = "client"; + /** + * @internal + */ + constructor(opts) { + super({ + name: "TransactionConflictException", + $fault: "client", + ...opts + }); + Object.setPrototypeOf(this, _TransactionConflictException.prototype); + } +}; +var PolicyNotFoundException = class _PolicyNotFoundException extends DynamoDBServiceException { + static { + __name(this, "PolicyNotFoundException"); + } + name = "PolicyNotFoundException"; + $fault = "client"; + /** + * @internal + */ + constructor(opts) { + super({ + name: "PolicyNotFoundException", + $fault: "client", + ...opts + }); + Object.setPrototypeOf(this, _PolicyNotFoundException.prototype); + } +}; +var ExportFormat = { + DYNAMODB_JSON: "DYNAMODB_JSON", + ION: "ION" +}; +var ExportStatus = { + COMPLETED: "COMPLETED", + FAILED: "FAILED", + IN_PROGRESS: "IN_PROGRESS" +}; +var ExportType = { + FULL_EXPORT: "FULL_EXPORT", + INCREMENTAL_EXPORT: "INCREMENTAL_EXPORT" +}; +var ExportViewType = { + NEW_AND_OLD_IMAGES: "NEW_AND_OLD_IMAGES", + NEW_IMAGE: "NEW_IMAGE" +}; +var S3SseAlgorithm = { + AES256: "AES256", + KMS: "KMS" +}; +var ExportNotFoundException = class _ExportNotFoundException extends DynamoDBServiceException { + static { + __name(this, "ExportNotFoundException"); + } + name = "ExportNotFoundException"; + $fault = "client"; + /** + * @internal + */ + constructor(opts) { + super({ + name: "ExportNotFoundException", + $fault: "client", + ...opts + }); + Object.setPrototypeOf(this, 
_ExportNotFoundException.prototype); + } +}; +var GlobalTableNotFoundException = class _GlobalTableNotFoundException extends DynamoDBServiceException { + static { + __name(this, "GlobalTableNotFoundException"); + } + name = "GlobalTableNotFoundException"; + $fault = "client"; + /** + * @internal + */ + constructor(opts) { + super({ + name: "GlobalTableNotFoundException", + $fault: "client", + ...opts + }); + Object.setPrototypeOf(this, _GlobalTableNotFoundException.prototype); + } +}; +var ImportStatus = { + CANCELLED: "CANCELLED", + CANCELLING: "CANCELLING", + COMPLETED: "COMPLETED", + FAILED: "FAILED", + IN_PROGRESS: "IN_PROGRESS" +}; +var InputCompressionType = { + GZIP: "GZIP", + NONE: "NONE", + ZSTD: "ZSTD" +}; +var InputFormat = { + CSV: "CSV", + DYNAMODB_JSON: "DYNAMODB_JSON", + ION: "ION" +}; +var ImportNotFoundException = class _ImportNotFoundException extends DynamoDBServiceException { + static { + __name(this, "ImportNotFoundException"); + } + name = "ImportNotFoundException"; + $fault = "client"; + /** + * @internal + */ + constructor(opts) { + super({ + name: "ImportNotFoundException", + $fault: "client", + ...opts + }); + Object.setPrototypeOf(this, _ImportNotFoundException.prototype); + } +}; +var DestinationStatus = { + ACTIVE: "ACTIVE", + DISABLED: "DISABLED", + DISABLING: "DISABLING", + ENABLE_FAILED: "ENABLE_FAILED", + ENABLING: "ENABLING", + UPDATING: "UPDATING" +}; +var DuplicateItemException = class _DuplicateItemException extends DynamoDBServiceException { + static { + __name(this, "DuplicateItemException"); + } + name = "DuplicateItemException"; + $fault = "client"; + /** + * @internal + */ + constructor(opts) { + super({ + name: "DuplicateItemException", + $fault: "client", + ...opts + }); + Object.setPrototypeOf(this, _DuplicateItemException.prototype); + } +}; +var IdempotentParameterMismatchException = class _IdempotentParameterMismatchException extends DynamoDBServiceException { + static { + __name(this, 
"IdempotentParameterMismatchException"); + } + name = "IdempotentParameterMismatchException"; + $fault = "client"; + Message; + /** + * @internal + */ + constructor(opts) { + super({ + name: "IdempotentParameterMismatchException", + $fault: "client", + ...opts + }); + Object.setPrototypeOf(this, _IdempotentParameterMismatchException.prototype); + this.Message = opts.Message; + } +}; +var TransactionInProgressException = class _TransactionInProgressException extends DynamoDBServiceException { + static { + __name(this, "TransactionInProgressException"); + } + name = "TransactionInProgressException"; + $fault = "client"; + Message; + /** + * @internal + */ + constructor(opts) { + super({ + name: "TransactionInProgressException", + $fault: "client", + ...opts + }); + Object.setPrototypeOf(this, _TransactionInProgressException.prototype); + this.Message = opts.Message; + } +}; +var ExportConflictException = class _ExportConflictException extends DynamoDBServiceException { + static { + __name(this, "ExportConflictException"); + } + name = "ExportConflictException"; + $fault = "client"; + /** + * @internal + */ + constructor(opts) { + super({ + name: "ExportConflictException", + $fault: "client", + ...opts + }); + Object.setPrototypeOf(this, _ExportConflictException.prototype); + } +}; +var InvalidExportTimeException = class _InvalidExportTimeException extends DynamoDBServiceException { + static { + __name(this, "InvalidExportTimeException"); + } + name = "InvalidExportTimeException"; + $fault = "client"; + /** + * @internal + */ + constructor(opts) { + super({ + name: "InvalidExportTimeException", + $fault: "client", + ...opts + }); + Object.setPrototypeOf(this, _InvalidExportTimeException.prototype); + } +}; +var PointInTimeRecoveryUnavailableException = class _PointInTimeRecoveryUnavailableException extends DynamoDBServiceException { + static { + __name(this, "PointInTimeRecoveryUnavailableException"); + } + name = "PointInTimeRecoveryUnavailableException"; + $fault = 
"client"; + /** + * @internal + */ + constructor(opts) { + super({ + name: "PointInTimeRecoveryUnavailableException", + $fault: "client", + ...opts + }); + Object.setPrototypeOf(this, _PointInTimeRecoveryUnavailableException.prototype); + } +}; +var ImportConflictException = class _ImportConflictException extends DynamoDBServiceException { + static { + __name(this, "ImportConflictException"); + } + name = "ImportConflictException"; + $fault = "client"; + /** + * @internal + */ + constructor(opts) { + super({ + name: "ImportConflictException", + $fault: "client", + ...opts + }); + Object.setPrototypeOf(this, _ImportConflictException.prototype); + } +}; +var Select = { + ALL_ATTRIBUTES: "ALL_ATTRIBUTES", + ALL_PROJECTED_ATTRIBUTES: "ALL_PROJECTED_ATTRIBUTES", + COUNT: "COUNT", + SPECIFIC_ATTRIBUTES: "SPECIFIC_ATTRIBUTES" +}; +var TableAlreadyExistsException = class _TableAlreadyExistsException extends DynamoDBServiceException { + static { + __name(this, "TableAlreadyExistsException"); + } + name = "TableAlreadyExistsException"; + $fault = "client"; + /** + * @internal + */ + constructor(opts) { + super({ + name: "TableAlreadyExistsException", + $fault: "client", + ...opts + }); + Object.setPrototypeOf(this, _TableAlreadyExistsException.prototype); + } +}; +var InvalidRestoreTimeException = class _InvalidRestoreTimeException extends DynamoDBServiceException { + static { + __name(this, "InvalidRestoreTimeException"); + } + name = "InvalidRestoreTimeException"; + $fault = "client"; + /** + * @internal + */ + constructor(opts) { + super({ + name: "InvalidRestoreTimeException", + $fault: "client", + ...opts + }); + Object.setPrototypeOf(this, _InvalidRestoreTimeException.prototype); + } +}; +var ReplicaAlreadyExistsException = class _ReplicaAlreadyExistsException extends DynamoDBServiceException { + static { + __name(this, "ReplicaAlreadyExistsException"); + } + name = "ReplicaAlreadyExistsException"; + $fault = "client"; + /** + * @internal + */ + constructor(opts) { + 
super({ + name: "ReplicaAlreadyExistsException", + $fault: "client", + ...opts + }); + Object.setPrototypeOf(this, _ReplicaAlreadyExistsException.prototype); + } +}; +var ReplicaNotFoundException = class _ReplicaNotFoundException extends DynamoDBServiceException { + static { + __name(this, "ReplicaNotFoundException"); + } + name = "ReplicaNotFoundException"; + $fault = "client"; + /** + * @internal + */ + constructor(opts) { + super({ + name: "ReplicaNotFoundException", + $fault: "client", + ...opts + }); + Object.setPrototypeOf(this, _ReplicaNotFoundException.prototype); + } +}; +var IndexNotFoundException = class _IndexNotFoundException extends DynamoDBServiceException { + static { + __name(this, "IndexNotFoundException"); + } + name = "IndexNotFoundException"; + $fault = "client"; + /** + * @internal + */ + constructor(opts) { + super({ + name: "IndexNotFoundException", + $fault: "client", + ...opts + }); + Object.setPrototypeOf(this, _IndexNotFoundException.prototype); + } +}; +var AttributeValue; +((AttributeValue2) => { + AttributeValue2.visit = /* @__PURE__ */ __name((value, visitor) => { + if (value.S !== void 0) return visitor.S(value.S); + if (value.N !== void 0) return visitor.N(value.N); + if (value.B !== void 0) return visitor.B(value.B); + if (value.SS !== void 0) return visitor.SS(value.SS); + if (value.NS !== void 0) return visitor.NS(value.NS); + if (value.BS !== void 0) return visitor.BS(value.BS); + if (value.M !== void 0) return visitor.M(value.M); + if (value.L !== void 0) return visitor.L(value.L); + if (value.NULL !== void 0) return visitor.NULL(value.NULL); + if (value.BOOL !== void 0) return visitor.BOOL(value.BOOL); + return visitor._(value.$unknown[0], value.$unknown[1]); + }, "visit"); +})(AttributeValue || (AttributeValue = {})); +var ConditionalCheckFailedException = class _ConditionalCheckFailedException extends DynamoDBServiceException { + static { + __name(this, "ConditionalCheckFailedException"); + } + name = 
"ConditionalCheckFailedException"; + $fault = "client"; + /** + *

Item which caused the ConditionalCheckFailedException.

+ * @public + */ + Item; + /** + * @internal + */ + constructor(opts) { + super({ + name: "ConditionalCheckFailedException", + $fault: "client", + ...opts + }); + Object.setPrototypeOf(this, _ConditionalCheckFailedException.prototype); + this.Item = opts.Item; + } +}; +var TransactionCanceledException = class _TransactionCanceledException extends DynamoDBServiceException { + static { + __name(this, "TransactionCanceledException"); + } + name = "TransactionCanceledException"; + $fault = "client"; + Message; + /** + *

A list of cancellation reasons.

+ * @public + */ + CancellationReasons; + /** + * @internal + */ + constructor(opts) { + super({ + name: "TransactionCanceledException", + $fault: "client", + ...opts + }); + Object.setPrototypeOf(this, _TransactionCanceledException.prototype); + this.Message = opts.Message; + this.CancellationReasons = opts.CancellationReasons; + } +}; + +// src/protocols/Aws_json1_0.ts +var se_BatchExecuteStatementCommand = /* @__PURE__ */ __name(async (input, context) => { + const headers = sharedHeaders("BatchExecuteStatement"); + let body; + body = JSON.stringify(se_BatchExecuteStatementInput(input, context)); + return buildHttpRpcRequest(context, headers, "/", void 0, body); +}, "se_BatchExecuteStatementCommand"); +var se_BatchGetItemCommand = /* @__PURE__ */ __name(async (input, context) => { + const headers = sharedHeaders("BatchGetItem"); + let body; + body = JSON.stringify(se_BatchGetItemInput(input, context)); + return buildHttpRpcRequest(context, headers, "/", void 0, body); +}, "se_BatchGetItemCommand"); +var se_BatchWriteItemCommand = /* @__PURE__ */ __name(async (input, context) => { + const headers = sharedHeaders("BatchWriteItem"); + let body; + body = JSON.stringify(se_BatchWriteItemInput(input, context)); + return buildHttpRpcRequest(context, headers, "/", void 0, body); +}, "se_BatchWriteItemCommand"); +var se_CreateBackupCommand = /* @__PURE__ */ __name(async (input, context) => { + const headers = sharedHeaders("CreateBackup"); + let body; + body = JSON.stringify((0, import_smithy_client._json)(input)); + return buildHttpRpcRequest(context, headers, "/", void 0, body); +}, "se_CreateBackupCommand"); +var se_CreateGlobalTableCommand = /* @__PURE__ */ __name(async (input, context) => { + const headers = sharedHeaders("CreateGlobalTable"); + let body; + body = JSON.stringify((0, import_smithy_client._json)(input)); + return buildHttpRpcRequest(context, headers, "/", void 0, body); +}, "se_CreateGlobalTableCommand"); +var se_CreateTableCommand = /* @__PURE__ */ 
__name(async (input, context) => { + const headers = sharedHeaders("CreateTable"); + let body; + body = JSON.stringify((0, import_smithy_client._json)(input)); + return buildHttpRpcRequest(context, headers, "/", void 0, body); +}, "se_CreateTableCommand"); +var se_DeleteBackupCommand = /* @__PURE__ */ __name(async (input, context) => { + const headers = sharedHeaders("DeleteBackup"); + let body; + body = JSON.stringify((0, import_smithy_client._json)(input)); + return buildHttpRpcRequest(context, headers, "/", void 0, body); +}, "se_DeleteBackupCommand"); +var se_DeleteItemCommand = /* @__PURE__ */ __name(async (input, context) => { + const headers = sharedHeaders("DeleteItem"); + let body; + body = JSON.stringify(se_DeleteItemInput(input, context)); + return buildHttpRpcRequest(context, headers, "/", void 0, body); +}, "se_DeleteItemCommand"); +var se_DeleteResourcePolicyCommand = /* @__PURE__ */ __name(async (input, context) => { + const headers = sharedHeaders("DeleteResourcePolicy"); + let body; + body = JSON.stringify((0, import_smithy_client._json)(input)); + return buildHttpRpcRequest(context, headers, "/", void 0, body); +}, "se_DeleteResourcePolicyCommand"); +var se_DeleteTableCommand = /* @__PURE__ */ __name(async (input, context) => { + const headers = sharedHeaders("DeleteTable"); + let body; + body = JSON.stringify((0, import_smithy_client._json)(input)); + return buildHttpRpcRequest(context, headers, "/", void 0, body); +}, "se_DeleteTableCommand"); +var se_DescribeBackupCommand = /* @__PURE__ */ __name(async (input, context) => { + const headers = sharedHeaders("DescribeBackup"); + let body; + body = JSON.stringify((0, import_smithy_client._json)(input)); + return buildHttpRpcRequest(context, headers, "/", void 0, body); +}, "se_DescribeBackupCommand"); +var se_DescribeContinuousBackupsCommand = /* @__PURE__ */ __name(async (input, context) => { + const headers = sharedHeaders("DescribeContinuousBackups"); + let body; + body = JSON.stringify((0, 
import_smithy_client._json)(input)); + return buildHttpRpcRequest(context, headers, "/", void 0, body); +}, "se_DescribeContinuousBackupsCommand"); +var se_DescribeContributorInsightsCommand = /* @__PURE__ */ __name(async (input, context) => { + const headers = sharedHeaders("DescribeContributorInsights"); + let body; + body = JSON.stringify((0, import_smithy_client._json)(input)); + return buildHttpRpcRequest(context, headers, "/", void 0, body); +}, "se_DescribeContributorInsightsCommand"); +var se_DescribeEndpointsCommand = /* @__PURE__ */ __name(async (input, context) => { + const headers = sharedHeaders("DescribeEndpoints"); + let body; + body = JSON.stringify((0, import_smithy_client._json)(input)); + return buildHttpRpcRequest(context, headers, "/", void 0, body); +}, "se_DescribeEndpointsCommand"); +var se_DescribeExportCommand = /* @__PURE__ */ __name(async (input, context) => { + const headers = sharedHeaders("DescribeExport"); + let body; + body = JSON.stringify((0, import_smithy_client._json)(input)); + return buildHttpRpcRequest(context, headers, "/", void 0, body); +}, "se_DescribeExportCommand"); +var se_DescribeGlobalTableCommand = /* @__PURE__ */ __name(async (input, context) => { + const headers = sharedHeaders("DescribeGlobalTable"); + let body; + body = JSON.stringify((0, import_smithy_client._json)(input)); + return buildHttpRpcRequest(context, headers, "/", void 0, body); +}, "se_DescribeGlobalTableCommand"); +var se_DescribeGlobalTableSettingsCommand = /* @__PURE__ */ __name(async (input, context) => { + const headers = sharedHeaders("DescribeGlobalTableSettings"); + let body; + body = JSON.stringify((0, import_smithy_client._json)(input)); + return buildHttpRpcRequest(context, headers, "/", void 0, body); +}, "se_DescribeGlobalTableSettingsCommand"); +var se_DescribeImportCommand = /* @__PURE__ */ __name(async (input, context) => { + const headers = sharedHeaders("DescribeImport"); + let body; + body = JSON.stringify((0, 
import_smithy_client._json)(input)); + return buildHttpRpcRequest(context, headers, "/", void 0, body); +}, "se_DescribeImportCommand"); +var se_DescribeKinesisStreamingDestinationCommand = /* @__PURE__ */ __name(async (input, context) => { + const headers = sharedHeaders("DescribeKinesisStreamingDestination"); + let body; + body = JSON.stringify((0, import_smithy_client._json)(input)); + return buildHttpRpcRequest(context, headers, "/", void 0, body); +}, "se_DescribeKinesisStreamingDestinationCommand"); +var se_DescribeLimitsCommand = /* @__PURE__ */ __name(async (input, context) => { + const headers = sharedHeaders("DescribeLimits"); + let body; + body = JSON.stringify((0, import_smithy_client._json)(input)); + return buildHttpRpcRequest(context, headers, "/", void 0, body); +}, "se_DescribeLimitsCommand"); +var se_DescribeTableCommand = /* @__PURE__ */ __name(async (input, context) => { + const headers = sharedHeaders("DescribeTable"); + let body; + body = JSON.stringify((0, import_smithy_client._json)(input)); + return buildHttpRpcRequest(context, headers, "/", void 0, body); +}, "se_DescribeTableCommand"); +var se_DescribeTableReplicaAutoScalingCommand = /* @__PURE__ */ __name(async (input, context) => { + const headers = sharedHeaders("DescribeTableReplicaAutoScaling"); + let body; + body = JSON.stringify((0, import_smithy_client._json)(input)); + return buildHttpRpcRequest(context, headers, "/", void 0, body); +}, "se_DescribeTableReplicaAutoScalingCommand"); +var se_DescribeTimeToLiveCommand = /* @__PURE__ */ __name(async (input, context) => { + const headers = sharedHeaders("DescribeTimeToLive"); + let body; + body = JSON.stringify((0, import_smithy_client._json)(input)); + return buildHttpRpcRequest(context, headers, "/", void 0, body); +}, "se_DescribeTimeToLiveCommand"); +var se_DisableKinesisStreamingDestinationCommand = /* @__PURE__ */ __name(async (input, context) => { + const headers = sharedHeaders("DisableKinesisStreamingDestination"); + let 
body; + body = JSON.stringify((0, import_smithy_client._json)(input)); + return buildHttpRpcRequest(context, headers, "/", void 0, body); +}, "se_DisableKinesisStreamingDestinationCommand"); +var se_EnableKinesisStreamingDestinationCommand = /* @__PURE__ */ __name(async (input, context) => { + const headers = sharedHeaders("EnableKinesisStreamingDestination"); + let body; + body = JSON.stringify((0, import_smithy_client._json)(input)); + return buildHttpRpcRequest(context, headers, "/", void 0, body); +}, "se_EnableKinesisStreamingDestinationCommand"); +var se_ExecuteStatementCommand = /* @__PURE__ */ __name(async (input, context) => { + const headers = sharedHeaders("ExecuteStatement"); + let body; + body = JSON.stringify(se_ExecuteStatementInput(input, context)); + return buildHttpRpcRequest(context, headers, "/", void 0, body); +}, "se_ExecuteStatementCommand"); +var se_ExecuteTransactionCommand = /* @__PURE__ */ __name(async (input, context) => { + const headers = sharedHeaders("ExecuteTransaction"); + let body; + body = JSON.stringify(se_ExecuteTransactionInput(input, context)); + return buildHttpRpcRequest(context, headers, "/", void 0, body); +}, "se_ExecuteTransactionCommand"); +var se_ExportTableToPointInTimeCommand = /* @__PURE__ */ __name(async (input, context) => { + const headers = sharedHeaders("ExportTableToPointInTime"); + let body; + body = JSON.stringify(se_ExportTableToPointInTimeInput(input, context)); + return buildHttpRpcRequest(context, headers, "/", void 0, body); +}, "se_ExportTableToPointInTimeCommand"); +var se_GetItemCommand = /* @__PURE__ */ __name(async (input, context) => { + const headers = sharedHeaders("GetItem"); + let body; + body = JSON.stringify(se_GetItemInput(input, context)); + return buildHttpRpcRequest(context, headers, "/", void 0, body); +}, "se_GetItemCommand"); +var se_GetResourcePolicyCommand = /* @__PURE__ */ __name(async (input, context) => { + const headers = sharedHeaders("GetResourcePolicy"); + let body; + body = 
JSON.stringify((0, import_smithy_client._json)(input)); + return buildHttpRpcRequest(context, headers, "/", void 0, body); +}, "se_GetResourcePolicyCommand"); +var se_ImportTableCommand = /* @__PURE__ */ __name(async (input, context) => { + const headers = sharedHeaders("ImportTable"); + let body; + body = JSON.stringify(se_ImportTableInput(input, context)); + return buildHttpRpcRequest(context, headers, "/", void 0, body); +}, "se_ImportTableCommand"); +var se_ListBackupsCommand = /* @__PURE__ */ __name(async (input, context) => { + const headers = sharedHeaders("ListBackups"); + let body; + body = JSON.stringify(se_ListBackupsInput(input, context)); + return buildHttpRpcRequest(context, headers, "/", void 0, body); +}, "se_ListBackupsCommand"); +var se_ListContributorInsightsCommand = /* @__PURE__ */ __name(async (input, context) => { + const headers = sharedHeaders("ListContributorInsights"); + let body; + body = JSON.stringify((0, import_smithy_client._json)(input)); + return buildHttpRpcRequest(context, headers, "/", void 0, body); +}, "se_ListContributorInsightsCommand"); +var se_ListExportsCommand = /* @__PURE__ */ __name(async (input, context) => { + const headers = sharedHeaders("ListExports"); + let body; + body = JSON.stringify((0, import_smithy_client._json)(input)); + return buildHttpRpcRequest(context, headers, "/", void 0, body); +}, "se_ListExportsCommand"); +var se_ListGlobalTablesCommand = /* @__PURE__ */ __name(async (input, context) => { + const headers = sharedHeaders("ListGlobalTables"); + let body; + body = JSON.stringify((0, import_smithy_client._json)(input)); + return buildHttpRpcRequest(context, headers, "/", void 0, body); +}, "se_ListGlobalTablesCommand"); +var se_ListImportsCommand = /* @__PURE__ */ __name(async (input, context) => { + const headers = sharedHeaders("ListImports"); + let body; + body = JSON.stringify((0, import_smithy_client._json)(input)); + return buildHttpRpcRequest(context, headers, "/", void 0, body); +}, 
"se_ListImportsCommand"); +var se_ListTablesCommand = /* @__PURE__ */ __name(async (input, context) => { + const headers = sharedHeaders("ListTables"); + let body; + body = JSON.stringify((0, import_smithy_client._json)(input)); + return buildHttpRpcRequest(context, headers, "/", void 0, body); +}, "se_ListTablesCommand"); +var se_ListTagsOfResourceCommand = /* @__PURE__ */ __name(async (input, context) => { + const headers = sharedHeaders("ListTagsOfResource"); + let body; + body = JSON.stringify((0, import_smithy_client._json)(input)); + return buildHttpRpcRequest(context, headers, "/", void 0, body); +}, "se_ListTagsOfResourceCommand"); +var se_PutItemCommand = /* @__PURE__ */ __name(async (input, context) => { + const headers = sharedHeaders("PutItem"); + let body; + body = JSON.stringify(se_PutItemInput(input, context)); + return buildHttpRpcRequest(context, headers, "/", void 0, body); +}, "se_PutItemCommand"); +var se_PutResourcePolicyCommand = /* @__PURE__ */ __name(async (input, context) => { + const headers = sharedHeaders("PutResourcePolicy"); + let body; + body = JSON.stringify((0, import_smithy_client._json)(input)); + return buildHttpRpcRequest(context, headers, "/", void 0, body); +}, "se_PutResourcePolicyCommand"); +var se_QueryCommand = /* @__PURE__ */ __name(async (input, context) => { + const headers = sharedHeaders("Query"); + let body; + body = JSON.stringify(se_QueryInput(input, context)); + return buildHttpRpcRequest(context, headers, "/", void 0, body); +}, "se_QueryCommand"); +var se_RestoreTableFromBackupCommand = /* @__PURE__ */ __name(async (input, context) => { + const headers = sharedHeaders("RestoreTableFromBackup"); + let body; + body = JSON.stringify((0, import_smithy_client._json)(input)); + return buildHttpRpcRequest(context, headers, "/", void 0, body); +}, "se_RestoreTableFromBackupCommand"); +var se_RestoreTableToPointInTimeCommand = /* @__PURE__ */ __name(async (input, context) => { + const headers = 
sharedHeaders("RestoreTableToPointInTime"); + let body; + body = JSON.stringify(se_RestoreTableToPointInTimeInput(input, context)); + return buildHttpRpcRequest(context, headers, "/", void 0, body); +}, "se_RestoreTableToPointInTimeCommand"); +var se_ScanCommand = /* @__PURE__ */ __name(async (input, context) => { + const headers = sharedHeaders("Scan"); + let body; + body = JSON.stringify(se_ScanInput(input, context)); + return buildHttpRpcRequest(context, headers, "/", void 0, body); +}, "se_ScanCommand"); +var se_TagResourceCommand = /* @__PURE__ */ __name(async (input, context) => { + const headers = sharedHeaders("TagResource"); + let body; + body = JSON.stringify((0, import_smithy_client._json)(input)); + return buildHttpRpcRequest(context, headers, "/", void 0, body); +}, "se_TagResourceCommand"); +var se_TransactGetItemsCommand = /* @__PURE__ */ __name(async (input, context) => { + const headers = sharedHeaders("TransactGetItems"); + let body; + body = JSON.stringify(se_TransactGetItemsInput(input, context)); + return buildHttpRpcRequest(context, headers, "/", void 0, body); +}, "se_TransactGetItemsCommand"); +var se_TransactWriteItemsCommand = /* @__PURE__ */ __name(async (input, context) => { + const headers = sharedHeaders("TransactWriteItems"); + let body; + body = JSON.stringify(se_TransactWriteItemsInput(input, context)); + return buildHttpRpcRequest(context, headers, "/", void 0, body); +}, "se_TransactWriteItemsCommand"); +var se_UntagResourceCommand = /* @__PURE__ */ __name(async (input, context) => { + const headers = sharedHeaders("UntagResource"); + let body; + body = JSON.stringify((0, import_smithy_client._json)(input)); + return buildHttpRpcRequest(context, headers, "/", void 0, body); +}, "se_UntagResourceCommand"); +var se_UpdateContinuousBackupsCommand = /* @__PURE__ */ __name(async (input, context) => { + const headers = sharedHeaders("UpdateContinuousBackups"); + let body; + body = JSON.stringify((0, import_smithy_client._json)(input)); 
+ return buildHttpRpcRequest(context, headers, "/", void 0, body); +}, "se_UpdateContinuousBackupsCommand"); +var se_UpdateContributorInsightsCommand = /* @__PURE__ */ __name(async (input, context) => { + const headers = sharedHeaders("UpdateContributorInsights"); + let body; + body = JSON.stringify((0, import_smithy_client._json)(input)); + return buildHttpRpcRequest(context, headers, "/", void 0, body); +}, "se_UpdateContributorInsightsCommand"); +var se_UpdateGlobalTableCommand = /* @__PURE__ */ __name(async (input, context) => { + const headers = sharedHeaders("UpdateGlobalTable"); + let body; + body = JSON.stringify((0, import_smithy_client._json)(input)); + return buildHttpRpcRequest(context, headers, "/", void 0, body); +}, "se_UpdateGlobalTableCommand"); +var se_UpdateGlobalTableSettingsCommand = /* @__PURE__ */ __name(async (input, context) => { + const headers = sharedHeaders("UpdateGlobalTableSettings"); + let body; + body = JSON.stringify(se_UpdateGlobalTableSettingsInput(input, context)); + return buildHttpRpcRequest(context, headers, "/", void 0, body); +}, "se_UpdateGlobalTableSettingsCommand"); +var se_UpdateItemCommand = /* @__PURE__ */ __name(async (input, context) => { + const headers = sharedHeaders("UpdateItem"); + let body; + body = JSON.stringify(se_UpdateItemInput(input, context)); + return buildHttpRpcRequest(context, headers, "/", void 0, body); +}, "se_UpdateItemCommand"); +var se_UpdateKinesisStreamingDestinationCommand = /* @__PURE__ */ __name(async (input, context) => { + const headers = sharedHeaders("UpdateKinesisStreamingDestination"); + let body; + body = JSON.stringify((0, import_smithy_client._json)(input)); + return buildHttpRpcRequest(context, headers, "/", void 0, body); +}, "se_UpdateKinesisStreamingDestinationCommand"); +var se_UpdateTableCommand = /* @__PURE__ */ __name(async (input, context) => { + const headers = sharedHeaders("UpdateTable"); + let body; + body = JSON.stringify((0, import_smithy_client._json)(input)); + 
return buildHttpRpcRequest(context, headers, "/", void 0, body); +}, "se_UpdateTableCommand"); +var se_UpdateTableReplicaAutoScalingCommand = /* @__PURE__ */ __name(async (input, context) => { + const headers = sharedHeaders("UpdateTableReplicaAutoScaling"); + let body; + body = JSON.stringify(se_UpdateTableReplicaAutoScalingInput(input, context)); + return buildHttpRpcRequest(context, headers, "/", void 0, body); +}, "se_UpdateTableReplicaAutoScalingCommand"); +var se_UpdateTimeToLiveCommand = /* @__PURE__ */ __name(async (input, context) => { + const headers = sharedHeaders("UpdateTimeToLive"); + let body; + body = JSON.stringify((0, import_smithy_client._json)(input)); + return buildHttpRpcRequest(context, headers, "/", void 0, body); +}, "se_UpdateTimeToLiveCommand"); +var de_BatchExecuteStatementCommand = /* @__PURE__ */ __name(async (output, context) => { + if (output.statusCode >= 300) { + return de_CommandError(output, context); + } + const data = await (0, import_core.parseJsonBody)(output.body, context); + let contents = {}; + contents = de_BatchExecuteStatementOutput(data, context); + const response = { + $metadata: deserializeMetadata(output), + ...contents + }; + return response; +}, "de_BatchExecuteStatementCommand"); +var de_BatchGetItemCommand = /* @__PURE__ */ __name(async (output, context) => { + if (output.statusCode >= 300) { + return de_CommandError(output, context); + } + const data = await (0, import_core.parseJsonBody)(output.body, context); + let contents = {}; + contents = de_BatchGetItemOutput(data, context); + const response = { + $metadata: deserializeMetadata(output), + ...contents + }; + return response; +}, "de_BatchGetItemCommand"); +var de_BatchWriteItemCommand = /* @__PURE__ */ __name(async (output, context) => { + if (output.statusCode >= 300) { + return de_CommandError(output, context); + } + const data = await (0, import_core.parseJsonBody)(output.body, context); + let contents = {}; + contents = de_BatchWriteItemOutput(data, 
context); + const response = { + $metadata: deserializeMetadata(output), + ...contents + }; + return response; +}, "de_BatchWriteItemCommand"); +var de_CreateBackupCommand = /* @__PURE__ */ __name(async (output, context) => { + if (output.statusCode >= 300) { + return de_CommandError(output, context); + } + const data = await (0, import_core.parseJsonBody)(output.body, context); + let contents = {}; + contents = de_CreateBackupOutput(data, context); + const response = { + $metadata: deserializeMetadata(output), + ...contents + }; + return response; +}, "de_CreateBackupCommand"); +var de_CreateGlobalTableCommand = /* @__PURE__ */ __name(async (output, context) => { + if (output.statusCode >= 300) { + return de_CommandError(output, context); + } + const data = await (0, import_core.parseJsonBody)(output.body, context); + let contents = {}; + contents = de_CreateGlobalTableOutput(data, context); + const response = { + $metadata: deserializeMetadata(output), + ...contents + }; + return response; +}, "de_CreateGlobalTableCommand"); +var de_CreateTableCommand = /* @__PURE__ */ __name(async (output, context) => { + if (output.statusCode >= 300) { + return de_CommandError(output, context); + } + const data = await (0, import_core.parseJsonBody)(output.body, context); + let contents = {}; + contents = de_CreateTableOutput(data, context); + const response = { + $metadata: deserializeMetadata(output), + ...contents + }; + return response; +}, "de_CreateTableCommand"); +var de_DeleteBackupCommand = /* @__PURE__ */ __name(async (output, context) => { + if (output.statusCode >= 300) { + return de_CommandError(output, context); + } + const data = await (0, import_core.parseJsonBody)(output.body, context); + let contents = {}; + contents = de_DeleteBackupOutput(data, context); + const response = { + $metadata: deserializeMetadata(output), + ...contents + }; + return response; +}, "de_DeleteBackupCommand"); +var de_DeleteItemCommand = /* @__PURE__ */ __name(async (output, context) 
=> { + if (output.statusCode >= 300) { + return de_CommandError(output, context); + } + const data = await (0, import_core.parseJsonBody)(output.body, context); + let contents = {}; + contents = de_DeleteItemOutput(data, context); + const response = { + $metadata: deserializeMetadata(output), + ...contents + }; + return response; +}, "de_DeleteItemCommand"); +var de_DeleteResourcePolicyCommand = /* @__PURE__ */ __name(async (output, context) => { + if (output.statusCode >= 300) { + return de_CommandError(output, context); + } + const data = await (0, import_core.parseJsonBody)(output.body, context); + let contents = {}; + contents = (0, import_smithy_client._json)(data); + const response = { + $metadata: deserializeMetadata(output), + ...contents + }; + return response; +}, "de_DeleteResourcePolicyCommand"); +var de_DeleteTableCommand = /* @__PURE__ */ __name(async (output, context) => { + if (output.statusCode >= 300) { + return de_CommandError(output, context); + } + const data = await (0, import_core.parseJsonBody)(output.body, context); + let contents = {}; + contents = de_DeleteTableOutput(data, context); + const response = { + $metadata: deserializeMetadata(output), + ...contents + }; + return response; +}, "de_DeleteTableCommand"); +var de_DescribeBackupCommand = /* @__PURE__ */ __name(async (output, context) => { + if (output.statusCode >= 300) { + return de_CommandError(output, context); + } + const data = await (0, import_core.parseJsonBody)(output.body, context); + let contents = {}; + contents = de_DescribeBackupOutput(data, context); + const response = { + $metadata: deserializeMetadata(output), + ...contents + }; + return response; +}, "de_DescribeBackupCommand"); +var de_DescribeContinuousBackupsCommand = /* @__PURE__ */ __name(async (output, context) => { + if (output.statusCode >= 300) { + return de_CommandError(output, context); + } + const data = await (0, import_core.parseJsonBody)(output.body, context); + let contents = {}; + contents = 
de_DescribeContinuousBackupsOutput(data, context); + const response = { + $metadata: deserializeMetadata(output), + ...contents + }; + return response; +}, "de_DescribeContinuousBackupsCommand"); +var de_DescribeContributorInsightsCommand = /* @__PURE__ */ __name(async (output, context) => { + if (output.statusCode >= 300) { + return de_CommandError(output, context); + } + const data = await (0, import_core.parseJsonBody)(output.body, context); + let contents = {}; + contents = de_DescribeContributorInsightsOutput(data, context); + const response = { + $metadata: deserializeMetadata(output), + ...contents + }; + return response; +}, "de_DescribeContributorInsightsCommand"); +var de_DescribeEndpointsCommand = /* @__PURE__ */ __name(async (output, context) => { + if (output.statusCode >= 300) { + return de_CommandError(output, context); + } + const data = await (0, import_core.parseJsonBody)(output.body, context); + let contents = {}; + contents = (0, import_smithy_client._json)(data); + const response = { + $metadata: deserializeMetadata(output), + ...contents + }; + return response; +}, "de_DescribeEndpointsCommand"); +var de_DescribeExportCommand = /* @__PURE__ */ __name(async (output, context) => { + if (output.statusCode >= 300) { + return de_CommandError(output, context); + } + const data = await (0, import_core.parseJsonBody)(output.body, context); + let contents = {}; + contents = de_DescribeExportOutput(data, context); + const response = { + $metadata: deserializeMetadata(output), + ...contents + }; + return response; +}, "de_DescribeExportCommand"); +var de_DescribeGlobalTableCommand = /* @__PURE__ */ __name(async (output, context) => { + if (output.statusCode >= 300) { + return de_CommandError(output, context); + } + const data = await (0, import_core.parseJsonBody)(output.body, context); + let contents = {}; + contents = de_DescribeGlobalTableOutput(data, context); + const response = { + $metadata: deserializeMetadata(output), + ...contents + }; + return 
response; +}, "de_DescribeGlobalTableCommand"); +var de_DescribeGlobalTableSettingsCommand = /* @__PURE__ */ __name(async (output, context) => { + if (output.statusCode >= 300) { + return de_CommandError(output, context); + } + const data = await (0, import_core.parseJsonBody)(output.body, context); + let contents = {}; + contents = de_DescribeGlobalTableSettingsOutput(data, context); + const response = { + $metadata: deserializeMetadata(output), + ...contents + }; + return response; +}, "de_DescribeGlobalTableSettingsCommand"); +var de_DescribeImportCommand = /* @__PURE__ */ __name(async (output, context) => { + if (output.statusCode >= 300) { + return de_CommandError(output, context); + } + const data = await (0, import_core.parseJsonBody)(output.body, context); + let contents = {}; + contents = de_DescribeImportOutput(data, context); + const response = { + $metadata: deserializeMetadata(output), + ...contents + }; + return response; +}, "de_DescribeImportCommand"); +var de_DescribeKinesisStreamingDestinationCommand = /* @__PURE__ */ __name(async (output, context) => { + if (output.statusCode >= 300) { + return de_CommandError(output, context); + } + const data = await (0, import_core.parseJsonBody)(output.body, context); + let contents = {}; + contents = (0, import_smithy_client._json)(data); + const response = { + $metadata: deserializeMetadata(output), + ...contents + }; + return response; +}, "de_DescribeKinesisStreamingDestinationCommand"); +var de_DescribeLimitsCommand = /* @__PURE__ */ __name(async (output, context) => { + if (output.statusCode >= 300) { + return de_CommandError(output, context); + } + const data = await (0, import_core.parseJsonBody)(output.body, context); + let contents = {}; + contents = (0, import_smithy_client._json)(data); + const response = { + $metadata: deserializeMetadata(output), + ...contents + }; + return response; +}, "de_DescribeLimitsCommand"); +var de_DescribeTableCommand = /* @__PURE__ */ __name(async (output, context) => 
{ + if (output.statusCode >= 300) { + return de_CommandError(output, context); + } + const data = await (0, import_core.parseJsonBody)(output.body, context); + let contents = {}; + contents = de_DescribeTableOutput(data, context); + const response = { + $metadata: deserializeMetadata(output), + ...contents + }; + return response; +}, "de_DescribeTableCommand"); +var de_DescribeTableReplicaAutoScalingCommand = /* @__PURE__ */ __name(async (output, context) => { + if (output.statusCode >= 300) { + return de_CommandError(output, context); + } + const data = await (0, import_core.parseJsonBody)(output.body, context); + let contents = {}; + contents = de_DescribeTableReplicaAutoScalingOutput(data, context); + const response = { + $metadata: deserializeMetadata(output), + ...contents + }; + return response; +}, "de_DescribeTableReplicaAutoScalingCommand"); +var de_DescribeTimeToLiveCommand = /* @__PURE__ */ __name(async (output, context) => { + if (output.statusCode >= 300) { + return de_CommandError(output, context); + } + const data = await (0, import_core.parseJsonBody)(output.body, context); + let contents = {}; + contents = (0, import_smithy_client._json)(data); + const response = { + $metadata: deserializeMetadata(output), + ...contents + }; + return response; +}, "de_DescribeTimeToLiveCommand"); +var de_DisableKinesisStreamingDestinationCommand = /* @__PURE__ */ __name(async (output, context) => { + if (output.statusCode >= 300) { + return de_CommandError(output, context); + } + const data = await (0, import_core.parseJsonBody)(output.body, context); + let contents = {}; + contents = (0, import_smithy_client._json)(data); + const response = { + $metadata: deserializeMetadata(output), + ...contents + }; + return response; +}, "de_DisableKinesisStreamingDestinationCommand"); +var de_EnableKinesisStreamingDestinationCommand = /* @__PURE__ */ __name(async (output, context) => { + if (output.statusCode >= 300) { + return de_CommandError(output, context); + } + const 
data = await (0, import_core.parseJsonBody)(output.body, context); + let contents = {}; + contents = (0, import_smithy_client._json)(data); + const response = { + $metadata: deserializeMetadata(output), + ...contents + }; + return response; +}, "de_EnableKinesisStreamingDestinationCommand"); +var de_ExecuteStatementCommand = /* @__PURE__ */ __name(async (output, context) => { + if (output.statusCode >= 300) { + return de_CommandError(output, context); + } + const data = await (0, import_core.parseJsonBody)(output.body, context); + let contents = {}; + contents = de_ExecuteStatementOutput(data, context); + const response = { + $metadata: deserializeMetadata(output), + ...contents + }; + return response; +}, "de_ExecuteStatementCommand"); +var de_ExecuteTransactionCommand = /* @__PURE__ */ __name(async (output, context) => { + if (output.statusCode >= 300) { + return de_CommandError(output, context); + } + const data = await (0, import_core.parseJsonBody)(output.body, context); + let contents = {}; + contents = de_ExecuteTransactionOutput(data, context); + const response = { + $metadata: deserializeMetadata(output), + ...contents + }; + return response; +}, "de_ExecuteTransactionCommand"); +var de_ExportTableToPointInTimeCommand = /* @__PURE__ */ __name(async (output, context) => { + if (output.statusCode >= 300) { + return de_CommandError(output, context); + } + const data = await (0, import_core.parseJsonBody)(output.body, context); + let contents = {}; + contents = de_ExportTableToPointInTimeOutput(data, context); + const response = { + $metadata: deserializeMetadata(output), + ...contents + }; + return response; +}, "de_ExportTableToPointInTimeCommand"); +var de_GetItemCommand = /* @__PURE__ */ __name(async (output, context) => { + if (output.statusCode >= 300) { + return de_CommandError(output, context); + } + const data = await (0, import_core.parseJsonBody)(output.body, context); + let contents = {}; + contents = de_GetItemOutput(data, context); + const 
response = { + $metadata: deserializeMetadata(output), + ...contents + }; + return response; +}, "de_GetItemCommand"); +var de_GetResourcePolicyCommand = /* @__PURE__ */ __name(async (output, context) => { + if (output.statusCode >= 300) { + return de_CommandError(output, context); + } + const data = await (0, import_core.parseJsonBody)(output.body, context); + let contents = {}; + contents = (0, import_smithy_client._json)(data); + const response = { + $metadata: deserializeMetadata(output), + ...contents + }; + return response; +}, "de_GetResourcePolicyCommand"); +var de_ImportTableCommand = /* @__PURE__ */ __name(async (output, context) => { + if (output.statusCode >= 300) { + return de_CommandError(output, context); + } + const data = await (0, import_core.parseJsonBody)(output.body, context); + let contents = {}; + contents = de_ImportTableOutput(data, context); + const response = { + $metadata: deserializeMetadata(output), + ...contents + }; + return response; +}, "de_ImportTableCommand"); +var de_ListBackupsCommand = /* @__PURE__ */ __name(async (output, context) => { + if (output.statusCode >= 300) { + return de_CommandError(output, context); + } + const data = await (0, import_core.parseJsonBody)(output.body, context); + let contents = {}; + contents = de_ListBackupsOutput(data, context); + const response = { + $metadata: deserializeMetadata(output), + ...contents + }; + return response; +}, "de_ListBackupsCommand"); +var de_ListContributorInsightsCommand = /* @__PURE__ */ __name(async (output, context) => { + if (output.statusCode >= 300) { + return de_CommandError(output, context); + } + const data = await (0, import_core.parseJsonBody)(output.body, context); + let contents = {}; + contents = (0, import_smithy_client._json)(data); + const response = { + $metadata: deserializeMetadata(output), + ...contents + }; + return response; +}, "de_ListContributorInsightsCommand"); +var de_ListExportsCommand = /* @__PURE__ */ __name(async (output, context) => { + 
if (output.statusCode >= 300) { + return de_CommandError(output, context); + } + const data = await (0, import_core.parseJsonBody)(output.body, context); + let contents = {}; + contents = (0, import_smithy_client._json)(data); + const response = { + $metadata: deserializeMetadata(output), + ...contents + }; + return response; +}, "de_ListExportsCommand"); +var de_ListGlobalTablesCommand = /* @__PURE__ */ __name(async (output, context) => { + if (output.statusCode >= 300) { + return de_CommandError(output, context); + } + const data = await (0, import_core.parseJsonBody)(output.body, context); + let contents = {}; + contents = (0, import_smithy_client._json)(data); + const response = { + $metadata: deserializeMetadata(output), + ...contents + }; + return response; +}, "de_ListGlobalTablesCommand"); +var de_ListImportsCommand = /* @__PURE__ */ __name(async (output, context) => { + if (output.statusCode >= 300) { + return de_CommandError(output, context); + } + const data = await (0, import_core.parseJsonBody)(output.body, context); + let contents = {}; + contents = de_ListImportsOutput(data, context); + const response = { + $metadata: deserializeMetadata(output), + ...contents + }; + return response; +}, "de_ListImportsCommand"); +var de_ListTablesCommand = /* @__PURE__ */ __name(async (output, context) => { + if (output.statusCode >= 300) { + return de_CommandError(output, context); + } + const data = await (0, import_core.parseJsonBody)(output.body, context); + let contents = {}; + contents = (0, import_smithy_client._json)(data); + const response = { + $metadata: deserializeMetadata(output), + ...contents + }; + return response; +}, "de_ListTablesCommand"); +var de_ListTagsOfResourceCommand = /* @__PURE__ */ __name(async (output, context) => { + if (output.statusCode >= 300) { + return de_CommandError(output, context); + } + const data = await (0, import_core.parseJsonBody)(output.body, context); + let contents = {}; + contents = (0, 
import_smithy_client._json)(data); + const response = { + $metadata: deserializeMetadata(output), + ...contents + }; + return response; +}, "de_ListTagsOfResourceCommand"); +var de_PutItemCommand = /* @__PURE__ */ __name(async (output, context) => { + if (output.statusCode >= 300) { + return de_CommandError(output, context); + } + const data = await (0, import_core.parseJsonBody)(output.body, context); + let contents = {}; + contents = de_PutItemOutput(data, context); + const response = { + $metadata: deserializeMetadata(output), + ...contents + }; + return response; +}, "de_PutItemCommand"); +var de_PutResourcePolicyCommand = /* @__PURE__ */ __name(async (output, context) => { + if (output.statusCode >= 300) { + return de_CommandError(output, context); + } + const data = await (0, import_core.parseJsonBody)(output.body, context); + let contents = {}; + contents = (0, import_smithy_client._json)(data); + const response = { + $metadata: deserializeMetadata(output), + ...contents + }; + return response; +}, "de_PutResourcePolicyCommand"); +var de_QueryCommand = /* @__PURE__ */ __name(async (output, context) => { + if (output.statusCode >= 300) { + return de_CommandError(output, context); + } + const data = await (0, import_core.parseJsonBody)(output.body, context); + let contents = {}; + contents = de_QueryOutput(data, context); + const response = { + $metadata: deserializeMetadata(output), + ...contents + }; + return response; +}, "de_QueryCommand"); +var de_RestoreTableFromBackupCommand = /* @__PURE__ */ __name(async (output, context) => { + if (output.statusCode >= 300) { + return de_CommandError(output, context); + } + const data = await (0, import_core.parseJsonBody)(output.body, context); + let contents = {}; + contents = de_RestoreTableFromBackupOutput(data, context); + const response = { + $metadata: deserializeMetadata(output), + ...contents + }; + return response; +}, "de_RestoreTableFromBackupCommand"); +var de_RestoreTableToPointInTimeCommand = /* 
@__PURE__ */ __name(async (output, context) => { + if (output.statusCode >= 300) { + return de_CommandError(output, context); + } + const data = await (0, import_core.parseJsonBody)(output.body, context); + let contents = {}; + contents = de_RestoreTableToPointInTimeOutput(data, context); + const response = { + $metadata: deserializeMetadata(output), + ...contents + }; + return response; +}, "de_RestoreTableToPointInTimeCommand"); +var de_ScanCommand = /* @__PURE__ */ __name(async (output, context) => { + if (output.statusCode >= 300) { + return de_CommandError(output, context); + } + const data = await (0, import_core.parseJsonBody)(output.body, context); + let contents = {}; + contents = de_ScanOutput(data, context); + const response = { + $metadata: deserializeMetadata(output), + ...contents + }; + return response; +}, "de_ScanCommand"); +var de_TagResourceCommand = /* @__PURE__ */ __name(async (output, context) => { + if (output.statusCode >= 300) { + return de_CommandError(output, context); + } + await (0, import_smithy_client.collectBody)(output.body, context); + const response = { + $metadata: deserializeMetadata(output) + }; + return response; +}, "de_TagResourceCommand"); +var de_TransactGetItemsCommand = /* @__PURE__ */ __name(async (output, context) => { + if (output.statusCode >= 300) { + return de_CommandError(output, context); + } + const data = await (0, import_core.parseJsonBody)(output.body, context); + let contents = {}; + contents = de_TransactGetItemsOutput(data, context); + const response = { + $metadata: deserializeMetadata(output), + ...contents + }; + return response; +}, "de_TransactGetItemsCommand"); +var de_TransactWriteItemsCommand = /* @__PURE__ */ __name(async (output, context) => { + if (output.statusCode >= 300) { + return de_CommandError(output, context); + } + const data = await (0, import_core.parseJsonBody)(output.body, context); + let contents = {}; + contents = de_TransactWriteItemsOutput(data, context); + const response = { + 
$metadata: deserializeMetadata(output), + ...contents + }; + return response; +}, "de_TransactWriteItemsCommand"); +var de_UntagResourceCommand = /* @__PURE__ */ __name(async (output, context) => { + if (output.statusCode >= 300) { + return de_CommandError(output, context); + } + await (0, import_smithy_client.collectBody)(output.body, context); + const response = { + $metadata: deserializeMetadata(output) + }; + return response; +}, "de_UntagResourceCommand"); +var de_UpdateContinuousBackupsCommand = /* @__PURE__ */ __name(async (output, context) => { + if (output.statusCode >= 300) { + return de_CommandError(output, context); + } + const data = await (0, import_core.parseJsonBody)(output.body, context); + let contents = {}; + contents = de_UpdateContinuousBackupsOutput(data, context); + const response = { + $metadata: deserializeMetadata(output), + ...contents + }; + return response; +}, "de_UpdateContinuousBackupsCommand"); +var de_UpdateContributorInsightsCommand = /* @__PURE__ */ __name(async (output, context) => { + if (output.statusCode >= 300) { + return de_CommandError(output, context); + } + const data = await (0, import_core.parseJsonBody)(output.body, context); + let contents = {}; + contents = (0, import_smithy_client._json)(data); + const response = { + $metadata: deserializeMetadata(output), + ...contents + }; + return response; +}, "de_UpdateContributorInsightsCommand"); +var de_UpdateGlobalTableCommand = /* @__PURE__ */ __name(async (output, context) => { + if (output.statusCode >= 300) { + return de_CommandError(output, context); + } + const data = await (0, import_core.parseJsonBody)(output.body, context); + let contents = {}; + contents = de_UpdateGlobalTableOutput(data, context); + const response = { + $metadata: deserializeMetadata(output), + ...contents + }; + return response; +}, "de_UpdateGlobalTableCommand"); +var de_UpdateGlobalTableSettingsCommand = /* @__PURE__ */ __name(async (output, context) => { + if (output.statusCode >= 300) { + 
return de_CommandError(output, context); + } + const data = await (0, import_core.parseJsonBody)(output.body, context); + let contents = {}; + contents = de_UpdateGlobalTableSettingsOutput(data, context); + const response = { + $metadata: deserializeMetadata(output), + ...contents + }; + return response; +}, "de_UpdateGlobalTableSettingsCommand"); +var de_UpdateItemCommand = /* @__PURE__ */ __name(async (output, context) => { + if (output.statusCode >= 300) { + return de_CommandError(output, context); + } + const data = await (0, import_core.parseJsonBody)(output.body, context); + let contents = {}; + contents = de_UpdateItemOutput(data, context); + const response = { + $metadata: deserializeMetadata(output), + ...contents + }; + return response; +}, "de_UpdateItemCommand"); +var de_UpdateKinesisStreamingDestinationCommand = /* @__PURE__ */ __name(async (output, context) => { + if (output.statusCode >= 300) { + return de_CommandError(output, context); + } + const data = await (0, import_core.parseJsonBody)(output.body, context); + let contents = {}; + contents = (0, import_smithy_client._json)(data); + const response = { + $metadata: deserializeMetadata(output), + ...contents + }; + return response; +}, "de_UpdateKinesisStreamingDestinationCommand"); +var de_UpdateTableCommand = /* @__PURE__ */ __name(async (output, context) => { + if (output.statusCode >= 300) { + return de_CommandError(output, context); + } + const data = await (0, import_core.parseJsonBody)(output.body, context); + let contents = {}; + contents = de_UpdateTableOutput(data, context); + const response = { + $metadata: deserializeMetadata(output), + ...contents + }; + return response; +}, "de_UpdateTableCommand"); +var de_UpdateTableReplicaAutoScalingCommand = /* @__PURE__ */ __name(async (output, context) => { + if (output.statusCode >= 300) { + return de_CommandError(output, context); + } + const data = await (0, import_core.parseJsonBody)(output.body, context); + let contents = {}; + contents = 
de_UpdateTableReplicaAutoScalingOutput(data, context); + const response = { + $metadata: deserializeMetadata(output), + ...contents + }; + return response; +}, "de_UpdateTableReplicaAutoScalingCommand"); +var de_UpdateTimeToLiveCommand = /* @__PURE__ */ __name(async (output, context) => { + if (output.statusCode >= 300) { + return de_CommandError(output, context); + } + const data = await (0, import_core.parseJsonBody)(output.body, context); + let contents = {}; + contents = (0, import_smithy_client._json)(data); + const response = { + $metadata: deserializeMetadata(output), + ...contents + }; + return response; +}, "de_UpdateTimeToLiveCommand"); +var de_CommandError = /* @__PURE__ */ __name(async (output, context) => { + const parsedOutput = { + ...output, + body: await (0, import_core.parseJsonErrorBody)(output.body, context) + }; + const errorCode = (0, import_core.loadRestJsonErrorCode)(output, parsedOutput.body); + switch (errorCode) { + case "InternalServerError": + case "com.amazonaws.dynamodb#InternalServerError": + throw await de_InternalServerErrorRes(parsedOutput, context); + case "RequestLimitExceeded": + case "com.amazonaws.dynamodb#RequestLimitExceeded": + throw await de_RequestLimitExceededRes(parsedOutput, context); + case "InvalidEndpointException": + case "com.amazonaws.dynamodb#InvalidEndpointException": + throw await de_InvalidEndpointExceptionRes(parsedOutput, context); + case "ProvisionedThroughputExceededException": + case "com.amazonaws.dynamodb#ProvisionedThroughputExceededException": + throw await de_ProvisionedThroughputExceededExceptionRes(parsedOutput, context); + case "ResourceNotFoundException": + case "com.amazonaws.dynamodb#ResourceNotFoundException": + throw await de_ResourceNotFoundExceptionRes(parsedOutput, context); + case "ItemCollectionSizeLimitExceededException": + case "com.amazonaws.dynamodb#ItemCollectionSizeLimitExceededException": + throw await de_ItemCollectionSizeLimitExceededExceptionRes(parsedOutput, context); + case 
"BackupInUseException": + case "com.amazonaws.dynamodb#BackupInUseException": + throw await de_BackupInUseExceptionRes(parsedOutput, context); + case "ContinuousBackupsUnavailableException": + case "com.amazonaws.dynamodb#ContinuousBackupsUnavailableException": + throw await de_ContinuousBackupsUnavailableExceptionRes(parsedOutput, context); + case "LimitExceededException": + case "com.amazonaws.dynamodb#LimitExceededException": + throw await de_LimitExceededExceptionRes(parsedOutput, context); + case "TableInUseException": + case "com.amazonaws.dynamodb#TableInUseException": + throw await de_TableInUseExceptionRes(parsedOutput, context); + case "TableNotFoundException": + case "com.amazonaws.dynamodb#TableNotFoundException": + throw await de_TableNotFoundExceptionRes(parsedOutput, context); + case "GlobalTableAlreadyExistsException": + case "com.amazonaws.dynamodb#GlobalTableAlreadyExistsException": + throw await de_GlobalTableAlreadyExistsExceptionRes(parsedOutput, context); + case "ResourceInUseException": + case "com.amazonaws.dynamodb#ResourceInUseException": + throw await de_ResourceInUseExceptionRes(parsedOutput, context); + case "BackupNotFoundException": + case "com.amazonaws.dynamodb#BackupNotFoundException": + throw await de_BackupNotFoundExceptionRes(parsedOutput, context); + case "ConditionalCheckFailedException": + case "com.amazonaws.dynamodb#ConditionalCheckFailedException": + throw await de_ConditionalCheckFailedExceptionRes(parsedOutput, context); + case "ReplicatedWriteConflictException": + case "com.amazonaws.dynamodb#ReplicatedWriteConflictException": + throw await de_ReplicatedWriteConflictExceptionRes(parsedOutput, context); + case "TransactionConflictException": + case "com.amazonaws.dynamodb#TransactionConflictException": + throw await de_TransactionConflictExceptionRes(parsedOutput, context); + case "PolicyNotFoundException": + case "com.amazonaws.dynamodb#PolicyNotFoundException": + throw await de_PolicyNotFoundExceptionRes(parsedOutput, 
context); + case "ExportNotFoundException": + case "com.amazonaws.dynamodb#ExportNotFoundException": + throw await de_ExportNotFoundExceptionRes(parsedOutput, context); + case "GlobalTableNotFoundException": + case "com.amazonaws.dynamodb#GlobalTableNotFoundException": + throw await de_GlobalTableNotFoundExceptionRes(parsedOutput, context); + case "ImportNotFoundException": + case "com.amazonaws.dynamodb#ImportNotFoundException": + throw await de_ImportNotFoundExceptionRes(parsedOutput, context); + case "DuplicateItemException": + case "com.amazonaws.dynamodb#DuplicateItemException": + throw await de_DuplicateItemExceptionRes(parsedOutput, context); + case "IdempotentParameterMismatchException": + case "com.amazonaws.dynamodb#IdempotentParameterMismatchException": + throw await de_IdempotentParameterMismatchExceptionRes(parsedOutput, context); + case "TransactionCanceledException": + case "com.amazonaws.dynamodb#TransactionCanceledException": + throw await de_TransactionCanceledExceptionRes(parsedOutput, context); + case "TransactionInProgressException": + case "com.amazonaws.dynamodb#TransactionInProgressException": + throw await de_TransactionInProgressExceptionRes(parsedOutput, context); + case "ExportConflictException": + case "com.amazonaws.dynamodb#ExportConflictException": + throw await de_ExportConflictExceptionRes(parsedOutput, context); + case "InvalidExportTimeException": + case "com.amazonaws.dynamodb#InvalidExportTimeException": + throw await de_InvalidExportTimeExceptionRes(parsedOutput, context); + case "PointInTimeRecoveryUnavailableException": + case "com.amazonaws.dynamodb#PointInTimeRecoveryUnavailableException": + throw await de_PointInTimeRecoveryUnavailableExceptionRes(parsedOutput, context); + case "ImportConflictException": + case "com.amazonaws.dynamodb#ImportConflictException": + throw await de_ImportConflictExceptionRes(parsedOutput, context); + case "TableAlreadyExistsException": + case 
"com.amazonaws.dynamodb#TableAlreadyExistsException": + throw await de_TableAlreadyExistsExceptionRes(parsedOutput, context); + case "InvalidRestoreTimeException": + case "com.amazonaws.dynamodb#InvalidRestoreTimeException": + throw await de_InvalidRestoreTimeExceptionRes(parsedOutput, context); + case "ReplicaAlreadyExistsException": + case "com.amazonaws.dynamodb#ReplicaAlreadyExistsException": + throw await de_ReplicaAlreadyExistsExceptionRes(parsedOutput, context); + case "ReplicaNotFoundException": + case "com.amazonaws.dynamodb#ReplicaNotFoundException": + throw await de_ReplicaNotFoundExceptionRes(parsedOutput, context); + case "IndexNotFoundException": + case "com.amazonaws.dynamodb#IndexNotFoundException": + throw await de_IndexNotFoundExceptionRes(parsedOutput, context); + default: + const parsedBody = parsedOutput.body; + return throwDefaultError({ + output, + parsedBody, + errorCode + }); + } +}, "de_CommandError"); +var de_BackupInUseExceptionRes = /* @__PURE__ */ __name(async (parsedOutput, context) => { + const body = parsedOutput.body; + const deserialized = (0, import_smithy_client._json)(body); + const exception = new BackupInUseException({ + $metadata: deserializeMetadata(parsedOutput), + ...deserialized + }); + return (0, import_smithy_client.decorateServiceException)(exception, body); +}, "de_BackupInUseExceptionRes"); +var de_BackupNotFoundExceptionRes = /* @__PURE__ */ __name(async (parsedOutput, context) => { + const body = parsedOutput.body; + const deserialized = (0, import_smithy_client._json)(body); + const exception = new BackupNotFoundException({ + $metadata: deserializeMetadata(parsedOutput), + ...deserialized + }); + return (0, import_smithy_client.decorateServiceException)(exception, body); +}, "de_BackupNotFoundExceptionRes"); +var de_ConditionalCheckFailedExceptionRes = /* @__PURE__ */ __name(async (parsedOutput, context) => { + const body = parsedOutput.body; + const deserialized = de_ConditionalCheckFailedException(body, 
context); + const exception = new ConditionalCheckFailedException({ + $metadata: deserializeMetadata(parsedOutput), + ...deserialized + }); + return (0, import_smithy_client.decorateServiceException)(exception, body); +}, "de_ConditionalCheckFailedExceptionRes"); +var de_ContinuousBackupsUnavailableExceptionRes = /* @__PURE__ */ __name(async (parsedOutput, context) => { + const body = parsedOutput.body; + const deserialized = (0, import_smithy_client._json)(body); + const exception = new ContinuousBackupsUnavailableException({ + $metadata: deserializeMetadata(parsedOutput), + ...deserialized + }); + return (0, import_smithy_client.decorateServiceException)(exception, body); +}, "de_ContinuousBackupsUnavailableExceptionRes"); +var de_DuplicateItemExceptionRes = /* @__PURE__ */ __name(async (parsedOutput, context) => { + const body = parsedOutput.body; + const deserialized = (0, import_smithy_client._json)(body); + const exception = new DuplicateItemException({ + $metadata: deserializeMetadata(parsedOutput), + ...deserialized + }); + return (0, import_smithy_client.decorateServiceException)(exception, body); +}, "de_DuplicateItemExceptionRes"); +var de_ExportConflictExceptionRes = /* @__PURE__ */ __name(async (parsedOutput, context) => { + const body = parsedOutput.body; + const deserialized = (0, import_smithy_client._json)(body); + const exception = new ExportConflictException({ + $metadata: deserializeMetadata(parsedOutput), + ...deserialized + }); + return (0, import_smithy_client.decorateServiceException)(exception, body); +}, "de_ExportConflictExceptionRes"); +var de_ExportNotFoundExceptionRes = /* @__PURE__ */ __name(async (parsedOutput, context) => { + const body = parsedOutput.body; + const deserialized = (0, import_smithy_client._json)(body); + const exception = new ExportNotFoundException({ + $metadata: deserializeMetadata(parsedOutput), + ...deserialized + }); + return (0, import_smithy_client.decorateServiceException)(exception, body); +}, 
"de_ExportNotFoundExceptionRes"); +var de_GlobalTableAlreadyExistsExceptionRes = /* @__PURE__ */ __name(async (parsedOutput, context) => { + const body = parsedOutput.body; + const deserialized = (0, import_smithy_client._json)(body); + const exception = new GlobalTableAlreadyExistsException({ + $metadata: deserializeMetadata(parsedOutput), + ...deserialized + }); + return (0, import_smithy_client.decorateServiceException)(exception, body); +}, "de_GlobalTableAlreadyExistsExceptionRes"); +var de_GlobalTableNotFoundExceptionRes = /* @__PURE__ */ __name(async (parsedOutput, context) => { + const body = parsedOutput.body; + const deserialized = (0, import_smithy_client._json)(body); + const exception = new GlobalTableNotFoundException({ + $metadata: deserializeMetadata(parsedOutput), + ...deserialized + }); + return (0, import_smithy_client.decorateServiceException)(exception, body); +}, "de_GlobalTableNotFoundExceptionRes"); +var de_IdempotentParameterMismatchExceptionRes = /* @__PURE__ */ __name(async (parsedOutput, context) => { + const body = parsedOutput.body; + const deserialized = (0, import_smithy_client._json)(body); + const exception = new IdempotentParameterMismatchException({ + $metadata: deserializeMetadata(parsedOutput), + ...deserialized + }); + return (0, import_smithy_client.decorateServiceException)(exception, body); +}, "de_IdempotentParameterMismatchExceptionRes"); +var de_ImportConflictExceptionRes = /* @__PURE__ */ __name(async (parsedOutput, context) => { + const body = parsedOutput.body; + const deserialized = (0, import_smithy_client._json)(body); + const exception = new ImportConflictException({ + $metadata: deserializeMetadata(parsedOutput), + ...deserialized + }); + return (0, import_smithy_client.decorateServiceException)(exception, body); +}, "de_ImportConflictExceptionRes"); +var de_ImportNotFoundExceptionRes = /* @__PURE__ */ __name(async (parsedOutput, context) => { + const body = parsedOutput.body; + const deserialized = (0, 
import_smithy_client._json)(body); + const exception = new ImportNotFoundException({ + $metadata: deserializeMetadata(parsedOutput), + ...deserialized + }); + return (0, import_smithy_client.decorateServiceException)(exception, body); +}, "de_ImportNotFoundExceptionRes"); +var de_IndexNotFoundExceptionRes = /* @__PURE__ */ __name(async (parsedOutput, context) => { + const body = parsedOutput.body; + const deserialized = (0, import_smithy_client._json)(body); + const exception = new IndexNotFoundException({ + $metadata: deserializeMetadata(parsedOutput), + ...deserialized + }); + return (0, import_smithy_client.decorateServiceException)(exception, body); +}, "de_IndexNotFoundExceptionRes"); +var de_InternalServerErrorRes = /* @__PURE__ */ __name(async (parsedOutput, context) => { + const body = parsedOutput.body; + const deserialized = (0, import_smithy_client._json)(body); + const exception = new InternalServerError({ + $metadata: deserializeMetadata(parsedOutput), + ...deserialized + }); + return (0, import_smithy_client.decorateServiceException)(exception, body); +}, "de_InternalServerErrorRes"); +var de_InvalidEndpointExceptionRes = /* @__PURE__ */ __name(async (parsedOutput, context) => { + const body = parsedOutput.body; + const deserialized = (0, import_smithy_client._json)(body); + const exception = new InvalidEndpointException({ + $metadata: deserializeMetadata(parsedOutput), + ...deserialized + }); + return (0, import_smithy_client.decorateServiceException)(exception, body); +}, "de_InvalidEndpointExceptionRes"); +var de_InvalidExportTimeExceptionRes = /* @__PURE__ */ __name(async (parsedOutput, context) => { + const body = parsedOutput.body; + const deserialized = (0, import_smithy_client._json)(body); + const exception = new InvalidExportTimeException({ + $metadata: deserializeMetadata(parsedOutput), + ...deserialized + }); + return (0, import_smithy_client.decorateServiceException)(exception, body); +}, "de_InvalidExportTimeExceptionRes"); +var 
de_InvalidRestoreTimeExceptionRes = /* @__PURE__ */ __name(async (parsedOutput, context) => { + const body = parsedOutput.body; + const deserialized = (0, import_smithy_client._json)(body); + const exception = new InvalidRestoreTimeException({ + $metadata: deserializeMetadata(parsedOutput), + ...deserialized + }); + return (0, import_smithy_client.decorateServiceException)(exception, body); +}, "de_InvalidRestoreTimeExceptionRes"); +var de_ItemCollectionSizeLimitExceededExceptionRes = /* @__PURE__ */ __name(async (parsedOutput, context) => { + const body = parsedOutput.body; + const deserialized = (0, import_smithy_client._json)(body); + const exception = new ItemCollectionSizeLimitExceededException({ + $metadata: deserializeMetadata(parsedOutput), + ...deserialized + }); + return (0, import_smithy_client.decorateServiceException)(exception, body); +}, "de_ItemCollectionSizeLimitExceededExceptionRes"); +var de_LimitExceededExceptionRes = /* @__PURE__ */ __name(async (parsedOutput, context) => { + const body = parsedOutput.body; + const deserialized = (0, import_smithy_client._json)(body); + const exception = new LimitExceededException({ + $metadata: deserializeMetadata(parsedOutput), + ...deserialized + }); + return (0, import_smithy_client.decorateServiceException)(exception, body); +}, "de_LimitExceededExceptionRes"); +var de_PointInTimeRecoveryUnavailableExceptionRes = /* @__PURE__ */ __name(async (parsedOutput, context) => { + const body = parsedOutput.body; + const deserialized = (0, import_smithy_client._json)(body); + const exception = new PointInTimeRecoveryUnavailableException({ + $metadata: deserializeMetadata(parsedOutput), + ...deserialized + }); + return (0, import_smithy_client.decorateServiceException)(exception, body); +}, "de_PointInTimeRecoveryUnavailableExceptionRes"); +var de_PolicyNotFoundExceptionRes = /* @__PURE__ */ __name(async (parsedOutput, context) => { + const body = parsedOutput.body; + const deserialized = (0, 
import_smithy_client._json)(body); + const exception = new PolicyNotFoundException({ + $metadata: deserializeMetadata(parsedOutput), + ...deserialized + }); + return (0, import_smithy_client.decorateServiceException)(exception, body); +}, "de_PolicyNotFoundExceptionRes"); +var de_ProvisionedThroughputExceededExceptionRes = /* @__PURE__ */ __name(async (parsedOutput, context) => { + const body = parsedOutput.body; + const deserialized = (0, import_smithy_client._json)(body); + const exception = new ProvisionedThroughputExceededException({ + $metadata: deserializeMetadata(parsedOutput), + ...deserialized + }); + return (0, import_smithy_client.decorateServiceException)(exception, body); +}, "de_ProvisionedThroughputExceededExceptionRes"); +var de_ReplicaAlreadyExistsExceptionRes = /* @__PURE__ */ __name(async (parsedOutput, context) => { + const body = parsedOutput.body; + const deserialized = (0, import_smithy_client._json)(body); + const exception = new ReplicaAlreadyExistsException({ + $metadata: deserializeMetadata(parsedOutput), + ...deserialized + }); + return (0, import_smithy_client.decorateServiceException)(exception, body); +}, "de_ReplicaAlreadyExistsExceptionRes"); +var de_ReplicaNotFoundExceptionRes = /* @__PURE__ */ __name(async (parsedOutput, context) => { + const body = parsedOutput.body; + const deserialized = (0, import_smithy_client._json)(body); + const exception = new ReplicaNotFoundException({ + $metadata: deserializeMetadata(parsedOutput), + ...deserialized + }); + return (0, import_smithy_client.decorateServiceException)(exception, body); +}, "de_ReplicaNotFoundExceptionRes"); +var de_ReplicatedWriteConflictExceptionRes = /* @__PURE__ */ __name(async (parsedOutput, context) => { + const body = parsedOutput.body; + const deserialized = (0, import_smithy_client._json)(body); + const exception = new ReplicatedWriteConflictException({ + $metadata: deserializeMetadata(parsedOutput), + ...deserialized + }); + return (0, 
import_smithy_client.decorateServiceException)(exception, body); +}, "de_ReplicatedWriteConflictExceptionRes"); +var de_RequestLimitExceededRes = /* @__PURE__ */ __name(async (parsedOutput, context) => { + const body = parsedOutput.body; + const deserialized = (0, import_smithy_client._json)(body); + const exception = new RequestLimitExceeded({ + $metadata: deserializeMetadata(parsedOutput), + ...deserialized + }); + return (0, import_smithy_client.decorateServiceException)(exception, body); +}, "de_RequestLimitExceededRes"); +var de_ResourceInUseExceptionRes = /* @__PURE__ */ __name(async (parsedOutput, context) => { + const body = parsedOutput.body; + const deserialized = (0, import_smithy_client._json)(body); + const exception = new ResourceInUseException({ + $metadata: deserializeMetadata(parsedOutput), + ...deserialized + }); + return (0, import_smithy_client.decorateServiceException)(exception, body); +}, "de_ResourceInUseExceptionRes"); +var de_ResourceNotFoundExceptionRes = /* @__PURE__ */ __name(async (parsedOutput, context) => { + const body = parsedOutput.body; + const deserialized = (0, import_smithy_client._json)(body); + const exception = new ResourceNotFoundException({ + $metadata: deserializeMetadata(parsedOutput), + ...deserialized + }); + return (0, import_smithy_client.decorateServiceException)(exception, body); +}, "de_ResourceNotFoundExceptionRes"); +var de_TableAlreadyExistsExceptionRes = /* @__PURE__ */ __name(async (parsedOutput, context) => { + const body = parsedOutput.body; + const deserialized = (0, import_smithy_client._json)(body); + const exception = new TableAlreadyExistsException({ + $metadata: deserializeMetadata(parsedOutput), + ...deserialized + }); + return (0, import_smithy_client.decorateServiceException)(exception, body); +}, "de_TableAlreadyExistsExceptionRes"); +var de_TableInUseExceptionRes = /* @__PURE__ */ __name(async (parsedOutput, context) => { + const body = parsedOutput.body; + const deserialized = (0, 
import_smithy_client._json)(body); + const exception = new TableInUseException({ + $metadata: deserializeMetadata(parsedOutput), + ...deserialized + }); + return (0, import_smithy_client.decorateServiceException)(exception, body); +}, "de_TableInUseExceptionRes"); +var de_TableNotFoundExceptionRes = /* @__PURE__ */ __name(async (parsedOutput, context) => { + const body = parsedOutput.body; + const deserialized = (0, import_smithy_client._json)(body); + const exception = new TableNotFoundException({ + $metadata: deserializeMetadata(parsedOutput), + ...deserialized + }); + return (0, import_smithy_client.decorateServiceException)(exception, body); +}, "de_TableNotFoundExceptionRes"); +var de_TransactionCanceledExceptionRes = /* @__PURE__ */ __name(async (parsedOutput, context) => { + const body = parsedOutput.body; + const deserialized = de_TransactionCanceledException(body, context); + const exception = new TransactionCanceledException({ + $metadata: deserializeMetadata(parsedOutput), + ...deserialized + }); + return (0, import_smithy_client.decorateServiceException)(exception, body); +}, "de_TransactionCanceledExceptionRes"); +var de_TransactionConflictExceptionRes = /* @__PURE__ */ __name(async (parsedOutput, context) => { + const body = parsedOutput.body; + const deserialized = (0, import_smithy_client._json)(body); + const exception = new TransactionConflictException({ + $metadata: deserializeMetadata(parsedOutput), + ...deserialized + }); + return (0, import_smithy_client.decorateServiceException)(exception, body); +}, "de_TransactionConflictExceptionRes"); +var de_TransactionInProgressExceptionRes = /* @__PURE__ */ __name(async (parsedOutput, context) => { + const body = parsedOutput.body; + const deserialized = (0, import_smithy_client._json)(body); + const exception = new TransactionInProgressException({ + $metadata: deserializeMetadata(parsedOutput), + ...deserialized + }); + return (0, import_smithy_client.decorateServiceException)(exception, body); +}, 
"de_TransactionInProgressExceptionRes"); +var se_AttributeUpdates = /* @__PURE__ */ __name((input, context) => { + return Object.entries(input).reduce((acc, [key, value]) => { + if (value === null) { + return acc; + } + acc[key] = se_AttributeValueUpdate(value, context); + return acc; + }, {}); +}, "se_AttributeUpdates"); +var se_AttributeValue = /* @__PURE__ */ __name((input, context) => { + return AttributeValue.visit(input, { + B: /* @__PURE__ */ __name((value) => ({ B: context.base64Encoder(value) }), "B"), + BOOL: /* @__PURE__ */ __name((value) => ({ BOOL: value }), "BOOL"), + BS: /* @__PURE__ */ __name((value) => ({ BS: se_BinarySetAttributeValue(value, context) }), "BS"), + L: /* @__PURE__ */ __name((value) => ({ L: se_ListAttributeValue(value, context) }), "L"), + M: /* @__PURE__ */ __name((value) => ({ M: se_MapAttributeValue(value, context) }), "M"), + N: /* @__PURE__ */ __name((value) => ({ N: value }), "N"), + NS: /* @__PURE__ */ __name((value) => ({ NS: (0, import_smithy_client._json)(value) }), "NS"), + NULL: /* @__PURE__ */ __name((value) => ({ NULL: value }), "NULL"), + S: /* @__PURE__ */ __name((value) => ({ S: value }), "S"), + SS: /* @__PURE__ */ __name((value) => ({ SS: (0, import_smithy_client._json)(value) }), "SS"), + _: /* @__PURE__ */ __name((name, value) => ({ [name]: value }), "_") + }); +}, "se_AttributeValue"); +var se_AttributeValueList = /* @__PURE__ */ __name((input, context) => { + return input.filter((e) => e != null).map((entry) => { + return se_AttributeValue(entry, context); + }); +}, "se_AttributeValueList"); +var se_AttributeValueUpdate = /* @__PURE__ */ __name((input, context) => { + return (0, import_smithy_client.take)(input, { + Action: [], + Value: /* @__PURE__ */ __name((_) => se_AttributeValue(_, context), "Value") + }); +}, "se_AttributeValueUpdate"); +var se_AutoScalingPolicyUpdate = /* @__PURE__ */ __name((input, context) => { + return (0, import_smithy_client.take)(input, { + PolicyName: [], + 
TargetTrackingScalingPolicyConfiguration: /* @__PURE__ */ __name((_) => se_AutoScalingTargetTrackingScalingPolicyConfigurationUpdate(_, context), "TargetTrackingScalingPolicyConfiguration") + }); +}, "se_AutoScalingPolicyUpdate"); +var se_AutoScalingSettingsUpdate = /* @__PURE__ */ __name((input, context) => { + return (0, import_smithy_client.take)(input, { + AutoScalingDisabled: [], + AutoScalingRoleArn: [], + MaximumUnits: [], + MinimumUnits: [], + ScalingPolicyUpdate: /* @__PURE__ */ __name((_) => se_AutoScalingPolicyUpdate(_, context), "ScalingPolicyUpdate") + }); +}, "se_AutoScalingSettingsUpdate"); +var se_AutoScalingTargetTrackingScalingPolicyConfigurationUpdate = /* @__PURE__ */ __name((input, context) => { + return (0, import_smithy_client.take)(input, { + DisableScaleIn: [], + ScaleInCooldown: [], + ScaleOutCooldown: [], + TargetValue: import_smithy_client.serializeFloat + }); +}, "se_AutoScalingTargetTrackingScalingPolicyConfigurationUpdate"); +var se_BatchExecuteStatementInput = /* @__PURE__ */ __name((input, context) => { + return (0, import_smithy_client.take)(input, { + ReturnConsumedCapacity: [], + Statements: /* @__PURE__ */ __name((_) => se_PartiQLBatchRequest(_, context), "Statements") + }); +}, "se_BatchExecuteStatementInput"); +var se_BatchGetItemInput = /* @__PURE__ */ __name((input, context) => { + return (0, import_smithy_client.take)(input, { + RequestItems: /* @__PURE__ */ __name((_) => se_BatchGetRequestMap(_, context), "RequestItems"), + ReturnConsumedCapacity: [] + }); +}, "se_BatchGetItemInput"); +var se_BatchGetRequestMap = /* @__PURE__ */ __name((input, context) => { + return Object.entries(input).reduce((acc, [key, value]) => { + if (value === null) { + return acc; + } + acc[key] = se_KeysAndAttributes(value, context); + return acc; + }, {}); +}, "se_BatchGetRequestMap"); +var se_BatchStatementRequest = /* @__PURE__ */ __name((input, context) => { + return (0, import_smithy_client.take)(input, { + ConsistentRead: [], + Parameters: 
/* @__PURE__ */ __name((_) => se_PreparedStatementParameters(_, context), "Parameters"), + ReturnValuesOnConditionCheckFailure: [], + Statement: [] + }); +}, "se_BatchStatementRequest"); +var se_BatchWriteItemInput = /* @__PURE__ */ __name((input, context) => { + return (0, import_smithy_client.take)(input, { + RequestItems: /* @__PURE__ */ __name((_) => se_BatchWriteItemRequestMap(_, context), "RequestItems"), + ReturnConsumedCapacity: [], + ReturnItemCollectionMetrics: [] + }); +}, "se_BatchWriteItemInput"); +var se_BatchWriteItemRequestMap = /* @__PURE__ */ __name((input, context) => { + return Object.entries(input).reduce((acc, [key, value]) => { + if (value === null) { + return acc; + } + acc[key] = se_WriteRequests(value, context); + return acc; + }, {}); +}, "se_BatchWriteItemRequestMap"); +var se_BinarySetAttributeValue = /* @__PURE__ */ __name((input, context) => { + return input.filter((e) => e != null).map((entry) => { + return context.base64Encoder(entry); + }); +}, "se_BinarySetAttributeValue"); +var se_Condition = /* @__PURE__ */ __name((input, context) => { + return (0, import_smithy_client.take)(input, { + AttributeValueList: /* @__PURE__ */ __name((_) => se_AttributeValueList(_, context), "AttributeValueList"), + ComparisonOperator: [] + }); +}, "se_Condition"); +var se_ConditionCheck = /* @__PURE__ */ __name((input, context) => { + return (0, import_smithy_client.take)(input, { + ConditionExpression: [], + ExpressionAttributeNames: import_smithy_client._json, + ExpressionAttributeValues: /* @__PURE__ */ __name((_) => se_ExpressionAttributeValueMap(_, context), "ExpressionAttributeValues"), + Key: /* @__PURE__ */ __name((_) => se_Key(_, context), "Key"), + ReturnValuesOnConditionCheckFailure: [], + TableName: [] + }); +}, "se_ConditionCheck"); +var se_Delete = /* @__PURE__ */ __name((input, context) => { + return (0, import_smithy_client.take)(input, { + ConditionExpression: [], + ExpressionAttributeNames: import_smithy_client._json, + 
ExpressionAttributeValues: /* @__PURE__ */ __name((_) => se_ExpressionAttributeValueMap(_, context), "ExpressionAttributeValues"), + Key: /* @__PURE__ */ __name((_) => se_Key(_, context), "Key"), + ReturnValuesOnConditionCheckFailure: [], + TableName: [] + }); +}, "se_Delete"); +var se_DeleteItemInput = /* @__PURE__ */ __name((input, context) => { + return (0, import_smithy_client.take)(input, { + ConditionExpression: [], + ConditionalOperator: [], + Expected: /* @__PURE__ */ __name((_) => se_ExpectedAttributeMap(_, context), "Expected"), + ExpressionAttributeNames: import_smithy_client._json, + ExpressionAttributeValues: /* @__PURE__ */ __name((_) => se_ExpressionAttributeValueMap(_, context), "ExpressionAttributeValues"), + Key: /* @__PURE__ */ __name((_) => se_Key(_, context), "Key"), + ReturnConsumedCapacity: [], + ReturnItemCollectionMetrics: [], + ReturnValues: [], + ReturnValuesOnConditionCheckFailure: [], + TableName: [] + }); +}, "se_DeleteItemInput"); +var se_DeleteRequest = /* @__PURE__ */ __name((input, context) => { + return (0, import_smithy_client.take)(input, { + Key: /* @__PURE__ */ __name((_) => se_Key(_, context), "Key") + }); +}, "se_DeleteRequest"); +var se_ExecuteStatementInput = /* @__PURE__ */ __name((input, context) => { + return (0, import_smithy_client.take)(input, { + ConsistentRead: [], + Limit: [], + NextToken: [], + Parameters: /* @__PURE__ */ __name((_) => se_PreparedStatementParameters(_, context), "Parameters"), + ReturnConsumedCapacity: [], + ReturnValuesOnConditionCheckFailure: [], + Statement: [] + }); +}, "se_ExecuteStatementInput"); +var se_ExecuteTransactionInput = /* @__PURE__ */ __name((input, context) => { + return (0, import_smithy_client.take)(input, { + ClientRequestToken: [true, (_) => _ ?? 
(0, import_uuid.v4)()], + ReturnConsumedCapacity: [], + TransactStatements: /* @__PURE__ */ __name((_) => se_ParameterizedStatements(_, context), "TransactStatements") + }); +}, "se_ExecuteTransactionInput"); +var se_ExpectedAttributeMap = /* @__PURE__ */ __name((input, context) => { + return Object.entries(input).reduce((acc, [key, value]) => { + if (value === null) { + return acc; + } + acc[key] = se_ExpectedAttributeValue(value, context); + return acc; + }, {}); +}, "se_ExpectedAttributeMap"); +var se_ExpectedAttributeValue = /* @__PURE__ */ __name((input, context) => { + return (0, import_smithy_client.take)(input, { + AttributeValueList: /* @__PURE__ */ __name((_) => se_AttributeValueList(_, context), "AttributeValueList"), + ComparisonOperator: [], + Exists: [], + Value: /* @__PURE__ */ __name((_) => se_AttributeValue(_, context), "Value") + }); +}, "se_ExpectedAttributeValue"); +var se_ExportTableToPointInTimeInput = /* @__PURE__ */ __name((input, context) => { + return (0, import_smithy_client.take)(input, { + ClientToken: [true, (_) => _ ?? 
(0, import_uuid.v4)()], + ExportFormat: [], + ExportTime: /* @__PURE__ */ __name((_) => _.getTime() / 1e3, "ExportTime"), + ExportType: [], + IncrementalExportSpecification: /* @__PURE__ */ __name((_) => se_IncrementalExportSpecification(_, context), "IncrementalExportSpecification"), + S3Bucket: [], + S3BucketOwner: [], + S3Prefix: [], + S3SseAlgorithm: [], + S3SseKmsKeyId: [], + TableArn: [] + }); +}, "se_ExportTableToPointInTimeInput"); +var se_ExpressionAttributeValueMap = /* @__PURE__ */ __name((input, context) => { + return Object.entries(input).reduce((acc, [key, value]) => { + if (value === null) { + return acc; + } + acc[key] = se_AttributeValue(value, context); + return acc; + }, {}); +}, "se_ExpressionAttributeValueMap"); +var se_FilterConditionMap = /* @__PURE__ */ __name((input, context) => { + return Object.entries(input).reduce((acc, [key, value]) => { + if (value === null) { + return acc; + } + acc[key] = se_Condition(value, context); + return acc; + }, {}); +}, "se_FilterConditionMap"); +var se_Get = /* @__PURE__ */ __name((input, context) => { + return (0, import_smithy_client.take)(input, { + ExpressionAttributeNames: import_smithy_client._json, + Key: /* @__PURE__ */ __name((_) => se_Key(_, context), "Key"), + ProjectionExpression: [], + TableName: [] + }); +}, "se_Get"); +var se_GetItemInput = /* @__PURE__ */ __name((input, context) => { + return (0, import_smithy_client.take)(input, { + AttributesToGet: import_smithy_client._json, + ConsistentRead: [], + ExpressionAttributeNames: import_smithy_client._json, + Key: /* @__PURE__ */ __name((_) => se_Key(_, context), "Key"), + ProjectionExpression: [], + ReturnConsumedCapacity: [], + TableName: [] + }); +}, "se_GetItemInput"); +var se_GlobalSecondaryIndexAutoScalingUpdate = /* @__PURE__ */ __name((input, context) => { + return (0, import_smithy_client.take)(input, { + IndexName: [], + ProvisionedWriteCapacityAutoScalingUpdate: /* @__PURE__ */ __name((_) => se_AutoScalingSettingsUpdate(_, context), 
"ProvisionedWriteCapacityAutoScalingUpdate") + }); +}, "se_GlobalSecondaryIndexAutoScalingUpdate"); +var se_GlobalSecondaryIndexAutoScalingUpdateList = /* @__PURE__ */ __name((input, context) => { + return input.filter((e) => e != null).map((entry) => { + return se_GlobalSecondaryIndexAutoScalingUpdate(entry, context); + }); +}, "se_GlobalSecondaryIndexAutoScalingUpdateList"); +var se_GlobalTableGlobalSecondaryIndexSettingsUpdate = /* @__PURE__ */ __name((input, context) => { + return (0, import_smithy_client.take)(input, { + IndexName: [], + ProvisionedWriteCapacityAutoScalingSettingsUpdate: /* @__PURE__ */ __name((_) => se_AutoScalingSettingsUpdate(_, context), "ProvisionedWriteCapacityAutoScalingSettingsUpdate"), + ProvisionedWriteCapacityUnits: [] + }); +}, "se_GlobalTableGlobalSecondaryIndexSettingsUpdate"); +var se_GlobalTableGlobalSecondaryIndexSettingsUpdateList = /* @__PURE__ */ __name((input, context) => { + return input.filter((e) => e != null).map((entry) => { + return se_GlobalTableGlobalSecondaryIndexSettingsUpdate(entry, context); + }); +}, "se_GlobalTableGlobalSecondaryIndexSettingsUpdateList"); +var se_ImportTableInput = /* @__PURE__ */ __name((input, context) => { + return (0, import_smithy_client.take)(input, { + ClientToken: [true, (_) => _ ?? 
(0, import_uuid.v4)()], + InputCompressionType: [], + InputFormat: [], + InputFormatOptions: import_smithy_client._json, + S3BucketSource: import_smithy_client._json, + TableCreationParameters: import_smithy_client._json + }); +}, "se_ImportTableInput"); +var se_IncrementalExportSpecification = /* @__PURE__ */ __name((input, context) => { + return (0, import_smithy_client.take)(input, { + ExportFromTime: /* @__PURE__ */ __name((_) => _.getTime() / 1e3, "ExportFromTime"), + ExportToTime: /* @__PURE__ */ __name((_) => _.getTime() / 1e3, "ExportToTime"), + ExportViewType: [] + }); +}, "se_IncrementalExportSpecification"); +var se_Key = /* @__PURE__ */ __name((input, context) => { + return Object.entries(input).reduce((acc, [key, value]) => { + if (value === null) { + return acc; + } + acc[key] = se_AttributeValue(value, context); + return acc; + }, {}); +}, "se_Key"); +var se_KeyConditions = /* @__PURE__ */ __name((input, context) => { + return Object.entries(input).reduce((acc, [key, value]) => { + if (value === null) { + return acc; + } + acc[key] = se_Condition(value, context); + return acc; + }, {}); +}, "se_KeyConditions"); +var se_KeyList = /* @__PURE__ */ __name((input, context) => { + return input.filter((e) => e != null).map((entry) => { + return se_Key(entry, context); + }); +}, "se_KeyList"); +var se_KeysAndAttributes = /* @__PURE__ */ __name((input, context) => { + return (0, import_smithy_client.take)(input, { + AttributesToGet: import_smithy_client._json, + ConsistentRead: [], + ExpressionAttributeNames: import_smithy_client._json, + Keys: /* @__PURE__ */ __name((_) => se_KeyList(_, context), "Keys"), + ProjectionExpression: [] + }); +}, "se_KeysAndAttributes"); +var se_ListAttributeValue = /* @__PURE__ */ __name((input, context) => { + return input.filter((e) => e != null).map((entry) => { + return se_AttributeValue(entry, context); + }); +}, "se_ListAttributeValue"); +var se_ListBackupsInput = /* @__PURE__ */ __name((input, context) => { + return (0, 
import_smithy_client.take)(input, { + BackupType: [], + ExclusiveStartBackupArn: [], + Limit: [], + TableName: [], + TimeRangeLowerBound: /* @__PURE__ */ __name((_) => _.getTime() / 1e3, "TimeRangeLowerBound"), + TimeRangeUpperBound: /* @__PURE__ */ __name((_) => _.getTime() / 1e3, "TimeRangeUpperBound") + }); +}, "se_ListBackupsInput"); +var se_MapAttributeValue = /* @__PURE__ */ __name((input, context) => { + return Object.entries(input).reduce((acc, [key, value]) => { + if (value === null) { + return acc; + } + acc[key] = se_AttributeValue(value, context); + return acc; + }, {}); +}, "se_MapAttributeValue"); +var se_ParameterizedStatement = /* @__PURE__ */ __name((input, context) => { + return (0, import_smithy_client.take)(input, { + Parameters: /* @__PURE__ */ __name((_) => se_PreparedStatementParameters(_, context), "Parameters"), + ReturnValuesOnConditionCheckFailure: [], + Statement: [] + }); +}, "se_ParameterizedStatement"); +var se_ParameterizedStatements = /* @__PURE__ */ __name((input, context) => { + return input.filter((e) => e != null).map((entry) => { + return se_ParameterizedStatement(entry, context); + }); +}, "se_ParameterizedStatements"); +var se_PartiQLBatchRequest = /* @__PURE__ */ __name((input, context) => { + return input.filter((e) => e != null).map((entry) => { + return se_BatchStatementRequest(entry, context); + }); +}, "se_PartiQLBatchRequest"); +var se_PreparedStatementParameters = /* @__PURE__ */ __name((input, context) => { + return input.filter((e) => e != null).map((entry) => { + return se_AttributeValue(entry, context); + }); +}, "se_PreparedStatementParameters"); +var se_Put = /* @__PURE__ */ __name((input, context) => { + return (0, import_smithy_client.take)(input, { + ConditionExpression: [], + ExpressionAttributeNames: import_smithy_client._json, + ExpressionAttributeValues: /* @__PURE__ */ __name((_) => se_ExpressionAttributeValueMap(_, context), "ExpressionAttributeValues"), + Item: /* @__PURE__ */ __name((_) => 
se_PutItemInputAttributeMap(_, context), "Item"), + ReturnValuesOnConditionCheckFailure: [], + TableName: [] + }); +}, "se_Put"); +var se_PutItemInput = /* @__PURE__ */ __name((input, context) => { + return (0, import_smithy_client.take)(input, { + ConditionExpression: [], + ConditionalOperator: [], + Expected: /* @__PURE__ */ __name((_) => se_ExpectedAttributeMap(_, context), "Expected"), + ExpressionAttributeNames: import_smithy_client._json, + ExpressionAttributeValues: /* @__PURE__ */ __name((_) => se_ExpressionAttributeValueMap(_, context), "ExpressionAttributeValues"), + Item: /* @__PURE__ */ __name((_) => se_PutItemInputAttributeMap(_, context), "Item"), + ReturnConsumedCapacity: [], + ReturnItemCollectionMetrics: [], + ReturnValues: [], + ReturnValuesOnConditionCheckFailure: [], + TableName: [] + }); +}, "se_PutItemInput"); +var se_PutItemInputAttributeMap = /* @__PURE__ */ __name((input, context) => { + return Object.entries(input).reduce((acc, [key, value]) => { + if (value === null) { + return acc; + } + acc[key] = se_AttributeValue(value, context); + return acc; + }, {}); +}, "se_PutItemInputAttributeMap"); +var se_PutRequest = /* @__PURE__ */ __name((input, context) => { + return (0, import_smithy_client.take)(input, { + Item: /* @__PURE__ */ __name((_) => se_PutItemInputAttributeMap(_, context), "Item") + }); +}, "se_PutRequest"); +var se_QueryInput = /* @__PURE__ */ __name((input, context) => { + return (0, import_smithy_client.take)(input, { + AttributesToGet: import_smithy_client._json, + ConditionalOperator: [], + ConsistentRead: [], + ExclusiveStartKey: /* @__PURE__ */ __name((_) => se_Key(_, context), "ExclusiveStartKey"), + ExpressionAttributeNames: import_smithy_client._json, + ExpressionAttributeValues: /* @__PURE__ */ __name((_) => se_ExpressionAttributeValueMap(_, context), "ExpressionAttributeValues"), + FilterExpression: [], + IndexName: [], + KeyConditionExpression: [], + KeyConditions: /* @__PURE__ */ __name((_) => se_KeyConditions(_, 
context), "KeyConditions"), + Limit: [], + ProjectionExpression: [], + QueryFilter: /* @__PURE__ */ __name((_) => se_FilterConditionMap(_, context), "QueryFilter"), + ReturnConsumedCapacity: [], + ScanIndexForward: [], + Select: [], + TableName: [] + }); +}, "se_QueryInput"); +var se_ReplicaAutoScalingUpdate = /* @__PURE__ */ __name((input, context) => { + return (0, import_smithy_client.take)(input, { + RegionName: [], + ReplicaGlobalSecondaryIndexUpdates: /* @__PURE__ */ __name((_) => se_ReplicaGlobalSecondaryIndexAutoScalingUpdateList(_, context), "ReplicaGlobalSecondaryIndexUpdates"), + ReplicaProvisionedReadCapacityAutoScalingUpdate: /* @__PURE__ */ __name((_) => se_AutoScalingSettingsUpdate(_, context), "ReplicaProvisionedReadCapacityAutoScalingUpdate") + }); +}, "se_ReplicaAutoScalingUpdate"); +var se_ReplicaAutoScalingUpdateList = /* @__PURE__ */ __name((input, context) => { + return input.filter((e) => e != null).map((entry) => { + return se_ReplicaAutoScalingUpdate(entry, context); + }); +}, "se_ReplicaAutoScalingUpdateList"); +var se_ReplicaGlobalSecondaryIndexAutoScalingUpdate = /* @__PURE__ */ __name((input, context) => { + return (0, import_smithy_client.take)(input, { + IndexName: [], + ProvisionedReadCapacityAutoScalingUpdate: /* @__PURE__ */ __name((_) => se_AutoScalingSettingsUpdate(_, context), "ProvisionedReadCapacityAutoScalingUpdate") + }); +}, "se_ReplicaGlobalSecondaryIndexAutoScalingUpdate"); +var se_ReplicaGlobalSecondaryIndexAutoScalingUpdateList = /* @__PURE__ */ __name((input, context) => { + return input.filter((e) => e != null).map((entry) => { + return se_ReplicaGlobalSecondaryIndexAutoScalingUpdate(entry, context); + }); +}, "se_ReplicaGlobalSecondaryIndexAutoScalingUpdateList"); +var se_ReplicaGlobalSecondaryIndexSettingsUpdate = /* @__PURE__ */ __name((input, context) => { + return (0, import_smithy_client.take)(input, { + IndexName: [], + ProvisionedReadCapacityAutoScalingSettingsUpdate: /* @__PURE__ */ __name((_) => 
se_AutoScalingSettingsUpdate(_, context), "ProvisionedReadCapacityAutoScalingSettingsUpdate"), + ProvisionedReadCapacityUnits: [] + }); +}, "se_ReplicaGlobalSecondaryIndexSettingsUpdate"); +var se_ReplicaGlobalSecondaryIndexSettingsUpdateList = /* @__PURE__ */ __name((input, context) => { + return input.filter((e) => e != null).map((entry) => { + return se_ReplicaGlobalSecondaryIndexSettingsUpdate(entry, context); + }); +}, "se_ReplicaGlobalSecondaryIndexSettingsUpdateList"); +var se_ReplicaSettingsUpdate = /* @__PURE__ */ __name((input, context) => { + return (0, import_smithy_client.take)(input, { + RegionName: [], + ReplicaGlobalSecondaryIndexSettingsUpdate: /* @__PURE__ */ __name((_) => se_ReplicaGlobalSecondaryIndexSettingsUpdateList(_, context), "ReplicaGlobalSecondaryIndexSettingsUpdate"), + ReplicaProvisionedReadCapacityAutoScalingSettingsUpdate: /* @__PURE__ */ __name((_) => se_AutoScalingSettingsUpdate(_, context), "ReplicaProvisionedReadCapacityAutoScalingSettingsUpdate"), + ReplicaProvisionedReadCapacityUnits: [], + ReplicaTableClass: [] + }); +}, "se_ReplicaSettingsUpdate"); +var se_ReplicaSettingsUpdateList = /* @__PURE__ */ __name((input, context) => { + return input.filter((e) => e != null).map((entry) => { + return se_ReplicaSettingsUpdate(entry, context); + }); +}, "se_ReplicaSettingsUpdateList"); +var se_RestoreTableToPointInTimeInput = /* @__PURE__ */ __name((input, context) => { + return (0, import_smithy_client.take)(input, { + BillingModeOverride: [], + GlobalSecondaryIndexOverride: import_smithy_client._json, + LocalSecondaryIndexOverride: import_smithy_client._json, + OnDemandThroughputOverride: import_smithy_client._json, + ProvisionedThroughputOverride: import_smithy_client._json, + RestoreDateTime: /* @__PURE__ */ __name((_) => _.getTime() / 1e3, "RestoreDateTime"), + SSESpecificationOverride: import_smithy_client._json, + SourceTableArn: [], + SourceTableName: [], + TargetTableName: [], + UseLatestRestorableTime: [] + }); +}, 
"se_RestoreTableToPointInTimeInput"); +var se_ScanInput = /* @__PURE__ */ __name((input, context) => { + return (0, import_smithy_client.take)(input, { + AttributesToGet: import_smithy_client._json, + ConditionalOperator: [], + ConsistentRead: [], + ExclusiveStartKey: /* @__PURE__ */ __name((_) => se_Key(_, context), "ExclusiveStartKey"), + ExpressionAttributeNames: import_smithy_client._json, + ExpressionAttributeValues: /* @__PURE__ */ __name((_) => se_ExpressionAttributeValueMap(_, context), "ExpressionAttributeValues"), + FilterExpression: [], + IndexName: [], + Limit: [], + ProjectionExpression: [], + ReturnConsumedCapacity: [], + ScanFilter: /* @__PURE__ */ __name((_) => se_FilterConditionMap(_, context), "ScanFilter"), + Segment: [], + Select: [], + TableName: [], + TotalSegments: [] + }); +}, "se_ScanInput"); +var se_TransactGetItem = /* @__PURE__ */ __name((input, context) => { + return (0, import_smithy_client.take)(input, { + Get: /* @__PURE__ */ __name((_) => se_Get(_, context), "Get") + }); +}, "se_TransactGetItem"); +var se_TransactGetItemList = /* @__PURE__ */ __name((input, context) => { + return input.filter((e) => e != null).map((entry) => { + return se_TransactGetItem(entry, context); + }); +}, "se_TransactGetItemList"); +var se_TransactGetItemsInput = /* @__PURE__ */ __name((input, context) => { + return (0, import_smithy_client.take)(input, { + ReturnConsumedCapacity: [], + TransactItems: /* @__PURE__ */ __name((_) => se_TransactGetItemList(_, context), "TransactItems") + }); +}, "se_TransactGetItemsInput"); +var se_TransactWriteItem = /* @__PURE__ */ __name((input, context) => { + return (0, import_smithy_client.take)(input, { + ConditionCheck: /* @__PURE__ */ __name((_) => se_ConditionCheck(_, context), "ConditionCheck"), + Delete: /* @__PURE__ */ __name((_) => se_Delete(_, context), "Delete"), + Put: /* @__PURE__ */ __name((_) => se_Put(_, context), "Put"), + Update: /* @__PURE__ */ __name((_) => se_Update(_, context), "Update") + }); +}, 
"se_TransactWriteItem"); +var se_TransactWriteItemList = /* @__PURE__ */ __name((input, context) => { + return input.filter((e) => e != null).map((entry) => { + return se_TransactWriteItem(entry, context); + }); +}, "se_TransactWriteItemList"); +var se_TransactWriteItemsInput = /* @__PURE__ */ __name((input, context) => { + return (0, import_smithy_client.take)(input, { + ClientRequestToken: [true, (_) => _ ?? (0, import_uuid.v4)()], + ReturnConsumedCapacity: [], + ReturnItemCollectionMetrics: [], + TransactItems: /* @__PURE__ */ __name((_) => se_TransactWriteItemList(_, context), "TransactItems") + }); +}, "se_TransactWriteItemsInput"); +var se_Update = /* @__PURE__ */ __name((input, context) => { + return (0, import_smithy_client.take)(input, { + ConditionExpression: [], + ExpressionAttributeNames: import_smithy_client._json, + ExpressionAttributeValues: /* @__PURE__ */ __name((_) => se_ExpressionAttributeValueMap(_, context), "ExpressionAttributeValues"), + Key: /* @__PURE__ */ __name((_) => se_Key(_, context), "Key"), + ReturnValuesOnConditionCheckFailure: [], + TableName: [], + UpdateExpression: [] + }); +}, "se_Update"); +var se_UpdateGlobalTableSettingsInput = /* @__PURE__ */ __name((input, context) => { + return (0, import_smithy_client.take)(input, { + GlobalTableBillingMode: [], + GlobalTableGlobalSecondaryIndexSettingsUpdate: /* @__PURE__ */ __name((_) => se_GlobalTableGlobalSecondaryIndexSettingsUpdateList(_, context), "GlobalTableGlobalSecondaryIndexSettingsUpdate"), + GlobalTableName: [], + GlobalTableProvisionedWriteCapacityAutoScalingSettingsUpdate: /* @__PURE__ */ __name((_) => se_AutoScalingSettingsUpdate(_, context), "GlobalTableProvisionedWriteCapacityAutoScalingSettingsUpdate"), + GlobalTableProvisionedWriteCapacityUnits: [], + ReplicaSettingsUpdate: /* @__PURE__ */ __name((_) => se_ReplicaSettingsUpdateList(_, context), "ReplicaSettingsUpdate") + }); +}, "se_UpdateGlobalTableSettingsInput"); +var se_UpdateItemInput = /* @__PURE__ */ 
__name((input, context) => { + return (0, import_smithy_client.take)(input, { + AttributeUpdates: /* @__PURE__ */ __name((_) => se_AttributeUpdates(_, context), "AttributeUpdates"), + ConditionExpression: [], + ConditionalOperator: [], + Expected: /* @__PURE__ */ __name((_) => se_ExpectedAttributeMap(_, context), "Expected"), + ExpressionAttributeNames: import_smithy_client._json, + ExpressionAttributeValues: /* @__PURE__ */ __name((_) => se_ExpressionAttributeValueMap(_, context), "ExpressionAttributeValues"), + Key: /* @__PURE__ */ __name((_) => se_Key(_, context), "Key"), + ReturnConsumedCapacity: [], + ReturnItemCollectionMetrics: [], + ReturnValues: [], + ReturnValuesOnConditionCheckFailure: [], + TableName: [], + UpdateExpression: [] + }); +}, "se_UpdateItemInput"); +var se_UpdateTableReplicaAutoScalingInput = /* @__PURE__ */ __name((input, context) => { + return (0, import_smithy_client.take)(input, { + GlobalSecondaryIndexUpdates: /* @__PURE__ */ __name((_) => se_GlobalSecondaryIndexAutoScalingUpdateList(_, context), "GlobalSecondaryIndexUpdates"), + ProvisionedWriteCapacityAutoScalingUpdate: /* @__PURE__ */ __name((_) => se_AutoScalingSettingsUpdate(_, context), "ProvisionedWriteCapacityAutoScalingUpdate"), + ReplicaUpdates: /* @__PURE__ */ __name((_) => se_ReplicaAutoScalingUpdateList(_, context), "ReplicaUpdates"), + TableName: [] + }); +}, "se_UpdateTableReplicaAutoScalingInput"); +var se_WriteRequest = /* @__PURE__ */ __name((input, context) => { + return (0, import_smithy_client.take)(input, { + DeleteRequest: /* @__PURE__ */ __name((_) => se_DeleteRequest(_, context), "DeleteRequest"), + PutRequest: /* @__PURE__ */ __name((_) => se_PutRequest(_, context), "PutRequest") + }); +}, "se_WriteRequest"); +var se_WriteRequests = /* @__PURE__ */ __name((input, context) => { + return input.filter((e) => e != null).map((entry) => { + return se_WriteRequest(entry, context); + }); +}, "se_WriteRequests"); +var de_ArchivalSummary = /* @__PURE__ */ __name((output, 
context) => { + return (0, import_smithy_client.take)(output, { + ArchivalBackupArn: import_smithy_client.expectString, + ArchivalDateTime: /* @__PURE__ */ __name((_) => (0, import_smithy_client.expectNonNull)((0, import_smithy_client.parseEpochTimestamp)((0, import_smithy_client.expectNumber)(_))), "ArchivalDateTime"), + ArchivalReason: import_smithy_client.expectString + }); +}, "de_ArchivalSummary"); +var de_AttributeMap = /* @__PURE__ */ __name((output, context) => { + return Object.entries(output).reduce((acc, [key, value]) => { + if (value === null) { + return acc; + } + acc[key] = de_AttributeValue((0, import_core.awsExpectUnion)(value), context); + return acc; + }, {}); +}, "de_AttributeMap"); +var de_AttributeValue = /* @__PURE__ */ __name((output, context) => { + if (output.B != null) { + return { + B: context.base64Decoder(output.B) + }; + } + if ((0, import_smithy_client.expectBoolean)(output.BOOL) !== void 0) { + return { BOOL: (0, import_smithy_client.expectBoolean)(output.BOOL) }; + } + if (output.BS != null) { + return { + BS: de_BinarySetAttributeValue(output.BS, context) + }; + } + if (output.L != null) { + return { + L: de_ListAttributeValue(output.L, context) + }; + } + if (output.M != null) { + return { + M: de_MapAttributeValue(output.M, context) + }; + } + if ((0, import_smithy_client.expectString)(output.N) !== void 0) { + return { N: (0, import_smithy_client.expectString)(output.N) }; + } + if (output.NS != null) { + return { + NS: (0, import_smithy_client._json)(output.NS) + }; + } + if ((0, import_smithy_client.expectBoolean)(output.NULL) !== void 0) { + return { NULL: (0, import_smithy_client.expectBoolean)(output.NULL) }; + } + if ((0, import_smithy_client.expectString)(output.S) !== void 0) { + return { S: (0, import_smithy_client.expectString)(output.S) }; + } + if (output.SS != null) { + return { + SS: (0, import_smithy_client._json)(output.SS) + }; + } + return { $unknown: Object.entries(output)[0] }; +}, "de_AttributeValue"); +var 
de_AutoScalingPolicyDescription = /* @__PURE__ */ __name((output, context) => { + return (0, import_smithy_client.take)(output, { + PolicyName: import_smithy_client.expectString, + TargetTrackingScalingPolicyConfiguration: /* @__PURE__ */ __name((_) => de_AutoScalingTargetTrackingScalingPolicyConfigurationDescription(_, context), "TargetTrackingScalingPolicyConfiguration") + }); +}, "de_AutoScalingPolicyDescription"); +var de_AutoScalingPolicyDescriptionList = /* @__PURE__ */ __name((output, context) => { + const retVal = (output || []).filter((e) => e != null).map((entry) => { + return de_AutoScalingPolicyDescription(entry, context); + }); + return retVal; +}, "de_AutoScalingPolicyDescriptionList"); +var de_AutoScalingSettingsDescription = /* @__PURE__ */ __name((output, context) => { + return (0, import_smithy_client.take)(output, { + AutoScalingDisabled: import_smithy_client.expectBoolean, + AutoScalingRoleArn: import_smithy_client.expectString, + MaximumUnits: import_smithy_client.expectLong, + MinimumUnits: import_smithy_client.expectLong, + ScalingPolicies: /* @__PURE__ */ __name((_) => de_AutoScalingPolicyDescriptionList(_, context), "ScalingPolicies") + }); +}, "de_AutoScalingSettingsDescription"); +var de_AutoScalingTargetTrackingScalingPolicyConfigurationDescription = /* @__PURE__ */ __name((output, context) => { + return (0, import_smithy_client.take)(output, { + DisableScaleIn: import_smithy_client.expectBoolean, + ScaleInCooldown: import_smithy_client.expectInt32, + ScaleOutCooldown: import_smithy_client.expectInt32, + TargetValue: import_smithy_client.limitedParseDouble + }); +}, "de_AutoScalingTargetTrackingScalingPolicyConfigurationDescription"); +var de_BackupDescription = /* @__PURE__ */ __name((output, context) => { + return (0, import_smithy_client.take)(output, { + BackupDetails: /* @__PURE__ */ __name((_) => de_BackupDetails(_, context), "BackupDetails"), + SourceTableDetails: /* @__PURE__ */ __name((_) => de_SourceTableDetails(_, context), 
"SourceTableDetails"), + SourceTableFeatureDetails: /* @__PURE__ */ __name((_) => de_SourceTableFeatureDetails(_, context), "SourceTableFeatureDetails") + }); +}, "de_BackupDescription"); +var de_BackupDetails = /* @__PURE__ */ __name((output, context) => { + return (0, import_smithy_client.take)(output, { + BackupArn: import_smithy_client.expectString, + BackupCreationDateTime: /* @__PURE__ */ __name((_) => (0, import_smithy_client.expectNonNull)((0, import_smithy_client.parseEpochTimestamp)((0, import_smithy_client.expectNumber)(_))), "BackupCreationDateTime"), + BackupExpiryDateTime: /* @__PURE__ */ __name((_) => (0, import_smithy_client.expectNonNull)((0, import_smithy_client.parseEpochTimestamp)((0, import_smithy_client.expectNumber)(_))), "BackupExpiryDateTime"), + BackupName: import_smithy_client.expectString, + BackupSizeBytes: import_smithy_client.expectLong, + BackupStatus: import_smithy_client.expectString, + BackupType: import_smithy_client.expectString + }); +}, "de_BackupDetails"); +var de_BackupSummaries = /* @__PURE__ */ __name((output, context) => { + const retVal = (output || []).filter((e) => e != null).map((entry) => { + return de_BackupSummary(entry, context); + }); + return retVal; +}, "de_BackupSummaries"); +var de_BackupSummary = /* @__PURE__ */ __name((output, context) => { + return (0, import_smithy_client.take)(output, { + BackupArn: import_smithy_client.expectString, + BackupCreationDateTime: /* @__PURE__ */ __name((_) => (0, import_smithy_client.expectNonNull)((0, import_smithy_client.parseEpochTimestamp)((0, import_smithy_client.expectNumber)(_))), "BackupCreationDateTime"), + BackupExpiryDateTime: /* @__PURE__ */ __name((_) => (0, import_smithy_client.expectNonNull)((0, import_smithy_client.parseEpochTimestamp)((0, import_smithy_client.expectNumber)(_))), "BackupExpiryDateTime"), + BackupName: import_smithy_client.expectString, + BackupSizeBytes: import_smithy_client.expectLong, + BackupStatus: import_smithy_client.expectString, + 
BackupType: import_smithy_client.expectString, + TableArn: import_smithy_client.expectString, + TableId: import_smithy_client.expectString, + TableName: import_smithy_client.expectString + }); +}, "de_BackupSummary"); +var de_BatchExecuteStatementOutput = /* @__PURE__ */ __name((output, context) => { + return (0, import_smithy_client.take)(output, { + ConsumedCapacity: /* @__PURE__ */ __name((_) => de_ConsumedCapacityMultiple(_, context), "ConsumedCapacity"), + Responses: /* @__PURE__ */ __name((_) => de_PartiQLBatchResponse(_, context), "Responses") + }); +}, "de_BatchExecuteStatementOutput"); +var de_BatchGetItemOutput = /* @__PURE__ */ __name((output, context) => { + return (0, import_smithy_client.take)(output, { + ConsumedCapacity: /* @__PURE__ */ __name((_) => de_ConsumedCapacityMultiple(_, context), "ConsumedCapacity"), + Responses: /* @__PURE__ */ __name((_) => de_BatchGetResponseMap(_, context), "Responses"), + UnprocessedKeys: /* @__PURE__ */ __name((_) => de_BatchGetRequestMap(_, context), "UnprocessedKeys") + }); +}, "de_BatchGetItemOutput"); +var de_BatchGetRequestMap = /* @__PURE__ */ __name((output, context) => { + return Object.entries(output).reduce((acc, [key, value]) => { + if (value === null) { + return acc; + } + acc[key] = de_KeysAndAttributes(value, context); + return acc; + }, {}); +}, "de_BatchGetRequestMap"); +var de_BatchGetResponseMap = /* @__PURE__ */ __name((output, context) => { + return Object.entries(output).reduce( + (acc, [key, value]) => { + if (value === null) { + return acc; + } + acc[key] = de_ItemList(value, context); + return acc; + }, + {} + ); +}, "de_BatchGetResponseMap"); +var de_BatchStatementError = /* @__PURE__ */ __name((output, context) => { + return (0, import_smithy_client.take)(output, { + Code: import_smithy_client.expectString, + Item: /* @__PURE__ */ __name((_) => de_AttributeMap(_, context), "Item"), + Message: import_smithy_client.expectString + }); +}, "de_BatchStatementError"); +var 
de_BatchStatementResponse = /* @__PURE__ */ __name((output, context) => { + return (0, import_smithy_client.take)(output, { + Error: /* @__PURE__ */ __name((_) => de_BatchStatementError(_, context), "Error"), + Item: /* @__PURE__ */ __name((_) => de_AttributeMap(_, context), "Item"), + TableName: import_smithy_client.expectString + }); +}, "de_BatchStatementResponse"); +var de_BatchWriteItemOutput = /* @__PURE__ */ __name((output, context) => { + return (0, import_smithy_client.take)(output, { + ConsumedCapacity: /* @__PURE__ */ __name((_) => de_ConsumedCapacityMultiple(_, context), "ConsumedCapacity"), + ItemCollectionMetrics: /* @__PURE__ */ __name((_) => de_ItemCollectionMetricsPerTable(_, context), "ItemCollectionMetrics"), + UnprocessedItems: /* @__PURE__ */ __name((_) => de_BatchWriteItemRequestMap(_, context), "UnprocessedItems") + }); +}, "de_BatchWriteItemOutput"); +var de_BatchWriteItemRequestMap = /* @__PURE__ */ __name((output, context) => { + return Object.entries(output).reduce((acc, [key, value]) => { + if (value === null) { + return acc; + } + acc[key] = de_WriteRequests(value, context); + return acc; + }, {}); +}, "de_BatchWriteItemRequestMap"); +var de_BillingModeSummary = /* @__PURE__ */ __name((output, context) => { + return (0, import_smithy_client.take)(output, { + BillingMode: import_smithy_client.expectString, + LastUpdateToPayPerRequestDateTime: /* @__PURE__ */ __name((_) => (0, import_smithy_client.expectNonNull)((0, import_smithy_client.parseEpochTimestamp)((0, import_smithy_client.expectNumber)(_))), "LastUpdateToPayPerRequestDateTime") + }); +}, "de_BillingModeSummary"); +var de_BinarySetAttributeValue = /* @__PURE__ */ __name((output, context) => { + const retVal = (output || []).filter((e) => e != null).map((entry) => { + return context.base64Decoder(entry); + }); + return retVal; +}, "de_BinarySetAttributeValue"); +var de_CancellationReason = /* @__PURE__ */ __name((output, context) => { + return (0, 
import_smithy_client.take)(output, { + Code: import_smithy_client.expectString, + Item: /* @__PURE__ */ __name((_) => de_AttributeMap(_, context), "Item"), + Message: import_smithy_client.expectString + }); +}, "de_CancellationReason"); +var de_CancellationReasonList = /* @__PURE__ */ __name((output, context) => { + const retVal = (output || []).filter((e) => e != null).map((entry) => { + return de_CancellationReason(entry, context); + }); + return retVal; +}, "de_CancellationReasonList"); +var de_Capacity = /* @__PURE__ */ __name((output, context) => { + return (0, import_smithy_client.take)(output, { + CapacityUnits: import_smithy_client.limitedParseDouble, + ReadCapacityUnits: import_smithy_client.limitedParseDouble, + WriteCapacityUnits: import_smithy_client.limitedParseDouble + }); +}, "de_Capacity"); +var de_ConditionalCheckFailedException = /* @__PURE__ */ __name((output, context) => { + return (0, import_smithy_client.take)(output, { + Item: /* @__PURE__ */ __name((_) => de_AttributeMap(_, context), "Item"), + message: import_smithy_client.expectString + }); +}, "de_ConditionalCheckFailedException"); +var de_ConsumedCapacity = /* @__PURE__ */ __name((output, context) => { + return (0, import_smithy_client.take)(output, { + CapacityUnits: import_smithy_client.limitedParseDouble, + GlobalSecondaryIndexes: /* @__PURE__ */ __name((_) => de_SecondaryIndexesCapacityMap(_, context), "GlobalSecondaryIndexes"), + LocalSecondaryIndexes: /* @__PURE__ */ __name((_) => de_SecondaryIndexesCapacityMap(_, context), "LocalSecondaryIndexes"), + ReadCapacityUnits: import_smithy_client.limitedParseDouble, + Table: /* @__PURE__ */ __name((_) => de_Capacity(_, context), "Table"), + TableName: import_smithy_client.expectString, + WriteCapacityUnits: import_smithy_client.limitedParseDouble + }); +}, "de_ConsumedCapacity"); +var de_ConsumedCapacityMultiple = /* @__PURE__ */ __name((output, context) => { + const retVal = (output || []).filter((e) => e != null).map((entry) => { + 
return de_ConsumedCapacity(entry, context); + }); + return retVal; +}, "de_ConsumedCapacityMultiple"); +var de_ContinuousBackupsDescription = /* @__PURE__ */ __name((output, context) => { + return (0, import_smithy_client.take)(output, { + ContinuousBackupsStatus: import_smithy_client.expectString, + PointInTimeRecoveryDescription: /* @__PURE__ */ __name((_) => de_PointInTimeRecoveryDescription(_, context), "PointInTimeRecoveryDescription") + }); +}, "de_ContinuousBackupsDescription"); +var de_CreateBackupOutput = /* @__PURE__ */ __name((output, context) => { + return (0, import_smithy_client.take)(output, { + BackupDetails: /* @__PURE__ */ __name((_) => de_BackupDetails(_, context), "BackupDetails") + }); +}, "de_CreateBackupOutput"); +var de_CreateGlobalTableOutput = /* @__PURE__ */ __name((output, context) => { + return (0, import_smithy_client.take)(output, { + GlobalTableDescription: /* @__PURE__ */ __name((_) => de_GlobalTableDescription(_, context), "GlobalTableDescription") + }); +}, "de_CreateGlobalTableOutput"); +var de_CreateTableOutput = /* @__PURE__ */ __name((output, context) => { + return (0, import_smithy_client.take)(output, { + TableDescription: /* @__PURE__ */ __name((_) => de_TableDescription(_, context), "TableDescription") + }); +}, "de_CreateTableOutput"); +var de_DeleteBackupOutput = /* @__PURE__ */ __name((output, context) => { + return (0, import_smithy_client.take)(output, { + BackupDescription: /* @__PURE__ */ __name((_) => de_BackupDescription(_, context), "BackupDescription") + }); +}, "de_DeleteBackupOutput"); +var de_DeleteItemOutput = /* @__PURE__ */ __name((output, context) => { + return (0, import_smithy_client.take)(output, { + Attributes: /* @__PURE__ */ __name((_) => de_AttributeMap(_, context), "Attributes"), + ConsumedCapacity: /* @__PURE__ */ __name((_) => de_ConsumedCapacity(_, context), "ConsumedCapacity"), + ItemCollectionMetrics: /* @__PURE__ */ __name((_) => de_ItemCollectionMetrics(_, context), "ItemCollectionMetrics") 
+ }); +}, "de_DeleteItemOutput"); +var de_DeleteRequest = /* @__PURE__ */ __name((output, context) => { + return (0, import_smithy_client.take)(output, { + Key: /* @__PURE__ */ __name((_) => de_Key(_, context), "Key") + }); +}, "de_DeleteRequest"); +var de_DeleteTableOutput = /* @__PURE__ */ __name((output, context) => { + return (0, import_smithy_client.take)(output, { + TableDescription: /* @__PURE__ */ __name((_) => de_TableDescription(_, context), "TableDescription") + }); +}, "de_DeleteTableOutput"); +var de_DescribeBackupOutput = /* @__PURE__ */ __name((output, context) => { + return (0, import_smithy_client.take)(output, { + BackupDescription: /* @__PURE__ */ __name((_) => de_BackupDescription(_, context), "BackupDescription") + }); +}, "de_DescribeBackupOutput"); +var de_DescribeContinuousBackupsOutput = /* @__PURE__ */ __name((output, context) => { + return (0, import_smithy_client.take)(output, { + ContinuousBackupsDescription: /* @__PURE__ */ __name((_) => de_ContinuousBackupsDescription(_, context), "ContinuousBackupsDescription") + }); +}, "de_DescribeContinuousBackupsOutput"); +var de_DescribeContributorInsightsOutput = /* @__PURE__ */ __name((output, context) => { + return (0, import_smithy_client.take)(output, { + ContributorInsightsRuleList: import_smithy_client._json, + ContributorInsightsStatus: import_smithy_client.expectString, + FailureException: import_smithy_client._json, + IndexName: import_smithy_client.expectString, + LastUpdateDateTime: /* @__PURE__ */ __name((_) => (0, import_smithy_client.expectNonNull)((0, import_smithy_client.parseEpochTimestamp)((0, import_smithy_client.expectNumber)(_))), "LastUpdateDateTime"), + TableName: import_smithy_client.expectString + }); +}, "de_DescribeContributorInsightsOutput"); +var de_DescribeExportOutput = /* @__PURE__ */ __name((output, context) => { + return (0, import_smithy_client.take)(output, { + ExportDescription: /* @__PURE__ */ __name((_) => de_ExportDescription(_, context), 
"ExportDescription") + }); +}, "de_DescribeExportOutput"); +var de_DescribeGlobalTableOutput = /* @__PURE__ */ __name((output, context) => { + return (0, import_smithy_client.take)(output, { + GlobalTableDescription: /* @__PURE__ */ __name((_) => de_GlobalTableDescription(_, context), "GlobalTableDescription") + }); +}, "de_DescribeGlobalTableOutput"); +var de_DescribeGlobalTableSettingsOutput = /* @__PURE__ */ __name((output, context) => { + return (0, import_smithy_client.take)(output, { + GlobalTableName: import_smithy_client.expectString, + ReplicaSettings: /* @__PURE__ */ __name((_) => de_ReplicaSettingsDescriptionList(_, context), "ReplicaSettings") + }); +}, "de_DescribeGlobalTableSettingsOutput"); +var de_DescribeImportOutput = /* @__PURE__ */ __name((output, context) => { + return (0, import_smithy_client.take)(output, { + ImportTableDescription: /* @__PURE__ */ __name((_) => de_ImportTableDescription(_, context), "ImportTableDescription") + }); +}, "de_DescribeImportOutput"); +var de_DescribeTableOutput = /* @__PURE__ */ __name((output, context) => { + return (0, import_smithy_client.take)(output, { + Table: /* @__PURE__ */ __name((_) => de_TableDescription(_, context), "Table") + }); +}, "de_DescribeTableOutput"); +var de_DescribeTableReplicaAutoScalingOutput = /* @__PURE__ */ __name((output, context) => { + return (0, import_smithy_client.take)(output, { + TableAutoScalingDescription: /* @__PURE__ */ __name((_) => de_TableAutoScalingDescription(_, context), "TableAutoScalingDescription") + }); +}, "de_DescribeTableReplicaAutoScalingOutput"); +var de_ExecuteStatementOutput = /* @__PURE__ */ __name((output, context) => { + return (0, import_smithy_client.take)(output, { + ConsumedCapacity: /* @__PURE__ */ __name((_) => de_ConsumedCapacity(_, context), "ConsumedCapacity"), + Items: /* @__PURE__ */ __name((_) => de_ItemList(_, context), "Items"), + LastEvaluatedKey: /* @__PURE__ */ __name((_) => de_Key(_, context), "LastEvaluatedKey"), + NextToken: 
import_smithy_client.expectString + }); +}, "de_ExecuteStatementOutput"); +var de_ExecuteTransactionOutput = /* @__PURE__ */ __name((output, context) => { + return (0, import_smithy_client.take)(output, { + ConsumedCapacity: /* @__PURE__ */ __name((_) => de_ConsumedCapacityMultiple(_, context), "ConsumedCapacity"), + Responses: /* @__PURE__ */ __name((_) => de_ItemResponseList(_, context), "Responses") + }); +}, "de_ExecuteTransactionOutput"); +var de_ExportDescription = /* @__PURE__ */ __name((output, context) => { + return (0, import_smithy_client.take)(output, { + BilledSizeBytes: import_smithy_client.expectLong, + ClientToken: import_smithy_client.expectString, + EndTime: /* @__PURE__ */ __name((_) => (0, import_smithy_client.expectNonNull)((0, import_smithy_client.parseEpochTimestamp)((0, import_smithy_client.expectNumber)(_))), "EndTime"), + ExportArn: import_smithy_client.expectString, + ExportFormat: import_smithy_client.expectString, + ExportManifest: import_smithy_client.expectString, + ExportStatus: import_smithy_client.expectString, + ExportTime: /* @__PURE__ */ __name((_) => (0, import_smithy_client.expectNonNull)((0, import_smithy_client.parseEpochTimestamp)((0, import_smithy_client.expectNumber)(_))), "ExportTime"), + ExportType: import_smithy_client.expectString, + FailureCode: import_smithy_client.expectString, + FailureMessage: import_smithy_client.expectString, + IncrementalExportSpecification: /* @__PURE__ */ __name((_) => de_IncrementalExportSpecification(_, context), "IncrementalExportSpecification"), + ItemCount: import_smithy_client.expectLong, + S3Bucket: import_smithy_client.expectString, + S3BucketOwner: import_smithy_client.expectString, + S3Prefix: import_smithy_client.expectString, + S3SseAlgorithm: import_smithy_client.expectString, + S3SseKmsKeyId: import_smithy_client.expectString, + StartTime: /* @__PURE__ */ __name((_) => (0, import_smithy_client.expectNonNull)((0, import_smithy_client.parseEpochTimestamp)((0, 
import_smithy_client.expectNumber)(_))), "StartTime"), + TableArn: import_smithy_client.expectString, + TableId: import_smithy_client.expectString + }); +}, "de_ExportDescription"); +var de_ExportTableToPointInTimeOutput = /* @__PURE__ */ __name((output, context) => { + return (0, import_smithy_client.take)(output, { + ExportDescription: /* @__PURE__ */ __name((_) => de_ExportDescription(_, context), "ExportDescription") + }); +}, "de_ExportTableToPointInTimeOutput"); +var de_GetItemOutput = /* @__PURE__ */ __name((output, context) => { + return (0, import_smithy_client.take)(output, { + ConsumedCapacity: /* @__PURE__ */ __name((_) => de_ConsumedCapacity(_, context), "ConsumedCapacity"), + Item: /* @__PURE__ */ __name((_) => de_AttributeMap(_, context), "Item") + }); +}, "de_GetItemOutput"); +var de_GlobalSecondaryIndexDescription = /* @__PURE__ */ __name((output, context) => { + return (0, import_smithy_client.take)(output, { + Backfilling: import_smithy_client.expectBoolean, + IndexArn: import_smithy_client.expectString, + IndexName: import_smithy_client.expectString, + IndexSizeBytes: import_smithy_client.expectLong, + IndexStatus: import_smithy_client.expectString, + ItemCount: import_smithy_client.expectLong, + KeySchema: import_smithy_client._json, + OnDemandThroughput: import_smithy_client._json, + Projection: import_smithy_client._json, + ProvisionedThroughput: /* @__PURE__ */ __name((_) => de_ProvisionedThroughputDescription(_, context), "ProvisionedThroughput"), + WarmThroughput: import_smithy_client._json + }); +}, "de_GlobalSecondaryIndexDescription"); +var de_GlobalSecondaryIndexDescriptionList = /* @__PURE__ */ __name((output, context) => { + const retVal = (output || []).filter((e) => e != null).map((entry) => { + return de_GlobalSecondaryIndexDescription(entry, context); + }); + return retVal; +}, "de_GlobalSecondaryIndexDescriptionList"); +var de_GlobalTableDescription = /* @__PURE__ */ __name((output, context) => { + return (0, 
import_smithy_client.take)(output, { + CreationDateTime: /* @__PURE__ */ __name((_) => (0, import_smithy_client.expectNonNull)((0, import_smithy_client.parseEpochTimestamp)((0, import_smithy_client.expectNumber)(_))), "CreationDateTime"), + GlobalTableArn: import_smithy_client.expectString, + GlobalTableName: import_smithy_client.expectString, + GlobalTableStatus: import_smithy_client.expectString, + ReplicationGroup: /* @__PURE__ */ __name((_) => de_ReplicaDescriptionList(_, context), "ReplicationGroup") + }); +}, "de_GlobalTableDescription"); +var de_ImportSummary = /* @__PURE__ */ __name((output, context) => { + return (0, import_smithy_client.take)(output, { + CloudWatchLogGroupArn: import_smithy_client.expectString, + EndTime: /* @__PURE__ */ __name((_) => (0, import_smithy_client.expectNonNull)((0, import_smithy_client.parseEpochTimestamp)((0, import_smithy_client.expectNumber)(_))), "EndTime"), + ImportArn: import_smithy_client.expectString, + ImportStatus: import_smithy_client.expectString, + InputFormat: import_smithy_client.expectString, + S3BucketSource: import_smithy_client._json, + StartTime: /* @__PURE__ */ __name((_) => (0, import_smithy_client.expectNonNull)((0, import_smithy_client.parseEpochTimestamp)((0, import_smithy_client.expectNumber)(_))), "StartTime"), + TableArn: import_smithy_client.expectString + }); +}, "de_ImportSummary"); +var de_ImportSummaryList = /* @__PURE__ */ __name((output, context) => { + const retVal = (output || []).filter((e) => e != null).map((entry) => { + return de_ImportSummary(entry, context); + }); + return retVal; +}, "de_ImportSummaryList"); +var de_ImportTableDescription = /* @__PURE__ */ __name((output, context) => { + return (0, import_smithy_client.take)(output, { + ClientToken: import_smithy_client.expectString, + CloudWatchLogGroupArn: import_smithy_client.expectString, + EndTime: /* @__PURE__ */ __name((_) => (0, import_smithy_client.expectNonNull)((0, import_smithy_client.parseEpochTimestamp)((0, 
import_smithy_client.expectNumber)(_))), "EndTime"), + ErrorCount: import_smithy_client.expectLong, + FailureCode: import_smithy_client.expectString, + FailureMessage: import_smithy_client.expectString, + ImportArn: import_smithy_client.expectString, + ImportStatus: import_smithy_client.expectString, + ImportedItemCount: import_smithy_client.expectLong, + InputCompressionType: import_smithy_client.expectString, + InputFormat: import_smithy_client.expectString, + InputFormatOptions: import_smithy_client._json, + ProcessedItemCount: import_smithy_client.expectLong, + ProcessedSizeBytes: import_smithy_client.expectLong, + S3BucketSource: import_smithy_client._json, + StartTime: /* @__PURE__ */ __name((_) => (0, import_smithy_client.expectNonNull)((0, import_smithy_client.parseEpochTimestamp)((0, import_smithy_client.expectNumber)(_))), "StartTime"), + TableArn: import_smithy_client.expectString, + TableCreationParameters: import_smithy_client._json, + TableId: import_smithy_client.expectString + }); +}, "de_ImportTableDescription"); +var de_ImportTableOutput = /* @__PURE__ */ __name((output, context) => { + return (0, import_smithy_client.take)(output, { + ImportTableDescription: /* @__PURE__ */ __name((_) => de_ImportTableDescription(_, context), "ImportTableDescription") + }); +}, "de_ImportTableOutput"); +var de_IncrementalExportSpecification = /* @__PURE__ */ __name((output, context) => { + return (0, import_smithy_client.take)(output, { + ExportFromTime: /* @__PURE__ */ __name((_) => (0, import_smithy_client.expectNonNull)((0, import_smithy_client.parseEpochTimestamp)((0, import_smithy_client.expectNumber)(_))), "ExportFromTime"), + ExportToTime: /* @__PURE__ */ __name((_) => (0, import_smithy_client.expectNonNull)((0, import_smithy_client.parseEpochTimestamp)((0, import_smithy_client.expectNumber)(_))), "ExportToTime"), + ExportViewType: import_smithy_client.expectString + }); +}, "de_IncrementalExportSpecification"); +var de_ItemCollectionKeyAttributeMap = /* 
@__PURE__ */ __name((output, context) => { + return Object.entries(output).reduce((acc, [key, value]) => { + if (value === null) { + return acc; + } + acc[key] = de_AttributeValue((0, import_core.awsExpectUnion)(value), context); + return acc; + }, {}); +}, "de_ItemCollectionKeyAttributeMap"); +var de_ItemCollectionMetrics = /* @__PURE__ */ __name((output, context) => { + return (0, import_smithy_client.take)(output, { + ItemCollectionKey: /* @__PURE__ */ __name((_) => de_ItemCollectionKeyAttributeMap(_, context), "ItemCollectionKey"), + SizeEstimateRangeGB: /* @__PURE__ */ __name((_) => de_ItemCollectionSizeEstimateRange(_, context), "SizeEstimateRangeGB") + }); +}, "de_ItemCollectionMetrics"); +var de_ItemCollectionMetricsMultiple = /* @__PURE__ */ __name((output, context) => { + const retVal = (output || []).filter((e) => e != null).map((entry) => { + return de_ItemCollectionMetrics(entry, context); + }); + return retVal; +}, "de_ItemCollectionMetricsMultiple"); +var de_ItemCollectionMetricsPerTable = /* @__PURE__ */ __name((output, context) => { + return Object.entries(output).reduce((acc, [key, value]) => { + if (value === null) { + return acc; + } + acc[key] = de_ItemCollectionMetricsMultiple(value, context); + return acc; + }, {}); +}, "de_ItemCollectionMetricsPerTable"); +var de_ItemCollectionSizeEstimateRange = /* @__PURE__ */ __name((output, context) => { + const retVal = (output || []).filter((e) => e != null).map((entry) => { + return (0, import_smithy_client.limitedParseDouble)(entry); + }); + return retVal; +}, "de_ItemCollectionSizeEstimateRange"); +var de_ItemList = /* @__PURE__ */ __name((output, context) => { + const retVal = (output || []).filter((e) => e != null).map((entry) => { + return de_AttributeMap(entry, context); + }); + return retVal; +}, "de_ItemList"); +var de_ItemResponse = /* @__PURE__ */ __name((output, context) => { + return (0, import_smithy_client.take)(output, { + Item: /* @__PURE__ */ __name((_) => de_AttributeMap(_, context), 
"Item") + }); +}, "de_ItemResponse"); +var de_ItemResponseList = /* @__PURE__ */ __name((output, context) => { + const retVal = (output || []).filter((e) => e != null).map((entry) => { + return de_ItemResponse(entry, context); + }); + return retVal; +}, "de_ItemResponseList"); +var de_Key = /* @__PURE__ */ __name((output, context) => { + return Object.entries(output).reduce((acc, [key, value]) => { + if (value === null) { + return acc; + } + acc[key] = de_AttributeValue((0, import_core.awsExpectUnion)(value), context); + return acc; + }, {}); +}, "de_Key"); +var de_KeyList = /* @__PURE__ */ __name((output, context) => { + const retVal = (output || []).filter((e) => e != null).map((entry) => { + return de_Key(entry, context); + }); + return retVal; +}, "de_KeyList"); +var de_KeysAndAttributes = /* @__PURE__ */ __name((output, context) => { + return (0, import_smithy_client.take)(output, { + AttributesToGet: import_smithy_client._json, + ConsistentRead: import_smithy_client.expectBoolean, + ExpressionAttributeNames: import_smithy_client._json, + Keys: /* @__PURE__ */ __name((_) => de_KeyList(_, context), "Keys"), + ProjectionExpression: import_smithy_client.expectString + }); +}, "de_KeysAndAttributes"); +var de_ListAttributeValue = /* @__PURE__ */ __name((output, context) => { + const retVal = (output || []).filter((e) => e != null).map((entry) => { + return de_AttributeValue((0, import_core.awsExpectUnion)(entry), context); + }); + return retVal; +}, "de_ListAttributeValue"); +var de_ListBackupsOutput = /* @__PURE__ */ __name((output, context) => { + return (0, import_smithy_client.take)(output, { + BackupSummaries: /* @__PURE__ */ __name((_) => de_BackupSummaries(_, context), "BackupSummaries"), + LastEvaluatedBackupArn: import_smithy_client.expectString + }); +}, "de_ListBackupsOutput"); +var de_ListImportsOutput = /* @__PURE__ */ __name((output, context) => { + return (0, import_smithy_client.take)(output, { + ImportSummaryList: /* @__PURE__ */ __name((_) => 
de_ImportSummaryList(_, context), "ImportSummaryList"), + NextToken: import_smithy_client.expectString + }); +}, "de_ListImportsOutput"); +var de_MapAttributeValue = /* @__PURE__ */ __name((output, context) => { + return Object.entries(output).reduce((acc, [key, value]) => { + if (value === null) { + return acc; + } + acc[key] = de_AttributeValue((0, import_core.awsExpectUnion)(value), context); + return acc; + }, {}); +}, "de_MapAttributeValue"); +var de_PartiQLBatchResponse = /* @__PURE__ */ __name((output, context) => { + const retVal = (output || []).filter((e) => e != null).map((entry) => { + return de_BatchStatementResponse(entry, context); + }); + return retVal; +}, "de_PartiQLBatchResponse"); +var de_PointInTimeRecoveryDescription = /* @__PURE__ */ __name((output, context) => { + return (0, import_smithy_client.take)(output, { + EarliestRestorableDateTime: /* @__PURE__ */ __name((_) => (0, import_smithy_client.expectNonNull)((0, import_smithy_client.parseEpochTimestamp)((0, import_smithy_client.expectNumber)(_))), "EarliestRestorableDateTime"), + LatestRestorableDateTime: /* @__PURE__ */ __name((_) => (0, import_smithy_client.expectNonNull)((0, import_smithy_client.parseEpochTimestamp)((0, import_smithy_client.expectNumber)(_))), "LatestRestorableDateTime"), + PointInTimeRecoveryStatus: import_smithy_client.expectString, + RecoveryPeriodInDays: import_smithy_client.expectInt32 + }); +}, "de_PointInTimeRecoveryDescription"); +var de_ProvisionedThroughputDescription = /* @__PURE__ */ __name((output, context) => { + return (0, import_smithy_client.take)(output, { + LastDecreaseDateTime: /* @__PURE__ */ __name((_) => (0, import_smithy_client.expectNonNull)((0, import_smithy_client.parseEpochTimestamp)((0, import_smithy_client.expectNumber)(_))), "LastDecreaseDateTime"), + LastIncreaseDateTime: /* @__PURE__ */ __name((_) => (0, import_smithy_client.expectNonNull)((0, import_smithy_client.parseEpochTimestamp)((0, import_smithy_client.expectNumber)(_))), 
"LastIncreaseDateTime"), + NumberOfDecreasesToday: import_smithy_client.expectLong, + ReadCapacityUnits: import_smithy_client.expectLong, + WriteCapacityUnits: import_smithy_client.expectLong + }); +}, "de_ProvisionedThroughputDescription"); +var de_PutItemInputAttributeMap = /* @__PURE__ */ __name((output, context) => { + return Object.entries(output).reduce((acc, [key, value]) => { + if (value === null) { + return acc; + } + acc[key] = de_AttributeValue((0, import_core.awsExpectUnion)(value), context); + return acc; + }, {}); +}, "de_PutItemInputAttributeMap"); +var de_PutItemOutput = /* @__PURE__ */ __name((output, context) => { + return (0, import_smithy_client.take)(output, { + Attributes: /* @__PURE__ */ __name((_) => de_AttributeMap(_, context), "Attributes"), + ConsumedCapacity: /* @__PURE__ */ __name((_) => de_ConsumedCapacity(_, context), "ConsumedCapacity"), + ItemCollectionMetrics: /* @__PURE__ */ __name((_) => de_ItemCollectionMetrics(_, context), "ItemCollectionMetrics") + }); +}, "de_PutItemOutput"); +var de_PutRequest = /* @__PURE__ */ __name((output, context) => { + return (0, import_smithy_client.take)(output, { + Item: /* @__PURE__ */ __name((_) => de_PutItemInputAttributeMap(_, context), "Item") + }); +}, "de_PutRequest"); +var de_QueryOutput = /* @__PURE__ */ __name((output, context) => { + return (0, import_smithy_client.take)(output, { + ConsumedCapacity: /* @__PURE__ */ __name((_) => de_ConsumedCapacity(_, context), "ConsumedCapacity"), + Count: import_smithy_client.expectInt32, + Items: /* @__PURE__ */ __name((_) => de_ItemList(_, context), "Items"), + LastEvaluatedKey: /* @__PURE__ */ __name((_) => de_Key(_, context), "LastEvaluatedKey"), + ScannedCount: import_smithy_client.expectInt32 + }); +}, "de_QueryOutput"); +var de_ReplicaAutoScalingDescription = /* @__PURE__ */ __name((output, context) => { + return (0, import_smithy_client.take)(output, { + GlobalSecondaryIndexes: /* @__PURE__ */ __name((_) => 
de_ReplicaGlobalSecondaryIndexAutoScalingDescriptionList(_, context), "GlobalSecondaryIndexes"), + RegionName: import_smithy_client.expectString, + ReplicaProvisionedReadCapacityAutoScalingSettings: /* @__PURE__ */ __name((_) => de_AutoScalingSettingsDescription(_, context), "ReplicaProvisionedReadCapacityAutoScalingSettings"), + ReplicaProvisionedWriteCapacityAutoScalingSettings: /* @__PURE__ */ __name((_) => de_AutoScalingSettingsDescription(_, context), "ReplicaProvisionedWriteCapacityAutoScalingSettings"), + ReplicaStatus: import_smithy_client.expectString + }); +}, "de_ReplicaAutoScalingDescription"); +var de_ReplicaAutoScalingDescriptionList = /* @__PURE__ */ __name((output, context) => { + const retVal = (output || []).filter((e) => e != null).map((entry) => { + return de_ReplicaAutoScalingDescription(entry, context); + }); + return retVal; +}, "de_ReplicaAutoScalingDescriptionList"); +var de_ReplicaDescription = /* @__PURE__ */ __name((output, context) => { + return (0, import_smithy_client.take)(output, { + GlobalSecondaryIndexes: import_smithy_client._json, + KMSMasterKeyId: import_smithy_client.expectString, + OnDemandThroughputOverride: import_smithy_client._json, + ProvisionedThroughputOverride: import_smithy_client._json, + RegionName: import_smithy_client.expectString, + ReplicaInaccessibleDateTime: /* @__PURE__ */ __name((_) => (0, import_smithy_client.expectNonNull)((0, import_smithy_client.parseEpochTimestamp)((0, import_smithy_client.expectNumber)(_))), "ReplicaInaccessibleDateTime"), + ReplicaStatus: import_smithy_client.expectString, + ReplicaStatusDescription: import_smithy_client.expectString, + ReplicaStatusPercentProgress: import_smithy_client.expectString, + ReplicaTableClassSummary: /* @__PURE__ */ __name((_) => de_TableClassSummary(_, context), "ReplicaTableClassSummary"), + WarmThroughput: import_smithy_client._json + }); +}, "de_ReplicaDescription"); +var de_ReplicaDescriptionList = /* @__PURE__ */ __name((output, context) => { + const 
retVal = (output || []).filter((e) => e != null).map((entry) => { + return de_ReplicaDescription(entry, context); + }); + return retVal; +}, "de_ReplicaDescriptionList"); +var de_ReplicaGlobalSecondaryIndexAutoScalingDescription = /* @__PURE__ */ __name((output, context) => { + return (0, import_smithy_client.take)(output, { + IndexName: import_smithy_client.expectString, + IndexStatus: import_smithy_client.expectString, + ProvisionedReadCapacityAutoScalingSettings: /* @__PURE__ */ __name((_) => de_AutoScalingSettingsDescription(_, context), "ProvisionedReadCapacityAutoScalingSettings"), + ProvisionedWriteCapacityAutoScalingSettings: /* @__PURE__ */ __name((_) => de_AutoScalingSettingsDescription(_, context), "ProvisionedWriteCapacityAutoScalingSettings") + }); +}, "de_ReplicaGlobalSecondaryIndexAutoScalingDescription"); +var de_ReplicaGlobalSecondaryIndexAutoScalingDescriptionList = /* @__PURE__ */ __name((output, context) => { + const retVal = (output || []).filter((e) => e != null).map((entry) => { + return de_ReplicaGlobalSecondaryIndexAutoScalingDescription(entry, context); + }); + return retVal; +}, "de_ReplicaGlobalSecondaryIndexAutoScalingDescriptionList"); +var de_ReplicaGlobalSecondaryIndexSettingsDescription = /* @__PURE__ */ __name((output, context) => { + return (0, import_smithy_client.take)(output, { + IndexName: import_smithy_client.expectString, + IndexStatus: import_smithy_client.expectString, + ProvisionedReadCapacityAutoScalingSettings: /* @__PURE__ */ __name((_) => de_AutoScalingSettingsDescription(_, context), "ProvisionedReadCapacityAutoScalingSettings"), + ProvisionedReadCapacityUnits: import_smithy_client.expectLong, + ProvisionedWriteCapacityAutoScalingSettings: /* @__PURE__ */ __name((_) => de_AutoScalingSettingsDescription(_, context), "ProvisionedWriteCapacityAutoScalingSettings"), + ProvisionedWriteCapacityUnits: import_smithy_client.expectLong + }); +}, "de_ReplicaGlobalSecondaryIndexSettingsDescription"); +var 
de_ReplicaGlobalSecondaryIndexSettingsDescriptionList = /* @__PURE__ */ __name((output, context) => { + const retVal = (output || []).filter((e) => e != null).map((entry) => { + return de_ReplicaGlobalSecondaryIndexSettingsDescription(entry, context); + }); + return retVal; +}, "de_ReplicaGlobalSecondaryIndexSettingsDescriptionList"); +var de_ReplicaSettingsDescription = /* @__PURE__ */ __name((output, context) => { + return (0, import_smithy_client.take)(output, { + RegionName: import_smithy_client.expectString, + ReplicaBillingModeSummary: /* @__PURE__ */ __name((_) => de_BillingModeSummary(_, context), "ReplicaBillingModeSummary"), + ReplicaGlobalSecondaryIndexSettings: /* @__PURE__ */ __name((_) => de_ReplicaGlobalSecondaryIndexSettingsDescriptionList(_, context), "ReplicaGlobalSecondaryIndexSettings"), + ReplicaProvisionedReadCapacityAutoScalingSettings: /* @__PURE__ */ __name((_) => de_AutoScalingSettingsDescription(_, context), "ReplicaProvisionedReadCapacityAutoScalingSettings"), + ReplicaProvisionedReadCapacityUnits: import_smithy_client.expectLong, + ReplicaProvisionedWriteCapacityAutoScalingSettings: /* @__PURE__ */ __name((_) => de_AutoScalingSettingsDescription(_, context), "ReplicaProvisionedWriteCapacityAutoScalingSettings"), + ReplicaProvisionedWriteCapacityUnits: import_smithy_client.expectLong, + ReplicaStatus: import_smithy_client.expectString, + ReplicaTableClassSummary: /* @__PURE__ */ __name((_) => de_TableClassSummary(_, context), "ReplicaTableClassSummary") + }); +}, "de_ReplicaSettingsDescription"); +var de_ReplicaSettingsDescriptionList = /* @__PURE__ */ __name((output, context) => { + const retVal = (output || []).filter((e) => e != null).map((entry) => { + return de_ReplicaSettingsDescription(entry, context); + }); + return retVal; +}, "de_ReplicaSettingsDescriptionList"); +var de_RestoreSummary = /* @__PURE__ */ __name((output, context) => { + return (0, import_smithy_client.take)(output, { + RestoreDateTime: /* @__PURE__ */ __name((_) 
=> (0, import_smithy_client.expectNonNull)((0, import_smithy_client.parseEpochTimestamp)((0, import_smithy_client.expectNumber)(_))), "RestoreDateTime"), + RestoreInProgress: import_smithy_client.expectBoolean, + SourceBackupArn: import_smithy_client.expectString, + SourceTableArn: import_smithy_client.expectString + }); +}, "de_RestoreSummary"); +var de_RestoreTableFromBackupOutput = /* @__PURE__ */ __name((output, context) => { + return (0, import_smithy_client.take)(output, { + TableDescription: /* @__PURE__ */ __name((_) => de_TableDescription(_, context), "TableDescription") + }); +}, "de_RestoreTableFromBackupOutput"); +var de_RestoreTableToPointInTimeOutput = /* @__PURE__ */ __name((output, context) => { + return (0, import_smithy_client.take)(output, { + TableDescription: /* @__PURE__ */ __name((_) => de_TableDescription(_, context), "TableDescription") + }); +}, "de_RestoreTableToPointInTimeOutput"); +var de_ScanOutput = /* @__PURE__ */ __name((output, context) => { + return (0, import_smithy_client.take)(output, { + ConsumedCapacity: /* @__PURE__ */ __name((_) => de_ConsumedCapacity(_, context), "ConsumedCapacity"), + Count: import_smithy_client.expectInt32, + Items: /* @__PURE__ */ __name((_) => de_ItemList(_, context), "Items"), + LastEvaluatedKey: /* @__PURE__ */ __name((_) => de_Key(_, context), "LastEvaluatedKey"), + ScannedCount: import_smithy_client.expectInt32 + }); +}, "de_ScanOutput"); +var de_SecondaryIndexesCapacityMap = /* @__PURE__ */ __name((output, context) => { + return Object.entries(output).reduce((acc, [key, value]) => { + if (value === null) { + return acc; + } + acc[key] = de_Capacity(value, context); + return acc; + }, {}); +}, "de_SecondaryIndexesCapacityMap"); +var de_SourceTableDetails = /* @__PURE__ */ __name((output, context) => { + return (0, import_smithy_client.take)(output, { + BillingMode: import_smithy_client.expectString, + ItemCount: import_smithy_client.expectLong, + KeySchema: import_smithy_client._json, + 
OnDemandThroughput: import_smithy_client._json, + ProvisionedThroughput: import_smithy_client._json, + TableArn: import_smithy_client.expectString, + TableCreationDateTime: /* @__PURE__ */ __name((_) => (0, import_smithy_client.expectNonNull)((0, import_smithy_client.parseEpochTimestamp)((0, import_smithy_client.expectNumber)(_))), "TableCreationDateTime"), + TableId: import_smithy_client.expectString, + TableName: import_smithy_client.expectString, + TableSizeBytes: import_smithy_client.expectLong + }); +}, "de_SourceTableDetails"); +var de_SourceTableFeatureDetails = /* @__PURE__ */ __name((output, context) => { + return (0, import_smithy_client.take)(output, { + GlobalSecondaryIndexes: import_smithy_client._json, + LocalSecondaryIndexes: import_smithy_client._json, + SSEDescription: /* @__PURE__ */ __name((_) => de_SSEDescription(_, context), "SSEDescription"), + StreamDescription: import_smithy_client._json, + TimeToLiveDescription: import_smithy_client._json + }); +}, "de_SourceTableFeatureDetails"); +var de_SSEDescription = /* @__PURE__ */ __name((output, context) => { + return (0, import_smithy_client.take)(output, { + InaccessibleEncryptionDateTime: /* @__PURE__ */ __name((_) => (0, import_smithy_client.expectNonNull)((0, import_smithy_client.parseEpochTimestamp)((0, import_smithy_client.expectNumber)(_))), "InaccessibleEncryptionDateTime"), + KMSMasterKeyArn: import_smithy_client.expectString, + SSEType: import_smithy_client.expectString, + Status: import_smithy_client.expectString + }); +}, "de_SSEDescription"); +var de_TableAutoScalingDescription = /* @__PURE__ */ __name((output, context) => { + return (0, import_smithy_client.take)(output, { + Replicas: /* @__PURE__ */ __name((_) => de_ReplicaAutoScalingDescriptionList(_, context), "Replicas"), + TableName: import_smithy_client.expectString, + TableStatus: import_smithy_client.expectString + }); +}, "de_TableAutoScalingDescription"); +var de_TableClassSummary = /* @__PURE__ */ __name((output, context) 
=> { + return (0, import_smithy_client.take)(output, { + LastUpdateDateTime: /* @__PURE__ */ __name((_) => (0, import_smithy_client.expectNonNull)((0, import_smithy_client.parseEpochTimestamp)((0, import_smithy_client.expectNumber)(_))), "LastUpdateDateTime"), + TableClass: import_smithy_client.expectString + }); +}, "de_TableClassSummary"); +var de_TableDescription = /* @__PURE__ */ __name((output, context) => { + return (0, import_smithy_client.take)(output, { + ArchivalSummary: /* @__PURE__ */ __name((_) => de_ArchivalSummary(_, context), "ArchivalSummary"), + AttributeDefinitions: import_smithy_client._json, + BillingModeSummary: /* @__PURE__ */ __name((_) => de_BillingModeSummary(_, context), "BillingModeSummary"), + CreationDateTime: /* @__PURE__ */ __name((_) => (0, import_smithy_client.expectNonNull)((0, import_smithy_client.parseEpochTimestamp)((0, import_smithy_client.expectNumber)(_))), "CreationDateTime"), + DeletionProtectionEnabled: import_smithy_client.expectBoolean, + GlobalSecondaryIndexes: /* @__PURE__ */ __name((_) => de_GlobalSecondaryIndexDescriptionList(_, context), "GlobalSecondaryIndexes"), + GlobalTableVersion: import_smithy_client.expectString, + ItemCount: import_smithy_client.expectLong, + KeySchema: import_smithy_client._json, + LatestStreamArn: import_smithy_client.expectString, + LatestStreamLabel: import_smithy_client.expectString, + LocalSecondaryIndexes: import_smithy_client._json, + MultiRegionConsistency: import_smithy_client.expectString, + OnDemandThroughput: import_smithy_client._json, + ProvisionedThroughput: /* @__PURE__ */ __name((_) => de_ProvisionedThroughputDescription(_, context), "ProvisionedThroughput"), + Replicas: /* @__PURE__ */ __name((_) => de_ReplicaDescriptionList(_, context), "Replicas"), + RestoreSummary: /* @__PURE__ */ __name((_) => de_RestoreSummary(_, context), "RestoreSummary"), + SSEDescription: /* @__PURE__ */ __name((_) => de_SSEDescription(_, context), "SSEDescription"), + StreamSpecification: 
import_smithy_client._json, + TableArn: import_smithy_client.expectString, + TableClassSummary: /* @__PURE__ */ __name((_) => de_TableClassSummary(_, context), "TableClassSummary"), + TableId: import_smithy_client.expectString, + TableName: import_smithy_client.expectString, + TableSizeBytes: import_smithy_client.expectLong, + TableStatus: import_smithy_client.expectString, + WarmThroughput: import_smithy_client._json + }); +}, "de_TableDescription"); +var de_TransactGetItemsOutput = /* @__PURE__ */ __name((output, context) => { + return (0, import_smithy_client.take)(output, { + ConsumedCapacity: /* @__PURE__ */ __name((_) => de_ConsumedCapacityMultiple(_, context), "ConsumedCapacity"), + Responses: /* @__PURE__ */ __name((_) => de_ItemResponseList(_, context), "Responses") + }); +}, "de_TransactGetItemsOutput"); +var de_TransactionCanceledException = /* @__PURE__ */ __name((output, context) => { + return (0, import_smithy_client.take)(output, { + CancellationReasons: /* @__PURE__ */ __name((_) => de_CancellationReasonList(_, context), "CancellationReasons"), + Message: import_smithy_client.expectString + }); +}, "de_TransactionCanceledException"); +var de_TransactWriteItemsOutput = /* @__PURE__ */ __name((output, context) => { + return (0, import_smithy_client.take)(output, { + ConsumedCapacity: /* @__PURE__ */ __name((_) => de_ConsumedCapacityMultiple(_, context), "ConsumedCapacity"), + ItemCollectionMetrics: /* @__PURE__ */ __name((_) => de_ItemCollectionMetricsPerTable(_, context), "ItemCollectionMetrics") + }); +}, "de_TransactWriteItemsOutput"); +var de_UpdateContinuousBackupsOutput = /* @__PURE__ */ __name((output, context) => { + return (0, import_smithy_client.take)(output, { + ContinuousBackupsDescription: /* @__PURE__ */ __name((_) => de_ContinuousBackupsDescription(_, context), "ContinuousBackupsDescription") + }); +}, "de_UpdateContinuousBackupsOutput"); +var de_UpdateGlobalTableOutput = /* @__PURE__ */ __name((output, context) => { + return (0, 
import_smithy_client.take)(output, { + GlobalTableDescription: /* @__PURE__ */ __name((_) => de_GlobalTableDescription(_, context), "GlobalTableDescription") + }); +}, "de_UpdateGlobalTableOutput"); +var de_UpdateGlobalTableSettingsOutput = /* @__PURE__ */ __name((output, context) => { + return (0, import_smithy_client.take)(output, { + GlobalTableName: import_smithy_client.expectString, + ReplicaSettings: /* @__PURE__ */ __name((_) => de_ReplicaSettingsDescriptionList(_, context), "ReplicaSettings") + }); +}, "de_UpdateGlobalTableSettingsOutput"); +var de_UpdateItemOutput = /* @__PURE__ */ __name((output, context) => { + return (0, import_smithy_client.take)(output, { + Attributes: /* @__PURE__ */ __name((_) => de_AttributeMap(_, context), "Attributes"), + ConsumedCapacity: /* @__PURE__ */ __name((_) => de_ConsumedCapacity(_, context), "ConsumedCapacity"), + ItemCollectionMetrics: /* @__PURE__ */ __name((_) => de_ItemCollectionMetrics(_, context), "ItemCollectionMetrics") + }); +}, "de_UpdateItemOutput"); +var de_UpdateTableOutput = /* @__PURE__ */ __name((output, context) => { + return (0, import_smithy_client.take)(output, { + TableDescription: /* @__PURE__ */ __name((_) => de_TableDescription(_, context), "TableDescription") + }); +}, "de_UpdateTableOutput"); +var de_UpdateTableReplicaAutoScalingOutput = /* @__PURE__ */ __name((output, context) => { + return (0, import_smithy_client.take)(output, { + TableAutoScalingDescription: /* @__PURE__ */ __name((_) => de_TableAutoScalingDescription(_, context), "TableAutoScalingDescription") + }); +}, "de_UpdateTableReplicaAutoScalingOutput"); +var de_WriteRequest = /* @__PURE__ */ __name((output, context) => { + return (0, import_smithy_client.take)(output, { + DeleteRequest: /* @__PURE__ */ __name((_) => de_DeleteRequest(_, context), "DeleteRequest"), + PutRequest: /* @__PURE__ */ __name((_) => de_PutRequest(_, context), "PutRequest") + }); +}, "de_WriteRequest"); +var de_WriteRequests = /* @__PURE__ */ __name((output, 
context) => { + const retVal = (output || []).filter((e) => e != null).map((entry) => { + return de_WriteRequest(entry, context); + }); + return retVal; +}, "de_WriteRequests"); +var deserializeMetadata = /* @__PURE__ */ __name((output) => ({ + httpStatusCode: output.statusCode, + requestId: output.headers["x-amzn-requestid"] ?? output.headers["x-amzn-request-id"] ?? output.headers["x-amz-request-id"], + extendedRequestId: output.headers["x-amz-id-2"], + cfId: output.headers["x-amz-cf-id"] +}), "deserializeMetadata"); +var throwDefaultError = (0, import_smithy_client.withBaseException)(DynamoDBServiceException); +var buildHttpRpcRequest = /* @__PURE__ */ __name(async (context, headers, path, resolvedHostname, body) => { + const { hostname, protocol = "https", port, path: basePath } = await context.endpoint(); + const contents = { + protocol, + hostname, + port, + method: "POST", + path: basePath.endsWith("/") ? basePath.slice(0, -1) + path : basePath + path, + headers + }; + if (resolvedHostname !== void 0) { + contents.hostname = resolvedHostname; + } + if (body !== void 0) { + contents.body = body; + } + return new import_protocol_http.HttpRequest(contents); +}, "buildHttpRpcRequest"); +function sharedHeaders(operation) { + return { + "content-type": "application/x-amz-json-1.0", + "x-amz-target": `DynamoDB_20120810.${operation}` + }; +} +__name(sharedHeaders, "sharedHeaders"); + +// src/commands/DescribeEndpointsCommand.ts +var DescribeEndpointsCommand = class extends import_smithy_client.Command.classBuilder().ep(commonParams).m(function(Command, cs, config, o) { + return [ + (0, import_middleware_serde.getSerdePlugin)(config, this.serialize, this.deserialize), + (0, import_middleware_endpoint.getEndpointPlugin)(config, Command.getEndpointParameterInstructions()) + ]; +}).s("DynamoDB_20120810", "DescribeEndpoints", {}).n("DynamoDBClient", "DescribeEndpointsCommand").f(void 0, void 0).ser(se_DescribeEndpointsCommand).de(de_DescribeEndpointsCommand).build() { + 
static { + __name(this, "DescribeEndpointsCommand"); + } +}; + +// src/DynamoDBClient.ts +var import_runtimeConfig = require("././runtimeConfig"); + +// src/runtimeExtensions.ts +var import_region_config_resolver = require("@aws-sdk/region-config-resolver"); + + + +// src/auth/httpAuthExtensionConfiguration.ts +var getHttpAuthExtensionConfiguration = /* @__PURE__ */ __name((runtimeConfig) => { + const _httpAuthSchemes = runtimeConfig.httpAuthSchemes; + let _httpAuthSchemeProvider = runtimeConfig.httpAuthSchemeProvider; + let _credentials = runtimeConfig.credentials; + return { + setHttpAuthScheme(httpAuthScheme) { + const index = _httpAuthSchemes.findIndex((scheme) => scheme.schemeId === httpAuthScheme.schemeId); + if (index === -1) { + _httpAuthSchemes.push(httpAuthScheme); + } else { + _httpAuthSchemes.splice(index, 1, httpAuthScheme); + } + }, + httpAuthSchemes() { + return _httpAuthSchemes; + }, + setHttpAuthSchemeProvider(httpAuthSchemeProvider) { + _httpAuthSchemeProvider = httpAuthSchemeProvider; + }, + httpAuthSchemeProvider() { + return _httpAuthSchemeProvider; + }, + setCredentials(credentials) { + _credentials = credentials; + }, + credentials() { + return _credentials; + } + }; +}, "getHttpAuthExtensionConfiguration"); +var resolveHttpAuthRuntimeConfig = /* @__PURE__ */ __name((config) => { + return { + httpAuthSchemes: config.httpAuthSchemes(), + httpAuthSchemeProvider: config.httpAuthSchemeProvider(), + credentials: config.credentials() + }; +}, "resolveHttpAuthRuntimeConfig"); + +// src/runtimeExtensions.ts +var resolveRuntimeExtensions = /* @__PURE__ */ __name((runtimeConfig, extensions) => { + const extensionConfiguration = Object.assign( + (0, import_region_config_resolver.getAwsRegionExtensionConfiguration)(runtimeConfig), + (0, import_smithy_client.getDefaultExtensionConfiguration)(runtimeConfig), + (0, import_protocol_http.getHttpHandlerExtensionConfiguration)(runtimeConfig), + getHttpAuthExtensionConfiguration(runtimeConfig) + ); + 
extensions.forEach((extension) => extension.configure(extensionConfiguration)); + return Object.assign( + runtimeConfig, + (0, import_region_config_resolver.resolveAwsRegionExtensionConfiguration)(extensionConfiguration), + (0, import_smithy_client.resolveDefaultRuntimeConfig)(extensionConfiguration), + (0, import_protocol_http.resolveHttpHandlerRuntimeConfig)(extensionConfiguration), + resolveHttpAuthRuntimeConfig(extensionConfiguration) + ); +}, "resolveRuntimeExtensions"); + +// src/DynamoDBClient.ts +var DynamoDBClient = class extends import_smithy_client.Client { + static { + __name(this, "DynamoDBClient"); + } + /** + * The resolved configuration of DynamoDBClient class. This is resolved and normalized from the {@link DynamoDBClientConfig | constructor configuration interface}. + */ + config; + constructor(...[configuration]) { + const _config_0 = (0, import_runtimeConfig.getRuntimeConfig)(configuration || {}); + super(_config_0); + this.initConfig = _config_0; + const _config_1 = resolveClientEndpointParameters(_config_0); + const _config_2 = (0, import_account_id_endpoint.resolveAccountIdEndpointModeConfig)(_config_1); + const _config_3 = (0, import_middleware_user_agent.resolveUserAgentConfig)(_config_2); + const _config_4 = (0, import_middleware_retry.resolveRetryConfig)(_config_3); + const _config_5 = (0, import_config_resolver.resolveRegionConfig)(_config_4); + const _config_6 = (0, import_middleware_host_header.resolveHostHeaderConfig)(_config_5); + const _config_7 = (0, import_middleware_endpoint.resolveEndpointConfig)(_config_6); + const _config_8 = (0, import_httpAuthSchemeProvider.resolveHttpAuthSchemeConfig)(_config_7); + const _config_9 = (0, import_middleware_endpoint_discovery.resolveEndpointDiscoveryConfig)(_config_8, { + endpointDiscoveryCommandCtor: DescribeEndpointsCommand + }); + const _config_10 = resolveRuntimeExtensions(_config_9, configuration?.extensions || []); + this.config = _config_10; + this.middlewareStack.use((0, 
import_middleware_user_agent.getUserAgentPlugin)(this.config)); + this.middlewareStack.use((0, import_middleware_retry.getRetryPlugin)(this.config)); + this.middlewareStack.use((0, import_middleware_content_length.getContentLengthPlugin)(this.config)); + this.middlewareStack.use((0, import_middleware_host_header.getHostHeaderPlugin)(this.config)); + this.middlewareStack.use((0, import_middleware_logger.getLoggerPlugin)(this.config)); + this.middlewareStack.use((0, import_middleware_recursion_detection.getRecursionDetectionPlugin)(this.config)); + this.middlewareStack.use( + (0, import_core2.getHttpAuthSchemeEndpointRuleSetPlugin)(this.config, { + httpAuthSchemeParametersProvider: import_httpAuthSchemeProvider.defaultDynamoDBHttpAuthSchemeParametersProvider, + identityProviderConfigProvider: /* @__PURE__ */ __name(async (config) => new import_core2.DefaultIdentityProviderConfig({ + "aws.auth#sigv4": config.credentials + }), "identityProviderConfigProvider") + }) + ); + this.middlewareStack.use((0, import_core2.getHttpSigningPlugin)(this.config)); + } + /** + * Destroy underlying resources, like sockets. It's usually not necessary to do this. + * However in Node.js, it's best to explicitly shut down the client's agent when it is no longer needed. + * Otherwise, sockets might stay open for quite a long time before the server terminates them. 
+ */ + destroy() { + super.destroy(); + } +}; + +// src/DynamoDB.ts + + +// src/commands/BatchExecuteStatementCommand.ts + + + +var BatchExecuteStatementCommand = class extends import_smithy_client.Command.classBuilder().ep(commonParams).m(function(Command, cs, config, o) { + return [ + (0, import_middleware_serde.getSerdePlugin)(config, this.serialize, this.deserialize), + (0, import_middleware_endpoint.getEndpointPlugin)(config, Command.getEndpointParameterInstructions()) + ]; +}).s("DynamoDB_20120810", "BatchExecuteStatement", {}).n("DynamoDBClient", "BatchExecuteStatementCommand").f(void 0, void 0).ser(se_BatchExecuteStatementCommand).de(de_BatchExecuteStatementCommand).build() { + static { + __name(this, "BatchExecuteStatementCommand"); + } +}; + +// src/commands/BatchGetItemCommand.ts + + + +var BatchGetItemCommand = class extends import_smithy_client.Command.classBuilder().ep({ + ...commonParams, + ResourceArnList: { type: "operationContextParams", get: /* @__PURE__ */ __name((input) => Object.keys(input?.RequestItems ?? {}), "get") } +}).m(function(Command, cs, config, o) { + return [ + (0, import_middleware_serde.getSerdePlugin)(config, this.serialize, this.deserialize), + (0, import_middleware_endpoint.getEndpointPlugin)(config, Command.getEndpointParameterInstructions()) + ]; +}).s("DynamoDB_20120810", "BatchGetItem", {}).n("DynamoDBClient", "BatchGetItemCommand").f(void 0, void 0).ser(se_BatchGetItemCommand).de(de_BatchGetItemCommand).build() { + static { + __name(this, "BatchGetItemCommand"); + } +}; + +// src/commands/BatchWriteItemCommand.ts + + + +var BatchWriteItemCommand = class extends import_smithy_client.Command.classBuilder().ep({ + ...commonParams, + ResourceArnList: { type: "operationContextParams", get: /* @__PURE__ */ __name((input) => Object.keys(input?.RequestItems ?? 
{}), "get") } +}).m(function(Command, cs, config, o) { + return [ + (0, import_middleware_serde.getSerdePlugin)(config, this.serialize, this.deserialize), + (0, import_middleware_endpoint.getEndpointPlugin)(config, Command.getEndpointParameterInstructions()) + ]; +}).s("DynamoDB_20120810", "BatchWriteItem", {}).n("DynamoDBClient", "BatchWriteItemCommand").f(void 0, void 0).ser(se_BatchWriteItemCommand).de(de_BatchWriteItemCommand).build() { + static { + __name(this, "BatchWriteItemCommand"); + } +}; + +// src/commands/CreateBackupCommand.ts + + + +var CreateBackupCommand = class extends import_smithy_client.Command.classBuilder().ep({ + ...commonParams, + ResourceArn: { type: "contextParams", name: "TableName" } +}).m(function(Command, cs, config, o) { + return [ + (0, import_middleware_serde.getSerdePlugin)(config, this.serialize, this.deserialize), + (0, import_middleware_endpoint.getEndpointPlugin)(config, Command.getEndpointParameterInstructions()) + ]; +}).s("DynamoDB_20120810", "CreateBackup", {}).n("DynamoDBClient", "CreateBackupCommand").f(void 0, void 0).ser(se_CreateBackupCommand).de(de_CreateBackupCommand).build() { + static { + __name(this, "CreateBackupCommand"); + } +}; + +// src/commands/CreateGlobalTableCommand.ts + + + +var CreateGlobalTableCommand = class extends import_smithy_client.Command.classBuilder().ep({ + ...commonParams, + ResourceArn: { type: "contextParams", name: "GlobalTableName" } +}).m(function(Command, cs, config, o) { + return [ + (0, import_middleware_serde.getSerdePlugin)(config, this.serialize, this.deserialize), + (0, import_middleware_endpoint.getEndpointPlugin)(config, Command.getEndpointParameterInstructions()) + ]; +}).s("DynamoDB_20120810", "CreateGlobalTable", {}).n("DynamoDBClient", "CreateGlobalTableCommand").f(void 0, void 0).ser(se_CreateGlobalTableCommand).de(de_CreateGlobalTableCommand).build() { + static { + __name(this, "CreateGlobalTableCommand"); + } +}; + +// src/commands/CreateTableCommand.ts + + + +var 
CreateTableCommand = class extends import_smithy_client.Command.classBuilder().ep({ + ...commonParams, + ResourceArn: { type: "contextParams", name: "TableName" } +}).m(function(Command, cs, config, o) { + return [ + (0, import_middleware_serde.getSerdePlugin)(config, this.serialize, this.deserialize), + (0, import_middleware_endpoint.getEndpointPlugin)(config, Command.getEndpointParameterInstructions()) + ]; +}).s("DynamoDB_20120810", "CreateTable", {}).n("DynamoDBClient", "CreateTableCommand").f(void 0, void 0).ser(se_CreateTableCommand).de(de_CreateTableCommand).build() { + static { + __name(this, "CreateTableCommand"); + } +}; + +// src/commands/DeleteBackupCommand.ts + + + +var DeleteBackupCommand = class extends import_smithy_client.Command.classBuilder().ep({ + ...commonParams, + ResourceArn: { type: "contextParams", name: "BackupArn" } +}).m(function(Command, cs, config, o) { + return [ + (0, import_middleware_serde.getSerdePlugin)(config, this.serialize, this.deserialize), + (0, import_middleware_endpoint.getEndpointPlugin)(config, Command.getEndpointParameterInstructions()) + ]; +}).s("DynamoDB_20120810", "DeleteBackup", {}).n("DynamoDBClient", "DeleteBackupCommand").f(void 0, void 0).ser(se_DeleteBackupCommand).de(de_DeleteBackupCommand).build() { + static { + __name(this, "DeleteBackupCommand"); + } +}; + +// src/commands/DeleteItemCommand.ts + + + +var DeleteItemCommand = class extends import_smithy_client.Command.classBuilder().ep({ + ...commonParams, + ResourceArn: { type: "contextParams", name: "TableName" } +}).m(function(Command, cs, config, o) { + return [ + (0, import_middleware_serde.getSerdePlugin)(config, this.serialize, this.deserialize), + (0, import_middleware_endpoint.getEndpointPlugin)(config, Command.getEndpointParameterInstructions()) + ]; +}).s("DynamoDB_20120810", "DeleteItem", {}).n("DynamoDBClient", "DeleteItemCommand").f(void 0, void 0).ser(se_DeleteItemCommand).de(de_DeleteItemCommand).build() { + static { + __name(this, 
"DeleteItemCommand"); + } +}; + +// src/commands/DeleteResourcePolicyCommand.ts + + + +var DeleteResourcePolicyCommand = class extends import_smithy_client.Command.classBuilder().ep({ + ...commonParams, + ResourceArn: { type: "contextParams", name: "ResourceArn" } +}).m(function(Command, cs, config, o) { + return [ + (0, import_middleware_serde.getSerdePlugin)(config, this.serialize, this.deserialize), + (0, import_middleware_endpoint.getEndpointPlugin)(config, Command.getEndpointParameterInstructions()) + ]; +}).s("DynamoDB_20120810", "DeleteResourcePolicy", {}).n("DynamoDBClient", "DeleteResourcePolicyCommand").f(void 0, void 0).ser(se_DeleteResourcePolicyCommand).de(de_DeleteResourcePolicyCommand).build() { + static { + __name(this, "DeleteResourcePolicyCommand"); + } +}; + +// src/commands/DeleteTableCommand.ts + + + +var DeleteTableCommand = class extends import_smithy_client.Command.classBuilder().ep({ + ...commonParams, + ResourceArn: { type: "contextParams", name: "TableName" } +}).m(function(Command, cs, config, o) { + return [ + (0, import_middleware_serde.getSerdePlugin)(config, this.serialize, this.deserialize), + (0, import_middleware_endpoint.getEndpointPlugin)(config, Command.getEndpointParameterInstructions()) + ]; +}).s("DynamoDB_20120810", "DeleteTable", {}).n("DynamoDBClient", "DeleteTableCommand").f(void 0, void 0).ser(se_DeleteTableCommand).de(de_DeleteTableCommand).build() { + static { + __name(this, "DeleteTableCommand"); + } +}; + +// src/commands/DescribeBackupCommand.ts + + + +var DescribeBackupCommand = class extends import_smithy_client.Command.classBuilder().ep({ + ...commonParams, + ResourceArn: { type: "contextParams", name: "BackupArn" } +}).m(function(Command, cs, config, o) { + return [ + (0, import_middleware_serde.getSerdePlugin)(config, this.serialize, this.deserialize), + (0, import_middleware_endpoint.getEndpointPlugin)(config, Command.getEndpointParameterInstructions()) + ]; +}).s("DynamoDB_20120810", "DescribeBackup", 
{}).n("DynamoDBClient", "DescribeBackupCommand").f(void 0, void 0).ser(se_DescribeBackupCommand).de(de_DescribeBackupCommand).build() { + static { + __name(this, "DescribeBackupCommand"); + } +}; + +// src/commands/DescribeContinuousBackupsCommand.ts + + + +var DescribeContinuousBackupsCommand = class extends import_smithy_client.Command.classBuilder().ep({ + ...commonParams, + ResourceArn: { type: "contextParams", name: "TableName" } +}).m(function(Command, cs, config, o) { + return [ + (0, import_middleware_serde.getSerdePlugin)(config, this.serialize, this.deserialize), + (0, import_middleware_endpoint.getEndpointPlugin)(config, Command.getEndpointParameterInstructions()) + ]; +}).s("DynamoDB_20120810", "DescribeContinuousBackups", {}).n("DynamoDBClient", "DescribeContinuousBackupsCommand").f(void 0, void 0).ser(se_DescribeContinuousBackupsCommand).de(de_DescribeContinuousBackupsCommand).build() { + static { + __name(this, "DescribeContinuousBackupsCommand"); + } +}; + +// src/commands/DescribeContributorInsightsCommand.ts + + + +var DescribeContributorInsightsCommand = class extends import_smithy_client.Command.classBuilder().ep({ + ...commonParams, + ResourceArn: { type: "contextParams", name: "TableName" } +}).m(function(Command, cs, config, o) { + return [ + (0, import_middleware_serde.getSerdePlugin)(config, this.serialize, this.deserialize), + (0, import_middleware_endpoint.getEndpointPlugin)(config, Command.getEndpointParameterInstructions()) + ]; +}).s("DynamoDB_20120810", "DescribeContributorInsights", {}).n("DynamoDBClient", "DescribeContributorInsightsCommand").f(void 0, void 0).ser(se_DescribeContributorInsightsCommand).de(de_DescribeContributorInsightsCommand).build() { + static { + __name(this, "DescribeContributorInsightsCommand"); + } +}; + +// src/commands/DescribeExportCommand.ts + + + +var DescribeExportCommand = class extends import_smithy_client.Command.classBuilder().ep({ + ...commonParams, + ResourceArn: { type: "contextParams", name: 
"ExportArn" } +}).m(function(Command, cs, config, o) { + return [ + (0, import_middleware_serde.getSerdePlugin)(config, this.serialize, this.deserialize), + (0, import_middleware_endpoint.getEndpointPlugin)(config, Command.getEndpointParameterInstructions()) + ]; +}).s("DynamoDB_20120810", "DescribeExport", {}).n("DynamoDBClient", "DescribeExportCommand").f(void 0, void 0).ser(se_DescribeExportCommand).de(de_DescribeExportCommand).build() { + static { + __name(this, "DescribeExportCommand"); + } +}; + +// src/commands/DescribeGlobalTableCommand.ts + + + +var DescribeGlobalTableCommand = class extends import_smithy_client.Command.classBuilder().ep({ + ...commonParams, + ResourceArn: { type: "contextParams", name: "GlobalTableName" } +}).m(function(Command, cs, config, o) { + return [ + (0, import_middleware_serde.getSerdePlugin)(config, this.serialize, this.deserialize), + (0, import_middleware_endpoint.getEndpointPlugin)(config, Command.getEndpointParameterInstructions()) + ]; +}).s("DynamoDB_20120810", "DescribeGlobalTable", {}).n("DynamoDBClient", "DescribeGlobalTableCommand").f(void 0, void 0).ser(se_DescribeGlobalTableCommand).de(de_DescribeGlobalTableCommand).build() { + static { + __name(this, "DescribeGlobalTableCommand"); + } +}; + +// src/commands/DescribeGlobalTableSettingsCommand.ts + + + +var DescribeGlobalTableSettingsCommand = class extends import_smithy_client.Command.classBuilder().ep({ + ...commonParams, + ResourceArn: { type: "contextParams", name: "GlobalTableName" } +}).m(function(Command, cs, config, o) { + return [ + (0, import_middleware_serde.getSerdePlugin)(config, this.serialize, this.deserialize), + (0, import_middleware_endpoint.getEndpointPlugin)(config, Command.getEndpointParameterInstructions()) + ]; +}).s("DynamoDB_20120810", "DescribeGlobalTableSettings", {}).n("DynamoDBClient", "DescribeGlobalTableSettingsCommand").f(void 0, void 0).ser(se_DescribeGlobalTableSettingsCommand).de(de_DescribeGlobalTableSettingsCommand).build() { + 
static { + __name(this, "DescribeGlobalTableSettingsCommand"); + } +}; + +// src/commands/DescribeImportCommand.ts + + + +var DescribeImportCommand = class extends import_smithy_client.Command.classBuilder().ep({ + ...commonParams, + ResourceArn: { type: "contextParams", name: "ImportArn" } +}).m(function(Command, cs, config, o) { + return [ + (0, import_middleware_serde.getSerdePlugin)(config, this.serialize, this.deserialize), + (0, import_middleware_endpoint.getEndpointPlugin)(config, Command.getEndpointParameterInstructions()) + ]; +}).s("DynamoDB_20120810", "DescribeImport", {}).n("DynamoDBClient", "DescribeImportCommand").f(void 0, void 0).ser(se_DescribeImportCommand).de(de_DescribeImportCommand).build() { + static { + __name(this, "DescribeImportCommand"); + } +}; + +// src/commands/DescribeKinesisStreamingDestinationCommand.ts + + + +var DescribeKinesisStreamingDestinationCommand = class extends import_smithy_client.Command.classBuilder().ep({ + ...commonParams, + ResourceArn: { type: "contextParams", name: "TableName" } +}).m(function(Command, cs, config, o) { + return [ + (0, import_middleware_serde.getSerdePlugin)(config, this.serialize, this.deserialize), + (0, import_middleware_endpoint.getEndpointPlugin)(config, Command.getEndpointParameterInstructions()) + ]; +}).s("DynamoDB_20120810", "DescribeKinesisStreamingDestination", {}).n("DynamoDBClient", "DescribeKinesisStreamingDestinationCommand").f(void 0, void 0).ser(se_DescribeKinesisStreamingDestinationCommand).de(de_DescribeKinesisStreamingDestinationCommand).build() { + static { + __name(this, "DescribeKinesisStreamingDestinationCommand"); + } +}; + +// src/commands/DescribeLimitsCommand.ts + + + +var DescribeLimitsCommand = class extends import_smithy_client.Command.classBuilder().ep(commonParams).m(function(Command, cs, config, o) { + return [ + (0, import_middleware_serde.getSerdePlugin)(config, this.serialize, this.deserialize), + (0, import_middleware_endpoint.getEndpointPlugin)(config, 
Command.getEndpointParameterInstructions()) + ]; +}).s("DynamoDB_20120810", "DescribeLimits", {}).n("DynamoDBClient", "DescribeLimitsCommand").f(void 0, void 0).ser(se_DescribeLimitsCommand).de(de_DescribeLimitsCommand).build() { + static { + __name(this, "DescribeLimitsCommand"); + } +}; + +// src/commands/DescribeTableCommand.ts + + + +var DescribeTableCommand = class extends import_smithy_client.Command.classBuilder().ep({ + ...commonParams, + ResourceArn: { type: "contextParams", name: "TableName" } +}).m(function(Command, cs, config, o) { + return [ + (0, import_middleware_serde.getSerdePlugin)(config, this.serialize, this.deserialize), + (0, import_middleware_endpoint.getEndpointPlugin)(config, Command.getEndpointParameterInstructions()) + ]; +}).s("DynamoDB_20120810", "DescribeTable", {}).n("DynamoDBClient", "DescribeTableCommand").f(void 0, void 0).ser(se_DescribeTableCommand).de(de_DescribeTableCommand).build() { + static { + __name(this, "DescribeTableCommand"); + } +}; + +// src/commands/DescribeTableReplicaAutoScalingCommand.ts + + + +var DescribeTableReplicaAutoScalingCommand = class extends import_smithy_client.Command.classBuilder().ep({ + ...commonParams, + ResourceArn: { type: "contextParams", name: "TableName" } +}).m(function(Command, cs, config, o) { + return [ + (0, import_middleware_serde.getSerdePlugin)(config, this.serialize, this.deserialize), + (0, import_middleware_endpoint.getEndpointPlugin)(config, Command.getEndpointParameterInstructions()) + ]; +}).s("DynamoDB_20120810", "DescribeTableReplicaAutoScaling", {}).n("DynamoDBClient", "DescribeTableReplicaAutoScalingCommand").f(void 0, void 0).ser(se_DescribeTableReplicaAutoScalingCommand).de(de_DescribeTableReplicaAutoScalingCommand).build() { + static { + __name(this, "DescribeTableReplicaAutoScalingCommand"); + } +}; + +// src/commands/DescribeTimeToLiveCommand.ts + + + +var DescribeTimeToLiveCommand = class extends import_smithy_client.Command.classBuilder().ep({ + ...commonParams, + 
ResourceArn: { type: "contextParams", name: "TableName" } +}).m(function(Command, cs, config, o) { + return [ + (0, import_middleware_serde.getSerdePlugin)(config, this.serialize, this.deserialize), + (0, import_middleware_endpoint.getEndpointPlugin)(config, Command.getEndpointParameterInstructions()) + ]; +}).s("DynamoDB_20120810", "DescribeTimeToLive", {}).n("DynamoDBClient", "DescribeTimeToLiveCommand").f(void 0, void 0).ser(se_DescribeTimeToLiveCommand).de(de_DescribeTimeToLiveCommand).build() { + static { + __name(this, "DescribeTimeToLiveCommand"); + } +}; + +// src/commands/DisableKinesisStreamingDestinationCommand.ts + + + +var DisableKinesisStreamingDestinationCommand = class extends import_smithy_client.Command.classBuilder().ep({ + ...commonParams, + ResourceArn: { type: "contextParams", name: "TableName" } +}).m(function(Command, cs, config, o) { + return [ + (0, import_middleware_serde.getSerdePlugin)(config, this.serialize, this.deserialize), + (0, import_middleware_endpoint.getEndpointPlugin)(config, Command.getEndpointParameterInstructions()) + ]; +}).s("DynamoDB_20120810", "DisableKinesisStreamingDestination", {}).n("DynamoDBClient", "DisableKinesisStreamingDestinationCommand").f(void 0, void 0).ser(se_DisableKinesisStreamingDestinationCommand).de(de_DisableKinesisStreamingDestinationCommand).build() { + static { + __name(this, "DisableKinesisStreamingDestinationCommand"); + } +}; + +// src/commands/EnableKinesisStreamingDestinationCommand.ts + + + +var EnableKinesisStreamingDestinationCommand = class extends import_smithy_client.Command.classBuilder().ep({ + ...commonParams, + ResourceArn: { type: "contextParams", name: "TableName" } +}).m(function(Command, cs, config, o) { + return [ + (0, import_middleware_serde.getSerdePlugin)(config, this.serialize, this.deserialize), + (0, import_middleware_endpoint.getEndpointPlugin)(config, Command.getEndpointParameterInstructions()) + ]; +}).s("DynamoDB_20120810", "EnableKinesisStreamingDestination", 
{}).n("DynamoDBClient", "EnableKinesisStreamingDestinationCommand").f(void 0, void 0).ser(se_EnableKinesisStreamingDestinationCommand).de(de_EnableKinesisStreamingDestinationCommand).build() { + static { + __name(this, "EnableKinesisStreamingDestinationCommand"); + } +}; + +// src/commands/ExecuteStatementCommand.ts + + + +var ExecuteStatementCommand = class extends import_smithy_client.Command.classBuilder().ep(commonParams).m(function(Command, cs, config, o) { + return [ + (0, import_middleware_serde.getSerdePlugin)(config, this.serialize, this.deserialize), + (0, import_middleware_endpoint.getEndpointPlugin)(config, Command.getEndpointParameterInstructions()) + ]; +}).s("DynamoDB_20120810", "ExecuteStatement", {}).n("DynamoDBClient", "ExecuteStatementCommand").f(void 0, void 0).ser(se_ExecuteStatementCommand).de(de_ExecuteStatementCommand).build() { + static { + __name(this, "ExecuteStatementCommand"); + } +}; + +// src/commands/ExecuteTransactionCommand.ts + + + +var ExecuteTransactionCommand = class extends import_smithy_client.Command.classBuilder().ep(commonParams).m(function(Command, cs, config, o) { + return [ + (0, import_middleware_serde.getSerdePlugin)(config, this.serialize, this.deserialize), + (0, import_middleware_endpoint.getEndpointPlugin)(config, Command.getEndpointParameterInstructions()) + ]; +}).s("DynamoDB_20120810", "ExecuteTransaction", {}).n("DynamoDBClient", "ExecuteTransactionCommand").f(void 0, void 0).ser(se_ExecuteTransactionCommand).de(de_ExecuteTransactionCommand).build() { + static { + __name(this, "ExecuteTransactionCommand"); + } +}; + +// src/commands/ExportTableToPointInTimeCommand.ts + + + +var ExportTableToPointInTimeCommand = class extends import_smithy_client.Command.classBuilder().ep({ + ...commonParams, + ResourceArn: { type: "contextParams", name: "TableArn" } +}).m(function(Command, cs, config, o) { + return [ + (0, import_middleware_serde.getSerdePlugin)(config, this.serialize, this.deserialize), + (0, 
import_middleware_endpoint.getEndpointPlugin)(config, Command.getEndpointParameterInstructions()) + ]; +}).s("DynamoDB_20120810", "ExportTableToPointInTime", {}).n("DynamoDBClient", "ExportTableToPointInTimeCommand").f(void 0, void 0).ser(se_ExportTableToPointInTimeCommand).de(de_ExportTableToPointInTimeCommand).build() { + static { + __name(this, "ExportTableToPointInTimeCommand"); + } +}; + +// src/commands/GetItemCommand.ts + + + +var GetItemCommand = class extends import_smithy_client.Command.classBuilder().ep({ + ...commonParams, + ResourceArn: { type: "contextParams", name: "TableName" } +}).m(function(Command, cs, config, o) { + return [ + (0, import_middleware_serde.getSerdePlugin)(config, this.serialize, this.deserialize), + (0, import_middleware_endpoint.getEndpointPlugin)(config, Command.getEndpointParameterInstructions()) + ]; +}).s("DynamoDB_20120810", "GetItem", {}).n("DynamoDBClient", "GetItemCommand").f(void 0, void 0).ser(se_GetItemCommand).de(de_GetItemCommand).build() { + static { + __name(this, "GetItemCommand"); + } +}; + +// src/commands/GetResourcePolicyCommand.ts + + + +var GetResourcePolicyCommand = class extends import_smithy_client.Command.classBuilder().ep({ + ...commonParams, + ResourceArn: { type: "contextParams", name: "ResourceArn" } +}).m(function(Command, cs, config, o) { + return [ + (0, import_middleware_serde.getSerdePlugin)(config, this.serialize, this.deserialize), + (0, import_middleware_endpoint.getEndpointPlugin)(config, Command.getEndpointParameterInstructions()) + ]; +}).s("DynamoDB_20120810", "GetResourcePolicy", {}).n("DynamoDBClient", "GetResourcePolicyCommand").f(void 0, void 0).ser(se_GetResourcePolicyCommand).de(de_GetResourcePolicyCommand).build() { + static { + __name(this, "GetResourcePolicyCommand"); + } +}; + +// src/commands/ImportTableCommand.ts + + + +var ImportTableCommand = class extends import_smithy_client.Command.classBuilder().ep({ + ...commonParams, + ResourceArn: { type: "operationContextParams", 
get: /* @__PURE__ */ __name((input) => input?.TableCreationParameters?.TableName, "get") } +}).m(function(Command, cs, config, o) { + return [ + (0, import_middleware_serde.getSerdePlugin)(config, this.serialize, this.deserialize), + (0, import_middleware_endpoint.getEndpointPlugin)(config, Command.getEndpointParameterInstructions()) + ]; +}).s("DynamoDB_20120810", "ImportTable", {}).n("DynamoDBClient", "ImportTableCommand").f(void 0, void 0).ser(se_ImportTableCommand).de(de_ImportTableCommand).build() { + static { + __name(this, "ImportTableCommand"); + } +}; + +// src/commands/ListBackupsCommand.ts + + + +var ListBackupsCommand = class extends import_smithy_client.Command.classBuilder().ep({ + ...commonParams, + ResourceArn: { type: "contextParams", name: "TableName" } +}).m(function(Command, cs, config, o) { + return [ + (0, import_middleware_serde.getSerdePlugin)(config, this.serialize, this.deserialize), + (0, import_middleware_endpoint.getEndpointPlugin)(config, Command.getEndpointParameterInstructions()) + ]; +}).s("DynamoDB_20120810", "ListBackups", {}).n("DynamoDBClient", "ListBackupsCommand").f(void 0, void 0).ser(se_ListBackupsCommand).de(de_ListBackupsCommand).build() { + static { + __name(this, "ListBackupsCommand"); + } +}; + +// src/commands/ListContributorInsightsCommand.ts + + + +var ListContributorInsightsCommand = class extends import_smithy_client.Command.classBuilder().ep({ + ...commonParams, + ResourceArn: { type: "contextParams", name: "TableName" } +}).m(function(Command, cs, config, o) { + return [ + (0, import_middleware_serde.getSerdePlugin)(config, this.serialize, this.deserialize), + (0, import_middleware_endpoint.getEndpointPlugin)(config, Command.getEndpointParameterInstructions()) + ]; +}).s("DynamoDB_20120810", "ListContributorInsights", {}).n("DynamoDBClient", "ListContributorInsightsCommand").f(void 0, void 0).ser(se_ListContributorInsightsCommand).de(de_ListContributorInsightsCommand).build() { + static { + __name(this, 
"ListContributorInsightsCommand"); + } +}; + +// src/commands/ListExportsCommand.ts + + + +var ListExportsCommand = class extends import_smithy_client.Command.classBuilder().ep({ + ...commonParams, + ResourceArn: { type: "contextParams", name: "TableArn" } +}).m(function(Command, cs, config, o) { + return [ + (0, import_middleware_serde.getSerdePlugin)(config, this.serialize, this.deserialize), + (0, import_middleware_endpoint.getEndpointPlugin)(config, Command.getEndpointParameterInstructions()) + ]; +}).s("DynamoDB_20120810", "ListExports", {}).n("DynamoDBClient", "ListExportsCommand").f(void 0, void 0).ser(se_ListExportsCommand).de(de_ListExportsCommand).build() { + static { + __name(this, "ListExportsCommand"); + } +}; + +// src/commands/ListGlobalTablesCommand.ts + + + +var ListGlobalTablesCommand = class extends import_smithy_client.Command.classBuilder().ep(commonParams).m(function(Command, cs, config, o) { + return [ + (0, import_middleware_serde.getSerdePlugin)(config, this.serialize, this.deserialize), + (0, import_middleware_endpoint.getEndpointPlugin)(config, Command.getEndpointParameterInstructions()) + ]; +}).s("DynamoDB_20120810", "ListGlobalTables", {}).n("DynamoDBClient", "ListGlobalTablesCommand").f(void 0, void 0).ser(se_ListGlobalTablesCommand).de(de_ListGlobalTablesCommand).build() { + static { + __name(this, "ListGlobalTablesCommand"); + } +}; + +// src/commands/ListImportsCommand.ts + + + +var ListImportsCommand = class extends import_smithy_client.Command.classBuilder().ep({ + ...commonParams, + ResourceArn: { type: "contextParams", name: "TableArn" } +}).m(function(Command, cs, config, o) { + return [ + (0, import_middleware_serde.getSerdePlugin)(config, this.serialize, this.deserialize), + (0, import_middleware_endpoint.getEndpointPlugin)(config, Command.getEndpointParameterInstructions()) + ]; +}).s("DynamoDB_20120810", "ListImports", {}).n("DynamoDBClient", "ListImportsCommand").f(void 0, void 
0).ser(se_ListImportsCommand).de(de_ListImportsCommand).build() { + static { + __name(this, "ListImportsCommand"); + } +}; + +// src/commands/ListTablesCommand.ts + + + +var ListTablesCommand = class extends import_smithy_client.Command.classBuilder().ep(commonParams).m(function(Command, cs, config, o) { + return [ + (0, import_middleware_serde.getSerdePlugin)(config, this.serialize, this.deserialize), + (0, import_middleware_endpoint.getEndpointPlugin)(config, Command.getEndpointParameterInstructions()) + ]; +}).s("DynamoDB_20120810", "ListTables", {}).n("DynamoDBClient", "ListTablesCommand").f(void 0, void 0).ser(se_ListTablesCommand).de(de_ListTablesCommand).build() { + static { + __name(this, "ListTablesCommand"); + } +}; + +// src/commands/ListTagsOfResourceCommand.ts + + + +var ListTagsOfResourceCommand = class extends import_smithy_client.Command.classBuilder().ep({ + ...commonParams, + ResourceArn: { type: "contextParams", name: "ResourceArn" } +}).m(function(Command, cs, config, o) { + return [ + (0, import_middleware_serde.getSerdePlugin)(config, this.serialize, this.deserialize), + (0, import_middleware_endpoint.getEndpointPlugin)(config, Command.getEndpointParameterInstructions()) + ]; +}).s("DynamoDB_20120810", "ListTagsOfResource", {}).n("DynamoDBClient", "ListTagsOfResourceCommand").f(void 0, void 0).ser(se_ListTagsOfResourceCommand).de(de_ListTagsOfResourceCommand).build() { + static { + __name(this, "ListTagsOfResourceCommand"); + } +}; + +// src/commands/PutItemCommand.ts + + + +var PutItemCommand = class extends import_smithy_client.Command.classBuilder().ep({ + ...commonParams, + ResourceArn: { type: "contextParams", name: "TableName" } +}).m(function(Command, cs, config, o) { + return [ + (0, import_middleware_serde.getSerdePlugin)(config, this.serialize, this.deserialize), + (0, import_middleware_endpoint.getEndpointPlugin)(config, Command.getEndpointParameterInstructions()) + ]; +}).s("DynamoDB_20120810", "PutItem", {}).n("DynamoDBClient", 
"PutItemCommand").f(void 0, void 0).ser(se_PutItemCommand).de(de_PutItemCommand).build() { + static { + __name(this, "PutItemCommand"); + } +}; + +// src/commands/PutResourcePolicyCommand.ts + + + +var PutResourcePolicyCommand = class extends import_smithy_client.Command.classBuilder().ep({ + ...commonParams, + ResourceArn: { type: "contextParams", name: "ResourceArn" } +}).m(function(Command, cs, config, o) { + return [ + (0, import_middleware_serde.getSerdePlugin)(config, this.serialize, this.deserialize), + (0, import_middleware_endpoint.getEndpointPlugin)(config, Command.getEndpointParameterInstructions()) + ]; +}).s("DynamoDB_20120810", "PutResourcePolicy", {}).n("DynamoDBClient", "PutResourcePolicyCommand").f(void 0, void 0).ser(se_PutResourcePolicyCommand).de(de_PutResourcePolicyCommand).build() { + static { + __name(this, "PutResourcePolicyCommand"); + } +}; + +// src/commands/QueryCommand.ts + + + +var QueryCommand = class extends import_smithy_client.Command.classBuilder().ep({ + ...commonParams, + ResourceArn: { type: "contextParams", name: "TableName" } +}).m(function(Command, cs, config, o) { + return [ + (0, import_middleware_serde.getSerdePlugin)(config, this.serialize, this.deserialize), + (0, import_middleware_endpoint.getEndpointPlugin)(config, Command.getEndpointParameterInstructions()) + ]; +}).s("DynamoDB_20120810", "Query", {}).n("DynamoDBClient", "QueryCommand").f(void 0, void 0).ser(se_QueryCommand).de(de_QueryCommand).build() { + static { + __name(this, "QueryCommand"); + } +}; + +// src/commands/RestoreTableFromBackupCommand.ts + + + +var RestoreTableFromBackupCommand = class extends import_smithy_client.Command.classBuilder().ep({ + ...commonParams, + ResourceArn: { type: "contextParams", name: "TargetTableName" } +}).m(function(Command, cs, config, o) { + return [ + (0, import_middleware_serde.getSerdePlugin)(config, this.serialize, this.deserialize), + (0, import_middleware_endpoint.getEndpointPlugin)(config, 
Command.getEndpointParameterInstructions()) + ]; +}).s("DynamoDB_20120810", "RestoreTableFromBackup", {}).n("DynamoDBClient", "RestoreTableFromBackupCommand").f(void 0, void 0).ser(se_RestoreTableFromBackupCommand).de(de_RestoreTableFromBackupCommand).build() { + static { + __name(this, "RestoreTableFromBackupCommand"); + } +}; + +// src/commands/RestoreTableToPointInTimeCommand.ts + + + +var RestoreTableToPointInTimeCommand = class extends import_smithy_client.Command.classBuilder().ep({ + ...commonParams, + ResourceArn: { type: "contextParams", name: "TargetTableName" } +}).m(function(Command, cs, config, o) { + return [ + (0, import_middleware_serde.getSerdePlugin)(config, this.serialize, this.deserialize), + (0, import_middleware_endpoint.getEndpointPlugin)(config, Command.getEndpointParameterInstructions()) + ]; +}).s("DynamoDB_20120810", "RestoreTableToPointInTime", {}).n("DynamoDBClient", "RestoreTableToPointInTimeCommand").f(void 0, void 0).ser(se_RestoreTableToPointInTimeCommand).de(de_RestoreTableToPointInTimeCommand).build() { + static { + __name(this, "RestoreTableToPointInTimeCommand"); + } +}; + +// src/commands/ScanCommand.ts + + + +var ScanCommand = class extends import_smithy_client.Command.classBuilder().ep({ + ...commonParams, + ResourceArn: { type: "contextParams", name: "TableName" } +}).m(function(Command, cs, config, o) { + return [ + (0, import_middleware_serde.getSerdePlugin)(config, this.serialize, this.deserialize), + (0, import_middleware_endpoint.getEndpointPlugin)(config, Command.getEndpointParameterInstructions()) + ]; +}).s("DynamoDB_20120810", "Scan", {}).n("DynamoDBClient", "ScanCommand").f(void 0, void 0).ser(se_ScanCommand).de(de_ScanCommand).build() { + static { + __name(this, "ScanCommand"); + } +}; + +// src/commands/TagResourceCommand.ts + + + +var TagResourceCommand = class extends import_smithy_client.Command.classBuilder().ep({ + ...commonParams, + ResourceArn: { type: "contextParams", name: "ResourceArn" } 
+}).m(function(Command, cs, config, o) { + return [ + (0, import_middleware_serde.getSerdePlugin)(config, this.serialize, this.deserialize), + (0, import_middleware_endpoint.getEndpointPlugin)(config, Command.getEndpointParameterInstructions()) + ]; +}).s("DynamoDB_20120810", "TagResource", {}).n("DynamoDBClient", "TagResourceCommand").f(void 0, void 0).ser(se_TagResourceCommand).de(de_TagResourceCommand).build() { + static { + __name(this, "TagResourceCommand"); + } +}; + +// src/commands/TransactGetItemsCommand.ts + + + +var TransactGetItemsCommand = class extends import_smithy_client.Command.classBuilder().ep({ + ...commonParams, + ResourceArnList: { + type: "operationContextParams", + get: /* @__PURE__ */ __name((input) => input?.TransactItems?.map((obj) => obj?.Get?.TableName), "get") + } +}).m(function(Command, cs, config, o) { + return [ + (0, import_middleware_serde.getSerdePlugin)(config, this.serialize, this.deserialize), + (0, import_middleware_endpoint.getEndpointPlugin)(config, Command.getEndpointParameterInstructions()) + ]; +}).s("DynamoDB_20120810", "TransactGetItems", {}).n("DynamoDBClient", "TransactGetItemsCommand").f(void 0, void 0).ser(se_TransactGetItemsCommand).de(de_TransactGetItemsCommand).build() { + static { + __name(this, "TransactGetItemsCommand"); + } +}; + +// src/commands/TransactWriteItemsCommand.ts + + + +var TransactWriteItemsCommand = class extends import_smithy_client.Command.classBuilder().ep({ + ...commonParams, + ResourceArnList: { + type: "operationContextParams", + get: /* @__PURE__ */ __name((input) => input?.TransactItems?.map( + (obj) => [obj?.ConditionCheck?.TableName, obj?.Put?.TableName, obj?.Delete?.TableName, obj?.Update?.TableName].filter( + (i) => i + ) + ).flat(), "get") + } +}).m(function(Command, cs, config, o) { + return [ + (0, import_middleware_serde.getSerdePlugin)(config, this.serialize, this.deserialize), + (0, import_middleware_endpoint.getEndpointPlugin)(config, 
Command.getEndpointParameterInstructions()) + ]; +}).s("DynamoDB_20120810", "TransactWriteItems", {}).n("DynamoDBClient", "TransactWriteItemsCommand").f(void 0, void 0).ser(se_TransactWriteItemsCommand).de(de_TransactWriteItemsCommand).build() { + static { + __name(this, "TransactWriteItemsCommand"); + } +}; + +// src/commands/UntagResourceCommand.ts + + + +var UntagResourceCommand = class extends import_smithy_client.Command.classBuilder().ep({ + ...commonParams, + ResourceArn: { type: "contextParams", name: "ResourceArn" } +}).m(function(Command, cs, config, o) { + return [ + (0, import_middleware_serde.getSerdePlugin)(config, this.serialize, this.deserialize), + (0, import_middleware_endpoint.getEndpointPlugin)(config, Command.getEndpointParameterInstructions()) + ]; +}).s("DynamoDB_20120810", "UntagResource", {}).n("DynamoDBClient", "UntagResourceCommand").f(void 0, void 0).ser(se_UntagResourceCommand).de(de_UntagResourceCommand).build() { + static { + __name(this, "UntagResourceCommand"); + } +}; + +// src/commands/UpdateContinuousBackupsCommand.ts + + + +var UpdateContinuousBackupsCommand = class extends import_smithy_client.Command.classBuilder().ep({ + ...commonParams, + ResourceArn: { type: "contextParams", name: "TableName" } +}).m(function(Command, cs, config, o) { + return [ + (0, import_middleware_serde.getSerdePlugin)(config, this.serialize, this.deserialize), + (0, import_middleware_endpoint.getEndpointPlugin)(config, Command.getEndpointParameterInstructions()) + ]; +}).s("DynamoDB_20120810", "UpdateContinuousBackups", {}).n("DynamoDBClient", "UpdateContinuousBackupsCommand").f(void 0, void 0).ser(se_UpdateContinuousBackupsCommand).de(de_UpdateContinuousBackupsCommand).build() { + static { + __name(this, "UpdateContinuousBackupsCommand"); + } +}; + +// src/commands/UpdateContributorInsightsCommand.ts + + + +var UpdateContributorInsightsCommand = class extends import_smithy_client.Command.classBuilder().ep({ + ...commonParams, + ResourceArn: { type: 
"contextParams", name: "TableName" } +}).m(function(Command, cs, config, o) { + return [ + (0, import_middleware_serde.getSerdePlugin)(config, this.serialize, this.deserialize), + (0, import_middleware_endpoint.getEndpointPlugin)(config, Command.getEndpointParameterInstructions()) + ]; +}).s("DynamoDB_20120810", "UpdateContributorInsights", {}).n("DynamoDBClient", "UpdateContributorInsightsCommand").f(void 0, void 0).ser(se_UpdateContributorInsightsCommand).de(de_UpdateContributorInsightsCommand).build() { + static { + __name(this, "UpdateContributorInsightsCommand"); + } +}; + +// src/commands/UpdateGlobalTableCommand.ts + + + +var UpdateGlobalTableCommand = class extends import_smithy_client.Command.classBuilder().ep({ + ...commonParams, + ResourceArn: { type: "contextParams", name: "GlobalTableName" } +}).m(function(Command, cs, config, o) { + return [ + (0, import_middleware_serde.getSerdePlugin)(config, this.serialize, this.deserialize), + (0, import_middleware_endpoint.getEndpointPlugin)(config, Command.getEndpointParameterInstructions()) + ]; +}).s("DynamoDB_20120810", "UpdateGlobalTable", {}).n("DynamoDBClient", "UpdateGlobalTableCommand").f(void 0, void 0).ser(se_UpdateGlobalTableCommand).de(de_UpdateGlobalTableCommand).build() { + static { + __name(this, "UpdateGlobalTableCommand"); + } +}; + +// src/commands/UpdateGlobalTableSettingsCommand.ts + + + +var UpdateGlobalTableSettingsCommand = class extends import_smithy_client.Command.classBuilder().ep({ + ...commonParams, + ResourceArn: { type: "contextParams", name: "GlobalTableName" } +}).m(function(Command, cs, config, o) { + return [ + (0, import_middleware_serde.getSerdePlugin)(config, this.serialize, this.deserialize), + (0, import_middleware_endpoint.getEndpointPlugin)(config, Command.getEndpointParameterInstructions()) + ]; +}).s("DynamoDB_20120810", "UpdateGlobalTableSettings", {}).n("DynamoDBClient", "UpdateGlobalTableSettingsCommand").f(void 0, void 
0).ser(se_UpdateGlobalTableSettingsCommand).de(de_UpdateGlobalTableSettingsCommand).build() { + static { + __name(this, "UpdateGlobalTableSettingsCommand"); + } +}; + +// src/commands/UpdateItemCommand.ts + + + +var UpdateItemCommand = class extends import_smithy_client.Command.classBuilder().ep({ + ...commonParams, + ResourceArn: { type: "contextParams", name: "TableName" } +}).m(function(Command, cs, config, o) { + return [ + (0, import_middleware_serde.getSerdePlugin)(config, this.serialize, this.deserialize), + (0, import_middleware_endpoint.getEndpointPlugin)(config, Command.getEndpointParameterInstructions()) + ]; +}).s("DynamoDB_20120810", "UpdateItem", {}).n("DynamoDBClient", "UpdateItemCommand").f(void 0, void 0).ser(se_UpdateItemCommand).de(de_UpdateItemCommand).build() { + static { + __name(this, "UpdateItemCommand"); + } +}; + +// src/commands/UpdateKinesisStreamingDestinationCommand.ts + + + +var UpdateKinesisStreamingDestinationCommand = class extends import_smithy_client.Command.classBuilder().ep({ + ...commonParams, + ResourceArn: { type: "contextParams", name: "TableName" } +}).m(function(Command, cs, config, o) { + return [ + (0, import_middleware_serde.getSerdePlugin)(config, this.serialize, this.deserialize), + (0, import_middleware_endpoint.getEndpointPlugin)(config, Command.getEndpointParameterInstructions()) + ]; +}).s("DynamoDB_20120810", "UpdateKinesisStreamingDestination", {}).n("DynamoDBClient", "UpdateKinesisStreamingDestinationCommand").f(void 0, void 0).ser(se_UpdateKinesisStreamingDestinationCommand).de(de_UpdateKinesisStreamingDestinationCommand).build() { + static { + __name(this, "UpdateKinesisStreamingDestinationCommand"); + } +}; + +// src/commands/UpdateTableCommand.ts + + + +var UpdateTableCommand = class extends import_smithy_client.Command.classBuilder().ep({ + ...commonParams, + ResourceArn: { type: "contextParams", name: "TableName" } +}).m(function(Command, cs, config, o) { + return [ + (0, 
import_middleware_serde.getSerdePlugin)(config, this.serialize, this.deserialize), + (0, import_middleware_endpoint.getEndpointPlugin)(config, Command.getEndpointParameterInstructions()) + ]; +}).s("DynamoDB_20120810", "UpdateTable", {}).n("DynamoDBClient", "UpdateTableCommand").f(void 0, void 0).ser(se_UpdateTableCommand).de(de_UpdateTableCommand).build() { + static { + __name(this, "UpdateTableCommand"); + } +}; + +// src/commands/UpdateTableReplicaAutoScalingCommand.ts + + + +var UpdateTableReplicaAutoScalingCommand = class extends import_smithy_client.Command.classBuilder().ep({ + ...commonParams, + ResourceArn: { type: "contextParams", name: "TableName" } +}).m(function(Command, cs, config, o) { + return [ + (0, import_middleware_serde.getSerdePlugin)(config, this.serialize, this.deserialize), + (0, import_middleware_endpoint.getEndpointPlugin)(config, Command.getEndpointParameterInstructions()) + ]; +}).s("DynamoDB_20120810", "UpdateTableReplicaAutoScaling", {}).n("DynamoDBClient", "UpdateTableReplicaAutoScalingCommand").f(void 0, void 0).ser(se_UpdateTableReplicaAutoScalingCommand).de(de_UpdateTableReplicaAutoScalingCommand).build() { + static { + __name(this, "UpdateTableReplicaAutoScalingCommand"); + } +}; + +// src/commands/UpdateTimeToLiveCommand.ts + + + +var UpdateTimeToLiveCommand = class extends import_smithy_client.Command.classBuilder().ep({ + ...commonParams, + ResourceArn: { type: "contextParams", name: "TableName" } +}).m(function(Command, cs, config, o) { + return [ + (0, import_middleware_serde.getSerdePlugin)(config, this.serialize, this.deserialize), + (0, import_middleware_endpoint.getEndpointPlugin)(config, Command.getEndpointParameterInstructions()) + ]; +}).s("DynamoDB_20120810", "UpdateTimeToLive", {}).n("DynamoDBClient", "UpdateTimeToLiveCommand").f(void 0, void 0).ser(se_UpdateTimeToLiveCommand).de(de_UpdateTimeToLiveCommand).build() { + static { + __name(this, "UpdateTimeToLiveCommand"); + } +}; + +// src/DynamoDB.ts +var commands = 
{ + BatchExecuteStatementCommand, + BatchGetItemCommand, + BatchWriteItemCommand, + CreateBackupCommand, + CreateGlobalTableCommand, + CreateTableCommand, + DeleteBackupCommand, + DeleteItemCommand, + DeleteResourcePolicyCommand, + DeleteTableCommand, + DescribeBackupCommand, + DescribeContinuousBackupsCommand, + DescribeContributorInsightsCommand, + DescribeEndpointsCommand, + DescribeExportCommand, + DescribeGlobalTableCommand, + DescribeGlobalTableSettingsCommand, + DescribeImportCommand, + DescribeKinesisStreamingDestinationCommand, + DescribeLimitsCommand, + DescribeTableCommand, + DescribeTableReplicaAutoScalingCommand, + DescribeTimeToLiveCommand, + DisableKinesisStreamingDestinationCommand, + EnableKinesisStreamingDestinationCommand, + ExecuteStatementCommand, + ExecuteTransactionCommand, + ExportTableToPointInTimeCommand, + GetItemCommand, + GetResourcePolicyCommand, + ImportTableCommand, + ListBackupsCommand, + ListContributorInsightsCommand, + ListExportsCommand, + ListGlobalTablesCommand, + ListImportsCommand, + ListTablesCommand, + ListTagsOfResourceCommand, + PutItemCommand, + PutResourcePolicyCommand, + QueryCommand, + RestoreTableFromBackupCommand, + RestoreTableToPointInTimeCommand, + ScanCommand, + TagResourceCommand, + TransactGetItemsCommand, + TransactWriteItemsCommand, + UntagResourceCommand, + UpdateContinuousBackupsCommand, + UpdateContributorInsightsCommand, + UpdateGlobalTableCommand, + UpdateGlobalTableSettingsCommand, + UpdateItemCommand, + UpdateKinesisStreamingDestinationCommand, + UpdateTableCommand, + UpdateTableReplicaAutoScalingCommand, + UpdateTimeToLiveCommand +}; +var DynamoDB = class extends DynamoDBClient { + static { + __name(this, "DynamoDB"); + } +}; +(0, import_smithy_client.createAggregatedClient)(commands, DynamoDB); + +// src/pagination/ListContributorInsightsPaginator.ts +var import_core3 = require("@smithy/core"); +var paginateListContributorInsights = (0, import_core3.createPaginator)(DynamoDBClient, 
ListContributorInsightsCommand, "NextToken", "NextToken", "MaxResults"); + +// src/pagination/ListExportsPaginator.ts +var import_core4 = require("@smithy/core"); +var paginateListExports = (0, import_core4.createPaginator)(DynamoDBClient, ListExportsCommand, "NextToken", "NextToken", "MaxResults"); + +// src/pagination/ListImportsPaginator.ts +var import_core5 = require("@smithy/core"); +var paginateListImports = (0, import_core5.createPaginator)(DynamoDBClient, ListImportsCommand, "NextToken", "NextToken", "PageSize"); + +// src/pagination/ListTablesPaginator.ts +var import_core6 = require("@smithy/core"); +var paginateListTables = (0, import_core6.createPaginator)(DynamoDBClient, ListTablesCommand, "ExclusiveStartTableName", "LastEvaluatedTableName", "Limit"); + +// src/pagination/QueryPaginator.ts +var import_core7 = require("@smithy/core"); +var paginateQuery = (0, import_core7.createPaginator)(DynamoDBClient, QueryCommand, "ExclusiveStartKey", "LastEvaluatedKey", "Limit"); + +// src/pagination/ScanPaginator.ts +var import_core8 = require("@smithy/core"); +var paginateScan = (0, import_core8.createPaginator)(DynamoDBClient, ScanCommand, "ExclusiveStartKey", "LastEvaluatedKey", "Limit"); + +// src/waiters/waitForTableExists.ts +var import_util_waiter = require("@smithy/util-waiter"); +var checkState = /* @__PURE__ */ __name(async (client, input) => { + let reason; + try { + const result = await client.send(new DescribeTableCommand(input)); + reason = result; + try { + const returnComparator = /* @__PURE__ */ __name(() => { + return result.Table.TableStatus; + }, "returnComparator"); + if (returnComparator() === "ACTIVE") { + return { state: import_util_waiter.WaiterState.SUCCESS, reason }; + } + } catch (e) { + } + } catch (exception) { + reason = exception; + if (exception.name && exception.name == "ResourceNotFoundException") { + return { state: import_util_waiter.WaiterState.RETRY, reason }; + } + } + return { state: import_util_waiter.WaiterState.RETRY, 
reason }; +}, "checkState"); +var waitForTableExists = /* @__PURE__ */ __name(async (params, input) => { + const serviceDefaults = { minDelay: 20, maxDelay: 120 }; + return (0, import_util_waiter.createWaiter)({ ...serviceDefaults, ...params }, input, checkState); +}, "waitForTableExists"); +var waitUntilTableExists = /* @__PURE__ */ __name(async (params, input) => { + const serviceDefaults = { minDelay: 20, maxDelay: 120 }; + const result = await (0, import_util_waiter.createWaiter)({ ...serviceDefaults, ...params }, input, checkState); + return (0, import_util_waiter.checkExceptions)(result); +}, "waitUntilTableExists"); + +// src/waiters/waitForTableNotExists.ts + +var checkState2 = /* @__PURE__ */ __name(async (client, input) => { + let reason; + try { + const result = await client.send(new DescribeTableCommand(input)); + reason = result; + } catch (exception) { + reason = exception; + if (exception.name && exception.name == "ResourceNotFoundException") { + return { state: import_util_waiter.WaiterState.SUCCESS, reason }; + } + } + return { state: import_util_waiter.WaiterState.RETRY, reason }; +}, "checkState"); +var waitForTableNotExists = /* @__PURE__ */ __name(async (params, input) => { + const serviceDefaults = { minDelay: 20, maxDelay: 120 }; + return (0, import_util_waiter.createWaiter)({ ...serviceDefaults, ...params }, input, checkState2); +}, "waitForTableNotExists"); +var waitUntilTableNotExists = /* @__PURE__ */ __name(async (params, input) => { + const serviceDefaults = { minDelay: 20, maxDelay: 120 }; + const result = await (0, import_util_waiter.createWaiter)({ ...serviceDefaults, ...params }, input, checkState2); + return (0, import_util_waiter.checkExceptions)(result); +}, "waitUntilTableNotExists"); +// Annotate the CommonJS export names for ESM import in node: + +0 && (module.exports = { + DynamoDBServiceException, + __Client, + DynamoDBClient, + DynamoDB, + $Command, + BatchExecuteStatementCommand, + BatchGetItemCommand, + 
BatchWriteItemCommand, + CreateBackupCommand, + CreateGlobalTableCommand, + CreateTableCommand, + DeleteBackupCommand, + DeleteItemCommand, + DeleteResourcePolicyCommand, + DeleteTableCommand, + DescribeBackupCommand, + DescribeContinuousBackupsCommand, + DescribeContributorInsightsCommand, + DescribeEndpointsCommand, + DescribeExportCommand, + DescribeGlobalTableCommand, + DescribeGlobalTableSettingsCommand, + DescribeImportCommand, + DescribeKinesisStreamingDestinationCommand, + DescribeLimitsCommand, + DescribeTableCommand, + DescribeTableReplicaAutoScalingCommand, + DescribeTimeToLiveCommand, + DisableKinesisStreamingDestinationCommand, + EnableKinesisStreamingDestinationCommand, + ExecuteStatementCommand, + ExecuteTransactionCommand, + ExportTableToPointInTimeCommand, + GetItemCommand, + GetResourcePolicyCommand, + ImportTableCommand, + ListBackupsCommand, + ListContributorInsightsCommand, + ListExportsCommand, + ListGlobalTablesCommand, + ListImportsCommand, + ListTablesCommand, + ListTagsOfResourceCommand, + PutItemCommand, + PutResourcePolicyCommand, + QueryCommand, + RestoreTableFromBackupCommand, + RestoreTableToPointInTimeCommand, + ScanCommand, + TagResourceCommand, + TransactGetItemsCommand, + TransactWriteItemsCommand, + UntagResourceCommand, + UpdateContinuousBackupsCommand, + UpdateContributorInsightsCommand, + UpdateGlobalTableCommand, + UpdateGlobalTableSettingsCommand, + UpdateItemCommand, + UpdateKinesisStreamingDestinationCommand, + UpdateTableCommand, + UpdateTableReplicaAutoScalingCommand, + UpdateTimeToLiveCommand, + paginateListContributorInsights, + paginateListExports, + paginateListImports, + paginateListTables, + paginateQuery, + paginateScan, + waitForTableExists, + waitUntilTableExists, + waitForTableNotExists, + waitUntilTableNotExists, + ApproximateCreationDateTimePrecision, + AttributeAction, + ScalarAttributeType, + BackupStatus, + BackupType, + BillingMode, + KeyType, + ProjectionType, + SSEType, + SSEStatus, + StreamViewType, + 
TimeToLiveStatus, + BackupInUseException, + BackupNotFoundException, + BackupTypeFilter, + ReturnConsumedCapacity, + ReturnValuesOnConditionCheckFailure, + BatchStatementErrorCodeEnum, + InternalServerError, + RequestLimitExceeded, + InvalidEndpointException, + ProvisionedThroughputExceededException, + ResourceNotFoundException, + ReturnItemCollectionMetrics, + ItemCollectionSizeLimitExceededException, + ComparisonOperator, + ConditionalOperator, + ContinuousBackupsStatus, + PointInTimeRecoveryStatus, + ContinuousBackupsUnavailableException, + ContributorInsightsAction, + ContributorInsightsStatus, + LimitExceededException, + TableInUseException, + TableNotFoundException, + GlobalTableStatus, + IndexStatus, + ReplicaStatus, + TableClass, + TableStatus, + GlobalTableAlreadyExistsException, + MultiRegionConsistency, + ResourceInUseException, + ReturnValue, + ReplicatedWriteConflictException, + TransactionConflictException, + PolicyNotFoundException, + ExportFormat, + ExportStatus, + ExportType, + ExportViewType, + S3SseAlgorithm, + ExportNotFoundException, + GlobalTableNotFoundException, + ImportStatus, + InputCompressionType, + InputFormat, + ImportNotFoundException, + DestinationStatus, + DuplicateItemException, + IdempotentParameterMismatchException, + TransactionInProgressException, + ExportConflictException, + InvalidExportTimeException, + PointInTimeRecoveryUnavailableException, + ImportConflictException, + Select, + TableAlreadyExistsException, + InvalidRestoreTimeException, + ReplicaAlreadyExistsException, + ReplicaNotFoundException, + IndexNotFoundException, + AttributeValue, + ConditionalCheckFailedException, + TransactionCanceledException +}); + diff --git a/amplify/functions/fetchDocuments/node_modules/@aws-sdk/client-dynamodb/dist-cjs/runtimeConfig.browser.js b/amplify/functions/fetchDocuments/node_modules/@aws-sdk/client-dynamodb/dist-cjs/runtimeConfig.browser.js new file mode 100644 index 0000000..be381dc --- /dev/null +++ 
b/amplify/functions/fetchDocuments/node_modules/@aws-sdk/client-dynamodb/dist-cjs/runtimeConfig.browser.js @@ -0,0 +1,42 @@ +"use strict"; +Object.defineProperty(exports, "__esModule", { value: true }); +exports.getRuntimeConfig = void 0; +const tslib_1 = require("tslib"); +const package_json_1 = tslib_1.__importDefault(require("../package.json")); +const sha256_browser_1 = require("@aws-crypto/sha256-browser"); +const account_id_endpoint_1 = require("@aws-sdk/core/account-id-endpoint"); +const util_user_agent_browser_1 = require("@aws-sdk/util-user-agent-browser"); +const config_resolver_1 = require("@smithy/config-resolver"); +const fetch_http_handler_1 = require("@smithy/fetch-http-handler"); +const invalid_dependency_1 = require("@smithy/invalid-dependency"); +const util_body_length_browser_1 = require("@smithy/util-body-length-browser"); +const util_retry_1 = require("@smithy/util-retry"); +const runtimeConfig_shared_1 = require("./runtimeConfig.shared"); +const smithy_client_1 = require("@smithy/smithy-client"); +const util_defaults_mode_browser_1 = require("@smithy/util-defaults-mode-browser"); +const getRuntimeConfig = (config) => { + const defaultsMode = (0, util_defaults_mode_browser_1.resolveDefaultsModeConfig)(config); + const defaultConfigProvider = () => defaultsMode().then(smithy_client_1.loadConfigsForDefaultMode); + const clientSharedValues = (0, runtimeConfig_shared_1.getRuntimeConfig)(config); + return { + ...clientSharedValues, + ...config, + runtime: "browser", + defaultsMode, + accountIdEndpointMode: config?.accountIdEndpointMode ?? (() => Promise.resolve(account_id_endpoint_1.DEFAULT_ACCOUNT_ID_ENDPOINT_MODE)), + bodyLengthChecker: config?.bodyLengthChecker ?? util_body_length_browser_1.calculateBodyLength, + credentialDefaultProvider: config?.credentialDefaultProvider ?? ((_) => () => Promise.reject(new Error("Credential is missing"))), + defaultUserAgentProvider: config?.defaultUserAgentProvider ?? 
+ (0, util_user_agent_browser_1.createDefaultUserAgentProvider)({ serviceId: clientSharedValues.serviceId, clientVersion: package_json_1.default.version }), + endpointDiscoveryEnabledProvider: config?.endpointDiscoveryEnabledProvider ?? (() => Promise.resolve(undefined)), + maxAttempts: config?.maxAttempts ?? util_retry_1.DEFAULT_MAX_ATTEMPTS, + region: config?.region ?? (0, invalid_dependency_1.invalidProvider)("Region is missing"), + requestHandler: fetch_http_handler_1.FetchHttpHandler.create(config?.requestHandler ?? defaultConfigProvider), + retryMode: config?.retryMode ?? (async () => (await defaultConfigProvider()).retryMode || util_retry_1.DEFAULT_RETRY_MODE), + sha256: config?.sha256 ?? sha256_browser_1.Sha256, + streamCollector: config?.streamCollector ?? fetch_http_handler_1.streamCollector, + useDualstackEndpoint: config?.useDualstackEndpoint ?? (() => Promise.resolve(config_resolver_1.DEFAULT_USE_DUALSTACK_ENDPOINT)), + useFipsEndpoint: config?.useFipsEndpoint ?? (() => Promise.resolve(config_resolver_1.DEFAULT_USE_FIPS_ENDPOINT)), + }; +}; +exports.getRuntimeConfig = getRuntimeConfig; diff --git a/amplify/functions/fetchDocuments/node_modules/@aws-sdk/client-dynamodb/dist-cjs/runtimeConfig.js b/amplify/functions/fetchDocuments/node_modules/@aws-sdk/client-dynamodb/dist-cjs/runtimeConfig.js new file mode 100644 index 0000000..a07d8b8 --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@aws-sdk/client-dynamodb/dist-cjs/runtimeConfig.js @@ -0,0 +1,57 @@ +"use strict"; +Object.defineProperty(exports, "__esModule", { value: true }); +exports.getRuntimeConfig = void 0; +const tslib_1 = require("tslib"); +const package_json_1 = tslib_1.__importDefault(require("../package.json")); +const core_1 = require("@aws-sdk/core"); +const account_id_endpoint_1 = require("@aws-sdk/core/account-id-endpoint"); +const credential_provider_node_1 = require("@aws-sdk/credential-provider-node"); +const middleware_endpoint_discovery_1 = 
require("@aws-sdk/middleware-endpoint-discovery"); +const util_user_agent_node_1 = require("@aws-sdk/util-user-agent-node"); +const config_resolver_1 = require("@smithy/config-resolver"); +const hash_node_1 = require("@smithy/hash-node"); +const middleware_retry_1 = require("@smithy/middleware-retry"); +const node_config_provider_1 = require("@smithy/node-config-provider"); +const node_http_handler_1 = require("@smithy/node-http-handler"); +const util_body_length_node_1 = require("@smithy/util-body-length-node"); +const util_retry_1 = require("@smithy/util-retry"); +const runtimeConfig_shared_1 = require("./runtimeConfig.shared"); +const smithy_client_1 = require("@smithy/smithy-client"); +const util_defaults_mode_node_1 = require("@smithy/util-defaults-mode-node"); +const smithy_client_2 = require("@smithy/smithy-client"); +const getRuntimeConfig = (config) => { + (0, smithy_client_2.emitWarningIfUnsupportedVersion)(process.version); + const defaultsMode = (0, util_defaults_mode_node_1.resolveDefaultsModeConfig)(config); + const defaultConfigProvider = () => defaultsMode().then(smithy_client_1.loadConfigsForDefaultMode); + const clientSharedValues = (0, runtimeConfig_shared_1.getRuntimeConfig)(config); + (0, core_1.emitWarningIfUnsupportedVersion)(process.version); + const profileConfig = { profile: config?.profile }; + return { + ...clientSharedValues, + ...config, + runtime: "node", + defaultsMode, + accountIdEndpointMode: config?.accountIdEndpointMode ?? (0, node_config_provider_1.loadConfig)(account_id_endpoint_1.NODE_ACCOUNT_ID_ENDPOINT_MODE_CONFIG_OPTIONS, profileConfig), + authSchemePreference: config?.authSchemePreference ?? (0, node_config_provider_1.loadConfig)(core_1.NODE_AUTH_SCHEME_PREFERENCE_OPTIONS, profileConfig), + bodyLengthChecker: config?.bodyLengthChecker ?? util_body_length_node_1.calculateBodyLength, + credentialDefaultProvider: config?.credentialDefaultProvider ?? 
credential_provider_node_1.defaultProvider, + defaultUserAgentProvider: config?.defaultUserAgentProvider ?? + (0, util_user_agent_node_1.createDefaultUserAgentProvider)({ serviceId: clientSharedValues.serviceId, clientVersion: package_json_1.default.version }), + endpointDiscoveryEnabledProvider: config?.endpointDiscoveryEnabledProvider ?? (0, node_config_provider_1.loadConfig)(middleware_endpoint_discovery_1.NODE_ENDPOINT_DISCOVERY_CONFIG_OPTIONS, profileConfig), + maxAttempts: config?.maxAttempts ?? (0, node_config_provider_1.loadConfig)(middleware_retry_1.NODE_MAX_ATTEMPT_CONFIG_OPTIONS, config), + region: config?.region ?? + (0, node_config_provider_1.loadConfig)(config_resolver_1.NODE_REGION_CONFIG_OPTIONS, { ...config_resolver_1.NODE_REGION_CONFIG_FILE_OPTIONS, ...profileConfig }), + requestHandler: node_http_handler_1.NodeHttpHandler.create(config?.requestHandler ?? defaultConfigProvider), + retryMode: config?.retryMode ?? + (0, node_config_provider_1.loadConfig)({ + ...middleware_retry_1.NODE_RETRY_MODE_CONFIG_OPTIONS, + default: async () => (await defaultConfigProvider()).retryMode || util_retry_1.DEFAULT_RETRY_MODE, + }, config), + sha256: config?.sha256 ?? hash_node_1.Hash.bind(null, "sha256"), + streamCollector: config?.streamCollector ?? node_http_handler_1.streamCollector, + useDualstackEndpoint: config?.useDualstackEndpoint ?? (0, node_config_provider_1.loadConfig)(config_resolver_1.NODE_USE_DUALSTACK_ENDPOINT_CONFIG_OPTIONS, profileConfig), + useFipsEndpoint: config?.useFipsEndpoint ?? (0, node_config_provider_1.loadConfig)(config_resolver_1.NODE_USE_FIPS_ENDPOINT_CONFIG_OPTIONS, profileConfig), + userAgentAppId: config?.userAgentAppId ?? 
(0, node_config_provider_1.loadConfig)(util_user_agent_node_1.NODE_APP_ID_CONFIG_OPTIONS, profileConfig), + }; +}; +exports.getRuntimeConfig = getRuntimeConfig; diff --git a/amplify/functions/fetchDocuments/node_modules/@aws-sdk/client-dynamodb/dist-cjs/runtimeConfig.native.js b/amplify/functions/fetchDocuments/node_modules/@aws-sdk/client-dynamodb/dist-cjs/runtimeConfig.native.js new file mode 100644 index 0000000..34c5f8e --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@aws-sdk/client-dynamodb/dist-cjs/runtimeConfig.native.js @@ -0,0 +1,15 @@ +"use strict"; +Object.defineProperty(exports, "__esModule", { value: true }); +exports.getRuntimeConfig = void 0; +const sha256_js_1 = require("@aws-crypto/sha256-js"); +const runtimeConfig_browser_1 = require("./runtimeConfig.browser"); +const getRuntimeConfig = (config) => { + const browserDefaults = (0, runtimeConfig_browser_1.getRuntimeConfig)(config); + return { + ...browserDefaults, + ...config, + runtime: "react-native", + sha256: config?.sha256 ?? 
sha256_js_1.Sha256, + }; +}; +exports.getRuntimeConfig = getRuntimeConfig; diff --git a/amplify/functions/fetchDocuments/node_modules/@aws-sdk/client-dynamodb/dist-cjs/runtimeConfig.shared.js b/amplify/functions/fetchDocuments/node_modules/@aws-sdk/client-dynamodb/dist-cjs/runtimeConfig.shared.js new file mode 100644 index 0000000..817ba14 --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@aws-sdk/client-dynamodb/dist-cjs/runtimeConfig.shared.js @@ -0,0 +1,34 @@ +"use strict"; +Object.defineProperty(exports, "__esModule", { value: true }); +exports.getRuntimeConfig = void 0; +const core_1 = require("@aws-sdk/core"); +const smithy_client_1 = require("@smithy/smithy-client"); +const url_parser_1 = require("@smithy/url-parser"); +const util_base64_1 = require("@smithy/util-base64"); +const util_utf8_1 = require("@smithy/util-utf8"); +const httpAuthSchemeProvider_1 = require("./auth/httpAuthSchemeProvider"); +const endpointResolver_1 = require("./endpoint/endpointResolver"); +const getRuntimeConfig = (config) => { + return { + apiVersion: "2012-08-10", + base64Decoder: config?.base64Decoder ?? util_base64_1.fromBase64, + base64Encoder: config?.base64Encoder ?? util_base64_1.toBase64, + disableHostPrefix: config?.disableHostPrefix ?? false, + endpointProvider: config?.endpointProvider ?? endpointResolver_1.defaultEndpointResolver, + extensions: config?.extensions ?? [], + httpAuthSchemeProvider: config?.httpAuthSchemeProvider ?? httpAuthSchemeProvider_1.defaultDynamoDBHttpAuthSchemeProvider, + httpAuthSchemes: config?.httpAuthSchemes ?? [ + { + schemeId: "aws.auth#sigv4", + identityProvider: (ipc) => ipc.getIdentityProvider("aws.auth#sigv4"), + signer: new core_1.AwsSdkSigV4Signer(), + }, + ], + logger: config?.logger ?? new smithy_client_1.NoOpLogger(), + serviceId: config?.serviceId ?? "DynamoDB", + urlParser: config?.urlParser ?? url_parser_1.parseUrl, + utf8Decoder: config?.utf8Decoder ?? util_utf8_1.fromUtf8, + utf8Encoder: config?.utf8Encoder ?? 
util_utf8_1.toUtf8, + }; +}; +exports.getRuntimeConfig = getRuntimeConfig; diff --git a/amplify/functions/fetchDocuments/node_modules/@aws-sdk/client-dynamodb/dist-es/DynamoDB.js b/amplify/functions/fetchDocuments/node_modules/@aws-sdk/client-dynamodb/dist-es/DynamoDB.js new file mode 100644 index 0000000..e7f892c --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@aws-sdk/client-dynamodb/dist-es/DynamoDB.js @@ -0,0 +1,121 @@ +import { createAggregatedClient } from "@smithy/smithy-client"; +import { BatchExecuteStatementCommand, } from "./commands/BatchExecuteStatementCommand"; +import { BatchGetItemCommand, } from "./commands/BatchGetItemCommand"; +import { BatchWriteItemCommand, } from "./commands/BatchWriteItemCommand"; +import { CreateBackupCommand, } from "./commands/CreateBackupCommand"; +import { CreateGlobalTableCommand, } from "./commands/CreateGlobalTableCommand"; +import { CreateTableCommand } from "./commands/CreateTableCommand"; +import { DeleteBackupCommand, } from "./commands/DeleteBackupCommand"; +import { DeleteItemCommand } from "./commands/DeleteItemCommand"; +import { DeleteResourcePolicyCommand, } from "./commands/DeleteResourcePolicyCommand"; +import { DeleteTableCommand } from "./commands/DeleteTableCommand"; +import { DescribeBackupCommand, } from "./commands/DescribeBackupCommand"; +import { DescribeContinuousBackupsCommand, } from "./commands/DescribeContinuousBackupsCommand"; +import { DescribeContributorInsightsCommand, } from "./commands/DescribeContributorInsightsCommand"; +import { DescribeEndpointsCommand, } from "./commands/DescribeEndpointsCommand"; +import { DescribeExportCommand, } from "./commands/DescribeExportCommand"; +import { DescribeGlobalTableCommand, } from "./commands/DescribeGlobalTableCommand"; +import { DescribeGlobalTableSettingsCommand, } from "./commands/DescribeGlobalTableSettingsCommand"; +import { DescribeImportCommand, } from "./commands/DescribeImportCommand"; +import { 
DescribeKinesisStreamingDestinationCommand, } from "./commands/DescribeKinesisStreamingDestinationCommand"; +import { DescribeLimitsCommand, } from "./commands/DescribeLimitsCommand"; +import { DescribeTableCommand, } from "./commands/DescribeTableCommand"; +import { DescribeTableReplicaAutoScalingCommand, } from "./commands/DescribeTableReplicaAutoScalingCommand"; +import { DescribeTimeToLiveCommand, } from "./commands/DescribeTimeToLiveCommand"; +import { DisableKinesisStreamingDestinationCommand, } from "./commands/DisableKinesisStreamingDestinationCommand"; +import { EnableKinesisStreamingDestinationCommand, } from "./commands/EnableKinesisStreamingDestinationCommand"; +import { ExecuteStatementCommand, } from "./commands/ExecuteStatementCommand"; +import { ExecuteTransactionCommand, } from "./commands/ExecuteTransactionCommand"; +import { ExportTableToPointInTimeCommand, } from "./commands/ExportTableToPointInTimeCommand"; +import { GetItemCommand } from "./commands/GetItemCommand"; +import { GetResourcePolicyCommand, } from "./commands/GetResourcePolicyCommand"; +import { ImportTableCommand } from "./commands/ImportTableCommand"; +import { ListBackupsCommand } from "./commands/ListBackupsCommand"; +import { ListContributorInsightsCommand, } from "./commands/ListContributorInsightsCommand"; +import { ListExportsCommand } from "./commands/ListExportsCommand"; +import { ListGlobalTablesCommand, } from "./commands/ListGlobalTablesCommand"; +import { ListImportsCommand } from "./commands/ListImportsCommand"; +import { ListTablesCommand } from "./commands/ListTablesCommand"; +import { ListTagsOfResourceCommand, } from "./commands/ListTagsOfResourceCommand"; +import { PutItemCommand } from "./commands/PutItemCommand"; +import { PutResourcePolicyCommand, } from "./commands/PutResourcePolicyCommand"; +import { QueryCommand } from "./commands/QueryCommand"; +import { RestoreTableFromBackupCommand, } from "./commands/RestoreTableFromBackupCommand"; +import { 
RestoreTableToPointInTimeCommand, } from "./commands/RestoreTableToPointInTimeCommand"; +import { ScanCommand } from "./commands/ScanCommand"; +import { TagResourceCommand } from "./commands/TagResourceCommand"; +import { TransactGetItemsCommand, } from "./commands/TransactGetItemsCommand"; +import { TransactWriteItemsCommand, } from "./commands/TransactWriteItemsCommand"; +import { UntagResourceCommand, } from "./commands/UntagResourceCommand"; +import { UpdateContinuousBackupsCommand, } from "./commands/UpdateContinuousBackupsCommand"; +import { UpdateContributorInsightsCommand, } from "./commands/UpdateContributorInsightsCommand"; +import { UpdateGlobalTableCommand, } from "./commands/UpdateGlobalTableCommand"; +import { UpdateGlobalTableSettingsCommand, } from "./commands/UpdateGlobalTableSettingsCommand"; +import { UpdateItemCommand } from "./commands/UpdateItemCommand"; +import { UpdateKinesisStreamingDestinationCommand, } from "./commands/UpdateKinesisStreamingDestinationCommand"; +import { UpdateTableCommand } from "./commands/UpdateTableCommand"; +import { UpdateTableReplicaAutoScalingCommand, } from "./commands/UpdateTableReplicaAutoScalingCommand"; +import { UpdateTimeToLiveCommand, } from "./commands/UpdateTimeToLiveCommand"; +import { DynamoDBClient } from "./DynamoDBClient"; +const commands = { + BatchExecuteStatementCommand, + BatchGetItemCommand, + BatchWriteItemCommand, + CreateBackupCommand, + CreateGlobalTableCommand, + CreateTableCommand, + DeleteBackupCommand, + DeleteItemCommand, + DeleteResourcePolicyCommand, + DeleteTableCommand, + DescribeBackupCommand, + DescribeContinuousBackupsCommand, + DescribeContributorInsightsCommand, + DescribeEndpointsCommand, + DescribeExportCommand, + DescribeGlobalTableCommand, + DescribeGlobalTableSettingsCommand, + DescribeImportCommand, + DescribeKinesisStreamingDestinationCommand, + DescribeLimitsCommand, + DescribeTableCommand, + DescribeTableReplicaAutoScalingCommand, + DescribeTimeToLiveCommand, + 
DisableKinesisStreamingDestinationCommand, + EnableKinesisStreamingDestinationCommand, + ExecuteStatementCommand, + ExecuteTransactionCommand, + ExportTableToPointInTimeCommand, + GetItemCommand, + GetResourcePolicyCommand, + ImportTableCommand, + ListBackupsCommand, + ListContributorInsightsCommand, + ListExportsCommand, + ListGlobalTablesCommand, + ListImportsCommand, + ListTablesCommand, + ListTagsOfResourceCommand, + PutItemCommand, + PutResourcePolicyCommand, + QueryCommand, + RestoreTableFromBackupCommand, + RestoreTableToPointInTimeCommand, + ScanCommand, + TagResourceCommand, + TransactGetItemsCommand, + TransactWriteItemsCommand, + UntagResourceCommand, + UpdateContinuousBackupsCommand, + UpdateContributorInsightsCommand, + UpdateGlobalTableCommand, + UpdateGlobalTableSettingsCommand, + UpdateItemCommand, + UpdateKinesisStreamingDestinationCommand, + UpdateTableCommand, + UpdateTableReplicaAutoScalingCommand, + UpdateTimeToLiveCommand, +}; +export class DynamoDB extends DynamoDBClient { +} +createAggregatedClient(commands, DynamoDB); diff --git a/amplify/functions/fetchDocuments/node_modules/@aws-sdk/client-dynamodb/dist-es/DynamoDBClient.js b/amplify/functions/fetchDocuments/node_modules/@aws-sdk/client-dynamodb/dist-es/DynamoDBClient.js new file mode 100644 index 0000000..b77d943 --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@aws-sdk/client-dynamodb/dist-es/DynamoDBClient.js @@ -0,0 +1,55 @@ +import { resolveAccountIdEndpointModeConfig, } from "@aws-sdk/core/account-id-endpoint"; +import { resolveEndpointDiscoveryConfig, } from "@aws-sdk/middleware-endpoint-discovery"; +import { getHostHeaderPlugin, resolveHostHeaderConfig, } from "@aws-sdk/middleware-host-header"; +import { getLoggerPlugin } from "@aws-sdk/middleware-logger"; +import { getRecursionDetectionPlugin } from "@aws-sdk/middleware-recursion-detection"; +import { getUserAgentPlugin, resolveUserAgentConfig, } from "@aws-sdk/middleware-user-agent"; +import { 
resolveRegionConfig } from "@smithy/config-resolver"; +import { DefaultIdentityProviderConfig, getHttpAuthSchemeEndpointRuleSetPlugin, getHttpSigningPlugin, } from "@smithy/core"; +import { getContentLengthPlugin } from "@smithy/middleware-content-length"; +import { resolveEndpointConfig } from "@smithy/middleware-endpoint"; +import { getRetryPlugin, resolveRetryConfig } from "@smithy/middleware-retry"; +import { Client as __Client, } from "@smithy/smithy-client"; +import { defaultDynamoDBHttpAuthSchemeParametersProvider, resolveHttpAuthSchemeConfig, } from "./auth/httpAuthSchemeProvider"; +import { DescribeEndpointsCommand, } from "./commands/DescribeEndpointsCommand"; +import { resolveClientEndpointParameters, } from "./endpoint/EndpointParameters"; +import { getRuntimeConfig as __getRuntimeConfig } from "./runtimeConfig"; +import { resolveRuntimeExtensions } from "./runtimeExtensions"; +export { __Client }; +export class DynamoDBClient extends __Client { + config; + constructor(...[configuration]) { + const _config_0 = __getRuntimeConfig(configuration || {}); + super(_config_0); + this.initConfig = _config_0; + const _config_1 = resolveClientEndpointParameters(_config_0); + const _config_2 = resolveAccountIdEndpointModeConfig(_config_1); + const _config_3 = resolveUserAgentConfig(_config_2); + const _config_4 = resolveRetryConfig(_config_3); + const _config_5 = resolveRegionConfig(_config_4); + const _config_6 = resolveHostHeaderConfig(_config_5); + const _config_7 = resolveEndpointConfig(_config_6); + const _config_8 = resolveHttpAuthSchemeConfig(_config_7); + const _config_9 = resolveEndpointDiscoveryConfig(_config_8, { + endpointDiscoveryCommandCtor: DescribeEndpointsCommand, + }); + const _config_10 = resolveRuntimeExtensions(_config_9, configuration?.extensions || []); + this.config = _config_10; + this.middlewareStack.use(getUserAgentPlugin(this.config)); + this.middlewareStack.use(getRetryPlugin(this.config)); + 
this.middlewareStack.use(getContentLengthPlugin(this.config)); + this.middlewareStack.use(getHostHeaderPlugin(this.config)); + this.middlewareStack.use(getLoggerPlugin(this.config)); + this.middlewareStack.use(getRecursionDetectionPlugin(this.config)); + this.middlewareStack.use(getHttpAuthSchemeEndpointRuleSetPlugin(this.config, { + httpAuthSchemeParametersProvider: defaultDynamoDBHttpAuthSchemeParametersProvider, + identityProviderConfigProvider: async (config) => new DefaultIdentityProviderConfig({ + "aws.auth#sigv4": config.credentials, + }), + })); + this.middlewareStack.use(getHttpSigningPlugin(this.config)); + } + destroy() { + super.destroy(); + } +} diff --git a/amplify/functions/fetchDocuments/node_modules/@aws-sdk/client-dynamodb/dist-es/auth/httpAuthExtensionConfiguration.js b/amplify/functions/fetchDocuments/node_modules/@aws-sdk/client-dynamodb/dist-es/auth/httpAuthExtensionConfiguration.js new file mode 100644 index 0000000..2ba1d48 --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@aws-sdk/client-dynamodb/dist-es/auth/httpAuthExtensionConfiguration.js @@ -0,0 +1,38 @@ +export const getHttpAuthExtensionConfiguration = (runtimeConfig) => { + const _httpAuthSchemes = runtimeConfig.httpAuthSchemes; + let _httpAuthSchemeProvider = runtimeConfig.httpAuthSchemeProvider; + let _credentials = runtimeConfig.credentials; + return { + setHttpAuthScheme(httpAuthScheme) { + const index = _httpAuthSchemes.findIndex((scheme) => scheme.schemeId === httpAuthScheme.schemeId); + if (index === -1) { + _httpAuthSchemes.push(httpAuthScheme); + } + else { + _httpAuthSchemes.splice(index, 1, httpAuthScheme); + } + }, + httpAuthSchemes() { + return _httpAuthSchemes; + }, + setHttpAuthSchemeProvider(httpAuthSchemeProvider) { + _httpAuthSchemeProvider = httpAuthSchemeProvider; + }, + httpAuthSchemeProvider() { + return _httpAuthSchemeProvider; + }, + setCredentials(credentials) { + _credentials = credentials; + }, + credentials() { + return _credentials; + }, + 
}; +}; +export const resolveHttpAuthRuntimeConfig = (config) => { + return { + httpAuthSchemes: config.httpAuthSchemes(), + httpAuthSchemeProvider: config.httpAuthSchemeProvider(), + credentials: config.credentials(), + }; +}; diff --git a/amplify/functions/fetchDocuments/node_modules/@aws-sdk/client-dynamodb/dist-es/auth/httpAuthSchemeProvider.js b/amplify/functions/fetchDocuments/node_modules/@aws-sdk/client-dynamodb/dist-es/auth/httpAuthSchemeProvider.js new file mode 100644 index 0000000..6a9e23e --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@aws-sdk/client-dynamodb/dist-es/auth/httpAuthSchemeProvider.js @@ -0,0 +1,41 @@ +import { resolveAwsSdkSigV4Config, } from "@aws-sdk/core"; +import { getSmithyContext, normalizeProvider } from "@smithy/util-middleware"; +export const defaultDynamoDBHttpAuthSchemeParametersProvider = async (config, context, input) => { + return { + operation: getSmithyContext(context).operation, + region: (await normalizeProvider(config.region)()) || + (() => { + throw new Error("expected `region` to be configured for `aws.auth#sigv4`"); + })(), + }; +}; +function createAwsAuthSigv4HttpAuthOption(authParameters) { + return { + schemeId: "aws.auth#sigv4", + signingProperties: { + name: "dynamodb", + region: authParameters.region, + }, + propertiesExtractor: (config, context) => ({ + signingProperties: { + config, + context, + }, + }), + }; +} +export const defaultDynamoDBHttpAuthSchemeProvider = (authParameters) => { + const options = []; + switch (authParameters.operation) { + default: { + options.push(createAwsAuthSigv4HttpAuthOption(authParameters)); + } + } + return options; +}; +export const resolveHttpAuthSchemeConfig = (config) => { + const config_0 = resolveAwsSdkSigV4Config(config); + return Object.assign(config_0, { + authSchemePreference: normalizeProvider(config.authSchemePreference ?? 
[]), + }); +}; diff --git a/amplify/functions/fetchDocuments/node_modules/@aws-sdk/client-dynamodb/dist-es/commands/BatchExecuteStatementCommand.js b/amplify/functions/fetchDocuments/node_modules/@aws-sdk/client-dynamodb/dist-es/commands/BatchExecuteStatementCommand.js new file mode 100644 index 0000000..4cbd251 --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@aws-sdk/client-dynamodb/dist-es/commands/BatchExecuteStatementCommand.js @@ -0,0 +1,22 @@ +import { getEndpointPlugin } from "@smithy/middleware-endpoint"; +import { getSerdePlugin } from "@smithy/middleware-serde"; +import { Command as $Command } from "@smithy/smithy-client"; +import { commonParams } from "../endpoint/EndpointParameters"; +import { de_BatchExecuteStatementCommand, se_BatchExecuteStatementCommand } from "../protocols/Aws_json1_0"; +export { $Command }; +export class BatchExecuteStatementCommand extends $Command + .classBuilder() + .ep(commonParams) + .m(function (Command, cs, config, o) { + return [ + getSerdePlugin(config, this.serialize, this.deserialize), + getEndpointPlugin(config, Command.getEndpointParameterInstructions()), + ]; +}) + .s("DynamoDB_20120810", "BatchExecuteStatement", {}) + .n("DynamoDBClient", "BatchExecuteStatementCommand") + .f(void 0, void 0) + .ser(se_BatchExecuteStatementCommand) + .de(de_BatchExecuteStatementCommand) + .build() { +} diff --git a/amplify/functions/fetchDocuments/node_modules/@aws-sdk/client-dynamodb/dist-es/commands/BatchGetItemCommand.js b/amplify/functions/fetchDocuments/node_modules/@aws-sdk/client-dynamodb/dist-es/commands/BatchGetItemCommand.js new file mode 100644 index 0000000..88dbf81 --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@aws-sdk/client-dynamodb/dist-es/commands/BatchGetItemCommand.js @@ -0,0 +1,25 @@ +import { getEndpointPlugin } from "@smithy/middleware-endpoint"; +import { getSerdePlugin } from "@smithy/middleware-serde"; +import { Command as $Command } from "@smithy/smithy-client"; +import { 
commonParams } from "../endpoint/EndpointParameters"; +import { de_BatchGetItemCommand, se_BatchGetItemCommand } from "../protocols/Aws_json1_0"; +export { $Command }; +export class BatchGetItemCommand extends $Command + .classBuilder() + .ep({ + ...commonParams, + ResourceArnList: { type: "operationContextParams", get: (input) => Object.keys(input?.RequestItems ?? {}) }, +}) + .m(function (Command, cs, config, o) { + return [ + getSerdePlugin(config, this.serialize, this.deserialize), + getEndpointPlugin(config, Command.getEndpointParameterInstructions()), + ]; +}) + .s("DynamoDB_20120810", "BatchGetItem", {}) + .n("DynamoDBClient", "BatchGetItemCommand") + .f(void 0, void 0) + .ser(se_BatchGetItemCommand) + .de(de_BatchGetItemCommand) + .build() { +} diff --git a/amplify/functions/fetchDocuments/node_modules/@aws-sdk/client-dynamodb/dist-es/commands/BatchWriteItemCommand.js b/amplify/functions/fetchDocuments/node_modules/@aws-sdk/client-dynamodb/dist-es/commands/BatchWriteItemCommand.js new file mode 100644 index 0000000..c27df1a --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@aws-sdk/client-dynamodb/dist-es/commands/BatchWriteItemCommand.js @@ -0,0 +1,25 @@ +import { getEndpointPlugin } from "@smithy/middleware-endpoint"; +import { getSerdePlugin } from "@smithy/middleware-serde"; +import { Command as $Command } from "@smithy/smithy-client"; +import { commonParams } from "../endpoint/EndpointParameters"; +import { de_BatchWriteItemCommand, se_BatchWriteItemCommand } from "../protocols/Aws_json1_0"; +export { $Command }; +export class BatchWriteItemCommand extends $Command + .classBuilder() + .ep({ + ...commonParams, + ResourceArnList: { type: "operationContextParams", get: (input) => Object.keys(input?.RequestItems ?? 
{}) }, +}) + .m(function (Command, cs, config, o) { + return [ + getSerdePlugin(config, this.serialize, this.deserialize), + getEndpointPlugin(config, Command.getEndpointParameterInstructions()), + ]; +}) + .s("DynamoDB_20120810", "BatchWriteItem", {}) + .n("DynamoDBClient", "BatchWriteItemCommand") + .f(void 0, void 0) + .ser(se_BatchWriteItemCommand) + .de(de_BatchWriteItemCommand) + .build() { +} diff --git a/amplify/functions/fetchDocuments/node_modules/@aws-sdk/client-dynamodb/dist-es/commands/CreateBackupCommand.js b/amplify/functions/fetchDocuments/node_modules/@aws-sdk/client-dynamodb/dist-es/commands/CreateBackupCommand.js new file mode 100644 index 0000000..c932b5c --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@aws-sdk/client-dynamodb/dist-es/commands/CreateBackupCommand.js @@ -0,0 +1,25 @@ +import { getEndpointPlugin } from "@smithy/middleware-endpoint"; +import { getSerdePlugin } from "@smithy/middleware-serde"; +import { Command as $Command } from "@smithy/smithy-client"; +import { commonParams } from "../endpoint/EndpointParameters"; +import { de_CreateBackupCommand, se_CreateBackupCommand } from "../protocols/Aws_json1_0"; +export { $Command }; +export class CreateBackupCommand extends $Command + .classBuilder() + .ep({ + ...commonParams, + ResourceArn: { type: "contextParams", name: "TableName" }, +}) + .m(function (Command, cs, config, o) { + return [ + getSerdePlugin(config, this.serialize, this.deserialize), + getEndpointPlugin(config, Command.getEndpointParameterInstructions()), + ]; +}) + .s("DynamoDB_20120810", "CreateBackup", {}) + .n("DynamoDBClient", "CreateBackupCommand") + .f(void 0, void 0) + .ser(se_CreateBackupCommand) + .de(de_CreateBackupCommand) + .build() { +} diff --git a/amplify/functions/fetchDocuments/node_modules/@aws-sdk/client-dynamodb/dist-es/commands/CreateGlobalTableCommand.js b/amplify/functions/fetchDocuments/node_modules/@aws-sdk/client-dynamodb/dist-es/commands/CreateGlobalTableCommand.js new file 
mode 100644 index 0000000..49b8b7a --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@aws-sdk/client-dynamodb/dist-es/commands/CreateGlobalTableCommand.js @@ -0,0 +1,25 @@ +import { getEndpointPlugin } from "@smithy/middleware-endpoint"; +import { getSerdePlugin } from "@smithy/middleware-serde"; +import { Command as $Command } from "@smithy/smithy-client"; +import { commonParams } from "../endpoint/EndpointParameters"; +import { de_CreateGlobalTableCommand, se_CreateGlobalTableCommand } from "../protocols/Aws_json1_0"; +export { $Command }; +export class CreateGlobalTableCommand extends $Command + .classBuilder() + .ep({ + ...commonParams, + ResourceArn: { type: "contextParams", name: "GlobalTableName" }, +}) + .m(function (Command, cs, config, o) { + return [ + getSerdePlugin(config, this.serialize, this.deserialize), + getEndpointPlugin(config, Command.getEndpointParameterInstructions()), + ]; +}) + .s("DynamoDB_20120810", "CreateGlobalTable", {}) + .n("DynamoDBClient", "CreateGlobalTableCommand") + .f(void 0, void 0) + .ser(se_CreateGlobalTableCommand) + .de(de_CreateGlobalTableCommand) + .build() { +} diff --git a/amplify/functions/fetchDocuments/node_modules/@aws-sdk/client-dynamodb/dist-es/commands/CreateTableCommand.js b/amplify/functions/fetchDocuments/node_modules/@aws-sdk/client-dynamodb/dist-es/commands/CreateTableCommand.js new file mode 100644 index 0000000..89f3586 --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@aws-sdk/client-dynamodb/dist-es/commands/CreateTableCommand.js @@ -0,0 +1,25 @@ +import { getEndpointPlugin } from "@smithy/middleware-endpoint"; +import { getSerdePlugin } from "@smithy/middleware-serde"; +import { Command as $Command } from "@smithy/smithy-client"; +import { commonParams } from "../endpoint/EndpointParameters"; +import { de_CreateTableCommand, se_CreateTableCommand } from "../protocols/Aws_json1_0"; +export { $Command }; +export class CreateTableCommand extends $Command + .classBuilder() + 
.ep({ + ...commonParams, + ResourceArn: { type: "contextParams", name: "TableName" }, +}) + .m(function (Command, cs, config, o) { + return [ + getSerdePlugin(config, this.serialize, this.deserialize), + getEndpointPlugin(config, Command.getEndpointParameterInstructions()), + ]; +}) + .s("DynamoDB_20120810", "CreateTable", {}) + .n("DynamoDBClient", "CreateTableCommand") + .f(void 0, void 0) + .ser(se_CreateTableCommand) + .de(de_CreateTableCommand) + .build() { +} diff --git a/amplify/functions/fetchDocuments/node_modules/@aws-sdk/client-dynamodb/dist-es/commands/DeleteBackupCommand.js b/amplify/functions/fetchDocuments/node_modules/@aws-sdk/client-dynamodb/dist-es/commands/DeleteBackupCommand.js new file mode 100644 index 0000000..d420225 --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@aws-sdk/client-dynamodb/dist-es/commands/DeleteBackupCommand.js @@ -0,0 +1,25 @@ +import { getEndpointPlugin } from "@smithy/middleware-endpoint"; +import { getSerdePlugin } from "@smithy/middleware-serde"; +import { Command as $Command } from "@smithy/smithy-client"; +import { commonParams } from "../endpoint/EndpointParameters"; +import { de_DeleteBackupCommand, se_DeleteBackupCommand } from "../protocols/Aws_json1_0"; +export { $Command }; +export class DeleteBackupCommand extends $Command + .classBuilder() + .ep({ + ...commonParams, + ResourceArn: { type: "contextParams", name: "BackupArn" }, +}) + .m(function (Command, cs, config, o) { + return [ + getSerdePlugin(config, this.serialize, this.deserialize), + getEndpointPlugin(config, Command.getEndpointParameterInstructions()), + ]; +}) + .s("DynamoDB_20120810", "DeleteBackup", {}) + .n("DynamoDBClient", "DeleteBackupCommand") + .f(void 0, void 0) + .ser(se_DeleteBackupCommand) + .de(de_DeleteBackupCommand) + .build() { +} diff --git a/amplify/functions/fetchDocuments/node_modules/@aws-sdk/client-dynamodb/dist-es/commands/DeleteItemCommand.js 
b/amplify/functions/fetchDocuments/node_modules/@aws-sdk/client-dynamodb/dist-es/commands/DeleteItemCommand.js new file mode 100644 index 0000000..0550355 --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@aws-sdk/client-dynamodb/dist-es/commands/DeleteItemCommand.js @@ -0,0 +1,25 @@ +import { getEndpointPlugin } from "@smithy/middleware-endpoint"; +import { getSerdePlugin } from "@smithy/middleware-serde"; +import { Command as $Command } from "@smithy/smithy-client"; +import { commonParams } from "../endpoint/EndpointParameters"; +import { de_DeleteItemCommand, se_DeleteItemCommand } from "../protocols/Aws_json1_0"; +export { $Command }; +export class DeleteItemCommand extends $Command + .classBuilder() + .ep({ + ...commonParams, + ResourceArn: { type: "contextParams", name: "TableName" }, +}) + .m(function (Command, cs, config, o) { + return [ + getSerdePlugin(config, this.serialize, this.deserialize), + getEndpointPlugin(config, Command.getEndpointParameterInstructions()), + ]; +}) + .s("DynamoDB_20120810", "DeleteItem", {}) + .n("DynamoDBClient", "DeleteItemCommand") + .f(void 0, void 0) + .ser(se_DeleteItemCommand) + .de(de_DeleteItemCommand) + .build() { +} diff --git a/amplify/functions/fetchDocuments/node_modules/@aws-sdk/client-dynamodb/dist-es/commands/DeleteResourcePolicyCommand.js b/amplify/functions/fetchDocuments/node_modules/@aws-sdk/client-dynamodb/dist-es/commands/DeleteResourcePolicyCommand.js new file mode 100644 index 0000000..045379c --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@aws-sdk/client-dynamodb/dist-es/commands/DeleteResourcePolicyCommand.js @@ -0,0 +1,25 @@ +import { getEndpointPlugin } from "@smithy/middleware-endpoint"; +import { getSerdePlugin } from "@smithy/middleware-serde"; +import { Command as $Command } from "@smithy/smithy-client"; +import { commonParams } from "../endpoint/EndpointParameters"; +import { de_DeleteResourcePolicyCommand, se_DeleteResourcePolicyCommand } from 
"../protocols/Aws_json1_0"; +export { $Command }; +export class DeleteResourcePolicyCommand extends $Command + .classBuilder() + .ep({ + ...commonParams, + ResourceArn: { type: "contextParams", name: "ResourceArn" }, +}) + .m(function (Command, cs, config, o) { + return [ + getSerdePlugin(config, this.serialize, this.deserialize), + getEndpointPlugin(config, Command.getEndpointParameterInstructions()), + ]; +}) + .s("DynamoDB_20120810", "DeleteResourcePolicy", {}) + .n("DynamoDBClient", "DeleteResourcePolicyCommand") + .f(void 0, void 0) + .ser(se_DeleteResourcePolicyCommand) + .de(de_DeleteResourcePolicyCommand) + .build() { +} diff --git a/amplify/functions/fetchDocuments/node_modules/@aws-sdk/client-dynamodb/dist-es/commands/DeleteTableCommand.js b/amplify/functions/fetchDocuments/node_modules/@aws-sdk/client-dynamodb/dist-es/commands/DeleteTableCommand.js new file mode 100644 index 0000000..b7792b3 --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@aws-sdk/client-dynamodb/dist-es/commands/DeleteTableCommand.js @@ -0,0 +1,25 @@ +import { getEndpointPlugin } from "@smithy/middleware-endpoint"; +import { getSerdePlugin } from "@smithy/middleware-serde"; +import { Command as $Command } from "@smithy/smithy-client"; +import { commonParams } from "../endpoint/EndpointParameters"; +import { de_DeleteTableCommand, se_DeleteTableCommand } from "../protocols/Aws_json1_0"; +export { $Command }; +export class DeleteTableCommand extends $Command + .classBuilder() + .ep({ + ...commonParams, + ResourceArn: { type: "contextParams", name: "TableName" }, +}) + .m(function (Command, cs, config, o) { + return [ + getSerdePlugin(config, this.serialize, this.deserialize), + getEndpointPlugin(config, Command.getEndpointParameterInstructions()), + ]; +}) + .s("DynamoDB_20120810", "DeleteTable", {}) + .n("DynamoDBClient", "DeleteTableCommand") + .f(void 0, void 0) + .ser(se_DeleteTableCommand) + .de(de_DeleteTableCommand) + .build() { +} diff --git 
a/amplify/functions/fetchDocuments/node_modules/@aws-sdk/client-dynamodb/dist-es/commands/DescribeBackupCommand.js b/amplify/functions/fetchDocuments/node_modules/@aws-sdk/client-dynamodb/dist-es/commands/DescribeBackupCommand.js new file mode 100644 index 0000000..70345e3 --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@aws-sdk/client-dynamodb/dist-es/commands/DescribeBackupCommand.js @@ -0,0 +1,25 @@ +import { getEndpointPlugin } from "@smithy/middleware-endpoint"; +import { getSerdePlugin } from "@smithy/middleware-serde"; +import { Command as $Command } from "@smithy/smithy-client"; +import { commonParams } from "../endpoint/EndpointParameters"; +import { de_DescribeBackupCommand, se_DescribeBackupCommand } from "../protocols/Aws_json1_0"; +export { $Command }; +export class DescribeBackupCommand extends $Command + .classBuilder() + .ep({ + ...commonParams, + ResourceArn: { type: "contextParams", name: "BackupArn" }, +}) + .m(function (Command, cs, config, o) { + return [ + getSerdePlugin(config, this.serialize, this.deserialize), + getEndpointPlugin(config, Command.getEndpointParameterInstructions()), + ]; +}) + .s("DynamoDB_20120810", "DescribeBackup", {}) + .n("DynamoDBClient", "DescribeBackupCommand") + .f(void 0, void 0) + .ser(se_DescribeBackupCommand) + .de(de_DescribeBackupCommand) + .build() { +} diff --git a/amplify/functions/fetchDocuments/node_modules/@aws-sdk/client-dynamodb/dist-es/commands/DescribeContinuousBackupsCommand.js b/amplify/functions/fetchDocuments/node_modules/@aws-sdk/client-dynamodb/dist-es/commands/DescribeContinuousBackupsCommand.js new file mode 100644 index 0000000..1104b60 --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@aws-sdk/client-dynamodb/dist-es/commands/DescribeContinuousBackupsCommand.js @@ -0,0 +1,25 @@ +import { getEndpointPlugin } from "@smithy/middleware-endpoint"; +import { getSerdePlugin } from "@smithy/middleware-serde"; +import { Command as $Command } from 
"@smithy/smithy-client"; +import { commonParams } from "../endpoint/EndpointParameters"; +import { de_DescribeContinuousBackupsCommand, se_DescribeContinuousBackupsCommand } from "../protocols/Aws_json1_0"; +export { $Command }; +export class DescribeContinuousBackupsCommand extends $Command + .classBuilder() + .ep({ + ...commonParams, + ResourceArn: { type: "contextParams", name: "TableName" }, +}) + .m(function (Command, cs, config, o) { + return [ + getSerdePlugin(config, this.serialize, this.deserialize), + getEndpointPlugin(config, Command.getEndpointParameterInstructions()), + ]; +}) + .s("DynamoDB_20120810", "DescribeContinuousBackups", {}) + .n("DynamoDBClient", "DescribeContinuousBackupsCommand") + .f(void 0, void 0) + .ser(se_DescribeContinuousBackupsCommand) + .de(de_DescribeContinuousBackupsCommand) + .build() { +} diff --git a/amplify/functions/fetchDocuments/node_modules/@aws-sdk/client-dynamodb/dist-es/commands/DescribeContributorInsightsCommand.js b/amplify/functions/fetchDocuments/node_modules/@aws-sdk/client-dynamodb/dist-es/commands/DescribeContributorInsightsCommand.js new file mode 100644 index 0000000..18a44c9 --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@aws-sdk/client-dynamodb/dist-es/commands/DescribeContributorInsightsCommand.js @@ -0,0 +1,25 @@ +import { getEndpointPlugin } from "@smithy/middleware-endpoint"; +import { getSerdePlugin } from "@smithy/middleware-serde"; +import { Command as $Command } from "@smithy/smithy-client"; +import { commonParams } from "../endpoint/EndpointParameters"; +import { de_DescribeContributorInsightsCommand, se_DescribeContributorInsightsCommand } from "../protocols/Aws_json1_0"; +export { $Command }; +export class DescribeContributorInsightsCommand extends $Command + .classBuilder() + .ep({ + ...commonParams, + ResourceArn: { type: "contextParams", name: "TableName" }, +}) + .m(function (Command, cs, config, o) { + return [ + getSerdePlugin(config, this.serialize, this.deserialize), + 
getEndpointPlugin(config, Command.getEndpointParameterInstructions()), + ]; +}) + .s("DynamoDB_20120810", "DescribeContributorInsights", {}) + .n("DynamoDBClient", "DescribeContributorInsightsCommand") + .f(void 0, void 0) + .ser(se_DescribeContributorInsightsCommand) + .de(de_DescribeContributorInsightsCommand) + .build() { +} diff --git a/amplify/functions/fetchDocuments/node_modules/@aws-sdk/client-dynamodb/dist-es/commands/DescribeEndpointsCommand.js b/amplify/functions/fetchDocuments/node_modules/@aws-sdk/client-dynamodb/dist-es/commands/DescribeEndpointsCommand.js new file mode 100644 index 0000000..7fa3d01 --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@aws-sdk/client-dynamodb/dist-es/commands/DescribeEndpointsCommand.js @@ -0,0 +1,22 @@ +import { getEndpointPlugin } from "@smithy/middleware-endpoint"; +import { getSerdePlugin } from "@smithy/middleware-serde"; +import { Command as $Command } from "@smithy/smithy-client"; +import { commonParams } from "../endpoint/EndpointParameters"; +import { de_DescribeEndpointsCommand, se_DescribeEndpointsCommand } from "../protocols/Aws_json1_0"; +export { $Command }; +export class DescribeEndpointsCommand extends $Command + .classBuilder() + .ep(commonParams) + .m(function (Command, cs, config, o) { + return [ + getSerdePlugin(config, this.serialize, this.deserialize), + getEndpointPlugin(config, Command.getEndpointParameterInstructions()), + ]; +}) + .s("DynamoDB_20120810", "DescribeEndpoints", {}) + .n("DynamoDBClient", "DescribeEndpointsCommand") + .f(void 0, void 0) + .ser(se_DescribeEndpointsCommand) + .de(de_DescribeEndpointsCommand) + .build() { +} diff --git a/amplify/functions/fetchDocuments/node_modules/@aws-sdk/client-dynamodb/dist-es/commands/DescribeExportCommand.js b/amplify/functions/fetchDocuments/node_modules/@aws-sdk/client-dynamodb/dist-es/commands/DescribeExportCommand.js new file mode 100644 index 0000000..18f9c9a --- /dev/null +++ 
b/amplify/functions/fetchDocuments/node_modules/@aws-sdk/client-dynamodb/dist-es/commands/DescribeExportCommand.js @@ -0,0 +1,25 @@ +import { getEndpointPlugin } from "@smithy/middleware-endpoint"; +import { getSerdePlugin } from "@smithy/middleware-serde"; +import { Command as $Command } from "@smithy/smithy-client"; +import { commonParams } from "../endpoint/EndpointParameters"; +import { de_DescribeExportCommand, se_DescribeExportCommand } from "../protocols/Aws_json1_0"; +export { $Command }; +export class DescribeExportCommand extends $Command + .classBuilder() + .ep({ + ...commonParams, + ResourceArn: { type: "contextParams", name: "ExportArn" }, +}) + .m(function (Command, cs, config, o) { + return [ + getSerdePlugin(config, this.serialize, this.deserialize), + getEndpointPlugin(config, Command.getEndpointParameterInstructions()), + ]; +}) + .s("DynamoDB_20120810", "DescribeExport", {}) + .n("DynamoDBClient", "DescribeExportCommand") + .f(void 0, void 0) + .ser(se_DescribeExportCommand) + .de(de_DescribeExportCommand) + .build() { +} diff --git a/amplify/functions/fetchDocuments/node_modules/@aws-sdk/client-dynamodb/dist-es/commands/DescribeGlobalTableCommand.js b/amplify/functions/fetchDocuments/node_modules/@aws-sdk/client-dynamodb/dist-es/commands/DescribeGlobalTableCommand.js new file mode 100644 index 0000000..87acf97 --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@aws-sdk/client-dynamodb/dist-es/commands/DescribeGlobalTableCommand.js @@ -0,0 +1,25 @@ +import { getEndpointPlugin } from "@smithy/middleware-endpoint"; +import { getSerdePlugin } from "@smithy/middleware-serde"; +import { Command as $Command } from "@smithy/smithy-client"; +import { commonParams } from "../endpoint/EndpointParameters"; +import { de_DescribeGlobalTableCommand, se_DescribeGlobalTableCommand } from "../protocols/Aws_json1_0"; +export { $Command }; +export class DescribeGlobalTableCommand extends $Command + .classBuilder() + .ep({ + ...commonParams, + 
ResourceArn: { type: "contextParams", name: "GlobalTableName" }, +}) + .m(function (Command, cs, config, o) { + return [ + getSerdePlugin(config, this.serialize, this.deserialize), + getEndpointPlugin(config, Command.getEndpointParameterInstructions()), + ]; +}) + .s("DynamoDB_20120810", "DescribeGlobalTable", {}) + .n("DynamoDBClient", "DescribeGlobalTableCommand") + .f(void 0, void 0) + .ser(se_DescribeGlobalTableCommand) + .de(de_DescribeGlobalTableCommand) + .build() { +} diff --git a/amplify/functions/fetchDocuments/node_modules/@aws-sdk/client-dynamodb/dist-es/commands/DescribeGlobalTableSettingsCommand.js b/amplify/functions/fetchDocuments/node_modules/@aws-sdk/client-dynamodb/dist-es/commands/DescribeGlobalTableSettingsCommand.js new file mode 100644 index 0000000..e6497a5 --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@aws-sdk/client-dynamodb/dist-es/commands/DescribeGlobalTableSettingsCommand.js @@ -0,0 +1,25 @@ +import { getEndpointPlugin } from "@smithy/middleware-endpoint"; +import { getSerdePlugin } from "@smithy/middleware-serde"; +import { Command as $Command } from "@smithy/smithy-client"; +import { commonParams } from "../endpoint/EndpointParameters"; +import { de_DescribeGlobalTableSettingsCommand, se_DescribeGlobalTableSettingsCommand } from "../protocols/Aws_json1_0"; +export { $Command }; +export class DescribeGlobalTableSettingsCommand extends $Command + .classBuilder() + .ep({ + ...commonParams, + ResourceArn: { type: "contextParams", name: "GlobalTableName" }, +}) + .m(function (Command, cs, config, o) { + return [ + getSerdePlugin(config, this.serialize, this.deserialize), + getEndpointPlugin(config, Command.getEndpointParameterInstructions()), + ]; +}) + .s("DynamoDB_20120810", "DescribeGlobalTableSettings", {}) + .n("DynamoDBClient", "DescribeGlobalTableSettingsCommand") + .f(void 0, void 0) + .ser(se_DescribeGlobalTableSettingsCommand) + .de(de_DescribeGlobalTableSettingsCommand) + .build() { +} diff --git 
a/amplify/functions/fetchDocuments/node_modules/@aws-sdk/client-dynamodb/dist-es/commands/DescribeImportCommand.js b/amplify/functions/fetchDocuments/node_modules/@aws-sdk/client-dynamodb/dist-es/commands/DescribeImportCommand.js new file mode 100644 index 0000000..2c147ca --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@aws-sdk/client-dynamodb/dist-es/commands/DescribeImportCommand.js @@ -0,0 +1,25 @@ +import { getEndpointPlugin } from "@smithy/middleware-endpoint"; +import { getSerdePlugin } from "@smithy/middleware-serde"; +import { Command as $Command } from "@smithy/smithy-client"; +import { commonParams } from "../endpoint/EndpointParameters"; +import { de_DescribeImportCommand, se_DescribeImportCommand } from "../protocols/Aws_json1_0"; +export { $Command }; +export class DescribeImportCommand extends $Command + .classBuilder() + .ep({ + ...commonParams, + ResourceArn: { type: "contextParams", name: "ImportArn" }, +}) + .m(function (Command, cs, config, o) { + return [ + getSerdePlugin(config, this.serialize, this.deserialize), + getEndpointPlugin(config, Command.getEndpointParameterInstructions()), + ]; +}) + .s("DynamoDB_20120810", "DescribeImport", {}) + .n("DynamoDBClient", "DescribeImportCommand") + .f(void 0, void 0) + .ser(se_DescribeImportCommand) + .de(de_DescribeImportCommand) + .build() { +} diff --git a/amplify/functions/fetchDocuments/node_modules/@aws-sdk/client-dynamodb/dist-es/commands/DescribeKinesisStreamingDestinationCommand.js b/amplify/functions/fetchDocuments/node_modules/@aws-sdk/client-dynamodb/dist-es/commands/DescribeKinesisStreamingDestinationCommand.js new file mode 100644 index 0000000..b2011ae --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@aws-sdk/client-dynamodb/dist-es/commands/DescribeKinesisStreamingDestinationCommand.js @@ -0,0 +1,25 @@ +import { getEndpointPlugin } from "@smithy/middleware-endpoint"; +import { getSerdePlugin } from "@smithy/middleware-serde"; +import { Command as 
$Command } from "@smithy/smithy-client"; +import { commonParams } from "../endpoint/EndpointParameters"; +import { de_DescribeKinesisStreamingDestinationCommand, se_DescribeKinesisStreamingDestinationCommand, } from "../protocols/Aws_json1_0"; +export { $Command }; +export class DescribeKinesisStreamingDestinationCommand extends $Command + .classBuilder() + .ep({ + ...commonParams, + ResourceArn: { type: "contextParams", name: "TableName" }, +}) + .m(function (Command, cs, config, o) { + return [ + getSerdePlugin(config, this.serialize, this.deserialize), + getEndpointPlugin(config, Command.getEndpointParameterInstructions()), + ]; +}) + .s("DynamoDB_20120810", "DescribeKinesisStreamingDestination", {}) + .n("DynamoDBClient", "DescribeKinesisStreamingDestinationCommand") + .f(void 0, void 0) + .ser(se_DescribeKinesisStreamingDestinationCommand) + .de(de_DescribeKinesisStreamingDestinationCommand) + .build() { +} diff --git a/amplify/functions/fetchDocuments/node_modules/@aws-sdk/client-dynamodb/dist-es/commands/DescribeLimitsCommand.js b/amplify/functions/fetchDocuments/node_modules/@aws-sdk/client-dynamodb/dist-es/commands/DescribeLimitsCommand.js new file mode 100644 index 0000000..38a0fcc --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@aws-sdk/client-dynamodb/dist-es/commands/DescribeLimitsCommand.js @@ -0,0 +1,22 @@ +import { getEndpointPlugin } from "@smithy/middleware-endpoint"; +import { getSerdePlugin } from "@smithy/middleware-serde"; +import { Command as $Command } from "@smithy/smithy-client"; +import { commonParams } from "../endpoint/EndpointParameters"; +import { de_DescribeLimitsCommand, se_DescribeLimitsCommand } from "../protocols/Aws_json1_0"; +export { $Command }; +export class DescribeLimitsCommand extends $Command + .classBuilder() + .ep(commonParams) + .m(function (Command, cs, config, o) { + return [ + getSerdePlugin(config, this.serialize, this.deserialize), + getEndpointPlugin(config, 
Command.getEndpointParameterInstructions()), + ]; +}) + .s("DynamoDB_20120810", "DescribeLimits", {}) + .n("DynamoDBClient", "DescribeLimitsCommand") + .f(void 0, void 0) + .ser(se_DescribeLimitsCommand) + .de(de_DescribeLimitsCommand) + .build() { +} diff --git a/amplify/functions/fetchDocuments/node_modules/@aws-sdk/client-dynamodb/dist-es/commands/DescribeTableCommand.js b/amplify/functions/fetchDocuments/node_modules/@aws-sdk/client-dynamodb/dist-es/commands/DescribeTableCommand.js new file mode 100644 index 0000000..b2a3ddf --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@aws-sdk/client-dynamodb/dist-es/commands/DescribeTableCommand.js @@ -0,0 +1,25 @@ +import { getEndpointPlugin } from "@smithy/middleware-endpoint"; +import { getSerdePlugin } from "@smithy/middleware-serde"; +import { Command as $Command } from "@smithy/smithy-client"; +import { commonParams } from "../endpoint/EndpointParameters"; +import { de_DescribeTableCommand, se_DescribeTableCommand } from "../protocols/Aws_json1_0"; +export { $Command }; +export class DescribeTableCommand extends $Command + .classBuilder() + .ep({ + ...commonParams, + ResourceArn: { type: "contextParams", name: "TableName" }, +}) + .m(function (Command, cs, config, o) { + return [ + getSerdePlugin(config, this.serialize, this.deserialize), + getEndpointPlugin(config, Command.getEndpointParameterInstructions()), + ]; +}) + .s("DynamoDB_20120810", "DescribeTable", {}) + .n("DynamoDBClient", "DescribeTableCommand") + .f(void 0, void 0) + .ser(se_DescribeTableCommand) + .de(de_DescribeTableCommand) + .build() { +} diff --git a/amplify/functions/fetchDocuments/node_modules/@aws-sdk/client-dynamodb/dist-es/commands/DescribeTableReplicaAutoScalingCommand.js b/amplify/functions/fetchDocuments/node_modules/@aws-sdk/client-dynamodb/dist-es/commands/DescribeTableReplicaAutoScalingCommand.js new file mode 100644 index 0000000..127d173 --- /dev/null +++ 
b/amplify/functions/fetchDocuments/node_modules/@aws-sdk/client-dynamodb/dist-es/commands/DescribeTableReplicaAutoScalingCommand.js @@ -0,0 +1,25 @@ +import { getEndpointPlugin } from "@smithy/middleware-endpoint"; +import { getSerdePlugin } from "@smithy/middleware-serde"; +import { Command as $Command } from "@smithy/smithy-client"; +import { commonParams } from "../endpoint/EndpointParameters"; +import { de_DescribeTableReplicaAutoScalingCommand, se_DescribeTableReplicaAutoScalingCommand, } from "../protocols/Aws_json1_0"; +export { $Command }; +export class DescribeTableReplicaAutoScalingCommand extends $Command + .classBuilder() + .ep({ + ...commonParams, + ResourceArn: { type: "contextParams", name: "TableName" }, +}) + .m(function (Command, cs, config, o) { + return [ + getSerdePlugin(config, this.serialize, this.deserialize), + getEndpointPlugin(config, Command.getEndpointParameterInstructions()), + ]; +}) + .s("DynamoDB_20120810", "DescribeTableReplicaAutoScaling", {}) + .n("DynamoDBClient", "DescribeTableReplicaAutoScalingCommand") + .f(void 0, void 0) + .ser(se_DescribeTableReplicaAutoScalingCommand) + .de(de_DescribeTableReplicaAutoScalingCommand) + .build() { +} diff --git a/amplify/functions/fetchDocuments/node_modules/@aws-sdk/client-dynamodb/dist-es/commands/DescribeTimeToLiveCommand.js b/amplify/functions/fetchDocuments/node_modules/@aws-sdk/client-dynamodb/dist-es/commands/DescribeTimeToLiveCommand.js new file mode 100644 index 0000000..0ae052c --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@aws-sdk/client-dynamodb/dist-es/commands/DescribeTimeToLiveCommand.js @@ -0,0 +1,25 @@ +import { getEndpointPlugin } from "@smithy/middleware-endpoint"; +import { getSerdePlugin } from "@smithy/middleware-serde"; +import { Command as $Command } from "@smithy/smithy-client"; +import { commonParams } from "../endpoint/EndpointParameters"; +import { de_DescribeTimeToLiveCommand, se_DescribeTimeToLiveCommand } from "../protocols/Aws_json1_0"; 
+export { $Command }; +export class DescribeTimeToLiveCommand extends $Command + .classBuilder() + .ep({ + ...commonParams, + ResourceArn: { type: "contextParams", name: "TableName" }, +}) + .m(function (Command, cs, config, o) { + return [ + getSerdePlugin(config, this.serialize, this.deserialize), + getEndpointPlugin(config, Command.getEndpointParameterInstructions()), + ]; +}) + .s("DynamoDB_20120810", "DescribeTimeToLive", {}) + .n("DynamoDBClient", "DescribeTimeToLiveCommand") + .f(void 0, void 0) + .ser(se_DescribeTimeToLiveCommand) + .de(de_DescribeTimeToLiveCommand) + .build() { +} diff --git a/amplify/functions/fetchDocuments/node_modules/@aws-sdk/client-dynamodb/dist-es/commands/DisableKinesisStreamingDestinationCommand.js b/amplify/functions/fetchDocuments/node_modules/@aws-sdk/client-dynamodb/dist-es/commands/DisableKinesisStreamingDestinationCommand.js new file mode 100644 index 0000000..59ac9ad --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@aws-sdk/client-dynamodb/dist-es/commands/DisableKinesisStreamingDestinationCommand.js @@ -0,0 +1,25 @@ +import { getEndpointPlugin } from "@smithy/middleware-endpoint"; +import { getSerdePlugin } from "@smithy/middleware-serde"; +import { Command as $Command } from "@smithy/smithy-client"; +import { commonParams } from "../endpoint/EndpointParameters"; +import { de_DisableKinesisStreamingDestinationCommand, se_DisableKinesisStreamingDestinationCommand, } from "../protocols/Aws_json1_0"; +export { $Command }; +export class DisableKinesisStreamingDestinationCommand extends $Command + .classBuilder() + .ep({ + ...commonParams, + ResourceArn: { type: "contextParams", name: "TableName" }, +}) + .m(function (Command, cs, config, o) { + return [ + getSerdePlugin(config, this.serialize, this.deserialize), + getEndpointPlugin(config, Command.getEndpointParameterInstructions()), + ]; +}) + .s("DynamoDB_20120810", "DisableKinesisStreamingDestination", {}) + .n("DynamoDBClient", 
"DisableKinesisStreamingDestinationCommand") + .f(void 0, void 0) + .ser(se_DisableKinesisStreamingDestinationCommand) + .de(de_DisableKinesisStreamingDestinationCommand) + .build() { +} diff --git a/amplify/functions/fetchDocuments/node_modules/@aws-sdk/client-dynamodb/dist-es/commands/EnableKinesisStreamingDestinationCommand.js b/amplify/functions/fetchDocuments/node_modules/@aws-sdk/client-dynamodb/dist-es/commands/EnableKinesisStreamingDestinationCommand.js new file mode 100644 index 0000000..e96a5f5 --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@aws-sdk/client-dynamodb/dist-es/commands/EnableKinesisStreamingDestinationCommand.js @@ -0,0 +1,25 @@ +import { getEndpointPlugin } from "@smithy/middleware-endpoint"; +import { getSerdePlugin } from "@smithy/middleware-serde"; +import { Command as $Command } from "@smithy/smithy-client"; +import { commonParams } from "../endpoint/EndpointParameters"; +import { de_EnableKinesisStreamingDestinationCommand, se_EnableKinesisStreamingDestinationCommand, } from "../protocols/Aws_json1_0"; +export { $Command }; +export class EnableKinesisStreamingDestinationCommand extends $Command + .classBuilder() + .ep({ + ...commonParams, + ResourceArn: { type: "contextParams", name: "TableName" }, +}) + .m(function (Command, cs, config, o) { + return [ + getSerdePlugin(config, this.serialize, this.deserialize), + getEndpointPlugin(config, Command.getEndpointParameterInstructions()), + ]; +}) + .s("DynamoDB_20120810", "EnableKinesisStreamingDestination", {}) + .n("DynamoDBClient", "EnableKinesisStreamingDestinationCommand") + .f(void 0, void 0) + .ser(se_EnableKinesisStreamingDestinationCommand) + .de(de_EnableKinesisStreamingDestinationCommand) + .build() { +} diff --git a/amplify/functions/fetchDocuments/node_modules/@aws-sdk/client-dynamodb/dist-es/commands/ExecuteStatementCommand.js b/amplify/functions/fetchDocuments/node_modules/@aws-sdk/client-dynamodb/dist-es/commands/ExecuteStatementCommand.js new file mode 
100644 index 0000000..8402c48 --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@aws-sdk/client-dynamodb/dist-es/commands/ExecuteStatementCommand.js @@ -0,0 +1,22 @@ +import { getEndpointPlugin } from "@smithy/middleware-endpoint"; +import { getSerdePlugin } from "@smithy/middleware-serde"; +import { Command as $Command } from "@smithy/smithy-client"; +import { commonParams } from "../endpoint/EndpointParameters"; +import { de_ExecuteStatementCommand, se_ExecuteStatementCommand } from "../protocols/Aws_json1_0"; +export { $Command }; +export class ExecuteStatementCommand extends $Command + .classBuilder() + .ep(commonParams) + .m(function (Command, cs, config, o) { + return [ + getSerdePlugin(config, this.serialize, this.deserialize), + getEndpointPlugin(config, Command.getEndpointParameterInstructions()), + ]; +}) + .s("DynamoDB_20120810", "ExecuteStatement", {}) + .n("DynamoDBClient", "ExecuteStatementCommand") + .f(void 0, void 0) + .ser(se_ExecuteStatementCommand) + .de(de_ExecuteStatementCommand) + .build() { +} diff --git a/amplify/functions/fetchDocuments/node_modules/@aws-sdk/client-dynamodb/dist-es/commands/ExecuteTransactionCommand.js b/amplify/functions/fetchDocuments/node_modules/@aws-sdk/client-dynamodb/dist-es/commands/ExecuteTransactionCommand.js new file mode 100644 index 0000000..2298f6f --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@aws-sdk/client-dynamodb/dist-es/commands/ExecuteTransactionCommand.js @@ -0,0 +1,22 @@ +import { getEndpointPlugin } from "@smithy/middleware-endpoint"; +import { getSerdePlugin } from "@smithy/middleware-serde"; +import { Command as $Command } from "@smithy/smithy-client"; +import { commonParams } from "../endpoint/EndpointParameters"; +import { de_ExecuteTransactionCommand, se_ExecuteTransactionCommand } from "../protocols/Aws_json1_0"; +export { $Command }; +export class ExecuteTransactionCommand extends $Command + .classBuilder() + .ep(commonParams) + .m(function (Command, cs, 
config, o) { + return [ + getSerdePlugin(config, this.serialize, this.deserialize), + getEndpointPlugin(config, Command.getEndpointParameterInstructions()), + ]; +}) + .s("DynamoDB_20120810", "ExecuteTransaction", {}) + .n("DynamoDBClient", "ExecuteTransactionCommand") + .f(void 0, void 0) + .ser(se_ExecuteTransactionCommand) + .de(de_ExecuteTransactionCommand) + .build() { +} diff --git a/amplify/functions/fetchDocuments/node_modules/@aws-sdk/client-dynamodb/dist-es/commands/ExportTableToPointInTimeCommand.js b/amplify/functions/fetchDocuments/node_modules/@aws-sdk/client-dynamodb/dist-es/commands/ExportTableToPointInTimeCommand.js new file mode 100644 index 0000000..7cd72fc --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@aws-sdk/client-dynamodb/dist-es/commands/ExportTableToPointInTimeCommand.js @@ -0,0 +1,25 @@ +import { getEndpointPlugin } from "@smithy/middleware-endpoint"; +import { getSerdePlugin } from "@smithy/middleware-serde"; +import { Command as $Command } from "@smithy/smithy-client"; +import { commonParams } from "../endpoint/EndpointParameters"; +import { de_ExportTableToPointInTimeCommand, se_ExportTableToPointInTimeCommand } from "../protocols/Aws_json1_0"; +export { $Command }; +export class ExportTableToPointInTimeCommand extends $Command + .classBuilder() + .ep({ + ...commonParams, + ResourceArn: { type: "contextParams", name: "TableArn" }, +}) + .m(function (Command, cs, config, o) { + return [ + getSerdePlugin(config, this.serialize, this.deserialize), + getEndpointPlugin(config, Command.getEndpointParameterInstructions()), + ]; +}) + .s("DynamoDB_20120810", "ExportTableToPointInTime", {}) + .n("DynamoDBClient", "ExportTableToPointInTimeCommand") + .f(void 0, void 0) + .ser(se_ExportTableToPointInTimeCommand) + .de(de_ExportTableToPointInTimeCommand) + .build() { +} diff --git a/amplify/functions/fetchDocuments/node_modules/@aws-sdk/client-dynamodb/dist-es/commands/GetItemCommand.js 
b/amplify/functions/fetchDocuments/node_modules/@aws-sdk/client-dynamodb/dist-es/commands/GetItemCommand.js new file mode 100644 index 0000000..9b8e996 --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@aws-sdk/client-dynamodb/dist-es/commands/GetItemCommand.js @@ -0,0 +1,25 @@ +import { getEndpointPlugin } from "@smithy/middleware-endpoint"; +import { getSerdePlugin } from "@smithy/middleware-serde"; +import { Command as $Command } from "@smithy/smithy-client"; +import { commonParams } from "../endpoint/EndpointParameters"; +import { de_GetItemCommand, se_GetItemCommand } from "../protocols/Aws_json1_0"; +export { $Command }; +export class GetItemCommand extends $Command + .classBuilder() + .ep({ + ...commonParams, + ResourceArn: { type: "contextParams", name: "TableName" }, +}) + .m(function (Command, cs, config, o) { + return [ + getSerdePlugin(config, this.serialize, this.deserialize), + getEndpointPlugin(config, Command.getEndpointParameterInstructions()), + ]; +}) + .s("DynamoDB_20120810", "GetItem", {}) + .n("DynamoDBClient", "GetItemCommand") + .f(void 0, void 0) + .ser(se_GetItemCommand) + .de(de_GetItemCommand) + .build() { +} diff --git a/amplify/functions/fetchDocuments/node_modules/@aws-sdk/client-dynamodb/dist-es/commands/GetResourcePolicyCommand.js b/amplify/functions/fetchDocuments/node_modules/@aws-sdk/client-dynamodb/dist-es/commands/GetResourcePolicyCommand.js new file mode 100644 index 0000000..9b67404 --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@aws-sdk/client-dynamodb/dist-es/commands/GetResourcePolicyCommand.js @@ -0,0 +1,25 @@ +import { getEndpointPlugin } from "@smithy/middleware-endpoint"; +import { getSerdePlugin } from "@smithy/middleware-serde"; +import { Command as $Command } from "@smithy/smithy-client"; +import { commonParams } from "../endpoint/EndpointParameters"; +import { de_GetResourcePolicyCommand, se_GetResourcePolicyCommand } from "../protocols/Aws_json1_0"; +export { $Command }; +export 
class GetResourcePolicyCommand extends $Command + .classBuilder() + .ep({ + ...commonParams, + ResourceArn: { type: "contextParams", name: "ResourceArn" }, +}) + .m(function (Command, cs, config, o) { + return [ + getSerdePlugin(config, this.serialize, this.deserialize), + getEndpointPlugin(config, Command.getEndpointParameterInstructions()), + ]; +}) + .s("DynamoDB_20120810", "GetResourcePolicy", {}) + .n("DynamoDBClient", "GetResourcePolicyCommand") + .f(void 0, void 0) + .ser(se_GetResourcePolicyCommand) + .de(de_GetResourcePolicyCommand) + .build() { +} diff --git a/amplify/functions/fetchDocuments/node_modules/@aws-sdk/client-dynamodb/dist-es/commands/ImportTableCommand.js b/amplify/functions/fetchDocuments/node_modules/@aws-sdk/client-dynamodb/dist-es/commands/ImportTableCommand.js new file mode 100644 index 0000000..2f7a05d --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@aws-sdk/client-dynamodb/dist-es/commands/ImportTableCommand.js @@ -0,0 +1,25 @@ +import { getEndpointPlugin } from "@smithy/middleware-endpoint"; +import { getSerdePlugin } from "@smithy/middleware-serde"; +import { Command as $Command } from "@smithy/smithy-client"; +import { commonParams } from "../endpoint/EndpointParameters"; +import { de_ImportTableCommand, se_ImportTableCommand } from "../protocols/Aws_json1_0"; +export { $Command }; +export class ImportTableCommand extends $Command + .classBuilder() + .ep({ + ...commonParams, + ResourceArn: { type: "operationContextParams", get: (input) => input?.TableCreationParameters?.TableName }, +}) + .m(function (Command, cs, config, o) { + return [ + getSerdePlugin(config, this.serialize, this.deserialize), + getEndpointPlugin(config, Command.getEndpointParameterInstructions()), + ]; +}) + .s("DynamoDB_20120810", "ImportTable", {}) + .n("DynamoDBClient", "ImportTableCommand") + .f(void 0, void 0) + .ser(se_ImportTableCommand) + .de(de_ImportTableCommand) + .build() { +} diff --git 
a/amplify/functions/fetchDocuments/node_modules/@aws-sdk/client-dynamodb/dist-es/commands/ListBackupsCommand.js b/amplify/functions/fetchDocuments/node_modules/@aws-sdk/client-dynamodb/dist-es/commands/ListBackupsCommand.js new file mode 100644 index 0000000..8e378a6 --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@aws-sdk/client-dynamodb/dist-es/commands/ListBackupsCommand.js @@ -0,0 +1,25 @@ +import { getEndpointPlugin } from "@smithy/middleware-endpoint"; +import { getSerdePlugin } from "@smithy/middleware-serde"; +import { Command as $Command } from "@smithy/smithy-client"; +import { commonParams } from "../endpoint/EndpointParameters"; +import { de_ListBackupsCommand, se_ListBackupsCommand } from "../protocols/Aws_json1_0"; +export { $Command }; +export class ListBackupsCommand extends $Command + .classBuilder() + .ep({ + ...commonParams, + ResourceArn: { type: "contextParams", name: "TableName" }, +}) + .m(function (Command, cs, config, o) { + return [ + getSerdePlugin(config, this.serialize, this.deserialize), + getEndpointPlugin(config, Command.getEndpointParameterInstructions()), + ]; +}) + .s("DynamoDB_20120810", "ListBackups", {}) + .n("DynamoDBClient", "ListBackupsCommand") + .f(void 0, void 0) + .ser(se_ListBackupsCommand) + .de(de_ListBackupsCommand) + .build() { +} diff --git a/amplify/functions/fetchDocuments/node_modules/@aws-sdk/client-dynamodb/dist-es/commands/ListContributorInsightsCommand.js b/amplify/functions/fetchDocuments/node_modules/@aws-sdk/client-dynamodb/dist-es/commands/ListContributorInsightsCommand.js new file mode 100644 index 0000000..09e5506 --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@aws-sdk/client-dynamodb/dist-es/commands/ListContributorInsightsCommand.js @@ -0,0 +1,25 @@ +import { getEndpointPlugin } from "@smithy/middleware-endpoint"; +import { getSerdePlugin } from "@smithy/middleware-serde"; +import { Command as $Command } from "@smithy/smithy-client"; +import { commonParams } from 
"../endpoint/EndpointParameters"; +import { de_ListContributorInsightsCommand, se_ListContributorInsightsCommand } from "../protocols/Aws_json1_0"; +export { $Command }; +export class ListContributorInsightsCommand extends $Command + .classBuilder() + .ep({ + ...commonParams, + ResourceArn: { type: "contextParams", name: "TableName" }, +}) + .m(function (Command, cs, config, o) { + return [ + getSerdePlugin(config, this.serialize, this.deserialize), + getEndpointPlugin(config, Command.getEndpointParameterInstructions()), + ]; +}) + .s("DynamoDB_20120810", "ListContributorInsights", {}) + .n("DynamoDBClient", "ListContributorInsightsCommand") + .f(void 0, void 0) + .ser(se_ListContributorInsightsCommand) + .de(de_ListContributorInsightsCommand) + .build() { +} diff --git a/amplify/functions/fetchDocuments/node_modules/@aws-sdk/client-dynamodb/dist-es/commands/ListExportsCommand.js b/amplify/functions/fetchDocuments/node_modules/@aws-sdk/client-dynamodb/dist-es/commands/ListExportsCommand.js new file mode 100644 index 0000000..52e6bf0 --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@aws-sdk/client-dynamodb/dist-es/commands/ListExportsCommand.js @@ -0,0 +1,25 @@ +import { getEndpointPlugin } from "@smithy/middleware-endpoint"; +import { getSerdePlugin } from "@smithy/middleware-serde"; +import { Command as $Command } from "@smithy/smithy-client"; +import { commonParams } from "../endpoint/EndpointParameters"; +import { de_ListExportsCommand, se_ListExportsCommand } from "../protocols/Aws_json1_0"; +export { $Command }; +export class ListExportsCommand extends $Command + .classBuilder() + .ep({ + ...commonParams, + ResourceArn: { type: "contextParams", name: "TableArn" }, +}) + .m(function (Command, cs, config, o) { + return [ + getSerdePlugin(config, this.serialize, this.deserialize), + getEndpointPlugin(config, Command.getEndpointParameterInstructions()), + ]; +}) + .s("DynamoDB_20120810", "ListExports", {}) + .n("DynamoDBClient", 
"ListExportsCommand") + .f(void 0, void 0) + .ser(se_ListExportsCommand) + .de(de_ListExportsCommand) + .build() { +} diff --git a/amplify/functions/fetchDocuments/node_modules/@aws-sdk/client-dynamodb/dist-es/commands/ListGlobalTablesCommand.js b/amplify/functions/fetchDocuments/node_modules/@aws-sdk/client-dynamodb/dist-es/commands/ListGlobalTablesCommand.js new file mode 100644 index 0000000..01def1c --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@aws-sdk/client-dynamodb/dist-es/commands/ListGlobalTablesCommand.js @@ -0,0 +1,22 @@ +import { getEndpointPlugin } from "@smithy/middleware-endpoint"; +import { getSerdePlugin } from "@smithy/middleware-serde"; +import { Command as $Command } from "@smithy/smithy-client"; +import { commonParams } from "../endpoint/EndpointParameters"; +import { de_ListGlobalTablesCommand, se_ListGlobalTablesCommand } from "../protocols/Aws_json1_0"; +export { $Command }; +export class ListGlobalTablesCommand extends $Command + .classBuilder() + .ep(commonParams) + .m(function (Command, cs, config, o) { + return [ + getSerdePlugin(config, this.serialize, this.deserialize), + getEndpointPlugin(config, Command.getEndpointParameterInstructions()), + ]; +}) + .s("DynamoDB_20120810", "ListGlobalTables", {}) + .n("DynamoDBClient", "ListGlobalTablesCommand") + .f(void 0, void 0) + .ser(se_ListGlobalTablesCommand) + .de(de_ListGlobalTablesCommand) + .build() { +} diff --git a/amplify/functions/fetchDocuments/node_modules/@aws-sdk/client-dynamodb/dist-es/commands/ListImportsCommand.js b/amplify/functions/fetchDocuments/node_modules/@aws-sdk/client-dynamodb/dist-es/commands/ListImportsCommand.js new file mode 100644 index 0000000..89b1f8b --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@aws-sdk/client-dynamodb/dist-es/commands/ListImportsCommand.js @@ -0,0 +1,25 @@ +import { getEndpointPlugin } from "@smithy/middleware-endpoint"; +import { getSerdePlugin } from "@smithy/middleware-serde"; +import { Command as 
$Command } from "@smithy/smithy-client"; +import { commonParams } from "../endpoint/EndpointParameters"; +import { de_ListImportsCommand, se_ListImportsCommand } from "../protocols/Aws_json1_0"; +export { $Command }; +export class ListImportsCommand extends $Command + .classBuilder() + .ep({ + ...commonParams, + ResourceArn: { type: "contextParams", name: "TableArn" }, +}) + .m(function (Command, cs, config, o) { + return [ + getSerdePlugin(config, this.serialize, this.deserialize), + getEndpointPlugin(config, Command.getEndpointParameterInstructions()), + ]; +}) + .s("DynamoDB_20120810", "ListImports", {}) + .n("DynamoDBClient", "ListImportsCommand") + .f(void 0, void 0) + .ser(se_ListImportsCommand) + .de(de_ListImportsCommand) + .build() { +} diff --git a/amplify/functions/fetchDocuments/node_modules/@aws-sdk/client-dynamodb/dist-es/commands/ListTablesCommand.js b/amplify/functions/fetchDocuments/node_modules/@aws-sdk/client-dynamodb/dist-es/commands/ListTablesCommand.js new file mode 100644 index 0000000..6b299d1 --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@aws-sdk/client-dynamodb/dist-es/commands/ListTablesCommand.js @@ -0,0 +1,22 @@ +import { getEndpointPlugin } from "@smithy/middleware-endpoint"; +import { getSerdePlugin } from "@smithy/middleware-serde"; +import { Command as $Command } from "@smithy/smithy-client"; +import { commonParams } from "../endpoint/EndpointParameters"; +import { de_ListTablesCommand, se_ListTablesCommand } from "../protocols/Aws_json1_0"; +export { $Command }; +export class ListTablesCommand extends $Command + .classBuilder() + .ep(commonParams) + .m(function (Command, cs, config, o) { + return [ + getSerdePlugin(config, this.serialize, this.deserialize), + getEndpointPlugin(config, Command.getEndpointParameterInstructions()), + ]; +}) + .s("DynamoDB_20120810", "ListTables", {}) + .n("DynamoDBClient", "ListTablesCommand") + .f(void 0, void 0) + .ser(se_ListTablesCommand) + .de(de_ListTablesCommand) + .build() 
{ +} diff --git a/amplify/functions/fetchDocuments/node_modules/@aws-sdk/client-dynamodb/dist-es/commands/ListTagsOfResourceCommand.js b/amplify/functions/fetchDocuments/node_modules/@aws-sdk/client-dynamodb/dist-es/commands/ListTagsOfResourceCommand.js new file mode 100644 index 0000000..1e0fb57 --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@aws-sdk/client-dynamodb/dist-es/commands/ListTagsOfResourceCommand.js @@ -0,0 +1,25 @@ +import { getEndpointPlugin } from "@smithy/middleware-endpoint"; +import { getSerdePlugin } from "@smithy/middleware-serde"; +import { Command as $Command } from "@smithy/smithy-client"; +import { commonParams } from "../endpoint/EndpointParameters"; +import { de_ListTagsOfResourceCommand, se_ListTagsOfResourceCommand } from "../protocols/Aws_json1_0"; +export { $Command }; +export class ListTagsOfResourceCommand extends $Command + .classBuilder() + .ep({ + ...commonParams, + ResourceArn: { type: "contextParams", name: "ResourceArn" }, +}) + .m(function (Command, cs, config, o) { + return [ + getSerdePlugin(config, this.serialize, this.deserialize), + getEndpointPlugin(config, Command.getEndpointParameterInstructions()), + ]; +}) + .s("DynamoDB_20120810", "ListTagsOfResource", {}) + .n("DynamoDBClient", "ListTagsOfResourceCommand") + .f(void 0, void 0) + .ser(se_ListTagsOfResourceCommand) + .de(de_ListTagsOfResourceCommand) + .build() { +} diff --git a/amplify/functions/fetchDocuments/node_modules/@aws-sdk/client-dynamodb/dist-es/commands/PutItemCommand.js b/amplify/functions/fetchDocuments/node_modules/@aws-sdk/client-dynamodb/dist-es/commands/PutItemCommand.js new file mode 100644 index 0000000..7d4bef4 --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@aws-sdk/client-dynamodb/dist-es/commands/PutItemCommand.js @@ -0,0 +1,25 @@ +import { getEndpointPlugin } from "@smithy/middleware-endpoint"; +import { getSerdePlugin } from "@smithy/middleware-serde"; +import { Command as $Command } from 
"@smithy/smithy-client"; +import { commonParams } from "../endpoint/EndpointParameters"; +import { de_PutItemCommand, se_PutItemCommand } from "../protocols/Aws_json1_0"; +export { $Command }; +export class PutItemCommand extends $Command + .classBuilder() + .ep({ + ...commonParams, + ResourceArn: { type: "contextParams", name: "TableName" }, +}) + .m(function (Command, cs, config, o) { + return [ + getSerdePlugin(config, this.serialize, this.deserialize), + getEndpointPlugin(config, Command.getEndpointParameterInstructions()), + ]; +}) + .s("DynamoDB_20120810", "PutItem", {}) + .n("DynamoDBClient", "PutItemCommand") + .f(void 0, void 0) + .ser(se_PutItemCommand) + .de(de_PutItemCommand) + .build() { +} diff --git a/amplify/functions/fetchDocuments/node_modules/@aws-sdk/client-dynamodb/dist-es/commands/PutResourcePolicyCommand.js b/amplify/functions/fetchDocuments/node_modules/@aws-sdk/client-dynamodb/dist-es/commands/PutResourcePolicyCommand.js new file mode 100644 index 0000000..d683507 --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@aws-sdk/client-dynamodb/dist-es/commands/PutResourcePolicyCommand.js @@ -0,0 +1,25 @@ +import { getEndpointPlugin } from "@smithy/middleware-endpoint"; +import { getSerdePlugin } from "@smithy/middleware-serde"; +import { Command as $Command } from "@smithy/smithy-client"; +import { commonParams } from "../endpoint/EndpointParameters"; +import { de_PutResourcePolicyCommand, se_PutResourcePolicyCommand } from "../protocols/Aws_json1_0"; +export { $Command }; +export class PutResourcePolicyCommand extends $Command + .classBuilder() + .ep({ + ...commonParams, + ResourceArn: { type: "contextParams", name: "ResourceArn" }, +}) + .m(function (Command, cs, config, o) { + return [ + getSerdePlugin(config, this.serialize, this.deserialize), + getEndpointPlugin(config, Command.getEndpointParameterInstructions()), + ]; +}) + .s("DynamoDB_20120810", "PutResourcePolicy", {}) + .n("DynamoDBClient", "PutResourcePolicyCommand") + 
.f(void 0, void 0) + .ser(se_PutResourcePolicyCommand) + .de(de_PutResourcePolicyCommand) + .build() { +} diff --git a/amplify/functions/fetchDocuments/node_modules/@aws-sdk/client-dynamodb/dist-es/commands/QueryCommand.js b/amplify/functions/fetchDocuments/node_modules/@aws-sdk/client-dynamodb/dist-es/commands/QueryCommand.js new file mode 100644 index 0000000..9ee6441 --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@aws-sdk/client-dynamodb/dist-es/commands/QueryCommand.js @@ -0,0 +1,25 @@ +import { getEndpointPlugin } from "@smithy/middleware-endpoint"; +import { getSerdePlugin } from "@smithy/middleware-serde"; +import { Command as $Command } from "@smithy/smithy-client"; +import { commonParams } from "../endpoint/EndpointParameters"; +import { de_QueryCommand, se_QueryCommand } from "../protocols/Aws_json1_0"; +export { $Command }; +export class QueryCommand extends $Command + .classBuilder() + .ep({ + ...commonParams, + ResourceArn: { type: "contextParams", name: "TableName" }, +}) + .m(function (Command, cs, config, o) { + return [ + getSerdePlugin(config, this.serialize, this.deserialize), + getEndpointPlugin(config, Command.getEndpointParameterInstructions()), + ]; +}) + .s("DynamoDB_20120810", "Query", {}) + .n("DynamoDBClient", "QueryCommand") + .f(void 0, void 0) + .ser(se_QueryCommand) + .de(de_QueryCommand) + .build() { +} diff --git a/amplify/functions/fetchDocuments/node_modules/@aws-sdk/client-dynamodb/dist-es/commands/RestoreTableFromBackupCommand.js b/amplify/functions/fetchDocuments/node_modules/@aws-sdk/client-dynamodb/dist-es/commands/RestoreTableFromBackupCommand.js new file mode 100644 index 0000000..7dd9ba0 --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@aws-sdk/client-dynamodb/dist-es/commands/RestoreTableFromBackupCommand.js @@ -0,0 +1,25 @@ +import { getEndpointPlugin } from "@smithy/middleware-endpoint"; +import { getSerdePlugin } from "@smithy/middleware-serde"; +import { Command as $Command } from 
"@smithy/smithy-client"; +import { commonParams } from "../endpoint/EndpointParameters"; +import { de_RestoreTableFromBackupCommand, se_RestoreTableFromBackupCommand } from "../protocols/Aws_json1_0"; +export { $Command }; +export class RestoreTableFromBackupCommand extends $Command + .classBuilder() + .ep({ + ...commonParams, + ResourceArn: { type: "contextParams", name: "TargetTableName" }, +}) + .m(function (Command, cs, config, o) { + return [ + getSerdePlugin(config, this.serialize, this.deserialize), + getEndpointPlugin(config, Command.getEndpointParameterInstructions()), + ]; +}) + .s("DynamoDB_20120810", "RestoreTableFromBackup", {}) + .n("DynamoDBClient", "RestoreTableFromBackupCommand") + .f(void 0, void 0) + .ser(se_RestoreTableFromBackupCommand) + .de(de_RestoreTableFromBackupCommand) + .build() { +} diff --git a/amplify/functions/fetchDocuments/node_modules/@aws-sdk/client-dynamodb/dist-es/commands/RestoreTableToPointInTimeCommand.js b/amplify/functions/fetchDocuments/node_modules/@aws-sdk/client-dynamodb/dist-es/commands/RestoreTableToPointInTimeCommand.js new file mode 100644 index 0000000..98bbb12 --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@aws-sdk/client-dynamodb/dist-es/commands/RestoreTableToPointInTimeCommand.js @@ -0,0 +1,25 @@ +import { getEndpointPlugin } from "@smithy/middleware-endpoint"; +import { getSerdePlugin } from "@smithy/middleware-serde"; +import { Command as $Command } from "@smithy/smithy-client"; +import { commonParams } from "../endpoint/EndpointParameters"; +import { de_RestoreTableToPointInTimeCommand, se_RestoreTableToPointInTimeCommand } from "../protocols/Aws_json1_0"; +export { $Command }; +export class RestoreTableToPointInTimeCommand extends $Command + .classBuilder() + .ep({ + ...commonParams, + ResourceArn: { type: "contextParams", name: "TargetTableName" }, +}) + .m(function (Command, cs, config, o) { + return [ + getSerdePlugin(config, this.serialize, this.deserialize), + 
getEndpointPlugin(config, Command.getEndpointParameterInstructions()), + ]; +}) + .s("DynamoDB_20120810", "RestoreTableToPointInTime", {}) + .n("DynamoDBClient", "RestoreTableToPointInTimeCommand") + .f(void 0, void 0) + .ser(se_RestoreTableToPointInTimeCommand) + .de(de_RestoreTableToPointInTimeCommand) + .build() { +} diff --git a/amplify/functions/fetchDocuments/node_modules/@aws-sdk/client-dynamodb/dist-es/commands/ScanCommand.js b/amplify/functions/fetchDocuments/node_modules/@aws-sdk/client-dynamodb/dist-es/commands/ScanCommand.js new file mode 100644 index 0000000..66285c2 --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@aws-sdk/client-dynamodb/dist-es/commands/ScanCommand.js @@ -0,0 +1,25 @@ +import { getEndpointPlugin } from "@smithy/middleware-endpoint"; +import { getSerdePlugin } from "@smithy/middleware-serde"; +import { Command as $Command } from "@smithy/smithy-client"; +import { commonParams } from "../endpoint/EndpointParameters"; +import { de_ScanCommand, se_ScanCommand } from "../protocols/Aws_json1_0"; +export { $Command }; +export class ScanCommand extends $Command + .classBuilder() + .ep({ + ...commonParams, + ResourceArn: { type: "contextParams", name: "TableName" }, +}) + .m(function (Command, cs, config, o) { + return [ + getSerdePlugin(config, this.serialize, this.deserialize), + getEndpointPlugin(config, Command.getEndpointParameterInstructions()), + ]; +}) + .s("DynamoDB_20120810", "Scan", {}) + .n("DynamoDBClient", "ScanCommand") + .f(void 0, void 0) + .ser(se_ScanCommand) + .de(de_ScanCommand) + .build() { +} diff --git a/amplify/functions/fetchDocuments/node_modules/@aws-sdk/client-dynamodb/dist-es/commands/TagResourceCommand.js b/amplify/functions/fetchDocuments/node_modules/@aws-sdk/client-dynamodb/dist-es/commands/TagResourceCommand.js new file mode 100644 index 0000000..244c7c2 --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@aws-sdk/client-dynamodb/dist-es/commands/TagResourceCommand.js @@ -0,0 
+1,25 @@ +import { getEndpointPlugin } from "@smithy/middleware-endpoint"; +import { getSerdePlugin } from "@smithy/middleware-serde"; +import { Command as $Command } from "@smithy/smithy-client"; +import { commonParams } from "../endpoint/EndpointParameters"; +import { de_TagResourceCommand, se_TagResourceCommand } from "../protocols/Aws_json1_0"; +export { $Command }; +export class TagResourceCommand extends $Command + .classBuilder() + .ep({ + ...commonParams, + ResourceArn: { type: "contextParams", name: "ResourceArn" }, +}) + .m(function (Command, cs, config, o) { + return [ + getSerdePlugin(config, this.serialize, this.deserialize), + getEndpointPlugin(config, Command.getEndpointParameterInstructions()), + ]; +}) + .s("DynamoDB_20120810", "TagResource", {}) + .n("DynamoDBClient", "TagResourceCommand") + .f(void 0, void 0) + .ser(se_TagResourceCommand) + .de(de_TagResourceCommand) + .build() { +} diff --git a/amplify/functions/fetchDocuments/node_modules/@aws-sdk/client-dynamodb/dist-es/commands/TransactGetItemsCommand.js b/amplify/functions/fetchDocuments/node_modules/@aws-sdk/client-dynamodb/dist-es/commands/TransactGetItemsCommand.js new file mode 100644 index 0000000..82b3738 --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@aws-sdk/client-dynamodb/dist-es/commands/TransactGetItemsCommand.js @@ -0,0 +1,28 @@ +import { getEndpointPlugin } from "@smithy/middleware-endpoint"; +import { getSerdePlugin } from "@smithy/middleware-serde"; +import { Command as $Command } from "@smithy/smithy-client"; +import { commonParams } from "../endpoint/EndpointParameters"; +import { de_TransactGetItemsCommand, se_TransactGetItemsCommand } from "../protocols/Aws_json1_0"; +export { $Command }; +export class TransactGetItemsCommand extends $Command + .classBuilder() + .ep({ + ...commonParams, + ResourceArnList: { + type: "operationContextParams", + get: (input) => input?.TransactItems?.map((obj) => obj?.Get?.TableName), + }, +}) + .m(function (Command, cs, 
config, o) { + return [ + getSerdePlugin(config, this.serialize, this.deserialize), + getEndpointPlugin(config, Command.getEndpointParameterInstructions()), + ]; +}) + .s("DynamoDB_20120810", "TransactGetItems", {}) + .n("DynamoDBClient", "TransactGetItemsCommand") + .f(void 0, void 0) + .ser(se_TransactGetItemsCommand) + .de(de_TransactGetItemsCommand) + .build() { +} diff --git a/amplify/functions/fetchDocuments/node_modules/@aws-sdk/client-dynamodb/dist-es/commands/TransactWriteItemsCommand.js b/amplify/functions/fetchDocuments/node_modules/@aws-sdk/client-dynamodb/dist-es/commands/TransactWriteItemsCommand.js new file mode 100644 index 0000000..86d749b --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@aws-sdk/client-dynamodb/dist-es/commands/TransactWriteItemsCommand.js @@ -0,0 +1,28 @@ +import { getEndpointPlugin } from "@smithy/middleware-endpoint"; +import { getSerdePlugin } from "@smithy/middleware-serde"; +import { Command as $Command } from "@smithy/smithy-client"; +import { commonParams } from "../endpoint/EndpointParameters"; +import { de_TransactWriteItemsCommand, se_TransactWriteItemsCommand } from "../protocols/Aws_json1_0"; +export { $Command }; +export class TransactWriteItemsCommand extends $Command + .classBuilder() + .ep({ + ...commonParams, + ResourceArnList: { + type: "operationContextParams", + get: (input) => input?.TransactItems?.map((obj) => [obj?.ConditionCheck?.TableName, obj?.Put?.TableName, obj?.Delete?.TableName, obj?.Update?.TableName].filter((i) => i)).flat(), + }, +}) + .m(function (Command, cs, config, o) { + return [ + getSerdePlugin(config, this.serialize, this.deserialize), + getEndpointPlugin(config, Command.getEndpointParameterInstructions()), + ]; +}) + .s("DynamoDB_20120810", "TransactWriteItems", {}) + .n("DynamoDBClient", "TransactWriteItemsCommand") + .f(void 0, void 0) + .ser(se_TransactWriteItemsCommand) + .de(de_TransactWriteItemsCommand) + .build() { +} diff --git 
a/amplify/functions/fetchDocuments/node_modules/@aws-sdk/client-dynamodb/dist-es/commands/UntagResourceCommand.js b/amplify/functions/fetchDocuments/node_modules/@aws-sdk/client-dynamodb/dist-es/commands/UntagResourceCommand.js new file mode 100644 index 0000000..849acf1 --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@aws-sdk/client-dynamodb/dist-es/commands/UntagResourceCommand.js @@ -0,0 +1,25 @@ +import { getEndpointPlugin } from "@smithy/middleware-endpoint"; +import { getSerdePlugin } from "@smithy/middleware-serde"; +import { Command as $Command } from "@smithy/smithy-client"; +import { commonParams } from "../endpoint/EndpointParameters"; +import { de_UntagResourceCommand, se_UntagResourceCommand } from "../protocols/Aws_json1_0"; +export { $Command }; +export class UntagResourceCommand extends $Command + .classBuilder() + .ep({ + ...commonParams, + ResourceArn: { type: "contextParams", name: "ResourceArn" }, +}) + .m(function (Command, cs, config, o) { + return [ + getSerdePlugin(config, this.serialize, this.deserialize), + getEndpointPlugin(config, Command.getEndpointParameterInstructions()), + ]; +}) + .s("DynamoDB_20120810", "UntagResource", {}) + .n("DynamoDBClient", "UntagResourceCommand") + .f(void 0, void 0) + .ser(se_UntagResourceCommand) + .de(de_UntagResourceCommand) + .build() { +} diff --git a/amplify/functions/fetchDocuments/node_modules/@aws-sdk/client-dynamodb/dist-es/commands/UpdateContinuousBackupsCommand.js b/amplify/functions/fetchDocuments/node_modules/@aws-sdk/client-dynamodb/dist-es/commands/UpdateContinuousBackupsCommand.js new file mode 100644 index 0000000..753b715 --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@aws-sdk/client-dynamodb/dist-es/commands/UpdateContinuousBackupsCommand.js @@ -0,0 +1,25 @@ +import { getEndpointPlugin } from "@smithy/middleware-endpoint"; +import { getSerdePlugin } from "@smithy/middleware-serde"; +import { Command as $Command } from "@smithy/smithy-client"; +import 
{ commonParams } from "../endpoint/EndpointParameters"; +import { de_UpdateContinuousBackupsCommand, se_UpdateContinuousBackupsCommand } from "../protocols/Aws_json1_0"; +export { $Command }; +export class UpdateContinuousBackupsCommand extends $Command + .classBuilder() + .ep({ + ...commonParams, + ResourceArn: { type: "contextParams", name: "TableName" }, +}) + .m(function (Command, cs, config, o) { + return [ + getSerdePlugin(config, this.serialize, this.deserialize), + getEndpointPlugin(config, Command.getEndpointParameterInstructions()), + ]; +}) + .s("DynamoDB_20120810", "UpdateContinuousBackups", {}) + .n("DynamoDBClient", "UpdateContinuousBackupsCommand") + .f(void 0, void 0) + .ser(se_UpdateContinuousBackupsCommand) + .de(de_UpdateContinuousBackupsCommand) + .build() { +} diff --git a/amplify/functions/fetchDocuments/node_modules/@aws-sdk/client-dynamodb/dist-es/commands/UpdateContributorInsightsCommand.js b/amplify/functions/fetchDocuments/node_modules/@aws-sdk/client-dynamodb/dist-es/commands/UpdateContributorInsightsCommand.js new file mode 100644 index 0000000..8d43ddc --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@aws-sdk/client-dynamodb/dist-es/commands/UpdateContributorInsightsCommand.js @@ -0,0 +1,25 @@ +import { getEndpointPlugin } from "@smithy/middleware-endpoint"; +import { getSerdePlugin } from "@smithy/middleware-serde"; +import { Command as $Command } from "@smithy/smithy-client"; +import { commonParams } from "../endpoint/EndpointParameters"; +import { de_UpdateContributorInsightsCommand, se_UpdateContributorInsightsCommand } from "../protocols/Aws_json1_0"; +export { $Command }; +export class UpdateContributorInsightsCommand extends $Command + .classBuilder() + .ep({ + ...commonParams, + ResourceArn: { type: "contextParams", name: "TableName" }, +}) + .m(function (Command, cs, config, o) { + return [ + getSerdePlugin(config, this.serialize, this.deserialize), + getEndpointPlugin(config, 
Command.getEndpointParameterInstructions()), + ]; +}) + .s("DynamoDB_20120810", "UpdateContributorInsights", {}) + .n("DynamoDBClient", "UpdateContributorInsightsCommand") + .f(void 0, void 0) + .ser(se_UpdateContributorInsightsCommand) + .de(de_UpdateContributorInsightsCommand) + .build() { +} diff --git a/amplify/functions/fetchDocuments/node_modules/@aws-sdk/client-dynamodb/dist-es/commands/UpdateGlobalTableCommand.js b/amplify/functions/fetchDocuments/node_modules/@aws-sdk/client-dynamodb/dist-es/commands/UpdateGlobalTableCommand.js new file mode 100644 index 0000000..c613742 --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@aws-sdk/client-dynamodb/dist-es/commands/UpdateGlobalTableCommand.js @@ -0,0 +1,25 @@ +import { getEndpointPlugin } from "@smithy/middleware-endpoint"; +import { getSerdePlugin } from "@smithy/middleware-serde"; +import { Command as $Command } from "@smithy/smithy-client"; +import { commonParams } from "../endpoint/EndpointParameters"; +import { de_UpdateGlobalTableCommand, se_UpdateGlobalTableCommand } from "../protocols/Aws_json1_0"; +export { $Command }; +export class UpdateGlobalTableCommand extends $Command + .classBuilder() + .ep({ + ...commonParams, + ResourceArn: { type: "contextParams", name: "GlobalTableName" }, +}) + .m(function (Command, cs, config, o) { + return [ + getSerdePlugin(config, this.serialize, this.deserialize), + getEndpointPlugin(config, Command.getEndpointParameterInstructions()), + ]; +}) + .s("DynamoDB_20120810", "UpdateGlobalTable", {}) + .n("DynamoDBClient", "UpdateGlobalTableCommand") + .f(void 0, void 0) + .ser(se_UpdateGlobalTableCommand) + .de(de_UpdateGlobalTableCommand) + .build() { +} diff --git a/amplify/functions/fetchDocuments/node_modules/@aws-sdk/client-dynamodb/dist-es/commands/UpdateGlobalTableSettingsCommand.js b/amplify/functions/fetchDocuments/node_modules/@aws-sdk/client-dynamodb/dist-es/commands/UpdateGlobalTableSettingsCommand.js new file mode 100644 index 0000000..bab8a85 
--- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@aws-sdk/client-dynamodb/dist-es/commands/UpdateGlobalTableSettingsCommand.js @@ -0,0 +1,25 @@ +import { getEndpointPlugin } from "@smithy/middleware-endpoint"; +import { getSerdePlugin } from "@smithy/middleware-serde"; +import { Command as $Command } from "@smithy/smithy-client"; +import { commonParams } from "../endpoint/EndpointParameters"; +import { de_UpdateGlobalTableSettingsCommand, se_UpdateGlobalTableSettingsCommand } from "../protocols/Aws_json1_0"; +export { $Command }; +export class UpdateGlobalTableSettingsCommand extends $Command + .classBuilder() + .ep({ + ...commonParams, + ResourceArn: { type: "contextParams", name: "GlobalTableName" }, +}) + .m(function (Command, cs, config, o) { + return [ + getSerdePlugin(config, this.serialize, this.deserialize), + getEndpointPlugin(config, Command.getEndpointParameterInstructions()), + ]; +}) + .s("DynamoDB_20120810", "UpdateGlobalTableSettings", {}) + .n("DynamoDBClient", "UpdateGlobalTableSettingsCommand") + .f(void 0, void 0) + .ser(se_UpdateGlobalTableSettingsCommand) + .de(de_UpdateGlobalTableSettingsCommand) + .build() { +} diff --git a/amplify/functions/fetchDocuments/node_modules/@aws-sdk/client-dynamodb/dist-es/commands/UpdateItemCommand.js b/amplify/functions/fetchDocuments/node_modules/@aws-sdk/client-dynamodb/dist-es/commands/UpdateItemCommand.js new file mode 100644 index 0000000..1182c0d --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@aws-sdk/client-dynamodb/dist-es/commands/UpdateItemCommand.js @@ -0,0 +1,25 @@ +import { getEndpointPlugin } from "@smithy/middleware-endpoint"; +import { getSerdePlugin } from "@smithy/middleware-serde"; +import { Command as $Command } from "@smithy/smithy-client"; +import { commonParams } from "../endpoint/EndpointParameters"; +import { de_UpdateItemCommand, se_UpdateItemCommand } from "../protocols/Aws_json1_0"; +export { $Command }; +export class UpdateItemCommand extends 
$Command + .classBuilder() + .ep({ + ...commonParams, + ResourceArn: { type: "contextParams", name: "TableName" }, +}) + .m(function (Command, cs, config, o) { + return [ + getSerdePlugin(config, this.serialize, this.deserialize), + getEndpointPlugin(config, Command.getEndpointParameterInstructions()), + ]; +}) + .s("DynamoDB_20120810", "UpdateItem", {}) + .n("DynamoDBClient", "UpdateItemCommand") + .f(void 0, void 0) + .ser(se_UpdateItemCommand) + .de(de_UpdateItemCommand) + .build() { +} diff --git a/amplify/functions/fetchDocuments/node_modules/@aws-sdk/client-dynamodb/dist-es/commands/UpdateKinesisStreamingDestinationCommand.js b/amplify/functions/fetchDocuments/node_modules/@aws-sdk/client-dynamodb/dist-es/commands/UpdateKinesisStreamingDestinationCommand.js new file mode 100644 index 0000000..5f44195 --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@aws-sdk/client-dynamodb/dist-es/commands/UpdateKinesisStreamingDestinationCommand.js @@ -0,0 +1,25 @@ +import { getEndpointPlugin } from "@smithy/middleware-endpoint"; +import { getSerdePlugin } from "@smithy/middleware-serde"; +import { Command as $Command } from "@smithy/smithy-client"; +import { commonParams } from "../endpoint/EndpointParameters"; +import { de_UpdateKinesisStreamingDestinationCommand, se_UpdateKinesisStreamingDestinationCommand, } from "../protocols/Aws_json1_0"; +export { $Command }; +export class UpdateKinesisStreamingDestinationCommand extends $Command + .classBuilder() + .ep({ + ...commonParams, + ResourceArn: { type: "contextParams", name: "TableName" }, +}) + .m(function (Command, cs, config, o) { + return [ + getSerdePlugin(config, this.serialize, this.deserialize), + getEndpointPlugin(config, Command.getEndpointParameterInstructions()), + ]; +}) + .s("DynamoDB_20120810", "UpdateKinesisStreamingDestination", {}) + .n("DynamoDBClient", "UpdateKinesisStreamingDestinationCommand") + .f(void 0, void 0) + .ser(se_UpdateKinesisStreamingDestinationCommand) + 
.de(de_UpdateKinesisStreamingDestinationCommand) + .build() { +} diff --git a/amplify/functions/fetchDocuments/node_modules/@aws-sdk/client-dynamodb/dist-es/commands/UpdateTableCommand.js b/amplify/functions/fetchDocuments/node_modules/@aws-sdk/client-dynamodb/dist-es/commands/UpdateTableCommand.js new file mode 100644 index 0000000..845f3e6 --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@aws-sdk/client-dynamodb/dist-es/commands/UpdateTableCommand.js @@ -0,0 +1,25 @@ +import { getEndpointPlugin } from "@smithy/middleware-endpoint"; +import { getSerdePlugin } from "@smithy/middleware-serde"; +import { Command as $Command } from "@smithy/smithy-client"; +import { commonParams } from "../endpoint/EndpointParameters"; +import { de_UpdateTableCommand, se_UpdateTableCommand } from "../protocols/Aws_json1_0"; +export { $Command }; +export class UpdateTableCommand extends $Command + .classBuilder() + .ep({ + ...commonParams, + ResourceArn: { type: "contextParams", name: "TableName" }, +}) + .m(function (Command, cs, config, o) { + return [ + getSerdePlugin(config, this.serialize, this.deserialize), + getEndpointPlugin(config, Command.getEndpointParameterInstructions()), + ]; +}) + .s("DynamoDB_20120810", "UpdateTable", {}) + .n("DynamoDBClient", "UpdateTableCommand") + .f(void 0, void 0) + .ser(se_UpdateTableCommand) + .de(de_UpdateTableCommand) + .build() { +} diff --git a/amplify/functions/fetchDocuments/node_modules/@aws-sdk/client-dynamodb/dist-es/commands/UpdateTableReplicaAutoScalingCommand.js b/amplify/functions/fetchDocuments/node_modules/@aws-sdk/client-dynamodb/dist-es/commands/UpdateTableReplicaAutoScalingCommand.js new file mode 100644 index 0000000..99fb7f8 --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@aws-sdk/client-dynamodb/dist-es/commands/UpdateTableReplicaAutoScalingCommand.js @@ -0,0 +1,25 @@ +import { getEndpointPlugin } from "@smithy/middleware-endpoint"; +import { getSerdePlugin } from 
"@smithy/middleware-serde"; +import { Command as $Command } from "@smithy/smithy-client"; +import { commonParams } from "../endpoint/EndpointParameters"; +import { de_UpdateTableReplicaAutoScalingCommand, se_UpdateTableReplicaAutoScalingCommand, } from "../protocols/Aws_json1_0"; +export { $Command }; +export class UpdateTableReplicaAutoScalingCommand extends $Command + .classBuilder() + .ep({ + ...commonParams, + ResourceArn: { type: "contextParams", name: "TableName" }, +}) + .m(function (Command, cs, config, o) { + return [ + getSerdePlugin(config, this.serialize, this.deserialize), + getEndpointPlugin(config, Command.getEndpointParameterInstructions()), + ]; +}) + .s("DynamoDB_20120810", "UpdateTableReplicaAutoScaling", {}) + .n("DynamoDBClient", "UpdateTableReplicaAutoScalingCommand") + .f(void 0, void 0) + .ser(se_UpdateTableReplicaAutoScalingCommand) + .de(de_UpdateTableReplicaAutoScalingCommand) + .build() { +} diff --git a/amplify/functions/fetchDocuments/node_modules/@aws-sdk/client-dynamodb/dist-es/commands/UpdateTimeToLiveCommand.js b/amplify/functions/fetchDocuments/node_modules/@aws-sdk/client-dynamodb/dist-es/commands/UpdateTimeToLiveCommand.js new file mode 100644 index 0000000..97200fb --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@aws-sdk/client-dynamodb/dist-es/commands/UpdateTimeToLiveCommand.js @@ -0,0 +1,25 @@ +import { getEndpointPlugin } from "@smithy/middleware-endpoint"; +import { getSerdePlugin } from "@smithy/middleware-serde"; +import { Command as $Command } from "@smithy/smithy-client"; +import { commonParams } from "../endpoint/EndpointParameters"; +import { de_UpdateTimeToLiveCommand, se_UpdateTimeToLiveCommand } from "../protocols/Aws_json1_0"; +export { $Command }; +export class UpdateTimeToLiveCommand extends $Command + .classBuilder() + .ep({ + ...commonParams, + ResourceArn: { type: "contextParams", name: "TableName" }, +}) + .m(function (Command, cs, config, o) { + return [ + getSerdePlugin(config, 
this.serialize, this.deserialize), + getEndpointPlugin(config, Command.getEndpointParameterInstructions()), + ]; +}) + .s("DynamoDB_20120810", "UpdateTimeToLive", {}) + .n("DynamoDBClient", "UpdateTimeToLiveCommand") + .f(void 0, void 0) + .ser(se_UpdateTimeToLiveCommand) + .de(de_UpdateTimeToLiveCommand) + .build() { +} diff --git a/amplify/functions/fetchDocuments/node_modules/@aws-sdk/client-dynamodb/dist-es/commands/index.js b/amplify/functions/fetchDocuments/node_modules/@aws-sdk/client-dynamodb/dist-es/commands/index.js new file mode 100644 index 0000000..a5053a4 --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@aws-sdk/client-dynamodb/dist-es/commands/index.js @@ -0,0 +1,57 @@ +export * from "./BatchExecuteStatementCommand"; +export * from "./BatchGetItemCommand"; +export * from "./BatchWriteItemCommand"; +export * from "./CreateBackupCommand"; +export * from "./CreateGlobalTableCommand"; +export * from "./CreateTableCommand"; +export * from "./DeleteBackupCommand"; +export * from "./DeleteItemCommand"; +export * from "./DeleteResourcePolicyCommand"; +export * from "./DeleteTableCommand"; +export * from "./DescribeBackupCommand"; +export * from "./DescribeContinuousBackupsCommand"; +export * from "./DescribeContributorInsightsCommand"; +export * from "./DescribeEndpointsCommand"; +export * from "./DescribeExportCommand"; +export * from "./DescribeGlobalTableCommand"; +export * from "./DescribeGlobalTableSettingsCommand"; +export * from "./DescribeImportCommand"; +export * from "./DescribeKinesisStreamingDestinationCommand"; +export * from "./DescribeLimitsCommand"; +export * from "./DescribeTableCommand"; +export * from "./DescribeTableReplicaAutoScalingCommand"; +export * from "./DescribeTimeToLiveCommand"; +export * from "./DisableKinesisStreamingDestinationCommand"; +export * from "./EnableKinesisStreamingDestinationCommand"; +export * from "./ExecuteStatementCommand"; +export * from "./ExecuteTransactionCommand"; +export * from 
"./ExportTableToPointInTimeCommand"; +export * from "./GetItemCommand"; +export * from "./GetResourcePolicyCommand"; +export * from "./ImportTableCommand"; +export * from "./ListBackupsCommand"; +export * from "./ListContributorInsightsCommand"; +export * from "./ListExportsCommand"; +export * from "./ListGlobalTablesCommand"; +export * from "./ListImportsCommand"; +export * from "./ListTablesCommand"; +export * from "./ListTagsOfResourceCommand"; +export * from "./PutItemCommand"; +export * from "./PutResourcePolicyCommand"; +export * from "./QueryCommand"; +export * from "./RestoreTableFromBackupCommand"; +export * from "./RestoreTableToPointInTimeCommand"; +export * from "./ScanCommand"; +export * from "./TagResourceCommand"; +export * from "./TransactGetItemsCommand"; +export * from "./TransactWriteItemsCommand"; +export * from "./UntagResourceCommand"; +export * from "./UpdateContinuousBackupsCommand"; +export * from "./UpdateContributorInsightsCommand"; +export * from "./UpdateGlobalTableCommand"; +export * from "./UpdateGlobalTableSettingsCommand"; +export * from "./UpdateItemCommand"; +export * from "./UpdateKinesisStreamingDestinationCommand"; +export * from "./UpdateTableCommand"; +export * from "./UpdateTableReplicaAutoScalingCommand"; +export * from "./UpdateTimeToLiveCommand"; diff --git a/amplify/functions/fetchDocuments/node_modules/@aws-sdk/client-dynamodb/dist-es/endpoint/EndpointParameters.js b/amplify/functions/fetchDocuments/node_modules/@aws-sdk/client-dynamodb/dist-es/endpoint/EndpointParameters.js new file mode 100644 index 0000000..e5cae7d --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@aws-sdk/client-dynamodb/dist-es/endpoint/EndpointParameters.js @@ -0,0 +1,15 @@ +export const resolveClientEndpointParameters = (options) => { + return Object.assign(options, { + useDualstackEndpoint: options.useDualstackEndpoint ?? false, + useFipsEndpoint: options.useFipsEndpoint ?? 
false, + defaultSigningName: "dynamodb", + }); +}; +export const commonParams = { + UseFIPS: { type: "builtInParams", name: "useFipsEndpoint" }, + AccountId: { type: "builtInParams", name: "accountId" }, + Endpoint: { type: "builtInParams", name: "endpoint" }, + Region: { type: "builtInParams", name: "region" }, + UseDualStack: { type: "builtInParams", name: "useDualstackEndpoint" }, + AccountIdEndpointMode: { type: "builtInParams", name: "accountIdEndpointMode" }, +}; diff --git a/amplify/functions/fetchDocuments/node_modules/@aws-sdk/client-dynamodb/dist-es/endpoint/endpointResolver.js b/amplify/functions/fetchDocuments/node_modules/@aws-sdk/client-dynamodb/dist-es/endpoint/endpointResolver.js new file mode 100644 index 0000000..7fbe485 --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@aws-sdk/client-dynamodb/dist-es/endpoint/endpointResolver.js @@ -0,0 +1,23 @@ +import { awsEndpointFunctions } from "@aws-sdk/util-endpoints"; +import { customEndpointFunctions, EndpointCache, resolveEndpoint } from "@smithy/util-endpoints"; +import { ruleSet } from "./ruleset"; +const cache = new EndpointCache({ + size: 50, + params: [ + "AccountId", + "AccountIdEndpointMode", + "Endpoint", + "Region", + "ResourceArn", + "ResourceArnList", + "UseDualStack", + "UseFIPS", + ], +}); +export const defaultEndpointResolver = (endpointParams, context = {}) => { + return cache.get(endpointParams, () => resolveEndpoint(ruleSet, { + endpointParams: endpointParams, + logger: context.logger, + })); +}; +customEndpointFunctions.aws = awsEndpointFunctions; diff --git a/amplify/functions/fetchDocuments/node_modules/@aws-sdk/client-dynamodb/dist-es/endpoint/ruleset.js b/amplify/functions/fetchDocuments/node_modules/@aws-sdk/client-dynamodb/dist-es/endpoint/ruleset.js new file mode 100644 index 0000000..86e26fd --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@aws-sdk/client-dynamodb/dist-es/endpoint/ruleset.js @@ -0,0 +1,4 @@ +const S = "required", T = "type", U 
= "fn", V = "argv", W = "ref", X = "properties", Y = "headers"; +const a = false, b = "isSet", c = "error", d = "endpoint", e = "tree", f = "PartitionResult", g = "stringEquals", h = "dynamodb", i = "getAttr", j = "aws.parseArn", k = "ParsedArn", l = "isValidHostLabel", m = "FirstArn", n = { [S]: false, [T]: "String" }, o = { [S]: true, "default": false, [T]: "Boolean" }, p = { [U]: "booleanEquals", [V]: [{ [W]: "UseFIPS" }, true] }, q = { [U]: "booleanEquals", [V]: [{ [W]: "UseDualStack" }, true] }, r = {}, s = { [W]: "Region" }, t = { [U]: "booleanEquals", [V]: [{ [U]: i, [V]: [{ [W]: f }, "supportsFIPS"] }, true] }, u = { [U]: "booleanEquals", [V]: [{ [U]: i, [V]: [{ [W]: f }, "supportsDualStack"] }, true] }, v = { "conditions": [{ [U]: b, [V]: [{ [W]: "AccountIdEndpointMode" }] }, { [U]: g, [V]: [{ [W]: "AccountIdEndpointMode" }, "required"] }], "rules": [{ [c]: "Invalid Configuration: AccountIdEndpointMode is required and FIPS is enabled, but FIPS account endpoints are not supported", [T]: c }], [T]: e }, w = { [U]: b, [V]: [{ [W]: "AccountIdEndpointMode" }] }, x = { [c]: "Invalid Configuration: AccountIdEndpointMode is required and FIPS is enabled, but FIPS account endpoints are not supported", [T]: c }, y = { [U]: i, [V]: [{ [W]: f }, "name"] }, z = { [d]: { "url": "https://dynamodb.{Region}.{PartitionResult#dnsSuffix}", [X]: {}, [Y]: {} }, [T]: d }, A = { [U]: "not", [V]: [p] }, B = { [c]: "Invalid Configuration: AccountIdEndpointMode is required and DualStack is enabled, but DualStack account endpoints are not supported", [T]: c }, C = { [U]: "not", [V]: [{ [U]: g, [V]: [{ [W]: "AccountIdEndpointMode" }, "disabled"] }] }, D = { [U]: g, [V]: [y, "aws"] }, E = { [U]: "not", [V]: [q] }, F = { [U]: g, [V]: [{ [U]: i, [V]: [{ [W]: k }, "service"] }, h] }, G = { [U]: l, [V]: [{ [U]: i, [V]: [{ [W]: k }, "region"] }, false] }, H = { [U]: g, [V]: [{ [U]: i, [V]: [{ [W]: k }, "region"] }, "{Region}"] }, I = { [U]: l, [V]: [{ [U]: i, [V]: [{ [W]: k }, "accountId"] 
}, false] }, J = { "url": "https://{ParsedArn#accountId}.ddb.{Region}.{PartitionResult#dnsSuffix}", [X]: {}, [Y]: {} }, K = { [W]: "ResourceArnList" }, L = { [W]: "AccountId" }, M = [p], N = [q], O = [s], P = [w, { [U]: g, [V]: [{ [W]: "AccountIdEndpointMode" }, "required"] }], Q = [A], R = [{ [W]: "ResourceArn" }]; +const _data = { version: "1.0", parameters: { Region: n, UseDualStack: o, UseFIPS: o, Endpoint: n, AccountId: n, AccountIdEndpointMode: n, ResourceArn: n, ResourceArnList: { [S]: a, [T]: "stringArray" } }, rules: [{ conditions: [{ [U]: b, [V]: [{ [W]: "Endpoint" }] }], rules: [{ conditions: M, error: "Invalid Configuration: FIPS and custom endpoint are not supported", [T]: c }, { conditions: N, error: "Invalid Configuration: Dualstack and custom endpoint are not supported", [T]: c }, { endpoint: { url: "{Endpoint}", [X]: r, [Y]: r }, [T]: d }], [T]: e }, { conditions: [{ [U]: b, [V]: O }], rules: [{ conditions: [{ [U]: "aws.partition", [V]: O, assign: f }], rules: [{ conditions: [{ [U]: g, [V]: [s, "local"] }], rules: [{ conditions: M, error: "Invalid Configuration: FIPS and local endpoint are not supported", [T]: c }, { conditions: N, error: "Invalid Configuration: Dualstack and local endpoint are not supported", [T]: c }, { endpoint: { url: "http://localhost:8000", [X]: { authSchemes: [{ signingRegion: "us-east-1", name: "sigv4", signingName: h }] }, [Y]: r }, [T]: d }], [T]: e }, { conditions: [p, q], rules: [{ conditions: [t, u], rules: [v, { endpoint: { url: "https://dynamodb-fips.{Region}.{PartitionResult#dualStackDnsSuffix}", [X]: r, [Y]: r }, [T]: d }], [T]: e }, { error: "FIPS and DualStack are enabled, but this partition does not support one or both", [T]: c }], [T]: e }, { conditions: M, rules: [{ conditions: [t], rules: [{ conditions: [{ [U]: g, [V]: [y, "aws-us-gov"] }], rules: [v, z], [T]: e }, v, { endpoint: { url: "https://dynamodb-fips.{Region}.{PartitionResult#dnsSuffix}", [X]: r, [Y]: r }, [T]: d }], [T]: e }, { error: "FIPS is 
enabled but this partition does not support FIPS", [T]: c }], [T]: e }, { conditions: N, rules: [{ conditions: [u], rules: [{ conditions: P, rules: [{ conditions: Q, rules: [B], [T]: e }, x], [T]: e }, { endpoint: { url: "https://dynamodb.{Region}.{PartitionResult#dualStackDnsSuffix}", [X]: r, [Y]: r }, [T]: d }], [T]: e }, { error: "DualStack is enabled but this partition does not support DualStack", [T]: c }], [T]: e }, { conditions: [w, C, D, A, E, { [U]: b, [V]: R }, { [U]: j, [V]: R, assign: k }, F, G, H, I], endpoint: J, [T]: d }, { conditions: [w, C, D, A, E, { [U]: b, [V]: [K] }, { [U]: i, [V]: [K, "[0]"], assign: m }, { [U]: j, [V]: [{ [W]: m }], assign: k }, F, G, H, I], endpoint: J, [T]: d }, { conditions: [w, C, D, A, E, { [U]: b, [V]: [L] }], rules: [{ conditions: [{ [U]: l, [V]: [L, a] }], rules: [{ endpoint: { url: "https://{AccountId}.ddb.{Region}.{PartitionResult#dnsSuffix}", [X]: r, [Y]: r }, [T]: d }], [T]: e }, { error: "Credentials-sourced account ID parameter is invalid", [T]: c }], [T]: e }, { conditions: P, rules: [{ conditions: Q, rules: [{ conditions: [E], rules: [{ conditions: [D], rules: [{ error: "AccountIdEndpointMode is required but no AccountID was provided or able to be loaded", [T]: c }], [T]: e }, { error: "Invalid Configuration: AccountIdEndpointMode is required but account endpoints are not supported in this partition", [T]: c }], [T]: e }, B], [T]: e }, x], [T]: e }, z], [T]: e }], [T]: e }, { error: "Invalid Configuration: Missing Region", [T]: c }] }; +export const ruleSet = _data; diff --git a/amplify/functions/fetchDocuments/node_modules/@aws-sdk/client-dynamodb/dist-es/extensionConfiguration.js b/amplify/functions/fetchDocuments/node_modules/@aws-sdk/client-dynamodb/dist-es/extensionConfiguration.js new file mode 100644 index 0000000..cb0ff5c --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@aws-sdk/client-dynamodb/dist-es/extensionConfiguration.js @@ -0,0 +1 @@ +export {}; diff --git 
a/amplify/functions/fetchDocuments/node_modules/@aws-sdk/client-dynamodb/dist-es/index.js b/amplify/functions/fetchDocuments/node_modules/@aws-sdk/client-dynamodb/dist-es/index.js new file mode 100644 index 0000000..8bd6d9e --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@aws-sdk/client-dynamodb/dist-es/index.js @@ -0,0 +1,7 @@ +export * from "./DynamoDBClient"; +export * from "./DynamoDB"; +export * from "./commands"; +export * from "./pagination"; +export * from "./waiters"; +export * from "./models"; +export { DynamoDBServiceException } from "./models/DynamoDBServiceException"; diff --git a/amplify/functions/fetchDocuments/node_modules/@aws-sdk/client-dynamodb/dist-es/models/DynamoDBServiceException.js b/amplify/functions/fetchDocuments/node_modules/@aws-sdk/client-dynamodb/dist-es/models/DynamoDBServiceException.js new file mode 100644 index 0000000..2712903 --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@aws-sdk/client-dynamodb/dist-es/models/DynamoDBServiceException.js @@ -0,0 +1,8 @@ +import { ServiceException as __ServiceException, } from "@smithy/smithy-client"; +export { __ServiceException }; +export class DynamoDBServiceException extends __ServiceException { + constructor(options) { + super(options); + Object.setPrototypeOf(this, DynamoDBServiceException.prototype); + } +} diff --git a/amplify/functions/fetchDocuments/node_modules/@aws-sdk/client-dynamodb/dist-es/models/index.js b/amplify/functions/fetchDocuments/node_modules/@aws-sdk/client-dynamodb/dist-es/models/index.js new file mode 100644 index 0000000..09c5d6e --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@aws-sdk/client-dynamodb/dist-es/models/index.js @@ -0,0 +1 @@ +export * from "./models_0"; diff --git a/amplify/functions/fetchDocuments/node_modules/@aws-sdk/client-dynamodb/dist-es/models/models_0.js b/amplify/functions/fetchDocuments/node_modules/@aws-sdk/client-dynamodb/dist-es/models/models_0.js new file mode 100644 index 
0000000..2b4b364 --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@aws-sdk/client-dynamodb/dist-es/models/models_0.js @@ -0,0 +1,674 @@ +import { DynamoDBServiceException as __BaseException } from "./DynamoDBServiceException"; +export const ApproximateCreationDateTimePrecision = { + MICROSECOND: "MICROSECOND", + MILLISECOND: "MILLISECOND", +}; +export const AttributeAction = { + ADD: "ADD", + DELETE: "DELETE", + PUT: "PUT", +}; +export const ScalarAttributeType = { + B: "B", + N: "N", + S: "S", +}; +export const BackupStatus = { + AVAILABLE: "AVAILABLE", + CREATING: "CREATING", + DELETED: "DELETED", +}; +export const BackupType = { + AWS_BACKUP: "AWS_BACKUP", + SYSTEM: "SYSTEM", + USER: "USER", +}; +export const BillingMode = { + PAY_PER_REQUEST: "PAY_PER_REQUEST", + PROVISIONED: "PROVISIONED", +}; +export const KeyType = { + HASH: "HASH", + RANGE: "RANGE", +}; +export const ProjectionType = { + ALL: "ALL", + INCLUDE: "INCLUDE", + KEYS_ONLY: "KEYS_ONLY", +}; +export const SSEType = { + AES256: "AES256", + KMS: "KMS", +}; +export const SSEStatus = { + DISABLED: "DISABLED", + DISABLING: "DISABLING", + ENABLED: "ENABLED", + ENABLING: "ENABLING", + UPDATING: "UPDATING", +}; +export const StreamViewType = { + KEYS_ONLY: "KEYS_ONLY", + NEW_AND_OLD_IMAGES: "NEW_AND_OLD_IMAGES", + NEW_IMAGE: "NEW_IMAGE", + OLD_IMAGE: "OLD_IMAGE", +}; +export const TimeToLiveStatus = { + DISABLED: "DISABLED", + DISABLING: "DISABLING", + ENABLED: "ENABLED", + ENABLING: "ENABLING", +}; +export class BackupInUseException extends __BaseException { + name = "BackupInUseException"; + $fault = "client"; + constructor(opts) { + super({ + name: "BackupInUseException", + $fault: "client", + ...opts, + }); + Object.setPrototypeOf(this, BackupInUseException.prototype); + } +} +export class BackupNotFoundException extends __BaseException { + name = "BackupNotFoundException"; + $fault = "client"; + constructor(opts) { + super({ + name: "BackupNotFoundException", + $fault: "client", + 
...opts, + }); + Object.setPrototypeOf(this, BackupNotFoundException.prototype); + } +} +export const BackupTypeFilter = { + ALL: "ALL", + AWS_BACKUP: "AWS_BACKUP", + SYSTEM: "SYSTEM", + USER: "USER", +}; +export const ReturnConsumedCapacity = { + INDEXES: "INDEXES", + NONE: "NONE", + TOTAL: "TOTAL", +}; +export const ReturnValuesOnConditionCheckFailure = { + ALL_OLD: "ALL_OLD", + NONE: "NONE", +}; +export const BatchStatementErrorCodeEnum = { + AccessDenied: "AccessDenied", + ConditionalCheckFailed: "ConditionalCheckFailed", + DuplicateItem: "DuplicateItem", + InternalServerError: "InternalServerError", + ItemCollectionSizeLimitExceeded: "ItemCollectionSizeLimitExceeded", + ProvisionedThroughputExceeded: "ProvisionedThroughputExceeded", + RequestLimitExceeded: "RequestLimitExceeded", + ResourceNotFound: "ResourceNotFound", + ThrottlingError: "ThrottlingError", + TransactionConflict: "TransactionConflict", + ValidationError: "ValidationError", +}; +export class InternalServerError extends __BaseException { + name = "InternalServerError"; + $fault = "server"; + constructor(opts) { + super({ + name: "InternalServerError", + $fault: "server", + ...opts, + }); + Object.setPrototypeOf(this, InternalServerError.prototype); + } +} +export class RequestLimitExceeded extends __BaseException { + name = "RequestLimitExceeded"; + $fault = "client"; + constructor(opts) { + super({ + name: "RequestLimitExceeded", + $fault: "client", + ...opts, + }); + Object.setPrototypeOf(this, RequestLimitExceeded.prototype); + } +} +export class InvalidEndpointException extends __BaseException { + name = "InvalidEndpointException"; + $fault = "client"; + Message; + constructor(opts) { + super({ + name: "InvalidEndpointException", + $fault: "client", + ...opts, + }); + Object.setPrototypeOf(this, InvalidEndpointException.prototype); + this.Message = opts.Message; + } +} +export class ProvisionedThroughputExceededException extends __BaseException { + name = 
"ProvisionedThroughputExceededException"; + $fault = "client"; + constructor(opts) { + super({ + name: "ProvisionedThroughputExceededException", + $fault: "client", + ...opts, + }); + Object.setPrototypeOf(this, ProvisionedThroughputExceededException.prototype); + } +} +export class ResourceNotFoundException extends __BaseException { + name = "ResourceNotFoundException"; + $fault = "client"; + constructor(opts) { + super({ + name: "ResourceNotFoundException", + $fault: "client", + ...opts, + }); + Object.setPrototypeOf(this, ResourceNotFoundException.prototype); + } +} +export const ReturnItemCollectionMetrics = { + NONE: "NONE", + SIZE: "SIZE", +}; +export class ItemCollectionSizeLimitExceededException extends __BaseException { + name = "ItemCollectionSizeLimitExceededException"; + $fault = "client"; + constructor(opts) { + super({ + name: "ItemCollectionSizeLimitExceededException", + $fault: "client", + ...opts, + }); + Object.setPrototypeOf(this, ItemCollectionSizeLimitExceededException.prototype); + } +} +export const ComparisonOperator = { + BEGINS_WITH: "BEGINS_WITH", + BETWEEN: "BETWEEN", + CONTAINS: "CONTAINS", + EQ: "EQ", + GE: "GE", + GT: "GT", + IN: "IN", + LE: "LE", + LT: "LT", + NE: "NE", + NOT_CONTAINS: "NOT_CONTAINS", + NOT_NULL: "NOT_NULL", + NULL: "NULL", +}; +export const ConditionalOperator = { + AND: "AND", + OR: "OR", +}; +export const ContinuousBackupsStatus = { + DISABLED: "DISABLED", + ENABLED: "ENABLED", +}; +export const PointInTimeRecoveryStatus = { + DISABLED: "DISABLED", + ENABLED: "ENABLED", +}; +export class ContinuousBackupsUnavailableException extends __BaseException { + name = "ContinuousBackupsUnavailableException"; + $fault = "client"; + constructor(opts) { + super({ + name: "ContinuousBackupsUnavailableException", + $fault: "client", + ...opts, + }); + Object.setPrototypeOf(this, ContinuousBackupsUnavailableException.prototype); + } +} +export const ContributorInsightsAction = { + DISABLE: "DISABLE", + ENABLE: "ENABLE", +}; 
+export const ContributorInsightsStatus = { + DISABLED: "DISABLED", + DISABLING: "DISABLING", + ENABLED: "ENABLED", + ENABLING: "ENABLING", + FAILED: "FAILED", +}; +export class LimitExceededException extends __BaseException { + name = "LimitExceededException"; + $fault = "client"; + constructor(opts) { + super({ + name: "LimitExceededException", + $fault: "client", + ...opts, + }); + Object.setPrototypeOf(this, LimitExceededException.prototype); + } +} +export class TableInUseException extends __BaseException { + name = "TableInUseException"; + $fault = "client"; + constructor(opts) { + super({ + name: "TableInUseException", + $fault: "client", + ...opts, + }); + Object.setPrototypeOf(this, TableInUseException.prototype); + } +} +export class TableNotFoundException extends __BaseException { + name = "TableNotFoundException"; + $fault = "client"; + constructor(opts) { + super({ + name: "TableNotFoundException", + $fault: "client", + ...opts, + }); + Object.setPrototypeOf(this, TableNotFoundException.prototype); + } +} +export const GlobalTableStatus = { + ACTIVE: "ACTIVE", + CREATING: "CREATING", + DELETING: "DELETING", + UPDATING: "UPDATING", +}; +export const IndexStatus = { + ACTIVE: "ACTIVE", + CREATING: "CREATING", + DELETING: "DELETING", + UPDATING: "UPDATING", +}; +export const ReplicaStatus = { + ACTIVE: "ACTIVE", + CREATING: "CREATING", + CREATION_FAILED: "CREATION_FAILED", + DELETING: "DELETING", + INACCESSIBLE_ENCRYPTION_CREDENTIALS: "INACCESSIBLE_ENCRYPTION_CREDENTIALS", + REGION_DISABLED: "REGION_DISABLED", + UPDATING: "UPDATING", +}; +export const TableClass = { + STANDARD: "STANDARD", + STANDARD_INFREQUENT_ACCESS: "STANDARD_INFREQUENT_ACCESS", +}; +export const TableStatus = { + ACTIVE: "ACTIVE", + ARCHIVED: "ARCHIVED", + ARCHIVING: "ARCHIVING", + CREATING: "CREATING", + DELETING: "DELETING", + INACCESSIBLE_ENCRYPTION_CREDENTIALS: "INACCESSIBLE_ENCRYPTION_CREDENTIALS", + UPDATING: "UPDATING", +}; +export class GlobalTableAlreadyExistsException 
extends __BaseException { + name = "GlobalTableAlreadyExistsException"; + $fault = "client"; + constructor(opts) { + super({ + name: "GlobalTableAlreadyExistsException", + $fault: "client", + ...opts, + }); + Object.setPrototypeOf(this, GlobalTableAlreadyExistsException.prototype); + } +} +export const MultiRegionConsistency = { + EVENTUAL: "EVENTUAL", + STRONG: "STRONG", +}; +export class ResourceInUseException extends __BaseException { + name = "ResourceInUseException"; + $fault = "client"; + constructor(opts) { + super({ + name: "ResourceInUseException", + $fault: "client", + ...opts, + }); + Object.setPrototypeOf(this, ResourceInUseException.prototype); + } +} +export const ReturnValue = { + ALL_NEW: "ALL_NEW", + ALL_OLD: "ALL_OLD", + NONE: "NONE", + UPDATED_NEW: "UPDATED_NEW", + UPDATED_OLD: "UPDATED_OLD", +}; +export class ReplicatedWriteConflictException extends __BaseException { + name = "ReplicatedWriteConflictException"; + $fault = "client"; + constructor(opts) { + super({ + name: "ReplicatedWriteConflictException", + $fault: "client", + ...opts, + }); + Object.setPrototypeOf(this, ReplicatedWriteConflictException.prototype); + } +} +export class TransactionConflictException extends __BaseException { + name = "TransactionConflictException"; + $fault = "client"; + constructor(opts) { + super({ + name: "TransactionConflictException", + $fault: "client", + ...opts, + }); + Object.setPrototypeOf(this, TransactionConflictException.prototype); + } +} +export class PolicyNotFoundException extends __BaseException { + name = "PolicyNotFoundException"; + $fault = "client"; + constructor(opts) { + super({ + name: "PolicyNotFoundException", + $fault: "client", + ...opts, + }); + Object.setPrototypeOf(this, PolicyNotFoundException.prototype); + } +} +export const ExportFormat = { + DYNAMODB_JSON: "DYNAMODB_JSON", + ION: "ION", +}; +export const ExportStatus = { + COMPLETED: "COMPLETED", + FAILED: "FAILED", + IN_PROGRESS: "IN_PROGRESS", +}; +export const ExportType = { 
+ FULL_EXPORT: "FULL_EXPORT", + INCREMENTAL_EXPORT: "INCREMENTAL_EXPORT", +}; +export const ExportViewType = { + NEW_AND_OLD_IMAGES: "NEW_AND_OLD_IMAGES", + NEW_IMAGE: "NEW_IMAGE", +}; +export const S3SseAlgorithm = { + AES256: "AES256", + KMS: "KMS", +}; +export class ExportNotFoundException extends __BaseException { + name = "ExportNotFoundException"; + $fault = "client"; + constructor(opts) { + super({ + name: "ExportNotFoundException", + $fault: "client", + ...opts, + }); + Object.setPrototypeOf(this, ExportNotFoundException.prototype); + } +} +export class GlobalTableNotFoundException extends __BaseException { + name = "GlobalTableNotFoundException"; + $fault = "client"; + constructor(opts) { + super({ + name: "GlobalTableNotFoundException", + $fault: "client", + ...opts, + }); + Object.setPrototypeOf(this, GlobalTableNotFoundException.prototype); + } +} +export const ImportStatus = { + CANCELLED: "CANCELLED", + CANCELLING: "CANCELLING", + COMPLETED: "COMPLETED", + FAILED: "FAILED", + IN_PROGRESS: "IN_PROGRESS", +}; +export const InputCompressionType = { + GZIP: "GZIP", + NONE: "NONE", + ZSTD: "ZSTD", +}; +export const InputFormat = { + CSV: "CSV", + DYNAMODB_JSON: "DYNAMODB_JSON", + ION: "ION", +}; +export class ImportNotFoundException extends __BaseException { + name = "ImportNotFoundException"; + $fault = "client"; + constructor(opts) { + super({ + name: "ImportNotFoundException", + $fault: "client", + ...opts, + }); + Object.setPrototypeOf(this, ImportNotFoundException.prototype); + } +} +export const DestinationStatus = { + ACTIVE: "ACTIVE", + DISABLED: "DISABLED", + DISABLING: "DISABLING", + ENABLE_FAILED: "ENABLE_FAILED", + ENABLING: "ENABLING", + UPDATING: "UPDATING", +}; +export class DuplicateItemException extends __BaseException { + name = "DuplicateItemException"; + $fault = "client"; + constructor(opts) { + super({ + name: "DuplicateItemException", + $fault: "client", + ...opts, + }); + Object.setPrototypeOf(this, 
DuplicateItemException.prototype); + } +} +export class IdempotentParameterMismatchException extends __BaseException { + name = "IdempotentParameterMismatchException"; + $fault = "client"; + Message; + constructor(opts) { + super({ + name: "IdempotentParameterMismatchException", + $fault: "client", + ...opts, + }); + Object.setPrototypeOf(this, IdempotentParameterMismatchException.prototype); + this.Message = opts.Message; + } +} +export class TransactionInProgressException extends __BaseException { + name = "TransactionInProgressException"; + $fault = "client"; + Message; + constructor(opts) { + super({ + name: "TransactionInProgressException", + $fault: "client", + ...opts, + }); + Object.setPrototypeOf(this, TransactionInProgressException.prototype); + this.Message = opts.Message; + } +} +export class ExportConflictException extends __BaseException { + name = "ExportConflictException"; + $fault = "client"; + constructor(opts) { + super({ + name: "ExportConflictException", + $fault: "client", + ...opts, + }); + Object.setPrototypeOf(this, ExportConflictException.prototype); + } +} +export class InvalidExportTimeException extends __BaseException { + name = "InvalidExportTimeException"; + $fault = "client"; + constructor(opts) { + super({ + name: "InvalidExportTimeException", + $fault: "client", + ...opts, + }); + Object.setPrototypeOf(this, InvalidExportTimeException.prototype); + } +} +export class PointInTimeRecoveryUnavailableException extends __BaseException { + name = "PointInTimeRecoveryUnavailableException"; + $fault = "client"; + constructor(opts) { + super({ + name: "PointInTimeRecoveryUnavailableException", + $fault: "client", + ...opts, + }); + Object.setPrototypeOf(this, PointInTimeRecoveryUnavailableException.prototype); + } +} +export class ImportConflictException extends __BaseException { + name = "ImportConflictException"; + $fault = "client"; + constructor(opts) { + super({ + name: "ImportConflictException", + $fault: "client", + ...opts, + }); + 
Object.setPrototypeOf(this, ImportConflictException.prototype); + } +} +export const Select = { + ALL_ATTRIBUTES: "ALL_ATTRIBUTES", + ALL_PROJECTED_ATTRIBUTES: "ALL_PROJECTED_ATTRIBUTES", + COUNT: "COUNT", + SPECIFIC_ATTRIBUTES: "SPECIFIC_ATTRIBUTES", +}; +export class TableAlreadyExistsException extends __BaseException { + name = "TableAlreadyExistsException"; + $fault = "client"; + constructor(opts) { + super({ + name: "TableAlreadyExistsException", + $fault: "client", + ...opts, + }); + Object.setPrototypeOf(this, TableAlreadyExistsException.prototype); + } +} +export class InvalidRestoreTimeException extends __BaseException { + name = "InvalidRestoreTimeException"; + $fault = "client"; + constructor(opts) { + super({ + name: "InvalidRestoreTimeException", + $fault: "client", + ...opts, + }); + Object.setPrototypeOf(this, InvalidRestoreTimeException.prototype); + } +} +export class ReplicaAlreadyExistsException extends __BaseException { + name = "ReplicaAlreadyExistsException"; + $fault = "client"; + constructor(opts) { + super({ + name: "ReplicaAlreadyExistsException", + $fault: "client", + ...opts, + }); + Object.setPrototypeOf(this, ReplicaAlreadyExistsException.prototype); + } +} +export class ReplicaNotFoundException extends __BaseException { + name = "ReplicaNotFoundException"; + $fault = "client"; + constructor(opts) { + super({ + name: "ReplicaNotFoundException", + $fault: "client", + ...opts, + }); + Object.setPrototypeOf(this, ReplicaNotFoundException.prototype); + } +} +export class IndexNotFoundException extends __BaseException { + name = "IndexNotFoundException"; + $fault = "client"; + constructor(opts) { + super({ + name: "IndexNotFoundException", + $fault: "client", + ...opts, + }); + Object.setPrototypeOf(this, IndexNotFoundException.prototype); + } +} +export var AttributeValue; +(function (AttributeValue) { + AttributeValue.visit = (value, visitor) => { + if (value.S !== undefined) + return visitor.S(value.S); + if (value.N !== undefined) + 
return visitor.N(value.N); + if (value.B !== undefined) + return visitor.B(value.B); + if (value.SS !== undefined) + return visitor.SS(value.SS); + if (value.NS !== undefined) + return visitor.NS(value.NS); + if (value.BS !== undefined) + return visitor.BS(value.BS); + if (value.M !== undefined) + return visitor.M(value.M); + if (value.L !== undefined) + return visitor.L(value.L); + if (value.NULL !== undefined) + return visitor.NULL(value.NULL); + if (value.BOOL !== undefined) + return visitor.BOOL(value.BOOL); + return visitor._(value.$unknown[0], value.$unknown[1]); + }; +})(AttributeValue || (AttributeValue = {})); +export class ConditionalCheckFailedException extends __BaseException { + name = "ConditionalCheckFailedException"; + $fault = "client"; + Item; + constructor(opts) { + super({ + name: "ConditionalCheckFailedException", + $fault: "client", + ...opts, + }); + Object.setPrototypeOf(this, ConditionalCheckFailedException.prototype); + this.Item = opts.Item; + } +} +export class TransactionCanceledException extends __BaseException { + name = "TransactionCanceledException"; + $fault = "client"; + Message; + CancellationReasons; + constructor(opts) { + super({ + name: "TransactionCanceledException", + $fault: "client", + ...opts, + }); + Object.setPrototypeOf(this, TransactionCanceledException.prototype); + this.Message = opts.Message; + this.CancellationReasons = opts.CancellationReasons; + } +} diff --git a/amplify/functions/fetchDocuments/node_modules/@aws-sdk/client-dynamodb/dist-es/pagination/Interfaces.js b/amplify/functions/fetchDocuments/node_modules/@aws-sdk/client-dynamodb/dist-es/pagination/Interfaces.js new file mode 100644 index 0000000..cb0ff5c --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@aws-sdk/client-dynamodb/dist-es/pagination/Interfaces.js @@ -0,0 +1 @@ +export {}; diff --git a/amplify/functions/fetchDocuments/node_modules/@aws-sdk/client-dynamodb/dist-es/pagination/ListContributorInsightsPaginator.js 
b/amplify/functions/fetchDocuments/node_modules/@aws-sdk/client-dynamodb/dist-es/pagination/ListContributorInsightsPaginator.js new file mode 100644 index 0000000..23bb95c --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@aws-sdk/client-dynamodb/dist-es/pagination/ListContributorInsightsPaginator.js @@ -0,0 +1,4 @@ +import { createPaginator } from "@smithy/core"; +import { ListContributorInsightsCommand, } from "../commands/ListContributorInsightsCommand"; +import { DynamoDBClient } from "../DynamoDBClient"; +export const paginateListContributorInsights = createPaginator(DynamoDBClient, ListContributorInsightsCommand, "NextToken", "NextToken", "MaxResults"); diff --git a/amplify/functions/fetchDocuments/node_modules/@aws-sdk/client-dynamodb/dist-es/pagination/ListExportsPaginator.js b/amplify/functions/fetchDocuments/node_modules/@aws-sdk/client-dynamodb/dist-es/pagination/ListExportsPaginator.js new file mode 100644 index 0000000..e252e7f --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@aws-sdk/client-dynamodb/dist-es/pagination/ListExportsPaginator.js @@ -0,0 +1,4 @@ +import { createPaginator } from "@smithy/core"; +import { ListExportsCommand } from "../commands/ListExportsCommand"; +import { DynamoDBClient } from "../DynamoDBClient"; +export const paginateListExports = createPaginator(DynamoDBClient, ListExportsCommand, "NextToken", "NextToken", "MaxResults"); diff --git a/amplify/functions/fetchDocuments/node_modules/@aws-sdk/client-dynamodb/dist-es/pagination/ListImportsPaginator.js b/amplify/functions/fetchDocuments/node_modules/@aws-sdk/client-dynamodb/dist-es/pagination/ListImportsPaginator.js new file mode 100644 index 0000000..c3fe323 --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@aws-sdk/client-dynamodb/dist-es/pagination/ListImportsPaginator.js @@ -0,0 +1,4 @@ +import { createPaginator } from "@smithy/core"; +import { ListImportsCommand } from "../commands/ListImportsCommand"; +import { 
DynamoDBClient } from "../DynamoDBClient"; +export const paginateListImports = createPaginator(DynamoDBClient, ListImportsCommand, "NextToken", "NextToken", "PageSize"); diff --git a/amplify/functions/fetchDocuments/node_modules/@aws-sdk/client-dynamodb/dist-es/pagination/ListTablesPaginator.js b/amplify/functions/fetchDocuments/node_modules/@aws-sdk/client-dynamodb/dist-es/pagination/ListTablesPaginator.js new file mode 100644 index 0000000..979f3f6 --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@aws-sdk/client-dynamodb/dist-es/pagination/ListTablesPaginator.js @@ -0,0 +1,4 @@ +import { createPaginator } from "@smithy/core"; +import { ListTablesCommand } from "../commands/ListTablesCommand"; +import { DynamoDBClient } from "../DynamoDBClient"; +export const paginateListTables = createPaginator(DynamoDBClient, ListTablesCommand, "ExclusiveStartTableName", "LastEvaluatedTableName", "Limit"); diff --git a/amplify/functions/fetchDocuments/node_modules/@aws-sdk/client-dynamodb/dist-es/pagination/QueryPaginator.js b/amplify/functions/fetchDocuments/node_modules/@aws-sdk/client-dynamodb/dist-es/pagination/QueryPaginator.js new file mode 100644 index 0000000..4fcc17d --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@aws-sdk/client-dynamodb/dist-es/pagination/QueryPaginator.js @@ -0,0 +1,4 @@ +import { createPaginator } from "@smithy/core"; +import { QueryCommand } from "../commands/QueryCommand"; +import { DynamoDBClient } from "../DynamoDBClient"; +export const paginateQuery = createPaginator(DynamoDBClient, QueryCommand, "ExclusiveStartKey", "LastEvaluatedKey", "Limit"); diff --git a/amplify/functions/fetchDocuments/node_modules/@aws-sdk/client-dynamodb/dist-es/pagination/ScanPaginator.js b/amplify/functions/fetchDocuments/node_modules/@aws-sdk/client-dynamodb/dist-es/pagination/ScanPaginator.js new file mode 100644 index 0000000..b95b746 --- /dev/null +++ 
b/amplify/functions/fetchDocuments/node_modules/@aws-sdk/client-dynamodb/dist-es/pagination/ScanPaginator.js @@ -0,0 +1,4 @@ +import { createPaginator } from "@smithy/core"; +import { ScanCommand } from "../commands/ScanCommand"; +import { DynamoDBClient } from "../DynamoDBClient"; +export const paginateScan = createPaginator(DynamoDBClient, ScanCommand, "ExclusiveStartKey", "LastEvaluatedKey", "Limit"); diff --git a/amplify/functions/fetchDocuments/node_modules/@aws-sdk/client-dynamodb/dist-es/pagination/index.js b/amplify/functions/fetchDocuments/node_modules/@aws-sdk/client-dynamodb/dist-es/pagination/index.js new file mode 100644 index 0000000..a6dfcd0 --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@aws-sdk/client-dynamodb/dist-es/pagination/index.js @@ -0,0 +1,7 @@ +export * from "./Interfaces"; +export * from "./ListContributorInsightsPaginator"; +export * from "./ListExportsPaginator"; +export * from "./ListImportsPaginator"; +export * from "./ListTablesPaginator"; +export * from "./QueryPaginator"; +export * from "./ScanPaginator"; diff --git a/amplify/functions/fetchDocuments/node_modules/@aws-sdk/client-dynamodb/dist-es/protocols/Aws_json1_0.js b/amplify/functions/fetchDocuments/node_modules/@aws-sdk/client-dynamodb/dist-es/protocols/Aws_json1_0.js new file mode 100644 index 0000000..d6c7135 --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@aws-sdk/client-dynamodb/dist-es/protocols/Aws_json1_0.js @@ -0,0 +1,3094 @@ +import { awsExpectUnion as __expectUnion, loadRestJsonErrorCode, parseJsonBody as parseBody, parseJsonErrorBody as parseErrorBody, } from "@aws-sdk/core"; +import { HttpRequest as __HttpRequest } from "@smithy/protocol-http"; +import { _json, collectBody, decorateServiceException as __decorateServiceException, expectBoolean as __expectBoolean, expectInt32 as __expectInt32, expectLong as __expectLong, expectNonNull as __expectNonNull, expectNumber as __expectNumber, expectString as __expectString, 
limitedParseDouble as __limitedParseDouble, parseEpochTimestamp as __parseEpochTimestamp, serializeFloat as __serializeFloat, take, withBaseException, } from "@smithy/smithy-client"; +import { v4 as generateIdempotencyToken } from "uuid"; +import { DynamoDBServiceException as __BaseException } from "../models/DynamoDBServiceException"; +import { AttributeValue, BackupInUseException, BackupNotFoundException, ConditionalCheckFailedException, ContinuousBackupsUnavailableException, DuplicateItemException, ExportConflictException, ExportNotFoundException, GlobalTableAlreadyExistsException, GlobalTableNotFoundException, IdempotentParameterMismatchException, ImportConflictException, ImportNotFoundException, IndexNotFoundException, InternalServerError, InvalidEndpointException, InvalidExportTimeException, InvalidRestoreTimeException, ItemCollectionSizeLimitExceededException, LimitExceededException, PointInTimeRecoveryUnavailableException, PolicyNotFoundException, ProvisionedThroughputExceededException, ReplicaAlreadyExistsException, ReplicaNotFoundException, ReplicatedWriteConflictException, RequestLimitExceeded, ResourceInUseException, ResourceNotFoundException, TableAlreadyExistsException, TableInUseException, TableNotFoundException, TransactionCanceledException, TransactionConflictException, TransactionInProgressException, } from "../models/models_0"; +export const se_BatchExecuteStatementCommand = async (input, context) => { + const headers = sharedHeaders("BatchExecuteStatement"); + let body; + body = JSON.stringify(se_BatchExecuteStatementInput(input, context)); + return buildHttpRpcRequest(context, headers, "/", undefined, body); +}; +export const se_BatchGetItemCommand = async (input, context) => { + const headers = sharedHeaders("BatchGetItem"); + let body; + body = JSON.stringify(se_BatchGetItemInput(input, context)); + return buildHttpRpcRequest(context, headers, "/", undefined, body); +}; +export const se_BatchWriteItemCommand = async (input, context) => { + 
const headers = sharedHeaders("BatchWriteItem"); + let body; + body = JSON.stringify(se_BatchWriteItemInput(input, context)); + return buildHttpRpcRequest(context, headers, "/", undefined, body); +}; +export const se_CreateBackupCommand = async (input, context) => { + const headers = sharedHeaders("CreateBackup"); + let body; + body = JSON.stringify(_json(input)); + return buildHttpRpcRequest(context, headers, "/", undefined, body); +}; +export const se_CreateGlobalTableCommand = async (input, context) => { + const headers = sharedHeaders("CreateGlobalTable"); + let body; + body = JSON.stringify(_json(input)); + return buildHttpRpcRequest(context, headers, "/", undefined, body); +}; +export const se_CreateTableCommand = async (input, context) => { + const headers = sharedHeaders("CreateTable"); + let body; + body = JSON.stringify(_json(input)); + return buildHttpRpcRequest(context, headers, "/", undefined, body); +}; +export const se_DeleteBackupCommand = async (input, context) => { + const headers = sharedHeaders("DeleteBackup"); + let body; + body = JSON.stringify(_json(input)); + return buildHttpRpcRequest(context, headers, "/", undefined, body); +}; +export const se_DeleteItemCommand = async (input, context) => { + const headers = sharedHeaders("DeleteItem"); + let body; + body = JSON.stringify(se_DeleteItemInput(input, context)); + return buildHttpRpcRequest(context, headers, "/", undefined, body); +}; +export const se_DeleteResourcePolicyCommand = async (input, context) => { + const headers = sharedHeaders("DeleteResourcePolicy"); + let body; + body = JSON.stringify(_json(input)); + return buildHttpRpcRequest(context, headers, "/", undefined, body); +}; +export const se_DeleteTableCommand = async (input, context) => { + const headers = sharedHeaders("DeleteTable"); + let body; + body = JSON.stringify(_json(input)); + return buildHttpRpcRequest(context, headers, "/", undefined, body); +}; +export const se_DescribeBackupCommand = async (input, context) => { + 
const headers = sharedHeaders("DescribeBackup"); + let body; + body = JSON.stringify(_json(input)); + return buildHttpRpcRequest(context, headers, "/", undefined, body); +}; +export const se_DescribeContinuousBackupsCommand = async (input, context) => { + const headers = sharedHeaders("DescribeContinuousBackups"); + let body; + body = JSON.stringify(_json(input)); + return buildHttpRpcRequest(context, headers, "/", undefined, body); +}; +export const se_DescribeContributorInsightsCommand = async (input, context) => { + const headers = sharedHeaders("DescribeContributorInsights"); + let body; + body = JSON.stringify(_json(input)); + return buildHttpRpcRequest(context, headers, "/", undefined, body); +}; +export const se_DescribeEndpointsCommand = async (input, context) => { + const headers = sharedHeaders("DescribeEndpoints"); + let body; + body = JSON.stringify(_json(input)); + return buildHttpRpcRequest(context, headers, "/", undefined, body); +}; +export const se_DescribeExportCommand = async (input, context) => { + const headers = sharedHeaders("DescribeExport"); + let body; + body = JSON.stringify(_json(input)); + return buildHttpRpcRequest(context, headers, "/", undefined, body); +}; +export const se_DescribeGlobalTableCommand = async (input, context) => { + const headers = sharedHeaders("DescribeGlobalTable"); + let body; + body = JSON.stringify(_json(input)); + return buildHttpRpcRequest(context, headers, "/", undefined, body); +}; +export const se_DescribeGlobalTableSettingsCommand = async (input, context) => { + const headers = sharedHeaders("DescribeGlobalTableSettings"); + let body; + body = JSON.stringify(_json(input)); + return buildHttpRpcRequest(context, headers, "/", undefined, body); +}; +export const se_DescribeImportCommand = async (input, context) => { + const headers = sharedHeaders("DescribeImport"); + let body; + body = JSON.stringify(_json(input)); + return buildHttpRpcRequest(context, headers, "/", undefined, body); +}; +export const 
se_DescribeKinesisStreamingDestinationCommand = async (input, context) => { + const headers = sharedHeaders("DescribeKinesisStreamingDestination"); + let body; + body = JSON.stringify(_json(input)); + return buildHttpRpcRequest(context, headers, "/", undefined, body); +}; +export const se_DescribeLimitsCommand = async (input, context) => { + const headers = sharedHeaders("DescribeLimits"); + let body; + body = JSON.stringify(_json(input)); + return buildHttpRpcRequest(context, headers, "/", undefined, body); +}; +export const se_DescribeTableCommand = async (input, context) => { + const headers = sharedHeaders("DescribeTable"); + let body; + body = JSON.stringify(_json(input)); + return buildHttpRpcRequest(context, headers, "/", undefined, body); +}; +export const se_DescribeTableReplicaAutoScalingCommand = async (input, context) => { + const headers = sharedHeaders("DescribeTableReplicaAutoScaling"); + let body; + body = JSON.stringify(_json(input)); + return buildHttpRpcRequest(context, headers, "/", undefined, body); +}; +export const se_DescribeTimeToLiveCommand = async (input, context) => { + const headers = sharedHeaders("DescribeTimeToLive"); + let body; + body = JSON.stringify(_json(input)); + return buildHttpRpcRequest(context, headers, "/", undefined, body); +}; +export const se_DisableKinesisStreamingDestinationCommand = async (input, context) => { + const headers = sharedHeaders("DisableKinesisStreamingDestination"); + let body; + body = JSON.stringify(_json(input)); + return buildHttpRpcRequest(context, headers, "/", undefined, body); +}; +export const se_EnableKinesisStreamingDestinationCommand = async (input, context) => { + const headers = sharedHeaders("EnableKinesisStreamingDestination"); + let body; + body = JSON.stringify(_json(input)); + return buildHttpRpcRequest(context, headers, "/", undefined, body); +}; +export const se_ExecuteStatementCommand = async (input, context) => { + const headers = sharedHeaders("ExecuteStatement"); + let body; + 
body = JSON.stringify(se_ExecuteStatementInput(input, context)); + return buildHttpRpcRequest(context, headers, "/", undefined, body); +}; +export const se_ExecuteTransactionCommand = async (input, context) => { + const headers = sharedHeaders("ExecuteTransaction"); + let body; + body = JSON.stringify(se_ExecuteTransactionInput(input, context)); + return buildHttpRpcRequest(context, headers, "/", undefined, body); +}; +export const se_ExportTableToPointInTimeCommand = async (input, context) => { + const headers = sharedHeaders("ExportTableToPointInTime"); + let body; + body = JSON.stringify(se_ExportTableToPointInTimeInput(input, context)); + return buildHttpRpcRequest(context, headers, "/", undefined, body); +}; +export const se_GetItemCommand = async (input, context) => { + const headers = sharedHeaders("GetItem"); + let body; + body = JSON.stringify(se_GetItemInput(input, context)); + return buildHttpRpcRequest(context, headers, "/", undefined, body); +}; +export const se_GetResourcePolicyCommand = async (input, context) => { + const headers = sharedHeaders("GetResourcePolicy"); + let body; + body = JSON.stringify(_json(input)); + return buildHttpRpcRequest(context, headers, "/", undefined, body); +}; +export const se_ImportTableCommand = async (input, context) => { + const headers = sharedHeaders("ImportTable"); + let body; + body = JSON.stringify(se_ImportTableInput(input, context)); + return buildHttpRpcRequest(context, headers, "/", undefined, body); +}; +export const se_ListBackupsCommand = async (input, context) => { + const headers = sharedHeaders("ListBackups"); + let body; + body = JSON.stringify(se_ListBackupsInput(input, context)); + return buildHttpRpcRequest(context, headers, "/", undefined, body); +}; +export const se_ListContributorInsightsCommand = async (input, context) => { + const headers = sharedHeaders("ListContributorInsights"); + let body; + body = JSON.stringify(_json(input)); + return buildHttpRpcRequest(context, headers, "/", undefined, 
body); +}; +export const se_ListExportsCommand = async (input, context) => { + const headers = sharedHeaders("ListExports"); + let body; + body = JSON.stringify(_json(input)); + return buildHttpRpcRequest(context, headers, "/", undefined, body); +}; +export const se_ListGlobalTablesCommand = async (input, context) => { + const headers = sharedHeaders("ListGlobalTables"); + let body; + body = JSON.stringify(_json(input)); + return buildHttpRpcRequest(context, headers, "/", undefined, body); +}; +export const se_ListImportsCommand = async (input, context) => { + const headers = sharedHeaders("ListImports"); + let body; + body = JSON.stringify(_json(input)); + return buildHttpRpcRequest(context, headers, "/", undefined, body); +}; +export const se_ListTablesCommand = async (input, context) => { + const headers = sharedHeaders("ListTables"); + let body; + body = JSON.stringify(_json(input)); + return buildHttpRpcRequest(context, headers, "/", undefined, body); +}; +export const se_ListTagsOfResourceCommand = async (input, context) => { + const headers = sharedHeaders("ListTagsOfResource"); + let body; + body = JSON.stringify(_json(input)); + return buildHttpRpcRequest(context, headers, "/", undefined, body); +}; +export const se_PutItemCommand = async (input, context) => { + const headers = sharedHeaders("PutItem"); + let body; + body = JSON.stringify(se_PutItemInput(input, context)); + return buildHttpRpcRequest(context, headers, "/", undefined, body); +}; +export const se_PutResourcePolicyCommand = async (input, context) => { + const headers = sharedHeaders("PutResourcePolicy"); + let body; + body = JSON.stringify(_json(input)); + return buildHttpRpcRequest(context, headers, "/", undefined, body); +}; +export const se_QueryCommand = async (input, context) => { + const headers = sharedHeaders("Query"); + let body; + body = JSON.stringify(se_QueryInput(input, context)); + return buildHttpRpcRequest(context, headers, "/", undefined, body); +}; +export const 
se_RestoreTableFromBackupCommand = async (input, context) => { + const headers = sharedHeaders("RestoreTableFromBackup"); + let body; + body = JSON.stringify(_json(input)); + return buildHttpRpcRequest(context, headers, "/", undefined, body); +}; +export const se_RestoreTableToPointInTimeCommand = async (input, context) => { + const headers = sharedHeaders("RestoreTableToPointInTime"); + let body; + body = JSON.stringify(se_RestoreTableToPointInTimeInput(input, context)); + return buildHttpRpcRequest(context, headers, "/", undefined, body); +}; +export const se_ScanCommand = async (input, context) => { + const headers = sharedHeaders("Scan"); + let body; + body = JSON.stringify(se_ScanInput(input, context)); + return buildHttpRpcRequest(context, headers, "/", undefined, body); +}; +export const se_TagResourceCommand = async (input, context) => { + const headers = sharedHeaders("TagResource"); + let body; + body = JSON.stringify(_json(input)); + return buildHttpRpcRequest(context, headers, "/", undefined, body); +}; +export const se_TransactGetItemsCommand = async (input, context) => { + const headers = sharedHeaders("TransactGetItems"); + let body; + body = JSON.stringify(se_TransactGetItemsInput(input, context)); + return buildHttpRpcRequest(context, headers, "/", undefined, body); +}; +export const se_TransactWriteItemsCommand = async (input, context) => { + const headers = sharedHeaders("TransactWriteItems"); + let body; + body = JSON.stringify(se_TransactWriteItemsInput(input, context)); + return buildHttpRpcRequest(context, headers, "/", undefined, body); +}; +export const se_UntagResourceCommand = async (input, context) => { + const headers = sharedHeaders("UntagResource"); + let body; + body = JSON.stringify(_json(input)); + return buildHttpRpcRequest(context, headers, "/", undefined, body); +}; +export const se_UpdateContinuousBackupsCommand = async (input, context) => { + const headers = sharedHeaders("UpdateContinuousBackups"); + let body; + body = 
JSON.stringify(_json(input)); + return buildHttpRpcRequest(context, headers, "/", undefined, body); +}; +export const se_UpdateContributorInsightsCommand = async (input, context) => { + const headers = sharedHeaders("UpdateContributorInsights"); + let body; + body = JSON.stringify(_json(input)); + return buildHttpRpcRequest(context, headers, "/", undefined, body); +}; +export const se_UpdateGlobalTableCommand = async (input, context) => { + const headers = sharedHeaders("UpdateGlobalTable"); + let body; + body = JSON.stringify(_json(input)); + return buildHttpRpcRequest(context, headers, "/", undefined, body); +}; +export const se_UpdateGlobalTableSettingsCommand = async (input, context) => { + const headers = sharedHeaders("UpdateGlobalTableSettings"); + let body; + body = JSON.stringify(se_UpdateGlobalTableSettingsInput(input, context)); + return buildHttpRpcRequest(context, headers, "/", undefined, body); +}; +export const se_UpdateItemCommand = async (input, context) => { + const headers = sharedHeaders("UpdateItem"); + let body; + body = JSON.stringify(se_UpdateItemInput(input, context)); + return buildHttpRpcRequest(context, headers, "/", undefined, body); +}; +export const se_UpdateKinesisStreamingDestinationCommand = async (input, context) => { + const headers = sharedHeaders("UpdateKinesisStreamingDestination"); + let body; + body = JSON.stringify(_json(input)); + return buildHttpRpcRequest(context, headers, "/", undefined, body); +}; +export const se_UpdateTableCommand = async (input, context) => { + const headers = sharedHeaders("UpdateTable"); + let body; + body = JSON.stringify(_json(input)); + return buildHttpRpcRequest(context, headers, "/", undefined, body); +}; +export const se_UpdateTableReplicaAutoScalingCommand = async (input, context) => { + const headers = sharedHeaders("UpdateTableReplicaAutoScaling"); + let body; + body = JSON.stringify(se_UpdateTableReplicaAutoScalingInput(input, context)); + return buildHttpRpcRequest(context, headers, 
"/", undefined, body); +}; +export const se_UpdateTimeToLiveCommand = async (input, context) => { + const headers = sharedHeaders("UpdateTimeToLive"); + let body; + body = JSON.stringify(_json(input)); + return buildHttpRpcRequest(context, headers, "/", undefined, body); +}; +export const de_BatchExecuteStatementCommand = async (output, context) => { + if (output.statusCode >= 300) { + return de_CommandError(output, context); + } + const data = await parseBody(output.body, context); + let contents = {}; + contents = de_BatchExecuteStatementOutput(data, context); + const response = { + $metadata: deserializeMetadata(output), + ...contents, + }; + return response; +}; +export const de_BatchGetItemCommand = async (output, context) => { + if (output.statusCode >= 300) { + return de_CommandError(output, context); + } + const data = await parseBody(output.body, context); + let contents = {}; + contents = de_BatchGetItemOutput(data, context); + const response = { + $metadata: deserializeMetadata(output), + ...contents, + }; + return response; +}; +export const de_BatchWriteItemCommand = async (output, context) => { + if (output.statusCode >= 300) { + return de_CommandError(output, context); + } + const data = await parseBody(output.body, context); + let contents = {}; + contents = de_BatchWriteItemOutput(data, context); + const response = { + $metadata: deserializeMetadata(output), + ...contents, + }; + return response; +}; +export const de_CreateBackupCommand = async (output, context) => { + if (output.statusCode >= 300) { + return de_CommandError(output, context); + } + const data = await parseBody(output.body, context); + let contents = {}; + contents = de_CreateBackupOutput(data, context); + const response = { + $metadata: deserializeMetadata(output), + ...contents, + }; + return response; +}; +export const de_CreateGlobalTableCommand = async (output, context) => { + if (output.statusCode >= 300) { + return de_CommandError(output, context); + } + const data = await 
parseBody(output.body, context); + let contents = {}; + contents = de_CreateGlobalTableOutput(data, context); + const response = { + $metadata: deserializeMetadata(output), + ...contents, + }; + return response; +}; +export const de_CreateTableCommand = async (output, context) => { + if (output.statusCode >= 300) { + return de_CommandError(output, context); + } + const data = await parseBody(output.body, context); + let contents = {}; + contents = de_CreateTableOutput(data, context); + const response = { + $metadata: deserializeMetadata(output), + ...contents, + }; + return response; +}; +export const de_DeleteBackupCommand = async (output, context) => { + if (output.statusCode >= 300) { + return de_CommandError(output, context); + } + const data = await parseBody(output.body, context); + let contents = {}; + contents = de_DeleteBackupOutput(data, context); + const response = { + $metadata: deserializeMetadata(output), + ...contents, + }; + return response; +}; +export const de_DeleteItemCommand = async (output, context) => { + if (output.statusCode >= 300) { + return de_CommandError(output, context); + } + const data = await parseBody(output.body, context); + let contents = {}; + contents = de_DeleteItemOutput(data, context); + const response = { + $metadata: deserializeMetadata(output), + ...contents, + }; + return response; +}; +export const de_DeleteResourcePolicyCommand = async (output, context) => { + if (output.statusCode >= 300) { + return de_CommandError(output, context); + } + const data = await parseBody(output.body, context); + let contents = {}; + contents = _json(data); + const response = { + $metadata: deserializeMetadata(output), + ...contents, + }; + return response; +}; +export const de_DeleteTableCommand = async (output, context) => { + if (output.statusCode >= 300) { + return de_CommandError(output, context); + } + const data = await parseBody(output.body, context); + let contents = {}; + contents = de_DeleteTableOutput(data, context); + const 
response = { + $metadata: deserializeMetadata(output), + ...contents, + }; + return response; +}; +export const de_DescribeBackupCommand = async (output, context) => { + if (output.statusCode >= 300) { + return de_CommandError(output, context); + } + const data = await parseBody(output.body, context); + let contents = {}; + contents = de_DescribeBackupOutput(data, context); + const response = { + $metadata: deserializeMetadata(output), + ...contents, + }; + return response; +}; +export const de_DescribeContinuousBackupsCommand = async (output, context) => { + if (output.statusCode >= 300) { + return de_CommandError(output, context); + } + const data = await parseBody(output.body, context); + let contents = {}; + contents = de_DescribeContinuousBackupsOutput(data, context); + const response = { + $metadata: deserializeMetadata(output), + ...contents, + }; + return response; +}; +export const de_DescribeContributorInsightsCommand = async (output, context) => { + if (output.statusCode >= 300) { + return de_CommandError(output, context); + } + const data = await parseBody(output.body, context); + let contents = {}; + contents = de_DescribeContributorInsightsOutput(data, context); + const response = { + $metadata: deserializeMetadata(output), + ...contents, + }; + return response; +}; +export const de_DescribeEndpointsCommand = async (output, context) => { + if (output.statusCode >= 300) { + return de_CommandError(output, context); + } + const data = await parseBody(output.body, context); + let contents = {}; + contents = _json(data); + const response = { + $metadata: deserializeMetadata(output), + ...contents, + }; + return response; +}; +export const de_DescribeExportCommand = async (output, context) => { + if (output.statusCode >= 300) { + return de_CommandError(output, context); + } + const data = await parseBody(output.body, context); + let contents = {}; + contents = de_DescribeExportOutput(data, context); + const response = { + $metadata: 
deserializeMetadata(output), + ...contents, + }; + return response; +}; +export const de_DescribeGlobalTableCommand = async (output, context) => { + if (output.statusCode >= 300) { + return de_CommandError(output, context); + } + const data = await parseBody(output.body, context); + let contents = {}; + contents = de_DescribeGlobalTableOutput(data, context); + const response = { + $metadata: deserializeMetadata(output), + ...contents, + }; + return response; +}; +export const de_DescribeGlobalTableSettingsCommand = async (output, context) => { + if (output.statusCode >= 300) { + return de_CommandError(output, context); + } + const data = await parseBody(output.body, context); + let contents = {}; + contents = de_DescribeGlobalTableSettingsOutput(data, context); + const response = { + $metadata: deserializeMetadata(output), + ...contents, + }; + return response; +}; +export const de_DescribeImportCommand = async (output, context) => { + if (output.statusCode >= 300) { + return de_CommandError(output, context); + } + const data = await parseBody(output.body, context); + let contents = {}; + contents = de_DescribeImportOutput(data, context); + const response = { + $metadata: deserializeMetadata(output), + ...contents, + }; + return response; +}; +export const de_DescribeKinesisStreamingDestinationCommand = async (output, context) => { + if (output.statusCode >= 300) { + return de_CommandError(output, context); + } + const data = await parseBody(output.body, context); + let contents = {}; + contents = _json(data); + const response = { + $metadata: deserializeMetadata(output), + ...contents, + }; + return response; +}; +export const de_DescribeLimitsCommand = async (output, context) => { + if (output.statusCode >= 300) { + return de_CommandError(output, context); + } + const data = await parseBody(output.body, context); + let contents = {}; + contents = _json(data); + const response = { + $metadata: deserializeMetadata(output), + ...contents, + }; + return response; +}; 
+export const de_DescribeTableCommand = async (output, context) => { + if (output.statusCode >= 300) { + return de_CommandError(output, context); + } + const data = await parseBody(output.body, context); + let contents = {}; + contents = de_DescribeTableOutput(data, context); + const response = { + $metadata: deserializeMetadata(output), + ...contents, + }; + return response; +}; +export const de_DescribeTableReplicaAutoScalingCommand = async (output, context) => { + if (output.statusCode >= 300) { + return de_CommandError(output, context); + } + const data = await parseBody(output.body, context); + let contents = {}; + contents = de_DescribeTableReplicaAutoScalingOutput(data, context); + const response = { + $metadata: deserializeMetadata(output), + ...contents, + }; + return response; +}; +export const de_DescribeTimeToLiveCommand = async (output, context) => { + if (output.statusCode >= 300) { + return de_CommandError(output, context); + } + const data = await parseBody(output.body, context); + let contents = {}; + contents = _json(data); + const response = { + $metadata: deserializeMetadata(output), + ...contents, + }; + return response; +}; +export const de_DisableKinesisStreamingDestinationCommand = async (output, context) => { + if (output.statusCode >= 300) { + return de_CommandError(output, context); + } + const data = await parseBody(output.body, context); + let contents = {}; + contents = _json(data); + const response = { + $metadata: deserializeMetadata(output), + ...contents, + }; + return response; +}; +export const de_EnableKinesisStreamingDestinationCommand = async (output, context) => { + if (output.statusCode >= 300) { + return de_CommandError(output, context); + } + const data = await parseBody(output.body, context); + let contents = {}; + contents = _json(data); + const response = { + $metadata: deserializeMetadata(output), + ...contents, + }; + return response; +}; +export const de_ExecuteStatementCommand = async (output, context) => { + if 
(output.statusCode >= 300) { + return de_CommandError(output, context); + } + const data = await parseBody(output.body, context); + let contents = {}; + contents = de_ExecuteStatementOutput(data, context); + const response = { + $metadata: deserializeMetadata(output), + ...contents, + }; + return response; +}; +export const de_ExecuteTransactionCommand = async (output, context) => { + if (output.statusCode >= 300) { + return de_CommandError(output, context); + } + const data = await parseBody(output.body, context); + let contents = {}; + contents = de_ExecuteTransactionOutput(data, context); + const response = { + $metadata: deserializeMetadata(output), + ...contents, + }; + return response; +}; +export const de_ExportTableToPointInTimeCommand = async (output, context) => { + if (output.statusCode >= 300) { + return de_CommandError(output, context); + } + const data = await parseBody(output.body, context); + let contents = {}; + contents = de_ExportTableToPointInTimeOutput(data, context); + const response = { + $metadata: deserializeMetadata(output), + ...contents, + }; + return response; +}; +export const de_GetItemCommand = async (output, context) => { + if (output.statusCode >= 300) { + return de_CommandError(output, context); + } + const data = await parseBody(output.body, context); + let contents = {}; + contents = de_GetItemOutput(data, context); + const response = { + $metadata: deserializeMetadata(output), + ...contents, + }; + return response; +}; +export const de_GetResourcePolicyCommand = async (output, context) => { + if (output.statusCode >= 300) { + return de_CommandError(output, context); + } + const data = await parseBody(output.body, context); + let contents = {}; + contents = _json(data); + const response = { + $metadata: deserializeMetadata(output), + ...contents, + }; + return response; +}; +export const de_ImportTableCommand = async (output, context) => { + if (output.statusCode >= 300) { + return de_CommandError(output, context); + } + const 
data = await parseBody(output.body, context); + let contents = {}; + contents = de_ImportTableOutput(data, context); + const response = { + $metadata: deserializeMetadata(output), + ...contents, + }; + return response; +}; +export const de_ListBackupsCommand = async (output, context) => { + if (output.statusCode >= 300) { + return de_CommandError(output, context); + } + const data = await parseBody(output.body, context); + let contents = {}; + contents = de_ListBackupsOutput(data, context); + const response = { + $metadata: deserializeMetadata(output), + ...contents, + }; + return response; +}; +export const de_ListContributorInsightsCommand = async (output, context) => { + if (output.statusCode >= 300) { + return de_CommandError(output, context); + } + const data = await parseBody(output.body, context); + let contents = {}; + contents = _json(data); + const response = { + $metadata: deserializeMetadata(output), + ...contents, + }; + return response; +}; +export const de_ListExportsCommand = async (output, context) => { + if (output.statusCode >= 300) { + return de_CommandError(output, context); + } + const data = await parseBody(output.body, context); + let contents = {}; + contents = _json(data); + const response = { + $metadata: deserializeMetadata(output), + ...contents, + }; + return response; +}; +export const de_ListGlobalTablesCommand = async (output, context) => { + if (output.statusCode >= 300) { + return de_CommandError(output, context); + } + const data = await parseBody(output.body, context); + let contents = {}; + contents = _json(data); + const response = { + $metadata: deserializeMetadata(output), + ...contents, + }; + return response; +}; +export const de_ListImportsCommand = async (output, context) => { + if (output.statusCode >= 300) { + return de_CommandError(output, context); + } + const data = await parseBody(output.body, context); + let contents = {}; + contents = de_ListImportsOutput(data, context); + const response = { + $metadata: 
deserializeMetadata(output), + ...contents, + }; + return response; +}; +export const de_ListTablesCommand = async (output, context) => { + if (output.statusCode >= 300) { + return de_CommandError(output, context); + } + const data = await parseBody(output.body, context); + let contents = {}; + contents = _json(data); + const response = { + $metadata: deserializeMetadata(output), + ...contents, + }; + return response; +}; +export const de_ListTagsOfResourceCommand = async (output, context) => { + if (output.statusCode >= 300) { + return de_CommandError(output, context); + } + const data = await parseBody(output.body, context); + let contents = {}; + contents = _json(data); + const response = { + $metadata: deserializeMetadata(output), + ...contents, + }; + return response; +}; +export const de_PutItemCommand = async (output, context) => { + if (output.statusCode >= 300) { + return de_CommandError(output, context); + } + const data = await parseBody(output.body, context); + let contents = {}; + contents = de_PutItemOutput(data, context); + const response = { + $metadata: deserializeMetadata(output), + ...contents, + }; + return response; +}; +export const de_PutResourcePolicyCommand = async (output, context) => { + if (output.statusCode >= 300) { + return de_CommandError(output, context); + } + const data = await parseBody(output.body, context); + let contents = {}; + contents = _json(data); + const response = { + $metadata: deserializeMetadata(output), + ...contents, + }; + return response; +}; +export const de_QueryCommand = async (output, context) => { + if (output.statusCode >= 300) { + return de_CommandError(output, context); + } + const data = await parseBody(output.body, context); + let contents = {}; + contents = de_QueryOutput(data, context); + const response = { + $metadata: deserializeMetadata(output), + ...contents, + }; + return response; +}; +export const de_RestoreTableFromBackupCommand = async (output, context) => { + if (output.statusCode >= 300) { 
+ return de_CommandError(output, context); + } + const data = await parseBody(output.body, context); + let contents = {}; + contents = de_RestoreTableFromBackupOutput(data, context); + const response = { + $metadata: deserializeMetadata(output), + ...contents, + }; + return response; +}; +export const de_RestoreTableToPointInTimeCommand = async (output, context) => { + if (output.statusCode >= 300) { + return de_CommandError(output, context); + } + const data = await parseBody(output.body, context); + let contents = {}; + contents = de_RestoreTableToPointInTimeOutput(data, context); + const response = { + $metadata: deserializeMetadata(output), + ...contents, + }; + return response; +}; +export const de_ScanCommand = async (output, context) => { + if (output.statusCode >= 300) { + return de_CommandError(output, context); + } + const data = await parseBody(output.body, context); + let contents = {}; + contents = de_ScanOutput(data, context); + const response = { + $metadata: deserializeMetadata(output), + ...contents, + }; + return response; +}; +export const de_TagResourceCommand = async (output, context) => { + if (output.statusCode >= 300) { + return de_CommandError(output, context); + } + await collectBody(output.body, context); + const response = { + $metadata: deserializeMetadata(output), + }; + return response; +}; +export const de_TransactGetItemsCommand = async (output, context) => { + if (output.statusCode >= 300) { + return de_CommandError(output, context); + } + const data = await parseBody(output.body, context); + let contents = {}; + contents = de_TransactGetItemsOutput(data, context); + const response = { + $metadata: deserializeMetadata(output), + ...contents, + }; + return response; +}; +export const de_TransactWriteItemsCommand = async (output, context) => { + if (output.statusCode >= 300) { + return de_CommandError(output, context); + } + const data = await parseBody(output.body, context); + let contents = {}; + contents = 
de_TransactWriteItemsOutput(data, context); + const response = { + $metadata: deserializeMetadata(output), + ...contents, + }; + return response; +}; +export const de_UntagResourceCommand = async (output, context) => { + if (output.statusCode >= 300) { + return de_CommandError(output, context); + } + await collectBody(output.body, context); + const response = { + $metadata: deserializeMetadata(output), + }; + return response; +}; +export const de_UpdateContinuousBackupsCommand = async (output, context) => { + if (output.statusCode >= 300) { + return de_CommandError(output, context); + } + const data = await parseBody(output.body, context); + let contents = {}; + contents = de_UpdateContinuousBackupsOutput(data, context); + const response = { + $metadata: deserializeMetadata(output), + ...contents, + }; + return response; +}; +export const de_UpdateContributorInsightsCommand = async (output, context) => { + if (output.statusCode >= 300) { + return de_CommandError(output, context); + } + const data = await parseBody(output.body, context); + let contents = {}; + contents = _json(data); + const response = { + $metadata: deserializeMetadata(output), + ...contents, + }; + return response; +}; +export const de_UpdateGlobalTableCommand = async (output, context) => { + if (output.statusCode >= 300) { + return de_CommandError(output, context); + } + const data = await parseBody(output.body, context); + let contents = {}; + contents = de_UpdateGlobalTableOutput(data, context); + const response = { + $metadata: deserializeMetadata(output), + ...contents, + }; + return response; +}; +export const de_UpdateGlobalTableSettingsCommand = async (output, context) => { + if (output.statusCode >= 300) { + return de_CommandError(output, context); + } + const data = await parseBody(output.body, context); + let contents = {}; + contents = de_UpdateGlobalTableSettingsOutput(data, context); + const response = { + $metadata: deserializeMetadata(output), + ...contents, + }; + return response; 
+}; +export const de_UpdateItemCommand = async (output, context) => { + if (output.statusCode >= 300) { + return de_CommandError(output, context); + } + const data = await parseBody(output.body, context); + let contents = {}; + contents = de_UpdateItemOutput(data, context); + const response = { + $metadata: deserializeMetadata(output), + ...contents, + }; + return response; +}; +export const de_UpdateKinesisStreamingDestinationCommand = async (output, context) => { + if (output.statusCode >= 300) { + return de_CommandError(output, context); + } + const data = await parseBody(output.body, context); + let contents = {}; + contents = _json(data); + const response = { + $metadata: deserializeMetadata(output), + ...contents, + }; + return response; +}; +export const de_UpdateTableCommand = async (output, context) => { + if (output.statusCode >= 300) { + return de_CommandError(output, context); + } + const data = await parseBody(output.body, context); + let contents = {}; + contents = de_UpdateTableOutput(data, context); + const response = { + $metadata: deserializeMetadata(output), + ...contents, + }; + return response; +}; +export const de_UpdateTableReplicaAutoScalingCommand = async (output, context) => { + if (output.statusCode >= 300) { + return de_CommandError(output, context); + } + const data = await parseBody(output.body, context); + let contents = {}; + contents = de_UpdateTableReplicaAutoScalingOutput(data, context); + const response = { + $metadata: deserializeMetadata(output), + ...contents, + }; + return response; +}; +export const de_UpdateTimeToLiveCommand = async (output, context) => { + if (output.statusCode >= 300) { + return de_CommandError(output, context); + } + const data = await parseBody(output.body, context); + let contents = {}; + contents = _json(data); + const response = { + $metadata: deserializeMetadata(output), + ...contents, + }; + return response; +}; +const de_CommandError = async (output, context) => { + const parsedOutput = { + 
...output, + body: await parseErrorBody(output.body, context), + }; + const errorCode = loadRestJsonErrorCode(output, parsedOutput.body); + switch (errorCode) { + case "InternalServerError": + case "com.amazonaws.dynamodb#InternalServerError": + throw await de_InternalServerErrorRes(parsedOutput, context); + case "RequestLimitExceeded": + case "com.amazonaws.dynamodb#RequestLimitExceeded": + throw await de_RequestLimitExceededRes(parsedOutput, context); + case "InvalidEndpointException": + case "com.amazonaws.dynamodb#InvalidEndpointException": + throw await de_InvalidEndpointExceptionRes(parsedOutput, context); + case "ProvisionedThroughputExceededException": + case "com.amazonaws.dynamodb#ProvisionedThroughputExceededException": + throw await de_ProvisionedThroughputExceededExceptionRes(parsedOutput, context); + case "ResourceNotFoundException": + case "com.amazonaws.dynamodb#ResourceNotFoundException": + throw await de_ResourceNotFoundExceptionRes(parsedOutput, context); + case "ItemCollectionSizeLimitExceededException": + case "com.amazonaws.dynamodb#ItemCollectionSizeLimitExceededException": + throw await de_ItemCollectionSizeLimitExceededExceptionRes(parsedOutput, context); + case "BackupInUseException": + case "com.amazonaws.dynamodb#BackupInUseException": + throw await de_BackupInUseExceptionRes(parsedOutput, context); + case "ContinuousBackupsUnavailableException": + case "com.amazonaws.dynamodb#ContinuousBackupsUnavailableException": + throw await de_ContinuousBackupsUnavailableExceptionRes(parsedOutput, context); + case "LimitExceededException": + case "com.amazonaws.dynamodb#LimitExceededException": + throw await de_LimitExceededExceptionRes(parsedOutput, context); + case "TableInUseException": + case "com.amazonaws.dynamodb#TableInUseException": + throw await de_TableInUseExceptionRes(parsedOutput, context); + case "TableNotFoundException": + case "com.amazonaws.dynamodb#TableNotFoundException": + throw await de_TableNotFoundExceptionRes(parsedOutput, 
context); + case "GlobalTableAlreadyExistsException": + case "com.amazonaws.dynamodb#GlobalTableAlreadyExistsException": + throw await de_GlobalTableAlreadyExistsExceptionRes(parsedOutput, context); + case "ResourceInUseException": + case "com.amazonaws.dynamodb#ResourceInUseException": + throw await de_ResourceInUseExceptionRes(parsedOutput, context); + case "BackupNotFoundException": + case "com.amazonaws.dynamodb#BackupNotFoundException": + throw await de_BackupNotFoundExceptionRes(parsedOutput, context); + case "ConditionalCheckFailedException": + case "com.amazonaws.dynamodb#ConditionalCheckFailedException": + throw await de_ConditionalCheckFailedExceptionRes(parsedOutput, context); + case "ReplicatedWriteConflictException": + case "com.amazonaws.dynamodb#ReplicatedWriteConflictException": + throw await de_ReplicatedWriteConflictExceptionRes(parsedOutput, context); + case "TransactionConflictException": + case "com.amazonaws.dynamodb#TransactionConflictException": + throw await de_TransactionConflictExceptionRes(parsedOutput, context); + case "PolicyNotFoundException": + case "com.amazonaws.dynamodb#PolicyNotFoundException": + throw await de_PolicyNotFoundExceptionRes(parsedOutput, context); + case "ExportNotFoundException": + case "com.amazonaws.dynamodb#ExportNotFoundException": + throw await de_ExportNotFoundExceptionRes(parsedOutput, context); + case "GlobalTableNotFoundException": + case "com.amazonaws.dynamodb#GlobalTableNotFoundException": + throw await de_GlobalTableNotFoundExceptionRes(parsedOutput, context); + case "ImportNotFoundException": + case "com.amazonaws.dynamodb#ImportNotFoundException": + throw await de_ImportNotFoundExceptionRes(parsedOutput, context); + case "DuplicateItemException": + case "com.amazonaws.dynamodb#DuplicateItemException": + throw await de_DuplicateItemExceptionRes(parsedOutput, context); + case "IdempotentParameterMismatchException": + case "com.amazonaws.dynamodb#IdempotentParameterMismatchException": + throw await 
de_IdempotentParameterMismatchExceptionRes(parsedOutput, context); + case "TransactionCanceledException": + case "com.amazonaws.dynamodb#TransactionCanceledException": + throw await de_TransactionCanceledExceptionRes(parsedOutput, context); + case "TransactionInProgressException": + case "com.amazonaws.dynamodb#TransactionInProgressException": + throw await de_TransactionInProgressExceptionRes(parsedOutput, context); + case "ExportConflictException": + case "com.amazonaws.dynamodb#ExportConflictException": + throw await de_ExportConflictExceptionRes(parsedOutput, context); + case "InvalidExportTimeException": + case "com.amazonaws.dynamodb#InvalidExportTimeException": + throw await de_InvalidExportTimeExceptionRes(parsedOutput, context); + case "PointInTimeRecoveryUnavailableException": + case "com.amazonaws.dynamodb#PointInTimeRecoveryUnavailableException": + throw await de_PointInTimeRecoveryUnavailableExceptionRes(parsedOutput, context); + case "ImportConflictException": + case "com.amazonaws.dynamodb#ImportConflictException": + throw await de_ImportConflictExceptionRes(parsedOutput, context); + case "TableAlreadyExistsException": + case "com.amazonaws.dynamodb#TableAlreadyExistsException": + throw await de_TableAlreadyExistsExceptionRes(parsedOutput, context); + case "InvalidRestoreTimeException": + case "com.amazonaws.dynamodb#InvalidRestoreTimeException": + throw await de_InvalidRestoreTimeExceptionRes(parsedOutput, context); + case "ReplicaAlreadyExistsException": + case "com.amazonaws.dynamodb#ReplicaAlreadyExistsException": + throw await de_ReplicaAlreadyExistsExceptionRes(parsedOutput, context); + case "ReplicaNotFoundException": + case "com.amazonaws.dynamodb#ReplicaNotFoundException": + throw await de_ReplicaNotFoundExceptionRes(parsedOutput, context); + case "IndexNotFoundException": + case "com.amazonaws.dynamodb#IndexNotFoundException": + throw await de_IndexNotFoundExceptionRes(parsedOutput, context); + default: + const parsedBody = 
parsedOutput.body; + return throwDefaultError({ + output, + parsedBody, + errorCode, + }); + } +}; +const de_BackupInUseExceptionRes = async (parsedOutput, context) => { + const body = parsedOutput.body; + const deserialized = _json(body); + const exception = new BackupInUseException({ + $metadata: deserializeMetadata(parsedOutput), + ...deserialized, + }); + return __decorateServiceException(exception, body); +}; +const de_BackupNotFoundExceptionRes = async (parsedOutput, context) => { + const body = parsedOutput.body; + const deserialized = _json(body); + const exception = new BackupNotFoundException({ + $metadata: deserializeMetadata(parsedOutput), + ...deserialized, + }); + return __decorateServiceException(exception, body); +}; +const de_ConditionalCheckFailedExceptionRes = async (parsedOutput, context) => { + const body = parsedOutput.body; + const deserialized = de_ConditionalCheckFailedException(body, context); + const exception = new ConditionalCheckFailedException({ + $metadata: deserializeMetadata(parsedOutput), + ...deserialized, + }); + return __decorateServiceException(exception, body); +}; +const de_ContinuousBackupsUnavailableExceptionRes = async (parsedOutput, context) => { + const body = parsedOutput.body; + const deserialized = _json(body); + const exception = new ContinuousBackupsUnavailableException({ + $metadata: deserializeMetadata(parsedOutput), + ...deserialized, + }); + return __decorateServiceException(exception, body); +}; +const de_DuplicateItemExceptionRes = async (parsedOutput, context) => { + const body = parsedOutput.body; + const deserialized = _json(body); + const exception = new DuplicateItemException({ + $metadata: deserializeMetadata(parsedOutput), + ...deserialized, + }); + return __decorateServiceException(exception, body); +}; +const de_ExportConflictExceptionRes = async (parsedOutput, context) => { + const body = parsedOutput.body; + const deserialized = _json(body); + const exception = new ExportConflictException({ + 
$metadata: deserializeMetadata(parsedOutput), + ...deserialized, + }); + return __decorateServiceException(exception, body); +}; +const de_ExportNotFoundExceptionRes = async (parsedOutput, context) => { + const body = parsedOutput.body; + const deserialized = _json(body); + const exception = new ExportNotFoundException({ + $metadata: deserializeMetadata(parsedOutput), + ...deserialized, + }); + return __decorateServiceException(exception, body); +}; +const de_GlobalTableAlreadyExistsExceptionRes = async (parsedOutput, context) => { + const body = parsedOutput.body; + const deserialized = _json(body); + const exception = new GlobalTableAlreadyExistsException({ + $metadata: deserializeMetadata(parsedOutput), + ...deserialized, + }); + return __decorateServiceException(exception, body); +}; +const de_GlobalTableNotFoundExceptionRes = async (parsedOutput, context) => { + const body = parsedOutput.body; + const deserialized = _json(body); + const exception = new GlobalTableNotFoundException({ + $metadata: deserializeMetadata(parsedOutput), + ...deserialized, + }); + return __decorateServiceException(exception, body); +}; +const de_IdempotentParameterMismatchExceptionRes = async (parsedOutput, context) => { + const body = parsedOutput.body; + const deserialized = _json(body); + const exception = new IdempotentParameterMismatchException({ + $metadata: deserializeMetadata(parsedOutput), + ...deserialized, + }); + return __decorateServiceException(exception, body); +}; +const de_ImportConflictExceptionRes = async (parsedOutput, context) => { + const body = parsedOutput.body; + const deserialized = _json(body); + const exception = new ImportConflictException({ + $metadata: deserializeMetadata(parsedOutput), + ...deserialized, + }); + return __decorateServiceException(exception, body); +}; +const de_ImportNotFoundExceptionRes = async (parsedOutput, context) => { + const body = parsedOutput.body; + const deserialized = _json(body); + const exception = new 
ImportNotFoundException({ + $metadata: deserializeMetadata(parsedOutput), + ...deserialized, + }); + return __decorateServiceException(exception, body); +}; +const de_IndexNotFoundExceptionRes = async (parsedOutput, context) => { + const body = parsedOutput.body; + const deserialized = _json(body); + const exception = new IndexNotFoundException({ + $metadata: deserializeMetadata(parsedOutput), + ...deserialized, + }); + return __decorateServiceException(exception, body); +}; +const de_InternalServerErrorRes = async (parsedOutput, context) => { + const body = parsedOutput.body; + const deserialized = _json(body); + const exception = new InternalServerError({ + $metadata: deserializeMetadata(parsedOutput), + ...deserialized, + }); + return __decorateServiceException(exception, body); +}; +const de_InvalidEndpointExceptionRes = async (parsedOutput, context) => { + const body = parsedOutput.body; + const deserialized = _json(body); + const exception = new InvalidEndpointException({ + $metadata: deserializeMetadata(parsedOutput), + ...deserialized, + }); + return __decorateServiceException(exception, body); +}; +const de_InvalidExportTimeExceptionRes = async (parsedOutput, context) => { + const body = parsedOutput.body; + const deserialized = _json(body); + const exception = new InvalidExportTimeException({ + $metadata: deserializeMetadata(parsedOutput), + ...deserialized, + }); + return __decorateServiceException(exception, body); +}; +const de_InvalidRestoreTimeExceptionRes = async (parsedOutput, context) => { + const body = parsedOutput.body; + const deserialized = _json(body); + const exception = new InvalidRestoreTimeException({ + $metadata: deserializeMetadata(parsedOutput), + ...deserialized, + }); + return __decorateServiceException(exception, body); +}; +const de_ItemCollectionSizeLimitExceededExceptionRes = async (parsedOutput, context) => { + const body = parsedOutput.body; + const deserialized = _json(body); + const exception = new 
ItemCollectionSizeLimitExceededException({ + $metadata: deserializeMetadata(parsedOutput), + ...deserialized, + }); + return __decorateServiceException(exception, body); +}; +const de_LimitExceededExceptionRes = async (parsedOutput, context) => { + const body = parsedOutput.body; + const deserialized = _json(body); + const exception = new LimitExceededException({ + $metadata: deserializeMetadata(parsedOutput), + ...deserialized, + }); + return __decorateServiceException(exception, body); +}; +const de_PointInTimeRecoveryUnavailableExceptionRes = async (parsedOutput, context) => { + const body = parsedOutput.body; + const deserialized = _json(body); + const exception = new PointInTimeRecoveryUnavailableException({ + $metadata: deserializeMetadata(parsedOutput), + ...deserialized, + }); + return __decorateServiceException(exception, body); +}; +const de_PolicyNotFoundExceptionRes = async (parsedOutput, context) => { + const body = parsedOutput.body; + const deserialized = _json(body); + const exception = new PolicyNotFoundException({ + $metadata: deserializeMetadata(parsedOutput), + ...deserialized, + }); + return __decorateServiceException(exception, body); +}; +const de_ProvisionedThroughputExceededExceptionRes = async (parsedOutput, context) => { + const body = parsedOutput.body; + const deserialized = _json(body); + const exception = new ProvisionedThroughputExceededException({ + $metadata: deserializeMetadata(parsedOutput), + ...deserialized, + }); + return __decorateServiceException(exception, body); +}; +const de_ReplicaAlreadyExistsExceptionRes = async (parsedOutput, context) => { + const body = parsedOutput.body; + const deserialized = _json(body); + const exception = new ReplicaAlreadyExistsException({ + $metadata: deserializeMetadata(parsedOutput), + ...deserialized, + }); + return __decorateServiceException(exception, body); +}; +const de_ReplicaNotFoundExceptionRes = async (parsedOutput, context) => { + const body = parsedOutput.body; + const 
deserialized = _json(body); + const exception = new ReplicaNotFoundException({ + $metadata: deserializeMetadata(parsedOutput), + ...deserialized, + }); + return __decorateServiceException(exception, body); +}; +const de_ReplicatedWriteConflictExceptionRes = async (parsedOutput, context) => { + const body = parsedOutput.body; + const deserialized = _json(body); + const exception = new ReplicatedWriteConflictException({ + $metadata: deserializeMetadata(parsedOutput), + ...deserialized, + }); + return __decorateServiceException(exception, body); +}; +const de_RequestLimitExceededRes = async (parsedOutput, context) => { + const body = parsedOutput.body; + const deserialized = _json(body); + const exception = new RequestLimitExceeded({ + $metadata: deserializeMetadata(parsedOutput), + ...deserialized, + }); + return __decorateServiceException(exception, body); +}; +const de_ResourceInUseExceptionRes = async (parsedOutput, context) => { + const body = parsedOutput.body; + const deserialized = _json(body); + const exception = new ResourceInUseException({ + $metadata: deserializeMetadata(parsedOutput), + ...deserialized, + }); + return __decorateServiceException(exception, body); +}; +const de_ResourceNotFoundExceptionRes = async (parsedOutput, context) => { + const body = parsedOutput.body; + const deserialized = _json(body); + const exception = new ResourceNotFoundException({ + $metadata: deserializeMetadata(parsedOutput), + ...deserialized, + }); + return __decorateServiceException(exception, body); +}; +const de_TableAlreadyExistsExceptionRes = async (parsedOutput, context) => { + const body = parsedOutput.body; + const deserialized = _json(body); + const exception = new TableAlreadyExistsException({ + $metadata: deserializeMetadata(parsedOutput), + ...deserialized, + }); + return __decorateServiceException(exception, body); +}; +const de_TableInUseExceptionRes = async (parsedOutput, context) => { + const body = parsedOutput.body; + const deserialized = _json(body); + 
const exception = new TableInUseException({ + $metadata: deserializeMetadata(parsedOutput), + ...deserialized, + }); + return __decorateServiceException(exception, body); +}; +const de_TableNotFoundExceptionRes = async (parsedOutput, context) => { + const body = parsedOutput.body; + const deserialized = _json(body); + const exception = new TableNotFoundException({ + $metadata: deserializeMetadata(parsedOutput), + ...deserialized, + }); + return __decorateServiceException(exception, body); +}; +const de_TransactionCanceledExceptionRes = async (parsedOutput, context) => { + const body = parsedOutput.body; + const deserialized = de_TransactionCanceledException(body, context); + const exception = new TransactionCanceledException({ + $metadata: deserializeMetadata(parsedOutput), + ...deserialized, + }); + return __decorateServiceException(exception, body); +}; +const de_TransactionConflictExceptionRes = async (parsedOutput, context) => { + const body = parsedOutput.body; + const deserialized = _json(body); + const exception = new TransactionConflictException({ + $metadata: deserializeMetadata(parsedOutput), + ...deserialized, + }); + return __decorateServiceException(exception, body); +}; +const de_TransactionInProgressExceptionRes = async (parsedOutput, context) => { + const body = parsedOutput.body; + const deserialized = _json(body); + const exception = new TransactionInProgressException({ + $metadata: deserializeMetadata(parsedOutput), + ...deserialized, + }); + return __decorateServiceException(exception, body); +}; +const se_AttributeUpdates = (input, context) => { + return Object.entries(input).reduce((acc, [key, value]) => { + if (value === null) { + return acc; + } + acc[key] = se_AttributeValueUpdate(value, context); + return acc; + }, {}); +}; +const se_AttributeValue = (input, context) => { + return AttributeValue.visit(input, { + B: (value) => ({ B: context.base64Encoder(value) }), + BOOL: (value) => ({ BOOL: value }), + BS: (value) => ({ BS: 
se_BinarySetAttributeValue(value, context) }), + L: (value) => ({ L: se_ListAttributeValue(value, context) }), + M: (value) => ({ M: se_MapAttributeValue(value, context) }), + N: (value) => ({ N: value }), + NS: (value) => ({ NS: _json(value) }), + NULL: (value) => ({ NULL: value }), + S: (value) => ({ S: value }), + SS: (value) => ({ SS: _json(value) }), + _: (name, value) => ({ [name]: value }), + }); +}; +const se_AttributeValueList = (input, context) => { + return input + .filter((e) => e != null) + .map((entry) => { + return se_AttributeValue(entry, context); + }); +}; +const se_AttributeValueUpdate = (input, context) => { + return take(input, { + Action: [], + Value: (_) => se_AttributeValue(_, context), + }); +}; +const se_AutoScalingPolicyUpdate = (input, context) => { + return take(input, { + PolicyName: [], + TargetTrackingScalingPolicyConfiguration: (_) => se_AutoScalingTargetTrackingScalingPolicyConfigurationUpdate(_, context), + }); +}; +const se_AutoScalingSettingsUpdate = (input, context) => { + return take(input, { + AutoScalingDisabled: [], + AutoScalingRoleArn: [], + MaximumUnits: [], + MinimumUnits: [], + ScalingPolicyUpdate: (_) => se_AutoScalingPolicyUpdate(_, context), + }); +}; +const se_AutoScalingTargetTrackingScalingPolicyConfigurationUpdate = (input, context) => { + return take(input, { + DisableScaleIn: [], + ScaleInCooldown: [], + ScaleOutCooldown: [], + TargetValue: __serializeFloat, + }); +}; +const se_BatchExecuteStatementInput = (input, context) => { + return take(input, { + ReturnConsumedCapacity: [], + Statements: (_) => se_PartiQLBatchRequest(_, context), + }); +}; +const se_BatchGetItemInput = (input, context) => { + return take(input, { + RequestItems: (_) => se_BatchGetRequestMap(_, context), + ReturnConsumedCapacity: [], + }); +}; +const se_BatchGetRequestMap = (input, context) => { + return Object.entries(input).reduce((acc, [key, value]) => { + if (value === null) { + return acc; + } + acc[key] = se_KeysAndAttributes(value, 
context); + return acc; + }, {}); +}; +const se_BatchStatementRequest = (input, context) => { + return take(input, { + ConsistentRead: [], + Parameters: (_) => se_PreparedStatementParameters(_, context), + ReturnValuesOnConditionCheckFailure: [], + Statement: [], + }); +}; +const se_BatchWriteItemInput = (input, context) => { + return take(input, { + RequestItems: (_) => se_BatchWriteItemRequestMap(_, context), + ReturnConsumedCapacity: [], + ReturnItemCollectionMetrics: [], + }); +}; +const se_BatchWriteItemRequestMap = (input, context) => { + return Object.entries(input).reduce((acc, [key, value]) => { + if (value === null) { + return acc; + } + acc[key] = se_WriteRequests(value, context); + return acc; + }, {}); +}; +const se_BinarySetAttributeValue = (input, context) => { + return input + .filter((e) => e != null) + .map((entry) => { + return context.base64Encoder(entry); + }); +}; +const se_Condition = (input, context) => { + return take(input, { + AttributeValueList: (_) => se_AttributeValueList(_, context), + ComparisonOperator: [], + }); +}; +const se_ConditionCheck = (input, context) => { + return take(input, { + ConditionExpression: [], + ExpressionAttributeNames: _json, + ExpressionAttributeValues: (_) => se_ExpressionAttributeValueMap(_, context), + Key: (_) => se_Key(_, context), + ReturnValuesOnConditionCheckFailure: [], + TableName: [], + }); +}; +const se_Delete = (input, context) => { + return take(input, { + ConditionExpression: [], + ExpressionAttributeNames: _json, + ExpressionAttributeValues: (_) => se_ExpressionAttributeValueMap(_, context), + Key: (_) => se_Key(_, context), + ReturnValuesOnConditionCheckFailure: [], + TableName: [], + }); +}; +const se_DeleteItemInput = (input, context) => { + return take(input, { + ConditionExpression: [], + ConditionalOperator: [], + Expected: (_) => se_ExpectedAttributeMap(_, context), + ExpressionAttributeNames: _json, + ExpressionAttributeValues: (_) => se_ExpressionAttributeValueMap(_, context), + Key: 
(_) => se_Key(_, context), + ReturnConsumedCapacity: [], + ReturnItemCollectionMetrics: [], + ReturnValues: [], + ReturnValuesOnConditionCheckFailure: [], + TableName: [], + }); +}; +const se_DeleteRequest = (input, context) => { + return take(input, { + Key: (_) => se_Key(_, context), + }); +}; +const se_ExecuteStatementInput = (input, context) => { + return take(input, { + ConsistentRead: [], + Limit: [], + NextToken: [], + Parameters: (_) => se_PreparedStatementParameters(_, context), + ReturnConsumedCapacity: [], + ReturnValuesOnConditionCheckFailure: [], + Statement: [], + }); +}; +const se_ExecuteTransactionInput = (input, context) => { + return take(input, { + ClientRequestToken: [true, (_) => _ ?? generateIdempotencyToken()], + ReturnConsumedCapacity: [], + TransactStatements: (_) => se_ParameterizedStatements(_, context), + }); +}; +const se_ExpectedAttributeMap = (input, context) => { + return Object.entries(input).reduce((acc, [key, value]) => { + if (value === null) { + return acc; + } + acc[key] = se_ExpectedAttributeValue(value, context); + return acc; + }, {}); +}; +const se_ExpectedAttributeValue = (input, context) => { + return take(input, { + AttributeValueList: (_) => se_AttributeValueList(_, context), + ComparisonOperator: [], + Exists: [], + Value: (_) => se_AttributeValue(_, context), + }); +}; +const se_ExportTableToPointInTimeInput = (input, context) => { + return take(input, { + ClientToken: [true, (_) => _ ?? 
generateIdempotencyToken()], + ExportFormat: [], + ExportTime: (_) => _.getTime() / 1000, + ExportType: [], + IncrementalExportSpecification: (_) => se_IncrementalExportSpecification(_, context), + S3Bucket: [], + S3BucketOwner: [], + S3Prefix: [], + S3SseAlgorithm: [], + S3SseKmsKeyId: [], + TableArn: [], + }); +}; +const se_ExpressionAttributeValueMap = (input, context) => { + return Object.entries(input).reduce((acc, [key, value]) => { + if (value === null) { + return acc; + } + acc[key] = se_AttributeValue(value, context); + return acc; + }, {}); +}; +const se_FilterConditionMap = (input, context) => { + return Object.entries(input).reduce((acc, [key, value]) => { + if (value === null) { + return acc; + } + acc[key] = se_Condition(value, context); + return acc; + }, {}); +}; +const se_Get = (input, context) => { + return take(input, { + ExpressionAttributeNames: _json, + Key: (_) => se_Key(_, context), + ProjectionExpression: [], + TableName: [], + }); +}; +const se_GetItemInput = (input, context) => { + return take(input, { + AttributesToGet: _json, + ConsistentRead: [], + ExpressionAttributeNames: _json, + Key: (_) => se_Key(_, context), + ProjectionExpression: [], + ReturnConsumedCapacity: [], + TableName: [], + }); +}; +const se_GlobalSecondaryIndexAutoScalingUpdate = (input, context) => { + return take(input, { + IndexName: [], + ProvisionedWriteCapacityAutoScalingUpdate: (_) => se_AutoScalingSettingsUpdate(_, context), + }); +}; +const se_GlobalSecondaryIndexAutoScalingUpdateList = (input, context) => { + return input + .filter((e) => e != null) + .map((entry) => { + return se_GlobalSecondaryIndexAutoScalingUpdate(entry, context); + }); +}; +const se_GlobalTableGlobalSecondaryIndexSettingsUpdate = (input, context) => { + return take(input, { + IndexName: [], + ProvisionedWriteCapacityAutoScalingSettingsUpdate: (_) => se_AutoScalingSettingsUpdate(_, context), + ProvisionedWriteCapacityUnits: [], + }); +}; +const 
se_GlobalTableGlobalSecondaryIndexSettingsUpdateList = (input, context) => { + return input + .filter((e) => e != null) + .map((entry) => { + return se_GlobalTableGlobalSecondaryIndexSettingsUpdate(entry, context); + }); +}; +const se_ImportTableInput = (input, context) => { + return take(input, { + ClientToken: [true, (_) => _ ?? generateIdempotencyToken()], + InputCompressionType: [], + InputFormat: [], + InputFormatOptions: _json, + S3BucketSource: _json, + TableCreationParameters: _json, + }); +}; +const se_IncrementalExportSpecification = (input, context) => { + return take(input, { + ExportFromTime: (_) => _.getTime() / 1000, + ExportToTime: (_) => _.getTime() / 1000, + ExportViewType: [], + }); +}; +const se_Key = (input, context) => { + return Object.entries(input).reduce((acc, [key, value]) => { + if (value === null) { + return acc; + } + acc[key] = se_AttributeValue(value, context); + return acc; + }, {}); +}; +const se_KeyConditions = (input, context) => { + return Object.entries(input).reduce((acc, [key, value]) => { + if (value === null) { + return acc; + } + acc[key] = se_Condition(value, context); + return acc; + }, {}); +}; +const se_KeyList = (input, context) => { + return input + .filter((e) => e != null) + .map((entry) => { + return se_Key(entry, context); + }); +}; +const se_KeysAndAttributes = (input, context) => { + return take(input, { + AttributesToGet: _json, + ConsistentRead: [], + ExpressionAttributeNames: _json, + Keys: (_) => se_KeyList(_, context), + ProjectionExpression: [], + }); +}; +const se_ListAttributeValue = (input, context) => { + return input + .filter((e) => e != null) + .map((entry) => { + return se_AttributeValue(entry, context); + }); +}; +const se_ListBackupsInput = (input, context) => { + return take(input, { + BackupType: [], + ExclusiveStartBackupArn: [], + Limit: [], + TableName: [], + TimeRangeLowerBound: (_) => _.getTime() / 1000, + TimeRangeUpperBound: (_) => _.getTime() / 1000, + }); +}; +const 
se_MapAttributeValue = (input, context) => { + return Object.entries(input).reduce((acc, [key, value]) => { + if (value === null) { + return acc; + } + acc[key] = se_AttributeValue(value, context); + return acc; + }, {}); +}; +const se_ParameterizedStatement = (input, context) => { + return take(input, { + Parameters: (_) => se_PreparedStatementParameters(_, context), + ReturnValuesOnConditionCheckFailure: [], + Statement: [], + }); +}; +const se_ParameterizedStatements = (input, context) => { + return input + .filter((e) => e != null) + .map((entry) => { + return se_ParameterizedStatement(entry, context); + }); +}; +const se_PartiQLBatchRequest = (input, context) => { + return input + .filter((e) => e != null) + .map((entry) => { + return se_BatchStatementRequest(entry, context); + }); +}; +const se_PreparedStatementParameters = (input, context) => { + return input + .filter((e) => e != null) + .map((entry) => { + return se_AttributeValue(entry, context); + }); +}; +const se_Put = (input, context) => { + return take(input, { + ConditionExpression: [], + ExpressionAttributeNames: _json, + ExpressionAttributeValues: (_) => se_ExpressionAttributeValueMap(_, context), + Item: (_) => se_PutItemInputAttributeMap(_, context), + ReturnValuesOnConditionCheckFailure: [], + TableName: [], + }); +}; +const se_PutItemInput = (input, context) => { + return take(input, { + ConditionExpression: [], + ConditionalOperator: [], + Expected: (_) => se_ExpectedAttributeMap(_, context), + ExpressionAttributeNames: _json, + ExpressionAttributeValues: (_) => se_ExpressionAttributeValueMap(_, context), + Item: (_) => se_PutItemInputAttributeMap(_, context), + ReturnConsumedCapacity: [], + ReturnItemCollectionMetrics: [], + ReturnValues: [], + ReturnValuesOnConditionCheckFailure: [], + TableName: [], + }); +}; +const se_PutItemInputAttributeMap = (input, context) => { + return Object.entries(input).reduce((acc, [key, value]) => { + if (value === null) { + return acc; + } + acc[key] = 
se_AttributeValue(value, context); + return acc; + }, {}); +}; +const se_PutRequest = (input, context) => { + return take(input, { + Item: (_) => se_PutItemInputAttributeMap(_, context), + }); +}; +const se_QueryInput = (input, context) => { + return take(input, { + AttributesToGet: _json, + ConditionalOperator: [], + ConsistentRead: [], + ExclusiveStartKey: (_) => se_Key(_, context), + ExpressionAttributeNames: _json, + ExpressionAttributeValues: (_) => se_ExpressionAttributeValueMap(_, context), + FilterExpression: [], + IndexName: [], + KeyConditionExpression: [], + KeyConditions: (_) => se_KeyConditions(_, context), + Limit: [], + ProjectionExpression: [], + QueryFilter: (_) => se_FilterConditionMap(_, context), + ReturnConsumedCapacity: [], + ScanIndexForward: [], + Select: [], + TableName: [], + }); +}; +const se_ReplicaAutoScalingUpdate = (input, context) => { + return take(input, { + RegionName: [], + ReplicaGlobalSecondaryIndexUpdates: (_) => se_ReplicaGlobalSecondaryIndexAutoScalingUpdateList(_, context), + ReplicaProvisionedReadCapacityAutoScalingUpdate: (_) => se_AutoScalingSettingsUpdate(_, context), + }); +}; +const se_ReplicaAutoScalingUpdateList = (input, context) => { + return input + .filter((e) => e != null) + .map((entry) => { + return se_ReplicaAutoScalingUpdate(entry, context); + }); +}; +const se_ReplicaGlobalSecondaryIndexAutoScalingUpdate = (input, context) => { + return take(input, { + IndexName: [], + ProvisionedReadCapacityAutoScalingUpdate: (_) => se_AutoScalingSettingsUpdate(_, context), + }); +}; +const se_ReplicaGlobalSecondaryIndexAutoScalingUpdateList = (input, context) => { + return input + .filter((e) => e != null) + .map((entry) => { + return se_ReplicaGlobalSecondaryIndexAutoScalingUpdate(entry, context); + }); +}; +const se_ReplicaGlobalSecondaryIndexSettingsUpdate = (input, context) => { + return take(input, { + IndexName: [], + ProvisionedReadCapacityAutoScalingSettingsUpdate: (_) => se_AutoScalingSettingsUpdate(_, context), 
+ ProvisionedReadCapacityUnits: [], + }); +}; +const se_ReplicaGlobalSecondaryIndexSettingsUpdateList = (input, context) => { + return input + .filter((e) => e != null) + .map((entry) => { + return se_ReplicaGlobalSecondaryIndexSettingsUpdate(entry, context); + }); +}; +const se_ReplicaSettingsUpdate = (input, context) => { + return take(input, { + RegionName: [], + ReplicaGlobalSecondaryIndexSettingsUpdate: (_) => se_ReplicaGlobalSecondaryIndexSettingsUpdateList(_, context), + ReplicaProvisionedReadCapacityAutoScalingSettingsUpdate: (_) => se_AutoScalingSettingsUpdate(_, context), + ReplicaProvisionedReadCapacityUnits: [], + ReplicaTableClass: [], + }); +}; +const se_ReplicaSettingsUpdateList = (input, context) => { + return input + .filter((e) => e != null) + .map((entry) => { + return se_ReplicaSettingsUpdate(entry, context); + }); +}; +const se_RestoreTableToPointInTimeInput = (input, context) => { + return take(input, { + BillingModeOverride: [], + GlobalSecondaryIndexOverride: _json, + LocalSecondaryIndexOverride: _json, + OnDemandThroughputOverride: _json, + ProvisionedThroughputOverride: _json, + RestoreDateTime: (_) => _.getTime() / 1000, + SSESpecificationOverride: _json, + SourceTableArn: [], + SourceTableName: [], + TargetTableName: [], + UseLatestRestorableTime: [], + }); +}; +const se_ScanInput = (input, context) => { + return take(input, { + AttributesToGet: _json, + ConditionalOperator: [], + ConsistentRead: [], + ExclusiveStartKey: (_) => se_Key(_, context), + ExpressionAttributeNames: _json, + ExpressionAttributeValues: (_) => se_ExpressionAttributeValueMap(_, context), + FilterExpression: [], + IndexName: [], + Limit: [], + ProjectionExpression: [], + ReturnConsumedCapacity: [], + ScanFilter: (_) => se_FilterConditionMap(_, context), + Segment: [], + Select: [], + TableName: [], + TotalSegments: [], + }); +}; +const se_TransactGetItem = (input, context) => { + return take(input, { + Get: (_) => se_Get(_, context), + }); +}; +const 
se_TransactGetItemList = (input, context) => { + return input + .filter((e) => e != null) + .map((entry) => { + return se_TransactGetItem(entry, context); + }); +}; +const se_TransactGetItemsInput = (input, context) => { + return take(input, { + ReturnConsumedCapacity: [], + TransactItems: (_) => se_TransactGetItemList(_, context), + }); +}; +const se_TransactWriteItem = (input, context) => { + return take(input, { + ConditionCheck: (_) => se_ConditionCheck(_, context), + Delete: (_) => se_Delete(_, context), + Put: (_) => se_Put(_, context), + Update: (_) => se_Update(_, context), + }); +}; +const se_TransactWriteItemList = (input, context) => { + return input + .filter((e) => e != null) + .map((entry) => { + return se_TransactWriteItem(entry, context); + }); +}; +const se_TransactWriteItemsInput = (input, context) => { + return take(input, { + ClientRequestToken: [true, (_) => _ ?? generateIdempotencyToken()], + ReturnConsumedCapacity: [], + ReturnItemCollectionMetrics: [], + TransactItems: (_) => se_TransactWriteItemList(_, context), + }); +}; +const se_Update = (input, context) => { + return take(input, { + ConditionExpression: [], + ExpressionAttributeNames: _json, + ExpressionAttributeValues: (_) => se_ExpressionAttributeValueMap(_, context), + Key: (_) => se_Key(_, context), + ReturnValuesOnConditionCheckFailure: [], + TableName: [], + UpdateExpression: [], + }); +}; +const se_UpdateGlobalTableSettingsInput = (input, context) => { + return take(input, { + GlobalTableBillingMode: [], + GlobalTableGlobalSecondaryIndexSettingsUpdate: (_) => se_GlobalTableGlobalSecondaryIndexSettingsUpdateList(_, context), + GlobalTableName: [], + GlobalTableProvisionedWriteCapacityAutoScalingSettingsUpdate: (_) => se_AutoScalingSettingsUpdate(_, context), + GlobalTableProvisionedWriteCapacityUnits: [], + ReplicaSettingsUpdate: (_) => se_ReplicaSettingsUpdateList(_, context), + }); +}; +const se_UpdateItemInput = (input, context) => { + return take(input, { + AttributeUpdates: 
(_) => se_AttributeUpdates(_, context), + ConditionExpression: [], + ConditionalOperator: [], + Expected: (_) => se_ExpectedAttributeMap(_, context), + ExpressionAttributeNames: _json, + ExpressionAttributeValues: (_) => se_ExpressionAttributeValueMap(_, context), + Key: (_) => se_Key(_, context), + ReturnConsumedCapacity: [], + ReturnItemCollectionMetrics: [], + ReturnValues: [], + ReturnValuesOnConditionCheckFailure: [], + TableName: [], + UpdateExpression: [], + }); +}; +const se_UpdateTableReplicaAutoScalingInput = (input, context) => { + return take(input, { + GlobalSecondaryIndexUpdates: (_) => se_GlobalSecondaryIndexAutoScalingUpdateList(_, context), + ProvisionedWriteCapacityAutoScalingUpdate: (_) => se_AutoScalingSettingsUpdate(_, context), + ReplicaUpdates: (_) => se_ReplicaAutoScalingUpdateList(_, context), + TableName: [], + }); +}; +const se_WriteRequest = (input, context) => { + return take(input, { + DeleteRequest: (_) => se_DeleteRequest(_, context), + PutRequest: (_) => se_PutRequest(_, context), + }); +}; +const se_WriteRequests = (input, context) => { + return input + .filter((e) => e != null) + .map((entry) => { + return se_WriteRequest(entry, context); + }); +}; +const de_ArchivalSummary = (output, context) => { + return take(output, { + ArchivalBackupArn: __expectString, + ArchivalDateTime: (_) => __expectNonNull(__parseEpochTimestamp(__expectNumber(_))), + ArchivalReason: __expectString, + }); +}; +const de_AttributeMap = (output, context) => { + return Object.entries(output).reduce((acc, [key, value]) => { + if (value === null) { + return acc; + } + acc[key] = de_AttributeValue(__expectUnion(value), context); + return acc; + }, {}); +}; +const de_AttributeValue = (output, context) => { + if (output.B != null) { + return { + B: context.base64Decoder(output.B), + }; + } + if (__expectBoolean(output.BOOL) !== undefined) { + return { BOOL: __expectBoolean(output.BOOL) }; + } + if (output.BS != null) { + return { + BS: 
de_BinarySetAttributeValue(output.BS, context), + }; + } + if (output.L != null) { + return { + L: de_ListAttributeValue(output.L, context), + }; + } + if (output.M != null) { + return { + M: de_MapAttributeValue(output.M, context), + }; + } + if (__expectString(output.N) !== undefined) { + return { N: __expectString(output.N) }; + } + if (output.NS != null) { + return { + NS: _json(output.NS), + }; + } + if (__expectBoolean(output.NULL) !== undefined) { + return { NULL: __expectBoolean(output.NULL) }; + } + if (__expectString(output.S) !== undefined) { + return { S: __expectString(output.S) }; + } + if (output.SS != null) { + return { + SS: _json(output.SS), + }; + } + return { $unknown: Object.entries(output)[0] }; +}; +const de_AutoScalingPolicyDescription = (output, context) => { + return take(output, { + PolicyName: __expectString, + TargetTrackingScalingPolicyConfiguration: (_) => de_AutoScalingTargetTrackingScalingPolicyConfigurationDescription(_, context), + }); +}; +const de_AutoScalingPolicyDescriptionList = (output, context) => { + const retVal = (output || []) + .filter((e) => e != null) + .map((entry) => { + return de_AutoScalingPolicyDescription(entry, context); + }); + return retVal; +}; +const de_AutoScalingSettingsDescription = (output, context) => { + return take(output, { + AutoScalingDisabled: __expectBoolean, + AutoScalingRoleArn: __expectString, + MaximumUnits: __expectLong, + MinimumUnits: __expectLong, + ScalingPolicies: (_) => de_AutoScalingPolicyDescriptionList(_, context), + }); +}; +const de_AutoScalingTargetTrackingScalingPolicyConfigurationDescription = (output, context) => { + return take(output, { + DisableScaleIn: __expectBoolean, + ScaleInCooldown: __expectInt32, + ScaleOutCooldown: __expectInt32, + TargetValue: __limitedParseDouble, + }); +}; +const de_BackupDescription = (output, context) => { + return take(output, { + BackupDetails: (_) => de_BackupDetails(_, context), + SourceTableDetails: (_) => de_SourceTableDetails(_, 
context), + SourceTableFeatureDetails: (_) => de_SourceTableFeatureDetails(_, context), + }); +}; +const de_BackupDetails = (output, context) => { + return take(output, { + BackupArn: __expectString, + BackupCreationDateTime: (_) => __expectNonNull(__parseEpochTimestamp(__expectNumber(_))), + BackupExpiryDateTime: (_) => __expectNonNull(__parseEpochTimestamp(__expectNumber(_))), + BackupName: __expectString, + BackupSizeBytes: __expectLong, + BackupStatus: __expectString, + BackupType: __expectString, + }); +}; +const de_BackupSummaries = (output, context) => { + const retVal = (output || []) + .filter((e) => e != null) + .map((entry) => { + return de_BackupSummary(entry, context); + }); + return retVal; +}; +const de_BackupSummary = (output, context) => { + return take(output, { + BackupArn: __expectString, + BackupCreationDateTime: (_) => __expectNonNull(__parseEpochTimestamp(__expectNumber(_))), + BackupExpiryDateTime: (_) => __expectNonNull(__parseEpochTimestamp(__expectNumber(_))), + BackupName: __expectString, + BackupSizeBytes: __expectLong, + BackupStatus: __expectString, + BackupType: __expectString, + TableArn: __expectString, + TableId: __expectString, + TableName: __expectString, + }); +}; +const de_BatchExecuteStatementOutput = (output, context) => { + return take(output, { + ConsumedCapacity: (_) => de_ConsumedCapacityMultiple(_, context), + Responses: (_) => de_PartiQLBatchResponse(_, context), + }); +}; +const de_BatchGetItemOutput = (output, context) => { + return take(output, { + ConsumedCapacity: (_) => de_ConsumedCapacityMultiple(_, context), + Responses: (_) => de_BatchGetResponseMap(_, context), + UnprocessedKeys: (_) => de_BatchGetRequestMap(_, context), + }); +}; +const de_BatchGetRequestMap = (output, context) => { + return Object.entries(output).reduce((acc, [key, value]) => { + if (value === null) { + return acc; + } + acc[key] = de_KeysAndAttributes(value, context); + return acc; + }, {}); +}; +const de_BatchGetResponseMap = (output, 
context) => { + return Object.entries(output).reduce((acc, [key, value]) => { + if (value === null) { + return acc; + } + acc[key] = de_ItemList(value, context); + return acc; + }, {}); +}; +const de_BatchStatementError = (output, context) => { + return take(output, { + Code: __expectString, + Item: (_) => de_AttributeMap(_, context), + Message: __expectString, + }); +}; +const de_BatchStatementResponse = (output, context) => { + return take(output, { + Error: (_) => de_BatchStatementError(_, context), + Item: (_) => de_AttributeMap(_, context), + TableName: __expectString, + }); +}; +const de_BatchWriteItemOutput = (output, context) => { + return take(output, { + ConsumedCapacity: (_) => de_ConsumedCapacityMultiple(_, context), + ItemCollectionMetrics: (_) => de_ItemCollectionMetricsPerTable(_, context), + UnprocessedItems: (_) => de_BatchWriteItemRequestMap(_, context), + }); +}; +const de_BatchWriteItemRequestMap = (output, context) => { + return Object.entries(output).reduce((acc, [key, value]) => { + if (value === null) { + return acc; + } + acc[key] = de_WriteRequests(value, context); + return acc; + }, {}); +}; +const de_BillingModeSummary = (output, context) => { + return take(output, { + BillingMode: __expectString, + LastUpdateToPayPerRequestDateTime: (_) => __expectNonNull(__parseEpochTimestamp(__expectNumber(_))), + }); +}; +const de_BinarySetAttributeValue = (output, context) => { + const retVal = (output || []) + .filter((e) => e != null) + .map((entry) => { + return context.base64Decoder(entry); + }); + return retVal; +}; +const de_CancellationReason = (output, context) => { + return take(output, { + Code: __expectString, + Item: (_) => de_AttributeMap(_, context), + Message: __expectString, + }); +}; +const de_CancellationReasonList = (output, context) => { + const retVal = (output || []) + .filter((e) => e != null) + .map((entry) => { + return de_CancellationReason(entry, context); + }); + return retVal; +}; +const de_Capacity = (output, context) 
=> { + return take(output, { + CapacityUnits: __limitedParseDouble, + ReadCapacityUnits: __limitedParseDouble, + WriteCapacityUnits: __limitedParseDouble, + }); +}; +const de_ConditionalCheckFailedException = (output, context) => { + return take(output, { + Item: (_) => de_AttributeMap(_, context), + message: __expectString, + }); +}; +const de_ConsumedCapacity = (output, context) => { + return take(output, { + CapacityUnits: __limitedParseDouble, + GlobalSecondaryIndexes: (_) => de_SecondaryIndexesCapacityMap(_, context), + LocalSecondaryIndexes: (_) => de_SecondaryIndexesCapacityMap(_, context), + ReadCapacityUnits: __limitedParseDouble, + Table: (_) => de_Capacity(_, context), + TableName: __expectString, + WriteCapacityUnits: __limitedParseDouble, + }); +}; +const de_ConsumedCapacityMultiple = (output, context) => { + const retVal = (output || []) + .filter((e) => e != null) + .map((entry) => { + return de_ConsumedCapacity(entry, context); + }); + return retVal; +}; +const de_ContinuousBackupsDescription = (output, context) => { + return take(output, { + ContinuousBackupsStatus: __expectString, + PointInTimeRecoveryDescription: (_) => de_PointInTimeRecoveryDescription(_, context), + }); +}; +const de_CreateBackupOutput = (output, context) => { + return take(output, { + BackupDetails: (_) => de_BackupDetails(_, context), + }); +}; +const de_CreateGlobalTableOutput = (output, context) => { + return take(output, { + GlobalTableDescription: (_) => de_GlobalTableDescription(_, context), + }); +}; +const de_CreateTableOutput = (output, context) => { + return take(output, { + TableDescription: (_) => de_TableDescription(_, context), + }); +}; +const de_DeleteBackupOutput = (output, context) => { + return take(output, { + BackupDescription: (_) => de_BackupDescription(_, context), + }); +}; +const de_DeleteItemOutput = (output, context) => { + return take(output, { + Attributes: (_) => de_AttributeMap(_, context), + ConsumedCapacity: (_) => de_ConsumedCapacity(_, 
context), + ItemCollectionMetrics: (_) => de_ItemCollectionMetrics(_, context), + }); +}; +const de_DeleteRequest = (output, context) => { + return take(output, { + Key: (_) => de_Key(_, context), + }); +}; +const de_DeleteTableOutput = (output, context) => { + return take(output, { + TableDescription: (_) => de_TableDescription(_, context), + }); +}; +const de_DescribeBackupOutput = (output, context) => { + return take(output, { + BackupDescription: (_) => de_BackupDescription(_, context), + }); +}; +const de_DescribeContinuousBackupsOutput = (output, context) => { + return take(output, { + ContinuousBackupsDescription: (_) => de_ContinuousBackupsDescription(_, context), + }); +}; +const de_DescribeContributorInsightsOutput = (output, context) => { + return take(output, { + ContributorInsightsRuleList: _json, + ContributorInsightsStatus: __expectString, + FailureException: _json, + IndexName: __expectString, + LastUpdateDateTime: (_) => __expectNonNull(__parseEpochTimestamp(__expectNumber(_))), + TableName: __expectString, + }); +}; +const de_DescribeExportOutput = (output, context) => { + return take(output, { + ExportDescription: (_) => de_ExportDescription(_, context), + }); +}; +const de_DescribeGlobalTableOutput = (output, context) => { + return take(output, { + GlobalTableDescription: (_) => de_GlobalTableDescription(_, context), + }); +}; +const de_DescribeGlobalTableSettingsOutput = (output, context) => { + return take(output, { + GlobalTableName: __expectString, + ReplicaSettings: (_) => de_ReplicaSettingsDescriptionList(_, context), + }); +}; +const de_DescribeImportOutput = (output, context) => { + return take(output, { + ImportTableDescription: (_) => de_ImportTableDescription(_, context), + }); +}; +const de_DescribeTableOutput = (output, context) => { + return take(output, { + Table: (_) => de_TableDescription(_, context), + }); +}; +const de_DescribeTableReplicaAutoScalingOutput = (output, context) => { + return take(output, { + 
TableAutoScalingDescription: (_) => de_TableAutoScalingDescription(_, context), + }); +}; +const de_ExecuteStatementOutput = (output, context) => { + return take(output, { + ConsumedCapacity: (_) => de_ConsumedCapacity(_, context), + Items: (_) => de_ItemList(_, context), + LastEvaluatedKey: (_) => de_Key(_, context), + NextToken: __expectString, + }); +}; +const de_ExecuteTransactionOutput = (output, context) => { + return take(output, { + ConsumedCapacity: (_) => de_ConsumedCapacityMultiple(_, context), + Responses: (_) => de_ItemResponseList(_, context), + }); +}; +const de_ExportDescription = (output, context) => { + return take(output, { + BilledSizeBytes: __expectLong, + ClientToken: __expectString, + EndTime: (_) => __expectNonNull(__parseEpochTimestamp(__expectNumber(_))), + ExportArn: __expectString, + ExportFormat: __expectString, + ExportManifest: __expectString, + ExportStatus: __expectString, + ExportTime: (_) => __expectNonNull(__parseEpochTimestamp(__expectNumber(_))), + ExportType: __expectString, + FailureCode: __expectString, + FailureMessage: __expectString, + IncrementalExportSpecification: (_) => de_IncrementalExportSpecification(_, context), + ItemCount: __expectLong, + S3Bucket: __expectString, + S3BucketOwner: __expectString, + S3Prefix: __expectString, + S3SseAlgorithm: __expectString, + S3SseKmsKeyId: __expectString, + StartTime: (_) => __expectNonNull(__parseEpochTimestamp(__expectNumber(_))), + TableArn: __expectString, + TableId: __expectString, + }); +}; +const de_ExportTableToPointInTimeOutput = (output, context) => { + return take(output, { + ExportDescription: (_) => de_ExportDescription(_, context), + }); +}; +const de_GetItemOutput = (output, context) => { + return take(output, { + ConsumedCapacity: (_) => de_ConsumedCapacity(_, context), + Item: (_) => de_AttributeMap(_, context), + }); +}; +const de_GlobalSecondaryIndexDescription = (output, context) => { + return take(output, { + Backfilling: __expectBoolean, + IndexArn: 
__expectString, + IndexName: __expectString, + IndexSizeBytes: __expectLong, + IndexStatus: __expectString, + ItemCount: __expectLong, + KeySchema: _json, + OnDemandThroughput: _json, + Projection: _json, + ProvisionedThroughput: (_) => de_ProvisionedThroughputDescription(_, context), + WarmThroughput: _json, + }); +}; +const de_GlobalSecondaryIndexDescriptionList = (output, context) => { + const retVal = (output || []) + .filter((e) => e != null) + .map((entry) => { + return de_GlobalSecondaryIndexDescription(entry, context); + }); + return retVal; +}; +const de_GlobalTableDescription = (output, context) => { + return take(output, { + CreationDateTime: (_) => __expectNonNull(__parseEpochTimestamp(__expectNumber(_))), + GlobalTableArn: __expectString, + GlobalTableName: __expectString, + GlobalTableStatus: __expectString, + ReplicationGroup: (_) => de_ReplicaDescriptionList(_, context), + }); +}; +const de_ImportSummary = (output, context) => { + return take(output, { + CloudWatchLogGroupArn: __expectString, + EndTime: (_) => __expectNonNull(__parseEpochTimestamp(__expectNumber(_))), + ImportArn: __expectString, + ImportStatus: __expectString, + InputFormat: __expectString, + S3BucketSource: _json, + StartTime: (_) => __expectNonNull(__parseEpochTimestamp(__expectNumber(_))), + TableArn: __expectString, + }); +}; +const de_ImportSummaryList = (output, context) => { + const retVal = (output || []) + .filter((e) => e != null) + .map((entry) => { + return de_ImportSummary(entry, context); + }); + return retVal; +}; +const de_ImportTableDescription = (output, context) => { + return take(output, { + ClientToken: __expectString, + CloudWatchLogGroupArn: __expectString, + EndTime: (_) => __expectNonNull(__parseEpochTimestamp(__expectNumber(_))), + ErrorCount: __expectLong, + FailureCode: __expectString, + FailureMessage: __expectString, + ImportArn: __expectString, + ImportStatus: __expectString, + ImportedItemCount: __expectLong, + InputCompressionType: __expectString, + 
InputFormat: __expectString, + InputFormatOptions: _json, + ProcessedItemCount: __expectLong, + ProcessedSizeBytes: __expectLong, + S3BucketSource: _json, + StartTime: (_) => __expectNonNull(__parseEpochTimestamp(__expectNumber(_))), + TableArn: __expectString, + TableCreationParameters: _json, + TableId: __expectString, + }); +}; +const de_ImportTableOutput = (output, context) => { + return take(output, { + ImportTableDescription: (_) => de_ImportTableDescription(_, context), + }); +}; +const de_IncrementalExportSpecification = (output, context) => { + return take(output, { + ExportFromTime: (_) => __expectNonNull(__parseEpochTimestamp(__expectNumber(_))), + ExportToTime: (_) => __expectNonNull(__parseEpochTimestamp(__expectNumber(_))), + ExportViewType: __expectString, + }); +}; +const de_ItemCollectionKeyAttributeMap = (output, context) => { + return Object.entries(output).reduce((acc, [key, value]) => { + if (value === null) { + return acc; + } + acc[key] = de_AttributeValue(__expectUnion(value), context); + return acc; + }, {}); +}; +const de_ItemCollectionMetrics = (output, context) => { + return take(output, { + ItemCollectionKey: (_) => de_ItemCollectionKeyAttributeMap(_, context), + SizeEstimateRangeGB: (_) => de_ItemCollectionSizeEstimateRange(_, context), + }); +}; +const de_ItemCollectionMetricsMultiple = (output, context) => { + const retVal = (output || []) + .filter((e) => e != null) + .map((entry) => { + return de_ItemCollectionMetrics(entry, context); + }); + return retVal; +}; +const de_ItemCollectionMetricsPerTable = (output, context) => { + return Object.entries(output).reduce((acc, [key, value]) => { + if (value === null) { + return acc; + } + acc[key] = de_ItemCollectionMetricsMultiple(value, context); + return acc; + }, {}); +}; +const de_ItemCollectionSizeEstimateRange = (output, context) => { + const retVal = (output || []) + .filter((e) => e != null) + .map((entry) => { + return __limitedParseDouble(entry); + }); + return retVal; +}; 
+const de_ItemList = (output, context) => { + const retVal = (output || []) + .filter((e) => e != null) + .map((entry) => { + return de_AttributeMap(entry, context); + }); + return retVal; +}; +const de_ItemResponse = (output, context) => { + return take(output, { + Item: (_) => de_AttributeMap(_, context), + }); +}; +const de_ItemResponseList = (output, context) => { + const retVal = (output || []) + .filter((e) => e != null) + .map((entry) => { + return de_ItemResponse(entry, context); + }); + return retVal; +}; +const de_Key = (output, context) => { + return Object.entries(output).reduce((acc, [key, value]) => { + if (value === null) { + return acc; + } + acc[key] = de_AttributeValue(__expectUnion(value), context); + return acc; + }, {}); +}; +const de_KeyList = (output, context) => { + const retVal = (output || []) + .filter((e) => e != null) + .map((entry) => { + return de_Key(entry, context); + }); + return retVal; +}; +const de_KeysAndAttributes = (output, context) => { + return take(output, { + AttributesToGet: _json, + ConsistentRead: __expectBoolean, + ExpressionAttributeNames: _json, + Keys: (_) => de_KeyList(_, context), + ProjectionExpression: __expectString, + }); +}; +const de_ListAttributeValue = (output, context) => { + const retVal = (output || []) + .filter((e) => e != null) + .map((entry) => { + return de_AttributeValue(__expectUnion(entry), context); + }); + return retVal; +}; +const de_ListBackupsOutput = (output, context) => { + return take(output, { + BackupSummaries: (_) => de_BackupSummaries(_, context), + LastEvaluatedBackupArn: __expectString, + }); +}; +const de_ListImportsOutput = (output, context) => { + return take(output, { + ImportSummaryList: (_) => de_ImportSummaryList(_, context), + NextToken: __expectString, + }); +}; +const de_MapAttributeValue = (output, context) => { + return Object.entries(output).reduce((acc, [key, value]) => { + if (value === null) { + return acc; + } + acc[key] = de_AttributeValue(__expectUnion(value), 
context); + return acc; + }, {}); +}; +const de_PartiQLBatchResponse = (output, context) => { + const retVal = (output || []) + .filter((e) => e != null) + .map((entry) => { + return de_BatchStatementResponse(entry, context); + }); + return retVal; +}; +const de_PointInTimeRecoveryDescription = (output, context) => { + return take(output, { + EarliestRestorableDateTime: (_) => __expectNonNull(__parseEpochTimestamp(__expectNumber(_))), + LatestRestorableDateTime: (_) => __expectNonNull(__parseEpochTimestamp(__expectNumber(_))), + PointInTimeRecoveryStatus: __expectString, + RecoveryPeriodInDays: __expectInt32, + }); +}; +const de_ProvisionedThroughputDescription = (output, context) => { + return take(output, { + LastDecreaseDateTime: (_) => __expectNonNull(__parseEpochTimestamp(__expectNumber(_))), + LastIncreaseDateTime: (_) => __expectNonNull(__parseEpochTimestamp(__expectNumber(_))), + NumberOfDecreasesToday: __expectLong, + ReadCapacityUnits: __expectLong, + WriteCapacityUnits: __expectLong, + }); +}; +const de_PutItemInputAttributeMap = (output, context) => { + return Object.entries(output).reduce((acc, [key, value]) => { + if (value === null) { + return acc; + } + acc[key] = de_AttributeValue(__expectUnion(value), context); + return acc; + }, {}); +}; +const de_PutItemOutput = (output, context) => { + return take(output, { + Attributes: (_) => de_AttributeMap(_, context), + ConsumedCapacity: (_) => de_ConsumedCapacity(_, context), + ItemCollectionMetrics: (_) => de_ItemCollectionMetrics(_, context), + }); +}; +const de_PutRequest = (output, context) => { + return take(output, { + Item: (_) => de_PutItemInputAttributeMap(_, context), + }); +}; +const de_QueryOutput = (output, context) => { + return take(output, { + ConsumedCapacity: (_) => de_ConsumedCapacity(_, context), + Count: __expectInt32, + Items: (_) => de_ItemList(_, context), + LastEvaluatedKey: (_) => de_Key(_, context), + ScannedCount: __expectInt32, + }); +}; +const de_ReplicaAutoScalingDescription 
= (output, context) => { + return take(output, { + GlobalSecondaryIndexes: (_) => de_ReplicaGlobalSecondaryIndexAutoScalingDescriptionList(_, context), + RegionName: __expectString, + ReplicaProvisionedReadCapacityAutoScalingSettings: (_) => de_AutoScalingSettingsDescription(_, context), + ReplicaProvisionedWriteCapacityAutoScalingSettings: (_) => de_AutoScalingSettingsDescription(_, context), + ReplicaStatus: __expectString, + }); +}; +const de_ReplicaAutoScalingDescriptionList = (output, context) => { + const retVal = (output || []) + .filter((e) => e != null) + .map((entry) => { + return de_ReplicaAutoScalingDescription(entry, context); + }); + return retVal; +}; +const de_ReplicaDescription = (output, context) => { + return take(output, { + GlobalSecondaryIndexes: _json, + KMSMasterKeyId: __expectString, + OnDemandThroughputOverride: _json, + ProvisionedThroughputOverride: _json, + RegionName: __expectString, + ReplicaInaccessibleDateTime: (_) => __expectNonNull(__parseEpochTimestamp(__expectNumber(_))), + ReplicaStatus: __expectString, + ReplicaStatusDescription: __expectString, + ReplicaStatusPercentProgress: __expectString, + ReplicaTableClassSummary: (_) => de_TableClassSummary(_, context), + WarmThroughput: _json, + }); +}; +const de_ReplicaDescriptionList = (output, context) => { + const retVal = (output || []) + .filter((e) => e != null) + .map((entry) => { + return de_ReplicaDescription(entry, context); + }); + return retVal; +}; +const de_ReplicaGlobalSecondaryIndexAutoScalingDescription = (output, context) => { + return take(output, { + IndexName: __expectString, + IndexStatus: __expectString, + ProvisionedReadCapacityAutoScalingSettings: (_) => de_AutoScalingSettingsDescription(_, context), + ProvisionedWriteCapacityAutoScalingSettings: (_) => de_AutoScalingSettingsDescription(_, context), + }); +}; +const de_ReplicaGlobalSecondaryIndexAutoScalingDescriptionList = (output, context) => { + const retVal = (output || []) + .filter((e) => e != null) + 
.map((entry) => { + return de_ReplicaGlobalSecondaryIndexAutoScalingDescription(entry, context); + }); + return retVal; +}; +const de_ReplicaGlobalSecondaryIndexSettingsDescription = (output, context) => { + return take(output, { + IndexName: __expectString, + IndexStatus: __expectString, + ProvisionedReadCapacityAutoScalingSettings: (_) => de_AutoScalingSettingsDescription(_, context), + ProvisionedReadCapacityUnits: __expectLong, + ProvisionedWriteCapacityAutoScalingSettings: (_) => de_AutoScalingSettingsDescription(_, context), + ProvisionedWriteCapacityUnits: __expectLong, + }); +}; +const de_ReplicaGlobalSecondaryIndexSettingsDescriptionList = (output, context) => { + const retVal = (output || []) + .filter((e) => e != null) + .map((entry) => { + return de_ReplicaGlobalSecondaryIndexSettingsDescription(entry, context); + }); + return retVal; +}; +const de_ReplicaSettingsDescription = (output, context) => { + return take(output, { + RegionName: __expectString, + ReplicaBillingModeSummary: (_) => de_BillingModeSummary(_, context), + ReplicaGlobalSecondaryIndexSettings: (_) => de_ReplicaGlobalSecondaryIndexSettingsDescriptionList(_, context), + ReplicaProvisionedReadCapacityAutoScalingSettings: (_) => de_AutoScalingSettingsDescription(_, context), + ReplicaProvisionedReadCapacityUnits: __expectLong, + ReplicaProvisionedWriteCapacityAutoScalingSettings: (_) => de_AutoScalingSettingsDescription(_, context), + ReplicaProvisionedWriteCapacityUnits: __expectLong, + ReplicaStatus: __expectString, + ReplicaTableClassSummary: (_) => de_TableClassSummary(_, context), + }); +}; +const de_ReplicaSettingsDescriptionList = (output, context) => { + const retVal = (output || []) + .filter((e) => e != null) + .map((entry) => { + return de_ReplicaSettingsDescription(entry, context); + }); + return retVal; +}; +const de_RestoreSummary = (output, context) => { + return take(output, { + RestoreDateTime: (_) => __expectNonNull(__parseEpochTimestamp(__expectNumber(_))), + 
RestoreInProgress: __expectBoolean, + SourceBackupArn: __expectString, + SourceTableArn: __expectString, + }); +}; +const de_RestoreTableFromBackupOutput = (output, context) => { + return take(output, { + TableDescription: (_) => de_TableDescription(_, context), + }); +}; +const de_RestoreTableToPointInTimeOutput = (output, context) => { + return take(output, { + TableDescription: (_) => de_TableDescription(_, context), + }); +}; +const de_ScanOutput = (output, context) => { + return take(output, { + ConsumedCapacity: (_) => de_ConsumedCapacity(_, context), + Count: __expectInt32, + Items: (_) => de_ItemList(_, context), + LastEvaluatedKey: (_) => de_Key(_, context), + ScannedCount: __expectInt32, + }); +}; +const de_SecondaryIndexesCapacityMap = (output, context) => { + return Object.entries(output).reduce((acc, [key, value]) => { + if (value === null) { + return acc; + } + acc[key] = de_Capacity(value, context); + return acc; + }, {}); +}; +const de_SourceTableDetails = (output, context) => { + return take(output, { + BillingMode: __expectString, + ItemCount: __expectLong, + KeySchema: _json, + OnDemandThroughput: _json, + ProvisionedThroughput: _json, + TableArn: __expectString, + TableCreationDateTime: (_) => __expectNonNull(__parseEpochTimestamp(__expectNumber(_))), + TableId: __expectString, + TableName: __expectString, + TableSizeBytes: __expectLong, + }); +}; +const de_SourceTableFeatureDetails = (output, context) => { + return take(output, { + GlobalSecondaryIndexes: _json, + LocalSecondaryIndexes: _json, + SSEDescription: (_) => de_SSEDescription(_, context), + StreamDescription: _json, + TimeToLiveDescription: _json, + }); +}; +const de_SSEDescription = (output, context) => { + return take(output, { + InaccessibleEncryptionDateTime: (_) => __expectNonNull(__parseEpochTimestamp(__expectNumber(_))), + KMSMasterKeyArn: __expectString, + SSEType: __expectString, + Status: __expectString, + }); +}; +const de_TableAutoScalingDescription = (output, context) => 
{ + return take(output, { + Replicas: (_) => de_ReplicaAutoScalingDescriptionList(_, context), + TableName: __expectString, + TableStatus: __expectString, + }); +}; +const de_TableClassSummary = (output, context) => { + return take(output, { + LastUpdateDateTime: (_) => __expectNonNull(__parseEpochTimestamp(__expectNumber(_))), + TableClass: __expectString, + }); +}; +const de_TableDescription = (output, context) => { + return take(output, { + ArchivalSummary: (_) => de_ArchivalSummary(_, context), + AttributeDefinitions: _json, + BillingModeSummary: (_) => de_BillingModeSummary(_, context), + CreationDateTime: (_) => __expectNonNull(__parseEpochTimestamp(__expectNumber(_))), + DeletionProtectionEnabled: __expectBoolean, + GlobalSecondaryIndexes: (_) => de_GlobalSecondaryIndexDescriptionList(_, context), + GlobalTableVersion: __expectString, + ItemCount: __expectLong, + KeySchema: _json, + LatestStreamArn: __expectString, + LatestStreamLabel: __expectString, + LocalSecondaryIndexes: _json, + MultiRegionConsistency: __expectString, + OnDemandThroughput: _json, + ProvisionedThroughput: (_) => de_ProvisionedThroughputDescription(_, context), + Replicas: (_) => de_ReplicaDescriptionList(_, context), + RestoreSummary: (_) => de_RestoreSummary(_, context), + SSEDescription: (_) => de_SSEDescription(_, context), + StreamSpecification: _json, + TableArn: __expectString, + TableClassSummary: (_) => de_TableClassSummary(_, context), + TableId: __expectString, + TableName: __expectString, + TableSizeBytes: __expectLong, + TableStatus: __expectString, + WarmThroughput: _json, + }); +}; +const de_TransactGetItemsOutput = (output, context) => { + return take(output, { + ConsumedCapacity: (_) => de_ConsumedCapacityMultiple(_, context), + Responses: (_) => de_ItemResponseList(_, context), + }); +}; +const de_TransactionCanceledException = (output, context) => { + return take(output, { + CancellationReasons: (_) => de_CancellationReasonList(_, context), + Message: __expectString, + 
}); +}; +const de_TransactWriteItemsOutput = (output, context) => { + return take(output, { + ConsumedCapacity: (_) => de_ConsumedCapacityMultiple(_, context), + ItemCollectionMetrics: (_) => de_ItemCollectionMetricsPerTable(_, context), + }); +}; +const de_UpdateContinuousBackupsOutput = (output, context) => { + return take(output, { + ContinuousBackupsDescription: (_) => de_ContinuousBackupsDescription(_, context), + }); +}; +const de_UpdateGlobalTableOutput = (output, context) => { + return take(output, { + GlobalTableDescription: (_) => de_GlobalTableDescription(_, context), + }); +}; +const de_UpdateGlobalTableSettingsOutput = (output, context) => { + return take(output, { + GlobalTableName: __expectString, + ReplicaSettings: (_) => de_ReplicaSettingsDescriptionList(_, context), + }); +}; +const de_UpdateItemOutput = (output, context) => { + return take(output, { + Attributes: (_) => de_AttributeMap(_, context), + ConsumedCapacity: (_) => de_ConsumedCapacity(_, context), + ItemCollectionMetrics: (_) => de_ItemCollectionMetrics(_, context), + }); +}; +const de_UpdateTableOutput = (output, context) => { + return take(output, { + TableDescription: (_) => de_TableDescription(_, context), + }); +}; +const de_UpdateTableReplicaAutoScalingOutput = (output, context) => { + return take(output, { + TableAutoScalingDescription: (_) => de_TableAutoScalingDescription(_, context), + }); +}; +const de_WriteRequest = (output, context) => { + return take(output, { + DeleteRequest: (_) => de_DeleteRequest(_, context), + PutRequest: (_) => de_PutRequest(_, context), + }); +}; +const de_WriteRequests = (output, context) => { + const retVal = (output || []) + .filter((e) => e != null) + .map((entry) => { + return de_WriteRequest(entry, context); + }); + return retVal; +}; +const deserializeMetadata = (output) => ({ + httpStatusCode: output.statusCode, + requestId: output.headers["x-amzn-requestid"] ?? output.headers["x-amzn-request-id"] ?? 
output.headers["x-amz-request-id"], + extendedRequestId: output.headers["x-amz-id-2"], + cfId: output.headers["x-amz-cf-id"], +}); +const collectBodyString = (streamBody, context) => collectBody(streamBody, context).then((body) => context.utf8Encoder(body)); +const throwDefaultError = withBaseException(__BaseException); +const buildHttpRpcRequest = async (context, headers, path, resolvedHostname, body) => { + const { hostname, protocol = "https", port, path: basePath } = await context.endpoint(); + const contents = { + protocol, + hostname, + port, + method: "POST", + path: basePath.endsWith("/") ? basePath.slice(0, -1) + path : basePath + path, + headers, + }; + if (resolvedHostname !== undefined) { + contents.hostname = resolvedHostname; + } + if (body !== undefined) { + contents.body = body; + } + return new __HttpRequest(contents); +}; +function sharedHeaders(operation) { + return { + "content-type": "application/x-amz-json-1.0", + "x-amz-target": `DynamoDB_20120810.${operation}`, + }; +} diff --git a/amplify/functions/fetchDocuments/node_modules/@aws-sdk/client-dynamodb/dist-es/runtimeConfig.browser.js b/amplify/functions/fetchDocuments/node_modules/@aws-sdk/client-dynamodb/dist-es/runtimeConfig.browser.js new file mode 100644 index 0000000..a29a02d --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@aws-sdk/client-dynamodb/dist-es/runtimeConfig.browser.js @@ -0,0 +1,37 @@ +import packageInfo from "../package.json"; +import { Sha256 } from "@aws-crypto/sha256-browser"; +import { DEFAULT_ACCOUNT_ID_ENDPOINT_MODE } from "@aws-sdk/core/account-id-endpoint"; +import { createDefaultUserAgentProvider } from "@aws-sdk/util-user-agent-browser"; +import { DEFAULT_USE_DUALSTACK_ENDPOINT, DEFAULT_USE_FIPS_ENDPOINT } from "@smithy/config-resolver"; +import { FetchHttpHandler as RequestHandler, streamCollector } from "@smithy/fetch-http-handler"; +import { invalidProvider } from "@smithy/invalid-dependency"; +import { calculateBodyLength } from 
"@smithy/util-body-length-browser"; +import { DEFAULT_MAX_ATTEMPTS, DEFAULT_RETRY_MODE } from "@smithy/util-retry"; +import { getRuntimeConfig as getSharedRuntimeConfig } from "./runtimeConfig.shared"; +import { loadConfigsForDefaultMode } from "@smithy/smithy-client"; +import { resolveDefaultsModeConfig } from "@smithy/util-defaults-mode-browser"; +export const getRuntimeConfig = (config) => { + const defaultsMode = resolveDefaultsModeConfig(config); + const defaultConfigProvider = () => defaultsMode().then(loadConfigsForDefaultMode); + const clientSharedValues = getSharedRuntimeConfig(config); + return { + ...clientSharedValues, + ...config, + runtime: "browser", + defaultsMode, + accountIdEndpointMode: config?.accountIdEndpointMode ?? (() => Promise.resolve(DEFAULT_ACCOUNT_ID_ENDPOINT_MODE)), + bodyLengthChecker: config?.bodyLengthChecker ?? calculateBodyLength, + credentialDefaultProvider: config?.credentialDefaultProvider ?? ((_) => () => Promise.reject(new Error("Credential is missing"))), + defaultUserAgentProvider: config?.defaultUserAgentProvider ?? + createDefaultUserAgentProvider({ serviceId: clientSharedValues.serviceId, clientVersion: packageInfo.version }), + endpointDiscoveryEnabledProvider: config?.endpointDiscoveryEnabledProvider ?? (() => Promise.resolve(undefined)), + maxAttempts: config?.maxAttempts ?? DEFAULT_MAX_ATTEMPTS, + region: config?.region ?? invalidProvider("Region is missing"), + requestHandler: RequestHandler.create(config?.requestHandler ?? defaultConfigProvider), + retryMode: config?.retryMode ?? (async () => (await defaultConfigProvider()).retryMode || DEFAULT_RETRY_MODE), + sha256: config?.sha256 ?? Sha256, + streamCollector: config?.streamCollector ?? streamCollector, + useDualstackEndpoint: config?.useDualstackEndpoint ?? (() => Promise.resolve(DEFAULT_USE_DUALSTACK_ENDPOINT)), + useFipsEndpoint: config?.useFipsEndpoint ?? 
(() => Promise.resolve(DEFAULT_USE_FIPS_ENDPOINT)), + }; +}; diff --git a/amplify/functions/fetchDocuments/node_modules/@aws-sdk/client-dynamodb/dist-es/runtimeConfig.js b/amplify/functions/fetchDocuments/node_modules/@aws-sdk/client-dynamodb/dist-es/runtimeConfig.js new file mode 100644 index 0000000..69898d1 --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@aws-sdk/client-dynamodb/dist-es/runtimeConfig.js @@ -0,0 +1,52 @@ +import packageInfo from "../package.json"; +import { NODE_AUTH_SCHEME_PREFERENCE_OPTIONS, emitWarningIfUnsupportedVersion as awsCheckVersion } from "@aws-sdk/core"; +import { NODE_ACCOUNT_ID_ENDPOINT_MODE_CONFIG_OPTIONS } from "@aws-sdk/core/account-id-endpoint"; +import { defaultProvider as credentialDefaultProvider } from "@aws-sdk/credential-provider-node"; +import { NODE_ENDPOINT_DISCOVERY_CONFIG_OPTIONS } from "@aws-sdk/middleware-endpoint-discovery"; +import { NODE_APP_ID_CONFIG_OPTIONS, createDefaultUserAgentProvider } from "@aws-sdk/util-user-agent-node"; +import { NODE_REGION_CONFIG_FILE_OPTIONS, NODE_REGION_CONFIG_OPTIONS, NODE_USE_DUALSTACK_ENDPOINT_CONFIG_OPTIONS, NODE_USE_FIPS_ENDPOINT_CONFIG_OPTIONS, } from "@smithy/config-resolver"; +import { Hash } from "@smithy/hash-node"; +import { NODE_MAX_ATTEMPT_CONFIG_OPTIONS, NODE_RETRY_MODE_CONFIG_OPTIONS } from "@smithy/middleware-retry"; +import { loadConfig as loadNodeConfig } from "@smithy/node-config-provider"; +import { NodeHttpHandler as RequestHandler, streamCollector } from "@smithy/node-http-handler"; +import { calculateBodyLength } from "@smithy/util-body-length-node"; +import { DEFAULT_RETRY_MODE } from "@smithy/util-retry"; +import { getRuntimeConfig as getSharedRuntimeConfig } from "./runtimeConfig.shared"; +import { loadConfigsForDefaultMode } from "@smithy/smithy-client"; +import { resolveDefaultsModeConfig } from "@smithy/util-defaults-mode-node"; +import { emitWarningIfUnsupportedVersion } from "@smithy/smithy-client"; +export const getRuntimeConfig = 
(config) => { + emitWarningIfUnsupportedVersion(process.version); + const defaultsMode = resolveDefaultsModeConfig(config); + const defaultConfigProvider = () => defaultsMode().then(loadConfigsForDefaultMode); + const clientSharedValues = getSharedRuntimeConfig(config); + awsCheckVersion(process.version); + const profileConfig = { profile: config?.profile }; + return { + ...clientSharedValues, + ...config, + runtime: "node", + defaultsMode, + accountIdEndpointMode: config?.accountIdEndpointMode ?? loadNodeConfig(NODE_ACCOUNT_ID_ENDPOINT_MODE_CONFIG_OPTIONS, profileConfig), + authSchemePreference: config?.authSchemePreference ?? loadNodeConfig(NODE_AUTH_SCHEME_PREFERENCE_OPTIONS, profileConfig), + bodyLengthChecker: config?.bodyLengthChecker ?? calculateBodyLength, + credentialDefaultProvider: config?.credentialDefaultProvider ?? credentialDefaultProvider, + defaultUserAgentProvider: config?.defaultUserAgentProvider ?? + createDefaultUserAgentProvider({ serviceId: clientSharedValues.serviceId, clientVersion: packageInfo.version }), + endpointDiscoveryEnabledProvider: config?.endpointDiscoveryEnabledProvider ?? loadNodeConfig(NODE_ENDPOINT_DISCOVERY_CONFIG_OPTIONS, profileConfig), + maxAttempts: config?.maxAttempts ?? loadNodeConfig(NODE_MAX_ATTEMPT_CONFIG_OPTIONS, config), + region: config?.region ?? + loadNodeConfig(NODE_REGION_CONFIG_OPTIONS, { ...NODE_REGION_CONFIG_FILE_OPTIONS, ...profileConfig }), + requestHandler: RequestHandler.create(config?.requestHandler ?? defaultConfigProvider), + retryMode: config?.retryMode ?? + loadNodeConfig({ + ...NODE_RETRY_MODE_CONFIG_OPTIONS, + default: async () => (await defaultConfigProvider()).retryMode || DEFAULT_RETRY_MODE, + }, config), + sha256: config?.sha256 ?? Hash.bind(null, "sha256"), + streamCollector: config?.streamCollector ?? streamCollector, + useDualstackEndpoint: config?.useDualstackEndpoint ?? 
loadNodeConfig(NODE_USE_DUALSTACK_ENDPOINT_CONFIG_OPTIONS, profileConfig), + useFipsEndpoint: config?.useFipsEndpoint ?? loadNodeConfig(NODE_USE_FIPS_ENDPOINT_CONFIG_OPTIONS, profileConfig), + userAgentAppId: config?.userAgentAppId ?? loadNodeConfig(NODE_APP_ID_CONFIG_OPTIONS, profileConfig), + }; +}; diff --git a/amplify/functions/fetchDocuments/node_modules/@aws-sdk/client-dynamodb/dist-es/runtimeConfig.native.js b/amplify/functions/fetchDocuments/node_modules/@aws-sdk/client-dynamodb/dist-es/runtimeConfig.native.js new file mode 100644 index 0000000..0b54695 --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@aws-sdk/client-dynamodb/dist-es/runtimeConfig.native.js @@ -0,0 +1,11 @@ +import { Sha256 } from "@aws-crypto/sha256-js"; +import { getRuntimeConfig as getBrowserRuntimeConfig } from "./runtimeConfig.browser"; +export const getRuntimeConfig = (config) => { + const browserDefaults = getBrowserRuntimeConfig(config); + return { + ...browserDefaults, + ...config, + runtime: "react-native", + sha256: config?.sha256 ?? 
Sha256, + }; +}; diff --git a/amplify/functions/fetchDocuments/node_modules/@aws-sdk/client-dynamodb/dist-es/runtimeConfig.shared.js b/amplify/functions/fetchDocuments/node_modules/@aws-sdk/client-dynamodb/dist-es/runtimeConfig.shared.js new file mode 100644 index 0000000..ee4ffa7 --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@aws-sdk/client-dynamodb/dist-es/runtimeConfig.shared.js @@ -0,0 +1,30 @@ +import { AwsSdkSigV4Signer } from "@aws-sdk/core"; +import { NoOpLogger } from "@smithy/smithy-client"; +import { parseUrl } from "@smithy/url-parser"; +import { fromBase64, toBase64 } from "@smithy/util-base64"; +import { fromUtf8, toUtf8 } from "@smithy/util-utf8"; +import { defaultDynamoDBHttpAuthSchemeProvider } from "./auth/httpAuthSchemeProvider"; +import { defaultEndpointResolver } from "./endpoint/endpointResolver"; +export const getRuntimeConfig = (config) => { + return { + apiVersion: "2012-08-10", + base64Decoder: config?.base64Decoder ?? fromBase64, + base64Encoder: config?.base64Encoder ?? toBase64, + disableHostPrefix: config?.disableHostPrefix ?? false, + endpointProvider: config?.endpointProvider ?? defaultEndpointResolver, + extensions: config?.extensions ?? [], + httpAuthSchemeProvider: config?.httpAuthSchemeProvider ?? defaultDynamoDBHttpAuthSchemeProvider, + httpAuthSchemes: config?.httpAuthSchemes ?? [ + { + schemeId: "aws.auth#sigv4", + identityProvider: (ipc) => ipc.getIdentityProvider("aws.auth#sigv4"), + signer: new AwsSdkSigV4Signer(), + }, + ], + logger: config?.logger ?? new NoOpLogger(), + serviceId: config?.serviceId ?? "DynamoDB", + urlParser: config?.urlParser ?? parseUrl, + utf8Decoder: config?.utf8Decoder ?? fromUtf8, + utf8Encoder: config?.utf8Encoder ?? 
toUtf8, + }; +}; diff --git a/amplify/functions/fetchDocuments/node_modules/@aws-sdk/client-dynamodb/dist-es/runtimeExtensions.js b/amplify/functions/fetchDocuments/node_modules/@aws-sdk/client-dynamodb/dist-es/runtimeExtensions.js new file mode 100644 index 0000000..5b29695 --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@aws-sdk/client-dynamodb/dist-es/runtimeExtensions.js @@ -0,0 +1,9 @@ +import { getAwsRegionExtensionConfiguration, resolveAwsRegionExtensionConfiguration, } from "@aws-sdk/region-config-resolver"; +import { getHttpHandlerExtensionConfiguration, resolveHttpHandlerRuntimeConfig } from "@smithy/protocol-http"; +import { getDefaultExtensionConfiguration, resolveDefaultRuntimeConfig } from "@smithy/smithy-client"; +import { getHttpAuthExtensionConfiguration, resolveHttpAuthRuntimeConfig } from "./auth/httpAuthExtensionConfiguration"; +export const resolveRuntimeExtensions = (runtimeConfig, extensions) => { + const extensionConfiguration = Object.assign(getAwsRegionExtensionConfiguration(runtimeConfig), getDefaultExtensionConfiguration(runtimeConfig), getHttpHandlerExtensionConfiguration(runtimeConfig), getHttpAuthExtensionConfiguration(runtimeConfig)); + extensions.forEach((extension) => extension.configure(extensionConfiguration)); + return Object.assign(runtimeConfig, resolveAwsRegionExtensionConfiguration(extensionConfiguration), resolveDefaultRuntimeConfig(extensionConfiguration), resolveHttpHandlerRuntimeConfig(extensionConfiguration), resolveHttpAuthRuntimeConfig(extensionConfiguration)); +}; diff --git a/amplify/functions/fetchDocuments/node_modules/@aws-sdk/client-dynamodb/dist-es/waiters/index.js b/amplify/functions/fetchDocuments/node_modules/@aws-sdk/client-dynamodb/dist-es/waiters/index.js new file mode 100644 index 0000000..a70fabd --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@aws-sdk/client-dynamodb/dist-es/waiters/index.js @@ -0,0 +1,2 @@ +export * from "./waitForTableExists"; +export * from 
"./waitForTableNotExists"; diff --git a/amplify/functions/fetchDocuments/node_modules/@aws-sdk/client-dynamodb/dist-es/waiters/waitForTableExists.js b/amplify/functions/fetchDocuments/node_modules/@aws-sdk/client-dynamodb/dist-es/waiters/waitForTableExists.js new file mode 100644 index 0000000..c6faadd --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@aws-sdk/client-dynamodb/dist-es/waiters/waitForTableExists.js @@ -0,0 +1,34 @@ +import { checkExceptions, createWaiter, WaiterState } from "@smithy/util-waiter"; +import { DescribeTableCommand } from "../commands/DescribeTableCommand"; +const checkState = async (client, input) => { + let reason; + try { + const result = await client.send(new DescribeTableCommand(input)); + reason = result; + try { + const returnComparator = () => { + return result.Table.TableStatus; + }; + if (returnComparator() === "ACTIVE") { + return { state: WaiterState.SUCCESS, reason }; + } + } + catch (e) { } + } + catch (exception) { + reason = exception; + if (exception.name && exception.name == "ResourceNotFoundException") { + return { state: WaiterState.RETRY, reason }; + } + } + return { state: WaiterState.RETRY, reason }; +}; +export const waitForTableExists = async (params, input) => { + const serviceDefaults = { minDelay: 20, maxDelay: 120 }; + return createWaiter({ ...serviceDefaults, ...params }, input, checkState); +}; +export const waitUntilTableExists = async (params, input) => { + const serviceDefaults = { minDelay: 20, maxDelay: 120 }; + const result = await createWaiter({ ...serviceDefaults, ...params }, input, checkState); + return checkExceptions(result); +}; diff --git a/amplify/functions/fetchDocuments/node_modules/@aws-sdk/client-dynamodb/dist-es/waiters/waitForTableNotExists.js b/amplify/functions/fetchDocuments/node_modules/@aws-sdk/client-dynamodb/dist-es/waiters/waitForTableNotExists.js new file mode 100644 index 0000000..b691c03 --- /dev/null +++ 
b/amplify/functions/fetchDocuments/node_modules/@aws-sdk/client-dynamodb/dist-es/waiters/waitForTableNotExists.js @@ -0,0 +1,25 @@ +import { checkExceptions, createWaiter, WaiterState } from "@smithy/util-waiter"; +import { DescribeTableCommand } from "../commands/DescribeTableCommand"; +const checkState = async (client, input) => { + let reason; + try { + const result = await client.send(new DescribeTableCommand(input)); + reason = result; + } + catch (exception) { + reason = exception; + if (exception.name && exception.name == "ResourceNotFoundException") { + return { state: WaiterState.SUCCESS, reason }; + } + } + return { state: WaiterState.RETRY, reason }; +}; +export const waitForTableNotExists = async (params, input) => { + const serviceDefaults = { minDelay: 20, maxDelay: 120 }; + return createWaiter({ ...serviceDefaults, ...params }, input, checkState); +}; +export const waitUntilTableNotExists = async (params, input) => { + const serviceDefaults = { minDelay: 20, maxDelay: 120 }; + const result = await createWaiter({ ...serviceDefaults, ...params }, input, checkState); + return checkExceptions(result); +}; diff --git a/amplify/functions/fetchDocuments/node_modules/@aws-sdk/client-dynamodb/dist-types/DynamoDB.d.ts b/amplify/functions/fetchDocuments/node_modules/@aws-sdk/client-dynamodb/dist-types/DynamoDB.d.ts new file mode 100644 index 0000000..7ad45f0 --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@aws-sdk/client-dynamodb/dist-types/DynamoDB.d.ts @@ -0,0 +1,433 @@ +import { HttpHandlerOptions as __HttpHandlerOptions } from "@smithy/types"; +import { BatchExecuteStatementCommandInput, BatchExecuteStatementCommandOutput } from "./commands/BatchExecuteStatementCommand"; +import { BatchGetItemCommandInput, BatchGetItemCommandOutput } from "./commands/BatchGetItemCommand"; +import { BatchWriteItemCommandInput, BatchWriteItemCommandOutput } from "./commands/BatchWriteItemCommand"; +import { CreateBackupCommandInput, CreateBackupCommandOutput 
} from "./commands/CreateBackupCommand"; +import { CreateGlobalTableCommandInput, CreateGlobalTableCommandOutput } from "./commands/CreateGlobalTableCommand"; +import { CreateTableCommandInput, CreateTableCommandOutput } from "./commands/CreateTableCommand"; +import { DeleteBackupCommandInput, DeleteBackupCommandOutput } from "./commands/DeleteBackupCommand"; +import { DeleteItemCommandInput, DeleteItemCommandOutput } from "./commands/DeleteItemCommand"; +import { DeleteResourcePolicyCommandInput, DeleteResourcePolicyCommandOutput } from "./commands/DeleteResourcePolicyCommand"; +import { DeleteTableCommandInput, DeleteTableCommandOutput } from "./commands/DeleteTableCommand"; +import { DescribeBackupCommandInput, DescribeBackupCommandOutput } from "./commands/DescribeBackupCommand"; +import { DescribeContinuousBackupsCommandInput, DescribeContinuousBackupsCommandOutput } from "./commands/DescribeContinuousBackupsCommand"; +import { DescribeContributorInsightsCommandInput, DescribeContributorInsightsCommandOutput } from "./commands/DescribeContributorInsightsCommand"; +import { DescribeEndpointsCommandInput, DescribeEndpointsCommandOutput } from "./commands/DescribeEndpointsCommand"; +import { DescribeExportCommandInput, DescribeExportCommandOutput } from "./commands/DescribeExportCommand"; +import { DescribeGlobalTableCommandInput, DescribeGlobalTableCommandOutput } from "./commands/DescribeGlobalTableCommand"; +import { DescribeGlobalTableSettingsCommandInput, DescribeGlobalTableSettingsCommandOutput } from "./commands/DescribeGlobalTableSettingsCommand"; +import { DescribeImportCommandInput, DescribeImportCommandOutput } from "./commands/DescribeImportCommand"; +import { DescribeKinesisStreamingDestinationCommandInput, DescribeKinesisStreamingDestinationCommandOutput } from "./commands/DescribeKinesisStreamingDestinationCommand"; +import { DescribeLimitsCommandInput, DescribeLimitsCommandOutput } from "./commands/DescribeLimitsCommand"; +import { 
DescribeTableCommandInput, DescribeTableCommandOutput } from "./commands/DescribeTableCommand"; +import { DescribeTableReplicaAutoScalingCommandInput, DescribeTableReplicaAutoScalingCommandOutput } from "./commands/DescribeTableReplicaAutoScalingCommand"; +import { DescribeTimeToLiveCommandInput, DescribeTimeToLiveCommandOutput } from "./commands/DescribeTimeToLiveCommand"; +import { DisableKinesisStreamingDestinationCommandInput, DisableKinesisStreamingDestinationCommandOutput } from "./commands/DisableKinesisStreamingDestinationCommand"; +import { EnableKinesisStreamingDestinationCommandInput, EnableKinesisStreamingDestinationCommandOutput } from "./commands/EnableKinesisStreamingDestinationCommand"; +import { ExecuteStatementCommandInput, ExecuteStatementCommandOutput } from "./commands/ExecuteStatementCommand"; +import { ExecuteTransactionCommandInput, ExecuteTransactionCommandOutput } from "./commands/ExecuteTransactionCommand"; +import { ExportTableToPointInTimeCommandInput, ExportTableToPointInTimeCommandOutput } from "./commands/ExportTableToPointInTimeCommand"; +import { GetItemCommandInput, GetItemCommandOutput } from "./commands/GetItemCommand"; +import { GetResourcePolicyCommandInput, GetResourcePolicyCommandOutput } from "./commands/GetResourcePolicyCommand"; +import { ImportTableCommandInput, ImportTableCommandOutput } from "./commands/ImportTableCommand"; +import { ListBackupsCommandInput, ListBackupsCommandOutput } from "./commands/ListBackupsCommand"; +import { ListContributorInsightsCommandInput, ListContributorInsightsCommandOutput } from "./commands/ListContributorInsightsCommand"; +import { ListExportsCommandInput, ListExportsCommandOutput } from "./commands/ListExportsCommand"; +import { ListGlobalTablesCommandInput, ListGlobalTablesCommandOutput } from "./commands/ListGlobalTablesCommand"; +import { ListImportsCommandInput, ListImportsCommandOutput } from "./commands/ListImportsCommand"; +import { ListTablesCommandInput, 
ListTablesCommandOutput } from "./commands/ListTablesCommand"; +import { ListTagsOfResourceCommandInput, ListTagsOfResourceCommandOutput } from "./commands/ListTagsOfResourceCommand"; +import { PutItemCommandInput, PutItemCommandOutput } from "./commands/PutItemCommand"; +import { PutResourcePolicyCommandInput, PutResourcePolicyCommandOutput } from "./commands/PutResourcePolicyCommand"; +import { QueryCommandInput, QueryCommandOutput } from "./commands/QueryCommand"; +import { RestoreTableFromBackupCommandInput, RestoreTableFromBackupCommandOutput } from "./commands/RestoreTableFromBackupCommand"; +import { RestoreTableToPointInTimeCommandInput, RestoreTableToPointInTimeCommandOutput } from "./commands/RestoreTableToPointInTimeCommand"; +import { ScanCommandInput, ScanCommandOutput } from "./commands/ScanCommand"; +import { TagResourceCommandInput, TagResourceCommandOutput } from "./commands/TagResourceCommand"; +import { TransactGetItemsCommandInput, TransactGetItemsCommandOutput } from "./commands/TransactGetItemsCommand"; +import { TransactWriteItemsCommandInput, TransactWriteItemsCommandOutput } from "./commands/TransactWriteItemsCommand"; +import { UntagResourceCommandInput, UntagResourceCommandOutput } from "./commands/UntagResourceCommand"; +import { UpdateContinuousBackupsCommandInput, UpdateContinuousBackupsCommandOutput } from "./commands/UpdateContinuousBackupsCommand"; +import { UpdateContributorInsightsCommandInput, UpdateContributorInsightsCommandOutput } from "./commands/UpdateContributorInsightsCommand"; +import { UpdateGlobalTableCommandInput, UpdateGlobalTableCommandOutput } from "./commands/UpdateGlobalTableCommand"; +import { UpdateGlobalTableSettingsCommandInput, UpdateGlobalTableSettingsCommandOutput } from "./commands/UpdateGlobalTableSettingsCommand"; +import { UpdateItemCommandInput, UpdateItemCommandOutput } from "./commands/UpdateItemCommand"; +import { UpdateKinesisStreamingDestinationCommandInput, 
UpdateKinesisStreamingDestinationCommandOutput } from "./commands/UpdateKinesisStreamingDestinationCommand"; +import { UpdateTableCommandInput, UpdateTableCommandOutput } from "./commands/UpdateTableCommand"; +import { UpdateTableReplicaAutoScalingCommandInput, UpdateTableReplicaAutoScalingCommandOutput } from "./commands/UpdateTableReplicaAutoScalingCommand"; +import { UpdateTimeToLiveCommandInput, UpdateTimeToLiveCommandOutput } from "./commands/UpdateTimeToLiveCommand"; +import { DynamoDBClient } from "./DynamoDBClient"; +export interface DynamoDB { + /** + * @see {@link BatchExecuteStatementCommand} + */ + batchExecuteStatement(args: BatchExecuteStatementCommandInput, options?: __HttpHandlerOptions): Promise; + batchExecuteStatement(args: BatchExecuteStatementCommandInput, cb: (err: any, data?: BatchExecuteStatementCommandOutput) => void): void; + batchExecuteStatement(args: BatchExecuteStatementCommandInput, options: __HttpHandlerOptions, cb: (err: any, data?: BatchExecuteStatementCommandOutput) => void): void; + /** + * @see {@link BatchGetItemCommand} + */ + batchGetItem(args: BatchGetItemCommandInput, options?: __HttpHandlerOptions): Promise; + batchGetItem(args: BatchGetItemCommandInput, cb: (err: any, data?: BatchGetItemCommandOutput) => void): void; + batchGetItem(args: BatchGetItemCommandInput, options: __HttpHandlerOptions, cb: (err: any, data?: BatchGetItemCommandOutput) => void): void; + /** + * @see {@link BatchWriteItemCommand} + */ + batchWriteItem(args: BatchWriteItemCommandInput, options?: __HttpHandlerOptions): Promise; + batchWriteItem(args: BatchWriteItemCommandInput, cb: (err: any, data?: BatchWriteItemCommandOutput) => void): void; + batchWriteItem(args: BatchWriteItemCommandInput, options: __HttpHandlerOptions, cb: (err: any, data?: BatchWriteItemCommandOutput) => void): void; + /** + * @see {@link CreateBackupCommand} + */ + createBackup(args: CreateBackupCommandInput, options?: __HttpHandlerOptions): Promise; + createBackup(args: 
CreateBackupCommandInput, cb: (err: any, data?: CreateBackupCommandOutput) => void): void; + createBackup(args: CreateBackupCommandInput, options: __HttpHandlerOptions, cb: (err: any, data?: CreateBackupCommandOutput) => void): void; + /** + * @see {@link CreateGlobalTableCommand} + */ + createGlobalTable(args: CreateGlobalTableCommandInput, options?: __HttpHandlerOptions): Promise; + createGlobalTable(args: CreateGlobalTableCommandInput, cb: (err: any, data?: CreateGlobalTableCommandOutput) => void): void; + createGlobalTable(args: CreateGlobalTableCommandInput, options: __HttpHandlerOptions, cb: (err: any, data?: CreateGlobalTableCommandOutput) => void): void; + /** + * @see {@link CreateTableCommand} + */ + createTable(args: CreateTableCommandInput, options?: __HttpHandlerOptions): Promise; + createTable(args: CreateTableCommandInput, cb: (err: any, data?: CreateTableCommandOutput) => void): void; + createTable(args: CreateTableCommandInput, options: __HttpHandlerOptions, cb: (err: any, data?: CreateTableCommandOutput) => void): void; + /** + * @see {@link DeleteBackupCommand} + */ + deleteBackup(args: DeleteBackupCommandInput, options?: __HttpHandlerOptions): Promise; + deleteBackup(args: DeleteBackupCommandInput, cb: (err: any, data?: DeleteBackupCommandOutput) => void): void; + deleteBackup(args: DeleteBackupCommandInput, options: __HttpHandlerOptions, cb: (err: any, data?: DeleteBackupCommandOutput) => void): void; + /** + * @see {@link DeleteItemCommand} + */ + deleteItem(args: DeleteItemCommandInput, options?: __HttpHandlerOptions): Promise; + deleteItem(args: DeleteItemCommandInput, cb: (err: any, data?: DeleteItemCommandOutput) => void): void; + deleteItem(args: DeleteItemCommandInput, options: __HttpHandlerOptions, cb: (err: any, data?: DeleteItemCommandOutput) => void): void; + /** + * @see {@link DeleteResourcePolicyCommand} + */ + deleteResourcePolicy(args: DeleteResourcePolicyCommandInput, options?: __HttpHandlerOptions): Promise; + 
deleteResourcePolicy(args: DeleteResourcePolicyCommandInput, cb: (err: any, data?: DeleteResourcePolicyCommandOutput) => void): void; + deleteResourcePolicy(args: DeleteResourcePolicyCommandInput, options: __HttpHandlerOptions, cb: (err: any, data?: DeleteResourcePolicyCommandOutput) => void): void; + /** + * @see {@link DeleteTableCommand} + */ + deleteTable(args: DeleteTableCommandInput, options?: __HttpHandlerOptions): Promise; + deleteTable(args: DeleteTableCommandInput, cb: (err: any, data?: DeleteTableCommandOutput) => void): void; + deleteTable(args: DeleteTableCommandInput, options: __HttpHandlerOptions, cb: (err: any, data?: DeleteTableCommandOutput) => void): void; + /** + * @see {@link DescribeBackupCommand} + */ + describeBackup(args: DescribeBackupCommandInput, options?: __HttpHandlerOptions): Promise; + describeBackup(args: DescribeBackupCommandInput, cb: (err: any, data?: DescribeBackupCommandOutput) => void): void; + describeBackup(args: DescribeBackupCommandInput, options: __HttpHandlerOptions, cb: (err: any, data?: DescribeBackupCommandOutput) => void): void; + /** + * @see {@link DescribeContinuousBackupsCommand} + */ + describeContinuousBackups(args: DescribeContinuousBackupsCommandInput, options?: __HttpHandlerOptions): Promise; + describeContinuousBackups(args: DescribeContinuousBackupsCommandInput, cb: (err: any, data?: DescribeContinuousBackupsCommandOutput) => void): void; + describeContinuousBackups(args: DescribeContinuousBackupsCommandInput, options: __HttpHandlerOptions, cb: (err: any, data?: DescribeContinuousBackupsCommandOutput) => void): void; + /** + * @see {@link DescribeContributorInsightsCommand} + */ + describeContributorInsights(args: DescribeContributorInsightsCommandInput, options?: __HttpHandlerOptions): Promise; + describeContributorInsights(args: DescribeContributorInsightsCommandInput, cb: (err: any, data?: DescribeContributorInsightsCommandOutput) => void): void; + describeContributorInsights(args: 
DescribeContributorInsightsCommandInput, options: __HttpHandlerOptions, cb: (err: any, data?: DescribeContributorInsightsCommandOutput) => void): void; + /** + * @see {@link DescribeEndpointsCommand} + */ + describeEndpoints(): Promise; + describeEndpoints(args: DescribeEndpointsCommandInput, options?: __HttpHandlerOptions): Promise; + describeEndpoints(args: DescribeEndpointsCommandInput, cb: (err: any, data?: DescribeEndpointsCommandOutput) => void): void; + describeEndpoints(args: DescribeEndpointsCommandInput, options: __HttpHandlerOptions, cb: (err: any, data?: DescribeEndpointsCommandOutput) => void): void; + /** + * @see {@link DescribeExportCommand} + */ + describeExport(args: DescribeExportCommandInput, options?: __HttpHandlerOptions): Promise; + describeExport(args: DescribeExportCommandInput, cb: (err: any, data?: DescribeExportCommandOutput) => void): void; + describeExport(args: DescribeExportCommandInput, options: __HttpHandlerOptions, cb: (err: any, data?: DescribeExportCommandOutput) => void): void; + /** + * @see {@link DescribeGlobalTableCommand} + */ + describeGlobalTable(args: DescribeGlobalTableCommandInput, options?: __HttpHandlerOptions): Promise; + describeGlobalTable(args: DescribeGlobalTableCommandInput, cb: (err: any, data?: DescribeGlobalTableCommandOutput) => void): void; + describeGlobalTable(args: DescribeGlobalTableCommandInput, options: __HttpHandlerOptions, cb: (err: any, data?: DescribeGlobalTableCommandOutput) => void): void; + /** + * @see {@link DescribeGlobalTableSettingsCommand} + */ + describeGlobalTableSettings(args: DescribeGlobalTableSettingsCommandInput, options?: __HttpHandlerOptions): Promise; + describeGlobalTableSettings(args: DescribeGlobalTableSettingsCommandInput, cb: (err: any, data?: DescribeGlobalTableSettingsCommandOutput) => void): void; + describeGlobalTableSettings(args: DescribeGlobalTableSettingsCommandInput, options: __HttpHandlerOptions, cb: (err: any, data?: DescribeGlobalTableSettingsCommandOutput) => 
void): void; + /** + * @see {@link DescribeImportCommand} + */ + describeImport(args: DescribeImportCommandInput, options?: __HttpHandlerOptions): Promise; + describeImport(args: DescribeImportCommandInput, cb: (err: any, data?: DescribeImportCommandOutput) => void): void; + describeImport(args: DescribeImportCommandInput, options: __HttpHandlerOptions, cb: (err: any, data?: DescribeImportCommandOutput) => void): void; + /** + * @see {@link DescribeKinesisStreamingDestinationCommand} + */ + describeKinesisStreamingDestination(args: DescribeKinesisStreamingDestinationCommandInput, options?: __HttpHandlerOptions): Promise; + describeKinesisStreamingDestination(args: DescribeKinesisStreamingDestinationCommandInput, cb: (err: any, data?: DescribeKinesisStreamingDestinationCommandOutput) => void): void; + describeKinesisStreamingDestination(args: DescribeKinesisStreamingDestinationCommandInput, options: __HttpHandlerOptions, cb: (err: any, data?: DescribeKinesisStreamingDestinationCommandOutput) => void): void; + /** + * @see {@link DescribeLimitsCommand} + */ + describeLimits(): Promise; + describeLimits(args: DescribeLimitsCommandInput, options?: __HttpHandlerOptions): Promise; + describeLimits(args: DescribeLimitsCommandInput, cb: (err: any, data?: DescribeLimitsCommandOutput) => void): void; + describeLimits(args: DescribeLimitsCommandInput, options: __HttpHandlerOptions, cb: (err: any, data?: DescribeLimitsCommandOutput) => void): void; + /** + * @see {@link DescribeTableCommand} + */ + describeTable(args: DescribeTableCommandInput, options?: __HttpHandlerOptions): Promise; + describeTable(args: DescribeTableCommandInput, cb: (err: any, data?: DescribeTableCommandOutput) => void): void; + describeTable(args: DescribeTableCommandInput, options: __HttpHandlerOptions, cb: (err: any, data?: DescribeTableCommandOutput) => void): void; + /** + * @see {@link DescribeTableReplicaAutoScalingCommand} + */ + describeTableReplicaAutoScaling(args: 
DescribeTableReplicaAutoScalingCommandInput, options?: __HttpHandlerOptions): Promise; + describeTableReplicaAutoScaling(args: DescribeTableReplicaAutoScalingCommandInput, cb: (err: any, data?: DescribeTableReplicaAutoScalingCommandOutput) => void): void; + describeTableReplicaAutoScaling(args: DescribeTableReplicaAutoScalingCommandInput, options: __HttpHandlerOptions, cb: (err: any, data?: DescribeTableReplicaAutoScalingCommandOutput) => void): void; + /** + * @see {@link DescribeTimeToLiveCommand} + */ + describeTimeToLive(args: DescribeTimeToLiveCommandInput, options?: __HttpHandlerOptions): Promise; + describeTimeToLive(args: DescribeTimeToLiveCommandInput, cb: (err: any, data?: DescribeTimeToLiveCommandOutput) => void): void; + describeTimeToLive(args: DescribeTimeToLiveCommandInput, options: __HttpHandlerOptions, cb: (err: any, data?: DescribeTimeToLiveCommandOutput) => void): void; + /** + * @see {@link DisableKinesisStreamingDestinationCommand} + */ + disableKinesisStreamingDestination(args: DisableKinesisStreamingDestinationCommandInput, options?: __HttpHandlerOptions): Promise; + disableKinesisStreamingDestination(args: DisableKinesisStreamingDestinationCommandInput, cb: (err: any, data?: DisableKinesisStreamingDestinationCommandOutput) => void): void; + disableKinesisStreamingDestination(args: DisableKinesisStreamingDestinationCommandInput, options: __HttpHandlerOptions, cb: (err: any, data?: DisableKinesisStreamingDestinationCommandOutput) => void): void; + /** + * @see {@link EnableKinesisStreamingDestinationCommand} + */ + enableKinesisStreamingDestination(args: EnableKinesisStreamingDestinationCommandInput, options?: __HttpHandlerOptions): Promise; + enableKinesisStreamingDestination(args: EnableKinesisStreamingDestinationCommandInput, cb: (err: any, data?: EnableKinesisStreamingDestinationCommandOutput) => void): void; + enableKinesisStreamingDestination(args: EnableKinesisStreamingDestinationCommandInput, options: __HttpHandlerOptions, cb: (err: 
any, data?: EnableKinesisStreamingDestinationCommandOutput) => void): void; + /** + * @see {@link ExecuteStatementCommand} + */ + executeStatement(args: ExecuteStatementCommandInput, options?: __HttpHandlerOptions): Promise; + executeStatement(args: ExecuteStatementCommandInput, cb: (err: any, data?: ExecuteStatementCommandOutput) => void): void; + executeStatement(args: ExecuteStatementCommandInput, options: __HttpHandlerOptions, cb: (err: any, data?: ExecuteStatementCommandOutput) => void): void; + /** + * @see {@link ExecuteTransactionCommand} + */ + executeTransaction(args: ExecuteTransactionCommandInput, options?: __HttpHandlerOptions): Promise; + executeTransaction(args: ExecuteTransactionCommandInput, cb: (err: any, data?: ExecuteTransactionCommandOutput) => void): void; + executeTransaction(args: ExecuteTransactionCommandInput, options: __HttpHandlerOptions, cb: (err: any, data?: ExecuteTransactionCommandOutput) => void): void; + /** + * @see {@link ExportTableToPointInTimeCommand} + */ + exportTableToPointInTime(args: ExportTableToPointInTimeCommandInput, options?: __HttpHandlerOptions): Promise; + exportTableToPointInTime(args: ExportTableToPointInTimeCommandInput, cb: (err: any, data?: ExportTableToPointInTimeCommandOutput) => void): void; + exportTableToPointInTime(args: ExportTableToPointInTimeCommandInput, options: __HttpHandlerOptions, cb: (err: any, data?: ExportTableToPointInTimeCommandOutput) => void): void; + /** + * @see {@link GetItemCommand} + */ + getItem(args: GetItemCommandInput, options?: __HttpHandlerOptions): Promise; + getItem(args: GetItemCommandInput, cb: (err: any, data?: GetItemCommandOutput) => void): void; + getItem(args: GetItemCommandInput, options: __HttpHandlerOptions, cb: (err: any, data?: GetItemCommandOutput) => void): void; + /** + * @see {@link GetResourcePolicyCommand} + */ + getResourcePolicy(args: GetResourcePolicyCommandInput, options?: __HttpHandlerOptions): Promise; + getResourcePolicy(args: 
GetResourcePolicyCommandInput, cb: (err: any, data?: GetResourcePolicyCommandOutput) => void): void; + getResourcePolicy(args: GetResourcePolicyCommandInput, options: __HttpHandlerOptions, cb: (err: any, data?: GetResourcePolicyCommandOutput) => void): void; + /** + * @see {@link ImportTableCommand} + */ + importTable(args: ImportTableCommandInput, options?: __HttpHandlerOptions): Promise; + importTable(args: ImportTableCommandInput, cb: (err: any, data?: ImportTableCommandOutput) => void): void; + importTable(args: ImportTableCommandInput, options: __HttpHandlerOptions, cb: (err: any, data?: ImportTableCommandOutput) => void): void; + /** + * @see {@link ListBackupsCommand} + */ + listBackups(): Promise; + listBackups(args: ListBackupsCommandInput, options?: __HttpHandlerOptions): Promise; + listBackups(args: ListBackupsCommandInput, cb: (err: any, data?: ListBackupsCommandOutput) => void): void; + listBackups(args: ListBackupsCommandInput, options: __HttpHandlerOptions, cb: (err: any, data?: ListBackupsCommandOutput) => void): void; + /** + * @see {@link ListContributorInsightsCommand} + */ + listContributorInsights(): Promise; + listContributorInsights(args: ListContributorInsightsCommandInput, options?: __HttpHandlerOptions): Promise; + listContributorInsights(args: ListContributorInsightsCommandInput, cb: (err: any, data?: ListContributorInsightsCommandOutput) => void): void; + listContributorInsights(args: ListContributorInsightsCommandInput, options: __HttpHandlerOptions, cb: (err: any, data?: ListContributorInsightsCommandOutput) => void): void; + /** + * @see {@link ListExportsCommand} + */ + listExports(): Promise; + listExports(args: ListExportsCommandInput, options?: __HttpHandlerOptions): Promise; + listExports(args: ListExportsCommandInput, cb: (err: any, data?: ListExportsCommandOutput) => void): void; + listExports(args: ListExportsCommandInput, options: __HttpHandlerOptions, cb: (err: any, data?: ListExportsCommandOutput) => void): void; + /** + * 
@see {@link ListGlobalTablesCommand} + */ + listGlobalTables(): Promise; + listGlobalTables(args: ListGlobalTablesCommandInput, options?: __HttpHandlerOptions): Promise; + listGlobalTables(args: ListGlobalTablesCommandInput, cb: (err: any, data?: ListGlobalTablesCommandOutput) => void): void; + listGlobalTables(args: ListGlobalTablesCommandInput, options: __HttpHandlerOptions, cb: (err: any, data?: ListGlobalTablesCommandOutput) => void): void; + /** + * @see {@link ListImportsCommand} + */ + listImports(): Promise; + listImports(args: ListImportsCommandInput, options?: __HttpHandlerOptions): Promise; + listImports(args: ListImportsCommandInput, cb: (err: any, data?: ListImportsCommandOutput) => void): void; + listImports(args: ListImportsCommandInput, options: __HttpHandlerOptions, cb: (err: any, data?: ListImportsCommandOutput) => void): void; + /** + * @see {@link ListTablesCommand} + */ + listTables(): Promise; + listTables(args: ListTablesCommandInput, options?: __HttpHandlerOptions): Promise; + listTables(args: ListTablesCommandInput, cb: (err: any, data?: ListTablesCommandOutput) => void): void; + listTables(args: ListTablesCommandInput, options: __HttpHandlerOptions, cb: (err: any, data?: ListTablesCommandOutput) => void): void; + /** + * @see {@link ListTagsOfResourceCommand} + */ + listTagsOfResource(args: ListTagsOfResourceCommandInput, options?: __HttpHandlerOptions): Promise; + listTagsOfResource(args: ListTagsOfResourceCommandInput, cb: (err: any, data?: ListTagsOfResourceCommandOutput) => void): void; + listTagsOfResource(args: ListTagsOfResourceCommandInput, options: __HttpHandlerOptions, cb: (err: any, data?: ListTagsOfResourceCommandOutput) => void): void; + /** + * @see {@link PutItemCommand} + */ + putItem(args: PutItemCommandInput, options?: __HttpHandlerOptions): Promise; + putItem(args: PutItemCommandInput, cb: (err: any, data?: PutItemCommandOutput) => void): void; + putItem(args: PutItemCommandInput, options: __HttpHandlerOptions, cb: (err: 
any, data?: PutItemCommandOutput) => void): void; + /** + * @see {@link PutResourcePolicyCommand} + */ + putResourcePolicy(args: PutResourcePolicyCommandInput, options?: __HttpHandlerOptions): Promise; + putResourcePolicy(args: PutResourcePolicyCommandInput, cb: (err: any, data?: PutResourcePolicyCommandOutput) => void): void; + putResourcePolicy(args: PutResourcePolicyCommandInput, options: __HttpHandlerOptions, cb: (err: any, data?: PutResourcePolicyCommandOutput) => void): void; + /** + * @see {@link QueryCommand} + */ + query(args: QueryCommandInput, options?: __HttpHandlerOptions): Promise; + query(args: QueryCommandInput, cb: (err: any, data?: QueryCommandOutput) => void): void; + query(args: QueryCommandInput, options: __HttpHandlerOptions, cb: (err: any, data?: QueryCommandOutput) => void): void; + /** + * @see {@link RestoreTableFromBackupCommand} + */ + restoreTableFromBackup(args: RestoreTableFromBackupCommandInput, options?: __HttpHandlerOptions): Promise; + restoreTableFromBackup(args: RestoreTableFromBackupCommandInput, cb: (err: any, data?: RestoreTableFromBackupCommandOutput) => void): void; + restoreTableFromBackup(args: RestoreTableFromBackupCommandInput, options: __HttpHandlerOptions, cb: (err: any, data?: RestoreTableFromBackupCommandOutput) => void): void; + /** + * @see {@link RestoreTableToPointInTimeCommand} + */ + restoreTableToPointInTime(args: RestoreTableToPointInTimeCommandInput, options?: __HttpHandlerOptions): Promise; + restoreTableToPointInTime(args: RestoreTableToPointInTimeCommandInput, cb: (err: any, data?: RestoreTableToPointInTimeCommandOutput) => void): void; + restoreTableToPointInTime(args: RestoreTableToPointInTimeCommandInput, options: __HttpHandlerOptions, cb: (err: any, data?: RestoreTableToPointInTimeCommandOutput) => void): void; + /** + * @see {@link ScanCommand} + */ + scan(args: ScanCommandInput, options?: __HttpHandlerOptions): Promise; + scan(args: ScanCommandInput, cb: (err: any, data?: ScanCommandOutput) => 
void): void; + scan(args: ScanCommandInput, options: __HttpHandlerOptions, cb: (err: any, data?: ScanCommandOutput) => void): void; + /** + * @see {@link TagResourceCommand} + */ + tagResource(args: TagResourceCommandInput, options?: __HttpHandlerOptions): Promise; + tagResource(args: TagResourceCommandInput, cb: (err: any, data?: TagResourceCommandOutput) => void): void; + tagResource(args: TagResourceCommandInput, options: __HttpHandlerOptions, cb: (err: any, data?: TagResourceCommandOutput) => void): void; + /** + * @see {@link TransactGetItemsCommand} + */ + transactGetItems(args: TransactGetItemsCommandInput, options?: __HttpHandlerOptions): Promise; + transactGetItems(args: TransactGetItemsCommandInput, cb: (err: any, data?: TransactGetItemsCommandOutput) => void): void; + transactGetItems(args: TransactGetItemsCommandInput, options: __HttpHandlerOptions, cb: (err: any, data?: TransactGetItemsCommandOutput) => void): void; + /** + * @see {@link TransactWriteItemsCommand} + */ + transactWriteItems(args: TransactWriteItemsCommandInput, options?: __HttpHandlerOptions): Promise; + transactWriteItems(args: TransactWriteItemsCommandInput, cb: (err: any, data?: TransactWriteItemsCommandOutput) => void): void; + transactWriteItems(args: TransactWriteItemsCommandInput, options: __HttpHandlerOptions, cb: (err: any, data?: TransactWriteItemsCommandOutput) => void): void; + /** + * @see {@link UntagResourceCommand} + */ + untagResource(args: UntagResourceCommandInput, options?: __HttpHandlerOptions): Promise; + untagResource(args: UntagResourceCommandInput, cb: (err: any, data?: UntagResourceCommandOutput) => void): void; + untagResource(args: UntagResourceCommandInput, options: __HttpHandlerOptions, cb: (err: any, data?: UntagResourceCommandOutput) => void): void; + /** + * @see {@link UpdateContinuousBackupsCommand} + */ + updateContinuousBackups(args: UpdateContinuousBackupsCommandInput, options?: __HttpHandlerOptions): Promise; + updateContinuousBackups(args: 
UpdateContinuousBackupsCommandInput, cb: (err: any, data?: UpdateContinuousBackupsCommandOutput) => void): void; + updateContinuousBackups(args: UpdateContinuousBackupsCommandInput, options: __HttpHandlerOptions, cb: (err: any, data?: UpdateContinuousBackupsCommandOutput) => void): void; + /** + * @see {@link UpdateContributorInsightsCommand} + */ + updateContributorInsights(args: UpdateContributorInsightsCommandInput, options?: __HttpHandlerOptions): Promise; + updateContributorInsights(args: UpdateContributorInsightsCommandInput, cb: (err: any, data?: UpdateContributorInsightsCommandOutput) => void): void; + updateContributorInsights(args: UpdateContributorInsightsCommandInput, options: __HttpHandlerOptions, cb: (err: any, data?: UpdateContributorInsightsCommandOutput) => void): void; + /** + * @see {@link UpdateGlobalTableCommand} + */ + updateGlobalTable(args: UpdateGlobalTableCommandInput, options?: __HttpHandlerOptions): Promise; + updateGlobalTable(args: UpdateGlobalTableCommandInput, cb: (err: any, data?: UpdateGlobalTableCommandOutput) => void): void; + updateGlobalTable(args: UpdateGlobalTableCommandInput, options: __HttpHandlerOptions, cb: (err: any, data?: UpdateGlobalTableCommandOutput) => void): void; + /** + * @see {@link UpdateGlobalTableSettingsCommand} + */ + updateGlobalTableSettings(args: UpdateGlobalTableSettingsCommandInput, options?: __HttpHandlerOptions): Promise; + updateGlobalTableSettings(args: UpdateGlobalTableSettingsCommandInput, cb: (err: any, data?: UpdateGlobalTableSettingsCommandOutput) => void): void; + updateGlobalTableSettings(args: UpdateGlobalTableSettingsCommandInput, options: __HttpHandlerOptions, cb: (err: any, data?: UpdateGlobalTableSettingsCommandOutput) => void): void; + /** + * @see {@link UpdateItemCommand} + */ + updateItem(args: UpdateItemCommandInput, options?: __HttpHandlerOptions): Promise; + updateItem(args: UpdateItemCommandInput, cb: (err: any, data?: UpdateItemCommandOutput) => void): void; + updateItem(args: 
UpdateItemCommandInput, options: __HttpHandlerOptions, cb: (err: any, data?: UpdateItemCommandOutput) => void): void; + /** + * @see {@link UpdateKinesisStreamingDestinationCommand} + */ + updateKinesisStreamingDestination(args: UpdateKinesisStreamingDestinationCommandInput, options?: __HttpHandlerOptions): Promise; + updateKinesisStreamingDestination(args: UpdateKinesisStreamingDestinationCommandInput, cb: (err: any, data?: UpdateKinesisStreamingDestinationCommandOutput) => void): void; + updateKinesisStreamingDestination(args: UpdateKinesisStreamingDestinationCommandInput, options: __HttpHandlerOptions, cb: (err: any, data?: UpdateKinesisStreamingDestinationCommandOutput) => void): void; + /** + * @see {@link UpdateTableCommand} + */ + updateTable(args: UpdateTableCommandInput, options?: __HttpHandlerOptions): Promise; + updateTable(args: UpdateTableCommandInput, cb: (err: any, data?: UpdateTableCommandOutput) => void): void; + updateTable(args: UpdateTableCommandInput, options: __HttpHandlerOptions, cb: (err: any, data?: UpdateTableCommandOutput) => void): void; + /** + * @see {@link UpdateTableReplicaAutoScalingCommand} + */ + updateTableReplicaAutoScaling(args: UpdateTableReplicaAutoScalingCommandInput, options?: __HttpHandlerOptions): Promise; + updateTableReplicaAutoScaling(args: UpdateTableReplicaAutoScalingCommandInput, cb: (err: any, data?: UpdateTableReplicaAutoScalingCommandOutput) => void): void; + updateTableReplicaAutoScaling(args: UpdateTableReplicaAutoScalingCommandInput, options: __HttpHandlerOptions, cb: (err: any, data?: UpdateTableReplicaAutoScalingCommandOutput) => void): void; + /** + * @see {@link UpdateTimeToLiveCommand} + */ + updateTimeToLive(args: UpdateTimeToLiveCommandInput, options?: __HttpHandlerOptions): Promise; + updateTimeToLive(args: UpdateTimeToLiveCommandInput, cb: (err: any, data?: UpdateTimeToLiveCommandOutput) => void): void; + updateTimeToLive(args: UpdateTimeToLiveCommandInput, options: __HttpHandlerOptions, cb: (err: 
any, data?: UpdateTimeToLiveCommandOutput) => void): void; +} +/** + * Amazon DynamoDB + *

Amazon DynamoDB is a fully managed NoSQL database service that provides fast + * and predictable performance with seamless scalability. DynamoDB lets you + * offload the administrative burdens of operating and scaling a distributed database, so + * that you don't have to worry about hardware provisioning, setup and configuration, + * replication, software patching, or cluster scaling.

+ *

With DynamoDB, you can create database tables that can store and retrieve + * any amount of data, and serve any level of request traffic. You can scale up or scale + * down your tables' throughput capacity without downtime or performance degradation, and + * use the Amazon Web Services Management Console to monitor resource utilization and performance + * metrics.

+ *

DynamoDB automatically spreads the data and traffic for your tables over + * a sufficient number of servers to handle your throughput and storage requirements, while + * maintaining consistent and fast performance. All of your data is stored on solid state + * disks (SSDs) and automatically replicated across multiple Availability Zones in an + * Amazon Web Services Region, providing built-in high availability and data + * durability.

+ * @public + */ +export declare class DynamoDB extends DynamoDBClient implements DynamoDB { +} diff --git a/amplify/functions/fetchDocuments/node_modules/@aws-sdk/client-dynamodb/dist-types/DynamoDBClient.d.ts b/amplify/functions/fetchDocuments/node_modules/@aws-sdk/client-dynamodb/dist-types/DynamoDBClient.d.ts new file mode 100644 index 0000000..c508d62 --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@aws-sdk/client-dynamodb/dist-types/DynamoDBClient.d.ts @@ -0,0 +1,272 @@ +import { AccountIdEndpointMode, AccountIdEndpointModeInputConfig, AccountIdEndpointModeResolvedConfig } from "@aws-sdk/core/account-id-endpoint"; +import { EndpointDiscoveryInputConfig, EndpointDiscoveryResolvedConfig } from "@aws-sdk/middleware-endpoint-discovery"; +import { HostHeaderInputConfig, HostHeaderResolvedConfig } from "@aws-sdk/middleware-host-header"; +import { UserAgentInputConfig, UserAgentResolvedConfig } from "@aws-sdk/middleware-user-agent"; +import { RegionInputConfig, RegionResolvedConfig } from "@smithy/config-resolver"; +import { EndpointInputConfig, EndpointResolvedConfig } from "@smithy/middleware-endpoint"; +import { RetryInputConfig, RetryResolvedConfig } from "@smithy/middleware-retry"; +import { HttpHandlerUserInput as __HttpHandlerUserInput } from "@smithy/protocol-http"; +import { Client as __Client, DefaultsMode as __DefaultsMode, SmithyConfiguration as __SmithyConfiguration, SmithyResolvedConfiguration as __SmithyResolvedConfiguration } from "@smithy/smithy-client"; +import { AwsCredentialIdentityProvider, BodyLengthCalculator as __BodyLengthCalculator, CheckOptionalClientConfig as __CheckOptionalClientConfig, ChecksumConstructor as __ChecksumConstructor, Decoder as __Decoder, Encoder as __Encoder, HashConstructor as __HashConstructor, HttpHandlerOptions as __HttpHandlerOptions, Logger as __Logger, Provider as __Provider, Provider, StreamCollector as __StreamCollector, UrlParser as __UrlParser, UserAgent as __UserAgent } from "@smithy/types"; 
+import { HttpAuthSchemeInputConfig, HttpAuthSchemeResolvedConfig } from "./auth/httpAuthSchemeProvider"; +import { BatchExecuteStatementCommandInput, BatchExecuteStatementCommandOutput } from "./commands/BatchExecuteStatementCommand"; +import { BatchGetItemCommandInput, BatchGetItemCommandOutput } from "./commands/BatchGetItemCommand"; +import { BatchWriteItemCommandInput, BatchWriteItemCommandOutput } from "./commands/BatchWriteItemCommand"; +import { CreateBackupCommandInput, CreateBackupCommandOutput } from "./commands/CreateBackupCommand"; +import { CreateGlobalTableCommandInput, CreateGlobalTableCommandOutput } from "./commands/CreateGlobalTableCommand"; +import { CreateTableCommandInput, CreateTableCommandOutput } from "./commands/CreateTableCommand"; +import { DeleteBackupCommandInput, DeleteBackupCommandOutput } from "./commands/DeleteBackupCommand"; +import { DeleteItemCommandInput, DeleteItemCommandOutput } from "./commands/DeleteItemCommand"; +import { DeleteResourcePolicyCommandInput, DeleteResourcePolicyCommandOutput } from "./commands/DeleteResourcePolicyCommand"; +import { DeleteTableCommandInput, DeleteTableCommandOutput } from "./commands/DeleteTableCommand"; +import { DescribeBackupCommandInput, DescribeBackupCommandOutput } from "./commands/DescribeBackupCommand"; +import { DescribeContinuousBackupsCommandInput, DescribeContinuousBackupsCommandOutput } from "./commands/DescribeContinuousBackupsCommand"; +import { DescribeContributorInsightsCommandInput, DescribeContributorInsightsCommandOutput } from "./commands/DescribeContributorInsightsCommand"; +import { DescribeEndpointsCommandInput, DescribeEndpointsCommandOutput } from "./commands/DescribeEndpointsCommand"; +import { DescribeExportCommandInput, DescribeExportCommandOutput } from "./commands/DescribeExportCommand"; +import { DescribeGlobalTableCommandInput, DescribeGlobalTableCommandOutput } from "./commands/DescribeGlobalTableCommand"; +import { DescribeGlobalTableSettingsCommandInput, 
DescribeGlobalTableSettingsCommandOutput } from "./commands/DescribeGlobalTableSettingsCommand"; +import { DescribeImportCommandInput, DescribeImportCommandOutput } from "./commands/DescribeImportCommand"; +import { DescribeKinesisStreamingDestinationCommandInput, DescribeKinesisStreamingDestinationCommandOutput } from "./commands/DescribeKinesisStreamingDestinationCommand"; +import { DescribeLimitsCommandInput, DescribeLimitsCommandOutput } from "./commands/DescribeLimitsCommand"; +import { DescribeTableCommandInput, DescribeTableCommandOutput } from "./commands/DescribeTableCommand"; +import { DescribeTableReplicaAutoScalingCommandInput, DescribeTableReplicaAutoScalingCommandOutput } from "./commands/DescribeTableReplicaAutoScalingCommand"; +import { DescribeTimeToLiveCommandInput, DescribeTimeToLiveCommandOutput } from "./commands/DescribeTimeToLiveCommand"; +import { DisableKinesisStreamingDestinationCommandInput, DisableKinesisStreamingDestinationCommandOutput } from "./commands/DisableKinesisStreamingDestinationCommand"; +import { EnableKinesisStreamingDestinationCommandInput, EnableKinesisStreamingDestinationCommandOutput } from "./commands/EnableKinesisStreamingDestinationCommand"; +import { ExecuteStatementCommandInput, ExecuteStatementCommandOutput } from "./commands/ExecuteStatementCommand"; +import { ExecuteTransactionCommandInput, ExecuteTransactionCommandOutput } from "./commands/ExecuteTransactionCommand"; +import { ExportTableToPointInTimeCommandInput, ExportTableToPointInTimeCommandOutput } from "./commands/ExportTableToPointInTimeCommand"; +import { GetItemCommandInput, GetItemCommandOutput } from "./commands/GetItemCommand"; +import { GetResourcePolicyCommandInput, GetResourcePolicyCommandOutput } from "./commands/GetResourcePolicyCommand"; +import { ImportTableCommandInput, ImportTableCommandOutput } from "./commands/ImportTableCommand"; +import { ListBackupsCommandInput, ListBackupsCommandOutput } from "./commands/ListBackupsCommand"; +import { 
ListContributorInsightsCommandInput, ListContributorInsightsCommandOutput } from "./commands/ListContributorInsightsCommand"; +import { ListExportsCommandInput, ListExportsCommandOutput } from "./commands/ListExportsCommand"; +import { ListGlobalTablesCommandInput, ListGlobalTablesCommandOutput } from "./commands/ListGlobalTablesCommand"; +import { ListImportsCommandInput, ListImportsCommandOutput } from "./commands/ListImportsCommand"; +import { ListTablesCommandInput, ListTablesCommandOutput } from "./commands/ListTablesCommand"; +import { ListTagsOfResourceCommandInput, ListTagsOfResourceCommandOutput } from "./commands/ListTagsOfResourceCommand"; +import { PutItemCommandInput, PutItemCommandOutput } from "./commands/PutItemCommand"; +import { PutResourcePolicyCommandInput, PutResourcePolicyCommandOutput } from "./commands/PutResourcePolicyCommand"; +import { QueryCommandInput, QueryCommandOutput } from "./commands/QueryCommand"; +import { RestoreTableFromBackupCommandInput, RestoreTableFromBackupCommandOutput } from "./commands/RestoreTableFromBackupCommand"; +import { RestoreTableToPointInTimeCommandInput, RestoreTableToPointInTimeCommandOutput } from "./commands/RestoreTableToPointInTimeCommand"; +import { ScanCommandInput, ScanCommandOutput } from "./commands/ScanCommand"; +import { TagResourceCommandInput, TagResourceCommandOutput } from "./commands/TagResourceCommand"; +import { TransactGetItemsCommandInput, TransactGetItemsCommandOutput } from "./commands/TransactGetItemsCommand"; +import { TransactWriteItemsCommandInput, TransactWriteItemsCommandOutput } from "./commands/TransactWriteItemsCommand"; +import { UntagResourceCommandInput, UntagResourceCommandOutput } from "./commands/UntagResourceCommand"; +import { UpdateContinuousBackupsCommandInput, UpdateContinuousBackupsCommandOutput } from "./commands/UpdateContinuousBackupsCommand"; +import { UpdateContributorInsightsCommandInput, UpdateContributorInsightsCommandOutput } from 
"./commands/UpdateContributorInsightsCommand"; +import { UpdateGlobalTableCommandInput, UpdateGlobalTableCommandOutput } from "./commands/UpdateGlobalTableCommand"; +import { UpdateGlobalTableSettingsCommandInput, UpdateGlobalTableSettingsCommandOutput } from "./commands/UpdateGlobalTableSettingsCommand"; +import { UpdateItemCommandInput, UpdateItemCommandOutput } from "./commands/UpdateItemCommand"; +import { UpdateKinesisStreamingDestinationCommandInput, UpdateKinesisStreamingDestinationCommandOutput } from "./commands/UpdateKinesisStreamingDestinationCommand"; +import { UpdateTableCommandInput, UpdateTableCommandOutput } from "./commands/UpdateTableCommand"; +import { UpdateTableReplicaAutoScalingCommandInput, UpdateTableReplicaAutoScalingCommandOutput } from "./commands/UpdateTableReplicaAutoScalingCommand"; +import { UpdateTimeToLiveCommandInput, UpdateTimeToLiveCommandOutput } from "./commands/UpdateTimeToLiveCommand"; +import { ClientInputEndpointParameters, ClientResolvedEndpointParameters, EndpointParameters } from "./endpoint/EndpointParameters"; +import { RuntimeExtension, RuntimeExtensionsConfig } from "./runtimeExtensions"; +export { __Client }; +/** + * @public + */ +export type ServiceInputTypes = BatchExecuteStatementCommandInput | BatchGetItemCommandInput | BatchWriteItemCommandInput | CreateBackupCommandInput | CreateGlobalTableCommandInput | CreateTableCommandInput | DeleteBackupCommandInput | DeleteItemCommandInput | DeleteResourcePolicyCommandInput | DeleteTableCommandInput | DescribeBackupCommandInput | DescribeContinuousBackupsCommandInput | DescribeContributorInsightsCommandInput | DescribeEndpointsCommandInput | DescribeExportCommandInput | DescribeGlobalTableCommandInput | DescribeGlobalTableSettingsCommandInput | DescribeImportCommandInput | DescribeKinesisStreamingDestinationCommandInput | DescribeLimitsCommandInput | DescribeTableCommandInput | DescribeTableReplicaAutoScalingCommandInput | DescribeTimeToLiveCommandInput | 
DisableKinesisStreamingDestinationCommandInput | EnableKinesisStreamingDestinationCommandInput | ExecuteStatementCommandInput | ExecuteTransactionCommandInput | ExportTableToPointInTimeCommandInput | GetItemCommandInput | GetResourcePolicyCommandInput | ImportTableCommandInput | ListBackupsCommandInput | ListContributorInsightsCommandInput | ListExportsCommandInput | ListGlobalTablesCommandInput | ListImportsCommandInput | ListTablesCommandInput | ListTagsOfResourceCommandInput | PutItemCommandInput | PutResourcePolicyCommandInput | QueryCommandInput | RestoreTableFromBackupCommandInput | RestoreTableToPointInTimeCommandInput | ScanCommandInput | TagResourceCommandInput | TransactGetItemsCommandInput | TransactWriteItemsCommandInput | UntagResourceCommandInput | UpdateContinuousBackupsCommandInput | UpdateContributorInsightsCommandInput | UpdateGlobalTableCommandInput | UpdateGlobalTableSettingsCommandInput | UpdateItemCommandInput | UpdateKinesisStreamingDestinationCommandInput | UpdateTableCommandInput | UpdateTableReplicaAutoScalingCommandInput | UpdateTimeToLiveCommandInput; +/** + * @public + */ +export type ServiceOutputTypes = BatchExecuteStatementCommandOutput | BatchGetItemCommandOutput | BatchWriteItemCommandOutput | CreateBackupCommandOutput | CreateGlobalTableCommandOutput | CreateTableCommandOutput | DeleteBackupCommandOutput | DeleteItemCommandOutput | DeleteResourcePolicyCommandOutput | DeleteTableCommandOutput | DescribeBackupCommandOutput | DescribeContinuousBackupsCommandOutput | DescribeContributorInsightsCommandOutput | DescribeEndpointsCommandOutput | DescribeExportCommandOutput | DescribeGlobalTableCommandOutput | DescribeGlobalTableSettingsCommandOutput | DescribeImportCommandOutput | DescribeKinesisStreamingDestinationCommandOutput | DescribeLimitsCommandOutput | DescribeTableCommandOutput | DescribeTableReplicaAutoScalingCommandOutput | DescribeTimeToLiveCommandOutput | DisableKinesisStreamingDestinationCommandOutput | 
EnableKinesisStreamingDestinationCommandOutput | ExecuteStatementCommandOutput | ExecuteTransactionCommandOutput | ExportTableToPointInTimeCommandOutput | GetItemCommandOutput | GetResourcePolicyCommandOutput | ImportTableCommandOutput | ListBackupsCommandOutput | ListContributorInsightsCommandOutput | ListExportsCommandOutput | ListGlobalTablesCommandOutput | ListImportsCommandOutput | ListTablesCommandOutput | ListTagsOfResourceCommandOutput | PutItemCommandOutput | PutResourcePolicyCommandOutput | QueryCommandOutput | RestoreTableFromBackupCommandOutput | RestoreTableToPointInTimeCommandOutput | ScanCommandOutput | TagResourceCommandOutput | TransactGetItemsCommandOutput | TransactWriteItemsCommandOutput | UntagResourceCommandOutput | UpdateContinuousBackupsCommandOutput | UpdateContributorInsightsCommandOutput | UpdateGlobalTableCommandOutput | UpdateGlobalTableSettingsCommandOutput | UpdateItemCommandOutput | UpdateKinesisStreamingDestinationCommandOutput | UpdateTableCommandOutput | UpdateTableReplicaAutoScalingCommandOutput | UpdateTimeToLiveCommandOutput; +/** + * @public + */ +export interface ClientDefaults extends Partial<__SmithyConfiguration<__HttpHandlerOptions>> { + /** + * The HTTP handler to use or its constructor options. Fetch in browser and Https in Nodejs. + */ + requestHandler?: __HttpHandlerUserInput; + /** + * A constructor for a class implementing the {@link @smithy/types#ChecksumConstructor} interface + * that computes the SHA-256 HMAC or checksum of a string or binary buffer. + * @internal + */ + sha256?: __ChecksumConstructor | __HashConstructor; + /** + * The function that will be used to convert strings into HTTP endpoints. + * @internal + */ + urlParser?: __UrlParser; + /** + * A function that can calculate the length of a request body. + * @internal + */ + bodyLengthChecker?: __BodyLengthCalculator; + /** + * A function that converts a stream into an array of bytes. 
+ * @internal + */ + streamCollector?: __StreamCollector; + /** + * The function that will be used to convert a base64-encoded string to a byte array. + * @internal + */ + base64Decoder?: __Decoder; + /** + * The function that will be used to convert binary data to a base64-encoded string. + * @internal + */ + base64Encoder?: __Encoder; + /** + * The function that will be used to convert a UTF8-encoded string to a byte array. + * @internal + */ + utf8Decoder?: __Decoder; + /** + * The function that will be used to convert binary data to a UTF-8 encoded string. + * @internal + */ + utf8Encoder?: __Encoder; + /** + * The runtime environment. + * @internal + */ + runtime?: string; + /** + * Disable dynamically changing the endpoint of the client based on the hostPrefix + * trait of an operation. + */ + disableHostPrefix?: boolean; + /** + * Unique service identifier. + * @internal + */ + serviceId?: string; + /** + * Enables IPv6/IPv4 dualstack endpoint. + */ + useDualstackEndpoint?: boolean | __Provider; + /** + * Enables FIPS compatible endpoints. + */ + useFipsEndpoint?: boolean | __Provider; + /** + * The AWS region to which this client will send requests + */ + region?: string | __Provider; + /** + * Setting a client profile is similar to setting a value for the + * AWS_PROFILE environment variable. Setting a profile on a client + * in code only affects the single client instance, unlike AWS_PROFILE. + * + * When set, and only for environments where an AWS configuration + * file exists, fields configurable by this file will be retrieved + * from the specified profile within that file. + * Conflicting code configuration and environment variables will + * still have higher priority. + * + * For client credential resolution that involves checking the AWS + * configuration file, the client's profile (this value) will be + * used unless a different profile is set in the credential + * provider options. 
+ * + */ + profile?: string; + /** + * Defines if the AWS AccountId will be used for endpoint routing. + */ + accountIdEndpointMode?: AccountIdEndpointMode | __Provider; + /** + * The provider populating default tracking information to be sent with `user-agent`, `x-amz-user-agent` header + * @internal + */ + defaultUserAgentProvider?: Provider<__UserAgent>; + /** + * Default credentials provider; Not available in browser runtime. + * @deprecated + * @internal + */ + credentialDefaultProvider?: (input: any) => AwsCredentialIdentityProvider; + /** + * Value for how many times a request will be made at most in case of retry. + */ + maxAttempts?: number | __Provider; + /** + * Specifies which retry algorithm to use. + * @see https://docs.aws.amazon.com/AWSJavaScriptSDK/v3/latest/Package/-smithy-util-retry/Enum/RETRY_MODES/ + * + */ + retryMode?: string | __Provider; + /** + * Optional logger for logging debug/info/warn/error. + */ + logger?: __Logger; + /** + * Optional extensions + */ + extensions?: RuntimeExtension[]; + /** + * The {@link @smithy/smithy-client#DefaultsMode} that will be used to determine how certain default configuration options are resolved in the SDK. + */ + defaultsMode?: __DefaultsMode | __Provider<__DefaultsMode>; + /** + * The provider which populates default for endpointDiscoveryEnabled configuration, if it's + * not passed during client creation. + * @internal + */ + endpointDiscoveryEnabledProvider?: __Provider; +} +/** + * @public + */ +export type DynamoDBClientConfigType = Partial<__SmithyConfiguration<__HttpHandlerOptions>> & ClientDefaults & AccountIdEndpointModeInputConfig & UserAgentInputConfig & RetryInputConfig & RegionInputConfig & HostHeaderInputConfig & EndpointInputConfig & HttpAuthSchemeInputConfig & EndpointDiscoveryInputConfig & ClientInputEndpointParameters; +/** + * @public + * + * The configuration interface of DynamoDBClient class constructor that set the region, credentials and other options. 
+ */ +export interface DynamoDBClientConfig extends DynamoDBClientConfigType { +} +/** + * @public + */ +export type DynamoDBClientResolvedConfigType = __SmithyResolvedConfiguration<__HttpHandlerOptions> & Required & RuntimeExtensionsConfig & AccountIdEndpointModeResolvedConfig & UserAgentResolvedConfig & RetryResolvedConfig & RegionResolvedConfig & HostHeaderResolvedConfig & EndpointResolvedConfig & HttpAuthSchemeResolvedConfig & EndpointDiscoveryResolvedConfig & ClientResolvedEndpointParameters; +/** + * @public + * + * The resolved configuration interface of DynamoDBClient class. This is resolved and normalized from the {@link DynamoDBClientConfig | constructor configuration interface}. + */ +export interface DynamoDBClientResolvedConfig extends DynamoDBClientResolvedConfigType { +} +/** + * Amazon DynamoDB + *

Amazon DynamoDB is a fully managed NoSQL database service that provides fast + * and predictable performance with seamless scalability. DynamoDB lets you + * offload the administrative burdens of operating and scaling a distributed database, so + * that you don't have to worry about hardware provisioning, setup and configuration, + * replication, software patching, or cluster scaling.

+ *

With DynamoDB, you can create database tables that can store and retrieve + * any amount of data, and serve any level of request traffic. You can scale up or scale + * down your tables' throughput capacity without downtime or performance degradation, and + * use the Amazon Web Services Management Console to monitor resource utilization and performance + * metrics.

+ *

DynamoDB automatically spreads the data and traffic for your tables over + * a sufficient number of servers to handle your throughput and storage requirements, while + * maintaining consistent and fast performance. All of your data is stored on solid state + * disks (SSDs) and automatically replicated across multiple Availability Zones in an + * Amazon Web Services Region, providing built-in high availability and data + * durability.

+ * @public + */ +export declare class DynamoDBClient extends __Client<__HttpHandlerOptions, ServiceInputTypes, ServiceOutputTypes, DynamoDBClientResolvedConfig> { + /** + * The resolved configuration of DynamoDBClient class. This is resolved and normalized from the {@link DynamoDBClientConfig | constructor configuration interface}. + */ + readonly config: DynamoDBClientResolvedConfig; + constructor(...[configuration]: __CheckOptionalClientConfig); + /** + * Destroy underlying resources, like sockets. It's usually not necessary to do this. + * However in Node.js, it's best to explicitly shut down the client's agent when it is no longer needed. + * Otherwise, sockets might stay open for quite a long time before the server terminates them. + */ + destroy(): void; +} diff --git a/amplify/functions/fetchDocuments/node_modules/@aws-sdk/client-dynamodb/dist-types/auth/httpAuthExtensionConfiguration.d.ts b/amplify/functions/fetchDocuments/node_modules/@aws-sdk/client-dynamodb/dist-types/auth/httpAuthExtensionConfiguration.d.ts new file mode 100644 index 0000000..b2b8f76 --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@aws-sdk/client-dynamodb/dist-types/auth/httpAuthExtensionConfiguration.d.ts @@ -0,0 +1,29 @@ +import { AwsCredentialIdentity, AwsCredentialIdentityProvider, HttpAuthScheme } from "@smithy/types"; +import { DynamoDBHttpAuthSchemeProvider } from "./httpAuthSchemeProvider"; +/** + * @internal + */ +export interface HttpAuthExtensionConfiguration { + setHttpAuthScheme(httpAuthScheme: HttpAuthScheme): void; + httpAuthSchemes(): HttpAuthScheme[]; + setHttpAuthSchemeProvider(httpAuthSchemeProvider: DynamoDBHttpAuthSchemeProvider): void; + httpAuthSchemeProvider(): DynamoDBHttpAuthSchemeProvider; + setCredentials(credentials: AwsCredentialIdentity | AwsCredentialIdentityProvider): void; + credentials(): AwsCredentialIdentity | AwsCredentialIdentityProvider | undefined; +} +/** + * @internal + */ +export type HttpAuthRuntimeConfig = Partial<{ + 
httpAuthSchemes: HttpAuthScheme[]; + httpAuthSchemeProvider: DynamoDBHttpAuthSchemeProvider; + credentials: AwsCredentialIdentity | AwsCredentialIdentityProvider; +}>; +/** + * @internal + */ +export declare const getHttpAuthExtensionConfiguration: (runtimeConfig: HttpAuthRuntimeConfig) => HttpAuthExtensionConfiguration; +/** + * @internal + */ +export declare const resolveHttpAuthRuntimeConfig: (config: HttpAuthExtensionConfiguration) => HttpAuthRuntimeConfig; diff --git a/amplify/functions/fetchDocuments/node_modules/@aws-sdk/client-dynamodb/dist-types/auth/httpAuthSchemeProvider.d.ts b/amplify/functions/fetchDocuments/node_modules/@aws-sdk/client-dynamodb/dist-types/auth/httpAuthSchemeProvider.d.ts new file mode 100644 index 0000000..3760ff0 --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@aws-sdk/client-dynamodb/dist-types/auth/httpAuthSchemeProvider.d.ts @@ -0,0 +1,75 @@ +import { AwsSdkSigV4AuthInputConfig, AwsSdkSigV4AuthResolvedConfig, AwsSdkSigV4PreviouslyResolved } from "@aws-sdk/core"; +import { HandlerExecutionContext, HttpAuthScheme, HttpAuthSchemeParameters, HttpAuthSchemeParametersProvider, HttpAuthSchemeProvider, Provider } from "@smithy/types"; +import { DynamoDBClientResolvedConfig } from "../DynamoDBClient"; +/** + * @internal + */ +export interface DynamoDBHttpAuthSchemeParameters extends HttpAuthSchemeParameters { + region?: string; +} +/** + * @internal + */ +export interface DynamoDBHttpAuthSchemeParametersProvider extends HttpAuthSchemeParametersProvider { +} +/** + * @internal + */ +export declare const defaultDynamoDBHttpAuthSchemeParametersProvider: (config: DynamoDBClientResolvedConfig, context: HandlerExecutionContext, input: object) => Promise; +/** + * @internal + */ +export interface DynamoDBHttpAuthSchemeProvider extends HttpAuthSchemeProvider { +} +/** + * @internal + */ +export declare const defaultDynamoDBHttpAuthSchemeProvider: DynamoDBHttpAuthSchemeProvider; +/** + * @internal + */ +export interface 
HttpAuthSchemeInputConfig extends AwsSdkSigV4AuthInputConfig { + /** + * A comma-separated list of case-sensitive auth scheme names. + * An auth scheme name is a fully qualified auth scheme ID with the namespace prefix trimmed. + * For example, the auth scheme with ID aws.auth#sigv4 is named sigv4. + * @public + */ + authSchemePreference?: string[] | Provider; + /** + * Configuration of HttpAuthSchemes for a client which provides default identity providers and signers per auth scheme. + * @internal + */ + httpAuthSchemes?: HttpAuthScheme[]; + /** + * Configuration of an HttpAuthSchemeProvider for a client which resolves which HttpAuthScheme to use. + * @internal + */ + httpAuthSchemeProvider?: DynamoDBHttpAuthSchemeProvider; +} +/** + * @internal + */ +export interface HttpAuthSchemeResolvedConfig extends AwsSdkSigV4AuthResolvedConfig { + /** + * A comma-separated list of case-sensitive auth scheme names. + * An auth scheme name is a fully qualified auth scheme ID with the namespace prefix trimmed. + * For example, the auth scheme with ID aws.auth#sigv4 is named sigv4. + * @public + */ + readonly authSchemePreference: Provider; + /** + * Configuration of HttpAuthSchemes for a client which provides default identity providers and signers per auth scheme. + * @internal + */ + readonly httpAuthSchemes: HttpAuthScheme[]; + /** + * Configuration of an HttpAuthSchemeProvider for a client which resolves which HttpAuthScheme to use. 
+ * @internal + */ + readonly httpAuthSchemeProvider: DynamoDBHttpAuthSchemeProvider; +} +/** + * @internal + */ +export declare const resolveHttpAuthSchemeConfig: (config: T & HttpAuthSchemeInputConfig & AwsSdkSigV4PreviouslyResolved) => T & HttpAuthSchemeResolvedConfig; diff --git a/amplify/functions/fetchDocuments/node_modules/@aws-sdk/client-dynamodb/dist-types/commands/BatchExecuteStatementCommand.d.ts b/amplify/functions/fetchDocuments/node_modules/@aws-sdk/client-dynamodb/dist-types/commands/BatchExecuteStatementCommand.d.ts new file mode 100644 index 0000000..0d8793c --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@aws-sdk/client-dynamodb/dist-types/commands/BatchExecuteStatementCommand.d.ts @@ -0,0 +1,227 @@ +import { Command as $Command } from "@smithy/smithy-client"; +import { MetadataBearer as __MetadataBearer } from "@smithy/types"; +import { DynamoDBClientResolvedConfig, ServiceInputTypes, ServiceOutputTypes } from "../DynamoDBClient"; +import { BatchExecuteStatementInput, BatchExecuteStatementOutput } from "../models/models_0"; +/** + * @public + */ +export type { __MetadataBearer }; +export { $Command }; +/** + * @public + * + * The input for {@link BatchExecuteStatementCommand}. + */ +export interface BatchExecuteStatementCommandInput extends BatchExecuteStatementInput { +} +/** + * @public + * + * The output of {@link BatchExecuteStatementCommand}. + */ +export interface BatchExecuteStatementCommandOutput extends BatchExecuteStatementOutput, __MetadataBearer { +} +declare const BatchExecuteStatementCommand_base: { + new (input: BatchExecuteStatementCommandInput): import("@smithy/smithy-client").CommandImpl; + new (__0_0: BatchExecuteStatementCommandInput): import("@smithy/smithy-client").CommandImpl; + getEndpointParameterInstructions(): import("@smithy/middleware-endpoint").EndpointParameterInstructions; +}; +/** + *

This operation allows you to perform batch reads or writes on data stored in DynamoDB, + * using PartiQL. Each read statement in a BatchExecuteStatement must specify + * an equality condition on all key attributes. This enforces that each SELECT + * statement in a batch returns at most a single item. For more information, see Running batch operations with PartiQL for DynamoDB .

+ * + *

The entire batch must consist of either read statements or write statements, you + * cannot mix both in one batch.

+ *
+ * + *

A HTTP 200 response does not mean that all statements in the BatchExecuteStatement + * succeeded. Error details for individual statements can be found under the Error field of the BatchStatementResponse for each + * statement.

+ *
+ * @example + * Use a bare-bones client and the command you need to make an API call. + * ```javascript + * import { DynamoDBClient, BatchExecuteStatementCommand } from "@aws-sdk/client-dynamodb"; // ES Modules import + * // const { DynamoDBClient, BatchExecuteStatementCommand } = require("@aws-sdk/client-dynamodb"); // CommonJS import + * const client = new DynamoDBClient(config); + * const input = { // BatchExecuteStatementInput + * Statements: [ // PartiQLBatchRequest // required + * { // BatchStatementRequest + * Statement: "STRING_VALUE", // required + * Parameters: [ // PreparedStatementParameters + * { // AttributeValue Union: only one key present + * S: "STRING_VALUE", + * N: "STRING_VALUE", + * B: new Uint8Array(), // e.g. Buffer.from("") or new TextEncoder().encode("") + * SS: [ // StringSetAttributeValue + * "STRING_VALUE", + * ], + * NS: [ // NumberSetAttributeValue + * "STRING_VALUE", + * ], + * BS: [ // BinarySetAttributeValue + * new Uint8Array(), // e.g. Buffer.from("") or new TextEncoder().encode("") + * ], + * M: { // MapAttributeValue + * "": {// Union: only one key present + * S: "STRING_VALUE", + * N: "STRING_VALUE", + * B: new Uint8Array(), // e.g. Buffer.from("") or new TextEncoder().encode("") + * SS: [ + * "STRING_VALUE", + * ], + * NS: [ + * "STRING_VALUE", + * ], + * BS: [ + * new Uint8Array(), // e.g. 
Buffer.from("") or new TextEncoder().encode("") + * ], + * M: { + * "": "", + * }, + * L: [ // ListAttributeValue + * "", + * ], + * NULL: true || false, + * BOOL: true || false, + * }, + * }, + * L: [ + * "", + * ], + * NULL: true || false, + * BOOL: true || false, + * }, + * ], + * ConsistentRead: true || false, + * ReturnValuesOnConditionCheckFailure: "ALL_OLD" || "NONE", + * }, + * ], + * ReturnConsumedCapacity: "INDEXES" || "TOTAL" || "NONE", + * }; + * const command = new BatchExecuteStatementCommand(input); + * const response = await client.send(command); + * // { // BatchExecuteStatementOutput + * // Responses: [ // PartiQLBatchResponse + * // { // BatchStatementResponse + * // Error: { // BatchStatementError + * // Code: "ConditionalCheckFailed" || "ItemCollectionSizeLimitExceeded" || "RequestLimitExceeded" || "ValidationError" || "ProvisionedThroughputExceeded" || "TransactionConflict" || "ThrottlingError" || "InternalServerError" || "ResourceNotFound" || "AccessDenied" || "DuplicateItem", + * // Message: "STRING_VALUE", + * // Item: { // AttributeMap + * // "": { // AttributeValue Union: only one key present + * // S: "STRING_VALUE", + * // N: "STRING_VALUE", + * // B: new Uint8Array(), + * // SS: [ // StringSetAttributeValue + * // "STRING_VALUE", + * // ], + * // NS: [ // NumberSetAttributeValue + * // "STRING_VALUE", + * // ], + * // BS: [ // BinarySetAttributeValue + * // new Uint8Array(), + * // ], + * // M: { // MapAttributeValue + * // "": {// Union: only one key present + * // S: "STRING_VALUE", + * // N: "STRING_VALUE", + * // B: new Uint8Array(), + * // SS: [ + * // "STRING_VALUE", + * // ], + * // NS: [ + * // "STRING_VALUE", + * // ], + * // BS: [ + * // new Uint8Array(), + * // ], + * // M: { + * // "": "", + * // }, + * // L: [ // ListAttributeValue + * // "", + * // ], + * // NULL: true || false, + * // BOOL: true || false, + * // }, + * // }, + * // L: [ + * // "", + * // ], + * // NULL: true || false, + * // BOOL: true || false, + * // 
}, + * // }, + * // }, + * // TableName: "STRING_VALUE", + * // Item: { + * // "": "", + * // }, + * // }, + * // ], + * // ConsumedCapacity: [ // ConsumedCapacityMultiple + * // { // ConsumedCapacity + * // TableName: "STRING_VALUE", + * // CapacityUnits: Number("double"), + * // ReadCapacityUnits: Number("double"), + * // WriteCapacityUnits: Number("double"), + * // Table: { // Capacity + * // ReadCapacityUnits: Number("double"), + * // WriteCapacityUnits: Number("double"), + * // CapacityUnits: Number("double"), + * // }, + * // LocalSecondaryIndexes: { // SecondaryIndexesCapacityMap + * // "": { + * // ReadCapacityUnits: Number("double"), + * // WriteCapacityUnits: Number("double"), + * // CapacityUnits: Number("double"), + * // }, + * // }, + * // GlobalSecondaryIndexes: { + * // "": { + * // ReadCapacityUnits: Number("double"), + * // WriteCapacityUnits: Number("double"), + * // CapacityUnits: Number("double"), + * // }, + * // }, + * // }, + * // ], + * // }; + * + * ``` + * + * @param BatchExecuteStatementCommandInput - {@link BatchExecuteStatementCommandInput} + * @returns {@link BatchExecuteStatementCommandOutput} + * @see {@link BatchExecuteStatementCommandInput} for command's `input` shape. + * @see {@link BatchExecuteStatementCommandOutput} for command's `response` shape. + * @see {@link DynamoDBClientResolvedConfig | config} for DynamoDBClient's `config` shape. + * + * @throws {@link InternalServerError} (server fault) + *

An error occurred on the server side.

+ * + * @throws {@link RequestLimitExceeded} (client fault) + *

Throughput exceeds the current throughput quota for your account. Please contact + * Amazon Web ServicesSupport to request a + * quota increase.

+ * + * @throws {@link DynamoDBServiceException} + *

Base exception class for all service exceptions from DynamoDB service.

+ * + * + * @public + */ +export declare class BatchExecuteStatementCommand extends BatchExecuteStatementCommand_base { + /** @internal type navigation helper, not in runtime. */ + protected static __types: { + api: { + input: BatchExecuteStatementInput; + output: BatchExecuteStatementOutput; + }; + sdk: { + input: BatchExecuteStatementCommandInput; + output: BatchExecuteStatementCommandOutput; + }; + }; +} diff --git a/amplify/functions/fetchDocuments/node_modules/@aws-sdk/client-dynamodb/dist-types/commands/BatchGetItemCommand.d.ts b/amplify/functions/fetchDocuments/node_modules/@aws-sdk/client-dynamodb/dist-types/commands/BatchGetItemCommand.d.ts new file mode 100644 index 0000000..aee4fd5 --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@aws-sdk/client-dynamodb/dist-types/commands/BatchGetItemCommand.d.ts @@ -0,0 +1,357 @@ +import { Command as $Command } from "@smithy/smithy-client"; +import { MetadataBearer as __MetadataBearer } from "@smithy/types"; +import { DynamoDBClientResolvedConfig, ServiceInputTypes, ServiceOutputTypes } from "../DynamoDBClient"; +import { BatchGetItemInput, BatchGetItemOutput } from "../models/models_0"; +/** + * @public + */ +export type { __MetadataBearer }; +export { $Command }; +/** + * @public + * + * The input for {@link BatchGetItemCommand}. + */ +export interface BatchGetItemCommandInput extends BatchGetItemInput { +} +/** + * @public + * + * The output of {@link BatchGetItemCommand}. + */ +export interface BatchGetItemCommandOutput extends BatchGetItemOutput, __MetadataBearer { +} +declare const BatchGetItemCommand_base: { + new (input: BatchGetItemCommandInput): import("@smithy/smithy-client").CommandImpl; + new (__0_0: BatchGetItemCommandInput): import("@smithy/smithy-client").CommandImpl; + getEndpointParameterInstructions(): import("@smithy/middleware-endpoint").EndpointParameterInstructions; +}; +/** + *

The BatchGetItem operation returns the attributes of one or more items + * from one or more tables. You identify requested items by primary key.

+ *

A single operation can retrieve up to 16 MB of data, which can contain as many as 100 + * items. BatchGetItem returns a partial result if the response size limit is + * exceeded, the table's provisioned throughput is exceeded, more than 1MB per partition is + * requested, or an internal processing failure occurs. If a partial result is returned, + * the operation returns a value for UnprocessedKeys. You can use this value + * to retry the operation starting with the next item to get.

+ * + *

If you request more than 100 items, BatchGetItem returns a + * ValidationException with the message "Too many items requested for + * the BatchGetItem call."

+ *
+ *

For example, if you ask to retrieve 100 items, but each individual item is 300 KB in + * size, the system returns 52 items (so as not to exceed the 16 MB limit). It also returns + * an appropriate UnprocessedKeys value so you can get the next page of + * results. If desired, your application can include its own logic to assemble the pages of + * results into one dataset.

+ *

If none of the items can be processed due to insufficient + * provisioned throughput on all of the tables in the request, then + * BatchGetItem returns a + * ProvisionedThroughputExceededException. If at least + * one of the items is successfully processed, then + * BatchGetItem completes successfully, while returning the keys of the + * unread items in UnprocessedKeys.

+ * + *

If DynamoDB returns any unprocessed items, you should retry the batch operation on + * those items. However, we strongly recommend that you use an exponential + * backoff algorithm. If you retry the batch operation immediately, the + * underlying read or write requests can still fail due to throttling on the individual + * tables. If you delay the batch operation using exponential backoff, the individual + * requests in the batch are much more likely to succeed.

+ *

For more information, see Batch Operations and Error Handling in the Amazon DynamoDB + * Developer Guide.

+ *
+ *

By default, BatchGetItem performs eventually consistent reads on every + * table in the request. If you want strongly consistent reads instead, you can set + * ConsistentRead to true for any or all tables.

+ *

In order to minimize response latency, BatchGetItem may retrieve items in + * parallel.

+ *

When designing your application, keep in mind that DynamoDB does not return items in + * any particular order. To help parse the response by item, include the primary key values + * for the items in your request in the ProjectionExpression parameter.

+ *

If a requested item does not exist, it is not returned in the result. Requests for + * nonexistent items consume the minimum read capacity units according to the type of read. + * For more information, see Working with Tables in the Amazon DynamoDB Developer + * Guide.

+ * + *

+ * BatchGetItem will result in a ValidationException if the + * same key is specified multiple times.

+ *
+ * @example + * Use a bare-bones client and the command you need to make an API call. + * ```javascript + * import { DynamoDBClient, BatchGetItemCommand } from "@aws-sdk/client-dynamodb"; // ES Modules import + * // const { DynamoDBClient, BatchGetItemCommand } = require("@aws-sdk/client-dynamodb"); // CommonJS import + * const client = new DynamoDBClient(config); + * const input = { // BatchGetItemInput + * RequestItems: { // BatchGetRequestMap // required + * "": { // KeysAndAttributes + * Keys: [ // KeyList // required + * { // Key + * "": { // AttributeValue Union: only one key present + * S: "STRING_VALUE", + * N: "STRING_VALUE", + * B: new Uint8Array(), // e.g. Buffer.from("") or new TextEncoder().encode("") + * SS: [ // StringSetAttributeValue + * "STRING_VALUE", + * ], + * NS: [ // NumberSetAttributeValue + * "STRING_VALUE", + * ], + * BS: [ // BinarySetAttributeValue + * new Uint8Array(), // e.g. Buffer.from("") or new TextEncoder().encode("") + * ], + * M: { // MapAttributeValue + * "": {// Union: only one key present + * S: "STRING_VALUE", + * N: "STRING_VALUE", + * B: new Uint8Array(), // e.g. Buffer.from("") or new TextEncoder().encode("") + * SS: [ + * "STRING_VALUE", + * ], + * NS: [ + * "STRING_VALUE", + * ], + * BS: [ + * new Uint8Array(), // e.g. 
Buffer.from("") or new TextEncoder().encode("") + * ], + * M: { + * "": "", + * }, + * L: [ // ListAttributeValue + * "", + * ], + * NULL: true || false, + * BOOL: true || false, + * }, + * }, + * L: [ + * "", + * ], + * NULL: true || false, + * BOOL: true || false, + * }, + * }, + * ], + * AttributesToGet: [ // AttributeNameList + * "STRING_VALUE", + * ], + * ConsistentRead: true || false, + * ProjectionExpression: "STRING_VALUE", + * ExpressionAttributeNames: { // ExpressionAttributeNameMap + * "": "STRING_VALUE", + * }, + * }, + * }, + * ReturnConsumedCapacity: "INDEXES" || "TOTAL" || "NONE", + * }; + * const command = new BatchGetItemCommand(input); + * const response = await client.send(command); + * // { // BatchGetItemOutput + * // Responses: { // BatchGetResponseMap + * // "": [ // ItemList + * // { // AttributeMap + * // "": { // AttributeValue Union: only one key present + * // S: "STRING_VALUE", + * // N: "STRING_VALUE", + * // B: new Uint8Array(), + * // SS: [ // StringSetAttributeValue + * // "STRING_VALUE", + * // ], + * // NS: [ // NumberSetAttributeValue + * // "STRING_VALUE", + * // ], + * // BS: [ // BinarySetAttributeValue + * // new Uint8Array(), + * // ], + * // M: { // MapAttributeValue + * // "": {// Union: only one key present + * // S: "STRING_VALUE", + * // N: "STRING_VALUE", + * // B: new Uint8Array(), + * // SS: [ + * // "STRING_VALUE", + * // ], + * // NS: [ + * // "STRING_VALUE", + * // ], + * // BS: [ + * // new Uint8Array(), + * // ], + * // M: { + * // "": "", + * // }, + * // L: [ // ListAttributeValue + * // "", + * // ], + * // NULL: true || false, + * // BOOL: true || false, + * // }, + * // }, + * // L: [ + * // "", + * // ], + * // NULL: true || false, + * // BOOL: true || false, + * // }, + * // }, + * // ], + * // }, + * // UnprocessedKeys: { // BatchGetRequestMap + * // "": { // KeysAndAttributes + * // Keys: [ // KeyList // required + * // { // Key + * // "": "", + * // }, + * // ], + * // AttributesToGet: [ // 
AttributeNameList + * // "STRING_VALUE", + * // ], + * // ConsistentRead: true || false, + * // ProjectionExpression: "STRING_VALUE", + * // ExpressionAttributeNames: { // ExpressionAttributeNameMap + * // "": "STRING_VALUE", + * // }, + * // }, + * // }, + * // ConsumedCapacity: [ // ConsumedCapacityMultiple + * // { // ConsumedCapacity + * // TableName: "STRING_VALUE", + * // CapacityUnits: Number("double"), + * // ReadCapacityUnits: Number("double"), + * // WriteCapacityUnits: Number("double"), + * // Table: { // Capacity + * // ReadCapacityUnits: Number("double"), + * // WriteCapacityUnits: Number("double"), + * // CapacityUnits: Number("double"), + * // }, + * // LocalSecondaryIndexes: { // SecondaryIndexesCapacityMap + * // "": { + * // ReadCapacityUnits: Number("double"), + * // WriteCapacityUnits: Number("double"), + * // CapacityUnits: Number("double"), + * // }, + * // }, + * // GlobalSecondaryIndexes: { + * // "": { + * // ReadCapacityUnits: Number("double"), + * // WriteCapacityUnits: Number("double"), + * // CapacityUnits: Number("double"), + * // }, + * // }, + * // }, + * // ], + * // }; + * + * ``` + * + * @param BatchGetItemCommandInput - {@link BatchGetItemCommandInput} + * @returns {@link BatchGetItemCommandOutput} + * @see {@link BatchGetItemCommandInput} for command's `input` shape. + * @see {@link BatchGetItemCommandOutput} for command's `response` shape. + * @see {@link DynamoDBClientResolvedConfig | config} for DynamoDBClient's `config` shape. + * + * @throws {@link InternalServerError} (server fault) + *

An error occurred on the server side.

+ * + * @throws {@link InvalidEndpointException} (client fault) + * + * @throws {@link ProvisionedThroughputExceededException} (client fault) + *

Your request rate is too high. The Amazon Web Services SDKs for DynamoDB + * automatically retry requests that receive this exception. Your request is eventually + * successful, unless your retry queue is too large to finish. Reduce the frequency of + * requests and use exponential backoff. For more information, go to Error Retries and Exponential Backoff in the Amazon DynamoDB Developer Guide.

+ * + * @throws {@link RequestLimitExceeded} (client fault) + *

Throughput exceeds the current throughput quota for your account. Please contact + * Amazon Web ServicesSupport to request a + * quota increase.

+ * + * @throws {@link ResourceNotFoundException} (client fault) + *

The operation tried to access a nonexistent table or index. The resource might not + * be specified correctly, or its status might not be ACTIVE.

+ * + * @throws {@link DynamoDBServiceException} + *

Base exception class for all service exceptions from DynamoDB service.

+ * + * + * @example To retrieve multiple items from a table + * ```javascript + * // This example reads multiple items from the Music table using a batch of three GetItem requests. Only the AlbumTitle attribute is returned. + * const input = { + * RequestItems: { + * Music: { + * Keys: [ + * { + * Artist: { + * S: "No One You Know" + * }, + * SongTitle: { + * S: "Call Me Today" + * } + * }, + * { + * Artist: { + * S: "Acme Band" + * }, + * SongTitle: { + * S: "Happy Day" + * } + * }, + * { + * Artist: { + * S: "No One You Know" + * }, + * SongTitle: { + * S: "Scared of My Shadow" + * } + * } + * ], + * ProjectionExpression: "AlbumTitle" + * } + * } + * }; + * const command = new BatchGetItemCommand(input); + * const response = await client.send(command); + * /* response is + * { + * Responses: { + * Music: [ + * { + * AlbumTitle: { + * S: "Somewhat Famous" + * } + * }, + * { + * AlbumTitle: { + * S: "Blue Sky Blues" + * } + * }, + * { + * AlbumTitle: { + * S: "Louder Than Ever" + * } + * } + * ] + * } + * } + * *\/ + * ``` + * + * @public + */ +export declare class BatchGetItemCommand extends BatchGetItemCommand_base { + /** @internal type navigation helper, not in runtime. 
*/ + protected static __types: { + api: { + input: BatchGetItemInput; + output: BatchGetItemOutput; + }; + sdk: { + input: BatchGetItemCommandInput; + output: BatchGetItemCommandOutput; + }; + }; +} diff --git a/amplify/functions/fetchDocuments/node_modules/@aws-sdk/client-dynamodb/dist-types/commands/BatchWriteItemCommand.d.ts b/amplify/functions/fetchDocuments/node_modules/@aws-sdk/client-dynamodb/dist-types/commands/BatchWriteItemCommand.d.ts new file mode 100644 index 0000000..13bed0c --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@aws-sdk/client-dynamodb/dist-types/commands/BatchWriteItemCommand.d.ts @@ -0,0 +1,401 @@ +import { Command as $Command } from "@smithy/smithy-client"; +import { MetadataBearer as __MetadataBearer } from "@smithy/types"; +import { DynamoDBClientResolvedConfig, ServiceInputTypes, ServiceOutputTypes } from "../DynamoDBClient"; +import { BatchWriteItemInput, BatchWriteItemOutput } from "../models/models_0"; +/** + * @public + */ +export type { __MetadataBearer }; +export { $Command }; +/** + * @public + * + * The input for {@link BatchWriteItemCommand}. + */ +export interface BatchWriteItemCommandInput extends BatchWriteItemInput { +} +/** + * @public + * + * The output of {@link BatchWriteItemCommand}. + */ +export interface BatchWriteItemCommandOutput extends BatchWriteItemOutput, __MetadataBearer { +} +declare const BatchWriteItemCommand_base: { + new (input: BatchWriteItemCommandInput): import("@smithy/smithy-client").CommandImpl; + new (__0_0: BatchWriteItemCommandInput): import("@smithy/smithy-client").CommandImpl; + getEndpointParameterInstructions(): import("@smithy/middleware-endpoint").EndpointParameterInstructions; +}; +/** + *

The BatchWriteItem operation puts or deletes multiple items in one or + * more tables. A single call to BatchWriteItem can transmit up to 16MB of + * data over the network, consisting of up to 25 item put or delete operations. While + * individual items can be up to 400 KB once stored, it's important to note that an item's + * representation might be greater than 400KB while being sent in DynamoDB's JSON format + * for the API call. For more details on this distinction, see Naming Rules and Data Types.

+ * + *

+ * BatchWriteItem cannot update items. If you perform a + * BatchWriteItem operation on an existing item, that item's values + * will be overwritten by the operation and it will appear like it was updated. To + * update items, we recommend you use the UpdateItem action.

+ *
+ *

The individual PutItem and DeleteItem operations specified + * in BatchWriteItem are atomic; however BatchWriteItem as a + * whole is not. If any requested operations fail because the table's provisioned + * throughput is exceeded or an internal processing failure occurs, the failed operations + * are returned in the UnprocessedItems response parameter. You can + * investigate and optionally resend the requests. Typically, you would call + * BatchWriteItem in a loop. Each iteration would check for unprocessed + * items and submit a new BatchWriteItem request with those unprocessed items + * until all items have been processed.

+ *

For tables and indexes with provisioned capacity, if none of the items can be + * processed due to insufficient provisioned throughput on all of the tables in the + * request, then BatchWriteItem returns a + * ProvisionedThroughputExceededException. For all tables and indexes, if + * none of the items can be processed due to other throttling scenarios (such as exceeding + * partition level limits), then BatchWriteItem returns a + * ThrottlingException.

+ * + *

If DynamoDB returns any unprocessed items, you should retry the batch operation on + * those items. However, we strongly recommend that you use an exponential + * backoff algorithm. If you retry the batch operation immediately, the + * underlying read or write requests can still fail due to throttling on the individual + * tables. If you delay the batch operation using exponential backoff, the individual + * requests in the batch are much more likely to succeed.

+ *

For more information, see Batch Operations and Error Handling in the Amazon DynamoDB + * Developer Guide.

+ *
+ *

With BatchWriteItem, you can efficiently write or delete large amounts of + * data, such as from Amazon EMR, or copy data from another database into DynamoDB. In + * order to improve performance with these large-scale operations, + * BatchWriteItem does not behave in the same way as individual + * PutItem and DeleteItem calls would. For example, you + * cannot specify conditions on individual put and delete requests, and + * BatchWriteItem does not return deleted items in the response.

+ *

If you use a programming language that supports concurrency, you can use threads to + * write items in parallel. Your application must include the necessary logic to manage the + * threads. With languages that don't support threading, you must update or delete the + * specified items one at a time. In both situations, BatchWriteItem performs + * the specified put and delete operations in parallel, giving you the power of the thread + * pool approach without having to introduce complexity into your application.

+ *

Parallel processing reduces latency, but each specified put and delete request + * consumes the same number of write capacity units whether it is processed in parallel or + * not. Delete operations on nonexistent items consume one write capacity unit.

+ *

If one or more of the following is true, DynamoDB rejects the entire batch write + * operation:

+ *
    + *
  • + *

    One or more tables specified in the BatchWriteItem request does + * not exist.

    + *
  • + *
  • + *

    Primary key attributes specified on an item in the request do not match those + * in the corresponding table's primary key schema.

    + *
  • + *
  • + *

    You try to perform multiple operations on the same item in the same + * BatchWriteItem request. For example, you cannot put and delete + * the same item in the same BatchWriteItem request.

    + *
  • + *
  • + *

    Your request contains at least two items with identical hash and range keys + * (which essentially is two put operations).

    + *
  • + *
  • + *

    There are more than 25 requests in the batch.

    + *
  • + *
  • + *

    Any individual item in a batch exceeds 400 KB.

    + *
  • + *
  • + *

    The total request size exceeds 16 MB.

    + *
  • + *
  • + *

    Any individual items with keys exceeding the key length limits. For a + * partition key, the limit is 2048 bytes and for a sort key, the limit is 1024 + * bytes.

    + *
  • + *
+ * @example + * Use a bare-bones client and the command you need to make an API call. + * ```javascript + * import { DynamoDBClient, BatchWriteItemCommand } from "@aws-sdk/client-dynamodb"; // ES Modules import + * // const { DynamoDBClient, BatchWriteItemCommand } = require("@aws-sdk/client-dynamodb"); // CommonJS import + * const client = new DynamoDBClient(config); + * const input = { // BatchWriteItemInput + * RequestItems: { // BatchWriteItemRequestMap // required + * "": [ // WriteRequests + * { // WriteRequest + * PutRequest: { // PutRequest + * Item: { // PutItemInputAttributeMap // required + * "": { // AttributeValue Union: only one key present + * S: "STRING_VALUE", + * N: "STRING_VALUE", + * B: new Uint8Array(), // e.g. Buffer.from("") or new TextEncoder().encode("") + * SS: [ // StringSetAttributeValue + * "STRING_VALUE", + * ], + * NS: [ // NumberSetAttributeValue + * "STRING_VALUE", + * ], + * BS: [ // BinarySetAttributeValue + * new Uint8Array(), // e.g. Buffer.from("") or new TextEncoder().encode("") + * ], + * M: { // MapAttributeValue + * "": {// Union: only one key present + * S: "STRING_VALUE", + * N: "STRING_VALUE", + * B: new Uint8Array(), // e.g. Buffer.from("") or new TextEncoder().encode("") + * SS: [ + * "STRING_VALUE", + * ], + * NS: [ + * "STRING_VALUE", + * ], + * BS: [ + * new Uint8Array(), // e.g. 
Buffer.from("") or new TextEncoder().encode("") + * ], + * M: { + * "": "", + * }, + * L: [ // ListAttributeValue + * "", + * ], + * NULL: true || false, + * BOOL: true || false, + * }, + * }, + * L: [ + * "", + * ], + * NULL: true || false, + * BOOL: true || false, + * }, + * }, + * }, + * DeleteRequest: { // DeleteRequest + * Key: { // Key // required + * "": "", + * }, + * }, + * }, + * ], + * }, + * ReturnConsumedCapacity: "INDEXES" || "TOTAL" || "NONE", + * ReturnItemCollectionMetrics: "SIZE" || "NONE", + * }; + * const command = new BatchWriteItemCommand(input); + * const response = await client.send(command); + * // { // BatchWriteItemOutput + * // UnprocessedItems: { // BatchWriteItemRequestMap + * // "": [ // WriteRequests + * // { // WriteRequest + * // PutRequest: { // PutRequest + * // Item: { // PutItemInputAttributeMap // required + * // "": { // AttributeValue Union: only one key present + * // S: "STRING_VALUE", + * // N: "STRING_VALUE", + * // B: new Uint8Array(), + * // SS: [ // StringSetAttributeValue + * // "STRING_VALUE", + * // ], + * // NS: [ // NumberSetAttributeValue + * // "STRING_VALUE", + * // ], + * // BS: [ // BinarySetAttributeValue + * // new Uint8Array(), + * // ], + * // M: { // MapAttributeValue + * // "": {// Union: only one key present + * // S: "STRING_VALUE", + * // N: "STRING_VALUE", + * // B: new Uint8Array(), + * // SS: [ + * // "STRING_VALUE", + * // ], + * // NS: [ + * // "STRING_VALUE", + * // ], + * // BS: [ + * // new Uint8Array(), + * // ], + * // M: { + * // "": "", + * // }, + * // L: [ // ListAttributeValue + * // "", + * // ], + * // NULL: true || false, + * // BOOL: true || false, + * // }, + * // }, + * // L: [ + * // "", + * // ], + * // NULL: true || false, + * // BOOL: true || false, + * // }, + * // }, + * // }, + * // DeleteRequest: { // DeleteRequest + * // Key: { // Key // required + * // "": "", + * // }, + * // }, + * // }, + * // ], + * // }, + * // ItemCollectionMetrics: { // 
ItemCollectionMetricsPerTable + * // "": [ // ItemCollectionMetricsMultiple + * // { // ItemCollectionMetrics + * // ItemCollectionKey: { // ItemCollectionKeyAttributeMap + * // "": "", + * // }, + * // SizeEstimateRangeGB: [ // ItemCollectionSizeEstimateRange + * // Number("double"), + * // ], + * // }, + * // ], + * // }, + * // ConsumedCapacity: [ // ConsumedCapacityMultiple + * // { // ConsumedCapacity + * // TableName: "STRING_VALUE", + * // CapacityUnits: Number("double"), + * // ReadCapacityUnits: Number("double"), + * // WriteCapacityUnits: Number("double"), + * // Table: { // Capacity + * // ReadCapacityUnits: Number("double"), + * // WriteCapacityUnits: Number("double"), + * // CapacityUnits: Number("double"), + * // }, + * // LocalSecondaryIndexes: { // SecondaryIndexesCapacityMap + * // "": { + * // ReadCapacityUnits: Number("double"), + * // WriteCapacityUnits: Number("double"), + * // CapacityUnits: Number("double"), + * // }, + * // }, + * // GlobalSecondaryIndexes: { + * // "": { + * // ReadCapacityUnits: Number("double"), + * // WriteCapacityUnits: Number("double"), + * // CapacityUnits: Number("double"), + * // }, + * // }, + * // }, + * // ], + * // }; + * + * ``` + * + * @param BatchWriteItemCommandInput - {@link BatchWriteItemCommandInput} + * @returns {@link BatchWriteItemCommandOutput} + * @see {@link BatchWriteItemCommandInput} for command's `input` shape. + * @see {@link BatchWriteItemCommandOutput} for command's `response` shape. + * @see {@link DynamoDBClientResolvedConfig | config} for DynamoDBClient's `config` shape. + * + * @throws {@link InternalServerError} (server fault) + *

An error occurred on the server side.

+ * + * @throws {@link InvalidEndpointException} (client fault) + * + * @throws {@link ItemCollectionSizeLimitExceededException} (client fault) + *

An item collection is too large. This exception is only returned for tables that + * have one or more local secondary indexes.

+ * + * @throws {@link ProvisionedThroughputExceededException} (client fault) + *

Your request rate is too high. The Amazon Web Services SDKs for DynamoDB + * automatically retry requests that receive this exception. Your request is eventually + * successful, unless your retry queue is too large to finish. Reduce the frequency of + * requests and use exponential backoff. For more information, go to Error Retries and Exponential Backoff in the Amazon DynamoDB Developer Guide.

+ * + * @throws {@link RequestLimitExceeded} (client fault) + *

Throughput exceeds the current throughput quota for your account. Please contact + * Amazon Web ServicesSupport to request a + * quota increase.

+ * + * @throws {@link ResourceNotFoundException} (client fault) + *

The operation tried to access a nonexistent table or index. The resource might not + * be specified correctly, or its status might not be ACTIVE.

+ * + * @throws {@link DynamoDBServiceException} + *

Base exception class for all service exceptions from DynamoDB service.

+ * + * + * @example To add multiple items to a table + * ```javascript + * // This example adds three new items to the Music table using a batch of three PutItem requests. + * const input = { + * RequestItems: { + * Music: [ + * { + * PutRequest: { + * Item: { + * AlbumTitle: { + * S: "Somewhat Famous" + * }, + * Artist: { + * S: "No One You Know" + * }, + * SongTitle: { + * S: "Call Me Today" + * } + * } + * } + * }, + * { + * PutRequest: { + * Item: { + * AlbumTitle: { + * S: "Songs About Life" + * }, + * Artist: { + * S: "Acme Band" + * }, + * SongTitle: { + * S: "Happy Day" + * } + * } + * } + * }, + * { + * PutRequest: { + * Item: { + * AlbumTitle: { + * S: "Blue Sky Blues" + * }, + * Artist: { + * S: "No One You Know" + * }, + * SongTitle: { + * S: "Scared of My Shadow" + * } + * } + * } + * } + * ] + * } + * }; + * const command = new BatchWriteItemCommand(input); + * const response = await client.send(command); + * /* response is + * { /* empty *\/ } + * *\/ + * ``` + * + * @public + */ +export declare class BatchWriteItemCommand extends BatchWriteItemCommand_base { + /** @internal type navigation helper, not in runtime. 
*/ + protected static __types: { + api: { + input: BatchWriteItemInput; + output: BatchWriteItemOutput; + }; + sdk: { + input: BatchWriteItemCommandInput; + output: BatchWriteItemCommandOutput; + }; + }; +} diff --git a/amplify/functions/fetchDocuments/node_modules/@aws-sdk/client-dynamodb/dist-types/commands/CreateBackupCommand.d.ts b/amplify/functions/fetchDocuments/node_modules/@aws-sdk/client-dynamodb/dist-types/commands/CreateBackupCommand.d.ts new file mode 100644 index 0000000..32ec75a --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@aws-sdk/client-dynamodb/dist-types/commands/CreateBackupCommand.d.ts @@ -0,0 +1,146 @@ +import { Command as $Command } from "@smithy/smithy-client"; +import { MetadataBearer as __MetadataBearer } from "@smithy/types"; +import { DynamoDBClientResolvedConfig, ServiceInputTypes, ServiceOutputTypes } from "../DynamoDBClient"; +import { CreateBackupInput, CreateBackupOutput } from "../models/models_0"; +/** + * @public + */ +export type { __MetadataBearer }; +export { $Command }; +/** + * @public + * + * The input for {@link CreateBackupCommand}. + */ +export interface CreateBackupCommandInput extends CreateBackupInput { +} +/** + * @public + * + * The output of {@link CreateBackupCommand}. + */ +export interface CreateBackupCommandOutput extends CreateBackupOutput, __MetadataBearer { +} +declare const CreateBackupCommand_base: { + new (input: CreateBackupCommandInput): import("@smithy/smithy-client").CommandImpl; + new (__0_0: CreateBackupCommandInput): import("@smithy/smithy-client").CommandImpl; + getEndpointParameterInstructions(): import("@smithy/middleware-endpoint").EndpointParameterInstructions; +}; +/** + *

Creates a backup for an existing table.

+ *

Each time you create an on-demand backup, the entire table data is backed up. There + * is no limit to the number of on-demand backups that can be taken.

+ *

When you create an on-demand backup, a time marker of the request is cataloged, and + * the backup is created asynchronously, by applying all changes until the time of the + * request to the last full table snapshot. Backup requests are processed instantaneously + * and become available for restore within minutes.

+ *

You can call CreateBackup at a maximum rate of 50 times per + * second.

+ *

All backups in DynamoDB work without consuming any provisioned throughput on the + * table.

+ *

If you submit a backup request on 2018-12-14 at 14:25:00, the backup is guaranteed to + * contain all data committed to the table up to 14:24:00, and data committed after + * 14:26:00 will not be. The backup might contain data modifications made between 14:24:00 + * and 14:26:00. On-demand backup does not support causal consistency.

+ *

Along with data, the following are also included on the backups:

+ *
    + *
  • + *

    Global secondary indexes (GSIs)

    + *
  • + *
  • + *

    Local secondary indexes (LSIs)

    + *
  • + *
  • + *

    Streams

    + *
  • + *
  • + *

    Provisioned read and write capacity

    + *
  • + *
+ * @example + * Use a bare-bones client and the command you need to make an API call. + * ```javascript + * import { DynamoDBClient, CreateBackupCommand } from "@aws-sdk/client-dynamodb"; // ES Modules import + * // const { DynamoDBClient, CreateBackupCommand } = require("@aws-sdk/client-dynamodb"); // CommonJS import + * const client = new DynamoDBClient(config); + * const input = { // CreateBackupInput + * TableName: "STRING_VALUE", // required + * BackupName: "STRING_VALUE", // required + * }; + * const command = new CreateBackupCommand(input); + * const response = await client.send(command); + * // { // CreateBackupOutput + * // BackupDetails: { // BackupDetails + * // BackupArn: "STRING_VALUE", // required + * // BackupName: "STRING_VALUE", // required + * // BackupSizeBytes: Number("long"), + * // BackupStatus: "CREATING" || "DELETED" || "AVAILABLE", // required + * // BackupType: "USER" || "SYSTEM" || "AWS_BACKUP", // required + * // BackupCreationDateTime: new Date("TIMESTAMP"), // required + * // BackupExpiryDateTime: new Date("TIMESTAMP"), + * // }, + * // }; + * + * ``` + * + * @param CreateBackupCommandInput - {@link CreateBackupCommandInput} + * @returns {@link CreateBackupCommandOutput} + * @see {@link CreateBackupCommandInput} for command's `input` shape. + * @see {@link CreateBackupCommandOutput} for command's `response` shape. + * @see {@link DynamoDBClientResolvedConfig | config} for DynamoDBClient's `config` shape. + * + * @throws {@link BackupInUseException} (client fault) + *

There is another ongoing conflicting backup control plane operation on the table. + * The backup is either being created, deleted or restored to a table.

+ * + * @throws {@link ContinuousBackupsUnavailableException} (client fault) + *

Backups have not yet been enabled for this table.

+ * + * @throws {@link InternalServerError} (server fault) + *

An error occurred on the server side.

+ * + * @throws {@link InvalidEndpointException} (client fault) + * + * @throws {@link LimitExceededException} (client fault) + *

There is no limit to the number of daily on-demand backups that can be taken.

+ *

For most purposes, up to 500 simultaneous table operations are allowed per account. These operations + * include CreateTable, UpdateTable, + * DeleteTable,UpdateTimeToLive, + * RestoreTableFromBackup, and RestoreTableToPointInTime.

+ *

When you are creating a table with one or more secondary + * indexes, you can have up to 250 such requests running at a time. However, if the table or + * index specifications are complex, then DynamoDB might temporarily reduce the number + * of concurrent operations.

+ *

When importing into DynamoDB, up to 50 simultaneous import table operations are allowed per account.

+ *

There is a soft account quota of 2,500 tables.

+ *

GetRecords was called with a value of more than 1000 for the limit request parameter.

+ *

More than 2 processes are reading from the same streams shard at the same time. Exceeding + * this limit may result in request throttling.

+ * + * @throws {@link TableInUseException} (client fault) + *

A target table with the specified name is either being created or deleted. + *

+ * + * @throws {@link TableNotFoundException} (client fault) + *

A source table with the name TableName does not currently exist within + * the subscriber's account or the subscriber is operating in the wrong Amazon Web Services Region.

+ * + * @throws {@link DynamoDBServiceException} + *

Base exception class for all service exceptions from DynamoDB service.

+ * + * + * @public + */ +export declare class CreateBackupCommand extends CreateBackupCommand_base { + /** @internal type navigation helper, not in runtime. */ + protected static __types: { + api: { + input: CreateBackupInput; + output: CreateBackupOutput; + }; + sdk: { + input: CreateBackupCommandInput; + output: CreateBackupCommandOutput; + }; + }; +} diff --git a/amplify/functions/fetchDocuments/node_modules/@aws-sdk/client-dynamodb/dist-types/commands/CreateGlobalTableCommand.d.ts b/amplify/functions/fetchDocuments/node_modules/@aws-sdk/client-dynamodb/dist-types/commands/CreateGlobalTableCommand.d.ts new file mode 100644 index 0000000..551fcd3 --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@aws-sdk/client-dynamodb/dist-types/commands/CreateGlobalTableCommand.d.ts @@ -0,0 +1,205 @@ +import { Command as $Command } from "@smithy/smithy-client"; +import { MetadataBearer as __MetadataBearer } from "@smithy/types"; +import { DynamoDBClientResolvedConfig, ServiceInputTypes, ServiceOutputTypes } from "../DynamoDBClient"; +import { CreateGlobalTableInput, CreateGlobalTableOutput } from "../models/models_0"; +/** + * @public + */ +export type { __MetadataBearer }; +export { $Command }; +/** + * @public + * + * The input for {@link CreateGlobalTableCommand}. + */ +export interface CreateGlobalTableCommandInput extends CreateGlobalTableInput { +} +/** + * @public + * + * The output of {@link CreateGlobalTableCommand}. + */ +export interface CreateGlobalTableCommandOutput extends CreateGlobalTableOutput, __MetadataBearer { +} +declare const CreateGlobalTableCommand_base: { + new (input: CreateGlobalTableCommandInput): import("@smithy/smithy-client").CommandImpl; + new (__0_0: CreateGlobalTableCommandInput): import("@smithy/smithy-client").CommandImpl; + getEndpointParameterInstructions(): import("@smithy/middleware-endpoint").EndpointParameterInstructions; +}; +/** + *

Creates a global table from an existing table. A global table creates a replication + * relationship between two or more DynamoDB tables with the same table name in the + * provided Regions.

+ * + *

This documentation is for version 2017.11.29 (Legacy) of global tables, which should be avoided for new global tables. Customers should use Global Tables version 2019.11.21 (Current) when possible, because it provides greater flexibility, higher efficiency, and consumes less write capacity than 2017.11.29 (Legacy).

+ *

To determine which version you're using, see Determining the global table version you are using. To update existing global tables from version 2017.11.29 (Legacy) to version 2019.11.21 (Current), see Upgrading global tables.

+ *
+ *

If you want to add a new replica table to a global table, each of the following + * conditions must be true:

+ *
    + *
  • + *

    The table must have the same primary key as all of the other replicas.

    + *
  • + *
  • + *

    The table must have the same name as all of the other replicas.

    + *
  • + *
  • + *

    The table must have DynamoDB Streams enabled, with the stream containing both + * the new and the old images of the item.

    + *
  • + *
  • + *

    None of the replica tables in the global table can contain any data.

    + *
  • + *
+ *

If global secondary indexes are specified, then the following conditions must also be + * met:

+ *
    + *
  • + *

    The global secondary indexes must have the same name.

    + *
  • + *
  • + *

    The global secondary indexes must have the same hash key and sort key (if + * present).

    + *
  • + *
+ *

If local secondary indexes are specified, then the following conditions must also be + * met:

+ *
    + *
  • + *

    The local secondary indexes must have the same name.

    + *
  • + *
  • + *

    The local secondary indexes must have the same hash key and sort key (if + * present).

    + *
  • + *
+ * + *

Write capacity settings should be set consistently across your replica tables and + * secondary indexes. DynamoDB strongly recommends enabling auto scaling to manage the + * write capacity settings for all of your global tables replicas and indexes.

+ *

If you prefer to manage write capacity settings manually, you should provision + * equal replicated write capacity units to your replica tables. You should also + * provision equal replicated write capacity units to matching secondary indexes across + * your global table.

+ *
+ * @example + * Use a bare-bones client and the command you need to make an API call. + * ```javascript + * import { DynamoDBClient, CreateGlobalTableCommand } from "@aws-sdk/client-dynamodb"; // ES Modules import + * // const { DynamoDBClient, CreateGlobalTableCommand } = require("@aws-sdk/client-dynamodb"); // CommonJS import + * const client = new DynamoDBClient(config); + * const input = { // CreateGlobalTableInput + * GlobalTableName: "STRING_VALUE", // required + * ReplicationGroup: [ // ReplicaList // required + * { // Replica + * RegionName: "STRING_VALUE", + * }, + * ], + * }; + * const command = new CreateGlobalTableCommand(input); + * const response = await client.send(command); + * // { // CreateGlobalTableOutput + * // GlobalTableDescription: { // GlobalTableDescription + * // ReplicationGroup: [ // ReplicaDescriptionList + * // { // ReplicaDescription + * // RegionName: "STRING_VALUE", + * // ReplicaStatus: "CREATING" || "CREATION_FAILED" || "UPDATING" || "DELETING" || "ACTIVE" || "REGION_DISABLED" || "INACCESSIBLE_ENCRYPTION_CREDENTIALS", + * // ReplicaStatusDescription: "STRING_VALUE", + * // ReplicaStatusPercentProgress: "STRING_VALUE", + * // KMSMasterKeyId: "STRING_VALUE", + * // ProvisionedThroughputOverride: { // ProvisionedThroughputOverride + * // ReadCapacityUnits: Number("long"), + * // }, + * // OnDemandThroughputOverride: { // OnDemandThroughputOverride + * // MaxReadRequestUnits: Number("long"), + * // }, + * // WarmThroughput: { // TableWarmThroughputDescription + * // ReadUnitsPerSecond: Number("long"), + * // WriteUnitsPerSecond: Number("long"), + * // Status: "CREATING" || "UPDATING" || "DELETING" || "ACTIVE" || "INACCESSIBLE_ENCRYPTION_CREDENTIALS" || "ARCHIVING" || "ARCHIVED", + * // }, + * // GlobalSecondaryIndexes: [ // ReplicaGlobalSecondaryIndexDescriptionList + * // { // ReplicaGlobalSecondaryIndexDescription + * // IndexName: "STRING_VALUE", + * // ProvisionedThroughputOverride: { + * // ReadCapacityUnits: Number("long"), + 
* // }, + * // OnDemandThroughputOverride: { + * // MaxReadRequestUnits: Number("long"), + * // }, + * // WarmThroughput: { // GlobalSecondaryIndexWarmThroughputDescription + * // ReadUnitsPerSecond: Number("long"), + * // WriteUnitsPerSecond: Number("long"), + * // Status: "CREATING" || "UPDATING" || "DELETING" || "ACTIVE", + * // }, + * // }, + * // ], + * // ReplicaInaccessibleDateTime: new Date("TIMESTAMP"), + * // ReplicaTableClassSummary: { // TableClassSummary + * // TableClass: "STANDARD" || "STANDARD_INFREQUENT_ACCESS", + * // LastUpdateDateTime: new Date("TIMESTAMP"), + * // }, + * // }, + * // ], + * // GlobalTableArn: "STRING_VALUE", + * // CreationDateTime: new Date("TIMESTAMP"), + * // GlobalTableStatus: "CREATING" || "ACTIVE" || "DELETING" || "UPDATING", + * // GlobalTableName: "STRING_VALUE", + * // }, + * // }; + * + * ``` + * + * @param CreateGlobalTableCommandInput - {@link CreateGlobalTableCommandInput} + * @returns {@link CreateGlobalTableCommandOutput} + * @see {@link CreateGlobalTableCommandInput} for command's `input` shape. + * @see {@link CreateGlobalTableCommandOutput} for command's `response` shape. + * @see {@link DynamoDBClientResolvedConfig | config} for DynamoDBClient's `config` shape. + * + * @throws {@link GlobalTableAlreadyExistsException} (client fault) + *

The specified global table already exists.

+ * + * @throws {@link InternalServerError} (server fault) + *

An error occurred on the server side.

+ * + * @throws {@link InvalidEndpointException} (client fault) + * + * @throws {@link LimitExceededException} (client fault) + *

There is no limit to the number of daily on-demand backups that can be taken.

+ *

For most purposes, up to 500 simultaneous table operations are allowed per account. These operations + * include CreateTable, UpdateTable, + * DeleteTable,UpdateTimeToLive, + * RestoreTableFromBackup, and RestoreTableToPointInTime.

+ *

When you are creating a table with one or more secondary + * indexes, you can have up to 250 such requests running at a time. However, if the table or + * index specifications are complex, then DynamoDB might temporarily reduce the number + * of concurrent operations.

+ *

When importing into DynamoDB, up to 50 simultaneous import table operations are allowed per account.

+ *

There is a soft account quota of 2,500 tables.

+ *

GetRecords was called with a value of more than 1000 for the limit request parameter.

+ *

More than 2 processes are reading from the same streams shard at the same time. Exceeding + * this limit may result in request throttling.

+ * + * @throws {@link TableNotFoundException} (client fault) + *

A source table with the name TableName does not currently exist within + * the subscriber's account or the subscriber is operating in the wrong Amazon Web Services Region.

+ * + * @throws {@link DynamoDBServiceException} + *

Base exception class for all service exceptions from DynamoDB service.

+ * + * + * @public + */ +export declare class CreateGlobalTableCommand extends CreateGlobalTableCommand_base { + /** @internal type navigation helper, not in runtime. */ + protected static __types: { + api: { + input: CreateGlobalTableInput; + output: CreateGlobalTableOutput; + }; + sdk: { + input: CreateGlobalTableCommandInput; + output: CreateGlobalTableCommandOutput; + }; + }; +} diff --git a/amplify/functions/fetchDocuments/node_modules/@aws-sdk/client-dynamodb/dist-types/commands/CreateTableCommand.d.ts b/amplify/functions/fetchDocuments/node_modules/@aws-sdk/client-dynamodb/dist-types/commands/CreateTableCommand.d.ts new file mode 100644 index 0000000..0c21eec --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@aws-sdk/client-dynamodb/dist-types/commands/CreateTableCommand.d.ts @@ -0,0 +1,378 @@ +import { Command as $Command } from "@smithy/smithy-client"; +import { MetadataBearer as __MetadataBearer } from "@smithy/types"; +import { DynamoDBClientResolvedConfig, ServiceInputTypes, ServiceOutputTypes } from "../DynamoDBClient"; +import { CreateTableInput, CreateTableOutput } from "../models/models_0"; +/** + * @public + */ +export type { __MetadataBearer }; +export { $Command }; +/** + * @public + * + * The input for {@link CreateTableCommand}. + */ +export interface CreateTableCommandInput extends CreateTableInput { +} +/** + * @public + * + * The output of {@link CreateTableCommand}. + */ +export interface CreateTableCommandOutput extends CreateTableOutput, __MetadataBearer { +} +declare const CreateTableCommand_base: { + new (input: CreateTableCommandInput): import("@smithy/smithy-client").CommandImpl; + new (__0_0: CreateTableCommandInput): import("@smithy/smithy-client").CommandImpl; + getEndpointParameterInstructions(): import("@smithy/middleware-endpoint").EndpointParameterInstructions; +}; +/** + *

The CreateTable operation adds a new table to your account. In an Amazon Web Services account, table names must be unique within each Region. That is, you can + * have two tables with same name if you create the tables in different Regions.

+ *

+ * CreateTable is an asynchronous operation. Upon receiving a + * CreateTable request, DynamoDB immediately returns a response with a + * TableStatus of CREATING. After the table is created, + * DynamoDB sets the TableStatus to ACTIVE. You can perform read + * and write operations only on an ACTIVE table.

+ *

You can optionally define secondary indexes on the new table, as part of the + * CreateTable operation. If you want to create multiple tables with + * secondary indexes on them, you must create the tables sequentially. Only one table with + * secondary indexes can be in the CREATING state at any given time.

+ *

You can use the DescribeTable action to check the table status.

+ * @example + * Use a bare-bones client and the command you need to make an API call. + * ```javascript + * import { DynamoDBClient, CreateTableCommand } from "@aws-sdk/client-dynamodb"; // ES Modules import + * // const { DynamoDBClient, CreateTableCommand } = require("@aws-sdk/client-dynamodb"); // CommonJS import + * const client = new DynamoDBClient(config); + * const input = { // CreateTableInput + * AttributeDefinitions: [ // AttributeDefinitions // required + * { // AttributeDefinition + * AttributeName: "STRING_VALUE", // required + * AttributeType: "S" || "N" || "B", // required + * }, + * ], + * TableName: "STRING_VALUE", // required + * KeySchema: [ // KeySchema // required + * { // KeySchemaElement + * AttributeName: "STRING_VALUE", // required + * KeyType: "HASH" || "RANGE", // required + * }, + * ], + * LocalSecondaryIndexes: [ // LocalSecondaryIndexList + * { // LocalSecondaryIndex + * IndexName: "STRING_VALUE", // required + * KeySchema: [ // required + * { + * AttributeName: "STRING_VALUE", // required + * KeyType: "HASH" || "RANGE", // required + * }, + * ], + * Projection: { // Projection + * ProjectionType: "ALL" || "KEYS_ONLY" || "INCLUDE", + * NonKeyAttributes: [ // NonKeyAttributeNameList + * "STRING_VALUE", + * ], + * }, + * }, + * ], + * GlobalSecondaryIndexes: [ // GlobalSecondaryIndexList + * { // GlobalSecondaryIndex + * IndexName: "STRING_VALUE", // required + * KeySchema: [ // required + * { + * AttributeName: "STRING_VALUE", // required + * KeyType: "HASH" || "RANGE", // required + * }, + * ], + * Projection: { + * ProjectionType: "ALL" || "KEYS_ONLY" || "INCLUDE", + * NonKeyAttributes: [ + * "STRING_VALUE", + * ], + * }, + * ProvisionedThroughput: { // ProvisionedThroughput + * ReadCapacityUnits: Number("long"), // required + * WriteCapacityUnits: Number("long"), // required + * }, + * OnDemandThroughput: { // OnDemandThroughput + * MaxReadRequestUnits: Number("long"), + * MaxWriteRequestUnits: Number("long"), + * }, + * 
WarmThroughput: { // WarmThroughput + * ReadUnitsPerSecond: Number("long"), + * WriteUnitsPerSecond: Number("long"), + * }, + * }, + * ], + * BillingMode: "PROVISIONED" || "PAY_PER_REQUEST", + * ProvisionedThroughput: { + * ReadCapacityUnits: Number("long"), // required + * WriteCapacityUnits: Number("long"), // required + * }, + * StreamSpecification: { // StreamSpecification + * StreamEnabled: true || false, // required + * StreamViewType: "NEW_IMAGE" || "OLD_IMAGE" || "NEW_AND_OLD_IMAGES" || "KEYS_ONLY", + * }, + * SSESpecification: { // SSESpecification + * Enabled: true || false, + * SSEType: "AES256" || "KMS", + * KMSMasterKeyId: "STRING_VALUE", + * }, + * Tags: [ // TagList + * { // Tag + * Key: "STRING_VALUE", // required + * Value: "STRING_VALUE", // required + * }, + * ], + * TableClass: "STANDARD" || "STANDARD_INFREQUENT_ACCESS", + * DeletionProtectionEnabled: true || false, + * WarmThroughput: { + * ReadUnitsPerSecond: Number("long"), + * WriteUnitsPerSecond: Number("long"), + * }, + * ResourcePolicy: "STRING_VALUE", + * OnDemandThroughput: { + * MaxReadRequestUnits: Number("long"), + * MaxWriteRequestUnits: Number("long"), + * }, + * }; + * const command = new CreateTableCommand(input); + * const response = await client.send(command); + * // { // CreateTableOutput + * // TableDescription: { // TableDescription + * // AttributeDefinitions: [ // AttributeDefinitions + * // { // AttributeDefinition + * // AttributeName: "STRING_VALUE", // required + * // AttributeType: "S" || "N" || "B", // required + * // }, + * // ], + * // TableName: "STRING_VALUE", + * // KeySchema: [ // KeySchema + * // { // KeySchemaElement + * // AttributeName: "STRING_VALUE", // required + * // KeyType: "HASH" || "RANGE", // required + * // }, + * // ], + * // TableStatus: "CREATING" || "UPDATING" || "DELETING" || "ACTIVE" || "INACCESSIBLE_ENCRYPTION_CREDENTIALS" || "ARCHIVING" || "ARCHIVED", + * // CreationDateTime: new Date("TIMESTAMP"), + * // ProvisionedThroughput: { // 
ProvisionedThroughputDescription + * // LastIncreaseDateTime: new Date("TIMESTAMP"), + * // LastDecreaseDateTime: new Date("TIMESTAMP"), + * // NumberOfDecreasesToday: Number("long"), + * // ReadCapacityUnits: Number("long"), + * // WriteCapacityUnits: Number("long"), + * // }, + * // TableSizeBytes: Number("long"), + * // ItemCount: Number("long"), + * // TableArn: "STRING_VALUE", + * // TableId: "STRING_VALUE", + * // BillingModeSummary: { // BillingModeSummary + * // BillingMode: "PROVISIONED" || "PAY_PER_REQUEST", + * // LastUpdateToPayPerRequestDateTime: new Date("TIMESTAMP"), + * // }, + * // LocalSecondaryIndexes: [ // LocalSecondaryIndexDescriptionList + * // { // LocalSecondaryIndexDescription + * // IndexName: "STRING_VALUE", + * // KeySchema: [ + * // { + * // AttributeName: "STRING_VALUE", // required + * // KeyType: "HASH" || "RANGE", // required + * // }, + * // ], + * // Projection: { // Projection + * // ProjectionType: "ALL" || "KEYS_ONLY" || "INCLUDE", + * // NonKeyAttributes: [ // NonKeyAttributeNameList + * // "STRING_VALUE", + * // ], + * // }, + * // IndexSizeBytes: Number("long"), + * // ItemCount: Number("long"), + * // IndexArn: "STRING_VALUE", + * // }, + * // ], + * // GlobalSecondaryIndexes: [ // GlobalSecondaryIndexDescriptionList + * // { // GlobalSecondaryIndexDescription + * // IndexName: "STRING_VALUE", + * // KeySchema: [ + * // { + * // AttributeName: "STRING_VALUE", // required + * // KeyType: "HASH" || "RANGE", // required + * // }, + * // ], + * // Projection: { + * // ProjectionType: "ALL" || "KEYS_ONLY" || "INCLUDE", + * // NonKeyAttributes: [ + * // "STRING_VALUE", + * // ], + * // }, + * // IndexStatus: "CREATING" || "UPDATING" || "DELETING" || "ACTIVE", + * // Backfilling: true || false, + * // ProvisionedThroughput: { + * // LastIncreaseDateTime: new Date("TIMESTAMP"), + * // LastDecreaseDateTime: new Date("TIMESTAMP"), + * // NumberOfDecreasesToday: Number("long"), + * // ReadCapacityUnits: Number("long"), + * // 
WriteCapacityUnits: Number("long"), + * // }, + * // IndexSizeBytes: Number("long"), + * // ItemCount: Number("long"), + * // IndexArn: "STRING_VALUE", + * // OnDemandThroughput: { // OnDemandThroughput + * // MaxReadRequestUnits: Number("long"), + * // MaxWriteRequestUnits: Number("long"), + * // }, + * // WarmThroughput: { // GlobalSecondaryIndexWarmThroughputDescription + * // ReadUnitsPerSecond: Number("long"), + * // WriteUnitsPerSecond: Number("long"), + * // Status: "CREATING" || "UPDATING" || "DELETING" || "ACTIVE", + * // }, + * // }, + * // ], + * // StreamSpecification: { // StreamSpecification + * // StreamEnabled: true || false, // required + * // StreamViewType: "NEW_IMAGE" || "OLD_IMAGE" || "NEW_AND_OLD_IMAGES" || "KEYS_ONLY", + * // }, + * // LatestStreamLabel: "STRING_VALUE", + * // LatestStreamArn: "STRING_VALUE", + * // GlobalTableVersion: "STRING_VALUE", + * // Replicas: [ // ReplicaDescriptionList + * // { // ReplicaDescription + * // RegionName: "STRING_VALUE", + * // ReplicaStatus: "CREATING" || "CREATION_FAILED" || "UPDATING" || "DELETING" || "ACTIVE" || "REGION_DISABLED" || "INACCESSIBLE_ENCRYPTION_CREDENTIALS", + * // ReplicaStatusDescription: "STRING_VALUE", + * // ReplicaStatusPercentProgress: "STRING_VALUE", + * // KMSMasterKeyId: "STRING_VALUE", + * // ProvisionedThroughputOverride: { // ProvisionedThroughputOverride + * // ReadCapacityUnits: Number("long"), + * // }, + * // OnDemandThroughputOverride: { // OnDemandThroughputOverride + * // MaxReadRequestUnits: Number("long"), + * // }, + * // WarmThroughput: { // TableWarmThroughputDescription + * // ReadUnitsPerSecond: Number("long"), + * // WriteUnitsPerSecond: Number("long"), + * // Status: "CREATING" || "UPDATING" || "DELETING" || "ACTIVE" || "INACCESSIBLE_ENCRYPTION_CREDENTIALS" || "ARCHIVING" || "ARCHIVED", + * // }, + * // GlobalSecondaryIndexes: [ // ReplicaGlobalSecondaryIndexDescriptionList + * // { // ReplicaGlobalSecondaryIndexDescription + * // IndexName: "STRING_VALUE", 
+ * // ProvisionedThroughputOverride: { + * // ReadCapacityUnits: Number("long"), + * // }, + * // OnDemandThroughputOverride: { + * // MaxReadRequestUnits: Number("long"), + * // }, + * // WarmThroughput: { + * // ReadUnitsPerSecond: Number("long"), + * // WriteUnitsPerSecond: Number("long"), + * // Status: "CREATING" || "UPDATING" || "DELETING" || "ACTIVE", + * // }, + * // }, + * // ], + * // ReplicaInaccessibleDateTime: new Date("TIMESTAMP"), + * // ReplicaTableClassSummary: { // TableClassSummary + * // TableClass: "STANDARD" || "STANDARD_INFREQUENT_ACCESS", + * // LastUpdateDateTime: new Date("TIMESTAMP"), + * // }, + * // }, + * // ], + * // RestoreSummary: { // RestoreSummary + * // SourceBackupArn: "STRING_VALUE", + * // SourceTableArn: "STRING_VALUE", + * // RestoreDateTime: new Date("TIMESTAMP"), // required + * // RestoreInProgress: true || false, // required + * // }, + * // SSEDescription: { // SSEDescription + * // Status: "ENABLING" || "ENABLED" || "DISABLING" || "DISABLED" || "UPDATING", + * // SSEType: "AES256" || "KMS", + * // KMSMasterKeyArn: "STRING_VALUE", + * // InaccessibleEncryptionDateTime: new Date("TIMESTAMP"), + * // }, + * // ArchivalSummary: { // ArchivalSummary + * // ArchivalDateTime: new Date("TIMESTAMP"), + * // ArchivalReason: "STRING_VALUE", + * // ArchivalBackupArn: "STRING_VALUE", + * // }, + * // TableClassSummary: { + * // TableClass: "STANDARD" || "STANDARD_INFREQUENT_ACCESS", + * // LastUpdateDateTime: new Date("TIMESTAMP"), + * // }, + * // DeletionProtectionEnabled: true || false, + * // OnDemandThroughput: { + * // MaxReadRequestUnits: Number("long"), + * // MaxWriteRequestUnits: Number("long"), + * // }, + * // WarmThroughput: { + * // ReadUnitsPerSecond: Number("long"), + * // WriteUnitsPerSecond: Number("long"), + * // Status: "CREATING" || "UPDATING" || "DELETING" || "ACTIVE" || "INACCESSIBLE_ENCRYPTION_CREDENTIALS" || "ARCHIVING" || "ARCHIVED", + * // }, + * // MultiRegionConsistency: "EVENTUAL" || "STRONG", + * // 
}, + * // }; + * + * ``` + * + * @param CreateTableCommandInput - {@link CreateTableCommandInput} + * @returns {@link CreateTableCommandOutput} + * @see {@link CreateTableCommandInput} for command's `input` shape. + * @see {@link CreateTableCommandOutput} for command's `response` shape. + * @see {@link DynamoDBClientResolvedConfig | config} for DynamoDBClient's `config` shape. + * + * @throws {@link InternalServerError} (server fault) + *

An error occurred on the server side.

+ * + * @throws {@link InvalidEndpointException} (client fault) + * + * @throws {@link LimitExceededException} (client fault) + *

There is no limit to the number of daily on-demand backups that can be taken.

+ *

For most purposes, up to 500 simultaneous table operations are allowed per account. These operations + * include CreateTable, UpdateTable, + * DeleteTable,UpdateTimeToLive, + * RestoreTableFromBackup, and RestoreTableToPointInTime.

+ *

When you are creating a table with one or more secondary + * indexes, you can have up to 250 such requests running at a time. However, if the table or + * index specifications are complex, then DynamoDB might temporarily reduce the number + * of concurrent operations.

+ *

When importing into DynamoDB, up to 50 simultaneous import table operations are allowed per account.

+ *

There is a soft account quota of 2,500 tables.

+ *

GetRecords was called with a value of more than 1000 for the limit request parameter.

+ *

More than 2 processes are reading from the same streams shard at the same time. Exceeding + * this limit may result in request throttling.

+ * + * @throws {@link ResourceInUseException} (client fault) + *

The operation conflicts with the resource's availability. For example:

+ *
    + *
  • + *

    You attempted to recreate an existing table.

    + *
  • + *
  • + *

    You tried to delete a table currently in the CREATING state.

    + *
  • + *
  • + *

    You tried to update a resource that was already being updated.

    + *
  • + *
+ *

When appropriate, wait for the ongoing update to complete and attempt the request again.

+ * + * @throws {@link DynamoDBServiceException} + *

Base exception class for all service exceptions from DynamoDB service.

+ * + * + * @public + */ +export declare class CreateTableCommand extends CreateTableCommand_base { + /** @internal type navigation helper, not in runtime. */ + protected static __types: { + api: { + input: CreateTableInput; + output: CreateTableOutput; + }; + sdk: { + input: CreateTableCommandInput; + output: CreateTableCommandOutput; + }; + }; +} diff --git a/amplify/functions/fetchDocuments/node_modules/@aws-sdk/client-dynamodb/dist-types/commands/DeleteBackupCommand.d.ts b/amplify/functions/fetchDocuments/node_modules/@aws-sdk/client-dynamodb/dist-types/commands/DeleteBackupCommand.d.ts new file mode 100644 index 0000000..d3da508 --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@aws-sdk/client-dynamodb/dist-types/commands/DeleteBackupCommand.d.ts @@ -0,0 +1,193 @@ +import { Command as $Command } from "@smithy/smithy-client"; +import { MetadataBearer as __MetadataBearer } from "@smithy/types"; +import { DynamoDBClientResolvedConfig, ServiceInputTypes, ServiceOutputTypes } from "../DynamoDBClient"; +import { DeleteBackupInput, DeleteBackupOutput } from "../models/models_0"; +/** + * @public + */ +export type { __MetadataBearer }; +export { $Command }; +/** + * @public + * + * The input for {@link DeleteBackupCommand}. + */ +export interface DeleteBackupCommandInput extends DeleteBackupInput { +} +/** + * @public + * + * The output of {@link DeleteBackupCommand}. + */ +export interface DeleteBackupCommandOutput extends DeleteBackupOutput, __MetadataBearer { +} +declare const DeleteBackupCommand_base: { + new (input: DeleteBackupCommandInput): import("@smithy/smithy-client").CommandImpl; + new (__0_0: DeleteBackupCommandInput): import("@smithy/smithy-client").CommandImpl; + getEndpointParameterInstructions(): import("@smithy/middleware-endpoint").EndpointParameterInstructions; +}; +/** + *

Deletes an existing backup of a table.

+ *

You can call DeleteBackup at a maximum rate of 10 times per + * second.

+ * @example + * Use a bare-bones client and the command you need to make an API call. + * ```javascript + * import { DynamoDBClient, DeleteBackupCommand } from "@aws-sdk/client-dynamodb"; // ES Modules import + * // const { DynamoDBClient, DeleteBackupCommand } = require("@aws-sdk/client-dynamodb"); // CommonJS import + * const client = new DynamoDBClient(config); + * const input = { // DeleteBackupInput + * BackupArn: "STRING_VALUE", // required + * }; + * const command = new DeleteBackupCommand(input); + * const response = await client.send(command); + * // { // DeleteBackupOutput + * // BackupDescription: { // BackupDescription + * // BackupDetails: { // BackupDetails + * // BackupArn: "STRING_VALUE", // required + * // BackupName: "STRING_VALUE", // required + * // BackupSizeBytes: Number("long"), + * // BackupStatus: "CREATING" || "DELETED" || "AVAILABLE", // required + * // BackupType: "USER" || "SYSTEM" || "AWS_BACKUP", // required + * // BackupCreationDateTime: new Date("TIMESTAMP"), // required + * // BackupExpiryDateTime: new Date("TIMESTAMP"), + * // }, + * // SourceTableDetails: { // SourceTableDetails + * // TableName: "STRING_VALUE", // required + * // TableId: "STRING_VALUE", // required + * // TableArn: "STRING_VALUE", + * // TableSizeBytes: Number("long"), + * // KeySchema: [ // KeySchema // required + * // { // KeySchemaElement + * // AttributeName: "STRING_VALUE", // required + * // KeyType: "HASH" || "RANGE", // required + * // }, + * // ], + * // TableCreationDateTime: new Date("TIMESTAMP"), // required + * // ProvisionedThroughput: { // ProvisionedThroughput + * // ReadCapacityUnits: Number("long"), // required + * // WriteCapacityUnits: Number("long"), // required + * // }, + * // OnDemandThroughput: { // OnDemandThroughput + * // MaxReadRequestUnits: Number("long"), + * // MaxWriteRequestUnits: Number("long"), + * // }, + * // ItemCount: Number("long"), + * // BillingMode: "PROVISIONED" || "PAY_PER_REQUEST", + * // }, + * // 
SourceTableFeatureDetails: { // SourceTableFeatureDetails + * // LocalSecondaryIndexes: [ // LocalSecondaryIndexes + * // { // LocalSecondaryIndexInfo + * // IndexName: "STRING_VALUE", + * // KeySchema: [ + * // { + * // AttributeName: "STRING_VALUE", // required + * // KeyType: "HASH" || "RANGE", // required + * // }, + * // ], + * // Projection: { // Projection + * // ProjectionType: "ALL" || "KEYS_ONLY" || "INCLUDE", + * // NonKeyAttributes: [ // NonKeyAttributeNameList + * // "STRING_VALUE", + * // ], + * // }, + * // }, + * // ], + * // GlobalSecondaryIndexes: [ // GlobalSecondaryIndexes + * // { // GlobalSecondaryIndexInfo + * // IndexName: "STRING_VALUE", + * // KeySchema: [ + * // { + * // AttributeName: "STRING_VALUE", // required + * // KeyType: "HASH" || "RANGE", // required + * // }, + * // ], + * // Projection: { + * // ProjectionType: "ALL" || "KEYS_ONLY" || "INCLUDE", + * // NonKeyAttributes: [ + * // "STRING_VALUE", + * // ], + * // }, + * // ProvisionedThroughput: { + * // ReadCapacityUnits: Number("long"), // required + * // WriteCapacityUnits: Number("long"), // required + * // }, + * // OnDemandThroughput: { + * // MaxReadRequestUnits: Number("long"), + * // MaxWriteRequestUnits: Number("long"), + * // }, + * // }, + * // ], + * // StreamDescription: { // StreamSpecification + * // StreamEnabled: true || false, // required + * // StreamViewType: "NEW_IMAGE" || "OLD_IMAGE" || "NEW_AND_OLD_IMAGES" || "KEYS_ONLY", + * // }, + * // TimeToLiveDescription: { // TimeToLiveDescription + * // TimeToLiveStatus: "ENABLING" || "DISABLING" || "ENABLED" || "DISABLED", + * // AttributeName: "STRING_VALUE", + * // }, + * // SSEDescription: { // SSEDescription + * // Status: "ENABLING" || "ENABLED" || "DISABLING" || "DISABLED" || "UPDATING", + * // SSEType: "AES256" || "KMS", + * // KMSMasterKeyArn: "STRING_VALUE", + * // InaccessibleEncryptionDateTime: new Date("TIMESTAMP"), + * // }, + * // }, + * // }, + * // }; + * + * ``` + * + * @param 
DeleteBackupCommandInput - {@link DeleteBackupCommandInput} + * @returns {@link DeleteBackupCommandOutput} + * @see {@link DeleteBackupCommandInput} for command's `input` shape. + * @see {@link DeleteBackupCommandOutput} for command's `response` shape. + * @see {@link DynamoDBClientResolvedConfig | config} for DynamoDBClient's `config` shape. + * + * @throws {@link BackupInUseException} (client fault) + *

There is another ongoing conflicting backup control plane operation on the table. + * The backup is either being created, deleted or restored to a table.

+ * + * @throws {@link BackupNotFoundException} (client fault) + *

Backup not found for the given BackupARN.

+ * + * @throws {@link InternalServerError} (server fault) + *

An error occurred on the server side.

+ * + * @throws {@link InvalidEndpointException} (client fault) + * + * @throws {@link LimitExceededException} (client fault) + *

There is no limit to the number of daily on-demand backups that can be taken.

+ *

For most purposes, up to 500 simultaneous table operations are allowed per account. These operations + * include CreateTable, UpdateTable, + * DeleteTable,UpdateTimeToLive, + * RestoreTableFromBackup, and RestoreTableToPointInTime.

+ *

When you are creating a table with one or more secondary + * indexes, you can have up to 250 such requests running at a time. However, if the table or + * index specifications are complex, then DynamoDB might temporarily reduce the number + * of concurrent operations.

+ *

When importing into DynamoDB, up to 50 simultaneous import table operations are allowed per account.

+ *

There is a soft account quota of 2,500 tables.

+ *

GetRecords was called with a value of more than 1000 for the limit request parameter.

+ *

More than 2 processes are reading from the same streams shard at the same time. Exceeding + * this limit may result in request throttling.

+ * + * @throws {@link DynamoDBServiceException} + *

Base exception class for all service exceptions from DynamoDB service.

+ * + * + * @public + */ +export declare class DeleteBackupCommand extends DeleteBackupCommand_base { + /** @internal type navigation helper, not in runtime. */ + protected static __types: { + api: { + input: DeleteBackupInput; + output: DeleteBackupOutput; + }; + sdk: { + input: DeleteBackupCommandInput; + output: DeleteBackupCommandOutput; + }; + }; +} diff --git a/amplify/functions/fetchDocuments/node_modules/@aws-sdk/client-dynamodb/dist-types/commands/DeleteItemCommand.d.ts b/amplify/functions/fetchDocuments/node_modules/@aws-sdk/client-dynamodb/dist-types/commands/DeleteItemCommand.d.ts new file mode 100644 index 0000000..619ecf2 --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@aws-sdk/client-dynamodb/dist-types/commands/DeleteItemCommand.d.ts @@ -0,0 +1,286 @@ +import { Command as $Command } from "@smithy/smithy-client"; +import { MetadataBearer as __MetadataBearer } from "@smithy/types"; +import { DynamoDBClientResolvedConfig, ServiceInputTypes, ServiceOutputTypes } from "../DynamoDBClient"; +import { DeleteItemInput, DeleteItemOutput } from "../models/models_0"; +/** + * @public + */ +export type { __MetadataBearer }; +export { $Command }; +/** + * @public + * + * The input for {@link DeleteItemCommand}. + */ +export interface DeleteItemCommandInput extends DeleteItemInput { +} +/** + * @public + * + * The output of {@link DeleteItemCommand}. + */ +export interface DeleteItemCommandOutput extends DeleteItemOutput, __MetadataBearer { +} +declare const DeleteItemCommand_base: { + new (input: DeleteItemCommandInput): import("@smithy/smithy-client").CommandImpl; + new (__0_0: DeleteItemCommandInput): import("@smithy/smithy-client").CommandImpl; + getEndpointParameterInstructions(): import("@smithy/middleware-endpoint").EndpointParameterInstructions; +}; +/** + *

Deletes a single item in a table by primary key. You can perform a conditional delete + * operation that deletes the item if it exists, or if it has an expected attribute + * value.

+ *

In addition to deleting an item, you can also return the item's attribute values in + * the same operation, using the ReturnValues parameter.

+ *

Unless you specify conditions, the DeleteItem is an idempotent operation; + * running it multiple times on the same item or attribute does not + * result in an error response.

+ *

Conditional deletes are useful for deleting items only if specific conditions are met. + * If those conditions are met, DynamoDB performs the delete. Otherwise, the item is not + * deleted.

+ * @example + * Use a bare-bones client and the command you need to make an API call. + * ```javascript + * import { DynamoDBClient, DeleteItemCommand } from "@aws-sdk/client-dynamodb"; // ES Modules import + * // const { DynamoDBClient, DeleteItemCommand } = require("@aws-sdk/client-dynamodb"); // CommonJS import + * const client = new DynamoDBClient(config); + * const input = { // DeleteItemInput + * TableName: "STRING_VALUE", // required + * Key: { // Key // required + * "": { // AttributeValue Union: only one key present + * S: "STRING_VALUE", + * N: "STRING_VALUE", + * B: new Uint8Array(), // e.g. Buffer.from("") or new TextEncoder().encode("") + * SS: [ // StringSetAttributeValue + * "STRING_VALUE", + * ], + * NS: [ // NumberSetAttributeValue + * "STRING_VALUE", + * ], + * BS: [ // BinarySetAttributeValue + * new Uint8Array(), // e.g. Buffer.from("") or new TextEncoder().encode("") + * ], + * M: { // MapAttributeValue + * "": {// Union: only one key present + * S: "STRING_VALUE", + * N: "STRING_VALUE", + * B: new Uint8Array(), // e.g. Buffer.from("") or new TextEncoder().encode("") + * SS: [ + * "STRING_VALUE", + * ], + * NS: [ + * "STRING_VALUE", + * ], + * BS: [ + * new Uint8Array(), // e.g. 
Buffer.from("") or new TextEncoder().encode("") + * ], + * M: { + * "": "", + * }, + * L: [ // ListAttributeValue + * "", + * ], + * NULL: true || false, + * BOOL: true || false, + * }, + * }, + * L: [ + * "", + * ], + * NULL: true || false, + * BOOL: true || false, + * }, + * }, + * Expected: { // ExpectedAttributeMap + * "": { // ExpectedAttributeValue + * Value: "", + * Exists: true || false, + * ComparisonOperator: "EQ" || "NE" || "IN" || "LE" || "LT" || "GE" || "GT" || "BETWEEN" || "NOT_NULL" || "NULL" || "CONTAINS" || "NOT_CONTAINS" || "BEGINS_WITH", + * AttributeValueList: [ // AttributeValueList + * "", + * ], + * }, + * }, + * ConditionalOperator: "AND" || "OR", + * ReturnValues: "NONE" || "ALL_OLD" || "UPDATED_OLD" || "ALL_NEW" || "UPDATED_NEW", + * ReturnConsumedCapacity: "INDEXES" || "TOTAL" || "NONE", + * ReturnItemCollectionMetrics: "SIZE" || "NONE", + * ConditionExpression: "STRING_VALUE", + * ExpressionAttributeNames: { // ExpressionAttributeNameMap + * "": "STRING_VALUE", + * }, + * ExpressionAttributeValues: { // ExpressionAttributeValueMap + * "": "", + * }, + * ReturnValuesOnConditionCheckFailure: "ALL_OLD" || "NONE", + * }; + * const command = new DeleteItemCommand(input); + * const response = await client.send(command); + * // { // DeleteItemOutput + * // Attributes: { // AttributeMap + * // "": { // AttributeValue Union: only one key present + * // S: "STRING_VALUE", + * // N: "STRING_VALUE", + * // B: new Uint8Array(), + * // SS: [ // StringSetAttributeValue + * // "STRING_VALUE", + * // ], + * // NS: [ // NumberSetAttributeValue + * // "STRING_VALUE", + * // ], + * // BS: [ // BinarySetAttributeValue + * // new Uint8Array(), + * // ], + * // M: { // MapAttributeValue + * // "": {// Union: only one key present + * // S: "STRING_VALUE", + * // N: "STRING_VALUE", + * // B: new Uint8Array(), + * // SS: [ + * // "STRING_VALUE", + * // ], + * // NS: [ + * // "STRING_VALUE", + * // ], + * // BS: [ + * // new Uint8Array(), + * // ], + * // M: { + * 
// "": "", + * // }, + * // L: [ // ListAttributeValue + * // "", + * // ], + * // NULL: true || false, + * // BOOL: true || false, + * // }, + * // }, + * // L: [ + * // "", + * // ], + * // NULL: true || false, + * // BOOL: true || false, + * // }, + * // }, + * // ConsumedCapacity: { // ConsumedCapacity + * // TableName: "STRING_VALUE", + * // CapacityUnits: Number("double"), + * // ReadCapacityUnits: Number("double"), + * // WriteCapacityUnits: Number("double"), + * // Table: { // Capacity + * // ReadCapacityUnits: Number("double"), + * // WriteCapacityUnits: Number("double"), + * // CapacityUnits: Number("double"), + * // }, + * // LocalSecondaryIndexes: { // SecondaryIndexesCapacityMap + * // "": { + * // ReadCapacityUnits: Number("double"), + * // WriteCapacityUnits: Number("double"), + * // CapacityUnits: Number("double"), + * // }, + * // }, + * // GlobalSecondaryIndexes: { + * // "": { + * // ReadCapacityUnits: Number("double"), + * // WriteCapacityUnits: Number("double"), + * // CapacityUnits: Number("double"), + * // }, + * // }, + * // }, + * // ItemCollectionMetrics: { // ItemCollectionMetrics + * // ItemCollectionKey: { // ItemCollectionKeyAttributeMap + * // "": "", + * // }, + * // SizeEstimateRangeGB: [ // ItemCollectionSizeEstimateRange + * // Number("double"), + * // ], + * // }, + * // }; + * + * ``` + * + * @param DeleteItemCommandInput - {@link DeleteItemCommandInput} + * @returns {@link DeleteItemCommandOutput} + * @see {@link DeleteItemCommandInput} for command's `input` shape. + * @see {@link DeleteItemCommandOutput} for command's `response` shape. + * @see {@link DynamoDBClientResolvedConfig | config} for DynamoDBClient's `config` shape. + * + * @throws {@link ConditionalCheckFailedException} (client fault) + *

A condition specified in the operation failed to be evaluated.

+ * + * @throws {@link InternalServerError} (server fault) + *

An error occurred on the server side.

+ * + * @throws {@link InvalidEndpointException} (client fault) + * + * @throws {@link ItemCollectionSizeLimitExceededException} (client fault) + *

An item collection is too large. This exception is only returned for tables that + * have one or more local secondary indexes.

+ * + * @throws {@link ProvisionedThroughputExceededException} (client fault) + *

Your request rate is too high. The Amazon Web Services SDKs for DynamoDB + * automatically retry requests that receive this exception. Your request is eventually + * successful, unless your retry queue is too large to finish. Reduce the frequency of + * requests and use exponential backoff. For more information, go to Error Retries and Exponential Backoff in the Amazon DynamoDB Developer Guide.

+ * + * @throws {@link ReplicatedWriteConflictException} (client fault) + *

The request was rejected because one or more items in the request are being modified by a request in another Region.

+ * + * @throws {@link RequestLimitExceeded} (client fault) + *

Throughput exceeds the current throughput quota for your account. Please contact + * Amazon Web ServicesSupport to request a + * quota increase.

+ * + * @throws {@link ResourceNotFoundException} (client fault) + *

The operation tried to access a nonexistent table or index. The resource might not + * be specified correctly, or its status might not be ACTIVE.

+ * + * @throws {@link TransactionConflictException} (client fault) + *

Operation was rejected because there is an ongoing transaction for the + * item.

+ * + * @throws {@link DynamoDBServiceException} + *

Base exception class for all service exceptions from DynamoDB service.

+ * + * + * @example To delete an item + * ```javascript + * // This example deletes an item from the Music table. + * const input = { + * Key: { + * Artist: { + * S: "No One You Know" + * }, + * SongTitle: { + * S: "Scared of My Shadow" + * } + * }, + * TableName: "Music" + * }; + * const command = new DeleteItemCommand(input); + * const response = await client.send(command); + * /* response is + * { + * ConsumedCapacity: { + * CapacityUnits: 1, + * TableName: "Music" + * } + * } + * *\/ + * ``` + * + * @public + */ +export declare class DeleteItemCommand extends DeleteItemCommand_base { + /** @internal type navigation helper, not in runtime. */ + protected static __types: { + api: { + input: DeleteItemInput; + output: DeleteItemOutput; + }; + sdk: { + input: DeleteItemCommandInput; + output: DeleteItemCommandOutput; + }; + }; +} diff --git a/amplify/functions/fetchDocuments/node_modules/@aws-sdk/client-dynamodb/dist-types/commands/DeleteResourcePolicyCommand.d.ts b/amplify/functions/fetchDocuments/node_modules/@aws-sdk/client-dynamodb/dist-types/commands/DeleteResourcePolicyCommand.d.ts new file mode 100644 index 0000000..da92cf9 --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@aws-sdk/client-dynamodb/dist-types/commands/DeleteResourcePolicyCommand.d.ts @@ -0,0 +1,138 @@ +import { Command as $Command } from "@smithy/smithy-client"; +import { MetadataBearer as __MetadataBearer } from "@smithy/types"; +import { DynamoDBClientResolvedConfig, ServiceInputTypes, ServiceOutputTypes } from "../DynamoDBClient"; +import { DeleteResourcePolicyInput, DeleteResourcePolicyOutput } from "../models/models_0"; +/** + * @public + */ +export type { __MetadataBearer }; +export { $Command }; +/** + * @public + * + * The input for {@link DeleteResourcePolicyCommand}. + */ +export interface DeleteResourcePolicyCommandInput extends DeleteResourcePolicyInput { +} +/** + * @public + * + * The output of {@link DeleteResourcePolicyCommand}. 
+ */ +export interface DeleteResourcePolicyCommandOutput extends DeleteResourcePolicyOutput, __MetadataBearer { +} +declare const DeleteResourcePolicyCommand_base: { + new (input: DeleteResourcePolicyCommandInput): import("@smithy/smithy-client").CommandImpl; + new (__0_0: DeleteResourcePolicyCommandInput): import("@smithy/smithy-client").CommandImpl; + getEndpointParameterInstructions(): import("@smithy/middleware-endpoint").EndpointParameterInstructions; +}; +/** + *

Deletes the resource-based policy attached to the resource, which can be a table or + * stream.

+ *

+ * DeleteResourcePolicy is an idempotent operation; running it multiple + * times on the same resource doesn't result in an error response, + * unless you specify an ExpectedRevisionId, which will then return a + * PolicyNotFoundException.

+ * + *

To make sure that you don't inadvertently lock yourself out of your own resources, + * the root principal in your Amazon Web Services account can perform + * DeleteResourcePolicy requests, even if your resource-based policy + * explicitly denies the root principal's access.

+ *
+ * + *

+ * DeleteResourcePolicy is an asynchronous operation. If you issue a + * GetResourcePolicy request immediately after running the + * DeleteResourcePolicy request, DynamoDB might still return + * the deleted policy. This is because the policy for your resource might not have been + * deleted yet. Wait for a few seconds, and then try the GetResourcePolicy + * request again.

+ *
+ * @example + * Use a bare-bones client and the command you need to make an API call. + * ```javascript + * import { DynamoDBClient, DeleteResourcePolicyCommand } from "@aws-sdk/client-dynamodb"; // ES Modules import + * // const { DynamoDBClient, DeleteResourcePolicyCommand } = require("@aws-sdk/client-dynamodb"); // CommonJS import + * const client = new DynamoDBClient(config); + * const input = { // DeleteResourcePolicyInput + * ResourceArn: "STRING_VALUE", // required + * ExpectedRevisionId: "STRING_VALUE", + * }; + * const command = new DeleteResourcePolicyCommand(input); + * const response = await client.send(command); + * // { // DeleteResourcePolicyOutput + * // RevisionId: "STRING_VALUE", + * // }; + * + * ``` + * + * @param DeleteResourcePolicyCommandInput - {@link DeleteResourcePolicyCommandInput} + * @returns {@link DeleteResourcePolicyCommandOutput} + * @see {@link DeleteResourcePolicyCommandInput} for command's `input` shape. + * @see {@link DeleteResourcePolicyCommandOutput} for command's `response` shape. + * @see {@link DynamoDBClientResolvedConfig | config} for DynamoDBClient's `config` shape. + * + * @throws {@link InternalServerError} (server fault) + *

An error occurred on the server side.

+ * + * @throws {@link InvalidEndpointException} (client fault) + * + * @throws {@link LimitExceededException} (client fault) + *

There is no limit to the number of daily on-demand backups that can be taken.

+ *

For most purposes, up to 500 simultaneous table operations are allowed per account. These operations + * include CreateTable, UpdateTable, + * DeleteTable,UpdateTimeToLive, + * RestoreTableFromBackup, and RestoreTableToPointInTime.

+ *

When you are creating a table with one or more secondary + * indexes, you can have up to 250 such requests running at a time. However, if the table or + * index specifications are complex, then DynamoDB might temporarily reduce the number + * of concurrent operations.

+ *

When importing into DynamoDB, up to 50 simultaneous import table operations are allowed per account.

+ *

There is a soft account quota of 2,500 tables.

+ *

GetRecords was called with a value of more than 1000 for the limit request parameter.

+ *

More than 2 processes are reading from the same streams shard at the same time. Exceeding + * this limit may result in request throttling.

+ * + * @throws {@link PolicyNotFoundException} (client fault) + *

The operation tried to access a nonexistent resource-based policy.

+ *

If you specified an ExpectedRevisionId, it's possible that a policy is present for the resource but its revision ID didn't match the expected value.

+ * + * @throws {@link ResourceInUseException} (client fault) + *

The operation conflicts with the resource's availability. For example:

+ *
    + *
  • + *

    You attempted to recreate an existing table.

    + *
  • + *
  • + *

    You tried to delete a table currently in the CREATING state.

    + *
  • + *
  • + *

    You tried to update a resource that was already being updated.

    + *
  • + *
+ *

When appropriate, wait for the ongoing update to complete and attempt the request again.

+ * + * @throws {@link ResourceNotFoundException} (client fault) + *

The operation tried to access a nonexistent table or index. The resource might not + * be specified correctly, or its status might not be ACTIVE.

+ * + * @throws {@link DynamoDBServiceException} + *

Base exception class for all service exceptions from DynamoDB service.

+ * + * + * @public + */ +export declare class DeleteResourcePolicyCommand extends DeleteResourcePolicyCommand_base { + /** @internal type navigation helper, not in runtime. */ + protected static __types: { + api: { + input: DeleteResourcePolicyInput; + output: DeleteResourcePolicyOutput; + }; + sdk: { + input: DeleteResourcePolicyCommandInput; + output: DeleteResourcePolicyCommandOutput; + }; + }; +} diff --git a/amplify/functions/fetchDocuments/node_modules/@aws-sdk/client-dynamodb/dist-types/commands/DeleteTableCommand.d.ts b/amplify/functions/fetchDocuments/node_modules/@aws-sdk/client-dynamodb/dist-types/commands/DeleteTableCommand.d.ts new file mode 100644 index 0000000..f756cd0 --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@aws-sdk/client-dynamodb/dist-types/commands/DeleteTableCommand.d.ts @@ -0,0 +1,328 @@ +import { Command as $Command } from "@smithy/smithy-client"; +import { MetadataBearer as __MetadataBearer } from "@smithy/types"; +import { DynamoDBClientResolvedConfig, ServiceInputTypes, ServiceOutputTypes } from "../DynamoDBClient"; +import { DeleteTableInput, DeleteTableOutput } from "../models/models_0"; +/** + * @public + */ +export type { __MetadataBearer }; +export { $Command }; +/** + * @public + * + * The input for {@link DeleteTableCommand}. + */ +export interface DeleteTableCommandInput extends DeleteTableInput { +} +/** + * @public + * + * The output of {@link DeleteTableCommand}. + */ +export interface DeleteTableCommandOutput extends DeleteTableOutput, __MetadataBearer { +} +declare const DeleteTableCommand_base: { + new (input: DeleteTableCommandInput): import("@smithy/smithy-client").CommandImpl; + new (__0_0: DeleteTableCommandInput): import("@smithy/smithy-client").CommandImpl; + getEndpointParameterInstructions(): import("@smithy/middleware-endpoint").EndpointParameterInstructions; +}; +/** + *

The DeleteTable operation deletes a table and all of its items. After a + * DeleteTable request, the specified table is in the + * DELETING state until DynamoDB completes the deletion. If the table is + * in the ACTIVE state, you can delete it. If a table is in + * CREATING or UPDATING states, then DynamoDB returns a + * ResourceInUseException. If the specified table does not exist, DynamoDB + * returns a ResourceNotFoundException. If table is already in the + * DELETING state, no error is returned.

+ * + *

For global tables, this operation only applies to + * global tables using Version 2019.11.21 (Current version).

+ *
+ * + *

DynamoDB might continue to accept data read and write operations, such as + * GetItem and PutItem, on a table in the + * DELETING state until the table deletion is complete. For the full + * list of table states, see TableStatus.

+ *
+ *

When you delete a table, any indexes on that table are also deleted.

+ *

If you have DynamoDB Streams enabled on the table, then the corresponding stream on + * that table goes into the DISABLED state, and the stream is automatically + * deleted after 24 hours.

+ *

Use the DescribeTable action to check the status of the table.

+ * @example + * Use a bare-bones client and the command you need to make an API call. + * ```javascript + * import { DynamoDBClient, DeleteTableCommand } from "@aws-sdk/client-dynamodb"; // ES Modules import + * // const { DynamoDBClient, DeleteTableCommand } = require("@aws-sdk/client-dynamodb"); // CommonJS import + * const client = new DynamoDBClient(config); + * const input = { // DeleteTableInput + * TableName: "STRING_VALUE", // required + * }; + * const command = new DeleteTableCommand(input); + * const response = await client.send(command); + * // { // DeleteTableOutput + * // TableDescription: { // TableDescription + * // AttributeDefinitions: [ // AttributeDefinitions + * // { // AttributeDefinition + * // AttributeName: "STRING_VALUE", // required + * // AttributeType: "S" || "N" || "B", // required + * // }, + * // ], + * // TableName: "STRING_VALUE", + * // KeySchema: [ // KeySchema + * // { // KeySchemaElement + * // AttributeName: "STRING_VALUE", // required + * // KeyType: "HASH" || "RANGE", // required + * // }, + * // ], + * // TableStatus: "CREATING" || "UPDATING" || "DELETING" || "ACTIVE" || "INACCESSIBLE_ENCRYPTION_CREDENTIALS" || "ARCHIVING" || "ARCHIVED", + * // CreationDateTime: new Date("TIMESTAMP"), + * // ProvisionedThroughput: { // ProvisionedThroughputDescription + * // LastIncreaseDateTime: new Date("TIMESTAMP"), + * // LastDecreaseDateTime: new Date("TIMESTAMP"), + * // NumberOfDecreasesToday: Number("long"), + * // ReadCapacityUnits: Number("long"), + * // WriteCapacityUnits: Number("long"), + * // }, + * // TableSizeBytes: Number("long"), + * // ItemCount: Number("long"), + * // TableArn: "STRING_VALUE", + * // TableId: "STRING_VALUE", + * // BillingModeSummary: { // BillingModeSummary + * // BillingMode: "PROVISIONED" || "PAY_PER_REQUEST", + * // LastUpdateToPayPerRequestDateTime: new Date("TIMESTAMP"), + * // }, + * // LocalSecondaryIndexes: [ // LocalSecondaryIndexDescriptionList + * // { // LocalSecondaryIndexDescription + * // 
IndexName: "STRING_VALUE", + * // KeySchema: [ + * // { + * // AttributeName: "STRING_VALUE", // required + * // KeyType: "HASH" || "RANGE", // required + * // }, + * // ], + * // Projection: { // Projection + * // ProjectionType: "ALL" || "KEYS_ONLY" || "INCLUDE", + * // NonKeyAttributes: [ // NonKeyAttributeNameList + * // "STRING_VALUE", + * // ], + * // }, + * // IndexSizeBytes: Number("long"), + * // ItemCount: Number("long"), + * // IndexArn: "STRING_VALUE", + * // }, + * // ], + * // GlobalSecondaryIndexes: [ // GlobalSecondaryIndexDescriptionList + * // { // GlobalSecondaryIndexDescription + * // IndexName: "STRING_VALUE", + * // KeySchema: [ + * // { + * // AttributeName: "STRING_VALUE", // required + * // KeyType: "HASH" || "RANGE", // required + * // }, + * // ], + * // Projection: { + * // ProjectionType: "ALL" || "KEYS_ONLY" || "INCLUDE", + * // NonKeyAttributes: [ + * // "STRING_VALUE", + * // ], + * // }, + * // IndexStatus: "CREATING" || "UPDATING" || "DELETING" || "ACTIVE", + * // Backfilling: true || false, + * // ProvisionedThroughput: { + * // LastIncreaseDateTime: new Date("TIMESTAMP"), + * // LastDecreaseDateTime: new Date("TIMESTAMP"), + * // NumberOfDecreasesToday: Number("long"), + * // ReadCapacityUnits: Number("long"), + * // WriteCapacityUnits: Number("long"), + * // }, + * // IndexSizeBytes: Number("long"), + * // ItemCount: Number("long"), + * // IndexArn: "STRING_VALUE", + * // OnDemandThroughput: { // OnDemandThroughput + * // MaxReadRequestUnits: Number("long"), + * // MaxWriteRequestUnits: Number("long"), + * // }, + * // WarmThroughput: { // GlobalSecondaryIndexWarmThroughputDescription + * // ReadUnitsPerSecond: Number("long"), + * // WriteUnitsPerSecond: Number("long"), + * // Status: "CREATING" || "UPDATING" || "DELETING" || "ACTIVE", + * // }, + * // }, + * // ], + * // StreamSpecification: { // StreamSpecification + * // StreamEnabled: true || false, // required + * // StreamViewType: "NEW_IMAGE" || "OLD_IMAGE" || 
"NEW_AND_OLD_IMAGES" || "KEYS_ONLY", + * // }, + * // LatestStreamLabel: "STRING_VALUE", + * // LatestStreamArn: "STRING_VALUE", + * // GlobalTableVersion: "STRING_VALUE", + * // Replicas: [ // ReplicaDescriptionList + * // { // ReplicaDescription + * // RegionName: "STRING_VALUE", + * // ReplicaStatus: "CREATING" || "CREATION_FAILED" || "UPDATING" || "DELETING" || "ACTIVE" || "REGION_DISABLED" || "INACCESSIBLE_ENCRYPTION_CREDENTIALS", + * // ReplicaStatusDescription: "STRING_VALUE", + * // ReplicaStatusPercentProgress: "STRING_VALUE", + * // KMSMasterKeyId: "STRING_VALUE", + * // ProvisionedThroughputOverride: { // ProvisionedThroughputOverride + * // ReadCapacityUnits: Number("long"), + * // }, + * // OnDemandThroughputOverride: { // OnDemandThroughputOverride + * // MaxReadRequestUnits: Number("long"), + * // }, + * // WarmThroughput: { // TableWarmThroughputDescription + * // ReadUnitsPerSecond: Number("long"), + * // WriteUnitsPerSecond: Number("long"), + * // Status: "CREATING" || "UPDATING" || "DELETING" || "ACTIVE" || "INACCESSIBLE_ENCRYPTION_CREDENTIALS" || "ARCHIVING" || "ARCHIVED", + * // }, + * // GlobalSecondaryIndexes: [ // ReplicaGlobalSecondaryIndexDescriptionList + * // { // ReplicaGlobalSecondaryIndexDescription + * // IndexName: "STRING_VALUE", + * // ProvisionedThroughputOverride: { + * // ReadCapacityUnits: Number("long"), + * // }, + * // OnDemandThroughputOverride: { + * // MaxReadRequestUnits: Number("long"), + * // }, + * // WarmThroughput: { + * // ReadUnitsPerSecond: Number("long"), + * // WriteUnitsPerSecond: Number("long"), + * // Status: "CREATING" || "UPDATING" || "DELETING" || "ACTIVE", + * // }, + * // }, + * // ], + * // ReplicaInaccessibleDateTime: new Date("TIMESTAMP"), + * // ReplicaTableClassSummary: { // TableClassSummary + * // TableClass: "STANDARD" || "STANDARD_INFREQUENT_ACCESS", + * // LastUpdateDateTime: new Date("TIMESTAMP"), + * // }, + * // }, + * // ], + * // RestoreSummary: { // RestoreSummary + * // 
SourceBackupArn: "STRING_VALUE", + * // SourceTableArn: "STRING_VALUE", + * // RestoreDateTime: new Date("TIMESTAMP"), // required + * // RestoreInProgress: true || false, // required + * // }, + * // SSEDescription: { // SSEDescription + * // Status: "ENABLING" || "ENABLED" || "DISABLING" || "DISABLED" || "UPDATING", + * // SSEType: "AES256" || "KMS", + * // KMSMasterKeyArn: "STRING_VALUE", + * // InaccessibleEncryptionDateTime: new Date("TIMESTAMP"), + * // }, + * // ArchivalSummary: { // ArchivalSummary + * // ArchivalDateTime: new Date("TIMESTAMP"), + * // ArchivalReason: "STRING_VALUE", + * // ArchivalBackupArn: "STRING_VALUE", + * // }, + * // TableClassSummary: { + * // TableClass: "STANDARD" || "STANDARD_INFREQUENT_ACCESS", + * // LastUpdateDateTime: new Date("TIMESTAMP"), + * // }, + * // DeletionProtectionEnabled: true || false, + * // OnDemandThroughput: { + * // MaxReadRequestUnits: Number("long"), + * // MaxWriteRequestUnits: Number("long"), + * // }, + * // WarmThroughput: { + * // ReadUnitsPerSecond: Number("long"), + * // WriteUnitsPerSecond: Number("long"), + * // Status: "CREATING" || "UPDATING" || "DELETING" || "ACTIVE" || "INACCESSIBLE_ENCRYPTION_CREDENTIALS" || "ARCHIVING" || "ARCHIVED", + * // }, + * // MultiRegionConsistency: "EVENTUAL" || "STRONG", + * // }, + * // }; + * + * ``` + * + * @param DeleteTableCommandInput - {@link DeleteTableCommandInput} + * @returns {@link DeleteTableCommandOutput} + * @see {@link DeleteTableCommandInput} for command's `input` shape. + * @see {@link DeleteTableCommandOutput} for command's `response` shape. + * @see {@link DynamoDBClientResolvedConfig | config} for DynamoDBClient's `config` shape. + * + * @throws {@link InternalServerError} (server fault) + *

An error occurred on the server side.

+ * + * @throws {@link InvalidEndpointException} (client fault) + * + * @throws {@link LimitExceededException} (client fault) + *

There is no limit to the number of daily on-demand backups that can be taken.

+ *

For most purposes, up to 500 simultaneous table operations are allowed per account. These operations + * include CreateTable, UpdateTable, + * DeleteTable,UpdateTimeToLive, + * RestoreTableFromBackup, and RestoreTableToPointInTime.

+ *

When you are creating a table with one or more secondary + * indexes, you can have up to 250 such requests running at a time. However, if the table or + * index specifications are complex, then DynamoDB might temporarily reduce the number + * of concurrent operations.

+ *

When importing into DynamoDB, up to 50 simultaneous import table operations are allowed per account.

+ *

There is a soft account quota of 2,500 tables.

+ *

GetRecords was called with a value of more than 1000 for the limit request parameter.

+ *

More than 2 processes are reading from the same streams shard at the same time. Exceeding + * this limit may result in request throttling.

+ * + * @throws {@link ResourceInUseException} (client fault) + *

The operation conflicts with the resource's availability. For example:

+ *
    + *
  • + *

    You attempted to recreate an existing table.

    + *
  • + *
  • + *

    You tried to delete a table currently in the CREATING state.

    + *
  • + *
  • + *

    You tried to update a resource that was already being updated.

    + *
  • + *
+ *

When appropriate, wait for the ongoing update to complete and attempt the request again.

+ * + * @throws {@link ResourceNotFoundException} (client fault) + *

The operation tried to access a nonexistent table or index. The resource might not + * be specified correctly, or its status might not be ACTIVE.

+ * + * @throws {@link DynamoDBServiceException} + *

Base exception class for all service exceptions from DynamoDB service.

+ * + * + * @example To delete a table + * ```javascript + * // This example deletes the Music table. + * const input = { + * TableName: "Music" + * }; + * const command = new DeleteTableCommand(input); + * const response = await client.send(command); + * /* response is + * { + * TableDescription: { + * ItemCount: 0, + * ProvisionedThroughput: { + * NumberOfDecreasesToday: 1, + * ReadCapacityUnits: 5, + * WriteCapacityUnits: 5 + * }, + * TableName: "Music", + * TableSizeBytes: 0, + * TableStatus: "DELETING" + * } + * } + * *\/ + * ``` + * + * @public + */ +export declare class DeleteTableCommand extends DeleteTableCommand_base { + /** @internal type navigation helper, not in runtime. */ + protected static __types: { + api: { + input: DeleteTableInput; + output: DeleteTableOutput; + }; + sdk: { + input: DeleteTableCommandInput; + output: DeleteTableCommandOutput; + }; + }; +} diff --git a/amplify/functions/fetchDocuments/node_modules/@aws-sdk/client-dynamodb/dist-types/commands/DescribeBackupCommand.d.ts b/amplify/functions/fetchDocuments/node_modules/@aws-sdk/client-dynamodb/dist-types/commands/DescribeBackupCommand.d.ts new file mode 100644 index 0000000..8568846 --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@aws-sdk/client-dynamodb/dist-types/commands/DescribeBackupCommand.d.ts @@ -0,0 +1,173 @@ +import { Command as $Command } from "@smithy/smithy-client"; +import { MetadataBearer as __MetadataBearer } from "@smithy/types"; +import { DynamoDBClientResolvedConfig, ServiceInputTypes, ServiceOutputTypes } from "../DynamoDBClient"; +import { DescribeBackupInput, DescribeBackupOutput } from "../models/models_0"; +/** + * @public + */ +export type { __MetadataBearer }; +export { $Command }; +/** + * @public + * + * The input for {@link DescribeBackupCommand}. + */ +export interface DescribeBackupCommandInput extends DescribeBackupInput { +} +/** + * @public + * + * The output of {@link DescribeBackupCommand}. 
+ */ +export interface DescribeBackupCommandOutput extends DescribeBackupOutput, __MetadataBearer { +} +declare const DescribeBackupCommand_base: { + new (input: DescribeBackupCommandInput): import("@smithy/smithy-client").CommandImpl; + new (__0_0: DescribeBackupCommandInput): import("@smithy/smithy-client").CommandImpl; + getEndpointParameterInstructions(): import("@smithy/middleware-endpoint").EndpointParameterInstructions; +}; +/** + *

Describes an existing backup of a table.

+ *

You can call DescribeBackup at a maximum rate of 10 times per + * second.

+ * @example + * Use a bare-bones client and the command you need to make an API call. + * ```javascript + * import { DynamoDBClient, DescribeBackupCommand } from "@aws-sdk/client-dynamodb"; // ES Modules import + * // const { DynamoDBClient, DescribeBackupCommand } = require("@aws-sdk/client-dynamodb"); // CommonJS import + * const client = new DynamoDBClient(config); + * const input = { // DescribeBackupInput + * BackupArn: "STRING_VALUE", // required + * }; + * const command = new DescribeBackupCommand(input); + * const response = await client.send(command); + * // { // DescribeBackupOutput + * // BackupDescription: { // BackupDescription + * // BackupDetails: { // BackupDetails + * // BackupArn: "STRING_VALUE", // required + * // BackupName: "STRING_VALUE", // required + * // BackupSizeBytes: Number("long"), + * // BackupStatus: "CREATING" || "DELETED" || "AVAILABLE", // required + * // BackupType: "USER" || "SYSTEM" || "AWS_BACKUP", // required + * // BackupCreationDateTime: new Date("TIMESTAMP"), // required + * // BackupExpiryDateTime: new Date("TIMESTAMP"), + * // }, + * // SourceTableDetails: { // SourceTableDetails + * // TableName: "STRING_VALUE", // required + * // TableId: "STRING_VALUE", // required + * // TableArn: "STRING_VALUE", + * // TableSizeBytes: Number("long"), + * // KeySchema: [ // KeySchema // required + * // { // KeySchemaElement + * // AttributeName: "STRING_VALUE", // required + * // KeyType: "HASH" || "RANGE", // required + * // }, + * // ], + * // TableCreationDateTime: new Date("TIMESTAMP"), // required + * // ProvisionedThroughput: { // ProvisionedThroughput + * // ReadCapacityUnits: Number("long"), // required + * // WriteCapacityUnits: Number("long"), // required + * // }, + * // OnDemandThroughput: { // OnDemandThroughput + * // MaxReadRequestUnits: Number("long"), + * // MaxWriteRequestUnits: Number("long"), + * // }, + * // ItemCount: Number("long"), + * // BillingMode: "PROVISIONED" || "PAY_PER_REQUEST", + * // }, + * // 
SourceTableFeatureDetails: { // SourceTableFeatureDetails + * // LocalSecondaryIndexes: [ // LocalSecondaryIndexes + * // { // LocalSecondaryIndexInfo + * // IndexName: "STRING_VALUE", + * // KeySchema: [ + * // { + * // AttributeName: "STRING_VALUE", // required + * // KeyType: "HASH" || "RANGE", // required + * // }, + * // ], + * // Projection: { // Projection + * // ProjectionType: "ALL" || "KEYS_ONLY" || "INCLUDE", + * // NonKeyAttributes: [ // NonKeyAttributeNameList + * // "STRING_VALUE", + * // ], + * // }, + * // }, + * // ], + * // GlobalSecondaryIndexes: [ // GlobalSecondaryIndexes + * // { // GlobalSecondaryIndexInfo + * // IndexName: "STRING_VALUE", + * // KeySchema: [ + * // { + * // AttributeName: "STRING_VALUE", // required + * // KeyType: "HASH" || "RANGE", // required + * // }, + * // ], + * // Projection: { + * // ProjectionType: "ALL" || "KEYS_ONLY" || "INCLUDE", + * // NonKeyAttributes: [ + * // "STRING_VALUE", + * // ], + * // }, + * // ProvisionedThroughput: { + * // ReadCapacityUnits: Number("long"), // required + * // WriteCapacityUnits: Number("long"), // required + * // }, + * // OnDemandThroughput: { + * // MaxReadRequestUnits: Number("long"), + * // MaxWriteRequestUnits: Number("long"), + * // }, + * // }, + * // ], + * // StreamDescription: { // StreamSpecification + * // StreamEnabled: true || false, // required + * // StreamViewType: "NEW_IMAGE" || "OLD_IMAGE" || "NEW_AND_OLD_IMAGES" || "KEYS_ONLY", + * // }, + * // TimeToLiveDescription: { // TimeToLiveDescription + * // TimeToLiveStatus: "ENABLING" || "DISABLING" || "ENABLED" || "DISABLED", + * // AttributeName: "STRING_VALUE", + * // }, + * // SSEDescription: { // SSEDescription + * // Status: "ENABLING" || "ENABLED" || "DISABLING" || "DISABLED" || "UPDATING", + * // SSEType: "AES256" || "KMS", + * // KMSMasterKeyArn: "STRING_VALUE", + * // InaccessibleEncryptionDateTime: new Date("TIMESTAMP"), + * // }, + * // }, + * // }, + * // }; + * + * ``` + * + * @param 
DescribeBackupCommandInput - {@link DescribeBackupCommandInput} + * @returns {@link DescribeBackupCommandOutput} + * @see {@link DescribeBackupCommandInput} for command's `input` shape. + * @see {@link DescribeBackupCommandOutput} for command's `response` shape. + * @see {@link DynamoDBClientResolvedConfig | config} for DynamoDBClient's `config` shape. + * + * @throws {@link BackupNotFoundException} (client fault) + *

Backup not found for the given BackupARN.

+ * + * @throws {@link InternalServerError} (server fault) + *

An error occurred on the server side.

+ * + * @throws {@link InvalidEndpointException} (client fault) + * + * @throws {@link DynamoDBServiceException} + *

Base exception class for all service exceptions from DynamoDB service.

+ * + * + * @public + */ +export declare class DescribeBackupCommand extends DescribeBackupCommand_base { + /** @internal type navigation helper, not in runtime. */ + protected static __types: { + api: { + input: DescribeBackupInput; + output: DescribeBackupOutput; + }; + sdk: { + input: DescribeBackupCommandInput; + output: DescribeBackupCommandOutput; + }; + }; +} diff --git a/amplify/functions/fetchDocuments/node_modules/@aws-sdk/client-dynamodb/dist-types/commands/DescribeContinuousBackupsCommand.d.ts b/amplify/functions/fetchDocuments/node_modules/@aws-sdk/client-dynamodb/dist-types/commands/DescribeContinuousBackupsCommand.d.ts new file mode 100644 index 0000000..301ba60 --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@aws-sdk/client-dynamodb/dist-types/commands/DescribeContinuousBackupsCommand.d.ts @@ -0,0 +1,101 @@ +import { Command as $Command } from "@smithy/smithy-client"; +import { MetadataBearer as __MetadataBearer } from "@smithy/types"; +import { DynamoDBClientResolvedConfig, ServiceInputTypes, ServiceOutputTypes } from "../DynamoDBClient"; +import { DescribeContinuousBackupsInput, DescribeContinuousBackupsOutput } from "../models/models_0"; +/** + * @public + */ +export type { __MetadataBearer }; +export { $Command }; +/** + * @public + * + * The input for {@link DescribeContinuousBackupsCommand}. + */ +export interface DescribeContinuousBackupsCommandInput extends DescribeContinuousBackupsInput { +} +/** + * @public + * + * The output of {@link DescribeContinuousBackupsCommand}. 
+ */ +export interface DescribeContinuousBackupsCommandOutput extends DescribeContinuousBackupsOutput, __MetadataBearer { +} +declare const DescribeContinuousBackupsCommand_base: { + new (input: DescribeContinuousBackupsCommandInput): import("@smithy/smithy-client").CommandImpl; + new (__0_0: DescribeContinuousBackupsCommandInput): import("@smithy/smithy-client").CommandImpl; + getEndpointParameterInstructions(): import("@smithy/middleware-endpoint").EndpointParameterInstructions; +}; +/** + *

Checks the status of continuous backups and point in time recovery on the specified + * table. Continuous backups are ENABLED on all tables at table creation. If + * point in time recovery is enabled, PointInTimeRecoveryStatus will be set to + * ENABLED.

+ *

After continuous backups and point in time recovery are enabled, you can restore to + * any point in time within EarliestRestorableDateTime and + * LatestRestorableDateTime.

+ *

+ * LatestRestorableDateTime is typically 5 minutes before the current time. + * You can restore your table to any point in time in the last 35 days. You can set the + * recovery period to any value between 1 and 35 days.

+ *

You can call DescribeContinuousBackups at a maximum rate of 10 times per + * second.

+ * @example + * Use a bare-bones client and the command you need to make an API call. + * ```javascript + * import { DynamoDBClient, DescribeContinuousBackupsCommand } from "@aws-sdk/client-dynamodb"; // ES Modules import + * // const { DynamoDBClient, DescribeContinuousBackupsCommand } = require("@aws-sdk/client-dynamodb"); // CommonJS import + * const client = new DynamoDBClient(config); + * const input = { // DescribeContinuousBackupsInput + * TableName: "STRING_VALUE", // required + * }; + * const command = new DescribeContinuousBackupsCommand(input); + * const response = await client.send(command); + * // { // DescribeContinuousBackupsOutput + * // ContinuousBackupsDescription: { // ContinuousBackupsDescription + * // ContinuousBackupsStatus: "ENABLED" || "DISABLED", // required + * // PointInTimeRecoveryDescription: { // PointInTimeRecoveryDescription + * // PointInTimeRecoveryStatus: "ENABLED" || "DISABLED", + * // RecoveryPeriodInDays: Number("int"), + * // EarliestRestorableDateTime: new Date("TIMESTAMP"), + * // LatestRestorableDateTime: new Date("TIMESTAMP"), + * // }, + * // }, + * // }; + * + * ``` + * + * @param DescribeContinuousBackupsCommandInput - {@link DescribeContinuousBackupsCommandInput} + * @returns {@link DescribeContinuousBackupsCommandOutput} + * @see {@link DescribeContinuousBackupsCommandInput} for command's `input` shape. + * @see {@link DescribeContinuousBackupsCommandOutput} for command's `response` shape. + * @see {@link DynamoDBClientResolvedConfig | config} for DynamoDBClient's `config` shape. + * + * @throws {@link InternalServerError} (server fault) + *

An error occurred on the server side.

+ * + * @throws {@link InvalidEndpointException} (client fault) + * + * @throws {@link TableNotFoundException} (client fault) + *

A source table with the name TableName does not currently exist within + * the subscriber's account or the subscriber is operating in the wrong Amazon Web Services Region.

+ * + * @throws {@link DynamoDBServiceException} + *

Base exception class for all service exceptions from DynamoDB service.

+ * + * + * @public + */ +export declare class DescribeContinuousBackupsCommand extends DescribeContinuousBackupsCommand_base { + /** @internal type navigation helper, not in runtime. */ + protected static __types: { + api: { + input: DescribeContinuousBackupsInput; + output: DescribeContinuousBackupsOutput; + }; + sdk: { + input: DescribeContinuousBackupsCommandInput; + output: DescribeContinuousBackupsCommandOutput; + }; + }; +} diff --git a/amplify/functions/fetchDocuments/node_modules/@aws-sdk/client-dynamodb/dist-types/commands/DescribeContributorInsightsCommand.d.ts b/amplify/functions/fetchDocuments/node_modules/@aws-sdk/client-dynamodb/dist-types/commands/DescribeContributorInsightsCommand.d.ts new file mode 100644 index 0000000..99eb5e0 --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@aws-sdk/client-dynamodb/dist-types/commands/DescribeContributorInsightsCommand.d.ts @@ -0,0 +1,91 @@ +import { Command as $Command } from "@smithy/smithy-client"; +import { MetadataBearer as __MetadataBearer } from "@smithy/types"; +import { DynamoDBClientResolvedConfig, ServiceInputTypes, ServiceOutputTypes } from "../DynamoDBClient"; +import { DescribeContributorInsightsInput, DescribeContributorInsightsOutput } from "../models/models_0"; +/** + * @public + */ +export type { __MetadataBearer }; +export { $Command }; +/** + * @public + * + * The input for {@link DescribeContributorInsightsCommand}. + */ +export interface DescribeContributorInsightsCommandInput extends DescribeContributorInsightsInput { +} +/** + * @public + * + * The output of {@link DescribeContributorInsightsCommand}. 
+ */ +export interface DescribeContributorInsightsCommandOutput extends DescribeContributorInsightsOutput, __MetadataBearer { +} +declare const DescribeContributorInsightsCommand_base: { + new (input: DescribeContributorInsightsCommandInput): import("@smithy/smithy-client").CommandImpl; + new (__0_0: DescribeContributorInsightsCommandInput): import("@smithy/smithy-client").CommandImpl; + getEndpointParameterInstructions(): import("@smithy/middleware-endpoint").EndpointParameterInstructions; +}; +/** + *

Returns information about contributor insights for a given table or global secondary + * index.

+ * @example + * Use a bare-bones client and the command you need to make an API call. + * ```javascript + * import { DynamoDBClient, DescribeContributorInsightsCommand } from "@aws-sdk/client-dynamodb"; // ES Modules import + * // const { DynamoDBClient, DescribeContributorInsightsCommand } = require("@aws-sdk/client-dynamodb"); // CommonJS import + * const client = new DynamoDBClient(config); + * const input = { // DescribeContributorInsightsInput + * TableName: "STRING_VALUE", // required + * IndexName: "STRING_VALUE", + * }; + * const command = new DescribeContributorInsightsCommand(input); + * const response = await client.send(command); + * // { // DescribeContributorInsightsOutput + * // TableName: "STRING_VALUE", + * // IndexName: "STRING_VALUE", + * // ContributorInsightsRuleList: [ // ContributorInsightsRuleList + * // "STRING_VALUE", + * // ], + * // ContributorInsightsStatus: "ENABLING" || "ENABLED" || "DISABLING" || "DISABLED" || "FAILED", + * // LastUpdateDateTime: new Date("TIMESTAMP"), + * // FailureException: { // FailureException + * // ExceptionName: "STRING_VALUE", + * // ExceptionDescription: "STRING_VALUE", + * // }, + * // }; + * + * ``` + * + * @param DescribeContributorInsightsCommandInput - {@link DescribeContributorInsightsCommandInput} + * @returns {@link DescribeContributorInsightsCommandOutput} + * @see {@link DescribeContributorInsightsCommandInput} for command's `input` shape. + * @see {@link DescribeContributorInsightsCommandOutput} for command's `response` shape. + * @see {@link DynamoDBClientResolvedConfig | config} for DynamoDBClient's `config` shape. + * + * @throws {@link InternalServerError} (server fault) + *

An error occurred on the server side.

+ * + * @throws {@link ResourceNotFoundException} (client fault) + *

The operation tried to access a nonexistent table or index. The resource might not + * be specified correctly, or its status might not be ACTIVE.

+ * + * @throws {@link DynamoDBServiceException} + *

Base exception class for all service exceptions from DynamoDB service.

+ * + * + * @public + */ +export declare class DescribeContributorInsightsCommand extends DescribeContributorInsightsCommand_base { + /** @internal type navigation helper, not in runtime. */ + protected static __types: { + api: { + input: DescribeContributorInsightsInput; + output: DescribeContributorInsightsOutput; + }; + sdk: { + input: DescribeContributorInsightsCommandInput; + output: DescribeContributorInsightsCommandOutput; + }; + }; +} diff --git a/amplify/functions/fetchDocuments/node_modules/@aws-sdk/client-dynamodb/dist-types/commands/DescribeEndpointsCommand.d.ts b/amplify/functions/fetchDocuments/node_modules/@aws-sdk/client-dynamodb/dist-types/commands/DescribeEndpointsCommand.d.ts new file mode 100644 index 0000000..57fe82d --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@aws-sdk/client-dynamodb/dist-types/commands/DescribeEndpointsCommand.d.ts @@ -0,0 +1,76 @@ +import { Command as $Command } from "@smithy/smithy-client"; +import { MetadataBearer as __MetadataBearer } from "@smithy/types"; +import { DynamoDBClientResolvedConfig, ServiceInputTypes, ServiceOutputTypes } from "../DynamoDBClient"; +import { DescribeEndpointsRequest, DescribeEndpointsResponse } from "../models/models_0"; +/** + * @public + */ +export type { __MetadataBearer }; +export { $Command }; +/** + * @public + * + * The input for {@link DescribeEndpointsCommand}. + */ +export interface DescribeEndpointsCommandInput extends DescribeEndpointsRequest { +} +/** + * @public + * + * The output of {@link DescribeEndpointsCommand}. 
+ */ +export interface DescribeEndpointsCommandOutput extends DescribeEndpointsResponse, __MetadataBearer { +} +declare const DescribeEndpointsCommand_base: { + new (input: DescribeEndpointsCommandInput): import("@smithy/smithy-client").CommandImpl; + new (...[input]: [] | [DescribeEndpointsCommandInput]): import("@smithy/smithy-client").CommandImpl; + getEndpointParameterInstructions(): import("@smithy/middleware-endpoint").EndpointParameterInstructions; +}; +/** + *

Returns the regional endpoint information. For more information on policy permissions, + * please see Internetwork traffic privacy.

+ * @example + * Use a bare-bones client and the command you need to make an API call. + * ```javascript + * import { DynamoDBClient, DescribeEndpointsCommand } from "@aws-sdk/client-dynamodb"; // ES Modules import + * // const { DynamoDBClient, DescribeEndpointsCommand } = require("@aws-sdk/client-dynamodb"); // CommonJS import + * const client = new DynamoDBClient(config); + * const input = {}; + * const command = new DescribeEndpointsCommand(input); + * const response = await client.send(command); + * // { // DescribeEndpointsResponse + * // Endpoints: [ // Endpoints // required + * // { // Endpoint + * // Address: "STRING_VALUE", // required + * // CachePeriodInMinutes: Number("long"), // required + * // }, + * // ], + * // }; + * + * ``` + * + * @param DescribeEndpointsCommandInput - {@link DescribeEndpointsCommandInput} + * @returns {@link DescribeEndpointsCommandOutput} + * @see {@link DescribeEndpointsCommandInput} for command's `input` shape. + * @see {@link DescribeEndpointsCommandOutput} for command's `response` shape. + * @see {@link DynamoDBClientResolvedConfig | config} for DynamoDBClient's `config` shape. + * + * @throws {@link DynamoDBServiceException} + *

Base exception class for all service exceptions from DynamoDB service.

+ * + * + * @public + */ +export declare class DescribeEndpointsCommand extends DescribeEndpointsCommand_base { + /** @internal type navigation helper, not in runtime. */ + protected static __types: { + api: { + input: {}; + output: DescribeEndpointsResponse; + }; + sdk: { + input: DescribeEndpointsCommandInput; + output: DescribeEndpointsCommandOutput; + }; + }; +} diff --git a/amplify/functions/fetchDocuments/node_modules/@aws-sdk/client-dynamodb/dist-types/commands/DescribeExportCommand.d.ts b/amplify/functions/fetchDocuments/node_modules/@aws-sdk/client-dynamodb/dist-types/commands/DescribeExportCommand.d.ts new file mode 100644 index 0000000..400d9ab --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@aws-sdk/client-dynamodb/dist-types/commands/DescribeExportCommand.d.ts @@ -0,0 +1,120 @@ +import { Command as $Command } from "@smithy/smithy-client"; +import { MetadataBearer as __MetadataBearer } from "@smithy/types"; +import { DynamoDBClientResolvedConfig, ServiceInputTypes, ServiceOutputTypes } from "../DynamoDBClient"; +import { DescribeExportInput, DescribeExportOutput } from "../models/models_0"; +/** + * @public + */ +export type { __MetadataBearer }; +export { $Command }; +/** + * @public + * + * The input for {@link DescribeExportCommand}. + */ +export interface DescribeExportCommandInput extends DescribeExportInput { +} +/** + * @public + * + * The output of {@link DescribeExportCommand}. + */ +export interface DescribeExportCommandOutput extends DescribeExportOutput, __MetadataBearer { +} +declare const DescribeExportCommand_base: { + new (input: DescribeExportCommandInput): import("@smithy/smithy-client").CommandImpl; + new (__0_0: DescribeExportCommandInput): import("@smithy/smithy-client").CommandImpl; + getEndpointParameterInstructions(): import("@smithy/middleware-endpoint").EndpointParameterInstructions; +}; +/** + *

Describes an existing table export.

+ * @example + * Use a bare-bones client and the command you need to make an API call. + * ```javascript + * import { DynamoDBClient, DescribeExportCommand } from "@aws-sdk/client-dynamodb"; // ES Modules import + * // const { DynamoDBClient, DescribeExportCommand } = require("@aws-sdk/client-dynamodb"); // CommonJS import + * const client = new DynamoDBClient(config); + * const input = { // DescribeExportInput + * ExportArn: "STRING_VALUE", // required + * }; + * const command = new DescribeExportCommand(input); + * const response = await client.send(command); + * // { // DescribeExportOutput + * // ExportDescription: { // ExportDescription + * // ExportArn: "STRING_VALUE", + * // ExportStatus: "IN_PROGRESS" || "COMPLETED" || "FAILED", + * // StartTime: new Date("TIMESTAMP"), + * // EndTime: new Date("TIMESTAMP"), + * // ExportManifest: "STRING_VALUE", + * // TableArn: "STRING_VALUE", + * // TableId: "STRING_VALUE", + * // ExportTime: new Date("TIMESTAMP"), + * // ClientToken: "STRING_VALUE", + * // S3Bucket: "STRING_VALUE", + * // S3BucketOwner: "STRING_VALUE", + * // S3Prefix: "STRING_VALUE", + * // S3SseAlgorithm: "AES256" || "KMS", + * // S3SseKmsKeyId: "STRING_VALUE", + * // FailureCode: "STRING_VALUE", + * // FailureMessage: "STRING_VALUE", + * // ExportFormat: "DYNAMODB_JSON" || "ION", + * // BilledSizeBytes: Number("long"), + * // ItemCount: Number("long"), + * // ExportType: "FULL_EXPORT" || "INCREMENTAL_EXPORT", + * // IncrementalExportSpecification: { // IncrementalExportSpecification + * // ExportFromTime: new Date("TIMESTAMP"), + * // ExportToTime: new Date("TIMESTAMP"), + * // ExportViewType: "NEW_IMAGE" || "NEW_AND_OLD_IMAGES", + * // }, + * // }, + * // }; + * + * ``` + * + * @param DescribeExportCommandInput - {@link DescribeExportCommandInput} + * @returns {@link DescribeExportCommandOutput} + * @see {@link DescribeExportCommandInput} for command's `input` shape. + * @see {@link DescribeExportCommandOutput} for command's `response` shape. 
+ * @see {@link DynamoDBClientResolvedConfig | config} for DynamoDBClient's `config` shape. + * + * @throws {@link ExportNotFoundException} (client fault) + *

The specified export was not found.

+ * + * @throws {@link InternalServerError} (server fault) + *

An error occurred on the server side.

+ * + * @throws {@link LimitExceededException} (client fault) + *

There is no limit to the number of daily on-demand backups that can be taken.

+ *

For most purposes, up to 500 simultaneous table operations are allowed per account. These operations + * include CreateTable, UpdateTable, + * DeleteTable,UpdateTimeToLive, + * RestoreTableFromBackup, and RestoreTableToPointInTime.

+ *

When you are creating a table with one or more secondary + * indexes, you can have up to 250 such requests running at a time. However, if the table or + * index specifications are complex, then DynamoDB might temporarily reduce the number + * of concurrent operations.

+ *

When importing into DynamoDB, up to 50 simultaneous import table operations are allowed per account.

+ *

There is a soft account quota of 2,500 tables.

+ *

GetRecords was called with a value of more than 1000 for the limit request parameter.

+ *

More than 2 processes are reading from the same streams shard at the same time. Exceeding + * this limit may result in request throttling.

+ * + * @throws {@link DynamoDBServiceException} + *

Base exception class for all service exceptions from DynamoDB service.

+ * + * + * @public + */ +export declare class DescribeExportCommand extends DescribeExportCommand_base { + /** @internal type navigation helper, not in runtime. */ + protected static __types: { + api: { + input: DescribeExportInput; + output: DescribeExportOutput; + }; + sdk: { + input: DescribeExportCommandInput; + output: DescribeExportCommandOutput; + }; + }; +} diff --git a/amplify/functions/fetchDocuments/node_modules/@aws-sdk/client-dynamodb/dist-types/commands/DescribeGlobalTableCommand.d.ts b/amplify/functions/fetchDocuments/node_modules/@aws-sdk/client-dynamodb/dist-types/commands/DescribeGlobalTableCommand.d.ts new file mode 100644 index 0000000..79c9f59 --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@aws-sdk/client-dynamodb/dist-types/commands/DescribeGlobalTableCommand.d.ts @@ -0,0 +1,130 @@ +import { Command as $Command } from "@smithy/smithy-client"; +import { MetadataBearer as __MetadataBearer } from "@smithy/types"; +import { DynamoDBClientResolvedConfig, ServiceInputTypes, ServiceOutputTypes } from "../DynamoDBClient"; +import { DescribeGlobalTableInput, DescribeGlobalTableOutput } from "../models/models_0"; +/** + * @public + */ +export type { __MetadataBearer }; +export { $Command }; +/** + * @public + * + * The input for {@link DescribeGlobalTableCommand}. + */ +export interface DescribeGlobalTableCommandInput extends DescribeGlobalTableInput { +} +/** + * @public + * + * The output of {@link DescribeGlobalTableCommand}. + */ +export interface DescribeGlobalTableCommandOutput extends DescribeGlobalTableOutput, __MetadataBearer { +} +declare const DescribeGlobalTableCommand_base: { + new (input: DescribeGlobalTableCommandInput): import("@smithy/smithy-client").CommandImpl; + new (__0_0: DescribeGlobalTableCommandInput): import("@smithy/smithy-client").CommandImpl; + getEndpointParameterInstructions(): import("@smithy/middleware-endpoint").EndpointParameterInstructions; +}; +/** + *

Returns information about the specified global table.

+ * + *

This documentation is for version 2017.11.29 (Legacy) of global tables, which should be avoided for new global tables. Customers should use Global Tables version 2019.11.21 (Current) when possible, because it provides greater flexibility, higher efficiency, and consumes less write capacity than 2017.11.29 (Legacy).

+ *

To determine which version you're using, see Determining the global table version you are using. To update existing global tables from version 2017.11.29 (Legacy) to version 2019.11.21 (Current), see Upgrading global tables.

+ *
+ * @example + * Use a bare-bones client and the command you need to make an API call. + * ```javascript + * import { DynamoDBClient, DescribeGlobalTableCommand } from "@aws-sdk/client-dynamodb"; // ES Modules import + * // const { DynamoDBClient, DescribeGlobalTableCommand } = require("@aws-sdk/client-dynamodb"); // CommonJS import + * const client = new DynamoDBClient(config); + * const input = { // DescribeGlobalTableInput + * GlobalTableName: "STRING_VALUE", // required + * }; + * const command = new DescribeGlobalTableCommand(input); + * const response = await client.send(command); + * // { // DescribeGlobalTableOutput + * // GlobalTableDescription: { // GlobalTableDescription + * // ReplicationGroup: [ // ReplicaDescriptionList + * // { // ReplicaDescription + * // RegionName: "STRING_VALUE", + * // ReplicaStatus: "CREATING" || "CREATION_FAILED" || "UPDATING" || "DELETING" || "ACTIVE" || "REGION_DISABLED" || "INACCESSIBLE_ENCRYPTION_CREDENTIALS", + * // ReplicaStatusDescription: "STRING_VALUE", + * // ReplicaStatusPercentProgress: "STRING_VALUE", + * // KMSMasterKeyId: "STRING_VALUE", + * // ProvisionedThroughputOverride: { // ProvisionedThroughputOverride + * // ReadCapacityUnits: Number("long"), + * // }, + * // OnDemandThroughputOverride: { // OnDemandThroughputOverride + * // MaxReadRequestUnits: Number("long"), + * // }, + * // WarmThroughput: { // TableWarmThroughputDescription + * // ReadUnitsPerSecond: Number("long"), + * // WriteUnitsPerSecond: Number("long"), + * // Status: "CREATING" || "UPDATING" || "DELETING" || "ACTIVE" || "INACCESSIBLE_ENCRYPTION_CREDENTIALS" || "ARCHIVING" || "ARCHIVED", + * // }, + * // GlobalSecondaryIndexes: [ // ReplicaGlobalSecondaryIndexDescriptionList + * // { // ReplicaGlobalSecondaryIndexDescription + * // IndexName: "STRING_VALUE", + * // ProvisionedThroughputOverride: { + * // ReadCapacityUnits: Number("long"), + * // }, + * // OnDemandThroughputOverride: { + * // MaxReadRequestUnits: Number("long"), + * // }, + * 
// WarmThroughput: { // GlobalSecondaryIndexWarmThroughputDescription + * // ReadUnitsPerSecond: Number("long"), + * // WriteUnitsPerSecond: Number("long"), + * // Status: "CREATING" || "UPDATING" || "DELETING" || "ACTIVE", + * // }, + * // }, + * // ], + * // ReplicaInaccessibleDateTime: new Date("TIMESTAMP"), + * // ReplicaTableClassSummary: { // TableClassSummary + * // TableClass: "STANDARD" || "STANDARD_INFREQUENT_ACCESS", + * // LastUpdateDateTime: new Date("TIMESTAMP"), + * // }, + * // }, + * // ], + * // GlobalTableArn: "STRING_VALUE", + * // CreationDateTime: new Date("TIMESTAMP"), + * // GlobalTableStatus: "CREATING" || "ACTIVE" || "DELETING" || "UPDATING", + * // GlobalTableName: "STRING_VALUE", + * // }, + * // }; + * + * ``` + * + * @param DescribeGlobalTableCommandInput - {@link DescribeGlobalTableCommandInput} + * @returns {@link DescribeGlobalTableCommandOutput} + * @see {@link DescribeGlobalTableCommandInput} for command's `input` shape. + * @see {@link DescribeGlobalTableCommandOutput} for command's `response` shape. + * @see {@link DynamoDBClientResolvedConfig | config} for DynamoDBClient's `config` shape. + * + * @throws {@link GlobalTableNotFoundException} (client fault) + *

The specified global table does not exist.

+ * + * @throws {@link InternalServerError} (server fault) + *

An error occurred on the server side.

+ * + * @throws {@link InvalidEndpointException} (client fault) + * + * @throws {@link DynamoDBServiceException} + *

Base exception class for all service exceptions from DynamoDB service.

+ * + * + * @public + */ +export declare class DescribeGlobalTableCommand extends DescribeGlobalTableCommand_base { + /** @internal type navigation helper, not in runtime. */ + protected static __types: { + api: { + input: DescribeGlobalTableInput; + output: DescribeGlobalTableOutput; + }; + sdk: { + input: DescribeGlobalTableCommandInput; + output: DescribeGlobalTableCommandOutput; + }; + }; +} diff --git a/amplify/functions/fetchDocuments/node_modules/@aws-sdk/client-dynamodb/dist-types/commands/DescribeGlobalTableSettingsCommand.d.ts b/amplify/functions/fetchDocuments/node_modules/@aws-sdk/client-dynamodb/dist-types/commands/DescribeGlobalTableSettingsCommand.d.ts new file mode 100644 index 0000000..aadabee --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@aws-sdk/client-dynamodb/dist-types/commands/DescribeGlobalTableSettingsCommand.d.ts @@ -0,0 +1,176 @@ +import { Command as $Command } from "@smithy/smithy-client"; +import { MetadataBearer as __MetadataBearer } from "@smithy/types"; +import { DynamoDBClientResolvedConfig, ServiceInputTypes, ServiceOutputTypes } from "../DynamoDBClient"; +import { DescribeGlobalTableSettingsInput, DescribeGlobalTableSettingsOutput } from "../models/models_0"; +/** + * @public + */ +export type { __MetadataBearer }; +export { $Command }; +/** + * @public + * + * The input for {@link DescribeGlobalTableSettingsCommand}. + */ +export interface DescribeGlobalTableSettingsCommandInput extends DescribeGlobalTableSettingsInput { +} +/** + * @public + * + * The output of {@link DescribeGlobalTableSettingsCommand}. 
+ */ +export interface DescribeGlobalTableSettingsCommandOutput extends DescribeGlobalTableSettingsOutput, __MetadataBearer { +} +declare const DescribeGlobalTableSettingsCommand_base: { + new (input: DescribeGlobalTableSettingsCommandInput): import("@smithy/smithy-client").CommandImpl; + new (__0_0: DescribeGlobalTableSettingsCommandInput): import("@smithy/smithy-client").CommandImpl; + getEndpointParameterInstructions(): import("@smithy/middleware-endpoint").EndpointParameterInstructions; +}; +/** + *

Describes Region-specific settings for a global table.

+ * + *

This documentation is for version 2017.11.29 (Legacy) of global tables, which should be avoided for new global tables. Customers should use Global Tables version 2019.11.21 (Current) when possible, because it provides greater flexibility, higher efficiency, and consumes less write capacity than 2017.11.29 (Legacy).

+ *

To determine which version you're using, see Determining the global table version you are using. To update existing global tables from version 2017.11.29 (Legacy) to version 2019.11.21 (Current), see Upgrading global tables.

+ *
+ * @example + * Use a bare-bones client and the command you need to make an API call. + * ```javascript + * import { DynamoDBClient, DescribeGlobalTableSettingsCommand } from "@aws-sdk/client-dynamodb"; // ES Modules import + * // const { DynamoDBClient, DescribeGlobalTableSettingsCommand } = require("@aws-sdk/client-dynamodb"); // CommonJS import + * const client = new DynamoDBClient(config); + * const input = { // DescribeGlobalTableSettingsInput + * GlobalTableName: "STRING_VALUE", // required + * }; + * const command = new DescribeGlobalTableSettingsCommand(input); + * const response = await client.send(command); + * // { // DescribeGlobalTableSettingsOutput + * // GlobalTableName: "STRING_VALUE", + * // ReplicaSettings: [ // ReplicaSettingsDescriptionList + * // { // ReplicaSettingsDescription + * // RegionName: "STRING_VALUE", // required + * // ReplicaStatus: "CREATING" || "CREATION_FAILED" || "UPDATING" || "DELETING" || "ACTIVE" || "REGION_DISABLED" || "INACCESSIBLE_ENCRYPTION_CREDENTIALS", + * // ReplicaBillingModeSummary: { // BillingModeSummary + * // BillingMode: "PROVISIONED" || "PAY_PER_REQUEST", + * // LastUpdateToPayPerRequestDateTime: new Date("TIMESTAMP"), + * // }, + * // ReplicaProvisionedReadCapacityUnits: Number("long"), + * // ReplicaProvisionedReadCapacityAutoScalingSettings: { // AutoScalingSettingsDescription + * // MinimumUnits: Number("long"), + * // MaximumUnits: Number("long"), + * // AutoScalingDisabled: true || false, + * // AutoScalingRoleArn: "STRING_VALUE", + * // ScalingPolicies: [ // AutoScalingPolicyDescriptionList + * // { // AutoScalingPolicyDescription + * // PolicyName: "STRING_VALUE", + * // TargetTrackingScalingPolicyConfiguration: { // AutoScalingTargetTrackingScalingPolicyConfigurationDescription + * // DisableScaleIn: true || false, + * // ScaleInCooldown: Number("int"), + * // ScaleOutCooldown: Number("int"), + * // TargetValue: Number("double"), // required + * // }, + * // }, + * // ], + * // }, + * // 
ReplicaProvisionedWriteCapacityUnits: Number("long"), + * // ReplicaProvisionedWriteCapacityAutoScalingSettings: { + * // MinimumUnits: Number("long"), + * // MaximumUnits: Number("long"), + * // AutoScalingDisabled: true || false, + * // AutoScalingRoleArn: "STRING_VALUE", + * // ScalingPolicies: [ + * // { + * // PolicyName: "STRING_VALUE", + * // TargetTrackingScalingPolicyConfiguration: { + * // DisableScaleIn: true || false, + * // ScaleInCooldown: Number("int"), + * // ScaleOutCooldown: Number("int"), + * // TargetValue: Number("double"), // required + * // }, + * // }, + * // ], + * // }, + * // ReplicaGlobalSecondaryIndexSettings: [ // ReplicaGlobalSecondaryIndexSettingsDescriptionList + * // { // ReplicaGlobalSecondaryIndexSettingsDescription + * // IndexName: "STRING_VALUE", // required + * // IndexStatus: "CREATING" || "UPDATING" || "DELETING" || "ACTIVE", + * // ProvisionedReadCapacityUnits: Number("long"), + * // ProvisionedReadCapacityAutoScalingSettings: { + * // MinimumUnits: Number("long"), + * // MaximumUnits: Number("long"), + * // AutoScalingDisabled: true || false, + * // AutoScalingRoleArn: "STRING_VALUE", + * // ScalingPolicies: [ + * // { + * // PolicyName: "STRING_VALUE", + * // TargetTrackingScalingPolicyConfiguration: { + * // DisableScaleIn: true || false, + * // ScaleInCooldown: Number("int"), + * // ScaleOutCooldown: Number("int"), + * // TargetValue: Number("double"), // required + * // }, + * // }, + * // ], + * // }, + * // ProvisionedWriteCapacityUnits: Number("long"), + * // ProvisionedWriteCapacityAutoScalingSettings: { + * // MinimumUnits: Number("long"), + * // MaximumUnits: Number("long"), + * // AutoScalingDisabled: true || false, + * // AutoScalingRoleArn: "STRING_VALUE", + * // ScalingPolicies: [ + * // { + * // PolicyName: "STRING_VALUE", + * // TargetTrackingScalingPolicyConfiguration: { + * // DisableScaleIn: true || false, + * // ScaleInCooldown: Number("int"), + * // ScaleOutCooldown: Number("int"), + * // TargetValue: 
Number("double"), // required + * // }, + * // }, + * // ], + * // }, + * // }, + * // ], + * // ReplicaTableClassSummary: { // TableClassSummary + * // TableClass: "STANDARD" || "STANDARD_INFREQUENT_ACCESS", + * // LastUpdateDateTime: new Date("TIMESTAMP"), + * // }, + * // }, + * // ], + * // }; + * + * ``` + * + * @param DescribeGlobalTableSettingsCommandInput - {@link DescribeGlobalTableSettingsCommandInput} + * @returns {@link DescribeGlobalTableSettingsCommandOutput} + * @see {@link DescribeGlobalTableSettingsCommandInput} for command's `input` shape. + * @see {@link DescribeGlobalTableSettingsCommandOutput} for command's `response` shape. + * @see {@link DynamoDBClientResolvedConfig | config} for DynamoDBClient's `config` shape. + * + * @throws {@link GlobalTableNotFoundException} (client fault) + *

The specified global table does not exist.

+ * + * @throws {@link InternalServerError} (server fault) + *

An error occurred on the server side.

+ * + * @throws {@link InvalidEndpointException} (client fault) + * + * @throws {@link DynamoDBServiceException} + *

Base exception class for all service exceptions from DynamoDB service.

+ * + * + * @public + */ +export declare class DescribeGlobalTableSettingsCommand extends DescribeGlobalTableSettingsCommand_base { + /** @internal type navigation helper, not in runtime. */ + protected static __types: { + api: { + input: DescribeGlobalTableSettingsInput; + output: DescribeGlobalTableSettingsOutput; + }; + sdk: { + input: DescribeGlobalTableSettingsCommandInput; + output: DescribeGlobalTableSettingsCommandOutput; + }; + }; +} diff --git a/amplify/functions/fetchDocuments/node_modules/@aws-sdk/client-dynamodb/dist-types/commands/DescribeImportCommand.d.ts b/amplify/functions/fetchDocuments/node_modules/@aws-sdk/client-dynamodb/dist-types/commands/DescribeImportCommand.d.ts new file mode 100644 index 0000000..f440258 --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@aws-sdk/client-dynamodb/dist-types/commands/DescribeImportCommand.d.ts @@ -0,0 +1,165 @@ +import { Command as $Command } from "@smithy/smithy-client"; +import { MetadataBearer as __MetadataBearer } from "@smithy/types"; +import { DynamoDBClientResolvedConfig, ServiceInputTypes, ServiceOutputTypes } from "../DynamoDBClient"; +import { DescribeImportInput, DescribeImportOutput } from "../models/models_0"; +/** + * @public + */ +export type { __MetadataBearer }; +export { $Command }; +/** + * @public + * + * The input for {@link DescribeImportCommand}. + */ +export interface DescribeImportCommandInput extends DescribeImportInput { +} +/** + * @public + * + * The output of {@link DescribeImportCommand}. + */ +export interface DescribeImportCommandOutput extends DescribeImportOutput, __MetadataBearer { +} +declare const DescribeImportCommand_base: { + new (input: DescribeImportCommandInput): import("@smithy/smithy-client").CommandImpl; + new (__0_0: DescribeImportCommandInput): import("@smithy/smithy-client").CommandImpl; + getEndpointParameterInstructions(): import("@smithy/middleware-endpoint").EndpointParameterInstructions; +}; +/** + *

Represents the properties of the import.

+ * @example + * Use a bare-bones client and the command you need to make an API call. + * ```javascript + * import { DynamoDBClient, DescribeImportCommand } from "@aws-sdk/client-dynamodb"; // ES Modules import + * // const { DynamoDBClient, DescribeImportCommand } = require("@aws-sdk/client-dynamodb"); // CommonJS import + * const client = new DynamoDBClient(config); + * const input = { // DescribeImportInput + * ImportArn: "STRING_VALUE", // required + * }; + * const command = new DescribeImportCommand(input); + * const response = await client.send(command); + * // { // DescribeImportOutput + * // ImportTableDescription: { // ImportTableDescription + * // ImportArn: "STRING_VALUE", + * // ImportStatus: "IN_PROGRESS" || "COMPLETED" || "CANCELLING" || "CANCELLED" || "FAILED", + * // TableArn: "STRING_VALUE", + * // TableId: "STRING_VALUE", + * // ClientToken: "STRING_VALUE", + * // S3BucketSource: { // S3BucketSource + * // S3BucketOwner: "STRING_VALUE", + * // S3Bucket: "STRING_VALUE", // required + * // S3KeyPrefix: "STRING_VALUE", + * // }, + * // ErrorCount: Number("long"), + * // CloudWatchLogGroupArn: "STRING_VALUE", + * // InputFormat: "DYNAMODB_JSON" || "ION" || "CSV", + * // InputFormatOptions: { // InputFormatOptions + * // Csv: { // CsvOptions + * // Delimiter: "STRING_VALUE", + * // HeaderList: [ // CsvHeaderList + * // "STRING_VALUE", + * // ], + * // }, + * // }, + * // InputCompressionType: "GZIP" || "ZSTD" || "NONE", + * // TableCreationParameters: { // TableCreationParameters + * // TableName: "STRING_VALUE", // required + * // AttributeDefinitions: [ // AttributeDefinitions // required + * // { // AttributeDefinition + * // AttributeName: "STRING_VALUE", // required + * // AttributeType: "S" || "N" || "B", // required + * // }, + * // ], + * // KeySchema: [ // KeySchema // required + * // { // KeySchemaElement + * // AttributeName: "STRING_VALUE", // required + * // KeyType: "HASH" || "RANGE", // required + * // }, + * // ], + * // BillingMode: 
"PROVISIONED" || "PAY_PER_REQUEST", + * // ProvisionedThroughput: { // ProvisionedThroughput + * // ReadCapacityUnits: Number("long"), // required + * // WriteCapacityUnits: Number("long"), // required + * // }, + * // OnDemandThroughput: { // OnDemandThroughput + * // MaxReadRequestUnits: Number("long"), + * // MaxWriteRequestUnits: Number("long"), + * // }, + * // SSESpecification: { // SSESpecification + * // Enabled: true || false, + * // SSEType: "AES256" || "KMS", + * // KMSMasterKeyId: "STRING_VALUE", + * // }, + * // GlobalSecondaryIndexes: [ // GlobalSecondaryIndexList + * // { // GlobalSecondaryIndex + * // IndexName: "STRING_VALUE", // required + * // KeySchema: [ // required + * // { + * // AttributeName: "STRING_VALUE", // required + * // KeyType: "HASH" || "RANGE", // required + * // }, + * // ], + * // Projection: { // Projection + * // ProjectionType: "ALL" || "KEYS_ONLY" || "INCLUDE", + * // NonKeyAttributes: [ // NonKeyAttributeNameList + * // "STRING_VALUE", + * // ], + * // }, + * // ProvisionedThroughput: { + * // ReadCapacityUnits: Number("long"), // required + * // WriteCapacityUnits: Number("long"), // required + * // }, + * // OnDemandThroughput: { + * // MaxReadRequestUnits: Number("long"), + * // MaxWriteRequestUnits: Number("long"), + * // }, + * // WarmThroughput: { // WarmThroughput + * // ReadUnitsPerSecond: Number("long"), + * // WriteUnitsPerSecond: Number("long"), + * // }, + * // }, + * // ], + * // }, + * // StartTime: new Date("TIMESTAMP"), + * // EndTime: new Date("TIMESTAMP"), + * // ProcessedSizeBytes: Number("long"), + * // ProcessedItemCount: Number("long"), + * // ImportedItemCount: Number("long"), + * // FailureCode: "STRING_VALUE", + * // FailureMessage: "STRING_VALUE", + * // }, + * // }; + * + * ``` + * + * @param DescribeImportCommandInput - {@link DescribeImportCommandInput} + * @returns {@link DescribeImportCommandOutput} + * @see {@link DescribeImportCommandInput} for command's `input` shape. 
+ * @see {@link DescribeImportCommandOutput} for command's `response` shape. + * @see {@link DynamoDBClientResolvedConfig | config} for DynamoDBClient's `config` shape. + * + * @throws {@link ImportNotFoundException} (client fault) + *

+ * The specified import was not found. + *

+ * + * @throws {@link DynamoDBServiceException} + *

Base exception class for all service exceptions from DynamoDB service.

+ * + * + * @public + */ +export declare class DescribeImportCommand extends DescribeImportCommand_base { + /** @internal type navigation helper, not in runtime. */ + protected static __types: { + api: { + input: DescribeImportInput; + output: DescribeImportOutput; + }; + sdk: { + input: DescribeImportCommandInput; + output: DescribeImportCommandOutput; + }; + }; +} diff --git a/amplify/functions/fetchDocuments/node_modules/@aws-sdk/client-dynamodb/dist-types/commands/DescribeKinesisStreamingDestinationCommand.d.ts b/amplify/functions/fetchDocuments/node_modules/@aws-sdk/client-dynamodb/dist-types/commands/DescribeKinesisStreamingDestinationCommand.d.ts new file mode 100644 index 0000000..5af13ee --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@aws-sdk/client-dynamodb/dist-types/commands/DescribeKinesisStreamingDestinationCommand.d.ts @@ -0,0 +1,89 @@ +import { Command as $Command } from "@smithy/smithy-client"; +import { MetadataBearer as __MetadataBearer } from "@smithy/types"; +import { DynamoDBClientResolvedConfig, ServiceInputTypes, ServiceOutputTypes } from "../DynamoDBClient"; +import { DescribeKinesisStreamingDestinationInput, DescribeKinesisStreamingDestinationOutput } from "../models/models_0"; +/** + * @public + */ +export type { __MetadataBearer }; +export { $Command }; +/** + * @public + * + * The input for {@link DescribeKinesisStreamingDestinationCommand}. + */ +export interface DescribeKinesisStreamingDestinationCommandInput extends DescribeKinesisStreamingDestinationInput { +} +/** + * @public + * + * The output of {@link DescribeKinesisStreamingDestinationCommand}. 
+ */ +export interface DescribeKinesisStreamingDestinationCommandOutput extends DescribeKinesisStreamingDestinationOutput, __MetadataBearer { +} +declare const DescribeKinesisStreamingDestinationCommand_base: { + new (input: DescribeKinesisStreamingDestinationCommandInput): import("@smithy/smithy-client").CommandImpl; + new (__0_0: DescribeKinesisStreamingDestinationCommandInput): import("@smithy/smithy-client").CommandImpl; + getEndpointParameterInstructions(): import("@smithy/middleware-endpoint").EndpointParameterInstructions; +}; +/** + *

Returns information about the status of Kinesis streaming.

+ * @example + * Use a bare-bones client and the command you need to make an API call. + * ```javascript + * import { DynamoDBClient, DescribeKinesisStreamingDestinationCommand } from "@aws-sdk/client-dynamodb"; // ES Modules import + * // const { DynamoDBClient, DescribeKinesisStreamingDestinationCommand } = require("@aws-sdk/client-dynamodb"); // CommonJS import + * const client = new DynamoDBClient(config); + * const input = { // DescribeKinesisStreamingDestinationInput + * TableName: "STRING_VALUE", // required + * }; + * const command = new DescribeKinesisStreamingDestinationCommand(input); + * const response = await client.send(command); + * // { // DescribeKinesisStreamingDestinationOutput + * // TableName: "STRING_VALUE", + * // KinesisDataStreamDestinations: [ // KinesisDataStreamDestinations + * // { // KinesisDataStreamDestination + * // StreamArn: "STRING_VALUE", + * // DestinationStatus: "ENABLING" || "ACTIVE" || "DISABLING" || "DISABLED" || "ENABLE_FAILED" || "UPDATING", + * // DestinationStatusDescription: "STRING_VALUE", + * // ApproximateCreationDateTimePrecision: "MILLISECOND" || "MICROSECOND", + * // }, + * // ], + * // }; + * + * ``` + * + * @param DescribeKinesisStreamingDestinationCommandInput - {@link DescribeKinesisStreamingDestinationCommandInput} + * @returns {@link DescribeKinesisStreamingDestinationCommandOutput} + * @see {@link DescribeKinesisStreamingDestinationCommandInput} for command's `input` shape. + * @see {@link DescribeKinesisStreamingDestinationCommandOutput} for command's `response` shape. + * @see {@link DynamoDBClientResolvedConfig | config} for DynamoDBClient's `config` shape. + * + * @throws {@link InternalServerError} (server fault) + *

An error occurred on the server side.

+ * + * @throws {@link InvalidEndpointException} (client fault) + * + * @throws {@link ResourceNotFoundException} (client fault) + *

The operation tried to access a nonexistent table or index. The resource might not + * be specified correctly, or its status might not be ACTIVE.

+ * + * @throws {@link DynamoDBServiceException} + *

Base exception class for all service exceptions from DynamoDB service.

+ * + * + * @public + */ +export declare class DescribeKinesisStreamingDestinationCommand extends DescribeKinesisStreamingDestinationCommand_base { + /** @internal type navigation helper, not in runtime. */ + protected static __types: { + api: { + input: DescribeKinesisStreamingDestinationInput; + output: DescribeKinesisStreamingDestinationOutput; + }; + sdk: { + input: DescribeKinesisStreamingDestinationCommandInput; + output: DescribeKinesisStreamingDestinationCommandOutput; + }; + }; +} diff --git a/amplify/functions/fetchDocuments/node_modules/@aws-sdk/client-dynamodb/dist-types/commands/DescribeLimitsCommand.d.ts b/amplify/functions/fetchDocuments/node_modules/@aws-sdk/client-dynamodb/dist-types/commands/DescribeLimitsCommand.d.ts new file mode 100644 index 0000000..bdf6316 --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@aws-sdk/client-dynamodb/dist-types/commands/DescribeLimitsCommand.d.ts @@ -0,0 +1,163 @@ +import { Command as $Command } from "@smithy/smithy-client"; +import { MetadataBearer as __MetadataBearer } from "@smithy/types"; +import { DynamoDBClientResolvedConfig, ServiceInputTypes, ServiceOutputTypes } from "../DynamoDBClient"; +import { DescribeLimitsInput, DescribeLimitsOutput } from "../models/models_0"; +/** + * @public + */ +export type { __MetadataBearer }; +export { $Command }; +/** + * @public + * + * The input for {@link DescribeLimitsCommand}. + */ +export interface DescribeLimitsCommandInput extends DescribeLimitsInput { +} +/** + * @public + * + * The output of {@link DescribeLimitsCommand}. 
+ */ +export interface DescribeLimitsCommandOutput extends DescribeLimitsOutput, __MetadataBearer { +} +declare const DescribeLimitsCommand_base: { + new (input: DescribeLimitsCommandInput): import("@smithy/smithy-client").CommandImpl; + new (...[input]: [] | [DescribeLimitsCommandInput]): import("@smithy/smithy-client").CommandImpl; + getEndpointParameterInstructions(): import("@smithy/middleware-endpoint").EndpointParameterInstructions; +}; +/** + *

Returns the current provisioned-capacity quotas for your Amazon Web Services account in + * a Region, both for the Region as a whole and for any one DynamoDB table that you create + * there.

+ *

When you establish an Amazon Web Services account, the account has initial quotas on + * the maximum read capacity units and write capacity units that you can provision across + * all of your DynamoDB tables in a given Region. Also, there are per-table + * quotas that apply when you create a table there. For more information, see Service, + * Account, and Table Quotas page in the Amazon DynamoDB + * Developer Guide.

+ *

Although you can increase these quotas by filing a case at Amazon Web Services Support Center, obtaining the + * increase is not instantaneous. The DescribeLimits action lets you write + * code to compare the capacity you are currently using to those quotas imposed by your + * account so that you have enough time to apply for an increase before you hit a + * quota.

+ *

For example, you could use one of the Amazon Web Services SDKs to do the + * following:

+ *
    + *
  1. + *

    Call DescribeLimits for a particular Region to obtain your + * current account quotas on provisioned capacity there.

    + *
  2. + *
  3. + *

    Create a variable to hold the aggregate read capacity units provisioned for + * all your tables in that Region, and one to hold the aggregate write capacity + * units. Zero them both.

    + *
  4. + *
  5. + *

    Call ListTables to obtain a list of all your DynamoDB + * tables.

    + *
  6. + *
  7. + *

    For each table name listed by ListTables, do the + * following:

    + *
      + *
    • + *

      Call DescribeTable with the table name.

      + *
    • + *
    • + *

      Use the data returned by DescribeTable to add the read + * capacity units and write capacity units provisioned for the table itself + * to your variables.

      + *
    • + *
    • + *

      If the table has one or more global secondary indexes (GSIs), loop + * over these GSIs and add their provisioned capacity values to your + * variables as well.

      + *
    • + *
    + *
  8. + *
  9. + *

    Report the account quotas for that Region returned by + * DescribeLimits, along with the total current provisioned + * capacity levels you have calculated.

    + *
  10. + *
+ *

This will let you see whether you are getting close to your account-level + * quotas.

+ *

The per-table quotas apply only when you are creating a new table. They restrict the + * sum of the provisioned capacity of the new table itself and all its global secondary + * indexes.

+ *

For existing tables and their GSIs, DynamoDB doesn't let you increase provisioned + * capacity extremely rapidly, but the only quota that applies is that the aggregate + * provisioned capacity over all your tables and GSIs cannot exceed either of the + * per-account quotas.

+ * + *

+ * DescribeLimits should only be called periodically. You can expect + * throttling errors if you call it more than once in a minute.

+ *
+ *

The DescribeLimits Request element has no content.

+ * @example + * Use a bare-bones client and the command you need to make an API call. + * ```javascript + * import { DynamoDBClient, DescribeLimitsCommand } from "@aws-sdk/client-dynamodb"; // ES Modules import + * // const { DynamoDBClient, DescribeLimitsCommand } = require("@aws-sdk/client-dynamodb"); // CommonJS import + * const client = new DynamoDBClient(config); + * const input = {}; + * const command = new DescribeLimitsCommand(input); + * const response = await client.send(command); + * // { // DescribeLimitsOutput + * // AccountMaxReadCapacityUnits: Number("long"), + * // AccountMaxWriteCapacityUnits: Number("long"), + * // TableMaxReadCapacityUnits: Number("long"), + * // TableMaxWriteCapacityUnits: Number("long"), + * // }; + * + * ``` + * + * @param DescribeLimitsCommandInput - {@link DescribeLimitsCommandInput} + * @returns {@link DescribeLimitsCommandOutput} + * @see {@link DescribeLimitsCommandInput} for command's `input` shape. + * @see {@link DescribeLimitsCommandOutput} for command's `response` shape. + * @see {@link DynamoDBClientResolvedConfig | config} for DynamoDBClient's `config` shape. + * + * @throws {@link InternalServerError} (server fault) + *

An error occurred on the server side.

+ * + * @throws {@link InvalidEndpointException} (client fault) + * + * @throws {@link DynamoDBServiceException} + *

Base exception class for all service exceptions from DynamoDB service.

+ * + * + * @example To determine capacity limits per table and account, in the current AWS region + * ```javascript + * // The following example returns the maximum read and write capacity units per table, and for the AWS account, in the current AWS region. + * const input = { /* empty *\/ }; + * const command = new DescribeLimitsCommand(input); + * const response = await client.send(command); + * /* response is + * { + * AccountMaxReadCapacityUnits: 20000, + * AccountMaxWriteCapacityUnits: 20000, + * TableMaxReadCapacityUnits: 10000, + * TableMaxWriteCapacityUnits: 10000 + * } + * *\/ + * ``` + * + * @public + */ +export declare class DescribeLimitsCommand extends DescribeLimitsCommand_base { + /** @internal type navigation helper, not in runtime. */ + protected static __types: { + api: { + input: {}; + output: DescribeLimitsOutput; + }; + sdk: { + input: DescribeLimitsCommandInput; + output: DescribeLimitsCommandOutput; + }; + }; +} diff --git a/amplify/functions/fetchDocuments/node_modules/@aws-sdk/client-dynamodb/dist-types/commands/DescribeTableCommand.d.ts b/amplify/functions/fetchDocuments/node_modules/@aws-sdk/client-dynamodb/dist-types/commands/DescribeTableCommand.d.ts new file mode 100644 index 0000000..b74ef35 --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@aws-sdk/client-dynamodb/dist-types/commands/DescribeTableCommand.d.ts @@ -0,0 +1,263 @@ +import { Command as $Command } from "@smithy/smithy-client"; +import { MetadataBearer as __MetadataBearer } from "@smithy/types"; +import { DynamoDBClientResolvedConfig, ServiceInputTypes, ServiceOutputTypes } from "../DynamoDBClient"; +import { DescribeTableInput, DescribeTableOutput } from "../models/models_0"; +/** + * @public + */ +export type { __MetadataBearer }; +export { $Command }; +/** + * @public + * + * The input for {@link DescribeTableCommand}. 
+ */ +export interface DescribeTableCommandInput extends DescribeTableInput { +} +/** + * @public + * + * The output of {@link DescribeTableCommand}. + */ +export interface DescribeTableCommandOutput extends DescribeTableOutput, __MetadataBearer { +} +declare const DescribeTableCommand_base: { + new (input: DescribeTableCommandInput): import("@smithy/smithy-client").CommandImpl; + new (__0_0: DescribeTableCommandInput): import("@smithy/smithy-client").CommandImpl; + getEndpointParameterInstructions(): import("@smithy/middleware-endpoint").EndpointParameterInstructions; +}; +/** + *

Returns information about the table, including the current status of the table, when + * it was created, the primary key schema, and any indexes on the table.

+ * + *

For global tables, this operation only applies to global tables using Version + * 2019.11.21 (Current version).

+ *
+ * + *

If you issue a DescribeTable request immediately after a + * CreateTable request, DynamoDB might return a + * ResourceNotFoundException. This is because + * DescribeTable uses an eventually consistent query, and the metadata + * for your table might not be available at that moment. Wait for a few seconds, and + * then try the DescribeTable request again.

+ *
+ * @example + * Use a bare-bones client and the command you need to make an API call. + * ```javascript + * import { DynamoDBClient, DescribeTableCommand } from "@aws-sdk/client-dynamodb"; // ES Modules import + * // const { DynamoDBClient, DescribeTableCommand } = require("@aws-sdk/client-dynamodb"); // CommonJS import + * const client = new DynamoDBClient(config); + * const input = { // DescribeTableInput + * TableName: "STRING_VALUE", // required + * }; + * const command = new DescribeTableCommand(input); + * const response = await client.send(command); + * // { // DescribeTableOutput + * // Table: { // TableDescription + * // AttributeDefinitions: [ // AttributeDefinitions + * // { // AttributeDefinition + * // AttributeName: "STRING_VALUE", // required + * // AttributeType: "S" || "N" || "B", // required + * // }, + * // ], + * // TableName: "STRING_VALUE", + * // KeySchema: [ // KeySchema + * // { // KeySchemaElement + * // AttributeName: "STRING_VALUE", // required + * // KeyType: "HASH" || "RANGE", // required + * // }, + * // ], + * // TableStatus: "CREATING" || "UPDATING" || "DELETING" || "ACTIVE" || "INACCESSIBLE_ENCRYPTION_CREDENTIALS" || "ARCHIVING" || "ARCHIVED", + * // CreationDateTime: new Date("TIMESTAMP"), + * // ProvisionedThroughput: { // ProvisionedThroughputDescription + * // LastIncreaseDateTime: new Date("TIMESTAMP"), + * // LastDecreaseDateTime: new Date("TIMESTAMP"), + * // NumberOfDecreasesToday: Number("long"), + * // ReadCapacityUnits: Number("long"), + * // WriteCapacityUnits: Number("long"), + * // }, + * // TableSizeBytes: Number("long"), + * // ItemCount: Number("long"), + * // TableArn: "STRING_VALUE", + * // TableId: "STRING_VALUE", + * // BillingModeSummary: { // BillingModeSummary + * // BillingMode: "PROVISIONED" || "PAY_PER_REQUEST", + * // LastUpdateToPayPerRequestDateTime: new Date("TIMESTAMP"), + * // }, + * // LocalSecondaryIndexes: [ // LocalSecondaryIndexDescriptionList + * // { // LocalSecondaryIndexDescription + * // 
IndexName: "STRING_VALUE", + * // KeySchema: [ + * // { + * // AttributeName: "STRING_VALUE", // required + * // KeyType: "HASH" || "RANGE", // required + * // }, + * // ], + * // Projection: { // Projection + * // ProjectionType: "ALL" || "KEYS_ONLY" || "INCLUDE", + * // NonKeyAttributes: [ // NonKeyAttributeNameList + * // "STRING_VALUE", + * // ], + * // }, + * // IndexSizeBytes: Number("long"), + * // ItemCount: Number("long"), + * // IndexArn: "STRING_VALUE", + * // }, + * // ], + * // GlobalSecondaryIndexes: [ // GlobalSecondaryIndexDescriptionList + * // { // GlobalSecondaryIndexDescription + * // IndexName: "STRING_VALUE", + * // KeySchema: [ + * // { + * // AttributeName: "STRING_VALUE", // required + * // KeyType: "HASH" || "RANGE", // required + * // }, + * // ], + * // Projection: { + * // ProjectionType: "ALL" || "KEYS_ONLY" || "INCLUDE", + * // NonKeyAttributes: [ + * // "STRING_VALUE", + * // ], + * // }, + * // IndexStatus: "CREATING" || "UPDATING" || "DELETING" || "ACTIVE", + * // Backfilling: true || false, + * // ProvisionedThroughput: { + * // LastIncreaseDateTime: new Date("TIMESTAMP"), + * // LastDecreaseDateTime: new Date("TIMESTAMP"), + * // NumberOfDecreasesToday: Number("long"), + * // ReadCapacityUnits: Number("long"), + * // WriteCapacityUnits: Number("long"), + * // }, + * // IndexSizeBytes: Number("long"), + * // ItemCount: Number("long"), + * // IndexArn: "STRING_VALUE", + * // OnDemandThroughput: { // OnDemandThroughput + * // MaxReadRequestUnits: Number("long"), + * // MaxWriteRequestUnits: Number("long"), + * // }, + * // WarmThroughput: { // GlobalSecondaryIndexWarmThroughputDescription + * // ReadUnitsPerSecond: Number("long"), + * // WriteUnitsPerSecond: Number("long"), + * // Status: "CREATING" || "UPDATING" || "DELETING" || "ACTIVE", + * // }, + * // }, + * // ], + * // StreamSpecification: { // StreamSpecification + * // StreamEnabled: true || false, // required + * // StreamViewType: "NEW_IMAGE" || "OLD_IMAGE" || 
"NEW_AND_OLD_IMAGES" || "KEYS_ONLY", + * // }, + * // LatestStreamLabel: "STRING_VALUE", + * // LatestStreamArn: "STRING_VALUE", + * // GlobalTableVersion: "STRING_VALUE", + * // Replicas: [ // ReplicaDescriptionList + * // { // ReplicaDescription + * // RegionName: "STRING_VALUE", + * // ReplicaStatus: "CREATING" || "CREATION_FAILED" || "UPDATING" || "DELETING" || "ACTIVE" || "REGION_DISABLED" || "INACCESSIBLE_ENCRYPTION_CREDENTIALS", + * // ReplicaStatusDescription: "STRING_VALUE", + * // ReplicaStatusPercentProgress: "STRING_VALUE", + * // KMSMasterKeyId: "STRING_VALUE", + * // ProvisionedThroughputOverride: { // ProvisionedThroughputOverride + * // ReadCapacityUnits: Number("long"), + * // }, + * // OnDemandThroughputOverride: { // OnDemandThroughputOverride + * // MaxReadRequestUnits: Number("long"), + * // }, + * // WarmThroughput: { // TableWarmThroughputDescription + * // ReadUnitsPerSecond: Number("long"), + * // WriteUnitsPerSecond: Number("long"), + * // Status: "CREATING" || "UPDATING" || "DELETING" || "ACTIVE" || "INACCESSIBLE_ENCRYPTION_CREDENTIALS" || "ARCHIVING" || "ARCHIVED", + * // }, + * // GlobalSecondaryIndexes: [ // ReplicaGlobalSecondaryIndexDescriptionList + * // { // ReplicaGlobalSecondaryIndexDescription + * // IndexName: "STRING_VALUE", + * // ProvisionedThroughputOverride: { + * // ReadCapacityUnits: Number("long"), + * // }, + * // OnDemandThroughputOverride: { + * // MaxReadRequestUnits: Number("long"), + * // }, + * // WarmThroughput: { + * // ReadUnitsPerSecond: Number("long"), + * // WriteUnitsPerSecond: Number("long"), + * // Status: "CREATING" || "UPDATING" || "DELETING" || "ACTIVE", + * // }, + * // }, + * // ], + * // ReplicaInaccessibleDateTime: new Date("TIMESTAMP"), + * // ReplicaTableClassSummary: { // TableClassSummary + * // TableClass: "STANDARD" || "STANDARD_INFREQUENT_ACCESS", + * // LastUpdateDateTime: new Date("TIMESTAMP"), + * // }, + * // }, + * // ], + * // RestoreSummary: { // RestoreSummary + * // 
SourceBackupArn: "STRING_VALUE", + * // SourceTableArn: "STRING_VALUE", + * // RestoreDateTime: new Date("TIMESTAMP"), // required + * // RestoreInProgress: true || false, // required + * // }, + * // SSEDescription: { // SSEDescription + * // Status: "ENABLING" || "ENABLED" || "DISABLING" || "DISABLED" || "UPDATING", + * // SSEType: "AES256" || "KMS", + * // KMSMasterKeyArn: "STRING_VALUE", + * // InaccessibleEncryptionDateTime: new Date("TIMESTAMP"), + * // }, + * // ArchivalSummary: { // ArchivalSummary + * // ArchivalDateTime: new Date("TIMESTAMP"), + * // ArchivalReason: "STRING_VALUE", + * // ArchivalBackupArn: "STRING_VALUE", + * // }, + * // TableClassSummary: { + * // TableClass: "STANDARD" || "STANDARD_INFREQUENT_ACCESS", + * // LastUpdateDateTime: new Date("TIMESTAMP"), + * // }, + * // DeletionProtectionEnabled: true || false, + * // OnDemandThroughput: { + * // MaxReadRequestUnits: Number("long"), + * // MaxWriteRequestUnits: Number("long"), + * // }, + * // WarmThroughput: { + * // ReadUnitsPerSecond: Number("long"), + * // WriteUnitsPerSecond: Number("long"), + * // Status: "CREATING" || "UPDATING" || "DELETING" || "ACTIVE" || "INACCESSIBLE_ENCRYPTION_CREDENTIALS" || "ARCHIVING" || "ARCHIVED", + * // }, + * // MultiRegionConsistency: "EVENTUAL" || "STRONG", + * // }, + * // }; + * + * ``` + * + * @param DescribeTableCommandInput - {@link DescribeTableCommandInput} + * @returns {@link DescribeTableCommandOutput} + * @see {@link DescribeTableCommandInput} for command's `input` shape. + * @see {@link DescribeTableCommandOutput} for command's `response` shape. + * @see {@link DynamoDBClientResolvedConfig | config} for DynamoDBClient's `config` shape. + * + * @throws {@link InternalServerError} (server fault) + *

An error occurred on the server side.

+ * + * @throws {@link InvalidEndpointException} (client fault) + * + * @throws {@link ResourceNotFoundException} (client fault) + *

The operation tried to access a nonexistent table or index. The resource might not + * be specified correctly, or its status might not be ACTIVE.

+ * + * @throws {@link DynamoDBServiceException} + *

Base exception class for all service exceptions from DynamoDB service.

+ * + * + * @public + */ +export declare class DescribeTableCommand extends DescribeTableCommand_base { + /** @internal type navigation helper, not in runtime. */ + protected static __types: { + api: { + input: DescribeTableInput; + output: DescribeTableOutput; + }; + sdk: { + input: DescribeTableCommandInput; + output: DescribeTableCommandOutput; + }; + }; +} diff --git a/amplify/functions/fetchDocuments/node_modules/@aws-sdk/client-dynamodb/dist-types/commands/DescribeTableReplicaAutoScalingCommand.d.ts b/amplify/functions/fetchDocuments/node_modules/@aws-sdk/client-dynamodb/dist-types/commands/DescribeTableReplicaAutoScalingCommand.d.ts new file mode 100644 index 0000000..6875231 --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@aws-sdk/client-dynamodb/dist-types/commands/DescribeTableReplicaAutoScalingCommand.d.ts @@ -0,0 +1,166 @@ +import { Command as $Command } from "@smithy/smithy-client"; +import { MetadataBearer as __MetadataBearer } from "@smithy/types"; +import { DynamoDBClientResolvedConfig, ServiceInputTypes, ServiceOutputTypes } from "../DynamoDBClient"; +import { DescribeTableReplicaAutoScalingInput, DescribeTableReplicaAutoScalingOutput } from "../models/models_0"; +/** + * @public + */ +export type { __MetadataBearer }; +export { $Command }; +/** + * @public + * + * The input for {@link DescribeTableReplicaAutoScalingCommand}. + */ +export interface DescribeTableReplicaAutoScalingCommandInput extends DescribeTableReplicaAutoScalingInput { +} +/** + * @public + * + * The output of {@link DescribeTableReplicaAutoScalingCommand}. 
+ */ +export interface DescribeTableReplicaAutoScalingCommandOutput extends DescribeTableReplicaAutoScalingOutput, __MetadataBearer { +} +declare const DescribeTableReplicaAutoScalingCommand_base: { + new (input: DescribeTableReplicaAutoScalingCommandInput): import("@smithy/smithy-client").CommandImpl; + new (__0_0: DescribeTableReplicaAutoScalingCommandInput): import("@smithy/smithy-client").CommandImpl; + getEndpointParameterInstructions(): import("@smithy/middleware-endpoint").EndpointParameterInstructions; +}; +/** + *

Describes auto scaling settings across replicas of the global table at once.

+ * + *

For global tables, this operation only applies to global tables using Version + * 2019.11.21 (Current version).

+ *
+ * @example + * Use a bare-bones client and the command you need to make an API call. + * ```javascript + * import { DynamoDBClient, DescribeTableReplicaAutoScalingCommand } from "@aws-sdk/client-dynamodb"; // ES Modules import + * // const { DynamoDBClient, DescribeTableReplicaAutoScalingCommand } = require("@aws-sdk/client-dynamodb"); // CommonJS import + * const client = new DynamoDBClient(config); + * const input = { // DescribeTableReplicaAutoScalingInput + * TableName: "STRING_VALUE", // required + * }; + * const command = new DescribeTableReplicaAutoScalingCommand(input); + * const response = await client.send(command); + * // { // DescribeTableReplicaAutoScalingOutput + * // TableAutoScalingDescription: { // TableAutoScalingDescription + * // TableName: "STRING_VALUE", + * // TableStatus: "CREATING" || "UPDATING" || "DELETING" || "ACTIVE" || "INACCESSIBLE_ENCRYPTION_CREDENTIALS" || "ARCHIVING" || "ARCHIVED", + * // Replicas: [ // ReplicaAutoScalingDescriptionList + * // { // ReplicaAutoScalingDescription + * // RegionName: "STRING_VALUE", + * // GlobalSecondaryIndexes: [ // ReplicaGlobalSecondaryIndexAutoScalingDescriptionList + * // { // ReplicaGlobalSecondaryIndexAutoScalingDescription + * // IndexName: "STRING_VALUE", + * // IndexStatus: "CREATING" || "UPDATING" || "DELETING" || "ACTIVE", + * // ProvisionedReadCapacityAutoScalingSettings: { // AutoScalingSettingsDescription + * // MinimumUnits: Number("long"), + * // MaximumUnits: Number("long"), + * // AutoScalingDisabled: true || false, + * // AutoScalingRoleArn: "STRING_VALUE", + * // ScalingPolicies: [ // AutoScalingPolicyDescriptionList + * // { // AutoScalingPolicyDescription + * // PolicyName: "STRING_VALUE", + * // TargetTrackingScalingPolicyConfiguration: { // AutoScalingTargetTrackingScalingPolicyConfigurationDescription + * // DisableScaleIn: true || false, + * // ScaleInCooldown: Number("int"), + * // ScaleOutCooldown: Number("int"), + * // TargetValue: Number("double"), // required + * // 
}, + * // }, + * // ], + * // }, + * // ProvisionedWriteCapacityAutoScalingSettings: { + * // MinimumUnits: Number("long"), + * // MaximumUnits: Number("long"), + * // AutoScalingDisabled: true || false, + * // AutoScalingRoleArn: "STRING_VALUE", + * // ScalingPolicies: [ + * // { + * // PolicyName: "STRING_VALUE", + * // TargetTrackingScalingPolicyConfiguration: { + * // DisableScaleIn: true || false, + * // ScaleInCooldown: Number("int"), + * // ScaleOutCooldown: Number("int"), + * // TargetValue: Number("double"), // required + * // }, + * // }, + * // ], + * // }, + * // }, + * // ], + * // ReplicaProvisionedReadCapacityAutoScalingSettings: { + * // MinimumUnits: Number("long"), + * // MaximumUnits: Number("long"), + * // AutoScalingDisabled: true || false, + * // AutoScalingRoleArn: "STRING_VALUE", + * // ScalingPolicies: [ + * // { + * // PolicyName: "STRING_VALUE", + * // TargetTrackingScalingPolicyConfiguration: { + * // DisableScaleIn: true || false, + * // ScaleInCooldown: Number("int"), + * // ScaleOutCooldown: Number("int"), + * // TargetValue: Number("double"), // required + * // }, + * // }, + * // ], + * // }, + * // ReplicaProvisionedWriteCapacityAutoScalingSettings: { + * // MinimumUnits: Number("long"), + * // MaximumUnits: Number("long"), + * // AutoScalingDisabled: true || false, + * // AutoScalingRoleArn: "STRING_VALUE", + * // ScalingPolicies: [ + * // { + * // PolicyName: "STRING_VALUE", + * // TargetTrackingScalingPolicyConfiguration: { + * // DisableScaleIn: true || false, + * // ScaleInCooldown: Number("int"), + * // ScaleOutCooldown: Number("int"), + * // TargetValue: Number("double"), // required + * // }, + * // }, + * // ], + * // }, + * // ReplicaStatus: "CREATING" || "CREATION_FAILED" || "UPDATING" || "DELETING" || "ACTIVE" || "REGION_DISABLED" || "INACCESSIBLE_ENCRYPTION_CREDENTIALS", + * // }, + * // ], + * // }, + * // }; + * + * ``` + * + * @param DescribeTableReplicaAutoScalingCommandInput - {@link 
DescribeTableReplicaAutoScalingCommandInput} + * @returns {@link DescribeTableReplicaAutoScalingCommandOutput} + * @see {@link DescribeTableReplicaAutoScalingCommandInput} for command's `input` shape. + * @see {@link DescribeTableReplicaAutoScalingCommandOutput} for command's `response` shape. + * @see {@link DynamoDBClientResolvedConfig | config} for DynamoDBClient's `config` shape. + * + * @throws {@link InternalServerError} (server fault) + *

An error occurred on the server side.

+ * + * @throws {@link ResourceNotFoundException} (client fault) + *

The operation tried to access a nonexistent table or index. The resource might not + * be specified correctly, or its status might not be ACTIVE.

+ * + * @throws {@link DynamoDBServiceException} + *

Base exception class for all service exceptions from DynamoDB service.

+ * + * + * @public + */ +export declare class DescribeTableReplicaAutoScalingCommand extends DescribeTableReplicaAutoScalingCommand_base { + /** @internal type navigation helper, not in runtime. */ + protected static __types: { + api: { + input: DescribeTableReplicaAutoScalingInput; + output: DescribeTableReplicaAutoScalingOutput; + }; + sdk: { + input: DescribeTableReplicaAutoScalingCommandInput; + output: DescribeTableReplicaAutoScalingCommandOutput; + }; + }; +} diff --git a/amplify/functions/fetchDocuments/node_modules/@aws-sdk/client-dynamodb/dist-types/commands/DescribeTimeToLiveCommand.d.ts b/amplify/functions/fetchDocuments/node_modules/@aws-sdk/client-dynamodb/dist-types/commands/DescribeTimeToLiveCommand.d.ts new file mode 100644 index 0000000..f5f3419 --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@aws-sdk/client-dynamodb/dist-types/commands/DescribeTimeToLiveCommand.d.ts @@ -0,0 +1,84 @@ +import { Command as $Command } from "@smithy/smithy-client"; +import { MetadataBearer as __MetadataBearer } from "@smithy/types"; +import { DynamoDBClientResolvedConfig, ServiceInputTypes, ServiceOutputTypes } from "../DynamoDBClient"; +import { DescribeTimeToLiveInput, DescribeTimeToLiveOutput } from "../models/models_0"; +/** + * @public + */ +export type { __MetadataBearer }; +export { $Command }; +/** + * @public + * + * The input for {@link DescribeTimeToLiveCommand}. + */ +export interface DescribeTimeToLiveCommandInput extends DescribeTimeToLiveInput { +} +/** + * @public + * + * The output of {@link DescribeTimeToLiveCommand}. 
+ */ +export interface DescribeTimeToLiveCommandOutput extends DescribeTimeToLiveOutput, __MetadataBearer { +} +declare const DescribeTimeToLiveCommand_base: { + new (input: DescribeTimeToLiveCommandInput): import("@smithy/smithy-client").CommandImpl; + new (__0_0: DescribeTimeToLiveCommandInput): import("@smithy/smithy-client").CommandImpl; + getEndpointParameterInstructions(): import("@smithy/middleware-endpoint").EndpointParameterInstructions; +}; +/** + *

Gives a description of the Time to Live (TTL) status on the specified table.

+ * @example + * Use a bare-bones client and the command you need to make an API call. + * ```javascript + * import { DynamoDBClient, DescribeTimeToLiveCommand } from "@aws-sdk/client-dynamodb"; // ES Modules import + * // const { DynamoDBClient, DescribeTimeToLiveCommand } = require("@aws-sdk/client-dynamodb"); // CommonJS import + * const client = new DynamoDBClient(config); + * const input = { // DescribeTimeToLiveInput + * TableName: "STRING_VALUE", // required + * }; + * const command = new DescribeTimeToLiveCommand(input); + * const response = await client.send(command); + * // { // DescribeTimeToLiveOutput + * // TimeToLiveDescription: { // TimeToLiveDescription + * // TimeToLiveStatus: "ENABLING" || "DISABLING" || "ENABLED" || "DISABLED", + * // AttributeName: "STRING_VALUE", + * // }, + * // }; + * + * ``` + * + * @param DescribeTimeToLiveCommandInput - {@link DescribeTimeToLiveCommandInput} + * @returns {@link DescribeTimeToLiveCommandOutput} + * @see {@link DescribeTimeToLiveCommandInput} for command's `input` shape. + * @see {@link DescribeTimeToLiveCommandOutput} for command's `response` shape. + * @see {@link DynamoDBClientResolvedConfig | config} for DynamoDBClient's `config` shape. + * + * @throws {@link InternalServerError} (server fault) + *

An error occurred on the server side.

+ * + * @throws {@link InvalidEndpointException} (client fault) + * + * @throws {@link ResourceNotFoundException} (client fault) + *

The operation tried to access a nonexistent table or index. The resource might not + * be specified correctly, or its status might not be ACTIVE.

+ * + * @throws {@link DynamoDBServiceException} + *

Base exception class for all service exceptions from DynamoDB service.

+ * + * + * @public + */ +export declare class DescribeTimeToLiveCommand extends DescribeTimeToLiveCommand_base { + /** @internal type navigation helper, not in runtime. */ + protected static __types: { + api: { + input: DescribeTimeToLiveInput; + output: DescribeTimeToLiveOutput; + }; + sdk: { + input: DescribeTimeToLiveCommandInput; + output: DescribeTimeToLiveCommandOutput; + }; + }; +} diff --git a/amplify/functions/fetchDocuments/node_modules/@aws-sdk/client-dynamodb/dist-types/commands/DisableKinesisStreamingDestinationCommand.d.ts b/amplify/functions/fetchDocuments/node_modules/@aws-sdk/client-dynamodb/dist-types/commands/DisableKinesisStreamingDestinationCommand.d.ts new file mode 100644 index 0000000..e52268c --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@aws-sdk/client-dynamodb/dist-types/commands/DisableKinesisStreamingDestinationCommand.d.ts @@ -0,0 +1,122 @@ +import { Command as $Command } from "@smithy/smithy-client"; +import { MetadataBearer as __MetadataBearer } from "@smithy/types"; +import { DynamoDBClientResolvedConfig, ServiceInputTypes, ServiceOutputTypes } from "../DynamoDBClient"; +import { KinesisStreamingDestinationInput, KinesisStreamingDestinationOutput } from "../models/models_0"; +/** + * @public + */ +export type { __MetadataBearer }; +export { $Command }; +/** + * @public + * + * The input for {@link DisableKinesisStreamingDestinationCommand}. + */ +export interface DisableKinesisStreamingDestinationCommandInput extends KinesisStreamingDestinationInput { +} +/** + * @public + * + * The output of {@link DisableKinesisStreamingDestinationCommand}. 
+ */ +export interface DisableKinesisStreamingDestinationCommandOutput extends KinesisStreamingDestinationOutput, __MetadataBearer { +} +declare const DisableKinesisStreamingDestinationCommand_base: { + new (input: DisableKinesisStreamingDestinationCommandInput): import("@smithy/smithy-client").CommandImpl; + new (__0_0: DisableKinesisStreamingDestinationCommandInput): import("@smithy/smithy-client").CommandImpl; + getEndpointParameterInstructions(): import("@smithy/middleware-endpoint").EndpointParameterInstructions; +}; +/** + *

Stops replication from the DynamoDB table to the Kinesis data stream. This + * is done without deleting either of the resources.

+ * @example + * Use a bare-bones client and the command you need to make an API call. + * ```javascript + * import { DynamoDBClient, DisableKinesisStreamingDestinationCommand } from "@aws-sdk/client-dynamodb"; // ES Modules import + * // const { DynamoDBClient, DisableKinesisStreamingDestinationCommand } = require("@aws-sdk/client-dynamodb"); // CommonJS import + * const client = new DynamoDBClient(config); + * const input = { // KinesisStreamingDestinationInput + * TableName: "STRING_VALUE", // required + * StreamArn: "STRING_VALUE", // required + * EnableKinesisStreamingConfiguration: { // EnableKinesisStreamingConfiguration + * ApproximateCreationDateTimePrecision: "MILLISECOND" || "MICROSECOND", + * }, + * }; + * const command = new DisableKinesisStreamingDestinationCommand(input); + * const response = await client.send(command); + * // { // KinesisStreamingDestinationOutput + * // TableName: "STRING_VALUE", + * // StreamArn: "STRING_VALUE", + * // DestinationStatus: "ENABLING" || "ACTIVE" || "DISABLING" || "DISABLED" || "ENABLE_FAILED" || "UPDATING", + * // EnableKinesisStreamingConfiguration: { // EnableKinesisStreamingConfiguration + * // ApproximateCreationDateTimePrecision: "MILLISECOND" || "MICROSECOND", + * // }, + * // }; + * + * ``` + * + * @param DisableKinesisStreamingDestinationCommandInput - {@link DisableKinesisStreamingDestinationCommandInput} + * @returns {@link DisableKinesisStreamingDestinationCommandOutput} + * @see {@link DisableKinesisStreamingDestinationCommandInput} for command's `input` shape. + * @see {@link DisableKinesisStreamingDestinationCommandOutput} for command's `response` shape. + * @see {@link DynamoDBClientResolvedConfig | config} for DynamoDBClient's `config` shape. + * + * @throws {@link InternalServerError} (server fault) + *

An error occurred on the server side.

+ * + * @throws {@link InvalidEndpointException} (client fault) + * + * @throws {@link LimitExceededException} (client fault) + *

There is no limit to the number of daily on-demand backups that can be taken.

+ *

For most purposes, up to 500 simultaneous table operations are allowed per account. These operations + * include CreateTable, UpdateTable, + * DeleteTable,UpdateTimeToLive, + * RestoreTableFromBackup, and RestoreTableToPointInTime.

+ *

When you are creating a table with one or more secondary + * indexes, you can have up to 250 such requests running at a time. However, if the table or + * index specifications are complex, then DynamoDB might temporarily reduce the number + * of concurrent operations.

+ *

When importing into DynamoDB, up to 50 simultaneous import table operations are allowed per account.

+ *

There is a soft account quota of 2,500 tables.

+ *

GetRecords was called with a value of more than 1000 for the limit request parameter.

+ *

More than 2 processes are reading from the same streams shard at the same time. Exceeding + * this limit may result in request throttling.

+ * + * @throws {@link ResourceInUseException} (client fault) + *

The operation conflicts with the resource's availability. For example:

+ *
    + *
  • + *

    You attempted to recreate an existing table.

    + *
  • + *
  • + *

    You tried to delete a table currently in the CREATING state.

    + *
  • + *
  • + *

    You tried to update a resource that was already being updated.

    + *
  • + *
+ *

When appropriate, wait for the ongoing update to complete and attempt the request again.

+ * + * @throws {@link ResourceNotFoundException} (client fault) + *

The operation tried to access a nonexistent table or index. The resource might not + * be specified correctly, or its status might not be ACTIVE.

+ * + * @throws {@link DynamoDBServiceException} + *

Base exception class for all service exceptions from DynamoDB service.

+ * + * + * @public + */ +export declare class DisableKinesisStreamingDestinationCommand extends DisableKinesisStreamingDestinationCommand_base { + /** @internal type navigation helper, not in runtime. */ + protected static __types: { + api: { + input: KinesisStreamingDestinationInput; + output: KinesisStreamingDestinationOutput; + }; + sdk: { + input: DisableKinesisStreamingDestinationCommandInput; + output: DisableKinesisStreamingDestinationCommandOutput; + }; + }; +} diff --git a/amplify/functions/fetchDocuments/node_modules/@aws-sdk/client-dynamodb/dist-types/commands/EnableKinesisStreamingDestinationCommand.d.ts b/amplify/functions/fetchDocuments/node_modules/@aws-sdk/client-dynamodb/dist-types/commands/EnableKinesisStreamingDestinationCommand.d.ts new file mode 100644 index 0000000..ff8985a --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@aws-sdk/client-dynamodb/dist-types/commands/EnableKinesisStreamingDestinationCommand.d.ts @@ -0,0 +1,124 @@ +import { Command as $Command } from "@smithy/smithy-client"; +import { MetadataBearer as __MetadataBearer } from "@smithy/types"; +import { DynamoDBClientResolvedConfig, ServiceInputTypes, ServiceOutputTypes } from "../DynamoDBClient"; +import { KinesisStreamingDestinationInput, KinesisStreamingDestinationOutput } from "../models/models_0"; +/** + * @public + */ +export type { __MetadataBearer }; +export { $Command }; +/** + * @public + * + * The input for {@link EnableKinesisStreamingDestinationCommand}. + */ +export interface EnableKinesisStreamingDestinationCommandInput extends KinesisStreamingDestinationInput { +} +/** + * @public + * + * The output of {@link EnableKinesisStreamingDestinationCommand}. 
+ */ +export interface EnableKinesisStreamingDestinationCommandOutput extends KinesisStreamingDestinationOutput, __MetadataBearer { +} +declare const EnableKinesisStreamingDestinationCommand_base: { + new (input: EnableKinesisStreamingDestinationCommandInput): import("@smithy/smithy-client").CommandImpl; + new (__0_0: EnableKinesisStreamingDestinationCommandInput): import("@smithy/smithy-client").CommandImpl; + getEndpointParameterInstructions(): import("@smithy/middleware-endpoint").EndpointParameterInstructions; +}; +/** + *

Starts table data replication to the specified Kinesis data stream at a timestamp + * chosen during the enable workflow. If this operation doesn't return results immediately, + * use DescribeKinesisStreamingDestination to check if streaming to the Kinesis data stream + * is ACTIVE.

+ * @example + * Use a bare-bones client and the command you need to make an API call. + * ```javascript + * import { DynamoDBClient, EnableKinesisStreamingDestinationCommand } from "@aws-sdk/client-dynamodb"; // ES Modules import + * // const { DynamoDBClient, EnableKinesisStreamingDestinationCommand } = require("@aws-sdk/client-dynamodb"); // CommonJS import + * const client = new DynamoDBClient(config); + * const input = { // KinesisStreamingDestinationInput + * TableName: "STRING_VALUE", // required + * StreamArn: "STRING_VALUE", // required + * EnableKinesisStreamingConfiguration: { // EnableKinesisStreamingConfiguration + * ApproximateCreationDateTimePrecision: "MILLISECOND" || "MICROSECOND", + * }, + * }; + * const command = new EnableKinesisStreamingDestinationCommand(input); + * const response = await client.send(command); + * // { // KinesisStreamingDestinationOutput + * // TableName: "STRING_VALUE", + * // StreamArn: "STRING_VALUE", + * // DestinationStatus: "ENABLING" || "ACTIVE" || "DISABLING" || "DISABLED" || "ENABLE_FAILED" || "UPDATING", + * // EnableKinesisStreamingConfiguration: { // EnableKinesisStreamingConfiguration + * // ApproximateCreationDateTimePrecision: "MILLISECOND" || "MICROSECOND", + * // }, + * // }; + * + * ``` + * + * @param EnableKinesisStreamingDestinationCommandInput - {@link EnableKinesisStreamingDestinationCommandInput} + * @returns {@link EnableKinesisStreamingDestinationCommandOutput} + * @see {@link EnableKinesisStreamingDestinationCommandInput} for command's `input` shape. + * @see {@link EnableKinesisStreamingDestinationCommandOutput} for command's `response` shape. + * @see {@link DynamoDBClientResolvedConfig | config} for DynamoDBClient's `config` shape. + * + * @throws {@link InternalServerError} (server fault) + *

An error occurred on the server side.

+ * + * @throws {@link InvalidEndpointException} (client fault) + * + * @throws {@link LimitExceededException} (client fault) + *

There is no limit to the number of daily on-demand backups that can be taken.

+ *

For most purposes, up to 500 simultaneous table operations are allowed per account. These operations + * include CreateTable, UpdateTable, + * DeleteTable,UpdateTimeToLive, + * RestoreTableFromBackup, and RestoreTableToPointInTime.

+ *

When you are creating a table with one or more secondary + * indexes, you can have up to 250 such requests running at a time. However, if the table or + * index specifications are complex, then DynamoDB might temporarily reduce the number + * of concurrent operations.

+ *

When importing into DynamoDB, up to 50 simultaneous import table operations are allowed per account.

+ *

There is a soft account quota of 2,500 tables.

+ *

GetRecords was called with a value of more than 1000 for the limit request parameter.

+ *

More than 2 processes are reading from the same streams shard at the same time. Exceeding + * this limit may result in request throttling.

+ * + * @throws {@link ResourceInUseException} (client fault) + *

The operation conflicts with the resource's availability. For example:

+ *
    + *
  • + *

    You attempted to recreate an existing table.

    + *
  • + *
  • + *

    You tried to delete a table currently in the CREATING state.

    + *
  • + *
  • + *

    You tried to update a resource that was already being updated.

    + *
  • + *
+ *

When appropriate, wait for the ongoing update to complete and attempt the request again.

+ * + * @throws {@link ResourceNotFoundException} (client fault) + *

The operation tried to access a nonexistent table or index. The resource might not + * be specified correctly, or its status might not be ACTIVE.

+ * + * @throws {@link DynamoDBServiceException} + *

Base exception class for all service exceptions from DynamoDB service.

+ * + * + * @public + */ +export declare class EnableKinesisStreamingDestinationCommand extends EnableKinesisStreamingDestinationCommand_base { + /** @internal type navigation helper, not in runtime. */ + protected static __types: { + api: { + input: KinesisStreamingDestinationInput; + output: KinesisStreamingDestinationOutput; + }; + sdk: { + input: EnableKinesisStreamingDestinationCommandInput; + output: EnableKinesisStreamingDestinationCommandOutput; + }; + }; +} diff --git a/amplify/functions/fetchDocuments/node_modules/@aws-sdk/client-dynamodb/dist-types/commands/ExecuteStatementCommand.d.ts b/amplify/functions/fetchDocuments/node_modules/@aws-sdk/client-dynamodb/dist-types/commands/ExecuteStatementCommand.d.ts new file mode 100644 index 0000000..999b557 --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@aws-sdk/client-dynamodb/dist-types/commands/ExecuteStatementCommand.d.ts @@ -0,0 +1,242 @@ +import { Command as $Command } from "@smithy/smithy-client"; +import { MetadataBearer as __MetadataBearer } from "@smithy/types"; +import { DynamoDBClientResolvedConfig, ServiceInputTypes, ServiceOutputTypes } from "../DynamoDBClient"; +import { ExecuteStatementInput, ExecuteStatementOutput } from "../models/models_0"; +/** + * @public + */ +export type { __MetadataBearer }; +export { $Command }; +/** + * @public + * + * The input for {@link ExecuteStatementCommand}. + */ +export interface ExecuteStatementCommandInput extends ExecuteStatementInput { +} +/** + * @public + * + * The output of {@link ExecuteStatementCommand}. 
+ */ +export interface ExecuteStatementCommandOutput extends ExecuteStatementOutput, __MetadataBearer { +} +declare const ExecuteStatementCommand_base: { + new (input: ExecuteStatementCommandInput): import("@smithy/smithy-client").CommandImpl; + new (__0_0: ExecuteStatementCommandInput): import("@smithy/smithy-client").CommandImpl; + getEndpointParameterInstructions(): import("@smithy/middleware-endpoint").EndpointParameterInstructions; +}; +/** + *

This operation allows you to perform reads and singleton writes on data stored in + * DynamoDB, using PartiQL.

+ *

For PartiQL reads (SELECT statement), if the total number of processed + * items exceeds the maximum dataset size limit of 1 MB, the read stops and results are + * returned to the user as a LastEvaluatedKey value to continue the read in a + * subsequent operation. If the filter criteria in WHERE clause does not match + * any data, the read will return an empty result set.

+ *

A single SELECT statement response can return up to the maximum number of + * items (if using the Limit parameter) or a maximum of 1 MB of data (and then apply any + * filtering to the results using WHERE clause). If + * LastEvaluatedKey is present in the response, you need to paginate the + * result set. If NextToken is present, you need to paginate the result set + * and include NextToken.

+ * @example + * Use a bare-bones client and the command you need to make an API call. + * ```javascript + * import { DynamoDBClient, ExecuteStatementCommand } from "@aws-sdk/client-dynamodb"; // ES Modules import + * // const { DynamoDBClient, ExecuteStatementCommand } = require("@aws-sdk/client-dynamodb"); // CommonJS import + * const client = new DynamoDBClient(config); + * const input = { // ExecuteStatementInput + * Statement: "STRING_VALUE", // required + * Parameters: [ // PreparedStatementParameters + * { // AttributeValue Union: only one key present + * S: "STRING_VALUE", + * N: "STRING_VALUE", + * B: new Uint8Array(), // e.g. Buffer.from("") or new TextEncoder().encode("") + * SS: [ // StringSetAttributeValue + * "STRING_VALUE", + * ], + * NS: [ // NumberSetAttributeValue + * "STRING_VALUE", + * ], + * BS: [ // BinarySetAttributeValue + * new Uint8Array(), // e.g. Buffer.from("") or new TextEncoder().encode("") + * ], + * M: { // MapAttributeValue + * "": {// Union: only one key present + * S: "STRING_VALUE", + * N: "STRING_VALUE", + * B: new Uint8Array(), // e.g. Buffer.from("") or new TextEncoder().encode("") + * SS: [ + * "STRING_VALUE", + * ], + * NS: [ + * "STRING_VALUE", + * ], + * BS: [ + * new Uint8Array(), // e.g. 
Buffer.from("") or new TextEncoder().encode("") + * ], + * M: { + * "": "", + * }, + * L: [ // ListAttributeValue + * "", + * ], + * NULL: true || false, + * BOOL: true || false, + * }, + * }, + * L: [ + * "", + * ], + * NULL: true || false, + * BOOL: true || false, + * }, + * ], + * ConsistentRead: true || false, + * NextToken: "STRING_VALUE", + * ReturnConsumedCapacity: "INDEXES" || "TOTAL" || "NONE", + * Limit: Number("int"), + * ReturnValuesOnConditionCheckFailure: "ALL_OLD" || "NONE", + * }; + * const command = new ExecuteStatementCommand(input); + * const response = await client.send(command); + * // { // ExecuteStatementOutput + * // Items: [ // ItemList + * // { // AttributeMap + * // "": { // AttributeValue Union: only one key present + * // S: "STRING_VALUE", + * // N: "STRING_VALUE", + * // B: new Uint8Array(), + * // SS: [ // StringSetAttributeValue + * // "STRING_VALUE", + * // ], + * // NS: [ // NumberSetAttributeValue + * // "STRING_VALUE", + * // ], + * // BS: [ // BinarySetAttributeValue + * // new Uint8Array(), + * // ], + * // M: { // MapAttributeValue + * // "": {// Union: only one key present + * // S: "STRING_VALUE", + * // N: "STRING_VALUE", + * // B: new Uint8Array(), + * // SS: [ + * // "STRING_VALUE", + * // ], + * // NS: [ + * // "STRING_VALUE", + * // ], + * // BS: [ + * // new Uint8Array(), + * // ], + * // M: { + * // "": "", + * // }, + * // L: [ // ListAttributeValue + * // "", + * // ], + * // NULL: true || false, + * // BOOL: true || false, + * // }, + * // }, + * // L: [ + * // "", + * // ], + * // NULL: true || false, + * // BOOL: true || false, + * // }, + * // }, + * // ], + * // NextToken: "STRING_VALUE", + * // ConsumedCapacity: { // ConsumedCapacity + * // TableName: "STRING_VALUE", + * // CapacityUnits: Number("double"), + * // ReadCapacityUnits: Number("double"), + * // WriteCapacityUnits: Number("double"), + * // Table: { // Capacity + * // ReadCapacityUnits: Number("double"), + * // WriteCapacityUnits: Number("double"), 
+ * // CapacityUnits: Number("double"), + * // }, + * // LocalSecondaryIndexes: { // SecondaryIndexesCapacityMap + * // "": { + * // ReadCapacityUnits: Number("double"), + * // WriteCapacityUnits: Number("double"), + * // CapacityUnits: Number("double"), + * // }, + * // }, + * // GlobalSecondaryIndexes: { + * // "": { + * // ReadCapacityUnits: Number("double"), + * // WriteCapacityUnits: Number("double"), + * // CapacityUnits: Number("double"), + * // }, + * // }, + * // }, + * // LastEvaluatedKey: { // Key + * // "": "", + * // }, + * // }; + * + * ``` + * + * @param ExecuteStatementCommandInput - {@link ExecuteStatementCommandInput} + * @returns {@link ExecuteStatementCommandOutput} + * @see {@link ExecuteStatementCommandInput} for command's `input` shape. + * @see {@link ExecuteStatementCommandOutput} for command's `response` shape. + * @see {@link DynamoDBClientResolvedConfig | config} for DynamoDBClient's `config` shape. + * + * @throws {@link ConditionalCheckFailedException} (client fault) + *

A condition specified in the operation failed to be evaluated.

+ * + * @throws {@link DuplicateItemException} (client fault) + *

There was an attempt to insert an item with the same primary key as an item that + * already exists in the DynamoDB table.

+ * + * @throws {@link InternalServerError} (server fault) + *

An error occurred on the server side.

+ * + * @throws {@link ItemCollectionSizeLimitExceededException} (client fault) + *

An item collection is too large. This exception is only returned for tables that + * have one or more local secondary indexes.

+ * + * @throws {@link ProvisionedThroughputExceededException} (client fault) + *

Your request rate is too high. The Amazon Web Services SDKs for DynamoDB + * automatically retry requests that receive this exception. Your request is eventually + * successful, unless your retry queue is too large to finish. Reduce the frequency of + * requests and use exponential backoff. For more information, go to Error Retries and Exponential Backoff in the Amazon DynamoDB Developer Guide.

+ * + * @throws {@link RequestLimitExceeded} (client fault) + *

Throughput exceeds the current throughput quota for your account. Please contact + * Amazon Web ServicesSupport to request a + * quota increase.

+ * + * @throws {@link ResourceNotFoundException} (client fault) + *

The operation tried to access a nonexistent table or index. The resource might not + * be specified correctly, or its status might not be ACTIVE.

+ * + * @throws {@link TransactionConflictException} (client fault) + *

Operation was rejected because there is an ongoing transaction for the + * item.

+ * + * @throws {@link DynamoDBServiceException} + *

Base exception class for all service exceptions from DynamoDB service.

+ * + * + * @public + */ +export declare class ExecuteStatementCommand extends ExecuteStatementCommand_base { + /** @internal type navigation helper, not in runtime. */ + protected static __types: { + api: { + input: ExecuteStatementInput; + output: ExecuteStatementOutput; + }; + sdk: { + input: ExecuteStatementCommandInput; + output: ExecuteStatementCommandOutput; + }; + }; +} diff --git a/amplify/functions/fetchDocuments/node_modules/@aws-sdk/client-dynamodb/dist-types/commands/ExecuteTransactionCommand.d.ts b/amplify/functions/fetchDocuments/node_modules/@aws-sdk/client-dynamodb/dist-types/commands/ExecuteTransactionCommand.d.ts new file mode 100644 index 0000000..6e1a94b --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@aws-sdk/client-dynamodb/dist-types/commands/ExecuteTransactionCommand.d.ts @@ -0,0 +1,533 @@ +import { Command as $Command } from "@smithy/smithy-client"; +import { MetadataBearer as __MetadataBearer } from "@smithy/types"; +import { DynamoDBClientResolvedConfig, ServiceInputTypes, ServiceOutputTypes } from "../DynamoDBClient"; +import { ExecuteTransactionInput, ExecuteTransactionOutput } from "../models/models_0"; +/** + * @public + */ +export type { __MetadataBearer }; +export { $Command }; +/** + * @public + * + * The input for {@link ExecuteTransactionCommand}. + */ +export interface ExecuteTransactionCommandInput extends ExecuteTransactionInput { +} +/** + * @public + * + * The output of {@link ExecuteTransactionCommand}. + */ +export interface ExecuteTransactionCommandOutput extends ExecuteTransactionOutput, __MetadataBearer { +} +declare const ExecuteTransactionCommand_base: { + new (input: ExecuteTransactionCommandInput): import("@smithy/smithy-client").CommandImpl; + new (__0_0: ExecuteTransactionCommandInput): import("@smithy/smithy-client").CommandImpl; + getEndpointParameterInstructions(): import("@smithy/middleware-endpoint").EndpointParameterInstructions; +}; +/** + *

This operation allows you to perform transactional reads or writes on data stored in + * DynamoDB, using PartiQL.

+ * + *

The entire transaction must consist of either read statements or write statements, + * you cannot mix both in one transaction. The EXISTS function is an exception and can + * be used to check the condition of specific attributes of the item in a similar + * manner to ConditionCheck in the TransactWriteItems API.

+ *
+ * @example + * Use a bare-bones client and the command you need to make an API call. + * ```javascript + * import { DynamoDBClient, ExecuteTransactionCommand } from "@aws-sdk/client-dynamodb"; // ES Modules import + * // const { DynamoDBClient, ExecuteTransactionCommand } = require("@aws-sdk/client-dynamodb"); // CommonJS import + * const client = new DynamoDBClient(config); + * const input = { // ExecuteTransactionInput + * TransactStatements: [ // ParameterizedStatements // required + * { // ParameterizedStatement + * Statement: "STRING_VALUE", // required + * Parameters: [ // PreparedStatementParameters + * { // AttributeValue Union: only one key present + * S: "STRING_VALUE", + * N: "STRING_VALUE", + * B: new Uint8Array(), // e.g. Buffer.from("") or new TextEncoder().encode("") + * SS: [ // StringSetAttributeValue + * "STRING_VALUE", + * ], + * NS: [ // NumberSetAttributeValue + * "STRING_VALUE", + * ], + * BS: [ // BinarySetAttributeValue + * new Uint8Array(), // e.g. Buffer.from("") or new TextEncoder().encode("") + * ], + * M: { // MapAttributeValue + * "": {// Union: only one key present + * S: "STRING_VALUE", + * N: "STRING_VALUE", + * B: new Uint8Array(), // e.g. Buffer.from("") or new TextEncoder().encode("") + * SS: [ + * "STRING_VALUE", + * ], + * NS: [ + * "STRING_VALUE", + * ], + * BS: [ + * new Uint8Array(), // e.g. 
Buffer.from("") or new TextEncoder().encode("") + * ], + * M: { + * "": "", + * }, + * L: [ // ListAttributeValue + * "", + * ], + * NULL: true || false, + * BOOL: true || false, + * }, + * }, + * L: [ + * "", + * ], + * NULL: true || false, + * BOOL: true || false, + * }, + * ], + * ReturnValuesOnConditionCheckFailure: "ALL_OLD" || "NONE", + * }, + * ], + * ClientRequestToken: "STRING_VALUE", + * ReturnConsumedCapacity: "INDEXES" || "TOTAL" || "NONE", + * }; + * const command = new ExecuteTransactionCommand(input); + * const response = await client.send(command); + * // { // ExecuteTransactionOutput + * // Responses: [ // ItemResponseList + * // { // ItemResponse + * // Item: { // AttributeMap + * // "": { // AttributeValue Union: only one key present + * // S: "STRING_VALUE", + * // N: "STRING_VALUE", + * // B: new Uint8Array(), + * // SS: [ // StringSetAttributeValue + * // "STRING_VALUE", + * // ], + * // NS: [ // NumberSetAttributeValue + * // "STRING_VALUE", + * // ], + * // BS: [ // BinarySetAttributeValue + * // new Uint8Array(), + * // ], + * // M: { // MapAttributeValue + * // "": {// Union: only one key present + * // S: "STRING_VALUE", + * // N: "STRING_VALUE", + * // B: new Uint8Array(), + * // SS: [ + * // "STRING_VALUE", + * // ], + * // NS: [ + * // "STRING_VALUE", + * // ], + * // BS: [ + * // new Uint8Array(), + * // ], + * // M: { + * // "": "", + * // }, + * // L: [ // ListAttributeValue + * // "", + * // ], + * // NULL: true || false, + * // BOOL: true || false, + * // }, + * // }, + * // L: [ + * // "", + * // ], + * // NULL: true || false, + * // BOOL: true || false, + * // }, + * // }, + * // }, + * // ], + * // ConsumedCapacity: [ // ConsumedCapacityMultiple + * // { // ConsumedCapacity + * // TableName: "STRING_VALUE", + * // CapacityUnits: Number("double"), + * // ReadCapacityUnits: Number("double"), + * // WriteCapacityUnits: Number("double"), + * // Table: { // Capacity + * // ReadCapacityUnits: Number("double"), + * // 
WriteCapacityUnits: Number("double"), + * // CapacityUnits: Number("double"), + * // }, + * // LocalSecondaryIndexes: { // SecondaryIndexesCapacityMap + * // "": { + * // ReadCapacityUnits: Number("double"), + * // WriteCapacityUnits: Number("double"), + * // CapacityUnits: Number("double"), + * // }, + * // }, + * // GlobalSecondaryIndexes: { + * // "": { + * // ReadCapacityUnits: Number("double"), + * // WriteCapacityUnits: Number("double"), + * // CapacityUnits: Number("double"), + * // }, + * // }, + * // }, + * // ], + * // }; + * + * ``` + * + * @param ExecuteTransactionCommandInput - {@link ExecuteTransactionCommandInput} + * @returns {@link ExecuteTransactionCommandOutput} + * @see {@link ExecuteTransactionCommandInput} for command's `input` shape. + * @see {@link ExecuteTransactionCommandOutput} for command's `response` shape. + * @see {@link DynamoDBClientResolvedConfig | config} for DynamoDBClient's `config` shape. + * + * @throws {@link IdempotentParameterMismatchException} (client fault) + *

DynamoDB rejected the request because you retried a request with a + * different payload but with an idempotent token that was already used.

+ * + * @throws {@link InternalServerError} (server fault) + *

An error occurred on the server side.

+ * + * @throws {@link ProvisionedThroughputExceededException} (client fault) + *

Your request rate is too high. The Amazon Web Services SDKs for DynamoDB + * automatically retry requests that receive this exception. Your request is eventually + * successful, unless your retry queue is too large to finish. Reduce the frequency of + * requests and use exponential backoff. For more information, go to Error Retries and Exponential Backoff in the Amazon DynamoDB Developer Guide.

+ * + * @throws {@link RequestLimitExceeded} (client fault) + *

Throughput exceeds the current throughput quota for your account. Please contact + * Amazon Web ServicesSupport to request a + * quota increase.

+ * + * @throws {@link ResourceNotFoundException} (client fault) + *

The operation tried to access a nonexistent table or index. The resource might not + * be specified correctly, or its status might not be ACTIVE.

+ * + * @throws {@link TransactionCanceledException} (client fault) + *

The entire transaction request was canceled.

+ *

DynamoDB cancels a TransactWriteItems request under the following + * circumstances:

+ *
    + *
  • + *

    A condition in one of the condition expressions is not met.

    + *
  • + *
  • + *

    A table in the TransactWriteItems request is in a different + * account or region.

    + *
  • + *
  • + *

    More than one action in the TransactWriteItems operation + * targets the same item.

    + *
  • + *
  • + *

    There is insufficient provisioned capacity for the transaction to be + * completed.

    + *
  • + *
  • + *

    An item size becomes too large (larger than 400 KB), or a local secondary + * index (LSI) becomes too large, or a similar validation error occurs because of + * changes made by the transaction.

    + *
  • + *
  • + *

    There is a user error, such as an invalid data format.

    + *
  • + *
  • + *

    + * There is an ongoing TransactWriteItems operation that conflicts with a concurrent + * TransactWriteItems request. In this case the TransactWriteItems operation + * fails with a TransactionCanceledException. + *

    + *
  • + *
+ *

DynamoDB cancels a TransactGetItems request under the + * following circumstances:

+ *
    + *
  • + *

    There is an ongoing TransactGetItems operation that conflicts + * with a concurrent PutItem, UpdateItem, + * DeleteItem or TransactWriteItems request. In this + * case the TransactGetItems operation fails with a + * TransactionCanceledException.

    + *
  • + *
  • + *

    A table in the TransactGetItems request is in a different + * account or region.

    + *
  • + *
  • + *

    There is insufficient provisioned capacity for the transaction to be + * completed.

    + *
  • + *
  • + *

    There is a user error, such as an invalid data format.

    + *
  • + *
+ * + *

If using Java, DynamoDB lists the cancellation reasons on the + * CancellationReasons property. This property is not set for other + * languages. Transaction cancellation reasons are ordered in the order of requested + * items, if an item has no error it will have None code and + * Null message.

+ *
+ *

Cancellation reason codes and possible error messages:

+ *
    + *
  • + *

    No Errors:

    + *
      + *
    • + *

      Code: None + *

      + *
    • + *
    • + *

      Message: null + *

      + *
    • + *
    + *
  • + *
  • + *

    Conditional Check Failed:

    + *
      + *
    • + *

      Code: ConditionalCheckFailed + *

      + *
    • + *
    • + *

      Message: The conditional request failed.

      + *
    • + *
    + *
  • + *
  • + *

    Item Collection Size Limit Exceeded:

    + *
      + *
    • + *

      Code: ItemCollectionSizeLimitExceeded + *

      + *
    • + *
    • + *

      Message: Collection size exceeded.

      + *
    • + *
    + *
  • + *
  • + *

    Transaction Conflict:

    + *
      + *
    • + *

      Code: TransactionConflict + *

      + *
    • + *
    • + *

      Message: Transaction is ongoing for the item.

      + *
    • + *
    + *
  • + *
  • + *

    Provisioned Throughput Exceeded:

    + *
      + *
    • + *

      Code: ProvisionedThroughputExceeded + *

      + *
    • + *
    • + *

      Messages:

      + *
        + *
      • + *

        The level of configured provisioned throughput for the + * table was exceeded. Consider increasing your provisioning level + * with the UpdateTable API.

        + * + *

        This Message is received when provisioned throughput is + * exceeded is on a provisioned DynamoDB + * table.

        + *
        + *
      • + *
      • + *

        The level of configured provisioned throughput for one or + * more global secondary indexes of the table was exceeded. + * Consider increasing your provisioning level for the + * under-provisioned global secondary indexes with the UpdateTable + * API.

        + * + *

        This message is returned when provisioned throughput is + * exceeded is on a provisioned GSI.

        + *
        + *
      • + *
      + *
    • + *
    + *
  • + *
  • + *

    Throttling Error:

    + *
      + *
    • + *

      Code: ThrottlingError + *

      + *
    • + *
    • + *

      Messages:

      + *
        + *
      • + *

        Throughput exceeds the current capacity of your table or + * index. DynamoDB is automatically scaling your table or + * index so please try again shortly. If exceptions persist, check + * if you have a hot key: + * https://docs.aws.amazon.com/amazondynamodb/latest/developerguide/bp-partition-key-design.html.

        + * + *

        This message is returned when writes get throttled on an + * On-Demand table as DynamoDB is automatically + * scaling the table.

        + *
        + *
      • + *
      • + *

        Throughput exceeds the current capacity for one or more + * global secondary indexes. DynamoDB is automatically + * scaling your index so please try again shortly.

        + * + *

        This message is returned when writes get throttled on + * an On-Demand GSI as DynamoDB is automatically + * scaling the GSI.

        + *
        + *
      • + *
      + *
    • + *
    + *
  • + *
  • + *

    Validation Error:

    + *
      + *
    • + *

      Code: ValidationError + *

      + *
    • + *
    • + *

      Messages:

      + *
        + *
      • + *

        One or more parameter values were invalid.

        + *
      • + *
      • + *

        The update expression attempted to update the secondary + * index key beyond allowed size limits.

        + *
      • + *
      • + *

        The update expression attempted to update the secondary + * index key to unsupported type.

        + *
      • + *
      • + *

        An operand in the update expression has an incorrect data + * type.

        + *
      • + *
      • + *

        Item size to update has exceeded the maximum allowed + * size.

        + *
      • + *
      • + *

        Number overflow. Attempting to store a number with + * magnitude larger than supported range.

        + *
      • + *
      • + *

        Type mismatch for attribute to update.

        + *
      • + *
      • + *

        Nesting Levels have exceeded supported limits.

        + *
      • + *
      • + *

        The document path provided in the update expression is + * invalid for update.

        + *
      • + *
      • + *

        The provided expression refers to an attribute that does + * not exist in the item.

        + *
      • + *
      + *
    • + *
    + *
  • + *
+ * + * @throws {@link TransactionInProgressException} (client fault) + *

The transaction with the given request token is already in progress.

+ *

+ * Recommended Settings + *

+ * + *

+ * This is a general recommendation for handling the TransactionInProgressException. These settings help + * ensure that the client retries will trigger completion of the ongoing TransactWriteItems request. + *

+ *
+ *
    + *
  • + *

    + * Set clientExecutionTimeout to a value that allows at least one retry to be processed after 5 + * seconds have elapsed since the first attempt for the TransactWriteItems operation. + *

    + *
  • + *
  • + *

    + * Set socketTimeout to a value a little lower than the requestTimeout setting. + *

    + *
  • + *
  • + *

    + * requestTimeout should be set based on the time taken for the individual retries of a single + * HTTP request for your use case, but setting it to 1 second or higher should work well to reduce chances of + * retries and TransactionInProgressException errors. + *

    + *
  • + *
  • + *

    + * Use exponential backoff when retrying and tune backoff if needed. + *

    + *
  • + *
+ *

+ * Assuming default retry policy, + * example timeout settings based on the guidelines above are as follows: + *

+ *

Example timeline:

+ *
    + *
  • + *

    0-1000 first attempt

    + *
  • + *
  • + *

    1000-1500 first sleep/delay (default retry policy uses 500 ms as base delay for 4xx errors)

    + *
  • + *
  • + *

    1500-2500 second attempt

    + *
  • + *
  • + *

    2500-3500 second sleep/delay (500 * 2, exponential backoff)

    + *
  • + *
  • + *

    3500-4500 third attempt

    + *
  • + *
  • + *

    4500-6500 third sleep/delay (500 * 2^2)

    + *
  • + *
  • + *

    6500-7500 fourth attempt (this can trigger inline recovery since 5 seconds have elapsed since the first attempt reached TC)

    + *
  • + *
+ * + * @throws {@link DynamoDBServiceException} + *

Base exception class for all service exceptions from DynamoDB service.

+ * + * + * @public + */ +export declare class ExecuteTransactionCommand extends ExecuteTransactionCommand_base { + /** @internal type navigation helper, not in runtime. */ + protected static __types: { + api: { + input: ExecuteTransactionInput; + output: ExecuteTransactionOutput; + }; + sdk: { + input: ExecuteTransactionCommandInput; + output: ExecuteTransactionCommandOutput; + }; + }; +} diff --git a/amplify/functions/fetchDocuments/node_modules/@aws-sdk/client-dynamodb/dist-types/commands/ExportTableToPointInTimeCommand.d.ts b/amplify/functions/fetchDocuments/node_modules/@aws-sdk/client-dynamodb/dist-types/commands/ExportTableToPointInTimeCommand.d.ts new file mode 100644 index 0000000..672cebb --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@aws-sdk/client-dynamodb/dist-types/commands/ExportTableToPointInTimeCommand.d.ts @@ -0,0 +1,147 @@ +import { Command as $Command } from "@smithy/smithy-client"; +import { MetadataBearer as __MetadataBearer } from "@smithy/types"; +import { DynamoDBClientResolvedConfig, ServiceInputTypes, ServiceOutputTypes } from "../DynamoDBClient"; +import { ExportTableToPointInTimeInput, ExportTableToPointInTimeOutput } from "../models/models_0"; +/** + * @public + */ +export type { __MetadataBearer }; +export { $Command }; +/** + * @public + * + * The input for {@link ExportTableToPointInTimeCommand}. + */ +export interface ExportTableToPointInTimeCommandInput extends ExportTableToPointInTimeInput { +} +/** + * @public + * + * The output of {@link ExportTableToPointInTimeCommand}. 
+ */ +export interface ExportTableToPointInTimeCommandOutput extends ExportTableToPointInTimeOutput, __MetadataBearer { +} +declare const ExportTableToPointInTimeCommand_base: { + new (input: ExportTableToPointInTimeCommandInput): import("@smithy/smithy-client").CommandImpl; + new (__0_0: ExportTableToPointInTimeCommandInput): import("@smithy/smithy-client").CommandImpl; + getEndpointParameterInstructions(): import("@smithy/middleware-endpoint").EndpointParameterInstructions; +}; +/** + *

Exports table data to an S3 bucket. The table must have point in time recovery + * enabled, and you can export data from any time within the point in time recovery + * window.

+ * @example + * Use a bare-bones client and the command you need to make an API call. + * ```javascript + * import { DynamoDBClient, ExportTableToPointInTimeCommand } from "@aws-sdk/client-dynamodb"; // ES Modules import + * // const { DynamoDBClient, ExportTableToPointInTimeCommand } = require("@aws-sdk/client-dynamodb"); // CommonJS import + * const client = new DynamoDBClient(config); + * const input = { // ExportTableToPointInTimeInput + * TableArn: "STRING_VALUE", // required + * ExportTime: new Date("TIMESTAMP"), + * ClientToken: "STRING_VALUE", + * S3Bucket: "STRING_VALUE", // required + * S3BucketOwner: "STRING_VALUE", + * S3Prefix: "STRING_VALUE", + * S3SseAlgorithm: "AES256" || "KMS", + * S3SseKmsKeyId: "STRING_VALUE", + * ExportFormat: "DYNAMODB_JSON" || "ION", + * ExportType: "FULL_EXPORT" || "INCREMENTAL_EXPORT", + * IncrementalExportSpecification: { // IncrementalExportSpecification + * ExportFromTime: new Date("TIMESTAMP"), + * ExportToTime: new Date("TIMESTAMP"), + * ExportViewType: "NEW_IMAGE" || "NEW_AND_OLD_IMAGES", + * }, + * }; + * const command = new ExportTableToPointInTimeCommand(input); + * const response = await client.send(command); + * // { // ExportTableToPointInTimeOutput + * // ExportDescription: { // ExportDescription + * // ExportArn: "STRING_VALUE", + * // ExportStatus: "IN_PROGRESS" || "COMPLETED" || "FAILED", + * // StartTime: new Date("TIMESTAMP"), + * // EndTime: new Date("TIMESTAMP"), + * // ExportManifest: "STRING_VALUE", + * // TableArn: "STRING_VALUE", + * // TableId: "STRING_VALUE", + * // ExportTime: new Date("TIMESTAMP"), + * // ClientToken: "STRING_VALUE", + * // S3Bucket: "STRING_VALUE", + * // S3BucketOwner: "STRING_VALUE", + * // S3Prefix: "STRING_VALUE", + * // S3SseAlgorithm: "AES256" || "KMS", + * // S3SseKmsKeyId: "STRING_VALUE", + * // FailureCode: "STRING_VALUE", + * // FailureMessage: "STRING_VALUE", + * // ExportFormat: "DYNAMODB_JSON" || "ION", + * // BilledSizeBytes: Number("long"), + * // ItemCount: 
Number("long"), + * // ExportType: "FULL_EXPORT" || "INCREMENTAL_EXPORT", + * // IncrementalExportSpecification: { // IncrementalExportSpecification + * // ExportFromTime: new Date("TIMESTAMP"), + * // ExportToTime: new Date("TIMESTAMP"), + * // ExportViewType: "NEW_IMAGE" || "NEW_AND_OLD_IMAGES", + * // }, + * // }, + * // }; + * + * ``` + * + * @param ExportTableToPointInTimeCommandInput - {@link ExportTableToPointInTimeCommandInput} + * @returns {@link ExportTableToPointInTimeCommandOutput} + * @see {@link ExportTableToPointInTimeCommandInput} for command's `input` shape. + * @see {@link ExportTableToPointInTimeCommandOutput} for command's `response` shape. + * @see {@link DynamoDBClientResolvedConfig | config} for DynamoDBClient's `config` shape. + * + * @throws {@link ExportConflictException} (client fault) + *

There was a conflict when writing to the specified S3 bucket.

+ * + * @throws {@link InternalServerError} (server fault) + *

An error occurred on the server side.

+ * + * @throws {@link InvalidExportTimeException} (client fault) + *

The specified ExportTime is outside of the point in time recovery + * window.

+ * + * @throws {@link LimitExceededException} (client fault) + *

There is no limit to the number of daily on-demand backups that can be taken.

+ *

For most purposes, up to 500 simultaneous table operations are allowed per account. These operations + * include CreateTable, UpdateTable, + * DeleteTable,UpdateTimeToLive, + * RestoreTableFromBackup, and RestoreTableToPointInTime.

+ *

When you are creating a table with one or more secondary + * indexes, you can have up to 250 such requests running at a time. However, if the table or + * index specifications are complex, then DynamoDB might temporarily reduce the number + * of concurrent operations.

+ *

When importing into DynamoDB, up to 50 simultaneous import table operations are allowed per account.

+ *

There is a soft account quota of 2,500 tables.

+ *

GetRecords was called with a value of more than 1000 for the limit request parameter.

+ *

More than 2 processes are reading from the same streams shard at the same time. Exceeding + * this limit may result in request throttling.

+ * + * @throws {@link PointInTimeRecoveryUnavailableException} (client fault) + *

Point in time recovery has not yet been enabled for this source table.

+ * + * @throws {@link TableNotFoundException} (client fault) + *

A source table with the name TableName does not currently exist within + * the subscriber's account or the subscriber is operating in the wrong Amazon Web Services Region.

+ * + * @throws {@link DynamoDBServiceException} + *

Base exception class for all service exceptions from DynamoDB service.

+ * + * + * @public + */ +export declare class ExportTableToPointInTimeCommand extends ExportTableToPointInTimeCommand_base { + /** @internal type navigation helper, not in runtime. */ + protected static __types: { + api: { + input: ExportTableToPointInTimeInput; + output: ExportTableToPointInTimeOutput; + }; + sdk: { + input: ExportTableToPointInTimeCommandInput; + output: ExportTableToPointInTimeCommandOutput; + }; + }; +} diff --git a/amplify/functions/fetchDocuments/node_modules/@aws-sdk/client-dynamodb/dist-types/commands/GetItemCommand.d.ts b/amplify/functions/fetchDocuments/node_modules/@aws-sdk/client-dynamodb/dist-types/commands/GetItemCommand.d.ts new file mode 100644 index 0000000..b5e2dfa --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@aws-sdk/client-dynamodb/dist-types/commands/GetItemCommand.d.ts @@ -0,0 +1,255 @@ +import { Command as $Command } from "@smithy/smithy-client"; +import { MetadataBearer as __MetadataBearer } from "@smithy/types"; +import { DynamoDBClientResolvedConfig, ServiceInputTypes, ServiceOutputTypes } from "../DynamoDBClient"; +import { GetItemInput, GetItemOutput } from "../models/models_0"; +/** + * @public + */ +export type { __MetadataBearer }; +export { $Command }; +/** + * @public + * + * The input for {@link GetItemCommand}. + */ +export interface GetItemCommandInput extends GetItemInput { +} +/** + * @public + * + * The output of {@link GetItemCommand}. + */ +export interface GetItemCommandOutput extends GetItemOutput, __MetadataBearer { +} +declare const GetItemCommand_base: { + new (input: GetItemCommandInput): import("@smithy/smithy-client").CommandImpl; + new (__0_0: GetItemCommandInput): import("@smithy/smithy-client").CommandImpl; + getEndpointParameterInstructions(): import("@smithy/middleware-endpoint").EndpointParameterInstructions; +}; +/** + *

The GetItem operation returns a set of attributes for the item with the + * given primary key. If there is no matching item, GetItem does not return + * any data and there will be no Item element in the response.

+ *

+ * GetItem provides an eventually consistent read by default. If your + * application requires a strongly consistent read, set ConsistentRead to + * true. Although a strongly consistent read might take more time than an + * eventually consistent read, it always returns the last updated value.

+ * @example + * Use a bare-bones client and the command you need to make an API call. + * ```javascript + * import { DynamoDBClient, GetItemCommand } from "@aws-sdk/client-dynamodb"; // ES Modules import + * // const { DynamoDBClient, GetItemCommand } = require("@aws-sdk/client-dynamodb"); // CommonJS import + * const client = new DynamoDBClient(config); + * const input = { // GetItemInput + * TableName: "STRING_VALUE", // required + * Key: { // Key // required + * "": { // AttributeValue Union: only one key present + * S: "STRING_VALUE", + * N: "STRING_VALUE", + * B: new Uint8Array(), // e.g. Buffer.from("") or new TextEncoder().encode("") + * SS: [ // StringSetAttributeValue + * "STRING_VALUE", + * ], + * NS: [ // NumberSetAttributeValue + * "STRING_VALUE", + * ], + * BS: [ // BinarySetAttributeValue + * new Uint8Array(), // e.g. Buffer.from("") or new TextEncoder().encode("") + * ], + * M: { // MapAttributeValue + * "": {// Union: only one key present + * S: "STRING_VALUE", + * N: "STRING_VALUE", + * B: new Uint8Array(), // e.g. Buffer.from("") or new TextEncoder().encode("") + * SS: [ + * "STRING_VALUE", + * ], + * NS: [ + * "STRING_VALUE", + * ], + * BS: [ + * new Uint8Array(), // e.g. 
Buffer.from("") or new TextEncoder().encode("") + * ], + * M: { + * "": "", + * }, + * L: [ // ListAttributeValue + * "", + * ], + * NULL: true || false, + * BOOL: true || false, + * }, + * }, + * L: [ + * "", + * ], + * NULL: true || false, + * BOOL: true || false, + * }, + * }, + * AttributesToGet: [ // AttributeNameList + * "STRING_VALUE", + * ], + * ConsistentRead: true || false, + * ReturnConsumedCapacity: "INDEXES" || "TOTAL" || "NONE", + * ProjectionExpression: "STRING_VALUE", + * ExpressionAttributeNames: { // ExpressionAttributeNameMap + * "": "STRING_VALUE", + * }, + * }; + * const command = new GetItemCommand(input); + * const response = await client.send(command); + * // { // GetItemOutput + * // Item: { // AttributeMap + * // "": { // AttributeValue Union: only one key present + * // S: "STRING_VALUE", + * // N: "STRING_VALUE", + * // B: new Uint8Array(), + * // SS: [ // StringSetAttributeValue + * // "STRING_VALUE", + * // ], + * // NS: [ // NumberSetAttributeValue + * // "STRING_VALUE", + * // ], + * // BS: [ // BinarySetAttributeValue + * // new Uint8Array(), + * // ], + * // M: { // MapAttributeValue + * // "": {// Union: only one key present + * // S: "STRING_VALUE", + * // N: "STRING_VALUE", + * // B: new Uint8Array(), + * // SS: [ + * // "STRING_VALUE", + * // ], + * // NS: [ + * // "STRING_VALUE", + * // ], + * // BS: [ + * // new Uint8Array(), + * // ], + * // M: { + * // "": "", + * // }, + * // L: [ // ListAttributeValue + * // "", + * // ], + * // NULL: true || false, + * // BOOL: true || false, + * // }, + * // }, + * // L: [ + * // "", + * // ], + * // NULL: true || false, + * // BOOL: true || false, + * // }, + * // }, + * // ConsumedCapacity: { // ConsumedCapacity + * // TableName: "STRING_VALUE", + * // CapacityUnits: Number("double"), + * // ReadCapacityUnits: Number("double"), + * // WriteCapacityUnits: Number("double"), + * // Table: { // Capacity + * // ReadCapacityUnits: Number("double"), + * // WriteCapacityUnits: 
Number("double"), + * // CapacityUnits: Number("double"), + * // }, + * // LocalSecondaryIndexes: { // SecondaryIndexesCapacityMap + * // "": { + * // ReadCapacityUnits: Number("double"), + * // WriteCapacityUnits: Number("double"), + * // CapacityUnits: Number("double"), + * // }, + * // }, + * // GlobalSecondaryIndexes: { + * // "": { + * // ReadCapacityUnits: Number("double"), + * // WriteCapacityUnits: Number("double"), + * // CapacityUnits: Number("double"), + * // }, + * // }, + * // }, + * // }; + * + * ``` + * + * @param GetItemCommandInput - {@link GetItemCommandInput} + * @returns {@link GetItemCommandOutput} + * @see {@link GetItemCommandInput} for command's `input` shape. + * @see {@link GetItemCommandOutput} for command's `response` shape. + * @see {@link DynamoDBClientResolvedConfig | config} for DynamoDBClient's `config` shape. + * + * @throws {@link InternalServerError} (server fault) + *

An error occurred on the server side.

+ * + * @throws {@link InvalidEndpointException} (client fault) + * + * @throws {@link ProvisionedThroughputExceededException} (client fault) + *

Your request rate is too high. The Amazon Web Services SDKs for DynamoDB + * automatically retry requests that receive this exception. Your request is eventually + * successful, unless your retry queue is too large to finish. Reduce the frequency of + * requests and use exponential backoff. For more information, go to Error Retries and Exponential Backoff in the Amazon DynamoDB Developer Guide.

+ * + * @throws {@link RequestLimitExceeded} (client fault) + *

Throughput exceeds the current throughput quota for your account. Please contact + * Amazon Web ServicesSupport to request a + * quota increase.

+ * + * @throws {@link ResourceNotFoundException} (client fault) + *

The operation tried to access a nonexistent table or index. The resource might not + * be specified correctly, or its status might not be ACTIVE.

+ * + * @throws {@link DynamoDBServiceException} + *

Base exception class for all service exceptions from DynamoDB service.

+ * + * + * @example To read an item from a table + * ```javascript + * // This example retrieves an item from the Music table. The table has a partition key and a sort key (Artist and SongTitle), so you must specify both of these attributes. + * const input = { + * Key: { + * Artist: { + * S: "Acme Band" + * }, + * SongTitle: { + * S: "Happy Day" + * } + * }, + * TableName: "Music" + * }; + * const command = new GetItemCommand(input); + * const response = await client.send(command); + * /* response is + * { + * Item: { + * AlbumTitle: { + * S: "Songs About Life" + * }, + * Artist: { + * S: "Acme Band" + * }, + * SongTitle: { + * S: "Happy Day" + * } + * } + * } + * *\/ + * ``` + * + * @public + */ +export declare class GetItemCommand extends GetItemCommand_base { + /** @internal type navigation helper, not in runtime. */ + protected static __types: { + api: { + input: GetItemInput; + output: GetItemOutput; + }; + sdk: { + input: GetItemCommandInput; + output: GetItemCommandOutput; + }; + }; +} diff --git a/amplify/functions/fetchDocuments/node_modules/@aws-sdk/client-dynamodb/dist-types/commands/GetResourcePolicyCommand.d.ts b/amplify/functions/fetchDocuments/node_modules/@aws-sdk/client-dynamodb/dist-types/commands/GetResourcePolicyCommand.d.ts new file mode 100644 index 0000000..9544c1a --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@aws-sdk/client-dynamodb/dist-types/commands/GetResourcePolicyCommand.d.ts @@ -0,0 +1,121 @@ +import { Command as $Command } from "@smithy/smithy-client"; +import { MetadataBearer as __MetadataBearer } from "@smithy/types"; +import { DynamoDBClientResolvedConfig, ServiceInputTypes, ServiceOutputTypes } from "../DynamoDBClient"; +import { GetResourcePolicyInput, GetResourcePolicyOutput } from "../models/models_0"; +/** + * @public + */ +export type { __MetadataBearer }; +export { $Command }; +/** + * @public + * + * The input for {@link GetResourcePolicyCommand}. 
+ */ +export interface GetResourcePolicyCommandInput extends GetResourcePolicyInput { +} +/** + * @public + * + * The output of {@link GetResourcePolicyCommand}. + */ +export interface GetResourcePolicyCommandOutput extends GetResourcePolicyOutput, __MetadataBearer { +} +declare const GetResourcePolicyCommand_base: { + new (input: GetResourcePolicyCommandInput): import("@smithy/smithy-client").CommandImpl; + new (__0_0: GetResourcePolicyCommandInput): import("@smithy/smithy-client").CommandImpl; + getEndpointParameterInstructions(): import("@smithy/middleware-endpoint").EndpointParameterInstructions; +}; +/** + *

Returns the resource-based policy document attached to the resource, which can be a + * table or stream, in JSON format.

+ *

+ * GetResourcePolicy follows an + * eventually consistent + * model. The following list + * describes the outcomes when you issue the GetResourcePolicy request + * immediately after issuing another request:

+ *
    + *
  • + *

    If you issue a GetResourcePolicy request immediately after a + * PutResourcePolicy request, DynamoDB might return a + * PolicyNotFoundException.

    + *
  • + *
  • + *

    If you issue a GetResourcePolicyrequest immediately after a + * DeleteResourcePolicy request, DynamoDB might return + * the policy that was present before the deletion request.

    + *
  • + *
  • + *

    If you issue a GetResourcePolicy request immediately after a + * CreateTable request, which includes a resource-based policy, + * DynamoDB might return a ResourceNotFoundException or + * a PolicyNotFoundException.

    + *
  • + *
+ *

Because GetResourcePolicy uses an eventually + * consistent query, the metadata for your policy or table might not be + * available at that moment. Wait for a few seconds, and then retry the + * GetResourcePolicy request.

+ *

After a GetResourcePolicy request returns a policy created using the + * PutResourcePolicy request, the policy will be applied in the + * authorization of requests to the resource. Because this process is eventually + * consistent, it will take some time to apply the policy to all requests to a resource. + * Policies that you attach while creating a table using the CreateTable + * request will always be applied to all requests for that table.

+ * @example + * Use a bare-bones client and the command you need to make an API call. + * ```javascript + * import { DynamoDBClient, GetResourcePolicyCommand } from "@aws-sdk/client-dynamodb"; // ES Modules import + * // const { DynamoDBClient, GetResourcePolicyCommand } = require("@aws-sdk/client-dynamodb"); // CommonJS import + * const client = new DynamoDBClient(config); + * const input = { // GetResourcePolicyInput + * ResourceArn: "STRING_VALUE", // required + * }; + * const command = new GetResourcePolicyCommand(input); + * const response = await client.send(command); + * // { // GetResourcePolicyOutput + * // Policy: "STRING_VALUE", + * // RevisionId: "STRING_VALUE", + * // }; + * + * ``` + * + * @param GetResourcePolicyCommandInput - {@link GetResourcePolicyCommandInput} + * @returns {@link GetResourcePolicyCommandOutput} + * @see {@link GetResourcePolicyCommandInput} for command's `input` shape. + * @see {@link GetResourcePolicyCommandOutput} for command's `response` shape. + * @see {@link DynamoDBClientResolvedConfig | config} for DynamoDBClient's `config` shape. + * + * @throws {@link InternalServerError} (server fault) + *

An error occurred on the server side.

+ * + * @throws {@link InvalidEndpointException} (client fault) + * + * @throws {@link PolicyNotFoundException} (client fault) + *

The operation tried to access a nonexistent resource-based policy.

+ *

If you specified an ExpectedRevisionId, it's possible that a policy is present for the resource but its revision ID didn't match the expected value.

+ * + * @throws {@link ResourceNotFoundException} (client fault) + *

The operation tried to access a nonexistent table or index. The resource might not + * be specified correctly, or its status might not be ACTIVE.

+ * + * @throws {@link DynamoDBServiceException} + *

Base exception class for all service exceptions from DynamoDB service.

+ * + * + * @public + */ +export declare class GetResourcePolicyCommand extends GetResourcePolicyCommand_base { + /** @internal type navigation helper, not in runtime. */ + protected static __types: { + api: { + input: GetResourcePolicyInput; + output: GetResourcePolicyOutput; + }; + sdk: { + input: GetResourcePolicyCommandInput; + output: GetResourcePolicyCommandOutput; + }; + }; +} diff --git a/amplify/functions/fetchDocuments/node_modules/@aws-sdk/client-dynamodb/dist-types/commands/ImportTableCommand.d.ts b/amplify/functions/fetchDocuments/node_modules/@aws-sdk/client-dynamodb/dist-types/commands/ImportTableCommand.d.ts new file mode 100644 index 0000000..48a37a1 --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@aws-sdk/client-dynamodb/dist-types/commands/ImportTableCommand.d.ts @@ -0,0 +1,271 @@ +import { Command as $Command } from "@smithy/smithy-client"; +import { MetadataBearer as __MetadataBearer } from "@smithy/types"; +import { DynamoDBClientResolvedConfig, ServiceInputTypes, ServiceOutputTypes } from "../DynamoDBClient"; +import { ImportTableInput, ImportTableOutput } from "../models/models_0"; +/** + * @public + */ +export type { __MetadataBearer }; +export { $Command }; +/** + * @public + * + * The input for {@link ImportTableCommand}. + */ +export interface ImportTableCommandInput extends ImportTableInput { +} +/** + * @public + * + * The output of {@link ImportTableCommand}. + */ +export interface ImportTableCommandOutput extends ImportTableOutput, __MetadataBearer { +} +declare const ImportTableCommand_base: { + new (input: ImportTableCommandInput): import("@smithy/smithy-client").CommandImpl; + new (__0_0: ImportTableCommandInput): import("@smithy/smithy-client").CommandImpl; + getEndpointParameterInstructions(): import("@smithy/middleware-endpoint").EndpointParameterInstructions; +}; +/** + *

Imports table data from an S3 bucket.

+ * @example + * Use a bare-bones client and the command you need to make an API call. + * ```javascript + * import { DynamoDBClient, ImportTableCommand } from "@aws-sdk/client-dynamodb"; // ES Modules import + * // const { DynamoDBClient, ImportTableCommand } = require("@aws-sdk/client-dynamodb"); // CommonJS import + * const client = new DynamoDBClient(config); + * const input = { // ImportTableInput + * ClientToken: "STRING_VALUE", + * S3BucketSource: { // S3BucketSource + * S3BucketOwner: "STRING_VALUE", + * S3Bucket: "STRING_VALUE", // required + * S3KeyPrefix: "STRING_VALUE", + * }, + * InputFormat: "DYNAMODB_JSON" || "ION" || "CSV", // required + * InputFormatOptions: { // InputFormatOptions + * Csv: { // CsvOptions + * Delimiter: "STRING_VALUE", + * HeaderList: [ // CsvHeaderList + * "STRING_VALUE", + * ], + * }, + * }, + * InputCompressionType: "GZIP" || "ZSTD" || "NONE", + * TableCreationParameters: { // TableCreationParameters + * TableName: "STRING_VALUE", // required + * AttributeDefinitions: [ // AttributeDefinitions // required + * { // AttributeDefinition + * AttributeName: "STRING_VALUE", // required + * AttributeType: "S" || "N" || "B", // required + * }, + * ], + * KeySchema: [ // KeySchema // required + * { // KeySchemaElement + * AttributeName: "STRING_VALUE", // required + * KeyType: "HASH" || "RANGE", // required + * }, + * ], + * BillingMode: "PROVISIONED" || "PAY_PER_REQUEST", + * ProvisionedThroughput: { // ProvisionedThroughput + * ReadCapacityUnits: Number("long"), // required + * WriteCapacityUnits: Number("long"), // required + * }, + * OnDemandThroughput: { // OnDemandThroughput + * MaxReadRequestUnits: Number("long"), + * MaxWriteRequestUnits: Number("long"), + * }, + * SSESpecification: { // SSESpecification + * Enabled: true || false, + * SSEType: "AES256" || "KMS", + * KMSMasterKeyId: "STRING_VALUE", + * }, + * GlobalSecondaryIndexes: [ // GlobalSecondaryIndexList + * { // GlobalSecondaryIndex + * IndexName: "STRING_VALUE", // 
required + * KeySchema: [ // required + * { + * AttributeName: "STRING_VALUE", // required + * KeyType: "HASH" || "RANGE", // required + * }, + * ], + * Projection: { // Projection + * ProjectionType: "ALL" || "KEYS_ONLY" || "INCLUDE", + * NonKeyAttributes: [ // NonKeyAttributeNameList + * "STRING_VALUE", + * ], + * }, + * ProvisionedThroughput: { + * ReadCapacityUnits: Number("long"), // required + * WriteCapacityUnits: Number("long"), // required + * }, + * OnDemandThroughput: { + * MaxReadRequestUnits: Number("long"), + * MaxWriteRequestUnits: Number("long"), + * }, + * WarmThroughput: { // WarmThroughput + * ReadUnitsPerSecond: Number("long"), + * WriteUnitsPerSecond: Number("long"), + * }, + * }, + * ], + * }, + * }; + * const command = new ImportTableCommand(input); + * const response = await client.send(command); + * // { // ImportTableOutput + * // ImportTableDescription: { // ImportTableDescription + * // ImportArn: "STRING_VALUE", + * // ImportStatus: "IN_PROGRESS" || "COMPLETED" || "CANCELLING" || "CANCELLED" || "FAILED", + * // TableArn: "STRING_VALUE", + * // TableId: "STRING_VALUE", + * // ClientToken: "STRING_VALUE", + * // S3BucketSource: { // S3BucketSource + * // S3BucketOwner: "STRING_VALUE", + * // S3Bucket: "STRING_VALUE", // required + * // S3KeyPrefix: "STRING_VALUE", + * // }, + * // ErrorCount: Number("long"), + * // CloudWatchLogGroupArn: "STRING_VALUE", + * // InputFormat: "DYNAMODB_JSON" || "ION" || "CSV", + * // InputFormatOptions: { // InputFormatOptions + * // Csv: { // CsvOptions + * // Delimiter: "STRING_VALUE", + * // HeaderList: [ // CsvHeaderList + * // "STRING_VALUE", + * // ], + * // }, + * // }, + * // InputCompressionType: "GZIP" || "ZSTD" || "NONE", + * // TableCreationParameters: { // TableCreationParameters + * // TableName: "STRING_VALUE", // required + * // AttributeDefinitions: [ // AttributeDefinitions // required + * // { // AttributeDefinition + * // AttributeName: "STRING_VALUE", // required + * // AttributeType: 
"S" || "N" || "B", // required + * // }, + * // ], + * // KeySchema: [ // KeySchema // required + * // { // KeySchemaElement + * // AttributeName: "STRING_VALUE", // required + * // KeyType: "HASH" || "RANGE", // required + * // }, + * // ], + * // BillingMode: "PROVISIONED" || "PAY_PER_REQUEST", + * // ProvisionedThroughput: { // ProvisionedThroughput + * // ReadCapacityUnits: Number("long"), // required + * // WriteCapacityUnits: Number("long"), // required + * // }, + * // OnDemandThroughput: { // OnDemandThroughput + * // MaxReadRequestUnits: Number("long"), + * // MaxWriteRequestUnits: Number("long"), + * // }, + * // SSESpecification: { // SSESpecification + * // Enabled: true || false, + * // SSEType: "AES256" || "KMS", + * // KMSMasterKeyId: "STRING_VALUE", + * // }, + * // GlobalSecondaryIndexes: [ // GlobalSecondaryIndexList + * // { // GlobalSecondaryIndex + * // IndexName: "STRING_VALUE", // required + * // KeySchema: [ // required + * // { + * // AttributeName: "STRING_VALUE", // required + * // KeyType: "HASH" || "RANGE", // required + * // }, + * // ], + * // Projection: { // Projection + * // ProjectionType: "ALL" || "KEYS_ONLY" || "INCLUDE", + * // NonKeyAttributes: [ // NonKeyAttributeNameList + * // "STRING_VALUE", + * // ], + * // }, + * // ProvisionedThroughput: { + * // ReadCapacityUnits: Number("long"), // required + * // WriteCapacityUnits: Number("long"), // required + * // }, + * // OnDemandThroughput: { + * // MaxReadRequestUnits: Number("long"), + * // MaxWriteRequestUnits: Number("long"), + * // }, + * // WarmThroughput: { // WarmThroughput + * // ReadUnitsPerSecond: Number("long"), + * // WriteUnitsPerSecond: Number("long"), + * // }, + * // }, + * // ], + * // }, + * // StartTime: new Date("TIMESTAMP"), + * // EndTime: new Date("TIMESTAMP"), + * // ProcessedSizeBytes: Number("long"), + * // ProcessedItemCount: Number("long"), + * // ImportedItemCount: Number("long"), + * // FailureCode: "STRING_VALUE", + * // FailureMessage: 
"STRING_VALUE", + * // }, + * // }; + * + * ``` + * + * @param ImportTableCommandInput - {@link ImportTableCommandInput} + * @returns {@link ImportTableCommandOutput} + * @see {@link ImportTableCommandInput} for command's `input` shape. + * @see {@link ImportTableCommandOutput} for command's `response` shape. + * @see {@link DynamoDBClientResolvedConfig | config} for DynamoDBClient's `config` shape. + * + * @throws {@link ImportConflictException} (client fault) + *

+ * There was a conflict when importing from the specified S3 source. + * This can occur when the current import conflicts with a previous import request + * that had the same client token. + *

+ * + * @throws {@link LimitExceededException} (client fault) + *

There is no limit to the number of daily on-demand backups that can be taken.

+ *

For most purposes, up to 500 simultaneous table operations are allowed per account. These operations + * include CreateTable, UpdateTable, + * DeleteTable,UpdateTimeToLive, + * RestoreTableFromBackup, and RestoreTableToPointInTime.

+ *

When you are creating a table with one or more secondary + * indexes, you can have up to 250 such requests running at a time. However, if the table or + * index specifications are complex, then DynamoDB might temporarily reduce the number + * of concurrent operations.

+ *

When importing into DynamoDB, up to 50 simultaneous import table operations are allowed per account.

+ *

There is a soft account quota of 2,500 tables.

+ *

GetRecords was called with a value of more than 1000 for the limit request parameter.

+ *

More than 2 processes are reading from the same streams shard at the same time. Exceeding + * this limit may result in request throttling.

+ * + * @throws {@link ResourceInUseException} (client fault) + *

The operation conflicts with the resource's availability. For example:

+ *
    + *
  • + *

    You attempted to recreate an existing table.

    + *
  • + *
  • + *

    You tried to delete a table currently in the CREATING state.

    + *
  • + *
  • + *

    You tried to update a resource that was already being updated.

    + *
  • + *
+ *

When appropriate, wait for the ongoing update to complete and attempt the request again.

+ * + * @throws {@link DynamoDBServiceException} + *

Base exception class for all service exceptions from DynamoDB service.

+ * + * + * @public + */ +export declare class ImportTableCommand extends ImportTableCommand_base { + /** @internal type navigation helper, not in runtime. */ + protected static __types: { + api: { + input: ImportTableInput; + output: ImportTableOutput; + }; + sdk: { + input: ImportTableCommandInput; + output: ImportTableCommandOutput; + }; + }; +} diff --git a/amplify/functions/fetchDocuments/node_modules/@aws-sdk/client-dynamodb/dist-types/commands/ListBackupsCommand.d.ts b/amplify/functions/fetchDocuments/node_modules/@aws-sdk/client-dynamodb/dist-types/commands/ListBackupsCommand.d.ts new file mode 100644 index 0000000..50c70da --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@aws-sdk/client-dynamodb/dist-types/commands/ListBackupsCommand.d.ts @@ -0,0 +1,107 @@ +import { Command as $Command } from "@smithy/smithy-client"; +import { MetadataBearer as __MetadataBearer } from "@smithy/types"; +import { DynamoDBClientResolvedConfig, ServiceInputTypes, ServiceOutputTypes } from "../DynamoDBClient"; +import { ListBackupsInput, ListBackupsOutput } from "../models/models_0"; +/** + * @public + */ +export type { __MetadataBearer }; +export { $Command }; +/** + * @public + * + * The input for {@link ListBackupsCommand}. + */ +export interface ListBackupsCommandInput extends ListBackupsInput { +} +/** + * @public + * + * The output of {@link ListBackupsCommand}. + */ +export interface ListBackupsCommandOutput extends ListBackupsOutput, __MetadataBearer { +} +declare const ListBackupsCommand_base: { + new (input: ListBackupsCommandInput): import("@smithy/smithy-client").CommandImpl; + new (...[input]: [] | [ListBackupsCommandInput]): import("@smithy/smithy-client").CommandImpl; + getEndpointParameterInstructions(): import("@smithy/middleware-endpoint").EndpointParameterInstructions; +}; +/** + *

List DynamoDB backups that are associated with an Amazon Web Services account and + * weren't made with Amazon Web Services Backup. To list these backups for a given table, + * specify TableName. ListBackups returns a paginated list of + * results with at most 1 MB worth of items in a page. You can also specify a maximum + * number of entries to be returned in a page.

+ *

In the request, start time is inclusive, but end time is exclusive. Note that these + * boundaries are for the time at which the original backup was requested.

+ *

You can call ListBackups a maximum of five times per second.

+ *

If you want to retrieve the complete list of backups made with Amazon Web Services + * Backup, use the Amazon Web Services Backup + * list API. + *

+ * @example + * Use a bare-bones client and the command you need to make an API call. + * ```javascript + * import { DynamoDBClient, ListBackupsCommand } from "@aws-sdk/client-dynamodb"; // ES Modules import + * // const { DynamoDBClient, ListBackupsCommand } = require("@aws-sdk/client-dynamodb"); // CommonJS import + * const client = new DynamoDBClient(config); + * const input = { // ListBackupsInput + * TableName: "STRING_VALUE", + * Limit: Number("int"), + * TimeRangeLowerBound: new Date("TIMESTAMP"), + * TimeRangeUpperBound: new Date("TIMESTAMP"), + * ExclusiveStartBackupArn: "STRING_VALUE", + * BackupType: "USER" || "SYSTEM" || "AWS_BACKUP" || "ALL", + * }; + * const command = new ListBackupsCommand(input); + * const response = await client.send(command); + * // { // ListBackupsOutput + * // BackupSummaries: [ // BackupSummaries + * // { // BackupSummary + * // TableName: "STRING_VALUE", + * // TableId: "STRING_VALUE", + * // TableArn: "STRING_VALUE", + * // BackupArn: "STRING_VALUE", + * // BackupName: "STRING_VALUE", + * // BackupCreationDateTime: new Date("TIMESTAMP"), + * // BackupExpiryDateTime: new Date("TIMESTAMP"), + * // BackupStatus: "CREATING" || "DELETED" || "AVAILABLE", + * // BackupType: "USER" || "SYSTEM" || "AWS_BACKUP", + * // BackupSizeBytes: Number("long"), + * // }, + * // ], + * // LastEvaluatedBackupArn: "STRING_VALUE", + * // }; + * + * ``` + * + * @param ListBackupsCommandInput - {@link ListBackupsCommandInput} + * @returns {@link ListBackupsCommandOutput} + * @see {@link ListBackupsCommandInput} for command's `input` shape. + * @see {@link ListBackupsCommandOutput} for command's `response` shape. + * @see {@link DynamoDBClientResolvedConfig | config} for DynamoDBClient's `config` shape. + * + * @throws {@link InternalServerError} (server fault) + *

An error occurred on the server side.

+ * + * @throws {@link InvalidEndpointException} (client fault) + * + * @throws {@link DynamoDBServiceException} + *

Base exception class for all service exceptions from DynamoDB service.

+ * + * + * @public + */ +export declare class ListBackupsCommand extends ListBackupsCommand_base { + /** @internal type navigation helper, not in runtime. */ + protected static __types: { + api: { + input: ListBackupsInput; + output: ListBackupsOutput; + }; + sdk: { + input: ListBackupsCommandInput; + output: ListBackupsCommandOutput; + }; + }; +} diff --git a/amplify/functions/fetchDocuments/node_modules/@aws-sdk/client-dynamodb/dist-types/commands/ListContributorInsightsCommand.d.ts b/amplify/functions/fetchDocuments/node_modules/@aws-sdk/client-dynamodb/dist-types/commands/ListContributorInsightsCommand.d.ts new file mode 100644 index 0000000..9d508c7 --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@aws-sdk/client-dynamodb/dist-types/commands/ListContributorInsightsCommand.d.ts @@ -0,0 +1,89 @@ +import { Command as $Command } from "@smithy/smithy-client"; +import { MetadataBearer as __MetadataBearer } from "@smithy/types"; +import { DynamoDBClientResolvedConfig, ServiceInputTypes, ServiceOutputTypes } from "../DynamoDBClient"; +import { ListContributorInsightsInput, ListContributorInsightsOutput } from "../models/models_0"; +/** + * @public + */ +export type { __MetadataBearer }; +export { $Command }; +/** + * @public + * + * The input for {@link ListContributorInsightsCommand}. + */ +export interface ListContributorInsightsCommandInput extends ListContributorInsightsInput { +} +/** + * @public + * + * The output of {@link ListContributorInsightsCommand}. 
+ */ +export interface ListContributorInsightsCommandOutput extends ListContributorInsightsOutput, __MetadataBearer { +} +declare const ListContributorInsightsCommand_base: { + new (input: ListContributorInsightsCommandInput): import("@smithy/smithy-client").CommandImpl; + new (...[input]: [] | [ListContributorInsightsCommandInput]): import("@smithy/smithy-client").CommandImpl; + getEndpointParameterInstructions(): import("@smithy/middleware-endpoint").EndpointParameterInstructions; +}; +/** + *

Returns a list of ContributorInsightsSummary for a table and all its global secondary + * indexes.

+ * @example + * Use a bare-bones client and the command you need to make an API call. + * ```javascript + * import { DynamoDBClient, ListContributorInsightsCommand } from "@aws-sdk/client-dynamodb"; // ES Modules import + * // const { DynamoDBClient, ListContributorInsightsCommand } = require("@aws-sdk/client-dynamodb"); // CommonJS import + * const client = new DynamoDBClient(config); + * const input = { // ListContributorInsightsInput + * TableName: "STRING_VALUE", + * NextToken: "STRING_VALUE", + * MaxResults: Number("int"), + * }; + * const command = new ListContributorInsightsCommand(input); + * const response = await client.send(command); + * // { // ListContributorInsightsOutput + * // ContributorInsightsSummaries: [ // ContributorInsightsSummaries + * // { // ContributorInsightsSummary + * // TableName: "STRING_VALUE", + * // IndexName: "STRING_VALUE", + * // ContributorInsightsStatus: "ENABLING" || "ENABLED" || "DISABLING" || "DISABLED" || "FAILED", + * // }, + * // ], + * // NextToken: "STRING_VALUE", + * // }; + * + * ``` + * + * @param ListContributorInsightsCommandInput - {@link ListContributorInsightsCommandInput} + * @returns {@link ListContributorInsightsCommandOutput} + * @see {@link ListContributorInsightsCommandInput} for command's `input` shape. + * @see {@link ListContributorInsightsCommandOutput} for command's `response` shape. + * @see {@link DynamoDBClientResolvedConfig | config} for DynamoDBClient's `config` shape. + * + * @throws {@link InternalServerError} (server fault) + *

An error occurred on the server side.

+ * + * @throws {@link ResourceNotFoundException} (client fault) + *

The operation tried to access a nonexistent table or index. The resource might not + * be specified correctly, or its status might not be ACTIVE.

+ * + * @throws {@link DynamoDBServiceException} + *

Base exception class for all service exceptions from DynamoDB service.

+ * + * + * @public + */ +export declare class ListContributorInsightsCommand extends ListContributorInsightsCommand_base { + /** @internal type navigation helper, not in runtime. */ + protected static __types: { + api: { + input: ListContributorInsightsInput; + output: ListContributorInsightsOutput; + }; + sdk: { + input: ListContributorInsightsCommandInput; + output: ListContributorInsightsCommandOutput; + }; + }; +} diff --git a/amplify/functions/fetchDocuments/node_modules/@aws-sdk/client-dynamodb/dist-types/commands/ListExportsCommand.d.ts b/amplify/functions/fetchDocuments/node_modules/@aws-sdk/client-dynamodb/dist-types/commands/ListExportsCommand.d.ts new file mode 100644 index 0000000..83f5476 --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@aws-sdk/client-dynamodb/dist-types/commands/ListExportsCommand.d.ts @@ -0,0 +1,100 @@ +import { Command as $Command } from "@smithy/smithy-client"; +import { MetadataBearer as __MetadataBearer } from "@smithy/types"; +import { DynamoDBClientResolvedConfig, ServiceInputTypes, ServiceOutputTypes } from "../DynamoDBClient"; +import { ListExportsInput, ListExportsOutput } from "../models/models_0"; +/** + * @public + */ +export type { __MetadataBearer }; +export { $Command }; +/** + * @public + * + * The input for {@link ListExportsCommand}. + */ +export interface ListExportsCommandInput extends ListExportsInput { +} +/** + * @public + * + * The output of {@link ListExportsCommand}. + */ +export interface ListExportsCommandOutput extends ListExportsOutput, __MetadataBearer { +} +declare const ListExportsCommand_base: { + new (input: ListExportsCommandInput): import("@smithy/smithy-client").CommandImpl; + new (...[input]: [] | [ListExportsCommandInput]): import("@smithy/smithy-client").CommandImpl; + getEndpointParameterInstructions(): import("@smithy/middleware-endpoint").EndpointParameterInstructions; +}; +/** + *

Lists completed exports within the past 90 days.

+ * @example + * Use a bare-bones client and the command you need to make an API call. + * ```javascript + * import { DynamoDBClient, ListExportsCommand } from "@aws-sdk/client-dynamodb"; // ES Modules import + * // const { DynamoDBClient, ListExportsCommand } = require("@aws-sdk/client-dynamodb"); // CommonJS import + * const client = new DynamoDBClient(config); + * const input = { // ListExportsInput + * TableArn: "STRING_VALUE", + * MaxResults: Number("int"), + * NextToken: "STRING_VALUE", + * }; + * const command = new ListExportsCommand(input); + * const response = await client.send(command); + * // { // ListExportsOutput + * // ExportSummaries: [ // ExportSummaries + * // { // ExportSummary + * // ExportArn: "STRING_VALUE", + * // ExportStatus: "IN_PROGRESS" || "COMPLETED" || "FAILED", + * // ExportType: "FULL_EXPORT" || "INCREMENTAL_EXPORT", + * // }, + * // ], + * // NextToken: "STRING_VALUE", + * // }; + * + * ``` + * + * @param ListExportsCommandInput - {@link ListExportsCommandInput} + * @returns {@link ListExportsCommandOutput} + * @see {@link ListExportsCommandInput} for command's `input` shape. + * @see {@link ListExportsCommandOutput} for command's `response` shape. + * @see {@link DynamoDBClientResolvedConfig | config} for DynamoDBClient's `config` shape. + * + * @throws {@link InternalServerError} (server fault) + *

An error occurred on the server side.

+ * + * @throws {@link LimitExceededException} (client fault) + *

There is no limit to the number of daily on-demand backups that can be taken.

+ *

For most purposes, up to 500 simultaneous table operations are allowed per account. These operations + * include CreateTable, UpdateTable, + * DeleteTable,UpdateTimeToLive, + * RestoreTableFromBackup, and RestoreTableToPointInTime.

+ *

When you are creating a table with one or more secondary + * indexes, you can have up to 250 such requests running at a time. However, if the table or + * index specifications are complex, then DynamoDB might temporarily reduce the number + * of concurrent operations.

+ *

When importing into DynamoDB, up to 50 simultaneous import table operations are allowed per account.

+ *

There is a soft account quota of 2,500 tables.

+ *

GetRecords was called with a value of more than 1000 for the limit request parameter.

+ *

More than 2 processes are reading from the same streams shard at the same time. Exceeding + * this limit may result in request throttling.

+ * + * @throws {@link DynamoDBServiceException} + *

Base exception class for all service exceptions from DynamoDB service.

+ * + * + * @public + */ +export declare class ListExportsCommand extends ListExportsCommand_base { + /** @internal type navigation helper, not in runtime. */ + protected static __types: { + api: { + input: ListExportsInput; + output: ListExportsOutput; + }; + sdk: { + input: ListExportsCommandInput; + output: ListExportsCommandOutput; + }; + }; +} diff --git a/amplify/functions/fetchDocuments/node_modules/@aws-sdk/client-dynamodb/dist-types/commands/ListGlobalTablesCommand.d.ts b/amplify/functions/fetchDocuments/node_modules/@aws-sdk/client-dynamodb/dist-types/commands/ListGlobalTablesCommand.d.ts new file mode 100644 index 0000000..530e8e4 --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@aws-sdk/client-dynamodb/dist-types/commands/ListGlobalTablesCommand.d.ts @@ -0,0 +1,93 @@ +import { Command as $Command } from "@smithy/smithy-client"; +import { MetadataBearer as __MetadataBearer } from "@smithy/types"; +import { DynamoDBClientResolvedConfig, ServiceInputTypes, ServiceOutputTypes } from "../DynamoDBClient"; +import { ListGlobalTablesInput, ListGlobalTablesOutput } from "../models/models_0"; +/** + * @public + */ +export type { __MetadataBearer }; +export { $Command }; +/** + * @public + * + * The input for {@link ListGlobalTablesCommand}. + */ +export interface ListGlobalTablesCommandInput extends ListGlobalTablesInput { +} +/** + * @public + * + * The output of {@link ListGlobalTablesCommand}. + */ +export interface ListGlobalTablesCommandOutput extends ListGlobalTablesOutput, __MetadataBearer { +} +declare const ListGlobalTablesCommand_base: { + new (input: ListGlobalTablesCommandInput): import("@smithy/smithy-client").CommandImpl; + new (...[input]: [] | [ListGlobalTablesCommandInput]): import("@smithy/smithy-client").CommandImpl; + getEndpointParameterInstructions(): import("@smithy/middleware-endpoint").EndpointParameterInstructions; +}; +/** + *

Lists all global tables that have a replica in the specified Region.

+ * + *

This documentation is for version 2017.11.29 (Legacy) of global tables, which should be avoided for new global tables. Customers should use Global Tables version 2019.11.21 (Current) when possible, because it provides greater flexibility, higher efficiency, and consumes less write capacity than 2017.11.29 (Legacy).

+ *

To determine which version you're using, see Determining the global table version you are using. To update existing global tables from version 2017.11.29 (Legacy) to version 2019.11.21 (Current), see Upgrading global tables.

+ *
+ * @example + * Use a bare-bones client and the command you need to make an API call. + * ```javascript + * import { DynamoDBClient, ListGlobalTablesCommand } from "@aws-sdk/client-dynamodb"; // ES Modules import + * // const { DynamoDBClient, ListGlobalTablesCommand } = require("@aws-sdk/client-dynamodb"); // CommonJS import + * const client = new DynamoDBClient(config); + * const input = { // ListGlobalTablesInput + * ExclusiveStartGlobalTableName: "STRING_VALUE", + * Limit: Number("int"), + * RegionName: "STRING_VALUE", + * }; + * const command = new ListGlobalTablesCommand(input); + * const response = await client.send(command); + * // { // ListGlobalTablesOutput + * // GlobalTables: [ // GlobalTableList + * // { // GlobalTable + * // GlobalTableName: "STRING_VALUE", + * // ReplicationGroup: [ // ReplicaList + * // { // Replica + * // RegionName: "STRING_VALUE", + * // }, + * // ], + * // }, + * // ], + * // LastEvaluatedGlobalTableName: "STRING_VALUE", + * // }; + * + * ``` + * + * @param ListGlobalTablesCommandInput - {@link ListGlobalTablesCommandInput} + * @returns {@link ListGlobalTablesCommandOutput} + * @see {@link ListGlobalTablesCommandInput} for command's `input` shape. + * @see {@link ListGlobalTablesCommandOutput} for command's `response` shape. + * @see {@link DynamoDBClientResolvedConfig | config} for DynamoDBClient's `config` shape. + * + * @throws {@link InternalServerError} (server fault) + *

An error occurred on the server side.

+ * + * @throws {@link InvalidEndpointException} (client fault) + * + * @throws {@link DynamoDBServiceException} + *

Base exception class for all service exceptions from DynamoDB service.

+ * + * + * @public + */ +export declare class ListGlobalTablesCommand extends ListGlobalTablesCommand_base { + /** @internal type navigation helper, not in runtime. */ + protected static __types: { + api: { + input: ListGlobalTablesInput; + output: ListGlobalTablesOutput; + }; + sdk: { + input: ListGlobalTablesCommandInput; + output: ListGlobalTablesCommandOutput; + }; + }; +} diff --git a/amplify/functions/fetchDocuments/node_modules/@aws-sdk/client-dynamodb/dist-types/commands/ListImportsCommand.d.ts b/amplify/functions/fetchDocuments/node_modules/@aws-sdk/client-dynamodb/dist-types/commands/ListImportsCommand.d.ts new file mode 100644 index 0000000..be76088 --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@aws-sdk/client-dynamodb/dist-types/commands/ListImportsCommand.d.ts @@ -0,0 +1,106 @@ +import { Command as $Command } from "@smithy/smithy-client"; +import { MetadataBearer as __MetadataBearer } from "@smithy/types"; +import { DynamoDBClientResolvedConfig, ServiceInputTypes, ServiceOutputTypes } from "../DynamoDBClient"; +import { ListImportsInput, ListImportsOutput } from "../models/models_0"; +/** + * @public + */ +export type { __MetadataBearer }; +export { $Command }; +/** + * @public + * + * The input for {@link ListImportsCommand}. + */ +export interface ListImportsCommandInput extends ListImportsInput { +} +/** + * @public + * + * The output of {@link ListImportsCommand}. + */ +export interface ListImportsCommandOutput extends ListImportsOutput, __MetadataBearer { +} +declare const ListImportsCommand_base: { + new (input: ListImportsCommandInput): import("@smithy/smithy-client").CommandImpl; + new (...[input]: [] | [ListImportsCommandInput]): import("@smithy/smithy-client").CommandImpl; + getEndpointParameterInstructions(): import("@smithy/middleware-endpoint").EndpointParameterInstructions; +}; +/** + *

Lists completed imports within the past 90 days.

+ * @example + * Use a bare-bones client and the command you need to make an API call. + * ```javascript + * import { DynamoDBClient, ListImportsCommand } from "@aws-sdk/client-dynamodb"; // ES Modules import + * // const { DynamoDBClient, ListImportsCommand } = require("@aws-sdk/client-dynamodb"); // CommonJS import + * const client = new DynamoDBClient(config); + * const input = { // ListImportsInput + * TableArn: "STRING_VALUE", + * PageSize: Number("int"), + * NextToken: "STRING_VALUE", + * }; + * const command = new ListImportsCommand(input); + * const response = await client.send(command); + * // { // ListImportsOutput + * // ImportSummaryList: [ // ImportSummaryList + * // { // ImportSummary + * // ImportArn: "STRING_VALUE", + * // ImportStatus: "IN_PROGRESS" || "COMPLETED" || "CANCELLING" || "CANCELLED" || "FAILED", + * // TableArn: "STRING_VALUE", + * // S3BucketSource: { // S3BucketSource + * // S3BucketOwner: "STRING_VALUE", + * // S3Bucket: "STRING_VALUE", // required + * // S3KeyPrefix: "STRING_VALUE", + * // }, + * // CloudWatchLogGroupArn: "STRING_VALUE", + * // InputFormat: "DYNAMODB_JSON" || "ION" || "CSV", + * // StartTime: new Date("TIMESTAMP"), + * // EndTime: new Date("TIMESTAMP"), + * // }, + * // ], + * // NextToken: "STRING_VALUE", + * // }; + * + * ``` + * + * @param ListImportsCommandInput - {@link ListImportsCommandInput} + * @returns {@link ListImportsCommandOutput} + * @see {@link ListImportsCommandInput} for command's `input` shape. + * @see {@link ListImportsCommandOutput} for command's `response` shape. + * @see {@link DynamoDBClientResolvedConfig | config} for DynamoDBClient's `config` shape. + * + * @throws {@link LimitExceededException} (client fault) + *

There is no limit to the number of daily on-demand backups that can be taken.

+ *

For most purposes, up to 500 simultaneous table operations are allowed per account. These operations + * include CreateTable, UpdateTable, + * DeleteTable,UpdateTimeToLive, + * RestoreTableFromBackup, and RestoreTableToPointInTime.

+ *

When you are creating a table with one or more secondary + * indexes, you can have up to 250 such requests running at a time. However, if the table or + * index specifications are complex, then DynamoDB might temporarily reduce the number + * of concurrent operations.

+ *

When importing into DynamoDB, up to 50 simultaneous import table operations are allowed per account.

+ *

There is a soft account quota of 2,500 tables.

+ *

GetRecords was called with a value of more than 1000 for the limit request parameter.

+ *

More than 2 processes are reading from the same streams shard at the same time. Exceeding + * this limit may result in request throttling.

+ * + * @throws {@link DynamoDBServiceException} + *

Base exception class for all service exceptions from DynamoDB service.

+ * + * + * @public + */ +export declare class ListImportsCommand extends ListImportsCommand_base { + /** @internal type navigation helper, not in runtime. */ + protected static __types: { + api: { + input: ListImportsInput; + output: ListImportsOutput; + }; + sdk: { + input: ListImportsCommandInput; + output: ListImportsCommandOutput; + }; + }; +} diff --git a/amplify/functions/fetchDocuments/node_modules/@aws-sdk/client-dynamodb/dist-types/commands/ListTablesCommand.d.ts b/amplify/functions/fetchDocuments/node_modules/@aws-sdk/client-dynamodb/dist-types/commands/ListTablesCommand.d.ts new file mode 100644 index 0000000..394c20f --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@aws-sdk/client-dynamodb/dist-types/commands/ListTablesCommand.d.ts @@ -0,0 +1,101 @@ +import { Command as $Command } from "@smithy/smithy-client"; +import { MetadataBearer as __MetadataBearer } from "@smithy/types"; +import { DynamoDBClientResolvedConfig, ServiceInputTypes, ServiceOutputTypes } from "../DynamoDBClient"; +import { ListTablesInput, ListTablesOutput } from "../models/models_0"; +/** + * @public + */ +export type { __MetadataBearer }; +export { $Command }; +/** + * @public + * + * The input for {@link ListTablesCommand}. + */ +export interface ListTablesCommandInput extends ListTablesInput { +} +/** + * @public + * + * The output of {@link ListTablesCommand}. + */ +export interface ListTablesCommandOutput extends ListTablesOutput, __MetadataBearer { +} +declare const ListTablesCommand_base: { + new (input: ListTablesCommandInput): import("@smithy/smithy-client").CommandImpl; + new (...[input]: [] | [ListTablesCommandInput]): import("@smithy/smithy-client").CommandImpl; + getEndpointParameterInstructions(): import("@smithy/middleware-endpoint").EndpointParameterInstructions; +}; +/** + *

Returns an array of table names associated with the current account and endpoint. The + * output from ListTables is paginated, with each page returning a maximum of + * 100 table names.

+ * @example + * Use a bare-bones client and the command you need to make an API call. + * ```javascript + * import { DynamoDBClient, ListTablesCommand } from "@aws-sdk/client-dynamodb"; // ES Modules import + * // const { DynamoDBClient, ListTablesCommand } = require("@aws-sdk/client-dynamodb"); // CommonJS import + * const client = new DynamoDBClient(config); + * const input = { // ListTablesInput + * ExclusiveStartTableName: "STRING_VALUE", + * Limit: Number("int"), + * }; + * const command = new ListTablesCommand(input); + * const response = await client.send(command); + * // { // ListTablesOutput + * // TableNames: [ // TableNameList + * // "STRING_VALUE", + * // ], + * // LastEvaluatedTableName: "STRING_VALUE", + * // }; + * + * ``` + * + * @param ListTablesCommandInput - {@link ListTablesCommandInput} + * @returns {@link ListTablesCommandOutput} + * @see {@link ListTablesCommandInput} for command's `input` shape. + * @see {@link ListTablesCommandOutput} for command's `response` shape. + * @see {@link DynamoDBClientResolvedConfig | config} for DynamoDBClient's `config` shape. + * + * @throws {@link InternalServerError} (server fault) + *

An error occurred on the server side.

+ * + * @throws {@link InvalidEndpointException} (client fault) + * + * @throws {@link DynamoDBServiceException} + *

Base exception class for all service exceptions from DynamoDB service.

+ * + * + * @example To list tables + * ```javascript + * // This example lists all of the tables associated with the current AWS account and endpoint. + * const input = { /* empty *\/ }; + * const command = new ListTablesCommand(input); + * const response = await client.send(command); + * /* response is + * { + * TableNames: [ + * "Forum", + * "ProductCatalog", + * "Reply", + * "Thread" + * ] + * } + * *\/ + * ``` + * + * @public + */ +export declare class ListTablesCommand extends ListTablesCommand_base { + /** @internal type navigation helper, not in runtime. */ + protected static __types: { + api: { + input: ListTablesInput; + output: ListTablesOutput; + }; + sdk: { + input: ListTablesCommandInput; + output: ListTablesCommandOutput; + }; + }; +} diff --git a/amplify/functions/fetchDocuments/node_modules/@aws-sdk/client-dynamodb/dist-types/commands/ListTagsOfResourceCommand.d.ts b/amplify/functions/fetchDocuments/node_modules/@aws-sdk/client-dynamodb/dist-types/commands/ListTagsOfResourceCommand.d.ts new file mode 100644 index 0000000..c2230f4 --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@aws-sdk/client-dynamodb/dist-types/commands/ListTagsOfResourceCommand.d.ts @@ -0,0 +1,91 @@ +import { Command as $Command } from "@smithy/smithy-client"; +import { MetadataBearer as __MetadataBearer } from "@smithy/types"; +import { DynamoDBClientResolvedConfig, ServiceInputTypes, ServiceOutputTypes } from "../DynamoDBClient"; +import { ListTagsOfResourceInput, ListTagsOfResourceOutput } from "../models/models_0"; +/** + * @public + */ +export type { __MetadataBearer }; +export { $Command }; +/** + * @public + * + * The input for {@link ListTagsOfResourceCommand}. + */ +export interface ListTagsOfResourceCommandInput extends ListTagsOfResourceInput { +} +/** + * @public + * + * The output of {@link ListTagsOfResourceCommand}. 
+ */ +export interface ListTagsOfResourceCommandOutput extends ListTagsOfResourceOutput, __MetadataBearer { +} +declare const ListTagsOfResourceCommand_base: { + new (input: ListTagsOfResourceCommandInput): import("@smithy/smithy-client").CommandImpl; + new (__0_0: ListTagsOfResourceCommandInput): import("@smithy/smithy-client").CommandImpl; + getEndpointParameterInstructions(): import("@smithy/middleware-endpoint").EndpointParameterInstructions; +}; +/** + *

List all tags on an Amazon DynamoDB resource. You can call ListTagsOfResource up to 10 + * times per second, per account.

+ *

For an overview on tagging DynamoDB resources, see Tagging for DynamoDB + * in the Amazon DynamoDB Developer Guide.

+ * @example + * Use a bare-bones client and the command you need to make an API call. + * ```javascript + * import { DynamoDBClient, ListTagsOfResourceCommand } from "@aws-sdk/client-dynamodb"; // ES Modules import + * // const { DynamoDBClient, ListTagsOfResourceCommand } = require("@aws-sdk/client-dynamodb"); // CommonJS import + * const client = new DynamoDBClient(config); + * const input = { // ListTagsOfResourceInput + * ResourceArn: "STRING_VALUE", // required + * NextToken: "STRING_VALUE", + * }; + * const command = new ListTagsOfResourceCommand(input); + * const response = await client.send(command); + * // { // ListTagsOfResourceOutput + * // Tags: [ // TagList + * // { // Tag + * // Key: "STRING_VALUE", // required + * // Value: "STRING_VALUE", // required + * // }, + * // ], + * // NextToken: "STRING_VALUE", + * // }; + * + * ``` + * + * @param ListTagsOfResourceCommandInput - {@link ListTagsOfResourceCommandInput} + * @returns {@link ListTagsOfResourceCommandOutput} + * @see {@link ListTagsOfResourceCommandInput} for command's `input` shape. + * @see {@link ListTagsOfResourceCommandOutput} for command's `response` shape. + * @see {@link DynamoDBClientResolvedConfig | config} for DynamoDBClient's `config` shape. + * + * @throws {@link InternalServerError} (server fault) + *

An error occurred on the server side.

+ * + * @throws {@link InvalidEndpointException} (client fault) + * + * @throws {@link ResourceNotFoundException} (client fault) + *

The operation tried to access a nonexistent table or index. The resource might not + * be specified correctly, or its status might not be ACTIVE.

+ * + * @throws {@link DynamoDBServiceException} + *

Base exception class for all service exceptions from DynamoDB service.

+ * + * + * @public + */ +export declare class ListTagsOfResourceCommand extends ListTagsOfResourceCommand_base { + /** @internal type navigation helper, not in runtime. */ + protected static __types: { + api: { + input: ListTagsOfResourceInput; + output: ListTagsOfResourceOutput; + }; + sdk: { + input: ListTagsOfResourceCommandInput; + output: ListTagsOfResourceCommandOutput; + }; + }; +} diff --git a/amplify/functions/fetchDocuments/node_modules/@aws-sdk/client-dynamodb/dist-types/commands/PutItemCommand.d.ts b/amplify/functions/fetchDocuments/node_modules/@aws-sdk/client-dynamodb/dist-types/commands/PutItemCommand.d.ts new file mode 100644 index 0000000..f9e32df --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@aws-sdk/client-dynamodb/dist-types/commands/PutItemCommand.d.ts @@ -0,0 +1,300 @@ +import { Command as $Command } from "@smithy/smithy-client"; +import { MetadataBearer as __MetadataBearer } from "@smithy/types"; +import { DynamoDBClientResolvedConfig, ServiceInputTypes, ServiceOutputTypes } from "../DynamoDBClient"; +import { PutItemInput, PutItemOutput } from "../models/models_0"; +/** + * @public + */ +export type { __MetadataBearer }; +export { $Command }; +/** + * @public + * + * The input for {@link PutItemCommand}. + */ +export interface PutItemCommandInput extends PutItemInput { +} +/** + * @public + * + * The output of {@link PutItemCommand}. + */ +export interface PutItemCommandOutput extends PutItemOutput, __MetadataBearer { +} +declare const PutItemCommand_base: { + new (input: PutItemCommandInput): import("@smithy/smithy-client").CommandImpl; + new (__0_0: PutItemCommandInput): import("@smithy/smithy-client").CommandImpl; + getEndpointParameterInstructions(): import("@smithy/middleware-endpoint").EndpointParameterInstructions; +}; +/** + *

Creates a new item, or replaces an old item with a new item. If an item that has the + * same primary key as the new item already exists in the specified table, the new item + * completely replaces the existing item. You can perform a conditional put operation (add + * a new item if one with the specified primary key doesn't exist), or replace an existing + * item if it has certain attribute values. You can return the item's attribute values in + * the same operation, using the ReturnValues parameter.

+ *

When you add an item, the primary key attributes are the only required attributes.

+ *

Empty String and Binary attribute values are allowed. Attribute values of type String + * and Binary must have a length greater than zero if the attribute is used as a key + * attribute for a table or index. Set type attributes cannot be empty.

+ *

Invalid Requests with empty values will be rejected with a + * ValidationException exception.

+ * + *

To prevent a new item from replacing an existing item, use a conditional + * expression that contains the attribute_not_exists function with the + * name of the attribute being used as the partition key for the table. Since every + * record must contain that attribute, the attribute_not_exists function + * will only succeed if no matching item exists.

+ *
+ *

For more information about PutItem, see Working with + * Items in the Amazon DynamoDB Developer Guide.

+ * @example + * Use a bare-bones client and the command you need to make an API call. + * ```javascript + * import { DynamoDBClient, PutItemCommand } from "@aws-sdk/client-dynamodb"; // ES Modules import + * // const { DynamoDBClient, PutItemCommand } = require("@aws-sdk/client-dynamodb"); // CommonJS import + * const client = new DynamoDBClient(config); + * const input = { // PutItemInput + * TableName: "STRING_VALUE", // required + * Item: { // PutItemInputAttributeMap // required + * "": { // AttributeValue Union: only one key present + * S: "STRING_VALUE", + * N: "STRING_VALUE", + * B: new Uint8Array(), // e.g. Buffer.from("") or new TextEncoder().encode("") + * SS: [ // StringSetAttributeValue + * "STRING_VALUE", + * ], + * NS: [ // NumberSetAttributeValue + * "STRING_VALUE", + * ], + * BS: [ // BinarySetAttributeValue + * new Uint8Array(), // e.g. Buffer.from("") or new TextEncoder().encode("") + * ], + * M: { // MapAttributeValue + * "": {// Union: only one key present + * S: "STRING_VALUE", + * N: "STRING_VALUE", + * B: new Uint8Array(), // e.g. Buffer.from("") or new TextEncoder().encode("") + * SS: [ + * "STRING_VALUE", + * ], + * NS: [ + * "STRING_VALUE", + * ], + * BS: [ + * new Uint8Array(), // e.g. 
Buffer.from("") or new TextEncoder().encode("") + * ], + * M: { + * "": "", + * }, + * L: [ // ListAttributeValue + * "", + * ], + * NULL: true || false, + * BOOL: true || false, + * }, + * }, + * L: [ + * "", + * ], + * NULL: true || false, + * BOOL: true || false, + * }, + * }, + * Expected: { // ExpectedAttributeMap + * "": { // ExpectedAttributeValue + * Value: "", + * Exists: true || false, + * ComparisonOperator: "EQ" || "NE" || "IN" || "LE" || "LT" || "GE" || "GT" || "BETWEEN" || "NOT_NULL" || "NULL" || "CONTAINS" || "NOT_CONTAINS" || "BEGINS_WITH", + * AttributeValueList: [ // AttributeValueList + * "", + * ], + * }, + * }, + * ReturnValues: "NONE" || "ALL_OLD" || "UPDATED_OLD" || "ALL_NEW" || "UPDATED_NEW", + * ReturnConsumedCapacity: "INDEXES" || "TOTAL" || "NONE", + * ReturnItemCollectionMetrics: "SIZE" || "NONE", + * ConditionalOperator: "AND" || "OR", + * ConditionExpression: "STRING_VALUE", + * ExpressionAttributeNames: { // ExpressionAttributeNameMap + * "": "STRING_VALUE", + * }, + * ExpressionAttributeValues: { // ExpressionAttributeValueMap + * "": "", + * }, + * ReturnValuesOnConditionCheckFailure: "ALL_OLD" || "NONE", + * }; + * const command = new PutItemCommand(input); + * const response = await client.send(command); + * // { // PutItemOutput + * // Attributes: { // AttributeMap + * // "": { // AttributeValue Union: only one key present + * // S: "STRING_VALUE", + * // N: "STRING_VALUE", + * // B: new Uint8Array(), + * // SS: [ // StringSetAttributeValue + * // "STRING_VALUE", + * // ], + * // NS: [ // NumberSetAttributeValue + * // "STRING_VALUE", + * // ], + * // BS: [ // BinarySetAttributeValue + * // new Uint8Array(), + * // ], + * // M: { // MapAttributeValue + * // "": {// Union: only one key present + * // S: "STRING_VALUE", + * // N: "STRING_VALUE", + * // B: new Uint8Array(), + * // SS: [ + * // "STRING_VALUE", + * // ], + * // NS: [ + * // "STRING_VALUE", + * // ], + * // BS: [ + * // new Uint8Array(), + * // ], + * // M: { + * // 
"": "", + * // }, + * // L: [ // ListAttributeValue + * // "", + * // ], + * // NULL: true || false, + * // BOOL: true || false, + * // }, + * // }, + * // L: [ + * // "", + * // ], + * // NULL: true || false, + * // BOOL: true || false, + * // }, + * // }, + * // ConsumedCapacity: { // ConsumedCapacity + * // TableName: "STRING_VALUE", + * // CapacityUnits: Number("double"), + * // ReadCapacityUnits: Number("double"), + * // WriteCapacityUnits: Number("double"), + * // Table: { // Capacity + * // ReadCapacityUnits: Number("double"), + * // WriteCapacityUnits: Number("double"), + * // CapacityUnits: Number("double"), + * // }, + * // LocalSecondaryIndexes: { // SecondaryIndexesCapacityMap + * // "": { + * // ReadCapacityUnits: Number("double"), + * // WriteCapacityUnits: Number("double"), + * // CapacityUnits: Number("double"), + * // }, + * // }, + * // GlobalSecondaryIndexes: { + * // "": { + * // ReadCapacityUnits: Number("double"), + * // WriteCapacityUnits: Number("double"), + * // CapacityUnits: Number("double"), + * // }, + * // }, + * // }, + * // ItemCollectionMetrics: { // ItemCollectionMetrics + * // ItemCollectionKey: { // ItemCollectionKeyAttributeMap + * // "": "", + * // }, + * // SizeEstimateRangeGB: [ // ItemCollectionSizeEstimateRange + * // Number("double"), + * // ], + * // }, + * // }; + * + * ``` + * + * @param PutItemCommandInput - {@link PutItemCommandInput} + * @returns {@link PutItemCommandOutput} + * @see {@link PutItemCommandInput} for command's `input` shape. + * @see {@link PutItemCommandOutput} for command's `response` shape. + * @see {@link DynamoDBClientResolvedConfig | config} for DynamoDBClient's `config` shape. + * + * @throws {@link ConditionalCheckFailedException} (client fault) + *

A condition specified in the operation failed to be evaluated.

+ * + * @throws {@link InternalServerError} (server fault) + *

An error occurred on the server side.

+ * + * @throws {@link InvalidEndpointException} (client fault) + * + * @throws {@link ItemCollectionSizeLimitExceededException} (client fault) + *

An item collection is too large. This exception is only returned for tables that + * have one or more local secondary indexes.

+ * + * @throws {@link ProvisionedThroughputExceededException} (client fault) + *

Your request rate is too high. The Amazon Web Services SDKs for DynamoDB + * automatically retry requests that receive this exception. Your request is eventually + * successful, unless your retry queue is too large to finish. Reduce the frequency of + * requests and use exponential backoff. For more information, go to Error Retries and Exponential Backoff in the Amazon DynamoDB Developer Guide.

+ * + * @throws {@link ReplicatedWriteConflictException} (client fault) + *

The request was rejected because one or more items in the request are being modified by a request in another Region.

+ * + * @throws {@link RequestLimitExceeded} (client fault) + *

Throughput exceeds the current throughput quota for your account. Please contact + * Amazon Web ServicesSupport to request a + * quota increase.

+ * + * @throws {@link ResourceNotFoundException} (client fault) + *

The operation tried to access a nonexistent table or index. The resource might not + * be specified correctly, or its status might not be ACTIVE.

+ * + * @throws {@link TransactionConflictException} (client fault) + *

Operation was rejected because there is an ongoing transaction for the + * item.

+ * + * @throws {@link DynamoDBServiceException} + *

Base exception class for all service exceptions from DynamoDB service.

+ * + * + * @example To add an item to a table + * ```javascript + * // This example adds a new item to the Music table. + * const input = { + * Item: { + * AlbumTitle: { + * S: "Somewhat Famous" + * }, + * Artist: { + * S: "No One You Know" + * }, + * SongTitle: { + * S: "Call Me Today" + * } + * }, + * ReturnConsumedCapacity: "TOTAL", + * TableName: "Music" + * }; + * const command = new PutItemCommand(input); + * const response = await client.send(command); + * /* response is + * { + * ConsumedCapacity: { + * CapacityUnits: 1, + * TableName: "Music" + * } + * } + * *\/ + * ``` + * + * @public + */ +export declare class PutItemCommand extends PutItemCommand_base { + /** @internal type navigation helper, not in runtime. */ + protected static __types: { + api: { + input: PutItemInput; + output: PutItemOutput; + }; + sdk: { + input: PutItemCommandInput; + output: PutItemCommandOutput; + }; + }; +} diff --git a/amplify/functions/fetchDocuments/node_modules/@aws-sdk/client-dynamodb/dist-types/commands/PutResourcePolicyCommand.d.ts b/amplify/functions/fetchDocuments/node_modules/@aws-sdk/client-dynamodb/dist-types/commands/PutResourcePolicyCommand.d.ts new file mode 100644 index 0000000..e222271 --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@aws-sdk/client-dynamodb/dist-types/commands/PutResourcePolicyCommand.d.ts @@ -0,0 +1,140 @@ +import { Command as $Command } from "@smithy/smithy-client"; +import { MetadataBearer as __MetadataBearer } from "@smithy/types"; +import { DynamoDBClientResolvedConfig, ServiceInputTypes, ServiceOutputTypes } from "../DynamoDBClient"; +import { PutResourcePolicyInput, PutResourcePolicyOutput } from "../models/models_0"; +/** + * @public + */ +export type { __MetadataBearer }; +export { $Command }; +/** + * @public + * + * The input for {@link PutResourcePolicyCommand}. 
+ */ +export interface PutResourcePolicyCommandInput extends PutResourcePolicyInput { +} +/** + * @public + * + * The output of {@link PutResourcePolicyCommand}. + */ +export interface PutResourcePolicyCommandOutput extends PutResourcePolicyOutput, __MetadataBearer { +} +declare const PutResourcePolicyCommand_base: { + new (input: PutResourcePolicyCommandInput): import("@smithy/smithy-client").CommandImpl; + new (__0_0: PutResourcePolicyCommandInput): import("@smithy/smithy-client").CommandImpl; + getEndpointParameterInstructions(): import("@smithy/middleware-endpoint").EndpointParameterInstructions; +}; +/** + *

Attaches a resource-based policy document to the resource, which can be a table or + * stream. When you attach a resource-based policy using this API, the policy application + * is + * eventually consistent + * .

+ *

+ * PutResourcePolicy is an idempotent operation; running it multiple times + * on the same resource using the same policy document will return the same revision ID. If + * you specify an ExpectedRevisionId that doesn't match the current policy's + * RevisionId, the PolicyNotFoundException will be + * returned.

+ * + *

+ * PutResourcePolicy is an asynchronous operation. If you issue a + * GetResourcePolicy request immediately after a + * PutResourcePolicy request, DynamoDB might return your + * previous policy, if there was one, or return the + * PolicyNotFoundException. This is because + * GetResourcePolicy uses an eventually consistent query, and the + * metadata for your policy or table might not be available at that moment. Wait for a + * few seconds, and then try the GetResourcePolicy request again.

+ *
+ * @example + * Use a bare-bones client and the command you need to make an API call. + * ```javascript + * import { DynamoDBClient, PutResourcePolicyCommand } from "@aws-sdk/client-dynamodb"; // ES Modules import + * // const { DynamoDBClient, PutResourcePolicyCommand } = require("@aws-sdk/client-dynamodb"); // CommonJS import + * const client = new DynamoDBClient(config); + * const input = { // PutResourcePolicyInput + * ResourceArn: "STRING_VALUE", // required + * Policy: "STRING_VALUE", // required + * ExpectedRevisionId: "STRING_VALUE", + * ConfirmRemoveSelfResourceAccess: true || false, + * }; + * const command = new PutResourcePolicyCommand(input); + * const response = await client.send(command); + * // { // PutResourcePolicyOutput + * // RevisionId: "STRING_VALUE", + * // }; + * + * ``` + * + * @param PutResourcePolicyCommandInput - {@link PutResourcePolicyCommandInput} + * @returns {@link PutResourcePolicyCommandOutput} + * @see {@link PutResourcePolicyCommandInput} for command's `input` shape. + * @see {@link PutResourcePolicyCommandOutput} for command's `response` shape. + * @see {@link DynamoDBClientResolvedConfig | config} for DynamoDBClient's `config` shape. + * + * @throws {@link InternalServerError} (server fault) + *

An error occurred on the server side.

+ * + * @throws {@link InvalidEndpointException} (client fault) + * + * @throws {@link LimitExceededException} (client fault) + *

There is no limit to the number of daily on-demand backups that can be taken.

+ *

For most purposes, up to 500 simultaneous table operations are allowed per account. These operations + * include CreateTable, UpdateTable, + * DeleteTable,UpdateTimeToLive, + * RestoreTableFromBackup, and RestoreTableToPointInTime.

+ *

When you are creating a table with one or more secondary + * indexes, you can have up to 250 such requests running at a time. However, if the table or + * index specifications are complex, then DynamoDB might temporarily reduce the number + * of concurrent operations.

+ *

When importing into DynamoDB, up to 50 simultaneous import table operations are allowed per account.

+ *

There is a soft account quota of 2,500 tables.

+ *

GetRecords was called with a value of more than 1000 for the limit request parameter.

+ *

More than 2 processes are reading from the same streams shard at the same time. Exceeding + * this limit may result in request throttling.

+ * + * @throws {@link PolicyNotFoundException} (client fault) + *

The operation tried to access a nonexistent resource-based policy.

+ *

If you specified an ExpectedRevisionId, it's possible that a policy is present for the resource but its revision ID didn't match the expected value.

+ * + * @throws {@link ResourceInUseException} (client fault) + *

The operation conflicts with the resource's availability. For example:

+ *
    + *
  • + *

    You attempted to recreate an existing table.

    + *
  • + *
  • + *

    You tried to delete a table currently in the CREATING state.

    + *
  • + *
  • + *

    You tried to update a resource that was already being updated.

    + *
  • + *
+ *

When appropriate, wait for the ongoing update to complete and attempt the request again.

+ * + * @throws {@link ResourceNotFoundException} (client fault) + *

The operation tried to access a nonexistent table or index. The resource might not + * be specified correctly, or its status might not be ACTIVE.

+ * + * @throws {@link DynamoDBServiceException} + *

Base exception class for all service exceptions from DynamoDB service.

+ * + * + * @public + */ +export declare class PutResourcePolicyCommand extends PutResourcePolicyCommand_base { + /** @internal type navigation helper, not in runtime. */ + protected static __types: { + api: { + input: PutResourcePolicyInput; + output: PutResourcePolicyOutput; + }; + sdk: { + input: PutResourcePolicyCommandInput; + output: PutResourcePolicyCommandOutput; + }; + }; +} diff --git a/amplify/functions/fetchDocuments/node_modules/@aws-sdk/client-dynamodb/dist-types/commands/QueryCommand.d.ts b/amplify/functions/fetchDocuments/node_modules/@aws-sdk/client-dynamodb/dist-types/commands/QueryCommand.d.ts new file mode 100644 index 0000000..bac7977 --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@aws-sdk/client-dynamodb/dist-types/commands/QueryCommand.d.ts @@ -0,0 +1,329 @@ +import { Command as $Command } from "@smithy/smithy-client"; +import { MetadataBearer as __MetadataBearer } from "@smithy/types"; +import { DynamoDBClientResolvedConfig, ServiceInputTypes, ServiceOutputTypes } from "../DynamoDBClient"; +import { QueryInput, QueryOutput } from "../models/models_0"; +/** + * @public + */ +export type { __MetadataBearer }; +export { $Command }; +/** + * @public + * + * The input for {@link QueryCommand}. + */ +export interface QueryCommandInput extends QueryInput { +} +/** + * @public + * + * The output of {@link QueryCommand}. + */ +export interface QueryCommandOutput extends QueryOutput, __MetadataBearer { +} +declare const QueryCommand_base: { + new (input: QueryCommandInput): import("@smithy/smithy-client").CommandImpl; + new (__0_0: QueryCommandInput): import("@smithy/smithy-client").CommandImpl; + getEndpointParameterInstructions(): import("@smithy/middleware-endpoint").EndpointParameterInstructions; +}; +/** + *

You must provide the name of the partition key attribute and a single value for that + * attribute. Query returns all items with that partition key value. + * Optionally, you can provide a sort key attribute and use a comparison operator to refine + * the search results.

+ *

Use the KeyConditionExpression parameter to provide a specific value for + * the partition key. The Query operation will return all of the items from + * the table or index with that partition key value. You can optionally narrow the scope of + * the Query operation by specifying a sort key value and a comparison + * operator in KeyConditionExpression. To further refine the + * Query results, you can optionally provide a + * FilterExpression. A FilterExpression determines which + * items within the results should be returned to you. All of the other results are + * discarded.

+ *

A Query operation always returns a result set. If no matching items are + * found, the result set will be empty. Queries that do not return results consume the + * minimum number of read capacity units for that type of read operation.

+ * + *

DynamoDB calculates the number of read capacity units consumed based on item + * size, not on the amount of data that is returned to an application. The number of + * capacity units consumed will be the same whether you request all of the attributes + * (the default behavior) or just some of them (using a projection expression). The + * number will also be the same whether or not you use a FilterExpression. + *

+ *
+ *

+ * Query results are always sorted by the sort key value. If the data type of + * the sort key is Number, the results are returned in numeric order; otherwise, the + * results are returned in order of UTF-8 bytes. By default, the sort order is ascending. + * To reverse the order, set the ScanIndexForward parameter to false.

+ *

A single Query operation will read up to the maximum number of items set + * (if using the Limit parameter) or a maximum of 1 MB of data and then apply + * any filtering to the results using FilterExpression. If + * LastEvaluatedKey is present in the response, you will need to paginate + * the result set. For more information, see Paginating + * the Results in the Amazon DynamoDB Developer Guide.

+ *

+ * FilterExpression is applied after a Query finishes, but before + * the results are returned. A FilterExpression cannot contain partition key + * or sort key attributes. You need to specify those attributes in the + * KeyConditionExpression.

+ * + *

A Query operation can return an empty result set and a + * LastEvaluatedKey if all the items read for the page of results are + * filtered out.

+ *
+ *

You can query a table, a local secondary index, or a global secondary index. For a + * query on a table or on a local secondary index, you can set the + * ConsistentRead parameter to true and obtain a strongly + * consistent result. Global secondary indexes support eventually consistent reads only, so + * do not specify ConsistentRead when querying a global secondary + * index.

+ * @example + * Use a bare-bones client and the command you need to make an API call. + * ```javascript + * import { DynamoDBClient, QueryCommand } from "@aws-sdk/client-dynamodb"; // ES Modules import + * // const { DynamoDBClient, QueryCommand } = require("@aws-sdk/client-dynamodb"); // CommonJS import + * const client = new DynamoDBClient(config); + * const input = { // QueryInput + * TableName: "STRING_VALUE", // required + * IndexName: "STRING_VALUE", + * Select: "ALL_ATTRIBUTES" || "ALL_PROJECTED_ATTRIBUTES" || "SPECIFIC_ATTRIBUTES" || "COUNT", + * AttributesToGet: [ // AttributeNameList + * "STRING_VALUE", + * ], + * Limit: Number("int"), + * ConsistentRead: true || false, + * KeyConditions: { // KeyConditions + * "": { // Condition + * AttributeValueList: [ // AttributeValueList + * { // AttributeValue Union: only one key present + * S: "STRING_VALUE", + * N: "STRING_VALUE", + * B: new Uint8Array(), // e.g. Buffer.from("") or new TextEncoder().encode("") + * SS: [ // StringSetAttributeValue + * "STRING_VALUE", + * ], + * NS: [ // NumberSetAttributeValue + * "STRING_VALUE", + * ], + * BS: [ // BinarySetAttributeValue + * new Uint8Array(), // e.g. Buffer.from("") or new TextEncoder().encode("") + * ], + * M: { // MapAttributeValue + * "": {// Union: only one key present + * S: "STRING_VALUE", + * N: "STRING_VALUE", + * B: new Uint8Array(), // e.g. Buffer.from("") or new TextEncoder().encode("") + * SS: [ + * "STRING_VALUE", + * ], + * NS: [ + * "STRING_VALUE", + * ], + * BS: [ + * new Uint8Array(), // e.g. 
Buffer.from("") or new TextEncoder().encode("") + * ], + * M: { + * "": "", + * }, + * L: [ // ListAttributeValue + * "", + * ], + * NULL: true || false, + * BOOL: true || false, + * }, + * }, + * L: [ + * "", + * ], + * NULL: true || false, + * BOOL: true || false, + * }, + * ], + * ComparisonOperator: "EQ" || "NE" || "IN" || "LE" || "LT" || "GE" || "GT" || "BETWEEN" || "NOT_NULL" || "NULL" || "CONTAINS" || "NOT_CONTAINS" || "BEGINS_WITH", // required + * }, + * }, + * QueryFilter: { // FilterConditionMap + * "": { + * AttributeValueList: [ + * "", + * ], + * ComparisonOperator: "EQ" || "NE" || "IN" || "LE" || "LT" || "GE" || "GT" || "BETWEEN" || "NOT_NULL" || "NULL" || "CONTAINS" || "NOT_CONTAINS" || "BEGINS_WITH", // required + * }, + * }, + * ConditionalOperator: "AND" || "OR", + * ScanIndexForward: true || false, + * ExclusiveStartKey: { // Key + * "": "", + * }, + * ReturnConsumedCapacity: "INDEXES" || "TOTAL" || "NONE", + * ProjectionExpression: "STRING_VALUE", + * FilterExpression: "STRING_VALUE", + * KeyConditionExpression: "STRING_VALUE", + * ExpressionAttributeNames: { // ExpressionAttributeNameMap + * "": "STRING_VALUE", + * }, + * ExpressionAttributeValues: { // ExpressionAttributeValueMap + * "": "", + * }, + * }; + * const command = new QueryCommand(input); + * const response = await client.send(command); + * // { // QueryOutput + * // Items: [ // ItemList + * // { // AttributeMap + * // "": { // AttributeValue Union: only one key present + * // S: "STRING_VALUE", + * // N: "STRING_VALUE", + * // B: new Uint8Array(), + * // SS: [ // StringSetAttributeValue + * // "STRING_VALUE", + * // ], + * // NS: [ // NumberSetAttributeValue + * // "STRING_VALUE", + * // ], + * // BS: [ // BinarySetAttributeValue + * // new Uint8Array(), + * // ], + * // M: { // MapAttributeValue + * // "": {// Union: only one key present + * // S: "STRING_VALUE", + * // N: "STRING_VALUE", + * // B: new Uint8Array(), + * // SS: [ + * // "STRING_VALUE", + * // ], + * // NS: [ + * 
// "STRING_VALUE", + * // ], + * // BS: [ + * // new Uint8Array(), + * // ], + * // M: { + * // "": "", + * // }, + * // L: [ // ListAttributeValue + * // "", + * // ], + * // NULL: true || false, + * // BOOL: true || false, + * // }, + * // }, + * // L: [ + * // "", + * // ], + * // NULL: true || false, + * // BOOL: true || false, + * // }, + * // }, + * // ], + * // Count: Number("int"), + * // ScannedCount: Number("int"), + * // LastEvaluatedKey: { // Key + * // "": "", + * // }, + * // ConsumedCapacity: { // ConsumedCapacity + * // TableName: "STRING_VALUE", + * // CapacityUnits: Number("double"), + * // ReadCapacityUnits: Number("double"), + * // WriteCapacityUnits: Number("double"), + * // Table: { // Capacity + * // ReadCapacityUnits: Number("double"), + * // WriteCapacityUnits: Number("double"), + * // CapacityUnits: Number("double"), + * // }, + * // LocalSecondaryIndexes: { // SecondaryIndexesCapacityMap + * // "": { + * // ReadCapacityUnits: Number("double"), + * // WriteCapacityUnits: Number("double"), + * // CapacityUnits: Number("double"), + * // }, + * // }, + * // GlobalSecondaryIndexes: { + * // "": { + * // ReadCapacityUnits: Number("double"), + * // WriteCapacityUnits: Number("double"), + * // CapacityUnits: Number("double"), + * // }, + * // }, + * // }, + * // }; + * + * ``` + * + * @param QueryCommandInput - {@link QueryCommandInput} + * @returns {@link QueryCommandOutput} + * @see {@link QueryCommandInput} for command's `input` shape. + * @see {@link QueryCommandOutput} for command's `response` shape. + * @see {@link DynamoDBClientResolvedConfig | config} for DynamoDBClient's `config` shape. + * + * @throws {@link InternalServerError} (server fault) + *

An error occurred on the server side.

+ * + * @throws {@link InvalidEndpointException} (client fault) + * + * @throws {@link ProvisionedThroughputExceededException} (client fault) + *

Your request rate is too high. The Amazon Web Services SDKs for DynamoDB + * automatically retry requests that receive this exception. Your request is eventually + * successful, unless your retry queue is too large to finish. Reduce the frequency of + * requests and use exponential backoff. For more information, go to Error Retries and Exponential Backoff in the Amazon DynamoDB Developer Guide.

+ * + * @throws {@link RequestLimitExceeded} (client fault) + *

Throughput exceeds the current throughput quota for your account. Please contact + * Amazon Web ServicesSupport to request a + * quota increase.

+ * + * @throws {@link ResourceNotFoundException} (client fault) + *

The operation tried to access a nonexistent table or index. The resource might not + * be specified correctly, or its status might not be ACTIVE.

+ * + * @throws {@link DynamoDBServiceException} + *

Base exception class for all service exceptions from DynamoDB service.

+ * + * + * @example To query an item + * ```javascript + * // This example queries items in the Music table. The table has a partition key and sort key (Artist and SongTitle), but this query only specifies the partition key value. It returns song titles by the artist named "No One You Know". + * const input = { + * ExpressionAttributeValues: { + * :v1: { + * S: "No One You Know" + * } + * }, + * KeyConditionExpression: "Artist = :v1", + * ProjectionExpression: "SongTitle", + * TableName: "Music" + * }; + * const command = new QueryCommand(input); + * const response = await client.send(command); + * /* response is + * { + * ConsumedCapacity: { /* empty *\/ }, + * Count: 2, + * Items: [ + * { + * SongTitle: { + * S: "Call Me Today" + * } + * } + * ], + * ScannedCount: 2 + * } + * *\/ + * ``` + * + * @public + */ +export declare class QueryCommand extends QueryCommand_base { + /** @internal type navigation helper, not in runtime. */ + protected static __types: { + api: { + input: QueryInput; + output: QueryOutput; + }; + sdk: { + input: QueryCommandInput; + output: QueryCommandOutput; + }; + }; +} diff --git a/amplify/functions/fetchDocuments/node_modules/@aws-sdk/client-dynamodb/dist-types/commands/RestoreTableFromBackupCommand.d.ts b/amplify/functions/fetchDocuments/node_modules/@aws-sdk/client-dynamodb/dist-types/commands/RestoreTableFromBackupCommand.d.ts new file mode 100644 index 0000000..495b072 --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@aws-sdk/client-dynamodb/dist-types/commands/RestoreTableFromBackupCommand.d.ts @@ -0,0 +1,361 @@ +import { Command as $Command } from "@smithy/smithy-client"; +import { MetadataBearer as __MetadataBearer } from "@smithy/types"; +import { DynamoDBClientResolvedConfig, ServiceInputTypes, ServiceOutputTypes } from "../DynamoDBClient"; +import { RestoreTableFromBackupInput, RestoreTableFromBackupOutput } from "../models/models_0"; +/** + * @public + */ +export type { __MetadataBearer }; +export { $Command 
}; +/** + * @public + * + * The input for {@link RestoreTableFromBackupCommand}. + */ +export interface RestoreTableFromBackupCommandInput extends RestoreTableFromBackupInput { +} +/** + * @public + * + * The output of {@link RestoreTableFromBackupCommand}. + */ +export interface RestoreTableFromBackupCommandOutput extends RestoreTableFromBackupOutput, __MetadataBearer { +} +declare const RestoreTableFromBackupCommand_base: { + new (input: RestoreTableFromBackupCommandInput): import("@smithy/smithy-client").CommandImpl; + new (__0_0: RestoreTableFromBackupCommandInput): import("@smithy/smithy-client").CommandImpl; + getEndpointParameterInstructions(): import("@smithy/middleware-endpoint").EndpointParameterInstructions; +}; +/** + *

Creates a new table from an existing backup. Any number of users can execute up to 50 + * concurrent restores (any type of restore) in a given account.

+ *

You can call RestoreTableFromBackup at a maximum rate of 10 times per + * second.

+ *

You must manually set up the following on the restored table:

+ *
    + *
  • + *

    Auto scaling policies

    + *
  • + *
  • + *

    IAM policies

    + *
  • + *
  • + *

    Amazon CloudWatch metrics and alarms

    + *
  • + *
  • + *

    Tags

    + *
  • + *
  • + *

    Stream settings

    + *
  • + *
  • + *

    Time to Live (TTL) settings

    + *
  • + *
+ * @example + * Use a bare-bones client and the command you need to make an API call. + * ```javascript + * import { DynamoDBClient, RestoreTableFromBackupCommand } from "@aws-sdk/client-dynamodb"; // ES Modules import + * // const { DynamoDBClient, RestoreTableFromBackupCommand } = require("@aws-sdk/client-dynamodb"); // CommonJS import + * const client = new DynamoDBClient(config); + * const input = { // RestoreTableFromBackupInput + * TargetTableName: "STRING_VALUE", // required + * BackupArn: "STRING_VALUE", // required + * BillingModeOverride: "PROVISIONED" || "PAY_PER_REQUEST", + * GlobalSecondaryIndexOverride: [ // GlobalSecondaryIndexList + * { // GlobalSecondaryIndex + * IndexName: "STRING_VALUE", // required + * KeySchema: [ // KeySchema // required + * { // KeySchemaElement + * AttributeName: "STRING_VALUE", // required + * KeyType: "HASH" || "RANGE", // required + * }, + * ], + * Projection: { // Projection + * ProjectionType: "ALL" || "KEYS_ONLY" || "INCLUDE", + * NonKeyAttributes: [ // NonKeyAttributeNameList + * "STRING_VALUE", + * ], + * }, + * ProvisionedThroughput: { // ProvisionedThroughput + * ReadCapacityUnits: Number("long"), // required + * WriteCapacityUnits: Number("long"), // required + * }, + * OnDemandThroughput: { // OnDemandThroughput + * MaxReadRequestUnits: Number("long"), + * MaxWriteRequestUnits: Number("long"), + * }, + * WarmThroughput: { // WarmThroughput + * ReadUnitsPerSecond: Number("long"), + * WriteUnitsPerSecond: Number("long"), + * }, + * }, + * ], + * LocalSecondaryIndexOverride: [ // LocalSecondaryIndexList + * { // LocalSecondaryIndex + * IndexName: "STRING_VALUE", // required + * KeySchema: [ // required + * { + * AttributeName: "STRING_VALUE", // required + * KeyType: "HASH" || "RANGE", // required + * }, + * ], + * Projection: { + * ProjectionType: "ALL" || "KEYS_ONLY" || "INCLUDE", + * NonKeyAttributes: [ + * "STRING_VALUE", + * ], + * }, + * }, + * ], + * ProvisionedThroughputOverride: { + * ReadCapacityUnits: 
Number("long"), // required + * WriteCapacityUnits: Number("long"), // required + * }, + * OnDemandThroughputOverride: { + * MaxReadRequestUnits: Number("long"), + * MaxWriteRequestUnits: Number("long"), + * }, + * SSESpecificationOverride: { // SSESpecification + * Enabled: true || false, + * SSEType: "AES256" || "KMS", + * KMSMasterKeyId: "STRING_VALUE", + * }, + * }; + * const command = new RestoreTableFromBackupCommand(input); + * const response = await client.send(command); + * // { // RestoreTableFromBackupOutput + * // TableDescription: { // TableDescription + * // AttributeDefinitions: [ // AttributeDefinitions + * // { // AttributeDefinition + * // AttributeName: "STRING_VALUE", // required + * // AttributeType: "S" || "N" || "B", // required + * // }, + * // ], + * // TableName: "STRING_VALUE", + * // KeySchema: [ // KeySchema + * // { // KeySchemaElement + * // AttributeName: "STRING_VALUE", // required + * // KeyType: "HASH" || "RANGE", // required + * // }, + * // ], + * // TableStatus: "CREATING" || "UPDATING" || "DELETING" || "ACTIVE" || "INACCESSIBLE_ENCRYPTION_CREDENTIALS" || "ARCHIVING" || "ARCHIVED", + * // CreationDateTime: new Date("TIMESTAMP"), + * // ProvisionedThroughput: { // ProvisionedThroughputDescription + * // LastIncreaseDateTime: new Date("TIMESTAMP"), + * // LastDecreaseDateTime: new Date("TIMESTAMP"), + * // NumberOfDecreasesToday: Number("long"), + * // ReadCapacityUnits: Number("long"), + * // WriteCapacityUnits: Number("long"), + * // }, + * // TableSizeBytes: Number("long"), + * // ItemCount: Number("long"), + * // TableArn: "STRING_VALUE", + * // TableId: "STRING_VALUE", + * // BillingModeSummary: { // BillingModeSummary + * // BillingMode: "PROVISIONED" || "PAY_PER_REQUEST", + * // LastUpdateToPayPerRequestDateTime: new Date("TIMESTAMP"), + * // }, + * // LocalSecondaryIndexes: [ // LocalSecondaryIndexDescriptionList + * // { // LocalSecondaryIndexDescription + * // IndexName: "STRING_VALUE", + * // KeySchema: [ + * // { + * 
// AttributeName: "STRING_VALUE", // required + * // KeyType: "HASH" || "RANGE", // required + * // }, + * // ], + * // Projection: { // Projection + * // ProjectionType: "ALL" || "KEYS_ONLY" || "INCLUDE", + * // NonKeyAttributes: [ // NonKeyAttributeNameList + * // "STRING_VALUE", + * // ], + * // }, + * // IndexSizeBytes: Number("long"), + * // ItemCount: Number("long"), + * // IndexArn: "STRING_VALUE", + * // }, + * // ], + * // GlobalSecondaryIndexes: [ // GlobalSecondaryIndexDescriptionList + * // { // GlobalSecondaryIndexDescription + * // IndexName: "STRING_VALUE", + * // KeySchema: [ + * // { + * // AttributeName: "STRING_VALUE", // required + * // KeyType: "HASH" || "RANGE", // required + * // }, + * // ], + * // Projection: { + * // ProjectionType: "ALL" || "KEYS_ONLY" || "INCLUDE", + * // NonKeyAttributes: [ + * // "STRING_VALUE", + * // ], + * // }, + * // IndexStatus: "CREATING" || "UPDATING" || "DELETING" || "ACTIVE", + * // Backfilling: true || false, + * // ProvisionedThroughput: { + * // LastIncreaseDateTime: new Date("TIMESTAMP"), + * // LastDecreaseDateTime: new Date("TIMESTAMP"), + * // NumberOfDecreasesToday: Number("long"), + * // ReadCapacityUnits: Number("long"), + * // WriteCapacityUnits: Number("long"), + * // }, + * // IndexSizeBytes: Number("long"), + * // ItemCount: Number("long"), + * // IndexArn: "STRING_VALUE", + * // OnDemandThroughput: { // OnDemandThroughput + * // MaxReadRequestUnits: Number("long"), + * // MaxWriteRequestUnits: Number("long"), + * // }, + * // WarmThroughput: { // GlobalSecondaryIndexWarmThroughputDescription + * // ReadUnitsPerSecond: Number("long"), + * // WriteUnitsPerSecond: Number("long"), + * // Status: "CREATING" || "UPDATING" || "DELETING" || "ACTIVE", + * // }, + * // }, + * // ], + * // StreamSpecification: { // StreamSpecification + * // StreamEnabled: true || false, // required + * // StreamViewType: "NEW_IMAGE" || "OLD_IMAGE" || "NEW_AND_OLD_IMAGES" || "KEYS_ONLY", + * // }, + * // 
LatestStreamLabel: "STRING_VALUE", + * // LatestStreamArn: "STRING_VALUE", + * // GlobalTableVersion: "STRING_VALUE", + * // Replicas: [ // ReplicaDescriptionList + * // { // ReplicaDescription + * // RegionName: "STRING_VALUE", + * // ReplicaStatus: "CREATING" || "CREATION_FAILED" || "UPDATING" || "DELETING" || "ACTIVE" || "REGION_DISABLED" || "INACCESSIBLE_ENCRYPTION_CREDENTIALS", + * // ReplicaStatusDescription: "STRING_VALUE", + * // ReplicaStatusPercentProgress: "STRING_VALUE", + * // KMSMasterKeyId: "STRING_VALUE", + * // ProvisionedThroughputOverride: { // ProvisionedThroughputOverride + * // ReadCapacityUnits: Number("long"), + * // }, + * // OnDemandThroughputOverride: { // OnDemandThroughputOverride + * // MaxReadRequestUnits: Number("long"), + * // }, + * // WarmThroughput: { // TableWarmThroughputDescription + * // ReadUnitsPerSecond: Number("long"), + * // WriteUnitsPerSecond: Number("long"), + * // Status: "CREATING" || "UPDATING" || "DELETING" || "ACTIVE" || "INACCESSIBLE_ENCRYPTION_CREDENTIALS" || "ARCHIVING" || "ARCHIVED", + * // }, + * // GlobalSecondaryIndexes: [ // ReplicaGlobalSecondaryIndexDescriptionList + * // { // ReplicaGlobalSecondaryIndexDescription + * // IndexName: "STRING_VALUE", + * // ProvisionedThroughputOverride: { + * // ReadCapacityUnits: Number("long"), + * // }, + * // OnDemandThroughputOverride: { + * // MaxReadRequestUnits: Number("long"), + * // }, + * // WarmThroughput: { + * // ReadUnitsPerSecond: Number("long"), + * // WriteUnitsPerSecond: Number("long"), + * // Status: "CREATING" || "UPDATING" || "DELETING" || "ACTIVE", + * // }, + * // }, + * // ], + * // ReplicaInaccessibleDateTime: new Date("TIMESTAMP"), + * // ReplicaTableClassSummary: { // TableClassSummary + * // TableClass: "STANDARD" || "STANDARD_INFREQUENT_ACCESS", + * // LastUpdateDateTime: new Date("TIMESTAMP"), + * // }, + * // }, + * // ], + * // RestoreSummary: { // RestoreSummary + * // SourceBackupArn: "STRING_VALUE", + * // SourceTableArn: 
"STRING_VALUE", + * // RestoreDateTime: new Date("TIMESTAMP"), // required + * // RestoreInProgress: true || false, // required + * // }, + * // SSEDescription: { // SSEDescription + * // Status: "ENABLING" || "ENABLED" || "DISABLING" || "DISABLED" || "UPDATING", + * // SSEType: "AES256" || "KMS", + * // KMSMasterKeyArn: "STRING_VALUE", + * // InaccessibleEncryptionDateTime: new Date("TIMESTAMP"), + * // }, + * // ArchivalSummary: { // ArchivalSummary + * // ArchivalDateTime: new Date("TIMESTAMP"), + * // ArchivalReason: "STRING_VALUE", + * // ArchivalBackupArn: "STRING_VALUE", + * // }, + * // TableClassSummary: { + * // TableClass: "STANDARD" || "STANDARD_INFREQUENT_ACCESS", + * // LastUpdateDateTime: new Date("TIMESTAMP"), + * // }, + * // DeletionProtectionEnabled: true || false, + * // OnDemandThroughput: { + * // MaxReadRequestUnits: Number("long"), + * // MaxWriteRequestUnits: Number("long"), + * // }, + * // WarmThroughput: { + * // ReadUnitsPerSecond: Number("long"), + * // WriteUnitsPerSecond: Number("long"), + * // Status: "CREATING" || "UPDATING" || "DELETING" || "ACTIVE" || "INACCESSIBLE_ENCRYPTION_CREDENTIALS" || "ARCHIVING" || "ARCHIVED", + * // }, + * // MultiRegionConsistency: "EVENTUAL" || "STRONG", + * // }, + * // }; + * + * ``` + * + * @param RestoreTableFromBackupCommandInput - {@link RestoreTableFromBackupCommandInput} + * @returns {@link RestoreTableFromBackupCommandOutput} + * @see {@link RestoreTableFromBackupCommandInput} for command's `input` shape. + * @see {@link RestoreTableFromBackupCommandOutput} for command's `response` shape. + * @see {@link DynamoDBClientResolvedConfig | config} for DynamoDBClient's `config` shape. + * + * @throws {@link BackupInUseException} (client fault) + *

There is another ongoing conflicting backup control plane operation on the table. + * The backup is either being created, deleted or restored to a table.

+ * + * @throws {@link BackupNotFoundException} (client fault) + *

Backup not found for the given BackupARN.

+ * + * @throws {@link InternalServerError} (server fault) + *

An error occurred on the server side.

+ * + * @throws {@link InvalidEndpointException} (client fault) + * + * @throws {@link LimitExceededException} (client fault) + *

There is no limit to the number of daily on-demand backups that can be taken.

+ *

For most purposes, up to 500 simultaneous table operations are allowed per account. These operations + * include CreateTable, UpdateTable, + * DeleteTable,UpdateTimeToLive, + * RestoreTableFromBackup, and RestoreTableToPointInTime.

+ *

When you are creating a table with one or more secondary + * indexes, you can have up to 250 such requests running at a time. However, if the table or + * index specifications are complex, then DynamoDB might temporarily reduce the number + * of concurrent operations.

+ *

When importing into DynamoDB, up to 50 simultaneous import table operations are allowed per account.

+ *

There is a soft account quota of 2,500 tables.

+ *

GetRecords was called with a value of more than 1000 for the limit request parameter.

+ *

More than 2 processes are reading from the same streams shard at the same time. Exceeding + * this limit may result in request throttling.

+ * + * @throws {@link TableAlreadyExistsException} (client fault) + *

A target table with the specified name already exists.

+ * + * @throws {@link TableInUseException} (client fault) + *

A target table with the specified name is either being created or deleted. + *

+ * + * @throws {@link DynamoDBServiceException} + *

Base exception class for all service exceptions from DynamoDB service.

+ * + * + * @public + */ +export declare class RestoreTableFromBackupCommand extends RestoreTableFromBackupCommand_base { + /** @internal type navigation helper, not in runtime. */ + protected static __types: { + api: { + input: RestoreTableFromBackupInput; + output: RestoreTableFromBackupOutput; + }; + sdk: { + input: RestoreTableFromBackupCommandInput; + output: RestoreTableFromBackupCommandOutput; + }; + }; +} diff --git a/amplify/functions/fetchDocuments/node_modules/@aws-sdk/client-dynamodb/dist-types/commands/RestoreTableToPointInTimeCommand.d.ts b/amplify/functions/fetchDocuments/node_modules/@aws-sdk/client-dynamodb/dist-types/commands/RestoreTableToPointInTimeCommand.d.ts new file mode 100644 index 0000000..156e857 --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@aws-sdk/client-dynamodb/dist-types/commands/RestoreTableToPointInTimeCommand.d.ts @@ -0,0 +1,394 @@ +import { Command as $Command } from "@smithy/smithy-client"; +import { MetadataBearer as __MetadataBearer } from "@smithy/types"; +import { DynamoDBClientResolvedConfig, ServiceInputTypes, ServiceOutputTypes } from "../DynamoDBClient"; +import { RestoreTableToPointInTimeInput, RestoreTableToPointInTimeOutput } from "../models/models_0"; +/** + * @public + */ +export type { __MetadataBearer }; +export { $Command }; +/** + * @public + * + * The input for {@link RestoreTableToPointInTimeCommand}. + */ +export interface RestoreTableToPointInTimeCommandInput extends RestoreTableToPointInTimeInput { +} +/** + * @public + * + * The output of {@link RestoreTableToPointInTimeCommand}. 
+ */ +export interface RestoreTableToPointInTimeCommandOutput extends RestoreTableToPointInTimeOutput, __MetadataBearer { +} +declare const RestoreTableToPointInTimeCommand_base: { + new (input: RestoreTableToPointInTimeCommandInput): import("@smithy/smithy-client").CommandImpl; + new (__0_0: RestoreTableToPointInTimeCommandInput): import("@smithy/smithy-client").CommandImpl; + getEndpointParameterInstructions(): import("@smithy/middleware-endpoint").EndpointParameterInstructions; +}; +/** + *

Restores the specified table to the specified point in time within + * EarliestRestorableDateTime and LatestRestorableDateTime. + * You can restore your table to any point in time in the last 35 days. You can set the + * recovery period to any value between 1 and 35 days. Any number of users can execute up + * to 50 concurrent restores (any type of restore) in a given account.

+ *

When you restore using point in time recovery, DynamoDB restores your table data to + * the state based on the selected date and time (day:hour:minute:second) to a new table.

+ *

Along with data, the following are also included on the new restored table using point + * in time recovery:

+ *
    + *
  • + *

    Global secondary indexes (GSIs)

    + *
  • + *
  • + *

    Local secondary indexes (LSIs)

    + *
  • + *
  • + *

    Provisioned read and write capacity

    + *
  • + *
  • + *

    Encryption settings

    + * + *

    All these settings come from the current settings of the source table at + * the time of restore.

    + *
    + *
  • + *
+ *

You must manually set up the following on the restored table:

+ *
    + *
  • + *

    Auto scaling policies

    + *
  • + *
  • + *

    IAM policies

    + *
  • + *
  • + *

    Amazon CloudWatch metrics and alarms

    + *
  • + *
  • + *

    Tags

    + *
  • + *
  • + *

    Stream settings

    + *
  • + *
  • + *

    Time to Live (TTL) settings

    + *
  • + *
  • + *

    Point in time recovery settings

    + *
  • + *
+ * @example + * Use a bare-bones client and the command you need to make an API call. + * ```javascript + * import { DynamoDBClient, RestoreTableToPointInTimeCommand } from "@aws-sdk/client-dynamodb"; // ES Modules import + * // const { DynamoDBClient, RestoreTableToPointInTimeCommand } = require("@aws-sdk/client-dynamodb"); // CommonJS import + * const client = new DynamoDBClient(config); + * const input = { // RestoreTableToPointInTimeInput + * SourceTableArn: "STRING_VALUE", + * SourceTableName: "STRING_VALUE", + * TargetTableName: "STRING_VALUE", // required + * UseLatestRestorableTime: true || false, + * RestoreDateTime: new Date("TIMESTAMP"), + * BillingModeOverride: "PROVISIONED" || "PAY_PER_REQUEST", + * GlobalSecondaryIndexOverride: [ // GlobalSecondaryIndexList + * { // GlobalSecondaryIndex + * IndexName: "STRING_VALUE", // required + * KeySchema: [ // KeySchema // required + * { // KeySchemaElement + * AttributeName: "STRING_VALUE", // required + * KeyType: "HASH" || "RANGE", // required + * }, + * ], + * Projection: { // Projection + * ProjectionType: "ALL" || "KEYS_ONLY" || "INCLUDE", + * NonKeyAttributes: [ // NonKeyAttributeNameList + * "STRING_VALUE", + * ], + * }, + * ProvisionedThroughput: { // ProvisionedThroughput + * ReadCapacityUnits: Number("long"), // required + * WriteCapacityUnits: Number("long"), // required + * }, + * OnDemandThroughput: { // OnDemandThroughput + * MaxReadRequestUnits: Number("long"), + * MaxWriteRequestUnits: Number("long"), + * }, + * WarmThroughput: { // WarmThroughput + * ReadUnitsPerSecond: Number("long"), + * WriteUnitsPerSecond: Number("long"), + * }, + * }, + * ], + * LocalSecondaryIndexOverride: [ // LocalSecondaryIndexList + * { // LocalSecondaryIndex + * IndexName: "STRING_VALUE", // required + * KeySchema: [ // required + * { + * AttributeName: "STRING_VALUE", // required + * KeyType: "HASH" || "RANGE", // required + * }, + * ], + * Projection: { + * ProjectionType: "ALL" || "KEYS_ONLY" || "INCLUDE", + * 
NonKeyAttributes: [ + * "STRING_VALUE", + * ], + * }, + * }, + * ], + * ProvisionedThroughputOverride: { + * ReadCapacityUnits: Number("long"), // required + * WriteCapacityUnits: Number("long"), // required + * }, + * OnDemandThroughputOverride: { + * MaxReadRequestUnits: Number("long"), + * MaxWriteRequestUnits: Number("long"), + * }, + * SSESpecificationOverride: { // SSESpecification + * Enabled: true || false, + * SSEType: "AES256" || "KMS", + * KMSMasterKeyId: "STRING_VALUE", + * }, + * }; + * const command = new RestoreTableToPointInTimeCommand(input); + * const response = await client.send(command); + * // { // RestoreTableToPointInTimeOutput + * // TableDescription: { // TableDescription + * // AttributeDefinitions: [ // AttributeDefinitions + * // { // AttributeDefinition + * // AttributeName: "STRING_VALUE", // required + * // AttributeType: "S" || "N" || "B", // required + * // }, + * // ], + * // TableName: "STRING_VALUE", + * // KeySchema: [ // KeySchema + * // { // KeySchemaElement + * // AttributeName: "STRING_VALUE", // required + * // KeyType: "HASH" || "RANGE", // required + * // }, + * // ], + * // TableStatus: "CREATING" || "UPDATING" || "DELETING" || "ACTIVE" || "INACCESSIBLE_ENCRYPTION_CREDENTIALS" || "ARCHIVING" || "ARCHIVED", + * // CreationDateTime: new Date("TIMESTAMP"), + * // ProvisionedThroughput: { // ProvisionedThroughputDescription + * // LastIncreaseDateTime: new Date("TIMESTAMP"), + * // LastDecreaseDateTime: new Date("TIMESTAMP"), + * // NumberOfDecreasesToday: Number("long"), + * // ReadCapacityUnits: Number("long"), + * // WriteCapacityUnits: Number("long"), + * // }, + * // TableSizeBytes: Number("long"), + * // ItemCount: Number("long"), + * // TableArn: "STRING_VALUE", + * // TableId: "STRING_VALUE", + * // BillingModeSummary: { // BillingModeSummary + * // BillingMode: "PROVISIONED" || "PAY_PER_REQUEST", + * // LastUpdateToPayPerRequestDateTime: new Date("TIMESTAMP"), + * // }, + * // LocalSecondaryIndexes: [ // 
LocalSecondaryIndexDescriptionList + * // { // LocalSecondaryIndexDescription + * // IndexName: "STRING_VALUE", + * // KeySchema: [ + * // { + * // AttributeName: "STRING_VALUE", // required + * // KeyType: "HASH" || "RANGE", // required + * // }, + * // ], + * // Projection: { // Projection + * // ProjectionType: "ALL" || "KEYS_ONLY" || "INCLUDE", + * // NonKeyAttributes: [ // NonKeyAttributeNameList + * // "STRING_VALUE", + * // ], + * // }, + * // IndexSizeBytes: Number("long"), + * // ItemCount: Number("long"), + * // IndexArn: "STRING_VALUE", + * // }, + * // ], + * // GlobalSecondaryIndexes: [ // GlobalSecondaryIndexDescriptionList + * // { // GlobalSecondaryIndexDescription + * // IndexName: "STRING_VALUE", + * // KeySchema: [ + * // { + * // AttributeName: "STRING_VALUE", // required + * // KeyType: "HASH" || "RANGE", // required + * // }, + * // ], + * // Projection: { + * // ProjectionType: "ALL" || "KEYS_ONLY" || "INCLUDE", + * // NonKeyAttributes: [ + * // "STRING_VALUE", + * // ], + * // }, + * // IndexStatus: "CREATING" || "UPDATING" || "DELETING" || "ACTIVE", + * // Backfilling: true || false, + * // ProvisionedThroughput: { + * // LastIncreaseDateTime: new Date("TIMESTAMP"), + * // LastDecreaseDateTime: new Date("TIMESTAMP"), + * // NumberOfDecreasesToday: Number("long"), + * // ReadCapacityUnits: Number("long"), + * // WriteCapacityUnits: Number("long"), + * // }, + * // IndexSizeBytes: Number("long"), + * // ItemCount: Number("long"), + * // IndexArn: "STRING_VALUE", + * // OnDemandThroughput: { // OnDemandThroughput + * // MaxReadRequestUnits: Number("long"), + * // MaxWriteRequestUnits: Number("long"), + * // }, + * // WarmThroughput: { // GlobalSecondaryIndexWarmThroughputDescription + * // ReadUnitsPerSecond: Number("long"), + * // WriteUnitsPerSecond: Number("long"), + * // Status: "CREATING" || "UPDATING" || "DELETING" || "ACTIVE", + * // }, + * // }, + * // ], + * // StreamSpecification: { // StreamSpecification + * // StreamEnabled: true 
|| false, // required + * // StreamViewType: "NEW_IMAGE" || "OLD_IMAGE" || "NEW_AND_OLD_IMAGES" || "KEYS_ONLY", + * // }, + * // LatestStreamLabel: "STRING_VALUE", + * // LatestStreamArn: "STRING_VALUE", + * // GlobalTableVersion: "STRING_VALUE", + * // Replicas: [ // ReplicaDescriptionList + * // { // ReplicaDescription + * // RegionName: "STRING_VALUE", + * // ReplicaStatus: "CREATING" || "CREATION_FAILED" || "UPDATING" || "DELETING" || "ACTIVE" || "REGION_DISABLED" || "INACCESSIBLE_ENCRYPTION_CREDENTIALS", + * // ReplicaStatusDescription: "STRING_VALUE", + * // ReplicaStatusPercentProgress: "STRING_VALUE", + * // KMSMasterKeyId: "STRING_VALUE", + * // ProvisionedThroughputOverride: { // ProvisionedThroughputOverride + * // ReadCapacityUnits: Number("long"), + * // }, + * // OnDemandThroughputOverride: { // OnDemandThroughputOverride + * // MaxReadRequestUnits: Number("long"), + * // }, + * // WarmThroughput: { // TableWarmThroughputDescription + * // ReadUnitsPerSecond: Number("long"), + * // WriteUnitsPerSecond: Number("long"), + * // Status: "CREATING" || "UPDATING" || "DELETING" || "ACTIVE" || "INACCESSIBLE_ENCRYPTION_CREDENTIALS" || "ARCHIVING" || "ARCHIVED", + * // }, + * // GlobalSecondaryIndexes: [ // ReplicaGlobalSecondaryIndexDescriptionList + * // { // ReplicaGlobalSecondaryIndexDescription + * // IndexName: "STRING_VALUE", + * // ProvisionedThroughputOverride: { + * // ReadCapacityUnits: Number("long"), + * // }, + * // OnDemandThroughputOverride: { + * // MaxReadRequestUnits: Number("long"), + * // }, + * // WarmThroughput: { + * // ReadUnitsPerSecond: Number("long"), + * // WriteUnitsPerSecond: Number("long"), + * // Status: "CREATING" || "UPDATING" || "DELETING" || "ACTIVE", + * // }, + * // }, + * // ], + * // ReplicaInaccessibleDateTime: new Date("TIMESTAMP"), + * // ReplicaTableClassSummary: { // TableClassSummary + * // TableClass: "STANDARD" || "STANDARD_INFREQUENT_ACCESS", + * // LastUpdateDateTime: new Date("TIMESTAMP"), + * // }, + * // }, 
+ * // ], + * // RestoreSummary: { // RestoreSummary + * // SourceBackupArn: "STRING_VALUE", + * // SourceTableArn: "STRING_VALUE", + * // RestoreDateTime: new Date("TIMESTAMP"), // required + * // RestoreInProgress: true || false, // required + * // }, + * // SSEDescription: { // SSEDescription + * // Status: "ENABLING" || "ENABLED" || "DISABLING" || "DISABLED" || "UPDATING", + * // SSEType: "AES256" || "KMS", + * // KMSMasterKeyArn: "STRING_VALUE", + * // InaccessibleEncryptionDateTime: new Date("TIMESTAMP"), + * // }, + * // ArchivalSummary: { // ArchivalSummary + * // ArchivalDateTime: new Date("TIMESTAMP"), + * // ArchivalReason: "STRING_VALUE", + * // ArchivalBackupArn: "STRING_VALUE", + * // }, + * // TableClassSummary: { + * // TableClass: "STANDARD" || "STANDARD_INFREQUENT_ACCESS", + * // LastUpdateDateTime: new Date("TIMESTAMP"), + * // }, + * // DeletionProtectionEnabled: true || false, + * // OnDemandThroughput: { + * // MaxReadRequestUnits: Number("long"), + * // MaxWriteRequestUnits: Number("long"), + * // }, + * // WarmThroughput: { + * // ReadUnitsPerSecond: Number("long"), + * // WriteUnitsPerSecond: Number("long"), + * // Status: "CREATING" || "UPDATING" || "DELETING" || "ACTIVE" || "INACCESSIBLE_ENCRYPTION_CREDENTIALS" || "ARCHIVING" || "ARCHIVED", + * // }, + * // MultiRegionConsistency: "EVENTUAL" || "STRONG", + * // }, + * // }; + * + * ``` + * + * @param RestoreTableToPointInTimeCommandInput - {@link RestoreTableToPointInTimeCommandInput} + * @returns {@link RestoreTableToPointInTimeCommandOutput} + * @see {@link RestoreTableToPointInTimeCommandInput} for command's `input` shape. + * @see {@link RestoreTableToPointInTimeCommandOutput} for command's `response` shape. + * @see {@link DynamoDBClientResolvedConfig | config} for DynamoDBClient's `config` shape. + * + * @throws {@link InternalServerError} (server fault) + *

An error occurred on the server side.

+ * + * @throws {@link InvalidEndpointException} (client fault) + * + * @throws {@link InvalidRestoreTimeException} (client fault) + *

An invalid restore time was specified. RestoreDateTime must be between + * EarliestRestorableDateTime and LatestRestorableDateTime.

+ * + * @throws {@link LimitExceededException} (client fault) + *

There is no limit to the number of daily on-demand backups that can be taken.

+ *

For most purposes, up to 500 simultaneous table operations are allowed per account. These operations + * include CreateTable, UpdateTable, + * DeleteTable,UpdateTimeToLive, + * RestoreTableFromBackup, and RestoreTableToPointInTime.

+ *

When you are creating a table with one or more secondary + * indexes, you can have up to 250 such requests running at a time. However, if the table or + * index specifications are complex, then DynamoDB might temporarily reduce the number + * of concurrent operations.

+ *

When importing into DynamoDB, up to 50 simultaneous import table operations are allowed per account.

+ *

There is a soft account quota of 2,500 tables.

+ *

GetRecords was called with a value of more than 1000 for the limit request parameter.

+ *

More than 2 processes are reading from the same streams shard at the same time. Exceeding + * this limit may result in request throttling.

+ * + * @throws {@link PointInTimeRecoveryUnavailableException} (client fault) + *

Point in time recovery has not yet been enabled for this source table.

+ * + * @throws {@link TableAlreadyExistsException} (client fault) + *

A target table with the specified name already exists.

+ * + * @throws {@link TableInUseException} (client fault) + *

A target table with the specified name is either being created or deleted. + *

+ * + * @throws {@link TableNotFoundException} (client fault) + *

A source table with the name TableName does not currently exist within + * the subscriber's account or the subscriber is operating in the wrong Amazon Web Services Region.

+ * + * @throws {@link DynamoDBServiceException} + *

Base exception class for all service exceptions from DynamoDB service.

+ * + * + * @public + */ +export declare class RestoreTableToPointInTimeCommand extends RestoreTableToPointInTimeCommand_base { + /** @internal type navigation helper, not in runtime. */ + protected static __types: { + api: { + input: RestoreTableToPointInTimeInput; + output: RestoreTableToPointInTimeOutput; + }; + sdk: { + input: RestoreTableToPointInTimeCommandInput; + output: RestoreTableToPointInTimeCommandOutput; + }; + }; +} diff --git a/amplify/functions/fetchDocuments/node_modules/@aws-sdk/client-dynamodb/dist-types/commands/ScanCommand.d.ts b/amplify/functions/fetchDocuments/node_modules/@aws-sdk/client-dynamodb/dist-types/commands/ScanCommand.d.ts new file mode 100644 index 0000000..64261d8 --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@aws-sdk/client-dynamodb/dist-types/commands/ScanCommand.d.ts @@ -0,0 +1,328 @@ +import { Command as $Command } from "@smithy/smithy-client"; +import { MetadataBearer as __MetadataBearer } from "@smithy/types"; +import { DynamoDBClientResolvedConfig, ServiceInputTypes, ServiceOutputTypes } from "../DynamoDBClient"; +import { ScanInput, ScanOutput } from "../models/models_0"; +/** + * @public + */ +export type { __MetadataBearer }; +export { $Command }; +/** + * @public + * + * The input for {@link ScanCommand}. + */ +export interface ScanCommandInput extends ScanInput { +} +/** + * @public + * + * The output of {@link ScanCommand}. + */ +export interface ScanCommandOutput extends ScanOutput, __MetadataBearer { +} +declare const ScanCommand_base: { + new (input: ScanCommandInput): import("@smithy/smithy-client").CommandImpl; + new (__0_0: ScanCommandInput): import("@smithy/smithy-client").CommandImpl; + getEndpointParameterInstructions(): import("@smithy/middleware-endpoint").EndpointParameterInstructions; +}; +/** + *

The Scan operation returns one or more items and item attributes by + * accessing every item in a table or a secondary index. To have DynamoDB return fewer + * items, you can provide a FilterExpression operation.

+ *

If the total size of scanned items exceeds the maximum dataset size limit of 1 MB, the + * scan completes and results are returned to the user. The LastEvaluatedKey + * value is also returned and the requestor can use the LastEvaluatedKey to + * continue the scan in a subsequent operation. Each scan response also includes number of + * items that were scanned (ScannedCount) as part of the request. If using a + * FilterExpression, a scan result can result in no items meeting the + * criteria and the Count will result in zero. If you did not use a + * FilterExpression in the scan request, then Count is the + * same as ScannedCount.

+ * + *

+ * Count and ScannedCount only return the count of items + * specific to a single scan request and, unless the table is less than 1MB, do not + * represent the total number of items in the table.

+ *
+ *

A single Scan operation first reads up to the maximum number of items set + * (if using the Limit parameter) or a maximum of 1 MB of data and then + * applies any filtering to the results if a FilterExpression is provided. If + * LastEvaluatedKey is present in the response, pagination is required to + * complete the full table scan. For more information, see Paginating the + * Results in the Amazon DynamoDB Developer Guide.

+ *

+ * Scan operations proceed sequentially; however, for faster performance on + * a large table or secondary index, applications can request a parallel Scan + * operation by providing the Segment and TotalSegments + * parameters. For more information, see Parallel + * Scan in the Amazon DynamoDB Developer Guide.

+ *

By default, a Scan uses eventually consistent reads when accessing the + * items in a table. Therefore, the results from an eventually consistent Scan + * may not include the latest item changes at the time the scan iterates through each item + * in the table. If you require a strongly consistent read of each item as the scan + * iterates through the items in the table, you can set the ConsistentRead + * parameter to true. Strong consistency only relates to the consistency of the read at the + * item level.

+ * + *

DynamoDB does not provide snapshot isolation for a scan operation when the + * ConsistentRead parameter is set to true. Thus, a DynamoDB scan + * operation does not guarantee that all reads in a scan see a consistent snapshot of + * the table when the scan operation was requested.

+ *
+ * @example + * Use a bare-bones client and the command you need to make an API call. + * ```javascript + * import { DynamoDBClient, ScanCommand } from "@aws-sdk/client-dynamodb"; // ES Modules import + * // const { DynamoDBClient, ScanCommand } = require("@aws-sdk/client-dynamodb"); // CommonJS import + * const client = new DynamoDBClient(config); + * const input = { // ScanInput + * TableName: "STRING_VALUE", // required + * IndexName: "STRING_VALUE", + * AttributesToGet: [ // AttributeNameList + * "STRING_VALUE", + * ], + * Limit: Number("int"), + * Select: "ALL_ATTRIBUTES" || "ALL_PROJECTED_ATTRIBUTES" || "SPECIFIC_ATTRIBUTES" || "COUNT", + * ScanFilter: { // FilterConditionMap + * "": { // Condition + * AttributeValueList: [ // AttributeValueList + * { // AttributeValue Union: only one key present + * S: "STRING_VALUE", + * N: "STRING_VALUE", + * B: new Uint8Array(), // e.g. Buffer.from("") or new TextEncoder().encode("") + * SS: [ // StringSetAttributeValue + * "STRING_VALUE", + * ], + * NS: [ // NumberSetAttributeValue + * "STRING_VALUE", + * ], + * BS: [ // BinarySetAttributeValue + * new Uint8Array(), // e.g. Buffer.from("") or new TextEncoder().encode("") + * ], + * M: { // MapAttributeValue + * "": {// Union: only one key present + * S: "STRING_VALUE", + * N: "STRING_VALUE", + * B: new Uint8Array(), // e.g. Buffer.from("") or new TextEncoder().encode("") + * SS: [ + * "STRING_VALUE", + * ], + * NS: [ + * "STRING_VALUE", + * ], + * BS: [ + * new Uint8Array(), // e.g. 
Buffer.from("") or new TextEncoder().encode("") + * ], + * M: { + * "": "", + * }, + * L: [ // ListAttributeValue + * "", + * ], + * NULL: true || false, + * BOOL: true || false, + * }, + * }, + * L: [ + * "", + * ], + * NULL: true || false, + * BOOL: true || false, + * }, + * ], + * ComparisonOperator: "EQ" || "NE" || "IN" || "LE" || "LT" || "GE" || "GT" || "BETWEEN" || "NOT_NULL" || "NULL" || "CONTAINS" || "NOT_CONTAINS" || "BEGINS_WITH", // required + * }, + * }, + * ConditionalOperator: "AND" || "OR", + * ExclusiveStartKey: { // Key + * "": "", + * }, + * ReturnConsumedCapacity: "INDEXES" || "TOTAL" || "NONE", + * TotalSegments: Number("int"), + * Segment: Number("int"), + * ProjectionExpression: "STRING_VALUE", + * FilterExpression: "STRING_VALUE", + * ExpressionAttributeNames: { // ExpressionAttributeNameMap + * "": "STRING_VALUE", + * }, + * ExpressionAttributeValues: { // ExpressionAttributeValueMap + * "": "", + * }, + * ConsistentRead: true || false, + * }; + * const command = new ScanCommand(input); + * const response = await client.send(command); + * // { // ScanOutput + * // Items: [ // ItemList + * // { // AttributeMap + * // "": { // AttributeValue Union: only one key present + * // S: "STRING_VALUE", + * // N: "STRING_VALUE", + * // B: new Uint8Array(), + * // SS: [ // StringSetAttributeValue + * // "STRING_VALUE", + * // ], + * // NS: [ // NumberSetAttributeValue + * // "STRING_VALUE", + * // ], + * // BS: [ // BinarySetAttributeValue + * // new Uint8Array(), + * // ], + * // M: { // MapAttributeValue + * // "": {// Union: only one key present + * // S: "STRING_VALUE", + * // N: "STRING_VALUE", + * // B: new Uint8Array(), + * // SS: [ + * // "STRING_VALUE", + * // ], + * // NS: [ + * // "STRING_VALUE", + * // ], + * // BS: [ + * // new Uint8Array(), + * // ], + * // M: { + * // "": "", + * // }, + * // L: [ // ListAttributeValue + * // "", + * // ], + * // NULL: true || false, + * // BOOL: true || false, + * // }, + * // }, + * // L: [ + * // "", + 
* // ], + * // NULL: true || false, + * // BOOL: true || false, + * // }, + * // }, + * // ], + * // Count: Number("int"), + * // ScannedCount: Number("int"), + * // LastEvaluatedKey: { // Key + * // "": "", + * // }, + * // ConsumedCapacity: { // ConsumedCapacity + * // TableName: "STRING_VALUE", + * // CapacityUnits: Number("double"), + * // ReadCapacityUnits: Number("double"), + * // WriteCapacityUnits: Number("double"), + * // Table: { // Capacity + * // ReadCapacityUnits: Number("double"), + * // WriteCapacityUnits: Number("double"), + * // CapacityUnits: Number("double"), + * // }, + * // LocalSecondaryIndexes: { // SecondaryIndexesCapacityMap + * // "": { + * // ReadCapacityUnits: Number("double"), + * // WriteCapacityUnits: Number("double"), + * // CapacityUnits: Number("double"), + * // }, + * // }, + * // GlobalSecondaryIndexes: { + * // "": { + * // ReadCapacityUnits: Number("double"), + * // WriteCapacityUnits: Number("double"), + * // CapacityUnits: Number("double"), + * // }, + * // }, + * // }, + * // }; + * + * ``` + * + * @param ScanCommandInput - {@link ScanCommandInput} + * @returns {@link ScanCommandOutput} + * @see {@link ScanCommandInput} for command's `input` shape. + * @see {@link ScanCommandOutput} for command's `response` shape. + * @see {@link DynamoDBClientResolvedConfig | config} for DynamoDBClient's `config` shape. + * + * @throws {@link InternalServerError} (server fault) + *

An error occurred on the server side.

+ * + * @throws {@link InvalidEndpointException} (client fault) + * + * @throws {@link ProvisionedThroughputExceededException} (client fault) + *

Your request rate is too high. The Amazon Web Services SDKs for DynamoDB + * automatically retry requests that receive this exception. Your request is eventually + * successful, unless your retry queue is too large to finish. Reduce the frequency of + * requests and use exponential backoff. For more information, go to Error Retries and Exponential Backoff in the Amazon DynamoDB Developer Guide.

+ * + * @throws {@link RequestLimitExceeded} (client fault) + *

Throughput exceeds the current throughput quota for your account. Please contact + * Amazon Web ServicesSupport to request a + * quota increase.

+ * + * @throws {@link ResourceNotFoundException} (client fault) + *

The operation tried to access a nonexistent table or index. The resource might not + * be specified correctly, or its status might not be ACTIVE.

+ * + * @throws {@link DynamoDBServiceException} + *

Base exception class for all service exceptions from DynamoDB service.

+ * + * + * @example To scan a table + * ```javascript + * // This example scans the entire Music table, and then narrows the results to songs by the artist "No One You Know". For each item, only the album title and song title are returned. + * const input = { + * ExpressionAttributeNames: { + * #AT: "AlbumTitle", + * #ST: "SongTitle" + * }, + * ExpressionAttributeValues: { + * :a: { + * S: "No One You Know" + * } + * }, + * FilterExpression: "Artist = :a", + * ProjectionExpression: "#ST, #AT", + * TableName: "Music" + * }; + * const command = new ScanCommand(input); + * const response = await client.send(command); + * /* response is + * { + * ConsumedCapacity: { /* empty *\/ }, + * Count: 2, + * Items: [ + * { + * AlbumTitle: { + * S: "Somewhat Famous" + * }, + * SongTitle: { + * S: "Call Me Today" + * } + * }, + * { + * AlbumTitle: { + * S: "Blue Sky Blues" + * }, + * SongTitle: { + * S: "Scared of My Shadow" + * } + * } + * ], + * ScannedCount: 3 + * } + * *\/ + * ``` + * + * @public + */ +export declare class ScanCommand extends ScanCommand_base { + /** @internal type navigation helper, not in runtime. 
*/ + protected static __types: { + api: { + input: ScanInput; + output: ScanOutput; + }; + sdk: { + input: ScanCommandInput; + output: ScanCommandOutput; + }; + }; +} diff --git a/amplify/functions/fetchDocuments/node_modules/@aws-sdk/client-dynamodb/dist-types/commands/TagResourceCommand.d.ts b/amplify/functions/fetchDocuments/node_modules/@aws-sdk/client-dynamodb/dist-types/commands/TagResourceCommand.d.ts new file mode 100644 index 0000000..8475f32 --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@aws-sdk/client-dynamodb/dist-types/commands/TagResourceCommand.d.ts @@ -0,0 +1,139 @@ +import { Command as $Command } from "@smithy/smithy-client"; +import { MetadataBearer as __MetadataBearer } from "@smithy/types"; +import { DynamoDBClientResolvedConfig, ServiceInputTypes, ServiceOutputTypes } from "../DynamoDBClient"; +import { TagResourceInput } from "../models/models_0"; +/** + * @public + */ +export type { __MetadataBearer }; +export { $Command }; +/** + * @public + * + * The input for {@link TagResourceCommand}. + */ +export interface TagResourceCommandInput extends TagResourceInput { +} +/** + * @public + * + * The output of {@link TagResourceCommand}. + */ +export interface TagResourceCommandOutput extends __MetadataBearer { +} +declare const TagResourceCommand_base: { + new (input: TagResourceCommandInput): import("@smithy/smithy-client").CommandImpl; + new (__0_0: TagResourceCommandInput): import("@smithy/smithy-client").CommandImpl; + getEndpointParameterInstructions(): import("@smithy/middleware-endpoint").EndpointParameterInstructions; +}; +/** + *

Associate a set of tags with an Amazon DynamoDB resource. You can then activate these + * user-defined tags so that they appear on the Billing and Cost Management console for + * cost allocation tracking. You can call TagResource up to five times per second, per + * account.

+ *
    + *
  • + *

    + * TagResource is an asynchronous operation. If you issue a ListTagsOfResource request immediately after a + * TagResource request, DynamoDB might return your + * previous tag set, if there was one, or an empty tag set. This is because + * ListTagsOfResource uses an eventually consistent query, and the + * metadata for your tags or table might not be available at that moment. Wait for + * a few seconds, and then try the ListTagsOfResource request + * again.

    + *
  • + *
  • + *

    The application or removal of tags using TagResource and + * UntagResource APIs is eventually consistent. + * ListTagsOfResource API will only reflect the changes after a + * few seconds.

    + *
  • + *
+ *

For an overview on tagging DynamoDB resources, see Tagging for DynamoDB + * in the Amazon DynamoDB Developer Guide.

+ * @example + * Use a bare-bones client and the command you need to make an API call. + * ```javascript + * import { DynamoDBClient, TagResourceCommand } from "@aws-sdk/client-dynamodb"; // ES Modules import + * // const { DynamoDBClient, TagResourceCommand } = require("@aws-sdk/client-dynamodb"); // CommonJS import + * const client = new DynamoDBClient(config); + * const input = { // TagResourceInput + * ResourceArn: "STRING_VALUE", // required + * Tags: [ // TagList // required + * { // Tag + * Key: "STRING_VALUE", // required + * Value: "STRING_VALUE", // required + * }, + * ], + * }; + * const command = new TagResourceCommand(input); + * const response = await client.send(command); + * // {}; + * + * ``` + * + * @param TagResourceCommandInput - {@link TagResourceCommandInput} + * @returns {@link TagResourceCommandOutput} + * @see {@link TagResourceCommandInput} for command's `input` shape. + * @see {@link TagResourceCommandOutput} for command's `response` shape. + * @see {@link DynamoDBClientResolvedConfig | config} for DynamoDBClient's `config` shape. + * + * @throws {@link InternalServerError} (server fault) + *

An error occurred on the server side.

+ * + * @throws {@link InvalidEndpointException} (client fault) + * + * @throws {@link LimitExceededException} (client fault) + *

There is no limit to the number of daily on-demand backups that can be taken.

+ *

For most purposes, up to 500 simultaneous table operations are allowed per account. These operations + * include CreateTable, UpdateTable, + * DeleteTable,UpdateTimeToLive, + * RestoreTableFromBackup, and RestoreTableToPointInTime.

+ *

When you are creating a table with one or more secondary + * indexes, you can have up to 250 such requests running at a time. However, if the table or + * index specifications are complex, then DynamoDB might temporarily reduce the number + * of concurrent operations.

+ *

When importing into DynamoDB, up to 50 simultaneous import table operations are allowed per account.

+ *

There is a soft account quota of 2,500 tables.

+ *

GetRecords was called with a value of more than 1000 for the limit request parameter.

+ *

More than 2 processes are reading from the same streams shard at the same time. Exceeding + * this limit may result in request throttling.

+ * + * @throws {@link ResourceInUseException} (client fault) + *

The operation conflicts with the resource's availability. For example:

+ *
    + *
  • + *

    You attempted to recreate an existing table.

    + *
  • + *
  • + *

    You tried to delete a table currently in the CREATING state.

    + *
  • + *
  • + *

    You tried to update a resource that was already being updated.

    + *
  • + *
+ *

When appropriate, wait for the ongoing update to complete and attempt the request again.

+ * + * @throws {@link ResourceNotFoundException} (client fault) + *

The operation tried to access a nonexistent table or index. The resource might not + * be specified correctly, or its status might not be ACTIVE.

+ * + * @throws {@link DynamoDBServiceException} + *

Base exception class for all service exceptions from DynamoDB service.

+ * + * + * @public + */ +export declare class TagResourceCommand extends TagResourceCommand_base { + /** @internal type navigation helper, not in runtime. */ + protected static __types: { + api: { + input: TagResourceInput; + output: {}; + }; + sdk: { + input: TagResourceCommandInput; + output: TagResourceCommandOutput; + }; + }; +} diff --git a/amplify/functions/fetchDocuments/node_modules/@aws-sdk/client-dynamodb/dist-types/commands/TransactGetItemsCommand.d.ts b/amplify/functions/fetchDocuments/node_modules/@aws-sdk/client-dynamodb/dist-types/commands/TransactGetItemsCommand.d.ts new file mode 100644 index 0000000..a9d9997 --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@aws-sdk/client-dynamodb/dist-types/commands/TransactGetItemsCommand.d.ts @@ -0,0 +1,489 @@ +import { Command as $Command } from "@smithy/smithy-client"; +import { MetadataBearer as __MetadataBearer } from "@smithy/types"; +import { DynamoDBClientResolvedConfig, ServiceInputTypes, ServiceOutputTypes } from "../DynamoDBClient"; +import { TransactGetItemsInput, TransactGetItemsOutput } from "../models/models_0"; +/** + * @public + */ +export type { __MetadataBearer }; +export { $Command }; +/** + * @public + * + * The input for {@link TransactGetItemsCommand}. + */ +export interface TransactGetItemsCommandInput extends TransactGetItemsInput { +} +/** + * @public + * + * The output of {@link TransactGetItemsCommand}. + */ +export interface TransactGetItemsCommandOutput extends TransactGetItemsOutput, __MetadataBearer { +} +declare const TransactGetItemsCommand_base: { + new (input: TransactGetItemsCommandInput): import("@smithy/smithy-client").CommandImpl; + new (__0_0: TransactGetItemsCommandInput): import("@smithy/smithy-client").CommandImpl; + getEndpointParameterInstructions(): import("@smithy/middleware-endpoint").EndpointParameterInstructions; +}; +/** + *

+ * TransactGetItems is a synchronous operation that atomically retrieves + * multiple items from one or more tables (but not from indexes) in a single account and + * Region. A TransactGetItems call can contain up to 100 + * TransactGetItem objects, each of which contains a Get + * structure that specifies an item to retrieve from a table in the account and Region. A + * call to TransactGetItems cannot retrieve items from tables in more than one + * Amazon Web Services account or Region. The aggregate size of the items in the + * transaction cannot exceed 4 MB.

+ *

DynamoDB rejects the entire TransactGetItems request if any of + * the following is true:

+ *
    + *
  • + *

    A conflicting operation is in the process of updating an item to be + * read.

    + *
  • + *
  • + *

    There is insufficient provisioned capacity for the transaction to be + * completed.

    + *
  • + *
  • + *

    There is a user error, such as an invalid data format.

    + *
  • + *
  • + *

    The aggregate size of the items in the transaction exceeded 4 MB.

    + *
  • + *
+ * @example + * Use a bare-bones client and the command you need to make an API call. + * ```javascript + * import { DynamoDBClient, TransactGetItemsCommand } from "@aws-sdk/client-dynamodb"; // ES Modules import + * // const { DynamoDBClient, TransactGetItemsCommand } = require("@aws-sdk/client-dynamodb"); // CommonJS import + * const client = new DynamoDBClient(config); + * const input = { // TransactGetItemsInput + * TransactItems: [ // TransactGetItemList // required + * { // TransactGetItem + * Get: { // Get + * Key: { // Key // required + * "": { // AttributeValue Union: only one key present + * S: "STRING_VALUE", + * N: "STRING_VALUE", + * B: new Uint8Array(), // e.g. Buffer.from("") or new TextEncoder().encode("") + * SS: [ // StringSetAttributeValue + * "STRING_VALUE", + * ], + * NS: [ // NumberSetAttributeValue + * "STRING_VALUE", + * ], + * BS: [ // BinarySetAttributeValue + * new Uint8Array(), // e.g. Buffer.from("") or new TextEncoder().encode("") + * ], + * M: { // MapAttributeValue + * "": {// Union: only one key present + * S: "STRING_VALUE", + * N: "STRING_VALUE", + * B: new Uint8Array(), // e.g. Buffer.from("") or new TextEncoder().encode("") + * SS: [ + * "STRING_VALUE", + * ], + * NS: [ + * "STRING_VALUE", + * ], + * BS: [ + * new Uint8Array(), // e.g. 
Buffer.from("") or new TextEncoder().encode("") + * ], + * M: { + * "": "", + * }, + * L: [ // ListAttributeValue + * "", + * ], + * NULL: true || false, + * BOOL: true || false, + * }, + * }, + * L: [ + * "", + * ], + * NULL: true || false, + * BOOL: true || false, + * }, + * }, + * TableName: "STRING_VALUE", // required + * ProjectionExpression: "STRING_VALUE", + * ExpressionAttributeNames: { // ExpressionAttributeNameMap + * "": "STRING_VALUE", + * }, + * }, + * }, + * ], + * ReturnConsumedCapacity: "INDEXES" || "TOTAL" || "NONE", + * }; + * const command = new TransactGetItemsCommand(input); + * const response = await client.send(command); + * // { // TransactGetItemsOutput + * // ConsumedCapacity: [ // ConsumedCapacityMultiple + * // { // ConsumedCapacity + * // TableName: "STRING_VALUE", + * // CapacityUnits: Number("double"), + * // ReadCapacityUnits: Number("double"), + * // WriteCapacityUnits: Number("double"), + * // Table: { // Capacity + * // ReadCapacityUnits: Number("double"), + * // WriteCapacityUnits: Number("double"), + * // CapacityUnits: Number("double"), + * // }, + * // LocalSecondaryIndexes: { // SecondaryIndexesCapacityMap + * // "": { + * // ReadCapacityUnits: Number("double"), + * // WriteCapacityUnits: Number("double"), + * // CapacityUnits: Number("double"), + * // }, + * // }, + * // GlobalSecondaryIndexes: { + * // "": { + * // ReadCapacityUnits: Number("double"), + * // WriteCapacityUnits: Number("double"), + * // CapacityUnits: Number("double"), + * // }, + * // }, + * // }, + * // ], + * // Responses: [ // ItemResponseList + * // { // ItemResponse + * // Item: { // AttributeMap + * // "": { // AttributeValue Union: only one key present + * // S: "STRING_VALUE", + * // N: "STRING_VALUE", + * // B: new Uint8Array(), + * // SS: [ // StringSetAttributeValue + * // "STRING_VALUE", + * // ], + * // NS: [ // NumberSetAttributeValue + * // "STRING_VALUE", + * // ], + * // BS: [ // BinarySetAttributeValue + * // new Uint8Array(), + * // ], + 
* // M: { // MapAttributeValue + * // "": {// Union: only one key present + * // S: "STRING_VALUE", + * // N: "STRING_VALUE", + * // B: new Uint8Array(), + * // SS: [ + * // "STRING_VALUE", + * // ], + * // NS: [ + * // "STRING_VALUE", + * // ], + * // BS: [ + * // new Uint8Array(), + * // ], + * // M: { + * // "": "", + * // }, + * // L: [ // ListAttributeValue + * // "", + * // ], + * // NULL: true || false, + * // BOOL: true || false, + * // }, + * // }, + * // L: [ + * // "", + * // ], + * // NULL: true || false, + * // BOOL: true || false, + * // }, + * // }, + * // }, + * // ], + * // }; + * + * ``` + * + * @param TransactGetItemsCommandInput - {@link TransactGetItemsCommandInput} + * @returns {@link TransactGetItemsCommandOutput} + * @see {@link TransactGetItemsCommandInput} for command's `input` shape. + * @see {@link TransactGetItemsCommandOutput} for command's `response` shape. + * @see {@link DynamoDBClientResolvedConfig | config} for DynamoDBClient's `config` shape. + * + * @throws {@link InternalServerError} (server fault) + *

An error occurred on the server side.

+ * + * @throws {@link InvalidEndpointException} (client fault) + * + * @throws {@link ProvisionedThroughputExceededException} (client fault) + *

Your request rate is too high. The Amazon Web Services SDKs for DynamoDB + * automatically retry requests that receive this exception. Your request is eventually + * successful, unless your retry queue is too large to finish. Reduce the frequency of + * requests and use exponential backoff. For more information, go to Error Retries and Exponential Backoff in the Amazon DynamoDB Developer Guide.

+ * + * @throws {@link RequestLimitExceeded} (client fault) + *

Throughput exceeds the current throughput quota for your account. Please contact + * Amazon Web ServicesSupport to request a + * quota increase.

+ * + * @throws {@link ResourceNotFoundException} (client fault) + *

The operation tried to access a nonexistent table or index. The resource might not + * be specified correctly, or its status might not be ACTIVE.

+ * + * @throws {@link TransactionCanceledException} (client fault) + *

The entire transaction request was canceled.

+ *

DynamoDB cancels a TransactWriteItems request under the following + * circumstances:

+ *
    + *
  • + *

    A condition in one of the condition expressions is not met.

    + *
  • + *
  • + *

    A table in the TransactWriteItems request is in a different + * account or region.

    + *
  • + *
  • + *

    More than one action in the TransactWriteItems operation + * targets the same item.

    + *
  • + *
  • + *

    There is insufficient provisioned capacity for the transaction to be + * completed.

    + *
  • + *
  • + *

    An item size becomes too large (larger than 400 KB), or a local secondary + * index (LSI) becomes too large, or a similar validation error occurs because of + * changes made by the transaction.

    + *
  • + *
  • + *

    There is a user error, such as an invalid data format.

    + *
  • + *
  • + *

    + * There is an ongoing TransactWriteItems operation that conflicts with a concurrent + * TransactWriteItems request. In this case the TransactWriteItems operation + * fails with a TransactionCanceledException. + *

    + *
  • + *
+ *

DynamoDB cancels a TransactGetItems request under the + * following circumstances:

+ *
    + *
  • + *

    There is an ongoing TransactGetItems operation that conflicts + * with a concurrent PutItem, UpdateItem, + * DeleteItem or TransactWriteItems request. In this + * case the TransactGetItems operation fails with a + * TransactionCanceledException.

    + *
  • + *
  • + *

    A table in the TransactGetItems request is in a different + * account or region.

    + *
  • + *
  • + *

    There is insufficient provisioned capacity for the transaction to be + * completed.

    + *
  • + *
  • + *

    There is a user error, such as an invalid data format.

    + *
  • + *
+ * + *

If using Java, DynamoDB lists the cancellation reasons on the + * CancellationReasons property. This property is not set for other + * languages. Transaction cancellation reasons are ordered in the order of requested + * items, if an item has no error it will have None code and + * Null message.

+ *
+ *

Cancellation reason codes and possible error messages:

+ *
    + *
  • + *

    No Errors:

    + *
      + *
    • + *

      Code: None + *

      + *
    • + *
    • + *

      Message: null + *

      + *
    • + *
    + *
  • + *
  • + *

    Conditional Check Failed:

    + *
      + *
    • + *

      Code: ConditionalCheckFailed + *

      + *
    • + *
    • + *

      Message: The conditional request failed.

      + *
    • + *
    + *
  • + *
  • + *

    Item Collection Size Limit Exceeded:

    + *
      + *
    • + *

      Code: ItemCollectionSizeLimitExceeded + *

      + *
    • + *
    • + *

      Message: Collection size exceeded.

      + *
    • + *
    + *
  • + *
  • + *

    Transaction Conflict:

    + *
      + *
    • + *

      Code: TransactionConflict + *

      + *
    • + *
    • + *

      Message: Transaction is ongoing for the item.

      + *
    • + *
    + *
  • + *
  • + *

    Provisioned Throughput Exceeded:

    + *
      + *
    • + *

      Code: ProvisionedThroughputExceeded + *

      + *
    • + *
    • + *

      Messages:

      + *
        + *
      • + *

        The level of configured provisioned throughput for the + * table was exceeded. Consider increasing your provisioning level + * with the UpdateTable API.

        + * + *

        This Message is received when provisioned throughput is + * exceeded is on a provisioned DynamoDB + * table.

        + *
        + *
      • + *
      • + *

        The level of configured provisioned throughput for one or + * more global secondary indexes of the table was exceeded. + * Consider increasing your provisioning level for the + * under-provisioned global secondary indexes with the UpdateTable + * API.

        + * + *

        This message is returned when provisioned throughput is + * exceeded is on a provisioned GSI.

        + *
        + *
      • + *
      + *
    • + *
    + *
  • + *
  • + *

    Throttling Error:

    + *
      + *
    • + *

      Code: ThrottlingError + *

      + *
    • + *
    • + *

      Messages:

      + *
        + *
      • + *

        Throughput exceeds the current capacity of your table or + * index. DynamoDB is automatically scaling your table or + * index so please try again shortly. If exceptions persist, check + * if you have a hot key: + * https://docs.aws.amazon.com/amazondynamodb/latest/developerguide/bp-partition-key-design.html.

        + * + *

        This message is returned when writes get throttled on an + * On-Demand table as DynamoDB is automatically + * scaling the table.

        + *
        + *
      • + *
      • + *

        Throughput exceeds the current capacity for one or more + * global secondary indexes. DynamoDB is automatically + * scaling your index so please try again shortly.

        + * + *

        This message is returned when writes get throttled on + * an On-Demand GSI as DynamoDB is automatically + * scaling the GSI.

        + *
        + *
      • + *
      + *
    • + *
    + *
  • + *
  • + *

    Validation Error:

    + *
      + *
    • + *

      Code: ValidationError + *

      + *
    • + *
    • + *

      Messages:

      + *
        + *
      • + *

        One or more parameter values were invalid.

        + *
      • + *
      • + *

        The update expression attempted to update the secondary + * index key beyond allowed size limits.

        + *
      • + *
      • + *

        The update expression attempted to update the secondary + * index key to unsupported type.

        + *
      • + *
      • + *

        An operand in the update expression has an incorrect data + * type.

        + *
      • + *
      • + *

        Item size to update has exceeded the maximum allowed + * size.

        + *
      • + *
      • + *

        Number overflow. Attempting to store a number with + * magnitude larger than supported range.

        + *
      • + *
      • + *

        Type mismatch for attribute to update.

        + *
      • + *
      • + *

        Nesting Levels have exceeded supported limits.

        + *
      • + *
      • + *

        The document path provided in the update expression is + * invalid for update.

        + *
      • + *
      • + *

        The provided expression refers to an attribute that does + * not exist in the item.

        + *
      • + *
      + *
    • + *
    + *
  • + *
+ * + * @throws {@link DynamoDBServiceException} + *

Base exception class for all service exceptions from DynamoDB service.

+ * + * + * @public + */ +export declare class TransactGetItemsCommand extends TransactGetItemsCommand_base { + /** @internal type navigation helper, not in runtime. */ + protected static __types: { + api: { + input: TransactGetItemsInput; + output: TransactGetItemsOutput; + }; + sdk: { + input: TransactGetItemsCommandInput; + output: TransactGetItemsCommandOutput; + }; + }; +} diff --git a/amplify/functions/fetchDocuments/node_modules/@aws-sdk/client-dynamodb/dist-types/commands/TransactWriteItemsCommand.d.ts b/amplify/functions/fetchDocuments/node_modules/@aws-sdk/client-dynamodb/dist-types/commands/TransactWriteItemsCommand.d.ts new file mode 100644 index 0000000..644f975 --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@aws-sdk/client-dynamodb/dist-types/commands/TransactWriteItemsCommand.d.ts @@ -0,0 +1,658 @@ +import { Command as $Command } from "@smithy/smithy-client"; +import { MetadataBearer as __MetadataBearer } from "@smithy/types"; +import { DynamoDBClientResolvedConfig, ServiceInputTypes, ServiceOutputTypes } from "../DynamoDBClient"; +import { TransactWriteItemsInput, TransactWriteItemsOutput } from "../models/models_0"; +/** + * @public + */ +export type { __MetadataBearer }; +export { $Command }; +/** + * @public + * + * The input for {@link TransactWriteItemsCommand}. + */ +export interface TransactWriteItemsCommandInput extends TransactWriteItemsInput { +} +/** + * @public + * + * The output of {@link TransactWriteItemsCommand}. + */ +export interface TransactWriteItemsCommandOutput extends TransactWriteItemsOutput, __MetadataBearer { +} +declare const TransactWriteItemsCommand_base: { + new (input: TransactWriteItemsCommandInput): import("@smithy/smithy-client").CommandImpl; + new (__0_0: TransactWriteItemsCommandInput): import("@smithy/smithy-client").CommandImpl; + getEndpointParameterInstructions(): import("@smithy/middleware-endpoint").EndpointParameterInstructions; +}; +/** + *

+ * TransactWriteItems is a synchronous write operation that groups up to 100 + * action requests. These actions can target items in different tables, but not in + * different Amazon Web Services accounts or Regions, and no two actions can target the same + * item. For example, you cannot both ConditionCheck and Update + * the same item. The aggregate size of the items in the transaction cannot exceed 4 + * MB.

+ *

The actions are completed atomically so that either all of them succeed, or all of + * them fail. They are defined by the following objects:

+ *
    + *
  • + *

    + * Put  —   Initiates a PutItem + * operation to write a new item. This structure specifies the primary key of the + * item to be written, the name of the table to write it in, an optional condition + * expression that must be satisfied for the write to succeed, a list of the item's + * attributes, and a field indicating whether to retrieve the item's attributes if + * the condition is not met.

    + *
  • + *
  • + *

    + * Update  —   Initiates an UpdateItem + * operation to update an existing item. This structure specifies the primary key + * of the item to be updated, the name of the table where it resides, an optional + * condition expression that must be satisfied for the update to succeed, an + * expression that defines one or more attributes to be updated, and a field + * indicating whether to retrieve the item's attributes if the condition is not + * met.

    + *
  • + *
  • + *

    + * Delete  —   Initiates a DeleteItem + * operation to delete an existing item. This structure specifies the primary key + * of the item to be deleted, the name of the table where it resides, an optional + * condition expression that must be satisfied for the deletion to succeed, and a + * field indicating whether to retrieve the item's attributes if the condition is + * not met.

    + *
  • + *
  • + *

    + * ConditionCheck  —   Applies a condition to an item + * that is not being modified by the transaction. This structure specifies the + * primary key of the item to be checked, the name of the table where it resides, a + * condition expression that must be satisfied for the transaction to succeed, and + * a field indicating whether to retrieve the item's attributes if the condition is + * not met.

    + *
  • + *
+ *

DynamoDB rejects the entire TransactWriteItems request if any of the + * following is true:

+ *
    + *
  • + *

    A condition in one of the condition expressions is not met.

    + *
  • + *
  • + *

    An ongoing operation is in the process of updating the same item.

    + *
  • + *
  • + *

    There is insufficient provisioned capacity for the transaction to be + * completed.

    + *
  • + *
  • + *

    An item size becomes too large (bigger than 400 KB), a local secondary index + * (LSI) becomes too large, or a similar validation error occurs because of changes + * made by the transaction.

    + *
  • + *
  • + *

    The aggregate size of the items in the transaction exceeds 4 MB.

    + *
  • + *
  • + *

    There is a user error, such as an invalid data format.

    + *
  • + *
+ * @example + * Use a bare-bones client and the command you need to make an API call. + * ```javascript + * import { DynamoDBClient, TransactWriteItemsCommand } from "@aws-sdk/client-dynamodb"; // ES Modules import + * // const { DynamoDBClient, TransactWriteItemsCommand } = require("@aws-sdk/client-dynamodb"); // CommonJS import + * const client = new DynamoDBClient(config); + * const input = { // TransactWriteItemsInput + * TransactItems: [ // TransactWriteItemList // required + * { // TransactWriteItem + * ConditionCheck: { // ConditionCheck + * Key: { // Key // required + * "": { // AttributeValue Union: only one key present + * S: "STRING_VALUE", + * N: "STRING_VALUE", + * B: new Uint8Array(), // e.g. Buffer.from("") or new TextEncoder().encode("") + * SS: [ // StringSetAttributeValue + * "STRING_VALUE", + * ], + * NS: [ // NumberSetAttributeValue + * "STRING_VALUE", + * ], + * BS: [ // BinarySetAttributeValue + * new Uint8Array(), // e.g. Buffer.from("") or new TextEncoder().encode("") + * ], + * M: { // MapAttributeValue + * "": {// Union: only one key present + * S: "STRING_VALUE", + * N: "STRING_VALUE", + * B: new Uint8Array(), // e.g. Buffer.from("") or new TextEncoder().encode("") + * SS: [ + * "STRING_VALUE", + * ], + * NS: [ + * "STRING_VALUE", + * ], + * BS: [ + * new Uint8Array(), // e.g. 
Buffer.from("") or new TextEncoder().encode("") + * ], + * M: { + * "": "", + * }, + * L: [ // ListAttributeValue + * "", + * ], + * NULL: true || false, + * BOOL: true || false, + * }, + * }, + * L: [ + * "", + * ], + * NULL: true || false, + * BOOL: true || false, + * }, + * }, + * TableName: "STRING_VALUE", // required + * ConditionExpression: "STRING_VALUE", // required + * ExpressionAttributeNames: { // ExpressionAttributeNameMap + * "": "STRING_VALUE", + * }, + * ExpressionAttributeValues: { // ExpressionAttributeValueMap + * "": "", + * }, + * ReturnValuesOnConditionCheckFailure: "ALL_OLD" || "NONE", + * }, + * Put: { // Put + * Item: { // PutItemInputAttributeMap // required + * "": "", + * }, + * TableName: "STRING_VALUE", // required + * ConditionExpression: "STRING_VALUE", + * ExpressionAttributeNames: { + * "": "STRING_VALUE", + * }, + * ExpressionAttributeValues: { + * "": "", + * }, + * ReturnValuesOnConditionCheckFailure: "ALL_OLD" || "NONE", + * }, + * Delete: { // Delete + * Key: { // required + * "": "", + * }, + * TableName: "STRING_VALUE", // required + * ConditionExpression: "STRING_VALUE", + * ExpressionAttributeNames: { + * "": "STRING_VALUE", + * }, + * ExpressionAttributeValues: { + * "": "", + * }, + * ReturnValuesOnConditionCheckFailure: "ALL_OLD" || "NONE", + * }, + * Update: { // Update + * Key: { // required + * "": "", + * }, + * UpdateExpression: "STRING_VALUE", // required + * TableName: "STRING_VALUE", // required + * ConditionExpression: "STRING_VALUE", + * ExpressionAttributeNames: { + * "": "STRING_VALUE", + * }, + * ExpressionAttributeValues: { + * "": "", + * }, + * ReturnValuesOnConditionCheckFailure: "ALL_OLD" || "NONE", + * }, + * }, + * ], + * ReturnConsumedCapacity: "INDEXES" || "TOTAL" || "NONE", + * ReturnItemCollectionMetrics: "SIZE" || "NONE", + * ClientRequestToken: "STRING_VALUE", + * }; + * const command = new TransactWriteItemsCommand(input); + * const response = await client.send(command); + * // { // 
TransactWriteItemsOutput + * // ConsumedCapacity: [ // ConsumedCapacityMultiple + * // { // ConsumedCapacity + * // TableName: "STRING_VALUE", + * // CapacityUnits: Number("double"), + * // ReadCapacityUnits: Number("double"), + * // WriteCapacityUnits: Number("double"), + * // Table: { // Capacity + * // ReadCapacityUnits: Number("double"), + * // WriteCapacityUnits: Number("double"), + * // CapacityUnits: Number("double"), + * // }, + * // LocalSecondaryIndexes: { // SecondaryIndexesCapacityMap + * // "": { + * // ReadCapacityUnits: Number("double"), + * // WriteCapacityUnits: Number("double"), + * // CapacityUnits: Number("double"), + * // }, + * // }, + * // GlobalSecondaryIndexes: { + * // "": { + * // ReadCapacityUnits: Number("double"), + * // WriteCapacityUnits: Number("double"), + * // CapacityUnits: Number("double"), + * // }, + * // }, + * // }, + * // ], + * // ItemCollectionMetrics: { // ItemCollectionMetricsPerTable + * // "": [ // ItemCollectionMetricsMultiple + * // { // ItemCollectionMetrics + * // ItemCollectionKey: { // ItemCollectionKeyAttributeMap + * // "": { // AttributeValue Union: only one key present + * // S: "STRING_VALUE", + * // N: "STRING_VALUE", + * // B: new Uint8Array(), + * // SS: [ // StringSetAttributeValue + * // "STRING_VALUE", + * // ], + * // NS: [ // NumberSetAttributeValue + * // "STRING_VALUE", + * // ], + * // BS: [ // BinarySetAttributeValue + * // new Uint8Array(), + * // ], + * // M: { // MapAttributeValue + * // "": {// Union: only one key present + * // S: "STRING_VALUE", + * // N: "STRING_VALUE", + * // B: new Uint8Array(), + * // SS: [ + * // "STRING_VALUE", + * // ], + * // NS: [ + * // "STRING_VALUE", + * // ], + * // BS: [ + * // new Uint8Array(), + * // ], + * // M: { + * // "": "", + * // }, + * // L: [ // ListAttributeValue + * // "", + * // ], + * // NULL: true || false, + * // BOOL: true || false, + * // }, + * // }, + * // L: [ + * // "", + * // ], + * // NULL: true || false, + * // BOOL: true || false, + 
* // }, + * // }, + * // SizeEstimateRangeGB: [ // ItemCollectionSizeEstimateRange + * // Number("double"), + * // ], + * // }, + * // ], + * // }, + * // }; + * + * ``` + * + * @param TransactWriteItemsCommandInput - {@link TransactWriteItemsCommandInput} + * @returns {@link TransactWriteItemsCommandOutput} + * @see {@link TransactWriteItemsCommandInput} for command's `input` shape. + * @see {@link TransactWriteItemsCommandOutput} for command's `response` shape. + * @see {@link DynamoDBClientResolvedConfig | config} for DynamoDBClient's `config` shape. + * + * @throws {@link IdempotentParameterMismatchException} (client fault) + *

DynamoDB rejected the request because you retried a request with a + * different payload but with an idempotent token that was already used.

+ * + * @throws {@link InternalServerError} (server fault) + *

An error occurred on the server side.

+ * + * @throws {@link InvalidEndpointException} (client fault) + * + * @throws {@link ProvisionedThroughputExceededException} (client fault) + *

Your request rate is too high. The Amazon Web Services SDKs for DynamoDB + * automatically retry requests that receive this exception. Your request is eventually + * successful, unless your retry queue is too large to finish. Reduce the frequency of + * requests and use exponential backoff. For more information, go to Error Retries and Exponential Backoff in the Amazon DynamoDB Developer Guide.

+ * + * @throws {@link RequestLimitExceeded} (client fault) + *

Throughput exceeds the current throughput quota for your account. Please contact + * Amazon Web ServicesSupport to request a + * quota increase.

+ * + * @throws {@link ResourceNotFoundException} (client fault) + *

The operation tried to access a nonexistent table or index. The resource might not + * be specified correctly, or its status might not be ACTIVE.

+ * + * @throws {@link TransactionCanceledException} (client fault) + *

The entire transaction request was canceled.

+ *

DynamoDB cancels a TransactWriteItems request under the following + * circumstances:

+ *
    + *
  • + *

    A condition in one of the condition expressions is not met.

    + *
  • + *
  • + *

    A table in the TransactWriteItems request is in a different + * account or region.

    + *
  • + *
  • + *

    More than one action in the TransactWriteItems operation + * targets the same item.

    + *
  • + *
  • + *

    There is insufficient provisioned capacity for the transaction to be + * completed.

    + *
  • + *
  • + *

    An item size becomes too large (larger than 400 KB), or a local secondary + * index (LSI) becomes too large, or a similar validation error occurs because of + * changes made by the transaction.

    + *
  • + *
  • + *

    There is a user error, such as an invalid data format.

    + *
  • + *
  • + *

    + * There is an ongoing TransactWriteItems operation that conflicts with a concurrent + * TransactWriteItems request. In this case the TransactWriteItems operation + * fails with a TransactionCanceledException. + *

    + *
  • + *
+ *

DynamoDB cancels a TransactGetItems request under the + * following circumstances:

+ *
    + *
  • + *

    There is an ongoing TransactGetItems operation that conflicts + * with a concurrent PutItem, UpdateItem, + * DeleteItem or TransactWriteItems request. In this + * case the TransactGetItems operation fails with a + * TransactionCanceledException.

    + *
  • + *
  • + *

    A table in the TransactGetItems request is in a different + * account or region.

    + *
  • + *
  • + *

    There is insufficient provisioned capacity for the transaction to be + * completed.

    + *
  • + *
  • + *

    There is a user error, such as an invalid data format.

    + *
  • + *
+ * + *

If using Java, DynamoDB lists the cancellation reasons on the + * CancellationReasons property. This property is not set for other + * languages. Transaction cancellation reasons are ordered in the order of requested + * items, if an item has no error it will have None code and + * Null message.

+ *
+ *

Cancellation reason codes and possible error messages:

+ *
    + *
  • + *

    No Errors:

    + *
      + *
    • + *

      Code: None + *

      + *
    • + *
    • + *

      Message: null + *

      + *
    • + *
    + *
  • + *
  • + *

    Conditional Check Failed:

    + *
      + *
    • + *

      Code: ConditionalCheckFailed + *

      + *
    • + *
    • + *

      Message: The conditional request failed.

      + *
    • + *
    + *
  • + *
  • + *

    Item Collection Size Limit Exceeded:

    + *
      + *
    • + *

      Code: ItemCollectionSizeLimitExceeded + *

      + *
    • + *
    • + *

      Message: Collection size exceeded.

      + *
    • + *
    + *
  • + *
  • + *

    Transaction Conflict:

    + *
      + *
    • + *

      Code: TransactionConflict + *

      + *
    • + *
    • + *

      Message: Transaction is ongoing for the item.

      + *
    • + *
    + *
  • + *
  • + *

    Provisioned Throughput Exceeded:

    + *
      + *
    • + *

      Code: ProvisionedThroughputExceeded + *

      + *
    • + *
    • + *

      Messages:

      + *
        + *
      • + *

        The level of configured provisioned throughput for the + * table was exceeded. Consider increasing your provisioning level + * with the UpdateTable API.

        + * + *

        This Message is received when provisioned throughput is + * exceeded is on a provisioned DynamoDB + * table.

        + *
        + *
      • + *
      • + *

        The level of configured provisioned throughput for one or + * more global secondary indexes of the table was exceeded. + * Consider increasing your provisioning level for the + * under-provisioned global secondary indexes with the UpdateTable + * API.

        + * + *

        This message is returned when provisioned throughput is + * exceeded is on a provisioned GSI.

        + *
        + *
      • + *
      + *
    • + *
    + *
  • + *
  • + *

    Throttling Error:

    + *
      + *
    • + *

      Code: ThrottlingError + *

      + *
    • + *
    • + *

      Messages:

      + *
        + *
      • + *

        Throughput exceeds the current capacity of your table or + * index. DynamoDB is automatically scaling your table or + * index so please try again shortly. If exceptions persist, check + * if you have a hot key: + * https://docs.aws.amazon.com/amazondynamodb/latest/developerguide/bp-partition-key-design.html.

        + * + *

        This message is returned when writes get throttled on an + * On-Demand table as DynamoDB is automatically + * scaling the table.

        + *
        + *
      • + *
      • + *

        Throughput exceeds the current capacity for one or more + * global secondary indexes. DynamoDB is automatically + * scaling your index so please try again shortly.

        + * + *

        This message is returned when writes get throttled on + * an On-Demand GSI as DynamoDB is automatically + * scaling the GSI.

        + *
        + *
      • + *
      + *
    • + *
    + *
  • + *
  • + *

    Validation Error:

    + *
      + *
    • + *

      Code: ValidationError + *

      + *
    • + *
    • + *

      Messages:

      + *
        + *
      • + *

        One or more parameter values were invalid.

        + *
      • + *
      • + *

        The update expression attempted to update the secondary + * index key beyond allowed size limits.

        + *
      • + *
      • + *

        The update expression attempted to update the secondary + * index key to unsupported type.

        + *
      • + *
      • + *

        An operand in the update expression has an incorrect data + * type.

        + *
      • + *
      • + *

        Item size to update has exceeded the maximum allowed + * size.

        + *
      • + *
      • + *

        Number overflow. Attempting to store a number with + * magnitude larger than supported range.

        + *
      • + *
      • + *

        Type mismatch for attribute to update.

        + *
      • + *
      • + *

        Nesting Levels have exceeded supported limits.

        + *
      • + *
      • + *

        The document path provided in the update expression is + * invalid for update.

        + *
      • + *
      • + *

        The provided expression refers to an attribute that does + * not exist in the item.

        + *
      • + *
      + *
    • + *
    + *
  • + *
+ * + * @throws {@link TransactionInProgressException} (client fault) + *

The transaction with the given request token is already in progress.

+ *

+ * Recommended Settings + *

+ * + *

+ * This is a general recommendation for handling the TransactionInProgressException. These settings help + * ensure that the client retries will trigger completion of the ongoing TransactWriteItems request. + *

+ *
+ *
    + *
  • + *

    + * Set clientExecutionTimeout to a value that allows at least one retry to be processed after 5 + * seconds have elapsed since the first attempt for the TransactWriteItems operation. + *

    + *
  • + *
  • + *

    + * Set socketTimeout to a value a little lower than the requestTimeout setting. + *

    + *
  • + *
  • + *

    + * requestTimeout should be set based on the time taken for the individual retries of a single + * HTTP request for your use case, but setting it to 1 second or higher should work well to reduce chances of + * retries and TransactionInProgressException errors. + *

    + *
  • + *
  • + *

    + * Use exponential backoff when retrying and tune backoff if needed. + *

    + *
  • + *
+ *

+ * Assuming default retry policy, + * example timeout settings based on the guidelines above are as follows: + *

+ *

Example timeline:

+ *
    + *
  • + *

    0-1000 first attempt

    + *
  • + *
  • + *

    1000-1500 first sleep/delay (default retry policy uses 500 ms as base delay for 4xx errors)

    + *
  • + *
  • + *

    1500-2500 second attempt

    + *
  • + *
  • + *

    2500-3500 second sleep/delay (500 * 2, exponential backoff)

    + *
  • + *
  • + *

    3500-4500 third attempt

    + *
  • + *
  • + *

    4500-6500 third sleep/delay (500 * 2^2)

    + *
  • + *
  • + *

    6500-7500 fourth attempt (this can trigger inline recovery since 5 seconds have elapsed since the first attempt reached TC)

    + *
  • + *
+ * + * @throws {@link DynamoDBServiceException} + *

Base exception class for all service exceptions from DynamoDB service.

+ * + * + * @public + */ +export declare class TransactWriteItemsCommand extends TransactWriteItemsCommand_base { + /** @internal type navigation helper, not in runtime. */ + protected static __types: { + api: { + input: TransactWriteItemsInput; + output: TransactWriteItemsOutput; + }; + sdk: { + input: TransactWriteItemsCommandInput; + output: TransactWriteItemsCommandOutput; + }; + }; +} diff --git a/amplify/functions/fetchDocuments/node_modules/@aws-sdk/client-dynamodb/dist-types/commands/UntagResourceCommand.d.ts b/amplify/functions/fetchDocuments/node_modules/@aws-sdk/client-dynamodb/dist-types/commands/UntagResourceCommand.d.ts new file mode 100644 index 0000000..70f702d --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@aws-sdk/client-dynamodb/dist-types/commands/UntagResourceCommand.d.ts @@ -0,0 +1,134 @@ +import { Command as $Command } from "@smithy/smithy-client"; +import { MetadataBearer as __MetadataBearer } from "@smithy/types"; +import { DynamoDBClientResolvedConfig, ServiceInputTypes, ServiceOutputTypes } from "../DynamoDBClient"; +import { UntagResourceInput } from "../models/models_0"; +/** + * @public + */ +export type { __MetadataBearer }; +export { $Command }; +/** + * @public + * + * The input for {@link UntagResourceCommand}. + */ +export interface UntagResourceCommandInput extends UntagResourceInput { +} +/** + * @public + * + * The output of {@link UntagResourceCommand}. + */ +export interface UntagResourceCommandOutput extends __MetadataBearer { +} +declare const UntagResourceCommand_base: { + new (input: UntagResourceCommandInput): import("@smithy/smithy-client").CommandImpl; + new (__0_0: UntagResourceCommandInput): import("@smithy/smithy-client").CommandImpl; + getEndpointParameterInstructions(): import("@smithy/middleware-endpoint").EndpointParameterInstructions; +}; +/** + *

Removes the association of tags from an Amazon DynamoDB resource. You can call + * UntagResource up to five times per second, per account.

+ *
    + *
  • + *

    + * UntagResource is an asynchronous operation. If you issue a ListTagsOfResource request immediately after an + * UntagResource request, DynamoDB might return your + * previous tag set, if there was one, or an empty tag set. This is because + * ListTagsOfResource uses an eventually consistent query, and the + * metadata for your tags or table might not be available at that moment. Wait for + * a few seconds, and then try the ListTagsOfResource request + * again.

    + *
  • + *
  • + *

    The application or removal of tags using TagResource and + * UntagResource APIs is eventually consistent. + * ListTagsOfResource API will only reflect the changes after a + * few seconds.

    + *
  • + *
+ *

For an overview on tagging DynamoDB resources, see Tagging for DynamoDB + * in the Amazon DynamoDB Developer Guide.

+ * @example + * Use a bare-bones client and the command you need to make an API call. + * ```javascript + * import { DynamoDBClient, UntagResourceCommand } from "@aws-sdk/client-dynamodb"; // ES Modules import + * // const { DynamoDBClient, UntagResourceCommand } = require("@aws-sdk/client-dynamodb"); // CommonJS import + * const client = new DynamoDBClient(config); + * const input = { // UntagResourceInput + * ResourceArn: "STRING_VALUE", // required + * TagKeys: [ // TagKeyList // required + * "STRING_VALUE", + * ], + * }; + * const command = new UntagResourceCommand(input); + * const response = await client.send(command); + * // {}; + * + * ``` + * + * @param UntagResourceCommandInput - {@link UntagResourceCommandInput} + * @returns {@link UntagResourceCommandOutput} + * @see {@link UntagResourceCommandInput} for command's `input` shape. + * @see {@link UntagResourceCommandOutput} for command's `response` shape. + * @see {@link DynamoDBClientResolvedConfig | config} for DynamoDBClient's `config` shape. + * + * @throws {@link InternalServerError} (server fault) + *

An error occurred on the server side.

+ * + * @throws {@link InvalidEndpointException} (client fault) + * + * @throws {@link LimitExceededException} (client fault) + *

There is no limit to the number of daily on-demand backups that can be taken.

+ *

For most purposes, up to 500 simultaneous table operations are allowed per account. These operations + * include CreateTable, UpdateTable, + * DeleteTable,UpdateTimeToLive, + * RestoreTableFromBackup, and RestoreTableToPointInTime.

+ *

When you are creating a table with one or more secondary + * indexes, you can have up to 250 such requests running at a time. However, if the table or + * index specifications are complex, then DynamoDB might temporarily reduce the number + * of concurrent operations.

+ *

When importing into DynamoDB, up to 50 simultaneous import table operations are allowed per account.

+ *

There is a soft account quota of 2,500 tables.

+ *

GetRecords was called with a value of more than 1000 for the limit request parameter.

+ *

More than 2 processes are reading from the same streams shard at the same time. Exceeding + * this limit may result in request throttling.

+ * + * @throws {@link ResourceInUseException} (client fault) + *

The operation conflicts with the resource's availability. For example:

+ *
    + *
  • + *

    You attempted to recreate an existing table.

    + *
  • + *
  • + *

    You tried to delete a table currently in the CREATING state.

    + *
  • + *
  • + *

    You tried to update a resource that was already being updated.

    + *
  • + *
+ *

When appropriate, wait for the ongoing update to complete and attempt the request again.

+ * + * @throws {@link ResourceNotFoundException} (client fault) + *

The operation tried to access a nonexistent table or index. The resource might not + * be specified correctly, or its status might not be ACTIVE.

+ * + * @throws {@link DynamoDBServiceException} + *

Base exception class for all service exceptions from DynamoDB service.

+ * + * + * @public + */ +export declare class UntagResourceCommand extends UntagResourceCommand_base { + /** @internal type navigation helper, not in runtime. */ + protected static __types: { + api: { + input: UntagResourceInput; + output: {}; + }; + sdk: { + input: UntagResourceCommandInput; + output: UntagResourceCommandOutput; + }; + }; +} diff --git a/amplify/functions/fetchDocuments/node_modules/@aws-sdk/client-dynamodb/dist-types/commands/UpdateContinuousBackupsCommand.d.ts b/amplify/functions/fetchDocuments/node_modules/@aws-sdk/client-dynamodb/dist-types/commands/UpdateContinuousBackupsCommand.d.ts new file mode 100644 index 0000000..c15d7f7 --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@aws-sdk/client-dynamodb/dist-types/commands/UpdateContinuousBackupsCommand.d.ts @@ -0,0 +1,108 @@ +import { Command as $Command } from "@smithy/smithy-client"; +import { MetadataBearer as __MetadataBearer } from "@smithy/types"; +import { DynamoDBClientResolvedConfig, ServiceInputTypes, ServiceOutputTypes } from "../DynamoDBClient"; +import { UpdateContinuousBackupsInput, UpdateContinuousBackupsOutput } from "../models/models_0"; +/** + * @public + */ +export type { __MetadataBearer }; +export { $Command }; +/** + * @public + * + * The input for {@link UpdateContinuousBackupsCommand}. + */ +export interface UpdateContinuousBackupsCommandInput extends UpdateContinuousBackupsInput { +} +/** + * @public + * + * The output of {@link UpdateContinuousBackupsCommand}. + */ +export interface UpdateContinuousBackupsCommandOutput extends UpdateContinuousBackupsOutput, __MetadataBearer { +} +declare const UpdateContinuousBackupsCommand_base: { + new (input: UpdateContinuousBackupsCommandInput): import("@smithy/smithy-client").CommandImpl; + new (__0_0: UpdateContinuousBackupsCommandInput): import("@smithy/smithy-client").CommandImpl; + getEndpointParameterInstructions(): import("@smithy/middleware-endpoint").EndpointParameterInstructions; +}; +/** + *

+ * UpdateContinuousBackups enables or disables point in time recovery for + * the specified table. A successful UpdateContinuousBackups call returns the + * current ContinuousBackupsDescription. Continuous backups are + * ENABLED on all tables at table creation. If point in time recovery is + * enabled, PointInTimeRecoveryStatus will be set to ENABLED.

+ *

Once continuous backups and point in time recovery are enabled, you can restore to + * any point in time within EarliestRestorableDateTime and + * LatestRestorableDateTime.

+ *

+ * LatestRestorableDateTime is typically 5 minutes before the current time. + * You can restore your table to any point in time in the last 35 days. You can set the + * RecoveryPeriodInDays to any value between 1 and 35 days.

+ * @example + * Use a bare-bones client and the command you need to make an API call. + * ```javascript + * import { DynamoDBClient, UpdateContinuousBackupsCommand } from "@aws-sdk/client-dynamodb"; // ES Modules import + * // const { DynamoDBClient, UpdateContinuousBackupsCommand } = require("@aws-sdk/client-dynamodb"); // CommonJS import + * const client = new DynamoDBClient(config); + * const input = { // UpdateContinuousBackupsInput + * TableName: "STRING_VALUE", // required + * PointInTimeRecoverySpecification: { // PointInTimeRecoverySpecification + * PointInTimeRecoveryEnabled: true || false, // required + * RecoveryPeriodInDays: Number("int"), + * }, + * }; + * const command = new UpdateContinuousBackupsCommand(input); + * const response = await client.send(command); + * // { // UpdateContinuousBackupsOutput + * // ContinuousBackupsDescription: { // ContinuousBackupsDescription + * // ContinuousBackupsStatus: "ENABLED" || "DISABLED", // required + * // PointInTimeRecoveryDescription: { // PointInTimeRecoveryDescription + * // PointInTimeRecoveryStatus: "ENABLED" || "DISABLED", + * // RecoveryPeriodInDays: Number("int"), + * // EarliestRestorableDateTime: new Date("TIMESTAMP"), + * // LatestRestorableDateTime: new Date("TIMESTAMP"), + * // }, + * // }, + * // }; + * + * ``` + * + * @param UpdateContinuousBackupsCommandInput - {@link UpdateContinuousBackupsCommandInput} + * @returns {@link UpdateContinuousBackupsCommandOutput} + * @see {@link UpdateContinuousBackupsCommandInput} for command's `input` shape. + * @see {@link UpdateContinuousBackupsCommandOutput} for command's `response` shape. + * @see {@link DynamoDBClientResolvedConfig | config} for DynamoDBClient's `config` shape. + * + * @throws {@link ContinuousBackupsUnavailableException} (client fault) + *

Backups have not yet been enabled for this table.

+ * + * @throws {@link InternalServerError} (server fault) + *

An error occurred on the server side.

+ * + * @throws {@link InvalidEndpointException} (client fault) + * + * @throws {@link TableNotFoundException} (client fault) + *

A source table with the name TableName does not currently exist within + * the subscriber's account or the subscriber is operating in the wrong Amazon Web Services Region.

+ * + * @throws {@link DynamoDBServiceException} + *

Base exception class for all service exceptions from DynamoDB service.

+ * + * + * @public + */ +export declare class UpdateContinuousBackupsCommand extends UpdateContinuousBackupsCommand_base { + /** @internal type navigation helper, not in runtime. */ + protected static __types: { + api: { + input: UpdateContinuousBackupsInput; + output: UpdateContinuousBackupsOutput; + }; + sdk: { + input: UpdateContinuousBackupsCommandInput; + output: UpdateContinuousBackupsCommandOutput; + }; + }; +} diff --git a/amplify/functions/fetchDocuments/node_modules/@aws-sdk/client-dynamodb/dist-types/commands/UpdateContributorInsightsCommand.d.ts b/amplify/functions/fetchDocuments/node_modules/@aws-sdk/client-dynamodb/dist-types/commands/UpdateContributorInsightsCommand.d.ts new file mode 100644 index 0000000..61c9981 --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@aws-sdk/client-dynamodb/dist-types/commands/UpdateContributorInsightsCommand.d.ts @@ -0,0 +1,89 @@ +import { Command as $Command } from "@smithy/smithy-client"; +import { MetadataBearer as __MetadataBearer } from "@smithy/types"; +import { DynamoDBClientResolvedConfig, ServiceInputTypes, ServiceOutputTypes } from "../DynamoDBClient"; +import { UpdateContributorInsightsInput, UpdateContributorInsightsOutput } from "../models/models_0"; +/** + * @public + */ +export type { __MetadataBearer }; +export { $Command }; +/** + * @public + * + * The input for {@link UpdateContributorInsightsCommand}. + */ +export interface UpdateContributorInsightsCommandInput extends UpdateContributorInsightsInput { +} +/** + * @public + * + * The output of {@link UpdateContributorInsightsCommand}. 
+ */ +export interface UpdateContributorInsightsCommandOutput extends UpdateContributorInsightsOutput, __MetadataBearer { +} +declare const UpdateContributorInsightsCommand_base: { + new (input: UpdateContributorInsightsCommandInput): import("@smithy/smithy-client").CommandImpl; + new (__0_0: UpdateContributorInsightsCommandInput): import("@smithy/smithy-client").CommandImpl; + getEndpointParameterInstructions(): import("@smithy/middleware-endpoint").EndpointParameterInstructions; +}; +/** + *

Updates the status for contributor insights for a specific table or index. CloudWatch + * Contributor Insights for DynamoDB graphs display the partition key and (if applicable) + * sort key of frequently accessed items and frequently throttled items in plaintext. If + * you require the use of Amazon Web Services Key Management Service (KMS) to encrypt this + * table’s partition key and sort key data with an Amazon Web Services managed key or + * customer managed key, you should not enable CloudWatch Contributor Insights for DynamoDB + * for this table.

+ * @example + * Use a bare-bones client and the command you need to make an API call. + * ```javascript + * import { DynamoDBClient, UpdateContributorInsightsCommand } from "@aws-sdk/client-dynamodb"; // ES Modules import + * // const { DynamoDBClient, UpdateContributorInsightsCommand } = require("@aws-sdk/client-dynamodb"); // CommonJS import + * const client = new DynamoDBClient(config); + * const input = { // UpdateContributorInsightsInput + * TableName: "STRING_VALUE", // required + * IndexName: "STRING_VALUE", + * ContributorInsightsAction: "ENABLE" || "DISABLE", // required + * }; + * const command = new UpdateContributorInsightsCommand(input); + * const response = await client.send(command); + * // { // UpdateContributorInsightsOutput + * // TableName: "STRING_VALUE", + * // IndexName: "STRING_VALUE", + * // ContributorInsightsStatus: "ENABLING" || "ENABLED" || "DISABLING" || "DISABLED" || "FAILED", + * // }; + * + * ``` + * + * @param UpdateContributorInsightsCommandInput - {@link UpdateContributorInsightsCommandInput} + * @returns {@link UpdateContributorInsightsCommandOutput} + * @see {@link UpdateContributorInsightsCommandInput} for command's `input` shape. + * @see {@link UpdateContributorInsightsCommandOutput} for command's `response` shape. + * @see {@link DynamoDBClientResolvedConfig | config} for DynamoDBClient's `config` shape. + * + * @throws {@link InternalServerError} (server fault) + *

An error occurred on the server side.

+ * + * @throws {@link ResourceNotFoundException} (client fault) + *

The operation tried to access a nonexistent table or index. The resource might not + * be specified correctly, or its status might not be ACTIVE.

+ * + * @throws {@link DynamoDBServiceException} + *

Base exception class for all service exceptions from DynamoDB service.

+ * + * + * @public + */ +export declare class UpdateContributorInsightsCommand extends UpdateContributorInsightsCommand_base { + /** @internal type navigation helper, not in runtime. */ + protected static __types: { + api: { + input: UpdateContributorInsightsInput; + output: UpdateContributorInsightsOutput; + }; + sdk: { + input: UpdateContributorInsightsCommandInput; + output: UpdateContributorInsightsCommandOutput; + }; + }; +} diff --git a/amplify/functions/fetchDocuments/node_modules/@aws-sdk/client-dynamodb/dist-types/commands/UpdateGlobalTableCommand.d.ts b/amplify/functions/fetchDocuments/node_modules/@aws-sdk/client-dynamodb/dist-types/commands/UpdateGlobalTableCommand.d.ts new file mode 100644 index 0000000..ff0b588 --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@aws-sdk/client-dynamodb/dist-types/commands/UpdateGlobalTableCommand.d.ts @@ -0,0 +1,176 @@ +import { Command as $Command } from "@smithy/smithy-client"; +import { MetadataBearer as __MetadataBearer } from "@smithy/types"; +import { DynamoDBClientResolvedConfig, ServiceInputTypes, ServiceOutputTypes } from "../DynamoDBClient"; +import { UpdateGlobalTableInput, UpdateGlobalTableOutput } from "../models/models_0"; +/** + * @public + */ +export type { __MetadataBearer }; +export { $Command }; +/** + * @public + * + * The input for {@link UpdateGlobalTableCommand}. + */ +export interface UpdateGlobalTableCommandInput extends UpdateGlobalTableInput { +} +/** + * @public + * + * The output of {@link UpdateGlobalTableCommand}. + */ +export interface UpdateGlobalTableCommandOutput extends UpdateGlobalTableOutput, __MetadataBearer { +} +declare const UpdateGlobalTableCommand_base: { + new (input: UpdateGlobalTableCommandInput): import("@smithy/smithy-client").CommandImpl; + new (__0_0: UpdateGlobalTableCommandInput): import("@smithy/smithy-client").CommandImpl; + getEndpointParameterInstructions(): import("@smithy/middleware-endpoint").EndpointParameterInstructions; +}; +/** + *

Adds or removes replicas in the specified global table. The global table must already + * exist to be able to use this operation. Any replica to be added must be empty, have the + * same name as the global table, have the same key schema, have DynamoDB Streams enabled, + * and have the same provisioned and maximum write capacity units.

+ * + *

This documentation is for version 2017.11.29 (Legacy) of global tables, which should be avoided for new global tables. Customers should use Global Tables version 2019.11.21 (Current) when possible, because it provides greater flexibility, higher efficiency, and consumes less write capacity than 2017.11.29 (Legacy).

+ *

To determine which version you're using, see Determining the global table version you are using. To update existing global tables from version 2017.11.29 (Legacy) to version 2019.11.21 (Current), see Upgrading global tables.

+ *
+ * + *

For global tables, this operation only applies to global tables using Version + * 2019.11.21 (Current version). If you are using global tables Version + * 2019.11.21 you can use UpdateTable instead.

+ *

Although you can use UpdateGlobalTable to add replicas and remove + * replicas in a single request, for simplicity we recommend that you issue separate + * requests for adding or removing replicas.

+ *
+ *

If global secondary indexes are specified, then the following conditions must also be + * met:

+ *
    + *
  • + *

    The global secondary indexes must have the same name.

    + *
  • + *
  • + *

    The global secondary indexes must have the same hash key and sort key (if + * present).

    + *
  • + *
  • + *

    The global secondary indexes must have the same provisioned and maximum write + * capacity units.

    + *
  • + *
+ * @example + * Use a bare-bones client and the command you need to make an API call. + * ```javascript + * import { DynamoDBClient, UpdateGlobalTableCommand } from "@aws-sdk/client-dynamodb"; // ES Modules import + * // const { DynamoDBClient, UpdateGlobalTableCommand } = require("@aws-sdk/client-dynamodb"); // CommonJS import + * const client = new DynamoDBClient(config); + * const input = { // UpdateGlobalTableInput + * GlobalTableName: "STRING_VALUE", // required + * ReplicaUpdates: [ // ReplicaUpdateList // required + * { // ReplicaUpdate + * Create: { // CreateReplicaAction + * RegionName: "STRING_VALUE", // required + * }, + * Delete: { // DeleteReplicaAction + * RegionName: "STRING_VALUE", // required + * }, + * }, + * ], + * }; + * const command = new UpdateGlobalTableCommand(input); + * const response = await client.send(command); + * // { // UpdateGlobalTableOutput + * // GlobalTableDescription: { // GlobalTableDescription + * // ReplicationGroup: [ // ReplicaDescriptionList + * // { // ReplicaDescription + * // RegionName: "STRING_VALUE", + * // ReplicaStatus: "CREATING" || "CREATION_FAILED" || "UPDATING" || "DELETING" || "ACTIVE" || "REGION_DISABLED" || "INACCESSIBLE_ENCRYPTION_CREDENTIALS", + * // ReplicaStatusDescription: "STRING_VALUE", + * // ReplicaStatusPercentProgress: "STRING_VALUE", + * // KMSMasterKeyId: "STRING_VALUE", + * // ProvisionedThroughputOverride: { // ProvisionedThroughputOverride + * // ReadCapacityUnits: Number("long"), + * // }, + * // OnDemandThroughputOverride: { // OnDemandThroughputOverride + * // MaxReadRequestUnits: Number("long"), + * // }, + * // WarmThroughput: { // TableWarmThroughputDescription + * // ReadUnitsPerSecond: Number("long"), + * // WriteUnitsPerSecond: Number("long"), + * // Status: "CREATING" || "UPDATING" || "DELETING" || "ACTIVE" || "INACCESSIBLE_ENCRYPTION_CREDENTIALS" || "ARCHIVING" || "ARCHIVED", + * // }, + * // GlobalSecondaryIndexes: [ // ReplicaGlobalSecondaryIndexDescriptionList + * // { // 
ReplicaGlobalSecondaryIndexDescription + * // IndexName: "STRING_VALUE", + * // ProvisionedThroughputOverride: { + * // ReadCapacityUnits: Number("long"), + * // }, + * // OnDemandThroughputOverride: { + * // MaxReadRequestUnits: Number("long"), + * // }, + * // WarmThroughput: { // GlobalSecondaryIndexWarmThroughputDescription + * // ReadUnitsPerSecond: Number("long"), + * // WriteUnitsPerSecond: Number("long"), + * // Status: "CREATING" || "UPDATING" || "DELETING" || "ACTIVE", + * // }, + * // }, + * // ], + * // ReplicaInaccessibleDateTime: new Date("TIMESTAMP"), + * // ReplicaTableClassSummary: { // TableClassSummary + * // TableClass: "STANDARD" || "STANDARD_INFREQUENT_ACCESS", + * // LastUpdateDateTime: new Date("TIMESTAMP"), + * // }, + * // }, + * // ], + * // GlobalTableArn: "STRING_VALUE", + * // CreationDateTime: new Date("TIMESTAMP"), + * // GlobalTableStatus: "CREATING" || "ACTIVE" || "DELETING" || "UPDATING", + * // GlobalTableName: "STRING_VALUE", + * // }, + * // }; + * + * ``` + * + * @param UpdateGlobalTableCommandInput - {@link UpdateGlobalTableCommandInput} + * @returns {@link UpdateGlobalTableCommandOutput} + * @see {@link UpdateGlobalTableCommandInput} for command's `input` shape. + * @see {@link UpdateGlobalTableCommandOutput} for command's `response` shape. + * @see {@link DynamoDBClientResolvedConfig | config} for DynamoDBClient's `config` shape. + * + * @throws {@link GlobalTableNotFoundException} (client fault) + *

The specified global table does not exist.

+ * + * @throws {@link InternalServerError} (server fault) + *

An error occurred on the server side.

+ * + * @throws {@link InvalidEndpointException} (client fault) + * + * @throws {@link ReplicaAlreadyExistsException} (client fault) + *

The specified replica is already part of the global table.

+ * + * @throws {@link ReplicaNotFoundException} (client fault) + *

The specified replica is no longer part of the global table.

+ * + * @throws {@link TableNotFoundException} (client fault) + *

A source table with the name TableName does not currently exist within + * the subscriber's account or the subscriber is operating in the wrong Amazon Web Services Region.

+ * + * @throws {@link DynamoDBServiceException} + *

Base exception class for all service exceptions from DynamoDB service.

+ * + * + * @public + */ +export declare class UpdateGlobalTableCommand extends UpdateGlobalTableCommand_base { + /** @internal type navigation helper, not in runtime. */ + protected static __types: { + api: { + input: UpdateGlobalTableInput; + output: UpdateGlobalTableOutput; + }; + sdk: { + input: UpdateGlobalTableCommandInput; + output: UpdateGlobalTableCommandOutput; + }; + }; +} diff --git a/amplify/functions/fetchDocuments/node_modules/@aws-sdk/client-dynamodb/dist-types/commands/UpdateGlobalTableSettingsCommand.d.ts b/amplify/functions/fetchDocuments/node_modules/@aws-sdk/client-dynamodb/dist-types/commands/UpdateGlobalTableSettingsCommand.d.ts new file mode 100644 index 0000000..bf002ac --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@aws-sdk/client-dynamodb/dist-types/commands/UpdateGlobalTableSettingsCommand.d.ts @@ -0,0 +1,280 @@ +import { Command as $Command } from "@smithy/smithy-client"; +import { MetadataBearer as __MetadataBearer } from "@smithy/types"; +import { DynamoDBClientResolvedConfig, ServiceInputTypes, ServiceOutputTypes } from "../DynamoDBClient"; +import { UpdateGlobalTableSettingsInput, UpdateGlobalTableSettingsOutput } from "../models/models_0"; +/** + * @public + */ +export type { __MetadataBearer }; +export { $Command }; +/** + * @public + * + * The input for {@link UpdateGlobalTableSettingsCommand}. + */ +export interface UpdateGlobalTableSettingsCommandInput extends UpdateGlobalTableSettingsInput { +} +/** + * @public + * + * The output of {@link UpdateGlobalTableSettingsCommand}. 
+ */ +export interface UpdateGlobalTableSettingsCommandOutput extends UpdateGlobalTableSettingsOutput, __MetadataBearer { +} +declare const UpdateGlobalTableSettingsCommand_base: { + new (input: UpdateGlobalTableSettingsCommandInput): import("@smithy/smithy-client").CommandImpl; + new (__0_0: UpdateGlobalTableSettingsCommandInput): import("@smithy/smithy-client").CommandImpl; + getEndpointParameterInstructions(): import("@smithy/middleware-endpoint").EndpointParameterInstructions; +}; +/** + *

Updates settings for a global table.

+ * + *

This documentation is for version 2017.11.29 (Legacy) of global tables, which should be avoided for new global tables. Customers should use Global Tables version 2019.11.21 (Current) when possible, because it provides greater flexibility, higher efficiency, and consumes less write capacity than 2017.11.29 (Legacy).

+ *

To determine which version you're using, see Determining the global table version you are using. To update existing global tables from version 2017.11.29 (Legacy) to version 2019.11.21 (Current), see Upgrading global tables.

+ *
+ * @example + * Use a bare-bones client and the command you need to make an API call. + * ```javascript + * import { DynamoDBClient, UpdateGlobalTableSettingsCommand } from "@aws-sdk/client-dynamodb"; // ES Modules import + * // const { DynamoDBClient, UpdateGlobalTableSettingsCommand } = require("@aws-sdk/client-dynamodb"); // CommonJS import + * const client = new DynamoDBClient(config); + * const input = { // UpdateGlobalTableSettingsInput + * GlobalTableName: "STRING_VALUE", // required + * GlobalTableBillingMode: "PROVISIONED" || "PAY_PER_REQUEST", + * GlobalTableProvisionedWriteCapacityUnits: Number("long"), + * GlobalTableProvisionedWriteCapacityAutoScalingSettingsUpdate: { // AutoScalingSettingsUpdate + * MinimumUnits: Number("long"), + * MaximumUnits: Number("long"), + * AutoScalingDisabled: true || false, + * AutoScalingRoleArn: "STRING_VALUE", + * ScalingPolicyUpdate: { // AutoScalingPolicyUpdate + * PolicyName: "STRING_VALUE", + * TargetTrackingScalingPolicyConfiguration: { // AutoScalingTargetTrackingScalingPolicyConfigurationUpdate + * DisableScaleIn: true || false, + * ScaleInCooldown: Number("int"), + * ScaleOutCooldown: Number("int"), + * TargetValue: Number("double"), // required + * }, + * }, + * }, + * GlobalTableGlobalSecondaryIndexSettingsUpdate: [ // GlobalTableGlobalSecondaryIndexSettingsUpdateList + * { // GlobalTableGlobalSecondaryIndexSettingsUpdate + * IndexName: "STRING_VALUE", // required + * ProvisionedWriteCapacityUnits: Number("long"), + * ProvisionedWriteCapacityAutoScalingSettingsUpdate: { + * MinimumUnits: Number("long"), + * MaximumUnits: Number("long"), + * AutoScalingDisabled: true || false, + * AutoScalingRoleArn: "STRING_VALUE", + * ScalingPolicyUpdate: { + * PolicyName: "STRING_VALUE", + * TargetTrackingScalingPolicyConfiguration: { + * DisableScaleIn: true || false, + * ScaleInCooldown: Number("int"), + * ScaleOutCooldown: Number("int"), + * TargetValue: Number("double"), // required + * }, + * }, + * }, + * }, + * ], + * 
ReplicaSettingsUpdate: [ // ReplicaSettingsUpdateList + * { // ReplicaSettingsUpdate + * RegionName: "STRING_VALUE", // required + * ReplicaProvisionedReadCapacityUnits: Number("long"), + * ReplicaProvisionedReadCapacityAutoScalingSettingsUpdate: { + * MinimumUnits: Number("long"), + * MaximumUnits: Number("long"), + * AutoScalingDisabled: true || false, + * AutoScalingRoleArn: "STRING_VALUE", + * ScalingPolicyUpdate: { + * PolicyName: "STRING_VALUE", + * TargetTrackingScalingPolicyConfiguration: { + * DisableScaleIn: true || false, + * ScaleInCooldown: Number("int"), + * ScaleOutCooldown: Number("int"), + * TargetValue: Number("double"), // required + * }, + * }, + * }, + * ReplicaGlobalSecondaryIndexSettingsUpdate: [ // ReplicaGlobalSecondaryIndexSettingsUpdateList + * { // ReplicaGlobalSecondaryIndexSettingsUpdate + * IndexName: "STRING_VALUE", // required + * ProvisionedReadCapacityUnits: Number("long"), + * ProvisionedReadCapacityAutoScalingSettingsUpdate: "", + * }, + * ], + * ReplicaTableClass: "STANDARD" || "STANDARD_INFREQUENT_ACCESS", + * }, + * ], + * }; + * const command = new UpdateGlobalTableSettingsCommand(input); + * const response = await client.send(command); + * // { // UpdateGlobalTableSettingsOutput + * // GlobalTableName: "STRING_VALUE", + * // ReplicaSettings: [ // ReplicaSettingsDescriptionList + * // { // ReplicaSettingsDescription + * // RegionName: "STRING_VALUE", // required + * // ReplicaStatus: "CREATING" || "CREATION_FAILED" || "UPDATING" || "DELETING" || "ACTIVE" || "REGION_DISABLED" || "INACCESSIBLE_ENCRYPTION_CREDENTIALS", + * // ReplicaBillingModeSummary: { // BillingModeSummary + * // BillingMode: "PROVISIONED" || "PAY_PER_REQUEST", + * // LastUpdateToPayPerRequestDateTime: new Date("TIMESTAMP"), + * // }, + * // ReplicaProvisionedReadCapacityUnits: Number("long"), + * // ReplicaProvisionedReadCapacityAutoScalingSettings: { // AutoScalingSettingsDescription + * // MinimumUnits: Number("long"), + * // MaximumUnits: Number("long"), 
+ * // AutoScalingDisabled: true || false, + * // AutoScalingRoleArn: "STRING_VALUE", + * // ScalingPolicies: [ // AutoScalingPolicyDescriptionList + * // { // AutoScalingPolicyDescription + * // PolicyName: "STRING_VALUE", + * // TargetTrackingScalingPolicyConfiguration: { // AutoScalingTargetTrackingScalingPolicyConfigurationDescription + * // DisableScaleIn: true || false, + * // ScaleInCooldown: Number("int"), + * // ScaleOutCooldown: Number("int"), + * // TargetValue: Number("double"), // required + * // }, + * // }, + * // ], + * // }, + * // ReplicaProvisionedWriteCapacityUnits: Number("long"), + * // ReplicaProvisionedWriteCapacityAutoScalingSettings: { + * // MinimumUnits: Number("long"), + * // MaximumUnits: Number("long"), + * // AutoScalingDisabled: true || false, + * // AutoScalingRoleArn: "STRING_VALUE", + * // ScalingPolicies: [ + * // { + * // PolicyName: "STRING_VALUE", + * // TargetTrackingScalingPolicyConfiguration: { + * // DisableScaleIn: true || false, + * // ScaleInCooldown: Number("int"), + * // ScaleOutCooldown: Number("int"), + * // TargetValue: Number("double"), // required + * // }, + * // }, + * // ], + * // }, + * // ReplicaGlobalSecondaryIndexSettings: [ // ReplicaGlobalSecondaryIndexSettingsDescriptionList + * // { // ReplicaGlobalSecondaryIndexSettingsDescription + * // IndexName: "STRING_VALUE", // required + * // IndexStatus: "CREATING" || "UPDATING" || "DELETING" || "ACTIVE", + * // ProvisionedReadCapacityUnits: Number("long"), + * // ProvisionedReadCapacityAutoScalingSettings: { + * // MinimumUnits: Number("long"), + * // MaximumUnits: Number("long"), + * // AutoScalingDisabled: true || false, + * // AutoScalingRoleArn: "STRING_VALUE", + * // ScalingPolicies: [ + * // { + * // PolicyName: "STRING_VALUE", + * // TargetTrackingScalingPolicyConfiguration: { + * // DisableScaleIn: true || false, + * // ScaleInCooldown: Number("int"), + * // ScaleOutCooldown: Number("int"), + * // TargetValue: Number("double"), // required + * // }, 
+ * // }, + * // ], + * // }, + * // ProvisionedWriteCapacityUnits: Number("long"), + * // ProvisionedWriteCapacityAutoScalingSettings: { + * // MinimumUnits: Number("long"), + * // MaximumUnits: Number("long"), + * // AutoScalingDisabled: true || false, + * // AutoScalingRoleArn: "STRING_VALUE", + * // ScalingPolicies: [ + * // { + * // PolicyName: "STRING_VALUE", + * // TargetTrackingScalingPolicyConfiguration: { + * // DisableScaleIn: true || false, + * // ScaleInCooldown: Number("int"), + * // ScaleOutCooldown: Number("int"), + * // TargetValue: Number("double"), // required + * // }, + * // }, + * // ], + * // }, + * // }, + * // ], + * // ReplicaTableClassSummary: { // TableClassSummary + * // TableClass: "STANDARD" || "STANDARD_INFREQUENT_ACCESS", + * // LastUpdateDateTime: new Date("TIMESTAMP"), + * // }, + * // }, + * // ], + * // }; + * + * ``` + * + * @param UpdateGlobalTableSettingsCommandInput - {@link UpdateGlobalTableSettingsCommandInput} + * @returns {@link UpdateGlobalTableSettingsCommandOutput} + * @see {@link UpdateGlobalTableSettingsCommandInput} for command's `input` shape. + * @see {@link UpdateGlobalTableSettingsCommandOutput} for command's `response` shape. + * @see {@link DynamoDBClientResolvedConfig | config} for DynamoDBClient's `config` shape. + * + * @throws {@link GlobalTableNotFoundException} (client fault) + *

The specified global table does not exist.

+ * + * @throws {@link IndexNotFoundException} (client fault) + *

The operation tried to access a nonexistent index.

+ * + * @throws {@link InternalServerError} (server fault) + *

An error occurred on the server side.

+ * + * @throws {@link InvalidEndpointException} (client fault) + * + * @throws {@link LimitExceededException} (client fault) + *

There is no limit to the number of daily on-demand backups that can be taken.

+ *

For most purposes, up to 500 simultaneous table operations are allowed per account. These operations + * include CreateTable, UpdateTable, + * DeleteTable,UpdateTimeToLive, + * RestoreTableFromBackup, and RestoreTableToPointInTime.

+ *

When you are creating a table with one or more secondary + * indexes, you can have up to 250 such requests running at a time. However, if the table or + * index specifications are complex, then DynamoDB might temporarily reduce the number + * of concurrent operations.

+ *

When importing into DynamoDB, up to 50 simultaneous import table operations are allowed per account.

+ *

There is a soft account quota of 2,500 tables.

+ *

GetRecords was called with a value of more than 1000 for the limit request parameter.

+ *

More than 2 processes are reading from the same streams shard at the same time. Exceeding + * this limit may result in request throttling.

+ * + * @throws {@link ReplicaNotFoundException} (client fault) + *

The specified replica is no longer part of the global table.

+ * + * @throws {@link ResourceInUseException} (client fault) + *

The operation conflicts with the resource's availability. For example:

+ *
    + *
  • + *

    You attempted to recreate an existing table.

    + *
  • + *
  • + *

    You tried to delete a table currently in the CREATING state.

    + *
  • + *
  • + *

    You tried to update a resource that was already being updated.

    + *
  • + *
+ *

When appropriate, wait for the ongoing update to complete and attempt the request again.

+ * + * @throws {@link DynamoDBServiceException} + *

Base exception class for all service exceptions from DynamoDB service.

+ * + * + * @public + */ +export declare class UpdateGlobalTableSettingsCommand extends UpdateGlobalTableSettingsCommand_base { + /** @internal type navigation helper, not in runtime. */ + protected static __types: { + api: { + input: UpdateGlobalTableSettingsInput; + output: UpdateGlobalTableSettingsOutput; + }; + sdk: { + input: UpdateGlobalTableSettingsCommandInput; + output: UpdateGlobalTableSettingsCommandOutput; + }; + }; +} diff --git a/amplify/functions/fetchDocuments/node_modules/@aws-sdk/client-dynamodb/dist-types/commands/UpdateItemCommand.d.ts b/amplify/functions/fetchDocuments/node_modules/@aws-sdk/client-dynamodb/dist-types/commands/UpdateItemCommand.d.ts new file mode 100644 index 0000000..0fc6013 --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@aws-sdk/client-dynamodb/dist-types/commands/UpdateItemCommand.d.ts @@ -0,0 +1,313 @@ +import { Command as $Command } from "@smithy/smithy-client"; +import { MetadataBearer as __MetadataBearer } from "@smithy/types"; +import { DynamoDBClientResolvedConfig, ServiceInputTypes, ServiceOutputTypes } from "../DynamoDBClient"; +import { UpdateItemInput, UpdateItemOutput } from "../models/models_0"; +/** + * @public + */ +export type { __MetadataBearer }; +export { $Command }; +/** + * @public + * + * The input for {@link UpdateItemCommand}. + */ +export interface UpdateItemCommandInput extends UpdateItemInput { +} +/** + * @public + * + * The output of {@link UpdateItemCommand}. + */ +export interface UpdateItemCommandOutput extends UpdateItemOutput, __MetadataBearer { +} +declare const UpdateItemCommand_base: { + new (input: UpdateItemCommandInput): import("@smithy/smithy-client").CommandImpl; + new (__0_0: UpdateItemCommandInput): import("@smithy/smithy-client").CommandImpl; + getEndpointParameterInstructions(): import("@smithy/middleware-endpoint").EndpointParameterInstructions; +}; +/** + *

Edits an existing item's attributes, or adds a new item to the table if it does not + * already exist. You can put, delete, or add attribute values. You can also perform a + * conditional update on an existing item (insert a new attribute name-value pair if it + * doesn't exist, or replace an existing name-value pair if it has certain expected + * attribute values).

+ *

You can also return the item's attribute values in the same UpdateItem + * operation using the ReturnValues parameter.

+ * @example + * Use a bare-bones client and the command you need to make an API call. + * ```javascript + * import { DynamoDBClient, UpdateItemCommand } from "@aws-sdk/client-dynamodb"; // ES Modules import + * // const { DynamoDBClient, UpdateItemCommand } = require("@aws-sdk/client-dynamodb"); // CommonJS import + * const client = new DynamoDBClient(config); + * const input = { // UpdateItemInput + * TableName: "STRING_VALUE", // required + * Key: { // Key // required + * "": { // AttributeValue Union: only one key present + * S: "STRING_VALUE", + * N: "STRING_VALUE", + * B: new Uint8Array(), // e.g. Buffer.from("") or new TextEncoder().encode("") + * SS: [ // StringSetAttributeValue + * "STRING_VALUE", + * ], + * NS: [ // NumberSetAttributeValue + * "STRING_VALUE", + * ], + * BS: [ // BinarySetAttributeValue + * new Uint8Array(), // e.g. Buffer.from("") or new TextEncoder().encode("") + * ], + * M: { // MapAttributeValue + * "": {// Union: only one key present + * S: "STRING_VALUE", + * N: "STRING_VALUE", + * B: new Uint8Array(), // e.g. Buffer.from("") or new TextEncoder().encode("") + * SS: [ + * "STRING_VALUE", + * ], + * NS: [ + * "STRING_VALUE", + * ], + * BS: [ + * new Uint8Array(), // e.g. 
Buffer.from("") or new TextEncoder().encode("") + * ], + * M: { + * "": "", + * }, + * L: [ // ListAttributeValue + * "", + * ], + * NULL: true || false, + * BOOL: true || false, + * }, + * }, + * L: [ + * "", + * ], + * NULL: true || false, + * BOOL: true || false, + * }, + * }, + * AttributeUpdates: { // AttributeUpdates + * "": { // AttributeValueUpdate + * Value: "", + * Action: "ADD" || "PUT" || "DELETE", + * }, + * }, + * Expected: { // ExpectedAttributeMap + * "": { // ExpectedAttributeValue + * Value: "", + * Exists: true || false, + * ComparisonOperator: "EQ" || "NE" || "IN" || "LE" || "LT" || "GE" || "GT" || "BETWEEN" || "NOT_NULL" || "NULL" || "CONTAINS" || "NOT_CONTAINS" || "BEGINS_WITH", + * AttributeValueList: [ // AttributeValueList + * "", + * ], + * }, + * }, + * ConditionalOperator: "AND" || "OR", + * ReturnValues: "NONE" || "ALL_OLD" || "UPDATED_OLD" || "ALL_NEW" || "UPDATED_NEW", + * ReturnConsumedCapacity: "INDEXES" || "TOTAL" || "NONE", + * ReturnItemCollectionMetrics: "SIZE" || "NONE", + * UpdateExpression: "STRING_VALUE", + * ConditionExpression: "STRING_VALUE", + * ExpressionAttributeNames: { // ExpressionAttributeNameMap + * "": "STRING_VALUE", + * }, + * ExpressionAttributeValues: { // ExpressionAttributeValueMap + * "": "", + * }, + * ReturnValuesOnConditionCheckFailure: "ALL_OLD" || "NONE", + * }; + * const command = new UpdateItemCommand(input); + * const response = await client.send(command); + * // { // UpdateItemOutput + * // Attributes: { // AttributeMap + * // "": { // AttributeValue Union: only one key present + * // S: "STRING_VALUE", + * // N: "STRING_VALUE", + * // B: new Uint8Array(), + * // SS: [ // StringSetAttributeValue + * // "STRING_VALUE", + * // ], + * // NS: [ // NumberSetAttributeValue + * // "STRING_VALUE", + * // ], + * // BS: [ // BinarySetAttributeValue + * // new Uint8Array(), + * // ], + * // M: { // MapAttributeValue + * // "": {// Union: only one key present + * // S: "STRING_VALUE", + * // N: 
"STRING_VALUE", + * // B: new Uint8Array(), + * // SS: [ + * // "STRING_VALUE", + * // ], + * // NS: [ + * // "STRING_VALUE", + * // ], + * // BS: [ + * // new Uint8Array(), + * // ], + * // M: { + * // "": "", + * // }, + * // L: [ // ListAttributeValue + * // "", + * // ], + * // NULL: true || false, + * // BOOL: true || false, + * // }, + * // }, + * // L: [ + * // "", + * // ], + * // NULL: true || false, + * // BOOL: true || false, + * // }, + * // }, + * // ConsumedCapacity: { // ConsumedCapacity + * // TableName: "STRING_VALUE", + * // CapacityUnits: Number("double"), + * // ReadCapacityUnits: Number("double"), + * // WriteCapacityUnits: Number("double"), + * // Table: { // Capacity + * // ReadCapacityUnits: Number("double"), + * // WriteCapacityUnits: Number("double"), + * // CapacityUnits: Number("double"), + * // }, + * // LocalSecondaryIndexes: { // SecondaryIndexesCapacityMap + * // "": { + * // ReadCapacityUnits: Number("double"), + * // WriteCapacityUnits: Number("double"), + * // CapacityUnits: Number("double"), + * // }, + * // }, + * // GlobalSecondaryIndexes: { + * // "": { + * // ReadCapacityUnits: Number("double"), + * // WriteCapacityUnits: Number("double"), + * // CapacityUnits: Number("double"), + * // }, + * // }, + * // }, + * // ItemCollectionMetrics: { // ItemCollectionMetrics + * // ItemCollectionKey: { // ItemCollectionKeyAttributeMap + * // "": "", + * // }, + * // SizeEstimateRangeGB: [ // ItemCollectionSizeEstimateRange + * // Number("double"), + * // ], + * // }, + * // }; + * + * ``` + * + * @param UpdateItemCommandInput - {@link UpdateItemCommandInput} + * @returns {@link UpdateItemCommandOutput} + * @see {@link UpdateItemCommandInput} for command's `input` shape. + * @see {@link UpdateItemCommandOutput} for command's `response` shape. + * @see {@link DynamoDBClientResolvedConfig | config} for DynamoDBClient's `config` shape. + * + * @throws {@link ConditionalCheckFailedException} (client fault) + *

A condition specified in the operation failed to be evaluated.

+ * + * @throws {@link InternalServerError} (server fault) + *

An error occurred on the server side.

+ * + * @throws {@link InvalidEndpointException} (client fault) + * + * @throws {@link ItemCollectionSizeLimitExceededException} (client fault) + *

An item collection is too large. This exception is only returned for tables that + * have one or more local secondary indexes.

+ * + * @throws {@link ProvisionedThroughputExceededException} (client fault) + *

Your request rate is too high. The Amazon Web Services SDKs for DynamoDB + * automatically retry requests that receive this exception. Your request is eventually + * successful, unless your retry queue is too large to finish. Reduce the frequency of + * requests and use exponential backoff. For more information, go to Error Retries and Exponential Backoff in the Amazon DynamoDB Developer Guide.

+ * + * @throws {@link ReplicatedWriteConflictException} (client fault) + *

The request was rejected because one or more items in the request are being modified by a request in another Region.

+ * + * @throws {@link RequestLimitExceeded} (client fault) + *

Throughput exceeds the current throughput quota for your account. Please contact + * Amazon Web ServicesSupport to request a + * quota increase.

+ * + * @throws {@link ResourceNotFoundException} (client fault) + *

The operation tried to access a nonexistent table or index. The resource might not + * be specified correctly, or its status might not be ACTIVE.

+ * + * @throws {@link TransactionConflictException} (client fault) + *

Operation was rejected because there is an ongoing transaction for the + * item.

+ * + * @throws {@link DynamoDBServiceException} + *

Base exception class for all service exceptions from DynamoDB service.

+ * + * + * @example To update an item in a table + * ```javascript + * // This example updates an item in the Music table. It adds a new attribute (Year) and modifies the AlbumTitle attribute. All of the attributes in the item, as they appear after the update, are returned in the response. + * const input = { + * ExpressionAttributeNames: { + * #AT: "AlbumTitle", + * #Y: "Year" + * }, + * ExpressionAttributeValues: { + * :t: { + * S: "Louder Than Ever" + * }, + * :y: { + * N: "2015" + * } + * }, + * Key: { + * Artist: { + * S: "Acme Band" + * }, + * SongTitle: { + * S: "Happy Day" + * } + * }, + * ReturnValues: "ALL_NEW", + * TableName: "Music", + * UpdateExpression: "SET #Y = :y, #AT = :t" + * }; + * const command = new UpdateItemCommand(input); + * const response = await client.send(command); + * /* response is + * { + * Attributes: { + * AlbumTitle: { + * S: "Louder Than Ever" + * }, + * Artist: { + * S: "Acme Band" + * }, + * SongTitle: { + * S: "Happy Day" + * }, + * Year: { + * N: "2015" + * } + * } + * } + * *\/ + * ``` + * + * @public + */ +export declare class UpdateItemCommand extends UpdateItemCommand_base { + /** @internal type navigation helper, not in runtime. 
*/ + protected static __types: { + api: { + input: UpdateItemInput; + output: UpdateItemOutput; + }; + sdk: { + input: UpdateItemCommandInput; + output: UpdateItemCommandOutput; + }; + }; +} diff --git a/amplify/functions/fetchDocuments/node_modules/@aws-sdk/client-dynamodb/dist-types/commands/UpdateKinesisStreamingDestinationCommand.d.ts b/amplify/functions/fetchDocuments/node_modules/@aws-sdk/client-dynamodb/dist-types/commands/UpdateKinesisStreamingDestinationCommand.d.ts new file mode 100644 index 0000000..ec8c635 --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@aws-sdk/client-dynamodb/dist-types/commands/UpdateKinesisStreamingDestinationCommand.d.ts @@ -0,0 +1,121 @@ +import { Command as $Command } from "@smithy/smithy-client"; +import { MetadataBearer as __MetadataBearer } from "@smithy/types"; +import { DynamoDBClientResolvedConfig, ServiceInputTypes, ServiceOutputTypes } from "../DynamoDBClient"; +import { UpdateKinesisStreamingDestinationInput, UpdateKinesisStreamingDestinationOutput } from "../models/models_0"; +/** + * @public + */ +export type { __MetadataBearer }; +export { $Command }; +/** + * @public + * + * The input for {@link UpdateKinesisStreamingDestinationCommand}. + */ +export interface UpdateKinesisStreamingDestinationCommandInput extends UpdateKinesisStreamingDestinationInput { +} +/** + * @public + * + * The output of {@link UpdateKinesisStreamingDestinationCommand}. + */ +export interface UpdateKinesisStreamingDestinationCommandOutput extends UpdateKinesisStreamingDestinationOutput, __MetadataBearer { +} +declare const UpdateKinesisStreamingDestinationCommand_base: { + new (input: UpdateKinesisStreamingDestinationCommandInput): import("@smithy/smithy-client").CommandImpl; + new (__0_0: UpdateKinesisStreamingDestinationCommandInput): import("@smithy/smithy-client").CommandImpl; + getEndpointParameterInstructions(): import("@smithy/middleware-endpoint").EndpointParameterInstructions; +}; +/** + *

The command to update the Kinesis stream destination.

+ * @example + * Use a bare-bones client and the command you need to make an API call. + * ```javascript + * import { DynamoDBClient, UpdateKinesisStreamingDestinationCommand } from "@aws-sdk/client-dynamodb"; // ES Modules import + * // const { DynamoDBClient, UpdateKinesisStreamingDestinationCommand } = require("@aws-sdk/client-dynamodb"); // CommonJS import + * const client = new DynamoDBClient(config); + * const input = { // UpdateKinesisStreamingDestinationInput + * TableName: "STRING_VALUE", // required + * StreamArn: "STRING_VALUE", // required + * UpdateKinesisStreamingConfiguration: { // UpdateKinesisStreamingConfiguration + * ApproximateCreationDateTimePrecision: "MILLISECOND" || "MICROSECOND", + * }, + * }; + * const command = new UpdateKinesisStreamingDestinationCommand(input); + * const response = await client.send(command); + * // { // UpdateKinesisStreamingDestinationOutput + * // TableName: "STRING_VALUE", + * // StreamArn: "STRING_VALUE", + * // DestinationStatus: "ENABLING" || "ACTIVE" || "DISABLING" || "DISABLED" || "ENABLE_FAILED" || "UPDATING", + * // UpdateKinesisStreamingConfiguration: { // UpdateKinesisStreamingConfiguration + * // ApproximateCreationDateTimePrecision: "MILLISECOND" || "MICROSECOND", + * // }, + * // }; + * + * ``` + * + * @param UpdateKinesisStreamingDestinationCommandInput - {@link UpdateKinesisStreamingDestinationCommandInput} + * @returns {@link UpdateKinesisStreamingDestinationCommandOutput} + * @see {@link UpdateKinesisStreamingDestinationCommandInput} for command's `input` shape. + * @see {@link UpdateKinesisStreamingDestinationCommandOutput} for command's `response` shape. + * @see {@link DynamoDBClientResolvedConfig | config} for DynamoDBClient's `config` shape. + * + * @throws {@link InternalServerError} (server fault) + *

An error occurred on the server side.

+ * + * @throws {@link InvalidEndpointException} (client fault) + * + * @throws {@link LimitExceededException} (client fault) + *

There is no limit to the number of daily on-demand backups that can be taken.

+ *

For most purposes, up to 500 simultaneous table operations are allowed per account. These operations + * include CreateTable, UpdateTable, + * DeleteTable,UpdateTimeToLive, + * RestoreTableFromBackup, and RestoreTableToPointInTime.

+ *

When you are creating a table with one or more secondary + * indexes, you can have up to 250 such requests running at a time. However, if the table or + * index specifications are complex, then DynamoDB might temporarily reduce the number + * of concurrent operations.

+ *

When importing into DynamoDB, up to 50 simultaneous import table operations are allowed per account.

+ *

There is a soft account quota of 2,500 tables.

+ *

GetRecords was called with a value of more than 1000 for the limit request parameter.

+ *

More than 2 processes are reading from the same streams shard at the same time. Exceeding + * this limit may result in request throttling.

+ * + * @throws {@link ResourceInUseException} (client fault) + *

The operation conflicts with the resource's availability. For example:

+ *
    + *
  • + *

    You attempted to recreate an existing table.

    + *
  • + *
  • + *

    You tried to delete a table currently in the CREATING state.

    + *
  • + *
  • + *

    You tried to update a resource that was already being updated.

    + *
  • + *
+ *

When appropriate, wait for the ongoing update to complete and attempt the request again.

+ * + * @throws {@link ResourceNotFoundException} (client fault) + *

The operation tried to access a nonexistent table or index. The resource might not + * be specified correctly, or its status might not be ACTIVE.

+ * + * @throws {@link DynamoDBServiceException} + *

Base exception class for all service exceptions from DynamoDB service.

+ * + * + * @public + */ +export declare class UpdateKinesisStreamingDestinationCommand extends UpdateKinesisStreamingDestinationCommand_base { + /** @internal type navigation helper, not in runtime. */ + protected static __types: { + api: { + input: UpdateKinesisStreamingDestinationInput; + output: UpdateKinesisStreamingDestinationOutput; + }; + sdk: { + input: UpdateKinesisStreamingDestinationCommandInput; + output: UpdateKinesisStreamingDestinationCommandOutput; + }; + }; +} diff --git a/amplify/functions/fetchDocuments/node_modules/@aws-sdk/client-dynamodb/dist-types/commands/UpdateTableCommand.d.ts b/amplify/functions/fetchDocuments/node_modules/@aws-sdk/client-dynamodb/dist-types/commands/UpdateTableCommand.d.ts new file mode 100644 index 0000000..cf64a60 --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@aws-sdk/client-dynamodb/dist-types/commands/UpdateTableCommand.d.ts @@ -0,0 +1,437 @@ +import { Command as $Command } from "@smithy/smithy-client"; +import { MetadataBearer as __MetadataBearer } from "@smithy/types"; +import { DynamoDBClientResolvedConfig, ServiceInputTypes, ServiceOutputTypes } from "../DynamoDBClient"; +import { UpdateTableInput, UpdateTableOutput } from "../models/models_0"; +/** + * @public + */ +export type { __MetadataBearer }; +export { $Command }; +/** + * @public + * + * The input for {@link UpdateTableCommand}. + */ +export interface UpdateTableCommandInput extends UpdateTableInput { +} +/** + * @public + * + * The output of {@link UpdateTableCommand}. + */ +export interface UpdateTableCommandOutput extends UpdateTableOutput, __MetadataBearer { +} +declare const UpdateTableCommand_base: { + new (input: UpdateTableCommandInput): import("@smithy/smithy-client").CommandImpl; + new (__0_0: UpdateTableCommandInput): import("@smithy/smithy-client").CommandImpl; + getEndpointParameterInstructions(): import("@smithy/middleware-endpoint").EndpointParameterInstructions; +}; +/** + *

Modifies the provisioned throughput settings, global secondary indexes, or DynamoDB + * Streams settings for a given table.

+ * + *

For global tables, this operation only applies to global tables using Version + * 2019.11.21 (Current version).

+ *
+ *

You can only perform one of the following operations at once:

+ *
    + *
  • + *

    Modify the provisioned throughput settings of the table.

    + *
  • + *
  • + *

    Remove a global secondary index from the table.

    + *
  • + *
  • + *

    Create a new global secondary index on the table. After the index begins + * backfilling, you can use UpdateTable to perform other + * operations.

    + *
  • + *
+ *

+ * UpdateTable is an asynchronous operation; while it's executing, the table + * status changes from ACTIVE to UPDATING. While it's + * UPDATING, you can't issue another UpdateTable request. + * When the table returns to the ACTIVE state, the UpdateTable + * operation is complete.

+ * @example + * Use a bare-bones client and the command you need to make an API call. + * ```javascript + * import { DynamoDBClient, UpdateTableCommand } from "@aws-sdk/client-dynamodb"; // ES Modules import + * // const { DynamoDBClient, UpdateTableCommand } = require("@aws-sdk/client-dynamodb"); // CommonJS import + * const client = new DynamoDBClient(config); + * const input = { // UpdateTableInput + * AttributeDefinitions: [ // AttributeDefinitions + * { // AttributeDefinition + * AttributeName: "STRING_VALUE", // required + * AttributeType: "S" || "N" || "B", // required + * }, + * ], + * TableName: "STRING_VALUE", // required + * BillingMode: "PROVISIONED" || "PAY_PER_REQUEST", + * ProvisionedThroughput: { // ProvisionedThroughput + * ReadCapacityUnits: Number("long"), // required + * WriteCapacityUnits: Number("long"), // required + * }, + * GlobalSecondaryIndexUpdates: [ // GlobalSecondaryIndexUpdateList + * { // GlobalSecondaryIndexUpdate + * Update: { // UpdateGlobalSecondaryIndexAction + * IndexName: "STRING_VALUE", // required + * ProvisionedThroughput: { + * ReadCapacityUnits: Number("long"), // required + * WriteCapacityUnits: Number("long"), // required + * }, + * OnDemandThroughput: { // OnDemandThroughput + * MaxReadRequestUnits: Number("long"), + * MaxWriteRequestUnits: Number("long"), + * }, + * WarmThroughput: { // WarmThroughput + * ReadUnitsPerSecond: Number("long"), + * WriteUnitsPerSecond: Number("long"), + * }, + * }, + * Create: { // CreateGlobalSecondaryIndexAction + * IndexName: "STRING_VALUE", // required + * KeySchema: [ // KeySchema // required + * { // KeySchemaElement + * AttributeName: "STRING_VALUE", // required + * KeyType: "HASH" || "RANGE", // required + * }, + * ], + * Projection: { // Projection + * ProjectionType: "ALL" || "KEYS_ONLY" || "INCLUDE", + * NonKeyAttributes: [ // NonKeyAttributeNameList + * "STRING_VALUE", + * ], + * }, + * ProvisionedThroughput: { + * ReadCapacityUnits: Number("long"), // required + * 
WriteCapacityUnits: Number("long"), // required + * }, + * OnDemandThroughput: { + * MaxReadRequestUnits: Number("long"), + * MaxWriteRequestUnits: Number("long"), + * }, + * WarmThroughput: { + * ReadUnitsPerSecond: Number("long"), + * WriteUnitsPerSecond: Number("long"), + * }, + * }, + * Delete: { // DeleteGlobalSecondaryIndexAction + * IndexName: "STRING_VALUE", // required + * }, + * }, + * ], + * StreamSpecification: { // StreamSpecification + * StreamEnabled: true || false, // required + * StreamViewType: "NEW_IMAGE" || "OLD_IMAGE" || "NEW_AND_OLD_IMAGES" || "KEYS_ONLY", + * }, + * SSESpecification: { // SSESpecification + * Enabled: true || false, + * SSEType: "AES256" || "KMS", + * KMSMasterKeyId: "STRING_VALUE", + * }, + * ReplicaUpdates: [ // ReplicationGroupUpdateList + * { // ReplicationGroupUpdate + * Create: { // CreateReplicationGroupMemberAction + * RegionName: "STRING_VALUE", // required + * KMSMasterKeyId: "STRING_VALUE", + * ProvisionedThroughputOverride: { // ProvisionedThroughputOverride + * ReadCapacityUnits: Number("long"), + * }, + * OnDemandThroughputOverride: { // OnDemandThroughputOverride + * MaxReadRequestUnits: Number("long"), + * }, + * GlobalSecondaryIndexes: [ // ReplicaGlobalSecondaryIndexList + * { // ReplicaGlobalSecondaryIndex + * IndexName: "STRING_VALUE", // required + * ProvisionedThroughputOverride: { + * ReadCapacityUnits: Number("long"), + * }, + * OnDemandThroughputOverride: { + * MaxReadRequestUnits: Number("long"), + * }, + * }, + * ], + * TableClassOverride: "STANDARD" || "STANDARD_INFREQUENT_ACCESS", + * }, + * Update: { // UpdateReplicationGroupMemberAction + * RegionName: "STRING_VALUE", // required + * KMSMasterKeyId: "STRING_VALUE", + * ProvisionedThroughputOverride: { + * ReadCapacityUnits: Number("long"), + * }, + * OnDemandThroughputOverride: { + * MaxReadRequestUnits: Number("long"), + * }, + * GlobalSecondaryIndexes: [ + * { + * IndexName: "STRING_VALUE", // required + * ProvisionedThroughputOverride: { + * 
ReadCapacityUnits: Number("long"), + * }, + * OnDemandThroughputOverride: { + * MaxReadRequestUnits: Number("long"), + * }, + * }, + * ], + * TableClassOverride: "STANDARD" || "STANDARD_INFREQUENT_ACCESS", + * }, + * Delete: { // DeleteReplicationGroupMemberAction + * RegionName: "STRING_VALUE", // required + * }, + * }, + * ], + * TableClass: "STANDARD" || "STANDARD_INFREQUENT_ACCESS", + * DeletionProtectionEnabled: true || false, + * MultiRegionConsistency: "EVENTUAL" || "STRONG", + * OnDemandThroughput: { + * MaxReadRequestUnits: Number("long"), + * MaxWriteRequestUnits: Number("long"), + * }, + * WarmThroughput: { + * ReadUnitsPerSecond: Number("long"), + * WriteUnitsPerSecond: Number("long"), + * }, + * }; + * const command = new UpdateTableCommand(input); + * const response = await client.send(command); + * // { // UpdateTableOutput + * // TableDescription: { // TableDescription + * // AttributeDefinitions: [ // AttributeDefinitions + * // { // AttributeDefinition + * // AttributeName: "STRING_VALUE", // required + * // AttributeType: "S" || "N" || "B", // required + * // }, + * // ], + * // TableName: "STRING_VALUE", + * // KeySchema: [ // KeySchema + * // { // KeySchemaElement + * // AttributeName: "STRING_VALUE", // required + * // KeyType: "HASH" || "RANGE", // required + * // }, + * // ], + * // TableStatus: "CREATING" || "UPDATING" || "DELETING" || "ACTIVE" || "INACCESSIBLE_ENCRYPTION_CREDENTIALS" || "ARCHIVING" || "ARCHIVED", + * // CreationDateTime: new Date("TIMESTAMP"), + * // ProvisionedThroughput: { // ProvisionedThroughputDescription + * // LastIncreaseDateTime: new Date("TIMESTAMP"), + * // LastDecreaseDateTime: new Date("TIMESTAMP"), + * // NumberOfDecreasesToday: Number("long"), + * // ReadCapacityUnits: Number("long"), + * // WriteCapacityUnits: Number("long"), + * // }, + * // TableSizeBytes: Number("long"), + * // ItemCount: Number("long"), + * // TableArn: "STRING_VALUE", + * // TableId: "STRING_VALUE", + * // BillingModeSummary: { // 
BillingModeSummary + * // BillingMode: "PROVISIONED" || "PAY_PER_REQUEST", + * // LastUpdateToPayPerRequestDateTime: new Date("TIMESTAMP"), + * // }, + * // LocalSecondaryIndexes: [ // LocalSecondaryIndexDescriptionList + * // { // LocalSecondaryIndexDescription + * // IndexName: "STRING_VALUE", + * // KeySchema: [ + * // { + * // AttributeName: "STRING_VALUE", // required + * // KeyType: "HASH" || "RANGE", // required + * // }, + * // ], + * // Projection: { // Projection + * // ProjectionType: "ALL" || "KEYS_ONLY" || "INCLUDE", + * // NonKeyAttributes: [ // NonKeyAttributeNameList + * // "STRING_VALUE", + * // ], + * // }, + * // IndexSizeBytes: Number("long"), + * // ItemCount: Number("long"), + * // IndexArn: "STRING_VALUE", + * // }, + * // ], + * // GlobalSecondaryIndexes: [ // GlobalSecondaryIndexDescriptionList + * // { // GlobalSecondaryIndexDescription + * // IndexName: "STRING_VALUE", + * // KeySchema: [ + * // { + * // AttributeName: "STRING_VALUE", // required + * // KeyType: "HASH" || "RANGE", // required + * // }, + * // ], + * // Projection: { + * // ProjectionType: "ALL" || "KEYS_ONLY" || "INCLUDE", + * // NonKeyAttributes: [ + * // "STRING_VALUE", + * // ], + * // }, + * // IndexStatus: "CREATING" || "UPDATING" || "DELETING" || "ACTIVE", + * // Backfilling: true || false, + * // ProvisionedThroughput: { + * // LastIncreaseDateTime: new Date("TIMESTAMP"), + * // LastDecreaseDateTime: new Date("TIMESTAMP"), + * // NumberOfDecreasesToday: Number("long"), + * // ReadCapacityUnits: Number("long"), + * // WriteCapacityUnits: Number("long"), + * // }, + * // IndexSizeBytes: Number("long"), + * // ItemCount: Number("long"), + * // IndexArn: "STRING_VALUE", + * // OnDemandThroughput: { // OnDemandThroughput + * // MaxReadRequestUnits: Number("long"), + * // MaxWriteRequestUnits: Number("long"), + * // }, + * // WarmThroughput: { // GlobalSecondaryIndexWarmThroughputDescription + * // ReadUnitsPerSecond: Number("long"), + * // WriteUnitsPerSecond: 
Number("long"), + * // Status: "CREATING" || "UPDATING" || "DELETING" || "ACTIVE", + * // }, + * // }, + * // ], + * // StreamSpecification: { // StreamSpecification + * // StreamEnabled: true || false, // required + * // StreamViewType: "NEW_IMAGE" || "OLD_IMAGE" || "NEW_AND_OLD_IMAGES" || "KEYS_ONLY", + * // }, + * // LatestStreamLabel: "STRING_VALUE", + * // LatestStreamArn: "STRING_VALUE", + * // GlobalTableVersion: "STRING_VALUE", + * // Replicas: [ // ReplicaDescriptionList + * // { // ReplicaDescription + * // RegionName: "STRING_VALUE", + * // ReplicaStatus: "CREATING" || "CREATION_FAILED" || "UPDATING" || "DELETING" || "ACTIVE" || "REGION_DISABLED" || "INACCESSIBLE_ENCRYPTION_CREDENTIALS", + * // ReplicaStatusDescription: "STRING_VALUE", + * // ReplicaStatusPercentProgress: "STRING_VALUE", + * // KMSMasterKeyId: "STRING_VALUE", + * // ProvisionedThroughputOverride: { // ProvisionedThroughputOverride + * // ReadCapacityUnits: Number("long"), + * // }, + * // OnDemandThroughputOverride: { // OnDemandThroughputOverride + * // MaxReadRequestUnits: Number("long"), + * // }, + * // WarmThroughput: { // TableWarmThroughputDescription + * // ReadUnitsPerSecond: Number("long"), + * // WriteUnitsPerSecond: Number("long"), + * // Status: "CREATING" || "UPDATING" || "DELETING" || "ACTIVE" || "INACCESSIBLE_ENCRYPTION_CREDENTIALS" || "ARCHIVING" || "ARCHIVED", + * // }, + * // GlobalSecondaryIndexes: [ // ReplicaGlobalSecondaryIndexDescriptionList + * // { // ReplicaGlobalSecondaryIndexDescription + * // IndexName: "STRING_VALUE", + * // ProvisionedThroughputOverride: { + * // ReadCapacityUnits: Number("long"), + * // }, + * // OnDemandThroughputOverride: { + * // MaxReadRequestUnits: Number("long"), + * // }, + * // WarmThroughput: { + * // ReadUnitsPerSecond: Number("long"), + * // WriteUnitsPerSecond: Number("long"), + * // Status: "CREATING" || "UPDATING" || "DELETING" || "ACTIVE", + * // }, + * // }, + * // ], + * // ReplicaInaccessibleDateTime: new 
Date("TIMESTAMP"), + * // ReplicaTableClassSummary: { // TableClassSummary + * // TableClass: "STANDARD" || "STANDARD_INFREQUENT_ACCESS", + * // LastUpdateDateTime: new Date("TIMESTAMP"), + * // }, + * // }, + * // ], + * // RestoreSummary: { // RestoreSummary + * // SourceBackupArn: "STRING_VALUE", + * // SourceTableArn: "STRING_VALUE", + * // RestoreDateTime: new Date("TIMESTAMP"), // required + * // RestoreInProgress: true || false, // required + * // }, + * // SSEDescription: { // SSEDescription + * // Status: "ENABLING" || "ENABLED" || "DISABLING" || "DISABLED" || "UPDATING", + * // SSEType: "AES256" || "KMS", + * // KMSMasterKeyArn: "STRING_VALUE", + * // InaccessibleEncryptionDateTime: new Date("TIMESTAMP"), + * // }, + * // ArchivalSummary: { // ArchivalSummary + * // ArchivalDateTime: new Date("TIMESTAMP"), + * // ArchivalReason: "STRING_VALUE", + * // ArchivalBackupArn: "STRING_VALUE", + * // }, + * // TableClassSummary: { + * // TableClass: "STANDARD" || "STANDARD_INFREQUENT_ACCESS", + * // LastUpdateDateTime: new Date("TIMESTAMP"), + * // }, + * // DeletionProtectionEnabled: true || false, + * // OnDemandThroughput: { + * // MaxReadRequestUnits: Number("long"), + * // MaxWriteRequestUnits: Number("long"), + * // }, + * // WarmThroughput: { + * // ReadUnitsPerSecond: Number("long"), + * // WriteUnitsPerSecond: Number("long"), + * // Status: "CREATING" || "UPDATING" || "DELETING" || "ACTIVE" || "INACCESSIBLE_ENCRYPTION_CREDENTIALS" || "ARCHIVING" || "ARCHIVED", + * // }, + * // MultiRegionConsistency: "EVENTUAL" || "STRONG", + * // }, + * // }; + * + * ``` + * + * @param UpdateTableCommandInput - {@link UpdateTableCommandInput} + * @returns {@link UpdateTableCommandOutput} + * @see {@link UpdateTableCommandInput} for command's `input` shape. + * @see {@link UpdateTableCommandOutput} for command's `response` shape. + * @see {@link DynamoDBClientResolvedConfig | config} for DynamoDBClient's `config` shape. 
+ * + * @throws {@link InternalServerError} (server fault) + *

An error occurred on the server side.

+ * + * @throws {@link InvalidEndpointException} (client fault) + * + * @throws {@link LimitExceededException} (client fault) + *

There is no limit to the number of daily on-demand backups that can be taken.

+ *

For most purposes, up to 500 simultaneous table operations are allowed per account. These operations + * include CreateTable, UpdateTable, + * DeleteTable,UpdateTimeToLive, + * RestoreTableFromBackup, and RestoreTableToPointInTime.

+ *

When you are creating a table with one or more secondary + * indexes, you can have up to 250 such requests running at a time. However, if the table or + * index specifications are complex, then DynamoDB might temporarily reduce the number + * of concurrent operations.

+ *

When importing into DynamoDB, up to 50 simultaneous import table operations are allowed per account.

+ *

There is a soft account quota of 2,500 tables.

+ *

GetRecords was called with a value of more than 1000 for the limit request parameter.

+ *

More than 2 processes are reading from the same streams shard at the same time. Exceeding + * this limit may result in request throttling.

+ * + * @throws {@link ResourceInUseException} (client fault) + *

The operation conflicts with the resource's availability. For example:

+ *
    + *
  • + *

    You attempted to recreate an existing table.

    + *
  • + *
  • + *

    You tried to delete a table currently in the CREATING state.

    + *
  • + *
  • + *

    You tried to update a resource that was already being updated.

    + *
  • + *
+ *

When appropriate, wait for the ongoing update to complete and attempt the request again.

+ * + * @throws {@link ResourceNotFoundException} (client fault) + *

The operation tried to access a nonexistent table or index. The resource might not + * be specified correctly, or its status might not be ACTIVE.

+ * + * @throws {@link DynamoDBServiceException} + *

Base exception class for all service exceptions from DynamoDB service.

+ * + * + * @public + */ +export declare class UpdateTableCommand extends UpdateTableCommand_base { + /** @internal type navigation helper, not in runtime. */ + protected static __types: { + api: { + input: UpdateTableInput; + output: UpdateTableOutput; + }; + sdk: { + input: UpdateTableCommandInput; + output: UpdateTableCommandOutput; + }; + }; +} diff --git a/amplify/functions/fetchDocuments/node_modules/@aws-sdk/client-dynamodb/dist-types/commands/UpdateTableReplicaAutoScalingCommand.d.ts b/amplify/functions/fetchDocuments/node_modules/@aws-sdk/client-dynamodb/dist-types/commands/UpdateTableReplicaAutoScalingCommand.d.ts new file mode 100644 index 0000000..e6f341c --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@aws-sdk/client-dynamodb/dist-types/commands/UpdateTableReplicaAutoScalingCommand.d.ts @@ -0,0 +1,244 @@ +import { Command as $Command } from "@smithy/smithy-client"; +import { MetadataBearer as __MetadataBearer } from "@smithy/types"; +import { DynamoDBClientResolvedConfig, ServiceInputTypes, ServiceOutputTypes } from "../DynamoDBClient"; +import { UpdateTableReplicaAutoScalingInput, UpdateTableReplicaAutoScalingOutput } from "../models/models_0"; +/** + * @public + */ +export type { __MetadataBearer }; +export { $Command }; +/** + * @public + * + * The input for {@link UpdateTableReplicaAutoScalingCommand}. + */ +export interface UpdateTableReplicaAutoScalingCommandInput extends UpdateTableReplicaAutoScalingInput { +} +/** + * @public + * + * The output of {@link UpdateTableReplicaAutoScalingCommand}. 
+ */ +export interface UpdateTableReplicaAutoScalingCommandOutput extends UpdateTableReplicaAutoScalingOutput, __MetadataBearer { +} +declare const UpdateTableReplicaAutoScalingCommand_base: { + new (input: UpdateTableReplicaAutoScalingCommandInput): import("@smithy/smithy-client").CommandImpl; + new (__0_0: UpdateTableReplicaAutoScalingCommandInput): import("@smithy/smithy-client").CommandImpl; + getEndpointParameterInstructions(): import("@smithy/middleware-endpoint").EndpointParameterInstructions; +}; +/** + *

Updates auto scaling settings on your global tables at once.

+ * + *

For global tables, this operation only applies to global tables using Version + * 2019.11.21 (Current version).

+ *
+ * @example + * Use a bare-bones client and the command you need to make an API call. + * ```javascript + * import { DynamoDBClient, UpdateTableReplicaAutoScalingCommand } from "@aws-sdk/client-dynamodb"; // ES Modules import + * // const { DynamoDBClient, UpdateTableReplicaAutoScalingCommand } = require("@aws-sdk/client-dynamodb"); // CommonJS import + * const client = new DynamoDBClient(config); + * const input = { // UpdateTableReplicaAutoScalingInput + * GlobalSecondaryIndexUpdates: [ // GlobalSecondaryIndexAutoScalingUpdateList + * { // GlobalSecondaryIndexAutoScalingUpdate + * IndexName: "STRING_VALUE", + * ProvisionedWriteCapacityAutoScalingUpdate: { // AutoScalingSettingsUpdate + * MinimumUnits: Number("long"), + * MaximumUnits: Number("long"), + * AutoScalingDisabled: true || false, + * AutoScalingRoleArn: "STRING_VALUE", + * ScalingPolicyUpdate: { // AutoScalingPolicyUpdate + * PolicyName: "STRING_VALUE", + * TargetTrackingScalingPolicyConfiguration: { // AutoScalingTargetTrackingScalingPolicyConfigurationUpdate + * DisableScaleIn: true || false, + * ScaleInCooldown: Number("int"), + * ScaleOutCooldown: Number("int"), + * TargetValue: Number("double"), // required + * }, + * }, + * }, + * }, + * ], + * TableName: "STRING_VALUE", // required + * ProvisionedWriteCapacityAutoScalingUpdate: { + * MinimumUnits: Number("long"), + * MaximumUnits: Number("long"), + * AutoScalingDisabled: true || false, + * AutoScalingRoleArn: "STRING_VALUE", + * ScalingPolicyUpdate: { + * PolicyName: "STRING_VALUE", + * TargetTrackingScalingPolicyConfiguration: { + * DisableScaleIn: true || false, + * ScaleInCooldown: Number("int"), + * ScaleOutCooldown: Number("int"), + * TargetValue: Number("double"), // required + * }, + * }, + * }, + * ReplicaUpdates: [ // ReplicaAutoScalingUpdateList + * { // ReplicaAutoScalingUpdate + * RegionName: "STRING_VALUE", // required + * ReplicaGlobalSecondaryIndexUpdates: [ // ReplicaGlobalSecondaryIndexAutoScalingUpdateList + * { // 
ReplicaGlobalSecondaryIndexAutoScalingUpdate + * IndexName: "STRING_VALUE", + * ProvisionedReadCapacityAutoScalingUpdate: "", + * }, + * ], + * ReplicaProvisionedReadCapacityAutoScalingUpdate: "", + * }, + * ], + * }; + * const command = new UpdateTableReplicaAutoScalingCommand(input); + * const response = await client.send(command); + * // { // UpdateTableReplicaAutoScalingOutput + * // TableAutoScalingDescription: { // TableAutoScalingDescription + * // TableName: "STRING_VALUE", + * // TableStatus: "CREATING" || "UPDATING" || "DELETING" || "ACTIVE" || "INACCESSIBLE_ENCRYPTION_CREDENTIALS" || "ARCHIVING" || "ARCHIVED", + * // Replicas: [ // ReplicaAutoScalingDescriptionList + * // { // ReplicaAutoScalingDescription + * // RegionName: "STRING_VALUE", + * // GlobalSecondaryIndexes: [ // ReplicaGlobalSecondaryIndexAutoScalingDescriptionList + * // { // ReplicaGlobalSecondaryIndexAutoScalingDescription + * // IndexName: "STRING_VALUE", + * // IndexStatus: "CREATING" || "UPDATING" || "DELETING" || "ACTIVE", + * // ProvisionedReadCapacityAutoScalingSettings: { // AutoScalingSettingsDescription + * // MinimumUnits: Number("long"), + * // MaximumUnits: Number("long"), + * // AutoScalingDisabled: true || false, + * // AutoScalingRoleArn: "STRING_VALUE", + * // ScalingPolicies: [ // AutoScalingPolicyDescriptionList + * // { // AutoScalingPolicyDescription + * // PolicyName: "STRING_VALUE", + * // TargetTrackingScalingPolicyConfiguration: { // AutoScalingTargetTrackingScalingPolicyConfigurationDescription + * // DisableScaleIn: true || false, + * // ScaleInCooldown: Number("int"), + * // ScaleOutCooldown: Number("int"), + * // TargetValue: Number("double"), // required + * // }, + * // }, + * // ], + * // }, + * // ProvisionedWriteCapacityAutoScalingSettings: { + * // MinimumUnits: Number("long"), + * // MaximumUnits: Number("long"), + * // AutoScalingDisabled: true || false, + * // AutoScalingRoleArn: "STRING_VALUE", + * // ScalingPolicies: [ + * // { + * // PolicyName: 
"STRING_VALUE", + * // TargetTrackingScalingPolicyConfiguration: { + * // DisableScaleIn: true || false, + * // ScaleInCooldown: Number("int"), + * // ScaleOutCooldown: Number("int"), + * // TargetValue: Number("double"), // required + * // }, + * // }, + * // ], + * // }, + * // }, + * // ], + * // ReplicaProvisionedReadCapacityAutoScalingSettings: { + * // MinimumUnits: Number("long"), + * // MaximumUnits: Number("long"), + * // AutoScalingDisabled: true || false, + * // AutoScalingRoleArn: "STRING_VALUE", + * // ScalingPolicies: [ + * // { + * // PolicyName: "STRING_VALUE", + * // TargetTrackingScalingPolicyConfiguration: { + * // DisableScaleIn: true || false, + * // ScaleInCooldown: Number("int"), + * // ScaleOutCooldown: Number("int"), + * // TargetValue: Number("double"), // required + * // }, + * // }, + * // ], + * // }, + * // ReplicaProvisionedWriteCapacityAutoScalingSettings: { + * // MinimumUnits: Number("long"), + * // MaximumUnits: Number("long"), + * // AutoScalingDisabled: true || false, + * // AutoScalingRoleArn: "STRING_VALUE", + * // ScalingPolicies: [ + * // { + * // PolicyName: "STRING_VALUE", + * // TargetTrackingScalingPolicyConfiguration: { + * // DisableScaleIn: true || false, + * // ScaleInCooldown: Number("int"), + * // ScaleOutCooldown: Number("int"), + * // TargetValue: Number("double"), // required + * // }, + * // }, + * // ], + * // }, + * // ReplicaStatus: "CREATING" || "CREATION_FAILED" || "UPDATING" || "DELETING" || "ACTIVE" || "REGION_DISABLED" || "INACCESSIBLE_ENCRYPTION_CREDENTIALS", + * // }, + * // ], + * // }, + * // }; + * + * ``` + * + * @param UpdateTableReplicaAutoScalingCommandInput - {@link UpdateTableReplicaAutoScalingCommandInput} + * @returns {@link UpdateTableReplicaAutoScalingCommandOutput} + * @see {@link UpdateTableReplicaAutoScalingCommandInput} for command's `input` shape. + * @see {@link UpdateTableReplicaAutoScalingCommandOutput} for command's `response` shape. 
+ * @see {@link DynamoDBClientResolvedConfig | config} for DynamoDBClient's `config` shape. + * + * @throws {@link InternalServerError} (server fault) + *

An error occurred on the server side.

+ * + * @throws {@link LimitExceededException} (client fault) + *

There is no limit to the number of daily on-demand backups that can be taken.

+ *

For most purposes, up to 500 simultaneous table operations are allowed per account. These operations + * include CreateTable, UpdateTable, + * DeleteTable,UpdateTimeToLive, + * RestoreTableFromBackup, and RestoreTableToPointInTime.

+ *

When you are creating a table with one or more secondary + * indexes, you can have up to 250 such requests running at a time. However, if the table or + * index specifications are complex, then DynamoDB might temporarily reduce the number + * of concurrent operations.

+ *

When importing into DynamoDB, up to 50 simultaneous import table operations are allowed per account.

+ *

There is a soft account quota of 2,500 tables.

+ *

GetRecords was called with a value of more than 1000 for the limit request parameter.

+ *

More than 2 processes are reading from the same streams shard at the same time. Exceeding + * this limit may result in request throttling.

+ * + * @throws {@link ResourceInUseException} (client fault) + *

The operation conflicts with the resource's availability. For example:

+ *
    + *
  • + *

    You attempted to recreate an existing table.

    + *
  • + *
  • + *

    You tried to delete a table currently in the CREATING state.

    + *
  • + *
  • + *

    You tried to update a resource that was already being updated.

    + *
  • + *
+ *

When appropriate, wait for the ongoing update to complete and attempt the request again.

+ * + * @throws {@link ResourceNotFoundException} (client fault) + *

The operation tried to access a nonexistent table or index. The resource might not + * be specified correctly, or its status might not be ACTIVE.

+ * + * @throws {@link DynamoDBServiceException} + *

Base exception class for all service exceptions from DynamoDB service.

+ * + * + * @public + */ +export declare class UpdateTableReplicaAutoScalingCommand extends UpdateTableReplicaAutoScalingCommand_base { + /** @internal type navigation helper, not in runtime. */ + protected static __types: { + api: { + input: UpdateTableReplicaAutoScalingInput; + output: UpdateTableReplicaAutoScalingOutput; + }; + sdk: { + input: UpdateTableReplicaAutoScalingCommandInput; + output: UpdateTableReplicaAutoScalingCommandOutput; + }; + }; +} diff --git a/amplify/functions/fetchDocuments/node_modules/@aws-sdk/client-dynamodb/dist-types/commands/UpdateTimeToLiveCommand.d.ts b/amplify/functions/fetchDocuments/node_modules/@aws-sdk/client-dynamodb/dist-types/commands/UpdateTimeToLiveCommand.d.ts new file mode 100644 index 0000000..b36e07e --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@aws-sdk/client-dynamodb/dist-types/commands/UpdateTimeToLiveCommand.d.ts @@ -0,0 +1,143 @@ +import { Command as $Command } from "@smithy/smithy-client"; +import { MetadataBearer as __MetadataBearer } from "@smithy/types"; +import { DynamoDBClientResolvedConfig, ServiceInputTypes, ServiceOutputTypes } from "../DynamoDBClient"; +import { UpdateTimeToLiveInput, UpdateTimeToLiveOutput } from "../models/models_0"; +/** + * @public + */ +export type { __MetadataBearer }; +export { $Command }; +/** + * @public + * + * The input for {@link UpdateTimeToLiveCommand}. + */ +export interface UpdateTimeToLiveCommandInput extends UpdateTimeToLiveInput { +} +/** + * @public + * + * The output of {@link UpdateTimeToLiveCommand}. 
+ */ +export interface UpdateTimeToLiveCommandOutput extends UpdateTimeToLiveOutput, __MetadataBearer { +} +declare const UpdateTimeToLiveCommand_base: { + new (input: UpdateTimeToLiveCommandInput): import("@smithy/smithy-client").CommandImpl; + new (__0_0: UpdateTimeToLiveCommandInput): import("@smithy/smithy-client").CommandImpl; + getEndpointParameterInstructions(): import("@smithy/middleware-endpoint").EndpointParameterInstructions; +}; +/** + *

The UpdateTimeToLive method enables or disables Time to Live (TTL) for + * the specified table. A successful UpdateTimeToLive call returns the current + * TimeToLiveSpecification. It can take up to one hour for the change to + * fully process. Any additional UpdateTimeToLive calls for the same table + * during this one hour duration result in a ValidationException.

+ *

TTL compares the current time in epoch time format to the time stored in the TTL + * attribute of an item. If the epoch time value stored in the attribute is less than the + * current time, the item is marked as expired and subsequently deleted.

+ * + *

The epoch time format is the number of seconds elapsed since 12:00:00 AM January + * 1, 1970 UTC.

+ *
+ *

DynamoDB deletes expired items on a best-effort basis to ensure availability of + * throughput for other data operations.

+ * + *

DynamoDB typically deletes expired items within two days of expiration. The exact + * duration within which an item gets deleted after expiration is specific to the + * nature of the workload. Items that have expired and not been deleted will still show + * up in reads, queries, and scans.

+ *
+ *

As items are deleted, they are removed from any local secondary index and global + * secondary index immediately in the same eventually consistent way as a standard delete + * operation.

+ *

For more information, see Time To Live in the + * Amazon DynamoDB Developer Guide.

+ * @example + * Use a bare-bones client and the command you need to make an API call. + * ```javascript + * import { DynamoDBClient, UpdateTimeToLiveCommand } from "@aws-sdk/client-dynamodb"; // ES Modules import + * // const { DynamoDBClient, UpdateTimeToLiveCommand } = require("@aws-sdk/client-dynamodb"); // CommonJS import + * const client = new DynamoDBClient(config); + * const input = { // UpdateTimeToLiveInput + * TableName: "STRING_VALUE", // required + * TimeToLiveSpecification: { // TimeToLiveSpecification + * Enabled: true || false, // required + * AttributeName: "STRING_VALUE", // required + * }, + * }; + * const command = new UpdateTimeToLiveCommand(input); + * const response = await client.send(command); + * // { // UpdateTimeToLiveOutput + * // TimeToLiveSpecification: { // TimeToLiveSpecification + * // Enabled: true || false, // required + * // AttributeName: "STRING_VALUE", // required + * // }, + * // }; + * + * ``` + * + * @param UpdateTimeToLiveCommandInput - {@link UpdateTimeToLiveCommandInput} + * @returns {@link UpdateTimeToLiveCommandOutput} + * @see {@link UpdateTimeToLiveCommandInput} for command's `input` shape. + * @see {@link UpdateTimeToLiveCommandOutput} for command's `response` shape. + * @see {@link DynamoDBClientResolvedConfig | config} for DynamoDBClient's `config` shape. + * + * @throws {@link InternalServerError} (server fault) + *

An error occurred on the server side.

+ * + * @throws {@link InvalidEndpointException} (client fault) + * + * @throws {@link LimitExceededException} (client fault) + *

There is no limit to the number of daily on-demand backups that can be taken.

+ *

For most purposes, up to 500 simultaneous table operations are allowed per account. These operations + * include CreateTable, UpdateTable, + * DeleteTable,UpdateTimeToLive, + * RestoreTableFromBackup, and RestoreTableToPointInTime.

+ *

When you are creating a table with one or more secondary + * indexes, you can have up to 250 such requests running at a time. However, if the table or + * index specifications are complex, then DynamoDB might temporarily reduce the number + * of concurrent operations.

+ *

When importing into DynamoDB, up to 50 simultaneous import table operations are allowed per account.

+ *

There is a soft account quota of 2,500 tables.

+ *

GetRecords was called with a value of more than 1000 for the limit request parameter.

+ *

More than 2 processes are reading from the same streams shard at the same time. Exceeding + * this limit may result in request throttling.

+ * + * @throws {@link ResourceInUseException} (client fault) + *

The operation conflicts with the resource's availability. For example:

+ *
    + *
  • + *

    You attempted to recreate an existing table.

    + *
  • + *
  • + *

    You tried to delete a table currently in the CREATING state.

    + *
  • + *
  • + *

    You tried to update a resource that was already being updated.

    + *
  • + *
+ *

When appropriate, wait for the ongoing update to complete and attempt the request again.

+ * + * @throws {@link ResourceNotFoundException} (client fault) + *

The operation tried to access a nonexistent table or index. The resource might not + * be specified correctly, or its status might not be ACTIVE.

+ * + * @throws {@link DynamoDBServiceException} + *

Base exception class for all service exceptions from DynamoDB service.

+ * + * + * @public + */ +export declare class UpdateTimeToLiveCommand extends UpdateTimeToLiveCommand_base { + /** @internal type navigation helper, not in runtime. */ + protected static __types: { + api: { + input: UpdateTimeToLiveInput; + output: UpdateTimeToLiveOutput; + }; + sdk: { + input: UpdateTimeToLiveCommandInput; + output: UpdateTimeToLiveCommandOutput; + }; + }; +} diff --git a/amplify/functions/fetchDocuments/node_modules/@aws-sdk/client-dynamodb/dist-types/commands/index.d.ts b/amplify/functions/fetchDocuments/node_modules/@aws-sdk/client-dynamodb/dist-types/commands/index.d.ts new file mode 100644 index 0000000..a5053a4 --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@aws-sdk/client-dynamodb/dist-types/commands/index.d.ts @@ -0,0 +1,57 @@ +export * from "./BatchExecuteStatementCommand"; +export * from "./BatchGetItemCommand"; +export * from "./BatchWriteItemCommand"; +export * from "./CreateBackupCommand"; +export * from "./CreateGlobalTableCommand"; +export * from "./CreateTableCommand"; +export * from "./DeleteBackupCommand"; +export * from "./DeleteItemCommand"; +export * from "./DeleteResourcePolicyCommand"; +export * from "./DeleteTableCommand"; +export * from "./DescribeBackupCommand"; +export * from "./DescribeContinuousBackupsCommand"; +export * from "./DescribeContributorInsightsCommand"; +export * from "./DescribeEndpointsCommand"; +export * from "./DescribeExportCommand"; +export * from "./DescribeGlobalTableCommand"; +export * from "./DescribeGlobalTableSettingsCommand"; +export * from "./DescribeImportCommand"; +export * from "./DescribeKinesisStreamingDestinationCommand"; +export * from "./DescribeLimitsCommand"; +export * from "./DescribeTableCommand"; +export * from "./DescribeTableReplicaAutoScalingCommand"; +export * from "./DescribeTimeToLiveCommand"; +export * from "./DisableKinesisStreamingDestinationCommand"; +export * from "./EnableKinesisStreamingDestinationCommand"; +export * from 
"./ExecuteStatementCommand"; +export * from "./ExecuteTransactionCommand"; +export * from "./ExportTableToPointInTimeCommand"; +export * from "./GetItemCommand"; +export * from "./GetResourcePolicyCommand"; +export * from "./ImportTableCommand"; +export * from "./ListBackupsCommand"; +export * from "./ListContributorInsightsCommand"; +export * from "./ListExportsCommand"; +export * from "./ListGlobalTablesCommand"; +export * from "./ListImportsCommand"; +export * from "./ListTablesCommand"; +export * from "./ListTagsOfResourceCommand"; +export * from "./PutItemCommand"; +export * from "./PutResourcePolicyCommand"; +export * from "./QueryCommand"; +export * from "./RestoreTableFromBackupCommand"; +export * from "./RestoreTableToPointInTimeCommand"; +export * from "./ScanCommand"; +export * from "./TagResourceCommand"; +export * from "./TransactGetItemsCommand"; +export * from "./TransactWriteItemsCommand"; +export * from "./UntagResourceCommand"; +export * from "./UpdateContinuousBackupsCommand"; +export * from "./UpdateContributorInsightsCommand"; +export * from "./UpdateGlobalTableCommand"; +export * from "./UpdateGlobalTableSettingsCommand"; +export * from "./UpdateItemCommand"; +export * from "./UpdateKinesisStreamingDestinationCommand"; +export * from "./UpdateTableCommand"; +export * from "./UpdateTableReplicaAutoScalingCommand"; +export * from "./UpdateTimeToLiveCommand"; diff --git a/amplify/functions/fetchDocuments/node_modules/@aws-sdk/client-dynamodb/dist-types/endpoint/EndpointParameters.d.ts b/amplify/functions/fetchDocuments/node_modules/@aws-sdk/client-dynamodb/dist-types/endpoint/EndpointParameters.d.ts new file mode 100644 index 0000000..057fd52 --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@aws-sdk/client-dynamodb/dist-types/endpoint/EndpointParameters.d.ts @@ -0,0 +1,54 @@ +import { Endpoint, EndpointParameters as __EndpointParameters, EndpointV2, Provider } from "@smithy/types"; +/** + * @public + */ +export interface 
ClientInputEndpointParameters { + region?: string | Provider; + useDualstackEndpoint?: boolean | Provider; + useFipsEndpoint?: boolean | Provider; + endpoint?: string | Provider | Endpoint | Provider | EndpointV2 | Provider; + accountId?: string | Provider; + accountIdEndpointMode?: string | Provider; +} +export type ClientResolvedEndpointParameters = ClientInputEndpointParameters & { + defaultSigningName: string; +}; +export declare const resolveClientEndpointParameters: (options: T & ClientInputEndpointParameters) => T & ClientInputEndpointParameters & { + defaultSigningName: string; +}; +export declare const commonParams: { + readonly UseFIPS: { + readonly type: "builtInParams"; + readonly name: "useFipsEndpoint"; + }; + readonly AccountId: { + readonly type: "builtInParams"; + readonly name: "accountId"; + }; + readonly Endpoint: { + readonly type: "builtInParams"; + readonly name: "endpoint"; + }; + readonly Region: { + readonly type: "builtInParams"; + readonly name: "region"; + }; + readonly UseDualStack: { + readonly type: "builtInParams"; + readonly name: "useDualstackEndpoint"; + }; + readonly AccountIdEndpointMode: { + readonly type: "builtInParams"; + readonly name: "accountIdEndpointMode"; + }; +}; +export interface EndpointParameters extends __EndpointParameters { + Region?: string; + UseDualStack?: boolean; + UseFIPS?: boolean; + Endpoint?: string; + AccountId?: string; + AccountIdEndpointMode?: string; + ResourceArn?: string; + ResourceArnList?: string[]; +} diff --git a/amplify/functions/fetchDocuments/node_modules/@aws-sdk/client-dynamodb/dist-types/endpoint/endpointResolver.d.ts b/amplify/functions/fetchDocuments/node_modules/@aws-sdk/client-dynamodb/dist-types/endpoint/endpointResolver.d.ts new file mode 100644 index 0000000..70a8eae --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@aws-sdk/client-dynamodb/dist-types/endpoint/endpointResolver.d.ts @@ -0,0 +1,5 @@ +import { EndpointV2, Logger } from "@smithy/types"; +import { 
EndpointParameters } from "./EndpointParameters"; +export declare const defaultEndpointResolver: (endpointParams: EndpointParameters, context?: { + logger?: Logger; +}) => EndpointV2; diff --git a/amplify/functions/fetchDocuments/node_modules/@aws-sdk/client-dynamodb/dist-types/endpoint/ruleset.d.ts b/amplify/functions/fetchDocuments/node_modules/@aws-sdk/client-dynamodb/dist-types/endpoint/ruleset.d.ts new file mode 100644 index 0000000..4b23899 --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@aws-sdk/client-dynamodb/dist-types/endpoint/ruleset.d.ts @@ -0,0 +1,2 @@ +import { RuleSetObject } from "@smithy/types"; +export declare const ruleSet: RuleSetObject; diff --git a/amplify/functions/fetchDocuments/node_modules/@aws-sdk/client-dynamodb/dist-types/extensionConfiguration.d.ts b/amplify/functions/fetchDocuments/node_modules/@aws-sdk/client-dynamodb/dist-types/extensionConfiguration.d.ts new file mode 100644 index 0000000..3aa1e50 --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@aws-sdk/client-dynamodb/dist-types/extensionConfiguration.d.ts @@ -0,0 +1,9 @@ +import { AwsRegionExtensionConfiguration } from "@aws-sdk/types"; +import { HttpHandlerExtensionConfiguration } from "@smithy/protocol-http"; +import { DefaultExtensionConfiguration } from "@smithy/types"; +import { HttpAuthExtensionConfiguration } from "./auth/httpAuthExtensionConfiguration"; +/** + * @internal + */ +export interface DynamoDBExtensionConfiguration extends HttpHandlerExtensionConfiguration, DefaultExtensionConfiguration, AwsRegionExtensionConfiguration, HttpAuthExtensionConfiguration { +} diff --git a/amplify/functions/fetchDocuments/node_modules/@aws-sdk/client-dynamodb/dist-types/index.d.ts b/amplify/functions/fetchDocuments/node_modules/@aws-sdk/client-dynamodb/dist-types/index.d.ts new file mode 100644 index 0000000..bb6be8f --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@aws-sdk/client-dynamodb/dist-types/index.d.ts @@ -0,0 +1,31 @@ 
+/** + * Amazon DynamoDB + *

Amazon DynamoDB is a fully managed NoSQL database service that provides fast + * and predictable performance with seamless scalability. DynamoDB lets you + * offload the administrative burdens of operating and scaling a distributed database, so + * that you don't have to worry about hardware provisioning, setup and configuration, + * replication, software patching, or cluster scaling.

+ *

With DynamoDB, you can create database tables that can store and retrieve + * any amount of data, and serve any level of request traffic. You can scale up or scale + * down your tables' throughput capacity without downtime or performance degradation, and + * use the Amazon Web Services Management Console to monitor resource utilization and performance + * metrics.

+ *

DynamoDB automatically spreads the data and traffic for your tables over + * a sufficient number of servers to handle your throughput and storage requirements, while + * maintaining consistent and fast performance. All of your data is stored on solid state + * disks (SSDs) and automatically replicated across multiple Availability Zones in an + * Amazon Web Services Region, providing built-in high availability and data + * durability.

+ * + * @packageDocumentation + */ +export * from "./DynamoDBClient"; +export * from "./DynamoDB"; +export { ClientInputEndpointParameters } from "./endpoint/EndpointParameters"; +export type { RuntimeExtension } from "./runtimeExtensions"; +export type { DynamoDBExtensionConfiguration } from "./extensionConfiguration"; +export * from "./commands"; +export * from "./pagination"; +export * from "./waiters"; +export * from "./models"; +export { DynamoDBServiceException } from "./models/DynamoDBServiceException"; diff --git a/amplify/functions/fetchDocuments/node_modules/@aws-sdk/client-dynamodb/dist-types/models/DynamoDBServiceException.d.ts b/amplify/functions/fetchDocuments/node_modules/@aws-sdk/client-dynamodb/dist-types/models/DynamoDBServiceException.d.ts new file mode 100644 index 0000000..f8ff019 --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@aws-sdk/client-dynamodb/dist-types/models/DynamoDBServiceException.d.ts @@ -0,0 +1,14 @@ +import { ServiceException as __ServiceException, ServiceExceptionOptions as __ServiceExceptionOptions } from "@smithy/smithy-client"; +export type { __ServiceExceptionOptions }; +export { __ServiceException }; +/** + * @public + * + * Base exception class for all service exceptions from DynamoDB service. 
+ */ +export declare class DynamoDBServiceException extends __ServiceException { + /** + * @internal + */ + constructor(options: __ServiceExceptionOptions); +} diff --git a/amplify/functions/fetchDocuments/node_modules/@aws-sdk/client-dynamodb/dist-types/models/index.d.ts b/amplify/functions/fetchDocuments/node_modules/@aws-sdk/client-dynamodb/dist-types/models/index.d.ts new file mode 100644 index 0000000..09c5d6e --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@aws-sdk/client-dynamodb/dist-types/models/index.d.ts @@ -0,0 +1 @@ +export * from "./models_0"; diff --git a/amplify/functions/fetchDocuments/node_modules/@aws-sdk/client-dynamodb/dist-types/models/models_0.d.ts b/amplify/functions/fetchDocuments/node_modules/@aws-sdk/client-dynamodb/dist-types/models/models_0.d.ts new file mode 100644 index 0000000..9821a22 --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@aws-sdk/client-dynamodb/dist-types/models/models_0.d.ts @@ -0,0 +1,11039 @@ +import { ExceptionOptionType as __ExceptionOptionType } from "@smithy/smithy-client"; +import { DynamoDBServiceException as __BaseException } from "./DynamoDBServiceException"; +/** + * @public + * @enum + */ +export declare const ApproximateCreationDateTimePrecision: { + readonly MICROSECOND: "MICROSECOND"; + readonly MILLISECOND: "MILLISECOND"; +}; +/** + * @public + */ +export type ApproximateCreationDateTimePrecision = (typeof ApproximateCreationDateTimePrecision)[keyof typeof ApproximateCreationDateTimePrecision]; +/** + *

Contains details of a table archival operation.

+ * @public + */ +export interface ArchivalSummary { + /** + *

The date and time when table archival was initiated by DynamoDB, in UNIX epoch time + * format.

+ * @public + */ + ArchivalDateTime?: Date | undefined; + /** + *

The reason DynamoDB archived the table. Currently, the only possible value is:

+ *
    + *
  • + *

    + * INACCESSIBLE_ENCRYPTION_CREDENTIALS - The table was archived due + * to the table's KMS key being inaccessible for more than seven + * days. An On-Demand backup was created at the archival time.

    + *
  • + *
+ * @public + */ + ArchivalReason?: string | undefined; + /** + *

The Amazon Resource Name (ARN) of the backup the table was archived to, when + * applicable in the archival reason. If you wish to restore this backup to the same table + * name, you will need to delete the original table.

+ * @public + */ + ArchivalBackupArn?: string | undefined; +} +/** + * @public + * @enum + */ +export declare const AttributeAction: { + readonly ADD: "ADD"; + readonly DELETE: "DELETE"; + readonly PUT: "PUT"; +}; +/** + * @public + */ +export type AttributeAction = (typeof AttributeAction)[keyof typeof AttributeAction]; +/** + * @public + * @enum + */ +export declare const ScalarAttributeType: { + readonly B: "B"; + readonly N: "N"; + readonly S: "S"; +}; +/** + * @public + */ +export type ScalarAttributeType = (typeof ScalarAttributeType)[keyof typeof ScalarAttributeType]; +/** + *

Represents an attribute for describing the schema for the table and indexes.

+ * @public + */ +export interface AttributeDefinition { + /** + *

A name for the attribute.

+ * @public + */ + AttributeName: string | undefined; + /** + *

The data type for the attribute, where:

+ *
    + *
  • + *

    + * S - the attribute is of type String

    + *
  • + *
  • + *

    + * N - the attribute is of type Number

    + *
  • + *
  • + *

    + * B - the attribute is of type Binary

    + *
  • + *
+ * @public + */ + AttributeType: ScalarAttributeType | undefined; +} +/** + *

Represents the properties of a target tracking scaling policy.

+ * @public + */ +export interface AutoScalingTargetTrackingScalingPolicyConfigurationDescription { + /** + *

Indicates whether scale in by the target tracking policy is disabled. If the value is + * true, scale in is disabled and the target tracking policy won't remove capacity from the + * scalable resource. Otherwise, scale in is enabled and the target tracking policy can + * remove capacity from the scalable resource. The default value is false.

+ * @public + */ + DisableScaleIn?: boolean | undefined; + /** + *

The amount of time, in seconds, after a scale in activity completes before another + * scale in activity can start. The cooldown period is used to block subsequent scale in + * requests until it has expired. You should scale in conservatively to protect your + * application's availability. However, if another alarm triggers a scale out policy during + * the cooldown period after a scale-in, application auto scaling scales out your scalable + * target immediately.

+ * @public + */ + ScaleInCooldown?: number | undefined; + /** + *

The amount of time, in seconds, after a scale out activity completes before another + * scale out activity can start. While the cooldown period is in effect, the capacity that + * has been added by the previous scale out event that initiated the cooldown is calculated + * as part of the desired capacity for the next scale out. You should continuously (but not + * excessively) scale out.

+ * @public + */ + ScaleOutCooldown?: number | undefined; + /** + *

The target value for the metric. The range is 8.515920e-109 to 1.174271e+108 (Base 10) + * or 2e-360 to 2e360 (Base 2).

+ * @public + */ + TargetValue: number | undefined; +} +/** + *

Represents the properties of the scaling policy.

+ * @public + */ +export interface AutoScalingPolicyDescription { + /** + *

The name of the scaling policy.

+ * @public + */ + PolicyName?: string | undefined; + /** + *

Represents a target tracking scaling policy configuration.

+ * @public + */ + TargetTrackingScalingPolicyConfiguration?: AutoScalingTargetTrackingScalingPolicyConfigurationDescription | undefined; +} +/** + *

Represents the settings of a target tracking scaling policy that will be + * modified.

+ * @public + */ +export interface AutoScalingTargetTrackingScalingPolicyConfigurationUpdate { + /** + *

Indicates whether scale in by the target tracking policy is disabled. If the value is + * true, scale in is disabled and the target tracking policy won't remove capacity from the + * scalable resource. Otherwise, scale in is enabled and the target tracking policy can + * remove capacity from the scalable resource. The default value is false.

+ * @public + */ + DisableScaleIn?: boolean | undefined; + /** + *

The amount of time, in seconds, after a scale in activity completes before another + * scale in activity can start. The cooldown period is used to block subsequent scale in + * requests until it has expired. You should scale in conservatively to protect your + * application's availability. However, if another alarm triggers a scale out policy during + * the cooldown period after a scale-in, application auto scaling scales out your scalable + * target immediately.

+ * @public + */ + ScaleInCooldown?: number | undefined; + /** + *

The amount of time, in seconds, after a scale out activity completes before another + * scale out activity can start. While the cooldown period is in effect, the capacity that + * has been added by the previous scale out event that initiated the cooldown is calculated + * as part of the desired capacity for the next scale out. You should continuously (but not + * excessively) scale out.

+ * @public + */ + ScaleOutCooldown?: number | undefined; + /** + *

The target value for the metric. The range is 8.515920e-109 to 1.174271e+108 (Base 10) + * or 2e-360 to 2e360 (Base 2).

+ * @public + */ + TargetValue: number | undefined; +} +/** + *

Represents the auto scaling policy to be modified.

+ * @public + */ +export interface AutoScalingPolicyUpdate { + /** + *

The name of the scaling policy.

+ * @public + */ + PolicyName?: string | undefined; + /** + *

Represents a target tracking scaling policy configuration.

+ * @public + */ + TargetTrackingScalingPolicyConfiguration: AutoScalingTargetTrackingScalingPolicyConfigurationUpdate | undefined; +} +/** + *

Represents the auto scaling settings for a global table or global secondary + * index.

+ * @public + */ +export interface AutoScalingSettingsDescription { + /** + *

The minimum capacity units that a global table or global secondary index should be + * scaled down to.

+ * @public + */ + MinimumUnits?: number | undefined; + /** + *

The maximum capacity units that a global table or global secondary index should be + * scaled up to.

+ * @public + */ + MaximumUnits?: number | undefined; + /** + *

Disabled auto scaling for this global table or global secondary index.

+ * @public + */ + AutoScalingDisabled?: boolean | undefined; + /** + *

Role ARN used for configuring the auto scaling policy.

+ * @public + */ + AutoScalingRoleArn?: string | undefined; + /** + *

Information about the scaling policies.

+ * @public + */ + ScalingPolicies?: AutoScalingPolicyDescription[] | undefined; +} +/** + *

Represents the auto scaling settings to be modified for a global table or global + * secondary index.

+ * @public + */ +export interface AutoScalingSettingsUpdate { + /** + *

The minimum capacity units that a global table or global secondary index should be + * scaled down to.

+ * @public + */ + MinimumUnits?: number | undefined; + /** + *

The maximum capacity units that a global table or global secondary index should be + * scaled up to.

+ * @public + */ + MaximumUnits?: number | undefined; + /** + *

Disabled auto scaling for this global table or global secondary index.

+ * @public + */ + AutoScalingDisabled?: boolean | undefined; + /** + *

Role ARN used for configuring auto scaling policy.

+ * @public + */ + AutoScalingRoleArn?: string | undefined; + /** + *

The scaling policy to apply for scaling target global table or global secondary index + * capacity units.

+ * @public + */ + ScalingPolicyUpdate?: AutoScalingPolicyUpdate | undefined; +} +/** + * @public + * @enum + */ +export declare const BackupStatus: { + readonly AVAILABLE: "AVAILABLE"; + readonly CREATING: "CREATING"; + readonly DELETED: "DELETED"; +}; +/** + * @public + */ +export type BackupStatus = (typeof BackupStatus)[keyof typeof BackupStatus]; +/** + * @public + * @enum + */ +export declare const BackupType: { + readonly AWS_BACKUP: "AWS_BACKUP"; + readonly SYSTEM: "SYSTEM"; + readonly USER: "USER"; +}; +/** + * @public + */ +export type BackupType = (typeof BackupType)[keyof typeof BackupType]; +/** + *

Contains the details of the backup created for the table.

+ * @public + */ +export interface BackupDetails { + /** + *

ARN associated with the backup.

+ * @public + */ + BackupArn: string | undefined; + /** + *

Name of the requested backup.

+ * @public + */ + BackupName: string | undefined; + /** + *

Size of the backup in bytes. DynamoDB updates this value approximately every six + * hours. Recent changes might not be reflected in this value.

+ * @public + */ + BackupSizeBytes?: number | undefined; + /** + *

Backup can be in one of the following states: CREATING, ACTIVE, DELETED.

+ * @public + */ + BackupStatus: BackupStatus | undefined; + /** + *

BackupType:

+ *
    + *
  • + *

    + * USER - You create and manage these using the on-demand backup + * feature.

    + *
  • + *
  • + *

    + * SYSTEM - If you delete a table with point-in-time recovery enabled, + * a SYSTEM backup is automatically created and is retained for 35 + * days (at no additional cost). System backups allow you to restore the deleted + * table to the state it was in just before the point of deletion.

    + *
  • + *
  • + *

    + * AWS_BACKUP - On-demand backup created by you from Backup service.

    + *
  • + *
+ * @public + */ + BackupType: BackupType | undefined; + /** + *

Time at which the backup was created. This is the request time of the backup.

+ * @public + */ + BackupCreationDateTime: Date | undefined; + /** + *

Time at which the automatic on-demand backup created by DynamoDB will + * expire. This SYSTEM on-demand backup expires automatically 35 days after + * its creation.

+ * @public + */ + BackupExpiryDateTime?: Date | undefined; +} +/** + * @public + * @enum + */ +export declare const BillingMode: { + readonly PAY_PER_REQUEST: "PAY_PER_REQUEST"; + readonly PROVISIONED: "PROVISIONED"; +}; +/** + * @public + */ +export type BillingMode = (typeof BillingMode)[keyof typeof BillingMode]; +/** + * @public + * @enum + */ +export declare const KeyType: { + readonly HASH: "HASH"; + readonly RANGE: "RANGE"; +}; +/** + * @public + */ +export type KeyType = (typeof KeyType)[keyof typeof KeyType]; +/** + *

Represents a single element of a key schema. A key schema + * specifies the attributes that make up the primary key of a table, or the key attributes + * of an index.

+ *

A KeySchemaElement represents exactly one attribute of the primary key. + * For example, a simple primary key would be represented by one + * KeySchemaElement (for the partition key). A composite primary key would + * require one KeySchemaElement for the partition key, and another + * KeySchemaElement for the sort key.

+ *

A KeySchemaElement must be a scalar, top-level attribute (not a nested + * attribute). The data type must be one of String, Number, or Binary. The attribute cannot + * be nested within a List or a Map.

+ * @public + */ +export interface KeySchemaElement { + /** + *

The name of a key attribute.

+ * @public + */ + AttributeName: string | undefined; + /** + *

The role that this key attribute will assume:

+ *
    + *
  • + *

    + * HASH - partition key

    + *
  • + *
  • + *

    + * RANGE - sort key

    + *
  • + *
+ * + *

The partition key of an item is also known as its hash + * attribute. The term "hash attribute" derives from DynamoDB's usage of an internal hash function to evenly distribute data items across + * partitions, based on their partition key values.

+ *

The sort key of an item is also known as its range attribute. + * The term "range attribute" derives from the way DynamoDB stores items with + * the same partition key physically close together, in sorted order by the sort key + * value.

+ *
+ * @public + */ + KeyType: KeyType | undefined; +} +/** + *

Sets the maximum number of read and write units for the specified on-demand table. If + * you use this parameter, you must specify MaxReadRequestUnits, + * MaxWriteRequestUnits, or both.

+ * @public + */ +export interface OnDemandThroughput { + /** + *

Maximum number of read request units for the specified table.

+ *

To specify a maximum OnDemandThroughput on your table, set the value of + * MaxReadRequestUnits as greater than or equal to 1. To remove the + * maximum OnDemandThroughput that is currently set on your table, set the + * value of MaxReadRequestUnits to -1.

+ * @public + */ + MaxReadRequestUnits?: number | undefined; + /** + *

Maximum number of write request units for the specified table.

+ *

To specify a maximum OnDemandThroughput on your table, set the value of + * MaxWriteRequestUnits as greater than or equal to 1. To remove the + * maximum OnDemandThroughput that is currently set on your table, set the + * value of MaxWriteRequestUnits to -1.

+ * @public + */ + MaxWriteRequestUnits?: number | undefined; +} +/** + *

Represents the provisioned throughput settings for the specified global secondary + * index. You must use ProvisionedThroughput or + * OnDemandThroughput based on your table’s capacity mode.

+ *

For current minimum and maximum provisioned throughput values, see Service, + * Account, and Table Quotas in the Amazon DynamoDB Developer + * Guide.

+ * @public + */ +export interface ProvisionedThroughput { + /** + *

The maximum number of strongly consistent reads consumed per second before DynamoDB + * returns a ThrottlingException. For more information, see Specifying + * Read and Write Requirements in the Amazon DynamoDB Developer + * Guide.

+ *

If read/write capacity mode is PAY_PER_REQUEST the value is set to + * 0.

+ * @public + */ + ReadCapacityUnits: number | undefined; + /** + *

The maximum number of writes consumed per second before DynamoDB returns a + * ThrottlingException. For more information, see Specifying + * Read and Write Requirements in the Amazon DynamoDB Developer + * Guide.

+ *

If read/write capacity mode is PAY_PER_REQUEST the value is set to + * 0.

+ * @public + */ + WriteCapacityUnits: number | undefined; +} +/** + *

Contains the details of the table when the backup was created.

+ * @public + */ +export interface SourceTableDetails { + /** + *

The name of the table for which the backup was created.

+ * @public + */ + TableName: string | undefined; + /** + *

Unique identifier for the table for which the backup was created.

+ * @public + */ + TableId: string | undefined; + /** + *

ARN of the table for which backup was created.

+ * @public + */ + TableArn?: string | undefined; + /** + *

Size of the table in bytes. Note that this is an approximate value.

+ * @public + */ + TableSizeBytes?: number | undefined; + /** + *

Schema of the table.

+ * @public + */ + KeySchema: KeySchemaElement[] | undefined; + /** + *

Time when the source table was created.

+ * @public + */ + TableCreationDateTime: Date | undefined; + /** + *

Read IOPs and Write IOPS on the table when the backup was created.

+ * @public + */ + ProvisionedThroughput: ProvisionedThroughput | undefined; + /** + *

Sets the maximum number of read and write units for the specified on-demand table. If + * you use this parameter, you must specify MaxReadRequestUnits, + * MaxWriteRequestUnits, or both.

+ * @public + */ + OnDemandThroughput?: OnDemandThroughput | undefined; + /** + *

Number of items in the table. Note that this is an approximate value.

+ * @public + */ + ItemCount?: number | undefined; + /** + *

Controls how you are charged for read and write throughput and how you manage + * capacity. This setting can be changed later.

+ *
    + *
  • + *

    + * PROVISIONED - Sets the read/write capacity mode to + * PROVISIONED. We recommend using PROVISIONED for + * predictable workloads.

    + *
  • + *
  • + *

    + * PAY_PER_REQUEST - Sets the read/write capacity mode to + * PAY_PER_REQUEST. We recommend using + * PAY_PER_REQUEST for unpredictable workloads.

    + *
  • + *
+ * @public + */ + BillingMode?: BillingMode | undefined; +} +/** + * @public + * @enum + */ +export declare const ProjectionType: { + readonly ALL: "ALL"; + readonly INCLUDE: "INCLUDE"; + readonly KEYS_ONLY: "KEYS_ONLY"; +}; +/** + * @public + */ +export type ProjectionType = (typeof ProjectionType)[keyof typeof ProjectionType]; +/** + *

Represents attributes that are copied (projected) from the table into an index. These + * are in addition to the primary key attributes and index key attributes, which are + * automatically projected.

+ * @public + */ +export interface Projection { + /** + *

The set of attributes that are projected into the index:

+ *
    + *
  • + *

    + * KEYS_ONLY - Only the index and primary keys are projected into the + * index.

    + *
  • + *
  • + *

    + * INCLUDE - In addition to the attributes described in + * KEYS_ONLY, the secondary index will include other non-key + * attributes that you specify.

    + *
  • + *
  • + *

    + * ALL - All of the table attributes are projected into the + * index.

    + *
  • + *
+ *

When using the DynamoDB console, ALL is selected by default.

+ * @public + */ + ProjectionType?: ProjectionType | undefined; + /** + *

Represents the non-key attribute names which will be projected into the index.

+ *

For global and local secondary indexes, the total count of NonKeyAttributes summed + * across all of the secondary indexes, must not exceed 100. If you project the same + * attribute into two different indexes, this counts as two distinct attributes when + * determining the total. This limit only applies when you specify the ProjectionType of + * INCLUDE. You still can specify the ProjectionType of ALL to + * project all attributes from the source table, even if the table has more than 100 + * attributes.

+ * @public + */ + NonKeyAttributes?: string[] | undefined; +} +/** + *

Represents the properties of a global secondary index for the table when the backup + * was created.

+ * @public + */ +export interface GlobalSecondaryIndexInfo { + /** + *

The name of the global secondary index.

+ * @public + */ + IndexName?: string | undefined; + /** + *

The complete key schema for a global secondary index, which consists of one or more + * pairs of attribute names and key types:

+ *
    + *
  • + *

    + * HASH - partition key

    + *
  • + *
  • + *

    + * RANGE - sort key

    + *
  • + *
+ * + *

The partition key of an item is also known as its hash + * attribute. The term "hash attribute" derives from DynamoDB's usage of an internal hash function to evenly distribute data items across + * partitions, based on their partition key values.

+ *

The sort key of an item is also known as its range attribute. + * The term "range attribute" derives from the way DynamoDB stores items with + * the same partition key physically close together, in sorted order by the sort key + * value.

+ *
+ * @public + */ + KeySchema?: KeySchemaElement[] | undefined; + /** + *

Represents attributes that are copied (projected) from the table into the global + * secondary index. These are in addition to the primary key attributes and index key + * attributes, which are automatically projected.

+ * @public + */ + Projection?: Projection | undefined; + /** + *

Represents the provisioned throughput settings for the specified global secondary + * index.

+ * @public + */ + ProvisionedThroughput?: ProvisionedThroughput | undefined; + /** + *

Sets the maximum number of read and write units for the specified on-demand table. If + * you use this parameter, you must specify MaxReadRequestUnits, + * MaxWriteRequestUnits, or both.

+ * @public + */ + OnDemandThroughput?: OnDemandThroughput | undefined; +} +/** + *

Represents the properties of a local secondary index for the table when the backup was + * created.

+ * @public + */ +export interface LocalSecondaryIndexInfo { + /** + *

Represents the name of the local secondary index.

+ * @public + */ + IndexName?: string | undefined; + /** + *

The complete key schema for a local secondary index, which consists of one or more + * pairs of attribute names and key types:

+ *
    + *
  • + *

    + * HASH - partition key

    + *
  • + *
  • + *

    + * RANGE - sort key

    + *
  • + *
+ * + *

The partition key of an item is also known as its hash + * attribute. The term "hash attribute" derives from DynamoDB's usage of + * an internal hash function to evenly distribute data items across partitions, based + * on their partition key values.

+ *

The sort key of an item is also known as its range attribute. + * The term "range attribute" derives from the way DynamoDB stores items with the same + * partition key physically close together, in sorted order by the sort key + * value.

+ *
+ * @public + */ + KeySchema?: KeySchemaElement[] | undefined; + /** + *

Represents attributes that are copied (projected) from the table into the global + * secondary index. These are in addition to the primary key attributes and index key + * attributes, which are automatically projected.

+ * @public + */ + Projection?: Projection | undefined; +} +/** + * @public + * @enum + */ +export declare const SSEType: { + readonly AES256: "AES256"; + readonly KMS: "KMS"; +}; +/** + * @public + */ +export type SSEType = (typeof SSEType)[keyof typeof SSEType]; +/** + * @public + * @enum + */ +export declare const SSEStatus: { + readonly DISABLED: "DISABLED"; + readonly DISABLING: "DISABLING"; + readonly ENABLED: "ENABLED"; + readonly ENABLING: "ENABLING"; + readonly UPDATING: "UPDATING"; +}; +/** + * @public + */ +export type SSEStatus = (typeof SSEStatus)[keyof typeof SSEStatus]; +/** + *

The description of the server-side encryption status on the specified table.

+ * @public + */ +export interface SSEDescription { + /** + *

Represents the current state of server-side encryption. The only supported values + * are:

+ *
    + *
  • + *

    + * ENABLED - Server-side encryption is enabled.

    + *
  • + *
  • + *

    + * UPDATING - Server-side encryption is being updated.

    + *
  • + *
+ * @public + */ + Status?: SSEStatus | undefined; + /** + *

Server-side encryption type. The only supported value is:

+ *
    + *
  • + *

    + * KMS - Server-side encryption that uses Key Management Service. The + * key is stored in your account and is managed by KMS (KMS charges apply).

    + *
  • + *
+ * @public + */ + SSEType?: SSEType | undefined; + /** + *

The KMS key ARN used for the KMS encryption.

+ * @public + */ + KMSMasterKeyArn?: string | undefined; + /** + *

Indicates the time, in UNIX epoch date format, when DynamoDB detected that + * the table's KMS key was inaccessible. This attribute will automatically + * be cleared when DynamoDB detects that the table's KMS key is accessible + * again. DynamoDB will initiate the table archival process when table's KMS key remains inaccessible for more than seven days from this date.

+ * @public + */ + InaccessibleEncryptionDateTime?: Date | undefined; +} +/** + * @public + * @enum + */ +export declare const StreamViewType: { + readonly KEYS_ONLY: "KEYS_ONLY"; + readonly NEW_AND_OLD_IMAGES: "NEW_AND_OLD_IMAGES"; + readonly NEW_IMAGE: "NEW_IMAGE"; + readonly OLD_IMAGE: "OLD_IMAGE"; +}; +/** + * @public + */ +export type StreamViewType = (typeof StreamViewType)[keyof typeof StreamViewType]; +/** + *

Represents the DynamoDB Streams configuration for a table in DynamoDB.

+ * @public + */ +export interface StreamSpecification { + /** + *

Indicates whether DynamoDB Streams is enabled (true) or disabled (false) on the + * table.

+ * @public + */ + StreamEnabled: boolean | undefined; + /** + *

When an item in the table is modified, StreamViewType determines what + * information is written to the stream for this table. Valid values for + * StreamViewType are:

+ *
    + *
  • + *

    + * KEYS_ONLY - Only the key attributes of the modified item are + * written to the stream.

    + *
  • + *
  • + *

    + * NEW_IMAGE - The entire item, as it appears after it was modified, + * is written to the stream.

    + *
  • + *
  • + *

    + * OLD_IMAGE - The entire item, as it appeared before it was modified, + * is written to the stream.

    + *
  • + *
  • + *

    + * NEW_AND_OLD_IMAGES - Both the new and the old item images of the + * item are written to the stream.

    + *
  • + *
+ * @public + */ + StreamViewType?: StreamViewType | undefined; +} +/** + * @public + * @enum + */ +export declare const TimeToLiveStatus: { + readonly DISABLED: "DISABLED"; + readonly DISABLING: "DISABLING"; + readonly ENABLED: "ENABLED"; + readonly ENABLING: "ENABLING"; +}; +/** + * @public + */ +export type TimeToLiveStatus = (typeof TimeToLiveStatus)[keyof typeof TimeToLiveStatus]; +/** + *

The description of the Time to Live (TTL) status on the specified table.

+ * @public + */ +export interface TimeToLiveDescription { + /** + *

The TTL status for the table.

+ * @public + */ + TimeToLiveStatus?: TimeToLiveStatus | undefined; + /** + *

The name of the TTL attribute for items in the table.

+ * @public + */ + AttributeName?: string | undefined; +} +/** + *

Contains the details of the features enabled on the table when the backup was created. + * For example, LSIs, GSIs, streams, TTL.

+ * @public + */ +export interface SourceTableFeatureDetails { + /** + *

Represents the LSI properties for the table when the backup was created. It includes + * the IndexName, KeySchema and Projection for the LSIs on the table at the time of backup. + *

+ * @public + */ + LocalSecondaryIndexes?: LocalSecondaryIndexInfo[] | undefined; + /** + *

Represents the GSI properties for the table when the backup was created. It includes + * the IndexName, KeySchema, Projection, and ProvisionedThroughput for the GSIs on the + * table at the time of backup.

+ * @public + */ + GlobalSecondaryIndexes?: GlobalSecondaryIndexInfo[] | undefined; + /** + *

Stream settings on the table when the backup was created.

+ * @public + */ + StreamDescription?: StreamSpecification | undefined; + /** + *

Time to Live settings on the table when the backup was created.

+ * @public + */ + TimeToLiveDescription?: TimeToLiveDescription | undefined; + /** + *

The description of the server-side encryption status on the table when the backup was + * created.

+ * @public + */ + SSEDescription?: SSEDescription | undefined; +} +/** + *

Contains the description of the backup created for the table.

+ * @public + */ +export interface BackupDescription { + /** + *

Contains the details of the backup created for the table.

+ * @public + */ + BackupDetails?: BackupDetails | undefined; + /** + *

Contains the details of the table when the backup was created.

+ * @public + */ + SourceTableDetails?: SourceTableDetails | undefined; + /** + *

Contains the details of the features enabled on the table when the backup was created. + * For example, LSIs, GSIs, streams, TTL.

+ * @public + */ + SourceTableFeatureDetails?: SourceTableFeatureDetails | undefined; +} +/** + *

There is another ongoing conflicting backup control plane operation on the table. + * The backup is either being created, deleted or restored to a table.

+ * @public + */ +export declare class BackupInUseException extends __BaseException { + readonly name: "BackupInUseException"; + readonly $fault: "client"; + /** + * @internal + */ + constructor(opts: __ExceptionOptionType); +} +/** + *

Backup not found for the given BackupARN.

+ * @public + */ +export declare class BackupNotFoundException extends __BaseException { + readonly name: "BackupNotFoundException"; + readonly $fault: "client"; + /** + * @internal + */ + constructor(opts: __ExceptionOptionType); +} +/** + *

Contains details for the backup.

+ * @public + */ +export interface BackupSummary { + /** + *

Name of the table.

+ * @public + */ + TableName?: string | undefined; + /** + *

Unique identifier for the table.

+ * @public + */ + TableId?: string | undefined; + /** + *

ARN associated with the table.

+ * @public + */ + TableArn?: string | undefined; + /** + *

ARN associated with the backup.

+ * @public + */ + BackupArn?: string | undefined; + /** + *

Name of the specified backup.

+ * @public + */ + BackupName?: string | undefined; + /** + *

Time at which the backup was created.

+ * @public + */ + BackupCreationDateTime?: Date | undefined; + /** + *

Time at which the automatic on-demand backup created by DynamoDB will + * expire. This SYSTEM on-demand backup expires automatically 35 days after + * its creation.

+ * @public + */ + BackupExpiryDateTime?: Date | undefined; + /** + *

Backup can be in one of the following states: CREATING, ACTIVE, DELETED.

+ * @public + */ + BackupStatus?: BackupStatus | undefined; + /** + *

BackupType:

+ *
    + *
  • + *

    + * USER - You create and manage these using the on-demand backup + * feature.

    + *
  • + *
  • + *

    + * SYSTEM - If you delete a table with point-in-time recovery enabled, + * a SYSTEM backup is automatically created and is retained for 35 + * days (at no additional cost). System backups allow you to restore the deleted + * table to the state it was in just before the point of deletion.

    + *
  • + *
  • + *

    + * AWS_BACKUP - On-demand backup created by you from Backup service.

    + *
  • + *
+ * @public + */ + BackupType?: BackupType | undefined; + /** + *

Size of the backup in bytes.

+ * @public + */ + BackupSizeBytes?: number | undefined; +} +/** + * @public + * @enum + */ +export declare const BackupTypeFilter: { + readonly ALL: "ALL"; + readonly AWS_BACKUP: "AWS_BACKUP"; + readonly SYSTEM: "SYSTEM"; + readonly USER: "USER"; +}; +/** + * @public + */ +export type BackupTypeFilter = (typeof BackupTypeFilter)[keyof typeof BackupTypeFilter]; +/** + * @public + * @enum + */ +export declare const ReturnConsumedCapacity: { + readonly INDEXES: "INDEXES"; + readonly NONE: "NONE"; + readonly TOTAL: "TOTAL"; +}; +/** + * @public + */ +export type ReturnConsumedCapacity = (typeof ReturnConsumedCapacity)[keyof typeof ReturnConsumedCapacity]; +/** + * @public + * @enum + */ +export declare const ReturnValuesOnConditionCheckFailure: { + readonly ALL_OLD: "ALL_OLD"; + readonly NONE: "NONE"; +}; +/** + * @public + */ +export type ReturnValuesOnConditionCheckFailure = (typeof ReturnValuesOnConditionCheckFailure)[keyof typeof ReturnValuesOnConditionCheckFailure]; +/** + *

Represents the amount of provisioned throughput capacity consumed on a table or an + * index.

+ * @public + */ +export interface Capacity { + /** + *

The total number of read capacity units consumed on a table or an index.

+ * @public + */ + ReadCapacityUnits?: number | undefined; + /** + *

The total number of write capacity units consumed on a table or an index.

+ * @public + */ + WriteCapacityUnits?: number | undefined; + /** + *

The total number of capacity units consumed on a table or an index.

+ * @public + */ + CapacityUnits?: number | undefined; +} +/** + *

The capacity units consumed by an operation. The data returned includes the total + * provisioned throughput consumed, along with statistics for the table and any indexes + * involved in the operation. ConsumedCapacity is only returned if the request + * asked for it. For more information, see Provisioned capacity mode in the Amazon DynamoDB Developer + * Guide.

+ * @public + */ +export interface ConsumedCapacity { + /** + *

The name of the table that was affected by the operation. If you had specified the + * Amazon Resource Name (ARN) of a table in the input, you'll see the table ARN in the response.

+ * @public + */ + TableName?: string | undefined; + /** + *

The total number of capacity units consumed by the operation.

+ * @public + */ + CapacityUnits?: number | undefined; + /** + *

The total number of read capacity units consumed by the operation.

+ * @public + */ + ReadCapacityUnits?: number | undefined; + /** + *

The total number of write capacity units consumed by the operation.

+ * @public + */ + WriteCapacityUnits?: number | undefined; + /** + *

The amount of throughput consumed on the table affected by the operation.

+ * @public + */ + Table?: Capacity | undefined; + /** + *

The amount of throughput consumed on each local index affected by the + * operation.

+ * @public + */ + LocalSecondaryIndexes?: Record | undefined; + /** + *

The amount of throughput consumed on each global index affected by the + * operation.

+ * @public + */ + GlobalSecondaryIndexes?: Record | undefined; +} +/** + * @public + * @enum + */ +export declare const BatchStatementErrorCodeEnum: { + readonly AccessDenied: "AccessDenied"; + readonly ConditionalCheckFailed: "ConditionalCheckFailed"; + readonly DuplicateItem: "DuplicateItem"; + readonly InternalServerError: "InternalServerError"; + readonly ItemCollectionSizeLimitExceeded: "ItemCollectionSizeLimitExceeded"; + readonly ProvisionedThroughputExceeded: "ProvisionedThroughputExceeded"; + readonly RequestLimitExceeded: "RequestLimitExceeded"; + readonly ResourceNotFound: "ResourceNotFound"; + readonly ThrottlingError: "ThrottlingError"; + readonly TransactionConflict: "TransactionConflict"; + readonly ValidationError: "ValidationError"; +}; +/** + * @public + */ +export type BatchStatementErrorCodeEnum = (typeof BatchStatementErrorCodeEnum)[keyof typeof BatchStatementErrorCodeEnum]; +/** + *

An error occurred on the server side.

+ * @public + */ +export declare class InternalServerError extends __BaseException { + readonly name: "InternalServerError"; + readonly $fault: "server"; + /** + * @internal + */ + constructor(opts: __ExceptionOptionType); +} +/** + *

Throughput exceeds the current throughput quota for your account. Please contact + * Amazon Web ServicesSupport to request a + * quota increase.

+ * @public + */ +export declare class RequestLimitExceeded extends __BaseException { + readonly name: "RequestLimitExceeded"; + readonly $fault: "client"; + /** + * @internal + */ + constructor(opts: __ExceptionOptionType); +} +/** + * @public + */ +export declare class InvalidEndpointException extends __BaseException { + readonly name: "InvalidEndpointException"; + readonly $fault: "client"; + Message?: string | undefined; + /** + * @internal + */ + constructor(opts: __ExceptionOptionType); +} +/** + *

Your request rate is too high. The Amazon Web Services SDKs for DynamoDB + * automatically retry requests that receive this exception. Your request is eventually + * successful, unless your retry queue is too large to finish. Reduce the frequency of + * requests and use exponential backoff. For more information, go to Error Retries and Exponential Backoff in the Amazon DynamoDB Developer Guide.

+ * @public + */ +export declare class ProvisionedThroughputExceededException extends __BaseException { + readonly name: "ProvisionedThroughputExceededException"; + readonly $fault: "client"; + /** + * @internal + */ + constructor(opts: __ExceptionOptionType); +} +/** + *

The operation tried to access a nonexistent table or index. The resource might not + * be specified correctly, or its status might not be ACTIVE.

+ * @public + */ +export declare class ResourceNotFoundException extends __BaseException { + readonly name: "ResourceNotFoundException"; + readonly $fault: "client"; + /** + * @internal + */ + constructor(opts: __ExceptionOptionType); +} +/** + * @public + * @enum + */ +export declare const ReturnItemCollectionMetrics: { + readonly NONE: "NONE"; + readonly SIZE: "SIZE"; +}; +/** + * @public + */ +export type ReturnItemCollectionMetrics = (typeof ReturnItemCollectionMetrics)[keyof typeof ReturnItemCollectionMetrics]; +/** + *

An item collection is too large. This exception is only returned for tables that + * have one or more local secondary indexes.

+ * @public + */ +export declare class ItemCollectionSizeLimitExceededException extends __BaseException { + readonly name: "ItemCollectionSizeLimitExceededException"; + readonly $fault: "client"; + /** + * @internal + */ + constructor(opts: __ExceptionOptionType); +} +/** + *

Contains the details for the read/write capacity mode. This page talks about + * PROVISIONED and PAY_PER_REQUEST billing modes. For more + * information about these modes, see Read/write capacity mode.

+ * + *

You may need to switch to on-demand mode at least once in order to return a + * BillingModeSummary response.

+ *
+ * @public + */ +export interface BillingModeSummary { + /** + *

Controls how you are charged for read and write throughput and how you manage + * capacity. This setting can be changed later.

+ *
    + *
  • + *

    + * PROVISIONED - Sets the read/write capacity mode to + * PROVISIONED. We recommend using PROVISIONED for + * predictable workloads.

    + *
  • + *
  • + *

    + * PAY_PER_REQUEST - Sets the read/write capacity mode to + * PAY_PER_REQUEST. We recommend using + * PAY_PER_REQUEST for unpredictable workloads.

    + *
  • + *
+ * @public + */ + BillingMode?: BillingMode | undefined; + /** + *

Represents the time when PAY_PER_REQUEST was last set as the read/write + * capacity mode.

+ * @public + */ + LastUpdateToPayPerRequestDateTime?: Date | undefined; +} +/** + * @public + * @enum + */ +export declare const ComparisonOperator: { + readonly BEGINS_WITH: "BEGINS_WITH"; + readonly BETWEEN: "BETWEEN"; + readonly CONTAINS: "CONTAINS"; + readonly EQ: "EQ"; + readonly GE: "GE"; + readonly GT: "GT"; + readonly IN: "IN"; + readonly LE: "LE"; + readonly LT: "LT"; + readonly NE: "NE"; + readonly NOT_CONTAINS: "NOT_CONTAINS"; + readonly NOT_NULL: "NOT_NULL"; + readonly NULL: "NULL"; +}; +/** + * @public + */ +export type ComparisonOperator = (typeof ComparisonOperator)[keyof typeof ComparisonOperator]; +/** + * @public + * @enum + */ +export declare const ConditionalOperator: { + readonly AND: "AND"; + readonly OR: "OR"; +}; +/** + * @public + */ +export type ConditionalOperator = (typeof ConditionalOperator)[keyof typeof ConditionalOperator]; +/** + * @public + * @enum + */ +export declare const ContinuousBackupsStatus: { + readonly DISABLED: "DISABLED"; + readonly ENABLED: "ENABLED"; +}; +/** + * @public + */ +export type ContinuousBackupsStatus = (typeof ContinuousBackupsStatus)[keyof typeof ContinuousBackupsStatus]; +/** + * @public + * @enum + */ +export declare const PointInTimeRecoveryStatus: { + readonly DISABLED: "DISABLED"; + readonly ENABLED: "ENABLED"; +}; +/** + * @public + */ +export type PointInTimeRecoveryStatus = (typeof PointInTimeRecoveryStatus)[keyof typeof PointInTimeRecoveryStatus]; +/** + *

The description of the point in time settings applied to the table.

+ * @public + */ +export interface PointInTimeRecoveryDescription { + /** + *

The current state of point in time recovery:

+ *
    + *
  • + *

    + * ENABLED - Point in time recovery is enabled.

    + *
  • + *
  • + *

    + * DISABLED - Point in time recovery is disabled.

    + *
  • + *
+ * @public + */ + PointInTimeRecoveryStatus?: PointInTimeRecoveryStatus | undefined; + /** + *

The number of preceding days for which continuous backups are taken and maintained. + * Your table data is only recoverable to any point-in-time from within the configured + * recovery period. This parameter is optional.

+ * @public + */ + RecoveryPeriodInDays?: number | undefined; + /** + *

Specifies the earliest point in time you can restore your table to. You can restore + * your table to any point in time during the last 35 days.

+ * @public + */ + EarliestRestorableDateTime?: Date | undefined; + /** + *

+ * LatestRestorableDateTime is typically 5 minutes before the current time. + *

+ * @public + */ + LatestRestorableDateTime?: Date | undefined; +} +/** + *

Represents the continuous backups and point in time recovery settings on the + * table.

+ * @public + */ +export interface ContinuousBackupsDescription { + /** + *

+ * ContinuousBackupsStatus can be one of the following states: ENABLED, + * DISABLED

+ * @public + */ + ContinuousBackupsStatus: ContinuousBackupsStatus | undefined; + /** + *

The description of the point in time recovery settings applied to the table.

+ * @public + */ + PointInTimeRecoveryDescription?: PointInTimeRecoveryDescription | undefined; +} +/** + *

Backups have not yet been enabled for this table.

+ * @public + */ +export declare class ContinuousBackupsUnavailableException extends __BaseException { + readonly name: "ContinuousBackupsUnavailableException"; + readonly $fault: "client"; + /** + * @internal + */ + constructor(opts: __ExceptionOptionType); +} +/** + * @public + * @enum + */ +export declare const ContributorInsightsAction: { + readonly DISABLE: "DISABLE"; + readonly ENABLE: "ENABLE"; +}; +/** + * @public + */ +export type ContributorInsightsAction = (typeof ContributorInsightsAction)[keyof typeof ContributorInsightsAction]; +/** + * @public + * @enum + */ +export declare const ContributorInsightsStatus: { + readonly DISABLED: "DISABLED"; + readonly DISABLING: "DISABLING"; + readonly ENABLED: "ENABLED"; + readonly ENABLING: "ENABLING"; + readonly FAILED: "FAILED"; +}; +/** + * @public + */ +export type ContributorInsightsStatus = (typeof ContributorInsightsStatus)[keyof typeof ContributorInsightsStatus]; +/** + *

Represents a Contributor Insights summary entry.

+ * @public + */ +export interface ContributorInsightsSummary { + /** + *

Name of the table associated with the summary.

+ * @public + */ + TableName?: string | undefined; + /** + *

Name of the index associated with the summary, if any.

+ * @public + */ + IndexName?: string | undefined; + /** + *

Describes the current status for contributor insights for the given table and index, + * if applicable.

+ * @public + */ + ContributorInsightsStatus?: ContributorInsightsStatus | undefined; +} +/** + * @public + */ +export interface CreateBackupInput { + /** + *

The name of the table. You can also provide the Amazon Resource Name (ARN) of the table in this + * parameter.

+ * @public + */ + TableName: string | undefined; + /** + *

Specified name for the backup.

+ * @public + */ + BackupName: string | undefined; +} +/** + * @public + */ +export interface CreateBackupOutput { + /** + *

Contains the details of the backup created for the table.

+ * @public + */ + BackupDetails?: BackupDetails | undefined; +} +/** + *

There is no limit to the number of daily on-demand backups that can be taken.

+ *

For most purposes, up to 500 simultaneous table operations are allowed per account. These operations + * include CreateTable, UpdateTable, + * DeleteTable,UpdateTimeToLive, + * RestoreTableFromBackup, and RestoreTableToPointInTime.

+ *

When you are creating a table with one or more secondary + * indexes, you can have up to 250 such requests running at a time. However, if the table or + * index specifications are complex, then DynamoDB might temporarily reduce the number + * of concurrent operations.

+ *

When importing into DynamoDB, up to 50 simultaneous import table operations are allowed per account.

+ *

There is a soft account quota of 2,500 tables.

+ *

GetRecords was called with a value of more than 1000 for the limit request parameter.

+ *

More than 2 processes are reading from the same streams shard at the same time. Exceeding + * this limit may result in request throttling.

+ * @public + */ +export declare class LimitExceededException extends __BaseException { + readonly name: "LimitExceededException"; + readonly $fault: "client"; + /** + * @internal + */ + constructor(opts: __ExceptionOptionType); +} +/** + *

A target table with the specified name is either being created or deleted. + *

+ * @public + */ +export declare class TableInUseException extends __BaseException { + readonly name: "TableInUseException"; + readonly $fault: "client"; + /** + * @internal + */ + constructor(opts: __ExceptionOptionType); +} +/** + *

A source table with the name TableName does not currently exist within + * the subscriber's account or the subscriber is operating in the wrong Amazon Web Services Region.

+ * @public + */ +export declare class TableNotFoundException extends __BaseException { + readonly name: "TableNotFoundException"; + readonly $fault: "client"; + /** + * @internal + */ + constructor(opts: __ExceptionOptionType); +} +/** + *

Provides visibility into the number of read and write operations your table or + * secondary index can instantaneously support. The settings can be modified using the + * UpdateTable operation to meet the throughput requirements of an + * upcoming peak event.

+ * @public + */ +export interface WarmThroughput { + /** + *

Represents the number of read operations your base table can instantaneously + * support.

+ * @public + */ + ReadUnitsPerSecond?: number | undefined; + /** + *

Represents the number of write operations your base table can instantaneously + * support.

+ * @public + */ + WriteUnitsPerSecond?: number | undefined; +} +/** + *

Represents a new global secondary index to be added to an existing table.

+ * @public + */ +export interface CreateGlobalSecondaryIndexAction { + /** + *

The name of the global secondary index to be created.

+ * @public + */ + IndexName: string | undefined; + /** + *

The key schema for the global secondary index.

+ * @public + */ + KeySchema: KeySchemaElement[] | undefined; + /** + *

Represents attributes that are copied (projected) from the table into an index. These + * are in addition to the primary key attributes and index key attributes, which are + * automatically projected.

+ * @public + */ + Projection: Projection | undefined; + /** + *

Represents the provisioned throughput settings for the specified global secondary + * index.

+ *

For current minimum and maximum provisioned throughput values, see Service, + * Account, and Table Quotas in the Amazon DynamoDB Developer + * Guide.

+ * @public + */ + ProvisionedThroughput?: ProvisionedThroughput | undefined; + /** + *

The maximum number of read and write units for the global secondary index being + * created. If you use this parameter, you must specify MaxReadRequestUnits, + * MaxWriteRequestUnits, or both. You must use either + * OnDemand Throughput or ProvisionedThroughput based on your table's + * capacity mode.

+ * @public + */ + OnDemandThroughput?: OnDemandThroughput | undefined; + /** + *

Represents the warm throughput value (in read units per second and write units per + * second) when creating a secondary index.

+ * @public + */ + WarmThroughput?: WarmThroughput | undefined; +} +/** + *

Represents the properties of a replica.

+ * @public + */ +export interface Replica { + /** + *

The Region where the replica needs to be created.

+ * @public + */ + RegionName?: string | undefined; +} +/** + * @public + */ +export interface CreateGlobalTableInput { + /** + *

The global table name.

+ * @public + */ + GlobalTableName: string | undefined; + /** + *

The Regions where the global table needs to be created.

+ * @public + */ + ReplicationGroup: Replica[] | undefined; +} +/** + * @public + * @enum + */ +export declare const GlobalTableStatus: { + readonly ACTIVE: "ACTIVE"; + readonly CREATING: "CREATING"; + readonly DELETING: "DELETING"; + readonly UPDATING: "UPDATING"; +}; +/** + * @public + */ +export type GlobalTableStatus = (typeof GlobalTableStatus)[keyof typeof GlobalTableStatus]; +/** + *

Overrides the on-demand throughput settings for this replica table. If you don't + * specify a value for this parameter, it uses the source table's on-demand throughput + * settings.

+ * @public + */ +export interface OnDemandThroughputOverride { + /** + *

Maximum number of read request units for the specified replica table.

+ * @public + */ + MaxReadRequestUnits?: number | undefined; +} +/** + *

Replica-specific provisioned throughput settings. If not specified, uses the source + * table's provisioned throughput settings.

+ * @public + */ +export interface ProvisionedThroughputOverride { + /** + *

Replica-specific read capacity units. If not specified, uses the source table's read + * capacity settings.

+ * @public + */ + ReadCapacityUnits?: number | undefined; +} +/** + * @public + * @enum + */ +export declare const IndexStatus: { + readonly ACTIVE: "ACTIVE"; + readonly CREATING: "CREATING"; + readonly DELETING: "DELETING"; + readonly UPDATING: "UPDATING"; +}; +/** + * @public + */ +export type IndexStatus = (typeof IndexStatus)[keyof typeof IndexStatus]; +/** + *

The description of the warm throughput value on a global secondary index.

+ * @public + */ +export interface GlobalSecondaryIndexWarmThroughputDescription { + /** + *

Represents warm throughput read units per second value for a global secondary + * index.

+ * @public + */ + ReadUnitsPerSecond?: number | undefined; + /** + *

Represents warm throughput write units per second value for a global secondary + * index.

+ * @public + */ + WriteUnitsPerSecond?: number | undefined; + /** + *

Represents the warm throughput status being created or updated on a global secondary + * index. The status can only be UPDATING or ACTIVE.

+ * @public + */ + Status?: IndexStatus | undefined; +} +/** + *

Represents the properties of a replica global secondary index.

+ * @public + */ +export interface ReplicaGlobalSecondaryIndexDescription { + /** + *

The name of the global secondary index.

+ * @public + */ + IndexName?: string | undefined; + /** + *

If not described, uses the source table GSI's read capacity settings.

+ * @public + */ + ProvisionedThroughputOverride?: ProvisionedThroughputOverride | undefined; + /** + *

Overrides the maximum on-demand throughput for the specified global secondary index in + * the specified replica table.

+ * @public + */ + OnDemandThroughputOverride?: OnDemandThroughputOverride | undefined; + /** + *

Represents the warm throughput of the global secondary index for this replica.

+ * @public + */ + WarmThroughput?: GlobalSecondaryIndexWarmThroughputDescription | undefined; +} +/** + * @public + * @enum + */ +export declare const ReplicaStatus: { + readonly ACTIVE: "ACTIVE"; + readonly CREATING: "CREATING"; + readonly CREATION_FAILED: "CREATION_FAILED"; + readonly DELETING: "DELETING"; + readonly INACCESSIBLE_ENCRYPTION_CREDENTIALS: "INACCESSIBLE_ENCRYPTION_CREDENTIALS"; + readonly REGION_DISABLED: "REGION_DISABLED"; + readonly UPDATING: "UPDATING"; +}; +/** + * @public + */ +export type ReplicaStatus = (typeof ReplicaStatus)[keyof typeof ReplicaStatus]; +/** + * @public + * @enum + */ +export declare const TableClass: { + readonly STANDARD: "STANDARD"; + readonly STANDARD_INFREQUENT_ACCESS: "STANDARD_INFREQUENT_ACCESS"; +}; +/** + * @public + */ +export type TableClass = (typeof TableClass)[keyof typeof TableClass]; +/** + *

Contains details of the table class.

+ * @public + */ +export interface TableClassSummary { + /** + *

The table class of the specified table. Valid values are STANDARD and + * STANDARD_INFREQUENT_ACCESS.

+ * @public + */ + TableClass?: TableClass | undefined; + /** + *

The date and time at which the table class was last updated.

+ * @public + */ + LastUpdateDateTime?: Date | undefined; +} +/** + * @public + * @enum + */ +export declare const TableStatus: { + readonly ACTIVE: "ACTIVE"; + readonly ARCHIVED: "ARCHIVED"; + readonly ARCHIVING: "ARCHIVING"; + readonly CREATING: "CREATING"; + readonly DELETING: "DELETING"; + readonly INACCESSIBLE_ENCRYPTION_CREDENTIALS: "INACCESSIBLE_ENCRYPTION_CREDENTIALS"; + readonly UPDATING: "UPDATING"; +}; +/** + * @public + */ +export type TableStatus = (typeof TableStatus)[keyof typeof TableStatus]; +/** + *

Represents the warm throughput value (in read units per second and write units per second) + * of the table. Warm throughput is applicable for DynamoDB Standard-IA tables and specifies + * the minimum provisioned capacity maintained for immediate data access.

+ * @public + */ +export interface TableWarmThroughputDescription { + /** + *

Represents the base table's warm throughput value in read units per second.

+ * @public + */ + ReadUnitsPerSecond?: number | undefined; + /** + *

Represents the base table's warm throughput value in write units per second.

+ * @public + */ + WriteUnitsPerSecond?: number | undefined; + /** + *

Represents warm throughput value of the base table.

+ * @public + */ + Status?: TableStatus | undefined; +} +/** + *

Contains the details of the replica.

+ * @public + */ +export interface ReplicaDescription { + /** + *

The name of the Region.

+ * @public + */ + RegionName?: string | undefined; + /** + *

The current state of the replica:

+ *
    + *
  • + *

    + * CREATING - The replica is being created.

    + *
  • + *
  • + *

    + * UPDATING - The replica is being updated.

    + *
  • + *
  • + *

    + * DELETING - The replica is being deleted.

    + *
  • + *
  • + *

    + * ACTIVE - The replica is ready for use.

    + *
  • + *
  • + *

    + * REGION_DISABLED - The replica is inaccessible because the Amazon Web Services Region has been disabled.

    + * + *

    If the Amazon Web Services Region remains inaccessible for more than 20 + * hours, DynamoDB will remove this replica from the replication + * group. The replica will not be deleted and replication will stop from and to + * this region.

    + *
    + *
  • + *
  • + *

    + * INACCESSIBLE_ENCRYPTION_CREDENTIALS - The KMS key + * used to encrypt the table is inaccessible.

    + * + *

    If the KMS key remains inaccessible for more than 20 hours, + * DynamoDB will remove this replica from the replication group. + * The replica will not be deleted and replication will stop from and to this + * region.

    + *
    + *
  • + *
+ * @public + */ + ReplicaStatus?: ReplicaStatus | undefined; + /** + *

Detailed information about the replica status.

+ * @public + */ + ReplicaStatusDescription?: string | undefined; + /** + *

Specifies the progress of a Create, Update, or Delete action on the replica as a + * percentage.

+ * @public + */ + ReplicaStatusPercentProgress?: string | undefined; + /** + *

The KMS key of the replica that will be used for KMS + * encryption.

+ * @public + */ + KMSMasterKeyId?: string | undefined; + /** + *

Replica-specific provisioned throughput. If not described, uses the source table's + * provisioned throughput settings.

+ * @public + */ + ProvisionedThroughputOverride?: ProvisionedThroughputOverride | undefined; + /** + *

Overrides the maximum on-demand throughput settings for the specified replica + * table.

+ * @public + */ + OnDemandThroughputOverride?: OnDemandThroughputOverride | undefined; + /** + *

Represents the warm throughput value for this replica.

+ * @public + */ + WarmThroughput?: TableWarmThroughputDescription | undefined; + /** + *

Replica-specific global secondary index settings.

+ * @public + */ + GlobalSecondaryIndexes?: ReplicaGlobalSecondaryIndexDescription[] | undefined; + /** + *

The time at which the replica was first detected as inaccessible. To determine cause + * of inaccessibility check the ReplicaStatus property.

+ * @public + */ + ReplicaInaccessibleDateTime?: Date | undefined; + /** + *

Contains details of the table class.

+ * @public + */ + ReplicaTableClassSummary?: TableClassSummary | undefined; +} +/** + *

Contains details about the global table.

+ * @public + */ +export interface GlobalTableDescription { + /** + *

The Regions where the global table has replicas.

+ * @public + */ + ReplicationGroup?: ReplicaDescription[] | undefined; + /** + *

The unique identifier of the global table.

+ * @public + */ + GlobalTableArn?: string | undefined; + /** + *

The creation time of the global table.

+ * @public + */ + CreationDateTime?: Date | undefined; + /** + *

The current state of the global table:

+ *
    + *
  • + *

    + * CREATING - The global table is being created.

    + *
  • + *
  • + *

    + * UPDATING - The global table is being updated.

    + *
  • + *
  • + *

    + * DELETING - The global table is being deleted.

    + *
  • + *
  • + *

    + * ACTIVE - The global table is ready for use.

    + *
  • + *
+ * @public + */ + GlobalTableStatus?: GlobalTableStatus | undefined; + /** + *

The global table name.

+ * @public + */ + GlobalTableName?: string | undefined; +} +/** + * @public + */ +export interface CreateGlobalTableOutput { + /** + *

Contains the details of the global table.

+ * @public + */ + GlobalTableDescription?: GlobalTableDescription | undefined; +} +/** + *

The specified global table already exists.

+ * @public + */ +export declare class GlobalTableAlreadyExistsException extends __BaseException { + readonly name: "GlobalTableAlreadyExistsException"; + readonly $fault: "client"; + /** + * @internal + */ + constructor(opts: __ExceptionOptionType); +} +/** + *

Represents a replica to be added.

+ * @public + */ +export interface CreateReplicaAction { + /** + *

The Region of the replica to be added.

+ * @public + */ + RegionName: string | undefined; +} +/** + *

Represents the properties of a replica global secondary index.

+ * @public + */ +export interface ReplicaGlobalSecondaryIndex { + /** + *

The name of the global secondary index.

+ * @public + */ + IndexName: string | undefined; + /** + *

Replica table GSI-specific provisioned throughput. If not specified, uses the source + * table GSI's read capacity settings.

+ * @public + */ + ProvisionedThroughputOverride?: ProvisionedThroughputOverride | undefined; + /** + *

Overrides the maximum on-demand throughput settings for the specified global secondary + * index in the specified replica table.

+ * @public + */ + OnDemandThroughputOverride?: OnDemandThroughputOverride | undefined; +} +/** + *

Represents a replica to be created.

+ * @public + */ +export interface CreateReplicationGroupMemberAction { + /** + *

The Region where the new replica will be created.

+ * @public + */ + RegionName: string | undefined; + /** + *

The KMS key that should be used for KMS encryption in + * the new replica. To specify a key, use its key ID, Amazon Resource Name (ARN), alias + * name, or alias ARN. Note that you should only provide this parameter if the key is + * different from the default DynamoDB KMS key + * alias/aws/dynamodb.

+ * @public + */ + KMSMasterKeyId?: string | undefined; + /** + *

Replica-specific provisioned throughput. If not specified, uses the source table's + * provisioned throughput settings.

+ * @public + */ + ProvisionedThroughputOverride?: ProvisionedThroughputOverride | undefined; + /** + *

The maximum on-demand throughput settings for the specified replica table being + * created. You can only modify MaxReadRequestUnits, because you can't modify + * MaxWriteRequestUnits for individual replica tables.

+ * @public + */ + OnDemandThroughputOverride?: OnDemandThroughputOverride | undefined; + /** + *

Replica-specific global secondary index settings.

+ * @public + */ + GlobalSecondaryIndexes?: ReplicaGlobalSecondaryIndex[] | undefined; + /** + *

Replica-specific table class. If not specified, uses the source table's table + * class.

+ * @public + */ + TableClassOverride?: TableClass | undefined; +} +/** + *

Represents the properties of a global secondary index.

+ * @public + */ +export interface GlobalSecondaryIndex { + /** + *

The name of the global secondary index. The name must be unique among all other + * indexes on this table.

+ * @public + */ + IndexName: string | undefined; + /** + *

The complete key schema for a global secondary index, which consists of one or more + * pairs of attribute names and key types:

+ *
    + *
  • + *

    + * HASH - partition key

    + *
  • + *
  • + *

    + * RANGE - sort key

    + *
  • + *
+ * + *

The partition key of an item is also known as its hash + * attribute. The term "hash attribute" derives from DynamoDB's usage of + * an internal hash function to evenly distribute data items across partitions, based + * on their partition key values.

+ *

The sort key of an item is also known as its range attribute. + * The term "range attribute" derives from the way DynamoDB stores items with the same + * partition key physically close together, in sorted order by the sort key + * value.

+ *
+ * @public + */ + KeySchema: KeySchemaElement[] | undefined; + /** + *

Represents attributes that are copied (projected) from the table into the global + * secondary index. These are in addition to the primary key attributes and index key + * attributes, which are automatically projected.

+ * @public + */ + Projection: Projection | undefined; + /** + *

Represents the provisioned throughput settings for the specified global secondary + * index. You must use either + * OnDemandThroughput or ProvisionedThroughput based + * on your table's capacity mode.

+ *

For current minimum and maximum provisioned throughput values, see Service, + * Account, and Table Quotas in the Amazon DynamoDB Developer + * Guide.

+ * @public + */ + ProvisionedThroughput?: ProvisionedThroughput | undefined; + /** + *

The maximum number of read and write units for the specified global secondary index. + * If you use this parameter, you must specify MaxReadRequestUnits, + * MaxWriteRequestUnits, or both. You must use either + * OnDemandThroughput or ProvisionedThroughput based + * on your table's capacity mode.

+ * @public + */ + OnDemandThroughput?: OnDemandThroughput | undefined; + /** + *

Represents the warm throughput value (in read units per second and write units per + * second) for the specified secondary index. If you use this parameter, you must specify + * ReadUnitsPerSecond, WriteUnitsPerSecond, or both.

+ * @public + */ + WarmThroughput?: WarmThroughput | undefined; +} +/** + *

Represents the properties of a local secondary index.

+ * @public + */ +export interface LocalSecondaryIndex { + /** + *

The name of the local secondary index. The name must be unique among all other indexes + * on this table.

+ * @public + */ + IndexName: string | undefined; + /** + *

The complete key schema for the local secondary index, consisting of one or more pairs + * of attribute names and key types:

+ *
    + *
  • + *

    + * HASH - partition key

    + *
  • + *
  • + *

    + * RANGE - sort key

    + *
  • + *
+ * + *

The partition key of an item is also known as its hash + * attribute. The term "hash attribute" derives from DynamoDB's usage of + * an internal hash function to evenly distribute data items across partitions, based + * on their partition key values.

+ *

The sort key of an item is also known as its range attribute. + * The term "range attribute" derives from the way DynamoDB stores items with the same + * partition key physically close together, in sorted order by the sort key + * value.

+ *
+ * @public + */ + KeySchema: KeySchemaElement[] | undefined; + /** + *

Represents attributes that are copied (projected) from the table into the local + * secondary index. These are in addition to the primary key attributes and index key + * attributes, which are automatically projected.

+ * @public + */ + Projection: Projection | undefined; +} +/** + *

Represents the settings used to enable server-side encryption.

+ * @public + */ +export interface SSESpecification { + /** + *

Indicates whether server-side encryption is done using an Amazon Web Services managed + * key or an Amazon Web Services owned key. If enabled (true), server-side encryption type + * is set to KMS and an Amazon Web Services managed key is used (KMS charges apply). If disabled (false) or not specified, server-side + * encryption is set to Amazon Web Services owned key.

+ * @public + */ + Enabled?: boolean | undefined; + /** + *

Server-side encryption type. The only supported value is:

+ *
    + *
  • + *

    + * KMS - Server-side encryption that uses Key Management Service. The + * key is stored in your account and is managed by KMS (KMS charges apply).

    + *
  • + *
+ * @public + */ + SSEType?: SSEType | undefined; + /** + *

The KMS key that should be used for the KMS encryption. + * To specify a key, use its key ID, Amazon Resource Name (ARN), alias name, or alias ARN. + * Note that you should only provide this parameter if the key is different from the + * default DynamoDB key alias/aws/dynamodb.

+ * @public + */ + KMSMasterKeyId?: string | undefined; +} +/** + *

Describes a tag. A tag is a key-value pair. You can add up to 50 tags to a single + * DynamoDB table.

+ *

Amazon Web Services-assigned tag names and values are automatically assigned the + * aws: prefix, which the user cannot assign. Amazon Web Services-assigned + * tag names do not count towards the tag limit of 50. User-assigned tag names have the + * prefix user: in the Cost Allocation Report. You cannot backdate the + * application of a tag.

+ *

For an overview on tagging DynamoDB resources, see Tagging + * for DynamoDB in the Amazon DynamoDB Developer + * Guide.

+ * @public + */ +export interface Tag { + /** + *

The key of the tag. Tag keys are case sensitive. Each DynamoDB table can + * only have up to one tag with the same key. If you try to add an existing tag (same key), + * the existing tag value will be updated to the new value.

+ * @public + */ + Key: string | undefined; + /** + *

The value of the tag. Tag values are case-sensitive and can be null.

+ * @public + */ + Value: string | undefined; +} +/** + *

Represents the input of a CreateTable operation.

+ * @public + */ +export interface CreateTableInput { + /** + *

An array of attributes that describe the key schema for the table and indexes.

+ * @public + */ + AttributeDefinitions: AttributeDefinition[] | undefined; + /** + *

The name of the table to create. You can also provide the Amazon Resource Name (ARN) of the table in + * this parameter.

+ * @public + */ + TableName: string | undefined; + /** + *

Specifies the attributes that make up the primary key for a table or an index. The + * attributes in KeySchema must also be defined in the + * AttributeDefinitions array. For more information, see Data + * Model in the Amazon DynamoDB Developer Guide.

+ *

Each KeySchemaElement in the array is composed of:

+ *
    + *
  • + *

    + * AttributeName - The name of this key attribute.

    + *
  • + *
  • + *

    + * KeyType - The role that the key attribute will assume:

    + *
      + *
    • + *

      + * HASH - partition key

      + *
    • + *
    • + *

      + * RANGE - sort key

      + *
    • + *
    + *
  • + *
+ * + *

The partition key of an item is also known as its hash + * attribute. The term "hash attribute" derives from the DynamoDB usage + * of an internal hash function to evenly distribute data items across partitions, + * based on their partition key values.

+ *

The sort key of an item is also known as its range attribute. + * The term "range attribute" derives from the way DynamoDB stores items with the same + * partition key physically close together, in sorted order by the sort key + * value.

+ *
+ *

For a simple primary key (partition key), you must provide exactly one element with a + * KeyType of HASH.

+ *

For a composite primary key (partition key and sort key), you must provide exactly two + * elements, in this order: The first element must have a KeyType of + * HASH, and the second element must have a KeyType of + * RANGE.

+ *

For more information, see Working with Tables in the Amazon DynamoDB Developer + * Guide.

+ * @public + */ + KeySchema: KeySchemaElement[] | undefined; + /** + *

One or more local secondary indexes (the maximum is 5) to be created on the table. + * Each index is scoped to a given partition key value. There is a 10 GB size limit per + * partition key value; otherwise, the size of a local secondary index is + * unconstrained.

+ *

Each local secondary index in the array includes the following:

+ *
    + *
  • + *

    + * IndexName - The name of the local secondary index. Must be unique + * only for this table.

    + *

    + *
  • + *
  • + *

    + * KeySchema - Specifies the key schema for the local secondary index. + * The key schema must begin with the same partition key as the table.

    + *
  • + *
  • + *

    + * Projection - Specifies attributes that are copied (projected) from + * the table into the index. These are in addition to the primary key attributes + * and index key attributes, which are automatically projected. Each attribute + * specification is composed of:

    + *
      + *
    • + *

      + * ProjectionType - One of the following:

      + *
        + *
      • + *

        + * KEYS_ONLY - Only the index and primary keys are + * projected into the index.

        + *
      • + *
      • + *

        + * INCLUDE - Only the specified table attributes are + * projected into the index. The list of projected attributes is in + * NonKeyAttributes.

        + *
      • + *
      • + *

        + * ALL - All of the table attributes are projected + * into the index.

        + *
      • + *
      + *
    • + *
    • + *

      + * NonKeyAttributes - A list of one or more non-key attribute + * names that are projected into the secondary index. The total count of + * attributes provided in NonKeyAttributes, summed across all + * of the secondary indexes, must not exceed 100. If you project the same + * attribute into two different indexes, this counts as two distinct + * attributes when determining the total. This limit only applies when you + * specify the ProjectionType of INCLUDE. You still can specify the + * ProjectionType of ALL to project all attributes from the + * source table, even if the table has more than 100 attributes.

      + *
    • + *
    + *
  • + *
+ * @public + */ + LocalSecondaryIndexes?: LocalSecondaryIndex[] | undefined; + /** + *

One or more global secondary indexes (the maximum is 20) to be created on the table. + * Each global secondary index in the array includes the following:

+ *
    + *
  • + *

    + * IndexName - The name of the global secondary index. Must be unique + * only for this table.

    + *

    + *
  • + *
  • + *

    + * KeySchema - Specifies the key schema for the global secondary + * index.

    + *
  • + *
  • + *

    + * Projection - Specifies attributes that are copied (projected) from + * the table into the index. These are in addition to the primary key attributes + * and index key attributes, which are automatically projected. Each attribute + * specification is composed of:

    + *
      + *
    • + *

      + * ProjectionType - One of the following:

      + *
        + *
      • + *

        + * KEYS_ONLY - Only the index and primary keys are + * projected into the index.

        + *
      • + *
      • + *

        + * INCLUDE - Only the specified table attributes are + * projected into the index. The list of projected attributes is in + * NonKeyAttributes.

        + *
      • + *
      • + *

        + * ALL - All of the table attributes are projected + * into the index.

        + *
      • + *
      + *
    • + *
    • + *

      + * NonKeyAttributes - A list of one or more non-key attribute + * names that are projected into the secondary index. The total count of + * attributes provided in NonKeyAttributes, summed across all + * of the secondary indexes, must not exceed 100. If you project the same + * attribute into two different indexes, this counts as two distinct + * attributes when determining the total. This limit only applies when you + * specify the ProjectionType of INCLUDE. You still can + * specify the ProjectionType of ALL to project all attributes + * from the source table, even if the table has more than 100 attributes.

      + *
    • + *
    + *
  • + *
  • + *

    + * ProvisionedThroughput - The provisioned throughput settings for the + * global secondary index, consisting of read and write capacity units.

    + *
  • + *
+ * @public + */ + GlobalSecondaryIndexes?: GlobalSecondaryIndex[] | undefined; + /** + *

Controls how you are charged for read and write throughput and how you manage + * capacity. This setting can be changed later.

+ *
    + *
  • + *

    + * PAY_PER_REQUEST - We recommend using PAY_PER_REQUEST + * for most DynamoDB workloads. PAY_PER_REQUEST sets the billing mode + * to On-demand capacity mode.

    + *
  • + *
  • + *

    + * PROVISIONED - We recommend using PROVISIONED for + * steady workloads with predictable growth where capacity requirements can be + * reliably forecasted. PROVISIONED sets the billing mode to Provisioned capacity mode.

    + *
  • + *
+ * @public + */ + BillingMode?: BillingMode | undefined; + /** + *

Represents the provisioned throughput settings for a specified table or index. The + * settings can be modified using the UpdateTable operation.

+ *

If you set BillingMode as PROVISIONED, you must specify this property. + * If you set BillingMode as PAY_PER_REQUEST, you cannot specify this + * property.

+ *

For current minimum and maximum provisioned throughput values, see Service, + * Account, and Table Quotas in the Amazon DynamoDB Developer + * Guide.

+ * @public + */ + ProvisionedThroughput?: ProvisionedThroughput | undefined; + /** + *

The settings for DynamoDB Streams on the table. These settings consist of:

+ *
    + *
  • + *

    + * StreamEnabled - Indicates whether DynamoDB Streams is to be enabled + * (true) or disabled (false).

    + *
  • + *
  • + *

    + * StreamViewType - When an item in the table is modified, + * StreamViewType determines what information is written to the + * table's stream. Valid values for StreamViewType are:

    + *
      + *
    • + *

      + * KEYS_ONLY - Only the key attributes of the modified item + * are written to the stream.

      + *
    • + *
    • + *

      + * NEW_IMAGE - The entire item, as it appears after it was + * modified, is written to the stream.

      + *
    • + *
    • + *

      + * OLD_IMAGE - The entire item, as it appeared before it was + * modified, is written to the stream.

      + *
    • + *
    • + *

      + * NEW_AND_OLD_IMAGES - Both the new and the old item images + * of the item are written to the stream.

      + *
    • + *
    + *
  • + *
+ * @public + */ + StreamSpecification?: StreamSpecification | undefined; + /** + *

Represents the settings used to enable server-side encryption.

+ * @public + */ + SSESpecification?: SSESpecification | undefined; + /** + *

A list of key-value pairs to label the table. For more information, see Tagging + * for DynamoDB.

+ * @public + */ + Tags?: Tag[] | undefined; + /** + *

The table class of the new table. Valid values are STANDARD and + * STANDARD_INFREQUENT_ACCESS.

+ * @public + */ + TableClass?: TableClass | undefined; + /** + *

Indicates whether deletion protection is to be enabled (true) or disabled (false) on + * the table.

+ * @public + */ + DeletionProtectionEnabled?: boolean | undefined; + /** + *

Represents the warm throughput (in read units per second and write units per second) + * for creating a table.

+ * @public + */ + WarmThroughput?: WarmThroughput | undefined; + /** + *

An Amazon Web Services resource-based policy document in JSON format that will be + * attached to the table.

+ *

When you attach a resource-based policy while creating a table, the policy application + * is strongly consistent.

+ *

The maximum size supported for a resource-based policy document is 20 KB. DynamoDB counts whitespaces when calculating the size of a policy against this + * limit. For a full list of all considerations that apply for resource-based policies, see + * Resource-based + * policy considerations.

+ * + *

You need to specify the CreateTable and + * PutResourcePolicy + * IAM actions for authorizing a user to create a table with a + * resource-based policy.

+ *
+ * @public + */ + ResourcePolicy?: string | undefined; + /** + *

Sets the maximum number of read and write units for the specified table in on-demand + * capacity mode. If you use this parameter, you must specify + * MaxReadRequestUnits, MaxWriteRequestUnits, or both.

+ * @public + */ + OnDemandThroughput?: OnDemandThroughput | undefined; +} +/** + *

Represents the provisioned throughput settings for the table, consisting of read and + * write capacity units, along with data about increases and decreases.

+ * @public + */ +export interface ProvisionedThroughputDescription { + /** + *

The date and time of the last provisioned throughput increase for this table.

+ * @public + */ + LastIncreaseDateTime?: Date | undefined; + /** + *

The date and time of the last provisioned throughput decrease for this table.

+ * @public + */ + LastDecreaseDateTime?: Date | undefined; + /** + *

The number of provisioned throughput decreases for this table during this UTC calendar + * day. For current maximums on provisioned throughput decreases, see Service, + * Account, and Table Quotas in the Amazon DynamoDB Developer + * Guide.

+ * @public + */ + NumberOfDecreasesToday?: number | undefined; + /** + *

The maximum number of strongly consistent reads consumed per second before DynamoDB + * returns a ThrottlingException. Eventually consistent reads require less + * effort than strongly consistent reads, so a setting of 50 ReadCapacityUnits + * per second provides 100 eventually consistent ReadCapacityUnits per + * second.

+ * @public + */ + ReadCapacityUnits?: number | undefined; + /** + *

The maximum number of writes consumed per second before DynamoDB returns a + * ThrottlingException.

+ * @public + */ + WriteCapacityUnits?: number | undefined; +} +/** + *

Represents the properties of a global secondary index.

+ * @public + */ +export interface GlobalSecondaryIndexDescription { + /** + *

The name of the global secondary index.

+ * @public + */ + IndexName?: string | undefined; + /** + *

The complete key schema for a global secondary index, which consists of one or more + * pairs of attribute names and key types:

+ *
    + *
  • + *

    + * HASH - partition key

    + *
  • + *
  • + *

    + * RANGE - sort key

    + *
  • + *
+ * + *

The partition key of an item is also known as its hash + * attribute. The term "hash attribute" derives from DynamoDB's usage of an internal hash function to evenly distribute data items across + * partitions, based on their partition key values.

+ *

The sort key of an item is also known as its range attribute. + * The term "range attribute" derives from the way DynamoDB stores items with + * the same partition key physically close together, in sorted order by the sort key + * value.

+ *
+ * @public + */ + KeySchema?: KeySchemaElement[] | undefined; + /** + *

Represents attributes that are copied (projected) from the table into the global + * secondary index. These are in addition to the primary key attributes and index key + * attributes, which are automatically projected.

+ * @public + */ + Projection?: Projection | undefined; + /** + *

The current state of the global secondary index:

+ *
    + *
  • + *

    + * CREATING - The index is being created.

    + *
  • + *
  • + *

    + * UPDATING - The index is being updated.

    + *
  • + *
  • + *

    + * DELETING - The index is being deleted.

    + *
  • + *
  • + *

    + * ACTIVE - The index is ready for use.

    + *
  • + *
+ * @public + */ + IndexStatus?: IndexStatus | undefined; + /** + *

Indicates whether the index is currently backfilling. Backfilling + * is the process of reading items from the table and determining whether they can be added + * to the index. (Not all items will qualify: For example, a partition key cannot have any + * duplicate values.) If an item can be added to the index, DynamoDB will do so. After all + * items have been processed, the backfilling operation is complete and + * Backfilling is false.

+ *

You can delete an index that is being created during the Backfilling + * phase when IndexStatus is set to CREATING and Backfilling is + * true. You can't delete the index that is being created when IndexStatus is + * set to CREATING and Backfilling is false.

+ * + *

For indexes that were created during a CreateTable operation, the + * Backfilling attribute does not appear in the + * DescribeTable output.

+ *
+ * @public + */ + Backfilling?: boolean | undefined; + /** + *

Represents the provisioned throughput settings for the specified global secondary + * index.

+ *

For current minimum and maximum provisioned throughput values, see Service, + * Account, and Table Quotas in the Amazon DynamoDB Developer + * Guide.

+ * @public + */ + ProvisionedThroughput?: ProvisionedThroughputDescription | undefined; + /** + *

The total size of the specified index, in bytes. DynamoDB updates this value + * approximately every six hours. Recent changes might not be reflected in this + * value.

+ * @public + */ + IndexSizeBytes?: number | undefined; + /** + *

The number of items in the specified index. DynamoDB updates this value approximately + * every six hours. Recent changes might not be reflected in this value.

+ * @public + */ + ItemCount?: number | undefined; + /** + *

The Amazon Resource Name (ARN) that uniquely identifies the index.

+ * @public + */ + IndexArn?: string | undefined; + /** + *

The maximum number of read and write units for the specified global secondary index. + * If you use this parameter, you must specify MaxReadRequestUnits, + * MaxWriteRequestUnits, or both.

+ * @public + */ + OnDemandThroughput?: OnDemandThroughput | undefined; + /** + *

Represents the warm throughput value (in read units per second and write units per + * second) for the specified secondary index.

+ * @public + */ + WarmThroughput?: GlobalSecondaryIndexWarmThroughputDescription | undefined; +} +/** + *

Represents the properties of a local secondary index.

+ * @public + */ +export interface LocalSecondaryIndexDescription { + /** + *

Represents the name of the local secondary index.

+ * @public + */ + IndexName?: string | undefined; + /** + *

The complete key schema for the local secondary index, consisting of one or more pairs + * of attribute names and key types:

+ *
    + *
  • + *

    + * HASH - partition key

    + *
  • + *
  • + *

    + * RANGE - sort key

    + *
  • + *
+ * + *

The partition key of an item is also known as its hash + * attribute. The term "hash attribute" derives from DynamoDB's usage of + * an internal hash function to evenly distribute data items across partitions, based + * on their partition key values.

+ *

The sort key of an item is also known as its range attribute. + * The term "range attribute" derives from the way DynamoDB stores items with the same + * partition key physically close together, in sorted order by the sort key + * value.

+ *
+ * @public + */ + KeySchema?: KeySchemaElement[] | undefined; + /** + *

Represents attributes that are copied (projected) from the table into the global + * secondary index. These are in addition to the primary key attributes and index key + * attributes, which are automatically projected.

+ * @public + */ + Projection?: Projection | undefined; + /** + *

The total size of the specified index, in bytes. DynamoDB updates this value + * approximately every six hours. Recent changes might not be reflected in this + * value.

+ * @public + */ + IndexSizeBytes?: number | undefined; + /** + *

The number of items in the specified index. DynamoDB updates this value + * approximately every six hours. Recent changes might not be reflected in this + * value.

+ * @public + */ + ItemCount?: number | undefined; + /** + *

The Amazon Resource Name (ARN) that uniquely identifies the index.

+ * @public + */ + IndexArn?: string | undefined; +} +/** + * @public + * @enum + */ +export declare const MultiRegionConsistency: { + readonly EVENTUAL: "EVENTUAL"; + readonly STRONG: "STRONG"; +}; +/** + * @public + */ +export type MultiRegionConsistency = (typeof MultiRegionConsistency)[keyof typeof MultiRegionConsistency]; +/** + *

Contains details for the restore.

+ * @public + */ +export interface RestoreSummary { + /** + *

The Amazon Resource Name (ARN) of the backup from which the table was restored.

+ * @public + */ + SourceBackupArn?: string | undefined; + /** + *

The ARN of the source table of the backup that is being restored.

+ * @public + */ + SourceTableArn?: string | undefined; + /** + *

Point in time or source backup time.

+ * @public + */ + RestoreDateTime: Date | undefined; + /** + *

Indicates if a restore is in progress or not.

+ * @public + */ + RestoreInProgress: boolean | undefined; +} +/** + *

Represents the properties of a table.

+ * @public + */ +export interface TableDescription { + /** + *

An array of AttributeDefinition objects. Each of these objects describes + * one attribute in the table and index key schema.

+ *

Each AttributeDefinition object in this array is composed of:

+ *
    + *
  • + *

    + * AttributeName - The name of the attribute.

    + *
  • + *
  • + *

    + * AttributeType - The data type for the attribute.

    + *
  • + *
+ * @public + */ + AttributeDefinitions?: AttributeDefinition[] | undefined; + /** + *

The name of the table.

+ * @public + */ + TableName?: string | undefined; + /** + *

The primary key structure for the table. Each KeySchemaElement consists + * of:

+ *
    + *
  • + *

    + * AttributeName - The name of the attribute.

    + *
  • + *
  • + *

    + * KeyType - The role of the attribute:

    + *
      + *
    • + *

      + * HASH - partition key

      + *
    • + *
    • + *

      + * RANGE - sort key

      + *
    • + *
    + * + *

    The partition key of an item is also known as its hash + * attribute. The term "hash attribute" derives from DynamoDB's + * usage of an internal hash function to evenly distribute data items across + * partitions, based on their partition key values.

    + *

    The sort key of an item is also known as its range + * attribute. The term "range attribute" derives from the way + * DynamoDB stores items with the same partition key physically close together, + * in sorted order by the sort key value.

    + *
    + *
  • + *
+ *

For more information about primary keys, see Primary Key in the Amazon DynamoDB Developer + * Guide.

+ * @public + */ + KeySchema?: KeySchemaElement[] | undefined; + /** + *

The current state of the table:

+ *
    + *
  • + *

    + * CREATING - The table is being created.

    + *
  • + *
  • + *

    + * UPDATING - The table/index configuration is being updated. The + * table/index remains available for data operations when + * UPDATING.

    + *
  • + *
  • + *

    + * DELETING - The table is being deleted.

    + *
  • + *
  • + *

    + * ACTIVE - The table is ready for use.

    + *
  • + *
  • + *

    + * INACCESSIBLE_ENCRYPTION_CREDENTIALS - The KMS key + * used to encrypt the table in inaccessible. Table operations may fail due to + * failure to use the KMS key. DynamoDB will initiate the + * table archival process when a table's KMS key remains + * inaccessible for more than seven days.

    + *
  • + *
  • + *

    + * ARCHIVING - The table is being archived. Operations are not allowed + * until archival is complete.

    + *
  • + *
  • + *

    + * ARCHIVED - The table has been archived. See the ArchivalReason for + * more information.

    + *
  • + *
+ * @public + */ + TableStatus?: TableStatus | undefined; + /** + *

The date and time when the table was created, in UNIX epoch time format.

+ * @public + */ + CreationDateTime?: Date | undefined; + /** + *

The provisioned throughput settings for the table, consisting of read and write + * capacity units, along with data about increases and decreases.

+ * @public + */ + ProvisionedThroughput?: ProvisionedThroughputDescription | undefined; + /** + *

The total size of the specified table, in bytes. DynamoDB updates this value + * approximately every six hours. Recent changes might not be reflected in this + * value.

+ * @public + */ + TableSizeBytes?: number | undefined; + /** + *

The number of items in the specified table. DynamoDB updates this value approximately + * every six hours. Recent changes might not be reflected in this value.

+ * @public + */ + ItemCount?: number | undefined; + /** + *

The Amazon Resource Name (ARN) that uniquely identifies the table.

+ * @public + */ + TableArn?: string | undefined; + /** + *

Unique identifier for the table for which the backup was created.

+ * @public + */ + TableId?: string | undefined; + /** + *

Contains the details for the read/write capacity mode.

+ * @public + */ + BillingModeSummary?: BillingModeSummary | undefined; + /** + *

Represents one or more local secondary indexes on the table. Each index is scoped to a + * given partition key value. Tables with one or more local secondary indexes are subject + * to an item collection size limit, where the amount of data within a given item + * collection cannot exceed 10 GB. Each element is composed of:

+ *
    + *
  • + *

    + * IndexName - The name of the local secondary index.

    + *
  • + *
  • + *

    + * KeySchema - Specifies the complete index key schema. The attribute + * names in the key schema must be between 1 and 255 characters (inclusive). The + * key schema must begin with the same partition key as the table.

    + *
  • + *
  • + *

    + * Projection - Specifies attributes that are copied (projected) from + * the table into the index. These are in addition to the primary key attributes + * and index key attributes, which are automatically projected. Each attribute + * specification is composed of:

    + *
      + *
    • + *

      + * ProjectionType - One of the following:

      + *
        + *
      • + *

        + * KEYS_ONLY - Only the index and primary keys are + * projected into the index.

        + *
      • + *
      • + *

        + * INCLUDE - Only the specified table attributes are + * projected into the index. The list of projected attributes is in + * NonKeyAttributes.

        + *
      • + *
      • + *

        + * ALL - All of the table attributes are projected + * into the index.

        + *
      • + *
      + *
    • + *
    • + *

      + * NonKeyAttributes - A list of one or more non-key attribute + * names that are projected into the secondary index. The total count of + * attributes provided in NonKeyAttributes, summed across all + * of the secondary indexes, must not exceed 100. If you project the same + * attribute into two different indexes, this counts as two distinct + * attributes when determining the total. This limit only applies when you + * specify the ProjectionType of INCLUDE. You still can + * specify the ProjectionType of ALL to project all attributes + * from the source table, even if the table has more than 100 attributes.

      + *
    • + *
    + *
  • + *
  • + *

    + * IndexSizeBytes - Represents the total size of the index, in bytes. + * DynamoDB updates this value approximately every six hours. Recent changes might + * not be reflected in this value.

    + *
  • + *
  • + *

    + * ItemCount - Represents the number of items in the index. DynamoDB + * updates this value approximately every six hours. Recent changes might not be + * reflected in this value.

    + *
  • + *
+ *

If the table is in the DELETING state, no information about indexes will + * be returned.

+ * @public + */ + LocalSecondaryIndexes?: LocalSecondaryIndexDescription[] | undefined; + /** + *

The global secondary indexes, if any, on the table. Each index is scoped to a given + * partition key value. Each element is composed of:

+ *
    + *
  • + *

    + * Backfilling - If true, then the index is currently in the + * backfilling phase. Backfilling occurs only when a new global secondary index is + * added to the table. It is the process by which DynamoDB populates the new index + * with data from the table. (This attribute does not appear for indexes that were + * created during a CreateTable operation.)

    + *

    You can delete an index that is being created during the + * Backfilling phase when IndexStatus is set to + * CREATING and Backfilling is true. You can't delete the index that + * is being created when IndexStatus is set to CREATING and + * Backfilling is false. (This attribute does not appear for + * indexes that were created during a CreateTable operation.)

    + *
  • + *
  • + *

    + * IndexName - The name of the global secondary index.

    + *
  • + *
  • + *

    + * IndexSizeBytes - The total size of the global secondary index, in + * bytes. DynamoDB updates this value approximately every six hours. Recent changes + * might not be reflected in this value.

    + *
  • + *
  • + *

    + * IndexStatus - The current status of the global secondary + * index:

    + *
      + *
    • + *

      + * CREATING - The index is being created.

      + *
    • + *
    • + *

      + * UPDATING - The index is being updated.

      + *
    • + *
    • + *

      + * DELETING - The index is being deleted.

      + *
    • + *
    • + *

      + * ACTIVE - The index is ready for use.

      + *
    • + *
    + *
  • + *
  • + *

    + * ItemCount - The number of items in the global secondary index. + * DynamoDB updates this value approximately every six hours. Recent changes might + * not be reflected in this value.

    + *
  • + *
  • + *

    + * KeySchema - Specifies the complete index key schema. The attribute + * names in the key schema must be between 1 and 255 characters (inclusive). The + * key schema must begin with the same partition key as the table.

    + *
  • + *
  • + *

    + * Projection - Specifies attributes that are copied (projected) from + * the table into the index. These are in addition to the primary key attributes + * and index key attributes, which are automatically projected. Each attribute + * specification is composed of:

    + *
      + *
    • + *

      + * ProjectionType - One of the following:

      + *
        + *
      • + *

        + * KEYS_ONLY - Only the index and primary keys are + * projected into the index.

        + *
      • + *
      • + *

        + * INCLUDE - In addition to the attributes described + * in KEYS_ONLY, the secondary index will include + * other non-key attributes that you specify.

        + *
      • + *
      • + *

        + * ALL - All of the table attributes are projected + * into the index.

        + *
      • + *
      + *
    • + *
    • + *

      + * NonKeyAttributes - A list of one or more non-key attribute + * names that are projected into the secondary index. The total count of + * attributes provided in NonKeyAttributes, summed across all + * of the secondary indexes, must not exceed 100. If you project the same + * attribute into two different indexes, this counts as two distinct + * attributes when determining the total. This limit only applies when you + * specify the ProjectionType of INCLUDE. You still can + * specify the ProjectionType of ALL to project all attributes + * from the source table, even if the table has more than 100 attributes.

      + *
    • + *
    + *
  • + *
  • + *

    + * ProvisionedThroughput - The provisioned throughput settings for the + * global secondary index, consisting of read and write capacity units, along with + * data about increases and decreases.

    + *
  • + *
+ *

If the table is in the DELETING state, no information about indexes will + * be returned.

+ * @public + */ + GlobalSecondaryIndexes?: GlobalSecondaryIndexDescription[] | undefined; + /** + *

The current DynamoDB Streams configuration for the table.

+ * @public + */ + StreamSpecification?: StreamSpecification | undefined; + /** + *

A timestamp, in ISO 8601 format, for this stream.

+ *

Note that LatestStreamLabel is not a unique identifier for the stream, + * because it is possible that a stream from another table might have the same timestamp. + * However, the combination of the following three elements is guaranteed to be + * unique:

+ *
    + *
  • + *

    Amazon Web Services customer ID

    + *
  • + *
  • + *

    Table name

    + *
  • + *
  • + *

    + * StreamLabel + *

    + *
  • + *
+ * @public + */ + LatestStreamLabel?: string | undefined; + /** + *

The Amazon Resource Name (ARN) that uniquely identifies the latest stream for this + * table.

+ * @public + */ + LatestStreamArn?: string | undefined; + /** + *

Represents the version of global tables + * in use, if the table is replicated across Amazon Web Services Regions.

+ * @public + */ + GlobalTableVersion?: string | undefined; + /** + *

Represents replicas of the table.

+ * @public + */ + Replicas?: ReplicaDescription[] | undefined; + /** + *

Contains details for the restore.

+ * @public + */ + RestoreSummary?: RestoreSummary | undefined; + /** + *

The description of the server-side encryption status on the specified table.

+ * @public + */ + SSEDescription?: SSEDescription | undefined; + /** + *

Contains information about the table archive.

+ * @public + */ + ArchivalSummary?: ArchivalSummary | undefined; + /** + *

Contains details of the table class.

+ * @public + */ + TableClassSummary?: TableClassSummary | undefined; + /** + *

Indicates whether deletion protection is enabled (true) or disabled (false) on the + * table.

+ * @public + */ + DeletionProtectionEnabled?: boolean | undefined; + /** + *

The maximum number of read and write units for the specified on-demand table. If you + * use this parameter, you must specify MaxReadRequestUnits, + * MaxWriteRequestUnits, or both.

+ * @public + */ + OnDemandThroughput?: OnDemandThroughput | undefined; + /** + *

Describes the warm throughput value of the base table.

+ * @public + */ + WarmThroughput?: TableWarmThroughputDescription | undefined; + /** + *

Indicates one of the following consistency modes for a global table:

+ *
    + *
  • + *

    + * EVENTUAL: Indicates that the global table is configured for multi-Region eventual consistency.

    + *
  • + *
  • + *

    + * STRONG: Indicates that the global table is configured for multi-Region strong consistency (preview).

    + * + *

    Multi-Region strong consistency (MRSC) is a new DynamoDB global tables capability currently available in preview mode. For more information, see Global tables multi-Region strong consistency.

    + *
    + *
  • + *
+ *

If you don't specify this field, the global table consistency mode defaults to EVENTUAL.

+ * @public + */ + MultiRegionConsistency?: MultiRegionConsistency | undefined; +} +/** + *

Represents the output of a CreateTable operation.

+ * @public + */ +export interface CreateTableOutput { + /** + *

Represents the properties of the table.

+ * @public + */ + TableDescription?: TableDescription | undefined; +} +/** + *

The operation conflicts with the resource's availability. For example:

+ *
    + *
  • + *

    You attempted to recreate an existing table.

    + *
  • + *
  • + *

    You tried to delete a table currently in the CREATING state.

    + *
  • + *
  • + *

    You tried to update a resource that was already being updated.

    + *
  • + *
+ *

When appropriate, wait for the ongoing update to complete and attempt the request again.

+ * @public + */ +export declare class ResourceInUseException extends __BaseException { + readonly name: "ResourceInUseException"; + readonly $fault: "client"; + /** + * @internal + */ + constructor(opts: __ExceptionOptionType); +} +/** + *

Processing options for the CSV file being imported.

+ * @public + */ +export interface CsvOptions { + /** + *

The delimiter used for separating items in the CSV file being imported.

+ * @public + */ + Delimiter?: string | undefined; + /** + *

List of the headers used to specify a common header for all source CSV files being + * imported. If this field is specified then the first line of each CSV file is treated as + * data instead of the header. If this field is not specified the the first line of each + * CSV file is treated as the header.

+ * @public + */ + HeaderList?: string[] | undefined; +} +/** + * @public + */ +export interface DeleteBackupInput { + /** + *

The ARN associated with the backup.

+ * @public + */ + BackupArn: string | undefined; +} +/** + * @public + */ +export interface DeleteBackupOutput { + /** + *

Contains the description of the backup created for the table.

+ * @public + */ + BackupDescription?: BackupDescription | undefined; +} +/** + *

Represents a global secondary index to be deleted from an existing table.

+ * @public + */ +export interface DeleteGlobalSecondaryIndexAction { + /** + *

The name of the global secondary index to be deleted.

+ * @public + */ + IndexName: string | undefined; +} +/** + * @public + * @enum + */ +export declare const ReturnValue: { + readonly ALL_NEW: "ALL_NEW"; + readonly ALL_OLD: "ALL_OLD"; + readonly NONE: "NONE"; + readonly UPDATED_NEW: "UPDATED_NEW"; + readonly UPDATED_OLD: "UPDATED_OLD"; +}; +/** + * @public + */ +export type ReturnValue = (typeof ReturnValue)[keyof typeof ReturnValue]; +/** + *

The request was rejected because one or more items in the request are being modified by a request in another Region.

+ * @public + */ +export declare class ReplicatedWriteConflictException extends __BaseException { + readonly name: "ReplicatedWriteConflictException"; + readonly $fault: "client"; + /** + * @internal + */ + constructor(opts: __ExceptionOptionType); +} +/** + *

Operation was rejected because there is an ongoing transaction for the + * item.

+ * @public + */ +export declare class TransactionConflictException extends __BaseException { + readonly name: "TransactionConflictException"; + readonly $fault: "client"; + /** + * @internal + */ + constructor(opts: __ExceptionOptionType); +} +/** + *

Represents a replica to be removed.

+ * @public + */ +export interface DeleteReplicaAction { + /** + *

The Region of the replica to be removed.

+ * @public + */ + RegionName: string | undefined; +} +/** + *

Represents a replica to be deleted.

+ * @public + */ +export interface DeleteReplicationGroupMemberAction { + /** + *

The Region where the replica exists.

+ * @public + */ + RegionName: string | undefined; +} +/** + * @public + */ +export interface DeleteResourcePolicyInput { + /** + *

The Amazon Resource Name (ARN) of the DynamoDB resource from which the policy will be + * removed. The resources you can specify include tables and streams. If you remove the + * policy of a table, it will also remove the permissions for the table's indexes defined + * in that policy document. This is because index permissions are defined in the table's + * policy.

+ * @public + */ + ResourceArn: string | undefined; + /** + *

A string value that you can use to conditionally delete your policy. When you provide + * an expected revision ID, if the revision ID of the existing policy on the resource + * doesn't match or if there's no policy attached to the resource, the request will fail + * and return a PolicyNotFoundException.

+ * @public + */ + ExpectedRevisionId?: string | undefined; +} +/** + * @public + */ +export interface DeleteResourcePolicyOutput { + /** + *

A unique string that represents the revision ID of the policy. If you're comparing revision IDs, make sure to always use string comparison logic.

+ *

This value will be empty if you make a request against a resource without a + * policy.

+ * @public + */ + RevisionId?: string | undefined; +} +/** + *

The operation tried to access a nonexistent resource-based policy.

+ *

If you specified an ExpectedRevisionId, it's possible that a policy is present for the resource but its revision ID didn't match the expected value.

+ * @public + */ +export declare class PolicyNotFoundException extends __BaseException { + readonly name: "PolicyNotFoundException"; + readonly $fault: "client"; + /** + * @internal + */ + constructor(opts: __ExceptionOptionType); +} +/** + *

Represents the input of a DeleteTable operation.

+ * @public + */ +export interface DeleteTableInput { + /** + *

The name of the table to delete. You can also provide the Amazon Resource Name (ARN) of the table in + * this parameter.

+ * @public + */ + TableName: string | undefined; +} +/** + *

Represents the output of a DeleteTable operation.

+ * @public + */ +export interface DeleteTableOutput { + /** + *

Represents the properties of a table.

+ * @public + */ + TableDescription?: TableDescription | undefined; +} +/** + * @public + */ +export interface DescribeBackupInput { + /** + *

The Amazon Resource Name (ARN) associated with the backup.

+ * @public + */ + BackupArn: string | undefined; +} +/** + * @public + */ +export interface DescribeBackupOutput { + /** + *

Contains the description of the backup created for the table.

+ * @public + */ + BackupDescription?: BackupDescription | undefined; +} +/** + * @public + */ +export interface DescribeContinuousBackupsInput { + /** + *

Name of the table for which the customer wants to check the continuous backups and + * point in time recovery settings.

+ *

You can also provide the Amazon Resource Name (ARN) of the table in this parameter.

+ * @public + */ + TableName: string | undefined; +} +/** + * @public + */ +export interface DescribeContinuousBackupsOutput { + /** + *

Represents the continuous backups and point in time recovery settings on the + * table.

+ * @public + */ + ContinuousBackupsDescription?: ContinuousBackupsDescription | undefined; +} +/** + * @public + */ +export interface DescribeContributorInsightsInput { + /** + *

The name of the table to describe. You can also provide the Amazon Resource Name (ARN) of the table in + * this parameter.

+ * @public + */ + TableName: string | undefined; + /** + *

The name of the global secondary index to describe, if applicable.

+ * @public + */ + IndexName?: string | undefined; +} +/** + *

Represents a failure a contributor insights operation.

+ * @public + */ +export interface FailureException { + /** + *

Exception name.

+ * @public + */ + ExceptionName?: string | undefined; + /** + *

Description of the failure.

+ * @public + */ + ExceptionDescription?: string | undefined; +} +/** + * @public + */ +export interface DescribeContributorInsightsOutput { + /** + *

The name of the table being described.

+ * @public + */ + TableName?: string | undefined; + /** + *

The name of the global secondary index being described.

+ * @public + */ + IndexName?: string | undefined; + /** + *

List of names of the associated contributor insights rules.

+ * @public + */ + ContributorInsightsRuleList?: string[] | undefined; + /** + *

Current status of contributor insights.

+ * @public + */ + ContributorInsightsStatus?: ContributorInsightsStatus | undefined; + /** + *

Timestamp of the last time the status was changed.

+ * @public + */ + LastUpdateDateTime?: Date | undefined; + /** + *

Returns information about the last failure that was encountered.

+ *

The most common exceptions for a FAILED status are:

+ *
    + *
  • + *

    LimitExceededException - Per-account Amazon CloudWatch Contributor Insights + * rule limit reached. Please disable Contributor Insights for other tables/indexes + * OR disable Contributor Insights rules before retrying.

    + *
  • + *
  • + *

    AccessDeniedException - Amazon CloudWatch Contributor Insights rules cannot be + * modified due to insufficient permissions.

    + *
  • + *
  • + *

    AccessDeniedException - Failed to create service-linked role for Contributor + * Insights due to insufficient permissions.

    + *
  • + *
  • + *

    InternalServerError - Failed to create Amazon CloudWatch Contributor Insights + * rules. Please retry request.

    + *
  • + *
+ * @public + */ + FailureException?: FailureException | undefined; +} +/** + * @public + */ +export interface DescribeEndpointsRequest { +} +/** + *

An endpoint information details.

+ * @public + */ +export interface Endpoint { + /** + *

IP address of the endpoint.

+ * @public + */ + Address: string | undefined; + /** + *

Endpoint cache time to live (TTL) value.

+ * @public + */ + CachePeriodInMinutes: number | undefined; +} +/** + * @public + */ +export interface DescribeEndpointsResponse { + /** + *

List of endpoints.

+ * @public + */ + Endpoints: Endpoint[] | undefined; +} +/** + * @public + */ +export interface DescribeExportInput { + /** + *

The Amazon Resource Name (ARN) associated with the export.

+ * @public + */ + ExportArn: string | undefined; +} +/** + * @public + * @enum + */ +export declare const ExportFormat: { + readonly DYNAMODB_JSON: "DYNAMODB_JSON"; + readonly ION: "ION"; +}; +/** + * @public + */ +export type ExportFormat = (typeof ExportFormat)[keyof typeof ExportFormat]; +/** + * @public + * @enum + */ +export declare const ExportStatus: { + readonly COMPLETED: "COMPLETED"; + readonly FAILED: "FAILED"; + readonly IN_PROGRESS: "IN_PROGRESS"; +}; +/** + * @public + */ +export type ExportStatus = (typeof ExportStatus)[keyof typeof ExportStatus]; +/** + * @public + * @enum + */ +export declare const ExportType: { + readonly FULL_EXPORT: "FULL_EXPORT"; + readonly INCREMENTAL_EXPORT: "INCREMENTAL_EXPORT"; +}; +/** + * @public + */ +export type ExportType = (typeof ExportType)[keyof typeof ExportType]; +/** + * @public + * @enum + */ +export declare const ExportViewType: { + readonly NEW_AND_OLD_IMAGES: "NEW_AND_OLD_IMAGES"; + readonly NEW_IMAGE: "NEW_IMAGE"; +}; +/** + * @public + */ +export type ExportViewType = (typeof ExportViewType)[keyof typeof ExportViewType]; +/** + *

Optional object containing the parameters specific to an incremental export.

+ * @public + */ +export interface IncrementalExportSpecification { + /** + *

Time in the past which provides the inclusive start range for the export table's data, + * counted in seconds from the start of the Unix epoch. The incremental export will reflect + * the table's state including and after this point in time.

+ * @public + */ + ExportFromTime?: Date | undefined; + /** + *

Time in the past which provides the exclusive end range for the export table's data, + * counted in seconds from the start of the Unix epoch. The incremental export will reflect + * the table's state just prior to this point in time. If this is not provided, the latest + * time with data available will be used.

+ * @public + */ + ExportToTime?: Date | undefined; + /** + *

The view type that was chosen for the export. Valid values are + * NEW_AND_OLD_IMAGES and NEW_IMAGES. The default value is + * NEW_AND_OLD_IMAGES.

+ * @public + */ + ExportViewType?: ExportViewType | undefined; +} +/** + * @public + * @enum + */ +export declare const S3SseAlgorithm: { + readonly AES256: "AES256"; + readonly KMS: "KMS"; +}; +/** + * @public + */ +export type S3SseAlgorithm = (typeof S3SseAlgorithm)[keyof typeof S3SseAlgorithm]; +/** + *

Represents the properties of the exported table.

+ * @public + */ +export interface ExportDescription { + /** + *

The Amazon Resource Name (ARN) of the table export.

+ * @public + */ + ExportArn?: string | undefined; + /** + *

Export can be in one of the following states: IN_PROGRESS, COMPLETED, or + * FAILED.

+ * @public + */ + ExportStatus?: ExportStatus | undefined; + /** + *

The time at which the export task began.

+ * @public + */ + StartTime?: Date | undefined; + /** + *

The time at which the export task completed.

+ * @public + */ + EndTime?: Date | undefined; + /** + *

The name of the manifest file for the export task.

+ * @public + */ + ExportManifest?: string | undefined; + /** + *

The Amazon Resource Name (ARN) of the table that was exported.

+ * @public + */ + TableArn?: string | undefined; + /** + *

Unique ID of the table that was exported.

+ * @public + */ + TableId?: string | undefined; + /** + *

Point in time from which table data was exported.

+ * @public + */ + ExportTime?: Date | undefined; + /** + *

The client token that was provided for the export task. A client token makes calls to + * ExportTableToPointInTimeInput idempotent, meaning that multiple + * identical calls have the same effect as one single call.

+ * @public + */ + ClientToken?: string | undefined; + /** + *

The name of the Amazon S3 bucket containing the export.

+ * @public + */ + S3Bucket?: string | undefined; + /** + *

The ID of the Amazon Web Services account that owns the bucket containing the + * export.

+ * @public + */ + S3BucketOwner?: string | undefined; + /** + *

The Amazon S3 bucket prefix used as the file name and path of the exported + * snapshot.

+ * @public + */ + S3Prefix?: string | undefined; + /** + *

Type of encryption used on the bucket where export data is stored. Valid values for + * S3SseAlgorithm are:

+ *
    + *
  • + *

    + * AES256 - server-side encryption with Amazon S3 managed + * keys

    + *
  • + *
  • + *

    + * KMS - server-side encryption with KMS managed + * keys

    + *
  • + *
+ * @public + */ + S3SseAlgorithm?: S3SseAlgorithm | undefined; + /** + *

The ID of the KMS managed key used to encrypt the S3 bucket where + * export data is stored (if applicable).

+ * @public + */ + S3SseKmsKeyId?: string | undefined; + /** + *

Status code for the result of the failed export.

+ * @public + */ + FailureCode?: string | undefined; + /** + *

Export failure reason description.

+ * @public + */ + FailureMessage?: string | undefined; + /** + *

The format of the exported data. Valid values for ExportFormat are + * DYNAMODB_JSON or ION.

+ * @public + */ + ExportFormat?: ExportFormat | undefined; + /** + *

The billable size of the table export.

+ * @public + */ + BilledSizeBytes?: number | undefined; + /** + *

The number of items exported.

+ * @public + */ + ItemCount?: number | undefined; + /** + *

The type of export that was performed. Valid values are FULL_EXPORT or + * INCREMENTAL_EXPORT.

+ * @public + */ + ExportType?: ExportType | undefined; + /** + *

Optional object containing the parameters specific to an incremental export.

+ * @public + */ + IncrementalExportSpecification?: IncrementalExportSpecification | undefined; +} +/** + * @public + */ +export interface DescribeExportOutput { + /** + *

Represents the properties of the export.

+ * @public + */ + ExportDescription?: ExportDescription | undefined; +} +/** + *

The specified export was not found.

+ * @public + */ +export declare class ExportNotFoundException extends __BaseException { + readonly name: "ExportNotFoundException"; + readonly $fault: "client"; + /** + * @internal + */ + constructor(opts: __ExceptionOptionType); +} +/** + * @public + */ +export interface DescribeGlobalTableInput { + /** + *

The name of the global table.

+ * @public + */ + GlobalTableName: string | undefined; +} +/** + * @public + */ +export interface DescribeGlobalTableOutput { + /** + *

Contains the details of the global table.

+ * @public + */ + GlobalTableDescription?: GlobalTableDescription | undefined; +} +/** + *

The specified global table does not exist.

+ * @public + */ +export declare class GlobalTableNotFoundException extends __BaseException { + readonly name: "GlobalTableNotFoundException"; + readonly $fault: "client"; + /** + * @internal + */ + constructor(opts: __ExceptionOptionType); +} +/** + * @public + */ +export interface DescribeGlobalTableSettingsInput { + /** + *

The name of the global table to describe.

+ * @public + */ + GlobalTableName: string | undefined; +} +/** + *

Represents the properties of a global secondary index.

+ * @public + */ +export interface ReplicaGlobalSecondaryIndexSettingsDescription { + /** + *

The name of the global secondary index. The name must be unique among all other + * indexes on this table.

+ * @public + */ + IndexName: string | undefined; + /** + *

The current status of the global secondary index:

+ *
    + *
  • + *

    + * CREATING - The global secondary index is being created.

    + *
  • + *
  • + *

    + * UPDATING - The global secondary index is being updated.

    + *
  • + *
  • + *

    + * DELETING - The global secondary index is being deleted.

    + *
  • + *
  • + *

    + * ACTIVE - The global secondary index is ready for use.

    + *
  • + *
+ * @public + */ + IndexStatus?: IndexStatus | undefined; + /** + *

The maximum number of strongly consistent reads consumed per second before DynamoDB + * returns a ThrottlingException.

+ * @public + */ + ProvisionedReadCapacityUnits?: number | undefined; + /** + *

Auto scaling settings for a global secondary index replica's read capacity + * units.

+ * @public + */ + ProvisionedReadCapacityAutoScalingSettings?: AutoScalingSettingsDescription | undefined; + /** + *

The maximum number of writes consumed per second before DynamoDB returns a + * ThrottlingException.

+ * @public + */ + ProvisionedWriteCapacityUnits?: number | undefined; + /** + *

Auto scaling settings for a global secondary index replica's write capacity + * units.

+ * @public + */ + ProvisionedWriteCapacityAutoScalingSettings?: AutoScalingSettingsDescription | undefined; +} +/** + *

Represents the properties of a replica.

+ * @public + */ +export interface ReplicaSettingsDescription { + /** + *

The Region name of the replica.

+ * @public + */ + RegionName: string | undefined; + /** + *

The current state of the Region:

+ *
    + *
  • + *

    + * CREATING - The Region is being created.

    + *
  • + *
  • + *

    + * UPDATING - The Region is being updated.

    + *
  • + *
  • + *

    + * DELETING - The Region is being deleted.

    + *
  • + *
  • + *

    + * ACTIVE - The Region is ready for use.

    + *
  • + *
+ * @public + */ + ReplicaStatus?: ReplicaStatus | undefined; + /** + *

The read/write capacity mode of the replica.

+ * @public + */ + ReplicaBillingModeSummary?: BillingModeSummary | undefined; + /** + *

The maximum number of strongly consistent reads consumed per second before DynamoDB + * returns a ThrottlingException. For more information, see Specifying Read and Write Requirements in the Amazon DynamoDB + * Developer Guide.

+ * @public + */ + ReplicaProvisionedReadCapacityUnits?: number | undefined; + /** + *

Auto scaling settings for a global table replica's read capacity units.

+ * @public + */ + ReplicaProvisionedReadCapacityAutoScalingSettings?: AutoScalingSettingsDescription | undefined; + /** + *

The maximum number of writes consumed per second before DynamoDB returns a + * ThrottlingException. For more information, see Specifying Read and Write Requirements in the Amazon DynamoDB + * Developer Guide.

+ * @public + */ + ReplicaProvisionedWriteCapacityUnits?: number | undefined; + /** + *

Auto scaling settings for a global table replica's write capacity units.

+ * @public + */ + ReplicaProvisionedWriteCapacityAutoScalingSettings?: AutoScalingSettingsDescription | undefined; + /** + *

Replica global secondary index settings for the global table.

+ * @public + */ + ReplicaGlobalSecondaryIndexSettings?: ReplicaGlobalSecondaryIndexSettingsDescription[] | undefined; + /** + *

Contains details of the table class.

+ * @public + */ + ReplicaTableClassSummary?: TableClassSummary | undefined; +} +/** + * @public + */ +export interface DescribeGlobalTableSettingsOutput { + /** + *

The name of the global table.

+ * @public + */ + GlobalTableName?: string | undefined; + /** + *

The Region-specific settings for the global table.

+ * @public + */ + ReplicaSettings?: ReplicaSettingsDescription[] | undefined; +} +/** + * @public + */ +export interface DescribeImportInput { + /** + *

The Amazon Resource Name (ARN) associated with the table you're importing to.

+ * @public + */ + ImportArn: string | undefined; +} +/** + * @public + * @enum + */ +export declare const ImportStatus: { + readonly CANCELLED: "CANCELLED"; + readonly CANCELLING: "CANCELLING"; + readonly COMPLETED: "COMPLETED"; + readonly FAILED: "FAILED"; + readonly IN_PROGRESS: "IN_PROGRESS"; +}; +/** + * @public + */ +export type ImportStatus = (typeof ImportStatus)[keyof typeof ImportStatus]; +/** + * @public + * @enum + */ +export declare const InputCompressionType: { + readonly GZIP: "GZIP"; + readonly NONE: "NONE"; + readonly ZSTD: "ZSTD"; +}; +/** + * @public + */ +export type InputCompressionType = (typeof InputCompressionType)[keyof typeof InputCompressionType]; +/** + * @public + * @enum + */ +export declare const InputFormat: { + readonly CSV: "CSV"; + readonly DYNAMODB_JSON: "DYNAMODB_JSON"; + readonly ION: "ION"; +}; +/** + * @public + */ +export type InputFormat = (typeof InputFormat)[keyof typeof InputFormat]; +/** + *

The format options for the data that was imported into the target table. There is one + * value, CsvOption.

+ * @public + */ +export interface InputFormatOptions { + /** + *

The options for imported source files in CSV format. The values are Delimiter and + * HeaderList.

+ * @public + */ + Csv?: CsvOptions | undefined; +} +/** + *

The S3 bucket that is being imported from.

+ * @public + */ +export interface S3BucketSource { + /** + *

The account number of the S3 bucket that is being imported from. If the bucket is + * owned by the requester this is optional.

+ * @public + */ + S3BucketOwner?: string | undefined; + /** + *

The S3 bucket that is being imported from.

+ * @public + */ + S3Bucket: string | undefined; + /** + *

The key prefix shared by all S3 Objects that are being imported.

+ * @public + */ + S3KeyPrefix?: string | undefined; +} +/** + *

The parameters for the table created as part of the import operation.

+ * @public + */ +export interface TableCreationParameters { + /** + *

The name of the table created as part of the import operation.

+ * @public + */ + TableName: string | undefined; + /** + *

The attributes of the table created as part of the import operation.

+ * @public + */ + AttributeDefinitions: AttributeDefinition[] | undefined; + /** + *

The primary key and option sort key of the table created as part of the import + * operation.

+ * @public + */ + KeySchema: KeySchemaElement[] | undefined; + /** + *

The billing mode for provisioning the table created as part of the import operation. + *

+ * @public + */ + BillingMode?: BillingMode | undefined; + /** + *

Represents the provisioned throughput settings for the specified global secondary + * index. You must use ProvisionedThroughput or + * OnDemandThroughput based on your table’s capacity mode.

+ *

For current minimum and maximum provisioned throughput values, see Service, + * Account, and Table Quotas in the Amazon DynamoDB Developer + * Guide.

+ * @public + */ + ProvisionedThroughput?: ProvisionedThroughput | undefined; + /** + *

Sets the maximum number of read and write units for the specified on-demand table. If + * you use this parameter, you must specify MaxReadRequestUnits, + * MaxWriteRequestUnits, or both.

+ * @public + */ + OnDemandThroughput?: OnDemandThroughput | undefined; + /** + *

Represents the settings used to enable server-side encryption.

+ * @public + */ + SSESpecification?: SSESpecification | undefined; + /** + *

The Global Secondary Indexes (GSI) of the table to be created as part of the import + * operation.

+ * @public + */ + GlobalSecondaryIndexes?: GlobalSecondaryIndex[] | undefined; +} +/** + *

Represents the properties of the table being imported into. + *

+ * @public + */ +export interface ImportTableDescription { + /** + *

The Amazon Resource Number (ARN) corresponding to the import request. + *

+ * @public + */ + ImportArn?: string | undefined; + /** + *

The status of the import.

+ * @public + */ + ImportStatus?: ImportStatus | undefined; + /** + *

The Amazon Resource Number (ARN) of the table being imported into. + *

+ * @public + */ + TableArn?: string | undefined; + /** + *

The table id corresponding to the table created by import table process. + *

+ * @public + */ + TableId?: string | undefined; + /** + *

The client token that was provided for the import task. Reusing the client token on + * retry makes a call to ImportTable idempotent.

+ * @public + */ + ClientToken?: string | undefined; + /** + *

Values for the S3 bucket the source file is imported from. Includes bucket name + * (required), key prefix (optional) and bucket account owner ID (optional).

+ * @public + */ + S3BucketSource?: S3BucketSource | undefined; + /** + *

The number of errors occurred on importing the source file into the target table. + *

+ * @public + */ + ErrorCount?: number | undefined; + /** + *

The Amazon Resource Number (ARN) of the Cloudwatch Log Group associated with the + * target table.

+ * @public + */ + CloudWatchLogGroupArn?: string | undefined; + /** + *

The format of the source data going into the target table. + *

+ * @public + */ + InputFormat?: InputFormat | undefined; + /** + *

The format options for the data that was imported into the target table. There is one + * value, CsvOption.

+ * @public + */ + InputFormatOptions?: InputFormatOptions | undefined; + /** + *

The compression options for the data that has been imported into the target table. + * The values are NONE, GZIP, or ZSTD.

+ * @public + */ + InputCompressionType?: InputCompressionType | undefined; + /** + *

The parameters for the new table that is being imported into.

+ * @public + */ + TableCreationParameters?: TableCreationParameters | undefined; + /** + *

The time when this import task started.

+ * @public + */ + StartTime?: Date | undefined; + /** + *

The time at which the creation of the table associated with this import task + * completed.

+ * @public + */ + EndTime?: Date | undefined; + /** + *

The total size of data processed from the source file, in Bytes.

+ * @public + */ + ProcessedSizeBytes?: number | undefined; + /** + *

The total number of items processed from the source file.

+ * @public + */ + ProcessedItemCount?: number | undefined; + /** + *

The number of items successfully imported into the new table.

+ * @public + */ + ImportedItemCount?: number | undefined; + /** + *

The error code corresponding to the failure that the import job ran into during + * execution.

+ * @public + */ + FailureCode?: string | undefined; + /** + *

The error message corresponding to the failure that the import job ran into during + * execution.

+ * @public + */ + FailureMessage?: string | undefined; +} +/** + * @public + */ +export interface DescribeImportOutput { + /** + *

Represents the properties of the table created for the import, and parameters of the + * import. The import parameters include import status, how many items were processed, and + * how many errors were encountered.

+ * @public + */ + ImportTableDescription: ImportTableDescription | undefined; +} +/** + *

+ * The specified import was not found. + *

+ * @public + */ +export declare class ImportNotFoundException extends __BaseException { + readonly name: "ImportNotFoundException"; + readonly $fault: "client"; + /** + * @internal + */ + constructor(opts: __ExceptionOptionType); +} +/** + * @public + */ +export interface DescribeKinesisStreamingDestinationInput { + /** + *

The name of the table being described. You can also provide the Amazon Resource Name (ARN) of the table + * in this parameter.

+ * @public + */ + TableName: string | undefined; +} +/** + * @public + * @enum + */ +export declare const DestinationStatus: { + readonly ACTIVE: "ACTIVE"; + readonly DISABLED: "DISABLED"; + readonly DISABLING: "DISABLING"; + readonly ENABLE_FAILED: "ENABLE_FAILED"; + readonly ENABLING: "ENABLING"; + readonly UPDATING: "UPDATING"; +}; +/** + * @public + */ +export type DestinationStatus = (typeof DestinationStatus)[keyof typeof DestinationStatus]; +/** + *

Describes a Kinesis data stream destination.

+ * @public + */ +export interface KinesisDataStreamDestination { + /** + *

The ARN for a specific Kinesis data stream.

+ * @public + */ + StreamArn?: string | undefined; + /** + *

The current status of replication.

+ * @public + */ + DestinationStatus?: DestinationStatus | undefined; + /** + *

The human-readable string that corresponds to the replica status.

+ * @public + */ + DestinationStatusDescription?: string | undefined; + /** + *

The precision of the Kinesis data stream timestamp. The values are either + * MILLISECOND or MICROSECOND.

+ * @public + */ + ApproximateCreationDateTimePrecision?: ApproximateCreationDateTimePrecision | undefined; +} +/** + * @public + */ +export interface DescribeKinesisStreamingDestinationOutput { + /** + *

The name of the table being described.

+ * @public + */ + TableName?: string | undefined; + /** + *

The list of replica structures for the table being described.

+ * @public + */ + KinesisDataStreamDestinations?: KinesisDataStreamDestination[] | undefined; +} +/** + *

Represents the input of a DescribeLimits operation. Has no + * content.

+ * @public + */ +export interface DescribeLimitsInput { +} +/** + *

Represents the output of a DescribeLimits operation.

+ * @public + */ +export interface DescribeLimitsOutput { + /** + *

The maximum total read capacity units that your account allows you to provision across + * all of your tables in this Region.

+ * @public + */ + AccountMaxReadCapacityUnits?: number | undefined; + /** + *

The maximum total write capacity units that your account allows you to provision + * across all of your tables in this Region.

+ * @public + */ + AccountMaxWriteCapacityUnits?: number | undefined; + /** + *

The maximum read capacity units that your account allows you to provision for a new + * table that you are creating in this Region, including the read capacity units + * provisioned for its global secondary indexes (GSIs).

+ * @public + */ + TableMaxReadCapacityUnits?: number | undefined; + /** + *

The maximum write capacity units that your account allows you to provision for a new + * table that you are creating in this Region, including the write capacity units + * provisioned for its global secondary indexes (GSIs).

+ * @public + */ + TableMaxWriteCapacityUnits?: number | undefined; +} +/** + *

Represents the input of a DescribeTable operation.

+ * @public + */ +export interface DescribeTableInput { + /** + *

The name of the table to describe. You can also provide the Amazon Resource Name (ARN) of the table in + * this parameter.

+ * @public + */ + TableName: string | undefined; +} +/** + *

Represents the output of a DescribeTable operation.

+ * @public + */ +export interface DescribeTableOutput { + /** + *

The properties of the table.

+ * @public + */ + Table?: TableDescription | undefined; +} +/** + * @public + */ +export interface DescribeTableReplicaAutoScalingInput { + /** + *

The name of the table. You can also provide the Amazon Resource Name (ARN) of the table in this + * parameter.

+ * @public + */ + TableName: string | undefined; +} +/** + *

Represents the auto scaling configuration for a replica global secondary index.

+ * @public + */ +export interface ReplicaGlobalSecondaryIndexAutoScalingDescription { + /** + *

The name of the global secondary index.

+ * @public + */ + IndexName?: string | undefined; + /** + *

The current state of the replica global secondary index:

+ *
    + *
  • + *

    + * CREATING - The index is being created.

    + *
  • + *
  • + *

    + * UPDATING - The table/index configuration is being updated. The + * table/index remains available for data operations when + * UPDATING + *

    + *
  • + *
  • + *

    + * DELETING - The index is being deleted.

    + *
  • + *
  • + *

    + * ACTIVE - The index is ready for use.

    + *
  • + *
+ * @public + */ + IndexStatus?: IndexStatus | undefined; + /** + *

Represents the auto scaling settings for a global table or global secondary + * index.

+ * @public + */ + ProvisionedReadCapacityAutoScalingSettings?: AutoScalingSettingsDescription | undefined; + /** + *

Represents the auto scaling settings for a global table or global secondary + * index.

+ * @public + */ + ProvisionedWriteCapacityAutoScalingSettings?: AutoScalingSettingsDescription | undefined; +} +/** + *

Represents the auto scaling settings of the replica.

+ * @public + */ +export interface ReplicaAutoScalingDescription { + /** + *

The Region where the replica exists.

+ * @public + */ + RegionName?: string | undefined; + /** + *

Replica-specific global secondary index auto scaling settings.

+ * @public + */ + GlobalSecondaryIndexes?: ReplicaGlobalSecondaryIndexAutoScalingDescription[] | undefined; + /** + *

Represents the auto scaling settings for a global table or global secondary + * index.

+ * @public + */ + ReplicaProvisionedReadCapacityAutoScalingSettings?: AutoScalingSettingsDescription | undefined; + /** + *

Represents the auto scaling settings for a global table or global secondary + * index.

+ * @public + */ + ReplicaProvisionedWriteCapacityAutoScalingSettings?: AutoScalingSettingsDescription | undefined; + /** + *

The current state of the replica:

+ *
    + *
  • + *

    + * CREATING - The replica is being created.

    + *
  • + *
  • + *

    + * UPDATING - The replica is being updated.

    + *
  • + *
  • + *

    + * DELETING - The replica is being deleted.

    + *
  • + *
  • + *

    + * ACTIVE - The replica is ready for use.

    + *
  • + *
+ * @public + */ + ReplicaStatus?: ReplicaStatus | undefined; +} +/** + *

Represents the auto scaling configuration for a global table.

+ * @public + */ +export interface TableAutoScalingDescription { + /** + *

The name of the table.

+ * @public + */ + TableName?: string | undefined; + /** + *

The current state of the table:

+ *
    + *
  • + *

    + * CREATING - The table is being created.

    + *
  • + *
  • + *

    + * UPDATING - The table is being updated.

    + *
  • + *
  • + *

    + * DELETING - The table is being deleted.

    + *
  • + *
  • + *

    + * ACTIVE - The table is ready for use.

    + *
  • + *
+ * @public + */ + TableStatus?: TableStatus | undefined; + /** + *

Represents replicas of the global table.

+ * @public + */ + Replicas?: ReplicaAutoScalingDescription[] | undefined; +} +/** + * @public + */ +export interface DescribeTableReplicaAutoScalingOutput { + /** + *

Represents the auto scaling properties of the table.

+ * @public + */ + TableAutoScalingDescription?: TableAutoScalingDescription | undefined; +} +/** + * @public + */ +export interface DescribeTimeToLiveInput { + /** + *

The name of the table to be described. You can also provide the Amazon Resource Name (ARN) of the table + * in this parameter.

+ * @public + */ + TableName: string | undefined; +} +/** + * @public + */ +export interface DescribeTimeToLiveOutput { + /** + *

+ * @public + */ + TimeToLiveDescription?: TimeToLiveDescription | undefined; +} +/** + *

Enables setting the configuration for Kinesis Streaming.

+ * @public + */ +export interface EnableKinesisStreamingConfiguration { + /** + *

Toggle for the precision of Kinesis data stream timestamp. The values are either + * MILLISECOND or MICROSECOND.

+ * @public + */ + ApproximateCreationDateTimePrecision?: ApproximateCreationDateTimePrecision | undefined; +} +/** + * @public + */ +export interface KinesisStreamingDestinationInput { + /** + *

The name of the DynamoDB table. You can also provide the Amazon Resource Name (ARN) of the + * table in this parameter.

+ * @public + */ + TableName: string | undefined; + /** + *

The ARN for a Kinesis data stream.

+ * @public + */ + StreamArn: string | undefined; + /** + *

The source for the Kinesis streaming information that is being enabled.

+ * @public + */ + EnableKinesisStreamingConfiguration?: EnableKinesisStreamingConfiguration | undefined; +} +/** + * @public + */ +export interface KinesisStreamingDestinationOutput { + /** + *

The name of the table being modified.

+ * @public + */ + TableName?: string | undefined; + /** + *

The ARN for the specific Kinesis data stream.

+ * @public + */ + StreamArn?: string | undefined; + /** + *

The current status of the replication.

+ * @public + */ + DestinationStatus?: DestinationStatus | undefined; + /** + *

The destination for the Kinesis streaming information that is being enabled.

+ * @public + */ + EnableKinesisStreamingConfiguration?: EnableKinesisStreamingConfiguration | undefined; +} +/** + *

There was an attempt to insert an item with the same primary key as an item that + * already exists in the DynamoDB table.

+ * @public + */ +export declare class DuplicateItemException extends __BaseException { + readonly name: "DuplicateItemException"; + readonly $fault: "client"; + /** + * @internal + */ + constructor(opts: __ExceptionOptionType); +} +/** + *

DynamoDB rejected the request because you retried a request with a + * different payload but with an idempotent token that was already used.

+ * @public + */ +export declare class IdempotentParameterMismatchException extends __BaseException { + readonly name: "IdempotentParameterMismatchException"; + readonly $fault: "client"; + Message?: string | undefined; + /** + * @internal + */ + constructor(opts: __ExceptionOptionType); +} +/** + *

The transaction with the given request token is already in progress.

+ *

+ * Recommended Settings + *

+ * + *

+ * This is a general recommendation for handling the TransactionInProgressException. These settings help + * ensure that the client retries will trigger completion of the ongoing TransactWriteItems request. + *

+ *
+ *
    + *
  • + *

    + * Set clientExecutionTimeout to a value that allows at least one retry to be processed after 5 + * seconds have elapsed since the first attempt for the TransactWriteItems operation. + *

    + *
  • + *
  • + *

    + * Set socketTimeout to a value a little lower than the requestTimeout setting. + *

    + *
  • + *
  • + *

    + * requestTimeout should be set based on the time taken for the individual retries of a single + * HTTP request for your use case, but setting it to 1 second or higher should work well to reduce chances of + * retries and TransactionInProgressException errors. + *

    + *
  • + *
  • + *

    + * Use exponential backoff when retrying and tune backoff if needed. + *

    + *
  • + *
+ *

+ * Assuming default retry policy, + * example timeout settings based on the guidelines above are as follows: + *

+ *

Example timeline:

+ *
    + *
  • + *

    0-1000 first attempt

    + *
  • + *
  • + *

    1000-1500 first sleep/delay (default retry policy uses 500 ms as base delay for 4xx errors)

    + *
  • + *
  • + *

    1500-2500 second attempt

    + *
  • + *
  • + *

    2500-3500 second sleep/delay (500 * 2, exponential backoff)

    + *
  • + *
  • + *

    3500-4500 third attempt

    + *
  • + *
  • + *

    4500-6500 third sleep/delay (500 * 2^2)

    + *
  • + *
  • + *

    6500-7500 fourth attempt (this can trigger inline recovery since 5 seconds have elapsed since the first attempt reached TC)

    + *
  • + *
+ * @public + */ +export declare class TransactionInProgressException extends __BaseException { + readonly name: "TransactionInProgressException"; + readonly $fault: "client"; + Message?: string | undefined; + /** + * @internal + */ + constructor(opts: __ExceptionOptionType); +} +/** + *

There was a conflict when writing to the specified S3 bucket.

+ * @public + */ +export declare class ExportConflictException extends __BaseException { + readonly name: "ExportConflictException"; + readonly $fault: "client"; + /** + * @internal + */ + constructor(opts: __ExceptionOptionType); +} +/** + * @public + */ +export interface ExportTableToPointInTimeInput { + /** + *

The Amazon Resource Name (ARN) associated with the table to export.

+ * @public + */ + TableArn: string | undefined; + /** + *

Time in the past from which to export table data, counted in seconds from the start of + * the Unix epoch. The table export will be a snapshot of the table's state at this point + * in time.

+ * @public + */ + ExportTime?: Date | undefined; + /** + *

Providing a ClientToken makes the call to + * ExportTableToPointInTimeInput idempotent, meaning that multiple + * identical calls have the same effect as one single call.

+ *

A client token is valid for 8 hours after the first request that uses it is completed. + * After 8 hours, any request with the same client token is treated as a new request. Do + * not resubmit the same request with the same client token for more than 8 hours, or the + * result might not be idempotent.

+ *

If you submit a request with the same client token but a change in other parameters + * within the 8-hour idempotency window, DynamoDB returns an + * ImportConflictException.

+ * @public + */ + ClientToken?: string | undefined; + /** + *

The name of the Amazon S3 bucket to export the snapshot to.

+ * @public + */ + S3Bucket: string | undefined; + /** + *

The ID of the Amazon Web Services account that owns the bucket the export will be + * stored in.

+ * + *

S3BucketOwner is a required parameter when exporting to a S3 bucket in another + * account.

+ *
+ * @public + */ + S3BucketOwner?: string | undefined; + /** + *

The Amazon S3 bucket prefix to use as the file name and path of the exported + * snapshot.

+ * @public + */ + S3Prefix?: string | undefined; + /** + *

Type of encryption used on the bucket where export data will be stored. Valid values + * for S3SseAlgorithm are:

+ *
    + *
  • + *

    + * AES256 - server-side encryption with Amazon S3 managed + * keys

    + *
  • + *
  • + *

    + * KMS - server-side encryption with KMS managed + * keys

    + *
  • + *
+ * @public + */ + S3SseAlgorithm?: S3SseAlgorithm | undefined; + /** + *

The ID of the KMS managed key used to encrypt the S3 bucket where + * export data will be stored (if applicable).

+ * @public + */ + S3SseKmsKeyId?: string | undefined; + /** + *

The format for the exported data. Valid values for ExportFormat are + * DYNAMODB_JSON or ION.

+ * @public + */ + ExportFormat?: ExportFormat | undefined; + /** + *

Choice of whether to execute as a full export or incremental export. Valid values are + * FULL_EXPORT or INCREMENTAL_EXPORT. The default value is FULL_EXPORT. If + * INCREMENTAL_EXPORT is provided, the IncrementalExportSpecification must also be + * used.

+ * @public + */ + ExportType?: ExportType | undefined; + /** + *

Optional object containing the parameters specific to an incremental export.

+ * @public + */ + IncrementalExportSpecification?: IncrementalExportSpecification | undefined; +} +/** + * @public + */ +export interface ExportTableToPointInTimeOutput { + /** + *

Contains a description of the table export.

+ * @public + */ + ExportDescription?: ExportDescription | undefined; +} +/** + *

The specified ExportTime is outside of the point in time recovery + * window.

+ * @public + */ +export declare class InvalidExportTimeException extends __BaseException { + readonly name: "InvalidExportTimeException"; + readonly $fault: "client"; + /** + * @internal + */ + constructor(opts: __ExceptionOptionType); +} +/** + *

Point in time recovery has not yet been enabled for this source table.

+ * @public + */ +export declare class PointInTimeRecoveryUnavailableException extends __BaseException { + readonly name: "PointInTimeRecoveryUnavailableException"; + readonly $fault: "client"; + /** + * @internal + */ + constructor(opts: __ExceptionOptionType); +} +/** + * @public + */ +export interface GetResourcePolicyInput { + /** + *

The Amazon Resource Name (ARN) of the DynamoDB resource to which the policy is attached. The + * resources you can specify include tables and streams.

+ * @public + */ + ResourceArn: string | undefined; +} +/** + * @public + */ +export interface GetResourcePolicyOutput { + /** + *

The resource-based policy document attached to the resource, which can be a table or + * stream, in JSON format.

+ * @public + */ + Policy?: string | undefined; + /** + *

A unique string that represents the revision ID of the policy. If you're comparing revision IDs, make sure to always use string comparison logic.

+ * @public + */ + RevisionId?: string | undefined; +} +/** + *

+ * There was a conflict when importing from the specified S3 source. + * This can occur when the current import conflicts with a previous import request + * that had the same client token. + *

+ * @public + */ +export declare class ImportConflictException extends __BaseException { + readonly name: "ImportConflictException"; + readonly $fault: "client"; + /** + * @internal + */ + constructor(opts: __ExceptionOptionType); +} +/** + * @public + */ +export interface ImportTableInput { + /** + *

Providing a ClientToken makes the call to ImportTableInput + * idempotent, meaning that multiple identical calls have the same effect as one single + * call.

+ *

A client token is valid for 8 hours after the first request that uses it is completed. + * After 8 hours, any request with the same client token is treated as a new request. Do + * not resubmit the same request with the same client token for more than 8 hours, or the + * result might not be idempotent.

+ *

If you submit a request with the same client token but a change in other parameters + * within the 8-hour idempotency window, DynamoDB returns an + * IdempotentParameterMismatch exception.

+ * @public + */ + ClientToken?: string | undefined; + /** + *

The S3 bucket that provides the source for the import.

+ * @public + */ + S3BucketSource: S3BucketSource | undefined; + /** + *

The format of the source data. Valid values for ImportFormat are + * CSV, DYNAMODB_JSON or ION.

+ * @public + */ + InputFormat: InputFormat | undefined; + /** + *

Additional properties that specify how the input is formatted,

+ * @public + */ + InputFormatOptions?: InputFormatOptions | undefined; + /** + *

Type of compression to be used on the input coming from the imported table.

+ * @public + */ + InputCompressionType?: InputCompressionType | undefined; + /** + *

Parameters for the table to import the data into.

+ * @public + */ + TableCreationParameters: TableCreationParameters | undefined; +} +/** + * @public + */ +export interface ImportTableOutput { + /** + *

Represents the properties of the table created for the import, and parameters of the + * import. The import parameters include import status, how many items were processed, and + * how many errors were encountered.

+ * @public + */ + ImportTableDescription: ImportTableDescription | undefined; +} +/** + * @public + */ +export interface ListBackupsInput { + /** + *

Lists the backups from the table specified in TableName. You can also + * provide the Amazon Resource Name (ARN) of the table in this parameter.

+ * @public + */ + TableName?: string | undefined; + /** + *

Maximum number of backups to return at once.

+ * @public + */ + Limit?: number | undefined; + /** + *

Only backups created after this time are listed. TimeRangeLowerBound is + * inclusive.

+ * @public + */ + TimeRangeLowerBound?: Date | undefined; + /** + *

Only backups created before this time are listed. TimeRangeUpperBound is + * exclusive.

+ * @public + */ + TimeRangeUpperBound?: Date | undefined; + /** + *

+ * LastEvaluatedBackupArn is the Amazon Resource Name (ARN) of the backup last + * evaluated when the current page of results was returned, inclusive of the current page + * of results. This value may be specified as the ExclusiveStartBackupArn of a + * new ListBackups operation in order to fetch the next page of results. + *

+ * @public + */ + ExclusiveStartBackupArn?: string | undefined; + /** + *

The backups from the table specified by BackupType are listed.

+ *

Where BackupType can be:

+ *
    + *
  • + *

    + * USER - On-demand backup created by you. (The default setting if no + * other backup types are specified.)

    + *
  • + *
  • + *

    + * SYSTEM - On-demand backup automatically created by DynamoDB.

    + *
  • + *
  • + *

    + * ALL - All types of on-demand backups (USER and SYSTEM).

    + *
  • + *
+ * @public + */ + BackupType?: BackupTypeFilter | undefined; +} +/** + * @public + */ +export interface ListBackupsOutput { + /** + *

List of BackupSummary objects.

+ * @public + */ + BackupSummaries?: BackupSummary[] | undefined; + /** + *

The ARN of the backup last evaluated when the current page of results was returned, + * inclusive of the current page of results. This value may be specified as the + * ExclusiveStartBackupArn of a new ListBackups operation in + * order to fetch the next page of results.

+ *

If LastEvaluatedBackupArn is empty, then the last page of results has + * been processed and there are no more results to be retrieved.

+ *

If LastEvaluatedBackupArn is not empty, this may or may not indicate + * that there is more data to be returned. All results are guaranteed to have been returned + * if and only if no value for LastEvaluatedBackupArn is returned.

+ * @public + */ + LastEvaluatedBackupArn?: string | undefined; +} +/** + * @public + */ +export interface ListContributorInsightsInput { + /** + *

The name of the table. You can also provide the Amazon Resource Name (ARN) of the table in this + * parameter.

+ * @public + */ + TableName?: string | undefined; + /** + *

A token to for the desired page, if there is one.

+ * @public + */ + NextToken?: string | undefined; + /** + *

Maximum number of results to return per page.

+ * @public + */ + MaxResults?: number | undefined; +} +/** + * @public + */ +export interface ListContributorInsightsOutput { + /** + *

A list of ContributorInsightsSummary.

+ * @public + */ + ContributorInsightsSummaries?: ContributorInsightsSummary[] | undefined; + /** + *

A token to go to the next page if there is one.

+ * @public + */ + NextToken?: string | undefined; +} +/** + * @public + */ +export interface ListExportsInput { + /** + *

The Amazon Resource Name (ARN) associated with the exported table.

+ * @public + */ + TableArn?: string | undefined; + /** + *

Maximum number of results to return per page.

+ * @public + */ + MaxResults?: number | undefined; + /** + *

An optional string that, if supplied, must be copied from the output of a previous + * call to ListExports. When provided in this manner, the API fetches the next + * page of results.

+ * @public + */ + NextToken?: string | undefined; +} +/** + *

Summary information about an export task.

+ * @public + */ +export interface ExportSummary { + /** + *

The Amazon Resource Name (ARN) of the export.

+ * @public + */ + ExportArn?: string | undefined; + /** + *

Export can be in one of the following states: IN_PROGRESS, COMPLETED, or + * FAILED.

+ * @public + */ + ExportStatus?: ExportStatus | undefined; + /** + *

The type of export that was performed. Valid values are FULL_EXPORT or + * INCREMENTAL_EXPORT.

+ * @public + */ + ExportType?: ExportType | undefined; +} +/** + * @public + */ +export interface ListExportsOutput { + /** + *

A list of ExportSummary objects.

+ * @public + */ + ExportSummaries?: ExportSummary[] | undefined; + /** + *

If this value is returned, there are additional results to be displayed. To retrieve + * them, call ListExports again, with NextToken set to this + * value.

+ * @public + */ + NextToken?: string | undefined; +} +/** + * @public + */ +export interface ListGlobalTablesInput { + /** + *

The first global table name that this operation will evaluate.

+ * @public + */ + ExclusiveStartGlobalTableName?: string | undefined; + /** + *

The maximum number of table names to return, if the parameter is not specified + * DynamoDB defaults to 100.

+ *

If the number of global tables DynamoDB finds reaches this limit, it stops the + * operation and returns the table names collected up to that point, with a table name in + * the LastEvaluatedGlobalTableName to apply in a subsequent operation to the + * ExclusiveStartGlobalTableName parameter.

+ * @public + */ + Limit?: number | undefined; + /** + *

Lists the global tables in a specific Region.

+ * @public + */ + RegionName?: string | undefined; +} +/** + *

Represents the properties of a global table.

+ * @public + */ +export interface GlobalTable { + /** + *

The global table name.

+ * @public + */ + GlobalTableName?: string | undefined; + /** + *

The Regions where the global table has replicas.

+ * @public + */ + ReplicationGroup?: Replica[] | undefined; +} +/** + * @public + */ +export interface ListGlobalTablesOutput { + /** + *

List of global table names.

+ * @public + */ + GlobalTables?: GlobalTable[] | undefined; + /** + *

Last evaluated global table name.

+ * @public + */ + LastEvaluatedGlobalTableName?: string | undefined; +} +/** + * @public + */ +export interface ListImportsInput { + /** + *

The Amazon Resource Name (ARN) associated with the table that was imported to. + *

+ * @public + */ + TableArn?: string | undefined; + /** + *

The number of ImportSummary objects returned in a single page.

+ * @public + */ + PageSize?: number | undefined; + /** + *

An optional string that, if supplied, must be copied from the output of a previous + * call to ListImports. When provided in this manner, the API fetches the next + * page of results.

+ * @public + */ + NextToken?: string | undefined; +} +/** + *

Summary information about the source file for the import. + *

+ * @public + */ +export interface ImportSummary { + /** + *

The Amazon Resource Number (ARN) corresponding to the import request.

+ * @public + */ + ImportArn?: string | undefined; + /** + *

The status of the import operation.

+ * @public + */ + ImportStatus?: ImportStatus | undefined; + /** + *

The Amazon Resource Number (ARN) of the table being imported into.

+ * @public + */ + TableArn?: string | undefined; + /** + *

The path and S3 bucket of the source file that is being imported. This includes the + * S3Bucket (required), S3KeyPrefix (optional) and S3BucketOwner (optional if the bucket is + * owned by the requester).

+ * @public + */ + S3BucketSource?: S3BucketSource | undefined; + /** + *

The Amazon Resource Number (ARN) of the Cloudwatch Log Group associated with this + * import task.

+ * @public + */ + CloudWatchLogGroupArn?: string | undefined; + /** + *

The format of the source data. Valid values are CSV, + * DYNAMODB_JSON or ION.

+ * @public + */ + InputFormat?: InputFormat | undefined; + /** + *

The time at which this import task began.

+ * @public + */ + StartTime?: Date | undefined; + /** + *

The time at which this import task ended. (Does this include the successful complete + * creation of the table it was imported to?)

+ * @public + */ + EndTime?: Date | undefined; +} +/** + * @public + */ +export interface ListImportsOutput { + /** + *

A list of ImportSummary objects.

+ * @public + */ + ImportSummaryList?: ImportSummary[] | undefined; + /** + *

If this value is returned, there are additional results to be displayed. To retrieve + * them, call ListImports again, with NextToken set to this + * value.

+ * @public + */ + NextToken?: string | undefined; +} +/** + *

Represents the input of a ListTables operation.

+ * @public + */ +export interface ListTablesInput { + /** + *

The first table name that this operation will evaluate. Use the value that was + * returned for LastEvaluatedTableName in a previous operation, so that you + * can obtain the next page of results.

+ * @public + */ + ExclusiveStartTableName?: string | undefined; + /** + *

A maximum number of table names to return. If this parameter is not specified, the + * limit is 100.

+ * @public + */ + Limit?: number | undefined; +} +/** + *

Represents the output of a ListTables operation.

+ * @public + */ +export interface ListTablesOutput { + /** + *

The names of the tables associated with the current account at the current endpoint. + * The maximum size of this array is 100.

+ *

If LastEvaluatedTableName also appears in the output, you can use this + * value as the ExclusiveStartTableName parameter in a subsequent + * ListTables request and obtain the next page of results.

+ * @public + */ + TableNames?: string[] | undefined; + /** + *

The name of the last table in the current page of results. Use this value as the + * ExclusiveStartTableName in a new request to obtain the next page of + * results, until all the table names are returned.

+ *

If you do not receive a LastEvaluatedTableName value in the response, + * this means that there are no more table names to be retrieved.

+ * @public + */ + LastEvaluatedTableName?: string | undefined; +} +/** + * @public + */ +export interface ListTagsOfResourceInput { + /** + *

The Amazon DynamoDB resource with tags to be listed. This value is an Amazon Resource + * Name (ARN).

+ * @public + */ + ResourceArn: string | undefined; + /** + *

An optional string that, if supplied, must be copied from the output of a previous + * call to ListTagOfResource. When provided in this manner, this API fetches the next page + * of results.

+ * @public + */ + NextToken?: string | undefined; +} +/** + * @public + */ +export interface ListTagsOfResourceOutput { + /** + *

The tags currently associated with the Amazon DynamoDB resource.

+ * @public + */ + Tags?: Tag[] | undefined; + /** + *

If this value is returned, there are additional results to be displayed. To retrieve + * them, call ListTagsOfResource again, with NextToken set to this value.

+ * @public + */ + NextToken?: string | undefined; +} +/** + * @public + */ +export interface PutResourcePolicyInput { + /** + *

The Amazon Resource Name (ARN) of the DynamoDB resource to which the policy will be attached. + * The resources you can specify include tables and streams.

+ *

You can control index permissions using the base table's policy. To specify the same permission level for your table and its indexes, you can provide both the table and index Amazon Resource Name (ARN)s in the Resource field of a given Statement in your policy document. Alternatively, to specify different permissions for your table, indexes, or both, you can define multiple Statement fields in your policy document.

+ * @public + */ + ResourceArn: string | undefined; + /** + *

An Amazon Web Services resource-based policy document in JSON format.

+ *
    + *
  • + *

    The maximum size supported for a resource-based policy document is 20 KB. + * DynamoDB counts whitespaces when calculating the size of a policy + * against this limit.

    + *
  • + *
  • + *

    Within a resource-based policy, if the action for a DynamoDB + * service-linked role (SLR) to replicate data for a global table is denied, adding + * or deleting a replica will fail with an error.

    + *
  • + *
+ *

For a full list of all considerations that apply while attaching a resource-based + * policy, see Resource-based + * policy considerations.

+ * @public + */ + Policy: string | undefined; + /** + *

A string value that you can use to conditionally update your policy. You can provide + * the revision ID of your existing policy to make mutating requests against that + * policy.

+ * + *

When you provide an expected revision ID, if the revision ID of the existing + * policy on the resource doesn't match or if there's no policy attached to the + * resource, your request will be rejected with a + * PolicyNotFoundException.

+ *
+ *

To conditionally attach a policy when no policy exists for the resource, specify + * NO_POLICY for the revision ID.

+ * @public + */ + ExpectedRevisionId?: string | undefined; + /** + *

Set this parameter to true to confirm that you want to remove your + * permissions to change the policy of this resource in the future.

+ * @public + */ + ConfirmRemoveSelfResourceAccess?: boolean | undefined; +} +/** + * @public + */ +export interface PutResourcePolicyOutput { + /** + *

A unique string that represents the revision ID of the policy. If you're comparing revision IDs, make sure to always use string comparison logic.

+ * @public + */ + RevisionId?: string | undefined; +} +/** + * @public + * @enum + */ +export declare const Select: { + readonly ALL_ATTRIBUTES: "ALL_ATTRIBUTES"; + readonly ALL_PROJECTED_ATTRIBUTES: "ALL_PROJECTED_ATTRIBUTES"; + readonly COUNT: "COUNT"; + readonly SPECIFIC_ATTRIBUTES: "SPECIFIC_ATTRIBUTES"; +}; +/** + * @public + */ +export type Select = (typeof Select)[keyof typeof Select]; +/** + * @public + */ +export interface RestoreTableFromBackupInput { + /** + *

The name of the new table to which the backup must be restored.

+ * @public + */ + TargetTableName: string | undefined; + /** + *

The Amazon Resource Name (ARN) associated with the backup.

+ * @public + */ + BackupArn: string | undefined; + /** + *

The billing mode of the restored table.

+ * @public + */ + BillingModeOverride?: BillingMode | undefined; + /** + *

List of global secondary indexes for the restored table. The indexes provided should + * match existing secondary indexes. You can choose to exclude some or all of the indexes + * at the time of restore.

+ * @public + */ + GlobalSecondaryIndexOverride?: GlobalSecondaryIndex[] | undefined; + /** + *

List of local secondary indexes for the restored table. The indexes provided should + * match existing secondary indexes. You can choose to exclude some or all of the indexes + * at the time of restore.

+ * @public + */ + LocalSecondaryIndexOverride?: LocalSecondaryIndex[] | undefined; + /** + *

Provisioned throughput settings for the restored table.

+ * @public + */ + ProvisionedThroughputOverride?: ProvisionedThroughput | undefined; + /** + *

Sets the maximum number of read and write units for the specified on-demand table. If + * you use this parameter, you must specify MaxReadRequestUnits, + * MaxWriteRequestUnits, or both.

+ * @public + */ + OnDemandThroughputOverride?: OnDemandThroughput | undefined; + /** + *

The new server-side encryption settings for the restored table.

+ * @public + */ + SSESpecificationOverride?: SSESpecification | undefined; +} +/** + * @public + */ +export interface RestoreTableFromBackupOutput { + /** + *

The description of the table created from an existing backup.

+ * @public + */ + TableDescription?: TableDescription | undefined; +} +/** + *

A target table with the specified name already exists.

+ * @public + */ +export declare class TableAlreadyExistsException extends __BaseException { + readonly name: "TableAlreadyExistsException"; + readonly $fault: "client"; + /** + * @internal + */ + constructor(opts: __ExceptionOptionType); +} +/** + *

An invalid restore time was specified. RestoreDateTime must be between + * EarliestRestorableDateTime and LatestRestorableDateTime.

+ * @public + */ +export declare class InvalidRestoreTimeException extends __BaseException { + readonly name: "InvalidRestoreTimeException"; + readonly $fault: "client"; + /** + * @internal + */ + constructor(opts: __ExceptionOptionType); +} +/** + * @public + */ +export interface RestoreTableToPointInTimeInput { + /** + *

The DynamoDB table that will be restored. This value is an Amazon Resource Name + * (ARN).

+ * @public + */ + SourceTableArn?: string | undefined; + /** + *

Name of the source table that is being restored.

+ * @public + */ + SourceTableName?: string | undefined; + /** + *

The name of the new table to which it must be restored to.

+ * @public + */ + TargetTableName: string | undefined; + /** + *

Restore the table to the latest possible time. LatestRestorableDateTime + * is typically 5 minutes before the current time.

+ * @public + */ + UseLatestRestorableTime?: boolean | undefined; + /** + *

Time in the past to restore the table to.

+ * @public + */ + RestoreDateTime?: Date | undefined; + /** + *

The billing mode of the restored table.

+ * @public + */ + BillingModeOverride?: BillingMode | undefined; + /** + *

List of global secondary indexes for the restored table. The indexes provided should + * match existing secondary indexes. You can choose to exclude some or all of the indexes + * at the time of restore.

+ * @public + */ + GlobalSecondaryIndexOverride?: GlobalSecondaryIndex[] | undefined; + /** + *

List of local secondary indexes for the restored table. The indexes provided should + * match existing secondary indexes. You can choose to exclude some or all of the indexes + * at the time of restore.

+ * @public + */ + LocalSecondaryIndexOverride?: LocalSecondaryIndex[] | undefined; + /** + *

Provisioned throughput settings for the restored table.

+ * @public + */ + ProvisionedThroughputOverride?: ProvisionedThroughput | undefined; + /** + *

Sets the maximum number of read and write units for the specified on-demand table. If + * you use this parameter, you must specify MaxReadRequestUnits, + * MaxWriteRequestUnits, or both.

+ * @public + */ + OnDemandThroughputOverride?: OnDemandThroughput | undefined; + /** + *

The new server-side encryption settings for the restored table.

+ * @public + */ + SSESpecificationOverride?: SSESpecification | undefined; +} +/** + * @public + */ +export interface RestoreTableToPointInTimeOutput { + /** + *

Represents the properties of a table.

+ * @public + */ + TableDescription?: TableDescription | undefined; +} +/** + * @public + */ +export interface TagResourceInput { + /** + *

Identifies the Amazon DynamoDB resource to which tags should be added. This value is + * an Amazon Resource Name (ARN).

+ * @public + */ + ResourceArn: string | undefined; + /** + *

The tags to be assigned to the Amazon DynamoDB resource.

+ * @public + */ + Tags: Tag[] | undefined; +} +/** + * @public + */ +export interface UntagResourceInput { + /** + *

The DynamoDB resource that the tags will be removed from. This value is an Amazon + * Resource Name (ARN).

+ * @public + */ + ResourceArn: string | undefined; + /** + *

A list of tag keys. Existing tags of the resource whose keys are members of this list + * will be removed from the DynamoDB resource.

+ * @public + */ + TagKeys: string[] | undefined; +} +/** + *

Represents the settings used to enable point in time recovery.

+ * @public + */ +export interface PointInTimeRecoverySpecification { + /** + *

Indicates whether point in time recovery is enabled (true) or disabled (false) on the + * table.

+ * @public + */ + PointInTimeRecoveryEnabled: boolean | undefined; + /** + *

The number of preceding days for which continuous backups are taken and maintained. + * Your table data is only recoverable to any point-in-time from within the configured + * recovery period. This parameter is optional. If no value is provided, the value will + * default to 35.

+ * @public + */ + RecoveryPeriodInDays?: number | undefined; +} +/** + * @public + */ +export interface UpdateContinuousBackupsInput { + /** + *

The name of the table. You can also provide the Amazon Resource Name (ARN) of the table in this + * parameter.

+ * @public + */ + TableName: string | undefined; + /** + *

Represents the settings used to enable point in time recovery.

+ * @public + */ + PointInTimeRecoverySpecification: PointInTimeRecoverySpecification | undefined; +} +/** + * @public + */ +export interface UpdateContinuousBackupsOutput { + /** + *

Represents the continuous backups and point in time recovery settings on the + * table.

+ * @public + */ + ContinuousBackupsDescription?: ContinuousBackupsDescription | undefined; +} +/** + * @public + */ +export interface UpdateContributorInsightsInput { + /** + *

The name of the table. You can also provide the Amazon Resource Name (ARN) of the table in this + * parameter.

+ * @public + */ + TableName: string | undefined; + /** + *

The global secondary index name, if applicable.

+ * @public + */ + IndexName?: string | undefined; + /** + *

Represents the contributor insights action.

+ * @public + */ + ContributorInsightsAction: ContributorInsightsAction | undefined; +} +/** + * @public + */ +export interface UpdateContributorInsightsOutput { + /** + *

The name of the table.

+ * @public + */ + TableName?: string | undefined; + /** + *

The name of the global secondary index, if applicable.

+ * @public + */ + IndexName?: string | undefined; + /** + *

The status of contributor insights

+ * @public + */ + ContributorInsightsStatus?: ContributorInsightsStatus | undefined; +} +/** + *

The specified replica is already part of the global table.

+ * @public + */ +export declare class ReplicaAlreadyExistsException extends __BaseException { + readonly name: "ReplicaAlreadyExistsException"; + readonly $fault: "client"; + /** + * @internal + */ + constructor(opts: __ExceptionOptionType); +} +/** + *

The specified replica is no longer part of the global table.

+ * @public + */ +export declare class ReplicaNotFoundException extends __BaseException { + readonly name: "ReplicaNotFoundException"; + readonly $fault: "client"; + /** + * @internal + */ + constructor(opts: __ExceptionOptionType); +} +/** + *

Represents one of the following:

+ *
    + *
  • + *

    A new replica to be added to an existing global table.

    + *
  • + *
  • + *

    New parameters for an existing replica.

    + *
  • + *
  • + *

    An existing replica to be removed from an existing global table.

    + *
  • + *
+ * @public + */ +export interface ReplicaUpdate { + /** + *

The parameters required for creating a replica on an existing global table.

+ * @public + */ + Create?: CreateReplicaAction | undefined; + /** + *

The name of the existing replica to be removed.

+ * @public + */ + Delete?: DeleteReplicaAction | undefined; +} +/** + * @public + */ +export interface UpdateGlobalTableInput { + /** + *

The global table name.

+ * @public + */ + GlobalTableName: string | undefined; + /** + *

A list of Regions that should be added or removed from the global table.

+ * @public + */ + ReplicaUpdates: ReplicaUpdate[] | undefined; +} +/** + * @public + */ +export interface UpdateGlobalTableOutput { + /** + *

Contains the details of the global table.

+ * @public + */ + GlobalTableDescription?: GlobalTableDescription | undefined; +} +/** + *

The operation tried to access a nonexistent index.

+ * @public + */ +export declare class IndexNotFoundException extends __BaseException { + readonly name: "IndexNotFoundException"; + readonly $fault: "client"; + /** + * @internal + */ + constructor(opts: __ExceptionOptionType); +} +/** + *

Represents the settings of a global secondary index for a global table that will be + * modified.

+ * @public + */ +export interface GlobalTableGlobalSecondaryIndexSettingsUpdate { + /** + *

The name of the global secondary index. The name must be unique among all other + * indexes on this table.

+ * @public + */ + IndexName: string | undefined; + /** + *

The maximum number of writes consumed per second before DynamoDB returns a + * ThrottlingException. + *

+ * @public + */ + ProvisionedWriteCapacityUnits?: number | undefined; + /** + *

Auto scaling settings for managing a global secondary index's write capacity + * units.

+ * @public + */ + ProvisionedWriteCapacityAutoScalingSettingsUpdate?: AutoScalingSettingsUpdate | undefined; +} +/** + *

Represents the settings of a global secondary index for a global table that will be + * modified.

+ * @public + */ +export interface ReplicaGlobalSecondaryIndexSettingsUpdate { + /** + *

The name of the global secondary index. The name must be unique among all other + * indexes on this table.

+ * @public + */ + IndexName: string | undefined; + /** + *

The maximum number of strongly consistent reads consumed per second before DynamoDB + * returns a ThrottlingException.

+ * @public + */ + ProvisionedReadCapacityUnits?: number | undefined; + /** + *

Auto scaling settings for managing a global secondary index replica's read capacity + * units.

+ * @public + */ + ProvisionedReadCapacityAutoScalingSettingsUpdate?: AutoScalingSettingsUpdate | undefined; +} +/** + *

Represents the settings for a global table in a Region that will be modified.

+ * @public + */ +export interface ReplicaSettingsUpdate { + /** + *

The Region of the replica to be added.

+ * @public + */ + RegionName: string | undefined; + /** + *

The maximum number of strongly consistent reads consumed per second before DynamoDB + * returns a ThrottlingException. For more information, see Specifying Read and Write Requirements in the Amazon DynamoDB + * Developer Guide.

+ * @public + */ + ReplicaProvisionedReadCapacityUnits?: number | undefined; + /** + *

Auto scaling settings for managing a global table replica's read capacity + * units.

+ * @public + */ + ReplicaProvisionedReadCapacityAutoScalingSettingsUpdate?: AutoScalingSettingsUpdate | undefined; + /** + *

Represents the settings of a global secondary index for a global table that will be + * modified.

+ * @public + */ + ReplicaGlobalSecondaryIndexSettingsUpdate?: ReplicaGlobalSecondaryIndexSettingsUpdate[] | undefined; + /** + *

Replica-specific table class. If not specified, uses the source table's table + * class.

+ * @public + */ + ReplicaTableClass?: TableClass | undefined; +} +/** + * @public + */ +export interface UpdateGlobalTableSettingsInput { + /** + *

The name of the global table

+ * @public + */ + GlobalTableName: string | undefined; + /** + *

The billing mode of the global table. If GlobalTableBillingMode is not + * specified, the global table defaults to PROVISIONED capacity billing + * mode.

+ *
    + *
  • + *

    + * PROVISIONED - We recommend using PROVISIONED for + * predictable workloads. PROVISIONED sets the billing mode to Provisioned capacity mode.

    + *
  • + *
  • + *

    + * PAY_PER_REQUEST - We recommend using PAY_PER_REQUEST + * for unpredictable workloads. PAY_PER_REQUEST sets the billing mode + * to On-demand capacity mode.

    + *
  • + *
+ * @public + */ + GlobalTableBillingMode?: BillingMode | undefined; + /** + *

The maximum number of writes consumed per second before DynamoDB returns a + * ThrottlingException. + *

+ * @public + */ + GlobalTableProvisionedWriteCapacityUnits?: number | undefined; + /** + *

Auto scaling settings for managing provisioned write capacity for the global + * table.

+ * @public + */ + GlobalTableProvisionedWriteCapacityAutoScalingSettingsUpdate?: AutoScalingSettingsUpdate | undefined; + /** + *

Represents the settings of a global secondary index for a global table that will be + * modified.

+ * @public + */ + GlobalTableGlobalSecondaryIndexSettingsUpdate?: GlobalTableGlobalSecondaryIndexSettingsUpdate[] | undefined; + /** + *

Represents the settings for a global table in a Region that will be modified.

+ * @public + */ + ReplicaSettingsUpdate?: ReplicaSettingsUpdate[] | undefined; +} +/** + * @public + */ +export interface UpdateGlobalTableSettingsOutput { + /** + *

The name of the global table.

+ * @public + */ + GlobalTableName?: string | undefined; + /** + *

The Region-specific settings for the global table.

+ * @public + */ + ReplicaSettings?: ReplicaSettingsDescription[] | undefined; +} +/** + *

Enables updating the configuration for Kinesis Streaming.

+ * @public + */ +export interface UpdateKinesisStreamingConfiguration { + /** + *

Enables updating the precision of Kinesis data stream timestamp.

+ * @public + */ + ApproximateCreationDateTimePrecision?: ApproximateCreationDateTimePrecision | undefined; +} +/** + * @public + */ +export interface UpdateKinesisStreamingDestinationInput { + /** + *

The table name for the Kinesis streaming destination input. You can also provide the + * ARN of the table in this parameter.

+ * @public + */ + TableName: string | undefined; + /** + *

The Amazon Resource Name (ARN) for the Kinesis stream input.

+ * @public + */ + StreamArn: string | undefined; + /** + *

The command to update the Kinesis stream configuration.

+ * @public + */ + UpdateKinesisStreamingConfiguration?: UpdateKinesisStreamingConfiguration | undefined; +} +/** + * @public + */ +export interface UpdateKinesisStreamingDestinationOutput { + /** + *

The table name for the Kinesis streaming destination output.

+ * @public + */ + TableName?: string | undefined; + /** + *

The ARN for the Kinesis stream input.

+ * @public + */ + StreamArn?: string | undefined; + /** + *

The status of the attempt to update the Kinesis streaming destination output.

+ * @public + */ + DestinationStatus?: DestinationStatus | undefined; + /** + *

The command to update the Kinesis streaming destination configuration.

+ * @public + */ + UpdateKinesisStreamingConfiguration?: UpdateKinesisStreamingConfiguration | undefined; +} +/** + *

Represents the new provisioned throughput settings to be applied to a global secondary + * index.

+ * @public + */ +export interface UpdateGlobalSecondaryIndexAction { + /** + *

The name of the global secondary index to be updated.

+ * @public + */ + IndexName: string | undefined; + /** + *

Represents the provisioned throughput settings for the specified global secondary + * index.

+ *

For current minimum and maximum provisioned throughput values, see Service, + * Account, and Table Quotas in the Amazon DynamoDB Developer + * Guide.

+ * @public + */ + ProvisionedThroughput?: ProvisionedThroughput | undefined; + /** + *

Updates the maximum number of read and write units for the specified global secondary + * index. If you use this parameter, you must specify MaxReadRequestUnits, + * MaxWriteRequestUnits, or both.

+ * @public + */ + OnDemandThroughput?: OnDemandThroughput | undefined; + /** + *

Represents the warm throughput value of the new provisioned throughput settings to be + * applied to a global secondary index.

+ * @public + */ + WarmThroughput?: WarmThroughput | undefined; +} +/** + *

Represents one of the following:

+ *
    + *
  • + *

    A new global secondary index to be added to an existing table.

    + *
  • + *
  • + *

    New provisioned throughput parameters for an existing global secondary + * index.

    + *
  • + *
  • + *

    An existing global secondary index to be removed from an existing + * table.

    + *
  • + *
+ * @public + */ +export interface GlobalSecondaryIndexUpdate { + /** + *

The name of an existing global secondary index, along with new provisioned throughput + * settings to be applied to that index.

+ * @public + */ + Update?: UpdateGlobalSecondaryIndexAction | undefined; + /** + *

The parameters required for creating a global secondary index on an existing + * table:

+ *
    + *
  • + *

    + * IndexName + *

    + *
  • + *
  • + *

    + * KeySchema + *

    + *
  • + *
  • + *

    + * AttributeDefinitions + *

    + *
  • + *
  • + *

    + * Projection + *

    + *
  • + *
  • + *

    + * ProvisionedThroughput + *

    + *
  • + *
+ * @public + */ + Create?: CreateGlobalSecondaryIndexAction | undefined; + /** + *

The name of an existing global secondary index to be removed.

+ * @public + */ + Delete?: DeleteGlobalSecondaryIndexAction | undefined; +} +/** + *

Represents a replica to be modified.

+ * @public + */ +export interface UpdateReplicationGroupMemberAction { + /** + *

The Region where the replica exists.

+ * @public + */ + RegionName: string | undefined; + /** + *

The KMS key of the replica that should be used for KMS + * encryption. To specify a key, use its key ID, Amazon Resource Name (ARN), alias name, or + * alias ARN. Note that you should only provide this parameter if the key is different from + * the default DynamoDB KMS key alias/aws/dynamodb.

+ * @public + */ + KMSMasterKeyId?: string | undefined; + /** + *

Replica-specific provisioned throughput. If not specified, uses the source table's + * provisioned throughput settings.

+ * @public + */ + ProvisionedThroughputOverride?: ProvisionedThroughputOverride | undefined; + /** + *

Overrides the maximum on-demand throughput for the replica table.

+ * @public + */ + OnDemandThroughputOverride?: OnDemandThroughputOverride | undefined; + /** + *

Replica-specific global secondary index settings.

+ * @public + */ + GlobalSecondaryIndexes?: ReplicaGlobalSecondaryIndex[] | undefined; + /** + *

Replica-specific table class. If not specified, uses the source table's table + * class.

+ * @public + */ + TableClassOverride?: TableClass | undefined; +} +/** + *

Represents one of the following:

+ *
    + *
  • + *

    A new replica to be added to an existing regional table or global table. This + * request invokes the CreateTableReplica action in the destination + * Region.

    + *
  • + *
  • + *

    New parameters for an existing replica. This request invokes the + * UpdateTable action in the destination Region.

    + *
  • + *
  • + *

    An existing replica to be deleted. The request invokes the + * DeleteTableReplica action in the destination Region, deleting + * the replica and all if its items in the destination Region.

    + *
  • + *
+ * + *

When you manually remove a table or global table replica, you do not automatically + * remove any associated scalable targets, scaling policies, or CloudWatch + * alarms.

+ *
+ * @public + */ +export interface ReplicationGroupUpdate { + /** + *

The parameters required for creating a replica for the table.

+ * @public + */ + Create?: CreateReplicationGroupMemberAction | undefined; + /** + *

The parameters required for updating a replica for the table.

+ * @public + */ + Update?: UpdateReplicationGroupMemberAction | undefined; + /** + *

The parameters required for deleting a replica for the table.

+ * @public + */ + Delete?: DeleteReplicationGroupMemberAction | undefined; +} +/** + *

Represents the input of an UpdateTable operation.

+ * @public + */ +export interface UpdateTableInput { + /** + *

An array of attributes that describe the key schema for the table and indexes. If you + * are adding a new global secondary index to the table, AttributeDefinitions + * must include the key element(s) of the new index.

+ * @public + */ + AttributeDefinitions?: AttributeDefinition[] | undefined; + /** + *

The name of the table to be updated. You can also provide the Amazon Resource Name (ARN) of the table + * in this parameter.

+ * @public + */ + TableName: string | undefined; + /** + *

Controls how you are charged for read and write throughput and how you manage + * capacity. When switching from pay-per-request to provisioned capacity, initial + * provisioned capacity values must be set. The initial provisioned capacity values are + * estimated based on the consumed read and write capacity of your table and global + * secondary indexes over the past 30 minutes.

+ *
    + *
  • + *

    + * PAY_PER_REQUEST - We recommend using PAY_PER_REQUEST + * for most DynamoDB workloads. PAY_PER_REQUEST sets the billing mode + * to On-demand capacity mode.

    + *
  • + *
  • + *

    + * PROVISIONED - We recommend using PROVISIONED for + * steady workloads with predictable growth where capacity requirements can be + * reliably forecasted. PROVISIONED sets the billing mode to Provisioned capacity mode.

    + *
  • + *
+ * @public + */ + BillingMode?: BillingMode | undefined; + /** + *

The new provisioned throughput settings for the specified table or index.

+ * @public + */ + ProvisionedThroughput?: ProvisionedThroughput | undefined; + /** + *

An array of one or more global secondary indexes for the table. For each index in the + * array, you can request one action:

+ *
    + *
  • + *

    + * Create - add a new global secondary index to the table.

    + *
  • + *
  • + *

    + * Update - modify the provisioned throughput settings of an existing + * global secondary index.

    + *
  • + *
  • + *

    + * Delete - remove a global secondary index from the table.

    + *
  • + *
+ *

You can create or delete only one global secondary index per UpdateTable + * operation.

+ *

For more information, see Managing Global + * Secondary Indexes in the Amazon DynamoDB Developer + * Guide.

+ * @public + */ + GlobalSecondaryIndexUpdates?: GlobalSecondaryIndexUpdate[] | undefined; + /** + *

Represents the DynamoDB Streams configuration for the table.

+ * + *

You receive a ValidationException if you try to enable a stream on a + * table that already has a stream, or if you try to disable a stream on a table that + * doesn't have a stream.

+ *
+ * @public + */ + StreamSpecification?: StreamSpecification | undefined; + /** + *

The new server-side encryption settings for the specified table.

+ * @public + */ + SSESpecification?: SSESpecification | undefined; + /** + *

A list of replica update actions (create, delete, or update) for the table.

+ * + *

For global tables, this property only applies to global tables using Version + * 2019.11.21 (Current version).

+ *
+ * @public + */ + ReplicaUpdates?: ReplicationGroupUpdate[] | undefined; + /** + *

The table class of the table to be updated. Valid values are STANDARD and + * STANDARD_INFREQUENT_ACCESS.

+ * @public + */ + TableClass?: TableClass | undefined; + /** + *

Indicates whether deletion protection is to be enabled (true) or disabled (false) on + * the table.

+ * @public + */ + DeletionProtectionEnabled?: boolean | undefined; + /** + *

Specifies the consistency mode for a new global table. This parameter is only valid + * when you create a global table by specifying one or more Create actions in the ReplicaUpdates action list.

+ *

You can specify one of the following consistency modes:

+ *
    + *
  • + *

    + * EVENTUAL: Configures a new global table for multi-Region eventual + * consistency. This is the default consistency mode for global tables.

    + *
  • + *
  • + *

    + * STRONG: Configures a new global table for multi-Region strong + * consistency (preview).

    + * + *

    Multi-Region strong consistency (MRSC) is a new DynamoDB global + * tables capability currently available in preview mode. For more information, + * see Global tables multi-Region strong consistency.

    + *
    + *
  • + *
+ *

If you don't specify this parameter, the global table consistency mode defaults to + * EVENTUAL.

+ * @public + */ + MultiRegionConsistency?: MultiRegionConsistency | undefined; + /** + *

Updates the maximum number of read and write units for the specified table in + * on-demand capacity mode. If you use this parameter, you must specify + * MaxReadRequestUnits, MaxWriteRequestUnits, or both.

+ * @public + */ + OnDemandThroughput?: OnDemandThroughput | undefined; + /** + *

Represents the warm throughput (in read units per second and write units per second) + * for updating a table.

+ * @public + */ + WarmThroughput?: WarmThroughput | undefined; +} +/** + *

Represents the output of an UpdateTable operation.

+ * @public + */ +export interface UpdateTableOutput { + /** + *

Represents the properties of the table.

+ * @public + */ + TableDescription?: TableDescription | undefined; +} +/** + *

Represents the auto scaling settings of a global secondary index for a global table + * that will be modified.

+ * @public + */ +export interface GlobalSecondaryIndexAutoScalingUpdate { + /** + *

The name of the global secondary index.

+ * @public + */ + IndexName?: string | undefined; + /** + *

Represents the auto scaling settings to be modified for a global table or global + * secondary index.

+ * @public + */ + ProvisionedWriteCapacityAutoScalingUpdate?: AutoScalingSettingsUpdate | undefined; +} +/** + *

Represents the auto scaling settings of a global secondary index for a replica that + * will be modified.

+ * @public + */ +export interface ReplicaGlobalSecondaryIndexAutoScalingUpdate { + /** + *

The name of the global secondary index.

+ * @public + */ + IndexName?: string | undefined; + /** + *

Represents the auto scaling settings to be modified for a global table or global + * secondary index.

+ * @public + */ + ProvisionedReadCapacityAutoScalingUpdate?: AutoScalingSettingsUpdate | undefined; +} +/** + *

Represents the auto scaling settings of a replica that will be modified.

+ * @public + */ +export interface ReplicaAutoScalingUpdate { + /** + *

The Region where the replica exists.

+ * @public + */ + RegionName: string | undefined; + /** + *

Represents the auto scaling settings of global secondary indexes that will be + * modified.

+ * @public + */ + ReplicaGlobalSecondaryIndexUpdates?: ReplicaGlobalSecondaryIndexAutoScalingUpdate[] | undefined; + /** + *

Represents the auto scaling settings to be modified for a global table or global + * secondary index.

+ * @public + */ + ReplicaProvisionedReadCapacityAutoScalingUpdate?: AutoScalingSettingsUpdate | undefined; +} +/** + * @public + */ +export interface UpdateTableReplicaAutoScalingInput { + /** + *

Represents the auto scaling settings of the global secondary indexes of the replica to + * be updated.

+ * @public + */ + GlobalSecondaryIndexUpdates?: GlobalSecondaryIndexAutoScalingUpdate[] | undefined; + /** + *

The name of the global table to be updated. You can also provide the Amazon Resource Name (ARN) of the + * table in this parameter.

+ * @public + */ + TableName: string | undefined; + /** + *

Represents the auto scaling settings to be modified for a global table or global + * secondary index.

+ * @public + */ + ProvisionedWriteCapacityAutoScalingUpdate?: AutoScalingSettingsUpdate | undefined; + /** + *

Represents the auto scaling settings of replicas of the table that will be + * modified.

+ * @public + */ + ReplicaUpdates?: ReplicaAutoScalingUpdate[] | undefined; +} +/** + * @public + */ +export interface UpdateTableReplicaAutoScalingOutput { + /** + *

Returns information about the auto scaling settings of a table with replicas.

+ * @public + */ + TableAutoScalingDescription?: TableAutoScalingDescription | undefined; +} +/** + *

Represents the settings used to enable or disable Time to Live (TTL) for the specified + * table.

+ * @public + */ +export interface TimeToLiveSpecification { + /** + *

Indicates whether TTL is to be enabled (true) or disabled (false) on the table.

+ * @public + */ + Enabled: boolean | undefined; + /** + *

The name of the TTL attribute used to store the expiration time for items in the + * table.

+ * @public + */ + AttributeName: string | undefined; +} +/** + *

Represents the input of an UpdateTimeToLive operation.

+ * @public + */ +export interface UpdateTimeToLiveInput { + /** + *

The name of the table to be configured. You can also provide the Amazon Resource Name (ARN) of the + * table in this parameter.

+ * @public + */ + TableName: string | undefined; + /** + *

Represents the settings used to enable or disable Time to Live for the specified + * table.

+ * @public + */ + TimeToLiveSpecification: TimeToLiveSpecification | undefined; +} +/** + * @public + */ +export interface UpdateTimeToLiveOutput { + /** + *

Represents the output of an UpdateTimeToLive operation.

+ * @public + */ + TimeToLiveSpecification?: TimeToLiveSpecification | undefined; +} +/** + *

Represents the data for an attribute.

+ *

Each attribute value is described as a name-value pair. The name is the data type, and + * the value is the data itself.

+ *

For more information, see Data Types in the Amazon DynamoDB Developer + * Guide.

+ * @public + */ +export type AttributeValue = AttributeValue.BMember | AttributeValue.BOOLMember | AttributeValue.BSMember | AttributeValue.LMember | AttributeValue.MMember | AttributeValue.NMember | AttributeValue.NSMember | AttributeValue.NULLMember | AttributeValue.SMember | AttributeValue.SSMember | AttributeValue.$UnknownMember; +/** + * @public + */ +export declare namespace AttributeValue { + /** + *

An attribute of type String. For example:

+ *

+ * "S": "Hello" + *

+ * @public + */ + interface SMember { + S: string; + N?: never; + B?: never; + SS?: never; + NS?: never; + BS?: never; + M?: never; + L?: never; + NULL?: never; + BOOL?: never; + $unknown?: never; + } + /** + *

An attribute of type Number. For example:

+ *

+ * "N": "123.45" + *

+ *

Numbers are sent across the network to DynamoDB as strings, to maximize compatibility + * across languages and libraries. However, DynamoDB treats them as number type attributes + * for mathematical operations.

+ * @public + */ + interface NMember { + S?: never; + N: string; + B?: never; + SS?: never; + NS?: never; + BS?: never; + M?: never; + L?: never; + NULL?: never; + BOOL?: never; + $unknown?: never; + } + /** + *

An attribute of type Binary. For example:

+ *

+ * "B": "dGhpcyB0ZXh0IGlzIGJhc2U2NC1lbmNvZGVk" + *

+ * @public + */ + interface BMember { + S?: never; + N?: never; + B: Uint8Array; + SS?: never; + NS?: never; + BS?: never; + M?: never; + L?: never; + NULL?: never; + BOOL?: never; + $unknown?: never; + } + /** + *

An attribute of type String Set. For example:

+ *

+ * "SS": ["Giraffe", "Hippo" ,"Zebra"] + *

+ * @public + */ + interface SSMember { + S?: never; + N?: never; + B?: never; + SS: string[]; + NS?: never; + BS?: never; + M?: never; + L?: never; + NULL?: never; + BOOL?: never; + $unknown?: never; + } + /** + *

An attribute of type Number Set. For example:

+ *

+ * "NS": ["42.2", "-19", "7.5", "3.14"] + *

+ *

Numbers are sent across the network to DynamoDB as strings, to maximize compatibility + * across languages and libraries. However, DynamoDB treats them as number type attributes + * for mathematical operations.

+ * @public + */ + interface NSMember { + S?: never; + N?: never; + B?: never; + SS?: never; + NS: string[]; + BS?: never; + M?: never; + L?: never; + NULL?: never; + BOOL?: never; + $unknown?: never; + } + /** + *

An attribute of type Binary Set. For example:

+ *

+ * "BS": ["U3Vubnk=", "UmFpbnk=", "U25vd3k="] + *

+ * @public + */ + interface BSMember { + S?: never; + N?: never; + B?: never; + SS?: never; + NS?: never; + BS: Uint8Array[]; + M?: never; + L?: never; + NULL?: never; + BOOL?: never; + $unknown?: never; + } + /** + *

An attribute of type Map. For example:

+ *

+ * "M": \{"Name": \{"S": "Joe"\}, "Age": \{"N": "35"\}\} + *

+ * @public + */ + interface MMember { + S?: never; + N?: never; + B?: never; + SS?: never; + NS?: never; + BS?: never; + M: Record; + L?: never; + NULL?: never; + BOOL?: never; + $unknown?: never; + } + /** + *

An attribute of type List. For example:

+ *

+ * "L": [ \{"S": "Cookies"\} , \{"S": "Coffee"\}, \{"N": "3.14159"\}] + *

+ * @public + */ + interface LMember { + S?: never; + N?: never; + B?: never; + SS?: never; + NS?: never; + BS?: never; + M?: never; + L: AttributeValue[]; + NULL?: never; + BOOL?: never; + $unknown?: never; + } + /** + *

An attribute of type Null. For example:

+ *

+ * "NULL": true + *

+ * @public + */ + interface NULLMember { + S?: never; + N?: never; + B?: never; + SS?: never; + NS?: never; + BS?: never; + M?: never; + L?: never; + NULL: boolean; + BOOL?: never; + $unknown?: never; + } + /** + *

An attribute of type Boolean. For example:

+ *

+ * "BOOL": true + *

+ * @public + */ + interface BOOLMember { + S?: never; + N?: never; + B?: never; + SS?: never; + NS?: never; + BS?: never; + M?: never; + L?: never; + NULL?: never; + BOOL: boolean; + $unknown?: never; + } + /** + * @public + */ + interface $UnknownMember { + S?: never; + N?: never; + B?: never; + SS?: never; + NS?: never; + BS?: never; + M?: never; + L?: never; + NULL?: never; + BOOL?: never; + $unknown: [string, any]; + } + interface Visitor { + S: (value: string) => T; + N: (value: string) => T; + B: (value: Uint8Array) => T; + SS: (value: string[]) => T; + NS: (value: string[]) => T; + BS: (value: Uint8Array[]) => T; + M: (value: Record) => T; + L: (value: AttributeValue[]) => T; + NULL: (value: boolean) => T; + BOOL: (value: boolean) => T; + _: (name: string, value: any) => T; + } + const visit: (value: AttributeValue, visitor: Visitor) => T; +} +/** + *

For the UpdateItem operation, represents the attributes to be modified, + * the action to perform on each, and the new value for each.

+ * + *

You cannot use UpdateItem to update any primary key attributes. + * Instead, you will need to delete the item, and then use PutItem to + * create a new item with new attributes.

+ *
+ *

Attribute values cannot be null; string and binary type attributes must have lengths + * greater than zero; and set type attributes must not be empty. Requests with empty values + * will be rejected with a ValidationException exception.

+ * @public + */ +export interface AttributeValueUpdate { + /** + *

Represents the data for an attribute.

+ *

Each attribute value is described as a name-value pair. The name is the data type, and + * the value is the data itself.

+ *

For more information, see Data Types in the Amazon DynamoDB Developer Guide. + *

+ * @public + */ + Value?: AttributeValue | undefined; + /** + *

Specifies how to perform the update. Valid values are PUT (default), + * DELETE, and ADD. The behavior depends on whether the + * specified primary key already exists in the table.

+ *

+ * If an item with the specified Key is found in + * the table: + *

+ *
    + *
  • + *

    + * PUT - Adds the specified attribute to the item. If the attribute + * already exists, it is replaced by the new value.

    + *
  • + *
  • + *

    + * DELETE - If no value is specified, the attribute and its value are + * removed from the item. The data type of the specified value must match the + * existing value's data type.

    + *

    If a set of values is specified, then those values are + * subtracted from the old set. For example, if the attribute value was the set + * [a,b,c] and the DELETE action specified + * [a,c], then the final attribute value would be + * [b]. Specifying an empty set is an error.

    + *
  • + *
  • + *

    + * ADD - If the attribute does not already exist, then the attribute + * and its values are added to the item. If the attribute does exist, then the + * behavior of ADD depends on the data type of the attribute:

    + *
      + *
    • + *

      If the existing attribute is a number, and if Value is + * also a number, then the Value is mathematically added to + * the existing attribute. If Value is a negative number, then + * it is subtracted from the existing attribute.

      + * + *

      If you use ADD to increment or decrement a number + * value for an item that doesn't exist before the update, DynamoDB + * uses 0 as the initial value.

      + *

      In addition, if you use ADD to update an existing + * item, and intend to increment or decrement an attribute value which + * does not yet exist, DynamoDB uses 0 as the initial + * value. For example, suppose that the item you want to update does + * not yet have an attribute named itemcount, but + * you decide to ADD the number 3 to this + * attribute anyway, even though it currently does not exist. DynamoDB + * will create the itemcount attribute, set its + * initial value to 0, and finally add 3 to + * it. The result will be a new itemcount + * attribute in the item, with a value of 3.

      + *
      + *
    • + *
    • + *

      If the existing data type is a set, and if the Value is + * also a set, then the Value is added to the existing set. + * (This is a set operation, not mathematical + * addition.) For example, if the attribute value was the set + * [1,2], and the ADD action specified + * [3], then the final attribute value would be + * [1,2,3]. An error occurs if an Add action is specified + * for a set attribute and the attribute type specified does not match the + * existing set type.

      + *

      Both sets must have the same primitive data type. For example, if the + * existing data type is a set of strings, the Value must also + * be a set of strings. The same holds true for number sets and binary + * sets.

      + *
    • + *
    + *

    This action is only valid for an existing attribute whose data type is number + * or is a set. Do not use ADD for any other data types.

    + *
  • + *
+ *

+ * If no item with the specified Key is + * found: + *

+ *
    + *
  • + *

    + * PUT - DynamoDB creates a new item with the specified primary key, + * and then adds the attribute.

    + *
  • + *
  • + *

    + * DELETE - Nothing happens; there is no attribute to delete.

    + *
  • + *
  • + *

    + * ADD - DynamoDB creates a new item with the supplied primary key and + * number (or set) for the attribute value. The only data types allowed are number, + * number set, string set or binary set.

    + *
  • + *
+ * @public + */ + Action?: AttributeAction | undefined; +} +/** + *

An error associated with a statement in a PartiQL batch that was run.

+ * @public + */ +export interface BatchStatementError { + /** + *

The error code associated with the failed PartiQL batch statement.

+ * @public + */ + Code?: BatchStatementErrorCodeEnum | undefined; + /** + *

The error message associated with the PartiQL batch response.

+ * @public + */ + Message?: string | undefined; + /** + *

The item which caused the condition check to fail. This will be set if + * ReturnValuesOnConditionCheckFailure is specified as ALL_OLD.

+ * @public + */ + Item?: Record | undefined; +} +/** + *

A PartiQL batch statement request.

+ * @public + */ +export interface BatchStatementRequest { + /** + *

A valid PartiQL statement.

+ * @public + */ + Statement: string | undefined; + /** + *

The parameters associated with a PartiQL statement in the batch request.

+ * @public + */ + Parameters?: AttributeValue[] | undefined; + /** + *

The read consistency of the PartiQL batch request.

+ * @public + */ + ConsistentRead?: boolean | undefined; + /** + *

An optional parameter that returns the item attributes for a PartiQL batch request + * operation that failed a condition check.

+ *

There is no additional cost associated with requesting a return value aside from the + * small network and processing overhead of receiving a larger response. No read capacity + * units are consumed.

+ * @public + */ + ReturnValuesOnConditionCheckFailure?: ReturnValuesOnConditionCheckFailure | undefined; +} +/** + *

An ordered list of errors for each item in the request which caused the transaction to + * get cancelled. The values of the list are ordered according to the ordering of the + * TransactWriteItems request parameter. If no error occurred for the + * associated item an error with a Null code and Null message will be present.

+ * @public + */ +export interface CancellationReason { + /** + *

Item in the request which caused the transaction to get cancelled.

+ * @public + */ + Item?: Record | undefined; + /** + *

Status code for the result of the cancelled transaction.

+ * @public + */ + Code?: string | undefined; + /** + *

Cancellation reason message description.

+ * @public + */ + Message?: string | undefined; +} +/** + *

Represents the selection criteria for a Query or Scan + * operation:

+ *
    + *
  • + *

    For a Query operation, Condition is used for + * specifying the KeyConditions to use when querying a table or an + * index. For KeyConditions, only the following comparison operators + * are supported:

    + *

    + * EQ | LE | LT | GE | GT | BEGINS_WITH | BETWEEN + *

    + *

    + * Condition is also used in a QueryFilter, which + * evaluates the query results and returns only the desired values.

    + *
  • + *
  • + *

    For a Scan operation, Condition is used in a + * ScanFilter, which evaluates the scan results and returns only + * the desired values.

    + *
  • + *
+ * @public + */ +export interface Condition { + /** + *

One or more values to evaluate against the supplied attribute. The number of values in + * the list depends on the ComparisonOperator being used.

+ *

For type Number, value comparisons are numeric.

+ *

String value comparisons for greater than, equals, or less than are based on ASCII + * character code values. For example, a is greater than A, and + * a is greater than B. For a list of code values, see http://en.wikipedia.org/wiki/ASCII#ASCII_printable_characters.

+ *

For Binary, DynamoDB treats each byte of the binary data as unsigned when it + * compares binary values.

+ * @public + */ + AttributeValueList?: AttributeValue[] | undefined; + /** + *

A comparator for evaluating attributes. For example, equals, greater than, less than, + * etc.

+ *

The following comparison operators are available:

+ *

+ * EQ | NE | LE | LT | GE | GT | NOT_NULL | NULL | CONTAINS | NOT_CONTAINS | + * BEGINS_WITH | IN | BETWEEN + *

+ *

The following are descriptions of each comparison operator.

+ *
    + *
  • + *

    + * EQ : Equal. EQ is supported for all data types, + * including lists and maps.

    + *

    + * AttributeValueList can contain only one AttributeValue + * element of type String, Number, Binary, String Set, Number Set, or Binary Set. + * If an item contains an AttributeValue element of a different type + * than the one provided in the request, the value does not match. For example, + * \{"S":"6"\} does not equal \{"N":"6"\}. Also, + * \{"N":"6"\} does not equal \{"NS":["6", "2", + * "1"]\}.

    + *

    + *
  • + *
  • + *

    + * NE : Not equal. NE is supported for all data types, + * including lists and maps.

    + *

    + * AttributeValueList can contain only one AttributeValue + * of type String, Number, Binary, String Set, Number Set, or Binary Set. If an + * item contains an AttributeValue of a different type than the one + * provided in the request, the value does not match. For example, + * \{"S":"6"\} does not equal \{"N":"6"\}. Also, + * \{"N":"6"\} does not equal \{"NS":["6", "2", + * "1"]\}.

    + *

    + *
  • + *
  • + *

    + * LE : Less than or equal.

    + *

    + * AttributeValueList can contain only one AttributeValue + * element of type String, Number, or Binary (not a set type). If an item contains + * an AttributeValue element of a different type than the one provided + * in the request, the value does not match. For example, \{"S":"6"\} + * does not equal \{"N":"6"\}. Also, \{"N":"6"\} does not + * compare to \{"NS":["6", "2", "1"]\}.

    + *

    + *
  • + *
  • + *

    + * LT : Less than.

    + *

    + * AttributeValueList can contain only one AttributeValue + * of type String, Number, or Binary (not a set type). If an item contains an + * AttributeValue element of a different type than the one + * provided in the request, the value does not match. For example, + * \{"S":"6"\} does not equal \{"N":"6"\}. Also, + * \{"N":"6"\} does not compare to \{"NS":["6", "2", + * "1"]\}.

    + *

    + *
  • + *
  • + *

    + * GE : Greater than or equal.

    + *

    + * AttributeValueList can contain only one AttributeValue + * element of type String, Number, or Binary (not a set type). If an item contains + * an AttributeValue element of a different type than the one provided + * in the request, the value does not match. For example, \{"S":"6"\} + * does not equal \{"N":"6"\}. Also, \{"N":"6"\} does not + * compare to \{"NS":["6", "2", "1"]\}.

    + *

    + *
  • + *
  • + *

    + * GT : Greater than.

    + *

    + * AttributeValueList can contain only one AttributeValue + * element of type String, Number, or Binary (not a set type). If an item contains + * an AttributeValue element of a different type than the one provided + * in the request, the value does not match. For example, \{"S":"6"\} + * does not equal \{"N":"6"\}. Also, \{"N":"6"\} does not + * compare to \{"NS":["6", "2", "1"]\}.

    + *

    + *
  • + *
  • + *

    + * NOT_NULL : The attribute exists. NOT_NULL is supported + * for all data types, including lists and maps.

    + * + *

    This operator tests for the existence of an attribute, not its data type. + * If the data type of attribute "a" is null, and you evaluate it + * using NOT_NULL, the result is a Boolean true. This + * result is because the attribute "a" exists; its data type is + * not relevant to the NOT_NULL comparison operator.

    + *
    + *
  • + *
  • + *

    + * NULL : The attribute does not exist. NULL is supported + * for all data types, including lists and maps.

    + * + *

    This operator tests for the nonexistence of an attribute, not its data + * type. If the data type of attribute "a" is null, and you + * evaluate it using NULL, the result is a Boolean + * false. This is because the attribute "a" + * exists; its data type is not relevant to the NULL comparison + * operator.

    + *
    + *
  • + *
  • + *

    + * CONTAINS : Checks for a subsequence, or value in a set.

    + *

    + * AttributeValueList can contain only one AttributeValue + * element of type String, Number, or Binary (not a set type). If the target + * attribute of the comparison is of type String, then the operator checks for a + * substring match. If the target attribute of the comparison is of type Binary, + * then the operator looks for a subsequence of the target that matches the input. + * If the target attribute of the comparison is a set ("SS", + * "NS", or "BS"), then the operator evaluates to + * true if it finds an exact match with any member of the set.

    + *

    CONTAINS is supported for lists: When evaluating "a CONTAINS b", + * "a" can be a list; however, "b" cannot be a set, a + * map, or a list.

    + *
  • + *
  • + *

    + * NOT_CONTAINS : Checks for absence of a subsequence, or absence of a + * value in a set.

    + *

    + * AttributeValueList can contain only one AttributeValue + * element of type String, Number, or Binary (not a set type). If the target + * attribute of the comparison is a String, then the operator checks for the + * absence of a substring match. If the target attribute of the comparison is + * Binary, then the operator checks for the absence of a subsequence of the target + * that matches the input. If the target attribute of the comparison is a set + * ("SS", "NS", or "BS"), then the + * operator evaluates to true if it does not find an exact + * match with any member of the set.

    + *

    NOT_CONTAINS is supported for lists: When evaluating "a NOT CONTAINS + * b", "a" can be a list; however, "b" cannot + * be a set, a map, or a list.

    + *
  • + *
  • + *

    + * BEGINS_WITH : Checks for a prefix.

    + *

    + * AttributeValueList can contain only one AttributeValue + * of type String or Binary (not a Number or a set type). The target attribute of + * the comparison must be of type String or Binary (not a Number or a set + * type).

    + *

    + *
  • + *
  • + *

    + * IN : Checks for matching elements in a list.

    + *

    + * AttributeValueList can contain one or more + * AttributeValue elements of type String, Number, or Binary. + * These attributes are compared against an existing attribute of an item. If any + * elements of the input are equal to the item attribute, the expression evaluates + * to true.

    + *
  • + *
  • + *

    + * BETWEEN : Greater than or equal to the first value, and less than + * or equal to the second value.

    + *

    + * AttributeValueList must contain two AttributeValue + * elements of the same type, either String, Number, or Binary (not a set type). A + * target attribute matches if the target value is greater than, or equal to, the + * first element and less than, or equal to, the second element. If an item + * contains an AttributeValue element of a different type than the one + * provided in the request, the value does not match. For example, + * \{"S":"6"\} does not compare to \{"N":"6"\}. Also, + * \{"N":"6"\} does not compare to \{"NS":["6", "2", + * "1"]\} + *

    + *
  • + *
+ *

For usage examples of AttributeValueList and + * ComparisonOperator, see Legacy + * Conditional Parameters in the Amazon DynamoDB Developer + * Guide.

+ * @public + */ + ComparisonOperator: ComparisonOperator | undefined; +} +/** + *

A condition specified in the operation failed to be evaluated.

+ * @public + */ +export declare class ConditionalCheckFailedException extends __BaseException { + readonly name: "ConditionalCheckFailedException"; + readonly $fault: "client"; + /** + *

Item which caused the ConditionalCheckFailedException.

+ * @public + */ + Item?: Record | undefined; + /** + * @internal + */ + constructor(opts: __ExceptionOptionType); +} +/** + *

Represents a request to perform a DeleteItem operation on an item.

+ * @public + */ +export interface DeleteRequest { + /** + *

A map of attribute name to attribute values, representing the primary key of the item + * to delete. All of the table's primary key attributes must be specified, and their data + * types must match those of the table's key schema.

+ * @public + */ + Key: Record | undefined; +} +/** + * @public + */ +export interface ExecuteStatementInput { + /** + *

The PartiQL statement representing the operation to run.

+ * @public + */ + Statement: string | undefined; + /** + *

The parameters for the PartiQL statement, if any.

+ * @public + */ + Parameters?: AttributeValue[] | undefined; + /** + *

The consistency of a read operation. If set to true, then a strongly + * consistent read is used; otherwise, an eventually consistent read is used.

+ * @public + */ + ConsistentRead?: boolean | undefined; + /** + *

Set this value to get remaining results, if NextToken was returned in the + * statement response.

+ * @public + */ + NextToken?: string | undefined; + /** + *

Determines the level of detail about either provisioned or on-demand throughput + * consumption that is returned in the response:

+ *
    + *
  • + *

    + * INDEXES - The response includes the aggregate + * ConsumedCapacity for the operation, together with + * ConsumedCapacity for each table and secondary index that was + * accessed.

    + *

    Note that some operations, such as GetItem and + * BatchGetItem, do not access any indexes at all. In these cases, + * specifying INDEXES will only return ConsumedCapacity + * information for table(s).

    + *
  • + *
  • + *

    + * TOTAL - The response includes only the aggregate + * ConsumedCapacity for the operation.

    + *
  • + *
  • + *

    + * NONE - No ConsumedCapacity details are included in the + * response.

    + *
  • + *
+ * @public + */ + ReturnConsumedCapacity?: ReturnConsumedCapacity | undefined; + /** + *

The maximum number of items to evaluate (not necessarily the number of matching + * items). If DynamoDB processes the number of items up to the limit while processing the + * results, it stops the operation and returns the matching values up to that point, along + * with a key in LastEvaluatedKey to apply in a subsequent operation so you + * can pick up where you left off. Also, if the processed dataset size exceeds 1 MB before + * DynamoDB reaches this limit, it stops the operation and returns the matching values up + * to the limit, and a key in LastEvaluatedKey to apply in a subsequent + * operation to continue the operation.

+ * @public + */ + Limit?: number | undefined; + /** + *

An optional parameter that returns the item attributes for an + * ExecuteStatement operation that failed a condition check.

+ *

There is no additional cost associated with requesting a return value aside from the + * small network and processing overhead of receiving a larger response. No read capacity + * units are consumed.

+ * @public + */ + ReturnValuesOnConditionCheckFailure?: ReturnValuesOnConditionCheckFailure | undefined; +} +/** + *

Specifies an item and related attribute values to retrieve in a + * TransactGetItem object.

+ * @public + */ +export interface Get { + /** + *

A map of attribute names to AttributeValue objects that specifies the + * primary key of the item to retrieve.

+ * @public + */ + Key: Record | undefined; + /** + *

The name of the table from which to retrieve the specified item. You can also provide + * the Amazon Resource Name (ARN) of the table in this parameter.

+ * @public + */ + TableName: string | undefined; + /** + *

A string that identifies one or more attributes of the specified item to retrieve from + * the table. The attributes in the expression must be separated by commas. If no attribute + * names are specified, then all attributes of the specified item are returned. If any of + * the requested attributes are not found, they do not appear in the result.

+ * @public + */ + ProjectionExpression?: string | undefined; + /** + *

One or more substitution tokens for attribute names in the ProjectionExpression + * parameter.

+ * @public + */ + ExpressionAttributeNames?: Record | undefined; +} +/** + *

Represents the input of a GetItem operation.

+ * @public + */ +export interface GetItemInput { + /** + *

The name of the table containing the requested item. You can also provide the + * Amazon Resource Name (ARN) of the table in this parameter.

+ * @public + */ + TableName: string | undefined; + /** + *

A map of attribute names to AttributeValue objects, representing the + * primary key of the item to retrieve.

+ *

For the primary key, you must provide all of the attributes. For example, with a + * simple primary key, you only need to provide a value for the partition key. For a + * composite primary key, you must provide values for both the partition key and the sort + * key.

+ * @public + */ + Key: Record | undefined; + /** + *

This is a legacy parameter. Use ProjectionExpression instead. For more + * information, see AttributesToGet in the Amazon DynamoDB Developer + * Guide.

+ * @public + */ + AttributesToGet?: string[] | undefined; + /** + *

Determines the read consistency model: If set to true, then the operation + * uses strongly consistent reads; otherwise, the operation uses eventually consistent + * reads.

+ * @public + */ + ConsistentRead?: boolean | undefined; + /** + *

Determines the level of detail about either provisioned or on-demand throughput + * consumption that is returned in the response:

+ *
    + *
  • + *

    + * INDEXES - The response includes the aggregate + * ConsumedCapacity for the operation, together with + * ConsumedCapacity for each table and secondary index that was + * accessed.

    + *

    Note that some operations, such as GetItem and + * BatchGetItem, do not access any indexes at all. In these cases, + * specifying INDEXES will only return ConsumedCapacity + * information for table(s).

    + *
  • + *
  • + *

    + * TOTAL - The response includes only the aggregate + * ConsumedCapacity for the operation.

    + *
  • + *
  • + *

    + * NONE - No ConsumedCapacity details are included in the + * response.

    + *
  • + *
+ * @public + */ + ReturnConsumedCapacity?: ReturnConsumedCapacity | undefined; + /** + *

A string that identifies one or more attributes to retrieve from the table. These + * attributes can include scalars, sets, or elements of a JSON document. The attributes in + * the expression must be separated by commas.

+ *

If no attribute names are specified, then all attributes are returned. If any of the + * requested attributes are not found, they do not appear in the result.

+ *

For more information, see Specifying Item Attributes in the Amazon DynamoDB Developer + * Guide.

+ * @public + */ + ProjectionExpression?: string | undefined; + /** + *

One or more substitution tokens for attribute names in an expression. The following + * are some use cases for using ExpressionAttributeNames:

+ *
    + *
  • + *

    To access an attribute whose name conflicts with a DynamoDB reserved + * word.

    + *
  • + *
  • + *

    To create a placeholder for repeating occurrences of an attribute name in an + * expression.

    + *
  • + *
  • + *

    To prevent special characters in an attribute name from being misinterpreted + * in an expression.

    + *
  • + *
+ *

Use the # character in an expression to dereference + * an attribute name. For example, consider the following attribute name:

+ *
    + *
  • + *

    + * Percentile + *

    + *
  • + *
+ *

The name of this attribute conflicts with a reserved word, so it cannot be used + * directly in an expression. (For the complete list of reserved words, see Reserved Words in the Amazon DynamoDB Developer + * Guide). To work around this, you could specify the following for + * ExpressionAttributeNames:

+ *
    + *
  • + *

    + * \{"#P":"Percentile"\} + *

    + *
  • + *
+ *

You could then use this substitution in an expression, as in this example:

+ *
    + *
  • + *

    + * #P = :val + *

    + *
  • + *
+ * + *

Tokens that begin with the : character are + * expression attribute values, which are placeholders for the + * actual value at runtime.

+ *
+ *

For more information on expression attribute names, see Specifying Item Attributes in the Amazon DynamoDB Developer + * Guide.

+ * @public + */ + ExpressionAttributeNames?: Record | undefined; +} +/** + *

Represents the output of a GetItem operation.

+ * @public + */ +export interface GetItemOutput { + /** + *

A map of attribute names to AttributeValue objects, as specified by + * ProjectionExpression.

+ * @public + */ + Item?: Record | undefined; + /** + *

The capacity units consumed by the GetItem operation. The data returned + * includes the total provisioned throughput consumed, along with statistics for the table + * and any indexes involved in the operation. ConsumedCapacity is only + * returned if the ReturnConsumedCapacity parameter was specified. For more + * information, see Capacity unit consumption for read operations in the Amazon + * DynamoDB Developer Guide.

+ * @public + */ + ConsumedCapacity?: ConsumedCapacity | undefined; +} +/** + *

Information about item collections, if any, that were affected by the operation. + * ItemCollectionMetrics is only returned if the request asked for it. If + * the table does not have any local secondary indexes, this information is not returned in + * the response.

+ * @public + */ +export interface ItemCollectionMetrics { + /** + *

The partition key value of the item collection. This value is the same as the + * partition key value of the item.

+ * @public + */ + ItemCollectionKey?: Record | undefined; + /** + *

An estimate of item collection size, in gigabytes. This value is a two-element array + * containing a lower bound and an upper bound for the estimate. The estimate includes the + * size of all the items in the table, plus the size of all attributes projected into all + * of the local secondary indexes on that table. Use this estimate to measure whether a + * local secondary index is approaching its size limit.

+ *

The estimate is subject to change over time; therefore, do not rely on the precision + * or accuracy of the estimate.

+ * @public + */ + SizeEstimateRangeGB?: number[] | undefined; +} +/** + *

Details for the requested item.

+ * @public + */ +export interface ItemResponse { + /** + *

Map of attribute data consisting of the data type and attribute value.

+ * @public + */ + Item?: Record | undefined; +} +/** + *

Represents a PartiQL statement that uses parameters.

+ * @public + */ +export interface ParameterizedStatement { + /** + *

A PartiQL statement that uses parameters.

+ * @public + */ + Statement: string | undefined; + /** + *

The parameter values.

+ * @public + */ + Parameters?: AttributeValue[] | undefined; + /** + *

An optional parameter that returns the item attributes for a PartiQL + * ParameterizedStatement operation that failed a condition check.

+ *

There is no additional cost associated with requesting a return value aside from the + * small network and processing overhead of receiving a larger response. No read capacity + * units are consumed.

+ * @public + */ + ReturnValuesOnConditionCheckFailure?: ReturnValuesOnConditionCheckFailure | undefined; +} +/** + *

Represents a request to perform a PutItem operation on an item.

+ * @public + */ +export interface PutRequest { + /** + *

A map of attribute name to attribute values, representing the primary key of an item + * to be processed by PutItem. All of the table's primary key attributes must + * be specified, and their data types must match those of the table's key schema. If any + * attributes are present in the item that are part of an index key schema for the table, + * their types must match the index key schema.

+ * @public + */ + Item: Record | undefined; +} +/** + *

Represents a set of primary keys and, for each key, the attributes to retrieve from + * the table.

+ *

For each primary key, you must provide all of the key attributes. + * For example, with a simple primary key, you only need to provide the partition key. For + * a composite primary key, you must provide both the partition key + * and the sort key.

+ * @public + */ +export interface KeysAndAttributes { + /** + *

The primary key attribute values that define the items and the attributes associated + * with the items.

+ * @public + */ + Keys: Record[] | undefined; + /** + *

This is a legacy parameter. Use ProjectionExpression instead. For more + * information, see Legacy + * Conditional Parameters in the Amazon DynamoDB Developer + * Guide.

+ * @public + */ + AttributesToGet?: string[] | undefined; + /** + *

The consistency of a read operation. If set to true, then a strongly + * consistent read is used; otherwise, an eventually consistent read is used.

+ * @public + */ + ConsistentRead?: boolean | undefined; + /** + *

A string that identifies one or more attributes to retrieve from the table. These + * attributes can include scalars, sets, or elements of a JSON document. The attributes in + * the ProjectionExpression must be separated by commas.

+ *

If no attribute names are specified, then all attributes will be returned. If any of + * the requested attributes are not found, they will not appear in the result.

+ *

For more information, see Accessing Item Attributes in the Amazon DynamoDB Developer + * Guide.

+ * @public + */ + ProjectionExpression?: string | undefined; + /** + *

One or more substitution tokens for attribute names in an expression. The following + * are some use cases for using ExpressionAttributeNames:

+ *
    + *
  • + *

    To access an attribute whose name conflicts with a DynamoDB reserved + * word.

    + *
  • + *
  • + *

    To create a placeholder for repeating occurrences of an attribute name in an + * expression.

    + *
  • + *
  • + *

    To prevent special characters in an attribute name from being misinterpreted + * in an expression.

    + *
  • + *
+ *

Use the # character in an expression to dereference + * an attribute name. For example, consider the following attribute name:

+ *
    + *
  • + *

    + * Percentile + *

    + *
  • + *
+ *

The name of this attribute conflicts with a reserved word, so it cannot be used + * directly in an expression. (For the complete list of reserved words, see Reserved Words in the Amazon DynamoDB Developer + * Guide). To work around this, you could specify the following for + * ExpressionAttributeNames:

+ *
    + *
  • + *

    + * \{"#P":"Percentile"\} + *

    + *
  • + *
+ *

You could then use this substitution in an expression, as in this example:

+ *
    + *
  • + *

    + * #P = :val + *

    + *
  • + *
+ * + *

Tokens that begin with the : character are + * expression attribute values, which are placeholders for the + * actual value at runtime.

+ *
+ *

For more information on expression attribute names, see Accessing Item Attributes in the Amazon DynamoDB Developer + * Guide.

+ * @public + */ + ExpressionAttributeNames?: Record | undefined; +} +/** + *

Specifies an item to be retrieved as part of the transaction.

+ * @public + */ +export interface TransactGetItem { + /** + *

Contains the primary key that identifies the item to get, together with the name of + * the table that contains the item, and optionally the specific attributes of the item to + * retrieve.

+ * @public + */ + Get: Get | undefined; +} +/** + * @public + */ +export interface BatchExecuteStatementInput { + /** + *

The list of PartiQL statements representing the batch to run.

+ * @public + */ + Statements: BatchStatementRequest[] | undefined; + /** + *

Determines the level of detail about either provisioned or on-demand throughput + * consumption that is returned in the response:

+ *
    + *
  • + *

    + * INDEXES - The response includes the aggregate + * ConsumedCapacity for the operation, together with + * ConsumedCapacity for each table and secondary index that was + * accessed.

    + *

    Note that some operations, such as GetItem and + * BatchGetItem, do not access any indexes at all. In these cases, + * specifying INDEXES will only return ConsumedCapacity + * information for table(s).

    + *
  • + *
  • + *

    + * TOTAL - The response includes only the aggregate + * ConsumedCapacity for the operation.

    + *
  • + *
  • + *

    + * NONE - No ConsumedCapacity details are included in the + * response.

    + *
  • + *
+ * @public + */ + ReturnConsumedCapacity?: ReturnConsumedCapacity | undefined; +} +/** + * @public + */ +export interface ExecuteTransactionInput { + /** + *

The list of PartiQL statements representing the transaction to run.

+ * @public + */ + TransactStatements: ParameterizedStatement[] | undefined; + /** + *

Set this value to get remaining results, if NextToken was returned in the + * statement response.

+ * @public + */ + ClientRequestToken?: string | undefined; + /** + *

Determines the level of detail about either provisioned or on-demand throughput + * consumption that is returned in the response. For more information, see TransactGetItems and TransactWriteItems.

+ * @public + */ + ReturnConsumedCapacity?: ReturnConsumedCapacity | undefined; +} +/** + * @public + */ +export interface ExecuteTransactionOutput { + /** + *

The response to a PartiQL transaction.

+ * @public + */ + Responses?: ItemResponse[] | undefined; + /** + *

The capacity units consumed by the entire operation. The values of the list are + * ordered according to the ordering of the statements.

+ * @public + */ + ConsumedCapacity?: ConsumedCapacity[] | undefined; +} +/** + * @public + */ +export interface TransactGetItemsOutput { + /** + *

If the ReturnConsumedCapacity value was TOTAL, this + * is an array of ConsumedCapacity objects, one for each table addressed by + * TransactGetItem objects in the TransactItems + * parameter. These ConsumedCapacity objects report the read-capacity units + * consumed by the TransactGetItems call in that table.

+ * @public + */ + ConsumedCapacity?: ConsumedCapacity[] | undefined; + /** + *

An ordered array of up to 100 ItemResponse objects, each of which + * corresponds to the TransactGetItem object in the same position in the + * TransactItems array. Each ItemResponse object + * contains a Map of the name-value pairs that are the projected attributes of the + * requested item.

+ *

If a requested item could not be retrieved, the corresponding + * ItemResponse object is Null, or if the requested item has no projected + * attributes, the corresponding ItemResponse object is an empty Map.

+ * @public + */ + Responses?: ItemResponse[] | undefined; +} +/** + *

The entire transaction request was canceled.

+ *

DynamoDB cancels a TransactWriteItems request under the following + * circumstances:

+ *
    + *
  • + *

    A condition in one of the condition expressions is not met.

    + *
  • + *
  • + *

    A table in the TransactWriteItems request is in a different + * account or region.

    + *
  • + *
  • + *

    More than one action in the TransactWriteItems operation + * targets the same item.

    + *
  • + *
  • + *

    There is insufficient provisioned capacity for the transaction to be + * completed.

    + *
  • + *
  • + *

    An item size becomes too large (larger than 400 KB), or a local secondary + * index (LSI) becomes too large, or a similar validation error occurs because of + * changes made by the transaction.

    + *
  • + *
  • + *

    There is a user error, such as an invalid data format.

    + *
  • + *
  • + *

    + * There is an ongoing TransactWriteItems operation that conflicts with a concurrent + * TransactWriteItems request. In this case the TransactWriteItems operation + * fails with a TransactionCanceledException. + *

    + *
  • + *
+ *

DynamoDB cancels a TransactGetItems request under the + * following circumstances:

+ *
    + *
  • + *

    There is an ongoing TransactGetItems operation that conflicts + * with a concurrent PutItem, UpdateItem, + * DeleteItem or TransactWriteItems request. In this + * case the TransactGetItems operation fails with a + * TransactionCanceledException.

    + *
  • + *
  • + *

    A table in the TransactGetItems request is in a different + * account or region.

    + *
  • + *
  • + *

    There is insufficient provisioned capacity for the transaction to be + * completed.

    + *
  • + *
  • + *

    There is a user error, such as an invalid data format.

    + *
  • + *
+ * + *

If using Java, DynamoDB lists the cancellation reasons on the + * CancellationReasons property. This property is not set for other + * languages. Transaction cancellation reasons are ordered in the order of requested + * items, if an item has no error it will have None code and + * Null message.

+ *
+ *

Cancellation reason codes and possible error messages:

+ *
    + *
  • + *

    No Errors:

    + *
      + *
    • + *

      Code: None + *

      + *
    • + *
    • + *

      Message: null + *

      + *
    • + *
    + *
  • + *
  • + *

    Conditional Check Failed:

    + *
      + *
    • + *

      Code: ConditionalCheckFailed + *

      + *
    • + *
    • + *

      Message: The conditional request failed.

      + *
    • + *
    + *
  • + *
  • + *

    Item Collection Size Limit Exceeded:

    + *
      + *
    • + *

      Code: ItemCollectionSizeLimitExceeded + *

      + *
    • + *
    • + *

      Message: Collection size exceeded.

      + *
    • + *
    + *
  • + *
  • + *

    Transaction Conflict:

    + *
      + *
    • + *

      Code: TransactionConflict + *

      + *
    • + *
    • + *

      Message: Transaction is ongoing for the item.

      + *
    • + *
    + *
  • + *
  • + *

    Provisioned Throughput Exceeded:

    + *
      + *
    • + *

      Code: ProvisionedThroughputExceeded + *

      + *
    • + *
    • + *

      Messages:

      + *
        + *
      • + *

        The level of configured provisioned throughput for the + * table was exceeded. Consider increasing your provisioning level + * with the UpdateTable API.

        + * + *

        This Message is received when provisioned throughput is + * exceeded is on a provisioned DynamoDB + * table.

        + *
        + *
      • + *
      • + *

        The level of configured provisioned throughput for one or + * more global secondary indexes of the table was exceeded. + * Consider increasing your provisioning level for the + * under-provisioned global secondary indexes with the UpdateTable + * API.

        + * + *

        This message is returned when provisioned throughput is + * exceeded is on a provisioned GSI.

        + *
        + *
      • + *
      + *
    • + *
    + *
  • + *
  • + *

    Throttling Error:

    + *
      + *
    • + *

      Code: ThrottlingError + *

      + *
    • + *
    • + *

      Messages:

      + *
        + *
      • + *

        Throughput exceeds the current capacity of your table or + * index. DynamoDB is automatically scaling your table or + * index so please try again shortly. If exceptions persist, check + * if you have a hot key: + * https://docs.aws.amazon.com/amazondynamodb/latest/developerguide/bp-partition-key-design.html.

        + * + *

        This message is returned when writes get throttled on an + * On-Demand table as DynamoDB is automatically + * scaling the table.

        + *
        + *
      • + *
      • + *

        Throughput exceeds the current capacity for one or more + * global secondary indexes. DynamoDB is automatically + * scaling your index so please try again shortly.

        + * + *

        This message is returned when writes get throttled on + * an On-Demand GSI as DynamoDB is automatically + * scaling the GSI.

        + *
        + *
      • + *
      + *
    • + *
    + *
  • + *
  • + *

    Validation Error:

    + *
      + *
    • + *

      Code: ValidationError + *

      + *
    • + *
    • + *

      Messages:

      + *
        + *
      • + *

        One or more parameter values were invalid.

        + *
      • + *
      • + *

        The update expression attempted to update the secondary + * index key beyond allowed size limits.

        + *
      • + *
      • + *

        The update expression attempted to update the secondary + * index key to unsupported type.

        + *
      • + *
      • + *

        An operand in the update expression has an incorrect data + * type.

        + *
      • + *
      • + *

        Item size to update has exceeded the maximum allowed + * size.

        + *
      • + *
      • + *

        Number overflow. Attempting to store a number with + * magnitude larger than supported range.

        + *
      • + *
      • + *

        Type mismatch for attribute to update.

        + *
      • + *
      • + *

        Nesting Levels have exceeded supported limits.

        + *
      • + *
      • + *

        The document path provided in the update expression is + * invalid for update.

        + *
      • + *
      • + *

        The provided expression refers to an attribute that does + * not exist in the item.

        + *
      • + *
      + *
    • + *
    + *
  • + *
+ * @public + */ +export declare class TransactionCanceledException extends __BaseException { + readonly name: "TransactionCanceledException"; + readonly $fault: "client"; + Message?: string | undefined; + /** + *

A list of cancellation reasons.

+ * @public + */ + CancellationReasons?: CancellationReason[] | undefined; + /** + * @internal + */ + constructor(opts: __ExceptionOptionType); +} +/** + *

Represents the input of a BatchGetItem operation.

+ * @public + */ +export interface BatchGetItemInput { + /** + *

A map of one or more table names or table ARNs and, for each table, a map that + * describes one or more items to retrieve from that table. Each table name or ARN can be + * used only once per BatchGetItem request.

+ *

Each element in the map of items to retrieve consists of the following:

+ *
    + *
  • + *

    + * ConsistentRead - If true, a strongly consistent read + * is used; if false (the default), an eventually consistent read is + * used.

    + *
  • + *
  • + *

    + * ExpressionAttributeNames - One or more substitution tokens for + * attribute names in the ProjectionExpression parameter. The + * following are some use cases for using + * ExpressionAttributeNames:

    + *
      + *
    • + *

      To access an attribute whose name conflicts with a DynamoDB reserved + * word.

      + *
    • + *
    • + *

      To create a placeholder for repeating occurrences of an attribute name + * in an expression.

      + *
    • + *
    • + *

      To prevent special characters in an attribute name from being + * misinterpreted in an expression.

      + *
    • + *
    + *

    Use the # character in an expression to + * dereference an attribute name. For example, consider the following attribute + * name:

    + *
      + *
    • + *

      + * Percentile + *

      + *
    • + *
    + *

    The name of this attribute conflicts with a reserved word, so it cannot be + * used directly in an expression. (For the complete list of reserved words, see + * Reserved + * Words in the Amazon DynamoDB Developer Guide). + * To work around this, you could specify the following for + * ExpressionAttributeNames:

    + *
      + *
    • + *

      + * \{"#P":"Percentile"\} + *

      + *
    • + *
    + *

    You could then use this substitution in an expression, as in this + * example:

    + *
      + *
    • + *

      + * #P = :val + *

      + *
    • + *
    + * + *

    Tokens that begin with the : character + * are expression attribute values, which are placeholders + * for the actual value at runtime.

    + *
    + *

    For more information about expression attribute names, see Accessing Item Attributes in the Amazon DynamoDB + * Developer Guide.

    + *
  • + *
  • + *

    + * Keys - An array of primary key attribute values that define + * specific items in the table. For each primary key, you must provide + * all of the key attributes. For example, with a simple + * primary key, you only need to provide the partition key value. For a composite + * key, you must provide both the partition key value and the + * sort key value.

    + *
  • + *
  • + *

    + * ProjectionExpression - A string that identifies one or more + * attributes to retrieve from the table. These attributes can include scalars, + * sets, or elements of a JSON document. The attributes in the expression must be + * separated by commas.

    + *

    If no attribute names are specified, then all attributes are returned. If any + * of the requested attributes are not found, they do not appear in the + * result.

    + *

    For more information, see Accessing Item Attributes in the Amazon DynamoDB + * Developer Guide.

    + *
  • + *
  • + *

    + * AttributesToGet - This is a legacy parameter. Use + * ProjectionExpression instead. For more information, see AttributesToGet in the Amazon DynamoDB Developer + * Guide.

    + *
  • + *
+ * @public + */ + RequestItems: Record | undefined; + /** + *

Determines the level of detail about either provisioned or on-demand throughput + * consumption that is returned in the response:

+ *
    + *
  • + *

    + * INDEXES - The response includes the aggregate + * ConsumedCapacity for the operation, together with + * ConsumedCapacity for each table and secondary index that was + * accessed.

    + *

    Note that some operations, such as GetItem and + * BatchGetItem, do not access any indexes at all. In these cases, + * specifying INDEXES will only return ConsumedCapacity + * information for table(s).

    + *
  • + *
  • + *

    + * TOTAL - The response includes only the aggregate + * ConsumedCapacity for the operation.

    + *
  • + *
  • + *

    + * NONE - No ConsumedCapacity details are included in the + * response.

    + *
  • + *
+ * @public + */ + ReturnConsumedCapacity?: ReturnConsumedCapacity | undefined; +} +/** + *

Represents a condition to be compared with an attribute value. This condition can be + * used with DeleteItem, PutItem, or UpdateItem + * operations; if the comparison evaluates to true, the operation succeeds; if not, the + * operation fails. You can use ExpectedAttributeValue in one of two different + * ways:

+ *
    + *
  • + *

    Use AttributeValueList to specify one or more values to compare + * against an attribute. Use ComparisonOperator to specify how you + * want to perform the comparison. If the comparison evaluates to true, then the + * conditional operation succeeds.

    + *
  • + *
  • + *

    Use Value to specify a value that DynamoDB will compare against + * an attribute. If the values match, then ExpectedAttributeValue + * evaluates to true and the conditional operation succeeds. Optionally, you can + * also set Exists to false, indicating that you do + * not expect to find the attribute value in the table. In this + * case, the conditional operation succeeds only if the comparison evaluates to + * false.

    + *
  • + *
+ *

+ * Value and Exists are incompatible with + * AttributeValueList and ComparisonOperator. Note that if + * you use both sets of parameters at once, DynamoDB will return a + * ValidationException exception.

+ * @public + */ +export interface ExpectedAttributeValue { + /** + *

Represents the data for the expected attribute.

+ *

Each attribute value is described as a name-value pair. The name is the data type, and + * the value is the data itself.

+ *

For more information, see Data Types in the Amazon DynamoDB Developer + * Guide.

+ * @public + */ + Value?: AttributeValue | undefined; + /** + *

Causes DynamoDB to evaluate the value before attempting a conditional + * operation:

+ *
    + *
  • + *

    If Exists is true, DynamoDB will check to + * see if that attribute value already exists in the table. If it is found, then + * the operation succeeds. If it is not found, the operation fails with a + * ConditionCheckFailedException.

    + *
  • + *
  • + *

    If Exists is false, DynamoDB assumes that + * the attribute value does not exist in the table. If in fact the value does not + * exist, then the assumption is valid and the operation succeeds. If the value is + * found, despite the assumption that it does not exist, the operation fails with a + * ConditionCheckFailedException.

    + *
  • + *
+ *

The default setting for Exists is true. If you supply a + * Value all by itself, DynamoDB assumes the attribute exists: + * You don't have to set Exists to true, because it is + * implied.

+ *

DynamoDB returns a ValidationException if:

+ *
    + *
  • + *

    + * Exists is true but there is no Value to + * check. (You expect a value to exist, but don't specify what that value + * is.)

    + *
  • + *
  • + *

    + * Exists is false but you also provide a + * Value. (You cannot expect an attribute to have a value, while + * also expecting it not to exist.)

    + *
  • + *
+ * @public + */ + Exists?: boolean | undefined; + /** + *

A comparator for evaluating attributes in the AttributeValueList. For + * example, equals, greater than, less than, etc.

+ *

The following comparison operators are available:

+ *

+ * EQ | NE | LE | LT | GE | GT | NOT_NULL | NULL | CONTAINS | NOT_CONTAINS | + * BEGINS_WITH | IN | BETWEEN + *

+ *

The following are descriptions of each comparison operator.

+ *
    + *
  • + *

    + * EQ : Equal. EQ is supported for all data types, + * including lists and maps.

    + *

    + * AttributeValueList can contain only one AttributeValue + * element of type String, Number, Binary, String Set, Number Set, or Binary Set. + * If an item contains an AttributeValue element of a different type + * than the one provided in the request, the value does not match. For example, + * \{"S":"6"\} does not equal \{"N":"6"\}. Also, + * \{"N":"6"\} does not equal \{"NS":["6", "2", + * "1"]\}.

    + *

    + *
  • + *
  • + *

    + * NE : Not equal. NE is supported for all data types, + * including lists and maps.

    + *

    + * AttributeValueList can contain only one AttributeValue + * of type String, Number, Binary, String Set, Number Set, or Binary Set. If an + * item contains an AttributeValue of a different type than the one + * provided in the request, the value does not match. For example, + * \{"S":"6"\} does not equal \{"N":"6"\}. Also, + * \{"N":"6"\} does not equal \{"NS":["6", "2", + * "1"]\}.

    + *

    + *
  • + *
  • + *

    + * LE : Less than or equal.

    + *

    + * AttributeValueList can contain only one AttributeValue + * element of type String, Number, or Binary (not a set type). If an item contains + * an AttributeValue element of a different type than the one provided + * in the request, the value does not match. For example, \{"S":"6"\} + * does not equal \{"N":"6"\}. Also, \{"N":"6"\} does not + * compare to \{"NS":["6", "2", "1"]\}.

    + *

    + *
  • + *
  • + *

    + * LT : Less than.

    + *

    + * AttributeValueList can contain only one AttributeValue + * of type String, Number, or Binary (not a set type). If an item contains an + * AttributeValue element of a different type than the one + * provided in the request, the value does not match. For example, + * \{"S":"6"\} does not equal \{"N":"6"\}. Also, + * \{"N":"6"\} does not compare to \{"NS":["6", "2", + * "1"]\}.

    + *

    + *
  • + *
  • + *

    + * GE : Greater than or equal.

    + *

    + * AttributeValueList can contain only one AttributeValue + * element of type String, Number, or Binary (not a set type). If an item contains + * an AttributeValue element of a different type than the one provided + * in the request, the value does not match. For example, \{"S":"6"\} + * does not equal \{"N":"6"\}. Also, \{"N":"6"\} does not + * compare to \{"NS":["6", "2", "1"]\}.

    + *

    + *
  • + *
  • + *

    + * GT : Greater than.

    + *

    + * AttributeValueList can contain only one AttributeValue + * element of type String, Number, or Binary (not a set type). If an item contains + * an AttributeValue element of a different type than the one provided + * in the request, the value does not match. For example, \{"S":"6"\} + * does not equal \{"N":"6"\}. Also, \{"N":"6"\} does not + * compare to \{"NS":["6", "2", "1"]\}.

    + *

    + *
  • + *
  • + *

    + * NOT_NULL : The attribute exists. NOT_NULL is supported + * for all data types, including lists and maps.

    + * + *

    This operator tests for the existence of an attribute, not its data type. + * If the data type of attribute "a" is null, and you evaluate it + * using NOT_NULL, the result is a Boolean true. This + * result is because the attribute "a" exists; its data type is + * not relevant to the NOT_NULL comparison operator.

    + *
    + *
  • + *
  • + *

    + * NULL : The attribute does not exist. NULL is supported + * for all data types, including lists and maps.

    + * + *

    This operator tests for the nonexistence of an attribute, not its data + * type. If the data type of attribute "a" is null, and you + * evaluate it using NULL, the result is a Boolean + * false. This is because the attribute "a" + * exists; its data type is not relevant to the NULL comparison + * operator.

    + *
    + *
  • + *
  • + *

    + * CONTAINS : Checks for a subsequence, or value in a set.

    + *

    + * AttributeValueList can contain only one AttributeValue + * element of type String, Number, or Binary (not a set type). If the target + * attribute of the comparison is of type String, then the operator checks for a + * substring match. If the target attribute of the comparison is of type Binary, + * then the operator looks for a subsequence of the target that matches the input. + * If the target attribute of the comparison is a set ("SS", + * "NS", or "BS"), then the operator evaluates to + * true if it finds an exact match with any member of the set.

    + *

    CONTAINS is supported for lists: When evaluating "a CONTAINS b", + * "a" can be a list; however, "b" cannot be a set, a + * map, or a list.

    + *
  • + *
  • + *

    + * NOT_CONTAINS : Checks for absence of a subsequence, or absence of a + * value in a set.

    + *

    + * AttributeValueList can contain only one AttributeValue + * element of type String, Number, or Binary (not a set type). If the target + * attribute of the comparison is a String, then the operator checks for the + * absence of a substring match. If the target attribute of the comparison is + * Binary, then the operator checks for the absence of a subsequence of the target + * that matches the input. If the target attribute of the comparison is a set + * ("SS", "NS", or "BS"), then the + * operator evaluates to true if it does not find an exact + * match with any member of the set.

    + *

    NOT_CONTAINS is supported for lists: When evaluating "a NOT CONTAINS + * b", "a" can be a list; however, "b" cannot + * be a set, a map, or a list.

    + *
  • + *
  • + *

    + * BEGINS_WITH : Checks for a prefix.

    + *

    + * AttributeValueList can contain only one AttributeValue + * of type String or Binary (not a Number or a set type). The target attribute of + * the comparison must be of type String or Binary (not a Number or a set + * type).

    + *

    + *
  • + *
  • + *

    + * IN : Checks for matching elements in a list.

    + *

    + * AttributeValueList can contain one or more + * AttributeValue elements of type String, Number, or Binary. + * These attributes are compared against an existing attribute of an item. If any + * elements of the input are equal to the item attribute, the expression evaluates + * to true.

    + *
  • + *
  • + *

    + * BETWEEN : Greater than or equal to the first value, and less than + * or equal to the second value.

    + *

    + * AttributeValueList must contain two AttributeValue + * elements of the same type, either String, Number, or Binary (not a set type). A + * target attribute matches if the target value is greater than, or equal to, the + * first element and less than, or equal to, the second element. If an item + * contains an AttributeValue element of a different type than the one + * provided in the request, the value does not match. For example, + * \{"S":"6"\} does not compare to \{"N":"6"\}. Also, + * \{"N":"6"\} does not compare to \{"NS":["6", "2", + * "1"]\} + *

    + *
  • + *
+ * @public + */ + ComparisonOperator?: ComparisonOperator | undefined; + /** + *

One or more values to evaluate against the supplied attribute. The number of values in + * the list depends on the ComparisonOperator being used.

+ *

For type Number, value comparisons are numeric.

+ *

String value comparisons for greater than, equals, or less than are based on ASCII + * character code values. For example, a is greater than A, and + * a is greater than B. For a list of code values, see http://en.wikipedia.org/wiki/ASCII#ASCII_printable_characters.

+ *

For Binary, DynamoDB treats each byte of the binary data as unsigned when it + * compares binary values.

+ *

For information on specifying data types in JSON, see JSON Data Format + * in the Amazon DynamoDB Developer Guide.

+ * @public + */ + AttributeValueList?: AttributeValue[] | undefined; +} +/** + * @public + */ +export interface TransactGetItemsInput { + /** + *

An ordered array of up to 100 TransactGetItem objects, each of which + * contains a Get structure.

+ * @public + */ + TransactItems: TransactGetItem[] | undefined; + /** + *

A value of TOTAL causes consumed capacity information to be returned, and + * a value of NONE prevents that information from being returned. No other + * value is valid.

+ * @public + */ + ReturnConsumedCapacity?: ReturnConsumedCapacity | undefined; +} +/** + * @public + */ +export interface TransactWriteItemsOutput { + /** + *

The capacity units consumed by the entire TransactWriteItems operation. + * The values of the list are ordered according to the ordering of the + * TransactItems request parameter.

+ * @public + */ + ConsumedCapacity?: ConsumedCapacity[] | undefined; + /** + *

A list of tables that were processed by TransactWriteItems and, for each + * table, information about any item collections that were affected by individual + * UpdateItem, PutItem, or DeleteItem + * operations.

+ * @public + */ + ItemCollectionMetrics?: Record | undefined; +} +/** + *

Represents a request to perform a check that an item exists or to check the condition + * of specific attributes of the item.

+ * @public + */ +export interface ConditionCheck { + /** + *

The primary key of the item to be checked. Each element consists of an attribute name + * and a value for that attribute.

+ * @public + */ + Key: Record | undefined; + /** + *

Name of the table for the check item request. You can also provide the Amazon Resource Name (ARN) of + * the table in this parameter.

+ * @public + */ + TableName: string | undefined; + /** + *

A condition that must be satisfied in order for a conditional update to succeed. For + * more information, see Condition expressions in the Amazon DynamoDB Developer + * Guide.

+ * @public + */ + ConditionExpression: string | undefined; + /** + *

One or more substitution tokens for attribute names in an expression. For more + * information, see Expression attribute names in the Amazon DynamoDB Developer + * Guide.

+ * @public + */ + ExpressionAttributeNames?: Record | undefined; + /** + *

One or more values that can be substituted in an expression. For more information, see + * Condition expressions in the Amazon DynamoDB Developer + * Guide.

+ * @public + */ + ExpressionAttributeValues?: Record | undefined; + /** + *

Use ReturnValuesOnConditionCheckFailure to get the item attributes if the + * ConditionCheck condition fails. For + * ReturnValuesOnConditionCheckFailure, the valid values are: NONE and + * ALL_OLD.

+ * @public + */ + ReturnValuesOnConditionCheckFailure?: ReturnValuesOnConditionCheckFailure | undefined; +} +/** + *

Represents a request to perform a DeleteItem operation.

+ * @public + */ +export interface Delete { + /** + *

The primary key of the item to be deleted. Each element consists of an attribute name + * and a value for that attribute.

+ * @public + */ + Key: Record | undefined; + /** + *

Name of the table in which the item to be deleted resides. You can also provide the + * Amazon Resource Name (ARN) of the table in this parameter.

+ * @public + */ + TableName: string | undefined; + /** + *

A condition that must be satisfied in order for a conditional delete to + * succeed.

+ * @public + */ + ConditionExpression?: string | undefined; + /** + *

One or more substitution tokens for attribute names in an expression.

+ * @public + */ + ExpressionAttributeNames?: Record | undefined; + /** + *

One or more values that can be substituted in an expression.

+ * @public + */ + ExpressionAttributeValues?: Record | undefined; + /** + *

Use ReturnValuesOnConditionCheckFailure to get the item attributes if the + * Delete condition fails. For + * ReturnValuesOnConditionCheckFailure, the valid values are: NONE and + * ALL_OLD.

+ * @public + */ + ReturnValuesOnConditionCheckFailure?: ReturnValuesOnConditionCheckFailure | undefined; +} +/** + *

Represents a request to perform a PutItem operation.

+ * @public + */ +export interface Put { + /** + *

A map of attribute name to attribute values, representing the primary key of the item + * to be written by PutItem. All of the table's primary key attributes must be + * specified, and their data types must match those of the table's key schema. If any + * attributes are present in the item that are part of an index key schema for the table, + * their types must match the index key schema.

+ * @public + */ + Item: Record | undefined; + /** + *

Name of the table in which to write the item. You can also provide the Amazon Resource Name (ARN) of + * the table in this parameter.

+ * @public + */ + TableName: string | undefined; + /** + *

A condition that must be satisfied in order for a conditional update to + * succeed.

+ * @public + */ + ConditionExpression?: string | undefined; + /** + *

One or more substitution tokens for attribute names in an expression.

+ * @public + */ + ExpressionAttributeNames?: Record | undefined; + /** + *

One or more values that can be substituted in an expression.

+ * @public + */ + ExpressionAttributeValues?: Record | undefined; + /** + *

Use ReturnValuesOnConditionCheckFailure to get the item attributes if the + * Put condition fails. For + * ReturnValuesOnConditionCheckFailure, the valid values are: NONE and + * ALL_OLD.

+ * @public + */ + ReturnValuesOnConditionCheckFailure?: ReturnValuesOnConditionCheckFailure | undefined; +} +/** + *

Represents a request to perform an UpdateItem operation.

+ * @public + */ +export interface Update { + /** + *

The primary key of the item to be updated. Each element consists of an attribute name + * and a value for that attribute.

+ * @public + */ + Key: Record | undefined; + /** + *

An expression that defines one or more attributes to be updated, the action to be + * performed on them, and new value(s) for them.

+ * @public + */ + UpdateExpression: string | undefined; + /** + *

Name of the table for the UpdateItem request. You can also provide the + * Amazon Resource Name (ARN) of the table in this parameter.

+ * @public + */ + TableName: string | undefined; + /** + *

A condition that must be satisfied in order for a conditional update to + * succeed.

+ * @public + */ + ConditionExpression?: string | undefined; + /** + *

One or more substitution tokens for attribute names in an expression.

+ * @public + */ + ExpressionAttributeNames?: Record | undefined; + /** + *

One or more values that can be substituted in an expression.

+ * @public + */ + ExpressionAttributeValues?: Record | undefined; + /** + *

Use ReturnValuesOnConditionCheckFailure to get the item attributes if the + * Update condition fails. For + * ReturnValuesOnConditionCheckFailure, the valid values are: NONE and + * ALL_OLD.

+ * @public + */ + ReturnValuesOnConditionCheckFailure?: ReturnValuesOnConditionCheckFailure | undefined; +} +/** + *

A PartiQL batch statement response..

+ * @public + */ +export interface BatchStatementResponse { + /** + *

The error associated with a failed PartiQL batch statement.

+ * @public + */ + Error?: BatchStatementError | undefined; + /** + *

The table name associated with a failed PartiQL batch statement.

+ * @public + */ + TableName?: string | undefined; + /** + *

A DynamoDB item associated with a BatchStatementResponse

+ * @public + */ + Item?: Record | undefined; +} +/** + *

Represents the output of a DeleteItem operation.

+ * @public + */ +export interface DeleteItemOutput { + /** + *

A map of attribute names to AttributeValue objects, representing the item + * as it appeared before the DeleteItem operation. This map appears in the + * response only if ReturnValues was specified as ALL_OLD in the + * request.

+ * @public + */ + Attributes?: Record | undefined; + /** + *

The capacity units consumed by the DeleteItem operation. The data + * returned includes the total provisioned throughput consumed, along with statistics for + * the table and any indexes involved in the operation. ConsumedCapacity is + * only returned if the ReturnConsumedCapacity parameter was specified. For + * more information, see Provisioned capacity mode in the Amazon DynamoDB Developer + * Guide.

+ * @public + */ + ConsumedCapacity?: ConsumedCapacity | undefined; + /** + *

Information about item collections, if any, that were affected by the + * DeleteItem operation. ItemCollectionMetrics is only + * returned if the ReturnItemCollectionMetrics parameter was specified. If the + * table does not have any local secondary indexes, this information is not returned in the + * response.

+ *

Each ItemCollectionMetrics element consists of:

+ *
    + *
  • + *

    + * ItemCollectionKey - The partition key value of the item collection. + * This is the same as the partition key value of the item itself.

    + *
  • + *
  • + *

    + * SizeEstimateRangeGB - An estimate of item collection size, in + * gigabytes. This value is a two-element array containing a lower bound and an + * upper bound for the estimate. The estimate includes the size of all the items in + * the table, plus the size of all attributes projected into all of the local + * secondary indexes on that table. Use this estimate to measure whether a local + * secondary index is approaching its size limit.

    + *

    The estimate is subject to change over time; therefore, do not rely on the + * precision or accuracy of the estimate.

    + *
  • + *
+ * @public + */ + ItemCollectionMetrics?: ItemCollectionMetrics | undefined; +} +/** + * @public + */ +export interface ExecuteStatementOutput { + /** + *

If a read operation was used, this property will contain the result of the read + * operation; a map of attribute names and their values. For the write operations this + * value will be empty.

+ * @public + */ + Items?: Record[] | undefined; + /** + *

If the response of a read request exceeds the response payload limit DynamoDB will set + * this value in the response. If set, you can use that this value in the subsequent + * request to get the remaining results.

+ * @public + */ + NextToken?: string | undefined; + /** + *

The capacity units consumed by an operation. The data returned includes the total + * provisioned throughput consumed, along with statistics for the table and any indexes + * involved in the operation. ConsumedCapacity is only returned if the request + * asked for it. For more information, see Provisioned capacity mode in the Amazon DynamoDB Developer + * Guide.

+ * @public + */ + ConsumedCapacity?: ConsumedCapacity | undefined; + /** + *

The primary key of the item where the operation stopped, inclusive of the previous + * result set. Use this value to start a new operation, excluding this value in the new + * request. If LastEvaluatedKey is empty, then the "last page" of results has + * been processed and there is no more data to be retrieved. If + * LastEvaluatedKey is not empty, it does not necessarily mean that there + * is more data in the result set. The only way to know when you have reached the end of + * the result set is when LastEvaluatedKey is empty.

+ * @public + */ + LastEvaluatedKey?: Record | undefined; +} +/** + *

Represents the output of a PutItem operation.

+ * @public + */ +export interface PutItemOutput { + /** + *

The attribute values as they appeared before the PutItem operation, but + * only if ReturnValues is specified as ALL_OLD in the request. + * Each element consists of an attribute name and an attribute value.

+ * @public + */ + Attributes?: Record | undefined; + /** + *

The capacity units consumed by the PutItem operation. The data returned + * includes the total provisioned throughput consumed, along with statistics for the table + * and any indexes involved in the operation. ConsumedCapacity is only + * returned if the ReturnConsumedCapacity parameter was specified. For more + * information, see Capacity unity consumption for write operations in the Amazon + * DynamoDB Developer Guide.

+ * @public + */ + ConsumedCapacity?: ConsumedCapacity | undefined; + /** + *

Information about item collections, if any, that were affected by the + * PutItem operation. ItemCollectionMetrics is only returned + * if the ReturnItemCollectionMetrics parameter was specified. If the table + * does not have any local secondary indexes, this information is not returned in the + * response.

+ *

Each ItemCollectionMetrics element consists of:

+ *
    + *
  • + *

    + * ItemCollectionKey - The partition key value of the item collection. + * This is the same as the partition key value of the item itself.

    + *
  • + *
  • + *

    + * SizeEstimateRangeGB - An estimate of item collection size, in + * gigabytes. This value is a two-element array containing a lower bound and an + * upper bound for the estimate. The estimate includes the size of all the items in + * the table, plus the size of all attributes projected into all of the local + * secondary indexes on that table. Use this estimate to measure whether a local + * secondary index is approaching its size limit.

    + *

    The estimate is subject to change over time; therefore, do not rely on the + * precision or accuracy of the estimate.

    + *
  • + *
+ * @public + */ + ItemCollectionMetrics?: ItemCollectionMetrics | undefined; +} +/** + *

Represents the output of a Query operation.

+ * @public + */ +export interface QueryOutput { + /** + *

An array of item attributes that match the query criteria. Each element in this array + * consists of an attribute name and the value for that attribute.

+ * @public + */ + Items?: Record[] | undefined; + /** + *

The number of items in the response.

+ *

If you used a QueryFilter in the request, then Count is the + * number of items returned after the filter was applied, and ScannedCount is + * the number of matching items before the filter was applied.

+ *

If you did not use a filter in the request, then Count and + * ScannedCount are the same.

+ * @public + */ + Count?: number | undefined; + /** + *

The number of items evaluated, before any QueryFilter is applied. A high + * ScannedCount value with few, or no, Count results + * indicates an inefficient Query operation. For more information, see Count and ScannedCount in the Amazon DynamoDB Developer + * Guide.

+ *

If you did not use a filter in the request, then ScannedCount is the same + * as Count.

+ * @public + */ + ScannedCount?: number | undefined; + /** + *

The primary key of the item where the operation stopped, inclusive of the previous + * result set. Use this value to start a new operation, excluding this value in the new + * request.

+ *

If LastEvaluatedKey is empty, then the "last page" of results has been + * processed and there is no more data to be retrieved.

+ *

If LastEvaluatedKey is not empty, it does not necessarily mean that there + * is more data in the result set. The only way to know when you have reached the end of + * the result set is when LastEvaluatedKey is empty.

+ * @public + */ + LastEvaluatedKey?: Record | undefined; + /** + *

The capacity units consumed by the Query operation. The data returned + * includes the total provisioned throughput consumed, along with statistics for the table + * and any indexes involved in the operation. ConsumedCapacity is only + * returned if the ReturnConsumedCapacity parameter was specified. For more + * information, see Capacity unit consumption for read operations in the Amazon + * DynamoDB Developer Guide.

+ * @public + */ + ConsumedCapacity?: ConsumedCapacity | undefined; +} +/** + *

Represents the output of a Scan operation.

+ * @public + */ +export interface ScanOutput { + /** + *

An array of item attributes that match the scan criteria. Each element in this array + * consists of an attribute name and the value for that attribute.

+ * @public + */ + Items?: Record[] | undefined; + /** + *

The number of items in the response.

+ *

If you set ScanFilter in the request, then Count is the + * number of items returned after the filter was applied, and ScannedCount is + * the number of matching items before the filter was applied.

+ *

If you did not use a filter in the request, then Count is the same as + * ScannedCount.

+ * @public + */ + Count?: number | undefined; + /** + *

The number of items evaluated, before any ScanFilter is applied. A high + * ScannedCount value with few, or no, Count results + * indicates an inefficient Scan operation. For more information, see Count and + * ScannedCount in the Amazon DynamoDB Developer + * Guide.

+ *

If you did not use a filter in the request, then ScannedCount is the same + * as Count.

+ * @public + */ + ScannedCount?: number | undefined; + /** + *

The primary key of the item where the operation stopped, inclusive of the previous + * result set. Use this value to start a new operation, excluding this value in the new + * request.

+ *

If LastEvaluatedKey is empty, then the "last page" of results has been + * processed and there is no more data to be retrieved.

+ *

If LastEvaluatedKey is not empty, it does not necessarily mean that there + * is more data in the result set. The only way to know when you have reached the end of + * the result set is when LastEvaluatedKey is empty.

+ * @public + */ + LastEvaluatedKey?: Record | undefined; + /** + *

The capacity units consumed by the Scan operation. The data returned + * includes the total provisioned throughput consumed, along with statistics for the table + * and any indexes involved in the operation. ConsumedCapacity is only + * returned if the ReturnConsumedCapacity parameter was specified. For more + * information, see Capacity unit consumption for read operations in the Amazon + * DynamoDB Developer Guide.

+ * @public + */ + ConsumedCapacity?: ConsumedCapacity | undefined; +} +/** + *

Represents the output of an UpdateItem operation.

+ * @public + */ +export interface UpdateItemOutput { + /** + *

A map of attribute values as they appear before or after the UpdateItem + * operation, as determined by the ReturnValues parameter.

+ *

The Attributes map is only present if the update was successful and + * ReturnValues was specified as something other than NONE in + * the request. Each element represents one attribute.

+ * @public + */ + Attributes?: Record | undefined; + /** + *

The capacity units consumed by the UpdateItem operation. The data + * returned includes the total provisioned throughput consumed, along with statistics for + * the table and any indexes involved in the operation. ConsumedCapacity is + * only returned if the ReturnConsumedCapacity parameter was specified. For + * more information, see Capacity unity consumption for write operations in the Amazon + * DynamoDB Developer Guide.

+ * @public + */ + ConsumedCapacity?: ConsumedCapacity | undefined; + /** + *

Information about item collections, if any, that were affected by the + * UpdateItem operation. ItemCollectionMetrics is only + * returned if the ReturnItemCollectionMetrics parameter was specified. If the + * table does not have any local secondary indexes, this information is not returned in the + * response.

+ *

Each ItemCollectionMetrics element consists of:

+ *
    + *
  • + *

    + * ItemCollectionKey - The partition key value of the item collection. + * This is the same as the partition key value of the item itself.

    + *
  • + *
  • + *

    + * SizeEstimateRangeGB - An estimate of item collection size, in + * gigabytes. This value is a two-element array containing a lower bound and an + * upper bound for the estimate. The estimate includes the size of all the items in + * the table, plus the size of all attributes projected into all of the local + * secondary indexes on that table. Use this estimate to measure whether a local + * secondary index is approaching its size limit.

    + *

    The estimate is subject to change over time; therefore, do not rely on the + * precision or accuracy of the estimate.

    + *
  • + *
+ * @public + */ + ItemCollectionMetrics?: ItemCollectionMetrics | undefined; +} +/** + *

Represents an operation to perform - either DeleteItem or + * PutItem. You can only request one of these operations, not both, in a + * single WriteRequest. If you do need to perform both of these operations, + * you need to provide two separate WriteRequest objects.

+ * @public + */ +export interface WriteRequest { + /** + *

A request to perform a PutItem operation.

+ * @public + */ + PutRequest?: PutRequest | undefined; + /** + *

A request to perform a DeleteItem operation.

+ * @public + */ + DeleteRequest?: DeleteRequest | undefined; +} +/** + * @public + */ +export interface BatchExecuteStatementOutput { + /** + *

The response to each PartiQL statement in the batch. The values of the list are + * ordered according to the ordering of the request statements.

+ * @public + */ + Responses?: BatchStatementResponse[] | undefined; + /** + *

The capacity units consumed by the entire operation. The values of the list are + * ordered according to the ordering of the statements.

+ * @public + */ + ConsumedCapacity?: ConsumedCapacity[] | undefined; +} +/** + *

Represents the output of a BatchGetItem operation.

+ * @public + */ +export interface BatchGetItemOutput { + /** + *

A map of table name or table ARN to a list of items. Each object in + * Responses consists of a table name or ARN, along with a map of + * attribute data consisting of the data type and attribute value.

+ * @public + */ + Responses?: Record[]> | undefined; + /** + *

A map of tables and their respective keys that were not processed with the current + * response. The UnprocessedKeys value is in the same form as + * RequestItems, so the value can be provided directly to a subsequent + * BatchGetItem operation. For more information, see + * RequestItems in the Request Parameters section.

+ *

Each element consists of:

+ *
    + *
  • + *

    + * Keys - An array of primary key attribute values that define + * specific items in the table.

    + *
  • + *
  • + *

    + * ProjectionExpression - One or more attributes to be retrieved from + * the table or index. By default, all attributes are returned. If a requested + * attribute is not found, it does not appear in the result.

    + *
  • + *
  • + *

    + * ConsistentRead - The consistency of a read operation. If set to + * true, then a strongly consistent read is used; otherwise, an + * eventually consistent read is used.

    + *
  • + *
+ *

If there are no unprocessed keys remaining, the response contains an empty + * UnprocessedKeys map.

+ * @public + */ + UnprocessedKeys?: Record | undefined; + /** + *

The read capacity units consumed by the entire BatchGetItem + * operation.

+ *

Each element consists of:

+ *
    + *
  • + *

    + * TableName - The table that consumed the provisioned + * throughput.

    + *
  • + *
  • + *

    + * CapacityUnits - The total number of capacity units consumed.

    + *
  • + *
+ * @public + */ + ConsumedCapacity?: ConsumedCapacity[] | undefined; +} +/** + *

Represents the input of a Scan operation.

+ * @public + */ +export interface ScanInput { + /** + *

The name of the table containing the requested items or if you provide + * IndexName, the name of the table to which that index belongs.

+ *

You can also provide the Amazon Resource Name (ARN) of the table in this parameter.

+ * @public + */ + TableName: string | undefined; + /** + *

The name of a secondary index to scan. This index can be any local secondary index or + * global secondary index. Note that if you use the IndexName parameter, you + * must also provide TableName.

+ * @public + */ + IndexName?: string | undefined; + /** + *

This is a legacy parameter. Use ProjectionExpression instead. For more + * information, see AttributesToGet in the Amazon DynamoDB Developer + * Guide.

+ * @public + */ + AttributesToGet?: string[] | undefined; + /** + *

The maximum number of items to evaluate (not necessarily the number of matching + * items). If DynamoDB processes the number of items up to the limit while processing the + * results, it stops the operation and returns the matching values up to that point, and a + * key in LastEvaluatedKey to apply in a subsequent operation, so that you can + * pick up where you left off. Also, if the processed dataset size exceeds 1 MB before + * DynamoDB reaches this limit, it stops the operation and returns the matching values up + * to the limit, and a key in LastEvaluatedKey to apply in a subsequent + * operation to continue the operation. For more information, see Working with Queries in the Amazon DynamoDB Developer + * Guide.

+ * @public + */ + Limit?: number | undefined; + /** + *

The attributes to be returned in the result. You can retrieve all item attributes, + * specific item attributes, the count of matching items, or in the case of an index, some + * or all of the attributes projected into the index.

+ *
    + *
  • + *

    + * ALL_ATTRIBUTES - Returns all of the item attributes from the + * specified table or index. If you query a local secondary index, then for each + * matching item in the index, DynamoDB fetches the entire item from the parent + * table. If the index is configured to project all item attributes, then all of + * the data can be obtained from the local secondary index, and no fetching is + * required.

    + *
  • + *
  • + *

    + * ALL_PROJECTED_ATTRIBUTES - Allowed only when querying an index. + * Retrieves all attributes that have been projected into the index. If the index + * is configured to project all attributes, this return value is equivalent to + * specifying ALL_ATTRIBUTES.

    + *
  • + *
  • + *

    + * COUNT - Returns the number of matching items, rather than the + * matching items themselves. Note that this uses the same quantity of read + * capacity units as getting the items, and is subject to the same item size + * calculations.

    + *
  • + *
  • + *

    + * SPECIFIC_ATTRIBUTES - Returns only the attributes listed in + * ProjectionExpression. This return value is equivalent to + * specifying ProjectionExpression without specifying any value for + * Select.

    + *

    If you query or scan a local secondary index and request only attributes that + * are projected into that index, the operation reads only the index and not the + * table. If any of the requested attributes are not projected into the local + * secondary index, DynamoDB fetches each of these attributes from the parent + * table. This extra fetching incurs additional throughput cost and latency.

    + *

    If you query or scan a global secondary index, you can only request attributes + * that are projected into the index. Global secondary index queries cannot fetch + * attributes from the parent table.

    + *
  • + *
+ *

If neither Select nor ProjectionExpression are specified, + * DynamoDB defaults to ALL_ATTRIBUTES when accessing a table, and + * ALL_PROJECTED_ATTRIBUTES when accessing an index. You cannot use both + * Select and ProjectionExpression together in a single + * request, unless the value for Select is SPECIFIC_ATTRIBUTES. + * (This usage is equivalent to specifying ProjectionExpression without any + * value for Select.)

+ * + *

If you use the ProjectionExpression parameter, then the value for + * Select can only be SPECIFIC_ATTRIBUTES. Any other + * value for Select will return an error.

+ *
+ * @public + */ + Select?: Select | undefined; + /** + *

This is a legacy parameter. Use FilterExpression instead. For more + * information, see ScanFilter in the Amazon DynamoDB Developer + * Guide.

+ * @public + */ + ScanFilter?: Record | undefined; + /** + *

This is a legacy parameter. Use FilterExpression instead. For more + * information, see ConditionalOperator in the Amazon DynamoDB Developer + * Guide.

+ * @public + */ + ConditionalOperator?: ConditionalOperator | undefined; + /** + *

The primary key of the first item that this operation will evaluate. Use the value + * that was returned for LastEvaluatedKey in the previous operation.

+ *

The data type for ExclusiveStartKey must be String, Number or Binary. No + * set data types are allowed.

+ *

In a parallel scan, a Scan request that includes + * ExclusiveStartKey must specify the same segment whose previous + * Scan returned the corresponding value of + * LastEvaluatedKey.

+ * @public + */ + ExclusiveStartKey?: Record | undefined; + /** + *

Determines the level of detail about either provisioned or on-demand throughput + * consumption that is returned in the response:

+ *
    + *
  • + *

    + * INDEXES - The response includes the aggregate + * ConsumedCapacity for the operation, together with + * ConsumedCapacity for each table and secondary index that was + * accessed.

    + *

    Note that some operations, such as GetItem and + * BatchGetItem, do not access any indexes at all. In these cases, + * specifying INDEXES will only return ConsumedCapacity + * information for table(s).

    + *
  • + *
  • + *

    + * TOTAL - The response includes only the aggregate + * ConsumedCapacity for the operation.

    + *
  • + *
  • + *

    + * NONE - No ConsumedCapacity details are included in the + * response.

    + *
  • + *
+ * @public + */ + ReturnConsumedCapacity?: ReturnConsumedCapacity | undefined; + /** + *

For a parallel Scan request, TotalSegments represents the + * total number of segments into which the Scan operation will be divided. The + * value of TotalSegments corresponds to the number of application workers + * that will perform the parallel scan. For example, if you want to use four application + * threads to scan a table or an index, specify a TotalSegments value of + * 4.

+ *

The value for TotalSegments must be greater than or equal to 1, and less + * than or equal to 1000000. If you specify a TotalSegments value of 1, the + * Scan operation will be sequential rather than parallel.

+ *

If you specify TotalSegments, you must also specify + * Segment.

+ * @public + */ + TotalSegments?: number | undefined; + /** + *

For a parallel Scan request, Segment identifies an + * individual segment to be scanned by an application worker.

+ *

Segment IDs are zero-based, so the first segment is always 0. For example, if you want + * to use four application threads to scan a table or an index, then the first thread + * specifies a Segment value of 0, the second thread specifies 1, and so + * on.

+ *

The value of LastEvaluatedKey returned from a parallel Scan + * request must be used as ExclusiveStartKey with the same segment ID in a + * subsequent Scan operation.

+ *

The value for Segment must be greater than or equal to 0, and less than + * the value provided for TotalSegments.

+ *

If you provide Segment, you must also provide + * TotalSegments.

+ * @public + */ + Segment?: number | undefined; + /** + *

A string that identifies one or more attributes to retrieve from the specified table + * or index. These attributes can include scalars, sets, or elements of a JSON document. + * The attributes in the expression must be separated by commas.

+ *

If no attribute names are specified, then all attributes will be returned. If any of + * the requested attributes are not found, they will not appear in the result.

+ *

For more information, see Specifying Item Attributes in the Amazon DynamoDB Developer + * Guide.

+ * @public + */ + ProjectionExpression?: string | undefined; + /** + *

A string that contains conditions that DynamoDB applies after the Scan + * operation, but before the data is returned to you. Items that do not satisfy the + * FilterExpression criteria are not returned.

+ * + *

A FilterExpression is applied after the items have already been read; + * the process of filtering does not consume any additional read capacity units.

+ *
+ *

For more information, see Filter + * Expressions in the Amazon DynamoDB Developer + * Guide.

+ * @public + */ + FilterExpression?: string | undefined; + /** + *

One or more substitution tokens for attribute names in an expression. The following + * are some use cases for using ExpressionAttributeNames:

+ *
    + *
  • + *

    To access an attribute whose name conflicts with a DynamoDB reserved + * word.

    + *
  • + *
  • + *

    To create a placeholder for repeating occurrences of an attribute name in an + * expression.

    + *
  • + *
  • + *

    To prevent special characters in an attribute name from being misinterpreted + * in an expression.

    + *
  • + *
+ *

Use the # character in an expression to dereference + * an attribute name. For example, consider the following attribute name:

+ *
    + *
  • + *

    + * Percentile + *

    + *
  • + *
+ *

The name of this attribute conflicts with a reserved word, so it cannot be used + * directly in an expression. (For the complete list of reserved words, see Reserved Words in the Amazon DynamoDB Developer + * Guide). To work around this, you could specify the following for + * ExpressionAttributeNames:

+ *
    + *
  • + *

    + * \{"#P":"Percentile"\} + *

    + *
  • + *
+ *

You could then use this substitution in an expression, as in this example:

+ *
    + *
  • + *

    + * #P = :val + *

    + *
  • + *
+ * + *

Tokens that begin with the : character are + * expression attribute values, which are placeholders for the + * actual value at runtime.

+ *
+ *

For more information on expression attribute names, see Specifying Item Attributes in the Amazon DynamoDB Developer + * Guide.

+ * @public + */ + ExpressionAttributeNames?: Record | undefined; + /** + *

One or more values that can be substituted in an expression.

+ *

Use the : (colon) character in an expression to + * dereference an attribute value. For example, suppose that you wanted to check whether + * the value of the ProductStatus attribute was one of the following:

+ *

+ * Available | Backordered | Discontinued + *

+ *

You would first need to specify ExpressionAttributeValues as + * follows:

+ *

+ * \{ ":avail":\{"S":"Available"\}, ":back":\{"S":"Backordered"\}, + * ":disc":\{"S":"Discontinued"\} \} + *

+ *

You could then use these values in an expression, such as this:

+ *

+ * ProductStatus IN (:avail, :back, :disc) + *

+ *

For more information on expression attribute values, see Condition Expressions in the Amazon DynamoDB Developer + * Guide.

+ * @public + */ + ExpressionAttributeValues?: Record | undefined; + /** + *

A Boolean value that determines the read consistency model during the scan:

+ *
    + *
  • + *

    If ConsistentRead is false, then the data returned + * from Scan might not contain the results from other recently + * completed write operations (PutItem, UpdateItem, or + * DeleteItem).

    + *
  • + *
  • + *

    If ConsistentRead is true, then all of the write + * operations that completed before the Scan began are guaranteed to + * be contained in the Scan response.

    + *
  • + *
+ *

The default setting for ConsistentRead is false.

+ *

The ConsistentRead parameter is not supported on global secondary + * indexes. If you scan a global secondary index with ConsistentRead set to + * true, you will receive a ValidationException.

+ * @public + */ + ConsistentRead?: boolean | undefined; +} +/** + *

Represents the input of a BatchWriteItem operation.

+ * @public + */ +export interface BatchWriteItemInput { + /** + *

A map of one or more table names or table ARNs and, for each table, a list of + * operations to be performed (DeleteRequest or PutRequest). Each + * element in the map consists of the following:

+ *
    + *
  • + *

    + * DeleteRequest - Perform a DeleteItem operation on the + * specified item. The item to be deleted is identified by a Key + * subelement:

    + *
      + *
    • + *

      + * Key - A map of primary key attribute values that uniquely + * identify the item. Each entry in this map consists of an attribute name + * and an attribute value. For each primary key, you must provide + * all of the key attributes. For example, with a + * simple primary key, you only need to provide a value for the partition + * key. For a composite primary key, you must provide values for + * both the partition key and the sort key.

      + *
    • + *
    + *
  • + *
  • + *

    + * PutRequest - Perform a PutItem operation on the + * specified item. The item to be put is identified by an Item + * subelement:

    + *
      + *
    • + *

      + * Item - A map of attributes and their values. Each entry in + * this map consists of an attribute name and an attribute value. Attribute + * values must not be null; string and binary type attributes must have + * lengths greater than zero; and set type attributes must not be empty. + * Requests that contain empty values are rejected with a + * ValidationException exception.

      + *

      If you specify any attributes that are part of an index key, then the + * data types for those attributes must match those of the schema in the + * table's attribute definition.

      + *
    • + *
    + *
  • + *
+ * @public + */ + RequestItems: Record | undefined; + /** + *

Determines the level of detail about either provisioned or on-demand throughput + * consumption that is returned in the response:

+ *
    + *
  • + *

    + * INDEXES - The response includes the aggregate + * ConsumedCapacity for the operation, together with + * ConsumedCapacity for each table and secondary index that was + * accessed.

    + *

    Note that some operations, such as GetItem and + * BatchGetItem, do not access any indexes at all. In these cases, + * specifying INDEXES will only return ConsumedCapacity + * information for table(s).

    + *
  • + *
  • + *

    + * TOTAL - The response includes only the aggregate + * ConsumedCapacity for the operation.

    + *
  • + *
  • + *

    + * NONE - No ConsumedCapacity details are included in the + * response.

    + *
  • + *
+ * @public + */ + ReturnConsumedCapacity?: ReturnConsumedCapacity | undefined; + /** + *

Determines whether item collection metrics are returned. If set to SIZE, + * the response includes statistics about item collections, if any, that were modified + * during the operation are returned in the response. If set to NONE (the + * default), no statistics are returned.

+ * @public + */ + ReturnItemCollectionMetrics?: ReturnItemCollectionMetrics | undefined; +} +/** + *

Represents the input of a DeleteItem operation.

+ * @public + */ +export interface DeleteItemInput { + /** + *

The name of the table from which to delete the item. You can also provide the + * Amazon Resource Name (ARN) of the table in this parameter.

+ * @public + */ + TableName: string | undefined; + /** + *

A map of attribute names to AttributeValue objects, representing the + * primary key of the item to delete.

+ *

For the primary key, you must provide all of the key attributes. For example, with a + * simple primary key, you only need to provide a value for the partition key. For a + * composite primary key, you must provide values for both the partition key and the sort + * key.

+ * @public + */ + Key: Record | undefined; + /** + *

This is a legacy parameter. Use ConditionExpression instead. For more + * information, see Expected in the Amazon DynamoDB Developer + * Guide.

+ * @public + */ + Expected?: Record | undefined; + /** + *

This is a legacy parameter. Use ConditionExpression instead. For more + * information, see ConditionalOperator in the Amazon DynamoDB Developer + * Guide.

+ * @public + */ + ConditionalOperator?: ConditionalOperator | undefined; + /** + *

Use ReturnValues if you want to get the item attributes as they appeared + * before they were deleted. For DeleteItem, the valid values are:

+ *
    + *
  • + *

    + * NONE - If ReturnValues is not specified, or if its + * value is NONE, then nothing is returned. (This setting is the + * default for ReturnValues.)

    + *
  • + *
  • + *

    + * ALL_OLD - The content of the old item is returned.

    + *
  • + *
+ *

There is no additional cost associated with requesting a return value aside from the + * small network and processing overhead of receiving a larger response. No read capacity + * units are consumed.

+ * + *

The ReturnValues parameter is used by several DynamoDB operations; + * however, DeleteItem does not recognize any values other than + * NONE or ALL_OLD.

+ *
+ * @public + */ + ReturnValues?: ReturnValue | undefined; + /** + *

Determines the level of detail about either provisioned or on-demand throughput + * consumption that is returned in the response:

+ *
    + *
  • + *

    + * INDEXES - The response includes the aggregate + * ConsumedCapacity for the operation, together with + * ConsumedCapacity for each table and secondary index that was + * accessed.

    + *

    Note that some operations, such as GetItem and + * BatchGetItem, do not access any indexes at all. In these cases, + * specifying INDEXES will only return ConsumedCapacity + * information for table(s).

    + *
  • + *
  • + *

    + * TOTAL - The response includes only the aggregate + * ConsumedCapacity for the operation.

    + *
  • + *
  • + *

    + * NONE - No ConsumedCapacity details are included in the + * response.

    + *
  • + *
+ * @public + */ + ReturnConsumedCapacity?: ReturnConsumedCapacity | undefined; + /** + *

Determines whether item collection metrics are returned. If set to SIZE, + * the response includes statistics about item collections, if any, that were modified + * during the operation are returned in the response. If set to NONE (the + * default), no statistics are returned.

+ * @public + */ + ReturnItemCollectionMetrics?: ReturnItemCollectionMetrics | undefined; + /** + *

A condition that must be satisfied in order for a conditional DeleteItem + * to succeed.

+ *

An expression can contain any of the following:

+ *
    + *
  • + *

    Functions: attribute_exists | attribute_not_exists | attribute_type | + * contains | begins_with | size + *

    + *

    These function names are case-sensitive.

    + *
  • + *
  • + *

    Comparison operators: = | <> | + * < | > | <= | >= | + * BETWEEN | IN + *

    + *
  • + *
  • + *

    Logical operators: AND | OR | NOT + *

    + *
  • + *
+ *

For more information about condition expressions, see Condition Expressions in the Amazon DynamoDB Developer + * Guide.

+ * @public + */ + ConditionExpression?: string | undefined; + /** + *

One or more substitution tokens for attribute names in an expression. The following + * are some use cases for using ExpressionAttributeNames:

+ *
    + *
  • + *

    To access an attribute whose name conflicts with a DynamoDB reserved + * word.

    + *
  • + *
  • + *

    To create a placeholder for repeating occurrences of an attribute name in an + * expression.

    + *
  • + *
  • + *

    To prevent special characters in an attribute name from being misinterpreted + * in an expression.

    + *
  • + *
+ *

Use the # character in an expression to dereference + * an attribute name. For example, consider the following attribute name:

+ *
    + *
  • + *

    + * Percentile + *

    + *
  • + *
+ *

The name of this attribute conflicts with a reserved word, so it cannot be used + * directly in an expression. (For the complete list of reserved words, see Reserved Words in the Amazon DynamoDB Developer + * Guide). To work around this, you could specify the following for + * ExpressionAttributeNames:

+ *
    + *
  • + *

    + * \{"#P":"Percentile"\} + *

    + *
  • + *
+ *

You could then use this substitution in an expression, as in this example:

+ *
    + *
  • + *

    + * #P = :val + *

    + *
  • + *
+ * + *

Tokens that begin with the : character are + * expression attribute values, which are placeholders for the + * actual value at runtime.

+ *
+ *

For more information on expression attribute names, see Specifying Item Attributes in the Amazon DynamoDB Developer + * Guide.

+ * @public + */ + ExpressionAttributeNames?: Record | undefined; + /** + *

One or more values that can be substituted in an expression.

+ *

Use the : (colon) character in an expression to + * dereference an attribute value. For example, suppose that you wanted to check whether + * the value of the ProductStatus attribute was one of the following:

+ *

+ * Available | Backordered | Discontinued + *

+ *

You would first need to specify ExpressionAttributeValues as + * follows:

+ *

+ * \{ ":avail":\{"S":"Available"\}, ":back":\{"S":"Backordered"\}, + * ":disc":\{"S":"Discontinued"\} \} + *

+ *

You could then use these values in an expression, such as this:

+ *

+ * ProductStatus IN (:avail, :back, :disc) + *

+ *

For more information on expression attribute values, see Condition Expressions in the Amazon DynamoDB Developer + * Guide.

+ * @public + */ + ExpressionAttributeValues?: Record | undefined; + /** + *

An optional parameter that returns the item attributes for a DeleteItem + * operation that failed a condition check.

+ *

There is no additional cost associated with requesting a return value aside from the + * small network and processing overhead of receiving a larger response. No read capacity + * units are consumed.

+ * @public + */ + ReturnValuesOnConditionCheckFailure?: ReturnValuesOnConditionCheckFailure | undefined; +} +/** + *

Represents the input of a PutItem operation.

+ * @public + */ +export interface PutItemInput { + /** + *

The name of the table to contain the item. You can also provide the Amazon Resource Name (ARN) of the + * table in this parameter.

+ * @public + */ + TableName: string | undefined; + /** + *

A map of attribute name/value pairs, one for each attribute. Only the primary key + * attributes are required; you can optionally provide other attribute name-value pairs for + * the item.

+ *

You must provide all of the attributes for the primary key. For example, with a simple + * primary key, you only need to provide a value for the partition key. For a composite + * primary key, you must provide both values for both the partition key and the sort + * key.

+ *

If you specify any attributes that are part of an index key, then the data types for + * those attributes must match those of the schema in the table's attribute + * definition.

+ *

Empty String and Binary attribute values are allowed. Attribute values of type String + * and Binary must have a length greater than zero if the attribute is used as a key + * attribute for a table or index.

+ *

For more information about primary keys, see Primary Key in the Amazon DynamoDB Developer + * Guide.

+ *

Each element in the Item map is an AttributeValue + * object.

+ * @public + */ + Item: Record | undefined; + /** + *

This is a legacy parameter. Use ConditionExpression instead. For more + * information, see Expected in the Amazon DynamoDB Developer + * Guide.

+ * @public + */ + Expected?: Record | undefined; + /** + *

Use ReturnValues if you want to get the item attributes as they appeared + * before they were updated with the PutItem request. For + * PutItem, the valid values are:

+ *
    + *
  • + *

    + * NONE - If ReturnValues is not specified, or if its + * value is NONE, then nothing is returned. (This setting is the + * default for ReturnValues.)

    + *
  • + *
  • + *

    + * ALL_OLD - If PutItem overwrote an attribute name-value + * pair, then the content of the old item is returned.

    + *
  • + *
+ *

The values returned are strongly consistent.

+ *

There is no additional cost associated with requesting a return value aside from the + * small network and processing overhead of receiving a larger response. No read capacity + * units are consumed.

+ * + *

The ReturnValues parameter is used by several DynamoDB operations; + * however, PutItem does not recognize any values other than + * NONE or ALL_OLD.

+ *
+ * @public + */ + ReturnValues?: ReturnValue | undefined; + /** + *

Determines the level of detail about either provisioned or on-demand throughput + * consumption that is returned in the response:

+ *
    + *
  • + *

    + * INDEXES - The response includes the aggregate + * ConsumedCapacity for the operation, together with + * ConsumedCapacity for each table and secondary index that was + * accessed.

    + *

    Note that some operations, such as GetItem and + * BatchGetItem, do not access any indexes at all. In these cases, + * specifying INDEXES will only return ConsumedCapacity + * information for table(s).

    + *
  • + *
  • + *

    + * TOTAL - The response includes only the aggregate + * ConsumedCapacity for the operation.

    + *
  • + *
  • + *

    + * NONE - No ConsumedCapacity details are included in the + * response.

    + *
  • + *
+ * @public + */ + ReturnConsumedCapacity?: ReturnConsumedCapacity | undefined; + /** + *

Determines whether item collection metrics are returned. If set to SIZE, + * the response includes statistics about item collections, if any, that were modified + * during the operation are returned in the response. If set to NONE (the + * default), no statistics are returned.

+ * @public + */ + ReturnItemCollectionMetrics?: ReturnItemCollectionMetrics | undefined; + /** + *

This is a legacy parameter. Use ConditionExpression instead. For more + * information, see ConditionalOperator in the Amazon DynamoDB Developer + * Guide.

+ * @public + */ + ConditionalOperator?: ConditionalOperator | undefined; + /** + *

A condition that must be satisfied in order for a conditional PutItem + * operation to succeed.

+ *

An expression can contain any of the following:

+ *
    + *
  • + *

    Functions: attribute_exists | attribute_not_exists | attribute_type | + * contains | begins_with | size + *

    + *

    These function names are case-sensitive.

    + *
  • + *
  • + *

    Comparison operators: = | <> | + * < | > | <= | >= | + * BETWEEN | IN + *

    + *
  • + *
  • + *

    Logical operators: AND | OR | NOT + *

    + *
  • + *
+ *

For more information on condition expressions, see Condition Expressions in the Amazon DynamoDB Developer + * Guide.

+ * @public + */ + ConditionExpression?: string | undefined; + /** + *

One or more substitution tokens for attribute names in an expression. The following + * are some use cases for using ExpressionAttributeNames:

+ *
    + *
  • + *

    To access an attribute whose name conflicts with a DynamoDB reserved + * word.

    + *
  • + *
  • + *

    To create a placeholder for repeating occurrences of an attribute name in an + * expression.

    + *
  • + *
  • + *

    To prevent special characters in an attribute name from being misinterpreted + * in an expression.

    + *
  • + *
+ *

Use the # character in an expression to dereference + * an attribute name. For example, consider the following attribute name:

+ *
    + *
  • + *

    + * Percentile + *

    + *
  • + *
+ *

The name of this attribute conflicts with a reserved word, so it cannot be used + * directly in an expression. (For the complete list of reserved words, see Reserved Words in the Amazon DynamoDB Developer + * Guide). To work around this, you could specify the following for + * ExpressionAttributeNames:

+ *
    + *
  • + *

    + * \{"#P":"Percentile"\} + *

    + *
  • + *
+ *

You could then use this substitution in an expression, as in this example:

+ *
    + *
  • + *

    + * #P = :val + *

    + *
  • + *
+ * + *

Tokens that begin with the : character are + * expression attribute values, which are placeholders for the + * actual value at runtime.

+ *
+ *

For more information on expression attribute names, see Specifying Item Attributes in the Amazon DynamoDB Developer + * Guide.

+ * @public + */ + ExpressionAttributeNames?: Record | undefined; + /** + *

One or more values that can be substituted in an expression.

+ *

Use the : (colon) character in an expression to + * dereference an attribute value. For example, suppose that you wanted to check whether + * the value of the ProductStatus attribute was one of the following:

+ *

+ * Available | Backordered | Discontinued + *

+ *

You would first need to specify ExpressionAttributeValues as + * follows:

+ *

+ * \{ ":avail":\{"S":"Available"\}, ":back":\{"S":"Backordered"\}, + * ":disc":\{"S":"Discontinued"\} \} + *

+ *

You could then use these values in an expression, such as this:

+ *

+ * ProductStatus IN (:avail, :back, :disc) + *

+ *

For more information on expression attribute values, see Condition Expressions in the Amazon DynamoDB Developer + * Guide.

+ * @public + */ + ExpressionAttributeValues?: Record | undefined; + /** + *

An optional parameter that returns the item attributes for a PutItem + * operation that failed a condition check.

+ *

There is no additional cost associated with requesting a return value aside from the + * small network and processing overhead of receiving a larger response. No read capacity + * units are consumed.

+ * @public + */ + ReturnValuesOnConditionCheckFailure?: ReturnValuesOnConditionCheckFailure | undefined; +} +/** + *

Represents the input of a Query operation.

+ * @public + */ +export interface QueryInput { + /** + *

The name of the table containing the requested items. You can also provide the + * Amazon Resource Name (ARN) of the table in this parameter.

+ * @public + */ + TableName: string | undefined; + /** + *

The name of an index to query. This index can be any local secondary index or global + * secondary index on the table. Note that if you use the IndexName parameter, + * you must also provide TableName. + *

+ * @public + */ + IndexName?: string | undefined; + /** + *

The attributes to be returned in the result. You can retrieve all item attributes, + * specific item attributes, the count of matching items, or in the case of an index, some + * or all of the attributes projected into the index.

+ *
    + *
  • + *

    + * ALL_ATTRIBUTES - Returns all of the item attributes from the + * specified table or index. If you query a local secondary index, then for each + * matching item in the index, DynamoDB fetches the entire item from the parent + * table. If the index is configured to project all item attributes, then all of + * the data can be obtained from the local secondary index, and no fetching is + * required.

    + *
  • + *
  • + *

    + * ALL_PROJECTED_ATTRIBUTES - Allowed only when querying an index. + * Retrieves all attributes that have been projected into the index. If the index + * is configured to project all attributes, this return value is equivalent to + * specifying ALL_ATTRIBUTES.

    + *
  • + *
  • + *

    + * COUNT - Returns the number of matching items, rather than the + * matching items themselves. Note that this uses the same quantity of read + * capacity units as getting the items, and is subject to the same item size + * calculations.

    + *
  • + *
  • + *

    + * SPECIFIC_ATTRIBUTES - Returns only the attributes listed in + * ProjectionExpression. This return value is equivalent to + * specifying ProjectionExpression without specifying any value for + * Select.

    + *

    If you query or scan a local secondary index and request only attributes that + * are projected into that index, the operation will read only the index and not + * the table. If any of the requested attributes are not projected into the local + * secondary index, DynamoDB fetches each of these attributes from the parent + * table. This extra fetching incurs additional throughput cost and latency.

    + *

    If you query or scan a global secondary index, you can only request attributes + * that are projected into the index. Global secondary index queries cannot fetch + * attributes from the parent table.

    + *
  • + *
+ *

If neither Select nor ProjectionExpression are specified, + * DynamoDB defaults to ALL_ATTRIBUTES when accessing a table, and + * ALL_PROJECTED_ATTRIBUTES when accessing an index. You cannot use both + * Select and ProjectionExpression together in a single + * request, unless the value for Select is SPECIFIC_ATTRIBUTES. + * (This usage is equivalent to specifying ProjectionExpression without any + * value for Select.)

+ * + *

If you use the ProjectionExpression parameter, then the value for + * Select can only be SPECIFIC_ATTRIBUTES. Any other + * value for Select will return an error.

+ *
+ * @public + */ + Select?: Select | undefined; + /** + *

This is a legacy parameter. Use ProjectionExpression instead. For more + * information, see AttributesToGet in the Amazon DynamoDB Developer + * Guide.

+ * @public + */ + AttributesToGet?: string[] | undefined; + /** + *

The maximum number of items to evaluate (not necessarily the number of matching + * items). If DynamoDB processes the number of items up to the limit while processing the + * results, it stops the operation and returns the matching values up to that point, and a + * key in LastEvaluatedKey to apply in a subsequent operation, so that you can + * pick up where you left off. Also, if the processed dataset size exceeds 1 MB before + * DynamoDB reaches this limit, it stops the operation and returns the matching values up + * to the limit, and a key in LastEvaluatedKey to apply in a subsequent + * operation to continue the operation. For more information, see Query and Scan in the Amazon DynamoDB Developer + * Guide.

+ * @public + */ + Limit?: number | undefined; + /** + *

Determines the read consistency model: If set to true, then the operation + * uses strongly consistent reads; otherwise, the operation uses eventually consistent + * reads.

+ *

Strongly consistent reads are not supported on global secondary indexes. If you query + * a global secondary index with ConsistentRead set to true, you + * will receive a ValidationException.

+ * @public + */ + ConsistentRead?: boolean | undefined; + /** + *

This is a legacy parameter. Use KeyConditionExpression instead. For more + * information, see KeyConditions in the Amazon DynamoDB Developer + * Guide.

+ * @public + */ + KeyConditions?: Record | undefined; + /** + *

This is a legacy parameter. Use FilterExpression instead. For more + * information, see QueryFilter in the Amazon DynamoDB Developer + * Guide.

+ * @public + */ + QueryFilter?: Record | undefined; + /** + *

This is a legacy parameter. Use FilterExpression instead. For more + * information, see ConditionalOperator in the Amazon DynamoDB Developer + * Guide.

+ * @public + */ + ConditionalOperator?: ConditionalOperator | undefined; + /** + *

Specifies the order for index traversal: If true (default), the traversal + * is performed in ascending order; if false, the traversal is performed in + * descending order.

+ *

Items with the same partition key value are stored in sorted order by sort key. If the + * sort key data type is Number, the results are stored in numeric order. For type String, + * the results are stored in order of UTF-8 bytes. For type Binary, DynamoDB treats each + * byte of the binary data as unsigned.

+ *

If ScanIndexForward is true, DynamoDB returns the results in + * the order in which they are stored (by sort key value). This is the default behavior. If + * ScanIndexForward is false, DynamoDB reads the results in + * reverse order by sort key value, and then returns the results to the client.

+ * @public + */ + ScanIndexForward?: boolean | undefined; + /** + *

The primary key of the first item that this operation will evaluate. Use the value + * that was returned for LastEvaluatedKey in the previous operation.

+ *

The data type for ExclusiveStartKey must be String, Number, or Binary. No + * set data types are allowed.

+ * @public + */ + ExclusiveStartKey?: Record | undefined; + /** + *

Determines the level of detail about either provisioned or on-demand throughput + * consumption that is returned in the response:

+ *
    + *
  • + *

    + * INDEXES - The response includes the aggregate + * ConsumedCapacity for the operation, together with + * ConsumedCapacity for each table and secondary index that was + * accessed.

    + *

    Note that some operations, such as GetItem and + * BatchGetItem, do not access any indexes at all. In these cases, + * specifying INDEXES will only return ConsumedCapacity + * information for table(s).

    + *
  • + *
  • + *

    + * TOTAL - The response includes only the aggregate + * ConsumedCapacity for the operation.

    + *
  • + *
  • + *

    + * NONE - No ConsumedCapacity details are included in the + * response.

    + *
  • + *
+ * @public + */ + ReturnConsumedCapacity?: ReturnConsumedCapacity | undefined; + /** + *

A string that identifies one or more attributes to retrieve from the table. These + * attributes can include scalars, sets, or elements of a JSON document. The attributes in + * the expression must be separated by commas.

+ *

If no attribute names are specified, then all attributes will be returned. If any of + * the requested attributes are not found, they will not appear in the result.

+ *

For more information, see Accessing Item Attributes in the Amazon DynamoDB Developer + * Guide.

+ * @public + */ + ProjectionExpression?: string | undefined; + /** + *

A string that contains conditions that DynamoDB applies after the Query + * operation, but before the data is returned to you. Items that do not satisfy the + * FilterExpression criteria are not returned.

+ *

A FilterExpression does not allow key attributes. You cannot define a + * filter expression based on a partition key or a sort key.

+ * + *

A FilterExpression is applied after the items have already been read; + * the process of filtering does not consume any additional read capacity units.

+ *
+ *

For more information, see Filter + * Expressions in the Amazon DynamoDB Developer + * Guide.

+ * @public + */ + FilterExpression?: string | undefined; + /** + *

The condition that specifies the key values for items to be retrieved by the + * Query action.

+ *

The condition must perform an equality test on a single partition key value.

+ *

The condition can optionally perform one of several comparison tests on a single sort + * key value. This allows Query to retrieve one item with a given partition + * key value and sort key value, or several items that have the same partition key value + * but different sort key values.

+ *

The partition key equality test is required, and must be specified in the following + * format:

+ *

+ * partitionKeyName + * = + * :partitionkeyval + *

+ *

If you also want to provide a condition for the sort key, it must be combined using + * AND with the condition for the sort key. Following is an example, using + * the = comparison operator for the sort key:

+ *

+ * partitionKeyName + * = + * :partitionkeyval + * AND + * sortKeyName + * = + * :sortkeyval + *

+ *

Valid comparisons for the sort key condition are as follows:

+ *
    + *
  • + *

    + * sortKeyName + * = + * :sortkeyval - true if the sort key value is equal to + * :sortkeyval.

    + *
  • + *
  • + *

    + * sortKeyName + * < + * :sortkeyval - true if the sort key value is less than + * :sortkeyval.

    + *
  • + *
  • + *

    + * sortKeyName + * <= + * :sortkeyval - true if the sort key value is less than or equal to + * :sortkeyval.

    + *
  • + *
  • + *

    + * sortKeyName + * > + * :sortkeyval - true if the sort key value is greater than + * :sortkeyval.

    + *
  • + *
  • + *

    + * sortKeyName + * >= + * :sortkeyval - true if the sort key value is greater than or equal + * to :sortkeyval.

    + *
  • + *
  • + *

    + * sortKeyName + * BETWEEN + * :sortkeyval1 + * AND + * :sortkeyval2 - true if the sort key value is greater than or equal + * to :sortkeyval1, and less than or equal to + * :sortkeyval2.

    + *
  • + *
  • + *

    + * begins_with ( + * sortKeyName, :sortkeyval + * ) - true if the sort key value begins with a particular operand. + * (You cannot use this function with a sort key that is of type Number.) Note that + * the function name begins_with is case-sensitive.

    + *
  • + *
+ *

Use the ExpressionAttributeValues parameter to replace tokens such as + * :partitionval and :sortval with actual values at + * runtime.

+ *

You can optionally use the ExpressionAttributeNames parameter to replace + * the names of the partition key and sort key with placeholder tokens. This option might + * be necessary if an attribute name conflicts with a DynamoDB reserved word. For example, + * the following KeyConditionExpression parameter causes an error because + * Size is a reserved word:

+ *
    + *
  • + *

    + * Size = :myval + *

    + *
  • + *
+ *

To work around this, define a placeholder (such a #S) to represent the + * attribute name Size. KeyConditionExpression then is as + * follows:

+ *
    + *
  • + *

    + * #S = :myval + *

    + *
  • + *
+ *

For a list of reserved words, see Reserved Words + * in the Amazon DynamoDB Developer Guide.

+ *

For more information on ExpressionAttributeNames and + * ExpressionAttributeValues, see Using + * Placeholders for Attribute Names and Values in the Amazon DynamoDB + * Developer Guide.

+ * @public + */ + KeyConditionExpression?: string | undefined; + /** + *

One or more substitution tokens for attribute names in an expression. The following + * are some use cases for using ExpressionAttributeNames:

+ *
    + *
  • + *

    To access an attribute whose name conflicts with a DynamoDB reserved + * word.

    + *
  • + *
  • + *

    To create a placeholder for repeating occurrences of an attribute name in an + * expression.

    + *
  • + *
  • + *

    To prevent special characters in an attribute name from being misinterpreted + * in an expression.

    + *
  • + *
+ *

Use the # character in an expression to dereference + * an attribute name. For example, consider the following attribute name:

+ *
    + *
  • + *

    + * Percentile + *

    + *
  • + *
+ *

The name of this attribute conflicts with a reserved word, so it cannot be used + * directly in an expression. (For the complete list of reserved words, see Reserved Words in the Amazon DynamoDB Developer + * Guide). To work around this, you could specify the following for + * ExpressionAttributeNames:

+ *
    + *
  • + *

    + * \{"#P":"Percentile"\} + *

    + *
  • + *
+ *

You could then use this substitution in an expression, as in this example:

+ *
    + *
  • + *

    + * #P = :val + *

    + *
  • + *
+ * + *

Tokens that begin with the : character are + * expression attribute values, which are placeholders for the + * actual value at runtime.

+ *
+ *

For more information on expression attribute names, see Specifying Item Attributes in the Amazon DynamoDB Developer + * Guide.

+ * @public + */ + ExpressionAttributeNames?: Record | undefined; + /** + *

One or more values that can be substituted in an expression.

+ *

Use the : (colon) character in an expression to + * dereference an attribute value. For example, suppose that you wanted to check whether + * the value of the ProductStatus attribute was one of the following:

+ *

+ * Available | Backordered | Discontinued + *

+ *

You would first need to specify ExpressionAttributeValues as + * follows:

+ *

+ * \{ ":avail":\{"S":"Available"\}, ":back":\{"S":"Backordered"\}, + * ":disc":\{"S":"Discontinued"\} \} + *

+ *

You could then use these values in an expression, such as this:

+ *

+ * ProductStatus IN (:avail, :back, :disc) + *

+ *

For more information on expression attribute values, see Specifying Conditions in the Amazon DynamoDB Developer + * Guide.

+ * @public + */ + ExpressionAttributeValues?: Record | undefined; +} +/** + *

Represents the output of a BatchWriteItem operation.

+ * @public + */ +export interface BatchWriteItemOutput { + /** + *

A map of tables and requests against those tables that were not processed. The + * UnprocessedItems value is in the same form as + * RequestItems, so you can provide this value directly to a subsequent + * BatchWriteItem operation. For more information, see + * RequestItems in the Request Parameters section.

+ *

Each UnprocessedItems entry consists of a table name or table ARN + * and, for that table, a list of operations to perform (DeleteRequest or + * PutRequest).

+ *
    + *
  • + *

    + * DeleteRequest - Perform a DeleteItem operation on the + * specified item. The item to be deleted is identified by a Key + * subelement:

    + *
      + *
    • + *

      + * Key - A map of primary key attribute values that uniquely + * identify the item. Each entry in this map consists of an attribute name + * and an attribute value.

      + *
    • + *
    + *
  • + *
  • + *

    + * PutRequest - Perform a PutItem operation on the + * specified item. The item to be put is identified by an Item + * subelement:

    + *
      + *
    • + *

      + * Item - A map of attributes and their values. Each entry in + * this map consists of an attribute name and an attribute value. Attribute + * values must not be null; string and binary type attributes must have + * lengths greater than zero; and set type attributes must not be empty. + * Requests that contain empty values will be rejected with a + * ValidationException exception.

      + *

      If you specify any attributes that are part of an index key, then the + * data types for those attributes must match those of the schema in the + * table's attribute definition.

      + *
    • + *
    + *
  • + *
+ *

If there are no unprocessed items remaining, the response contains an empty + * UnprocessedItems map.

+ * @public + */ + UnprocessedItems?: Record | undefined; + /** + *

A list of tables that were processed by BatchWriteItem and, for each + * table, information about any item collections that were affected by individual + * DeleteItem or PutItem operations.

+ *

Each entry consists of the following subelements:

+ *
    + *
  • + *

    + * ItemCollectionKey - The partition key value of the item collection. + * This is the same as the partition key value of the item.

    + *
  • + *
  • + *

    + * SizeEstimateRangeGB - An estimate of item collection size, + * expressed in GB. This is a two-element array containing a lower bound and an + * upper bound for the estimate. The estimate includes the size of all the items in + * the table, plus the size of all attributes projected into all of the local + * secondary indexes on the table. Use this estimate to measure whether a local + * secondary index is approaching its size limit.

    + *

    The estimate is subject to change over time; therefore, do not rely on the + * precision or accuracy of the estimate.

    + *
  • + *
+ * @public + */ + ItemCollectionMetrics?: Record | undefined; + /** + *

The capacity units consumed by the entire BatchWriteItem + * operation.

+ *

Each element consists of:

+ *
    + *
  • + *

    + * TableName - The table that consumed the provisioned + * throughput.

    + *
  • + *
  • + *

    + * CapacityUnits - The total number of capacity units consumed.

    + *
  • + *
+ * @public + */ + ConsumedCapacity?: ConsumedCapacity[] | undefined; +} +/** + *

Represents the input of an UpdateItem operation.

+ * @public + */ +export interface UpdateItemInput { + /** + *

The name of the table containing the item to update. You can also provide the + * Amazon Resource Name (ARN) of the table in this parameter.

+ * @public + */ + TableName: string | undefined; + /** + *

The primary key of the item to be updated. Each element consists of an attribute name + * and a value for that attribute.

+ *

For the primary key, you must provide all of the attributes. For example, with a + * simple primary key, you only need to provide a value for the partition key. For a + * composite primary key, you must provide values for both the partition key and the sort + * key.

+ * @public + */ + Key: Record | undefined; + /** + *

This is a legacy parameter. Use UpdateExpression instead. For more + * information, see AttributeUpdates in the Amazon DynamoDB Developer + * Guide.

+ * @public + */ + AttributeUpdates?: Record | undefined; + /** + *

This is a legacy parameter. Use ConditionExpression instead. For more + * information, see Expected in the Amazon DynamoDB Developer + * Guide.

+ * @public + */ + Expected?: Record | undefined; + /** + *

This is a legacy parameter. Use ConditionExpression instead. For more + * information, see ConditionalOperator in the Amazon DynamoDB Developer + * Guide.

+ * @public + */ + ConditionalOperator?: ConditionalOperator | undefined; + /** + *

Use ReturnValues if you want to get the item attributes as they appear + * before or after they are successfully updated. For UpdateItem, the valid + * values are:

+ *
    + *
  • + *

    + * NONE - If ReturnValues is not specified, or if its + * value is NONE, then nothing is returned. (This setting is the + * default for ReturnValues.)

    + *
  • + *
  • + *

    + * ALL_OLD - Returns all of the attributes of the item, as they + * appeared before the UpdateItem operation.

    + *
  • + *
  • + *

    + * UPDATED_OLD - Returns only the updated attributes, as they appeared + * before the UpdateItem operation.

    + *
  • + *
  • + *

    + * ALL_NEW - Returns all of the attributes of the item, as they appear + * after the UpdateItem operation.

    + *
  • + *
  • + *

    + * UPDATED_NEW - Returns only the updated attributes, as they appear + * after the UpdateItem operation.

    + *
  • + *
+ *

There is no additional cost associated with requesting a return value aside from the + * small network and processing overhead of receiving a larger response. No read capacity + * units are consumed.

+ *

The values returned are strongly consistent.

+ * @public + */ + ReturnValues?: ReturnValue | undefined; + /** + *

Determines the level of detail about either provisioned or on-demand throughput + * consumption that is returned in the response:

+ *
    + *
  • + *

    + * INDEXES - The response includes the aggregate + * ConsumedCapacity for the operation, together with + * ConsumedCapacity for each table and secondary index that was + * accessed.

    + *

    Note that some operations, such as GetItem and + * BatchGetItem, do not access any indexes at all. In these cases, + * specifying INDEXES will only return ConsumedCapacity + * information for table(s).

    + *
  • + *
  • + *

    + * TOTAL - The response includes only the aggregate + * ConsumedCapacity for the operation.

    + *
  • + *
  • + *

    + * NONE - No ConsumedCapacity details are included in the + * response.

    + *
  • + *
+ * @public + */ + ReturnConsumedCapacity?: ReturnConsumedCapacity | undefined; + /** + *

Determines whether item collection metrics are returned. If set to SIZE, + * the response includes statistics about item collections, if any, that were modified + * during the operation are returned in the response. If set to NONE (the + * default), no statistics are returned.

+ * @public + */ + ReturnItemCollectionMetrics?: ReturnItemCollectionMetrics | undefined; + /** + *

An expression that defines one or more attributes to be updated, the action to be + * performed on them, and new values for them.

+ *

The following action values are available for UpdateExpression.

+ *
    + *
  • + *

    + * SET - Adds one or more attributes and values to an item. If any of + * these attributes already exist, they are replaced by the new values. You can + * also use SET to add or subtract from an attribute that is of type + * Number. For example: SET myNum = myNum + :val + *

    + *

    + * SET supports the following functions:

    + *
      + *
    • + *

      + * if_not_exists (path, operand) - if the item does not + * contain an attribute at the specified path, then + * if_not_exists evaluates to operand; otherwise, it + * evaluates to path. You can use this function to avoid overwriting an + * attribute that may already be present in the item.

      + *
    • + *
    • + *

      + * list_append (operand, operand) - evaluates to a list with a + * new element added to it. You can append the new element to the start or + * the end of the list by reversing the order of the operands.

      + *
    • + *
    + *

    These function names are case-sensitive.

    + *
  • + *
  • + *

    + * REMOVE - Removes one or more attributes from an item.

    + *
  • + *
  • + *

    + * ADD - Adds the specified value to the item, if the attribute does + * not already exist. If the attribute does exist, then the behavior of + * ADD depends on the data type of the attribute:

    + *
      + *
    • + *

      If the existing attribute is a number, and if Value is + * also a number, then Value is mathematically added to the + * existing attribute. If Value is a negative number, then it + * is subtracted from the existing attribute.

      + * + *

      If you use ADD to increment or decrement a number + * value for an item that doesn't exist before the update, DynamoDB + * uses 0 as the initial value.

      + *

      Similarly, if you use ADD for an existing item to + * increment or decrement an attribute value that doesn't exist before + * the update, DynamoDB uses 0 as the initial value. For + * example, suppose that the item you want to update doesn't have an + * attribute named itemcount, but you decide to + * ADD the number 3 to this attribute + * anyway. DynamoDB will create the itemcount attribute, + * set its initial value to 0, and finally add + * 3 to it. The result will be a new + * itemcount attribute in the item, with a value of + * 3.

      + *
      + *
    • + *
    • + *

      If the existing data type is a set and if Value is also a + * set, then Value is added to the existing set. For example, + * if the attribute value is the set [1,2], and the + * ADD action specified [3], then the final + * attribute value is [1,2,3]. An error occurs if an + * ADD action is specified for a set attribute and the + * attribute type specified does not match the existing set type.

      + *

      Both sets must have the same primitive data type. For example, if the + * existing data type is a set of strings, the Value must also + * be a set of strings.

      + *
    • + *
    + * + *

    The ADD action only supports Number and set data types. In + * addition, ADD can only be used on top-level attributes, not + * nested attributes.

    + *
    + *
  • + *
  • + *

    + * DELETE - Deletes an element from a set.

    + *

    If a set of values is specified, then those values are subtracted from the old + * set. For example, if the attribute value was the set [a,b,c] and + * the DELETE action specifies [a,c], then the final + * attribute value is [b]. Specifying an empty set is an error.

    + * + *

    The DELETE action only supports set data types. In addition, + * DELETE can only be used on top-level attributes, not nested + * attributes.

    + *
    + *
  • + *
+ *

You can have many actions in a single expression, such as the following: SET + * a=:value1, b=:value2 DELETE :value3, :value4, :value5 + *

+ *

For more information on update expressions, see Modifying + * Items and Attributes in the Amazon DynamoDB Developer + * Guide.

+ * @public + */ + UpdateExpression?: string | undefined; + /** + *

A condition that must be satisfied in order for a conditional update to + * succeed.

+ *

An expression can contain any of the following:

+ *
    + *
  • + *

    Functions: attribute_exists | attribute_not_exists | attribute_type | + * contains | begins_with | size + *

    + *

    These function names are case-sensitive.

    + *
  • + *
  • + *

    Comparison operators: = | <> | + * < | > | <= | >= | + * BETWEEN | IN + *

    + *
  • + *
  • + *

    Logical operators: AND | OR | NOT + *

    + *
  • + *
+ *

For more information about condition expressions, see Specifying Conditions in the Amazon DynamoDB Developer + * Guide.

+ * @public + */ + ConditionExpression?: string | undefined; + /** + *

One or more substitution tokens for attribute names in an expression. The following + * are some use cases for using ExpressionAttributeNames:

+ *
    + *
  • + *

    To access an attribute whose name conflicts with a DynamoDB reserved + * word.

    + *
  • + *
  • + *

    To create a placeholder for repeating occurrences of an attribute name in an + * expression.

    + *
  • + *
  • + *

    To prevent special characters in an attribute name from being misinterpreted + * in an expression.

    + *
  • + *
+ *

Use the # character in an expression to dereference + * an attribute name. For example, consider the following attribute name:

+ *
    + *
  • + *

    + * Percentile + *

    + *
  • + *
+ *

The name of this attribute conflicts with a reserved word, so it cannot be used + * directly in an expression. (For the complete list of reserved words, see Reserved Words in the Amazon DynamoDB Developer + * Guide.) To work around this, you could specify the following for + * ExpressionAttributeNames:

+ *
    + *
  • + *

    + * \{"#P":"Percentile"\} + *

    + *
  • + *
+ *

You could then use this substitution in an expression, as in this example:

+ *
    + *
  • + *

    + * #P = :val + *

    + *
  • + *
+ * + *

Tokens that begin with the : character are + * expression attribute values, which are placeholders for the + * actual value at runtime.

+ *
+ *

For more information about expression attribute names, see Specifying Item Attributes in the Amazon DynamoDB Developer + * Guide.

+ * @public + */ + ExpressionAttributeNames?: Record | undefined; + /** + *

One or more values that can be substituted in an expression.

+ *

Use the : (colon) character in an expression to + * dereference an attribute value. For example, suppose that you wanted to check whether + * the value of the ProductStatus attribute was one of the following:

+ *

+ * Available | Backordered | Discontinued + *

+ *

You would first need to specify ExpressionAttributeValues as + * follows:

+ *

+ * \{ ":avail":\{"S":"Available"\}, ":back":\{"S":"Backordered"\}, + * ":disc":\{"S":"Discontinued"\} \} + *

+ *

You could then use these values in an expression, such as this:

+ *

+ * ProductStatus IN (:avail, :back, :disc) + *

+ *

For more information on expression attribute values, see Condition Expressions in the Amazon DynamoDB Developer + * Guide.

+ * @public + */ + ExpressionAttributeValues?: Record | undefined; + /** + *

An optional parameter that returns the item attributes for an UpdateItem + * operation that failed a condition check.

+ *

There is no additional cost associated with requesting a return value aside from the + * small network and processing overhead of receiving a larger response. No read capacity + * units are consumed.

+ * @public + */ + ReturnValuesOnConditionCheckFailure?: ReturnValuesOnConditionCheckFailure | undefined; +} +/** + *

A list of requests that can perform update, put, delete, or check operations on + * multiple items in one or more tables atomically.

+ * @public + */ +export interface TransactWriteItem { + /** + *

A request to perform a check item operation.

+ * @public + */ + ConditionCheck?: ConditionCheck | undefined; + /** + *

A request to perform a PutItem operation.

+ * @public + */ + Put?: Put | undefined; + /** + *

A request to perform a DeleteItem operation.

+ * @public + */ + Delete?: Delete | undefined; + /** + *

A request to perform an UpdateItem operation.

+ * @public + */ + Update?: Update | undefined; +} +/** + * @public + */ +export interface TransactWriteItemsInput { + /** + *

An ordered array of up to 100 TransactWriteItem objects, each of which + * contains a ConditionCheck, Put, Update, or + * Delete object. These can operate on items in different tables, but the + * tables must reside in the same Amazon Web Services account and Region, and no two of them + * can operate on the same item.

+ * @public + */ + TransactItems: TransactWriteItem[] | undefined; + /** + *

Determines the level of detail about either provisioned or on-demand throughput + * consumption that is returned in the response:

+ *
    + *
  • + *

    + * INDEXES - The response includes the aggregate + * ConsumedCapacity for the operation, together with + * ConsumedCapacity for each table and secondary index that was + * accessed.

    + *

    Note that some operations, such as GetItem and + * BatchGetItem, do not access any indexes at all. In these cases, + * specifying INDEXES will only return ConsumedCapacity + * information for table(s).

    + *
  • + *
  • + *

    + * TOTAL - The response includes only the aggregate + * ConsumedCapacity for the operation.

    + *
  • + *
  • + *

    + * NONE - No ConsumedCapacity details are included in the + * response.

    + *
  • + *
+ * @public + */ + ReturnConsumedCapacity?: ReturnConsumedCapacity | undefined; + /** + *

Determines whether item collection metrics are returned. If set to SIZE, + * the response includes statistics about item collections (if any), that were modified + * during the operation and are returned in the response. If set to NONE (the + * default), no statistics are returned.

+ * @public + */ + ReturnItemCollectionMetrics?: ReturnItemCollectionMetrics | undefined; + /** + *

Providing a ClientRequestToken makes the call to + * TransactWriteItems idempotent, meaning that multiple identical calls + * have the same effect as one single call.

+ *

Although multiple identical calls using the same client request token produce the same + * result on the server (no side effects), the responses to the calls might not be the + * same. If the ReturnConsumedCapacity parameter is set, then the initial + * TransactWriteItems call returns the amount of write capacity units + * consumed in making the changes. Subsequent TransactWriteItems calls with + * the same client token return the number of read capacity units consumed in reading the + * item.

+ *

A client request token is valid for 10 minutes after the first request that uses it is + * completed. After 10 minutes, any request with the same client token is treated as a new + * request. Do not resubmit the same request with the same client token for more than 10 + * minutes, or the result might not be idempotent.

+ *

If you submit a request with the same client token but a change in other parameters + * within the 10-minute idempotency window, DynamoDB returns an + * IdempotentParameterMismatch exception.

+ * @public + */ + ClientRequestToken?: string | undefined; +} diff --git a/amplify/functions/fetchDocuments/node_modules/@aws-sdk/client-dynamodb/dist-types/pagination/Interfaces.d.ts b/amplify/functions/fetchDocuments/node_modules/@aws-sdk/client-dynamodb/dist-types/pagination/Interfaces.d.ts new file mode 100644 index 0000000..b27919e --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@aws-sdk/client-dynamodb/dist-types/pagination/Interfaces.d.ts @@ -0,0 +1,8 @@ +import { PaginationConfiguration } from "@smithy/types"; +import { DynamoDBClient } from "../DynamoDBClient"; +/** + * @public + */ +export interface DynamoDBPaginationConfiguration extends PaginationConfiguration { + client: DynamoDBClient; +} diff --git a/amplify/functions/fetchDocuments/node_modules/@aws-sdk/client-dynamodb/dist-types/pagination/ListContributorInsightsPaginator.d.ts b/amplify/functions/fetchDocuments/node_modules/@aws-sdk/client-dynamodb/dist-types/pagination/ListContributorInsightsPaginator.d.ts new file mode 100644 index 0000000..2ca65b1 --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@aws-sdk/client-dynamodb/dist-types/pagination/ListContributorInsightsPaginator.d.ts @@ -0,0 +1,7 @@ +import { Paginator } from "@smithy/types"; +import { ListContributorInsightsCommandInput, ListContributorInsightsCommandOutput } from "../commands/ListContributorInsightsCommand"; +import { DynamoDBPaginationConfiguration } from "./Interfaces"; +/** + * @public + */ +export declare const paginateListContributorInsights: (config: DynamoDBPaginationConfiguration, input: ListContributorInsightsCommandInput, ...rest: any[]) => Paginator; diff --git a/amplify/functions/fetchDocuments/node_modules/@aws-sdk/client-dynamodb/dist-types/pagination/ListExportsPaginator.d.ts b/amplify/functions/fetchDocuments/node_modules/@aws-sdk/client-dynamodb/dist-types/pagination/ListExportsPaginator.d.ts new file mode 100644 index 0000000..304892a --- /dev/null +++ 
b/amplify/functions/fetchDocuments/node_modules/@aws-sdk/client-dynamodb/dist-types/pagination/ListExportsPaginator.d.ts @@ -0,0 +1,7 @@ +import { Paginator } from "@smithy/types"; +import { ListExportsCommandInput, ListExportsCommandOutput } from "../commands/ListExportsCommand"; +import { DynamoDBPaginationConfiguration } from "./Interfaces"; +/** + * @public + */ +export declare const paginateListExports: (config: DynamoDBPaginationConfiguration, input: ListExportsCommandInput, ...rest: any[]) => Paginator; diff --git a/amplify/functions/fetchDocuments/node_modules/@aws-sdk/client-dynamodb/dist-types/pagination/ListImportsPaginator.d.ts b/amplify/functions/fetchDocuments/node_modules/@aws-sdk/client-dynamodb/dist-types/pagination/ListImportsPaginator.d.ts new file mode 100644 index 0000000..0a2639c --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@aws-sdk/client-dynamodb/dist-types/pagination/ListImportsPaginator.d.ts @@ -0,0 +1,7 @@ +import { Paginator } from "@smithy/types"; +import { ListImportsCommandInput, ListImportsCommandOutput } from "../commands/ListImportsCommand"; +import { DynamoDBPaginationConfiguration } from "./Interfaces"; +/** + * @public + */ +export declare const paginateListImports: (config: DynamoDBPaginationConfiguration, input: ListImportsCommandInput, ...rest: any[]) => Paginator; diff --git a/amplify/functions/fetchDocuments/node_modules/@aws-sdk/client-dynamodb/dist-types/pagination/ListTablesPaginator.d.ts b/amplify/functions/fetchDocuments/node_modules/@aws-sdk/client-dynamodb/dist-types/pagination/ListTablesPaginator.d.ts new file mode 100644 index 0000000..38cff29 --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@aws-sdk/client-dynamodb/dist-types/pagination/ListTablesPaginator.d.ts @@ -0,0 +1,7 @@ +import { Paginator } from "@smithy/types"; +import { ListTablesCommandInput, ListTablesCommandOutput } from "../commands/ListTablesCommand"; +import { DynamoDBPaginationConfiguration } from 
"./Interfaces"; +/** + * @public + */ +export declare const paginateListTables: (config: DynamoDBPaginationConfiguration, input: ListTablesCommandInput, ...rest: any[]) => Paginator; diff --git a/amplify/functions/fetchDocuments/node_modules/@aws-sdk/client-dynamodb/dist-types/pagination/QueryPaginator.d.ts b/amplify/functions/fetchDocuments/node_modules/@aws-sdk/client-dynamodb/dist-types/pagination/QueryPaginator.d.ts new file mode 100644 index 0000000..d6e9c31 --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@aws-sdk/client-dynamodb/dist-types/pagination/QueryPaginator.d.ts @@ -0,0 +1,7 @@ +import { Paginator } from "@smithy/types"; +import { QueryCommandInput, QueryCommandOutput } from "../commands/QueryCommand"; +import { DynamoDBPaginationConfiguration } from "./Interfaces"; +/** + * @public + */ +export declare const paginateQuery: (config: DynamoDBPaginationConfiguration, input: QueryCommandInput, ...rest: any[]) => Paginator; diff --git a/amplify/functions/fetchDocuments/node_modules/@aws-sdk/client-dynamodb/dist-types/pagination/ScanPaginator.d.ts b/amplify/functions/fetchDocuments/node_modules/@aws-sdk/client-dynamodb/dist-types/pagination/ScanPaginator.d.ts new file mode 100644 index 0000000..4902f31 --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@aws-sdk/client-dynamodb/dist-types/pagination/ScanPaginator.d.ts @@ -0,0 +1,7 @@ +import { Paginator } from "@smithy/types"; +import { ScanCommandInput, ScanCommandOutput } from "../commands/ScanCommand"; +import { DynamoDBPaginationConfiguration } from "./Interfaces"; +/** + * @public + */ +export declare const paginateScan: (config: DynamoDBPaginationConfiguration, input: ScanCommandInput, ...rest: any[]) => Paginator; diff --git a/amplify/functions/fetchDocuments/node_modules/@aws-sdk/client-dynamodb/dist-types/pagination/index.d.ts b/amplify/functions/fetchDocuments/node_modules/@aws-sdk/client-dynamodb/dist-types/pagination/index.d.ts new file mode 100644 index 
0000000..a6dfcd0 --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@aws-sdk/client-dynamodb/dist-types/pagination/index.d.ts @@ -0,0 +1,7 @@ +export * from "./Interfaces"; +export * from "./ListContributorInsightsPaginator"; +export * from "./ListExportsPaginator"; +export * from "./ListImportsPaginator"; +export * from "./ListTablesPaginator"; +export * from "./QueryPaginator"; +export * from "./ScanPaginator"; diff --git a/amplify/functions/fetchDocuments/node_modules/@aws-sdk/client-dynamodb/dist-types/protocols/Aws_json1_0.d.ts b/amplify/functions/fetchDocuments/node_modules/@aws-sdk/client-dynamodb/dist-types/protocols/Aws_json1_0.d.ts new file mode 100644 index 0000000..963dc98 --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@aws-sdk/client-dynamodb/dist-types/protocols/Aws_json1_0.d.ts @@ -0,0 +1,515 @@ +import { HttpRequest as __HttpRequest, HttpResponse as __HttpResponse } from "@smithy/protocol-http"; +import { SerdeContext as __SerdeContext } from "@smithy/types"; +import { BatchExecuteStatementCommandInput, BatchExecuteStatementCommandOutput } from "../commands/BatchExecuteStatementCommand"; +import { BatchGetItemCommandInput, BatchGetItemCommandOutput } from "../commands/BatchGetItemCommand"; +import { BatchWriteItemCommandInput, BatchWriteItemCommandOutput } from "../commands/BatchWriteItemCommand"; +import { CreateBackupCommandInput, CreateBackupCommandOutput } from "../commands/CreateBackupCommand"; +import { CreateGlobalTableCommandInput, CreateGlobalTableCommandOutput } from "../commands/CreateGlobalTableCommand"; +import { CreateTableCommandInput, CreateTableCommandOutput } from "../commands/CreateTableCommand"; +import { DeleteBackupCommandInput, DeleteBackupCommandOutput } from "../commands/DeleteBackupCommand"; +import { DeleteItemCommandInput, DeleteItemCommandOutput } from "../commands/DeleteItemCommand"; +import { DeleteResourcePolicyCommandInput, DeleteResourcePolicyCommandOutput } from 
"../commands/DeleteResourcePolicyCommand"; +import { DeleteTableCommandInput, DeleteTableCommandOutput } from "../commands/DeleteTableCommand"; +import { DescribeBackupCommandInput, DescribeBackupCommandOutput } from "../commands/DescribeBackupCommand"; +import { DescribeContinuousBackupsCommandInput, DescribeContinuousBackupsCommandOutput } from "../commands/DescribeContinuousBackupsCommand"; +import { DescribeContributorInsightsCommandInput, DescribeContributorInsightsCommandOutput } from "../commands/DescribeContributorInsightsCommand"; +import { DescribeEndpointsCommandInput, DescribeEndpointsCommandOutput } from "../commands/DescribeEndpointsCommand"; +import { DescribeExportCommandInput, DescribeExportCommandOutput } from "../commands/DescribeExportCommand"; +import { DescribeGlobalTableCommandInput, DescribeGlobalTableCommandOutput } from "../commands/DescribeGlobalTableCommand"; +import { DescribeGlobalTableSettingsCommandInput, DescribeGlobalTableSettingsCommandOutput } from "../commands/DescribeGlobalTableSettingsCommand"; +import { DescribeImportCommandInput, DescribeImportCommandOutput } from "../commands/DescribeImportCommand"; +import { DescribeKinesisStreamingDestinationCommandInput, DescribeKinesisStreamingDestinationCommandOutput } from "../commands/DescribeKinesisStreamingDestinationCommand"; +import { DescribeLimitsCommandInput, DescribeLimitsCommandOutput } from "../commands/DescribeLimitsCommand"; +import { DescribeTableCommandInput, DescribeTableCommandOutput } from "../commands/DescribeTableCommand"; +import { DescribeTableReplicaAutoScalingCommandInput, DescribeTableReplicaAutoScalingCommandOutput } from "../commands/DescribeTableReplicaAutoScalingCommand"; +import { DescribeTimeToLiveCommandInput, DescribeTimeToLiveCommandOutput } from "../commands/DescribeTimeToLiveCommand"; +import { DisableKinesisStreamingDestinationCommandInput, DisableKinesisStreamingDestinationCommandOutput } from 
"../commands/DisableKinesisStreamingDestinationCommand"; +import { EnableKinesisStreamingDestinationCommandInput, EnableKinesisStreamingDestinationCommandOutput } from "../commands/EnableKinesisStreamingDestinationCommand"; +import { ExecuteStatementCommandInput, ExecuteStatementCommandOutput } from "../commands/ExecuteStatementCommand"; +import { ExecuteTransactionCommandInput, ExecuteTransactionCommandOutput } from "../commands/ExecuteTransactionCommand"; +import { ExportTableToPointInTimeCommandInput, ExportTableToPointInTimeCommandOutput } from "../commands/ExportTableToPointInTimeCommand"; +import { GetItemCommandInput, GetItemCommandOutput } from "../commands/GetItemCommand"; +import { GetResourcePolicyCommandInput, GetResourcePolicyCommandOutput } from "../commands/GetResourcePolicyCommand"; +import { ImportTableCommandInput, ImportTableCommandOutput } from "../commands/ImportTableCommand"; +import { ListBackupsCommandInput, ListBackupsCommandOutput } from "../commands/ListBackupsCommand"; +import { ListContributorInsightsCommandInput, ListContributorInsightsCommandOutput } from "../commands/ListContributorInsightsCommand"; +import { ListExportsCommandInput, ListExportsCommandOutput } from "../commands/ListExportsCommand"; +import { ListGlobalTablesCommandInput, ListGlobalTablesCommandOutput } from "../commands/ListGlobalTablesCommand"; +import { ListImportsCommandInput, ListImportsCommandOutput } from "../commands/ListImportsCommand"; +import { ListTablesCommandInput, ListTablesCommandOutput } from "../commands/ListTablesCommand"; +import { ListTagsOfResourceCommandInput, ListTagsOfResourceCommandOutput } from "../commands/ListTagsOfResourceCommand"; +import { PutItemCommandInput, PutItemCommandOutput } from "../commands/PutItemCommand"; +import { PutResourcePolicyCommandInput, PutResourcePolicyCommandOutput } from "../commands/PutResourcePolicyCommand"; +import { QueryCommandInput, QueryCommandOutput } from "../commands/QueryCommand"; +import { 
RestoreTableFromBackupCommandInput, RestoreTableFromBackupCommandOutput } from "../commands/RestoreTableFromBackupCommand"; +import { RestoreTableToPointInTimeCommandInput, RestoreTableToPointInTimeCommandOutput } from "../commands/RestoreTableToPointInTimeCommand"; +import { ScanCommandInput, ScanCommandOutput } from "../commands/ScanCommand"; +import { TagResourceCommandInput, TagResourceCommandOutput } from "../commands/TagResourceCommand"; +import { TransactGetItemsCommandInput, TransactGetItemsCommandOutput } from "../commands/TransactGetItemsCommand"; +import { TransactWriteItemsCommandInput, TransactWriteItemsCommandOutput } from "../commands/TransactWriteItemsCommand"; +import { UntagResourceCommandInput, UntagResourceCommandOutput } from "../commands/UntagResourceCommand"; +import { UpdateContinuousBackupsCommandInput, UpdateContinuousBackupsCommandOutput } from "../commands/UpdateContinuousBackupsCommand"; +import { UpdateContributorInsightsCommandInput, UpdateContributorInsightsCommandOutput } from "../commands/UpdateContributorInsightsCommand"; +import { UpdateGlobalTableCommandInput, UpdateGlobalTableCommandOutput } from "../commands/UpdateGlobalTableCommand"; +import { UpdateGlobalTableSettingsCommandInput, UpdateGlobalTableSettingsCommandOutput } from "../commands/UpdateGlobalTableSettingsCommand"; +import { UpdateItemCommandInput, UpdateItemCommandOutput } from "../commands/UpdateItemCommand"; +import { UpdateKinesisStreamingDestinationCommandInput, UpdateKinesisStreamingDestinationCommandOutput } from "../commands/UpdateKinesisStreamingDestinationCommand"; +import { UpdateTableCommandInput, UpdateTableCommandOutput } from "../commands/UpdateTableCommand"; +import { UpdateTableReplicaAutoScalingCommandInput, UpdateTableReplicaAutoScalingCommandOutput } from "../commands/UpdateTableReplicaAutoScalingCommand"; +import { UpdateTimeToLiveCommandInput, UpdateTimeToLiveCommandOutput } from "../commands/UpdateTimeToLiveCommand"; +/** + * 
serializeAws_json1_0BatchExecuteStatementCommand + */ +export declare const se_BatchExecuteStatementCommand: (input: BatchExecuteStatementCommandInput, context: __SerdeContext) => Promise<__HttpRequest>; +/** + * serializeAws_json1_0BatchGetItemCommand + */ +export declare const se_BatchGetItemCommand: (input: BatchGetItemCommandInput, context: __SerdeContext) => Promise<__HttpRequest>; +/** + * serializeAws_json1_0BatchWriteItemCommand + */ +export declare const se_BatchWriteItemCommand: (input: BatchWriteItemCommandInput, context: __SerdeContext) => Promise<__HttpRequest>; +/** + * serializeAws_json1_0CreateBackupCommand + */ +export declare const se_CreateBackupCommand: (input: CreateBackupCommandInput, context: __SerdeContext) => Promise<__HttpRequest>; +/** + * serializeAws_json1_0CreateGlobalTableCommand + */ +export declare const se_CreateGlobalTableCommand: (input: CreateGlobalTableCommandInput, context: __SerdeContext) => Promise<__HttpRequest>; +/** + * serializeAws_json1_0CreateTableCommand + */ +export declare const se_CreateTableCommand: (input: CreateTableCommandInput, context: __SerdeContext) => Promise<__HttpRequest>; +/** + * serializeAws_json1_0DeleteBackupCommand + */ +export declare const se_DeleteBackupCommand: (input: DeleteBackupCommandInput, context: __SerdeContext) => Promise<__HttpRequest>; +/** + * serializeAws_json1_0DeleteItemCommand + */ +export declare const se_DeleteItemCommand: (input: DeleteItemCommandInput, context: __SerdeContext) => Promise<__HttpRequest>; +/** + * serializeAws_json1_0DeleteResourcePolicyCommand + */ +export declare const se_DeleteResourcePolicyCommand: (input: DeleteResourcePolicyCommandInput, context: __SerdeContext) => Promise<__HttpRequest>; +/** + * serializeAws_json1_0DeleteTableCommand + */ +export declare const se_DeleteTableCommand: (input: DeleteTableCommandInput, context: __SerdeContext) => Promise<__HttpRequest>; +/** + * serializeAws_json1_0DescribeBackupCommand + */ +export declare const 
se_DescribeBackupCommand: (input: DescribeBackupCommandInput, context: __SerdeContext) => Promise<__HttpRequest>; +/** + * serializeAws_json1_0DescribeContinuousBackupsCommand + */ +export declare const se_DescribeContinuousBackupsCommand: (input: DescribeContinuousBackupsCommandInput, context: __SerdeContext) => Promise<__HttpRequest>; +/** + * serializeAws_json1_0DescribeContributorInsightsCommand + */ +export declare const se_DescribeContributorInsightsCommand: (input: DescribeContributorInsightsCommandInput, context: __SerdeContext) => Promise<__HttpRequest>; +/** + * serializeAws_json1_0DescribeEndpointsCommand + */ +export declare const se_DescribeEndpointsCommand: (input: DescribeEndpointsCommandInput, context: __SerdeContext) => Promise<__HttpRequest>; +/** + * serializeAws_json1_0DescribeExportCommand + */ +export declare const se_DescribeExportCommand: (input: DescribeExportCommandInput, context: __SerdeContext) => Promise<__HttpRequest>; +/** + * serializeAws_json1_0DescribeGlobalTableCommand + */ +export declare const se_DescribeGlobalTableCommand: (input: DescribeGlobalTableCommandInput, context: __SerdeContext) => Promise<__HttpRequest>; +/** + * serializeAws_json1_0DescribeGlobalTableSettingsCommand + */ +export declare const se_DescribeGlobalTableSettingsCommand: (input: DescribeGlobalTableSettingsCommandInput, context: __SerdeContext) => Promise<__HttpRequest>; +/** + * serializeAws_json1_0DescribeImportCommand + */ +export declare const se_DescribeImportCommand: (input: DescribeImportCommandInput, context: __SerdeContext) => Promise<__HttpRequest>; +/** + * serializeAws_json1_0DescribeKinesisStreamingDestinationCommand + */ +export declare const se_DescribeKinesisStreamingDestinationCommand: (input: DescribeKinesisStreamingDestinationCommandInput, context: __SerdeContext) => Promise<__HttpRequest>; +/** + * serializeAws_json1_0DescribeLimitsCommand + */ +export declare const se_DescribeLimitsCommand: (input: DescribeLimitsCommandInput, context: 
__SerdeContext) => Promise<__HttpRequest>; +/** + * serializeAws_json1_0DescribeTableCommand + */ +export declare const se_DescribeTableCommand: (input: DescribeTableCommandInput, context: __SerdeContext) => Promise<__HttpRequest>; +/** + * serializeAws_json1_0DescribeTableReplicaAutoScalingCommand + */ +export declare const se_DescribeTableReplicaAutoScalingCommand: (input: DescribeTableReplicaAutoScalingCommandInput, context: __SerdeContext) => Promise<__HttpRequest>; +/** + * serializeAws_json1_0DescribeTimeToLiveCommand + */ +export declare const se_DescribeTimeToLiveCommand: (input: DescribeTimeToLiveCommandInput, context: __SerdeContext) => Promise<__HttpRequest>; +/** + * serializeAws_json1_0DisableKinesisStreamingDestinationCommand + */ +export declare const se_DisableKinesisStreamingDestinationCommand: (input: DisableKinesisStreamingDestinationCommandInput, context: __SerdeContext) => Promise<__HttpRequest>; +/** + * serializeAws_json1_0EnableKinesisStreamingDestinationCommand + */ +export declare const se_EnableKinesisStreamingDestinationCommand: (input: EnableKinesisStreamingDestinationCommandInput, context: __SerdeContext) => Promise<__HttpRequest>; +/** + * serializeAws_json1_0ExecuteStatementCommand + */ +export declare const se_ExecuteStatementCommand: (input: ExecuteStatementCommandInput, context: __SerdeContext) => Promise<__HttpRequest>; +/** + * serializeAws_json1_0ExecuteTransactionCommand + */ +export declare const se_ExecuteTransactionCommand: (input: ExecuteTransactionCommandInput, context: __SerdeContext) => Promise<__HttpRequest>; +/** + * serializeAws_json1_0ExportTableToPointInTimeCommand + */ +export declare const se_ExportTableToPointInTimeCommand: (input: ExportTableToPointInTimeCommandInput, context: __SerdeContext) => Promise<__HttpRequest>; +/** + * serializeAws_json1_0GetItemCommand + */ +export declare const se_GetItemCommand: (input: GetItemCommandInput, context: __SerdeContext) => Promise<__HttpRequest>; +/** + * 
serializeAws_json1_0GetResourcePolicyCommand + */ +export declare const se_GetResourcePolicyCommand: (input: GetResourcePolicyCommandInput, context: __SerdeContext) => Promise<__HttpRequest>; +/** + * serializeAws_json1_0ImportTableCommand + */ +export declare const se_ImportTableCommand: (input: ImportTableCommandInput, context: __SerdeContext) => Promise<__HttpRequest>; +/** + * serializeAws_json1_0ListBackupsCommand + */ +export declare const se_ListBackupsCommand: (input: ListBackupsCommandInput, context: __SerdeContext) => Promise<__HttpRequest>; +/** + * serializeAws_json1_0ListContributorInsightsCommand + */ +export declare const se_ListContributorInsightsCommand: (input: ListContributorInsightsCommandInput, context: __SerdeContext) => Promise<__HttpRequest>; +/** + * serializeAws_json1_0ListExportsCommand + */ +export declare const se_ListExportsCommand: (input: ListExportsCommandInput, context: __SerdeContext) => Promise<__HttpRequest>; +/** + * serializeAws_json1_0ListGlobalTablesCommand + */ +export declare const se_ListGlobalTablesCommand: (input: ListGlobalTablesCommandInput, context: __SerdeContext) => Promise<__HttpRequest>; +/** + * serializeAws_json1_0ListImportsCommand + */ +export declare const se_ListImportsCommand: (input: ListImportsCommandInput, context: __SerdeContext) => Promise<__HttpRequest>; +/** + * serializeAws_json1_0ListTablesCommand + */ +export declare const se_ListTablesCommand: (input: ListTablesCommandInput, context: __SerdeContext) => Promise<__HttpRequest>; +/** + * serializeAws_json1_0ListTagsOfResourceCommand + */ +export declare const se_ListTagsOfResourceCommand: (input: ListTagsOfResourceCommandInput, context: __SerdeContext) => Promise<__HttpRequest>; +/** + * serializeAws_json1_0PutItemCommand + */ +export declare const se_PutItemCommand: (input: PutItemCommandInput, context: __SerdeContext) => Promise<__HttpRequest>; +/** + * serializeAws_json1_0PutResourcePolicyCommand + */ +export declare const 
se_PutResourcePolicyCommand: (input: PutResourcePolicyCommandInput, context: __SerdeContext) => Promise<__HttpRequest>; +/** + * serializeAws_json1_0QueryCommand + */ +export declare const se_QueryCommand: (input: QueryCommandInput, context: __SerdeContext) => Promise<__HttpRequest>; +/** + * serializeAws_json1_0RestoreTableFromBackupCommand + */ +export declare const se_RestoreTableFromBackupCommand: (input: RestoreTableFromBackupCommandInput, context: __SerdeContext) => Promise<__HttpRequest>; +/** + * serializeAws_json1_0RestoreTableToPointInTimeCommand + */ +export declare const se_RestoreTableToPointInTimeCommand: (input: RestoreTableToPointInTimeCommandInput, context: __SerdeContext) => Promise<__HttpRequest>; +/** + * serializeAws_json1_0ScanCommand + */ +export declare const se_ScanCommand: (input: ScanCommandInput, context: __SerdeContext) => Promise<__HttpRequest>; +/** + * serializeAws_json1_0TagResourceCommand + */ +export declare const se_TagResourceCommand: (input: TagResourceCommandInput, context: __SerdeContext) => Promise<__HttpRequest>; +/** + * serializeAws_json1_0TransactGetItemsCommand + */ +export declare const se_TransactGetItemsCommand: (input: TransactGetItemsCommandInput, context: __SerdeContext) => Promise<__HttpRequest>; +/** + * serializeAws_json1_0TransactWriteItemsCommand + */ +export declare const se_TransactWriteItemsCommand: (input: TransactWriteItemsCommandInput, context: __SerdeContext) => Promise<__HttpRequest>; +/** + * serializeAws_json1_0UntagResourceCommand + */ +export declare const se_UntagResourceCommand: (input: UntagResourceCommandInput, context: __SerdeContext) => Promise<__HttpRequest>; +/** + * serializeAws_json1_0UpdateContinuousBackupsCommand + */ +export declare const se_UpdateContinuousBackupsCommand: (input: UpdateContinuousBackupsCommandInput, context: __SerdeContext) => Promise<__HttpRequest>; +/** + * serializeAws_json1_0UpdateContributorInsightsCommand + */ +export declare const 
se_UpdateContributorInsightsCommand: (input: UpdateContributorInsightsCommandInput, context: __SerdeContext) => Promise<__HttpRequest>; +/** + * serializeAws_json1_0UpdateGlobalTableCommand + */ +export declare const se_UpdateGlobalTableCommand: (input: UpdateGlobalTableCommandInput, context: __SerdeContext) => Promise<__HttpRequest>; +/** + * serializeAws_json1_0UpdateGlobalTableSettingsCommand + */ +export declare const se_UpdateGlobalTableSettingsCommand: (input: UpdateGlobalTableSettingsCommandInput, context: __SerdeContext) => Promise<__HttpRequest>; +/** + * serializeAws_json1_0UpdateItemCommand + */ +export declare const se_UpdateItemCommand: (input: UpdateItemCommandInput, context: __SerdeContext) => Promise<__HttpRequest>; +/** + * serializeAws_json1_0UpdateKinesisStreamingDestinationCommand + */ +export declare const se_UpdateKinesisStreamingDestinationCommand: (input: UpdateKinesisStreamingDestinationCommandInput, context: __SerdeContext) => Promise<__HttpRequest>; +/** + * serializeAws_json1_0UpdateTableCommand + */ +export declare const se_UpdateTableCommand: (input: UpdateTableCommandInput, context: __SerdeContext) => Promise<__HttpRequest>; +/** + * serializeAws_json1_0UpdateTableReplicaAutoScalingCommand + */ +export declare const se_UpdateTableReplicaAutoScalingCommand: (input: UpdateTableReplicaAutoScalingCommandInput, context: __SerdeContext) => Promise<__HttpRequest>; +/** + * serializeAws_json1_0UpdateTimeToLiveCommand + */ +export declare const se_UpdateTimeToLiveCommand: (input: UpdateTimeToLiveCommandInput, context: __SerdeContext) => Promise<__HttpRequest>; +/** + * deserializeAws_json1_0BatchExecuteStatementCommand + */ +export declare const de_BatchExecuteStatementCommand: (output: __HttpResponse, context: __SerdeContext) => Promise; +/** + * deserializeAws_json1_0BatchGetItemCommand + */ +export declare const de_BatchGetItemCommand: (output: __HttpResponse, context: __SerdeContext) => Promise; +/** + * 
deserializeAws_json1_0BatchWriteItemCommand + */ +export declare const de_BatchWriteItemCommand: (output: __HttpResponse, context: __SerdeContext) => Promise; +/** + * deserializeAws_json1_0CreateBackupCommand + */ +export declare const de_CreateBackupCommand: (output: __HttpResponse, context: __SerdeContext) => Promise; +/** + * deserializeAws_json1_0CreateGlobalTableCommand + */ +export declare const de_CreateGlobalTableCommand: (output: __HttpResponse, context: __SerdeContext) => Promise; +/** + * deserializeAws_json1_0CreateTableCommand + */ +export declare const de_CreateTableCommand: (output: __HttpResponse, context: __SerdeContext) => Promise; +/** + * deserializeAws_json1_0DeleteBackupCommand + */ +export declare const de_DeleteBackupCommand: (output: __HttpResponse, context: __SerdeContext) => Promise; +/** + * deserializeAws_json1_0DeleteItemCommand + */ +export declare const de_DeleteItemCommand: (output: __HttpResponse, context: __SerdeContext) => Promise; +/** + * deserializeAws_json1_0DeleteResourcePolicyCommand + */ +export declare const de_DeleteResourcePolicyCommand: (output: __HttpResponse, context: __SerdeContext) => Promise; +/** + * deserializeAws_json1_0DeleteTableCommand + */ +export declare const de_DeleteTableCommand: (output: __HttpResponse, context: __SerdeContext) => Promise; +/** + * deserializeAws_json1_0DescribeBackupCommand + */ +export declare const de_DescribeBackupCommand: (output: __HttpResponse, context: __SerdeContext) => Promise; +/** + * deserializeAws_json1_0DescribeContinuousBackupsCommand + */ +export declare const de_DescribeContinuousBackupsCommand: (output: __HttpResponse, context: __SerdeContext) => Promise; +/** + * deserializeAws_json1_0DescribeContributorInsightsCommand + */ +export declare const de_DescribeContributorInsightsCommand: (output: __HttpResponse, context: __SerdeContext) => Promise; +/** + * deserializeAws_json1_0DescribeEndpointsCommand + */ +export declare const de_DescribeEndpointsCommand: (output: 
__HttpResponse, context: __SerdeContext) => Promise; +/** + * deserializeAws_json1_0DescribeExportCommand + */ +export declare const de_DescribeExportCommand: (output: __HttpResponse, context: __SerdeContext) => Promise; +/** + * deserializeAws_json1_0DescribeGlobalTableCommand + */ +export declare const de_DescribeGlobalTableCommand: (output: __HttpResponse, context: __SerdeContext) => Promise; +/** + * deserializeAws_json1_0DescribeGlobalTableSettingsCommand + */ +export declare const de_DescribeGlobalTableSettingsCommand: (output: __HttpResponse, context: __SerdeContext) => Promise; +/** + * deserializeAws_json1_0DescribeImportCommand + */ +export declare const de_DescribeImportCommand: (output: __HttpResponse, context: __SerdeContext) => Promise; +/** + * deserializeAws_json1_0DescribeKinesisStreamingDestinationCommand + */ +export declare const de_DescribeKinesisStreamingDestinationCommand: (output: __HttpResponse, context: __SerdeContext) => Promise; +/** + * deserializeAws_json1_0DescribeLimitsCommand + */ +export declare const de_DescribeLimitsCommand: (output: __HttpResponse, context: __SerdeContext) => Promise; +/** + * deserializeAws_json1_0DescribeTableCommand + */ +export declare const de_DescribeTableCommand: (output: __HttpResponse, context: __SerdeContext) => Promise; +/** + * deserializeAws_json1_0DescribeTableReplicaAutoScalingCommand + */ +export declare const de_DescribeTableReplicaAutoScalingCommand: (output: __HttpResponse, context: __SerdeContext) => Promise; +/** + * deserializeAws_json1_0DescribeTimeToLiveCommand + */ +export declare const de_DescribeTimeToLiveCommand: (output: __HttpResponse, context: __SerdeContext) => Promise; +/** + * deserializeAws_json1_0DisableKinesisStreamingDestinationCommand + */ +export declare const de_DisableKinesisStreamingDestinationCommand: (output: __HttpResponse, context: __SerdeContext) => Promise; +/** + * deserializeAws_json1_0EnableKinesisStreamingDestinationCommand + */ +export declare const 
de_EnableKinesisStreamingDestinationCommand: (output: __HttpResponse, context: __SerdeContext) => Promise; +/** + * deserializeAws_json1_0ExecuteStatementCommand + */ +export declare const de_ExecuteStatementCommand: (output: __HttpResponse, context: __SerdeContext) => Promise; +/** + * deserializeAws_json1_0ExecuteTransactionCommand + */ +export declare const de_ExecuteTransactionCommand: (output: __HttpResponse, context: __SerdeContext) => Promise; +/** + * deserializeAws_json1_0ExportTableToPointInTimeCommand + */ +export declare const de_ExportTableToPointInTimeCommand: (output: __HttpResponse, context: __SerdeContext) => Promise; +/** + * deserializeAws_json1_0GetItemCommand + */ +export declare const de_GetItemCommand: (output: __HttpResponse, context: __SerdeContext) => Promise; +/** + * deserializeAws_json1_0GetResourcePolicyCommand + */ +export declare const de_GetResourcePolicyCommand: (output: __HttpResponse, context: __SerdeContext) => Promise; +/** + * deserializeAws_json1_0ImportTableCommand + */ +export declare const de_ImportTableCommand: (output: __HttpResponse, context: __SerdeContext) => Promise; +/** + * deserializeAws_json1_0ListBackupsCommand + */ +export declare const de_ListBackupsCommand: (output: __HttpResponse, context: __SerdeContext) => Promise; +/** + * deserializeAws_json1_0ListContributorInsightsCommand + */ +export declare const de_ListContributorInsightsCommand: (output: __HttpResponse, context: __SerdeContext) => Promise; +/** + * deserializeAws_json1_0ListExportsCommand + */ +export declare const de_ListExportsCommand: (output: __HttpResponse, context: __SerdeContext) => Promise; +/** + * deserializeAws_json1_0ListGlobalTablesCommand + */ +export declare const de_ListGlobalTablesCommand: (output: __HttpResponse, context: __SerdeContext) => Promise; +/** + * deserializeAws_json1_0ListImportsCommand + */ +export declare const de_ListImportsCommand: (output: __HttpResponse, context: __SerdeContext) => Promise; +/** + * 
deserializeAws_json1_0ListTablesCommand + */ +export declare const de_ListTablesCommand: (output: __HttpResponse, context: __SerdeContext) => Promise; +/** + * deserializeAws_json1_0ListTagsOfResourceCommand + */ +export declare const de_ListTagsOfResourceCommand: (output: __HttpResponse, context: __SerdeContext) => Promise; +/** + * deserializeAws_json1_0PutItemCommand + */ +export declare const de_PutItemCommand: (output: __HttpResponse, context: __SerdeContext) => Promise; +/** + * deserializeAws_json1_0PutResourcePolicyCommand + */ +export declare const de_PutResourcePolicyCommand: (output: __HttpResponse, context: __SerdeContext) => Promise; +/** + * deserializeAws_json1_0QueryCommand + */ +export declare const de_QueryCommand: (output: __HttpResponse, context: __SerdeContext) => Promise; +/** + * deserializeAws_json1_0RestoreTableFromBackupCommand + */ +export declare const de_RestoreTableFromBackupCommand: (output: __HttpResponse, context: __SerdeContext) => Promise; +/** + * deserializeAws_json1_0RestoreTableToPointInTimeCommand + */ +export declare const de_RestoreTableToPointInTimeCommand: (output: __HttpResponse, context: __SerdeContext) => Promise; +/** + * deserializeAws_json1_0ScanCommand + */ +export declare const de_ScanCommand: (output: __HttpResponse, context: __SerdeContext) => Promise; +/** + * deserializeAws_json1_0TagResourceCommand + */ +export declare const de_TagResourceCommand: (output: __HttpResponse, context: __SerdeContext) => Promise; +/** + * deserializeAws_json1_0TransactGetItemsCommand + */ +export declare const de_TransactGetItemsCommand: (output: __HttpResponse, context: __SerdeContext) => Promise; +/** + * deserializeAws_json1_0TransactWriteItemsCommand + */ +export declare const de_TransactWriteItemsCommand: (output: __HttpResponse, context: __SerdeContext) => Promise; +/** + * deserializeAws_json1_0UntagResourceCommand + */ +export declare const de_UntagResourceCommand: (output: __HttpResponse, context: __SerdeContext) => 
Promise; +/** + * deserializeAws_json1_0UpdateContinuousBackupsCommand + */ +export declare const de_UpdateContinuousBackupsCommand: (output: __HttpResponse, context: __SerdeContext) => Promise; +/** + * deserializeAws_json1_0UpdateContributorInsightsCommand + */ +export declare const de_UpdateContributorInsightsCommand: (output: __HttpResponse, context: __SerdeContext) => Promise; +/** + * deserializeAws_json1_0UpdateGlobalTableCommand + */ +export declare const de_UpdateGlobalTableCommand: (output: __HttpResponse, context: __SerdeContext) => Promise; +/** + * deserializeAws_json1_0UpdateGlobalTableSettingsCommand + */ +export declare const de_UpdateGlobalTableSettingsCommand: (output: __HttpResponse, context: __SerdeContext) => Promise; +/** + * deserializeAws_json1_0UpdateItemCommand + */ +export declare const de_UpdateItemCommand: (output: __HttpResponse, context: __SerdeContext) => Promise; +/** + * deserializeAws_json1_0UpdateKinesisStreamingDestinationCommand + */ +export declare const de_UpdateKinesisStreamingDestinationCommand: (output: __HttpResponse, context: __SerdeContext) => Promise; +/** + * deserializeAws_json1_0UpdateTableCommand + */ +export declare const de_UpdateTableCommand: (output: __HttpResponse, context: __SerdeContext) => Promise; +/** + * deserializeAws_json1_0UpdateTableReplicaAutoScalingCommand + */ +export declare const de_UpdateTableReplicaAutoScalingCommand: (output: __HttpResponse, context: __SerdeContext) => Promise; +/** + * deserializeAws_json1_0UpdateTimeToLiveCommand + */ +export declare const de_UpdateTimeToLiveCommand: (output: __HttpResponse, context: __SerdeContext) => Promise; diff --git a/amplify/functions/fetchDocuments/node_modules/@aws-sdk/client-dynamodb/dist-types/runtimeConfig.browser.d.ts b/amplify/functions/fetchDocuments/node_modules/@aws-sdk/client-dynamodb/dist-types/runtimeConfig.browser.d.ts new file mode 100644 index 0000000..e8b4a74 --- /dev/null +++ 
b/amplify/functions/fetchDocuments/node_modules/@aws-sdk/client-dynamodb/dist-types/runtimeConfig.browser.d.ts @@ -0,0 +1,55 @@ +import { FetchHttpHandler as RequestHandler } from "@smithy/fetch-http-handler"; +import { DynamoDBClientConfig } from "./DynamoDBClient"; +/** + * @internal + */ +export declare const getRuntimeConfig: (config: DynamoDBClientConfig) => { + runtime: string; + defaultsMode: import("@smithy/types").Provider; + accountIdEndpointMode: "disabled" | "preferred" | "required" | (() => Promise); + bodyLengthChecker: import("@smithy/types").BodyLengthCalculator; + credentialDefaultProvider: ((input: any) => import("@smithy/types").AwsCredentialIdentityProvider) | ((_: unknown) => () => Promise); + defaultUserAgentProvider: (config?: import("@aws-sdk/util-user-agent-browser").PreviouslyResolved | undefined) => Promise; + endpointDiscoveryEnabledProvider: import("@smithy/types").Provider; + maxAttempts: number | import("@smithy/types").Provider; + region: string | import("@smithy/types").Provider; + requestHandler: import("@smithy/protocol-http").HttpHandler | RequestHandler; + retryMode: string | import("@smithy/types").Provider; + sha256: import("@smithy/types").HashConstructor; + streamCollector: import("@smithy/types").StreamCollector; + useDualstackEndpoint: boolean | import("@smithy/types").Provider; + useFipsEndpoint: boolean | import("@smithy/types").Provider; + apiVersion: string; + cacheMiddleware?: boolean | undefined; + urlParser: import("@smithy/types").UrlParser; + base64Decoder: import("@smithy/types").Decoder; + base64Encoder: (_input: string | Uint8Array) => string; + utf8Decoder: import("@smithy/types").Decoder; + utf8Encoder: (input: string | Uint8Array) => string; + disableHostPrefix: boolean; + serviceId: string; + profile?: string | undefined; + logger: import("@smithy/types").Logger; + extensions: import("./runtimeExtensions").RuntimeExtension[]; + customUserAgent?: string | import("@smithy/types").UserAgent | undefined; + 
userAgentAppId?: string | import("@smithy/types").Provider | undefined; + retryStrategy?: import("@smithy/types").RetryStrategy | import("@smithy/types").RetryStrategyV2 | undefined; + endpoint?: ((string | import("@smithy/types").Endpoint | import("@smithy/types").Provider | import("@smithy/types").EndpointV2 | import("@smithy/types").Provider) & (string | import("@smithy/types").Provider | import("@smithy/types").Endpoint | import("@smithy/types").Provider | import("@smithy/types").EndpointV2 | import("@smithy/types").Provider)) | undefined; + endpointProvider: (endpointParams: import("./endpoint/EndpointParameters").EndpointParameters, context?: { + logger?: import("@smithy/types").Logger | undefined; + }) => import("@smithy/types").EndpointV2; + tls?: boolean | undefined; + serviceConfiguredEndpoint?: undefined; + authSchemePreference?: string[] | import("@smithy/types").Provider | undefined; + httpAuthSchemes: import("@smithy/types").HttpAuthScheme[]; + httpAuthSchemeProvider: import("./auth/httpAuthSchemeProvider").DynamoDBHttpAuthSchemeProvider; + credentials?: import("@smithy/types").AwsCredentialIdentity | import("@smithy/types").AwsCredentialIdentityProvider | undefined; + signer?: import("@smithy/types").RequestSigner | ((authScheme?: import("@smithy/types").AuthScheme | undefined) => Promise) | undefined; + signingEscapePath?: boolean | undefined; + systemClockOffset?: number | undefined; + signingRegion?: string | undefined; + signerConstructor?: (new (options: import("@smithy/signature-v4").SignatureV4Init & import("@smithy/signature-v4").SignatureV4CryptoInit) => import("@smithy/types").RequestSigner) | undefined; + endpointCacheSize?: number | undefined; + endpointDiscoveryEnabled?: boolean | undefined; + accountId?: string | import("@smithy/types").Provider | undefined; +}; diff --git a/amplify/functions/fetchDocuments/node_modules/@aws-sdk/client-dynamodb/dist-types/runtimeConfig.d.ts 
b/amplify/functions/fetchDocuments/node_modules/@aws-sdk/client-dynamodb/dist-types/runtimeConfig.d.ts new file mode 100644 index 0000000..01479fa --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@aws-sdk/client-dynamodb/dist-types/runtimeConfig.d.ts @@ -0,0 +1,55 @@ +import { NodeHttpHandler as RequestHandler } from "@smithy/node-http-handler"; +import { DynamoDBClientConfig } from "./DynamoDBClient"; +/** + * @internal + */ +export declare const getRuntimeConfig: (config: DynamoDBClientConfig) => { + runtime: string; + defaultsMode: import("@smithy/types").Provider; + accountIdEndpointMode: "disabled" | "preferred" | "required" | import("@smithy/types").Provider; + authSchemePreference: string[] | import("@smithy/types").Provider; + bodyLengthChecker: import("@smithy/types").BodyLengthCalculator; + credentialDefaultProvider: ((input: any) => import("@smithy/types").AwsCredentialIdentityProvider) | ((init?: import("@aws-sdk/credential-provider-node").DefaultProviderInit | undefined) => import("@smithy/types").MemoizedProvider); + defaultUserAgentProvider: (config?: import("@aws-sdk/util-user-agent-node").PreviouslyResolved | undefined) => Promise; + endpointDiscoveryEnabledProvider: import("@smithy/types").Provider; + maxAttempts: number | import("@smithy/types").Provider; + region: string | import("@smithy/types").Provider; + requestHandler: RequestHandler | import("@smithy/protocol-http").HttpHandler; + retryMode: string | import("@smithy/types").Provider; + sha256: import("@smithy/types").HashConstructor; + streamCollector: import("@smithy/types").StreamCollector; + useDualstackEndpoint: boolean | import("@smithy/types").Provider; + useFipsEndpoint: boolean | import("@smithy/types").Provider; + userAgentAppId: string | import("@smithy/types").Provider; + apiVersion: string; + cacheMiddleware?: boolean | undefined; + urlParser: import("@smithy/types").UrlParser; + base64Decoder: import("@smithy/types").Decoder; + base64Encoder: (_input: string | 
Uint8Array) => string; + utf8Decoder: import("@smithy/types").Decoder; + utf8Encoder: (input: string | Uint8Array) => string; + disableHostPrefix: boolean; + serviceId: string; + profile?: string | undefined; + logger: import("@smithy/types").Logger; + extensions: import("./runtimeExtensions").RuntimeExtension[]; + customUserAgent?: string | import("@smithy/types").UserAgent | undefined; + retryStrategy?: import("@smithy/types").RetryStrategy | import("@smithy/types").RetryStrategyV2 | undefined; + endpoint?: ((string | import("@smithy/types").Endpoint | import("@smithy/types").Provider | import("@smithy/types").EndpointV2 | import("@smithy/types").Provider) & (string | import("@smithy/types").Provider | import("@smithy/types").Endpoint | import("@smithy/types").Provider | import("@smithy/types").EndpointV2 | import("@smithy/types").Provider)) | undefined; + endpointProvider: (endpointParams: import("./endpoint/EndpointParameters").EndpointParameters, context?: { + logger?: import("@smithy/types").Logger | undefined; + }) => import("@smithy/types").EndpointV2; + tls?: boolean | undefined; + serviceConfiguredEndpoint?: undefined; + httpAuthSchemes: import("@smithy/types").HttpAuthScheme[]; + httpAuthSchemeProvider: import("./auth/httpAuthSchemeProvider").DynamoDBHttpAuthSchemeProvider; + credentials?: import("@smithy/types").AwsCredentialIdentity | import("@smithy/types").AwsCredentialIdentityProvider | undefined; + signer?: import("@smithy/types").RequestSigner | ((authScheme?: import("@smithy/types").AuthScheme | undefined) => Promise) | undefined; + signingEscapePath?: boolean | undefined; + systemClockOffset?: number | undefined; + signingRegion?: string | undefined; + signerConstructor?: (new (options: import("@smithy/signature-v4").SignatureV4Init & import("@smithy/signature-v4").SignatureV4CryptoInit) => import("@smithy/types").RequestSigner) | undefined; + endpointCacheSize?: number | undefined; + endpointDiscoveryEnabled?: boolean | undefined; + accountId?: 
string | import("@smithy/types").Provider | undefined; +}; diff --git a/amplify/functions/fetchDocuments/node_modules/@aws-sdk/client-dynamodb/dist-types/runtimeConfig.native.d.ts b/amplify/functions/fetchDocuments/node_modules/@aws-sdk/client-dynamodb/dist-types/runtimeConfig.native.d.ts new file mode 100644 index 0000000..0288659 --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@aws-sdk/client-dynamodb/dist-types/runtimeConfig.native.d.ts @@ -0,0 +1,54 @@ +import { DynamoDBClientConfig } from "./DynamoDBClient"; +/** + * @internal + */ +export declare const getRuntimeConfig: (config: DynamoDBClientConfig) => { + runtime: string; + sha256: import("@smithy/types").HashConstructor; + requestHandler: import("@smithy/types").NodeHttpHandlerOptions | import("@smithy/types").FetchHttpHandlerOptions | Record | import("@smithy/protocol-http").HttpHandler | import("@smithy/fetch-http-handler").FetchHttpHandler; + apiVersion: string; + cacheMiddleware?: boolean | undefined; + urlParser: import("@smithy/types").UrlParser; + bodyLengthChecker: import("@smithy/types").BodyLengthCalculator; + streamCollector: import("@smithy/types").StreamCollector; + base64Decoder: import("@smithy/types").Decoder; + base64Encoder: (_input: string | Uint8Array) => string; + utf8Decoder: import("@smithy/types").Decoder; + utf8Encoder: (input: string | Uint8Array) => string; + disableHostPrefix: boolean; + serviceId: string; + useDualstackEndpoint: boolean | import("@smithy/types").Provider; + useFipsEndpoint: boolean | import("@smithy/types").Provider; + region: string | import("@smithy/types").Provider; + profile?: string | undefined; + accountIdEndpointMode: "disabled" | "preferred" | "required" | (() => Promise); + defaultUserAgentProvider: (config?: import("@aws-sdk/util-user-agent-browser").PreviouslyResolved | undefined) => Promise; + credentialDefaultProvider: ((input: any) => import("@smithy/types").AwsCredentialIdentityProvider) | ((_: unknown) => () => Promise); + 
maxAttempts: number | import("@smithy/types").Provider; + retryMode: string | import("@smithy/types").Provider; + logger: import("@smithy/types").Logger; + extensions: import("./runtimeExtensions").RuntimeExtension[]; + defaultsMode: import("@smithy/smithy-client").DefaultsMode | import("@smithy/types").Provider; + endpointDiscoveryEnabledProvider: import("@smithy/types").Provider; + customUserAgent?: string | import("@smithy/types").UserAgent | undefined; + userAgentAppId?: string | import("@smithy/types").Provider | undefined; + retryStrategy?: import("@smithy/types").RetryStrategy | import("@smithy/types").RetryStrategyV2 | undefined; + endpoint?: ((string | import("@smithy/types").Endpoint | import("@smithy/types").Provider | import("@smithy/types").EndpointV2 | import("@smithy/types").Provider) & (string | import("@smithy/types").Provider | import("@smithy/types").Endpoint | import("@smithy/types").Provider | import("@smithy/types").EndpointV2 | import("@smithy/types").Provider)) | undefined; + endpointProvider: (endpointParams: import("./endpoint/EndpointParameters").EndpointParameters, context?: { + logger?: import("@smithy/types").Logger | undefined; + }) => import("@smithy/types").EndpointV2; + tls?: boolean | undefined; + serviceConfiguredEndpoint?: undefined; + authSchemePreference?: string[] | import("@smithy/types").Provider | undefined; + httpAuthSchemes: import("@smithy/types").HttpAuthScheme[]; + httpAuthSchemeProvider: import("./auth/httpAuthSchemeProvider").DynamoDBHttpAuthSchemeProvider; + credentials?: import("@smithy/types").AwsCredentialIdentity | import("@smithy/types").AwsCredentialIdentityProvider | undefined; + signer?: import("@smithy/types").RequestSigner | ((authScheme?: import("@smithy/types").AuthScheme | undefined) => Promise) | undefined; + signingEscapePath?: boolean | undefined; + systemClockOffset?: number | undefined; + signingRegion?: string | undefined; + signerConstructor?: (new (options: 
import("@smithy/signature-v4").SignatureV4Init & import("@smithy/signature-v4").SignatureV4CryptoInit) => import("@smithy/types").RequestSigner) | undefined; + endpointCacheSize?: number | undefined; + endpointDiscoveryEnabled?: boolean | undefined; + accountId?: string | import("@smithy/types").Provider | undefined; +}; diff --git a/amplify/functions/fetchDocuments/node_modules/@aws-sdk/client-dynamodb/dist-types/runtimeConfig.shared.d.ts b/amplify/functions/fetchDocuments/node_modules/@aws-sdk/client-dynamodb/dist-types/runtimeConfig.shared.d.ts new file mode 100644 index 0000000..36f4e1e --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@aws-sdk/client-dynamodb/dist-types/runtimeConfig.shared.d.ts @@ -0,0 +1,21 @@ +import { DynamoDBClientConfig } from "./DynamoDBClient"; +/** + * @internal + */ +export declare const getRuntimeConfig: (config: DynamoDBClientConfig) => { + apiVersion: string; + base64Decoder: import("@smithy/types").Decoder; + base64Encoder: (_input: string | Uint8Array) => string; + disableHostPrefix: boolean; + endpointProvider: (endpointParams: import("./endpoint/EndpointParameters").EndpointParameters, context?: { + logger?: import("@smithy/types").Logger | undefined; + }) => import("@smithy/types").EndpointV2; + extensions: import("./runtimeExtensions").RuntimeExtension[]; + httpAuthSchemeProvider: import("./auth/httpAuthSchemeProvider").DynamoDBHttpAuthSchemeProvider; + httpAuthSchemes: import("@smithy/types").HttpAuthScheme[]; + logger: import("@smithy/types").Logger; + serviceId: string; + urlParser: import("@smithy/types").UrlParser; + utf8Decoder: import("@smithy/types").Decoder; + utf8Encoder: (input: string | Uint8Array) => string; +}; diff --git a/amplify/functions/fetchDocuments/node_modules/@aws-sdk/client-dynamodb/dist-types/runtimeExtensions.d.ts b/amplify/functions/fetchDocuments/node_modules/@aws-sdk/client-dynamodb/dist-types/runtimeExtensions.d.ts new file mode 100644 index 0000000..ac1a4bc --- /dev/null +++ 
b/amplify/functions/fetchDocuments/node_modules/@aws-sdk/client-dynamodb/dist-types/runtimeExtensions.d.ts @@ -0,0 +1,17 @@ +import { DynamoDBExtensionConfiguration } from "./extensionConfiguration"; +/** + * @public + */ +export interface RuntimeExtension { + configure(extensionConfiguration: DynamoDBExtensionConfiguration): void; +} +/** + * @public + */ +export interface RuntimeExtensionsConfig { + extensions: RuntimeExtension[]; +} +/** + * @internal + */ +export declare const resolveRuntimeExtensions: (runtimeConfig: any, extensions: RuntimeExtension[]) => any; diff --git a/amplify/functions/fetchDocuments/node_modules/@aws-sdk/client-dynamodb/dist-types/ts3.4/DynamoDB.d.ts b/amplify/functions/fetchDocuments/node_modules/@aws-sdk/client-dynamodb/dist-types/ts3.4/DynamoDB.d.ts new file mode 100644 index 0000000..cf606cb --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@aws-sdk/client-dynamodb/dist-types/ts3.4/DynamoDB.d.ts @@ -0,0 +1,1000 @@ +import { HttpHandlerOptions as __HttpHandlerOptions } from "@smithy/types"; +import { + BatchExecuteStatementCommandInput, + BatchExecuteStatementCommandOutput, +} from "./commands/BatchExecuteStatementCommand"; +import { + BatchGetItemCommandInput, + BatchGetItemCommandOutput, +} from "./commands/BatchGetItemCommand"; +import { + BatchWriteItemCommandInput, + BatchWriteItemCommandOutput, +} from "./commands/BatchWriteItemCommand"; +import { + CreateBackupCommandInput, + CreateBackupCommandOutput, +} from "./commands/CreateBackupCommand"; +import { + CreateGlobalTableCommandInput, + CreateGlobalTableCommandOutput, +} from "./commands/CreateGlobalTableCommand"; +import { + CreateTableCommandInput, + CreateTableCommandOutput, +} from "./commands/CreateTableCommand"; +import { + DeleteBackupCommandInput, + DeleteBackupCommandOutput, +} from "./commands/DeleteBackupCommand"; +import { + DeleteItemCommandInput, + DeleteItemCommandOutput, +} from "./commands/DeleteItemCommand"; +import { + 
DeleteResourcePolicyCommandInput, + DeleteResourcePolicyCommandOutput, +} from "./commands/DeleteResourcePolicyCommand"; +import { + DeleteTableCommandInput, + DeleteTableCommandOutput, +} from "./commands/DeleteTableCommand"; +import { + DescribeBackupCommandInput, + DescribeBackupCommandOutput, +} from "./commands/DescribeBackupCommand"; +import { + DescribeContinuousBackupsCommandInput, + DescribeContinuousBackupsCommandOutput, +} from "./commands/DescribeContinuousBackupsCommand"; +import { + DescribeContributorInsightsCommandInput, + DescribeContributorInsightsCommandOutput, +} from "./commands/DescribeContributorInsightsCommand"; +import { + DescribeEndpointsCommandInput, + DescribeEndpointsCommandOutput, +} from "./commands/DescribeEndpointsCommand"; +import { + DescribeExportCommandInput, + DescribeExportCommandOutput, +} from "./commands/DescribeExportCommand"; +import { + DescribeGlobalTableCommandInput, + DescribeGlobalTableCommandOutput, +} from "./commands/DescribeGlobalTableCommand"; +import { + DescribeGlobalTableSettingsCommandInput, + DescribeGlobalTableSettingsCommandOutput, +} from "./commands/DescribeGlobalTableSettingsCommand"; +import { + DescribeImportCommandInput, + DescribeImportCommandOutput, +} from "./commands/DescribeImportCommand"; +import { + DescribeKinesisStreamingDestinationCommandInput, + DescribeKinesisStreamingDestinationCommandOutput, +} from "./commands/DescribeKinesisStreamingDestinationCommand"; +import { + DescribeLimitsCommandInput, + DescribeLimitsCommandOutput, +} from "./commands/DescribeLimitsCommand"; +import { + DescribeTableCommandInput, + DescribeTableCommandOutput, +} from "./commands/DescribeTableCommand"; +import { + DescribeTableReplicaAutoScalingCommandInput, + DescribeTableReplicaAutoScalingCommandOutput, +} from "./commands/DescribeTableReplicaAutoScalingCommand"; +import { + DescribeTimeToLiveCommandInput, + DescribeTimeToLiveCommandOutput, +} from "./commands/DescribeTimeToLiveCommand"; +import { + 
DisableKinesisStreamingDestinationCommandInput, + DisableKinesisStreamingDestinationCommandOutput, +} from "./commands/DisableKinesisStreamingDestinationCommand"; +import { + EnableKinesisStreamingDestinationCommandInput, + EnableKinesisStreamingDestinationCommandOutput, +} from "./commands/EnableKinesisStreamingDestinationCommand"; +import { + ExecuteStatementCommandInput, + ExecuteStatementCommandOutput, +} from "./commands/ExecuteStatementCommand"; +import { + ExecuteTransactionCommandInput, + ExecuteTransactionCommandOutput, +} from "./commands/ExecuteTransactionCommand"; +import { + ExportTableToPointInTimeCommandInput, + ExportTableToPointInTimeCommandOutput, +} from "./commands/ExportTableToPointInTimeCommand"; +import { + GetItemCommandInput, + GetItemCommandOutput, +} from "./commands/GetItemCommand"; +import { + GetResourcePolicyCommandInput, + GetResourcePolicyCommandOutput, +} from "./commands/GetResourcePolicyCommand"; +import { + ImportTableCommandInput, + ImportTableCommandOutput, +} from "./commands/ImportTableCommand"; +import { + ListBackupsCommandInput, + ListBackupsCommandOutput, +} from "./commands/ListBackupsCommand"; +import { + ListContributorInsightsCommandInput, + ListContributorInsightsCommandOutput, +} from "./commands/ListContributorInsightsCommand"; +import { + ListExportsCommandInput, + ListExportsCommandOutput, +} from "./commands/ListExportsCommand"; +import { + ListGlobalTablesCommandInput, + ListGlobalTablesCommandOutput, +} from "./commands/ListGlobalTablesCommand"; +import { + ListImportsCommandInput, + ListImportsCommandOutput, +} from "./commands/ListImportsCommand"; +import { + ListTablesCommandInput, + ListTablesCommandOutput, +} from "./commands/ListTablesCommand"; +import { + ListTagsOfResourceCommandInput, + ListTagsOfResourceCommandOutput, +} from "./commands/ListTagsOfResourceCommand"; +import { + PutItemCommandInput, + PutItemCommandOutput, +} from "./commands/PutItemCommand"; +import { + PutResourcePolicyCommandInput, 
+ PutResourcePolicyCommandOutput, +} from "./commands/PutResourcePolicyCommand"; +import { QueryCommandInput, QueryCommandOutput } from "./commands/QueryCommand"; +import { + RestoreTableFromBackupCommandInput, + RestoreTableFromBackupCommandOutput, +} from "./commands/RestoreTableFromBackupCommand"; +import { + RestoreTableToPointInTimeCommandInput, + RestoreTableToPointInTimeCommandOutput, +} from "./commands/RestoreTableToPointInTimeCommand"; +import { ScanCommandInput, ScanCommandOutput } from "./commands/ScanCommand"; +import { + TagResourceCommandInput, + TagResourceCommandOutput, +} from "./commands/TagResourceCommand"; +import { + TransactGetItemsCommandInput, + TransactGetItemsCommandOutput, +} from "./commands/TransactGetItemsCommand"; +import { + TransactWriteItemsCommandInput, + TransactWriteItemsCommandOutput, +} from "./commands/TransactWriteItemsCommand"; +import { + UntagResourceCommandInput, + UntagResourceCommandOutput, +} from "./commands/UntagResourceCommand"; +import { + UpdateContinuousBackupsCommandInput, + UpdateContinuousBackupsCommandOutput, +} from "./commands/UpdateContinuousBackupsCommand"; +import { + UpdateContributorInsightsCommandInput, + UpdateContributorInsightsCommandOutput, +} from "./commands/UpdateContributorInsightsCommand"; +import { + UpdateGlobalTableCommandInput, + UpdateGlobalTableCommandOutput, +} from "./commands/UpdateGlobalTableCommand"; +import { + UpdateGlobalTableSettingsCommandInput, + UpdateGlobalTableSettingsCommandOutput, +} from "./commands/UpdateGlobalTableSettingsCommand"; +import { + UpdateItemCommandInput, + UpdateItemCommandOutput, +} from "./commands/UpdateItemCommand"; +import { + UpdateKinesisStreamingDestinationCommandInput, + UpdateKinesisStreamingDestinationCommandOutput, +} from "./commands/UpdateKinesisStreamingDestinationCommand"; +import { + UpdateTableCommandInput, + UpdateTableCommandOutput, +} from "./commands/UpdateTableCommand"; +import { + UpdateTableReplicaAutoScalingCommandInput, + 
UpdateTableReplicaAutoScalingCommandOutput, +} from "./commands/UpdateTableReplicaAutoScalingCommand"; +import { + UpdateTimeToLiveCommandInput, + UpdateTimeToLiveCommandOutput, +} from "./commands/UpdateTimeToLiveCommand"; +import { DynamoDBClient } from "./DynamoDBClient"; +export interface DynamoDB { + batchExecuteStatement( + args: BatchExecuteStatementCommandInput, + options?: __HttpHandlerOptions + ): Promise; + batchExecuteStatement( + args: BatchExecuteStatementCommandInput, + cb: (err: any, data?: BatchExecuteStatementCommandOutput) => void + ): void; + batchExecuteStatement( + args: BatchExecuteStatementCommandInput, + options: __HttpHandlerOptions, + cb: (err: any, data?: BatchExecuteStatementCommandOutput) => void + ): void; + batchGetItem( + args: BatchGetItemCommandInput, + options?: __HttpHandlerOptions + ): Promise; + batchGetItem( + args: BatchGetItemCommandInput, + cb: (err: any, data?: BatchGetItemCommandOutput) => void + ): void; + batchGetItem( + args: BatchGetItemCommandInput, + options: __HttpHandlerOptions, + cb: (err: any, data?: BatchGetItemCommandOutput) => void + ): void; + batchWriteItem( + args: BatchWriteItemCommandInput, + options?: __HttpHandlerOptions + ): Promise; + batchWriteItem( + args: BatchWriteItemCommandInput, + cb: (err: any, data?: BatchWriteItemCommandOutput) => void + ): void; + batchWriteItem( + args: BatchWriteItemCommandInput, + options: __HttpHandlerOptions, + cb: (err: any, data?: BatchWriteItemCommandOutput) => void + ): void; + createBackup( + args: CreateBackupCommandInput, + options?: __HttpHandlerOptions + ): Promise; + createBackup( + args: CreateBackupCommandInput, + cb: (err: any, data?: CreateBackupCommandOutput) => void + ): void; + createBackup( + args: CreateBackupCommandInput, + options: __HttpHandlerOptions, + cb: (err: any, data?: CreateBackupCommandOutput) => void + ): void; + createGlobalTable( + args: CreateGlobalTableCommandInput, + options?: __HttpHandlerOptions + ): Promise; + 
createGlobalTable( + args: CreateGlobalTableCommandInput, + cb: (err: any, data?: CreateGlobalTableCommandOutput) => void + ): void; + createGlobalTable( + args: CreateGlobalTableCommandInput, + options: __HttpHandlerOptions, + cb: (err: any, data?: CreateGlobalTableCommandOutput) => void + ): void; + createTable( + args: CreateTableCommandInput, + options?: __HttpHandlerOptions + ): Promise; + createTable( + args: CreateTableCommandInput, + cb: (err: any, data?: CreateTableCommandOutput) => void + ): void; + createTable( + args: CreateTableCommandInput, + options: __HttpHandlerOptions, + cb: (err: any, data?: CreateTableCommandOutput) => void + ): void; + deleteBackup( + args: DeleteBackupCommandInput, + options?: __HttpHandlerOptions + ): Promise; + deleteBackup( + args: DeleteBackupCommandInput, + cb: (err: any, data?: DeleteBackupCommandOutput) => void + ): void; + deleteBackup( + args: DeleteBackupCommandInput, + options: __HttpHandlerOptions, + cb: (err: any, data?: DeleteBackupCommandOutput) => void + ): void; + deleteItem( + args: DeleteItemCommandInput, + options?: __HttpHandlerOptions + ): Promise; + deleteItem( + args: DeleteItemCommandInput, + cb: (err: any, data?: DeleteItemCommandOutput) => void + ): void; + deleteItem( + args: DeleteItemCommandInput, + options: __HttpHandlerOptions, + cb: (err: any, data?: DeleteItemCommandOutput) => void + ): void; + deleteResourcePolicy( + args: DeleteResourcePolicyCommandInput, + options?: __HttpHandlerOptions + ): Promise; + deleteResourcePolicy( + args: DeleteResourcePolicyCommandInput, + cb: (err: any, data?: DeleteResourcePolicyCommandOutput) => void + ): void; + deleteResourcePolicy( + args: DeleteResourcePolicyCommandInput, + options: __HttpHandlerOptions, + cb: (err: any, data?: DeleteResourcePolicyCommandOutput) => void + ): void; + deleteTable( + args: DeleteTableCommandInput, + options?: __HttpHandlerOptions + ): Promise; + deleteTable( + args: DeleteTableCommandInput, + cb: (err: any, data?: 
DeleteTableCommandOutput) => void + ): void; + deleteTable( + args: DeleteTableCommandInput, + options: __HttpHandlerOptions, + cb: (err: any, data?: DeleteTableCommandOutput) => void + ): void; + describeBackup( + args: DescribeBackupCommandInput, + options?: __HttpHandlerOptions + ): Promise; + describeBackup( + args: DescribeBackupCommandInput, + cb: (err: any, data?: DescribeBackupCommandOutput) => void + ): void; + describeBackup( + args: DescribeBackupCommandInput, + options: __HttpHandlerOptions, + cb: (err: any, data?: DescribeBackupCommandOutput) => void + ): void; + describeContinuousBackups( + args: DescribeContinuousBackupsCommandInput, + options?: __HttpHandlerOptions + ): Promise; + describeContinuousBackups( + args: DescribeContinuousBackupsCommandInput, + cb: (err: any, data?: DescribeContinuousBackupsCommandOutput) => void + ): void; + describeContinuousBackups( + args: DescribeContinuousBackupsCommandInput, + options: __HttpHandlerOptions, + cb: (err: any, data?: DescribeContinuousBackupsCommandOutput) => void + ): void; + describeContributorInsights( + args: DescribeContributorInsightsCommandInput, + options?: __HttpHandlerOptions + ): Promise; + describeContributorInsights( + args: DescribeContributorInsightsCommandInput, + cb: (err: any, data?: DescribeContributorInsightsCommandOutput) => void + ): void; + describeContributorInsights( + args: DescribeContributorInsightsCommandInput, + options: __HttpHandlerOptions, + cb: (err: any, data?: DescribeContributorInsightsCommandOutput) => void + ): void; + describeEndpoints(): Promise; + describeEndpoints( + args: DescribeEndpointsCommandInput, + options?: __HttpHandlerOptions + ): Promise; + describeEndpoints( + args: DescribeEndpointsCommandInput, + cb: (err: any, data?: DescribeEndpointsCommandOutput) => void + ): void; + describeEndpoints( + args: DescribeEndpointsCommandInput, + options: __HttpHandlerOptions, + cb: (err: any, data?: DescribeEndpointsCommandOutput) => void + ): void; + 
describeExport( + args: DescribeExportCommandInput, + options?: __HttpHandlerOptions + ): Promise; + describeExport( + args: DescribeExportCommandInput, + cb: (err: any, data?: DescribeExportCommandOutput) => void + ): void; + describeExport( + args: DescribeExportCommandInput, + options: __HttpHandlerOptions, + cb: (err: any, data?: DescribeExportCommandOutput) => void + ): void; + describeGlobalTable( + args: DescribeGlobalTableCommandInput, + options?: __HttpHandlerOptions + ): Promise; + describeGlobalTable( + args: DescribeGlobalTableCommandInput, + cb: (err: any, data?: DescribeGlobalTableCommandOutput) => void + ): void; + describeGlobalTable( + args: DescribeGlobalTableCommandInput, + options: __HttpHandlerOptions, + cb: (err: any, data?: DescribeGlobalTableCommandOutput) => void + ): void; + describeGlobalTableSettings( + args: DescribeGlobalTableSettingsCommandInput, + options?: __HttpHandlerOptions + ): Promise; + describeGlobalTableSettings( + args: DescribeGlobalTableSettingsCommandInput, + cb: (err: any, data?: DescribeGlobalTableSettingsCommandOutput) => void + ): void; + describeGlobalTableSettings( + args: DescribeGlobalTableSettingsCommandInput, + options: __HttpHandlerOptions, + cb: (err: any, data?: DescribeGlobalTableSettingsCommandOutput) => void + ): void; + describeImport( + args: DescribeImportCommandInput, + options?: __HttpHandlerOptions + ): Promise; + describeImport( + args: DescribeImportCommandInput, + cb: (err: any, data?: DescribeImportCommandOutput) => void + ): void; + describeImport( + args: DescribeImportCommandInput, + options: __HttpHandlerOptions, + cb: (err: any, data?: DescribeImportCommandOutput) => void + ): void; + describeKinesisStreamingDestination( + args: DescribeKinesisStreamingDestinationCommandInput, + options?: __HttpHandlerOptions + ): Promise; + describeKinesisStreamingDestination( + args: DescribeKinesisStreamingDestinationCommandInput, + cb: ( + err: any, + data?: 
DescribeKinesisStreamingDestinationCommandOutput + ) => void + ): void; + describeKinesisStreamingDestination( + args: DescribeKinesisStreamingDestinationCommandInput, + options: __HttpHandlerOptions, + cb: ( + err: any, + data?: DescribeKinesisStreamingDestinationCommandOutput + ) => void + ): void; + describeLimits(): Promise; + describeLimits( + args: DescribeLimitsCommandInput, + options?: __HttpHandlerOptions + ): Promise; + describeLimits( + args: DescribeLimitsCommandInput, + cb: (err: any, data?: DescribeLimitsCommandOutput) => void + ): void; + describeLimits( + args: DescribeLimitsCommandInput, + options: __HttpHandlerOptions, + cb: (err: any, data?: DescribeLimitsCommandOutput) => void + ): void; + describeTable( + args: DescribeTableCommandInput, + options?: __HttpHandlerOptions + ): Promise; + describeTable( + args: DescribeTableCommandInput, + cb: (err: any, data?: DescribeTableCommandOutput) => void + ): void; + describeTable( + args: DescribeTableCommandInput, + options: __HttpHandlerOptions, + cb: (err: any, data?: DescribeTableCommandOutput) => void + ): void; + describeTableReplicaAutoScaling( + args: DescribeTableReplicaAutoScalingCommandInput, + options?: __HttpHandlerOptions + ): Promise; + describeTableReplicaAutoScaling( + args: DescribeTableReplicaAutoScalingCommandInput, + cb: (err: any, data?: DescribeTableReplicaAutoScalingCommandOutput) => void + ): void; + describeTableReplicaAutoScaling( + args: DescribeTableReplicaAutoScalingCommandInput, + options: __HttpHandlerOptions, + cb: (err: any, data?: DescribeTableReplicaAutoScalingCommandOutput) => void + ): void; + describeTimeToLive( + args: DescribeTimeToLiveCommandInput, + options?: __HttpHandlerOptions + ): Promise; + describeTimeToLive( + args: DescribeTimeToLiveCommandInput, + cb: (err: any, data?: DescribeTimeToLiveCommandOutput) => void + ): void; + describeTimeToLive( + args: DescribeTimeToLiveCommandInput, + options: __HttpHandlerOptions, + cb: (err: any, data?: 
DescribeTimeToLiveCommandOutput) => void + ): void; + disableKinesisStreamingDestination( + args: DisableKinesisStreamingDestinationCommandInput, + options?: __HttpHandlerOptions + ): Promise; + disableKinesisStreamingDestination( + args: DisableKinesisStreamingDestinationCommandInput, + cb: ( + err: any, + data?: DisableKinesisStreamingDestinationCommandOutput + ) => void + ): void; + disableKinesisStreamingDestination( + args: DisableKinesisStreamingDestinationCommandInput, + options: __HttpHandlerOptions, + cb: ( + err: any, + data?: DisableKinesisStreamingDestinationCommandOutput + ) => void + ): void; + enableKinesisStreamingDestination( + args: EnableKinesisStreamingDestinationCommandInput, + options?: __HttpHandlerOptions + ): Promise; + enableKinesisStreamingDestination( + args: EnableKinesisStreamingDestinationCommandInput, + cb: ( + err: any, + data?: EnableKinesisStreamingDestinationCommandOutput + ) => void + ): void; + enableKinesisStreamingDestination( + args: EnableKinesisStreamingDestinationCommandInput, + options: __HttpHandlerOptions, + cb: ( + err: any, + data?: EnableKinesisStreamingDestinationCommandOutput + ) => void + ): void; + executeStatement( + args: ExecuteStatementCommandInput, + options?: __HttpHandlerOptions + ): Promise; + executeStatement( + args: ExecuteStatementCommandInput, + cb: (err: any, data?: ExecuteStatementCommandOutput) => void + ): void; + executeStatement( + args: ExecuteStatementCommandInput, + options: __HttpHandlerOptions, + cb: (err: any, data?: ExecuteStatementCommandOutput) => void + ): void; + executeTransaction( + args: ExecuteTransactionCommandInput, + options?: __HttpHandlerOptions + ): Promise; + executeTransaction( + args: ExecuteTransactionCommandInput, + cb: (err: any, data?: ExecuteTransactionCommandOutput) => void + ): void; + executeTransaction( + args: ExecuteTransactionCommandInput, + options: __HttpHandlerOptions, + cb: (err: any, data?: ExecuteTransactionCommandOutput) => void + ): void; + 
exportTableToPointInTime( + args: ExportTableToPointInTimeCommandInput, + options?: __HttpHandlerOptions + ): Promise; + exportTableToPointInTime( + args: ExportTableToPointInTimeCommandInput, + cb: (err: any, data?: ExportTableToPointInTimeCommandOutput) => void + ): void; + exportTableToPointInTime( + args: ExportTableToPointInTimeCommandInput, + options: __HttpHandlerOptions, + cb: (err: any, data?: ExportTableToPointInTimeCommandOutput) => void + ): void; + getItem( + args: GetItemCommandInput, + options?: __HttpHandlerOptions + ): Promise; + getItem( + args: GetItemCommandInput, + cb: (err: any, data?: GetItemCommandOutput) => void + ): void; + getItem( + args: GetItemCommandInput, + options: __HttpHandlerOptions, + cb: (err: any, data?: GetItemCommandOutput) => void + ): void; + getResourcePolicy( + args: GetResourcePolicyCommandInput, + options?: __HttpHandlerOptions + ): Promise; + getResourcePolicy( + args: GetResourcePolicyCommandInput, + cb: (err: any, data?: GetResourcePolicyCommandOutput) => void + ): void; + getResourcePolicy( + args: GetResourcePolicyCommandInput, + options: __HttpHandlerOptions, + cb: (err: any, data?: GetResourcePolicyCommandOutput) => void + ): void; + importTable( + args: ImportTableCommandInput, + options?: __HttpHandlerOptions + ): Promise; + importTable( + args: ImportTableCommandInput, + cb: (err: any, data?: ImportTableCommandOutput) => void + ): void; + importTable( + args: ImportTableCommandInput, + options: __HttpHandlerOptions, + cb: (err: any, data?: ImportTableCommandOutput) => void + ): void; + listBackups(): Promise; + listBackups( + args: ListBackupsCommandInput, + options?: __HttpHandlerOptions + ): Promise; + listBackups( + args: ListBackupsCommandInput, + cb: (err: any, data?: ListBackupsCommandOutput) => void + ): void; + listBackups( + args: ListBackupsCommandInput, + options: __HttpHandlerOptions, + cb: (err: any, data?: ListBackupsCommandOutput) => void + ): void; + listContributorInsights(): Promise; + 
listContributorInsights( + args: ListContributorInsightsCommandInput, + options?: __HttpHandlerOptions + ): Promise; + listContributorInsights( + args: ListContributorInsightsCommandInput, + cb: (err: any, data?: ListContributorInsightsCommandOutput) => void + ): void; + listContributorInsights( + args: ListContributorInsightsCommandInput, + options: __HttpHandlerOptions, + cb: (err: any, data?: ListContributorInsightsCommandOutput) => void + ): void; + listExports(): Promise; + listExports( + args: ListExportsCommandInput, + options?: __HttpHandlerOptions + ): Promise; + listExports( + args: ListExportsCommandInput, + cb: (err: any, data?: ListExportsCommandOutput) => void + ): void; + listExports( + args: ListExportsCommandInput, + options: __HttpHandlerOptions, + cb: (err: any, data?: ListExportsCommandOutput) => void + ): void; + listGlobalTables(): Promise; + listGlobalTables( + args: ListGlobalTablesCommandInput, + options?: __HttpHandlerOptions + ): Promise; + listGlobalTables( + args: ListGlobalTablesCommandInput, + cb: (err: any, data?: ListGlobalTablesCommandOutput) => void + ): void; + listGlobalTables( + args: ListGlobalTablesCommandInput, + options: __HttpHandlerOptions, + cb: (err: any, data?: ListGlobalTablesCommandOutput) => void + ): void; + listImports(): Promise; + listImports( + args: ListImportsCommandInput, + options?: __HttpHandlerOptions + ): Promise; + listImports( + args: ListImportsCommandInput, + cb: (err: any, data?: ListImportsCommandOutput) => void + ): void; + listImports( + args: ListImportsCommandInput, + options: __HttpHandlerOptions, + cb: (err: any, data?: ListImportsCommandOutput) => void + ): void; + listTables(): Promise; + listTables( + args: ListTablesCommandInput, + options?: __HttpHandlerOptions + ): Promise; + listTables( + args: ListTablesCommandInput, + cb: (err: any, data?: ListTablesCommandOutput) => void + ): void; + listTables( + args: ListTablesCommandInput, + options: __HttpHandlerOptions, + cb: (err: any, data?: 
ListTablesCommandOutput) => void + ): void; + listTagsOfResource( + args: ListTagsOfResourceCommandInput, + options?: __HttpHandlerOptions + ): Promise; + listTagsOfResource( + args: ListTagsOfResourceCommandInput, + cb: (err: any, data?: ListTagsOfResourceCommandOutput) => void + ): void; + listTagsOfResource( + args: ListTagsOfResourceCommandInput, + options: __HttpHandlerOptions, + cb: (err: any, data?: ListTagsOfResourceCommandOutput) => void + ): void; + putItem( + args: PutItemCommandInput, + options?: __HttpHandlerOptions + ): Promise; + putItem( + args: PutItemCommandInput, + cb: (err: any, data?: PutItemCommandOutput) => void + ): void; + putItem( + args: PutItemCommandInput, + options: __HttpHandlerOptions, + cb: (err: any, data?: PutItemCommandOutput) => void + ): void; + putResourcePolicy( + args: PutResourcePolicyCommandInput, + options?: __HttpHandlerOptions + ): Promise; + putResourcePolicy( + args: PutResourcePolicyCommandInput, + cb: (err: any, data?: PutResourcePolicyCommandOutput) => void + ): void; + putResourcePolicy( + args: PutResourcePolicyCommandInput, + options: __HttpHandlerOptions, + cb: (err: any, data?: PutResourcePolicyCommandOutput) => void + ): void; + query( + args: QueryCommandInput, + options?: __HttpHandlerOptions + ): Promise; + query( + args: QueryCommandInput, + cb: (err: any, data?: QueryCommandOutput) => void + ): void; + query( + args: QueryCommandInput, + options: __HttpHandlerOptions, + cb: (err: any, data?: QueryCommandOutput) => void + ): void; + restoreTableFromBackup( + args: RestoreTableFromBackupCommandInput, + options?: __HttpHandlerOptions + ): Promise; + restoreTableFromBackup( + args: RestoreTableFromBackupCommandInput, + cb: (err: any, data?: RestoreTableFromBackupCommandOutput) => void + ): void; + restoreTableFromBackup( + args: RestoreTableFromBackupCommandInput, + options: __HttpHandlerOptions, + cb: (err: any, data?: RestoreTableFromBackupCommandOutput) => void + ): void; + restoreTableToPointInTime( + 
args: RestoreTableToPointInTimeCommandInput, + options?: __HttpHandlerOptions + ): Promise; + restoreTableToPointInTime( + args: RestoreTableToPointInTimeCommandInput, + cb: (err: any, data?: RestoreTableToPointInTimeCommandOutput) => void + ): void; + restoreTableToPointInTime( + args: RestoreTableToPointInTimeCommandInput, + options: __HttpHandlerOptions, + cb: (err: any, data?: RestoreTableToPointInTimeCommandOutput) => void + ): void; + scan( + args: ScanCommandInput, + options?: __HttpHandlerOptions + ): Promise; + scan( + args: ScanCommandInput, + cb: (err: any, data?: ScanCommandOutput) => void + ): void; + scan( + args: ScanCommandInput, + options: __HttpHandlerOptions, + cb: (err: any, data?: ScanCommandOutput) => void + ): void; + tagResource( + args: TagResourceCommandInput, + options?: __HttpHandlerOptions + ): Promise; + tagResource( + args: TagResourceCommandInput, + cb: (err: any, data?: TagResourceCommandOutput) => void + ): void; + tagResource( + args: TagResourceCommandInput, + options: __HttpHandlerOptions, + cb: (err: any, data?: TagResourceCommandOutput) => void + ): void; + transactGetItems( + args: TransactGetItemsCommandInput, + options?: __HttpHandlerOptions + ): Promise; + transactGetItems( + args: TransactGetItemsCommandInput, + cb: (err: any, data?: TransactGetItemsCommandOutput) => void + ): void; + transactGetItems( + args: TransactGetItemsCommandInput, + options: __HttpHandlerOptions, + cb: (err: any, data?: TransactGetItemsCommandOutput) => void + ): void; + transactWriteItems( + args: TransactWriteItemsCommandInput, + options?: __HttpHandlerOptions + ): Promise; + transactWriteItems( + args: TransactWriteItemsCommandInput, + cb: (err: any, data?: TransactWriteItemsCommandOutput) => void + ): void; + transactWriteItems( + args: TransactWriteItemsCommandInput, + options: __HttpHandlerOptions, + cb: (err: any, data?: TransactWriteItemsCommandOutput) => void + ): void; + untagResource( + args: UntagResourceCommandInput, + options?: 
__HttpHandlerOptions + ): Promise; + untagResource( + args: UntagResourceCommandInput, + cb: (err: any, data?: UntagResourceCommandOutput) => void + ): void; + untagResource( + args: UntagResourceCommandInput, + options: __HttpHandlerOptions, + cb: (err: any, data?: UntagResourceCommandOutput) => void + ): void; + updateContinuousBackups( + args: UpdateContinuousBackupsCommandInput, + options?: __HttpHandlerOptions + ): Promise; + updateContinuousBackups( + args: UpdateContinuousBackupsCommandInput, + cb: (err: any, data?: UpdateContinuousBackupsCommandOutput) => void + ): void; + updateContinuousBackups( + args: UpdateContinuousBackupsCommandInput, + options: __HttpHandlerOptions, + cb: (err: any, data?: UpdateContinuousBackupsCommandOutput) => void + ): void; + updateContributorInsights( + args: UpdateContributorInsightsCommandInput, + options?: __HttpHandlerOptions + ): Promise; + updateContributorInsights( + args: UpdateContributorInsightsCommandInput, + cb: (err: any, data?: UpdateContributorInsightsCommandOutput) => void + ): void; + updateContributorInsights( + args: UpdateContributorInsightsCommandInput, + options: __HttpHandlerOptions, + cb: (err: any, data?: UpdateContributorInsightsCommandOutput) => void + ): void; + updateGlobalTable( + args: UpdateGlobalTableCommandInput, + options?: __HttpHandlerOptions + ): Promise; + updateGlobalTable( + args: UpdateGlobalTableCommandInput, + cb: (err: any, data?: UpdateGlobalTableCommandOutput) => void + ): void; + updateGlobalTable( + args: UpdateGlobalTableCommandInput, + options: __HttpHandlerOptions, + cb: (err: any, data?: UpdateGlobalTableCommandOutput) => void + ): void; + updateGlobalTableSettings( + args: UpdateGlobalTableSettingsCommandInput, + options?: __HttpHandlerOptions + ): Promise; + updateGlobalTableSettings( + args: UpdateGlobalTableSettingsCommandInput, + cb: (err: any, data?: UpdateGlobalTableSettingsCommandOutput) => void + ): void; + updateGlobalTableSettings( + args: 
UpdateGlobalTableSettingsCommandInput, + options: __HttpHandlerOptions, + cb: (err: any, data?: UpdateGlobalTableSettingsCommandOutput) => void + ): void; + updateItem( + args: UpdateItemCommandInput, + options?: __HttpHandlerOptions + ): Promise; + updateItem( + args: UpdateItemCommandInput, + cb: (err: any, data?: UpdateItemCommandOutput) => void + ): void; + updateItem( + args: UpdateItemCommandInput, + options: __HttpHandlerOptions, + cb: (err: any, data?: UpdateItemCommandOutput) => void + ): void; + updateKinesisStreamingDestination( + args: UpdateKinesisStreamingDestinationCommandInput, + options?: __HttpHandlerOptions + ): Promise; + updateKinesisStreamingDestination( + args: UpdateKinesisStreamingDestinationCommandInput, + cb: ( + err: any, + data?: UpdateKinesisStreamingDestinationCommandOutput + ) => void + ): void; + updateKinesisStreamingDestination( + args: UpdateKinesisStreamingDestinationCommandInput, + options: __HttpHandlerOptions, + cb: ( + err: any, + data?: UpdateKinesisStreamingDestinationCommandOutput + ) => void + ): void; + updateTable( + args: UpdateTableCommandInput, + options?: __HttpHandlerOptions + ): Promise; + updateTable( + args: UpdateTableCommandInput, + cb: (err: any, data?: UpdateTableCommandOutput) => void + ): void; + updateTable( + args: UpdateTableCommandInput, + options: __HttpHandlerOptions, + cb: (err: any, data?: UpdateTableCommandOutput) => void + ): void; + updateTableReplicaAutoScaling( + args: UpdateTableReplicaAutoScalingCommandInput, + options?: __HttpHandlerOptions + ): Promise; + updateTableReplicaAutoScaling( + args: UpdateTableReplicaAutoScalingCommandInput, + cb: (err: any, data?: UpdateTableReplicaAutoScalingCommandOutput) => void + ): void; + updateTableReplicaAutoScaling( + args: UpdateTableReplicaAutoScalingCommandInput, + options: __HttpHandlerOptions, + cb: (err: any, data?: UpdateTableReplicaAutoScalingCommandOutput) => void + ): void; + updateTimeToLive( + args: UpdateTimeToLiveCommandInput, + 
options?: __HttpHandlerOptions + ): Promise; + updateTimeToLive( + args: UpdateTimeToLiveCommandInput, + cb: (err: any, data?: UpdateTimeToLiveCommandOutput) => void + ): void; + updateTimeToLive( + args: UpdateTimeToLiveCommandInput, + options: __HttpHandlerOptions, + cb: (err: any, data?: UpdateTimeToLiveCommandOutput) => void + ): void; +} +export declare class DynamoDB extends DynamoDBClient implements DynamoDB {} diff --git a/amplify/functions/fetchDocuments/node_modules/@aws-sdk/client-dynamodb/dist-types/ts3.4/DynamoDBClient.d.ts b/amplify/functions/fetchDocuments/node_modules/@aws-sdk/client-dynamodb/dist-types/ts3.4/DynamoDBClient.d.ts new file mode 100644 index 0000000..87aca7c --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@aws-sdk/client-dynamodb/dist-types/ts3.4/DynamoDBClient.d.ts @@ -0,0 +1,472 @@ +import { + AccountIdEndpointMode, + AccountIdEndpointModeInputConfig, + AccountIdEndpointModeResolvedConfig, +} from "@aws-sdk/core/account-id-endpoint"; +import { + EndpointDiscoveryInputConfig, + EndpointDiscoveryResolvedConfig, +} from "@aws-sdk/middleware-endpoint-discovery"; +import { + HostHeaderInputConfig, + HostHeaderResolvedConfig, +} from "@aws-sdk/middleware-host-header"; +import { + UserAgentInputConfig, + UserAgentResolvedConfig, +} from "@aws-sdk/middleware-user-agent"; +import { + RegionInputConfig, + RegionResolvedConfig, +} from "@smithy/config-resolver"; +import { + EndpointInputConfig, + EndpointResolvedConfig, +} from "@smithy/middleware-endpoint"; +import { + RetryInputConfig, + RetryResolvedConfig, +} from "@smithy/middleware-retry"; +import { HttpHandlerUserInput as __HttpHandlerUserInput } from "@smithy/protocol-http"; +import { + Client as __Client, + DefaultsMode as __DefaultsMode, + SmithyConfiguration as __SmithyConfiguration, + SmithyResolvedConfiguration as __SmithyResolvedConfiguration, +} from "@smithy/smithy-client"; +import { + AwsCredentialIdentityProvider, + BodyLengthCalculator as 
__BodyLengthCalculator, + CheckOptionalClientConfig as __CheckOptionalClientConfig, + ChecksumConstructor as __ChecksumConstructor, + Decoder as __Decoder, + Encoder as __Encoder, + HashConstructor as __HashConstructor, + HttpHandlerOptions as __HttpHandlerOptions, + Logger as __Logger, + Provider as __Provider, + Provider, + StreamCollector as __StreamCollector, + UrlParser as __UrlParser, + UserAgent as __UserAgent, +} from "@smithy/types"; +import { + HttpAuthSchemeInputConfig, + HttpAuthSchemeResolvedConfig, +} from "./auth/httpAuthSchemeProvider"; +import { + BatchExecuteStatementCommandInput, + BatchExecuteStatementCommandOutput, +} from "./commands/BatchExecuteStatementCommand"; +import { + BatchGetItemCommandInput, + BatchGetItemCommandOutput, +} from "./commands/BatchGetItemCommand"; +import { + BatchWriteItemCommandInput, + BatchWriteItemCommandOutput, +} from "./commands/BatchWriteItemCommand"; +import { + CreateBackupCommandInput, + CreateBackupCommandOutput, +} from "./commands/CreateBackupCommand"; +import { + CreateGlobalTableCommandInput, + CreateGlobalTableCommandOutput, +} from "./commands/CreateGlobalTableCommand"; +import { + CreateTableCommandInput, + CreateTableCommandOutput, +} from "./commands/CreateTableCommand"; +import { + DeleteBackupCommandInput, + DeleteBackupCommandOutput, +} from "./commands/DeleteBackupCommand"; +import { + DeleteItemCommandInput, + DeleteItemCommandOutput, +} from "./commands/DeleteItemCommand"; +import { + DeleteResourcePolicyCommandInput, + DeleteResourcePolicyCommandOutput, +} from "./commands/DeleteResourcePolicyCommand"; +import { + DeleteTableCommandInput, + DeleteTableCommandOutput, +} from "./commands/DeleteTableCommand"; +import { + DescribeBackupCommandInput, + DescribeBackupCommandOutput, +} from "./commands/DescribeBackupCommand"; +import { + DescribeContinuousBackupsCommandInput, + DescribeContinuousBackupsCommandOutput, +} from "./commands/DescribeContinuousBackupsCommand"; +import { + 
DescribeContributorInsightsCommandInput, + DescribeContributorInsightsCommandOutput, +} from "./commands/DescribeContributorInsightsCommand"; +import { + DescribeEndpointsCommandInput, + DescribeEndpointsCommandOutput, +} from "./commands/DescribeEndpointsCommand"; +import { + DescribeExportCommandInput, + DescribeExportCommandOutput, +} from "./commands/DescribeExportCommand"; +import { + DescribeGlobalTableCommandInput, + DescribeGlobalTableCommandOutput, +} from "./commands/DescribeGlobalTableCommand"; +import { + DescribeGlobalTableSettingsCommandInput, + DescribeGlobalTableSettingsCommandOutput, +} from "./commands/DescribeGlobalTableSettingsCommand"; +import { + DescribeImportCommandInput, + DescribeImportCommandOutput, +} from "./commands/DescribeImportCommand"; +import { + DescribeKinesisStreamingDestinationCommandInput, + DescribeKinesisStreamingDestinationCommandOutput, +} from "./commands/DescribeKinesisStreamingDestinationCommand"; +import { + DescribeLimitsCommandInput, + DescribeLimitsCommandOutput, +} from "./commands/DescribeLimitsCommand"; +import { + DescribeTableCommandInput, + DescribeTableCommandOutput, +} from "./commands/DescribeTableCommand"; +import { + DescribeTableReplicaAutoScalingCommandInput, + DescribeTableReplicaAutoScalingCommandOutput, +} from "./commands/DescribeTableReplicaAutoScalingCommand"; +import { + DescribeTimeToLiveCommandInput, + DescribeTimeToLiveCommandOutput, +} from "./commands/DescribeTimeToLiveCommand"; +import { + DisableKinesisStreamingDestinationCommandInput, + DisableKinesisStreamingDestinationCommandOutput, +} from "./commands/DisableKinesisStreamingDestinationCommand"; +import { + EnableKinesisStreamingDestinationCommandInput, + EnableKinesisStreamingDestinationCommandOutput, +} from "./commands/EnableKinesisStreamingDestinationCommand"; +import { + ExecuteStatementCommandInput, + ExecuteStatementCommandOutput, +} from "./commands/ExecuteStatementCommand"; +import { + ExecuteTransactionCommandInput, + 
ExecuteTransactionCommandOutput, +} from "./commands/ExecuteTransactionCommand"; +import { + ExportTableToPointInTimeCommandInput, + ExportTableToPointInTimeCommandOutput, +} from "./commands/ExportTableToPointInTimeCommand"; +import { + GetItemCommandInput, + GetItemCommandOutput, +} from "./commands/GetItemCommand"; +import { + GetResourcePolicyCommandInput, + GetResourcePolicyCommandOutput, +} from "./commands/GetResourcePolicyCommand"; +import { + ImportTableCommandInput, + ImportTableCommandOutput, +} from "./commands/ImportTableCommand"; +import { + ListBackupsCommandInput, + ListBackupsCommandOutput, +} from "./commands/ListBackupsCommand"; +import { + ListContributorInsightsCommandInput, + ListContributorInsightsCommandOutput, +} from "./commands/ListContributorInsightsCommand"; +import { + ListExportsCommandInput, + ListExportsCommandOutput, +} from "./commands/ListExportsCommand"; +import { + ListGlobalTablesCommandInput, + ListGlobalTablesCommandOutput, +} from "./commands/ListGlobalTablesCommand"; +import { + ListImportsCommandInput, + ListImportsCommandOutput, +} from "./commands/ListImportsCommand"; +import { + ListTablesCommandInput, + ListTablesCommandOutput, +} from "./commands/ListTablesCommand"; +import { + ListTagsOfResourceCommandInput, + ListTagsOfResourceCommandOutput, +} from "./commands/ListTagsOfResourceCommand"; +import { + PutItemCommandInput, + PutItemCommandOutput, +} from "./commands/PutItemCommand"; +import { + PutResourcePolicyCommandInput, + PutResourcePolicyCommandOutput, +} from "./commands/PutResourcePolicyCommand"; +import { QueryCommandInput, QueryCommandOutput } from "./commands/QueryCommand"; +import { + RestoreTableFromBackupCommandInput, + RestoreTableFromBackupCommandOutput, +} from "./commands/RestoreTableFromBackupCommand"; +import { + RestoreTableToPointInTimeCommandInput, + RestoreTableToPointInTimeCommandOutput, +} from "./commands/RestoreTableToPointInTimeCommand"; +import { ScanCommandInput, ScanCommandOutput } 
from "./commands/ScanCommand"; +import { + TagResourceCommandInput, + TagResourceCommandOutput, +} from "./commands/TagResourceCommand"; +import { + TransactGetItemsCommandInput, + TransactGetItemsCommandOutput, +} from "./commands/TransactGetItemsCommand"; +import { + TransactWriteItemsCommandInput, + TransactWriteItemsCommandOutput, +} from "./commands/TransactWriteItemsCommand"; +import { + UntagResourceCommandInput, + UntagResourceCommandOutput, +} from "./commands/UntagResourceCommand"; +import { + UpdateContinuousBackupsCommandInput, + UpdateContinuousBackupsCommandOutput, +} from "./commands/UpdateContinuousBackupsCommand"; +import { + UpdateContributorInsightsCommandInput, + UpdateContributorInsightsCommandOutput, +} from "./commands/UpdateContributorInsightsCommand"; +import { + UpdateGlobalTableCommandInput, + UpdateGlobalTableCommandOutput, +} from "./commands/UpdateGlobalTableCommand"; +import { + UpdateGlobalTableSettingsCommandInput, + UpdateGlobalTableSettingsCommandOutput, +} from "./commands/UpdateGlobalTableSettingsCommand"; +import { + UpdateItemCommandInput, + UpdateItemCommandOutput, +} from "./commands/UpdateItemCommand"; +import { + UpdateKinesisStreamingDestinationCommandInput, + UpdateKinesisStreamingDestinationCommandOutput, +} from "./commands/UpdateKinesisStreamingDestinationCommand"; +import { + UpdateTableCommandInput, + UpdateTableCommandOutput, +} from "./commands/UpdateTableCommand"; +import { + UpdateTableReplicaAutoScalingCommandInput, + UpdateTableReplicaAutoScalingCommandOutput, +} from "./commands/UpdateTableReplicaAutoScalingCommand"; +import { + UpdateTimeToLiveCommandInput, + UpdateTimeToLiveCommandOutput, +} from "./commands/UpdateTimeToLiveCommand"; +import { + ClientInputEndpointParameters, + ClientResolvedEndpointParameters, + EndpointParameters, +} from "./endpoint/EndpointParameters"; +import { RuntimeExtension, RuntimeExtensionsConfig } from "./runtimeExtensions"; +export { __Client }; +export type ServiceInputTypes = 
+ | BatchExecuteStatementCommandInput + | BatchGetItemCommandInput + | BatchWriteItemCommandInput + | CreateBackupCommandInput + | CreateGlobalTableCommandInput + | CreateTableCommandInput + | DeleteBackupCommandInput + | DeleteItemCommandInput + | DeleteResourcePolicyCommandInput + | DeleteTableCommandInput + | DescribeBackupCommandInput + | DescribeContinuousBackupsCommandInput + | DescribeContributorInsightsCommandInput + | DescribeEndpointsCommandInput + | DescribeExportCommandInput + | DescribeGlobalTableCommandInput + | DescribeGlobalTableSettingsCommandInput + | DescribeImportCommandInput + | DescribeKinesisStreamingDestinationCommandInput + | DescribeLimitsCommandInput + | DescribeTableCommandInput + | DescribeTableReplicaAutoScalingCommandInput + | DescribeTimeToLiveCommandInput + | DisableKinesisStreamingDestinationCommandInput + | EnableKinesisStreamingDestinationCommandInput + | ExecuteStatementCommandInput + | ExecuteTransactionCommandInput + | ExportTableToPointInTimeCommandInput + | GetItemCommandInput + | GetResourcePolicyCommandInput + | ImportTableCommandInput + | ListBackupsCommandInput + | ListContributorInsightsCommandInput + | ListExportsCommandInput + | ListGlobalTablesCommandInput + | ListImportsCommandInput + | ListTablesCommandInput + | ListTagsOfResourceCommandInput + | PutItemCommandInput + | PutResourcePolicyCommandInput + | QueryCommandInput + | RestoreTableFromBackupCommandInput + | RestoreTableToPointInTimeCommandInput + | ScanCommandInput + | TagResourceCommandInput + | TransactGetItemsCommandInput + | TransactWriteItemsCommandInput + | UntagResourceCommandInput + | UpdateContinuousBackupsCommandInput + | UpdateContributorInsightsCommandInput + | UpdateGlobalTableCommandInput + | UpdateGlobalTableSettingsCommandInput + | UpdateItemCommandInput + | UpdateKinesisStreamingDestinationCommandInput + | UpdateTableCommandInput + | UpdateTableReplicaAutoScalingCommandInput + | UpdateTimeToLiveCommandInput; +export type ServiceOutputTypes = 
+ | BatchExecuteStatementCommandOutput + | BatchGetItemCommandOutput + | BatchWriteItemCommandOutput + | CreateBackupCommandOutput + | CreateGlobalTableCommandOutput + | CreateTableCommandOutput + | DeleteBackupCommandOutput + | DeleteItemCommandOutput + | DeleteResourcePolicyCommandOutput + | DeleteTableCommandOutput + | DescribeBackupCommandOutput + | DescribeContinuousBackupsCommandOutput + | DescribeContributorInsightsCommandOutput + | DescribeEndpointsCommandOutput + | DescribeExportCommandOutput + | DescribeGlobalTableCommandOutput + | DescribeGlobalTableSettingsCommandOutput + | DescribeImportCommandOutput + | DescribeKinesisStreamingDestinationCommandOutput + | DescribeLimitsCommandOutput + | DescribeTableCommandOutput + | DescribeTableReplicaAutoScalingCommandOutput + | DescribeTimeToLiveCommandOutput + | DisableKinesisStreamingDestinationCommandOutput + | EnableKinesisStreamingDestinationCommandOutput + | ExecuteStatementCommandOutput + | ExecuteTransactionCommandOutput + | ExportTableToPointInTimeCommandOutput + | GetItemCommandOutput + | GetResourcePolicyCommandOutput + | ImportTableCommandOutput + | ListBackupsCommandOutput + | ListContributorInsightsCommandOutput + | ListExportsCommandOutput + | ListGlobalTablesCommandOutput + | ListImportsCommandOutput + | ListTablesCommandOutput + | ListTagsOfResourceCommandOutput + | PutItemCommandOutput + | PutResourcePolicyCommandOutput + | QueryCommandOutput + | RestoreTableFromBackupCommandOutput + | RestoreTableToPointInTimeCommandOutput + | ScanCommandOutput + | TagResourceCommandOutput + | TransactGetItemsCommandOutput + | TransactWriteItemsCommandOutput + | UntagResourceCommandOutput + | UpdateContinuousBackupsCommandOutput + | UpdateContributorInsightsCommandOutput + | UpdateGlobalTableCommandOutput + | UpdateGlobalTableSettingsCommandOutput + | UpdateItemCommandOutput + | UpdateKinesisStreamingDestinationCommandOutput + | UpdateTableCommandOutput + | UpdateTableReplicaAutoScalingCommandOutput + | 
UpdateTimeToLiveCommandOutput; +export interface ClientDefaults + extends Partial<__SmithyConfiguration<__HttpHandlerOptions>> { + requestHandler?: __HttpHandlerUserInput; + sha256?: __ChecksumConstructor | __HashConstructor; + urlParser?: __UrlParser; + bodyLengthChecker?: __BodyLengthCalculator; + streamCollector?: __StreamCollector; + base64Decoder?: __Decoder; + base64Encoder?: __Encoder; + utf8Decoder?: __Decoder; + utf8Encoder?: __Encoder; + runtime?: string; + disableHostPrefix?: boolean; + serviceId?: string; + useDualstackEndpoint?: boolean | __Provider; + useFipsEndpoint?: boolean | __Provider; + region?: string | __Provider; + profile?: string; + accountIdEndpointMode?: + | AccountIdEndpointMode + | __Provider; + defaultUserAgentProvider?: Provider<__UserAgent>; + credentialDefaultProvider?: (input: any) => AwsCredentialIdentityProvider; + maxAttempts?: number | __Provider; + retryMode?: string | __Provider; + logger?: __Logger; + extensions?: RuntimeExtension[]; + defaultsMode?: __DefaultsMode | __Provider<__DefaultsMode>; + endpointDiscoveryEnabledProvider?: __Provider; +} +export type DynamoDBClientConfigType = Partial< + __SmithyConfiguration<__HttpHandlerOptions> +> & + ClientDefaults & + AccountIdEndpointModeInputConfig & + UserAgentInputConfig & + RetryInputConfig & + RegionInputConfig & + HostHeaderInputConfig & + EndpointInputConfig & + HttpAuthSchemeInputConfig & + EndpointDiscoveryInputConfig & + ClientInputEndpointParameters; +export interface DynamoDBClientConfig extends DynamoDBClientConfigType {} +export type DynamoDBClientResolvedConfigType = + __SmithyResolvedConfiguration<__HttpHandlerOptions> & + Required & + RuntimeExtensionsConfig & + AccountIdEndpointModeResolvedConfig & + UserAgentResolvedConfig & + RetryResolvedConfig & + RegionResolvedConfig & + HostHeaderResolvedConfig & + EndpointResolvedConfig & + HttpAuthSchemeResolvedConfig & + EndpointDiscoveryResolvedConfig & + ClientResolvedEndpointParameters; +export interface 
DynamoDBClientResolvedConfig + extends DynamoDBClientResolvedConfigType {} +export declare class DynamoDBClient extends __Client< + __HttpHandlerOptions, + ServiceInputTypes, + ServiceOutputTypes, + DynamoDBClientResolvedConfig +> { + readonly config: DynamoDBClientResolvedConfig; + constructor( + ...[configuration]: __CheckOptionalClientConfig + ); + destroy(): void; +} diff --git a/amplify/functions/fetchDocuments/node_modules/@aws-sdk/client-dynamodb/dist-types/ts3.4/auth/httpAuthExtensionConfiguration.d.ts b/amplify/functions/fetchDocuments/node_modules/@aws-sdk/client-dynamodb/dist-types/ts3.4/auth/httpAuthExtensionConfiguration.d.ts new file mode 100644 index 0000000..236dccc --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@aws-sdk/client-dynamodb/dist-types/ts3.4/auth/httpAuthExtensionConfiguration.d.ts @@ -0,0 +1,32 @@ +import { + AwsCredentialIdentity, + AwsCredentialIdentityProvider, + HttpAuthScheme, +} from "@smithy/types"; +import { DynamoDBHttpAuthSchemeProvider } from "./httpAuthSchemeProvider"; +export interface HttpAuthExtensionConfiguration { + setHttpAuthScheme(httpAuthScheme: HttpAuthScheme): void; + httpAuthSchemes(): HttpAuthScheme[]; + setHttpAuthSchemeProvider( + httpAuthSchemeProvider: DynamoDBHttpAuthSchemeProvider + ): void; + httpAuthSchemeProvider(): DynamoDBHttpAuthSchemeProvider; + setCredentials( + credentials: AwsCredentialIdentity | AwsCredentialIdentityProvider + ): void; + credentials(): + | AwsCredentialIdentity + | AwsCredentialIdentityProvider + | undefined; +} +export type HttpAuthRuntimeConfig = Partial<{ + httpAuthSchemes: HttpAuthScheme[]; + httpAuthSchemeProvider: DynamoDBHttpAuthSchemeProvider; + credentials: AwsCredentialIdentity | AwsCredentialIdentityProvider; +}>; +export declare const getHttpAuthExtensionConfiguration: ( + runtimeConfig: HttpAuthRuntimeConfig +) => HttpAuthExtensionConfiguration; +export declare const resolveHttpAuthRuntimeConfig: ( + config: HttpAuthExtensionConfiguration +) => 
HttpAuthRuntimeConfig; diff --git a/amplify/functions/fetchDocuments/node_modules/@aws-sdk/client-dynamodb/dist-types/ts3.4/auth/httpAuthSchemeProvider.d.ts b/amplify/functions/fetchDocuments/node_modules/@aws-sdk/client-dynamodb/dist-types/ts3.4/auth/httpAuthSchemeProvider.d.ts new file mode 100644 index 0000000..299733c --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@aws-sdk/client-dynamodb/dist-types/ts3.4/auth/httpAuthSchemeProvider.d.ts @@ -0,0 +1,47 @@ +import { + AwsSdkSigV4AuthInputConfig, + AwsSdkSigV4AuthResolvedConfig, + AwsSdkSigV4PreviouslyResolved, +} from "@aws-sdk/core"; +import { + HandlerExecutionContext, + HttpAuthScheme, + HttpAuthSchemeParameters, + HttpAuthSchemeParametersProvider, + HttpAuthSchemeProvider, + Provider, +} from "@smithy/types"; +import { DynamoDBClientResolvedConfig } from "../DynamoDBClient"; +export interface DynamoDBHttpAuthSchemeParameters + extends HttpAuthSchemeParameters { + region?: string; +} +export interface DynamoDBHttpAuthSchemeParametersProvider + extends HttpAuthSchemeParametersProvider< + DynamoDBClientResolvedConfig, + HandlerExecutionContext, + DynamoDBHttpAuthSchemeParameters, + object + > {} +export declare const defaultDynamoDBHttpAuthSchemeParametersProvider: ( + config: DynamoDBClientResolvedConfig, + context: HandlerExecutionContext, + input: object +) => Promise; +export interface DynamoDBHttpAuthSchemeProvider + extends HttpAuthSchemeProvider {} +export declare const defaultDynamoDBHttpAuthSchemeProvider: DynamoDBHttpAuthSchemeProvider; +export interface HttpAuthSchemeInputConfig extends AwsSdkSigV4AuthInputConfig { + authSchemePreference?: string[] | Provider; + httpAuthSchemes?: HttpAuthScheme[]; + httpAuthSchemeProvider?: DynamoDBHttpAuthSchemeProvider; +} +export interface HttpAuthSchemeResolvedConfig + extends AwsSdkSigV4AuthResolvedConfig { + readonly authSchemePreference: Provider; + readonly httpAuthSchemes: HttpAuthScheme[]; + readonly httpAuthSchemeProvider: 
DynamoDBHttpAuthSchemeProvider; +} +export declare const resolveHttpAuthSchemeConfig: ( + config: T & HttpAuthSchemeInputConfig & AwsSdkSigV4PreviouslyResolved +) => T & HttpAuthSchemeResolvedConfig; diff --git a/amplify/functions/fetchDocuments/node_modules/@aws-sdk/client-dynamodb/dist-types/ts3.4/commands/BatchExecuteStatementCommand.d.ts b/amplify/functions/fetchDocuments/node_modules/@aws-sdk/client-dynamodb/dist-types/ts3.4/commands/BatchExecuteStatementCommand.d.ts new file mode 100644 index 0000000..f7c8eb9 --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@aws-sdk/client-dynamodb/dist-types/ts3.4/commands/BatchExecuteStatementCommand.d.ts @@ -0,0 +1,51 @@ +import { Command as $Command } from "@smithy/smithy-client"; +import { MetadataBearer as __MetadataBearer } from "@smithy/types"; +import { + DynamoDBClientResolvedConfig, + ServiceInputTypes, + ServiceOutputTypes, +} from "../DynamoDBClient"; +import { + BatchExecuteStatementInput, + BatchExecuteStatementOutput, +} from "../models/models_0"; +export { __MetadataBearer }; +export { $Command }; +export interface BatchExecuteStatementCommandInput + extends BatchExecuteStatementInput {} +export interface BatchExecuteStatementCommandOutput + extends BatchExecuteStatementOutput, + __MetadataBearer {} +declare const BatchExecuteStatementCommand_base: { + new ( + input: BatchExecuteStatementCommandInput + ): import("@smithy/smithy-client").CommandImpl< + BatchExecuteStatementCommandInput, + BatchExecuteStatementCommandOutput, + DynamoDBClientResolvedConfig, + ServiceInputTypes, + ServiceOutputTypes + >; + new ( + __0_0: BatchExecuteStatementCommandInput + ): import("@smithy/smithy-client").CommandImpl< + BatchExecuteStatementCommandInput, + BatchExecuteStatementCommandOutput, + DynamoDBClientResolvedConfig, + ServiceInputTypes, + ServiceOutputTypes + >; + getEndpointParameterInstructions(): import("@smithy/middleware-endpoint").EndpointParameterInstructions; +}; +export declare class 
BatchExecuteStatementCommand extends BatchExecuteStatementCommand_base { + protected static __types: { + api: { + input: BatchExecuteStatementInput; + output: BatchExecuteStatementOutput; + }; + sdk: { + input: BatchExecuteStatementCommandInput; + output: BatchExecuteStatementCommandOutput; + }; + }; +} diff --git a/amplify/functions/fetchDocuments/node_modules/@aws-sdk/client-dynamodb/dist-types/ts3.4/commands/BatchGetItemCommand.d.ts b/amplify/functions/fetchDocuments/node_modules/@aws-sdk/client-dynamodb/dist-types/ts3.4/commands/BatchGetItemCommand.d.ts new file mode 100644 index 0000000..7e11a34 --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@aws-sdk/client-dynamodb/dist-types/ts3.4/commands/BatchGetItemCommand.d.ts @@ -0,0 +1,47 @@ +import { Command as $Command } from "@smithy/smithy-client"; +import { MetadataBearer as __MetadataBearer } from "@smithy/types"; +import { + DynamoDBClientResolvedConfig, + ServiceInputTypes, + ServiceOutputTypes, +} from "../DynamoDBClient"; +import { BatchGetItemInput, BatchGetItemOutput } from "../models/models_0"; +export { __MetadataBearer }; +export { $Command }; +export interface BatchGetItemCommandInput extends BatchGetItemInput {} +export interface BatchGetItemCommandOutput + extends BatchGetItemOutput, + __MetadataBearer {} +declare const BatchGetItemCommand_base: { + new ( + input: BatchGetItemCommandInput + ): import("@smithy/smithy-client").CommandImpl< + BatchGetItemCommandInput, + BatchGetItemCommandOutput, + DynamoDBClientResolvedConfig, + ServiceInputTypes, + ServiceOutputTypes + >; + new ( + __0_0: BatchGetItemCommandInput + ): import("@smithy/smithy-client").CommandImpl< + BatchGetItemCommandInput, + BatchGetItemCommandOutput, + DynamoDBClientResolvedConfig, + ServiceInputTypes, + ServiceOutputTypes + >; + getEndpointParameterInstructions(): import("@smithy/middleware-endpoint").EndpointParameterInstructions; +}; +export declare class BatchGetItemCommand extends BatchGetItemCommand_base { + 
protected static __types: { + api: { + input: BatchGetItemInput; + output: BatchGetItemOutput; + }; + sdk: { + input: BatchGetItemCommandInput; + output: BatchGetItemCommandOutput; + }; + }; +} diff --git a/amplify/functions/fetchDocuments/node_modules/@aws-sdk/client-dynamodb/dist-types/ts3.4/commands/BatchWriteItemCommand.d.ts b/amplify/functions/fetchDocuments/node_modules/@aws-sdk/client-dynamodb/dist-types/ts3.4/commands/BatchWriteItemCommand.d.ts new file mode 100644 index 0000000..0542d2f --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@aws-sdk/client-dynamodb/dist-types/ts3.4/commands/BatchWriteItemCommand.d.ts @@ -0,0 +1,47 @@ +import { Command as $Command } from "@smithy/smithy-client"; +import { MetadataBearer as __MetadataBearer } from "@smithy/types"; +import { + DynamoDBClientResolvedConfig, + ServiceInputTypes, + ServiceOutputTypes, +} from "../DynamoDBClient"; +import { BatchWriteItemInput, BatchWriteItemOutput } from "../models/models_0"; +export { __MetadataBearer }; +export { $Command }; +export interface BatchWriteItemCommandInput extends BatchWriteItemInput {} +export interface BatchWriteItemCommandOutput + extends BatchWriteItemOutput, + __MetadataBearer {} +declare const BatchWriteItemCommand_base: { + new ( + input: BatchWriteItemCommandInput + ): import("@smithy/smithy-client").CommandImpl< + BatchWriteItemCommandInput, + BatchWriteItemCommandOutput, + DynamoDBClientResolvedConfig, + ServiceInputTypes, + ServiceOutputTypes + >; + new ( + __0_0: BatchWriteItemCommandInput + ): import("@smithy/smithy-client").CommandImpl< + BatchWriteItemCommandInput, + BatchWriteItemCommandOutput, + DynamoDBClientResolvedConfig, + ServiceInputTypes, + ServiceOutputTypes + >; + getEndpointParameterInstructions(): import("@smithy/middleware-endpoint").EndpointParameterInstructions; +}; +export declare class BatchWriteItemCommand extends BatchWriteItemCommand_base { + protected static __types: { + api: { + input: BatchWriteItemInput; + output: 
BatchWriteItemOutput; + }; + sdk: { + input: BatchWriteItemCommandInput; + output: BatchWriteItemCommandOutput; + }; + }; +} diff --git a/amplify/functions/fetchDocuments/node_modules/@aws-sdk/client-dynamodb/dist-types/ts3.4/commands/CreateBackupCommand.d.ts b/amplify/functions/fetchDocuments/node_modules/@aws-sdk/client-dynamodb/dist-types/ts3.4/commands/CreateBackupCommand.d.ts new file mode 100644 index 0000000..6692e00 --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@aws-sdk/client-dynamodb/dist-types/ts3.4/commands/CreateBackupCommand.d.ts @@ -0,0 +1,47 @@ +import { Command as $Command } from "@smithy/smithy-client"; +import { MetadataBearer as __MetadataBearer } from "@smithy/types"; +import { + DynamoDBClientResolvedConfig, + ServiceInputTypes, + ServiceOutputTypes, +} from "../DynamoDBClient"; +import { CreateBackupInput, CreateBackupOutput } from "../models/models_0"; +export { __MetadataBearer }; +export { $Command }; +export interface CreateBackupCommandInput extends CreateBackupInput {} +export interface CreateBackupCommandOutput + extends CreateBackupOutput, + __MetadataBearer {} +declare const CreateBackupCommand_base: { + new ( + input: CreateBackupCommandInput + ): import("@smithy/smithy-client").CommandImpl< + CreateBackupCommandInput, + CreateBackupCommandOutput, + DynamoDBClientResolvedConfig, + ServiceInputTypes, + ServiceOutputTypes + >; + new ( + __0_0: CreateBackupCommandInput + ): import("@smithy/smithy-client").CommandImpl< + CreateBackupCommandInput, + CreateBackupCommandOutput, + DynamoDBClientResolvedConfig, + ServiceInputTypes, + ServiceOutputTypes + >; + getEndpointParameterInstructions(): import("@smithy/middleware-endpoint").EndpointParameterInstructions; +}; +export declare class CreateBackupCommand extends CreateBackupCommand_base { + protected static __types: { + api: { + input: CreateBackupInput; + output: CreateBackupOutput; + }; + sdk: { + input: CreateBackupCommandInput; + output: CreateBackupCommandOutput; 
+ }; + }; +} diff --git a/amplify/functions/fetchDocuments/node_modules/@aws-sdk/client-dynamodb/dist-types/ts3.4/commands/CreateGlobalTableCommand.d.ts b/amplify/functions/fetchDocuments/node_modules/@aws-sdk/client-dynamodb/dist-types/ts3.4/commands/CreateGlobalTableCommand.d.ts new file mode 100644 index 0000000..65564e3 --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@aws-sdk/client-dynamodb/dist-types/ts3.4/commands/CreateGlobalTableCommand.d.ts @@ -0,0 +1,50 @@ +import { Command as $Command } from "@smithy/smithy-client"; +import { MetadataBearer as __MetadataBearer } from "@smithy/types"; +import { + DynamoDBClientResolvedConfig, + ServiceInputTypes, + ServiceOutputTypes, +} from "../DynamoDBClient"; +import { + CreateGlobalTableInput, + CreateGlobalTableOutput, +} from "../models/models_0"; +export { __MetadataBearer }; +export { $Command }; +export interface CreateGlobalTableCommandInput extends CreateGlobalTableInput {} +export interface CreateGlobalTableCommandOutput + extends CreateGlobalTableOutput, + __MetadataBearer {} +declare const CreateGlobalTableCommand_base: { + new ( + input: CreateGlobalTableCommandInput + ): import("@smithy/smithy-client").CommandImpl< + CreateGlobalTableCommandInput, + CreateGlobalTableCommandOutput, + DynamoDBClientResolvedConfig, + ServiceInputTypes, + ServiceOutputTypes + >; + new ( + __0_0: CreateGlobalTableCommandInput + ): import("@smithy/smithy-client").CommandImpl< + CreateGlobalTableCommandInput, + CreateGlobalTableCommandOutput, + DynamoDBClientResolvedConfig, + ServiceInputTypes, + ServiceOutputTypes + >; + getEndpointParameterInstructions(): import("@smithy/middleware-endpoint").EndpointParameterInstructions; +}; +export declare class CreateGlobalTableCommand extends CreateGlobalTableCommand_base { + protected static __types: { + api: { + input: CreateGlobalTableInput; + output: CreateGlobalTableOutput; + }; + sdk: { + input: CreateGlobalTableCommandInput; + output: 
CreateGlobalTableCommandOutput; + }; + }; +} diff --git a/amplify/functions/fetchDocuments/node_modules/@aws-sdk/client-dynamodb/dist-types/ts3.4/commands/CreateTableCommand.d.ts b/amplify/functions/fetchDocuments/node_modules/@aws-sdk/client-dynamodb/dist-types/ts3.4/commands/CreateTableCommand.d.ts new file mode 100644 index 0000000..5761cdb --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@aws-sdk/client-dynamodb/dist-types/ts3.4/commands/CreateTableCommand.d.ts @@ -0,0 +1,47 @@ +import { Command as $Command } from "@smithy/smithy-client"; +import { MetadataBearer as __MetadataBearer } from "@smithy/types"; +import { + DynamoDBClientResolvedConfig, + ServiceInputTypes, + ServiceOutputTypes, +} from "../DynamoDBClient"; +import { CreateTableInput, CreateTableOutput } from "../models/models_0"; +export { __MetadataBearer }; +export { $Command }; +export interface CreateTableCommandInput extends CreateTableInput {} +export interface CreateTableCommandOutput + extends CreateTableOutput, + __MetadataBearer {} +declare const CreateTableCommand_base: { + new ( + input: CreateTableCommandInput + ): import("@smithy/smithy-client").CommandImpl< + CreateTableCommandInput, + CreateTableCommandOutput, + DynamoDBClientResolvedConfig, + ServiceInputTypes, + ServiceOutputTypes + >; + new ( + __0_0: CreateTableCommandInput + ): import("@smithy/smithy-client").CommandImpl< + CreateTableCommandInput, + CreateTableCommandOutput, + DynamoDBClientResolvedConfig, + ServiceInputTypes, + ServiceOutputTypes + >; + getEndpointParameterInstructions(): import("@smithy/middleware-endpoint").EndpointParameterInstructions; +}; +export declare class CreateTableCommand extends CreateTableCommand_base { + protected static __types: { + api: { + input: CreateTableInput; + output: CreateTableOutput; + }; + sdk: { + input: CreateTableCommandInput; + output: CreateTableCommandOutput; + }; + }; +} diff --git 
a/amplify/functions/fetchDocuments/node_modules/@aws-sdk/client-dynamodb/dist-types/ts3.4/commands/DeleteBackupCommand.d.ts b/amplify/functions/fetchDocuments/node_modules/@aws-sdk/client-dynamodb/dist-types/ts3.4/commands/DeleteBackupCommand.d.ts new file mode 100644 index 0000000..0b19c93 --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@aws-sdk/client-dynamodb/dist-types/ts3.4/commands/DeleteBackupCommand.d.ts @@ -0,0 +1,47 @@ +import { Command as $Command } from "@smithy/smithy-client"; +import { MetadataBearer as __MetadataBearer } from "@smithy/types"; +import { + DynamoDBClientResolvedConfig, + ServiceInputTypes, + ServiceOutputTypes, +} from "../DynamoDBClient"; +import { DeleteBackupInput, DeleteBackupOutput } from "../models/models_0"; +export { __MetadataBearer }; +export { $Command }; +export interface DeleteBackupCommandInput extends DeleteBackupInput {} +export interface DeleteBackupCommandOutput + extends DeleteBackupOutput, + __MetadataBearer {} +declare const DeleteBackupCommand_base: { + new ( + input: DeleteBackupCommandInput + ): import("@smithy/smithy-client").CommandImpl< + DeleteBackupCommandInput, + DeleteBackupCommandOutput, + DynamoDBClientResolvedConfig, + ServiceInputTypes, + ServiceOutputTypes + >; + new ( + __0_0: DeleteBackupCommandInput + ): import("@smithy/smithy-client").CommandImpl< + DeleteBackupCommandInput, + DeleteBackupCommandOutput, + DynamoDBClientResolvedConfig, + ServiceInputTypes, + ServiceOutputTypes + >; + getEndpointParameterInstructions(): import("@smithy/middleware-endpoint").EndpointParameterInstructions; +}; +export declare class DeleteBackupCommand extends DeleteBackupCommand_base { + protected static __types: { + api: { + input: DeleteBackupInput; + output: DeleteBackupOutput; + }; + sdk: { + input: DeleteBackupCommandInput; + output: DeleteBackupCommandOutput; + }; + }; +} diff --git 
a/amplify/functions/fetchDocuments/node_modules/@aws-sdk/client-dynamodb/dist-types/ts3.4/commands/DeleteItemCommand.d.ts b/amplify/functions/fetchDocuments/node_modules/@aws-sdk/client-dynamodb/dist-types/ts3.4/commands/DeleteItemCommand.d.ts new file mode 100644 index 0000000..de7976c --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@aws-sdk/client-dynamodb/dist-types/ts3.4/commands/DeleteItemCommand.d.ts @@ -0,0 +1,47 @@ +import { Command as $Command } from "@smithy/smithy-client"; +import { MetadataBearer as __MetadataBearer } from "@smithy/types"; +import { + DynamoDBClientResolvedConfig, + ServiceInputTypes, + ServiceOutputTypes, +} from "../DynamoDBClient"; +import { DeleteItemInput, DeleteItemOutput } from "../models/models_0"; +export { __MetadataBearer }; +export { $Command }; +export interface DeleteItemCommandInput extends DeleteItemInput {} +export interface DeleteItemCommandOutput + extends DeleteItemOutput, + __MetadataBearer {} +declare const DeleteItemCommand_base: { + new ( + input: DeleteItemCommandInput + ): import("@smithy/smithy-client").CommandImpl< + DeleteItemCommandInput, + DeleteItemCommandOutput, + DynamoDBClientResolvedConfig, + ServiceInputTypes, + ServiceOutputTypes + >; + new ( + __0_0: DeleteItemCommandInput + ): import("@smithy/smithy-client").CommandImpl< + DeleteItemCommandInput, + DeleteItemCommandOutput, + DynamoDBClientResolvedConfig, + ServiceInputTypes, + ServiceOutputTypes + >; + getEndpointParameterInstructions(): import("@smithy/middleware-endpoint").EndpointParameterInstructions; +}; +export declare class DeleteItemCommand extends DeleteItemCommand_base { + protected static __types: { + api: { + input: DeleteItemInput; + output: DeleteItemOutput; + }; + sdk: { + input: DeleteItemCommandInput; + output: DeleteItemCommandOutput; + }; + }; +} diff --git a/amplify/functions/fetchDocuments/node_modules/@aws-sdk/client-dynamodb/dist-types/ts3.4/commands/DeleteResourcePolicyCommand.d.ts 
b/amplify/functions/fetchDocuments/node_modules/@aws-sdk/client-dynamodb/dist-types/ts3.4/commands/DeleteResourcePolicyCommand.d.ts new file mode 100644 index 0000000..4aad4b7 --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@aws-sdk/client-dynamodb/dist-types/ts3.4/commands/DeleteResourcePolicyCommand.d.ts @@ -0,0 +1,51 @@ +import { Command as $Command } from "@smithy/smithy-client"; +import { MetadataBearer as __MetadataBearer } from "@smithy/types"; +import { + DynamoDBClientResolvedConfig, + ServiceInputTypes, + ServiceOutputTypes, +} from "../DynamoDBClient"; +import { + DeleteResourcePolicyInput, + DeleteResourcePolicyOutput, +} from "../models/models_0"; +export { __MetadataBearer }; +export { $Command }; +export interface DeleteResourcePolicyCommandInput + extends DeleteResourcePolicyInput {} +export interface DeleteResourcePolicyCommandOutput + extends DeleteResourcePolicyOutput, + __MetadataBearer {} +declare const DeleteResourcePolicyCommand_base: { + new ( + input: DeleteResourcePolicyCommandInput + ): import("@smithy/smithy-client").CommandImpl< + DeleteResourcePolicyCommandInput, + DeleteResourcePolicyCommandOutput, + DynamoDBClientResolvedConfig, + ServiceInputTypes, + ServiceOutputTypes + >; + new ( + __0_0: DeleteResourcePolicyCommandInput + ): import("@smithy/smithy-client").CommandImpl< + DeleteResourcePolicyCommandInput, + DeleteResourcePolicyCommandOutput, + DynamoDBClientResolvedConfig, + ServiceInputTypes, + ServiceOutputTypes + >; + getEndpointParameterInstructions(): import("@smithy/middleware-endpoint").EndpointParameterInstructions; +}; +export declare class DeleteResourcePolicyCommand extends DeleteResourcePolicyCommand_base { + protected static __types: { + api: { + input: DeleteResourcePolicyInput; + output: DeleteResourcePolicyOutput; + }; + sdk: { + input: DeleteResourcePolicyCommandInput; + output: DeleteResourcePolicyCommandOutput; + }; + }; +} diff --git 
a/amplify/functions/fetchDocuments/node_modules/@aws-sdk/client-dynamodb/dist-types/ts3.4/commands/DeleteTableCommand.d.ts b/amplify/functions/fetchDocuments/node_modules/@aws-sdk/client-dynamodb/dist-types/ts3.4/commands/DeleteTableCommand.d.ts new file mode 100644 index 0000000..5dc9c2b --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@aws-sdk/client-dynamodb/dist-types/ts3.4/commands/DeleteTableCommand.d.ts @@ -0,0 +1,47 @@ +import { Command as $Command } from "@smithy/smithy-client"; +import { MetadataBearer as __MetadataBearer } from "@smithy/types"; +import { + DynamoDBClientResolvedConfig, + ServiceInputTypes, + ServiceOutputTypes, +} from "../DynamoDBClient"; +import { DeleteTableInput, DeleteTableOutput } from "../models/models_0"; +export { __MetadataBearer }; +export { $Command }; +export interface DeleteTableCommandInput extends DeleteTableInput {} +export interface DeleteTableCommandOutput + extends DeleteTableOutput, + __MetadataBearer {} +declare const DeleteTableCommand_base: { + new ( + input: DeleteTableCommandInput + ): import("@smithy/smithy-client").CommandImpl< + DeleteTableCommandInput, + DeleteTableCommandOutput, + DynamoDBClientResolvedConfig, + ServiceInputTypes, + ServiceOutputTypes + >; + new ( + __0_0: DeleteTableCommandInput + ): import("@smithy/smithy-client").CommandImpl< + DeleteTableCommandInput, + DeleteTableCommandOutput, + DynamoDBClientResolvedConfig, + ServiceInputTypes, + ServiceOutputTypes + >; + getEndpointParameterInstructions(): import("@smithy/middleware-endpoint").EndpointParameterInstructions; +}; +export declare class DeleteTableCommand extends DeleteTableCommand_base { + protected static __types: { + api: { + input: DeleteTableInput; + output: DeleteTableOutput; + }; + sdk: { + input: DeleteTableCommandInput; + output: DeleteTableCommandOutput; + }; + }; +} diff --git a/amplify/functions/fetchDocuments/node_modules/@aws-sdk/client-dynamodb/dist-types/ts3.4/commands/DescribeBackupCommand.d.ts 
b/amplify/functions/fetchDocuments/node_modules/@aws-sdk/client-dynamodb/dist-types/ts3.4/commands/DescribeBackupCommand.d.ts new file mode 100644 index 0000000..e8a3f6f --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@aws-sdk/client-dynamodb/dist-types/ts3.4/commands/DescribeBackupCommand.d.ts @@ -0,0 +1,47 @@ +import { Command as $Command } from "@smithy/smithy-client"; +import { MetadataBearer as __MetadataBearer } from "@smithy/types"; +import { + DynamoDBClientResolvedConfig, + ServiceInputTypes, + ServiceOutputTypes, +} from "../DynamoDBClient"; +import { DescribeBackupInput, DescribeBackupOutput } from "../models/models_0"; +export { __MetadataBearer }; +export { $Command }; +export interface DescribeBackupCommandInput extends DescribeBackupInput {} +export interface DescribeBackupCommandOutput + extends DescribeBackupOutput, + __MetadataBearer {} +declare const DescribeBackupCommand_base: { + new ( + input: DescribeBackupCommandInput + ): import("@smithy/smithy-client").CommandImpl< + DescribeBackupCommandInput, + DescribeBackupCommandOutput, + DynamoDBClientResolvedConfig, + ServiceInputTypes, + ServiceOutputTypes + >; + new ( + __0_0: DescribeBackupCommandInput + ): import("@smithy/smithy-client").CommandImpl< + DescribeBackupCommandInput, + DescribeBackupCommandOutput, + DynamoDBClientResolvedConfig, + ServiceInputTypes, + ServiceOutputTypes + >; + getEndpointParameterInstructions(): import("@smithy/middleware-endpoint").EndpointParameterInstructions; +}; +export declare class DescribeBackupCommand extends DescribeBackupCommand_base { + protected static __types: { + api: { + input: DescribeBackupInput; + output: DescribeBackupOutput; + }; + sdk: { + input: DescribeBackupCommandInput; + output: DescribeBackupCommandOutput; + }; + }; +} diff --git a/amplify/functions/fetchDocuments/node_modules/@aws-sdk/client-dynamodb/dist-types/ts3.4/commands/DescribeContinuousBackupsCommand.d.ts 
b/amplify/functions/fetchDocuments/node_modules/@aws-sdk/client-dynamodb/dist-types/ts3.4/commands/DescribeContinuousBackupsCommand.d.ts new file mode 100644 index 0000000..4bcc737 --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@aws-sdk/client-dynamodb/dist-types/ts3.4/commands/DescribeContinuousBackupsCommand.d.ts @@ -0,0 +1,51 @@ +import { Command as $Command } from "@smithy/smithy-client"; +import { MetadataBearer as __MetadataBearer } from "@smithy/types"; +import { + DynamoDBClientResolvedConfig, + ServiceInputTypes, + ServiceOutputTypes, +} from "../DynamoDBClient"; +import { + DescribeContinuousBackupsInput, + DescribeContinuousBackupsOutput, +} from "../models/models_0"; +export { __MetadataBearer }; +export { $Command }; +export interface DescribeContinuousBackupsCommandInput + extends DescribeContinuousBackupsInput {} +export interface DescribeContinuousBackupsCommandOutput + extends DescribeContinuousBackupsOutput, + __MetadataBearer {} +declare const DescribeContinuousBackupsCommand_base: { + new ( + input: DescribeContinuousBackupsCommandInput + ): import("@smithy/smithy-client").CommandImpl< + DescribeContinuousBackupsCommandInput, + DescribeContinuousBackupsCommandOutput, + DynamoDBClientResolvedConfig, + ServiceInputTypes, + ServiceOutputTypes + >; + new ( + __0_0: DescribeContinuousBackupsCommandInput + ): import("@smithy/smithy-client").CommandImpl< + DescribeContinuousBackupsCommandInput, + DescribeContinuousBackupsCommandOutput, + DynamoDBClientResolvedConfig, + ServiceInputTypes, + ServiceOutputTypes + >; + getEndpointParameterInstructions(): import("@smithy/middleware-endpoint").EndpointParameterInstructions; +}; +export declare class DescribeContinuousBackupsCommand extends DescribeContinuousBackupsCommand_base { + protected static __types: { + api: { + input: DescribeContinuousBackupsInput; + output: DescribeContinuousBackupsOutput; + }; + sdk: { + input: DescribeContinuousBackupsCommandInput; + output: 
DescribeContinuousBackupsCommandOutput; + }; + }; +} diff --git a/amplify/functions/fetchDocuments/node_modules/@aws-sdk/client-dynamodb/dist-types/ts3.4/commands/DescribeContributorInsightsCommand.d.ts b/amplify/functions/fetchDocuments/node_modules/@aws-sdk/client-dynamodb/dist-types/ts3.4/commands/DescribeContributorInsightsCommand.d.ts new file mode 100644 index 0000000..09a11d9 --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@aws-sdk/client-dynamodb/dist-types/ts3.4/commands/DescribeContributorInsightsCommand.d.ts @@ -0,0 +1,51 @@ +import { Command as $Command } from "@smithy/smithy-client"; +import { MetadataBearer as __MetadataBearer } from "@smithy/types"; +import { + DynamoDBClientResolvedConfig, + ServiceInputTypes, + ServiceOutputTypes, +} from "../DynamoDBClient"; +import { + DescribeContributorInsightsInput, + DescribeContributorInsightsOutput, +} from "../models/models_0"; +export { __MetadataBearer }; +export { $Command }; +export interface DescribeContributorInsightsCommandInput + extends DescribeContributorInsightsInput {} +export interface DescribeContributorInsightsCommandOutput + extends DescribeContributorInsightsOutput, + __MetadataBearer {} +declare const DescribeContributorInsightsCommand_base: { + new ( + input: DescribeContributorInsightsCommandInput + ): import("@smithy/smithy-client").CommandImpl< + DescribeContributorInsightsCommandInput, + DescribeContributorInsightsCommandOutput, + DynamoDBClientResolvedConfig, + ServiceInputTypes, + ServiceOutputTypes + >; + new ( + __0_0: DescribeContributorInsightsCommandInput + ): import("@smithy/smithy-client").CommandImpl< + DescribeContributorInsightsCommandInput, + DescribeContributorInsightsCommandOutput, + DynamoDBClientResolvedConfig, + ServiceInputTypes, + ServiceOutputTypes + >; + getEndpointParameterInstructions(): import("@smithy/middleware-endpoint").EndpointParameterInstructions; +}; +export declare class DescribeContributorInsightsCommand extends 
DescribeContributorInsightsCommand_base { + protected static __types: { + api: { + input: DescribeContributorInsightsInput; + output: DescribeContributorInsightsOutput; + }; + sdk: { + input: DescribeContributorInsightsCommandInput; + output: DescribeContributorInsightsCommandOutput; + }; + }; +} diff --git a/amplify/functions/fetchDocuments/node_modules/@aws-sdk/client-dynamodb/dist-types/ts3.4/commands/DescribeEndpointsCommand.d.ts b/amplify/functions/fetchDocuments/node_modules/@aws-sdk/client-dynamodb/dist-types/ts3.4/commands/DescribeEndpointsCommand.d.ts new file mode 100644 index 0000000..2f88f7e --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@aws-sdk/client-dynamodb/dist-types/ts3.4/commands/DescribeEndpointsCommand.d.ts @@ -0,0 +1,51 @@ +import { Command as $Command } from "@smithy/smithy-client"; +import { MetadataBearer as __MetadataBearer } from "@smithy/types"; +import { + DynamoDBClientResolvedConfig, + ServiceInputTypes, + ServiceOutputTypes, +} from "../DynamoDBClient"; +import { + DescribeEndpointsRequest, + DescribeEndpointsResponse, +} from "../models/models_0"; +export { __MetadataBearer }; +export { $Command }; +export interface DescribeEndpointsCommandInput + extends DescribeEndpointsRequest {} +export interface DescribeEndpointsCommandOutput + extends DescribeEndpointsResponse, + __MetadataBearer {} +declare const DescribeEndpointsCommand_base: { + new ( + input: DescribeEndpointsCommandInput + ): import("@smithy/smithy-client").CommandImpl< + DescribeEndpointsCommandInput, + DescribeEndpointsCommandOutput, + DynamoDBClientResolvedConfig, + ServiceInputTypes, + ServiceOutputTypes + >; + new ( + ...[input]: [] | [DescribeEndpointsCommandInput] + ): import("@smithy/smithy-client").CommandImpl< + DescribeEndpointsCommandInput, + DescribeEndpointsCommandOutput, + DynamoDBClientResolvedConfig, + ServiceInputTypes, + ServiceOutputTypes + >; + getEndpointParameterInstructions(): 
import("@smithy/middleware-endpoint").EndpointParameterInstructions; +}; +export declare class DescribeEndpointsCommand extends DescribeEndpointsCommand_base { + protected static __types: { + api: { + input: {}; + output: DescribeEndpointsResponse; + }; + sdk: { + input: DescribeEndpointsCommandInput; + output: DescribeEndpointsCommandOutput; + }; + }; +} diff --git a/amplify/functions/fetchDocuments/node_modules/@aws-sdk/client-dynamodb/dist-types/ts3.4/commands/DescribeExportCommand.d.ts b/amplify/functions/fetchDocuments/node_modules/@aws-sdk/client-dynamodb/dist-types/ts3.4/commands/DescribeExportCommand.d.ts new file mode 100644 index 0000000..81e570f --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@aws-sdk/client-dynamodb/dist-types/ts3.4/commands/DescribeExportCommand.d.ts @@ -0,0 +1,47 @@ +import { Command as $Command } from "@smithy/smithy-client"; +import { MetadataBearer as __MetadataBearer } from "@smithy/types"; +import { + DynamoDBClientResolvedConfig, + ServiceInputTypes, + ServiceOutputTypes, +} from "../DynamoDBClient"; +import { DescribeExportInput, DescribeExportOutput } from "../models/models_0"; +export { __MetadataBearer }; +export { $Command }; +export interface DescribeExportCommandInput extends DescribeExportInput {} +export interface DescribeExportCommandOutput + extends DescribeExportOutput, + __MetadataBearer {} +declare const DescribeExportCommand_base: { + new ( + input: DescribeExportCommandInput + ): import("@smithy/smithy-client").CommandImpl< + DescribeExportCommandInput, + DescribeExportCommandOutput, + DynamoDBClientResolvedConfig, + ServiceInputTypes, + ServiceOutputTypes + >; + new ( + __0_0: DescribeExportCommandInput + ): import("@smithy/smithy-client").CommandImpl< + DescribeExportCommandInput, + DescribeExportCommandOutput, + DynamoDBClientResolvedConfig, + ServiceInputTypes, + ServiceOutputTypes + >; + getEndpointParameterInstructions(): import("@smithy/middleware-endpoint").EndpointParameterInstructions; 
+}; +export declare class DescribeExportCommand extends DescribeExportCommand_base { + protected static __types: { + api: { + input: DescribeExportInput; + output: DescribeExportOutput; + }; + sdk: { + input: DescribeExportCommandInput; + output: DescribeExportCommandOutput; + }; + }; +} diff --git a/amplify/functions/fetchDocuments/node_modules/@aws-sdk/client-dynamodb/dist-types/ts3.4/commands/DescribeGlobalTableCommand.d.ts b/amplify/functions/fetchDocuments/node_modules/@aws-sdk/client-dynamodb/dist-types/ts3.4/commands/DescribeGlobalTableCommand.d.ts new file mode 100644 index 0000000..55ef067 --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@aws-sdk/client-dynamodb/dist-types/ts3.4/commands/DescribeGlobalTableCommand.d.ts @@ -0,0 +1,51 @@ +import { Command as $Command } from "@smithy/smithy-client"; +import { MetadataBearer as __MetadataBearer } from "@smithy/types"; +import { + DynamoDBClientResolvedConfig, + ServiceInputTypes, + ServiceOutputTypes, +} from "../DynamoDBClient"; +import { + DescribeGlobalTableInput, + DescribeGlobalTableOutput, +} from "../models/models_0"; +export { __MetadataBearer }; +export { $Command }; +export interface DescribeGlobalTableCommandInput + extends DescribeGlobalTableInput {} +export interface DescribeGlobalTableCommandOutput + extends DescribeGlobalTableOutput, + __MetadataBearer {} +declare const DescribeGlobalTableCommand_base: { + new ( + input: DescribeGlobalTableCommandInput + ): import("@smithy/smithy-client").CommandImpl< + DescribeGlobalTableCommandInput, + DescribeGlobalTableCommandOutput, + DynamoDBClientResolvedConfig, + ServiceInputTypes, + ServiceOutputTypes + >; + new ( + __0_0: DescribeGlobalTableCommandInput + ): import("@smithy/smithy-client").CommandImpl< + DescribeGlobalTableCommandInput, + DescribeGlobalTableCommandOutput, + DynamoDBClientResolvedConfig, + ServiceInputTypes, + ServiceOutputTypes + >; + getEndpointParameterInstructions(): 
import("@smithy/middleware-endpoint").EndpointParameterInstructions; +}; +export declare class DescribeGlobalTableCommand extends DescribeGlobalTableCommand_base { + protected static __types: { + api: { + input: DescribeGlobalTableInput; + output: DescribeGlobalTableOutput; + }; + sdk: { + input: DescribeGlobalTableCommandInput; + output: DescribeGlobalTableCommandOutput; + }; + }; +} diff --git a/amplify/functions/fetchDocuments/node_modules/@aws-sdk/client-dynamodb/dist-types/ts3.4/commands/DescribeGlobalTableSettingsCommand.d.ts b/amplify/functions/fetchDocuments/node_modules/@aws-sdk/client-dynamodb/dist-types/ts3.4/commands/DescribeGlobalTableSettingsCommand.d.ts new file mode 100644 index 0000000..7cf5373 --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@aws-sdk/client-dynamodb/dist-types/ts3.4/commands/DescribeGlobalTableSettingsCommand.d.ts @@ -0,0 +1,51 @@ +import { Command as $Command } from "@smithy/smithy-client"; +import { MetadataBearer as __MetadataBearer } from "@smithy/types"; +import { + DynamoDBClientResolvedConfig, + ServiceInputTypes, + ServiceOutputTypes, +} from "../DynamoDBClient"; +import { + DescribeGlobalTableSettingsInput, + DescribeGlobalTableSettingsOutput, +} from "../models/models_0"; +export { __MetadataBearer }; +export { $Command }; +export interface DescribeGlobalTableSettingsCommandInput + extends DescribeGlobalTableSettingsInput {} +export interface DescribeGlobalTableSettingsCommandOutput + extends DescribeGlobalTableSettingsOutput, + __MetadataBearer {} +declare const DescribeGlobalTableSettingsCommand_base: { + new ( + input: DescribeGlobalTableSettingsCommandInput + ): import("@smithy/smithy-client").CommandImpl< + DescribeGlobalTableSettingsCommandInput, + DescribeGlobalTableSettingsCommandOutput, + DynamoDBClientResolvedConfig, + ServiceInputTypes, + ServiceOutputTypes + >; + new ( + __0_0: DescribeGlobalTableSettingsCommandInput + ): import("@smithy/smithy-client").CommandImpl< + 
DescribeGlobalTableSettingsCommandInput, + DescribeGlobalTableSettingsCommandOutput, + DynamoDBClientResolvedConfig, + ServiceInputTypes, + ServiceOutputTypes + >; + getEndpointParameterInstructions(): import("@smithy/middleware-endpoint").EndpointParameterInstructions; +}; +export declare class DescribeGlobalTableSettingsCommand extends DescribeGlobalTableSettingsCommand_base { + protected static __types: { + api: { + input: DescribeGlobalTableSettingsInput; + output: DescribeGlobalTableSettingsOutput; + }; + sdk: { + input: DescribeGlobalTableSettingsCommandInput; + output: DescribeGlobalTableSettingsCommandOutput; + }; + }; +} diff --git a/amplify/functions/fetchDocuments/node_modules/@aws-sdk/client-dynamodb/dist-types/ts3.4/commands/DescribeImportCommand.d.ts b/amplify/functions/fetchDocuments/node_modules/@aws-sdk/client-dynamodb/dist-types/ts3.4/commands/DescribeImportCommand.d.ts new file mode 100644 index 0000000..eeaa9b9 --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@aws-sdk/client-dynamodb/dist-types/ts3.4/commands/DescribeImportCommand.d.ts @@ -0,0 +1,47 @@ +import { Command as $Command } from "@smithy/smithy-client"; +import { MetadataBearer as __MetadataBearer } from "@smithy/types"; +import { + DynamoDBClientResolvedConfig, + ServiceInputTypes, + ServiceOutputTypes, +} from "../DynamoDBClient"; +import { DescribeImportInput, DescribeImportOutput } from "../models/models_0"; +export { __MetadataBearer }; +export { $Command }; +export interface DescribeImportCommandInput extends DescribeImportInput {} +export interface DescribeImportCommandOutput + extends DescribeImportOutput, + __MetadataBearer {} +declare const DescribeImportCommand_base: { + new ( + input: DescribeImportCommandInput + ): import("@smithy/smithy-client").CommandImpl< + DescribeImportCommandInput, + DescribeImportCommandOutput, + DynamoDBClientResolvedConfig, + ServiceInputTypes, + ServiceOutputTypes + >; + new ( + __0_0: DescribeImportCommandInput + ): 
import("@smithy/smithy-client").CommandImpl< + DescribeImportCommandInput, + DescribeImportCommandOutput, + DynamoDBClientResolvedConfig, + ServiceInputTypes, + ServiceOutputTypes + >; + getEndpointParameterInstructions(): import("@smithy/middleware-endpoint").EndpointParameterInstructions; +}; +export declare class DescribeImportCommand extends DescribeImportCommand_base { + protected static __types: { + api: { + input: DescribeImportInput; + output: DescribeImportOutput; + }; + sdk: { + input: DescribeImportCommandInput; + output: DescribeImportCommandOutput; + }; + }; +} diff --git a/amplify/functions/fetchDocuments/node_modules/@aws-sdk/client-dynamodb/dist-types/ts3.4/commands/DescribeKinesisStreamingDestinationCommand.d.ts b/amplify/functions/fetchDocuments/node_modules/@aws-sdk/client-dynamodb/dist-types/ts3.4/commands/DescribeKinesisStreamingDestinationCommand.d.ts new file mode 100644 index 0000000..9801ad1 --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@aws-sdk/client-dynamodb/dist-types/ts3.4/commands/DescribeKinesisStreamingDestinationCommand.d.ts @@ -0,0 +1,51 @@ +import { Command as $Command } from "@smithy/smithy-client"; +import { MetadataBearer as __MetadataBearer } from "@smithy/types"; +import { + DynamoDBClientResolvedConfig, + ServiceInputTypes, + ServiceOutputTypes, +} from "../DynamoDBClient"; +import { + DescribeKinesisStreamingDestinationInput, + DescribeKinesisStreamingDestinationOutput, +} from "../models/models_0"; +export { __MetadataBearer }; +export { $Command }; +export interface DescribeKinesisStreamingDestinationCommandInput + extends DescribeKinesisStreamingDestinationInput {} +export interface DescribeKinesisStreamingDestinationCommandOutput + extends DescribeKinesisStreamingDestinationOutput, + __MetadataBearer {} +declare const DescribeKinesisStreamingDestinationCommand_base: { + new ( + input: DescribeKinesisStreamingDestinationCommandInput + ): import("@smithy/smithy-client").CommandImpl< + 
DescribeKinesisStreamingDestinationCommandInput, + DescribeKinesisStreamingDestinationCommandOutput, + DynamoDBClientResolvedConfig, + ServiceInputTypes, + ServiceOutputTypes + >; + new ( + __0_0: DescribeKinesisStreamingDestinationCommandInput + ): import("@smithy/smithy-client").CommandImpl< + DescribeKinesisStreamingDestinationCommandInput, + DescribeKinesisStreamingDestinationCommandOutput, + DynamoDBClientResolvedConfig, + ServiceInputTypes, + ServiceOutputTypes + >; + getEndpointParameterInstructions(): import("@smithy/middleware-endpoint").EndpointParameterInstructions; +}; +export declare class DescribeKinesisStreamingDestinationCommand extends DescribeKinesisStreamingDestinationCommand_base { + protected static __types: { + api: { + input: DescribeKinesisStreamingDestinationInput; + output: DescribeKinesisStreamingDestinationOutput; + }; + sdk: { + input: DescribeKinesisStreamingDestinationCommandInput; + output: DescribeKinesisStreamingDestinationCommandOutput; + }; + }; +} diff --git a/amplify/functions/fetchDocuments/node_modules/@aws-sdk/client-dynamodb/dist-types/ts3.4/commands/DescribeLimitsCommand.d.ts b/amplify/functions/fetchDocuments/node_modules/@aws-sdk/client-dynamodb/dist-types/ts3.4/commands/DescribeLimitsCommand.d.ts new file mode 100644 index 0000000..52ce46b --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@aws-sdk/client-dynamodb/dist-types/ts3.4/commands/DescribeLimitsCommand.d.ts @@ -0,0 +1,47 @@ +import { Command as $Command } from "@smithy/smithy-client"; +import { MetadataBearer as __MetadataBearer } from "@smithy/types"; +import { + DynamoDBClientResolvedConfig, + ServiceInputTypes, + ServiceOutputTypes, +} from "../DynamoDBClient"; +import { DescribeLimitsInput, DescribeLimitsOutput } from "../models/models_0"; +export { __MetadataBearer }; +export { $Command }; +export interface DescribeLimitsCommandInput extends DescribeLimitsInput {} +export interface DescribeLimitsCommandOutput + extends DescribeLimitsOutput, 
+ __MetadataBearer {} +declare const DescribeLimitsCommand_base: { + new ( + input: DescribeLimitsCommandInput + ): import("@smithy/smithy-client").CommandImpl< + DescribeLimitsCommandInput, + DescribeLimitsCommandOutput, + DynamoDBClientResolvedConfig, + ServiceInputTypes, + ServiceOutputTypes + >; + new ( + ...[input]: [] | [DescribeLimitsCommandInput] + ): import("@smithy/smithy-client").CommandImpl< + DescribeLimitsCommandInput, + DescribeLimitsCommandOutput, + DynamoDBClientResolvedConfig, + ServiceInputTypes, + ServiceOutputTypes + >; + getEndpointParameterInstructions(): import("@smithy/middleware-endpoint").EndpointParameterInstructions; +}; +export declare class DescribeLimitsCommand extends DescribeLimitsCommand_base { + protected static __types: { + api: { + input: {}; + output: DescribeLimitsOutput; + }; + sdk: { + input: DescribeLimitsCommandInput; + output: DescribeLimitsCommandOutput; + }; + }; +} diff --git a/amplify/functions/fetchDocuments/node_modules/@aws-sdk/client-dynamodb/dist-types/ts3.4/commands/DescribeTableCommand.d.ts b/amplify/functions/fetchDocuments/node_modules/@aws-sdk/client-dynamodb/dist-types/ts3.4/commands/DescribeTableCommand.d.ts new file mode 100644 index 0000000..d38362c --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@aws-sdk/client-dynamodb/dist-types/ts3.4/commands/DescribeTableCommand.d.ts @@ -0,0 +1,47 @@ +import { Command as $Command } from "@smithy/smithy-client"; +import { MetadataBearer as __MetadataBearer } from "@smithy/types"; +import { + DynamoDBClientResolvedConfig, + ServiceInputTypes, + ServiceOutputTypes, +} from "../DynamoDBClient"; +import { DescribeTableInput, DescribeTableOutput } from "../models/models_0"; +export { __MetadataBearer }; +export { $Command }; +export interface DescribeTableCommandInput extends DescribeTableInput {} +export interface DescribeTableCommandOutput + extends DescribeTableOutput, + __MetadataBearer {} +declare const DescribeTableCommand_base: { + new ( + input: 
DescribeTableCommandInput + ): import("@smithy/smithy-client").CommandImpl< + DescribeTableCommandInput, + DescribeTableCommandOutput, + DynamoDBClientResolvedConfig, + ServiceInputTypes, + ServiceOutputTypes + >; + new ( + __0_0: DescribeTableCommandInput + ): import("@smithy/smithy-client").CommandImpl< + DescribeTableCommandInput, + DescribeTableCommandOutput, + DynamoDBClientResolvedConfig, + ServiceInputTypes, + ServiceOutputTypes + >; + getEndpointParameterInstructions(): import("@smithy/middleware-endpoint").EndpointParameterInstructions; +}; +export declare class DescribeTableCommand extends DescribeTableCommand_base { + protected static __types: { + api: { + input: DescribeTableInput; + output: DescribeTableOutput; + }; + sdk: { + input: DescribeTableCommandInput; + output: DescribeTableCommandOutput; + }; + }; +} diff --git a/amplify/functions/fetchDocuments/node_modules/@aws-sdk/client-dynamodb/dist-types/ts3.4/commands/DescribeTableReplicaAutoScalingCommand.d.ts b/amplify/functions/fetchDocuments/node_modules/@aws-sdk/client-dynamodb/dist-types/ts3.4/commands/DescribeTableReplicaAutoScalingCommand.d.ts new file mode 100644 index 0000000..07328ed --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@aws-sdk/client-dynamodb/dist-types/ts3.4/commands/DescribeTableReplicaAutoScalingCommand.d.ts @@ -0,0 +1,51 @@ +import { Command as $Command } from "@smithy/smithy-client"; +import { MetadataBearer as __MetadataBearer } from "@smithy/types"; +import { + DynamoDBClientResolvedConfig, + ServiceInputTypes, + ServiceOutputTypes, +} from "../DynamoDBClient"; +import { + DescribeTableReplicaAutoScalingInput, + DescribeTableReplicaAutoScalingOutput, +} from "../models/models_0"; +export { __MetadataBearer }; +export { $Command }; +export interface DescribeTableReplicaAutoScalingCommandInput + extends DescribeTableReplicaAutoScalingInput {} +export interface DescribeTableReplicaAutoScalingCommandOutput + extends DescribeTableReplicaAutoScalingOutput, + 
__MetadataBearer {} +declare const DescribeTableReplicaAutoScalingCommand_base: { + new ( + input: DescribeTableReplicaAutoScalingCommandInput + ): import("@smithy/smithy-client").CommandImpl< + DescribeTableReplicaAutoScalingCommandInput, + DescribeTableReplicaAutoScalingCommandOutput, + DynamoDBClientResolvedConfig, + ServiceInputTypes, + ServiceOutputTypes + >; + new ( + __0_0: DescribeTableReplicaAutoScalingCommandInput + ): import("@smithy/smithy-client").CommandImpl< + DescribeTableReplicaAutoScalingCommandInput, + DescribeTableReplicaAutoScalingCommandOutput, + DynamoDBClientResolvedConfig, + ServiceInputTypes, + ServiceOutputTypes + >; + getEndpointParameterInstructions(): import("@smithy/middleware-endpoint").EndpointParameterInstructions; +}; +export declare class DescribeTableReplicaAutoScalingCommand extends DescribeTableReplicaAutoScalingCommand_base { + protected static __types: { + api: { + input: DescribeTableReplicaAutoScalingInput; + output: DescribeTableReplicaAutoScalingOutput; + }; + sdk: { + input: DescribeTableReplicaAutoScalingCommandInput; + output: DescribeTableReplicaAutoScalingCommandOutput; + }; + }; +} diff --git a/amplify/functions/fetchDocuments/node_modules/@aws-sdk/client-dynamodb/dist-types/ts3.4/commands/DescribeTimeToLiveCommand.d.ts b/amplify/functions/fetchDocuments/node_modules/@aws-sdk/client-dynamodb/dist-types/ts3.4/commands/DescribeTimeToLiveCommand.d.ts new file mode 100644 index 0000000..7f8588f --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@aws-sdk/client-dynamodb/dist-types/ts3.4/commands/DescribeTimeToLiveCommand.d.ts @@ -0,0 +1,51 @@ +import { Command as $Command } from "@smithy/smithy-client"; +import { MetadataBearer as __MetadataBearer } from "@smithy/types"; +import { + DynamoDBClientResolvedConfig, + ServiceInputTypes, + ServiceOutputTypes, +} from "../DynamoDBClient"; +import { + DescribeTimeToLiveInput, + DescribeTimeToLiveOutput, +} from "../models/models_0"; +export { __MetadataBearer }; 
+export { $Command }; +export interface DescribeTimeToLiveCommandInput + extends DescribeTimeToLiveInput {} +export interface DescribeTimeToLiveCommandOutput + extends DescribeTimeToLiveOutput, + __MetadataBearer {} +declare const DescribeTimeToLiveCommand_base: { + new ( + input: DescribeTimeToLiveCommandInput + ): import("@smithy/smithy-client").CommandImpl< + DescribeTimeToLiveCommandInput, + DescribeTimeToLiveCommandOutput, + DynamoDBClientResolvedConfig, + ServiceInputTypes, + ServiceOutputTypes + >; + new ( + __0_0: DescribeTimeToLiveCommandInput + ): import("@smithy/smithy-client").CommandImpl< + DescribeTimeToLiveCommandInput, + DescribeTimeToLiveCommandOutput, + DynamoDBClientResolvedConfig, + ServiceInputTypes, + ServiceOutputTypes + >; + getEndpointParameterInstructions(): import("@smithy/middleware-endpoint").EndpointParameterInstructions; +}; +export declare class DescribeTimeToLiveCommand extends DescribeTimeToLiveCommand_base { + protected static __types: { + api: { + input: DescribeTimeToLiveInput; + output: DescribeTimeToLiveOutput; + }; + sdk: { + input: DescribeTimeToLiveCommandInput; + output: DescribeTimeToLiveCommandOutput; + }; + }; +} diff --git a/amplify/functions/fetchDocuments/node_modules/@aws-sdk/client-dynamodb/dist-types/ts3.4/commands/DisableKinesisStreamingDestinationCommand.d.ts b/amplify/functions/fetchDocuments/node_modules/@aws-sdk/client-dynamodb/dist-types/ts3.4/commands/DisableKinesisStreamingDestinationCommand.d.ts new file mode 100644 index 0000000..22257f0 --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@aws-sdk/client-dynamodb/dist-types/ts3.4/commands/DisableKinesisStreamingDestinationCommand.d.ts @@ -0,0 +1,51 @@ +import { Command as $Command } from "@smithy/smithy-client"; +import { MetadataBearer as __MetadataBearer } from "@smithy/types"; +import { + DynamoDBClientResolvedConfig, + ServiceInputTypes, + ServiceOutputTypes, +} from "../DynamoDBClient"; +import { + KinesisStreamingDestinationInput, + 
KinesisStreamingDestinationOutput, +} from "../models/models_0"; +export { __MetadataBearer }; +export { $Command }; +export interface DisableKinesisStreamingDestinationCommandInput + extends KinesisStreamingDestinationInput {} +export interface DisableKinesisStreamingDestinationCommandOutput + extends KinesisStreamingDestinationOutput, + __MetadataBearer {} +declare const DisableKinesisStreamingDestinationCommand_base: { + new ( + input: DisableKinesisStreamingDestinationCommandInput + ): import("@smithy/smithy-client").CommandImpl< + DisableKinesisStreamingDestinationCommandInput, + DisableKinesisStreamingDestinationCommandOutput, + DynamoDBClientResolvedConfig, + ServiceInputTypes, + ServiceOutputTypes + >; + new ( + __0_0: DisableKinesisStreamingDestinationCommandInput + ): import("@smithy/smithy-client").CommandImpl< + DisableKinesisStreamingDestinationCommandInput, + DisableKinesisStreamingDestinationCommandOutput, + DynamoDBClientResolvedConfig, + ServiceInputTypes, + ServiceOutputTypes + >; + getEndpointParameterInstructions(): import("@smithy/middleware-endpoint").EndpointParameterInstructions; +}; +export declare class DisableKinesisStreamingDestinationCommand extends DisableKinesisStreamingDestinationCommand_base { + protected static __types: { + api: { + input: KinesisStreamingDestinationInput; + output: KinesisStreamingDestinationOutput; + }; + sdk: { + input: DisableKinesisStreamingDestinationCommandInput; + output: DisableKinesisStreamingDestinationCommandOutput; + }; + }; +} diff --git a/amplify/functions/fetchDocuments/node_modules/@aws-sdk/client-dynamodb/dist-types/ts3.4/commands/EnableKinesisStreamingDestinationCommand.d.ts b/amplify/functions/fetchDocuments/node_modules/@aws-sdk/client-dynamodb/dist-types/ts3.4/commands/EnableKinesisStreamingDestinationCommand.d.ts new file mode 100644 index 0000000..5d40389 --- /dev/null +++ 
b/amplify/functions/fetchDocuments/node_modules/@aws-sdk/client-dynamodb/dist-types/ts3.4/commands/EnableKinesisStreamingDestinationCommand.d.ts @@ -0,0 +1,51 @@ +import { Command as $Command } from "@smithy/smithy-client"; +import { MetadataBearer as __MetadataBearer } from "@smithy/types"; +import { + DynamoDBClientResolvedConfig, + ServiceInputTypes, + ServiceOutputTypes, +} from "../DynamoDBClient"; +import { + KinesisStreamingDestinationInput, + KinesisStreamingDestinationOutput, +} from "../models/models_0"; +export { __MetadataBearer }; +export { $Command }; +export interface EnableKinesisStreamingDestinationCommandInput + extends KinesisStreamingDestinationInput {} +export interface EnableKinesisStreamingDestinationCommandOutput + extends KinesisStreamingDestinationOutput, + __MetadataBearer {} +declare const EnableKinesisStreamingDestinationCommand_base: { + new ( + input: EnableKinesisStreamingDestinationCommandInput + ): import("@smithy/smithy-client").CommandImpl< + EnableKinesisStreamingDestinationCommandInput, + EnableKinesisStreamingDestinationCommandOutput, + DynamoDBClientResolvedConfig, + ServiceInputTypes, + ServiceOutputTypes + >; + new ( + __0_0: EnableKinesisStreamingDestinationCommandInput + ): import("@smithy/smithy-client").CommandImpl< + EnableKinesisStreamingDestinationCommandInput, + EnableKinesisStreamingDestinationCommandOutput, + DynamoDBClientResolvedConfig, + ServiceInputTypes, + ServiceOutputTypes + >; + getEndpointParameterInstructions(): import("@smithy/middleware-endpoint").EndpointParameterInstructions; +}; +export declare class EnableKinesisStreamingDestinationCommand extends EnableKinesisStreamingDestinationCommand_base { + protected static __types: { + api: { + input: KinesisStreamingDestinationInput; + output: KinesisStreamingDestinationOutput; + }; + sdk: { + input: EnableKinesisStreamingDestinationCommandInput; + output: EnableKinesisStreamingDestinationCommandOutput; + }; + }; +} diff --git 
a/amplify/functions/fetchDocuments/node_modules/@aws-sdk/client-dynamodb/dist-types/ts3.4/commands/ExecuteStatementCommand.d.ts b/amplify/functions/fetchDocuments/node_modules/@aws-sdk/client-dynamodb/dist-types/ts3.4/commands/ExecuteStatementCommand.d.ts new file mode 100644 index 0000000..5b73eee --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@aws-sdk/client-dynamodb/dist-types/ts3.4/commands/ExecuteStatementCommand.d.ts @@ -0,0 +1,50 @@ +import { Command as $Command } from "@smithy/smithy-client"; +import { MetadataBearer as __MetadataBearer } from "@smithy/types"; +import { + DynamoDBClientResolvedConfig, + ServiceInputTypes, + ServiceOutputTypes, +} from "../DynamoDBClient"; +import { + ExecuteStatementInput, + ExecuteStatementOutput, +} from "../models/models_0"; +export { __MetadataBearer }; +export { $Command }; +export interface ExecuteStatementCommandInput extends ExecuteStatementInput {} +export interface ExecuteStatementCommandOutput + extends ExecuteStatementOutput, + __MetadataBearer {} +declare const ExecuteStatementCommand_base: { + new ( + input: ExecuteStatementCommandInput + ): import("@smithy/smithy-client").CommandImpl< + ExecuteStatementCommandInput, + ExecuteStatementCommandOutput, + DynamoDBClientResolvedConfig, + ServiceInputTypes, + ServiceOutputTypes + >; + new ( + __0_0: ExecuteStatementCommandInput + ): import("@smithy/smithy-client").CommandImpl< + ExecuteStatementCommandInput, + ExecuteStatementCommandOutput, + DynamoDBClientResolvedConfig, + ServiceInputTypes, + ServiceOutputTypes + >; + getEndpointParameterInstructions(): import("@smithy/middleware-endpoint").EndpointParameterInstructions; +}; +export declare class ExecuteStatementCommand extends ExecuteStatementCommand_base { + protected static __types: { + api: { + input: ExecuteStatementInput; + output: ExecuteStatementOutput; + }; + sdk: { + input: ExecuteStatementCommandInput; + output: ExecuteStatementCommandOutput; + }; + }; +} diff --git 
a/amplify/functions/fetchDocuments/node_modules/@aws-sdk/client-dynamodb/dist-types/ts3.4/commands/ExecuteTransactionCommand.d.ts b/amplify/functions/fetchDocuments/node_modules/@aws-sdk/client-dynamodb/dist-types/ts3.4/commands/ExecuteTransactionCommand.d.ts new file mode 100644 index 0000000..2b94d7c --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@aws-sdk/client-dynamodb/dist-types/ts3.4/commands/ExecuteTransactionCommand.d.ts @@ -0,0 +1,51 @@ +import { Command as $Command } from "@smithy/smithy-client"; +import { MetadataBearer as __MetadataBearer } from "@smithy/types"; +import { + DynamoDBClientResolvedConfig, + ServiceInputTypes, + ServiceOutputTypes, +} from "../DynamoDBClient"; +import { + ExecuteTransactionInput, + ExecuteTransactionOutput, +} from "../models/models_0"; +export { __MetadataBearer }; +export { $Command }; +export interface ExecuteTransactionCommandInput + extends ExecuteTransactionInput {} +export interface ExecuteTransactionCommandOutput + extends ExecuteTransactionOutput, + __MetadataBearer {} +declare const ExecuteTransactionCommand_base: { + new ( + input: ExecuteTransactionCommandInput + ): import("@smithy/smithy-client").CommandImpl< + ExecuteTransactionCommandInput, + ExecuteTransactionCommandOutput, + DynamoDBClientResolvedConfig, + ServiceInputTypes, + ServiceOutputTypes + >; + new ( + __0_0: ExecuteTransactionCommandInput + ): import("@smithy/smithy-client").CommandImpl< + ExecuteTransactionCommandInput, + ExecuteTransactionCommandOutput, + DynamoDBClientResolvedConfig, + ServiceInputTypes, + ServiceOutputTypes + >; + getEndpointParameterInstructions(): import("@smithy/middleware-endpoint").EndpointParameterInstructions; +}; +export declare class ExecuteTransactionCommand extends ExecuteTransactionCommand_base { + protected static __types: { + api: { + input: ExecuteTransactionInput; + output: ExecuteTransactionOutput; + }; + sdk: { + input: ExecuteTransactionCommandInput; + output: 
ExecuteTransactionCommandOutput; + }; + }; +} diff --git a/amplify/functions/fetchDocuments/node_modules/@aws-sdk/client-dynamodb/dist-types/ts3.4/commands/ExportTableToPointInTimeCommand.d.ts b/amplify/functions/fetchDocuments/node_modules/@aws-sdk/client-dynamodb/dist-types/ts3.4/commands/ExportTableToPointInTimeCommand.d.ts new file mode 100644 index 0000000..f65fac9 --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@aws-sdk/client-dynamodb/dist-types/ts3.4/commands/ExportTableToPointInTimeCommand.d.ts @@ -0,0 +1,51 @@ +import { Command as $Command } from "@smithy/smithy-client"; +import { MetadataBearer as __MetadataBearer } from "@smithy/types"; +import { + DynamoDBClientResolvedConfig, + ServiceInputTypes, + ServiceOutputTypes, +} from "../DynamoDBClient"; +import { + ExportTableToPointInTimeInput, + ExportTableToPointInTimeOutput, +} from "../models/models_0"; +export { __MetadataBearer }; +export { $Command }; +export interface ExportTableToPointInTimeCommandInput + extends ExportTableToPointInTimeInput {} +export interface ExportTableToPointInTimeCommandOutput + extends ExportTableToPointInTimeOutput, + __MetadataBearer {} +declare const ExportTableToPointInTimeCommand_base: { + new ( + input: ExportTableToPointInTimeCommandInput + ): import("@smithy/smithy-client").CommandImpl< + ExportTableToPointInTimeCommandInput, + ExportTableToPointInTimeCommandOutput, + DynamoDBClientResolvedConfig, + ServiceInputTypes, + ServiceOutputTypes + >; + new ( + __0_0: ExportTableToPointInTimeCommandInput + ): import("@smithy/smithy-client").CommandImpl< + ExportTableToPointInTimeCommandInput, + ExportTableToPointInTimeCommandOutput, + DynamoDBClientResolvedConfig, + ServiceInputTypes, + ServiceOutputTypes + >; + getEndpointParameterInstructions(): import("@smithy/middleware-endpoint").EndpointParameterInstructions; +}; +export declare class ExportTableToPointInTimeCommand extends ExportTableToPointInTimeCommand_base { + protected static __types: { + api: { 
+ input: ExportTableToPointInTimeInput; + output: ExportTableToPointInTimeOutput; + }; + sdk: { + input: ExportTableToPointInTimeCommandInput; + output: ExportTableToPointInTimeCommandOutput; + }; + }; +} diff --git a/amplify/functions/fetchDocuments/node_modules/@aws-sdk/client-dynamodb/dist-types/ts3.4/commands/GetItemCommand.d.ts b/amplify/functions/fetchDocuments/node_modules/@aws-sdk/client-dynamodb/dist-types/ts3.4/commands/GetItemCommand.d.ts new file mode 100644 index 0000000..0e7ffb9 --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@aws-sdk/client-dynamodb/dist-types/ts3.4/commands/GetItemCommand.d.ts @@ -0,0 +1,41 @@ +import { Command as $Command } from "@smithy/smithy-client"; +import { MetadataBearer as __MetadataBearer } from "@smithy/types"; +import { + DynamoDBClientResolvedConfig, + ServiceInputTypes, + ServiceOutputTypes, +} from "../DynamoDBClient"; +import { GetItemInput, GetItemOutput } from "../models/models_0"; +export { __MetadataBearer }; +export { $Command }; +export interface GetItemCommandInput extends GetItemInput {} +export interface GetItemCommandOutput extends GetItemOutput, __MetadataBearer {} +declare const GetItemCommand_base: { + new (input: GetItemCommandInput): import("@smithy/smithy-client").CommandImpl< + GetItemCommandInput, + GetItemCommandOutput, + DynamoDBClientResolvedConfig, + ServiceInputTypes, + ServiceOutputTypes + >; + new (__0_0: GetItemCommandInput): import("@smithy/smithy-client").CommandImpl< + GetItemCommandInput, + GetItemCommandOutput, + DynamoDBClientResolvedConfig, + ServiceInputTypes, + ServiceOutputTypes + >; + getEndpointParameterInstructions(): import("@smithy/middleware-endpoint").EndpointParameterInstructions; +}; +export declare class GetItemCommand extends GetItemCommand_base { + protected static __types: { + api: { + input: GetItemInput; + output: GetItemOutput; + }; + sdk: { + input: GetItemCommandInput; + output: GetItemCommandOutput; + }; + }; +} diff --git 
a/amplify/functions/fetchDocuments/node_modules/@aws-sdk/client-dynamodb/dist-types/ts3.4/commands/GetResourcePolicyCommand.d.ts b/amplify/functions/fetchDocuments/node_modules/@aws-sdk/client-dynamodb/dist-types/ts3.4/commands/GetResourcePolicyCommand.d.ts new file mode 100644 index 0000000..3691e80 --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@aws-sdk/client-dynamodb/dist-types/ts3.4/commands/GetResourcePolicyCommand.d.ts @@ -0,0 +1,50 @@ +import { Command as $Command } from "@smithy/smithy-client"; +import { MetadataBearer as __MetadataBearer } from "@smithy/types"; +import { + DynamoDBClientResolvedConfig, + ServiceInputTypes, + ServiceOutputTypes, +} from "../DynamoDBClient"; +import { + GetResourcePolicyInput, + GetResourcePolicyOutput, +} from "../models/models_0"; +export { __MetadataBearer }; +export { $Command }; +export interface GetResourcePolicyCommandInput extends GetResourcePolicyInput {} +export interface GetResourcePolicyCommandOutput + extends GetResourcePolicyOutput, + __MetadataBearer {} +declare const GetResourcePolicyCommand_base: { + new ( + input: GetResourcePolicyCommandInput + ): import("@smithy/smithy-client").CommandImpl< + GetResourcePolicyCommandInput, + GetResourcePolicyCommandOutput, + DynamoDBClientResolvedConfig, + ServiceInputTypes, + ServiceOutputTypes + >; + new ( + __0_0: GetResourcePolicyCommandInput + ): import("@smithy/smithy-client").CommandImpl< + GetResourcePolicyCommandInput, + GetResourcePolicyCommandOutput, + DynamoDBClientResolvedConfig, + ServiceInputTypes, + ServiceOutputTypes + >; + getEndpointParameterInstructions(): import("@smithy/middleware-endpoint").EndpointParameterInstructions; +}; +export declare class GetResourcePolicyCommand extends GetResourcePolicyCommand_base { + protected static __types: { + api: { + input: GetResourcePolicyInput; + output: GetResourcePolicyOutput; + }; + sdk: { + input: GetResourcePolicyCommandInput; + output: GetResourcePolicyCommandOutput; + }; + }; +} diff 
--git a/amplify/functions/fetchDocuments/node_modules/@aws-sdk/client-dynamodb/dist-types/ts3.4/commands/ImportTableCommand.d.ts b/amplify/functions/fetchDocuments/node_modules/@aws-sdk/client-dynamodb/dist-types/ts3.4/commands/ImportTableCommand.d.ts new file mode 100644 index 0000000..57c8b04 --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@aws-sdk/client-dynamodb/dist-types/ts3.4/commands/ImportTableCommand.d.ts @@ -0,0 +1,47 @@ +import { Command as $Command } from "@smithy/smithy-client"; +import { MetadataBearer as __MetadataBearer } from "@smithy/types"; +import { + DynamoDBClientResolvedConfig, + ServiceInputTypes, + ServiceOutputTypes, +} from "../DynamoDBClient"; +import { ImportTableInput, ImportTableOutput } from "../models/models_0"; +export { __MetadataBearer }; +export { $Command }; +export interface ImportTableCommandInput extends ImportTableInput {} +export interface ImportTableCommandOutput + extends ImportTableOutput, + __MetadataBearer {} +declare const ImportTableCommand_base: { + new ( + input: ImportTableCommandInput + ): import("@smithy/smithy-client").CommandImpl< + ImportTableCommandInput, + ImportTableCommandOutput, + DynamoDBClientResolvedConfig, + ServiceInputTypes, + ServiceOutputTypes + >; + new ( + __0_0: ImportTableCommandInput + ): import("@smithy/smithy-client").CommandImpl< + ImportTableCommandInput, + ImportTableCommandOutput, + DynamoDBClientResolvedConfig, + ServiceInputTypes, + ServiceOutputTypes + >; + getEndpointParameterInstructions(): import("@smithy/middleware-endpoint").EndpointParameterInstructions; +}; +export declare class ImportTableCommand extends ImportTableCommand_base { + protected static __types: { + api: { + input: ImportTableInput; + output: ImportTableOutput; + }; + sdk: { + input: ImportTableCommandInput; + output: ImportTableCommandOutput; + }; + }; +} diff --git a/amplify/functions/fetchDocuments/node_modules/@aws-sdk/client-dynamodb/dist-types/ts3.4/commands/ListBackupsCommand.d.ts 
b/amplify/functions/fetchDocuments/node_modules/@aws-sdk/client-dynamodb/dist-types/ts3.4/commands/ListBackupsCommand.d.ts new file mode 100644 index 0000000..2b00a39 --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@aws-sdk/client-dynamodb/dist-types/ts3.4/commands/ListBackupsCommand.d.ts @@ -0,0 +1,47 @@ +import { Command as $Command } from "@smithy/smithy-client"; +import { MetadataBearer as __MetadataBearer } from "@smithy/types"; +import { + DynamoDBClientResolvedConfig, + ServiceInputTypes, + ServiceOutputTypes, +} from "../DynamoDBClient"; +import { ListBackupsInput, ListBackupsOutput } from "../models/models_0"; +export { __MetadataBearer }; +export { $Command }; +export interface ListBackupsCommandInput extends ListBackupsInput {} +export interface ListBackupsCommandOutput + extends ListBackupsOutput, + __MetadataBearer {} +declare const ListBackupsCommand_base: { + new ( + input: ListBackupsCommandInput + ): import("@smithy/smithy-client").CommandImpl< + ListBackupsCommandInput, + ListBackupsCommandOutput, + DynamoDBClientResolvedConfig, + ServiceInputTypes, + ServiceOutputTypes + >; + new ( + ...[input]: [] | [ListBackupsCommandInput] + ): import("@smithy/smithy-client").CommandImpl< + ListBackupsCommandInput, + ListBackupsCommandOutput, + DynamoDBClientResolvedConfig, + ServiceInputTypes, + ServiceOutputTypes + >; + getEndpointParameterInstructions(): import("@smithy/middleware-endpoint").EndpointParameterInstructions; +}; +export declare class ListBackupsCommand extends ListBackupsCommand_base { + protected static __types: { + api: { + input: ListBackupsInput; + output: ListBackupsOutput; + }; + sdk: { + input: ListBackupsCommandInput; + output: ListBackupsCommandOutput; + }; + }; +} diff --git a/amplify/functions/fetchDocuments/node_modules/@aws-sdk/client-dynamodb/dist-types/ts3.4/commands/ListContributorInsightsCommand.d.ts 
b/amplify/functions/fetchDocuments/node_modules/@aws-sdk/client-dynamodb/dist-types/ts3.4/commands/ListContributorInsightsCommand.d.ts new file mode 100644 index 0000000..455495e --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@aws-sdk/client-dynamodb/dist-types/ts3.4/commands/ListContributorInsightsCommand.d.ts @@ -0,0 +1,51 @@ +import { Command as $Command } from "@smithy/smithy-client"; +import { MetadataBearer as __MetadataBearer } from "@smithy/types"; +import { + DynamoDBClientResolvedConfig, + ServiceInputTypes, + ServiceOutputTypes, +} from "../DynamoDBClient"; +import { + ListContributorInsightsInput, + ListContributorInsightsOutput, +} from "../models/models_0"; +export { __MetadataBearer }; +export { $Command }; +export interface ListContributorInsightsCommandInput + extends ListContributorInsightsInput {} +export interface ListContributorInsightsCommandOutput + extends ListContributorInsightsOutput, + __MetadataBearer {} +declare const ListContributorInsightsCommand_base: { + new ( + input: ListContributorInsightsCommandInput + ): import("@smithy/smithy-client").CommandImpl< + ListContributorInsightsCommandInput, + ListContributorInsightsCommandOutput, + DynamoDBClientResolvedConfig, + ServiceInputTypes, + ServiceOutputTypes + >; + new ( + ...[input]: [] | [ListContributorInsightsCommandInput] + ): import("@smithy/smithy-client").CommandImpl< + ListContributorInsightsCommandInput, + ListContributorInsightsCommandOutput, + DynamoDBClientResolvedConfig, + ServiceInputTypes, + ServiceOutputTypes + >; + getEndpointParameterInstructions(): import("@smithy/middleware-endpoint").EndpointParameterInstructions; +}; +export declare class ListContributorInsightsCommand extends ListContributorInsightsCommand_base { + protected static __types: { + api: { + input: ListContributorInsightsInput; + output: ListContributorInsightsOutput; + }; + sdk: { + input: ListContributorInsightsCommandInput; + output: ListContributorInsightsCommandOutput; + }; + }; 
+} diff --git a/amplify/functions/fetchDocuments/node_modules/@aws-sdk/client-dynamodb/dist-types/ts3.4/commands/ListExportsCommand.d.ts b/amplify/functions/fetchDocuments/node_modules/@aws-sdk/client-dynamodb/dist-types/ts3.4/commands/ListExportsCommand.d.ts new file mode 100644 index 0000000..b968746 --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@aws-sdk/client-dynamodb/dist-types/ts3.4/commands/ListExportsCommand.d.ts @@ -0,0 +1,47 @@ +import { Command as $Command } from "@smithy/smithy-client"; +import { MetadataBearer as __MetadataBearer } from "@smithy/types"; +import { + DynamoDBClientResolvedConfig, + ServiceInputTypes, + ServiceOutputTypes, +} from "../DynamoDBClient"; +import { ListExportsInput, ListExportsOutput } from "../models/models_0"; +export { __MetadataBearer }; +export { $Command }; +export interface ListExportsCommandInput extends ListExportsInput {} +export interface ListExportsCommandOutput + extends ListExportsOutput, + __MetadataBearer {} +declare const ListExportsCommand_base: { + new ( + input: ListExportsCommandInput + ): import("@smithy/smithy-client").CommandImpl< + ListExportsCommandInput, + ListExportsCommandOutput, + DynamoDBClientResolvedConfig, + ServiceInputTypes, + ServiceOutputTypes + >; + new ( + ...[input]: [] | [ListExportsCommandInput] + ): import("@smithy/smithy-client").CommandImpl< + ListExportsCommandInput, + ListExportsCommandOutput, + DynamoDBClientResolvedConfig, + ServiceInputTypes, + ServiceOutputTypes + >; + getEndpointParameterInstructions(): import("@smithy/middleware-endpoint").EndpointParameterInstructions; +}; +export declare class ListExportsCommand extends ListExportsCommand_base { + protected static __types: { + api: { + input: ListExportsInput; + output: ListExportsOutput; + }; + sdk: { + input: ListExportsCommandInput; + output: ListExportsCommandOutput; + }; + }; +} diff --git 
a/amplify/functions/fetchDocuments/node_modules/@aws-sdk/client-dynamodb/dist-types/ts3.4/commands/ListGlobalTablesCommand.d.ts b/amplify/functions/fetchDocuments/node_modules/@aws-sdk/client-dynamodb/dist-types/ts3.4/commands/ListGlobalTablesCommand.d.ts new file mode 100644 index 0000000..3428e57 --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@aws-sdk/client-dynamodb/dist-types/ts3.4/commands/ListGlobalTablesCommand.d.ts @@ -0,0 +1,50 @@ +import { Command as $Command } from "@smithy/smithy-client"; +import { MetadataBearer as __MetadataBearer } from "@smithy/types"; +import { + DynamoDBClientResolvedConfig, + ServiceInputTypes, + ServiceOutputTypes, +} from "../DynamoDBClient"; +import { + ListGlobalTablesInput, + ListGlobalTablesOutput, +} from "../models/models_0"; +export { __MetadataBearer }; +export { $Command }; +export interface ListGlobalTablesCommandInput extends ListGlobalTablesInput {} +export interface ListGlobalTablesCommandOutput + extends ListGlobalTablesOutput, + __MetadataBearer {} +declare const ListGlobalTablesCommand_base: { + new ( + input: ListGlobalTablesCommandInput + ): import("@smithy/smithy-client").CommandImpl< + ListGlobalTablesCommandInput, + ListGlobalTablesCommandOutput, + DynamoDBClientResolvedConfig, + ServiceInputTypes, + ServiceOutputTypes + >; + new ( + ...[input]: [] | [ListGlobalTablesCommandInput] + ): import("@smithy/smithy-client").CommandImpl< + ListGlobalTablesCommandInput, + ListGlobalTablesCommandOutput, + DynamoDBClientResolvedConfig, + ServiceInputTypes, + ServiceOutputTypes + >; + getEndpointParameterInstructions(): import("@smithy/middleware-endpoint").EndpointParameterInstructions; +}; +export declare class ListGlobalTablesCommand extends ListGlobalTablesCommand_base { + protected static __types: { + api: { + input: ListGlobalTablesInput; + output: ListGlobalTablesOutput; + }; + sdk: { + input: ListGlobalTablesCommandInput; + output: ListGlobalTablesCommandOutput; + }; + }; +} diff --git 
a/amplify/functions/fetchDocuments/node_modules/@aws-sdk/client-dynamodb/dist-types/ts3.4/commands/ListImportsCommand.d.ts b/amplify/functions/fetchDocuments/node_modules/@aws-sdk/client-dynamodb/dist-types/ts3.4/commands/ListImportsCommand.d.ts new file mode 100644 index 0000000..07bfebf --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@aws-sdk/client-dynamodb/dist-types/ts3.4/commands/ListImportsCommand.d.ts @@ -0,0 +1,47 @@ +import { Command as $Command } from "@smithy/smithy-client"; +import { MetadataBearer as __MetadataBearer } from "@smithy/types"; +import { + DynamoDBClientResolvedConfig, + ServiceInputTypes, + ServiceOutputTypes, +} from "../DynamoDBClient"; +import { ListImportsInput, ListImportsOutput } from "../models/models_0"; +export { __MetadataBearer }; +export { $Command }; +export interface ListImportsCommandInput extends ListImportsInput {} +export interface ListImportsCommandOutput + extends ListImportsOutput, + __MetadataBearer {} +declare const ListImportsCommand_base: { + new ( + input: ListImportsCommandInput + ): import("@smithy/smithy-client").CommandImpl< + ListImportsCommandInput, + ListImportsCommandOutput, + DynamoDBClientResolvedConfig, + ServiceInputTypes, + ServiceOutputTypes + >; + new ( + ...[input]: [] | [ListImportsCommandInput] + ): import("@smithy/smithy-client").CommandImpl< + ListImportsCommandInput, + ListImportsCommandOutput, + DynamoDBClientResolvedConfig, + ServiceInputTypes, + ServiceOutputTypes + >; + getEndpointParameterInstructions(): import("@smithy/middleware-endpoint").EndpointParameterInstructions; +}; +export declare class ListImportsCommand extends ListImportsCommand_base { + protected static __types: { + api: { + input: ListImportsInput; + output: ListImportsOutput; + }; + sdk: { + input: ListImportsCommandInput; + output: ListImportsCommandOutput; + }; + }; +} diff --git a/amplify/functions/fetchDocuments/node_modules/@aws-sdk/client-dynamodb/dist-types/ts3.4/commands/ListTablesCommand.d.ts 
b/amplify/functions/fetchDocuments/node_modules/@aws-sdk/client-dynamodb/dist-types/ts3.4/commands/ListTablesCommand.d.ts new file mode 100644 index 0000000..497f02f --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@aws-sdk/client-dynamodb/dist-types/ts3.4/commands/ListTablesCommand.d.ts @@ -0,0 +1,47 @@ +import { Command as $Command } from "@smithy/smithy-client"; +import { MetadataBearer as __MetadataBearer } from "@smithy/types"; +import { + DynamoDBClientResolvedConfig, + ServiceInputTypes, + ServiceOutputTypes, +} from "../DynamoDBClient"; +import { ListTablesInput, ListTablesOutput } from "../models/models_0"; +export { __MetadataBearer }; +export { $Command }; +export interface ListTablesCommandInput extends ListTablesInput {} +export interface ListTablesCommandOutput + extends ListTablesOutput, + __MetadataBearer {} +declare const ListTablesCommand_base: { + new ( + input: ListTablesCommandInput + ): import("@smithy/smithy-client").CommandImpl< + ListTablesCommandInput, + ListTablesCommandOutput, + DynamoDBClientResolvedConfig, + ServiceInputTypes, + ServiceOutputTypes + >; + new ( + ...[input]: [] | [ListTablesCommandInput] + ): import("@smithy/smithy-client").CommandImpl< + ListTablesCommandInput, + ListTablesCommandOutput, + DynamoDBClientResolvedConfig, + ServiceInputTypes, + ServiceOutputTypes + >; + getEndpointParameterInstructions(): import("@smithy/middleware-endpoint").EndpointParameterInstructions; +}; +export declare class ListTablesCommand extends ListTablesCommand_base { + protected static __types: { + api: { + input: ListTablesInput; + output: ListTablesOutput; + }; + sdk: { + input: ListTablesCommandInput; + output: ListTablesCommandOutput; + }; + }; +} diff --git a/amplify/functions/fetchDocuments/node_modules/@aws-sdk/client-dynamodb/dist-types/ts3.4/commands/ListTagsOfResourceCommand.d.ts b/amplify/functions/fetchDocuments/node_modules/@aws-sdk/client-dynamodb/dist-types/ts3.4/commands/ListTagsOfResourceCommand.d.ts new 
file mode 100644 index 0000000..8bffe40 --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@aws-sdk/client-dynamodb/dist-types/ts3.4/commands/ListTagsOfResourceCommand.d.ts @@ -0,0 +1,51 @@ +import { Command as $Command } from "@smithy/smithy-client"; +import { MetadataBearer as __MetadataBearer } from "@smithy/types"; +import { + DynamoDBClientResolvedConfig, + ServiceInputTypes, + ServiceOutputTypes, +} from "../DynamoDBClient"; +import { + ListTagsOfResourceInput, + ListTagsOfResourceOutput, +} from "../models/models_0"; +export { __MetadataBearer }; +export { $Command }; +export interface ListTagsOfResourceCommandInput + extends ListTagsOfResourceInput {} +export interface ListTagsOfResourceCommandOutput + extends ListTagsOfResourceOutput, + __MetadataBearer {} +declare const ListTagsOfResourceCommand_base: { + new ( + input: ListTagsOfResourceCommandInput + ): import("@smithy/smithy-client").CommandImpl< + ListTagsOfResourceCommandInput, + ListTagsOfResourceCommandOutput, + DynamoDBClientResolvedConfig, + ServiceInputTypes, + ServiceOutputTypes + >; + new ( + __0_0: ListTagsOfResourceCommandInput + ): import("@smithy/smithy-client").CommandImpl< + ListTagsOfResourceCommandInput, + ListTagsOfResourceCommandOutput, + DynamoDBClientResolvedConfig, + ServiceInputTypes, + ServiceOutputTypes + >; + getEndpointParameterInstructions(): import("@smithy/middleware-endpoint").EndpointParameterInstructions; +}; +export declare class ListTagsOfResourceCommand extends ListTagsOfResourceCommand_base { + protected static __types: { + api: { + input: ListTagsOfResourceInput; + output: ListTagsOfResourceOutput; + }; + sdk: { + input: ListTagsOfResourceCommandInput; + output: ListTagsOfResourceCommandOutput; + }; + }; +} diff --git a/amplify/functions/fetchDocuments/node_modules/@aws-sdk/client-dynamodb/dist-types/ts3.4/commands/PutItemCommand.d.ts 
b/amplify/functions/fetchDocuments/node_modules/@aws-sdk/client-dynamodb/dist-types/ts3.4/commands/PutItemCommand.d.ts new file mode 100644 index 0000000..7ae0d3e --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@aws-sdk/client-dynamodb/dist-types/ts3.4/commands/PutItemCommand.d.ts @@ -0,0 +1,41 @@ +import { Command as $Command } from "@smithy/smithy-client"; +import { MetadataBearer as __MetadataBearer } from "@smithy/types"; +import { + DynamoDBClientResolvedConfig, + ServiceInputTypes, + ServiceOutputTypes, +} from "../DynamoDBClient"; +import { PutItemInput, PutItemOutput } from "../models/models_0"; +export { __MetadataBearer }; +export { $Command }; +export interface PutItemCommandInput extends PutItemInput {} +export interface PutItemCommandOutput extends PutItemOutput, __MetadataBearer {} +declare const PutItemCommand_base: { + new (input: PutItemCommandInput): import("@smithy/smithy-client").CommandImpl< + PutItemCommandInput, + PutItemCommandOutput, + DynamoDBClientResolvedConfig, + ServiceInputTypes, + ServiceOutputTypes + >; + new (__0_0: PutItemCommandInput): import("@smithy/smithy-client").CommandImpl< + PutItemCommandInput, + PutItemCommandOutput, + DynamoDBClientResolvedConfig, + ServiceInputTypes, + ServiceOutputTypes + >; + getEndpointParameterInstructions(): import("@smithy/middleware-endpoint").EndpointParameterInstructions; +}; +export declare class PutItemCommand extends PutItemCommand_base { + protected static __types: { + api: { + input: PutItemInput; + output: PutItemOutput; + }; + sdk: { + input: PutItemCommandInput; + output: PutItemCommandOutput; + }; + }; +} diff --git a/amplify/functions/fetchDocuments/node_modules/@aws-sdk/client-dynamodb/dist-types/ts3.4/commands/PutResourcePolicyCommand.d.ts b/amplify/functions/fetchDocuments/node_modules/@aws-sdk/client-dynamodb/dist-types/ts3.4/commands/PutResourcePolicyCommand.d.ts new file mode 100644 index 0000000..2c83af7 --- /dev/null +++ 
b/amplify/functions/fetchDocuments/node_modules/@aws-sdk/client-dynamodb/dist-types/ts3.4/commands/PutResourcePolicyCommand.d.ts @@ -0,0 +1,50 @@ +import { Command as $Command } from "@smithy/smithy-client"; +import { MetadataBearer as __MetadataBearer } from "@smithy/types"; +import { + DynamoDBClientResolvedConfig, + ServiceInputTypes, + ServiceOutputTypes, +} from "../DynamoDBClient"; +import { + PutResourcePolicyInput, + PutResourcePolicyOutput, +} from "../models/models_0"; +export { __MetadataBearer }; +export { $Command }; +export interface PutResourcePolicyCommandInput extends PutResourcePolicyInput {} +export interface PutResourcePolicyCommandOutput + extends PutResourcePolicyOutput, + __MetadataBearer {} +declare const PutResourcePolicyCommand_base: { + new ( + input: PutResourcePolicyCommandInput + ): import("@smithy/smithy-client").CommandImpl< + PutResourcePolicyCommandInput, + PutResourcePolicyCommandOutput, + DynamoDBClientResolvedConfig, + ServiceInputTypes, + ServiceOutputTypes + >; + new ( + __0_0: PutResourcePolicyCommandInput + ): import("@smithy/smithy-client").CommandImpl< + PutResourcePolicyCommandInput, + PutResourcePolicyCommandOutput, + DynamoDBClientResolvedConfig, + ServiceInputTypes, + ServiceOutputTypes + >; + getEndpointParameterInstructions(): import("@smithy/middleware-endpoint").EndpointParameterInstructions; +}; +export declare class PutResourcePolicyCommand extends PutResourcePolicyCommand_base { + protected static __types: { + api: { + input: PutResourcePolicyInput; + output: PutResourcePolicyOutput; + }; + sdk: { + input: PutResourcePolicyCommandInput; + output: PutResourcePolicyCommandOutput; + }; + }; +} diff --git a/amplify/functions/fetchDocuments/node_modules/@aws-sdk/client-dynamodb/dist-types/ts3.4/commands/QueryCommand.d.ts b/amplify/functions/fetchDocuments/node_modules/@aws-sdk/client-dynamodb/dist-types/ts3.4/commands/QueryCommand.d.ts new file mode 100644 index 0000000..125753a --- /dev/null +++ 
b/amplify/functions/fetchDocuments/node_modules/@aws-sdk/client-dynamodb/dist-types/ts3.4/commands/QueryCommand.d.ts @@ -0,0 +1,41 @@ +import { Command as $Command } from "@smithy/smithy-client"; +import { MetadataBearer as __MetadataBearer } from "@smithy/types"; +import { + DynamoDBClientResolvedConfig, + ServiceInputTypes, + ServiceOutputTypes, +} from "../DynamoDBClient"; +import { QueryInput, QueryOutput } from "../models/models_0"; +export { __MetadataBearer }; +export { $Command }; +export interface QueryCommandInput extends QueryInput {} +export interface QueryCommandOutput extends QueryOutput, __MetadataBearer {} +declare const QueryCommand_base: { + new (input: QueryCommandInput): import("@smithy/smithy-client").CommandImpl< + QueryCommandInput, + QueryCommandOutput, + DynamoDBClientResolvedConfig, + ServiceInputTypes, + ServiceOutputTypes + >; + new (__0_0: QueryCommandInput): import("@smithy/smithy-client").CommandImpl< + QueryCommandInput, + QueryCommandOutput, + DynamoDBClientResolvedConfig, + ServiceInputTypes, + ServiceOutputTypes + >; + getEndpointParameterInstructions(): import("@smithy/middleware-endpoint").EndpointParameterInstructions; +}; +export declare class QueryCommand extends QueryCommand_base { + protected static __types: { + api: { + input: QueryInput; + output: QueryOutput; + }; + sdk: { + input: QueryCommandInput; + output: QueryCommandOutput; + }; + }; +} diff --git a/amplify/functions/fetchDocuments/node_modules/@aws-sdk/client-dynamodb/dist-types/ts3.4/commands/RestoreTableFromBackupCommand.d.ts b/amplify/functions/fetchDocuments/node_modules/@aws-sdk/client-dynamodb/dist-types/ts3.4/commands/RestoreTableFromBackupCommand.d.ts new file mode 100644 index 0000000..954e61b --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@aws-sdk/client-dynamodb/dist-types/ts3.4/commands/RestoreTableFromBackupCommand.d.ts @@ -0,0 +1,51 @@ +import { Command as $Command } from "@smithy/smithy-client"; +import { MetadataBearer as 
__MetadataBearer } from "@smithy/types"; +import { + DynamoDBClientResolvedConfig, + ServiceInputTypes, + ServiceOutputTypes, +} from "../DynamoDBClient"; +import { + RestoreTableFromBackupInput, + RestoreTableFromBackupOutput, +} from "../models/models_0"; +export { __MetadataBearer }; +export { $Command }; +export interface RestoreTableFromBackupCommandInput + extends RestoreTableFromBackupInput {} +export interface RestoreTableFromBackupCommandOutput + extends RestoreTableFromBackupOutput, + __MetadataBearer {} +declare const RestoreTableFromBackupCommand_base: { + new ( + input: RestoreTableFromBackupCommandInput + ): import("@smithy/smithy-client").CommandImpl< + RestoreTableFromBackupCommandInput, + RestoreTableFromBackupCommandOutput, + DynamoDBClientResolvedConfig, + ServiceInputTypes, + ServiceOutputTypes + >; + new ( + __0_0: RestoreTableFromBackupCommandInput + ): import("@smithy/smithy-client").CommandImpl< + RestoreTableFromBackupCommandInput, + RestoreTableFromBackupCommandOutput, + DynamoDBClientResolvedConfig, + ServiceInputTypes, + ServiceOutputTypes + >; + getEndpointParameterInstructions(): import("@smithy/middleware-endpoint").EndpointParameterInstructions; +}; +export declare class RestoreTableFromBackupCommand extends RestoreTableFromBackupCommand_base { + protected static __types: { + api: { + input: RestoreTableFromBackupInput; + output: RestoreTableFromBackupOutput; + }; + sdk: { + input: RestoreTableFromBackupCommandInput; + output: RestoreTableFromBackupCommandOutput; + }; + }; +} diff --git a/amplify/functions/fetchDocuments/node_modules/@aws-sdk/client-dynamodb/dist-types/ts3.4/commands/RestoreTableToPointInTimeCommand.d.ts b/amplify/functions/fetchDocuments/node_modules/@aws-sdk/client-dynamodb/dist-types/ts3.4/commands/RestoreTableToPointInTimeCommand.d.ts new file mode 100644 index 0000000..8b243f5 --- /dev/null +++ 
b/amplify/functions/fetchDocuments/node_modules/@aws-sdk/client-dynamodb/dist-types/ts3.4/commands/RestoreTableToPointInTimeCommand.d.ts @@ -0,0 +1,51 @@ +import { Command as $Command } from "@smithy/smithy-client"; +import { MetadataBearer as __MetadataBearer } from "@smithy/types"; +import { + DynamoDBClientResolvedConfig, + ServiceInputTypes, + ServiceOutputTypes, +} from "../DynamoDBClient"; +import { + RestoreTableToPointInTimeInput, + RestoreTableToPointInTimeOutput, +} from "../models/models_0"; +export { __MetadataBearer }; +export { $Command }; +export interface RestoreTableToPointInTimeCommandInput + extends RestoreTableToPointInTimeInput {} +export interface RestoreTableToPointInTimeCommandOutput + extends RestoreTableToPointInTimeOutput, + __MetadataBearer {} +declare const RestoreTableToPointInTimeCommand_base: { + new ( + input: RestoreTableToPointInTimeCommandInput + ): import("@smithy/smithy-client").CommandImpl< + RestoreTableToPointInTimeCommandInput, + RestoreTableToPointInTimeCommandOutput, + DynamoDBClientResolvedConfig, + ServiceInputTypes, + ServiceOutputTypes + >; + new ( + __0_0: RestoreTableToPointInTimeCommandInput + ): import("@smithy/smithy-client").CommandImpl< + RestoreTableToPointInTimeCommandInput, + RestoreTableToPointInTimeCommandOutput, + DynamoDBClientResolvedConfig, + ServiceInputTypes, + ServiceOutputTypes + >; + getEndpointParameterInstructions(): import("@smithy/middleware-endpoint").EndpointParameterInstructions; +}; +export declare class RestoreTableToPointInTimeCommand extends RestoreTableToPointInTimeCommand_base { + protected static __types: { + api: { + input: RestoreTableToPointInTimeInput; + output: RestoreTableToPointInTimeOutput; + }; + sdk: { + input: RestoreTableToPointInTimeCommandInput; + output: RestoreTableToPointInTimeCommandOutput; + }; + }; +} diff --git a/amplify/functions/fetchDocuments/node_modules/@aws-sdk/client-dynamodb/dist-types/ts3.4/commands/ScanCommand.d.ts 
b/amplify/functions/fetchDocuments/node_modules/@aws-sdk/client-dynamodb/dist-types/ts3.4/commands/ScanCommand.d.ts new file mode 100644 index 0000000..fe66b35 --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@aws-sdk/client-dynamodb/dist-types/ts3.4/commands/ScanCommand.d.ts @@ -0,0 +1,41 @@ +import { Command as $Command } from "@smithy/smithy-client"; +import { MetadataBearer as __MetadataBearer } from "@smithy/types"; +import { + DynamoDBClientResolvedConfig, + ServiceInputTypes, + ServiceOutputTypes, +} from "../DynamoDBClient"; +import { ScanInput, ScanOutput } from "../models/models_0"; +export { __MetadataBearer }; +export { $Command }; +export interface ScanCommandInput extends ScanInput {} +export interface ScanCommandOutput extends ScanOutput, __MetadataBearer {} +declare const ScanCommand_base: { + new (input: ScanCommandInput): import("@smithy/smithy-client").CommandImpl< + ScanCommandInput, + ScanCommandOutput, + DynamoDBClientResolvedConfig, + ServiceInputTypes, + ServiceOutputTypes + >; + new (__0_0: ScanCommandInput): import("@smithy/smithy-client").CommandImpl< + ScanCommandInput, + ScanCommandOutput, + DynamoDBClientResolvedConfig, + ServiceInputTypes, + ServiceOutputTypes + >; + getEndpointParameterInstructions(): import("@smithy/middleware-endpoint").EndpointParameterInstructions; +}; +export declare class ScanCommand extends ScanCommand_base { + protected static __types: { + api: { + input: ScanInput; + output: ScanOutput; + }; + sdk: { + input: ScanCommandInput; + output: ScanCommandOutput; + }; + }; +} diff --git a/amplify/functions/fetchDocuments/node_modules/@aws-sdk/client-dynamodb/dist-types/ts3.4/commands/TagResourceCommand.d.ts b/amplify/functions/fetchDocuments/node_modules/@aws-sdk/client-dynamodb/dist-types/ts3.4/commands/TagResourceCommand.d.ts new file mode 100644 index 0000000..2791246 --- /dev/null +++ 
b/amplify/functions/fetchDocuments/node_modules/@aws-sdk/client-dynamodb/dist-types/ts3.4/commands/TagResourceCommand.d.ts @@ -0,0 +1,45 @@ +import { Command as $Command } from "@smithy/smithy-client"; +import { MetadataBearer as __MetadataBearer } from "@smithy/types"; +import { + DynamoDBClientResolvedConfig, + ServiceInputTypes, + ServiceOutputTypes, +} from "../DynamoDBClient"; +import { TagResourceInput } from "../models/models_0"; +export { __MetadataBearer }; +export { $Command }; +export interface TagResourceCommandInput extends TagResourceInput {} +export interface TagResourceCommandOutput extends __MetadataBearer {} +declare const TagResourceCommand_base: { + new ( + input: TagResourceCommandInput + ): import("@smithy/smithy-client").CommandImpl< + TagResourceCommandInput, + TagResourceCommandOutput, + DynamoDBClientResolvedConfig, + ServiceInputTypes, + ServiceOutputTypes + >; + new ( + __0_0: TagResourceCommandInput + ): import("@smithy/smithy-client").CommandImpl< + TagResourceCommandInput, + TagResourceCommandOutput, + DynamoDBClientResolvedConfig, + ServiceInputTypes, + ServiceOutputTypes + >; + getEndpointParameterInstructions(): import("@smithy/middleware-endpoint").EndpointParameterInstructions; +}; +export declare class TagResourceCommand extends TagResourceCommand_base { + protected static __types: { + api: { + input: TagResourceInput; + output: {}; + }; + sdk: { + input: TagResourceCommandInput; + output: TagResourceCommandOutput; + }; + }; +} diff --git a/amplify/functions/fetchDocuments/node_modules/@aws-sdk/client-dynamodb/dist-types/ts3.4/commands/TransactGetItemsCommand.d.ts b/amplify/functions/fetchDocuments/node_modules/@aws-sdk/client-dynamodb/dist-types/ts3.4/commands/TransactGetItemsCommand.d.ts new file mode 100644 index 0000000..0068549 --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@aws-sdk/client-dynamodb/dist-types/ts3.4/commands/TransactGetItemsCommand.d.ts @@ -0,0 +1,50 @@ +import { Command as $Command } from 
"@smithy/smithy-client"; +import { MetadataBearer as __MetadataBearer } from "@smithy/types"; +import { + DynamoDBClientResolvedConfig, + ServiceInputTypes, + ServiceOutputTypes, +} from "../DynamoDBClient"; +import { + TransactGetItemsInput, + TransactGetItemsOutput, +} from "../models/models_0"; +export { __MetadataBearer }; +export { $Command }; +export interface TransactGetItemsCommandInput extends TransactGetItemsInput {} +export interface TransactGetItemsCommandOutput + extends TransactGetItemsOutput, + __MetadataBearer {} +declare const TransactGetItemsCommand_base: { + new ( + input: TransactGetItemsCommandInput + ): import("@smithy/smithy-client").CommandImpl< + TransactGetItemsCommandInput, + TransactGetItemsCommandOutput, + DynamoDBClientResolvedConfig, + ServiceInputTypes, + ServiceOutputTypes + >; + new ( + __0_0: TransactGetItemsCommandInput + ): import("@smithy/smithy-client").CommandImpl< + TransactGetItemsCommandInput, + TransactGetItemsCommandOutput, + DynamoDBClientResolvedConfig, + ServiceInputTypes, + ServiceOutputTypes + >; + getEndpointParameterInstructions(): import("@smithy/middleware-endpoint").EndpointParameterInstructions; +}; +export declare class TransactGetItemsCommand extends TransactGetItemsCommand_base { + protected static __types: { + api: { + input: TransactGetItemsInput; + output: TransactGetItemsOutput; + }; + sdk: { + input: TransactGetItemsCommandInput; + output: TransactGetItemsCommandOutput; + }; + }; +} diff --git a/amplify/functions/fetchDocuments/node_modules/@aws-sdk/client-dynamodb/dist-types/ts3.4/commands/TransactWriteItemsCommand.d.ts b/amplify/functions/fetchDocuments/node_modules/@aws-sdk/client-dynamodb/dist-types/ts3.4/commands/TransactWriteItemsCommand.d.ts new file mode 100644 index 0000000..f945bc5 --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@aws-sdk/client-dynamodb/dist-types/ts3.4/commands/TransactWriteItemsCommand.d.ts @@ -0,0 +1,51 @@ +import { Command as $Command } from 
"@smithy/smithy-client"; +import { MetadataBearer as __MetadataBearer } from "@smithy/types"; +import { + DynamoDBClientResolvedConfig, + ServiceInputTypes, + ServiceOutputTypes, +} from "../DynamoDBClient"; +import { + TransactWriteItemsInput, + TransactWriteItemsOutput, +} from "../models/models_0"; +export { __MetadataBearer }; +export { $Command }; +export interface TransactWriteItemsCommandInput + extends TransactWriteItemsInput {} +export interface TransactWriteItemsCommandOutput + extends TransactWriteItemsOutput, + __MetadataBearer {} +declare const TransactWriteItemsCommand_base: { + new ( + input: TransactWriteItemsCommandInput + ): import("@smithy/smithy-client").CommandImpl< + TransactWriteItemsCommandInput, + TransactWriteItemsCommandOutput, + DynamoDBClientResolvedConfig, + ServiceInputTypes, + ServiceOutputTypes + >; + new ( + __0_0: TransactWriteItemsCommandInput + ): import("@smithy/smithy-client").CommandImpl< + TransactWriteItemsCommandInput, + TransactWriteItemsCommandOutput, + DynamoDBClientResolvedConfig, + ServiceInputTypes, + ServiceOutputTypes + >; + getEndpointParameterInstructions(): import("@smithy/middleware-endpoint").EndpointParameterInstructions; +}; +export declare class TransactWriteItemsCommand extends TransactWriteItemsCommand_base { + protected static __types: { + api: { + input: TransactWriteItemsInput; + output: TransactWriteItemsOutput; + }; + sdk: { + input: TransactWriteItemsCommandInput; + output: TransactWriteItemsCommandOutput; + }; + }; +} diff --git a/amplify/functions/fetchDocuments/node_modules/@aws-sdk/client-dynamodb/dist-types/ts3.4/commands/UntagResourceCommand.d.ts b/amplify/functions/fetchDocuments/node_modules/@aws-sdk/client-dynamodb/dist-types/ts3.4/commands/UntagResourceCommand.d.ts new file mode 100644 index 0000000..7744ef6 --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@aws-sdk/client-dynamodb/dist-types/ts3.4/commands/UntagResourceCommand.d.ts @@ -0,0 +1,45 @@ +import { Command as 
$Command } from "@smithy/smithy-client"; +import { MetadataBearer as __MetadataBearer } from "@smithy/types"; +import { + DynamoDBClientResolvedConfig, + ServiceInputTypes, + ServiceOutputTypes, +} from "../DynamoDBClient"; +import { UntagResourceInput } from "../models/models_0"; +export { __MetadataBearer }; +export { $Command }; +export interface UntagResourceCommandInput extends UntagResourceInput {} +export interface UntagResourceCommandOutput extends __MetadataBearer {} +declare const UntagResourceCommand_base: { + new ( + input: UntagResourceCommandInput + ): import("@smithy/smithy-client").CommandImpl< + UntagResourceCommandInput, + UntagResourceCommandOutput, + DynamoDBClientResolvedConfig, + ServiceInputTypes, + ServiceOutputTypes + >; + new ( + __0_0: UntagResourceCommandInput + ): import("@smithy/smithy-client").CommandImpl< + UntagResourceCommandInput, + UntagResourceCommandOutput, + DynamoDBClientResolvedConfig, + ServiceInputTypes, + ServiceOutputTypes + >; + getEndpointParameterInstructions(): import("@smithy/middleware-endpoint").EndpointParameterInstructions; +}; +export declare class UntagResourceCommand extends UntagResourceCommand_base { + protected static __types: { + api: { + input: UntagResourceInput; + output: {}; + }; + sdk: { + input: UntagResourceCommandInput; + output: UntagResourceCommandOutput; + }; + }; +} diff --git a/amplify/functions/fetchDocuments/node_modules/@aws-sdk/client-dynamodb/dist-types/ts3.4/commands/UpdateContinuousBackupsCommand.d.ts b/amplify/functions/fetchDocuments/node_modules/@aws-sdk/client-dynamodb/dist-types/ts3.4/commands/UpdateContinuousBackupsCommand.d.ts new file mode 100644 index 0000000..d771ccd --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@aws-sdk/client-dynamodb/dist-types/ts3.4/commands/UpdateContinuousBackupsCommand.d.ts @@ -0,0 +1,51 @@ +import { Command as $Command } from "@smithy/smithy-client"; +import { MetadataBearer as __MetadataBearer } from "@smithy/types"; +import { + 
DynamoDBClientResolvedConfig, + ServiceInputTypes, + ServiceOutputTypes, +} from "../DynamoDBClient"; +import { + UpdateContinuousBackupsInput, + UpdateContinuousBackupsOutput, +} from "../models/models_0"; +export { __MetadataBearer }; +export { $Command }; +export interface UpdateContinuousBackupsCommandInput + extends UpdateContinuousBackupsInput {} +export interface UpdateContinuousBackupsCommandOutput + extends UpdateContinuousBackupsOutput, + __MetadataBearer {} +declare const UpdateContinuousBackupsCommand_base: { + new ( + input: UpdateContinuousBackupsCommandInput + ): import("@smithy/smithy-client").CommandImpl< + UpdateContinuousBackupsCommandInput, + UpdateContinuousBackupsCommandOutput, + DynamoDBClientResolvedConfig, + ServiceInputTypes, + ServiceOutputTypes + >; + new ( + __0_0: UpdateContinuousBackupsCommandInput + ): import("@smithy/smithy-client").CommandImpl< + UpdateContinuousBackupsCommandInput, + UpdateContinuousBackupsCommandOutput, + DynamoDBClientResolvedConfig, + ServiceInputTypes, + ServiceOutputTypes + >; + getEndpointParameterInstructions(): import("@smithy/middleware-endpoint").EndpointParameterInstructions; +}; +export declare class UpdateContinuousBackupsCommand extends UpdateContinuousBackupsCommand_base { + protected static __types: { + api: { + input: UpdateContinuousBackupsInput; + output: UpdateContinuousBackupsOutput; + }; + sdk: { + input: UpdateContinuousBackupsCommandInput; + output: UpdateContinuousBackupsCommandOutput; + }; + }; +} diff --git a/amplify/functions/fetchDocuments/node_modules/@aws-sdk/client-dynamodb/dist-types/ts3.4/commands/UpdateContributorInsightsCommand.d.ts b/amplify/functions/fetchDocuments/node_modules/@aws-sdk/client-dynamodb/dist-types/ts3.4/commands/UpdateContributorInsightsCommand.d.ts new file mode 100644 index 0000000..07ce57b --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@aws-sdk/client-dynamodb/dist-types/ts3.4/commands/UpdateContributorInsightsCommand.d.ts @@ -0,0 +1,51 @@ 
+import { Command as $Command } from "@smithy/smithy-client"; +import { MetadataBearer as __MetadataBearer } from "@smithy/types"; +import { + DynamoDBClientResolvedConfig, + ServiceInputTypes, + ServiceOutputTypes, +} from "../DynamoDBClient"; +import { + UpdateContributorInsightsInput, + UpdateContributorInsightsOutput, +} from "../models/models_0"; +export { __MetadataBearer }; +export { $Command }; +export interface UpdateContributorInsightsCommandInput + extends UpdateContributorInsightsInput {} +export interface UpdateContributorInsightsCommandOutput + extends UpdateContributorInsightsOutput, + __MetadataBearer {} +declare const UpdateContributorInsightsCommand_base: { + new ( + input: UpdateContributorInsightsCommandInput + ): import("@smithy/smithy-client").CommandImpl< + UpdateContributorInsightsCommandInput, + UpdateContributorInsightsCommandOutput, + DynamoDBClientResolvedConfig, + ServiceInputTypes, + ServiceOutputTypes + >; + new ( + __0_0: UpdateContributorInsightsCommandInput + ): import("@smithy/smithy-client").CommandImpl< + UpdateContributorInsightsCommandInput, + UpdateContributorInsightsCommandOutput, + DynamoDBClientResolvedConfig, + ServiceInputTypes, + ServiceOutputTypes + >; + getEndpointParameterInstructions(): import("@smithy/middleware-endpoint").EndpointParameterInstructions; +}; +export declare class UpdateContributorInsightsCommand extends UpdateContributorInsightsCommand_base { + protected static __types: { + api: { + input: UpdateContributorInsightsInput; + output: UpdateContributorInsightsOutput; + }; + sdk: { + input: UpdateContributorInsightsCommandInput; + output: UpdateContributorInsightsCommandOutput; + }; + }; +} diff --git a/amplify/functions/fetchDocuments/node_modules/@aws-sdk/client-dynamodb/dist-types/ts3.4/commands/UpdateGlobalTableCommand.d.ts b/amplify/functions/fetchDocuments/node_modules/@aws-sdk/client-dynamodb/dist-types/ts3.4/commands/UpdateGlobalTableCommand.d.ts new file mode 100644 index 0000000..415ecd8 --- 
/dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@aws-sdk/client-dynamodb/dist-types/ts3.4/commands/UpdateGlobalTableCommand.d.ts @@ -0,0 +1,50 @@ +import { Command as $Command } from "@smithy/smithy-client"; +import { MetadataBearer as __MetadataBearer } from "@smithy/types"; +import { + DynamoDBClientResolvedConfig, + ServiceInputTypes, + ServiceOutputTypes, +} from "../DynamoDBClient"; +import { + UpdateGlobalTableInput, + UpdateGlobalTableOutput, +} from "../models/models_0"; +export { __MetadataBearer }; +export { $Command }; +export interface UpdateGlobalTableCommandInput extends UpdateGlobalTableInput {} +export interface UpdateGlobalTableCommandOutput + extends UpdateGlobalTableOutput, + __MetadataBearer {} +declare const UpdateGlobalTableCommand_base: { + new ( + input: UpdateGlobalTableCommandInput + ): import("@smithy/smithy-client").CommandImpl< + UpdateGlobalTableCommandInput, + UpdateGlobalTableCommandOutput, + DynamoDBClientResolvedConfig, + ServiceInputTypes, + ServiceOutputTypes + >; + new ( + __0_0: UpdateGlobalTableCommandInput + ): import("@smithy/smithy-client").CommandImpl< + UpdateGlobalTableCommandInput, + UpdateGlobalTableCommandOutput, + DynamoDBClientResolvedConfig, + ServiceInputTypes, + ServiceOutputTypes + >; + getEndpointParameterInstructions(): import("@smithy/middleware-endpoint").EndpointParameterInstructions; +}; +export declare class UpdateGlobalTableCommand extends UpdateGlobalTableCommand_base { + protected static __types: { + api: { + input: UpdateGlobalTableInput; + output: UpdateGlobalTableOutput; + }; + sdk: { + input: UpdateGlobalTableCommandInput; + output: UpdateGlobalTableCommandOutput; + }; + }; +} diff --git a/amplify/functions/fetchDocuments/node_modules/@aws-sdk/client-dynamodb/dist-types/ts3.4/commands/UpdateGlobalTableSettingsCommand.d.ts b/amplify/functions/fetchDocuments/node_modules/@aws-sdk/client-dynamodb/dist-types/ts3.4/commands/UpdateGlobalTableSettingsCommand.d.ts new file mode 100644 index 
0000000..653ae6c --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@aws-sdk/client-dynamodb/dist-types/ts3.4/commands/UpdateGlobalTableSettingsCommand.d.ts @@ -0,0 +1,51 @@ +import { Command as $Command } from "@smithy/smithy-client"; +import { MetadataBearer as __MetadataBearer } from "@smithy/types"; +import { + DynamoDBClientResolvedConfig, + ServiceInputTypes, + ServiceOutputTypes, +} from "../DynamoDBClient"; +import { + UpdateGlobalTableSettingsInput, + UpdateGlobalTableSettingsOutput, +} from "../models/models_0"; +export { __MetadataBearer }; +export { $Command }; +export interface UpdateGlobalTableSettingsCommandInput + extends UpdateGlobalTableSettingsInput {} +export interface UpdateGlobalTableSettingsCommandOutput + extends UpdateGlobalTableSettingsOutput, + __MetadataBearer {} +declare const UpdateGlobalTableSettingsCommand_base: { + new ( + input: UpdateGlobalTableSettingsCommandInput + ): import("@smithy/smithy-client").CommandImpl< + UpdateGlobalTableSettingsCommandInput, + UpdateGlobalTableSettingsCommandOutput, + DynamoDBClientResolvedConfig, + ServiceInputTypes, + ServiceOutputTypes + >; + new ( + __0_0: UpdateGlobalTableSettingsCommandInput + ): import("@smithy/smithy-client").CommandImpl< + UpdateGlobalTableSettingsCommandInput, + UpdateGlobalTableSettingsCommandOutput, + DynamoDBClientResolvedConfig, + ServiceInputTypes, + ServiceOutputTypes + >; + getEndpointParameterInstructions(): import("@smithy/middleware-endpoint").EndpointParameterInstructions; +}; +export declare class UpdateGlobalTableSettingsCommand extends UpdateGlobalTableSettingsCommand_base { + protected static __types: { + api: { + input: UpdateGlobalTableSettingsInput; + output: UpdateGlobalTableSettingsOutput; + }; + sdk: { + input: UpdateGlobalTableSettingsCommandInput; + output: UpdateGlobalTableSettingsCommandOutput; + }; + }; +} diff --git 
a/amplify/functions/fetchDocuments/node_modules/@aws-sdk/client-dynamodb/dist-types/ts3.4/commands/UpdateItemCommand.d.ts b/amplify/functions/fetchDocuments/node_modules/@aws-sdk/client-dynamodb/dist-types/ts3.4/commands/UpdateItemCommand.d.ts new file mode 100644 index 0000000..b302067 --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@aws-sdk/client-dynamodb/dist-types/ts3.4/commands/UpdateItemCommand.d.ts @@ -0,0 +1,47 @@ +import { Command as $Command } from "@smithy/smithy-client"; +import { MetadataBearer as __MetadataBearer } from "@smithy/types"; +import { + DynamoDBClientResolvedConfig, + ServiceInputTypes, + ServiceOutputTypes, +} from "../DynamoDBClient"; +import { UpdateItemInput, UpdateItemOutput } from "../models/models_0"; +export { __MetadataBearer }; +export { $Command }; +export interface UpdateItemCommandInput extends UpdateItemInput {} +export interface UpdateItemCommandOutput + extends UpdateItemOutput, + __MetadataBearer {} +declare const UpdateItemCommand_base: { + new ( + input: UpdateItemCommandInput + ): import("@smithy/smithy-client").CommandImpl< + UpdateItemCommandInput, + UpdateItemCommandOutput, + DynamoDBClientResolvedConfig, + ServiceInputTypes, + ServiceOutputTypes + >; + new ( + __0_0: UpdateItemCommandInput + ): import("@smithy/smithy-client").CommandImpl< + UpdateItemCommandInput, + UpdateItemCommandOutput, + DynamoDBClientResolvedConfig, + ServiceInputTypes, + ServiceOutputTypes + >; + getEndpointParameterInstructions(): import("@smithy/middleware-endpoint").EndpointParameterInstructions; +}; +export declare class UpdateItemCommand extends UpdateItemCommand_base { + protected static __types: { + api: { + input: UpdateItemInput; + output: UpdateItemOutput; + }; + sdk: { + input: UpdateItemCommandInput; + output: UpdateItemCommandOutput; + }; + }; +} diff --git a/amplify/functions/fetchDocuments/node_modules/@aws-sdk/client-dynamodb/dist-types/ts3.4/commands/UpdateKinesisStreamingDestinationCommand.d.ts 
b/amplify/functions/fetchDocuments/node_modules/@aws-sdk/client-dynamodb/dist-types/ts3.4/commands/UpdateKinesisStreamingDestinationCommand.d.ts new file mode 100644 index 0000000..a36aa7d --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@aws-sdk/client-dynamodb/dist-types/ts3.4/commands/UpdateKinesisStreamingDestinationCommand.d.ts @@ -0,0 +1,51 @@ +import { Command as $Command } from "@smithy/smithy-client"; +import { MetadataBearer as __MetadataBearer } from "@smithy/types"; +import { + DynamoDBClientResolvedConfig, + ServiceInputTypes, + ServiceOutputTypes, +} from "../DynamoDBClient"; +import { + UpdateKinesisStreamingDestinationInput, + UpdateKinesisStreamingDestinationOutput, +} from "../models/models_0"; +export { __MetadataBearer }; +export { $Command }; +export interface UpdateKinesisStreamingDestinationCommandInput + extends UpdateKinesisStreamingDestinationInput {} +export interface UpdateKinesisStreamingDestinationCommandOutput + extends UpdateKinesisStreamingDestinationOutput, + __MetadataBearer {} +declare const UpdateKinesisStreamingDestinationCommand_base: { + new ( + input: UpdateKinesisStreamingDestinationCommandInput + ): import("@smithy/smithy-client").CommandImpl< + UpdateKinesisStreamingDestinationCommandInput, + UpdateKinesisStreamingDestinationCommandOutput, + DynamoDBClientResolvedConfig, + ServiceInputTypes, + ServiceOutputTypes + >; + new ( + __0_0: UpdateKinesisStreamingDestinationCommandInput + ): import("@smithy/smithy-client").CommandImpl< + UpdateKinesisStreamingDestinationCommandInput, + UpdateKinesisStreamingDestinationCommandOutput, + DynamoDBClientResolvedConfig, + ServiceInputTypes, + ServiceOutputTypes + >; + getEndpointParameterInstructions(): import("@smithy/middleware-endpoint").EndpointParameterInstructions; +}; +export declare class UpdateKinesisStreamingDestinationCommand extends UpdateKinesisStreamingDestinationCommand_base { + protected static __types: { + api: { + input: 
UpdateKinesisStreamingDestinationInput; + output: UpdateKinesisStreamingDestinationOutput; + }; + sdk: { + input: UpdateKinesisStreamingDestinationCommandInput; + output: UpdateKinesisStreamingDestinationCommandOutput; + }; + }; +} diff --git a/amplify/functions/fetchDocuments/node_modules/@aws-sdk/client-dynamodb/dist-types/ts3.4/commands/UpdateTableCommand.d.ts b/amplify/functions/fetchDocuments/node_modules/@aws-sdk/client-dynamodb/dist-types/ts3.4/commands/UpdateTableCommand.d.ts new file mode 100644 index 0000000..59afb16 --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@aws-sdk/client-dynamodb/dist-types/ts3.4/commands/UpdateTableCommand.d.ts @@ -0,0 +1,47 @@ +import { Command as $Command } from "@smithy/smithy-client"; +import { MetadataBearer as __MetadataBearer } from "@smithy/types"; +import { + DynamoDBClientResolvedConfig, + ServiceInputTypes, + ServiceOutputTypes, +} from "../DynamoDBClient"; +import { UpdateTableInput, UpdateTableOutput } from "../models/models_0"; +export { __MetadataBearer }; +export { $Command }; +export interface UpdateTableCommandInput extends UpdateTableInput {} +export interface UpdateTableCommandOutput + extends UpdateTableOutput, + __MetadataBearer {} +declare const UpdateTableCommand_base: { + new ( + input: UpdateTableCommandInput + ): import("@smithy/smithy-client").CommandImpl< + UpdateTableCommandInput, + UpdateTableCommandOutput, + DynamoDBClientResolvedConfig, + ServiceInputTypes, + ServiceOutputTypes + >; + new ( + __0_0: UpdateTableCommandInput + ): import("@smithy/smithy-client").CommandImpl< + UpdateTableCommandInput, + UpdateTableCommandOutput, + DynamoDBClientResolvedConfig, + ServiceInputTypes, + ServiceOutputTypes + >; + getEndpointParameterInstructions(): import("@smithy/middleware-endpoint").EndpointParameterInstructions; +}; +export declare class UpdateTableCommand extends UpdateTableCommand_base { + protected static __types: { + api: { + input: UpdateTableInput; + output: UpdateTableOutput; 
+ }; + sdk: { + input: UpdateTableCommandInput; + output: UpdateTableCommandOutput; + }; + }; +} diff --git a/amplify/functions/fetchDocuments/node_modules/@aws-sdk/client-dynamodb/dist-types/ts3.4/commands/UpdateTableReplicaAutoScalingCommand.d.ts b/amplify/functions/fetchDocuments/node_modules/@aws-sdk/client-dynamodb/dist-types/ts3.4/commands/UpdateTableReplicaAutoScalingCommand.d.ts new file mode 100644 index 0000000..9f2925f --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@aws-sdk/client-dynamodb/dist-types/ts3.4/commands/UpdateTableReplicaAutoScalingCommand.d.ts @@ -0,0 +1,51 @@ +import { Command as $Command } from "@smithy/smithy-client"; +import { MetadataBearer as __MetadataBearer } from "@smithy/types"; +import { + DynamoDBClientResolvedConfig, + ServiceInputTypes, + ServiceOutputTypes, +} from "../DynamoDBClient"; +import { + UpdateTableReplicaAutoScalingInput, + UpdateTableReplicaAutoScalingOutput, +} from "../models/models_0"; +export { __MetadataBearer }; +export { $Command }; +export interface UpdateTableReplicaAutoScalingCommandInput + extends UpdateTableReplicaAutoScalingInput {} +export interface UpdateTableReplicaAutoScalingCommandOutput + extends UpdateTableReplicaAutoScalingOutput, + __MetadataBearer {} +declare const UpdateTableReplicaAutoScalingCommand_base: { + new ( + input: UpdateTableReplicaAutoScalingCommandInput + ): import("@smithy/smithy-client").CommandImpl< + UpdateTableReplicaAutoScalingCommandInput, + UpdateTableReplicaAutoScalingCommandOutput, + DynamoDBClientResolvedConfig, + ServiceInputTypes, + ServiceOutputTypes + >; + new ( + __0_0: UpdateTableReplicaAutoScalingCommandInput + ): import("@smithy/smithy-client").CommandImpl< + UpdateTableReplicaAutoScalingCommandInput, + UpdateTableReplicaAutoScalingCommandOutput, + DynamoDBClientResolvedConfig, + ServiceInputTypes, + ServiceOutputTypes + >; + getEndpointParameterInstructions(): import("@smithy/middleware-endpoint").EndpointParameterInstructions; +}; +export 
declare class UpdateTableReplicaAutoScalingCommand extends UpdateTableReplicaAutoScalingCommand_base { + protected static __types: { + api: { + input: UpdateTableReplicaAutoScalingInput; + output: UpdateTableReplicaAutoScalingOutput; + }; + sdk: { + input: UpdateTableReplicaAutoScalingCommandInput; + output: UpdateTableReplicaAutoScalingCommandOutput; + }; + }; +} diff --git a/amplify/functions/fetchDocuments/node_modules/@aws-sdk/client-dynamodb/dist-types/ts3.4/commands/UpdateTimeToLiveCommand.d.ts b/amplify/functions/fetchDocuments/node_modules/@aws-sdk/client-dynamodb/dist-types/ts3.4/commands/UpdateTimeToLiveCommand.d.ts new file mode 100644 index 0000000..a3f7b3b --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@aws-sdk/client-dynamodb/dist-types/ts3.4/commands/UpdateTimeToLiveCommand.d.ts @@ -0,0 +1,50 @@ +import { Command as $Command } from "@smithy/smithy-client"; +import { MetadataBearer as __MetadataBearer } from "@smithy/types"; +import { + DynamoDBClientResolvedConfig, + ServiceInputTypes, + ServiceOutputTypes, +} from "../DynamoDBClient"; +import { + UpdateTimeToLiveInput, + UpdateTimeToLiveOutput, +} from "../models/models_0"; +export { __MetadataBearer }; +export { $Command }; +export interface UpdateTimeToLiveCommandInput extends UpdateTimeToLiveInput {} +export interface UpdateTimeToLiveCommandOutput + extends UpdateTimeToLiveOutput, + __MetadataBearer {} +declare const UpdateTimeToLiveCommand_base: { + new ( + input: UpdateTimeToLiveCommandInput + ): import("@smithy/smithy-client").CommandImpl< + UpdateTimeToLiveCommandInput, + UpdateTimeToLiveCommandOutput, + DynamoDBClientResolvedConfig, + ServiceInputTypes, + ServiceOutputTypes + >; + new ( + __0_0: UpdateTimeToLiveCommandInput + ): import("@smithy/smithy-client").CommandImpl< + UpdateTimeToLiveCommandInput, + UpdateTimeToLiveCommandOutput, + DynamoDBClientResolvedConfig, + ServiceInputTypes, + ServiceOutputTypes + >; + getEndpointParameterInstructions(): 
import("@smithy/middleware-endpoint").EndpointParameterInstructions; +}; +export declare class UpdateTimeToLiveCommand extends UpdateTimeToLiveCommand_base { + protected static __types: { + api: { + input: UpdateTimeToLiveInput; + output: UpdateTimeToLiveOutput; + }; + sdk: { + input: UpdateTimeToLiveCommandInput; + output: UpdateTimeToLiveCommandOutput; + }; + }; +} diff --git a/amplify/functions/fetchDocuments/node_modules/@aws-sdk/client-dynamodb/dist-types/ts3.4/commands/index.d.ts b/amplify/functions/fetchDocuments/node_modules/@aws-sdk/client-dynamodb/dist-types/ts3.4/commands/index.d.ts new file mode 100644 index 0000000..a5053a4 --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@aws-sdk/client-dynamodb/dist-types/ts3.4/commands/index.d.ts @@ -0,0 +1,57 @@ +export * from "./BatchExecuteStatementCommand"; +export * from "./BatchGetItemCommand"; +export * from "./BatchWriteItemCommand"; +export * from "./CreateBackupCommand"; +export * from "./CreateGlobalTableCommand"; +export * from "./CreateTableCommand"; +export * from "./DeleteBackupCommand"; +export * from "./DeleteItemCommand"; +export * from "./DeleteResourcePolicyCommand"; +export * from "./DeleteTableCommand"; +export * from "./DescribeBackupCommand"; +export * from "./DescribeContinuousBackupsCommand"; +export * from "./DescribeContributorInsightsCommand"; +export * from "./DescribeEndpointsCommand"; +export * from "./DescribeExportCommand"; +export * from "./DescribeGlobalTableCommand"; +export * from "./DescribeGlobalTableSettingsCommand"; +export * from "./DescribeImportCommand"; +export * from "./DescribeKinesisStreamingDestinationCommand"; +export * from "./DescribeLimitsCommand"; +export * from "./DescribeTableCommand"; +export * from "./DescribeTableReplicaAutoScalingCommand"; +export * from "./DescribeTimeToLiveCommand"; +export * from "./DisableKinesisStreamingDestinationCommand"; +export * from "./EnableKinesisStreamingDestinationCommand"; +export * from 
"./ExecuteStatementCommand"; +export * from "./ExecuteTransactionCommand"; +export * from "./ExportTableToPointInTimeCommand"; +export * from "./GetItemCommand"; +export * from "./GetResourcePolicyCommand"; +export * from "./ImportTableCommand"; +export * from "./ListBackupsCommand"; +export * from "./ListContributorInsightsCommand"; +export * from "./ListExportsCommand"; +export * from "./ListGlobalTablesCommand"; +export * from "./ListImportsCommand"; +export * from "./ListTablesCommand"; +export * from "./ListTagsOfResourceCommand"; +export * from "./PutItemCommand"; +export * from "./PutResourcePolicyCommand"; +export * from "./QueryCommand"; +export * from "./RestoreTableFromBackupCommand"; +export * from "./RestoreTableToPointInTimeCommand"; +export * from "./ScanCommand"; +export * from "./TagResourceCommand"; +export * from "./TransactGetItemsCommand"; +export * from "./TransactWriteItemsCommand"; +export * from "./UntagResourceCommand"; +export * from "./UpdateContinuousBackupsCommand"; +export * from "./UpdateContributorInsightsCommand"; +export * from "./UpdateGlobalTableCommand"; +export * from "./UpdateGlobalTableSettingsCommand"; +export * from "./UpdateItemCommand"; +export * from "./UpdateKinesisStreamingDestinationCommand"; +export * from "./UpdateTableCommand"; +export * from "./UpdateTableReplicaAutoScalingCommand"; +export * from "./UpdateTimeToLiveCommand"; diff --git a/amplify/functions/fetchDocuments/node_modules/@aws-sdk/client-dynamodb/dist-types/ts3.4/endpoint/EndpointParameters.d.ts b/amplify/functions/fetchDocuments/node_modules/@aws-sdk/client-dynamodb/dist-types/ts3.4/endpoint/EndpointParameters.d.ts new file mode 100644 index 0000000..bef37c3 --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@aws-sdk/client-dynamodb/dist-types/ts3.4/endpoint/EndpointParameters.d.ts @@ -0,0 +1,65 @@ +import { + Endpoint, + EndpointParameters as __EndpointParameters, + EndpointV2, + Provider, +} from "@smithy/types"; +export interface 
ClientInputEndpointParameters { + region?: string | Provider; + useDualstackEndpoint?: boolean | Provider; + useFipsEndpoint?: boolean | Provider; + endpoint?: + | string + | Provider + | Endpoint + | Provider + | EndpointV2 + | Provider; + accountId?: string | Provider; + accountIdEndpointMode?: string | Provider; +} +export type ClientResolvedEndpointParameters = ClientInputEndpointParameters & { + defaultSigningName: string; +}; +export declare const resolveClientEndpointParameters: ( + options: T & ClientInputEndpointParameters +) => T & + ClientInputEndpointParameters & { + defaultSigningName: string; + }; +export declare const commonParams: { + readonly UseFIPS: { + readonly type: "builtInParams"; + readonly name: "useFipsEndpoint"; + }; + readonly AccountId: { + readonly type: "builtInParams"; + readonly name: "accountId"; + }; + readonly Endpoint: { + readonly type: "builtInParams"; + readonly name: "endpoint"; + }; + readonly Region: { + readonly type: "builtInParams"; + readonly name: "region"; + }; + readonly UseDualStack: { + readonly type: "builtInParams"; + readonly name: "useDualstackEndpoint"; + }; + readonly AccountIdEndpointMode: { + readonly type: "builtInParams"; + readonly name: "accountIdEndpointMode"; + }; +}; +export interface EndpointParameters extends __EndpointParameters { + Region?: string; + UseDualStack?: boolean; + UseFIPS?: boolean; + Endpoint?: string; + AccountId?: string; + AccountIdEndpointMode?: string; + ResourceArn?: string; + ResourceArnList?: string[]; +} diff --git a/amplify/functions/fetchDocuments/node_modules/@aws-sdk/client-dynamodb/dist-types/ts3.4/endpoint/endpointResolver.d.ts b/amplify/functions/fetchDocuments/node_modules/@aws-sdk/client-dynamodb/dist-types/ts3.4/endpoint/endpointResolver.d.ts new file mode 100644 index 0000000..5909925 --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@aws-sdk/client-dynamodb/dist-types/ts3.4/endpoint/endpointResolver.d.ts @@ -0,0 +1,8 @@ +import { EndpointV2, 
Logger } from "@smithy/types"; +import { EndpointParameters } from "./EndpointParameters"; +export declare const defaultEndpointResolver: ( + endpointParams: EndpointParameters, + context?: { + logger?: Logger; + } +) => EndpointV2; diff --git a/amplify/functions/fetchDocuments/node_modules/@aws-sdk/client-dynamodb/dist-types/ts3.4/endpoint/ruleset.d.ts b/amplify/functions/fetchDocuments/node_modules/@aws-sdk/client-dynamodb/dist-types/ts3.4/endpoint/ruleset.d.ts new file mode 100644 index 0000000..4b23899 --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@aws-sdk/client-dynamodb/dist-types/ts3.4/endpoint/ruleset.d.ts @@ -0,0 +1,2 @@ +import { RuleSetObject } from "@smithy/types"; +export declare const ruleSet: RuleSetObject; diff --git a/amplify/functions/fetchDocuments/node_modules/@aws-sdk/client-dynamodb/dist-types/ts3.4/extensionConfiguration.d.ts b/amplify/functions/fetchDocuments/node_modules/@aws-sdk/client-dynamodb/dist-types/ts3.4/extensionConfiguration.d.ts new file mode 100644 index 0000000..7db993d --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@aws-sdk/client-dynamodb/dist-types/ts3.4/extensionConfiguration.d.ts @@ -0,0 +1,9 @@ +import { AwsRegionExtensionConfiguration } from "@aws-sdk/types"; +import { HttpHandlerExtensionConfiguration } from "@smithy/protocol-http"; +import { DefaultExtensionConfiguration } from "@smithy/types"; +import { HttpAuthExtensionConfiguration } from "./auth/httpAuthExtensionConfiguration"; +export interface DynamoDBExtensionConfiguration + extends HttpHandlerExtensionConfiguration, + DefaultExtensionConfiguration, + AwsRegionExtensionConfiguration, + HttpAuthExtensionConfiguration {} diff --git a/amplify/functions/fetchDocuments/node_modules/@aws-sdk/client-dynamodb/dist-types/ts3.4/index.d.ts b/amplify/functions/fetchDocuments/node_modules/@aws-sdk/client-dynamodb/dist-types/ts3.4/index.d.ts new file mode 100644 index 0000000..58fb2ff --- /dev/null +++ 
b/amplify/functions/fetchDocuments/node_modules/@aws-sdk/client-dynamodb/dist-types/ts3.4/index.d.ts @@ -0,0 +1,10 @@ +export * from "./DynamoDBClient"; +export * from "./DynamoDB"; +export { ClientInputEndpointParameters } from "./endpoint/EndpointParameters"; +export { RuntimeExtension } from "./runtimeExtensions"; +export { DynamoDBExtensionConfiguration } from "./extensionConfiguration"; +export * from "./commands"; +export * from "./pagination"; +export * from "./waiters"; +export * from "./models"; +export { DynamoDBServiceException } from "./models/DynamoDBServiceException"; diff --git a/amplify/functions/fetchDocuments/node_modules/@aws-sdk/client-dynamodb/dist-types/ts3.4/models/DynamoDBServiceException.d.ts b/amplify/functions/fetchDocuments/node_modules/@aws-sdk/client-dynamodb/dist-types/ts3.4/models/DynamoDBServiceException.d.ts new file mode 100644 index 0000000..e5bd2c6 --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@aws-sdk/client-dynamodb/dist-types/ts3.4/models/DynamoDBServiceException.d.ts @@ -0,0 +1,9 @@ +import { + ServiceException as __ServiceException, + ServiceExceptionOptions as __ServiceExceptionOptions, +} from "@smithy/smithy-client"; +export { __ServiceExceptionOptions }; +export { __ServiceException }; +export declare class DynamoDBServiceException extends __ServiceException { + constructor(options: __ServiceExceptionOptions); +} diff --git a/amplify/functions/fetchDocuments/node_modules/@aws-sdk/client-dynamodb/dist-types/ts3.4/models/index.d.ts b/amplify/functions/fetchDocuments/node_modules/@aws-sdk/client-dynamodb/dist-types/ts3.4/models/index.d.ts new file mode 100644 index 0000000..09c5d6e --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@aws-sdk/client-dynamodb/dist-types/ts3.4/models/index.d.ts @@ -0,0 +1 @@ +export * from "./models_0"; diff --git a/amplify/functions/fetchDocuments/node_modules/@aws-sdk/client-dynamodb/dist-types/ts3.4/models/models_0.d.ts 
b/amplify/functions/fetchDocuments/node_modules/@aws-sdk/client-dynamodb/dist-types/ts3.4/models/models_0.d.ts new file mode 100644 index 0000000..3acf86a --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@aws-sdk/client-dynamodb/dist-types/ts3.4/models/models_0.d.ts @@ -0,0 +1,2036 @@ +import { ExceptionOptionType as __ExceptionOptionType } from "@smithy/smithy-client"; +import { DynamoDBServiceException as __BaseException } from "./DynamoDBServiceException"; +export declare const ApproximateCreationDateTimePrecision: { + readonly MICROSECOND: "MICROSECOND"; + readonly MILLISECOND: "MILLISECOND"; +}; +export type ApproximateCreationDateTimePrecision = + (typeof ApproximateCreationDateTimePrecision)[keyof typeof ApproximateCreationDateTimePrecision]; +export interface ArchivalSummary { + ArchivalDateTime?: Date | undefined; + ArchivalReason?: string | undefined; + ArchivalBackupArn?: string | undefined; +} +export declare const AttributeAction: { + readonly ADD: "ADD"; + readonly DELETE: "DELETE"; + readonly PUT: "PUT"; +}; +export type AttributeAction = + (typeof AttributeAction)[keyof typeof AttributeAction]; +export declare const ScalarAttributeType: { + readonly B: "B"; + readonly N: "N"; + readonly S: "S"; +}; +export type ScalarAttributeType = + (typeof ScalarAttributeType)[keyof typeof ScalarAttributeType]; +export interface AttributeDefinition { + AttributeName: string | undefined; + AttributeType: ScalarAttributeType | undefined; +} +export interface AutoScalingTargetTrackingScalingPolicyConfigurationDescription { + DisableScaleIn?: boolean | undefined; + ScaleInCooldown?: number | undefined; + ScaleOutCooldown?: number | undefined; + TargetValue: number | undefined; +} +export interface AutoScalingPolicyDescription { + PolicyName?: string | undefined; + TargetTrackingScalingPolicyConfiguration?: + | AutoScalingTargetTrackingScalingPolicyConfigurationDescription + | undefined; +} +export interface 
AutoScalingTargetTrackingScalingPolicyConfigurationUpdate { + DisableScaleIn?: boolean | undefined; + ScaleInCooldown?: number | undefined; + ScaleOutCooldown?: number | undefined; + TargetValue: number | undefined; +} +export interface AutoScalingPolicyUpdate { + PolicyName?: string | undefined; + TargetTrackingScalingPolicyConfiguration: + | AutoScalingTargetTrackingScalingPolicyConfigurationUpdate + | undefined; +} +export interface AutoScalingSettingsDescription { + MinimumUnits?: number | undefined; + MaximumUnits?: number | undefined; + AutoScalingDisabled?: boolean | undefined; + AutoScalingRoleArn?: string | undefined; + ScalingPolicies?: AutoScalingPolicyDescription[] | undefined; +} +export interface AutoScalingSettingsUpdate { + MinimumUnits?: number | undefined; + MaximumUnits?: number | undefined; + AutoScalingDisabled?: boolean | undefined; + AutoScalingRoleArn?: string | undefined; + ScalingPolicyUpdate?: AutoScalingPolicyUpdate | undefined; +} +export declare const BackupStatus: { + readonly AVAILABLE: "AVAILABLE"; + readonly CREATING: "CREATING"; + readonly DELETED: "DELETED"; +}; +export type BackupStatus = (typeof BackupStatus)[keyof typeof BackupStatus]; +export declare const BackupType: { + readonly AWS_BACKUP: "AWS_BACKUP"; + readonly SYSTEM: "SYSTEM"; + readonly USER: "USER"; +}; +export type BackupType = (typeof BackupType)[keyof typeof BackupType]; +export interface BackupDetails { + BackupArn: string | undefined; + BackupName: string | undefined; + BackupSizeBytes?: number | undefined; + BackupStatus: BackupStatus | undefined; + BackupType: BackupType | undefined; + BackupCreationDateTime: Date | undefined; + BackupExpiryDateTime?: Date | undefined; +} +export declare const BillingMode: { + readonly PAY_PER_REQUEST: "PAY_PER_REQUEST"; + readonly PROVISIONED: "PROVISIONED"; +}; +export type BillingMode = (typeof BillingMode)[keyof typeof BillingMode]; +export declare const KeyType: { + readonly HASH: "HASH"; + readonly RANGE: "RANGE"; +}; 
+export type KeyType = (typeof KeyType)[keyof typeof KeyType]; +export interface KeySchemaElement { + AttributeName: string | undefined; + KeyType: KeyType | undefined; +} +export interface OnDemandThroughput { + MaxReadRequestUnits?: number | undefined; + MaxWriteRequestUnits?: number | undefined; +} +export interface ProvisionedThroughput { + ReadCapacityUnits: number | undefined; + WriteCapacityUnits: number | undefined; +} +export interface SourceTableDetails { + TableName: string | undefined; + TableId: string | undefined; + TableArn?: string | undefined; + TableSizeBytes?: number | undefined; + KeySchema: KeySchemaElement[] | undefined; + TableCreationDateTime: Date | undefined; + ProvisionedThroughput: ProvisionedThroughput | undefined; + OnDemandThroughput?: OnDemandThroughput | undefined; + ItemCount?: number | undefined; + BillingMode?: BillingMode | undefined; +} +export declare const ProjectionType: { + readonly ALL: "ALL"; + readonly INCLUDE: "INCLUDE"; + readonly KEYS_ONLY: "KEYS_ONLY"; +}; +export type ProjectionType = + (typeof ProjectionType)[keyof typeof ProjectionType]; +export interface Projection { + ProjectionType?: ProjectionType | undefined; + NonKeyAttributes?: string[] | undefined; +} +export interface GlobalSecondaryIndexInfo { + IndexName?: string | undefined; + KeySchema?: KeySchemaElement[] | undefined; + Projection?: Projection | undefined; + ProvisionedThroughput?: ProvisionedThroughput | undefined; + OnDemandThroughput?: OnDemandThroughput | undefined; +} +export interface LocalSecondaryIndexInfo { + IndexName?: string | undefined; + KeySchema?: KeySchemaElement[] | undefined; + Projection?: Projection | undefined; +} +export declare const SSEType: { + readonly AES256: "AES256"; + readonly KMS: "KMS"; +}; +export type SSEType = (typeof SSEType)[keyof typeof SSEType]; +export declare const SSEStatus: { + readonly DISABLED: "DISABLED"; + readonly DISABLING: "DISABLING"; + readonly ENABLED: "ENABLED"; + readonly ENABLING: "ENABLING"; + 
readonly UPDATING: "UPDATING"; +}; +export type SSEStatus = (typeof SSEStatus)[keyof typeof SSEStatus]; +export interface SSEDescription { + Status?: SSEStatus | undefined; + SSEType?: SSEType | undefined; + KMSMasterKeyArn?: string | undefined; + InaccessibleEncryptionDateTime?: Date | undefined; +} +export declare const StreamViewType: { + readonly KEYS_ONLY: "KEYS_ONLY"; + readonly NEW_AND_OLD_IMAGES: "NEW_AND_OLD_IMAGES"; + readonly NEW_IMAGE: "NEW_IMAGE"; + readonly OLD_IMAGE: "OLD_IMAGE"; +}; +export type StreamViewType = + (typeof StreamViewType)[keyof typeof StreamViewType]; +export interface StreamSpecification { + StreamEnabled: boolean | undefined; + StreamViewType?: StreamViewType | undefined; +} +export declare const TimeToLiveStatus: { + readonly DISABLED: "DISABLED"; + readonly DISABLING: "DISABLING"; + readonly ENABLED: "ENABLED"; + readonly ENABLING: "ENABLING"; +}; +export type TimeToLiveStatus = + (typeof TimeToLiveStatus)[keyof typeof TimeToLiveStatus]; +export interface TimeToLiveDescription { + TimeToLiveStatus?: TimeToLiveStatus | undefined; + AttributeName?: string | undefined; +} +export interface SourceTableFeatureDetails { + LocalSecondaryIndexes?: LocalSecondaryIndexInfo[] | undefined; + GlobalSecondaryIndexes?: GlobalSecondaryIndexInfo[] | undefined; + StreamDescription?: StreamSpecification | undefined; + TimeToLiveDescription?: TimeToLiveDescription | undefined; + SSEDescription?: SSEDescription | undefined; +} +export interface BackupDescription { + BackupDetails?: BackupDetails | undefined; + SourceTableDetails?: SourceTableDetails | undefined; + SourceTableFeatureDetails?: SourceTableFeatureDetails | undefined; +} +export declare class BackupInUseException extends __BaseException { + readonly name: "BackupInUseException"; + readonly $fault: "client"; + constructor( + opts: __ExceptionOptionType + ); +} +export declare class BackupNotFoundException extends __BaseException { + readonly name: "BackupNotFoundException"; + readonly 
$fault: "client"; + constructor( + opts: __ExceptionOptionType + ); +} +export interface BackupSummary { + TableName?: string | undefined; + TableId?: string | undefined; + TableArn?: string | undefined; + BackupArn?: string | undefined; + BackupName?: string | undefined; + BackupCreationDateTime?: Date | undefined; + BackupExpiryDateTime?: Date | undefined; + BackupStatus?: BackupStatus | undefined; + BackupType?: BackupType | undefined; + BackupSizeBytes?: number | undefined; +} +export declare const BackupTypeFilter: { + readonly ALL: "ALL"; + readonly AWS_BACKUP: "AWS_BACKUP"; + readonly SYSTEM: "SYSTEM"; + readonly USER: "USER"; +}; +export type BackupTypeFilter = + (typeof BackupTypeFilter)[keyof typeof BackupTypeFilter]; +export declare const ReturnConsumedCapacity: { + readonly INDEXES: "INDEXES"; + readonly NONE: "NONE"; + readonly TOTAL: "TOTAL"; +}; +export type ReturnConsumedCapacity = + (typeof ReturnConsumedCapacity)[keyof typeof ReturnConsumedCapacity]; +export declare const ReturnValuesOnConditionCheckFailure: { + readonly ALL_OLD: "ALL_OLD"; + readonly NONE: "NONE"; +}; +export type ReturnValuesOnConditionCheckFailure = + (typeof ReturnValuesOnConditionCheckFailure)[keyof typeof ReturnValuesOnConditionCheckFailure]; +export interface Capacity { + ReadCapacityUnits?: number | undefined; + WriteCapacityUnits?: number | undefined; + CapacityUnits?: number | undefined; +} +export interface ConsumedCapacity { + TableName?: string | undefined; + CapacityUnits?: number | undefined; + ReadCapacityUnits?: number | undefined; + WriteCapacityUnits?: number | undefined; + Table?: Capacity | undefined; + LocalSecondaryIndexes?: Record | undefined; + GlobalSecondaryIndexes?: Record | undefined; +} +export declare const BatchStatementErrorCodeEnum: { + readonly AccessDenied: "AccessDenied"; + readonly ConditionalCheckFailed: "ConditionalCheckFailed"; + readonly DuplicateItem: "DuplicateItem"; + readonly InternalServerError: "InternalServerError"; + readonly 
ItemCollectionSizeLimitExceeded: "ItemCollectionSizeLimitExceeded"; + readonly ProvisionedThroughputExceeded: "ProvisionedThroughputExceeded"; + readonly RequestLimitExceeded: "RequestLimitExceeded"; + readonly ResourceNotFound: "ResourceNotFound"; + readonly ThrottlingError: "ThrottlingError"; + readonly TransactionConflict: "TransactionConflict"; + readonly ValidationError: "ValidationError"; +}; +export type BatchStatementErrorCodeEnum = + (typeof BatchStatementErrorCodeEnum)[keyof typeof BatchStatementErrorCodeEnum]; +export declare class InternalServerError extends __BaseException { + readonly name: "InternalServerError"; + readonly $fault: "server"; + constructor( + opts: __ExceptionOptionType + ); +} +export declare class RequestLimitExceeded extends __BaseException { + readonly name: "RequestLimitExceeded"; + readonly $fault: "client"; + constructor( + opts: __ExceptionOptionType + ); +} +export declare class InvalidEndpointException extends __BaseException { + readonly name: "InvalidEndpointException"; + readonly $fault: "client"; + Message?: string | undefined; + constructor( + opts: __ExceptionOptionType + ); +} +export declare class ProvisionedThroughputExceededException extends __BaseException { + readonly name: "ProvisionedThroughputExceededException"; + readonly $fault: "client"; + constructor( + opts: __ExceptionOptionType< + ProvisionedThroughputExceededException, + __BaseException + > + ); +} +export declare class ResourceNotFoundException extends __BaseException { + readonly name: "ResourceNotFoundException"; + readonly $fault: "client"; + constructor( + opts: __ExceptionOptionType + ); +} +export declare const ReturnItemCollectionMetrics: { + readonly NONE: "NONE"; + readonly SIZE: "SIZE"; +}; +export type ReturnItemCollectionMetrics = + (typeof ReturnItemCollectionMetrics)[keyof typeof ReturnItemCollectionMetrics]; +export declare class ItemCollectionSizeLimitExceededException extends __BaseException { + readonly name: 
"ItemCollectionSizeLimitExceededException"; + readonly $fault: "client"; + constructor( + opts: __ExceptionOptionType< + ItemCollectionSizeLimitExceededException, + __BaseException + > + ); +} +export interface BillingModeSummary { + BillingMode?: BillingMode | undefined; + LastUpdateToPayPerRequestDateTime?: Date | undefined; +} +export declare const ComparisonOperator: { + readonly BEGINS_WITH: "BEGINS_WITH"; + readonly BETWEEN: "BETWEEN"; + readonly CONTAINS: "CONTAINS"; + readonly EQ: "EQ"; + readonly GE: "GE"; + readonly GT: "GT"; + readonly IN: "IN"; + readonly LE: "LE"; + readonly LT: "LT"; + readonly NE: "NE"; + readonly NOT_CONTAINS: "NOT_CONTAINS"; + readonly NOT_NULL: "NOT_NULL"; + readonly NULL: "NULL"; +}; +export type ComparisonOperator = + (typeof ComparisonOperator)[keyof typeof ComparisonOperator]; +export declare const ConditionalOperator: { + readonly AND: "AND"; + readonly OR: "OR"; +}; +export type ConditionalOperator = + (typeof ConditionalOperator)[keyof typeof ConditionalOperator]; +export declare const ContinuousBackupsStatus: { + readonly DISABLED: "DISABLED"; + readonly ENABLED: "ENABLED"; +}; +export type ContinuousBackupsStatus = + (typeof ContinuousBackupsStatus)[keyof typeof ContinuousBackupsStatus]; +export declare const PointInTimeRecoveryStatus: { + readonly DISABLED: "DISABLED"; + readonly ENABLED: "ENABLED"; +}; +export type PointInTimeRecoveryStatus = + (typeof PointInTimeRecoveryStatus)[keyof typeof PointInTimeRecoveryStatus]; +export interface PointInTimeRecoveryDescription { + PointInTimeRecoveryStatus?: PointInTimeRecoveryStatus | undefined; + RecoveryPeriodInDays?: number | undefined; + EarliestRestorableDateTime?: Date | undefined; + LatestRestorableDateTime?: Date | undefined; +} +export interface ContinuousBackupsDescription { + ContinuousBackupsStatus: ContinuousBackupsStatus | undefined; + PointInTimeRecoveryDescription?: PointInTimeRecoveryDescription | undefined; +} +export declare class 
ContinuousBackupsUnavailableException extends __BaseException { + readonly name: "ContinuousBackupsUnavailableException"; + readonly $fault: "client"; + constructor( + opts: __ExceptionOptionType< + ContinuousBackupsUnavailableException, + __BaseException + > + ); +} +export declare const ContributorInsightsAction: { + readonly DISABLE: "DISABLE"; + readonly ENABLE: "ENABLE"; +}; +export type ContributorInsightsAction = + (typeof ContributorInsightsAction)[keyof typeof ContributorInsightsAction]; +export declare const ContributorInsightsStatus: { + readonly DISABLED: "DISABLED"; + readonly DISABLING: "DISABLING"; + readonly ENABLED: "ENABLED"; + readonly ENABLING: "ENABLING"; + readonly FAILED: "FAILED"; +}; +export type ContributorInsightsStatus = + (typeof ContributorInsightsStatus)[keyof typeof ContributorInsightsStatus]; +export interface ContributorInsightsSummary { + TableName?: string | undefined; + IndexName?: string | undefined; + ContributorInsightsStatus?: ContributorInsightsStatus | undefined; +} +export interface CreateBackupInput { + TableName: string | undefined; + BackupName: string | undefined; +} +export interface CreateBackupOutput { + BackupDetails?: BackupDetails | undefined; +} +export declare class LimitExceededException extends __BaseException { + readonly name: "LimitExceededException"; + readonly $fault: "client"; + constructor( + opts: __ExceptionOptionType + ); +} +export declare class TableInUseException extends __BaseException { + readonly name: "TableInUseException"; + readonly $fault: "client"; + constructor( + opts: __ExceptionOptionType + ); +} +export declare class TableNotFoundException extends __BaseException { + readonly name: "TableNotFoundException"; + readonly $fault: "client"; + constructor( + opts: __ExceptionOptionType + ); +} +export interface WarmThroughput { + ReadUnitsPerSecond?: number | undefined; + WriteUnitsPerSecond?: number | undefined; +} +export interface CreateGlobalSecondaryIndexAction { + IndexName: string 
| undefined; + KeySchema: KeySchemaElement[] | undefined; + Projection: Projection | undefined; + ProvisionedThroughput?: ProvisionedThroughput | undefined; + OnDemandThroughput?: OnDemandThroughput | undefined; + WarmThroughput?: WarmThroughput | undefined; +} +export interface Replica { + RegionName?: string | undefined; +} +export interface CreateGlobalTableInput { + GlobalTableName: string | undefined; + ReplicationGroup: Replica[] | undefined; +} +export declare const GlobalTableStatus: { + readonly ACTIVE: "ACTIVE"; + readonly CREATING: "CREATING"; + readonly DELETING: "DELETING"; + readonly UPDATING: "UPDATING"; +}; +export type GlobalTableStatus = + (typeof GlobalTableStatus)[keyof typeof GlobalTableStatus]; +export interface OnDemandThroughputOverride { + MaxReadRequestUnits?: number | undefined; +} +export interface ProvisionedThroughputOverride { + ReadCapacityUnits?: number | undefined; +} +export declare const IndexStatus: { + readonly ACTIVE: "ACTIVE"; + readonly CREATING: "CREATING"; + readonly DELETING: "DELETING"; + readonly UPDATING: "UPDATING"; +}; +export type IndexStatus = (typeof IndexStatus)[keyof typeof IndexStatus]; +export interface GlobalSecondaryIndexWarmThroughputDescription { + ReadUnitsPerSecond?: number | undefined; + WriteUnitsPerSecond?: number | undefined; + Status?: IndexStatus | undefined; +} +export interface ReplicaGlobalSecondaryIndexDescription { + IndexName?: string | undefined; + ProvisionedThroughputOverride?: ProvisionedThroughputOverride | undefined; + OnDemandThroughputOverride?: OnDemandThroughputOverride | undefined; + WarmThroughput?: GlobalSecondaryIndexWarmThroughputDescription | undefined; +} +export declare const ReplicaStatus: { + readonly ACTIVE: "ACTIVE"; + readonly CREATING: "CREATING"; + readonly CREATION_FAILED: "CREATION_FAILED"; + readonly DELETING: "DELETING"; + readonly INACCESSIBLE_ENCRYPTION_CREDENTIALS: "INACCESSIBLE_ENCRYPTION_CREDENTIALS"; + readonly REGION_DISABLED: "REGION_DISABLED"; + readonly 
UPDATING: "UPDATING"; +}; +export type ReplicaStatus = (typeof ReplicaStatus)[keyof typeof ReplicaStatus]; +export declare const TableClass: { + readonly STANDARD: "STANDARD"; + readonly STANDARD_INFREQUENT_ACCESS: "STANDARD_INFREQUENT_ACCESS"; +}; +export type TableClass = (typeof TableClass)[keyof typeof TableClass]; +export interface TableClassSummary { + TableClass?: TableClass | undefined; + LastUpdateDateTime?: Date | undefined; +} +export declare const TableStatus: { + readonly ACTIVE: "ACTIVE"; + readonly ARCHIVED: "ARCHIVED"; + readonly ARCHIVING: "ARCHIVING"; + readonly CREATING: "CREATING"; + readonly DELETING: "DELETING"; + readonly INACCESSIBLE_ENCRYPTION_CREDENTIALS: "INACCESSIBLE_ENCRYPTION_CREDENTIALS"; + readonly UPDATING: "UPDATING"; +}; +export type TableStatus = (typeof TableStatus)[keyof typeof TableStatus]; +export interface TableWarmThroughputDescription { + ReadUnitsPerSecond?: number | undefined; + WriteUnitsPerSecond?: number | undefined; + Status?: TableStatus | undefined; +} +export interface ReplicaDescription { + RegionName?: string | undefined; + ReplicaStatus?: ReplicaStatus | undefined; + ReplicaStatusDescription?: string | undefined; + ReplicaStatusPercentProgress?: string | undefined; + KMSMasterKeyId?: string | undefined; + ProvisionedThroughputOverride?: ProvisionedThroughputOverride | undefined; + OnDemandThroughputOverride?: OnDemandThroughputOverride | undefined; + WarmThroughput?: TableWarmThroughputDescription | undefined; + GlobalSecondaryIndexes?: ReplicaGlobalSecondaryIndexDescription[] | undefined; + ReplicaInaccessibleDateTime?: Date | undefined; + ReplicaTableClassSummary?: TableClassSummary | undefined; +} +export interface GlobalTableDescription { + ReplicationGroup?: ReplicaDescription[] | undefined; + GlobalTableArn?: string | undefined; + CreationDateTime?: Date | undefined; + GlobalTableStatus?: GlobalTableStatus | undefined; + GlobalTableName?: string | undefined; +} +export interface CreateGlobalTableOutput { 
+ GlobalTableDescription?: GlobalTableDescription | undefined; +} +export declare class GlobalTableAlreadyExistsException extends __BaseException { + readonly name: "GlobalTableAlreadyExistsException"; + readonly $fault: "client"; + constructor( + opts: __ExceptionOptionType< + GlobalTableAlreadyExistsException, + __BaseException + > + ); +} +export interface CreateReplicaAction { + RegionName: string | undefined; +} +export interface ReplicaGlobalSecondaryIndex { + IndexName: string | undefined; + ProvisionedThroughputOverride?: ProvisionedThroughputOverride | undefined; + OnDemandThroughputOverride?: OnDemandThroughputOverride | undefined; +} +export interface CreateReplicationGroupMemberAction { + RegionName: string | undefined; + KMSMasterKeyId?: string | undefined; + ProvisionedThroughputOverride?: ProvisionedThroughputOverride | undefined; + OnDemandThroughputOverride?: OnDemandThroughputOverride | undefined; + GlobalSecondaryIndexes?: ReplicaGlobalSecondaryIndex[] | undefined; + TableClassOverride?: TableClass | undefined; +} +export interface GlobalSecondaryIndex { + IndexName: string | undefined; + KeySchema: KeySchemaElement[] | undefined; + Projection: Projection | undefined; + ProvisionedThroughput?: ProvisionedThroughput | undefined; + OnDemandThroughput?: OnDemandThroughput | undefined; + WarmThroughput?: WarmThroughput | undefined; +} +export interface LocalSecondaryIndex { + IndexName: string | undefined; + KeySchema: KeySchemaElement[] | undefined; + Projection: Projection | undefined; +} +export interface SSESpecification { + Enabled?: boolean | undefined; + SSEType?: SSEType | undefined; + KMSMasterKeyId?: string | undefined; +} +export interface Tag { + Key: string | undefined; + Value: string | undefined; +} +export interface CreateTableInput { + AttributeDefinitions: AttributeDefinition[] | undefined; + TableName: string | undefined; + KeySchema: KeySchemaElement[] | undefined; + LocalSecondaryIndexes?: LocalSecondaryIndex[] | undefined; + 
GlobalSecondaryIndexes?: GlobalSecondaryIndex[] | undefined; + BillingMode?: BillingMode | undefined; + ProvisionedThroughput?: ProvisionedThroughput | undefined; + StreamSpecification?: StreamSpecification | undefined; + SSESpecification?: SSESpecification | undefined; + Tags?: Tag[] | undefined; + TableClass?: TableClass | undefined; + DeletionProtectionEnabled?: boolean | undefined; + WarmThroughput?: WarmThroughput | undefined; + ResourcePolicy?: string | undefined; + OnDemandThroughput?: OnDemandThroughput | undefined; +} +export interface ProvisionedThroughputDescription { + LastIncreaseDateTime?: Date | undefined; + LastDecreaseDateTime?: Date | undefined; + NumberOfDecreasesToday?: number | undefined; + ReadCapacityUnits?: number | undefined; + WriteCapacityUnits?: number | undefined; +} +export interface GlobalSecondaryIndexDescription { + IndexName?: string | undefined; + KeySchema?: KeySchemaElement[] | undefined; + Projection?: Projection | undefined; + IndexStatus?: IndexStatus | undefined; + Backfilling?: boolean | undefined; + ProvisionedThroughput?: ProvisionedThroughputDescription | undefined; + IndexSizeBytes?: number | undefined; + ItemCount?: number | undefined; + IndexArn?: string | undefined; + OnDemandThroughput?: OnDemandThroughput | undefined; + WarmThroughput?: GlobalSecondaryIndexWarmThroughputDescription | undefined; +} +export interface LocalSecondaryIndexDescription { + IndexName?: string | undefined; + KeySchema?: KeySchemaElement[] | undefined; + Projection?: Projection | undefined; + IndexSizeBytes?: number | undefined; + ItemCount?: number | undefined; + IndexArn?: string | undefined; +} +export declare const MultiRegionConsistency: { + readonly EVENTUAL: "EVENTUAL"; + readonly STRONG: "STRONG"; +}; +export type MultiRegionConsistency = + (typeof MultiRegionConsistency)[keyof typeof MultiRegionConsistency]; +export interface RestoreSummary { + SourceBackupArn?: string | undefined; + SourceTableArn?: string | undefined; + 
RestoreDateTime: Date | undefined; + RestoreInProgress: boolean | undefined; +} +export interface TableDescription { + AttributeDefinitions?: AttributeDefinition[] | undefined; + TableName?: string | undefined; + KeySchema?: KeySchemaElement[] | undefined; + TableStatus?: TableStatus | undefined; + CreationDateTime?: Date | undefined; + ProvisionedThroughput?: ProvisionedThroughputDescription | undefined; + TableSizeBytes?: number | undefined; + ItemCount?: number | undefined; + TableArn?: string | undefined; + TableId?: string | undefined; + BillingModeSummary?: BillingModeSummary | undefined; + LocalSecondaryIndexes?: LocalSecondaryIndexDescription[] | undefined; + GlobalSecondaryIndexes?: GlobalSecondaryIndexDescription[] | undefined; + StreamSpecification?: StreamSpecification | undefined; + LatestStreamLabel?: string | undefined; + LatestStreamArn?: string | undefined; + GlobalTableVersion?: string | undefined; + Replicas?: ReplicaDescription[] | undefined; + RestoreSummary?: RestoreSummary | undefined; + SSEDescription?: SSEDescription | undefined; + ArchivalSummary?: ArchivalSummary | undefined; + TableClassSummary?: TableClassSummary | undefined; + DeletionProtectionEnabled?: boolean | undefined; + OnDemandThroughput?: OnDemandThroughput | undefined; + WarmThroughput?: TableWarmThroughputDescription | undefined; + MultiRegionConsistency?: MultiRegionConsistency | undefined; +} +export interface CreateTableOutput { + TableDescription?: TableDescription | undefined; +} +export declare class ResourceInUseException extends __BaseException { + readonly name: "ResourceInUseException"; + readonly $fault: "client"; + constructor( + opts: __ExceptionOptionType + ); +} +export interface CsvOptions { + Delimiter?: string | undefined; + HeaderList?: string[] | undefined; +} +export interface DeleteBackupInput { + BackupArn: string | undefined; +} +export interface DeleteBackupOutput { + BackupDescription?: BackupDescription | undefined; +} +export interface 
DeleteGlobalSecondaryIndexAction { + IndexName: string | undefined; +} +export declare const ReturnValue: { + readonly ALL_NEW: "ALL_NEW"; + readonly ALL_OLD: "ALL_OLD"; + readonly NONE: "NONE"; + readonly UPDATED_NEW: "UPDATED_NEW"; + readonly UPDATED_OLD: "UPDATED_OLD"; +}; +export type ReturnValue = (typeof ReturnValue)[keyof typeof ReturnValue]; +export declare class ReplicatedWriteConflictException extends __BaseException { + readonly name: "ReplicatedWriteConflictException"; + readonly $fault: "client"; + constructor( + opts: __ExceptionOptionType< + ReplicatedWriteConflictException, + __BaseException + > + ); +} +export declare class TransactionConflictException extends __BaseException { + readonly name: "TransactionConflictException"; + readonly $fault: "client"; + constructor( + opts: __ExceptionOptionType + ); +} +export interface DeleteReplicaAction { + RegionName: string | undefined; +} +export interface DeleteReplicationGroupMemberAction { + RegionName: string | undefined; +} +export interface DeleteResourcePolicyInput { + ResourceArn: string | undefined; + ExpectedRevisionId?: string | undefined; +} +export interface DeleteResourcePolicyOutput { + RevisionId?: string | undefined; +} +export declare class PolicyNotFoundException extends __BaseException { + readonly name: "PolicyNotFoundException"; + readonly $fault: "client"; + constructor( + opts: __ExceptionOptionType + ); +} +export interface DeleteTableInput { + TableName: string | undefined; +} +export interface DeleteTableOutput { + TableDescription?: TableDescription | undefined; +} +export interface DescribeBackupInput { + BackupArn: string | undefined; +} +export interface DescribeBackupOutput { + BackupDescription?: BackupDescription | undefined; +} +export interface DescribeContinuousBackupsInput { + TableName: string | undefined; +} +export interface DescribeContinuousBackupsOutput { + ContinuousBackupsDescription?: ContinuousBackupsDescription | undefined; +} +export interface 
DescribeContributorInsightsInput { + TableName: string | undefined; + IndexName?: string | undefined; +} +export interface FailureException { + ExceptionName?: string | undefined; + ExceptionDescription?: string | undefined; +} +export interface DescribeContributorInsightsOutput { + TableName?: string | undefined; + IndexName?: string | undefined; + ContributorInsightsRuleList?: string[] | undefined; + ContributorInsightsStatus?: ContributorInsightsStatus | undefined; + LastUpdateDateTime?: Date | undefined; + FailureException?: FailureException | undefined; +} +export interface DescribeEndpointsRequest {} +export interface Endpoint { + Address: string | undefined; + CachePeriodInMinutes: number | undefined; +} +export interface DescribeEndpointsResponse { + Endpoints: Endpoint[] | undefined; +} +export interface DescribeExportInput { + ExportArn: string | undefined; +} +export declare const ExportFormat: { + readonly DYNAMODB_JSON: "DYNAMODB_JSON"; + readonly ION: "ION"; +}; +export type ExportFormat = (typeof ExportFormat)[keyof typeof ExportFormat]; +export declare const ExportStatus: { + readonly COMPLETED: "COMPLETED"; + readonly FAILED: "FAILED"; + readonly IN_PROGRESS: "IN_PROGRESS"; +}; +export type ExportStatus = (typeof ExportStatus)[keyof typeof ExportStatus]; +export declare const ExportType: { + readonly FULL_EXPORT: "FULL_EXPORT"; + readonly INCREMENTAL_EXPORT: "INCREMENTAL_EXPORT"; +}; +export type ExportType = (typeof ExportType)[keyof typeof ExportType]; +export declare const ExportViewType: { + readonly NEW_AND_OLD_IMAGES: "NEW_AND_OLD_IMAGES"; + readonly NEW_IMAGE: "NEW_IMAGE"; +}; +export type ExportViewType = + (typeof ExportViewType)[keyof typeof ExportViewType]; +export interface IncrementalExportSpecification { + ExportFromTime?: Date | undefined; + ExportToTime?: Date | undefined; + ExportViewType?: ExportViewType | undefined; +} +export declare const S3SseAlgorithm: { + readonly AES256: "AES256"; + readonly KMS: "KMS"; +}; +export type 
S3SseAlgorithm = + (typeof S3SseAlgorithm)[keyof typeof S3SseAlgorithm]; +export interface ExportDescription { + ExportArn?: string | undefined; + ExportStatus?: ExportStatus | undefined; + StartTime?: Date | undefined; + EndTime?: Date | undefined; + ExportManifest?: string | undefined; + TableArn?: string | undefined; + TableId?: string | undefined; + ExportTime?: Date | undefined; + ClientToken?: string | undefined; + S3Bucket?: string | undefined; + S3BucketOwner?: string | undefined; + S3Prefix?: string | undefined; + S3SseAlgorithm?: S3SseAlgorithm | undefined; + S3SseKmsKeyId?: string | undefined; + FailureCode?: string | undefined; + FailureMessage?: string | undefined; + ExportFormat?: ExportFormat | undefined; + BilledSizeBytes?: number | undefined; + ItemCount?: number | undefined; + ExportType?: ExportType | undefined; + IncrementalExportSpecification?: IncrementalExportSpecification | undefined; +} +export interface DescribeExportOutput { + ExportDescription?: ExportDescription | undefined; +} +export declare class ExportNotFoundException extends __BaseException { + readonly name: "ExportNotFoundException"; + readonly $fault: "client"; + constructor( + opts: __ExceptionOptionType + ); +} +export interface DescribeGlobalTableInput { + GlobalTableName: string | undefined; +} +export interface DescribeGlobalTableOutput { + GlobalTableDescription?: GlobalTableDescription | undefined; +} +export declare class GlobalTableNotFoundException extends __BaseException { + readonly name: "GlobalTableNotFoundException"; + readonly $fault: "client"; + constructor( + opts: __ExceptionOptionType + ); +} +export interface DescribeGlobalTableSettingsInput { + GlobalTableName: string | undefined; +} +export interface ReplicaGlobalSecondaryIndexSettingsDescription { + IndexName: string | undefined; + IndexStatus?: IndexStatus | undefined; + ProvisionedReadCapacityUnits?: number | undefined; + ProvisionedReadCapacityAutoScalingSettings?: + | AutoScalingSettingsDescription + 
| undefined; + ProvisionedWriteCapacityUnits?: number | undefined; + ProvisionedWriteCapacityAutoScalingSettings?: + | AutoScalingSettingsDescription + | undefined; +} +export interface ReplicaSettingsDescription { + RegionName: string | undefined; + ReplicaStatus?: ReplicaStatus | undefined; + ReplicaBillingModeSummary?: BillingModeSummary | undefined; + ReplicaProvisionedReadCapacityUnits?: number | undefined; + ReplicaProvisionedReadCapacityAutoScalingSettings?: + | AutoScalingSettingsDescription + | undefined; + ReplicaProvisionedWriteCapacityUnits?: number | undefined; + ReplicaProvisionedWriteCapacityAutoScalingSettings?: + | AutoScalingSettingsDescription + | undefined; + ReplicaGlobalSecondaryIndexSettings?: + | ReplicaGlobalSecondaryIndexSettingsDescription[] + | undefined; + ReplicaTableClassSummary?: TableClassSummary | undefined; +} +export interface DescribeGlobalTableSettingsOutput { + GlobalTableName?: string | undefined; + ReplicaSettings?: ReplicaSettingsDescription[] | undefined; +} +export interface DescribeImportInput { + ImportArn: string | undefined; +} +export declare const ImportStatus: { + readonly CANCELLED: "CANCELLED"; + readonly CANCELLING: "CANCELLING"; + readonly COMPLETED: "COMPLETED"; + readonly FAILED: "FAILED"; + readonly IN_PROGRESS: "IN_PROGRESS"; +}; +export type ImportStatus = (typeof ImportStatus)[keyof typeof ImportStatus]; +export declare const InputCompressionType: { + readonly GZIP: "GZIP"; + readonly NONE: "NONE"; + readonly ZSTD: "ZSTD"; +}; +export type InputCompressionType = + (typeof InputCompressionType)[keyof typeof InputCompressionType]; +export declare const InputFormat: { + readonly CSV: "CSV"; + readonly DYNAMODB_JSON: "DYNAMODB_JSON"; + readonly ION: "ION"; +}; +export type InputFormat = (typeof InputFormat)[keyof typeof InputFormat]; +export interface InputFormatOptions { + Csv?: CsvOptions | undefined; +} +export interface S3BucketSource { + S3BucketOwner?: string | undefined; + S3Bucket: string | undefined; 
+ S3KeyPrefix?: string | undefined; +} +export interface TableCreationParameters { + TableName: string | undefined; + AttributeDefinitions: AttributeDefinition[] | undefined; + KeySchema: KeySchemaElement[] | undefined; + BillingMode?: BillingMode | undefined; + ProvisionedThroughput?: ProvisionedThroughput | undefined; + OnDemandThroughput?: OnDemandThroughput | undefined; + SSESpecification?: SSESpecification | undefined; + GlobalSecondaryIndexes?: GlobalSecondaryIndex[] | undefined; +} +export interface ImportTableDescription { + ImportArn?: string | undefined; + ImportStatus?: ImportStatus | undefined; + TableArn?: string | undefined; + TableId?: string | undefined; + ClientToken?: string | undefined; + S3BucketSource?: S3BucketSource | undefined; + ErrorCount?: number | undefined; + CloudWatchLogGroupArn?: string | undefined; + InputFormat?: InputFormat | undefined; + InputFormatOptions?: InputFormatOptions | undefined; + InputCompressionType?: InputCompressionType | undefined; + TableCreationParameters?: TableCreationParameters | undefined; + StartTime?: Date | undefined; + EndTime?: Date | undefined; + ProcessedSizeBytes?: number | undefined; + ProcessedItemCount?: number | undefined; + ImportedItemCount?: number | undefined; + FailureCode?: string | undefined; + FailureMessage?: string | undefined; +} +export interface DescribeImportOutput { + ImportTableDescription: ImportTableDescription | undefined; +} +export declare class ImportNotFoundException extends __BaseException { + readonly name: "ImportNotFoundException"; + readonly $fault: "client"; + constructor( + opts: __ExceptionOptionType + ); +} +export interface DescribeKinesisStreamingDestinationInput { + TableName: string | undefined; +} +export declare const DestinationStatus: { + readonly ACTIVE: "ACTIVE"; + readonly DISABLED: "DISABLED"; + readonly DISABLING: "DISABLING"; + readonly ENABLE_FAILED: "ENABLE_FAILED"; + readonly ENABLING: "ENABLING"; + readonly UPDATING: "UPDATING"; +}; +export type 
DestinationStatus = + (typeof DestinationStatus)[keyof typeof DestinationStatus]; +export interface KinesisDataStreamDestination { + StreamArn?: string | undefined; + DestinationStatus?: DestinationStatus | undefined; + DestinationStatusDescription?: string | undefined; + ApproximateCreationDateTimePrecision?: + | ApproximateCreationDateTimePrecision + | undefined; +} +export interface DescribeKinesisStreamingDestinationOutput { + TableName?: string | undefined; + KinesisDataStreamDestinations?: KinesisDataStreamDestination[] | undefined; +} +export interface DescribeLimitsInput {} +export interface DescribeLimitsOutput { + AccountMaxReadCapacityUnits?: number | undefined; + AccountMaxWriteCapacityUnits?: number | undefined; + TableMaxReadCapacityUnits?: number | undefined; + TableMaxWriteCapacityUnits?: number | undefined; +} +export interface DescribeTableInput { + TableName: string | undefined; +} +export interface DescribeTableOutput { + Table?: TableDescription | undefined; +} +export interface DescribeTableReplicaAutoScalingInput { + TableName: string | undefined; +} +export interface ReplicaGlobalSecondaryIndexAutoScalingDescription { + IndexName?: string | undefined; + IndexStatus?: IndexStatus | undefined; + ProvisionedReadCapacityAutoScalingSettings?: + | AutoScalingSettingsDescription + | undefined; + ProvisionedWriteCapacityAutoScalingSettings?: + | AutoScalingSettingsDescription + | undefined; +} +export interface ReplicaAutoScalingDescription { + RegionName?: string | undefined; + GlobalSecondaryIndexes?: + | ReplicaGlobalSecondaryIndexAutoScalingDescription[] + | undefined; + ReplicaProvisionedReadCapacityAutoScalingSettings?: + | AutoScalingSettingsDescription + | undefined; + ReplicaProvisionedWriteCapacityAutoScalingSettings?: + | AutoScalingSettingsDescription + | undefined; + ReplicaStatus?: ReplicaStatus | undefined; +} +export interface TableAutoScalingDescription { + TableName?: string | undefined; + TableStatus?: TableStatus | undefined; + 
Replicas?: ReplicaAutoScalingDescription[] | undefined; +} +export interface DescribeTableReplicaAutoScalingOutput { + TableAutoScalingDescription?: TableAutoScalingDescription | undefined; +} +export interface DescribeTimeToLiveInput { + TableName: string | undefined; +} +export interface DescribeTimeToLiveOutput { + TimeToLiveDescription?: TimeToLiveDescription | undefined; +} +export interface EnableKinesisStreamingConfiguration { + ApproximateCreationDateTimePrecision?: + | ApproximateCreationDateTimePrecision + | undefined; +} +export interface KinesisStreamingDestinationInput { + TableName: string | undefined; + StreamArn: string | undefined; + EnableKinesisStreamingConfiguration?: + | EnableKinesisStreamingConfiguration + | undefined; +} +export interface KinesisStreamingDestinationOutput { + TableName?: string | undefined; + StreamArn?: string | undefined; + DestinationStatus?: DestinationStatus | undefined; + EnableKinesisStreamingConfiguration?: + | EnableKinesisStreamingConfiguration + | undefined; +} +export declare class DuplicateItemException extends __BaseException { + readonly name: "DuplicateItemException"; + readonly $fault: "client"; + constructor( + opts: __ExceptionOptionType + ); +} +export declare class IdempotentParameterMismatchException extends __BaseException { + readonly name: "IdempotentParameterMismatchException"; + readonly $fault: "client"; + Message?: string | undefined; + constructor( + opts: __ExceptionOptionType< + IdempotentParameterMismatchException, + __BaseException + > + ); +} +export declare class TransactionInProgressException extends __BaseException { + readonly name: "TransactionInProgressException"; + readonly $fault: "client"; + Message?: string | undefined; + constructor( + opts: __ExceptionOptionType + ); +} +export declare class ExportConflictException extends __BaseException { + readonly name: "ExportConflictException"; + readonly $fault: "client"; + constructor( + opts: __ExceptionOptionType + ); +} +export 
interface ExportTableToPointInTimeInput { + TableArn: string | undefined; + ExportTime?: Date | undefined; + ClientToken?: string | undefined; + S3Bucket: string | undefined; + S3BucketOwner?: string | undefined; + S3Prefix?: string | undefined; + S3SseAlgorithm?: S3SseAlgorithm | undefined; + S3SseKmsKeyId?: string | undefined; + ExportFormat?: ExportFormat | undefined; + ExportType?: ExportType | undefined; + IncrementalExportSpecification?: IncrementalExportSpecification | undefined; +} +export interface ExportTableToPointInTimeOutput { + ExportDescription?: ExportDescription | undefined; +} +export declare class InvalidExportTimeException extends __BaseException { + readonly name: "InvalidExportTimeException"; + readonly $fault: "client"; + constructor( + opts: __ExceptionOptionType + ); +} +export declare class PointInTimeRecoveryUnavailableException extends __BaseException { + readonly name: "PointInTimeRecoveryUnavailableException"; + readonly $fault: "client"; + constructor( + opts: __ExceptionOptionType< + PointInTimeRecoveryUnavailableException, + __BaseException + > + ); +} +export interface GetResourcePolicyInput { + ResourceArn: string | undefined; +} +export interface GetResourcePolicyOutput { + Policy?: string | undefined; + RevisionId?: string | undefined; +} +export declare class ImportConflictException extends __BaseException { + readonly name: "ImportConflictException"; + readonly $fault: "client"; + constructor( + opts: __ExceptionOptionType + ); +} +export interface ImportTableInput { + ClientToken?: string | undefined; + S3BucketSource: S3BucketSource | undefined; + InputFormat: InputFormat | undefined; + InputFormatOptions?: InputFormatOptions | undefined; + InputCompressionType?: InputCompressionType | undefined; + TableCreationParameters: TableCreationParameters | undefined; +} +export interface ImportTableOutput { + ImportTableDescription: ImportTableDescription | undefined; +} +export interface ListBackupsInput { + TableName?: string | 
undefined; + Limit?: number | undefined; + TimeRangeLowerBound?: Date | undefined; + TimeRangeUpperBound?: Date | undefined; + ExclusiveStartBackupArn?: string | undefined; + BackupType?: BackupTypeFilter | undefined; +} +export interface ListBackupsOutput { + BackupSummaries?: BackupSummary[] | undefined; + LastEvaluatedBackupArn?: string | undefined; +} +export interface ListContributorInsightsInput { + TableName?: string | undefined; + NextToken?: string | undefined; + MaxResults?: number | undefined; +} +export interface ListContributorInsightsOutput { + ContributorInsightsSummaries?: ContributorInsightsSummary[] | undefined; + NextToken?: string | undefined; +} +export interface ListExportsInput { + TableArn?: string | undefined; + MaxResults?: number | undefined; + NextToken?: string | undefined; +} +export interface ExportSummary { + ExportArn?: string | undefined; + ExportStatus?: ExportStatus | undefined; + ExportType?: ExportType | undefined; +} +export interface ListExportsOutput { + ExportSummaries?: ExportSummary[] | undefined; + NextToken?: string | undefined; +} +export interface ListGlobalTablesInput { + ExclusiveStartGlobalTableName?: string | undefined; + Limit?: number | undefined; + RegionName?: string | undefined; +} +export interface GlobalTable { + GlobalTableName?: string | undefined; + ReplicationGroup?: Replica[] | undefined; +} +export interface ListGlobalTablesOutput { + GlobalTables?: GlobalTable[] | undefined; + LastEvaluatedGlobalTableName?: string | undefined; +} +export interface ListImportsInput { + TableArn?: string | undefined; + PageSize?: number | undefined; + NextToken?: string | undefined; +} +export interface ImportSummary { + ImportArn?: string | undefined; + ImportStatus?: ImportStatus | undefined; + TableArn?: string | undefined; + S3BucketSource?: S3BucketSource | undefined; + CloudWatchLogGroupArn?: string | undefined; + InputFormat?: InputFormat | undefined; + StartTime?: Date | undefined; + EndTime?: Date | undefined; 
+} +export interface ListImportsOutput { + ImportSummaryList?: ImportSummary[] | undefined; + NextToken?: string | undefined; +} +export interface ListTablesInput { + ExclusiveStartTableName?: string | undefined; + Limit?: number | undefined; +} +export interface ListTablesOutput { + TableNames?: string[] | undefined; + LastEvaluatedTableName?: string | undefined; +} +export interface ListTagsOfResourceInput { + ResourceArn: string | undefined; + NextToken?: string | undefined; +} +export interface ListTagsOfResourceOutput { + Tags?: Tag[] | undefined; + NextToken?: string | undefined; +} +export interface PutResourcePolicyInput { + ResourceArn: string | undefined; + Policy: string | undefined; + ExpectedRevisionId?: string | undefined; + ConfirmRemoveSelfResourceAccess?: boolean | undefined; +} +export interface PutResourcePolicyOutput { + RevisionId?: string | undefined; +} +export declare const Select: { + readonly ALL_ATTRIBUTES: "ALL_ATTRIBUTES"; + readonly ALL_PROJECTED_ATTRIBUTES: "ALL_PROJECTED_ATTRIBUTES"; + readonly COUNT: "COUNT"; + readonly SPECIFIC_ATTRIBUTES: "SPECIFIC_ATTRIBUTES"; +}; +export type Select = (typeof Select)[keyof typeof Select]; +export interface RestoreTableFromBackupInput { + TargetTableName: string | undefined; + BackupArn: string | undefined; + BillingModeOverride?: BillingMode | undefined; + GlobalSecondaryIndexOverride?: GlobalSecondaryIndex[] | undefined; + LocalSecondaryIndexOverride?: LocalSecondaryIndex[] | undefined; + ProvisionedThroughputOverride?: ProvisionedThroughput | undefined; + OnDemandThroughputOverride?: OnDemandThroughput | undefined; + SSESpecificationOverride?: SSESpecification | undefined; +} +export interface RestoreTableFromBackupOutput { + TableDescription?: TableDescription | undefined; +} +export declare class TableAlreadyExistsException extends __BaseException { + readonly name: "TableAlreadyExistsException"; + readonly $fault: "client"; + constructor( + opts: __ExceptionOptionType + ); +} +export 
declare class InvalidRestoreTimeException extends __BaseException { + readonly name: "InvalidRestoreTimeException"; + readonly $fault: "client"; + constructor( + opts: __ExceptionOptionType + ); +} +export interface RestoreTableToPointInTimeInput { + SourceTableArn?: string | undefined; + SourceTableName?: string | undefined; + TargetTableName: string | undefined; + UseLatestRestorableTime?: boolean | undefined; + RestoreDateTime?: Date | undefined; + BillingModeOverride?: BillingMode | undefined; + GlobalSecondaryIndexOverride?: GlobalSecondaryIndex[] | undefined; + LocalSecondaryIndexOverride?: LocalSecondaryIndex[] | undefined; + ProvisionedThroughputOverride?: ProvisionedThroughput | undefined; + OnDemandThroughputOverride?: OnDemandThroughput | undefined; + SSESpecificationOverride?: SSESpecification | undefined; +} +export interface RestoreTableToPointInTimeOutput { + TableDescription?: TableDescription | undefined; +} +export interface TagResourceInput { + ResourceArn: string | undefined; + Tags: Tag[] | undefined; +} +export interface UntagResourceInput { + ResourceArn: string | undefined; + TagKeys: string[] | undefined; +} +export interface PointInTimeRecoverySpecification { + PointInTimeRecoveryEnabled: boolean | undefined; + RecoveryPeriodInDays?: number | undefined; +} +export interface UpdateContinuousBackupsInput { + TableName: string | undefined; + PointInTimeRecoverySpecification: + | PointInTimeRecoverySpecification + | undefined; +} +export interface UpdateContinuousBackupsOutput { + ContinuousBackupsDescription?: ContinuousBackupsDescription | undefined; +} +export interface UpdateContributorInsightsInput { + TableName: string | undefined; + IndexName?: string | undefined; + ContributorInsightsAction: ContributorInsightsAction | undefined; +} +export interface UpdateContributorInsightsOutput { + TableName?: string | undefined; + IndexName?: string | undefined; + ContributorInsightsStatus?: ContributorInsightsStatus | undefined; +} +export 
declare class ReplicaAlreadyExistsException extends __BaseException { + readonly name: "ReplicaAlreadyExistsException"; + readonly $fault: "client"; + constructor( + opts: __ExceptionOptionType + ); +} +export declare class ReplicaNotFoundException extends __BaseException { + readonly name: "ReplicaNotFoundException"; + readonly $fault: "client"; + constructor( + opts: __ExceptionOptionType + ); +} +export interface ReplicaUpdate { + Create?: CreateReplicaAction | undefined; + Delete?: DeleteReplicaAction | undefined; +} +export interface UpdateGlobalTableInput { + GlobalTableName: string | undefined; + ReplicaUpdates: ReplicaUpdate[] | undefined; +} +export interface UpdateGlobalTableOutput { + GlobalTableDescription?: GlobalTableDescription | undefined; +} +export declare class IndexNotFoundException extends __BaseException { + readonly name: "IndexNotFoundException"; + readonly $fault: "client"; + constructor( + opts: __ExceptionOptionType + ); +} +export interface GlobalTableGlobalSecondaryIndexSettingsUpdate { + IndexName: string | undefined; + ProvisionedWriteCapacityUnits?: number | undefined; + ProvisionedWriteCapacityAutoScalingSettingsUpdate?: + | AutoScalingSettingsUpdate + | undefined; +} +export interface ReplicaGlobalSecondaryIndexSettingsUpdate { + IndexName: string | undefined; + ProvisionedReadCapacityUnits?: number | undefined; + ProvisionedReadCapacityAutoScalingSettingsUpdate?: + | AutoScalingSettingsUpdate + | undefined; +} +export interface ReplicaSettingsUpdate { + RegionName: string | undefined; + ReplicaProvisionedReadCapacityUnits?: number | undefined; + ReplicaProvisionedReadCapacityAutoScalingSettingsUpdate?: + | AutoScalingSettingsUpdate + | undefined; + ReplicaGlobalSecondaryIndexSettingsUpdate?: + | ReplicaGlobalSecondaryIndexSettingsUpdate[] + | undefined; + ReplicaTableClass?: TableClass | undefined; +} +export interface UpdateGlobalTableSettingsInput { + GlobalTableName: string | undefined; + GlobalTableBillingMode?: BillingMode | 
undefined; + GlobalTableProvisionedWriteCapacityUnits?: number | undefined; + GlobalTableProvisionedWriteCapacityAutoScalingSettingsUpdate?: + | AutoScalingSettingsUpdate + | undefined; + GlobalTableGlobalSecondaryIndexSettingsUpdate?: + | GlobalTableGlobalSecondaryIndexSettingsUpdate[] + | undefined; + ReplicaSettingsUpdate?: ReplicaSettingsUpdate[] | undefined; +} +export interface UpdateGlobalTableSettingsOutput { + GlobalTableName?: string | undefined; + ReplicaSettings?: ReplicaSettingsDescription[] | undefined; +} +export interface UpdateKinesisStreamingConfiguration { + ApproximateCreationDateTimePrecision?: + | ApproximateCreationDateTimePrecision + | undefined; +} +export interface UpdateKinesisStreamingDestinationInput { + TableName: string | undefined; + StreamArn: string | undefined; + UpdateKinesisStreamingConfiguration?: + | UpdateKinesisStreamingConfiguration + | undefined; +} +export interface UpdateKinesisStreamingDestinationOutput { + TableName?: string | undefined; + StreamArn?: string | undefined; + DestinationStatus?: DestinationStatus | undefined; + UpdateKinesisStreamingConfiguration?: + | UpdateKinesisStreamingConfiguration + | undefined; +} +export interface UpdateGlobalSecondaryIndexAction { + IndexName: string | undefined; + ProvisionedThroughput?: ProvisionedThroughput | undefined; + OnDemandThroughput?: OnDemandThroughput | undefined; + WarmThroughput?: WarmThroughput | undefined; +} +export interface GlobalSecondaryIndexUpdate { + Update?: UpdateGlobalSecondaryIndexAction | undefined; + Create?: CreateGlobalSecondaryIndexAction | undefined; + Delete?: DeleteGlobalSecondaryIndexAction | undefined; +} +export interface UpdateReplicationGroupMemberAction { + RegionName: string | undefined; + KMSMasterKeyId?: string | undefined; + ProvisionedThroughputOverride?: ProvisionedThroughputOverride | undefined; + OnDemandThroughputOverride?: OnDemandThroughputOverride | undefined; + GlobalSecondaryIndexes?: ReplicaGlobalSecondaryIndex[] | 
undefined; + TableClassOverride?: TableClass | undefined; +} +export interface ReplicationGroupUpdate { + Create?: CreateReplicationGroupMemberAction | undefined; + Update?: UpdateReplicationGroupMemberAction | undefined; + Delete?: DeleteReplicationGroupMemberAction | undefined; +} +export interface UpdateTableInput { + AttributeDefinitions?: AttributeDefinition[] | undefined; + TableName: string | undefined; + BillingMode?: BillingMode | undefined; + ProvisionedThroughput?: ProvisionedThroughput | undefined; + GlobalSecondaryIndexUpdates?: GlobalSecondaryIndexUpdate[] | undefined; + StreamSpecification?: StreamSpecification | undefined; + SSESpecification?: SSESpecification | undefined; + ReplicaUpdates?: ReplicationGroupUpdate[] | undefined; + TableClass?: TableClass | undefined; + DeletionProtectionEnabled?: boolean | undefined; + MultiRegionConsistency?: MultiRegionConsistency | undefined; + OnDemandThroughput?: OnDemandThroughput | undefined; + WarmThroughput?: WarmThroughput | undefined; +} +export interface UpdateTableOutput { + TableDescription?: TableDescription | undefined; +} +export interface GlobalSecondaryIndexAutoScalingUpdate { + IndexName?: string | undefined; + ProvisionedWriteCapacityAutoScalingUpdate?: + | AutoScalingSettingsUpdate + | undefined; +} +export interface ReplicaGlobalSecondaryIndexAutoScalingUpdate { + IndexName?: string | undefined; + ProvisionedReadCapacityAutoScalingUpdate?: + | AutoScalingSettingsUpdate + | undefined; +} +export interface ReplicaAutoScalingUpdate { + RegionName: string | undefined; + ReplicaGlobalSecondaryIndexUpdates?: + | ReplicaGlobalSecondaryIndexAutoScalingUpdate[] + | undefined; + ReplicaProvisionedReadCapacityAutoScalingUpdate?: + | AutoScalingSettingsUpdate + | undefined; +} +export interface UpdateTableReplicaAutoScalingInput { + GlobalSecondaryIndexUpdates?: + | GlobalSecondaryIndexAutoScalingUpdate[] + | undefined; + TableName: string | undefined; + ProvisionedWriteCapacityAutoScalingUpdate?: + | 
AutoScalingSettingsUpdate + | undefined; + ReplicaUpdates?: ReplicaAutoScalingUpdate[] | undefined; +} +export interface UpdateTableReplicaAutoScalingOutput { + TableAutoScalingDescription?: TableAutoScalingDescription | undefined; +} +export interface TimeToLiveSpecification { + Enabled: boolean | undefined; + AttributeName: string | undefined; +} +export interface UpdateTimeToLiveInput { + TableName: string | undefined; + TimeToLiveSpecification: TimeToLiveSpecification | undefined; +} +export interface UpdateTimeToLiveOutput { + TimeToLiveSpecification?: TimeToLiveSpecification | undefined; +} +export type AttributeValue = + | AttributeValue.BMember + | AttributeValue.BOOLMember + | AttributeValue.BSMember + | AttributeValue.LMember + | AttributeValue.MMember + | AttributeValue.NMember + | AttributeValue.NSMember + | AttributeValue.NULLMember + | AttributeValue.SMember + | AttributeValue.SSMember + | AttributeValue.$UnknownMember; +export declare namespace AttributeValue { + interface SMember { + S: string; + N?: never; + B?: never; + SS?: never; + NS?: never; + BS?: never; + M?: never; + L?: never; + NULL?: never; + BOOL?: never; + $unknown?: never; + } + interface NMember { + S?: never; + N: string; + B?: never; + SS?: never; + NS?: never; + BS?: never; + M?: never; + L?: never; + NULL?: never; + BOOL?: never; + $unknown?: never; + } + interface BMember { + S?: never; + N?: never; + B: Uint8Array; + SS?: never; + NS?: never; + BS?: never; + M?: never; + L?: never; + NULL?: never; + BOOL?: never; + $unknown?: never; + } + interface SSMember { + S?: never; + N?: never; + B?: never; + SS: string[]; + NS?: never; + BS?: never; + M?: never; + L?: never; + NULL?: never; + BOOL?: never; + $unknown?: never; + } + interface NSMember { + S?: never; + N?: never; + B?: never; + SS?: never; + NS: string[]; + BS?: never; + M?: never; + L?: never; + NULL?: never; + BOOL?: never; + $unknown?: never; + } + interface BSMember { + S?: never; + N?: never; + B?: never; + SS?: 
never; + NS?: never; + BS: Uint8Array[]; + M?: never; + L?: never; + NULL?: never; + BOOL?: never; + $unknown?: never; + } + interface MMember { + S?: never; + N?: never; + B?: never; + SS?: never; + NS?: never; + BS?: never; + M: Record; + L?: never; + NULL?: never; + BOOL?: never; + $unknown?: never; + } + interface LMember { + S?: never; + N?: never; + B?: never; + SS?: never; + NS?: never; + BS?: never; + M?: never; + L: AttributeValue[]; + NULL?: never; + BOOL?: never; + $unknown?: never; + } + interface NULLMember { + S?: never; + N?: never; + B?: never; + SS?: never; + NS?: never; + BS?: never; + M?: never; + L?: never; + NULL: boolean; + BOOL?: never; + $unknown?: never; + } + interface BOOLMember { + S?: never; + N?: never; + B?: never; + SS?: never; + NS?: never; + BS?: never; + M?: never; + L?: never; + NULL?: never; + BOOL: boolean; + $unknown?: never; + } + interface $UnknownMember { + S?: never; + N?: never; + B?: never; + SS?: never; + NS?: never; + BS?: never; + M?: never; + L?: never; + NULL?: never; + BOOL?: never; + $unknown: [string, any]; + } + interface Visitor { + S: (value: string) => T; + N: (value: string) => T; + B: (value: Uint8Array) => T; + SS: (value: string[]) => T; + NS: (value: string[]) => T; + BS: (value: Uint8Array[]) => T; + M: (value: Record) => T; + L: (value: AttributeValue[]) => T; + NULL: (value: boolean) => T; + BOOL: (value: boolean) => T; + _: (name: string, value: any) => T; + } + const visit: (value: AttributeValue, visitor: Visitor) => T; +} +export interface AttributeValueUpdate { + Value?: AttributeValue | undefined; + Action?: AttributeAction | undefined; +} +export interface BatchStatementError { + Code?: BatchStatementErrorCodeEnum | undefined; + Message?: string | undefined; + Item?: Record | undefined; +} +export interface BatchStatementRequest { + Statement: string | undefined; + Parameters?: AttributeValue[] | undefined; + ConsistentRead?: boolean | undefined; + ReturnValuesOnConditionCheckFailure?: + | 
ReturnValuesOnConditionCheckFailure + | undefined; +} +export interface CancellationReason { + Item?: Record | undefined; + Code?: string | undefined; + Message?: string | undefined; +} +export interface Condition { + AttributeValueList?: AttributeValue[] | undefined; + ComparisonOperator: ComparisonOperator | undefined; +} +export declare class ConditionalCheckFailedException extends __BaseException { + readonly name: "ConditionalCheckFailedException"; + readonly $fault: "client"; + Item?: Record | undefined; + constructor( + opts: __ExceptionOptionType< + ConditionalCheckFailedException, + __BaseException + > + ); +} +export interface DeleteRequest { + Key: Record | undefined; +} +export interface ExecuteStatementInput { + Statement: string | undefined; + Parameters?: AttributeValue[] | undefined; + ConsistentRead?: boolean | undefined; + NextToken?: string | undefined; + ReturnConsumedCapacity?: ReturnConsumedCapacity | undefined; + Limit?: number | undefined; + ReturnValuesOnConditionCheckFailure?: + | ReturnValuesOnConditionCheckFailure + | undefined; +} +export interface Get { + Key: Record | undefined; + TableName: string | undefined; + ProjectionExpression?: string | undefined; + ExpressionAttributeNames?: Record | undefined; +} +export interface GetItemInput { + TableName: string | undefined; + Key: Record | undefined; + AttributesToGet?: string[] | undefined; + ConsistentRead?: boolean | undefined; + ReturnConsumedCapacity?: ReturnConsumedCapacity | undefined; + ProjectionExpression?: string | undefined; + ExpressionAttributeNames?: Record | undefined; +} +export interface GetItemOutput { + Item?: Record | undefined; + ConsumedCapacity?: ConsumedCapacity | undefined; +} +export interface ItemCollectionMetrics { + ItemCollectionKey?: Record | undefined; + SizeEstimateRangeGB?: number[] | undefined; +} +export interface ItemResponse { + Item?: Record | undefined; +} +export interface ParameterizedStatement { + Statement: string | undefined; + Parameters?: 
AttributeValue[] | undefined; + ReturnValuesOnConditionCheckFailure?: + | ReturnValuesOnConditionCheckFailure + | undefined; +} +export interface PutRequest { + Item: Record | undefined; +} +export interface KeysAndAttributes { + Keys: Record[] | undefined; + AttributesToGet?: string[] | undefined; + ConsistentRead?: boolean | undefined; + ProjectionExpression?: string | undefined; + ExpressionAttributeNames?: Record | undefined; +} +export interface TransactGetItem { + Get: Get | undefined; +} +export interface BatchExecuteStatementInput { + Statements: BatchStatementRequest[] | undefined; + ReturnConsumedCapacity?: ReturnConsumedCapacity | undefined; +} +export interface ExecuteTransactionInput { + TransactStatements: ParameterizedStatement[] | undefined; + ClientRequestToken?: string | undefined; + ReturnConsumedCapacity?: ReturnConsumedCapacity | undefined; +} +export interface ExecuteTransactionOutput { + Responses?: ItemResponse[] | undefined; + ConsumedCapacity?: ConsumedCapacity[] | undefined; +} +export interface TransactGetItemsOutput { + ConsumedCapacity?: ConsumedCapacity[] | undefined; + Responses?: ItemResponse[] | undefined; +} +export declare class TransactionCanceledException extends __BaseException { + readonly name: "TransactionCanceledException"; + readonly $fault: "client"; + Message?: string | undefined; + CancellationReasons?: CancellationReason[] | undefined; + constructor( + opts: __ExceptionOptionType + ); +} +export interface BatchGetItemInput { + RequestItems: Record | undefined; + ReturnConsumedCapacity?: ReturnConsumedCapacity | undefined; +} +export interface ExpectedAttributeValue { + Value?: AttributeValue | undefined; + Exists?: boolean | undefined; + ComparisonOperator?: ComparisonOperator | undefined; + AttributeValueList?: AttributeValue[] | undefined; +} +export interface TransactGetItemsInput { + TransactItems: TransactGetItem[] | undefined; + ReturnConsumedCapacity?: ReturnConsumedCapacity | undefined; +} +export interface 
TransactWriteItemsOutput { + ConsumedCapacity?: ConsumedCapacity[] | undefined; + ItemCollectionMetrics?: Record | undefined; +} +export interface ConditionCheck { + Key: Record | undefined; + TableName: string | undefined; + ConditionExpression: string | undefined; + ExpressionAttributeNames?: Record | undefined; + ExpressionAttributeValues?: Record | undefined; + ReturnValuesOnConditionCheckFailure?: + | ReturnValuesOnConditionCheckFailure + | undefined; +} +export interface Delete { + Key: Record | undefined; + TableName: string | undefined; + ConditionExpression?: string | undefined; + ExpressionAttributeNames?: Record | undefined; + ExpressionAttributeValues?: Record | undefined; + ReturnValuesOnConditionCheckFailure?: + | ReturnValuesOnConditionCheckFailure + | undefined; +} +export interface Put { + Item: Record | undefined; + TableName: string | undefined; + ConditionExpression?: string | undefined; + ExpressionAttributeNames?: Record | undefined; + ExpressionAttributeValues?: Record | undefined; + ReturnValuesOnConditionCheckFailure?: + | ReturnValuesOnConditionCheckFailure + | undefined; +} +export interface Update { + Key: Record | undefined; + UpdateExpression: string | undefined; + TableName: string | undefined; + ConditionExpression?: string | undefined; + ExpressionAttributeNames?: Record | undefined; + ExpressionAttributeValues?: Record | undefined; + ReturnValuesOnConditionCheckFailure?: + | ReturnValuesOnConditionCheckFailure + | undefined; +} +export interface BatchStatementResponse { + Error?: BatchStatementError | undefined; + TableName?: string | undefined; + Item?: Record | undefined; +} +export interface DeleteItemOutput { + Attributes?: Record | undefined; + ConsumedCapacity?: ConsumedCapacity | undefined; + ItemCollectionMetrics?: ItemCollectionMetrics | undefined; +} +export interface ExecuteStatementOutput { + Items?: Record[] | undefined; + NextToken?: string | undefined; + ConsumedCapacity?: ConsumedCapacity | undefined; + 
LastEvaluatedKey?: Record | undefined; +} +export interface PutItemOutput { + Attributes?: Record | undefined; + ConsumedCapacity?: ConsumedCapacity | undefined; + ItemCollectionMetrics?: ItemCollectionMetrics | undefined; +} +export interface QueryOutput { + Items?: Record[] | undefined; + Count?: number | undefined; + ScannedCount?: number | undefined; + LastEvaluatedKey?: Record | undefined; + ConsumedCapacity?: ConsumedCapacity | undefined; +} +export interface ScanOutput { + Items?: Record[] | undefined; + Count?: number | undefined; + ScannedCount?: number | undefined; + LastEvaluatedKey?: Record | undefined; + ConsumedCapacity?: ConsumedCapacity | undefined; +} +export interface UpdateItemOutput { + Attributes?: Record | undefined; + ConsumedCapacity?: ConsumedCapacity | undefined; + ItemCollectionMetrics?: ItemCollectionMetrics | undefined; +} +export interface WriteRequest { + PutRequest?: PutRequest | undefined; + DeleteRequest?: DeleteRequest | undefined; +} +export interface BatchExecuteStatementOutput { + Responses?: BatchStatementResponse[] | undefined; + ConsumedCapacity?: ConsumedCapacity[] | undefined; +} +export interface BatchGetItemOutput { + Responses?: Record[]> | undefined; + UnprocessedKeys?: Record | undefined; + ConsumedCapacity?: ConsumedCapacity[] | undefined; +} +export interface ScanInput { + TableName: string | undefined; + IndexName?: string | undefined; + AttributesToGet?: string[] | undefined; + Limit?: number | undefined; + Select?: Select | undefined; + ScanFilter?: Record | undefined; + ConditionalOperator?: ConditionalOperator | undefined; + ExclusiveStartKey?: Record | undefined; + ReturnConsumedCapacity?: ReturnConsumedCapacity | undefined; + TotalSegments?: number | undefined; + Segment?: number | undefined; + ProjectionExpression?: string | undefined; + FilterExpression?: string | undefined; + ExpressionAttributeNames?: Record | undefined; + ExpressionAttributeValues?: Record | undefined; + ConsistentRead?: boolean | 
undefined; +} +export interface BatchWriteItemInput { + RequestItems: Record | undefined; + ReturnConsumedCapacity?: ReturnConsumedCapacity | undefined; + ReturnItemCollectionMetrics?: ReturnItemCollectionMetrics | undefined; +} +export interface DeleteItemInput { + TableName: string | undefined; + Key: Record | undefined; + Expected?: Record | undefined; + ConditionalOperator?: ConditionalOperator | undefined; + ReturnValues?: ReturnValue | undefined; + ReturnConsumedCapacity?: ReturnConsumedCapacity | undefined; + ReturnItemCollectionMetrics?: ReturnItemCollectionMetrics | undefined; + ConditionExpression?: string | undefined; + ExpressionAttributeNames?: Record | undefined; + ExpressionAttributeValues?: Record | undefined; + ReturnValuesOnConditionCheckFailure?: + | ReturnValuesOnConditionCheckFailure + | undefined; +} +export interface PutItemInput { + TableName: string | undefined; + Item: Record | undefined; + Expected?: Record | undefined; + ReturnValues?: ReturnValue | undefined; + ReturnConsumedCapacity?: ReturnConsumedCapacity | undefined; + ReturnItemCollectionMetrics?: ReturnItemCollectionMetrics | undefined; + ConditionalOperator?: ConditionalOperator | undefined; + ConditionExpression?: string | undefined; + ExpressionAttributeNames?: Record | undefined; + ExpressionAttributeValues?: Record | undefined; + ReturnValuesOnConditionCheckFailure?: + | ReturnValuesOnConditionCheckFailure + | undefined; +} +export interface QueryInput { + TableName: string | undefined; + IndexName?: string | undefined; + Select?: Select | undefined; + AttributesToGet?: string[] | undefined; + Limit?: number | undefined; + ConsistentRead?: boolean | undefined; + KeyConditions?: Record | undefined; + QueryFilter?: Record | undefined; + ConditionalOperator?: ConditionalOperator | undefined; + ScanIndexForward?: boolean | undefined; + ExclusiveStartKey?: Record | undefined; + ReturnConsumedCapacity?: ReturnConsumedCapacity | undefined; + ProjectionExpression?: string | 
undefined; + FilterExpression?: string | undefined; + KeyConditionExpression?: string | undefined; + ExpressionAttributeNames?: Record | undefined; + ExpressionAttributeValues?: Record | undefined; +} +export interface BatchWriteItemOutput { + UnprocessedItems?: Record | undefined; + ItemCollectionMetrics?: Record | undefined; + ConsumedCapacity?: ConsumedCapacity[] | undefined; +} +export interface UpdateItemInput { + TableName: string | undefined; + Key: Record | undefined; + AttributeUpdates?: Record | undefined; + Expected?: Record | undefined; + ConditionalOperator?: ConditionalOperator | undefined; + ReturnValues?: ReturnValue | undefined; + ReturnConsumedCapacity?: ReturnConsumedCapacity | undefined; + ReturnItemCollectionMetrics?: ReturnItemCollectionMetrics | undefined; + UpdateExpression?: string | undefined; + ConditionExpression?: string | undefined; + ExpressionAttributeNames?: Record | undefined; + ExpressionAttributeValues?: Record | undefined; + ReturnValuesOnConditionCheckFailure?: + | ReturnValuesOnConditionCheckFailure + | undefined; +} +export interface TransactWriteItem { + ConditionCheck?: ConditionCheck | undefined; + Put?: Put | undefined; + Delete?: Delete | undefined; + Update?: Update | undefined; +} +export interface TransactWriteItemsInput { + TransactItems: TransactWriteItem[] | undefined; + ReturnConsumedCapacity?: ReturnConsumedCapacity | undefined; + ReturnItemCollectionMetrics?: ReturnItemCollectionMetrics | undefined; + ClientRequestToken?: string | undefined; +} diff --git a/amplify/functions/fetchDocuments/node_modules/@aws-sdk/client-dynamodb/dist-types/ts3.4/pagination/Interfaces.d.ts b/amplify/functions/fetchDocuments/node_modules/@aws-sdk/client-dynamodb/dist-types/ts3.4/pagination/Interfaces.d.ts new file mode 100644 index 0000000..109e7f1 --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@aws-sdk/client-dynamodb/dist-types/ts3.4/pagination/Interfaces.d.ts @@ -0,0 +1,6 @@ +import { PaginationConfiguration } 
from "@smithy/types"; +import { DynamoDBClient } from "../DynamoDBClient"; +export interface DynamoDBPaginationConfiguration + extends PaginationConfiguration { + client: DynamoDBClient; +} diff --git a/amplify/functions/fetchDocuments/node_modules/@aws-sdk/client-dynamodb/dist-types/ts3.4/pagination/ListContributorInsightsPaginator.d.ts b/amplify/functions/fetchDocuments/node_modules/@aws-sdk/client-dynamodb/dist-types/ts3.4/pagination/ListContributorInsightsPaginator.d.ts new file mode 100644 index 0000000..f4961a0 --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@aws-sdk/client-dynamodb/dist-types/ts3.4/pagination/ListContributorInsightsPaginator.d.ts @@ -0,0 +1,11 @@ +import { Paginator } from "@smithy/types"; +import { + ListContributorInsightsCommandInput, + ListContributorInsightsCommandOutput, +} from "../commands/ListContributorInsightsCommand"; +import { DynamoDBPaginationConfiguration } from "./Interfaces"; +export declare const paginateListContributorInsights: ( + config: DynamoDBPaginationConfiguration, + input: ListContributorInsightsCommandInput, + ...rest: any[] +) => Paginator; diff --git a/amplify/functions/fetchDocuments/node_modules/@aws-sdk/client-dynamodb/dist-types/ts3.4/pagination/ListExportsPaginator.d.ts b/amplify/functions/fetchDocuments/node_modules/@aws-sdk/client-dynamodb/dist-types/ts3.4/pagination/ListExportsPaginator.d.ts new file mode 100644 index 0000000..29a8603 --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@aws-sdk/client-dynamodb/dist-types/ts3.4/pagination/ListExportsPaginator.d.ts @@ -0,0 +1,11 @@ +import { Paginator } from "@smithy/types"; +import { + ListExportsCommandInput, + ListExportsCommandOutput, +} from "../commands/ListExportsCommand"; +import { DynamoDBPaginationConfiguration } from "./Interfaces"; +export declare const paginateListExports: ( + config: DynamoDBPaginationConfiguration, + input: ListExportsCommandInput, + ...rest: any[] +) => Paginator; diff --git 
a/amplify/functions/fetchDocuments/node_modules/@aws-sdk/client-dynamodb/dist-types/ts3.4/pagination/ListImportsPaginator.d.ts b/amplify/functions/fetchDocuments/node_modules/@aws-sdk/client-dynamodb/dist-types/ts3.4/pagination/ListImportsPaginator.d.ts new file mode 100644 index 0000000..f8903e8 --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@aws-sdk/client-dynamodb/dist-types/ts3.4/pagination/ListImportsPaginator.d.ts @@ -0,0 +1,11 @@ +import { Paginator } from "@smithy/types"; +import { + ListImportsCommandInput, + ListImportsCommandOutput, +} from "../commands/ListImportsCommand"; +import { DynamoDBPaginationConfiguration } from "./Interfaces"; +export declare const paginateListImports: ( + config: DynamoDBPaginationConfiguration, + input: ListImportsCommandInput, + ...rest: any[] +) => Paginator; diff --git a/amplify/functions/fetchDocuments/node_modules/@aws-sdk/client-dynamodb/dist-types/ts3.4/pagination/ListTablesPaginator.d.ts b/amplify/functions/fetchDocuments/node_modules/@aws-sdk/client-dynamodb/dist-types/ts3.4/pagination/ListTablesPaginator.d.ts new file mode 100644 index 0000000..6246d6e --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@aws-sdk/client-dynamodb/dist-types/ts3.4/pagination/ListTablesPaginator.d.ts @@ -0,0 +1,11 @@ +import { Paginator } from "@smithy/types"; +import { + ListTablesCommandInput, + ListTablesCommandOutput, +} from "../commands/ListTablesCommand"; +import { DynamoDBPaginationConfiguration } from "./Interfaces"; +export declare const paginateListTables: ( + config: DynamoDBPaginationConfiguration, + input: ListTablesCommandInput, + ...rest: any[] +) => Paginator; diff --git a/amplify/functions/fetchDocuments/node_modules/@aws-sdk/client-dynamodb/dist-types/ts3.4/pagination/QueryPaginator.d.ts b/amplify/functions/fetchDocuments/node_modules/@aws-sdk/client-dynamodb/dist-types/ts3.4/pagination/QueryPaginator.d.ts new file mode 100644 index 0000000..2ff4976 --- /dev/null +++ 
b/amplify/functions/fetchDocuments/node_modules/@aws-sdk/client-dynamodb/dist-types/ts3.4/pagination/QueryPaginator.d.ts @@ -0,0 +1,11 @@ +import { Paginator } from "@smithy/types"; +import { + QueryCommandInput, + QueryCommandOutput, +} from "../commands/QueryCommand"; +import { DynamoDBPaginationConfiguration } from "./Interfaces"; +export declare const paginateQuery: ( + config: DynamoDBPaginationConfiguration, + input: QueryCommandInput, + ...rest: any[] +) => Paginator; diff --git a/amplify/functions/fetchDocuments/node_modules/@aws-sdk/client-dynamodb/dist-types/ts3.4/pagination/ScanPaginator.d.ts b/amplify/functions/fetchDocuments/node_modules/@aws-sdk/client-dynamodb/dist-types/ts3.4/pagination/ScanPaginator.d.ts new file mode 100644 index 0000000..a84dfd1 --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@aws-sdk/client-dynamodb/dist-types/ts3.4/pagination/ScanPaginator.d.ts @@ -0,0 +1,8 @@ +import { Paginator } from "@smithy/types"; +import { ScanCommandInput, ScanCommandOutput } from "../commands/ScanCommand"; +import { DynamoDBPaginationConfiguration } from "./Interfaces"; +export declare const paginateScan: ( + config: DynamoDBPaginationConfiguration, + input: ScanCommandInput, + ...rest: any[] +) => Paginator; diff --git a/amplify/functions/fetchDocuments/node_modules/@aws-sdk/client-dynamodb/dist-types/ts3.4/pagination/index.d.ts b/amplify/functions/fetchDocuments/node_modules/@aws-sdk/client-dynamodb/dist-types/ts3.4/pagination/index.d.ts new file mode 100644 index 0000000..a6dfcd0 --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@aws-sdk/client-dynamodb/dist-types/ts3.4/pagination/index.d.ts @@ -0,0 +1,7 @@ +export * from "./Interfaces"; +export * from "./ListContributorInsightsPaginator"; +export * from "./ListExportsPaginator"; +export * from "./ListImportsPaginator"; +export * from "./ListTablesPaginator"; +export * from "./QueryPaginator"; +export * from "./ScanPaginator"; diff --git 
a/amplify/functions/fetchDocuments/node_modules/@aws-sdk/client-dynamodb/dist-types/ts3.4/protocols/Aws_json1_0.d.ts b/amplify/functions/fetchDocuments/node_modules/@aws-sdk/client-dynamodb/dist-types/ts3.4/protocols/Aws_json1_0.d.ts new file mode 100644 index 0000000..83e200c --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@aws-sdk/client-dynamodb/dist-types/ts3.4/protocols/Aws_json1_0.d.ts @@ -0,0 +1,686 @@ +import { + HttpRequest as __HttpRequest, + HttpResponse as __HttpResponse, +} from "@smithy/protocol-http"; +import { SerdeContext as __SerdeContext } from "@smithy/types"; +import { + BatchExecuteStatementCommandInput, + BatchExecuteStatementCommandOutput, +} from "../commands/BatchExecuteStatementCommand"; +import { + BatchGetItemCommandInput, + BatchGetItemCommandOutput, +} from "../commands/BatchGetItemCommand"; +import { + BatchWriteItemCommandInput, + BatchWriteItemCommandOutput, +} from "../commands/BatchWriteItemCommand"; +import { + CreateBackupCommandInput, + CreateBackupCommandOutput, +} from "../commands/CreateBackupCommand"; +import { + CreateGlobalTableCommandInput, + CreateGlobalTableCommandOutput, +} from "../commands/CreateGlobalTableCommand"; +import { + CreateTableCommandInput, + CreateTableCommandOutput, +} from "../commands/CreateTableCommand"; +import { + DeleteBackupCommandInput, + DeleteBackupCommandOutput, +} from "../commands/DeleteBackupCommand"; +import { + DeleteItemCommandInput, + DeleteItemCommandOutput, +} from "../commands/DeleteItemCommand"; +import { + DeleteResourcePolicyCommandInput, + DeleteResourcePolicyCommandOutput, +} from "../commands/DeleteResourcePolicyCommand"; +import { + DeleteTableCommandInput, + DeleteTableCommandOutput, +} from "../commands/DeleteTableCommand"; +import { + DescribeBackupCommandInput, + DescribeBackupCommandOutput, +} from "../commands/DescribeBackupCommand"; +import { + DescribeContinuousBackupsCommandInput, + DescribeContinuousBackupsCommandOutput, +} from 
"../commands/DescribeContinuousBackupsCommand"; +import { + DescribeContributorInsightsCommandInput, + DescribeContributorInsightsCommandOutput, +} from "../commands/DescribeContributorInsightsCommand"; +import { + DescribeEndpointsCommandInput, + DescribeEndpointsCommandOutput, +} from "../commands/DescribeEndpointsCommand"; +import { + DescribeExportCommandInput, + DescribeExportCommandOutput, +} from "../commands/DescribeExportCommand"; +import { + DescribeGlobalTableCommandInput, + DescribeGlobalTableCommandOutput, +} from "../commands/DescribeGlobalTableCommand"; +import { + DescribeGlobalTableSettingsCommandInput, + DescribeGlobalTableSettingsCommandOutput, +} from "../commands/DescribeGlobalTableSettingsCommand"; +import { + DescribeImportCommandInput, + DescribeImportCommandOutput, +} from "../commands/DescribeImportCommand"; +import { + DescribeKinesisStreamingDestinationCommandInput, + DescribeKinesisStreamingDestinationCommandOutput, +} from "../commands/DescribeKinesisStreamingDestinationCommand"; +import { + DescribeLimitsCommandInput, + DescribeLimitsCommandOutput, +} from "../commands/DescribeLimitsCommand"; +import { + DescribeTableCommandInput, + DescribeTableCommandOutput, +} from "../commands/DescribeTableCommand"; +import { + DescribeTableReplicaAutoScalingCommandInput, + DescribeTableReplicaAutoScalingCommandOutput, +} from "../commands/DescribeTableReplicaAutoScalingCommand"; +import { + DescribeTimeToLiveCommandInput, + DescribeTimeToLiveCommandOutput, +} from "../commands/DescribeTimeToLiveCommand"; +import { + DisableKinesisStreamingDestinationCommandInput, + DisableKinesisStreamingDestinationCommandOutput, +} from "../commands/DisableKinesisStreamingDestinationCommand"; +import { + EnableKinesisStreamingDestinationCommandInput, + EnableKinesisStreamingDestinationCommandOutput, +} from "../commands/EnableKinesisStreamingDestinationCommand"; +import { + ExecuteStatementCommandInput, + ExecuteStatementCommandOutput, +} from 
"../commands/ExecuteStatementCommand"; +import { + ExecuteTransactionCommandInput, + ExecuteTransactionCommandOutput, +} from "../commands/ExecuteTransactionCommand"; +import { + ExportTableToPointInTimeCommandInput, + ExportTableToPointInTimeCommandOutput, +} from "../commands/ExportTableToPointInTimeCommand"; +import { + GetItemCommandInput, + GetItemCommandOutput, +} from "../commands/GetItemCommand"; +import { + GetResourcePolicyCommandInput, + GetResourcePolicyCommandOutput, +} from "../commands/GetResourcePolicyCommand"; +import { + ImportTableCommandInput, + ImportTableCommandOutput, +} from "../commands/ImportTableCommand"; +import { + ListBackupsCommandInput, + ListBackupsCommandOutput, +} from "../commands/ListBackupsCommand"; +import { + ListContributorInsightsCommandInput, + ListContributorInsightsCommandOutput, +} from "../commands/ListContributorInsightsCommand"; +import { + ListExportsCommandInput, + ListExportsCommandOutput, +} from "../commands/ListExportsCommand"; +import { + ListGlobalTablesCommandInput, + ListGlobalTablesCommandOutput, +} from "../commands/ListGlobalTablesCommand"; +import { + ListImportsCommandInput, + ListImportsCommandOutput, +} from "../commands/ListImportsCommand"; +import { + ListTablesCommandInput, + ListTablesCommandOutput, +} from "../commands/ListTablesCommand"; +import { + ListTagsOfResourceCommandInput, + ListTagsOfResourceCommandOutput, +} from "../commands/ListTagsOfResourceCommand"; +import { + PutItemCommandInput, + PutItemCommandOutput, +} from "../commands/PutItemCommand"; +import { + PutResourcePolicyCommandInput, + PutResourcePolicyCommandOutput, +} from "../commands/PutResourcePolicyCommand"; +import { + QueryCommandInput, + QueryCommandOutput, +} from "../commands/QueryCommand"; +import { + RestoreTableFromBackupCommandInput, + RestoreTableFromBackupCommandOutput, +} from "../commands/RestoreTableFromBackupCommand"; +import { + RestoreTableToPointInTimeCommandInput, + RestoreTableToPointInTimeCommandOutput, 
+} from "../commands/RestoreTableToPointInTimeCommand"; +import { ScanCommandInput, ScanCommandOutput } from "../commands/ScanCommand"; +import { + TagResourceCommandInput, + TagResourceCommandOutput, +} from "../commands/TagResourceCommand"; +import { + TransactGetItemsCommandInput, + TransactGetItemsCommandOutput, +} from "../commands/TransactGetItemsCommand"; +import { + TransactWriteItemsCommandInput, + TransactWriteItemsCommandOutput, +} from "../commands/TransactWriteItemsCommand"; +import { + UntagResourceCommandInput, + UntagResourceCommandOutput, +} from "../commands/UntagResourceCommand"; +import { + UpdateContinuousBackupsCommandInput, + UpdateContinuousBackupsCommandOutput, +} from "../commands/UpdateContinuousBackupsCommand"; +import { + UpdateContributorInsightsCommandInput, + UpdateContributorInsightsCommandOutput, +} from "../commands/UpdateContributorInsightsCommand"; +import { + UpdateGlobalTableCommandInput, + UpdateGlobalTableCommandOutput, +} from "../commands/UpdateGlobalTableCommand"; +import { + UpdateGlobalTableSettingsCommandInput, + UpdateGlobalTableSettingsCommandOutput, +} from "../commands/UpdateGlobalTableSettingsCommand"; +import { + UpdateItemCommandInput, + UpdateItemCommandOutput, +} from "../commands/UpdateItemCommand"; +import { + UpdateKinesisStreamingDestinationCommandInput, + UpdateKinesisStreamingDestinationCommandOutput, +} from "../commands/UpdateKinesisStreamingDestinationCommand"; +import { + UpdateTableCommandInput, + UpdateTableCommandOutput, +} from "../commands/UpdateTableCommand"; +import { + UpdateTableReplicaAutoScalingCommandInput, + UpdateTableReplicaAutoScalingCommandOutput, +} from "../commands/UpdateTableReplicaAutoScalingCommand"; +import { + UpdateTimeToLiveCommandInput, + UpdateTimeToLiveCommandOutput, +} from "../commands/UpdateTimeToLiveCommand"; +export declare const se_BatchExecuteStatementCommand: ( + input: BatchExecuteStatementCommandInput, + context: __SerdeContext +) => Promise<__HttpRequest>; 
+export declare const se_BatchGetItemCommand: ( + input: BatchGetItemCommandInput, + context: __SerdeContext +) => Promise<__HttpRequest>; +export declare const se_BatchWriteItemCommand: ( + input: BatchWriteItemCommandInput, + context: __SerdeContext +) => Promise<__HttpRequest>; +export declare const se_CreateBackupCommand: ( + input: CreateBackupCommandInput, + context: __SerdeContext +) => Promise<__HttpRequest>; +export declare const se_CreateGlobalTableCommand: ( + input: CreateGlobalTableCommandInput, + context: __SerdeContext +) => Promise<__HttpRequest>; +export declare const se_CreateTableCommand: ( + input: CreateTableCommandInput, + context: __SerdeContext +) => Promise<__HttpRequest>; +export declare const se_DeleteBackupCommand: ( + input: DeleteBackupCommandInput, + context: __SerdeContext +) => Promise<__HttpRequest>; +export declare const se_DeleteItemCommand: ( + input: DeleteItemCommandInput, + context: __SerdeContext +) => Promise<__HttpRequest>; +export declare const se_DeleteResourcePolicyCommand: ( + input: DeleteResourcePolicyCommandInput, + context: __SerdeContext +) => Promise<__HttpRequest>; +export declare const se_DeleteTableCommand: ( + input: DeleteTableCommandInput, + context: __SerdeContext +) => Promise<__HttpRequest>; +export declare const se_DescribeBackupCommand: ( + input: DescribeBackupCommandInput, + context: __SerdeContext +) => Promise<__HttpRequest>; +export declare const se_DescribeContinuousBackupsCommand: ( + input: DescribeContinuousBackupsCommandInput, + context: __SerdeContext +) => Promise<__HttpRequest>; +export declare const se_DescribeContributorInsightsCommand: ( + input: DescribeContributorInsightsCommandInput, + context: __SerdeContext +) => Promise<__HttpRequest>; +export declare const se_DescribeEndpointsCommand: ( + input: DescribeEndpointsCommandInput, + context: __SerdeContext +) => Promise<__HttpRequest>; +export declare const se_DescribeExportCommand: ( + input: DescribeExportCommandInput, + context: 
__SerdeContext +) => Promise<__HttpRequest>; +export declare const se_DescribeGlobalTableCommand: ( + input: DescribeGlobalTableCommandInput, + context: __SerdeContext +) => Promise<__HttpRequest>; +export declare const se_DescribeGlobalTableSettingsCommand: ( + input: DescribeGlobalTableSettingsCommandInput, + context: __SerdeContext +) => Promise<__HttpRequest>; +export declare const se_DescribeImportCommand: ( + input: DescribeImportCommandInput, + context: __SerdeContext +) => Promise<__HttpRequest>; +export declare const se_DescribeKinesisStreamingDestinationCommand: ( + input: DescribeKinesisStreamingDestinationCommandInput, + context: __SerdeContext +) => Promise<__HttpRequest>; +export declare const se_DescribeLimitsCommand: ( + input: DescribeLimitsCommandInput, + context: __SerdeContext +) => Promise<__HttpRequest>; +export declare const se_DescribeTableCommand: ( + input: DescribeTableCommandInput, + context: __SerdeContext +) => Promise<__HttpRequest>; +export declare const se_DescribeTableReplicaAutoScalingCommand: ( + input: DescribeTableReplicaAutoScalingCommandInput, + context: __SerdeContext +) => Promise<__HttpRequest>; +export declare const se_DescribeTimeToLiveCommand: ( + input: DescribeTimeToLiveCommandInput, + context: __SerdeContext +) => Promise<__HttpRequest>; +export declare const se_DisableKinesisStreamingDestinationCommand: ( + input: DisableKinesisStreamingDestinationCommandInput, + context: __SerdeContext +) => Promise<__HttpRequest>; +export declare const se_EnableKinesisStreamingDestinationCommand: ( + input: EnableKinesisStreamingDestinationCommandInput, + context: __SerdeContext +) => Promise<__HttpRequest>; +export declare const se_ExecuteStatementCommand: ( + input: ExecuteStatementCommandInput, + context: __SerdeContext +) => Promise<__HttpRequest>; +export declare const se_ExecuteTransactionCommand: ( + input: ExecuteTransactionCommandInput, + context: __SerdeContext +) => Promise<__HttpRequest>; +export declare const 
se_ExportTableToPointInTimeCommand: ( + input: ExportTableToPointInTimeCommandInput, + context: __SerdeContext +) => Promise<__HttpRequest>; +export declare const se_GetItemCommand: ( + input: GetItemCommandInput, + context: __SerdeContext +) => Promise<__HttpRequest>; +export declare const se_GetResourcePolicyCommand: ( + input: GetResourcePolicyCommandInput, + context: __SerdeContext +) => Promise<__HttpRequest>; +export declare const se_ImportTableCommand: ( + input: ImportTableCommandInput, + context: __SerdeContext +) => Promise<__HttpRequest>; +export declare const se_ListBackupsCommand: ( + input: ListBackupsCommandInput, + context: __SerdeContext +) => Promise<__HttpRequest>; +export declare const se_ListContributorInsightsCommand: ( + input: ListContributorInsightsCommandInput, + context: __SerdeContext +) => Promise<__HttpRequest>; +export declare const se_ListExportsCommand: ( + input: ListExportsCommandInput, + context: __SerdeContext +) => Promise<__HttpRequest>; +export declare const se_ListGlobalTablesCommand: ( + input: ListGlobalTablesCommandInput, + context: __SerdeContext +) => Promise<__HttpRequest>; +export declare const se_ListImportsCommand: ( + input: ListImportsCommandInput, + context: __SerdeContext +) => Promise<__HttpRequest>; +export declare const se_ListTablesCommand: ( + input: ListTablesCommandInput, + context: __SerdeContext +) => Promise<__HttpRequest>; +export declare const se_ListTagsOfResourceCommand: ( + input: ListTagsOfResourceCommandInput, + context: __SerdeContext +) => Promise<__HttpRequest>; +export declare const se_PutItemCommand: ( + input: PutItemCommandInput, + context: __SerdeContext +) => Promise<__HttpRequest>; +export declare const se_PutResourcePolicyCommand: ( + input: PutResourcePolicyCommandInput, + context: __SerdeContext +) => Promise<__HttpRequest>; +export declare const se_QueryCommand: ( + input: QueryCommandInput, + context: __SerdeContext +) => Promise<__HttpRequest>; +export declare const 
se_RestoreTableFromBackupCommand: ( + input: RestoreTableFromBackupCommandInput, + context: __SerdeContext +) => Promise<__HttpRequest>; +export declare const se_RestoreTableToPointInTimeCommand: ( + input: RestoreTableToPointInTimeCommandInput, + context: __SerdeContext +) => Promise<__HttpRequest>; +export declare const se_ScanCommand: ( + input: ScanCommandInput, + context: __SerdeContext +) => Promise<__HttpRequest>; +export declare const se_TagResourceCommand: ( + input: TagResourceCommandInput, + context: __SerdeContext +) => Promise<__HttpRequest>; +export declare const se_TransactGetItemsCommand: ( + input: TransactGetItemsCommandInput, + context: __SerdeContext +) => Promise<__HttpRequest>; +export declare const se_TransactWriteItemsCommand: ( + input: TransactWriteItemsCommandInput, + context: __SerdeContext +) => Promise<__HttpRequest>; +export declare const se_UntagResourceCommand: ( + input: UntagResourceCommandInput, + context: __SerdeContext +) => Promise<__HttpRequest>; +export declare const se_UpdateContinuousBackupsCommand: ( + input: UpdateContinuousBackupsCommandInput, + context: __SerdeContext +) => Promise<__HttpRequest>; +export declare const se_UpdateContributorInsightsCommand: ( + input: UpdateContributorInsightsCommandInput, + context: __SerdeContext +) => Promise<__HttpRequest>; +export declare const se_UpdateGlobalTableCommand: ( + input: UpdateGlobalTableCommandInput, + context: __SerdeContext +) => Promise<__HttpRequest>; +export declare const se_UpdateGlobalTableSettingsCommand: ( + input: UpdateGlobalTableSettingsCommandInput, + context: __SerdeContext +) => Promise<__HttpRequest>; +export declare const se_UpdateItemCommand: ( + input: UpdateItemCommandInput, + context: __SerdeContext +) => Promise<__HttpRequest>; +export declare const se_UpdateKinesisStreamingDestinationCommand: ( + input: UpdateKinesisStreamingDestinationCommandInput, + context: __SerdeContext +) => Promise<__HttpRequest>; +export declare const 
se_UpdateTableCommand: ( + input: UpdateTableCommandInput, + context: __SerdeContext +) => Promise<__HttpRequest>; +export declare const se_UpdateTableReplicaAutoScalingCommand: ( + input: UpdateTableReplicaAutoScalingCommandInput, + context: __SerdeContext +) => Promise<__HttpRequest>; +export declare const se_UpdateTimeToLiveCommand: ( + input: UpdateTimeToLiveCommandInput, + context: __SerdeContext +) => Promise<__HttpRequest>; +export declare const de_BatchExecuteStatementCommand: ( + output: __HttpResponse, + context: __SerdeContext +) => Promise; +export declare const de_BatchGetItemCommand: ( + output: __HttpResponse, + context: __SerdeContext +) => Promise; +export declare const de_BatchWriteItemCommand: ( + output: __HttpResponse, + context: __SerdeContext +) => Promise; +export declare const de_CreateBackupCommand: ( + output: __HttpResponse, + context: __SerdeContext +) => Promise; +export declare const de_CreateGlobalTableCommand: ( + output: __HttpResponse, + context: __SerdeContext +) => Promise; +export declare const de_CreateTableCommand: ( + output: __HttpResponse, + context: __SerdeContext +) => Promise; +export declare const de_DeleteBackupCommand: ( + output: __HttpResponse, + context: __SerdeContext +) => Promise; +export declare const de_DeleteItemCommand: ( + output: __HttpResponse, + context: __SerdeContext +) => Promise; +export declare const de_DeleteResourcePolicyCommand: ( + output: __HttpResponse, + context: __SerdeContext +) => Promise; +export declare const de_DeleteTableCommand: ( + output: __HttpResponse, + context: __SerdeContext +) => Promise; +export declare const de_DescribeBackupCommand: ( + output: __HttpResponse, + context: __SerdeContext +) => Promise; +export declare const de_DescribeContinuousBackupsCommand: ( + output: __HttpResponse, + context: __SerdeContext +) => Promise; +export declare const de_DescribeContributorInsightsCommand: ( + output: __HttpResponse, + context: __SerdeContext +) => Promise; +export declare 
const de_DescribeEndpointsCommand: ( + output: __HttpResponse, + context: __SerdeContext +) => Promise; +export declare const de_DescribeExportCommand: ( + output: __HttpResponse, + context: __SerdeContext +) => Promise; +export declare const de_DescribeGlobalTableCommand: ( + output: __HttpResponse, + context: __SerdeContext +) => Promise; +export declare const de_DescribeGlobalTableSettingsCommand: ( + output: __HttpResponse, + context: __SerdeContext +) => Promise; +export declare const de_DescribeImportCommand: ( + output: __HttpResponse, + context: __SerdeContext +) => Promise; +export declare const de_DescribeKinesisStreamingDestinationCommand: ( + output: __HttpResponse, + context: __SerdeContext +) => Promise; +export declare const de_DescribeLimitsCommand: ( + output: __HttpResponse, + context: __SerdeContext +) => Promise; +export declare const de_DescribeTableCommand: ( + output: __HttpResponse, + context: __SerdeContext +) => Promise; +export declare const de_DescribeTableReplicaAutoScalingCommand: ( + output: __HttpResponse, + context: __SerdeContext +) => Promise; +export declare const de_DescribeTimeToLiveCommand: ( + output: __HttpResponse, + context: __SerdeContext +) => Promise; +export declare const de_DisableKinesisStreamingDestinationCommand: ( + output: __HttpResponse, + context: __SerdeContext +) => Promise; +export declare const de_EnableKinesisStreamingDestinationCommand: ( + output: __HttpResponse, + context: __SerdeContext +) => Promise; +export declare const de_ExecuteStatementCommand: ( + output: __HttpResponse, + context: __SerdeContext +) => Promise; +export declare const de_ExecuteTransactionCommand: ( + output: __HttpResponse, + context: __SerdeContext +) => Promise; +export declare const de_ExportTableToPointInTimeCommand: ( + output: __HttpResponse, + context: __SerdeContext +) => Promise; +export declare const de_GetItemCommand: ( + output: __HttpResponse, + context: __SerdeContext +) => Promise; +export declare const 
de_GetResourcePolicyCommand: ( + output: __HttpResponse, + context: __SerdeContext +) => Promise; +export declare const de_ImportTableCommand: ( + output: __HttpResponse, + context: __SerdeContext +) => Promise; +export declare const de_ListBackupsCommand: ( + output: __HttpResponse, + context: __SerdeContext +) => Promise; +export declare const de_ListContributorInsightsCommand: ( + output: __HttpResponse, + context: __SerdeContext +) => Promise; +export declare const de_ListExportsCommand: ( + output: __HttpResponse, + context: __SerdeContext +) => Promise; +export declare const de_ListGlobalTablesCommand: ( + output: __HttpResponse, + context: __SerdeContext +) => Promise; +export declare const de_ListImportsCommand: ( + output: __HttpResponse, + context: __SerdeContext +) => Promise; +export declare const de_ListTablesCommand: ( + output: __HttpResponse, + context: __SerdeContext +) => Promise; +export declare const de_ListTagsOfResourceCommand: ( + output: __HttpResponse, + context: __SerdeContext +) => Promise; +export declare const de_PutItemCommand: ( + output: __HttpResponse, + context: __SerdeContext +) => Promise; +export declare const de_PutResourcePolicyCommand: ( + output: __HttpResponse, + context: __SerdeContext +) => Promise; +export declare const de_QueryCommand: ( + output: __HttpResponse, + context: __SerdeContext +) => Promise; +export declare const de_RestoreTableFromBackupCommand: ( + output: __HttpResponse, + context: __SerdeContext +) => Promise; +export declare const de_RestoreTableToPointInTimeCommand: ( + output: __HttpResponse, + context: __SerdeContext +) => Promise; +export declare const de_ScanCommand: ( + output: __HttpResponse, + context: __SerdeContext +) => Promise; +export declare const de_TagResourceCommand: ( + output: __HttpResponse, + context: __SerdeContext +) => Promise; +export declare const de_TransactGetItemsCommand: ( + output: __HttpResponse, + context: __SerdeContext +) => Promise; +export declare const 
de_TransactWriteItemsCommand: ( + output: __HttpResponse, + context: __SerdeContext +) => Promise; +export declare const de_UntagResourceCommand: ( + output: __HttpResponse, + context: __SerdeContext +) => Promise; +export declare const de_UpdateContinuousBackupsCommand: ( + output: __HttpResponse, + context: __SerdeContext +) => Promise; +export declare const de_UpdateContributorInsightsCommand: ( + output: __HttpResponse, + context: __SerdeContext +) => Promise; +export declare const de_UpdateGlobalTableCommand: ( + output: __HttpResponse, + context: __SerdeContext +) => Promise; +export declare const de_UpdateGlobalTableSettingsCommand: ( + output: __HttpResponse, + context: __SerdeContext +) => Promise; +export declare const de_UpdateItemCommand: ( + output: __HttpResponse, + context: __SerdeContext +) => Promise; +export declare const de_UpdateKinesisStreamingDestinationCommand: ( + output: __HttpResponse, + context: __SerdeContext +) => Promise; +export declare const de_UpdateTableCommand: ( + output: __HttpResponse, + context: __SerdeContext +) => Promise; +export declare const de_UpdateTableReplicaAutoScalingCommand: ( + output: __HttpResponse, + context: __SerdeContext +) => Promise; +export declare const de_UpdateTimeToLiveCommand: ( + output: __HttpResponse, + context: __SerdeContext +) => Promise; diff --git a/amplify/functions/fetchDocuments/node_modules/@aws-sdk/client-dynamodb/dist-types/ts3.4/runtimeConfig.browser.d.ts b/amplify/functions/fetchDocuments/node_modules/@aws-sdk/client-dynamodb/dist-types/ts3.4/runtimeConfig.browser.d.ts new file mode 100644 index 0000000..80f326c --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@aws-sdk/client-dynamodb/dist-types/ts3.4/runtimeConfig.browser.d.ts @@ -0,0 +1,111 @@ +import { FetchHttpHandler as RequestHandler } from "@smithy/fetch-http-handler"; +import { DynamoDBClientConfig } from "./DynamoDBClient"; +export declare const getRuntimeConfig: (config: DynamoDBClientConfig) => { + runtime: 
string; + defaultsMode: import("@smithy/types").Provider< + import("@smithy/smithy-client").ResolvedDefaultsMode + >; + accountIdEndpointMode: + | "disabled" + | "preferred" + | "required" + | (() => Promise); + bodyLengthChecker: import("@smithy/types").BodyLengthCalculator; + credentialDefaultProvider: + | ((input: any) => import("@smithy/types").AwsCredentialIdentityProvider) + | (( + _: unknown + ) => () => Promise); + defaultUserAgentProvider: ( + config?: + | import("@aws-sdk/util-user-agent-browser").PreviouslyResolved + | undefined + ) => Promise; + endpointDiscoveryEnabledProvider: import("@smithy/types").Provider< + boolean | undefined + >; + maxAttempts: number | import("@smithy/types").Provider; + region: string | import("@smithy/types").Provider; + requestHandler: + | import("@smithy/protocol-http").HttpHandler + | RequestHandler; + retryMode: string | import("@smithy/types").Provider; + sha256: import("@smithy/types").HashConstructor; + streamCollector: import("@smithy/types").StreamCollector; + useDualstackEndpoint: boolean | import("@smithy/types").Provider; + useFipsEndpoint: boolean | import("@smithy/types").Provider; + apiVersion: string; + cacheMiddleware?: boolean | undefined; + urlParser: import("@smithy/types").UrlParser; + base64Decoder: import("@smithy/types").Decoder; + base64Encoder: (_input: string | Uint8Array) => string; + utf8Decoder: import("@smithy/types").Decoder; + utf8Encoder: (input: string | Uint8Array) => string; + disableHostPrefix: boolean; + serviceId: string; + profile?: string | undefined; + logger: import("@smithy/types").Logger; + extensions: import("./runtimeExtensions").RuntimeExtension[]; + customUserAgent?: string | import("@smithy/types").UserAgent | undefined; + userAgentAppId?: + | string + | import("@smithy/types").Provider + | undefined; + retryStrategy?: + | import("@smithy/types").RetryStrategy + | import("@smithy/types").RetryStrategyV2 + | undefined; + endpoint?: + | (( + | string + | 
import("@smithy/types").Endpoint + | import("@smithy/types").Provider + | import("@smithy/types").EndpointV2 + | import("@smithy/types").Provider + ) & + ( + | string + | import("@smithy/types").Provider + | import("@smithy/types").Endpoint + | import("@smithy/types").Provider + | import("@smithy/types").EndpointV2 + | import("@smithy/types").Provider + )) + | undefined; + endpointProvider: ( + endpointParams: import("./endpoint/EndpointParameters").EndpointParameters, + context?: { + logger?: import("@smithy/types").Logger | undefined; + } + ) => import("@smithy/types").EndpointV2; + tls?: boolean | undefined; + serviceConfiguredEndpoint?: undefined; + authSchemePreference?: + | string[] + | import("@smithy/types").Provider + | undefined; + httpAuthSchemes: import("@smithy/types").HttpAuthScheme[]; + httpAuthSchemeProvider: import("./auth/httpAuthSchemeProvider").DynamoDBHttpAuthSchemeProvider; + credentials?: + | import("@smithy/types").AwsCredentialIdentity + | import("@smithy/types").AwsCredentialIdentityProvider + | undefined; + signer?: + | import("@smithy/types").RequestSigner + | (( + authScheme?: import("@smithy/types").AuthScheme | undefined + ) => Promise) + | undefined; + signingEscapePath?: boolean | undefined; + systemClockOffset?: number | undefined; + signingRegion?: string | undefined; + signerConstructor?: + | (new ( + options: import("@smithy/signature-v4").SignatureV4Init & + import("@smithy/signature-v4").SignatureV4CryptoInit + ) => import("@smithy/types").RequestSigner) + | undefined; + endpointCacheSize?: number | undefined; + endpointDiscoveryEnabled?: boolean | undefined; + accountId?: string | import("@smithy/types").Provider | undefined; +}; diff --git a/amplify/functions/fetchDocuments/node_modules/@aws-sdk/client-dynamodb/dist-types/ts3.4/runtimeConfig.d.ts b/amplify/functions/fetchDocuments/node_modules/@aws-sdk/client-dynamodb/dist-types/ts3.4/runtimeConfig.d.ts new file mode 100644 index 0000000..32d5489 --- /dev/null +++ 
b/amplify/functions/fetchDocuments/node_modules/@aws-sdk/client-dynamodb/dist-types/ts3.4/runtimeConfig.d.ts @@ -0,0 +1,111 @@ +import { NodeHttpHandler as RequestHandler } from "@smithy/node-http-handler"; +import { DynamoDBClientConfig } from "./DynamoDBClient"; +export declare const getRuntimeConfig: (config: DynamoDBClientConfig) => { + runtime: string; + defaultsMode: import("@smithy/types").Provider< + import("@smithy/smithy-client").ResolvedDefaultsMode + >; + accountIdEndpointMode: + | "disabled" + | "preferred" + | "required" + | import("@smithy/types").Provider< + import("@aws-sdk/core/account-id-endpoint").AccountIdEndpointMode + >; + authSchemePreference: string[] | import("@smithy/types").Provider; + bodyLengthChecker: import("@smithy/types").BodyLengthCalculator; + credentialDefaultProvider: + | ((input: any) => import("@smithy/types").AwsCredentialIdentityProvider) + | (( + init?: + | import("@aws-sdk/credential-provider-node").DefaultProviderInit + | undefined + ) => import("@smithy/types").MemoizedProvider< + import("@smithy/types").AwsCredentialIdentity + >); + defaultUserAgentProvider: ( + config?: + | import("@aws-sdk/util-user-agent-node").PreviouslyResolved + | undefined + ) => Promise; + endpointDiscoveryEnabledProvider: import("@smithy/types").Provider< + boolean | undefined + >; + maxAttempts: number | import("@smithy/types").Provider; + region: string | import("@smithy/types").Provider; + requestHandler: + | RequestHandler + | import("@smithy/protocol-http").HttpHandler; + retryMode: string | import("@smithy/types").Provider; + sha256: import("@smithy/types").HashConstructor; + streamCollector: import("@smithy/types").StreamCollector; + useDualstackEndpoint: boolean | import("@smithy/types").Provider; + useFipsEndpoint: boolean | import("@smithy/types").Provider; + userAgentAppId: string | import("@smithy/types").Provider; + apiVersion: string; + cacheMiddleware?: boolean | undefined; + urlParser: import("@smithy/types").UrlParser; + 
base64Decoder: import("@smithy/types").Decoder; + base64Encoder: (_input: string | Uint8Array) => string; + utf8Decoder: import("@smithy/types").Decoder; + utf8Encoder: (input: string | Uint8Array) => string; + disableHostPrefix: boolean; + serviceId: string; + profile?: string | undefined; + logger: import("@smithy/types").Logger; + extensions: import("./runtimeExtensions").RuntimeExtension[]; + customUserAgent?: string | import("@smithy/types").UserAgent | undefined; + retryStrategy?: + | import("@smithy/types").RetryStrategy + | import("@smithy/types").RetryStrategyV2 + | undefined; + endpoint?: + | (( + | string + | import("@smithy/types").Endpoint + | import("@smithy/types").Provider + | import("@smithy/types").EndpointV2 + | import("@smithy/types").Provider + ) & + ( + | string + | import("@smithy/types").Provider + | import("@smithy/types").Endpoint + | import("@smithy/types").Provider + | import("@smithy/types").EndpointV2 + | import("@smithy/types").Provider + )) + | undefined; + endpointProvider: ( + endpointParams: import("./endpoint/EndpointParameters").EndpointParameters, + context?: { + logger?: import("@smithy/types").Logger | undefined; + } + ) => import("@smithy/types").EndpointV2; + tls?: boolean | undefined; + serviceConfiguredEndpoint?: undefined; + httpAuthSchemes: import("@smithy/types").HttpAuthScheme[]; + httpAuthSchemeProvider: import("./auth/httpAuthSchemeProvider").DynamoDBHttpAuthSchemeProvider; + credentials?: + | import("@smithy/types").AwsCredentialIdentity + | import("@smithy/types").AwsCredentialIdentityProvider + | undefined; + signer?: + | import("@smithy/types").RequestSigner + | (( + authScheme?: import("@smithy/types").AuthScheme | undefined + ) => Promise) + | undefined; + signingEscapePath?: boolean | undefined; + systemClockOffset?: number | undefined; + signingRegion?: string | undefined; + signerConstructor?: + | (new ( + options: import("@smithy/signature-v4").SignatureV4Init & + 
import("@smithy/signature-v4").SignatureV4CryptoInit + ) => import("@smithy/types").RequestSigner) + | undefined; + endpointCacheSize?: number | undefined; + endpointDiscoveryEnabled?: boolean | undefined; + accountId?: string | import("@smithy/types").Provider | undefined; +}; diff --git a/amplify/functions/fetchDocuments/node_modules/@aws-sdk/client-dynamodb/dist-types/ts3.4/runtimeConfig.native.d.ts b/amplify/functions/fetchDocuments/node_modules/@aws-sdk/client-dynamodb/dist-types/ts3.4/runtimeConfig.native.d.ts new file mode 100644 index 0000000..f3583eb --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@aws-sdk/client-dynamodb/dist-types/ts3.4/runtimeConfig.native.d.ts @@ -0,0 +1,115 @@ +import { DynamoDBClientConfig } from "./DynamoDBClient"; +export declare const getRuntimeConfig: (config: DynamoDBClientConfig) => { + runtime: string; + sha256: import("@smithy/types").HashConstructor; + requestHandler: + | import("@smithy/types").NodeHttpHandlerOptions + | import("@smithy/types").FetchHttpHandlerOptions + | Record + | import("@smithy/protocol-http").HttpHandler + | import("@smithy/fetch-http-handler").FetchHttpHandler; + apiVersion: string; + cacheMiddleware?: boolean | undefined; + urlParser: import("@smithy/types").UrlParser; + bodyLengthChecker: import("@smithy/types").BodyLengthCalculator; + streamCollector: import("@smithy/types").StreamCollector; + base64Decoder: import("@smithy/types").Decoder; + base64Encoder: (_input: string | Uint8Array) => string; + utf8Decoder: import("@smithy/types").Decoder; + utf8Encoder: (input: string | Uint8Array) => string; + disableHostPrefix: boolean; + serviceId: string; + useDualstackEndpoint: boolean | import("@smithy/types").Provider; + useFipsEndpoint: boolean | import("@smithy/types").Provider; + region: string | import("@smithy/types").Provider; + profile?: string | undefined; + accountIdEndpointMode: + | "disabled" + | "preferred" + | "required" + | (() => Promise); + defaultUserAgentProvider: ( 
+ config?: + | import("@aws-sdk/util-user-agent-browser").PreviouslyResolved + | undefined + ) => Promise; + credentialDefaultProvider: + | ((input: any) => import("@smithy/types").AwsCredentialIdentityProvider) + | (( + _: unknown + ) => () => Promise); + maxAttempts: number | import("@smithy/types").Provider; + retryMode: string | import("@smithy/types").Provider; + logger: import("@smithy/types").Logger; + extensions: import("./runtimeExtensions").RuntimeExtension[]; + defaultsMode: + | import("@smithy/smithy-client").DefaultsMode + | import("@smithy/types").Provider< + import("@smithy/smithy-client").DefaultsMode + >; + endpointDiscoveryEnabledProvider: import("@smithy/types").Provider< + boolean | undefined + >; + customUserAgent?: string | import("@smithy/types").UserAgent | undefined; + userAgentAppId?: + | string + | import("@smithy/types").Provider + | undefined; + retryStrategy?: + | import("@smithy/types").RetryStrategy + | import("@smithy/types").RetryStrategyV2 + | undefined; + endpoint?: + | (( + | string + | import("@smithy/types").Endpoint + | import("@smithy/types").Provider + | import("@smithy/types").EndpointV2 + | import("@smithy/types").Provider + ) & + ( + | string + | import("@smithy/types").Provider + | import("@smithy/types").Endpoint + | import("@smithy/types").Provider + | import("@smithy/types").EndpointV2 + | import("@smithy/types").Provider + )) + | undefined; + endpointProvider: ( + endpointParams: import("./endpoint/EndpointParameters").EndpointParameters, + context?: { + logger?: import("@smithy/types").Logger | undefined; + } + ) => import("@smithy/types").EndpointV2; + tls?: boolean | undefined; + serviceConfiguredEndpoint?: undefined; + authSchemePreference?: + | string[] + | import("@smithy/types").Provider + | undefined; + httpAuthSchemes: import("@smithy/types").HttpAuthScheme[]; + httpAuthSchemeProvider: import("./auth/httpAuthSchemeProvider").DynamoDBHttpAuthSchemeProvider; + credentials?: + | 
import("@smithy/types").AwsCredentialIdentity + | import("@smithy/types").AwsCredentialIdentityProvider + | undefined; + signer?: + | import("@smithy/types").RequestSigner + | (( + authScheme?: import("@smithy/types").AuthScheme | undefined + ) => Promise) + | undefined; + signingEscapePath?: boolean | undefined; + systemClockOffset?: number | undefined; + signingRegion?: string | undefined; + signerConstructor?: + | (new ( + options: import("@smithy/signature-v4").SignatureV4Init & + import("@smithy/signature-v4").SignatureV4CryptoInit + ) => import("@smithy/types").RequestSigner) + | undefined; + endpointCacheSize?: number | undefined; + endpointDiscoveryEnabled?: boolean | undefined; + accountId?: string | import("@smithy/types").Provider | undefined; +}; diff --git a/amplify/functions/fetchDocuments/node_modules/@aws-sdk/client-dynamodb/dist-types/ts3.4/runtimeConfig.shared.d.ts b/amplify/functions/fetchDocuments/node_modules/@aws-sdk/client-dynamodb/dist-types/ts3.4/runtimeConfig.shared.d.ts new file mode 100644 index 0000000..5d480e8 --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@aws-sdk/client-dynamodb/dist-types/ts3.4/runtimeConfig.shared.d.ts @@ -0,0 +1,21 @@ +import { DynamoDBClientConfig } from "./DynamoDBClient"; +export declare const getRuntimeConfig: (config: DynamoDBClientConfig) => { + apiVersion: string; + base64Decoder: import("@smithy/types").Decoder; + base64Encoder: (_input: string | Uint8Array) => string; + disableHostPrefix: boolean; + endpointProvider: ( + endpointParams: import("./endpoint/EndpointParameters").EndpointParameters, + context?: { + logger?: import("@smithy/types").Logger | undefined; + } + ) => import("@smithy/types").EndpointV2; + extensions: import("./runtimeExtensions").RuntimeExtension[]; + httpAuthSchemeProvider: import("./auth/httpAuthSchemeProvider").DynamoDBHttpAuthSchemeProvider; + httpAuthSchemes: import("@smithy/types").HttpAuthScheme[]; + logger: import("@smithy/types").Logger; + serviceId: 
string; + urlParser: import("@smithy/types").UrlParser; + utf8Decoder: import("@smithy/types").Decoder; + utf8Encoder: (input: string | Uint8Array) => string; +}; diff --git a/amplify/functions/fetchDocuments/node_modules/@aws-sdk/client-dynamodb/dist-types/ts3.4/runtimeExtensions.d.ts b/amplify/functions/fetchDocuments/node_modules/@aws-sdk/client-dynamodb/dist-types/ts3.4/runtimeExtensions.d.ts new file mode 100644 index 0000000..632b45b --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@aws-sdk/client-dynamodb/dist-types/ts3.4/runtimeExtensions.d.ts @@ -0,0 +1,11 @@ +import { DynamoDBExtensionConfiguration } from "./extensionConfiguration"; +export interface RuntimeExtension { + configure(extensionConfiguration: DynamoDBExtensionConfiguration): void; +} +export interface RuntimeExtensionsConfig { + extensions: RuntimeExtension[]; +} +export declare const resolveRuntimeExtensions: ( + runtimeConfig: any, + extensions: RuntimeExtension[] +) => any; diff --git a/amplify/functions/fetchDocuments/node_modules/@aws-sdk/client-dynamodb/dist-types/ts3.4/waiters/index.d.ts b/amplify/functions/fetchDocuments/node_modules/@aws-sdk/client-dynamodb/dist-types/ts3.4/waiters/index.d.ts new file mode 100644 index 0000000..a70fabd --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@aws-sdk/client-dynamodb/dist-types/ts3.4/waiters/index.d.ts @@ -0,0 +1,2 @@ +export * from "./waitForTableExists"; +export * from "./waitForTableNotExists"; diff --git a/amplify/functions/fetchDocuments/node_modules/@aws-sdk/client-dynamodb/dist-types/ts3.4/waiters/waitForTableExists.d.ts b/amplify/functions/fetchDocuments/node_modules/@aws-sdk/client-dynamodb/dist-types/ts3.4/waiters/waitForTableExists.d.ts new file mode 100644 index 0000000..5dba224 --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@aws-sdk/client-dynamodb/dist-types/ts3.4/waiters/waitForTableExists.d.ts @@ -0,0 +1,11 @@ +import { WaiterConfiguration, WaiterResult } from 
"@smithy/util-waiter"; +import { DescribeTableCommandInput } from "../commands/DescribeTableCommand"; +import { DynamoDBClient } from "../DynamoDBClient"; +export declare const waitForTableExists: ( + params: WaiterConfiguration, + input: DescribeTableCommandInput +) => Promise; +export declare const waitUntilTableExists: ( + params: WaiterConfiguration, + input: DescribeTableCommandInput +) => Promise; diff --git a/amplify/functions/fetchDocuments/node_modules/@aws-sdk/client-dynamodb/dist-types/ts3.4/waiters/waitForTableNotExists.d.ts b/amplify/functions/fetchDocuments/node_modules/@aws-sdk/client-dynamodb/dist-types/ts3.4/waiters/waitForTableNotExists.d.ts new file mode 100644 index 0000000..1df1b34 --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@aws-sdk/client-dynamodb/dist-types/ts3.4/waiters/waitForTableNotExists.d.ts @@ -0,0 +1,11 @@ +import { WaiterConfiguration, WaiterResult } from "@smithy/util-waiter"; +import { DescribeTableCommandInput } from "../commands/DescribeTableCommand"; +import { DynamoDBClient } from "../DynamoDBClient"; +export declare const waitForTableNotExists: ( + params: WaiterConfiguration, + input: DescribeTableCommandInput +) => Promise; +export declare const waitUntilTableNotExists: ( + params: WaiterConfiguration, + input: DescribeTableCommandInput +) => Promise; diff --git a/amplify/functions/fetchDocuments/node_modules/@aws-sdk/client-dynamodb/dist-types/waiters/index.d.ts b/amplify/functions/fetchDocuments/node_modules/@aws-sdk/client-dynamodb/dist-types/waiters/index.d.ts new file mode 100644 index 0000000..a70fabd --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@aws-sdk/client-dynamodb/dist-types/waiters/index.d.ts @@ -0,0 +1,2 @@ +export * from "./waitForTableExists"; +export * from "./waitForTableNotExists"; diff --git a/amplify/functions/fetchDocuments/node_modules/@aws-sdk/client-dynamodb/dist-types/waiters/waitForTableExists.d.ts 
b/amplify/functions/fetchDocuments/node_modules/@aws-sdk/client-dynamodb/dist-types/waiters/waitForTableExists.d.ts new file mode 100644 index 0000000..a8ae496 --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@aws-sdk/client-dynamodb/dist-types/waiters/waitForTableExists.d.ts @@ -0,0 +1,14 @@ +import { WaiterConfiguration, WaiterResult } from "@smithy/util-waiter"; +import { DescribeTableCommandInput } from "../commands/DescribeTableCommand"; +import { DynamoDBClient } from "../DynamoDBClient"; +/** + * + * @deprecated Use waitUntilTableExists instead. waitForTableExists does not throw error in non-success cases. + */ +export declare const waitForTableExists: (params: WaiterConfiguration, input: DescribeTableCommandInput) => Promise; +/** + * + * @param params - Waiter configuration options. + * @param input - The input to DescribeTableCommand for polling. + */ +export declare const waitUntilTableExists: (params: WaiterConfiguration, input: DescribeTableCommandInput) => Promise; diff --git a/amplify/functions/fetchDocuments/node_modules/@aws-sdk/client-dynamodb/dist-types/waiters/waitForTableNotExists.d.ts b/amplify/functions/fetchDocuments/node_modules/@aws-sdk/client-dynamodb/dist-types/waiters/waitForTableNotExists.d.ts new file mode 100644 index 0000000..df87d8a --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@aws-sdk/client-dynamodb/dist-types/waiters/waitForTableNotExists.d.ts @@ -0,0 +1,14 @@ +import { WaiterConfiguration, WaiterResult } from "@smithy/util-waiter"; +import { DescribeTableCommandInput } from "../commands/DescribeTableCommand"; +import { DynamoDBClient } from "../DynamoDBClient"; +/** + * + * @deprecated Use waitUntilTableNotExists instead. waitForTableNotExists does not throw error in non-success cases. + */ +export declare const waitForTableNotExists: (params: WaiterConfiguration, input: DescribeTableCommandInput) => Promise; +/** + * + * @param params - Waiter configuration options. 
+ * @param input - The input to DescribeTableCommand for polling. + */ +export declare const waitUntilTableNotExists: (params: WaiterConfiguration, input: DescribeTableCommandInput) => Promise; diff --git a/amplify/functions/fetchDocuments/node_modules/@aws-sdk/client-dynamodb/package.json b/amplify/functions/fetchDocuments/node_modules/@aws-sdk/client-dynamodb/package.json new file mode 100644 index 0000000..9377573 --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@aws-sdk/client-dynamodb/package.json @@ -0,0 +1,103 @@ +{ + "name": "@aws-sdk/client-dynamodb", + "description": "AWS SDK for JavaScript Dynamodb Client for Node.js, Browser and React Native", + "version": "3.803.0", + "scripts": { + "build": "concurrently 'yarn:build:cjs' 'yarn:build:es' 'yarn:build:types'", + "build:cjs": "node ../../scripts/compilation/inline client-dynamodb", + "build:es": "tsc -p tsconfig.es.json", + "build:include:deps": "lerna run --scope $npm_package_name --include-dependencies build", + "build:types": "tsc -p tsconfig.types.json", + "build:types:downlevel": "downlevel-dts dist-types dist-types/ts3.4", + "clean": "rimraf ./dist-* && rimraf *.tsbuildinfo", + "extract:docs": "api-extractor run --local", + "generate:client": "node ../../scripts/generate-clients/single-service --solo dynamodb" + }, + "main": "./dist-cjs/index.js", + "types": "./dist-types/index.d.ts", + "module": "./dist-es/index.js", + "sideEffects": false, + "dependencies": { + "@aws-crypto/sha256-browser": "5.2.0", + "@aws-crypto/sha256-js": "5.2.0", + "@aws-sdk/core": "3.799.0", + "@aws-sdk/credential-provider-node": "3.803.0", + "@aws-sdk/middleware-endpoint-discovery": "3.775.0", + "@aws-sdk/middleware-host-header": "3.775.0", + "@aws-sdk/middleware-logger": "3.775.0", + "@aws-sdk/middleware-recursion-detection": "3.775.0", + "@aws-sdk/middleware-user-agent": "3.799.0", + "@aws-sdk/region-config-resolver": "3.775.0", + "@aws-sdk/types": "3.775.0", + "@aws-sdk/util-endpoints": "3.787.0", + 
"@aws-sdk/util-user-agent-browser": "3.775.0", + "@aws-sdk/util-user-agent-node": "3.799.0", + "@smithy/config-resolver": "^4.1.0", + "@smithy/core": "^3.3.0", + "@smithy/fetch-http-handler": "^5.0.2", + "@smithy/hash-node": "^4.0.2", + "@smithy/invalid-dependency": "^4.0.2", + "@smithy/middleware-content-length": "^4.0.2", + "@smithy/middleware-endpoint": "^4.1.1", + "@smithy/middleware-retry": "^4.1.2", + "@smithy/middleware-serde": "^4.0.3", + "@smithy/middleware-stack": "^4.0.2", + "@smithy/node-config-provider": "^4.0.2", + "@smithy/node-http-handler": "^4.0.4", + "@smithy/protocol-http": "^5.1.0", + "@smithy/smithy-client": "^4.2.1", + "@smithy/types": "^4.2.0", + "@smithy/url-parser": "^4.0.2", + "@smithy/util-base64": "^4.0.0", + "@smithy/util-body-length-browser": "^4.0.0", + "@smithy/util-body-length-node": "^4.0.0", + "@smithy/util-defaults-mode-browser": "^4.0.9", + "@smithy/util-defaults-mode-node": "^4.0.9", + "@smithy/util-endpoints": "^3.0.2", + "@smithy/util-middleware": "^4.0.2", + "@smithy/util-retry": "^4.0.3", + "@smithy/util-utf8": "^4.0.0", + "@smithy/util-waiter": "^4.0.3", + "@types/uuid": "^9.0.1", + "tslib": "^2.6.2", + "uuid": "^9.0.1" + }, + "devDependencies": { + "@tsconfig/node18": "18.2.4", + "@types/node": "^18.19.69", + "concurrently": "7.0.0", + "downlevel-dts": "0.10.1", + "rimraf": "3.0.2", + "typescript": "~5.2.2" + }, + "engines": { + "node": ">=18.0.0" + }, + "typesVersions": { + "<4.0": { + "dist-types/*": [ + "dist-types/ts3.4/*" + ] + } + }, + "files": [ + "dist-*/**" + ], + "author": { + "name": "AWS SDK for JavaScript Team", + "url": "https://aws.amazon.com/javascript/" + }, + "license": "Apache-2.0", + "browser": { + "./dist-es/runtimeConfig": "./dist-es/runtimeConfig.browser" + }, + "react-native": { + "./dist-es/runtimeConfig": "./dist-es/runtimeConfig.native" + }, + "homepage": "https://github.com/aws/aws-sdk-js-v3/tree/main/clients/client-dynamodb", + "repository": { + "type": "git", + "url": 
"https://github.com/aws/aws-sdk-js-v3.git", + "directory": "clients/client-dynamodb" + } +} diff --git a/amplify/functions/fetchDocuments/node_modules/@aws-sdk/client-sso/LICENSE b/amplify/functions/fetchDocuments/node_modules/@aws-sdk/client-sso/LICENSE new file mode 100644 index 0000000..dd65ae0 --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@aws-sdk/client-sso/LICENSE @@ -0,0 +1,201 @@ + Apache License + Version 2.0, January 2004 + http://www.apache.org/licenses/ + + TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION + + 1. Definitions. + + "License" shall mean the terms and conditions for use, reproduction, + and distribution as defined by Sections 1 through 9 of this document. + + "Licensor" shall mean the copyright owner or entity authorized by + the copyright owner that is granting the License. + + "Legal Entity" shall mean the union of the acting entity and all + other entities that control, are controlled by, or are under common + control with that entity. For the purposes of this definition, + "control" means (i) the power, direct or indirect, to cause the + direction or management of such entity, whether by contract or + otherwise, or (ii) ownership of fifty percent (50%) or more of the + outstanding shares, or (iii) beneficial ownership of such entity. + + "You" (or "Your") shall mean an individual or Legal Entity + exercising permissions granted by this License. + + "Source" form shall mean the preferred form for making modifications, + including but not limited to software source code, documentation + source, and configuration files. + + "Object" form shall mean any form resulting from mechanical + transformation or translation of a Source form, including but + not limited to compiled object code, generated documentation, + and conversions to other media types. 
+ + "Work" shall mean the work of authorship, whether in Source or + Object form, made available under the License, as indicated by a + copyright notice that is included in or attached to the work + (an example is provided in the Appendix below). + + "Derivative Works" shall mean any work, whether in Source or Object + form, that is based on (or derived from) the Work and for which the + editorial revisions, annotations, elaborations, or other modifications + represent, as a whole, an original work of authorship. For the purposes + of this License, Derivative Works shall not include works that remain + separable from, or merely link (or bind by name) to the interfaces of, + the Work and Derivative Works thereof. + + "Contribution" shall mean any work of authorship, including + the original version of the Work and any modifications or additions + to that Work or Derivative Works thereof, that is intentionally + submitted to Licensor for inclusion in the Work by the copyright owner + or by an individual or Legal Entity authorized to submit on behalf of + the copyright owner. For the purposes of this definition, "submitted" + means any form of electronic, verbal, or written communication sent + to the Licensor or its representatives, including but not limited to + communication on electronic mailing lists, source code control systems, + and issue tracking systems that are managed by, or on behalf of, the + Licensor for the purpose of discussing and improving the Work, but + excluding communication that is conspicuously marked or otherwise + designated in writing by the copyright owner as "Not a Contribution." + + "Contributor" shall mean Licensor and any individual or Legal Entity + on behalf of whom a Contribution has been received by Licensor and + subsequently incorporated within the Work. + + 2. Grant of Copyright License. 
Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + copyright license to reproduce, prepare Derivative Works of, + publicly display, publicly perform, sublicense, and distribute the + Work and such Derivative Works in Source or Object form. + + 3. Grant of Patent License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + (except as stated in this section) patent license to make, have made, + use, offer to sell, sell, import, and otherwise transfer the Work, + where such license applies only to those patent claims licensable + by such Contributor that are necessarily infringed by their + Contribution(s) alone or by combination of their Contribution(s) + with the Work to which such Contribution(s) was submitted. If You + institute patent litigation against any entity (including a + cross-claim or counterclaim in a lawsuit) alleging that the Work + or a Contribution incorporated within the Work constitutes direct + or contributory patent infringement, then any patent licenses + granted to You under this License for that Work shall terminate + as of the date such litigation is filed. + + 4. Redistribution. 
You may reproduce and distribute copies of the + Work or Derivative Works thereof in any medium, with or without + modifications, and in Source or Object form, provided that You + meet the following conditions: + + (a) You must give any other recipients of the Work or + Derivative Works a copy of this License; and + + (b) You must cause any modified files to carry prominent notices + stating that You changed the files; and + + (c) You must retain, in the Source form of any Derivative Works + that You distribute, all copyright, patent, trademark, and + attribution notices from the Source form of the Work, + excluding those notices that do not pertain to any part of + the Derivative Works; and + + (d) If the Work includes a "NOTICE" text file as part of its + distribution, then any Derivative Works that You distribute must + include a readable copy of the attribution notices contained + within such NOTICE file, excluding those notices that do not + pertain to any part of the Derivative Works, in at least one + of the following places: within a NOTICE text file distributed + as part of the Derivative Works; within the Source form or + documentation, if provided along with the Derivative Works; or, + within a display generated by the Derivative Works, if and + wherever such third-party notices normally appear. The contents + of the NOTICE file are for informational purposes only and + do not modify the License. You may add Your own attribution + notices within Derivative Works that You distribute, alongside + or as an addendum to the NOTICE text from the Work, provided + that such additional attribution notices cannot be construed + as modifying the License. 
+ + You may add Your own copyright statement to Your modifications and + may provide additional or different license terms and conditions + for use, reproduction, or distribution of Your modifications, or + for any such Derivative Works as a whole, provided Your use, + reproduction, and distribution of the Work otherwise complies with + the conditions stated in this License. + + 5. Submission of Contributions. Unless You explicitly state otherwise, + any Contribution intentionally submitted for inclusion in the Work + by You to the Licensor shall be under the terms and conditions of + this License, without any additional terms or conditions. + Notwithstanding the above, nothing herein shall supersede or modify + the terms of any separate license agreement you may have executed + with Licensor regarding such Contributions. + + 6. Trademarks. This License does not grant permission to use the trade + names, trademarks, service marks, or product names of the Licensor, + except as required for reasonable and customary use in describing the + origin of the Work and reproducing the content of the NOTICE file. + + 7. Disclaimer of Warranty. Unless required by applicable law or + agreed to in writing, Licensor provides the Work (and each + Contributor provides its Contributions) on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or + implied, including, without limitation, any warranties or conditions + of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A + PARTICULAR PURPOSE. You are solely responsible for determining the + appropriateness of using or redistributing the Work and assume any + risks associated with Your exercise of permissions under this License. + + 8. Limitation of Liability. 
In no event and under no legal theory, + whether in tort (including negligence), contract, or otherwise, + unless required by applicable law (such as deliberate and grossly + negligent acts) or agreed to in writing, shall any Contributor be + liable to You for damages, including any direct, indirect, special, + incidental, or consequential damages of any character arising as a + result of this License or out of the use or inability to use the + Work (including but not limited to damages for loss of goodwill, + work stoppage, computer failure or malfunction, or any and all + other commercial damages or losses), even if such Contributor + has been advised of the possibility of such damages. + + 9. Accepting Warranty or Additional Liability. While redistributing + the Work or Derivative Works thereof, You may choose to offer, + and charge a fee for, acceptance of support, warranty, indemnity, + or other liability obligations and/or rights consistent with this + License. However, in accepting such obligations, You may act only + on Your own behalf and on Your sole responsibility, not on behalf + of any other Contributor, and only if You agree to indemnify, + defend, and hold each Contributor harmless for any liability + incurred by, or claims asserted against, such Contributor by reason + of your accepting any such warranty or additional liability. + + END OF TERMS AND CONDITIONS + + APPENDIX: How to apply the Apache License to your work. + + To apply the Apache License to your work, attach the following + boilerplate notice, with the fields enclosed by brackets "{}" + replaced with your own identifying information. (Don't include + the brackets!) The text should be enclosed in the appropriate + comment syntax for the file format. We also recommend that a + file or class name and description of purpose be included on the + same "printed page" as the copyright notice for easier + identification within third-party archives. + + Copyright 2018-2020 Amazon.com, Inc. 
or its affiliates. All Rights Reserved. + + Licensed under the Apache License, Version 2.0 (the "License"); + you may not use this file except in compliance with the License. + You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + + Unless required by applicable law or agreed to in writing, software + distributed under the License is distributed on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + See the License for the specific language governing permissions and + limitations under the License. diff --git a/amplify/functions/fetchDocuments/node_modules/@aws-sdk/client-sso/README.md b/amplify/functions/fetchDocuments/node_modules/@aws-sdk/client-sso/README.md new file mode 100644 index 0000000..09d5fe3 --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@aws-sdk/client-sso/README.md @@ -0,0 +1,252 @@ + + +# @aws-sdk/client-sso + +## Description + +AWS SDK for JavaScript SSO Client for Node.js, Browser and React Native. + +

AWS IAM Identity Center (successor to AWS Single Sign-On) Portal is a web service that makes it easy for you to assign user access to +IAM Identity Center resources such as the AWS access portal. Users can get AWS account applications and roles +assigned to them and get federated into the application.

+ +

Although AWS Single Sign-On was renamed, the sso and +identitystore API namespaces will continue to retain their original name for +backward compatibility purposes. For more information, see IAM Identity Center rename.

+
+

This reference guide describes the IAM Identity Center Portal operations that you can call +programatically and includes detailed information on data types and errors.

+ +

AWS provides SDKs that consist of libraries and sample code for various programming +languages and platforms, such as Java, Ruby, .Net, iOS, or Android. The SDKs provide a +convenient way to create programmatic access to IAM Identity Center and other AWS services. For more +information about the AWS SDKs, including how to download and install them, see Tools for Amazon Web Services.

+
+ +## Installing + +To install this package, simply type add or install @aws-sdk/client-sso +using your favorite package manager: + +- `npm install @aws-sdk/client-sso` +- `yarn add @aws-sdk/client-sso` +- `pnpm add @aws-sdk/client-sso` + +## Getting Started + +### Import + +The AWS SDK is modulized by clients and commands. +To send a request, you only need to import the `SSOClient` and +the commands you need, for example `ListAccountsCommand`: + +```js +// ES5 example +const { SSOClient, ListAccountsCommand } = require("@aws-sdk/client-sso"); +``` + +```ts +// ES6+ example +import { SSOClient, ListAccountsCommand } from "@aws-sdk/client-sso"; +``` + +### Usage + +To send a request, you: + +- Initiate client with configuration (e.g. credentials, region). +- Initiate command with input parameters. +- Call `send` operation on client with command object as input. +- If you are using a custom http handler, you may call `destroy()` to close open connections. + +```js +// a client can be shared by different commands. +const client = new SSOClient({ region: "REGION" }); + +const params = { + /** input parameters */ +}; +const command = new ListAccountsCommand(params); +``` + +#### Async/await + +We recommend using [await](https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Operators/await) +operator to wait for the promise returned by send operation as follows: + +```js +// async/await. +try { + const data = await client.send(command); + // process data. +} catch (error) { + // error handling. +} finally { + // finally. +} +``` + +Async-await is clean, concise, intuitive, easy to debug and has better error handling +as compared to using Promise chains or callbacks. + +#### Promises + +You can also use [Promise chaining](https://developer.mozilla.org/en-US/docs/Web/JavaScript/Guide/Using_promises#chaining) +to execute send operation. + +```js +client.send(command).then( + (data) => { + // process data. + }, + (error) => { + // error handling. 
+ } +); +``` + +Promises can also be called using `.catch()` and `.finally()` as follows: + +```js +client + .send(command) + .then((data) => { + // process data. + }) + .catch((error) => { + // error handling. + }) + .finally(() => { + // finally. + }); +``` + +#### Callbacks + +We do not recommend using callbacks because of [callback hell](http://callbackhell.com/), +but they are supported by the send operation. + +```js +// callbacks. +client.send(command, (err, data) => { + // process err and data. +}); +``` + +#### v2 compatible style + +The client can also send requests using v2 compatible style. +However, it results in a bigger bundle size and may be dropped in next major version. More details in the blog post +on [modular packages in AWS SDK for JavaScript](https://aws.amazon.com/blogs/developer/modular-packages-in-aws-sdk-for-javascript/) + +```ts +import * as AWS from "@aws-sdk/client-sso"; +const client = new AWS.SSO({ region: "REGION" }); + +// async/await. +try { + const data = await client.listAccounts(params); + // process data. +} catch (error) { + // error handling. +} + +// Promises. +client + .listAccounts(params) + .then((data) => { + // process data. + }) + .catch((error) => { + // error handling. + }); + +// callbacks. +client.listAccounts(params, (err, data) => { + // process err and data. +}); +``` + +### Troubleshooting + +When the service returns an exception, the error will include the exception information, +as well as response metadata (e.g. request id). + +```js +try { + const data = await client.send(command); + // process data. +} catch (error) { + const { requestId, cfId, extendedRequestId } = error.$metadata; + console.log({ requestId, cfId, extendedRequestId }); + /** + * The keys within exceptions are also parsed. 
+ * You can access them by specifying exception names: + * if (error.name === 'SomeServiceException') { + * const value = error.specialKeyInException; + * } + */ +} +``` + +## Getting Help + +Please use these community resources for getting help. +We use the GitHub issues for tracking bugs and feature requests, but have limited bandwidth to address them. + +- Visit [Developer Guide](https://docs.aws.amazon.com/sdk-for-javascript/v3/developer-guide/welcome.html) + or [API Reference](https://docs.aws.amazon.com/AWSJavaScriptSDK/v3/latest/index.html). +- Check out the blog posts tagged with [`aws-sdk-js`](https://aws.amazon.com/blogs/developer/tag/aws-sdk-js/) + on AWS Developer Blog. +- Ask a question on [StackOverflow](https://stackoverflow.com/questions/tagged/aws-sdk-js) and tag it with `aws-sdk-js`. +- Join the AWS JavaScript community on [gitter](https://gitter.im/aws/aws-sdk-js-v3). +- If it turns out that you may have found a bug, please [open an issue](https://github.com/aws/aws-sdk-js-v3/issues/new/choose). + +To test your universal JavaScript code in Node.js, browser and react-native environments, +visit our [code samples repo](https://github.com/aws-samples/aws-sdk-js-tests). + +## Contributing + +This client code is generated automatically. Any modifications will be overwritten the next time the `@aws-sdk/client-sso` package is updated. +To contribute to client you can check our [generate clients scripts](https://github.com/aws/aws-sdk-js-v3/tree/main/scripts/generate-clients). + +## License + +This SDK is distributed under the +[Apache License, Version 2.0](http://www.apache.org/licenses/LICENSE-2.0), +see LICENSE for more information. + +## Client Commands (Operations List) + +
+ +GetRoleCredentials + + +[Command API Reference](https://docs.aws.amazon.com/AWSJavaScriptSDK/v3/latest/client/sso/command/GetRoleCredentialsCommand/) / [Input](https://docs.aws.amazon.com/AWSJavaScriptSDK/v3/latest/Package/-aws-sdk-client-sso/Interface/GetRoleCredentialsCommandInput/) / [Output](https://docs.aws.amazon.com/AWSJavaScriptSDK/v3/latest/Package/-aws-sdk-client-sso/Interface/GetRoleCredentialsCommandOutput/) + +
+
+ +ListAccountRoles + + +[Command API Reference](https://docs.aws.amazon.com/AWSJavaScriptSDK/v3/latest/client/sso/command/ListAccountRolesCommand/) / [Input](https://docs.aws.amazon.com/AWSJavaScriptSDK/v3/latest/Package/-aws-sdk-client-sso/Interface/ListAccountRolesCommandInput/) / [Output](https://docs.aws.amazon.com/AWSJavaScriptSDK/v3/latest/Package/-aws-sdk-client-sso/Interface/ListAccountRolesCommandOutput/) + +
+
+ +ListAccounts + + +[Command API Reference](https://docs.aws.amazon.com/AWSJavaScriptSDK/v3/latest/client/sso/command/ListAccountsCommand/) / [Input](https://docs.aws.amazon.com/AWSJavaScriptSDK/v3/latest/Package/-aws-sdk-client-sso/Interface/ListAccountsCommandInput/) / [Output](https://docs.aws.amazon.com/AWSJavaScriptSDK/v3/latest/Package/-aws-sdk-client-sso/Interface/ListAccountsCommandOutput/) + +
+
+ +Logout + + +[Command API Reference](https://docs.aws.amazon.com/AWSJavaScriptSDK/v3/latest/client/sso/command/LogoutCommand/) / [Input](https://docs.aws.amazon.com/AWSJavaScriptSDK/v3/latest/Package/-aws-sdk-client-sso/Interface/LogoutCommandInput/) / [Output](https://docs.aws.amazon.com/AWSJavaScriptSDK/v3/latest/Package/-aws-sdk-client-sso/Interface/LogoutCommandOutput/) + +
diff --git a/amplify/functions/fetchDocuments/node_modules/@aws-sdk/client-sso/dist-cjs/auth/httpAuthSchemeProvider.js b/amplify/functions/fetchDocuments/node_modules/@aws-sdk/client-sso/dist-cjs/auth/httpAuthSchemeProvider.js new file mode 100644 index 0000000..2c256ee --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@aws-sdk/client-sso/dist-cjs/auth/httpAuthSchemeProvider.js @@ -0,0 +1,68 @@ +"use strict"; +Object.defineProperty(exports, "__esModule", { value: true }); +exports.resolveHttpAuthSchemeConfig = exports.defaultSSOHttpAuthSchemeProvider = exports.defaultSSOHttpAuthSchemeParametersProvider = void 0; +const core_1 = require("@aws-sdk/core"); +const util_middleware_1 = require("@smithy/util-middleware"); +const defaultSSOHttpAuthSchemeParametersProvider = async (config, context, input) => { + return { + operation: (0, util_middleware_1.getSmithyContext)(context).operation, + region: (await (0, util_middleware_1.normalizeProvider)(config.region)()) || + (() => { + throw new Error("expected `region` to be configured for `aws.auth#sigv4`"); + })(), + }; +}; +exports.defaultSSOHttpAuthSchemeParametersProvider = defaultSSOHttpAuthSchemeParametersProvider; +function createAwsAuthSigv4HttpAuthOption(authParameters) { + return { + schemeId: "aws.auth#sigv4", + signingProperties: { + name: "awsssoportal", + region: authParameters.region, + }, + propertiesExtractor: (config, context) => ({ + signingProperties: { + config, + context, + }, + }), + }; +} +function createSmithyApiNoAuthHttpAuthOption(authParameters) { + return { + schemeId: "smithy.api#noAuth", + }; +} +const defaultSSOHttpAuthSchemeProvider = (authParameters) => { + const options = []; + switch (authParameters.operation) { + case "GetRoleCredentials": { + options.push(createSmithyApiNoAuthHttpAuthOption(authParameters)); + break; + } + case "ListAccountRoles": { + options.push(createSmithyApiNoAuthHttpAuthOption(authParameters)); + break; + } + case "ListAccounts": { + 
options.push(createSmithyApiNoAuthHttpAuthOption(authParameters)); + break; + } + case "Logout": { + options.push(createSmithyApiNoAuthHttpAuthOption(authParameters)); + break; + } + default: { + options.push(createAwsAuthSigv4HttpAuthOption(authParameters)); + } + } + return options; +}; +exports.defaultSSOHttpAuthSchemeProvider = defaultSSOHttpAuthSchemeProvider; +const resolveHttpAuthSchemeConfig = (config) => { + const config_0 = (0, core_1.resolveAwsSdkSigV4Config)(config); + return Object.assign(config_0, { + authSchemePreference: (0, util_middleware_1.normalizeProvider)(config.authSchemePreference ?? []), + }); +}; +exports.resolveHttpAuthSchemeConfig = resolveHttpAuthSchemeConfig; diff --git a/amplify/functions/fetchDocuments/node_modules/@aws-sdk/client-sso/dist-cjs/endpoint/endpointResolver.js b/amplify/functions/fetchDocuments/node_modules/@aws-sdk/client-sso/dist-cjs/endpoint/endpointResolver.js new file mode 100644 index 0000000..7258a35 --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@aws-sdk/client-sso/dist-cjs/endpoint/endpointResolver.js @@ -0,0 +1,18 @@ +"use strict"; +Object.defineProperty(exports, "__esModule", { value: true }); +exports.defaultEndpointResolver = void 0; +const util_endpoints_1 = require("@aws-sdk/util-endpoints"); +const util_endpoints_2 = require("@smithy/util-endpoints"); +const ruleset_1 = require("./ruleset"); +const cache = new util_endpoints_2.EndpointCache({ + size: 50, + params: ["Endpoint", "Region", "UseDualStack", "UseFIPS"], +}); +const defaultEndpointResolver = (endpointParams, context = {}) => { + return cache.get(endpointParams, () => (0, util_endpoints_2.resolveEndpoint)(ruleset_1.ruleSet, { + endpointParams: endpointParams, + logger: context.logger, + })); +}; +exports.defaultEndpointResolver = defaultEndpointResolver; +util_endpoints_2.customEndpointFunctions.aws = util_endpoints_1.awsEndpointFunctions; diff --git 
a/amplify/functions/fetchDocuments/node_modules/@aws-sdk/client-sso/dist-cjs/endpoint/ruleset.js b/amplify/functions/fetchDocuments/node_modules/@aws-sdk/client-sso/dist-cjs/endpoint/ruleset.js new file mode 100644 index 0000000..4321ed9 --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@aws-sdk/client-sso/dist-cjs/endpoint/ruleset.js @@ -0,0 +1,7 @@ +"use strict"; +Object.defineProperty(exports, "__esModule", { value: true }); +exports.ruleSet = void 0; +const u = "required", v = "fn", w = "argv", x = "ref"; +const a = true, b = "isSet", c = "booleanEquals", d = "error", e = "endpoint", f = "tree", g = "PartitionResult", h = "getAttr", i = { [u]: false, "type": "String" }, j = { [u]: true, "default": false, "type": "Boolean" }, k = { [x]: "Endpoint" }, l = { [v]: c, [w]: [{ [x]: "UseFIPS" }, true] }, m = { [v]: c, [w]: [{ [x]: "UseDualStack" }, true] }, n = {}, o = { [v]: h, [w]: [{ [x]: g }, "supportsFIPS"] }, p = { [x]: g }, q = { [v]: c, [w]: [true, { [v]: h, [w]: [p, "supportsDualStack"] }] }, r = [l], s = [m], t = [{ [x]: "Region" }]; +const _data = { version: "1.0", parameters: { Region: i, UseDualStack: j, UseFIPS: j, Endpoint: i }, rules: [{ conditions: [{ [v]: b, [w]: [k] }], rules: [{ conditions: r, error: "Invalid Configuration: FIPS and custom endpoint are not supported", type: d }, { conditions: s, error: "Invalid Configuration: Dualstack and custom endpoint are not supported", type: d }, { endpoint: { url: k, properties: n, headers: n }, type: e }], type: f }, { conditions: [{ [v]: b, [w]: t }], rules: [{ conditions: [{ [v]: "aws.partition", [w]: t, assign: g }], rules: [{ conditions: [l, m], rules: [{ conditions: [{ [v]: c, [w]: [a, o] }, q], rules: [{ endpoint: { url: "https://portal.sso-fips.{Region}.{PartitionResult#dualStackDnsSuffix}", properties: n, headers: n }, type: e }], type: f }, { error: "FIPS and DualStack are enabled, but this partition does not support one or both", type: d }], type: f }, { conditions: r, rules: [{ 
conditions: [{ [v]: c, [w]: [o, a] }], rules: [{ conditions: [{ [v]: "stringEquals", [w]: [{ [v]: h, [w]: [p, "name"] }, "aws-us-gov"] }], endpoint: { url: "https://portal.sso.{Region}.amazonaws.com", properties: n, headers: n }, type: e }, { endpoint: { url: "https://portal.sso-fips.{Region}.{PartitionResult#dnsSuffix}", properties: n, headers: n }, type: e }], type: f }, { error: "FIPS is enabled but this partition does not support FIPS", type: d }], type: f }, { conditions: s, rules: [{ conditions: [q], rules: [{ endpoint: { url: "https://portal.sso.{Region}.{PartitionResult#dualStackDnsSuffix}", properties: n, headers: n }, type: e }], type: f }, { error: "DualStack is enabled but this partition does not support DualStack", type: d }], type: f }, { endpoint: { url: "https://portal.sso.{Region}.{PartitionResult#dnsSuffix}", properties: n, headers: n }, type: e }], type: f }], type: f }, { error: "Invalid Configuration: Missing Region", type: d }] }; +exports.ruleSet = _data; diff --git a/amplify/functions/fetchDocuments/node_modules/@aws-sdk/client-sso/dist-cjs/index.js b/amplify/functions/fetchDocuments/node_modules/@aws-sdk/client-sso/dist-cjs/index.js new file mode 100644 index 0000000..8383b07 --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@aws-sdk/client-sso/dist-cjs/index.js @@ -0,0 +1,625 @@ +"use strict"; +var __defProp = Object.defineProperty; +var __getOwnPropDesc = Object.getOwnPropertyDescriptor; +var __getOwnPropNames = Object.getOwnPropertyNames; +var __hasOwnProp = Object.prototype.hasOwnProperty; +var __name = (target, value) => __defProp(target, "name", { value, configurable: true }); +var __export = (target, all) => { + for (var name in all) + __defProp(target, name, { get: all[name], enumerable: true }); +}; +var __copyProps = (to, from, except, desc) => { + if (from && typeof from === "object" || typeof from === "function") { + for (let key of __getOwnPropNames(from)) + if (!__hasOwnProp.call(to, key) && key !== except) + 
__defProp(to, key, { get: () => from[key], enumerable: !(desc = __getOwnPropDesc(from, key)) || desc.enumerable }); + } + return to; +}; +var __toCommonJS = (mod) => __copyProps(__defProp({}, "__esModule", { value: true }), mod); + +// src/index.ts +var index_exports = {}; +__export(index_exports, { + GetRoleCredentialsCommand: () => GetRoleCredentialsCommand, + GetRoleCredentialsRequestFilterSensitiveLog: () => GetRoleCredentialsRequestFilterSensitiveLog, + GetRoleCredentialsResponseFilterSensitiveLog: () => GetRoleCredentialsResponseFilterSensitiveLog, + InvalidRequestException: () => InvalidRequestException, + ListAccountRolesCommand: () => ListAccountRolesCommand, + ListAccountRolesRequestFilterSensitiveLog: () => ListAccountRolesRequestFilterSensitiveLog, + ListAccountsCommand: () => ListAccountsCommand, + ListAccountsRequestFilterSensitiveLog: () => ListAccountsRequestFilterSensitiveLog, + LogoutCommand: () => LogoutCommand, + LogoutRequestFilterSensitiveLog: () => LogoutRequestFilterSensitiveLog, + ResourceNotFoundException: () => ResourceNotFoundException, + RoleCredentialsFilterSensitiveLog: () => RoleCredentialsFilterSensitiveLog, + SSO: () => SSO, + SSOClient: () => SSOClient, + SSOServiceException: () => SSOServiceException, + TooManyRequestsException: () => TooManyRequestsException, + UnauthorizedException: () => UnauthorizedException, + __Client: () => import_smithy_client.Client, + paginateListAccountRoles: () => paginateListAccountRoles, + paginateListAccounts: () => paginateListAccounts +}); +module.exports = __toCommonJS(index_exports); + +// src/SSOClient.ts +var import_middleware_host_header = require("@aws-sdk/middleware-host-header"); +var import_middleware_logger = require("@aws-sdk/middleware-logger"); +var import_middleware_recursion_detection = require("@aws-sdk/middleware-recursion-detection"); +var import_middleware_user_agent = require("@aws-sdk/middleware-user-agent"); +var import_config_resolver = require("@smithy/config-resolver"); 
+var import_core = require("@smithy/core"); +var import_middleware_content_length = require("@smithy/middleware-content-length"); +var import_middleware_endpoint = require("@smithy/middleware-endpoint"); +var import_middleware_retry = require("@smithy/middleware-retry"); + +var import_httpAuthSchemeProvider = require("./auth/httpAuthSchemeProvider"); + +// src/endpoint/EndpointParameters.ts +var resolveClientEndpointParameters = /* @__PURE__ */ __name((options) => { + return Object.assign(options, { + useDualstackEndpoint: options.useDualstackEndpoint ?? false, + useFipsEndpoint: options.useFipsEndpoint ?? false, + defaultSigningName: "awsssoportal" + }); +}, "resolveClientEndpointParameters"); +var commonParams = { + UseFIPS: { type: "builtInParams", name: "useFipsEndpoint" }, + Endpoint: { type: "builtInParams", name: "endpoint" }, + Region: { type: "builtInParams", name: "region" }, + UseDualStack: { type: "builtInParams", name: "useDualstackEndpoint" } +}; + +// src/SSOClient.ts +var import_runtimeConfig = require("././runtimeConfig"); + +// src/runtimeExtensions.ts +var import_region_config_resolver = require("@aws-sdk/region-config-resolver"); +var import_protocol_http = require("@smithy/protocol-http"); +var import_smithy_client = require("@smithy/smithy-client"); + +// src/auth/httpAuthExtensionConfiguration.ts +var getHttpAuthExtensionConfiguration = /* @__PURE__ */ __name((runtimeConfig) => { + const _httpAuthSchemes = runtimeConfig.httpAuthSchemes; + let _httpAuthSchemeProvider = runtimeConfig.httpAuthSchemeProvider; + let _credentials = runtimeConfig.credentials; + return { + setHttpAuthScheme(httpAuthScheme) { + const index = _httpAuthSchemes.findIndex((scheme) => scheme.schemeId === httpAuthScheme.schemeId); + if (index === -1) { + _httpAuthSchemes.push(httpAuthScheme); + } else { + _httpAuthSchemes.splice(index, 1, httpAuthScheme); + } + }, + httpAuthSchemes() { + return _httpAuthSchemes; + }, + setHttpAuthSchemeProvider(httpAuthSchemeProvider) { + 
_httpAuthSchemeProvider = httpAuthSchemeProvider; + }, + httpAuthSchemeProvider() { + return _httpAuthSchemeProvider; + }, + setCredentials(credentials) { + _credentials = credentials; + }, + credentials() { + return _credentials; + } + }; +}, "getHttpAuthExtensionConfiguration"); +var resolveHttpAuthRuntimeConfig = /* @__PURE__ */ __name((config) => { + return { + httpAuthSchemes: config.httpAuthSchemes(), + httpAuthSchemeProvider: config.httpAuthSchemeProvider(), + credentials: config.credentials() + }; +}, "resolveHttpAuthRuntimeConfig"); + +// src/runtimeExtensions.ts +var resolveRuntimeExtensions = /* @__PURE__ */ __name((runtimeConfig, extensions) => { + const extensionConfiguration = Object.assign( + (0, import_region_config_resolver.getAwsRegionExtensionConfiguration)(runtimeConfig), + (0, import_smithy_client.getDefaultExtensionConfiguration)(runtimeConfig), + (0, import_protocol_http.getHttpHandlerExtensionConfiguration)(runtimeConfig), + getHttpAuthExtensionConfiguration(runtimeConfig) + ); + extensions.forEach((extension) => extension.configure(extensionConfiguration)); + return Object.assign( + runtimeConfig, + (0, import_region_config_resolver.resolveAwsRegionExtensionConfiguration)(extensionConfiguration), + (0, import_smithy_client.resolveDefaultRuntimeConfig)(extensionConfiguration), + (0, import_protocol_http.resolveHttpHandlerRuntimeConfig)(extensionConfiguration), + resolveHttpAuthRuntimeConfig(extensionConfiguration) + ); +}, "resolveRuntimeExtensions"); + +// src/SSOClient.ts +var SSOClient = class extends import_smithy_client.Client { + static { + __name(this, "SSOClient"); + } + /** + * The resolved configuration of SSOClient class. This is resolved and normalized from the {@link SSOClientConfig | constructor configuration interface}. 
+ */ + config; + constructor(...[configuration]) { + const _config_0 = (0, import_runtimeConfig.getRuntimeConfig)(configuration || {}); + super(_config_0); + this.initConfig = _config_0; + const _config_1 = resolveClientEndpointParameters(_config_0); + const _config_2 = (0, import_middleware_user_agent.resolveUserAgentConfig)(_config_1); + const _config_3 = (0, import_middleware_retry.resolveRetryConfig)(_config_2); + const _config_4 = (0, import_config_resolver.resolveRegionConfig)(_config_3); + const _config_5 = (0, import_middleware_host_header.resolveHostHeaderConfig)(_config_4); + const _config_6 = (0, import_middleware_endpoint.resolveEndpointConfig)(_config_5); + const _config_7 = (0, import_httpAuthSchemeProvider.resolveHttpAuthSchemeConfig)(_config_6); + const _config_8 = resolveRuntimeExtensions(_config_7, configuration?.extensions || []); + this.config = _config_8; + this.middlewareStack.use((0, import_middleware_user_agent.getUserAgentPlugin)(this.config)); + this.middlewareStack.use((0, import_middleware_retry.getRetryPlugin)(this.config)); + this.middlewareStack.use((0, import_middleware_content_length.getContentLengthPlugin)(this.config)); + this.middlewareStack.use((0, import_middleware_host_header.getHostHeaderPlugin)(this.config)); + this.middlewareStack.use((0, import_middleware_logger.getLoggerPlugin)(this.config)); + this.middlewareStack.use((0, import_middleware_recursion_detection.getRecursionDetectionPlugin)(this.config)); + this.middlewareStack.use( + (0, import_core.getHttpAuthSchemeEndpointRuleSetPlugin)(this.config, { + httpAuthSchemeParametersProvider: import_httpAuthSchemeProvider.defaultSSOHttpAuthSchemeParametersProvider, + identityProviderConfigProvider: /* @__PURE__ */ __name(async (config) => new import_core.DefaultIdentityProviderConfig({ + "aws.auth#sigv4": config.credentials + }), "identityProviderConfigProvider") + }) + ); + this.middlewareStack.use((0, import_core.getHttpSigningPlugin)(this.config)); + } + /** + * Destroy 
underlying resources, like sockets. It's usually not necessary to do this. + * However in Node.js, it's best to explicitly shut down the client's agent when it is no longer needed. + * Otherwise, sockets might stay open for quite a long time before the server terminates them. + */ + destroy() { + super.destroy(); + } +}; + +// src/SSO.ts + + +// src/commands/GetRoleCredentialsCommand.ts + +var import_middleware_serde = require("@smithy/middleware-serde"); + + +// src/models/models_0.ts + + +// src/models/SSOServiceException.ts + +var SSOServiceException = class _SSOServiceException extends import_smithy_client.ServiceException { + static { + __name(this, "SSOServiceException"); + } + /** + * @internal + */ + constructor(options) { + super(options); + Object.setPrototypeOf(this, _SSOServiceException.prototype); + } +}; + +// src/models/models_0.ts +var InvalidRequestException = class _InvalidRequestException extends SSOServiceException { + static { + __name(this, "InvalidRequestException"); + } + name = "InvalidRequestException"; + $fault = "client"; + /** + * @internal + */ + constructor(opts) { + super({ + name: "InvalidRequestException", + $fault: "client", + ...opts + }); + Object.setPrototypeOf(this, _InvalidRequestException.prototype); + } +}; +var ResourceNotFoundException = class _ResourceNotFoundException extends SSOServiceException { + static { + __name(this, "ResourceNotFoundException"); + } + name = "ResourceNotFoundException"; + $fault = "client"; + /** + * @internal + */ + constructor(opts) { + super({ + name: "ResourceNotFoundException", + $fault: "client", + ...opts + }); + Object.setPrototypeOf(this, _ResourceNotFoundException.prototype); + } +}; +var TooManyRequestsException = class _TooManyRequestsException extends SSOServiceException { + static { + __name(this, "TooManyRequestsException"); + } + name = "TooManyRequestsException"; + $fault = "client"; + /** + * @internal + */ + constructor(opts) { + super({ + name: "TooManyRequestsException", + 
$fault: "client", + ...opts + }); + Object.setPrototypeOf(this, _TooManyRequestsException.prototype); + } +}; +var UnauthorizedException = class _UnauthorizedException extends SSOServiceException { + static { + __name(this, "UnauthorizedException"); + } + name = "UnauthorizedException"; + $fault = "client"; + /** + * @internal + */ + constructor(opts) { + super({ + name: "UnauthorizedException", + $fault: "client", + ...opts + }); + Object.setPrototypeOf(this, _UnauthorizedException.prototype); + } +}; +var GetRoleCredentialsRequestFilterSensitiveLog = /* @__PURE__ */ __name((obj) => ({ + ...obj, + ...obj.accessToken && { accessToken: import_smithy_client.SENSITIVE_STRING } +}), "GetRoleCredentialsRequestFilterSensitiveLog"); +var RoleCredentialsFilterSensitiveLog = /* @__PURE__ */ __name((obj) => ({ + ...obj, + ...obj.secretAccessKey && { secretAccessKey: import_smithy_client.SENSITIVE_STRING }, + ...obj.sessionToken && { sessionToken: import_smithy_client.SENSITIVE_STRING } +}), "RoleCredentialsFilterSensitiveLog"); +var GetRoleCredentialsResponseFilterSensitiveLog = /* @__PURE__ */ __name((obj) => ({ + ...obj, + ...obj.roleCredentials && { roleCredentials: RoleCredentialsFilterSensitiveLog(obj.roleCredentials) } +}), "GetRoleCredentialsResponseFilterSensitiveLog"); +var ListAccountRolesRequestFilterSensitiveLog = /* @__PURE__ */ __name((obj) => ({ + ...obj, + ...obj.accessToken && { accessToken: import_smithy_client.SENSITIVE_STRING } +}), "ListAccountRolesRequestFilterSensitiveLog"); +var ListAccountsRequestFilterSensitiveLog = /* @__PURE__ */ __name((obj) => ({ + ...obj, + ...obj.accessToken && { accessToken: import_smithy_client.SENSITIVE_STRING } +}), "ListAccountsRequestFilterSensitiveLog"); +var LogoutRequestFilterSensitiveLog = /* @__PURE__ */ __name((obj) => ({ + ...obj, + ...obj.accessToken && { accessToken: import_smithy_client.SENSITIVE_STRING } +}), "LogoutRequestFilterSensitiveLog"); + +// src/protocols/Aws_restJson1.ts +var import_core2 = 
require("@aws-sdk/core"); + + +var se_GetRoleCredentialsCommand = /* @__PURE__ */ __name(async (input, context) => { + const b = (0, import_core.requestBuilder)(input, context); + const headers = (0, import_smithy_client.map)({}, import_smithy_client.isSerializableHeaderValue, { + [_xasbt]: input[_aT] + }); + b.bp("/federation/credentials"); + const query = (0, import_smithy_client.map)({ + [_rn]: [, (0, import_smithy_client.expectNonNull)(input[_rN], `roleName`)], + [_ai]: [, (0, import_smithy_client.expectNonNull)(input[_aI], `accountId`)] + }); + let body; + b.m("GET").h(headers).q(query).b(body); + return b.build(); +}, "se_GetRoleCredentialsCommand"); +var se_ListAccountRolesCommand = /* @__PURE__ */ __name(async (input, context) => { + const b = (0, import_core.requestBuilder)(input, context); + const headers = (0, import_smithy_client.map)({}, import_smithy_client.isSerializableHeaderValue, { + [_xasbt]: input[_aT] + }); + b.bp("/assignment/roles"); + const query = (0, import_smithy_client.map)({ + [_nt]: [, input[_nT]], + [_mr]: [() => input.maxResults !== void 0, () => input[_mR].toString()], + [_ai]: [, (0, import_smithy_client.expectNonNull)(input[_aI], `accountId`)] + }); + let body; + b.m("GET").h(headers).q(query).b(body); + return b.build(); +}, "se_ListAccountRolesCommand"); +var se_ListAccountsCommand = /* @__PURE__ */ __name(async (input, context) => { + const b = (0, import_core.requestBuilder)(input, context); + const headers = (0, import_smithy_client.map)({}, import_smithy_client.isSerializableHeaderValue, { + [_xasbt]: input[_aT] + }); + b.bp("/assignment/accounts"); + const query = (0, import_smithy_client.map)({ + [_nt]: [, input[_nT]], + [_mr]: [() => input.maxResults !== void 0, () => input[_mR].toString()] + }); + let body; + b.m("GET").h(headers).q(query).b(body); + return b.build(); +}, "se_ListAccountsCommand"); +var se_LogoutCommand = /* @__PURE__ */ __name(async (input, context) => { + const b = (0, 
import_core.requestBuilder)(input, context); + const headers = (0, import_smithy_client.map)({}, import_smithy_client.isSerializableHeaderValue, { + [_xasbt]: input[_aT] + }); + b.bp("/logout"); + let body; + b.m("POST").h(headers).b(body); + return b.build(); +}, "se_LogoutCommand"); +var de_GetRoleCredentialsCommand = /* @__PURE__ */ __name(async (output, context) => { + if (output.statusCode !== 200 && output.statusCode >= 300) { + return de_CommandError(output, context); + } + const contents = (0, import_smithy_client.map)({ + $metadata: deserializeMetadata(output) + }); + const data = (0, import_smithy_client.expectNonNull)((0, import_smithy_client.expectObject)(await (0, import_core2.parseJsonBody)(output.body, context)), "body"); + const doc = (0, import_smithy_client.take)(data, { + roleCredentials: import_smithy_client._json + }); + Object.assign(contents, doc); + return contents; +}, "de_GetRoleCredentialsCommand"); +var de_ListAccountRolesCommand = /* @__PURE__ */ __name(async (output, context) => { + if (output.statusCode !== 200 && output.statusCode >= 300) { + return de_CommandError(output, context); + } + const contents = (0, import_smithy_client.map)({ + $metadata: deserializeMetadata(output) + }); + const data = (0, import_smithy_client.expectNonNull)((0, import_smithy_client.expectObject)(await (0, import_core2.parseJsonBody)(output.body, context)), "body"); + const doc = (0, import_smithy_client.take)(data, { + nextToken: import_smithy_client.expectString, + roleList: import_smithy_client._json + }); + Object.assign(contents, doc); + return contents; +}, "de_ListAccountRolesCommand"); +var de_ListAccountsCommand = /* @__PURE__ */ __name(async (output, context) => { + if (output.statusCode !== 200 && output.statusCode >= 300) { + return de_CommandError(output, context); + } + const contents = (0, import_smithy_client.map)({ + $metadata: deserializeMetadata(output) + }); + const data = (0, import_smithy_client.expectNonNull)((0, 
import_smithy_client.expectObject)(await (0, import_core2.parseJsonBody)(output.body, context)), "body"); + const doc = (0, import_smithy_client.take)(data, { + accountList: import_smithy_client._json, + nextToken: import_smithy_client.expectString + }); + Object.assign(contents, doc); + return contents; +}, "de_ListAccountsCommand"); +var de_LogoutCommand = /* @__PURE__ */ __name(async (output, context) => { + if (output.statusCode !== 200 && output.statusCode >= 300) { + return de_CommandError(output, context); + } + const contents = (0, import_smithy_client.map)({ + $metadata: deserializeMetadata(output) + }); + await (0, import_smithy_client.collectBody)(output.body, context); + return contents; +}, "de_LogoutCommand"); +var de_CommandError = /* @__PURE__ */ __name(async (output, context) => { + const parsedOutput = { + ...output, + body: await (0, import_core2.parseJsonErrorBody)(output.body, context) + }; + const errorCode = (0, import_core2.loadRestJsonErrorCode)(output, parsedOutput.body); + switch (errorCode) { + case "InvalidRequestException": + case "com.amazonaws.sso#InvalidRequestException": + throw await de_InvalidRequestExceptionRes(parsedOutput, context); + case "ResourceNotFoundException": + case "com.amazonaws.sso#ResourceNotFoundException": + throw await de_ResourceNotFoundExceptionRes(parsedOutput, context); + case "TooManyRequestsException": + case "com.amazonaws.sso#TooManyRequestsException": + throw await de_TooManyRequestsExceptionRes(parsedOutput, context); + case "UnauthorizedException": + case "com.amazonaws.sso#UnauthorizedException": + throw await de_UnauthorizedExceptionRes(parsedOutput, context); + default: + const parsedBody = parsedOutput.body; + return throwDefaultError({ + output, + parsedBody, + errorCode + }); + } +}, "de_CommandError"); +var throwDefaultError = (0, import_smithy_client.withBaseException)(SSOServiceException); +var de_InvalidRequestExceptionRes = /* @__PURE__ */ __name(async (parsedOutput, context) => { + const 
contents = (0, import_smithy_client.map)({}); + const data = parsedOutput.body; + const doc = (0, import_smithy_client.take)(data, { + message: import_smithy_client.expectString + }); + Object.assign(contents, doc); + const exception = new InvalidRequestException({ + $metadata: deserializeMetadata(parsedOutput), + ...contents + }); + return (0, import_smithy_client.decorateServiceException)(exception, parsedOutput.body); +}, "de_InvalidRequestExceptionRes"); +var de_ResourceNotFoundExceptionRes = /* @__PURE__ */ __name(async (parsedOutput, context) => { + const contents = (0, import_smithy_client.map)({}); + const data = parsedOutput.body; + const doc = (0, import_smithy_client.take)(data, { + message: import_smithy_client.expectString + }); + Object.assign(contents, doc); + const exception = new ResourceNotFoundException({ + $metadata: deserializeMetadata(parsedOutput), + ...contents + }); + return (0, import_smithy_client.decorateServiceException)(exception, parsedOutput.body); +}, "de_ResourceNotFoundExceptionRes"); +var de_TooManyRequestsExceptionRes = /* @__PURE__ */ __name(async (parsedOutput, context) => { + const contents = (0, import_smithy_client.map)({}); + const data = parsedOutput.body; + const doc = (0, import_smithy_client.take)(data, { + message: import_smithy_client.expectString + }); + Object.assign(contents, doc); + const exception = new TooManyRequestsException({ + $metadata: deserializeMetadata(parsedOutput), + ...contents + }); + return (0, import_smithy_client.decorateServiceException)(exception, parsedOutput.body); +}, "de_TooManyRequestsExceptionRes"); +var de_UnauthorizedExceptionRes = /* @__PURE__ */ __name(async (parsedOutput, context) => { + const contents = (0, import_smithy_client.map)({}); + const data = parsedOutput.body; + const doc = (0, import_smithy_client.take)(data, { + message: import_smithy_client.expectString + }); + Object.assign(contents, doc); + const exception = new UnauthorizedException({ + $metadata: 
deserializeMetadata(parsedOutput), + ...contents + }); + return (0, import_smithy_client.decorateServiceException)(exception, parsedOutput.body); +}, "de_UnauthorizedExceptionRes"); +var deserializeMetadata = /* @__PURE__ */ __name((output) => ({ + httpStatusCode: output.statusCode, + requestId: output.headers["x-amzn-requestid"] ?? output.headers["x-amzn-request-id"] ?? output.headers["x-amz-request-id"], + extendedRequestId: output.headers["x-amz-id-2"], + cfId: output.headers["x-amz-cf-id"] +}), "deserializeMetadata"); +var _aI = "accountId"; +var _aT = "accessToken"; +var _ai = "account_id"; +var _mR = "maxResults"; +var _mr = "max_result"; +var _nT = "nextToken"; +var _nt = "next_token"; +var _rN = "roleName"; +var _rn = "role_name"; +var _xasbt = "x-amz-sso_bearer_token"; + +// src/commands/GetRoleCredentialsCommand.ts +var GetRoleCredentialsCommand = class extends import_smithy_client.Command.classBuilder().ep(commonParams).m(function(Command, cs, config, o) { + return [ + (0, import_middleware_serde.getSerdePlugin)(config, this.serialize, this.deserialize), + (0, import_middleware_endpoint.getEndpointPlugin)(config, Command.getEndpointParameterInstructions()) + ]; +}).s("SWBPortalService", "GetRoleCredentials", {}).n("SSOClient", "GetRoleCredentialsCommand").f(GetRoleCredentialsRequestFilterSensitiveLog, GetRoleCredentialsResponseFilterSensitiveLog).ser(se_GetRoleCredentialsCommand).de(de_GetRoleCredentialsCommand).build() { + static { + __name(this, "GetRoleCredentialsCommand"); + } +}; + +// src/commands/ListAccountRolesCommand.ts + + + +var ListAccountRolesCommand = class extends import_smithy_client.Command.classBuilder().ep(commonParams).m(function(Command, cs, config, o) { + return [ + (0, import_middleware_serde.getSerdePlugin)(config, this.serialize, this.deserialize), + (0, import_middleware_endpoint.getEndpointPlugin)(config, Command.getEndpointParameterInstructions()) + ]; +}).s("SWBPortalService", "ListAccountRoles", {}).n("SSOClient", 
"ListAccountRolesCommand").f(ListAccountRolesRequestFilterSensitiveLog, void 0).ser(se_ListAccountRolesCommand).de(de_ListAccountRolesCommand).build() { + static { + __name(this, "ListAccountRolesCommand"); + } +}; + +// src/commands/ListAccountsCommand.ts + + + +var ListAccountsCommand = class extends import_smithy_client.Command.classBuilder().ep(commonParams).m(function(Command, cs, config, o) { + return [ + (0, import_middleware_serde.getSerdePlugin)(config, this.serialize, this.deserialize), + (0, import_middleware_endpoint.getEndpointPlugin)(config, Command.getEndpointParameterInstructions()) + ]; +}).s("SWBPortalService", "ListAccounts", {}).n("SSOClient", "ListAccountsCommand").f(ListAccountsRequestFilterSensitiveLog, void 0).ser(se_ListAccountsCommand).de(de_ListAccountsCommand).build() { + static { + __name(this, "ListAccountsCommand"); + } +}; + +// src/commands/LogoutCommand.ts + + + +var LogoutCommand = class extends import_smithy_client.Command.classBuilder().ep(commonParams).m(function(Command, cs, config, o) { + return [ + (0, import_middleware_serde.getSerdePlugin)(config, this.serialize, this.deserialize), + (0, import_middleware_endpoint.getEndpointPlugin)(config, Command.getEndpointParameterInstructions()) + ]; +}).s("SWBPortalService", "Logout", {}).n("SSOClient", "LogoutCommand").f(LogoutRequestFilterSensitiveLog, void 0).ser(se_LogoutCommand).de(de_LogoutCommand).build() { + static { + __name(this, "LogoutCommand"); + } +}; + +// src/SSO.ts +var commands = { + GetRoleCredentialsCommand, + ListAccountRolesCommand, + ListAccountsCommand, + LogoutCommand +}; +var SSO = class extends SSOClient { + static { + __name(this, "SSO"); + } +}; +(0, import_smithy_client.createAggregatedClient)(commands, SSO); + +// src/pagination/ListAccountRolesPaginator.ts + +var paginateListAccountRoles = (0, import_core.createPaginator)(SSOClient, ListAccountRolesCommand, "nextToken", "nextToken", "maxResults"); + +// src/pagination/ListAccountsPaginator.ts + +var 
paginateListAccounts = (0, import_core.createPaginator)(SSOClient, ListAccountsCommand, "nextToken", "nextToken", "maxResults"); +// Annotate the CommonJS export names for ESM import in node: + +0 && (module.exports = { + SSOServiceException, + __Client, + SSOClient, + SSO, + $Command, + GetRoleCredentialsCommand, + ListAccountRolesCommand, + ListAccountsCommand, + LogoutCommand, + paginateListAccountRoles, + paginateListAccounts, + InvalidRequestException, + ResourceNotFoundException, + TooManyRequestsException, + UnauthorizedException, + GetRoleCredentialsRequestFilterSensitiveLog, + RoleCredentialsFilterSensitiveLog, + GetRoleCredentialsResponseFilterSensitiveLog, + ListAccountRolesRequestFilterSensitiveLog, + ListAccountsRequestFilterSensitiveLog, + LogoutRequestFilterSensitiveLog +}); + diff --git a/amplify/functions/fetchDocuments/node_modules/@aws-sdk/client-sso/dist-cjs/runtimeConfig.browser.js b/amplify/functions/fetchDocuments/node_modules/@aws-sdk/client-sso/dist-cjs/runtimeConfig.browser.js new file mode 100644 index 0000000..3b40936 --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@aws-sdk/client-sso/dist-cjs/runtimeConfig.browser.js @@ -0,0 +1,38 @@ +"use strict"; +Object.defineProperty(exports, "__esModule", { value: true }); +exports.getRuntimeConfig = void 0; +const tslib_1 = require("tslib"); +const package_json_1 = tslib_1.__importDefault(require("../package.json")); +const sha256_browser_1 = require("@aws-crypto/sha256-browser"); +const util_user_agent_browser_1 = require("@aws-sdk/util-user-agent-browser"); +const config_resolver_1 = require("@smithy/config-resolver"); +const fetch_http_handler_1 = require("@smithy/fetch-http-handler"); +const invalid_dependency_1 = require("@smithy/invalid-dependency"); +const util_body_length_browser_1 = require("@smithy/util-body-length-browser"); +const util_retry_1 = require("@smithy/util-retry"); +const runtimeConfig_shared_1 = require("./runtimeConfig.shared"); +const smithy_client_1 = 
require("@smithy/smithy-client"); +const util_defaults_mode_browser_1 = require("@smithy/util-defaults-mode-browser"); +const getRuntimeConfig = (config) => { + const defaultsMode = (0, util_defaults_mode_browser_1.resolveDefaultsModeConfig)(config); + const defaultConfigProvider = () => defaultsMode().then(smithy_client_1.loadConfigsForDefaultMode); + const clientSharedValues = (0, runtimeConfig_shared_1.getRuntimeConfig)(config); + return { + ...clientSharedValues, + ...config, + runtime: "browser", + defaultsMode, + bodyLengthChecker: config?.bodyLengthChecker ?? util_body_length_browser_1.calculateBodyLength, + defaultUserAgentProvider: config?.defaultUserAgentProvider ?? + (0, util_user_agent_browser_1.createDefaultUserAgentProvider)({ serviceId: clientSharedValues.serviceId, clientVersion: package_json_1.default.version }), + maxAttempts: config?.maxAttempts ?? util_retry_1.DEFAULT_MAX_ATTEMPTS, + region: config?.region ?? (0, invalid_dependency_1.invalidProvider)("Region is missing"), + requestHandler: fetch_http_handler_1.FetchHttpHandler.create(config?.requestHandler ?? defaultConfigProvider), + retryMode: config?.retryMode ?? (async () => (await defaultConfigProvider()).retryMode || util_retry_1.DEFAULT_RETRY_MODE), + sha256: config?.sha256 ?? sha256_browser_1.Sha256, + streamCollector: config?.streamCollector ?? fetch_http_handler_1.streamCollector, + useDualstackEndpoint: config?.useDualstackEndpoint ?? (() => Promise.resolve(config_resolver_1.DEFAULT_USE_DUALSTACK_ENDPOINT)), + useFipsEndpoint: config?.useFipsEndpoint ?? 
(() => Promise.resolve(config_resolver_1.DEFAULT_USE_FIPS_ENDPOINT)), + }; +}; +exports.getRuntimeConfig = getRuntimeConfig; diff --git a/amplify/functions/fetchDocuments/node_modules/@aws-sdk/client-sso/dist-cjs/runtimeConfig.js b/amplify/functions/fetchDocuments/node_modules/@aws-sdk/client-sso/dist-cjs/runtimeConfig.js new file mode 100644 index 0000000..befc739 --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@aws-sdk/client-sso/dist-cjs/runtimeConfig.js @@ -0,0 +1,51 @@ +"use strict"; +Object.defineProperty(exports, "__esModule", { value: true }); +exports.getRuntimeConfig = void 0; +const tslib_1 = require("tslib"); +const package_json_1 = tslib_1.__importDefault(require("../package.json")); +const core_1 = require("@aws-sdk/core"); +const util_user_agent_node_1 = require("@aws-sdk/util-user-agent-node"); +const config_resolver_1 = require("@smithy/config-resolver"); +const hash_node_1 = require("@smithy/hash-node"); +const middleware_retry_1 = require("@smithy/middleware-retry"); +const node_config_provider_1 = require("@smithy/node-config-provider"); +const node_http_handler_1 = require("@smithy/node-http-handler"); +const util_body_length_node_1 = require("@smithy/util-body-length-node"); +const util_retry_1 = require("@smithy/util-retry"); +const runtimeConfig_shared_1 = require("./runtimeConfig.shared"); +const smithy_client_1 = require("@smithy/smithy-client"); +const util_defaults_mode_node_1 = require("@smithy/util-defaults-mode-node"); +const smithy_client_2 = require("@smithy/smithy-client"); +const getRuntimeConfig = (config) => { + (0, smithy_client_2.emitWarningIfUnsupportedVersion)(process.version); + const defaultsMode = (0, util_defaults_mode_node_1.resolveDefaultsModeConfig)(config); + const defaultConfigProvider = () => defaultsMode().then(smithy_client_1.loadConfigsForDefaultMode); + const clientSharedValues = (0, runtimeConfig_shared_1.getRuntimeConfig)(config); + (0, 
core_1.emitWarningIfUnsupportedVersion)(process.version); + const profileConfig = { profile: config?.profile }; + return { + ...clientSharedValues, + ...config, + runtime: "node", + defaultsMode, + authSchemePreference: config?.authSchemePreference ?? (0, node_config_provider_1.loadConfig)(core_1.NODE_AUTH_SCHEME_PREFERENCE_OPTIONS, profileConfig), + bodyLengthChecker: config?.bodyLengthChecker ?? util_body_length_node_1.calculateBodyLength, + defaultUserAgentProvider: config?.defaultUserAgentProvider ?? + (0, util_user_agent_node_1.createDefaultUserAgentProvider)({ serviceId: clientSharedValues.serviceId, clientVersion: package_json_1.default.version }), + maxAttempts: config?.maxAttempts ?? (0, node_config_provider_1.loadConfig)(middleware_retry_1.NODE_MAX_ATTEMPT_CONFIG_OPTIONS, config), + region: config?.region ?? + (0, node_config_provider_1.loadConfig)(config_resolver_1.NODE_REGION_CONFIG_OPTIONS, { ...config_resolver_1.NODE_REGION_CONFIG_FILE_OPTIONS, ...profileConfig }), + requestHandler: node_http_handler_1.NodeHttpHandler.create(config?.requestHandler ?? defaultConfigProvider), + retryMode: config?.retryMode ?? + (0, node_config_provider_1.loadConfig)({ + ...middleware_retry_1.NODE_RETRY_MODE_CONFIG_OPTIONS, + default: async () => (await defaultConfigProvider()).retryMode || util_retry_1.DEFAULT_RETRY_MODE, + }, config), + sha256: config?.sha256 ?? hash_node_1.Hash.bind(null, "sha256"), + streamCollector: config?.streamCollector ?? node_http_handler_1.streamCollector, + useDualstackEndpoint: config?.useDualstackEndpoint ?? (0, node_config_provider_1.loadConfig)(config_resolver_1.NODE_USE_DUALSTACK_ENDPOINT_CONFIG_OPTIONS, profileConfig), + useFipsEndpoint: config?.useFipsEndpoint ?? (0, node_config_provider_1.loadConfig)(config_resolver_1.NODE_USE_FIPS_ENDPOINT_CONFIG_OPTIONS, profileConfig), + userAgentAppId: config?.userAgentAppId ?? 
(0, node_config_provider_1.loadConfig)(util_user_agent_node_1.NODE_APP_ID_CONFIG_OPTIONS, profileConfig), + }; +}; +exports.getRuntimeConfig = getRuntimeConfig; diff --git a/amplify/functions/fetchDocuments/node_modules/@aws-sdk/client-sso/dist-cjs/runtimeConfig.native.js b/amplify/functions/fetchDocuments/node_modules/@aws-sdk/client-sso/dist-cjs/runtimeConfig.native.js new file mode 100644 index 0000000..34c5f8e --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@aws-sdk/client-sso/dist-cjs/runtimeConfig.native.js @@ -0,0 +1,15 @@ +"use strict"; +Object.defineProperty(exports, "__esModule", { value: true }); +exports.getRuntimeConfig = void 0; +const sha256_js_1 = require("@aws-crypto/sha256-js"); +const runtimeConfig_browser_1 = require("./runtimeConfig.browser"); +const getRuntimeConfig = (config) => { + const browserDefaults = (0, runtimeConfig_browser_1.getRuntimeConfig)(config); + return { + ...browserDefaults, + ...config, + runtime: "react-native", + sha256: config?.sha256 ?? 
sha256_js_1.Sha256, + }; +}; +exports.getRuntimeConfig = getRuntimeConfig; diff --git a/amplify/functions/fetchDocuments/node_modules/@aws-sdk/client-sso/dist-cjs/runtimeConfig.shared.js b/amplify/functions/fetchDocuments/node_modules/@aws-sdk/client-sso/dist-cjs/runtimeConfig.shared.js new file mode 100644 index 0000000..24a378c --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@aws-sdk/client-sso/dist-cjs/runtimeConfig.shared.js @@ -0,0 +1,40 @@ +"use strict"; +Object.defineProperty(exports, "__esModule", { value: true }); +exports.getRuntimeConfig = void 0; +const core_1 = require("@aws-sdk/core"); +const core_2 = require("@smithy/core"); +const smithy_client_1 = require("@smithy/smithy-client"); +const url_parser_1 = require("@smithy/url-parser"); +const util_base64_1 = require("@smithy/util-base64"); +const util_utf8_1 = require("@smithy/util-utf8"); +const httpAuthSchemeProvider_1 = require("./auth/httpAuthSchemeProvider"); +const endpointResolver_1 = require("./endpoint/endpointResolver"); +const getRuntimeConfig = (config) => { + return { + apiVersion: "2019-06-10", + base64Decoder: config?.base64Decoder ?? util_base64_1.fromBase64, + base64Encoder: config?.base64Encoder ?? util_base64_1.toBase64, + disableHostPrefix: config?.disableHostPrefix ?? false, + endpointProvider: config?.endpointProvider ?? endpointResolver_1.defaultEndpointResolver, + extensions: config?.extensions ?? [], + httpAuthSchemeProvider: config?.httpAuthSchemeProvider ?? httpAuthSchemeProvider_1.defaultSSOHttpAuthSchemeProvider, + httpAuthSchemes: config?.httpAuthSchemes ?? [ + { + schemeId: "aws.auth#sigv4", + identityProvider: (ipc) => ipc.getIdentityProvider("aws.auth#sigv4"), + signer: new core_1.AwsSdkSigV4Signer(), + }, + { + schemeId: "smithy.api#noAuth", + identityProvider: (ipc) => ipc.getIdentityProvider("smithy.api#noAuth") || (async () => ({})), + signer: new core_2.NoAuthSigner(), + }, + ], + logger: config?.logger ?? 
new smithy_client_1.NoOpLogger(), + serviceId: config?.serviceId ?? "SSO", + urlParser: config?.urlParser ?? url_parser_1.parseUrl, + utf8Decoder: config?.utf8Decoder ?? util_utf8_1.fromUtf8, + utf8Encoder: config?.utf8Encoder ?? util_utf8_1.toUtf8, + }; +}; +exports.getRuntimeConfig = getRuntimeConfig; diff --git a/amplify/functions/fetchDocuments/node_modules/@aws-sdk/client-sso/dist-es/SSO.js b/amplify/functions/fetchDocuments/node_modules/@aws-sdk/client-sso/dist-es/SSO.js new file mode 100644 index 0000000..04d3169 --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@aws-sdk/client-sso/dist-es/SSO.js @@ -0,0 +1,15 @@ +import { createAggregatedClient } from "@smithy/smithy-client"; +import { GetRoleCredentialsCommand, } from "./commands/GetRoleCredentialsCommand"; +import { ListAccountRolesCommand, } from "./commands/ListAccountRolesCommand"; +import { ListAccountsCommand, } from "./commands/ListAccountsCommand"; +import { LogoutCommand } from "./commands/LogoutCommand"; +import { SSOClient } from "./SSOClient"; +const commands = { + GetRoleCredentialsCommand, + ListAccountRolesCommand, + ListAccountsCommand, + LogoutCommand, +}; +export class SSO extends SSOClient { +} +createAggregatedClient(commands, SSO); diff --git a/amplify/functions/fetchDocuments/node_modules/@aws-sdk/client-sso/dist-es/SSOClient.js b/amplify/functions/fetchDocuments/node_modules/@aws-sdk/client-sso/dist-es/SSOClient.js new file mode 100644 index 0000000..890a848 --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@aws-sdk/client-sso/dist-es/SSOClient.js @@ -0,0 +1,48 @@ +import { getHostHeaderPlugin, resolveHostHeaderConfig, } from "@aws-sdk/middleware-host-header"; +import { getLoggerPlugin } from "@aws-sdk/middleware-logger"; +import { getRecursionDetectionPlugin } from "@aws-sdk/middleware-recursion-detection"; +import { getUserAgentPlugin, resolveUserAgentConfig, } from "@aws-sdk/middleware-user-agent"; +import { resolveRegionConfig } from 
"@smithy/config-resolver"; +import { DefaultIdentityProviderConfig, getHttpAuthSchemeEndpointRuleSetPlugin, getHttpSigningPlugin, } from "@smithy/core"; +import { getContentLengthPlugin } from "@smithy/middleware-content-length"; +import { resolveEndpointConfig } from "@smithy/middleware-endpoint"; +import { getRetryPlugin, resolveRetryConfig } from "@smithy/middleware-retry"; +import { Client as __Client, } from "@smithy/smithy-client"; +import { defaultSSOHttpAuthSchemeParametersProvider, resolveHttpAuthSchemeConfig, } from "./auth/httpAuthSchemeProvider"; +import { resolveClientEndpointParameters, } from "./endpoint/EndpointParameters"; +import { getRuntimeConfig as __getRuntimeConfig } from "./runtimeConfig"; +import { resolveRuntimeExtensions } from "./runtimeExtensions"; +export { __Client }; +export class SSOClient extends __Client { + config; + constructor(...[configuration]) { + const _config_0 = __getRuntimeConfig(configuration || {}); + super(_config_0); + this.initConfig = _config_0; + const _config_1 = resolveClientEndpointParameters(_config_0); + const _config_2 = resolveUserAgentConfig(_config_1); + const _config_3 = resolveRetryConfig(_config_2); + const _config_4 = resolveRegionConfig(_config_3); + const _config_5 = resolveHostHeaderConfig(_config_4); + const _config_6 = resolveEndpointConfig(_config_5); + const _config_7 = resolveHttpAuthSchemeConfig(_config_6); + const _config_8 = resolveRuntimeExtensions(_config_7, configuration?.extensions || []); + this.config = _config_8; + this.middlewareStack.use(getUserAgentPlugin(this.config)); + this.middlewareStack.use(getRetryPlugin(this.config)); + this.middlewareStack.use(getContentLengthPlugin(this.config)); + this.middlewareStack.use(getHostHeaderPlugin(this.config)); + this.middlewareStack.use(getLoggerPlugin(this.config)); + this.middlewareStack.use(getRecursionDetectionPlugin(this.config)); + this.middlewareStack.use(getHttpAuthSchemeEndpointRuleSetPlugin(this.config, { + 
httpAuthSchemeParametersProvider: defaultSSOHttpAuthSchemeParametersProvider, + identityProviderConfigProvider: async (config) => new DefaultIdentityProviderConfig({ + "aws.auth#sigv4": config.credentials, + }), + })); + this.middlewareStack.use(getHttpSigningPlugin(this.config)); + } + destroy() { + super.destroy(); + } +} diff --git a/amplify/functions/fetchDocuments/node_modules/@aws-sdk/client-sso/dist-es/auth/httpAuthExtensionConfiguration.js b/amplify/functions/fetchDocuments/node_modules/@aws-sdk/client-sso/dist-es/auth/httpAuthExtensionConfiguration.js new file mode 100644 index 0000000..2ba1d48 --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@aws-sdk/client-sso/dist-es/auth/httpAuthExtensionConfiguration.js @@ -0,0 +1,38 @@ +export const getHttpAuthExtensionConfiguration = (runtimeConfig) => { + const _httpAuthSchemes = runtimeConfig.httpAuthSchemes; + let _httpAuthSchemeProvider = runtimeConfig.httpAuthSchemeProvider; + let _credentials = runtimeConfig.credentials; + return { + setHttpAuthScheme(httpAuthScheme) { + const index = _httpAuthSchemes.findIndex((scheme) => scheme.schemeId === httpAuthScheme.schemeId); + if (index === -1) { + _httpAuthSchemes.push(httpAuthScheme); + } + else { + _httpAuthSchemes.splice(index, 1, httpAuthScheme); + } + }, + httpAuthSchemes() { + return _httpAuthSchemes; + }, + setHttpAuthSchemeProvider(httpAuthSchemeProvider) { + _httpAuthSchemeProvider = httpAuthSchemeProvider; + }, + httpAuthSchemeProvider() { + return _httpAuthSchemeProvider; + }, + setCredentials(credentials) { + _credentials = credentials; + }, + credentials() { + return _credentials; + }, + }; +}; +export const resolveHttpAuthRuntimeConfig = (config) => { + return { + httpAuthSchemes: config.httpAuthSchemes(), + httpAuthSchemeProvider: config.httpAuthSchemeProvider(), + credentials: config.credentials(), + }; +}; diff --git a/amplify/functions/fetchDocuments/node_modules/@aws-sdk/client-sso/dist-es/auth/httpAuthSchemeProvider.js 
b/amplify/functions/fetchDocuments/node_modules/@aws-sdk/client-sso/dist-es/auth/httpAuthSchemeProvider.js new file mode 100644 index 0000000..f7ff90f --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@aws-sdk/client-sso/dist-es/auth/httpAuthSchemeProvider.js @@ -0,0 +1,62 @@ +import { resolveAwsSdkSigV4Config, } from "@aws-sdk/core"; +import { getSmithyContext, normalizeProvider } from "@smithy/util-middleware"; +export const defaultSSOHttpAuthSchemeParametersProvider = async (config, context, input) => { + return { + operation: getSmithyContext(context).operation, + region: (await normalizeProvider(config.region)()) || + (() => { + throw new Error("expected `region` to be configured for `aws.auth#sigv4`"); + })(), + }; +}; +function createAwsAuthSigv4HttpAuthOption(authParameters) { + return { + schemeId: "aws.auth#sigv4", + signingProperties: { + name: "awsssoportal", + region: authParameters.region, + }, + propertiesExtractor: (config, context) => ({ + signingProperties: { + config, + context, + }, + }), + }; +} +function createSmithyApiNoAuthHttpAuthOption(authParameters) { + return { + schemeId: "smithy.api#noAuth", + }; +} +export const defaultSSOHttpAuthSchemeProvider = (authParameters) => { + const options = []; + switch (authParameters.operation) { + case "GetRoleCredentials": { + options.push(createSmithyApiNoAuthHttpAuthOption(authParameters)); + break; + } + case "ListAccountRoles": { + options.push(createSmithyApiNoAuthHttpAuthOption(authParameters)); + break; + } + case "ListAccounts": { + options.push(createSmithyApiNoAuthHttpAuthOption(authParameters)); + break; + } + case "Logout": { + options.push(createSmithyApiNoAuthHttpAuthOption(authParameters)); + break; + } + default: { + options.push(createAwsAuthSigv4HttpAuthOption(authParameters)); + } + } + return options; +}; +export const resolveHttpAuthSchemeConfig = (config) => { + const config_0 = resolveAwsSdkSigV4Config(config); + return Object.assign(config_0, { + 
authSchemePreference: normalizeProvider(config.authSchemePreference ?? []), + }); +}; diff --git a/amplify/functions/fetchDocuments/node_modules/@aws-sdk/client-sso/dist-es/commands/GetRoleCredentialsCommand.js b/amplify/functions/fetchDocuments/node_modules/@aws-sdk/client-sso/dist-es/commands/GetRoleCredentialsCommand.js new file mode 100644 index 0000000..aa4c2e3 --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@aws-sdk/client-sso/dist-es/commands/GetRoleCredentialsCommand.js @@ -0,0 +1,23 @@ +import { getEndpointPlugin } from "@smithy/middleware-endpoint"; +import { getSerdePlugin } from "@smithy/middleware-serde"; +import { Command as $Command } from "@smithy/smithy-client"; +import { commonParams } from "../endpoint/EndpointParameters"; +import { GetRoleCredentialsRequestFilterSensitiveLog, GetRoleCredentialsResponseFilterSensitiveLog, } from "../models/models_0"; +import { de_GetRoleCredentialsCommand, se_GetRoleCredentialsCommand } from "../protocols/Aws_restJson1"; +export { $Command }; +export class GetRoleCredentialsCommand extends $Command + .classBuilder() + .ep(commonParams) + .m(function (Command, cs, config, o) { + return [ + getSerdePlugin(config, this.serialize, this.deserialize), + getEndpointPlugin(config, Command.getEndpointParameterInstructions()), + ]; +}) + .s("SWBPortalService", "GetRoleCredentials", {}) + .n("SSOClient", "GetRoleCredentialsCommand") + .f(GetRoleCredentialsRequestFilterSensitiveLog, GetRoleCredentialsResponseFilterSensitiveLog) + .ser(se_GetRoleCredentialsCommand) + .de(de_GetRoleCredentialsCommand) + .build() { +} diff --git a/amplify/functions/fetchDocuments/node_modules/@aws-sdk/client-sso/dist-es/commands/ListAccountRolesCommand.js b/amplify/functions/fetchDocuments/node_modules/@aws-sdk/client-sso/dist-es/commands/ListAccountRolesCommand.js new file mode 100644 index 0000000..d5bcc14 --- /dev/null +++ 
b/amplify/functions/fetchDocuments/node_modules/@aws-sdk/client-sso/dist-es/commands/ListAccountRolesCommand.js @@ -0,0 +1,23 @@ +import { getEndpointPlugin } from "@smithy/middleware-endpoint"; +import { getSerdePlugin } from "@smithy/middleware-serde"; +import { Command as $Command } from "@smithy/smithy-client"; +import { commonParams } from "../endpoint/EndpointParameters"; +import { ListAccountRolesRequestFilterSensitiveLog, } from "../models/models_0"; +import { de_ListAccountRolesCommand, se_ListAccountRolesCommand } from "../protocols/Aws_restJson1"; +export { $Command }; +export class ListAccountRolesCommand extends $Command + .classBuilder() + .ep(commonParams) + .m(function (Command, cs, config, o) { + return [ + getSerdePlugin(config, this.serialize, this.deserialize), + getEndpointPlugin(config, Command.getEndpointParameterInstructions()), + ]; +}) + .s("SWBPortalService", "ListAccountRoles", {}) + .n("SSOClient", "ListAccountRolesCommand") + .f(ListAccountRolesRequestFilterSensitiveLog, void 0) + .ser(se_ListAccountRolesCommand) + .de(de_ListAccountRolesCommand) + .build() { +} diff --git a/amplify/functions/fetchDocuments/node_modules/@aws-sdk/client-sso/dist-es/commands/ListAccountsCommand.js b/amplify/functions/fetchDocuments/node_modules/@aws-sdk/client-sso/dist-es/commands/ListAccountsCommand.js new file mode 100644 index 0000000..d4ab8ba --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@aws-sdk/client-sso/dist-es/commands/ListAccountsCommand.js @@ -0,0 +1,23 @@ +import { getEndpointPlugin } from "@smithy/middleware-endpoint"; +import { getSerdePlugin } from "@smithy/middleware-serde"; +import { Command as $Command } from "@smithy/smithy-client"; +import { commonParams } from "../endpoint/EndpointParameters"; +import { ListAccountsRequestFilterSensitiveLog } from "../models/models_0"; +import { de_ListAccountsCommand, se_ListAccountsCommand } from "../protocols/Aws_restJson1"; +export { $Command }; +export class 
ListAccountsCommand extends $Command + .classBuilder() + .ep(commonParams) + .m(function (Command, cs, config, o) { + return [ + getSerdePlugin(config, this.serialize, this.deserialize), + getEndpointPlugin(config, Command.getEndpointParameterInstructions()), + ]; +}) + .s("SWBPortalService", "ListAccounts", {}) + .n("SSOClient", "ListAccountsCommand") + .f(ListAccountsRequestFilterSensitiveLog, void 0) + .ser(se_ListAccountsCommand) + .de(de_ListAccountsCommand) + .build() { +} diff --git a/amplify/functions/fetchDocuments/node_modules/@aws-sdk/client-sso/dist-es/commands/LogoutCommand.js b/amplify/functions/fetchDocuments/node_modules/@aws-sdk/client-sso/dist-es/commands/LogoutCommand.js new file mode 100644 index 0000000..29a37ed --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@aws-sdk/client-sso/dist-es/commands/LogoutCommand.js @@ -0,0 +1,23 @@ +import { getEndpointPlugin } from "@smithy/middleware-endpoint"; +import { getSerdePlugin } from "@smithy/middleware-serde"; +import { Command as $Command } from "@smithy/smithy-client"; +import { commonParams } from "../endpoint/EndpointParameters"; +import { LogoutRequestFilterSensitiveLog } from "../models/models_0"; +import { de_LogoutCommand, se_LogoutCommand } from "../protocols/Aws_restJson1"; +export { $Command }; +export class LogoutCommand extends $Command + .classBuilder() + .ep(commonParams) + .m(function (Command, cs, config, o) { + return [ + getSerdePlugin(config, this.serialize, this.deserialize), + getEndpointPlugin(config, Command.getEndpointParameterInstructions()), + ]; +}) + .s("SWBPortalService", "Logout", {}) + .n("SSOClient", "LogoutCommand") + .f(LogoutRequestFilterSensitiveLog, void 0) + .ser(se_LogoutCommand) + .de(de_LogoutCommand) + .build() { +} diff --git a/amplify/functions/fetchDocuments/node_modules/@aws-sdk/client-sso/dist-es/commands/index.js b/amplify/functions/fetchDocuments/node_modules/@aws-sdk/client-sso/dist-es/commands/index.js new file mode 100644 index 
0000000..0ab890d --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@aws-sdk/client-sso/dist-es/commands/index.js @@ -0,0 +1,4 @@ +export * from "./GetRoleCredentialsCommand"; +export * from "./ListAccountRolesCommand"; +export * from "./ListAccountsCommand"; +export * from "./LogoutCommand"; diff --git a/amplify/functions/fetchDocuments/node_modules/@aws-sdk/client-sso/dist-es/endpoint/EndpointParameters.js b/amplify/functions/fetchDocuments/node_modules/@aws-sdk/client-sso/dist-es/endpoint/EndpointParameters.js new file mode 100644 index 0000000..77e34f8 --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@aws-sdk/client-sso/dist-es/endpoint/EndpointParameters.js @@ -0,0 +1,13 @@ +export const resolveClientEndpointParameters = (options) => { + return Object.assign(options, { + useDualstackEndpoint: options.useDualstackEndpoint ?? false, + useFipsEndpoint: options.useFipsEndpoint ?? false, + defaultSigningName: "awsssoportal", + }); +}; +export const commonParams = { + UseFIPS: { type: "builtInParams", name: "useFipsEndpoint" }, + Endpoint: { type: "builtInParams", name: "endpoint" }, + Region: { type: "builtInParams", name: "region" }, + UseDualStack: { type: "builtInParams", name: "useDualstackEndpoint" }, +}; diff --git a/amplify/functions/fetchDocuments/node_modules/@aws-sdk/client-sso/dist-es/endpoint/endpointResolver.js b/amplify/functions/fetchDocuments/node_modules/@aws-sdk/client-sso/dist-es/endpoint/endpointResolver.js new file mode 100644 index 0000000..0ac15bc --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@aws-sdk/client-sso/dist-es/endpoint/endpointResolver.js @@ -0,0 +1,14 @@ +import { awsEndpointFunctions } from "@aws-sdk/util-endpoints"; +import { customEndpointFunctions, EndpointCache, resolveEndpoint } from "@smithy/util-endpoints"; +import { ruleSet } from "./ruleset"; +const cache = new EndpointCache({ + size: 50, + params: ["Endpoint", "Region", "UseDualStack", "UseFIPS"], +}); +export const 
defaultEndpointResolver = (endpointParams, context = {}) => { + return cache.get(endpointParams, () => resolveEndpoint(ruleSet, { + endpointParams: endpointParams, + logger: context.logger, + })); +}; +customEndpointFunctions.aws = awsEndpointFunctions; diff --git a/amplify/functions/fetchDocuments/node_modules/@aws-sdk/client-sso/dist-es/endpoint/ruleset.js b/amplify/functions/fetchDocuments/node_modules/@aws-sdk/client-sso/dist-es/endpoint/ruleset.js new file mode 100644 index 0000000..c48673d --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@aws-sdk/client-sso/dist-es/endpoint/ruleset.js @@ -0,0 +1,4 @@ +const u = "required", v = "fn", w = "argv", x = "ref"; +const a = true, b = "isSet", c = "booleanEquals", d = "error", e = "endpoint", f = "tree", g = "PartitionResult", h = "getAttr", i = { [u]: false, "type": "String" }, j = { [u]: true, "default": false, "type": "Boolean" }, k = { [x]: "Endpoint" }, l = { [v]: c, [w]: [{ [x]: "UseFIPS" }, true] }, m = { [v]: c, [w]: [{ [x]: "UseDualStack" }, true] }, n = {}, o = { [v]: h, [w]: [{ [x]: g }, "supportsFIPS"] }, p = { [x]: g }, q = { [v]: c, [w]: [true, { [v]: h, [w]: [p, "supportsDualStack"] }] }, r = [l], s = [m], t = [{ [x]: "Region" }]; +const _data = { version: "1.0", parameters: { Region: i, UseDualStack: j, UseFIPS: j, Endpoint: i }, rules: [{ conditions: [{ [v]: b, [w]: [k] }], rules: [{ conditions: r, error: "Invalid Configuration: FIPS and custom endpoint are not supported", type: d }, { conditions: s, error: "Invalid Configuration: Dualstack and custom endpoint are not supported", type: d }, { endpoint: { url: k, properties: n, headers: n }, type: e }], type: f }, { conditions: [{ [v]: b, [w]: t }], rules: [{ conditions: [{ [v]: "aws.partition", [w]: t, assign: g }], rules: [{ conditions: [l, m], rules: [{ conditions: [{ [v]: c, [w]: [a, o] }, q], rules: [{ endpoint: { url: "https://portal.sso-fips.{Region}.{PartitionResult#dualStackDnsSuffix}", properties: n, headers: n }, type: e }], 
type: f }, { error: "FIPS and DualStack are enabled, but this partition does not support one or both", type: d }], type: f }, { conditions: r, rules: [{ conditions: [{ [v]: c, [w]: [o, a] }], rules: [{ conditions: [{ [v]: "stringEquals", [w]: [{ [v]: h, [w]: [p, "name"] }, "aws-us-gov"] }], endpoint: { url: "https://portal.sso.{Region}.amazonaws.com", properties: n, headers: n }, type: e }, { endpoint: { url: "https://portal.sso-fips.{Region}.{PartitionResult#dnsSuffix}", properties: n, headers: n }, type: e }], type: f }, { error: "FIPS is enabled but this partition does not support FIPS", type: d }], type: f }, { conditions: s, rules: [{ conditions: [q], rules: [{ endpoint: { url: "https://portal.sso.{Region}.{PartitionResult#dualStackDnsSuffix}", properties: n, headers: n }, type: e }], type: f }, { error: "DualStack is enabled but this partition does not support DualStack", type: d }], type: f }, { endpoint: { url: "https://portal.sso.{Region}.{PartitionResult#dnsSuffix}", properties: n, headers: n }, type: e }], type: f }], type: f }, { error: "Invalid Configuration: Missing Region", type: d }] }; +export const ruleSet = _data; diff --git a/amplify/functions/fetchDocuments/node_modules/@aws-sdk/client-sso/dist-es/extensionConfiguration.js b/amplify/functions/fetchDocuments/node_modules/@aws-sdk/client-sso/dist-es/extensionConfiguration.js new file mode 100644 index 0000000..cb0ff5c --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@aws-sdk/client-sso/dist-es/extensionConfiguration.js @@ -0,0 +1 @@ +export {}; diff --git a/amplify/functions/fetchDocuments/node_modules/@aws-sdk/client-sso/dist-es/index.js b/amplify/functions/fetchDocuments/node_modules/@aws-sdk/client-sso/dist-es/index.js new file mode 100644 index 0000000..b297556 --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@aws-sdk/client-sso/dist-es/index.js @@ -0,0 +1,6 @@ +export * from "./SSOClient"; +export * from "./SSO"; +export * from "./commands"; +export * from 
"./pagination"; +export * from "./models"; +export { SSOServiceException } from "./models/SSOServiceException"; diff --git a/amplify/functions/fetchDocuments/node_modules/@aws-sdk/client-sso/dist-es/models/SSOServiceException.js b/amplify/functions/fetchDocuments/node_modules/@aws-sdk/client-sso/dist-es/models/SSOServiceException.js new file mode 100644 index 0000000..fa5d8fb --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@aws-sdk/client-sso/dist-es/models/SSOServiceException.js @@ -0,0 +1,8 @@ +import { ServiceException as __ServiceException, } from "@smithy/smithy-client"; +export { __ServiceException }; +export class SSOServiceException extends __ServiceException { + constructor(options) { + super(options); + Object.setPrototypeOf(this, SSOServiceException.prototype); + } +} diff --git a/amplify/functions/fetchDocuments/node_modules/@aws-sdk/client-sso/dist-es/models/index.js b/amplify/functions/fetchDocuments/node_modules/@aws-sdk/client-sso/dist-es/models/index.js new file mode 100644 index 0000000..09c5d6e --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@aws-sdk/client-sso/dist-es/models/index.js @@ -0,0 +1 @@ +export * from "./models_0"; diff --git a/amplify/functions/fetchDocuments/node_modules/@aws-sdk/client-sso/dist-es/models/models_0.js b/amplify/functions/fetchDocuments/node_modules/@aws-sdk/client-sso/dist-es/models/models_0.js new file mode 100644 index 0000000..56ec16d --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@aws-sdk/client-sso/dist-es/models/models_0.js @@ -0,0 +1,75 @@ +import { SENSITIVE_STRING } from "@smithy/smithy-client"; +import { SSOServiceException as __BaseException } from "./SSOServiceException"; +export class InvalidRequestException extends __BaseException { + name = "InvalidRequestException"; + $fault = "client"; + constructor(opts) { + super({ + name: "InvalidRequestException", + $fault: "client", + ...opts, + }); + Object.setPrototypeOf(this, 
InvalidRequestException.prototype); + } +} +export class ResourceNotFoundException extends __BaseException { + name = "ResourceNotFoundException"; + $fault = "client"; + constructor(opts) { + super({ + name: "ResourceNotFoundException", + $fault: "client", + ...opts, + }); + Object.setPrototypeOf(this, ResourceNotFoundException.prototype); + } +} +export class TooManyRequestsException extends __BaseException { + name = "TooManyRequestsException"; + $fault = "client"; + constructor(opts) { + super({ + name: "TooManyRequestsException", + $fault: "client", + ...opts, + }); + Object.setPrototypeOf(this, TooManyRequestsException.prototype); + } +} +export class UnauthorizedException extends __BaseException { + name = "UnauthorizedException"; + $fault = "client"; + constructor(opts) { + super({ + name: "UnauthorizedException", + $fault: "client", + ...opts, + }); + Object.setPrototypeOf(this, UnauthorizedException.prototype); + } +} +export const GetRoleCredentialsRequestFilterSensitiveLog = (obj) => ({ + ...obj, + ...(obj.accessToken && { accessToken: SENSITIVE_STRING }), +}); +export const RoleCredentialsFilterSensitiveLog = (obj) => ({ + ...obj, + ...(obj.secretAccessKey && { secretAccessKey: SENSITIVE_STRING }), + ...(obj.sessionToken && { sessionToken: SENSITIVE_STRING }), +}); +export const GetRoleCredentialsResponseFilterSensitiveLog = (obj) => ({ + ...obj, + ...(obj.roleCredentials && { roleCredentials: RoleCredentialsFilterSensitiveLog(obj.roleCredentials) }), +}); +export const ListAccountRolesRequestFilterSensitiveLog = (obj) => ({ + ...obj, + ...(obj.accessToken && { accessToken: SENSITIVE_STRING }), +}); +export const ListAccountsRequestFilterSensitiveLog = (obj) => ({ + ...obj, + ...(obj.accessToken && { accessToken: SENSITIVE_STRING }), +}); +export const LogoutRequestFilterSensitiveLog = (obj) => ({ + ...obj, + ...(obj.accessToken && { accessToken: SENSITIVE_STRING }), +}); diff --git 
a/amplify/functions/fetchDocuments/node_modules/@aws-sdk/client-sso/dist-es/pagination/Interfaces.js b/amplify/functions/fetchDocuments/node_modules/@aws-sdk/client-sso/dist-es/pagination/Interfaces.js new file mode 100644 index 0000000..cb0ff5c --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@aws-sdk/client-sso/dist-es/pagination/Interfaces.js @@ -0,0 +1 @@ +export {}; diff --git a/amplify/functions/fetchDocuments/node_modules/@aws-sdk/client-sso/dist-es/pagination/ListAccountRolesPaginator.js b/amplify/functions/fetchDocuments/node_modules/@aws-sdk/client-sso/dist-es/pagination/ListAccountRolesPaginator.js new file mode 100644 index 0000000..b18c3a8 --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@aws-sdk/client-sso/dist-es/pagination/ListAccountRolesPaginator.js @@ -0,0 +1,4 @@ +import { createPaginator } from "@smithy/core"; +import { ListAccountRolesCommand, } from "../commands/ListAccountRolesCommand"; +import { SSOClient } from "../SSOClient"; +export const paginateListAccountRoles = createPaginator(SSOClient, ListAccountRolesCommand, "nextToken", "nextToken", "maxResults"); diff --git a/amplify/functions/fetchDocuments/node_modules/@aws-sdk/client-sso/dist-es/pagination/ListAccountsPaginator.js b/amplify/functions/fetchDocuments/node_modules/@aws-sdk/client-sso/dist-es/pagination/ListAccountsPaginator.js new file mode 100644 index 0000000..342c663 --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@aws-sdk/client-sso/dist-es/pagination/ListAccountsPaginator.js @@ -0,0 +1,4 @@ +import { createPaginator } from "@smithy/core"; +import { ListAccountsCommand, } from "../commands/ListAccountsCommand"; +import { SSOClient } from "../SSOClient"; +export const paginateListAccounts = createPaginator(SSOClient, ListAccountsCommand, "nextToken", "nextToken", "maxResults"); diff --git a/amplify/functions/fetchDocuments/node_modules/@aws-sdk/client-sso/dist-es/pagination/index.js 
b/amplify/functions/fetchDocuments/node_modules/@aws-sdk/client-sso/dist-es/pagination/index.js new file mode 100644 index 0000000..1e7866f --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@aws-sdk/client-sso/dist-es/pagination/index.js @@ -0,0 +1,3 @@ +export * from "./Interfaces"; +export * from "./ListAccountRolesPaginator"; +export * from "./ListAccountsPaginator"; diff --git a/amplify/functions/fetchDocuments/node_modules/@aws-sdk/client-sso/dist-es/protocols/Aws_restJson1.js b/amplify/functions/fetchDocuments/node_modules/@aws-sdk/client-sso/dist-es/protocols/Aws_restJson1.js new file mode 100644 index 0000000..11b1892 --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@aws-sdk/client-sso/dist-es/protocols/Aws_restJson1.js @@ -0,0 +1,210 @@ +import { loadRestJsonErrorCode, parseJsonBody as parseBody, parseJsonErrorBody as parseErrorBody } from "@aws-sdk/core"; +import { requestBuilder as rb } from "@smithy/core"; +import { _json, collectBody, decorateServiceException as __decorateServiceException, expectNonNull as __expectNonNull, expectObject as __expectObject, expectString as __expectString, isSerializableHeaderValue, map, take, withBaseException, } from "@smithy/smithy-client"; +import { InvalidRequestException, ResourceNotFoundException, TooManyRequestsException, UnauthorizedException, } from "../models/models_0"; +import { SSOServiceException as __BaseException } from "../models/SSOServiceException"; +export const se_GetRoleCredentialsCommand = async (input, context) => { + const b = rb(input, context); + const headers = map({}, isSerializableHeaderValue, { + [_xasbt]: input[_aT], + }); + b.bp("/federation/credentials"); + const query = map({ + [_rn]: [, __expectNonNull(input[_rN], `roleName`)], + [_ai]: [, __expectNonNull(input[_aI], `accountId`)], + }); + let body; + b.m("GET").h(headers).q(query).b(body); + return b.build(); +}; +export const se_ListAccountRolesCommand = async (input, context) => { + const b = rb(input, 
context); + const headers = map({}, isSerializableHeaderValue, { + [_xasbt]: input[_aT], + }); + b.bp("/assignment/roles"); + const query = map({ + [_nt]: [, input[_nT]], + [_mr]: [() => input.maxResults !== void 0, () => input[_mR].toString()], + [_ai]: [, __expectNonNull(input[_aI], `accountId`)], + }); + let body; + b.m("GET").h(headers).q(query).b(body); + return b.build(); +}; +export const se_ListAccountsCommand = async (input, context) => { + const b = rb(input, context); + const headers = map({}, isSerializableHeaderValue, { + [_xasbt]: input[_aT], + }); + b.bp("/assignment/accounts"); + const query = map({ + [_nt]: [, input[_nT]], + [_mr]: [() => input.maxResults !== void 0, () => input[_mR].toString()], + }); + let body; + b.m("GET").h(headers).q(query).b(body); + return b.build(); +}; +export const se_LogoutCommand = async (input, context) => { + const b = rb(input, context); + const headers = map({}, isSerializableHeaderValue, { + [_xasbt]: input[_aT], + }); + b.bp("/logout"); + let body; + b.m("POST").h(headers).b(body); + return b.build(); +}; +export const de_GetRoleCredentialsCommand = async (output, context) => { + if (output.statusCode !== 200 && output.statusCode >= 300) { + return de_CommandError(output, context); + } + const contents = map({ + $metadata: deserializeMetadata(output), + }); + const data = __expectNonNull(__expectObject(await parseBody(output.body, context)), "body"); + const doc = take(data, { + roleCredentials: _json, + }); + Object.assign(contents, doc); + return contents; +}; +export const de_ListAccountRolesCommand = async (output, context) => { + if (output.statusCode !== 200 && output.statusCode >= 300) { + return de_CommandError(output, context); + } + const contents = map({ + $metadata: deserializeMetadata(output), + }); + const data = __expectNonNull(__expectObject(await parseBody(output.body, context)), "body"); + const doc = take(data, { + nextToken: __expectString, + roleList: _json, + }); + Object.assign(contents, 
doc); + return contents; +}; +export const de_ListAccountsCommand = async (output, context) => { + if (output.statusCode !== 200 && output.statusCode >= 300) { + return de_CommandError(output, context); + } + const contents = map({ + $metadata: deserializeMetadata(output), + }); + const data = __expectNonNull(__expectObject(await parseBody(output.body, context)), "body"); + const doc = take(data, { + accountList: _json, + nextToken: __expectString, + }); + Object.assign(contents, doc); + return contents; +}; +export const de_LogoutCommand = async (output, context) => { + if (output.statusCode !== 200 && output.statusCode >= 300) { + return de_CommandError(output, context); + } + const contents = map({ + $metadata: deserializeMetadata(output), + }); + await collectBody(output.body, context); + return contents; +}; +const de_CommandError = async (output, context) => { + const parsedOutput = { + ...output, + body: await parseErrorBody(output.body, context), + }; + const errorCode = loadRestJsonErrorCode(output, parsedOutput.body); + switch (errorCode) { + case "InvalidRequestException": + case "com.amazonaws.sso#InvalidRequestException": + throw await de_InvalidRequestExceptionRes(parsedOutput, context); + case "ResourceNotFoundException": + case "com.amazonaws.sso#ResourceNotFoundException": + throw await de_ResourceNotFoundExceptionRes(parsedOutput, context); + case "TooManyRequestsException": + case "com.amazonaws.sso#TooManyRequestsException": + throw await de_TooManyRequestsExceptionRes(parsedOutput, context); + case "UnauthorizedException": + case "com.amazonaws.sso#UnauthorizedException": + throw await de_UnauthorizedExceptionRes(parsedOutput, context); + default: + const parsedBody = parsedOutput.body; + return throwDefaultError({ + output, + parsedBody, + errorCode, + }); + } +}; +const throwDefaultError = withBaseException(__BaseException); +const de_InvalidRequestExceptionRes = async (parsedOutput, context) => { + const contents = map({}); + const data = 
parsedOutput.body; + const doc = take(data, { + message: __expectString, + }); + Object.assign(contents, doc); + const exception = new InvalidRequestException({ + $metadata: deserializeMetadata(parsedOutput), + ...contents, + }); + return __decorateServiceException(exception, parsedOutput.body); +}; +const de_ResourceNotFoundExceptionRes = async (parsedOutput, context) => { + const contents = map({}); + const data = parsedOutput.body; + const doc = take(data, { + message: __expectString, + }); + Object.assign(contents, doc); + const exception = new ResourceNotFoundException({ + $metadata: deserializeMetadata(parsedOutput), + ...contents, + }); + return __decorateServiceException(exception, parsedOutput.body); +}; +const de_TooManyRequestsExceptionRes = async (parsedOutput, context) => { + const contents = map({}); + const data = parsedOutput.body; + const doc = take(data, { + message: __expectString, + }); + Object.assign(contents, doc); + const exception = new TooManyRequestsException({ + $metadata: deserializeMetadata(parsedOutput), + ...contents, + }); + return __decorateServiceException(exception, parsedOutput.body); +}; +const de_UnauthorizedExceptionRes = async (parsedOutput, context) => { + const contents = map({}); + const data = parsedOutput.body; + const doc = take(data, { + message: __expectString, + }); + Object.assign(contents, doc); + const exception = new UnauthorizedException({ + $metadata: deserializeMetadata(parsedOutput), + ...contents, + }); + return __decorateServiceException(exception, parsedOutput.body); +}; +const deserializeMetadata = (output) => ({ + httpStatusCode: output.statusCode, + requestId: output.headers["x-amzn-requestid"] ?? output.headers["x-amzn-request-id"] ?? 
output.headers["x-amz-request-id"], + extendedRequestId: output.headers["x-amz-id-2"], + cfId: output.headers["x-amz-cf-id"], +}); +const collectBodyString = (streamBody, context) => collectBody(streamBody, context).then((body) => context.utf8Encoder(body)); +const _aI = "accountId"; +const _aT = "accessToken"; +const _ai = "account_id"; +const _mR = "maxResults"; +const _mr = "max_result"; +const _nT = "nextToken"; +const _nt = "next_token"; +const _rN = "roleName"; +const _rn = "role_name"; +const _xasbt = "x-amz-sso_bearer_token"; diff --git a/amplify/functions/fetchDocuments/node_modules/@aws-sdk/client-sso/dist-es/runtimeConfig.browser.js b/amplify/functions/fetchDocuments/node_modules/@aws-sdk/client-sso/dist-es/runtimeConfig.browser.js new file mode 100644 index 0000000..7c8fe85 --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@aws-sdk/client-sso/dist-es/runtimeConfig.browser.js @@ -0,0 +1,33 @@ +import packageInfo from "../package.json"; +import { Sha256 } from "@aws-crypto/sha256-browser"; +import { createDefaultUserAgentProvider } from "@aws-sdk/util-user-agent-browser"; +import { DEFAULT_USE_DUALSTACK_ENDPOINT, DEFAULT_USE_FIPS_ENDPOINT } from "@smithy/config-resolver"; +import { FetchHttpHandler as RequestHandler, streamCollector } from "@smithy/fetch-http-handler"; +import { invalidProvider } from "@smithy/invalid-dependency"; +import { calculateBodyLength } from "@smithy/util-body-length-browser"; +import { DEFAULT_MAX_ATTEMPTS, DEFAULT_RETRY_MODE } from "@smithy/util-retry"; +import { getRuntimeConfig as getSharedRuntimeConfig } from "./runtimeConfig.shared"; +import { loadConfigsForDefaultMode } from "@smithy/smithy-client"; +import { resolveDefaultsModeConfig } from "@smithy/util-defaults-mode-browser"; +export const getRuntimeConfig = (config) => { + const defaultsMode = resolveDefaultsModeConfig(config); + const defaultConfigProvider = () => defaultsMode().then(loadConfigsForDefaultMode); + const clientSharedValues = 
getSharedRuntimeConfig(config); + return { + ...clientSharedValues, + ...config, + runtime: "browser", + defaultsMode, + bodyLengthChecker: config?.bodyLengthChecker ?? calculateBodyLength, + defaultUserAgentProvider: config?.defaultUserAgentProvider ?? + createDefaultUserAgentProvider({ serviceId: clientSharedValues.serviceId, clientVersion: packageInfo.version }), + maxAttempts: config?.maxAttempts ?? DEFAULT_MAX_ATTEMPTS, + region: config?.region ?? invalidProvider("Region is missing"), + requestHandler: RequestHandler.create(config?.requestHandler ?? defaultConfigProvider), + retryMode: config?.retryMode ?? (async () => (await defaultConfigProvider()).retryMode || DEFAULT_RETRY_MODE), + sha256: config?.sha256 ?? Sha256, + streamCollector: config?.streamCollector ?? streamCollector, + useDualstackEndpoint: config?.useDualstackEndpoint ?? (() => Promise.resolve(DEFAULT_USE_DUALSTACK_ENDPOINT)), + useFipsEndpoint: config?.useFipsEndpoint ?? (() => Promise.resolve(DEFAULT_USE_FIPS_ENDPOINT)), + }; +}; diff --git a/amplify/functions/fetchDocuments/node_modules/@aws-sdk/client-sso/dist-es/runtimeConfig.js b/amplify/functions/fetchDocuments/node_modules/@aws-sdk/client-sso/dist-es/runtimeConfig.js new file mode 100644 index 0000000..d8440b7 --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@aws-sdk/client-sso/dist-es/runtimeConfig.js @@ -0,0 +1,46 @@ +import packageInfo from "../package.json"; +import { NODE_AUTH_SCHEME_PREFERENCE_OPTIONS, emitWarningIfUnsupportedVersion as awsCheckVersion } from "@aws-sdk/core"; +import { NODE_APP_ID_CONFIG_OPTIONS, createDefaultUserAgentProvider } from "@aws-sdk/util-user-agent-node"; +import { NODE_REGION_CONFIG_FILE_OPTIONS, NODE_REGION_CONFIG_OPTIONS, NODE_USE_DUALSTACK_ENDPOINT_CONFIG_OPTIONS, NODE_USE_FIPS_ENDPOINT_CONFIG_OPTIONS, } from "@smithy/config-resolver"; +import { Hash } from "@smithy/hash-node"; +import { NODE_MAX_ATTEMPT_CONFIG_OPTIONS, NODE_RETRY_MODE_CONFIG_OPTIONS } from 
"@smithy/middleware-retry"; +import { loadConfig as loadNodeConfig } from "@smithy/node-config-provider"; +import { NodeHttpHandler as RequestHandler, streamCollector } from "@smithy/node-http-handler"; +import { calculateBodyLength } from "@smithy/util-body-length-node"; +import { DEFAULT_RETRY_MODE } from "@smithy/util-retry"; +import { getRuntimeConfig as getSharedRuntimeConfig } from "./runtimeConfig.shared"; +import { loadConfigsForDefaultMode } from "@smithy/smithy-client"; +import { resolveDefaultsModeConfig } from "@smithy/util-defaults-mode-node"; +import { emitWarningIfUnsupportedVersion } from "@smithy/smithy-client"; +export const getRuntimeConfig = (config) => { + emitWarningIfUnsupportedVersion(process.version); + const defaultsMode = resolveDefaultsModeConfig(config); + const defaultConfigProvider = () => defaultsMode().then(loadConfigsForDefaultMode); + const clientSharedValues = getSharedRuntimeConfig(config); + awsCheckVersion(process.version); + const profileConfig = { profile: config?.profile }; + return { + ...clientSharedValues, + ...config, + runtime: "node", + defaultsMode, + authSchemePreference: config?.authSchemePreference ?? loadNodeConfig(NODE_AUTH_SCHEME_PREFERENCE_OPTIONS, profileConfig), + bodyLengthChecker: config?.bodyLengthChecker ?? calculateBodyLength, + defaultUserAgentProvider: config?.defaultUserAgentProvider ?? + createDefaultUserAgentProvider({ serviceId: clientSharedValues.serviceId, clientVersion: packageInfo.version }), + maxAttempts: config?.maxAttempts ?? loadNodeConfig(NODE_MAX_ATTEMPT_CONFIG_OPTIONS, config), + region: config?.region ?? + loadNodeConfig(NODE_REGION_CONFIG_OPTIONS, { ...NODE_REGION_CONFIG_FILE_OPTIONS, ...profileConfig }), + requestHandler: RequestHandler.create(config?.requestHandler ?? defaultConfigProvider), + retryMode: config?.retryMode ?? 
+ loadNodeConfig({ + ...NODE_RETRY_MODE_CONFIG_OPTIONS, + default: async () => (await defaultConfigProvider()).retryMode || DEFAULT_RETRY_MODE, + }, config), + sha256: config?.sha256 ?? Hash.bind(null, "sha256"), + streamCollector: config?.streamCollector ?? streamCollector, + useDualstackEndpoint: config?.useDualstackEndpoint ?? loadNodeConfig(NODE_USE_DUALSTACK_ENDPOINT_CONFIG_OPTIONS, profileConfig), + useFipsEndpoint: config?.useFipsEndpoint ?? loadNodeConfig(NODE_USE_FIPS_ENDPOINT_CONFIG_OPTIONS, profileConfig), + userAgentAppId: config?.userAgentAppId ?? loadNodeConfig(NODE_APP_ID_CONFIG_OPTIONS, profileConfig), + }; +}; diff --git a/amplify/functions/fetchDocuments/node_modules/@aws-sdk/client-sso/dist-es/runtimeConfig.native.js b/amplify/functions/fetchDocuments/node_modules/@aws-sdk/client-sso/dist-es/runtimeConfig.native.js new file mode 100644 index 0000000..0b54695 --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@aws-sdk/client-sso/dist-es/runtimeConfig.native.js @@ -0,0 +1,11 @@ +import { Sha256 } from "@aws-crypto/sha256-js"; +import { getRuntimeConfig as getBrowserRuntimeConfig } from "./runtimeConfig.browser"; +export const getRuntimeConfig = (config) => { + const browserDefaults = getBrowserRuntimeConfig(config); + return { + ...browserDefaults, + ...config, + runtime: "react-native", + sha256: config?.sha256 ?? 
Sha256, + }; +}; diff --git a/amplify/functions/fetchDocuments/node_modules/@aws-sdk/client-sso/dist-es/runtimeConfig.shared.js b/amplify/functions/fetchDocuments/node_modules/@aws-sdk/client-sso/dist-es/runtimeConfig.shared.js new file mode 100644 index 0000000..3dfac58 --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@aws-sdk/client-sso/dist-es/runtimeConfig.shared.js @@ -0,0 +1,36 @@ +import { AwsSdkSigV4Signer } from "@aws-sdk/core"; +import { NoAuthSigner } from "@smithy/core"; +import { NoOpLogger } from "@smithy/smithy-client"; +import { parseUrl } from "@smithy/url-parser"; +import { fromBase64, toBase64 } from "@smithy/util-base64"; +import { fromUtf8, toUtf8 } from "@smithy/util-utf8"; +import { defaultSSOHttpAuthSchemeProvider } from "./auth/httpAuthSchemeProvider"; +import { defaultEndpointResolver } from "./endpoint/endpointResolver"; +export const getRuntimeConfig = (config) => { + return { + apiVersion: "2019-06-10", + base64Decoder: config?.base64Decoder ?? fromBase64, + base64Encoder: config?.base64Encoder ?? toBase64, + disableHostPrefix: config?.disableHostPrefix ?? false, + endpointProvider: config?.endpointProvider ?? defaultEndpointResolver, + extensions: config?.extensions ?? [], + httpAuthSchemeProvider: config?.httpAuthSchemeProvider ?? defaultSSOHttpAuthSchemeProvider, + httpAuthSchemes: config?.httpAuthSchemes ?? [ + { + schemeId: "aws.auth#sigv4", + identityProvider: (ipc) => ipc.getIdentityProvider("aws.auth#sigv4"), + signer: new AwsSdkSigV4Signer(), + }, + { + schemeId: "smithy.api#noAuth", + identityProvider: (ipc) => ipc.getIdentityProvider("smithy.api#noAuth") || (async () => ({})), + signer: new NoAuthSigner(), + }, + ], + logger: config?.logger ?? new NoOpLogger(), + serviceId: config?.serviceId ?? "SSO", + urlParser: config?.urlParser ?? parseUrl, + utf8Decoder: config?.utf8Decoder ?? fromUtf8, + utf8Encoder: config?.utf8Encoder ?? 
toUtf8, + }; +}; diff --git a/amplify/functions/fetchDocuments/node_modules/@aws-sdk/client-sso/dist-es/runtimeExtensions.js b/amplify/functions/fetchDocuments/node_modules/@aws-sdk/client-sso/dist-es/runtimeExtensions.js new file mode 100644 index 0000000..5b29695 --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@aws-sdk/client-sso/dist-es/runtimeExtensions.js @@ -0,0 +1,9 @@ +import { getAwsRegionExtensionConfiguration, resolveAwsRegionExtensionConfiguration, } from "@aws-sdk/region-config-resolver"; +import { getHttpHandlerExtensionConfiguration, resolveHttpHandlerRuntimeConfig } from "@smithy/protocol-http"; +import { getDefaultExtensionConfiguration, resolveDefaultRuntimeConfig } from "@smithy/smithy-client"; +import { getHttpAuthExtensionConfiguration, resolveHttpAuthRuntimeConfig } from "./auth/httpAuthExtensionConfiguration"; +export const resolveRuntimeExtensions = (runtimeConfig, extensions) => { + const extensionConfiguration = Object.assign(getAwsRegionExtensionConfiguration(runtimeConfig), getDefaultExtensionConfiguration(runtimeConfig), getHttpHandlerExtensionConfiguration(runtimeConfig), getHttpAuthExtensionConfiguration(runtimeConfig)); + extensions.forEach((extension) => extension.configure(extensionConfiguration)); + return Object.assign(runtimeConfig, resolveAwsRegionExtensionConfiguration(extensionConfiguration), resolveDefaultRuntimeConfig(extensionConfiguration), resolveHttpHandlerRuntimeConfig(extensionConfiguration), resolveHttpAuthRuntimeConfig(extensionConfiguration)); +}; diff --git a/amplify/functions/fetchDocuments/node_modules/@aws-sdk/client-sso/dist-types/SSO.d.ts b/amplify/functions/fetchDocuments/node_modules/@aws-sdk/client-sso/dist-types/SSO.d.ts new file mode 100644 index 0000000..8500e0c --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@aws-sdk/client-sso/dist-types/SSO.d.ts @@ -0,0 +1,53 @@ +import { HttpHandlerOptions as __HttpHandlerOptions } from "@smithy/types"; +import { 
GetRoleCredentialsCommandInput, GetRoleCredentialsCommandOutput } from "./commands/GetRoleCredentialsCommand"; +import { ListAccountRolesCommandInput, ListAccountRolesCommandOutput } from "./commands/ListAccountRolesCommand"; +import { ListAccountsCommandInput, ListAccountsCommandOutput } from "./commands/ListAccountsCommand"; +import { LogoutCommandInput, LogoutCommandOutput } from "./commands/LogoutCommand"; +import { SSOClient } from "./SSOClient"; +export interface SSO { + /** + * @see {@link GetRoleCredentialsCommand} + */ + getRoleCredentials(args: GetRoleCredentialsCommandInput, options?: __HttpHandlerOptions): Promise; + getRoleCredentials(args: GetRoleCredentialsCommandInput, cb: (err: any, data?: GetRoleCredentialsCommandOutput) => void): void; + getRoleCredentials(args: GetRoleCredentialsCommandInput, options: __HttpHandlerOptions, cb: (err: any, data?: GetRoleCredentialsCommandOutput) => void): void; + /** + * @see {@link ListAccountRolesCommand} + */ + listAccountRoles(args: ListAccountRolesCommandInput, options?: __HttpHandlerOptions): Promise; + listAccountRoles(args: ListAccountRolesCommandInput, cb: (err: any, data?: ListAccountRolesCommandOutput) => void): void; + listAccountRoles(args: ListAccountRolesCommandInput, options: __HttpHandlerOptions, cb: (err: any, data?: ListAccountRolesCommandOutput) => void): void; + /** + * @see {@link ListAccountsCommand} + */ + listAccounts(args: ListAccountsCommandInput, options?: __HttpHandlerOptions): Promise; + listAccounts(args: ListAccountsCommandInput, cb: (err: any, data?: ListAccountsCommandOutput) => void): void; + listAccounts(args: ListAccountsCommandInput, options: __HttpHandlerOptions, cb: (err: any, data?: ListAccountsCommandOutput) => void): void; + /** + * @see {@link LogoutCommand} + */ + logout(args: LogoutCommandInput, options?: __HttpHandlerOptions): Promise; + logout(args: LogoutCommandInput, cb: (err: any, data?: LogoutCommandOutput) => void): void; + logout(args: LogoutCommandInput, 
options: __HttpHandlerOptions, cb: (err: any, data?: LogoutCommandOutput) => void): void; +} +/** + *

AWS IAM Identity Center (successor to AWS Single Sign-On) Portal is a web service that makes it easy for you to assign user access to + * IAM Identity Center resources such as the AWS access portal. Users can get AWS account applications and roles + * assigned to them and get federated into the application.

+ * + *

Although AWS Single Sign-On was renamed, the sso and + * identitystore API namespaces will continue to retain their original name for + * backward compatibility purposes. For more information, see IAM Identity Center rename.

+ *
+ *

This reference guide describes the IAM Identity Center Portal operations that you can call + * programatically and includes detailed information on data types and errors.

+ * + *

AWS provides SDKs that consist of libraries and sample code for various programming + * languages and platforms, such as Java, Ruby, .Net, iOS, or Android. The SDKs provide a + * convenient way to create programmatic access to IAM Identity Center and other AWS services. For more + * information about the AWS SDKs, including how to download and install them, see Tools for Amazon Web Services.

+ *
+ * @public + */ +export declare class SSO extends SSOClient implements SSO { +} diff --git a/amplify/functions/fetchDocuments/node_modules/@aws-sdk/client-sso/dist-types/SSOClient.d.ts b/amplify/functions/fetchDocuments/node_modules/@aws-sdk/client-sso/dist-types/SSOClient.d.ts new file mode 100644 index 0000000..acfb2fd --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@aws-sdk/client-sso/dist-types/SSOClient.d.ts @@ -0,0 +1,200 @@ +import { HostHeaderInputConfig, HostHeaderResolvedConfig } from "@aws-sdk/middleware-host-header"; +import { UserAgentInputConfig, UserAgentResolvedConfig } from "@aws-sdk/middleware-user-agent"; +import { RegionInputConfig, RegionResolvedConfig } from "@smithy/config-resolver"; +import { EndpointInputConfig, EndpointResolvedConfig } from "@smithy/middleware-endpoint"; +import { RetryInputConfig, RetryResolvedConfig } from "@smithy/middleware-retry"; +import { HttpHandlerUserInput as __HttpHandlerUserInput } from "@smithy/protocol-http"; +import { Client as __Client, DefaultsMode as __DefaultsMode, SmithyConfiguration as __SmithyConfiguration, SmithyResolvedConfiguration as __SmithyResolvedConfiguration } from "@smithy/smithy-client"; +import { BodyLengthCalculator as __BodyLengthCalculator, CheckOptionalClientConfig as __CheckOptionalClientConfig, ChecksumConstructor as __ChecksumConstructor, Decoder as __Decoder, Encoder as __Encoder, HashConstructor as __HashConstructor, HttpHandlerOptions as __HttpHandlerOptions, Logger as __Logger, Provider as __Provider, Provider, StreamCollector as __StreamCollector, UrlParser as __UrlParser, UserAgent as __UserAgent } from "@smithy/types"; +import { HttpAuthSchemeInputConfig, HttpAuthSchemeResolvedConfig } from "./auth/httpAuthSchemeProvider"; +import { GetRoleCredentialsCommandInput, GetRoleCredentialsCommandOutput } from "./commands/GetRoleCredentialsCommand"; +import { ListAccountRolesCommandInput, ListAccountRolesCommandOutput } from "./commands/ListAccountRolesCommand"; 
+import { ListAccountsCommandInput, ListAccountsCommandOutput } from "./commands/ListAccountsCommand"; +import { LogoutCommandInput, LogoutCommandOutput } from "./commands/LogoutCommand"; +import { ClientInputEndpointParameters, ClientResolvedEndpointParameters, EndpointParameters } from "./endpoint/EndpointParameters"; +import { RuntimeExtension, RuntimeExtensionsConfig } from "./runtimeExtensions"; +export { __Client }; +/** + * @public + */ +export type ServiceInputTypes = GetRoleCredentialsCommandInput | ListAccountRolesCommandInput | ListAccountsCommandInput | LogoutCommandInput; +/** + * @public + */ +export type ServiceOutputTypes = GetRoleCredentialsCommandOutput | ListAccountRolesCommandOutput | ListAccountsCommandOutput | LogoutCommandOutput; +/** + * @public + */ +export interface ClientDefaults extends Partial<__SmithyConfiguration<__HttpHandlerOptions>> { + /** + * The HTTP handler to use or its constructor options. Fetch in browser and Https in Nodejs. + */ + requestHandler?: __HttpHandlerUserInput; + /** + * A constructor for a class implementing the {@link @smithy/types#ChecksumConstructor} interface + * that computes the SHA-256 HMAC or checksum of a string or binary buffer. + * @internal + */ + sha256?: __ChecksumConstructor | __HashConstructor; + /** + * The function that will be used to convert strings into HTTP endpoints. + * @internal + */ + urlParser?: __UrlParser; + /** + * A function that can calculate the length of a request body. + * @internal + */ + bodyLengthChecker?: __BodyLengthCalculator; + /** + * A function that converts a stream into an array of bytes. + * @internal + */ + streamCollector?: __StreamCollector; + /** + * The function that will be used to convert a base64-encoded string to a byte array. + * @internal + */ + base64Decoder?: __Decoder; + /** + * The function that will be used to convert binary data to a base64-encoded string. 
+ * @internal + */ + base64Encoder?: __Encoder; + /** + * The function that will be used to convert a UTF8-encoded string to a byte array. + * @internal + */ + utf8Decoder?: __Decoder; + /** + * The function that will be used to convert binary data to a UTF-8 encoded string. + * @internal + */ + utf8Encoder?: __Encoder; + /** + * The runtime environment. + * @internal + */ + runtime?: string; + /** + * Disable dynamically changing the endpoint of the client based on the hostPrefix + * trait of an operation. + */ + disableHostPrefix?: boolean; + /** + * Unique service identifier. + * @internal + */ + serviceId?: string; + /** + * Enables IPv6/IPv4 dualstack endpoint. + */ + useDualstackEndpoint?: boolean | __Provider; + /** + * Enables FIPS compatible endpoints. + */ + useFipsEndpoint?: boolean | __Provider; + /** + * The AWS region to which this client will send requests + */ + region?: string | __Provider; + /** + * Setting a client profile is similar to setting a value for the + * AWS_PROFILE environment variable. Setting a profile on a client + * in code only affects the single client instance, unlike AWS_PROFILE. + * + * When set, and only for environments where an AWS configuration + * file exists, fields configurable by this file will be retrieved + * from the specified profile within that file. + * Conflicting code configuration and environment variables will + * still have higher priority. + * + * For client credential resolution that involves checking the AWS + * configuration file, the client's profile (this value) will be + * used unless a different profile is set in the credential + * provider options. + * + */ + profile?: string; + /** + * The provider populating default tracking information to be sent with `user-agent`, `x-amz-user-agent` header + * @internal + */ + defaultUserAgentProvider?: Provider<__UserAgent>; + /** + * Value for how many times a request will be made at most in case of retry. 
+ */ + maxAttempts?: number | __Provider; + /** + * Specifies which retry algorithm to use. + * @see https://docs.aws.amazon.com/AWSJavaScriptSDK/v3/latest/Package/-smithy-util-retry/Enum/RETRY_MODES/ + * + */ + retryMode?: string | __Provider; + /** + * Optional logger for logging debug/info/warn/error. + */ + logger?: __Logger; + /** + * Optional extensions + */ + extensions?: RuntimeExtension[]; + /** + * The {@link @smithy/smithy-client#DefaultsMode} that will be used to determine how certain default configuration options are resolved in the SDK. + */ + defaultsMode?: __DefaultsMode | __Provider<__DefaultsMode>; +} +/** + * @public + */ +export type SSOClientConfigType = Partial<__SmithyConfiguration<__HttpHandlerOptions>> & ClientDefaults & UserAgentInputConfig & RetryInputConfig & RegionInputConfig & HostHeaderInputConfig & EndpointInputConfig & HttpAuthSchemeInputConfig & ClientInputEndpointParameters; +/** + * @public + * + * The configuration interface of SSOClient class constructor that set the region, credentials and other options. + */ +export interface SSOClientConfig extends SSOClientConfigType { +} +/** + * @public + */ +export type SSOClientResolvedConfigType = __SmithyResolvedConfiguration<__HttpHandlerOptions> & Required & RuntimeExtensionsConfig & UserAgentResolvedConfig & RetryResolvedConfig & RegionResolvedConfig & HostHeaderResolvedConfig & EndpointResolvedConfig & HttpAuthSchemeResolvedConfig & ClientResolvedEndpointParameters; +/** + * @public + * + * The resolved configuration interface of SSOClient class. This is resolved and normalized from the {@link SSOClientConfig | constructor configuration interface}. + */ +export interface SSOClientResolvedConfig extends SSOClientResolvedConfigType { +} +/** + *

AWS IAM Identity Center (successor to AWS Single Sign-On) Portal is a web service that makes it easy for you to assign user access to + * IAM Identity Center resources such as the AWS access portal. Users can get AWS account applications and roles + * assigned to them and get federated into the application.

+ * + *

Although AWS Single Sign-On was renamed, the sso and + * identitystore API namespaces will continue to retain their original name for + * backward compatibility purposes. For more information, see IAM Identity Center rename.

+ *
+ *

This reference guide describes the IAM Identity Center Portal operations that you can call + * programatically and includes detailed information on data types and errors.

+ * + *

AWS provides SDKs that consist of libraries and sample code for various programming + * languages and platforms, such as Java, Ruby, .Net, iOS, or Android. The SDKs provide a + * convenient way to create programmatic access to IAM Identity Center and other AWS services. For more + * information about the AWS SDKs, including how to download and install them, see Tools for Amazon Web Services.

+ *
+ * @public + */ +export declare class SSOClient extends __Client<__HttpHandlerOptions, ServiceInputTypes, ServiceOutputTypes, SSOClientResolvedConfig> { + /** + * The resolved configuration of SSOClient class. This is resolved and normalized from the {@link SSOClientConfig | constructor configuration interface}. + */ + readonly config: SSOClientResolvedConfig; + constructor(...[configuration]: __CheckOptionalClientConfig); + /** + * Destroy underlying resources, like sockets. It's usually not necessary to do this. + * However in Node.js, it's best to explicitly shut down the client's agent when it is no longer needed. + * Otherwise, sockets might stay open for quite a long time before the server terminates them. + */ + destroy(): void; +} diff --git a/amplify/functions/fetchDocuments/node_modules/@aws-sdk/client-sso/dist-types/auth/httpAuthExtensionConfiguration.d.ts b/amplify/functions/fetchDocuments/node_modules/@aws-sdk/client-sso/dist-types/auth/httpAuthExtensionConfiguration.d.ts new file mode 100644 index 0000000..7e7ff4c --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@aws-sdk/client-sso/dist-types/auth/httpAuthExtensionConfiguration.d.ts @@ -0,0 +1,29 @@ +import { AwsCredentialIdentity, AwsCredentialIdentityProvider, HttpAuthScheme } from "@smithy/types"; +import { SSOHttpAuthSchemeProvider } from "./httpAuthSchemeProvider"; +/** + * @internal + */ +export interface HttpAuthExtensionConfiguration { + setHttpAuthScheme(httpAuthScheme: HttpAuthScheme): void; + httpAuthSchemes(): HttpAuthScheme[]; + setHttpAuthSchemeProvider(httpAuthSchemeProvider: SSOHttpAuthSchemeProvider): void; + httpAuthSchemeProvider(): SSOHttpAuthSchemeProvider; + setCredentials(credentials: AwsCredentialIdentity | AwsCredentialIdentityProvider): void; + credentials(): AwsCredentialIdentity | AwsCredentialIdentityProvider | undefined; +} +/** + * @internal + */ +export type HttpAuthRuntimeConfig = Partial<{ + httpAuthSchemes: HttpAuthScheme[]; + 
httpAuthSchemeProvider: SSOHttpAuthSchemeProvider; + credentials: AwsCredentialIdentity | AwsCredentialIdentityProvider; +}>; +/** + * @internal + */ +export declare const getHttpAuthExtensionConfiguration: (runtimeConfig: HttpAuthRuntimeConfig) => HttpAuthExtensionConfiguration; +/** + * @internal + */ +export declare const resolveHttpAuthRuntimeConfig: (config: HttpAuthExtensionConfiguration) => HttpAuthRuntimeConfig; diff --git a/amplify/functions/fetchDocuments/node_modules/@aws-sdk/client-sso/dist-types/auth/httpAuthSchemeProvider.d.ts b/amplify/functions/fetchDocuments/node_modules/@aws-sdk/client-sso/dist-types/auth/httpAuthSchemeProvider.d.ts new file mode 100644 index 0000000..bf3aad6 --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@aws-sdk/client-sso/dist-types/auth/httpAuthSchemeProvider.d.ts @@ -0,0 +1,75 @@ +import { AwsSdkSigV4AuthInputConfig, AwsSdkSigV4AuthResolvedConfig, AwsSdkSigV4PreviouslyResolved } from "@aws-sdk/core"; +import { HandlerExecutionContext, HttpAuthScheme, HttpAuthSchemeParameters, HttpAuthSchemeParametersProvider, HttpAuthSchemeProvider, Provider } from "@smithy/types"; +import { SSOClientResolvedConfig } from "../SSOClient"; +/** + * @internal + */ +export interface SSOHttpAuthSchemeParameters extends HttpAuthSchemeParameters { + region?: string; +} +/** + * @internal + */ +export interface SSOHttpAuthSchemeParametersProvider extends HttpAuthSchemeParametersProvider { +} +/** + * @internal + */ +export declare const defaultSSOHttpAuthSchemeParametersProvider: (config: SSOClientResolvedConfig, context: HandlerExecutionContext, input: object) => Promise; +/** + * @internal + */ +export interface SSOHttpAuthSchemeProvider extends HttpAuthSchemeProvider { +} +/** + * @internal + */ +export declare const defaultSSOHttpAuthSchemeProvider: SSOHttpAuthSchemeProvider; +/** + * @internal + */ +export interface HttpAuthSchemeInputConfig extends AwsSdkSigV4AuthInputConfig { + /** + * A comma-separated list of 
case-sensitive auth scheme names. + * An auth scheme name is a fully qualified auth scheme ID with the namespace prefix trimmed. + * For example, the auth scheme with ID aws.auth#sigv4 is named sigv4. + * @public + */ + authSchemePreference?: string[] | Provider; + /** + * Configuration of HttpAuthSchemes for a client which provides default identity providers and signers per auth scheme. + * @internal + */ + httpAuthSchemes?: HttpAuthScheme[]; + /** + * Configuration of an HttpAuthSchemeProvider for a client which resolves which HttpAuthScheme to use. + * @internal + */ + httpAuthSchemeProvider?: SSOHttpAuthSchemeProvider; +} +/** + * @internal + */ +export interface HttpAuthSchemeResolvedConfig extends AwsSdkSigV4AuthResolvedConfig { + /** + * A comma-separated list of case-sensitive auth scheme names. + * An auth scheme name is a fully qualified auth scheme ID with the namespace prefix trimmed. + * For example, the auth scheme with ID aws.auth#sigv4 is named sigv4. + * @public + */ + readonly authSchemePreference: Provider; + /** + * Configuration of HttpAuthSchemes for a client which provides default identity providers and signers per auth scheme. + * @internal + */ + readonly httpAuthSchemes: HttpAuthScheme[]; + /** + * Configuration of an HttpAuthSchemeProvider for a client which resolves which HttpAuthScheme to use. 
+ * @internal + */ + readonly httpAuthSchemeProvider: SSOHttpAuthSchemeProvider; +} +/** + * @internal + */ +export declare const resolveHttpAuthSchemeConfig: (config: T & HttpAuthSchemeInputConfig & AwsSdkSigV4PreviouslyResolved) => T & HttpAuthSchemeResolvedConfig; diff --git a/amplify/functions/fetchDocuments/node_modules/@aws-sdk/client-sso/dist-types/commands/GetRoleCredentialsCommand.d.ts b/amplify/functions/fetchDocuments/node_modules/@aws-sdk/client-sso/dist-types/commands/GetRoleCredentialsCommand.d.ts new file mode 100644 index 0000000..f306bd5 --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@aws-sdk/client-sso/dist-types/commands/GetRoleCredentialsCommand.d.ts @@ -0,0 +1,95 @@ +import { Command as $Command } from "@smithy/smithy-client"; +import { MetadataBearer as __MetadataBearer } from "@smithy/types"; +import { GetRoleCredentialsRequest, GetRoleCredentialsResponse } from "../models/models_0"; +import { ServiceInputTypes, ServiceOutputTypes, SSOClientResolvedConfig } from "../SSOClient"; +/** + * @public + */ +export type { __MetadataBearer }; +export { $Command }; +/** + * @public + * + * The input for {@link GetRoleCredentialsCommand}. + */ +export interface GetRoleCredentialsCommandInput extends GetRoleCredentialsRequest { +} +/** + * @public + * + * The output of {@link GetRoleCredentialsCommand}. + */ +export interface GetRoleCredentialsCommandOutput extends GetRoleCredentialsResponse, __MetadataBearer { +} +declare const GetRoleCredentialsCommand_base: { + new (input: GetRoleCredentialsCommandInput): import("@smithy/smithy-client").CommandImpl; + new (__0_0: GetRoleCredentialsCommandInput): import("@smithy/smithy-client").CommandImpl; + getEndpointParameterInstructions(): import("@smithy/middleware-endpoint").EndpointParameterInstructions; +}; +/** + *

Returns the STS short-term credentials for a given role name that is assigned to the + * user.

+ * @example + * Use a bare-bones client and the command you need to make an API call. + * ```javascript + * import { SSOClient, GetRoleCredentialsCommand } from "@aws-sdk/client-sso"; // ES Modules import + * // const { SSOClient, GetRoleCredentialsCommand } = require("@aws-sdk/client-sso"); // CommonJS import + * const client = new SSOClient(config); + * const input = { // GetRoleCredentialsRequest + * roleName: "STRING_VALUE", // required + * accountId: "STRING_VALUE", // required + * accessToken: "STRING_VALUE", // required + * }; + * const command = new GetRoleCredentialsCommand(input); + * const response = await client.send(command); + * // { // GetRoleCredentialsResponse + * // roleCredentials: { // RoleCredentials + * // accessKeyId: "STRING_VALUE", + * // secretAccessKey: "STRING_VALUE", + * // sessionToken: "STRING_VALUE", + * // expiration: Number("long"), + * // }, + * // }; + * + * ``` + * + * @param GetRoleCredentialsCommandInput - {@link GetRoleCredentialsCommandInput} + * @returns {@link GetRoleCredentialsCommandOutput} + * @see {@link GetRoleCredentialsCommandInput} for command's `input` shape. + * @see {@link GetRoleCredentialsCommandOutput} for command's `response` shape. + * @see {@link SSOClientResolvedConfig | config} for SSOClient's `config` shape. + * + * @throws {@link InvalidRequestException} (client fault) + *

Indicates that a problem occurred with the input to the request. For example, a required + * parameter might be missing or out of range.

+ * + * @throws {@link ResourceNotFoundException} (client fault) + *

The specified resource doesn't exist.

+ * + * @throws {@link TooManyRequestsException} (client fault) + *

Indicates that the request is being made too frequently and is more than what the server + * can handle.

+ * + * @throws {@link UnauthorizedException} (client fault) + *

Indicates that the request is not authorized. This can happen due to an invalid access + * token in the request.

+ * + * @throws {@link SSOServiceException} + *

Base exception class for all service exceptions from SSO service.

+ * + * + * @public + */ +export declare class GetRoleCredentialsCommand extends GetRoleCredentialsCommand_base { + /** @internal type navigation helper, not in runtime. */ + protected static __types: { + api: { + input: GetRoleCredentialsRequest; + output: GetRoleCredentialsResponse; + }; + sdk: { + input: GetRoleCredentialsCommandInput; + output: GetRoleCredentialsCommandOutput; + }; + }; +} diff --git a/amplify/functions/fetchDocuments/node_modules/@aws-sdk/client-sso/dist-types/commands/ListAccountRolesCommand.d.ts b/amplify/functions/fetchDocuments/node_modules/@aws-sdk/client-sso/dist-types/commands/ListAccountRolesCommand.d.ts new file mode 100644 index 0000000..8ce6a04 --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@aws-sdk/client-sso/dist-types/commands/ListAccountRolesCommand.d.ts @@ -0,0 +1,96 @@ +import { Command as $Command } from "@smithy/smithy-client"; +import { MetadataBearer as __MetadataBearer } from "@smithy/types"; +import { ListAccountRolesRequest, ListAccountRolesResponse } from "../models/models_0"; +import { ServiceInputTypes, ServiceOutputTypes, SSOClientResolvedConfig } from "../SSOClient"; +/** + * @public + */ +export type { __MetadataBearer }; +export { $Command }; +/** + * @public + * + * The input for {@link ListAccountRolesCommand}. + */ +export interface ListAccountRolesCommandInput extends ListAccountRolesRequest { +} +/** + * @public + * + * The output of {@link ListAccountRolesCommand}. + */ +export interface ListAccountRolesCommandOutput extends ListAccountRolesResponse, __MetadataBearer { +} +declare const ListAccountRolesCommand_base: { + new (input: ListAccountRolesCommandInput): import("@smithy/smithy-client").CommandImpl; + new (__0_0: ListAccountRolesCommandInput): import("@smithy/smithy-client").CommandImpl; + getEndpointParameterInstructions(): import("@smithy/middleware-endpoint").EndpointParameterInstructions; +}; +/** + *

Lists all roles that are assigned to the user for a given AWS account.

+ * @example + * Use a bare-bones client and the command you need to make an API call. + * ```javascript + * import { SSOClient, ListAccountRolesCommand } from "@aws-sdk/client-sso"; // ES Modules import + * // const { SSOClient, ListAccountRolesCommand } = require("@aws-sdk/client-sso"); // CommonJS import + * const client = new SSOClient(config); + * const input = { // ListAccountRolesRequest + * nextToken: "STRING_VALUE", + * maxResults: Number("int"), + * accessToken: "STRING_VALUE", // required + * accountId: "STRING_VALUE", // required + * }; + * const command = new ListAccountRolesCommand(input); + * const response = await client.send(command); + * // { // ListAccountRolesResponse + * // nextToken: "STRING_VALUE", + * // roleList: [ // RoleListType + * // { // RoleInfo + * // roleName: "STRING_VALUE", + * // accountId: "STRING_VALUE", + * // }, + * // ], + * // }; + * + * ``` + * + * @param ListAccountRolesCommandInput - {@link ListAccountRolesCommandInput} + * @returns {@link ListAccountRolesCommandOutput} + * @see {@link ListAccountRolesCommandInput} for command's `input` shape. + * @see {@link ListAccountRolesCommandOutput} for command's `response` shape. + * @see {@link SSOClientResolvedConfig | config} for SSOClient's `config` shape. + * + * @throws {@link InvalidRequestException} (client fault) + *

Indicates that a problem occurred with the input to the request. For example, a required + * parameter might be missing or out of range.

+ * + * @throws {@link ResourceNotFoundException} (client fault) + *

The specified resource doesn't exist.

+ * + * @throws {@link TooManyRequestsException} (client fault) + *

Indicates that the request is being made too frequently and is more than what the server + * can handle.

+ * + * @throws {@link UnauthorizedException} (client fault) + *

Indicates that the request is not authorized. This can happen due to an invalid access + * token in the request.

+ * + * @throws {@link SSOServiceException} + *

Base exception class for all service exceptions from SSO service.

+ * + * + * @public + */ +export declare class ListAccountRolesCommand extends ListAccountRolesCommand_base { + /** @internal type navigation helper, not in runtime. */ + protected static __types: { + api: { + input: ListAccountRolesRequest; + output: ListAccountRolesResponse; + }; + sdk: { + input: ListAccountRolesCommandInput; + output: ListAccountRolesCommandOutput; + }; + }; +} diff --git a/amplify/functions/fetchDocuments/node_modules/@aws-sdk/client-sso/dist-types/commands/ListAccountsCommand.d.ts b/amplify/functions/fetchDocuments/node_modules/@aws-sdk/client-sso/dist-types/commands/ListAccountsCommand.d.ts new file mode 100644 index 0000000..cffc47e --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@aws-sdk/client-sso/dist-types/commands/ListAccountsCommand.d.ts @@ -0,0 +1,98 @@ +import { Command as $Command } from "@smithy/smithy-client"; +import { MetadataBearer as __MetadataBearer } from "@smithy/types"; +import { ListAccountsRequest, ListAccountsResponse } from "../models/models_0"; +import { ServiceInputTypes, ServiceOutputTypes, SSOClientResolvedConfig } from "../SSOClient"; +/** + * @public + */ +export type { __MetadataBearer }; +export { $Command }; +/** + * @public + * + * The input for {@link ListAccountsCommand}. + */ +export interface ListAccountsCommandInput extends ListAccountsRequest { +} +/** + * @public + * + * The output of {@link ListAccountsCommand}. + */ +export interface ListAccountsCommandOutput extends ListAccountsResponse, __MetadataBearer { +} +declare const ListAccountsCommand_base: { + new (input: ListAccountsCommandInput): import("@smithy/smithy-client").CommandImpl; + new (__0_0: ListAccountsCommandInput): import("@smithy/smithy-client").CommandImpl; + getEndpointParameterInstructions(): import("@smithy/middleware-endpoint").EndpointParameterInstructions; +}; +/** + *

Lists all AWS accounts assigned to the user. These AWS accounts are assigned by the + * administrator of the account. For more information, see Assign User Access in the IAM Identity Center User Guide. This operation + * returns a paginated response.

+ * @example + * Use a bare-bones client and the command you need to make an API call. + * ```javascript + * import { SSOClient, ListAccountsCommand } from "@aws-sdk/client-sso"; // ES Modules import + * // const { SSOClient, ListAccountsCommand } = require("@aws-sdk/client-sso"); // CommonJS import + * const client = new SSOClient(config); + * const input = { // ListAccountsRequest + * nextToken: "STRING_VALUE", + * maxResults: Number("int"), + * accessToken: "STRING_VALUE", // required + * }; + * const command = new ListAccountsCommand(input); + * const response = await client.send(command); + * // { // ListAccountsResponse + * // nextToken: "STRING_VALUE", + * // accountList: [ // AccountListType + * // { // AccountInfo + * // accountId: "STRING_VALUE", + * // accountName: "STRING_VALUE", + * // emailAddress: "STRING_VALUE", + * // }, + * // ], + * // }; + * + * ``` + * + * @param ListAccountsCommandInput - {@link ListAccountsCommandInput} + * @returns {@link ListAccountsCommandOutput} + * @see {@link ListAccountsCommandInput} for command's `input` shape. + * @see {@link ListAccountsCommandOutput} for command's `response` shape. + * @see {@link SSOClientResolvedConfig | config} for SSOClient's `config` shape. + * + * @throws {@link InvalidRequestException} (client fault) + *

Indicates that a problem occurred with the input to the request. For example, a required + * parameter might be missing or out of range.

+ * + * @throws {@link ResourceNotFoundException} (client fault) + *

The specified resource doesn't exist.

+ * + * @throws {@link TooManyRequestsException} (client fault) + *

Indicates that the request is being made too frequently and is more than what the server + * can handle.

+ * + * @throws {@link UnauthorizedException} (client fault) + *

Indicates that the request is not authorized. This can happen due to an invalid access + * token in the request.

+ * + * @throws {@link SSOServiceException} + *

Base exception class for all service exceptions from SSO service.

+ * + * + * @public + */ +export declare class ListAccountsCommand extends ListAccountsCommand_base { + /** @internal type navigation helper, not in runtime. */ + protected static __types: { + api: { + input: ListAccountsRequest; + output: ListAccountsResponse; + }; + sdk: { + input: ListAccountsCommandInput; + output: ListAccountsCommandOutput; + }; + }; +} diff --git a/amplify/functions/fetchDocuments/node_modules/@aws-sdk/client-sso/dist-types/commands/LogoutCommand.d.ts b/amplify/functions/fetchDocuments/node_modules/@aws-sdk/client-sso/dist-types/commands/LogoutCommand.d.ts new file mode 100644 index 0000000..e85fe33 --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@aws-sdk/client-sso/dist-types/commands/LogoutCommand.d.ts @@ -0,0 +1,95 @@ +import { Command as $Command } from "@smithy/smithy-client"; +import { MetadataBearer as __MetadataBearer } from "@smithy/types"; +import { LogoutRequest } from "../models/models_0"; +import { ServiceInputTypes, ServiceOutputTypes, SSOClientResolvedConfig } from "../SSOClient"; +/** + * @public + */ +export type { __MetadataBearer }; +export { $Command }; +/** + * @public + * + * The input for {@link LogoutCommand}. + */ +export interface LogoutCommandInput extends LogoutRequest { +} +/** + * @public + * + * The output of {@link LogoutCommand}. + */ +export interface LogoutCommandOutput extends __MetadataBearer { +} +declare const LogoutCommand_base: { + new (input: LogoutCommandInput): import("@smithy/smithy-client").CommandImpl; + new (__0_0: LogoutCommandInput): import("@smithy/smithy-client").CommandImpl; + getEndpointParameterInstructions(): import("@smithy/middleware-endpoint").EndpointParameterInstructions; +}; +/** + *

Removes the locally stored SSO tokens from the client-side cache and sends an API call to + * the IAM Identity Center service to invalidate the corresponding server-side IAM Identity Center sign in + * session.

+ * + *

If a user uses IAM Identity Center to access the AWS CLI, the user’s IAM Identity Center sign in session is + * used to obtain an IAM session, as specified in the corresponding IAM Identity Center permission set. + * More specifically, IAM Identity Center assumes an IAM role in the target account on behalf of the user, + * and the corresponding temporary AWS credentials are returned to the client.

+ *

After user logout, any existing IAM role sessions that were created by using IAM Identity Center + * permission sets continue based on the duration configured in the permission set. + * For more information, see User + * authentications in the IAM Identity Center User + * Guide.

+ *
+ * @example + * Use a bare-bones client and the command you need to make an API call. + * ```javascript + * import { SSOClient, LogoutCommand } from "@aws-sdk/client-sso"; // ES Modules import + * // const { SSOClient, LogoutCommand } = require("@aws-sdk/client-sso"); // CommonJS import + * const client = new SSOClient(config); + * const input = { // LogoutRequest + * accessToken: "STRING_VALUE", // required + * }; + * const command = new LogoutCommand(input); + * const response = await client.send(command); + * // {}; + * + * ``` + * + * @param LogoutCommandInput - {@link LogoutCommandInput} + * @returns {@link LogoutCommandOutput} + * @see {@link LogoutCommandInput} for command's `input` shape. + * @see {@link LogoutCommandOutput} for command's `response` shape. + * @see {@link SSOClientResolvedConfig | config} for SSOClient's `config` shape. + * + * @throws {@link InvalidRequestException} (client fault) + *

Indicates that a problem occurred with the input to the request. For example, a required + * parameter might be missing or out of range.

+ * + * @throws {@link TooManyRequestsException} (client fault) + *

Indicates that the request is being made too frequently and is more than what the server + * can handle.

+ * + * @throws {@link UnauthorizedException} (client fault) + *

Indicates that the request is not authorized. This can happen due to an invalid access + * token in the request.

+ * + * @throws {@link SSOServiceException} + *

Base exception class for all service exceptions from SSO service.

+ * + * + * @public + */ +export declare class LogoutCommand extends LogoutCommand_base { + /** @internal type navigation helper, not in runtime. */ + protected static __types: { + api: { + input: LogoutRequest; + output: {}; + }; + sdk: { + input: LogoutCommandInput; + output: LogoutCommandOutput; + }; + }; +} diff --git a/amplify/functions/fetchDocuments/node_modules/@aws-sdk/client-sso/dist-types/commands/index.d.ts b/amplify/functions/fetchDocuments/node_modules/@aws-sdk/client-sso/dist-types/commands/index.d.ts new file mode 100644 index 0000000..0ab890d --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@aws-sdk/client-sso/dist-types/commands/index.d.ts @@ -0,0 +1,4 @@ +export * from "./GetRoleCredentialsCommand"; +export * from "./ListAccountRolesCommand"; +export * from "./ListAccountsCommand"; +export * from "./LogoutCommand"; diff --git a/amplify/functions/fetchDocuments/node_modules/@aws-sdk/client-sso/dist-types/endpoint/EndpointParameters.d.ts b/amplify/functions/fetchDocuments/node_modules/@aws-sdk/client-sso/dist-types/endpoint/EndpointParameters.d.ts new file mode 100644 index 0000000..23f42e3 --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@aws-sdk/client-sso/dist-types/endpoint/EndpointParameters.d.ts @@ -0,0 +1,40 @@ +import { Endpoint, EndpointParameters as __EndpointParameters, EndpointV2, Provider } from "@smithy/types"; +/** + * @public + */ +export interface ClientInputEndpointParameters { + region?: string | Provider; + useDualstackEndpoint?: boolean | Provider; + useFipsEndpoint?: boolean | Provider; + endpoint?: string | Provider | Endpoint | Provider | EndpointV2 | Provider; +} +export type ClientResolvedEndpointParameters = ClientInputEndpointParameters & { + defaultSigningName: string; +}; +export declare const resolveClientEndpointParameters: (options: T & ClientInputEndpointParameters) => T & ClientInputEndpointParameters & { + defaultSigningName: string; +}; +export declare const commonParams: { + 
readonly UseFIPS: { + readonly type: "builtInParams"; + readonly name: "useFipsEndpoint"; + }; + readonly Endpoint: { + readonly type: "builtInParams"; + readonly name: "endpoint"; + }; + readonly Region: { + readonly type: "builtInParams"; + readonly name: "region"; + }; + readonly UseDualStack: { + readonly type: "builtInParams"; + readonly name: "useDualstackEndpoint"; + }; +}; +export interface EndpointParameters extends __EndpointParameters { + Region?: string; + UseDualStack?: boolean; + UseFIPS?: boolean; + Endpoint?: string; +} diff --git a/amplify/functions/fetchDocuments/node_modules/@aws-sdk/client-sso/dist-types/endpoint/endpointResolver.d.ts b/amplify/functions/fetchDocuments/node_modules/@aws-sdk/client-sso/dist-types/endpoint/endpointResolver.d.ts new file mode 100644 index 0000000..70a8eae --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@aws-sdk/client-sso/dist-types/endpoint/endpointResolver.d.ts @@ -0,0 +1,5 @@ +import { EndpointV2, Logger } from "@smithy/types"; +import { EndpointParameters } from "./EndpointParameters"; +export declare const defaultEndpointResolver: (endpointParams: EndpointParameters, context?: { + logger?: Logger; +}) => EndpointV2; diff --git a/amplify/functions/fetchDocuments/node_modules/@aws-sdk/client-sso/dist-types/endpoint/ruleset.d.ts b/amplify/functions/fetchDocuments/node_modules/@aws-sdk/client-sso/dist-types/endpoint/ruleset.d.ts new file mode 100644 index 0000000..4b23899 --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@aws-sdk/client-sso/dist-types/endpoint/ruleset.d.ts @@ -0,0 +1,2 @@ +import { RuleSetObject } from "@smithy/types"; +export declare const ruleSet: RuleSetObject; diff --git a/amplify/functions/fetchDocuments/node_modules/@aws-sdk/client-sso/dist-types/extensionConfiguration.d.ts b/amplify/functions/fetchDocuments/node_modules/@aws-sdk/client-sso/dist-types/extensionConfiguration.d.ts new file mode 100644 index 0000000..0f76dd3 --- /dev/null +++ 
b/amplify/functions/fetchDocuments/node_modules/@aws-sdk/client-sso/dist-types/extensionConfiguration.d.ts @@ -0,0 +1,9 @@ +import { AwsRegionExtensionConfiguration } from "@aws-sdk/types"; +import { HttpHandlerExtensionConfiguration } from "@smithy/protocol-http"; +import { DefaultExtensionConfiguration } from "@smithy/types"; +import { HttpAuthExtensionConfiguration } from "./auth/httpAuthExtensionConfiguration"; +/** + * @internal + */ +export interface SSOExtensionConfiguration extends HttpHandlerExtensionConfiguration, DefaultExtensionConfiguration, AwsRegionExtensionConfiguration, HttpAuthExtensionConfiguration { +} diff --git a/amplify/functions/fetchDocuments/node_modules/@aws-sdk/client-sso/dist-types/index.d.ts b/amplify/functions/fetchDocuments/node_modules/@aws-sdk/client-sso/dist-types/index.d.ts new file mode 100644 index 0000000..3b3bcea --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@aws-sdk/client-sso/dist-types/index.d.ts @@ -0,0 +1,29 @@ +/** + *

AWS IAM Identity Center (successor to AWS Single Sign-On) Portal is a web service that makes it easy for you to assign user access to + * IAM Identity Center resources such as the AWS access portal. Users can get AWS account applications and roles + * assigned to them and get federated into the application.

+ * + *

Although AWS Single Sign-On was renamed, the sso and + * identitystore API namespaces will continue to retain their original name for + * backward compatibility purposes. For more information, see IAM Identity Center rename.

+ *
+ *

This reference guide describes the IAM Identity Center Portal operations that you can call + * programatically and includes detailed information on data types and errors.

+ * + *

AWS provides SDKs that consist of libraries and sample code for various programming + * languages and platforms, such as Java, Ruby, .Net, iOS, or Android. The SDKs provide a + * convenient way to create programmatic access to IAM Identity Center and other AWS services. For more + * information about the AWS SDKs, including how to download and install them, see Tools for Amazon Web Services.

+ *
+ * + * @packageDocumentation + */ +export * from "./SSOClient"; +export * from "./SSO"; +export { ClientInputEndpointParameters } from "./endpoint/EndpointParameters"; +export type { RuntimeExtension } from "./runtimeExtensions"; +export type { SSOExtensionConfiguration } from "./extensionConfiguration"; +export * from "./commands"; +export * from "./pagination"; +export * from "./models"; +export { SSOServiceException } from "./models/SSOServiceException"; diff --git a/amplify/functions/fetchDocuments/node_modules/@aws-sdk/client-sso/dist-types/models/SSOServiceException.d.ts b/amplify/functions/fetchDocuments/node_modules/@aws-sdk/client-sso/dist-types/models/SSOServiceException.d.ts new file mode 100644 index 0000000..9172f1a --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@aws-sdk/client-sso/dist-types/models/SSOServiceException.d.ts @@ -0,0 +1,14 @@ +import { ServiceException as __ServiceException, ServiceExceptionOptions as __ServiceExceptionOptions } from "@smithy/smithy-client"; +export type { __ServiceExceptionOptions }; +export { __ServiceException }; +/** + * @public + * + * Base exception class for all service exceptions from SSO service. 
+ */ +export declare class SSOServiceException extends __ServiceException { + /** + * @internal + */ + constructor(options: __ServiceExceptionOptions); +} diff --git a/amplify/functions/fetchDocuments/node_modules/@aws-sdk/client-sso/dist-types/models/index.d.ts b/amplify/functions/fetchDocuments/node_modules/@aws-sdk/client-sso/dist-types/models/index.d.ts new file mode 100644 index 0000000..09c5d6e --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@aws-sdk/client-sso/dist-types/models/index.d.ts @@ -0,0 +1 @@ +export * from "./models_0"; diff --git a/amplify/functions/fetchDocuments/node_modules/@aws-sdk/client-sso/dist-types/models/models_0.d.ts b/amplify/functions/fetchDocuments/node_modules/@aws-sdk/client-sso/dist-types/models/models_0.d.ts new file mode 100644 index 0000000..0d40fa7 --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@aws-sdk/client-sso/dist-types/models/models_0.d.ts @@ -0,0 +1,266 @@ +import { ExceptionOptionType as __ExceptionOptionType } from "@smithy/smithy-client"; +import { SSOServiceException as __BaseException } from "./SSOServiceException"; +/** + *

Provides information about your AWS account.

+ * @public + */ +export interface AccountInfo { + /** + *

The identifier of the AWS account that is assigned to the user.

+ * @public + */ + accountId?: string | undefined; + /** + *

The display name of the AWS account that is assigned to the user.

+ * @public + */ + accountName?: string | undefined; + /** + *

The email address of the AWS account that is assigned to the user.

+ * @public + */ + emailAddress?: string | undefined; +} +/** + * @public + */ +export interface GetRoleCredentialsRequest { + /** + *

The friendly name of the role that is assigned to the user.

+ * @public + */ + roleName: string | undefined; + /** + *

The identifier for the AWS account that is assigned to the user.

+ * @public + */ + accountId: string | undefined; + /** + *

The token issued by the CreateToken API call. For more information, see + * CreateToken in the IAM Identity Center OIDC API Reference Guide.

+ * @public + */ + accessToken: string | undefined; +} +/** + *

Provides information about the role credentials that are assigned to the user.

+ * @public + */ +export interface RoleCredentials { + /** + *

The identifier used for the temporary security credentials. For more information, see + * Using Temporary Security Credentials to Request Access to AWS Resources in the + * AWS IAM User Guide.

+ * @public + */ + accessKeyId?: string | undefined; + /** + *

The key that is used to sign the request. For more information, see Using Temporary Security Credentials to Request Access to AWS Resources in the + * AWS IAM User Guide.

+ * @public + */ + secretAccessKey?: string | undefined; + /** + *

The token used for temporary credentials. For more information, see Using Temporary Security Credentials to Request Access to AWS Resources in the + * AWS IAM User Guide.

+ * @public + */ + sessionToken?: string | undefined; + /** + *

The date on which temporary security credentials expire.

+ * @public + */ + expiration?: number | undefined; +} +/** + * @public + */ +export interface GetRoleCredentialsResponse { + /** + *

The credentials for the role that is assigned to the user.

+ * @public + */ + roleCredentials?: RoleCredentials | undefined; +} +/** + *

Indicates that a problem occurred with the input to the request. For example, a required + * parameter might be missing or out of range.

+ * @public + */ +export declare class InvalidRequestException extends __BaseException { + readonly name: "InvalidRequestException"; + readonly $fault: "client"; + /** + * @internal + */ + constructor(opts: __ExceptionOptionType); +} +/** + *

The specified resource doesn't exist.

+ * @public + */ +export declare class ResourceNotFoundException extends __BaseException { + readonly name: "ResourceNotFoundException"; + readonly $fault: "client"; + /** + * @internal + */ + constructor(opts: __ExceptionOptionType); +} +/** + *

Indicates that the request is being made too frequently and is more than what the server + * can handle.

+ * @public + */ +export declare class TooManyRequestsException extends __BaseException { + readonly name: "TooManyRequestsException"; + readonly $fault: "client"; + /** + * @internal + */ + constructor(opts: __ExceptionOptionType); +} +/** + *

Indicates that the request is not authorized. This can happen due to an invalid access + * token in the request.

+ * @public + */ +export declare class UnauthorizedException extends __BaseException { + readonly name: "UnauthorizedException"; + readonly $fault: "client"; + /** + * @internal + */ + constructor(opts: __ExceptionOptionType); +} +/** + * @public + */ +export interface ListAccountRolesRequest { + /** + *

The page token from the previous response output when you request subsequent pages.

+ * @public + */ + nextToken?: string | undefined; + /** + *

The number of items that clients can request per page.

+ * @public + */ + maxResults?: number | undefined; + /** + *

The token issued by the CreateToken API call. For more information, see + * CreateToken in the IAM Identity Center OIDC API Reference Guide.

+ * @public + */ + accessToken: string | undefined; + /** + *

The identifier for the AWS account that is assigned to the user.

+ * @public + */ + accountId: string | undefined; +} +/** + *

Provides information about the role that is assigned to the user.

+ * @public + */ +export interface RoleInfo { + /** + *

The friendly name of the role that is assigned to the user.

+ * @public + */ + roleName?: string | undefined; + /** + *

The identifier of the AWS account assigned to the user.

+ * @public + */ + accountId?: string | undefined; +} +/** + * @public + */ +export interface ListAccountRolesResponse { + /** + *

The page token client that is used to retrieve the list of accounts.

+ * @public + */ + nextToken?: string | undefined; + /** + *

A paginated response with the list of roles and the next token if more results are + * available.

+ * @public + */ + roleList?: RoleInfo[] | undefined; +} +/** + * @public + */ +export interface ListAccountsRequest { + /** + *

(Optional) When requesting subsequent pages, this is the page token from the previous + * response output.

+ * @public + */ + nextToken?: string | undefined; + /** + *

This is the number of items clients can request per page.

+ * @public + */ + maxResults?: number | undefined; + /** + *

The token issued by the CreateToken API call. For more information, see + * CreateToken in the IAM Identity Center OIDC API Reference Guide.

+ * @public + */ + accessToken: string | undefined; +} +/** + * @public + */ +export interface ListAccountsResponse { + /** + *

The page token client that is used to retrieve the list of accounts.

+ * @public + */ + nextToken?: string | undefined; + /** + *

A paginated response with the list of account information and the next token if more + * results are available.

+ * @public + */ + accountList?: AccountInfo[] | undefined; +} +/** + * @public + */ +export interface LogoutRequest { + /** + *

The token issued by the CreateToken API call. For more information, see + * CreateToken in the IAM Identity Center OIDC API Reference Guide.

+ * @public + */ + accessToken: string | undefined; +} +/** + * @internal + */ +export declare const GetRoleCredentialsRequestFilterSensitiveLog: (obj: GetRoleCredentialsRequest) => any; +/** + * @internal + */ +export declare const RoleCredentialsFilterSensitiveLog: (obj: RoleCredentials) => any; +/** + * @internal + */ +export declare const GetRoleCredentialsResponseFilterSensitiveLog: (obj: GetRoleCredentialsResponse) => any; +/** + * @internal + */ +export declare const ListAccountRolesRequestFilterSensitiveLog: (obj: ListAccountRolesRequest) => any; +/** + * @internal + */ +export declare const ListAccountsRequestFilterSensitiveLog: (obj: ListAccountsRequest) => any; +/** + * @internal + */ +export declare const LogoutRequestFilterSensitiveLog: (obj: LogoutRequest) => any; diff --git a/amplify/functions/fetchDocuments/node_modules/@aws-sdk/client-sso/dist-types/pagination/Interfaces.d.ts b/amplify/functions/fetchDocuments/node_modules/@aws-sdk/client-sso/dist-types/pagination/Interfaces.d.ts new file mode 100644 index 0000000..81addca --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@aws-sdk/client-sso/dist-types/pagination/Interfaces.d.ts @@ -0,0 +1,8 @@ +import { PaginationConfiguration } from "@smithy/types"; +import { SSOClient } from "../SSOClient"; +/** + * @public + */ +export interface SSOPaginationConfiguration extends PaginationConfiguration { + client: SSOClient; +} diff --git a/amplify/functions/fetchDocuments/node_modules/@aws-sdk/client-sso/dist-types/pagination/ListAccountRolesPaginator.d.ts b/amplify/functions/fetchDocuments/node_modules/@aws-sdk/client-sso/dist-types/pagination/ListAccountRolesPaginator.d.ts new file mode 100644 index 0000000..fa309d4 --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@aws-sdk/client-sso/dist-types/pagination/ListAccountRolesPaginator.d.ts @@ -0,0 +1,7 @@ +import { Paginator } from "@smithy/types"; +import { ListAccountRolesCommandInput, ListAccountRolesCommandOutput } from 
"../commands/ListAccountRolesCommand"; +import { SSOPaginationConfiguration } from "./Interfaces"; +/** + * @public + */ +export declare const paginateListAccountRoles: (config: SSOPaginationConfiguration, input: ListAccountRolesCommandInput, ...rest: any[]) => Paginator; diff --git a/amplify/functions/fetchDocuments/node_modules/@aws-sdk/client-sso/dist-types/pagination/ListAccountsPaginator.d.ts b/amplify/functions/fetchDocuments/node_modules/@aws-sdk/client-sso/dist-types/pagination/ListAccountsPaginator.d.ts new file mode 100644 index 0000000..21c2559 --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@aws-sdk/client-sso/dist-types/pagination/ListAccountsPaginator.d.ts @@ -0,0 +1,7 @@ +import { Paginator } from "@smithy/types"; +import { ListAccountsCommandInput, ListAccountsCommandOutput } from "../commands/ListAccountsCommand"; +import { SSOPaginationConfiguration } from "./Interfaces"; +/** + * @public + */ +export declare const paginateListAccounts: (config: SSOPaginationConfiguration, input: ListAccountsCommandInput, ...rest: any[]) => Paginator; diff --git a/amplify/functions/fetchDocuments/node_modules/@aws-sdk/client-sso/dist-types/pagination/index.d.ts b/amplify/functions/fetchDocuments/node_modules/@aws-sdk/client-sso/dist-types/pagination/index.d.ts new file mode 100644 index 0000000..1e7866f --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@aws-sdk/client-sso/dist-types/pagination/index.d.ts @@ -0,0 +1,3 @@ +export * from "./Interfaces"; +export * from "./ListAccountRolesPaginator"; +export * from "./ListAccountsPaginator"; diff --git a/amplify/functions/fetchDocuments/node_modules/@aws-sdk/client-sso/dist-types/protocols/Aws_restJson1.d.ts b/amplify/functions/fetchDocuments/node_modules/@aws-sdk/client-sso/dist-types/protocols/Aws_restJson1.d.ts new file mode 100644 index 0000000..02d97aa --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@aws-sdk/client-sso/dist-types/protocols/Aws_restJson1.d.ts @@ 
-0,0 +1,38 @@ +import { HttpRequest as __HttpRequest, HttpResponse as __HttpResponse } from "@smithy/protocol-http"; +import { SerdeContext as __SerdeContext } from "@smithy/types"; +import { GetRoleCredentialsCommandInput, GetRoleCredentialsCommandOutput } from "../commands/GetRoleCredentialsCommand"; +import { ListAccountRolesCommandInput, ListAccountRolesCommandOutput } from "../commands/ListAccountRolesCommand"; +import { ListAccountsCommandInput, ListAccountsCommandOutput } from "../commands/ListAccountsCommand"; +import { LogoutCommandInput, LogoutCommandOutput } from "../commands/LogoutCommand"; +/** + * serializeAws_restJson1GetRoleCredentialsCommand + */ +export declare const se_GetRoleCredentialsCommand: (input: GetRoleCredentialsCommandInput, context: __SerdeContext) => Promise<__HttpRequest>; +/** + * serializeAws_restJson1ListAccountRolesCommand + */ +export declare const se_ListAccountRolesCommand: (input: ListAccountRolesCommandInput, context: __SerdeContext) => Promise<__HttpRequest>; +/** + * serializeAws_restJson1ListAccountsCommand + */ +export declare const se_ListAccountsCommand: (input: ListAccountsCommandInput, context: __SerdeContext) => Promise<__HttpRequest>; +/** + * serializeAws_restJson1LogoutCommand + */ +export declare const se_LogoutCommand: (input: LogoutCommandInput, context: __SerdeContext) => Promise<__HttpRequest>; +/** + * deserializeAws_restJson1GetRoleCredentialsCommand + */ +export declare const de_GetRoleCredentialsCommand: (output: __HttpResponse, context: __SerdeContext) => Promise; +/** + * deserializeAws_restJson1ListAccountRolesCommand + */ +export declare const de_ListAccountRolesCommand: (output: __HttpResponse, context: __SerdeContext) => Promise; +/** + * deserializeAws_restJson1ListAccountsCommand + */ +export declare const de_ListAccountsCommand: (output: __HttpResponse, context: __SerdeContext) => Promise; +/** + * deserializeAws_restJson1LogoutCommand + */ +export declare const de_LogoutCommand: (output: 
__HttpResponse, context: __SerdeContext) => Promise; diff --git a/amplify/functions/fetchDocuments/node_modules/@aws-sdk/client-sso/dist-types/runtimeConfig.browser.d.ts b/amplify/functions/fetchDocuments/node_modules/@aws-sdk/client-sso/dist-types/runtimeConfig.browser.d.ts new file mode 100644 index 0000000..c593515 --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@aws-sdk/client-sso/dist-types/runtimeConfig.browser.d.ts @@ -0,0 +1,57 @@ +import { FetchHttpHandler as RequestHandler } from "@smithy/fetch-http-handler"; +import { SSOClientConfig } from "./SSOClient"; +/** + * @internal + */ +export declare const getRuntimeConfig: (config: SSOClientConfig) => { + runtime: string; + defaultsMode: import("@smithy/types").Provider; + bodyLengthChecker: import("@smithy/types").BodyLengthCalculator; + defaultUserAgentProvider: (config?: import("@aws-sdk/util-user-agent-browser").PreviouslyResolved | undefined) => Promise; + maxAttempts: number | import("@smithy/types").Provider; + region: string | import("@smithy/types").Provider; + requestHandler: import("@smithy/protocol-http").HttpHandler | RequestHandler; + retryMode: string | import("@smithy/types").Provider; + sha256: import("@smithy/types").HashConstructor; + streamCollector: import("@smithy/types").StreamCollector; + useDualstackEndpoint: boolean | import("@smithy/types").Provider; + useFipsEndpoint: boolean | import("@smithy/types").Provider; + apiVersion: string; + cacheMiddleware?: boolean | undefined; + urlParser: import("@smithy/types").UrlParser; + base64Decoder: import("@smithy/types").Decoder; + base64Encoder: (_input: string | Uint8Array) => string; + utf8Decoder: import("@smithy/types").Decoder; + utf8Encoder: (input: string | Uint8Array) => string; + disableHostPrefix: boolean; + serviceId: string; + profile?: string | undefined; + logger: import("@smithy/types").Logger; + extensions: import("./runtimeExtensions").RuntimeExtension[]; + customUserAgent?: string | 
import("@smithy/types").UserAgent | undefined; + userAgentAppId?: string | import("@smithy/types").Provider | undefined; + retryStrategy?: import("@smithy/types").RetryStrategy | import("@smithy/types").RetryStrategyV2 | undefined; + endpoint?: ((string | import("@smithy/types").Endpoint | import("@smithy/types").Provider | import("@smithy/types").EndpointV2 | import("@smithy/types").Provider) & (string | import("@smithy/types").Provider | import("@smithy/types").Endpoint | import("@smithy/types").Provider | import("@smithy/types").EndpointV2 | import("@smithy/types").Provider)) | undefined; + endpointProvider: (endpointParams: import("./endpoint/EndpointParameters").EndpointParameters, context?: { + logger?: import("@smithy/types").Logger | undefined; + }) => import("@smithy/types").EndpointV2; + tls?: boolean | undefined; + serviceConfiguredEndpoint?: undefined; + authSchemePreference?: string[] | import("@smithy/types").Provider | undefined; + httpAuthSchemes: import("@smithy/types").HttpAuthScheme[] | ({ + schemeId: string; + identityProvider: (ipc: import("@smithy/types").IdentityProviderConfig) => import("@smithy/types").IdentityProvider | undefined; + signer: import("@aws-sdk/core").AwsSdkSigV4Signer; + } | { + schemeId: string; + identityProvider: (ipc: import("@smithy/types").IdentityProviderConfig) => import("@smithy/types").IdentityProvider | (() => Promise<{}>); + signer: import("@smithy/core").NoAuthSigner; + })[]; + httpAuthSchemeProvider: import("./auth/httpAuthSchemeProvider").SSOHttpAuthSchemeProvider; + credentials?: import("@smithy/types").AwsCredentialIdentity | import("@smithy/types").AwsCredentialIdentityProvider | undefined; + signer?: import("@smithy/types").RequestSigner | ((authScheme?: import("@smithy/types").AuthScheme | undefined) => Promise) | undefined; + signingEscapePath?: boolean | undefined; + systemClockOffset?: number | undefined; + signingRegion?: string | undefined; + signerConstructor?: (new (options: 
import("@smithy/signature-v4").SignatureV4Init & import("@smithy/signature-v4").SignatureV4CryptoInit) => import("@smithy/types").RequestSigner) | undefined; +}; diff --git a/amplify/functions/fetchDocuments/node_modules/@aws-sdk/client-sso/dist-types/runtimeConfig.d.ts b/amplify/functions/fetchDocuments/node_modules/@aws-sdk/client-sso/dist-types/runtimeConfig.d.ts new file mode 100644 index 0000000..4194fd5 --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@aws-sdk/client-sso/dist-types/runtimeConfig.d.ts @@ -0,0 +1,57 @@ +import { NodeHttpHandler as RequestHandler } from "@smithy/node-http-handler"; +import { SSOClientConfig } from "./SSOClient"; +/** + * @internal + */ +export declare const getRuntimeConfig: (config: SSOClientConfig) => { + runtime: string; + defaultsMode: import("@smithy/types").Provider; + authSchemePreference: string[] | import("@smithy/types").Provider; + bodyLengthChecker: import("@smithy/types").BodyLengthCalculator; + defaultUserAgentProvider: (config?: import("@aws-sdk/util-user-agent-node").PreviouslyResolved | undefined) => Promise; + maxAttempts: number | import("@smithy/types").Provider; + region: string | import("@smithy/types").Provider; + requestHandler: RequestHandler | import("@smithy/protocol-http").HttpHandler; + retryMode: string | import("@smithy/types").Provider; + sha256: import("@smithy/types").HashConstructor; + streamCollector: import("@smithy/types").StreamCollector; + useDualstackEndpoint: boolean | import("@smithy/types").Provider; + useFipsEndpoint: boolean | import("@smithy/types").Provider; + userAgentAppId: string | import("@smithy/types").Provider; + apiVersion: string; + cacheMiddleware?: boolean | undefined; + urlParser: import("@smithy/types").UrlParser; + base64Decoder: import("@smithy/types").Decoder; + base64Encoder: (_input: string | Uint8Array) => string; + utf8Decoder: import("@smithy/types").Decoder; + utf8Encoder: (input: string | Uint8Array) => string; + disableHostPrefix: boolean; + 
serviceId: string; + profile?: string | undefined; + logger: import("@smithy/types").Logger; + extensions: import("./runtimeExtensions").RuntimeExtension[]; + customUserAgent?: string | import("@smithy/types").UserAgent | undefined; + retryStrategy?: import("@smithy/types").RetryStrategy | import("@smithy/types").RetryStrategyV2 | undefined; + endpoint?: ((string | import("@smithy/types").Endpoint | import("@smithy/types").Provider | import("@smithy/types").EndpointV2 | import("@smithy/types").Provider) & (string | import("@smithy/types").Provider | import("@smithy/types").Endpoint | import("@smithy/types").Provider | import("@smithy/types").EndpointV2 | import("@smithy/types").Provider)) | undefined; + endpointProvider: (endpointParams: import("./endpoint/EndpointParameters").EndpointParameters, context?: { + logger?: import("@smithy/types").Logger | undefined; + }) => import("@smithy/types").EndpointV2; + tls?: boolean | undefined; + serviceConfiguredEndpoint?: undefined; + httpAuthSchemes: import("@smithy/types").HttpAuthScheme[] | ({ + schemeId: string; + identityProvider: (ipc: import("@smithy/types").IdentityProviderConfig) => import("@smithy/types").IdentityProvider | undefined; + signer: import("@aws-sdk/core").AwsSdkSigV4Signer; + } | { + schemeId: string; + identityProvider: (ipc: import("@smithy/types").IdentityProviderConfig) => import("@smithy/types").IdentityProvider | (() => Promise<{}>); + signer: import("@smithy/core").NoAuthSigner; + })[]; + httpAuthSchemeProvider: import("./auth/httpAuthSchemeProvider").SSOHttpAuthSchemeProvider; + credentials?: import("@smithy/types").AwsCredentialIdentity | import("@smithy/types").AwsCredentialIdentityProvider | undefined; + signer?: import("@smithy/types").RequestSigner | ((authScheme?: import("@smithy/types").AuthScheme | undefined) => Promise) | undefined; + signingEscapePath?: boolean | undefined; + systemClockOffset?: number | undefined; + signingRegion?: string | undefined; + signerConstructor?: (new 
(options: import("@smithy/signature-v4").SignatureV4Init & import("@smithy/signature-v4").SignatureV4CryptoInit) => import("@smithy/types").RequestSigner) | undefined; +}; diff --git a/amplify/functions/fetchDocuments/node_modules/@aws-sdk/client-sso/dist-types/runtimeConfig.native.d.ts b/amplify/functions/fetchDocuments/node_modules/@aws-sdk/client-sso/dist-types/runtimeConfig.native.d.ts new file mode 100644 index 0000000..38c1d33 --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@aws-sdk/client-sso/dist-types/runtimeConfig.native.d.ts @@ -0,0 +1,56 @@ +import { SSOClientConfig } from "./SSOClient"; +/** + * @internal + */ +export declare const getRuntimeConfig: (config: SSOClientConfig) => { + runtime: string; + sha256: import("@smithy/types").HashConstructor; + requestHandler: import("@smithy/types").NodeHttpHandlerOptions | import("@smithy/types").FetchHttpHandlerOptions | Record | import("@smithy/protocol-http").HttpHandler | import("@smithy/fetch-http-handler").FetchHttpHandler; + apiVersion: string; + cacheMiddleware?: boolean | undefined; + urlParser: import("@smithy/types").UrlParser; + bodyLengthChecker: import("@smithy/types").BodyLengthCalculator; + streamCollector: import("@smithy/types").StreamCollector; + base64Decoder: import("@smithy/types").Decoder; + base64Encoder: (_input: string | Uint8Array) => string; + utf8Decoder: import("@smithy/types").Decoder; + utf8Encoder: (input: string | Uint8Array) => string; + disableHostPrefix: boolean; + serviceId: string; + useDualstackEndpoint: boolean | import("@smithy/types").Provider; + useFipsEndpoint: boolean | import("@smithy/types").Provider; + region: string | import("@smithy/types").Provider; + profile?: string | undefined; + defaultUserAgentProvider: (config?: import("@aws-sdk/util-user-agent-browser").PreviouslyResolved | undefined) => Promise; + maxAttempts: number | import("@smithy/types").Provider; + retryMode: string | import("@smithy/types").Provider; + logger: 
import("@smithy/types").Logger; + extensions: import("./runtimeExtensions").RuntimeExtension[]; + defaultsMode: import("@smithy/smithy-client").DefaultsMode | import("@smithy/types").Provider; + customUserAgent?: string | import("@smithy/types").UserAgent | undefined; + userAgentAppId?: string | import("@smithy/types").Provider | undefined; + retryStrategy?: import("@smithy/types").RetryStrategy | import("@smithy/types").RetryStrategyV2 | undefined; + endpoint?: ((string | import("@smithy/types").Endpoint | import("@smithy/types").Provider | import("@smithy/types").EndpointV2 | import("@smithy/types").Provider) & (string | import("@smithy/types").Provider | import("@smithy/types").Endpoint | import("@smithy/types").Provider | import("@smithy/types").EndpointV2 | import("@smithy/types").Provider)) | undefined; + endpointProvider: (endpointParams: import("./endpoint/EndpointParameters").EndpointParameters, context?: { + logger?: import("@smithy/types").Logger | undefined; + }) => import("@smithy/types").EndpointV2; + tls?: boolean | undefined; + serviceConfiguredEndpoint?: undefined; + authSchemePreference?: string[] | import("@smithy/types").Provider | undefined; + httpAuthSchemes: import("@smithy/types").HttpAuthScheme[] | ({ + schemeId: string; + identityProvider: (ipc: import("@smithy/types").IdentityProviderConfig) => import("@smithy/types").IdentityProvider | undefined; + signer: import("@aws-sdk/core").AwsSdkSigV4Signer; + } | { + schemeId: string; + identityProvider: (ipc: import("@smithy/types").IdentityProviderConfig) => import("@smithy/types").IdentityProvider | (() => Promise<{}>); + signer: import("@smithy/core").NoAuthSigner; + })[]; + httpAuthSchemeProvider: import("./auth/httpAuthSchemeProvider").SSOHttpAuthSchemeProvider; + credentials?: import("@smithy/types").AwsCredentialIdentity | import("@smithy/types").AwsCredentialIdentityProvider | undefined; + signer?: import("@smithy/types").RequestSigner | ((authScheme?: import("@smithy/types").AuthScheme 
| undefined) => Promise) | undefined; + signingEscapePath?: boolean | undefined; + systemClockOffset?: number | undefined; + signingRegion?: string | undefined; + signerConstructor?: (new (options: import("@smithy/signature-v4").SignatureV4Init & import("@smithy/signature-v4").SignatureV4CryptoInit) => import("@smithy/types").RequestSigner) | undefined; +}; diff --git a/amplify/functions/fetchDocuments/node_modules/@aws-sdk/client-sso/dist-types/runtimeConfig.shared.d.ts b/amplify/functions/fetchDocuments/node_modules/@aws-sdk/client-sso/dist-types/runtimeConfig.shared.d.ts new file mode 100644 index 0000000..20ab682 --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@aws-sdk/client-sso/dist-types/runtimeConfig.shared.d.ts @@ -0,0 +1,32 @@ +import { AwsSdkSigV4Signer } from "@aws-sdk/core"; +import { NoAuthSigner } from "@smithy/core"; +import { IdentityProviderConfig } from "@smithy/types"; +import { SSOClientConfig } from "./SSOClient"; +/** + * @internal + */ +export declare const getRuntimeConfig: (config: SSOClientConfig) => { + apiVersion: string; + base64Decoder: import("@smithy/types").Decoder; + base64Encoder: (_input: string | Uint8Array) => string; + disableHostPrefix: boolean; + endpointProvider: (endpointParams: import("./endpoint/EndpointParameters").EndpointParameters, context?: { + logger?: import("@smithy/types").Logger | undefined; + }) => import("@smithy/types").EndpointV2; + extensions: import("./runtimeExtensions").RuntimeExtension[]; + httpAuthSchemeProvider: import("./auth/httpAuthSchemeProvider").SSOHttpAuthSchemeProvider; + httpAuthSchemes: import("@smithy/types").HttpAuthScheme[] | ({ + schemeId: string; + identityProvider: (ipc: IdentityProviderConfig) => import("@smithy/types").IdentityProvider | undefined; + signer: AwsSdkSigV4Signer; + } | { + schemeId: string; + identityProvider: (ipc: IdentityProviderConfig) => import("@smithy/types").IdentityProvider | (() => Promise<{}>); + signer: NoAuthSigner; + })[]; + logger: 
import("@smithy/types").Logger; + serviceId: string; + urlParser: import("@smithy/types").UrlParser; + utf8Decoder: import("@smithy/types").Decoder; + utf8Encoder: (input: string | Uint8Array) => string; +}; diff --git a/amplify/functions/fetchDocuments/node_modules/@aws-sdk/client-sso/dist-types/runtimeExtensions.d.ts b/amplify/functions/fetchDocuments/node_modules/@aws-sdk/client-sso/dist-types/runtimeExtensions.d.ts new file mode 100644 index 0000000..a0f078c --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@aws-sdk/client-sso/dist-types/runtimeExtensions.d.ts @@ -0,0 +1,17 @@ +import { SSOExtensionConfiguration } from "./extensionConfiguration"; +/** + * @public + */ +export interface RuntimeExtension { + configure(extensionConfiguration: SSOExtensionConfiguration): void; +} +/** + * @public + */ +export interface RuntimeExtensionsConfig { + extensions: RuntimeExtension[]; +} +/** + * @internal + */ +export declare const resolveRuntimeExtensions: (runtimeConfig: any, extensions: RuntimeExtension[]) => any; diff --git a/amplify/functions/fetchDocuments/node_modules/@aws-sdk/client-sso/dist-types/ts3.4/SSO.d.ts b/amplify/functions/fetchDocuments/node_modules/@aws-sdk/client-sso/dist-types/ts3.4/SSO.d.ts new file mode 100644 index 0000000..9a242fc --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@aws-sdk/client-sso/dist-types/ts3.4/SSO.d.ts @@ -0,0 +1,73 @@ +import { HttpHandlerOptions as __HttpHandlerOptions } from "@smithy/types"; +import { + GetRoleCredentialsCommandInput, + GetRoleCredentialsCommandOutput, +} from "./commands/GetRoleCredentialsCommand"; +import { + ListAccountRolesCommandInput, + ListAccountRolesCommandOutput, +} from "./commands/ListAccountRolesCommand"; +import { + ListAccountsCommandInput, + ListAccountsCommandOutput, +} from "./commands/ListAccountsCommand"; +import { + LogoutCommandInput, + LogoutCommandOutput, +} from "./commands/LogoutCommand"; +import { SSOClient } from "./SSOClient"; +export interface 
SSO { + getRoleCredentials( + args: GetRoleCredentialsCommandInput, + options?: __HttpHandlerOptions + ): Promise; + getRoleCredentials( + args: GetRoleCredentialsCommandInput, + cb: (err: any, data?: GetRoleCredentialsCommandOutput) => void + ): void; + getRoleCredentials( + args: GetRoleCredentialsCommandInput, + options: __HttpHandlerOptions, + cb: (err: any, data?: GetRoleCredentialsCommandOutput) => void + ): void; + listAccountRoles( + args: ListAccountRolesCommandInput, + options?: __HttpHandlerOptions + ): Promise; + listAccountRoles( + args: ListAccountRolesCommandInput, + cb: (err: any, data?: ListAccountRolesCommandOutput) => void + ): void; + listAccountRoles( + args: ListAccountRolesCommandInput, + options: __HttpHandlerOptions, + cb: (err: any, data?: ListAccountRolesCommandOutput) => void + ): void; + listAccounts( + args: ListAccountsCommandInput, + options?: __HttpHandlerOptions + ): Promise; + listAccounts( + args: ListAccountsCommandInput, + cb: (err: any, data?: ListAccountsCommandOutput) => void + ): void; + listAccounts( + args: ListAccountsCommandInput, + options: __HttpHandlerOptions, + cb: (err: any, data?: ListAccountsCommandOutput) => void + ): void; + logout( + args: LogoutCommandInput, + options?: __HttpHandlerOptions + ): Promise; + logout( + args: LogoutCommandInput, + cb: (err: any, data?: LogoutCommandOutput) => void + ): void; + logout( + args: LogoutCommandInput, + options: __HttpHandlerOptions, + cb: (err: any, data?: LogoutCommandOutput) => void + ): void; +} +export declare class SSO extends SSOClient implements SSO {} diff --git a/amplify/functions/fetchDocuments/node_modules/@aws-sdk/client-sso/dist-types/ts3.4/SSOClient.d.ts b/amplify/functions/fetchDocuments/node_modules/@aws-sdk/client-sso/dist-types/ts3.4/SSOClient.d.ts new file mode 100644 index 0000000..efd5a5f --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@aws-sdk/client-sso/dist-types/ts3.4/SSOClient.d.ts @@ -0,0 +1,138 @@ +import { + 
HostHeaderInputConfig, + HostHeaderResolvedConfig, +} from "@aws-sdk/middleware-host-header"; +import { + UserAgentInputConfig, + UserAgentResolvedConfig, +} from "@aws-sdk/middleware-user-agent"; +import { + RegionInputConfig, + RegionResolvedConfig, +} from "@smithy/config-resolver"; +import { + EndpointInputConfig, + EndpointResolvedConfig, +} from "@smithy/middleware-endpoint"; +import { + RetryInputConfig, + RetryResolvedConfig, +} from "@smithy/middleware-retry"; +import { HttpHandlerUserInput as __HttpHandlerUserInput } from "@smithy/protocol-http"; +import { + Client as __Client, + DefaultsMode as __DefaultsMode, + SmithyConfiguration as __SmithyConfiguration, + SmithyResolvedConfiguration as __SmithyResolvedConfiguration, +} from "@smithy/smithy-client"; +import { + BodyLengthCalculator as __BodyLengthCalculator, + CheckOptionalClientConfig as __CheckOptionalClientConfig, + ChecksumConstructor as __ChecksumConstructor, + Decoder as __Decoder, + Encoder as __Encoder, + HashConstructor as __HashConstructor, + HttpHandlerOptions as __HttpHandlerOptions, + Logger as __Logger, + Provider as __Provider, + Provider, + StreamCollector as __StreamCollector, + UrlParser as __UrlParser, + UserAgent as __UserAgent, +} from "@smithy/types"; +import { + HttpAuthSchemeInputConfig, + HttpAuthSchemeResolvedConfig, +} from "./auth/httpAuthSchemeProvider"; +import { + GetRoleCredentialsCommandInput, + GetRoleCredentialsCommandOutput, +} from "./commands/GetRoleCredentialsCommand"; +import { + ListAccountRolesCommandInput, + ListAccountRolesCommandOutput, +} from "./commands/ListAccountRolesCommand"; +import { + ListAccountsCommandInput, + ListAccountsCommandOutput, +} from "./commands/ListAccountsCommand"; +import { + LogoutCommandInput, + LogoutCommandOutput, +} from "./commands/LogoutCommand"; +import { + ClientInputEndpointParameters, + ClientResolvedEndpointParameters, + EndpointParameters, +} from "./endpoint/EndpointParameters"; +import { RuntimeExtension, 
RuntimeExtensionsConfig } from "./runtimeExtensions"; +export { __Client }; +export type ServiceInputTypes = + | GetRoleCredentialsCommandInput + | ListAccountRolesCommandInput + | ListAccountsCommandInput + | LogoutCommandInput; +export type ServiceOutputTypes = + | GetRoleCredentialsCommandOutput + | ListAccountRolesCommandOutput + | ListAccountsCommandOutput + | LogoutCommandOutput; +export interface ClientDefaults + extends Partial<__SmithyConfiguration<__HttpHandlerOptions>> { + requestHandler?: __HttpHandlerUserInput; + sha256?: __ChecksumConstructor | __HashConstructor; + urlParser?: __UrlParser; + bodyLengthChecker?: __BodyLengthCalculator; + streamCollector?: __StreamCollector; + base64Decoder?: __Decoder; + base64Encoder?: __Encoder; + utf8Decoder?: __Decoder; + utf8Encoder?: __Encoder; + runtime?: string; + disableHostPrefix?: boolean; + serviceId?: string; + useDualstackEndpoint?: boolean | __Provider; + useFipsEndpoint?: boolean | __Provider; + region?: string | __Provider; + profile?: string; + defaultUserAgentProvider?: Provider<__UserAgent>; + maxAttempts?: number | __Provider; + retryMode?: string | __Provider; + logger?: __Logger; + extensions?: RuntimeExtension[]; + defaultsMode?: __DefaultsMode | __Provider<__DefaultsMode>; +} +export type SSOClientConfigType = Partial< + __SmithyConfiguration<__HttpHandlerOptions> +> & + ClientDefaults & + UserAgentInputConfig & + RetryInputConfig & + RegionInputConfig & + HostHeaderInputConfig & + EndpointInputConfig & + HttpAuthSchemeInputConfig & + ClientInputEndpointParameters; +export interface SSOClientConfig extends SSOClientConfigType {} +export type SSOClientResolvedConfigType = + __SmithyResolvedConfiguration<__HttpHandlerOptions> & + Required & + RuntimeExtensionsConfig & + UserAgentResolvedConfig & + RetryResolvedConfig & + RegionResolvedConfig & + HostHeaderResolvedConfig & + EndpointResolvedConfig & + HttpAuthSchemeResolvedConfig & + ClientResolvedEndpointParameters; +export interface 
SSOClientResolvedConfig extends SSOClientResolvedConfigType {} +export declare class SSOClient extends __Client< + __HttpHandlerOptions, + ServiceInputTypes, + ServiceOutputTypes, + SSOClientResolvedConfig +> { + readonly config: SSOClientResolvedConfig; + constructor(...[configuration]: __CheckOptionalClientConfig); + destroy(): void; +} diff --git a/amplify/functions/fetchDocuments/node_modules/@aws-sdk/client-sso/dist-types/ts3.4/auth/httpAuthExtensionConfiguration.d.ts b/amplify/functions/fetchDocuments/node_modules/@aws-sdk/client-sso/dist-types/ts3.4/auth/httpAuthExtensionConfiguration.d.ts new file mode 100644 index 0000000..29f38b3 --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@aws-sdk/client-sso/dist-types/ts3.4/auth/httpAuthExtensionConfiguration.d.ts @@ -0,0 +1,32 @@ +import { + AwsCredentialIdentity, + AwsCredentialIdentityProvider, + HttpAuthScheme, +} from "@smithy/types"; +import { SSOHttpAuthSchemeProvider } from "./httpAuthSchemeProvider"; +export interface HttpAuthExtensionConfiguration { + setHttpAuthScheme(httpAuthScheme: HttpAuthScheme): void; + httpAuthSchemes(): HttpAuthScheme[]; + setHttpAuthSchemeProvider( + httpAuthSchemeProvider: SSOHttpAuthSchemeProvider + ): void; + httpAuthSchemeProvider(): SSOHttpAuthSchemeProvider; + setCredentials( + credentials: AwsCredentialIdentity | AwsCredentialIdentityProvider + ): void; + credentials(): + | AwsCredentialIdentity + | AwsCredentialIdentityProvider + | undefined; +} +export type HttpAuthRuntimeConfig = Partial<{ + httpAuthSchemes: HttpAuthScheme[]; + httpAuthSchemeProvider: SSOHttpAuthSchemeProvider; + credentials: AwsCredentialIdentity | AwsCredentialIdentityProvider; +}>; +export declare const getHttpAuthExtensionConfiguration: ( + runtimeConfig: HttpAuthRuntimeConfig +) => HttpAuthExtensionConfiguration; +export declare const resolveHttpAuthRuntimeConfig: ( + config: HttpAuthExtensionConfiguration +) => HttpAuthRuntimeConfig; diff --git 
a/amplify/functions/fetchDocuments/node_modules/@aws-sdk/client-sso/dist-types/ts3.4/auth/httpAuthSchemeProvider.d.ts b/amplify/functions/fetchDocuments/node_modules/@aws-sdk/client-sso/dist-types/ts3.4/auth/httpAuthSchemeProvider.d.ts new file mode 100644 index 0000000..864f755 --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@aws-sdk/client-sso/dist-types/ts3.4/auth/httpAuthSchemeProvider.d.ts @@ -0,0 +1,46 @@ +import { + AwsSdkSigV4AuthInputConfig, + AwsSdkSigV4AuthResolvedConfig, + AwsSdkSigV4PreviouslyResolved, +} from "@aws-sdk/core"; +import { + HandlerExecutionContext, + HttpAuthScheme, + HttpAuthSchemeParameters, + HttpAuthSchemeParametersProvider, + HttpAuthSchemeProvider, + Provider, +} from "@smithy/types"; +import { SSOClientResolvedConfig } from "../SSOClient"; +export interface SSOHttpAuthSchemeParameters extends HttpAuthSchemeParameters { + region?: string; +} +export interface SSOHttpAuthSchemeParametersProvider + extends HttpAuthSchemeParametersProvider< + SSOClientResolvedConfig, + HandlerExecutionContext, + SSOHttpAuthSchemeParameters, + object + > {} +export declare const defaultSSOHttpAuthSchemeParametersProvider: ( + config: SSOClientResolvedConfig, + context: HandlerExecutionContext, + input: object +) => Promise; +export interface SSOHttpAuthSchemeProvider + extends HttpAuthSchemeProvider {} +export declare const defaultSSOHttpAuthSchemeProvider: SSOHttpAuthSchemeProvider; +export interface HttpAuthSchemeInputConfig extends AwsSdkSigV4AuthInputConfig { + authSchemePreference?: string[] | Provider; + httpAuthSchemes?: HttpAuthScheme[]; + httpAuthSchemeProvider?: SSOHttpAuthSchemeProvider; +} +export interface HttpAuthSchemeResolvedConfig + extends AwsSdkSigV4AuthResolvedConfig { + readonly authSchemePreference: Provider; + readonly httpAuthSchemes: HttpAuthScheme[]; + readonly httpAuthSchemeProvider: SSOHttpAuthSchemeProvider; +} +export declare const resolveHttpAuthSchemeConfig: ( + config: T & HttpAuthSchemeInputConfig & 
AwsSdkSigV4PreviouslyResolved +) => T & HttpAuthSchemeResolvedConfig; diff --git a/amplify/functions/fetchDocuments/node_modules/@aws-sdk/client-sso/dist-types/ts3.4/commands/GetRoleCredentialsCommand.d.ts b/amplify/functions/fetchDocuments/node_modules/@aws-sdk/client-sso/dist-types/ts3.4/commands/GetRoleCredentialsCommand.d.ts new file mode 100644 index 0000000..7c1b358 --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@aws-sdk/client-sso/dist-types/ts3.4/commands/GetRoleCredentialsCommand.d.ts @@ -0,0 +1,51 @@ +import { Command as $Command } from "@smithy/smithy-client"; +import { MetadataBearer as __MetadataBearer } from "@smithy/types"; +import { + GetRoleCredentialsRequest, + GetRoleCredentialsResponse, +} from "../models/models_0"; +import { + ServiceInputTypes, + ServiceOutputTypes, + SSOClientResolvedConfig, +} from "../SSOClient"; +export { __MetadataBearer }; +export { $Command }; +export interface GetRoleCredentialsCommandInput + extends GetRoleCredentialsRequest {} +export interface GetRoleCredentialsCommandOutput + extends GetRoleCredentialsResponse, + __MetadataBearer {} +declare const GetRoleCredentialsCommand_base: { + new ( + input: GetRoleCredentialsCommandInput + ): import("@smithy/smithy-client").CommandImpl< + GetRoleCredentialsCommandInput, + GetRoleCredentialsCommandOutput, + SSOClientResolvedConfig, + ServiceInputTypes, + ServiceOutputTypes + >; + new ( + __0_0: GetRoleCredentialsCommandInput + ): import("@smithy/smithy-client").CommandImpl< + GetRoleCredentialsCommandInput, + GetRoleCredentialsCommandOutput, + SSOClientResolvedConfig, + ServiceInputTypes, + ServiceOutputTypes + >; + getEndpointParameterInstructions(): import("@smithy/middleware-endpoint").EndpointParameterInstructions; +}; +export declare class GetRoleCredentialsCommand extends GetRoleCredentialsCommand_base { + protected static __types: { + api: { + input: GetRoleCredentialsRequest; + output: GetRoleCredentialsResponse; + }; + sdk: { + input: 
GetRoleCredentialsCommandInput; + output: GetRoleCredentialsCommandOutput; + }; + }; +} diff --git a/amplify/functions/fetchDocuments/node_modules/@aws-sdk/client-sso/dist-types/ts3.4/commands/ListAccountRolesCommand.d.ts b/amplify/functions/fetchDocuments/node_modules/@aws-sdk/client-sso/dist-types/ts3.4/commands/ListAccountRolesCommand.d.ts new file mode 100644 index 0000000..3b898a9 --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@aws-sdk/client-sso/dist-types/ts3.4/commands/ListAccountRolesCommand.d.ts @@ -0,0 +1,50 @@ +import { Command as $Command } from "@smithy/smithy-client"; +import { MetadataBearer as __MetadataBearer } from "@smithy/types"; +import { + ListAccountRolesRequest, + ListAccountRolesResponse, +} from "../models/models_0"; +import { + ServiceInputTypes, + ServiceOutputTypes, + SSOClientResolvedConfig, +} from "../SSOClient"; +export { __MetadataBearer }; +export { $Command }; +export interface ListAccountRolesCommandInput extends ListAccountRolesRequest {} +export interface ListAccountRolesCommandOutput + extends ListAccountRolesResponse, + __MetadataBearer {} +declare const ListAccountRolesCommand_base: { + new ( + input: ListAccountRolesCommandInput + ): import("@smithy/smithy-client").CommandImpl< + ListAccountRolesCommandInput, + ListAccountRolesCommandOutput, + SSOClientResolvedConfig, + ServiceInputTypes, + ServiceOutputTypes + >; + new ( + __0_0: ListAccountRolesCommandInput + ): import("@smithy/smithy-client").CommandImpl< + ListAccountRolesCommandInput, + ListAccountRolesCommandOutput, + SSOClientResolvedConfig, + ServiceInputTypes, + ServiceOutputTypes + >; + getEndpointParameterInstructions(): import("@smithy/middleware-endpoint").EndpointParameterInstructions; +}; +export declare class ListAccountRolesCommand extends ListAccountRolesCommand_base { + protected static __types: { + api: { + input: ListAccountRolesRequest; + output: ListAccountRolesResponse; + }; + sdk: { + input: ListAccountRolesCommandInput; + 
output: ListAccountRolesCommandOutput; + }; + }; +} diff --git a/amplify/functions/fetchDocuments/node_modules/@aws-sdk/client-sso/dist-types/ts3.4/commands/ListAccountsCommand.d.ts b/amplify/functions/fetchDocuments/node_modules/@aws-sdk/client-sso/dist-types/ts3.4/commands/ListAccountsCommand.d.ts new file mode 100644 index 0000000..3a00cc1 --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@aws-sdk/client-sso/dist-types/ts3.4/commands/ListAccountsCommand.d.ts @@ -0,0 +1,47 @@ +import { Command as $Command } from "@smithy/smithy-client"; +import { MetadataBearer as __MetadataBearer } from "@smithy/types"; +import { ListAccountsRequest, ListAccountsResponse } from "../models/models_0"; +import { + ServiceInputTypes, + ServiceOutputTypes, + SSOClientResolvedConfig, +} from "../SSOClient"; +export { __MetadataBearer }; +export { $Command }; +export interface ListAccountsCommandInput extends ListAccountsRequest {} +export interface ListAccountsCommandOutput + extends ListAccountsResponse, + __MetadataBearer {} +declare const ListAccountsCommand_base: { + new ( + input: ListAccountsCommandInput + ): import("@smithy/smithy-client").CommandImpl< + ListAccountsCommandInput, + ListAccountsCommandOutput, + SSOClientResolvedConfig, + ServiceInputTypes, + ServiceOutputTypes + >; + new ( + __0_0: ListAccountsCommandInput + ): import("@smithy/smithy-client").CommandImpl< + ListAccountsCommandInput, + ListAccountsCommandOutput, + SSOClientResolvedConfig, + ServiceInputTypes, + ServiceOutputTypes + >; + getEndpointParameterInstructions(): import("@smithy/middleware-endpoint").EndpointParameterInstructions; +}; +export declare class ListAccountsCommand extends ListAccountsCommand_base { + protected static __types: { + api: { + input: ListAccountsRequest; + output: ListAccountsResponse; + }; + sdk: { + input: ListAccountsCommandInput; + output: ListAccountsCommandOutput; + }; + }; +} diff --git 
a/amplify/functions/fetchDocuments/node_modules/@aws-sdk/client-sso/dist-types/ts3.4/commands/LogoutCommand.d.ts b/amplify/functions/fetchDocuments/node_modules/@aws-sdk/client-sso/dist-types/ts3.4/commands/LogoutCommand.d.ts new file mode 100644 index 0000000..2599250 --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@aws-sdk/client-sso/dist-types/ts3.4/commands/LogoutCommand.d.ts @@ -0,0 +1,41 @@ +import { Command as $Command } from "@smithy/smithy-client"; +import { MetadataBearer as __MetadataBearer } from "@smithy/types"; +import { LogoutRequest } from "../models/models_0"; +import { + ServiceInputTypes, + ServiceOutputTypes, + SSOClientResolvedConfig, +} from "../SSOClient"; +export { __MetadataBearer }; +export { $Command }; +export interface LogoutCommandInput extends LogoutRequest {} +export interface LogoutCommandOutput extends __MetadataBearer {} +declare const LogoutCommand_base: { + new (input: LogoutCommandInput): import("@smithy/smithy-client").CommandImpl< + LogoutCommandInput, + LogoutCommandOutput, + SSOClientResolvedConfig, + ServiceInputTypes, + ServiceOutputTypes + >; + new (__0_0: LogoutCommandInput): import("@smithy/smithy-client").CommandImpl< + LogoutCommandInput, + LogoutCommandOutput, + SSOClientResolvedConfig, + ServiceInputTypes, + ServiceOutputTypes + >; + getEndpointParameterInstructions(): import("@smithy/middleware-endpoint").EndpointParameterInstructions; +}; +export declare class LogoutCommand extends LogoutCommand_base { + protected static __types: { + api: { + input: LogoutRequest; + output: {}; + }; + sdk: { + input: LogoutCommandInput; + output: LogoutCommandOutput; + }; + }; +} diff --git a/amplify/functions/fetchDocuments/node_modules/@aws-sdk/client-sso/dist-types/ts3.4/commands/index.d.ts b/amplify/functions/fetchDocuments/node_modules/@aws-sdk/client-sso/dist-types/ts3.4/commands/index.d.ts new file mode 100644 index 0000000..0ab890d --- /dev/null +++ 
b/amplify/functions/fetchDocuments/node_modules/@aws-sdk/client-sso/dist-types/ts3.4/commands/index.d.ts @@ -0,0 +1,4 @@ +export * from "./GetRoleCredentialsCommand"; +export * from "./ListAccountRolesCommand"; +export * from "./ListAccountsCommand"; +export * from "./LogoutCommand"; diff --git a/amplify/functions/fetchDocuments/node_modules/@aws-sdk/client-sso/dist-types/ts3.4/endpoint/EndpointParameters.d.ts b/amplify/functions/fetchDocuments/node_modules/@aws-sdk/client-sso/dist-types/ts3.4/endpoint/EndpointParameters.d.ts new file mode 100644 index 0000000..7f24540 --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@aws-sdk/client-sso/dist-types/ts3.4/endpoint/EndpointParameters.d.ts @@ -0,0 +1,51 @@ +import { + Endpoint, + EndpointParameters as __EndpointParameters, + EndpointV2, + Provider, +} from "@smithy/types"; +export interface ClientInputEndpointParameters { + region?: string | Provider; + useDualstackEndpoint?: boolean | Provider; + useFipsEndpoint?: boolean | Provider; + endpoint?: + | string + | Provider + | Endpoint + | Provider + | EndpointV2 + | Provider; +} +export type ClientResolvedEndpointParameters = ClientInputEndpointParameters & { + defaultSigningName: string; +}; +export declare const resolveClientEndpointParameters: ( + options: T & ClientInputEndpointParameters +) => T & + ClientInputEndpointParameters & { + defaultSigningName: string; + }; +export declare const commonParams: { + readonly UseFIPS: { + readonly type: "builtInParams"; + readonly name: "useFipsEndpoint"; + }; + readonly Endpoint: { + readonly type: "builtInParams"; + readonly name: "endpoint"; + }; + readonly Region: { + readonly type: "builtInParams"; + readonly name: "region"; + }; + readonly UseDualStack: { + readonly type: "builtInParams"; + readonly name: "useDualstackEndpoint"; + }; +}; +export interface EndpointParameters extends __EndpointParameters { + Region?: string; + UseDualStack?: boolean; + UseFIPS?: boolean; + Endpoint?: string; +} diff --git 
a/amplify/functions/fetchDocuments/node_modules/@aws-sdk/client-sso/dist-types/ts3.4/endpoint/endpointResolver.d.ts b/amplify/functions/fetchDocuments/node_modules/@aws-sdk/client-sso/dist-types/ts3.4/endpoint/endpointResolver.d.ts new file mode 100644 index 0000000..5909925 --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@aws-sdk/client-sso/dist-types/ts3.4/endpoint/endpointResolver.d.ts @@ -0,0 +1,8 @@ +import { EndpointV2, Logger } from "@smithy/types"; +import { EndpointParameters } from "./EndpointParameters"; +export declare const defaultEndpointResolver: ( + endpointParams: EndpointParameters, + context?: { + logger?: Logger; + } +) => EndpointV2; diff --git a/amplify/functions/fetchDocuments/node_modules/@aws-sdk/client-sso/dist-types/ts3.4/endpoint/ruleset.d.ts b/amplify/functions/fetchDocuments/node_modules/@aws-sdk/client-sso/dist-types/ts3.4/endpoint/ruleset.d.ts new file mode 100644 index 0000000..4b23899 --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@aws-sdk/client-sso/dist-types/ts3.4/endpoint/ruleset.d.ts @@ -0,0 +1,2 @@ +import { RuleSetObject } from "@smithy/types"; +export declare const ruleSet: RuleSetObject; diff --git a/amplify/functions/fetchDocuments/node_modules/@aws-sdk/client-sso/dist-types/ts3.4/extensionConfiguration.d.ts b/amplify/functions/fetchDocuments/node_modules/@aws-sdk/client-sso/dist-types/ts3.4/extensionConfiguration.d.ts new file mode 100644 index 0000000..c1b43ff --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@aws-sdk/client-sso/dist-types/ts3.4/extensionConfiguration.d.ts @@ -0,0 +1,9 @@ +import { AwsRegionExtensionConfiguration } from "@aws-sdk/types"; +import { HttpHandlerExtensionConfiguration } from "@smithy/protocol-http"; +import { DefaultExtensionConfiguration } from "@smithy/types"; +import { HttpAuthExtensionConfiguration } from "./auth/httpAuthExtensionConfiguration"; +export interface SSOExtensionConfiguration + extends HttpHandlerExtensionConfiguration, + 
DefaultExtensionConfiguration, + AwsRegionExtensionConfiguration, + HttpAuthExtensionConfiguration {} diff --git a/amplify/functions/fetchDocuments/node_modules/@aws-sdk/client-sso/dist-types/ts3.4/index.d.ts b/amplify/functions/fetchDocuments/node_modules/@aws-sdk/client-sso/dist-types/ts3.4/index.d.ts new file mode 100644 index 0000000..891aed3 --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@aws-sdk/client-sso/dist-types/ts3.4/index.d.ts @@ -0,0 +1,9 @@ +export * from "./SSOClient"; +export * from "./SSO"; +export { ClientInputEndpointParameters } from "./endpoint/EndpointParameters"; +export { RuntimeExtension } from "./runtimeExtensions"; +export { SSOExtensionConfiguration } from "./extensionConfiguration"; +export * from "./commands"; +export * from "./pagination"; +export * from "./models"; +export { SSOServiceException } from "./models/SSOServiceException"; diff --git a/amplify/functions/fetchDocuments/node_modules/@aws-sdk/client-sso/dist-types/ts3.4/models/SSOServiceException.d.ts b/amplify/functions/fetchDocuments/node_modules/@aws-sdk/client-sso/dist-types/ts3.4/models/SSOServiceException.d.ts new file mode 100644 index 0000000..1ad045d --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@aws-sdk/client-sso/dist-types/ts3.4/models/SSOServiceException.d.ts @@ -0,0 +1,9 @@ +import { + ServiceException as __ServiceException, + ServiceExceptionOptions as __ServiceExceptionOptions, +} from "@smithy/smithy-client"; +export { __ServiceExceptionOptions }; +export { __ServiceException }; +export declare class SSOServiceException extends __ServiceException { + constructor(options: __ServiceExceptionOptions); +} diff --git a/amplify/functions/fetchDocuments/node_modules/@aws-sdk/client-sso/dist-types/ts3.4/models/index.d.ts b/amplify/functions/fetchDocuments/node_modules/@aws-sdk/client-sso/dist-types/ts3.4/models/index.d.ts new file mode 100644 index 0000000..09c5d6e --- /dev/null +++ 
b/amplify/functions/fetchDocuments/node_modules/@aws-sdk/client-sso/dist-types/ts3.4/models/index.d.ts @@ -0,0 +1 @@ +export * from "./models_0"; diff --git a/amplify/functions/fetchDocuments/node_modules/@aws-sdk/client-sso/dist-types/ts3.4/models/models_0.d.ts b/amplify/functions/fetchDocuments/node_modules/@aws-sdk/client-sso/dist-types/ts3.4/models/models_0.d.ts new file mode 100644 index 0000000..4bbe08c --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@aws-sdk/client-sso/dist-types/ts3.4/models/models_0.d.ts @@ -0,0 +1,93 @@ +import { ExceptionOptionType as __ExceptionOptionType } from "@smithy/smithy-client"; +import { SSOServiceException as __BaseException } from "./SSOServiceException"; +export interface AccountInfo { + accountId?: string | undefined; + accountName?: string | undefined; + emailAddress?: string | undefined; +} +export interface GetRoleCredentialsRequest { + roleName: string | undefined; + accountId: string | undefined; + accessToken: string | undefined; +} +export interface RoleCredentials { + accessKeyId?: string | undefined; + secretAccessKey?: string | undefined; + sessionToken?: string | undefined; + expiration?: number | undefined; +} +export interface GetRoleCredentialsResponse { + roleCredentials?: RoleCredentials | undefined; +} +export declare class InvalidRequestException extends __BaseException { + readonly name: "InvalidRequestException"; + readonly $fault: "client"; + constructor( + opts: __ExceptionOptionType + ); +} +export declare class ResourceNotFoundException extends __BaseException { + readonly name: "ResourceNotFoundException"; + readonly $fault: "client"; + constructor( + opts: __ExceptionOptionType + ); +} +export declare class TooManyRequestsException extends __BaseException { + readonly name: "TooManyRequestsException"; + readonly $fault: "client"; + constructor( + opts: __ExceptionOptionType + ); +} +export declare class UnauthorizedException extends __BaseException { + readonly name: 
"UnauthorizedException"; + readonly $fault: "client"; + constructor( + opts: __ExceptionOptionType + ); +} +export interface ListAccountRolesRequest { + nextToken?: string | undefined; + maxResults?: number | undefined; + accessToken: string | undefined; + accountId: string | undefined; +} +export interface RoleInfo { + roleName?: string | undefined; + accountId?: string | undefined; +} +export interface ListAccountRolesResponse { + nextToken?: string | undefined; + roleList?: RoleInfo[] | undefined; +} +export interface ListAccountsRequest { + nextToken?: string | undefined; + maxResults?: number | undefined; + accessToken: string | undefined; +} +export interface ListAccountsResponse { + nextToken?: string | undefined; + accountList?: AccountInfo[] | undefined; +} +export interface LogoutRequest { + accessToken: string | undefined; +} +export declare const GetRoleCredentialsRequestFilterSensitiveLog: ( + obj: GetRoleCredentialsRequest +) => any; +export declare const RoleCredentialsFilterSensitiveLog: ( + obj: RoleCredentials +) => any; +export declare const GetRoleCredentialsResponseFilterSensitiveLog: ( + obj: GetRoleCredentialsResponse +) => any; +export declare const ListAccountRolesRequestFilterSensitiveLog: ( + obj: ListAccountRolesRequest +) => any; +export declare const ListAccountsRequestFilterSensitiveLog: ( + obj: ListAccountsRequest +) => any; +export declare const LogoutRequestFilterSensitiveLog: ( + obj: LogoutRequest +) => any; diff --git a/amplify/functions/fetchDocuments/node_modules/@aws-sdk/client-sso/dist-types/ts3.4/pagination/Interfaces.d.ts b/amplify/functions/fetchDocuments/node_modules/@aws-sdk/client-sso/dist-types/ts3.4/pagination/Interfaces.d.ts new file mode 100644 index 0000000..2970898 --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@aws-sdk/client-sso/dist-types/ts3.4/pagination/Interfaces.d.ts @@ -0,0 +1,5 @@ +import { PaginationConfiguration } from "@smithy/types"; +import { SSOClient } from "../SSOClient"; 
+export interface SSOPaginationConfiguration extends PaginationConfiguration { + client: SSOClient; +} diff --git a/amplify/functions/fetchDocuments/node_modules/@aws-sdk/client-sso/dist-types/ts3.4/pagination/ListAccountRolesPaginator.d.ts b/amplify/functions/fetchDocuments/node_modules/@aws-sdk/client-sso/dist-types/ts3.4/pagination/ListAccountRolesPaginator.d.ts new file mode 100644 index 0000000..174f32b --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@aws-sdk/client-sso/dist-types/ts3.4/pagination/ListAccountRolesPaginator.d.ts @@ -0,0 +1,11 @@ +import { Paginator } from "@smithy/types"; +import { + ListAccountRolesCommandInput, + ListAccountRolesCommandOutput, +} from "../commands/ListAccountRolesCommand"; +import { SSOPaginationConfiguration } from "./Interfaces"; +export declare const paginateListAccountRoles: ( + config: SSOPaginationConfiguration, + input: ListAccountRolesCommandInput, + ...rest: any[] +) => Paginator; diff --git a/amplify/functions/fetchDocuments/node_modules/@aws-sdk/client-sso/dist-types/ts3.4/pagination/ListAccountsPaginator.d.ts b/amplify/functions/fetchDocuments/node_modules/@aws-sdk/client-sso/dist-types/ts3.4/pagination/ListAccountsPaginator.d.ts new file mode 100644 index 0000000..bb5e66d --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@aws-sdk/client-sso/dist-types/ts3.4/pagination/ListAccountsPaginator.d.ts @@ -0,0 +1,11 @@ +import { Paginator } from "@smithy/types"; +import { + ListAccountsCommandInput, + ListAccountsCommandOutput, +} from "../commands/ListAccountsCommand"; +import { SSOPaginationConfiguration } from "./Interfaces"; +export declare const paginateListAccounts: ( + config: SSOPaginationConfiguration, + input: ListAccountsCommandInput, + ...rest: any[] +) => Paginator; diff --git a/amplify/functions/fetchDocuments/node_modules/@aws-sdk/client-sso/dist-types/ts3.4/pagination/index.d.ts 
b/amplify/functions/fetchDocuments/node_modules/@aws-sdk/client-sso/dist-types/ts3.4/pagination/index.d.ts new file mode 100644 index 0000000..1e7866f --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@aws-sdk/client-sso/dist-types/ts3.4/pagination/index.d.ts @@ -0,0 +1,3 @@ +export * from "./Interfaces"; +export * from "./ListAccountRolesPaginator"; +export * from "./ListAccountsPaginator"; diff --git a/amplify/functions/fetchDocuments/node_modules/@aws-sdk/client-sso/dist-types/ts3.4/protocols/Aws_restJson1.d.ts b/amplify/functions/fetchDocuments/node_modules/@aws-sdk/client-sso/dist-types/ts3.4/protocols/Aws_restJson1.d.ts new file mode 100644 index 0000000..74eebdc --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@aws-sdk/client-sso/dist-types/ts3.4/protocols/Aws_restJson1.d.ts @@ -0,0 +1,53 @@ +import { + HttpRequest as __HttpRequest, + HttpResponse as __HttpResponse, +} from "@smithy/protocol-http"; +import { SerdeContext as __SerdeContext } from "@smithy/types"; +import { + GetRoleCredentialsCommandInput, + GetRoleCredentialsCommandOutput, +} from "../commands/GetRoleCredentialsCommand"; +import { + ListAccountRolesCommandInput, + ListAccountRolesCommandOutput, +} from "../commands/ListAccountRolesCommand"; +import { + ListAccountsCommandInput, + ListAccountsCommandOutput, +} from "../commands/ListAccountsCommand"; +import { + LogoutCommandInput, + LogoutCommandOutput, +} from "../commands/LogoutCommand"; +export declare const se_GetRoleCredentialsCommand: ( + input: GetRoleCredentialsCommandInput, + context: __SerdeContext +) => Promise<__HttpRequest>; +export declare const se_ListAccountRolesCommand: ( + input: ListAccountRolesCommandInput, + context: __SerdeContext +) => Promise<__HttpRequest>; +export declare const se_ListAccountsCommand: ( + input: ListAccountsCommandInput, + context: __SerdeContext +) => Promise<__HttpRequest>; +export declare const se_LogoutCommand: ( + input: LogoutCommandInput, + context: 
__SerdeContext +) => Promise<__HttpRequest>; +export declare const de_GetRoleCredentialsCommand: ( + output: __HttpResponse, + context: __SerdeContext +) => Promise; +export declare const de_ListAccountRolesCommand: ( + output: __HttpResponse, + context: __SerdeContext +) => Promise; +export declare const de_ListAccountsCommand: ( + output: __HttpResponse, + context: __SerdeContext +) => Promise; +export declare const de_LogoutCommand: ( + output: __HttpResponse, + context: __SerdeContext +) => Promise; diff --git a/amplify/functions/fetchDocuments/node_modules/@aws-sdk/client-sso/dist-types/ts3.4/runtimeConfig.browser.d.ts b/amplify/functions/fetchDocuments/node_modules/@aws-sdk/client-sso/dist-types/ts3.4/runtimeConfig.browser.d.ts new file mode 100644 index 0000000..4042bcf --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@aws-sdk/client-sso/dist-types/ts3.4/runtimeConfig.browser.d.ts @@ -0,0 +1,120 @@ +import { FetchHttpHandler as RequestHandler } from "@smithy/fetch-http-handler"; +import { SSOClientConfig } from "./SSOClient"; +export declare const getRuntimeConfig: (config: SSOClientConfig) => { + runtime: string; + defaultsMode: import("@smithy/types").Provider< + import("@smithy/smithy-client").ResolvedDefaultsMode + >; + bodyLengthChecker: import("@smithy/types").BodyLengthCalculator; + defaultUserAgentProvider: ( + config?: + | import("@aws-sdk/util-user-agent-browser").PreviouslyResolved + | undefined + ) => Promise; + maxAttempts: number | import("@smithy/types").Provider; + region: string | import("@smithy/types").Provider; + requestHandler: + | import("@smithy/protocol-http").HttpHandler + | RequestHandler; + retryMode: string | import("@smithy/types").Provider; + sha256: import("@smithy/types").HashConstructor; + streamCollector: import("@smithy/types").StreamCollector; + useDualstackEndpoint: boolean | import("@smithy/types").Provider; + useFipsEndpoint: boolean | import("@smithy/types").Provider; + apiVersion: string; + 
cacheMiddleware?: boolean | undefined; + urlParser: import("@smithy/types").UrlParser; + base64Decoder: import("@smithy/types").Decoder; + base64Encoder: (_input: string | Uint8Array) => string; + utf8Decoder: import("@smithy/types").Decoder; + utf8Encoder: (input: string | Uint8Array) => string; + disableHostPrefix: boolean; + serviceId: string; + profile?: string | undefined; + logger: import("@smithy/types").Logger; + extensions: import("./runtimeExtensions").RuntimeExtension[]; + customUserAgent?: string | import("@smithy/types").UserAgent | undefined; + userAgentAppId?: + | string + | import("@smithy/types").Provider + | undefined; + retryStrategy?: + | import("@smithy/types").RetryStrategy + | import("@smithy/types").RetryStrategyV2 + | undefined; + endpoint?: + | (( + | string + | import("@smithy/types").Endpoint + | import("@smithy/types").Provider + | import("@smithy/types").EndpointV2 + | import("@smithy/types").Provider + ) & + ( + | string + | import("@smithy/types").Provider + | import("@smithy/types").Endpoint + | import("@smithy/types").Provider + | import("@smithy/types").EndpointV2 + | import("@smithy/types").Provider + )) + | undefined; + endpointProvider: ( + endpointParams: import("./endpoint/EndpointParameters").EndpointParameters, + context?: { + logger?: import("@smithy/types").Logger | undefined; + } + ) => import("@smithy/types").EndpointV2; + tls?: boolean | undefined; + serviceConfiguredEndpoint?: undefined; + authSchemePreference?: + | string[] + | import("@smithy/types").Provider + | undefined; + httpAuthSchemes: + | import("@smithy/types").HttpAuthScheme[] + | ( + | { + schemeId: string; + identityProvider: ( + ipc: import("@smithy/types").IdentityProviderConfig + ) => + | import("@smithy/types").IdentityProvider< + import("@smithy/types").Identity + > + | undefined; + signer: import("@aws-sdk/core").AwsSdkSigV4Signer; + } + | { + schemeId: string; + identityProvider: ( + ipc: import("@smithy/types").IdentityProviderConfig + ) => + | 
import("@smithy/types").IdentityProvider< + import("@smithy/types").Identity + > + | (() => Promise<{}>); + signer: import("@smithy/core").NoAuthSigner; + } + )[]; + httpAuthSchemeProvider: import("./auth/httpAuthSchemeProvider").SSOHttpAuthSchemeProvider; + credentials?: + | import("@smithy/types").AwsCredentialIdentity + | import("@smithy/types").AwsCredentialIdentityProvider + | undefined; + signer?: + | import("@smithy/types").RequestSigner + | (( + authScheme?: import("@smithy/types").AuthScheme | undefined + ) => Promise) + | undefined; + signingEscapePath?: boolean | undefined; + systemClockOffset?: number | undefined; + signingRegion?: string | undefined; + signerConstructor?: + | (new ( + options: import("@smithy/signature-v4").SignatureV4Init & + import("@smithy/signature-v4").SignatureV4CryptoInit + ) => import("@smithy/types").RequestSigner) + | undefined; +}; diff --git a/amplify/functions/fetchDocuments/node_modules/@aws-sdk/client-sso/dist-types/ts3.4/runtimeConfig.d.ts b/amplify/functions/fetchDocuments/node_modules/@aws-sdk/client-sso/dist-types/ts3.4/runtimeConfig.d.ts new file mode 100644 index 0000000..7152445 --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@aws-sdk/client-sso/dist-types/ts3.4/runtimeConfig.d.ts @@ -0,0 +1,114 @@ +import { NodeHttpHandler as RequestHandler } from "@smithy/node-http-handler"; +import { SSOClientConfig } from "./SSOClient"; +export declare const getRuntimeConfig: (config: SSOClientConfig) => { + runtime: string; + defaultsMode: import("@smithy/types").Provider< + import("@smithy/smithy-client").ResolvedDefaultsMode + >; + authSchemePreference: string[] | import("@smithy/types").Provider; + bodyLengthChecker: import("@smithy/types").BodyLengthCalculator; + defaultUserAgentProvider: ( + config?: + | import("@aws-sdk/util-user-agent-node").PreviouslyResolved + | undefined + ) => Promise; + maxAttempts: number | import("@smithy/types").Provider; + region: string | import("@smithy/types").Provider; + 
requestHandler: + | RequestHandler + | import("@smithy/protocol-http").HttpHandler; + retryMode: string | import("@smithy/types").Provider; + sha256: import("@smithy/types").HashConstructor; + streamCollector: import("@smithy/types").StreamCollector; + useDualstackEndpoint: boolean | import("@smithy/types").Provider; + useFipsEndpoint: boolean | import("@smithy/types").Provider; + userAgentAppId: string | import("@smithy/types").Provider; + apiVersion: string; + cacheMiddleware?: boolean | undefined; + urlParser: import("@smithy/types").UrlParser; + base64Decoder: import("@smithy/types").Decoder; + base64Encoder: (_input: string | Uint8Array) => string; + utf8Decoder: import("@smithy/types").Decoder; + utf8Encoder: (input: string | Uint8Array) => string; + disableHostPrefix: boolean; + serviceId: string; + profile?: string | undefined; + logger: import("@smithy/types").Logger; + extensions: import("./runtimeExtensions").RuntimeExtension[]; + customUserAgent?: string | import("@smithy/types").UserAgent | undefined; + retryStrategy?: + | import("@smithy/types").RetryStrategy + | import("@smithy/types").RetryStrategyV2 + | undefined; + endpoint?: + | (( + | string + | import("@smithy/types").Endpoint + | import("@smithy/types").Provider + | import("@smithy/types").EndpointV2 + | import("@smithy/types").Provider + ) & + ( + | string + | import("@smithy/types").Provider + | import("@smithy/types").Endpoint + | import("@smithy/types").Provider + | import("@smithy/types").EndpointV2 + | import("@smithy/types").Provider + )) + | undefined; + endpointProvider: ( + endpointParams: import("./endpoint/EndpointParameters").EndpointParameters, + context?: { + logger?: import("@smithy/types").Logger | undefined; + } + ) => import("@smithy/types").EndpointV2; + tls?: boolean | undefined; + serviceConfiguredEndpoint?: undefined; + httpAuthSchemes: + | import("@smithy/types").HttpAuthScheme[] + | ( + | { + schemeId: string; + identityProvider: ( + ipc: 
import("@smithy/types").IdentityProviderConfig + ) => + | import("@smithy/types").IdentityProvider< + import("@smithy/types").Identity + > + | undefined; + signer: import("@aws-sdk/core").AwsSdkSigV4Signer; + } + | { + schemeId: string; + identityProvider: ( + ipc: import("@smithy/types").IdentityProviderConfig + ) => + | import("@smithy/types").IdentityProvider< + import("@smithy/types").Identity + > + | (() => Promise<{}>); + signer: import("@smithy/core").NoAuthSigner; + } + )[]; + httpAuthSchemeProvider: import("./auth/httpAuthSchemeProvider").SSOHttpAuthSchemeProvider; + credentials?: + | import("@smithy/types").AwsCredentialIdentity + | import("@smithy/types").AwsCredentialIdentityProvider + | undefined; + signer?: + | import("@smithy/types").RequestSigner + | (( + authScheme?: import("@smithy/types").AuthScheme | undefined + ) => Promise) + | undefined; + signingEscapePath?: boolean | undefined; + systemClockOffset?: number | undefined; + signingRegion?: string | undefined; + signerConstructor?: + | (new ( + options: import("@smithy/signature-v4").SignatureV4Init & + import("@smithy/signature-v4").SignatureV4CryptoInit + ) => import("@smithy/types").RequestSigner) + | undefined; +}; diff --git a/amplify/functions/fetchDocuments/node_modules/@aws-sdk/client-sso/dist-types/ts3.4/runtimeConfig.native.d.ts b/amplify/functions/fetchDocuments/node_modules/@aws-sdk/client-sso/dist-types/ts3.4/runtimeConfig.native.d.ts new file mode 100644 index 0000000..3dc6c95 --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@aws-sdk/client-sso/dist-types/ts3.4/runtimeConfig.native.d.ts @@ -0,0 +1,124 @@ +import { SSOClientConfig } from "./SSOClient"; +export declare const getRuntimeConfig: (config: SSOClientConfig) => { + runtime: string; + sha256: import("@smithy/types").HashConstructor; + requestHandler: + | import("@smithy/types").NodeHttpHandlerOptions + | import("@smithy/types").FetchHttpHandlerOptions + | Record + | 
import("@smithy/protocol-http").HttpHandler + | import("@smithy/fetch-http-handler").FetchHttpHandler; + apiVersion: string; + cacheMiddleware?: boolean | undefined; + urlParser: import("@smithy/types").UrlParser; + bodyLengthChecker: import("@smithy/types").BodyLengthCalculator; + streamCollector: import("@smithy/types").StreamCollector; + base64Decoder: import("@smithy/types").Decoder; + base64Encoder: (_input: string | Uint8Array) => string; + utf8Decoder: import("@smithy/types").Decoder; + utf8Encoder: (input: string | Uint8Array) => string; + disableHostPrefix: boolean; + serviceId: string; + useDualstackEndpoint: boolean | import("@smithy/types").Provider; + useFipsEndpoint: boolean | import("@smithy/types").Provider; + region: string | import("@smithy/types").Provider; + profile?: string | undefined; + defaultUserAgentProvider: ( + config?: + | import("@aws-sdk/util-user-agent-browser").PreviouslyResolved + | undefined + ) => Promise; + maxAttempts: number | import("@smithy/types").Provider; + retryMode: string | import("@smithy/types").Provider; + logger: import("@smithy/types").Logger; + extensions: import("./runtimeExtensions").RuntimeExtension[]; + defaultsMode: + | import("@smithy/smithy-client").DefaultsMode + | import("@smithy/types").Provider< + import("@smithy/smithy-client").DefaultsMode + >; + customUserAgent?: string | import("@smithy/types").UserAgent | undefined; + userAgentAppId?: + | string + | import("@smithy/types").Provider + | undefined; + retryStrategy?: + | import("@smithy/types").RetryStrategy + | import("@smithy/types").RetryStrategyV2 + | undefined; + endpoint?: + | (( + | string + | import("@smithy/types").Endpoint + | import("@smithy/types").Provider + | import("@smithy/types").EndpointV2 + | import("@smithy/types").Provider + ) & + ( + | string + | import("@smithy/types").Provider + | import("@smithy/types").Endpoint + | import("@smithy/types").Provider + | import("@smithy/types").EndpointV2 + | import("@smithy/types").Provider + 
)) + | undefined; + endpointProvider: ( + endpointParams: import("./endpoint/EndpointParameters").EndpointParameters, + context?: { + logger?: import("@smithy/types").Logger | undefined; + } + ) => import("@smithy/types").EndpointV2; + tls?: boolean | undefined; + serviceConfiguredEndpoint?: undefined; + authSchemePreference?: + | string[] + | import("@smithy/types").Provider + | undefined; + httpAuthSchemes: + | import("@smithy/types").HttpAuthScheme[] + | ( + | { + schemeId: string; + identityProvider: ( + ipc: import("@smithy/types").IdentityProviderConfig + ) => + | import("@smithy/types").IdentityProvider< + import("@smithy/types").Identity + > + | undefined; + signer: import("@aws-sdk/core").AwsSdkSigV4Signer; + } + | { + schemeId: string; + identityProvider: ( + ipc: import("@smithy/types").IdentityProviderConfig + ) => + | import("@smithy/types").IdentityProvider< + import("@smithy/types").Identity + > + | (() => Promise<{}>); + signer: import("@smithy/core").NoAuthSigner; + } + )[]; + httpAuthSchemeProvider: import("./auth/httpAuthSchemeProvider").SSOHttpAuthSchemeProvider; + credentials?: + | import("@smithy/types").AwsCredentialIdentity + | import("@smithy/types").AwsCredentialIdentityProvider + | undefined; + signer?: + | import("@smithy/types").RequestSigner + | (( + authScheme?: import("@smithy/types").AuthScheme | undefined + ) => Promise) + | undefined; + signingEscapePath?: boolean | undefined; + systemClockOffset?: number | undefined; + signingRegion?: string | undefined; + signerConstructor?: + | (new ( + options: import("@smithy/signature-v4").SignatureV4Init & + import("@smithy/signature-v4").SignatureV4CryptoInit + ) => import("@smithy/types").RequestSigner) + | undefined; +}; diff --git a/amplify/functions/fetchDocuments/node_modules/@aws-sdk/client-sso/dist-types/ts3.4/runtimeConfig.shared.d.ts b/amplify/functions/fetchDocuments/node_modules/@aws-sdk/client-sso/dist-types/ts3.4/runtimeConfig.shared.d.ts new file mode 100644 index 
0000000..00b2942 --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@aws-sdk/client-sso/dist-types/ts3.4/runtimeConfig.shared.d.ts @@ -0,0 +1,49 @@ +import { AwsSdkSigV4Signer } from "@aws-sdk/core"; +import { NoAuthSigner } from "@smithy/core"; +import { IdentityProviderConfig } from "@smithy/types"; +import { SSOClientConfig } from "./SSOClient"; +export declare const getRuntimeConfig: (config: SSOClientConfig) => { + apiVersion: string; + base64Decoder: import("@smithy/types").Decoder; + base64Encoder: (_input: string | Uint8Array) => string; + disableHostPrefix: boolean; + endpointProvider: ( + endpointParams: import("./endpoint/EndpointParameters").EndpointParameters, + context?: { + logger?: import("@smithy/types").Logger | undefined; + } + ) => import("@smithy/types").EndpointV2; + extensions: import("./runtimeExtensions").RuntimeExtension[]; + httpAuthSchemeProvider: import("./auth/httpAuthSchemeProvider").SSOHttpAuthSchemeProvider; + httpAuthSchemes: + | import("@smithy/types").HttpAuthScheme[] + | ( + | { + schemeId: string; + identityProvider: ( + ipc: IdentityProviderConfig + ) => + | import("@smithy/types").IdentityProvider< + import("@smithy/types").Identity + > + | undefined; + signer: AwsSdkSigV4Signer; + } + | { + schemeId: string; + identityProvider: ( + ipc: IdentityProviderConfig + ) => + | import("@smithy/types").IdentityProvider< + import("@smithy/types").Identity + > + | (() => Promise<{}>); + signer: NoAuthSigner; + } + )[]; + logger: import("@smithy/types").Logger; + serviceId: string; + urlParser: import("@smithy/types").UrlParser; + utf8Decoder: import("@smithy/types").Decoder; + utf8Encoder: (input: string | Uint8Array) => string; +}; diff --git a/amplify/functions/fetchDocuments/node_modules/@aws-sdk/client-sso/dist-types/ts3.4/runtimeExtensions.d.ts b/amplify/functions/fetchDocuments/node_modules/@aws-sdk/client-sso/dist-types/ts3.4/runtimeExtensions.d.ts new file mode 100644 index 0000000..fbec1e5 --- /dev/null +++ 
b/amplify/functions/fetchDocuments/node_modules/@aws-sdk/client-sso/dist-types/ts3.4/runtimeExtensions.d.ts @@ -0,0 +1,11 @@ +import { SSOExtensionConfiguration } from "./extensionConfiguration"; +export interface RuntimeExtension { + configure(extensionConfiguration: SSOExtensionConfiguration): void; +} +export interface RuntimeExtensionsConfig { + extensions: RuntimeExtension[]; +} +export declare const resolveRuntimeExtensions: ( + runtimeConfig: any, + extensions: RuntimeExtension[] +) => any; diff --git a/amplify/functions/fetchDocuments/node_modules/@aws-sdk/client-sso/package.json b/amplify/functions/fetchDocuments/node_modules/@aws-sdk/client-sso/package.json new file mode 100644 index 0000000..971fcd1 --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@aws-sdk/client-sso/package.json @@ -0,0 +1,98 @@ +{ + "name": "@aws-sdk/client-sso", + "description": "AWS SDK for JavaScript Sso Client for Node.js, Browser and React Native", + "version": "3.803.0", + "scripts": { + "build": "concurrently 'yarn:build:cjs' 'yarn:build:es' 'yarn:build:types'", + "build:cjs": "node ../../scripts/compilation/inline client-sso", + "build:es": "tsc -p tsconfig.es.json", + "build:include:deps": "lerna run --scope $npm_package_name --include-dependencies build", + "build:types": "tsc -p tsconfig.types.json", + "build:types:downlevel": "downlevel-dts dist-types dist-types/ts3.4", + "clean": "rimraf ./dist-* && rimraf *.tsbuildinfo", + "extract:docs": "api-extractor run --local", + "generate:client": "node ../../scripts/generate-clients/single-service --solo sso" + }, + "main": "./dist-cjs/index.js", + "types": "./dist-types/index.d.ts", + "module": "./dist-es/index.js", + "sideEffects": false, + "dependencies": { + "@aws-crypto/sha256-browser": "5.2.0", + "@aws-crypto/sha256-js": "5.2.0", + "@aws-sdk/core": "3.799.0", + "@aws-sdk/middleware-host-header": "3.775.0", + "@aws-sdk/middleware-logger": "3.775.0", + "@aws-sdk/middleware-recursion-detection": "3.775.0", + 
"@aws-sdk/middleware-user-agent": "3.799.0", + "@aws-sdk/region-config-resolver": "3.775.0", + "@aws-sdk/types": "3.775.0", + "@aws-sdk/util-endpoints": "3.787.0", + "@aws-sdk/util-user-agent-browser": "3.775.0", + "@aws-sdk/util-user-agent-node": "3.799.0", + "@smithy/config-resolver": "^4.1.0", + "@smithy/core": "^3.3.0", + "@smithy/fetch-http-handler": "^5.0.2", + "@smithy/hash-node": "^4.0.2", + "@smithy/invalid-dependency": "^4.0.2", + "@smithy/middleware-content-length": "^4.0.2", + "@smithy/middleware-endpoint": "^4.1.1", + "@smithy/middleware-retry": "^4.1.2", + "@smithy/middleware-serde": "^4.0.3", + "@smithy/middleware-stack": "^4.0.2", + "@smithy/node-config-provider": "^4.0.2", + "@smithy/node-http-handler": "^4.0.4", + "@smithy/protocol-http": "^5.1.0", + "@smithy/smithy-client": "^4.2.1", + "@smithy/types": "^4.2.0", + "@smithy/url-parser": "^4.0.2", + "@smithy/util-base64": "^4.0.0", + "@smithy/util-body-length-browser": "^4.0.0", + "@smithy/util-body-length-node": "^4.0.0", + "@smithy/util-defaults-mode-browser": "^4.0.9", + "@smithy/util-defaults-mode-node": "^4.0.9", + "@smithy/util-endpoints": "^3.0.2", + "@smithy/util-middleware": "^4.0.2", + "@smithy/util-retry": "^4.0.3", + "@smithy/util-utf8": "^4.0.0", + "tslib": "^2.6.2" + }, + "devDependencies": { + "@tsconfig/node18": "18.2.4", + "@types/node": "^18.19.69", + "concurrently": "7.0.0", + "downlevel-dts": "0.10.1", + "rimraf": "3.0.2", + "typescript": "~5.2.2" + }, + "engines": { + "node": ">=18.0.0" + }, + "typesVersions": { + "<4.0": { + "dist-types/*": [ + "dist-types/ts3.4/*" + ] + } + }, + "files": [ + "dist-*/**" + ], + "author": { + "name": "AWS SDK for JavaScript Team", + "url": "https://aws.amazon.com/javascript/" + }, + "license": "Apache-2.0", + "browser": { + "./dist-es/runtimeConfig": "./dist-es/runtimeConfig.browser" + }, + "react-native": { + "./dist-es/runtimeConfig": "./dist-es/runtimeConfig.native" + }, + "homepage": 
"https://github.com/aws/aws-sdk-js-v3/tree/main/clients/client-sso", + "repository": { + "type": "git", + "url": "https://github.com/aws/aws-sdk-js-v3.git", + "directory": "clients/client-sso" + } +} diff --git a/amplify/functions/fetchDocuments/node_modules/@aws-sdk/core/README.md b/amplify/functions/fetchDocuments/node_modules/@aws-sdk/core/README.md new file mode 100644 index 0000000..6056468 --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@aws-sdk/core/README.md @@ -0,0 +1,39 @@ +# `@aws-sdk/core` + +This package provides common or core functionality to the AWS SDK for JavaScript (v3). + +You do not need to explicitly install this package, since it will be transitively installed by AWS SDK clients. + +## `@aws-sdk/core` submodules + +Core submodules are organized for distribution via the `package.json` `exports` field. + +`exports` is supported by default by the latest Node.js, webpack, and esbuild. For react-native, it can be +enabled via instructions found at [reactnative.dev/blog](https://reactnative.dev/blog/2023/06/21/package-exports-support). + +Think of `@aws-sdk/core` as a mono-package within the monorepo. +It preserves the benefits of modularization, for example to optimize Node.js initialization speed, +while making it easier to have a consistent version of core dependencies, reducing package sprawl when +installing an SDK client. + +### Guide for submodules + +- Each `index.ts` file corresponding to the pattern `./src/submodules//index.ts` will be + published as a separate `dist-cjs` bundled submodule index using the `Inliner.js` build script. +- create a folder as `./src/submodules/` including an `index.ts` file and a `README.md` file. + - The linter will throw an error on missing submodule metadata in `package.json` and the various `tsconfig.json` files, but it will automatically fix them if possible. +- a submodule is equivalent to a standalone `@aws-sdk/` package in that importing it in Node.js will resolve a separate bundle. 
+- submodules may not relatively import files from other submodules. Instead, directly use the `@scope/pkg/submodule` name as the import. + - The linter will check for this and throw an error. +- To the extent possible, correctly declaring submodule metadata is validated by the linter in `@aws-sdk/core`. + The linter runs during `yarn build` and also as `yarn lint`. + +### When should I create an `@aws-sdk/core/submodule` vs. `@aws-sdk/new-package`? + +Keep in mind that the core package is installed by all AWS SDK clients. + +If the component functionality is upstream of multiple clients, it is +a good candidate for a core submodule. For example, XML serialization. + +If the component's functionality is downstream of a client, for example S3 pre-signing, +it should be a standalone package with potentially a peer or runtime dependency on an AWS SDK client. diff --git a/amplify/functions/fetchDocuments/node_modules/@aws-sdk/core/account-id-endpoint.d.ts b/amplify/functions/fetchDocuments/node_modules/@aws-sdk/core/account-id-endpoint.d.ts new file mode 100644 index 0000000..60f14d1 --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@aws-sdk/core/account-id-endpoint.d.ts @@ -0,0 +1,7 @@ +/** + * Do not edit: + * This is a compatibility redirect for contexts that do not understand package.json exports field. + */ +declare module "@aws-sdk/core/account-id-endpoint" { + export * from "@aws-sdk/core/dist-types/submodules/account-id-endpoint/index.d"; +} diff --git a/amplify/functions/fetchDocuments/node_modules/@aws-sdk/core/account-id-endpoint.js b/amplify/functions/fetchDocuments/node_modules/@aws-sdk/core/account-id-endpoint.js new file mode 100644 index 0000000..b2550f7 --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@aws-sdk/core/account-id-endpoint.js @@ -0,0 +1,6 @@ + +/** + * Do not edit: + * This is a compatibility redirect for contexts that do not understand package.json exports field. 
+ */ +module.exports = require("./dist-cjs/submodules/account-id-endpoint/index.js"); diff --git a/amplify/functions/fetchDocuments/node_modules/@aws-sdk/core/client.d.ts b/amplify/functions/fetchDocuments/node_modules/@aws-sdk/core/client.d.ts new file mode 100644 index 0000000..ce995ae --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@aws-sdk/core/client.d.ts @@ -0,0 +1,7 @@ +/** + * Do not edit: + * This is a compatibility redirect for contexts that do not understand package.json exports field. + */ +declare module "@aws-sdk/core/client" { + export * from "@aws-sdk/core/dist-types/submodules/client/index.d"; +} diff --git a/amplify/functions/fetchDocuments/node_modules/@aws-sdk/core/client.js b/amplify/functions/fetchDocuments/node_modules/@aws-sdk/core/client.js new file mode 100644 index 0000000..e3a644b --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@aws-sdk/core/client.js @@ -0,0 +1,5 @@ +/** + * Do not edit: + * This is a compatibility redirect for contexts that do not understand package.json exports field. 
+ */ +module.exports = require("./dist-cjs/submodules/client/index.js"); diff --git a/amplify/functions/fetchDocuments/node_modules/@aws-sdk/core/dist-cjs/index.js b/amplify/functions/fetchDocuments/node_modules/@aws-sdk/core/dist-cjs/index.js new file mode 100644 index 0000000..cddde6a --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@aws-sdk/core/dist-cjs/index.js @@ -0,0 +1,6 @@ +"use strict"; +Object.defineProperty(exports, "__esModule", { value: true }); +const tslib_1 = require("tslib"); +tslib_1.__exportStar(require("./submodules/client/index"), exports); +tslib_1.__exportStar(require("./submodules/httpAuthSchemes/index"), exports); +tslib_1.__exportStar(require("./submodules/protocols/index"), exports); diff --git a/amplify/functions/fetchDocuments/node_modules/@aws-sdk/core/dist-cjs/submodules/account-id-endpoint/index.js b/amplify/functions/fetchDocuments/node_modules/@aws-sdk/core/dist-cjs/submodules/account-id-endpoint/index.js new file mode 100644 index 0000000..c277b79 --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@aws-sdk/core/dist-cjs/submodules/account-id-endpoint/index.js @@ -0,0 +1,95 @@ +"use strict"; +var __defProp = Object.defineProperty; +var __getOwnPropDesc = Object.getOwnPropertyDescriptor; +var __getOwnPropNames = Object.getOwnPropertyNames; +var __hasOwnProp = Object.prototype.hasOwnProperty; +var __name = (target, value) => __defProp(target, "name", { value, configurable: true }); +var __export = (target, all) => { + for (var name in all) + __defProp(target, name, { get: all[name], enumerable: true }); +}; +var __copyProps = (to, from, except, desc) => { + if (from && typeof from === "object" || typeof from === "function") { + for (let key of __getOwnPropNames(from)) + if (!__hasOwnProp.call(to, key) && key !== except) + __defProp(to, key, { get: () => from[key], enumerable: !(desc = __getOwnPropDesc(from, key)) || desc.enumerable }); + } + return to; +}; +var __toCommonJS = (mod) => 
__copyProps(__defProp({}, "__esModule", { value: true }), mod); + +// src/submodules/account-id-endpoint/index.ts +var index_exports = {}; +__export(index_exports, { + ACCOUNT_ID_ENDPOINT_MODE_VALUES: () => ACCOUNT_ID_ENDPOINT_MODE_VALUES, + CONFIG_ACCOUNT_ID_ENDPOINT_MODE: () => CONFIG_ACCOUNT_ID_ENDPOINT_MODE, + DEFAULT_ACCOUNT_ID_ENDPOINT_MODE: () => DEFAULT_ACCOUNT_ID_ENDPOINT_MODE, + ENV_ACCOUNT_ID_ENDPOINT_MODE: () => ENV_ACCOUNT_ID_ENDPOINT_MODE, + NODE_ACCOUNT_ID_ENDPOINT_MODE_CONFIG_OPTIONS: () => NODE_ACCOUNT_ID_ENDPOINT_MODE_CONFIG_OPTIONS, + resolveAccountIdEndpointModeConfig: () => resolveAccountIdEndpointModeConfig, + validateAccountIdEndpointMode: () => validateAccountIdEndpointMode +}); +module.exports = __toCommonJS(index_exports); + +// src/submodules/account-id-endpoint/AccountIdEndpointModeConfigResolver.ts +var import_util_middleware = require("@smithy/util-middleware"); + +// src/submodules/account-id-endpoint/AccountIdEndpointModeConstants.ts +var DEFAULT_ACCOUNT_ID_ENDPOINT_MODE = "preferred"; +var ACCOUNT_ID_ENDPOINT_MODE_VALUES = ["disabled", "preferred", "required"]; +function validateAccountIdEndpointMode(value) { + return ACCOUNT_ID_ENDPOINT_MODE_VALUES.includes(value); +} +__name(validateAccountIdEndpointMode, "validateAccountIdEndpointMode"); + +// src/submodules/account-id-endpoint/AccountIdEndpointModeConfigResolver.ts +var resolveAccountIdEndpointModeConfig = /* @__PURE__ */ __name((input) => { + const { accountIdEndpointMode } = input; + const accountIdEndpointModeProvider = (0, import_util_middleware.normalizeProvider)(accountIdEndpointMode ?? DEFAULT_ACCOUNT_ID_ENDPOINT_MODE); + return Object.assign(input, { + accountIdEndpointMode: /* @__PURE__ */ __name(async () => { + const accIdMode = await accountIdEndpointModeProvider(); + if (!validateAccountIdEndpointMode(accIdMode)) { + throw new Error( + `Invalid value for accountIdEndpointMode: ${accIdMode}. 
Valid values are: "required", "preferred", "disabled".` + ); + } + return accIdMode; + }, "accountIdEndpointMode") + }); +}, "resolveAccountIdEndpointModeConfig"); + +// src/submodules/account-id-endpoint/NodeAccountIdEndpointModeConfigOptions.ts +var err = "Invalid AccountIdEndpointMode value"; +var _throw = /* @__PURE__ */ __name((message) => { + throw new Error(message); +}, "_throw"); +var ENV_ACCOUNT_ID_ENDPOINT_MODE = "AWS_ACCOUNT_ID_ENDPOINT_MODE"; +var CONFIG_ACCOUNT_ID_ENDPOINT_MODE = "account_id_endpoint_mode"; +var NODE_ACCOUNT_ID_ENDPOINT_MODE_CONFIG_OPTIONS = { + environmentVariableSelector: /* @__PURE__ */ __name((env) => { + const value = env[ENV_ACCOUNT_ID_ENDPOINT_MODE]; + if (value && !validateAccountIdEndpointMode(value)) { + _throw(err); + } + return value; + }, "environmentVariableSelector"), + configFileSelector: /* @__PURE__ */ __name((profile) => { + const value = profile[CONFIG_ACCOUNT_ID_ENDPOINT_MODE]; + if (value && !validateAccountIdEndpointMode(value)) { + _throw(err); + } + return value; + }, "configFileSelector"), + default: DEFAULT_ACCOUNT_ID_ENDPOINT_MODE +}; +// Annotate the CommonJS export names for ESM import in node: +0 && (module.exports = { + ACCOUNT_ID_ENDPOINT_MODE_VALUES, + CONFIG_ACCOUNT_ID_ENDPOINT_MODE, + DEFAULT_ACCOUNT_ID_ENDPOINT_MODE, + ENV_ACCOUNT_ID_ENDPOINT_MODE, + NODE_ACCOUNT_ID_ENDPOINT_MODE_CONFIG_OPTIONS, + resolveAccountIdEndpointModeConfig, + validateAccountIdEndpointMode +}); diff --git a/amplify/functions/fetchDocuments/node_modules/@aws-sdk/core/dist-cjs/submodules/client/index.js b/amplify/functions/fetchDocuments/node_modules/@aws-sdk/core/dist-cjs/submodules/client/index.js new file mode 100644 index 0000000..ebd6c61 --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@aws-sdk/core/dist-cjs/submodules/client/index.js @@ -0,0 +1,78 @@ +"use strict"; +var __defProp = Object.defineProperty; +var __getOwnPropDesc = Object.getOwnPropertyDescriptor; +var __getOwnPropNames = 
Object.getOwnPropertyNames; +var __hasOwnProp = Object.prototype.hasOwnProperty; +var __name = (target, value) => __defProp(target, "name", { value, configurable: true }); +var __export = (target, all) => { + for (var name in all) + __defProp(target, name, { get: all[name], enumerable: true }); +}; +var __copyProps = (to, from, except, desc) => { + if (from && typeof from === "object" || typeof from === "function") { + for (let key of __getOwnPropNames(from)) + if (!__hasOwnProp.call(to, key) && key !== except) + __defProp(to, key, { get: () => from[key], enumerable: !(desc = __getOwnPropDesc(from, key)) || desc.enumerable }); + } + return to; +}; +var __toCommonJS = (mod) => __copyProps(__defProp({}, "__esModule", { value: true }), mod); + +// src/submodules/client/index.ts +var index_exports = {}; +__export(index_exports, { + emitWarningIfUnsupportedVersion: () => emitWarningIfUnsupportedVersion, + setCredentialFeature: () => setCredentialFeature, + setFeature: () => setFeature, + state: () => state +}); +module.exports = __toCommonJS(index_exports); + +// src/submodules/client/emitWarningIfUnsupportedVersion.ts +var state = { + warningEmitted: false +}; +var emitWarningIfUnsupportedVersion = /* @__PURE__ */ __name((version) => { + if (version && !state.warningEmitted && parseInt(version.substring(1, version.indexOf("."))) < 18) { + state.warningEmitted = true; + process.emitWarning( + `NodeDeprecationWarning: The AWS SDK for JavaScript (v3) will +no longer support Node.js 16.x on January 6, 2025. + +To continue receiving updates to AWS services, bug fixes, and security +updates please upgrade to a supported Node.js LTS version. 
+ +More information can be found at: https://a.co/74kJMmI` + ); + } +}, "emitWarningIfUnsupportedVersion"); + +// src/submodules/client/setCredentialFeature.ts +function setCredentialFeature(credentials, feature, value) { + if (!credentials.$source) { + credentials.$source = {}; + } + credentials.$source[feature] = value; + return credentials; +} +__name(setCredentialFeature, "setCredentialFeature"); + +// src/submodules/client/setFeature.ts +function setFeature(context, feature, value) { + if (!context.__aws_sdk_context) { + context.__aws_sdk_context = { + features: {} + }; + } else if (!context.__aws_sdk_context.features) { + context.__aws_sdk_context.features = {}; + } + context.__aws_sdk_context.features[feature] = value; +} +__name(setFeature, "setFeature"); +// Annotate the CommonJS export names for ESM import in node: +0 && (module.exports = { + emitWarningIfUnsupportedVersion, + setCredentialFeature, + setFeature, + state +}); diff --git a/amplify/functions/fetchDocuments/node_modules/@aws-sdk/core/dist-cjs/submodules/httpAuthSchemes/index.js b/amplify/functions/fetchDocuments/node_modules/@aws-sdk/core/dist-cjs/submodules/httpAuthSchemes/index.js new file mode 100644 index 0000000..82db91e --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@aws-sdk/core/dist-cjs/submodules/httpAuthSchemes/index.js @@ -0,0 +1,382 @@ +"use strict"; +var __defProp = Object.defineProperty; +var __getOwnPropDesc = Object.getOwnPropertyDescriptor; +var __getOwnPropNames = Object.getOwnPropertyNames; +var __hasOwnProp = Object.prototype.hasOwnProperty; +var __name = (target, value) => __defProp(target, "name", { value, configurable: true }); +var __export = (target, all) => { + for (var name in all) + __defProp(target, name, { get: all[name], enumerable: true }); +}; +var __copyProps = (to, from, except, desc) => { + if (from && typeof from === "object" || typeof from === "function") { + for (let key of __getOwnPropNames(from)) + if (!__hasOwnProp.call(to, key) && 
key !== except) + __defProp(to, key, { get: () => from[key], enumerable: !(desc = __getOwnPropDesc(from, key)) || desc.enumerable }); + } + return to; +}; +var __toCommonJS = (mod) => __copyProps(__defProp({}, "__esModule", { value: true }), mod); + +// src/submodules/httpAuthSchemes/index.ts +var index_exports = {}; +__export(index_exports, { + AWSSDKSigV4Signer: () => AWSSDKSigV4Signer, + AwsSdkSigV4ASigner: () => AwsSdkSigV4ASigner, + AwsSdkSigV4Signer: () => AwsSdkSigV4Signer, + NODE_AUTH_SCHEME_PREFERENCE_OPTIONS: () => NODE_AUTH_SCHEME_PREFERENCE_OPTIONS, + NODE_SIGV4A_CONFIG_OPTIONS: () => NODE_SIGV4A_CONFIG_OPTIONS, + resolveAWSSDKSigV4Config: () => resolveAWSSDKSigV4Config, + resolveAwsSdkSigV4AConfig: () => resolveAwsSdkSigV4AConfig, + resolveAwsSdkSigV4Config: () => resolveAwsSdkSigV4Config, + validateSigningProperties: () => validateSigningProperties +}); +module.exports = __toCommonJS(index_exports); + +// src/submodules/httpAuthSchemes/aws_sdk/AwsSdkSigV4Signer.ts +var import_protocol_http2 = require("@smithy/protocol-http"); + +// src/submodules/httpAuthSchemes/utils/getDateHeader.ts +var import_protocol_http = require("@smithy/protocol-http"); +var getDateHeader = /* @__PURE__ */ __name((response) => import_protocol_http.HttpResponse.isInstance(response) ? response.headers?.date ?? 
response.headers?.Date : void 0, "getDateHeader"); + +// src/submodules/httpAuthSchemes/utils/getSkewCorrectedDate.ts +var getSkewCorrectedDate = /* @__PURE__ */ __name((systemClockOffset) => new Date(Date.now() + systemClockOffset), "getSkewCorrectedDate"); + +// src/submodules/httpAuthSchemes/utils/isClockSkewed.ts +var isClockSkewed = /* @__PURE__ */ __name((clockTime, systemClockOffset) => Math.abs(getSkewCorrectedDate(systemClockOffset).getTime() - clockTime) >= 3e5, "isClockSkewed"); + +// src/submodules/httpAuthSchemes/utils/getUpdatedSystemClockOffset.ts +var getUpdatedSystemClockOffset = /* @__PURE__ */ __name((clockTime, currentSystemClockOffset) => { + const clockTimeInMs = Date.parse(clockTime); + if (isClockSkewed(clockTimeInMs, currentSystemClockOffset)) { + return clockTimeInMs - Date.now(); + } + return currentSystemClockOffset; +}, "getUpdatedSystemClockOffset"); + +// src/submodules/httpAuthSchemes/aws_sdk/AwsSdkSigV4Signer.ts +var throwSigningPropertyError = /* @__PURE__ */ __name((name, property) => { + if (!property) { + throw new Error(`Property \`${name}\` is not resolved for AWS SDK SigV4Auth`); + } + return property; +}, "throwSigningPropertyError"); +var validateSigningProperties = /* @__PURE__ */ __name(async (signingProperties) => { + const context = throwSigningPropertyError( + "context", + signingProperties.context + ); + const config = throwSigningPropertyError("config", signingProperties.config); + const authScheme = context.endpointV2?.properties?.authSchemes?.[0]; + const signerFunction = throwSigningPropertyError( + "signer", + config.signer + ); + const signer = await signerFunction(authScheme); + const signingRegion = signingProperties?.signingRegion; + const signingRegionSet = signingProperties?.signingRegionSet; + const signingName = signingProperties?.signingName; + return { + config, + signer, + signingRegion, + signingRegionSet, + signingName + }; +}, "validateSigningProperties"); +var AwsSdkSigV4Signer = class { + static { 
+ __name(this, "AwsSdkSigV4Signer"); + } + async sign(httpRequest, identity, signingProperties) { + if (!import_protocol_http2.HttpRequest.isInstance(httpRequest)) { + throw new Error("The request is not an instance of `HttpRequest` and cannot be signed"); + } + const validatedProps = await validateSigningProperties(signingProperties); + const { config, signer } = validatedProps; + let { signingRegion, signingName } = validatedProps; + const handlerExecutionContext = signingProperties.context; + if (handlerExecutionContext?.authSchemes?.length ?? 0 > 1) { + const [first, second] = handlerExecutionContext.authSchemes; + if (first?.name === "sigv4a" && second?.name === "sigv4") { + signingRegion = second?.signingRegion ?? signingRegion; + signingName = second?.signingName ?? signingName; + } + } + const signedRequest = await signer.sign(httpRequest, { + signingDate: getSkewCorrectedDate(config.systemClockOffset), + signingRegion, + signingService: signingName + }); + return signedRequest; + } + errorHandler(signingProperties) { + return (error) => { + const serverTime = error.ServerTime ?? 
getDateHeader(error.$response); + if (serverTime) { + const config = throwSigningPropertyError("config", signingProperties.config); + const initialSystemClockOffset = config.systemClockOffset; + config.systemClockOffset = getUpdatedSystemClockOffset(serverTime, config.systemClockOffset); + const clockSkewCorrected = config.systemClockOffset !== initialSystemClockOffset; + if (clockSkewCorrected && error.$metadata) { + error.$metadata.clockSkewCorrected = true; + } + } + throw error; + }; + } + successHandler(httpResponse, signingProperties) { + const dateHeader = getDateHeader(httpResponse); + if (dateHeader) { + const config = throwSigningPropertyError("config", signingProperties.config); + config.systemClockOffset = getUpdatedSystemClockOffset(dateHeader, config.systemClockOffset); + } + } +}; +var AWSSDKSigV4Signer = AwsSdkSigV4Signer; + +// src/submodules/httpAuthSchemes/aws_sdk/AwsSdkSigV4ASigner.ts +var import_protocol_http3 = require("@smithy/protocol-http"); +var AwsSdkSigV4ASigner = class extends AwsSdkSigV4Signer { + static { + __name(this, "AwsSdkSigV4ASigner"); + } + async sign(httpRequest, identity, signingProperties) { + if (!import_protocol_http3.HttpRequest.isInstance(httpRequest)) { + throw new Error("The request is not an instance of `HttpRequest` and cannot be signed"); + } + const { config, signer, signingRegion, signingRegionSet, signingName } = await validateSigningProperties( + signingProperties + ); + const configResolvedSigningRegionSet = await config.sigv4aSigningRegionSet?.(); + const multiRegionOverride = (configResolvedSigningRegionSet ?? signingRegionSet ?? 
[signingRegion]).join(","); + const signedRequest = await signer.sign(httpRequest, { + signingDate: getSkewCorrectedDate(config.systemClockOffset), + signingRegion: multiRegionOverride, + signingService: signingName + }); + return signedRequest; + } +}; + +// src/submodules/httpAuthSchemes/utils/getArrayForCommaSeparatedString.ts +var getArrayForCommaSeparatedString = /* @__PURE__ */ __name((str) => typeof str === "string" && str.length > 0 ? str.split(",").map((item) => item.trim()) : [], "getArrayForCommaSeparatedString"); + +// src/submodules/httpAuthSchemes/aws_sdk/NODE_AUTH_SCHEME_PREFERENCE_OPTIONS.ts +var NODE_AUTH_SCHEME_PREFERENCE_ENV_KEY = "AWS_AUTH_SCHEME_PREFERENCE"; +var NODE_AUTH_SCHEME_PREFERENCE_CONFIG_KEY = "auth_scheme_preference"; +var NODE_AUTH_SCHEME_PREFERENCE_OPTIONS = { + /** + * Retrieves auth scheme preference from environment variables + * @param env - Node process environment object + * @returns Array of auth scheme strings if preference is set, undefined otherwise + */ + environmentVariableSelector: /* @__PURE__ */ __name((env) => { + if (!(NODE_AUTH_SCHEME_PREFERENCE_ENV_KEY in env)) return void 0; + return getArrayForCommaSeparatedString(env[NODE_AUTH_SCHEME_PREFERENCE_ENV_KEY]); + }, "environmentVariableSelector"), + /** + * Retrieves auth scheme preference from config file + * @param profile - Config profile object + * @returns Array of auth scheme strings if preference is set, undefined otherwise + */ + configFileSelector: /* @__PURE__ */ __name((profile) => { + if (!(NODE_AUTH_SCHEME_PREFERENCE_CONFIG_KEY in profile)) return void 0; + return getArrayForCommaSeparatedString(profile[NODE_AUTH_SCHEME_PREFERENCE_CONFIG_KEY]); + }, "configFileSelector"), + /** + * Default auth scheme preference if not specified in environment or config + */ + default: [] +}; + +// src/submodules/httpAuthSchemes/aws_sdk/resolveAwsSdkSigV4AConfig.ts +var import_core = require("@smithy/core"); +var import_property_provider = 
require("@smithy/property-provider"); +var resolveAwsSdkSigV4AConfig = /* @__PURE__ */ __name((config) => { + config.sigv4aSigningRegionSet = (0, import_core.normalizeProvider)(config.sigv4aSigningRegionSet); + return config; +}, "resolveAwsSdkSigV4AConfig"); +var NODE_SIGV4A_CONFIG_OPTIONS = { + environmentVariableSelector(env) { + if (env.AWS_SIGV4A_SIGNING_REGION_SET) { + return env.AWS_SIGV4A_SIGNING_REGION_SET.split(",").map((_) => _.trim()); + } + throw new import_property_provider.ProviderError("AWS_SIGV4A_SIGNING_REGION_SET not set in env.", { + tryNextLink: true + }); + }, + configFileSelector(profile) { + if (profile.sigv4a_signing_region_set) { + return (profile.sigv4a_signing_region_set ?? "").split(",").map((_) => _.trim()); + } + throw new import_property_provider.ProviderError("sigv4a_signing_region_set not set in profile.", { + tryNextLink: true + }); + }, + default: void 0 +}; + +// src/submodules/httpAuthSchemes/aws_sdk/resolveAwsSdkSigV4Config.ts +var import_client = require("@aws-sdk/core/client"); +var import_core2 = require("@smithy/core"); +var import_signature_v4 = require("@smithy/signature-v4"); +var resolveAwsSdkSigV4Config = /* @__PURE__ */ __name((config) => { + let inputCredentials = config.credentials; + let isUserSupplied = !!config.credentials; + let resolvedCredentials = void 0; + Object.defineProperty(config, "credentials", { + set(credentials) { + if (credentials && credentials !== inputCredentials && credentials !== resolvedCredentials) { + isUserSupplied = true; + } + inputCredentials = credentials; + const memoizedProvider = normalizeCredentialProvider(config, { + credentials: inputCredentials, + credentialDefaultProvider: config.credentialDefaultProvider + }); + const boundProvider = bindCallerConfig(config, memoizedProvider); + if (isUserSupplied && !boundProvider.attributed) { + resolvedCredentials = /* @__PURE__ */ __name(async (options) => boundProvider(options).then( + (creds) => (0, 
import_client.setCredentialFeature)(creds, "CREDENTIALS_CODE", "e") + ), "resolvedCredentials"); + resolvedCredentials.memoized = boundProvider.memoized; + resolvedCredentials.configBound = boundProvider.configBound; + resolvedCredentials.attributed = true; + } else { + resolvedCredentials = boundProvider; + } + }, + get() { + return resolvedCredentials; + }, + enumerable: true, + configurable: true + }); + config.credentials = inputCredentials; + const { + // Default for signingEscapePath + signingEscapePath = true, + // Default for systemClockOffset + systemClockOffset = config.systemClockOffset || 0, + // No default for sha256 since it is platform dependent + sha256 + } = config; + let signer; + if (config.signer) { + signer = (0, import_core2.normalizeProvider)(config.signer); + } else if (config.regionInfoProvider) { + signer = /* @__PURE__ */ __name(() => (0, import_core2.normalizeProvider)(config.region)().then( + async (region) => [ + await config.regionInfoProvider(region, { + useFipsEndpoint: await config.useFipsEndpoint(), + useDualstackEndpoint: await config.useDualstackEndpoint() + }) || {}, + region + ] + ).then(([regionInfo, region]) => { + const { signingRegion, signingService } = regionInfo; + config.signingRegion = config.signingRegion || signingRegion || region; + config.signingName = config.signingName || signingService || config.serviceId; + const params = { + ...config, + credentials: config.credentials, + region: config.signingRegion, + service: config.signingName, + sha256, + uriEscapePath: signingEscapePath + }; + const SignerCtor = config.signerConstructor || import_signature_v4.SignatureV4; + return new SignerCtor(params); + }), "signer"); + } else { + signer = /* @__PURE__ */ __name(async (authScheme) => { + authScheme = Object.assign( + {}, + { + name: "sigv4", + signingName: config.signingName || config.defaultSigningName, + signingRegion: await (0, import_core2.normalizeProvider)(config.region)(), + properties: {} + }, + authScheme + 
); + const signingRegion = authScheme.signingRegion; + const signingService = authScheme.signingName; + config.signingRegion = config.signingRegion || signingRegion; + config.signingName = config.signingName || signingService || config.serviceId; + const params = { + ...config, + credentials: config.credentials, + region: config.signingRegion, + service: config.signingName, + sha256, + uriEscapePath: signingEscapePath + }; + const SignerCtor = config.signerConstructor || import_signature_v4.SignatureV4; + return new SignerCtor(params); + }, "signer"); + } + const resolvedConfig = Object.assign(config, { + systemClockOffset, + signingEscapePath, + signer + }); + return resolvedConfig; +}, "resolveAwsSdkSigV4Config"); +var resolveAWSSDKSigV4Config = resolveAwsSdkSigV4Config; +function normalizeCredentialProvider(config, { + credentials, + credentialDefaultProvider +}) { + let credentialsProvider; + if (credentials) { + if (!credentials?.memoized) { + credentialsProvider = (0, import_core2.memoizeIdentityProvider)(credentials, import_core2.isIdentityExpired, import_core2.doesIdentityRequireRefresh); + } else { + credentialsProvider = credentials; + } + } else { + if (credentialDefaultProvider) { + credentialsProvider = (0, import_core2.normalizeProvider)( + credentialDefaultProvider( + Object.assign({}, config, { + parentClientConfig: config + }) + ) + ); + } else { + credentialsProvider = /* @__PURE__ */ __name(async () => { + throw new Error( + "@aws-sdk/core::resolveAwsSdkSigV4Config - `credentials` not provided and no credentialDefaultProvider was configured." 
+ ); + }, "credentialsProvider"); + } + } + credentialsProvider.memoized = true; + return credentialsProvider; +} +__name(normalizeCredentialProvider, "normalizeCredentialProvider"); +function bindCallerConfig(config, credentialsProvider) { + if (credentialsProvider.configBound) { + return credentialsProvider; + } + const fn = /* @__PURE__ */ __name(async (options) => credentialsProvider({ ...options, callerClientConfig: config }), "fn"); + fn.memoized = credentialsProvider.memoized; + fn.configBound = true; + return fn; +} +__name(bindCallerConfig, "bindCallerConfig"); +// Annotate the CommonJS export names for ESM import in node: +0 && (module.exports = { + AWSSDKSigV4Signer, + AwsSdkSigV4ASigner, + AwsSdkSigV4Signer, + NODE_AUTH_SCHEME_PREFERENCE_OPTIONS, + NODE_SIGV4A_CONFIG_OPTIONS, + resolveAWSSDKSigV4Config, + resolveAwsSdkSigV4AConfig, + resolveAwsSdkSigV4Config, + validateSigningProperties +}); diff --git a/amplify/functions/fetchDocuments/node_modules/@aws-sdk/core/dist-cjs/submodules/protocols/index.js b/amplify/functions/fetchDocuments/node_modules/@aws-sdk/core/dist-cjs/submodules/protocols/index.js new file mode 100644 index 0000000..d84c65b --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@aws-sdk/core/dist-cjs/submodules/protocols/index.js @@ -0,0 +1,227 @@ +"use strict"; +var __defProp = Object.defineProperty; +var __getOwnPropDesc = Object.getOwnPropertyDescriptor; +var __getOwnPropNames = Object.getOwnPropertyNames; +var __hasOwnProp = Object.prototype.hasOwnProperty; +var __name = (target, value) => __defProp(target, "name", { value, configurable: true }); +var __export = (target, all) => { + for (var name in all) + __defProp(target, name, { get: all[name], enumerable: true }); +}; +var __copyProps = (to, from, except, desc) => { + if (from && typeof from === "object" || typeof from === "function") { + for (let key of __getOwnPropNames(from)) + if (!__hasOwnProp.call(to, key) && key !== except) + __defProp(to, key, { get: () => 
from[key], enumerable: !(desc = __getOwnPropDesc(from, key)) || desc.enumerable }); + } + return to; +}; +var __toCommonJS = (mod) => __copyProps(__defProp({}, "__esModule", { value: true }), mod); + +// src/submodules/protocols/index.ts +var index_exports = {}; +__export(index_exports, { + _toBool: () => _toBool, + _toNum: () => _toNum, + _toStr: () => _toStr, + awsExpectUnion: () => awsExpectUnion, + loadRestJsonErrorCode: () => loadRestJsonErrorCode, + loadRestXmlErrorCode: () => loadRestXmlErrorCode, + parseJsonBody: () => parseJsonBody, + parseJsonErrorBody: () => parseJsonErrorBody, + parseXmlBody: () => parseXmlBody, + parseXmlErrorBody: () => parseXmlErrorBody +}); +module.exports = __toCommonJS(index_exports); + +// src/submodules/protocols/coercing-serializers.ts +var _toStr = /* @__PURE__ */ __name((val) => { + if (val == null) { + return val; + } + if (typeof val === "number" || typeof val === "bigint") { + const warning = new Error(`Received number ${val} where a string was expected.`); + warning.name = "Warning"; + console.warn(warning); + return String(val); + } + if (typeof val === "boolean") { + const warning = new Error(`Received boolean ${val} where a string was expected.`); + warning.name = "Warning"; + console.warn(warning); + return String(val); + } + return val; +}, "_toStr"); +var _toBool = /* @__PURE__ */ __name((val) => { + if (val == null) { + return val; + } + if (typeof val === "number") { + } + if (typeof val === "string") { + const lowercase = val.toLowerCase(); + if (val !== "" && lowercase !== "false" && lowercase !== "true") { + const warning = new Error(`Received string "${val}" where a boolean was expected.`); + warning.name = "Warning"; + console.warn(warning); + } + return val !== "" && lowercase !== "false"; + } + return val; +}, "_toBool"); +var _toNum = /* @__PURE__ */ __name((val) => { + if (val == null) { + return val; + } + if (typeof val === "boolean") { + } + if (typeof val === "string") { + const num = Number(val); + 
if (num.toString() !== val) { + const warning = new Error(`Received string "${val}" where a number was expected.`); + warning.name = "Warning"; + console.warn(warning); + return val; + } + return num; + } + return val; +}, "_toNum"); + +// src/submodules/protocols/json/awsExpectUnion.ts +var import_smithy_client = require("@smithy/smithy-client"); +var awsExpectUnion = /* @__PURE__ */ __name((value) => { + if (value == null) { + return void 0; + } + if (typeof value === "object" && "__type" in value) { + delete value.__type; + } + return (0, import_smithy_client.expectUnion)(value); +}, "awsExpectUnion"); + +// src/submodules/protocols/common.ts +var import_smithy_client2 = require("@smithy/smithy-client"); +var collectBodyString = /* @__PURE__ */ __name((streamBody, context) => (0, import_smithy_client2.collectBody)(streamBody, context).then((body) => context.utf8Encoder(body)), "collectBodyString"); + +// src/submodules/protocols/json/parseJsonBody.ts +var parseJsonBody = /* @__PURE__ */ __name((streamBody, context) => collectBodyString(streamBody, context).then((encoded) => { + if (encoded.length) { + try { + return JSON.parse(encoded); + } catch (e) { + if (e?.name === "SyntaxError") { + Object.defineProperty(e, "$responseBodyText", { + value: encoded + }); + } + throw e; + } + } + return {}; +}), "parseJsonBody"); +var parseJsonErrorBody = /* @__PURE__ */ __name(async (errorBody, context) => { + const value = await parseJsonBody(errorBody, context); + value.message = value.message ?? 
value.Message; + return value; +}, "parseJsonErrorBody"); +var loadRestJsonErrorCode = /* @__PURE__ */ __name((output, data) => { + const findKey = /* @__PURE__ */ __name((object, key) => Object.keys(object).find((k) => k.toLowerCase() === key.toLowerCase()), "findKey"); + const sanitizeErrorCode = /* @__PURE__ */ __name((rawValue) => { + let cleanValue = rawValue; + if (typeof cleanValue === "number") { + cleanValue = cleanValue.toString(); + } + if (cleanValue.indexOf(",") >= 0) { + cleanValue = cleanValue.split(",")[0]; + } + if (cleanValue.indexOf(":") >= 0) { + cleanValue = cleanValue.split(":")[0]; + } + if (cleanValue.indexOf("#") >= 0) { + cleanValue = cleanValue.split("#")[1]; + } + return cleanValue; + }, "sanitizeErrorCode"); + const headerKey = findKey(output.headers, "x-amzn-errortype"); + if (headerKey !== void 0) { + return sanitizeErrorCode(output.headers[headerKey]); + } + if (data.code !== void 0) { + return sanitizeErrorCode(data.code); + } + if (data["__type"] !== void 0) { + return sanitizeErrorCode(data["__type"]); + } +}, "loadRestJsonErrorCode"); + +// src/submodules/protocols/xml/parseXmlBody.ts +var import_smithy_client3 = require("@smithy/smithy-client"); +var import_fast_xml_parser = require("fast-xml-parser"); +var parseXmlBody = /* @__PURE__ */ __name((streamBody, context) => collectBodyString(streamBody, context).then((encoded) => { + if (encoded.length) { + const parser = new import_fast_xml_parser.XMLParser({ + attributeNamePrefix: "", + htmlEntities: true, + ignoreAttributes: false, + ignoreDeclaration: true, + parseTagValue: false, + trimValues: false, + tagValueProcessor: /* @__PURE__ */ __name((_, val) => val.trim() === "" && val.includes("\n") ? 
"" : void 0, "tagValueProcessor") + }); + parser.addEntity("#xD", "\r"); + parser.addEntity("#10", "\n"); + let parsedObj; + try { + parsedObj = parser.parse(encoded, true); + } catch (e) { + if (e && typeof e === "object") { + Object.defineProperty(e, "$responseBodyText", { + value: encoded + }); + } + throw e; + } + const textNodeName = "#text"; + const key = Object.keys(parsedObj)[0]; + const parsedObjToReturn = parsedObj[key]; + if (parsedObjToReturn[textNodeName]) { + parsedObjToReturn[key] = parsedObjToReturn[textNodeName]; + delete parsedObjToReturn[textNodeName]; + } + return (0, import_smithy_client3.getValueFromTextNode)(parsedObjToReturn); + } + return {}; +}), "parseXmlBody"); +var parseXmlErrorBody = /* @__PURE__ */ __name(async (errorBody, context) => { + const value = await parseXmlBody(errorBody, context); + if (value.Error) { + value.Error.message = value.Error.message ?? value.Error.Message; + } + return value; +}, "parseXmlErrorBody"); +var loadRestXmlErrorCode = /* @__PURE__ */ __name((output, data) => { + if (data?.Error?.Code !== void 0) { + return data.Error.Code; + } + if (data?.Code !== void 0) { + return data.Code; + } + if (output.statusCode == 404) { + return "NotFound"; + } +}, "loadRestXmlErrorCode"); +// Annotate the CommonJS export names for ESM import in node: +0 && (module.exports = { + _toBool, + _toNum, + _toStr, + awsExpectUnion, + loadRestJsonErrorCode, + loadRestXmlErrorCode, + parseJsonBody, + parseJsonErrorBody, + parseXmlBody, + parseXmlErrorBody +}); diff --git a/amplify/functions/fetchDocuments/node_modules/@aws-sdk/core/dist-es/index.js b/amplify/functions/fetchDocuments/node_modules/@aws-sdk/core/dist-es/index.js new file mode 100644 index 0000000..239de7a --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@aws-sdk/core/dist-es/index.js @@ -0,0 +1,3 @@ +export * from "./submodules/client/index"; +export * from "./submodules/httpAuthSchemes/index"; +export * from "./submodules/protocols/index"; diff --git 
a/amplify/functions/fetchDocuments/node_modules/@aws-sdk/core/dist-es/submodules/account-id-endpoint/AccountIdEndpointModeConfigResolver.js b/amplify/functions/fetchDocuments/node_modules/@aws-sdk/core/dist-es/submodules/account-id-endpoint/AccountIdEndpointModeConfigResolver.js new file mode 100644 index 0000000..cc0c55a --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@aws-sdk/core/dist-es/submodules/account-id-endpoint/AccountIdEndpointModeConfigResolver.js @@ -0,0 +1,15 @@ +import { normalizeProvider } from "@smithy/util-middleware"; +import { DEFAULT_ACCOUNT_ID_ENDPOINT_MODE, validateAccountIdEndpointMode, } from "./AccountIdEndpointModeConstants"; +export const resolveAccountIdEndpointModeConfig = (input) => { + const { accountIdEndpointMode } = input; + const accountIdEndpointModeProvider = normalizeProvider(accountIdEndpointMode ?? DEFAULT_ACCOUNT_ID_ENDPOINT_MODE); + return Object.assign(input, { + accountIdEndpointMode: async () => { + const accIdMode = await accountIdEndpointModeProvider(); + if (!validateAccountIdEndpointMode(accIdMode)) { + throw new Error(`Invalid value for accountIdEndpointMode: ${accIdMode}. 
Valid values are: "required", "preferred", "disabled".`); + } + return accIdMode; + }, + }); +}; diff --git a/amplify/functions/fetchDocuments/node_modules/@aws-sdk/core/dist-es/submodules/account-id-endpoint/AccountIdEndpointModeConstants.js b/amplify/functions/fetchDocuments/node_modules/@aws-sdk/core/dist-es/submodules/account-id-endpoint/AccountIdEndpointModeConstants.js new file mode 100644 index 0000000..e7a2ca0 --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@aws-sdk/core/dist-es/submodules/account-id-endpoint/AccountIdEndpointModeConstants.js @@ -0,0 +1,5 @@ +export const DEFAULT_ACCOUNT_ID_ENDPOINT_MODE = "preferred"; +export const ACCOUNT_ID_ENDPOINT_MODE_VALUES = ["disabled", "preferred", "required"]; +export function validateAccountIdEndpointMode(value) { + return ACCOUNT_ID_ENDPOINT_MODE_VALUES.includes(value); +} diff --git a/amplify/functions/fetchDocuments/node_modules/@aws-sdk/core/dist-es/submodules/account-id-endpoint/NodeAccountIdEndpointModeConfigOptions.js b/amplify/functions/fetchDocuments/node_modules/@aws-sdk/core/dist-es/submodules/account-id-endpoint/NodeAccountIdEndpointModeConfigOptions.js new file mode 100644 index 0000000..54832d5 --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@aws-sdk/core/dist-es/submodules/account-id-endpoint/NodeAccountIdEndpointModeConfigOptions.js @@ -0,0 +1,24 @@ +import { DEFAULT_ACCOUNT_ID_ENDPOINT_MODE, validateAccountIdEndpointMode, } from "./AccountIdEndpointModeConstants"; +const err = "Invalid AccountIdEndpointMode value"; +const _throw = (message) => { + throw new Error(message); +}; +export const ENV_ACCOUNT_ID_ENDPOINT_MODE = "AWS_ACCOUNT_ID_ENDPOINT_MODE"; +export const CONFIG_ACCOUNT_ID_ENDPOINT_MODE = "account_id_endpoint_mode"; +export const NODE_ACCOUNT_ID_ENDPOINT_MODE_CONFIG_OPTIONS = { + environmentVariableSelector: (env) => { + const value = env[ENV_ACCOUNT_ID_ENDPOINT_MODE]; + if (value && !validateAccountIdEndpointMode(value)) { + _throw(err); + } + 
return value; + }, + configFileSelector: (profile) => { + const value = profile[CONFIG_ACCOUNT_ID_ENDPOINT_MODE]; + if (value && !validateAccountIdEndpointMode(value)) { + _throw(err); + } + return value; + }, + default: DEFAULT_ACCOUNT_ID_ENDPOINT_MODE, +}; diff --git a/amplify/functions/fetchDocuments/node_modules/@aws-sdk/core/dist-es/submodules/account-id-endpoint/index.js b/amplify/functions/fetchDocuments/node_modules/@aws-sdk/core/dist-es/submodules/account-id-endpoint/index.js new file mode 100644 index 0000000..52af11d --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@aws-sdk/core/dist-es/submodules/account-id-endpoint/index.js @@ -0,0 +1,3 @@ +export * from "./AccountIdEndpointModeConfigResolver"; +export * from "./AccountIdEndpointModeConstants"; +export * from "./NodeAccountIdEndpointModeConfigOptions"; diff --git a/amplify/functions/fetchDocuments/node_modules/@aws-sdk/core/dist-es/submodules/client/emitWarningIfUnsupportedVersion.js b/amplify/functions/fetchDocuments/node_modules/@aws-sdk/core/dist-es/submodules/client/emitWarningIfUnsupportedVersion.js new file mode 100644 index 0000000..d1dab1d --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@aws-sdk/core/dist-es/submodules/client/emitWarningIfUnsupportedVersion.js @@ -0,0 +1,15 @@ +export const state = { + warningEmitted: false, +}; +export const emitWarningIfUnsupportedVersion = (version) => { + if (version && !state.warningEmitted && parseInt(version.substring(1, version.indexOf("."))) < 18) { + state.warningEmitted = true; + process.emitWarning(`NodeDeprecationWarning: The AWS SDK for JavaScript (v3) will +no longer support Node.js 16.x on January 6, 2025. + +To continue receiving updates to AWS services, bug fixes, and security +updates please upgrade to a supported Node.js LTS version. 
+ +More information can be found at: https://a.co/74kJMmI`); + } +}; diff --git a/amplify/functions/fetchDocuments/node_modules/@aws-sdk/core/dist-es/submodules/client/index.js b/amplify/functions/fetchDocuments/node_modules/@aws-sdk/core/dist-es/submodules/client/index.js new file mode 100644 index 0000000..1a2cc9d --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@aws-sdk/core/dist-es/submodules/client/index.js @@ -0,0 +1,3 @@ +export * from "./emitWarningIfUnsupportedVersion"; +export * from "./setCredentialFeature"; +export * from "./setFeature"; diff --git a/amplify/functions/fetchDocuments/node_modules/@aws-sdk/core/dist-es/submodules/client/setCredentialFeature.js b/amplify/functions/fetchDocuments/node_modules/@aws-sdk/core/dist-es/submodules/client/setCredentialFeature.js new file mode 100644 index 0000000..a489c40 --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@aws-sdk/core/dist-es/submodules/client/setCredentialFeature.js @@ -0,0 +1,7 @@ +export function setCredentialFeature(credentials, feature, value) { + if (!credentials.$source) { + credentials.$source = {}; + } + credentials.$source[feature] = value; + return credentials; +} diff --git a/amplify/functions/fetchDocuments/node_modules/@aws-sdk/core/dist-es/submodules/client/setFeature.js b/amplify/functions/fetchDocuments/node_modules/@aws-sdk/core/dist-es/submodules/client/setFeature.js new file mode 100644 index 0000000..2d8804b --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@aws-sdk/core/dist-es/submodules/client/setFeature.js @@ -0,0 +1,11 @@ +export function setFeature(context, feature, value) { + if (!context.__aws_sdk_context) { + context.__aws_sdk_context = { + features: {}, + }; + } + else if (!context.__aws_sdk_context.features) { + context.__aws_sdk_context.features = {}; + } + context.__aws_sdk_context.features[feature] = value; +} diff --git 
a/amplify/functions/fetchDocuments/node_modules/@aws-sdk/core/dist-es/submodules/httpAuthSchemes/aws_sdk/AwsSdkSigV4ASigner.js b/amplify/functions/fetchDocuments/node_modules/@aws-sdk/core/dist-es/submodules/httpAuthSchemes/aws_sdk/AwsSdkSigV4ASigner.js new file mode 100644 index 0000000..548fefb --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@aws-sdk/core/dist-es/submodules/httpAuthSchemes/aws_sdk/AwsSdkSigV4ASigner.js @@ -0,0 +1,20 @@ +import { HttpRequest } from "@smithy/protocol-http"; +import { getSkewCorrectedDate } from "../utils"; +import { AwsSdkSigV4Signer, validateSigningProperties } from "./AwsSdkSigV4Signer"; +export class AwsSdkSigV4ASigner extends AwsSdkSigV4Signer { + async sign(httpRequest, identity, signingProperties) { + if (!HttpRequest.isInstance(httpRequest)) { + throw new Error("The request is not an instance of `HttpRequest` and cannot be signed"); + } + const { config, signer, signingRegion, signingRegionSet, signingName } = await validateSigningProperties(signingProperties); + const configResolvedSigningRegionSet = await config.sigv4aSigningRegionSet?.(); + const multiRegionOverride = (configResolvedSigningRegionSet ?? + signingRegionSet ?? 
[signingRegion]).join(","); + const signedRequest = await signer.sign(httpRequest, { + signingDate: getSkewCorrectedDate(config.systemClockOffset), + signingRegion: multiRegionOverride, + signingService: signingName, + }); + return signedRequest; + } +} diff --git a/amplify/functions/fetchDocuments/node_modules/@aws-sdk/core/dist-es/submodules/httpAuthSchemes/aws_sdk/AwsSdkSigV4Signer.js b/amplify/functions/fetchDocuments/node_modules/@aws-sdk/core/dist-es/submodules/httpAuthSchemes/aws_sdk/AwsSdkSigV4Signer.js new file mode 100644 index 0000000..ee236cd --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@aws-sdk/core/dist-es/submodules/httpAuthSchemes/aws_sdk/AwsSdkSigV4Signer.js @@ -0,0 +1,72 @@ +import { HttpRequest } from "@smithy/protocol-http"; +import { getDateHeader, getSkewCorrectedDate, getUpdatedSystemClockOffset } from "../utils"; +const throwSigningPropertyError = (name, property) => { + if (!property) { + throw new Error(`Property \`${name}\` is not resolved for AWS SDK SigV4Auth`); + } + return property; +}; +export const validateSigningProperties = async (signingProperties) => { + const context = throwSigningPropertyError("context", signingProperties.context); + const config = throwSigningPropertyError("config", signingProperties.config); + const authScheme = context.endpointV2?.properties?.authSchemes?.[0]; + const signerFunction = throwSigningPropertyError("signer", config.signer); + const signer = await signerFunction(authScheme); + const signingRegion = signingProperties?.signingRegion; + const signingRegionSet = signingProperties?.signingRegionSet; + const signingName = signingProperties?.signingName; + return { + config, + signer, + signingRegion, + signingRegionSet, + signingName, + }; +}; +export class AwsSdkSigV4Signer { + async sign(httpRequest, identity, signingProperties) { + if (!HttpRequest.isInstance(httpRequest)) { + throw new Error("The request is not an instance of `HttpRequest` and cannot be signed"); + } + const 
validatedProps = await validateSigningProperties(signingProperties); + const { config, signer } = validatedProps; + let { signingRegion, signingName } = validatedProps; + const handlerExecutionContext = signingProperties.context; + if (handlerExecutionContext?.authSchemes?.length ?? 0 > 1) { + const [first, second] = handlerExecutionContext.authSchemes; + if (first?.name === "sigv4a" && second?.name === "sigv4") { + signingRegion = second?.signingRegion ?? signingRegion; + signingName = second?.signingName ?? signingName; + } + } + const signedRequest = await signer.sign(httpRequest, { + signingDate: getSkewCorrectedDate(config.systemClockOffset), + signingRegion: signingRegion, + signingService: signingName, + }); + return signedRequest; + } + errorHandler(signingProperties) { + return (error) => { + const serverTime = error.ServerTime ?? getDateHeader(error.$response); + if (serverTime) { + const config = throwSigningPropertyError("config", signingProperties.config); + const initialSystemClockOffset = config.systemClockOffset; + config.systemClockOffset = getUpdatedSystemClockOffset(serverTime, config.systemClockOffset); + const clockSkewCorrected = config.systemClockOffset !== initialSystemClockOffset; + if (clockSkewCorrected && error.$metadata) { + error.$metadata.clockSkewCorrected = true; + } + } + throw error; + }; + } + successHandler(httpResponse, signingProperties) { + const dateHeader = getDateHeader(httpResponse); + if (dateHeader) { + const config = throwSigningPropertyError("config", signingProperties.config); + config.systemClockOffset = getUpdatedSystemClockOffset(dateHeader, config.systemClockOffset); + } + } +} +export const AWSSDKSigV4Signer = AwsSdkSigV4Signer; diff --git a/amplify/functions/fetchDocuments/node_modules/@aws-sdk/core/dist-es/submodules/httpAuthSchemes/aws_sdk/NODE_AUTH_SCHEME_PREFERENCE_OPTIONS.js 
b/amplify/functions/fetchDocuments/node_modules/@aws-sdk/core/dist-es/submodules/httpAuthSchemes/aws_sdk/NODE_AUTH_SCHEME_PREFERENCE_OPTIONS.js new file mode 100644 index 0000000..17e3d2e --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@aws-sdk/core/dist-es/submodules/httpAuthSchemes/aws_sdk/NODE_AUTH_SCHEME_PREFERENCE_OPTIONS.js @@ -0,0 +1,16 @@ +import { getArrayForCommaSeparatedString } from "../utils/getArrayForCommaSeparatedString"; +const NODE_AUTH_SCHEME_PREFERENCE_ENV_KEY = "AWS_AUTH_SCHEME_PREFERENCE"; +const NODE_AUTH_SCHEME_PREFERENCE_CONFIG_KEY = "auth_scheme_preference"; +export const NODE_AUTH_SCHEME_PREFERENCE_OPTIONS = { + environmentVariableSelector: (env) => { + if (!(NODE_AUTH_SCHEME_PREFERENCE_ENV_KEY in env)) + return undefined; + return getArrayForCommaSeparatedString(env[NODE_AUTH_SCHEME_PREFERENCE_ENV_KEY]); + }, + configFileSelector: (profile) => { + if (!(NODE_AUTH_SCHEME_PREFERENCE_CONFIG_KEY in profile)) + return undefined; + return getArrayForCommaSeparatedString(profile[NODE_AUTH_SCHEME_PREFERENCE_CONFIG_KEY]); + }, + default: [], +}; diff --git a/amplify/functions/fetchDocuments/node_modules/@aws-sdk/core/dist-es/submodules/httpAuthSchemes/aws_sdk/index.js b/amplify/functions/fetchDocuments/node_modules/@aws-sdk/core/dist-es/submodules/httpAuthSchemes/aws_sdk/index.js new file mode 100644 index 0000000..4071225 --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@aws-sdk/core/dist-es/submodules/httpAuthSchemes/aws_sdk/index.js @@ -0,0 +1,5 @@ +export { AwsSdkSigV4Signer, AWSSDKSigV4Signer, validateSigningProperties } from "./AwsSdkSigV4Signer"; +export { AwsSdkSigV4ASigner } from "./AwsSdkSigV4ASigner"; +export * from "./NODE_AUTH_SCHEME_PREFERENCE_OPTIONS"; +export * from "./resolveAwsSdkSigV4AConfig"; +export * from "./resolveAwsSdkSigV4Config"; diff --git a/amplify/functions/fetchDocuments/node_modules/@aws-sdk/core/dist-es/submodules/httpAuthSchemes/aws_sdk/resolveAwsSdkSigV4AConfig.js 
b/amplify/functions/fetchDocuments/node_modules/@aws-sdk/core/dist-es/submodules/httpAuthSchemes/aws_sdk/resolveAwsSdkSigV4AConfig.js new file mode 100644 index 0000000..0e62ef0 --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@aws-sdk/core/dist-es/submodules/httpAuthSchemes/aws_sdk/resolveAwsSdkSigV4AConfig.js @@ -0,0 +1,25 @@ +import { normalizeProvider } from "@smithy/core"; +import { ProviderError } from "@smithy/property-provider"; +export const resolveAwsSdkSigV4AConfig = (config) => { + config.sigv4aSigningRegionSet = normalizeProvider(config.sigv4aSigningRegionSet); + return config; +}; +export const NODE_SIGV4A_CONFIG_OPTIONS = { + environmentVariableSelector(env) { + if (env.AWS_SIGV4A_SIGNING_REGION_SET) { + return env.AWS_SIGV4A_SIGNING_REGION_SET.split(",").map((_) => _.trim()); + } + throw new ProviderError("AWS_SIGV4A_SIGNING_REGION_SET not set in env.", { + tryNextLink: true, + }); + }, + configFileSelector(profile) { + if (profile.sigv4a_signing_region_set) { + return (profile.sigv4a_signing_region_set ?? 
"").split(",").map((_) => _.trim()); + } + throw new ProviderError("sigv4a_signing_region_set not set in profile.", { + tryNextLink: true, + }); + }, + default: undefined, +}; diff --git a/amplify/functions/fetchDocuments/node_modules/@aws-sdk/core/dist-es/submodules/httpAuthSchemes/aws_sdk/resolveAwsSdkSigV4Config.js b/amplify/functions/fetchDocuments/node_modules/@aws-sdk/core/dist-es/submodules/httpAuthSchemes/aws_sdk/resolveAwsSdkSigV4Config.js new file mode 100644 index 0000000..6da968d --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@aws-sdk/core/dist-es/submodules/httpAuthSchemes/aws_sdk/resolveAwsSdkSigV4Config.js @@ -0,0 +1,131 @@ +import { setCredentialFeature } from "@aws-sdk/core/client"; +import { doesIdentityRequireRefresh, isIdentityExpired, memoizeIdentityProvider, normalizeProvider, } from "@smithy/core"; +import { SignatureV4 } from "@smithy/signature-v4"; +export const resolveAwsSdkSigV4Config = (config) => { + let inputCredentials = config.credentials; + let isUserSupplied = !!config.credentials; + let resolvedCredentials = undefined; + Object.defineProperty(config, "credentials", { + set(credentials) { + if (credentials && credentials !== inputCredentials && credentials !== resolvedCredentials) { + isUserSupplied = true; + } + inputCredentials = credentials; + const memoizedProvider = normalizeCredentialProvider(config, { + credentials: inputCredentials, + credentialDefaultProvider: config.credentialDefaultProvider, + }); + const boundProvider = bindCallerConfig(config, memoizedProvider); + if (isUserSupplied && !boundProvider.attributed) { + resolvedCredentials = async (options) => boundProvider(options).then((creds) => setCredentialFeature(creds, "CREDENTIALS_CODE", "e")); + resolvedCredentials.memoized = boundProvider.memoized; + resolvedCredentials.configBound = boundProvider.configBound; + resolvedCredentials.attributed = true; + } + else { + resolvedCredentials = boundProvider; + } + }, + get() { + return 
resolvedCredentials; + }, + enumerable: true, + configurable: true, + }); + config.credentials = inputCredentials; + const { signingEscapePath = true, systemClockOffset = config.systemClockOffset || 0, sha256, } = config; + let signer; + if (config.signer) { + signer = normalizeProvider(config.signer); + } + else if (config.regionInfoProvider) { + signer = () => normalizeProvider(config.region)() + .then(async (region) => [ + (await config.regionInfoProvider(region, { + useFipsEndpoint: await config.useFipsEndpoint(), + useDualstackEndpoint: await config.useDualstackEndpoint(), + })) || {}, + region, + ]) + .then(([regionInfo, region]) => { + const { signingRegion, signingService } = regionInfo; + config.signingRegion = config.signingRegion || signingRegion || region; + config.signingName = config.signingName || signingService || config.serviceId; + const params = { + ...config, + credentials: config.credentials, + region: config.signingRegion, + service: config.signingName, + sha256, + uriEscapePath: signingEscapePath, + }; + const SignerCtor = config.signerConstructor || SignatureV4; + return new SignerCtor(params); + }); + } + else { + signer = async (authScheme) => { + authScheme = Object.assign({}, { + name: "sigv4", + signingName: config.signingName || config.defaultSigningName, + signingRegion: await normalizeProvider(config.region)(), + properties: {}, + }, authScheme); + const signingRegion = authScheme.signingRegion; + const signingService = authScheme.signingName; + config.signingRegion = config.signingRegion || signingRegion; + config.signingName = config.signingName || signingService || config.serviceId; + const params = { + ...config, + credentials: config.credentials, + region: config.signingRegion, + service: config.signingName, + sha256, + uriEscapePath: signingEscapePath, + }; + const SignerCtor = config.signerConstructor || SignatureV4; + return new SignerCtor(params); + }; + } + const resolvedConfig = Object.assign(config, { + systemClockOffset, 
+ signingEscapePath, + signer, + }); + return resolvedConfig; +}; +export const resolveAWSSDKSigV4Config = resolveAwsSdkSigV4Config; +function normalizeCredentialProvider(config, { credentials, credentialDefaultProvider, }) { + let credentialsProvider; + if (credentials) { + if (!credentials?.memoized) { + credentialsProvider = memoizeIdentityProvider(credentials, isIdentityExpired, doesIdentityRequireRefresh); + } + else { + credentialsProvider = credentials; + } + } + else { + if (credentialDefaultProvider) { + credentialsProvider = normalizeProvider(credentialDefaultProvider(Object.assign({}, config, { + parentClientConfig: config, + }))); + } + else { + credentialsProvider = async () => { + throw new Error("@aws-sdk/core::resolveAwsSdkSigV4Config - `credentials` not provided and no credentialDefaultProvider was configured."); + }; + } + } + credentialsProvider.memoized = true; + return credentialsProvider; +} +function bindCallerConfig(config, credentialsProvider) { + if (credentialsProvider.configBound) { + return credentialsProvider; + } + const fn = async (options) => credentialsProvider({ ...options, callerClientConfig: config }); + fn.memoized = credentialsProvider.memoized; + fn.configBound = true; + return fn; +} diff --git a/amplify/functions/fetchDocuments/node_modules/@aws-sdk/core/dist-es/submodules/httpAuthSchemes/index.js b/amplify/functions/fetchDocuments/node_modules/@aws-sdk/core/dist-es/submodules/httpAuthSchemes/index.js new file mode 100644 index 0000000..29d0c3b --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@aws-sdk/core/dist-es/submodules/httpAuthSchemes/index.js @@ -0,0 +1 @@ +export * from "./aws_sdk"; diff --git a/amplify/functions/fetchDocuments/node_modules/@aws-sdk/core/dist-es/submodules/httpAuthSchemes/utils/getArrayForCommaSeparatedString.js b/amplify/functions/fetchDocuments/node_modules/@aws-sdk/core/dist-es/submodules/httpAuthSchemes/utils/getArrayForCommaSeparatedString.js new file mode 100644 index 
0000000..aa60799 --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@aws-sdk/core/dist-es/submodules/httpAuthSchemes/utils/getArrayForCommaSeparatedString.js @@ -0,0 +1 @@ +export const getArrayForCommaSeparatedString = (str) => typeof str === "string" && str.length > 0 ? str.split(",").map((item) => item.trim()) : []; diff --git a/amplify/functions/fetchDocuments/node_modules/@aws-sdk/core/dist-es/submodules/httpAuthSchemes/utils/getDateHeader.js b/amplify/functions/fetchDocuments/node_modules/@aws-sdk/core/dist-es/submodules/httpAuthSchemes/utils/getDateHeader.js new file mode 100644 index 0000000..449c182 --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@aws-sdk/core/dist-es/submodules/httpAuthSchemes/utils/getDateHeader.js @@ -0,0 +1,2 @@ +import { HttpResponse } from "@smithy/protocol-http"; +export const getDateHeader = (response) => HttpResponse.isInstance(response) ? response.headers?.date ?? response.headers?.Date : undefined; diff --git a/amplify/functions/fetchDocuments/node_modules/@aws-sdk/core/dist-es/submodules/httpAuthSchemes/utils/getSkewCorrectedDate.js b/amplify/functions/fetchDocuments/node_modules/@aws-sdk/core/dist-es/submodules/httpAuthSchemes/utils/getSkewCorrectedDate.js new file mode 100644 index 0000000..6ee8036 --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@aws-sdk/core/dist-es/submodules/httpAuthSchemes/utils/getSkewCorrectedDate.js @@ -0,0 +1 @@ +export const getSkewCorrectedDate = (systemClockOffset) => new Date(Date.now() + systemClockOffset); diff --git a/amplify/functions/fetchDocuments/node_modules/@aws-sdk/core/dist-es/submodules/httpAuthSchemes/utils/getUpdatedSystemClockOffset.js b/amplify/functions/fetchDocuments/node_modules/@aws-sdk/core/dist-es/submodules/httpAuthSchemes/utils/getUpdatedSystemClockOffset.js new file mode 100644 index 0000000..859c41a --- /dev/null +++ 
b/amplify/functions/fetchDocuments/node_modules/@aws-sdk/core/dist-es/submodules/httpAuthSchemes/utils/getUpdatedSystemClockOffset.js @@ -0,0 +1,8 @@ +import { isClockSkewed } from "./isClockSkewed"; +export const getUpdatedSystemClockOffset = (clockTime, currentSystemClockOffset) => { + const clockTimeInMs = Date.parse(clockTime); + if (isClockSkewed(clockTimeInMs, currentSystemClockOffset)) { + return clockTimeInMs - Date.now(); + } + return currentSystemClockOffset; +}; diff --git a/amplify/functions/fetchDocuments/node_modules/@aws-sdk/core/dist-es/submodules/httpAuthSchemes/utils/index.js b/amplify/functions/fetchDocuments/node_modules/@aws-sdk/core/dist-es/submodules/httpAuthSchemes/utils/index.js new file mode 100644 index 0000000..07c2195 --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@aws-sdk/core/dist-es/submodules/httpAuthSchemes/utils/index.js @@ -0,0 +1,3 @@ +export * from "./getDateHeader"; +export * from "./getSkewCorrectedDate"; +export * from "./getUpdatedSystemClockOffset"; diff --git a/amplify/functions/fetchDocuments/node_modules/@aws-sdk/core/dist-es/submodules/httpAuthSchemes/utils/isClockSkewed.js b/amplify/functions/fetchDocuments/node_modules/@aws-sdk/core/dist-es/submodules/httpAuthSchemes/utils/isClockSkewed.js new file mode 100644 index 0000000..086d7a8 --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@aws-sdk/core/dist-es/submodules/httpAuthSchemes/utils/isClockSkewed.js @@ -0,0 +1,2 @@ +import { getSkewCorrectedDate } from "./getSkewCorrectedDate"; +export const isClockSkewed = (clockTime, systemClockOffset) => Math.abs(getSkewCorrectedDate(systemClockOffset).getTime() - clockTime) >= 300000; diff --git a/amplify/functions/fetchDocuments/node_modules/@aws-sdk/core/dist-es/submodules/protocols/coercing-serializers.js b/amplify/functions/fetchDocuments/node_modules/@aws-sdk/core/dist-es/submodules/protocols/coercing-serializers.js new file mode 100644 index 0000000..fce893b --- /dev/null +++ 
b/amplify/functions/fetchDocuments/node_modules/@aws-sdk/core/dist-es/submodules/protocols/coercing-serializers.js @@ -0,0 +1,53 @@ +export const _toStr = (val) => { + if (val == null) { + return val; + } + if (typeof val === "number" || typeof val === "bigint") { + const warning = new Error(`Received number ${val} where a string was expected.`); + warning.name = "Warning"; + console.warn(warning); + return String(val); + } + if (typeof val === "boolean") { + const warning = new Error(`Received boolean ${val} where a string was expected.`); + warning.name = "Warning"; + console.warn(warning); + return String(val); + } + return val; +}; +export const _toBool = (val) => { + if (val == null) { + return val; + } + if (typeof val === "number") { + } + if (typeof val === "string") { + const lowercase = val.toLowerCase(); + if (val !== "" && lowercase !== "false" && lowercase !== "true") { + const warning = new Error(`Received string "${val}" where a boolean was expected.`); + warning.name = "Warning"; + console.warn(warning); + } + return val !== "" && lowercase !== "false"; + } + return val; +}; +export const _toNum = (val) => { + if (val == null) { + return val; + } + if (typeof val === "boolean") { + } + if (typeof val === "string") { + const num = Number(val); + if (num.toString() !== val) { + const warning = new Error(`Received string "${val}" where a number was expected.`); + warning.name = "Warning"; + console.warn(warning); + return val; + } + return num; + } + return val; +}; diff --git a/amplify/functions/fetchDocuments/node_modules/@aws-sdk/core/dist-es/submodules/protocols/common.js b/amplify/functions/fetchDocuments/node_modules/@aws-sdk/core/dist-es/submodules/protocols/common.js new file mode 100644 index 0000000..4348b08 --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@aws-sdk/core/dist-es/submodules/protocols/common.js @@ -0,0 +1,2 @@ +import { collectBody } from "@smithy/smithy-client"; +export const collectBodyString = (streamBody, 
context) => collectBody(streamBody, context).then((body) => context.utf8Encoder(body)); diff --git a/amplify/functions/fetchDocuments/node_modules/@aws-sdk/core/dist-es/submodules/protocols/index.js b/amplify/functions/fetchDocuments/node_modules/@aws-sdk/core/dist-es/submodules/protocols/index.js new file mode 100644 index 0000000..09a6ac2 --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@aws-sdk/core/dist-es/submodules/protocols/index.js @@ -0,0 +1,4 @@ +export * from "./coercing-serializers"; +export * from "./json/awsExpectUnion"; +export * from "./json/parseJsonBody"; +export * from "./xml/parseXmlBody"; diff --git a/amplify/functions/fetchDocuments/node_modules/@aws-sdk/core/dist-es/submodules/protocols/json/awsExpectUnion.js b/amplify/functions/fetchDocuments/node_modules/@aws-sdk/core/dist-es/submodules/protocols/json/awsExpectUnion.js new file mode 100644 index 0000000..1c6cc32 --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@aws-sdk/core/dist-es/submodules/protocols/json/awsExpectUnion.js @@ -0,0 +1,10 @@ +import { expectUnion } from "@smithy/smithy-client"; +export const awsExpectUnion = (value) => { + if (value == null) { + return undefined; + } + if (typeof value === "object" && "__type" in value) { + delete value.__type; + } + return expectUnion(value); +}; diff --git a/amplify/functions/fetchDocuments/node_modules/@aws-sdk/core/dist-es/submodules/protocols/json/parseJsonBody.js b/amplify/functions/fetchDocuments/node_modules/@aws-sdk/core/dist-es/submodules/protocols/json/parseJsonBody.js new file mode 100644 index 0000000..d9c1564 --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@aws-sdk/core/dist-es/submodules/protocols/json/parseJsonBody.js @@ -0,0 +1,51 @@ +import { collectBodyString } from "../common"; +export const parseJsonBody = (streamBody, context) => collectBodyString(streamBody, context).then((encoded) => { + if (encoded.length) { + try { + return JSON.parse(encoded); + } + catch (e) { + 
if (e?.name === "SyntaxError") { + Object.defineProperty(e, "$responseBodyText", { + value: encoded, + }); + } + throw e; + } + } + return {}; +}); +export const parseJsonErrorBody = async (errorBody, context) => { + const value = await parseJsonBody(errorBody, context); + value.message = value.message ?? value.Message; + return value; +}; +export const loadRestJsonErrorCode = (output, data) => { + const findKey = (object, key) => Object.keys(object).find((k) => k.toLowerCase() === key.toLowerCase()); + const sanitizeErrorCode = (rawValue) => { + let cleanValue = rawValue; + if (typeof cleanValue === "number") { + cleanValue = cleanValue.toString(); + } + if (cleanValue.indexOf(",") >= 0) { + cleanValue = cleanValue.split(",")[0]; + } + if (cleanValue.indexOf(":") >= 0) { + cleanValue = cleanValue.split(":")[0]; + } + if (cleanValue.indexOf("#") >= 0) { + cleanValue = cleanValue.split("#")[1]; + } + return cleanValue; + }; + const headerKey = findKey(output.headers, "x-amzn-errortype"); + if (headerKey !== undefined) { + return sanitizeErrorCode(output.headers[headerKey]); + } + if (data.code !== undefined) { + return sanitizeErrorCode(data.code); + } + if (data["__type"] !== undefined) { + return sanitizeErrorCode(data["__type"]); + } +}; diff --git a/amplify/functions/fetchDocuments/node_modules/@aws-sdk/core/dist-es/submodules/protocols/xml/parseXmlBody.js b/amplify/functions/fetchDocuments/node_modules/@aws-sdk/core/dist-es/submodules/protocols/xml/parseXmlBody.js new file mode 100644 index 0000000..556a967 --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@aws-sdk/core/dist-es/submodules/protocols/xml/parseXmlBody.js @@ -0,0 +1,57 @@ +import { getValueFromTextNode } from "@smithy/smithy-client"; +import { XMLParser } from "fast-xml-parser"; +import { collectBodyString } from "../common"; +export const parseXmlBody = (streamBody, context) => collectBodyString(streamBody, context).then((encoded) => { + if (encoded.length) { + const parser = new 
XMLParser({ + attributeNamePrefix: "", + htmlEntities: true, + ignoreAttributes: false, + ignoreDeclaration: true, + parseTagValue: false, + trimValues: false, + tagValueProcessor: (_, val) => (val.trim() === "" && val.includes("\n") ? "" : undefined), + }); + parser.addEntity("#xD", "\r"); + parser.addEntity("#10", "\n"); + let parsedObj; + try { + parsedObj = parser.parse(encoded, true); + } + catch (e) { + if (e && typeof e === "object") { + Object.defineProperty(e, "$responseBodyText", { + value: encoded, + }); + } + throw e; + } + const textNodeName = "#text"; + const key = Object.keys(parsedObj)[0]; + const parsedObjToReturn = parsedObj[key]; + if (parsedObjToReturn[textNodeName]) { + parsedObjToReturn[key] = parsedObjToReturn[textNodeName]; + delete parsedObjToReturn[textNodeName]; + } + return getValueFromTextNode(parsedObjToReturn); + } + return {}; +}); +export const parseXmlErrorBody = async (errorBody, context) => { + const value = await parseXmlBody(errorBody, context); + if (value.Error) { + value.Error.message = value.Error.message ?? 
value.Error.Message; + } + return value; +}; +export const loadRestXmlErrorCode = (output, data) => { + if (data?.Error?.Code !== undefined) { + return data.Error.Code; + } + if (data?.Code !== undefined) { + return data.Code; + } + if (output.statusCode == 404) { + return "NotFound"; + } +}; diff --git a/amplify/functions/fetchDocuments/node_modules/@aws-sdk/core/dist-types/api-extractor-type-index.d.ts b/amplify/functions/fetchDocuments/node_modules/@aws-sdk/core/dist-types/api-extractor-type-index.d.ts new file mode 100644 index 0000000..e83f927 --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@aws-sdk/core/dist-types/api-extractor-type-index.d.ts @@ -0,0 +1,5 @@ +export * from "./index"; +export * from "./submodules/account-id-endpoint/index"; +export * from "./submodules/client/index"; +export * from "./submodules/httpAuthSchemes/index"; +export * from "./submodules/protocols/index"; diff --git a/amplify/functions/fetchDocuments/node_modules/@aws-sdk/core/dist-types/index.d.ts b/amplify/functions/fetchDocuments/node_modules/@aws-sdk/core/dist-types/index.d.ts new file mode 100644 index 0000000..5d51cdb --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@aws-sdk/core/dist-types/index.d.ts @@ -0,0 +1,22 @@ +/** + * Submodules annotated with "Legacy" are from prior to the submodule system. + * They are exported from the package's root index to preserve backwards compatibility. + * + * New development should go in a proper submodule and not be exported from the root index. + */ +/** + * Legacy submodule. + */ +export * from "./submodules/client/index"; +/** + * Legacy submodule. + */ +export * from "./submodules/httpAuthSchemes/index"; +/** + * Legacy submodule. + */ +export * from "./submodules/protocols/index"; +/** + * Warning: do not export any additional submodules from the root of this package. See readme.md for + * guide on developing submodules. 
+ */ diff --git a/amplify/functions/fetchDocuments/node_modules/@aws-sdk/core/dist-types/submodules/account-id-endpoint/AccountIdEndpointModeConfigResolver.d.ts b/amplify/functions/fetchDocuments/node_modules/@aws-sdk/core/dist-types/submodules/account-id-endpoint/AccountIdEndpointModeConfigResolver.d.ts new file mode 100644 index 0000000..bf612a2 --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@aws-sdk/core/dist-types/submodules/account-id-endpoint/AccountIdEndpointModeConfigResolver.d.ts @@ -0,0 +1,27 @@ +import { Provider } from "@smithy/types"; +import { AccountIdEndpointMode } from "./AccountIdEndpointModeConstants"; +/** + * @public + */ +export interface AccountIdEndpointModeInputConfig { + /** + * The account ID endpoint mode to use. + */ + accountIdEndpointMode?: AccountIdEndpointMode | Provider; +} +/** + * @internal + */ +interface PreviouslyResolved { +} +/** + * @internal + */ +export interface AccountIdEndpointModeResolvedConfig { + accountIdEndpointMode: Provider; +} +/** + * @internal + */ +export declare const resolveAccountIdEndpointModeConfig: (input: T & AccountIdEndpointModeInputConfig & PreviouslyResolved) => T & AccountIdEndpointModeResolvedConfig; +export {}; diff --git a/amplify/functions/fetchDocuments/node_modules/@aws-sdk/core/dist-types/submodules/account-id-endpoint/AccountIdEndpointModeConstants.d.ts b/amplify/functions/fetchDocuments/node_modules/@aws-sdk/core/dist-types/submodules/account-id-endpoint/AccountIdEndpointModeConstants.d.ts new file mode 100644 index 0000000..640a747 --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@aws-sdk/core/dist-types/submodules/account-id-endpoint/AccountIdEndpointModeConstants.d.ts @@ -0,0 +1,16 @@ +/** + * @public + */ +export type AccountIdEndpointMode = "disabled" | "preferred" | "required"; +/** + * @internal + */ +export declare const DEFAULT_ACCOUNT_ID_ENDPOINT_MODE = "preferred"; +/** + * @internal + */ +export declare const 
ACCOUNT_ID_ENDPOINT_MODE_VALUES: AccountIdEndpointMode[]; +/** + * @internal + */ +export declare function validateAccountIdEndpointMode(value: any): value is AccountIdEndpointMode; diff --git a/amplify/functions/fetchDocuments/node_modules/@aws-sdk/core/dist-types/submodules/account-id-endpoint/NodeAccountIdEndpointModeConfigOptions.d.ts b/amplify/functions/fetchDocuments/node_modules/@aws-sdk/core/dist-types/submodules/account-id-endpoint/NodeAccountIdEndpointModeConfigOptions.d.ts new file mode 100644 index 0000000..96b8059 --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@aws-sdk/core/dist-types/submodules/account-id-endpoint/NodeAccountIdEndpointModeConfigOptions.d.ts @@ -0,0 +1,14 @@ +import { LoadedConfigSelectors } from "@smithy/node-config-provider"; +import { AccountIdEndpointMode } from "./AccountIdEndpointModeConstants"; +/** + * @internal + */ +export declare const ENV_ACCOUNT_ID_ENDPOINT_MODE = "AWS_ACCOUNT_ID_ENDPOINT_MODE"; +/** + * @internal + */ +export declare const CONFIG_ACCOUNT_ID_ENDPOINT_MODE = "account_id_endpoint_mode"; +/** + * @internal + */ +export declare const NODE_ACCOUNT_ID_ENDPOINT_MODE_CONFIG_OPTIONS: LoadedConfigSelectors; diff --git a/amplify/functions/fetchDocuments/node_modules/@aws-sdk/core/dist-types/submodules/account-id-endpoint/index.d.ts b/amplify/functions/fetchDocuments/node_modules/@aws-sdk/core/dist-types/submodules/account-id-endpoint/index.d.ts new file mode 100644 index 0000000..52af11d --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@aws-sdk/core/dist-types/submodules/account-id-endpoint/index.d.ts @@ -0,0 +1,3 @@ +export * from "./AccountIdEndpointModeConfigResolver"; +export * from "./AccountIdEndpointModeConstants"; +export * from "./NodeAccountIdEndpointModeConfigOptions"; diff --git a/amplify/functions/fetchDocuments/node_modules/@aws-sdk/core/dist-types/submodules/client/emitWarningIfUnsupportedVersion.d.ts 
b/amplify/functions/fetchDocuments/node_modules/@aws-sdk/core/dist-types/submodules/client/emitWarningIfUnsupportedVersion.d.ts new file mode 100644 index 0000000..d97bc8c --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@aws-sdk/core/dist-types/submodules/client/emitWarningIfUnsupportedVersion.d.ts @@ -0,0 +1,12 @@ +export declare const state: { + warningEmitted: boolean; +}; +/** + * @internal + * + * Emits warning if the provided Node.js version string is + * pending deprecation by AWS SDK JSv3. + * + * @param version - The Node.js version string. + */ +export declare const emitWarningIfUnsupportedVersion: (version: string) => void; diff --git a/amplify/functions/fetchDocuments/node_modules/@aws-sdk/core/dist-types/submodules/client/index.d.ts b/amplify/functions/fetchDocuments/node_modules/@aws-sdk/core/dist-types/submodules/client/index.d.ts new file mode 100644 index 0000000..1a2cc9d --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@aws-sdk/core/dist-types/submodules/client/index.d.ts @@ -0,0 +1,3 @@ +export * from "./emitWarningIfUnsupportedVersion"; +export * from "./setCredentialFeature"; +export * from "./setFeature"; diff --git a/amplify/functions/fetchDocuments/node_modules/@aws-sdk/core/dist-types/submodules/client/setCredentialFeature.d.ts b/amplify/functions/fetchDocuments/node_modules/@aws-sdk/core/dist-types/submodules/client/setCredentialFeature.d.ts new file mode 100644 index 0000000..b3b4a68 --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@aws-sdk/core/dist-types/submodules/client/setCredentialFeature.d.ts @@ -0,0 +1,7 @@ +import type { AttributedAwsCredentialIdentity, AwsSdkCredentialsFeatures } from "@aws-sdk/types"; +/** + * @internal + * + * @returns the credentials with source feature attribution. 
+ */ +export declare function setCredentialFeature(credentials: AttributedAwsCredentialIdentity, feature: F, value: AwsSdkCredentialsFeatures[F]): AttributedAwsCredentialIdentity; diff --git a/amplify/functions/fetchDocuments/node_modules/@aws-sdk/core/dist-types/submodules/client/setFeature.d.ts b/amplify/functions/fetchDocuments/node_modules/@aws-sdk/core/dist-types/submodules/client/setFeature.d.ts new file mode 100644 index 0000000..93458bf --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@aws-sdk/core/dist-types/submodules/client/setFeature.d.ts @@ -0,0 +1,12 @@ +import type { AwsHandlerExecutionContext, AwsSdkFeatures } from "@aws-sdk/types"; +/** + * @internal + * Indicates to the request context that a given feature is active. + * + * @param context - handler execution context. + * @param feature - readable name of feature. + * @param value - encoding value of feature. This is required because the + * specification asks the SDK not to include a runtime lookup of all + * the feature identifiers. + */ +export declare function setFeature(context: AwsHandlerExecutionContext, feature: F, value: AwsSdkFeatures[F]): void; diff --git a/amplify/functions/fetchDocuments/node_modules/@aws-sdk/core/dist-types/submodules/httpAuthSchemes/aws_sdk/AwsSdkSigV4ASigner.d.ts b/amplify/functions/fetchDocuments/node_modules/@aws-sdk/core/dist-types/submodules/httpAuthSchemes/aws_sdk/AwsSdkSigV4ASigner.d.ts new file mode 100644 index 0000000..051b17c --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@aws-sdk/core/dist-types/submodules/httpAuthSchemes/aws_sdk/AwsSdkSigV4ASigner.d.ts @@ -0,0 +1,10 @@ +import { AwsCredentialIdentity, HttpRequest as IHttpRequest } from "@smithy/types"; +import { AwsSdkSigV4Signer } from "./AwsSdkSigV4Signer"; +/** + * @internal + * Note: this is not a signing algorithm implementation. The sign method + * accepts the real signer as an input parameter. 
+ */ +export declare class AwsSdkSigV4ASigner extends AwsSdkSigV4Signer { + sign(httpRequest: IHttpRequest, identity: AwsCredentialIdentity, signingProperties: Record): Promise; +} diff --git a/amplify/functions/fetchDocuments/node_modules/@aws-sdk/core/dist-types/submodules/httpAuthSchemes/aws_sdk/AwsSdkSigV4Signer.d.ts b/amplify/functions/fetchDocuments/node_modules/@aws-sdk/core/dist-types/submodules/httpAuthSchemes/aws_sdk/AwsSdkSigV4Signer.d.ts new file mode 100644 index 0000000..7c1b550 --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@aws-sdk/core/dist-types/submodules/httpAuthSchemes/aws_sdk/AwsSdkSigV4Signer.d.ts @@ -0,0 +1,43 @@ +import { AuthScheme, AwsCredentialIdentity, HttpRequest as IHttpRequest, HttpResponse, HttpSigner, RequestSigner } from "@smithy/types"; +import { AwsSdkSigV4AAuthResolvedConfig } from "./resolveAwsSdkSigV4AConfig"; +/** + * @internal + */ +interface AwsSdkSigV4Config extends AwsSdkSigV4AAuthResolvedConfig { + systemClockOffset: number; + signer: (authScheme?: AuthScheme) => Promise; +} +/** + * @internal + */ +interface AwsSdkSigV4AuthSigningProperties { + config: AwsSdkSigV4Config; + signer: RequestSigner; + signingRegion?: string; + signingRegionSet?: string[]; + signingName?: string; +} +/** + * @internal + */ +export declare const validateSigningProperties: (signingProperties: Record) => Promise; +/** + * Note: this is not a signing algorithm implementation. The sign method + * accepts the real signer as an input parameter. 
+ * @internal + */ +export declare class AwsSdkSigV4Signer implements HttpSigner { + sign(httpRequest: IHttpRequest, + /** + * `identity` is bound in {@link resolveAWSSDKSigV4Config} + */ + identity: AwsCredentialIdentity, signingProperties: Record): Promise; + errorHandler(signingProperties: Record): (error: Error) => never; + successHandler(httpResponse: HttpResponse | unknown, signingProperties: Record): void; +} +/** + * @internal + * @deprecated renamed to {@link AwsSdkSigV4Signer} + */ +export declare const AWSSDKSigV4Signer: typeof AwsSdkSigV4Signer; +export {}; diff --git a/amplify/functions/fetchDocuments/node_modules/@aws-sdk/core/dist-types/submodules/httpAuthSchemes/aws_sdk/NODE_AUTH_SCHEME_PREFERENCE_OPTIONS.d.ts b/amplify/functions/fetchDocuments/node_modules/@aws-sdk/core/dist-types/submodules/httpAuthSchemes/aws_sdk/NODE_AUTH_SCHEME_PREFERENCE_OPTIONS.d.ts new file mode 100644 index 0000000..edf3162 --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@aws-sdk/core/dist-types/submodules/httpAuthSchemes/aws_sdk/NODE_AUTH_SCHEME_PREFERENCE_OPTIONS.d.ts @@ -0,0 +1,5 @@ +import { LoadedConfigSelectors } from "@smithy/node-config-provider"; +/** + * @public + */ +export declare const NODE_AUTH_SCHEME_PREFERENCE_OPTIONS: LoadedConfigSelectors; diff --git a/amplify/functions/fetchDocuments/node_modules/@aws-sdk/core/dist-types/submodules/httpAuthSchemes/aws_sdk/index.d.ts b/amplify/functions/fetchDocuments/node_modules/@aws-sdk/core/dist-types/submodules/httpAuthSchemes/aws_sdk/index.d.ts new file mode 100644 index 0000000..4071225 --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@aws-sdk/core/dist-types/submodules/httpAuthSchemes/aws_sdk/index.d.ts @@ -0,0 +1,5 @@ +export { AwsSdkSigV4Signer, AWSSDKSigV4Signer, validateSigningProperties } from "./AwsSdkSigV4Signer"; +export { AwsSdkSigV4ASigner } from "./AwsSdkSigV4ASigner"; +export * from "./NODE_AUTH_SCHEME_PREFERENCE_OPTIONS"; +export * from "./resolveAwsSdkSigV4AConfig"; 
+export * from "./resolveAwsSdkSigV4Config"; diff --git a/amplify/functions/fetchDocuments/node_modules/@aws-sdk/core/dist-types/submodules/httpAuthSchemes/aws_sdk/resolveAwsSdkSigV4AConfig.d.ts b/amplify/functions/fetchDocuments/node_modules/@aws-sdk/core/dist-types/submodules/httpAuthSchemes/aws_sdk/resolveAwsSdkSigV4AConfig.d.ts new file mode 100644 index 0000000..f741625 --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@aws-sdk/core/dist-types/submodules/httpAuthSchemes/aws_sdk/resolveAwsSdkSigV4AConfig.d.ts @@ -0,0 +1,38 @@ +import { LoadedConfigSelectors } from "@smithy/node-config-provider"; +import { Provider } from "@smithy/types"; +/** + * @public + */ +export interface AwsSdkSigV4AAuthInputConfig { + /** + * This option will override the AWS sigv4a + * signing regionSet from any other source. + * + * The lookup order is: + * 1. this value + * 2. configuration file value of sigv4a_signing_region_set. + * 3. environment value of AWS_SIGV4A_SIGNING_REGION_SET. + * 4. signingRegionSet given by endpoint resolution. + * 5. the singular region of the SDK client. 
+ */ + sigv4aSigningRegionSet?: string[] | undefined | Provider; +} +/** + * @internal + */ +export interface AwsSdkSigV4APreviouslyResolved { +} +/** + * @internal + */ +export interface AwsSdkSigV4AAuthResolvedConfig { + sigv4aSigningRegionSet: Provider; +} +/** + * @internal + */ +export declare const resolveAwsSdkSigV4AConfig: (config: T & AwsSdkSigV4AAuthInputConfig & AwsSdkSigV4APreviouslyResolved) => T & AwsSdkSigV4AAuthResolvedConfig; +/** + * @internal + */ +export declare const NODE_SIGV4A_CONFIG_OPTIONS: LoadedConfigSelectors; diff --git a/amplify/functions/fetchDocuments/node_modules/@aws-sdk/core/dist-types/submodules/httpAuthSchemes/aws_sdk/resolveAwsSdkSigV4Config.d.ts b/amplify/functions/fetchDocuments/node_modules/@aws-sdk/core/dist-types/submodules/httpAuthSchemes/aws_sdk/resolveAwsSdkSigV4Config.d.ts new file mode 100644 index 0000000..cf42eec --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@aws-sdk/core/dist-types/submodules/httpAuthSchemes/aws_sdk/resolveAwsSdkSigV4Config.d.ts @@ -0,0 +1,117 @@ +import type { MergeFunctions } from "@aws-sdk/types"; +import { SignatureV4CryptoInit, SignatureV4Init } from "@smithy/signature-v4"; +import { AuthScheme, AwsCredentialIdentity, AwsCredentialIdentityProvider, ChecksumConstructor, HashConstructor, MemoizedProvider, Provider, RegionInfoProvider, RequestSigner } from "@smithy/types"; +/** + * @public + */ +export interface AwsSdkSigV4AuthInputConfig { + /** + * The credentials used to sign requests. + */ + credentials?: AwsCredentialIdentity | AwsCredentialIdentityProvider; + /** + * The signer to use when signing requests. + */ + signer?: RequestSigner | ((authScheme?: AuthScheme) => Promise); + /** + * Whether to escape request path when signing the request. + */ + signingEscapePath?: boolean; + /** + * An offset value in milliseconds to apply to all signing times. + */ + systemClockOffset?: number; + /** + * The region where you want to sign your request against. 
This + * can be different to the region in the endpoint. + */ + signingRegion?: string; + /** + * The injectable SigV4-compatible signer class constructor. If not supplied, + * regular SignatureV4 constructor will be used. + * + * @internal + */ + signerConstructor?: new (options: SignatureV4Init & SignatureV4CryptoInit) => RequestSigner; +} +/** + * Used to indicate whether a credential provider function was memoized by this resolver. + * @public + */ +export type AwsSdkSigV4Memoized = { + /** + * The credential provider has been memoized by the AWS SDK SigV4 config resolver. + */ + memoized?: boolean; + /** + * The credential provider has the caller client config object bound to its arguments. + */ + configBound?: boolean; + /** + * Function is wrapped with attribution transform. + */ + attributed?: boolean; +}; +/** + * @internal + */ +export interface AwsSdkSigV4PreviouslyResolved { + credentialDefaultProvider?: (input: any) => MemoizedProvider; + region: string | Provider; + sha256: ChecksumConstructor | HashConstructor; + signingName?: string; + regionInfoProvider?: RegionInfoProvider; + defaultSigningName?: string; + serviceId: string; + useFipsEndpoint: Provider; + useDualstackEndpoint: Provider; +} +/** + * @internal + */ +export interface AwsSdkSigV4AuthResolvedConfig { + /** + * Resolved value for input config {@link AwsSdkSigV4AuthInputConfig.credentials} + * This provider MAY memoize the loaded credentials for certain period. 
+ */ + credentials: MergeFunctions> & AwsSdkSigV4Memoized; + /** + * Resolved value for input config {@link AwsSdkSigV4AuthInputConfig.signer} + */ + signer: (authScheme?: AuthScheme) => Promise; + /** + * Resolved value for input config {@link AwsSdkSigV4AuthInputConfig.signingEscapePath} + */ + signingEscapePath: boolean; + /** + * Resolved value for input config {@link AwsSdkSigV4AuthInputConfig.systemClockOffset} + */ + systemClockOffset: number; +} +/** + * @internal + */ +export declare const resolveAwsSdkSigV4Config: (config: T & AwsSdkSigV4AuthInputConfig & AwsSdkSigV4PreviouslyResolved) => T & AwsSdkSigV4AuthResolvedConfig; +/** + * @internal + * @deprecated renamed to {@link AwsSdkSigV4AuthInputConfig} + */ +export interface AWSSDKSigV4AuthInputConfig extends AwsSdkSigV4AuthInputConfig { +} +/** + * @internal + * @deprecated renamed to {@link AwsSdkSigV4PreviouslyResolved} + */ +export interface AWSSDKSigV4PreviouslyResolved extends AwsSdkSigV4PreviouslyResolved { +} +/** + * @internal + * @deprecated renamed to {@link AwsSdkSigV4AuthResolvedConfig} + */ +export interface AWSSDKSigV4AuthResolvedConfig extends AwsSdkSigV4AuthResolvedConfig { +} +/** + * @internal + * @deprecated renamed to {@link resolveAwsSdkSigV4Config} + */ +export declare const resolveAWSSDKSigV4Config: (config: T & AwsSdkSigV4AuthInputConfig & AwsSdkSigV4PreviouslyResolved) => T & AwsSdkSigV4AuthResolvedConfig; diff --git a/amplify/functions/fetchDocuments/node_modules/@aws-sdk/core/dist-types/submodules/httpAuthSchemes/index.d.ts b/amplify/functions/fetchDocuments/node_modules/@aws-sdk/core/dist-types/submodules/httpAuthSchemes/index.d.ts new file mode 100644 index 0000000..29d0c3b --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@aws-sdk/core/dist-types/submodules/httpAuthSchemes/index.d.ts @@ -0,0 +1 @@ +export * from "./aws_sdk"; diff --git 
a/amplify/functions/fetchDocuments/node_modules/@aws-sdk/core/dist-types/submodules/httpAuthSchemes/utils/getArrayForCommaSeparatedString.d.ts b/amplify/functions/fetchDocuments/node_modules/@aws-sdk/core/dist-types/submodules/httpAuthSchemes/utils/getArrayForCommaSeparatedString.d.ts new file mode 100644 index 0000000..823921b --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@aws-sdk/core/dist-types/submodules/httpAuthSchemes/utils/getArrayForCommaSeparatedString.d.ts @@ -0,0 +1,8 @@ +/** + * Converts a comma-separated string into an array of trimmed strings + * @param str The comma-separated input string to split + * @returns Array of trimmed strings split from the input + * + * @internal + */ +export declare const getArrayForCommaSeparatedString: (str: string) => string[]; diff --git a/amplify/functions/fetchDocuments/node_modules/@aws-sdk/core/dist-types/submodules/httpAuthSchemes/utils/getDateHeader.d.ts b/amplify/functions/fetchDocuments/node_modules/@aws-sdk/core/dist-types/submodules/httpAuthSchemes/utils/getDateHeader.d.ts new file mode 100644 index 0000000..2c9157b --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@aws-sdk/core/dist-types/submodules/httpAuthSchemes/utils/getDateHeader.d.ts @@ -0,0 +1,4 @@ +/** + * @internal + */ +export declare const getDateHeader: (response: unknown) => string | undefined; diff --git a/amplify/functions/fetchDocuments/node_modules/@aws-sdk/core/dist-types/submodules/httpAuthSchemes/utils/getSkewCorrectedDate.d.ts b/amplify/functions/fetchDocuments/node_modules/@aws-sdk/core/dist-types/submodules/httpAuthSchemes/utils/getSkewCorrectedDate.d.ts new file mode 100644 index 0000000..4b72690 --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@aws-sdk/core/dist-types/submodules/httpAuthSchemes/utils/getSkewCorrectedDate.d.ts @@ -0,0 +1,8 @@ +/** + * @internal + * + * Returns a date that is corrected for clock skew. 
+ * + * @param systemClockOffset The offset of the system clock in milliseconds. + */ +export declare const getSkewCorrectedDate: (systemClockOffset: number) => Date; diff --git a/amplify/functions/fetchDocuments/node_modules/@aws-sdk/core/dist-types/submodules/httpAuthSchemes/utils/getUpdatedSystemClockOffset.d.ts b/amplify/functions/fetchDocuments/node_modules/@aws-sdk/core/dist-types/submodules/httpAuthSchemes/utils/getUpdatedSystemClockOffset.d.ts new file mode 100644 index 0000000..2d554b8 --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@aws-sdk/core/dist-types/submodules/httpAuthSchemes/utils/getUpdatedSystemClockOffset.d.ts @@ -0,0 +1,10 @@ +/** + * @internal + * + * If clock is skewed, it returns the difference between serverTime and current time. + * If clock is not skewed, it returns currentSystemClockOffset. + * + * @param clockTime The string value of the server time. + * @param currentSystemClockOffset The current system clock offset. + */ +export declare const getUpdatedSystemClockOffset: (clockTime: string, currentSystemClockOffset: number) => number; diff --git a/amplify/functions/fetchDocuments/node_modules/@aws-sdk/core/dist-types/submodules/httpAuthSchemes/utils/index.d.ts b/amplify/functions/fetchDocuments/node_modules/@aws-sdk/core/dist-types/submodules/httpAuthSchemes/utils/index.d.ts new file mode 100644 index 0000000..07c2195 --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@aws-sdk/core/dist-types/submodules/httpAuthSchemes/utils/index.d.ts @@ -0,0 +1,3 @@ +export * from "./getDateHeader"; +export * from "./getSkewCorrectedDate"; +export * from "./getUpdatedSystemClockOffset"; diff --git a/amplify/functions/fetchDocuments/node_modules/@aws-sdk/core/dist-types/submodules/httpAuthSchemes/utils/isClockSkewed.d.ts b/amplify/functions/fetchDocuments/node_modules/@aws-sdk/core/dist-types/submodules/httpAuthSchemes/utils/isClockSkewed.d.ts new file mode 100644 index 0000000..970fa15 --- /dev/null +++ 
b/amplify/functions/fetchDocuments/node_modules/@aws-sdk/core/dist-types/submodules/httpAuthSchemes/utils/isClockSkewed.d.ts @@ -0,0 +1,9 @@ +/** + * @internal + * + * Checks if the provided date is within the skew window of 300000ms. + * + * @param clockTime - The time to check for skew in milliseconds. + * @param systemClockOffset - The offset of the system clock in milliseconds. + */ +export declare const isClockSkewed: (clockTime: number, systemClockOffset: number) => boolean; diff --git a/amplify/functions/fetchDocuments/node_modules/@aws-sdk/core/dist-types/submodules/protocols/coercing-serializers.d.ts b/amplify/functions/fetchDocuments/node_modules/@aws-sdk/core/dist-types/submodules/protocols/coercing-serializers.d.ts new file mode 100644 index 0000000..10d9d39 --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@aws-sdk/core/dist-types/submodules/protocols/coercing-serializers.d.ts @@ -0,0 +1,18 @@ +/** + * @internal + * + * Used for awsQueryCompatibility trait. + */ +export declare const _toStr: (val: unknown) => string | undefined; +/** + * @internal + * + * Used for awsQueryCompatibility trait. + */ +export declare const _toBool: (val: unknown) => boolean | undefined; +/** + * @internal + * + * Used for awsQueryCompatibility trait. 
+ */ +export declare const _toNum: (val: unknown) => number | undefined; diff --git a/amplify/functions/fetchDocuments/node_modules/@aws-sdk/core/dist-types/submodules/protocols/common.d.ts b/amplify/functions/fetchDocuments/node_modules/@aws-sdk/core/dist-types/submodules/protocols/common.d.ts new file mode 100644 index 0000000..ec78fb2 --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@aws-sdk/core/dist-types/submodules/protocols/common.d.ts @@ -0,0 +1,2 @@ +import type { SerdeContext } from "@smithy/types"; +export declare const collectBodyString: (streamBody: any, context: SerdeContext) => Promise; diff --git a/amplify/functions/fetchDocuments/node_modules/@aws-sdk/core/dist-types/submodules/protocols/index.d.ts b/amplify/functions/fetchDocuments/node_modules/@aws-sdk/core/dist-types/submodules/protocols/index.d.ts new file mode 100644 index 0000000..09a6ac2 --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@aws-sdk/core/dist-types/submodules/protocols/index.d.ts @@ -0,0 +1,4 @@ +export * from "./coercing-serializers"; +export * from "./json/awsExpectUnion"; +export * from "./json/parseJsonBody"; +export * from "./xml/parseXmlBody"; diff --git a/amplify/functions/fetchDocuments/node_modules/@aws-sdk/core/dist-types/submodules/protocols/json/awsExpectUnion.d.ts b/amplify/functions/fetchDocuments/node_modules/@aws-sdk/core/dist-types/submodules/protocols/json/awsExpectUnion.d.ts new file mode 100644 index 0000000..98607ea --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@aws-sdk/core/dist-types/submodules/protocols/json/awsExpectUnion.d.ts @@ -0,0 +1,7 @@ +/** + * @internal + * + * Forwards to Smithy's expectUnion function, but also ignores + * the `__type` field if it is present. 
+ */ +export declare const awsExpectUnion: (value: unknown) => Record | undefined; diff --git a/amplify/functions/fetchDocuments/node_modules/@aws-sdk/core/dist-types/submodules/protocols/json/parseJsonBody.d.ts b/amplify/functions/fetchDocuments/node_modules/@aws-sdk/core/dist-types/submodules/protocols/json/parseJsonBody.d.ts new file mode 100644 index 0000000..827ffe9 --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@aws-sdk/core/dist-types/submodules/protocols/json/parseJsonBody.d.ts @@ -0,0 +1,13 @@ +import type { HttpResponse, SerdeContext } from "@smithy/types"; +/** + * @internal + */ +export declare const parseJsonBody: (streamBody: any, context: SerdeContext) => any; +/** + * @internal + */ +export declare const parseJsonErrorBody: (errorBody: any, context: SerdeContext) => Promise; +/** + * @internal + */ +export declare const loadRestJsonErrorCode: (output: HttpResponse, data: any) => string | undefined; diff --git a/amplify/functions/fetchDocuments/node_modules/@aws-sdk/core/dist-types/submodules/protocols/xml/parseXmlBody.d.ts b/amplify/functions/fetchDocuments/node_modules/@aws-sdk/core/dist-types/submodules/protocols/xml/parseXmlBody.d.ts new file mode 100644 index 0000000..30cfc30 --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@aws-sdk/core/dist-types/submodules/protocols/xml/parseXmlBody.d.ts @@ -0,0 +1,13 @@ +import type { HttpResponse, SerdeContext } from "@smithy/types"; +/** + * @internal + */ +export declare const parseXmlBody: (streamBody: any, context: SerdeContext) => any; +/** + * @internal + */ +export declare const parseXmlErrorBody: (errorBody: any, context: SerdeContext) => Promise; +/** + * @internal + */ +export declare const loadRestXmlErrorCode: (output: HttpResponse, data: any) => string | undefined; diff --git a/amplify/functions/fetchDocuments/node_modules/@aws-sdk/core/dist-types/ts3.4/api-extractor-type-index.d.ts 
b/amplify/functions/fetchDocuments/node_modules/@aws-sdk/core/dist-types/ts3.4/api-extractor-type-index.d.ts new file mode 100644 index 0000000..e83f927 --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@aws-sdk/core/dist-types/ts3.4/api-extractor-type-index.d.ts @@ -0,0 +1,5 @@ +export * from "./index"; +export * from "./submodules/account-id-endpoint/index"; +export * from "./submodules/client/index"; +export * from "./submodules/httpAuthSchemes/index"; +export * from "./submodules/protocols/index"; diff --git a/amplify/functions/fetchDocuments/node_modules/@aws-sdk/core/dist-types/ts3.4/index.d.ts b/amplify/functions/fetchDocuments/node_modules/@aws-sdk/core/dist-types/ts3.4/index.d.ts new file mode 100644 index 0000000..239de7a --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@aws-sdk/core/dist-types/ts3.4/index.d.ts @@ -0,0 +1,3 @@ +export * from "./submodules/client/index"; +export * from "./submodules/httpAuthSchemes/index"; +export * from "./submodules/protocols/index"; diff --git a/amplify/functions/fetchDocuments/node_modules/@aws-sdk/core/dist-types/ts3.4/submodules/account-id-endpoint/AccountIdEndpointModeConfigResolver.d.ts b/amplify/functions/fetchDocuments/node_modules/@aws-sdk/core/dist-types/ts3.4/submodules/account-id-endpoint/AccountIdEndpointModeConfigResolver.d.ts new file mode 100644 index 0000000..10d5c21 --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@aws-sdk/core/dist-types/ts3.4/submodules/account-id-endpoint/AccountIdEndpointModeConfigResolver.d.ts @@ -0,0 +1,15 @@ +import { Provider } from "@smithy/types"; +import { AccountIdEndpointMode } from "./AccountIdEndpointModeConstants"; +export interface AccountIdEndpointModeInputConfig { + accountIdEndpointMode?: + | AccountIdEndpointMode + | Provider; +} +interface PreviouslyResolved {} +export interface AccountIdEndpointModeResolvedConfig { + accountIdEndpointMode: Provider; +} +export declare const resolveAccountIdEndpointModeConfig: ( + 
input: T & AccountIdEndpointModeInputConfig & PreviouslyResolved +) => T & AccountIdEndpointModeResolvedConfig; +export {}; diff --git a/amplify/functions/fetchDocuments/node_modules/@aws-sdk/core/dist-types/ts3.4/submodules/account-id-endpoint/AccountIdEndpointModeConstants.d.ts b/amplify/functions/fetchDocuments/node_modules/@aws-sdk/core/dist-types/ts3.4/submodules/account-id-endpoint/AccountIdEndpointModeConstants.d.ts new file mode 100644 index 0000000..27bdce9 --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@aws-sdk/core/dist-types/ts3.4/submodules/account-id-endpoint/AccountIdEndpointModeConstants.d.ts @@ -0,0 +1,6 @@ +export type AccountIdEndpointMode = "disabled" | "preferred" | "required"; +export declare const DEFAULT_ACCOUNT_ID_ENDPOINT_MODE = "preferred"; +export declare const ACCOUNT_ID_ENDPOINT_MODE_VALUES: AccountIdEndpointMode[]; +export declare function validateAccountIdEndpointMode( + value: any +): value is AccountIdEndpointMode; diff --git a/amplify/functions/fetchDocuments/node_modules/@aws-sdk/core/dist-types/ts3.4/submodules/account-id-endpoint/NodeAccountIdEndpointModeConfigOptions.d.ts b/amplify/functions/fetchDocuments/node_modules/@aws-sdk/core/dist-types/ts3.4/submodules/account-id-endpoint/NodeAccountIdEndpointModeConfigOptions.d.ts new file mode 100644 index 0000000..9b04566 --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@aws-sdk/core/dist-types/ts3.4/submodules/account-id-endpoint/NodeAccountIdEndpointModeConfigOptions.d.ts @@ -0,0 +1,7 @@ +import { LoadedConfigSelectors } from "@smithy/node-config-provider"; +import { AccountIdEndpointMode } from "./AccountIdEndpointModeConstants"; +export declare const ENV_ACCOUNT_ID_ENDPOINT_MODE = + "AWS_ACCOUNT_ID_ENDPOINT_MODE"; +export declare const CONFIG_ACCOUNT_ID_ENDPOINT_MODE = + "account_id_endpoint_mode"; +export declare const NODE_ACCOUNT_ID_ENDPOINT_MODE_CONFIG_OPTIONS: LoadedConfigSelectors; diff --git 
a/amplify/functions/fetchDocuments/node_modules/@aws-sdk/core/dist-types/ts3.4/submodules/account-id-endpoint/index.d.ts b/amplify/functions/fetchDocuments/node_modules/@aws-sdk/core/dist-types/ts3.4/submodules/account-id-endpoint/index.d.ts new file mode 100644 index 0000000..52af11d --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@aws-sdk/core/dist-types/ts3.4/submodules/account-id-endpoint/index.d.ts @@ -0,0 +1,3 @@ +export * from "./AccountIdEndpointModeConfigResolver"; +export * from "./AccountIdEndpointModeConstants"; +export * from "./NodeAccountIdEndpointModeConfigOptions"; diff --git a/amplify/functions/fetchDocuments/node_modules/@aws-sdk/core/dist-types/ts3.4/submodules/client/emitWarningIfUnsupportedVersion.d.ts b/amplify/functions/fetchDocuments/node_modules/@aws-sdk/core/dist-types/ts3.4/submodules/client/emitWarningIfUnsupportedVersion.d.ts new file mode 100644 index 0000000..84af567 --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@aws-sdk/core/dist-types/ts3.4/submodules/client/emitWarningIfUnsupportedVersion.d.ts @@ -0,0 +1,4 @@ +export declare const state: { + warningEmitted: boolean; +}; +export declare const emitWarningIfUnsupportedVersion: (version: string) => void; diff --git a/amplify/functions/fetchDocuments/node_modules/@aws-sdk/core/dist-types/ts3.4/submodules/client/index.d.ts b/amplify/functions/fetchDocuments/node_modules/@aws-sdk/core/dist-types/ts3.4/submodules/client/index.d.ts new file mode 100644 index 0000000..1a2cc9d --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@aws-sdk/core/dist-types/ts3.4/submodules/client/index.d.ts @@ -0,0 +1,3 @@ +export * from "./emitWarningIfUnsupportedVersion"; +export * from "./setCredentialFeature"; +export * from "./setFeature"; diff --git a/amplify/functions/fetchDocuments/node_modules/@aws-sdk/core/dist-types/ts3.4/submodules/client/setCredentialFeature.d.ts 
b/amplify/functions/fetchDocuments/node_modules/@aws-sdk/core/dist-types/ts3.4/submodules/client/setCredentialFeature.d.ts new file mode 100644 index 0000000..1336619 --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@aws-sdk/core/dist-types/ts3.4/submodules/client/setCredentialFeature.d.ts @@ -0,0 +1,11 @@ +import { + AttributedAwsCredentialIdentity, + AwsSdkCredentialsFeatures, +} from "@aws-sdk/types"; +export declare function setCredentialFeature< + F extends keyof AwsSdkCredentialsFeatures +>( + credentials: AttributedAwsCredentialIdentity, + feature: F, + value: AwsSdkCredentialsFeatures[F] +): AttributedAwsCredentialIdentity; diff --git a/amplify/functions/fetchDocuments/node_modules/@aws-sdk/core/dist-types/ts3.4/submodules/client/setFeature.d.ts b/amplify/functions/fetchDocuments/node_modules/@aws-sdk/core/dist-types/ts3.4/submodules/client/setFeature.d.ts new file mode 100644 index 0000000..84482ee --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@aws-sdk/core/dist-types/ts3.4/submodules/client/setFeature.d.ts @@ -0,0 +1,6 @@ +import { AwsHandlerExecutionContext, AwsSdkFeatures } from "@aws-sdk/types"; +export declare function setFeature( + context: AwsHandlerExecutionContext, + feature: F, + value: AwsSdkFeatures[F] +): void; diff --git a/amplify/functions/fetchDocuments/node_modules/@aws-sdk/core/dist-types/ts3.4/submodules/httpAuthSchemes/aws_sdk/AwsSdkSigV4ASigner.d.ts b/amplify/functions/fetchDocuments/node_modules/@aws-sdk/core/dist-types/ts3.4/submodules/httpAuthSchemes/aws_sdk/AwsSdkSigV4ASigner.d.ts new file mode 100644 index 0000000..b8c2b74 --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@aws-sdk/core/dist-types/ts3.4/submodules/httpAuthSchemes/aws_sdk/AwsSdkSigV4ASigner.d.ts @@ -0,0 +1,12 @@ +import { + AwsCredentialIdentity, + HttpRequest as IHttpRequest, +} from "@smithy/types"; +import { AwsSdkSigV4Signer } from "./AwsSdkSigV4Signer"; +export declare class AwsSdkSigV4ASigner extends 
AwsSdkSigV4Signer { + sign( + httpRequest: IHttpRequest, + identity: AwsCredentialIdentity, + signingProperties: Record + ): Promise; +} diff --git a/amplify/functions/fetchDocuments/node_modules/@aws-sdk/core/dist-types/ts3.4/submodules/httpAuthSchemes/aws_sdk/AwsSdkSigV4Signer.d.ts b/amplify/functions/fetchDocuments/node_modules/@aws-sdk/core/dist-types/ts3.4/submodules/httpAuthSchemes/aws_sdk/AwsSdkSigV4Signer.d.ts new file mode 100644 index 0000000..0be6b41 --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@aws-sdk/core/dist-types/ts3.4/submodules/httpAuthSchemes/aws_sdk/AwsSdkSigV4Signer.d.ts @@ -0,0 +1,39 @@ +import { + AuthScheme, + AwsCredentialIdentity, + HttpRequest as IHttpRequest, + HttpResponse, + HttpSigner, + RequestSigner, +} from "@smithy/types"; +import { AwsSdkSigV4AAuthResolvedConfig } from "./resolveAwsSdkSigV4AConfig"; +interface AwsSdkSigV4Config extends AwsSdkSigV4AAuthResolvedConfig { + systemClockOffset: number; + signer: (authScheme?: AuthScheme) => Promise; +} +interface AwsSdkSigV4AuthSigningProperties { + config: AwsSdkSigV4Config; + signer: RequestSigner; + signingRegion?: string; + signingRegionSet?: string[]; + signingName?: string; +} +export declare const validateSigningProperties: ( + signingProperties: Record +) => Promise; +export declare class AwsSdkSigV4Signer implements HttpSigner { + sign( + httpRequest: IHttpRequest, + identity: AwsCredentialIdentity, + signingProperties: Record + ): Promise; + errorHandler( + signingProperties: Record + ): (error: Error) => never; + successHandler( + httpResponse: HttpResponse | unknown, + signingProperties: Record + ): void; +} +export declare const AWSSDKSigV4Signer: typeof AwsSdkSigV4Signer; +export {}; diff --git a/amplify/functions/fetchDocuments/node_modules/@aws-sdk/core/dist-types/ts3.4/submodules/httpAuthSchemes/aws_sdk/NODE_AUTH_SCHEME_PREFERENCE_OPTIONS.d.ts 
b/amplify/functions/fetchDocuments/node_modules/@aws-sdk/core/dist-types/ts3.4/submodules/httpAuthSchemes/aws_sdk/NODE_AUTH_SCHEME_PREFERENCE_OPTIONS.d.ts new file mode 100644 index 0000000..effc1e0 --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@aws-sdk/core/dist-types/ts3.4/submodules/httpAuthSchemes/aws_sdk/NODE_AUTH_SCHEME_PREFERENCE_OPTIONS.d.ts @@ -0,0 +1,4 @@ +import { LoadedConfigSelectors } from "@smithy/node-config-provider"; +export declare const NODE_AUTH_SCHEME_PREFERENCE_OPTIONS: LoadedConfigSelectors< + string[] +>; diff --git a/amplify/functions/fetchDocuments/node_modules/@aws-sdk/core/dist-types/ts3.4/submodules/httpAuthSchemes/aws_sdk/index.d.ts b/amplify/functions/fetchDocuments/node_modules/@aws-sdk/core/dist-types/ts3.4/submodules/httpAuthSchemes/aws_sdk/index.d.ts new file mode 100644 index 0000000..6047921 --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@aws-sdk/core/dist-types/ts3.4/submodules/httpAuthSchemes/aws_sdk/index.d.ts @@ -0,0 +1,9 @@ +export { + AwsSdkSigV4Signer, + AWSSDKSigV4Signer, + validateSigningProperties, +} from "./AwsSdkSigV4Signer"; +export { AwsSdkSigV4ASigner } from "./AwsSdkSigV4ASigner"; +export * from "./NODE_AUTH_SCHEME_PREFERENCE_OPTIONS"; +export * from "./resolveAwsSdkSigV4AConfig"; +export * from "./resolveAwsSdkSigV4Config"; diff --git a/amplify/functions/fetchDocuments/node_modules/@aws-sdk/core/dist-types/ts3.4/submodules/httpAuthSchemes/aws_sdk/resolveAwsSdkSigV4AConfig.d.ts b/amplify/functions/fetchDocuments/node_modules/@aws-sdk/core/dist-types/ts3.4/submodules/httpAuthSchemes/aws_sdk/resolveAwsSdkSigV4AConfig.d.ts new file mode 100644 index 0000000..9f949b0 --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@aws-sdk/core/dist-types/ts3.4/submodules/httpAuthSchemes/aws_sdk/resolveAwsSdkSigV4AConfig.d.ts @@ -0,0 +1,18 @@ +import { LoadedConfigSelectors } from "@smithy/node-config-provider"; +import { Provider } from "@smithy/types"; +export interface 
AwsSdkSigV4AAuthInputConfig { + sigv4aSigningRegionSet?: + | string[] + | undefined + | Provider; +} +export interface AwsSdkSigV4APreviouslyResolved {} +export interface AwsSdkSigV4AAuthResolvedConfig { + sigv4aSigningRegionSet: Provider; +} +export declare const resolveAwsSdkSigV4AConfig: ( + config: T & AwsSdkSigV4AAuthInputConfig & AwsSdkSigV4APreviouslyResolved +) => T & AwsSdkSigV4AAuthResolvedConfig; +export declare const NODE_SIGV4A_CONFIG_OPTIONS: LoadedConfigSelectors< + string[] | undefined +>; diff --git a/amplify/functions/fetchDocuments/node_modules/@aws-sdk/core/dist-types/ts3.4/submodules/httpAuthSchemes/aws_sdk/resolveAwsSdkSigV4Config.d.ts b/amplify/functions/fetchDocuments/node_modules/@aws-sdk/core/dist-types/ts3.4/submodules/httpAuthSchemes/aws_sdk/resolveAwsSdkSigV4Config.d.ts new file mode 100644 index 0000000..fc562d9 --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@aws-sdk/core/dist-types/ts3.4/submodules/httpAuthSchemes/aws_sdk/resolveAwsSdkSigV4Config.d.ts @@ -0,0 +1,65 @@ +import { MergeFunctions } from "@aws-sdk/types"; +import { SignatureV4CryptoInit, SignatureV4Init } from "@smithy/signature-v4"; +import { + AuthScheme, + AwsCredentialIdentity, + AwsCredentialIdentityProvider, + ChecksumConstructor, + HashConstructor, + MemoizedProvider, + Provider, + RegionInfoProvider, + RequestSigner, +} from "@smithy/types"; +export interface AwsSdkSigV4AuthInputConfig { + credentials?: AwsCredentialIdentity | AwsCredentialIdentityProvider; + signer?: + | RequestSigner + | ((authScheme?: AuthScheme) => Promise); + signingEscapePath?: boolean; + systemClockOffset?: number; + signingRegion?: string; + signerConstructor?: new ( + options: SignatureV4Init & SignatureV4CryptoInit + ) => RequestSigner; +} +export type AwsSdkSigV4Memoized = { + memoized?: boolean; + configBound?: boolean; + attributed?: boolean; +}; +export interface AwsSdkSigV4PreviouslyResolved { + credentialDefaultProvider?: ( + input: any + ) => MemoizedProvider; + 
region: string | Provider; + sha256: ChecksumConstructor | HashConstructor; + signingName?: string; + regionInfoProvider?: RegionInfoProvider; + defaultSigningName?: string; + serviceId: string; + useFipsEndpoint: Provider; + useDualstackEndpoint: Provider; +} +export interface AwsSdkSigV4AuthResolvedConfig { + credentials: MergeFunctions< + AwsCredentialIdentityProvider, + MemoizedProvider + > & + AwsSdkSigV4Memoized; + signer: (authScheme?: AuthScheme) => Promise; + signingEscapePath: boolean; + systemClockOffset: number; +} +export declare const resolveAwsSdkSigV4Config: ( + config: T & AwsSdkSigV4AuthInputConfig & AwsSdkSigV4PreviouslyResolved +) => T & AwsSdkSigV4AuthResolvedConfig; +export interface AWSSDKSigV4AuthInputConfig + extends AwsSdkSigV4AuthInputConfig {} +export interface AWSSDKSigV4PreviouslyResolved + extends AwsSdkSigV4PreviouslyResolved {} +export interface AWSSDKSigV4AuthResolvedConfig + extends AwsSdkSigV4AuthResolvedConfig {} +export declare const resolveAWSSDKSigV4Config: ( + config: T & AwsSdkSigV4AuthInputConfig & AwsSdkSigV4PreviouslyResolved +) => T & AwsSdkSigV4AuthResolvedConfig; diff --git a/amplify/functions/fetchDocuments/node_modules/@aws-sdk/core/dist-types/ts3.4/submodules/httpAuthSchemes/index.d.ts b/amplify/functions/fetchDocuments/node_modules/@aws-sdk/core/dist-types/ts3.4/submodules/httpAuthSchemes/index.d.ts new file mode 100644 index 0000000..29d0c3b --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@aws-sdk/core/dist-types/ts3.4/submodules/httpAuthSchemes/index.d.ts @@ -0,0 +1 @@ +export * from "./aws_sdk"; diff --git a/amplify/functions/fetchDocuments/node_modules/@aws-sdk/core/dist-types/ts3.4/submodules/httpAuthSchemes/utils/getArrayForCommaSeparatedString.d.ts b/amplify/functions/fetchDocuments/node_modules/@aws-sdk/core/dist-types/ts3.4/submodules/httpAuthSchemes/utils/getArrayForCommaSeparatedString.d.ts new file mode 100644 index 0000000..aee2328 --- /dev/null +++ 
b/amplify/functions/fetchDocuments/node_modules/@aws-sdk/core/dist-types/ts3.4/submodules/httpAuthSchemes/utils/getArrayForCommaSeparatedString.d.ts @@ -0,0 +1 @@ +export declare const getArrayForCommaSeparatedString: (str: string) => string[]; diff --git a/amplify/functions/fetchDocuments/node_modules/@aws-sdk/core/dist-types/ts3.4/submodules/httpAuthSchemes/utils/getDateHeader.d.ts b/amplify/functions/fetchDocuments/node_modules/@aws-sdk/core/dist-types/ts3.4/submodules/httpAuthSchemes/utils/getDateHeader.d.ts new file mode 100644 index 0000000..73fc529 --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@aws-sdk/core/dist-types/ts3.4/submodules/httpAuthSchemes/utils/getDateHeader.d.ts @@ -0,0 +1 @@ +export declare const getDateHeader: (response: unknown) => string | undefined; diff --git a/amplify/functions/fetchDocuments/node_modules/@aws-sdk/core/dist-types/ts3.4/submodules/httpAuthSchemes/utils/getSkewCorrectedDate.d.ts b/amplify/functions/fetchDocuments/node_modules/@aws-sdk/core/dist-types/ts3.4/submodules/httpAuthSchemes/utils/getSkewCorrectedDate.d.ts new file mode 100644 index 0000000..741c5ea --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@aws-sdk/core/dist-types/ts3.4/submodules/httpAuthSchemes/utils/getSkewCorrectedDate.d.ts @@ -0,0 +1 @@ +export declare const getSkewCorrectedDate: (systemClockOffset: number) => Date; diff --git a/amplify/functions/fetchDocuments/node_modules/@aws-sdk/core/dist-types/ts3.4/submodules/httpAuthSchemes/utils/getUpdatedSystemClockOffset.d.ts b/amplify/functions/fetchDocuments/node_modules/@aws-sdk/core/dist-types/ts3.4/submodules/httpAuthSchemes/utils/getUpdatedSystemClockOffset.d.ts new file mode 100644 index 0000000..eae3311 --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@aws-sdk/core/dist-types/ts3.4/submodules/httpAuthSchemes/utils/getUpdatedSystemClockOffset.d.ts @@ -0,0 +1,4 @@ +export declare const getUpdatedSystemClockOffset: ( + clockTime: string, + 
currentSystemClockOffset: number +) => number; diff --git a/amplify/functions/fetchDocuments/node_modules/@aws-sdk/core/dist-types/ts3.4/submodules/httpAuthSchemes/utils/index.d.ts b/amplify/functions/fetchDocuments/node_modules/@aws-sdk/core/dist-types/ts3.4/submodules/httpAuthSchemes/utils/index.d.ts new file mode 100644 index 0000000..07c2195 --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@aws-sdk/core/dist-types/ts3.4/submodules/httpAuthSchemes/utils/index.d.ts @@ -0,0 +1,3 @@ +export * from "./getDateHeader"; +export * from "./getSkewCorrectedDate"; +export * from "./getUpdatedSystemClockOffset"; diff --git a/amplify/functions/fetchDocuments/node_modules/@aws-sdk/core/dist-types/ts3.4/submodules/httpAuthSchemes/utils/isClockSkewed.d.ts b/amplify/functions/fetchDocuments/node_modules/@aws-sdk/core/dist-types/ts3.4/submodules/httpAuthSchemes/utils/isClockSkewed.d.ts new file mode 100644 index 0000000..9f994f8 --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@aws-sdk/core/dist-types/ts3.4/submodules/httpAuthSchemes/utils/isClockSkewed.d.ts @@ -0,0 +1,4 @@ +export declare const isClockSkewed: ( + clockTime: number, + systemClockOffset: number +) => boolean; diff --git a/amplify/functions/fetchDocuments/node_modules/@aws-sdk/core/dist-types/ts3.4/submodules/protocols/coercing-serializers.d.ts b/amplify/functions/fetchDocuments/node_modules/@aws-sdk/core/dist-types/ts3.4/submodules/protocols/coercing-serializers.d.ts new file mode 100644 index 0000000..7657ceb --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@aws-sdk/core/dist-types/ts3.4/submodules/protocols/coercing-serializers.d.ts @@ -0,0 +1,3 @@ +export declare const _toStr: (val: unknown) => string | undefined; +export declare const _toBool: (val: unknown) => boolean | undefined; +export declare const _toNum: (val: unknown) => number | undefined; diff --git 
a/amplify/functions/fetchDocuments/node_modules/@aws-sdk/core/dist-types/ts3.4/submodules/protocols/common.d.ts b/amplify/functions/fetchDocuments/node_modules/@aws-sdk/core/dist-types/ts3.4/submodules/protocols/common.d.ts new file mode 100644 index 0000000..73486db --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@aws-sdk/core/dist-types/ts3.4/submodules/protocols/common.d.ts @@ -0,0 +1,5 @@ +import { SerdeContext } from "@smithy/types"; +export declare const collectBodyString: ( + streamBody: any, + context: SerdeContext +) => Promise; diff --git a/amplify/functions/fetchDocuments/node_modules/@aws-sdk/core/dist-types/ts3.4/submodules/protocols/index.d.ts b/amplify/functions/fetchDocuments/node_modules/@aws-sdk/core/dist-types/ts3.4/submodules/protocols/index.d.ts new file mode 100644 index 0000000..09a6ac2 --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@aws-sdk/core/dist-types/ts3.4/submodules/protocols/index.d.ts @@ -0,0 +1,4 @@ +export * from "./coercing-serializers"; +export * from "./json/awsExpectUnion"; +export * from "./json/parseJsonBody"; +export * from "./xml/parseXmlBody"; diff --git a/amplify/functions/fetchDocuments/node_modules/@aws-sdk/core/dist-types/ts3.4/submodules/protocols/json/awsExpectUnion.d.ts b/amplify/functions/fetchDocuments/node_modules/@aws-sdk/core/dist-types/ts3.4/submodules/protocols/json/awsExpectUnion.d.ts new file mode 100644 index 0000000..fdc331e --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@aws-sdk/core/dist-types/ts3.4/submodules/protocols/json/awsExpectUnion.d.ts @@ -0,0 +1,3 @@ +export declare const awsExpectUnion: ( + value: unknown +) => Record | undefined; diff --git a/amplify/functions/fetchDocuments/node_modules/@aws-sdk/core/dist-types/ts3.4/submodules/protocols/json/parseJsonBody.d.ts b/amplify/functions/fetchDocuments/node_modules/@aws-sdk/core/dist-types/ts3.4/submodules/protocols/json/parseJsonBody.d.ts new file mode 100644 index 0000000..b400419 --- 
/dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@aws-sdk/core/dist-types/ts3.4/submodules/protocols/json/parseJsonBody.d.ts @@ -0,0 +1,13 @@ +import { HttpResponse, SerdeContext } from "@smithy/types"; +export declare const parseJsonBody: ( + streamBody: any, + context: SerdeContext +) => any; +export declare const parseJsonErrorBody: ( + errorBody: any, + context: SerdeContext +) => Promise; +export declare const loadRestJsonErrorCode: ( + output: HttpResponse, + data: any +) => string | undefined; diff --git a/amplify/functions/fetchDocuments/node_modules/@aws-sdk/core/dist-types/ts3.4/submodules/protocols/xml/parseXmlBody.d.ts b/amplify/functions/fetchDocuments/node_modules/@aws-sdk/core/dist-types/ts3.4/submodules/protocols/xml/parseXmlBody.d.ts new file mode 100644 index 0000000..f151834 --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@aws-sdk/core/dist-types/ts3.4/submodules/protocols/xml/parseXmlBody.d.ts @@ -0,0 +1,13 @@ +import { HttpResponse, SerdeContext } from "@smithy/types"; +export declare const parseXmlBody: ( + streamBody: any, + context: SerdeContext +) => any; +export declare const parseXmlErrorBody: ( + errorBody: any, + context: SerdeContext +) => Promise; +export declare const loadRestXmlErrorCode: ( + output: HttpResponse, + data: any +) => string | undefined; diff --git a/amplify/functions/fetchDocuments/node_modules/@aws-sdk/core/httpAuthSchemes.d.ts b/amplify/functions/fetchDocuments/node_modules/@aws-sdk/core/httpAuthSchemes.d.ts new file mode 100644 index 0000000..3783b5e --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@aws-sdk/core/httpAuthSchemes.d.ts @@ -0,0 +1,7 @@ +/** + * Do not edit: + * This is a compatibility redirect for contexts that do not understand package.json exports field. 
+ */ +declare module "@aws-sdk/core/httpAuthSchemes" { + export * from "@aws-sdk/core/dist-types/submodules/httpAuthSchemes/index.d"; +} diff --git a/amplify/functions/fetchDocuments/node_modules/@aws-sdk/core/httpAuthSchemes.js b/amplify/functions/fetchDocuments/node_modules/@aws-sdk/core/httpAuthSchemes.js new file mode 100644 index 0000000..17685b0 --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@aws-sdk/core/httpAuthSchemes.js @@ -0,0 +1,5 @@ +/** + * Do not edit: + * This is a compatibility redirect for contexts that do not understand package.json exports field. + */ +module.exports = require("./dist-cjs/submodules/httpAuthSchemes/index.js"); diff --git a/amplify/functions/fetchDocuments/node_modules/@aws-sdk/core/package.json b/amplify/functions/fetchDocuments/node_modules/@aws-sdk/core/package.json new file mode 100644 index 0000000..a41d77a --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@aws-sdk/core/package.json @@ -0,0 +1,119 @@ +{ + "name": "@aws-sdk/core", + "version": "3.799.0", + "description": "Core functions & classes shared by multiple AWS SDK clients.", + "scripts": { + "build": "yarn lint && concurrently 'yarn:build:cjs' 'yarn:build:es' 'yarn:build:types'", + "build:cjs": "node ../../scripts/compilation/inline core && rimraf ./dist-cjs/api-extractor-type-index.js", + "build:es": "tsc -p tsconfig.es.json && rimraf ./dist-es/api-extractor-type-index.js", + "build:include:deps": "lerna run --scope $npm_package_name --include-dependencies build", + "build:types": "tsc -p tsconfig.types.json", + "build:types:downlevel": "downlevel-dts dist-types dist-types/ts3.4", + "lint": "node ../../scripts/validation/submodules-linter.js --pkg core", + "clean": "rimraf ./dist-* && rimraf *.tsbuildinfo", + "extract:docs": "api-extractor run --local", + "test": "yarn g:vitest run", + "test:integration": "yarn g:vitest run -c vitest.config.integ.ts", + "test:watch": "yarn g:vitest watch", + "test:integration:watch": "yarn g:vitest 
watch -c vitest.config.integ.ts" + }, + "main": "./dist-cjs/index.js", + "module": "./dist-es/index.js", + "types": "./dist-types/index.d.ts", + "exports": { + ".": { + "types": "./dist-types/index.d.ts", + "module": "./dist-es/index.js", + "node": "./dist-cjs/index.js", + "import": "./dist-es/index.js", + "require": "./dist-cjs/index.js" + }, + "./package.json": { + "module": "./package.json", + "node": "./package.json", + "import": "./package.json", + "require": "./package.json" + }, + "./client": { + "types": "./dist-types/submodules/client/index.d.ts", + "module": "./dist-es/submodules/client/index.js", + "node": "./dist-cjs/submodules/client/index.js", + "import": "./dist-es/submodules/client/index.js", + "require": "./dist-cjs/submodules/client/index.js" + }, + "./httpAuthSchemes": { + "types": "./dist-types/submodules/httpAuthSchemes/index.d.ts", + "module": "./dist-es/submodules/httpAuthSchemes/index.js", + "node": "./dist-cjs/submodules/httpAuthSchemes/index.js", + "import": "./dist-es/submodules/httpAuthSchemes/index.js", + "require": "./dist-cjs/submodules/httpAuthSchemes/index.js" + }, + "./account-id-endpoint": { + "types": "./dist-types/submodules/account-id-endpoint/index.d.ts", + "module": "./dist-es/submodules/account-id-endpoint/index.js", + "node": "./dist-cjs/submodules/account-id-endpoint/index.js", + "import": "./dist-es/submodules/account-id-endpoint/index.js", + "require": "./dist-cjs/submodules/account-id-endpoint/index.js" + }, + "./protocols": { + "types": "./dist-types/submodules/protocols/index.d.ts", + "module": "./dist-es/submodules/protocols/index.js", + "node": "./dist-cjs/submodules/protocols/index.js", + "import": "./dist-es/submodules/protocols/index.js", + "require": "./dist-cjs/submodules/protocols/index.js" + } + }, + "files": [ + "./account-id-endpoint.d.ts", + "./account-id-endpoint.js", + "./client.d.ts", + "./client.js", + "./httpAuthSchemes.d.ts", + "./httpAuthSchemes.js", + "./protocols.d.ts", + "./protocols.js", + 
"dist-*/**" + ], + "sideEffects": false, + "author": { + "name": "AWS SDK for JavaScript Team", + "url": "https://aws.amazon.com/javascript/" + }, + "license": "Apache-2.0", + "dependencies": { + "@aws-sdk/types": "3.775.0", + "@smithy/core": "^3.3.0", + "@smithy/node-config-provider": "^4.0.2", + "@smithy/property-provider": "^4.0.2", + "@smithy/protocol-http": "^5.1.0", + "@smithy/signature-v4": "^5.1.0", + "@smithy/smithy-client": "^4.2.1", + "@smithy/types": "^4.2.0", + "@smithy/util-middleware": "^4.0.2", + "fast-xml-parser": "4.4.1", + "tslib": "^2.6.2" + }, + "devDependencies": { + "@tsconfig/recommended": "1.0.1", + "concurrently": "7.0.0", + "downlevel-dts": "0.10.1", + "rimraf": "3.0.2", + "typescript": "~5.2.2" + }, + "engines": { + "node": ">=18.0.0" + }, + "typesVersions": { + "<4.0": { + "dist-types/*": [ + "dist-types/ts3.4/*" + ] + } + }, + "homepage": "https://github.com/aws/aws-sdk-js-v3/tree/main/packages/core", + "repository": { + "type": "git", + "url": "https://github.com/aws/aws-sdk-js-v3.git", + "directory": "packages/core" + } +} diff --git a/amplify/functions/fetchDocuments/node_modules/@aws-sdk/core/protocols.d.ts b/amplify/functions/fetchDocuments/node_modules/@aws-sdk/core/protocols.d.ts new file mode 100644 index 0000000..7a36334 --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@aws-sdk/core/protocols.d.ts @@ -0,0 +1,7 @@ +/** + * Do not edit: + * This is a compatibility redirect for contexts that do not understand package.json exports field. 
+ */ +declare module "@aws-sdk/core/protocols" { + export * from "@aws-sdk/core/dist-types/submodules/protocols/index.d"; +} diff --git a/amplify/functions/fetchDocuments/node_modules/@aws-sdk/core/protocols.js b/amplify/functions/fetchDocuments/node_modules/@aws-sdk/core/protocols.js new file mode 100644 index 0000000..e2916e8 --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@aws-sdk/core/protocols.js @@ -0,0 +1,5 @@ +/** + * Do not edit: + * This is a compatibility redirect for contexts that do not understand package.json exports field. + */ +module.exports = require("./dist-cjs/submodules/protocols/index.js"); diff --git a/amplify/functions/fetchDocuments/node_modules/@aws-sdk/credential-provider-env/LICENSE b/amplify/functions/fetchDocuments/node_modules/@aws-sdk/credential-provider-env/LICENSE new file mode 100644 index 0000000..7b6491b --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@aws-sdk/credential-provider-env/LICENSE @@ -0,0 +1,201 @@ +Apache License + Version 2.0, January 2004 + http://www.apache.org/licenses/ + + TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION + + 1. Definitions. + + "License" shall mean the terms and conditions for use, reproduction, + and distribution as defined by Sections 1 through 9 of this document. + + "Licensor" shall mean the copyright owner or entity authorized by + the copyright owner that is granting the License. + + "Legal Entity" shall mean the union of the acting entity and all + other entities that control, are controlled by, or are under common + control with that entity. For the purposes of this definition, + "control" means (i) the power, direct or indirect, to cause the + direction or management of such entity, whether by contract or + otherwise, or (ii) ownership of fifty percent (50%) or more of the + outstanding shares, or (iii) beneficial ownership of such entity. 
+ + "You" (or "Your") shall mean an individual or Legal Entity + exercising permissions granted by this License. + + "Source" form shall mean the preferred form for making modifications, + including but not limited to software source code, documentation + source, and configuration files. + + "Object" form shall mean any form resulting from mechanical + transformation or translation of a Source form, including but + not limited to compiled object code, generated documentation, + and conversions to other media types. + + "Work" shall mean the work of authorship, whether in Source or + Object form, made available under the License, as indicated by a + copyright notice that is included in or attached to the work + (an example is provided in the Appendix below). + + "Derivative Works" shall mean any work, whether in Source or Object + form, that is based on (or derived from) the Work and for which the + editorial revisions, annotations, elaborations, or other modifications + represent, as a whole, an original work of authorship. For the purposes + of this License, Derivative Works shall not include works that remain + separable from, or merely link (or bind by name) to the interfaces of, + the Work and Derivative Works thereof. + + "Contribution" shall mean any work of authorship, including + the original version of the Work and any modifications or additions + to that Work or Derivative Works thereof, that is intentionally + submitted to Licensor for inclusion in the Work by the copyright owner + or by an individual or Legal Entity authorized to submit on behalf of + the copyright owner. 
For the purposes of this definition, "submitted" + means any form of electronic, verbal, or written communication sent + to the Licensor or its representatives, including but not limited to + communication on electronic mailing lists, source code control systems, + and issue tracking systems that are managed by, or on behalf of, the + Licensor for the purpose of discussing and improving the Work, but + excluding communication that is conspicuously marked or otherwise + designated in writing by the copyright owner as "Not a Contribution." + + "Contributor" shall mean Licensor and any individual or Legal Entity + on behalf of whom a Contribution has been received by Licensor and + subsequently incorporated within the Work. + + 2. Grant of Copyright License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + copyright license to reproduce, prepare Derivative Works of, + publicly display, publicly perform, sublicense, and distribute the + Work and such Derivative Works in Source or Object form. + + 3. Grant of Patent License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + (except as stated in this section) patent license to make, have made, + use, offer to sell, sell, import, and otherwise transfer the Work, + where such license applies only to those patent claims licensable + by such Contributor that are necessarily infringed by their + Contribution(s) alone or by combination of their Contribution(s) + with the Work to which such Contribution(s) was submitted. 
If You + institute patent litigation against any entity (including a + cross-claim or counterclaim in a lawsuit) alleging that the Work + or a Contribution incorporated within the Work constitutes direct + or contributory patent infringement, then any patent licenses + granted to You under this License for that Work shall terminate + as of the date such litigation is filed. + + 4. Redistribution. You may reproduce and distribute copies of the + Work or Derivative Works thereof in any medium, with or without + modifications, and in Source or Object form, provided that You + meet the following conditions: + + (a) You must give any other recipients of the Work or + Derivative Works a copy of this License; and + + (b) You must cause any modified files to carry prominent notices + stating that You changed the files; and + + (c) You must retain, in the Source form of any Derivative Works + that You distribute, all copyright, patent, trademark, and + attribution notices from the Source form of the Work, + excluding those notices that do not pertain to any part of + the Derivative Works; and + + (d) If the Work includes a "NOTICE" text file as part of its + distribution, then any Derivative Works that You distribute must + include a readable copy of the attribution notices contained + within such NOTICE file, excluding those notices that do not + pertain to any part of the Derivative Works, in at least one + of the following places: within a NOTICE text file distributed + as part of the Derivative Works; within the Source form or + documentation, if provided along with the Derivative Works; or, + within a display generated by the Derivative Works, if and + wherever such third-party notices normally appear. The contents + of the NOTICE file are for informational purposes only and + do not modify the License. 
You may add Your own attribution + notices within Derivative Works that You distribute, alongside + or as an addendum to the NOTICE text from the Work, provided + that such additional attribution notices cannot be construed + as modifying the License. + + You may add Your own copyright statement to Your modifications and + may provide additional or different license terms and conditions + for use, reproduction, or distribution of Your modifications, or + for any such Derivative Works as a whole, provided Your use, + reproduction, and distribution of the Work otherwise complies with + the conditions stated in this License. + + 5. Submission of Contributions. Unless You explicitly state otherwise, + any Contribution intentionally submitted for inclusion in the Work + by You to the Licensor shall be under the terms and conditions of + this License, without any additional terms or conditions. + Notwithstanding the above, nothing herein shall supersede or modify + the terms of any separate license agreement you may have executed + with Licensor regarding such Contributions. + + 6. Trademarks. This License does not grant permission to use the trade + names, trademarks, service marks, or product names of the Licensor, + except as required for reasonable and customary use in describing the + origin of the Work and reproducing the content of the NOTICE file. + + 7. Disclaimer of Warranty. Unless required by applicable law or + agreed to in writing, Licensor provides the Work (and each + Contributor provides its Contributions) on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or + implied, including, without limitation, any warranties or conditions + of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A + PARTICULAR PURPOSE. You are solely responsible for determining the + appropriateness of using or redistributing the Work and assume any + risks associated with Your exercise of permissions under this License. + + 8. 
Limitation of Liability. In no event and under no legal theory, + whether in tort (including negligence), contract, or otherwise, + unless required by applicable law (such as deliberate and grossly + negligent acts) or agreed to in writing, shall any Contributor be + liable to You for damages, including any direct, indirect, special, + incidental, or consequential damages of any character arising as a + result of this License or out of the use or inability to use the + Work (including but not limited to damages for loss of goodwill, + work stoppage, computer failure or malfunction, or any and all + other commercial damages or losses), even if such Contributor + has been advised of the possibility of such damages. + + 9. Accepting Warranty or Additional Liability. While redistributing + the Work or Derivative Works thereof, You may choose to offer, + and charge a fee for, acceptance of support, warranty, indemnity, + or other liability obligations and/or rights consistent with this + License. However, in accepting such obligations, You may act only + on Your own behalf and on Your sole responsibility, not on behalf + of any other Contributor, and only if You agree to indemnify, + defend, and hold each Contributor harmless for any liability + incurred by, or claims asserted against, such Contributor by reason + of your accepting any such warranty or additional liability. + + END OF TERMS AND CONDITIONS + + APPENDIX: How to apply the Apache License to your work. + + To apply the Apache License to your work, attach the following + boilerplate notice, with the fields enclosed by brackets "{}" + replaced with your own identifying information. (Don't include + the brackets!) The text should be enclosed in the appropriate + comment syntax for the file format. We also recommend that a + file or class name and description of purpose be included on the + same "printed page" as the copyright notice for easier + identification within third-party archives. 
+ + Copyright 2018-2020 Amazon.com, Inc. or its affiliates. All Rights Reserved. + + Licensed under the Apache License, Version 2.0 (the "License"); + you may not use this file except in compliance with the License. + You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + + Unless required by applicable law or agreed to in writing, software + distributed under the License is distributed on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + See the License for the specific language governing permissions and + limitations under the License. \ No newline at end of file diff --git a/amplify/functions/fetchDocuments/node_modules/@aws-sdk/credential-provider-env/README.md b/amplify/functions/fetchDocuments/node_modules/@aws-sdk/credential-provider-env/README.md new file mode 100644 index 0000000..61a6436 --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@aws-sdk/credential-provider-env/README.md @@ -0,0 +1,11 @@ +# @aws-sdk/credential-provider-env + +[![NPM version](https://img.shields.io/npm/v/@aws-sdk/credential-provider-env/latest.svg)](https://www.npmjs.com/package/@aws-sdk/credential-provider-env) +[![NPM downloads](https://img.shields.io/npm/dm/@aws-sdk/credential-provider-env.svg)](https://www.npmjs.com/package/@aws-sdk/credential-provider-env) + +> An internal package + +## Usage + +You probably shouldn't, at least directly. Please use [@aws-sdk/credential-providers](https://www.npmjs.com/package/@aws-sdk/credential-providers) +instead. 
diff --git a/amplify/functions/fetchDocuments/node_modules/@aws-sdk/credential-provider-env/dist-cjs/index.js b/amplify/functions/fetchDocuments/node_modules/@aws-sdk/credential-provider-env/dist-cjs/index.js new file mode 100644 index 0000000..c906a6b --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@aws-sdk/credential-provider-env/dist-cjs/index.js @@ -0,0 +1,76 @@ +"use strict"; +var __defProp = Object.defineProperty; +var __getOwnPropDesc = Object.getOwnPropertyDescriptor; +var __getOwnPropNames = Object.getOwnPropertyNames; +var __hasOwnProp = Object.prototype.hasOwnProperty; +var __name = (target, value) => __defProp(target, "name", { value, configurable: true }); +var __export = (target, all) => { + for (var name in all) + __defProp(target, name, { get: all[name], enumerable: true }); +}; +var __copyProps = (to, from, except, desc) => { + if (from && typeof from === "object" || typeof from === "function") { + for (let key of __getOwnPropNames(from)) + if (!__hasOwnProp.call(to, key) && key !== except) + __defProp(to, key, { get: () => from[key], enumerable: !(desc = __getOwnPropDesc(from, key)) || desc.enumerable }); + } + return to; +}; +var __toCommonJS = (mod) => __copyProps(__defProp({}, "__esModule", { value: true }), mod); + +// src/index.ts +var index_exports = {}; +__export(index_exports, { + ENV_ACCOUNT_ID: () => ENV_ACCOUNT_ID, + ENV_CREDENTIAL_SCOPE: () => ENV_CREDENTIAL_SCOPE, + ENV_EXPIRATION: () => ENV_EXPIRATION, + ENV_KEY: () => ENV_KEY, + ENV_SECRET: () => ENV_SECRET, + ENV_SESSION: () => ENV_SESSION, + fromEnv: () => fromEnv +}); +module.exports = __toCommonJS(index_exports); + +// src/fromEnv.ts +var import_client = require("@aws-sdk/core/client"); +var import_property_provider = require("@smithy/property-provider"); +var ENV_KEY = "AWS_ACCESS_KEY_ID"; +var ENV_SECRET = "AWS_SECRET_ACCESS_KEY"; +var ENV_SESSION = "AWS_SESSION_TOKEN"; +var ENV_EXPIRATION = "AWS_CREDENTIAL_EXPIRATION"; +var ENV_CREDENTIAL_SCOPE = 
"AWS_CREDENTIAL_SCOPE"; +var ENV_ACCOUNT_ID = "AWS_ACCOUNT_ID"; +var fromEnv = /* @__PURE__ */ __name((init) => async () => { + init?.logger?.debug("@aws-sdk/credential-provider-env - fromEnv"); + const accessKeyId = process.env[ENV_KEY]; + const secretAccessKey = process.env[ENV_SECRET]; + const sessionToken = process.env[ENV_SESSION]; + const expiry = process.env[ENV_EXPIRATION]; + const credentialScope = process.env[ENV_CREDENTIAL_SCOPE]; + const accountId = process.env[ENV_ACCOUNT_ID]; + if (accessKeyId && secretAccessKey) { + const credentials = { + accessKeyId, + secretAccessKey, + ...sessionToken && { sessionToken }, + ...expiry && { expiration: new Date(expiry) }, + ...credentialScope && { credentialScope }, + ...accountId && { accountId } + }; + (0, import_client.setCredentialFeature)(credentials, "CREDENTIALS_ENV_VARS", "g"); + return credentials; + } + throw new import_property_provider.CredentialsProviderError("Unable to find environment variable credentials.", { logger: init?.logger }); +}, "fromEnv"); +// Annotate the CommonJS export names for ESM import in node: + +0 && (module.exports = { + ENV_KEY, + ENV_SECRET, + ENV_SESSION, + ENV_EXPIRATION, + ENV_CREDENTIAL_SCOPE, + ENV_ACCOUNT_ID, + fromEnv +}); + diff --git a/amplify/functions/fetchDocuments/node_modules/@aws-sdk/credential-provider-env/dist-es/fromEnv.js b/amplify/functions/fetchDocuments/node_modules/@aws-sdk/credential-provider-env/dist-es/fromEnv.js new file mode 100644 index 0000000..a6a2928 --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@aws-sdk/credential-provider-env/dist-es/fromEnv.js @@ -0,0 +1,30 @@ +import { setCredentialFeature } from "@aws-sdk/core/client"; +import { CredentialsProviderError } from "@smithy/property-provider"; +export const ENV_KEY = "AWS_ACCESS_KEY_ID"; +export const ENV_SECRET = "AWS_SECRET_ACCESS_KEY"; +export const ENV_SESSION = "AWS_SESSION_TOKEN"; +export const ENV_EXPIRATION = "AWS_CREDENTIAL_EXPIRATION"; +export const 
ENV_CREDENTIAL_SCOPE = "AWS_CREDENTIAL_SCOPE"; +export const ENV_ACCOUNT_ID = "AWS_ACCOUNT_ID"; +export const fromEnv = (init) => async () => { + init?.logger?.debug("@aws-sdk/credential-provider-env - fromEnv"); + const accessKeyId = process.env[ENV_KEY]; + const secretAccessKey = process.env[ENV_SECRET]; + const sessionToken = process.env[ENV_SESSION]; + const expiry = process.env[ENV_EXPIRATION]; + const credentialScope = process.env[ENV_CREDENTIAL_SCOPE]; + const accountId = process.env[ENV_ACCOUNT_ID]; + if (accessKeyId && secretAccessKey) { + const credentials = { + accessKeyId, + secretAccessKey, + ...(sessionToken && { sessionToken }), + ...(expiry && { expiration: new Date(expiry) }), + ...(credentialScope && { credentialScope }), + ...(accountId && { accountId }), + }; + setCredentialFeature(credentials, "CREDENTIALS_ENV_VARS", "g"); + return credentials; + } + throw new CredentialsProviderError("Unable to find environment variable credentials.", { logger: init?.logger }); +}; diff --git a/amplify/functions/fetchDocuments/node_modules/@aws-sdk/credential-provider-env/dist-es/index.js b/amplify/functions/fetchDocuments/node_modules/@aws-sdk/credential-provider-env/dist-es/index.js new file mode 100644 index 0000000..17bf6da --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@aws-sdk/credential-provider-env/dist-es/index.js @@ -0,0 +1 @@ +export * from "./fromEnv"; diff --git a/amplify/functions/fetchDocuments/node_modules/@aws-sdk/credential-provider-env/dist-types/fromEnv.d.ts b/amplify/functions/fetchDocuments/node_modules/@aws-sdk/credential-provider-env/dist-types/fromEnv.d.ts new file mode 100644 index 0000000..541aa69 --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@aws-sdk/credential-provider-env/dist-types/fromEnv.d.ts @@ -0,0 +1,36 @@ +import type { CredentialProviderOptions } from "@aws-sdk/types"; +import { AwsCredentialIdentityProvider } from "@smithy/types"; +export interface FromEnvInit extends 
CredentialProviderOptions { +} +/** + * @internal + */ +export declare const ENV_KEY = "AWS_ACCESS_KEY_ID"; +/** + * @internal + */ +export declare const ENV_SECRET = "AWS_SECRET_ACCESS_KEY"; +/** + * @internal + */ +export declare const ENV_SESSION = "AWS_SESSION_TOKEN"; +/** + * @internal + */ +export declare const ENV_EXPIRATION = "AWS_CREDENTIAL_EXPIRATION"; +/** + * @internal + */ +export declare const ENV_CREDENTIAL_SCOPE = "AWS_CREDENTIAL_SCOPE"; +/** + * @internal + */ +export declare const ENV_ACCOUNT_ID = "AWS_ACCOUNT_ID"; +/** + * @internal + * + * Source AWS credentials from known environment variables. If either the + * `AWS_ACCESS_KEY_ID` or `AWS_SECRET_ACCESS_KEY` environment variable is not + * set in this process, the provider will return a rejected promise. + */ +export declare const fromEnv: (init?: FromEnvInit) => AwsCredentialIdentityProvider; diff --git a/amplify/functions/fetchDocuments/node_modules/@aws-sdk/credential-provider-env/dist-types/index.d.ts b/amplify/functions/fetchDocuments/node_modules/@aws-sdk/credential-provider-env/dist-types/index.d.ts new file mode 100644 index 0000000..fe76e31 --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@aws-sdk/credential-provider-env/dist-types/index.d.ts @@ -0,0 +1,4 @@ +/** + * @internal + */ +export * from "./fromEnv"; diff --git a/amplify/functions/fetchDocuments/node_modules/@aws-sdk/credential-provider-env/dist-types/ts3.4/fromEnv.d.ts b/amplify/functions/fetchDocuments/node_modules/@aws-sdk/credential-provider-env/dist-types/ts3.4/fromEnv.d.ts new file mode 100644 index 0000000..55c454e --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@aws-sdk/credential-provider-env/dist-types/ts3.4/fromEnv.d.ts @@ -0,0 +1,12 @@ +import { CredentialProviderOptions } from "@aws-sdk/types"; +import { AwsCredentialIdentityProvider } from "@smithy/types"; +export interface FromEnvInit extends CredentialProviderOptions {} +export declare const ENV_KEY = "AWS_ACCESS_KEY_ID"; 
+export declare const ENV_SECRET = "AWS_SECRET_ACCESS_KEY"; +export declare const ENV_SESSION = "AWS_SESSION_TOKEN"; +export declare const ENV_EXPIRATION = "AWS_CREDENTIAL_EXPIRATION"; +export declare const ENV_CREDENTIAL_SCOPE = "AWS_CREDENTIAL_SCOPE"; +export declare const ENV_ACCOUNT_ID = "AWS_ACCOUNT_ID"; +export declare const fromEnv: ( + init?: FromEnvInit +) => AwsCredentialIdentityProvider; diff --git a/amplify/functions/fetchDocuments/node_modules/@aws-sdk/credential-provider-env/dist-types/ts3.4/index.d.ts b/amplify/functions/fetchDocuments/node_modules/@aws-sdk/credential-provider-env/dist-types/ts3.4/index.d.ts new file mode 100644 index 0000000..17bf6da --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@aws-sdk/credential-provider-env/dist-types/ts3.4/index.d.ts @@ -0,0 +1 @@ +export * from "./fromEnv"; diff --git a/amplify/functions/fetchDocuments/node_modules/@aws-sdk/credential-provider-env/package.json b/amplify/functions/fetchDocuments/node_modules/@aws-sdk/credential-provider-env/package.json new file mode 100644 index 0000000..a66a0de --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@aws-sdk/credential-provider-env/package.json @@ -0,0 +1,62 @@ +{ + "name": "@aws-sdk/credential-provider-env", + "version": "3.799.0", + "description": "AWS credential provider that sources credentials from known environment variables", + "main": "./dist-cjs/index.js", + "module": "./dist-es/index.js", + "scripts": { + "build": "concurrently 'yarn:build:cjs' 'yarn:build:es' 'yarn:build:types'", + "build:cjs": "node ../../scripts/compilation/inline credential-provider-env", + "build:es": "tsc -p tsconfig.es.json", + "build:include:deps": "lerna run --scope $npm_package_name --include-dependencies build", + "build:types": "tsc -p tsconfig.types.json", + "build:types:downlevel": "downlevel-dts dist-types dist-types/ts3.4", + "clean": "rimraf ./dist-* && rimraf *.tsbuildinfo", + "test": "yarn g:vitest run", + "test:watch": "yarn g:vitest 
watch" + }, + "keywords": [ + "aws", + "credentials" + ], + "author": { + "name": "AWS SDK for JavaScript Team", + "url": "https://aws.amazon.com/javascript/" + }, + "license": "Apache-2.0", + "dependencies": { + "@aws-sdk/core": "3.799.0", + "@aws-sdk/types": "3.775.0", + "@smithy/property-provider": "^4.0.2", + "@smithy/types": "^4.2.0", + "tslib": "^2.6.2" + }, + "devDependencies": { + "@tsconfig/recommended": "1.0.1", + "@types/node": "^18.19.69", + "concurrently": "7.0.0", + "downlevel-dts": "0.10.1", + "rimraf": "3.0.2", + "typescript": "~5.2.2" + }, + "types": "./dist-types/index.d.ts", + "engines": { + "node": ">=18.0.0" + }, + "typesVersions": { + "<4.0": { + "dist-types/*": [ + "dist-types/ts3.4/*" + ] + } + }, + "files": [ + "dist-*/**" + ], + "homepage": "https://github.com/aws/aws-sdk-js-v3/tree/main/packages/credential-provider-env", + "repository": { + "type": "git", + "url": "https://github.com/aws/aws-sdk-js-v3.git", + "directory": "packages/credential-provider-env" + } +} diff --git a/amplify/functions/fetchDocuments/node_modules/@aws-sdk/credential-provider-http/README.md b/amplify/functions/fetchDocuments/node_modules/@aws-sdk/credential-provider-http/README.md new file mode 100644 index 0000000..e8f19f8 --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@aws-sdk/credential-provider-http/README.md @@ -0,0 +1,10 @@ +# @aws-sdk/credential-provider-http + +[![NPM version](https://img.shields.io/npm/v/@aws-sdk/credential-provider-http/latest.svg)](https://www.npmjs.com/package/@aws-sdk/credential-provider-http) +[![NPM downloads](https://img.shields.io/npm/dm/@aws-sdk/credential-provider-http.svg)](https://www.npmjs.com/package/@aws-sdk/credential-provider-http) + +> An internal transitively required package. 
+ +## Usage + +See https://www.npmjs.com/package/@aws-sdk/credential-providers diff --git a/amplify/functions/fetchDocuments/node_modules/@aws-sdk/credential-provider-http/dist-cjs/fromHttp/checkUrl.js b/amplify/functions/fetchDocuments/node_modules/@aws-sdk/credential-provider-http/dist-cjs/fromHttp/checkUrl.js new file mode 100644 index 0000000..c4adb5f --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@aws-sdk/credential-provider-http/dist-cjs/fromHttp/checkUrl.js @@ -0,0 +1,46 @@ +"use strict"; +Object.defineProperty(exports, "__esModule", { value: true }); +exports.checkUrl = void 0; +const property_provider_1 = require("@smithy/property-provider"); +const LOOPBACK_CIDR_IPv4 = "127.0.0.0/8"; +const LOOPBACK_CIDR_IPv6 = "::1/128"; +const ECS_CONTAINER_HOST = "169.254.170.2"; +const EKS_CONTAINER_HOST_IPv4 = "169.254.170.23"; +const EKS_CONTAINER_HOST_IPv6 = "[fd00:ec2::23]"; +const checkUrl = (url, logger) => { + if (url.protocol === "https:") { + return; + } + if (url.hostname === ECS_CONTAINER_HOST || + url.hostname === EKS_CONTAINER_HOST_IPv4 || + url.hostname === EKS_CONTAINER_HOST_IPv6) { + return; + } + if (url.hostname.includes("[")) { + if (url.hostname === "[::1]" || url.hostname === "[0000:0000:0000:0000:0000:0000:0000:0001]") { + return; + } + } + else { + if (url.hostname === "localhost") { + return; + } + const ipComponents = url.hostname.split("."); + const inRange = (component) => { + const num = parseInt(component, 10); + return 0 <= num && num <= 255; + }; + if (ipComponents[0] === "127" && + inRange(ipComponents[1]) && + inRange(ipComponents[2]) && + inRange(ipComponents[3]) && + ipComponents.length === 4) { + return; + } + } + throw new property_provider_1.CredentialsProviderError(`URL not accepted. 
It must either be HTTPS or match one of the following: + - loopback CIDR 127.0.0.0/8 or [::1/128] + - ECS container host 169.254.170.2 + - EKS container host 169.254.170.23 or [fd00:ec2::23]`, { logger }); +}; +exports.checkUrl = checkUrl; diff --git a/amplify/functions/fetchDocuments/node_modules/@aws-sdk/credential-provider-http/dist-cjs/fromHttp/fromHttp.browser.js b/amplify/functions/fetchDocuments/node_modules/@aws-sdk/credential-provider-http/dist-cjs/fromHttp/fromHttp.browser.js new file mode 100644 index 0000000..d7c0efa --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@aws-sdk/credential-provider-http/dist-cjs/fromHttp/fromHttp.browser.js @@ -0,0 +1,31 @@ +"use strict"; +Object.defineProperty(exports, "__esModule", { value: true }); +exports.fromHttp = void 0; +const fetch_http_handler_1 = require("@smithy/fetch-http-handler"); +const property_provider_1 = require("@smithy/property-provider"); +const checkUrl_1 = require("./checkUrl"); +const requestHelpers_1 = require("./requestHelpers"); +const retry_wrapper_1 = require("./retry-wrapper"); +const fromHttp = (options = {}) => { + options.logger?.debug("@aws-sdk/credential-provider-http - fromHttp"); + let host; + const full = options.credentialsFullUri; + if (full) { + host = full; + } + else { + throw new property_provider_1.CredentialsProviderError("No HTTP credential provider host provided.", { logger: options.logger }); + } + const url = new URL(host); + (0, checkUrl_1.checkUrl)(url, options.logger); + const requestHandler = new fetch_http_handler_1.FetchHttpHandler(); + return (0, retry_wrapper_1.retryWrapper)(async () => { + const request = (0, requestHelpers_1.createGetRequest)(url); + if (options.authorizationToken) { + request.headers.Authorization = options.authorizationToken; + } + const result = await requestHandler.handle(request); + return (0, requestHelpers_1.getCredentials)(result.response); + }, options.maxRetries ?? 3, options.timeout ?? 
1000); +}; +exports.fromHttp = fromHttp; diff --git a/amplify/functions/fetchDocuments/node_modules/@aws-sdk/credential-provider-http/dist-cjs/fromHttp/fromHttp.js b/amplify/functions/fetchDocuments/node_modules/@aws-sdk/credential-provider-http/dist-cjs/fromHttp/fromHttp.js new file mode 100644 index 0000000..6e0269a --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@aws-sdk/credential-provider-http/dist-cjs/fromHttp/fromHttp.js @@ -0,0 +1,68 @@ +"use strict"; +Object.defineProperty(exports, "__esModule", { value: true }); +exports.fromHttp = void 0; +const tslib_1 = require("tslib"); +const client_1 = require("@aws-sdk/core/client"); +const node_http_handler_1 = require("@smithy/node-http-handler"); +const property_provider_1 = require("@smithy/property-provider"); +const promises_1 = tslib_1.__importDefault(require("fs/promises")); +const checkUrl_1 = require("./checkUrl"); +const requestHelpers_1 = require("./requestHelpers"); +const retry_wrapper_1 = require("./retry-wrapper"); +const AWS_CONTAINER_CREDENTIALS_RELATIVE_URI = "AWS_CONTAINER_CREDENTIALS_RELATIVE_URI"; +const DEFAULT_LINK_LOCAL_HOST = "http://169.254.170.2"; +const AWS_CONTAINER_CREDENTIALS_FULL_URI = "AWS_CONTAINER_CREDENTIALS_FULL_URI"; +const AWS_CONTAINER_AUTHORIZATION_TOKEN_FILE = "AWS_CONTAINER_AUTHORIZATION_TOKEN_FILE"; +const AWS_CONTAINER_AUTHORIZATION_TOKEN = "AWS_CONTAINER_AUTHORIZATION_TOKEN"; +const fromHttp = (options = {}) => { + options.logger?.debug("@aws-sdk/credential-provider-http - fromHttp"); + let host; + const relative = options.awsContainerCredentialsRelativeUri ?? process.env[AWS_CONTAINER_CREDENTIALS_RELATIVE_URI]; + const full = options.awsContainerCredentialsFullUri ?? process.env[AWS_CONTAINER_CREDENTIALS_FULL_URI]; + const token = options.awsContainerAuthorizationToken ?? process.env[AWS_CONTAINER_AUTHORIZATION_TOKEN]; + const tokenFile = options.awsContainerAuthorizationTokenFile ?? 
process.env[AWS_CONTAINER_AUTHORIZATION_TOKEN_FILE]; + const warn = options.logger?.constructor?.name === "NoOpLogger" || !options.logger ? console.warn : options.logger.warn; + if (relative && full) { + warn("@aws-sdk/credential-provider-http: " + + "you have set both awsContainerCredentialsRelativeUri and awsContainerCredentialsFullUri."); + warn("awsContainerCredentialsFullUri will take precedence."); + } + if (token && tokenFile) { + warn("@aws-sdk/credential-provider-http: " + + "you have set both awsContainerAuthorizationToken and awsContainerAuthorizationTokenFile."); + warn("awsContainerAuthorizationToken will take precedence."); + } + if (full) { + host = full; + } + else if (relative) { + host = `${DEFAULT_LINK_LOCAL_HOST}${relative}`; + } + else { + throw new property_provider_1.CredentialsProviderError(`No HTTP credential provider host provided. +Set AWS_CONTAINER_CREDENTIALS_FULL_URI or AWS_CONTAINER_CREDENTIALS_RELATIVE_URI.`, { logger: options.logger }); + } + const url = new URL(host); + (0, checkUrl_1.checkUrl)(url, options.logger); + const requestHandler = new node_http_handler_1.NodeHttpHandler({ + requestTimeout: options.timeout ?? 1000, + connectionTimeout: options.timeout ?? 1000, + }); + return (0, retry_wrapper_1.retryWrapper)(async () => { + const request = (0, requestHelpers_1.createGetRequest)(url); + if (token) { + request.headers.Authorization = token; + } + else if (tokenFile) { + request.headers.Authorization = (await promises_1.default.readFile(tokenFile)).toString(); + } + try { + const result = await requestHandler.handle(request); + return (0, requestHelpers_1.getCredentials)(result.response).then((creds) => (0, client_1.setCredentialFeature)(creds, "CREDENTIALS_HTTP", "z")); + } + catch (e) { + throw new property_provider_1.CredentialsProviderError(String(e), { logger: options.logger }); + } + }, options.maxRetries ?? 3, options.timeout ?? 
1000); +}; +exports.fromHttp = fromHttp; diff --git a/amplify/functions/fetchDocuments/node_modules/@aws-sdk/credential-provider-http/dist-cjs/fromHttp/fromHttpTypes.js b/amplify/functions/fetchDocuments/node_modules/@aws-sdk/credential-provider-http/dist-cjs/fromHttp/fromHttpTypes.js new file mode 100644 index 0000000..c8ad2e5 --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@aws-sdk/credential-provider-http/dist-cjs/fromHttp/fromHttpTypes.js @@ -0,0 +1,2 @@ +"use strict"; +Object.defineProperty(exports, "__esModule", { value: true }); diff --git a/amplify/functions/fetchDocuments/node_modules/@aws-sdk/credential-provider-http/dist-cjs/fromHttp/requestHelpers.js b/amplify/functions/fetchDocuments/node_modules/@aws-sdk/credential-provider-http/dist-cjs/fromHttp/requestHelpers.js new file mode 100644 index 0000000..5229d79 --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@aws-sdk/credential-provider-http/dist-cjs/fromHttp/requestHelpers.js @@ -0,0 +1,54 @@ +"use strict"; +Object.defineProperty(exports, "__esModule", { value: true }); +exports.getCredentials = exports.createGetRequest = void 0; +const property_provider_1 = require("@smithy/property-provider"); +const protocol_http_1 = require("@smithy/protocol-http"); +const smithy_client_1 = require("@smithy/smithy-client"); +const util_stream_1 = require("@smithy/util-stream"); +function createGetRequest(url) { + return new protocol_http_1.HttpRequest({ + protocol: url.protocol, + hostname: url.hostname, + port: Number(url.port), + path: url.pathname, + query: Array.from(url.searchParams.entries()).reduce((acc, [k, v]) => { + acc[k] = v; + return acc; + }, {}), + fragment: url.hash, + }); +} +exports.createGetRequest = createGetRequest; +async function getCredentials(response, logger) { + const stream = (0, util_stream_1.sdkStreamMixin)(response.body); + const str = await stream.transformToString(); + if (response.statusCode === 200) { + const parsed = JSON.parse(str); + if (typeof 
parsed.AccessKeyId !== "string" || + typeof parsed.SecretAccessKey !== "string" || + typeof parsed.Token !== "string" || + typeof parsed.Expiration !== "string") { + throw new property_provider_1.CredentialsProviderError("HTTP credential provider response not of the required format, an object matching: " + + "{ AccessKeyId: string, SecretAccessKey: string, Token: string, Expiration: string(rfc3339) }", { logger }); + } + return { + accessKeyId: parsed.AccessKeyId, + secretAccessKey: parsed.SecretAccessKey, + sessionToken: parsed.Token, + expiration: (0, smithy_client_1.parseRfc3339DateTime)(parsed.Expiration), + }; + } + if (response.statusCode >= 400 && response.statusCode < 500) { + let parsedBody = {}; + try { + parsedBody = JSON.parse(str); + } + catch (e) { } + throw Object.assign(new property_provider_1.CredentialsProviderError(`Server responded with status: ${response.statusCode}`, { logger }), { + Code: parsedBody.Code, + Message: parsedBody.Message, + }); + } + throw new property_provider_1.CredentialsProviderError(`Server responded with status: ${response.statusCode}`, { logger }); +} +exports.getCredentials = getCredentials; diff --git a/amplify/functions/fetchDocuments/node_modules/@aws-sdk/credential-provider-http/dist-cjs/fromHttp/retry-wrapper.js b/amplify/functions/fetchDocuments/node_modules/@aws-sdk/credential-provider-http/dist-cjs/fromHttp/retry-wrapper.js new file mode 100644 index 0000000..b99b2ef --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@aws-sdk/credential-provider-http/dist-cjs/fromHttp/retry-wrapper.js @@ -0,0 +1,17 @@ +"use strict"; +Object.defineProperty(exports, "__esModule", { value: true }); +exports.retryWrapper = void 0; +const retryWrapper = (toRetry, maxRetries, delayMs) => { + return async () => { + for (let i = 0; i < maxRetries; ++i) { + try { + return await toRetry(); + } + catch (e) { + await new Promise((resolve) => setTimeout(resolve, delayMs)); + } + } + return await toRetry(); + }; +}; 
+exports.retryWrapper = retryWrapper; diff --git a/amplify/functions/fetchDocuments/node_modules/@aws-sdk/credential-provider-http/dist-cjs/index.browser.js b/amplify/functions/fetchDocuments/node_modules/@aws-sdk/credential-provider-http/dist-cjs/index.browser.js new file mode 100644 index 0000000..9300747 --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@aws-sdk/credential-provider-http/dist-cjs/index.browser.js @@ -0,0 +1,5 @@ +"use strict"; +Object.defineProperty(exports, "__esModule", { value: true }); +exports.fromHttp = void 0; +var fromHttp_browser_1 = require("./fromHttp/fromHttp.browser"); +Object.defineProperty(exports, "fromHttp", { enumerable: true, get: function () { return fromHttp_browser_1.fromHttp; } }); diff --git a/amplify/functions/fetchDocuments/node_modules/@aws-sdk/credential-provider-http/dist-cjs/index.js b/amplify/functions/fetchDocuments/node_modules/@aws-sdk/credential-provider-http/dist-cjs/index.js new file mode 100644 index 0000000..0286ea0 --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@aws-sdk/credential-provider-http/dist-cjs/index.js @@ -0,0 +1,5 @@ +"use strict"; +Object.defineProperty(exports, "__esModule", { value: true }); +exports.fromHttp = void 0; +var fromHttp_1 = require("./fromHttp/fromHttp"); +Object.defineProperty(exports, "fromHttp", { enumerable: true, get: function () { return fromHttp_1.fromHttp; } }); diff --git a/amplify/functions/fetchDocuments/node_modules/@aws-sdk/credential-provider-http/dist-es/fromHttp/checkUrl.js b/amplify/functions/fetchDocuments/node_modules/@aws-sdk/credential-provider-http/dist-es/fromHttp/checkUrl.js new file mode 100644 index 0000000..2a42ed7 --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@aws-sdk/credential-provider-http/dist-es/fromHttp/checkUrl.js @@ -0,0 +1,42 @@ +import { CredentialsProviderError } from "@smithy/property-provider"; +const LOOPBACK_CIDR_IPv4 = "127.0.0.0/8"; +const LOOPBACK_CIDR_IPv6 = "::1/128"; +const 
ECS_CONTAINER_HOST = "169.254.170.2"; +const EKS_CONTAINER_HOST_IPv4 = "169.254.170.23"; +const EKS_CONTAINER_HOST_IPv6 = "[fd00:ec2::23]"; +export const checkUrl = (url, logger) => { + if (url.protocol === "https:") { + return; + } + if (url.hostname === ECS_CONTAINER_HOST || + url.hostname === EKS_CONTAINER_HOST_IPv4 || + url.hostname === EKS_CONTAINER_HOST_IPv6) { + return; + } + if (url.hostname.includes("[")) { + if (url.hostname === "[::1]" || url.hostname === "[0000:0000:0000:0000:0000:0000:0000:0001]") { + return; + } + } + else { + if (url.hostname === "localhost") { + return; + } + const ipComponents = url.hostname.split("."); + const inRange = (component) => { + const num = parseInt(component, 10); + return 0 <= num && num <= 255; + }; + if (ipComponents[0] === "127" && + inRange(ipComponents[1]) && + inRange(ipComponents[2]) && + inRange(ipComponents[3]) && + ipComponents.length === 4) { + return; + } + } + throw new CredentialsProviderError(`URL not accepted. It must either be HTTPS or match one of the following: + - loopback CIDR 127.0.0.0/8 or [::1/128] + - ECS container host 169.254.170.2 + - EKS container host 169.254.170.23 or [fd00:ec2::23]`, { logger }); +}; diff --git a/amplify/functions/fetchDocuments/node_modules/@aws-sdk/credential-provider-http/dist-es/fromHttp/fromHttp.browser.js b/amplify/functions/fetchDocuments/node_modules/@aws-sdk/credential-provider-http/dist-es/fromHttp/fromHttp.browser.js new file mode 100644 index 0000000..7189b92 --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@aws-sdk/credential-provider-http/dist-es/fromHttp/fromHttp.browser.js @@ -0,0 +1,27 @@ +import { FetchHttpHandler } from "@smithy/fetch-http-handler"; +import { CredentialsProviderError } from "@smithy/property-provider"; +import { checkUrl } from "./checkUrl"; +import { createGetRequest, getCredentials } from "./requestHelpers"; +import { retryWrapper } from "./retry-wrapper"; +export const fromHttp = (options = {}) => { + 
options.logger?.debug("@aws-sdk/credential-provider-http - fromHttp"); + let host; + const full = options.credentialsFullUri; + if (full) { + host = full; + } + else { + throw new CredentialsProviderError("No HTTP credential provider host provided.", { logger: options.logger }); + } + const url = new URL(host); + checkUrl(url, options.logger); + const requestHandler = new FetchHttpHandler(); + return retryWrapper(async () => { + const request = createGetRequest(url); + if (options.authorizationToken) { + request.headers.Authorization = options.authorizationToken; + } + const result = await requestHandler.handle(request); + return getCredentials(result.response); + }, options.maxRetries ?? 3, options.timeout ?? 1000); +}; diff --git a/amplify/functions/fetchDocuments/node_modules/@aws-sdk/credential-provider-http/dist-es/fromHttp/fromHttp.js b/amplify/functions/fetchDocuments/node_modules/@aws-sdk/credential-provider-http/dist-es/fromHttp/fromHttp.js new file mode 100644 index 0000000..36dd8a3 --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@aws-sdk/credential-provider-http/dist-es/fromHttp/fromHttp.js @@ -0,0 +1,63 @@ +import { setCredentialFeature } from "@aws-sdk/core/client"; +import { NodeHttpHandler } from "@smithy/node-http-handler"; +import { CredentialsProviderError } from "@smithy/property-provider"; +import fs from "fs/promises"; +import { checkUrl } from "./checkUrl"; +import { createGetRequest, getCredentials } from "./requestHelpers"; +import { retryWrapper } from "./retry-wrapper"; +const AWS_CONTAINER_CREDENTIALS_RELATIVE_URI = "AWS_CONTAINER_CREDENTIALS_RELATIVE_URI"; +const DEFAULT_LINK_LOCAL_HOST = "http://169.254.170.2"; +const AWS_CONTAINER_CREDENTIALS_FULL_URI = "AWS_CONTAINER_CREDENTIALS_FULL_URI"; +const AWS_CONTAINER_AUTHORIZATION_TOKEN_FILE = "AWS_CONTAINER_AUTHORIZATION_TOKEN_FILE"; +const AWS_CONTAINER_AUTHORIZATION_TOKEN = "AWS_CONTAINER_AUTHORIZATION_TOKEN"; +export const fromHttp = (options = {}) => { + 
options.logger?.debug("@aws-sdk/credential-provider-http - fromHttp"); + let host; + const relative = options.awsContainerCredentialsRelativeUri ?? process.env[AWS_CONTAINER_CREDENTIALS_RELATIVE_URI]; + const full = options.awsContainerCredentialsFullUri ?? process.env[AWS_CONTAINER_CREDENTIALS_FULL_URI]; + const token = options.awsContainerAuthorizationToken ?? process.env[AWS_CONTAINER_AUTHORIZATION_TOKEN]; + const tokenFile = options.awsContainerAuthorizationTokenFile ?? process.env[AWS_CONTAINER_AUTHORIZATION_TOKEN_FILE]; + const warn = options.logger?.constructor?.name === "NoOpLogger" || !options.logger ? console.warn : options.logger.warn; + if (relative && full) { + warn("@aws-sdk/credential-provider-http: " + + "you have set both awsContainerCredentialsRelativeUri and awsContainerCredentialsFullUri."); + warn("awsContainerCredentialsFullUri will take precedence."); + } + if (token && tokenFile) { + warn("@aws-sdk/credential-provider-http: " + + "you have set both awsContainerAuthorizationToken and awsContainerAuthorizationTokenFile."); + warn("awsContainerAuthorizationToken will take precedence."); + } + if (full) { + host = full; + } + else if (relative) { + host = `${DEFAULT_LINK_LOCAL_HOST}${relative}`; + } + else { + throw new CredentialsProviderError(`No HTTP credential provider host provided. +Set AWS_CONTAINER_CREDENTIALS_FULL_URI or AWS_CONTAINER_CREDENTIALS_RELATIVE_URI.`, { logger: options.logger }); + } + const url = new URL(host); + checkUrl(url, options.logger); + const requestHandler = new NodeHttpHandler({ + requestTimeout: options.timeout ?? 1000, + connectionTimeout: options.timeout ?? 
1000, + }); + return retryWrapper(async () => { + const request = createGetRequest(url); + if (token) { + request.headers.Authorization = token; + } + else if (tokenFile) { + request.headers.Authorization = (await fs.readFile(tokenFile)).toString(); + } + try { + const result = await requestHandler.handle(request); + return getCredentials(result.response).then((creds) => setCredentialFeature(creds, "CREDENTIALS_HTTP", "z")); + } + catch (e) { + throw new CredentialsProviderError(String(e), { logger: options.logger }); + } + }, options.maxRetries ?? 3, options.timeout ?? 1000); +}; diff --git a/amplify/functions/fetchDocuments/node_modules/@aws-sdk/credential-provider-http/dist-es/fromHttp/fromHttpTypes.js b/amplify/functions/fetchDocuments/node_modules/@aws-sdk/credential-provider-http/dist-es/fromHttp/fromHttpTypes.js new file mode 100644 index 0000000..cb0ff5c --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@aws-sdk/credential-provider-http/dist-es/fromHttp/fromHttpTypes.js @@ -0,0 +1 @@ +export {}; diff --git a/amplify/functions/fetchDocuments/node_modules/@aws-sdk/credential-provider-http/dist-es/fromHttp/requestHelpers.js b/amplify/functions/fetchDocuments/node_modules/@aws-sdk/credential-provider-http/dist-es/fromHttp/requestHelpers.js new file mode 100644 index 0000000..9e271ce --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@aws-sdk/credential-provider-http/dist-es/fromHttp/requestHelpers.js @@ -0,0 +1,49 @@ +import { CredentialsProviderError } from "@smithy/property-provider"; +import { HttpRequest } from "@smithy/protocol-http"; +import { parseRfc3339DateTime } from "@smithy/smithy-client"; +import { sdkStreamMixin } from "@smithy/util-stream"; +export function createGetRequest(url) { + return new HttpRequest({ + protocol: url.protocol, + hostname: url.hostname, + port: Number(url.port), + path: url.pathname, + query: Array.from(url.searchParams.entries()).reduce((acc, [k, v]) => { + acc[k] = v; + return acc; + }, {}), 
+ fragment: url.hash, + }); +} +export async function getCredentials(response, logger) { + const stream = sdkStreamMixin(response.body); + const str = await stream.transformToString(); + if (response.statusCode === 200) { + const parsed = JSON.parse(str); + if (typeof parsed.AccessKeyId !== "string" || + typeof parsed.SecretAccessKey !== "string" || + typeof parsed.Token !== "string" || + typeof parsed.Expiration !== "string") { + throw new CredentialsProviderError("HTTP credential provider response not of the required format, an object matching: " + + "{ AccessKeyId: string, SecretAccessKey: string, Token: string, Expiration: string(rfc3339) }", { logger }); + } + return { + accessKeyId: parsed.AccessKeyId, + secretAccessKey: parsed.SecretAccessKey, + sessionToken: parsed.Token, + expiration: parseRfc3339DateTime(parsed.Expiration), + }; + } + if (response.statusCode >= 400 && response.statusCode < 500) { + let parsedBody = {}; + try { + parsedBody = JSON.parse(str); + } + catch (e) { } + throw Object.assign(new CredentialsProviderError(`Server responded with status: ${response.statusCode}`, { logger }), { + Code: parsedBody.Code, + Message: parsedBody.Message, + }); + } + throw new CredentialsProviderError(`Server responded with status: ${response.statusCode}`, { logger }); +} diff --git a/amplify/functions/fetchDocuments/node_modules/@aws-sdk/credential-provider-http/dist-es/fromHttp/retry-wrapper.js b/amplify/functions/fetchDocuments/node_modules/@aws-sdk/credential-provider-http/dist-es/fromHttp/retry-wrapper.js new file mode 100644 index 0000000..7006f3c --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@aws-sdk/credential-provider-http/dist-es/fromHttp/retry-wrapper.js @@ -0,0 +1,13 @@ +export const retryWrapper = (toRetry, maxRetries, delayMs) => { + return async () => { + for (let i = 0; i < maxRetries; ++i) { + try { + return await toRetry(); + } + catch (e) { + await new Promise((resolve) => setTimeout(resolve, delayMs)); + } + } + return 
await toRetry(); + }; +}; diff --git a/amplify/functions/fetchDocuments/node_modules/@aws-sdk/credential-provider-http/dist-es/index.browser.js b/amplify/functions/fetchDocuments/node_modules/@aws-sdk/credential-provider-http/dist-es/index.browser.js new file mode 100644 index 0000000..98204c5 --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@aws-sdk/credential-provider-http/dist-es/index.browser.js @@ -0,0 +1 @@ +export { fromHttp } from "./fromHttp/fromHttp.browser"; diff --git a/amplify/functions/fetchDocuments/node_modules/@aws-sdk/credential-provider-http/dist-es/index.js b/amplify/functions/fetchDocuments/node_modules/@aws-sdk/credential-provider-http/dist-es/index.js new file mode 100644 index 0000000..2911386 --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@aws-sdk/credential-provider-http/dist-es/index.js @@ -0,0 +1 @@ +export { fromHttp } from "./fromHttp/fromHttp"; diff --git a/amplify/functions/fetchDocuments/node_modules/@aws-sdk/credential-provider-http/dist-types/fromHttp/checkUrl.d.ts b/amplify/functions/fetchDocuments/node_modules/@aws-sdk/credential-provider-http/dist-types/fromHttp/checkUrl.d.ts new file mode 100644 index 0000000..933b12c --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@aws-sdk/credential-provider-http/dist-types/fromHttp/checkUrl.d.ts @@ -0,0 +1,9 @@ +import { Logger } from "@smithy/types"; +/** + * @internal + * + * @param url - to be validated. + * @param logger - passed to CredentialsProviderError. + * @throws if not acceptable to this provider. 
+ */ +export declare const checkUrl: (url: URL, logger?: Logger) => void; diff --git a/amplify/functions/fetchDocuments/node_modules/@aws-sdk/credential-provider-http/dist-types/fromHttp/fromHttp.browser.d.ts b/amplify/functions/fetchDocuments/node_modules/@aws-sdk/credential-provider-http/dist-types/fromHttp/fromHttp.browser.d.ts new file mode 100644 index 0000000..cb3a03b --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@aws-sdk/credential-provider-http/dist-types/fromHttp/fromHttp.browser.d.ts @@ -0,0 +1,6 @@ +import { AwsCredentialIdentityProvider } from "@smithy/types"; +import type { FromHttpOptions } from "./fromHttpTypes"; +/** + * Creates a provider that gets credentials via HTTP request. + */ +export declare const fromHttp: (options?: FromHttpOptions) => AwsCredentialIdentityProvider; diff --git a/amplify/functions/fetchDocuments/node_modules/@aws-sdk/credential-provider-http/dist-types/fromHttp/fromHttp.d.ts b/amplify/functions/fetchDocuments/node_modules/@aws-sdk/credential-provider-http/dist-types/fromHttp/fromHttp.d.ts new file mode 100644 index 0000000..cb3a03b --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@aws-sdk/credential-provider-http/dist-types/fromHttp/fromHttp.d.ts @@ -0,0 +1,6 @@ +import { AwsCredentialIdentityProvider } from "@smithy/types"; +import type { FromHttpOptions } from "./fromHttpTypes"; +/** + * Creates a provider that gets credentials via HTTP request. 
+ */ +export declare const fromHttp: (options?: FromHttpOptions) => AwsCredentialIdentityProvider; diff --git a/amplify/functions/fetchDocuments/node_modules/@aws-sdk/credential-provider-http/dist-types/fromHttp/fromHttpTypes.d.ts b/amplify/functions/fetchDocuments/node_modules/@aws-sdk/credential-provider-http/dist-types/fromHttp/fromHttpTypes.d.ts new file mode 100644 index 0000000..b751ded --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@aws-sdk/credential-provider-http/dist-types/fromHttp/fromHttpTypes.d.ts @@ -0,0 +1,69 @@ +import type { CredentialProviderOptions } from "@aws-sdk/types"; +/** + * @public + * + * Input for the fromHttp function in the HTTP Credentials Provider for Node.js. + */ +export interface FromHttpOptions extends CredentialProviderOptions { + /** + * If this value is provided, it will be used as-is. + * + * For browser environments, use instead {@link credentialsFullUri}. + */ + awsContainerCredentialsFullUri?: string; + /** + * If this value is provided instead of the full URI, it + * will be appended to the default link local host of 169.254.170.2. + * + * Not supported in browsers. + */ + awsContainerCredentialsRelativeUri?: string; + /** + * Will be read on each credentials request to + * add an Authorization request header value. + * + * Not supported in browsers. + */ + awsContainerAuthorizationTokenFile?: string; + /** + * An alternative to awsContainerAuthorizationTokenFile, + * this is the token value itself. + * + * For browser environments, use instead {@link authorizationToken}. + */ + awsContainerAuthorizationToken?: string; + /** + * BROWSER ONLY. + * + * In browsers, a relative URI is not allowed, and a full URI must be provided. + * HTTPS is required. + * + * This value is required for the browser environment. + */ + credentialsFullUri?: string; + /** + * BROWSER ONLY. + * + * Providing this value will set an "Authorization" request + * header value on the GET request. 
+ */ + authorizationToken?: string; + /** + * Default is 3 retry attempts or 4 total attempts. + */ + maxRetries?: number; + /** + * Default is 1000ms. Time in milliseconds to spend waiting between retry attempts. + */ + timeout?: number; +} +/** + * @public + */ +export type HttpProviderCredentials = { + AccessKeyId: string; + SecretAccessKey: string; + Token: string; + AccountId?: string; + Expiration: string; +}; diff --git a/amplify/functions/fetchDocuments/node_modules/@aws-sdk/credential-provider-http/dist-types/fromHttp/requestHelpers.d.ts b/amplify/functions/fetchDocuments/node_modules/@aws-sdk/credential-provider-http/dist-types/fromHttp/requestHelpers.d.ts new file mode 100644 index 0000000..6d1c16e --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@aws-sdk/credential-provider-http/dist-types/fromHttp/requestHelpers.d.ts @@ -0,0 +1,11 @@ +import { AwsCredentialIdentity } from "@aws-sdk/types"; +import { HttpRequest } from "@smithy/protocol-http"; +import { HttpResponse, Logger } from "@smithy/types"; +/** + * @internal + */ +export declare function createGetRequest(url: URL): HttpRequest; +/** + * @internal + */ +export declare function getCredentials(response: HttpResponse, logger?: Logger): Promise; diff --git a/amplify/functions/fetchDocuments/node_modules/@aws-sdk/credential-provider-http/dist-types/fromHttp/retry-wrapper.d.ts b/amplify/functions/fetchDocuments/node_modules/@aws-sdk/credential-provider-http/dist-types/fromHttp/retry-wrapper.d.ts new file mode 100644 index 0000000..bf63add --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@aws-sdk/credential-provider-http/dist-types/fromHttp/retry-wrapper.d.ts @@ -0,0 +1,10 @@ +/** + * @internal + */ +export interface RetryableProvider { + (): Promise; +} +/** + * @internal + */ +export declare const retryWrapper: (toRetry: RetryableProvider, maxRetries: number, delayMs: number) => RetryableProvider; diff --git 
a/amplify/functions/fetchDocuments/node_modules/@aws-sdk/credential-provider-http/dist-types/index.browser.d.ts b/amplify/functions/fetchDocuments/node_modules/@aws-sdk/credential-provider-http/dist-types/index.browser.d.ts new file mode 100644 index 0000000..2a9e4ec --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@aws-sdk/credential-provider-http/dist-types/index.browser.d.ts @@ -0,0 +1,2 @@ +export { fromHttp } from "./fromHttp/fromHttp.browser"; +export type { FromHttpOptions, HttpProviderCredentials } from "./fromHttp/fromHttpTypes"; diff --git a/amplify/functions/fetchDocuments/node_modules/@aws-sdk/credential-provider-http/dist-types/index.d.ts b/amplify/functions/fetchDocuments/node_modules/@aws-sdk/credential-provider-http/dist-types/index.d.ts new file mode 100644 index 0000000..b1e9985 --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@aws-sdk/credential-provider-http/dist-types/index.d.ts @@ -0,0 +1,2 @@ +export { fromHttp } from "./fromHttp/fromHttp"; +export type { FromHttpOptions, HttpProviderCredentials } from "./fromHttp/fromHttpTypes"; diff --git a/amplify/functions/fetchDocuments/node_modules/@aws-sdk/credential-provider-http/dist-types/ts3.4/fromHttp/checkUrl.d.ts b/amplify/functions/fetchDocuments/node_modules/@aws-sdk/credential-provider-http/dist-types/ts3.4/fromHttp/checkUrl.d.ts new file mode 100644 index 0000000..9f518b0 --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@aws-sdk/credential-provider-http/dist-types/ts3.4/fromHttp/checkUrl.d.ts @@ -0,0 +1,2 @@ +import { Logger } from "@smithy/types"; +export declare const checkUrl: (url: URL, logger?: Logger) => void; diff --git a/amplify/functions/fetchDocuments/node_modules/@aws-sdk/credential-provider-http/dist-types/ts3.4/fromHttp/fromHttp.browser.d.ts b/amplify/functions/fetchDocuments/node_modules/@aws-sdk/credential-provider-http/dist-types/ts3.4/fromHttp/fromHttp.browser.d.ts new file mode 100644 index 0000000..00f1506 --- /dev/null 
+++ b/amplify/functions/fetchDocuments/node_modules/@aws-sdk/credential-provider-http/dist-types/ts3.4/fromHttp/fromHttp.browser.d.ts @@ -0,0 +1,5 @@ +import { AwsCredentialIdentityProvider } from "@smithy/types"; +import { FromHttpOptions } from "./fromHttpTypes"; +export declare const fromHttp: ( + options?: FromHttpOptions +) => AwsCredentialIdentityProvider; diff --git a/amplify/functions/fetchDocuments/node_modules/@aws-sdk/credential-provider-http/dist-types/ts3.4/fromHttp/fromHttp.d.ts b/amplify/functions/fetchDocuments/node_modules/@aws-sdk/credential-provider-http/dist-types/ts3.4/fromHttp/fromHttp.d.ts new file mode 100644 index 0000000..00f1506 --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@aws-sdk/credential-provider-http/dist-types/ts3.4/fromHttp/fromHttp.d.ts @@ -0,0 +1,5 @@ +import { AwsCredentialIdentityProvider } from "@smithy/types"; +import { FromHttpOptions } from "./fromHttpTypes"; +export declare const fromHttp: ( + options?: FromHttpOptions +) => AwsCredentialIdentityProvider; diff --git a/amplify/functions/fetchDocuments/node_modules/@aws-sdk/credential-provider-http/dist-types/ts3.4/fromHttp/fromHttpTypes.d.ts b/amplify/functions/fetchDocuments/node_modules/@aws-sdk/credential-provider-http/dist-types/ts3.4/fromHttp/fromHttpTypes.d.ts new file mode 100644 index 0000000..767b6b0 --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@aws-sdk/credential-provider-http/dist-types/ts3.4/fromHttp/fromHttpTypes.d.ts @@ -0,0 +1,18 @@ +import { CredentialProviderOptions } from "@aws-sdk/types"; +export interface FromHttpOptions extends CredentialProviderOptions { + awsContainerCredentialsFullUri?: string; + awsContainerCredentialsRelativeUri?: string; + awsContainerAuthorizationTokenFile?: string; + awsContainerAuthorizationToken?: string; + credentialsFullUri?: string; + authorizationToken?: string; + maxRetries?: number; + timeout?: number; +} +export type HttpProviderCredentials = { + AccessKeyId: string; + 
SecretAccessKey: string; + Token: string; + AccountId?: string; + Expiration: string; +}; diff --git a/amplify/functions/fetchDocuments/node_modules/@aws-sdk/credential-provider-http/dist-types/ts3.4/fromHttp/requestHelpers.d.ts b/amplify/functions/fetchDocuments/node_modules/@aws-sdk/credential-provider-http/dist-types/ts3.4/fromHttp/requestHelpers.d.ts new file mode 100644 index 0000000..68a3285 --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@aws-sdk/credential-provider-http/dist-types/ts3.4/fromHttp/requestHelpers.d.ts @@ -0,0 +1,8 @@ +import { AwsCredentialIdentity } from "@aws-sdk/types"; +import { HttpRequest } from "@smithy/protocol-http"; +import { HttpResponse, Logger } from "@smithy/types"; +export declare function createGetRequest(url: URL): HttpRequest; +export declare function getCredentials( + response: HttpResponse, + logger?: Logger +): Promise; diff --git a/amplify/functions/fetchDocuments/node_modules/@aws-sdk/credential-provider-http/dist-types/ts3.4/fromHttp/retry-wrapper.d.ts b/amplify/functions/fetchDocuments/node_modules/@aws-sdk/credential-provider-http/dist-types/ts3.4/fromHttp/retry-wrapper.d.ts new file mode 100644 index 0000000..f992038 --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@aws-sdk/credential-provider-http/dist-types/ts3.4/fromHttp/retry-wrapper.d.ts @@ -0,0 +1,8 @@ +export interface RetryableProvider { + (): Promise; +} +export declare const retryWrapper: ( + toRetry: RetryableProvider, + maxRetries: number, + delayMs: number +) => RetryableProvider; diff --git a/amplify/functions/fetchDocuments/node_modules/@aws-sdk/credential-provider-http/dist-types/ts3.4/index.browser.d.ts b/amplify/functions/fetchDocuments/node_modules/@aws-sdk/credential-provider-http/dist-types/ts3.4/index.browser.d.ts new file mode 100644 index 0000000..40696b9 --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@aws-sdk/credential-provider-http/dist-types/ts3.4/index.browser.d.ts @@ -0,0 +1,5 @@ 
+export { fromHttp } from "./fromHttp/fromHttp.browser"; +export { + FromHttpOptions, + HttpProviderCredentials, +} from "./fromHttp/fromHttpTypes"; diff --git a/amplify/functions/fetchDocuments/node_modules/@aws-sdk/credential-provider-http/dist-types/ts3.4/index.d.ts b/amplify/functions/fetchDocuments/node_modules/@aws-sdk/credential-provider-http/dist-types/ts3.4/index.d.ts new file mode 100644 index 0000000..560256f --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@aws-sdk/credential-provider-http/dist-types/ts3.4/index.d.ts @@ -0,0 +1,5 @@ +export { fromHttp } from "./fromHttp/fromHttp"; +export { + FromHttpOptions, + HttpProviderCredentials, +} from "./fromHttp/fromHttpTypes"; diff --git a/amplify/functions/fetchDocuments/node_modules/@aws-sdk/credential-provider-http/package.json b/amplify/functions/fetchDocuments/node_modules/@aws-sdk/credential-provider-http/package.json new file mode 100644 index 0000000..2ad154b --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@aws-sdk/credential-provider-http/package.json @@ -0,0 +1,69 @@ +{ + "name": "@aws-sdk/credential-provider-http", + "version": "3.799.0", + "description": "AWS credential provider for containers and HTTP sources", + "main": "./dist-cjs/index.js", + "module": "./dist-es/index.js", + "browser": "./dist-es/index.browser.js", + "react-native": "./dist-es/index.browser.js", + "scripts": { + "build": "concurrently 'yarn:build:cjs' 'yarn:build:es' 'yarn:build:types'", + "build:cjs": "node ../../scripts/compilation/inline credential-provider-http", + "build:es": "tsc -p tsconfig.es.json", + "build:include:deps": "lerna run --scope $npm_package_name --include-dependencies build", + "build:types": "tsc -p tsconfig.types.json", + "build:types:downlevel": "downlevel-dts dist-types dist-types/ts3.4", + "clean": "rimraf ./dist-* && rimraf *.tsbuildinfo", + "test": "yarn g:vitest run", + "test:watch": "yarn g:vitest watch" + }, + "keywords": [ + "aws", + "credentials" + ], + 
"author": { + "name": "AWS SDK for JavaScript Team", + "url": "https://aws.amazon.com/javascript/" + }, + "license": "Apache-2.0", + "dependencies": { + "@aws-sdk/core": "3.799.0", + "@aws-sdk/types": "3.775.0", + "@smithy/fetch-http-handler": "^5.0.2", + "@smithy/node-http-handler": "^4.0.4", + "@smithy/property-provider": "^4.0.2", + "@smithy/protocol-http": "^5.1.0", + "@smithy/smithy-client": "^4.2.1", + "@smithy/types": "^4.2.0", + "@smithy/util-stream": "^4.2.0", + "tslib": "^2.6.2" + }, + "devDependencies": { + "@tsconfig/recommended": "1.0.1", + "@types/node": "^18.19.69", + "concurrently": "7.0.0", + "downlevel-dts": "0.10.1", + "rimraf": "3.0.2", + "typescript": "~5.2.2" + }, + "types": "./dist-types/index.d.ts", + "engines": { + "node": ">=18.0.0" + }, + "typesVersions": { + "<4.0": { + "dist-types/*": [ + "dist-types/ts3.4/*" + ] + } + }, + "files": [ + "dist-*/**" + ], + "homepage": "https://github.com/aws/aws-sdk-js-v3/tree/main/packages/credential-provider-http", + "repository": { + "type": "git", + "url": "https://github.com/aws/aws-sdk-js-v3.git", + "directory": "packages/credential-provider-http" + } +} diff --git a/amplify/functions/fetchDocuments/node_modules/@aws-sdk/credential-provider-ini/LICENSE b/amplify/functions/fetchDocuments/node_modules/@aws-sdk/credential-provider-ini/LICENSE new file mode 100644 index 0000000..7b6491b --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@aws-sdk/credential-provider-ini/LICENSE @@ -0,0 +1,201 @@ +Apache License + Version 2.0, January 2004 + http://www.apache.org/licenses/ + + TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION + + 1. Definitions. + + "License" shall mean the terms and conditions for use, reproduction, + and distribution as defined by Sections 1 through 9 of this document. + + "Licensor" shall mean the copyright owner or entity authorized by + the copyright owner that is granting the License. 
+ + "Legal Entity" shall mean the union of the acting entity and all + other entities that control, are controlled by, or are under common + control with that entity. For the purposes of this definition, + "control" means (i) the power, direct or indirect, to cause the + direction or management of such entity, whether by contract or + otherwise, or (ii) ownership of fifty percent (50%) or more of the + outstanding shares, or (iii) beneficial ownership of such entity. + + "You" (or "Your") shall mean an individual or Legal Entity + exercising permissions granted by this License. + + "Source" form shall mean the preferred form for making modifications, + including but not limited to software source code, documentation + source, and configuration files. + + "Object" form shall mean any form resulting from mechanical + transformation or translation of a Source form, including but + not limited to compiled object code, generated documentation, + and conversions to other media types. + + "Work" shall mean the work of authorship, whether in Source or + Object form, made available under the License, as indicated by a + copyright notice that is included in or attached to the work + (an example is provided in the Appendix below). + + "Derivative Works" shall mean any work, whether in Source or Object + form, that is based on (or derived from) the Work and for which the + editorial revisions, annotations, elaborations, or other modifications + represent, as a whole, an original work of authorship. For the purposes + of this License, Derivative Works shall not include works that remain + separable from, or merely link (or bind by name) to the interfaces of, + the Work and Derivative Works thereof. 
+ + "Contribution" shall mean any work of authorship, including + the original version of the Work and any modifications or additions + to that Work or Derivative Works thereof, that is intentionally + submitted to Licensor for inclusion in the Work by the copyright owner + or by an individual or Legal Entity authorized to submit on behalf of + the copyright owner. For the purposes of this definition, "submitted" + means any form of electronic, verbal, or written communication sent + to the Licensor or its representatives, including but not limited to + communication on electronic mailing lists, source code control systems, + and issue tracking systems that are managed by, or on behalf of, the + Licensor for the purpose of discussing and improving the Work, but + excluding communication that is conspicuously marked or otherwise + designated in writing by the copyright owner as "Not a Contribution." + + "Contributor" shall mean Licensor and any individual or Legal Entity + on behalf of whom a Contribution has been received by Licensor and + subsequently incorporated within the Work. + + 2. Grant of Copyright License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + copyright license to reproduce, prepare Derivative Works of, + publicly display, publicly perform, sublicense, and distribute the + Work and such Derivative Works in Source or Object form. + + 3. Grant of Patent License. 
Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + (except as stated in this section) patent license to make, have made, + use, offer to sell, sell, import, and otherwise transfer the Work, + where such license applies only to those patent claims licensable + by such Contributor that are necessarily infringed by their + Contribution(s) alone or by combination of their Contribution(s) + with the Work to which such Contribution(s) was submitted. If You + institute patent litigation against any entity (including a + cross-claim or counterclaim in a lawsuit) alleging that the Work + or a Contribution incorporated within the Work constitutes direct + or contributory patent infringement, then any patent licenses + granted to You under this License for that Work shall terminate + as of the date such litigation is filed. + + 4. Redistribution. You may reproduce and distribute copies of the + Work or Derivative Works thereof in any medium, with or without + modifications, and in Source or Object form, provided that You + meet the following conditions: + + (a) You must give any other recipients of the Work or + Derivative Works a copy of this License; and + + (b) You must cause any modified files to carry prominent notices + stating that You changed the files; and + + (c) You must retain, in the Source form of any Derivative Works + that You distribute, all copyright, patent, trademark, and + attribution notices from the Source form of the Work, + excluding those notices that do not pertain to any part of + the Derivative Works; and + + (d) If the Work includes a "NOTICE" text file as part of its + distribution, then any Derivative Works that You distribute must + include a readable copy of the attribution notices contained + within such NOTICE file, excluding those notices that do not + pertain to any part of the Derivative Works, in at least one + of 
the following places: within a NOTICE text file distributed + as part of the Derivative Works; within the Source form or + documentation, if provided along with the Derivative Works; or, + within a display generated by the Derivative Works, if and + wherever such third-party notices normally appear. The contents + of the NOTICE file are for informational purposes only and + do not modify the License. You may add Your own attribution + notices within Derivative Works that You distribute, alongside + or as an addendum to the NOTICE text from the Work, provided + that such additional attribution notices cannot be construed + as modifying the License. + + You may add Your own copyright statement to Your modifications and + may provide additional or different license terms and conditions + for use, reproduction, or distribution of Your modifications, or + for any such Derivative Works as a whole, provided Your use, + reproduction, and distribution of the Work otherwise complies with + the conditions stated in this License. + + 5. Submission of Contributions. Unless You explicitly state otherwise, + any Contribution intentionally submitted for inclusion in the Work + by You to the Licensor shall be under the terms and conditions of + this License, without any additional terms or conditions. + Notwithstanding the above, nothing herein shall supersede or modify + the terms of any separate license agreement you may have executed + with Licensor regarding such Contributions. + + 6. Trademarks. This License does not grant permission to use the trade + names, trademarks, service marks, or product names of the Licensor, + except as required for reasonable and customary use in describing the + origin of the Work and reproducing the content of the NOTICE file. + + 7. Disclaimer of Warranty. 
Unless required by applicable law or + agreed to in writing, Licensor provides the Work (and each + Contributor provides its Contributions) on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or + implied, including, without limitation, any warranties or conditions + of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A + PARTICULAR PURPOSE. You are solely responsible for determining the + appropriateness of using or redistributing the Work and assume any + risks associated with Your exercise of permissions under this License. + + 8. Limitation of Liability. In no event and under no legal theory, + whether in tort (including negligence), contract, or otherwise, + unless required by applicable law (such as deliberate and grossly + negligent acts) or agreed to in writing, shall any Contributor be + liable to You for damages, including any direct, indirect, special, + incidental, or consequential damages of any character arising as a + result of this License or out of the use or inability to use the + Work (including but not limited to damages for loss of goodwill, + work stoppage, computer failure or malfunction, or any and all + other commercial damages or losses), even if such Contributor + has been advised of the possibility of such damages. + + 9. Accepting Warranty or Additional Liability. While redistributing + the Work or Derivative Works thereof, You may choose to offer, + and charge a fee for, acceptance of support, warranty, indemnity, + or other liability obligations and/or rights consistent with this + License. However, in accepting such obligations, You may act only + on Your own behalf and on Your sole responsibility, not on behalf + of any other Contributor, and only if You agree to indemnify, + defend, and hold each Contributor harmless for any liability + incurred by, or claims asserted against, such Contributor by reason + of your accepting any such warranty or additional liability. 
+ + END OF TERMS AND CONDITIONS + + APPENDIX: How to apply the Apache License to your work. + + To apply the Apache License to your work, attach the following + boilerplate notice, with the fields enclosed by brackets "{}" + replaced with your own identifying information. (Don't include + the brackets!) The text should be enclosed in the appropriate + comment syntax for the file format. We also recommend that a + file or class name and description of purpose be included on the + same "printed page" as the copyright notice for easier + identification within third-party archives. + + Copyright 2018-2020 Amazon.com, Inc. or its affiliates. All Rights Reserved. + + Licensed under the Apache License, Version 2.0 (the "License"); + you may not use this file except in compliance with the License. + You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + + Unless required by applicable law or agreed to in writing, software + distributed under the License is distributed on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + See the License for the specific language governing permissions and + limitations under the License. 
\ No newline at end of file diff --git a/amplify/functions/fetchDocuments/node_modules/@aws-sdk/credential-provider-ini/README.md b/amplify/functions/fetchDocuments/node_modules/@aws-sdk/credential-provider-ini/README.md new file mode 100644 index 0000000..b4f3af1 --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@aws-sdk/credential-provider-ini/README.md @@ -0,0 +1,11 @@ +# @aws-sdk/credential-provider-ini + +[![NPM version](https://img.shields.io/npm/v/@aws-sdk/credential-provider-ini/latest.svg)](https://www.npmjs.com/package/@aws-sdk/credential-provider-ini) +[![NPM downloads](https://img.shields.io/npm/dm/@aws-sdk/credential-provider-ini.svg)](https://www.npmjs.com/package/@aws-sdk/credential-provider-ini) + +> An internal package + +## Usage + +You probably shouldn't, at least directly. Please use [@aws-sdk/credential-providers](https://www.npmjs.com/package/@aws-sdk/credential-providers) +instead. diff --git a/amplify/functions/fetchDocuments/node_modules/@aws-sdk/credential-provider-ini/dist-cjs/index.js b/amplify/functions/fetchDocuments/node_modules/@aws-sdk/credential-provider-ini/dist-cjs/index.js new file mode 100644 index 0000000..e9b6049 --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@aws-sdk/credential-provider-ini/dist-cjs/index.js @@ -0,0 +1,276 @@ +"use strict"; +var __create = Object.create; +var __defProp = Object.defineProperty; +var __getOwnPropDesc = Object.getOwnPropertyDescriptor; +var __getOwnPropNames = Object.getOwnPropertyNames; +var __getProtoOf = Object.getPrototypeOf; +var __hasOwnProp = Object.prototype.hasOwnProperty; +var __name = (target, value) => __defProp(target, "name", { value, configurable: true }); +var __export = (target, all) => { + for (var name in all) + __defProp(target, name, { get: all[name], enumerable: true }); +}; +var __copyProps = (to, from, except, desc) => { + if (from && typeof from === "object" || typeof from === "function") { + for (let key of __getOwnPropNames(from)) + 
if (!__hasOwnProp.call(to, key) && key !== except) + __defProp(to, key, { get: () => from[key], enumerable: !(desc = __getOwnPropDesc(from, key)) || desc.enumerable }); + } + return to; +}; +var __toESM = (mod, isNodeMode, target) => (target = mod != null ? __create(__getProtoOf(mod)) : {}, __copyProps( + // If the importer is in node compatibility mode or this is not an ESM + // file that has been converted to a CommonJS file using a Babel- + // compatible transform (i.e. "__esModule" has not been set), then set + // "default" to the CommonJS "module.exports" for node compatibility. + isNodeMode || !mod || !mod.__esModule ? __defProp(target, "default", { value: mod, enumerable: true }) : target, + mod +)); +var __toCommonJS = (mod) => __copyProps(__defProp({}, "__esModule", { value: true }), mod); + +// src/index.ts +var index_exports = {}; +__export(index_exports, { + fromIni: () => fromIni +}); +module.exports = __toCommonJS(index_exports); + +// src/fromIni.ts + + +// src/resolveProfileData.ts + + +// src/resolveAssumeRoleCredentials.ts + + +var import_shared_ini_file_loader = require("@smithy/shared-ini-file-loader"); + +// src/resolveCredentialSource.ts +var import_client = require("@aws-sdk/core/client"); +var import_property_provider = require("@smithy/property-provider"); +var resolveCredentialSource = /* @__PURE__ */ __name((credentialSource, profileName, logger) => { + const sourceProvidersMap = { + EcsContainer: /* @__PURE__ */ __name(async (options) => { + const { fromHttp } = await Promise.resolve().then(() => __toESM(require("@aws-sdk/credential-provider-http"))); + const { fromContainerMetadata } = await Promise.resolve().then(() => __toESM(require("@smithy/credential-provider-imds"))); + logger?.debug("@aws-sdk/credential-provider-ini - credential_source is EcsContainer"); + return async () => (0, import_property_provider.chain)(fromHttp(options ?? 
{}), fromContainerMetadata(options))().then(setNamedProvider); + }, "EcsContainer"), + Ec2InstanceMetadata: /* @__PURE__ */ __name(async (options) => { + logger?.debug("@aws-sdk/credential-provider-ini - credential_source is Ec2InstanceMetadata"); + const { fromInstanceMetadata } = await Promise.resolve().then(() => __toESM(require("@smithy/credential-provider-imds"))); + return async () => fromInstanceMetadata(options)().then(setNamedProvider); + }, "Ec2InstanceMetadata"), + Environment: /* @__PURE__ */ __name(async (options) => { + logger?.debug("@aws-sdk/credential-provider-ini - credential_source is Environment"); + const { fromEnv } = await Promise.resolve().then(() => __toESM(require("@aws-sdk/credential-provider-env"))); + return async () => fromEnv(options)().then(setNamedProvider); + }, "Environment") + }; + if (credentialSource in sourceProvidersMap) { + return sourceProvidersMap[credentialSource]; + } else { + throw new import_property_provider.CredentialsProviderError( + `Unsupported credential source in profile ${profileName}. 
Got ${credentialSource}, expected EcsContainer or Ec2InstanceMetadata or Environment.`, + { logger } + ); + } +}, "resolveCredentialSource"); +var setNamedProvider = /* @__PURE__ */ __name((creds) => (0, import_client.setCredentialFeature)(creds, "CREDENTIALS_PROFILE_NAMED_PROVIDER", "p"), "setNamedProvider"); + +// src/resolveAssumeRoleCredentials.ts +var isAssumeRoleProfile = /* @__PURE__ */ __name((arg, { profile = "default", logger } = {}) => { + return Boolean(arg) && typeof arg === "object" && typeof arg.role_arn === "string" && ["undefined", "string"].indexOf(typeof arg.role_session_name) > -1 && ["undefined", "string"].indexOf(typeof arg.external_id) > -1 && ["undefined", "string"].indexOf(typeof arg.mfa_serial) > -1 && (isAssumeRoleWithSourceProfile(arg, { profile, logger }) || isCredentialSourceProfile(arg, { profile, logger })); +}, "isAssumeRoleProfile"); +var isAssumeRoleWithSourceProfile = /* @__PURE__ */ __name((arg, { profile, logger }) => { + const withSourceProfile = typeof arg.source_profile === "string" && typeof arg.credential_source === "undefined"; + if (withSourceProfile) { + logger?.debug?.(` ${profile} isAssumeRoleWithSourceProfile source_profile=${arg.source_profile}`); + } + return withSourceProfile; +}, "isAssumeRoleWithSourceProfile"); +var isCredentialSourceProfile = /* @__PURE__ */ __name((arg, { profile, logger }) => { + const withProviderProfile = typeof arg.credential_source === "string" && typeof arg.source_profile === "undefined"; + if (withProviderProfile) { + logger?.debug?.(` ${profile} isCredentialSourceProfile credential_source=${arg.credential_source}`); + } + return withProviderProfile; +}, "isCredentialSourceProfile"); +var resolveAssumeRoleCredentials = /* @__PURE__ */ __name(async (profileName, profiles, options, visitedProfiles = {}) => { + options.logger?.debug("@aws-sdk/credential-provider-ini - resolveAssumeRoleCredentials (STS)"); + const profileData = profiles[profileName]; + const { source_profile, region } = 
profileData; + if (!options.roleAssumer) { + const { getDefaultRoleAssumer } = await Promise.resolve().then(() => __toESM(require("@aws-sdk/nested-clients/sts"))); + options.roleAssumer = getDefaultRoleAssumer( + { + ...options.clientConfig, + credentialProviderLogger: options.logger, + parentClientConfig: { + ...options?.parentClientConfig, + region: region ?? options?.parentClientConfig?.region + } + }, + options.clientPlugins + ); + } + if (source_profile && source_profile in visitedProfiles) { + throw new import_property_provider.CredentialsProviderError( + `Detected a cycle attempting to resolve credentials for profile ${(0, import_shared_ini_file_loader.getProfileName)(options)}. Profiles visited: ` + Object.keys(visitedProfiles).join(", "), + { logger: options.logger } + ); + } + options.logger?.debug( + `@aws-sdk/credential-provider-ini - finding credential resolver using ${source_profile ? `source_profile=[${source_profile}]` : `profile=[${profileName}]`}` + ); + const sourceCredsProvider = source_profile ? resolveProfileData( + source_profile, + profiles, + options, + { + ...visitedProfiles, + [source_profile]: true + }, + isCredentialSourceWithoutRoleArn(profiles[source_profile] ?? 
{}) + ) : (await resolveCredentialSource(profileData.credential_source, profileName, options.logger)(options))(); + if (isCredentialSourceWithoutRoleArn(profileData)) { + return sourceCredsProvider.then((creds) => (0, import_client.setCredentialFeature)(creds, "CREDENTIALS_PROFILE_SOURCE_PROFILE", "o")); + } else { + const params = { + RoleArn: profileData.role_arn, + RoleSessionName: profileData.role_session_name || `aws-sdk-js-${Date.now()}`, + ExternalId: profileData.external_id, + DurationSeconds: parseInt(profileData.duration_seconds || "3600", 10) + }; + const { mfa_serial } = profileData; + if (mfa_serial) { + if (!options.mfaCodeProvider) { + throw new import_property_provider.CredentialsProviderError( + `Profile ${profileName} requires multi-factor authentication, but no MFA code callback was provided.`, + { logger: options.logger, tryNextLink: false } + ); + } + params.SerialNumber = mfa_serial; + params.TokenCode = await options.mfaCodeProvider(mfa_serial); + } + const sourceCreds = await sourceCredsProvider; + return options.roleAssumer(sourceCreds, params).then( + (creds) => (0, import_client.setCredentialFeature)(creds, "CREDENTIALS_PROFILE_SOURCE_PROFILE", "o") + ); + } +}, "resolveAssumeRoleCredentials"); +var isCredentialSourceWithoutRoleArn = /* @__PURE__ */ __name((section) => { + return !section.role_arn && !!section.credential_source; +}, "isCredentialSourceWithoutRoleArn"); + +// src/resolveProcessCredentials.ts + +var isProcessProfile = /* @__PURE__ */ __name((arg) => Boolean(arg) && typeof arg === "object" && typeof arg.credential_process === "string", "isProcessProfile"); +var resolveProcessCredentials = /* @__PURE__ */ __name(async (options, profile) => Promise.resolve().then(() => __toESM(require("@aws-sdk/credential-provider-process"))).then( + ({ fromProcess }) => fromProcess({ + ...options, + profile + })().then((creds) => (0, import_client.setCredentialFeature)(creds, "CREDENTIALS_PROFILE_PROCESS", "v")) +), 
"resolveProcessCredentials"); + +// src/resolveSsoCredentials.ts + +var resolveSsoCredentials = /* @__PURE__ */ __name(async (profile, profileData, options = {}) => { + const { fromSSO } = await Promise.resolve().then(() => __toESM(require("@aws-sdk/credential-provider-sso"))); + return fromSSO({ + profile, + logger: options.logger, + parentClientConfig: options.parentClientConfig, + clientConfig: options.clientConfig + })().then((creds) => { + if (profileData.sso_session) { + return (0, import_client.setCredentialFeature)(creds, "CREDENTIALS_PROFILE_SSO", "r"); + } else { + return (0, import_client.setCredentialFeature)(creds, "CREDENTIALS_PROFILE_SSO_LEGACY", "t"); + } + }); +}, "resolveSsoCredentials"); +var isSsoProfile = /* @__PURE__ */ __name((arg) => arg && (typeof arg.sso_start_url === "string" || typeof arg.sso_account_id === "string" || typeof arg.sso_session === "string" || typeof arg.sso_region === "string" || typeof arg.sso_role_name === "string"), "isSsoProfile"); + +// src/resolveStaticCredentials.ts + +var isStaticCredsProfile = /* @__PURE__ */ __name((arg) => Boolean(arg) && typeof arg === "object" && typeof arg.aws_access_key_id === "string" && typeof arg.aws_secret_access_key === "string" && ["undefined", "string"].indexOf(typeof arg.aws_session_token) > -1 && ["undefined", "string"].indexOf(typeof arg.aws_account_id) > -1, "isStaticCredsProfile"); +var resolveStaticCredentials = /* @__PURE__ */ __name(async (profile, options) => { + options?.logger?.debug("@aws-sdk/credential-provider-ini - resolveStaticCredentials"); + const credentials = { + accessKeyId: profile.aws_access_key_id, + secretAccessKey: profile.aws_secret_access_key, + sessionToken: profile.aws_session_token, + ...profile.aws_credential_scope && { credentialScope: profile.aws_credential_scope }, + ...profile.aws_account_id && { accountId: profile.aws_account_id } + }; + return (0, import_client.setCredentialFeature)(credentials, "CREDENTIALS_PROFILE", "n"); +}, 
"resolveStaticCredentials"); + +// src/resolveWebIdentityCredentials.ts + +var isWebIdentityProfile = /* @__PURE__ */ __name((arg) => Boolean(arg) && typeof arg === "object" && typeof arg.web_identity_token_file === "string" && typeof arg.role_arn === "string" && ["undefined", "string"].indexOf(typeof arg.role_session_name) > -1, "isWebIdentityProfile"); +var resolveWebIdentityCredentials = /* @__PURE__ */ __name(async (profile, options) => Promise.resolve().then(() => __toESM(require("@aws-sdk/credential-provider-web-identity"))).then( + ({ fromTokenFile }) => fromTokenFile({ + webIdentityTokenFile: profile.web_identity_token_file, + roleArn: profile.role_arn, + roleSessionName: profile.role_session_name, + roleAssumerWithWebIdentity: options.roleAssumerWithWebIdentity, + logger: options.logger, + parentClientConfig: options.parentClientConfig + })().then((creds) => (0, import_client.setCredentialFeature)(creds, "CREDENTIALS_PROFILE_STS_WEB_ID_TOKEN", "q")) +), "resolveWebIdentityCredentials"); + +// src/resolveProfileData.ts +var resolveProfileData = /* @__PURE__ */ __name(async (profileName, profiles, options, visitedProfiles = {}, isAssumeRoleRecursiveCall = false) => { + const data = profiles[profileName]; + if (Object.keys(visitedProfiles).length > 0 && isStaticCredsProfile(data)) { + return resolveStaticCredentials(data, options); + } + if (isAssumeRoleRecursiveCall || isAssumeRoleProfile(data, { profile: profileName, logger: options.logger })) { + return resolveAssumeRoleCredentials(profileName, profiles, options, visitedProfiles); + } + if (isStaticCredsProfile(data)) { + return resolveStaticCredentials(data, options); + } + if (isWebIdentityProfile(data)) { + return resolveWebIdentityCredentials(data, options); + } + if (isProcessProfile(data)) { + return resolveProcessCredentials(options, profileName); + } + if (isSsoProfile(data)) { + return await resolveSsoCredentials(profileName, data, options); + } + throw new 
import_property_provider.CredentialsProviderError( + `Could not resolve credentials using profile: [${profileName}] in configuration/credentials file(s).`, + { logger: options.logger } + ); +}, "resolveProfileData"); + +// src/fromIni.ts +var fromIni = /* @__PURE__ */ __name((_init = {}) => async ({ callerClientConfig } = {}) => { + const init = { + ..._init, + parentClientConfig: { + ...callerClientConfig, + ..._init.parentClientConfig + } + }; + init.logger?.debug("@aws-sdk/credential-provider-ini - fromIni"); + const profiles = await (0, import_shared_ini_file_loader.parseKnownFiles)(init); + return resolveProfileData( + (0, import_shared_ini_file_loader.getProfileName)({ + profile: _init.profile ?? callerClientConfig?.profile + }), + profiles, + init + ); +}, "fromIni"); +// Annotate the CommonJS export names for ESM import in node: + +0 && (module.exports = { + fromIni +}); + diff --git a/amplify/functions/fetchDocuments/node_modules/@aws-sdk/credential-provider-ini/dist-es/fromIni.js b/amplify/functions/fetchDocuments/node_modules/@aws-sdk/credential-provider-ini/dist-es/fromIni.js new file mode 100644 index 0000000..ccf0397 --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@aws-sdk/credential-provider-ini/dist-es/fromIni.js @@ -0,0 +1,16 @@ +import { getProfileName, parseKnownFiles } from "@smithy/shared-ini-file-loader"; +import { resolveProfileData } from "./resolveProfileData"; +export const fromIni = (_init = {}) => async ({ callerClientConfig } = {}) => { + const init = { + ..._init, + parentClientConfig: { + ...callerClientConfig, + ..._init.parentClientConfig, + }, + }; + init.logger?.debug("@aws-sdk/credential-provider-ini - fromIni"); + const profiles = await parseKnownFiles(init); + return resolveProfileData(getProfileName({ + profile: _init.profile ?? 
callerClientConfig?.profile, + }), profiles, init); +}; diff --git a/amplify/functions/fetchDocuments/node_modules/@aws-sdk/credential-provider-ini/dist-es/index.js b/amplify/functions/fetchDocuments/node_modules/@aws-sdk/credential-provider-ini/dist-es/index.js new file mode 100644 index 0000000..b019131 --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@aws-sdk/credential-provider-ini/dist-es/index.js @@ -0,0 +1 @@ +export * from "./fromIni"; diff --git a/amplify/functions/fetchDocuments/node_modules/@aws-sdk/credential-provider-ini/dist-es/resolveAssumeRoleCredentials.js b/amplify/functions/fetchDocuments/node_modules/@aws-sdk/credential-provider-ini/dist-es/resolveAssumeRoleCredentials.js new file mode 100644 index 0000000..1411318 --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@aws-sdk/credential-provider-ini/dist-es/resolveAssumeRoleCredentials.js @@ -0,0 +1,80 @@ +import { setCredentialFeature } from "@aws-sdk/core/client"; +import { CredentialsProviderError } from "@smithy/property-provider"; +import { getProfileName } from "@smithy/shared-ini-file-loader"; +import { resolveCredentialSource } from "./resolveCredentialSource"; +import { resolveProfileData } from "./resolveProfileData"; +export const isAssumeRoleProfile = (arg, { profile = "default", logger } = {}) => { + return (Boolean(arg) && + typeof arg === "object" && + typeof arg.role_arn === "string" && + ["undefined", "string"].indexOf(typeof arg.role_session_name) > -1 && + ["undefined", "string"].indexOf(typeof arg.external_id) > -1 && + ["undefined", "string"].indexOf(typeof arg.mfa_serial) > -1 && + (isAssumeRoleWithSourceProfile(arg, { profile, logger }) || isCredentialSourceProfile(arg, { profile, logger }))); +}; +const isAssumeRoleWithSourceProfile = (arg, { profile, logger }) => { + const withSourceProfile = typeof arg.source_profile === "string" && typeof arg.credential_source === "undefined"; + if (withSourceProfile) { + logger?.debug?.(` ${profile} 
isAssumeRoleWithSourceProfile source_profile=${arg.source_profile}`); + } + return withSourceProfile; +}; +const isCredentialSourceProfile = (arg, { profile, logger }) => { + const withProviderProfile = typeof arg.credential_source === "string" && typeof arg.source_profile === "undefined"; + if (withProviderProfile) { + logger?.debug?.(` ${profile} isCredentialSourceProfile credential_source=${arg.credential_source}`); + } + return withProviderProfile; +}; +export const resolveAssumeRoleCredentials = async (profileName, profiles, options, visitedProfiles = {}) => { + options.logger?.debug("@aws-sdk/credential-provider-ini - resolveAssumeRoleCredentials (STS)"); + const profileData = profiles[profileName]; + const { source_profile, region } = profileData; + if (!options.roleAssumer) { + const { getDefaultRoleAssumer } = await import("@aws-sdk/nested-clients/sts"); + options.roleAssumer = getDefaultRoleAssumer({ + ...options.clientConfig, + credentialProviderLogger: options.logger, + parentClientConfig: { + ...options?.parentClientConfig, + region: region ?? options?.parentClientConfig?.region, + }, + }, options.clientPlugins); + } + if (source_profile && source_profile in visitedProfiles) { + throw new CredentialsProviderError(`Detected a cycle attempting to resolve credentials for profile` + + ` ${getProfileName(options)}. Profiles visited: ` + + Object.keys(visitedProfiles).join(", "), { logger: options.logger }); + } + options.logger?.debug(`@aws-sdk/credential-provider-ini - finding credential resolver using ${source_profile ? `source_profile=[${source_profile}]` : `profile=[${profileName}]`}`); + const sourceCredsProvider = source_profile + ? resolveProfileData(source_profile, profiles, options, { + ...visitedProfiles, + [source_profile]: true, + }, isCredentialSourceWithoutRoleArn(profiles[source_profile] ?? 
{})) + : (await resolveCredentialSource(profileData.credential_source, profileName, options.logger)(options))(); + if (isCredentialSourceWithoutRoleArn(profileData)) { + return sourceCredsProvider.then((creds) => setCredentialFeature(creds, "CREDENTIALS_PROFILE_SOURCE_PROFILE", "o")); + } + else { + const params = { + RoleArn: profileData.role_arn, + RoleSessionName: profileData.role_session_name || `aws-sdk-js-${Date.now()}`, + ExternalId: profileData.external_id, + DurationSeconds: parseInt(profileData.duration_seconds || "3600", 10), + }; + const { mfa_serial } = profileData; + if (mfa_serial) { + if (!options.mfaCodeProvider) { + throw new CredentialsProviderError(`Profile ${profileName} requires multi-factor authentication, but no MFA code callback was provided.`, { logger: options.logger, tryNextLink: false }); + } + params.SerialNumber = mfa_serial; + params.TokenCode = await options.mfaCodeProvider(mfa_serial); + } + const sourceCreds = await sourceCredsProvider; + return options.roleAssumer(sourceCreds, params).then((creds) => setCredentialFeature(creds, "CREDENTIALS_PROFILE_SOURCE_PROFILE", "o")); + } +}; +const isCredentialSourceWithoutRoleArn = (section) => { + return !section.role_arn && !!section.credential_source; +}; diff --git a/amplify/functions/fetchDocuments/node_modules/@aws-sdk/credential-provider-ini/dist-es/resolveCredentialSource.js b/amplify/functions/fetchDocuments/node_modules/@aws-sdk/credential-provider-ini/dist-es/resolveCredentialSource.js new file mode 100644 index 0000000..b004933 --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@aws-sdk/credential-provider-ini/dist-es/resolveCredentialSource.js @@ -0,0 +1,30 @@ +import { setCredentialFeature } from "@aws-sdk/core/client"; +import { chain, CredentialsProviderError } from "@smithy/property-provider"; +export const resolveCredentialSource = (credentialSource, profileName, logger) => { + const sourceProvidersMap = { + EcsContainer: async (options) => { + const { 
fromHttp } = await import("@aws-sdk/credential-provider-http"); + const { fromContainerMetadata } = await import("@smithy/credential-provider-imds"); + logger?.debug("@aws-sdk/credential-provider-ini - credential_source is EcsContainer"); + return async () => chain(fromHttp(options ?? {}), fromContainerMetadata(options))().then(setNamedProvider); + }, + Ec2InstanceMetadata: async (options) => { + logger?.debug("@aws-sdk/credential-provider-ini - credential_source is Ec2InstanceMetadata"); + const { fromInstanceMetadata } = await import("@smithy/credential-provider-imds"); + return async () => fromInstanceMetadata(options)().then(setNamedProvider); + }, + Environment: async (options) => { + logger?.debug("@aws-sdk/credential-provider-ini - credential_source is Environment"); + const { fromEnv } = await import("@aws-sdk/credential-provider-env"); + return async () => fromEnv(options)().then(setNamedProvider); + }, + }; + if (credentialSource in sourceProvidersMap) { + return sourceProvidersMap[credentialSource]; + } + else { + throw new CredentialsProviderError(`Unsupported credential source in profile ${profileName}. 
Got ${credentialSource}, ` + + `expected EcsContainer or Ec2InstanceMetadata or Environment.`, { logger }); + } +}; +const setNamedProvider = (creds) => setCredentialFeature(creds, "CREDENTIALS_PROFILE_NAMED_PROVIDER", "p"); diff --git a/amplify/functions/fetchDocuments/node_modules/@aws-sdk/credential-provider-ini/dist-es/resolveProcessCredentials.js b/amplify/functions/fetchDocuments/node_modules/@aws-sdk/credential-provider-ini/dist-es/resolveProcessCredentials.js new file mode 100644 index 0000000..5a9f975 --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@aws-sdk/credential-provider-ini/dist-es/resolveProcessCredentials.js @@ -0,0 +1,6 @@ +import { setCredentialFeature } from "@aws-sdk/core/client"; +export const isProcessProfile = (arg) => Boolean(arg) && typeof arg === "object" && typeof arg.credential_process === "string"; +export const resolveProcessCredentials = async (options, profile) => import("@aws-sdk/credential-provider-process").then(({ fromProcess }) => fromProcess({ + ...options, + profile, +})().then((creds) => setCredentialFeature(creds, "CREDENTIALS_PROFILE_PROCESS", "v"))); diff --git a/amplify/functions/fetchDocuments/node_modules/@aws-sdk/credential-provider-ini/dist-es/resolveProfileData.js b/amplify/functions/fetchDocuments/node_modules/@aws-sdk/credential-provider-ini/dist-es/resolveProfileData.js new file mode 100644 index 0000000..3e64e9e --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@aws-sdk/credential-provider-ini/dist-es/resolveProfileData.js @@ -0,0 +1,28 @@ +import { CredentialsProviderError } from "@smithy/property-provider"; +import { isAssumeRoleProfile, resolveAssumeRoleCredentials } from "./resolveAssumeRoleCredentials"; +import { isProcessProfile, resolveProcessCredentials } from "./resolveProcessCredentials"; +import { isSsoProfile, resolveSsoCredentials } from "./resolveSsoCredentials"; +import { isStaticCredsProfile, resolveStaticCredentials } from "./resolveStaticCredentials"; +import 
{ isWebIdentityProfile, resolveWebIdentityCredentials } from "./resolveWebIdentityCredentials"; +export const resolveProfileData = async (profileName, profiles, options, visitedProfiles = {}, isAssumeRoleRecursiveCall = false) => { + const data = profiles[profileName]; + if (Object.keys(visitedProfiles).length > 0 && isStaticCredsProfile(data)) { + return resolveStaticCredentials(data, options); + } + if (isAssumeRoleRecursiveCall || isAssumeRoleProfile(data, { profile: profileName, logger: options.logger })) { + return resolveAssumeRoleCredentials(profileName, profiles, options, visitedProfiles); + } + if (isStaticCredsProfile(data)) { + return resolveStaticCredentials(data, options); + } + if (isWebIdentityProfile(data)) { + return resolveWebIdentityCredentials(data, options); + } + if (isProcessProfile(data)) { + return resolveProcessCredentials(options, profileName); + } + if (isSsoProfile(data)) { + return await resolveSsoCredentials(profileName, data, options); + } + throw new CredentialsProviderError(`Could not resolve credentials using profile: [${profileName}] in configuration/credentials file(s).`, { logger: options.logger }); +}; diff --git a/amplify/functions/fetchDocuments/node_modules/@aws-sdk/credential-provider-ini/dist-es/resolveSsoCredentials.js b/amplify/functions/fetchDocuments/node_modules/@aws-sdk/credential-provider-ini/dist-es/resolveSsoCredentials.js new file mode 100644 index 0000000..5da74da --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@aws-sdk/credential-provider-ini/dist-es/resolveSsoCredentials.js @@ -0,0 +1,23 @@ +import { setCredentialFeature } from "@aws-sdk/core/client"; +export const resolveSsoCredentials = async (profile, profileData, options = {}) => { + const { fromSSO } = await import("@aws-sdk/credential-provider-sso"); + return fromSSO({ + profile, + logger: options.logger, + parentClientConfig: options.parentClientConfig, + clientConfig: options.clientConfig, + })().then((creds) => { + if 
(profileData.sso_session) { + return setCredentialFeature(creds, "CREDENTIALS_PROFILE_SSO", "r"); + } + else { + return setCredentialFeature(creds, "CREDENTIALS_PROFILE_SSO_LEGACY", "t"); + } + }); +}; +export const isSsoProfile = (arg) => arg && + (typeof arg.sso_start_url === "string" || + typeof arg.sso_account_id === "string" || + typeof arg.sso_session === "string" || + typeof arg.sso_region === "string" || + typeof arg.sso_role_name === "string"); diff --git a/amplify/functions/fetchDocuments/node_modules/@aws-sdk/credential-provider-ini/dist-es/resolveStaticCredentials.js b/amplify/functions/fetchDocuments/node_modules/@aws-sdk/credential-provider-ini/dist-es/resolveStaticCredentials.js new file mode 100644 index 0000000..c04435f --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@aws-sdk/credential-provider-ini/dist-es/resolveStaticCredentials.js @@ -0,0 +1,18 @@ +import { setCredentialFeature } from "@aws-sdk/core/client"; +export const isStaticCredsProfile = (arg) => Boolean(arg) && + typeof arg === "object" && + typeof arg.aws_access_key_id === "string" && + typeof arg.aws_secret_access_key === "string" && + ["undefined", "string"].indexOf(typeof arg.aws_session_token) > -1 && + ["undefined", "string"].indexOf(typeof arg.aws_account_id) > -1; +export const resolveStaticCredentials = async (profile, options) => { + options?.logger?.debug("@aws-sdk/credential-provider-ini - resolveStaticCredentials"); + const credentials = { + accessKeyId: profile.aws_access_key_id, + secretAccessKey: profile.aws_secret_access_key, + sessionToken: profile.aws_session_token, + ...(profile.aws_credential_scope && { credentialScope: profile.aws_credential_scope }), + ...(profile.aws_account_id && { accountId: profile.aws_account_id }), + }; + return setCredentialFeature(credentials, "CREDENTIALS_PROFILE", "n"); +}; diff --git a/amplify/functions/fetchDocuments/node_modules/@aws-sdk/credential-provider-ini/dist-es/resolveWebIdentityCredentials.js 
b/amplify/functions/fetchDocuments/node_modules/@aws-sdk/credential-provider-ini/dist-es/resolveWebIdentityCredentials.js new file mode 100644 index 0000000..10adfe7 --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@aws-sdk/credential-provider-ini/dist-es/resolveWebIdentityCredentials.js @@ -0,0 +1,14 @@ +import { setCredentialFeature } from "@aws-sdk/core/client"; +export const isWebIdentityProfile = (arg) => Boolean(arg) && + typeof arg === "object" && + typeof arg.web_identity_token_file === "string" && + typeof arg.role_arn === "string" && + ["undefined", "string"].indexOf(typeof arg.role_session_name) > -1; +export const resolveWebIdentityCredentials = async (profile, options) => import("@aws-sdk/credential-provider-web-identity").then(({ fromTokenFile }) => fromTokenFile({ + webIdentityTokenFile: profile.web_identity_token_file, + roleArn: profile.role_arn, + roleSessionName: profile.role_session_name, + roleAssumerWithWebIdentity: options.roleAssumerWithWebIdentity, + logger: options.logger, + parentClientConfig: options.parentClientConfig, +})().then((creds) => setCredentialFeature(creds, "CREDENTIALS_PROFILE_STS_WEB_ID_TOKEN", "q"))); diff --git a/amplify/functions/fetchDocuments/node_modules/@aws-sdk/credential-provider-ini/dist-types/fromIni.d.ts b/amplify/functions/fetchDocuments/node_modules/@aws-sdk/credential-provider-ini/dist-types/fromIni.d.ts new file mode 100644 index 0000000..5554125 --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@aws-sdk/credential-provider-ini/dist-types/fromIni.d.ts @@ -0,0 +1,55 @@ +import type { AssumeRoleWithWebIdentityParams } from "@aws-sdk/credential-provider-web-identity"; +import type { CredentialProviderOptions } from "@aws-sdk/types"; +import type { RuntimeConfigAwsCredentialIdentityProvider } from "@aws-sdk/types"; +import { SourceProfileInit } from "@smithy/shared-ini-file-loader"; +import type { AwsCredentialIdentity, Pluggable } from "@smithy/types"; +import { AssumeRoleParams 
} from "./resolveAssumeRoleCredentials"; +/** + * @public + */ +export interface FromIniInit extends SourceProfileInit, CredentialProviderOptions { + /** + * A function that returns a promise fulfilled with an MFA token code for + * the provided MFA Serial code. If a profile requires an MFA code and + * `mfaCodeProvider` is not a valid function, the credential provider + * promise will be rejected. + * + * @param mfaSerial The serial code of the MFA device specified. + */ + mfaCodeProvider?: (mfaSerial: string) => Promise; + /** + * A function that assumes a role and returns a promise fulfilled with + * credentials for the assumed role. + * + * @param sourceCreds The credentials with which to assume a role. + * @param params + */ + roleAssumer?: (sourceCreds: AwsCredentialIdentity, params: AssumeRoleParams) => Promise; + /** + * A function that assumes a role with web identity and returns a promise fulfilled with + * credentials for the assumed role. + * + * @param sourceCreds The credentials with which to assume a role. + * @param params + */ + roleAssumerWithWebIdentity?: (params: AssumeRoleWithWebIdentityParams) => Promise; + /** + * STSClientConfig or SSOClientConfig to be used for creating inner client + * for auth operations. + * @internal + */ + clientConfig?: any; + clientPlugins?: Pluggable[]; + /** + * When true, always reload credentials from the file system instead of using cached values. + * This is useful when you need to detect changes to the credentials file. + */ + ignoreCache?: boolean; +} +/** + * @internal + * + * Creates a credential provider that will read from ini files and supports + * role assumption and multi-factor authentication. 
+ */ +export declare const fromIni: (_init?: FromIniInit) => RuntimeConfigAwsCredentialIdentityProvider; diff --git a/amplify/functions/fetchDocuments/node_modules/@aws-sdk/credential-provider-ini/dist-types/index.d.ts b/amplify/functions/fetchDocuments/node_modules/@aws-sdk/credential-provider-ini/dist-types/index.d.ts new file mode 100644 index 0000000..75680c0 --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@aws-sdk/credential-provider-ini/dist-types/index.d.ts @@ -0,0 +1,4 @@ +/** + * @internal + */ +export * from "./fromIni"; diff --git a/amplify/functions/fetchDocuments/node_modules/@aws-sdk/credential-provider-ini/dist-types/resolveAssumeRoleCredentials.d.ts b/amplify/functions/fetchDocuments/node_modules/@aws-sdk/credential-provider-ini/dist-types/resolveAssumeRoleCredentials.d.ts new file mode 100644 index 0000000..dd9a896 --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@aws-sdk/credential-provider-ini/dist-types/resolveAssumeRoleCredentials.d.ts @@ -0,0 +1,47 @@ +import { Logger, ParsedIniData } from "@smithy/types"; +import { FromIniInit } from "./fromIni"; +/** + * @internal + * + * @see http://docs.aws.amazon.com/AWSJavaScriptSDK/latest/AWS/STS.html#assumeRole-property + * TODO update the above to link to V3 docs + */ +export interface AssumeRoleParams { + /** + * The identifier of the role to be assumed. + */ + RoleArn: string; + /** + * A name for the assumed role session. + */ + RoleSessionName: string; + /** + * A unique identifier that is used by third parties when assuming roles in + * their customers' accounts. + */ + ExternalId?: string; + /** + * The identification number of the MFA device that is associated with the + * user who is making the `AssumeRole` call. + */ + SerialNumber?: string; + /** + * The value provided by the MFA device. + */ + TokenCode?: string; + /** + * The duration, in seconds, of the role session. 
+ */ + DurationSeconds?: number; +} +/** + * @internal + */ +export declare const isAssumeRoleProfile: (arg: any, { profile, logger }?: { + profile?: string | undefined; + logger?: Logger | undefined; +}) => boolean; +/** + * @internal + */ +export declare const resolveAssumeRoleCredentials: (profileName: string, profiles: ParsedIniData, options: FromIniInit, visitedProfiles?: Record) => Promise; diff --git a/amplify/functions/fetchDocuments/node_modules/@aws-sdk/credential-provider-ini/dist-types/resolveCredentialSource.d.ts b/amplify/functions/fetchDocuments/node_modules/@aws-sdk/credential-provider-ini/dist-types/resolveCredentialSource.d.ts new file mode 100644 index 0000000..6f1c9b7 --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@aws-sdk/credential-provider-ini/dist-types/resolveCredentialSource.d.ts @@ -0,0 +1,12 @@ +import type { CredentialProviderOptions } from "@aws-sdk/types"; +import { AwsCredentialIdentityProvider, Logger } from "@smithy/types"; +/** + * @internal + * + * Resolve the `credential_source` entry from the profile, and return the + * credential providers respectively. No memoization is needed for the + * credential source providers because memoization should be added outside the + * fromIni() provider. The source credential needs to be refreshed every time + * fromIni() is called. 
+ */ +export declare const resolveCredentialSource: (credentialSource: string, profileName: string, logger?: Logger) => (options?: CredentialProviderOptions) => Promise; diff --git a/amplify/functions/fetchDocuments/node_modules/@aws-sdk/credential-provider-ini/dist-types/resolveProcessCredentials.d.ts b/amplify/functions/fetchDocuments/node_modules/@aws-sdk/credential-provider-ini/dist-types/resolveProcessCredentials.d.ts new file mode 100644 index 0000000..7194518 --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@aws-sdk/credential-provider-ini/dist-types/resolveProcessCredentials.d.ts @@ -0,0 +1,16 @@ +import { Credentials, Profile } from "@aws-sdk/types"; +import { FromIniInit } from "./fromIni"; +/** + * @internal + */ +export interface ProcessProfile extends Profile { + credential_process: string; +} +/** + * @internal + */ +export declare const isProcessProfile: (arg: any) => arg is ProcessProfile; +/** + * @internal + */ +export declare const resolveProcessCredentials: (options: FromIniInit, profile: string) => Promise; diff --git a/amplify/functions/fetchDocuments/node_modules/@aws-sdk/credential-provider-ini/dist-types/resolveProfileData.d.ts b/amplify/functions/fetchDocuments/node_modules/@aws-sdk/credential-provider-ini/dist-types/resolveProfileData.d.ts new file mode 100644 index 0000000..e59ca93 --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@aws-sdk/credential-provider-ini/dist-types/resolveProfileData.d.ts @@ -0,0 +1,6 @@ +import type { AwsCredentialIdentity, ParsedIniData } from "@smithy/types"; +import { FromIniInit } from "./fromIni"; +/** + * @internal + */ +export declare const resolveProfileData: (profileName: string, profiles: ParsedIniData, options: FromIniInit, visitedProfiles?: Record, isAssumeRoleRecursiveCall?: boolean) => Promise; diff --git a/amplify/functions/fetchDocuments/node_modules/@aws-sdk/credential-provider-ini/dist-types/resolveSsoCredentials.d.ts 
b/amplify/functions/fetchDocuments/node_modules/@aws-sdk/credential-provider-ini/dist-types/resolveSsoCredentials.d.ts new file mode 100644 index 0000000..1909a51 --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@aws-sdk/credential-provider-ini/dist-types/resolveSsoCredentials.d.ts @@ -0,0 +1,12 @@ +import type { SsoProfile } from "@aws-sdk/credential-provider-sso"; +import type { IniSection, Profile } from "@smithy/types"; +import type { FromIniInit } from "./fromIni"; +/** + * @internal + */ +export declare const resolveSsoCredentials: (profile: string, profileData: IniSection, options?: FromIniInit) => Promise; +/** + * @internal + * duplicated from \@aws-sdk/credential-provider-sso to defer import. + */ +export declare const isSsoProfile: (arg: Profile) => arg is Partial; diff --git a/amplify/functions/fetchDocuments/node_modules/@aws-sdk/credential-provider-ini/dist-types/resolveStaticCredentials.d.ts b/amplify/functions/fetchDocuments/node_modules/@aws-sdk/credential-provider-ini/dist-types/resolveStaticCredentials.d.ts new file mode 100644 index 0000000..e04cf26 --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@aws-sdk/credential-provider-ini/dist-types/resolveStaticCredentials.d.ts @@ -0,0 +1,20 @@ +import { AwsCredentialIdentity, Profile } from "@smithy/types"; +import { FromIniInit } from "./fromIni"; +/** + * @internal + */ +export interface StaticCredsProfile extends Profile { + aws_access_key_id: string; + aws_secret_access_key: string; + aws_session_token?: string; + aws_credential_scope?: string; + aws_account_id?: string; +} +/** + * @internal + */ +export declare const isStaticCredsProfile: (arg: any) => arg is StaticCredsProfile; +/** + * @internal + */ +export declare const resolveStaticCredentials: (profile: StaticCredsProfile, options?: FromIniInit) => Promise; diff --git a/amplify/functions/fetchDocuments/node_modules/@aws-sdk/credential-provider-ini/dist-types/resolveWebIdentityCredentials.d.ts 
b/amplify/functions/fetchDocuments/node_modules/@aws-sdk/credential-provider-ini/dist-types/resolveWebIdentityCredentials.d.ts new file mode 100644 index 0000000..acb1d45 --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@aws-sdk/credential-provider-ini/dist-types/resolveWebIdentityCredentials.d.ts @@ -0,0 +1,18 @@ +import { AwsCredentialIdentity, Profile } from "@smithy/types"; +import { FromIniInit } from "./fromIni"; +/** + * @internal + */ +export interface WebIdentityProfile extends Profile { + web_identity_token_file: string; + role_arn: string; + role_session_name?: string; +} +/** + * @internal + */ +export declare const isWebIdentityProfile: (arg: any) => arg is WebIdentityProfile; +/** + * @internal + */ +export declare const resolveWebIdentityCredentials: (profile: WebIdentityProfile, options: FromIniInit) => Promise; diff --git a/amplify/functions/fetchDocuments/node_modules/@aws-sdk/credential-provider-ini/dist-types/ts3.4/fromIni.d.ts b/amplify/functions/fetchDocuments/node_modules/@aws-sdk/credential-provider-ini/dist-types/ts3.4/fromIni.d.ts new file mode 100644 index 0000000..9d640a0 --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@aws-sdk/credential-provider-ini/dist-types/ts3.4/fromIni.d.ts @@ -0,0 +1,24 @@ +import { AssumeRoleWithWebIdentityParams } from "@aws-sdk/credential-provider-web-identity"; +import { CredentialProviderOptions } from "@aws-sdk/types"; +import { RuntimeConfigAwsCredentialIdentityProvider } from "@aws-sdk/types"; +import { SourceProfileInit } from "@smithy/shared-ini-file-loader"; +import { AwsCredentialIdentity, Pluggable } from "@smithy/types"; +import { AssumeRoleParams } from "./resolveAssumeRoleCredentials"; +export interface FromIniInit + extends SourceProfileInit, + CredentialProviderOptions { + mfaCodeProvider?: (mfaSerial: string) => Promise; + roleAssumer?: ( + sourceCreds: AwsCredentialIdentity, + params: AssumeRoleParams + ) => Promise; + roleAssumerWithWebIdentity?: ( + params: 
AssumeRoleWithWebIdentityParams + ) => Promise; + clientConfig?: any; + clientPlugins?: Pluggable[]; + ignoreCache?: boolean; +} +export declare const fromIni: ( + _init?: FromIniInit +) => RuntimeConfigAwsCredentialIdentityProvider; diff --git a/amplify/functions/fetchDocuments/node_modules/@aws-sdk/credential-provider-ini/dist-types/ts3.4/index.d.ts b/amplify/functions/fetchDocuments/node_modules/@aws-sdk/credential-provider-ini/dist-types/ts3.4/index.d.ts new file mode 100644 index 0000000..b019131 --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@aws-sdk/credential-provider-ini/dist-types/ts3.4/index.d.ts @@ -0,0 +1 @@ +export * from "./fromIni"; diff --git a/amplify/functions/fetchDocuments/node_modules/@aws-sdk/credential-provider-ini/dist-types/ts3.4/resolveAssumeRoleCredentials.d.ts b/amplify/functions/fetchDocuments/node_modules/@aws-sdk/credential-provider-ini/dist-types/ts3.4/resolveAssumeRoleCredentials.d.ts new file mode 100644 index 0000000..eb782f3 --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@aws-sdk/credential-provider-ini/dist-types/ts3.4/resolveAssumeRoleCredentials.d.ts @@ -0,0 +1,26 @@ +import { Logger, ParsedIniData } from "@smithy/types"; +import { FromIniInit } from "./fromIni"; +export interface AssumeRoleParams { + RoleArn: string; + RoleSessionName: string; + ExternalId?: string; + SerialNumber?: string; + TokenCode?: string; + DurationSeconds?: number; +} +export declare const isAssumeRoleProfile: ( + arg: any, + { + profile, + logger, + }?: { + profile?: string | undefined; + logger?: Logger | undefined; + } +) => boolean; +export declare const resolveAssumeRoleCredentials: ( + profileName: string, + profiles: ParsedIniData, + options: FromIniInit, + visitedProfiles?: Record +) => Promise; diff --git a/amplify/functions/fetchDocuments/node_modules/@aws-sdk/credential-provider-ini/dist-types/ts3.4/resolveCredentialSource.d.ts 
b/amplify/functions/fetchDocuments/node_modules/@aws-sdk/credential-provider-ini/dist-types/ts3.4/resolveCredentialSource.d.ts new file mode 100644 index 0000000..21a7f9f --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@aws-sdk/credential-provider-ini/dist-types/ts3.4/resolveCredentialSource.d.ts @@ -0,0 +1,9 @@ +import { CredentialProviderOptions } from "@aws-sdk/types"; +import { AwsCredentialIdentityProvider, Logger } from "@smithy/types"; +export declare const resolveCredentialSource: ( + credentialSource: string, + profileName: string, + logger?: Logger +) => ( + options?: CredentialProviderOptions +) => Promise; diff --git a/amplify/functions/fetchDocuments/node_modules/@aws-sdk/credential-provider-ini/dist-types/ts3.4/resolveProcessCredentials.d.ts b/amplify/functions/fetchDocuments/node_modules/@aws-sdk/credential-provider-ini/dist-types/ts3.4/resolveProcessCredentials.d.ts new file mode 100644 index 0000000..dbd5583 --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@aws-sdk/credential-provider-ini/dist-types/ts3.4/resolveProcessCredentials.d.ts @@ -0,0 +1,10 @@ +import { Credentials, Profile } from "@aws-sdk/types"; +import { FromIniInit } from "./fromIni"; +export interface ProcessProfile extends Profile { + credential_process: string; +} +export declare const isProcessProfile: (arg: any) => arg is ProcessProfile; +export declare const resolveProcessCredentials: ( + options: FromIniInit, + profile: string +) => Promise; diff --git a/amplify/functions/fetchDocuments/node_modules/@aws-sdk/credential-provider-ini/dist-types/ts3.4/resolveProfileData.d.ts b/amplify/functions/fetchDocuments/node_modules/@aws-sdk/credential-provider-ini/dist-types/ts3.4/resolveProfileData.d.ts new file mode 100644 index 0000000..d821bb4 --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@aws-sdk/credential-provider-ini/dist-types/ts3.4/resolveProfileData.d.ts @@ -0,0 +1,9 @@ +import { AwsCredentialIdentity, ParsedIniData } from 
"@smithy/types"; +import { FromIniInit } from "./fromIni"; +export declare const resolveProfileData: ( + profileName: string, + profiles: ParsedIniData, + options: FromIniInit, + visitedProfiles?: Record, + isAssumeRoleRecursiveCall?: boolean +) => Promise; diff --git a/amplify/functions/fetchDocuments/node_modules/@aws-sdk/credential-provider-ini/dist-types/ts3.4/resolveSsoCredentials.d.ts b/amplify/functions/fetchDocuments/node_modules/@aws-sdk/credential-provider-ini/dist-types/ts3.4/resolveSsoCredentials.d.ts new file mode 100644 index 0000000..88bec34 --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@aws-sdk/credential-provider-ini/dist-types/ts3.4/resolveSsoCredentials.d.ts @@ -0,0 +1,9 @@ +import { SsoProfile } from "@aws-sdk/credential-provider-sso"; +import { IniSection, Profile } from "@smithy/types"; +import { FromIniInit } from "./fromIni"; +export declare const resolveSsoCredentials: ( + profile: string, + profileData: IniSection, + options?: FromIniInit +) => Promise; +export declare const isSsoProfile: (arg: Profile) => arg is Partial; diff --git a/amplify/functions/fetchDocuments/node_modules/@aws-sdk/credential-provider-ini/dist-types/ts3.4/resolveStaticCredentials.d.ts b/amplify/functions/fetchDocuments/node_modules/@aws-sdk/credential-provider-ini/dist-types/ts3.4/resolveStaticCredentials.d.ts new file mode 100644 index 0000000..5f5daa9 --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@aws-sdk/credential-provider-ini/dist-types/ts3.4/resolveStaticCredentials.d.ts @@ -0,0 +1,16 @@ +import { AwsCredentialIdentity, Profile } from "@smithy/types"; +import { FromIniInit } from "./fromIni"; +export interface StaticCredsProfile extends Profile { + aws_access_key_id: string; + aws_secret_access_key: string; + aws_session_token?: string; + aws_credential_scope?: string; + aws_account_id?: string; +} +export declare const isStaticCredsProfile: ( + arg: any +) => arg is StaticCredsProfile; +export declare const 
resolveStaticCredentials: ( + profile: StaticCredsProfile, + options?: FromIniInit +) => Promise; diff --git a/amplify/functions/fetchDocuments/node_modules/@aws-sdk/credential-provider-ini/dist-types/ts3.4/resolveWebIdentityCredentials.d.ts b/amplify/functions/fetchDocuments/node_modules/@aws-sdk/credential-provider-ini/dist-types/ts3.4/resolveWebIdentityCredentials.d.ts new file mode 100644 index 0000000..4179f94 --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@aws-sdk/credential-provider-ini/dist-types/ts3.4/resolveWebIdentityCredentials.d.ts @@ -0,0 +1,14 @@ +import { AwsCredentialIdentity, Profile } from "@smithy/types"; +import { FromIniInit } from "./fromIni"; +export interface WebIdentityProfile extends Profile { + web_identity_token_file: string; + role_arn: string; + role_session_name?: string; +} +export declare const isWebIdentityProfile: ( + arg: any +) => arg is WebIdentityProfile; +export declare const resolveWebIdentityCredentials: ( + profile: WebIdentityProfile, + options: FromIniInit +) => Promise; diff --git a/amplify/functions/fetchDocuments/node_modules/@aws-sdk/credential-provider-ini/package.json b/amplify/functions/fetchDocuments/node_modules/@aws-sdk/credential-provider-ini/package.json new file mode 100644 index 0000000..313a386 --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@aws-sdk/credential-provider-ini/package.json @@ -0,0 +1,72 @@ +{ + "name": "@aws-sdk/credential-provider-ini", + "version": "3.803.0", + "description": "AWS credential provider that sources credentials from ~/.aws/credentials and ~/.aws/config", + "main": "./dist-cjs/index.js", + "module": "./dist-es/index.js", + "scripts": { + "build": "concurrently 'yarn:build:cjs' 'yarn:build:es' 'yarn:build:types'", + "build:cjs": "node ../../scripts/compilation/inline credential-provider-ini", + "build:es": "tsc -p tsconfig.es.json", + "build:include:deps": "lerna run --scope $npm_package_name --include-dependencies build", + "build:types": 
"tsc -p tsconfig.types.json", + "build:types:downlevel": "downlevel-dts dist-types dist-types/ts3.4", + "clean": "rimraf ./dist-* && rimraf *.tsbuildinfo", + "test": "yarn g:vitest run", + "test:watch": "yarn g:vitest watch", + "test:integration": "yarn g:vitest run -c vitest.config.integ.ts", + "test:integration:watch": "yarn g:vitest watch -c vitest.config.integ.ts" + }, + "keywords": [ + "aws", + "credentials" + ], + "author": { + "name": "AWS SDK for JavaScript Team", + "url": "https://aws.amazon.com/javascript/" + }, + "license": "Apache-2.0", + "dependencies": { + "@aws-sdk/core": "3.799.0", + "@aws-sdk/credential-provider-env": "3.799.0", + "@aws-sdk/credential-provider-http": "3.799.0", + "@aws-sdk/credential-provider-process": "3.799.0", + "@aws-sdk/credential-provider-sso": "3.803.0", + "@aws-sdk/credential-provider-web-identity": "3.803.0", + "@aws-sdk/nested-clients": "3.803.0", + "@aws-sdk/types": "3.775.0", + "@smithy/credential-provider-imds": "^4.0.2", + "@smithy/property-provider": "^4.0.2", + "@smithy/shared-ini-file-loader": "^4.0.2", + "@smithy/types": "^4.2.0", + "tslib": "^2.6.2" + }, + "devDependencies": { + "@tsconfig/recommended": "1.0.1", + "@types/node": "^18.19.69", + "concurrently": "7.0.0", + "downlevel-dts": "0.10.1", + "rimraf": "3.0.2", + "typescript": "~5.2.2" + }, + "types": "./dist-types/index.d.ts", + "engines": { + "node": ">=18.0.0" + }, + "typesVersions": { + "<4.0": { + "dist-types/*": [ + "dist-types/ts3.4/*" + ] + } + }, + "files": [ + "dist-*/**" + ], + "homepage": "https://github.com/aws/aws-sdk-js-v3/tree/main/packages/credential-provider-ini", + "repository": { + "type": "git", + "url": "https://github.com/aws/aws-sdk-js-v3.git", + "directory": "packages/credential-provider-ini" + } +} diff --git a/amplify/functions/fetchDocuments/node_modules/@aws-sdk/credential-provider-node/LICENSE b/amplify/functions/fetchDocuments/node_modules/@aws-sdk/credential-provider-node/LICENSE new file mode 100644 index 0000000..7b6491b 
--- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@aws-sdk/credential-provider-node/LICENSE @@ -0,0 +1,201 @@ +Apache License + Version 2.0, January 2004 + http://www.apache.org/licenses/ + + TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION + + 1. Definitions. + + "License" shall mean the terms and conditions for use, reproduction, + and distribution as defined by Sections 1 through 9 of this document. + + "Licensor" shall mean the copyright owner or entity authorized by + the copyright owner that is granting the License. + + "Legal Entity" shall mean the union of the acting entity and all + other entities that control, are controlled by, or are under common + control with that entity. For the purposes of this definition, + "control" means (i) the power, direct or indirect, to cause the + direction or management of such entity, whether by contract or + otherwise, or (ii) ownership of fifty percent (50%) or more of the + outstanding shares, or (iii) beneficial ownership of such entity. + + "You" (or "Your") shall mean an individual or Legal Entity + exercising permissions granted by this License. + + "Source" form shall mean the preferred form for making modifications, + including but not limited to software source code, documentation + source, and configuration files. + + "Object" form shall mean any form resulting from mechanical + transformation or translation of a Source form, including but + not limited to compiled object code, generated documentation, + and conversions to other media types. + + "Work" shall mean the work of authorship, whether in Source or + Object form, made available under the License, as indicated by a + copyright notice that is included in or attached to the work + (an example is provided in the Appendix below). 
+ + "Derivative Works" shall mean any work, whether in Source or Object + form, that is based on (or derived from) the Work and for which the + editorial revisions, annotations, elaborations, or other modifications + represent, as a whole, an original work of authorship. For the purposes + of this License, Derivative Works shall not include works that remain + separable from, or merely link (or bind by name) to the interfaces of, + the Work and Derivative Works thereof. + + "Contribution" shall mean any work of authorship, including + the original version of the Work and any modifications or additions + to that Work or Derivative Works thereof, that is intentionally + submitted to Licensor for inclusion in the Work by the copyright owner + or by an individual or Legal Entity authorized to submit on behalf of + the copyright owner. For the purposes of this definition, "submitted" + means any form of electronic, verbal, or written communication sent + to the Licensor or its representatives, including but not limited to + communication on electronic mailing lists, source code control systems, + and issue tracking systems that are managed by, or on behalf of, the + Licensor for the purpose of discussing and improving the Work, but + excluding communication that is conspicuously marked or otherwise + designated in writing by the copyright owner as "Not a Contribution." + + "Contributor" shall mean Licensor and any individual or Legal Entity + on behalf of whom a Contribution has been received by Licensor and + subsequently incorporated within the Work. + + 2. Grant of Copyright License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + copyright license to reproduce, prepare Derivative Works of, + publicly display, publicly perform, sublicense, and distribute the + Work and such Derivative Works in Source or Object form. + + 3. 
Grant of Patent License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + (except as stated in this section) patent license to make, have made, + use, offer to sell, sell, import, and otherwise transfer the Work, + where such license applies only to those patent claims licensable + by such Contributor that are necessarily infringed by their + Contribution(s) alone or by combination of their Contribution(s) + with the Work to which such Contribution(s) was submitted. If You + institute patent litigation against any entity (including a + cross-claim or counterclaim in a lawsuit) alleging that the Work + or a Contribution incorporated within the Work constitutes direct + or contributory patent infringement, then any patent licenses + granted to You under this License for that Work shall terminate + as of the date such litigation is filed. + + 4. Redistribution. You may reproduce and distribute copies of the + Work or Derivative Works thereof in any medium, with or without + modifications, and in Source or Object form, provided that You + meet the following conditions: + + (a) You must give any other recipients of the Work or + Derivative Works a copy of this License; and + + (b) You must cause any modified files to carry prominent notices + stating that You changed the files; and + + (c) You must retain, in the Source form of any Derivative Works + that You distribute, all copyright, patent, trademark, and + attribution notices from the Source form of the Work, + excluding those notices that do not pertain to any part of + the Derivative Works; and + + (d) If the Work includes a "NOTICE" text file as part of its + distribution, then any Derivative Works that You distribute must + include a readable copy of the attribution notices contained + within such NOTICE file, excluding those notices that do not + pertain to any part of the Derivative 
Works, in at least one + of the following places: within a NOTICE text file distributed + as part of the Derivative Works; within the Source form or + documentation, if provided along with the Derivative Works; or, + within a display generated by the Derivative Works, if and + wherever such third-party notices normally appear. The contents + of the NOTICE file are for informational purposes only and + do not modify the License. You may add Your own attribution + notices within Derivative Works that You distribute, alongside + or as an addendum to the NOTICE text from the Work, provided + that such additional attribution notices cannot be construed + as modifying the License. + + You may add Your own copyright statement to Your modifications and + may provide additional or different license terms and conditions + for use, reproduction, or distribution of Your modifications, or + for any such Derivative Works as a whole, provided Your use, + reproduction, and distribution of the Work otherwise complies with + the conditions stated in this License. + + 5. Submission of Contributions. Unless You explicitly state otherwise, + any Contribution intentionally submitted for inclusion in the Work + by You to the Licensor shall be under the terms and conditions of + this License, without any additional terms or conditions. + Notwithstanding the above, nothing herein shall supersede or modify + the terms of any separate license agreement you may have executed + with Licensor regarding such Contributions. + + 6. Trademarks. This License does not grant permission to use the trade + names, trademarks, service marks, or product names of the Licensor, + except as required for reasonable and customary use in describing the + origin of the Work and reproducing the content of the NOTICE file. + + 7. Disclaimer of Warranty. 
Unless required by applicable law or + agreed to in writing, Licensor provides the Work (and each + Contributor provides its Contributions) on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or + implied, including, without limitation, any warranties or conditions + of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A + PARTICULAR PURPOSE. You are solely responsible for determining the + appropriateness of using or redistributing the Work and assume any + risks associated with Your exercise of permissions under this License. + + 8. Limitation of Liability. In no event and under no legal theory, + whether in tort (including negligence), contract, or otherwise, + unless required by applicable law (such as deliberate and grossly + negligent acts) or agreed to in writing, shall any Contributor be + liable to You for damages, including any direct, indirect, special, + incidental, or consequential damages of any character arising as a + result of this License or out of the use or inability to use the + Work (including but not limited to damages for loss of goodwill, + work stoppage, computer failure or malfunction, or any and all + other commercial damages or losses), even if such Contributor + has been advised of the possibility of such damages. + + 9. Accepting Warranty or Additional Liability. While redistributing + the Work or Derivative Works thereof, You may choose to offer, + and charge a fee for, acceptance of support, warranty, indemnity, + or other liability obligations and/or rights consistent with this + License. However, in accepting such obligations, You may act only + on Your own behalf and on Your sole responsibility, not on behalf + of any other Contributor, and only if You agree to indemnify, + defend, and hold each Contributor harmless for any liability + incurred by, or claims asserted against, such Contributor by reason + of your accepting any such warranty or additional liability. 
+ + END OF TERMS AND CONDITIONS + + APPENDIX: How to apply the Apache License to your work. + + To apply the Apache License to your work, attach the following + boilerplate notice, with the fields enclosed by brackets "{}" + replaced with your own identifying information. (Don't include + the brackets!) The text should be enclosed in the appropriate + comment syntax for the file format. We also recommend that a + file or class name and description of purpose be included on the + same "printed page" as the copyright notice for easier + identification within third-party archives. + + Copyright 2018-2020 Amazon.com, Inc. or its affiliates. All Rights Reserved. + + Licensed under the Apache License, Version 2.0 (the "License"); + you may not use this file except in compliance with the License. + You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + + Unless required by applicable law or agreed to in writing, software + distributed under the License is distributed on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + See the License for the specific language governing permissions and + limitations under the License. 
\ No newline at end of file diff --git a/amplify/functions/fetchDocuments/node_modules/@aws-sdk/credential-provider-node/README.md b/amplify/functions/fetchDocuments/node_modules/@aws-sdk/credential-provider-node/README.md new file mode 100644 index 0000000..7957cc0 --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@aws-sdk/credential-provider-node/README.md @@ -0,0 +1,104 @@ +# @aws-sdk/credential-provider-node + +[![NPM version](https://img.shields.io/npm/v/@aws-sdk/credential-provider-node/latest.svg)](https://www.npmjs.com/package/@aws-sdk/credential-provider-node) +[![NPM downloads](https://img.shields.io/npm/dm/@aws-sdk/credential-provider-node.svg)](https://www.npmjs.com/package/@aws-sdk/credential-provider-node) + +## AWS Credential Provider for Node.JS + +This module provides a factory function, `defaultProvider`, that will attempt to +source AWS credentials from a Node.JS environment. It will attempt to find +credentials from the following sources (listed in order of precedence): + +- Environment variables exposed via `process.env` +- SSO credentials from token cache +- Web identity token credentials +- Shared credentials and config ini files +- The EC2/ECS Instance Metadata Service + +The default credential provider will invoke one provider at a time and only +continue to the next if no credentials have been located. For example, if the +process finds values defined via the `AWS_ACCESS_KEY_ID` and +`AWS_SECRET_ACCESS_KEY` environment variables, the files at `~/.aws/credentials` +and `~/.aws/config` will not be read, nor will any messages be sent to the +Instance Metadata Service. + +If invalid configuration is encountered (such as a profile in +`~/.aws/credentials` specifying as its `source_profile` the name of a profile +that does not exist), then the chained provider will be rejected with an error +and will not invoke the next provider in the list. 
+ +_IMPORTANT_: if you intend to acquire credentials using EKS +[IAM Roles for Service Accounts](https://docs.aws.amazon.com/eks/latest/userguide/iam-roles-for-service-accounts.html), +then you must explicitly specify a value for `roleAssumerWithWebIdentity`. There is a +default function available in `@aws-sdk/client-sts` package. An example of using +this: + +```js +const { getDefaultRoleAssumerWithWebIdentity } = require("@aws-sdk/client-sts"); +const { defaultProvider } = require("@aws-sdk/credential-provider-node"); +const { S3Client, GetObjectCommand } = require("@aws-sdk/client-s3"); + +const provider = defaultProvider({ + roleAssumerWithWebIdentity: getDefaultRoleAssumerWithWebIdentity({ + // You must explicitly pass a region if you are not using us-east-1 + region: "eu-west-1" + }), +}); + +const client = new S3Client({ credentialDefaultProvider: provider }); +``` + +_IMPORTANT_: We provide a wrapper of this provider in `@aws-sdk/credential-providers` +package to save you from importing `getDefaultRoleAssumerWithWebIdentity()` or +`getDefaultRoleAssume()` from STS package. Similarly, you can do: + +```js +const { fromNodeProviderChain } = require("@aws-sdk/credential-providers"); + +const credentials = fromNodeProviderChain(); + +const client = new S3Client({ credentials }); +``` + +## Supported configuration + +You may customize how credentials are resolved by providing an options hash to +the `defaultProvider` factory function. The following options are +supported: + +- `profile` - The configuration profile to use. If not specified, the provider + will use the value in the `AWS_PROFILE` environment variable or a default of + `default`. +- `filepath` - The path to the shared credentials file. If not specified, the + provider will use the value in the `AWS_SHARED_CREDENTIALS_FILE` environment + variable or a default of `~/.aws/credentials`. +- `configFilepath` - The path to the shared config file. 
If not specified, the + provider will use the value in the `AWS_CONFIG_FILE` environment variable or a + default of `~/.aws/config`. +- `mfaCodeProvider` - A function that returns a a promise fulfilled with an + MFA token code for the provided MFA Serial code. If a profile requires an MFA + code and `mfaCodeProvider` is not a valid function, the credential provider + promise will be rejected. +- `roleAssumer` - A function that assumes a role and returns a promise + fulfilled with credentials for the assumed role. If not specified, no role + will be assumed, and an error will be thrown. +- `roleArn` - ARN to assume. If not specified, the provider will use the value + in the `AWS_ROLE_ARN` environment variable. +- `webIdentityTokenFile` - File location of where the `OIDC` token is stored. + If not specified, the provider will use the value in the `AWS_WEB_IDENTITY_TOKEN_FILE` + environment variable. +- `roleAssumerWithWebIdentity` - A function that assumes a role with web identity and + returns a promise fulfilled with credentials for the assumed role. +- `timeout` - The connection timeout (in milliseconds) to apply to any remote + requests. If not specified, a default value of `1000` (one second) is used. +- `maxRetries` - The maximum number of times any HTTP connections should be + retried. If not specified, a default value of `0` will be used. 
+ +## Related packages: + +- [AWS Credential Provider for Node.JS - Environment Variables](../credential-provider-env) +- [AWS Credential Provider for Node.JS - SSO](../credential-provider-sso) +- [AWS Credential Provider for Node.JS - Web Identity](../credential-provider-web-identity) +- [AWS Credential Provider for Node.JS - Shared Configuration Files](../credential-provider-ini) +- [AWS Credential Provider for Node.JS - Instance and Container Metadata](../credential-provider-imds) +- [AWS Shared Configuration File Loader](../shared-ini-file-loader) diff --git a/amplify/functions/fetchDocuments/node_modules/@aws-sdk/credential-provider-node/dist-cjs/index.js b/amplify/functions/fetchDocuments/node_modules/@aws-sdk/credential-provider-node/dist-cjs/index.js new file mode 100644 index 0000000..be4d2b6 --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@aws-sdk/credential-provider-node/dist-cjs/index.js @@ -0,0 +1,147 @@ +"use strict"; +var __create = Object.create; +var __defProp = Object.defineProperty; +var __getOwnPropDesc = Object.getOwnPropertyDescriptor; +var __getOwnPropNames = Object.getOwnPropertyNames; +var __getProtoOf = Object.getPrototypeOf; +var __hasOwnProp = Object.prototype.hasOwnProperty; +var __name = (target, value) => __defProp(target, "name", { value, configurable: true }); +var __export = (target, all) => { + for (var name in all) + __defProp(target, name, { get: all[name], enumerable: true }); +}; +var __copyProps = (to, from, except, desc) => { + if (from && typeof from === "object" || typeof from === "function") { + for (let key of __getOwnPropNames(from)) + if (!__hasOwnProp.call(to, key) && key !== except) + __defProp(to, key, { get: () => from[key], enumerable: !(desc = __getOwnPropDesc(from, key)) || desc.enumerable }); + } + return to; +}; +var __toESM = (mod, isNodeMode, target) => (target = mod != null ? 
__create(__getProtoOf(mod)) : {}, __copyProps( + // If the importer is in node compatibility mode or this is not an ESM + // file that has been converted to a CommonJS file using a Babel- + // compatible transform (i.e. "__esModule" has not been set), then set + // "default" to the CommonJS "module.exports" for node compatibility. + isNodeMode || !mod || !mod.__esModule ? __defProp(target, "default", { value: mod, enumerable: true }) : target, + mod +)); +var __toCommonJS = (mod) => __copyProps(__defProp({}, "__esModule", { value: true }), mod); + +// src/index.ts +var index_exports = {}; +__export(index_exports, { + credentialsTreatedAsExpired: () => credentialsTreatedAsExpired, + credentialsWillNeedRefresh: () => credentialsWillNeedRefresh, + defaultProvider: () => defaultProvider +}); +module.exports = __toCommonJS(index_exports); + +// src/defaultProvider.ts +var import_credential_provider_env = require("@aws-sdk/credential-provider-env"); + +var import_shared_ini_file_loader = require("@smithy/shared-ini-file-loader"); + +// src/remoteProvider.ts +var import_property_provider = require("@smithy/property-provider"); +var ENV_IMDS_DISABLED = "AWS_EC2_METADATA_DISABLED"; +var remoteProvider = /* @__PURE__ */ __name(async (init) => { + const { ENV_CMDS_FULL_URI, ENV_CMDS_RELATIVE_URI, fromContainerMetadata, fromInstanceMetadata } = await Promise.resolve().then(() => __toESM(require("@smithy/credential-provider-imds"))); + if (process.env[ENV_CMDS_RELATIVE_URI] || process.env[ENV_CMDS_FULL_URI]) { + init.logger?.debug("@aws-sdk/credential-provider-node - remoteProvider::fromHttp/fromContainerMetadata"); + const { fromHttp } = await Promise.resolve().then(() => __toESM(require("@aws-sdk/credential-provider-http"))); + return (0, import_property_provider.chain)(fromHttp(init), fromContainerMetadata(init)); + } + if (process.env[ENV_IMDS_DISABLED] && process.env[ENV_IMDS_DISABLED] !== "false") { + return async () => { + throw new 
import_property_provider.CredentialsProviderError("EC2 Instance Metadata Service access disabled", { logger: init.logger }); + }; + } + init.logger?.debug("@aws-sdk/credential-provider-node - remoteProvider::fromInstanceMetadata"); + return fromInstanceMetadata(init); +}, "remoteProvider"); + +// src/defaultProvider.ts +var multipleCredentialSourceWarningEmitted = false; +var defaultProvider = /* @__PURE__ */ __name((init = {}) => (0, import_property_provider.memoize)( + (0, import_property_provider.chain)( + async () => { + const profile = init.profile ?? process.env[import_shared_ini_file_loader.ENV_PROFILE]; + if (profile) { + const envStaticCredentialsAreSet = process.env[import_credential_provider_env.ENV_KEY] && process.env[import_credential_provider_env.ENV_SECRET]; + if (envStaticCredentialsAreSet) { + if (!multipleCredentialSourceWarningEmitted) { + const warnFn = init.logger?.warn && init.logger?.constructor?.name !== "NoOpLogger" ? init.logger.warn : console.warn; + warnFn( + `@aws-sdk/credential-provider-node - defaultProvider::fromEnv WARNING: + Multiple credential sources detected: + Both AWS_PROFILE and the pair AWS_ACCESS_KEY_ID/AWS_SECRET_ACCESS_KEY static credentials are set. + This SDK will proceed with the AWS_PROFILE value. + + However, a future version may change this behavior to prefer the ENV static credentials. + Please ensure that your environment only sets either the AWS_PROFILE or the + AWS_ACCESS_KEY_ID/AWS_SECRET_ACCESS_KEY pair. 
+` + ); + multipleCredentialSourceWarningEmitted = true; + } + } + throw new import_property_provider.CredentialsProviderError("AWS_PROFILE is set, skipping fromEnv provider.", { + logger: init.logger, + tryNextLink: true + }); + } + init.logger?.debug("@aws-sdk/credential-provider-node - defaultProvider::fromEnv"); + return (0, import_credential_provider_env.fromEnv)(init)(); + }, + async () => { + init.logger?.debug("@aws-sdk/credential-provider-node - defaultProvider::fromSSO"); + const { ssoStartUrl, ssoAccountId, ssoRegion, ssoRoleName, ssoSession } = init; + if (!ssoStartUrl && !ssoAccountId && !ssoRegion && !ssoRoleName && !ssoSession) { + throw new import_property_provider.CredentialsProviderError( + "Skipping SSO provider in default chain (inputs do not include SSO fields).", + { logger: init.logger } + ); + } + const { fromSSO } = await Promise.resolve().then(() => __toESM(require("@aws-sdk/credential-provider-sso"))); + return fromSSO(init)(); + }, + async () => { + init.logger?.debug("@aws-sdk/credential-provider-node - defaultProvider::fromIni"); + const { fromIni } = await Promise.resolve().then(() => __toESM(require("@aws-sdk/credential-provider-ini"))); + return fromIni(init)(); + }, + async () => { + init.logger?.debug("@aws-sdk/credential-provider-node - defaultProvider::fromProcess"); + const { fromProcess } = await Promise.resolve().then(() => __toESM(require("@aws-sdk/credential-provider-process"))); + return fromProcess(init)(); + }, + async () => { + init.logger?.debug("@aws-sdk/credential-provider-node - defaultProvider::fromTokenFile"); + const { fromTokenFile } = await Promise.resolve().then(() => __toESM(require("@aws-sdk/credential-provider-web-identity"))); + return fromTokenFile(init)(); + }, + async () => { + init.logger?.debug("@aws-sdk/credential-provider-node - defaultProvider::remoteProvider"); + return (await remoteProvider(init))(); + }, + async () => { + throw new import_property_provider.CredentialsProviderError("Could not 
load credentials from any providers", { + tryNextLink: false, + logger: init.logger + }); + } + ), + credentialsTreatedAsExpired, + credentialsWillNeedRefresh +), "defaultProvider"); +var credentialsWillNeedRefresh = /* @__PURE__ */ __name((credentials) => credentials?.expiration !== void 0, "credentialsWillNeedRefresh"); +var credentialsTreatedAsExpired = /* @__PURE__ */ __name((credentials) => credentials?.expiration !== void 0 && credentials.expiration.getTime() - Date.now() < 3e5, "credentialsTreatedAsExpired"); +// Annotate the CommonJS export names for ESM import in node: + +0 && (module.exports = { + defaultProvider, + credentialsWillNeedRefresh, + credentialsTreatedAsExpired +}); + diff --git a/amplify/functions/fetchDocuments/node_modules/@aws-sdk/credential-provider-node/dist-es/defaultProvider.js b/amplify/functions/fetchDocuments/node_modules/@aws-sdk/credential-provider-node/dist-es/defaultProvider.js new file mode 100644 index 0000000..d582cf8 --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@aws-sdk/credential-provider-node/dist-es/defaultProvider.js @@ -0,0 +1,62 @@ +import { ENV_KEY, ENV_SECRET, fromEnv } from "@aws-sdk/credential-provider-env"; +import { chain, CredentialsProviderError, memoize } from "@smithy/property-provider"; +import { ENV_PROFILE } from "@smithy/shared-ini-file-loader"; +import { remoteProvider } from "./remoteProvider"; +let multipleCredentialSourceWarningEmitted = false; +export const defaultProvider = (init = {}) => memoize(chain(async () => { + const profile = init.profile ?? process.env[ENV_PROFILE]; + if (profile) { + const envStaticCredentialsAreSet = process.env[ENV_KEY] && process.env[ENV_SECRET]; + if (envStaticCredentialsAreSet) { + if (!multipleCredentialSourceWarningEmitted) { + const warnFn = init.logger?.warn && init.logger?.constructor?.name !== "NoOpLogger" ? 
init.logger.warn : console.warn; + warnFn(`@aws-sdk/credential-provider-node - defaultProvider::fromEnv WARNING: + Multiple credential sources detected: + Both AWS_PROFILE and the pair AWS_ACCESS_KEY_ID/AWS_SECRET_ACCESS_KEY static credentials are set. + This SDK will proceed with the AWS_PROFILE value. + + However, a future version may change this behavior to prefer the ENV static credentials. + Please ensure that your environment only sets either the AWS_PROFILE or the + AWS_ACCESS_KEY_ID/AWS_SECRET_ACCESS_KEY pair. +`); + multipleCredentialSourceWarningEmitted = true; + } + } + throw new CredentialsProviderError("AWS_PROFILE is set, skipping fromEnv provider.", { + logger: init.logger, + tryNextLink: true, + }); + } + init.logger?.debug("@aws-sdk/credential-provider-node - defaultProvider::fromEnv"); + return fromEnv(init)(); +}, async () => { + init.logger?.debug("@aws-sdk/credential-provider-node - defaultProvider::fromSSO"); + const { ssoStartUrl, ssoAccountId, ssoRegion, ssoRoleName, ssoSession } = init; + if (!ssoStartUrl && !ssoAccountId && !ssoRegion && !ssoRoleName && !ssoSession) { + throw new CredentialsProviderError("Skipping SSO provider in default chain (inputs do not include SSO fields).", { logger: init.logger }); + } + const { fromSSO } = await import("@aws-sdk/credential-provider-sso"); + return fromSSO(init)(); +}, async () => { + init.logger?.debug("@aws-sdk/credential-provider-node - defaultProvider::fromIni"); + const { fromIni } = await import("@aws-sdk/credential-provider-ini"); + return fromIni(init)(); +}, async () => { + init.logger?.debug("@aws-sdk/credential-provider-node - defaultProvider::fromProcess"); + const { fromProcess } = await import("@aws-sdk/credential-provider-process"); + return fromProcess(init)(); +}, async () => { + init.logger?.debug("@aws-sdk/credential-provider-node - defaultProvider::fromTokenFile"); + const { fromTokenFile } = await import("@aws-sdk/credential-provider-web-identity"); + return 
fromTokenFile(init)(); +}, async () => { + init.logger?.debug("@aws-sdk/credential-provider-node - defaultProvider::remoteProvider"); + return (await remoteProvider(init))(); +}, async () => { + throw new CredentialsProviderError("Could not load credentials from any providers", { + tryNextLink: false, + logger: init.logger, + }); +}), credentialsTreatedAsExpired, credentialsWillNeedRefresh); +export const credentialsWillNeedRefresh = (credentials) => credentials?.expiration !== undefined; +export const credentialsTreatedAsExpired = (credentials) => credentials?.expiration !== undefined && credentials.expiration.getTime() - Date.now() < 300000; diff --git a/amplify/functions/fetchDocuments/node_modules/@aws-sdk/credential-provider-node/dist-es/index.js b/amplify/functions/fetchDocuments/node_modules/@aws-sdk/credential-provider-node/dist-es/index.js new file mode 100644 index 0000000..c82818e --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@aws-sdk/credential-provider-node/dist-es/index.js @@ -0,0 +1 @@ +export * from "./defaultProvider"; diff --git a/amplify/functions/fetchDocuments/node_modules/@aws-sdk/credential-provider-node/dist-es/remoteProvider.js b/amplify/functions/fetchDocuments/node_modules/@aws-sdk/credential-provider-node/dist-es/remoteProvider.js new file mode 100644 index 0000000..c455bc1 --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@aws-sdk/credential-provider-node/dist-es/remoteProvider.js @@ -0,0 +1,17 @@ +import { chain, CredentialsProviderError } from "@smithy/property-provider"; +export const ENV_IMDS_DISABLED = "AWS_EC2_METADATA_DISABLED"; +export const remoteProvider = async (init) => { + const { ENV_CMDS_FULL_URI, ENV_CMDS_RELATIVE_URI, fromContainerMetadata, fromInstanceMetadata } = await import("@smithy/credential-provider-imds"); + if (process.env[ENV_CMDS_RELATIVE_URI] || process.env[ENV_CMDS_FULL_URI]) { + init.logger?.debug("@aws-sdk/credential-provider-node - 
remoteProvider::fromHttp/fromContainerMetadata"); + const { fromHttp } = await import("@aws-sdk/credential-provider-http"); + return chain(fromHttp(init), fromContainerMetadata(init)); + } + if (process.env[ENV_IMDS_DISABLED] && process.env[ENV_IMDS_DISABLED] !== "false") { + return async () => { + throw new CredentialsProviderError("EC2 Instance Metadata Service access disabled", { logger: init.logger }); + }; + } + init.logger?.debug("@aws-sdk/credential-provider-node - remoteProvider::fromInstanceMetadata"); + return fromInstanceMetadata(init); +}; diff --git a/amplify/functions/fetchDocuments/node_modules/@aws-sdk/credential-provider-node/dist-types/defaultProvider.d.ts b/amplify/functions/fetchDocuments/node_modules/@aws-sdk/credential-provider-node/dist-types/defaultProvider.d.ts new file mode 100644 index 0000000..fd40150 --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@aws-sdk/credential-provider-node/dist-types/defaultProvider.d.ts @@ -0,0 +1,58 @@ +import type { FromHttpOptions } from "@aws-sdk/credential-provider-http"; +import type { FromIniInit } from "@aws-sdk/credential-provider-ini"; +import type { FromProcessInit } from "@aws-sdk/credential-provider-process"; +import type { FromSSOInit, SsoCredentialsParameters } from "@aws-sdk/credential-provider-sso"; +import type { FromTokenFileInit } from "@aws-sdk/credential-provider-web-identity"; +import type { RemoteProviderInit } from "@smithy/credential-provider-imds"; +import { AwsCredentialIdentity, MemoizedProvider } from "@smithy/types"; +/** + * @public + */ +export type DefaultProviderInit = FromIniInit & FromHttpOptions & RemoteProviderInit & FromProcessInit & (FromSSOInit & Partial) & FromTokenFileInit; +/** + * Creates a credential provider that will attempt to find credentials from the + * following sources (listed in order of precedence): + * * Environment variables exposed via `process.env` + * * SSO credentials from token cache + * * Web identity token credentials + * * 
Shared credentials and config ini files + * * The EC2/ECS Instance Metadata Service + * + * The default credential provider will invoke one provider at a time and only + * continue to the next if no credentials have been located. For example, if + * the process finds values defined via the `AWS_ACCESS_KEY_ID` and + * `AWS_SECRET_ACCESS_KEY` environment variables, the files at + * `~/.aws/credentials` and `~/.aws/config` will not be read, nor will any + * messages be sent to the Instance Metadata Service. + * + * @param init Configuration that is passed to each individual + * provider + * + * @see {@link fromEnv} The function used to source credentials from + * environment variables. + * @see {@link fromSSO} The function used to source credentials from + * resolved SSO token cache. + * @see {@link fromTokenFile} The function used to source credentials from + * token file. + * @see {@link fromIni} The function used to source credentials from INI + * files. + * @see {@link fromProcess} The function used to sources credentials from + * credential_process in INI files. + * @see {@link fromInstanceMetadata} The function used to source credentials from the + * EC2 Instance Metadata Service. + * @see {@link fromContainerMetadata} The function used to source credentials from the + * ECS Container Metadata Service. + */ +export declare const defaultProvider: (init?: DefaultProviderInit) => MemoizedProvider; +/** + * @internal + * + * @returns credentials have expiration. + */ +export declare const credentialsWillNeedRefresh: (credentials: AwsCredentialIdentity) => boolean; +/** + * @internal + * + * @returns credentials with less than 5 minutes left. 
+ */ +export declare const credentialsTreatedAsExpired: (credentials: AwsCredentialIdentity) => boolean; diff --git a/amplify/functions/fetchDocuments/node_modules/@aws-sdk/credential-provider-node/dist-types/index.d.ts b/amplify/functions/fetchDocuments/node_modules/@aws-sdk/credential-provider-node/dist-types/index.d.ts new file mode 100644 index 0000000..c82818e --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@aws-sdk/credential-provider-node/dist-types/index.d.ts @@ -0,0 +1 @@ +export * from "./defaultProvider"; diff --git a/amplify/functions/fetchDocuments/node_modules/@aws-sdk/credential-provider-node/dist-types/remoteProvider.d.ts b/amplify/functions/fetchDocuments/node_modules/@aws-sdk/credential-provider-node/dist-types/remoteProvider.d.ts new file mode 100644 index 0000000..4022a4e --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@aws-sdk/credential-provider-node/dist-types/remoteProvider.d.ts @@ -0,0 +1,11 @@ +import type { FromHttpOptions } from "@aws-sdk/credential-provider-http"; +import type { RemoteProviderInit } from "@smithy/credential-provider-imds"; +import type { AwsCredentialIdentityProvider } from "@smithy/types"; +/** + * @internal + */ +export declare const ENV_IMDS_DISABLED = "AWS_EC2_METADATA_DISABLED"; +/** + * @internal + */ +export declare const remoteProvider: (init: RemoteProviderInit | FromHttpOptions) => Promise; diff --git a/amplify/functions/fetchDocuments/node_modules/@aws-sdk/credential-provider-node/dist-types/ts3.4/defaultProvider.d.ts b/amplify/functions/fetchDocuments/node_modules/@aws-sdk/credential-provider-node/dist-types/ts3.4/defaultProvider.d.ts new file mode 100644 index 0000000..e1f1a8d --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@aws-sdk/credential-provider-node/dist-types/ts3.4/defaultProvider.d.ts @@ -0,0 +1,25 @@ +import { FromHttpOptions } from "@aws-sdk/credential-provider-http"; +import { FromIniInit } from "@aws-sdk/credential-provider-ini"; +import { 
FromProcessInit } from "@aws-sdk/credential-provider-process"; +import { + FromSSOInit, + SsoCredentialsParameters, +} from "@aws-sdk/credential-provider-sso"; +import { FromTokenFileInit } from "@aws-sdk/credential-provider-web-identity"; +import { RemoteProviderInit } from "@smithy/credential-provider-imds"; +import { AwsCredentialIdentity, MemoizedProvider } from "@smithy/types"; +export type DefaultProviderInit = FromIniInit & + FromHttpOptions & + RemoteProviderInit & + FromProcessInit & + (FromSSOInit & Partial) & + FromTokenFileInit; +export declare const defaultProvider: ( + init?: DefaultProviderInit +) => MemoizedProvider; +export declare const credentialsWillNeedRefresh: ( + credentials: AwsCredentialIdentity +) => boolean; +export declare const credentialsTreatedAsExpired: ( + credentials: AwsCredentialIdentity +) => boolean; diff --git a/amplify/functions/fetchDocuments/node_modules/@aws-sdk/credential-provider-node/dist-types/ts3.4/index.d.ts b/amplify/functions/fetchDocuments/node_modules/@aws-sdk/credential-provider-node/dist-types/ts3.4/index.d.ts new file mode 100644 index 0000000..c82818e --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@aws-sdk/credential-provider-node/dist-types/ts3.4/index.d.ts @@ -0,0 +1 @@ +export * from "./defaultProvider"; diff --git a/amplify/functions/fetchDocuments/node_modules/@aws-sdk/credential-provider-node/dist-types/ts3.4/remoteProvider.d.ts b/amplify/functions/fetchDocuments/node_modules/@aws-sdk/credential-provider-node/dist-types/ts3.4/remoteProvider.d.ts new file mode 100644 index 0000000..90948cc --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@aws-sdk/credential-provider-node/dist-types/ts3.4/remoteProvider.d.ts @@ -0,0 +1,7 @@ +import { FromHttpOptions } from "@aws-sdk/credential-provider-http"; +import { RemoteProviderInit } from "@smithy/credential-provider-imds"; +import { AwsCredentialIdentityProvider } from "@smithy/types"; +export declare const ENV_IMDS_DISABLED = 
"AWS_EC2_METADATA_DISABLED"; +export declare const remoteProvider: ( + init: RemoteProviderInit | FromHttpOptions +) => Promise; diff --git a/amplify/functions/fetchDocuments/node_modules/@aws-sdk/credential-provider-node/package.json b/amplify/functions/fetchDocuments/node_modules/@aws-sdk/credential-provider-node/package.json new file mode 100644 index 0000000..40b3009 --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@aws-sdk/credential-provider-node/package.json @@ -0,0 +1,70 @@ +{ + "name": "@aws-sdk/credential-provider-node", + "version": "3.803.0", + "description": "AWS credential provider that sources credentials from a Node.JS environment. ", + "engines": { + "node": ">=18.0.0" + }, + "main": "./dist-cjs/index.js", + "module": "./dist-es/index.js", + "scripts": { + "build": "concurrently 'yarn:build:cjs' 'yarn:build:es' 'yarn:build:types'", + "build:cjs": "node ../../scripts/compilation/inline credential-provider-node", + "build:es": "tsc -p tsconfig.es.json", + "build:include:deps": "lerna run --scope $npm_package_name --include-dependencies build", + "build:types": "tsc -p tsconfig.types.json", + "build:types:downlevel": "downlevel-dts dist-types dist-types/ts3.4", + "clean": "rimraf ./dist-* && rimraf *.tsbuildinfo", + "test": "yarn g:vitest run", + "test:integration": "yarn g:jest -c jest.config.integ.js", + "test:watch": "yarn g:vitest watch" + }, + "keywords": [ + "aws", + "credentials" + ], + "author": { + "name": "AWS SDK for JavaScript Team", + "url": "https://aws.amazon.com/javascript/" + }, + "license": "Apache-2.0", + "dependencies": { + "@aws-sdk/credential-provider-env": "3.799.0", + "@aws-sdk/credential-provider-http": "3.799.0", + "@aws-sdk/credential-provider-ini": "3.803.0", + "@aws-sdk/credential-provider-process": "3.799.0", + "@aws-sdk/credential-provider-sso": "3.803.0", + "@aws-sdk/credential-provider-web-identity": "3.803.0", + "@aws-sdk/types": "3.775.0", + "@smithy/credential-provider-imds": "^4.0.2", + 
"@smithy/property-provider": "^4.0.2", + "@smithy/shared-ini-file-loader": "^4.0.2", + "@smithy/types": "^4.2.0", + "tslib": "^2.6.2" + }, + "devDependencies": { + "@tsconfig/recommended": "1.0.1", + "@types/node": "^18.19.69", + "concurrently": "7.0.0", + "downlevel-dts": "0.10.1", + "rimraf": "3.0.2", + "typescript": "~5.2.2" + }, + "types": "./dist-types/index.d.ts", + "typesVersions": { + "<4.0": { + "dist-types/*": [ + "dist-types/ts3.4/*" + ] + } + }, + "files": [ + "dist-*/**" + ], + "homepage": "https://github.com/aws/aws-sdk-js-v3/tree/main/packages/credential-provider-node", + "repository": { + "type": "git", + "url": "https://github.com/aws/aws-sdk-js-v3.git", + "directory": "packages/credential-provider-node" + } +} diff --git a/amplify/functions/fetchDocuments/node_modules/@aws-sdk/credential-provider-process/LICENSE b/amplify/functions/fetchDocuments/node_modules/@aws-sdk/credential-provider-process/LICENSE new file mode 100644 index 0000000..f9a6673 --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@aws-sdk/credential-provider-process/LICENSE @@ -0,0 +1,201 @@ +Apache License + Version 2.0, January 2004 + http://www.apache.org/licenses/ + + TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION + + 1. Definitions. + + "License" shall mean the terms and conditions for use, reproduction, + and distribution as defined by Sections 1 through 9 of this document. + + "Licensor" shall mean the copyright owner or entity authorized by + the copyright owner that is granting the License. + + "Legal Entity" shall mean the union of the acting entity and all + other entities that control, are controlled by, or are under common + control with that entity. 
For the purposes of this definition, + "control" means (i) the power, direct or indirect, to cause the + direction or management of such entity, whether by contract or + otherwise, or (ii) ownership of fifty percent (50%) or more of the + outstanding shares, or (iii) beneficial ownership of such entity. + + "You" (or "Your") shall mean an individual or Legal Entity + exercising permissions granted by this License. + + "Source" form shall mean the preferred form for making modifications, + including but not limited to software source code, documentation + source, and configuration files. + + "Object" form shall mean any form resulting from mechanical + transformation or translation of a Source form, including but + not limited to compiled object code, generated documentation, + and conversions to other media types. + + "Work" shall mean the work of authorship, whether in Source or + Object form, made available under the License, as indicated by a + copyright notice that is included in or attached to the work + (an example is provided in the Appendix below). + + "Derivative Works" shall mean any work, whether in Source or Object + form, that is based on (or derived from) the Work and for which the + editorial revisions, annotations, elaborations, or other modifications + represent, as a whole, an original work of authorship. For the purposes + of this License, Derivative Works shall not include works that remain + separable from, or merely link (or bind by name) to the interfaces of, + the Work and Derivative Works thereof. + + "Contribution" shall mean any work of authorship, including + the original version of the Work and any modifications or additions + to that Work or Derivative Works thereof, that is intentionally + submitted to Licensor for inclusion in the Work by the copyright owner + or by an individual or Legal Entity authorized to submit on behalf of + the copyright owner. 
For the purposes of this definition, "submitted" + means any form of electronic, verbal, or written communication sent + to the Licensor or its representatives, including but not limited to + communication on electronic mailing lists, source code control systems, + and issue tracking systems that are managed by, or on behalf of, the + Licensor for the purpose of discussing and improving the Work, but + excluding communication that is conspicuously marked or otherwise + designated in writing by the copyright owner as "Not a Contribution." + + "Contributor" shall mean Licensor and any individual or Legal Entity + on behalf of whom a Contribution has been received by Licensor and + subsequently incorporated within the Work. + + 2. Grant of Copyright License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + copyright license to reproduce, prepare Derivative Works of, + publicly display, publicly perform, sublicense, and distribute the + Work and such Derivative Works in Source or Object form. + + 3. Grant of Patent License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + (except as stated in this section) patent license to make, have made, + use, offer to sell, sell, import, and otherwise transfer the Work, + where such license applies only to those patent claims licensable + by such Contributor that are necessarily infringed by their + Contribution(s) alone or by combination of their Contribution(s) + with the Work to which such Contribution(s) was submitted. 
If You + institute patent litigation against any entity (including a + cross-claim or counterclaim in a lawsuit) alleging that the Work + or a Contribution incorporated within the Work constitutes direct + or contributory patent infringement, then any patent licenses + granted to You under this License for that Work shall terminate + as of the date such litigation is filed. + + 4. Redistribution. You may reproduce and distribute copies of the + Work or Derivative Works thereof in any medium, with or without + modifications, and in Source or Object form, provided that You + meet the following conditions: + + (a) You must give any other recipients of the Work or + Derivative Works a copy of this License; and + + (b) You must cause any modified files to carry prominent notices + stating that You changed the files; and + + (c) You must retain, in the Source form of any Derivative Works + that You distribute, all copyright, patent, trademark, and + attribution notices from the Source form of the Work, + excluding those notices that do not pertain to any part of + the Derivative Works; and + + (d) If the Work includes a "NOTICE" text file as part of its + distribution, then any Derivative Works that You distribute must + include a readable copy of the attribution notices contained + within such NOTICE file, excluding those notices that do not + pertain to any part of the Derivative Works, in at least one + of the following places: within a NOTICE text file distributed + as part of the Derivative Works; within the Source form or + documentation, if provided along with the Derivative Works; or, + within a display generated by the Derivative Works, if and + wherever such third-party notices normally appear. The contents + of the NOTICE file are for informational purposes only and + do not modify the License. 
You may add Your own attribution + notices within Derivative Works that You distribute, alongside + or as an addendum to the NOTICE text from the Work, provided + that such additional attribution notices cannot be construed + as modifying the License. + + You may add Your own copyright statement to Your modifications and + may provide additional or different license terms and conditions + for use, reproduction, or distribution of Your modifications, or + for any such Derivative Works as a whole, provided Your use, + reproduction, and distribution of the Work otherwise complies with + the conditions stated in this License. + + 5. Submission of Contributions. Unless You explicitly state otherwise, + any Contribution intentionally submitted for inclusion in the Work + by You to the Licensor shall be under the terms and conditions of + this License, without any additional terms or conditions. + Notwithstanding the above, nothing herein shall supersede or modify + the terms of any separate license agreement you may have executed + with Licensor regarding such Contributions. + + 6. Trademarks. This License does not grant permission to use the trade + names, trademarks, service marks, or product names of the Licensor, + except as required for reasonable and customary use in describing the + origin of the Work and reproducing the content of the NOTICE file. + + 7. Disclaimer of Warranty. Unless required by applicable law or + agreed to in writing, Licensor provides the Work (and each + Contributor provides its Contributions) on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or + implied, including, without limitation, any warranties or conditions + of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A + PARTICULAR PURPOSE. You are solely responsible for determining the + appropriateness of using or redistributing the Work and assume any + risks associated with Your exercise of permissions under this License. + + 8. 
Limitation of Liability. In no event and under no legal theory, + whether in tort (including negligence), contract, or otherwise, + unless required by applicable law (such as deliberate and grossly + negligent acts) or agreed to in writing, shall any Contributor be + liable to You for damages, including any direct, indirect, special, + incidental, or consequential damages of any character arising as a + result of this License or out of the use or inability to use the + Work (including but not limited to damages for loss of goodwill, + work stoppage, computer failure or malfunction, or any and all + other commercial damages or losses), even if such Contributor + has been advised of the possibility of such damages. + + 9. Accepting Warranty or Additional Liability. While redistributing + the Work or Derivative Works thereof, You may choose to offer, + and charge a fee for, acceptance of support, warranty, indemnity, + or other liability obligations and/or rights consistent with this + License. However, in accepting such obligations, You may act only + on Your own behalf and on Your sole responsibility, not on behalf + of any other Contributor, and only if You agree to indemnify, + defend, and hold each Contributor harmless for any liability + incurred by, or claims asserted against, such Contributor by reason + of your accepting any such warranty or additional liability. + + END OF TERMS AND CONDITIONS + + APPENDIX: How to apply the Apache License to your work. + + To apply the Apache License to your work, attach the following + boilerplate notice, with the fields enclosed by brackets "{}" + replaced with your own identifying information. (Don't include + the brackets!) The text should be enclosed in the appropriate + comment syntax for the file format. We also recommend that a + file or class name and description of purpose be included on the + same "printed page" as the copyright notice for easier + identification within third-party archives. 
+ + Copyright 2019 Amazon.com, Inc. or its affiliates. All Rights Reserved. + + Licensed under the Apache License, Version 2.0 (the "License"); + you may not use this file except in compliance with the License. + You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + + Unless required by applicable law or agreed to in writing, software + distributed under the License is distributed on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + See the License for the specific language governing permissions and + limitations under the License. \ No newline at end of file diff --git a/amplify/functions/fetchDocuments/node_modules/@aws-sdk/credential-provider-process/README.md b/amplify/functions/fetchDocuments/node_modules/@aws-sdk/credential-provider-process/README.md new file mode 100644 index 0000000..4e9d9bd --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@aws-sdk/credential-provider-process/README.md @@ -0,0 +1,11 @@ +# @aws-sdk/credential-provider-process + +[![NPM version](https://img.shields.io/npm/v/@aws-sdk/credential-provider-process/latest.svg)](https://www.npmjs.com/package/@aws-sdk/credential-provider-process) +[![NPM downloads](https://img.shields.io/npm/dm/@aws-sdk/credential-provider-process.svg)](https://www.npmjs.com/package/@aws-sdk/credential-provider-process) + +> An internal package + +## Usage + +You probably shouldn't, at least directly. Please use [@aws-sdk/credential-providers](https://www.npmjs.com/package/@aws-sdk/credential-providers) +instead. 
diff --git a/amplify/functions/fetchDocuments/node_modules/@aws-sdk/credential-provider-process/dist-cjs/index.js b/amplify/functions/fetchDocuments/node_modules/@aws-sdk/credential-provider-process/dist-cjs/index.js new file mode 100644 index 0000000..57146de --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@aws-sdk/credential-provider-process/dist-cjs/index.js @@ -0,0 +1,114 @@ +"use strict"; +var __defProp = Object.defineProperty; +var __getOwnPropDesc = Object.getOwnPropertyDescriptor; +var __getOwnPropNames = Object.getOwnPropertyNames; +var __hasOwnProp = Object.prototype.hasOwnProperty; +var __name = (target, value) => __defProp(target, "name", { value, configurable: true }); +var __export = (target, all) => { + for (var name in all) + __defProp(target, name, { get: all[name], enumerable: true }); +}; +var __copyProps = (to, from, except, desc) => { + if (from && typeof from === "object" || typeof from === "function") { + for (let key of __getOwnPropNames(from)) + if (!__hasOwnProp.call(to, key) && key !== except) + __defProp(to, key, { get: () => from[key], enumerable: !(desc = __getOwnPropDesc(from, key)) || desc.enumerable }); + } + return to; +}; +var __toCommonJS = (mod) => __copyProps(__defProp({}, "__esModule", { value: true }), mod); + +// src/index.ts +var index_exports = {}; +__export(index_exports, { + fromProcess: () => fromProcess +}); +module.exports = __toCommonJS(index_exports); + +// src/fromProcess.ts +var import_shared_ini_file_loader = require("@smithy/shared-ini-file-loader"); + +// src/resolveProcessCredentials.ts +var import_property_provider = require("@smithy/property-provider"); +var import_child_process = require("child_process"); +var import_util = require("util"); + +// src/getValidatedProcessCredentials.ts +var import_client = require("@aws-sdk/core/client"); +var getValidatedProcessCredentials = /* @__PURE__ */ __name((profileName, data, profiles) => { + if (data.Version !== 1) { + throw Error(`Profile 
${profileName} credential_process did not return Version 1.`); + } + if (data.AccessKeyId === void 0 || data.SecretAccessKey === void 0) { + throw Error(`Profile ${profileName} credential_process returned invalid credentials.`); + } + if (data.Expiration) { + const currentTime = /* @__PURE__ */ new Date(); + const expireTime = new Date(data.Expiration); + if (expireTime < currentTime) { + throw Error(`Profile ${profileName} credential_process returned expired credentials.`); + } + } + let accountId = data.AccountId; + if (!accountId && profiles?.[profileName]?.aws_account_id) { + accountId = profiles[profileName].aws_account_id; + } + const credentials = { + accessKeyId: data.AccessKeyId, + secretAccessKey: data.SecretAccessKey, + ...data.SessionToken && { sessionToken: data.SessionToken }, + ...data.Expiration && { expiration: new Date(data.Expiration) }, + ...data.CredentialScope && { credentialScope: data.CredentialScope }, + ...accountId && { accountId } + }; + (0, import_client.setCredentialFeature)(credentials, "CREDENTIALS_PROCESS", "w"); + return credentials; +}, "getValidatedProcessCredentials"); + +// src/resolveProcessCredentials.ts +var resolveProcessCredentials = /* @__PURE__ */ __name(async (profileName, profiles, logger) => { + const profile = profiles[profileName]; + if (profiles[profileName]) { + const credentialProcess = profile["credential_process"]; + if (credentialProcess !== void 0) { + const execPromise = (0, import_util.promisify)(import_child_process.exec); + try { + const { stdout } = await execPromise(credentialProcess); + let data; + try { + data = JSON.parse(stdout.trim()); + } catch { + throw Error(`Profile ${profileName} credential_process returned invalid JSON.`); + } + return getValidatedProcessCredentials(profileName, data, profiles); + } catch (error) { + throw new import_property_provider.CredentialsProviderError(error.message, { logger }); + } + } else { + throw new import_property_provider.CredentialsProviderError(`Profile 
${profileName} did not contain credential_process.`, { logger }); + } + } else { + throw new import_property_provider.CredentialsProviderError(`Profile ${profileName} could not be found in shared credentials file.`, { + logger + }); + } +}, "resolveProcessCredentials"); + +// src/fromProcess.ts +var fromProcess = /* @__PURE__ */ __name((init = {}) => async ({ callerClientConfig } = {}) => { + init.logger?.debug("@aws-sdk/credential-provider-process - fromProcess"); + const profiles = await (0, import_shared_ini_file_loader.parseKnownFiles)(init); + return resolveProcessCredentials( + (0, import_shared_ini_file_loader.getProfileName)({ + profile: init.profile ?? callerClientConfig?.profile + }), + profiles, + init.logger + ); +}, "fromProcess"); +// Annotate the CommonJS export names for ESM import in node: + +0 && (module.exports = { + fromProcess +}); + diff --git a/amplify/functions/fetchDocuments/node_modules/@aws-sdk/credential-provider-process/dist-es/ProcessCredentials.js b/amplify/functions/fetchDocuments/node_modules/@aws-sdk/credential-provider-process/dist-es/ProcessCredentials.js new file mode 100644 index 0000000..cb0ff5c --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@aws-sdk/credential-provider-process/dist-es/ProcessCredentials.js @@ -0,0 +1 @@ +export {}; diff --git a/amplify/functions/fetchDocuments/node_modules/@aws-sdk/credential-provider-process/dist-es/fromProcess.js b/amplify/functions/fetchDocuments/node_modules/@aws-sdk/credential-provider-process/dist-es/fromProcess.js new file mode 100644 index 0000000..9e1e800 --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@aws-sdk/credential-provider-process/dist-es/fromProcess.js @@ -0,0 +1,9 @@ +import { getProfileName, parseKnownFiles } from "@smithy/shared-ini-file-loader"; +import { resolveProcessCredentials } from "./resolveProcessCredentials"; +export const fromProcess = (init = {}) => async ({ callerClientConfig } = {}) => { + 
init.logger?.debug("@aws-sdk/credential-provider-process - fromProcess"); + const profiles = await parseKnownFiles(init); + return resolveProcessCredentials(getProfileName({ + profile: init.profile ?? callerClientConfig?.profile, + }), profiles, init.logger); +}; diff --git a/amplify/functions/fetchDocuments/node_modules/@aws-sdk/credential-provider-process/dist-es/getValidatedProcessCredentials.js b/amplify/functions/fetchDocuments/node_modules/@aws-sdk/credential-provider-process/dist-es/getValidatedProcessCredentials.js new file mode 100644 index 0000000..caa0dd1 --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@aws-sdk/credential-provider-process/dist-es/getValidatedProcessCredentials.js @@ -0,0 +1,30 @@ +import { setCredentialFeature } from "@aws-sdk/core/client"; +export const getValidatedProcessCredentials = (profileName, data, profiles) => { + if (data.Version !== 1) { + throw Error(`Profile ${profileName} credential_process did not return Version 1.`); + } + if (data.AccessKeyId === undefined || data.SecretAccessKey === undefined) { + throw Error(`Profile ${profileName} credential_process returned invalid credentials.`); + } + if (data.Expiration) { + const currentTime = new Date(); + const expireTime = new Date(data.Expiration); + if (expireTime < currentTime) { + throw Error(`Profile ${profileName} credential_process returned expired credentials.`); + } + } + let accountId = data.AccountId; + if (!accountId && profiles?.[profileName]?.aws_account_id) { + accountId = profiles[profileName].aws_account_id; + } + const credentials = { + accessKeyId: data.AccessKeyId, + secretAccessKey: data.SecretAccessKey, + ...(data.SessionToken && { sessionToken: data.SessionToken }), + ...(data.Expiration && { expiration: new Date(data.Expiration) }), + ...(data.CredentialScope && { credentialScope: data.CredentialScope }), + ...(accountId && { accountId }), + }; + setCredentialFeature(credentials, "CREDENTIALS_PROCESS", "w"); + return credentials; +}; 
diff --git a/amplify/functions/fetchDocuments/node_modules/@aws-sdk/credential-provider-process/dist-es/index.js b/amplify/functions/fetchDocuments/node_modules/@aws-sdk/credential-provider-process/dist-es/index.js new file mode 100644 index 0000000..b921d35 --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@aws-sdk/credential-provider-process/dist-es/index.js @@ -0,0 +1 @@ +export * from "./fromProcess"; diff --git a/amplify/functions/fetchDocuments/node_modules/@aws-sdk/credential-provider-process/dist-es/resolveProcessCredentials.js b/amplify/functions/fetchDocuments/node_modules/@aws-sdk/credential-provider-process/dist-es/resolveProcessCredentials.js new file mode 100644 index 0000000..334e0af --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@aws-sdk/credential-provider-process/dist-es/resolveProcessCredentials.js @@ -0,0 +1,35 @@ +import { CredentialsProviderError } from "@smithy/property-provider"; +import { exec } from "child_process"; +import { promisify } from "util"; +import { getValidatedProcessCredentials } from "./getValidatedProcessCredentials"; +export const resolveProcessCredentials = async (profileName, profiles, logger) => { + const profile = profiles[profileName]; + if (profiles[profileName]) { + const credentialProcess = profile["credential_process"]; + if (credentialProcess !== undefined) { + const execPromise = promisify(exec); + try { + const { stdout } = await execPromise(credentialProcess); + let data; + try { + data = JSON.parse(stdout.trim()); + } + catch { + throw Error(`Profile ${profileName} credential_process returned invalid JSON.`); + } + return getValidatedProcessCredentials(profileName, data, profiles); + } + catch (error) { + throw new CredentialsProviderError(error.message, { logger }); + } + } + else { + throw new CredentialsProviderError(`Profile ${profileName} did not contain credential_process.`, { logger }); + } + } + else { + throw new CredentialsProviderError(`Profile ${profileName} could 
not be found in shared credentials file.`, { + logger, + }); + } +}; diff --git a/amplify/functions/fetchDocuments/node_modules/@aws-sdk/credential-provider-process/dist-types/ProcessCredentials.d.ts b/amplify/functions/fetchDocuments/node_modules/@aws-sdk/credential-provider-process/dist-types/ProcessCredentials.d.ts new file mode 100644 index 0000000..a4e6b46 --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@aws-sdk/credential-provider-process/dist-types/ProcessCredentials.d.ts @@ -0,0 +1,12 @@ +/** + * @internal + */ +export type ProcessCredentials = { + Version: number; + AccessKeyId: string; + SecretAccessKey: string; + SessionToken?: string; + Expiration?: number; + CredentialScope?: string; + AccountId?: string; +}; diff --git a/amplify/functions/fetchDocuments/node_modules/@aws-sdk/credential-provider-process/dist-types/fromProcess.d.ts b/amplify/functions/fetchDocuments/node_modules/@aws-sdk/credential-provider-process/dist-types/fromProcess.d.ts new file mode 100644 index 0000000..2177630 --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@aws-sdk/credential-provider-process/dist-types/fromProcess.d.ts @@ -0,0 +1,14 @@ +import type { CredentialProviderOptions, RuntimeConfigAwsCredentialIdentityProvider } from "@aws-sdk/types"; +import { SourceProfileInit } from "@smithy/shared-ini-file-loader"; +/** + * @internal + */ +export interface FromProcessInit extends SourceProfileInit, CredentialProviderOptions { +} +/** + * @internal + * + * Creates a credential provider that will read from a credential_process specified + * in ini files. 
+ */ +export declare const fromProcess: (init?: FromProcessInit) => RuntimeConfigAwsCredentialIdentityProvider; diff --git a/amplify/functions/fetchDocuments/node_modules/@aws-sdk/credential-provider-process/dist-types/getValidatedProcessCredentials.d.ts b/amplify/functions/fetchDocuments/node_modules/@aws-sdk/credential-provider-process/dist-types/getValidatedProcessCredentials.d.ts new file mode 100644 index 0000000..1e86d6b --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@aws-sdk/credential-provider-process/dist-types/getValidatedProcessCredentials.d.ts @@ -0,0 +1,6 @@ +import { AwsCredentialIdentity, ParsedIniData } from "@smithy/types"; +import { ProcessCredentials } from "./ProcessCredentials"; +/** + * @internal + */ +export declare const getValidatedProcessCredentials: (profileName: string, data: ProcessCredentials, profiles: ParsedIniData) => AwsCredentialIdentity; diff --git a/amplify/functions/fetchDocuments/node_modules/@aws-sdk/credential-provider-process/dist-types/index.d.ts b/amplify/functions/fetchDocuments/node_modules/@aws-sdk/credential-provider-process/dist-types/index.d.ts new file mode 100644 index 0000000..adad939 --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@aws-sdk/credential-provider-process/dist-types/index.d.ts @@ -0,0 +1,4 @@ +/** + * @internal + */ +export * from "./fromProcess"; diff --git a/amplify/functions/fetchDocuments/node_modules/@aws-sdk/credential-provider-process/dist-types/resolveProcessCredentials.d.ts b/amplify/functions/fetchDocuments/node_modules/@aws-sdk/credential-provider-process/dist-types/resolveProcessCredentials.d.ts new file mode 100644 index 0000000..4f69ca7 --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@aws-sdk/credential-provider-process/dist-types/resolveProcessCredentials.d.ts @@ -0,0 +1,5 @@ +import { AwsCredentialIdentity, Logger, ParsedIniData } from "@smithy/types"; +/** + * @internal + */ +export declare const resolveProcessCredentials: 
(profileName: string, profiles: ParsedIniData, logger?: Logger) => Promise; diff --git a/amplify/functions/fetchDocuments/node_modules/@aws-sdk/credential-provider-process/dist-types/ts3.4/ProcessCredentials.d.ts b/amplify/functions/fetchDocuments/node_modules/@aws-sdk/credential-provider-process/dist-types/ts3.4/ProcessCredentials.d.ts new file mode 100644 index 0000000..45acf5e --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@aws-sdk/credential-provider-process/dist-types/ts3.4/ProcessCredentials.d.ts @@ -0,0 +1,9 @@ +export type ProcessCredentials = { + Version: number; + AccessKeyId: string; + SecretAccessKey: string; + SessionToken?: string; + Expiration?: number; + CredentialScope?: string; + AccountId?: string; +}; diff --git a/amplify/functions/fetchDocuments/node_modules/@aws-sdk/credential-provider-process/dist-types/ts3.4/fromProcess.d.ts b/amplify/functions/fetchDocuments/node_modules/@aws-sdk/credential-provider-process/dist-types/ts3.4/fromProcess.d.ts new file mode 100644 index 0000000..8e39656 --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@aws-sdk/credential-provider-process/dist-types/ts3.4/fromProcess.d.ts @@ -0,0 +1,11 @@ +import { + CredentialProviderOptions, + RuntimeConfigAwsCredentialIdentityProvider, +} from "@aws-sdk/types"; +import { SourceProfileInit } from "@smithy/shared-ini-file-loader"; +export interface FromProcessInit + extends SourceProfileInit, + CredentialProviderOptions {} +export declare const fromProcess: ( + init?: FromProcessInit +) => RuntimeConfigAwsCredentialIdentityProvider; diff --git a/amplify/functions/fetchDocuments/node_modules/@aws-sdk/credential-provider-process/dist-types/ts3.4/getValidatedProcessCredentials.d.ts b/amplify/functions/fetchDocuments/node_modules/@aws-sdk/credential-provider-process/dist-types/ts3.4/getValidatedProcessCredentials.d.ts new file mode 100644 index 0000000..f44c81c --- /dev/null +++ 
b/amplify/functions/fetchDocuments/node_modules/@aws-sdk/credential-provider-process/dist-types/ts3.4/getValidatedProcessCredentials.d.ts @@ -0,0 +1,7 @@ +import { AwsCredentialIdentity, ParsedIniData } from "@smithy/types"; +import { ProcessCredentials } from "./ProcessCredentials"; +export declare const getValidatedProcessCredentials: ( + profileName: string, + data: ProcessCredentials, + profiles: ParsedIniData +) => AwsCredentialIdentity; diff --git a/amplify/functions/fetchDocuments/node_modules/@aws-sdk/credential-provider-process/dist-types/ts3.4/index.d.ts b/amplify/functions/fetchDocuments/node_modules/@aws-sdk/credential-provider-process/dist-types/ts3.4/index.d.ts new file mode 100644 index 0000000..b921d35 --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@aws-sdk/credential-provider-process/dist-types/ts3.4/index.d.ts @@ -0,0 +1 @@ +export * from "./fromProcess"; diff --git a/amplify/functions/fetchDocuments/node_modules/@aws-sdk/credential-provider-process/dist-types/ts3.4/resolveProcessCredentials.d.ts b/amplify/functions/fetchDocuments/node_modules/@aws-sdk/credential-provider-process/dist-types/ts3.4/resolveProcessCredentials.d.ts new file mode 100644 index 0000000..a204db4 --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@aws-sdk/credential-provider-process/dist-types/ts3.4/resolveProcessCredentials.d.ts @@ -0,0 +1,6 @@ +import { AwsCredentialIdentity, Logger, ParsedIniData } from "@smithy/types"; +export declare const resolveProcessCredentials: ( + profileName: string, + profiles: ParsedIniData, + logger?: Logger +) => Promise; diff --git a/amplify/functions/fetchDocuments/node_modules/@aws-sdk/credential-provider-process/package.json b/amplify/functions/fetchDocuments/node_modules/@aws-sdk/credential-provider-process/package.json new file mode 100644 index 0000000..fb1f383 --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@aws-sdk/credential-provider-process/package.json @@ -0,0 +1,63 @@ +{ + 
"name": "@aws-sdk/credential-provider-process", + "version": "3.799.0", + "description": "AWS credential provider that sources credential_process from ~/.aws/credentials and ~/.aws/config", + "main": "./dist-cjs/index.js", + "module": "./dist-es/index.js", + "scripts": { + "build": "concurrently 'yarn:build:cjs' 'yarn:build:es' 'yarn:build:types'", + "build:cjs": "node ../../scripts/compilation/inline credential-provider-process", + "build:es": "tsc -p tsconfig.es.json", + "build:include:deps": "lerna run --scope $npm_package_name --include-dependencies build", + "build:types": "tsc -p tsconfig.types.json", + "build:types:downlevel": "downlevel-dts dist-types dist-types/ts3.4", + "clean": "rimraf ./dist-* && rimraf *.tsbuildinfo", + "test": "yarn g:vitest run", + "test:watch": "yarn g:vitest watch" + }, + "keywords": [ + "aws", + "credentials" + ], + "author": { + "name": "AWS SDK for JavaScript Team", + "url": "https://aws.amazon.com/javascript/" + }, + "license": "Apache-2.0", + "dependencies": { + "@aws-sdk/core": "3.799.0", + "@aws-sdk/types": "3.775.0", + "@smithy/property-provider": "^4.0.2", + "@smithy/shared-ini-file-loader": "^4.0.2", + "@smithy/types": "^4.2.0", + "tslib": "^2.6.2" + }, + "devDependencies": { + "@tsconfig/recommended": "1.0.1", + "@types/node": "^18.19.69", + "concurrently": "7.0.0", + "downlevel-dts": "0.10.1", + "rimraf": "3.0.2", + "typescript": "~5.2.2" + }, + "types": "./dist-types/index.d.ts", + "engines": { + "node": ">=18.0.0" + }, + "typesVersions": { + "<4.0": { + "dist-types/*": [ + "dist-types/ts3.4/*" + ] + } + }, + "files": [ + "dist-*/**" + ], + "homepage": "https://github.com/aws/aws-sdk-js-v3/tree/main/packages/credential-provider-process", + "repository": { + "type": "git", + "url": "https://github.com/aws/aws-sdk-js-v3.git", + "directory": "packages/credential-provider-process" + } +} diff --git a/amplify/functions/fetchDocuments/node_modules/@aws-sdk/credential-provider-sso/LICENSE 
b/amplify/functions/fetchDocuments/node_modules/@aws-sdk/credential-provider-sso/LICENSE new file mode 100644 index 0000000..f9a6673 --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@aws-sdk/credential-provider-sso/LICENSE @@ -0,0 +1,201 @@ +Apache License + Version 2.0, January 2004 + http://www.apache.org/licenses/ + + TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION + + 1. Definitions. + + "License" shall mean the terms and conditions for use, reproduction, + and distribution as defined by Sections 1 through 9 of this document. + + "Licensor" shall mean the copyright owner or entity authorized by + the copyright owner that is granting the License. + + "Legal Entity" shall mean the union of the acting entity and all + other entities that control, are controlled by, or are under common + control with that entity. For the purposes of this definition, + "control" means (i) the power, direct or indirect, to cause the + direction or management of such entity, whether by contract or + otherwise, or (ii) ownership of fifty percent (50%) or more of the + outstanding shares, or (iii) beneficial ownership of such entity. + + "You" (or "Your") shall mean an individual or Legal Entity + exercising permissions granted by this License. + + "Source" form shall mean the preferred form for making modifications, + including but not limited to software source code, documentation + source, and configuration files. + + "Object" form shall mean any form resulting from mechanical + transformation or translation of a Source form, including but + not limited to compiled object code, generated documentation, + and conversions to other media types. + + "Work" shall mean the work of authorship, whether in Source or + Object form, made available under the License, as indicated by a + copyright notice that is included in or attached to the work + (an example is provided in the Appendix below). 
+ + "Derivative Works" shall mean any work, whether in Source or Object + form, that is based on (or derived from) the Work and for which the + editorial revisions, annotations, elaborations, or other modifications + represent, as a whole, an original work of authorship. For the purposes + of this License, Derivative Works shall not include works that remain + separable from, or merely link (or bind by name) to the interfaces of, + the Work and Derivative Works thereof. + + "Contribution" shall mean any work of authorship, including + the original version of the Work and any modifications or additions + to that Work or Derivative Works thereof, that is intentionally + submitted to Licensor for inclusion in the Work by the copyright owner + or by an individual or Legal Entity authorized to submit on behalf of + the copyright owner. For the purposes of this definition, "submitted" + means any form of electronic, verbal, or written communication sent + to the Licensor or its representatives, including but not limited to + communication on electronic mailing lists, source code control systems, + and issue tracking systems that are managed by, or on behalf of, the + Licensor for the purpose of discussing and improving the Work, but + excluding communication that is conspicuously marked or otherwise + designated in writing by the copyright owner as "Not a Contribution." + + "Contributor" shall mean Licensor and any individual or Legal Entity + on behalf of whom a Contribution has been received by Licensor and + subsequently incorporated within the Work. + + 2. Grant of Copyright License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + copyright license to reproduce, prepare Derivative Works of, + publicly display, publicly perform, sublicense, and distribute the + Work and such Derivative Works in Source or Object form. + + 3. 
Grant of Patent License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + (except as stated in this section) patent license to make, have made, + use, offer to sell, sell, import, and otherwise transfer the Work, + where such license applies only to those patent claims licensable + by such Contributor that are necessarily infringed by their + Contribution(s) alone or by combination of their Contribution(s) + with the Work to which such Contribution(s) was submitted. If You + institute patent litigation against any entity (including a + cross-claim or counterclaim in a lawsuit) alleging that the Work + or a Contribution incorporated within the Work constitutes direct + or contributory patent infringement, then any patent licenses + granted to You under this License for that Work shall terminate + as of the date such litigation is filed. + + 4. Redistribution. You may reproduce and distribute copies of the + Work or Derivative Works thereof in any medium, with or without + modifications, and in Source or Object form, provided that You + meet the following conditions: + + (a) You must give any other recipients of the Work or + Derivative Works a copy of this License; and + + (b) You must cause any modified files to carry prominent notices + stating that You changed the files; and + + (c) You must retain, in the Source form of any Derivative Works + that You distribute, all copyright, patent, trademark, and + attribution notices from the Source form of the Work, + excluding those notices that do not pertain to any part of + the Derivative Works; and + + (d) If the Work includes a "NOTICE" text file as part of its + distribution, then any Derivative Works that You distribute must + include a readable copy of the attribution notices contained + within such NOTICE file, excluding those notices that do not + pertain to any part of the Derivative 
Works, in at least one + of the following places: within a NOTICE text file distributed + as part of the Derivative Works; within the Source form or + documentation, if provided along with the Derivative Works; or, + within a display generated by the Derivative Works, if and + wherever such third-party notices normally appear. The contents + of the NOTICE file are for informational purposes only and + do not modify the License. You may add Your own attribution + notices within Derivative Works that You distribute, alongside + or as an addendum to the NOTICE text from the Work, provided + that such additional attribution notices cannot be construed + as modifying the License. + + You may add Your own copyright statement to Your modifications and + may provide additional or different license terms and conditions + for use, reproduction, or distribution of Your modifications, or + for any such Derivative Works as a whole, provided Your use, + reproduction, and distribution of the Work otherwise complies with + the conditions stated in this License. + + 5. Submission of Contributions. Unless You explicitly state otherwise, + any Contribution intentionally submitted for inclusion in the Work + by You to the Licensor shall be under the terms and conditions of + this License, without any additional terms or conditions. + Notwithstanding the above, nothing herein shall supersede or modify + the terms of any separate license agreement you may have executed + with Licensor regarding such Contributions. + + 6. Trademarks. This License does not grant permission to use the trade + names, trademarks, service marks, or product names of the Licensor, + except as required for reasonable and customary use in describing the + origin of the Work and reproducing the content of the NOTICE file. + + 7. Disclaimer of Warranty. 
Unless required by applicable law or + agreed to in writing, Licensor provides the Work (and each + Contributor provides its Contributions) on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or + implied, including, without limitation, any warranties or conditions + of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A + PARTICULAR PURPOSE. You are solely responsible for determining the + appropriateness of using or redistributing the Work and assume any + risks associated with Your exercise of permissions under this License. + + 8. Limitation of Liability. In no event and under no legal theory, + whether in tort (including negligence), contract, or otherwise, + unless required by applicable law (such as deliberate and grossly + negligent acts) or agreed to in writing, shall any Contributor be + liable to You for damages, including any direct, indirect, special, + incidental, or consequential damages of any character arising as a + result of this License or out of the use or inability to use the + Work (including but not limited to damages for loss of goodwill, + work stoppage, computer failure or malfunction, or any and all + other commercial damages or losses), even if such Contributor + has been advised of the possibility of such damages. + + 9. Accepting Warranty or Additional Liability. While redistributing + the Work or Derivative Works thereof, You may choose to offer, + and charge a fee for, acceptance of support, warranty, indemnity, + or other liability obligations and/or rights consistent with this + License. However, in accepting such obligations, You may act only + on Your own behalf and on Your sole responsibility, not on behalf + of any other Contributor, and only if You agree to indemnify, + defend, and hold each Contributor harmless for any liability + incurred by, or claims asserted against, such Contributor by reason + of your accepting any such warranty or additional liability. 
+ + END OF TERMS AND CONDITIONS + + APPENDIX: How to apply the Apache License to your work. + + To apply the Apache License to your work, attach the following + boilerplate notice, with the fields enclosed by brackets "{}" + replaced with your own identifying information. (Don't include + the brackets!) The text should be enclosed in the appropriate + comment syntax for the file format. We also recommend that a + file or class name and description of purpose be included on the + same "printed page" as the copyright notice for easier + identification within third-party archives. + + Copyright 2019 Amazon.com, Inc. or its affiliates. All Rights Reserved. + + Licensed under the Apache License, Version 2.0 (the "License"); + you may not use this file except in compliance with the License. + You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + + Unless required by applicable law or agreed to in writing, software + distributed under the License is distributed on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + See the License for the specific language governing permissions and + limitations under the License. 
\ No newline at end of file diff --git a/amplify/functions/fetchDocuments/node_modules/@aws-sdk/credential-provider-sso/README.md b/amplify/functions/fetchDocuments/node_modules/@aws-sdk/credential-provider-sso/README.md new file mode 100644 index 0000000..aba3fa8 --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@aws-sdk/credential-provider-sso/README.md @@ -0,0 +1,11 @@ +# @aws-sdk/credential-provider-sso + +[![NPM version](https://img.shields.io/npm/v/@aws-sdk/credential-provider-sso/latest.svg)](https://www.npmjs.com/package/@aws-sdk/credential-provider-sso) +[![NPM downloads](https://img.shields.io/npm/dm/@aws-sdk/credential-provider-sso.svg)](https://www.npmjs.com/package/@aws-sdk/credential-provider-sso) + +> An internal package + +## Usage + +You probably shouldn't, at least directly. Please use [@aws-sdk/credential-providers](https://www.npmjs.com/package/@aws-sdk/credential-providers) +instead. diff --git a/amplify/functions/fetchDocuments/node_modules/@aws-sdk/credential-provider-sso/dist-cjs/index.js b/amplify/functions/fetchDocuments/node_modules/@aws-sdk/credential-provider-sso/dist-cjs/index.js new file mode 100644 index 0000000..ab7549a --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@aws-sdk/credential-provider-sso/dist-cjs/index.js @@ -0,0 +1,246 @@ +"use strict"; +var __defProp = Object.defineProperty; +var __getOwnPropDesc = Object.getOwnPropertyDescriptor; +var __getOwnPropNames = Object.getOwnPropertyNames; +var __hasOwnProp = Object.prototype.hasOwnProperty; +var __name = (target, value) => __defProp(target, "name", { value, configurable: true }); +var __esm = (fn, res) => function __init() { + return fn && (res = (0, fn[__getOwnPropNames(fn)[0]])(fn = 0)), res; +}; +var __export = (target, all) => { + for (var name in all) + __defProp(target, name, { get: all[name], enumerable: true }); +}; +var __copyProps = (to, from, except, desc) => { + if (from && typeof from === "object" || typeof from === "function") 
{ + for (let key of __getOwnPropNames(from)) + if (!__hasOwnProp.call(to, key) && key !== except) + __defProp(to, key, { get: () => from[key], enumerable: !(desc = __getOwnPropDesc(from, key)) || desc.enumerable }); + } + return to; +}; +var __toCommonJS = (mod) => __copyProps(__defProp({}, "__esModule", { value: true }), mod); + +// src/loadSso.ts +var loadSso_exports = {}; +__export(loadSso_exports, { + GetRoleCredentialsCommand: () => import_client_sso.GetRoleCredentialsCommand, + SSOClient: () => import_client_sso.SSOClient +}); +var import_client_sso; +var init_loadSso = __esm({ + "src/loadSso.ts"() { + "use strict"; + import_client_sso = require("@aws-sdk/client-sso"); + } +}); + +// src/index.ts +var index_exports = {}; +__export(index_exports, { + fromSSO: () => fromSSO, + isSsoProfile: () => isSsoProfile, + validateSsoProfile: () => validateSsoProfile +}); +module.exports = __toCommonJS(index_exports); + +// src/fromSSO.ts + + + +// src/isSsoProfile.ts +var isSsoProfile = /* @__PURE__ */ __name((arg) => arg && (typeof arg.sso_start_url === "string" || typeof arg.sso_account_id === "string" || typeof arg.sso_session === "string" || typeof arg.sso_region === "string" || typeof arg.sso_role_name === "string"), "isSsoProfile"); + +// src/resolveSSOCredentials.ts +var import_client = require("@aws-sdk/core/client"); +var import_token_providers = require("@aws-sdk/token-providers"); +var import_property_provider = require("@smithy/property-provider"); +var import_shared_ini_file_loader = require("@smithy/shared-ini-file-loader"); +var SHOULD_FAIL_CREDENTIAL_CHAIN = false; +var resolveSSOCredentials = /* @__PURE__ */ __name(async ({ + ssoStartUrl, + ssoSession, + ssoAccountId, + ssoRegion, + ssoRoleName, + ssoClient, + clientConfig, + parentClientConfig, + profile, + logger +}) => { + let token; + const refreshMessage = `To refresh this SSO session run aws sso login with the corresponding profile.`; + if (ssoSession) { + try { + const _token = await (0, 
import_token_providers.fromSso)({ profile })(); + token = { + accessToken: _token.token, + expiresAt: new Date(_token.expiration).toISOString() + }; + } catch (e) { + throw new import_property_provider.CredentialsProviderError(e.message, { + tryNextLink: SHOULD_FAIL_CREDENTIAL_CHAIN, + logger + }); + } + } else { + try { + token = await (0, import_shared_ini_file_loader.getSSOTokenFromFile)(ssoStartUrl); + } catch (e) { + throw new import_property_provider.CredentialsProviderError(`The SSO session associated with this profile is invalid. ${refreshMessage}`, { + tryNextLink: SHOULD_FAIL_CREDENTIAL_CHAIN, + logger + }); + } + } + if (new Date(token.expiresAt).getTime() - Date.now() <= 0) { + throw new import_property_provider.CredentialsProviderError(`The SSO session associated with this profile has expired. ${refreshMessage}`, { + tryNextLink: SHOULD_FAIL_CREDENTIAL_CHAIN, + logger + }); + } + const { accessToken } = token; + const { SSOClient: SSOClient2, GetRoleCredentialsCommand: GetRoleCredentialsCommand2 } = await Promise.resolve().then(() => (init_loadSso(), loadSso_exports)); + const sso = ssoClient || new SSOClient2( + Object.assign({}, clientConfig ?? {}, { + logger: clientConfig?.logger ?? parentClientConfig?.logger, + region: clientConfig?.region ?? 
ssoRegion + }) + ); + let ssoResp; + try { + ssoResp = await sso.send( + new GetRoleCredentialsCommand2({ + accountId: ssoAccountId, + roleName: ssoRoleName, + accessToken + }) + ); + } catch (e) { + throw new import_property_provider.CredentialsProviderError(e, { + tryNextLink: SHOULD_FAIL_CREDENTIAL_CHAIN, + logger + }); + } + const { + roleCredentials: { accessKeyId, secretAccessKey, sessionToken, expiration, credentialScope, accountId } = {} + } = ssoResp; + if (!accessKeyId || !secretAccessKey || !sessionToken || !expiration) { + throw new import_property_provider.CredentialsProviderError("SSO returns an invalid temporary credential.", { + tryNextLink: SHOULD_FAIL_CREDENTIAL_CHAIN, + logger + }); + } + const credentials = { + accessKeyId, + secretAccessKey, + sessionToken, + expiration: new Date(expiration), + ...credentialScope && { credentialScope }, + ...accountId && { accountId } + }; + if (ssoSession) { + (0, import_client.setCredentialFeature)(credentials, "CREDENTIALS_SSO", "s"); + } else { + (0, import_client.setCredentialFeature)(credentials, "CREDENTIALS_SSO_LEGACY", "u"); + } + return credentials; +}, "resolveSSOCredentials"); + +// src/validateSsoProfile.ts + +var validateSsoProfile = /* @__PURE__ */ __name((profile, logger) => { + const { sso_start_url, sso_account_id, sso_region, sso_role_name } = profile; + if (!sso_start_url || !sso_account_id || !sso_region || !sso_role_name) { + throw new import_property_provider.CredentialsProviderError( + `Profile is configured with invalid SSO credentials. Required parameters "sso_account_id", "sso_region", "sso_role_name", "sso_start_url". 
Got ${Object.keys(profile).join( + ", " + )} +Reference: https://docs.aws.amazon.com/cli/latest/userguide/cli-configure-sso.html`, + { tryNextLink: false, logger } + ); + } + return profile; +}, "validateSsoProfile"); + +// src/fromSSO.ts +var fromSSO = /* @__PURE__ */ __name((init = {}) => async ({ callerClientConfig } = {}) => { + init.logger?.debug("@aws-sdk/credential-provider-sso - fromSSO"); + const { ssoStartUrl, ssoAccountId, ssoRegion, ssoRoleName, ssoSession } = init; + const { ssoClient } = init; + const profileName = (0, import_shared_ini_file_loader.getProfileName)({ + profile: init.profile ?? callerClientConfig?.profile + }); + if (!ssoStartUrl && !ssoAccountId && !ssoRegion && !ssoRoleName && !ssoSession) { + const profiles = await (0, import_shared_ini_file_loader.parseKnownFiles)(init); + const profile = profiles[profileName]; + if (!profile) { + throw new import_property_provider.CredentialsProviderError(`Profile ${profileName} was not found.`, { logger: init.logger }); + } + if (!isSsoProfile(profile)) { + throw new import_property_provider.CredentialsProviderError(`Profile ${profileName} is not configured with SSO credentials.`, { + logger: init.logger + }); + } + if (profile?.sso_session) { + const ssoSessions = await (0, import_shared_ini_file_loader.loadSsoSessionData)(init); + const session = ssoSessions[profile.sso_session]; + const conflictMsg = ` configurations in profile ${profileName} and sso-session ${profile.sso_session}`; + if (ssoRegion && ssoRegion !== session.sso_region) { + throw new import_property_provider.CredentialsProviderError(`Conflicting SSO region` + conflictMsg, { + tryNextLink: false, + logger: init.logger + }); + } + if (ssoStartUrl && ssoStartUrl !== session.sso_start_url) { + throw new import_property_provider.CredentialsProviderError(`Conflicting SSO start_url` + conflictMsg, { + tryNextLink: false, + logger: init.logger + }); + } + profile.sso_region = session.sso_region; + profile.sso_start_url = 
session.sso_start_url; + } + const { sso_start_url, sso_account_id, sso_region, sso_role_name, sso_session } = validateSsoProfile( + profile, + init.logger + ); + return resolveSSOCredentials({ + ssoStartUrl: sso_start_url, + ssoSession: sso_session, + ssoAccountId: sso_account_id, + ssoRegion: sso_region, + ssoRoleName: sso_role_name, + ssoClient, + clientConfig: init.clientConfig, + parentClientConfig: init.parentClientConfig, + profile: profileName + }); + } else if (!ssoStartUrl || !ssoAccountId || !ssoRegion || !ssoRoleName) { + throw new import_property_provider.CredentialsProviderError( + 'Incomplete configuration. The fromSSO() argument hash must include "ssoStartUrl", "ssoAccountId", "ssoRegion", "ssoRoleName"', + { tryNextLink: false, logger: init.logger } + ); + } else { + return resolveSSOCredentials({ + ssoStartUrl, + ssoSession, + ssoAccountId, + ssoRegion, + ssoRoleName, + ssoClient, + clientConfig: init.clientConfig, + parentClientConfig: init.parentClientConfig, + profile: profileName + }); + } +}, "fromSSO"); +// Annotate the CommonJS export names for ESM import in node: + +0 && (module.exports = { + fromSSO, + isSsoProfile, + validateSsoProfile +}); + diff --git a/amplify/functions/fetchDocuments/node_modules/@aws-sdk/credential-provider-sso/dist-es/fromSSO.js b/amplify/functions/fetchDocuments/node_modules/@aws-sdk/credential-provider-sso/dist-es/fromSSO.js new file mode 100644 index 0000000..75f1860 --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@aws-sdk/credential-provider-sso/dist-es/fromSSO.js @@ -0,0 +1,73 @@ +import { CredentialsProviderError } from "@smithy/property-provider"; +import { getProfileName, loadSsoSessionData, parseKnownFiles } from "@smithy/shared-ini-file-loader"; +import { isSsoProfile } from "./isSsoProfile"; +import { resolveSSOCredentials } from "./resolveSSOCredentials"; +import { validateSsoProfile } from "./validateSsoProfile"; +export const fromSSO = (init = {}) => async ({ callerClientConfig } = 
{}) => { + init.logger?.debug("@aws-sdk/credential-provider-sso - fromSSO"); + const { ssoStartUrl, ssoAccountId, ssoRegion, ssoRoleName, ssoSession } = init; + const { ssoClient } = init; + const profileName = getProfileName({ + profile: init.profile ?? callerClientConfig?.profile, + }); + if (!ssoStartUrl && !ssoAccountId && !ssoRegion && !ssoRoleName && !ssoSession) { + const profiles = await parseKnownFiles(init); + const profile = profiles[profileName]; + if (!profile) { + throw new CredentialsProviderError(`Profile ${profileName} was not found.`, { logger: init.logger }); + } + if (!isSsoProfile(profile)) { + throw new CredentialsProviderError(`Profile ${profileName} is not configured with SSO credentials.`, { + logger: init.logger, + }); + } + if (profile?.sso_session) { + const ssoSessions = await loadSsoSessionData(init); + const session = ssoSessions[profile.sso_session]; + const conflictMsg = ` configurations in profile ${profileName} and sso-session ${profile.sso_session}`; + if (ssoRegion && ssoRegion !== session.sso_region) { + throw new CredentialsProviderError(`Conflicting SSO region` + conflictMsg, { + tryNextLink: false, + logger: init.logger, + }); + } + if (ssoStartUrl && ssoStartUrl !== session.sso_start_url) { + throw new CredentialsProviderError(`Conflicting SSO start_url` + conflictMsg, { + tryNextLink: false, + logger: init.logger, + }); + } + profile.sso_region = session.sso_region; + profile.sso_start_url = session.sso_start_url; + } + const { sso_start_url, sso_account_id, sso_region, sso_role_name, sso_session } = validateSsoProfile(profile, init.logger); + return resolveSSOCredentials({ + ssoStartUrl: sso_start_url, + ssoSession: sso_session, + ssoAccountId: sso_account_id, + ssoRegion: sso_region, + ssoRoleName: sso_role_name, + ssoClient: ssoClient, + clientConfig: init.clientConfig, + parentClientConfig: init.parentClientConfig, + profile: profileName, + }); + } + else if (!ssoStartUrl || !ssoAccountId || !ssoRegion || !ssoRoleName) 
{ + throw new CredentialsProviderError("Incomplete configuration. The fromSSO() argument hash must include " + + '"ssoStartUrl", "ssoAccountId", "ssoRegion", "ssoRoleName"', { tryNextLink: false, logger: init.logger }); + } + else { + return resolveSSOCredentials({ + ssoStartUrl, + ssoSession, + ssoAccountId, + ssoRegion, + ssoRoleName, + ssoClient, + clientConfig: init.clientConfig, + parentClientConfig: init.parentClientConfig, + profile: profileName, + }); + } +}; diff --git a/amplify/functions/fetchDocuments/node_modules/@aws-sdk/credential-provider-sso/dist-es/index.js b/amplify/functions/fetchDocuments/node_modules/@aws-sdk/credential-provider-sso/dist-es/index.js new file mode 100644 index 0000000..7215fb6 --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@aws-sdk/credential-provider-sso/dist-es/index.js @@ -0,0 +1,4 @@ +export * from "./fromSSO"; +export * from "./isSsoProfile"; +export * from "./types"; +export * from "./validateSsoProfile"; diff --git a/amplify/functions/fetchDocuments/node_modules/@aws-sdk/credential-provider-sso/dist-es/isSsoProfile.js b/amplify/functions/fetchDocuments/node_modules/@aws-sdk/credential-provider-sso/dist-es/isSsoProfile.js new file mode 100644 index 0000000..e655438 --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@aws-sdk/credential-provider-sso/dist-es/isSsoProfile.js @@ -0,0 +1,6 @@ +export const isSsoProfile = (arg) => arg && + (typeof arg.sso_start_url === "string" || + typeof arg.sso_account_id === "string" || + typeof arg.sso_session === "string" || + typeof arg.sso_region === "string" || + typeof arg.sso_role_name === "string"); diff --git a/amplify/functions/fetchDocuments/node_modules/@aws-sdk/credential-provider-sso/dist-es/loadSso.js b/amplify/functions/fetchDocuments/node_modules/@aws-sdk/credential-provider-sso/dist-es/loadSso.js new file mode 100644 index 0000000..6a4414f --- /dev/null +++ 
b/amplify/functions/fetchDocuments/node_modules/@aws-sdk/credential-provider-sso/dist-es/loadSso.js @@ -0,0 +1,2 @@ +import { GetRoleCredentialsCommand, SSOClient } from "@aws-sdk/client-sso"; +export { GetRoleCredentialsCommand, SSOClient }; diff --git a/amplify/functions/fetchDocuments/node_modules/@aws-sdk/credential-provider-sso/dist-es/resolveSSOCredentials.js b/amplify/functions/fetchDocuments/node_modules/@aws-sdk/credential-provider-sso/dist-es/resolveSSOCredentials.js new file mode 100644 index 0000000..979d9b2 --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@aws-sdk/credential-provider-sso/dist-es/resolveSSOCredentials.js @@ -0,0 +1,84 @@ +import { setCredentialFeature } from "@aws-sdk/core/client"; +import { fromSso as getSsoTokenProvider } from "@aws-sdk/token-providers"; +import { CredentialsProviderError } from "@smithy/property-provider"; +import { getSSOTokenFromFile } from "@smithy/shared-ini-file-loader"; +const SHOULD_FAIL_CREDENTIAL_CHAIN = false; +export const resolveSSOCredentials = async ({ ssoStartUrl, ssoSession, ssoAccountId, ssoRegion, ssoRoleName, ssoClient, clientConfig, parentClientConfig, profile, logger, }) => { + let token; + const refreshMessage = `To refresh this SSO session run aws sso login with the corresponding profile.`; + if (ssoSession) { + try { + const _token = await getSsoTokenProvider({ profile })(); + token = { + accessToken: _token.token, + expiresAt: new Date(_token.expiration).toISOString(), + }; + } + catch (e) { + throw new CredentialsProviderError(e.message, { + tryNextLink: SHOULD_FAIL_CREDENTIAL_CHAIN, + logger, + }); + } + } + else { + try { + token = await getSSOTokenFromFile(ssoStartUrl); + } + catch (e) { + throw new CredentialsProviderError(`The SSO session associated with this profile is invalid. 
${refreshMessage}`, { + tryNextLink: SHOULD_FAIL_CREDENTIAL_CHAIN, + logger, + }); + } + } + if (new Date(token.expiresAt).getTime() - Date.now() <= 0) { + throw new CredentialsProviderError(`The SSO session associated with this profile has expired. ${refreshMessage}`, { + tryNextLink: SHOULD_FAIL_CREDENTIAL_CHAIN, + logger, + }); + } + const { accessToken } = token; + const { SSOClient, GetRoleCredentialsCommand } = await import("./loadSso"); + const sso = ssoClient || + new SSOClient(Object.assign({}, clientConfig ?? {}, { + logger: clientConfig?.logger ?? parentClientConfig?.logger, + region: clientConfig?.region ?? ssoRegion, + })); + let ssoResp; + try { + ssoResp = await sso.send(new GetRoleCredentialsCommand({ + accountId: ssoAccountId, + roleName: ssoRoleName, + accessToken, + })); + } + catch (e) { + throw new CredentialsProviderError(e, { + tryNextLink: SHOULD_FAIL_CREDENTIAL_CHAIN, + logger, + }); + } + const { roleCredentials: { accessKeyId, secretAccessKey, sessionToken, expiration, credentialScope, accountId } = {}, } = ssoResp; + if (!accessKeyId || !secretAccessKey || !sessionToken || !expiration) { + throw new CredentialsProviderError("SSO returns an invalid temporary credential.", { + tryNextLink: SHOULD_FAIL_CREDENTIAL_CHAIN, + logger, + }); + } + const credentials = { + accessKeyId, + secretAccessKey, + sessionToken, + expiration: new Date(expiration), + ...(credentialScope && { credentialScope }), + ...(accountId && { accountId }), + }; + if (ssoSession) { + setCredentialFeature(credentials, "CREDENTIALS_SSO", "s"); + } + else { + setCredentialFeature(credentials, "CREDENTIALS_SSO_LEGACY", "u"); + } + return credentials; +}; diff --git a/amplify/functions/fetchDocuments/node_modules/@aws-sdk/credential-provider-sso/dist-es/types.js b/amplify/functions/fetchDocuments/node_modules/@aws-sdk/credential-provider-sso/dist-es/types.js new file mode 100644 index 0000000..cb0ff5c --- /dev/null +++ 
b/amplify/functions/fetchDocuments/node_modules/@aws-sdk/credential-provider-sso/dist-es/types.js @@ -0,0 +1 @@ +export {}; diff --git a/amplify/functions/fetchDocuments/node_modules/@aws-sdk/credential-provider-sso/dist-es/validateSsoProfile.js b/amplify/functions/fetchDocuments/node_modules/@aws-sdk/credential-provider-sso/dist-es/validateSsoProfile.js new file mode 100644 index 0000000..94174b6 --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@aws-sdk/credential-provider-sso/dist-es/validateSsoProfile.js @@ -0,0 +1,9 @@ +import { CredentialsProviderError } from "@smithy/property-provider"; +export const validateSsoProfile = (profile, logger) => { + const { sso_start_url, sso_account_id, sso_region, sso_role_name } = profile; + if (!sso_start_url || !sso_account_id || !sso_region || !sso_role_name) { + throw new CredentialsProviderError(`Profile is configured with invalid SSO credentials. Required parameters "sso_account_id", ` + + `"sso_region", "sso_role_name", "sso_start_url". Got ${Object.keys(profile).join(", ")}\nReference: https://docs.aws.amazon.com/cli/latest/userguide/cli-configure-sso.html`, { tryNextLink: false, logger }); + } + return profile; +}; diff --git a/amplify/functions/fetchDocuments/node_modules/@aws-sdk/credential-provider-sso/dist-types/fromSSO.d.ts b/amplify/functions/fetchDocuments/node_modules/@aws-sdk/credential-provider-sso/dist-types/fromSSO.d.ts new file mode 100644 index 0000000..47521a6 --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@aws-sdk/credential-provider-sso/dist-types/fromSSO.d.ts @@ -0,0 +1,68 @@ +import type { CredentialProviderOptions, RuntimeConfigAwsCredentialIdentityProvider } from "@aws-sdk/types"; +import { SourceProfileInit } from "@smithy/shared-ini-file-loader"; +import type { SSOClient, SSOClientConfig } from "./loadSso"; +/** + * @internal + */ +export interface SsoCredentialsParameters { + /** + * The URL to the AWS SSO service. 
+ */ + ssoStartUrl: string; + /** + * SSO session identifier. + * Presence implies usage of the SSOTokenProvider. + */ + ssoSession?: string; + /** + * The ID of the AWS account to use for temporary credentials. + */ + ssoAccountId: string; + /** + * The AWS region to use for temporary credentials. + */ + ssoRegion: string; + /** + * The name of the AWS role to assume. + */ + ssoRoleName: string; +} +/** + * @internal + */ +export interface FromSSOInit extends SourceProfileInit, CredentialProviderOptions { + ssoClient?: SSOClient; + clientConfig?: SSOClientConfig; +} +/** + * @internal + * + * Creates a credential provider that will read from a credential_process specified + * in ini files. + * + * The SSO credential provider must support both + * + * 1. the legacy profile format, + * @example + * ``` + * [profile sample-profile] + * sso_account_id = 012345678901 + * sso_region = us-east-1 + * sso_role_name = SampleRole + * sso_start_url = https://www.....com/start + * ``` + * + * 2. and the profile format for SSO Token Providers. 
+ * @example + * ``` + * [profile sso-profile] + * sso_session = dev + * sso_account_id = 012345678901 + * sso_role_name = SampleRole + * + * [sso-session dev] + * sso_region = us-east-1 + * sso_start_url = https://www.....com/start + * ``` + */ +export declare const fromSSO: (init?: FromSSOInit & Partial) => RuntimeConfigAwsCredentialIdentityProvider; diff --git a/amplify/functions/fetchDocuments/node_modules/@aws-sdk/credential-provider-sso/dist-types/index.d.ts b/amplify/functions/fetchDocuments/node_modules/@aws-sdk/credential-provider-sso/dist-types/index.d.ts new file mode 100644 index 0000000..d851c15 --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@aws-sdk/credential-provider-sso/dist-types/index.d.ts @@ -0,0 +1,16 @@ +/** + * @internal + */ +export * from "./fromSSO"; +/** + * @internal + */ +export * from "./isSsoProfile"; +/** + * @internal + */ +export * from "./types"; +/** + * @internal + */ +export * from "./validateSsoProfile"; diff --git a/amplify/functions/fetchDocuments/node_modules/@aws-sdk/credential-provider-sso/dist-types/isSsoProfile.d.ts b/amplify/functions/fetchDocuments/node_modules/@aws-sdk/credential-provider-sso/dist-types/isSsoProfile.d.ts new file mode 100644 index 0000000..77c1fb2 --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@aws-sdk/credential-provider-sso/dist-types/isSsoProfile.d.ts @@ -0,0 +1,6 @@ +import { Profile } from "@smithy/types"; +import { SsoProfile } from "./types"; +/** + * @internal + */ +export declare const isSsoProfile: (arg: Profile) => arg is Partial; diff --git a/amplify/functions/fetchDocuments/node_modules/@aws-sdk/credential-provider-sso/dist-types/loadSso.d.ts b/amplify/functions/fetchDocuments/node_modules/@aws-sdk/credential-provider-sso/dist-types/loadSso.d.ts new file mode 100644 index 0000000..f44232f --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@aws-sdk/credential-provider-sso/dist-types/loadSso.d.ts @@ -0,0 +1,3 @@ +import { 
GetRoleCredentialsCommand, SSOClient } from "@aws-sdk/client-sso"; +export { GetRoleCredentialsCommand, SSOClient }; +export type { SSOClientConfig, GetRoleCredentialsCommandOutput } from "@aws-sdk/client-sso"; diff --git a/amplify/functions/fetchDocuments/node_modules/@aws-sdk/credential-provider-sso/dist-types/resolveSSOCredentials.d.ts b/amplify/functions/fetchDocuments/node_modules/@aws-sdk/credential-provider-sso/dist-types/resolveSSOCredentials.d.ts new file mode 100644 index 0000000..e4e3fcc --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@aws-sdk/credential-provider-sso/dist-types/resolveSSOCredentials.d.ts @@ -0,0 +1,6 @@ +import { AwsCredentialIdentity } from "@smithy/types"; +import { FromSSOInit, SsoCredentialsParameters } from "./fromSSO"; +/** + * @internal + */ +export declare const resolveSSOCredentials: ({ ssoStartUrl, ssoSession, ssoAccountId, ssoRegion, ssoRoleName, ssoClient, clientConfig, parentClientConfig, profile, logger, }: FromSSOInit & SsoCredentialsParameters) => Promise; diff --git a/amplify/functions/fetchDocuments/node_modules/@aws-sdk/credential-provider-sso/dist-types/ts3.4/fromSSO.d.ts b/amplify/functions/fetchDocuments/node_modules/@aws-sdk/credential-provider-sso/dist-types/ts3.4/fromSSO.d.ts new file mode 100644 index 0000000..bf50b63 --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@aws-sdk/credential-provider-sso/dist-types/ts3.4/fromSSO.d.ts @@ -0,0 +1,22 @@ +import { + CredentialProviderOptions, + RuntimeConfigAwsCredentialIdentityProvider, +} from "@aws-sdk/types"; +import { SourceProfileInit } from "@smithy/shared-ini-file-loader"; +import { SSOClient, SSOClientConfig } from "./loadSso"; +export interface SsoCredentialsParameters { + ssoStartUrl: string; + ssoSession?: string; + ssoAccountId: string; + ssoRegion: string; + ssoRoleName: string; +} +export interface FromSSOInit + extends SourceProfileInit, + CredentialProviderOptions { + ssoClient?: SSOClient; + clientConfig?: 
SSOClientConfig; +} +export declare const fromSSO: ( + init?: FromSSOInit & Partial +) => RuntimeConfigAwsCredentialIdentityProvider; diff --git a/amplify/functions/fetchDocuments/node_modules/@aws-sdk/credential-provider-sso/dist-types/ts3.4/index.d.ts b/amplify/functions/fetchDocuments/node_modules/@aws-sdk/credential-provider-sso/dist-types/ts3.4/index.d.ts new file mode 100644 index 0000000..7215fb6 --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@aws-sdk/credential-provider-sso/dist-types/ts3.4/index.d.ts @@ -0,0 +1,4 @@ +export * from "./fromSSO"; +export * from "./isSsoProfile"; +export * from "./types"; +export * from "./validateSsoProfile"; diff --git a/amplify/functions/fetchDocuments/node_modules/@aws-sdk/credential-provider-sso/dist-types/ts3.4/isSsoProfile.d.ts b/amplify/functions/fetchDocuments/node_modules/@aws-sdk/credential-provider-sso/dist-types/ts3.4/isSsoProfile.d.ts new file mode 100644 index 0000000..b4e8bdd --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@aws-sdk/credential-provider-sso/dist-types/ts3.4/isSsoProfile.d.ts @@ -0,0 +1,3 @@ +import { Profile } from "@smithy/types"; +import { SsoProfile } from "./types"; +export declare const isSsoProfile: (arg: Profile) => arg is Partial; diff --git a/amplify/functions/fetchDocuments/node_modules/@aws-sdk/credential-provider-sso/dist-types/ts3.4/loadSso.d.ts b/amplify/functions/fetchDocuments/node_modules/@aws-sdk/credential-provider-sso/dist-types/ts3.4/loadSso.d.ts new file mode 100644 index 0000000..2d3249f --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@aws-sdk/credential-provider-sso/dist-types/ts3.4/loadSso.d.ts @@ -0,0 +1,6 @@ +import { GetRoleCredentialsCommand, SSOClient } from "@aws-sdk/client-sso"; +export { GetRoleCredentialsCommand, SSOClient }; +export { + SSOClientConfig, + GetRoleCredentialsCommandOutput, +} from "@aws-sdk/client-sso"; diff --git 
a/amplify/functions/fetchDocuments/node_modules/@aws-sdk/credential-provider-sso/dist-types/ts3.4/resolveSSOCredentials.d.ts b/amplify/functions/fetchDocuments/node_modules/@aws-sdk/credential-provider-sso/dist-types/ts3.4/resolveSSOCredentials.d.ts new file mode 100644 index 0000000..cc16554 --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@aws-sdk/credential-provider-sso/dist-types/ts3.4/resolveSSOCredentials.d.ts @@ -0,0 +1,14 @@ +import { AwsCredentialIdentity } from "@smithy/types"; +import { FromSSOInit, SsoCredentialsParameters } from "./fromSSO"; +export declare const resolveSSOCredentials: ({ + ssoStartUrl, + ssoSession, + ssoAccountId, + ssoRegion, + ssoRoleName, + ssoClient, + clientConfig, + parentClientConfig, + profile, + logger, +}: FromSSOInit & SsoCredentialsParameters) => Promise; diff --git a/amplify/functions/fetchDocuments/node_modules/@aws-sdk/credential-provider-sso/dist-types/ts3.4/types.d.ts b/amplify/functions/fetchDocuments/node_modules/@aws-sdk/credential-provider-sso/dist-types/ts3.4/types.d.ts new file mode 100644 index 0000000..4a3986b --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@aws-sdk/credential-provider-sso/dist-types/ts3.4/types.d.ts @@ -0,0 +1,14 @@ +import { Profile } from "@smithy/types"; +export interface SSOToken { + accessToken: string; + expiresAt: string; + region?: string; + startUrl?: string; +} +export interface SsoProfile extends Profile { + sso_start_url: string; + sso_session?: string; + sso_account_id: string; + sso_region: string; + sso_role_name: string; +} diff --git a/amplify/functions/fetchDocuments/node_modules/@aws-sdk/credential-provider-sso/dist-types/ts3.4/validateSsoProfile.d.ts b/amplify/functions/fetchDocuments/node_modules/@aws-sdk/credential-provider-sso/dist-types/ts3.4/validateSsoProfile.d.ts new file mode 100644 index 0000000..6572fc4 --- /dev/null +++ 
b/amplify/functions/fetchDocuments/node_modules/@aws-sdk/credential-provider-sso/dist-types/ts3.4/validateSsoProfile.d.ts @@ -0,0 +1,6 @@ +import { Logger } from "@smithy/types"; +import { SsoProfile } from "./types"; +export declare const validateSsoProfile: ( + profile: Partial, + logger?: Logger +) => SsoProfile; diff --git a/amplify/functions/fetchDocuments/node_modules/@aws-sdk/credential-provider-sso/dist-types/types.d.ts b/amplify/functions/fetchDocuments/node_modules/@aws-sdk/credential-provider-sso/dist-types/types.d.ts new file mode 100644 index 0000000..551d678 --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@aws-sdk/credential-provider-sso/dist-types/types.d.ts @@ -0,0 +1,22 @@ +import { Profile } from "@smithy/types"; +/** + * @internal + * + * Cached SSO token retrieved from SSO login flow. + */ +export interface SSOToken { + accessToken: string; + expiresAt: string; + region?: string; + startUrl?: string; +} +/** + * @internal + */ +export interface SsoProfile extends Profile { + sso_start_url: string; + sso_session?: string; + sso_account_id: string; + sso_region: string; + sso_role_name: string; +} diff --git a/amplify/functions/fetchDocuments/node_modules/@aws-sdk/credential-provider-sso/dist-types/validateSsoProfile.d.ts b/amplify/functions/fetchDocuments/node_modules/@aws-sdk/credential-provider-sso/dist-types/validateSsoProfile.d.ts new file mode 100644 index 0000000..8b0ab31 --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@aws-sdk/credential-provider-sso/dist-types/validateSsoProfile.d.ts @@ -0,0 +1,6 @@ +import { Logger } from "@smithy/types"; +import { SsoProfile } from "./types"; +/** + * @internal + */ +export declare const validateSsoProfile: (profile: Partial, logger?: Logger) => SsoProfile; diff --git a/amplify/functions/fetchDocuments/node_modules/@aws-sdk/credential-provider-sso/package.json b/amplify/functions/fetchDocuments/node_modules/@aws-sdk/credential-provider-sso/package.json new file mode 
100644 index 0000000..0d5cfb5 --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@aws-sdk/credential-provider-sso/package.json @@ -0,0 +1,65 @@ +{ + "name": "@aws-sdk/credential-provider-sso", + "version": "3.803.0", + "description": "AWS credential provider that exchanges a resolved SSO login token file for temporary AWS credentials", + "main": "./dist-cjs/index.js", + "module": "./dist-es/index.js", + "scripts": { + "build": "concurrently 'yarn:build:cjs' 'yarn:build:es' 'yarn:build:types'", + "build:cjs": "node ../../scripts/compilation/inline credential-provider-sso", + "build:es": "tsc -p tsconfig.es.json", + "build:include:deps": "lerna run --scope $npm_package_name --include-dependencies build", + "build:types": "tsc -p tsconfig.types.json", + "build:types:downlevel": "downlevel-dts dist-types dist-types/ts3.4", + "clean": "rimraf ./dist-* && rimraf *.tsbuildinfo", + "test": "yarn g:vitest run", + "test:watch": "yarn g:vitest watch" + }, + "keywords": [ + "aws", + "credentials" + ], + "author": { + "name": "AWS SDK for JavaScript Team", + "url": "https://aws.amazon.com/javascript/" + }, + "license": "Apache-2.0", + "dependencies": { + "@aws-sdk/client-sso": "3.803.0", + "@aws-sdk/core": "3.799.0", + "@aws-sdk/token-providers": "3.803.0", + "@aws-sdk/types": "3.775.0", + "@smithy/property-provider": "^4.0.2", + "@smithy/shared-ini-file-loader": "^4.0.2", + "@smithy/types": "^4.2.0", + "tslib": "^2.6.2" + }, + "devDependencies": { + "@tsconfig/recommended": "1.0.1", + "@types/node": "^18.19.69", + "concurrently": "7.0.0", + "downlevel-dts": "0.10.1", + "rimraf": "3.0.2", + "typescript": "~5.2.2" + }, + "types": "./dist-types/index.d.ts", + "engines": { + "node": ">=18.0.0" + }, + "typesVersions": { + "<4.0": { + "dist-types/*": [ + "dist-types/ts3.4/*" + ] + } + }, + "files": [ + "dist-*/**" + ], + "homepage": "https://github.com/aws/aws-sdk-js-v3/tree/main/packages/credential-provider-sso", + "repository": { + "type": "git", + "url": 
"https://github.com/aws/aws-sdk-js-v3.git", + "directory": "packages/credential-provider-sso" + } +} diff --git a/amplify/functions/fetchDocuments/node_modules/@aws-sdk/credential-provider-web-identity/LICENSE b/amplify/functions/fetchDocuments/node_modules/@aws-sdk/credential-provider-web-identity/LICENSE new file mode 100644 index 0000000..f9a6673 --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@aws-sdk/credential-provider-web-identity/LICENSE @@ -0,0 +1,201 @@ +Apache License + Version 2.0, January 2004 + http://www.apache.org/licenses/ + + TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION + + 1. Definitions. + + "License" shall mean the terms and conditions for use, reproduction, + and distribution as defined by Sections 1 through 9 of this document. + + "Licensor" shall mean the copyright owner or entity authorized by + the copyright owner that is granting the License. + + "Legal Entity" shall mean the union of the acting entity and all + other entities that control, are controlled by, or are under common + control with that entity. For the purposes of this definition, + "control" means (i) the power, direct or indirect, to cause the + direction or management of such entity, whether by contract or + otherwise, or (ii) ownership of fifty percent (50%) or more of the + outstanding shares, or (iii) beneficial ownership of such entity. + + "You" (or "Your") shall mean an individual or Legal Entity + exercising permissions granted by this License. + + "Source" form shall mean the preferred form for making modifications, + including but not limited to software source code, documentation + source, and configuration files. + + "Object" form shall mean any form resulting from mechanical + transformation or translation of a Source form, including but + not limited to compiled object code, generated documentation, + and conversions to other media types. 
+ + "Work" shall mean the work of authorship, whether in Source or + Object form, made available under the License, as indicated by a + copyright notice that is included in or attached to the work + (an example is provided in the Appendix below). + + "Derivative Works" shall mean any work, whether in Source or Object + form, that is based on (or derived from) the Work and for which the + editorial revisions, annotations, elaborations, or other modifications + represent, as a whole, an original work of authorship. For the purposes + of this License, Derivative Works shall not include works that remain + separable from, or merely link (or bind by name) to the interfaces of, + the Work and Derivative Works thereof. + + "Contribution" shall mean any work of authorship, including + the original version of the Work and any modifications or additions + to that Work or Derivative Works thereof, that is intentionally + submitted to Licensor for inclusion in the Work by the copyright owner + or by an individual or Legal Entity authorized to submit on behalf of + the copyright owner. For the purposes of this definition, "submitted" + means any form of electronic, verbal, or written communication sent + to the Licensor or its representatives, including but not limited to + communication on electronic mailing lists, source code control systems, + and issue tracking systems that are managed by, or on behalf of, the + Licensor for the purpose of discussing and improving the Work, but + excluding communication that is conspicuously marked or otherwise + designated in writing by the copyright owner as "Not a Contribution." + + "Contributor" shall mean Licensor and any individual or Legal Entity + on behalf of whom a Contribution has been received by Licensor and + subsequently incorporated within the Work. + + 2. Grant of Copyright License. 
Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + copyright license to reproduce, prepare Derivative Works of, + publicly display, publicly perform, sublicense, and distribute the + Work and such Derivative Works in Source or Object form. + + 3. Grant of Patent License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + (except as stated in this section) patent license to make, have made, + use, offer to sell, sell, import, and otherwise transfer the Work, + where such license applies only to those patent claims licensable + by such Contributor that are necessarily infringed by their + Contribution(s) alone or by combination of their Contribution(s) + with the Work to which such Contribution(s) was submitted. If You + institute patent litigation against any entity (including a + cross-claim or counterclaim in a lawsuit) alleging that the Work + or a Contribution incorporated within the Work constitutes direct + or contributory patent infringement, then any patent licenses + granted to You under this License for that Work shall terminate + as of the date such litigation is filed. + + 4. Redistribution. 
You may reproduce and distribute copies of the + Work or Derivative Works thereof in any medium, with or without + modifications, and in Source or Object form, provided that You + meet the following conditions: + + (a) You must give any other recipients of the Work or + Derivative Works a copy of this License; and + + (b) You must cause any modified files to carry prominent notices + stating that You changed the files; and + + (c) You must retain, in the Source form of any Derivative Works + that You distribute, all copyright, patent, trademark, and + attribution notices from the Source form of the Work, + excluding those notices that do not pertain to any part of + the Derivative Works; and + + (d) If the Work includes a "NOTICE" text file as part of its + distribution, then any Derivative Works that You distribute must + include a readable copy of the attribution notices contained + within such NOTICE file, excluding those notices that do not + pertain to any part of the Derivative Works, in at least one + of the following places: within a NOTICE text file distributed + as part of the Derivative Works; within the Source form or + documentation, if provided along with the Derivative Works; or, + within a display generated by the Derivative Works, if and + wherever such third-party notices normally appear. The contents + of the NOTICE file are for informational purposes only and + do not modify the License. You may add Your own attribution + notices within Derivative Works that You distribute, alongside + or as an addendum to the NOTICE text from the Work, provided + that such additional attribution notices cannot be construed + as modifying the License. 
+ + You may add Your own copyright statement to Your modifications and + may provide additional or different license terms and conditions + for use, reproduction, or distribution of Your modifications, or + for any such Derivative Works as a whole, provided Your use, + reproduction, and distribution of the Work otherwise complies with + the conditions stated in this License. + + 5. Submission of Contributions. Unless You explicitly state otherwise, + any Contribution intentionally submitted for inclusion in the Work + by You to the Licensor shall be under the terms and conditions of + this License, without any additional terms or conditions. + Notwithstanding the above, nothing herein shall supersede or modify + the terms of any separate license agreement you may have executed + with Licensor regarding such Contributions. + + 6. Trademarks. This License does not grant permission to use the trade + names, trademarks, service marks, or product names of the Licensor, + except as required for reasonable and customary use in describing the + origin of the Work and reproducing the content of the NOTICE file. + + 7. Disclaimer of Warranty. Unless required by applicable law or + agreed to in writing, Licensor provides the Work (and each + Contributor provides its Contributions) on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or + implied, including, without limitation, any warranties or conditions + of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A + PARTICULAR PURPOSE. You are solely responsible for determining the + appropriateness of using or redistributing the Work and assume any + risks associated with Your exercise of permissions under this License. + + 8. Limitation of Liability. 
In no event and under no legal theory, + whether in tort (including negligence), contract, or otherwise, + unless required by applicable law (such as deliberate and grossly + negligent acts) or agreed to in writing, shall any Contributor be + liable to You for damages, including any direct, indirect, special, + incidental, or consequential damages of any character arising as a + result of this License or out of the use or inability to use the + Work (including but not limited to damages for loss of goodwill, + work stoppage, computer failure or malfunction, or any and all + other commercial damages or losses), even if such Contributor + has been advised of the possibility of such damages. + + 9. Accepting Warranty or Additional Liability. While redistributing + the Work or Derivative Works thereof, You may choose to offer, + and charge a fee for, acceptance of support, warranty, indemnity, + or other liability obligations and/or rights consistent with this + License. However, in accepting such obligations, You may act only + on Your own behalf and on Your sole responsibility, not on behalf + of any other Contributor, and only if You agree to indemnify, + defend, and hold each Contributor harmless for any liability + incurred by, or claims asserted against, such Contributor by reason + of your accepting any such warranty or additional liability. + + END OF TERMS AND CONDITIONS + + APPENDIX: How to apply the Apache License to your work. + + To apply the Apache License to your work, attach the following + boilerplate notice, with the fields enclosed by brackets "{}" + replaced with your own identifying information. (Don't include + the brackets!) The text should be enclosed in the appropriate + comment syntax for the file format. We also recommend that a + file or class name and description of purpose be included on the + same "printed page" as the copyright notice for easier + identification within third-party archives. + + Copyright 2019 Amazon.com, Inc. 
or its affiliates. All Rights Reserved. + + Licensed under the Apache License, Version 2.0 (the "License"); + you may not use this file except in compliance with the License. + You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + + Unless required by applicable law or agreed to in writing, software + distributed under the License is distributed on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + See the License for the specific language governing permissions and + limitations under the License. \ No newline at end of file diff --git a/amplify/functions/fetchDocuments/node_modules/@aws-sdk/credential-provider-web-identity/README.md b/amplify/functions/fetchDocuments/node_modules/@aws-sdk/credential-provider-web-identity/README.md new file mode 100644 index 0000000..e4858a4 --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@aws-sdk/credential-provider-web-identity/README.md @@ -0,0 +1,11 @@ +# @aws-sdk/credential-provider-web-identity + +[![NPM version](https://img.shields.io/npm/v/@aws-sdk/credential-provider-web-identity/latest.svg)](https://www.npmjs.com/package/@aws-sdk/credential-provider-web-identity) +[![NPM downloads](https://img.shields.io/npm/dm/@aws-sdk/credential-provider-web-identity.svg)](https://www.npmjs.com/package/@aws-sdk/credential-provider-web-identity) + +> An internal package + +## Usage + +You probably shouldn't, at least directly. Please use [@aws-sdk/credential-providers](https://www.npmjs.com/package/@aws-sdk/credential-providers) +instead. 
diff --git a/amplify/functions/fetchDocuments/node_modules/@aws-sdk/credential-provider-web-identity/dist-cjs/fromTokenFile.js b/amplify/functions/fetchDocuments/node_modules/@aws-sdk/credential-provider-web-identity/dist-cjs/fromTokenFile.js new file mode 100644 index 0000000..2be7727 --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@aws-sdk/credential-provider-web-identity/dist-cjs/fromTokenFile.js @@ -0,0 +1,32 @@ +"use strict"; +Object.defineProperty(exports, "__esModule", { value: true }); +exports.fromTokenFile = void 0; +const client_1 = require("@aws-sdk/core/client"); +const property_provider_1 = require("@smithy/property-provider"); +const fs_1 = require("fs"); +const fromWebToken_1 = require("./fromWebToken"); +const ENV_TOKEN_FILE = "AWS_WEB_IDENTITY_TOKEN_FILE"; +const ENV_ROLE_ARN = "AWS_ROLE_ARN"; +const ENV_ROLE_SESSION_NAME = "AWS_ROLE_SESSION_NAME"; +const fromTokenFile = (init = {}) => async () => { + init.logger?.debug("@aws-sdk/credential-provider-web-identity - fromTokenFile"); + const webIdentityTokenFile = init?.webIdentityTokenFile ?? process.env[ENV_TOKEN_FILE]; + const roleArn = init?.roleArn ?? process.env[ENV_ROLE_ARN]; + const roleSessionName = init?.roleSessionName ?? 
process.env[ENV_ROLE_SESSION_NAME]; + if (!webIdentityTokenFile || !roleArn) { + throw new property_provider_1.CredentialsProviderError("Web identity configuration not specified", { + logger: init.logger, + }); + } + const credentials = await (0, fromWebToken_1.fromWebToken)({ + ...init, + webIdentityToken: (0, fs_1.readFileSync)(webIdentityTokenFile, { encoding: "ascii" }), + roleArn, + roleSessionName, + })(); + if (webIdentityTokenFile === process.env[ENV_TOKEN_FILE]) { + (0, client_1.setCredentialFeature)(credentials, "CREDENTIALS_ENV_VARS_STS_WEB_ID_TOKEN", "h"); + } + return credentials; +}; +exports.fromTokenFile = fromTokenFile; diff --git a/amplify/functions/fetchDocuments/node_modules/@aws-sdk/credential-provider-web-identity/dist-cjs/fromWebToken.js b/amplify/functions/fetchDocuments/node_modules/@aws-sdk/credential-provider-web-identity/dist-cjs/fromWebToken.js new file mode 100644 index 0000000..f8eafde --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@aws-sdk/credential-provider-web-identity/dist-cjs/fromWebToken.js @@ -0,0 +1,52 @@ +"use strict"; +var __createBinding = (this && this.__createBinding) || (Object.create ? (function(o, m, k, k2) { + if (k2 === undefined) k2 = k; + var desc = Object.getOwnPropertyDescriptor(m, k); + if (!desc || ("get" in desc ? !m.__esModule : desc.writable || desc.configurable)) { + desc = { enumerable: true, get: function() { return m[k]; } }; + } + Object.defineProperty(o, k2, desc); +}) : (function(o, m, k, k2) { + if (k2 === undefined) k2 = k; + o[k2] = m[k]; +})); +var __setModuleDefault = (this && this.__setModuleDefault) || (Object.create ? 
(function(o, v) { + Object.defineProperty(o, "default", { enumerable: true, value: v }); +}) : function(o, v) { + o["default"] = v; +}); +var __importStar = (this && this.__importStar) || function (mod) { + if (mod && mod.__esModule) return mod; + var result = {}; + if (mod != null) for (var k in mod) if (k !== "default" && Object.prototype.hasOwnProperty.call(mod, k)) __createBinding(result, mod, k); + __setModuleDefault(result, mod); + return result; +}; +Object.defineProperty(exports, "__esModule", { value: true }); +exports.fromWebToken = void 0; +const fromWebToken = (init) => async (awsIdentityProperties) => { + init.logger?.debug("@aws-sdk/credential-provider-web-identity - fromWebToken"); + const { roleArn, roleSessionName, webIdentityToken, providerId, policyArns, policy, durationSeconds } = init; + let { roleAssumerWithWebIdentity } = init; + if (!roleAssumerWithWebIdentity) { + const { getDefaultRoleAssumerWithWebIdentity } = await Promise.resolve().then(() => __importStar(require("@aws-sdk/nested-clients/sts"))); + roleAssumerWithWebIdentity = getDefaultRoleAssumerWithWebIdentity({ + ...init.clientConfig, + credentialProviderLogger: init.logger, + parentClientConfig: { + ...awsIdentityProperties?.callerClientConfig, + ...init.parentClientConfig, + }, + }, init.clientPlugins); + } + return roleAssumerWithWebIdentity({ + RoleArn: roleArn, + RoleSessionName: roleSessionName ?? 
`aws-sdk-js-session-${Date.now()}`, + WebIdentityToken: webIdentityToken, + ProviderId: providerId, + PolicyArns: policyArns, + Policy: policy, + DurationSeconds: durationSeconds, + }); +}; +exports.fromWebToken = fromWebToken; diff --git a/amplify/functions/fetchDocuments/node_modules/@aws-sdk/credential-provider-web-identity/dist-cjs/index.js b/amplify/functions/fetchDocuments/node_modules/@aws-sdk/credential-provider-web-identity/dist-cjs/index.js new file mode 100644 index 0000000..5dc29db --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@aws-sdk/credential-provider-web-identity/dist-cjs/index.js @@ -0,0 +1,28 @@ +"use strict"; +var __defProp = Object.defineProperty; +var __getOwnPropDesc = Object.getOwnPropertyDescriptor; +var __getOwnPropNames = Object.getOwnPropertyNames; +var __hasOwnProp = Object.prototype.hasOwnProperty; +var __copyProps = (to, from, except, desc) => { + if (from && typeof from === "object" || typeof from === "function") { + for (let key of __getOwnPropNames(from)) + if (!__hasOwnProp.call(to, key) && key !== except) + __defProp(to, key, { get: () => from[key], enumerable: !(desc = __getOwnPropDesc(from, key)) || desc.enumerable }); + } + return to; +}; +var __reExport = (target, mod, secondTarget) => (__copyProps(target, mod, "default"), secondTarget && __copyProps(secondTarget, mod, "default")); +var __toCommonJS = (mod) => __copyProps(__defProp({}, "__esModule", { value: true }), mod); + +// src/index.ts +var index_exports = {}; +module.exports = __toCommonJS(index_exports); +__reExport(index_exports, require("././fromTokenFile"), module.exports); +__reExport(index_exports, require("././fromWebToken"), module.exports); +// Annotate the CommonJS export names for ESM import in node: + +0 && (module.exports = { + fromTokenFile, + fromWebToken +}); + diff --git a/amplify/functions/fetchDocuments/node_modules/@aws-sdk/credential-provider-web-identity/dist-es/fromTokenFile.js 
b/amplify/functions/fetchDocuments/node_modules/@aws-sdk/credential-provider-web-identity/dist-es/fromTokenFile.js new file mode 100644 index 0000000..64a5032 --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@aws-sdk/credential-provider-web-identity/dist-es/fromTokenFile.js @@ -0,0 +1,28 @@ +import { setCredentialFeature } from "@aws-sdk/core/client"; +import { CredentialsProviderError } from "@smithy/property-provider"; +import { readFileSync } from "fs"; +import { fromWebToken } from "./fromWebToken"; +const ENV_TOKEN_FILE = "AWS_WEB_IDENTITY_TOKEN_FILE"; +const ENV_ROLE_ARN = "AWS_ROLE_ARN"; +const ENV_ROLE_SESSION_NAME = "AWS_ROLE_SESSION_NAME"; +export const fromTokenFile = (init = {}) => async () => { + init.logger?.debug("@aws-sdk/credential-provider-web-identity - fromTokenFile"); + const webIdentityTokenFile = init?.webIdentityTokenFile ?? process.env[ENV_TOKEN_FILE]; + const roleArn = init?.roleArn ?? process.env[ENV_ROLE_ARN]; + const roleSessionName = init?.roleSessionName ?? 
process.env[ENV_ROLE_SESSION_NAME]; + if (!webIdentityTokenFile || !roleArn) { + throw new CredentialsProviderError("Web identity configuration not specified", { + logger: init.logger, + }); + } + const credentials = await fromWebToken({ + ...init, + webIdentityToken: readFileSync(webIdentityTokenFile, { encoding: "ascii" }), + roleArn, + roleSessionName, + })(); + if (webIdentityTokenFile === process.env[ENV_TOKEN_FILE]) { + setCredentialFeature(credentials, "CREDENTIALS_ENV_VARS_STS_WEB_ID_TOKEN", "h"); + } + return credentials; +}; diff --git a/amplify/functions/fetchDocuments/node_modules/@aws-sdk/credential-provider-web-identity/dist-es/fromWebToken.js b/amplify/functions/fetchDocuments/node_modules/@aws-sdk/credential-provider-web-identity/dist-es/fromWebToken.js new file mode 100644 index 0000000..268e0aa --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@aws-sdk/credential-provider-web-identity/dist-es/fromWebToken.js @@ -0,0 +1,25 @@ +export const fromWebToken = (init) => async (awsIdentityProperties) => { + init.logger?.debug("@aws-sdk/credential-provider-web-identity - fromWebToken"); + const { roleArn, roleSessionName, webIdentityToken, providerId, policyArns, policy, durationSeconds } = init; + let { roleAssumerWithWebIdentity } = init; + if (!roleAssumerWithWebIdentity) { + const { getDefaultRoleAssumerWithWebIdentity } = await import("@aws-sdk/nested-clients/sts"); + roleAssumerWithWebIdentity = getDefaultRoleAssumerWithWebIdentity({ + ...init.clientConfig, + credentialProviderLogger: init.logger, + parentClientConfig: { + ...awsIdentityProperties?.callerClientConfig, + ...init.parentClientConfig, + }, + }, init.clientPlugins); + } + return roleAssumerWithWebIdentity({ + RoleArn: roleArn, + RoleSessionName: roleSessionName ?? 
`aws-sdk-js-session-${Date.now()}`, + WebIdentityToken: webIdentityToken, + ProviderId: providerId, + PolicyArns: policyArns, + Policy: policy, + DurationSeconds: durationSeconds, + }); +}; diff --git a/amplify/functions/fetchDocuments/node_modules/@aws-sdk/credential-provider-web-identity/dist-es/index.js b/amplify/functions/fetchDocuments/node_modules/@aws-sdk/credential-provider-web-identity/dist-es/index.js new file mode 100644 index 0000000..0e900c0 --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@aws-sdk/credential-provider-web-identity/dist-es/index.js @@ -0,0 +1,2 @@ +export * from "./fromTokenFile"; +export * from "./fromWebToken"; diff --git a/amplify/functions/fetchDocuments/node_modules/@aws-sdk/credential-provider-web-identity/dist-types/fromTokenFile.d.ts b/amplify/functions/fetchDocuments/node_modules/@aws-sdk/credential-provider-web-identity/dist-types/fromTokenFile.d.ts new file mode 100644 index 0000000..58f885f --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@aws-sdk/credential-provider-web-identity/dist-types/fromTokenFile.d.ts @@ -0,0 +1,18 @@ +import { CredentialProviderOptions } from "@aws-sdk/types"; +import type { AwsCredentialIdentityProvider } from "@smithy/types"; +import { FromWebTokenInit } from "./fromWebToken"; +/** + * @public + */ +export interface FromTokenFileInit extends Partial>, CredentialProviderOptions { + /** + * File location of where the `OIDC` token is stored. + */ + webIdentityTokenFile?: string; +} +/** + * @internal + * + * Represents OIDC credentials from a file on disk. 
+ */ +export declare const fromTokenFile: (init?: FromTokenFileInit) => AwsCredentialIdentityProvider; diff --git a/amplify/functions/fetchDocuments/node_modules/@aws-sdk/credential-provider-web-identity/dist-types/fromWebToken.d.ts b/amplify/functions/fetchDocuments/node_modules/@aws-sdk/credential-provider-web-identity/dist-types/fromWebToken.d.ts new file mode 100644 index 0000000..6b5e066 --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@aws-sdk/credential-provider-web-identity/dist-types/fromWebToken.d.ts @@ -0,0 +1,145 @@ +import type { CredentialProviderOptions, RuntimeConfigAwsCredentialIdentityProvider } from "@aws-sdk/types"; +import type { AwsCredentialIdentity, Pluggable } from "@smithy/types"; +/** + * @public + */ +export interface AssumeRoleWithWebIdentityParams { + /** + *

The Amazon Resource Name (ARN) of the role that the caller is assuming.

+ */ + RoleArn: string; + /** + *

An identifier for the assumed role session. Typically, you pass the name or identifier + * that is associated with the user who is using your application. That way, the temporary + * security credentials that your application will use are associated with that user. This + * session name is included as part of the ARN and assumed role ID in the + * AssumedRoleUser response element.

+ *

The regex used to validate this parameter is a string of characters + * consisting of upper- and lower-case alphanumeric characters with no spaces. You can + * also include underscores or any of the following characters: =,.@-

+ */ + RoleSessionName: string; + /** + *

The OAuth 2.0 access token or OpenID Connect ID token that is provided by the identity + * provider. Your application must get this token by authenticating the user who is using your + * application with a web identity provider before the application makes an + * AssumeRoleWithWebIdentity call.

+ */ + WebIdentityToken: string; + /** + *

The fully qualified host component of the domain name of the identity provider.

+ *

Specify this value only for OAuth 2.0 access tokens. Currently + * www.amazon.com and graph.facebook.com are the only supported + * identity providers for OAuth 2.0 access tokens. Do not include URL schemes and port + * numbers.

+ *

Do not specify this value for OpenID Connect ID tokens.

+ */ + ProviderId?: string; + /** + *

The Amazon Resource Names (ARNs) of the IAM managed policies that you want to use as + * managed session policies. The policies must exist in the same account as the role.

+ *

This parameter is optional. You can provide up to 10 managed policy ARNs. However, the + * plain text that you use for both inline and managed session policies can't exceed 2,048 + * characters. For more information about ARNs, see Amazon Resource Names (ARNs) and AWS + * Service Namespaces in the AWS General Reference.

+ * + *

An AWS conversion compresses the passed session policies and session tags into a + * packed binary format that has a separate limit. Your request can fail for this limit + * even if your plain text meets the other requirements. The PackedPolicySize + * response element indicates by percentage how close the policies and tags for your + * request are to the upper size limit. + *

+ *
+ * + *

Passing policies to this operation returns new + * temporary credentials. The resulting session's permissions are the intersection of the + * role's identity-based policy and the session policies. You can use the role's temporary + * credentials in subsequent AWS API calls to access resources in the account that owns + * the role. You cannot use session policies to grant more permissions than those allowed + * by the identity-based policy of the role that is being assumed. For more information, see + * Session + * Policies in the IAM User Guide.

+ */ + PolicyArns?: { + arn?: string; + }[]; + /** + *

An IAM policy in JSON format that you want to use as an inline session policy.

+ *

This parameter is optional. Passing policies to this operation returns new + * temporary credentials. The resulting session's permissions are the intersection of the + * role's identity-based policy and the session policies. You can use the role's temporary + * credentials in subsequent AWS API calls to access resources in the account that owns + * the role. You cannot use session policies to grant more permissions than those allowed + * by the identity-based policy of the role that is being assumed. For more information, see + * Session + * Policies in the IAM User Guide.

+ *

The plain text that you use for both inline and managed session policies can't exceed + * 2,048 characters. The JSON policy characters can be any ASCII character from the space + * character to the end of the valid character list (\u0020 through \u00FF). It can also + * include the tab (\u0009), linefeed (\u000A), and carriage return (\u000D) + * characters.

+ * + *

An AWS conversion compresses the passed session policies and session tags into a + * packed binary format that has a separate limit. Your request can fail for this limit + * even if your plain text meets the other requirements. The PackedPolicySize + * response element indicates by percentage how close the policies and tags for your + * request are to the upper size limit. + *

+ *
+ */ + Policy?: string; + /** + *

The duration, in seconds, of the role session. The value can range from 900 seconds (15 + * minutes) up to the maximum session duration setting for the role. This setting can have a + * value from 1 hour to 12 hours. If you specify a value higher than this setting, the + * operation fails. For example, if you specify a session duration of 12 hours, but your + * administrator set the maximum session duration to 6 hours, your operation fails. To learn + * how to view the maximum value for your role, see View the + * Maximum Session Duration Setting for a Role in the + * IAM User Guide.

+ *

By default, the value is set to 3600 seconds.

+ * + *

The DurationSeconds parameter is separate from the duration of a console + * session that you might request using the returned credentials. The request to the + * federation endpoint for a console sign-in token takes a SessionDuration + * parameter that specifies the maximum length of the console session. For more + * information, see Creating a URL + * that Enables Federated Users to Access the AWS Management Console in the + * IAM User Guide.

+ *
+ */ + DurationSeconds?: number; +} +type LowerCaseKey = { + [K in keyof T as `${Uncapitalize}`]: T[K]; +}; +/** + * @public + */ +export interface FromWebTokenInit extends Omit, "roleSessionName">, CredentialProviderOptions { + /** + * The IAM session name used to distinguish sessions. + */ + roleSessionName?: string; + /** + * A function that assumes a role with web identity and returns a promise fulfilled with + * credentials for the assumed role. + * + * @param params input parameter of sts:AssumeRoleWithWebIdentity API. + */ + roleAssumerWithWebIdentity?: (params: AssumeRoleWithWebIdentityParams) => Promise; + /** + * STSClientConfig to be used for creating STS Client for assuming role. + * @internal + */ + clientConfig?: any; + /** + * @internal + */ + clientPlugins?: Pluggable[]; +} +/** + * @internal + */ +export declare const fromWebToken: (init: FromWebTokenInit) => RuntimeConfigAwsCredentialIdentityProvider; +export {}; diff --git a/amplify/functions/fetchDocuments/node_modules/@aws-sdk/credential-provider-web-identity/dist-types/index.d.ts b/amplify/functions/fetchDocuments/node_modules/@aws-sdk/credential-provider-web-identity/dist-types/index.d.ts new file mode 100644 index 0000000..36c15dc --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@aws-sdk/credential-provider-web-identity/dist-types/index.d.ts @@ -0,0 +1,8 @@ +/** + * @internal + */ +export * from "./fromTokenFile"; +/** + * @internal + */ +export * from "./fromWebToken"; diff --git a/amplify/functions/fetchDocuments/node_modules/@aws-sdk/credential-provider-web-identity/dist-types/ts3.4/fromTokenFile.d.ts b/amplify/functions/fetchDocuments/node_modules/@aws-sdk/credential-provider-web-identity/dist-types/ts3.4/fromTokenFile.d.ts new file mode 100644 index 0000000..4f67356 --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@aws-sdk/credential-provider-web-identity/dist-types/ts3.4/fromTokenFile.d.ts @@ -0,0 +1,16 @@ +import { CredentialProviderOptions } from 
"@aws-sdk/types"; +import { AwsCredentialIdentityProvider } from "@smithy/types"; +import { FromWebTokenInit } from "./fromWebToken"; +export interface FromTokenFileInit + extends Partial< + Pick< + FromWebTokenInit, + Exclude + > + >, + CredentialProviderOptions { + webIdentityTokenFile?: string; +} +export declare const fromTokenFile: ( + init?: FromTokenFileInit +) => AwsCredentialIdentityProvider; diff --git a/amplify/functions/fetchDocuments/node_modules/@aws-sdk/credential-provider-web-identity/dist-types/ts3.4/fromWebToken.d.ts b/amplify/functions/fetchDocuments/node_modules/@aws-sdk/credential-provider-web-identity/dist-types/ts3.4/fromWebToken.d.ts new file mode 100644 index 0000000..73529a1 --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@aws-sdk/credential-provider-web-identity/dist-types/ts3.4/fromWebToken.d.ts @@ -0,0 +1,39 @@ +import { + CredentialProviderOptions, + RuntimeConfigAwsCredentialIdentityProvider, +} from "@aws-sdk/types"; +import { AwsCredentialIdentity, Pluggable } from "@smithy/types"; +export interface AssumeRoleWithWebIdentityParams { + RoleArn: string; + RoleSessionName: string; + WebIdentityToken: string; + ProviderId?: string; + PolicyArns?: { + arn?: string; + }[]; + Policy?: string; + DurationSeconds?: number; +} +type LowerCaseKey = { + [K in keyof T as `${Uncapitalize}`]: T[K]; +}; +export interface FromWebTokenInit + extends Pick< + LowerCaseKey, + Exclude< + keyof LowerCaseKey, + "roleSessionName" + > + >, + CredentialProviderOptions { + roleSessionName?: string; + roleAssumerWithWebIdentity?: ( + params: AssumeRoleWithWebIdentityParams + ) => Promise; + clientConfig?: any; + clientPlugins?: Pluggable[]; +} +export declare const fromWebToken: ( + init: FromWebTokenInit +) => RuntimeConfigAwsCredentialIdentityProvider; +export {}; diff --git a/amplify/functions/fetchDocuments/node_modules/@aws-sdk/credential-provider-web-identity/dist-types/ts3.4/index.d.ts 
b/amplify/functions/fetchDocuments/node_modules/@aws-sdk/credential-provider-web-identity/dist-types/ts3.4/index.d.ts new file mode 100644 index 0000000..0e900c0 --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@aws-sdk/credential-provider-web-identity/dist-types/ts3.4/index.d.ts @@ -0,0 +1,2 @@ +export * from "./fromTokenFile"; +export * from "./fromWebToken"; diff --git a/amplify/functions/fetchDocuments/node_modules/@aws-sdk/credential-provider-web-identity/package.json b/amplify/functions/fetchDocuments/node_modules/@aws-sdk/credential-provider-web-identity/package.json new file mode 100644 index 0000000..500eb21 --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@aws-sdk/credential-provider-web-identity/package.json @@ -0,0 +1,71 @@ +{ + "name": "@aws-sdk/credential-provider-web-identity", + "version": "3.803.0", + "description": "AWS credential provider that calls STS assumeRole for temporary AWS credentials", + "main": "./dist-cjs/index.js", + "module": "./dist-es/index.js", + "scripts": { + "build": "concurrently 'yarn:build:cjs' 'yarn:build:es' 'yarn:build:types'", + "build:cjs": "node ../../scripts/compilation/inline credential-provider-web-identity", + "build:es": "tsc -p tsconfig.es.json", + "build:include:deps": "lerna run --scope $npm_package_name --include-dependencies build", + "build:types": "tsc -p tsconfig.types.json", + "build:types:downlevel": "downlevel-dts dist-types dist-types/ts3.4", + "clean": "rimraf ./dist-* && rimraf *.tsbuildinfo", + "test": "yarn g:vitest run", + "test:watch": "yarn g:vitest watch" + }, + "browser": { + "./dist-cjs/fromTokenFile": false, + "./dist-es/fromTokenFile": false + }, + "react-native": { + "./dist-es/fromTokenFile": false, + "./dist-cjs/fromTokenFile": false + }, + "keywords": [ + "aws", + "credentials" + ], + "author": { + "name": "AWS SDK for JavaScript Team", + "url": "https://aws.amazon.com/javascript/" + }, + "license": "Apache-2.0", + "dependencies": { + "@aws-sdk/core": 
"3.799.0", + "@aws-sdk/nested-clients": "3.803.0", + "@aws-sdk/types": "3.775.0", + "@smithy/property-provider": "^4.0.2", + "@smithy/types": "^4.2.0", + "tslib": "^2.6.2" + }, + "devDependencies": { + "@tsconfig/recommended": "1.0.1", + "@types/node": "^18.19.69", + "concurrently": "7.0.0", + "downlevel-dts": "0.10.1", + "rimraf": "3.0.2", + "typescript": "~5.2.2" + }, + "types": "./dist-types/index.d.ts", + "engines": { + "node": ">=18.0.0" + }, + "typesVersions": { + "<4.0": { + "dist-types/*": [ + "dist-types/ts3.4/*" + ] + } + }, + "files": [ + "dist-*/**" + ], + "homepage": "https://github.com/aws/aws-sdk-js-v3/tree/main/packages/credential-provider-web-identity", + "repository": { + "type": "git", + "url": "https://github.com/aws/aws-sdk-js-v3.git", + "directory": "packages/credential-provider-web-identity" + } +} diff --git a/amplify/functions/fetchDocuments/node_modules/@aws-sdk/endpoint-cache/LICENSE b/amplify/functions/fetchDocuments/node_modules/@aws-sdk/endpoint-cache/LICENSE new file mode 100644 index 0000000..74d4e5c --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@aws-sdk/endpoint-cache/LICENSE @@ -0,0 +1,201 @@ +Apache License + Version 2.0, January 2004 + http://www.apache.org/licenses/ + + TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION + + 1. Definitions. + + "License" shall mean the terms and conditions for use, reproduction, + and distribution as defined by Sections 1 through 9 of this document. + + "Licensor" shall mean the copyright owner or entity authorized by + the copyright owner that is granting the License. + + "Legal Entity" shall mean the union of the acting entity and all + other entities that control, are controlled by, or are under common + control with that entity. 
For the purposes of this definition, + "control" means (i) the power, direct or indirect, to cause the + direction or management of such entity, whether by contract or + otherwise, or (ii) ownership of fifty percent (50%) or more of the + outstanding shares, or (iii) beneficial ownership of such entity. + + "You" (or "Your") shall mean an individual or Legal Entity + exercising permissions granted by this License. + + "Source" form shall mean the preferred form for making modifications, + including but not limited to software source code, documentation + source, and configuration files. + + "Object" form shall mean any form resulting from mechanical + transformation or translation of a Source form, including but + not limited to compiled object code, generated documentation, + and conversions to other media types. + + "Work" shall mean the work of authorship, whether in Source or + Object form, made available under the License, as indicated by a + copyright notice that is included in or attached to the work + (an example is provided in the Appendix below). + + "Derivative Works" shall mean any work, whether in Source or Object + form, that is based on (or derived from) the Work and for which the + editorial revisions, annotations, elaborations, or other modifications + represent, as a whole, an original work of authorship. For the purposes + of this License, Derivative Works shall not include works that remain + separable from, or merely link (or bind by name) to the interfaces of, + the Work and Derivative Works thereof. + + "Contribution" shall mean any work of authorship, including + the original version of the Work and any modifications or additions + to that Work or Derivative Works thereof, that is intentionally + submitted to Licensor for inclusion in the Work by the copyright owner + or by an individual or Legal Entity authorized to submit on behalf of + the copyright owner. 
For the purposes of this definition, "submitted" + means any form of electronic, verbal, or written communication sent + to the Licensor or its representatives, including but not limited to + communication on electronic mailing lists, source code control systems, + and issue tracking systems that are managed by, or on behalf of, the + Licensor for the purpose of discussing and improving the Work, but + excluding communication that is conspicuously marked or otherwise + designated in writing by the copyright owner as "Not a Contribution." + + "Contributor" shall mean Licensor and any individual or Legal Entity + on behalf of whom a Contribution has been received by Licensor and + subsequently incorporated within the Work. + + 2. Grant of Copyright License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + copyright license to reproduce, prepare Derivative Works of, + publicly display, publicly perform, sublicense, and distribute the + Work and such Derivative Works in Source or Object form. + + 3. Grant of Patent License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + (except as stated in this section) patent license to make, have made, + use, offer to sell, sell, import, and otherwise transfer the Work, + where such license applies only to those patent claims licensable + by such Contributor that are necessarily infringed by their + Contribution(s) alone or by combination of their Contribution(s) + with the Work to which such Contribution(s) was submitted. 
If You + institute patent litigation against any entity (including a + cross-claim or counterclaim in a lawsuit) alleging that the Work + or a Contribution incorporated within the Work constitutes direct + or contributory patent infringement, then any patent licenses + granted to You under this License for that Work shall terminate + as of the date such litigation is filed. + + 4. Redistribution. You may reproduce and distribute copies of the + Work or Derivative Works thereof in any medium, with or without + modifications, and in Source or Object form, provided that You + meet the following conditions: + + (a) You must give any other recipients of the Work or + Derivative Works a copy of this License; and + + (b) You must cause any modified files to carry prominent notices + stating that You changed the files; and + + (c) You must retain, in the Source form of any Derivative Works + that You distribute, all copyright, patent, trademark, and + attribution notices from the Source form of the Work, + excluding those notices that do not pertain to any part of + the Derivative Works; and + + (d) If the Work includes a "NOTICE" text file as part of its + distribution, then any Derivative Works that You distribute must + include a readable copy of the attribution notices contained + within such NOTICE file, excluding those notices that do not + pertain to any part of the Derivative Works, in at least one + of the following places: within a NOTICE text file distributed + as part of the Derivative Works; within the Source form or + documentation, if provided along with the Derivative Works; or, + within a display generated by the Derivative Works, if and + wherever such third-party notices normally appear. The contents + of the NOTICE file are for informational purposes only and + do not modify the License. 
You may add Your own attribution + notices within Derivative Works that You distribute, alongside + or as an addendum to the NOTICE text from the Work, provided + that such additional attribution notices cannot be construed + as modifying the License. + + You may add Your own copyright statement to Your modifications and + may provide additional or different license terms and conditions + for use, reproduction, or distribution of Your modifications, or + for any such Derivative Works as a whole, provided Your use, + reproduction, and distribution of the Work otherwise complies with + the conditions stated in this License. + + 5. Submission of Contributions. Unless You explicitly state otherwise, + any Contribution intentionally submitted for inclusion in the Work + by You to the Licensor shall be under the terms and conditions of + this License, without any additional terms or conditions. + Notwithstanding the above, nothing herein shall supersede or modify + the terms of any separate license agreement you may have executed + with Licensor regarding such Contributions. + + 6. Trademarks. This License does not grant permission to use the trade + names, trademarks, service marks, or product names of the Licensor, + except as required for reasonable and customary use in describing the + origin of the Work and reproducing the content of the NOTICE file. + + 7. Disclaimer of Warranty. Unless required by applicable law or + agreed to in writing, Licensor provides the Work (and each + Contributor provides its Contributions) on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or + implied, including, without limitation, any warranties or conditions + of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A + PARTICULAR PURPOSE. You are solely responsible for determining the + appropriateness of using or redistributing the Work and assume any + risks associated with Your exercise of permissions under this License. + + 8. 
Limitation of Liability. In no event and under no legal theory, + whether in tort (including negligence), contract, or otherwise, + unless required by applicable law (such as deliberate and grossly + negligent acts) or agreed to in writing, shall any Contributor be + liable to You for damages, including any direct, indirect, special, + incidental, or consequential damages of any character arising as a + result of this License or out of the use or inability to use the + Work (including but not limited to damages for loss of goodwill, + work stoppage, computer failure or malfunction, or any and all + other commercial damages or losses), even if such Contributor + has been advised of the possibility of such damages. + + 9. Accepting Warranty or Additional Liability. While redistributing + the Work or Derivative Works thereof, You may choose to offer, + and charge a fee for, acceptance of support, warranty, indemnity, + or other liability obligations and/or rights consistent with this + License. However, in accepting such obligations, You may act only + on Your own behalf and on Your sole responsibility, not on behalf + of any other Contributor, and only if You agree to indemnify, + defend, and hold each Contributor harmless for any liability + incurred by, or claims asserted against, such Contributor by reason + of your accepting any such warranty or additional liability. + + END OF TERMS AND CONDITIONS + + APPENDIX: How to apply the Apache License to your work. + + To apply the Apache License to your work, attach the following + boilerplate notice, with the fields enclosed by brackets "{}" + replaced with your own identifying information. (Don't include + the brackets!) The text should be enclosed in the appropriate + comment syntax for the file format. We also recommend that a + file or class name and description of purpose be included on the + same "printed page" as the copyright notice for easier + identification within third-party archives. 
+ + Copyright 2020 Amazon.com, Inc. or its affiliates. All Rights Reserved. + + Licensed under the Apache License, Version 2.0 (the "License"); + you may not use this file except in compliance with the License. + You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + + Unless required by applicable law or agreed to in writing, software + distributed under the License is distributed on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + See the License for the specific language governing permissions and + limitations under the License. \ No newline at end of file diff --git a/amplify/functions/fetchDocuments/node_modules/@aws-sdk/endpoint-cache/README.md b/amplify/functions/fetchDocuments/node_modules/@aws-sdk/endpoint-cache/README.md new file mode 100644 index 0000000..5d72b8c --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@aws-sdk/endpoint-cache/README.md @@ -0,0 +1,17 @@ +# @aws-sdk/endpoint-cache + +[![NPM version](https://img.shields.io/npm/v/@aws-sdk/endpoint-cache/latest.svg)](https://www.npmjs.com/package/@aws-sdk/endpoint-cache) +[![NPM downloads](https://img.shields.io/npm/dm/@aws-sdk/endpoint-cache.svg)](https://www.npmjs.com/package/@aws-sdk/endpoint-cache) + +> An internal package + +## Usage + +You probably shouldn't, at least directly. + +## EndpointCache + +- uses `mnemonist/lru-cache` for storing the cache. +- the `set` operation stores milliseconds elapsed since the UNIX epoch in Expires param based on CachePeriodInMinutes provided in Endpoint. +- the `get` operation returns all un-expired endpoints with their Expires values. +- the `getEndpoint` operation returns a randomly selected un-expired endpoint. 
diff --git a/amplify/functions/fetchDocuments/node_modules/@aws-sdk/endpoint-cache/dist-cjs/index.js b/amplify/functions/fetchDocuments/node_modules/@aws-sdk/endpoint-cache/dist-cjs/index.js new file mode 100644 index 0000000..6975621 --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@aws-sdk/endpoint-cache/dist-cjs/index.js @@ -0,0 +1,140 @@ +"use strict"; +var __create = Object.create; +var __defProp = Object.defineProperty; +var __getOwnPropDesc = Object.getOwnPropertyDescriptor; +var __getOwnPropNames = Object.getOwnPropertyNames; +var __getProtoOf = Object.getPrototypeOf; +var __hasOwnProp = Object.prototype.hasOwnProperty; +var __name = (target, value) => __defProp(target, "name", { value, configurable: true }); +var __export = (target, all) => { + for (var name in all) + __defProp(target, name, { get: all[name], enumerable: true }); +}; +var __copyProps = (to, from, except, desc) => { + if (from && typeof from === "object" || typeof from === "function") { + for (let key of __getOwnPropNames(from)) + if (!__hasOwnProp.call(to, key) && key !== except) + __defProp(to, key, { get: () => from[key], enumerable: !(desc = __getOwnPropDesc(from, key)) || desc.enumerable }); + } + return to; +}; +var __toESM = (mod, isNodeMode, target) => (target = mod != null ? __create(__getProtoOf(mod)) : {}, __copyProps( + // If the importer is in node compatibility mode or this is not an ESM + // file that has been converted to a CommonJS file using a Babel- + // compatible transform (i.e. "__esModule" has not been set), then set + // "default" to the CommonJS "module.exports" for node compatibility. + isNodeMode || !mod || !mod.__esModule ? 
__defProp(target, "default", { value: mod, enumerable: true }) : target, + mod +)); +var __toCommonJS = (mod) => __copyProps(__defProp({}, "__esModule", { value: true }), mod); + +// src/index.ts +var src_exports = {}; +__export(src_exports, { + EndpointCache: () => EndpointCache +}); +module.exports = __toCommonJS(src_exports); + +// src/EndpointCache.ts +var import_lru_cache = __toESM(require("mnemonist/lru-cache")); +var EndpointCache = class { + static { + __name(this, "EndpointCache"); + } + cache; + constructor(capacity) { + this.cache = new import_lru_cache.default(capacity); + } + /** + * Returns an un-expired endpoint for the given key. + * + * @param endpointsWithExpiry + * @returns + */ + getEndpoint(key) { + const endpointsWithExpiry = this.get(key); + if (!endpointsWithExpiry || endpointsWithExpiry.length === 0) { + return void 0; + } + const endpoints = endpointsWithExpiry.map((endpoint) => endpoint.Address); + return endpoints[Math.floor(Math.random() * endpoints.length)]; + } + /** + * Returns un-expired endpoints for the given key. + * + * @param key + * @returns + */ + get(key) { + if (!this.has(key)) { + return; + } + const value = this.cache.get(key); + if (!value) { + return; + } + const now = Date.now(); + const endpointsWithExpiry = value.filter((endpoint) => now < endpoint.Expires); + if (endpointsWithExpiry.length === 0) { + this.delete(key); + return void 0; + } + return endpointsWithExpiry; + } + /** + * Stores the endpoints passed for the key in cache. + * If not defined, uses empty string for the Address in endpoint. + * If not defined, uses one minute for CachePeriodInMinutes in endpoint. + * Stores milliseconds elapsed since the UNIX epoch in Expires param based + * on value provided in CachePeriodInMinutes. 
+ * + * @param key + * @param endpoints + */ + set(key, endpoints) { + const now = Date.now(); + this.cache.set( + key, + endpoints.map(({ Address, CachePeriodInMinutes }) => ({ + Address, + Expires: now + CachePeriodInMinutes * 60 * 1e3 + })) + ); + } + /** + * Deletes the value for the given key in the cache. + * + * @param {string} key + */ + delete(key) { + this.cache.set(key, []); + } + /** + * Checks whether the key exists in cache. + * + * @param {string} key + * @returns {boolean} + */ + has(key) { + if (!this.cache.has(key)) { + return false; + } + const endpoints = this.cache.peek(key); + if (!endpoints) { + return false; + } + return endpoints.length > 0; + } + /** + * Clears the cache. + */ + clear() { + this.cache.clear(); + } +}; +// Annotate the CommonJS export names for ESM import in node: + +0 && (module.exports = { + EndpointCache +}); + diff --git a/amplify/functions/fetchDocuments/node_modules/@aws-sdk/endpoint-cache/dist-es/Endpoint.js b/amplify/functions/fetchDocuments/node_modules/@aws-sdk/endpoint-cache/dist-es/Endpoint.js new file mode 100644 index 0000000..cb0ff5c --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@aws-sdk/endpoint-cache/dist-es/Endpoint.js @@ -0,0 +1 @@ +export {}; diff --git a/amplify/functions/fetchDocuments/node_modules/@aws-sdk/endpoint-cache/dist-es/EndpointCache.js b/amplify/functions/fetchDocuments/node_modules/@aws-sdk/endpoint-cache/dist-es/EndpointCache.js new file mode 100644 index 0000000..decd3f2 --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@aws-sdk/endpoint-cache/dist-es/EndpointCache.js @@ -0,0 +1,54 @@ +import LRUCache from "mnemonist/lru-cache"; +export class EndpointCache { + cache; + constructor(capacity) { + this.cache = new LRUCache(capacity); + } + getEndpoint(key) { + const endpointsWithExpiry = this.get(key); + if (!endpointsWithExpiry || endpointsWithExpiry.length === 0) { + return undefined; + } + const endpoints = endpointsWithExpiry.map((endpoint) => 
endpoint.Address); + return endpoints[Math.floor(Math.random() * endpoints.length)]; + } + get(key) { + if (!this.has(key)) { + return; + } + const value = this.cache.get(key); + if (!value) { + return; + } + const now = Date.now(); + const endpointsWithExpiry = value.filter((endpoint) => now < endpoint.Expires); + if (endpointsWithExpiry.length === 0) { + this.delete(key); + return undefined; + } + return endpointsWithExpiry; + } + set(key, endpoints) { + const now = Date.now(); + this.cache.set(key, endpoints.map(({ Address, CachePeriodInMinutes }) => ({ + Address, + Expires: now + CachePeriodInMinutes * 60 * 1000, + }))); + } + delete(key) { + this.cache.set(key, []); + } + has(key) { + if (!this.cache.has(key)) { + return false; + } + const endpoints = this.cache.peek(key); + if (!endpoints) { + return false; + } + return endpoints.length > 0; + } + clear() { + this.cache.clear(); + } +} diff --git a/amplify/functions/fetchDocuments/node_modules/@aws-sdk/endpoint-cache/dist-es/index.js b/amplify/functions/fetchDocuments/node_modules/@aws-sdk/endpoint-cache/dist-es/index.js new file mode 100644 index 0000000..41fce6d --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@aws-sdk/endpoint-cache/dist-es/index.js @@ -0,0 +1,2 @@ +export * from "./Endpoint"; +export * from "./EndpointCache"; diff --git a/amplify/functions/fetchDocuments/node_modules/@aws-sdk/endpoint-cache/dist-types/Endpoint.d.ts b/amplify/functions/fetchDocuments/node_modules/@aws-sdk/endpoint-cache/dist-types/Endpoint.d.ts new file mode 100644 index 0000000..17b37cf --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@aws-sdk/endpoint-cache/dist-types/Endpoint.d.ts @@ -0,0 +1,13 @@ +/** + * @internal + */ +export interface Endpoint { + /** + *

An endpoint address.

+ */ + Address: string; + /** + *

The TTL for the endpoint, in minutes.

+ */ + CachePeriodInMinutes: number; +} diff --git a/amplify/functions/fetchDocuments/node_modules/@aws-sdk/endpoint-cache/dist-types/EndpointCache.d.ts b/amplify/functions/fetchDocuments/node_modules/@aws-sdk/endpoint-cache/dist-types/EndpointCache.d.ts new file mode 100644 index 0000000..5128e14 --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@aws-sdk/endpoint-cache/dist-types/EndpointCache.d.ts @@ -0,0 +1,56 @@ +import { Endpoint } from "./Endpoint"; +/** + * @internal + */ +export interface EndpointWithExpiry extends Pick { + Expires: number; +} +/** + * @internal + */ +export declare class EndpointCache { + private readonly cache; + constructor(capacity: number); + /** + * Returns an un-expired endpoint for the given key. + * + * @param endpointsWithExpiry + * @returns + */ + getEndpoint(key: string): string | undefined; + /** + * Returns un-expired endpoints for the given key. + * + * @param key + * @returns + */ + get(key: string): EndpointWithExpiry[] | undefined; + /** + * Stores the endpoints passed for the key in cache. + * If not defined, uses empty string for the Address in endpoint. + * If not defined, uses one minute for CachePeriodInMinutes in endpoint. + * Stores milliseconds elapsed since the UNIX epoch in Expires param based + * on value provided in CachePeriodInMinutes. + * + * @param key + * @param endpoints + */ + set(key: string, endpoints: Endpoint[]): void; + /** + * Deletes the value for the given key in the cache. + * + * @param {string} key + */ + delete(key: string): void; + /** + * Checks whether the key exists in cache. + * + * @param {string} key + * @returns {boolean} + */ + has(key: string): boolean; + /** + * Clears the cache. 
+ */ + clear(): void; +} diff --git a/amplify/functions/fetchDocuments/node_modules/@aws-sdk/endpoint-cache/dist-types/index.d.ts b/amplify/functions/fetchDocuments/node_modules/@aws-sdk/endpoint-cache/dist-types/index.d.ts new file mode 100644 index 0000000..f2f149f --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@aws-sdk/endpoint-cache/dist-types/index.d.ts @@ -0,0 +1,8 @@ +/** + * @internal + */ +export * from "./Endpoint"; +/** + * @internal + */ +export * from "./EndpointCache"; diff --git a/amplify/functions/fetchDocuments/node_modules/@aws-sdk/endpoint-cache/dist-types/ts3.4/Endpoint.d.ts b/amplify/functions/fetchDocuments/node_modules/@aws-sdk/endpoint-cache/dist-types/ts3.4/Endpoint.d.ts new file mode 100644 index 0000000..c1caacb --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@aws-sdk/endpoint-cache/dist-types/ts3.4/Endpoint.d.ts @@ -0,0 +1,4 @@ +export interface Endpoint { + Address: string; + CachePeriodInMinutes: number; +} diff --git a/amplify/functions/fetchDocuments/node_modules/@aws-sdk/endpoint-cache/dist-types/ts3.4/EndpointCache.d.ts b/amplify/functions/fetchDocuments/node_modules/@aws-sdk/endpoint-cache/dist-types/ts3.4/EndpointCache.d.ts new file mode 100644 index 0000000..c01e2b3 --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@aws-sdk/endpoint-cache/dist-types/ts3.4/EndpointCache.d.ts @@ -0,0 +1,14 @@ +import { Endpoint } from "./Endpoint"; +export interface EndpointWithExpiry extends Pick { + Expires: number; +} +export declare class EndpointCache { + private readonly cache; + constructor(capacity: number); + getEndpoint(key: string): string | undefined; + get(key: string): EndpointWithExpiry[] | undefined; + set(key: string, endpoints: Endpoint[]): void; + delete(key: string): void; + has(key: string): boolean; + clear(): void; +} diff --git a/amplify/functions/fetchDocuments/node_modules/@aws-sdk/endpoint-cache/dist-types/ts3.4/index.d.ts 
b/amplify/functions/fetchDocuments/node_modules/@aws-sdk/endpoint-cache/dist-types/ts3.4/index.d.ts new file mode 100644 index 0000000..41fce6d --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@aws-sdk/endpoint-cache/dist-types/ts3.4/index.d.ts @@ -0,0 +1,2 @@ +export * from "./Endpoint"; +export * from "./EndpointCache"; diff --git a/amplify/functions/fetchDocuments/node_modules/@aws-sdk/endpoint-cache/package.json b/amplify/functions/fetchDocuments/node_modules/@aws-sdk/endpoint-cache/package.json new file mode 100644 index 0000000..13edb6f --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@aws-sdk/endpoint-cache/package.json @@ -0,0 +1,54 @@ +{ + "name": "@aws-sdk/endpoint-cache", + "version": "3.723.0", + "scripts": { + "build": "concurrently 'yarn:build:cjs' 'yarn:build:es' 'yarn:build:types'", + "build:cjs": "node ../../scripts/compilation/inline endpoint-cache", + "build:es": "tsc -p tsconfig.es.json", + "build:include:deps": "lerna run --scope $npm_package_name --include-dependencies build", + "build:types": "tsc -p tsconfig.types.json", + "build:types:downlevel": "downlevel-dts dist-types dist-types/ts3.4", + "clean": "rimraf ./dist-* && rimraf *.tsbuildinfo", + "test": "yarn g:vitest run", + "test:watch": "yarn g:vitest watch" + }, + "author": { + "name": "AWS SDK for JavaScript Team", + "url": "https://aws.amazon.com/javascript/" + }, + "license": "Apache-2.0", + "main": "./dist-cjs/index.js", + "module": "./dist-es/index.js", + "types": "./dist-types/index.d.ts", + "dependencies": { + "mnemonist": "0.38.3", + "tslib": "^2.6.2" + }, + "devDependencies": { + "@tsconfig/recommended": "1.0.1", + "@types/node": "^18.19.69", + "concurrently": "7.0.0", + "downlevel-dts": "0.10.1", + "rimraf": "3.0.2", + "typescript": "~5.2.2" + }, + "engines": { + "node": ">=18.0.0" + }, + "typesVersions": { + "<4.0": { + "dist-types/*": [ + "dist-types/ts3.4/*" + ] + } + }, + "files": [ + "dist-*/**" + ], + "homepage": 
"https://github.com/aws/aws-sdk-js-v3/tree/main/packages/endpoint-cache", + "repository": { + "type": "git", + "url": "https://github.com/aws/aws-sdk-js-v3.git", + "directory": "packages/endpoint-cache" + } +} diff --git a/amplify/functions/fetchDocuments/node_modules/@aws-sdk/lib-dynamodb/LICENSE b/amplify/functions/fetchDocuments/node_modules/@aws-sdk/lib-dynamodb/LICENSE new file mode 100644 index 0000000..7b6491b --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@aws-sdk/lib-dynamodb/LICENSE @@ -0,0 +1,201 @@ +Apache License + Version 2.0, January 2004 + http://www.apache.org/licenses/ + + TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION + + 1. Definitions. + + "License" shall mean the terms and conditions for use, reproduction, + and distribution as defined by Sections 1 through 9 of this document. + + "Licensor" shall mean the copyright owner or entity authorized by + the copyright owner that is granting the License. + + "Legal Entity" shall mean the union of the acting entity and all + other entities that control, are controlled by, or are under common + control with that entity. For the purposes of this definition, + "control" means (i) the power, direct or indirect, to cause the + direction or management of such entity, whether by contract or + otherwise, or (ii) ownership of fifty percent (50%) or more of the + outstanding shares, or (iii) beneficial ownership of such entity. + + "You" (or "Your") shall mean an individual or Legal Entity + exercising permissions granted by this License. + + "Source" form shall mean the preferred form for making modifications, + including but not limited to software source code, documentation + source, and configuration files. + + "Object" form shall mean any form resulting from mechanical + transformation or translation of a Source form, including but + not limited to compiled object code, generated documentation, + and conversions to other media types. 
+ + "Work" shall mean the work of authorship, whether in Source or + Object form, made available under the License, as indicated by a + copyright notice that is included in or attached to the work + (an example is provided in the Appendix below). + + "Derivative Works" shall mean any work, whether in Source or Object + form, that is based on (or derived from) the Work and for which the + editorial revisions, annotations, elaborations, or other modifications + represent, as a whole, an original work of authorship. For the purposes + of this License, Derivative Works shall not include works that remain + separable from, or merely link (or bind by name) to the interfaces of, + the Work and Derivative Works thereof. + + "Contribution" shall mean any work of authorship, including + the original version of the Work and any modifications or additions + to that Work or Derivative Works thereof, that is intentionally + submitted to Licensor for inclusion in the Work by the copyright owner + or by an individual or Legal Entity authorized to submit on behalf of + the copyright owner. For the purposes of this definition, "submitted" + means any form of electronic, verbal, or written communication sent + to the Licensor or its representatives, including but not limited to + communication on electronic mailing lists, source code control systems, + and issue tracking systems that are managed by, or on behalf of, the + Licensor for the purpose of discussing and improving the Work, but + excluding communication that is conspicuously marked or otherwise + designated in writing by the copyright owner as "Not a Contribution." + + "Contributor" shall mean Licensor and any individual or Legal Entity + on behalf of whom a Contribution has been received by Licensor and + subsequently incorporated within the Work. + + 2. Grant of Copyright License. 
Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + copyright license to reproduce, prepare Derivative Works of, + publicly display, publicly perform, sublicense, and distribute the + Work and such Derivative Works in Source or Object form. + + 3. Grant of Patent License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + (except as stated in this section) patent license to make, have made, + use, offer to sell, sell, import, and otherwise transfer the Work, + where such license applies only to those patent claims licensable + by such Contributor that are necessarily infringed by their + Contribution(s) alone or by combination of their Contribution(s) + with the Work to which such Contribution(s) was submitted. If You + institute patent litigation against any entity (including a + cross-claim or counterclaim in a lawsuit) alleging that the Work + or a Contribution incorporated within the Work constitutes direct + or contributory patent infringement, then any patent licenses + granted to You under this License for that Work shall terminate + as of the date such litigation is filed. + + 4. Redistribution. 
You may reproduce and distribute copies of the + Work or Derivative Works thereof in any medium, with or without + modifications, and in Source or Object form, provided that You + meet the following conditions: + + (a) You must give any other recipients of the Work or + Derivative Works a copy of this License; and + + (b) You must cause any modified files to carry prominent notices + stating that You changed the files; and + + (c) You must retain, in the Source form of any Derivative Works + that You distribute, all copyright, patent, trademark, and + attribution notices from the Source form of the Work, + excluding those notices that do not pertain to any part of + the Derivative Works; and + + (d) If the Work includes a "NOTICE" text file as part of its + distribution, then any Derivative Works that You distribute must + include a readable copy of the attribution notices contained + within such NOTICE file, excluding those notices that do not + pertain to any part of the Derivative Works, in at least one + of the following places: within a NOTICE text file distributed + as part of the Derivative Works; within the Source form or + documentation, if provided along with the Derivative Works; or, + within a display generated by the Derivative Works, if and + wherever such third-party notices normally appear. The contents + of the NOTICE file are for informational purposes only and + do not modify the License. You may add Your own attribution + notices within Derivative Works that You distribute, alongside + or as an addendum to the NOTICE text from the Work, provided + that such additional attribution notices cannot be construed + as modifying the License. 
+ + You may add Your own copyright statement to Your modifications and + may provide additional or different license terms and conditions + for use, reproduction, or distribution of Your modifications, or + for any such Derivative Works as a whole, provided Your use, + reproduction, and distribution of the Work otherwise complies with + the conditions stated in this License. + + 5. Submission of Contributions. Unless You explicitly state otherwise, + any Contribution intentionally submitted for inclusion in the Work + by You to the Licensor shall be under the terms and conditions of + this License, without any additional terms or conditions. + Notwithstanding the above, nothing herein shall supersede or modify + the terms of any separate license agreement you may have executed + with Licensor regarding such Contributions. + + 6. Trademarks. This License does not grant permission to use the trade + names, trademarks, service marks, or product names of the Licensor, + except as required for reasonable and customary use in describing the + origin of the Work and reproducing the content of the NOTICE file. + + 7. Disclaimer of Warranty. Unless required by applicable law or + agreed to in writing, Licensor provides the Work (and each + Contributor provides its Contributions) on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or + implied, including, without limitation, any warranties or conditions + of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A + PARTICULAR PURPOSE. You are solely responsible for determining the + appropriateness of using or redistributing the Work and assume any + risks associated with Your exercise of permissions under this License. + + 8. Limitation of Liability. 
In no event and under no legal theory, + whether in tort (including negligence), contract, or otherwise, + unless required by applicable law (such as deliberate and grossly + negligent acts) or agreed to in writing, shall any Contributor be + liable to You for damages, including any direct, indirect, special, + incidental, or consequential damages of any character arising as a + result of this License or out of the use or inability to use the + Work (including but not limited to damages for loss of goodwill, + work stoppage, computer failure or malfunction, or any and all + other commercial damages or losses), even if such Contributor + has been advised of the possibility of such damages. + + 9. Accepting Warranty or Additional Liability. While redistributing + the Work or Derivative Works thereof, You may choose to offer, + and charge a fee for, acceptance of support, warranty, indemnity, + or other liability obligations and/or rights consistent with this + License. However, in accepting such obligations, You may act only + on Your own behalf and on Your sole responsibility, not on behalf + of any other Contributor, and only if You agree to indemnify, + defend, and hold each Contributor harmless for any liability + incurred by, or claims asserted against, such Contributor by reason + of your accepting any such warranty or additional liability. + + END OF TERMS AND CONDITIONS + + APPENDIX: How to apply the Apache License to your work. + + To apply the Apache License to your work, attach the following + boilerplate notice, with the fields enclosed by brackets "{}" + replaced with your own identifying information. (Don't include + the brackets!) The text should be enclosed in the appropriate + comment syntax for the file format. We also recommend that a + file or class name and description of purpose be included on the + same "printed page" as the copyright notice for easier + identification within third-party archives. + + Copyright 2018-2020 Amazon.com, Inc. 
or its affiliates. All Rights Reserved. + + Licensed under the Apache License, Version 2.0 (the "License"); + you may not use this file except in compliance with the License. + You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + + Unless required by applicable law or agreed to in writing, software + distributed under the License is distributed on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + See the License for the specific language governing permissions and + limitations under the License. \ No newline at end of file diff --git a/amplify/functions/fetchDocuments/node_modules/@aws-sdk/lib-dynamodb/README.md b/amplify/functions/fetchDocuments/node_modules/@aws-sdk/lib-dynamodb/README.md new file mode 100644 index 0000000..fc88a48 --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@aws-sdk/lib-dynamodb/README.md @@ -0,0 +1,393 @@ +# @aws-sdk/lib-dynamodb + +[![NPM version](https://img.shields.io/npm/v/@aws-sdk/lib-dynamodb/latest.svg)](https://www.npmjs.com/package/@aws-sdk/lib-dynamodb) +[![NPM downloads](https://img.shields.io/npm/dm/@aws-sdk/lib-dynamodb.svg)](https://www.npmjs.com/package/@aws-sdk/lib-dynamodb) + +## Overview + +The document client simplifies working with items in Amazon DynamoDB by +abstracting away the notion of attribute values. This abstraction annotates native +JavaScript types supplied as input parameters, as well as converts annotated +response data to native JavaScript types. + +## Marshalling Input and Unmarshalling Response Data + +The document client affords developers the use of native JavaScript types +instead of `AttributeValue`s to simplify the JavaScript development +experience with Amazon DynamoDB. JavaScript objects passed in as parameters +are marshalled into `AttributeValue` shapes required by Amazon DynamoDB. +Responses from DynamoDB are unmarshalled into plain JavaScript objects +by the `DocumentClient`. 
The `DocumentClient` does not accept +`AttributeValue`s in favor of native JavaScript types. + +| JavaScript Type | DynamoDB AttributeValue | +| :--------------------------------: | ----------------------- | +| String | S | +| Number / BigInt / NumberValue | N | +| Boolean | BOOL | +| null | NULL | +| Array | L | +| Object | M | +| Set\ | BS | +| Set\ | NS | +| Set\ | SS | +| Uint8Array, Buffer, File, Blob... | B | + +### Example + +Here is an example list which is sent to DynamoDB client in an operation: + +```json +{ "L": [{ "NULL": true }, { "BOOL": false }, { "N": 1 }, { "S": "two" }] } +``` + +The DynamoDB document client abstracts the attribute values as follows in +both input and output: + +```json +[null, false, 1, "two"] +``` + +## Usage + +To create document client, you need to create DynamoDB client first as follows: + +```js +import { DynamoDBClient } from "@aws-sdk/client-dynamodb"; // ES6 import +// const { DynamoDBClient } = require("@aws-sdk/client-dynamodb"); // CommonJS import + +// Bare-bones DynamoDB Client +const client = new DynamoDBClient({}); +``` + +```js +import { DynamoDB } from "@aws-sdk/client-dynamodb"; // ES6 import +// const { DynamoDB } = require("@aws-sdk/client-dynamodb"); // CommonJS import + +// Full DynamoDB Client +const client = new DynamoDB({}); +``` + +The bare-bones clients are more modular. They reduce bundle size and improve +loading performance over full clients as explained in blog post on +[modular packages in AWS SDK for JavaScript](https://aws.amazon.com/blogs/developer/modular-packages-in-aws-sdk-for-javascript/). 
+ +### Constructor + +Once DynamoDB client is created, you can either create the bare-bones +document client or full document client as follows: + +```js +import { DynamoDBDocumentClient } from "@aws-sdk/lib-dynamodb"; // ES6 import +// const { DynamoDBDocumentClient } = require("@aws-sdk/lib-dynamodb"); // CommonJS import + +// Bare-bones document client +const ddbDocClient = DynamoDBDocumentClient.from(client); // client is DynamoDB client +``` + +```js +import { DynamoDBDocument } from "@aws-sdk/lib-dynamodb"; // ES6 import +// const { DynamoDBDocument } = require("@aws-sdk/lib-dynamodb"); // CommonJS import + +// Full document client +const ddbDocClient = DynamoDBDocument.from(client); // client is DynamoDB client +``` + +### Configuration + +The configuration for marshalling and unmarshalling can be sent as an optional +second parameter during creation of document client as follows: + +```ts +export interface marshallOptions { + /** + * Whether to automatically convert empty strings, blobs, and sets to `null` + */ + convertEmptyValues?: boolean; + /** + * Whether to remove undefined values from JS arrays/Sets/objects + * when marshalling to DynamoDB lists/sets/maps respectively. + * + * A DynamoDB item is not itself considered a map. Only + * attributes of an item are examined. + */ + removeUndefinedValues?: boolean; + /** + * Whether to convert typeof object to map attribute. + */ + convertClassInstanceToMap?: boolean; + /** + * Whether to convert the top level container + * if it is a map or list. + * + * Default is true when using the DynamoDBDocumentClient, + * but false if directly using the marshall function (backwards compatibility). + */ + convertTopLevelContainer?: boolean; + /** + * Whether to allow numbers beyond Number.MAX_SAFE_INTEGER during marshalling. + * When set to true, allows numbers that may lose precision when converted to JavaScript numbers. 
+ * When false (default), throws an error if a number exceeds Number.MAX_SAFE_INTEGER to prevent + * unintended loss of precision. Consider using the NumberValue type from @aws-sdk/lib-dynamodb + * for precise handling of large numbers. + */ + allowImpreciseNumbers?: boolean; +} + +export interface unmarshallOptions { + /** + * Whether to modify how numbers are unmarshalled from DynamoDB. + * When set to true, returns numbers as NumberValue instances instead of native JavaScript numbers. + * This allows for the safe round-trip transport of numbers of arbitrary size. + * + * If a function is provided, it will be called with the string representation of numbers to handle + * custom conversions (e.g., using BigInt or decimal libraries). + */ + wrapNumbers?: boolean | ((value: string) => number | bigint | NumberValue | any); + /** + * When true, skip wrapping the data in `{ M: data }` before converting. + * + * Default is true when using the DynamoDBDocumentClient, + * but false if directly using the unmarshall function (backwards compatibility). + */ + convertWithoutMapWrapper?: boolean; +} + +const marshallOptions: marshallOptions = {}; +const unmarshallOptions: unmarshallOptions = {}; + +const translateConfig = { marshallOptions, unmarshallOptions }; + +const client = new DynamoDBClient({}); +const ddbDocClient = DynamoDBDocument.from(client, translateConfig); +``` + +### Calling operations + +You can call the document client operations using command objects on bare-bones +client as follows: + +```js +import { DynamoDBDocumentClient, PutCommand } from "@aws-sdk/lib-dynamodb"; + +// ... DynamoDB client creation + +const ddbDocClient = DynamoDBDocumentClient.from(client); +// Call using bare-bones client and Command object. 
+await ddbDocClient.send( + new PutCommand({ + TableName, + Item: { + id: "1", + content: "content from DynamoDBDocumentClient", + }, + }) +); +``` + +You can also call operations on full client as follows: + +```js +import { DynamoDBDocument } from "@aws-sdk/lib-dynamodb"; + +// ... DynamoDB client creation + +const ddbDocClient = DynamoDBDocument.from(client); +// Call using full client. +await ddbDocClient.put({ + TableName, + Item: { + id: "2", + content: "content from DynamoDBDocument", + }, +}); +``` + +### Large Numbers and `NumberValue`. + +On the input or marshalling side, the class `NumberValue` can be used +anywhere to represent a DynamoDB number value, even small numbers. + +```ts +import { DynamoDB } from "@aws-sdk/client-dynamodb"; +import { NumberValue, DynamoDBDocument } from "@aws-sdk/lib-dynamodb"; + +// Note, the client will not validate the acceptability of the number +// in terms of size or format. +// It is only here to preserve your precise representation. +const client = DynamoDBDocument.from(new DynamoDB({})); + +await client.put({ + Item: { + id: 1, + smallNumber: NumberValue.from("123"), + bigNumber: NumberValue.from("1000000000000000000000.000000000001"), + nSet: new Set([123, NumberValue.from("456"), 789]), + }, +}); +``` + +On the output or unmarshalling side, the class `NumberValue` is used +depending on your setting for the `unmarshallOptions` flag `wrapNumbers`, +shown above. + +```ts +import { DynamoDB } from "@aws-sdk/client-dynamodb"; +import { NumberValue, DynamoDBDocument } from "@aws-sdk/lib-dynamodb"; + +const client = DynamoDBDocument.from(new DynamoDB({})); + +const response = await client.get({ + Key: { + id: 1, + }, +}); + +/** + * Numbers in the response may be a number, a BigInt, or a NumberValue depending + * on how you set `wrapNumbers`. 
+ */ +const value = response.Item.bigNumber; +``` + +You can also provide a custom function to handle number conversion during unmarshalling: + +```typescript +const client = DynamoDBDocument.from(new DynamoDB({}), { + unmarshallOptions: { + // Use BigInt for all numbers + wrapNumbers: (str) => BigInt(str), + }, +}); + +const response = await client.get({ + Key: { id: 1 }, +}); + +// Numbers in response will be BigInt instead of NumberValue or regular numbers +``` + +`NumberValue` does not provide a way to do mathematical operations on itself. +To do mathematical operations, take the string value of `NumberValue` by calling +`.toString()` and supply it to your chosen big number implementation. + +The client protects against precision loss by throwing an error on large numbers, but you can either +allow imprecise values with `allowImpreciseNumbers` or maintain exact precision using `NumberValue`. + +```typescript +const preciseValue = "34567890123456789012345678901234567890"; + +// 1. Default behavior - will throw error +await client.send( + new PutCommand({ + TableName: "Table", + Item: { + id: "1", + number: Number(preciseValue), // Throws error: Number is greater than Number.MAX_SAFE_INTEGER + }, + }) +); + +// 2. Using allowImpreciseNumbers - will store but loses precision (mimics the v2 implicit behavior) +const impreciseClient = DynamoDBDocumentClient.from(new DynamoDBClient({}), { + marshallOptions: { allowImpreciseNumbers: true }, +}); +await impreciseClient.send( + new PutCommand({ + TableName: "Table", + Item: { + id: "2", + number: Number(preciseValue), // Loses precision 34567890123456790000000000000000000000n + }, + }) +); +``` + +### Client and Command middleware stacks + +As with other AWS SDK for JavaScript v3 clients, you can apply middleware functions +both on the client itself and individual `Command`s. + +For individual `Command`s, here are examples of how to add middleware before and after +both marshalling and unmarshalling. 
We will use `QueryCommand` as an example. +Others follow the same pattern. + +```js +import { DynamoDBDocumentClient, QueryCommand } from "@aws-sdk/lib-dynamodb"; + +const client = new DynamoDBClient({ + /*...*/ +}); +const doc = DynamoDBDocumentClient.from(client); +const command = new QueryCommand({ + /*...*/ +}); +``` + +Before and after marshalling: + +```js +command.middlewareStack.addRelativeTo( + (next) => async (args) => { + console.log("pre-marshall", args.input); + return next(args); + }, + { + relation: "before", + toMiddleware: "DocumentMarshall", + } +); +command.middlewareStack.addRelativeTo( + (next) => async (args) => { + console.log("post-marshall", args.input); + return next(args); + }, + { + relation: "after", + toMiddleware: "DocumentMarshall", + } +); +``` + +Before and after unmarshalling: + +```js +command.middlewareStack.addRelativeTo( + (next) => async (args) => { + const result = await next(args); + console.log("pre-unmarshall", result.output.Items); + return result; + }, + { + relation: "after", // <- after for pre-unmarshall + toMiddleware: "DocumentUnmarshall", + } +); +command.middlewareStack.addRelativeTo( + (next) => async (args) => { + const result = await next(args); + console.log("post-unmarshall", result.output.Items); + return result; + }, + { + relation: "before", // <- before for post-unmarshall + toMiddleware: "DocumentUnmarshall", + } +); +``` + +### Destroying document client + +The `destroy()` call on document client is a no-op as document client does not +create a new DynamoDB client. You need to call `destroy()` on DynamoDB client to +clean resources used by it as shown below. + +```js +const client = new DynamoDBClient({}); +const ddbDocClient = DynamoDBDocumentClient.from(client); + +// Perform operations on document client. 
+ +ddbDocClient.destroy(); // no-op +client.destroy(); // destroys DynamoDBClient +``` diff --git a/amplify/functions/fetchDocuments/node_modules/@aws-sdk/lib-dynamodb/dist-cjs/index.js b/amplify/functions/fetchDocuments/node_modules/@aws-sdk/lib-dynamodb/dist-cjs/index.js new file mode 100644 index 0000000..b8cf754 --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@aws-sdk/lib-dynamodb/dist-cjs/index.js @@ -0,0 +1,1053 @@ +"use strict"; +var __defProp = Object.defineProperty; +var __getOwnPropDesc = Object.getOwnPropertyDescriptor; +var __getOwnPropNames = Object.getOwnPropertyNames; +var __hasOwnProp = Object.prototype.hasOwnProperty; +var __name = (target, value) => __defProp(target, "name", { value, configurable: true }); +var __export = (target, all) => { + for (var name in all) + __defProp(target, name, { get: all[name], enumerable: true }); +}; +var __copyProps = (to, from, except, desc) => { + if (from && typeof from === "object" || typeof from === "function") { + for (let key of __getOwnPropNames(from)) + if (!__hasOwnProp.call(to, key) && key !== except) + __defProp(to, key, { get: () => from[key], enumerable: !(desc = __getOwnPropDesc(from, key)) || desc.enumerable }); + } + return to; +}; +var __toCommonJS = (mod) => __copyProps(__defProp({}, "__esModule", { value: true }), mod); + +// src/index.ts +var index_exports = {}; +__export(index_exports, { + BatchExecuteStatementCommand: () => BatchExecuteStatementCommand, + BatchGetCommand: () => BatchGetCommand, + BatchWriteCommand: () => BatchWriteCommand, + DeleteCommand: () => DeleteCommand, + DynamoDBDocument: () => DynamoDBDocument, + DynamoDBDocumentClient: () => DynamoDBDocumentClient, + DynamoDBDocumentClientCommand: () => DynamoDBDocumentClientCommand, + ExecuteStatementCommand: () => ExecuteStatementCommand, + ExecuteTransactionCommand: () => ExecuteTransactionCommand, + GetCommand: () => GetCommand, + NativeAttributeBinary: () => import_util_dynamodb.NativeAttributeBinary, + 
NativeAttributeValue: () => import_util_dynamodb.NativeAttributeValue, + NativeScalarAttributeValue: () => import_util_dynamodb.NativeScalarAttributeValue, + NumberValue: () => import_util_dynamodb.NumberValueImpl, + PaginationConfiguration: () => import_types.PaginationConfiguration, + PutCommand: () => PutCommand, + QueryCommand: () => QueryCommand, + ScanCommand: () => ScanCommand, + TransactGetCommand: () => TransactGetCommand, + TransactWriteCommand: () => TransactWriteCommand, + UpdateCommand: () => UpdateCommand, + __Client: () => import_smithy_client.Client, + marshallOptions: () => import_util_dynamodb.marshallOptions, + paginateQuery: () => paginateQuery, + paginateScan: () => paginateScan, + unmarshallOptions: () => import_util_dynamodb.unmarshallOptions +}); +module.exports = __toCommonJS(index_exports); + +// src/commands/BatchExecuteStatementCommand.ts +var import_client_dynamodb = require("@aws-sdk/client-dynamodb"); + + +// src/baseCommand/DynamoDBDocumentClientCommand.ts +var import_core = require("@aws-sdk/core"); +var import_smithy_client = require("@smithy/smithy-client"); + +// src/commands/utils.ts +var import_util_dynamodb = require("@aws-sdk/util-dynamodb"); +var SELF = null; +var ALL_VALUES = {}; +var ALL_MEMBERS = []; +var NEXT_LEVEL = "*"; +var processObj = /* @__PURE__ */ __name((obj, processFunc, keyNodes) => { + if (obj !== void 0) { + if (keyNodes == null) { + return processFunc(obj); + } else { + const keys = Object.keys(keyNodes); + const goToNextLevel = keys.length === 1 && keys[0] === NEXT_LEVEL; + const someChildren = keys.length >= 1 && !goToNextLevel; + const allChildren = keys.length === 0; + if (someChildren) { + return processKeysInObj(obj, processFunc, keyNodes); + } else if (allChildren) { + return processAllKeysInObj(obj, processFunc, SELF); + } else if (goToNextLevel) { + return Object.entries(obj ?? 
{}).reduce((acc, [k, v]) => { + if (typeof v !== "function") { + acc[k] = processObj(v, processFunc, keyNodes[NEXT_LEVEL]); + } + return acc; + }, Array.isArray(obj) ? [] : {}); + } + } + } + return void 0; +}, "processObj"); +var processKeysInObj = /* @__PURE__ */ __name((obj, processFunc, keyNodes) => { + let accumulator; + if (Array.isArray(obj)) { + accumulator = obj.filter((item) => typeof item !== "function"); + } else { + accumulator = {}; + for (const [k, v] of Object.entries(obj)) { + if (typeof v !== "function") { + accumulator[k] = v; + } + } + } + for (const [nodeKey, nodes] of Object.entries(keyNodes)) { + if (typeof obj[nodeKey] === "function") { + continue; + } + const processedValue = processObj(obj[nodeKey], processFunc, nodes); + if (processedValue !== void 0 && typeof processedValue !== "function") { + accumulator[nodeKey] = processedValue; + } + } + return accumulator; +}, "processKeysInObj"); +var processAllKeysInObj = /* @__PURE__ */ __name((obj, processFunc, keyNodes) => { + if (Array.isArray(obj)) { + return obj.filter((item) => typeof item !== "function").map((item) => processObj(item, processFunc, keyNodes)); + } + return Object.entries(obj).reduce((acc, [key, value]) => { + if (typeof value === "function") { + return acc; + } + const processedValue = processObj(value, processFunc, keyNodes); + if (processedValue !== void 0 && typeof processedValue !== "function") { + acc[key] = processedValue; + } + return acc; + }, {}); +}, "processAllKeysInObj"); +var marshallInput = /* @__PURE__ */ __name((obj, keyNodes, options) => { + const marshallFunc = /* @__PURE__ */ __name((toMarshall) => (0, import_util_dynamodb.marshall)(toMarshall, options), "marshallFunc"); + return processKeysInObj(obj, marshallFunc, keyNodes); +}, "marshallInput"); +var unmarshallOutput = /* @__PURE__ */ __name((obj, keyNodes, options) => { + const unmarshallFunc = /* @__PURE__ */ __name((toMarshall) => (0, import_util_dynamodb.unmarshall)(toMarshall, options), 
"unmarshallFunc"); + return processKeysInObj(obj, unmarshallFunc, keyNodes); +}, "unmarshallOutput"); + +// src/baseCommand/DynamoDBDocumentClientCommand.ts +var DynamoDBDocumentClientCommand = class extends import_smithy_client.Command { + static { + __name(this, "DynamoDBDocumentClientCommand"); + } + addMarshallingMiddleware(configuration) { + const { marshallOptions: marshallOptions3 = {}, unmarshallOptions: unmarshallOptions3 = {} } = configuration.translateConfig || {}; + marshallOptions3.convertTopLevelContainer = marshallOptions3.convertTopLevelContainer ?? true; + unmarshallOptions3.convertWithoutMapWrapper = unmarshallOptions3.convertWithoutMapWrapper ?? true; + this.clientCommand.middlewareStack.addRelativeTo( + (next, context) => async (args) => { + (0, import_core.setFeature)(context, "DDB_MAPPER", "d"); + args.input = marshallInput(args.input, this.inputKeyNodes, marshallOptions3); + return next(args); + }, + { + name: "DocumentMarshall", + relation: "before", + toMiddleware: "serializerMiddleware", + override: true + } + ); + this.clientCommand.middlewareStack.addRelativeTo( + (next, context) => async (args) => { + const deserialized = await next(args); + deserialized.output = unmarshallOutput(deserialized.output, this.outputKeyNodes, unmarshallOptions3); + return deserialized; + }, + { + name: "DocumentUnmarshall", + relation: "before", + toMiddleware: "deserializerMiddleware", + override: true + } + ); + } +}; + +// src/commands/BatchExecuteStatementCommand.ts +var BatchExecuteStatementCommand = class extends DynamoDBDocumentClientCommand { + constructor(input) { + super(); + this.input = input; + this.clientCommand = new import_client_dynamodb.BatchExecuteStatementCommand(this.input); + this.middlewareStack = this.clientCommand.middlewareStack; + } + static { + __name(this, "BatchExecuteStatementCommand"); + } + inputKeyNodes = { + Statements: { + "*": { + Parameters: ALL_MEMBERS + // set/list of AttributeValue + } + } + }; + outputKeyNodes = { + 
Responses: { + "*": { + Error: { + Item: ALL_VALUES + // map with AttributeValue + }, + Item: ALL_VALUES + // map with AttributeValue + } + } + }; + clientCommand; + middlewareStack; + /** + * @internal + */ + resolveMiddleware(clientStack, configuration, options) { + this.addMarshallingMiddleware(configuration); + const stack = clientStack.concat(this.middlewareStack); + const handler = this.clientCommand.resolveMiddleware(stack, configuration, options); + return async () => handler(this.clientCommand); + } +}; + +// src/commands/BatchGetCommand.ts + + +var BatchGetCommand = class extends DynamoDBDocumentClientCommand { + constructor(input) { + super(); + this.input = input; + this.clientCommand = new import_client_dynamodb.BatchGetItemCommand(this.input); + this.middlewareStack = this.clientCommand.middlewareStack; + } + static { + __name(this, "BatchGetCommand"); + } + inputKeyNodes = { + RequestItems: { + "*": { + Keys: { + "*": ALL_VALUES + // map with AttributeValue + } + } + } + }; + outputKeyNodes = { + Responses: { + "*": { + "*": ALL_VALUES + // map with AttributeValue + } + }, + UnprocessedKeys: { + "*": { + Keys: { + "*": ALL_VALUES + // map with AttributeValue + } + } + } + }; + clientCommand; + middlewareStack; + /** + * @internal + */ + resolveMiddleware(clientStack, configuration, options) { + this.addMarshallingMiddleware(configuration); + const stack = clientStack.concat(this.middlewareStack); + const handler = this.clientCommand.resolveMiddleware(stack, configuration, options); + return async () => handler(this.clientCommand); + } +}; + +// src/commands/BatchWriteCommand.ts + + +var BatchWriteCommand = class extends DynamoDBDocumentClientCommand { + constructor(input) { + super(); + this.input = input; + this.clientCommand = new import_client_dynamodb.BatchWriteItemCommand(this.input); + this.middlewareStack = this.clientCommand.middlewareStack; + } + static { + __name(this, "BatchWriteCommand"); + } + inputKeyNodes = { + RequestItems: { + "*": { 
+ "*": { + PutRequest: { + Item: ALL_VALUES + // map with AttributeValue + }, + DeleteRequest: { + Key: ALL_VALUES + // map with AttributeValue + } + } + } + } + }; + outputKeyNodes = { + UnprocessedItems: { + "*": { + "*": { + PutRequest: { + Item: ALL_VALUES + // map with AttributeValue + }, + DeleteRequest: { + Key: ALL_VALUES + // map with AttributeValue + } + } + } + }, + ItemCollectionMetrics: { + "*": { + "*": { + ItemCollectionKey: ALL_VALUES + // map with AttributeValue + } + } + } + }; + clientCommand; + middlewareStack; + /** + * @internal + */ + resolveMiddleware(clientStack, configuration, options) { + this.addMarshallingMiddleware(configuration); + const stack = clientStack.concat(this.middlewareStack); + const handler = this.clientCommand.resolveMiddleware(stack, configuration, options); + return async () => handler(this.clientCommand); + } +}; + +// src/commands/DeleteCommand.ts + + +var DeleteCommand = class extends DynamoDBDocumentClientCommand { + constructor(input) { + super(); + this.input = input; + this.clientCommand = new import_client_dynamodb.DeleteItemCommand(this.input); + this.middlewareStack = this.clientCommand.middlewareStack; + } + static { + __name(this, "DeleteCommand"); + } + inputKeyNodes = { + Key: ALL_VALUES, + // map with AttributeValue + Expected: { + "*": { + Value: SELF, + AttributeValueList: ALL_MEMBERS + // set/list of AttributeValue + } + }, + ExpressionAttributeValues: ALL_VALUES + // map with AttributeValue + }; + outputKeyNodes = { + Attributes: ALL_VALUES, + // map with AttributeValue + ItemCollectionMetrics: { + ItemCollectionKey: ALL_VALUES + // map with AttributeValue + } + }; + clientCommand; + middlewareStack; + /** + * @internal + */ + resolveMiddleware(clientStack, configuration, options) { + this.addMarshallingMiddleware(configuration); + const stack = clientStack.concat(this.middlewareStack); + const handler = this.clientCommand.resolveMiddleware(stack, configuration, options); + return async () => 
handler(this.clientCommand); + } +}; + +// src/commands/ExecuteStatementCommand.ts + + +var ExecuteStatementCommand = class extends DynamoDBDocumentClientCommand { + constructor(input) { + super(); + this.input = input; + this.clientCommand = new import_client_dynamodb.ExecuteStatementCommand(this.input); + this.middlewareStack = this.clientCommand.middlewareStack; + } + static { + __name(this, "ExecuteStatementCommand"); + } + inputKeyNodes = { + Parameters: ALL_MEMBERS + // set/list of AttributeValue + }; + outputKeyNodes = { + Items: { + "*": ALL_VALUES + // map with AttributeValue + }, + LastEvaluatedKey: ALL_VALUES + // map with AttributeValue + }; + clientCommand; + middlewareStack; + /** + * @internal + */ + resolveMiddleware(clientStack, configuration, options) { + this.addMarshallingMiddleware(configuration); + const stack = clientStack.concat(this.middlewareStack); + const handler = this.clientCommand.resolveMiddleware(stack, configuration, options); + return async () => handler(this.clientCommand); + } +}; + +// src/commands/ExecuteTransactionCommand.ts + + +var ExecuteTransactionCommand = class extends DynamoDBDocumentClientCommand { + constructor(input) { + super(); + this.input = input; + this.clientCommand = new import_client_dynamodb.ExecuteTransactionCommand(this.input); + this.middlewareStack = this.clientCommand.middlewareStack; + } + static { + __name(this, "ExecuteTransactionCommand"); + } + inputKeyNodes = { + TransactStatements: { + "*": { + Parameters: ALL_MEMBERS + // set/list of AttributeValue + } + } + }; + outputKeyNodes = { + Responses: { + "*": { + Item: ALL_VALUES + // map with AttributeValue + } + } + }; + clientCommand; + middlewareStack; + /** + * @internal + */ + resolveMiddleware(clientStack, configuration, options) { + this.addMarshallingMiddleware(configuration); + const stack = clientStack.concat(this.middlewareStack); + const handler = this.clientCommand.resolveMiddleware(stack, configuration, options); + return async () => 
handler(this.clientCommand); + } +}; + +// src/commands/GetCommand.ts + + +var GetCommand = class extends DynamoDBDocumentClientCommand { + constructor(input) { + super(); + this.input = input; + this.clientCommand = new import_client_dynamodb.GetItemCommand(this.input); + this.middlewareStack = this.clientCommand.middlewareStack; + } + static { + __name(this, "GetCommand"); + } + inputKeyNodes = { + Key: ALL_VALUES + // map with AttributeValue + }; + outputKeyNodes = { + Item: ALL_VALUES + // map with AttributeValue + }; + clientCommand; + middlewareStack; + /** + * @internal + */ + resolveMiddleware(clientStack, configuration, options) { + this.addMarshallingMiddleware(configuration); + const stack = clientStack.concat(this.middlewareStack); + const handler = this.clientCommand.resolveMiddleware(stack, configuration, options); + return async () => handler(this.clientCommand); + } +}; + +// src/commands/PutCommand.ts + + +var PutCommand = class extends DynamoDBDocumentClientCommand { + constructor(input) { + super(); + this.input = input; + this.clientCommand = new import_client_dynamodb.PutItemCommand(this.input); + this.middlewareStack = this.clientCommand.middlewareStack; + } + static { + __name(this, "PutCommand"); + } + inputKeyNodes = { + Item: ALL_VALUES, + // map with AttributeValue + Expected: { + "*": { + Value: SELF, + AttributeValueList: ALL_MEMBERS + // set/list of AttributeValue + } + }, + ExpressionAttributeValues: ALL_VALUES + // map with AttributeValue + }; + outputKeyNodes = { + Attributes: ALL_VALUES, + // map with AttributeValue + ItemCollectionMetrics: { + ItemCollectionKey: ALL_VALUES + // map with AttributeValue + } + }; + clientCommand; + middlewareStack; + /** + * @internal + */ + resolveMiddleware(clientStack, configuration, options) { + this.addMarshallingMiddleware(configuration); + const stack = clientStack.concat(this.middlewareStack); + const handler = this.clientCommand.resolveMiddleware(stack, configuration, options); + return 
async () => handler(this.clientCommand); + } +}; + +// src/commands/QueryCommand.ts + + +var QueryCommand = class extends DynamoDBDocumentClientCommand { + constructor(input) { + super(); + this.input = input; + this.clientCommand = new import_client_dynamodb.QueryCommand(this.input); + this.middlewareStack = this.clientCommand.middlewareStack; + } + static { + __name(this, "QueryCommand"); + } + inputKeyNodes = { + KeyConditions: { + "*": { + AttributeValueList: ALL_MEMBERS + // set/list of AttributeValue + } + }, + QueryFilter: { + "*": { + AttributeValueList: ALL_MEMBERS + // set/list of AttributeValue + } + }, + ExclusiveStartKey: ALL_VALUES, + // map with AttributeValue + ExpressionAttributeValues: ALL_VALUES + // map with AttributeValue + }; + outputKeyNodes = { + Items: { + "*": ALL_VALUES + // map with AttributeValue + }, + LastEvaluatedKey: ALL_VALUES + // map with AttributeValue + }; + clientCommand; + middlewareStack; + /** + * @internal + */ + resolveMiddleware(clientStack, configuration, options) { + this.addMarshallingMiddleware(configuration); + const stack = clientStack.concat(this.middlewareStack); + const handler = this.clientCommand.resolveMiddleware(stack, configuration, options); + return async () => handler(this.clientCommand); + } +}; + +// src/commands/ScanCommand.ts + + +var ScanCommand = class extends DynamoDBDocumentClientCommand { + constructor(input) { + super(); + this.input = input; + this.clientCommand = new import_client_dynamodb.ScanCommand(this.input); + this.middlewareStack = this.clientCommand.middlewareStack; + } + static { + __name(this, "ScanCommand"); + } + inputKeyNodes = { + ScanFilter: { + "*": { + AttributeValueList: ALL_MEMBERS + // set/list of AttributeValue + } + }, + ExclusiveStartKey: ALL_VALUES, + // map with AttributeValue + ExpressionAttributeValues: ALL_VALUES + // map with AttributeValue + }; + outputKeyNodes = { + Items: { + "*": ALL_VALUES + // map with AttributeValue + }, + LastEvaluatedKey: ALL_VALUES + // 
map with AttributeValue + }; + clientCommand; + middlewareStack; + /** + * @internal + */ + resolveMiddleware(clientStack, configuration, options) { + this.addMarshallingMiddleware(configuration); + const stack = clientStack.concat(this.middlewareStack); + const handler = this.clientCommand.resolveMiddleware(stack, configuration, options); + return async () => handler(this.clientCommand); + } +}; + +// src/commands/TransactGetCommand.ts + + +var TransactGetCommand = class extends DynamoDBDocumentClientCommand { + constructor(input) { + super(); + this.input = input; + this.clientCommand = new import_client_dynamodb.TransactGetItemsCommand(this.input); + this.middlewareStack = this.clientCommand.middlewareStack; + } + static { + __name(this, "TransactGetCommand"); + } + inputKeyNodes = { + TransactItems: { + "*": { + Get: { + Key: ALL_VALUES + // map with AttributeValue + } + } + } + }; + outputKeyNodes = { + Responses: { + "*": { + Item: ALL_VALUES + // map with AttributeValue + } + } + }; + clientCommand; + middlewareStack; + /** + * @internal + */ + resolveMiddleware(clientStack, configuration, options) { + this.addMarshallingMiddleware(configuration); + const stack = clientStack.concat(this.middlewareStack); + const handler = this.clientCommand.resolveMiddleware(stack, configuration, options); + return async () => handler(this.clientCommand); + } +}; + +// src/commands/TransactWriteCommand.ts + + +var TransactWriteCommand = class extends DynamoDBDocumentClientCommand { + constructor(input) { + super(); + this.input = input; + this.clientCommand = new import_client_dynamodb.TransactWriteItemsCommand(this.input); + this.middlewareStack = this.clientCommand.middlewareStack; + } + static { + __name(this, "TransactWriteCommand"); + } + inputKeyNodes = { + TransactItems: { + "*": { + ConditionCheck: { + Key: ALL_VALUES, + // map with AttributeValue + ExpressionAttributeValues: ALL_VALUES + // map with AttributeValue + }, + Put: { + Item: ALL_VALUES, + // map with 
AttributeValue + ExpressionAttributeValues: ALL_VALUES + // map with AttributeValue + }, + Delete: { + Key: ALL_VALUES, + // map with AttributeValue + ExpressionAttributeValues: ALL_VALUES + // map with AttributeValue + }, + Update: { + Key: ALL_VALUES, + // map with AttributeValue + ExpressionAttributeValues: ALL_VALUES + // map with AttributeValue + } + } + } + }; + outputKeyNodes = { + ItemCollectionMetrics: { + "*": { + "*": { + ItemCollectionKey: ALL_VALUES + // map with AttributeValue + } + } + } + }; + clientCommand; + middlewareStack; + /** + * @internal + */ + resolveMiddleware(clientStack, configuration, options) { + this.addMarshallingMiddleware(configuration); + const stack = clientStack.concat(this.middlewareStack); + const handler = this.clientCommand.resolveMiddleware(stack, configuration, options); + return async () => handler(this.clientCommand); + } +}; + +// src/commands/UpdateCommand.ts + + +var UpdateCommand = class extends DynamoDBDocumentClientCommand { + constructor(input) { + super(); + this.input = input; + this.clientCommand = new import_client_dynamodb.UpdateItemCommand(this.input); + this.middlewareStack = this.clientCommand.middlewareStack; + } + static { + __name(this, "UpdateCommand"); + } + inputKeyNodes = { + Key: ALL_VALUES, + // map with AttributeValue + AttributeUpdates: { + "*": { + Value: SELF + } + }, + Expected: { + "*": { + Value: SELF, + AttributeValueList: ALL_MEMBERS + // set/list of AttributeValue + } + }, + ExpressionAttributeValues: ALL_VALUES + // map with AttributeValue + }; + outputKeyNodes = { + Attributes: ALL_VALUES, + // map with AttributeValue + ItemCollectionMetrics: { + ItemCollectionKey: ALL_VALUES + // map with AttributeValue + } + }; + clientCommand; + middlewareStack; + /** + * @internal + */ + resolveMiddleware(clientStack, configuration, options) { + this.addMarshallingMiddleware(configuration); + const stack = clientStack.concat(this.middlewareStack); + const handler = 
this.clientCommand.resolveMiddleware(stack, configuration, options); + return async () => handler(this.clientCommand); + } +}; + +// src/DynamoDBDocumentClient.ts + +var DynamoDBDocumentClient = class _DynamoDBDocumentClient extends import_smithy_client.Client { + static { + __name(this, "DynamoDBDocumentClient"); + } + config; + constructor(client, translateConfig) { + super(client.config); + this.config = client.config; + this.config.translateConfig = translateConfig; + this.middlewareStack = client.middlewareStack; + if (this.config?.cacheMiddleware) { + throw new Error( + "@aws-sdk/lib-dynamodb - cacheMiddleware=true is not compatible with the DynamoDBDocumentClient. This option must be set to false." + ); + } + } + static from(client, translateConfig) { + return new _DynamoDBDocumentClient(client, translateConfig); + } + destroy() { + } +}; + +// src/DynamoDBDocument.ts +var DynamoDBDocument = class _DynamoDBDocument extends DynamoDBDocumentClient { + static { + __name(this, "DynamoDBDocument"); + } + static from(client, translateConfig) { + return new _DynamoDBDocument(client, translateConfig); + } + batchExecuteStatement(args, optionsOrCb, cb) { + const command = new BatchExecuteStatementCommand(args); + if (typeof optionsOrCb === "function") { + this.send(command, optionsOrCb); + } else if (typeof cb === "function") { + if (typeof optionsOrCb !== "object") { + throw new Error(`Expect http options but get ${typeof optionsOrCb}`); + } + this.send(command, optionsOrCb || {}, cb); + } else { + return this.send(command, optionsOrCb); + } + } + batchGet(args, optionsOrCb, cb) { + const command = new BatchGetCommand(args); + if (typeof optionsOrCb === "function") { + this.send(command, optionsOrCb); + } else if (typeof cb === "function") { + if (typeof optionsOrCb !== "object") { + throw new Error(`Expect http options but get ${typeof optionsOrCb}`); + } + this.send(command, optionsOrCb || {}, cb); + } else { + return this.send(command, optionsOrCb); + } + } + 
batchWrite(args, optionsOrCb, cb) { + const command = new BatchWriteCommand(args); + if (typeof optionsOrCb === "function") { + this.send(command, optionsOrCb); + } else if (typeof cb === "function") { + if (typeof optionsOrCb !== "object") { + throw new Error(`Expect http options but get ${typeof optionsOrCb}`); + } + this.send(command, optionsOrCb || {}, cb); + } else { + return this.send(command, optionsOrCb); + } + } + delete(args, optionsOrCb, cb) { + const command = new DeleteCommand(args); + if (typeof optionsOrCb === "function") { + this.send(command, optionsOrCb); + } else if (typeof cb === "function") { + if (typeof optionsOrCb !== "object") { + throw new Error(`Expect http options but get ${typeof optionsOrCb}`); + } + this.send(command, optionsOrCb || {}, cb); + } else { + return this.send(command, optionsOrCb); + } + } + executeStatement(args, optionsOrCb, cb) { + const command = new ExecuteStatementCommand(args); + if (typeof optionsOrCb === "function") { + this.send(command, optionsOrCb); + } else if (typeof cb === "function") { + if (typeof optionsOrCb !== "object") { + throw new Error(`Expect http options but get ${typeof optionsOrCb}`); + } + this.send(command, optionsOrCb || {}, cb); + } else { + return this.send(command, optionsOrCb); + } + } + executeTransaction(args, optionsOrCb, cb) { + const command = new ExecuteTransactionCommand(args); + if (typeof optionsOrCb === "function") { + this.send(command, optionsOrCb); + } else if (typeof cb === "function") { + if (typeof optionsOrCb !== "object") { + throw new Error(`Expect http options but get ${typeof optionsOrCb}`); + } + this.send(command, optionsOrCb || {}, cb); + } else { + return this.send(command, optionsOrCb); + } + } + get(args, optionsOrCb, cb) { + const command = new GetCommand(args); + if (typeof optionsOrCb === "function") { + this.send(command, optionsOrCb); + } else if (typeof cb === "function") { + if (typeof optionsOrCb !== "object") { + throw new Error(`Expect http options but 
get ${typeof optionsOrCb}`); + } + this.send(command, optionsOrCb || {}, cb); + } else { + return this.send(command, optionsOrCb); + } + } + put(args, optionsOrCb, cb) { + const command = new PutCommand(args); + if (typeof optionsOrCb === "function") { + this.send(command, optionsOrCb); + } else if (typeof cb === "function") { + if (typeof optionsOrCb !== "object") { + throw new Error(`Expect http options but get ${typeof optionsOrCb}`); + } + this.send(command, optionsOrCb || {}, cb); + } else { + return this.send(command, optionsOrCb); + } + } + query(args, optionsOrCb, cb) { + const command = new QueryCommand(args); + if (typeof optionsOrCb === "function") { + this.send(command, optionsOrCb); + } else if (typeof cb === "function") { + if (typeof optionsOrCb !== "object") { + throw new Error(`Expect http options but get ${typeof optionsOrCb}`); + } + this.send(command, optionsOrCb || {}, cb); + } else { + return this.send(command, optionsOrCb); + } + } + scan(args, optionsOrCb, cb) { + const command = new ScanCommand(args); + if (typeof optionsOrCb === "function") { + this.send(command, optionsOrCb); + } else if (typeof cb === "function") { + if (typeof optionsOrCb !== "object") { + throw new Error(`Expect http options but get ${typeof optionsOrCb}`); + } + this.send(command, optionsOrCb || {}, cb); + } else { + return this.send(command, optionsOrCb); + } + } + transactGet(args, optionsOrCb, cb) { + const command = new TransactGetCommand(args); + if (typeof optionsOrCb === "function") { + this.send(command, optionsOrCb); + } else if (typeof cb === "function") { + if (typeof optionsOrCb !== "object") { + throw new Error(`Expect http options but get ${typeof optionsOrCb}`); + } + this.send(command, optionsOrCb || {}, cb); + } else { + return this.send(command, optionsOrCb); + } + } + transactWrite(args, optionsOrCb, cb) { + const command = new TransactWriteCommand(args); + if (typeof optionsOrCb === "function") { + this.send(command, optionsOrCb); + } else if 
(typeof cb === "function") { + if (typeof optionsOrCb !== "object") { + throw new Error(`Expect http options but get ${typeof optionsOrCb}`); + } + this.send(command, optionsOrCb || {}, cb); + } else { + return this.send(command, optionsOrCb); + } + } + update(args, optionsOrCb, cb) { + const command = new UpdateCommand(args); + if (typeof optionsOrCb === "function") { + this.send(command, optionsOrCb); + } else if (typeof cb === "function") { + if (typeof optionsOrCb !== "object") { + throw new Error(`Expect http options but get ${typeof optionsOrCb}`); + } + this.send(command, optionsOrCb || {}, cb); + } else { + return this.send(command, optionsOrCb); + } + } +}; + +// src/pagination/Interfaces.ts +var import_types = require("@smithy/types"); + +// src/pagination/QueryPaginator.ts +var import_core2 = require("@smithy/core"); + +var paginateQuery = (0, import_core2.createPaginator)(DynamoDBDocumentClient, QueryCommand, "ExclusiveStartKey", "LastEvaluatedKey", "Limit"); + +// src/pagination/ScanPaginator.ts +var import_core3 = require("@smithy/core"); + +var paginateScan = (0, import_core3.createPaginator)(DynamoDBDocumentClient, ScanCommand, "ExclusiveStartKey", "LastEvaluatedKey", "Limit"); + +// src/index.ts + + + +// Annotate the CommonJS export names for ESM import in node: + +0 && (module.exports = { + NumberValue, + DynamoDBDocument, + __Client, + DynamoDBDocumentClient, + DynamoDBDocumentClientCommand, + $Command, + BatchExecuteStatementCommand, + BatchGetCommand, + BatchWriteCommand, + DeleteCommand, + ExecuteStatementCommand, + ExecuteTransactionCommand, + GetCommand, + PutCommand, + QueryCommand, + ScanCommand, + TransactGetCommand, + TransactWriteCommand, + UpdateCommand, + paginateQuery, + paginateScan +}); + diff --git a/amplify/functions/fetchDocuments/node_modules/@aws-sdk/lib-dynamodb/dist-es/DynamoDBDocument.js b/amplify/functions/fetchDocuments/node_modules/@aws-sdk/lib-dynamodb/dist-es/DynamoDBDocument.js new file mode 100644 index 
0000000..206f25a --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@aws-sdk/lib-dynamodb/dist-es/DynamoDBDocument.js @@ -0,0 +1,214 @@ +import { BatchExecuteStatementCommand, } from "./commands/BatchExecuteStatementCommand"; +import { BatchGetCommand } from "./commands/BatchGetCommand"; +import { BatchWriteCommand } from "./commands/BatchWriteCommand"; +import { DeleteCommand } from "./commands/DeleteCommand"; +import { ExecuteStatementCommand, } from "./commands/ExecuteStatementCommand"; +import { ExecuteTransactionCommand, } from "./commands/ExecuteTransactionCommand"; +import { GetCommand } from "./commands/GetCommand"; +import { PutCommand } from "./commands/PutCommand"; +import { QueryCommand } from "./commands/QueryCommand"; +import { ScanCommand } from "./commands/ScanCommand"; +import { TransactGetCommand } from "./commands/TransactGetCommand"; +import { TransactWriteCommand, } from "./commands/TransactWriteCommand"; +import { UpdateCommand } from "./commands/UpdateCommand"; +import { DynamoDBDocumentClient } from "./DynamoDBDocumentClient"; +export class DynamoDBDocument extends DynamoDBDocumentClient { + static from(client, translateConfig) { + return new DynamoDBDocument(client, translateConfig); + } + batchExecuteStatement(args, optionsOrCb, cb) { + const command = new BatchExecuteStatementCommand(args); + if (typeof optionsOrCb === "function") { + this.send(command, optionsOrCb); + } + else if (typeof cb === "function") { + if (typeof optionsOrCb !== "object") { + throw new Error(`Expect http options but get ${typeof optionsOrCb}`); + } + this.send(command, optionsOrCb || {}, cb); + } + else { + return this.send(command, optionsOrCb); + } + } + batchGet(args, optionsOrCb, cb) { + const command = new BatchGetCommand(args); + if (typeof optionsOrCb === "function") { + this.send(command, optionsOrCb); + } + else if (typeof cb === "function") { + if (typeof optionsOrCb !== "object") { + throw new Error(`Expect http options but get ${typeof 
optionsOrCb}`); + } + this.send(command, optionsOrCb || {}, cb); + } + else { + return this.send(command, optionsOrCb); + } + } + batchWrite(args, optionsOrCb, cb) { + const command = new BatchWriteCommand(args); + if (typeof optionsOrCb === "function") { + this.send(command, optionsOrCb); + } + else if (typeof cb === "function") { + if (typeof optionsOrCb !== "object") { + throw new Error(`Expect http options but get ${typeof optionsOrCb}`); + } + this.send(command, optionsOrCb || {}, cb); + } + else { + return this.send(command, optionsOrCb); + } + } + delete(args, optionsOrCb, cb) { + const command = new DeleteCommand(args); + if (typeof optionsOrCb === "function") { + this.send(command, optionsOrCb); + } + else if (typeof cb === "function") { + if (typeof optionsOrCb !== "object") { + throw new Error(`Expect http options but get ${typeof optionsOrCb}`); + } + this.send(command, optionsOrCb || {}, cb); + } + else { + return this.send(command, optionsOrCb); + } + } + executeStatement(args, optionsOrCb, cb) { + const command = new ExecuteStatementCommand(args); + if (typeof optionsOrCb === "function") { + this.send(command, optionsOrCb); + } + else if (typeof cb === "function") { + if (typeof optionsOrCb !== "object") { + throw new Error(`Expect http options but get ${typeof optionsOrCb}`); + } + this.send(command, optionsOrCb || {}, cb); + } + else { + return this.send(command, optionsOrCb); + } + } + executeTransaction(args, optionsOrCb, cb) { + const command = new ExecuteTransactionCommand(args); + if (typeof optionsOrCb === "function") { + this.send(command, optionsOrCb); + } + else if (typeof cb === "function") { + if (typeof optionsOrCb !== "object") { + throw new Error(`Expect http options but get ${typeof optionsOrCb}`); + } + this.send(command, optionsOrCb || {}, cb); + } + else { + return this.send(command, optionsOrCb); + } + } + get(args, optionsOrCb, cb) { + const command = new GetCommand(args); + if (typeof optionsOrCb === "function") { + 
this.send(command, optionsOrCb); + } + else if (typeof cb === "function") { + if (typeof optionsOrCb !== "object") { + throw new Error(`Expect http options but get ${typeof optionsOrCb}`); + } + this.send(command, optionsOrCb || {}, cb); + } + else { + return this.send(command, optionsOrCb); + } + } + put(args, optionsOrCb, cb) { + const command = new PutCommand(args); + if (typeof optionsOrCb === "function") { + this.send(command, optionsOrCb); + } + else if (typeof cb === "function") { + if (typeof optionsOrCb !== "object") { + throw new Error(`Expect http options but get ${typeof optionsOrCb}`); + } + this.send(command, optionsOrCb || {}, cb); + } + else { + return this.send(command, optionsOrCb); + } + } + query(args, optionsOrCb, cb) { + const command = new QueryCommand(args); + if (typeof optionsOrCb === "function") { + this.send(command, optionsOrCb); + } + else if (typeof cb === "function") { + if (typeof optionsOrCb !== "object") { + throw new Error(`Expect http options but get ${typeof optionsOrCb}`); + } + this.send(command, optionsOrCb || {}, cb); + } + else { + return this.send(command, optionsOrCb); + } + } + scan(args, optionsOrCb, cb) { + const command = new ScanCommand(args); + if (typeof optionsOrCb === "function") { + this.send(command, optionsOrCb); + } + else if (typeof cb === "function") { + if (typeof optionsOrCb !== "object") { + throw new Error(`Expect http options but get ${typeof optionsOrCb}`); + } + this.send(command, optionsOrCb || {}, cb); + } + else { + return this.send(command, optionsOrCb); + } + } + transactGet(args, optionsOrCb, cb) { + const command = new TransactGetCommand(args); + if (typeof optionsOrCb === "function") { + this.send(command, optionsOrCb); + } + else if (typeof cb === "function") { + if (typeof optionsOrCb !== "object") { + throw new Error(`Expect http options but get ${typeof optionsOrCb}`); + } + this.send(command, optionsOrCb || {}, cb); + } + else { + return this.send(command, optionsOrCb); + } + } + 
transactWrite(args, optionsOrCb, cb) { + const command = new TransactWriteCommand(args); + if (typeof optionsOrCb === "function") { + this.send(command, optionsOrCb); + } + else if (typeof cb === "function") { + if (typeof optionsOrCb !== "object") { + throw new Error(`Expect http options but get ${typeof optionsOrCb}`); + } + this.send(command, optionsOrCb || {}, cb); + } + else { + return this.send(command, optionsOrCb); + } + } + update(args, optionsOrCb, cb) { + const command = new UpdateCommand(args); + if (typeof optionsOrCb === "function") { + this.send(command, optionsOrCb); + } + else if (typeof cb === "function") { + if (typeof optionsOrCb !== "object") { + throw new Error(`Expect http options but get ${typeof optionsOrCb}`); + } + this.send(command, optionsOrCb || {}, cb); + } + else { + return this.send(command, optionsOrCb); + } + } +} diff --git a/amplify/functions/fetchDocuments/node_modules/@aws-sdk/lib-dynamodb/dist-es/DynamoDBDocumentClient.js b/amplify/functions/fetchDocuments/node_modules/@aws-sdk/lib-dynamodb/dist-es/DynamoDBDocumentClient.js new file mode 100644 index 0000000..f8dc9bb --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@aws-sdk/lib-dynamodb/dist-es/DynamoDBDocumentClient.js @@ -0,0 +1,20 @@ +import { Client as __Client } from "@smithy/smithy-client"; +export { __Client }; +export class DynamoDBDocumentClient extends __Client { + config; + constructor(client, translateConfig) { + super(client.config); + this.config = client.config; + this.config.translateConfig = translateConfig; + this.middlewareStack = client.middlewareStack; + if (this.config?.cacheMiddleware) { + throw new Error("@aws-sdk/lib-dynamodb - cacheMiddleware=true is not compatible with the" + + " DynamoDBDocumentClient. 
This option must be set to false."); + } + } + static from(client, translateConfig) { + return new DynamoDBDocumentClient(client, translateConfig); + } + destroy() { + } +} diff --git a/amplify/functions/fetchDocuments/node_modules/@aws-sdk/lib-dynamodb/dist-es/baseCommand/DynamoDBDocumentClientCommand.js b/amplify/functions/fetchDocuments/node_modules/@aws-sdk/lib-dynamodb/dist-es/baseCommand/DynamoDBDocumentClientCommand.js new file mode 100644 index 0000000..5f751c1 --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@aws-sdk/lib-dynamodb/dist-es/baseCommand/DynamoDBDocumentClientCommand.js @@ -0,0 +1,30 @@ +import { setFeature } from "@aws-sdk/core"; +import { Command as $Command } from "@smithy/smithy-client"; +import { marshallInput, unmarshallOutput } from "../commands/utils"; +export class DynamoDBDocumentClientCommand extends $Command { + addMarshallingMiddleware(configuration) { + const { marshallOptions = {}, unmarshallOptions = {} } = configuration.translateConfig || {}; + marshallOptions.convertTopLevelContainer = marshallOptions.convertTopLevelContainer ?? true; + unmarshallOptions.convertWithoutMapWrapper = unmarshallOptions.convertWithoutMapWrapper ?? 
true; + this.clientCommand.middlewareStack.addRelativeTo((next, context) => async (args) => { + setFeature(context, "DDB_MAPPER", "d"); + args.input = marshallInput(args.input, this.inputKeyNodes, marshallOptions); + return next(args); + }, { + name: "DocumentMarshall", + relation: "before", + toMiddleware: "serializerMiddleware", + override: true, + }); + this.clientCommand.middlewareStack.addRelativeTo((next, context) => async (args) => { + const deserialized = await next(args); + deserialized.output = unmarshallOutput(deserialized.output, this.outputKeyNodes, unmarshallOptions); + return deserialized; + }, { + name: "DocumentUnmarshall", + relation: "before", + toMiddleware: "deserializerMiddleware", + override: true, + }); + } +} diff --git a/amplify/functions/fetchDocuments/node_modules/@aws-sdk/lib-dynamodb/dist-es/commands/BatchExecuteStatementCommand.js b/amplify/functions/fetchDocuments/node_modules/@aws-sdk/lib-dynamodb/dist-es/commands/BatchExecuteStatementCommand.js new file mode 100644 index 0000000..ee5acdc --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@aws-sdk/lib-dynamodb/dist-es/commands/BatchExecuteStatementCommand.js @@ -0,0 +1,39 @@ +import { BatchExecuteStatementCommand as __BatchExecuteStatementCommand } from "@aws-sdk/client-dynamodb"; +import { Command as $Command } from "@smithy/smithy-client"; +import { DynamoDBDocumentClientCommand } from "../baseCommand/DynamoDBDocumentClientCommand"; +import { ALL_MEMBERS, ALL_VALUES } from "../commands/utils"; +export { DynamoDBDocumentClientCommand, $Command }; +export class BatchExecuteStatementCommand extends DynamoDBDocumentClientCommand { + input; + inputKeyNodes = { + Statements: { + "*": { + Parameters: ALL_MEMBERS, + }, + }, + }; + outputKeyNodes = { + Responses: { + "*": { + Error: { + Item: ALL_VALUES, + }, + Item: ALL_VALUES, + }, + }, + }; + clientCommand; + middlewareStack; + constructor(input) { + super(); + this.input = input; + this.clientCommand = new 
__BatchExecuteStatementCommand(this.input); + this.middlewareStack = this.clientCommand.middlewareStack; + } + resolveMiddleware(clientStack, configuration, options) { + this.addMarshallingMiddleware(configuration); + const stack = clientStack.concat(this.middlewareStack); + const handler = this.clientCommand.resolveMiddleware(stack, configuration, options); + return async () => handler(this.clientCommand); + } +} diff --git a/amplify/functions/fetchDocuments/node_modules/@aws-sdk/lib-dynamodb/dist-es/commands/BatchGetCommand.js b/amplify/functions/fetchDocuments/node_modules/@aws-sdk/lib-dynamodb/dist-es/commands/BatchGetCommand.js new file mode 100644 index 0000000..bab370e --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@aws-sdk/lib-dynamodb/dist-es/commands/BatchGetCommand.js @@ -0,0 +1,45 @@ +import { BatchGetItemCommand as __BatchGetItemCommand } from "@aws-sdk/client-dynamodb"; +import { Command as $Command } from "@smithy/smithy-client"; +import { DynamoDBDocumentClientCommand } from "../baseCommand/DynamoDBDocumentClientCommand"; +import { ALL_VALUES } from "../commands/utils"; +export { DynamoDBDocumentClientCommand, $Command }; +export class BatchGetCommand extends DynamoDBDocumentClientCommand { + input; + inputKeyNodes = { + RequestItems: { + "*": { + Keys: { + "*": ALL_VALUES, + }, + }, + }, + }; + outputKeyNodes = { + Responses: { + "*": { + "*": ALL_VALUES, + }, + }, + UnprocessedKeys: { + "*": { + Keys: { + "*": ALL_VALUES, + }, + }, + }, + }; + clientCommand; + middlewareStack; + constructor(input) { + super(); + this.input = input; + this.clientCommand = new __BatchGetItemCommand(this.input); + this.middlewareStack = this.clientCommand.middlewareStack; + } + resolveMiddleware(clientStack, configuration, options) { + this.addMarshallingMiddleware(configuration); + const stack = clientStack.concat(this.middlewareStack); + const handler = this.clientCommand.resolveMiddleware(stack, configuration, options); + return async () => 
handler(this.clientCommand); + } +} diff --git a/amplify/functions/fetchDocuments/node_modules/@aws-sdk/lib-dynamodb/dist-es/commands/BatchWriteCommand.js b/amplify/functions/fetchDocuments/node_modules/@aws-sdk/lib-dynamodb/dist-es/commands/BatchWriteCommand.js new file mode 100644 index 0000000..c2a480a --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@aws-sdk/lib-dynamodb/dist-es/commands/BatchWriteCommand.js @@ -0,0 +1,57 @@ +import { BatchWriteItemCommand as __BatchWriteItemCommand } from "@aws-sdk/client-dynamodb"; +import { Command as $Command } from "@smithy/smithy-client"; +import { DynamoDBDocumentClientCommand } from "../baseCommand/DynamoDBDocumentClientCommand"; +import { ALL_VALUES } from "../commands/utils"; +export { DynamoDBDocumentClientCommand, $Command }; +export class BatchWriteCommand extends DynamoDBDocumentClientCommand { + input; + inputKeyNodes = { + RequestItems: { + "*": { + "*": { + PutRequest: { + Item: ALL_VALUES, + }, + DeleteRequest: { + Key: ALL_VALUES, + }, + }, + }, + }, + }; + outputKeyNodes = { + UnprocessedItems: { + "*": { + "*": { + PutRequest: { + Item: ALL_VALUES, + }, + DeleteRequest: { + Key: ALL_VALUES, + }, + }, + }, + }, + ItemCollectionMetrics: { + "*": { + "*": { + ItemCollectionKey: ALL_VALUES, + }, + }, + }, + }; + clientCommand; + middlewareStack; + constructor(input) { + super(); + this.input = input; + this.clientCommand = new __BatchWriteItemCommand(this.input); + this.middlewareStack = this.clientCommand.middlewareStack; + } + resolveMiddleware(clientStack, configuration, options) { + this.addMarshallingMiddleware(configuration); + const stack = clientStack.concat(this.middlewareStack); + const handler = this.clientCommand.resolveMiddleware(stack, configuration, options); + return async () => handler(this.clientCommand); + } +} diff --git a/amplify/functions/fetchDocuments/node_modules/@aws-sdk/lib-dynamodb/dist-es/commands/DeleteCommand.js 
b/amplify/functions/fetchDocuments/node_modules/@aws-sdk/lib-dynamodb/dist-es/commands/DeleteCommand.js new file mode 100644 index 0000000..5622ae6 --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@aws-sdk/lib-dynamodb/dist-es/commands/DeleteCommand.js @@ -0,0 +1,38 @@ +import { DeleteItemCommand as __DeleteItemCommand } from "@aws-sdk/client-dynamodb"; +import { Command as $Command } from "@smithy/smithy-client"; +import { DynamoDBDocumentClientCommand } from "../baseCommand/DynamoDBDocumentClientCommand"; +import { ALL_MEMBERS, ALL_VALUES, SELF } from "../commands/utils"; +export { DynamoDBDocumentClientCommand, $Command }; +export class DeleteCommand extends DynamoDBDocumentClientCommand { + input; + inputKeyNodes = { + Key: ALL_VALUES, + Expected: { + "*": { + Value: SELF, + AttributeValueList: ALL_MEMBERS, + }, + }, + ExpressionAttributeValues: ALL_VALUES, + }; + outputKeyNodes = { + Attributes: ALL_VALUES, + ItemCollectionMetrics: { + ItemCollectionKey: ALL_VALUES, + }, + }; + clientCommand; + middlewareStack; + constructor(input) { + super(); + this.input = input; + this.clientCommand = new __DeleteItemCommand(this.input); + this.middlewareStack = this.clientCommand.middlewareStack; + } + resolveMiddleware(clientStack, configuration, options) { + this.addMarshallingMiddleware(configuration); + const stack = clientStack.concat(this.middlewareStack); + const handler = this.clientCommand.resolveMiddleware(stack, configuration, options); + return async () => handler(this.clientCommand); + } +} diff --git a/amplify/functions/fetchDocuments/node_modules/@aws-sdk/lib-dynamodb/dist-es/commands/ExecuteStatementCommand.js b/amplify/functions/fetchDocuments/node_modules/@aws-sdk/lib-dynamodb/dist-es/commands/ExecuteStatementCommand.js new file mode 100644 index 0000000..e3f3cfa --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@aws-sdk/lib-dynamodb/dist-es/commands/ExecuteStatementCommand.js @@ -0,0 +1,31 @@ +import { 
ExecuteStatementCommand as __ExecuteStatementCommand } from "@aws-sdk/client-dynamodb"; +import { Command as $Command } from "@smithy/smithy-client"; +import { DynamoDBDocumentClientCommand } from "../baseCommand/DynamoDBDocumentClientCommand"; +import { ALL_MEMBERS, ALL_VALUES } from "../commands/utils"; +export { DynamoDBDocumentClientCommand, $Command }; +export class ExecuteStatementCommand extends DynamoDBDocumentClientCommand { + input; + inputKeyNodes = { + Parameters: ALL_MEMBERS, + }; + outputKeyNodes = { + Items: { + "*": ALL_VALUES, + }, + LastEvaluatedKey: ALL_VALUES, + }; + clientCommand; + middlewareStack; + constructor(input) { + super(); + this.input = input; + this.clientCommand = new __ExecuteStatementCommand(this.input); + this.middlewareStack = this.clientCommand.middlewareStack; + } + resolveMiddleware(clientStack, configuration, options) { + this.addMarshallingMiddleware(configuration); + const stack = clientStack.concat(this.middlewareStack); + const handler = this.clientCommand.resolveMiddleware(stack, configuration, options); + return async () => handler(this.clientCommand); + } +} diff --git a/amplify/functions/fetchDocuments/node_modules/@aws-sdk/lib-dynamodb/dist-es/commands/ExecuteTransactionCommand.js b/amplify/functions/fetchDocuments/node_modules/@aws-sdk/lib-dynamodb/dist-es/commands/ExecuteTransactionCommand.js new file mode 100644 index 0000000..3c575aa --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@aws-sdk/lib-dynamodb/dist-es/commands/ExecuteTransactionCommand.js @@ -0,0 +1,36 @@ +import { ExecuteTransactionCommand as __ExecuteTransactionCommand } from "@aws-sdk/client-dynamodb"; +import { Command as $Command } from "@smithy/smithy-client"; +import { DynamoDBDocumentClientCommand } from "../baseCommand/DynamoDBDocumentClientCommand"; +import { ALL_MEMBERS, ALL_VALUES } from "../commands/utils"; +export { DynamoDBDocumentClientCommand, $Command }; +export class ExecuteTransactionCommand extends 
DynamoDBDocumentClientCommand { + input; + inputKeyNodes = { + TransactStatements: { + "*": { + Parameters: ALL_MEMBERS, + }, + }, + }; + outputKeyNodes = { + Responses: { + "*": { + Item: ALL_VALUES, + }, + }, + }; + clientCommand; + middlewareStack; + constructor(input) { + super(); + this.input = input; + this.clientCommand = new __ExecuteTransactionCommand(this.input); + this.middlewareStack = this.clientCommand.middlewareStack; + } + resolveMiddleware(clientStack, configuration, options) { + this.addMarshallingMiddleware(configuration); + const stack = clientStack.concat(this.middlewareStack); + const handler = this.clientCommand.resolveMiddleware(stack, configuration, options); + return async () => handler(this.clientCommand); + } +} diff --git a/amplify/functions/fetchDocuments/node_modules/@aws-sdk/lib-dynamodb/dist-es/commands/GetCommand.js b/amplify/functions/fetchDocuments/node_modules/@aws-sdk/lib-dynamodb/dist-es/commands/GetCommand.js new file mode 100644 index 0000000..14ae2f3 --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@aws-sdk/lib-dynamodb/dist-es/commands/GetCommand.js @@ -0,0 +1,28 @@ +import { GetItemCommand as __GetItemCommand } from "@aws-sdk/client-dynamodb"; +import { Command as $Command } from "@smithy/smithy-client"; +import { DynamoDBDocumentClientCommand } from "../baseCommand/DynamoDBDocumentClientCommand"; +import { ALL_VALUES } from "../commands/utils"; +export { DynamoDBDocumentClientCommand, $Command }; +export class GetCommand extends DynamoDBDocumentClientCommand { + input; + inputKeyNodes = { + Key: ALL_VALUES, + }; + outputKeyNodes = { + Item: ALL_VALUES, + }; + clientCommand; + middlewareStack; + constructor(input) { + super(); + this.input = input; + this.clientCommand = new __GetItemCommand(this.input); + this.middlewareStack = this.clientCommand.middlewareStack; + } + resolveMiddleware(clientStack, configuration, options) { + this.addMarshallingMiddleware(configuration); + const stack = 
clientStack.concat(this.middlewareStack); + const handler = this.clientCommand.resolveMiddleware(stack, configuration, options); + return async () => handler(this.clientCommand); + } +} diff --git a/amplify/functions/fetchDocuments/node_modules/@aws-sdk/lib-dynamodb/dist-es/commands/PutCommand.js b/amplify/functions/fetchDocuments/node_modules/@aws-sdk/lib-dynamodb/dist-es/commands/PutCommand.js new file mode 100644 index 0000000..51ae545 --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@aws-sdk/lib-dynamodb/dist-es/commands/PutCommand.js @@ -0,0 +1,38 @@ +import { PutItemCommand as __PutItemCommand } from "@aws-sdk/client-dynamodb"; +import { Command as $Command } from "@smithy/smithy-client"; +import { DynamoDBDocumentClientCommand } from "../baseCommand/DynamoDBDocumentClientCommand"; +import { ALL_MEMBERS, ALL_VALUES, SELF } from "../commands/utils"; +export { DynamoDBDocumentClientCommand, $Command }; +export class PutCommand extends DynamoDBDocumentClientCommand { + input; + inputKeyNodes = { + Item: ALL_VALUES, + Expected: { + "*": { + Value: SELF, + AttributeValueList: ALL_MEMBERS, + }, + }, + ExpressionAttributeValues: ALL_VALUES, + }; + outputKeyNodes = { + Attributes: ALL_VALUES, + ItemCollectionMetrics: { + ItemCollectionKey: ALL_VALUES, + }, + }; + clientCommand; + middlewareStack; + constructor(input) { + super(); + this.input = input; + this.clientCommand = new __PutItemCommand(this.input); + this.middlewareStack = this.clientCommand.middlewareStack; + } + resolveMiddleware(clientStack, configuration, options) { + this.addMarshallingMiddleware(configuration); + const stack = clientStack.concat(this.middlewareStack); + const handler = this.clientCommand.resolveMiddleware(stack, configuration, options); + return async () => handler(this.clientCommand); + } +} diff --git a/amplify/functions/fetchDocuments/node_modules/@aws-sdk/lib-dynamodb/dist-es/commands/QueryCommand.js 
b/amplify/functions/fetchDocuments/node_modules/@aws-sdk/lib-dynamodb/dist-es/commands/QueryCommand.js new file mode 100644 index 0000000..36a5418 --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@aws-sdk/lib-dynamodb/dist-es/commands/QueryCommand.js @@ -0,0 +1,42 @@ +import { QueryCommand as __QueryCommand } from "@aws-sdk/client-dynamodb"; +import { Command as $Command } from "@smithy/smithy-client"; +import { DynamoDBDocumentClientCommand } from "../baseCommand/DynamoDBDocumentClientCommand"; +import { ALL_MEMBERS, ALL_VALUES } from "../commands/utils"; +export { DynamoDBDocumentClientCommand, $Command }; +export class QueryCommand extends DynamoDBDocumentClientCommand { + input; + inputKeyNodes = { + KeyConditions: { + "*": { + AttributeValueList: ALL_MEMBERS, + }, + }, + QueryFilter: { + "*": { + AttributeValueList: ALL_MEMBERS, + }, + }, + ExclusiveStartKey: ALL_VALUES, + ExpressionAttributeValues: ALL_VALUES, + }; + outputKeyNodes = { + Items: { + "*": ALL_VALUES, + }, + LastEvaluatedKey: ALL_VALUES, + }; + clientCommand; + middlewareStack; + constructor(input) { + super(); + this.input = input; + this.clientCommand = new __QueryCommand(this.input); + this.middlewareStack = this.clientCommand.middlewareStack; + } + resolveMiddleware(clientStack, configuration, options) { + this.addMarshallingMiddleware(configuration); + const stack = clientStack.concat(this.middlewareStack); + const handler = this.clientCommand.resolveMiddleware(stack, configuration, options); + return async () => handler(this.clientCommand); + } +} diff --git a/amplify/functions/fetchDocuments/node_modules/@aws-sdk/lib-dynamodb/dist-es/commands/ScanCommand.js b/amplify/functions/fetchDocuments/node_modules/@aws-sdk/lib-dynamodb/dist-es/commands/ScanCommand.js new file mode 100644 index 0000000..a5ba2c9 --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@aws-sdk/lib-dynamodb/dist-es/commands/ScanCommand.js @@ -0,0 +1,37 @@ +import { ScanCommand as 
__ScanCommand } from "@aws-sdk/client-dynamodb"; +import { Command as $Command } from "@smithy/smithy-client"; +import { DynamoDBDocumentClientCommand } from "../baseCommand/DynamoDBDocumentClientCommand"; +import { ALL_MEMBERS, ALL_VALUES } from "../commands/utils"; +export { DynamoDBDocumentClientCommand, $Command }; +export class ScanCommand extends DynamoDBDocumentClientCommand { + input; + inputKeyNodes = { + ScanFilter: { + "*": { + AttributeValueList: ALL_MEMBERS, + }, + }, + ExclusiveStartKey: ALL_VALUES, + ExpressionAttributeValues: ALL_VALUES, + }; + outputKeyNodes = { + Items: { + "*": ALL_VALUES, + }, + LastEvaluatedKey: ALL_VALUES, + }; + clientCommand; + middlewareStack; + constructor(input) { + super(); + this.input = input; + this.clientCommand = new __ScanCommand(this.input); + this.middlewareStack = this.clientCommand.middlewareStack; + } + resolveMiddleware(clientStack, configuration, options) { + this.addMarshallingMiddleware(configuration); + const stack = clientStack.concat(this.middlewareStack); + const handler = this.clientCommand.resolveMiddleware(stack, configuration, options); + return async () => handler(this.clientCommand); + } +} diff --git a/amplify/functions/fetchDocuments/node_modules/@aws-sdk/lib-dynamodb/dist-es/commands/TransactGetCommand.js b/amplify/functions/fetchDocuments/node_modules/@aws-sdk/lib-dynamodb/dist-es/commands/TransactGetCommand.js new file mode 100644 index 0000000..3de3ec5 --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@aws-sdk/lib-dynamodb/dist-es/commands/TransactGetCommand.js @@ -0,0 +1,38 @@ +import { TransactGetItemsCommand as __TransactGetItemsCommand } from "@aws-sdk/client-dynamodb"; +import { Command as $Command } from "@smithy/smithy-client"; +import { DynamoDBDocumentClientCommand } from "../baseCommand/DynamoDBDocumentClientCommand"; +import { ALL_VALUES } from "../commands/utils"; +export { DynamoDBDocumentClientCommand, $Command }; +export class TransactGetCommand extends 
DynamoDBDocumentClientCommand { + input; + inputKeyNodes = { + TransactItems: { + "*": { + Get: { + Key: ALL_VALUES, + }, + }, + }, + }; + outputKeyNodes = { + Responses: { + "*": { + Item: ALL_VALUES, + }, + }, + }; + clientCommand; + middlewareStack; + constructor(input) { + super(); + this.input = input; + this.clientCommand = new __TransactGetItemsCommand(this.input); + this.middlewareStack = this.clientCommand.middlewareStack; + } + resolveMiddleware(clientStack, configuration, options) { + this.addMarshallingMiddleware(configuration); + const stack = clientStack.concat(this.middlewareStack); + const handler = this.clientCommand.resolveMiddleware(stack, configuration, options); + return async () => handler(this.clientCommand); + } +} diff --git a/amplify/functions/fetchDocuments/node_modules/@aws-sdk/lib-dynamodb/dist-es/commands/TransactWriteCommand.js b/amplify/functions/fetchDocuments/node_modules/@aws-sdk/lib-dynamodb/dist-es/commands/TransactWriteCommand.js new file mode 100644 index 0000000..5f1fea0 --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@aws-sdk/lib-dynamodb/dist-es/commands/TransactWriteCommand.js @@ -0,0 +1,53 @@ +import { TransactWriteItemsCommand as __TransactWriteItemsCommand } from "@aws-sdk/client-dynamodb"; +import { Command as $Command } from "@smithy/smithy-client"; +import { DynamoDBDocumentClientCommand } from "../baseCommand/DynamoDBDocumentClientCommand"; +import { ALL_VALUES } from "../commands/utils"; +export { DynamoDBDocumentClientCommand, $Command }; +export class TransactWriteCommand extends DynamoDBDocumentClientCommand { + input; + inputKeyNodes = { + TransactItems: { + "*": { + ConditionCheck: { + Key: ALL_VALUES, + ExpressionAttributeValues: ALL_VALUES, + }, + Put: { + Item: ALL_VALUES, + ExpressionAttributeValues: ALL_VALUES, + }, + Delete: { + Key: ALL_VALUES, + ExpressionAttributeValues: ALL_VALUES, + }, + Update: { + Key: ALL_VALUES, + ExpressionAttributeValues: ALL_VALUES, + }, + }, + }, + }; + 
outputKeyNodes = { + ItemCollectionMetrics: { + "*": { + "*": { + ItemCollectionKey: ALL_VALUES, + }, + }, + }, + }; + clientCommand; + middlewareStack; + constructor(input) { + super(); + this.input = input; + this.clientCommand = new __TransactWriteItemsCommand(this.input); + this.middlewareStack = this.clientCommand.middlewareStack; + } + resolveMiddleware(clientStack, configuration, options) { + this.addMarshallingMiddleware(configuration); + const stack = clientStack.concat(this.middlewareStack); + const handler = this.clientCommand.resolveMiddleware(stack, configuration, options); + return async () => handler(this.clientCommand); + } +} diff --git a/amplify/functions/fetchDocuments/node_modules/@aws-sdk/lib-dynamodb/dist-es/commands/UpdateCommand.js b/amplify/functions/fetchDocuments/node_modules/@aws-sdk/lib-dynamodb/dist-es/commands/UpdateCommand.js new file mode 100644 index 0000000..a5a2da1 --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@aws-sdk/lib-dynamodb/dist-es/commands/UpdateCommand.js @@ -0,0 +1,43 @@ +import { UpdateItemCommand as __UpdateItemCommand } from "@aws-sdk/client-dynamodb"; +import { Command as $Command } from "@smithy/smithy-client"; +import { DynamoDBDocumentClientCommand } from "../baseCommand/DynamoDBDocumentClientCommand"; +import { ALL_MEMBERS, ALL_VALUES, SELF } from "../commands/utils"; +export { DynamoDBDocumentClientCommand, $Command }; +export class UpdateCommand extends DynamoDBDocumentClientCommand { + input; + inputKeyNodes = { + Key: ALL_VALUES, + AttributeUpdates: { + "*": { + Value: SELF, + }, + }, + Expected: { + "*": { + Value: SELF, + AttributeValueList: ALL_MEMBERS, + }, + }, + ExpressionAttributeValues: ALL_VALUES, + }; + outputKeyNodes = { + Attributes: ALL_VALUES, + ItemCollectionMetrics: { + ItemCollectionKey: ALL_VALUES, + }, + }; + clientCommand; + middlewareStack; + constructor(input) { + super(); + this.input = input; + this.clientCommand = new __UpdateItemCommand(this.input); + 
this.middlewareStack = this.clientCommand.middlewareStack; + } + resolveMiddleware(clientStack, configuration, options) { + this.addMarshallingMiddleware(configuration); + const stack = clientStack.concat(this.middlewareStack); + const handler = this.clientCommand.resolveMiddleware(stack, configuration, options); + return async () => handler(this.clientCommand); + } +} diff --git a/amplify/functions/fetchDocuments/node_modules/@aws-sdk/lib-dynamodb/dist-es/commands/index.js b/amplify/functions/fetchDocuments/node_modules/@aws-sdk/lib-dynamodb/dist-es/commands/index.js new file mode 100644 index 0000000..49e8a4e --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@aws-sdk/lib-dynamodb/dist-es/commands/index.js @@ -0,0 +1,13 @@ +export * from "./BatchExecuteStatementCommand"; +export * from "./BatchGetCommand"; +export * from "./BatchWriteCommand"; +export * from "./DeleteCommand"; +export * from "./ExecuteStatementCommand"; +export * from "./ExecuteTransactionCommand"; +export * from "./GetCommand"; +export * from "./PutCommand"; +export * from "./QueryCommand"; +export * from "./ScanCommand"; +export * from "./TransactGetCommand"; +export * from "./TransactWriteCommand"; +export * from "./UpdateCommand"; diff --git a/amplify/functions/fetchDocuments/node_modules/@aws-sdk/lib-dynamodb/dist-es/commands/utils.js b/amplify/functions/fetchDocuments/node_modules/@aws-sdk/lib-dynamodb/dist-es/commands/utils.js new file mode 100644 index 0000000..19c4e99 --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@aws-sdk/lib-dynamodb/dist-es/commands/utils.js @@ -0,0 +1,80 @@ +import { marshall, unmarshall } from "@aws-sdk/util-dynamodb"; +export const SELF = null; +export const ALL_VALUES = {}; +export const ALL_MEMBERS = []; +const NEXT_LEVEL = "*"; +const processObj = (obj, processFunc, keyNodes) => { + if (obj !== undefined) { + if (keyNodes == null) { + return processFunc(obj); + } + else { + const keys = Object.keys(keyNodes); + const 
goToNextLevel = keys.length === 1 && keys[0] === NEXT_LEVEL; + const someChildren = keys.length >= 1 && !goToNextLevel; + const allChildren = keys.length === 0; + if (someChildren) { + return processKeysInObj(obj, processFunc, keyNodes); + } + else if (allChildren) { + return processAllKeysInObj(obj, processFunc, SELF); + } + else if (goToNextLevel) { + return Object.entries(obj ?? {}).reduce((acc, [k, v]) => { + if (typeof v !== "function") { + acc[k] = processObj(v, processFunc, keyNodes[NEXT_LEVEL]); + } + return acc; + }, (Array.isArray(obj) ? [] : {})); + } + } + } + return undefined; +}; +const processKeysInObj = (obj, processFunc, keyNodes) => { + let accumulator; + if (Array.isArray(obj)) { + accumulator = obj.filter((item) => typeof item !== "function"); + } + else { + accumulator = {}; + for (const [k, v] of Object.entries(obj)) { + if (typeof v !== "function") { + accumulator[k] = v; + } + } + } + for (const [nodeKey, nodes] of Object.entries(keyNodes)) { + if (typeof obj[nodeKey] === "function") { + continue; + } + const processedValue = processObj(obj[nodeKey], processFunc, nodes); + if (processedValue !== undefined && typeof processedValue !== "function") { + accumulator[nodeKey] = processedValue; + } + } + return accumulator; +}; +const processAllKeysInObj = (obj, processFunc, keyNodes) => { + if (Array.isArray(obj)) { + return obj.filter((item) => typeof item !== "function").map((item) => processObj(item, processFunc, keyNodes)); + } + return Object.entries(obj).reduce((acc, [key, value]) => { + if (typeof value === "function") { + return acc; + } + const processedValue = processObj(value, processFunc, keyNodes); + if (processedValue !== undefined && typeof processedValue !== "function") { + acc[key] = processedValue; + } + return acc; + }, {}); +}; +export const marshallInput = (obj, keyNodes, options) => { + const marshallFunc = (toMarshall) => marshall(toMarshall, options); + return processKeysInObj(obj, marshallFunc, keyNodes); +}; +export const 
unmarshallOutput = (obj, keyNodes, options) => { + const unmarshallFunc = (toMarshall) => unmarshall(toMarshall, options); + return processKeysInObj(obj, unmarshallFunc, keyNodes); +}; diff --git a/amplify/functions/fetchDocuments/node_modules/@aws-sdk/lib-dynamodb/dist-es/index.js b/amplify/functions/fetchDocuments/node_modules/@aws-sdk/lib-dynamodb/dist-es/index.js new file mode 100644 index 0000000..77c5a74 --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@aws-sdk/lib-dynamodb/dist-es/index.js @@ -0,0 +1,5 @@ +export * from "./DynamoDBDocument"; +export * from "./DynamoDBDocumentClient"; +export * from "./commands"; +export * from "./pagination"; +export { NumberValueImpl as NumberValue } from "@aws-sdk/util-dynamodb"; diff --git a/amplify/functions/fetchDocuments/node_modules/@aws-sdk/lib-dynamodb/dist-es/pagination/Interfaces.js b/amplify/functions/fetchDocuments/node_modules/@aws-sdk/lib-dynamodb/dist-es/pagination/Interfaces.js new file mode 100644 index 0000000..cb0ff5c --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@aws-sdk/lib-dynamodb/dist-es/pagination/Interfaces.js @@ -0,0 +1 @@ +export {}; diff --git a/amplify/functions/fetchDocuments/node_modules/@aws-sdk/lib-dynamodb/dist-es/pagination/QueryPaginator.js b/amplify/functions/fetchDocuments/node_modules/@aws-sdk/lib-dynamodb/dist-es/pagination/QueryPaginator.js new file mode 100644 index 0000000..da9e058 --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@aws-sdk/lib-dynamodb/dist-es/pagination/QueryPaginator.js @@ -0,0 +1,4 @@ +import { createPaginator } from "@smithy/core"; +import { QueryCommand } from "../commands/QueryCommand"; +import { DynamoDBDocumentClient } from "../DynamoDBDocumentClient"; +export const paginateQuery = createPaginator(DynamoDBDocumentClient, QueryCommand, "ExclusiveStartKey", "LastEvaluatedKey", "Limit"); diff --git a/amplify/functions/fetchDocuments/node_modules/@aws-sdk/lib-dynamodb/dist-es/pagination/ScanPaginator.js 
b/amplify/functions/fetchDocuments/node_modules/@aws-sdk/lib-dynamodb/dist-es/pagination/ScanPaginator.js new file mode 100644 index 0000000..ae01799 --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@aws-sdk/lib-dynamodb/dist-es/pagination/ScanPaginator.js @@ -0,0 +1,4 @@ +import { createPaginator } from "@smithy/core"; +import { ScanCommand } from "../commands/ScanCommand"; +import { DynamoDBDocumentClient } from "../DynamoDBDocumentClient"; +export const paginateScan = createPaginator(DynamoDBDocumentClient, ScanCommand, "ExclusiveStartKey", "LastEvaluatedKey", "Limit"); diff --git a/amplify/functions/fetchDocuments/node_modules/@aws-sdk/lib-dynamodb/dist-es/pagination/index.js b/amplify/functions/fetchDocuments/node_modules/@aws-sdk/lib-dynamodb/dist-es/pagination/index.js new file mode 100644 index 0000000..0d9540e --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@aws-sdk/lib-dynamodb/dist-es/pagination/index.js @@ -0,0 +1,3 @@ +export * from "./Interfaces"; +export * from "./QueryPaginator"; +export * from "./ScanPaginator"; diff --git a/amplify/functions/fetchDocuments/node_modules/@aws-sdk/lib-dynamodb/dist-types/DynamoDBDocument.d.ts b/amplify/functions/fetchDocuments/node_modules/@aws-sdk/lib-dynamodb/dist-types/DynamoDBDocument.d.ts new file mode 100644 index 0000000..38d4a20 --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@aws-sdk/lib-dynamodb/dist-types/DynamoDBDocument.d.ts @@ -0,0 +1,195 @@ +import { DynamoDBClient } from "@aws-sdk/client-dynamodb"; +import { HttpHandlerOptions as __HttpHandlerOptions } from "@smithy/types"; +import { BatchExecuteStatementCommandInput, BatchExecuteStatementCommandOutput } from "./commands/BatchExecuteStatementCommand"; +import { BatchGetCommandInput, BatchGetCommandOutput } from "./commands/BatchGetCommand"; +import { BatchWriteCommandInput, BatchWriteCommandOutput } from "./commands/BatchWriteCommand"; +import { DeleteCommandInput, DeleteCommandOutput } from 
"./commands/DeleteCommand"; +import { ExecuteStatementCommandInput, ExecuteStatementCommandOutput } from "./commands/ExecuteStatementCommand"; +import { ExecuteTransactionCommandInput, ExecuteTransactionCommandOutput } from "./commands/ExecuteTransactionCommand"; +import { GetCommandInput, GetCommandOutput } from "./commands/GetCommand"; +import { PutCommandInput, PutCommandOutput } from "./commands/PutCommand"; +import { QueryCommandInput, QueryCommandOutput } from "./commands/QueryCommand"; +import { ScanCommandInput, ScanCommandOutput } from "./commands/ScanCommand"; +import { TransactGetCommandInput, TransactGetCommandOutput } from "./commands/TransactGetCommand"; +import { TransactWriteCommandInput, TransactWriteCommandOutput } from "./commands/TransactWriteCommand"; +import { UpdateCommandInput, UpdateCommandOutput } from "./commands/UpdateCommand"; +import { DynamoDBDocumentClient, TranslateConfig } from "./DynamoDBDocumentClient"; +/** + * The document client simplifies working with items in Amazon DynamoDB by + * abstracting away the notion of attribute values. This abstraction annotates native + * JavaScript types supplied as input parameters, as well as converts annotated + * response data to native JavaScript types. + * + * ## Marshalling Input and Unmarshalling Response Data + * + * The document client affords developers the use of native JavaScript types + * instead of `AttributeValue`s to simplify the JavaScript development + * experience with Amazon DynamoDB. JavaScript objects passed in as parameters + * are marshalled into `AttributeValue` shapes required by Amazon DynamoDB. + * Responses from DynamoDB are unmarshalled into plain JavaScript objects + * by the `DocumentClient`. The `DocumentClient` does not accept + * `AttributeValue`s in favor of native JavaScript types. 
+ * + * | JavaScript Type | DynamoDB AttributeValue | + * | :-------------------------------: | ----------------------- | + * | String | S | + * | Number / BigInt | N | + * | Boolean | BOOL | + * | null | NULL | + * | Array | L | + * | Object | M | + * | Set\ | BS | + * | Set\ | NS | + * | Set\ | SS | + * | Uint8Array, Buffer, File, Blob... | B | + * + * ### Example + * + * Here is an example list which is sent to DynamoDB client in an operation: + * + * ```json + * { "L": [{ "NULL": true }, { "BOOL": false }, { "N": 1 }, { "S": "two" }] } + * ``` + * + * The DynamoDB document client abstracts the attribute values as follows in + * both input and output: + * + * ```json + * [null, false, 1, "two"] + * ``` + * + * @see {@link https://www.npmjs.com/package/@aws-sdk/client-dynamodb | @aws-sdk/client-dynamodb} + */ +export declare class DynamoDBDocument extends DynamoDBDocumentClient { + static from(client: DynamoDBClient, translateConfig?: TranslateConfig): DynamoDBDocument; + /** + * Accepts native JavaScript types instead of `AttributeValue`s, and calls + * BatchExecuteStatementCommand operation from {@link @aws-sdk/client-dynamodb#BatchExecuteStatementCommand}. + * + * JavaScript objects passed in as parameters are marshalled into `AttributeValue` shapes + * required by Amazon DynamoDB. Responses from DynamoDB are unmarshalled into plain JavaScript objects. + */ + batchExecuteStatement(args: BatchExecuteStatementCommandInput, options?: __HttpHandlerOptions): Promise; + batchExecuteStatement(args: BatchExecuteStatementCommandInput, cb: (err: any, data?: BatchExecuteStatementCommandOutput) => void): void; + batchExecuteStatement(args: BatchExecuteStatementCommandInput, options: __HttpHandlerOptions, cb: (err: any, data?: BatchExecuteStatementCommandOutput) => void): void; + /** + * Accepts native JavaScript types instead of `AttributeValue`s, and calls + * BatchGetItemCommand operation from {@link @aws-sdk/client-dynamodb#BatchGetItemCommand}. 
+ * + * JavaScript objects passed in as parameters are marshalled into `AttributeValue` shapes + * required by Amazon DynamoDB. Responses from DynamoDB are unmarshalled into plain JavaScript objects. + */ + batchGet(args: BatchGetCommandInput, options?: __HttpHandlerOptions): Promise; + batchGet(args: BatchGetCommandInput, cb: (err: any, data?: BatchGetCommandOutput) => void): void; + batchGet(args: BatchGetCommandInput, options: __HttpHandlerOptions, cb: (err: any, data?: BatchGetCommandOutput) => void): void; + /** + * Accepts native JavaScript types instead of `AttributeValue`s, and calls + * BatchWriteItemCommand operation from {@link @aws-sdk/client-dynamodb#BatchWriteItemCommand}. + * + * JavaScript objects passed in as parameters are marshalled into `AttributeValue` shapes + * required by Amazon DynamoDB. Responses from DynamoDB are unmarshalled into plain JavaScript objects. + */ + batchWrite(args: BatchWriteCommandInput, options?: __HttpHandlerOptions): Promise; + batchWrite(args: BatchWriteCommandInput, cb: (err: any, data?: BatchWriteCommandOutput) => void): void; + batchWrite(args: BatchWriteCommandInput, options: __HttpHandlerOptions, cb: (err: any, data?: BatchWriteCommandOutput) => void): void; + /** + * Accepts native JavaScript types instead of `AttributeValue`s, and calls + * DeleteItemCommand operation from {@link @aws-sdk/client-dynamodb#DeleteItemCommand}. + * + * JavaScript objects passed in as parameters are marshalled into `AttributeValue` shapes + * required by Amazon DynamoDB. Responses from DynamoDB are unmarshalled into plain JavaScript objects. 
+ */ + delete(args: DeleteCommandInput, options?: __HttpHandlerOptions): Promise; + delete(args: DeleteCommandInput, cb: (err: any, data?: DeleteCommandOutput) => void): void; + delete(args: DeleteCommandInput, options: __HttpHandlerOptions, cb: (err: any, data?: DeleteCommandOutput) => void): void; + /** + * Accepts native JavaScript types instead of `AttributeValue`s, and calls + * ExecuteStatementCommand operation from {@link @aws-sdk/client-dynamodb#ExecuteStatementCommand}. + * + * JavaScript objects passed in as parameters are marshalled into `AttributeValue` shapes + * required by Amazon DynamoDB. Responses from DynamoDB are unmarshalled into plain JavaScript objects. + */ + executeStatement(args: ExecuteStatementCommandInput, options?: __HttpHandlerOptions): Promise; + executeStatement(args: ExecuteStatementCommandInput, cb: (err: any, data?: ExecuteStatementCommandOutput) => void): void; + executeStatement(args: ExecuteStatementCommandInput, options: __HttpHandlerOptions, cb: (err: any, data?: ExecuteStatementCommandOutput) => void): void; + /** + * Accepts native JavaScript types instead of `AttributeValue`s, and calls + * ExecuteTransactionCommand operation from {@link @aws-sdk/client-dynamodb#ExecuteTransactionCommand}. + * + * JavaScript objects passed in as parameters are marshalled into `AttributeValue` shapes + * required by Amazon DynamoDB. Responses from DynamoDB are unmarshalled into plain JavaScript objects. 
+ */ + executeTransaction(args: ExecuteTransactionCommandInput, options?: __HttpHandlerOptions): Promise; + executeTransaction(args: ExecuteTransactionCommandInput, cb: (err: any, data?: ExecuteTransactionCommandOutput) => void): void; + executeTransaction(args: ExecuteTransactionCommandInput, options: __HttpHandlerOptions, cb: (err: any, data?: ExecuteTransactionCommandOutput) => void): void; + /** + * Accepts native JavaScript types instead of `AttributeValue`s, and calls + * GetItemCommand operation from {@link @aws-sdk/client-dynamodb#GetItemCommand}. + * + * JavaScript objects passed in as parameters are marshalled into `AttributeValue` shapes + * required by Amazon DynamoDB. Responses from DynamoDB are unmarshalled into plain JavaScript objects. + */ + get(args: GetCommandInput, options?: __HttpHandlerOptions): Promise; + get(args: GetCommandInput, cb: (err: any, data?: GetCommandOutput) => void): void; + get(args: GetCommandInput, options: __HttpHandlerOptions, cb: (err: any, data?: GetCommandOutput) => void): void; + /** + * Accepts native JavaScript types instead of `AttributeValue`s, and calls + * PutItemCommand operation from {@link @aws-sdk/client-dynamodb#PutItemCommand}. + * + * JavaScript objects passed in as parameters are marshalled into `AttributeValue` shapes + * required by Amazon DynamoDB. Responses from DynamoDB are unmarshalled into plain JavaScript objects. + */ + put(args: PutCommandInput, options?: __HttpHandlerOptions): Promise; + put(args: PutCommandInput, cb: (err: any, data?: PutCommandOutput) => void): void; + put(args: PutCommandInput, options: __HttpHandlerOptions, cb: (err: any, data?: PutCommandOutput) => void): void; + /** + * Accepts native JavaScript types instead of `AttributeValue`s, and calls + * QueryCommand operation from {@link @aws-sdk/client-dynamodb#QueryCommand}. + * + * JavaScript objects passed in as parameters are marshalled into `AttributeValue` shapes + * required by Amazon DynamoDB. 
Responses from DynamoDB are unmarshalled into plain JavaScript objects. + */ + query(args: QueryCommandInput, options?: __HttpHandlerOptions): Promise; + query(args: QueryCommandInput, cb: (err: any, data?: QueryCommandOutput) => void): void; + query(args: QueryCommandInput, options: __HttpHandlerOptions, cb: (err: any, data?: QueryCommandOutput) => void): void; + /** + * Accepts native JavaScript types instead of `AttributeValue`s, and calls + * ScanCommand operation from {@link @aws-sdk/client-dynamodb#ScanCommand}. + * + * JavaScript objects passed in as parameters are marshalled into `AttributeValue` shapes + * required by Amazon DynamoDB. Responses from DynamoDB are unmarshalled into plain JavaScript objects. + */ + scan(args: ScanCommandInput, options?: __HttpHandlerOptions): Promise; + scan(args: ScanCommandInput, cb: (err: any, data?: ScanCommandOutput) => void): void; + scan(args: ScanCommandInput, options: __HttpHandlerOptions, cb: (err: any, data?: ScanCommandOutput) => void): void; + /** + * Accepts native JavaScript types instead of `AttributeValue`s, and calls + * TransactGetItemsCommand operation from {@link @aws-sdk/client-dynamodb#TransactGetItemsCommand}. + * + * JavaScript objects passed in as parameters are marshalled into `AttributeValue` shapes + * required by Amazon DynamoDB. Responses from DynamoDB are unmarshalled into plain JavaScript objects. + */ + transactGet(args: TransactGetCommandInput, options?: __HttpHandlerOptions): Promise; + transactGet(args: TransactGetCommandInput, cb: (err: any, data?: TransactGetCommandOutput) => void): void; + transactGet(args: TransactGetCommandInput, options: __HttpHandlerOptions, cb: (err: any, data?: TransactGetCommandOutput) => void): void; + /** + * Accepts native JavaScript types instead of `AttributeValue`s, and calls + * TransactWriteItemsCommand operation from {@link @aws-sdk/client-dynamodb#TransactWriteItemsCommand}. 
+ * + * JavaScript objects passed in as parameters are marshalled into `AttributeValue` shapes + * required by Amazon DynamoDB. Responses from DynamoDB are unmarshalled into plain JavaScript objects. + */ + transactWrite(args: TransactWriteCommandInput, options?: __HttpHandlerOptions): Promise; + transactWrite(args: TransactWriteCommandInput, cb: (err: any, data?: TransactWriteCommandOutput) => void): void; + transactWrite(args: TransactWriteCommandInput, options: __HttpHandlerOptions, cb: (err: any, data?: TransactWriteCommandOutput) => void): void; + /** + * Accepts native JavaScript types instead of `AttributeValue`s, and calls + * UpdateItemCommand operation from {@link @aws-sdk/client-dynamodb#UpdateItemCommand}. + * + * JavaScript objects passed in as parameters are marshalled into `AttributeValue` shapes + * required by Amazon DynamoDB. Responses from DynamoDB are unmarshalled into plain JavaScript objects. + */ + update(args: UpdateCommandInput, options?: __HttpHandlerOptions): Promise; + update(args: UpdateCommandInput, cb: (err: any, data?: UpdateCommandOutput) => void): void; + update(args: UpdateCommandInput, options: __HttpHandlerOptions, cb: (err: any, data?: UpdateCommandOutput) => void): void; +} diff --git a/amplify/functions/fetchDocuments/node_modules/@aws-sdk/lib-dynamodb/dist-types/DynamoDBDocumentClient.d.ts b/amplify/functions/fetchDocuments/node_modules/@aws-sdk/lib-dynamodb/dist-types/DynamoDBDocumentClient.d.ts new file mode 100644 index 0000000..260830f --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@aws-sdk/lib-dynamodb/dist-types/DynamoDBDocumentClient.d.ts @@ -0,0 +1,96 @@ +import { DynamoDBClient, DynamoDBClientResolvedConfig, ServiceInputTypes as __ServiceInputTypes, ServiceOutputTypes as __ServiceOutputTypes } from "@aws-sdk/client-dynamodb"; +import { marshallOptions, unmarshallOptions } from "@aws-sdk/util-dynamodb"; +import { Client as __Client } from "@smithy/smithy-client"; +import { HttpHandlerOptions as 
__HttpHandlerOptions } from "@smithy/types"; +import { BatchExecuteStatementCommandInput, BatchExecuteStatementCommandOutput } from "./commands/BatchExecuteStatementCommand"; +import { BatchGetCommandInput, BatchGetCommandOutput } from "./commands/BatchGetCommand"; +import { BatchWriteCommandInput, BatchWriteCommandOutput } from "./commands/BatchWriteCommand"; +import { DeleteCommandInput, DeleteCommandOutput } from "./commands/DeleteCommand"; +import { ExecuteStatementCommandInput, ExecuteStatementCommandOutput } from "./commands/ExecuteStatementCommand"; +import { ExecuteTransactionCommandInput, ExecuteTransactionCommandOutput } from "./commands/ExecuteTransactionCommand"; +import { GetCommandInput, GetCommandOutput } from "./commands/GetCommand"; +import { PutCommandInput, PutCommandOutput } from "./commands/PutCommand"; +import { QueryCommandInput, QueryCommandOutput } from "./commands/QueryCommand"; +import { ScanCommandInput, ScanCommandOutput } from "./commands/ScanCommand"; +import { TransactGetCommandInput, TransactGetCommandOutput } from "./commands/TransactGetCommand"; +import { TransactWriteCommandInput, TransactWriteCommandOutput } from "./commands/TransactWriteCommand"; +import { UpdateCommandInput, UpdateCommandOutput } from "./commands/UpdateCommand"; +/** + * @public + */ +export { __Client }; +/** + * @public + */ +export type ServiceInputTypes = __ServiceInputTypes | BatchExecuteStatementCommandInput | BatchGetCommandInput | BatchWriteCommandInput | DeleteCommandInput | ExecuteStatementCommandInput | ExecuteTransactionCommandInput | GetCommandInput | PutCommandInput | QueryCommandInput | ScanCommandInput | TransactGetCommandInput | TransactWriteCommandInput | UpdateCommandInput; +/** + * @public + */ +export type ServiceOutputTypes = __ServiceOutputTypes | BatchExecuteStatementCommandOutput | BatchGetCommandOutput | BatchWriteCommandOutput | DeleteCommandOutput | ExecuteStatementCommandOutput | ExecuteTransactionCommandOutput | GetCommandOutput | 
PutCommandOutput | QueryCommandOutput | ScanCommandOutput | TransactGetCommandOutput | TransactWriteCommandOutput | UpdateCommandOutput; +/** + * @public + */ +export type TranslateConfig = { + marshallOptions?: marshallOptions; + unmarshallOptions?: unmarshallOptions; +}; +/** + * @public + */ +export type DynamoDBDocumentClientResolvedConfig = DynamoDBClientResolvedConfig & { + translateConfig?: TranslateConfig; +}; +/** + * The document client simplifies working with items in Amazon DynamoDB by + * abstracting away the notion of attribute values. This abstraction annotates native + * JavaScript types supplied as input parameters, as well as converts annotated + * response data to native JavaScript types. + * + * ## Marshalling Input and Unmarshalling Response Data + * + * The document client affords developers the use of native JavaScript types + * instead of `AttributeValue`s to simplify the JavaScript development + * experience with Amazon DynamoDB. JavaScript objects passed in as parameters + * are marshalled into `AttributeValue` shapes required by Amazon DynamoDB. + * Responses from DynamoDB are unmarshalled into plain JavaScript objects + * by the `DocumentClient`. The `DocumentClient` does not accept + * `AttributeValue`s in favor of native JavaScript types. + * + * | JavaScript Type | DynamoDB AttributeValue | + * | :-------------------------------: | ----------------------- | + * | String | S | + * | Number / BigInt | N | + * | Boolean | BOOL | + * | null | NULL | + * | Array | L | + * | Object | M | + * | Set\ | BS | + * | Set\ | NS | + * | Set\ | SS | + * | Uint8Array, Buffer, File, Blob... 
| B | + * + * ### Example + * + * Here is an example list which is sent to DynamoDB client in an operation: + * + * ```json + * { "L": [{ "NULL": true }, { "BOOL": false }, { "N": 1 }, { "S": "two" }] } + * ``` + * + * The DynamoDB document client abstracts the attribute values as follows in + * both input and output: + * + * ```json + * [null, false, 1, "two"] + * ``` + * + * @see {@link https://www.npmjs.com/package/@aws-sdk/client-dynamodb | @aws-sdk/client-dynamodb} + * + * @public + */ +export declare class DynamoDBDocumentClient extends __Client<__HttpHandlerOptions, ServiceInputTypes, ServiceOutputTypes, DynamoDBDocumentClientResolvedConfig> { + readonly config: DynamoDBDocumentClientResolvedConfig; + protected constructor(client: DynamoDBClient, translateConfig?: TranslateConfig); + static from(client: DynamoDBClient, translateConfig?: TranslateConfig): DynamoDBDocumentClient; + destroy(): void; +} diff --git a/amplify/functions/fetchDocuments/node_modules/@aws-sdk/lib-dynamodb/dist-types/baseCommand/DynamoDBDocumentClientCommand.d.ts b/amplify/functions/fetchDocuments/node_modules/@aws-sdk/lib-dynamodb/dist-types/baseCommand/DynamoDBDocumentClientCommand.d.ts new file mode 100644 index 0000000..e78690a --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@aws-sdk/lib-dynamodb/dist-types/baseCommand/DynamoDBDocumentClientCommand.d.ts @@ -0,0 +1,17 @@ +import { Command as $Command } from "@smithy/smithy-client"; +import { MiddlewareStack } from "@smithy/types"; +import { KeyNodeChildren } from "../commands/utils"; +import { DynamoDBDocumentClientResolvedConfig } from "../DynamoDBDocumentClient"; +/** + * Base class for Commands in lib-dynamodb used to pass middleware to + * the underlying DynamoDBClient Commands. 
+ * + * @public + */ +export declare abstract class DynamoDBDocumentClientCommand extends $Command { + protected abstract readonly inputKeyNodes: KeyNodeChildren; + protected abstract readonly outputKeyNodes: KeyNodeChildren; + protected abstract clientCommand: $Command; + abstract middlewareStack: MiddlewareStack; + protected addMarshallingMiddleware(configuration: DynamoDBDocumentClientResolvedConfig): void; +} diff --git a/amplify/functions/fetchDocuments/node_modules/@aws-sdk/lib-dynamodb/dist-types/commands/BatchExecuteStatementCommand.d.ts b/amplify/functions/fetchDocuments/node_modules/@aws-sdk/lib-dynamodb/dist-types/commands/BatchExecuteStatementCommand.d.ts new file mode 100644 index 0000000..8f55283 --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@aws-sdk/lib-dynamodb/dist-types/commands/BatchExecuteStatementCommand.d.ts @@ -0,0 +1,66 @@ +import { BatchExecuteStatementCommand as __BatchExecuteStatementCommand } from "@aws-sdk/client-dynamodb"; +import { Command as $Command } from "@smithy/smithy-client"; +import { Handler, HttpHandlerOptions as __HttpHandlerOptions, MiddlewareStack } from "@smithy/types"; +import { DynamoDBDocumentClientCommand } from "../baseCommand/DynamoDBDocumentClientCommand"; +import { DynamoDBDocumentClientResolvedConfig, ServiceInputTypes, ServiceOutputTypes } from "../DynamoDBDocumentClient"; +/** + * @public + */ +export { DynamoDBDocumentClientCommand, $Command }; +/** + * @public + */ +export type BatchExecuteStatementCommandInput = Omit<__BatchExecuteStatementCommandInput, "Statements"> & { + Statements: (Omit & { + Parameters?: NativeAttributeValue[] | undefined; + })[] | undefined; +}; +/** + * @public + */ +export type BatchExecuteStatementCommandOutput = Omit<__BatchExecuteStatementCommandOutput, "Responses"> & { + Responses?: (Omit & { + Error?: (Omit & { + Item?: Record | undefined; + }) | undefined; + Item?: Record | undefined; + })[] | undefined; +}; +/** + * Accepts native JavaScript types instead 
of `AttributeValue`s, and calls + * BatchExecuteStatementCommand operation from {@link @aws-sdk/client-dynamodb#BatchExecuteStatementCommand}. + * + * JavaScript objects passed in as parameters are marshalled into `AttributeValue` shapes + * required by Amazon DynamoDB. Responses from DynamoDB are unmarshalled into plain JavaScript objects. + * + * @public + */ +export declare class BatchExecuteStatementCommand extends DynamoDBDocumentClientCommand { + readonly input: BatchExecuteStatementCommandInput; + protected readonly inputKeyNodes: { + Statements: { + "*": { + Parameters: import("../commands/utils").KeyNodeChildren; + }; + }; + }; + protected readonly outputKeyNodes: { + Responses: { + "*": { + Error: { + Item: import("../commands/utils").KeyNodeChildren; + }; + Item: import("../commands/utils").KeyNodeChildren; + }; + }; + }; + protected readonly clientCommand: __BatchExecuteStatementCommand; + readonly middlewareStack: MiddlewareStack; + constructor(input: BatchExecuteStatementCommandInput); + /** + * @internal + */ + resolveMiddleware(clientStack: MiddlewareStack, configuration: DynamoDBDocumentClientResolvedConfig, options?: __HttpHandlerOptions): Handler; +} +import type { BatchExecuteStatementCommandInput as __BatchExecuteStatementCommandInput, BatchExecuteStatementCommandOutput as __BatchExecuteStatementCommandOutput, BatchStatementError, BatchStatementRequest, BatchStatementResponse } from "@aws-sdk/client-dynamodb"; +import type { NativeAttributeValue } from "@aws-sdk/util-dynamodb"; diff --git a/amplify/functions/fetchDocuments/node_modules/@aws-sdk/lib-dynamodb/dist-types/commands/BatchGetCommand.d.ts b/amplify/functions/fetchDocuments/node_modules/@aws-sdk/lib-dynamodb/dist-types/commands/BatchGetCommand.d.ts new file mode 100644 index 0000000..60cd689 --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@aws-sdk/lib-dynamodb/dist-types/commands/BatchGetCommand.d.ts @@ -0,0 +1,70 @@ +import { BatchGetItemCommand as 
__BatchGetItemCommand } from "@aws-sdk/client-dynamodb"; +import { Command as $Command } from "@smithy/smithy-client"; +import { Handler, HttpHandlerOptions as __HttpHandlerOptions, MiddlewareStack } from "@smithy/types"; +import { DynamoDBDocumentClientCommand } from "../baseCommand/DynamoDBDocumentClientCommand"; +import { DynamoDBDocumentClientResolvedConfig, ServiceInputTypes, ServiceOutputTypes } from "../DynamoDBDocumentClient"; +/** + * @public + */ +export { DynamoDBDocumentClientCommand, $Command }; +/** + * @public + */ +export type BatchGetCommandInput = Omit<__BatchGetItemCommandInput, "RequestItems"> & { + RequestItems: Record & { + Keys: Record[] | undefined; + }> | undefined; +}; +/** + * @public + */ +export type BatchGetCommandOutput = Omit<__BatchGetItemCommandOutput, "Responses" | "UnprocessedKeys"> & { + Responses?: Record[]> | undefined; + UnprocessedKeys?: Record & { + Keys: Record[] | undefined; + }> | undefined; +}; +/** + * Accepts native JavaScript types instead of `AttributeValue`s, and calls + * BatchGetItemCommand operation from {@link @aws-sdk/client-dynamodb#BatchGetItemCommand}. + * + * JavaScript objects passed in as parameters are marshalled into `AttributeValue` shapes + * required by Amazon DynamoDB. Responses from DynamoDB are unmarshalled into plain JavaScript objects. 
+ * + * @public + */ +export declare class BatchGetCommand extends DynamoDBDocumentClientCommand { + readonly input: BatchGetCommandInput; + protected readonly inputKeyNodes: { + RequestItems: { + "*": { + Keys: { + "*": import("../commands/utils").KeyNodeChildren; + }; + }; + }; + }; + protected readonly outputKeyNodes: { + Responses: { + "*": { + "*": import("../commands/utils").KeyNodeChildren; + }; + }; + UnprocessedKeys: { + "*": { + Keys: { + "*": import("../commands/utils").KeyNodeChildren; + }; + }; + }; + }; + protected readonly clientCommand: __BatchGetItemCommand; + readonly middlewareStack: MiddlewareStack; + constructor(input: BatchGetCommandInput); + /** + * @internal + */ + resolveMiddleware(clientStack: MiddlewareStack, configuration: DynamoDBDocumentClientResolvedConfig, options?: __HttpHandlerOptions): Handler; +} +import type { BatchGetItemCommandInput as __BatchGetItemCommandInput, BatchGetItemCommandOutput as __BatchGetItemCommandOutput, KeysAndAttributes } from "@aws-sdk/client-dynamodb"; +import type { NativeAttributeValue } from "@aws-sdk/util-dynamodb"; diff --git a/amplify/functions/fetchDocuments/node_modules/@aws-sdk/lib-dynamodb/dist-types/commands/BatchWriteCommand.d.ts b/amplify/functions/fetchDocuments/node_modules/@aws-sdk/lib-dynamodb/dist-types/commands/BatchWriteCommand.d.ts new file mode 100644 index 0000000..930d3f5 --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@aws-sdk/lib-dynamodb/dist-types/commands/BatchWriteCommand.d.ts @@ -0,0 +1,94 @@ +import { BatchWriteItemCommand as __BatchWriteItemCommand } from "@aws-sdk/client-dynamodb"; +import { Command as $Command } from "@smithy/smithy-client"; +import { Handler, HttpHandlerOptions as __HttpHandlerOptions, MiddlewareStack } from "@smithy/types"; +import { DynamoDBDocumentClientCommand } from "../baseCommand/DynamoDBDocumentClientCommand"; +import { DynamoDBDocumentClientResolvedConfig, ServiceInputTypes, ServiceOutputTypes } from "../DynamoDBDocumentClient"; 
+/** + * @public + */ +export { DynamoDBDocumentClientCommand, $Command }; +/** + * @public + */ +export type BatchWriteCommandInput = Omit<__BatchWriteItemCommandInput, "RequestItems"> & { + RequestItems: Record & { + PutRequest?: (Omit & { + Item: Record | undefined; + }) | undefined; + DeleteRequest?: (Omit & { + Key: Record | undefined; + }) | undefined; + })[]> | undefined; +}; +/** + * @public + */ +export type BatchWriteCommandOutput = Omit<__BatchWriteItemCommandOutput, "UnprocessedItems" | "ItemCollectionMetrics"> & { + UnprocessedItems?: Record & { + PutRequest?: (Omit & { + Item: Record | undefined; + }) | undefined; + DeleteRequest?: (Omit & { + Key: Record | undefined; + }) | undefined; + })[]> | undefined; + ItemCollectionMetrics?: Record & { + ItemCollectionKey?: Record | undefined; + })[]> | undefined; +}; +/** + * Accepts native JavaScript types instead of `AttributeValue`s, and calls + * BatchWriteItemCommand operation from {@link @aws-sdk/client-dynamodb#BatchWriteItemCommand}. + * + * JavaScript objects passed in as parameters are marshalled into `AttributeValue` shapes + * required by Amazon DynamoDB. Responses from DynamoDB are unmarshalled into plain JavaScript objects. 
+ * + * @public + */ +export declare class BatchWriteCommand extends DynamoDBDocumentClientCommand { + readonly input: BatchWriteCommandInput; + protected readonly inputKeyNodes: { + RequestItems: { + "*": { + "*": { + PutRequest: { + Item: import("../commands/utils").KeyNodeChildren; + }; + DeleteRequest: { + Key: import("../commands/utils").KeyNodeChildren; + }; + }; + }; + }; + }; + protected readonly outputKeyNodes: { + UnprocessedItems: { + "*": { + "*": { + PutRequest: { + Item: import("../commands/utils").KeyNodeChildren; + }; + DeleteRequest: { + Key: import("../commands/utils").KeyNodeChildren; + }; + }; + }; + }; + ItemCollectionMetrics: { + "*": { + "*": { + ItemCollectionKey: import("../commands/utils").KeyNodeChildren; + }; + }; + }; + }; + protected readonly clientCommand: __BatchWriteItemCommand; + readonly middlewareStack: MiddlewareStack; + constructor(input: BatchWriteCommandInput); + /** + * @internal + */ + resolveMiddleware(clientStack: MiddlewareStack, configuration: DynamoDBDocumentClientResolvedConfig, options?: __HttpHandlerOptions): Handler; +} +import type { BatchWriteItemCommandInput as __BatchWriteItemCommandInput, BatchWriteItemCommandOutput as __BatchWriteItemCommandOutput, DeleteRequest, ItemCollectionMetrics, PutRequest, WriteRequest } from "@aws-sdk/client-dynamodb"; +import type { NativeAttributeValue } from "@aws-sdk/util-dynamodb"; diff --git a/amplify/functions/fetchDocuments/node_modules/@aws-sdk/lib-dynamodb/dist-types/commands/DeleteCommand.d.ts b/amplify/functions/fetchDocuments/node_modules/@aws-sdk/lib-dynamodb/dist-types/commands/DeleteCommand.d.ts new file mode 100644 index 0000000..c091175 --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@aws-sdk/lib-dynamodb/dist-types/commands/DeleteCommand.d.ts @@ -0,0 +1,66 @@ +import { DeleteItemCommand as __DeleteItemCommand } from "@aws-sdk/client-dynamodb"; +import { Command as $Command } from "@smithy/smithy-client"; +import { Handler, HttpHandlerOptions as 
__HttpHandlerOptions, MiddlewareStack } from "@smithy/types"; +import { DynamoDBDocumentClientCommand } from "../baseCommand/DynamoDBDocumentClientCommand"; +import { DynamoDBDocumentClientResolvedConfig, ServiceInputTypes, ServiceOutputTypes } from "../DynamoDBDocumentClient"; +/** + * @public + */ +export { DynamoDBDocumentClientCommand, $Command }; +/** + * @public + */ +export type DeleteCommandInput = Omit<__DeleteItemCommandInput, "Key" | "Expected" | "ExpressionAttributeValues"> & { + Key: Record | undefined; + Expected?: Record & { + Value?: NativeAttributeValue | undefined; + AttributeValueList?: NativeAttributeValue[] | undefined; + }> | undefined; + ExpressionAttributeValues?: Record | undefined; +}; +/** + * @public + */ +export type DeleteCommandOutput = Omit<__DeleteItemCommandOutput, "Attributes" | "ItemCollectionMetrics"> & { + Attributes?: Record | undefined; + ItemCollectionMetrics?: (Omit & { + ItemCollectionKey?: Record | undefined; + }) | undefined; +}; +/** + * Accepts native JavaScript types instead of `AttributeValue`s, and calls + * DeleteItemCommand operation from {@link @aws-sdk/client-dynamodb#DeleteItemCommand}. + * + * JavaScript objects passed in as parameters are marshalled into `AttributeValue` shapes + * required by Amazon DynamoDB. Responses from DynamoDB are unmarshalled into plain JavaScript objects. 
+ * + * @public + */ +export declare class DeleteCommand extends DynamoDBDocumentClientCommand { + readonly input: DeleteCommandInput; + protected readonly inputKeyNodes: { + Key: import("../commands/utils").KeyNodeChildren; + Expected: { + "*": { + Value: null; + AttributeValueList: import("../commands/utils").KeyNodeChildren; + }; + }; + ExpressionAttributeValues: import("../commands/utils").KeyNodeChildren; + }; + protected readonly outputKeyNodes: { + Attributes: import("../commands/utils").KeyNodeChildren; + ItemCollectionMetrics: { + ItemCollectionKey: import("../commands/utils").KeyNodeChildren; + }; + }; + protected readonly clientCommand: __DeleteItemCommand; + readonly middlewareStack: MiddlewareStack; + constructor(input: DeleteCommandInput); + /** + * @internal + */ + resolveMiddleware(clientStack: MiddlewareStack, configuration: DynamoDBDocumentClientResolvedConfig, options?: __HttpHandlerOptions): Handler; +} +import type { DeleteItemCommandInput as __DeleteItemCommandInput, DeleteItemCommandOutput as __DeleteItemCommandOutput, ExpectedAttributeValue, ItemCollectionMetrics } from "@aws-sdk/client-dynamodb"; +import type { NativeAttributeValue } from "@aws-sdk/util-dynamodb"; diff --git a/amplify/functions/fetchDocuments/node_modules/@aws-sdk/lib-dynamodb/dist-types/commands/ExecuteStatementCommand.d.ts b/amplify/functions/fetchDocuments/node_modules/@aws-sdk/lib-dynamodb/dist-types/commands/ExecuteStatementCommand.d.ts new file mode 100644 index 0000000..4498320 --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@aws-sdk/lib-dynamodb/dist-types/commands/ExecuteStatementCommand.d.ts @@ -0,0 +1,52 @@ +import { ExecuteStatementCommand as __ExecuteStatementCommand } from "@aws-sdk/client-dynamodb"; +import { Command as $Command } from "@smithy/smithy-client"; +import { Handler, HttpHandlerOptions as __HttpHandlerOptions, MiddlewareStack } from "@smithy/types"; +import { DynamoDBDocumentClientCommand } from 
"../baseCommand/DynamoDBDocumentClientCommand"; +import { DynamoDBDocumentClientResolvedConfig, ServiceInputTypes, ServiceOutputTypes } from "../DynamoDBDocumentClient"; +/** + * @public + */ +export { DynamoDBDocumentClientCommand, $Command }; +/** + * @public + */ +export type ExecuteStatementCommandInput = Omit<__ExecuteStatementCommandInput, "Parameters"> & { + Parameters?: NativeAttributeValue[] | undefined; +}; +/** + * @public + */ +export type ExecuteStatementCommandOutput = Omit<__ExecuteStatementCommandOutput, "Items" | "LastEvaluatedKey"> & { + Items?: Record[] | undefined; + LastEvaluatedKey?: Record | undefined; +}; +/** + * Accepts native JavaScript types instead of `AttributeValue`s, and calls + * ExecuteStatementCommand operation from {@link @aws-sdk/client-dynamodb#ExecuteStatementCommand}. + * + * JavaScript objects passed in as parameters are marshalled into `AttributeValue` shapes + * required by Amazon DynamoDB. Responses from DynamoDB are unmarshalled into plain JavaScript objects. 
+ * + * @public + */ +export declare class ExecuteStatementCommand extends DynamoDBDocumentClientCommand { + readonly input: ExecuteStatementCommandInput; + protected readonly inputKeyNodes: { + Parameters: import("../commands/utils").KeyNodeChildren; + }; + protected readonly outputKeyNodes: { + Items: { + "*": import("../commands/utils").KeyNodeChildren; + }; + LastEvaluatedKey: import("../commands/utils").KeyNodeChildren; + }; + protected readonly clientCommand: __ExecuteStatementCommand; + readonly middlewareStack: MiddlewareStack; + constructor(input: ExecuteStatementCommandInput); + /** + * @internal + */ + resolveMiddleware(clientStack: MiddlewareStack, configuration: DynamoDBDocumentClientResolvedConfig, options?: __HttpHandlerOptions): Handler; +} +import type { ExecuteStatementCommandInput as __ExecuteStatementCommandInput, ExecuteStatementCommandOutput as __ExecuteStatementCommandOutput } from "@aws-sdk/client-dynamodb"; +import type { NativeAttributeValue } from "@aws-sdk/util-dynamodb"; diff --git a/amplify/functions/fetchDocuments/node_modules/@aws-sdk/lib-dynamodb/dist-types/commands/ExecuteTransactionCommand.d.ts b/amplify/functions/fetchDocuments/node_modules/@aws-sdk/lib-dynamodb/dist-types/commands/ExecuteTransactionCommand.d.ts new file mode 100644 index 0000000..04873aa --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@aws-sdk/lib-dynamodb/dist-types/commands/ExecuteTransactionCommand.d.ts @@ -0,0 +1,60 @@ +import { ExecuteTransactionCommand as __ExecuteTransactionCommand } from "@aws-sdk/client-dynamodb"; +import { Command as $Command } from "@smithy/smithy-client"; +import { Handler, HttpHandlerOptions as __HttpHandlerOptions, MiddlewareStack } from "@smithy/types"; +import { DynamoDBDocumentClientCommand } from "../baseCommand/DynamoDBDocumentClientCommand"; +import { DynamoDBDocumentClientResolvedConfig, ServiceInputTypes, ServiceOutputTypes } from "../DynamoDBDocumentClient"; +/** + * @public + */ +export { 
DynamoDBDocumentClientCommand, $Command }; +/** + * @public + */ +export type ExecuteTransactionCommandInput = Omit<__ExecuteTransactionCommandInput, "TransactStatements"> & { + TransactStatements: (Omit & { + Parameters?: NativeAttributeValue[] | undefined; + })[] | undefined; +}; +/** + * @public + */ +export type ExecuteTransactionCommandOutput = Omit<__ExecuteTransactionCommandOutput, "Responses"> & { + Responses?: (Omit & { + Item?: Record | undefined; + })[] | undefined; +}; +/** + * Accepts native JavaScript types instead of `AttributeValue`s, and calls + * ExecuteTransactionCommand operation from {@link @aws-sdk/client-dynamodb#ExecuteTransactionCommand}. + * + * JavaScript objects passed in as parameters are marshalled into `AttributeValue` shapes + * required by Amazon DynamoDB. Responses from DynamoDB are unmarshalled into plain JavaScript objects. + * + * @public + */ +export declare class ExecuteTransactionCommand extends DynamoDBDocumentClientCommand { + readonly input: ExecuteTransactionCommandInput; + protected readonly inputKeyNodes: { + TransactStatements: { + "*": { + Parameters: import("../commands/utils").KeyNodeChildren; + }; + }; + }; + protected readonly outputKeyNodes: { + Responses: { + "*": { + Item: import("../commands/utils").KeyNodeChildren; + }; + }; + }; + protected readonly clientCommand: __ExecuteTransactionCommand; + readonly middlewareStack: MiddlewareStack; + constructor(input: ExecuteTransactionCommandInput); + /** + * @internal + */ + resolveMiddleware(clientStack: MiddlewareStack, configuration: DynamoDBDocumentClientResolvedConfig, options?: __HttpHandlerOptions): Handler; +} +import type { ExecuteTransactionCommandInput as __ExecuteTransactionCommandInput, ExecuteTransactionCommandOutput as __ExecuteTransactionCommandOutput, ItemResponse, ParameterizedStatement } from "@aws-sdk/client-dynamodb"; +import type { NativeAttributeValue } from "@aws-sdk/util-dynamodb"; diff --git 
a/amplify/functions/fetchDocuments/node_modules/@aws-sdk/lib-dynamodb/dist-types/commands/GetCommand.d.ts b/amplify/functions/fetchDocuments/node_modules/@aws-sdk/lib-dynamodb/dist-types/commands/GetCommand.d.ts new file mode 100644 index 0000000..d425131 --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@aws-sdk/lib-dynamodb/dist-types/commands/GetCommand.d.ts @@ -0,0 +1,48 @@ +import { GetItemCommand as __GetItemCommand } from "@aws-sdk/client-dynamodb"; +import { Command as $Command } from "@smithy/smithy-client"; +import { Handler, HttpHandlerOptions as __HttpHandlerOptions, MiddlewareStack } from "@smithy/types"; +import { DynamoDBDocumentClientCommand } from "../baseCommand/DynamoDBDocumentClientCommand"; +import { DynamoDBDocumentClientResolvedConfig, ServiceInputTypes, ServiceOutputTypes } from "../DynamoDBDocumentClient"; +/** + * @public + */ +export { DynamoDBDocumentClientCommand, $Command }; +/** + * @public + */ +export type GetCommandInput = Omit<__GetItemCommandInput, "Key"> & { + Key: Record | undefined; +}; +/** + * @public + */ +export type GetCommandOutput = Omit<__GetItemCommandOutput, "Item"> & { + Item?: Record | undefined; +}; +/** + * Accepts native JavaScript types instead of `AttributeValue`s, and calls + * GetItemCommand operation from {@link @aws-sdk/client-dynamodb#GetItemCommand}. + * + * JavaScript objects passed in as parameters are marshalled into `AttributeValue` shapes + * required by Amazon DynamoDB. Responses from DynamoDB are unmarshalled into plain JavaScript objects. 
+ * + * @public + */ +export declare class GetCommand extends DynamoDBDocumentClientCommand { + readonly input: GetCommandInput; + protected readonly inputKeyNodes: { + Key: import("../commands/utils").KeyNodeChildren; + }; + protected readonly outputKeyNodes: { + Item: import("../commands/utils").KeyNodeChildren; + }; + protected readonly clientCommand: __GetItemCommand; + readonly middlewareStack: MiddlewareStack; + constructor(input: GetCommandInput); + /** + * @internal + */ + resolveMiddleware(clientStack: MiddlewareStack, configuration: DynamoDBDocumentClientResolvedConfig, options?: __HttpHandlerOptions): Handler; +} +import type { GetItemCommandInput as __GetItemCommandInput, GetItemCommandOutput as __GetItemCommandOutput } from "@aws-sdk/client-dynamodb"; +import type { NativeAttributeValue } from "@aws-sdk/util-dynamodb"; diff --git a/amplify/functions/fetchDocuments/node_modules/@aws-sdk/lib-dynamodb/dist-types/commands/PutCommand.d.ts b/amplify/functions/fetchDocuments/node_modules/@aws-sdk/lib-dynamodb/dist-types/commands/PutCommand.d.ts new file mode 100644 index 0000000..babe831 --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@aws-sdk/lib-dynamodb/dist-types/commands/PutCommand.d.ts @@ -0,0 +1,66 @@ +import { PutItemCommand as __PutItemCommand } from "@aws-sdk/client-dynamodb"; +import { Command as $Command } from "@smithy/smithy-client"; +import { Handler, HttpHandlerOptions as __HttpHandlerOptions, MiddlewareStack } from "@smithy/types"; +import { DynamoDBDocumentClientCommand } from "../baseCommand/DynamoDBDocumentClientCommand"; +import { DynamoDBDocumentClientResolvedConfig, ServiceInputTypes, ServiceOutputTypes } from "../DynamoDBDocumentClient"; +/** + * @public + */ +export { DynamoDBDocumentClientCommand, $Command }; +/** + * @public + */ +export type PutCommandInput = Omit<__PutItemCommandInput, "Item" | "Expected" | "ExpressionAttributeValues"> & { + Item: Record | undefined; + Expected?: Record & { + Value?: 
NativeAttributeValue | undefined; + AttributeValueList?: NativeAttributeValue[] | undefined; + }> | undefined; + ExpressionAttributeValues?: Record | undefined; +}; +/** + * @public + */ +export type PutCommandOutput = Omit<__PutItemCommandOutput, "Attributes" | "ItemCollectionMetrics"> & { + Attributes?: Record | undefined; + ItemCollectionMetrics?: (Omit & { + ItemCollectionKey?: Record | undefined; + }) | undefined; +}; +/** + * Accepts native JavaScript types instead of `AttributeValue`s, and calls + * PutItemCommand operation from {@link @aws-sdk/client-dynamodb#PutItemCommand}. + * + * JavaScript objects passed in as parameters are marshalled into `AttributeValue` shapes + * required by Amazon DynamoDB. Responses from DynamoDB are unmarshalled into plain JavaScript objects. + * + * @public + */ +export declare class PutCommand extends DynamoDBDocumentClientCommand { + readonly input: PutCommandInput; + protected readonly inputKeyNodes: { + Item: import("../commands/utils").KeyNodeChildren; + Expected: { + "*": { + Value: null; + AttributeValueList: import("../commands/utils").KeyNodeChildren; + }; + }; + ExpressionAttributeValues: import("../commands/utils").KeyNodeChildren; + }; + protected readonly outputKeyNodes: { + Attributes: import("../commands/utils").KeyNodeChildren; + ItemCollectionMetrics: { + ItemCollectionKey: import("../commands/utils").KeyNodeChildren; + }; + }; + protected readonly clientCommand: __PutItemCommand; + readonly middlewareStack: MiddlewareStack; + constructor(input: PutCommandInput); + /** + * @internal + */ + resolveMiddleware(clientStack: MiddlewareStack, configuration: DynamoDBDocumentClientResolvedConfig, options?: __HttpHandlerOptions): Handler; +} +import type { ExpectedAttributeValue, ItemCollectionMetrics, PutItemCommandInput as __PutItemCommandInput, PutItemCommandOutput as __PutItemCommandOutput } from "@aws-sdk/client-dynamodb"; +import type { NativeAttributeValue } from "@aws-sdk/util-dynamodb"; diff --git 
a/amplify/functions/fetchDocuments/node_modules/@aws-sdk/lib-dynamodb/dist-types/commands/QueryCommand.d.ts b/amplify/functions/fetchDocuments/node_modules/@aws-sdk/lib-dynamodb/dist-types/commands/QueryCommand.d.ts new file mode 100644 index 0000000..4948a70 --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@aws-sdk/lib-dynamodb/dist-types/commands/QueryCommand.d.ts @@ -0,0 +1,70 @@ +import { QueryCommand as __QueryCommand } from "@aws-sdk/client-dynamodb"; +import { Command as $Command } from "@smithy/smithy-client"; +import { Handler, HttpHandlerOptions as __HttpHandlerOptions, MiddlewareStack } from "@smithy/types"; +import { DynamoDBDocumentClientCommand } from "../baseCommand/DynamoDBDocumentClientCommand"; +import { DynamoDBDocumentClientResolvedConfig, ServiceInputTypes, ServiceOutputTypes } from "../DynamoDBDocumentClient"; +/** + * @public + */ +export { DynamoDBDocumentClientCommand, $Command }; +/** + * @public + */ +export type QueryCommandInput = Omit<__QueryCommandInput, "KeyConditions" | "QueryFilter" | "ExclusiveStartKey" | "ExpressionAttributeValues"> & { + KeyConditions?: Record & { + AttributeValueList?: NativeAttributeValue[] | undefined; + }> | undefined; + QueryFilter?: Record & { + AttributeValueList?: NativeAttributeValue[] | undefined; + }> | undefined; + ExclusiveStartKey?: Record | undefined; + ExpressionAttributeValues?: Record | undefined; +}; +/** + * @public + */ +export type QueryCommandOutput = Omit<__QueryCommandOutput, "Items" | "LastEvaluatedKey"> & { + Items?: Record[] | undefined; + LastEvaluatedKey?: Record | undefined; +}; +/** + * Accepts native JavaScript types instead of `AttributeValue`s, and calls + * QueryCommand operation from {@link @aws-sdk/client-dynamodb#QueryCommand}. + * + * JavaScript objects passed in as parameters are marshalled into `AttributeValue` shapes + * required by Amazon DynamoDB. Responses from DynamoDB are unmarshalled into plain JavaScript objects. 
+ * + * @public + */ +export declare class QueryCommand extends DynamoDBDocumentClientCommand { + readonly input: QueryCommandInput; + protected readonly inputKeyNodes: { + KeyConditions: { + "*": { + AttributeValueList: import("../commands/utils").KeyNodeChildren; + }; + }; + QueryFilter: { + "*": { + AttributeValueList: import("../commands/utils").KeyNodeChildren; + }; + }; + ExclusiveStartKey: import("../commands/utils").KeyNodeChildren; + ExpressionAttributeValues: import("../commands/utils").KeyNodeChildren; + }; + protected readonly outputKeyNodes: { + Items: { + "*": import("../commands/utils").KeyNodeChildren; + }; + LastEvaluatedKey: import("../commands/utils").KeyNodeChildren; + }; + protected readonly clientCommand: __QueryCommand; + readonly middlewareStack: MiddlewareStack; + constructor(input: QueryCommandInput); + /** + * @internal + */ + resolveMiddleware(clientStack: MiddlewareStack, configuration: DynamoDBDocumentClientResolvedConfig, options?: __HttpHandlerOptions): Handler; +} +import type { Condition, QueryCommandInput as __QueryCommandInput, QueryCommandOutput as __QueryCommandOutput } from "@aws-sdk/client-dynamodb"; +import type { NativeAttributeValue } from "@aws-sdk/util-dynamodb"; diff --git a/amplify/functions/fetchDocuments/node_modules/@aws-sdk/lib-dynamodb/dist-types/commands/ScanCommand.d.ts b/amplify/functions/fetchDocuments/node_modules/@aws-sdk/lib-dynamodb/dist-types/commands/ScanCommand.d.ts new file mode 100644 index 0000000..86b0089 --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@aws-sdk/lib-dynamodb/dist-types/commands/ScanCommand.d.ts @@ -0,0 +1,62 @@ +import { ScanCommand as __ScanCommand } from "@aws-sdk/client-dynamodb"; +import { Command as $Command } from "@smithy/smithy-client"; +import { Handler, HttpHandlerOptions as __HttpHandlerOptions, MiddlewareStack } from "@smithy/types"; +import { DynamoDBDocumentClientCommand } from "../baseCommand/DynamoDBDocumentClientCommand"; +import { 
DynamoDBDocumentClientResolvedConfig, ServiceInputTypes, ServiceOutputTypes } from "../DynamoDBDocumentClient"; +/** + * @public + */ +export { DynamoDBDocumentClientCommand, $Command }; +/** + * @public + */ +export type ScanCommandInput = Omit<__ScanCommandInput, "ScanFilter" | "ExclusiveStartKey" | "ExpressionAttributeValues"> & { + ScanFilter?: Record & { + AttributeValueList?: NativeAttributeValue[] | undefined; + }> | undefined; + ExclusiveStartKey?: Record | undefined; + ExpressionAttributeValues?: Record | undefined; +}; +/** + * @public + */ +export type ScanCommandOutput = Omit<__ScanCommandOutput, "Items" | "LastEvaluatedKey"> & { + Items?: Record[] | undefined; + LastEvaluatedKey?: Record | undefined; +}; +/** + * Accepts native JavaScript types instead of `AttributeValue`s, and calls + * ScanCommand operation from {@link @aws-sdk/client-dynamodb#ScanCommand}. + * + * JavaScript objects passed in as parameters are marshalled into `AttributeValue` shapes + * required by Amazon DynamoDB. Responses from DynamoDB are unmarshalled into plain JavaScript objects. 
+ * + * @public + */ +export declare class ScanCommand extends DynamoDBDocumentClientCommand { + readonly input: ScanCommandInput; + protected readonly inputKeyNodes: { + ScanFilter: { + "*": { + AttributeValueList: import("../commands/utils").KeyNodeChildren; + }; + }; + ExclusiveStartKey: import("../commands/utils").KeyNodeChildren; + ExpressionAttributeValues: import("../commands/utils").KeyNodeChildren; + }; + protected readonly outputKeyNodes: { + Items: { + "*": import("../commands/utils").KeyNodeChildren; + }; + LastEvaluatedKey: import("../commands/utils").KeyNodeChildren; + }; + protected readonly clientCommand: __ScanCommand; + readonly middlewareStack: MiddlewareStack; + constructor(input: ScanCommandInput); + /** + * @internal + */ + resolveMiddleware(clientStack: MiddlewareStack, configuration: DynamoDBDocumentClientResolvedConfig, options?: __HttpHandlerOptions): Handler; +} +import type { Condition, ScanCommandInput as __ScanCommandInput, ScanCommandOutput as __ScanCommandOutput } from "@aws-sdk/client-dynamodb"; +import type { NativeAttributeValue } from "@aws-sdk/util-dynamodb"; diff --git a/amplify/functions/fetchDocuments/node_modules/@aws-sdk/lib-dynamodb/dist-types/commands/TransactGetCommand.d.ts b/amplify/functions/fetchDocuments/node_modules/@aws-sdk/lib-dynamodb/dist-types/commands/TransactGetCommand.d.ts new file mode 100644 index 0000000..3355455 --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@aws-sdk/lib-dynamodb/dist-types/commands/TransactGetCommand.d.ts @@ -0,0 +1,64 @@ +import { TransactGetItemsCommand as __TransactGetItemsCommand } from "@aws-sdk/client-dynamodb"; +import { Command as $Command } from "@smithy/smithy-client"; +import { Handler, HttpHandlerOptions as __HttpHandlerOptions, MiddlewareStack } from "@smithy/types"; +import { DynamoDBDocumentClientCommand } from "../baseCommand/DynamoDBDocumentClientCommand"; +import { DynamoDBDocumentClientResolvedConfig, ServiceInputTypes, ServiceOutputTypes } from 
"../DynamoDBDocumentClient"; +/** + * @public + */ +export { DynamoDBDocumentClientCommand, $Command }; +/** + * @public + */ +export type TransactGetCommandInput = Omit<__TransactGetItemsCommandInput, "TransactItems"> & { + TransactItems: (Omit & { + Get: (Omit & { + Key: Record | undefined; + }) | undefined; + })[] | undefined; +}; +/** + * @public + */ +export type TransactGetCommandOutput = Omit<__TransactGetItemsCommandOutput, "Responses"> & { + Responses?: (Omit & { + Item?: Record | undefined; + })[] | undefined; +}; +/** + * Accepts native JavaScript types instead of `AttributeValue`s, and calls + * TransactGetItemsCommand operation from {@link @aws-sdk/client-dynamodb#TransactGetItemsCommand}. + * + * JavaScript objects passed in as parameters are marshalled into `AttributeValue` shapes + * required by Amazon DynamoDB. Responses from DynamoDB are unmarshalled into plain JavaScript objects. + * + * @public + */ +export declare class TransactGetCommand extends DynamoDBDocumentClientCommand { + readonly input: TransactGetCommandInput; + protected readonly inputKeyNodes: { + TransactItems: { + "*": { + Get: { + Key: import("../commands/utils").KeyNodeChildren; + }; + }; + }; + }; + protected readonly outputKeyNodes: { + Responses: { + "*": { + Item: import("../commands/utils").KeyNodeChildren; + }; + }; + }; + protected readonly clientCommand: __TransactGetItemsCommand; + readonly middlewareStack: MiddlewareStack; + constructor(input: TransactGetCommandInput); + /** + * @internal + */ + resolveMiddleware(clientStack: MiddlewareStack, configuration: DynamoDBDocumentClientResolvedConfig, options?: __HttpHandlerOptions): Handler; +} +import type { Get, ItemResponse, TransactGetItem, TransactGetItemsCommandInput as __TransactGetItemsCommandInput, TransactGetItemsCommandOutput as __TransactGetItemsCommandOutput } from "@aws-sdk/client-dynamodb"; +import type { NativeAttributeValue } from "@aws-sdk/util-dynamodb"; diff --git 
a/amplify/functions/fetchDocuments/node_modules/@aws-sdk/lib-dynamodb/dist-types/commands/TransactWriteCommand.d.ts b/amplify/functions/fetchDocuments/node_modules/@aws-sdk/lib-dynamodb/dist-types/commands/TransactWriteCommand.d.ts new file mode 100644 index 0000000..eef56d9 --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@aws-sdk/lib-dynamodb/dist-types/commands/TransactWriteCommand.d.ts @@ -0,0 +1,92 @@ +import { TransactWriteItemsCommand as __TransactWriteItemsCommand } from "@aws-sdk/client-dynamodb"; +import { Command as $Command } from "@smithy/smithy-client"; +import { Handler, HttpHandlerOptions as __HttpHandlerOptions, MiddlewareStack } from "@smithy/types"; +import { DynamoDBDocumentClientCommand } from "../baseCommand/DynamoDBDocumentClientCommand"; +import { DynamoDBDocumentClientResolvedConfig, ServiceInputTypes, ServiceOutputTypes } from "../DynamoDBDocumentClient"; +/** + * @public + */ +export { DynamoDBDocumentClientCommand, $Command }; +/** + * @public + */ +export type TransactWriteCommandInput = Omit<__TransactWriteItemsCommandInput, "TransactItems"> & { + TransactItems: (Omit & { + ConditionCheck?: (Omit & { + Key: Record | undefined; + ExpressionAttributeValues?: Record | undefined; + }) | undefined; + Put?: (Omit & { + Item: Record | undefined; + ExpressionAttributeValues?: Record | undefined; + }) | undefined; + Delete?: (Omit & { + Key: Record | undefined; + ExpressionAttributeValues?: Record | undefined; + }) | undefined; + Update?: (Omit & { + Key: Record | undefined; + ExpressionAttributeValues?: Record | undefined; + }) | undefined; + })[] | undefined; +}; +/** + * @public + */ +export type TransactWriteCommandOutput = Omit<__TransactWriteItemsCommandOutput, "ItemCollectionMetrics"> & { + ItemCollectionMetrics?: Record & { + ItemCollectionKey?: Record | undefined; + })[]> | undefined; +}; +/** + * Accepts native JavaScript types instead of `AttributeValue`s, and calls + * TransactWriteItemsCommand operation from {@link 
@aws-sdk/client-dynamodb#TransactWriteItemsCommand}. + * + * JavaScript objects passed in as parameters are marshalled into `AttributeValue` shapes + * required by Amazon DynamoDB. Responses from DynamoDB are unmarshalled into plain JavaScript objects. + * + * @public + */ +export declare class TransactWriteCommand extends DynamoDBDocumentClientCommand { + readonly input: TransactWriteCommandInput; + protected readonly inputKeyNodes: { + TransactItems: { + "*": { + ConditionCheck: { + Key: import("../commands/utils").KeyNodeChildren; + ExpressionAttributeValues: import("../commands/utils").KeyNodeChildren; + }; + Put: { + Item: import("../commands/utils").KeyNodeChildren; + ExpressionAttributeValues: import("../commands/utils").KeyNodeChildren; + }; + Delete: { + Key: import("../commands/utils").KeyNodeChildren; + ExpressionAttributeValues: import("../commands/utils").KeyNodeChildren; + }; + Update: { + Key: import("../commands/utils").KeyNodeChildren; + ExpressionAttributeValues: import("../commands/utils").KeyNodeChildren; + }; + }; + }; + }; + protected readonly outputKeyNodes: { + ItemCollectionMetrics: { + "*": { + "*": { + ItemCollectionKey: import("../commands/utils").KeyNodeChildren; + }; + }; + }; + }; + protected readonly clientCommand: __TransactWriteItemsCommand; + readonly middlewareStack: MiddlewareStack; + constructor(input: TransactWriteCommandInput); + /** + * @internal + */ + resolveMiddleware(clientStack: MiddlewareStack, configuration: DynamoDBDocumentClientResolvedConfig, options?: __HttpHandlerOptions): Handler; +} +import type { ConditionCheck, Delete, ItemCollectionMetrics, Put, TransactWriteItem, TransactWriteItemsCommandInput as __TransactWriteItemsCommandInput, TransactWriteItemsCommandOutput as __TransactWriteItemsCommandOutput, Update } from "@aws-sdk/client-dynamodb"; +import type { NativeAttributeValue } from "@aws-sdk/util-dynamodb"; diff --git 
a/amplify/functions/fetchDocuments/node_modules/@aws-sdk/lib-dynamodb/dist-types/commands/UpdateCommand.d.ts b/amplify/functions/fetchDocuments/node_modules/@aws-sdk/lib-dynamodb/dist-types/commands/UpdateCommand.d.ts new file mode 100644 index 0000000..905e751 --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@aws-sdk/lib-dynamodb/dist-types/commands/UpdateCommand.d.ts @@ -0,0 +1,74 @@ +import { UpdateItemCommand as __UpdateItemCommand } from "@aws-sdk/client-dynamodb"; +import { Command as $Command } from "@smithy/smithy-client"; +import { Handler, HttpHandlerOptions as __HttpHandlerOptions, MiddlewareStack } from "@smithy/types"; +import { DynamoDBDocumentClientCommand } from "../baseCommand/DynamoDBDocumentClientCommand"; +import { DynamoDBDocumentClientResolvedConfig, ServiceInputTypes, ServiceOutputTypes } from "../DynamoDBDocumentClient"; +/** + * @public + */ +export { DynamoDBDocumentClientCommand, $Command }; +/** + * @public + */ +export type UpdateCommandInput = Omit<__UpdateItemCommandInput, "Key" | "AttributeUpdates" | "Expected" | "ExpressionAttributeValues"> & { + Key: Record | undefined; + AttributeUpdates?: Record & { + Value?: NativeAttributeValue | undefined; + }> | undefined; + Expected?: Record & { + Value?: NativeAttributeValue | undefined; + AttributeValueList?: NativeAttributeValue[] | undefined; + }> | undefined; + ExpressionAttributeValues?: Record | undefined; +}; +/** + * @public + */ +export type UpdateCommandOutput = Omit<__UpdateItemCommandOutput, "Attributes" | "ItemCollectionMetrics"> & { + Attributes?: Record | undefined; + ItemCollectionMetrics?: (Omit & { + ItemCollectionKey?: Record | undefined; + }) | undefined; +}; +/** + * Accepts native JavaScript types instead of `AttributeValue`s, and calls + * UpdateItemCommand operation from {@link @aws-sdk/client-dynamodb#UpdateItemCommand}. + * + * JavaScript objects passed in as parameters are marshalled into `AttributeValue` shapes + * required by Amazon DynamoDB. 
Responses from DynamoDB are unmarshalled into plain JavaScript objects. + * + * @public + */ +export declare class UpdateCommand extends DynamoDBDocumentClientCommand { + readonly input: UpdateCommandInput; + protected readonly inputKeyNodes: { + Key: import("../commands/utils").KeyNodeChildren; + AttributeUpdates: { + "*": { + Value: null; + }; + }; + Expected: { + "*": { + Value: null; + AttributeValueList: import("../commands/utils").KeyNodeChildren; + }; + }; + ExpressionAttributeValues: import("../commands/utils").KeyNodeChildren; + }; + protected readonly outputKeyNodes: { + Attributes: import("../commands/utils").KeyNodeChildren; + ItemCollectionMetrics: { + ItemCollectionKey: import("../commands/utils").KeyNodeChildren; + }; + }; + protected readonly clientCommand: __UpdateItemCommand; + readonly middlewareStack: MiddlewareStack; + constructor(input: UpdateCommandInput); + /** + * @internal + */ + resolveMiddleware(clientStack: MiddlewareStack, configuration: DynamoDBDocumentClientResolvedConfig, options?: __HttpHandlerOptions): Handler; +} +import type { AttributeValueUpdate, ExpectedAttributeValue, ItemCollectionMetrics, UpdateItemCommandInput as __UpdateItemCommandInput, UpdateItemCommandOutput as __UpdateItemCommandOutput } from "@aws-sdk/client-dynamodb"; +import type { NativeAttributeValue } from "@aws-sdk/util-dynamodb"; diff --git a/amplify/functions/fetchDocuments/node_modules/@aws-sdk/lib-dynamodb/dist-types/commands/index.d.ts b/amplify/functions/fetchDocuments/node_modules/@aws-sdk/lib-dynamodb/dist-types/commands/index.d.ts new file mode 100644 index 0000000..49e8a4e --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@aws-sdk/lib-dynamodb/dist-types/commands/index.d.ts @@ -0,0 +1,13 @@ +export * from "./BatchExecuteStatementCommand"; +export * from "./BatchGetCommand"; +export * from "./BatchWriteCommand"; +export * from "./DeleteCommand"; +export * from "./ExecuteStatementCommand"; +export * from "./ExecuteTransactionCommand"; 
+export * from "./GetCommand"; +export * from "./PutCommand"; +export * from "./QueryCommand"; +export * from "./ScanCommand"; +export * from "./TransactGetCommand"; +export * from "./TransactWriteCommand"; +export * from "./UpdateCommand"; diff --git a/amplify/functions/fetchDocuments/node_modules/@aws-sdk/lib-dynamodb/dist-types/commands/utils.d.ts b/amplify/functions/fetchDocuments/node_modules/@aws-sdk/lib-dynamodb/dist-types/commands/utils.d.ts new file mode 100644 index 0000000..d24d22c --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@aws-sdk/lib-dynamodb/dist-types/commands/utils.d.ts @@ -0,0 +1,33 @@ +import { marshallOptions, unmarshallOptions } from "@aws-sdk/util-dynamodb"; +/** + * @internal + */ +export type KeyNodeSelf = null; +/** + * @internal + */ +export declare const SELF: KeyNodeSelf; +/** + * @internal + */ +export type KeyNodeChildren = Record; +/** + * @internal + */ +export declare const ALL_VALUES: KeyNodeChildren; +/** + * @internal + */ +export declare const ALL_MEMBERS: KeyNodeChildren; +/** + * @internal + */ +export type KeyNodes = KeyNodeSelf | KeyNodeChildren; +/** + * @internal + */ +export declare const marshallInput: (obj: any, keyNodes: KeyNodeChildren, options?: marshallOptions) => any; +/** + * @internal + */ +export declare const unmarshallOutput: (obj: any, keyNodes: KeyNodeChildren, options?: unmarshallOptions) => any; diff --git a/amplify/functions/fetchDocuments/node_modules/@aws-sdk/lib-dynamodb/dist-types/index.d.ts b/amplify/functions/fetchDocuments/node_modules/@aws-sdk/lib-dynamodb/dist-types/index.d.ts new file mode 100644 index 0000000..957530d --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@aws-sdk/lib-dynamodb/dist-types/index.d.ts @@ -0,0 +1,7 @@ +export * from "./DynamoDBDocument"; +export * from "./DynamoDBDocumentClient"; +export * from "./commands"; +export * from "./pagination"; +export { NumberValueImpl as NumberValue } from "@aws-sdk/util-dynamodb"; +export { 
marshallOptions, unmarshallOptions } from "@aws-sdk/util-dynamodb"; +export { NativeAttributeValue, NativeAttributeBinary, NativeScalarAttributeValue } from "@aws-sdk/util-dynamodb"; diff --git a/amplify/functions/fetchDocuments/node_modules/@aws-sdk/lib-dynamodb/dist-types/pagination/Interfaces.d.ts b/amplify/functions/fetchDocuments/node_modules/@aws-sdk/lib-dynamodb/dist-types/pagination/Interfaces.d.ts new file mode 100644 index 0000000..f98a7b4 --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@aws-sdk/lib-dynamodb/dist-types/pagination/Interfaces.d.ts @@ -0,0 +1,13 @@ +import { PaginationConfiguration } from "@smithy/types"; +import { DynamoDBDocument } from "../DynamoDBDocument"; +import { DynamoDBDocumentClient } from "../DynamoDBDocumentClient"; +/** + * @public + */ +export { PaginationConfiguration }; +/** + * @public + */ +export interface DynamoDBDocumentPaginationConfiguration extends PaginationConfiguration { + client: DynamoDBDocument | DynamoDBDocumentClient; +} diff --git a/amplify/functions/fetchDocuments/node_modules/@aws-sdk/lib-dynamodb/dist-types/pagination/QueryPaginator.d.ts b/amplify/functions/fetchDocuments/node_modules/@aws-sdk/lib-dynamodb/dist-types/pagination/QueryPaginator.d.ts new file mode 100644 index 0000000..692d9f9 --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@aws-sdk/lib-dynamodb/dist-types/pagination/QueryPaginator.d.ts @@ -0,0 +1,11 @@ +import { Paginator } from "@smithy/types"; +import { QueryCommandInput, QueryCommandOutput } from "../commands/QueryCommand"; +import { DynamoDBDocumentPaginationConfiguration } from "./Interfaces"; +/** + * @public + */ +export { Paginator }; +/** + * @public + */ +export declare const paginateQuery: (config: DynamoDBDocumentPaginationConfiguration, input: QueryCommandInput, ...additionalArguments: any) => Paginator; diff --git a/amplify/functions/fetchDocuments/node_modules/@aws-sdk/lib-dynamodb/dist-types/pagination/ScanPaginator.d.ts 
b/amplify/functions/fetchDocuments/node_modules/@aws-sdk/lib-dynamodb/dist-types/pagination/ScanPaginator.d.ts new file mode 100644 index 0000000..b47d0a7 --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@aws-sdk/lib-dynamodb/dist-types/pagination/ScanPaginator.d.ts @@ -0,0 +1,11 @@ +import { Paginator } from "@smithy/types"; +import { ScanCommandInput, ScanCommandOutput } from "../commands/ScanCommand"; +import { DynamoDBDocumentPaginationConfiguration } from "./Interfaces"; +/** + * @public + */ +export { Paginator }; +/** + * @public + */ +export declare const paginateScan: (config: DynamoDBDocumentPaginationConfiguration, input: ScanCommandInput, ...additionalArguments: any) => Paginator; diff --git a/amplify/functions/fetchDocuments/node_modules/@aws-sdk/lib-dynamodb/dist-types/pagination/index.d.ts b/amplify/functions/fetchDocuments/node_modules/@aws-sdk/lib-dynamodb/dist-types/pagination/index.d.ts new file mode 100644 index 0000000..0d9540e --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@aws-sdk/lib-dynamodb/dist-types/pagination/index.d.ts @@ -0,0 +1,3 @@ +export * from "./Interfaces"; +export * from "./QueryPaginator"; +export * from "./ScanPaginator"; diff --git a/amplify/functions/fetchDocuments/node_modules/@aws-sdk/lib-dynamodb/dist-types/ts3.4/DynamoDBDocument.d.ts b/amplify/functions/fetchDocuments/node_modules/@aws-sdk/lib-dynamodb/dist-types/ts3.4/DynamoDBDocument.d.ts new file mode 100644 index 0000000..fbcd8b7 --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@aws-sdk/lib-dynamodb/dist-types/ts3.4/DynamoDBDocument.d.ts @@ -0,0 +1,221 @@ +import { DynamoDBClient } from "@aws-sdk/client-dynamodb"; +import { HttpHandlerOptions as __HttpHandlerOptions } from "@smithy/types"; +import { + BatchExecuteStatementCommandInput, + BatchExecuteStatementCommandOutput, +} from "./commands/BatchExecuteStatementCommand"; +import { + BatchGetCommandInput, + BatchGetCommandOutput, +} from 
"./commands/BatchGetCommand"; +import { + BatchWriteCommandInput, + BatchWriteCommandOutput, +} from "./commands/BatchWriteCommand"; +import { + DeleteCommandInput, + DeleteCommandOutput, +} from "./commands/DeleteCommand"; +import { + ExecuteStatementCommandInput, + ExecuteStatementCommandOutput, +} from "./commands/ExecuteStatementCommand"; +import { + ExecuteTransactionCommandInput, + ExecuteTransactionCommandOutput, +} from "./commands/ExecuteTransactionCommand"; +import { GetCommandInput, GetCommandOutput } from "./commands/GetCommand"; +import { PutCommandInput, PutCommandOutput } from "./commands/PutCommand"; +import { QueryCommandInput, QueryCommandOutput } from "./commands/QueryCommand"; +import { ScanCommandInput, ScanCommandOutput } from "./commands/ScanCommand"; +import { + TransactGetCommandInput, + TransactGetCommandOutput, +} from "./commands/TransactGetCommand"; +import { + TransactWriteCommandInput, + TransactWriteCommandOutput, +} from "./commands/TransactWriteCommand"; +import { + UpdateCommandInput, + UpdateCommandOutput, +} from "./commands/UpdateCommand"; +import { + DynamoDBDocumentClient, + TranslateConfig, +} from "./DynamoDBDocumentClient"; +export declare class DynamoDBDocument extends DynamoDBDocumentClient { + static from( + client: DynamoDBClient, + translateConfig?: TranslateConfig + ): DynamoDBDocument; + batchExecuteStatement( + args: BatchExecuteStatementCommandInput, + options?: __HttpHandlerOptions + ): Promise; + batchExecuteStatement( + args: BatchExecuteStatementCommandInput, + cb: (err: any, data?: BatchExecuteStatementCommandOutput) => void + ): void; + batchExecuteStatement( + args: BatchExecuteStatementCommandInput, + options: __HttpHandlerOptions, + cb: (err: any, data?: BatchExecuteStatementCommandOutput) => void + ): void; + batchGet( + args: BatchGetCommandInput, + options?: __HttpHandlerOptions + ): Promise; + batchGet( + args: BatchGetCommandInput, + cb: (err: any, data?: BatchGetCommandOutput) => void + ): void; + 
batchGet( + args: BatchGetCommandInput, + options: __HttpHandlerOptions, + cb: (err: any, data?: BatchGetCommandOutput) => void + ): void; + batchWrite( + args: BatchWriteCommandInput, + options?: __HttpHandlerOptions + ): Promise; + batchWrite( + args: BatchWriteCommandInput, + cb: (err: any, data?: BatchWriteCommandOutput) => void + ): void; + batchWrite( + args: BatchWriteCommandInput, + options: __HttpHandlerOptions, + cb: (err: any, data?: BatchWriteCommandOutput) => void + ): void; + delete( + args: DeleteCommandInput, + options?: __HttpHandlerOptions + ): Promise; + delete( + args: DeleteCommandInput, + cb: (err: any, data?: DeleteCommandOutput) => void + ): void; + delete( + args: DeleteCommandInput, + options: __HttpHandlerOptions, + cb: (err: any, data?: DeleteCommandOutput) => void + ): void; + executeStatement( + args: ExecuteStatementCommandInput, + options?: __HttpHandlerOptions + ): Promise; + executeStatement( + args: ExecuteStatementCommandInput, + cb: (err: any, data?: ExecuteStatementCommandOutput) => void + ): void; + executeStatement( + args: ExecuteStatementCommandInput, + options: __HttpHandlerOptions, + cb: (err: any, data?: ExecuteStatementCommandOutput) => void + ): void; + executeTransaction( + args: ExecuteTransactionCommandInput, + options?: __HttpHandlerOptions + ): Promise; + executeTransaction( + args: ExecuteTransactionCommandInput, + cb: (err: any, data?: ExecuteTransactionCommandOutput) => void + ): void; + executeTransaction( + args: ExecuteTransactionCommandInput, + options: __HttpHandlerOptions, + cb: (err: any, data?: ExecuteTransactionCommandOutput) => void + ): void; + get( + args: GetCommandInput, + options?: __HttpHandlerOptions + ): Promise; + get( + args: GetCommandInput, + cb: (err: any, data?: GetCommandOutput) => void + ): void; + get( + args: GetCommandInput, + options: __HttpHandlerOptions, + cb: (err: any, data?: GetCommandOutput) => void + ): void; + put( + args: PutCommandInput, + options?: __HttpHandlerOptions + 
): Promise; + put( + args: PutCommandInput, + cb: (err: any, data?: PutCommandOutput) => void + ): void; + put( + args: PutCommandInput, + options: __HttpHandlerOptions, + cb: (err: any, data?: PutCommandOutput) => void + ): void; + query( + args: QueryCommandInput, + options?: __HttpHandlerOptions + ): Promise; + query( + args: QueryCommandInput, + cb: (err: any, data?: QueryCommandOutput) => void + ): void; + query( + args: QueryCommandInput, + options: __HttpHandlerOptions, + cb: (err: any, data?: QueryCommandOutput) => void + ): void; + scan( + args: ScanCommandInput, + options?: __HttpHandlerOptions + ): Promise; + scan( + args: ScanCommandInput, + cb: (err: any, data?: ScanCommandOutput) => void + ): void; + scan( + args: ScanCommandInput, + options: __HttpHandlerOptions, + cb: (err: any, data?: ScanCommandOutput) => void + ): void; + transactGet( + args: TransactGetCommandInput, + options?: __HttpHandlerOptions + ): Promise; + transactGet( + args: TransactGetCommandInput, + cb: (err: any, data?: TransactGetCommandOutput) => void + ): void; + transactGet( + args: TransactGetCommandInput, + options: __HttpHandlerOptions, + cb: (err: any, data?: TransactGetCommandOutput) => void + ): void; + transactWrite( + args: TransactWriteCommandInput, + options?: __HttpHandlerOptions + ): Promise; + transactWrite( + args: TransactWriteCommandInput, + cb: (err: any, data?: TransactWriteCommandOutput) => void + ): void; + transactWrite( + args: TransactWriteCommandInput, + options: __HttpHandlerOptions, + cb: (err: any, data?: TransactWriteCommandOutput) => void + ): void; + update( + args: UpdateCommandInput, + options?: __HttpHandlerOptions + ): Promise; + update( + args: UpdateCommandInput, + cb: (err: any, data?: UpdateCommandOutput) => void + ): void; + update( + args: UpdateCommandInput, + options: __HttpHandlerOptions, + cb: (err: any, data?: UpdateCommandOutput) => void + ): void; +} diff --git 
a/amplify/functions/fetchDocuments/node_modules/@aws-sdk/lib-dynamodb/dist-types/ts3.4/DynamoDBDocumentClient.d.ts b/amplify/functions/fetchDocuments/node_modules/@aws-sdk/lib-dynamodb/dist-types/ts3.4/DynamoDBDocumentClient.d.ts new file mode 100644 index 0000000..67494ec --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@aws-sdk/lib-dynamodb/dist-types/ts3.4/DynamoDBDocumentClient.d.ts @@ -0,0 +1,105 @@ +import { + DynamoDBClient, + DynamoDBClientResolvedConfig, + ServiceInputTypes as __ServiceInputTypes, + ServiceOutputTypes as __ServiceOutputTypes, +} from "@aws-sdk/client-dynamodb"; +import { marshallOptions, unmarshallOptions } from "@aws-sdk/util-dynamodb"; +import { Client as __Client } from "@smithy/smithy-client"; +import { HttpHandlerOptions as __HttpHandlerOptions } from "@smithy/types"; +import { + BatchExecuteStatementCommandInput, + BatchExecuteStatementCommandOutput, +} from "./commands/BatchExecuteStatementCommand"; +import { + BatchGetCommandInput, + BatchGetCommandOutput, +} from "./commands/BatchGetCommand"; +import { + BatchWriteCommandInput, + BatchWriteCommandOutput, +} from "./commands/BatchWriteCommand"; +import { + DeleteCommandInput, + DeleteCommandOutput, +} from "./commands/DeleteCommand"; +import { + ExecuteStatementCommandInput, + ExecuteStatementCommandOutput, +} from "./commands/ExecuteStatementCommand"; +import { + ExecuteTransactionCommandInput, + ExecuteTransactionCommandOutput, +} from "./commands/ExecuteTransactionCommand"; +import { GetCommandInput, GetCommandOutput } from "./commands/GetCommand"; +import { PutCommandInput, PutCommandOutput } from "./commands/PutCommand"; +import { QueryCommandInput, QueryCommandOutput } from "./commands/QueryCommand"; +import { ScanCommandInput, ScanCommandOutput } from "./commands/ScanCommand"; +import { + TransactGetCommandInput, + TransactGetCommandOutput, +} from "./commands/TransactGetCommand"; +import { + TransactWriteCommandInput, + TransactWriteCommandOutput, +} from 
"./commands/TransactWriteCommand"; +import { + UpdateCommandInput, + UpdateCommandOutput, +} from "./commands/UpdateCommand"; +export { __Client }; +export type ServiceInputTypes = + | __ServiceInputTypes + | BatchExecuteStatementCommandInput + | BatchGetCommandInput + | BatchWriteCommandInput + | DeleteCommandInput + | ExecuteStatementCommandInput + | ExecuteTransactionCommandInput + | GetCommandInput + | PutCommandInput + | QueryCommandInput + | ScanCommandInput + | TransactGetCommandInput + | TransactWriteCommandInput + | UpdateCommandInput; +export type ServiceOutputTypes = + | __ServiceOutputTypes + | BatchExecuteStatementCommandOutput + | BatchGetCommandOutput + | BatchWriteCommandOutput + | DeleteCommandOutput + | ExecuteStatementCommandOutput + | ExecuteTransactionCommandOutput + | GetCommandOutput + | PutCommandOutput + | QueryCommandOutput + | ScanCommandOutput + | TransactGetCommandOutput + | TransactWriteCommandOutput + | UpdateCommandOutput; +export type TranslateConfig = { + marshallOptions?: marshallOptions; + unmarshallOptions?: unmarshallOptions; +}; +export type DynamoDBDocumentClientResolvedConfig = + DynamoDBClientResolvedConfig & { + translateConfig?: TranslateConfig; + }; +export declare class DynamoDBDocumentClient extends __Client< + __HttpHandlerOptions, + ServiceInputTypes, + ServiceOutputTypes, + DynamoDBDocumentClientResolvedConfig +> { + readonly config: DynamoDBDocumentClientResolvedConfig; + protected constructor( + client: DynamoDBClient, + translateConfig?: TranslateConfig + ); + static from( + client: DynamoDBClient, + translateConfig?: TranslateConfig + ): DynamoDBDocumentClient; + destroy(): void; +} diff --git a/amplify/functions/fetchDocuments/node_modules/@aws-sdk/lib-dynamodb/dist-types/ts3.4/baseCommand/DynamoDBDocumentClientCommand.d.ts b/amplify/functions/fetchDocuments/node_modules/@aws-sdk/lib-dynamodb/dist-types/ts3.4/baseCommand/DynamoDBDocumentClientCommand.d.ts new file mode 100644 index 0000000..17c787f --- 
/dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@aws-sdk/lib-dynamodb/dist-types/ts3.4/baseCommand/DynamoDBDocumentClientCommand.d.ts @@ -0,0 +1,30 @@ +import { Command as $Command } from "@smithy/smithy-client"; +import { MiddlewareStack } from "@smithy/types"; +import { KeyNodeChildren } from "../commands/utils"; +import { DynamoDBDocumentClientResolvedConfig } from "../DynamoDBDocumentClient"; +export declare abstract class DynamoDBDocumentClientCommand< + Input extends object, + Output extends object, + BaseInput extends object, + BaseOutput extends object, + ResolvedClientConfiguration +> extends $Command< + Input | BaseInput, + Output | BaseOutput, + ResolvedClientConfiguration +> { + protected abstract readonly inputKeyNodes: KeyNodeChildren; + protected abstract readonly outputKeyNodes: KeyNodeChildren; + protected abstract clientCommand: $Command< + Input | BaseInput, + Output | BaseOutput, + ResolvedClientConfiguration + >; + abstract middlewareStack: MiddlewareStack< + Input | BaseInput, + Output | BaseOutput + >; + protected addMarshallingMiddleware( + configuration: DynamoDBDocumentClientResolvedConfig + ): void; +} diff --git a/amplify/functions/fetchDocuments/node_modules/@aws-sdk/lib-dynamodb/dist-types/ts3.4/commands/BatchExecuteStatementCommand.d.ts b/amplify/functions/fetchDocuments/node_modules/@aws-sdk/lib-dynamodb/dist-types/ts3.4/commands/BatchExecuteStatementCommand.d.ts new file mode 100644 index 0000000..d8f3dfe --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@aws-sdk/lib-dynamodb/dist-types/ts3.4/commands/BatchExecuteStatementCommand.d.ts @@ -0,0 +1,96 @@ +import { BatchExecuteStatementCommand as __BatchExecuteStatementCommand } from "@aws-sdk/client-dynamodb"; +import { Command as $Command } from "@smithy/smithy-client"; +import { + Handler, + HttpHandlerOptions as __HttpHandlerOptions, + MiddlewareStack, +} from "@smithy/types"; +import { DynamoDBDocumentClientCommand } from 
"../baseCommand/DynamoDBDocumentClientCommand"; +import { + DynamoDBDocumentClientResolvedConfig, + ServiceInputTypes, + ServiceOutputTypes, +} from "../DynamoDBDocumentClient"; +export { DynamoDBDocumentClientCommand, $Command }; +export type BatchExecuteStatementCommandInput = Pick< + __BatchExecuteStatementCommandInput, + Exclude +> & { + Statements: + | (Pick< + BatchStatementRequest, + Exclude + > & { + Parameters?: NativeAttributeValue[] | undefined; + })[] + | undefined; +}; +export type BatchExecuteStatementCommandOutput = Pick< + __BatchExecuteStatementCommandOutput, + Exclude +> & { + Responses?: + | (Pick< + BatchStatementResponse, + Exclude + > & { + Error?: + | (Pick< + BatchStatementError, + Exclude + > & { + Item?: Record | undefined; + }) + | undefined; + Item?: Record | undefined; + })[] + | undefined; +}; +export declare class BatchExecuteStatementCommand extends DynamoDBDocumentClientCommand< + BatchExecuteStatementCommandInput, + BatchExecuteStatementCommandOutput, + __BatchExecuteStatementCommandInput, + __BatchExecuteStatementCommandOutput, + DynamoDBDocumentClientResolvedConfig +> { + readonly input: BatchExecuteStatementCommandInput; + protected readonly inputKeyNodes: { + Statements: { + "*": { + Parameters: import("../commands/utils").KeyNodeChildren; + }; + }; + }; + protected readonly outputKeyNodes: { + Responses: { + "*": { + Error: { + Item: import("../commands/utils").KeyNodeChildren; + }; + Item: import("../commands/utils").KeyNodeChildren; + }; + }; + }; + protected readonly clientCommand: __BatchExecuteStatementCommand; + readonly middlewareStack: MiddlewareStack< + BatchExecuteStatementCommandInput | __BatchExecuteStatementCommandInput, + BatchExecuteStatementCommandOutput | __BatchExecuteStatementCommandOutput + >; + constructor(input: BatchExecuteStatementCommandInput); + resolveMiddleware( + clientStack: MiddlewareStack, + configuration: DynamoDBDocumentClientResolvedConfig, + options?: __HttpHandlerOptions + ): Handler< + 
BatchExecuteStatementCommandInput, + BatchExecuteStatementCommandOutput + >; +} +import { + BatchExecuteStatementCommandInput as __BatchExecuteStatementCommandInput, + BatchExecuteStatementCommandOutput as __BatchExecuteStatementCommandOutput, + BatchStatementError, + BatchStatementRequest, + BatchStatementResponse, +} from "@aws-sdk/client-dynamodb"; +import { NativeAttributeValue } from "@aws-sdk/util-dynamodb"; diff --git a/amplify/functions/fetchDocuments/node_modules/@aws-sdk/lib-dynamodb/dist-types/ts3.4/commands/BatchGetCommand.d.ts b/amplify/functions/fetchDocuments/node_modules/@aws-sdk/lib-dynamodb/dist-types/ts3.4/commands/BatchGetCommand.d.ts new file mode 100644 index 0000000..6203cf6 --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@aws-sdk/lib-dynamodb/dist-types/ts3.4/commands/BatchGetCommand.d.ts @@ -0,0 +1,92 @@ +import { BatchGetItemCommand as __BatchGetItemCommand } from "@aws-sdk/client-dynamodb"; +import { Command as $Command } from "@smithy/smithy-client"; +import { + Handler, + HttpHandlerOptions as __HttpHandlerOptions, + MiddlewareStack, +} from "@smithy/types"; +import { DynamoDBDocumentClientCommand } from "../baseCommand/DynamoDBDocumentClientCommand"; +import { + DynamoDBDocumentClientResolvedConfig, + ServiceInputTypes, + ServiceOutputTypes, +} from "../DynamoDBDocumentClient"; +export { DynamoDBDocumentClientCommand, $Command }; +export type BatchGetCommandInput = Pick< + __BatchGetItemCommandInput, + Exclude +> & { + RequestItems: + | Record< + string, + Pick> & { + Keys: Record[] | undefined; + } + > + | undefined; +}; +export type BatchGetCommandOutput = Pick< + __BatchGetItemCommandOutput, + Exclude +> & { + Responses?: + | Record[]> + | undefined; + UnprocessedKeys?: + | Record< + string, + Pick> & { + Keys: Record[] | undefined; + } + > + | undefined; +}; +export declare class BatchGetCommand extends DynamoDBDocumentClientCommand< + BatchGetCommandInput, + BatchGetCommandOutput, + __BatchGetItemCommandInput, + 
__BatchGetItemCommandOutput, + DynamoDBDocumentClientResolvedConfig +> { + readonly input: BatchGetCommandInput; + protected readonly inputKeyNodes: { + RequestItems: { + "*": { + Keys: { + "*": import("../commands/utils").KeyNodeChildren; + }; + }; + }; + }; + protected readonly outputKeyNodes: { + Responses: { + "*": { + "*": import("../commands/utils").KeyNodeChildren; + }; + }; + UnprocessedKeys: { + "*": { + Keys: { + "*": import("../commands/utils").KeyNodeChildren; + }; + }; + }; + }; + protected readonly clientCommand: __BatchGetItemCommand; + readonly middlewareStack: MiddlewareStack< + BatchGetCommandInput | __BatchGetItemCommandInput, + BatchGetCommandOutput | __BatchGetItemCommandOutput + >; + constructor(input: BatchGetCommandInput); + resolveMiddleware( + clientStack: MiddlewareStack, + configuration: DynamoDBDocumentClientResolvedConfig, + options?: __HttpHandlerOptions + ): Handler; +} +import { + BatchGetItemCommandInput as __BatchGetItemCommandInput, + BatchGetItemCommandOutput as __BatchGetItemCommandOutput, + KeysAndAttributes, +} from "@aws-sdk/client-dynamodb"; +import { NativeAttributeValue } from "@aws-sdk/util-dynamodb"; diff --git a/amplify/functions/fetchDocuments/node_modules/@aws-sdk/lib-dynamodb/dist-types/ts3.4/commands/BatchWriteCommand.d.ts b/amplify/functions/fetchDocuments/node_modules/@aws-sdk/lib-dynamodb/dist-types/ts3.4/commands/BatchWriteCommand.d.ts new file mode 100644 index 0000000..a02d177 --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@aws-sdk/lib-dynamodb/dist-types/ts3.4/commands/BatchWriteCommand.d.ts @@ -0,0 +1,142 @@ +import { BatchWriteItemCommand as __BatchWriteItemCommand } from "@aws-sdk/client-dynamodb"; +import { Command as $Command } from "@smithy/smithy-client"; +import { + Handler, + HttpHandlerOptions as __HttpHandlerOptions, + MiddlewareStack, +} from "@smithy/types"; +import { DynamoDBDocumentClientCommand } from "../baseCommand/DynamoDBDocumentClientCommand"; +import { + 
DynamoDBDocumentClientResolvedConfig, + ServiceInputTypes, + ServiceOutputTypes, +} from "../DynamoDBDocumentClient"; +export { DynamoDBDocumentClientCommand, $Command }; +export type BatchWriteCommandInput = Pick< + __BatchWriteItemCommandInput, + Exclude +> & { + RequestItems: + | Record< + string, + (Pick< + WriteRequest, + Exclude + > & { + PutRequest?: + | (Pick> & { + Item: Record | undefined; + }) + | undefined; + DeleteRequest?: + | (Pick> & { + Key: Record | undefined; + }) + | undefined; + })[] + > + | undefined; +}; +export type BatchWriteCommandOutput = Pick< + __BatchWriteItemCommandOutput, + Exclude< + keyof __BatchWriteItemCommandOutput, + "UnprocessedItems" | "ItemCollectionMetrics" + > +> & { + UnprocessedItems?: + | Record< + string, + (Pick< + WriteRequest, + Exclude + > & { + PutRequest?: + | (Pick> & { + Item: Record | undefined; + }) + | undefined; + DeleteRequest?: + | (Pick> & { + Key: Record | undefined; + }) + | undefined; + })[] + > + | undefined; + ItemCollectionMetrics?: + | Record< + string, + (Pick< + ItemCollectionMetrics, + Exclude + > & { + ItemCollectionKey?: Record | undefined; + })[] + > + | undefined; +}; +export declare class BatchWriteCommand extends DynamoDBDocumentClientCommand< + BatchWriteCommandInput, + BatchWriteCommandOutput, + __BatchWriteItemCommandInput, + __BatchWriteItemCommandOutput, + DynamoDBDocumentClientResolvedConfig +> { + readonly input: BatchWriteCommandInput; + protected readonly inputKeyNodes: { + RequestItems: { + "*": { + "*": { + PutRequest: { + Item: import("../commands/utils").KeyNodeChildren; + }; + DeleteRequest: { + Key: import("../commands/utils").KeyNodeChildren; + }; + }; + }; + }; + }; + protected readonly outputKeyNodes: { + UnprocessedItems: { + "*": { + "*": { + PutRequest: { + Item: import("../commands/utils").KeyNodeChildren; + }; + DeleteRequest: { + Key: import("../commands/utils").KeyNodeChildren; + }; + }; + }; + }; + ItemCollectionMetrics: { + "*": { + "*": { + ItemCollectionKey: 
import("../commands/utils").KeyNodeChildren; + }; + }; + }; + }; + protected readonly clientCommand: __BatchWriteItemCommand; + readonly middlewareStack: MiddlewareStack< + BatchWriteCommandInput | __BatchWriteItemCommandInput, + BatchWriteCommandOutput | __BatchWriteItemCommandOutput + >; + constructor(input: BatchWriteCommandInput); + resolveMiddleware( + clientStack: MiddlewareStack, + configuration: DynamoDBDocumentClientResolvedConfig, + options?: __HttpHandlerOptions + ): Handler; +} +import { + BatchWriteItemCommandInput as __BatchWriteItemCommandInput, + BatchWriteItemCommandOutput as __BatchWriteItemCommandOutput, + DeleteRequest, + ItemCollectionMetrics, + PutRequest, + WriteRequest, +} from "@aws-sdk/client-dynamodb"; +import { NativeAttributeValue } from "@aws-sdk/util-dynamodb"; diff --git a/amplify/functions/fetchDocuments/node_modules/@aws-sdk/lib-dynamodb/dist-types/ts3.4/commands/DeleteCommand.d.ts b/amplify/functions/fetchDocuments/node_modules/@aws-sdk/lib-dynamodb/dist-types/ts3.4/commands/DeleteCommand.d.ts new file mode 100644 index 0000000..9906c10 --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@aws-sdk/lib-dynamodb/dist-types/ts3.4/commands/DeleteCommand.d.ts @@ -0,0 +1,96 @@ +import { DeleteItemCommand as __DeleteItemCommand } from "@aws-sdk/client-dynamodb"; +import { Command as $Command } from "@smithy/smithy-client"; +import { + Handler, + HttpHandlerOptions as __HttpHandlerOptions, + MiddlewareStack, +} from "@smithy/types"; +import { DynamoDBDocumentClientCommand } from "../baseCommand/DynamoDBDocumentClientCommand"; +import { + DynamoDBDocumentClientResolvedConfig, + ServiceInputTypes, + ServiceOutputTypes, +} from "../DynamoDBDocumentClient"; +export { DynamoDBDocumentClientCommand, $Command }; +export type DeleteCommandInput = Pick< + __DeleteItemCommandInput, + Exclude< + keyof __DeleteItemCommandInput, + "Key" | "Expected" | "ExpressionAttributeValues" + > +> & { + Key: Record | undefined; + Expected?: + | 
Record< + string, + Pick< + ExpectedAttributeValue, + Exclude + > & { + Value?: NativeAttributeValue | undefined; + AttributeValueList?: NativeAttributeValue[] | undefined; + } + > + | undefined; + ExpressionAttributeValues?: Record | undefined; +}; +export type DeleteCommandOutput = Pick< + __DeleteItemCommandOutput, + Exclude< + keyof __DeleteItemCommandOutput, + "Attributes" | "ItemCollectionMetrics" + > +> & { + Attributes?: Record | undefined; + ItemCollectionMetrics?: + | (Pick< + ItemCollectionMetrics, + Exclude + > & { + ItemCollectionKey?: Record | undefined; + }) + | undefined; +}; +export declare class DeleteCommand extends DynamoDBDocumentClientCommand< + DeleteCommandInput, + DeleteCommandOutput, + __DeleteItemCommandInput, + __DeleteItemCommandOutput, + DynamoDBDocumentClientResolvedConfig +> { + readonly input: DeleteCommandInput; + protected readonly inputKeyNodes: { + Key: import("../commands/utils").KeyNodeChildren; + Expected: { + "*": { + Value: null; + AttributeValueList: import("../commands/utils").KeyNodeChildren; + }; + }; + ExpressionAttributeValues: import("../commands/utils").KeyNodeChildren; + }; + protected readonly outputKeyNodes: { + Attributes: import("../commands/utils").KeyNodeChildren; + ItemCollectionMetrics: { + ItemCollectionKey: import("../commands/utils").KeyNodeChildren; + }; + }; + protected readonly clientCommand: __DeleteItemCommand; + readonly middlewareStack: MiddlewareStack< + DeleteCommandInput | __DeleteItemCommandInput, + DeleteCommandOutput | __DeleteItemCommandOutput + >; + constructor(input: DeleteCommandInput); + resolveMiddleware( + clientStack: MiddlewareStack, + configuration: DynamoDBDocumentClientResolvedConfig, + options?: __HttpHandlerOptions + ): Handler; +} +import { + DeleteItemCommandInput as __DeleteItemCommandInput, + DeleteItemCommandOutput as __DeleteItemCommandOutput, + ExpectedAttributeValue, + ItemCollectionMetrics, +} from "@aws-sdk/client-dynamodb"; +import { NativeAttributeValue } from 
"@aws-sdk/util-dynamodb"; diff --git a/amplify/functions/fetchDocuments/node_modules/@aws-sdk/lib-dynamodb/dist-types/ts3.4/commands/ExecuteStatementCommand.d.ts b/amplify/functions/fetchDocuments/node_modules/@aws-sdk/lib-dynamodb/dist-types/ts3.4/commands/ExecuteStatementCommand.d.ts new file mode 100644 index 0000000..938727c --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@aws-sdk/lib-dynamodb/dist-types/ts3.4/commands/ExecuteStatementCommand.d.ts @@ -0,0 +1,61 @@ +import { ExecuteStatementCommand as __ExecuteStatementCommand } from "@aws-sdk/client-dynamodb"; +import { Command as $Command } from "@smithy/smithy-client"; +import { + Handler, + HttpHandlerOptions as __HttpHandlerOptions, + MiddlewareStack, +} from "@smithy/types"; +import { DynamoDBDocumentClientCommand } from "../baseCommand/DynamoDBDocumentClientCommand"; +import { + DynamoDBDocumentClientResolvedConfig, + ServiceInputTypes, + ServiceOutputTypes, +} from "../DynamoDBDocumentClient"; +export { DynamoDBDocumentClientCommand, $Command }; +export type ExecuteStatementCommandInput = Pick< + __ExecuteStatementCommandInput, + Exclude +> & { + Parameters?: NativeAttributeValue[] | undefined; +}; +export type ExecuteStatementCommandOutput = Pick< + __ExecuteStatementCommandOutput, + Exclude +> & { + Items?: Record[] | undefined; + LastEvaluatedKey?: Record | undefined; +}; +export declare class ExecuteStatementCommand extends DynamoDBDocumentClientCommand< + ExecuteStatementCommandInput, + ExecuteStatementCommandOutput, + __ExecuteStatementCommandInput, + __ExecuteStatementCommandOutput, + DynamoDBDocumentClientResolvedConfig +> { + readonly input: ExecuteStatementCommandInput; + protected readonly inputKeyNodes: { + Parameters: import("../commands/utils").KeyNodeChildren; + }; + protected readonly outputKeyNodes: { + Items: { + "*": import("../commands/utils").KeyNodeChildren; + }; + LastEvaluatedKey: import("../commands/utils").KeyNodeChildren; + }; + protected readonly 
clientCommand: __ExecuteStatementCommand; + readonly middlewareStack: MiddlewareStack< + ExecuteStatementCommandInput | __ExecuteStatementCommandInput, + ExecuteStatementCommandOutput | __ExecuteStatementCommandOutput + >; + constructor(input: ExecuteStatementCommandInput); + resolveMiddleware( + clientStack: MiddlewareStack, + configuration: DynamoDBDocumentClientResolvedConfig, + options?: __HttpHandlerOptions + ): Handler; +} +import { + ExecuteStatementCommandInput as __ExecuteStatementCommandInput, + ExecuteStatementCommandOutput as __ExecuteStatementCommandOutput, +} from "@aws-sdk/client-dynamodb"; +import { NativeAttributeValue } from "@aws-sdk/util-dynamodb"; diff --git a/amplify/functions/fetchDocuments/node_modules/@aws-sdk/lib-dynamodb/dist-types/ts3.4/commands/ExecuteTransactionCommand.d.ts b/amplify/functions/fetchDocuments/node_modules/@aws-sdk/lib-dynamodb/dist-types/ts3.4/commands/ExecuteTransactionCommand.d.ts new file mode 100644 index 0000000..10205be --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@aws-sdk/lib-dynamodb/dist-types/ts3.4/commands/ExecuteTransactionCommand.d.ts @@ -0,0 +1,78 @@ +import { ExecuteTransactionCommand as __ExecuteTransactionCommand } from "@aws-sdk/client-dynamodb"; +import { Command as $Command } from "@smithy/smithy-client"; +import { + Handler, + HttpHandlerOptions as __HttpHandlerOptions, + MiddlewareStack, +} from "@smithy/types"; +import { DynamoDBDocumentClientCommand } from "../baseCommand/DynamoDBDocumentClientCommand"; +import { + DynamoDBDocumentClientResolvedConfig, + ServiceInputTypes, + ServiceOutputTypes, +} from "../DynamoDBDocumentClient"; +export { DynamoDBDocumentClientCommand, $Command }; +export type ExecuteTransactionCommandInput = Pick< + __ExecuteTransactionCommandInput, + Exclude +> & { + TransactStatements: + | (Pick< + ParameterizedStatement, + Exclude + > & { + Parameters?: NativeAttributeValue[] | undefined; + })[] + | undefined; +}; +export type 
ExecuteTransactionCommandOutput = Pick< + __ExecuteTransactionCommandOutput, + Exclude +> & { + Responses?: + | (Pick> & { + Item?: Record | undefined; + })[] + | undefined; +}; +export declare class ExecuteTransactionCommand extends DynamoDBDocumentClientCommand< + ExecuteTransactionCommandInput, + ExecuteTransactionCommandOutput, + __ExecuteTransactionCommandInput, + __ExecuteTransactionCommandOutput, + DynamoDBDocumentClientResolvedConfig +> { + readonly input: ExecuteTransactionCommandInput; + protected readonly inputKeyNodes: { + TransactStatements: { + "*": { + Parameters: import("../commands/utils").KeyNodeChildren; + }; + }; + }; + protected readonly outputKeyNodes: { + Responses: { + "*": { + Item: import("../commands/utils").KeyNodeChildren; + }; + }; + }; + protected readonly clientCommand: __ExecuteTransactionCommand; + readonly middlewareStack: MiddlewareStack< + ExecuteTransactionCommandInput | __ExecuteTransactionCommandInput, + ExecuteTransactionCommandOutput | __ExecuteTransactionCommandOutput + >; + constructor(input: ExecuteTransactionCommandInput); + resolveMiddleware( + clientStack: MiddlewareStack, + configuration: DynamoDBDocumentClientResolvedConfig, + options?: __HttpHandlerOptions + ): Handler; +} +import { + ExecuteTransactionCommandInput as __ExecuteTransactionCommandInput, + ExecuteTransactionCommandOutput as __ExecuteTransactionCommandOutput, + ItemResponse, + ParameterizedStatement, +} from "@aws-sdk/client-dynamodb"; +import { NativeAttributeValue } from "@aws-sdk/util-dynamodb"; diff --git a/amplify/functions/fetchDocuments/node_modules/@aws-sdk/lib-dynamodb/dist-types/ts3.4/commands/GetCommand.d.ts b/amplify/functions/fetchDocuments/node_modules/@aws-sdk/lib-dynamodb/dist-types/ts3.4/commands/GetCommand.d.ts new file mode 100644 index 0000000..dba5fd4 --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@aws-sdk/lib-dynamodb/dist-types/ts3.4/commands/GetCommand.d.ts @@ -0,0 +1,57 @@ +import { GetItemCommand as 
__GetItemCommand } from "@aws-sdk/client-dynamodb"; +import { Command as $Command } from "@smithy/smithy-client"; +import { + Handler, + HttpHandlerOptions as __HttpHandlerOptions, + MiddlewareStack, +} from "@smithy/types"; +import { DynamoDBDocumentClientCommand } from "../baseCommand/DynamoDBDocumentClientCommand"; +import { + DynamoDBDocumentClientResolvedConfig, + ServiceInputTypes, + ServiceOutputTypes, +} from "../DynamoDBDocumentClient"; +export { DynamoDBDocumentClientCommand, $Command }; +export type GetCommandInput = Pick< + __GetItemCommandInput, + Exclude +> & { + Key: Record | undefined; +}; +export type GetCommandOutput = Pick< + __GetItemCommandOutput, + Exclude +> & { + Item?: Record | undefined; +}; +export declare class GetCommand extends DynamoDBDocumentClientCommand< + GetCommandInput, + GetCommandOutput, + __GetItemCommandInput, + __GetItemCommandOutput, + DynamoDBDocumentClientResolvedConfig +> { + readonly input: GetCommandInput; + protected readonly inputKeyNodes: { + Key: import("../commands/utils").KeyNodeChildren; + }; + protected readonly outputKeyNodes: { + Item: import("../commands/utils").KeyNodeChildren; + }; + protected readonly clientCommand: __GetItemCommand; + readonly middlewareStack: MiddlewareStack< + GetCommandInput | __GetItemCommandInput, + GetCommandOutput | __GetItemCommandOutput + >; + constructor(input: GetCommandInput); + resolveMiddleware( + clientStack: MiddlewareStack, + configuration: DynamoDBDocumentClientResolvedConfig, + options?: __HttpHandlerOptions + ): Handler; +} +import { + GetItemCommandInput as __GetItemCommandInput, + GetItemCommandOutput as __GetItemCommandOutput, +} from "@aws-sdk/client-dynamodb"; +import { NativeAttributeValue } from "@aws-sdk/util-dynamodb"; diff --git a/amplify/functions/fetchDocuments/node_modules/@aws-sdk/lib-dynamodb/dist-types/ts3.4/commands/PutCommand.d.ts b/amplify/functions/fetchDocuments/node_modules/@aws-sdk/lib-dynamodb/dist-types/ts3.4/commands/PutCommand.d.ts new file 
mode 100644 index 0000000..af2ca2c --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@aws-sdk/lib-dynamodb/dist-types/ts3.4/commands/PutCommand.d.ts @@ -0,0 +1,93 @@ +import { PutItemCommand as __PutItemCommand } from "@aws-sdk/client-dynamodb"; +import { Command as $Command } from "@smithy/smithy-client"; +import { + Handler, + HttpHandlerOptions as __HttpHandlerOptions, + MiddlewareStack, +} from "@smithy/types"; +import { DynamoDBDocumentClientCommand } from "../baseCommand/DynamoDBDocumentClientCommand"; +import { + DynamoDBDocumentClientResolvedConfig, + ServiceInputTypes, + ServiceOutputTypes, +} from "../DynamoDBDocumentClient"; +export { DynamoDBDocumentClientCommand, $Command }; +export type PutCommandInput = Pick< + __PutItemCommandInput, + Exclude< + keyof __PutItemCommandInput, + "Item" | "Expected" | "ExpressionAttributeValues" + > +> & { + Item: Record | undefined; + Expected?: + | Record< + string, + Pick< + ExpectedAttributeValue, + Exclude + > & { + Value?: NativeAttributeValue | undefined; + AttributeValueList?: NativeAttributeValue[] | undefined; + } + > + | undefined; + ExpressionAttributeValues?: Record | undefined; +}; +export type PutCommandOutput = Pick< + __PutItemCommandOutput, + Exclude +> & { + Attributes?: Record | undefined; + ItemCollectionMetrics?: + | (Pick< + ItemCollectionMetrics, + Exclude + > & { + ItemCollectionKey?: Record | undefined; + }) + | undefined; +}; +export declare class PutCommand extends DynamoDBDocumentClientCommand< + PutCommandInput, + PutCommandOutput, + __PutItemCommandInput, + __PutItemCommandOutput, + DynamoDBDocumentClientResolvedConfig +> { + readonly input: PutCommandInput; + protected readonly inputKeyNodes: { + Item: import("../commands/utils").KeyNodeChildren; + Expected: { + "*": { + Value: null; + AttributeValueList: import("../commands/utils").KeyNodeChildren; + }; + }; + ExpressionAttributeValues: import("../commands/utils").KeyNodeChildren; + }; + protected readonly outputKeyNodes: 
{ + Attributes: import("../commands/utils").KeyNodeChildren; + ItemCollectionMetrics: { + ItemCollectionKey: import("../commands/utils").KeyNodeChildren; + }; + }; + protected readonly clientCommand: __PutItemCommand; + readonly middlewareStack: MiddlewareStack< + PutCommandInput | __PutItemCommandInput, + PutCommandOutput | __PutItemCommandOutput + >; + constructor(input: PutCommandInput); + resolveMiddleware( + clientStack: MiddlewareStack, + configuration: DynamoDBDocumentClientResolvedConfig, + options?: __HttpHandlerOptions + ): Handler; +} +import { + ExpectedAttributeValue, + ItemCollectionMetrics, + PutItemCommandInput as __PutItemCommandInput, + PutItemCommandOutput as __PutItemCommandOutput, +} from "@aws-sdk/client-dynamodb"; +import { NativeAttributeValue } from "@aws-sdk/util-dynamodb"; diff --git a/amplify/functions/fetchDocuments/node_modules/@aws-sdk/lib-dynamodb/dist-types/ts3.4/commands/QueryCommand.d.ts b/amplify/functions/fetchDocuments/node_modules/@aws-sdk/lib-dynamodb/dist-types/ts3.4/commands/QueryCommand.d.ts new file mode 100644 index 0000000..80c57e9 --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@aws-sdk/lib-dynamodb/dist-types/ts3.4/commands/QueryCommand.d.ts @@ -0,0 +1,96 @@ +import { QueryCommand as __QueryCommand } from "@aws-sdk/client-dynamodb"; +import { Command as $Command } from "@smithy/smithy-client"; +import { + Handler, + HttpHandlerOptions as __HttpHandlerOptions, + MiddlewareStack, +} from "@smithy/types"; +import { DynamoDBDocumentClientCommand } from "../baseCommand/DynamoDBDocumentClientCommand"; +import { + DynamoDBDocumentClientResolvedConfig, + ServiceInputTypes, + ServiceOutputTypes, +} from "../DynamoDBDocumentClient"; +export { DynamoDBDocumentClientCommand, $Command }; +export type QueryCommandInput = Pick< + __QueryCommandInput, + Exclude< + keyof __QueryCommandInput, + | "KeyConditions" + | "QueryFilter" + | "ExclusiveStartKey" + | "ExpressionAttributeValues" + > +> & { + KeyConditions?: + | 
Record< + string, + Pick> & { + AttributeValueList?: NativeAttributeValue[] | undefined; + } + > + | undefined; + QueryFilter?: + | Record< + string, + Pick> & { + AttributeValueList?: NativeAttributeValue[] | undefined; + } + > + | undefined; + ExclusiveStartKey?: Record | undefined; + ExpressionAttributeValues?: Record | undefined; +}; +export type QueryCommandOutput = Pick< + __QueryCommandOutput, + Exclude +> & { + Items?: Record[] | undefined; + LastEvaluatedKey?: Record | undefined; +}; +export declare class QueryCommand extends DynamoDBDocumentClientCommand< + QueryCommandInput, + QueryCommandOutput, + __QueryCommandInput, + __QueryCommandOutput, + DynamoDBDocumentClientResolvedConfig +> { + readonly input: QueryCommandInput; + protected readonly inputKeyNodes: { + KeyConditions: { + "*": { + AttributeValueList: import("../commands/utils").KeyNodeChildren; + }; + }; + QueryFilter: { + "*": { + AttributeValueList: import("../commands/utils").KeyNodeChildren; + }; + }; + ExclusiveStartKey: import("../commands/utils").KeyNodeChildren; + ExpressionAttributeValues: import("../commands/utils").KeyNodeChildren; + }; + protected readonly outputKeyNodes: { + Items: { + "*": import("../commands/utils").KeyNodeChildren; + }; + LastEvaluatedKey: import("../commands/utils").KeyNodeChildren; + }; + protected readonly clientCommand: __QueryCommand; + readonly middlewareStack: MiddlewareStack< + QueryCommandInput | __QueryCommandInput, + QueryCommandOutput | __QueryCommandOutput + >; + constructor(input: QueryCommandInput); + resolveMiddleware( + clientStack: MiddlewareStack, + configuration: DynamoDBDocumentClientResolvedConfig, + options?: __HttpHandlerOptions + ): Handler; +} +import { + Condition, + QueryCommandInput as __QueryCommandInput, + QueryCommandOutput as __QueryCommandOutput, +} from "@aws-sdk/client-dynamodb"; +import { NativeAttributeValue } from "@aws-sdk/util-dynamodb"; diff --git 
a/amplify/functions/fetchDocuments/node_modules/@aws-sdk/lib-dynamodb/dist-types/ts3.4/commands/ScanCommand.d.ts b/amplify/functions/fetchDocuments/node_modules/@aws-sdk/lib-dynamodb/dist-types/ts3.4/commands/ScanCommand.d.ts new file mode 100644 index 0000000..c2dc93b --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@aws-sdk/lib-dynamodb/dist-types/ts3.4/commands/ScanCommand.d.ts @@ -0,0 +1,80 @@ +import { ScanCommand as __ScanCommand } from "@aws-sdk/client-dynamodb"; +import { Command as $Command } from "@smithy/smithy-client"; +import { + Handler, + HttpHandlerOptions as __HttpHandlerOptions, + MiddlewareStack, +} from "@smithy/types"; +import { DynamoDBDocumentClientCommand } from "../baseCommand/DynamoDBDocumentClientCommand"; +import { + DynamoDBDocumentClientResolvedConfig, + ServiceInputTypes, + ServiceOutputTypes, +} from "../DynamoDBDocumentClient"; +export { DynamoDBDocumentClientCommand, $Command }; +export type ScanCommandInput = Pick< + __ScanCommandInput, + Exclude< + keyof __ScanCommandInput, + "ScanFilter" | "ExclusiveStartKey" | "ExpressionAttributeValues" + > +> & { + ScanFilter?: + | Record< + string, + Pick> & { + AttributeValueList?: NativeAttributeValue[] | undefined; + } + > + | undefined; + ExclusiveStartKey?: Record | undefined; + ExpressionAttributeValues?: Record | undefined; +}; +export type ScanCommandOutput = Pick< + __ScanCommandOutput, + Exclude +> & { + Items?: Record[] | undefined; + LastEvaluatedKey?: Record | undefined; +}; +export declare class ScanCommand extends DynamoDBDocumentClientCommand< + ScanCommandInput, + ScanCommandOutput, + __ScanCommandInput, + __ScanCommandOutput, + DynamoDBDocumentClientResolvedConfig +> { + readonly input: ScanCommandInput; + protected readonly inputKeyNodes: { + ScanFilter: { + "*": { + AttributeValueList: import("../commands/utils").KeyNodeChildren; + }; + }; + ExclusiveStartKey: import("../commands/utils").KeyNodeChildren; + ExpressionAttributeValues: 
import("../commands/utils").KeyNodeChildren; + }; + protected readonly outputKeyNodes: { + Items: { + "*": import("../commands/utils").KeyNodeChildren; + }; + LastEvaluatedKey: import("../commands/utils").KeyNodeChildren; + }; + protected readonly clientCommand: __ScanCommand; + readonly middlewareStack: MiddlewareStack< + ScanCommandInput | __ScanCommandInput, + ScanCommandOutput | __ScanCommandOutput + >; + constructor(input: ScanCommandInput); + resolveMiddleware( + clientStack: MiddlewareStack, + configuration: DynamoDBDocumentClientResolvedConfig, + options?: __HttpHandlerOptions + ): Handler; +} +import { + Condition, + ScanCommandInput as __ScanCommandInput, + ScanCommandOutput as __ScanCommandOutput, +} from "@aws-sdk/client-dynamodb"; +import { NativeAttributeValue } from "@aws-sdk/util-dynamodb"; diff --git a/amplify/functions/fetchDocuments/node_modules/@aws-sdk/lib-dynamodb/dist-types/ts3.4/commands/TransactGetCommand.d.ts b/amplify/functions/fetchDocuments/node_modules/@aws-sdk/lib-dynamodb/dist-types/ts3.4/commands/TransactGetCommand.d.ts new file mode 100644 index 0000000..6568c81 --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@aws-sdk/lib-dynamodb/dist-types/ts3.4/commands/TransactGetCommand.d.ts @@ -0,0 +1,82 @@ +import { TransactGetItemsCommand as __TransactGetItemsCommand } from "@aws-sdk/client-dynamodb"; +import { Command as $Command } from "@smithy/smithy-client"; +import { + Handler, + HttpHandlerOptions as __HttpHandlerOptions, + MiddlewareStack, +} from "@smithy/types"; +import { DynamoDBDocumentClientCommand } from "../baseCommand/DynamoDBDocumentClientCommand"; +import { + DynamoDBDocumentClientResolvedConfig, + ServiceInputTypes, + ServiceOutputTypes, +} from "../DynamoDBDocumentClient"; +export { DynamoDBDocumentClientCommand, $Command }; +export type TransactGetCommandInput = Pick< + __TransactGetItemsCommandInput, + Exclude +> & { + TransactItems: + | (Pick> & { + Get: + | (Pick> & { + Key: Record | undefined; + }) 
+ | undefined; + })[] + | undefined; +}; +export type TransactGetCommandOutput = Pick< + __TransactGetItemsCommandOutput, + Exclude +> & { + Responses?: + | (Pick> & { + Item?: Record | undefined; + })[] + | undefined; +}; +export declare class TransactGetCommand extends DynamoDBDocumentClientCommand< + TransactGetCommandInput, + TransactGetCommandOutput, + __TransactGetItemsCommandInput, + __TransactGetItemsCommandOutput, + DynamoDBDocumentClientResolvedConfig +> { + readonly input: TransactGetCommandInput; + protected readonly inputKeyNodes: { + TransactItems: { + "*": { + Get: { + Key: import("../commands/utils").KeyNodeChildren; + }; + }; + }; + }; + protected readonly outputKeyNodes: { + Responses: { + "*": { + Item: import("../commands/utils").KeyNodeChildren; + }; + }; + }; + protected readonly clientCommand: __TransactGetItemsCommand; + readonly middlewareStack: MiddlewareStack< + TransactGetCommandInput | __TransactGetItemsCommandInput, + TransactGetCommandOutput | __TransactGetItemsCommandOutput + >; + constructor(input: TransactGetCommandInput); + resolveMiddleware( + clientStack: MiddlewareStack, + configuration: DynamoDBDocumentClientResolvedConfig, + options?: __HttpHandlerOptions + ): Handler; +} +import { + Get, + ItemResponse, + TransactGetItem, + TransactGetItemsCommandInput as __TransactGetItemsCommandInput, + TransactGetItemsCommandOutput as __TransactGetItemsCommandOutput, +} from "@aws-sdk/client-dynamodb"; +import { NativeAttributeValue } from "@aws-sdk/util-dynamodb"; diff --git a/amplify/functions/fetchDocuments/node_modules/@aws-sdk/lib-dynamodb/dist-types/ts3.4/commands/TransactWriteCommand.d.ts b/amplify/functions/fetchDocuments/node_modules/@aws-sdk/lib-dynamodb/dist-types/ts3.4/commands/TransactWriteCommand.d.ts new file mode 100644 index 0000000..91ba34e --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@aws-sdk/lib-dynamodb/dist-types/ts3.4/commands/TransactWriteCommand.d.ts @@ -0,0 +1,151 @@ +import { 
TransactWriteItemsCommand as __TransactWriteItemsCommand } from "@aws-sdk/client-dynamodb"; +import { Command as $Command } from "@smithy/smithy-client"; +import { + Handler, + HttpHandlerOptions as __HttpHandlerOptions, + MiddlewareStack, +} from "@smithy/types"; +import { DynamoDBDocumentClientCommand } from "../baseCommand/DynamoDBDocumentClientCommand"; +import { + DynamoDBDocumentClientResolvedConfig, + ServiceInputTypes, + ServiceOutputTypes, +} from "../DynamoDBDocumentClient"; +export { DynamoDBDocumentClientCommand, $Command }; +export type TransactWriteCommandInput = Pick< + __TransactWriteItemsCommandInput, + Exclude +> & { + TransactItems: + | (Pick< + TransactWriteItem, + Exclude< + keyof TransactWriteItem, + "ConditionCheck" | "Put" | "Delete" | "Update" + > + > & { + ConditionCheck?: + | (Pick< + ConditionCheck, + Exclude + > & { + Key: Record | undefined; + ExpressionAttributeValues?: + | Record + | undefined; + }) + | undefined; + Put?: + | (Pick< + Put, + Exclude + > & { + Item: Record | undefined; + ExpressionAttributeValues?: + | Record + | undefined; + }) + | undefined; + Delete?: + | (Pick< + Delete, + Exclude + > & { + Key: Record | undefined; + ExpressionAttributeValues?: + | Record + | undefined; + }) + | undefined; + Update?: + | (Pick< + Update, + Exclude + > & { + Key: Record | undefined; + ExpressionAttributeValues?: + | Record + | undefined; + }) + | undefined; + })[] + | undefined; +}; +export type TransactWriteCommandOutput = Pick< + __TransactWriteItemsCommandOutput, + Exclude +> & { + ItemCollectionMetrics?: + | Record< + string, + (Pick< + ItemCollectionMetrics, + Exclude + > & { + ItemCollectionKey?: Record | undefined; + })[] + > + | undefined; +}; +export declare class TransactWriteCommand extends DynamoDBDocumentClientCommand< + TransactWriteCommandInput, + TransactWriteCommandOutput, + __TransactWriteItemsCommandInput, + __TransactWriteItemsCommandOutput, + DynamoDBDocumentClientResolvedConfig +> { + readonly input: 
TransactWriteCommandInput; + protected readonly inputKeyNodes: { + TransactItems: { + "*": { + ConditionCheck: { + Key: import("../commands/utils").KeyNodeChildren; + ExpressionAttributeValues: import("../commands/utils").KeyNodeChildren; + }; + Put: { + Item: import("../commands/utils").KeyNodeChildren; + ExpressionAttributeValues: import("../commands/utils").KeyNodeChildren; + }; + Delete: { + Key: import("../commands/utils").KeyNodeChildren; + ExpressionAttributeValues: import("../commands/utils").KeyNodeChildren; + }; + Update: { + Key: import("../commands/utils").KeyNodeChildren; + ExpressionAttributeValues: import("../commands/utils").KeyNodeChildren; + }; + }; + }; + }; + protected readonly outputKeyNodes: { + ItemCollectionMetrics: { + "*": { + "*": { + ItemCollectionKey: import("../commands/utils").KeyNodeChildren; + }; + }; + }; + }; + protected readonly clientCommand: __TransactWriteItemsCommand; + readonly middlewareStack: MiddlewareStack< + TransactWriteCommandInput | __TransactWriteItemsCommandInput, + TransactWriteCommandOutput | __TransactWriteItemsCommandOutput + >; + constructor(input: TransactWriteCommandInput); + resolveMiddleware( + clientStack: MiddlewareStack, + configuration: DynamoDBDocumentClientResolvedConfig, + options?: __HttpHandlerOptions + ): Handler; +} +import { + ConditionCheck, + Delete, + ItemCollectionMetrics, + Put, + TransactWriteItem, + TransactWriteItemsCommandInput as __TransactWriteItemsCommandInput, + TransactWriteItemsCommandOutput as __TransactWriteItemsCommandOutput, + Update, +} from "@aws-sdk/client-dynamodb"; +import { NativeAttributeValue } from "@aws-sdk/util-dynamodb"; diff --git a/amplify/functions/fetchDocuments/node_modules/@aws-sdk/lib-dynamodb/dist-types/ts3.4/commands/UpdateCommand.d.ts b/amplify/functions/fetchDocuments/node_modules/@aws-sdk/lib-dynamodb/dist-types/ts3.4/commands/UpdateCommand.d.ts new file mode 100644 index 0000000..7c97b97 --- /dev/null +++ 
b/amplify/functions/fetchDocuments/node_modules/@aws-sdk/lib-dynamodb/dist-types/ts3.4/commands/UpdateCommand.d.ts @@ -0,0 +1,113 @@ +import { UpdateItemCommand as __UpdateItemCommand } from "@aws-sdk/client-dynamodb"; +import { Command as $Command } from "@smithy/smithy-client"; +import { + Handler, + HttpHandlerOptions as __HttpHandlerOptions, + MiddlewareStack, +} from "@smithy/types"; +import { DynamoDBDocumentClientCommand } from "../baseCommand/DynamoDBDocumentClientCommand"; +import { + DynamoDBDocumentClientResolvedConfig, + ServiceInputTypes, + ServiceOutputTypes, +} from "../DynamoDBDocumentClient"; +export { DynamoDBDocumentClientCommand, $Command }; +export type UpdateCommandInput = Pick< + __UpdateItemCommandInput, + Exclude< + keyof __UpdateItemCommandInput, + "Key" | "AttributeUpdates" | "Expected" | "ExpressionAttributeValues" + > +> & { + Key: Record | undefined; + AttributeUpdates?: + | Record< + string, + Pick< + AttributeValueUpdate, + Exclude + > & { + Value?: NativeAttributeValue | undefined; + } + > + | undefined; + Expected?: + | Record< + string, + Pick< + ExpectedAttributeValue, + Exclude + > & { + Value?: NativeAttributeValue | undefined; + AttributeValueList?: NativeAttributeValue[] | undefined; + } + > + | undefined; + ExpressionAttributeValues?: Record | undefined; +}; +export type UpdateCommandOutput = Pick< + __UpdateItemCommandOutput, + Exclude< + keyof __UpdateItemCommandOutput, + "Attributes" | "ItemCollectionMetrics" + > +> & { + Attributes?: Record | undefined; + ItemCollectionMetrics?: + | (Pick< + ItemCollectionMetrics, + Exclude + > & { + ItemCollectionKey?: Record | undefined; + }) + | undefined; +}; +export declare class UpdateCommand extends DynamoDBDocumentClientCommand< + UpdateCommandInput, + UpdateCommandOutput, + __UpdateItemCommandInput, + __UpdateItemCommandOutput, + DynamoDBDocumentClientResolvedConfig +> { + readonly input: UpdateCommandInput; + protected readonly inputKeyNodes: { + Key: 
import("../commands/utils").KeyNodeChildren; + AttributeUpdates: { + "*": { + Value: null; + }; + }; + Expected: { + "*": { + Value: null; + AttributeValueList: import("../commands/utils").KeyNodeChildren; + }; + }; + ExpressionAttributeValues: import("../commands/utils").KeyNodeChildren; + }; + protected readonly outputKeyNodes: { + Attributes: import("../commands/utils").KeyNodeChildren; + ItemCollectionMetrics: { + ItemCollectionKey: import("../commands/utils").KeyNodeChildren; + }; + }; + protected readonly clientCommand: __UpdateItemCommand; + readonly middlewareStack: MiddlewareStack< + UpdateCommandInput | __UpdateItemCommandInput, + UpdateCommandOutput | __UpdateItemCommandOutput + >; + constructor(input: UpdateCommandInput); + resolveMiddleware( + clientStack: MiddlewareStack, + configuration: DynamoDBDocumentClientResolvedConfig, + options?: __HttpHandlerOptions + ): Handler; +} +import { + AttributeValueUpdate, + ExpectedAttributeValue, + ItemCollectionMetrics, + UpdateItemCommandInput as __UpdateItemCommandInput, + UpdateItemCommandOutput as __UpdateItemCommandOutput, +} from "@aws-sdk/client-dynamodb"; +import { NativeAttributeValue } from "@aws-sdk/util-dynamodb"; diff --git a/amplify/functions/fetchDocuments/node_modules/@aws-sdk/lib-dynamodb/dist-types/ts3.4/commands/index.d.ts b/amplify/functions/fetchDocuments/node_modules/@aws-sdk/lib-dynamodb/dist-types/ts3.4/commands/index.d.ts new file mode 100644 index 0000000..49e8a4e --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@aws-sdk/lib-dynamodb/dist-types/ts3.4/commands/index.d.ts @@ -0,0 +1,13 @@ +export * from "./BatchExecuteStatementCommand"; +export * from "./BatchGetCommand"; +export * from "./BatchWriteCommand"; +export * from "./DeleteCommand"; +export * from "./ExecuteStatementCommand"; +export * from "./ExecuteTransactionCommand"; +export * from "./GetCommand"; +export * from "./PutCommand"; +export * from "./QueryCommand"; +export * from "./ScanCommand"; +export * from 
"./TransactGetCommand"; +export * from "./TransactWriteCommand"; +export * from "./UpdateCommand"; diff --git a/amplify/functions/fetchDocuments/node_modules/@aws-sdk/lib-dynamodb/dist-types/ts3.4/commands/utils.d.ts b/amplify/functions/fetchDocuments/node_modules/@aws-sdk/lib-dynamodb/dist-types/ts3.4/commands/utils.d.ts new file mode 100644 index 0000000..c0473c3 --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@aws-sdk/lib-dynamodb/dist-types/ts3.4/commands/utils.d.ts @@ -0,0 +1,17 @@ +import { marshallOptions, unmarshallOptions } from "@aws-sdk/util-dynamodb"; +export type KeyNodeSelf = null; +export declare const SELF: KeyNodeSelf; +export type KeyNodeChildren = Record; +export declare const ALL_VALUES: KeyNodeChildren; +export declare const ALL_MEMBERS: KeyNodeChildren; +export type KeyNodes = KeyNodeSelf | KeyNodeChildren; +export declare const marshallInput: ( + obj: any, + keyNodes: KeyNodeChildren, + options?: marshallOptions +) => any; +export declare const unmarshallOutput: ( + obj: any, + keyNodes: KeyNodeChildren, + options?: unmarshallOptions +) => any; diff --git a/amplify/functions/fetchDocuments/node_modules/@aws-sdk/lib-dynamodb/dist-types/ts3.4/index.d.ts b/amplify/functions/fetchDocuments/node_modules/@aws-sdk/lib-dynamodb/dist-types/ts3.4/index.d.ts new file mode 100644 index 0000000..ab7a55d --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@aws-sdk/lib-dynamodb/dist-types/ts3.4/index.d.ts @@ -0,0 +1,11 @@ +export * from "./DynamoDBDocument"; +export * from "./DynamoDBDocumentClient"; +export * from "./commands"; +export * from "./pagination"; +export { NumberValueImpl as NumberValue } from "@aws-sdk/util-dynamodb"; +export { marshallOptions, unmarshallOptions } from "@aws-sdk/util-dynamodb"; +export { + NativeAttributeValue, + NativeAttributeBinary, + NativeScalarAttributeValue, +} from "@aws-sdk/util-dynamodb"; diff --git 
a/amplify/functions/fetchDocuments/node_modules/@aws-sdk/lib-dynamodb/dist-types/ts3.4/pagination/Interfaces.d.ts b/amplify/functions/fetchDocuments/node_modules/@aws-sdk/lib-dynamodb/dist-types/ts3.4/pagination/Interfaces.d.ts new file mode 100644 index 0000000..5bd45d2 --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@aws-sdk/lib-dynamodb/dist-types/ts3.4/pagination/Interfaces.d.ts @@ -0,0 +1,8 @@ +import { PaginationConfiguration } from "@smithy/types"; +import { DynamoDBDocument } from "../DynamoDBDocument"; +import { DynamoDBDocumentClient } from "../DynamoDBDocumentClient"; +export { PaginationConfiguration }; +export interface DynamoDBDocumentPaginationConfiguration + extends PaginationConfiguration { + client: DynamoDBDocument | DynamoDBDocumentClient; +} diff --git a/amplify/functions/fetchDocuments/node_modules/@aws-sdk/lib-dynamodb/dist-types/ts3.4/pagination/QueryPaginator.d.ts b/amplify/functions/fetchDocuments/node_modules/@aws-sdk/lib-dynamodb/dist-types/ts3.4/pagination/QueryPaginator.d.ts new file mode 100644 index 0000000..93d4aff --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@aws-sdk/lib-dynamodb/dist-types/ts3.4/pagination/QueryPaginator.d.ts @@ -0,0 +1,12 @@ +import { Paginator } from "@smithy/types"; +import { + QueryCommandInput, + QueryCommandOutput, +} from "../commands/QueryCommand"; +import { DynamoDBDocumentPaginationConfiguration } from "./Interfaces"; +export { Paginator }; +export declare const paginateQuery: ( + config: DynamoDBDocumentPaginationConfiguration, + input: QueryCommandInput, + ...additionalArguments: any +) => Paginator; diff --git a/amplify/functions/fetchDocuments/node_modules/@aws-sdk/lib-dynamodb/dist-types/ts3.4/pagination/ScanPaginator.d.ts b/amplify/functions/fetchDocuments/node_modules/@aws-sdk/lib-dynamodb/dist-types/ts3.4/pagination/ScanPaginator.d.ts new file mode 100644 index 0000000..0a2c6d7 --- /dev/null +++ 
b/amplify/functions/fetchDocuments/node_modules/@aws-sdk/lib-dynamodb/dist-types/ts3.4/pagination/ScanPaginator.d.ts @@ -0,0 +1,9 @@ +import { Paginator } from "@smithy/types"; +import { ScanCommandInput, ScanCommandOutput } from "../commands/ScanCommand"; +import { DynamoDBDocumentPaginationConfiguration } from "./Interfaces"; +export { Paginator }; +export declare const paginateScan: ( + config: DynamoDBDocumentPaginationConfiguration, + input: ScanCommandInput, + ...additionalArguments: any +) => Paginator; diff --git a/amplify/functions/fetchDocuments/node_modules/@aws-sdk/lib-dynamodb/dist-types/ts3.4/pagination/index.d.ts b/amplify/functions/fetchDocuments/node_modules/@aws-sdk/lib-dynamodb/dist-types/ts3.4/pagination/index.d.ts new file mode 100644 index 0000000..0d9540e --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@aws-sdk/lib-dynamodb/dist-types/ts3.4/pagination/index.d.ts @@ -0,0 +1,3 @@ +export * from "./Interfaces"; +export * from "./QueryPaginator"; +export * from "./ScanPaginator"; diff --git a/amplify/functions/fetchDocuments/node_modules/@aws-sdk/lib-dynamodb/package.json b/amplify/functions/fetchDocuments/node_modules/@aws-sdk/lib-dynamodb/package.json new file mode 100644 index 0000000..0b72a76 --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@aws-sdk/lib-dynamodb/package.json @@ -0,0 +1,66 @@ +{ + "name": "@aws-sdk/lib-dynamodb", + "version": "3.803.0", + "description": "The document client simplifies working with items in Amazon DynamoDB by abstracting away the notion of attribute values.", + "main": "./dist-cjs/index.js", + "module": "./dist-es/index.js", + "types": "./dist-types/index.d.ts", + "scripts": { + "build": "concurrently 'yarn:build:cjs' 'yarn:build:es' 'yarn:build:types'", + "build:cjs": "node ../../scripts/compilation/inline lib-dynamodb", + "build:es": "tsc -p tsconfig.es.json", + "build:include:deps": "lerna run --scope $npm_package_name --include-dependencies build", + "build:types": "tsc -p 
tsconfig.types.json", + "build:types:downlevel": "downlevel-dts dist-types dist-types/ts3.4", + "clean": "rimraf ./dist-* && rimraf *.tsbuildinfo", + "extract:docs": "api-extractor run --local", + "test": "yarn g:vitest run", + "test:e2e": "yarn g:vitest run -c vitest.config.e2e.ts --mode development", + "test:watch": "yarn g:vitest watch", + "test:e2e:watch": "yarn g:vitest watch -c vitest.config.e2e.ts" + }, + "engines": { + "node": ">=18.0.0" + }, + "author": { + "name": "AWS SDK for JavaScript Team", + "url": "https://aws.amazon.com/javascript/" + }, + "license": "Apache-2.0", + "dependencies": { + "@aws-sdk/core": "3.799.0", + "@aws-sdk/util-dynamodb": "3.803.0", + "@smithy/core": "^3.3.0", + "@smithy/smithy-client": "^4.2.1", + "@smithy/types": "^4.2.0", + "tslib": "^2.6.2" + }, + "peerDependencies": { + "@aws-sdk/client-dynamodb": "^3.803.0" + }, + "devDependencies": { + "@aws-sdk/client-dynamodb": "3.803.0", + "@tsconfig/recommended": "1.0.1", + "@types/node": "^18.19.69", + "concurrently": "7.0.0", + "downlevel-dts": "0.10.1", + "rimraf": "3.0.2", + "typescript": "~5.2.2" + }, + "typesVersions": { + "<4.0": { + "dist-types/*": [ + "dist-types/ts3.4/*" + ] + } + }, + "files": [ + "dist-*/**" + ], + "homepage": "https://github.com/aws/aws-sdk-js-v3/tree/main/lib/lib-dynamodb", + "repository": { + "type": "git", + "url": "https://github.com/aws/aws-sdk-js-v3.git", + "directory": "lib/lib-dynamodb" + } +} diff --git a/amplify/functions/fetchDocuments/node_modules/@aws-sdk/middleware-endpoint-discovery/LICENSE b/amplify/functions/fetchDocuments/node_modules/@aws-sdk/middleware-endpoint-discovery/LICENSE new file mode 100644 index 0000000..dd65ae0 --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@aws-sdk/middleware-endpoint-discovery/LICENSE @@ -0,0 +1,201 @@ + Apache License + Version 2.0, January 2004 + http://www.apache.org/licenses/ + + TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION + + 1. Definitions. 
+ + "License" shall mean the terms and conditions for use, reproduction, + and distribution as defined by Sections 1 through 9 of this document. + + "Licensor" shall mean the copyright owner or entity authorized by + the copyright owner that is granting the License. + + "Legal Entity" shall mean the union of the acting entity and all + other entities that control, are controlled by, or are under common + control with that entity. For the purposes of this definition, + "control" means (i) the power, direct or indirect, to cause the + direction or management of such entity, whether by contract or + otherwise, or (ii) ownership of fifty percent (50%) or more of the + outstanding shares, or (iii) beneficial ownership of such entity. + + "You" (or "Your") shall mean an individual or Legal Entity + exercising permissions granted by this License. + + "Source" form shall mean the preferred form for making modifications, + including but not limited to software source code, documentation + source, and configuration files. + + "Object" form shall mean any form resulting from mechanical + transformation or translation of a Source form, including but + not limited to compiled object code, generated documentation, + and conversions to other media types. + + "Work" shall mean the work of authorship, whether in Source or + Object form, made available under the License, as indicated by a + copyright notice that is included in or attached to the work + (an example is provided in the Appendix below). + + "Derivative Works" shall mean any work, whether in Source or Object + form, that is based on (or derived from) the Work and for which the + editorial revisions, annotations, elaborations, or other modifications + represent, as a whole, an original work of authorship. For the purposes + of this License, Derivative Works shall not include works that remain + separable from, or merely link (or bind by name) to the interfaces of, + the Work and Derivative Works thereof. 
+ + "Contribution" shall mean any work of authorship, including + the original version of the Work and any modifications or additions + to that Work or Derivative Works thereof, that is intentionally + submitted to Licensor for inclusion in the Work by the copyright owner + or by an individual or Legal Entity authorized to submit on behalf of + the copyright owner. For the purposes of this definition, "submitted" + means any form of electronic, verbal, or written communication sent + to the Licensor or its representatives, including but not limited to + communication on electronic mailing lists, source code control systems, + and issue tracking systems that are managed by, or on behalf of, the + Licensor for the purpose of discussing and improving the Work, but + excluding communication that is conspicuously marked or otherwise + designated in writing by the copyright owner as "Not a Contribution." + + "Contributor" shall mean Licensor and any individual or Legal Entity + on behalf of whom a Contribution has been received by Licensor and + subsequently incorporated within the Work. + + 2. Grant of Copyright License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + copyright license to reproduce, prepare Derivative Works of, + publicly display, publicly perform, sublicense, and distribute the + Work and such Derivative Works in Source or Object form. + + 3. Grant of Patent License. 
Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + (except as stated in this section) patent license to make, have made, + use, offer to sell, sell, import, and otherwise transfer the Work, + where such license applies only to those patent claims licensable + by such Contributor that are necessarily infringed by their + Contribution(s) alone or by combination of their Contribution(s) + with the Work to which such Contribution(s) was submitted. If You + institute patent litigation against any entity (including a + cross-claim or counterclaim in a lawsuit) alleging that the Work + or a Contribution incorporated within the Work constitutes direct + or contributory patent infringement, then any patent licenses + granted to You under this License for that Work shall terminate + as of the date such litigation is filed. + + 4. Redistribution. You may reproduce and distribute copies of the + Work or Derivative Works thereof in any medium, with or without + modifications, and in Source or Object form, provided that You + meet the following conditions: + + (a) You must give any other recipients of the Work or + Derivative Works a copy of this License; and + + (b) You must cause any modified files to carry prominent notices + stating that You changed the files; and + + (c) You must retain, in the Source form of any Derivative Works + that You distribute, all copyright, patent, trademark, and + attribution notices from the Source form of the Work, + excluding those notices that do not pertain to any part of + the Derivative Works; and + + (d) If the Work includes a "NOTICE" text file as part of its + distribution, then any Derivative Works that You distribute must + include a readable copy of the attribution notices contained + within such NOTICE file, excluding those notices that do not + pertain to any part of the Derivative Works, in at least one + of 
the following places: within a NOTICE text file distributed + as part of the Derivative Works; within the Source form or + documentation, if provided along with the Derivative Works; or, + within a display generated by the Derivative Works, if and + wherever such third-party notices normally appear. The contents + of the NOTICE file are for informational purposes only and + do not modify the License. You may add Your own attribution + notices within Derivative Works that You distribute, alongside + or as an addendum to the NOTICE text from the Work, provided + that such additional attribution notices cannot be construed + as modifying the License. + + You may add Your own copyright statement to Your modifications and + may provide additional or different license terms and conditions + for use, reproduction, or distribution of Your modifications, or + for any such Derivative Works as a whole, provided Your use, + reproduction, and distribution of the Work otherwise complies with + the conditions stated in this License. + + 5. Submission of Contributions. Unless You explicitly state otherwise, + any Contribution intentionally submitted for inclusion in the Work + by You to the Licensor shall be under the terms and conditions of + this License, without any additional terms or conditions. + Notwithstanding the above, nothing herein shall supersede or modify + the terms of any separate license agreement you may have executed + with Licensor regarding such Contributions. + + 6. Trademarks. This License does not grant permission to use the trade + names, trademarks, service marks, or product names of the Licensor, + except as required for reasonable and customary use in describing the + origin of the Work and reproducing the content of the NOTICE file. + + 7. Disclaimer of Warranty. 
Unless required by applicable law or + agreed to in writing, Licensor provides the Work (and each + Contributor provides its Contributions) on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or + implied, including, without limitation, any warranties or conditions + of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A + PARTICULAR PURPOSE. You are solely responsible for determining the + appropriateness of using or redistributing the Work and assume any + risks associated with Your exercise of permissions under this License. + + 8. Limitation of Liability. In no event and under no legal theory, + whether in tort (including negligence), contract, or otherwise, + unless required by applicable law (such as deliberate and grossly + negligent acts) or agreed to in writing, shall any Contributor be + liable to You for damages, including any direct, indirect, special, + incidental, or consequential damages of any character arising as a + result of this License or out of the use or inability to use the + Work (including but not limited to damages for loss of goodwill, + work stoppage, computer failure or malfunction, or any and all + other commercial damages or losses), even if such Contributor + has been advised of the possibility of such damages. + + 9. Accepting Warranty or Additional Liability. While redistributing + the Work or Derivative Works thereof, You may choose to offer, + and charge a fee for, acceptance of support, warranty, indemnity, + or other liability obligations and/or rights consistent with this + License. However, in accepting such obligations, You may act only + on Your own behalf and on Your sole responsibility, not on behalf + of any other Contributor, and only if You agree to indemnify, + defend, and hold each Contributor harmless for any liability + incurred by, or claims asserted against, such Contributor by reason + of your accepting any such warranty or additional liability. 
+ + END OF TERMS AND CONDITIONS + + APPENDIX: How to apply the Apache License to your work. + + To apply the Apache License to your work, attach the following + boilerplate notice, with the fields enclosed by brackets "{}" + replaced with your own identifying information. (Don't include + the brackets!) The text should be enclosed in the appropriate + comment syntax for the file format. We also recommend that a + file or class name and description of purpose be included on the + same "printed page" as the copyright notice for easier + identification within third-party archives. + + Copyright 2018-2020 Amazon.com, Inc. or its affiliates. All Rights Reserved. + + Licensed under the Apache License, Version 2.0 (the "License"); + you may not use this file except in compliance with the License. + You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + + Unless required by applicable law or agreed to in writing, software + distributed under the License is distributed on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + See the License for the specific language governing permissions and + limitations under the License. 
diff --git a/amplify/functions/fetchDocuments/node_modules/@aws-sdk/middleware-endpoint-discovery/README.md b/amplify/functions/fetchDocuments/node_modules/@aws-sdk/middleware-endpoint-discovery/README.md new file mode 100644 index 0000000..4a50903 --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@aws-sdk/middleware-endpoint-discovery/README.md @@ -0,0 +1,4 @@ +# @aws-sdk/middleware-endpoint-discovery + +[![NPM version](https://img.shields.io/npm/v/@aws-sdk/middleware-endpoint-discovery/latest.svg)](https://www.npmjs.com/package/@aws-sdk/middleware-endpoint-discovery) +[![NPM downloads](https://img.shields.io/npm/dm/@aws-sdk/middleware-endpoint-discovery.svg)](https://www.npmjs.com/package/@aws-sdk/middleware-endpoint-discovery) diff --git a/amplify/functions/fetchDocuments/node_modules/@aws-sdk/middleware-endpoint-discovery/dist-cjs/index.js b/amplify/functions/fetchDocuments/node_modules/@aws-sdk/middleware-endpoint-discovery/dist-cjs/index.js new file mode 100644 index 0000000..f534fd5 --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@aws-sdk/middleware-endpoint-discovery/dist-cjs/index.js @@ -0,0 +1,229 @@ +"use strict"; +var __defProp = Object.defineProperty; +var __getOwnPropDesc = Object.getOwnPropertyDescriptor; +var __getOwnPropNames = Object.getOwnPropertyNames; +var __hasOwnProp = Object.prototype.hasOwnProperty; +var __name = (target, value) => __defProp(target, "name", { value, configurable: true }); +var __export = (target, all) => { + for (var name in all) + __defProp(target, name, { get: all[name], enumerable: true }); +}; +var __copyProps = (to, from, except, desc) => { + if (from && typeof from === "object" || typeof from === "function") { + for (let key of __getOwnPropNames(from)) + if (!__hasOwnProp.call(to, key) && key !== except) + __defProp(to, key, { get: () => from[key], enumerable: !(desc = __getOwnPropDesc(from, key)) || desc.enumerable }); + } + return to; +}; +var __toCommonJS = (mod) => 
__copyProps(__defProp({}, "__esModule", { value: true }), mod); + +// src/index.ts +var index_exports = {}; +__export(index_exports, { + NODE_ENDPOINT_DISCOVERY_CONFIG_OPTIONS: () => NODE_ENDPOINT_DISCOVERY_CONFIG_OPTIONS, + endpointDiscoveryMiddlewareOptions: () => endpointDiscoveryMiddlewareOptions, + getEndpointDiscoveryOptionalPlugin: () => getEndpointDiscoveryOptionalPlugin, + getEndpointDiscoveryPlugin: () => getEndpointDiscoveryPlugin, + getEndpointDiscoveryRequiredPlugin: () => getEndpointDiscoveryRequiredPlugin, + resolveEndpointDiscoveryConfig: () => resolveEndpointDiscoveryConfig +}); +module.exports = __toCommonJS(index_exports); + +// src/configurations.ts +var ENV_ENDPOINT_DISCOVERY = ["AWS_ENABLE_ENDPOINT_DISCOVERY", "AWS_ENDPOINT_DISCOVERY_ENABLED"]; +var CONFIG_ENDPOINT_DISCOVERY = "endpoint_discovery_enabled"; +var isFalsy = /* @__PURE__ */ __name((value) => ["false", "0"].indexOf(value) >= 0, "isFalsy"); +var NODE_ENDPOINT_DISCOVERY_CONFIG_OPTIONS = { + environmentVariableSelector: /* @__PURE__ */ __name((env) => { + for (let i = 0; i < ENV_ENDPOINT_DISCOVERY.length; i++) { + const envKey = ENV_ENDPOINT_DISCOVERY[i]; + if (envKey in env) { + const value = env[envKey]; + if (value === "") { + throw Error(`Environment variable ${envKey} can't be empty of undefined, got "${value}"`); + } + return !isFalsy(value); + } + } + }, "environmentVariableSelector"), + configFileSelector: /* @__PURE__ */ __name((profile) => { + if (CONFIG_ENDPOINT_DISCOVERY in profile) { + const value = profile[CONFIG_ENDPOINT_DISCOVERY]; + if (value === void 0) { + throw Error(`Shared config entry ${CONFIG_ENDPOINT_DISCOVERY} can't be undefined, got "${value}"`); + } + return !isFalsy(value); + } + }, "configFileSelector"), + default: void 0 +}; + +// src/endpointDiscoveryMiddleware.ts +var import_protocol_http = require("@smithy/protocol-http"); + +// src/getCacheKey.ts +var getCacheKey = /* @__PURE__ */ __name(async (commandName, config, options) => { + const { accessKeyId 
} = await config.credentials(); + const { identifiers } = options; + return JSON.stringify({ + ...accessKeyId && { accessKeyId }, + ...identifiers && { + commandName, + identifiers: Object.entries(identifiers).sort().reduce((acc, [key, value]) => ({ ...acc, [key]: value }), {}) + } + }); +}, "getCacheKey"); + +// src/updateDiscoveredEndpointInCache.ts +var requestQueue = {}; +var updateDiscoveredEndpointInCache = /* @__PURE__ */ __name(async (config, options) => new Promise((resolve, reject) => { + const { endpointCache } = config; + const { cacheKey, commandName, identifiers } = options; + const endpoints = endpointCache.get(cacheKey); + if (endpoints && endpoints.length === 1 && endpoints[0].Address === "") { + if (options.isDiscoveredEndpointRequired) { + if (!requestQueue[cacheKey]) requestQueue[cacheKey] = []; + requestQueue[cacheKey].push({ resolve, reject }); + } else { + resolve(); + } + } else if (endpoints && endpoints.length > 0) { + resolve(); + } else { + const placeholderEndpoints = [{ Address: "", CachePeriodInMinutes: 1 }]; + endpointCache.set(cacheKey, placeholderEndpoints); + const command = new options.endpointDiscoveryCommandCtor({ + Operation: commandName.slice(0, -7), + // strip "Command" + Identifiers: identifiers + }); + const handler = command.resolveMiddleware(options.clientStack, config, options.options); + handler(command).then((result) => { + endpointCache.set(cacheKey, result.output.Endpoints); + if (requestQueue[cacheKey]) { + requestQueue[cacheKey].forEach(({ resolve: resolve2 }) => { + resolve2(); + }); + delete requestQueue[cacheKey]; + } + resolve(); + }).catch((error) => { + endpointCache.delete(cacheKey); + const errorToThrow = Object.assign( + new Error( + `The operation to discover endpoint failed. 
Please retry, or provide a custom endpoint and disable endpoint discovery to proceed.` + ), + { reason: error } + ); + if (requestQueue[cacheKey]) { + requestQueue[cacheKey].forEach(({ reject: reject2 }) => { + reject2(errorToThrow); + }); + delete requestQueue[cacheKey]; + } + if (options.isDiscoveredEndpointRequired) { + reject(errorToThrow); + } else { + endpointCache.set(cacheKey, placeholderEndpoints); + resolve(); + } + }); + } +}), "updateDiscoveredEndpointInCache"); + +// src/endpointDiscoveryMiddleware.ts +var endpointDiscoveryMiddleware = /* @__PURE__ */ __name((config, middlewareConfig) => (next, context) => async (args) => { + if (config.isCustomEndpoint) { + if (config.isClientEndpointDiscoveryEnabled) { + throw new Error(`Custom endpoint is supplied; endpointDiscoveryEnabled must not be true.`); + } + return next(args); + } + const { endpointDiscoveryCommandCtor } = config; + const { isDiscoveredEndpointRequired, identifiers } = middlewareConfig; + const clientName = context.clientName; + const commandName = context.commandName; + const isEndpointDiscoveryEnabled = await config.endpointDiscoveryEnabled(); + const cacheKey = await getCacheKey(commandName, config, { identifiers }); + if (isDiscoveredEndpointRequired) { + if (isEndpointDiscoveryEnabled === false) { + throw new Error( + `Endpoint Discovery is disabled but ${commandName} on ${clientName} requires it. 
Please check your configurations.` + ); + } + await updateDiscoveredEndpointInCache(config, { + ...middlewareConfig, + commandName, + cacheKey, + endpointDiscoveryCommandCtor + }); + } else if (isEndpointDiscoveryEnabled) { + updateDiscoveredEndpointInCache(config, { + ...middlewareConfig, + commandName, + cacheKey, + endpointDiscoveryCommandCtor + }); + } + const { request } = args; + if (cacheKey && import_protocol_http.HttpRequest.isInstance(request)) { + const endpoint = config.endpointCache.getEndpoint(cacheKey); + if (endpoint) { + request.hostname = endpoint; + } + } + return next(args); +}, "endpointDiscoveryMiddleware"); + +// src/getEndpointDiscoveryPlugin.ts +var endpointDiscoveryMiddlewareOptions = { + name: "endpointDiscoveryMiddleware", + step: "build", + tags: ["ENDPOINT_DISCOVERY"], + override: true +}; +var getEndpointDiscoveryPlugin = /* @__PURE__ */ __name((pluginConfig, middlewareConfig) => ({ + applyToStack: /* @__PURE__ */ __name((commandStack) => { + commandStack.add(endpointDiscoveryMiddleware(pluginConfig, middlewareConfig), endpointDiscoveryMiddlewareOptions); + }, "applyToStack") +}), "getEndpointDiscoveryPlugin"); +var getEndpointDiscoveryRequiredPlugin = /* @__PURE__ */ __name((pluginConfig, middlewareConfig) => ({ + applyToStack: /* @__PURE__ */ __name((commandStack) => { + commandStack.add( + endpointDiscoveryMiddleware(pluginConfig, { ...middlewareConfig, isDiscoveredEndpointRequired: true }), + endpointDiscoveryMiddlewareOptions + ); + }, "applyToStack") +}), "getEndpointDiscoveryRequiredPlugin"); +var getEndpointDiscoveryOptionalPlugin = /* @__PURE__ */ __name((pluginConfig, middlewareConfig) => ({ + applyToStack: /* @__PURE__ */ __name((commandStack) => { + commandStack.add( + endpointDiscoveryMiddleware(pluginConfig, { ...middlewareConfig, isDiscoveredEndpointRequired: false }), + endpointDiscoveryMiddlewareOptions + ); + }, "applyToStack") +}), "getEndpointDiscoveryOptionalPlugin"); + +// src/resolveEndpointDiscoveryConfig.ts 
+var import_endpoint_cache = require("@aws-sdk/endpoint-cache"); +var resolveEndpointDiscoveryConfig = /* @__PURE__ */ __name((input, { endpointDiscoveryCommandCtor }) => { + const { endpointCacheSize, endpointDiscoveryEnabled, endpointDiscoveryEnabledProvider } = input; + return Object.assign(input, { + endpointDiscoveryCommandCtor, + endpointCache: new import_endpoint_cache.EndpointCache(endpointCacheSize ?? 1e3), + endpointDiscoveryEnabled: endpointDiscoveryEnabled !== void 0 ? () => Promise.resolve(endpointDiscoveryEnabled) : endpointDiscoveryEnabledProvider, + isClientEndpointDiscoveryEnabled: endpointDiscoveryEnabled !== void 0 + }); +}, "resolveEndpointDiscoveryConfig"); +// Annotate the CommonJS export names for ESM import in node: + +0 && (module.exports = { + NODE_ENDPOINT_DISCOVERY_CONFIG_OPTIONS, + endpointDiscoveryMiddlewareOptions, + getEndpointDiscoveryPlugin, + getEndpointDiscoveryRequiredPlugin, + getEndpointDiscoveryOptionalPlugin, + resolveEndpointDiscoveryConfig +}); + diff --git a/amplify/functions/fetchDocuments/node_modules/@aws-sdk/middleware-endpoint-discovery/dist-es/configurations.js b/amplify/functions/fetchDocuments/node_modules/@aws-sdk/middleware-endpoint-discovery/dist-es/configurations.js new file mode 100644 index 0000000..8a5fdd6 --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@aws-sdk/middleware-endpoint-discovery/dist-es/configurations.js @@ -0,0 +1,27 @@ +const ENV_ENDPOINT_DISCOVERY = ["AWS_ENABLE_ENDPOINT_DISCOVERY", "AWS_ENDPOINT_DISCOVERY_ENABLED"]; +const CONFIG_ENDPOINT_DISCOVERY = "endpoint_discovery_enabled"; +const isFalsy = (value) => ["false", "0"].indexOf(value) >= 0; +export const NODE_ENDPOINT_DISCOVERY_CONFIG_OPTIONS = { + environmentVariableSelector: (env) => { + for (let i = 0; i < ENV_ENDPOINT_DISCOVERY.length; i++) { + const envKey = ENV_ENDPOINT_DISCOVERY[i]; + if (envKey in env) { + const value = env[envKey]; + if (value === "") { + throw Error(`Environment variable ${envKey} can't be 
empty of undefined, got "${value}"`); + } + return !isFalsy(value); + } + } + }, + configFileSelector: (profile) => { + if (CONFIG_ENDPOINT_DISCOVERY in profile) { + const value = profile[CONFIG_ENDPOINT_DISCOVERY]; + if (value === undefined) { + throw Error(`Shared config entry ${CONFIG_ENDPOINT_DISCOVERY} can't be undefined, got "${value}"`); + } + return !isFalsy(value); + } + }, + default: undefined, +}; diff --git a/amplify/functions/fetchDocuments/node_modules/@aws-sdk/middleware-endpoint-discovery/dist-es/endpointDiscoveryMiddleware.js b/amplify/functions/fetchDocuments/node_modules/@aws-sdk/middleware-endpoint-discovery/dist-es/endpointDiscoveryMiddleware.js new file mode 100644 index 0000000..80672eb --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@aws-sdk/middleware-endpoint-discovery/dist-es/endpointDiscoveryMiddleware.js @@ -0,0 +1,45 @@ +import { HttpRequest } from "@smithy/protocol-http"; +import { getCacheKey } from "./getCacheKey"; +import { updateDiscoveredEndpointInCache } from "./updateDiscoveredEndpointInCache"; +export const endpointDiscoveryMiddleware = (config, middlewareConfig) => (next, context) => async (args) => { + if (config.isCustomEndpoint) { + if (config.isClientEndpointDiscoveryEnabled) { + throw new Error(`Custom endpoint is supplied; endpointDiscoveryEnabled must not be true.`); + } + return next(args); + } + const { endpointDiscoveryCommandCtor } = config; + const { isDiscoveredEndpointRequired, identifiers } = middlewareConfig; + const clientName = context.clientName; + const commandName = context.commandName; + const isEndpointDiscoveryEnabled = await config.endpointDiscoveryEnabled(); + const cacheKey = await getCacheKey(commandName, config, { identifiers }); + if (isDiscoveredEndpointRequired) { + if (isEndpointDiscoveryEnabled === false) { + throw new Error(`Endpoint Discovery is disabled but ${commandName} on ${clientName} requires it.` + + ` Please check your configurations.`); + } + await 
updateDiscoveredEndpointInCache(config, { + ...middlewareConfig, + commandName, + cacheKey, + endpointDiscoveryCommandCtor, + }); + } + else if (isEndpointDiscoveryEnabled) { + updateDiscoveredEndpointInCache(config, { + ...middlewareConfig, + commandName, + cacheKey, + endpointDiscoveryCommandCtor, + }); + } + const { request } = args; + if (cacheKey && HttpRequest.isInstance(request)) { + const endpoint = config.endpointCache.getEndpoint(cacheKey); + if (endpoint) { + request.hostname = endpoint; + } + } + return next(args); +}; diff --git a/amplify/functions/fetchDocuments/node_modules/@aws-sdk/middleware-endpoint-discovery/dist-es/getCacheKey.js b/amplify/functions/fetchDocuments/node_modules/@aws-sdk/middleware-endpoint-discovery/dist-es/getCacheKey.js new file mode 100644 index 0000000..ca72e41 --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@aws-sdk/middleware-endpoint-discovery/dist-es/getCacheKey.js @@ -0,0 +1,13 @@ +export const getCacheKey = async (commandName, config, options) => { + const { accessKeyId } = await config.credentials(); + const { identifiers } = options; + return JSON.stringify({ + ...(accessKeyId && { accessKeyId }), + ...(identifiers && { + commandName, + identifiers: Object.entries(identifiers) + .sort() + .reduce((acc, [key, value]) => ({ ...acc, [key]: value }), {}), + }), + }); +}; diff --git a/amplify/functions/fetchDocuments/node_modules/@aws-sdk/middleware-endpoint-discovery/dist-es/getEndpointDiscoveryPlugin.js b/amplify/functions/fetchDocuments/node_modules/@aws-sdk/middleware-endpoint-discovery/dist-es/getEndpointDiscoveryPlugin.js new file mode 100644 index 0000000..656e7fe --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@aws-sdk/middleware-endpoint-discovery/dist-es/getEndpointDiscoveryPlugin.js @@ -0,0 +1,22 @@ +import { endpointDiscoveryMiddleware } from "./endpointDiscoveryMiddleware"; +export const endpointDiscoveryMiddlewareOptions = { + name: "endpointDiscoveryMiddleware", + step: 
"build", + tags: ["ENDPOINT_DISCOVERY"], + override: true, +}; +export const getEndpointDiscoveryPlugin = (pluginConfig, middlewareConfig) => ({ + applyToStack: (commandStack) => { + commandStack.add(endpointDiscoveryMiddleware(pluginConfig, middlewareConfig), endpointDiscoveryMiddlewareOptions); + }, +}); +export const getEndpointDiscoveryRequiredPlugin = (pluginConfig, middlewareConfig) => ({ + applyToStack: (commandStack) => { + commandStack.add(endpointDiscoveryMiddleware(pluginConfig, { ...middlewareConfig, isDiscoveredEndpointRequired: true }), endpointDiscoveryMiddlewareOptions); + }, +}); +export const getEndpointDiscoveryOptionalPlugin = (pluginConfig, middlewareConfig) => ({ + applyToStack: (commandStack) => { + commandStack.add(endpointDiscoveryMiddleware(pluginConfig, { ...middlewareConfig, isDiscoveredEndpointRequired: false }), endpointDiscoveryMiddlewareOptions); + }, +}); diff --git a/amplify/functions/fetchDocuments/node_modules/@aws-sdk/middleware-endpoint-discovery/dist-es/index.js b/amplify/functions/fetchDocuments/node_modules/@aws-sdk/middleware-endpoint-discovery/dist-es/index.js new file mode 100644 index 0000000..1c74159 --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@aws-sdk/middleware-endpoint-discovery/dist-es/index.js @@ -0,0 +1,3 @@ +export * from "./configurations"; +export * from "./getEndpointDiscoveryPlugin"; +export * from "./resolveEndpointDiscoveryConfig"; diff --git a/amplify/functions/fetchDocuments/node_modules/@aws-sdk/middleware-endpoint-discovery/dist-es/resolveEndpointDiscoveryConfig.js b/amplify/functions/fetchDocuments/node_modules/@aws-sdk/middleware-endpoint-discovery/dist-es/resolveEndpointDiscoveryConfig.js new file mode 100644 index 0000000..cc1cc9c --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@aws-sdk/middleware-endpoint-discovery/dist-es/resolveEndpointDiscoveryConfig.js @@ -0,0 +1,12 @@ +import { EndpointCache } from "@aws-sdk/endpoint-cache"; +export const 
resolveEndpointDiscoveryConfig = (input, { endpointDiscoveryCommandCtor }) => { + const { endpointCacheSize, endpointDiscoveryEnabled, endpointDiscoveryEnabledProvider } = input; + return Object.assign(input, { + endpointDiscoveryCommandCtor, + endpointCache: new EndpointCache(endpointCacheSize ?? 1000), + endpointDiscoveryEnabled: endpointDiscoveryEnabled !== undefined + ? () => Promise.resolve(endpointDiscoveryEnabled) + : endpointDiscoveryEnabledProvider, + isClientEndpointDiscoveryEnabled: endpointDiscoveryEnabled !== undefined, + }); +}; diff --git a/amplify/functions/fetchDocuments/node_modules/@aws-sdk/middleware-endpoint-discovery/dist-es/updateDiscoveredEndpointInCache.js b/amplify/functions/fetchDocuments/node_modules/@aws-sdk/middleware-endpoint-discovery/dist-es/updateDiscoveredEndpointInCache.js new file mode 100644 index 0000000..c0a9831 --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@aws-sdk/middleware-endpoint-discovery/dist-es/updateDiscoveredEndpointInCache.js @@ -0,0 +1,57 @@ +const requestQueue = {}; +export const updateDiscoveredEndpointInCache = async (config, options) => new Promise((resolve, reject) => { + const { endpointCache } = config; + const { cacheKey, commandName, identifiers } = options; + const endpoints = endpointCache.get(cacheKey); + if (endpoints && endpoints.length === 1 && endpoints[0].Address === "") { + if (options.isDiscoveredEndpointRequired) { + if (!requestQueue[cacheKey]) + requestQueue[cacheKey] = []; + requestQueue[cacheKey].push({ resolve, reject }); + } + else { + resolve(); + } + } + else if (endpoints && endpoints.length > 0) { + resolve(); + } + else { + const placeholderEndpoints = [{ Address: "", CachePeriodInMinutes: 1 }]; + endpointCache.set(cacheKey, placeholderEndpoints); + const command = new options.endpointDiscoveryCommandCtor({ + Operation: commandName.slice(0, -7), + Identifiers: identifiers, + }); + const handler = command.resolveMiddleware(options.clientStack, config, 
options.options); + handler(command) + .then((result) => { + endpointCache.set(cacheKey, result.output.Endpoints); + if (requestQueue[cacheKey]) { + requestQueue[cacheKey].forEach(({ resolve }) => { + resolve(); + }); + delete requestQueue[cacheKey]; + } + resolve(); + }) + .catch((error) => { + endpointCache.delete(cacheKey); + const errorToThrow = Object.assign(new Error(`The operation to discover endpoint failed.` + + ` Please retry, or provide a custom endpoint and disable endpoint discovery to proceed.`), { reason: error }); + if (requestQueue[cacheKey]) { + requestQueue[cacheKey].forEach(({ reject }) => { + reject(errorToThrow); + }); + delete requestQueue[cacheKey]; + } + if (options.isDiscoveredEndpointRequired) { + reject(errorToThrow); + } + else { + endpointCache.set(cacheKey, placeholderEndpoints); + resolve(); + } + }); + } +}); diff --git a/amplify/functions/fetchDocuments/node_modules/@aws-sdk/middleware-endpoint-discovery/dist-types/configurations.d.ts b/amplify/functions/fetchDocuments/node_modules/@aws-sdk/middleware-endpoint-discovery/dist-types/configurations.d.ts new file mode 100644 index 0000000..428209a --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@aws-sdk/middleware-endpoint-discovery/dist-types/configurations.d.ts @@ -0,0 +1,5 @@ +import { LoadedConfigSelectors } from "@smithy/node-config-provider"; +/** + * @internal + */ +export declare const NODE_ENDPOINT_DISCOVERY_CONFIG_OPTIONS: LoadedConfigSelectors; diff --git a/amplify/functions/fetchDocuments/node_modules/@aws-sdk/middleware-endpoint-discovery/dist-types/endpointDiscoveryMiddleware.d.ts b/amplify/functions/fetchDocuments/node_modules/@aws-sdk/middleware-endpoint-discovery/dist-types/endpointDiscoveryMiddleware.d.ts new file mode 100644 index 0000000..0116bfc --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@aws-sdk/middleware-endpoint-discovery/dist-types/endpointDiscoveryMiddleware.d.ts @@ -0,0 +1,4 @@ +import { BuildHandler, 
HandlerExecutionContext, MetadataBearer } from "@smithy/types"; +import { EndpointDiscoveryMiddlewareConfig } from "./getEndpointDiscoveryPlugin"; +import { EndpointDiscoveryResolvedConfig, PreviouslyResolved } from "./resolveEndpointDiscoveryConfig"; +export declare const endpointDiscoveryMiddleware: (config: EndpointDiscoveryResolvedConfig & PreviouslyResolved, middlewareConfig: EndpointDiscoveryMiddlewareConfig) => (next: BuildHandler, context: HandlerExecutionContext) => BuildHandler; diff --git a/amplify/functions/fetchDocuments/node_modules/@aws-sdk/middleware-endpoint-discovery/dist-types/getCacheKey.d.ts b/amplify/functions/fetchDocuments/node_modules/@aws-sdk/middleware-endpoint-discovery/dist-types/getCacheKey.d.ts new file mode 100644 index 0000000..153a5b9 --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@aws-sdk/middleware-endpoint-discovery/dist-types/getCacheKey.d.ts @@ -0,0 +1,9 @@ +import { AwsCredentialIdentity, Provider } from "@smithy/types"; +/** + * Generate key to index the endpoints in the cache + */ +export declare const getCacheKey: (commandName: string, config: { + credentials: Provider; +}, options: { + identifiers?: Record; +}) => Promise; diff --git a/amplify/functions/fetchDocuments/node_modules/@aws-sdk/middleware-endpoint-discovery/dist-types/getEndpointDiscoveryPlugin.d.ts b/amplify/functions/fetchDocuments/node_modules/@aws-sdk/middleware-endpoint-discovery/dist-types/getEndpointDiscoveryPlugin.d.ts new file mode 100644 index 0000000..06565e3 --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@aws-sdk/middleware-endpoint-discovery/dist-types/getEndpointDiscoveryPlugin.d.ts @@ -0,0 +1,29 @@ +import { BuildHandlerOptions, HttpHandlerOptions, MiddlewareStack, Pluggable } from "@smithy/types"; +import { EndpointDiscoveryResolvedConfig, PreviouslyResolved } from "./resolveEndpointDiscoveryConfig"; +/** + * @internal + */ +export declare const endpointDiscoveryMiddlewareOptions: BuildHandlerOptions; +/** 
+ * @public + */ +export interface EndpointDiscoveryMiddlewareConfig { + isDiscoveredEndpointRequired: boolean; + clientStack: MiddlewareStack; + options?: HttpHandlerOptions; + identifiers?: Record; +} +/** + * @internal + */ +export declare const getEndpointDiscoveryPlugin: (pluginConfig: EndpointDiscoveryResolvedConfig & PreviouslyResolved, middlewareConfig: EndpointDiscoveryMiddlewareConfig) => Pluggable; +/** + * @internal + * @deprecated Use getEndpointDiscoveryPlugin + */ +export declare const getEndpointDiscoveryRequiredPlugin: (pluginConfig: EndpointDiscoveryResolvedConfig & PreviouslyResolved, middlewareConfig: Omit) => Pluggable; +/** + * @internal + * @deprecated Use getEndpointDiscoveryPlugin + */ +export declare const getEndpointDiscoveryOptionalPlugin: (pluginConfig: EndpointDiscoveryResolvedConfig & PreviouslyResolved, middlewareConfig: Omit) => Pluggable; diff --git a/amplify/functions/fetchDocuments/node_modules/@aws-sdk/middleware-endpoint-discovery/dist-types/index.d.ts b/amplify/functions/fetchDocuments/node_modules/@aws-sdk/middleware-endpoint-discovery/dist-types/index.d.ts new file mode 100644 index 0000000..1c74159 --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@aws-sdk/middleware-endpoint-discovery/dist-types/index.d.ts @@ -0,0 +1,3 @@ +export * from "./configurations"; +export * from "./getEndpointDiscoveryPlugin"; +export * from "./resolveEndpointDiscoveryConfig"; diff --git a/amplify/functions/fetchDocuments/node_modules/@aws-sdk/middleware-endpoint-discovery/dist-types/resolveEndpointDiscoveryConfig.d.ts b/amplify/functions/fetchDocuments/node_modules/@aws-sdk/middleware-endpoint-discovery/dist-types/resolveEndpointDiscoveryConfig.d.ts new file mode 100644 index 0000000..dd132a4 --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@aws-sdk/middleware-endpoint-discovery/dist-types/resolveEndpointDiscoveryConfig.d.ts @@ -0,0 +1,60 @@ +import { EndpointCache } from "@aws-sdk/endpoint-cache"; +import { 
AwsCredentialIdentity, MemoizedProvider, Provider } from "@smithy/types"; +/** + * @internal + */ +export interface PreviouslyResolved { + isCustomEndpoint?: boolean; + credentials: MemoizedProvider; + endpointDiscoveryEnabledProvider: Provider; +} +/** + * @public + */ +export interface EndpointDiscoveryInputConfig { + /** + * The size of the client cache storing endpoints from endpoint discovery operations. + * Defaults to 1000. + */ + endpointCacheSize?: number; + /** + * Whether to call operations with endpoints given by service dynamically. + * Setting this config to `true` will enable endpoint discovery for all applicable operations. + * Setting it to `false` will explicitly disable endpoint discovery even though operations that + * require endpoint discovery will presumably fail. Leaving it to undefined means SDK only do + * endpoint discovery when it's required. Defaults to `undefined`. + */ + endpointDiscoveryEnabled?: boolean | undefined; +} +export interface EndpointDiscoveryResolvedConfig { + /** + * LRU Cache which stores endpoints from endpoint discovery operations. + * The size is either provided by {@link EndpointDiscoveryInputConfig.endpointCacheSize}. + */ + endpointCache: EndpointCache; + /** + * The constructor of the Command used for discovering endpoints. + * @internal + */ + endpointDiscoveryCommandCtor: new (comandConfig: any) => any; + /** + * Resolved value for input config {@link EndpointDiscoveryInputConfig.endpointDiscoveryEnabled}. + */ + endpointDiscoveryEnabled: Provider; + /** + * Stores whether endpoint discovery configuration is set locally by passing + * {@link EndpointDiscoveryInputConfig.endpointDiscoveryEnabled} during client creation. + * @internal + */ + isClientEndpointDiscoveryEnabled: boolean; +} +export interface EndpointDiscoveryConfigOptions { + /** + * The constructor of the Command used for discovering endpoints. 
+ */ + endpointDiscoveryCommandCtor: new (comandConfig: any) => any; +} +/** + * @internal + */ +export declare const resolveEndpointDiscoveryConfig: (input: T & PreviouslyResolved & EndpointDiscoveryInputConfig, { endpointDiscoveryCommandCtor }: EndpointDiscoveryConfigOptions) => T & EndpointDiscoveryResolvedConfig; diff --git a/amplify/functions/fetchDocuments/node_modules/@aws-sdk/middleware-endpoint-discovery/dist-types/ts3.4/configurations.d.ts b/amplify/functions/fetchDocuments/node_modules/@aws-sdk/middleware-endpoint-discovery/dist-types/ts3.4/configurations.d.ts new file mode 100644 index 0000000..366f145 --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@aws-sdk/middleware-endpoint-discovery/dist-types/ts3.4/configurations.d.ts @@ -0,0 +1,4 @@ +import { LoadedConfigSelectors } from "@smithy/node-config-provider"; +export declare const NODE_ENDPOINT_DISCOVERY_CONFIG_OPTIONS: LoadedConfigSelectors< + boolean | undefined +>; diff --git a/amplify/functions/fetchDocuments/node_modules/@aws-sdk/middleware-endpoint-discovery/dist-types/ts3.4/endpointDiscoveryMiddleware.d.ts b/amplify/functions/fetchDocuments/node_modules/@aws-sdk/middleware-endpoint-discovery/dist-types/ts3.4/endpointDiscoveryMiddleware.d.ts new file mode 100644 index 0000000..ceff474 --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@aws-sdk/middleware-endpoint-discovery/dist-types/ts3.4/endpointDiscoveryMiddleware.d.ts @@ -0,0 +1,17 @@ +import { + BuildHandler, + HandlerExecutionContext, + MetadataBearer, +} from "@smithy/types"; +import { EndpointDiscoveryMiddlewareConfig } from "./getEndpointDiscoveryPlugin"; +import { + EndpointDiscoveryResolvedConfig, + PreviouslyResolved, +} from "./resolveEndpointDiscoveryConfig"; +export declare const endpointDiscoveryMiddleware: ( + config: EndpointDiscoveryResolvedConfig & PreviouslyResolved, + middlewareConfig: EndpointDiscoveryMiddlewareConfig +) => ( + next: BuildHandler, + context: HandlerExecutionContext +) => 
BuildHandler; diff --git a/amplify/functions/fetchDocuments/node_modules/@aws-sdk/middleware-endpoint-discovery/dist-types/ts3.4/getCacheKey.d.ts b/amplify/functions/fetchDocuments/node_modules/@aws-sdk/middleware-endpoint-discovery/dist-types/ts3.4/getCacheKey.d.ts new file mode 100644 index 0000000..d9be17e --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@aws-sdk/middleware-endpoint-discovery/dist-types/ts3.4/getCacheKey.d.ts @@ -0,0 +1,10 @@ +import { AwsCredentialIdentity, Provider } from "@smithy/types"; +export declare const getCacheKey: ( + commandName: string, + config: { + credentials: Provider; + }, + options: { + identifiers?: Record; + } +) => Promise; diff --git a/amplify/functions/fetchDocuments/node_modules/@aws-sdk/middleware-endpoint-discovery/dist-types/ts3.4/getEndpointDiscoveryPlugin.d.ts b/amplify/functions/fetchDocuments/node_modules/@aws-sdk/middleware-endpoint-discovery/dist-types/ts3.4/getEndpointDiscoveryPlugin.d.ts new file mode 100644 index 0000000..8c60174 --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@aws-sdk/middleware-endpoint-discovery/dist-types/ts3.4/getEndpointDiscoveryPlugin.d.ts @@ -0,0 +1,41 @@ +import { + BuildHandlerOptions, + HttpHandlerOptions, + MiddlewareStack, + Pluggable, +} from "@smithy/types"; +import { + EndpointDiscoveryResolvedConfig, + PreviouslyResolved, +} from "./resolveEndpointDiscoveryConfig"; +export declare const endpointDiscoveryMiddlewareOptions: BuildHandlerOptions; +export interface EndpointDiscoveryMiddlewareConfig { + isDiscoveredEndpointRequired: boolean; + clientStack: MiddlewareStack; + options?: HttpHandlerOptions; + identifiers?: Record; +} +export declare const getEndpointDiscoveryPlugin: ( + pluginConfig: EndpointDiscoveryResolvedConfig & PreviouslyResolved, + middlewareConfig: EndpointDiscoveryMiddlewareConfig +) => Pluggable; +export declare const getEndpointDiscoveryRequiredPlugin: ( + pluginConfig: EndpointDiscoveryResolvedConfig & PreviouslyResolved, 
+ middlewareConfig: Pick< + EndpointDiscoveryMiddlewareConfig, + Exclude< + keyof EndpointDiscoveryMiddlewareConfig, + "isDiscoveredEndpointRequired" + > + > +) => Pluggable; +export declare const getEndpointDiscoveryOptionalPlugin: ( + pluginConfig: EndpointDiscoveryResolvedConfig & PreviouslyResolved, + middlewareConfig: Pick< + EndpointDiscoveryMiddlewareConfig, + Exclude< + keyof EndpointDiscoveryMiddlewareConfig, + "isDiscoveredEndpointRequired" + > + > +) => Pluggable; diff --git a/amplify/functions/fetchDocuments/node_modules/@aws-sdk/middleware-endpoint-discovery/dist-types/ts3.4/index.d.ts b/amplify/functions/fetchDocuments/node_modules/@aws-sdk/middleware-endpoint-discovery/dist-types/ts3.4/index.d.ts new file mode 100644 index 0000000..1c74159 --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@aws-sdk/middleware-endpoint-discovery/dist-types/ts3.4/index.d.ts @@ -0,0 +1,3 @@ +export * from "./configurations"; +export * from "./getEndpointDiscoveryPlugin"; +export * from "./resolveEndpointDiscoveryConfig"; diff --git a/amplify/functions/fetchDocuments/node_modules/@aws-sdk/middleware-endpoint-discovery/dist-types/ts3.4/resolveEndpointDiscoveryConfig.d.ts b/amplify/functions/fetchDocuments/node_modules/@aws-sdk/middleware-endpoint-discovery/dist-types/ts3.4/resolveEndpointDiscoveryConfig.d.ts new file mode 100644 index 0000000..eaa95f7 --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@aws-sdk/middleware-endpoint-discovery/dist-types/ts3.4/resolveEndpointDiscoveryConfig.d.ts @@ -0,0 +1,28 @@ +import { EndpointCache } from "@aws-sdk/endpoint-cache"; +import { + AwsCredentialIdentity, + MemoizedProvider, + Provider, +} from "@smithy/types"; +export interface PreviouslyResolved { + isCustomEndpoint?: boolean; + credentials: MemoizedProvider; + endpointDiscoveryEnabledProvider: Provider; +} +export interface EndpointDiscoveryInputConfig { + endpointCacheSize?: number; + endpointDiscoveryEnabled?: boolean | undefined; +} +export 
interface EndpointDiscoveryResolvedConfig { + endpointCache: EndpointCache; + endpointDiscoveryCommandCtor: new (comandConfig: any) => any; + endpointDiscoveryEnabled: Provider; + isClientEndpointDiscoveryEnabled: boolean; +} +export interface EndpointDiscoveryConfigOptions { + endpointDiscoveryCommandCtor: new (comandConfig: any) => any; +} +export declare const resolveEndpointDiscoveryConfig: ( + input: T & PreviouslyResolved & EndpointDiscoveryInputConfig, + { endpointDiscoveryCommandCtor }: EndpointDiscoveryConfigOptions +) => T & EndpointDiscoveryResolvedConfig; diff --git a/amplify/functions/fetchDocuments/node_modules/@aws-sdk/middleware-endpoint-discovery/dist-types/ts3.4/updateDiscoveredEndpointInCache.d.ts b/amplify/functions/fetchDocuments/node_modules/@aws-sdk/middleware-endpoint-discovery/dist-types/ts3.4/updateDiscoveredEndpointInCache.d.ts new file mode 100644 index 0000000..0887cb5 --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@aws-sdk/middleware-endpoint-discovery/dist-types/ts3.4/updateDiscoveredEndpointInCache.d.ts @@ -0,0 +1,15 @@ +import { EndpointDiscoveryMiddlewareConfig } from "./getEndpointDiscoveryPlugin"; +import { + EndpointDiscoveryResolvedConfig, + PreviouslyResolved, +} from "./resolveEndpointDiscoveryConfig"; +export interface UpdateDiscoveredEndpointInCacheOptions + extends EndpointDiscoveryMiddlewareConfig { + cacheKey: string; + commandName: string; + endpointDiscoveryCommandCtor: new (comandConfig: any) => any; +} +export declare const updateDiscoveredEndpointInCache: ( + config: EndpointDiscoveryResolvedConfig & PreviouslyResolved, + options: UpdateDiscoveredEndpointInCacheOptions +) => Promise; diff --git a/amplify/functions/fetchDocuments/node_modules/@aws-sdk/middleware-endpoint-discovery/dist-types/updateDiscoveredEndpointInCache.d.ts b/amplify/functions/fetchDocuments/node_modules/@aws-sdk/middleware-endpoint-discovery/dist-types/updateDiscoveredEndpointInCache.d.ts new file mode 100644 index 
0000000..993753d --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@aws-sdk/middleware-endpoint-discovery/dist-types/updateDiscoveredEndpointInCache.d.ts @@ -0,0 +1,8 @@ +import { EndpointDiscoveryMiddlewareConfig } from "./getEndpointDiscoveryPlugin"; +import { EndpointDiscoveryResolvedConfig, PreviouslyResolved } from "./resolveEndpointDiscoveryConfig"; +export interface UpdateDiscoveredEndpointInCacheOptions extends EndpointDiscoveryMiddlewareConfig { + cacheKey: string; + commandName: string; + endpointDiscoveryCommandCtor: new (comandConfig: any) => any; +} +export declare const updateDiscoveredEndpointInCache: (config: EndpointDiscoveryResolvedConfig & PreviouslyResolved, options: UpdateDiscoveredEndpointInCacheOptions) => Promise; diff --git a/amplify/functions/fetchDocuments/node_modules/@aws-sdk/middleware-endpoint-discovery/package.json b/amplify/functions/fetchDocuments/node_modules/@aws-sdk/middleware-endpoint-discovery/package.json new file mode 100644 index 0000000..44fec2e --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@aws-sdk/middleware-endpoint-discovery/package.json @@ -0,0 +1,60 @@ +{ + "name": "@aws-sdk/middleware-endpoint-discovery", + "version": "3.775.0", + "scripts": { + "build": "concurrently 'yarn:build:cjs' 'yarn:build:es' 'yarn:build:types'", + "build:cjs": "node ../../scripts/compilation/inline middleware-endpoint-discovery", + "build:es": "tsc -p tsconfig.es.json", + "build:include:deps": "lerna run --scope $npm_package_name --include-dependencies build", + "build:types": "tsc -p tsconfig.types.json", + "build:types:downlevel": "downlevel-dts dist-types dist-types/ts3.4", + "clean": "rimraf ./dist-* && rimraf *.tsbuildinfo", + "extract:docs": "api-extractor run --local", + "test": "yarn g:vitest run", + "test:integration": "yarn g:vitest run -c vitest.config.integ.ts", + "test:watch": "yarn g:vitest watch", + "test:integration:watch": "yarn g:vitest watch -c vitest.config.integ.ts" + }, + "main": 
"./dist-cjs/index.js", + "module": "./dist-es/index.js", + "types": "./dist-types/index.d.ts", + "author": { + "name": "AWS SDK for JavaScript Team", + "url": "https://aws.amazon.com/javascript/" + }, + "license": "Apache-2.0", + "devDependencies": { + "@tsconfig/recommended": "1.0.1", + "concurrently": "7.0.0", + "downlevel-dts": "0.10.1", + "rimraf": "3.0.2", + "typescript": "~5.2.2" + }, + "dependencies": { + "@aws-sdk/endpoint-cache": "3.723.0", + "@aws-sdk/types": "3.775.0", + "@smithy/node-config-provider": "^4.0.2", + "@smithy/protocol-http": "^5.1.0", + "@smithy/types": "^4.2.0", + "tslib": "^2.6.2" + }, + "engines": { + "node": ">=18.0.0" + }, + "typesVersions": { + "<4.0": { + "dist-types/*": [ + "dist-types/ts3.4/*" + ] + } + }, + "files": [ + "dist-*/**" + ], + "homepage": "https://github.com/aws/aws-sdk-js-v3/tree/main/packages/middleware-endpoint-discovery", + "repository": { + "type": "git", + "url": "https://github.com/aws/aws-sdk-js-v3.git", + "directory": "packages/middleware-endpoint-discovery" + } +} diff --git a/amplify/functions/fetchDocuments/node_modules/@aws-sdk/middleware-host-header/LICENSE b/amplify/functions/fetchDocuments/node_modules/@aws-sdk/middleware-host-header/LICENSE new file mode 100644 index 0000000..e907b58 --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@aws-sdk/middleware-host-header/LICENSE @@ -0,0 +1,201 @@ + Apache License + Version 2.0, January 2004 + http://www.apache.org/licenses/ + + TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION + + 1. Definitions. + + "License" shall mean the terms and conditions for use, reproduction, + and distribution as defined by Sections 1 through 9 of this document. + + "Licensor" shall mean the copyright owner or entity authorized by + the copyright owner that is granting the License. + + "Legal Entity" shall mean the union of the acting entity and all + other entities that control, are controlled by, or are under common + control with that entity. 
For the purposes of this definition, + "control" means (i) the power, direct or indirect, to cause the + direction or management of such entity, whether by contract or + otherwise, or (ii) ownership of fifty percent (50%) or more of the + outstanding shares, or (iii) beneficial ownership of such entity. + + "You" (or "Your") shall mean an individual or Legal Entity + exercising permissions granted by this License. + + "Source" form shall mean the preferred form for making modifications, + including but not limited to software source code, documentation + source, and configuration files. + + "Object" form shall mean any form resulting from mechanical + transformation or translation of a Source form, including but + not limited to compiled object code, generated documentation, + and conversions to other media types. + + "Work" shall mean the work of authorship, whether in Source or + Object form, made available under the License, as indicated by a + copyright notice that is included in or attached to the work + (an example is provided in the Appendix below). + + "Derivative Works" shall mean any work, whether in Source or Object + form, that is based on (or derived from) the Work and for which the + editorial revisions, annotations, elaborations, or other modifications + represent, as a whole, an original work of authorship. For the purposes + of this License, Derivative Works shall not include works that remain + separable from, or merely link (or bind by name) to the interfaces of, + the Work and Derivative Works thereof. + + "Contribution" shall mean any work of authorship, including + the original version of the Work and any modifications or additions + to that Work or Derivative Works thereof, that is intentionally + submitted to Licensor for inclusion in the Work by the copyright owner + or by an individual or Legal Entity authorized to submit on behalf of + the copyright owner. 
For the purposes of this definition, "submitted" + means any form of electronic, verbal, or written communication sent + to the Licensor or its representatives, including but not limited to + communication on electronic mailing lists, source code control systems, + and issue tracking systems that are managed by, or on behalf of, the + Licensor for the purpose of discussing and improving the Work, but + excluding communication that is conspicuously marked or otherwise + designated in writing by the copyright owner as "Not a Contribution." + + "Contributor" shall mean Licensor and any individual or Legal Entity + on behalf of whom a Contribution has been received by Licensor and + subsequently incorporated within the Work. + + 2. Grant of Copyright License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + copyright license to reproduce, prepare Derivative Works of, + publicly display, publicly perform, sublicense, and distribute the + Work and such Derivative Works in Source or Object form. + + 3. Grant of Patent License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + (except as stated in this section) patent license to make, have made, + use, offer to sell, sell, import, and otherwise transfer the Work, + where such license applies only to those patent claims licensable + by such Contributor that are necessarily infringed by their + Contribution(s) alone or by combination of their Contribution(s) + with the Work to which such Contribution(s) was submitted. 
If You + institute patent litigation against any entity (including a + cross-claim or counterclaim in a lawsuit) alleging that the Work + or a Contribution incorporated within the Work constitutes direct + or contributory patent infringement, then any patent licenses + granted to You under this License for that Work shall terminate + as of the date such litigation is filed. + + 4. Redistribution. You may reproduce and distribute copies of the + Work or Derivative Works thereof in any medium, with or without + modifications, and in Source or Object form, provided that You + meet the following conditions: + + (a) You must give any other recipients of the Work or + Derivative Works a copy of this License; and + + (b) You must cause any modified files to carry prominent notices + stating that You changed the files; and + + (c) You must retain, in the Source form of any Derivative Works + that You distribute, all copyright, patent, trademark, and + attribution notices from the Source form of the Work, + excluding those notices that do not pertain to any part of + the Derivative Works; and + + (d) If the Work includes a "NOTICE" text file as part of its + distribution, then any Derivative Works that You distribute must + include a readable copy of the attribution notices contained + within such NOTICE file, excluding those notices that do not + pertain to any part of the Derivative Works, in at least one + of the following places: within a NOTICE text file distributed + as part of the Derivative Works; within the Source form or + documentation, if provided along with the Derivative Works; or, + within a display generated by the Derivative Works, if and + wherever such third-party notices normally appear. The contents + of the NOTICE file are for informational purposes only and + do not modify the License. 
You may add Your own attribution + notices within Derivative Works that You distribute, alongside + or as an addendum to the NOTICE text from the Work, provided + that such additional attribution notices cannot be construed + as modifying the License. + + You may add Your own copyright statement to Your modifications and + may provide additional or different license terms and conditions + for use, reproduction, or distribution of Your modifications, or + for any such Derivative Works as a whole, provided Your use, + reproduction, and distribution of the Work otherwise complies with + the conditions stated in this License. + + 5. Submission of Contributions. Unless You explicitly state otherwise, + any Contribution intentionally submitted for inclusion in the Work + by You to the Licensor shall be under the terms and conditions of + this License, without any additional terms or conditions. + Notwithstanding the above, nothing herein shall supersede or modify + the terms of any separate license agreement you may have executed + with Licensor regarding such Contributions. + + 6. Trademarks. This License does not grant permission to use the trade + names, trademarks, service marks, or product names of the Licensor, + except as required for reasonable and customary use in describing the + origin of the Work and reproducing the content of the NOTICE file. + + 7. Disclaimer of Warranty. Unless required by applicable law or + agreed to in writing, Licensor provides the Work (and each + Contributor provides its Contributions) on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or + implied, including, without limitation, any warranties or conditions + of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A + PARTICULAR PURPOSE. You are solely responsible for determining the + appropriateness of using or redistributing the Work and assume any + risks associated with Your exercise of permissions under this License. + + 8. 
Limitation of Liability. In no event and under no legal theory, + whether in tort (including negligence), contract, or otherwise, + unless required by applicable law (such as deliberate and grossly + negligent acts) or agreed to in writing, shall any Contributor be + liable to You for damages, including any direct, indirect, special, + incidental, or consequential damages of any character arising as a + result of this License or out of the use or inability to use the + Work (including but not limited to damages for loss of goodwill, + work stoppage, computer failure or malfunction, or any and all + other commercial damages or losses), even if such Contributor + has been advised of the possibility of such damages. + + 9. Accepting Warranty or Additional Liability. While redistributing + the Work or Derivative Works thereof, You may choose to offer, + and charge a fee for, acceptance of support, warranty, indemnity, + or other liability obligations and/or rights consistent with this + License. However, in accepting such obligations, You may act only + on Your own behalf and on Your sole responsibility, not on behalf + of any other Contributor, and only if You agree to indemnify, + defend, and hold each Contributor harmless for any liability + incurred by, or claims asserted against, such Contributor by reason + of your accepting any such warranty or additional liability. + + END OF TERMS AND CONDITIONS + + APPENDIX: How to apply the Apache License to your work. + + To apply the Apache License to your work, attach the following + boilerplate notice, with the fields enclosed by brackets "{}" + replaced with your own identifying information. (Don't include + the brackets!) The text should be enclosed in the appropriate + comment syntax for the file format. We also recommend that a + file or class name and description of purpose be included on the + same "printed page" as the copyright notice for easier + identification within third-party archives. 
+ + Copyright 2019 Amazon.com, Inc. or its affiliates. All Rights Reserved. + + Licensed under the Apache License, Version 2.0 (the "License"); + you may not use this file except in compliance with the License. + You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + + Unless required by applicable law or agreed to in writing, software + distributed under the License is distributed on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + See the License for the specific language governing permissions and + limitations under the License. diff --git a/amplify/functions/fetchDocuments/node_modules/@aws-sdk/middleware-host-header/README.md b/amplify/functions/fetchDocuments/node_modules/@aws-sdk/middleware-host-header/README.md new file mode 100644 index 0000000..123940e --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@aws-sdk/middleware-host-header/README.md @@ -0,0 +1,4 @@ +# @aws-sdk/middleware-host-header + +[![NPM version](https://img.shields.io/npm/v/@aws-sdk/middleware-host-header/latest.svg)](https://www.npmjs.com/package/@aws-sdk/middleware-host-header) +[![NPM downloads](https://img.shields.io/npm/dm/@aws-sdk/middleware-host-header.svg)](https://www.npmjs.com/package/@aws-sdk/middleware-host-header) diff --git a/amplify/functions/fetchDocuments/node_modules/@aws-sdk/middleware-host-header/dist-cjs/index.js b/amplify/functions/fetchDocuments/node_modules/@aws-sdk/middleware-host-header/dist-cjs/index.js new file mode 100644 index 0000000..bdfe2a5 --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@aws-sdk/middleware-host-header/dist-cjs/index.js @@ -0,0 +1,69 @@ +"use strict"; +var __defProp = Object.defineProperty; +var __getOwnPropDesc = Object.getOwnPropertyDescriptor; +var __getOwnPropNames = Object.getOwnPropertyNames; +var __hasOwnProp = Object.prototype.hasOwnProperty; +var __name = (target, value) => __defProp(target, "name", { value, 
configurable: true }); +var __export = (target, all) => { + for (var name in all) + __defProp(target, name, { get: all[name], enumerable: true }); +}; +var __copyProps = (to, from, except, desc) => { + if (from && typeof from === "object" || typeof from === "function") { + for (let key of __getOwnPropNames(from)) + if (!__hasOwnProp.call(to, key) && key !== except) + __defProp(to, key, { get: () => from[key], enumerable: !(desc = __getOwnPropDesc(from, key)) || desc.enumerable }); + } + return to; +}; +var __toCommonJS = (mod) => __copyProps(__defProp({}, "__esModule", { value: true }), mod); + +// src/index.ts +var index_exports = {}; +__export(index_exports, { + getHostHeaderPlugin: () => getHostHeaderPlugin, + hostHeaderMiddleware: () => hostHeaderMiddleware, + hostHeaderMiddlewareOptions: () => hostHeaderMiddlewareOptions, + resolveHostHeaderConfig: () => resolveHostHeaderConfig +}); +module.exports = __toCommonJS(index_exports); +var import_protocol_http = require("@smithy/protocol-http"); +function resolveHostHeaderConfig(input) { + return input; +} +__name(resolveHostHeaderConfig, "resolveHostHeaderConfig"); +var hostHeaderMiddleware = /* @__PURE__ */ __name((options) => (next) => async (args) => { + if (!import_protocol_http.HttpRequest.isInstance(args.request)) return next(args); + const { request } = args; + const { handlerProtocol = "" } = options.requestHandler.metadata || {}; + if (handlerProtocol.indexOf("h2") >= 0 && !request.headers[":authority"]) { + delete request.headers["host"]; + request.headers[":authority"] = request.hostname + (request.port ? 
":" + request.port : ""); + } else if (!request.headers["host"]) { + let host = request.hostname; + if (request.port != null) host += `:${request.port}`; + request.headers["host"] = host; + } + return next(args); +}, "hostHeaderMiddleware"); +var hostHeaderMiddlewareOptions = { + name: "hostHeaderMiddleware", + step: "build", + priority: "low", + tags: ["HOST"], + override: true +}; +var getHostHeaderPlugin = /* @__PURE__ */ __name((options) => ({ + applyToStack: /* @__PURE__ */ __name((clientStack) => { + clientStack.add(hostHeaderMiddleware(options), hostHeaderMiddlewareOptions); + }, "applyToStack") +}), "getHostHeaderPlugin"); +// Annotate the CommonJS export names for ESM import in node: + +0 && (module.exports = { + resolveHostHeaderConfig, + hostHeaderMiddleware, + hostHeaderMiddlewareOptions, + getHostHeaderPlugin +}); + diff --git a/amplify/functions/fetchDocuments/node_modules/@aws-sdk/middleware-host-header/dist-es/index.js b/amplify/functions/fetchDocuments/node_modules/@aws-sdk/middleware-host-header/dist-es/index.js new file mode 100644 index 0000000..2e2fb62 --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@aws-sdk/middleware-host-header/dist-es/index.js @@ -0,0 +1,33 @@ +import { HttpRequest } from "@smithy/protocol-http"; +export function resolveHostHeaderConfig(input) { + return input; +} +export const hostHeaderMiddleware = (options) => (next) => async (args) => { + if (!HttpRequest.isInstance(args.request)) + return next(args); + const { request } = args; + const { handlerProtocol = "" } = options.requestHandler.metadata || {}; + if (handlerProtocol.indexOf("h2") >= 0 && !request.headers[":authority"]) { + delete request.headers["host"]; + request.headers[":authority"] = request.hostname + (request.port ? 
":" + request.port : ""); + } + else if (!request.headers["host"]) { + let host = request.hostname; + if (request.port != null) + host += `:${request.port}`; + request.headers["host"] = host; + } + return next(args); +}; +export const hostHeaderMiddlewareOptions = { + name: "hostHeaderMiddleware", + step: "build", + priority: "low", + tags: ["HOST"], + override: true, +}; +export const getHostHeaderPlugin = (options) => ({ + applyToStack: (clientStack) => { + clientStack.add(hostHeaderMiddleware(options), hostHeaderMiddlewareOptions); + }, +}); diff --git a/amplify/functions/fetchDocuments/node_modules/@aws-sdk/middleware-host-header/dist-types/index.d.ts b/amplify/functions/fetchDocuments/node_modules/@aws-sdk/middleware-host-header/dist-types/index.d.ts new file mode 100644 index 0000000..752bb00 --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@aws-sdk/middleware-host-header/dist-types/index.d.ts @@ -0,0 +1,35 @@ +import { AbsoluteLocation, BuildHandlerOptions, BuildMiddleware, Pluggable, RequestHandler } from "@smithy/types"; +/** + * @public + */ +export interface HostHeaderInputConfig { +} +interface PreviouslyResolved { + requestHandler: RequestHandler; +} +/** + * @internal + */ +export interface HostHeaderResolvedConfig { + /** + * The HTTP handler to use. Fetch in browser and Https in Nodejs. 
+ */ + requestHandler: RequestHandler; +} +/** + * @internal + */ +export declare function resolveHostHeaderConfig(input: T & PreviouslyResolved & HostHeaderInputConfig): T & HostHeaderResolvedConfig; +/** + * @internal + */ +export declare const hostHeaderMiddleware: (options: HostHeaderResolvedConfig) => BuildMiddleware; +/** + * @internal + */ +export declare const hostHeaderMiddlewareOptions: BuildHandlerOptions & AbsoluteLocation; +/** + * @internal + */ +export declare const getHostHeaderPlugin: (options: HostHeaderResolvedConfig) => Pluggable; +export {}; diff --git a/amplify/functions/fetchDocuments/node_modules/@aws-sdk/middleware-host-header/dist-types/ts3.4/index.d.ts b/amplify/functions/fetchDocuments/node_modules/@aws-sdk/middleware-host-header/dist-types/ts3.4/index.d.ts new file mode 100644 index 0000000..3ca5561 --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@aws-sdk/middleware-host-header/dist-types/ts3.4/index.d.ts @@ -0,0 +1,29 @@ +import { + AbsoluteLocation, + BuildHandlerOptions, + BuildMiddleware, + Pluggable, + RequestHandler, +} from "@smithy/types"; +export interface HostHeaderInputConfig {} +interface PreviouslyResolved { + requestHandler: RequestHandler; +} +export interface HostHeaderResolvedConfig { + requestHandler: RequestHandler; +} +export declare function resolveHostHeaderConfig( + input: T & PreviouslyResolved & HostHeaderInputConfig +): T & HostHeaderResolvedConfig; +export declare const hostHeaderMiddleware: < + Input extends object, + Output extends object +>( + options: HostHeaderResolvedConfig +) => BuildMiddleware; +export declare const hostHeaderMiddlewareOptions: BuildHandlerOptions & + AbsoluteLocation; +export declare const getHostHeaderPlugin: ( + options: HostHeaderResolvedConfig +) => Pluggable; +export {}; diff --git a/amplify/functions/fetchDocuments/node_modules/@aws-sdk/middleware-host-header/package.json 
b/amplify/functions/fetchDocuments/node_modules/@aws-sdk/middleware-host-header/package.json new file mode 100644 index 0000000..523f8a1 --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@aws-sdk/middleware-host-header/package.json @@ -0,0 +1,58 @@ +{ + "name": "@aws-sdk/middleware-host-header", + "version": "3.775.0", + "scripts": { + "build": "concurrently 'yarn:build:cjs' 'yarn:build:es' 'yarn:build:types'", + "build:cjs": "node ../../scripts/compilation/inline middleware-host-header", + "build:es": "tsc -p tsconfig.es.json", + "build:include:deps": "lerna run --scope $npm_package_name --include-dependencies build", + "build:types": "tsc -p tsconfig.types.json", + "build:types:downlevel": "downlevel-dts dist-types dist-types/ts3.4", + "clean": "rimraf ./dist-* && rimraf *.tsbuildinfo", + "extract:docs": "api-extractor run --local", + "test": "yarn g:vitest run", + "test:integration": "yarn g:vitest run -c vitest.config.integ.ts", + "test:watch": "yarn g:vitest watch", + "test:integration:watch": "yarn g:vitest watch -c vitest.config.integ.ts" + }, + "main": "./dist-cjs/index.js", + "module": "./dist-es/index.js", + "types": "./dist-types/index.d.ts", + "author": { + "name": "AWS SDK for JavaScript Team", + "url": "https://aws.amazon.com/javascript/" + }, + "license": "Apache-2.0", + "dependencies": { + "@aws-sdk/types": "3.775.0", + "@smithy/protocol-http": "^5.1.0", + "@smithy/types": "^4.2.0", + "tslib": "^2.6.2" + }, + "engines": { + "node": ">=18.0.0" + }, + "typesVersions": { + "<4.0": { + "dist-types/*": [ + "dist-types/ts3.4/*" + ] + } + }, + "files": [ + "dist-*/**" + ], + "homepage": "https://github.com/aws/aws-sdk-js-v3/tree/main/packages/middleware-host-header", + "repository": { + "type": "git", + "url": "https://github.com/aws/aws-sdk-js-v3.git", + "directory": "packages/middleware-host-header" + }, + "devDependencies": { + "@tsconfig/recommended": "1.0.1", + "concurrently": "7.0.0", + "downlevel-dts": "0.10.1", + "rimraf": "3.0.2", 
+ "typescript": "~5.2.2" + } +} diff --git a/amplify/functions/fetchDocuments/node_modules/@aws-sdk/middleware-logger/LICENSE b/amplify/functions/fetchDocuments/node_modules/@aws-sdk/middleware-logger/LICENSE new file mode 100644 index 0000000..74d4e5c --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@aws-sdk/middleware-logger/LICENSE @@ -0,0 +1,201 @@ +Apache License + Version 2.0, January 2004 + http://www.apache.org/licenses/ + + TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION + + 1. Definitions. + + "License" shall mean the terms and conditions for use, reproduction, + and distribution as defined by Sections 1 through 9 of this document. + + "Licensor" shall mean the copyright owner or entity authorized by + the copyright owner that is granting the License. + + "Legal Entity" shall mean the union of the acting entity and all + other entities that control, are controlled by, or are under common + control with that entity. For the purposes of this definition, + "control" means (i) the power, direct or indirect, to cause the + direction or management of such entity, whether by contract or + otherwise, or (ii) ownership of fifty percent (50%) or more of the + outstanding shares, or (iii) beneficial ownership of such entity. + + "You" (or "Your") shall mean an individual or Legal Entity + exercising permissions granted by this License. + + "Source" form shall mean the preferred form for making modifications, + including but not limited to software source code, documentation + source, and configuration files. + + "Object" form shall mean any form resulting from mechanical + transformation or translation of a Source form, including but + not limited to compiled object code, generated documentation, + and conversions to other media types. 
+ + "Work" shall mean the work of authorship, whether in Source or + Object form, made available under the License, as indicated by a + copyright notice that is included in or attached to the work + (an example is provided in the Appendix below). + + "Derivative Works" shall mean any work, whether in Source or Object + form, that is based on (or derived from) the Work and for which the + editorial revisions, annotations, elaborations, or other modifications + represent, as a whole, an original work of authorship. For the purposes + of this License, Derivative Works shall not include works that remain + separable from, or merely link (or bind by name) to the interfaces of, + the Work and Derivative Works thereof. + + "Contribution" shall mean any work of authorship, including + the original version of the Work and any modifications or additions + to that Work or Derivative Works thereof, that is intentionally + submitted to Licensor for inclusion in the Work by the copyright owner + or by an individual or Legal Entity authorized to submit on behalf of + the copyright owner. For the purposes of this definition, "submitted" + means any form of electronic, verbal, or written communication sent + to the Licensor or its representatives, including but not limited to + communication on electronic mailing lists, source code control systems, + and issue tracking systems that are managed by, or on behalf of, the + Licensor for the purpose of discussing and improving the Work, but + excluding communication that is conspicuously marked or otherwise + designated in writing by the copyright owner as "Not a Contribution." + + "Contributor" shall mean Licensor and any individual or Legal Entity + on behalf of whom a Contribution has been received by Licensor and + subsequently incorporated within the Work. + + 2. Grant of Copyright License. 
Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + copyright license to reproduce, prepare Derivative Works of, + publicly display, publicly perform, sublicense, and distribute the + Work and such Derivative Works in Source or Object form. + + 3. Grant of Patent License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + (except as stated in this section) patent license to make, have made, + use, offer to sell, sell, import, and otherwise transfer the Work, + where such license applies only to those patent claims licensable + by such Contributor that are necessarily infringed by their + Contribution(s) alone or by combination of their Contribution(s) + with the Work to which such Contribution(s) was submitted. If You + institute patent litigation against any entity (including a + cross-claim or counterclaim in a lawsuit) alleging that the Work + or a Contribution incorporated within the Work constitutes direct + or contributory patent infringement, then any patent licenses + granted to You under this License for that Work shall terminate + as of the date such litigation is filed. + + 4. Redistribution. 
You may reproduce and distribute copies of the + Work or Derivative Works thereof in any medium, with or without + modifications, and in Source or Object form, provided that You + meet the following conditions: + + (a) You must give any other recipients of the Work or + Derivative Works a copy of this License; and + + (b) You must cause any modified files to carry prominent notices + stating that You changed the files; and + + (c) You must retain, in the Source form of any Derivative Works + that You distribute, all copyright, patent, trademark, and + attribution notices from the Source form of the Work, + excluding those notices that do not pertain to any part of + the Derivative Works; and + + (d) If the Work includes a "NOTICE" text file as part of its + distribution, then any Derivative Works that You distribute must + include a readable copy of the attribution notices contained + within such NOTICE file, excluding those notices that do not + pertain to any part of the Derivative Works, in at least one + of the following places: within a NOTICE text file distributed + as part of the Derivative Works; within the Source form or + documentation, if provided along with the Derivative Works; or, + within a display generated by the Derivative Works, if and + wherever such third-party notices normally appear. The contents + of the NOTICE file are for informational purposes only and + do not modify the License. You may add Your own attribution + notices within Derivative Works that You distribute, alongside + or as an addendum to the NOTICE text from the Work, provided + that such additional attribution notices cannot be construed + as modifying the License. 
+ + You may add Your own copyright statement to Your modifications and + may provide additional or different license terms and conditions + for use, reproduction, or distribution of Your modifications, or + for any such Derivative Works as a whole, provided Your use, + reproduction, and distribution of the Work otherwise complies with + the conditions stated in this License. + + 5. Submission of Contributions. Unless You explicitly state otherwise, + any Contribution intentionally submitted for inclusion in the Work + by You to the Licensor shall be under the terms and conditions of + this License, without any additional terms or conditions. + Notwithstanding the above, nothing herein shall supersede or modify + the terms of any separate license agreement you may have executed + with Licensor regarding such Contributions. + + 6. Trademarks. This License does not grant permission to use the trade + names, trademarks, service marks, or product names of the Licensor, + except as required for reasonable and customary use in describing the + origin of the Work and reproducing the content of the NOTICE file. + + 7. Disclaimer of Warranty. Unless required by applicable law or + agreed to in writing, Licensor provides the Work (and each + Contributor provides its Contributions) on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or + implied, including, without limitation, any warranties or conditions + of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A + PARTICULAR PURPOSE. You are solely responsible for determining the + appropriateness of using or redistributing the Work and assume any + risks associated with Your exercise of permissions under this License. + + 8. Limitation of Liability. 
In no event and under no legal theory, + whether in tort (including negligence), contract, or otherwise, + unless required by applicable law (such as deliberate and grossly + negligent acts) or agreed to in writing, shall any Contributor be + liable to You for damages, including any direct, indirect, special, + incidental, or consequential damages of any character arising as a + result of this License or out of the use or inability to use the + Work (including but not limited to damages for loss of goodwill, + work stoppage, computer failure or malfunction, or any and all + other commercial damages or losses), even if such Contributor + has been advised of the possibility of such damages. + + 9. Accepting Warranty or Additional Liability. While redistributing + the Work or Derivative Works thereof, You may choose to offer, + and charge a fee for, acceptance of support, warranty, indemnity, + or other liability obligations and/or rights consistent with this + License. However, in accepting such obligations, You may act only + on Your own behalf and on Your sole responsibility, not on behalf + of any other Contributor, and only if You agree to indemnify, + defend, and hold each Contributor harmless for any liability + incurred by, or claims asserted against, such Contributor by reason + of your accepting any such warranty or additional liability. + + END OF TERMS AND CONDITIONS + + APPENDIX: How to apply the Apache License to your work. + + To apply the Apache License to your work, attach the following + boilerplate notice, with the fields enclosed by brackets "{}" + replaced with your own identifying information. (Don't include + the brackets!) The text should be enclosed in the appropriate + comment syntax for the file format. We also recommend that a + file or class name and description of purpose be included on the + same "printed page" as the copyright notice for easier + identification within third-party archives. + + Copyright 2020 Amazon.com, Inc. 
or its affiliates. All Rights Reserved. + + Licensed under the Apache License, Version 2.0 (the "License"); + you may not use this file except in compliance with the License. + You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + + Unless required by applicable law or agreed to in writing, software + distributed under the License is distributed on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + See the License for the specific language governing permissions and + limitations under the License. \ No newline at end of file diff --git a/amplify/functions/fetchDocuments/node_modules/@aws-sdk/middleware-logger/README.md b/amplify/functions/fetchDocuments/node_modules/@aws-sdk/middleware-logger/README.md new file mode 100644 index 0000000..861fa43 --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@aws-sdk/middleware-logger/README.md @@ -0,0 +1,4 @@ +# @aws-sdk/middleware-logger + +[![NPM version](https://img.shields.io/npm/v/@aws-sdk/middleware-logger/latest.svg)](https://www.npmjs.com/package/@aws-sdk/middleware-logger) +[![NPM downloads](https://img.shields.io/npm/dm/@aws-sdk/middleware-logger.svg)](https://www.npmjs.com/package/@aws-sdk/middleware-logger) diff --git a/amplify/functions/fetchDocuments/node_modules/@aws-sdk/middleware-logger/dist-cjs/index.js b/amplify/functions/fetchDocuments/node_modules/@aws-sdk/middleware-logger/dist-cjs/index.js new file mode 100644 index 0000000..b1db308 --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@aws-sdk/middleware-logger/dist-cjs/index.js @@ -0,0 +1,79 @@ +"use strict"; +var __defProp = Object.defineProperty; +var __getOwnPropDesc = Object.getOwnPropertyDescriptor; +var __getOwnPropNames = Object.getOwnPropertyNames; +var __hasOwnProp = Object.prototype.hasOwnProperty; +var __name = (target, value) => __defProp(target, "name", { value, configurable: true }); +var __export = (target, all) => { + for (var name 
in all) + __defProp(target, name, { get: all[name], enumerable: true }); +}; +var __copyProps = (to, from, except, desc) => { + if (from && typeof from === "object" || typeof from === "function") { + for (let key of __getOwnPropNames(from)) + if (!__hasOwnProp.call(to, key) && key !== except) + __defProp(to, key, { get: () => from[key], enumerable: !(desc = __getOwnPropDesc(from, key)) || desc.enumerable }); + } + return to; +}; +var __toCommonJS = (mod) => __copyProps(__defProp({}, "__esModule", { value: true }), mod); + +// src/index.ts +var index_exports = {}; +__export(index_exports, { + getLoggerPlugin: () => getLoggerPlugin, + loggerMiddleware: () => loggerMiddleware, + loggerMiddlewareOptions: () => loggerMiddlewareOptions +}); +module.exports = __toCommonJS(index_exports); + +// src/loggerMiddleware.ts +var loggerMiddleware = /* @__PURE__ */ __name(() => (next, context) => async (args) => { + try { + const response = await next(args); + const { clientName, commandName, logger, dynamoDbDocumentClientOptions = {} } = context; + const { overrideInputFilterSensitiveLog, overrideOutputFilterSensitiveLog } = dynamoDbDocumentClientOptions; + const inputFilterSensitiveLog = overrideInputFilterSensitiveLog ?? context.inputFilterSensitiveLog; + const outputFilterSensitiveLog = overrideOutputFilterSensitiveLog ?? context.outputFilterSensitiveLog; + const { $metadata, ...outputWithoutMetadata } = response.output; + logger?.info?.({ + clientName, + commandName, + input: inputFilterSensitiveLog(args.input), + output: outputFilterSensitiveLog(outputWithoutMetadata), + metadata: $metadata + }); + return response; + } catch (error) { + const { clientName, commandName, logger, dynamoDbDocumentClientOptions = {} } = context; + const { overrideInputFilterSensitiveLog } = dynamoDbDocumentClientOptions; + const inputFilterSensitiveLog = overrideInputFilterSensitiveLog ?? 
context.inputFilterSensitiveLog; + logger?.error?.({ + clientName, + commandName, + input: inputFilterSensitiveLog(args.input), + error, + metadata: error.$metadata + }); + throw error; + } +}, "loggerMiddleware"); +var loggerMiddlewareOptions = { + name: "loggerMiddleware", + tags: ["LOGGER"], + step: "initialize", + override: true +}; +var getLoggerPlugin = /* @__PURE__ */ __name((options) => ({ + applyToStack: /* @__PURE__ */ __name((clientStack) => { + clientStack.add(loggerMiddleware(), loggerMiddlewareOptions); + }, "applyToStack") +}), "getLoggerPlugin"); +// Annotate the CommonJS export names for ESM import in node: + +0 && (module.exports = { + loggerMiddleware, + loggerMiddlewareOptions, + getLoggerPlugin +}); + diff --git a/amplify/functions/fetchDocuments/node_modules/@aws-sdk/middleware-logger/dist-es/index.js b/amplify/functions/fetchDocuments/node_modules/@aws-sdk/middleware-logger/dist-es/index.js new file mode 100644 index 0000000..171e3bc --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@aws-sdk/middleware-logger/dist-es/index.js @@ -0,0 +1 @@ +export * from "./loggerMiddleware"; diff --git a/amplify/functions/fetchDocuments/node_modules/@aws-sdk/middleware-logger/dist-es/loggerMiddleware.js b/amplify/functions/fetchDocuments/node_modules/@aws-sdk/middleware-logger/dist-es/loggerMiddleware.js new file mode 100644 index 0000000..50da4cc --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@aws-sdk/middleware-logger/dist-es/loggerMiddleware.js @@ -0,0 +1,42 @@ +export const loggerMiddleware = () => (next, context) => async (args) => { + try { + const response = await next(args); + const { clientName, commandName, logger, dynamoDbDocumentClientOptions = {} } = context; + const { overrideInputFilterSensitiveLog, overrideOutputFilterSensitiveLog } = dynamoDbDocumentClientOptions; + const inputFilterSensitiveLog = overrideInputFilterSensitiveLog ?? 
context.inputFilterSensitiveLog; + const outputFilterSensitiveLog = overrideOutputFilterSensitiveLog ?? context.outputFilterSensitiveLog; + const { $metadata, ...outputWithoutMetadata } = response.output; + logger?.info?.({ + clientName, + commandName, + input: inputFilterSensitiveLog(args.input), + output: outputFilterSensitiveLog(outputWithoutMetadata), + metadata: $metadata, + }); + return response; + } + catch (error) { + const { clientName, commandName, logger, dynamoDbDocumentClientOptions = {} } = context; + const { overrideInputFilterSensitiveLog } = dynamoDbDocumentClientOptions; + const inputFilterSensitiveLog = overrideInputFilterSensitiveLog ?? context.inputFilterSensitiveLog; + logger?.error?.({ + clientName, + commandName, + input: inputFilterSensitiveLog(args.input), + error, + metadata: error.$metadata, + }); + throw error; + } +}; +export const loggerMiddlewareOptions = { + name: "loggerMiddleware", + tags: ["LOGGER"], + step: "initialize", + override: true, +}; +export const getLoggerPlugin = (options) => ({ + applyToStack: (clientStack) => { + clientStack.add(loggerMiddleware(), loggerMiddlewareOptions); + }, +}); diff --git a/amplify/functions/fetchDocuments/node_modules/@aws-sdk/middleware-logger/dist-types/index.d.ts b/amplify/functions/fetchDocuments/node_modules/@aws-sdk/middleware-logger/dist-types/index.d.ts new file mode 100644 index 0000000..171e3bc --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@aws-sdk/middleware-logger/dist-types/index.d.ts @@ -0,0 +1 @@ +export * from "./loggerMiddleware"; diff --git a/amplify/functions/fetchDocuments/node_modules/@aws-sdk/middleware-logger/dist-types/loggerMiddleware.d.ts b/amplify/functions/fetchDocuments/node_modules/@aws-sdk/middleware-logger/dist-types/loggerMiddleware.d.ts new file mode 100644 index 0000000..5712017 --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@aws-sdk/middleware-logger/dist-types/loggerMiddleware.d.ts @@ -0,0 +1,4 @@ +import { 
AbsoluteLocation, HandlerExecutionContext, InitializeHandler, InitializeHandlerOptions, MetadataBearer, Pluggable } from "@smithy/types"; +export declare const loggerMiddleware: () => (next: InitializeHandler, context: HandlerExecutionContext) => InitializeHandler; +export declare const loggerMiddlewareOptions: InitializeHandlerOptions & AbsoluteLocation; +export declare const getLoggerPlugin: (options: any) => Pluggable; diff --git a/amplify/functions/fetchDocuments/node_modules/@aws-sdk/middleware-logger/dist-types/ts3.4/index.d.ts b/amplify/functions/fetchDocuments/node_modules/@aws-sdk/middleware-logger/dist-types/ts3.4/index.d.ts new file mode 100644 index 0000000..171e3bc --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@aws-sdk/middleware-logger/dist-types/ts3.4/index.d.ts @@ -0,0 +1 @@ +export * from "./loggerMiddleware"; diff --git a/amplify/functions/fetchDocuments/node_modules/@aws-sdk/middleware-logger/dist-types/ts3.4/loggerMiddleware.d.ts b/amplify/functions/fetchDocuments/node_modules/@aws-sdk/middleware-logger/dist-types/ts3.4/loggerMiddleware.d.ts new file mode 100644 index 0000000..10ded9e --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@aws-sdk/middleware-logger/dist-types/ts3.4/loggerMiddleware.d.ts @@ -0,0 +1,17 @@ +import { + AbsoluteLocation, + HandlerExecutionContext, + InitializeHandler, + InitializeHandlerOptions, + MetadataBearer, + Pluggable, +} from "@smithy/types"; +export declare const loggerMiddleware: () => < + Output extends MetadataBearer = MetadataBearer +>( + next: InitializeHandler, + context: HandlerExecutionContext +) => InitializeHandler; +export declare const loggerMiddlewareOptions: InitializeHandlerOptions & + AbsoluteLocation; +export declare const getLoggerPlugin: (options: any) => Pluggable; diff --git a/amplify/functions/fetchDocuments/node_modules/@aws-sdk/middleware-logger/package.json b/amplify/functions/fetchDocuments/node_modules/@aws-sdk/middleware-logger/package.json new file 
mode 100644 index 0000000..7187da9 --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@aws-sdk/middleware-logger/package.json @@ -0,0 +1,58 @@ +{ + "name": "@aws-sdk/middleware-logger", + "version": "3.775.0", + "scripts": { + "build": "concurrently 'yarn:build:cjs' 'yarn:build:es' 'yarn:build:types'", + "build:cjs": "node ../../scripts/compilation/inline middleware-logger", + "build:es": "tsc -p tsconfig.es.json", + "build:include:deps": "lerna run --scope $npm_package_name --include-dependencies build", + "build:types": "tsc -p tsconfig.types.json", + "build:types:downlevel": "downlevel-dts dist-types dist-types/ts3.4", + "clean": "rimraf ./dist-* && rimraf *.tsbuildinfo", + "test": "yarn g:vitest run", + "test:integration": "yarn g:vitest run -c vitest.config.integ.ts", + "test:watch": "yarn g:vitest watch", + "test:integration:watch": "yarn g:vitest watch -c vitest.config.integ.ts" + }, + "author": { + "name": "AWS SDK for JavaScript Team", + "email": "", + "url": "https://aws.amazon.com/javascript/" + }, + "license": "Apache-2.0", + "main": "./dist-cjs/index.js", + "module": "./dist-es/index.js", + "types": "./dist-types/index.d.ts", + "dependencies": { + "@aws-sdk/types": "3.775.0", + "@smithy/types": "^4.2.0", + "tslib": "^2.6.2" + }, + "devDependencies": { + "@tsconfig/recommended": "1.0.1", + "@types/node": "^18.19.69", + "concurrently": "7.0.0", + "downlevel-dts": "0.10.1", + "rimraf": "3.0.2", + "typescript": "~5.2.2" + }, + "engines": { + "node": ">=18.0.0" + }, + "typesVersions": { + "<4.0": { + "dist-types/*": [ + "dist-types/ts3.4/*" + ] + } + }, + "files": [ + "dist-*/**" + ], + "homepage": "https://github.com/aws/aws-sdk-js-v3/tree/main/packages/middleware-logger", + "repository": { + "type": "git", + "url": "https://github.com/aws/aws-sdk-js-v3.git", + "directory": "packages/middleware-logger" + } +} diff --git a/amplify/functions/fetchDocuments/node_modules/@aws-sdk/middleware-recursion-detection/LICENSE 
b/amplify/functions/fetchDocuments/node_modules/@aws-sdk/middleware-recursion-detection/LICENSE new file mode 100644 index 0000000..e907b58 --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@aws-sdk/middleware-recursion-detection/LICENSE @@ -0,0 +1,201 @@ + Apache License + Version 2.0, January 2004 + http://www.apache.org/licenses/ + + TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION + + 1. Definitions. + + "License" shall mean the terms and conditions for use, reproduction, + and distribution as defined by Sections 1 through 9 of this document. + + "Licensor" shall mean the copyright owner or entity authorized by + the copyright owner that is granting the License. + + "Legal Entity" shall mean the union of the acting entity and all + other entities that control, are controlled by, or are under common + control with that entity. For the purposes of this definition, + "control" means (i) the power, direct or indirect, to cause the + direction or management of such entity, whether by contract or + otherwise, or (ii) ownership of fifty percent (50%) or more of the + outstanding shares, or (iii) beneficial ownership of such entity. + + "You" (or "Your") shall mean an individual or Legal Entity + exercising permissions granted by this License. + + "Source" form shall mean the preferred form for making modifications, + including but not limited to software source code, documentation + source, and configuration files. + + "Object" form shall mean any form resulting from mechanical + transformation or translation of a Source form, including but + not limited to compiled object code, generated documentation, + and conversions to other media types. + + "Work" shall mean the work of authorship, whether in Source or + Object form, made available under the License, as indicated by a + copyright notice that is included in or attached to the work + (an example is provided in the Appendix below). 
+ + "Derivative Works" shall mean any work, whether in Source or Object + form, that is based on (or derived from) the Work and for which the + editorial revisions, annotations, elaborations, or other modifications + represent, as a whole, an original work of authorship. For the purposes + of this License, Derivative Works shall not include works that remain + separable from, or merely link (or bind by name) to the interfaces of, + the Work and Derivative Works thereof. + + "Contribution" shall mean any work of authorship, including + the original version of the Work and any modifications or additions + to that Work or Derivative Works thereof, that is intentionally + submitted to Licensor for inclusion in the Work by the copyright owner + or by an individual or Legal Entity authorized to submit on behalf of + the copyright owner. For the purposes of this definition, "submitted" + means any form of electronic, verbal, or written communication sent + to the Licensor or its representatives, including but not limited to + communication on electronic mailing lists, source code control systems, + and issue tracking systems that are managed by, or on behalf of, the + Licensor for the purpose of discussing and improving the Work, but + excluding communication that is conspicuously marked or otherwise + designated in writing by the copyright owner as "Not a Contribution." + + "Contributor" shall mean Licensor and any individual or Legal Entity + on behalf of whom a Contribution has been received by Licensor and + subsequently incorporated within the Work. + + 2. Grant of Copyright License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + copyright license to reproduce, prepare Derivative Works of, + publicly display, publicly perform, sublicense, and distribute the + Work and such Derivative Works in Source or Object form. + + 3. 
Grant of Patent License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + (except as stated in this section) patent license to make, have made, + use, offer to sell, sell, import, and otherwise transfer the Work, + where such license applies only to those patent claims licensable + by such Contributor that are necessarily infringed by their + Contribution(s) alone or by combination of their Contribution(s) + with the Work to which such Contribution(s) was submitted. If You + institute patent litigation against any entity (including a + cross-claim or counterclaim in a lawsuit) alleging that the Work + or a Contribution incorporated within the Work constitutes direct + or contributory patent infringement, then any patent licenses + granted to You under this License for that Work shall terminate + as of the date such litigation is filed. + + 4. Redistribution. You may reproduce and distribute copies of the + Work or Derivative Works thereof in any medium, with or without + modifications, and in Source or Object form, provided that You + meet the following conditions: + + (a) You must give any other recipients of the Work or + Derivative Works a copy of this License; and + + (b) You must cause any modified files to carry prominent notices + stating that You changed the files; and + + (c) You must retain, in the Source form of any Derivative Works + that You distribute, all copyright, patent, trademark, and + attribution notices from the Source form of the Work, + excluding those notices that do not pertain to any part of + the Derivative Works; and + + (d) If the Work includes a "NOTICE" text file as part of its + distribution, then any Derivative Works that You distribute must + include a readable copy of the attribution notices contained + within such NOTICE file, excluding those notices that do not + pertain to any part of the Derivative 
Works, in at least one + of the following places: within a NOTICE text file distributed + as part of the Derivative Works; within the Source form or + documentation, if provided along with the Derivative Works; or, + within a display generated by the Derivative Works, if and + wherever such third-party notices normally appear. The contents + of the NOTICE file are for informational purposes only and + do not modify the License. You may add Your own attribution + notices within Derivative Works that You distribute, alongside + or as an addendum to the NOTICE text from the Work, provided + that such additional attribution notices cannot be construed + as modifying the License. + + You may add Your own copyright statement to Your modifications and + may provide additional or different license terms and conditions + for use, reproduction, or distribution of Your modifications, or + for any such Derivative Works as a whole, provided Your use, + reproduction, and distribution of the Work otherwise complies with + the conditions stated in this License. + + 5. Submission of Contributions. Unless You explicitly state otherwise, + any Contribution intentionally submitted for inclusion in the Work + by You to the Licensor shall be under the terms and conditions of + this License, without any additional terms or conditions. + Notwithstanding the above, nothing herein shall supersede or modify + the terms of any separate license agreement you may have executed + with Licensor regarding such Contributions. + + 6. Trademarks. This License does not grant permission to use the trade + names, trademarks, service marks, or product names of the Licensor, + except as required for reasonable and customary use in describing the + origin of the Work and reproducing the content of the NOTICE file. + + 7. Disclaimer of Warranty. 
Unless required by applicable law or + agreed to in writing, Licensor provides the Work (and each + Contributor provides its Contributions) on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or + implied, including, without limitation, any warranties or conditions + of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A + PARTICULAR PURPOSE. You are solely responsible for determining the + appropriateness of using or redistributing the Work and assume any + risks associated with Your exercise of permissions under this License. + + 8. Limitation of Liability. In no event and under no legal theory, + whether in tort (including negligence), contract, or otherwise, + unless required by applicable law (such as deliberate and grossly + negligent acts) or agreed to in writing, shall any Contributor be + liable to You for damages, including any direct, indirect, special, + incidental, or consequential damages of any character arising as a + result of this License or out of the use or inability to use the + Work (including but not limited to damages for loss of goodwill, + work stoppage, computer failure or malfunction, or any and all + other commercial damages or losses), even if such Contributor + has been advised of the possibility of such damages. + + 9. Accepting Warranty or Additional Liability. While redistributing + the Work or Derivative Works thereof, You may choose to offer, + and charge a fee for, acceptance of support, warranty, indemnity, + or other liability obligations and/or rights consistent with this + License. However, in accepting such obligations, You may act only + on Your own behalf and on Your sole responsibility, not on behalf + of any other Contributor, and only if You agree to indemnify, + defend, and hold each Contributor harmless for any liability + incurred by, or claims asserted against, such Contributor by reason + of your accepting any such warranty or additional liability. 
+ + END OF TERMS AND CONDITIONS + + APPENDIX: How to apply the Apache License to your work. + + To apply the Apache License to your work, attach the following + boilerplate notice, with the fields enclosed by brackets "{}" + replaced with your own identifying information. (Don't include + the brackets!) The text should be enclosed in the appropriate + comment syntax for the file format. We also recommend that a + file or class name and description of purpose be included on the + same "printed page" as the copyright notice for easier + identification within third-party archives. + + Copyright 2019 Amazon.com, Inc. or its affiliates. All Rights Reserved. + + Licensed under the Apache License, Version 2.0 (the "License"); + you may not use this file except in compliance with the License. + You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + + Unless required by applicable law or agreed to in writing, software + distributed under the License is distributed on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + See the License for the specific language governing permissions and + limitations under the License. 
diff --git a/amplify/functions/fetchDocuments/node_modules/@aws-sdk/middleware-recursion-detection/README.md b/amplify/functions/fetchDocuments/node_modules/@aws-sdk/middleware-recursion-detection/README.md new file mode 100644 index 0000000..2d5437e --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@aws-sdk/middleware-recursion-detection/README.md @@ -0,0 +1,10 @@ +# @aws-sdk/middleware-recursion-detection + +[![NPM version](https://img.shields.io/npm/v/@aws-sdk/middleware-recursion-detection/latest.svg)](https://www.npmjs.com/package/@aws-sdk/middleware-recursion-detection) +[![NPM downloads](https://img.shields.io/npm/dm/@aws-sdk/middleware-recursion-detection.svg)](https://www.npmjs.com/package/@aws-sdk/middleware-recursion-detection) + +> An internal package + +## Usage + +You probably shouldn't, at least directly. diff --git a/amplify/functions/fetchDocuments/node_modules/@aws-sdk/middleware-recursion-detection/dist-cjs/index.js b/amplify/functions/fetchDocuments/node_modules/@aws-sdk/middleware-recursion-detection/dist-cjs/index.js new file mode 100644 index 0000000..a387687 --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@aws-sdk/middleware-recursion-detection/dist-cjs/index.js @@ -0,0 +1,72 @@ +"use strict"; +var __defProp = Object.defineProperty; +var __getOwnPropDesc = Object.getOwnPropertyDescriptor; +var __getOwnPropNames = Object.getOwnPropertyNames; +var __hasOwnProp = Object.prototype.hasOwnProperty; +var __name = (target, value) => __defProp(target, "name", { value, configurable: true }); +var __export = (target, all) => { + for (var name in all) + __defProp(target, name, { get: all[name], enumerable: true }); +}; +var __copyProps = (to, from, except, desc) => { + if (from && typeof from === "object" || typeof from === "function") { + for (let key of __getOwnPropNames(from)) + if (!__hasOwnProp.call(to, key) && key !== except) + __defProp(to, key, { get: () => from[key], enumerable: !(desc = __getOwnPropDesc(from, 
key)) || desc.enumerable }); + } + return to; +}; +var __toCommonJS = (mod) => __copyProps(__defProp({}, "__esModule", { value: true }), mod); + +// src/index.ts +var index_exports = {}; +__export(index_exports, { + addRecursionDetectionMiddlewareOptions: () => addRecursionDetectionMiddlewareOptions, + getRecursionDetectionPlugin: () => getRecursionDetectionPlugin, + recursionDetectionMiddleware: () => recursionDetectionMiddleware +}); +module.exports = __toCommonJS(index_exports); +var import_protocol_http = require("@smithy/protocol-http"); +var TRACE_ID_HEADER_NAME = "X-Amzn-Trace-Id"; +var ENV_LAMBDA_FUNCTION_NAME = "AWS_LAMBDA_FUNCTION_NAME"; +var ENV_TRACE_ID = "_X_AMZN_TRACE_ID"; +var recursionDetectionMiddleware = /* @__PURE__ */ __name((options) => (next) => async (args) => { + const { request } = args; + if (!import_protocol_http.HttpRequest.isInstance(request) || options.runtime !== "node") { + return next(args); + } + const traceIdHeader = Object.keys(request.headers ?? {}).find((h) => h.toLowerCase() === TRACE_ID_HEADER_NAME.toLowerCase()) ?? 
TRACE_ID_HEADER_NAME; + if (request.headers.hasOwnProperty(traceIdHeader)) { + return next(args); + } + const functionName = process.env[ENV_LAMBDA_FUNCTION_NAME]; + const traceId = process.env[ENV_TRACE_ID]; + const nonEmptyString = /* @__PURE__ */ __name((str) => typeof str === "string" && str.length > 0, "nonEmptyString"); + if (nonEmptyString(functionName) && nonEmptyString(traceId)) { + request.headers[TRACE_ID_HEADER_NAME] = traceId; + } + return next({ + ...args, + request + }); +}, "recursionDetectionMiddleware"); +var addRecursionDetectionMiddlewareOptions = { + step: "build", + tags: ["RECURSION_DETECTION"], + name: "recursionDetectionMiddleware", + override: true, + priority: "low" +}; +var getRecursionDetectionPlugin = /* @__PURE__ */ __name((options) => ({ + applyToStack: /* @__PURE__ */ __name((clientStack) => { + clientStack.add(recursionDetectionMiddleware(options), addRecursionDetectionMiddlewareOptions); + }, "applyToStack") +}), "getRecursionDetectionPlugin"); +// Annotate the CommonJS export names for ESM import in node: + +0 && (module.exports = { + recursionDetectionMiddleware, + addRecursionDetectionMiddlewareOptions, + getRecursionDetectionPlugin +}); + diff --git a/amplify/functions/fetchDocuments/node_modules/@aws-sdk/middleware-recursion-detection/dist-es/index.js b/amplify/functions/fetchDocuments/node_modules/@aws-sdk/middleware-recursion-detection/dist-es/index.js new file mode 100644 index 0000000..8ac4748 --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@aws-sdk/middleware-recursion-detection/dist-es/index.js @@ -0,0 +1,37 @@ +import { HttpRequest } from "@smithy/protocol-http"; +const TRACE_ID_HEADER_NAME = "X-Amzn-Trace-Id"; +const ENV_LAMBDA_FUNCTION_NAME = "AWS_LAMBDA_FUNCTION_NAME"; +const ENV_TRACE_ID = "_X_AMZN_TRACE_ID"; +export const recursionDetectionMiddleware = (options) => (next) => async (args) => { + const { request } = args; + if (!HttpRequest.isInstance(request) || options.runtime !== "node") { + 
return next(args); + } + const traceIdHeader = Object.keys(request.headers ?? {}).find((h) => h.toLowerCase() === TRACE_ID_HEADER_NAME.toLowerCase()) ?? + TRACE_ID_HEADER_NAME; + if (request.headers.hasOwnProperty(traceIdHeader)) { + return next(args); + } + const functionName = process.env[ENV_LAMBDA_FUNCTION_NAME]; + const traceId = process.env[ENV_TRACE_ID]; + const nonEmptyString = (str) => typeof str === "string" && str.length > 0; + if (nonEmptyString(functionName) && nonEmptyString(traceId)) { + request.headers[TRACE_ID_HEADER_NAME] = traceId; + } + return next({ + ...args, + request, + }); +}; +export const addRecursionDetectionMiddlewareOptions = { + step: "build", + tags: ["RECURSION_DETECTION"], + name: "recursionDetectionMiddleware", + override: true, + priority: "low", +}; +export const getRecursionDetectionPlugin = (options) => ({ + applyToStack: (clientStack) => { + clientStack.add(recursionDetectionMiddleware(options), addRecursionDetectionMiddlewareOptions); + }, +}); diff --git a/amplify/functions/fetchDocuments/node_modules/@aws-sdk/middleware-recursion-detection/dist-types/index.d.ts b/amplify/functions/fetchDocuments/node_modules/@aws-sdk/middleware-recursion-detection/dist-types/index.d.ts new file mode 100644 index 0000000..9f92984 --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@aws-sdk/middleware-recursion-detection/dist-types/index.d.ts @@ -0,0 +1,18 @@ +import { AbsoluteLocation, BuildHandlerOptions, BuildMiddleware, Pluggable } from "@smithy/types"; +interface PreviouslyResolved { + runtime: string; +} +/** + * Inject to trace ID to request header to detect recursion invocation in Lambda. 
+ * @internal + */ +export declare const recursionDetectionMiddleware: (options: PreviouslyResolved) => BuildMiddleware; +/** + * @internal + */ +export declare const addRecursionDetectionMiddlewareOptions: BuildHandlerOptions & AbsoluteLocation; +/** + * @internal + */ +export declare const getRecursionDetectionPlugin: (options: PreviouslyResolved) => Pluggable; +export {}; diff --git a/amplify/functions/fetchDocuments/node_modules/@aws-sdk/middleware-recursion-detection/dist-types/ts3.4/index.d.ts b/amplify/functions/fetchDocuments/node_modules/@aws-sdk/middleware-recursion-detection/dist-types/ts3.4/index.d.ts new file mode 100644 index 0000000..8d1658b --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@aws-sdk/middleware-recursion-detection/dist-types/ts3.4/index.d.ts @@ -0,0 +1,18 @@ +import { + AbsoluteLocation, + BuildHandlerOptions, + BuildMiddleware, + Pluggable, +} from "@smithy/types"; +interface PreviouslyResolved { + runtime: string; +} +export declare const recursionDetectionMiddleware: ( + options: PreviouslyResolved +) => BuildMiddleware; +export declare const addRecursionDetectionMiddlewareOptions: BuildHandlerOptions & + AbsoluteLocation; +export declare const getRecursionDetectionPlugin: ( + options: PreviouslyResolved +) => Pluggable; +export {}; diff --git a/amplify/functions/fetchDocuments/node_modules/@aws-sdk/middleware-recursion-detection/package.json b/amplify/functions/fetchDocuments/node_modules/@aws-sdk/middleware-recursion-detection/package.json new file mode 100644 index 0000000..7c831f9 --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@aws-sdk/middleware-recursion-detection/package.json @@ -0,0 +1,57 @@ +{ + "name": "@aws-sdk/middleware-recursion-detection", + "version": "3.775.0", + "scripts": { + "build": "concurrently 'yarn:build:cjs' 'yarn:build:es' 'yarn:build:types'", + "build:cjs": "node ../../scripts/compilation/inline middleware-recursion-detection", + "build:es": "tsc -p tsconfig.es.json", + 
"build:include:deps": "lerna run --scope $npm_package_name --include-dependencies build", + "build:types": "tsc -p tsconfig.types.json", + "build:types:downlevel": "downlevel-dts dist-types dist-types/ts3.4", + "clean": "rimraf ./dist-* && rimraf *.tsbuildinfo", + "test": "yarn g:vitest run", + "test:integration": "yarn g:vitest run -c vitest.config.integ.ts", + "test:watch": "yarn g:vitest watch", + "test:integration:watch": "yarn g:vitest watch -c vitest.config.integ.ts" + }, + "main": "./dist-cjs/index.js", + "module": "./dist-es/index.js", + "types": "./dist-types/index.d.ts", + "author": { + "name": "AWS SDK for JavaScript Team", + "url": "https://aws.amazon.com/javascript/" + }, + "license": "Apache-2.0", + "dependencies": { + "@aws-sdk/types": "3.775.0", + "@smithy/protocol-http": "^5.1.0", + "@smithy/types": "^4.2.0", + "tslib": "^2.6.2" + }, + "engines": { + "node": ">=18.0.0" + }, + "typesVersions": { + "<4.0": { + "dist-types/*": [ + "dist-types/ts3.4/*" + ] + } + }, + "files": [ + "dist-*/**" + ], + "homepage": "https://github.com/aws/aws-sdk-js-v3/tree/main/packages/middleware-recursion-detection", + "repository": { + "type": "git", + "url": "https://github.com/aws/aws-sdk-js-v3.git", + "directory": "packages/middleware-recursion-detection" + }, + "devDependencies": { + "@tsconfig/recommended": "1.0.1", + "concurrently": "7.0.0", + "downlevel-dts": "0.10.1", + "rimraf": "3.0.2", + "typescript": "~5.2.2" + } +} diff --git a/amplify/functions/fetchDocuments/node_modules/@aws-sdk/middleware-user-agent/LICENSE b/amplify/functions/fetchDocuments/node_modules/@aws-sdk/middleware-user-agent/LICENSE new file mode 100644 index 0000000..e907b58 --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@aws-sdk/middleware-user-agent/LICENSE @@ -0,0 +1,201 @@ + Apache License + Version 2.0, January 2004 + http://www.apache.org/licenses/ + + TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION + + 1. Definitions. 
+ + "License" shall mean the terms and conditions for use, reproduction, + and distribution as defined by Sections 1 through 9 of this document. + + "Licensor" shall mean the copyright owner or entity authorized by + the copyright owner that is granting the License. + + "Legal Entity" shall mean the union of the acting entity and all + other entities that control, are controlled by, or are under common + control with that entity. For the purposes of this definition, + "control" means (i) the power, direct or indirect, to cause the + direction or management of such entity, whether by contract or + otherwise, or (ii) ownership of fifty percent (50%) or more of the + outstanding shares, or (iii) beneficial ownership of such entity. + + "You" (or "Your") shall mean an individual or Legal Entity + exercising permissions granted by this License. + + "Source" form shall mean the preferred form for making modifications, + including but not limited to software source code, documentation + source, and configuration files. + + "Object" form shall mean any form resulting from mechanical + transformation or translation of a Source form, including but + not limited to compiled object code, generated documentation, + and conversions to other media types. + + "Work" shall mean the work of authorship, whether in Source or + Object form, made available under the License, as indicated by a + copyright notice that is included in or attached to the work + (an example is provided in the Appendix below). + + "Derivative Works" shall mean any work, whether in Source or Object + form, that is based on (or derived from) the Work and for which the + editorial revisions, annotations, elaborations, or other modifications + represent, as a whole, an original work of authorship. For the purposes + of this License, Derivative Works shall not include works that remain + separable from, or merely link (or bind by name) to the interfaces of, + the Work and Derivative Works thereof. 
+ + "Contribution" shall mean any work of authorship, including + the original version of the Work and any modifications or additions + to that Work or Derivative Works thereof, that is intentionally + submitted to Licensor for inclusion in the Work by the copyright owner + or by an individual or Legal Entity authorized to submit on behalf of + the copyright owner. For the purposes of this definition, "submitted" + means any form of electronic, verbal, or written communication sent + to the Licensor or its representatives, including but not limited to + communication on electronic mailing lists, source code control systems, + and issue tracking systems that are managed by, or on behalf of, the + Licensor for the purpose of discussing and improving the Work, but + excluding communication that is conspicuously marked or otherwise + designated in writing by the copyright owner as "Not a Contribution." + + "Contributor" shall mean Licensor and any individual or Legal Entity + on behalf of whom a Contribution has been received by Licensor and + subsequently incorporated within the Work. + + 2. Grant of Copyright License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + copyright license to reproduce, prepare Derivative Works of, + publicly display, publicly perform, sublicense, and distribute the + Work and such Derivative Works in Source or Object form. + + 3. Grant of Patent License. 
Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + (except as stated in this section) patent license to make, have made, + use, offer to sell, sell, import, and otherwise transfer the Work, + where such license applies only to those patent claims licensable + by such Contributor that are necessarily infringed by their + Contribution(s) alone or by combination of their Contribution(s) + with the Work to which such Contribution(s) was submitted. If You + institute patent litigation against any entity (including a + cross-claim or counterclaim in a lawsuit) alleging that the Work + or a Contribution incorporated within the Work constitutes direct + or contributory patent infringement, then any patent licenses + granted to You under this License for that Work shall terminate + as of the date such litigation is filed. + + 4. Redistribution. You may reproduce and distribute copies of the + Work or Derivative Works thereof in any medium, with or without + modifications, and in Source or Object form, provided that You + meet the following conditions: + + (a) You must give any other recipients of the Work or + Derivative Works a copy of this License; and + + (b) You must cause any modified files to carry prominent notices + stating that You changed the files; and + + (c) You must retain, in the Source form of any Derivative Works + that You distribute, all copyright, patent, trademark, and + attribution notices from the Source form of the Work, + excluding those notices that do not pertain to any part of + the Derivative Works; and + + (d) If the Work includes a "NOTICE" text file as part of its + distribution, then any Derivative Works that You distribute must + include a readable copy of the attribution notices contained + within such NOTICE file, excluding those notices that do not + pertain to any part of the Derivative Works, in at least one + of 
the following places: within a NOTICE text file distributed + as part of the Derivative Works; within the Source form or + documentation, if provided along with the Derivative Works; or, + within a display generated by the Derivative Works, if and + wherever such third-party notices normally appear. The contents + of the NOTICE file are for informational purposes only and + do not modify the License. You may add Your own attribution + notices within Derivative Works that You distribute, alongside + or as an addendum to the NOTICE text from the Work, provided + that such additional attribution notices cannot be construed + as modifying the License. + + You may add Your own copyright statement to Your modifications and + may provide additional or different license terms and conditions + for use, reproduction, or distribution of Your modifications, or + for any such Derivative Works as a whole, provided Your use, + reproduction, and distribution of the Work otherwise complies with + the conditions stated in this License. + + 5. Submission of Contributions. Unless You explicitly state otherwise, + any Contribution intentionally submitted for inclusion in the Work + by You to the Licensor shall be under the terms and conditions of + this License, without any additional terms or conditions. + Notwithstanding the above, nothing herein shall supersede or modify + the terms of any separate license agreement you may have executed + with Licensor regarding such Contributions. + + 6. Trademarks. This License does not grant permission to use the trade + names, trademarks, service marks, or product names of the Licensor, + except as required for reasonable and customary use in describing the + origin of the Work and reproducing the content of the NOTICE file. + + 7. Disclaimer of Warranty. 
Unless required by applicable law or + agreed to in writing, Licensor provides the Work (and each + Contributor provides its Contributions) on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or + implied, including, without limitation, any warranties or conditions + of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A + PARTICULAR PURPOSE. You are solely responsible for determining the + appropriateness of using or redistributing the Work and assume any + risks associated with Your exercise of permissions under this License. + + 8. Limitation of Liability. In no event and under no legal theory, + whether in tort (including negligence), contract, or otherwise, + unless required by applicable law (such as deliberate and grossly + negligent acts) or agreed to in writing, shall any Contributor be + liable to You for damages, including any direct, indirect, special, + incidental, or consequential damages of any character arising as a + result of this License or out of the use or inability to use the + Work (including but not limited to damages for loss of goodwill, + work stoppage, computer failure or malfunction, or any and all + other commercial damages or losses), even if such Contributor + has been advised of the possibility of such damages. + + 9. Accepting Warranty or Additional Liability. While redistributing + the Work or Derivative Works thereof, You may choose to offer, + and charge a fee for, acceptance of support, warranty, indemnity, + or other liability obligations and/or rights consistent with this + License. However, in accepting such obligations, You may act only + on Your own behalf and on Your sole responsibility, not on behalf + of any other Contributor, and only if You agree to indemnify, + defend, and hold each Contributor harmless for any liability + incurred by, or claims asserted against, such Contributor by reason + of your accepting any such warranty or additional liability. 
+ + END OF TERMS AND CONDITIONS + + APPENDIX: How to apply the Apache License to your work. + + To apply the Apache License to your work, attach the following + boilerplate notice, with the fields enclosed by brackets "{}" + replaced with your own identifying information. (Don't include + the brackets!) The text should be enclosed in the appropriate + comment syntax for the file format. We also recommend that a + file or class name and description of purpose be included on the + same "printed page" as the copyright notice for easier + identification within third-party archives. + + Copyright 2019 Amazon.com, Inc. or its affiliates. All Rights Reserved. + + Licensed under the Apache License, Version 2.0 (the "License"); + you may not use this file except in compliance with the License. + You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + + Unless required by applicable law or agreed to in writing, software + distributed under the License is distributed on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + See the License for the specific language governing permissions and + limitations under the License. 
diff --git a/amplify/functions/fetchDocuments/node_modules/@aws-sdk/middleware-user-agent/README.md b/amplify/functions/fetchDocuments/node_modules/@aws-sdk/middleware-user-agent/README.md new file mode 100644 index 0000000..a0bf1a9 --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@aws-sdk/middleware-user-agent/README.md @@ -0,0 +1,4 @@ +# @aws-sdk/middleware-user-agent + +[![NPM version](https://img.shields.io/npm/v/@aws-sdk/middleware-user-agent/latest.svg)](https://www.npmjs.com/package/@aws-sdk/middleware-user-agent) +[![NPM downloads](https://img.shields.io/npm/dm/@aws-sdk/middleware-user-agent.svg)](https://www.npmjs.com/package/@aws-sdk/middleware-user-agent) diff --git a/amplify/functions/fetchDocuments/node_modules/@aws-sdk/middleware-user-agent/dist-cjs/index.js b/amplify/functions/fetchDocuments/node_modules/@aws-sdk/middleware-user-agent/dist-cjs/index.js new file mode 100644 index 0000000..aaf267c --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@aws-sdk/middleware-user-agent/dist-cjs/index.js @@ -0,0 +1,227 @@ +"use strict"; +var __defProp = Object.defineProperty; +var __getOwnPropDesc = Object.getOwnPropertyDescriptor; +var __getOwnPropNames = Object.getOwnPropertyNames; +var __hasOwnProp = Object.prototype.hasOwnProperty; +var __name = (target, value) => __defProp(target, "name", { value, configurable: true }); +var __export = (target, all) => { + for (var name in all) + __defProp(target, name, { get: all[name], enumerable: true }); +}; +var __copyProps = (to, from, except, desc) => { + if (from && typeof from === "object" || typeof from === "function") { + for (let key of __getOwnPropNames(from)) + if (!__hasOwnProp.call(to, key) && key !== except) + __defProp(to, key, { get: () => from[key], enumerable: !(desc = __getOwnPropDesc(from, key)) || desc.enumerable }); + } + return to; +}; +var __toCommonJS = (mod) => __copyProps(__defProp({}, "__esModule", { value: true }), mod); + +// src/index.ts +var index_exports = 
{}; +__export(index_exports, { + DEFAULT_UA_APP_ID: () => DEFAULT_UA_APP_ID, + getUserAgentMiddlewareOptions: () => getUserAgentMiddlewareOptions, + getUserAgentPlugin: () => getUserAgentPlugin, + resolveUserAgentConfig: () => resolveUserAgentConfig, + userAgentMiddleware: () => userAgentMiddleware +}); +module.exports = __toCommonJS(index_exports); + +// src/configurations.ts +var import_core = require("@smithy/core"); +var DEFAULT_UA_APP_ID = void 0; +function isValidUserAgentAppId(appId) { + if (appId === void 0) { + return true; + } + return typeof appId === "string" && appId.length <= 50; +} +__name(isValidUserAgentAppId, "isValidUserAgentAppId"); +function resolveUserAgentConfig(input) { + const normalizedAppIdProvider = (0, import_core.normalizeProvider)(input.userAgentAppId ?? DEFAULT_UA_APP_ID); + const { customUserAgent } = input; + return Object.assign(input, { + customUserAgent: typeof customUserAgent === "string" ? [[customUserAgent]] : customUserAgent, + userAgentAppId: /* @__PURE__ */ __name(async () => { + const appId = await normalizedAppIdProvider(); + if (!isValidUserAgentAppId(appId)) { + const logger = input.logger?.constructor?.name === "NoOpLogger" || !input.logger ? 
console : input.logger; + if (typeof appId !== "string") { + logger?.warn("userAgentAppId must be a string or undefined."); + } else if (appId.length > 50) { + logger?.warn("The provided userAgentAppId exceeds the maximum length of 50 characters."); + } + } + return appId; + }, "userAgentAppId") + }); +} +__name(resolveUserAgentConfig, "resolveUserAgentConfig"); + +// src/user-agent-middleware.ts +var import_util_endpoints = require("@aws-sdk/util-endpoints"); +var import_protocol_http = require("@smithy/protocol-http"); + +// src/check-features.ts +var import_core2 = require("@aws-sdk/core"); +var ACCOUNT_ID_ENDPOINT_REGEX = /\d{12}\.ddb/; +async function checkFeatures(context, config, args) { + const request = args.request; + if (request?.headers?.["smithy-protocol"] === "rpc-v2-cbor") { + (0, import_core2.setFeature)(context, "PROTOCOL_RPC_V2_CBOR", "M"); + } + if (typeof config.retryStrategy === "function") { + const retryStrategy = await config.retryStrategy(); + if (typeof retryStrategy.acquireInitialRetryToken === "function") { + if (retryStrategy.constructor?.name?.includes("Adaptive")) { + (0, import_core2.setFeature)(context, "RETRY_MODE_ADAPTIVE", "F"); + } else { + (0, import_core2.setFeature)(context, "RETRY_MODE_STANDARD", "E"); + } + } else { + (0, import_core2.setFeature)(context, "RETRY_MODE_LEGACY", "D"); + } + } + if (typeof config.accountIdEndpointMode === "function") { + const endpointV2 = context.endpointV2; + if (String(endpointV2?.url?.hostname).match(ACCOUNT_ID_ENDPOINT_REGEX)) { + (0, import_core2.setFeature)(context, "ACCOUNT_ID_ENDPOINT", "O"); + } + switch (await config.accountIdEndpointMode?.()) { + case "disabled": + (0, import_core2.setFeature)(context, "ACCOUNT_ID_MODE_DISABLED", "Q"); + break; + case "preferred": + (0, import_core2.setFeature)(context, "ACCOUNT_ID_MODE_PREFERRED", "P"); + break; + case "required": + (0, import_core2.setFeature)(context, "ACCOUNT_ID_MODE_REQUIRED", "R"); + break; + } + } + const identity = 
context.__smithy_context?.selectedHttpAuthScheme?.identity; + if (identity?.$source) { + const credentials = identity; + if (credentials.accountId) { + (0, import_core2.setFeature)(context, "RESOLVED_ACCOUNT_ID", "T"); + } + for (const [key, value] of Object.entries(credentials.$source ?? {})) { + (0, import_core2.setFeature)(context, key, value); + } + } +} +__name(checkFeatures, "checkFeatures"); + +// src/constants.ts +var USER_AGENT = "user-agent"; +var X_AMZ_USER_AGENT = "x-amz-user-agent"; +var SPACE = " "; +var UA_NAME_SEPARATOR = "/"; +var UA_NAME_ESCAPE_REGEX = /[^\!\$\%\&\'\*\+\-\.\^\_\`\|\~\d\w]/g; +var UA_VALUE_ESCAPE_REGEX = /[^\!\$\%\&\'\*\+\-\.\^\_\`\|\~\d\w\#]/g; +var UA_ESCAPE_CHAR = "-"; + +// src/encode-features.ts +var BYTE_LIMIT = 1024; +function encodeFeatures(features) { + let buffer = ""; + for (const key in features) { + const val = features[key]; + if (buffer.length + val.length + 1 <= BYTE_LIMIT) { + if (buffer.length) { + buffer += "," + val; + } else { + buffer += val; + } + continue; + } + break; + } + return buffer; +} +__name(encodeFeatures, "encodeFeatures"); + +// src/user-agent-middleware.ts +var userAgentMiddleware = /* @__PURE__ */ __name((options) => (next, context) => async (args) => { + const { request } = args; + if (!import_protocol_http.HttpRequest.isInstance(request)) { + return next(args); + } + const { headers } = request; + const userAgent = context?.userAgent?.map(escapeUserAgent) || []; + const defaultUserAgent = (await options.defaultUserAgentProvider()).map(escapeUserAgent); + await checkFeatures(context, options, args); + const awsContext = context; + defaultUserAgent.push( + `m/${encodeFeatures( + Object.assign({}, context.__smithy_context?.features, awsContext.__aws_sdk_context?.features) + )}` + ); + const customUserAgent = options?.customUserAgent?.map(escapeUserAgent) || []; + const appId = await options.userAgentAppId(); + if (appId) { + defaultUserAgent.push(escapeUserAgent([`app/${appId}`])); + } + const 
prefix = (0, import_util_endpoints.getUserAgentPrefix)(); + const sdkUserAgentValue = (prefix ? [prefix] : []).concat([...defaultUserAgent, ...userAgent, ...customUserAgent]).join(SPACE); + const normalUAValue = [ + ...defaultUserAgent.filter((section) => section.startsWith("aws-sdk-")), + ...customUserAgent + ].join(SPACE); + if (options.runtime !== "browser") { + if (normalUAValue) { + headers[X_AMZ_USER_AGENT] = headers[X_AMZ_USER_AGENT] ? `${headers[USER_AGENT]} ${normalUAValue}` : normalUAValue; + } + headers[USER_AGENT] = sdkUserAgentValue; + } else { + headers[X_AMZ_USER_AGENT] = sdkUserAgentValue; + } + return next({ + ...args, + request + }); +}, "userAgentMiddleware"); +var escapeUserAgent = /* @__PURE__ */ __name((userAgentPair) => { + const name = userAgentPair[0].split(UA_NAME_SEPARATOR).map((part) => part.replace(UA_NAME_ESCAPE_REGEX, UA_ESCAPE_CHAR)).join(UA_NAME_SEPARATOR); + const version = userAgentPair[1]?.replace(UA_VALUE_ESCAPE_REGEX, UA_ESCAPE_CHAR); + const prefixSeparatorIndex = name.indexOf(UA_NAME_SEPARATOR); + const prefix = name.substring(0, prefixSeparatorIndex); + let uaName = name.substring(prefixSeparatorIndex + 1); + if (prefix === "api") { + uaName = uaName.toLowerCase(); + } + return [prefix, uaName, version].filter((item) => item && item.length > 0).reduce((acc, item, index) => { + switch (index) { + case 0: + return item; + case 1: + return `${acc}/${item}`; + default: + return `${acc}#${item}`; + } + }, ""); +}, "escapeUserAgent"); +var getUserAgentMiddlewareOptions = { + name: "getUserAgentMiddleware", + step: "build", + priority: "low", + tags: ["SET_USER_AGENT", "USER_AGENT"], + override: true +}; +var getUserAgentPlugin = /* @__PURE__ */ __name((config) => ({ + applyToStack: /* @__PURE__ */ __name((clientStack) => { + clientStack.add(userAgentMiddleware(config), getUserAgentMiddlewareOptions); + }, "applyToStack") +}), "getUserAgentPlugin"); +// Annotate the CommonJS export names for ESM import in node: + +0 && 
(module.exports = { + DEFAULT_UA_APP_ID, + resolveUserAgentConfig, + userAgentMiddleware, + getUserAgentMiddlewareOptions, + getUserAgentPlugin +}); + diff --git a/amplify/functions/fetchDocuments/node_modules/@aws-sdk/middleware-user-agent/dist-es/check-features.js b/amplify/functions/fetchDocuments/node_modules/@aws-sdk/middleware-user-agent/dist-es/check-features.js new file mode 100644 index 0000000..1f115a7 --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@aws-sdk/middleware-user-agent/dist-es/check-features.js @@ -0,0 +1,49 @@ +import { setFeature } from "@aws-sdk/core"; +const ACCOUNT_ID_ENDPOINT_REGEX = /\d{12}\.ddb/; +export async function checkFeatures(context, config, args) { + const request = args.request; + if (request?.headers?.["smithy-protocol"] === "rpc-v2-cbor") { + setFeature(context, "PROTOCOL_RPC_V2_CBOR", "M"); + } + if (typeof config.retryStrategy === "function") { + const retryStrategy = await config.retryStrategy(); + if (typeof retryStrategy.acquireInitialRetryToken === "function") { + if (retryStrategy.constructor?.name?.includes("Adaptive")) { + setFeature(context, "RETRY_MODE_ADAPTIVE", "F"); + } + else { + setFeature(context, "RETRY_MODE_STANDARD", "E"); + } + } + else { + setFeature(context, "RETRY_MODE_LEGACY", "D"); + } + } + if (typeof config.accountIdEndpointMode === "function") { + const endpointV2 = context.endpointV2; + if (String(endpointV2?.url?.hostname).match(ACCOUNT_ID_ENDPOINT_REGEX)) { + setFeature(context, "ACCOUNT_ID_ENDPOINT", "O"); + } + switch (await config.accountIdEndpointMode?.()) { + case "disabled": + setFeature(context, "ACCOUNT_ID_MODE_DISABLED", "Q"); + break; + case "preferred": + setFeature(context, "ACCOUNT_ID_MODE_PREFERRED", "P"); + break; + case "required": + setFeature(context, "ACCOUNT_ID_MODE_REQUIRED", "R"); + break; + } + } + const identity = context.__smithy_context?.selectedHttpAuthScheme?.identity; + if (identity?.$source) { + const credentials = identity; + if 
(credentials.accountId) { + setFeature(context, "RESOLVED_ACCOUNT_ID", "T"); + } + for (const [key, value] of Object.entries(credentials.$source ?? {})) { + setFeature(context, key, value); + } + } +} diff --git a/amplify/functions/fetchDocuments/node_modules/@aws-sdk/middleware-user-agent/dist-es/configurations.js b/amplify/functions/fetchDocuments/node_modules/@aws-sdk/middleware-user-agent/dist-es/configurations.js new file mode 100644 index 0000000..7fff087 --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@aws-sdk/middleware-user-agent/dist-es/configurations.js @@ -0,0 +1,28 @@ +import { normalizeProvider } from "@smithy/core"; +export const DEFAULT_UA_APP_ID = undefined; +function isValidUserAgentAppId(appId) { + if (appId === undefined) { + return true; + } + return typeof appId === "string" && appId.length <= 50; +} +export function resolveUserAgentConfig(input) { + const normalizedAppIdProvider = normalizeProvider(input.userAgentAppId ?? DEFAULT_UA_APP_ID); + const { customUserAgent } = input; + return Object.assign(input, { + customUserAgent: typeof customUserAgent === "string" ? [[customUserAgent]] : customUserAgent, + userAgentAppId: async () => { + const appId = await normalizedAppIdProvider(); + if (!isValidUserAgentAppId(appId)) { + const logger = input.logger?.constructor?.name === "NoOpLogger" || !input.logger ? 
console : input.logger; + if (typeof appId !== "string") { + logger?.warn("userAgentAppId must be a string or undefined."); + } + else if (appId.length > 50) { + logger?.warn("The provided userAgentAppId exceeds the maximum length of 50 characters."); + } + } + return appId; + }, + }); +} diff --git a/amplify/functions/fetchDocuments/node_modules/@aws-sdk/middleware-user-agent/dist-es/constants.js b/amplify/functions/fetchDocuments/node_modules/@aws-sdk/middleware-user-agent/dist-es/constants.js new file mode 100644 index 0000000..33e3391 --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@aws-sdk/middleware-user-agent/dist-es/constants.js @@ -0,0 +1,7 @@ +export const USER_AGENT = "user-agent"; +export const X_AMZ_USER_AGENT = "x-amz-user-agent"; +export const SPACE = " "; +export const UA_NAME_SEPARATOR = "/"; +export const UA_NAME_ESCAPE_REGEX = /[^\!\$\%\&\'\*\+\-\.\^\_\`\|\~\d\w]/g; +export const UA_VALUE_ESCAPE_REGEX = /[^\!\$\%\&\'\*\+\-\.\^\_\`\|\~\d\w\#]/g; +export const UA_ESCAPE_CHAR = "-"; diff --git a/amplify/functions/fetchDocuments/node_modules/@aws-sdk/middleware-user-agent/dist-es/encode-features.js b/amplify/functions/fetchDocuments/node_modules/@aws-sdk/middleware-user-agent/dist-es/encode-features.js new file mode 100644 index 0000000..23002b6 --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@aws-sdk/middleware-user-agent/dist-es/encode-features.js @@ -0,0 +1,18 @@ +const BYTE_LIMIT = 1024; +export function encodeFeatures(features) { + let buffer = ""; + for (const key in features) { + const val = features[key]; + if (buffer.length + val.length + 1 <= BYTE_LIMIT) { + if (buffer.length) { + buffer += "," + val; + } + else { + buffer += val; + } + continue; + } + break; + } + return buffer; +} diff --git a/amplify/functions/fetchDocuments/node_modules/@aws-sdk/middleware-user-agent/dist-es/index.js b/amplify/functions/fetchDocuments/node_modules/@aws-sdk/middleware-user-agent/dist-es/index.js new file mode 100644 
index 0000000..0456ec7 --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@aws-sdk/middleware-user-agent/dist-es/index.js @@ -0,0 +1,2 @@ +export * from "./configurations"; +export * from "./user-agent-middleware"; diff --git a/amplify/functions/fetchDocuments/node_modules/@aws-sdk/middleware-user-agent/dist-es/user-agent-middleware.js b/amplify/functions/fetchDocuments/node_modules/@aws-sdk/middleware-user-agent/dist-es/user-agent-middleware.js new file mode 100644 index 0000000..188bda0 --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@aws-sdk/middleware-user-agent/dist-es/user-agent-middleware.js @@ -0,0 +1,82 @@ +import { getUserAgentPrefix } from "@aws-sdk/util-endpoints"; +import { HttpRequest } from "@smithy/protocol-http"; +import { checkFeatures } from "./check-features"; +import { SPACE, UA_ESCAPE_CHAR, UA_NAME_ESCAPE_REGEX, UA_NAME_SEPARATOR, UA_VALUE_ESCAPE_REGEX, USER_AGENT, X_AMZ_USER_AGENT, } from "./constants"; +import { encodeFeatures } from "./encode-features"; +export const userAgentMiddleware = (options) => (next, context) => async (args) => { + const { request } = args; + if (!HttpRequest.isInstance(request)) { + return next(args); + } + const { headers } = request; + const userAgent = context?.userAgent?.map(escapeUserAgent) || []; + const defaultUserAgent = (await options.defaultUserAgentProvider()).map(escapeUserAgent); + await checkFeatures(context, options, args); + const awsContext = context; + defaultUserAgent.push(`m/${encodeFeatures(Object.assign({}, context.__smithy_context?.features, awsContext.__aws_sdk_context?.features))}`); + const customUserAgent = options?.customUserAgent?.map(escapeUserAgent) || []; + const appId = await options.userAgentAppId(); + if (appId) { + defaultUserAgent.push(escapeUserAgent([`app/${appId}`])); + } + const prefix = getUserAgentPrefix(); + const sdkUserAgentValue = (prefix ? 
[prefix] : []) + .concat([...defaultUserAgent, ...userAgent, ...customUserAgent]) + .join(SPACE); + const normalUAValue = [ + ...defaultUserAgent.filter((section) => section.startsWith("aws-sdk-")), + ...customUserAgent, + ].join(SPACE); + if (options.runtime !== "browser") { + if (normalUAValue) { + headers[X_AMZ_USER_AGENT] = headers[X_AMZ_USER_AGENT] + ? `${headers[USER_AGENT]} ${normalUAValue}` + : normalUAValue; + } + headers[USER_AGENT] = sdkUserAgentValue; + } + else { + headers[X_AMZ_USER_AGENT] = sdkUserAgentValue; + } + return next({ + ...args, + request, + }); +}; +const escapeUserAgent = (userAgentPair) => { + const name = userAgentPair[0] + .split(UA_NAME_SEPARATOR) + .map((part) => part.replace(UA_NAME_ESCAPE_REGEX, UA_ESCAPE_CHAR)) + .join(UA_NAME_SEPARATOR); + const version = userAgentPair[1]?.replace(UA_VALUE_ESCAPE_REGEX, UA_ESCAPE_CHAR); + const prefixSeparatorIndex = name.indexOf(UA_NAME_SEPARATOR); + const prefix = name.substring(0, prefixSeparatorIndex); + let uaName = name.substring(prefixSeparatorIndex + 1); + if (prefix === "api") { + uaName = uaName.toLowerCase(); + } + return [prefix, uaName, version] + .filter((item) => item && item.length > 0) + .reduce((acc, item, index) => { + switch (index) { + case 0: + return item; + case 1: + return `${acc}/${item}`; + default: + return `${acc}#${item}`; + } + }, ""); +}; +export const getUserAgentMiddlewareOptions = { + name: "getUserAgentMiddleware", + step: "build", + priority: "low", + tags: ["SET_USER_AGENT", "USER_AGENT"], + override: true, +}; +export const getUserAgentPlugin = (config) => ({ + applyToStack: (clientStack) => { + clientStack.add(userAgentMiddleware(config), getUserAgentMiddlewareOptions); + }, +}); diff --git a/amplify/functions/fetchDocuments/node_modules/@aws-sdk/middleware-user-agent/dist-types/check-features.d.ts b/amplify/functions/fetchDocuments/node_modules/@aws-sdk/middleware-user-agent/dist-types/check-features.d.ts new file mode 100644 index 0000000..a75d08b --- 
/dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@aws-sdk/middleware-user-agent/dist-types/check-features.d.ts @@ -0,0 +1,18 @@ +import type { AccountIdEndpointMode } from "@aws-sdk/core/account-id-endpoint"; +import type { AwsHandlerExecutionContext } from "@aws-sdk/types"; +import type { AwsCredentialIdentityProvider, BuildHandlerArguments, Provider, RetryStrategy, RetryStrategyV2 } from "@smithy/types"; +/** + * @internal + */ +type PreviouslyResolved = Partial<{ + credentials?: AwsCredentialIdentityProvider; + accountIdEndpointMode?: Provider; + retryStrategy?: Provider; +}>; +/** + * @internal + * Check for features that don't have a middleware activation site but + * may be detected on the context, client config, or request. + */ +export declare function checkFeatures(context: AwsHandlerExecutionContext, config: PreviouslyResolved, args: BuildHandlerArguments): Promise; +export {}; diff --git a/amplify/functions/fetchDocuments/node_modules/@aws-sdk/middleware-user-agent/dist-types/configurations.d.ts b/amplify/functions/fetchDocuments/node_modules/@aws-sdk/middleware-user-agent/dist-types/configurations.d.ts new file mode 100644 index 0000000..f8183f8 --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@aws-sdk/middleware-user-agent/dist-types/configurations.d.ts @@ -0,0 +1,44 @@ +import { Logger, Provider, UserAgent } from "@smithy/types"; +/** + * @internal + */ +export declare const DEFAULT_UA_APP_ID: undefined; +/** + * @public + */ +export interface UserAgentInputConfig { + /** + * The custom user agent header that would be appended to default one + */ + customUserAgent?: string | UserAgent; + /** + * The application ID used to identify the application. 
+ */ + userAgentAppId?: string | undefined | Provider; +} +interface PreviouslyResolved { + defaultUserAgentProvider: Provider; + runtime: string; + logger?: Logger; +} +export interface UserAgentResolvedConfig { + /** + * The provider populating default tracking information to be sent with `user-agent`, `x-amz-user-agent` header. + * @internal + */ + defaultUserAgentProvider: Provider; + /** + * The custom user agent header that would be appended to default one + */ + customUserAgent?: UserAgent; + /** + * The runtime environment + */ + runtime: string; + /** + * Resolved value for input config {config.userAgentAppId} + */ + userAgentAppId: Provider; +} +export declare function resolveUserAgentConfig(input: T & PreviouslyResolved & UserAgentInputConfig): T & UserAgentResolvedConfig; +export {}; diff --git a/amplify/functions/fetchDocuments/node_modules/@aws-sdk/middleware-user-agent/dist-types/constants.d.ts b/amplify/functions/fetchDocuments/node_modules/@aws-sdk/middleware-user-agent/dist-types/constants.d.ts new file mode 100644 index 0000000..8c0dfc9 --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@aws-sdk/middleware-user-agent/dist-types/constants.d.ts @@ -0,0 +1,7 @@ +export declare const USER_AGENT = "user-agent"; +export declare const X_AMZ_USER_AGENT = "x-amz-user-agent"; +export declare const SPACE = " "; +export declare const UA_NAME_SEPARATOR = "/"; +export declare const UA_NAME_ESCAPE_REGEX: RegExp; +export declare const UA_VALUE_ESCAPE_REGEX: RegExp; +export declare const UA_ESCAPE_CHAR = "-"; diff --git a/amplify/functions/fetchDocuments/node_modules/@aws-sdk/middleware-user-agent/dist-types/encode-features.d.ts b/amplify/functions/fetchDocuments/node_modules/@aws-sdk/middleware-user-agent/dist-types/encode-features.d.ts new file mode 100644 index 0000000..d6079ae --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@aws-sdk/middleware-user-agent/dist-types/encode-features.d.ts @@ -0,0 +1,5 @@ +import type { 
AwsSdkFeatures } from "@aws-sdk/types"; +/** + * @internal + */ +export declare function encodeFeatures(features: AwsSdkFeatures): string; diff --git a/amplify/functions/fetchDocuments/node_modules/@aws-sdk/middleware-user-agent/dist-types/index.d.ts b/amplify/functions/fetchDocuments/node_modules/@aws-sdk/middleware-user-agent/dist-types/index.d.ts new file mode 100644 index 0000000..0456ec7 --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@aws-sdk/middleware-user-agent/dist-types/index.d.ts @@ -0,0 +1,2 @@ +export * from "./configurations"; +export * from "./user-agent-middleware"; diff --git a/amplify/functions/fetchDocuments/node_modules/@aws-sdk/middleware-user-agent/dist-types/ts3.4/check-features.d.ts b/amplify/functions/fetchDocuments/node_modules/@aws-sdk/middleware-user-agent/dist-types/ts3.4/check-features.d.ts new file mode 100644 index 0000000..d8fc201 --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@aws-sdk/middleware-user-agent/dist-types/ts3.4/check-features.d.ts @@ -0,0 +1,20 @@ +import { AccountIdEndpointMode } from "@aws-sdk/core/account-id-endpoint"; +import { AwsHandlerExecutionContext } from "@aws-sdk/types"; +import { + AwsCredentialIdentityProvider, + BuildHandlerArguments, + Provider, + RetryStrategy, + RetryStrategyV2, +} from "@smithy/types"; +type PreviouslyResolved = Partial<{ + credentials?: AwsCredentialIdentityProvider; + accountIdEndpointMode?: Provider; + retryStrategy?: Provider; +}>; +export declare function checkFeatures( + context: AwsHandlerExecutionContext, + config: PreviouslyResolved, + args: BuildHandlerArguments +): Promise; +export {}; diff --git a/amplify/functions/fetchDocuments/node_modules/@aws-sdk/middleware-user-agent/dist-types/ts3.4/configurations.d.ts b/amplify/functions/fetchDocuments/node_modules/@aws-sdk/middleware-user-agent/dist-types/ts3.4/configurations.d.ts new file mode 100644 index 0000000..a4a1b10 --- /dev/null +++ 
b/amplify/functions/fetchDocuments/node_modules/@aws-sdk/middleware-user-agent/dist-types/ts3.4/configurations.d.ts @@ -0,0 +1,21 @@ +import { Logger, Provider, UserAgent } from "@smithy/types"; +export declare const DEFAULT_UA_APP_ID: undefined; +export interface UserAgentInputConfig { + customUserAgent?: string | UserAgent; + userAgentAppId?: string | undefined | Provider; +} +interface PreviouslyResolved { + defaultUserAgentProvider: Provider; + runtime: string; + logger?: Logger; +} +export interface UserAgentResolvedConfig { + defaultUserAgentProvider: Provider; + customUserAgent?: UserAgent; + runtime: string; + userAgentAppId: Provider; +} +export declare function resolveUserAgentConfig( + input: T & PreviouslyResolved & UserAgentInputConfig +): T & UserAgentResolvedConfig; +export {}; diff --git a/amplify/functions/fetchDocuments/node_modules/@aws-sdk/middleware-user-agent/dist-types/ts3.4/constants.d.ts b/amplify/functions/fetchDocuments/node_modules/@aws-sdk/middleware-user-agent/dist-types/ts3.4/constants.d.ts new file mode 100644 index 0000000..8c0dfc9 --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@aws-sdk/middleware-user-agent/dist-types/ts3.4/constants.d.ts @@ -0,0 +1,7 @@ +export declare const USER_AGENT = "user-agent"; +export declare const X_AMZ_USER_AGENT = "x-amz-user-agent"; +export declare const SPACE = " "; +export declare const UA_NAME_SEPARATOR = "/"; +export declare const UA_NAME_ESCAPE_REGEX: RegExp; +export declare const UA_VALUE_ESCAPE_REGEX: RegExp; +export declare const UA_ESCAPE_CHAR = "-"; diff --git a/amplify/functions/fetchDocuments/node_modules/@aws-sdk/middleware-user-agent/dist-types/ts3.4/encode-features.d.ts b/amplify/functions/fetchDocuments/node_modules/@aws-sdk/middleware-user-agent/dist-types/ts3.4/encode-features.d.ts new file mode 100644 index 0000000..a7be5b7 --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@aws-sdk/middleware-user-agent/dist-types/ts3.4/encode-features.d.ts @@ -0,0 
+1,2 @@ +import { AwsSdkFeatures } from "@aws-sdk/types"; +export declare function encodeFeatures(features: AwsSdkFeatures): string; diff --git a/amplify/functions/fetchDocuments/node_modules/@aws-sdk/middleware-user-agent/dist-types/ts3.4/index.d.ts b/amplify/functions/fetchDocuments/node_modules/@aws-sdk/middleware-user-agent/dist-types/ts3.4/index.d.ts new file mode 100644 index 0000000..0456ec7 --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@aws-sdk/middleware-user-agent/dist-types/ts3.4/index.d.ts @@ -0,0 +1,2 @@ +export * from "./configurations"; +export * from "./user-agent-middleware"; diff --git a/amplify/functions/fetchDocuments/node_modules/@aws-sdk/middleware-user-agent/dist-types/ts3.4/user-agent-middleware.d.ts b/amplify/functions/fetchDocuments/node_modules/@aws-sdk/middleware-user-agent/dist-types/ts3.4/user-agent-middleware.d.ts new file mode 100644 index 0000000..a4da01e --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@aws-sdk/middleware-user-agent/dist-types/ts3.4/user-agent-middleware.d.ts @@ -0,0 +1,21 @@ +import { AwsHandlerExecutionContext } from "@aws-sdk/types"; +import { + AbsoluteLocation, + BuildHandler, + BuildHandlerOptions, + HandlerExecutionContext, + MetadataBearer, + Pluggable, +} from "@smithy/types"; +import { UserAgentResolvedConfig } from "./configurations"; +export declare const userAgentMiddleware: ( + options: UserAgentResolvedConfig +) => ( + next: BuildHandler, + context: HandlerExecutionContext | AwsHandlerExecutionContext +) => BuildHandler; +export declare const getUserAgentMiddlewareOptions: BuildHandlerOptions & + AbsoluteLocation; +export declare const getUserAgentPlugin: ( + config: UserAgentResolvedConfig +) => Pluggable; diff --git a/amplify/functions/fetchDocuments/node_modules/@aws-sdk/middleware-user-agent/dist-types/user-agent-middleware.d.ts b/amplify/functions/fetchDocuments/node_modules/@aws-sdk/middleware-user-agent/dist-types/user-agent-middleware.d.ts new file mode 
100644 index 0000000..d36dee5 --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@aws-sdk/middleware-user-agent/dist-types/user-agent-middleware.d.ts @@ -0,0 +1,18 @@ +import type { AwsHandlerExecutionContext } from "@aws-sdk/types"; +import { AbsoluteLocation, BuildHandler, BuildHandlerOptions, HandlerExecutionContext, MetadataBearer, Pluggable } from "@smithy/types"; +import { UserAgentResolvedConfig } from "./configurations"; +/** + * Build user agent header sections from: + * 1. runtime-specific default user agent provider; + * 2. custom user agent from `customUserAgent` client config; + * 3. handler execution context set by internal SDK components; + * The built user agent will be set to `x-amz-user-agent` header for ALL the + * runtimes. + * Please note that any override to the `user-agent` or `x-amz-user-agent` header + * in the HTTP request is discouraged. Please use `customUserAgent` client + * config or middleware setting the `userAgent` context to generate desired user + * agent. 
+ */ +export declare const userAgentMiddleware: (options: UserAgentResolvedConfig) => (next: BuildHandler, context: HandlerExecutionContext | AwsHandlerExecutionContext) => BuildHandler; +export declare const getUserAgentMiddlewareOptions: BuildHandlerOptions & AbsoluteLocation; +export declare const getUserAgentPlugin: (config: UserAgentResolvedConfig) => Pluggable; diff --git a/amplify/functions/fetchDocuments/node_modules/@aws-sdk/middleware-user-agent/package.json b/amplify/functions/fetchDocuments/node_modules/@aws-sdk/middleware-user-agent/package.json new file mode 100644 index 0000000..34cff38 --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@aws-sdk/middleware-user-agent/package.json @@ -0,0 +1,61 @@ +{ + "name": "@aws-sdk/middleware-user-agent", + "version": "3.799.0", + "scripts": { + "build": "concurrently 'yarn:build:cjs' 'yarn:build:es' 'yarn:build:types'", + "build:cjs": "node ../../scripts/compilation/inline middleware-user-agent", + "build:es": "tsc -p tsconfig.es.json", + "build:include:deps": "lerna run --scope $npm_package_name --include-dependencies build", + "build:types": "tsc -p tsconfig.types.json", + "build:types:downlevel": "downlevel-dts dist-types dist-types/ts3.4", + "clean": "rimraf ./dist-* && rimraf *.tsbuildinfo", + "test": "yarn g:vitest run", + "test:integration": "yarn g:vitest run -c vitest.config.integ.ts", + "extract:docs": "api-extractor run --local", + "test:watch": "yarn g:vitest watch", + "test:integration:watch": "yarn g:vitest watch -c vitest.config.integ.ts" + }, + "main": "./dist-cjs/index.js", + "module": "./dist-es/index.js", + "types": "./dist-types/index.d.ts", + "author": { + "name": "AWS SDK for JavaScript Team", + "url": "https://aws.amazon.com/javascript/" + }, + "license": "Apache-2.0", + "dependencies": { + "@aws-sdk/core": "3.799.0", + "@aws-sdk/types": "3.775.0", + "@aws-sdk/util-endpoints": "3.787.0", + "@smithy/core": "^3.3.0", + "@smithy/protocol-http": "^5.1.0", + "@smithy/types": 
"^4.2.0", + "tslib": "^2.6.2" + }, + "devDependencies": { + "@tsconfig/recommended": "1.0.1", + "concurrently": "7.0.0", + "downlevel-dts": "0.10.1", + "rimraf": "3.0.2", + "typescript": "~5.2.2" + }, + "engines": { + "node": ">=18.0.0" + }, + "typesVersions": { + "<4.0": { + "dist-types/*": [ + "dist-types/ts3.4/*" + ] + } + }, + "files": [ + "dist-*/**" + ], + "homepage": "https://github.com/aws/aws-sdk-js-v3/tree/main/packages/middleware-user-agent", + "repository": { + "type": "git", + "url": "https://github.com/aws/aws-sdk-js-v3.git", + "directory": "packages/middleware-user-agent" + } +} diff --git a/amplify/functions/fetchDocuments/node_modules/@aws-sdk/nested-clients/README.md b/amplify/functions/fetchDocuments/node_modules/@aws-sdk/nested-clients/README.md new file mode 100644 index 0000000..1182bbd --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@aws-sdk/nested-clients/README.md @@ -0,0 +1,13 @@ +# @aws-sdk/nested-clients + +## Description + +This is an internal package. Do not install this as a direct dependency. + +This package contains separate internal implementations of the STS and SSO-OIDC AWS SDK clients +to be used by the AWS SDK credential providers to break a cyclic dependency. + +### Bundlers + +This package may be marked as external if you do not use STS nor SSO-OIDC +in your credential resolution process. 
diff --git a/amplify/functions/fetchDocuments/node_modules/@aws-sdk/nested-clients/dist-cjs/index.js b/amplify/functions/fetchDocuments/node_modules/@aws-sdk/nested-clients/dist-cjs/index.js new file mode 100644 index 0000000..c8ad2e5 --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@aws-sdk/nested-clients/dist-cjs/index.js @@ -0,0 +1,2 @@ +"use strict"; +Object.defineProperty(exports, "__esModule", { value: true }); diff --git a/amplify/functions/fetchDocuments/node_modules/@aws-sdk/nested-clients/dist-cjs/submodules/sso-oidc/auth/httpAuthSchemeProvider.js b/amplify/functions/fetchDocuments/node_modules/@aws-sdk/nested-clients/dist-cjs/submodules/sso-oidc/auth/httpAuthSchemeProvider.js new file mode 100644 index 0000000..7a9f28a --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@aws-sdk/nested-clients/dist-cjs/submodules/sso-oidc/auth/httpAuthSchemeProvider.js @@ -0,0 +1,56 @@ +"use strict"; +Object.defineProperty(exports, "__esModule", { value: true }); +exports.resolveHttpAuthSchemeConfig = exports.defaultSSOOIDCHttpAuthSchemeProvider = exports.defaultSSOOIDCHttpAuthSchemeParametersProvider = void 0; +const core_1 = require("@aws-sdk/core"); +const util_middleware_1 = require("@smithy/util-middleware"); +const defaultSSOOIDCHttpAuthSchemeParametersProvider = async (config, context, input) => { + return { + operation: (0, util_middleware_1.getSmithyContext)(context).operation, + region: (await (0, util_middleware_1.normalizeProvider)(config.region)()) || + (() => { + throw new Error("expected `region` to be configured for `aws.auth#sigv4`"); + })(), + }; +}; +exports.defaultSSOOIDCHttpAuthSchemeParametersProvider = defaultSSOOIDCHttpAuthSchemeParametersProvider; +function createAwsAuthSigv4HttpAuthOption(authParameters) { + return { + schemeId: "aws.auth#sigv4", + signingProperties: { + name: "sso-oauth", + region: authParameters.region, + }, + propertiesExtractor: (config, context) => ({ + signingProperties: { + config, + 
context, + }, + }), + }; +} +function createSmithyApiNoAuthHttpAuthOption(authParameters) { + return { + schemeId: "smithy.api#noAuth", + }; +} +const defaultSSOOIDCHttpAuthSchemeProvider = (authParameters) => { + const options = []; + switch (authParameters.operation) { + case "CreateToken": { + options.push(createSmithyApiNoAuthHttpAuthOption(authParameters)); + break; + } + default: { + options.push(createAwsAuthSigv4HttpAuthOption(authParameters)); + } + } + return options; +}; +exports.defaultSSOOIDCHttpAuthSchemeProvider = defaultSSOOIDCHttpAuthSchemeProvider; +const resolveHttpAuthSchemeConfig = (config) => { + const config_0 = (0, core_1.resolveAwsSdkSigV4Config)(config); + return Object.assign(config_0, { + authSchemePreference: (0, util_middleware_1.normalizeProvider)(config.authSchemePreference ?? []), + }); +}; +exports.resolveHttpAuthSchemeConfig = resolveHttpAuthSchemeConfig; diff --git a/amplify/functions/fetchDocuments/node_modules/@aws-sdk/nested-clients/dist-cjs/submodules/sso-oidc/endpoint/endpointResolver.js b/amplify/functions/fetchDocuments/node_modules/@aws-sdk/nested-clients/dist-cjs/submodules/sso-oidc/endpoint/endpointResolver.js new file mode 100644 index 0000000..7258a35 --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@aws-sdk/nested-clients/dist-cjs/submodules/sso-oidc/endpoint/endpointResolver.js @@ -0,0 +1,18 @@ +"use strict"; +Object.defineProperty(exports, "__esModule", { value: true }); +exports.defaultEndpointResolver = void 0; +const util_endpoints_1 = require("@aws-sdk/util-endpoints"); +const util_endpoints_2 = require("@smithy/util-endpoints"); +const ruleset_1 = require("./ruleset"); +const cache = new util_endpoints_2.EndpointCache({ + size: 50, + params: ["Endpoint", "Region", "UseDualStack", "UseFIPS"], +}); +const defaultEndpointResolver = (endpointParams, context = {}) => { + return cache.get(endpointParams, () => (0, util_endpoints_2.resolveEndpoint)(ruleset_1.ruleSet, { + endpointParams: 
endpointParams, + logger: context.logger, + })); +}; +exports.defaultEndpointResolver = defaultEndpointResolver; +util_endpoints_2.customEndpointFunctions.aws = util_endpoints_1.awsEndpointFunctions; diff --git a/amplify/functions/fetchDocuments/node_modules/@aws-sdk/nested-clients/dist-cjs/submodules/sso-oidc/endpoint/ruleset.js b/amplify/functions/fetchDocuments/node_modules/@aws-sdk/nested-clients/dist-cjs/submodules/sso-oidc/endpoint/ruleset.js new file mode 100644 index 0000000..72e0adc --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@aws-sdk/nested-clients/dist-cjs/submodules/sso-oidc/endpoint/ruleset.js @@ -0,0 +1,7 @@ +"use strict"; +Object.defineProperty(exports, "__esModule", { value: true }); +exports.ruleSet = void 0; +const u = "required", v = "fn", w = "argv", x = "ref"; +const a = true, b = "isSet", c = "booleanEquals", d = "error", e = "endpoint", f = "tree", g = "PartitionResult", h = "getAttr", i = { [u]: false, "type": "String" }, j = { [u]: true, "default": false, "type": "Boolean" }, k = { [x]: "Endpoint" }, l = { [v]: c, [w]: [{ [x]: "UseFIPS" }, true] }, m = { [v]: c, [w]: [{ [x]: "UseDualStack" }, true] }, n = {}, o = { [v]: h, [w]: [{ [x]: g }, "supportsFIPS"] }, p = { [x]: g }, q = { [v]: c, [w]: [true, { [v]: h, [w]: [p, "supportsDualStack"] }] }, r = [l], s = [m], t = [{ [x]: "Region" }]; +const _data = { version: "1.0", parameters: { Region: i, UseDualStack: j, UseFIPS: j, Endpoint: i }, rules: [{ conditions: [{ [v]: b, [w]: [k] }], rules: [{ conditions: r, error: "Invalid Configuration: FIPS and custom endpoint are not supported", type: d }, { conditions: s, error: "Invalid Configuration: Dualstack and custom endpoint are not supported", type: d }, { endpoint: { url: k, properties: n, headers: n }, type: e }], type: f }, { conditions: [{ [v]: b, [w]: t }], rules: [{ conditions: [{ [v]: "aws.partition", [w]: t, assign: g }], rules: [{ conditions: [l, m], rules: [{ conditions: [{ [v]: c, [w]: [a, o] }, q], rules: [{ 
endpoint: { url: "https://oidc-fips.{Region}.{PartitionResult#dualStackDnsSuffix}", properties: n, headers: n }, type: e }], type: f }, { error: "FIPS and DualStack are enabled, but this partition does not support one or both", type: d }], type: f }, { conditions: r, rules: [{ conditions: [{ [v]: c, [w]: [o, a] }], rules: [{ conditions: [{ [v]: "stringEquals", [w]: [{ [v]: h, [w]: [p, "name"] }, "aws-us-gov"] }], endpoint: { url: "https://oidc.{Region}.amazonaws.com", properties: n, headers: n }, type: e }, { endpoint: { url: "https://oidc-fips.{Region}.{PartitionResult#dnsSuffix}", properties: n, headers: n }, type: e }], type: f }, { error: "FIPS is enabled but this partition does not support FIPS", type: d }], type: f }, { conditions: s, rules: [{ conditions: [q], rules: [{ endpoint: { url: "https://oidc.{Region}.{PartitionResult#dualStackDnsSuffix}", properties: n, headers: n }, type: e }], type: f }, { error: "DualStack is enabled but this partition does not support DualStack", type: d }], type: f }, { endpoint: { url: "https://oidc.{Region}.{PartitionResult#dnsSuffix}", properties: n, headers: n }, type: e }], type: f }], type: f }, { error: "Invalid Configuration: Missing Region", type: d }] }; +exports.ruleSet = _data; diff --git a/amplify/functions/fetchDocuments/node_modules/@aws-sdk/nested-clients/dist-cjs/submodules/sso-oidc/index.js b/amplify/functions/fetchDocuments/node_modules/@aws-sdk/nested-clients/dist-cjs/submodules/sso-oidc/index.js new file mode 100644 index 0000000..55f595a --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@aws-sdk/nested-clients/dist-cjs/submodules/sso-oidc/index.js @@ -0,0 +1,872 @@ +"use strict"; +var __defProp = Object.defineProperty; +var __getOwnPropDesc = Object.getOwnPropertyDescriptor; +var __getOwnPropNames = Object.getOwnPropertyNames; +var __hasOwnProp = Object.prototype.hasOwnProperty; +var __name = (target, value) => __defProp(target, "name", { value, configurable: true }); +var __export = 
(target, all) => { + for (var name in all) + __defProp(target, name, { get: all[name], enumerable: true }); +}; +var __copyProps = (to, from, except, desc) => { + if (from && typeof from === "object" || typeof from === "function") { + for (let key of __getOwnPropNames(from)) + if (!__hasOwnProp.call(to, key) && key !== except) + __defProp(to, key, { get: () => from[key], enumerable: !(desc = __getOwnPropDesc(from, key)) || desc.enumerable }); + } + return to; +}; +var __toCommonJS = (mod) => __copyProps(__defProp({}, "__esModule", { value: true }), mod); + +// src/submodules/sso-oidc/index.ts +var index_exports = {}; +__export(index_exports, { + $Command: () => import_smithy_client6.Command, + AccessDeniedException: () => AccessDeniedException, + AuthorizationPendingException: () => AuthorizationPendingException, + CreateTokenCommand: () => CreateTokenCommand, + CreateTokenRequestFilterSensitiveLog: () => CreateTokenRequestFilterSensitiveLog, + CreateTokenResponseFilterSensitiveLog: () => CreateTokenResponseFilterSensitiveLog, + ExpiredTokenException: () => ExpiredTokenException, + InternalServerException: () => InternalServerException, + InvalidClientException: () => InvalidClientException, + InvalidGrantException: () => InvalidGrantException, + InvalidRequestException: () => InvalidRequestException, + InvalidScopeException: () => InvalidScopeException, + SSOOIDC: () => SSOOIDC, + SSOOIDCClient: () => SSOOIDCClient, + SSOOIDCServiceException: () => SSOOIDCServiceException, + SlowDownException: () => SlowDownException, + UnauthorizedClientException: () => UnauthorizedClientException, + UnsupportedGrantTypeException: () => UnsupportedGrantTypeException, + __Client: () => import_smithy_client2.Client +}); +module.exports = __toCommonJS(index_exports); + +// src/submodules/sso-oidc/SSOOIDCClient.ts +var import_middleware_host_header = require("@aws-sdk/middleware-host-header"); +var import_middleware_logger = require("@aws-sdk/middleware-logger"); +var 
import_middleware_recursion_detection = require("@aws-sdk/middleware-recursion-detection"); +var import_middleware_user_agent = require("@aws-sdk/middleware-user-agent"); +var import_config_resolver = require("@smithy/config-resolver"); +var import_core = require("@smithy/core"); +var import_middleware_content_length = require("@smithy/middleware-content-length"); +var import_middleware_endpoint = require("@smithy/middleware-endpoint"); +var import_middleware_retry = require("@smithy/middleware-retry"); +var import_smithy_client2 = require("@smithy/smithy-client"); +var import_httpAuthSchemeProvider = require("./auth/httpAuthSchemeProvider"); + +// src/submodules/sso-oidc/endpoint/EndpointParameters.ts +var resolveClientEndpointParameters = /* @__PURE__ */ __name((options) => { + return Object.assign(options, { + useDualstackEndpoint: options.useDualstackEndpoint ?? false, + useFipsEndpoint: options.useFipsEndpoint ?? false, + defaultSigningName: "sso-oauth" + }); +}, "resolveClientEndpointParameters"); +var commonParams = { + UseFIPS: { type: "builtInParams", name: "useFipsEndpoint" }, + Endpoint: { type: "builtInParams", name: "endpoint" }, + Region: { type: "builtInParams", name: "region" }, + UseDualStack: { type: "builtInParams", name: "useDualstackEndpoint" } +}; + +// src/submodules/sso-oidc/SSOOIDCClient.ts +var import_runtimeConfig = require("./runtimeConfig"); + +// src/submodules/sso-oidc/runtimeExtensions.ts +var import_region_config_resolver = require("@aws-sdk/region-config-resolver"); +var import_protocol_http = require("@smithy/protocol-http"); +var import_smithy_client = require("@smithy/smithy-client"); + +// src/submodules/sso-oidc/auth/httpAuthExtensionConfiguration.ts +var getHttpAuthExtensionConfiguration = /* @__PURE__ */ __name((runtimeConfig) => { + const _httpAuthSchemes = runtimeConfig.httpAuthSchemes; + let _httpAuthSchemeProvider = runtimeConfig.httpAuthSchemeProvider; + let _credentials = runtimeConfig.credentials; + return { + 
setHttpAuthScheme(httpAuthScheme) { + const index = _httpAuthSchemes.findIndex((scheme) => scheme.schemeId === httpAuthScheme.schemeId); + if (index === -1) { + _httpAuthSchemes.push(httpAuthScheme); + } else { + _httpAuthSchemes.splice(index, 1, httpAuthScheme); + } + }, + httpAuthSchemes() { + return _httpAuthSchemes; + }, + setHttpAuthSchemeProvider(httpAuthSchemeProvider) { + _httpAuthSchemeProvider = httpAuthSchemeProvider; + }, + httpAuthSchemeProvider() { + return _httpAuthSchemeProvider; + }, + setCredentials(credentials) { + _credentials = credentials; + }, + credentials() { + return _credentials; + } + }; +}, "getHttpAuthExtensionConfiguration"); +var resolveHttpAuthRuntimeConfig = /* @__PURE__ */ __name((config) => { + return { + httpAuthSchemes: config.httpAuthSchemes(), + httpAuthSchemeProvider: config.httpAuthSchemeProvider(), + credentials: config.credentials() + }; +}, "resolveHttpAuthRuntimeConfig"); + +// src/submodules/sso-oidc/runtimeExtensions.ts +var resolveRuntimeExtensions = /* @__PURE__ */ __name((runtimeConfig, extensions) => { + const extensionConfiguration = Object.assign( + (0, import_region_config_resolver.getAwsRegionExtensionConfiguration)(runtimeConfig), + (0, import_smithy_client.getDefaultExtensionConfiguration)(runtimeConfig), + (0, import_protocol_http.getHttpHandlerExtensionConfiguration)(runtimeConfig), + getHttpAuthExtensionConfiguration(runtimeConfig) + ); + extensions.forEach((extension) => extension.configure(extensionConfiguration)); + return Object.assign( + runtimeConfig, + (0, import_region_config_resolver.resolveAwsRegionExtensionConfiguration)(extensionConfiguration), + (0, import_smithy_client.resolveDefaultRuntimeConfig)(extensionConfiguration), + (0, import_protocol_http.resolveHttpHandlerRuntimeConfig)(extensionConfiguration), + resolveHttpAuthRuntimeConfig(extensionConfiguration) + ); +}, "resolveRuntimeExtensions"); + +// src/submodules/sso-oidc/SSOOIDCClient.ts +var SSOOIDCClient = class extends 
import_smithy_client2.Client { + static { + __name(this, "SSOOIDCClient"); + } + /** + * The resolved configuration of SSOOIDCClient class. This is resolved and normalized from the {@link SSOOIDCClientConfig | constructor configuration interface}. + */ + config; + constructor(...[configuration]) { + const _config_0 = (0, import_runtimeConfig.getRuntimeConfig)(configuration || {}); + super(_config_0); + this.initConfig = _config_0; + const _config_1 = resolveClientEndpointParameters(_config_0); + const _config_2 = (0, import_middleware_user_agent.resolveUserAgentConfig)(_config_1); + const _config_3 = (0, import_middleware_retry.resolveRetryConfig)(_config_2); + const _config_4 = (0, import_config_resolver.resolveRegionConfig)(_config_3); + const _config_5 = (0, import_middleware_host_header.resolveHostHeaderConfig)(_config_4); + const _config_6 = (0, import_middleware_endpoint.resolveEndpointConfig)(_config_5); + const _config_7 = (0, import_httpAuthSchemeProvider.resolveHttpAuthSchemeConfig)(_config_6); + const _config_8 = resolveRuntimeExtensions(_config_7, configuration?.extensions || []); + this.config = _config_8; + this.middlewareStack.use((0, import_middleware_user_agent.getUserAgentPlugin)(this.config)); + this.middlewareStack.use((0, import_middleware_retry.getRetryPlugin)(this.config)); + this.middlewareStack.use((0, import_middleware_content_length.getContentLengthPlugin)(this.config)); + this.middlewareStack.use((0, import_middleware_host_header.getHostHeaderPlugin)(this.config)); + this.middlewareStack.use((0, import_middleware_logger.getLoggerPlugin)(this.config)); + this.middlewareStack.use((0, import_middleware_recursion_detection.getRecursionDetectionPlugin)(this.config)); + this.middlewareStack.use( + (0, import_core.getHttpAuthSchemeEndpointRuleSetPlugin)(this.config, { + httpAuthSchemeParametersProvider: import_httpAuthSchemeProvider.defaultSSOOIDCHttpAuthSchemeParametersProvider, + identityProviderConfigProvider: /* @__PURE__ */ __name(async 
(config) => new import_core.DefaultIdentityProviderConfig({ + "aws.auth#sigv4": config.credentials + }), "identityProviderConfigProvider") + }) + ); + this.middlewareStack.use((0, import_core.getHttpSigningPlugin)(this.config)); + } + /** + * Destroy underlying resources, like sockets. It's usually not necessary to do this. + * However in Node.js, it's best to explicitly shut down the client's agent when it is no longer needed. + * Otherwise, sockets might stay open for quite a long time before the server terminates them. + */ + destroy() { + super.destroy(); + } +}; + +// src/submodules/sso-oidc/SSOOIDC.ts +var import_smithy_client7 = require("@smithy/smithy-client"); + +// src/submodules/sso-oidc/commands/CreateTokenCommand.ts +var import_middleware_endpoint2 = require("@smithy/middleware-endpoint"); +var import_middleware_serde = require("@smithy/middleware-serde"); +var import_smithy_client6 = require("@smithy/smithy-client"); + +// src/submodules/sso-oidc/models/models_0.ts +var import_smithy_client4 = require("@smithy/smithy-client"); + +// src/submodules/sso-oidc/models/SSOOIDCServiceException.ts +var import_smithy_client3 = require("@smithy/smithy-client"); +var SSOOIDCServiceException = class _SSOOIDCServiceException extends import_smithy_client3.ServiceException { + static { + __name(this, "SSOOIDCServiceException"); + } + /** + * @internal + */ + constructor(options) { + super(options); + Object.setPrototypeOf(this, _SSOOIDCServiceException.prototype); + } +}; + +// src/submodules/sso-oidc/models/models_0.ts +var AccessDeniedException = class _AccessDeniedException extends SSOOIDCServiceException { + static { + __name(this, "AccessDeniedException"); + } + name = "AccessDeniedException"; + $fault = "client"; + /** + *

Single error code. For this exception the value will be access_denied.

+ * @public + */ + error; + /** + *

Human-readable text providing additional information, used to assist the client developer + * in understanding the error that occurred.

+ * @public + */ + error_description; + /** + * @internal + */ + constructor(opts) { + super({ + name: "AccessDeniedException", + $fault: "client", + ...opts + }); + Object.setPrototypeOf(this, _AccessDeniedException.prototype); + this.error = opts.error; + this.error_description = opts.error_description; + } +}; +var AuthorizationPendingException = class _AuthorizationPendingException extends SSOOIDCServiceException { + static { + __name(this, "AuthorizationPendingException"); + } + name = "AuthorizationPendingException"; + $fault = "client"; + /** + *

Single error code. For this exception the value will be + * authorization_pending.

+ * @public + */ + error; + /** + *

Human-readable text providing additional information, used to assist the client developer + * in understanding the error that occurred.

+ * @public + */ + error_description; + /** + * @internal + */ + constructor(opts) { + super({ + name: "AuthorizationPendingException", + $fault: "client", + ...opts + }); + Object.setPrototypeOf(this, _AuthorizationPendingException.prototype); + this.error = opts.error; + this.error_description = opts.error_description; + } +}; +var CreateTokenRequestFilterSensitiveLog = /* @__PURE__ */ __name((obj) => ({ + ...obj, + ...obj.clientSecret && { clientSecret: import_smithy_client4.SENSITIVE_STRING }, + ...obj.refreshToken && { refreshToken: import_smithy_client4.SENSITIVE_STRING }, + ...obj.codeVerifier && { codeVerifier: import_smithy_client4.SENSITIVE_STRING } +}), "CreateTokenRequestFilterSensitiveLog"); +var CreateTokenResponseFilterSensitiveLog = /* @__PURE__ */ __name((obj) => ({ + ...obj, + ...obj.accessToken && { accessToken: import_smithy_client4.SENSITIVE_STRING }, + ...obj.refreshToken && { refreshToken: import_smithy_client4.SENSITIVE_STRING }, + ...obj.idToken && { idToken: import_smithy_client4.SENSITIVE_STRING } +}), "CreateTokenResponseFilterSensitiveLog"); +var ExpiredTokenException = class _ExpiredTokenException extends SSOOIDCServiceException { + static { + __name(this, "ExpiredTokenException"); + } + name = "ExpiredTokenException"; + $fault = "client"; + /** + *

Single error code. For this exception the value will be expired_token.

+ * @public + */ + error; + /** + *

Human-readable text providing additional information, used to assist the client developer + * in understanding the error that occurred.

+ * @public + */ + error_description; + /** + * @internal + */ + constructor(opts) { + super({ + name: "ExpiredTokenException", + $fault: "client", + ...opts + }); + Object.setPrototypeOf(this, _ExpiredTokenException.prototype); + this.error = opts.error; + this.error_description = opts.error_description; + } +}; +var InternalServerException = class _InternalServerException extends SSOOIDCServiceException { + static { + __name(this, "InternalServerException"); + } + name = "InternalServerException"; + $fault = "server"; + /** + *

Single error code. For this exception the value will be server_error.

+ * @public + */ + error; + /** + *

Human-readable text providing additional information, used to assist the client developer + * in understanding the error that occurred.

+ * @public + */ + error_description; + /** + * @internal + */ + constructor(opts) { + super({ + name: "InternalServerException", + $fault: "server", + ...opts + }); + Object.setPrototypeOf(this, _InternalServerException.prototype); + this.error = opts.error; + this.error_description = opts.error_description; + } +}; +var InvalidClientException = class _InvalidClientException extends SSOOIDCServiceException { + static { + __name(this, "InvalidClientException"); + } + name = "InvalidClientException"; + $fault = "client"; + /** + *

Single error code. For this exception the value will be + * invalid_client.

+ * @public + */ + error; + /** + *

Human-readable text providing additional information, used to assist the client developer + * in understanding the error that occurred.

+ * @public + */ + error_description; + /** + * @internal + */ + constructor(opts) { + super({ + name: "InvalidClientException", + $fault: "client", + ...opts + }); + Object.setPrototypeOf(this, _InvalidClientException.prototype); + this.error = opts.error; + this.error_description = opts.error_description; + } +}; +var InvalidGrantException = class _InvalidGrantException extends SSOOIDCServiceException { + static { + __name(this, "InvalidGrantException"); + } + name = "InvalidGrantException"; + $fault = "client"; + /** + *

Single error code. For this exception the value will be invalid_grant.

+ * @public + */ + error; + /** + *

Human-readable text providing additional information, used to assist the client developer + * in understanding the error that occurred.

+ * @public + */ + error_description; + /** + * @internal + */ + constructor(opts) { + super({ + name: "InvalidGrantException", + $fault: "client", + ...opts + }); + Object.setPrototypeOf(this, _InvalidGrantException.prototype); + this.error = opts.error; + this.error_description = opts.error_description; + } +}; +var InvalidRequestException = class _InvalidRequestException extends SSOOIDCServiceException { + static { + __name(this, "InvalidRequestException"); + } + name = "InvalidRequestException"; + $fault = "client"; + /** + *

Single error code. For this exception the value will be + * invalid_request.

+ * @public + */ + error; + /** + *

Human-readable text providing additional information, used to assist the client developer + * in understanding the error that occurred.

+ * @public + */ + error_description; + /** + * @internal + */ + constructor(opts) { + super({ + name: "InvalidRequestException", + $fault: "client", + ...opts + }); + Object.setPrototypeOf(this, _InvalidRequestException.prototype); + this.error = opts.error; + this.error_description = opts.error_description; + } +}; +var InvalidScopeException = class _InvalidScopeException extends SSOOIDCServiceException { + static { + __name(this, "InvalidScopeException"); + } + name = "InvalidScopeException"; + $fault = "client"; + /** + *

Single error code. For this exception the value will be invalid_scope.

+ * @public + */ + error; + /** + *

Human-readable text providing additional information, used to assist the client developer + * in understanding the error that occurred.

+ * @public + */ + error_description; + /** + * @internal + */ + constructor(opts) { + super({ + name: "InvalidScopeException", + $fault: "client", + ...opts + }); + Object.setPrototypeOf(this, _InvalidScopeException.prototype); + this.error = opts.error; + this.error_description = opts.error_description; + } +}; +var SlowDownException = class _SlowDownException extends SSOOIDCServiceException { + static { + __name(this, "SlowDownException"); + } + name = "SlowDownException"; + $fault = "client"; + /** + *

Single error code. For this exception the value will be slow_down.

+ * @public + */ + error; + /** + *

Human-readable text providing additional information, used to assist the client developer + * in understanding the error that occurred.

+ * @public + */ + error_description; + /** + * @internal + */ + constructor(opts) { + super({ + name: "SlowDownException", + $fault: "client", + ...opts + }); + Object.setPrototypeOf(this, _SlowDownException.prototype); + this.error = opts.error; + this.error_description = opts.error_description; + } +}; +var UnauthorizedClientException = class _UnauthorizedClientException extends SSOOIDCServiceException { + static { + __name(this, "UnauthorizedClientException"); + } + name = "UnauthorizedClientException"; + $fault = "client"; + /** + *

Single error code. For this exception the value will be + * unauthorized_client.

+ * @public + */ + error; + /** + *

Human-readable text providing additional information, used to assist the client developer + * in understanding the error that occurred.

+ * @public + */ + error_description; + /** + * @internal + */ + constructor(opts) { + super({ + name: "UnauthorizedClientException", + $fault: "client", + ...opts + }); + Object.setPrototypeOf(this, _UnauthorizedClientException.prototype); + this.error = opts.error; + this.error_description = opts.error_description; + } +}; +var UnsupportedGrantTypeException = class _UnsupportedGrantTypeException extends SSOOIDCServiceException { + static { + __name(this, "UnsupportedGrantTypeException"); + } + name = "UnsupportedGrantTypeException"; + $fault = "client"; + /** + *

Single error code. For this exception the value will be + * unsupported_grant_type.

+ * @public + */ + error; + /** + *

Human-readable text providing additional information, used to assist the client developer + * in understanding the error that occurred.

+ * @public + */ + error_description; + /** + * @internal + */ + constructor(opts) { + super({ + name: "UnsupportedGrantTypeException", + $fault: "client", + ...opts + }); + Object.setPrototypeOf(this, _UnsupportedGrantTypeException.prototype); + this.error = opts.error; + this.error_description = opts.error_description; + } +}; + +// src/submodules/sso-oidc/protocols/Aws_restJson1.ts +var import_core2 = require("@aws-sdk/core"); +var import_core3 = require("@smithy/core"); +var import_smithy_client5 = require("@smithy/smithy-client"); +var se_CreateTokenCommand = /* @__PURE__ */ __name(async (input, context) => { + const b = (0, import_core3.requestBuilder)(input, context); + const headers = { + "content-type": "application/json" + }; + b.bp("/token"); + let body; + body = JSON.stringify( + (0, import_smithy_client5.take)(input, { + clientId: [], + clientSecret: [], + code: [], + codeVerifier: [], + deviceCode: [], + grantType: [], + redirectUri: [], + refreshToken: [], + scope: /* @__PURE__ */ __name((_) => (0, import_smithy_client5._json)(_), "scope") + }) + ); + b.m("POST").h(headers).b(body); + return b.build(); +}, "se_CreateTokenCommand"); +var de_CreateTokenCommand = /* @__PURE__ */ __name(async (output, context) => { + if (output.statusCode !== 200 && output.statusCode >= 300) { + return de_CommandError(output, context); + } + const contents = (0, import_smithy_client5.map)({ + $metadata: deserializeMetadata(output) + }); + const data = (0, import_smithy_client5.expectNonNull)((0, import_smithy_client5.expectObject)(await (0, import_core2.parseJsonBody)(output.body, context)), "body"); + const doc = (0, import_smithy_client5.take)(data, { + accessToken: import_smithy_client5.expectString, + expiresIn: import_smithy_client5.expectInt32, + idToken: import_smithy_client5.expectString, + refreshToken: import_smithy_client5.expectString, + tokenType: import_smithy_client5.expectString + }); + Object.assign(contents, doc); + return contents; +}, 
"de_CreateTokenCommand"); +var de_CommandError = /* @__PURE__ */ __name(async (output, context) => { + const parsedOutput = { + ...output, + body: await (0, import_core2.parseJsonErrorBody)(output.body, context) + }; + const errorCode = (0, import_core2.loadRestJsonErrorCode)(output, parsedOutput.body); + switch (errorCode) { + case "AccessDeniedException": + case "com.amazonaws.ssooidc#AccessDeniedException": + throw await de_AccessDeniedExceptionRes(parsedOutput, context); + case "AuthorizationPendingException": + case "com.amazonaws.ssooidc#AuthorizationPendingException": + throw await de_AuthorizationPendingExceptionRes(parsedOutput, context); + case "ExpiredTokenException": + case "com.amazonaws.ssooidc#ExpiredTokenException": + throw await de_ExpiredTokenExceptionRes(parsedOutput, context); + case "InternalServerException": + case "com.amazonaws.ssooidc#InternalServerException": + throw await de_InternalServerExceptionRes(parsedOutput, context); + case "InvalidClientException": + case "com.amazonaws.ssooidc#InvalidClientException": + throw await de_InvalidClientExceptionRes(parsedOutput, context); + case "InvalidGrantException": + case "com.amazonaws.ssooidc#InvalidGrantException": + throw await de_InvalidGrantExceptionRes(parsedOutput, context); + case "InvalidRequestException": + case "com.amazonaws.ssooidc#InvalidRequestException": + throw await de_InvalidRequestExceptionRes(parsedOutput, context); + case "InvalidScopeException": + case "com.amazonaws.ssooidc#InvalidScopeException": + throw await de_InvalidScopeExceptionRes(parsedOutput, context); + case "SlowDownException": + case "com.amazonaws.ssooidc#SlowDownException": + throw await de_SlowDownExceptionRes(parsedOutput, context); + case "UnauthorizedClientException": + case "com.amazonaws.ssooidc#UnauthorizedClientException": + throw await de_UnauthorizedClientExceptionRes(parsedOutput, context); + case "UnsupportedGrantTypeException": + case "com.amazonaws.ssooidc#UnsupportedGrantTypeException": + 
throw await de_UnsupportedGrantTypeExceptionRes(parsedOutput, context); + default: + const parsedBody = parsedOutput.body; + return throwDefaultError({ + output, + parsedBody, + errorCode + }); + } +}, "de_CommandError"); +var throwDefaultError = (0, import_smithy_client5.withBaseException)(SSOOIDCServiceException); +var de_AccessDeniedExceptionRes = /* @__PURE__ */ __name(async (parsedOutput, context) => { + const contents = (0, import_smithy_client5.map)({}); + const data = parsedOutput.body; + const doc = (0, import_smithy_client5.take)(data, { + error: import_smithy_client5.expectString, + error_description: import_smithy_client5.expectString + }); + Object.assign(contents, doc); + const exception = new AccessDeniedException({ + $metadata: deserializeMetadata(parsedOutput), + ...contents + }); + return (0, import_smithy_client5.decorateServiceException)(exception, parsedOutput.body); +}, "de_AccessDeniedExceptionRes"); +var de_AuthorizationPendingExceptionRes = /* @__PURE__ */ __name(async (parsedOutput, context) => { + const contents = (0, import_smithy_client5.map)({}); + const data = parsedOutput.body; + const doc = (0, import_smithy_client5.take)(data, { + error: import_smithy_client5.expectString, + error_description: import_smithy_client5.expectString + }); + Object.assign(contents, doc); + const exception = new AuthorizationPendingException({ + $metadata: deserializeMetadata(parsedOutput), + ...contents + }); + return (0, import_smithy_client5.decorateServiceException)(exception, parsedOutput.body); +}, "de_AuthorizationPendingExceptionRes"); +var de_ExpiredTokenExceptionRes = /* @__PURE__ */ __name(async (parsedOutput, context) => { + const contents = (0, import_smithy_client5.map)({}); + const data = parsedOutput.body; + const doc = (0, import_smithy_client5.take)(data, { + error: import_smithy_client5.expectString, + error_description: import_smithy_client5.expectString + }); + Object.assign(contents, doc); + const exception = new 
ExpiredTokenException({ + $metadata: deserializeMetadata(parsedOutput), + ...contents + }); + return (0, import_smithy_client5.decorateServiceException)(exception, parsedOutput.body); +}, "de_ExpiredTokenExceptionRes"); +var de_InternalServerExceptionRes = /* @__PURE__ */ __name(async (parsedOutput, context) => { + const contents = (0, import_smithy_client5.map)({}); + const data = parsedOutput.body; + const doc = (0, import_smithy_client5.take)(data, { + error: import_smithy_client5.expectString, + error_description: import_smithy_client5.expectString + }); + Object.assign(contents, doc); + const exception = new InternalServerException({ + $metadata: deserializeMetadata(parsedOutput), + ...contents + }); + return (0, import_smithy_client5.decorateServiceException)(exception, parsedOutput.body); +}, "de_InternalServerExceptionRes"); +var de_InvalidClientExceptionRes = /* @__PURE__ */ __name(async (parsedOutput, context) => { + const contents = (0, import_smithy_client5.map)({}); + const data = parsedOutput.body; + const doc = (0, import_smithy_client5.take)(data, { + error: import_smithy_client5.expectString, + error_description: import_smithy_client5.expectString + }); + Object.assign(contents, doc); + const exception = new InvalidClientException({ + $metadata: deserializeMetadata(parsedOutput), + ...contents + }); + return (0, import_smithy_client5.decorateServiceException)(exception, parsedOutput.body); +}, "de_InvalidClientExceptionRes"); +var de_InvalidGrantExceptionRes = /* @__PURE__ */ __name(async (parsedOutput, context) => { + const contents = (0, import_smithy_client5.map)({}); + const data = parsedOutput.body; + const doc = (0, import_smithy_client5.take)(data, { + error: import_smithy_client5.expectString, + error_description: import_smithy_client5.expectString + }); + Object.assign(contents, doc); + const exception = new InvalidGrantException({ + $metadata: deserializeMetadata(parsedOutput), + ...contents + }); + return (0, 
import_smithy_client5.decorateServiceException)(exception, parsedOutput.body); +}, "de_InvalidGrantExceptionRes"); +var de_InvalidRequestExceptionRes = /* @__PURE__ */ __name(async (parsedOutput, context) => { + const contents = (0, import_smithy_client5.map)({}); + const data = parsedOutput.body; + const doc = (0, import_smithy_client5.take)(data, { + error: import_smithy_client5.expectString, + error_description: import_smithy_client5.expectString + }); + Object.assign(contents, doc); + const exception = new InvalidRequestException({ + $metadata: deserializeMetadata(parsedOutput), + ...contents + }); + return (0, import_smithy_client5.decorateServiceException)(exception, parsedOutput.body); +}, "de_InvalidRequestExceptionRes"); +var de_InvalidScopeExceptionRes = /* @__PURE__ */ __name(async (parsedOutput, context) => { + const contents = (0, import_smithy_client5.map)({}); + const data = parsedOutput.body; + const doc = (0, import_smithy_client5.take)(data, { + error: import_smithy_client5.expectString, + error_description: import_smithy_client5.expectString + }); + Object.assign(contents, doc); + const exception = new InvalidScopeException({ + $metadata: deserializeMetadata(parsedOutput), + ...contents + }); + return (0, import_smithy_client5.decorateServiceException)(exception, parsedOutput.body); +}, "de_InvalidScopeExceptionRes"); +var de_SlowDownExceptionRes = /* @__PURE__ */ __name(async (parsedOutput, context) => { + const contents = (0, import_smithy_client5.map)({}); + const data = parsedOutput.body; + const doc = (0, import_smithy_client5.take)(data, { + error: import_smithy_client5.expectString, + error_description: import_smithy_client5.expectString + }); + Object.assign(contents, doc); + const exception = new SlowDownException({ + $metadata: deserializeMetadata(parsedOutput), + ...contents + }); + return (0, import_smithy_client5.decorateServiceException)(exception, parsedOutput.body); +}, "de_SlowDownExceptionRes"); +var 
de_UnauthorizedClientExceptionRes = /* @__PURE__ */ __name(async (parsedOutput, context) => { + const contents = (0, import_smithy_client5.map)({}); + const data = parsedOutput.body; + const doc = (0, import_smithy_client5.take)(data, { + error: import_smithy_client5.expectString, + error_description: import_smithy_client5.expectString + }); + Object.assign(contents, doc); + const exception = new UnauthorizedClientException({ + $metadata: deserializeMetadata(parsedOutput), + ...contents + }); + return (0, import_smithy_client5.decorateServiceException)(exception, parsedOutput.body); +}, "de_UnauthorizedClientExceptionRes"); +var de_UnsupportedGrantTypeExceptionRes = /* @__PURE__ */ __name(async (parsedOutput, context) => { + const contents = (0, import_smithy_client5.map)({}); + const data = parsedOutput.body; + const doc = (0, import_smithy_client5.take)(data, { + error: import_smithy_client5.expectString, + error_description: import_smithy_client5.expectString + }); + Object.assign(contents, doc); + const exception = new UnsupportedGrantTypeException({ + $metadata: deserializeMetadata(parsedOutput), + ...contents + }); + return (0, import_smithy_client5.decorateServiceException)(exception, parsedOutput.body); +}, "de_UnsupportedGrantTypeExceptionRes"); +var deserializeMetadata = /* @__PURE__ */ __name((output) => ({ + httpStatusCode: output.statusCode, + requestId: output.headers["x-amzn-requestid"] ?? output.headers["x-amzn-request-id"] ?? 
output.headers["x-amz-request-id"], + extendedRequestId: output.headers["x-amz-id-2"], + cfId: output.headers["x-amz-cf-id"] +}), "deserializeMetadata"); + +// src/submodules/sso-oidc/commands/CreateTokenCommand.ts +var CreateTokenCommand = class extends import_smithy_client6.Command.classBuilder().ep(commonParams).m(function(Command, cs, config, o) { + return [ + (0, import_middleware_serde.getSerdePlugin)(config, this.serialize, this.deserialize), + (0, import_middleware_endpoint2.getEndpointPlugin)(config, Command.getEndpointParameterInstructions()) + ]; +}).s("AWSSSOOIDCService", "CreateToken", {}).n("SSOOIDCClient", "CreateTokenCommand").f(CreateTokenRequestFilterSensitiveLog, CreateTokenResponseFilterSensitiveLog).ser(se_CreateTokenCommand).de(de_CreateTokenCommand).build() { + static { + __name(this, "CreateTokenCommand"); + } +}; + +// src/submodules/sso-oidc/SSOOIDC.ts +var commands = { + CreateTokenCommand +}; +var SSOOIDC = class extends SSOOIDCClient { + static { + __name(this, "SSOOIDC"); + } +}; +(0, import_smithy_client7.createAggregatedClient)(commands, SSOOIDC); +// Annotate the CommonJS export names for ESM import in node: +0 && (module.exports = { + $Command, + AccessDeniedException, + AuthorizationPendingException, + CreateTokenCommand, + CreateTokenRequestFilterSensitiveLog, + CreateTokenResponseFilterSensitiveLog, + ExpiredTokenException, + InternalServerException, + InvalidClientException, + InvalidGrantException, + InvalidRequestException, + InvalidScopeException, + SSOOIDC, + SSOOIDCClient, + SSOOIDCServiceException, + SlowDownException, + UnauthorizedClientException, + UnsupportedGrantTypeException, + __Client +}); diff --git a/amplify/functions/fetchDocuments/node_modules/@aws-sdk/nested-clients/dist-cjs/submodules/sso-oidc/runtimeConfig.browser.js b/amplify/functions/fetchDocuments/node_modules/@aws-sdk/nested-clients/dist-cjs/submodules/sso-oidc/runtimeConfig.browser.js new file mode 100644 index 0000000..6654024 --- /dev/null +++ 
b/amplify/functions/fetchDocuments/node_modules/@aws-sdk/nested-clients/dist-cjs/submodules/sso-oidc/runtimeConfig.browser.js @@ -0,0 +1,38 @@ +"use strict"; +Object.defineProperty(exports, "__esModule", { value: true }); +exports.getRuntimeConfig = void 0; +const tslib_1 = require("tslib"); +const package_json_1 = tslib_1.__importDefault(require("../../../package.json")); +const sha256_browser_1 = require("@aws-crypto/sha256-browser"); +const util_user_agent_browser_1 = require("@aws-sdk/util-user-agent-browser"); +const config_resolver_1 = require("@smithy/config-resolver"); +const fetch_http_handler_1 = require("@smithy/fetch-http-handler"); +const invalid_dependency_1 = require("@smithy/invalid-dependency"); +const util_body_length_browser_1 = require("@smithy/util-body-length-browser"); +const util_retry_1 = require("@smithy/util-retry"); +const runtimeConfig_shared_1 = require("./runtimeConfig.shared"); +const smithy_client_1 = require("@smithy/smithy-client"); +const util_defaults_mode_browser_1 = require("@smithy/util-defaults-mode-browser"); +const getRuntimeConfig = (config) => { + const defaultsMode = (0, util_defaults_mode_browser_1.resolveDefaultsModeConfig)(config); + const defaultConfigProvider = () => defaultsMode().then(smithy_client_1.loadConfigsForDefaultMode); + const clientSharedValues = (0, runtimeConfig_shared_1.getRuntimeConfig)(config); + return { + ...clientSharedValues, + ...config, + runtime: "browser", + defaultsMode, + bodyLengthChecker: config?.bodyLengthChecker ?? util_body_length_browser_1.calculateBodyLength, + defaultUserAgentProvider: config?.defaultUserAgentProvider ?? + (0, util_user_agent_browser_1.createDefaultUserAgentProvider)({ serviceId: clientSharedValues.serviceId, clientVersion: package_json_1.default.version }), + maxAttempts: config?.maxAttempts ?? util_retry_1.DEFAULT_MAX_ATTEMPTS, + region: config?.region ?? 
(0, invalid_dependency_1.invalidProvider)("Region is missing"), + requestHandler: fetch_http_handler_1.FetchHttpHandler.create(config?.requestHandler ?? defaultConfigProvider), + retryMode: config?.retryMode ?? (async () => (await defaultConfigProvider()).retryMode || util_retry_1.DEFAULT_RETRY_MODE), + sha256: config?.sha256 ?? sha256_browser_1.Sha256, + streamCollector: config?.streamCollector ?? fetch_http_handler_1.streamCollector, + useDualstackEndpoint: config?.useDualstackEndpoint ?? (() => Promise.resolve(config_resolver_1.DEFAULT_USE_DUALSTACK_ENDPOINT)), + useFipsEndpoint: config?.useFipsEndpoint ?? (() => Promise.resolve(config_resolver_1.DEFAULT_USE_FIPS_ENDPOINT)), + }; +}; +exports.getRuntimeConfig = getRuntimeConfig; diff --git a/amplify/functions/fetchDocuments/node_modules/@aws-sdk/nested-clients/dist-cjs/submodules/sso-oidc/runtimeConfig.js b/amplify/functions/fetchDocuments/node_modules/@aws-sdk/nested-clients/dist-cjs/submodules/sso-oidc/runtimeConfig.js new file mode 100644 index 0000000..9cc237f --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@aws-sdk/nested-clients/dist-cjs/submodules/sso-oidc/runtimeConfig.js @@ -0,0 +1,51 @@ +"use strict"; +Object.defineProperty(exports, "__esModule", { value: true }); +exports.getRuntimeConfig = void 0; +const tslib_1 = require("tslib"); +const package_json_1 = tslib_1.__importDefault(require("../../../package.json")); +const core_1 = require("@aws-sdk/core"); +const util_user_agent_node_1 = require("@aws-sdk/util-user-agent-node"); +const config_resolver_1 = require("@smithy/config-resolver"); +const hash_node_1 = require("@smithy/hash-node"); +const middleware_retry_1 = require("@smithy/middleware-retry"); +const node_config_provider_1 = require("@smithy/node-config-provider"); +const node_http_handler_1 = require("@smithy/node-http-handler"); +const util_body_length_node_1 = require("@smithy/util-body-length-node"); +const util_retry_1 = require("@smithy/util-retry"); +const 
runtimeConfig_shared_1 = require("./runtimeConfig.shared"); +const smithy_client_1 = require("@smithy/smithy-client"); +const util_defaults_mode_node_1 = require("@smithy/util-defaults-mode-node"); +const smithy_client_2 = require("@smithy/smithy-client"); +const getRuntimeConfig = (config) => { + (0, smithy_client_2.emitWarningIfUnsupportedVersion)(process.version); + const defaultsMode = (0, util_defaults_mode_node_1.resolveDefaultsModeConfig)(config); + const defaultConfigProvider = () => defaultsMode().then(smithy_client_1.loadConfigsForDefaultMode); + const clientSharedValues = (0, runtimeConfig_shared_1.getRuntimeConfig)(config); + (0, core_1.emitWarningIfUnsupportedVersion)(process.version); + const profileConfig = { profile: config?.profile }; + return { + ...clientSharedValues, + ...config, + runtime: "node", + defaultsMode, + authSchemePreference: config?.authSchemePreference ?? (0, node_config_provider_1.loadConfig)(core_1.NODE_AUTH_SCHEME_PREFERENCE_OPTIONS, profileConfig), + bodyLengthChecker: config?.bodyLengthChecker ?? util_body_length_node_1.calculateBodyLength, + defaultUserAgentProvider: config?.defaultUserAgentProvider ?? + (0, util_user_agent_node_1.createDefaultUserAgentProvider)({ serviceId: clientSharedValues.serviceId, clientVersion: package_json_1.default.version }), + maxAttempts: config?.maxAttempts ?? (0, node_config_provider_1.loadConfig)(middleware_retry_1.NODE_MAX_ATTEMPT_CONFIG_OPTIONS, config), + region: config?.region ?? + (0, node_config_provider_1.loadConfig)(config_resolver_1.NODE_REGION_CONFIG_OPTIONS, { ...config_resolver_1.NODE_REGION_CONFIG_FILE_OPTIONS, ...profileConfig }), + requestHandler: node_http_handler_1.NodeHttpHandler.create(config?.requestHandler ?? defaultConfigProvider), + retryMode: config?.retryMode ?? 
+ (0, node_config_provider_1.loadConfig)({ + ...middleware_retry_1.NODE_RETRY_MODE_CONFIG_OPTIONS, + default: async () => (await defaultConfigProvider()).retryMode || util_retry_1.DEFAULT_RETRY_MODE, + }, config), + sha256: config?.sha256 ?? hash_node_1.Hash.bind(null, "sha256"), + streamCollector: config?.streamCollector ?? node_http_handler_1.streamCollector, + useDualstackEndpoint: config?.useDualstackEndpoint ?? (0, node_config_provider_1.loadConfig)(config_resolver_1.NODE_USE_DUALSTACK_ENDPOINT_CONFIG_OPTIONS, profileConfig), + useFipsEndpoint: config?.useFipsEndpoint ?? (0, node_config_provider_1.loadConfig)(config_resolver_1.NODE_USE_FIPS_ENDPOINT_CONFIG_OPTIONS, profileConfig), + userAgentAppId: config?.userAgentAppId ?? (0, node_config_provider_1.loadConfig)(util_user_agent_node_1.NODE_APP_ID_CONFIG_OPTIONS, profileConfig), + }; +}; +exports.getRuntimeConfig = getRuntimeConfig; diff --git a/amplify/functions/fetchDocuments/node_modules/@aws-sdk/nested-clients/dist-cjs/submodules/sso-oidc/runtimeConfig.native.js b/amplify/functions/fetchDocuments/node_modules/@aws-sdk/nested-clients/dist-cjs/submodules/sso-oidc/runtimeConfig.native.js new file mode 100644 index 0000000..34c5f8e --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@aws-sdk/nested-clients/dist-cjs/submodules/sso-oidc/runtimeConfig.native.js @@ -0,0 +1,15 @@ +"use strict"; +Object.defineProperty(exports, "__esModule", { value: true }); +exports.getRuntimeConfig = void 0; +const sha256_js_1 = require("@aws-crypto/sha256-js"); +const runtimeConfig_browser_1 = require("./runtimeConfig.browser"); +const getRuntimeConfig = (config) => { + const browserDefaults = (0, runtimeConfig_browser_1.getRuntimeConfig)(config); + return { + ...browserDefaults, + ...config, + runtime: "react-native", + sha256: config?.sha256 ?? 
sha256_js_1.Sha256, + }; +}; +exports.getRuntimeConfig = getRuntimeConfig; diff --git a/amplify/functions/fetchDocuments/node_modules/@aws-sdk/nested-clients/dist-cjs/submodules/sso-oidc/runtimeConfig.shared.js b/amplify/functions/fetchDocuments/node_modules/@aws-sdk/nested-clients/dist-cjs/submodules/sso-oidc/runtimeConfig.shared.js new file mode 100644 index 0000000..a305a1b --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@aws-sdk/nested-clients/dist-cjs/submodules/sso-oidc/runtimeConfig.shared.js @@ -0,0 +1,40 @@ +"use strict"; +Object.defineProperty(exports, "__esModule", { value: true }); +exports.getRuntimeConfig = void 0; +const core_1 = require("@aws-sdk/core"); +const core_2 = require("@smithy/core"); +const smithy_client_1 = require("@smithy/smithy-client"); +const url_parser_1 = require("@smithy/url-parser"); +const util_base64_1 = require("@smithy/util-base64"); +const util_utf8_1 = require("@smithy/util-utf8"); +const httpAuthSchemeProvider_1 = require("./auth/httpAuthSchemeProvider"); +const endpointResolver_1 = require("./endpoint/endpointResolver"); +const getRuntimeConfig = (config) => { + return { + apiVersion: "2019-06-10", + base64Decoder: config?.base64Decoder ?? util_base64_1.fromBase64, + base64Encoder: config?.base64Encoder ?? util_base64_1.toBase64, + disableHostPrefix: config?.disableHostPrefix ?? false, + endpointProvider: config?.endpointProvider ?? endpointResolver_1.defaultEndpointResolver, + extensions: config?.extensions ?? [], + httpAuthSchemeProvider: config?.httpAuthSchemeProvider ?? httpAuthSchemeProvider_1.defaultSSOOIDCHttpAuthSchemeProvider, + httpAuthSchemes: config?.httpAuthSchemes ?? 
[ + { + schemeId: "aws.auth#sigv4", + identityProvider: (ipc) => ipc.getIdentityProvider("aws.auth#sigv4"), + signer: new core_1.AwsSdkSigV4Signer(), + }, + { + schemeId: "smithy.api#noAuth", + identityProvider: (ipc) => ipc.getIdentityProvider("smithy.api#noAuth") || (async () => ({})), + signer: new core_2.NoAuthSigner(), + }, + ], + logger: config?.logger ?? new smithy_client_1.NoOpLogger(), + serviceId: config?.serviceId ?? "SSO OIDC", + urlParser: config?.urlParser ?? url_parser_1.parseUrl, + utf8Decoder: config?.utf8Decoder ?? util_utf8_1.fromUtf8, + utf8Encoder: config?.utf8Encoder ?? util_utf8_1.toUtf8, + }; +}; +exports.getRuntimeConfig = getRuntimeConfig; diff --git a/amplify/functions/fetchDocuments/node_modules/@aws-sdk/nested-clients/dist-cjs/submodules/sts/STSClient.js b/amplify/functions/fetchDocuments/node_modules/@aws-sdk/nested-clients/dist-cjs/submodules/sts/STSClient.js new file mode 100644 index 0000000..13c3c74 --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@aws-sdk/nested-clients/dist-cjs/submodules/sts/STSClient.js @@ -0,0 +1,52 @@ +"use strict"; +Object.defineProperty(exports, "__esModule", { value: true }); +exports.STSClient = exports.__Client = void 0; +const middleware_host_header_1 = require("@aws-sdk/middleware-host-header"); +const middleware_logger_1 = require("@aws-sdk/middleware-logger"); +const middleware_recursion_detection_1 = require("@aws-sdk/middleware-recursion-detection"); +const middleware_user_agent_1 = require("@aws-sdk/middleware-user-agent"); +const config_resolver_1 = require("@smithy/config-resolver"); +const core_1 = require("@smithy/core"); +const middleware_content_length_1 = require("@smithy/middleware-content-length"); +const middleware_endpoint_1 = require("@smithy/middleware-endpoint"); +const middleware_retry_1 = require("@smithy/middleware-retry"); +const smithy_client_1 = require("@smithy/smithy-client"); +Object.defineProperty(exports, "__Client", { enumerable: true, get: function () { 
return smithy_client_1.Client; } }); +const httpAuthSchemeProvider_1 = require("./auth/httpAuthSchemeProvider"); +const EndpointParameters_1 = require("./endpoint/EndpointParameters"); +const runtimeConfig_1 = require("./runtimeConfig"); +const runtimeExtensions_1 = require("./runtimeExtensions"); +class STSClient extends smithy_client_1.Client { + config; + constructor(...[configuration]) { + const _config_0 = (0, runtimeConfig_1.getRuntimeConfig)(configuration || {}); + super(_config_0); + this.initConfig = _config_0; + const _config_1 = (0, EndpointParameters_1.resolveClientEndpointParameters)(_config_0); + const _config_2 = (0, middleware_user_agent_1.resolveUserAgentConfig)(_config_1); + const _config_3 = (0, middleware_retry_1.resolveRetryConfig)(_config_2); + const _config_4 = (0, config_resolver_1.resolveRegionConfig)(_config_3); + const _config_5 = (0, middleware_host_header_1.resolveHostHeaderConfig)(_config_4); + const _config_6 = (0, middleware_endpoint_1.resolveEndpointConfig)(_config_5); + const _config_7 = (0, httpAuthSchemeProvider_1.resolveHttpAuthSchemeConfig)(_config_6); + const _config_8 = (0, runtimeExtensions_1.resolveRuntimeExtensions)(_config_7, configuration?.extensions || []); + this.config = _config_8; + this.middlewareStack.use((0, middleware_user_agent_1.getUserAgentPlugin)(this.config)); + this.middlewareStack.use((0, middleware_retry_1.getRetryPlugin)(this.config)); + this.middlewareStack.use((0, middleware_content_length_1.getContentLengthPlugin)(this.config)); + this.middlewareStack.use((0, middleware_host_header_1.getHostHeaderPlugin)(this.config)); + this.middlewareStack.use((0, middleware_logger_1.getLoggerPlugin)(this.config)); + this.middlewareStack.use((0, middleware_recursion_detection_1.getRecursionDetectionPlugin)(this.config)); + this.middlewareStack.use((0, core_1.getHttpAuthSchemeEndpointRuleSetPlugin)(this.config, { + httpAuthSchemeParametersProvider: httpAuthSchemeProvider_1.defaultSTSHttpAuthSchemeParametersProvider, 
+ identityProviderConfigProvider: async (config) => new core_1.DefaultIdentityProviderConfig({ + "aws.auth#sigv4": config.credentials, + }), + })); + this.middlewareStack.use((0, core_1.getHttpSigningPlugin)(this.config)); + } + destroy() { + super.destroy(); + } +} +exports.STSClient = STSClient; diff --git a/amplify/functions/fetchDocuments/node_modules/@aws-sdk/nested-clients/dist-cjs/submodules/sts/auth/httpAuthExtensionConfiguration.js b/amplify/functions/fetchDocuments/node_modules/@aws-sdk/nested-clients/dist-cjs/submodules/sts/auth/httpAuthExtensionConfiguration.js new file mode 100644 index 0000000..239095e --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@aws-sdk/nested-clients/dist-cjs/submodules/sts/auth/httpAuthExtensionConfiguration.js @@ -0,0 +1,43 @@ +"use strict"; +Object.defineProperty(exports, "__esModule", { value: true }); +exports.resolveHttpAuthRuntimeConfig = exports.getHttpAuthExtensionConfiguration = void 0; +const getHttpAuthExtensionConfiguration = (runtimeConfig) => { + const _httpAuthSchemes = runtimeConfig.httpAuthSchemes; + let _httpAuthSchemeProvider = runtimeConfig.httpAuthSchemeProvider; + let _credentials = runtimeConfig.credentials; + return { + setHttpAuthScheme(httpAuthScheme) { + const index = _httpAuthSchemes.findIndex((scheme) => scheme.schemeId === httpAuthScheme.schemeId); + if (index === -1) { + _httpAuthSchemes.push(httpAuthScheme); + } + else { + _httpAuthSchemes.splice(index, 1, httpAuthScheme); + } + }, + httpAuthSchemes() { + return _httpAuthSchemes; + }, + setHttpAuthSchemeProvider(httpAuthSchemeProvider) { + _httpAuthSchemeProvider = httpAuthSchemeProvider; + }, + httpAuthSchemeProvider() { + return _httpAuthSchemeProvider; + }, + setCredentials(credentials) { + _credentials = credentials; + }, + credentials() { + return _credentials; + }, + }; +}; +exports.getHttpAuthExtensionConfiguration = getHttpAuthExtensionConfiguration; +const resolveHttpAuthRuntimeConfig = (config) => { + return { + 
httpAuthSchemes: config.httpAuthSchemes(), + httpAuthSchemeProvider: config.httpAuthSchemeProvider(), + credentials: config.credentials(), + }; +}; +exports.resolveHttpAuthRuntimeConfig = resolveHttpAuthRuntimeConfig; diff --git a/amplify/functions/fetchDocuments/node_modules/@aws-sdk/nested-clients/dist-cjs/submodules/sts/auth/httpAuthSchemeProvider.js b/amplify/functions/fetchDocuments/node_modules/@aws-sdk/nested-clients/dist-cjs/submodules/sts/auth/httpAuthSchemeProvider.js new file mode 100644 index 0000000..842241a --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@aws-sdk/nested-clients/dist-cjs/submodules/sts/auth/httpAuthSchemeProvider.js @@ -0,0 +1,62 @@ +"use strict"; +Object.defineProperty(exports, "__esModule", { value: true }); +exports.resolveHttpAuthSchemeConfig = exports.resolveStsAuthConfig = exports.defaultSTSHttpAuthSchemeProvider = exports.defaultSTSHttpAuthSchemeParametersProvider = void 0; +const core_1 = require("@aws-sdk/core"); +const util_middleware_1 = require("@smithy/util-middleware"); +const STSClient_1 = require("../STSClient"); +const defaultSTSHttpAuthSchemeParametersProvider = async (config, context, input) => { + return { + operation: (0, util_middleware_1.getSmithyContext)(context).operation, + region: (await (0, util_middleware_1.normalizeProvider)(config.region)()) || + (() => { + throw new Error("expected `region` to be configured for `aws.auth#sigv4`"); + })(), + }; +}; +exports.defaultSTSHttpAuthSchemeParametersProvider = defaultSTSHttpAuthSchemeParametersProvider; +function createAwsAuthSigv4HttpAuthOption(authParameters) { + return { + schemeId: "aws.auth#sigv4", + signingProperties: { + name: "sts", + region: authParameters.region, + }, + propertiesExtractor: (config, context) => ({ + signingProperties: { + config, + context, + }, + }), + }; +} +function createSmithyApiNoAuthHttpAuthOption(authParameters) { + return { + schemeId: "smithy.api#noAuth", + }; +} +const defaultSTSHttpAuthSchemeProvider = 
(authParameters) => { + const options = []; + switch (authParameters.operation) { + case "AssumeRoleWithWebIdentity": { + options.push(createSmithyApiNoAuthHttpAuthOption(authParameters)); + break; + } + default: { + options.push(createAwsAuthSigv4HttpAuthOption(authParameters)); + } + } + return options; +}; +exports.defaultSTSHttpAuthSchemeProvider = defaultSTSHttpAuthSchemeProvider; +const resolveStsAuthConfig = (input) => Object.assign(input, { + stsClientCtor: STSClient_1.STSClient, +}); +exports.resolveStsAuthConfig = resolveStsAuthConfig; +const resolveHttpAuthSchemeConfig = (config) => { + const config_0 = (0, exports.resolveStsAuthConfig)(config); + const config_1 = (0, core_1.resolveAwsSdkSigV4Config)(config_0); + return Object.assign(config_1, { + authSchemePreference: (0, util_middleware_1.normalizeProvider)(config.authSchemePreference ?? []), + }); +}; +exports.resolveHttpAuthSchemeConfig = resolveHttpAuthSchemeConfig; diff --git a/amplify/functions/fetchDocuments/node_modules/@aws-sdk/nested-clients/dist-cjs/submodules/sts/endpoint/EndpointParameters.js b/amplify/functions/fetchDocuments/node_modules/@aws-sdk/nested-clients/dist-cjs/submodules/sts/endpoint/EndpointParameters.js new file mode 100644 index 0000000..3aec6a5 --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@aws-sdk/nested-clients/dist-cjs/submodules/sts/endpoint/EndpointParameters.js @@ -0,0 +1,19 @@ +"use strict"; +Object.defineProperty(exports, "__esModule", { value: true }); +exports.commonParams = exports.resolveClientEndpointParameters = void 0; +const resolveClientEndpointParameters = (options) => { + return Object.assign(options, { + useDualstackEndpoint: options.useDualstackEndpoint ?? false, + useFipsEndpoint: options.useFipsEndpoint ?? false, + useGlobalEndpoint: options.useGlobalEndpoint ?? 
false, + defaultSigningName: "sts", + }); +}; +exports.resolveClientEndpointParameters = resolveClientEndpointParameters; +exports.commonParams = { + UseGlobalEndpoint: { type: "builtInParams", name: "useGlobalEndpoint" }, + UseFIPS: { type: "builtInParams", name: "useFipsEndpoint" }, + Endpoint: { type: "builtInParams", name: "endpoint" }, + Region: { type: "builtInParams", name: "region" }, + UseDualStack: { type: "builtInParams", name: "useDualstackEndpoint" }, +}; diff --git a/amplify/functions/fetchDocuments/node_modules/@aws-sdk/nested-clients/dist-cjs/submodules/sts/endpoint/endpointResolver.js b/amplify/functions/fetchDocuments/node_modules/@aws-sdk/nested-clients/dist-cjs/submodules/sts/endpoint/endpointResolver.js new file mode 100644 index 0000000..6bfb6e9 --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@aws-sdk/nested-clients/dist-cjs/submodules/sts/endpoint/endpointResolver.js @@ -0,0 +1,18 @@ +"use strict"; +Object.defineProperty(exports, "__esModule", { value: true }); +exports.defaultEndpointResolver = void 0; +const util_endpoints_1 = require("@aws-sdk/util-endpoints"); +const util_endpoints_2 = require("@smithy/util-endpoints"); +const ruleset_1 = require("./ruleset"); +const cache = new util_endpoints_2.EndpointCache({ + size: 50, + params: ["Endpoint", "Region", "UseDualStack", "UseFIPS", "UseGlobalEndpoint"], +}); +const defaultEndpointResolver = (endpointParams, context = {}) => { + return cache.get(endpointParams, () => (0, util_endpoints_2.resolveEndpoint)(ruleset_1.ruleSet, { + endpointParams: endpointParams, + logger: context.logger, + })); +}; +exports.defaultEndpointResolver = defaultEndpointResolver; +util_endpoints_2.customEndpointFunctions.aws = util_endpoints_1.awsEndpointFunctions; diff --git a/amplify/functions/fetchDocuments/node_modules/@aws-sdk/nested-clients/dist-cjs/submodules/sts/endpoint/ruleset.js 
b/amplify/functions/fetchDocuments/node_modules/@aws-sdk/nested-clients/dist-cjs/submodules/sts/endpoint/ruleset.js new file mode 100644 index 0000000..7428259 --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@aws-sdk/nested-clients/dist-cjs/submodules/sts/endpoint/ruleset.js @@ -0,0 +1,7 @@ +"use strict"; +Object.defineProperty(exports, "__esModule", { value: true }); +exports.ruleSet = void 0; +const F = "required", G = "type", H = "fn", I = "argv", J = "ref"; +const a = false, b = true, c = "booleanEquals", d = "stringEquals", e = "sigv4", f = "sts", g = "us-east-1", h = "endpoint", i = "https://sts.{Region}.{PartitionResult#dnsSuffix}", j = "tree", k = "error", l = "getAttr", m = { [F]: false, [G]: "String" }, n = { [F]: true, "default": false, [G]: "Boolean" }, o = { [J]: "Endpoint" }, p = { [H]: "isSet", [I]: [{ [J]: "Region" }] }, q = { [J]: "Region" }, r = { [H]: "aws.partition", [I]: [q], "assign": "PartitionResult" }, s = { [J]: "UseFIPS" }, t = { [J]: "UseDualStack" }, u = { "url": "https://sts.amazonaws.com", "properties": { "authSchemes": [{ "name": e, "signingName": f, "signingRegion": g }] }, "headers": {} }, v = {}, w = { "conditions": [{ [H]: d, [I]: [q, "aws-global"] }], [h]: u, [G]: h }, x = { [H]: c, [I]: [s, true] }, y = { [H]: c, [I]: [t, true] }, z = { [H]: l, [I]: [{ [J]: "PartitionResult" }, "supportsFIPS"] }, A = { [J]: "PartitionResult" }, B = { [H]: c, [I]: [true, { [H]: l, [I]: [A, "supportsDualStack"] }] }, C = [{ [H]: "isSet", [I]: [o] }], D = [x], E = [y]; +const _data = { version: "1.0", parameters: { Region: m, UseDualStack: n, UseFIPS: n, Endpoint: m, UseGlobalEndpoint: n }, rules: [{ conditions: [{ [H]: c, [I]: [{ [J]: "UseGlobalEndpoint" }, b] }, { [H]: "not", [I]: C }, p, r, { [H]: c, [I]: [s, a] }, { [H]: c, [I]: [t, a] }], rules: [{ conditions: [{ [H]: d, [I]: [q, "ap-northeast-1"] }], endpoint: u, [G]: h }, { conditions: [{ [H]: d, [I]: [q, "ap-south-1"] }], endpoint: u, [G]: h }, { conditions: [{ [H]: d, 
[I]: [q, "ap-southeast-1"] }], endpoint: u, [G]: h }, { conditions: [{ [H]: d, [I]: [q, "ap-southeast-2"] }], endpoint: u, [G]: h }, w, { conditions: [{ [H]: d, [I]: [q, "ca-central-1"] }], endpoint: u, [G]: h }, { conditions: [{ [H]: d, [I]: [q, "eu-central-1"] }], endpoint: u, [G]: h }, { conditions: [{ [H]: d, [I]: [q, "eu-north-1"] }], endpoint: u, [G]: h }, { conditions: [{ [H]: d, [I]: [q, "eu-west-1"] }], endpoint: u, [G]: h }, { conditions: [{ [H]: d, [I]: [q, "eu-west-2"] }], endpoint: u, [G]: h }, { conditions: [{ [H]: d, [I]: [q, "eu-west-3"] }], endpoint: u, [G]: h }, { conditions: [{ [H]: d, [I]: [q, "sa-east-1"] }], endpoint: u, [G]: h }, { conditions: [{ [H]: d, [I]: [q, g] }], endpoint: u, [G]: h }, { conditions: [{ [H]: d, [I]: [q, "us-east-2"] }], endpoint: u, [G]: h }, { conditions: [{ [H]: d, [I]: [q, "us-west-1"] }], endpoint: u, [G]: h }, { conditions: [{ [H]: d, [I]: [q, "us-west-2"] }], endpoint: u, [G]: h }, { endpoint: { url: i, properties: { authSchemes: [{ name: e, signingName: f, signingRegion: "{Region}" }] }, headers: v }, [G]: h }], [G]: j }, { conditions: C, rules: [{ conditions: D, error: "Invalid Configuration: FIPS and custom endpoint are not supported", [G]: k }, { conditions: E, error: "Invalid Configuration: Dualstack and custom endpoint are not supported", [G]: k }, { endpoint: { url: o, properties: v, headers: v }, [G]: h }], [G]: j }, { conditions: [p], rules: [{ conditions: [r], rules: [{ conditions: [x, y], rules: [{ conditions: [{ [H]: c, [I]: [b, z] }, B], rules: [{ endpoint: { url: "https://sts-fips.{Region}.{PartitionResult#dualStackDnsSuffix}", properties: v, headers: v }, [G]: h }], [G]: j }, { error: "FIPS and DualStack are enabled, but this partition does not support one or both", [G]: k }], [G]: j }, { conditions: D, rules: [{ conditions: [{ [H]: c, [I]: [z, b] }], rules: [{ conditions: [{ [H]: d, [I]: [{ [H]: l, [I]: [A, "name"] }, "aws-us-gov"] }], endpoint: { url: "https://sts.{Region}.amazonaws.com", 
properties: v, headers: v }, [G]: h }, { endpoint: { url: "https://sts-fips.{Region}.{PartitionResult#dnsSuffix}", properties: v, headers: v }, [G]: h }], [G]: j }, { error: "FIPS is enabled but this partition does not support FIPS", [G]: k }], [G]: j }, { conditions: E, rules: [{ conditions: [B], rules: [{ endpoint: { url: "https://sts.{Region}.{PartitionResult#dualStackDnsSuffix}", properties: v, headers: v }, [G]: h }], [G]: j }, { error: "DualStack is enabled but this partition does not support DualStack", [G]: k }], [G]: j }, w, { endpoint: { url: i, properties: v, headers: v }, [G]: h }], [G]: j }], [G]: j }, { error: "Invalid Configuration: Missing Region", [G]: k }] }; +exports.ruleSet = _data; diff --git a/amplify/functions/fetchDocuments/node_modules/@aws-sdk/nested-clients/dist-cjs/submodules/sts/index.js b/amplify/functions/fetchDocuments/node_modules/@aws-sdk/nested-clients/dist-cjs/submodules/sts/index.js new file mode 100644 index 0000000..bb0c42a --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@aws-sdk/nested-clients/dist-cjs/submodules/sts/index.js @@ -0,0 +1,951 @@ +"use strict"; +var __defProp = Object.defineProperty; +var __getOwnPropDesc = Object.getOwnPropertyDescriptor; +var __getOwnPropNames = Object.getOwnPropertyNames; +var __hasOwnProp = Object.prototype.hasOwnProperty; +var __name = (target, value) => __defProp(target, "name", { value, configurable: true }); +var __export = (target, all) => { + for (var name in all) + __defProp(target, name, { get: all[name], enumerable: true }); +}; +var __copyProps = (to, from, except, desc) => { + if (from && typeof from === "object" || typeof from === "function") { + for (let key of __getOwnPropNames(from)) + if (!__hasOwnProp.call(to, key) && key !== except) + __defProp(to, key, { get: () => from[key], enumerable: !(desc = __getOwnPropDesc(from, key)) || desc.enumerable }); + } + return to; +}; +var __reExport = (target, mod, secondTarget) => (__copyProps(target, mod, "default"), 
secondTarget && __copyProps(secondTarget, mod, "default")); +var __toCommonJS = (mod) => __copyProps(__defProp({}, "__esModule", { value: true }), mod); + +// src/submodules/sts/index.ts +var index_exports = {}; +__export(index_exports, { + AssumeRoleCommand: () => AssumeRoleCommand, + AssumeRoleResponseFilterSensitiveLog: () => AssumeRoleResponseFilterSensitiveLog, + AssumeRoleWithWebIdentityCommand: () => AssumeRoleWithWebIdentityCommand, + AssumeRoleWithWebIdentityRequestFilterSensitiveLog: () => AssumeRoleWithWebIdentityRequestFilterSensitiveLog, + AssumeRoleWithWebIdentityResponseFilterSensitiveLog: () => AssumeRoleWithWebIdentityResponseFilterSensitiveLog, + ClientInputEndpointParameters: () => import_EndpointParameters3.ClientInputEndpointParameters, + CredentialsFilterSensitiveLog: () => CredentialsFilterSensitiveLog, + ExpiredTokenException: () => ExpiredTokenException, + IDPCommunicationErrorException: () => IDPCommunicationErrorException, + IDPRejectedClaimException: () => IDPRejectedClaimException, + InvalidIdentityTokenException: () => InvalidIdentityTokenException, + MalformedPolicyDocumentException: () => MalformedPolicyDocumentException, + PackedPolicyTooLargeException: () => PackedPolicyTooLargeException, + RegionDisabledException: () => RegionDisabledException, + STS: () => STS, + STSServiceException: () => STSServiceException, + decorateDefaultCredentialProvider: () => decorateDefaultCredentialProvider, + getDefaultRoleAssumer: () => getDefaultRoleAssumer2, + getDefaultRoleAssumerWithWebIdentity: () => getDefaultRoleAssumerWithWebIdentity2 +}); +module.exports = __toCommonJS(index_exports); +__reExport(index_exports, require("./STSClient"), module.exports); + +// src/submodules/sts/STS.ts +var import_smithy_client6 = require("@smithy/smithy-client"); + +// src/submodules/sts/commands/AssumeRoleCommand.ts +var import_middleware_endpoint = require("@smithy/middleware-endpoint"); +var import_middleware_serde = require("@smithy/middleware-serde"); 
+var import_smithy_client4 = require("@smithy/smithy-client"); +var import_EndpointParameters = require("./endpoint/EndpointParameters"); + +// src/submodules/sts/models/models_0.ts +var import_smithy_client2 = require("@smithy/smithy-client"); + +// src/submodules/sts/models/STSServiceException.ts +var import_smithy_client = require("@smithy/smithy-client"); +var STSServiceException = class _STSServiceException extends import_smithy_client.ServiceException { + static { + __name(this, "STSServiceException"); + } + /** + * @internal + */ + constructor(options) { + super(options); + Object.setPrototypeOf(this, _STSServiceException.prototype); + } +}; + +// src/submodules/sts/models/models_0.ts +var CredentialsFilterSensitiveLog = /* @__PURE__ */ __name((obj) => ({ + ...obj, + ...obj.SecretAccessKey && { SecretAccessKey: import_smithy_client2.SENSITIVE_STRING } +}), "CredentialsFilterSensitiveLog"); +var AssumeRoleResponseFilterSensitiveLog = /* @__PURE__ */ __name((obj) => ({ + ...obj, + ...obj.Credentials && { Credentials: CredentialsFilterSensitiveLog(obj.Credentials) } +}), "AssumeRoleResponseFilterSensitiveLog"); +var ExpiredTokenException = class _ExpiredTokenException extends STSServiceException { + static { + __name(this, "ExpiredTokenException"); + } + name = "ExpiredTokenException"; + $fault = "client"; + /** + * @internal + */ + constructor(opts) { + super({ + name: "ExpiredTokenException", + $fault: "client", + ...opts + }); + Object.setPrototypeOf(this, _ExpiredTokenException.prototype); + } +}; +var MalformedPolicyDocumentException = class _MalformedPolicyDocumentException extends STSServiceException { + static { + __name(this, "MalformedPolicyDocumentException"); + } + name = "MalformedPolicyDocumentException"; + $fault = "client"; + /** + * @internal + */ + constructor(opts) { + super({ + name: "MalformedPolicyDocumentException", + $fault: "client", + ...opts + }); + Object.setPrototypeOf(this, _MalformedPolicyDocumentException.prototype); + } +}; +var 
PackedPolicyTooLargeException = class _PackedPolicyTooLargeException extends STSServiceException { + static { + __name(this, "PackedPolicyTooLargeException"); + } + name = "PackedPolicyTooLargeException"; + $fault = "client"; + /** + * @internal + */ + constructor(opts) { + super({ + name: "PackedPolicyTooLargeException", + $fault: "client", + ...opts + }); + Object.setPrototypeOf(this, _PackedPolicyTooLargeException.prototype); + } +}; +var RegionDisabledException = class _RegionDisabledException extends STSServiceException { + static { + __name(this, "RegionDisabledException"); + } + name = "RegionDisabledException"; + $fault = "client"; + /** + * @internal + */ + constructor(opts) { + super({ + name: "RegionDisabledException", + $fault: "client", + ...opts + }); + Object.setPrototypeOf(this, _RegionDisabledException.prototype); + } +}; +var IDPRejectedClaimException = class _IDPRejectedClaimException extends STSServiceException { + static { + __name(this, "IDPRejectedClaimException"); + } + name = "IDPRejectedClaimException"; + $fault = "client"; + /** + * @internal + */ + constructor(opts) { + super({ + name: "IDPRejectedClaimException", + $fault: "client", + ...opts + }); + Object.setPrototypeOf(this, _IDPRejectedClaimException.prototype); + } +}; +var InvalidIdentityTokenException = class _InvalidIdentityTokenException extends STSServiceException { + static { + __name(this, "InvalidIdentityTokenException"); + } + name = "InvalidIdentityTokenException"; + $fault = "client"; + /** + * @internal + */ + constructor(opts) { + super({ + name: "InvalidIdentityTokenException", + $fault: "client", + ...opts + }); + Object.setPrototypeOf(this, _InvalidIdentityTokenException.prototype); + } +}; +var AssumeRoleWithWebIdentityRequestFilterSensitiveLog = /* @__PURE__ */ __name((obj) => ({ + ...obj, + ...obj.WebIdentityToken && { WebIdentityToken: import_smithy_client2.SENSITIVE_STRING } +}), "AssumeRoleWithWebIdentityRequestFilterSensitiveLog"); +var 
AssumeRoleWithWebIdentityResponseFilterSensitiveLog = /* @__PURE__ */ __name((obj) => ({ + ...obj, + ...obj.Credentials && { Credentials: CredentialsFilterSensitiveLog(obj.Credentials) } +}), "AssumeRoleWithWebIdentityResponseFilterSensitiveLog"); +var IDPCommunicationErrorException = class _IDPCommunicationErrorException extends STSServiceException { + static { + __name(this, "IDPCommunicationErrorException"); + } + name = "IDPCommunicationErrorException"; + $fault = "client"; + /** + * @internal + */ + constructor(opts) { + super({ + name: "IDPCommunicationErrorException", + $fault: "client", + ...opts + }); + Object.setPrototypeOf(this, _IDPCommunicationErrorException.prototype); + } +}; + +// src/submodules/sts/protocols/Aws_query.ts +var import_core = require("@aws-sdk/core"); +var import_protocol_http = require("@smithy/protocol-http"); +var import_smithy_client3 = require("@smithy/smithy-client"); +var se_AssumeRoleCommand = /* @__PURE__ */ __name(async (input, context) => { + const headers = SHARED_HEADERS; + let body; + body = buildFormUrlencodedString({ + ...se_AssumeRoleRequest(input, context), + [_A]: _AR, + [_V]: _ + }); + return buildHttpRpcRequest(context, headers, "/", void 0, body); +}, "se_AssumeRoleCommand"); +var se_AssumeRoleWithWebIdentityCommand = /* @__PURE__ */ __name(async (input, context) => { + const headers = SHARED_HEADERS; + let body; + body = buildFormUrlencodedString({ + ...se_AssumeRoleWithWebIdentityRequest(input, context), + [_A]: _ARWWI, + [_V]: _ + }); + return buildHttpRpcRequest(context, headers, "/", void 0, body); +}, "se_AssumeRoleWithWebIdentityCommand"); +var de_AssumeRoleCommand = /* @__PURE__ */ __name(async (output, context) => { + if (output.statusCode >= 300) { + return de_CommandError(output, context); + } + const data = await (0, import_core.parseXmlBody)(output.body, context); + let contents = {}; + contents = de_AssumeRoleResponse(data.AssumeRoleResult, context); + const response = { + $metadata: 
deserializeMetadata(output), + ...contents + }; + return response; +}, "de_AssumeRoleCommand"); +var de_AssumeRoleWithWebIdentityCommand = /* @__PURE__ */ __name(async (output, context) => { + if (output.statusCode >= 300) { + return de_CommandError(output, context); + } + const data = await (0, import_core.parseXmlBody)(output.body, context); + let contents = {}; + contents = de_AssumeRoleWithWebIdentityResponse(data.AssumeRoleWithWebIdentityResult, context); + const response = { + $metadata: deserializeMetadata(output), + ...contents + }; + return response; +}, "de_AssumeRoleWithWebIdentityCommand"); +var de_CommandError = /* @__PURE__ */ __name(async (output, context) => { + const parsedOutput = { + ...output, + body: await (0, import_core.parseXmlErrorBody)(output.body, context) + }; + const errorCode = loadQueryErrorCode(output, parsedOutput.body); + switch (errorCode) { + case "ExpiredTokenException": + case "com.amazonaws.sts#ExpiredTokenException": + throw await de_ExpiredTokenExceptionRes(parsedOutput, context); + case "MalformedPolicyDocument": + case "com.amazonaws.sts#MalformedPolicyDocumentException": + throw await de_MalformedPolicyDocumentExceptionRes(parsedOutput, context); + case "PackedPolicyTooLarge": + case "com.amazonaws.sts#PackedPolicyTooLargeException": + throw await de_PackedPolicyTooLargeExceptionRes(parsedOutput, context); + case "RegionDisabledException": + case "com.amazonaws.sts#RegionDisabledException": + throw await de_RegionDisabledExceptionRes(parsedOutput, context); + case "IDPCommunicationError": + case "com.amazonaws.sts#IDPCommunicationErrorException": + throw await de_IDPCommunicationErrorExceptionRes(parsedOutput, context); + case "IDPRejectedClaim": + case "com.amazonaws.sts#IDPRejectedClaimException": + throw await de_IDPRejectedClaimExceptionRes(parsedOutput, context); + case "InvalidIdentityToken": + case "com.amazonaws.sts#InvalidIdentityTokenException": + throw await de_InvalidIdentityTokenExceptionRes(parsedOutput, 
context); + default: + const parsedBody = parsedOutput.body; + return throwDefaultError({ + output, + parsedBody: parsedBody.Error, + errorCode + }); + } +}, "de_CommandError"); +var de_ExpiredTokenExceptionRes = /* @__PURE__ */ __name(async (parsedOutput, context) => { + const body = parsedOutput.body; + const deserialized = de_ExpiredTokenException(body.Error, context); + const exception = new ExpiredTokenException({ + $metadata: deserializeMetadata(parsedOutput), + ...deserialized + }); + return (0, import_smithy_client3.decorateServiceException)(exception, body); +}, "de_ExpiredTokenExceptionRes"); +var de_IDPCommunicationErrorExceptionRes = /* @__PURE__ */ __name(async (parsedOutput, context) => { + const body = parsedOutput.body; + const deserialized = de_IDPCommunicationErrorException(body.Error, context); + const exception = new IDPCommunicationErrorException({ + $metadata: deserializeMetadata(parsedOutput), + ...deserialized + }); + return (0, import_smithy_client3.decorateServiceException)(exception, body); +}, "de_IDPCommunicationErrorExceptionRes"); +var de_IDPRejectedClaimExceptionRes = /* @__PURE__ */ __name(async (parsedOutput, context) => { + const body = parsedOutput.body; + const deserialized = de_IDPRejectedClaimException(body.Error, context); + const exception = new IDPRejectedClaimException({ + $metadata: deserializeMetadata(parsedOutput), + ...deserialized + }); + return (0, import_smithy_client3.decorateServiceException)(exception, body); +}, "de_IDPRejectedClaimExceptionRes"); +var de_InvalidIdentityTokenExceptionRes = /* @__PURE__ */ __name(async (parsedOutput, context) => { + const body = parsedOutput.body; + const deserialized = de_InvalidIdentityTokenException(body.Error, context); + const exception = new InvalidIdentityTokenException({ + $metadata: deserializeMetadata(parsedOutput), + ...deserialized + }); + return (0, import_smithy_client3.decorateServiceException)(exception, body); +}, "de_InvalidIdentityTokenExceptionRes"); +var 
de_MalformedPolicyDocumentExceptionRes = /* @__PURE__ */ __name(async (parsedOutput, context) => { + const body = parsedOutput.body; + const deserialized = de_MalformedPolicyDocumentException(body.Error, context); + const exception = new MalformedPolicyDocumentException({ + $metadata: deserializeMetadata(parsedOutput), + ...deserialized + }); + return (0, import_smithy_client3.decorateServiceException)(exception, body); +}, "de_MalformedPolicyDocumentExceptionRes"); +var de_PackedPolicyTooLargeExceptionRes = /* @__PURE__ */ __name(async (parsedOutput, context) => { + const body = parsedOutput.body; + const deserialized = de_PackedPolicyTooLargeException(body.Error, context); + const exception = new PackedPolicyTooLargeException({ + $metadata: deserializeMetadata(parsedOutput), + ...deserialized + }); + return (0, import_smithy_client3.decorateServiceException)(exception, body); +}, "de_PackedPolicyTooLargeExceptionRes"); +var de_RegionDisabledExceptionRes = /* @__PURE__ */ __name(async (parsedOutput, context) => { + const body = parsedOutput.body; + const deserialized = de_RegionDisabledException(body.Error, context); + const exception = new RegionDisabledException({ + $metadata: deserializeMetadata(parsedOutput), + ...deserialized + }); + return (0, import_smithy_client3.decorateServiceException)(exception, body); +}, "de_RegionDisabledExceptionRes"); +var se_AssumeRoleRequest = /* @__PURE__ */ __name((input, context) => { + const entries = {}; + if (input[_RA] != null) { + entries[_RA] = input[_RA]; + } + if (input[_RSN] != null) { + entries[_RSN] = input[_RSN]; + } + if (input[_PA] != null) { + const memberEntries = se_policyDescriptorListType(input[_PA], context); + if (input[_PA]?.length === 0) { + entries.PolicyArns = []; + } + Object.entries(memberEntries).forEach(([key, value]) => { + const loc = `PolicyArns.${key}`; + entries[loc] = value; + }); + } + if (input[_P] != null) { + entries[_P] = input[_P]; + } + if (input[_DS] != null) { + entries[_DS] = 
input[_DS]; + } + if (input[_T] != null) { + const memberEntries = se_tagListType(input[_T], context); + if (input[_T]?.length === 0) { + entries.Tags = []; + } + Object.entries(memberEntries).forEach(([key, value]) => { + const loc = `Tags.${key}`; + entries[loc] = value; + }); + } + if (input[_TTK] != null) { + const memberEntries = se_tagKeyListType(input[_TTK], context); + if (input[_TTK]?.length === 0) { + entries.TransitiveTagKeys = []; + } + Object.entries(memberEntries).forEach(([key, value]) => { + const loc = `TransitiveTagKeys.${key}`; + entries[loc] = value; + }); + } + if (input[_EI] != null) { + entries[_EI] = input[_EI]; + } + if (input[_SN] != null) { + entries[_SN] = input[_SN]; + } + if (input[_TC] != null) { + entries[_TC] = input[_TC]; + } + if (input[_SI] != null) { + entries[_SI] = input[_SI]; + } + if (input[_PC] != null) { + const memberEntries = se_ProvidedContextsListType(input[_PC], context); + if (input[_PC]?.length === 0) { + entries.ProvidedContexts = []; + } + Object.entries(memberEntries).forEach(([key, value]) => { + const loc = `ProvidedContexts.${key}`; + entries[loc] = value; + }); + } + return entries; +}, "se_AssumeRoleRequest"); +var se_AssumeRoleWithWebIdentityRequest = /* @__PURE__ */ __name((input, context) => { + const entries = {}; + if (input[_RA] != null) { + entries[_RA] = input[_RA]; + } + if (input[_RSN] != null) { + entries[_RSN] = input[_RSN]; + } + if (input[_WIT] != null) { + entries[_WIT] = input[_WIT]; + } + if (input[_PI] != null) { + entries[_PI] = input[_PI]; + } + if (input[_PA] != null) { + const memberEntries = se_policyDescriptorListType(input[_PA], context); + if (input[_PA]?.length === 0) { + entries.PolicyArns = []; + } + Object.entries(memberEntries).forEach(([key, value]) => { + const loc = `PolicyArns.${key}`; + entries[loc] = value; + }); + } + if (input[_P] != null) { + entries[_P] = input[_P]; + } + if (input[_DS] != null) { + entries[_DS] = input[_DS]; + } + return entries; +}, 
"se_AssumeRoleWithWebIdentityRequest"); +var se_policyDescriptorListType = /* @__PURE__ */ __name((input, context) => { + const entries = {}; + let counter = 1; + for (const entry of input) { + if (entry === null) { + continue; + } + const memberEntries = se_PolicyDescriptorType(entry, context); + Object.entries(memberEntries).forEach(([key, value]) => { + entries[`member.${counter}.${key}`] = value; + }); + counter++; + } + return entries; +}, "se_policyDescriptorListType"); +var se_PolicyDescriptorType = /* @__PURE__ */ __name((input, context) => { + const entries = {}; + if (input[_a] != null) { + entries[_a] = input[_a]; + } + return entries; +}, "se_PolicyDescriptorType"); +var se_ProvidedContext = /* @__PURE__ */ __name((input, context) => { + const entries = {}; + if (input[_PAr] != null) { + entries[_PAr] = input[_PAr]; + } + if (input[_CA] != null) { + entries[_CA] = input[_CA]; + } + return entries; +}, "se_ProvidedContext"); +var se_ProvidedContextsListType = /* @__PURE__ */ __name((input, context) => { + const entries = {}; + let counter = 1; + for (const entry of input) { + if (entry === null) { + continue; + } + const memberEntries = se_ProvidedContext(entry, context); + Object.entries(memberEntries).forEach(([key, value]) => { + entries[`member.${counter}.${key}`] = value; + }); + counter++; + } + return entries; +}, "se_ProvidedContextsListType"); +var se_Tag = /* @__PURE__ */ __name((input, context) => { + const entries = {}; + if (input[_K] != null) { + entries[_K] = input[_K]; + } + if (input[_Va] != null) { + entries[_Va] = input[_Va]; + } + return entries; +}, "se_Tag"); +var se_tagKeyListType = /* @__PURE__ */ __name((input, context) => { + const entries = {}; + let counter = 1; + for (const entry of input) { + if (entry === null) { + continue; + } + entries[`member.${counter}`] = entry; + counter++; + } + return entries; +}, "se_tagKeyListType"); +var se_tagListType = /* @__PURE__ */ __name((input, context) => { + const entries = {}; + let 
counter = 1; + for (const entry of input) { + if (entry === null) { + continue; + } + const memberEntries = se_Tag(entry, context); + Object.entries(memberEntries).forEach(([key, value]) => { + entries[`member.${counter}.${key}`] = value; + }); + counter++; + } + return entries; +}, "se_tagListType"); +var de_AssumedRoleUser = /* @__PURE__ */ __name((output, context) => { + const contents = {}; + if (output[_ARI] != null) { + contents[_ARI] = (0, import_smithy_client3.expectString)(output[_ARI]); + } + if (output[_Ar] != null) { + contents[_Ar] = (0, import_smithy_client3.expectString)(output[_Ar]); + } + return contents; +}, "de_AssumedRoleUser"); +var de_AssumeRoleResponse = /* @__PURE__ */ __name((output, context) => { + const contents = {}; + if (output[_C] != null) { + contents[_C] = de_Credentials(output[_C], context); + } + if (output[_ARU] != null) { + contents[_ARU] = de_AssumedRoleUser(output[_ARU], context); + } + if (output[_PPS] != null) { + contents[_PPS] = (0, import_smithy_client3.strictParseInt32)(output[_PPS]); + } + if (output[_SI] != null) { + contents[_SI] = (0, import_smithy_client3.expectString)(output[_SI]); + } + return contents; +}, "de_AssumeRoleResponse"); +var de_AssumeRoleWithWebIdentityResponse = /* @__PURE__ */ __name((output, context) => { + const contents = {}; + if (output[_C] != null) { + contents[_C] = de_Credentials(output[_C], context); + } + if (output[_SFWIT] != null) { + contents[_SFWIT] = (0, import_smithy_client3.expectString)(output[_SFWIT]); + } + if (output[_ARU] != null) { + contents[_ARU] = de_AssumedRoleUser(output[_ARU], context); + } + if (output[_PPS] != null) { + contents[_PPS] = (0, import_smithy_client3.strictParseInt32)(output[_PPS]); + } + if (output[_Pr] != null) { + contents[_Pr] = (0, import_smithy_client3.expectString)(output[_Pr]); + } + if (output[_Au] != null) { + contents[_Au] = (0, import_smithy_client3.expectString)(output[_Au]); + } + if (output[_SI] != null) { + contents[_SI] = (0, 
import_smithy_client3.expectString)(output[_SI]); + } + return contents; +}, "de_AssumeRoleWithWebIdentityResponse"); +var de_Credentials = /* @__PURE__ */ __name((output, context) => { + const contents = {}; + if (output[_AKI] != null) { + contents[_AKI] = (0, import_smithy_client3.expectString)(output[_AKI]); + } + if (output[_SAK] != null) { + contents[_SAK] = (0, import_smithy_client3.expectString)(output[_SAK]); + } + if (output[_ST] != null) { + contents[_ST] = (0, import_smithy_client3.expectString)(output[_ST]); + } + if (output[_E] != null) { + contents[_E] = (0, import_smithy_client3.expectNonNull)((0, import_smithy_client3.parseRfc3339DateTimeWithOffset)(output[_E])); + } + return contents; +}, "de_Credentials"); +var de_ExpiredTokenException = /* @__PURE__ */ __name((output, context) => { + const contents = {}; + if (output[_m] != null) { + contents[_m] = (0, import_smithy_client3.expectString)(output[_m]); + } + return contents; +}, "de_ExpiredTokenException"); +var de_IDPCommunicationErrorException = /* @__PURE__ */ __name((output, context) => { + const contents = {}; + if (output[_m] != null) { + contents[_m] = (0, import_smithy_client3.expectString)(output[_m]); + } + return contents; +}, "de_IDPCommunicationErrorException"); +var de_IDPRejectedClaimException = /* @__PURE__ */ __name((output, context) => { + const contents = {}; + if (output[_m] != null) { + contents[_m] = (0, import_smithy_client3.expectString)(output[_m]); + } + return contents; +}, "de_IDPRejectedClaimException"); +var de_InvalidIdentityTokenException = /* @__PURE__ */ __name((output, context) => { + const contents = {}; + if (output[_m] != null) { + contents[_m] = (0, import_smithy_client3.expectString)(output[_m]); + } + return contents; +}, "de_InvalidIdentityTokenException"); +var de_MalformedPolicyDocumentException = /* @__PURE__ */ __name((output, context) => { + const contents = {}; + if (output[_m] != null) { + contents[_m] = (0, 
import_smithy_client3.expectString)(output[_m]); + } + return contents; +}, "de_MalformedPolicyDocumentException"); +var de_PackedPolicyTooLargeException = /* @__PURE__ */ __name((output, context) => { + const contents = {}; + if (output[_m] != null) { + contents[_m] = (0, import_smithy_client3.expectString)(output[_m]); + } + return contents; +}, "de_PackedPolicyTooLargeException"); +var de_RegionDisabledException = /* @__PURE__ */ __name((output, context) => { + const contents = {}; + if (output[_m] != null) { + contents[_m] = (0, import_smithy_client3.expectString)(output[_m]); + } + return contents; +}, "de_RegionDisabledException"); +var deserializeMetadata = /* @__PURE__ */ __name((output) => ({ + httpStatusCode: output.statusCode, + requestId: output.headers["x-amzn-requestid"] ?? output.headers["x-amzn-request-id"] ?? output.headers["x-amz-request-id"], + extendedRequestId: output.headers["x-amz-id-2"], + cfId: output.headers["x-amz-cf-id"] +}), "deserializeMetadata"); +var throwDefaultError = (0, import_smithy_client3.withBaseException)(STSServiceException); +var buildHttpRpcRequest = /* @__PURE__ */ __name(async (context, headers, path, resolvedHostname, body) => { + const { hostname, protocol = "https", port, path: basePath } = await context.endpoint(); + const contents = { + protocol, + hostname, + port, + method: "POST", + path: basePath.endsWith("/") ? 
basePath.slice(0, -1) + path : basePath + path, + headers + }; + if (resolvedHostname !== void 0) { + contents.hostname = resolvedHostname; + } + if (body !== void 0) { + contents.body = body; + } + return new import_protocol_http.HttpRequest(contents); +}, "buildHttpRpcRequest"); +var SHARED_HEADERS = { + "content-type": "application/x-www-form-urlencoded" +}; +var _ = "2011-06-15"; +var _A = "Action"; +var _AKI = "AccessKeyId"; +var _AR = "AssumeRole"; +var _ARI = "AssumedRoleId"; +var _ARU = "AssumedRoleUser"; +var _ARWWI = "AssumeRoleWithWebIdentity"; +var _Ar = "Arn"; +var _Au = "Audience"; +var _C = "Credentials"; +var _CA = "ContextAssertion"; +var _DS = "DurationSeconds"; +var _E = "Expiration"; +var _EI = "ExternalId"; +var _K = "Key"; +var _P = "Policy"; +var _PA = "PolicyArns"; +var _PAr = "ProviderArn"; +var _PC = "ProvidedContexts"; +var _PI = "ProviderId"; +var _PPS = "PackedPolicySize"; +var _Pr = "Provider"; +var _RA = "RoleArn"; +var _RSN = "RoleSessionName"; +var _SAK = "SecretAccessKey"; +var _SFWIT = "SubjectFromWebIdentityToken"; +var _SI = "SourceIdentity"; +var _SN = "SerialNumber"; +var _ST = "SessionToken"; +var _T = "Tags"; +var _TC = "TokenCode"; +var _TTK = "TransitiveTagKeys"; +var _V = "Version"; +var _Va = "Value"; +var _WIT = "WebIdentityToken"; +var _a = "arn"; +var _m = "message"; +var buildFormUrlencodedString = /* @__PURE__ */ __name((formEntries) => Object.entries(formEntries).map(([key, value]) => (0, import_smithy_client3.extendedEncodeURIComponent)(key) + "=" + (0, import_smithy_client3.extendedEncodeURIComponent)(value)).join("&"), "buildFormUrlencodedString"); +var loadQueryErrorCode = /* @__PURE__ */ __name((output, data) => { + if (data.Error?.Code !== void 0) { + return data.Error.Code; + } + if (output.statusCode == 404) { + return "NotFound"; + } +}, "loadQueryErrorCode"); + +// src/submodules/sts/commands/AssumeRoleCommand.ts +var AssumeRoleCommand = class extends 
import_smithy_client4.Command.classBuilder().ep(import_EndpointParameters.commonParams).m(function(Command, cs, config, o) { + return [ + (0, import_middleware_serde.getSerdePlugin)(config, this.serialize, this.deserialize), + (0, import_middleware_endpoint.getEndpointPlugin)(config, Command.getEndpointParameterInstructions()) + ]; +}).s("AWSSecurityTokenServiceV20110615", "AssumeRole", {}).n("STSClient", "AssumeRoleCommand").f(void 0, AssumeRoleResponseFilterSensitiveLog).ser(se_AssumeRoleCommand).de(de_AssumeRoleCommand).build() { + static { + __name(this, "AssumeRoleCommand"); + } +}; + +// src/submodules/sts/commands/AssumeRoleWithWebIdentityCommand.ts +var import_middleware_endpoint2 = require("@smithy/middleware-endpoint"); +var import_middleware_serde2 = require("@smithy/middleware-serde"); +var import_smithy_client5 = require("@smithy/smithy-client"); +var import_EndpointParameters2 = require("./endpoint/EndpointParameters"); +var AssumeRoleWithWebIdentityCommand = class extends import_smithy_client5.Command.classBuilder().ep(import_EndpointParameters2.commonParams).m(function(Command, cs, config, o) { + return [ + (0, import_middleware_serde2.getSerdePlugin)(config, this.serialize, this.deserialize), + (0, import_middleware_endpoint2.getEndpointPlugin)(config, Command.getEndpointParameterInstructions()) + ]; +}).s("AWSSecurityTokenServiceV20110615", "AssumeRoleWithWebIdentity", {}).n("STSClient", "AssumeRoleWithWebIdentityCommand").f(AssumeRoleWithWebIdentityRequestFilterSensitiveLog, AssumeRoleWithWebIdentityResponseFilterSensitiveLog).ser(se_AssumeRoleWithWebIdentityCommand).de(de_AssumeRoleWithWebIdentityCommand).build() { + static { + __name(this, "AssumeRoleWithWebIdentityCommand"); + } +}; + +// src/submodules/sts/STS.ts +var import_STSClient = require("./STSClient"); +var commands = { + AssumeRoleCommand, + AssumeRoleWithWebIdentityCommand +}; +var STS = class extends import_STSClient.STSClient { + static { + __name(this, "STS"); + } +}; +(0, 
import_smithy_client6.createAggregatedClient)(commands, STS); + +// src/submodules/sts/index.ts +var import_EndpointParameters3 = require("./endpoint/EndpointParameters"); + +// src/submodules/sts/defaultStsRoleAssumers.ts +var import_client = require("@aws-sdk/core/client"); +var ASSUME_ROLE_DEFAULT_REGION = "us-east-1"; +var getAccountIdFromAssumedRoleUser = /* @__PURE__ */ __name((assumedRoleUser) => { + if (typeof assumedRoleUser?.Arn === "string") { + const arnComponents = assumedRoleUser.Arn.split(":"); + if (arnComponents.length > 4 && arnComponents[4] !== "") { + return arnComponents[4]; + } + } + return void 0; +}, "getAccountIdFromAssumedRoleUser"); +var resolveRegion = /* @__PURE__ */ __name(async (_region, _parentRegion, credentialProviderLogger) => { + const region = typeof _region === "function" ? await _region() : _region; + const parentRegion = typeof _parentRegion === "function" ? await _parentRegion() : _parentRegion; + credentialProviderLogger?.debug?.( + "@aws-sdk/client-sts::resolveRegion", + "accepting first of:", + `${region} (provider)`, + `${parentRegion} (parent client)`, + `${ASSUME_ROLE_DEFAULT_REGION} (STS default)` + ); + return region ?? parentRegion ?? 
ASSUME_ROLE_DEFAULT_REGION; +}, "resolveRegion"); +var getDefaultRoleAssumer = /* @__PURE__ */ __name((stsOptions, STSClient3) => { + let stsClient; + let closureSourceCreds; + return async (sourceCreds, params) => { + closureSourceCreds = sourceCreds; + if (!stsClient) { + const { + logger = stsOptions?.parentClientConfig?.logger, + region, + requestHandler = stsOptions?.parentClientConfig?.requestHandler, + credentialProviderLogger + } = stsOptions; + const resolvedRegion = await resolveRegion( + region, + stsOptions?.parentClientConfig?.region, + credentialProviderLogger + ); + const isCompatibleRequestHandler = !isH2(requestHandler); + stsClient = new STSClient3({ + profile: stsOptions?.parentClientConfig?.profile, + // A hack to make sts client uses the credential in current closure. + credentialDefaultProvider: /* @__PURE__ */ __name(() => async () => closureSourceCreds, "credentialDefaultProvider"), + region: resolvedRegion, + requestHandler: isCompatibleRequestHandler ? requestHandler : void 0, + logger + }); + } + const { Credentials: Credentials2, AssumedRoleUser: AssumedRoleUser2 } = await stsClient.send(new AssumeRoleCommand(params)); + if (!Credentials2 || !Credentials2.AccessKeyId || !Credentials2.SecretAccessKey) { + throw new Error(`Invalid response from STS.assumeRole call with role ${params.RoleArn}`); + } + const accountId = getAccountIdFromAssumedRoleUser(AssumedRoleUser2); + const credentials = { + accessKeyId: Credentials2.AccessKeyId, + secretAccessKey: Credentials2.SecretAccessKey, + sessionToken: Credentials2.SessionToken, + expiration: Credentials2.Expiration, + // TODO(credentialScope): access normally when shape is updated. 
+ ...Credentials2.CredentialScope && { credentialScope: Credentials2.CredentialScope }, + ...accountId && { accountId } + }; + (0, import_client.setCredentialFeature)(credentials, "CREDENTIALS_STS_ASSUME_ROLE", "i"); + return credentials; + }; +}, "getDefaultRoleAssumer"); +var getDefaultRoleAssumerWithWebIdentity = /* @__PURE__ */ __name((stsOptions, STSClient3) => { + let stsClient; + return async (params) => { + if (!stsClient) { + const { + logger = stsOptions?.parentClientConfig?.logger, + region, + requestHandler = stsOptions?.parentClientConfig?.requestHandler, + credentialProviderLogger + } = stsOptions; + const resolvedRegion = await resolveRegion( + region, + stsOptions?.parentClientConfig?.region, + credentialProviderLogger + ); + const isCompatibleRequestHandler = !isH2(requestHandler); + stsClient = new STSClient3({ + profile: stsOptions?.parentClientConfig?.profile, + region: resolvedRegion, + requestHandler: isCompatibleRequestHandler ? requestHandler : void 0, + logger + }); + } + const { Credentials: Credentials2, AssumedRoleUser: AssumedRoleUser2 } = await stsClient.send(new AssumeRoleWithWebIdentityCommand(params)); + if (!Credentials2 || !Credentials2.AccessKeyId || !Credentials2.SecretAccessKey) { + throw new Error(`Invalid response from STS.assumeRoleWithWebIdentity call with role ${params.RoleArn}`); + } + const accountId = getAccountIdFromAssumedRoleUser(AssumedRoleUser2); + const credentials = { + accessKeyId: Credentials2.AccessKeyId, + secretAccessKey: Credentials2.SecretAccessKey, + sessionToken: Credentials2.SessionToken, + expiration: Credentials2.Expiration, + // TODO(credentialScope): access normally when shape is updated. 
+ ...Credentials2.CredentialScope && { credentialScope: Credentials2.CredentialScope }, + ...accountId && { accountId } + }; + if (accountId) { + (0, import_client.setCredentialFeature)(credentials, "RESOLVED_ACCOUNT_ID", "T"); + } + (0, import_client.setCredentialFeature)(credentials, "CREDENTIALS_STS_ASSUME_ROLE_WEB_ID", "k"); + return credentials; + }; +}, "getDefaultRoleAssumerWithWebIdentity"); +var isH2 = /* @__PURE__ */ __name((requestHandler) => { + return requestHandler?.metadata?.handlerProtocol === "h2"; +}, "isH2"); + +// src/submodules/sts/defaultRoleAssumers.ts +var import_STSClient2 = require("./STSClient"); +var getCustomizableStsClientCtor = /* @__PURE__ */ __name((baseCtor, customizations) => { + if (!customizations) return baseCtor; + else + return class CustomizableSTSClient extends baseCtor { + static { + __name(this, "CustomizableSTSClient"); + } + constructor(config) { + super(config); + for (const customization of customizations) { + this.middlewareStack.use(customization); + } + } + }; +}, "getCustomizableStsClientCtor"); +var getDefaultRoleAssumer2 = /* @__PURE__ */ __name((stsOptions = {}, stsPlugins) => getDefaultRoleAssumer(stsOptions, getCustomizableStsClientCtor(import_STSClient2.STSClient, stsPlugins)), "getDefaultRoleAssumer"); +var getDefaultRoleAssumerWithWebIdentity2 = /* @__PURE__ */ __name((stsOptions = {}, stsPlugins) => getDefaultRoleAssumerWithWebIdentity(stsOptions, getCustomizableStsClientCtor(import_STSClient2.STSClient, stsPlugins)), "getDefaultRoleAssumerWithWebIdentity"); +var decorateDefaultCredentialProvider = /* @__PURE__ */ __name((provider) => (input) => provider({ + roleAssumer: getDefaultRoleAssumer2(input), + roleAssumerWithWebIdentity: getDefaultRoleAssumerWithWebIdentity2(input), + ...input +}), "decorateDefaultCredentialProvider"); +// Annotate the CommonJS export names for ESM import in node: +0 && (module.exports = { + AssumeRoleCommand, + AssumeRoleResponseFilterSensitiveLog, + 
AssumeRoleWithWebIdentityCommand, + AssumeRoleWithWebIdentityRequestFilterSensitiveLog, + AssumeRoleWithWebIdentityResponseFilterSensitiveLog, + ClientInputEndpointParameters, + CredentialsFilterSensitiveLog, + ExpiredTokenException, + IDPCommunicationErrorException, + IDPRejectedClaimException, + InvalidIdentityTokenException, + MalformedPolicyDocumentException, + PackedPolicyTooLargeException, + RegionDisabledException, + STS, + STSServiceException, + decorateDefaultCredentialProvider, + getDefaultRoleAssumer, + getDefaultRoleAssumerWithWebIdentity, + ...require("./STSClient") +}); diff --git a/amplify/functions/fetchDocuments/node_modules/@aws-sdk/nested-clients/dist-cjs/submodules/sts/runtimeConfig.browser.js b/amplify/functions/fetchDocuments/node_modules/@aws-sdk/nested-clients/dist-cjs/submodules/sts/runtimeConfig.browser.js new file mode 100644 index 0000000..63cedb1 --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@aws-sdk/nested-clients/dist-cjs/submodules/sts/runtimeConfig.browser.js @@ -0,0 +1,39 @@ +"use strict"; +Object.defineProperty(exports, "__esModule", { value: true }); +exports.getRuntimeConfig = void 0; +const tslib_1 = require("tslib"); +const package_json_1 = tslib_1.__importDefault(require("../../../package.json")); +const sha256_browser_1 = require("@aws-crypto/sha256-browser"); +const util_user_agent_browser_1 = require("@aws-sdk/util-user-agent-browser"); +const config_resolver_1 = require("@smithy/config-resolver"); +const fetch_http_handler_1 = require("@smithy/fetch-http-handler"); +const invalid_dependency_1 = require("@smithy/invalid-dependency"); +const util_body_length_browser_1 = require("@smithy/util-body-length-browser"); +const util_retry_1 = require("@smithy/util-retry"); +const runtimeConfig_shared_1 = require("./runtimeConfig.shared"); +const smithy_client_1 = require("@smithy/smithy-client"); +const util_defaults_mode_browser_1 = require("@smithy/util-defaults-mode-browser"); +const getRuntimeConfig = 
(config) => { + const defaultsMode = (0, util_defaults_mode_browser_1.resolveDefaultsModeConfig)(config); + const defaultConfigProvider = () => defaultsMode().then(smithy_client_1.loadConfigsForDefaultMode); + const clientSharedValues = (0, runtimeConfig_shared_1.getRuntimeConfig)(config); + return { + ...clientSharedValues, + ...config, + runtime: "browser", + defaultsMode, + bodyLengthChecker: config?.bodyLengthChecker ?? util_body_length_browser_1.calculateBodyLength, + credentialDefaultProvider: config?.credentialDefaultProvider ?? ((_) => () => Promise.reject(new Error("Credential is missing"))), + defaultUserAgentProvider: config?.defaultUserAgentProvider ?? + (0, util_user_agent_browser_1.createDefaultUserAgentProvider)({ serviceId: clientSharedValues.serviceId, clientVersion: package_json_1.default.version }), + maxAttempts: config?.maxAttempts ?? util_retry_1.DEFAULT_MAX_ATTEMPTS, + region: config?.region ?? (0, invalid_dependency_1.invalidProvider)("Region is missing"), + requestHandler: fetch_http_handler_1.FetchHttpHandler.create(config?.requestHandler ?? defaultConfigProvider), + retryMode: config?.retryMode ?? (async () => (await defaultConfigProvider()).retryMode || util_retry_1.DEFAULT_RETRY_MODE), + sha256: config?.sha256 ?? sha256_browser_1.Sha256, + streamCollector: config?.streamCollector ?? fetch_http_handler_1.streamCollector, + useDualstackEndpoint: config?.useDualstackEndpoint ?? (() => Promise.resolve(config_resolver_1.DEFAULT_USE_DUALSTACK_ENDPOINT)), + useFipsEndpoint: config?.useFipsEndpoint ?? 
(() => Promise.resolve(config_resolver_1.DEFAULT_USE_FIPS_ENDPOINT)), + }; +}; +exports.getRuntimeConfig = getRuntimeConfig; diff --git a/amplify/functions/fetchDocuments/node_modules/@aws-sdk/nested-clients/dist-cjs/submodules/sts/runtimeConfig.js b/amplify/functions/fetchDocuments/node_modules/@aws-sdk/nested-clients/dist-cjs/submodules/sts/runtimeConfig.js new file mode 100644 index 0000000..de3b0e7 --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@aws-sdk/nested-clients/dist-cjs/submodules/sts/runtimeConfig.js @@ -0,0 +1,65 @@ +"use strict"; +Object.defineProperty(exports, "__esModule", { value: true }); +exports.getRuntimeConfig = void 0; +const tslib_1 = require("tslib"); +const package_json_1 = tslib_1.__importDefault(require("../../../package.json")); +const core_1 = require("@aws-sdk/core"); +const util_user_agent_node_1 = require("@aws-sdk/util-user-agent-node"); +const config_resolver_1 = require("@smithy/config-resolver"); +const core_2 = require("@smithy/core"); +const hash_node_1 = require("@smithy/hash-node"); +const middleware_retry_1 = require("@smithy/middleware-retry"); +const node_config_provider_1 = require("@smithy/node-config-provider"); +const node_http_handler_1 = require("@smithy/node-http-handler"); +const util_body_length_node_1 = require("@smithy/util-body-length-node"); +const util_retry_1 = require("@smithy/util-retry"); +const runtimeConfig_shared_1 = require("./runtimeConfig.shared"); +const smithy_client_1 = require("@smithy/smithy-client"); +const util_defaults_mode_node_1 = require("@smithy/util-defaults-mode-node"); +const smithy_client_2 = require("@smithy/smithy-client"); +const getRuntimeConfig = (config) => { + (0, smithy_client_2.emitWarningIfUnsupportedVersion)(process.version); + const defaultsMode = (0, util_defaults_mode_node_1.resolveDefaultsModeConfig)(config); + const defaultConfigProvider = () => defaultsMode().then(smithy_client_1.loadConfigsForDefaultMode); + const clientSharedValues = (0, 
runtimeConfig_shared_1.getRuntimeConfig)(config); + (0, core_1.emitWarningIfUnsupportedVersion)(process.version); + const profileConfig = { profile: config?.profile }; + return { + ...clientSharedValues, + ...config, + runtime: "node", + defaultsMode, + authSchemePreference: config?.authSchemePreference ?? (0, node_config_provider_1.loadConfig)(core_1.NODE_AUTH_SCHEME_PREFERENCE_OPTIONS, profileConfig), + bodyLengthChecker: config?.bodyLengthChecker ?? util_body_length_node_1.calculateBodyLength, + defaultUserAgentProvider: config?.defaultUserAgentProvider ?? + (0, util_user_agent_node_1.createDefaultUserAgentProvider)({ serviceId: clientSharedValues.serviceId, clientVersion: package_json_1.default.version }), + httpAuthSchemes: config?.httpAuthSchemes ?? [ + { + schemeId: "aws.auth#sigv4", + identityProvider: (ipc) => ipc.getIdentityProvider("aws.auth#sigv4") || + (async (idProps) => await config.credentialDefaultProvider(idProps?.__config || {})()), + signer: new core_1.AwsSdkSigV4Signer(), + }, + { + schemeId: "smithy.api#noAuth", + identityProvider: (ipc) => ipc.getIdentityProvider("smithy.api#noAuth") || (async () => ({})), + signer: new core_2.NoAuthSigner(), + }, + ], + maxAttempts: config?.maxAttempts ?? (0, node_config_provider_1.loadConfig)(middleware_retry_1.NODE_MAX_ATTEMPT_CONFIG_OPTIONS, config), + region: config?.region ?? + (0, node_config_provider_1.loadConfig)(config_resolver_1.NODE_REGION_CONFIG_OPTIONS, { ...config_resolver_1.NODE_REGION_CONFIG_FILE_OPTIONS, ...profileConfig }), + requestHandler: node_http_handler_1.NodeHttpHandler.create(config?.requestHandler ?? defaultConfigProvider), + retryMode: config?.retryMode ?? + (0, node_config_provider_1.loadConfig)({ + ...middleware_retry_1.NODE_RETRY_MODE_CONFIG_OPTIONS, + default: async () => (await defaultConfigProvider()).retryMode || util_retry_1.DEFAULT_RETRY_MODE, + }, config), + sha256: config?.sha256 ?? hash_node_1.Hash.bind(null, "sha256"), + streamCollector: config?.streamCollector ?? 
node_http_handler_1.streamCollector, + useDualstackEndpoint: config?.useDualstackEndpoint ?? (0, node_config_provider_1.loadConfig)(config_resolver_1.NODE_USE_DUALSTACK_ENDPOINT_CONFIG_OPTIONS, profileConfig), + useFipsEndpoint: config?.useFipsEndpoint ?? (0, node_config_provider_1.loadConfig)(config_resolver_1.NODE_USE_FIPS_ENDPOINT_CONFIG_OPTIONS, profileConfig), + userAgentAppId: config?.userAgentAppId ?? (0, node_config_provider_1.loadConfig)(util_user_agent_node_1.NODE_APP_ID_CONFIG_OPTIONS, profileConfig), + }; +}; +exports.getRuntimeConfig = getRuntimeConfig; diff --git a/amplify/functions/fetchDocuments/node_modules/@aws-sdk/nested-clients/dist-cjs/submodules/sts/runtimeConfig.native.js b/amplify/functions/fetchDocuments/node_modules/@aws-sdk/nested-clients/dist-cjs/submodules/sts/runtimeConfig.native.js new file mode 100644 index 0000000..34c5f8e --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@aws-sdk/nested-clients/dist-cjs/submodules/sts/runtimeConfig.native.js @@ -0,0 +1,15 @@ +"use strict"; +Object.defineProperty(exports, "__esModule", { value: true }); +exports.getRuntimeConfig = void 0; +const sha256_js_1 = require("@aws-crypto/sha256-js"); +const runtimeConfig_browser_1 = require("./runtimeConfig.browser"); +const getRuntimeConfig = (config) => { + const browserDefaults = (0, runtimeConfig_browser_1.getRuntimeConfig)(config); + return { + ...browserDefaults, + ...config, + runtime: "react-native", + sha256: config?.sha256 ?? 
sha256_js_1.Sha256, + }; +}; +exports.getRuntimeConfig = getRuntimeConfig; diff --git a/amplify/functions/fetchDocuments/node_modules/@aws-sdk/nested-clients/dist-cjs/submodules/sts/runtimeConfig.shared.js b/amplify/functions/fetchDocuments/node_modules/@aws-sdk/nested-clients/dist-cjs/submodules/sts/runtimeConfig.shared.js new file mode 100644 index 0000000..1e03d8b --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@aws-sdk/nested-clients/dist-cjs/submodules/sts/runtimeConfig.shared.js @@ -0,0 +1,40 @@ +"use strict"; +Object.defineProperty(exports, "__esModule", { value: true }); +exports.getRuntimeConfig = void 0; +const core_1 = require("@aws-sdk/core"); +const core_2 = require("@smithy/core"); +const smithy_client_1 = require("@smithy/smithy-client"); +const url_parser_1 = require("@smithy/url-parser"); +const util_base64_1 = require("@smithy/util-base64"); +const util_utf8_1 = require("@smithy/util-utf8"); +const httpAuthSchemeProvider_1 = require("./auth/httpAuthSchemeProvider"); +const endpointResolver_1 = require("./endpoint/endpointResolver"); +const getRuntimeConfig = (config) => { + return { + apiVersion: "2011-06-15", + base64Decoder: config?.base64Decoder ?? util_base64_1.fromBase64, + base64Encoder: config?.base64Encoder ?? util_base64_1.toBase64, + disableHostPrefix: config?.disableHostPrefix ?? false, + endpointProvider: config?.endpointProvider ?? endpointResolver_1.defaultEndpointResolver, + extensions: config?.extensions ?? [], + httpAuthSchemeProvider: config?.httpAuthSchemeProvider ?? httpAuthSchemeProvider_1.defaultSTSHttpAuthSchemeProvider, + httpAuthSchemes: config?.httpAuthSchemes ?? 
[ + { + schemeId: "aws.auth#sigv4", + identityProvider: (ipc) => ipc.getIdentityProvider("aws.auth#sigv4"), + signer: new core_1.AwsSdkSigV4Signer(), + }, + { + schemeId: "smithy.api#noAuth", + identityProvider: (ipc) => ipc.getIdentityProvider("smithy.api#noAuth") || (async () => ({})), + signer: new core_2.NoAuthSigner(), + }, + ], + logger: config?.logger ?? new smithy_client_1.NoOpLogger(), + serviceId: config?.serviceId ?? "STS", + urlParser: config?.urlParser ?? url_parser_1.parseUrl, + utf8Decoder: config?.utf8Decoder ?? util_utf8_1.fromUtf8, + utf8Encoder: config?.utf8Encoder ?? util_utf8_1.toUtf8, + }; +}; +exports.getRuntimeConfig = getRuntimeConfig; diff --git a/amplify/functions/fetchDocuments/node_modules/@aws-sdk/nested-clients/dist-cjs/submodules/sts/runtimeExtensions.js b/amplify/functions/fetchDocuments/node_modules/@aws-sdk/nested-clients/dist-cjs/submodules/sts/runtimeExtensions.js new file mode 100644 index 0000000..a50ebec --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@aws-sdk/nested-clients/dist-cjs/submodules/sts/runtimeExtensions.js @@ -0,0 +1,13 @@ +"use strict"; +Object.defineProperty(exports, "__esModule", { value: true }); +exports.resolveRuntimeExtensions = void 0; +const region_config_resolver_1 = require("@aws-sdk/region-config-resolver"); +const protocol_http_1 = require("@smithy/protocol-http"); +const smithy_client_1 = require("@smithy/smithy-client"); +const httpAuthExtensionConfiguration_1 = require("./auth/httpAuthExtensionConfiguration"); +const resolveRuntimeExtensions = (runtimeConfig, extensions) => { + const extensionConfiguration = Object.assign((0, region_config_resolver_1.getAwsRegionExtensionConfiguration)(runtimeConfig), (0, smithy_client_1.getDefaultExtensionConfiguration)(runtimeConfig), (0, protocol_http_1.getHttpHandlerExtensionConfiguration)(runtimeConfig), (0, httpAuthExtensionConfiguration_1.getHttpAuthExtensionConfiguration)(runtimeConfig)); + extensions.forEach((extension) => 
extension.configure(extensionConfiguration)); + return Object.assign(runtimeConfig, (0, region_config_resolver_1.resolveAwsRegionExtensionConfiguration)(extensionConfiguration), (0, smithy_client_1.resolveDefaultRuntimeConfig)(extensionConfiguration), (0, protocol_http_1.resolveHttpHandlerRuntimeConfig)(extensionConfiguration), (0, httpAuthExtensionConfiguration_1.resolveHttpAuthRuntimeConfig)(extensionConfiguration)); +}; +exports.resolveRuntimeExtensions = resolveRuntimeExtensions; diff --git a/amplify/functions/fetchDocuments/node_modules/@aws-sdk/nested-clients/dist-es/index.js b/amplify/functions/fetchDocuments/node_modules/@aws-sdk/nested-clients/dist-es/index.js new file mode 100644 index 0000000..cb0ff5c --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@aws-sdk/nested-clients/dist-es/index.js @@ -0,0 +1 @@ +export {}; diff --git a/amplify/functions/fetchDocuments/node_modules/@aws-sdk/nested-clients/dist-es/submodules/sso-oidc/SSOOIDC.js b/amplify/functions/fetchDocuments/node_modules/@aws-sdk/nested-clients/dist-es/submodules/sso-oidc/SSOOIDC.js new file mode 100644 index 0000000..bcb161f --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@aws-sdk/nested-clients/dist-es/submodules/sso-oidc/SSOOIDC.js @@ -0,0 +1,9 @@ +import { createAggregatedClient } from "@smithy/smithy-client"; +import { CreateTokenCommand } from "./commands/CreateTokenCommand"; +import { SSOOIDCClient } from "./SSOOIDCClient"; +const commands = { + CreateTokenCommand, +}; +export class SSOOIDC extends SSOOIDCClient { +} +createAggregatedClient(commands, SSOOIDC); diff --git a/amplify/functions/fetchDocuments/node_modules/@aws-sdk/nested-clients/dist-es/submodules/sso-oidc/SSOOIDCClient.js b/amplify/functions/fetchDocuments/node_modules/@aws-sdk/nested-clients/dist-es/submodules/sso-oidc/SSOOIDCClient.js new file mode 100644 index 0000000..003cad7 --- /dev/null +++ 
b/amplify/functions/fetchDocuments/node_modules/@aws-sdk/nested-clients/dist-es/submodules/sso-oidc/SSOOIDCClient.js @@ -0,0 +1,48 @@ +import { getHostHeaderPlugin, resolveHostHeaderConfig, } from "@aws-sdk/middleware-host-header"; +import { getLoggerPlugin } from "@aws-sdk/middleware-logger"; +import { getRecursionDetectionPlugin } from "@aws-sdk/middleware-recursion-detection"; +import { getUserAgentPlugin, resolveUserAgentConfig, } from "@aws-sdk/middleware-user-agent"; +import { resolveRegionConfig } from "@smithy/config-resolver"; +import { DefaultIdentityProviderConfig, getHttpAuthSchemeEndpointRuleSetPlugin, getHttpSigningPlugin, } from "@smithy/core"; +import { getContentLengthPlugin } from "@smithy/middleware-content-length"; +import { resolveEndpointConfig } from "@smithy/middleware-endpoint"; +import { getRetryPlugin, resolveRetryConfig } from "@smithy/middleware-retry"; +import { Client as __Client, } from "@smithy/smithy-client"; +import { defaultSSOOIDCHttpAuthSchemeParametersProvider, resolveHttpAuthSchemeConfig, } from "./auth/httpAuthSchemeProvider"; +import { resolveClientEndpointParameters, } from "./endpoint/EndpointParameters"; +import { getRuntimeConfig as __getRuntimeConfig } from "./runtimeConfig"; +import { resolveRuntimeExtensions } from "./runtimeExtensions"; +export { __Client }; +export class SSOOIDCClient extends __Client { + config; + constructor(...[configuration]) { + const _config_0 = __getRuntimeConfig(configuration || {}); + super(_config_0); + this.initConfig = _config_0; + const _config_1 = resolveClientEndpointParameters(_config_0); + const _config_2 = resolveUserAgentConfig(_config_1); + const _config_3 = resolveRetryConfig(_config_2); + const _config_4 = resolveRegionConfig(_config_3); + const _config_5 = resolveHostHeaderConfig(_config_4); + const _config_6 = resolveEndpointConfig(_config_5); + const _config_7 = resolveHttpAuthSchemeConfig(_config_6); + const _config_8 = resolveRuntimeExtensions(_config_7, 
configuration?.extensions || []); + this.config = _config_8; + this.middlewareStack.use(getUserAgentPlugin(this.config)); + this.middlewareStack.use(getRetryPlugin(this.config)); + this.middlewareStack.use(getContentLengthPlugin(this.config)); + this.middlewareStack.use(getHostHeaderPlugin(this.config)); + this.middlewareStack.use(getLoggerPlugin(this.config)); + this.middlewareStack.use(getRecursionDetectionPlugin(this.config)); + this.middlewareStack.use(getHttpAuthSchemeEndpointRuleSetPlugin(this.config, { + httpAuthSchemeParametersProvider: defaultSSOOIDCHttpAuthSchemeParametersProvider, + identityProviderConfigProvider: async (config) => new DefaultIdentityProviderConfig({ + "aws.auth#sigv4": config.credentials, + }), + })); + this.middlewareStack.use(getHttpSigningPlugin(this.config)); + } + destroy() { + super.destroy(); + } +} diff --git a/amplify/functions/fetchDocuments/node_modules/@aws-sdk/nested-clients/dist-es/submodules/sso-oidc/auth/httpAuthExtensionConfiguration.js b/amplify/functions/fetchDocuments/node_modules/@aws-sdk/nested-clients/dist-es/submodules/sso-oidc/auth/httpAuthExtensionConfiguration.js new file mode 100644 index 0000000..2ba1d48 --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@aws-sdk/nested-clients/dist-es/submodules/sso-oidc/auth/httpAuthExtensionConfiguration.js @@ -0,0 +1,38 @@ +export const getHttpAuthExtensionConfiguration = (runtimeConfig) => { + const _httpAuthSchemes = runtimeConfig.httpAuthSchemes; + let _httpAuthSchemeProvider = runtimeConfig.httpAuthSchemeProvider; + let _credentials = runtimeConfig.credentials; + return { + setHttpAuthScheme(httpAuthScheme) { + const index = _httpAuthSchemes.findIndex((scheme) => scheme.schemeId === httpAuthScheme.schemeId); + if (index === -1) { + _httpAuthSchemes.push(httpAuthScheme); + } + else { + _httpAuthSchemes.splice(index, 1, httpAuthScheme); + } + }, + httpAuthSchemes() { + return _httpAuthSchemes; + }, + setHttpAuthSchemeProvider(httpAuthSchemeProvider) { + 
_httpAuthSchemeProvider = httpAuthSchemeProvider; + }, + httpAuthSchemeProvider() { + return _httpAuthSchemeProvider; + }, + setCredentials(credentials) { + _credentials = credentials; + }, + credentials() { + return _credentials; + }, + }; +}; +export const resolveHttpAuthRuntimeConfig = (config) => { + return { + httpAuthSchemes: config.httpAuthSchemes(), + httpAuthSchemeProvider: config.httpAuthSchemeProvider(), + credentials: config.credentials(), + }; +}; diff --git a/amplify/functions/fetchDocuments/node_modules/@aws-sdk/nested-clients/dist-es/submodules/sso-oidc/auth/httpAuthSchemeProvider.js b/amplify/functions/fetchDocuments/node_modules/@aws-sdk/nested-clients/dist-es/submodules/sso-oidc/auth/httpAuthSchemeProvider.js new file mode 100644 index 0000000..a5e9eab --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@aws-sdk/nested-clients/dist-es/submodules/sso-oidc/auth/httpAuthSchemeProvider.js @@ -0,0 +1,50 @@ +import { resolveAwsSdkSigV4Config, } from "@aws-sdk/core"; +import { getSmithyContext, normalizeProvider } from "@smithy/util-middleware"; +export const defaultSSOOIDCHttpAuthSchemeParametersProvider = async (config, context, input) => { + return { + operation: getSmithyContext(context).operation, + region: (await normalizeProvider(config.region)()) || + (() => { + throw new Error("expected `region` to be configured for `aws.auth#sigv4`"); + })(), + }; +}; +function createAwsAuthSigv4HttpAuthOption(authParameters) { + return { + schemeId: "aws.auth#sigv4", + signingProperties: { + name: "sso-oauth", + region: authParameters.region, + }, + propertiesExtractor: (config, context) => ({ + signingProperties: { + config, + context, + }, + }), + }; +} +function createSmithyApiNoAuthHttpAuthOption(authParameters) { + return { + schemeId: "smithy.api#noAuth", + }; +} +export const defaultSSOOIDCHttpAuthSchemeProvider = (authParameters) => { + const options = []; + switch (authParameters.operation) { + case "CreateToken": { + 
options.push(createSmithyApiNoAuthHttpAuthOption(authParameters)); + break; + } + default: { + options.push(createAwsAuthSigv4HttpAuthOption(authParameters)); + } + } + return options; +}; +export const resolveHttpAuthSchemeConfig = (config) => { + const config_0 = resolveAwsSdkSigV4Config(config); + return Object.assign(config_0, { + authSchemePreference: normalizeProvider(config.authSchemePreference ?? []), + }); +}; diff --git a/amplify/functions/fetchDocuments/node_modules/@aws-sdk/nested-clients/dist-es/submodules/sso-oidc/commands/CreateTokenCommand.js b/amplify/functions/fetchDocuments/node_modules/@aws-sdk/nested-clients/dist-es/submodules/sso-oidc/commands/CreateTokenCommand.js new file mode 100644 index 0000000..7863247 --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@aws-sdk/nested-clients/dist-es/submodules/sso-oidc/commands/CreateTokenCommand.js @@ -0,0 +1,23 @@ +import { getEndpointPlugin } from "@smithy/middleware-endpoint"; +import { getSerdePlugin } from "@smithy/middleware-serde"; +import { Command as $Command } from "@smithy/smithy-client"; +import { commonParams } from "../endpoint/EndpointParameters"; +import { CreateTokenRequestFilterSensitiveLog, CreateTokenResponseFilterSensitiveLog, } from "../models/models_0"; +import { de_CreateTokenCommand, se_CreateTokenCommand } from "../protocols/Aws_restJson1"; +export { $Command }; +export class CreateTokenCommand extends $Command + .classBuilder() + .ep(commonParams) + .m(function (Command, cs, config, o) { + return [ + getSerdePlugin(config, this.serialize, this.deserialize), + getEndpointPlugin(config, Command.getEndpointParameterInstructions()), + ]; +}) + .s("AWSSSOOIDCService", "CreateToken", {}) + .n("SSOOIDCClient", "CreateTokenCommand") + .f(CreateTokenRequestFilterSensitiveLog, CreateTokenResponseFilterSensitiveLog) + .ser(se_CreateTokenCommand) + .de(de_CreateTokenCommand) + .build() { +} diff --git 
a/amplify/functions/fetchDocuments/node_modules/@aws-sdk/nested-clients/dist-es/submodules/sso-oidc/commands/index.js b/amplify/functions/fetchDocuments/node_modules/@aws-sdk/nested-clients/dist-es/submodules/sso-oidc/commands/index.js new file mode 100644 index 0000000..09214ca --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@aws-sdk/nested-clients/dist-es/submodules/sso-oidc/commands/index.js @@ -0,0 +1 @@ +export * from "./CreateTokenCommand"; diff --git a/amplify/functions/fetchDocuments/node_modules/@aws-sdk/nested-clients/dist-es/submodules/sso-oidc/endpoint/EndpointParameters.js b/amplify/functions/fetchDocuments/node_modules/@aws-sdk/nested-clients/dist-es/submodules/sso-oidc/endpoint/EndpointParameters.js new file mode 100644 index 0000000..2b26c44 --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@aws-sdk/nested-clients/dist-es/submodules/sso-oidc/endpoint/EndpointParameters.js @@ -0,0 +1,13 @@ +export const resolveClientEndpointParameters = (options) => { + return Object.assign(options, { + useDualstackEndpoint: options.useDualstackEndpoint ?? false, + useFipsEndpoint: options.useFipsEndpoint ?? 
false, + defaultSigningName: "sso-oauth", + }); +}; +export const commonParams = { + UseFIPS: { type: "builtInParams", name: "useFipsEndpoint" }, + Endpoint: { type: "builtInParams", name: "endpoint" }, + Region: { type: "builtInParams", name: "region" }, + UseDualStack: { type: "builtInParams", name: "useDualstackEndpoint" }, +}; diff --git a/amplify/functions/fetchDocuments/node_modules/@aws-sdk/nested-clients/dist-es/submodules/sso-oidc/endpoint/endpointResolver.js b/amplify/functions/fetchDocuments/node_modules/@aws-sdk/nested-clients/dist-es/submodules/sso-oidc/endpoint/endpointResolver.js new file mode 100644 index 0000000..0ac15bc --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@aws-sdk/nested-clients/dist-es/submodules/sso-oidc/endpoint/endpointResolver.js @@ -0,0 +1,14 @@ +import { awsEndpointFunctions } from "@aws-sdk/util-endpoints"; +import { customEndpointFunctions, EndpointCache, resolveEndpoint } from "@smithy/util-endpoints"; +import { ruleSet } from "./ruleset"; +const cache = new EndpointCache({ + size: 50, + params: ["Endpoint", "Region", "UseDualStack", "UseFIPS"], +}); +export const defaultEndpointResolver = (endpointParams, context = {}) => { + return cache.get(endpointParams, () => resolveEndpoint(ruleSet, { + endpointParams: endpointParams, + logger: context.logger, + })); +}; +customEndpointFunctions.aws = awsEndpointFunctions; diff --git a/amplify/functions/fetchDocuments/node_modules/@aws-sdk/nested-clients/dist-es/submodules/sso-oidc/endpoint/ruleset.js b/amplify/functions/fetchDocuments/node_modules/@aws-sdk/nested-clients/dist-es/submodules/sso-oidc/endpoint/ruleset.js new file mode 100644 index 0000000..040ea39 --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@aws-sdk/nested-clients/dist-es/submodules/sso-oidc/endpoint/ruleset.js @@ -0,0 +1,4 @@ +const u = "required", v = "fn", w = "argv", x = "ref"; +const a = true, b = "isSet", c = "booleanEquals", d = "error", e = "endpoint", f = "tree", g = 
"PartitionResult", h = "getAttr", i = { [u]: false, "type": "String" }, j = { [u]: true, "default": false, "type": "Boolean" }, k = { [x]: "Endpoint" }, l = { [v]: c, [w]: [{ [x]: "UseFIPS" }, true] }, m = { [v]: c, [w]: [{ [x]: "UseDualStack" }, true] }, n = {}, o = { [v]: h, [w]: [{ [x]: g }, "supportsFIPS"] }, p = { [x]: g }, q = { [v]: c, [w]: [true, { [v]: h, [w]: [p, "supportsDualStack"] }] }, r = [l], s = [m], t = [{ [x]: "Region" }]; +const _data = { version: "1.0", parameters: { Region: i, UseDualStack: j, UseFIPS: j, Endpoint: i }, rules: [{ conditions: [{ [v]: b, [w]: [k] }], rules: [{ conditions: r, error: "Invalid Configuration: FIPS and custom endpoint are not supported", type: d }, { conditions: s, error: "Invalid Configuration: Dualstack and custom endpoint are not supported", type: d }, { endpoint: { url: k, properties: n, headers: n }, type: e }], type: f }, { conditions: [{ [v]: b, [w]: t }], rules: [{ conditions: [{ [v]: "aws.partition", [w]: t, assign: g }], rules: [{ conditions: [l, m], rules: [{ conditions: [{ [v]: c, [w]: [a, o] }, q], rules: [{ endpoint: { url: "https://oidc-fips.{Region}.{PartitionResult#dualStackDnsSuffix}", properties: n, headers: n }, type: e }], type: f }, { error: "FIPS and DualStack are enabled, but this partition does not support one or both", type: d }], type: f }, { conditions: r, rules: [{ conditions: [{ [v]: c, [w]: [o, a] }], rules: [{ conditions: [{ [v]: "stringEquals", [w]: [{ [v]: h, [w]: [p, "name"] }, "aws-us-gov"] }], endpoint: { url: "https://oidc.{Region}.amazonaws.com", properties: n, headers: n }, type: e }, { endpoint: { url: "https://oidc-fips.{Region}.{PartitionResult#dnsSuffix}", properties: n, headers: n }, type: e }], type: f }, { error: "FIPS is enabled but this partition does not support FIPS", type: d }], type: f }, { conditions: s, rules: [{ conditions: [q], rules: [{ endpoint: { url: "https://oidc.{Region}.{PartitionResult#dualStackDnsSuffix}", properties: n, headers: n }, type: e }], type: 
f }, { error: "DualStack is enabled but this partition does not support DualStack", type: d }], type: f }, { endpoint: { url: "https://oidc.{Region}.{PartitionResult#dnsSuffix}", properties: n, headers: n }, type: e }], type: f }], type: f }, { error: "Invalid Configuration: Missing Region", type: d }] }; +export const ruleSet = _data; diff --git a/amplify/functions/fetchDocuments/node_modules/@aws-sdk/nested-clients/dist-es/submodules/sso-oidc/extensionConfiguration.js b/amplify/functions/fetchDocuments/node_modules/@aws-sdk/nested-clients/dist-es/submodules/sso-oidc/extensionConfiguration.js new file mode 100644 index 0000000..cb0ff5c --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@aws-sdk/nested-clients/dist-es/submodules/sso-oidc/extensionConfiguration.js @@ -0,0 +1 @@ +export {}; diff --git a/amplify/functions/fetchDocuments/node_modules/@aws-sdk/nested-clients/dist-es/submodules/sso-oidc/index.js b/amplify/functions/fetchDocuments/node_modules/@aws-sdk/nested-clients/dist-es/submodules/sso-oidc/index.js new file mode 100644 index 0000000..c2894a3 --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@aws-sdk/nested-clients/dist-es/submodules/sso-oidc/index.js @@ -0,0 +1,5 @@ +export * from "./SSOOIDCClient"; +export * from "./SSOOIDC"; +export * from "./commands"; +export * from "./models"; +export { SSOOIDCServiceException } from "./models/SSOOIDCServiceException"; diff --git a/amplify/functions/fetchDocuments/node_modules/@aws-sdk/nested-clients/dist-es/submodules/sso-oidc/models/SSOOIDCServiceException.js b/amplify/functions/fetchDocuments/node_modules/@aws-sdk/nested-clients/dist-es/submodules/sso-oidc/models/SSOOIDCServiceException.js new file mode 100644 index 0000000..176cec3 --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@aws-sdk/nested-clients/dist-es/submodules/sso-oidc/models/SSOOIDCServiceException.js @@ -0,0 +1,8 @@ +import { ServiceException as __ServiceException, } from "@smithy/smithy-client"; 
+export { __ServiceException }; +export class SSOOIDCServiceException extends __ServiceException { + constructor(options) { + super(options); + Object.setPrototypeOf(this, SSOOIDCServiceException.prototype); + } +} diff --git a/amplify/functions/fetchDocuments/node_modules/@aws-sdk/nested-clients/dist-es/submodules/sso-oidc/models/index.js b/amplify/functions/fetchDocuments/node_modules/@aws-sdk/nested-clients/dist-es/submodules/sso-oidc/models/index.js new file mode 100644 index 0000000..09c5d6e --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@aws-sdk/nested-clients/dist-es/submodules/sso-oidc/models/index.js @@ -0,0 +1 @@ +export * from "./models_0"; diff --git a/amplify/functions/fetchDocuments/node_modules/@aws-sdk/nested-clients/dist-es/submodules/sso-oidc/models/models_0.js b/amplify/functions/fetchDocuments/node_modules/@aws-sdk/nested-clients/dist-es/submodules/sso-oidc/models/models_0.js new file mode 100644 index 0000000..b350ef1 --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@aws-sdk/nested-clients/dist-es/submodules/sso-oidc/models/models_0.js @@ -0,0 +1,190 @@ +import { SENSITIVE_STRING } from "@smithy/smithy-client"; +import { SSOOIDCServiceException as __BaseException } from "./SSOOIDCServiceException"; +export class AccessDeniedException extends __BaseException { + name = "AccessDeniedException"; + $fault = "client"; + error; + error_description; + constructor(opts) { + super({ + name: "AccessDeniedException", + $fault: "client", + ...opts, + }); + Object.setPrototypeOf(this, AccessDeniedException.prototype); + this.error = opts.error; + this.error_description = opts.error_description; + } +} +export class AuthorizationPendingException extends __BaseException { + name = "AuthorizationPendingException"; + $fault = "client"; + error; + error_description; + constructor(opts) { + super({ + name: "AuthorizationPendingException", + $fault: "client", + ...opts, + }); + Object.setPrototypeOf(this, 
AuthorizationPendingException.prototype); + this.error = opts.error; + this.error_description = opts.error_description; + } +} +export const CreateTokenRequestFilterSensitiveLog = (obj) => ({ + ...obj, + ...(obj.clientSecret && { clientSecret: SENSITIVE_STRING }), + ...(obj.refreshToken && { refreshToken: SENSITIVE_STRING }), + ...(obj.codeVerifier && { codeVerifier: SENSITIVE_STRING }), +}); +export const CreateTokenResponseFilterSensitiveLog = (obj) => ({ + ...obj, + ...(obj.accessToken && { accessToken: SENSITIVE_STRING }), + ...(obj.refreshToken && { refreshToken: SENSITIVE_STRING }), + ...(obj.idToken && { idToken: SENSITIVE_STRING }), +}); +export class ExpiredTokenException extends __BaseException { + name = "ExpiredTokenException"; + $fault = "client"; + error; + error_description; + constructor(opts) { + super({ + name: "ExpiredTokenException", + $fault: "client", + ...opts, + }); + Object.setPrototypeOf(this, ExpiredTokenException.prototype); + this.error = opts.error; + this.error_description = opts.error_description; + } +} +export class InternalServerException extends __BaseException { + name = "InternalServerException"; + $fault = "server"; + error; + error_description; + constructor(opts) { + super({ + name: "InternalServerException", + $fault: "server", + ...opts, + }); + Object.setPrototypeOf(this, InternalServerException.prototype); + this.error = opts.error; + this.error_description = opts.error_description; + } +} +export class InvalidClientException extends __BaseException { + name = "InvalidClientException"; + $fault = "client"; + error; + error_description; + constructor(opts) { + super({ + name: "InvalidClientException", + $fault: "client", + ...opts, + }); + Object.setPrototypeOf(this, InvalidClientException.prototype); + this.error = opts.error; + this.error_description = opts.error_description; + } +} +export class InvalidGrantException extends __BaseException { + name = "InvalidGrantException"; + $fault = "client"; + error; + 
error_description; + constructor(opts) { + super({ + name: "InvalidGrantException", + $fault: "client", + ...opts, + }); + Object.setPrototypeOf(this, InvalidGrantException.prototype); + this.error = opts.error; + this.error_description = opts.error_description; + } +} +export class InvalidRequestException extends __BaseException { + name = "InvalidRequestException"; + $fault = "client"; + error; + error_description; + constructor(opts) { + super({ + name: "InvalidRequestException", + $fault: "client", + ...opts, + }); + Object.setPrototypeOf(this, InvalidRequestException.prototype); + this.error = opts.error; + this.error_description = opts.error_description; + } +} +export class InvalidScopeException extends __BaseException { + name = "InvalidScopeException"; + $fault = "client"; + error; + error_description; + constructor(opts) { + super({ + name: "InvalidScopeException", + $fault: "client", + ...opts, + }); + Object.setPrototypeOf(this, InvalidScopeException.prototype); + this.error = opts.error; + this.error_description = opts.error_description; + } +} +export class SlowDownException extends __BaseException { + name = "SlowDownException"; + $fault = "client"; + error; + error_description; + constructor(opts) { + super({ + name: "SlowDownException", + $fault: "client", + ...opts, + }); + Object.setPrototypeOf(this, SlowDownException.prototype); + this.error = opts.error; + this.error_description = opts.error_description; + } +} +export class UnauthorizedClientException extends __BaseException { + name = "UnauthorizedClientException"; + $fault = "client"; + error; + error_description; + constructor(opts) { + super({ + name: "UnauthorizedClientException", + $fault: "client", + ...opts, + }); + Object.setPrototypeOf(this, UnauthorizedClientException.prototype); + this.error = opts.error; + this.error_description = opts.error_description; + } +} +export class UnsupportedGrantTypeException extends __BaseException { + name = "UnsupportedGrantTypeException"; + $fault 
= "client"; + error; + error_description; + constructor(opts) { + super({ + name: "UnsupportedGrantTypeException", + $fault: "client", + ...opts, + }); + Object.setPrototypeOf(this, UnsupportedGrantTypeException.prototype); + this.error = opts.error; + this.error_description = opts.error_description; + } +} diff --git a/amplify/functions/fetchDocuments/node_modules/@aws-sdk/nested-clients/dist-es/submodules/sso-oidc/protocols/Aws_restJson1.js b/amplify/functions/fetchDocuments/node_modules/@aws-sdk/nested-clients/dist-es/submodules/sso-oidc/protocols/Aws_restJson1.js new file mode 100644 index 0000000..b58850b --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@aws-sdk/nested-clients/dist-es/submodules/sso-oidc/protocols/Aws_restJson1.js @@ -0,0 +1,255 @@ +import { loadRestJsonErrorCode, parseJsonBody as parseBody, parseJsonErrorBody as parseErrorBody } from "@aws-sdk/core"; +import { requestBuilder as rb } from "@smithy/core"; +import { _json, collectBody, decorateServiceException as __decorateServiceException, expectInt32 as __expectInt32, expectNonNull as __expectNonNull, expectObject as __expectObject, expectString as __expectString, map, take, withBaseException, } from "@smithy/smithy-client"; +import { AccessDeniedException, AuthorizationPendingException, ExpiredTokenException, InternalServerException, InvalidClientException, InvalidGrantException, InvalidRequestException, InvalidScopeException, SlowDownException, UnauthorizedClientException, UnsupportedGrantTypeException, } from "../models/models_0"; +import { SSOOIDCServiceException as __BaseException } from "../models/SSOOIDCServiceException"; +export const se_CreateTokenCommand = async (input, context) => { + const b = rb(input, context); + const headers = { + "content-type": "application/json", + }; + b.bp("/token"); + let body; + body = JSON.stringify(take(input, { + clientId: [], + clientSecret: [], + code: [], + codeVerifier: [], + deviceCode: [], + grantType: [], + redirectUri: [], + 
refreshToken: [], + scope: (_) => _json(_), + })); + b.m("POST").h(headers).b(body); + return b.build(); +}; +export const de_CreateTokenCommand = async (output, context) => { + if (output.statusCode !== 200 && output.statusCode >= 300) { + return de_CommandError(output, context); + } + const contents = map({ + $metadata: deserializeMetadata(output), + }); + const data = __expectNonNull(__expectObject(await parseBody(output.body, context)), "body"); + const doc = take(data, { + accessToken: __expectString, + expiresIn: __expectInt32, + idToken: __expectString, + refreshToken: __expectString, + tokenType: __expectString, + }); + Object.assign(contents, doc); + return contents; +}; +const de_CommandError = async (output, context) => { + const parsedOutput = { + ...output, + body: await parseErrorBody(output.body, context), + }; + const errorCode = loadRestJsonErrorCode(output, parsedOutput.body); + switch (errorCode) { + case "AccessDeniedException": + case "com.amazonaws.ssooidc#AccessDeniedException": + throw await de_AccessDeniedExceptionRes(parsedOutput, context); + case "AuthorizationPendingException": + case "com.amazonaws.ssooidc#AuthorizationPendingException": + throw await de_AuthorizationPendingExceptionRes(parsedOutput, context); + case "ExpiredTokenException": + case "com.amazonaws.ssooidc#ExpiredTokenException": + throw await de_ExpiredTokenExceptionRes(parsedOutput, context); + case "InternalServerException": + case "com.amazonaws.ssooidc#InternalServerException": + throw await de_InternalServerExceptionRes(parsedOutput, context); + case "InvalidClientException": + case "com.amazonaws.ssooidc#InvalidClientException": + throw await de_InvalidClientExceptionRes(parsedOutput, context); + case "InvalidGrantException": + case "com.amazonaws.ssooidc#InvalidGrantException": + throw await de_InvalidGrantExceptionRes(parsedOutput, context); + case "InvalidRequestException": + case "com.amazonaws.ssooidc#InvalidRequestException": + throw await 
de_InvalidRequestExceptionRes(parsedOutput, context); + case "InvalidScopeException": + case "com.amazonaws.ssooidc#InvalidScopeException": + throw await de_InvalidScopeExceptionRes(parsedOutput, context); + case "SlowDownException": + case "com.amazonaws.ssooidc#SlowDownException": + throw await de_SlowDownExceptionRes(parsedOutput, context); + case "UnauthorizedClientException": + case "com.amazonaws.ssooidc#UnauthorizedClientException": + throw await de_UnauthorizedClientExceptionRes(parsedOutput, context); + case "UnsupportedGrantTypeException": + case "com.amazonaws.ssooidc#UnsupportedGrantTypeException": + throw await de_UnsupportedGrantTypeExceptionRes(parsedOutput, context); + default: + const parsedBody = parsedOutput.body; + return throwDefaultError({ + output, + parsedBody, + errorCode, + }); + } +}; +const throwDefaultError = withBaseException(__BaseException); +const de_AccessDeniedExceptionRes = async (parsedOutput, context) => { + const contents = map({}); + const data = parsedOutput.body; + const doc = take(data, { + error: __expectString, + error_description: __expectString, + }); + Object.assign(contents, doc); + const exception = new AccessDeniedException({ + $metadata: deserializeMetadata(parsedOutput), + ...contents, + }); + return __decorateServiceException(exception, parsedOutput.body); +}; +const de_AuthorizationPendingExceptionRes = async (parsedOutput, context) => { + const contents = map({}); + const data = parsedOutput.body; + const doc = take(data, { + error: __expectString, + error_description: __expectString, + }); + Object.assign(contents, doc); + const exception = new AuthorizationPendingException({ + $metadata: deserializeMetadata(parsedOutput), + ...contents, + }); + return __decorateServiceException(exception, parsedOutput.body); +}; +const de_ExpiredTokenExceptionRes = async (parsedOutput, context) => { + const contents = map({}); + const data = parsedOutput.body; + const doc = take(data, { + error: __expectString, + 
error_description: __expectString, + }); + Object.assign(contents, doc); + const exception = new ExpiredTokenException({ + $metadata: deserializeMetadata(parsedOutput), + ...contents, + }); + return __decorateServiceException(exception, parsedOutput.body); +}; +const de_InternalServerExceptionRes = async (parsedOutput, context) => { + const contents = map({}); + const data = parsedOutput.body; + const doc = take(data, { + error: __expectString, + error_description: __expectString, + }); + Object.assign(contents, doc); + const exception = new InternalServerException({ + $metadata: deserializeMetadata(parsedOutput), + ...contents, + }); + return __decorateServiceException(exception, parsedOutput.body); +}; +const de_InvalidClientExceptionRes = async (parsedOutput, context) => { + const contents = map({}); + const data = parsedOutput.body; + const doc = take(data, { + error: __expectString, + error_description: __expectString, + }); + Object.assign(contents, doc); + const exception = new InvalidClientException({ + $metadata: deserializeMetadata(parsedOutput), + ...contents, + }); + return __decorateServiceException(exception, parsedOutput.body); +}; +const de_InvalidGrantExceptionRes = async (parsedOutput, context) => { + const contents = map({}); + const data = parsedOutput.body; + const doc = take(data, { + error: __expectString, + error_description: __expectString, + }); + Object.assign(contents, doc); + const exception = new InvalidGrantException({ + $metadata: deserializeMetadata(parsedOutput), + ...contents, + }); + return __decorateServiceException(exception, parsedOutput.body); +}; +const de_InvalidRequestExceptionRes = async (parsedOutput, context) => { + const contents = map({}); + const data = parsedOutput.body; + const doc = take(data, { + error: __expectString, + error_description: __expectString, + }); + Object.assign(contents, doc); + const exception = new InvalidRequestException({ + $metadata: deserializeMetadata(parsedOutput), + ...contents, + }); + 
return __decorateServiceException(exception, parsedOutput.body); +}; +const de_InvalidScopeExceptionRes = async (parsedOutput, context) => { + const contents = map({}); + const data = parsedOutput.body; + const doc = take(data, { + error: __expectString, + error_description: __expectString, + }); + Object.assign(contents, doc); + const exception = new InvalidScopeException({ + $metadata: deserializeMetadata(parsedOutput), + ...contents, + }); + return __decorateServiceException(exception, parsedOutput.body); +}; +const de_SlowDownExceptionRes = async (parsedOutput, context) => { + const contents = map({}); + const data = parsedOutput.body; + const doc = take(data, { + error: __expectString, + error_description: __expectString, + }); + Object.assign(contents, doc); + const exception = new SlowDownException({ + $metadata: deserializeMetadata(parsedOutput), + ...contents, + }); + return __decorateServiceException(exception, parsedOutput.body); +}; +const de_UnauthorizedClientExceptionRes = async (parsedOutput, context) => { + const contents = map({}); + const data = parsedOutput.body; + const doc = take(data, { + error: __expectString, + error_description: __expectString, + }); + Object.assign(contents, doc); + const exception = new UnauthorizedClientException({ + $metadata: deserializeMetadata(parsedOutput), + ...contents, + }); + return __decorateServiceException(exception, parsedOutput.body); +}; +const de_UnsupportedGrantTypeExceptionRes = async (parsedOutput, context) => { + const contents = map({}); + const data = parsedOutput.body; + const doc = take(data, { + error: __expectString, + error_description: __expectString, + }); + Object.assign(contents, doc); + const exception = new UnsupportedGrantTypeException({ + $metadata: deserializeMetadata(parsedOutput), + ...contents, + }); + return __decorateServiceException(exception, parsedOutput.body); +}; +const deserializeMetadata = (output) => ({ + httpStatusCode: output.statusCode, + requestId: 
output.headers["x-amzn-requestid"] ?? output.headers["x-amzn-request-id"] ?? output.headers["x-amz-request-id"], + extendedRequestId: output.headers["x-amz-id-2"], + cfId: output.headers["x-amz-cf-id"], +}); +const collectBodyString = (streamBody, context) => collectBody(streamBody, context).then((body) => context.utf8Encoder(body)); diff --git a/amplify/functions/fetchDocuments/node_modules/@aws-sdk/nested-clients/dist-es/submodules/sso-oidc/runtimeConfig.browser.js b/amplify/functions/fetchDocuments/node_modules/@aws-sdk/nested-clients/dist-es/submodules/sso-oidc/runtimeConfig.browser.js new file mode 100644 index 0000000..94d7b87 --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@aws-sdk/nested-clients/dist-es/submodules/sso-oidc/runtimeConfig.browser.js @@ -0,0 +1,33 @@ +import packageInfo from "../../../package.json"; +import { Sha256 } from "@aws-crypto/sha256-browser"; +import { createDefaultUserAgentProvider } from "@aws-sdk/util-user-agent-browser"; +import { DEFAULT_USE_DUALSTACK_ENDPOINT, DEFAULT_USE_FIPS_ENDPOINT } from "@smithy/config-resolver"; +import { FetchHttpHandler as RequestHandler, streamCollector } from "@smithy/fetch-http-handler"; +import { invalidProvider } from "@smithy/invalid-dependency"; +import { calculateBodyLength } from "@smithy/util-body-length-browser"; +import { DEFAULT_MAX_ATTEMPTS, DEFAULT_RETRY_MODE } from "@smithy/util-retry"; +import { getRuntimeConfig as getSharedRuntimeConfig } from "./runtimeConfig.shared"; +import { loadConfigsForDefaultMode } from "@smithy/smithy-client"; +import { resolveDefaultsModeConfig } from "@smithy/util-defaults-mode-browser"; +export const getRuntimeConfig = (config) => { + const defaultsMode = resolveDefaultsModeConfig(config); + const defaultConfigProvider = () => defaultsMode().then(loadConfigsForDefaultMode); + const clientSharedValues = getSharedRuntimeConfig(config); + return { + ...clientSharedValues, + ...config, + runtime: "browser", + defaultsMode, + 
bodyLengthChecker: config?.bodyLengthChecker ?? calculateBodyLength, + defaultUserAgentProvider: config?.defaultUserAgentProvider ?? + createDefaultUserAgentProvider({ serviceId: clientSharedValues.serviceId, clientVersion: packageInfo.version }), + maxAttempts: config?.maxAttempts ?? DEFAULT_MAX_ATTEMPTS, + region: config?.region ?? invalidProvider("Region is missing"), + requestHandler: RequestHandler.create(config?.requestHandler ?? defaultConfigProvider), + retryMode: config?.retryMode ?? (async () => (await defaultConfigProvider()).retryMode || DEFAULT_RETRY_MODE), + sha256: config?.sha256 ?? Sha256, + streamCollector: config?.streamCollector ?? streamCollector, + useDualstackEndpoint: config?.useDualstackEndpoint ?? (() => Promise.resolve(DEFAULT_USE_DUALSTACK_ENDPOINT)), + useFipsEndpoint: config?.useFipsEndpoint ?? (() => Promise.resolve(DEFAULT_USE_FIPS_ENDPOINT)), + }; +}; diff --git a/amplify/functions/fetchDocuments/node_modules/@aws-sdk/nested-clients/dist-es/submodules/sso-oidc/runtimeConfig.js b/amplify/functions/fetchDocuments/node_modules/@aws-sdk/nested-clients/dist-es/submodules/sso-oidc/runtimeConfig.js new file mode 100644 index 0000000..32d413c --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@aws-sdk/nested-clients/dist-es/submodules/sso-oidc/runtimeConfig.js @@ -0,0 +1,46 @@ +import packageInfo from "../../../package.json"; +import { NODE_AUTH_SCHEME_PREFERENCE_OPTIONS, emitWarningIfUnsupportedVersion as awsCheckVersion } from "@aws-sdk/core"; +import { NODE_APP_ID_CONFIG_OPTIONS, createDefaultUserAgentProvider } from "@aws-sdk/util-user-agent-node"; +import { NODE_REGION_CONFIG_FILE_OPTIONS, NODE_REGION_CONFIG_OPTIONS, NODE_USE_DUALSTACK_ENDPOINT_CONFIG_OPTIONS, NODE_USE_FIPS_ENDPOINT_CONFIG_OPTIONS, } from "@smithy/config-resolver"; +import { Hash } from "@smithy/hash-node"; +import { NODE_MAX_ATTEMPT_CONFIG_OPTIONS, NODE_RETRY_MODE_CONFIG_OPTIONS } from "@smithy/middleware-retry"; +import { loadConfig as loadNodeConfig } 
from "@smithy/node-config-provider"; +import { NodeHttpHandler as RequestHandler, streamCollector } from "@smithy/node-http-handler"; +import { calculateBodyLength } from "@smithy/util-body-length-node"; +import { DEFAULT_RETRY_MODE } from "@smithy/util-retry"; +import { getRuntimeConfig as getSharedRuntimeConfig } from "./runtimeConfig.shared"; +import { loadConfigsForDefaultMode } from "@smithy/smithy-client"; +import { resolveDefaultsModeConfig } from "@smithy/util-defaults-mode-node"; +import { emitWarningIfUnsupportedVersion } from "@smithy/smithy-client"; +export const getRuntimeConfig = (config) => { + emitWarningIfUnsupportedVersion(process.version); + const defaultsMode = resolveDefaultsModeConfig(config); + const defaultConfigProvider = () => defaultsMode().then(loadConfigsForDefaultMode); + const clientSharedValues = getSharedRuntimeConfig(config); + awsCheckVersion(process.version); + const profileConfig = { profile: config?.profile }; + return { + ...clientSharedValues, + ...config, + runtime: "node", + defaultsMode, + authSchemePreference: config?.authSchemePreference ?? loadNodeConfig(NODE_AUTH_SCHEME_PREFERENCE_OPTIONS, profileConfig), + bodyLengthChecker: config?.bodyLengthChecker ?? calculateBodyLength, + defaultUserAgentProvider: config?.defaultUserAgentProvider ?? + createDefaultUserAgentProvider({ serviceId: clientSharedValues.serviceId, clientVersion: packageInfo.version }), + maxAttempts: config?.maxAttempts ?? loadNodeConfig(NODE_MAX_ATTEMPT_CONFIG_OPTIONS, config), + region: config?.region ?? + loadNodeConfig(NODE_REGION_CONFIG_OPTIONS, { ...NODE_REGION_CONFIG_FILE_OPTIONS, ...profileConfig }), + requestHandler: RequestHandler.create(config?.requestHandler ?? defaultConfigProvider), + retryMode: config?.retryMode ?? + loadNodeConfig({ + ...NODE_RETRY_MODE_CONFIG_OPTIONS, + default: async () => (await defaultConfigProvider()).retryMode || DEFAULT_RETRY_MODE, + }, config), + sha256: config?.sha256 ?? 
Hash.bind(null, "sha256"), + streamCollector: config?.streamCollector ?? streamCollector, + useDualstackEndpoint: config?.useDualstackEndpoint ?? loadNodeConfig(NODE_USE_DUALSTACK_ENDPOINT_CONFIG_OPTIONS, profileConfig), + useFipsEndpoint: config?.useFipsEndpoint ?? loadNodeConfig(NODE_USE_FIPS_ENDPOINT_CONFIG_OPTIONS, profileConfig), + userAgentAppId: config?.userAgentAppId ?? loadNodeConfig(NODE_APP_ID_CONFIG_OPTIONS, profileConfig), + }; +}; diff --git a/amplify/functions/fetchDocuments/node_modules/@aws-sdk/nested-clients/dist-es/submodules/sso-oidc/runtimeConfig.native.js b/amplify/functions/fetchDocuments/node_modules/@aws-sdk/nested-clients/dist-es/submodules/sso-oidc/runtimeConfig.native.js new file mode 100644 index 0000000..0b54695 --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@aws-sdk/nested-clients/dist-es/submodules/sso-oidc/runtimeConfig.native.js @@ -0,0 +1,11 @@ +import { Sha256 } from "@aws-crypto/sha256-js"; +import { getRuntimeConfig as getBrowserRuntimeConfig } from "./runtimeConfig.browser"; +export const getRuntimeConfig = (config) => { + const browserDefaults = getBrowserRuntimeConfig(config); + return { + ...browserDefaults, + ...config, + runtime: "react-native", + sha256: config?.sha256 ?? 
Sha256, + }; +}; diff --git a/amplify/functions/fetchDocuments/node_modules/@aws-sdk/nested-clients/dist-es/submodules/sso-oidc/runtimeConfig.shared.js b/amplify/functions/fetchDocuments/node_modules/@aws-sdk/nested-clients/dist-es/submodules/sso-oidc/runtimeConfig.shared.js new file mode 100644 index 0000000..49a0235 --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@aws-sdk/nested-clients/dist-es/submodules/sso-oidc/runtimeConfig.shared.js @@ -0,0 +1,36 @@ +import { AwsSdkSigV4Signer } from "@aws-sdk/core"; +import { NoAuthSigner } from "@smithy/core"; +import { NoOpLogger } from "@smithy/smithy-client"; +import { parseUrl } from "@smithy/url-parser"; +import { fromBase64, toBase64 } from "@smithy/util-base64"; +import { fromUtf8, toUtf8 } from "@smithy/util-utf8"; +import { defaultSSOOIDCHttpAuthSchemeProvider } from "./auth/httpAuthSchemeProvider"; +import { defaultEndpointResolver } from "./endpoint/endpointResolver"; +export const getRuntimeConfig = (config) => { + return { + apiVersion: "2019-06-10", + base64Decoder: config?.base64Decoder ?? fromBase64, + base64Encoder: config?.base64Encoder ?? toBase64, + disableHostPrefix: config?.disableHostPrefix ?? false, + endpointProvider: config?.endpointProvider ?? defaultEndpointResolver, + extensions: config?.extensions ?? [], + httpAuthSchemeProvider: config?.httpAuthSchemeProvider ?? defaultSSOOIDCHttpAuthSchemeProvider, + httpAuthSchemes: config?.httpAuthSchemes ?? [ + { + schemeId: "aws.auth#sigv4", + identityProvider: (ipc) => ipc.getIdentityProvider("aws.auth#sigv4"), + signer: new AwsSdkSigV4Signer(), + }, + { + schemeId: "smithy.api#noAuth", + identityProvider: (ipc) => ipc.getIdentityProvider("smithy.api#noAuth") || (async () => ({})), + signer: new NoAuthSigner(), + }, + ], + logger: config?.logger ?? new NoOpLogger(), + serviceId: config?.serviceId ?? "SSO OIDC", + urlParser: config?.urlParser ?? parseUrl, + utf8Decoder: config?.utf8Decoder ?? 
fromUtf8, + utf8Encoder: config?.utf8Encoder ?? toUtf8, + }; +}; diff --git a/amplify/functions/fetchDocuments/node_modules/@aws-sdk/nested-clients/dist-es/submodules/sso-oidc/runtimeExtensions.js b/amplify/functions/fetchDocuments/node_modules/@aws-sdk/nested-clients/dist-es/submodules/sso-oidc/runtimeExtensions.js new file mode 100644 index 0000000..5b29695 --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@aws-sdk/nested-clients/dist-es/submodules/sso-oidc/runtimeExtensions.js @@ -0,0 +1,9 @@ +import { getAwsRegionExtensionConfiguration, resolveAwsRegionExtensionConfiguration, } from "@aws-sdk/region-config-resolver"; +import { getHttpHandlerExtensionConfiguration, resolveHttpHandlerRuntimeConfig } from "@smithy/protocol-http"; +import { getDefaultExtensionConfiguration, resolveDefaultRuntimeConfig } from "@smithy/smithy-client"; +import { getHttpAuthExtensionConfiguration, resolveHttpAuthRuntimeConfig } from "./auth/httpAuthExtensionConfiguration"; +export const resolveRuntimeExtensions = (runtimeConfig, extensions) => { + const extensionConfiguration = Object.assign(getAwsRegionExtensionConfiguration(runtimeConfig), getDefaultExtensionConfiguration(runtimeConfig), getHttpHandlerExtensionConfiguration(runtimeConfig), getHttpAuthExtensionConfiguration(runtimeConfig)); + extensions.forEach((extension) => extension.configure(extensionConfiguration)); + return Object.assign(runtimeConfig, resolveAwsRegionExtensionConfiguration(extensionConfiguration), resolveDefaultRuntimeConfig(extensionConfiguration), resolveHttpHandlerRuntimeConfig(extensionConfiguration), resolveHttpAuthRuntimeConfig(extensionConfiguration)); +}; diff --git a/amplify/functions/fetchDocuments/node_modules/@aws-sdk/nested-clients/dist-es/submodules/sts/STS.js b/amplify/functions/fetchDocuments/node_modules/@aws-sdk/nested-clients/dist-es/submodules/sts/STS.js new file mode 100644 index 0000000..71edef7 --- /dev/null +++ 
b/amplify/functions/fetchDocuments/node_modules/@aws-sdk/nested-clients/dist-es/submodules/sts/STS.js @@ -0,0 +1,11 @@ +import { createAggregatedClient } from "@smithy/smithy-client"; +import { AssumeRoleCommand } from "./commands/AssumeRoleCommand"; +import { AssumeRoleWithWebIdentityCommand, } from "./commands/AssumeRoleWithWebIdentityCommand"; +import { STSClient } from "./STSClient"; +const commands = { + AssumeRoleCommand, + AssumeRoleWithWebIdentityCommand, +}; +export class STS extends STSClient { +} +createAggregatedClient(commands, STS); diff --git a/amplify/functions/fetchDocuments/node_modules/@aws-sdk/nested-clients/dist-es/submodules/sts/STSClient.js b/amplify/functions/fetchDocuments/node_modules/@aws-sdk/nested-clients/dist-es/submodules/sts/STSClient.js new file mode 100644 index 0000000..81b1040 --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@aws-sdk/nested-clients/dist-es/submodules/sts/STSClient.js @@ -0,0 +1,48 @@ +import { getHostHeaderPlugin, resolveHostHeaderConfig, } from "@aws-sdk/middleware-host-header"; +import { getLoggerPlugin } from "@aws-sdk/middleware-logger"; +import { getRecursionDetectionPlugin } from "@aws-sdk/middleware-recursion-detection"; +import { getUserAgentPlugin, resolveUserAgentConfig, } from "@aws-sdk/middleware-user-agent"; +import { resolveRegionConfig } from "@smithy/config-resolver"; +import { DefaultIdentityProviderConfig, getHttpAuthSchemeEndpointRuleSetPlugin, getHttpSigningPlugin, } from "@smithy/core"; +import { getContentLengthPlugin } from "@smithy/middleware-content-length"; +import { resolveEndpointConfig } from "@smithy/middleware-endpoint"; +import { getRetryPlugin, resolveRetryConfig } from "@smithy/middleware-retry"; +import { Client as __Client, } from "@smithy/smithy-client"; +import { defaultSTSHttpAuthSchemeParametersProvider, resolveHttpAuthSchemeConfig, } from "./auth/httpAuthSchemeProvider"; +import { resolveClientEndpointParameters, } from "./endpoint/EndpointParameters"; 
+import { getRuntimeConfig as __getRuntimeConfig } from "./runtimeConfig"; +import { resolveRuntimeExtensions } from "./runtimeExtensions"; +export { __Client }; +export class STSClient extends __Client { + config; + constructor(...[configuration]) { + const _config_0 = __getRuntimeConfig(configuration || {}); + super(_config_0); + this.initConfig = _config_0; + const _config_1 = resolveClientEndpointParameters(_config_0); + const _config_2 = resolveUserAgentConfig(_config_1); + const _config_3 = resolveRetryConfig(_config_2); + const _config_4 = resolveRegionConfig(_config_3); + const _config_5 = resolveHostHeaderConfig(_config_4); + const _config_6 = resolveEndpointConfig(_config_5); + const _config_7 = resolveHttpAuthSchemeConfig(_config_6); + const _config_8 = resolveRuntimeExtensions(_config_7, configuration?.extensions || []); + this.config = _config_8; + this.middlewareStack.use(getUserAgentPlugin(this.config)); + this.middlewareStack.use(getRetryPlugin(this.config)); + this.middlewareStack.use(getContentLengthPlugin(this.config)); + this.middlewareStack.use(getHostHeaderPlugin(this.config)); + this.middlewareStack.use(getLoggerPlugin(this.config)); + this.middlewareStack.use(getRecursionDetectionPlugin(this.config)); + this.middlewareStack.use(getHttpAuthSchemeEndpointRuleSetPlugin(this.config, { + httpAuthSchemeParametersProvider: defaultSTSHttpAuthSchemeParametersProvider, + identityProviderConfigProvider: async (config) => new DefaultIdentityProviderConfig({ + "aws.auth#sigv4": config.credentials, + }), + })); + this.middlewareStack.use(getHttpSigningPlugin(this.config)); + } + destroy() { + super.destroy(); + } +} diff --git a/amplify/functions/fetchDocuments/node_modules/@aws-sdk/nested-clients/dist-es/submodules/sts/auth/httpAuthExtensionConfiguration.js b/amplify/functions/fetchDocuments/node_modules/@aws-sdk/nested-clients/dist-es/submodules/sts/auth/httpAuthExtensionConfiguration.js new file mode 100644 index 0000000..2ba1d48 --- /dev/null +++ 
b/amplify/functions/fetchDocuments/node_modules/@aws-sdk/nested-clients/dist-es/submodules/sts/auth/httpAuthExtensionConfiguration.js @@ -0,0 +1,38 @@ +export const getHttpAuthExtensionConfiguration = (runtimeConfig) => { + const _httpAuthSchemes = runtimeConfig.httpAuthSchemes; + let _httpAuthSchemeProvider = runtimeConfig.httpAuthSchemeProvider; + let _credentials = runtimeConfig.credentials; + return { + setHttpAuthScheme(httpAuthScheme) { + const index = _httpAuthSchemes.findIndex((scheme) => scheme.schemeId === httpAuthScheme.schemeId); + if (index === -1) { + _httpAuthSchemes.push(httpAuthScheme); + } + else { + _httpAuthSchemes.splice(index, 1, httpAuthScheme); + } + }, + httpAuthSchemes() { + return _httpAuthSchemes; + }, + setHttpAuthSchemeProvider(httpAuthSchemeProvider) { + _httpAuthSchemeProvider = httpAuthSchemeProvider; + }, + httpAuthSchemeProvider() { + return _httpAuthSchemeProvider; + }, + setCredentials(credentials) { + _credentials = credentials; + }, + credentials() { + return _credentials; + }, + }; +}; +export const resolveHttpAuthRuntimeConfig = (config) => { + return { + httpAuthSchemes: config.httpAuthSchemes(), + httpAuthSchemeProvider: config.httpAuthSchemeProvider(), + credentials: config.credentials(), + }; +}; diff --git a/amplify/functions/fetchDocuments/node_modules/@aws-sdk/nested-clients/dist-es/submodules/sts/auth/httpAuthSchemeProvider.js b/amplify/functions/fetchDocuments/node_modules/@aws-sdk/nested-clients/dist-es/submodules/sts/auth/httpAuthSchemeProvider.js new file mode 100644 index 0000000..3ea1e49 --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@aws-sdk/nested-clients/dist-es/submodules/sts/auth/httpAuthSchemeProvider.js @@ -0,0 +1,55 @@ +import { resolveAwsSdkSigV4Config, } from "@aws-sdk/core"; +import { getSmithyContext, normalizeProvider } from "@smithy/util-middleware"; +import { STSClient } from "../STSClient"; +export const defaultSTSHttpAuthSchemeParametersProvider = async (config, context, 
input) => { + return { + operation: getSmithyContext(context).operation, + region: (await normalizeProvider(config.region)()) || + (() => { + throw new Error("expected `region` to be configured for `aws.auth#sigv4`"); + })(), + }; +}; +function createAwsAuthSigv4HttpAuthOption(authParameters) { + return { + schemeId: "aws.auth#sigv4", + signingProperties: { + name: "sts", + region: authParameters.region, + }, + propertiesExtractor: (config, context) => ({ + signingProperties: { + config, + context, + }, + }), + }; +} +function createSmithyApiNoAuthHttpAuthOption(authParameters) { + return { + schemeId: "smithy.api#noAuth", + }; +} +export const defaultSTSHttpAuthSchemeProvider = (authParameters) => { + const options = []; + switch (authParameters.operation) { + case "AssumeRoleWithWebIdentity": { + options.push(createSmithyApiNoAuthHttpAuthOption(authParameters)); + break; + } + default: { + options.push(createAwsAuthSigv4HttpAuthOption(authParameters)); + } + } + return options; +}; +export const resolveStsAuthConfig = (input) => Object.assign(input, { + stsClientCtor: STSClient, +}); +export const resolveHttpAuthSchemeConfig = (config) => { + const config_0 = resolveStsAuthConfig(config); + const config_1 = resolveAwsSdkSigV4Config(config_0); + return Object.assign(config_1, { + authSchemePreference: normalizeProvider(config.authSchemePreference ?? 
[]), + }); +}; diff --git a/amplify/functions/fetchDocuments/node_modules/@aws-sdk/nested-clients/dist-es/submodules/sts/commands/AssumeRoleCommand.js b/amplify/functions/fetchDocuments/node_modules/@aws-sdk/nested-clients/dist-es/submodules/sts/commands/AssumeRoleCommand.js new file mode 100644 index 0000000..bcb8589 --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@aws-sdk/nested-clients/dist-es/submodules/sts/commands/AssumeRoleCommand.js @@ -0,0 +1,23 @@ +import { getEndpointPlugin } from "@smithy/middleware-endpoint"; +import { getSerdePlugin } from "@smithy/middleware-serde"; +import { Command as $Command } from "@smithy/smithy-client"; +import { commonParams } from "../endpoint/EndpointParameters"; +import { AssumeRoleResponseFilterSensitiveLog } from "../models/models_0"; +import { de_AssumeRoleCommand, se_AssumeRoleCommand } from "../protocols/Aws_query"; +export { $Command }; +export class AssumeRoleCommand extends $Command + .classBuilder() + .ep(commonParams) + .m(function (Command, cs, config, o) { + return [ + getSerdePlugin(config, this.serialize, this.deserialize), + getEndpointPlugin(config, Command.getEndpointParameterInstructions()), + ]; +}) + .s("AWSSecurityTokenServiceV20110615", "AssumeRole", {}) + .n("STSClient", "AssumeRoleCommand") + .f(void 0, AssumeRoleResponseFilterSensitiveLog) + .ser(se_AssumeRoleCommand) + .de(de_AssumeRoleCommand) + .build() { +} diff --git a/amplify/functions/fetchDocuments/node_modules/@aws-sdk/nested-clients/dist-es/submodules/sts/commands/AssumeRoleWithWebIdentityCommand.js b/amplify/functions/fetchDocuments/node_modules/@aws-sdk/nested-clients/dist-es/submodules/sts/commands/AssumeRoleWithWebIdentityCommand.js new file mode 100644 index 0000000..e4ecc2e --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@aws-sdk/nested-clients/dist-es/submodules/sts/commands/AssumeRoleWithWebIdentityCommand.js @@ -0,0 +1,23 @@ +import { getEndpointPlugin } from "@smithy/middleware-endpoint"; 
+import { getSerdePlugin } from "@smithy/middleware-serde"; +import { Command as $Command } from "@smithy/smithy-client"; +import { commonParams } from "../endpoint/EndpointParameters"; +import { AssumeRoleWithWebIdentityRequestFilterSensitiveLog, AssumeRoleWithWebIdentityResponseFilterSensitiveLog, } from "../models/models_0"; +import { de_AssumeRoleWithWebIdentityCommand, se_AssumeRoleWithWebIdentityCommand } from "../protocols/Aws_query"; +export { $Command }; +export class AssumeRoleWithWebIdentityCommand extends $Command + .classBuilder() + .ep(commonParams) + .m(function (Command, cs, config, o) { + return [ + getSerdePlugin(config, this.serialize, this.deserialize), + getEndpointPlugin(config, Command.getEndpointParameterInstructions()), + ]; +}) + .s("AWSSecurityTokenServiceV20110615", "AssumeRoleWithWebIdentity", {}) + .n("STSClient", "AssumeRoleWithWebIdentityCommand") + .f(AssumeRoleWithWebIdentityRequestFilterSensitiveLog, AssumeRoleWithWebIdentityResponseFilterSensitiveLog) + .ser(se_AssumeRoleWithWebIdentityCommand) + .de(de_AssumeRoleWithWebIdentityCommand) + .build() { +} diff --git a/amplify/functions/fetchDocuments/node_modules/@aws-sdk/nested-clients/dist-es/submodules/sts/commands/index.js b/amplify/functions/fetchDocuments/node_modules/@aws-sdk/nested-clients/dist-es/submodules/sts/commands/index.js new file mode 100644 index 0000000..0f200f5 --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@aws-sdk/nested-clients/dist-es/submodules/sts/commands/index.js @@ -0,0 +1,2 @@ +export * from "./AssumeRoleCommand"; +export * from "./AssumeRoleWithWebIdentityCommand"; diff --git a/amplify/functions/fetchDocuments/node_modules/@aws-sdk/nested-clients/dist-es/submodules/sts/defaultRoleAssumers.js b/amplify/functions/fetchDocuments/node_modules/@aws-sdk/nested-clients/dist-es/submodules/sts/defaultRoleAssumers.js new file mode 100644 index 0000000..aafb8c4 --- /dev/null +++ 
b/amplify/functions/fetchDocuments/node_modules/@aws-sdk/nested-clients/dist-es/submodules/sts/defaultRoleAssumers.js @@ -0,0 +1,22 @@ +import { getDefaultRoleAssumer as StsGetDefaultRoleAssumer, getDefaultRoleAssumerWithWebIdentity as StsGetDefaultRoleAssumerWithWebIdentity, } from "./defaultStsRoleAssumers"; +import { STSClient } from "./STSClient"; +const getCustomizableStsClientCtor = (baseCtor, customizations) => { + if (!customizations) + return baseCtor; + else + return class CustomizableSTSClient extends baseCtor { + constructor(config) { + super(config); + for (const customization of customizations) { + this.middlewareStack.use(customization); + } + } + }; +}; +export const getDefaultRoleAssumer = (stsOptions = {}, stsPlugins) => StsGetDefaultRoleAssumer(stsOptions, getCustomizableStsClientCtor(STSClient, stsPlugins)); +export const getDefaultRoleAssumerWithWebIdentity = (stsOptions = {}, stsPlugins) => StsGetDefaultRoleAssumerWithWebIdentity(stsOptions, getCustomizableStsClientCtor(STSClient, stsPlugins)); +export const decorateDefaultCredentialProvider = (provider) => (input) => provider({ + roleAssumer: getDefaultRoleAssumer(input), + roleAssumerWithWebIdentity: getDefaultRoleAssumerWithWebIdentity(input), + ...input, +}); diff --git a/amplify/functions/fetchDocuments/node_modules/@aws-sdk/nested-clients/dist-es/submodules/sts/defaultStsRoleAssumers.js b/amplify/functions/fetchDocuments/node_modules/@aws-sdk/nested-clients/dist-es/submodules/sts/defaultStsRoleAssumers.js new file mode 100644 index 0000000..e7c7a90 --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@aws-sdk/nested-clients/dist-es/submodules/sts/defaultStsRoleAssumers.js @@ -0,0 +1,95 @@ +import { setCredentialFeature } from "@aws-sdk/core/client"; +import { AssumeRoleCommand } from "./commands/AssumeRoleCommand"; +import { AssumeRoleWithWebIdentityCommand, } from "./commands/AssumeRoleWithWebIdentityCommand"; +const ASSUME_ROLE_DEFAULT_REGION = "us-east-1"; +const 
getAccountIdFromAssumedRoleUser = (assumedRoleUser) => { + if (typeof assumedRoleUser?.Arn === "string") { + const arnComponents = assumedRoleUser.Arn.split(":"); + if (arnComponents.length > 4 && arnComponents[4] !== "") { + return arnComponents[4]; + } + } + return undefined; +}; +const resolveRegion = async (_region, _parentRegion, credentialProviderLogger) => { + const region = typeof _region === "function" ? await _region() : _region; + const parentRegion = typeof _parentRegion === "function" ? await _parentRegion() : _parentRegion; + credentialProviderLogger?.debug?.("@aws-sdk/client-sts::resolveRegion", "accepting first of:", `${region} (provider)`, `${parentRegion} (parent client)`, `${ASSUME_ROLE_DEFAULT_REGION} (STS default)`); + return region ?? parentRegion ?? ASSUME_ROLE_DEFAULT_REGION; +}; +export const getDefaultRoleAssumer = (stsOptions, STSClient) => { + let stsClient; + let closureSourceCreds; + return async (sourceCreds, params) => { + closureSourceCreds = sourceCreds; + if (!stsClient) { + const { logger = stsOptions?.parentClientConfig?.logger, region, requestHandler = stsOptions?.parentClientConfig?.requestHandler, credentialProviderLogger, } = stsOptions; + const resolvedRegion = await resolveRegion(region, stsOptions?.parentClientConfig?.region, credentialProviderLogger); + const isCompatibleRequestHandler = !isH2(requestHandler); + stsClient = new STSClient({ + profile: stsOptions?.parentClientConfig?.profile, + credentialDefaultProvider: () => async () => closureSourceCreds, + region: resolvedRegion, + requestHandler: isCompatibleRequestHandler ? 
requestHandler : undefined, + logger: logger, + }); + } + const { Credentials, AssumedRoleUser } = await stsClient.send(new AssumeRoleCommand(params)); + if (!Credentials || !Credentials.AccessKeyId || !Credentials.SecretAccessKey) { + throw new Error(`Invalid response from STS.assumeRole call with role ${params.RoleArn}`); + } + const accountId = getAccountIdFromAssumedRoleUser(AssumedRoleUser); + const credentials = { + accessKeyId: Credentials.AccessKeyId, + secretAccessKey: Credentials.SecretAccessKey, + sessionToken: Credentials.SessionToken, + expiration: Credentials.Expiration, + ...(Credentials.CredentialScope && { credentialScope: Credentials.CredentialScope }), + ...(accountId && { accountId }), + }; + setCredentialFeature(credentials, "CREDENTIALS_STS_ASSUME_ROLE", "i"); + return credentials; + }; +}; +export const getDefaultRoleAssumerWithWebIdentity = (stsOptions, STSClient) => { + let stsClient; + return async (params) => { + if (!stsClient) { + const { logger = stsOptions?.parentClientConfig?.logger, region, requestHandler = stsOptions?.parentClientConfig?.requestHandler, credentialProviderLogger, } = stsOptions; + const resolvedRegion = await resolveRegion(region, stsOptions?.parentClientConfig?.region, credentialProviderLogger); + const isCompatibleRequestHandler = !isH2(requestHandler); + stsClient = new STSClient({ + profile: stsOptions?.parentClientConfig?.profile, + region: resolvedRegion, + requestHandler: isCompatibleRequestHandler ? 
requestHandler : undefined, + logger: logger, + }); + } + const { Credentials, AssumedRoleUser } = await stsClient.send(new AssumeRoleWithWebIdentityCommand(params)); + if (!Credentials || !Credentials.AccessKeyId || !Credentials.SecretAccessKey) { + throw new Error(`Invalid response from STS.assumeRoleWithWebIdentity call with role ${params.RoleArn}`); + } + const accountId = getAccountIdFromAssumedRoleUser(AssumedRoleUser); + const credentials = { + accessKeyId: Credentials.AccessKeyId, + secretAccessKey: Credentials.SecretAccessKey, + sessionToken: Credentials.SessionToken, + expiration: Credentials.Expiration, + ...(Credentials.CredentialScope && { credentialScope: Credentials.CredentialScope }), + ...(accountId && { accountId }), + }; + if (accountId) { + setCredentialFeature(credentials, "RESOLVED_ACCOUNT_ID", "T"); + } + setCredentialFeature(credentials, "CREDENTIALS_STS_ASSUME_ROLE_WEB_ID", "k"); + return credentials; + }; +}; +export const decorateDefaultCredentialProvider = (provider) => (input) => provider({ + roleAssumer: getDefaultRoleAssumer(input, input.stsClientCtor), + roleAssumerWithWebIdentity: getDefaultRoleAssumerWithWebIdentity(input, input.stsClientCtor), + ...input, +}); +const isH2 = (requestHandler) => { + return requestHandler?.metadata?.handlerProtocol === "h2"; +}; diff --git a/amplify/functions/fetchDocuments/node_modules/@aws-sdk/nested-clients/dist-es/submodules/sts/endpoint/EndpointParameters.js b/amplify/functions/fetchDocuments/node_modules/@aws-sdk/nested-clients/dist-es/submodules/sts/endpoint/EndpointParameters.js new file mode 100644 index 0000000..1c74b01 --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@aws-sdk/nested-clients/dist-es/submodules/sts/endpoint/EndpointParameters.js @@ -0,0 +1,15 @@ +export const resolveClientEndpointParameters = (options) => { + return Object.assign(options, { + useDualstackEndpoint: options.useDualstackEndpoint ?? false, + useFipsEndpoint: options.useFipsEndpoint ?? 
false, + useGlobalEndpoint: options.useGlobalEndpoint ?? false, + defaultSigningName: "sts", + }); +}; +export const commonParams = { + UseGlobalEndpoint: { type: "builtInParams", name: "useGlobalEndpoint" }, + UseFIPS: { type: "builtInParams", name: "useFipsEndpoint" }, + Endpoint: { type: "builtInParams", name: "endpoint" }, + Region: { type: "builtInParams", name: "region" }, + UseDualStack: { type: "builtInParams", name: "useDualstackEndpoint" }, +}; diff --git a/amplify/functions/fetchDocuments/node_modules/@aws-sdk/nested-clients/dist-es/submodules/sts/endpoint/endpointResolver.js b/amplify/functions/fetchDocuments/node_modules/@aws-sdk/nested-clients/dist-es/submodules/sts/endpoint/endpointResolver.js new file mode 100644 index 0000000..f54d279 --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@aws-sdk/nested-clients/dist-es/submodules/sts/endpoint/endpointResolver.js @@ -0,0 +1,14 @@ +import { awsEndpointFunctions } from "@aws-sdk/util-endpoints"; +import { customEndpointFunctions, EndpointCache, resolveEndpoint } from "@smithy/util-endpoints"; +import { ruleSet } from "./ruleset"; +const cache = new EndpointCache({ + size: 50, + params: ["Endpoint", "Region", "UseDualStack", "UseFIPS", "UseGlobalEndpoint"], +}); +export const defaultEndpointResolver = (endpointParams, context = {}) => { + return cache.get(endpointParams, () => resolveEndpoint(ruleSet, { + endpointParams: endpointParams, + logger: context.logger, + })); +}; +customEndpointFunctions.aws = awsEndpointFunctions; diff --git a/amplify/functions/fetchDocuments/node_modules/@aws-sdk/nested-clients/dist-es/submodules/sts/endpoint/ruleset.js b/amplify/functions/fetchDocuments/node_modules/@aws-sdk/nested-clients/dist-es/submodules/sts/endpoint/ruleset.js new file mode 100644 index 0000000..99a438a --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@aws-sdk/nested-clients/dist-es/submodules/sts/endpoint/ruleset.js @@ -0,0 +1,4 @@ +const F = "required", G = "type", H = 
"fn", I = "argv", J = "ref"; +const a = false, b = true, c = "booleanEquals", d = "stringEquals", e = "sigv4", f = "sts", g = "us-east-1", h = "endpoint", i = "https://sts.{Region}.{PartitionResult#dnsSuffix}", j = "tree", k = "error", l = "getAttr", m = { [F]: false, [G]: "String" }, n = { [F]: true, "default": false, [G]: "Boolean" }, o = { [J]: "Endpoint" }, p = { [H]: "isSet", [I]: [{ [J]: "Region" }] }, q = { [J]: "Region" }, r = { [H]: "aws.partition", [I]: [q], "assign": "PartitionResult" }, s = { [J]: "UseFIPS" }, t = { [J]: "UseDualStack" }, u = { "url": "https://sts.amazonaws.com", "properties": { "authSchemes": [{ "name": e, "signingName": f, "signingRegion": g }] }, "headers": {} }, v = {}, w = { "conditions": [{ [H]: d, [I]: [q, "aws-global"] }], [h]: u, [G]: h }, x = { [H]: c, [I]: [s, true] }, y = { [H]: c, [I]: [t, true] }, z = { [H]: l, [I]: [{ [J]: "PartitionResult" }, "supportsFIPS"] }, A = { [J]: "PartitionResult" }, B = { [H]: c, [I]: [true, { [H]: l, [I]: [A, "supportsDualStack"] }] }, C = [{ [H]: "isSet", [I]: [o] }], D = [x], E = [y]; +const _data = { version: "1.0", parameters: { Region: m, UseDualStack: n, UseFIPS: n, Endpoint: m, UseGlobalEndpoint: n }, rules: [{ conditions: [{ [H]: c, [I]: [{ [J]: "UseGlobalEndpoint" }, b] }, { [H]: "not", [I]: C }, p, r, { [H]: c, [I]: [s, a] }, { [H]: c, [I]: [t, a] }], rules: [{ conditions: [{ [H]: d, [I]: [q, "ap-northeast-1"] }], endpoint: u, [G]: h }, { conditions: [{ [H]: d, [I]: [q, "ap-south-1"] }], endpoint: u, [G]: h }, { conditions: [{ [H]: d, [I]: [q, "ap-southeast-1"] }], endpoint: u, [G]: h }, { conditions: [{ [H]: d, [I]: [q, "ap-southeast-2"] }], endpoint: u, [G]: h }, w, { conditions: [{ [H]: d, [I]: [q, "ca-central-1"] }], endpoint: u, [G]: h }, { conditions: [{ [H]: d, [I]: [q, "eu-central-1"] }], endpoint: u, [G]: h }, { conditions: [{ [H]: d, [I]: [q, "eu-north-1"] }], endpoint: u, [G]: h }, { conditions: [{ [H]: d, [I]: [q, "eu-west-1"] }], endpoint: u, [G]: h }, { conditions: [{ 
[H]: d, [I]: [q, "eu-west-2"] }], endpoint: u, [G]: h }, { conditions: [{ [H]: d, [I]: [q, "eu-west-3"] }], endpoint: u, [G]: h }, { conditions: [{ [H]: d, [I]: [q, "sa-east-1"] }], endpoint: u, [G]: h }, { conditions: [{ [H]: d, [I]: [q, g] }], endpoint: u, [G]: h }, { conditions: [{ [H]: d, [I]: [q, "us-east-2"] }], endpoint: u, [G]: h }, { conditions: [{ [H]: d, [I]: [q, "us-west-1"] }], endpoint: u, [G]: h }, { conditions: [{ [H]: d, [I]: [q, "us-west-2"] }], endpoint: u, [G]: h }, { endpoint: { url: i, properties: { authSchemes: [{ name: e, signingName: f, signingRegion: "{Region}" }] }, headers: v }, [G]: h }], [G]: j }, { conditions: C, rules: [{ conditions: D, error: "Invalid Configuration: FIPS and custom endpoint are not supported", [G]: k }, { conditions: E, error: "Invalid Configuration: Dualstack and custom endpoint are not supported", [G]: k }, { endpoint: { url: o, properties: v, headers: v }, [G]: h }], [G]: j }, { conditions: [p], rules: [{ conditions: [r], rules: [{ conditions: [x, y], rules: [{ conditions: [{ [H]: c, [I]: [b, z] }, B], rules: [{ endpoint: { url: "https://sts-fips.{Region}.{PartitionResult#dualStackDnsSuffix}", properties: v, headers: v }, [G]: h }], [G]: j }, { error: "FIPS and DualStack are enabled, but this partition does not support one or both", [G]: k }], [G]: j }, { conditions: D, rules: [{ conditions: [{ [H]: c, [I]: [z, b] }], rules: [{ conditions: [{ [H]: d, [I]: [{ [H]: l, [I]: [A, "name"] }, "aws-us-gov"] }], endpoint: { url: "https://sts.{Region}.amazonaws.com", properties: v, headers: v }, [G]: h }, { endpoint: { url: "https://sts-fips.{Region}.{PartitionResult#dnsSuffix}", properties: v, headers: v }, [G]: h }], [G]: j }, { error: "FIPS is enabled but this partition does not support FIPS", [G]: k }], [G]: j }, { conditions: E, rules: [{ conditions: [B], rules: [{ endpoint: { url: "https://sts.{Region}.{PartitionResult#dualStackDnsSuffix}", properties: v, headers: v }, [G]: h }], [G]: j }, { error: "DualStack is 
enabled but this partition does not support DualStack", [G]: k }], [G]: j }, w, { endpoint: { url: i, properties: v, headers: v }, [G]: h }], [G]: j }], [G]: j }, { error: "Invalid Configuration: Missing Region", [G]: k }] }; +export const ruleSet = _data; diff --git a/amplify/functions/fetchDocuments/node_modules/@aws-sdk/nested-clients/dist-es/submodules/sts/extensionConfiguration.js b/amplify/functions/fetchDocuments/node_modules/@aws-sdk/nested-clients/dist-es/submodules/sts/extensionConfiguration.js new file mode 100644 index 0000000..cb0ff5c --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@aws-sdk/nested-clients/dist-es/submodules/sts/extensionConfiguration.js @@ -0,0 +1 @@ +export {}; diff --git a/amplify/functions/fetchDocuments/node_modules/@aws-sdk/nested-clients/dist-es/submodules/sts/index.js b/amplify/functions/fetchDocuments/node_modules/@aws-sdk/nested-clients/dist-es/submodules/sts/index.js new file mode 100644 index 0000000..fa366be --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@aws-sdk/nested-clients/dist-es/submodules/sts/index.js @@ -0,0 +1,6 @@ +export * from "./STSClient"; +export * from "./STS"; +export * from "./commands"; +export * from "./models"; +export * from "./defaultRoleAssumers"; +export { STSServiceException } from "./models/STSServiceException"; diff --git a/amplify/functions/fetchDocuments/node_modules/@aws-sdk/nested-clients/dist-es/submodules/sts/models/STSServiceException.js b/amplify/functions/fetchDocuments/node_modules/@aws-sdk/nested-clients/dist-es/submodules/sts/models/STSServiceException.js new file mode 100644 index 0000000..6d2963c --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@aws-sdk/nested-clients/dist-es/submodules/sts/models/STSServiceException.js @@ -0,0 +1,8 @@ +import { ServiceException as __ServiceException, } from "@smithy/smithy-client"; +export { __ServiceException }; +export class STSServiceException extends __ServiceException { + 
constructor(options) { + super(options); + Object.setPrototypeOf(this, STSServiceException.prototype); + } +} diff --git a/amplify/functions/fetchDocuments/node_modules/@aws-sdk/nested-clients/dist-es/submodules/sts/models/index.js b/amplify/functions/fetchDocuments/node_modules/@aws-sdk/nested-clients/dist-es/submodules/sts/models/index.js new file mode 100644 index 0000000..09c5d6e --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@aws-sdk/nested-clients/dist-es/submodules/sts/models/index.js @@ -0,0 +1 @@ +export * from "./models_0"; diff --git a/amplify/functions/fetchDocuments/node_modules/@aws-sdk/nested-clients/dist-es/submodules/sts/models/models_0.js b/amplify/functions/fetchDocuments/node_modules/@aws-sdk/nested-clients/dist-es/submodules/sts/models/models_0.js new file mode 100644 index 0000000..63e9c52 --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@aws-sdk/nested-clients/dist-es/submodules/sts/models/models_0.js @@ -0,0 +1,102 @@ +import { SENSITIVE_STRING } from "@smithy/smithy-client"; +import { STSServiceException as __BaseException } from "./STSServiceException"; +export const CredentialsFilterSensitiveLog = (obj) => ({ + ...obj, + ...(obj.SecretAccessKey && { SecretAccessKey: SENSITIVE_STRING }), +}); +export const AssumeRoleResponseFilterSensitiveLog = (obj) => ({ + ...obj, + ...(obj.Credentials && { Credentials: CredentialsFilterSensitiveLog(obj.Credentials) }), +}); +export class ExpiredTokenException extends __BaseException { + name = "ExpiredTokenException"; + $fault = "client"; + constructor(opts) { + super({ + name: "ExpiredTokenException", + $fault: "client", + ...opts, + }); + Object.setPrototypeOf(this, ExpiredTokenException.prototype); + } +} +export class MalformedPolicyDocumentException extends __BaseException { + name = "MalformedPolicyDocumentException"; + $fault = "client"; + constructor(opts) { + super({ + name: "MalformedPolicyDocumentException", + $fault: "client", + ...opts, + }); + 
Object.setPrototypeOf(this, MalformedPolicyDocumentException.prototype); + } +} +export class PackedPolicyTooLargeException extends __BaseException { + name = "PackedPolicyTooLargeException"; + $fault = "client"; + constructor(opts) { + super({ + name: "PackedPolicyTooLargeException", + $fault: "client", + ...opts, + }); + Object.setPrototypeOf(this, PackedPolicyTooLargeException.prototype); + } +} +export class RegionDisabledException extends __BaseException { + name = "RegionDisabledException"; + $fault = "client"; + constructor(opts) { + super({ + name: "RegionDisabledException", + $fault: "client", + ...opts, + }); + Object.setPrototypeOf(this, RegionDisabledException.prototype); + } +} +export class IDPRejectedClaimException extends __BaseException { + name = "IDPRejectedClaimException"; + $fault = "client"; + constructor(opts) { + super({ + name: "IDPRejectedClaimException", + $fault: "client", + ...opts, + }); + Object.setPrototypeOf(this, IDPRejectedClaimException.prototype); + } +} +export class InvalidIdentityTokenException extends __BaseException { + name = "InvalidIdentityTokenException"; + $fault = "client"; + constructor(opts) { + super({ + name: "InvalidIdentityTokenException", + $fault: "client", + ...opts, + }); + Object.setPrototypeOf(this, InvalidIdentityTokenException.prototype); + } +} +export const AssumeRoleWithWebIdentityRequestFilterSensitiveLog = (obj) => ({ + ...obj, + ...(obj.WebIdentityToken && { WebIdentityToken: SENSITIVE_STRING }), +}); +export const AssumeRoleWithWebIdentityResponseFilterSensitiveLog = (obj) => ({ + ...obj, + ...(obj.Credentials && { Credentials: CredentialsFilterSensitiveLog(obj.Credentials) }), +}); +export class IDPCommunicationErrorException extends __BaseException { + name = "IDPCommunicationErrorException"; + $fault = "client"; + constructor(opts) { + super({ + name: "IDPCommunicationErrorException", + $fault: "client", + ...opts, + }); + Object.setPrototypeOf(this, IDPCommunicationErrorException.prototype); + 
} +} diff --git a/amplify/functions/fetchDocuments/node_modules/@aws-sdk/nested-clients/dist-es/submodules/sts/protocols/Aws_query.js b/amplify/functions/fetchDocuments/node_modules/@aws-sdk/nested-clients/dist-es/submodules/sts/protocols/Aws_query.js new file mode 100644 index 0000000..a98e41a --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@aws-sdk/nested-clients/dist-es/submodules/sts/protocols/Aws_query.js @@ -0,0 +1,528 @@ +import { parseXmlBody as parseBody, parseXmlErrorBody as parseErrorBody } from "@aws-sdk/core"; +import { HttpRequest as __HttpRequest } from "@smithy/protocol-http"; +import { collectBody, decorateServiceException as __decorateServiceException, expectNonNull as __expectNonNull, expectString as __expectString, extendedEncodeURIComponent as __extendedEncodeURIComponent, parseRfc3339DateTimeWithOffset as __parseRfc3339DateTimeWithOffset, strictParseInt32 as __strictParseInt32, withBaseException, } from "@smithy/smithy-client"; +import { ExpiredTokenException, IDPCommunicationErrorException, IDPRejectedClaimException, InvalidIdentityTokenException, MalformedPolicyDocumentException, PackedPolicyTooLargeException, RegionDisabledException, } from "../models/models_0"; +import { STSServiceException as __BaseException } from "../models/STSServiceException"; +export const se_AssumeRoleCommand = async (input, context) => { + const headers = SHARED_HEADERS; + let body; + body = buildFormUrlencodedString({ + ...se_AssumeRoleRequest(input, context), + [_A]: _AR, + [_V]: _, + }); + return buildHttpRpcRequest(context, headers, "/", undefined, body); +}; +export const se_AssumeRoleWithWebIdentityCommand = async (input, context) => { + const headers = SHARED_HEADERS; + let body; + body = buildFormUrlencodedString({ + ...se_AssumeRoleWithWebIdentityRequest(input, context), + [_A]: _ARWWI, + [_V]: _, + }); + return buildHttpRpcRequest(context, headers, "/", undefined, body); +}; +export const de_AssumeRoleCommand = async (output, context) => 
{ + if (output.statusCode >= 300) { + return de_CommandError(output, context); + } + const data = await parseBody(output.body, context); + let contents = {}; + contents = de_AssumeRoleResponse(data.AssumeRoleResult, context); + const response = { + $metadata: deserializeMetadata(output), + ...contents, + }; + return response; +}; +export const de_AssumeRoleWithWebIdentityCommand = async (output, context) => { + if (output.statusCode >= 300) { + return de_CommandError(output, context); + } + const data = await parseBody(output.body, context); + let contents = {}; + contents = de_AssumeRoleWithWebIdentityResponse(data.AssumeRoleWithWebIdentityResult, context); + const response = { + $metadata: deserializeMetadata(output), + ...contents, + }; + return response; +}; +const de_CommandError = async (output, context) => { + const parsedOutput = { + ...output, + body: await parseErrorBody(output.body, context), + }; + const errorCode = loadQueryErrorCode(output, parsedOutput.body); + switch (errorCode) { + case "ExpiredTokenException": + case "com.amazonaws.sts#ExpiredTokenException": + throw await de_ExpiredTokenExceptionRes(parsedOutput, context); + case "MalformedPolicyDocument": + case "com.amazonaws.sts#MalformedPolicyDocumentException": + throw await de_MalformedPolicyDocumentExceptionRes(parsedOutput, context); + case "PackedPolicyTooLarge": + case "com.amazonaws.sts#PackedPolicyTooLargeException": + throw await de_PackedPolicyTooLargeExceptionRes(parsedOutput, context); + case "RegionDisabledException": + case "com.amazonaws.sts#RegionDisabledException": + throw await de_RegionDisabledExceptionRes(parsedOutput, context); + case "IDPCommunicationError": + case "com.amazonaws.sts#IDPCommunicationErrorException": + throw await de_IDPCommunicationErrorExceptionRes(parsedOutput, context); + case "IDPRejectedClaim": + case "com.amazonaws.sts#IDPRejectedClaimException": + throw await de_IDPRejectedClaimExceptionRes(parsedOutput, context); + case "InvalidIdentityToken": + 
case "com.amazonaws.sts#InvalidIdentityTokenException": + throw await de_InvalidIdentityTokenExceptionRes(parsedOutput, context); + default: + const parsedBody = parsedOutput.body; + return throwDefaultError({ + output, + parsedBody: parsedBody.Error, + errorCode, + }); + } +}; +const de_ExpiredTokenExceptionRes = async (parsedOutput, context) => { + const body = parsedOutput.body; + const deserialized = de_ExpiredTokenException(body.Error, context); + const exception = new ExpiredTokenException({ + $metadata: deserializeMetadata(parsedOutput), + ...deserialized, + }); + return __decorateServiceException(exception, body); +}; +const de_IDPCommunicationErrorExceptionRes = async (parsedOutput, context) => { + const body = parsedOutput.body; + const deserialized = de_IDPCommunicationErrorException(body.Error, context); + const exception = new IDPCommunicationErrorException({ + $metadata: deserializeMetadata(parsedOutput), + ...deserialized, + }); + return __decorateServiceException(exception, body); +}; +const de_IDPRejectedClaimExceptionRes = async (parsedOutput, context) => { + const body = parsedOutput.body; + const deserialized = de_IDPRejectedClaimException(body.Error, context); + const exception = new IDPRejectedClaimException({ + $metadata: deserializeMetadata(parsedOutput), + ...deserialized, + }); + return __decorateServiceException(exception, body); +}; +const de_InvalidIdentityTokenExceptionRes = async (parsedOutput, context) => { + const body = parsedOutput.body; + const deserialized = de_InvalidIdentityTokenException(body.Error, context); + const exception = new InvalidIdentityTokenException({ + $metadata: deserializeMetadata(parsedOutput), + ...deserialized, + }); + return __decorateServiceException(exception, body); +}; +const de_MalformedPolicyDocumentExceptionRes = async (parsedOutput, context) => { + const body = parsedOutput.body; + const deserialized = de_MalformedPolicyDocumentException(body.Error, context); + const exception = new 
MalformedPolicyDocumentException({ + $metadata: deserializeMetadata(parsedOutput), + ...deserialized, + }); + return __decorateServiceException(exception, body); +}; +const de_PackedPolicyTooLargeExceptionRes = async (parsedOutput, context) => { + const body = parsedOutput.body; + const deserialized = de_PackedPolicyTooLargeException(body.Error, context); + const exception = new PackedPolicyTooLargeException({ + $metadata: deserializeMetadata(parsedOutput), + ...deserialized, + }); + return __decorateServiceException(exception, body); +}; +const de_RegionDisabledExceptionRes = async (parsedOutput, context) => { + const body = parsedOutput.body; + const deserialized = de_RegionDisabledException(body.Error, context); + const exception = new RegionDisabledException({ + $metadata: deserializeMetadata(parsedOutput), + ...deserialized, + }); + return __decorateServiceException(exception, body); +}; +const se_AssumeRoleRequest = (input, context) => { + const entries = {}; + if (input[_RA] != null) { + entries[_RA] = input[_RA]; + } + if (input[_RSN] != null) { + entries[_RSN] = input[_RSN]; + } + if (input[_PA] != null) { + const memberEntries = se_policyDescriptorListType(input[_PA], context); + if (input[_PA]?.length === 0) { + entries.PolicyArns = []; + } + Object.entries(memberEntries).forEach(([key, value]) => { + const loc = `PolicyArns.${key}`; + entries[loc] = value; + }); + } + if (input[_P] != null) { + entries[_P] = input[_P]; + } + if (input[_DS] != null) { + entries[_DS] = input[_DS]; + } + if (input[_T] != null) { + const memberEntries = se_tagListType(input[_T], context); + if (input[_T]?.length === 0) { + entries.Tags = []; + } + Object.entries(memberEntries).forEach(([key, value]) => { + const loc = `Tags.${key}`; + entries[loc] = value; + }); + } + if (input[_TTK] != null) { + const memberEntries = se_tagKeyListType(input[_TTK], context); + if (input[_TTK]?.length === 0) { + entries.TransitiveTagKeys = []; + } + 
Object.entries(memberEntries).forEach(([key, value]) => { + const loc = `TransitiveTagKeys.${key}`; + entries[loc] = value; + }); + } + if (input[_EI] != null) { + entries[_EI] = input[_EI]; + } + if (input[_SN] != null) { + entries[_SN] = input[_SN]; + } + if (input[_TC] != null) { + entries[_TC] = input[_TC]; + } + if (input[_SI] != null) { + entries[_SI] = input[_SI]; + } + if (input[_PC] != null) { + const memberEntries = se_ProvidedContextsListType(input[_PC], context); + if (input[_PC]?.length === 0) { + entries.ProvidedContexts = []; + } + Object.entries(memberEntries).forEach(([key, value]) => { + const loc = `ProvidedContexts.${key}`; + entries[loc] = value; + }); + } + return entries; +}; +const se_AssumeRoleWithWebIdentityRequest = (input, context) => { + const entries = {}; + if (input[_RA] != null) { + entries[_RA] = input[_RA]; + } + if (input[_RSN] != null) { + entries[_RSN] = input[_RSN]; + } + if (input[_WIT] != null) { + entries[_WIT] = input[_WIT]; + } + if (input[_PI] != null) { + entries[_PI] = input[_PI]; + } + if (input[_PA] != null) { + const memberEntries = se_policyDescriptorListType(input[_PA], context); + if (input[_PA]?.length === 0) { + entries.PolicyArns = []; + } + Object.entries(memberEntries).forEach(([key, value]) => { + const loc = `PolicyArns.${key}`; + entries[loc] = value; + }); + } + if (input[_P] != null) { + entries[_P] = input[_P]; + } + if (input[_DS] != null) { + entries[_DS] = input[_DS]; + } + return entries; +}; +const se_policyDescriptorListType = (input, context) => { + const entries = {}; + let counter = 1; + for (const entry of input) { + if (entry === null) { + continue; + } + const memberEntries = se_PolicyDescriptorType(entry, context); + Object.entries(memberEntries).forEach(([key, value]) => { + entries[`member.${counter}.${key}`] = value; + }); + counter++; + } + return entries; +}; +const se_PolicyDescriptorType = (input, context) => { + const entries = {}; + if (input[_a] != null) { + entries[_a] = 
input[_a]; + } + return entries; +}; +const se_ProvidedContext = (input, context) => { + const entries = {}; + if (input[_PAr] != null) { + entries[_PAr] = input[_PAr]; + } + if (input[_CA] != null) { + entries[_CA] = input[_CA]; + } + return entries; +}; +const se_ProvidedContextsListType = (input, context) => { + const entries = {}; + let counter = 1; + for (const entry of input) { + if (entry === null) { + continue; + } + const memberEntries = se_ProvidedContext(entry, context); + Object.entries(memberEntries).forEach(([key, value]) => { + entries[`member.${counter}.${key}`] = value; + }); + counter++; + } + return entries; +}; +const se_Tag = (input, context) => { + const entries = {}; + if (input[_K] != null) { + entries[_K] = input[_K]; + } + if (input[_Va] != null) { + entries[_Va] = input[_Va]; + } + return entries; +}; +const se_tagKeyListType = (input, context) => { + const entries = {}; + let counter = 1; + for (const entry of input) { + if (entry === null) { + continue; + } + entries[`member.${counter}`] = entry; + counter++; + } + return entries; +}; +const se_tagListType = (input, context) => { + const entries = {}; + let counter = 1; + for (const entry of input) { + if (entry === null) { + continue; + } + const memberEntries = se_Tag(entry, context); + Object.entries(memberEntries).forEach(([key, value]) => { + entries[`member.${counter}.${key}`] = value; + }); + counter++; + } + return entries; +}; +const de_AssumedRoleUser = (output, context) => { + const contents = {}; + if (output[_ARI] != null) { + contents[_ARI] = __expectString(output[_ARI]); + } + if (output[_Ar] != null) { + contents[_Ar] = __expectString(output[_Ar]); + } + return contents; +}; +const de_AssumeRoleResponse = (output, context) => { + const contents = {}; + if (output[_C] != null) { + contents[_C] = de_Credentials(output[_C], context); + } + if (output[_ARU] != null) { + contents[_ARU] = de_AssumedRoleUser(output[_ARU], context); + } + if (output[_PPS] != null) { + 
contents[_PPS] = __strictParseInt32(output[_PPS]); + } + if (output[_SI] != null) { + contents[_SI] = __expectString(output[_SI]); + } + return contents; +}; +const de_AssumeRoleWithWebIdentityResponse = (output, context) => { + const contents = {}; + if (output[_C] != null) { + contents[_C] = de_Credentials(output[_C], context); + } + if (output[_SFWIT] != null) { + contents[_SFWIT] = __expectString(output[_SFWIT]); + } + if (output[_ARU] != null) { + contents[_ARU] = de_AssumedRoleUser(output[_ARU], context); + } + if (output[_PPS] != null) { + contents[_PPS] = __strictParseInt32(output[_PPS]); + } + if (output[_Pr] != null) { + contents[_Pr] = __expectString(output[_Pr]); + } + if (output[_Au] != null) { + contents[_Au] = __expectString(output[_Au]); + } + if (output[_SI] != null) { + contents[_SI] = __expectString(output[_SI]); + } + return contents; +}; +const de_Credentials = (output, context) => { + const contents = {}; + if (output[_AKI] != null) { + contents[_AKI] = __expectString(output[_AKI]); + } + if (output[_SAK] != null) { + contents[_SAK] = __expectString(output[_SAK]); + } + if (output[_ST] != null) { + contents[_ST] = __expectString(output[_ST]); + } + if (output[_E] != null) { + contents[_E] = __expectNonNull(__parseRfc3339DateTimeWithOffset(output[_E])); + } + return contents; +}; +const de_ExpiredTokenException = (output, context) => { + const contents = {}; + if (output[_m] != null) { + contents[_m] = __expectString(output[_m]); + } + return contents; +}; +const de_IDPCommunicationErrorException = (output, context) => { + const contents = {}; + if (output[_m] != null) { + contents[_m] = __expectString(output[_m]); + } + return contents; +}; +const de_IDPRejectedClaimException = (output, context) => { + const contents = {}; + if (output[_m] != null) { + contents[_m] = __expectString(output[_m]); + } + return contents; +}; +const de_InvalidIdentityTokenException = (output, context) => { + const contents = {}; + if (output[_m] != null) { + 
contents[_m] = __expectString(output[_m]); + } + return contents; +}; +const de_MalformedPolicyDocumentException = (output, context) => { + const contents = {}; + if (output[_m] != null) { + contents[_m] = __expectString(output[_m]); + } + return contents; +}; +const de_PackedPolicyTooLargeException = (output, context) => { + const contents = {}; + if (output[_m] != null) { + contents[_m] = __expectString(output[_m]); + } + return contents; +}; +const de_RegionDisabledException = (output, context) => { + const contents = {}; + if (output[_m] != null) { + contents[_m] = __expectString(output[_m]); + } + return contents; +}; +const deserializeMetadata = (output) => ({ + httpStatusCode: output.statusCode, + requestId: output.headers["x-amzn-requestid"] ?? output.headers["x-amzn-request-id"] ?? output.headers["x-amz-request-id"], + extendedRequestId: output.headers["x-amz-id-2"], + cfId: output.headers["x-amz-cf-id"], +}); +const collectBodyString = (streamBody, context) => collectBody(streamBody, context).then((body) => context.utf8Encoder(body)); +const throwDefaultError = withBaseException(__BaseException); +const buildHttpRpcRequest = async (context, headers, path, resolvedHostname, body) => { + const { hostname, protocol = "https", port, path: basePath } = await context.endpoint(); + const contents = { + protocol, + hostname, + port, + method: "POST", + path: basePath.endsWith("/") ? 
basePath.slice(0, -1) + path : basePath + path, + headers, + }; + if (resolvedHostname !== undefined) { + contents.hostname = resolvedHostname; + } + if (body !== undefined) { + contents.body = body; + } + return new __HttpRequest(contents); +}; +const SHARED_HEADERS = { + "content-type": "application/x-www-form-urlencoded", +}; +const _ = "2011-06-15"; +const _A = "Action"; +const _AKI = "AccessKeyId"; +const _AR = "AssumeRole"; +const _ARI = "AssumedRoleId"; +const _ARU = "AssumedRoleUser"; +const _ARWWI = "AssumeRoleWithWebIdentity"; +const _Ar = "Arn"; +const _Au = "Audience"; +const _C = "Credentials"; +const _CA = "ContextAssertion"; +const _DS = "DurationSeconds"; +const _E = "Expiration"; +const _EI = "ExternalId"; +const _K = "Key"; +const _P = "Policy"; +const _PA = "PolicyArns"; +const _PAr = "ProviderArn"; +const _PC = "ProvidedContexts"; +const _PI = "ProviderId"; +const _PPS = "PackedPolicySize"; +const _Pr = "Provider"; +const _RA = "RoleArn"; +const _RSN = "RoleSessionName"; +const _SAK = "SecretAccessKey"; +const _SFWIT = "SubjectFromWebIdentityToken"; +const _SI = "SourceIdentity"; +const _SN = "SerialNumber"; +const _ST = "SessionToken"; +const _T = "Tags"; +const _TC = "TokenCode"; +const _TTK = "TransitiveTagKeys"; +const _V = "Version"; +const _Va = "Value"; +const _WIT = "WebIdentityToken"; +const _a = "arn"; +const _m = "message"; +const buildFormUrlencodedString = (formEntries) => Object.entries(formEntries) + .map(([key, value]) => __extendedEncodeURIComponent(key) + "=" + __extendedEncodeURIComponent(value)) + .join("&"); +const loadQueryErrorCode = (output, data) => { + if (data.Error?.Code !== undefined) { + return data.Error.Code; + } + if (output.statusCode == 404) { + return "NotFound"; + } +}; diff --git a/amplify/functions/fetchDocuments/node_modules/@aws-sdk/nested-clients/dist-es/submodules/sts/runtimeConfig.browser.js 
b/amplify/functions/fetchDocuments/node_modules/@aws-sdk/nested-clients/dist-es/submodules/sts/runtimeConfig.browser.js new file mode 100644 index 0000000..f45dbd3 --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@aws-sdk/nested-clients/dist-es/submodules/sts/runtimeConfig.browser.js @@ -0,0 +1,34 @@ +import packageInfo from "../../../package.json"; +import { Sha256 } from "@aws-crypto/sha256-browser"; +import { createDefaultUserAgentProvider } from "@aws-sdk/util-user-agent-browser"; +import { DEFAULT_USE_DUALSTACK_ENDPOINT, DEFAULT_USE_FIPS_ENDPOINT } from "@smithy/config-resolver"; +import { FetchHttpHandler as RequestHandler, streamCollector } from "@smithy/fetch-http-handler"; +import { invalidProvider } from "@smithy/invalid-dependency"; +import { calculateBodyLength } from "@smithy/util-body-length-browser"; +import { DEFAULT_MAX_ATTEMPTS, DEFAULT_RETRY_MODE } from "@smithy/util-retry"; +import { getRuntimeConfig as getSharedRuntimeConfig } from "./runtimeConfig.shared"; +import { loadConfigsForDefaultMode } from "@smithy/smithy-client"; +import { resolveDefaultsModeConfig } from "@smithy/util-defaults-mode-browser"; +export const getRuntimeConfig = (config) => { + const defaultsMode = resolveDefaultsModeConfig(config); + const defaultConfigProvider = () => defaultsMode().then(loadConfigsForDefaultMode); + const clientSharedValues = getSharedRuntimeConfig(config); + return { + ...clientSharedValues, + ...config, + runtime: "browser", + defaultsMode, + bodyLengthChecker: config?.bodyLengthChecker ?? calculateBodyLength, + credentialDefaultProvider: config?.credentialDefaultProvider ?? ((_) => () => Promise.reject(new Error("Credential is missing"))), + defaultUserAgentProvider: config?.defaultUserAgentProvider ?? + createDefaultUserAgentProvider({ serviceId: clientSharedValues.serviceId, clientVersion: packageInfo.version }), + maxAttempts: config?.maxAttempts ?? DEFAULT_MAX_ATTEMPTS, + region: config?.region ?? 
invalidProvider("Region is missing"), + requestHandler: RequestHandler.create(config?.requestHandler ?? defaultConfigProvider), + retryMode: config?.retryMode ?? (async () => (await defaultConfigProvider()).retryMode || DEFAULT_RETRY_MODE), + sha256: config?.sha256 ?? Sha256, + streamCollector: config?.streamCollector ?? streamCollector, + useDualstackEndpoint: config?.useDualstackEndpoint ?? (() => Promise.resolve(DEFAULT_USE_DUALSTACK_ENDPOINT)), + useFipsEndpoint: config?.useFipsEndpoint ?? (() => Promise.resolve(DEFAULT_USE_FIPS_ENDPOINT)), + }; +}; diff --git a/amplify/functions/fetchDocuments/node_modules/@aws-sdk/nested-clients/dist-es/submodules/sts/runtimeConfig.js b/amplify/functions/fetchDocuments/node_modules/@aws-sdk/nested-clients/dist-es/submodules/sts/runtimeConfig.js new file mode 100644 index 0000000..6ac2412 --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@aws-sdk/nested-clients/dist-es/submodules/sts/runtimeConfig.js @@ -0,0 +1,60 @@ +import packageInfo from "../../../package.json"; +import { AwsSdkSigV4Signer, NODE_AUTH_SCHEME_PREFERENCE_OPTIONS, emitWarningIfUnsupportedVersion as awsCheckVersion, } from "@aws-sdk/core"; +import { NODE_APP_ID_CONFIG_OPTIONS, createDefaultUserAgentProvider } from "@aws-sdk/util-user-agent-node"; +import { NODE_REGION_CONFIG_FILE_OPTIONS, NODE_REGION_CONFIG_OPTIONS, NODE_USE_DUALSTACK_ENDPOINT_CONFIG_OPTIONS, NODE_USE_FIPS_ENDPOINT_CONFIG_OPTIONS, } from "@smithy/config-resolver"; +import { NoAuthSigner } from "@smithy/core"; +import { Hash } from "@smithy/hash-node"; +import { NODE_MAX_ATTEMPT_CONFIG_OPTIONS, NODE_RETRY_MODE_CONFIG_OPTIONS } from "@smithy/middleware-retry"; +import { loadConfig as loadNodeConfig } from "@smithy/node-config-provider"; +import { NodeHttpHandler as RequestHandler, streamCollector } from "@smithy/node-http-handler"; +import { calculateBodyLength } from "@smithy/util-body-length-node"; +import { DEFAULT_RETRY_MODE } from "@smithy/util-retry"; +import { 
getRuntimeConfig as getSharedRuntimeConfig } from "./runtimeConfig.shared"; +import { loadConfigsForDefaultMode } from "@smithy/smithy-client"; +import { resolveDefaultsModeConfig } from "@smithy/util-defaults-mode-node"; +import { emitWarningIfUnsupportedVersion } from "@smithy/smithy-client"; +export const getRuntimeConfig = (config) => { + emitWarningIfUnsupportedVersion(process.version); + const defaultsMode = resolveDefaultsModeConfig(config); + const defaultConfigProvider = () => defaultsMode().then(loadConfigsForDefaultMode); + const clientSharedValues = getSharedRuntimeConfig(config); + awsCheckVersion(process.version); + const profileConfig = { profile: config?.profile }; + return { + ...clientSharedValues, + ...config, + runtime: "node", + defaultsMode, + authSchemePreference: config?.authSchemePreference ?? loadNodeConfig(NODE_AUTH_SCHEME_PREFERENCE_OPTIONS, profileConfig), + bodyLengthChecker: config?.bodyLengthChecker ?? calculateBodyLength, + defaultUserAgentProvider: config?.defaultUserAgentProvider ?? + createDefaultUserAgentProvider({ serviceId: clientSharedValues.serviceId, clientVersion: packageInfo.version }), + httpAuthSchemes: config?.httpAuthSchemes ?? [ + { + schemeId: "aws.auth#sigv4", + identityProvider: (ipc) => ipc.getIdentityProvider("aws.auth#sigv4") || + (async (idProps) => await config.credentialDefaultProvider(idProps?.__config || {})()), + signer: new AwsSdkSigV4Signer(), + }, + { + schemeId: "smithy.api#noAuth", + identityProvider: (ipc) => ipc.getIdentityProvider("smithy.api#noAuth") || (async () => ({})), + signer: new NoAuthSigner(), + }, + ], + maxAttempts: config?.maxAttempts ?? loadNodeConfig(NODE_MAX_ATTEMPT_CONFIG_OPTIONS, config), + region: config?.region ?? + loadNodeConfig(NODE_REGION_CONFIG_OPTIONS, { ...NODE_REGION_CONFIG_FILE_OPTIONS, ...profileConfig }), + requestHandler: RequestHandler.create(config?.requestHandler ?? defaultConfigProvider), + retryMode: config?.retryMode ?? 
+ loadNodeConfig({ + ...NODE_RETRY_MODE_CONFIG_OPTIONS, + default: async () => (await defaultConfigProvider()).retryMode || DEFAULT_RETRY_MODE, + }, config), + sha256: config?.sha256 ?? Hash.bind(null, "sha256"), + streamCollector: config?.streamCollector ?? streamCollector, + useDualstackEndpoint: config?.useDualstackEndpoint ?? loadNodeConfig(NODE_USE_DUALSTACK_ENDPOINT_CONFIG_OPTIONS, profileConfig), + useFipsEndpoint: config?.useFipsEndpoint ?? loadNodeConfig(NODE_USE_FIPS_ENDPOINT_CONFIG_OPTIONS, profileConfig), + userAgentAppId: config?.userAgentAppId ?? loadNodeConfig(NODE_APP_ID_CONFIG_OPTIONS, profileConfig), + }; +}; diff --git a/amplify/functions/fetchDocuments/node_modules/@aws-sdk/nested-clients/dist-es/submodules/sts/runtimeConfig.native.js b/amplify/functions/fetchDocuments/node_modules/@aws-sdk/nested-clients/dist-es/submodules/sts/runtimeConfig.native.js new file mode 100644 index 0000000..0b54695 --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@aws-sdk/nested-clients/dist-es/submodules/sts/runtimeConfig.native.js @@ -0,0 +1,11 @@ +import { Sha256 } from "@aws-crypto/sha256-js"; +import { getRuntimeConfig as getBrowserRuntimeConfig } from "./runtimeConfig.browser"; +export const getRuntimeConfig = (config) => { + const browserDefaults = getBrowserRuntimeConfig(config); + return { + ...browserDefaults, + ...config, + runtime: "react-native", + sha256: config?.sha256 ?? 
Sha256, + }; +}; diff --git a/amplify/functions/fetchDocuments/node_modules/@aws-sdk/nested-clients/dist-es/submodules/sts/runtimeConfig.shared.js b/amplify/functions/fetchDocuments/node_modules/@aws-sdk/nested-clients/dist-es/submodules/sts/runtimeConfig.shared.js new file mode 100644 index 0000000..5c6df20 --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@aws-sdk/nested-clients/dist-es/submodules/sts/runtimeConfig.shared.js @@ -0,0 +1,36 @@ +import { AwsSdkSigV4Signer } from "@aws-sdk/core"; +import { NoAuthSigner } from "@smithy/core"; +import { NoOpLogger } from "@smithy/smithy-client"; +import { parseUrl } from "@smithy/url-parser"; +import { fromBase64, toBase64 } from "@smithy/util-base64"; +import { fromUtf8, toUtf8 } from "@smithy/util-utf8"; +import { defaultSTSHttpAuthSchemeProvider } from "./auth/httpAuthSchemeProvider"; +import { defaultEndpointResolver } from "./endpoint/endpointResolver"; +export const getRuntimeConfig = (config) => { + return { + apiVersion: "2011-06-15", + base64Decoder: config?.base64Decoder ?? fromBase64, + base64Encoder: config?.base64Encoder ?? toBase64, + disableHostPrefix: config?.disableHostPrefix ?? false, + endpointProvider: config?.endpointProvider ?? defaultEndpointResolver, + extensions: config?.extensions ?? [], + httpAuthSchemeProvider: config?.httpAuthSchemeProvider ?? defaultSTSHttpAuthSchemeProvider, + httpAuthSchemes: config?.httpAuthSchemes ?? [ + { + schemeId: "aws.auth#sigv4", + identityProvider: (ipc) => ipc.getIdentityProvider("aws.auth#sigv4"), + signer: new AwsSdkSigV4Signer(), + }, + { + schemeId: "smithy.api#noAuth", + identityProvider: (ipc) => ipc.getIdentityProvider("smithy.api#noAuth") || (async () => ({})), + signer: new NoAuthSigner(), + }, + ], + logger: config?.logger ?? new NoOpLogger(), + serviceId: config?.serviceId ?? "STS", + urlParser: config?.urlParser ?? parseUrl, + utf8Decoder: config?.utf8Decoder ?? fromUtf8, + utf8Encoder: config?.utf8Encoder ?? 
toUtf8, + }; +}; diff --git a/amplify/functions/fetchDocuments/node_modules/@aws-sdk/nested-clients/dist-es/submodules/sts/runtimeExtensions.js b/amplify/functions/fetchDocuments/node_modules/@aws-sdk/nested-clients/dist-es/submodules/sts/runtimeExtensions.js new file mode 100644 index 0000000..5b29695 --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@aws-sdk/nested-clients/dist-es/submodules/sts/runtimeExtensions.js @@ -0,0 +1,9 @@ +import { getAwsRegionExtensionConfiguration, resolveAwsRegionExtensionConfiguration, } from "@aws-sdk/region-config-resolver"; +import { getHttpHandlerExtensionConfiguration, resolveHttpHandlerRuntimeConfig } from "@smithy/protocol-http"; +import { getDefaultExtensionConfiguration, resolveDefaultRuntimeConfig } from "@smithy/smithy-client"; +import { getHttpAuthExtensionConfiguration, resolveHttpAuthRuntimeConfig } from "./auth/httpAuthExtensionConfiguration"; +export const resolveRuntimeExtensions = (runtimeConfig, extensions) => { + const extensionConfiguration = Object.assign(getAwsRegionExtensionConfiguration(runtimeConfig), getDefaultExtensionConfiguration(runtimeConfig), getHttpHandlerExtensionConfiguration(runtimeConfig), getHttpAuthExtensionConfiguration(runtimeConfig)); + extensions.forEach((extension) => extension.configure(extensionConfiguration)); + return Object.assign(runtimeConfig, resolveAwsRegionExtensionConfiguration(extensionConfiguration), resolveDefaultRuntimeConfig(extensionConfiguration), resolveHttpHandlerRuntimeConfig(extensionConfiguration), resolveHttpAuthRuntimeConfig(extensionConfiguration)); +}; diff --git a/amplify/functions/fetchDocuments/node_modules/@aws-sdk/nested-clients/dist-types/index.d.ts b/amplify/functions/fetchDocuments/node_modules/@aws-sdk/nested-clients/dist-types/index.d.ts new file mode 100644 index 0000000..9d99a73 --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@aws-sdk/nested-clients/dist-types/index.d.ts @@ -0,0 +1,7 @@ +/** + * This package exports 
nothing at the root. + * Use submodules e.g. \@aws-sdk/nested-clients/client-sts. + * + * @internal + */ +export {}; diff --git a/amplify/functions/fetchDocuments/node_modules/@aws-sdk/nested-clients/dist-types/submodules/sso-oidc/SSOOIDC.d.ts b/amplify/functions/fetchDocuments/node_modules/@aws-sdk/nested-clients/dist-types/submodules/sso-oidc/SSOOIDC.d.ts new file mode 100644 index 0000000..ebec5e6 --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@aws-sdk/nested-clients/dist-types/submodules/sso-oidc/SSOOIDC.d.ts @@ -0,0 +1,55 @@ +import { HttpHandlerOptions as __HttpHandlerOptions } from "@smithy/types"; +import { CreateTokenCommandInput, CreateTokenCommandOutput } from "./commands/CreateTokenCommand"; +import { SSOOIDCClient } from "./SSOOIDCClient"; +export interface SSOOIDC { + /** + * @see {@link CreateTokenCommand} + */ + createToken(args: CreateTokenCommandInput, options?: __HttpHandlerOptions): Promise; + createToken(args: CreateTokenCommandInput, cb: (err: any, data?: CreateTokenCommandOutput) => void): void; + createToken(args: CreateTokenCommandInput, options: __HttpHandlerOptions, cb: (err: any, data?: CreateTokenCommandOutput) => void): void; +} +/** + *

IAM Identity Center OpenID Connect (OIDC) is a web service that enables a client (such as CLI or a + * native application) to register with IAM Identity Center. The service also enables the client to fetch the + * user’s access token upon successful authentication and authorization with IAM Identity Center.

+ *

+ * API namespaces + *

+ *

IAM Identity Center uses the sso and identitystore API namespaces. IAM Identity Center + * OpenID Connect uses the sso-oidc namespace.

+ *

+ * Considerations for using this guide + *

+ *

Before you begin using this guide, we recommend that you first review the following + * important information about how the IAM Identity Center OIDC service works.

+ *
    + *
  • + *

    The IAM Identity Center OIDC service currently implements only the portions of the OAuth 2.0 Device + * Authorization Grant standard (https://tools.ietf.org/html/rfc8628) that are necessary to enable single + * sign-on authentication with the CLI.

    + *
  • + *
  • + *

    With older versions of the CLI, the service only emits OIDC access tokens, so to + * obtain a new token, users must explicitly re-authenticate. To access the OIDC flow that + * supports token refresh and doesn’t require re-authentication, update to the latest CLI + * version (1.27.10 for CLI V1 and 2.9.0 for CLI V2) with support for OIDC token refresh + * and configurable IAM Identity Center session durations. For more information, see Configure Amazon Web Services access portal session duration .

    + *
  • + *
  • + *

    The access tokens provided by this service grant access to all Amazon Web Services account + * entitlements assigned to an IAM Identity Center user, not just a particular application.

    + *
  • + *
  • + *

    The documentation in this guide does not describe the mechanism to convert the access + * token into Amazon Web Services Auth (“sigv4”) credentials for use with IAM-protected Amazon Web Services service + * endpoints. For more information, see GetRoleCredentials in the IAM Identity Center Portal API Reference + * Guide.

    + *
  • + *
+ *

For general information about IAM Identity Center, see What is + * IAM Identity Center? in the IAM Identity Center User Guide.

+ * @public + */ +export declare class SSOOIDC extends SSOOIDCClient implements SSOOIDC { +} diff --git a/amplify/functions/fetchDocuments/node_modules/@aws-sdk/nested-clients/dist-types/submodules/sso-oidc/SSOOIDCClient.d.ts b/amplify/functions/fetchDocuments/node_modules/@aws-sdk/nested-clients/dist-types/submodules/sso-oidc/SSOOIDCClient.d.ts new file mode 100644 index 0000000..5490889 --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@aws-sdk/nested-clients/dist-types/submodules/sso-oidc/SSOOIDCClient.d.ts @@ -0,0 +1,220 @@ +import { HostHeaderInputConfig, HostHeaderResolvedConfig } from "@aws-sdk/middleware-host-header"; +import { UserAgentInputConfig, UserAgentResolvedConfig } from "@aws-sdk/middleware-user-agent"; +import { RegionInputConfig, RegionResolvedConfig } from "@smithy/config-resolver"; +import { EndpointInputConfig, EndpointResolvedConfig } from "@smithy/middleware-endpoint"; +import { RetryInputConfig, RetryResolvedConfig } from "@smithy/middleware-retry"; +import { HttpHandlerUserInput as __HttpHandlerUserInput } from "@smithy/protocol-http"; +import { Client as __Client, DefaultsMode as __DefaultsMode, SmithyConfiguration as __SmithyConfiguration, SmithyResolvedConfiguration as __SmithyResolvedConfiguration } from "@smithy/smithy-client"; +import { BodyLengthCalculator as __BodyLengthCalculator, CheckOptionalClientConfig as __CheckOptionalClientConfig, ChecksumConstructor as __ChecksumConstructor, Decoder as __Decoder, Encoder as __Encoder, HashConstructor as __HashConstructor, HttpHandlerOptions as __HttpHandlerOptions, Logger as __Logger, Provider as __Provider, Provider, StreamCollector as __StreamCollector, UrlParser as __UrlParser, UserAgent as __UserAgent } from "@smithy/types"; +import { HttpAuthSchemeInputConfig, HttpAuthSchemeResolvedConfig } from "./auth/httpAuthSchemeProvider"; +import { CreateTokenCommandInput, CreateTokenCommandOutput } from "./commands/CreateTokenCommand"; +import { ClientInputEndpointParameters, 
ClientResolvedEndpointParameters, EndpointParameters } from "./endpoint/EndpointParameters"; +import { RuntimeExtension, RuntimeExtensionsConfig } from "./runtimeExtensions"; +export { __Client }; +/** + * @public + */ +export type ServiceInputTypes = CreateTokenCommandInput; +/** + * @public + */ +export type ServiceOutputTypes = CreateTokenCommandOutput; +/** + * @public + */ +export interface ClientDefaults extends Partial<__SmithyConfiguration<__HttpHandlerOptions>> { + /** + * The HTTP handler to use or its constructor options. Fetch in browser and Https in Nodejs. + */ + requestHandler?: __HttpHandlerUserInput; + /** + * A constructor for a class implementing the {@link @smithy/types#ChecksumConstructor} interface + * that computes the SHA-256 HMAC or checksum of a string or binary buffer. + * @internal + */ + sha256?: __ChecksumConstructor | __HashConstructor; + /** + * The function that will be used to convert strings into HTTP endpoints. + * @internal + */ + urlParser?: __UrlParser; + /** + * A function that can calculate the length of a request body. + * @internal + */ + bodyLengthChecker?: __BodyLengthCalculator; + /** + * A function that converts a stream into an array of bytes. + * @internal + */ + streamCollector?: __StreamCollector; + /** + * The function that will be used to convert a base64-encoded string to a byte array. + * @internal + */ + base64Decoder?: __Decoder; + /** + * The function that will be used to convert binary data to a base64-encoded string. + * @internal + */ + base64Encoder?: __Encoder; + /** + * The function that will be used to convert a UTF8-encoded string to a byte array. + * @internal + */ + utf8Decoder?: __Decoder; + /** + * The function that will be used to convert binary data to a UTF-8 encoded string. + * @internal + */ + utf8Encoder?: __Encoder; + /** + * The runtime environment. 
+ * @internal + */ + runtime?: string; + /** + * Disable dynamically changing the endpoint of the client based on the hostPrefix + * trait of an operation. + */ + disableHostPrefix?: boolean; + /** + * Unique service identifier. + * @internal + */ + serviceId?: string; + /** + * Enables IPv6/IPv4 dualstack endpoint. + */ + useDualstackEndpoint?: boolean | __Provider; + /** + * Enables FIPS compatible endpoints. + */ + useFipsEndpoint?: boolean | __Provider; + /** + * The AWS region to which this client will send requests + */ + region?: string | __Provider; + /** + * Setting a client profile is similar to setting a value for the + * AWS_PROFILE environment variable. Setting a profile on a client + * in code only affects the single client instance, unlike AWS_PROFILE. + * + * When set, and only for environments where an AWS configuration + * file exists, fields configurable by this file will be retrieved + * from the specified profile within that file. + * Conflicting code configuration and environment variables will + * still have higher priority. + * + * For client credential resolution that involves checking the AWS + * configuration file, the client's profile (this value) will be + * used unless a different profile is set in the credential + * provider options. + * + */ + profile?: string; + /** + * The provider populating default tracking information to be sent with `user-agent`, `x-amz-user-agent` header + * @internal + */ + defaultUserAgentProvider?: Provider<__UserAgent>; + /** + * Value for how many times a request will be made at most in case of retry. + */ + maxAttempts?: number | __Provider; + /** + * Specifies which retry algorithm to use. + * @see https://docs.aws.amazon.com/AWSJavaScriptSDK/v3/latest/Package/-smithy-util-retry/Enum/RETRY_MODES/ + * + */ + retryMode?: string | __Provider; + /** + * Optional logger for logging debug/info/warn/error. 
+ */ + logger?: __Logger; + /** + * Optional extensions + */ + extensions?: RuntimeExtension[]; + /** + * The {@link @smithy/smithy-client#DefaultsMode} that will be used to determine how certain default configuration options are resolved in the SDK. + */ + defaultsMode?: __DefaultsMode | __Provider<__DefaultsMode>; +} +/** + * @public + */ +export type SSOOIDCClientConfigType = Partial<__SmithyConfiguration<__HttpHandlerOptions>> & ClientDefaults & UserAgentInputConfig & RetryInputConfig & RegionInputConfig & HostHeaderInputConfig & EndpointInputConfig & HttpAuthSchemeInputConfig & ClientInputEndpointParameters; +/** + * @public + * + * The configuration interface of SSOOIDCClient class constructor that set the region, credentials and other options. + */ +export interface SSOOIDCClientConfig extends SSOOIDCClientConfigType { +} +/** + * @public + */ +export type SSOOIDCClientResolvedConfigType = __SmithyResolvedConfiguration<__HttpHandlerOptions> & Required & RuntimeExtensionsConfig & UserAgentResolvedConfig & RetryResolvedConfig & RegionResolvedConfig & HostHeaderResolvedConfig & EndpointResolvedConfig & HttpAuthSchemeResolvedConfig & ClientResolvedEndpointParameters; +/** + * @public + * + * The resolved configuration interface of SSOOIDCClient class. This is resolved and normalized from the {@link SSOOIDCClientConfig | constructor configuration interface}. + */ +export interface SSOOIDCClientResolvedConfig extends SSOOIDCClientResolvedConfigType { +} +/** + *

IAM Identity Center OpenID Connect (OIDC) is a web service that enables a client (such as CLI or a + * native application) to register with IAM Identity Center. The service also enables the client to fetch the + * user’s access token upon successful authentication and authorization with IAM Identity Center.

+ *

+ * API namespaces + *

+ *

IAM Identity Center uses the sso and identitystore API namespaces. IAM Identity Center + * OpenID Connect uses the sso-oidc namespace.

+ *

+ * Considerations for using this guide + *

+ *

Before you begin using this guide, we recommend that you first review the following + * important information about how the IAM Identity Center OIDC service works.

+ *
    + *
  • + *

    The IAM Identity Center OIDC service currently implements only the portions of the OAuth 2.0 Device + * Authorization Grant standard (https://tools.ietf.org/html/rfc8628) that are necessary to enable single + * sign-on authentication with the CLI.

    + *
  • + *
  • + *

    With older versions of the CLI, the service only emits OIDC access tokens, so to + * obtain a new token, users must explicitly re-authenticate. To access the OIDC flow that + * supports token refresh and doesn’t require re-authentication, update to the latest CLI + * version (1.27.10 for CLI V1 and 2.9.0 for CLI V2) with support for OIDC token refresh + * and configurable IAM Identity Center session durations. For more information, see Configure Amazon Web Services access portal session duration .

    + *
  • + *
  • + *

    The access tokens provided by this service grant access to all Amazon Web Services account + * entitlements assigned to an IAM Identity Center user, not just a particular application.

    + *
  • + *
  • + *

    The documentation in this guide does not describe the mechanism to convert the access + * token into Amazon Web Services Auth (“sigv4”) credentials for use with IAM-protected Amazon Web Services service + * endpoints. For more information, see GetRoleCredentials in the IAM Identity Center Portal API Reference + * Guide.

    + *
  • + *
+ *

For general information about IAM Identity Center, see What is + * IAM Identity Center? in the IAM Identity Center User Guide.

+ * @public + */ +export declare class SSOOIDCClient extends __Client<__HttpHandlerOptions, ServiceInputTypes, ServiceOutputTypes, SSOOIDCClientResolvedConfig> { + /** + * The resolved configuration of SSOOIDCClient class. This is resolved and normalized from the {@link SSOOIDCClientConfig | constructor configuration interface}. + */ + readonly config: SSOOIDCClientResolvedConfig; + constructor(...[configuration]: __CheckOptionalClientConfig); + /** + * Destroy underlying resources, like sockets. It's usually not necessary to do this. + * However in Node.js, it's best to explicitly shut down the client's agent when it is no longer needed. + * Otherwise, sockets might stay open for quite a long time before the server terminates them. + */ + destroy(): void; +} diff --git a/amplify/functions/fetchDocuments/node_modules/@aws-sdk/nested-clients/dist-types/submodules/sso-oidc/auth/httpAuthExtensionConfiguration.d.ts b/amplify/functions/fetchDocuments/node_modules/@aws-sdk/nested-clients/dist-types/submodules/sso-oidc/auth/httpAuthExtensionConfiguration.d.ts new file mode 100644 index 0000000..a56a608 --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@aws-sdk/nested-clients/dist-types/submodules/sso-oidc/auth/httpAuthExtensionConfiguration.d.ts @@ -0,0 +1,29 @@ +import { AwsCredentialIdentity, AwsCredentialIdentityProvider, HttpAuthScheme } from "@smithy/types"; +import { SSOOIDCHttpAuthSchemeProvider } from "./httpAuthSchemeProvider"; +/** + * @internal + */ +export interface HttpAuthExtensionConfiguration { + setHttpAuthScheme(httpAuthScheme: HttpAuthScheme): void; + httpAuthSchemes(): HttpAuthScheme[]; + setHttpAuthSchemeProvider(httpAuthSchemeProvider: SSOOIDCHttpAuthSchemeProvider): void; + httpAuthSchemeProvider(): SSOOIDCHttpAuthSchemeProvider; + setCredentials(credentials: AwsCredentialIdentity | AwsCredentialIdentityProvider): void; + credentials(): AwsCredentialIdentity | AwsCredentialIdentityProvider | undefined; +} +/** + * @internal + */ 
+export type HttpAuthRuntimeConfig = Partial<{ + httpAuthSchemes: HttpAuthScheme[]; + httpAuthSchemeProvider: SSOOIDCHttpAuthSchemeProvider; + credentials: AwsCredentialIdentity | AwsCredentialIdentityProvider; +}>; +/** + * @internal + */ +export declare const getHttpAuthExtensionConfiguration: (runtimeConfig: HttpAuthRuntimeConfig) => HttpAuthExtensionConfiguration; +/** + * @internal + */ +export declare const resolveHttpAuthRuntimeConfig: (config: HttpAuthExtensionConfiguration) => HttpAuthRuntimeConfig; diff --git a/amplify/functions/fetchDocuments/node_modules/@aws-sdk/nested-clients/dist-types/submodules/sso-oidc/auth/httpAuthSchemeProvider.d.ts b/amplify/functions/fetchDocuments/node_modules/@aws-sdk/nested-clients/dist-types/submodules/sso-oidc/auth/httpAuthSchemeProvider.d.ts new file mode 100644 index 0000000..8fc989a --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@aws-sdk/nested-clients/dist-types/submodules/sso-oidc/auth/httpAuthSchemeProvider.d.ts @@ -0,0 +1,75 @@ +import { AwsSdkSigV4AuthInputConfig, AwsSdkSigV4AuthResolvedConfig, AwsSdkSigV4PreviouslyResolved } from "@aws-sdk/core"; +import { HandlerExecutionContext, HttpAuthScheme, HttpAuthSchemeParameters, HttpAuthSchemeParametersProvider, HttpAuthSchemeProvider, Provider } from "@smithy/types"; +import { SSOOIDCClientResolvedConfig } from "../SSOOIDCClient"; +/** + * @internal + */ +export interface SSOOIDCHttpAuthSchemeParameters extends HttpAuthSchemeParameters { + region?: string; +} +/** + * @internal + */ +export interface SSOOIDCHttpAuthSchemeParametersProvider extends HttpAuthSchemeParametersProvider { +} +/** + * @internal + */ +export declare const defaultSSOOIDCHttpAuthSchemeParametersProvider: (config: SSOOIDCClientResolvedConfig, context: HandlerExecutionContext, input: object) => Promise; +/** + * @internal + */ +export interface SSOOIDCHttpAuthSchemeProvider extends HttpAuthSchemeProvider { +} +/** + * @internal + */ +export declare const 
defaultSSOOIDCHttpAuthSchemeProvider: SSOOIDCHttpAuthSchemeProvider; +/** + * @internal + */ +export interface HttpAuthSchemeInputConfig extends AwsSdkSigV4AuthInputConfig { + /** + * A comma-separated list of case-sensitive auth scheme names. + * An auth scheme name is a fully qualified auth scheme ID with the namespace prefix trimmed. + * For example, the auth scheme with ID aws.auth#sigv4 is named sigv4. + * @public + */ + authSchemePreference?: string[] | Provider; + /** + * Configuration of HttpAuthSchemes for a client which provides default identity providers and signers per auth scheme. + * @internal + */ + httpAuthSchemes?: HttpAuthScheme[]; + /** + * Configuration of an HttpAuthSchemeProvider for a client which resolves which HttpAuthScheme to use. + * @internal + */ + httpAuthSchemeProvider?: SSOOIDCHttpAuthSchemeProvider; +} +/** + * @internal + */ +export interface HttpAuthSchemeResolvedConfig extends AwsSdkSigV4AuthResolvedConfig { + /** + * A comma-separated list of case-sensitive auth scheme names. + * An auth scheme name is a fully qualified auth scheme ID with the namespace prefix trimmed. + * For example, the auth scheme with ID aws.auth#sigv4 is named sigv4. + * @public + */ + readonly authSchemePreference: Provider; + /** + * Configuration of HttpAuthSchemes for a client which provides default identity providers and signers per auth scheme. + * @internal + */ + readonly httpAuthSchemes: HttpAuthScheme[]; + /** + * Configuration of an HttpAuthSchemeProvider for a client which resolves which HttpAuthScheme to use. 
+ * @internal + */ + readonly httpAuthSchemeProvider: SSOOIDCHttpAuthSchemeProvider; +} +/** + * @internal + */ +export declare const resolveHttpAuthSchemeConfig: (config: T & HttpAuthSchemeInputConfig & AwsSdkSigV4PreviouslyResolved) => T & HttpAuthSchemeResolvedConfig; diff --git a/amplify/functions/fetchDocuments/node_modules/@aws-sdk/nested-clients/dist-types/submodules/sso-oidc/commands/CreateTokenCommand.d.ts b/amplify/functions/fetchDocuments/node_modules/@aws-sdk/nested-clients/dist-types/submodules/sso-oidc/commands/CreateTokenCommand.d.ts new file mode 100644 index 0000000..042fb52 --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@aws-sdk/nested-clients/dist-types/submodules/sso-oidc/commands/CreateTokenCommand.d.ts @@ -0,0 +1,174 @@ +import { Command as $Command } from "@smithy/smithy-client"; +import { MetadataBearer as __MetadataBearer } from "@smithy/types"; +import { CreateTokenRequest, CreateTokenResponse } from "../models/models_0"; +import { SSOOIDCClientResolvedConfig } from "../SSOOIDCClient"; +/** + * @public + */ +export type { __MetadataBearer }; +export { $Command }; +/** + * @public + * + * The input for {@link CreateTokenCommand}. + */ +export interface CreateTokenCommandInput extends CreateTokenRequest { +} +/** + * @public + * + * The output of {@link CreateTokenCommand}. + */ +export interface CreateTokenCommandOutput extends CreateTokenResponse, __MetadataBearer { +} +declare const CreateTokenCommand_base: { + new (input: CreateTokenCommandInput): import("@smithy/smithy-client").CommandImpl; + new (__0_0: CreateTokenCommandInput): import("@smithy/smithy-client").CommandImpl; + getEndpointParameterInstructions(): import("@smithy/middleware-endpoint").EndpointParameterInstructions; +}; +/** + *

Creates and returns access and refresh tokens for clients that are authenticated using + * client secrets. The access token can be used to fetch short-lived credentials for the assigned + * AWS accounts or to access application APIs using bearer authentication.

+ * @example + * Use a bare-bones client and the command you need to make an API call. + * ```javascript + * import { SSOOIDCClient, CreateTokenCommand } from "@aws-sdk/client-sso-oidc"; // ES Modules import + * // const { SSOOIDCClient, CreateTokenCommand } = require("@aws-sdk/client-sso-oidc"); // CommonJS import + * const client = new SSOOIDCClient(config); + * const input = { // CreateTokenRequest + * clientId: "STRING_VALUE", // required + * clientSecret: "STRING_VALUE", // required + * grantType: "STRING_VALUE", // required + * deviceCode: "STRING_VALUE", + * code: "STRING_VALUE", + * refreshToken: "STRING_VALUE", + * scope: [ // Scopes + * "STRING_VALUE", + * ], + * redirectUri: "STRING_VALUE", + * codeVerifier: "STRING_VALUE", + * }; + * const command = new CreateTokenCommand(input); + * const response = await client.send(command); + * // { // CreateTokenResponse + * // accessToken: "STRING_VALUE", + * // tokenType: "STRING_VALUE", + * // expiresIn: Number("int"), + * // refreshToken: "STRING_VALUE", + * // idToken: "STRING_VALUE", + * // }; + * + * ``` + * + * @param CreateTokenCommandInput - {@link CreateTokenCommandInput} + * @returns {@link CreateTokenCommandOutput} + * @see {@link CreateTokenCommandInput} for command's `input` shape. + * @see {@link CreateTokenCommandOutput} for command's `response` shape. + * @see {@link SSOOIDCClientResolvedConfig | config} for SSOOIDCClient's `config` shape. + * + * @throws {@link AccessDeniedException} (client fault) + *

You do not have sufficient access to perform this action.

+ * + * @throws {@link AuthorizationPendingException} (client fault) + *

Indicates that a request to authorize a client with an access user session token is + * pending.

+ * + * @throws {@link ExpiredTokenException} (client fault) + *

Indicates that the token issued by the service is expired and is no longer valid.

+ * + * @throws {@link InternalServerException} (server fault) + *

Indicates that an error from the service occurred while trying to process a + * request.

+ * + * @throws {@link InvalidClientException} (client fault) + *

Indicates that the clientId or clientSecret in the request is + * invalid. For example, this can occur when a client sends an incorrect clientId or + * an expired clientSecret.

+ * + * @throws {@link InvalidGrantException} (client fault) + *

Indicates that a request contains an invalid grant. This can occur if a client makes a + * CreateToken request with an invalid grant type.

+ * + * @throws {@link InvalidRequestException} (client fault) + *

Indicates that something is wrong with the input to the request. For example, a required + * parameter might be missing or out of range.

+ * + * @throws {@link InvalidScopeException} (client fault) + *

Indicates that the scope provided in the request is invalid.

+ * + * @throws {@link SlowDownException} (client fault) + *

Indicates that the client is making the request too frequently and is more than the + * service can handle.

+ * + * @throws {@link UnauthorizedClientException} (client fault) + *

Indicates that the client is not currently authorized to make the request. This can happen + * when a clientId is not issued for a public client.

+ * + * @throws {@link UnsupportedGrantTypeException} (client fault) + *

Indicates that the grant type in the request is not supported by the service.

+ * + * @throws {@link SSOOIDCServiceException} + *

Base exception class for all service exceptions from SSOOIDC service.

+ * + * + * @example Call OAuth/OIDC /token endpoint for Device Code grant with Secret authentication + * ```javascript + * // + * const input = { + * clientId: "_yzkThXVzLWVhc3QtMQEXAMPLECLIENTID", + * clientSecret: "VERYLONGSECRETeyJraWQiOiJrZXktMTU2NDAyODA5OSIsImFsZyI6IkhTMzg0In0", + * deviceCode: "yJraWQiOiJrZXktMTU2Njk2ODA4OCIsImFsZyI6IkhTMzIn0EXAMPLEDEVICECODE", + * grantType: "urn:ietf:params:oauth:grant-type:device-code" + * }; + * const command = new CreateTokenCommand(input); + * const response = await client.send(command); + * /* response is + * { + * accessToken: "aoal-YigITUDiNX1xZwOMXM5MxOWDL0E0jg9P6_C_jKQPxS_SKCP6f0kh1Up4g7TtvQqkMnD-GJiU_S1gvug6SrggAkc0:MGYCMQD3IatVjV7jAJU91kK3PkS/SfA2wtgWzOgZWDOR7sDGN9t0phCZz5It/aes/3C1Zj0CMQCKWOgRaiz6AIhza3DSXQNMLjRKXC8F8ceCsHlgYLMZ7hZidEXAMPLEACCESSTOKEN", + * expiresIn: 1579729529, + * refreshToken: "aorvJYubGpU6i91YnH7Mfo-AT2fIVa1zCfA_Rvq9yjVKIP3onFmmykuQ7E93y2I-9Nyj-A_sVvMufaLNL0bqnDRtgAkc0:MGUCMFrRsktMRVlWaOR70XGMFGLL0SlcCw4DiYveIiOVx1uK9BbD0gvAddsW3UTLozXKMgIxAJ3qxUvjpnlLIOaaKOoa/FuNgqJVvr9GMwDtnAtlh9iZzAkEXAMPLEREFRESHTOKEN", + * tokenType: "Bearer" + * } + * *\/ + * ``` + * + * @example Call OAuth/OIDC /token endpoint for Refresh Token grant with Secret authentication + * ```javascript + * // + * const input = { + * clientId: "_yzkThXVzLWVhc3QtMQEXAMPLECLIENTID", + * clientSecret: "VERYLONGSECRETeyJraWQiOiJrZXktMTU2NDAyODA5OSIsImFsZyI6IkhTMzg0In0", + * grantType: "refresh_token", + * refreshToken: "aorvJYubGpU6i91YnH7Mfo-AT2fIVa1zCfA_Rvq9yjVKIP3onFmmykuQ7E93y2I-9Nyj-A_sVvMufaLNL0bqnDRtgAkc0:MGUCMFrRsktMRVlWaOR70XGMFGLL0SlcCw4DiYveIiOVx1uK9BbD0gvAddsW3UTLozXKMgIxAJ3qxUvjpnlLIOaaKOoa/FuNgqJVvr9GMwDtnAtlh9iZzAkEXAMPLEREFRESHTOKEN", + * scope: [ + * "codewhisperer:completions" + * ] + * }; + * const command = new CreateTokenCommand(input); + * const response = await client.send(command); + * /* response is + * { + * accessToken: 
"aoal-YigITUDiNX1xZwOMXM5MxOWDL0E0jg9P6_C_jKQPxS_SKCP6f0kh1Up4g7TtvQqkMnD-GJiU_S1gvug6SrggAkc0:MGYCMQD3IatVjV7jAJU91kK3PkS/SfA2wtgWzOgZWDOR7sDGN9t0phCZz5It/aes/3C1Zj0CMQCKWOgRaiz6AIhza3DSXQNMLjRKXC8F8ceCsHlgYLMZ7hZidEXAMPLEACCESSTOKEN", + * expiresIn: 1579729529, + * refreshToken: "aorvJYubGpU6i91YnH7Mfo-AT2fIVa1zCfA_Rvq9yjVKIP3onFmmykuQ7E93y2I-9Nyj-A_sVvMufaLNL0bqnDRtgAkc0:MGUCMFrRsktMRVlWaOR70XGMFGLL0SlcCw4DiYveIiOVx1uK9BbD0gvAddsW3UTLozXKMgIxAJ3qxUvjpnlLIOaaKOoa/FuNgqJVvr9GMwDtnAtlh9iZzAkEXAMPLEREFRESHTOKEN", + * tokenType: "Bearer" + * } + * *\/ + * ``` + * + * @public + */ +export declare class CreateTokenCommand extends CreateTokenCommand_base { + /** @internal type navigation helper, not in runtime. */ + protected static __types: { + api: { + input: CreateTokenRequest; + output: CreateTokenResponse; + }; + sdk: { + input: CreateTokenCommandInput; + output: CreateTokenCommandOutput; + }; + }; +} diff --git a/amplify/functions/fetchDocuments/node_modules/@aws-sdk/nested-clients/dist-types/submodules/sso-oidc/commands/index.d.ts b/amplify/functions/fetchDocuments/node_modules/@aws-sdk/nested-clients/dist-types/submodules/sso-oidc/commands/index.d.ts new file mode 100644 index 0000000..09214ca --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@aws-sdk/nested-clients/dist-types/submodules/sso-oidc/commands/index.d.ts @@ -0,0 +1 @@ +export * from "./CreateTokenCommand"; diff --git a/amplify/functions/fetchDocuments/node_modules/@aws-sdk/nested-clients/dist-types/submodules/sso-oidc/endpoint/EndpointParameters.d.ts b/amplify/functions/fetchDocuments/node_modules/@aws-sdk/nested-clients/dist-types/submodules/sso-oidc/endpoint/EndpointParameters.d.ts new file mode 100644 index 0000000..23f42e3 --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@aws-sdk/nested-clients/dist-types/submodules/sso-oidc/endpoint/EndpointParameters.d.ts @@ -0,0 +1,40 @@ +import { Endpoint, EndpointParameters as __EndpointParameters, EndpointV2, Provider } from 
"@smithy/types"; +/** + * @public + */ +export interface ClientInputEndpointParameters { + region?: string | Provider; + useDualstackEndpoint?: boolean | Provider; + useFipsEndpoint?: boolean | Provider; + endpoint?: string | Provider | Endpoint | Provider | EndpointV2 | Provider; +} +export type ClientResolvedEndpointParameters = ClientInputEndpointParameters & { + defaultSigningName: string; +}; +export declare const resolveClientEndpointParameters: (options: T & ClientInputEndpointParameters) => T & ClientInputEndpointParameters & { + defaultSigningName: string; +}; +export declare const commonParams: { + readonly UseFIPS: { + readonly type: "builtInParams"; + readonly name: "useFipsEndpoint"; + }; + readonly Endpoint: { + readonly type: "builtInParams"; + readonly name: "endpoint"; + }; + readonly Region: { + readonly type: "builtInParams"; + readonly name: "region"; + }; + readonly UseDualStack: { + readonly type: "builtInParams"; + readonly name: "useDualstackEndpoint"; + }; +}; +export interface EndpointParameters extends __EndpointParameters { + Region?: string; + UseDualStack?: boolean; + UseFIPS?: boolean; + Endpoint?: string; +} diff --git a/amplify/functions/fetchDocuments/node_modules/@aws-sdk/nested-clients/dist-types/submodules/sso-oidc/endpoint/endpointResolver.d.ts b/amplify/functions/fetchDocuments/node_modules/@aws-sdk/nested-clients/dist-types/submodules/sso-oidc/endpoint/endpointResolver.d.ts new file mode 100644 index 0000000..70a8eae --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@aws-sdk/nested-clients/dist-types/submodules/sso-oidc/endpoint/endpointResolver.d.ts @@ -0,0 +1,5 @@ +import { EndpointV2, Logger } from "@smithy/types"; +import { EndpointParameters } from "./EndpointParameters"; +export declare const defaultEndpointResolver: (endpointParams: EndpointParameters, context?: { + logger?: Logger; +}) => EndpointV2; diff --git 
a/amplify/functions/fetchDocuments/node_modules/@aws-sdk/nested-clients/dist-types/submodules/sso-oidc/endpoint/ruleset.d.ts b/amplify/functions/fetchDocuments/node_modules/@aws-sdk/nested-clients/dist-types/submodules/sso-oidc/endpoint/ruleset.d.ts new file mode 100644 index 0000000..4b23899 --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@aws-sdk/nested-clients/dist-types/submodules/sso-oidc/endpoint/ruleset.d.ts @@ -0,0 +1,2 @@ +import { RuleSetObject } from "@smithy/types"; +export declare const ruleSet: RuleSetObject; diff --git a/amplify/functions/fetchDocuments/node_modules/@aws-sdk/nested-clients/dist-types/submodules/sso-oidc/extensionConfiguration.d.ts b/amplify/functions/fetchDocuments/node_modules/@aws-sdk/nested-clients/dist-types/submodules/sso-oidc/extensionConfiguration.d.ts new file mode 100644 index 0000000..c78de85 --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@aws-sdk/nested-clients/dist-types/submodules/sso-oidc/extensionConfiguration.d.ts @@ -0,0 +1,9 @@ +import { AwsRegionExtensionConfiguration } from "@aws-sdk/types"; +import { HttpHandlerExtensionConfiguration } from "@smithy/protocol-http"; +import { DefaultExtensionConfiguration } from "@smithy/types"; +import { HttpAuthExtensionConfiguration } from "./auth/httpAuthExtensionConfiguration"; +/** + * @internal + */ +export interface SSOOIDCExtensionConfiguration extends HttpHandlerExtensionConfiguration, DefaultExtensionConfiguration, AwsRegionExtensionConfiguration, HttpAuthExtensionConfiguration { +} diff --git a/amplify/functions/fetchDocuments/node_modules/@aws-sdk/nested-clients/dist-types/submodules/sso-oidc/index.d.ts b/amplify/functions/fetchDocuments/node_modules/@aws-sdk/nested-clients/dist-types/submodules/sso-oidc/index.d.ts new file mode 100644 index 0000000..54c46dd --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@aws-sdk/nested-clients/dist-types/submodules/sso-oidc/index.d.ts @@ -0,0 +1,51 @@ +/** + *

IAM Identity Center OpenID Connect (OIDC) is a web service that enables a client (such as CLI or a + * native application) to register with IAM Identity Center. The service also enables the client to fetch the + * user’s access token upon successful authentication and authorization with IAM Identity Center.

+ *

+ * API namespaces + *

+ *

IAM Identity Center uses the sso and identitystore API namespaces. IAM Identity Center + * OpenID Connect uses the sso-oidc namespace.

+ *

+ * Considerations for using this guide + *

+ *

Before you begin using this guide, we recommend that you first review the following + * important information about how the IAM Identity Center OIDC service works.

+ *
    + *
  • + *

    The IAM Identity Center OIDC service currently implements only the portions of the OAuth 2.0 Device + * Authorization Grant standard (https://tools.ietf.org/html/rfc8628) that are necessary to enable single + * sign-on authentication with the CLI.

    + *
  • + *
  • + *

    With older versions of the CLI, the service only emits OIDC access tokens, so to + * obtain a new token, users must explicitly re-authenticate. To access the OIDC flow that + * supports token refresh and doesn’t require re-authentication, update to the latest CLI + * version (1.27.10 for CLI V1 and 2.9.0 for CLI V2) with support for OIDC token refresh + * and configurable IAM Identity Center session durations. For more information, see Configure Amazon Web Services access portal session duration .

    + *
  • + *
  • + *

    The access tokens provided by this service grant access to all Amazon Web Services account + * entitlements assigned to an IAM Identity Center user, not just a particular application.

    + *
  • + *
  • + *

    The documentation in this guide does not describe the mechanism to convert the access + * token into Amazon Web Services Auth (“sigv4”) credentials for use with IAM-protected Amazon Web Services service + * endpoints. For more information, see GetRoleCredentials in the IAM Identity Center Portal API Reference + * Guide.

    + *
  • + *
+ *

For general information about IAM Identity Center, see What is + * IAM Identity Center? in the IAM Identity Center User Guide.

+ * + * @packageDocumentation + */ +export * from "./SSOOIDCClient"; +export * from "./SSOOIDC"; +export { ClientInputEndpointParameters } from "./endpoint/EndpointParameters"; +export type { RuntimeExtension } from "./runtimeExtensions"; +export type { SSOOIDCExtensionConfiguration } from "./extensionConfiguration"; +export * from "./commands"; +export * from "./models"; +export { SSOOIDCServiceException } from "./models/SSOOIDCServiceException"; diff --git a/amplify/functions/fetchDocuments/node_modules/@aws-sdk/nested-clients/dist-types/submodules/sso-oidc/models/SSOOIDCServiceException.d.ts b/amplify/functions/fetchDocuments/node_modules/@aws-sdk/nested-clients/dist-types/submodules/sso-oidc/models/SSOOIDCServiceException.d.ts new file mode 100644 index 0000000..d45f71a --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@aws-sdk/nested-clients/dist-types/submodules/sso-oidc/models/SSOOIDCServiceException.d.ts @@ -0,0 +1,14 @@ +import { ServiceException as __ServiceException, ServiceExceptionOptions as __ServiceExceptionOptions } from "@smithy/smithy-client"; +export type { __ServiceExceptionOptions }; +export { __ServiceException }; +/** + * @public + * + * Base exception class for all service exceptions from SSOOIDC service. 
+ */ +export declare class SSOOIDCServiceException extends __ServiceException { + /** + * @internal + */ + constructor(options: __ServiceExceptionOptions); +} diff --git a/amplify/functions/fetchDocuments/node_modules/@aws-sdk/nested-clients/dist-types/submodules/sso-oidc/models/index.d.ts b/amplify/functions/fetchDocuments/node_modules/@aws-sdk/nested-clients/dist-types/submodules/sso-oidc/models/index.d.ts new file mode 100644 index 0000000..09c5d6e --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@aws-sdk/nested-clients/dist-types/submodules/sso-oidc/models/index.d.ts @@ -0,0 +1 @@ +export * from "./models_0"; diff --git a/amplify/functions/fetchDocuments/node_modules/@aws-sdk/nested-clients/dist-types/submodules/sso-oidc/models/models_0.d.ts b/amplify/functions/fetchDocuments/node_modules/@aws-sdk/nested-clients/dist-types/submodules/sso-oidc/models/models_0.d.ts new file mode 100644 index 0000000..2d3c3f1 --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@aws-sdk/nested-clients/dist-types/submodules/sso-oidc/models/models_0.d.ts @@ -0,0 +1,387 @@ +import { ExceptionOptionType as __ExceptionOptionType } from "@smithy/smithy-client"; +import { SSOOIDCServiceException as __BaseException } from "./SSOOIDCServiceException"; +/** + *

You do not have sufficient access to perform this action.

+ * @public + */ +export declare class AccessDeniedException extends __BaseException { + readonly name: "AccessDeniedException"; + readonly $fault: "client"; + /** + *

Single error code. For this exception the value will be access_denied.

+ * @public + */ + error?: string | undefined; + /** + *

Human-readable text providing additional information, used to assist the client developer + * in understanding the error that occurred.

+ * @public + */ + error_description?: string | undefined; + /** + * @internal + */ + constructor(opts: __ExceptionOptionType); +} +/** + *

Indicates that a request to authorize a client with an access user session token is + * pending.

+ * @public + */ +export declare class AuthorizationPendingException extends __BaseException { + readonly name: "AuthorizationPendingException"; + readonly $fault: "client"; + /** + *

Single error code. For this exception the value will be + * authorization_pending.

+ * @public + */ + error?: string | undefined; + /** + *

Human-readable text providing additional information, used to assist the client developer + * in understanding the error that occurred.

+ * @public + */ + error_description?: string | undefined; + /** + * @internal + */ + constructor(opts: __ExceptionOptionType); +} +/** + * @public + */ +export interface CreateTokenRequest { + /** + *

The unique identifier string for the client or application. This value comes from the + * result of the RegisterClient API.

+ * @public + */ + clientId: string | undefined; + /** + *

A secret string generated for the client. This value should come from the persisted result + * of the RegisterClient API.

+ * @public + */ + clientSecret: string | undefined; + /** + *

Supports the following OAuth grant types: Authorization Code, Device Code, and Refresh + * Token. Specify one of the following values, depending on the grant type that you want:

+ *

* Authorization Code - authorization_code + *

+ *

* Device Code - urn:ietf:params:oauth:grant-type:device_code + *

+ *

* Refresh Token - refresh_token + *

+ * @public + */ + grantType: string | undefined; + /** + *

Used only when calling this API for the Device Code grant type. This short-lived code is + * used to identify this authorization request. This comes from the result of the StartDeviceAuthorization API.

+ * @public + */ + deviceCode?: string | undefined; + /** + *

Used only when calling this API for the Authorization Code grant type. The short-lived + * code is used to identify this authorization request.

+ * @public + */ + code?: string | undefined; + /** + *

Used only when calling this API for the Refresh Token grant type. This token is used to + * refresh short-lived tokens, such as the access token, that might expire.

+ *

For more information about the features and limitations of the current IAM Identity Center OIDC + * implementation, see Considerations for Using this Guide in the IAM Identity Center + * OIDC API Reference.

+ * @public + */ + refreshToken?: string | undefined; + /** + *

The list of scopes for which authorization is requested. The access token that is issued + * is limited to the scopes that are granted. If this value is not specified, IAM Identity Center authorizes + * all scopes that are configured for the client during the call to RegisterClient.

+ * @public + */ + scope?: string[] | undefined; + /** + *

Used only when calling this API for the Authorization Code grant type. This value + * specifies the location of the client or application that has registered to receive the + * authorization code.

+ * @public + */ + redirectUri?: string | undefined; + /** + *

Used only when calling this API for the Authorization Code grant type. This value is + * generated by the client and presented to validate the original code challenge value the client + * passed at authorization time.

+ * @public + */ + codeVerifier?: string | undefined; +} +/** + * @internal + */ +export declare const CreateTokenRequestFilterSensitiveLog: (obj: CreateTokenRequest) => any; +/** + * @public + */ +export interface CreateTokenResponse { + /** + *

A bearer token to access Amazon Web Services accounts and applications assigned to a user.

+ * @public + */ + accessToken?: string | undefined; + /** + *

Used to notify the client that the returned token is an access token. The supported token + * type is Bearer.

+ * @public + */ + tokenType?: string | undefined; + /** + *

Indicates the time in seconds when an access token will expire.

+ * @public + */ + expiresIn?: number | undefined; + /** + *

A token that, if present, can be used to refresh a previously issued access token that + * might have expired.

+ *

For more information about the features and limitations of the current IAM Identity Center OIDC + * implementation, see Considerations for Using this Guide in the IAM Identity Center + * OIDC API Reference.

+ * @public + */ + refreshToken?: string | undefined; + /** + *

The idToken is not implemented or supported. For more information about the + * features and limitations of the current IAM Identity Center OIDC implementation, see + * Considerations for Using this Guide in the IAM Identity Center + * OIDC API Reference.

+ *

A JSON Web Token (JWT) that identifies who is associated with the issued access token. + *

+ * @public + */ + idToken?: string | undefined; +} +/** + * @internal + */ +export declare const CreateTokenResponseFilterSensitiveLog: (obj: CreateTokenResponse) => any; +/** + *

Indicates that the token issued by the service is expired and is no longer valid.

+ * @public + */ +export declare class ExpiredTokenException extends __BaseException { + readonly name: "ExpiredTokenException"; + readonly $fault: "client"; + /** + *

Single error code. For this exception the value will be expired_token.

+ * @public + */ + error?: string | undefined; + /** + *

Human-readable text providing additional information, used to assist the client developer + * in understanding the error that occurred.

+ * @public + */ + error_description?: string | undefined; + /** + * @internal + */ + constructor(opts: __ExceptionOptionType); +} +/** + *

Indicates that an error from the service occurred while trying to process a + * request.

+ * @public + */ +export declare class InternalServerException extends __BaseException { + readonly name: "InternalServerException"; + readonly $fault: "server"; + /** + *

Single error code. For this exception the value will be server_error.

+ * @public + */ + error?: string | undefined; + /** + *

Human-readable text providing additional information, used to assist the client developer + * in understanding the error that occurred.

+ * @public + */ + error_description?: string | undefined; + /** + * @internal + */ + constructor(opts: __ExceptionOptionType); +} +/** + *

Indicates that the clientId or clientSecret in the request is + * invalid. For example, this can occur when a client sends an incorrect clientId or + * an expired clientSecret.

+ * @public + */ +export declare class InvalidClientException extends __BaseException { + readonly name: "InvalidClientException"; + readonly $fault: "client"; + /** + *

Single error code. For this exception the value will be + * invalid_client.

+ * @public + */ + error?: string | undefined; + /** + *

Human-readable text providing additional information, used to assist the client developer + * in understanding the error that occurred.

+ * @public + */ + error_description?: string | undefined; + /** + * @internal + */ + constructor(opts: __ExceptionOptionType); +} +/** + *

Indicates that a request contains an invalid grant. This can occur if a client makes a + * CreateToken request with an invalid grant type.

+ * @public + */ +export declare class InvalidGrantException extends __BaseException { + readonly name: "InvalidGrantException"; + readonly $fault: "client"; + /** + *

Single error code. For this exception the value will be invalid_grant.

+ * @public + */ + error?: string | undefined; + /** + *

Human-readable text providing additional information, used to assist the client developer + * in understanding the error that occurred.

+ * @public + */ + error_description?: string | undefined; + /** + * @internal + */ + constructor(opts: __ExceptionOptionType); +} +/** + *

Indicates that something is wrong with the input to the request. For example, a required + * parameter might be missing or out of range.

+ * @public + */ +export declare class InvalidRequestException extends __BaseException { + readonly name: "InvalidRequestException"; + readonly $fault: "client"; + /** + *

Single error code. For this exception the value will be + * invalid_request.

+ * @public + */ + error?: string | undefined; + /** + *

Human-readable text providing additional information, used to assist the client developer + * in understanding the error that occurred.

+ * @public + */ + error_description?: string | undefined; + /** + * @internal + */ + constructor(opts: __ExceptionOptionType); +} +/** + *

Indicates that the scope provided in the request is invalid.

+ * @public + */ +export declare class InvalidScopeException extends __BaseException { + readonly name: "InvalidScopeException"; + readonly $fault: "client"; + /** + *

Single error code. For this exception the value will be invalid_scope.

+ * @public + */ + error?: string | undefined; + /** + *

Human-readable text providing additional information, used to assist the client developer + * in understanding the error that occurred.

+ * @public + */ + error_description?: string | undefined; + /** + * @internal + */ + constructor(opts: __ExceptionOptionType); +} +/** + *

Indicates that the client is making the request too frequently and is more than the + * service can handle.

+ * @public + */ +export declare class SlowDownException extends __BaseException { + readonly name: "SlowDownException"; + readonly $fault: "client"; + /** + *

Single error code. For this exception the value will be slow_down.

+ * @public + */ + error?: string | undefined; + /** + *

Human-readable text providing additional information, used to assist the client developer + * in understanding the error that occurred.

+ * @public + */ + error_description?: string | undefined; + /** + * @internal + */ + constructor(opts: __ExceptionOptionType); +} +/** + *

Indicates that the client is not currently authorized to make the request. This can happen + * when a clientId is not issued for a public client.

+ * @public + */ +export declare class UnauthorizedClientException extends __BaseException { + readonly name: "UnauthorizedClientException"; + readonly $fault: "client"; + /** + *

Single error code. For this exception the value will be + * unauthorized_client.

+ * @public + */ + error?: string | undefined; + /** + *

Human-readable text providing additional information, used to assist the client developer + * in understanding the error that occurred.

+ * @public + */ + error_description?: string | undefined; + /** + * @internal + */ + constructor(opts: __ExceptionOptionType); +} +/** + *

Indicates that the grant type in the request is not supported by the service.

+ * @public + */ +export declare class UnsupportedGrantTypeException extends __BaseException { + readonly name: "UnsupportedGrantTypeException"; + readonly $fault: "client"; + /** + *

Single error code. For this exception the value will be + * unsupported_grant_type.

+ * @public + */ + error?: string | undefined; + /** + *

Human-readable text providing additional information, used to assist the client developer + * in understanding the error that occurred.

+ * @public + */ + error_description?: string | undefined; + /** + * @internal + */ + constructor(opts: __ExceptionOptionType); +} diff --git a/amplify/functions/fetchDocuments/node_modules/@aws-sdk/nested-clients/dist-types/submodules/sso-oidc/protocols/Aws_restJson1.d.ts b/amplify/functions/fetchDocuments/node_modules/@aws-sdk/nested-clients/dist-types/submodules/sso-oidc/protocols/Aws_restJson1.d.ts new file mode 100644 index 0000000..d4e38b4 --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@aws-sdk/nested-clients/dist-types/submodules/sso-oidc/protocols/Aws_restJson1.d.ts @@ -0,0 +1,11 @@ +import { HttpRequest as __HttpRequest, HttpResponse as __HttpResponse } from "@smithy/protocol-http"; +import { SerdeContext as __SerdeContext } from "@smithy/types"; +import { CreateTokenCommandInput, CreateTokenCommandOutput } from "../commands/CreateTokenCommand"; +/** + * serializeAws_restJson1CreateTokenCommand + */ +export declare const se_CreateTokenCommand: (input: CreateTokenCommandInput, context: __SerdeContext) => Promise<__HttpRequest>; +/** + * deserializeAws_restJson1CreateTokenCommand + */ +export declare const de_CreateTokenCommand: (output: __HttpResponse, context: __SerdeContext) => Promise; diff --git a/amplify/functions/fetchDocuments/node_modules/@aws-sdk/nested-clients/dist-types/submodules/sso-oidc/runtimeConfig.browser.d.ts b/amplify/functions/fetchDocuments/node_modules/@aws-sdk/nested-clients/dist-types/submodules/sso-oidc/runtimeConfig.browser.d.ts new file mode 100644 index 0000000..26c727f --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@aws-sdk/nested-clients/dist-types/submodules/sso-oidc/runtimeConfig.browser.d.ts @@ -0,0 +1,57 @@ +import { FetchHttpHandler as RequestHandler } from "@smithy/fetch-http-handler"; +import { SSOOIDCClientConfig } from "./SSOOIDCClient"; +/** + * @internal + */ +export declare const getRuntimeConfig: (config: SSOOIDCClientConfig) => { + runtime: string; + defaultsMode: 
import("@smithy/types").Provider; + bodyLengthChecker: import("@smithy/types").BodyLengthCalculator; + defaultUserAgentProvider: (config?: import("@aws-sdk/util-user-agent-browser").PreviouslyResolved | undefined) => Promise; + maxAttempts: number | import("@smithy/types").Provider; + region: string | import("@smithy/types").Provider; + requestHandler: import("@smithy/protocol-http").HttpHandler | RequestHandler; + retryMode: string | import("@smithy/types").Provider; + sha256: import("@smithy/types").HashConstructor; + streamCollector: import("@smithy/types").StreamCollector; + useDualstackEndpoint: boolean | import("@smithy/types").Provider; + useFipsEndpoint: boolean | import("@smithy/types").Provider; + apiVersion: string; + cacheMiddleware?: boolean | undefined; + urlParser: import("@smithy/types").UrlParser; + base64Decoder: import("@smithy/types").Decoder; + base64Encoder: (_input: string | Uint8Array) => string; + utf8Decoder: import("@smithy/types").Decoder; + utf8Encoder: (input: string | Uint8Array) => string; + disableHostPrefix: boolean; + serviceId: string; + profile?: string | undefined; + logger: import("@smithy/types").Logger; + extensions: import("./runtimeExtensions").RuntimeExtension[]; + customUserAgent?: string | import("@smithy/types").UserAgent | undefined; + userAgentAppId?: string | import("@smithy/types").Provider | undefined; + retryStrategy?: import("@smithy/types").RetryStrategy | import("@smithy/types").RetryStrategyV2 | undefined; + endpoint?: ((string | import("@smithy/types").Endpoint | import("@smithy/types").Provider | import("@smithy/types").EndpointV2 | import("@smithy/types").Provider) & (string | import("@smithy/types").Provider | import("@smithy/types").Endpoint | import("@smithy/types").Provider | import("@smithy/types").EndpointV2 | import("@smithy/types").Provider)) | undefined; + endpointProvider: (endpointParams: import("./endpoint/EndpointParameters").EndpointParameters, context?: { + logger?: 
import("@smithy/types").Logger | undefined; + }) => import("@smithy/types").EndpointV2; + tls?: boolean | undefined; + serviceConfiguredEndpoint?: undefined; + authSchemePreference?: string[] | import("@smithy/types").Provider | undefined; + httpAuthSchemes: import("@smithy/types").HttpAuthScheme[] | ({ + schemeId: string; + identityProvider: (ipc: import("@smithy/types").IdentityProviderConfig) => import("@smithy/types").IdentityProvider | undefined; + signer: import("@aws-sdk/core").AwsSdkSigV4Signer; + } | { + schemeId: string; + identityProvider: (ipc: import("@smithy/types").IdentityProviderConfig) => import("@smithy/types").IdentityProvider | (() => Promise<{}>); + signer: import("@smithy/core").NoAuthSigner; + })[]; + httpAuthSchemeProvider: import("./auth/httpAuthSchemeProvider").SSOOIDCHttpAuthSchemeProvider; + credentials?: import("@smithy/types").AwsCredentialIdentity | import("@smithy/types").AwsCredentialIdentityProvider | undefined; + signer?: import("@smithy/types").RequestSigner | ((authScheme?: import("@smithy/types").AuthScheme | undefined) => Promise) | undefined; + signingEscapePath?: boolean | undefined; + systemClockOffset?: number | undefined; + signingRegion?: string | undefined; + signerConstructor?: (new (options: import("@smithy/signature-v4").SignatureV4Init & import("@smithy/signature-v4").SignatureV4CryptoInit) => import("@smithy/types").RequestSigner) | undefined; +}; diff --git a/amplify/functions/fetchDocuments/node_modules/@aws-sdk/nested-clients/dist-types/submodules/sso-oidc/runtimeConfig.d.ts b/amplify/functions/fetchDocuments/node_modules/@aws-sdk/nested-clients/dist-types/submodules/sso-oidc/runtimeConfig.d.ts new file mode 100644 index 0000000..1819a97 --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@aws-sdk/nested-clients/dist-types/submodules/sso-oidc/runtimeConfig.d.ts @@ -0,0 +1,57 @@ +import { NodeHttpHandler as RequestHandler } from "@smithy/node-http-handler"; +import { SSOOIDCClientConfig } from 
"./SSOOIDCClient"; +/** + * @internal + */ +export declare const getRuntimeConfig: (config: SSOOIDCClientConfig) => { + runtime: string; + defaultsMode: import("@smithy/types").Provider; + authSchemePreference: string[] | import("@smithy/types").Provider; + bodyLengthChecker: import("@smithy/types").BodyLengthCalculator; + defaultUserAgentProvider: (config?: import("@aws-sdk/util-user-agent-node").PreviouslyResolved | undefined) => Promise; + maxAttempts: number | import("@smithy/types").Provider; + region: string | import("@smithy/types").Provider; + requestHandler: RequestHandler | import("@smithy/protocol-http").HttpHandler; + retryMode: string | import("@smithy/types").Provider; + sha256: import("@smithy/types").HashConstructor; + streamCollector: import("@smithy/types").StreamCollector; + useDualstackEndpoint: boolean | import("@smithy/types").Provider; + useFipsEndpoint: boolean | import("@smithy/types").Provider; + userAgentAppId: string | import("@smithy/types").Provider; + apiVersion: string; + cacheMiddleware?: boolean | undefined; + urlParser: import("@smithy/types").UrlParser; + base64Decoder: import("@smithy/types").Decoder; + base64Encoder: (_input: string | Uint8Array) => string; + utf8Decoder: import("@smithy/types").Decoder; + utf8Encoder: (input: string | Uint8Array) => string; + disableHostPrefix: boolean; + serviceId: string; + profile?: string | undefined; + logger: import("@smithy/types").Logger; + extensions: import("./runtimeExtensions").RuntimeExtension[]; + customUserAgent?: string | import("@smithy/types").UserAgent | undefined; + retryStrategy?: import("@smithy/types").RetryStrategy | import("@smithy/types").RetryStrategyV2 | undefined; + endpoint?: ((string | import("@smithy/types").Endpoint | import("@smithy/types").Provider | import("@smithy/types").EndpointV2 | import("@smithy/types").Provider) & (string | import("@smithy/types").Provider | import("@smithy/types").Endpoint | import("@smithy/types").Provider | 
import("@smithy/types").EndpointV2 | import("@smithy/types").Provider)) | undefined; + endpointProvider: (endpointParams: import("./endpoint/EndpointParameters").EndpointParameters, context?: { + logger?: import("@smithy/types").Logger | undefined; + }) => import("@smithy/types").EndpointV2; + tls?: boolean | undefined; + serviceConfiguredEndpoint?: undefined; + httpAuthSchemes: import("@smithy/types").HttpAuthScheme[] | ({ + schemeId: string; + identityProvider: (ipc: import("@smithy/types").IdentityProviderConfig) => import("@smithy/types").IdentityProvider | undefined; + signer: import("@aws-sdk/core").AwsSdkSigV4Signer; + } | { + schemeId: string; + identityProvider: (ipc: import("@smithy/types").IdentityProviderConfig) => import("@smithy/types").IdentityProvider | (() => Promise<{}>); + signer: import("@smithy/core").NoAuthSigner; + })[]; + httpAuthSchemeProvider: import("./auth/httpAuthSchemeProvider").SSOOIDCHttpAuthSchemeProvider; + credentials?: import("@smithy/types").AwsCredentialIdentity | import("@smithy/types").AwsCredentialIdentityProvider | undefined; + signer?: import("@smithy/types").RequestSigner | ((authScheme?: import("@smithy/types").AuthScheme | undefined) => Promise) | undefined; + signingEscapePath?: boolean | undefined; + systemClockOffset?: number | undefined; + signingRegion?: string | undefined; + signerConstructor?: (new (options: import("@smithy/signature-v4").SignatureV4Init & import("@smithy/signature-v4").SignatureV4CryptoInit) => import("@smithy/types").RequestSigner) | undefined; +}; diff --git a/amplify/functions/fetchDocuments/node_modules/@aws-sdk/nested-clients/dist-types/submodules/sso-oidc/runtimeConfig.native.d.ts b/amplify/functions/fetchDocuments/node_modules/@aws-sdk/nested-clients/dist-types/submodules/sso-oidc/runtimeConfig.native.d.ts new file mode 100644 index 0000000..86acac7 --- /dev/null +++ 
b/amplify/functions/fetchDocuments/node_modules/@aws-sdk/nested-clients/dist-types/submodules/sso-oidc/runtimeConfig.native.d.ts @@ -0,0 +1,56 @@ +import { SSOOIDCClientConfig } from "./SSOOIDCClient"; +/** + * @internal + */ +export declare const getRuntimeConfig: (config: SSOOIDCClientConfig) => { + runtime: string; + sha256: import("@smithy/types").HashConstructor; + requestHandler: import("@smithy/types").NodeHttpHandlerOptions | import("@smithy/types").FetchHttpHandlerOptions | Record | import("@smithy/protocol-http").HttpHandler | import("@smithy/fetch-http-handler").FetchHttpHandler; + apiVersion: string; + cacheMiddleware?: boolean | undefined; + urlParser: import("@smithy/types").UrlParser; + bodyLengthChecker: import("@smithy/types").BodyLengthCalculator; + streamCollector: import("@smithy/types").StreamCollector; + base64Decoder: import("@smithy/types").Decoder; + base64Encoder: (_input: string | Uint8Array) => string; + utf8Decoder: import("@smithy/types").Decoder; + utf8Encoder: (input: string | Uint8Array) => string; + disableHostPrefix: boolean; + serviceId: string; + useDualstackEndpoint: boolean | import("@smithy/types").Provider; + useFipsEndpoint: boolean | import("@smithy/types").Provider; + region: string | import("@smithy/types").Provider; + profile?: string | undefined; + defaultUserAgentProvider: (config?: import("@aws-sdk/util-user-agent-browser").PreviouslyResolved | undefined) => Promise; + maxAttempts: number | import("@smithy/types").Provider; + retryMode: string | import("@smithy/types").Provider; + logger: import("@smithy/types").Logger; + extensions: import("./runtimeExtensions").RuntimeExtension[]; + defaultsMode: import("@smithy/smithy-client").DefaultsMode | import("@smithy/types").Provider; + customUserAgent?: string | import("@smithy/types").UserAgent | undefined; + userAgentAppId?: string | import("@smithy/types").Provider | undefined; + retryStrategy?: import("@smithy/types").RetryStrategy | 
import("@smithy/types").RetryStrategyV2 | undefined; + endpoint?: ((string | import("@smithy/types").Endpoint | import("@smithy/types").Provider | import("@smithy/types").EndpointV2 | import("@smithy/types").Provider) & (string | import("@smithy/types").Provider | import("@smithy/types").Endpoint | import("@smithy/types").Provider | import("@smithy/types").EndpointV2 | import("@smithy/types").Provider)) | undefined; + endpointProvider: (endpointParams: import("./endpoint/EndpointParameters").EndpointParameters, context?: { + logger?: import("@smithy/types").Logger | undefined; + }) => import("@smithy/types").EndpointV2; + tls?: boolean | undefined; + serviceConfiguredEndpoint?: undefined; + authSchemePreference?: string[] | import("@smithy/types").Provider | undefined; + httpAuthSchemes: import("@smithy/types").HttpAuthScheme[] | ({ + schemeId: string; + identityProvider: (ipc: import("@smithy/types").IdentityProviderConfig) => import("@smithy/types").IdentityProvider | undefined; + signer: import("@aws-sdk/core").AwsSdkSigV4Signer; + } | { + schemeId: string; + identityProvider: (ipc: import("@smithy/types").IdentityProviderConfig) => import("@smithy/types").IdentityProvider | (() => Promise<{}>); + signer: import("@smithy/core").NoAuthSigner; + })[]; + httpAuthSchemeProvider: import("./auth/httpAuthSchemeProvider").SSOOIDCHttpAuthSchemeProvider; + credentials?: import("@smithy/types").AwsCredentialIdentity | import("@smithy/types").AwsCredentialIdentityProvider | undefined; + signer?: import("@smithy/types").RequestSigner | ((authScheme?: import("@smithy/types").AuthScheme | undefined) => Promise) | undefined; + signingEscapePath?: boolean | undefined; + systemClockOffset?: number | undefined; + signingRegion?: string | undefined; + signerConstructor?: (new (options: import("@smithy/signature-v4").SignatureV4Init & import("@smithy/signature-v4").SignatureV4CryptoInit) => import("@smithy/types").RequestSigner) | undefined; +}; diff --git 
a/amplify/functions/fetchDocuments/node_modules/@aws-sdk/nested-clients/dist-types/submodules/sso-oidc/runtimeConfig.shared.d.ts b/amplify/functions/fetchDocuments/node_modules/@aws-sdk/nested-clients/dist-types/submodules/sso-oidc/runtimeConfig.shared.d.ts new file mode 100644 index 0000000..e110017 --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@aws-sdk/nested-clients/dist-types/submodules/sso-oidc/runtimeConfig.shared.d.ts @@ -0,0 +1,32 @@ +import { AwsSdkSigV4Signer } from "@aws-sdk/core"; +import { NoAuthSigner } from "@smithy/core"; +import { IdentityProviderConfig } from "@smithy/types"; +import { SSOOIDCClientConfig } from "./SSOOIDCClient"; +/** + * @internal + */ +export declare const getRuntimeConfig: (config: SSOOIDCClientConfig) => { + apiVersion: string; + base64Decoder: import("@smithy/types").Decoder; + base64Encoder: (_input: string | Uint8Array) => string; + disableHostPrefix: boolean; + endpointProvider: (endpointParams: import("./endpoint/EndpointParameters").EndpointParameters, context?: { + logger?: import("@smithy/types").Logger | undefined; + }) => import("@smithy/types").EndpointV2; + extensions: import("./runtimeExtensions").RuntimeExtension[]; + httpAuthSchemeProvider: import("./auth/httpAuthSchemeProvider").SSOOIDCHttpAuthSchemeProvider; + httpAuthSchemes: import("@smithy/types").HttpAuthScheme[] | ({ + schemeId: string; + identityProvider: (ipc: IdentityProviderConfig) => import("@smithy/types").IdentityProvider | undefined; + signer: AwsSdkSigV4Signer; + } | { + schemeId: string; + identityProvider: (ipc: IdentityProviderConfig) => import("@smithy/types").IdentityProvider | (() => Promise<{}>); + signer: NoAuthSigner; + })[]; + logger: import("@smithy/types").Logger; + serviceId: string; + urlParser: import("@smithy/types").UrlParser; + utf8Decoder: import("@smithy/types").Decoder; + utf8Encoder: (input: string | Uint8Array) => string; +}; diff --git 
a/amplify/functions/fetchDocuments/node_modules/@aws-sdk/nested-clients/dist-types/submodules/sso-oidc/runtimeExtensions.d.ts b/amplify/functions/fetchDocuments/node_modules/@aws-sdk/nested-clients/dist-types/submodules/sso-oidc/runtimeExtensions.d.ts new file mode 100644 index 0000000..1bdf704 --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@aws-sdk/nested-clients/dist-types/submodules/sso-oidc/runtimeExtensions.d.ts @@ -0,0 +1,17 @@ +import { SSOOIDCExtensionConfiguration } from "./extensionConfiguration"; +/** + * @public + */ +export interface RuntimeExtension { + configure(extensionConfiguration: SSOOIDCExtensionConfiguration): void; +} +/** + * @public + */ +export interface RuntimeExtensionsConfig { + extensions: RuntimeExtension[]; +} +/** + * @internal + */ +export declare const resolveRuntimeExtensions: (runtimeConfig: any, extensions: RuntimeExtension[]) => any; diff --git a/amplify/functions/fetchDocuments/node_modules/@aws-sdk/nested-clients/dist-types/submodules/sts/STS.d.ts b/amplify/functions/fetchDocuments/node_modules/@aws-sdk/nested-clients/dist-types/submodules/sts/STS.d.ts new file mode 100644 index 0000000..bee83a5 --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@aws-sdk/nested-clients/dist-types/submodules/sts/STS.d.ts @@ -0,0 +1,27 @@ +import { HttpHandlerOptions as __HttpHandlerOptions } from "@smithy/types"; +import { AssumeRoleCommandInput, AssumeRoleCommandOutput } from "./commands/AssumeRoleCommand"; +import { AssumeRoleWithWebIdentityCommandInput, AssumeRoleWithWebIdentityCommandOutput } from "./commands/AssumeRoleWithWebIdentityCommand"; +import { STSClient } from "./STSClient"; +export interface STS { + /** + * @see {@link AssumeRoleCommand} + */ + assumeRole(args: AssumeRoleCommandInput, options?: __HttpHandlerOptions): Promise; + assumeRole(args: AssumeRoleCommandInput, cb: (err: any, data?: AssumeRoleCommandOutput) => void): void; + assumeRole(args: AssumeRoleCommandInput, options: 
__HttpHandlerOptions, cb: (err: any, data?: AssumeRoleCommandOutput) => void): void; + /** + * @see {@link AssumeRoleWithWebIdentityCommand} + */ + assumeRoleWithWebIdentity(args: AssumeRoleWithWebIdentityCommandInput, options?: __HttpHandlerOptions): Promise; + assumeRoleWithWebIdentity(args: AssumeRoleWithWebIdentityCommandInput, cb: (err: any, data?: AssumeRoleWithWebIdentityCommandOutput) => void): void; + assumeRoleWithWebIdentity(args: AssumeRoleWithWebIdentityCommandInput, options: __HttpHandlerOptions, cb: (err: any, data?: AssumeRoleWithWebIdentityCommandOutput) => void): void; +} +/** + * Security Token Service + *

Security Token Service (STS) enables you to request temporary, limited-privilege + * credentials for users. This guide provides descriptions of the STS API. For + * more information about using this service, see Temporary Security Credentials.

+ * @public + */ +export declare class STS extends STSClient implements STS { +} diff --git a/amplify/functions/fetchDocuments/node_modules/@aws-sdk/nested-clients/dist-types/submodules/sts/STSClient.d.ts b/amplify/functions/fetchDocuments/node_modules/@aws-sdk/nested-clients/dist-types/submodules/sts/STSClient.d.ts new file mode 100644 index 0000000..bd21c4b --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@aws-sdk/nested-clients/dist-types/submodules/sts/STSClient.d.ts @@ -0,0 +1,192 @@ +import { HostHeaderInputConfig, HostHeaderResolvedConfig } from "@aws-sdk/middleware-host-header"; +import { UserAgentInputConfig, UserAgentResolvedConfig } from "@aws-sdk/middleware-user-agent"; +import { RegionInputConfig, RegionResolvedConfig } from "@smithy/config-resolver"; +import { EndpointInputConfig, EndpointResolvedConfig } from "@smithy/middleware-endpoint"; +import { RetryInputConfig, RetryResolvedConfig } from "@smithy/middleware-retry"; +import { HttpHandlerUserInput as __HttpHandlerUserInput } from "@smithy/protocol-http"; +import { Client as __Client, DefaultsMode as __DefaultsMode, SmithyConfiguration as __SmithyConfiguration, SmithyResolvedConfiguration as __SmithyResolvedConfiguration } from "@smithy/smithy-client"; +import { AwsCredentialIdentityProvider, BodyLengthCalculator as __BodyLengthCalculator, CheckOptionalClientConfig as __CheckOptionalClientConfig, ChecksumConstructor as __ChecksumConstructor, Decoder as __Decoder, Encoder as __Encoder, HashConstructor as __HashConstructor, HttpHandlerOptions as __HttpHandlerOptions, Logger as __Logger, Provider as __Provider, Provider, StreamCollector as __StreamCollector, UrlParser as __UrlParser, UserAgent as __UserAgent } from "@smithy/types"; +import { HttpAuthSchemeInputConfig, HttpAuthSchemeResolvedConfig } from "./auth/httpAuthSchemeProvider"; +import { AssumeRoleCommandInput, AssumeRoleCommandOutput } from "./commands/AssumeRoleCommand"; +import { AssumeRoleWithWebIdentityCommandInput, 
AssumeRoleWithWebIdentityCommandOutput } from "./commands/AssumeRoleWithWebIdentityCommand"; +import { ClientInputEndpointParameters, ClientResolvedEndpointParameters, EndpointParameters } from "./endpoint/EndpointParameters"; +import { RuntimeExtension, RuntimeExtensionsConfig } from "./runtimeExtensions"; +export { __Client }; +/** + * @public + */ +export type ServiceInputTypes = AssumeRoleCommandInput | AssumeRoleWithWebIdentityCommandInput; +/** + * @public + */ +export type ServiceOutputTypes = AssumeRoleCommandOutput | AssumeRoleWithWebIdentityCommandOutput; +/** + * @public + */ +export interface ClientDefaults extends Partial<__SmithyConfiguration<__HttpHandlerOptions>> { + /** + * The HTTP handler to use or its constructor options. Fetch in browser and Https in Nodejs. + */ + requestHandler?: __HttpHandlerUserInput; + /** + * A constructor for a class implementing the {@link @smithy/types#ChecksumConstructor} interface + * that computes the SHA-256 HMAC or checksum of a string or binary buffer. + * @internal + */ + sha256?: __ChecksumConstructor | __HashConstructor; + /** + * The function that will be used to convert strings into HTTP endpoints. + * @internal + */ + urlParser?: __UrlParser; + /** + * A function that can calculate the length of a request body. + * @internal + */ + bodyLengthChecker?: __BodyLengthCalculator; + /** + * A function that converts a stream into an array of bytes. + * @internal + */ + streamCollector?: __StreamCollector; + /** + * The function that will be used to convert a base64-encoded string to a byte array. + * @internal + */ + base64Decoder?: __Decoder; + /** + * The function that will be used to convert binary data to a base64-encoded string. + * @internal + */ + base64Encoder?: __Encoder; + /** + * The function that will be used to convert a UTF8-encoded string to a byte array. + * @internal + */ + utf8Decoder?: __Decoder; + /** + * The function that will be used to convert binary data to a UTF-8 encoded string. 
+ * @internal + */ + utf8Encoder?: __Encoder; + /** + * The runtime environment. + * @internal + */ + runtime?: string; + /** + * Disable dynamically changing the endpoint of the client based on the hostPrefix + * trait of an operation. + */ + disableHostPrefix?: boolean; + /** + * Unique service identifier. + * @internal + */ + serviceId?: string; + /** + * Enables IPv6/IPv4 dualstack endpoint. + */ + useDualstackEndpoint?: boolean | __Provider; + /** + * Enables FIPS compatible endpoints. + */ + useFipsEndpoint?: boolean | __Provider; + /** + * The AWS region to which this client will send requests + */ + region?: string | __Provider; + /** + * Setting a client profile is similar to setting a value for the + * AWS_PROFILE environment variable. Setting a profile on a client + * in code only affects the single client instance, unlike AWS_PROFILE. + * + * When set, and only for environments where an AWS configuration + * file exists, fields configurable by this file will be retrieved + * from the specified profile within that file. + * Conflicting code configuration and environment variables will + * still have higher priority. + * + * For client credential resolution that involves checking the AWS + * configuration file, the client's profile (this value) will be + * used unless a different profile is set in the credential + * provider options. + * + */ + profile?: string; + /** + * The provider populating default tracking information to be sent with `user-agent`, `x-amz-user-agent` header + * @internal + */ + defaultUserAgentProvider?: Provider<__UserAgent>; + /** + * Default credentials provider; Not available in browser runtime. + * @deprecated + * @internal + */ + credentialDefaultProvider?: (input: any) => AwsCredentialIdentityProvider; + /** + * Value for how many times a request will be made at most in case of retry. + */ + maxAttempts?: number | __Provider; + /** + * Specifies which retry algorithm to use. 
+ * @see https://docs.aws.amazon.com/AWSJavaScriptSDK/v3/latest/Package/-smithy-util-retry/Enum/RETRY_MODES/ + * + */ + retryMode?: string | __Provider; + /** + * Optional logger for logging debug/info/warn/error. + */ + logger?: __Logger; + /** + * Optional extensions + */ + extensions?: RuntimeExtension[]; + /** + * The {@link @smithy/smithy-client#DefaultsMode} that will be used to determine how certain default configuration options are resolved in the SDK. + */ + defaultsMode?: __DefaultsMode | __Provider<__DefaultsMode>; +} +/** + * @public + */ +export type STSClientConfigType = Partial<__SmithyConfiguration<__HttpHandlerOptions>> & ClientDefaults & UserAgentInputConfig & RetryInputConfig & RegionInputConfig & HostHeaderInputConfig & EndpointInputConfig & HttpAuthSchemeInputConfig & ClientInputEndpointParameters; +/** + * @public + * + * The configuration interface of STSClient class constructor that set the region, credentials and other options. + */ +export interface STSClientConfig extends STSClientConfigType { +} +/** + * @public + */ +export type STSClientResolvedConfigType = __SmithyResolvedConfiguration<__HttpHandlerOptions> & Required & RuntimeExtensionsConfig & UserAgentResolvedConfig & RetryResolvedConfig & RegionResolvedConfig & HostHeaderResolvedConfig & EndpointResolvedConfig & HttpAuthSchemeResolvedConfig & ClientResolvedEndpointParameters; +/** + * @public + * + * The resolved configuration interface of STSClient class. This is resolved and normalized from the {@link STSClientConfig | constructor configuration interface}. + */ +export interface STSClientResolvedConfig extends STSClientResolvedConfigType { +} +/** + * Security Token Service + *

Security Token Service (STS) enables you to request temporary, limited-privilege + * credentials for users. This guide provides descriptions of the STS API. For + * more information about using this service, see Temporary Security Credentials.

+ * @public + */ +export declare class STSClient extends __Client<__HttpHandlerOptions, ServiceInputTypes, ServiceOutputTypes, STSClientResolvedConfig> { + /** + * The resolved configuration of STSClient class. This is resolved and normalized from the {@link STSClientConfig | constructor configuration interface}. + */ + readonly config: STSClientResolvedConfig; + constructor(...[configuration]: __CheckOptionalClientConfig); + /** + * Destroy underlying resources, like sockets. It's usually not necessary to do this. + * However in Node.js, it's best to explicitly shut down the client's agent when it is no longer needed. + * Otherwise, sockets might stay open for quite a long time before the server terminates them. + */ + destroy(): void; +} diff --git a/amplify/functions/fetchDocuments/node_modules/@aws-sdk/nested-clients/dist-types/submodules/sts/auth/httpAuthExtensionConfiguration.d.ts b/amplify/functions/fetchDocuments/node_modules/@aws-sdk/nested-clients/dist-types/submodules/sts/auth/httpAuthExtensionConfiguration.d.ts new file mode 100644 index 0000000..1066c88 --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@aws-sdk/nested-clients/dist-types/submodules/sts/auth/httpAuthExtensionConfiguration.d.ts @@ -0,0 +1,29 @@ +import { AwsCredentialIdentity, AwsCredentialIdentityProvider, HttpAuthScheme } from "@smithy/types"; +import { STSHttpAuthSchemeProvider } from "./httpAuthSchemeProvider"; +/** + * @internal + */ +export interface HttpAuthExtensionConfiguration { + setHttpAuthScheme(httpAuthScheme: HttpAuthScheme): void; + httpAuthSchemes(): HttpAuthScheme[]; + setHttpAuthSchemeProvider(httpAuthSchemeProvider: STSHttpAuthSchemeProvider): void; + httpAuthSchemeProvider(): STSHttpAuthSchemeProvider; + setCredentials(credentials: AwsCredentialIdentity | AwsCredentialIdentityProvider): void; + credentials(): AwsCredentialIdentity | AwsCredentialIdentityProvider | undefined; +} +/** + * @internal + */ +export type HttpAuthRuntimeConfig = Partial<{ + 
httpAuthSchemes: HttpAuthScheme[]; + httpAuthSchemeProvider: STSHttpAuthSchemeProvider; + credentials: AwsCredentialIdentity | AwsCredentialIdentityProvider; +}>; +/** + * @internal + */ +export declare const getHttpAuthExtensionConfiguration: (runtimeConfig: HttpAuthRuntimeConfig) => HttpAuthExtensionConfiguration; +/** + * @internal + */ +export declare const resolveHttpAuthRuntimeConfig: (config: HttpAuthExtensionConfiguration) => HttpAuthRuntimeConfig; diff --git a/amplify/functions/fetchDocuments/node_modules/@aws-sdk/nested-clients/dist-types/submodules/sts/auth/httpAuthSchemeProvider.d.ts b/amplify/functions/fetchDocuments/node_modules/@aws-sdk/nested-clients/dist-types/submodules/sts/auth/httpAuthSchemeProvider.d.ts new file mode 100644 index 0000000..8e39cbe --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@aws-sdk/nested-clients/dist-types/submodules/sts/auth/httpAuthSchemeProvider.d.ts @@ -0,0 +1,85 @@ +import { AwsSdkSigV4AuthInputConfig, AwsSdkSigV4AuthResolvedConfig, AwsSdkSigV4PreviouslyResolved } from "@aws-sdk/core"; +import { Client, HandlerExecutionContext, HttpAuthScheme, HttpAuthSchemeParameters, HttpAuthSchemeParametersProvider, HttpAuthSchemeProvider, Provider } from "@smithy/types"; +import { STSClientResolvedConfig } from "../STSClient"; +/** + * @internal + */ +export interface STSHttpAuthSchemeParameters extends HttpAuthSchemeParameters { + region?: string; +} +/** + * @internal + */ +export interface STSHttpAuthSchemeParametersProvider extends HttpAuthSchemeParametersProvider { +} +/** + * @internal + */ +export declare const defaultSTSHttpAuthSchemeParametersProvider: (config: STSClientResolvedConfig, context: HandlerExecutionContext, input: object) => Promise; +/** + * @internal + */ +export interface STSHttpAuthSchemeProvider extends HttpAuthSchemeProvider { +} +/** + * @internal + */ +export declare const defaultSTSHttpAuthSchemeProvider: STSHttpAuthSchemeProvider; +export interface StsAuthInputConfig { +} +export 
interface StsAuthResolvedConfig { + /** + * Reference to STSClient class constructor. + * @internal + */ + stsClientCtor: new (clientConfig: any) => Client; +} +export declare const resolveStsAuthConfig: (input: T & StsAuthInputConfig) => T & StsAuthResolvedConfig; +/** + * @internal + */ +export interface HttpAuthSchemeInputConfig extends StsAuthInputConfig, AwsSdkSigV4AuthInputConfig { + /** + * A comma-separated list of case-sensitive auth scheme names. + * An auth scheme name is a fully qualified auth scheme ID with the namespace prefix trimmed. + * For example, the auth scheme with ID aws.auth#sigv4 is named sigv4. + * @public + */ + authSchemePreference?: string[] | Provider; + /** + * Configuration of HttpAuthSchemes for a client which provides default identity providers and signers per auth scheme. + * @internal + */ + httpAuthSchemes?: HttpAuthScheme[]; + /** + * Configuration of an HttpAuthSchemeProvider for a client which resolves which HttpAuthScheme to use. + * @internal + */ + httpAuthSchemeProvider?: STSHttpAuthSchemeProvider; +} +/** + * @internal + */ +export interface HttpAuthSchemeResolvedConfig extends StsAuthResolvedConfig, AwsSdkSigV4AuthResolvedConfig { + /** + * A comma-separated list of case-sensitive auth scheme names. + * An auth scheme name is a fully qualified auth scheme ID with the namespace prefix trimmed. + * For example, the auth scheme with ID aws.auth#sigv4 is named sigv4. + * @public + */ + readonly authSchemePreference: Provider; + /** + * Configuration of HttpAuthSchemes for a client which provides default identity providers and signers per auth scheme. + * @internal + */ + readonly httpAuthSchemes: HttpAuthScheme[]; + /** + * Configuration of an HttpAuthSchemeProvider for a client which resolves which HttpAuthScheme to use. 
+ * @internal + */ + readonly httpAuthSchemeProvider: STSHttpAuthSchemeProvider; +} +/** + * @internal + */ +export declare const resolveHttpAuthSchemeConfig: (config: T & HttpAuthSchemeInputConfig & AwsSdkSigV4PreviouslyResolved) => T & HttpAuthSchemeResolvedConfig; diff --git a/amplify/functions/fetchDocuments/node_modules/@aws-sdk/nested-clients/dist-types/submodules/sts/commands/AssumeRoleCommand.d.ts b/amplify/functions/fetchDocuments/node_modules/@aws-sdk/nested-clients/dist-types/submodules/sts/commands/AssumeRoleCommand.d.ts new file mode 100644 index 0000000..f9e6ccd --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@aws-sdk/nested-clients/dist-types/submodules/sts/commands/AssumeRoleCommand.d.ts @@ -0,0 +1,269 @@ +import { Command as $Command } from "@smithy/smithy-client"; +import { MetadataBearer as __MetadataBearer } from "@smithy/types"; +import { AssumeRoleRequest, AssumeRoleResponse } from "../models/models_0"; +import { ServiceInputTypes, ServiceOutputTypes, STSClientResolvedConfig } from "../STSClient"; +/** + * @public + */ +export type { __MetadataBearer }; +export { $Command }; +/** + * @public + * + * The input for {@link AssumeRoleCommand}. + */ +export interface AssumeRoleCommandInput extends AssumeRoleRequest { +} +/** + * @public + * + * The output of {@link AssumeRoleCommand}. + */ +export interface AssumeRoleCommandOutput extends AssumeRoleResponse, __MetadataBearer { +} +declare const AssumeRoleCommand_base: { + new (input: AssumeRoleCommandInput): import("@smithy/smithy-client").CommandImpl; + new (__0_0: AssumeRoleCommandInput): import("@smithy/smithy-client").CommandImpl; + getEndpointParameterInstructions(): import("@smithy/middleware-endpoint").EndpointParameterInstructions; +}; +/** + *

Returns a set of temporary security credentials that you can use to access Amazon Web Services + * resources. These temporary credentials consist of an access key ID, a secret access key, + * and a security token. Typically, you use AssumeRole within your account or for + * cross-account access. For a comparison of AssumeRole with other API operations + * that produce temporary credentials, see Requesting Temporary Security + * Credentials and Compare STS + * credentials in the IAM User Guide.

+ *

+ * Permissions + *

+ *

The temporary security credentials created by AssumeRole can be used to + * make API calls to any Amazon Web Services service with the following exception: You cannot call the + * Amazon Web Services STS GetFederationToken or GetSessionToken API + * operations.

+ *

(Optional) You can pass inline or managed session policies to this operation. You can + * pass a single JSON policy document to use as an inline session policy. You can also specify + * up to 10 managed policy Amazon Resource Names (ARNs) to use as managed session policies. + * The plaintext that you use for both inline and managed session policies can't exceed 2,048 + * characters. Passing policies to this operation returns new + * temporary credentials. The resulting session's permissions are the intersection of the + * role's identity-based policy and the session policies. You can use the role's temporary + * credentials in subsequent Amazon Web Services API calls to access resources in the account that owns + * the role. You cannot use session policies to grant more permissions than those allowed + * by the identity-based policy of the role that is being assumed. For more information, see + * Session + * Policies in the IAM User Guide.

+ *

When you create a role, you create two policies: a role trust policy that specifies + * who can assume the role, and a permissions policy that specifies + * what can be done with the role. You specify the trusted principal + * that is allowed to assume the role in the role trust policy.

+ *

To assume a role from a different account, your Amazon Web Services account must be trusted by the + * role. The trust relationship is defined in the role's trust policy when the role is + * created. That trust policy states which accounts are allowed to delegate that access to + * users in the account.

+ *

A user who wants to access a role in a different account must also have permissions that + * are delegated from the account administrator. The administrator must attach a policy that + * allows the user to call AssumeRole for the ARN of the role in the other + * account.

+ *

To allow a user to assume a role in the same account, you can do either of the + * following:

+ *
    + *
  • + *

    Attach a policy to the user that allows the user to call AssumeRole + * (as long as the role's trust policy trusts the account).

    + *
  • + *
  • + *

    Add the user as a principal directly in the role's trust policy.

    + *
  • + *
+ *

You can do either because the role’s trust policy acts as an IAM resource-based + * policy. When a resource-based policy grants access to a principal in the same account, no + * additional identity-based policy is required. For more information about trust policies and + * resource-based policies, see IAM Policies in the + * IAM User Guide.

+ *

+ * Tags + *

+ *

(Optional) You can pass tag key-value pairs to your session. These tags are called + * session tags. For more information about session tags, see Passing Session Tags in STS in the + * IAM User Guide.

+ *

An administrator must grant you the permissions necessary to pass session tags. The + * administrator can also create granular permissions to allow you to pass only specific + * session tags. For more information, see Tutorial: Using Tags + * for Attribute-Based Access Control in the + * IAM User Guide.

+ *

You can set the session tags as transitive. Transitive tags persist during role + * chaining. For more information, see Chaining Roles + * with Session Tags in the IAM User Guide.

+ *

+ * Using MFA with AssumeRole + *

+ *

(Optional) You can include multi-factor authentication (MFA) information when you call + * AssumeRole. This is useful for cross-account scenarios to ensure that the + * user that assumes the role has been authenticated with an Amazon Web Services MFA device. In that + * scenario, the trust policy of the role being assumed includes a condition that tests for + * MFA authentication. If the caller does not include valid MFA information, the request to + * assume the role is denied. The condition in a trust policy that tests for MFA + * authentication might look like the following example.

+ *

+ * "Condition": \{"Bool": \{"aws:MultiFactorAuthPresent": true\}\} + *

+ *

For more information, see Configuring MFA-Protected API Access + * in the IAM User Guide guide.

+ *

To use MFA with AssumeRole, you pass values for the + * SerialNumber and TokenCode parameters. The + * SerialNumber value identifies the user's hardware or virtual MFA device. + * The TokenCode is the time-based one-time password (TOTP) that the MFA device + * produces.

+ * @example + * Use a bare-bones client and the command you need to make an API call. + * ```javascript + * import { STSClient, AssumeRoleCommand } from "@aws-sdk/client-sts"; // ES Modules import + * // const { STSClient, AssumeRoleCommand } = require("@aws-sdk/client-sts"); // CommonJS import + * const client = new STSClient(config); + * const input = { // AssumeRoleRequest + * RoleArn: "STRING_VALUE", // required + * RoleSessionName: "STRING_VALUE", // required + * PolicyArns: [ // policyDescriptorListType + * { // PolicyDescriptorType + * arn: "STRING_VALUE", + * }, + * ], + * Policy: "STRING_VALUE", + * DurationSeconds: Number("int"), + * Tags: [ // tagListType + * { // Tag + * Key: "STRING_VALUE", // required + * Value: "STRING_VALUE", // required + * }, + * ], + * TransitiveTagKeys: [ // tagKeyListType + * "STRING_VALUE", + * ], + * ExternalId: "STRING_VALUE", + * SerialNumber: "STRING_VALUE", + * TokenCode: "STRING_VALUE", + * SourceIdentity: "STRING_VALUE", + * ProvidedContexts: [ // ProvidedContextsListType + * { // ProvidedContext + * ProviderArn: "STRING_VALUE", + * ContextAssertion: "STRING_VALUE", + * }, + * ], + * }; + * const command = new AssumeRoleCommand(input); + * const response = await client.send(command); + * // { // AssumeRoleResponse + * // Credentials: { // Credentials + * // AccessKeyId: "STRING_VALUE", // required + * // SecretAccessKey: "STRING_VALUE", // required + * // SessionToken: "STRING_VALUE", // required + * // Expiration: new Date("TIMESTAMP"), // required + * // }, + * // AssumedRoleUser: { // AssumedRoleUser + * // AssumedRoleId: "STRING_VALUE", // required + * // Arn: "STRING_VALUE", // required + * // }, + * // PackedPolicySize: Number("int"), + * // SourceIdentity: "STRING_VALUE", + * // }; + * + * ``` + * + * @param AssumeRoleCommandInput - {@link AssumeRoleCommandInput} + * @returns {@link AssumeRoleCommandOutput} + * @see {@link AssumeRoleCommandInput} for command's `input` shape. 
+ * @see {@link AssumeRoleCommandOutput} for command's `response` shape. + * @see {@link STSClientResolvedConfig | config} for STSClient's `config` shape. + * + * @throws {@link ExpiredTokenException} (client fault) + *

The web identity token that was passed is expired or is not valid. Get a new identity + * token from the identity provider and then retry the request.

+ * + * @throws {@link MalformedPolicyDocumentException} (client fault) + *

The request was rejected because the policy document was malformed. The error message + * describes the specific error.

+ * + * @throws {@link PackedPolicyTooLargeException} (client fault) + *

The request was rejected because the total packed size of the session policies and + * session tags combined was too large. An Amazon Web Services conversion compresses the session policy + * document, session policy ARNs, and session tags into a packed binary format that has a + * separate limit. The error message indicates by percentage how close the policies and + * tags are to the upper size limit. For more information, see Passing Session Tags in STS in + * the IAM User Guide.

+ *

You could receive this error even though you meet other defined session policy and + * session tag limits. For more information, see IAM and STS Entity Character Limits in the IAM User + * Guide.

+ * + * @throws {@link RegionDisabledException} (client fault) + *

STS is not activated in the requested region for the account that is being asked to + * generate credentials. The account administrator must use the IAM console to activate + * STS in that region. For more information, see Activating and + * Deactivating STS in an Amazon Web Services Region in the IAM User + * Guide.

+ * + * @throws {@link STSServiceException} + *

Base exception class for all service exceptions from STS service.

+ * + * + * @example To assume a role + * ```javascript + * // + * const input = { + * ExternalId: "123ABC", + * Policy: `{"Version":"2012-10-17","Statement":[{"Sid":"Stmt1","Effect":"Allow","Action":"s3:ListAllMyBuckets","Resource":"*"}]}`, + * RoleArn: "arn:aws:iam::123456789012:role/demo", + * RoleSessionName: "testAssumeRoleSession", + * Tags: [ + * { + * Key: "Project", + * Value: "Unicorn" + * }, + * { + * Key: "Team", + * Value: "Automation" + * }, + * { + * Key: "Cost-Center", + * Value: "12345" + * } + * ], + * TransitiveTagKeys: [ + * "Project", + * "Cost-Center" + * ] + * }; + * const command = new AssumeRoleCommand(input); + * const response = await client.send(command); + * /* response is + * { + * AssumedRoleUser: { + * Arn: "arn:aws:sts::123456789012:assumed-role/demo/Bob", + * AssumedRoleId: "ARO123EXAMPLE123:Bob" + * }, + * Credentials: { + * AccessKeyId: "AKIAIOSFODNN7EXAMPLE", + * Expiration: "2011-07-15T23:28:33.359Z", + * SecretAccessKey: "wJalrXUtnFEMI/K7MDENG/bPxRfiCYzEXAMPLEKEY", + * SessionToken: "AQoDYXdzEPT//////////wEXAMPLEtc764bNrC9SAPBSM22wDOk4x4HIZ8j4FZTwdQWLWsKWHGBuFqwAeMicRXmxfpSPfIeoIYRqTflfKD8YUuwthAx7mSEI/qkPpKPi/kMcGdQrmGdeehM4IC1NtBmUpp2wUE8phUZampKsburEDy0KPkyQDYwT7WZ0wq5VSXDvp75YU9HFvlRd8Tx6q6fE8YQcHNVXAkiY9q6d+xo0rKwT38xVqr7ZD0u0iPPkUL64lIZbqBAz+scqKmlzm8FDrypNC9Yjc8fPOLn9FX9KSYvKTr4rvx3iSIlTJabIQwj2ICCR/oLxBA==" + * }, + * PackedPolicySize: 8 + * } + * *\/ + * ``` + * + * @public + */ +export declare class AssumeRoleCommand extends AssumeRoleCommand_base { + /** @internal type navigation helper, not in runtime. 
*/ + protected static __types: { + api: { + input: AssumeRoleRequest; + output: AssumeRoleResponse; + }; + sdk: { + input: AssumeRoleCommandInput; + output: AssumeRoleCommandOutput; + }; + }; +} diff --git a/amplify/functions/fetchDocuments/node_modules/@aws-sdk/nested-clients/dist-types/submodules/sts/commands/AssumeRoleWithWebIdentityCommand.d.ts b/amplify/functions/fetchDocuments/node_modules/@aws-sdk/nested-clients/dist-types/submodules/sts/commands/AssumeRoleWithWebIdentityCommand.d.ts new file mode 100644 index 0000000..58d7df8 --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@aws-sdk/nested-clients/dist-types/submodules/sts/commands/AssumeRoleWithWebIdentityCommand.d.ts @@ -0,0 +1,288 @@ +import { Command as $Command } from "@smithy/smithy-client"; +import { MetadataBearer as __MetadataBearer } from "@smithy/types"; +import { AssumeRoleWithWebIdentityRequest, AssumeRoleWithWebIdentityResponse } from "../models/models_0"; +import { ServiceInputTypes, ServiceOutputTypes, STSClientResolvedConfig } from "../STSClient"; +/** + * @public + */ +export type { __MetadataBearer }; +export { $Command }; +/** + * @public + * + * The input for {@link AssumeRoleWithWebIdentityCommand}. + */ +export interface AssumeRoleWithWebIdentityCommandInput extends AssumeRoleWithWebIdentityRequest { +} +/** + * @public + * + * The output of {@link AssumeRoleWithWebIdentityCommand}. + */ +export interface AssumeRoleWithWebIdentityCommandOutput extends AssumeRoleWithWebIdentityResponse, __MetadataBearer { +} +declare const AssumeRoleWithWebIdentityCommand_base: { + new (input: AssumeRoleWithWebIdentityCommandInput): import("@smithy/smithy-client").CommandImpl; + new (__0_0: AssumeRoleWithWebIdentityCommandInput): import("@smithy/smithy-client").CommandImpl; + getEndpointParameterInstructions(): import("@smithy/middleware-endpoint").EndpointParameterInstructions; +}; +/** + *

Returns a set of temporary security credentials for users who have been authenticated in + * a mobile or web application with a web identity provider. Example providers include the + * OAuth 2.0 providers Login with Amazon and Facebook, or any OpenID Connect-compatible + * identity provider such as Google or Amazon Cognito federated identities.

+ * + *

For mobile applications, we recommend that you use Amazon Cognito. You can use Amazon Cognito with the + * Amazon Web Services SDK for iOS Developer Guide and the Amazon Web Services SDK for Android Developer Guide to uniquely + * identify a user. You can also supply the user with a consistent identity throughout the + * lifetime of an application.

+ *

To learn more about Amazon Cognito, see Amazon Cognito identity + * pools in Amazon Cognito Developer Guide.

+ *
+ *

Calling AssumeRoleWithWebIdentity does not require the use of Amazon Web Services + * security credentials. Therefore, you can distribute an application (for example, on mobile + * devices) that requests temporary security credentials without including long-term Amazon Web Services + * credentials in the application. You also don't need to deploy server-based proxy services + * that use long-term Amazon Web Services credentials. Instead, the identity of the caller is validated by + * using a token from the web identity provider. For a comparison of + * AssumeRoleWithWebIdentity with the other API operations that produce + * temporary credentials, see Requesting Temporary Security + * Credentials and Compare STS + * credentials in the IAM User Guide.

+ *

The temporary security credentials returned by this API consist of an access key ID, a + * secret access key, and a security token. Applications can use these temporary security + * credentials to sign calls to Amazon Web Services service API operations.

+ *

+ * Session Duration + *

+ *

By default, the temporary security credentials created by + * AssumeRoleWithWebIdentity last for one hour. However, you can use the + * optional DurationSeconds parameter to specify the duration of your session. + * You can provide a value from 900 seconds (15 minutes) up to the maximum session duration + * setting for the role. This setting can have a value from 1 hour to 12 hours. To learn how + * to view the maximum value for your role, see Update the maximum session duration for a role in the + * IAM User Guide. The maximum session duration limit applies when + * you use the AssumeRole* API operations or the assume-role* CLI + * commands. However the limit does not apply when you use those operations to create a + * console URL. For more information, see Using IAM Roles in the + * IAM User Guide.

+ *

+ * Permissions + *

+ *

The temporary security credentials created by AssumeRoleWithWebIdentity can + * be used to make API calls to any Amazon Web Services service with the following exception: you cannot + * call the STS GetFederationToken or GetSessionToken API + * operations.

+ *

(Optional) You can pass inline or managed session policies to + * this operation. You can pass a single JSON policy document to use as an inline session + * policy. You can also specify up to 10 managed policy Amazon Resource Names (ARNs) to use as + * managed session policies. The plaintext that you use for both inline and managed session + * policies can't exceed 2,048 characters. Passing policies to this operation returns new + * temporary credentials. The resulting session's permissions are the intersection of the + * role's identity-based policy and the session policies. You can use the role's temporary + * credentials in subsequent Amazon Web Services API calls to access resources in the account that owns + * the role. You cannot use session policies to grant more permissions than those allowed + * by the identity-based policy of the role that is being assumed. For more information, see + * Session + * Policies in the IAM User Guide.

+ *

+ * Tags + *

+ *

(Optional) You can configure your IdP to pass attributes into your web identity token as + * session tags. Each session tag consists of a key name and an associated value. For more + * information about session tags, see Passing Session Tags in STS in the + * IAM User Guide.

+ *

You can pass up to 50 session tags. The plaintext session tag keys can’t exceed 128 + * characters and the values can’t exceed 256 characters. For these and additional limits, see + * IAM + * and STS Character Limits in the IAM User Guide.

+ * + *

An Amazon Web Services conversion compresses the passed inline session policy, managed policy ARNs, + * and session tags into a packed binary format that has a separate limit. Your request can + * fail for this limit even if your plaintext meets the other requirements. The + * PackedPolicySize response element indicates by percentage how close the + * policies and tags for your request are to the upper size limit.

+ *
+ *

You can pass a session tag with the same key as a tag that is attached to the role. When + * you do, the session tag overrides the role tag with the same key.

+ *

An administrator must grant you the permissions necessary to pass session tags. The + * administrator can also create granular permissions to allow you to pass only specific + * session tags. For more information, see Tutorial: Using Tags + * for Attribute-Based Access Control in the + * IAM User Guide.

+ *

You can set the session tags as transitive. Transitive tags persist during role + * chaining. For more information, see Chaining Roles + * with Session Tags in the IAM User Guide.

+ *

+ * Identities + *

+ *

Before your application can call AssumeRoleWithWebIdentity, you must have + * an identity token from a supported identity provider and create a role that the application + * can assume. The role that your application assumes must trust the identity provider that is + * associated with the identity token. In other words, the identity provider must be specified + * in the role's trust policy.

+ * + *

Calling AssumeRoleWithWebIdentity can result in an entry in your + * CloudTrail logs. The entry includes the Subject of + * the provided web identity token. We recommend that you avoid using any personally + * identifiable information (PII) in this field. For example, you could instead use a GUID + * or a pairwise identifier, as suggested + * in the OIDC specification.

+ *
+ *

For more information about how to use OIDC federation and the + * AssumeRoleWithWebIdentity API, see the following resources:

+ * + * @example + * Use a bare-bones client and the command you need to make an API call. + * ```javascript + * import { STSClient, AssumeRoleWithWebIdentityCommand } from "@aws-sdk/client-sts"; // ES Modules import + * // const { STSClient, AssumeRoleWithWebIdentityCommand } = require("@aws-sdk/client-sts"); // CommonJS import + * const client = new STSClient(config); + * const input = { // AssumeRoleWithWebIdentityRequest + * RoleArn: "STRING_VALUE", // required + * RoleSessionName: "STRING_VALUE", // required + * WebIdentityToken: "STRING_VALUE", // required + * ProviderId: "STRING_VALUE", + * PolicyArns: [ // policyDescriptorListType + * { // PolicyDescriptorType + * arn: "STRING_VALUE", + * }, + * ], + * Policy: "STRING_VALUE", + * DurationSeconds: Number("int"), + * }; + * const command = new AssumeRoleWithWebIdentityCommand(input); + * const response = await client.send(command); + * // { // AssumeRoleWithWebIdentityResponse + * // Credentials: { // Credentials + * // AccessKeyId: "STRING_VALUE", // required + * // SecretAccessKey: "STRING_VALUE", // required + * // SessionToken: "STRING_VALUE", // required + * // Expiration: new Date("TIMESTAMP"), // required + * // }, + * // SubjectFromWebIdentityToken: "STRING_VALUE", + * // AssumedRoleUser: { // AssumedRoleUser + * // AssumedRoleId: "STRING_VALUE", // required + * // Arn: "STRING_VALUE", // required + * // }, + * // PackedPolicySize: Number("int"), + * // Provider: "STRING_VALUE", + * // Audience: "STRING_VALUE", + * // SourceIdentity: "STRING_VALUE", + * // }; + * + * ``` + * + * @param AssumeRoleWithWebIdentityCommandInput - {@link AssumeRoleWithWebIdentityCommandInput} + * @returns {@link AssumeRoleWithWebIdentityCommandOutput} + * @see {@link AssumeRoleWithWebIdentityCommandInput} for command's `input` shape. + * @see {@link AssumeRoleWithWebIdentityCommandOutput} for command's `response` shape. + * @see {@link STSClientResolvedConfig | config} for STSClient's `config` shape. 
+ * + * @throws {@link ExpiredTokenException} (client fault) + *

The web identity token that was passed is expired or is not valid. Get a new identity + * token from the identity provider and then retry the request.

+ * + * @throws {@link IDPCommunicationErrorException} (client fault) + *

The request could not be fulfilled because the identity provider (IDP) that was asked + * to verify the incoming identity token could not be reached. This is often a transient + * error caused by network conditions. Retry the request a limited number of times so that + * you don't exceed the request rate. If the error persists, the identity provider might be + * down or not responding.

+ * + * @throws {@link IDPRejectedClaimException} (client fault) + *

The identity provider (IdP) reported that authentication failed. This might be because + * the claim is invalid.

+ *

If this error is returned for the AssumeRoleWithWebIdentity operation, it + * can also mean that the claim has expired or has been explicitly revoked.

+ * + * @throws {@link InvalidIdentityTokenException} (client fault) + *

The web identity token that was passed could not be validated by Amazon Web Services. Get a new + * identity token from the identity provider and then retry the request.

+ * + * @throws {@link MalformedPolicyDocumentException} (client fault) + *

The request was rejected because the policy document was malformed. The error message + * describes the specific error.

+ * + * @throws {@link PackedPolicyTooLargeException} (client fault) + *

The request was rejected because the total packed size of the session policies and + * session tags combined was too large. An Amazon Web Services conversion compresses the session policy + * document, session policy ARNs, and session tags into a packed binary format that has a + * separate limit. The error message indicates by percentage how close the policies and + * tags are to the upper size limit. For more information, see Passing Session Tags in STS in + * the IAM User Guide.

+ *

You could receive this error even though you meet other defined session policy and + * session tag limits. For more information, see IAM and STS Entity Character Limits in the IAM User + * Guide.

+ * + * @throws {@link RegionDisabledException} (client fault) + *

STS is not activated in the requested region for the account that is being asked to + * generate credentials. The account administrator must use the IAM console to activate + * STS in that region. For more information, see Activating and + * Deactivating STS in an Amazon Web Services Region in the IAM User + * Guide.

+ * + * @throws {@link STSServiceException} + *

Base exception class for all service exceptions from STS service.

+ * + * + * @example To assume a role as an OpenID Connect-federated user + * ```javascript + * // + * const input = { + * DurationSeconds: 3600, + * Policy: `{"Version":"2012-10-17","Statement":[{"Sid":"Stmt1","Effect":"Allow","Action":"s3:ListAllMyBuckets","Resource":"*"}]}`, + * ProviderId: "www.amazon.com", + * RoleArn: "arn:aws:iam::123456789012:role/FederatedWebIdentityRole", + * RoleSessionName: "app1", + * WebIdentityToken: "Atza%7CIQEBLjAsAhRFiXuWpUXuRvQ9PZL3GMFcYevydwIUFAHZwXZXXXXXXXXJnrulxKDHwy87oGKPznh0D6bEQZTSCzyoCtL_8S07pLpr0zMbn6w1lfVZKNTBdDansFBmtGnIsIapjI6xKR02Yc_2bQ8LZbUXSGm6Ry6_BG7PrtLZtj_dfCTj92xNGed-CrKqjG7nPBjNIL016GGvuS5gSvPRUxWES3VYfm1wl7WTI7jn-Pcb6M-buCgHhFOzTQxod27L9CqnOLio7N3gZAGpsp6n1-AJBOCJckcyXe2c6uD0srOJeZlKUm2eTDVMf8IehDVI0r1QOnTV6KzzAI3OY87Vd_cVMQ" + * }; + * const command = new AssumeRoleWithWebIdentityCommand(input); + * const response = await client.send(command); + * /* response is + * { + * AssumedRoleUser: { + * Arn: "arn:aws:sts::123456789012:assumed-role/FederatedWebIdentityRole/app1", + * AssumedRoleId: "AROACLKWSDQRAOEXAMPLE:app1" + * }, + * Audience: "client.5498841531868486423.1548@apps.example.com", + * Credentials: { + * AccessKeyId: "AKIAIOSFODNN7EXAMPLE", + * Expiration: "2014-10-24T23:00:23Z", + * SecretAccessKey: "wJalrXUtnFEMI/K7MDENG/bPxRfiCYzEXAMPLEKEY", + * SessionToken: "AQoDYXdzEE0a8ANXXXXXXXXNO1ewxE5TijQyp+IEXAMPLE" + * }, + * PackedPolicySize: 123, + * Provider: "www.amazon.com", + * SubjectFromWebIdentityToken: "amzn1.account.AF6RHO7KZU5XRVQJGXK6HEXAMPLE" + * } + * *\/ + * ``` + * + * @public + */ +export declare class AssumeRoleWithWebIdentityCommand extends AssumeRoleWithWebIdentityCommand_base { + /** @internal type navigation helper, not in runtime. 
*/ + protected static __types: { + api: { + input: AssumeRoleWithWebIdentityRequest; + output: AssumeRoleWithWebIdentityResponse; + }; + sdk: { + input: AssumeRoleWithWebIdentityCommandInput; + output: AssumeRoleWithWebIdentityCommandOutput; + }; + }; +} diff --git a/amplify/functions/fetchDocuments/node_modules/@aws-sdk/nested-clients/dist-types/submodules/sts/commands/index.d.ts b/amplify/functions/fetchDocuments/node_modules/@aws-sdk/nested-clients/dist-types/submodules/sts/commands/index.d.ts new file mode 100644 index 0000000..0f200f5 --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@aws-sdk/nested-clients/dist-types/submodules/sts/commands/index.d.ts @@ -0,0 +1,2 @@ +export * from "./AssumeRoleCommand"; +export * from "./AssumeRoleWithWebIdentityCommand"; diff --git a/amplify/functions/fetchDocuments/node_modules/@aws-sdk/nested-clients/dist-types/submodules/sts/defaultRoleAssumers.d.ts b/amplify/functions/fetchDocuments/node_modules/@aws-sdk/nested-clients/dist-types/submodules/sts/defaultRoleAssumers.d.ts new file mode 100644 index 0000000..0e25207 --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@aws-sdk/nested-clients/dist-types/submodules/sts/defaultRoleAssumers.d.ts @@ -0,0 +1,23 @@ +import { Pluggable } from "@smithy/types"; +import { DefaultCredentialProvider, RoleAssumer, RoleAssumerWithWebIdentity, STSRoleAssumerOptions } from "./defaultStsRoleAssumers"; +import { ServiceInputTypes, ServiceOutputTypes } from "./STSClient"; +/** + * The default role assumer that used by credential providers when sts:AssumeRole API is needed. + */ +export declare const getDefaultRoleAssumer: (stsOptions?: STSRoleAssumerOptions, stsPlugins?: Pluggable[]) => RoleAssumer; +/** + * The default role assumer that used by credential providers when sts:AssumeRoleWithWebIdentity API is needed. 
+ */ +export declare const getDefaultRoleAssumerWithWebIdentity: (stsOptions?: STSRoleAssumerOptions, stsPlugins?: Pluggable[]) => RoleAssumerWithWebIdentity; +/** + * The default credential providers depend STS client to assume role with desired API: sts:assumeRole, + * sts:assumeRoleWithWebIdentity, etc. This function decorates the default credential provider with role assumers which + * encapsulates the process of calling STS commands. This can only be imported by AWS client packages to avoid circular + * dependencies. + * + * @internal + * + * @deprecated this is no longer needed. Use the defaultProvider directly, + * which will load STS if needed. + */ +export declare const decorateDefaultCredentialProvider: (provider: DefaultCredentialProvider) => DefaultCredentialProvider; diff --git a/amplify/functions/fetchDocuments/node_modules/@aws-sdk/nested-clients/dist-types/submodules/sts/defaultStsRoleAssumers.d.ts b/amplify/functions/fetchDocuments/node_modules/@aws-sdk/nested-clients/dist-types/submodules/sts/defaultStsRoleAssumers.d.ts new file mode 100644 index 0000000..c4ba0c4 --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@aws-sdk/nested-clients/dist-types/submodules/sts/defaultStsRoleAssumers.d.ts @@ -0,0 +1,43 @@ +import type { CredentialProviderOptions } from "@aws-sdk/types"; +import { AwsCredentialIdentity, Logger, Provider } from "@smithy/types"; +import { AssumeRoleCommandInput } from "./commands/AssumeRoleCommand"; +import { AssumeRoleWithWebIdentityCommandInput } from "./commands/AssumeRoleWithWebIdentityCommand"; +import type { STSClient, STSClientConfig } from "./STSClient"; +/** + * @public + */ +export type STSRoleAssumerOptions = Pick & { + credentialProviderLogger?: Logger; + parentClientConfig?: CredentialProviderOptions["parentClientConfig"]; +}; +/** + * @internal + */ +export type RoleAssumer = (sourceCreds: AwsCredentialIdentity, params: AssumeRoleCommandInput) => Promise; +/** + * The default role assumer that used by 
credential providers when sts:AssumeRole API is needed. + * @internal + */ +export declare const getDefaultRoleAssumer: (stsOptions: STSRoleAssumerOptions, STSClient: new (options: STSClientConfig) => STSClient) => RoleAssumer; +/** + * @internal + */ +export type RoleAssumerWithWebIdentity = (params: AssumeRoleWithWebIdentityCommandInput) => Promise; +/** + * The default role assumer that used by credential providers when sts:AssumeRoleWithWebIdentity API is needed. + * @internal + */ +export declare const getDefaultRoleAssumerWithWebIdentity: (stsOptions: STSRoleAssumerOptions, STSClient: new (options: STSClientConfig) => STSClient) => RoleAssumerWithWebIdentity; +/** + * @internal + */ +export type DefaultCredentialProvider = (input: any) => Provider; +/** + * The default credential providers depend STS client to assume role with desired API: sts:assumeRole, + * sts:assumeRoleWithWebIdentity, etc. This function decorates the default credential provider with role assumers which + * encapsulates the process of calling STS commands. This can only be imported by AWS client packages to avoid circular + * dependencies. 
+ * + * @internal + */ +export declare const decorateDefaultCredentialProvider: (provider: DefaultCredentialProvider) => DefaultCredentialProvider; diff --git a/amplify/functions/fetchDocuments/node_modules/@aws-sdk/nested-clients/dist-types/submodules/sts/endpoint/EndpointParameters.d.ts b/amplify/functions/fetchDocuments/node_modules/@aws-sdk/nested-clients/dist-types/submodules/sts/endpoint/EndpointParameters.d.ts new file mode 100644 index 0000000..39f6c7e --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@aws-sdk/nested-clients/dist-types/submodules/sts/endpoint/EndpointParameters.d.ts @@ -0,0 +1,46 @@ +import { Endpoint, EndpointParameters as __EndpointParameters, EndpointV2, Provider } from "@smithy/types"; +/** + * @public + */ +export interface ClientInputEndpointParameters { + region?: string | Provider; + useDualstackEndpoint?: boolean | Provider; + useFipsEndpoint?: boolean | Provider; + endpoint?: string | Provider | Endpoint | Provider | EndpointV2 | Provider; + useGlobalEndpoint?: boolean | Provider; +} +export type ClientResolvedEndpointParameters = ClientInputEndpointParameters & { + defaultSigningName: string; +}; +export declare const resolveClientEndpointParameters: (options: T & ClientInputEndpointParameters) => T & ClientInputEndpointParameters & { + defaultSigningName: string; +}; +export declare const commonParams: { + readonly UseGlobalEndpoint: { + readonly type: "builtInParams"; + readonly name: "useGlobalEndpoint"; + }; + readonly UseFIPS: { + readonly type: "builtInParams"; + readonly name: "useFipsEndpoint"; + }; + readonly Endpoint: { + readonly type: "builtInParams"; + readonly name: "endpoint"; + }; + readonly Region: { + readonly type: "builtInParams"; + readonly name: "region"; + }; + readonly UseDualStack: { + readonly type: "builtInParams"; + readonly name: "useDualstackEndpoint"; + }; +}; +export interface EndpointParameters extends __EndpointParameters { + Region?: string; + UseDualStack?: boolean; + UseFIPS?: 
boolean; + Endpoint?: string; + UseGlobalEndpoint?: boolean; +} diff --git a/amplify/functions/fetchDocuments/node_modules/@aws-sdk/nested-clients/dist-types/submodules/sts/endpoint/endpointResolver.d.ts b/amplify/functions/fetchDocuments/node_modules/@aws-sdk/nested-clients/dist-types/submodules/sts/endpoint/endpointResolver.d.ts new file mode 100644 index 0000000..70a8eae --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@aws-sdk/nested-clients/dist-types/submodules/sts/endpoint/endpointResolver.d.ts @@ -0,0 +1,5 @@ +import { EndpointV2, Logger } from "@smithy/types"; +import { EndpointParameters } from "./EndpointParameters"; +export declare const defaultEndpointResolver: (endpointParams: EndpointParameters, context?: { + logger?: Logger; +}) => EndpointV2; diff --git a/amplify/functions/fetchDocuments/node_modules/@aws-sdk/nested-clients/dist-types/submodules/sts/endpoint/ruleset.d.ts b/amplify/functions/fetchDocuments/node_modules/@aws-sdk/nested-clients/dist-types/submodules/sts/endpoint/ruleset.d.ts new file mode 100644 index 0000000..4b23899 --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@aws-sdk/nested-clients/dist-types/submodules/sts/endpoint/ruleset.d.ts @@ -0,0 +1,2 @@ +import { RuleSetObject } from "@smithy/types"; +export declare const ruleSet: RuleSetObject; diff --git a/amplify/functions/fetchDocuments/node_modules/@aws-sdk/nested-clients/dist-types/submodules/sts/extensionConfiguration.d.ts b/amplify/functions/fetchDocuments/node_modules/@aws-sdk/nested-clients/dist-types/submodules/sts/extensionConfiguration.d.ts new file mode 100644 index 0000000..970e12b --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@aws-sdk/nested-clients/dist-types/submodules/sts/extensionConfiguration.d.ts @@ -0,0 +1,9 @@ +import { AwsRegionExtensionConfiguration } from "@aws-sdk/types"; +import { HttpHandlerExtensionConfiguration } from "@smithy/protocol-http"; +import { DefaultExtensionConfiguration } from 
"@smithy/types"; +import { HttpAuthExtensionConfiguration } from "./auth/httpAuthExtensionConfiguration"; +/** + * @internal + */ +export interface STSExtensionConfiguration extends HttpHandlerExtensionConfiguration, DefaultExtensionConfiguration, AwsRegionExtensionConfiguration, HttpAuthExtensionConfiguration { +} diff --git a/amplify/functions/fetchDocuments/node_modules/@aws-sdk/nested-clients/dist-types/submodules/sts/index.d.ts b/amplify/functions/fetchDocuments/node_modules/@aws-sdk/nested-clients/dist-types/submodules/sts/index.d.ts new file mode 100644 index 0000000..98b87b4 --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@aws-sdk/nested-clients/dist-types/submodules/sts/index.d.ts @@ -0,0 +1,17 @@ +/** + * Security Token Service + *

Security Token Service (STS) enables you to request temporary, limited-privilege + * credentials for users. This guide provides descriptions of the STS API. For + * more information about using this service, see Temporary Security Credentials.

+ * + * @packageDocumentation + */ +export * from "./STSClient"; +export * from "./STS"; +export { ClientInputEndpointParameters } from "./endpoint/EndpointParameters"; +export type { RuntimeExtension } from "./runtimeExtensions"; +export type { STSExtensionConfiguration } from "./extensionConfiguration"; +export * from "./commands"; +export * from "./models"; +export * from "./defaultRoleAssumers"; +export { STSServiceException } from "./models/STSServiceException"; diff --git a/amplify/functions/fetchDocuments/node_modules/@aws-sdk/nested-clients/dist-types/submodules/sts/models/STSServiceException.d.ts b/amplify/functions/fetchDocuments/node_modules/@aws-sdk/nested-clients/dist-types/submodules/sts/models/STSServiceException.d.ts new file mode 100644 index 0000000..fd1a9a2 --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@aws-sdk/nested-clients/dist-types/submodules/sts/models/STSServiceException.d.ts @@ -0,0 +1,14 @@ +import { ServiceException as __ServiceException, ServiceExceptionOptions as __ServiceExceptionOptions } from "@smithy/smithy-client"; +export type { __ServiceExceptionOptions }; +export { __ServiceException }; +/** + * @public + * + * Base exception class for all service exceptions from STS service. 
+ */ +export declare class STSServiceException extends __ServiceException { + /** + * @internal + */ + constructor(options: __ServiceExceptionOptions); +} diff --git a/amplify/functions/fetchDocuments/node_modules/@aws-sdk/nested-clients/dist-types/submodules/sts/models/index.d.ts b/amplify/functions/fetchDocuments/node_modules/@aws-sdk/nested-clients/dist-types/submodules/sts/models/index.d.ts new file mode 100644 index 0000000..09c5d6e --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@aws-sdk/nested-clients/dist-types/submodules/sts/models/index.d.ts @@ -0,0 +1 @@ +export * from "./models_0"; diff --git a/amplify/functions/fetchDocuments/node_modules/@aws-sdk/nested-clients/dist-types/submodules/sts/models/models_0.d.ts b/amplify/functions/fetchDocuments/node_modules/@aws-sdk/nested-clients/dist-types/submodules/sts/models/models_0.d.ts new file mode 100644 index 0000000..5b58b93 --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@aws-sdk/nested-clients/dist-types/submodules/sts/models/models_0.d.ts @@ -0,0 +1,712 @@ +import { ExceptionOptionType as __ExceptionOptionType } from "@smithy/smithy-client"; +import { STSServiceException as __BaseException } from "./STSServiceException"; +/** + *

The identifiers for the temporary security credentials that the operation + * returns.

+ * @public + */ +export interface AssumedRoleUser { + /** + *

A unique identifier that contains the role ID and the role session name of the role that + * is being assumed. The role ID is generated by Amazon Web Services when the role is created.

+ * @public + */ + AssumedRoleId: string | undefined; + /** + *

The ARN of the temporary security credentials that are returned from the AssumeRole action. For more information about ARNs and how to use them in + * policies, see IAM Identifiers in the + * IAM User Guide.

+ * @public + */ + Arn: string | undefined; +} +/** + *

A reference to the IAM managed policy that is passed as a session policy for a role + * session or a federated user session.

+ * @public + */ +export interface PolicyDescriptorType { + /** + *

The Amazon Resource Name (ARN) of the IAM managed policy to use as a session policy + * for the role. For more information about ARNs, see Amazon Resource Names (ARNs) and Amazon Web Services + * Service Namespaces in the Amazon Web Services General Reference.

+ * @public + */ + arn?: string | undefined; +} +/** + *

Contains information about the provided context. This includes the signed and encrypted + * trusted context assertion and the context provider ARN from which the trusted context + * assertion was generated.

+ * @public + */ +export interface ProvidedContext { + /** + *

The context provider ARN from which the trusted context assertion was generated.

+ * @public + */ + ProviderArn?: string | undefined; + /** + *

The signed and encrypted trusted context assertion generated by the context provider. + * The trusted context assertion is signed and encrypted by Amazon Web Services STS.

+ * @public + */ + ContextAssertion?: string | undefined; +} +/** + *

You can pass custom key-value pair attributes when you assume a role or federate a user. + * These are called session tags. You can then use the session tags to control access to + * resources. For more information, see Tagging Amazon Web Services STS Sessions in the + * IAM User Guide.

+ * @public + */ +export interface Tag { + /** + *

The key for a session tag.

+ *

You can pass up to 50 session tags. The plain text session tag keys can’t exceed 128 + * characters. For these and additional limits, see IAM + * and STS Character Limits in the IAM User Guide.

+ * @public + */ + Key: string | undefined; + /** + *

The value for a session tag.

+ *

You can pass up to 50 session tags. The plain text session tag values can’t exceed 256 + * characters. For these and additional limits, see IAM + * and STS Character Limits in the IAM User Guide.

+ * @public + */ + Value: string | undefined; +} +/** + * @public + */ +export interface AssumeRoleRequest { + /** + *

The Amazon Resource Name (ARN) of the role to assume.

+ * @public + */ + RoleArn: string | undefined; + /** + *

An identifier for the assumed role session.

+ *

Use the role session name to uniquely identify a session when the same role is assumed + * by different principals or for different reasons. In cross-account scenarios, the role + * session name is visible to, and can be logged by the account that owns the role. The role + * session name is also used in the ARN of the assumed role principal. This means that + * subsequent cross-account API requests that use the temporary security credentials will + * expose the role session name to the external account in their CloudTrail logs.

+ *

For security purposes, administrators can view this field in CloudTrail logs to help identify who performed an action in Amazon Web Services. Your + * administrator might require that you specify your user name as the session name when you + * assume the role. For more information, see + * sts:RoleSessionName + * .

+ *

The regex used to validate this parameter is a string of characters + * consisting of upper- and lower-case alphanumeric characters with no spaces. You can + * also include underscores or any of the following characters: =,.@-

+ * @public + */ + RoleSessionName: string | undefined; + /** + *

The Amazon Resource Names (ARNs) of the IAM managed policies that you want to use as + * managed session policies. The policies must exist in the same account as the role.

+ *

This parameter is optional. You can provide up to 10 managed policy ARNs. However, the + * plaintext that you use for both inline and managed session policies can't exceed 2,048 + * characters. For more information about ARNs, see Amazon Resource Names (ARNs) and Amazon Web Services + * Service Namespaces in the Amazon Web Services General Reference.

+ * + *

An Amazon Web Services conversion compresses the passed inline session policy, managed policy ARNs, + * and session tags into a packed binary format that has a separate limit. Your request can + * fail for this limit even if your plaintext meets the other requirements. The + * PackedPolicySize response element indicates by percentage how close the + * policies and tags for your request are to the upper size limit.

+ *
+ *

Passing policies to this operation returns new + * temporary credentials. The resulting session's permissions are the intersection of the + * role's identity-based policy and the session policies. You can use the role's temporary + * credentials in subsequent Amazon Web Services API calls to access resources in the account that owns + * the role. You cannot use session policies to grant more permissions than those allowed + * by the identity-based policy of the role that is being assumed. For more information, see + * Session + * Policies in the IAM User Guide.

+ * @public + */ + PolicyArns?: PolicyDescriptorType[] | undefined; + /** + *

An IAM policy in JSON format that you want to use as an inline session policy.

+ *

This parameter is optional. Passing policies to this operation returns new + * temporary credentials. The resulting session's permissions are the intersection of the + * role's identity-based policy and the session policies. You can use the role's temporary + * credentials in subsequent Amazon Web Services API calls to access resources in the account that owns + * the role. You cannot use session policies to grant more permissions than those allowed + * by the identity-based policy of the role that is being assumed. For more information, see + * Session + * Policies in the IAM User Guide.

+ *

The plaintext that you use for both inline and managed session policies can't exceed + * 2,048 characters. The JSON policy characters can be any ASCII character from the space + * character to the end of the valid character list (\u0020 through \u00FF). It can also + * include the tab (\u0009), linefeed (\u000A), and carriage return (\u000D) + * characters.

+ * + *

An Amazon Web Services conversion compresses the passed inline session policy, managed policy ARNs, + * and session tags into a packed binary format that has a separate limit. Your request can + * fail for this limit even if your plaintext meets the other requirements. The + * PackedPolicySize response element indicates by percentage how close the + * policies and tags for your request are to the upper size limit.

+ *
+ *

For more information about role session permissions, see Session + * policies.

+ * @public + */ + Policy?: string | undefined; + /** + *

The duration, in seconds, of the role session. The value specified can range from 900 + * seconds (15 minutes) up to the maximum session duration set for the role. The maximum + * session duration setting can have a value from 1 hour to 12 hours. If you specify a value + * higher than this setting or the administrator setting (whichever is lower), the operation + * fails. For example, if you specify a session duration of 12 hours, but your administrator + * set the maximum session duration to 6 hours, your operation fails.

+ *

Role chaining limits your Amazon Web Services CLI or Amazon Web Services API role session to a maximum of one hour. + * When you use the AssumeRole API operation to assume a role, you can specify + * the duration of your role session with the DurationSeconds parameter. You can + * specify a parameter value of up to 43200 seconds (12 hours), depending on the maximum + * session duration setting for your role. However, if you assume a role using role chaining + * and provide a DurationSeconds parameter value greater than one hour, the + * operation fails. To learn how to view the maximum value for your role, see Update the maximum session duration for a role.

+ *

By default, the value is set to 3600 seconds.

+ * + *

The DurationSeconds parameter is separate from the duration of a console + * session that you might request using the returned credentials. The request to the + * federation endpoint for a console sign-in token takes a SessionDuration + * parameter that specifies the maximum length of the console session. For more + * information, see Creating a URL + * that Enables Federated Users to Access the Amazon Web Services Management Console in the + * IAM User Guide.

+ *
+ * @public + */ + DurationSeconds?: number | undefined; + /** + *

A list of session tags that you want to pass. Each session tag consists of a key name + * and an associated value. For more information about session tags, see Tagging Amazon Web Services STS + * Sessions in the IAM User Guide.

+ *

This parameter is optional. You can pass up to 50 session tags. The plaintext session + * tag keys can’t exceed 128 characters, and the values can’t exceed 256 characters. For these + * and additional limits, see IAM + * and STS Character Limits in the IAM User Guide.

+ * + *

An Amazon Web Services conversion compresses the passed inline session policy, managed policy ARNs, + * and session tags into a packed binary format that has a separate limit. Your request can + * fail for this limit even if your plaintext meets the other requirements. The + * PackedPolicySize response element indicates by percentage how close the + * policies and tags for your request are to the upper size limit.

+ *
+ *

You can pass a session tag with the same key as a tag that is already attached to the + * role. When you do, session tags override a role tag with the same key.

+ *

Tag key–value pairs are not case sensitive, but case is preserved. This means that you + * cannot have separate Department and department tag keys. Assume + * that the role has the Department=Marketing tag and you pass the + * department=engineering session tag. Department + * and department are not saved as separate tags, and the session tag passed in + * the request takes precedence over the role tag.

+ *

Additionally, if you used temporary credentials to perform this operation, the new + * session inherits any transitive session tags from the calling session. If you pass a + * session tag with the same key as an inherited tag, the operation fails. To view the + * inherited tags for a session, see the CloudTrail logs. For more information, see Viewing Session Tags in CloudTrail in the + * IAM User Guide.

+ * @public + */ + Tags?: Tag[] | undefined; + /** + *

A list of keys for session tags that you want to set as transitive. If you set a tag key + * as transitive, the corresponding key and value passes to subsequent sessions in a role + * chain. For more information, see Chaining Roles + * with Session Tags in the IAM User Guide.

+ *

This parameter is optional. The transitive status of a session tag does not impact its + * packed binary size.

+ *

If you choose not to specify a transitive tag key, then no tags are passed from this + * session to any subsequent sessions.

+ * @public + */ + TransitiveTagKeys?: string[] | undefined; + /** + *

A unique identifier that might be required when you assume a role in another account. If + * the administrator of the account to which the role belongs provided you with an external + * ID, then provide that value in the ExternalId parameter. This value can be any + * string, such as a passphrase or account number. A cross-account role is usually set up to + * trust everyone in an account. Therefore, the administrator of the trusting account might + * send an external ID to the administrator of the trusted account. That way, only someone + * with the ID can assume the role, rather than everyone in the account. For more information + * about the external ID, see How to Use an External ID + * When Granting Access to Your Amazon Web Services Resources to a Third Party in the + * IAM User Guide.

+ *

The regex used to validate this parameter is a string of + * characters consisting of upper- and lower-case alphanumeric characters with no spaces. + * You can also include underscores or any of the following characters: =,.@:/-

+ * @public + */ + ExternalId?: string | undefined; + /** + *

The identification number of the MFA device that is associated with the user who is + * making the AssumeRole call. Specify this value if the trust policy of the role + * being assumed includes a condition that requires MFA authentication. The value is either + * the serial number for a hardware device (such as GAHT12345678) or an Amazon + * Resource Name (ARN) for a virtual device (such as + * arn:aws:iam::123456789012:mfa/user).

+ *

The regex used to validate this parameter is a string of characters + * consisting of upper- and lower-case alphanumeric characters with no spaces. You can + * also include underscores or any of the following characters: =,.@-

+ * @public + */ + SerialNumber?: string | undefined; + /** + *

The value provided by the MFA device, if the trust policy of the role being assumed + * requires MFA. (In other words, if the policy includes a condition that tests for MFA). If + * the role being assumed requires MFA and if the TokenCode value is missing or + * expired, the AssumeRole call returns an "access denied" error.

+ *

The format for this parameter, as described by its regex pattern, is a sequence of six + * numeric digits.

+ * @public + */ + TokenCode?: string | undefined; + /** + *

The source identity specified by the principal that is calling the + * AssumeRole operation. The source identity value persists across chained role sessions.

+ *

You can require users to specify a source identity when they assume a role. You do this + * by using the + * sts:SourceIdentity + * condition key in a role trust policy. You + * can use source identity information in CloudTrail logs to determine who took actions with a + * role. You can use the aws:SourceIdentity condition key to further control + * access to Amazon Web Services resources based on the value of source identity. For more information about + * using source identity, see Monitor and control + * actions taken with assumed roles in the + * IAM User Guide.

+ *

The regex used to validate this parameter is a string of characters consisting of upper- + * and lower-case alphanumeric characters with no spaces. You can also include underscores or + * any of the following characters: +=,.@-. You cannot use a value that begins with the text + * aws:. This prefix is reserved for Amazon Web Services internal use.

+ * @public + */ + SourceIdentity?: string | undefined; + /** + *

A list of previously acquired trusted context assertions in the format of a JSON array. + * The trusted context assertion is signed and encrypted by Amazon Web Services STS.

+ *

The following is an example of a ProvidedContext value that includes a + * single trusted context assertion and the ARN of the context provider from which the trusted + * context assertion was generated.

+ *

+ * [\{"ProviderArn":"arn:aws:iam::aws:contextProvider/IdentityCenter","ContextAssertion":"trusted-context-assertion"\}] + *

+ * @public + */ + ProvidedContexts?: ProvidedContext[] | undefined; +} +/** + *

Amazon Web Services credentials for API authentication.

+ * @public + */ +export interface Credentials { + /** + *

The access key ID that identifies the temporary security credentials.

+ * @public + */ + AccessKeyId: string | undefined; + /** + *

The secret access key that can be used to sign requests.

+ * @public + */ + SecretAccessKey: string | undefined; + /** + *

The token that users must pass to the service API to use the temporary + * credentials.

+ * @public + */ + SessionToken: string | undefined; + /** + *

The date on which the current credentials expire.

+ * @public + */ + Expiration: Date | undefined; +} +/** + * @internal + */ +export declare const CredentialsFilterSensitiveLog: (obj: Credentials) => any; +/** + *

Contains the response to a successful AssumeRole request, including + * temporary Amazon Web Services credentials that can be used to make Amazon Web Services requests.

+ * @public + */ +export interface AssumeRoleResponse { + /** + *

The temporary security credentials, which include an access key ID, a secret access key, + * and a security (or session) token.

+ * + *

The size of the security token that STS API operations return is not fixed. We + * strongly recommend that you make no assumptions about the maximum size.

+ *
+ * @public + */ + Credentials?: Credentials | undefined; + /** + *

The Amazon Resource Name (ARN) and the assumed role ID, which are identifiers that you + * can use to refer to the resulting temporary security credentials. For example, you can + * reference these credentials as a principal in a resource-based policy by using the ARN or + * assumed role ID. The ARN and ID include the RoleSessionName that you specified + * when you called AssumeRole.

+ * @public + */ + AssumedRoleUser?: AssumedRoleUser | undefined; + /** + *

A percentage value that indicates the packed size of the session policies and session + * tags combined passed in the request. The request fails if the packed size is greater than 100 percent, + * which means the policies and tags exceeded the allowed space.

+ * @public + */ + PackedPolicySize?: number | undefined; + /** + *

The source identity specified by the principal that is calling the + * AssumeRole operation.

+ *

You can require users to specify a source identity when they assume a role. You do this + * by using the sts:SourceIdentity condition key in a role trust policy. You can + * use source identity information in CloudTrail logs to determine who took actions with a role. + * You can use the aws:SourceIdentity condition key to further control access to + * Amazon Web Services resources based on the value of source identity. For more information about using + * source identity, see Monitor and control + * actions taken with assumed roles in the + * IAM User Guide.

+ *

The regex used to validate this parameter is a string of characters consisting of upper- + * and lower-case alphanumeric characters with no spaces. You can also include underscores or + * any of the following characters: =,.@-

+ * @public + */ + SourceIdentity?: string | undefined; +} +/** + * @internal + */ +export declare const AssumeRoleResponseFilterSensitiveLog: (obj: AssumeRoleResponse) => any; +/** + *

The web identity token that was passed is expired or is not valid. Get a new identity + * token from the identity provider and then retry the request.

+ * @public + */ +export declare class ExpiredTokenException extends __BaseException { + readonly name: "ExpiredTokenException"; + readonly $fault: "client"; + /** + * @internal + */ + constructor(opts: __ExceptionOptionType); +} +/** + *

The request was rejected because the policy document was malformed. The error message + * describes the specific error.

+ * @public + */ +export declare class MalformedPolicyDocumentException extends __BaseException { + readonly name: "MalformedPolicyDocumentException"; + readonly $fault: "client"; + /** + * @internal + */ + constructor(opts: __ExceptionOptionType); +} +/** + *

The request was rejected because the total packed size of the session policies and + * session tags combined was too large. An Amazon Web Services conversion compresses the session policy + * document, session policy ARNs, and session tags into a packed binary format that has a + * separate limit. The error message indicates by percentage how close the policies and + * tags are to the upper size limit. For more information, see Passing Session Tags in STS in + * the IAM User Guide.

+ *

You could receive this error even though you meet other defined session policy and + * session tag limits. For more information, see IAM and STS Entity Character Limits in the IAM User + * Guide.

+ * @public + */ +export declare class PackedPolicyTooLargeException extends __BaseException { + readonly name: "PackedPolicyTooLargeException"; + readonly $fault: "client"; + /** + * @internal + */ + constructor(opts: __ExceptionOptionType); +} +/** + *

STS is not activated in the requested region for the account that is being asked to + * generate credentials. The account administrator must use the IAM console to activate + * STS in that region. For more information, see Activating and + * Deactivating STS in an Amazon Web Services Region in the IAM User + * Guide.

+ * @public + */ +export declare class RegionDisabledException extends __BaseException { + readonly name: "RegionDisabledException"; + readonly $fault: "client"; + /** + * @internal + */ + constructor(opts: __ExceptionOptionType); +} +/** + *

The identity provider (IdP) reported that authentication failed. This might be because + * the claim is invalid.

+ *

If this error is returned for the AssumeRoleWithWebIdentity operation, it + * can also mean that the claim has expired or has been explicitly revoked.

+ * @public + */ +export declare class IDPRejectedClaimException extends __BaseException { + readonly name: "IDPRejectedClaimException"; + readonly $fault: "client"; + /** + * @internal + */ + constructor(opts: __ExceptionOptionType); +} +/** + *

The web identity token that was passed could not be validated by Amazon Web Services. Get a new + * identity token from the identity provider and then retry the request.

+ * @public + */ +export declare class InvalidIdentityTokenException extends __BaseException { + readonly name: "InvalidIdentityTokenException"; + readonly $fault: "client"; + /** + * @internal + */ + constructor(opts: __ExceptionOptionType); +} +/** + * @public + */ +export interface AssumeRoleWithWebIdentityRequest { + /** + *

The Amazon Resource Name (ARN) of the role that the caller is assuming.

+ * + *

Additional considerations apply to Amazon Cognito identity pools that assume cross-account IAM roles. The trust policies of these roles must accept the + * cognito-identity.amazonaws.com service principal and must contain the + * cognito-identity.amazonaws.com:aud condition key to restrict role + * assumption to users from your intended identity pools. A policy that trusts Amazon Cognito + * identity pools without this condition creates a risk that a user from an unintended + * identity pool can assume the role. For more information, see Trust policies for + * IAM roles in Basic (Classic) authentication in the Amazon Cognito + * Developer Guide.

+ *
+ * @public + */ + RoleArn: string | undefined; + /** + *

An identifier for the assumed role session. Typically, you pass the name or identifier + * that is associated with the user who is using your application. That way, the temporary + * security credentials that your application will use are associated with that user. This + * session name is included as part of the ARN and assumed role ID in the + * AssumedRoleUser response element.

+ *

For security purposes, administrators can view this field in CloudTrail logs to help identify who performed an action in Amazon Web Services. Your + * administrator might require that you specify your user name as the session name when you + * assume the role. For more information, see + * sts:RoleSessionName + * .

+ *

The regex used to validate this parameter is a string of characters + * consisting of upper- and lower-case alphanumeric characters with no spaces. You can + * also include underscores or any of the following characters: =,.@-

+ * @public + */ + RoleSessionName: string | undefined; + /** + *

The OAuth 2.0 access token or OpenID Connect ID token that is provided by the identity + * provider. Your application must get this token by authenticating the user who is using your + * application with a web identity provider before the application makes an + * AssumeRoleWithWebIdentity call. Timestamps in the token must be formatted + * as either an integer or a long integer. Tokens must be signed using either RSA keys (RS256, + * RS384, or RS512) or ECDSA keys (ES256, ES384, or ES512).

+ * @public + */ + WebIdentityToken: string | undefined; + /** + *

The fully qualified host component of the domain name of the OAuth 2.0 identity + * provider. Do not specify this value for an OpenID Connect identity provider.

+ *

Currently www.amazon.com and graph.facebook.com are the only + * supported identity providers for OAuth 2.0 access tokens. Do not include URL schemes and + * port numbers.

+ *

Do not specify this value for OpenID Connect ID tokens.

+ * @public + */ + ProviderId?: string | undefined; + /** + *

The Amazon Resource Names (ARNs) of the IAM managed policies that you want to use as + * managed session policies. The policies must exist in the same account as the role.

+ *

This parameter is optional. You can provide up to 10 managed policy ARNs. However, the + * plaintext that you use for both inline and managed session policies can't exceed 2,048 + * characters. For more information about ARNs, see Amazon Resource Names (ARNs) and Amazon Web Services + * Service Namespaces in the Amazon Web Services General Reference.

+ * + *

An Amazon Web Services conversion compresses the passed inline session policy, managed policy ARNs, + * and session tags into a packed binary format that has a separate limit. Your request can + * fail for this limit even if your plaintext meets the other requirements. The + * PackedPolicySize response element indicates by percentage how close the + * policies and tags for your request are to the upper size limit.

+ *
+ *

Passing policies to this operation returns new + * temporary credentials. The resulting session's permissions are the intersection of the + * role's identity-based policy and the session policies. You can use the role's temporary + * credentials in subsequent Amazon Web Services API calls to access resources in the account that owns + * the role. You cannot use session policies to grant more permissions than those allowed + * by the identity-based policy of the role that is being assumed. For more information, see + * Session + * Policies in the IAM User Guide.

+ * @public + */ + PolicyArns?: PolicyDescriptorType[] | undefined; + /** + *

An IAM policy in JSON format that you want to use as an inline session policy.

+ *

This parameter is optional. Passing policies to this operation returns new + * temporary credentials. The resulting session's permissions are the intersection of the + * role's identity-based policy and the session policies. You can use the role's temporary + * credentials in subsequent Amazon Web Services API calls to access resources in the account that owns + * the role. You cannot use session policies to grant more permissions than those allowed + * by the identity-based policy of the role that is being assumed. For more information, see + * Session + * Policies in the IAM User Guide.

+ *

The plaintext that you use for both inline and managed session policies can't exceed + * 2,048 characters. The JSON policy characters can be any ASCII character from the space + * character to the end of the valid character list (\u0020 through \u00FF). It can also + * include the tab (\u0009), linefeed (\u000A), and carriage return (\u000D) + * characters.

+ *

For more information about role session permissions, see Session + * policies.

+ * + *

An Amazon Web Services conversion compresses the passed inline session policy, managed policy ARNs, + * and session tags into a packed binary format that has a separate limit. Your request can + * fail for this limit even if your plaintext meets the other requirements. The + * PackedPolicySize response element indicates by percentage how close the + * policies and tags for your request are to the upper size limit.

+ *
+ * @public + */ + Policy?: string | undefined; + /** + *

The duration, in seconds, of the role session. The value can range from 900 seconds (15 + * minutes) up to the maximum session duration setting for the role. This setting can have a + * value from 1 hour to 12 hours. If you specify a value higher than this setting, the + * operation fails. For example, if you specify a session duration of 12 hours, but your + * administrator set the maximum session duration to 6 hours, your operation fails. To learn + * how to view the maximum value for your role, see View the + * Maximum Session Duration Setting for a Role in the + * IAM User Guide.

+ *

By default, the value is set to 3600 seconds.

+ * + *

The DurationSeconds parameter is separate from the duration of a console + * session that you might request using the returned credentials. The request to the + * federation endpoint for a console sign-in token takes a SessionDuration + * parameter that specifies the maximum length of the console session. For more + * information, see Creating a URL + * that Enables Federated Users to Access the Amazon Web Services Management Console in the + * IAM User Guide.

+ *
+ * @public + */ + DurationSeconds?: number | undefined; +} +/** + * @internal + */ +export declare const AssumeRoleWithWebIdentityRequestFilterSensitiveLog: (obj: AssumeRoleWithWebIdentityRequest) => any; +/** + *

Contains the response to a successful AssumeRoleWithWebIdentity + * request, including temporary Amazon Web Services credentials that can be used to make Amazon Web Services requests.

+ * @public + */ +export interface AssumeRoleWithWebIdentityResponse { + /** + *

The temporary security credentials, which include an access key ID, a secret access key, + * and a security token.

+ * + *

The size of the security token that STS API operations return is not fixed. We + * strongly recommend that you make no assumptions about the maximum size.

+ *
+ * @public + */ + Credentials?: Credentials | undefined; + /** + *

The unique user identifier that is returned by the identity provider. This identifier is + * associated with the WebIdentityToken that was submitted with the + * AssumeRoleWithWebIdentity call. The identifier is typically unique to the + * user and the application that acquired the WebIdentityToken (pairwise + * identifier). For OpenID Connect ID tokens, this field contains the value returned by the + * identity provider as the token's sub (Subject) claim.

+ * @public + */ + SubjectFromWebIdentityToken?: string | undefined; + /** + *

The Amazon Resource Name (ARN) and the assumed role ID, which are identifiers that you + * can use to refer to the resulting temporary security credentials. For example, you can + * reference these credentials as a principal in a resource-based policy by using the ARN or + * assumed role ID. The ARN and ID include the RoleSessionName that you specified + * when you called AssumeRole.

+ * @public + */ + AssumedRoleUser?: AssumedRoleUser | undefined; + /** + *

A percentage value that indicates the packed size of the session policies and session + * tags combined passed in the request. The request fails if the packed size is greater than 100 percent, + * which means the policies and tags exceeded the allowed space.

+ * @public + */ + PackedPolicySize?: number | undefined; + /** + *

The issuing authority of the web identity token presented. For OpenID Connect ID + * tokens, this contains the value of the iss field. For OAuth 2.0 access tokens, + * this contains the value of the ProviderId parameter that was passed in the + * AssumeRoleWithWebIdentity request.

+ * @public + */ + Provider?: string | undefined; + /** + *

The intended audience (also known as client ID) of the web identity token. This is + * traditionally the client identifier issued to the application that requested the web + * identity token.

+ * @public + */ + Audience?: string | undefined; + /** + *

The value of the source identity that is returned in the JSON web token (JWT) from the + * identity provider.

+ *

You can require users to set a source identity value when they assume a role. You do + * this by using the sts:SourceIdentity condition key in a role trust policy. + * That way, actions that are taken with the role are associated with that user. After the + * source identity is set, the value cannot be changed. It is present in the request for all + * actions that are taken by the role and persists across chained role + * sessions. You can configure your identity provider to use an attribute associated with your + * users, like user name or email, as the source identity when calling + * AssumeRoleWithWebIdentity. You do this by adding a claim to the JSON web + * token. To learn more about OIDC tokens and claims, see Using Tokens with User Pools in the Amazon Cognito Developer Guide. + * For more information about using source identity, see Monitor and control + * actions taken with assumed roles in the + * IAM User Guide.

+ *

The regex used to validate this parameter is a string of characters + * consisting of upper- and lower-case alphanumeric characters with no spaces. You can + * also include underscores or any of the following characters: =,.@-

+ * @public + */ + SourceIdentity?: string | undefined; +} +/** + * @internal + */ +export declare const AssumeRoleWithWebIdentityResponseFilterSensitiveLog: (obj: AssumeRoleWithWebIdentityResponse) => any; +/** + *

The request could not be fulfilled because the identity provider (IDP) that was asked + * to verify the incoming identity token could not be reached. This is often a transient + * error caused by network conditions. Retry the request a limited number of times so that + * you don't exceed the request rate. If the error persists, the identity provider might be + * down or not responding.

+ * @public + */ +export declare class IDPCommunicationErrorException extends __BaseException { + readonly name: "IDPCommunicationErrorException"; + readonly $fault: "client"; + /** + * @internal + */ + constructor(opts: __ExceptionOptionType); +} diff --git a/amplify/functions/fetchDocuments/node_modules/@aws-sdk/nested-clients/dist-types/submodules/sts/protocols/Aws_query.d.ts b/amplify/functions/fetchDocuments/node_modules/@aws-sdk/nested-clients/dist-types/submodules/sts/protocols/Aws_query.d.ts new file mode 100644 index 0000000..db11c3a --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@aws-sdk/nested-clients/dist-types/submodules/sts/protocols/Aws_query.d.ts @@ -0,0 +1,20 @@ +import { HttpRequest as __HttpRequest, HttpResponse as __HttpResponse } from "@smithy/protocol-http"; +import { SerdeContext as __SerdeContext } from "@smithy/types"; +import { AssumeRoleCommandInput, AssumeRoleCommandOutput } from "../commands/AssumeRoleCommand"; +import { AssumeRoleWithWebIdentityCommandInput, AssumeRoleWithWebIdentityCommandOutput } from "../commands/AssumeRoleWithWebIdentityCommand"; +/** + * serializeAws_queryAssumeRoleCommand + */ +export declare const se_AssumeRoleCommand: (input: AssumeRoleCommandInput, context: __SerdeContext) => Promise<__HttpRequest>; +/** + * serializeAws_queryAssumeRoleWithWebIdentityCommand + */ +export declare const se_AssumeRoleWithWebIdentityCommand: (input: AssumeRoleWithWebIdentityCommandInput, context: __SerdeContext) => Promise<__HttpRequest>; +/** + * deserializeAws_queryAssumeRoleCommand + */ +export declare const de_AssumeRoleCommand: (output: __HttpResponse, context: __SerdeContext) => Promise; +/** + * deserializeAws_queryAssumeRoleWithWebIdentityCommand + */ +export declare const de_AssumeRoleWithWebIdentityCommand: (output: __HttpResponse, context: __SerdeContext) => Promise; diff --git a/amplify/functions/fetchDocuments/node_modules/@aws-sdk/nested-clients/dist-types/submodules/sts/runtimeConfig.browser.d.ts 
b/amplify/functions/fetchDocuments/node_modules/@aws-sdk/nested-clients/dist-types/submodules/sts/runtimeConfig.browser.d.ts new file mode 100644 index 0000000..5513a9b --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@aws-sdk/nested-clients/dist-types/submodules/sts/runtimeConfig.browser.d.ts @@ -0,0 +1,59 @@ +import { FetchHttpHandler as RequestHandler } from "@smithy/fetch-http-handler"; +import { STSClientConfig } from "./STSClient"; +/** + * @internal + */ +export declare const getRuntimeConfig: (config: STSClientConfig) => { + runtime: string; + defaultsMode: import("@smithy/types").Provider; + bodyLengthChecker: import("@smithy/types").BodyLengthCalculator; + credentialDefaultProvider: ((input: any) => import("@smithy/types").AwsCredentialIdentityProvider) | ((_: unknown) => () => Promise); + defaultUserAgentProvider: (config?: import("@aws-sdk/util-user-agent-browser").PreviouslyResolved | undefined) => Promise; + maxAttempts: number | import("@smithy/types").Provider; + region: string | import("@smithy/types").Provider; + requestHandler: import("@smithy/protocol-http").HttpHandler | RequestHandler; + retryMode: string | import("@smithy/types").Provider; + sha256: import("@smithy/types").HashConstructor; + streamCollector: import("@smithy/types").StreamCollector; + useDualstackEndpoint: boolean | import("@smithy/types").Provider; + useFipsEndpoint: boolean | import("@smithy/types").Provider; + apiVersion: string; + cacheMiddleware?: boolean | undefined; + urlParser: import("@smithy/types").UrlParser; + base64Decoder: import("@smithy/types").Decoder; + base64Encoder: (_input: string | Uint8Array) => string; + utf8Decoder: import("@smithy/types").Decoder; + utf8Encoder: (input: string | Uint8Array) => string; + disableHostPrefix: boolean; + serviceId: string; + profile?: string | undefined; + logger: import("@smithy/types").Logger; + extensions: import("./runtimeExtensions").RuntimeExtension[]; + customUserAgent?: string | 
import("@smithy/types").UserAgent | undefined; + userAgentAppId?: string | import("@smithy/types").Provider | undefined; + retryStrategy?: import("@smithy/types").RetryStrategy | import("@smithy/types").RetryStrategyV2 | undefined; + endpoint?: ((string | import("@smithy/types").Endpoint | import("@smithy/types").Provider | import("@smithy/types").EndpointV2 | import("@smithy/types").Provider) & (string | import("@smithy/types").Provider | import("@smithy/types").Endpoint | import("@smithy/types").Provider | import("@smithy/types").EndpointV2 | import("@smithy/types").Provider)) | undefined; + endpointProvider: (params: import("./endpoint/EndpointParameters").EndpointParameters, context?: { + logger?: import("@smithy/types").Logger | undefined; + } | undefined) => import("@smithy/types").EndpointV2; + tls?: boolean | undefined; + serviceConfiguredEndpoint?: undefined; + authSchemePreference?: string[] | import("@smithy/types").Provider | undefined; + httpAuthSchemes: import("@smithy/types").HttpAuthScheme[] | ({ + schemeId: string; + identityProvider: (ipc: import("@smithy/types").IdentityProviderConfig) => import("@smithy/types").IdentityProvider | undefined; + signer: import("@aws-sdk/core").AwsSdkSigV4Signer; + } | { + schemeId: string; + identityProvider: (ipc: import("@smithy/types").IdentityProviderConfig) => import("@smithy/types").IdentityProvider | (() => Promise<{}>); + signer: import("@smithy/core").NoAuthSigner; + })[]; + httpAuthSchemeProvider: import("./auth/httpAuthSchemeProvider").STSHttpAuthSchemeProvider; + credentials?: import("@smithy/types").AwsCredentialIdentity | import("@smithy/types").AwsCredentialIdentityProvider | undefined; + signer?: import("@smithy/types").RequestSigner | ((authScheme?: import("@smithy/types").AuthScheme | undefined) => Promise) | undefined; + signingEscapePath?: boolean | undefined; + systemClockOffset?: number | undefined; + signingRegion?: string | undefined; + signerConstructor?: (new (options: 
import("@smithy/signature-v4").SignatureV4Init & import("@smithy/signature-v4").SignatureV4CryptoInit) => import("@smithy/types").RequestSigner) | undefined; + useGlobalEndpoint?: boolean | import("@smithy/types").Provider | undefined; +}; diff --git a/amplify/functions/fetchDocuments/node_modules/@aws-sdk/nested-clients/dist-types/submodules/sts/runtimeConfig.d.ts b/amplify/functions/fetchDocuments/node_modules/@aws-sdk/nested-clients/dist-types/submodules/sts/runtimeConfig.d.ts new file mode 100644 index 0000000..c9924b4 --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@aws-sdk/nested-clients/dist-types/submodules/sts/runtimeConfig.d.ts @@ -0,0 +1,57 @@ +import { NoAuthSigner } from "@smithy/core"; +import { NodeHttpHandler as RequestHandler } from "@smithy/node-http-handler"; +import { IdentityProviderConfig } from "@smithy/types"; +import { STSClientConfig } from "./STSClient"; +/** + * @internal + */ +export declare const getRuntimeConfig: (config: STSClientConfig) => { + runtime: string; + defaultsMode: import("@smithy/types").Provider; + authSchemePreference: string[] | import("@smithy/types").Provider; + bodyLengthChecker: import("@smithy/types").BodyLengthCalculator; + defaultUserAgentProvider: (config?: import("@aws-sdk/util-user-agent-node").PreviouslyResolved | undefined) => Promise; + httpAuthSchemes: import("@smithy/types").HttpAuthScheme[] | { + schemeId: string; + identityProvider: (ipc: IdentityProviderConfig) => import("@smithy/types").IdentityProvider | (() => Promise<{}>); + signer: NoAuthSigner; + }[]; + maxAttempts: number | import("@smithy/types").Provider; + region: string | import("@smithy/types").Provider; + requestHandler: RequestHandler | import("@smithy/protocol-http").HttpHandler; + retryMode: string | import("@smithy/types").Provider; + sha256: import("@smithy/types").HashConstructor; + streamCollector: import("@smithy/types").StreamCollector; + useDualstackEndpoint: boolean | import("@smithy/types").Provider; + 
useFipsEndpoint: boolean | import("@smithy/types").Provider; + userAgentAppId: string | import("@smithy/types").Provider; + apiVersion: string; + cacheMiddleware?: boolean | undefined; + urlParser: import("@smithy/types").UrlParser; + base64Decoder: import("@smithy/types").Decoder; + base64Encoder: (_input: string | Uint8Array) => string; + utf8Decoder: import("@smithy/types").Decoder; + utf8Encoder: (input: string | Uint8Array) => string; + disableHostPrefix: boolean; + serviceId: string; + profile?: string | undefined; + credentialDefaultProvider?: ((input: any) => import("@smithy/types").AwsCredentialIdentityProvider) | undefined; + logger: import("@smithy/types").Logger; + extensions: import("./runtimeExtensions").RuntimeExtension[]; + customUserAgent?: string | import("@smithy/types").UserAgent | undefined; + retryStrategy?: import("@smithy/types").RetryStrategy | import("@smithy/types").RetryStrategyV2 | undefined; + endpoint?: ((string | import("@smithy/types").Endpoint | import("@smithy/types").Provider | import("@smithy/types").EndpointV2 | import("@smithy/types").Provider) & (string | import("@smithy/types").Provider | import("@smithy/types").Endpoint | import("@smithy/types").Provider | import("@smithy/types").EndpointV2 | import("@smithy/types").Provider)) | undefined; + endpointProvider: (params: import("./endpoint/EndpointParameters").EndpointParameters, context?: { + logger?: import("@smithy/types").Logger | undefined; + } | undefined) => import("@smithy/types").EndpointV2; + tls?: boolean | undefined; + serviceConfiguredEndpoint?: undefined; + httpAuthSchemeProvider: import("./auth/httpAuthSchemeProvider").STSHttpAuthSchemeProvider; + credentials?: import("@smithy/types").AwsCredentialIdentity | import("@smithy/types").AwsCredentialIdentityProvider | undefined; + signer?: import("@smithy/types").RequestSigner | ((authScheme?: import("@smithy/types").AuthScheme | undefined) => Promise) | undefined; + signingEscapePath?: boolean | undefined; + 
systemClockOffset?: number | undefined; + signingRegion?: string | undefined; + signerConstructor?: (new (options: import("@smithy/signature-v4").SignatureV4Init & import("@smithy/signature-v4").SignatureV4CryptoInit) => import("@smithy/types").RequestSigner) | undefined; + useGlobalEndpoint?: boolean | import("@smithy/types").Provider | undefined; +}; diff --git a/amplify/functions/fetchDocuments/node_modules/@aws-sdk/nested-clients/dist-types/submodules/sts/runtimeConfig.native.d.ts b/amplify/functions/fetchDocuments/node_modules/@aws-sdk/nested-clients/dist-types/submodules/sts/runtimeConfig.native.d.ts new file mode 100644 index 0000000..5bf519f --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@aws-sdk/nested-clients/dist-types/submodules/sts/runtimeConfig.native.d.ts @@ -0,0 +1,58 @@ +import { STSClientConfig } from "./STSClient"; +/** + * @internal + */ +export declare const getRuntimeConfig: (config: STSClientConfig) => { + runtime: string; + sha256: import("@smithy/types").HashConstructor; + requestHandler: import("@smithy/types").NodeHttpHandlerOptions | import("@smithy/types").FetchHttpHandlerOptions | Record | import("@smithy/protocol-http").HttpHandler | import("@smithy/fetch-http-handler").FetchHttpHandler; + apiVersion: string; + cacheMiddleware?: boolean | undefined; + urlParser: import("@smithy/types").UrlParser; + bodyLengthChecker: import("@smithy/types").BodyLengthCalculator; + streamCollector: import("@smithy/types").StreamCollector; + base64Decoder: import("@smithy/types").Decoder; + base64Encoder: (_input: string | Uint8Array) => string; + utf8Decoder: import("@smithy/types").Decoder; + utf8Encoder: (input: string | Uint8Array) => string; + disableHostPrefix: boolean; + serviceId: string; + useDualstackEndpoint: boolean | import("@smithy/types").Provider; + useFipsEndpoint: boolean | import("@smithy/types").Provider; + region: string | import("@smithy/types").Provider; + profile?: string | undefined; + 
defaultUserAgentProvider: (config?: import("@aws-sdk/util-user-agent-browser").PreviouslyResolved | undefined) => Promise; + credentialDefaultProvider: ((input: any) => import("@smithy/types").AwsCredentialIdentityProvider) | ((_: unknown) => () => Promise); + maxAttempts: number | import("@smithy/types").Provider; + retryMode: string | import("@smithy/types").Provider; + logger: import("@smithy/types").Logger; + extensions: import("./runtimeExtensions").RuntimeExtension[]; + defaultsMode: import("@smithy/smithy-client").DefaultsMode | import("@smithy/types").Provider; + customUserAgent?: string | import("@smithy/types").UserAgent | undefined; + userAgentAppId?: string | import("@smithy/types").Provider | undefined; + retryStrategy?: import("@smithy/types").RetryStrategy | import("@smithy/types").RetryStrategyV2 | undefined; + endpoint?: ((string | import("@smithy/types").Endpoint | import("@smithy/types").Provider | import("@smithy/types").EndpointV2 | import("@smithy/types").Provider) & (string | import("@smithy/types").Provider | import("@smithy/types").Endpoint | import("@smithy/types").Provider | import("@smithy/types").EndpointV2 | import("@smithy/types").Provider)) | undefined; + endpointProvider: (params: import("./endpoint/EndpointParameters").EndpointParameters, context?: { + logger?: import("@smithy/types").Logger | undefined; + } | undefined) => import("@smithy/types").EndpointV2; + tls?: boolean | undefined; + serviceConfiguredEndpoint?: undefined; + authSchemePreference?: string[] | import("@smithy/types").Provider | undefined; + httpAuthSchemes: import("@smithy/types").HttpAuthScheme[] | ({ + schemeId: string; + identityProvider: (ipc: import("@smithy/types").IdentityProviderConfig) => import("@smithy/types").IdentityProvider | undefined; + signer: import("@aws-sdk/core").AwsSdkSigV4Signer; + } | { + schemeId: string; + identityProvider: (ipc: import("@smithy/types").IdentityProviderConfig) => import("@smithy/types").IdentityProvider | (() => 
Promise<{}>); + signer: import("@smithy/core").NoAuthSigner; + })[]; + httpAuthSchemeProvider: import("./auth/httpAuthSchemeProvider").STSHttpAuthSchemeProvider; + credentials?: import("@smithy/types").AwsCredentialIdentity | import("@smithy/types").AwsCredentialIdentityProvider | undefined; + signer?: import("@smithy/types").RequestSigner | ((authScheme?: import("@smithy/types").AuthScheme | undefined) => Promise) | undefined; + signingEscapePath?: boolean | undefined; + systemClockOffset?: number | undefined; + signingRegion?: string | undefined; + signerConstructor?: (new (options: import("@smithy/signature-v4").SignatureV4Init & import("@smithy/signature-v4").SignatureV4CryptoInit) => import("@smithy/types").RequestSigner) | undefined; + useGlobalEndpoint?: boolean | import("@smithy/types").Provider | undefined; +}; diff --git a/amplify/functions/fetchDocuments/node_modules/@aws-sdk/nested-clients/dist-types/submodules/sts/runtimeConfig.shared.d.ts b/amplify/functions/fetchDocuments/node_modules/@aws-sdk/nested-clients/dist-types/submodules/sts/runtimeConfig.shared.d.ts new file mode 100644 index 0000000..5b99276 --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@aws-sdk/nested-clients/dist-types/submodules/sts/runtimeConfig.shared.d.ts @@ -0,0 +1,32 @@ +import { AwsSdkSigV4Signer } from "@aws-sdk/core"; +import { NoAuthSigner } from "@smithy/core"; +import { IdentityProviderConfig } from "@smithy/types"; +import { STSClientConfig } from "./STSClient"; +/** + * @internal + */ +export declare const getRuntimeConfig: (config: STSClientConfig) => { + apiVersion: string; + base64Decoder: import("@smithy/types").Decoder; + base64Encoder: (_input: string | Uint8Array) => string; + disableHostPrefix: boolean; + endpointProvider: (params: import("./endpoint/EndpointParameters").EndpointParameters, context?: { + logger?: import("@smithy/types").Logger | undefined; + } | undefined) => import("@smithy/types").EndpointV2; + extensions: 
import("./runtimeExtensions").RuntimeExtension[]; + httpAuthSchemeProvider: import("./auth/httpAuthSchemeProvider").STSHttpAuthSchemeProvider; + httpAuthSchemes: import("@smithy/types").HttpAuthScheme[] | ({ + schemeId: string; + identityProvider: (ipc: IdentityProviderConfig) => import("@smithy/types").IdentityProvider | undefined; + signer: AwsSdkSigV4Signer; + } | { + schemeId: string; + identityProvider: (ipc: IdentityProviderConfig) => import("@smithy/types").IdentityProvider | (() => Promise<{}>); + signer: NoAuthSigner; + })[]; + logger: import("@smithy/types").Logger; + serviceId: string; + urlParser: import("@smithy/types").UrlParser; + utf8Decoder: import("@smithy/types").Decoder; + utf8Encoder: (input: string | Uint8Array) => string; +}; diff --git a/amplify/functions/fetchDocuments/node_modules/@aws-sdk/nested-clients/dist-types/submodules/sts/runtimeExtensions.d.ts b/amplify/functions/fetchDocuments/node_modules/@aws-sdk/nested-clients/dist-types/submodules/sts/runtimeExtensions.d.ts new file mode 100644 index 0000000..ebd8567 --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@aws-sdk/nested-clients/dist-types/submodules/sts/runtimeExtensions.d.ts @@ -0,0 +1,17 @@ +import { STSExtensionConfiguration } from "./extensionConfiguration"; +/** + * @public + */ +export interface RuntimeExtension { + configure(extensionConfiguration: STSExtensionConfiguration): void; +} +/** + * @public + */ +export interface RuntimeExtensionsConfig { + extensions: RuntimeExtension[]; +} +/** + * @internal + */ +export declare const resolveRuntimeExtensions: (runtimeConfig: any, extensions: RuntimeExtension[]) => any; diff --git a/amplify/functions/fetchDocuments/node_modules/@aws-sdk/nested-clients/dist-types/ts3.4/index.d.ts b/amplify/functions/fetchDocuments/node_modules/@aws-sdk/nested-clients/dist-types/ts3.4/index.d.ts new file mode 100644 index 0000000..cb0ff5c --- /dev/null +++ 
b/amplify/functions/fetchDocuments/node_modules/@aws-sdk/nested-clients/dist-types/ts3.4/index.d.ts @@ -0,0 +1 @@ +export {}; diff --git a/amplify/functions/fetchDocuments/node_modules/@aws-sdk/nested-clients/dist-types/ts3.4/submodules/sso-oidc/SSOOIDC.d.ts b/amplify/functions/fetchDocuments/node_modules/@aws-sdk/nested-clients/dist-types/ts3.4/submodules/sso-oidc/SSOOIDC.d.ts new file mode 100644 index 0000000..10ee849 --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@aws-sdk/nested-clients/dist-types/ts3.4/submodules/sso-oidc/SSOOIDC.d.ts @@ -0,0 +1,22 @@ +import { HttpHandlerOptions as __HttpHandlerOptions } from "@smithy/types"; +import { + CreateTokenCommandInput, + CreateTokenCommandOutput, +} from "./commands/CreateTokenCommand"; +import { SSOOIDCClient } from "./SSOOIDCClient"; +export interface SSOOIDC { + createToken( + args: CreateTokenCommandInput, + options?: __HttpHandlerOptions + ): Promise; + createToken( + args: CreateTokenCommandInput, + cb: (err: any, data?: CreateTokenCommandOutput) => void + ): void; + createToken( + args: CreateTokenCommandInput, + options: __HttpHandlerOptions, + cb: (err: any, data?: CreateTokenCommandOutput) => void + ): void; +} +export declare class SSOOIDC extends SSOOIDCClient implements SSOOIDC {} diff --git a/amplify/functions/fetchDocuments/node_modules/@aws-sdk/nested-clients/dist-types/ts3.4/submodules/sso-oidc/SSOOIDCClient.d.ts b/amplify/functions/fetchDocuments/node_modules/@aws-sdk/nested-clients/dist-types/ts3.4/submodules/sso-oidc/SSOOIDCClient.d.ts new file mode 100644 index 0000000..d44b7af --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@aws-sdk/nested-clients/dist-types/ts3.4/submodules/sso-oidc/SSOOIDCClient.d.ts @@ -0,0 +1,121 @@ +import { + HostHeaderInputConfig, + HostHeaderResolvedConfig, +} from "@aws-sdk/middleware-host-header"; +import { + UserAgentInputConfig, + UserAgentResolvedConfig, +} from "@aws-sdk/middleware-user-agent"; +import { + RegionInputConfig, + 
RegionResolvedConfig, +} from "@smithy/config-resolver"; +import { + EndpointInputConfig, + EndpointResolvedConfig, +} from "@smithy/middleware-endpoint"; +import { + RetryInputConfig, + RetryResolvedConfig, +} from "@smithy/middleware-retry"; +import { HttpHandlerUserInput as __HttpHandlerUserInput } from "@smithy/protocol-http"; +import { + Client as __Client, + DefaultsMode as __DefaultsMode, + SmithyConfiguration as __SmithyConfiguration, + SmithyResolvedConfiguration as __SmithyResolvedConfiguration, +} from "@smithy/smithy-client"; +import { + BodyLengthCalculator as __BodyLengthCalculator, + CheckOptionalClientConfig as __CheckOptionalClientConfig, + ChecksumConstructor as __ChecksumConstructor, + Decoder as __Decoder, + Encoder as __Encoder, + HashConstructor as __HashConstructor, + HttpHandlerOptions as __HttpHandlerOptions, + Logger as __Logger, + Provider as __Provider, + Provider, + StreamCollector as __StreamCollector, + UrlParser as __UrlParser, + UserAgent as __UserAgent, +} from "@smithy/types"; +import { + HttpAuthSchemeInputConfig, + HttpAuthSchemeResolvedConfig, +} from "./auth/httpAuthSchemeProvider"; +import { + CreateTokenCommandInput, + CreateTokenCommandOutput, +} from "./commands/CreateTokenCommand"; +import { + ClientInputEndpointParameters, + ClientResolvedEndpointParameters, + EndpointParameters, +} from "./endpoint/EndpointParameters"; +import { RuntimeExtension, RuntimeExtensionsConfig } from "./runtimeExtensions"; +export { __Client }; +export type ServiceInputTypes = CreateTokenCommandInput; +export type ServiceOutputTypes = CreateTokenCommandOutput; +export interface ClientDefaults + extends Partial<__SmithyConfiguration<__HttpHandlerOptions>> { + requestHandler?: __HttpHandlerUserInput; + sha256?: __ChecksumConstructor | __HashConstructor; + urlParser?: __UrlParser; + bodyLengthChecker?: __BodyLengthCalculator; + streamCollector?: __StreamCollector; + base64Decoder?: __Decoder; + base64Encoder?: __Encoder; + utf8Decoder?: 
__Decoder; + utf8Encoder?: __Encoder; + runtime?: string; + disableHostPrefix?: boolean; + serviceId?: string; + useDualstackEndpoint?: boolean | __Provider; + useFipsEndpoint?: boolean | __Provider; + region?: string | __Provider; + profile?: string; + defaultUserAgentProvider?: Provider<__UserAgent>; + maxAttempts?: number | __Provider; + retryMode?: string | __Provider; + logger?: __Logger; + extensions?: RuntimeExtension[]; + defaultsMode?: __DefaultsMode | __Provider<__DefaultsMode>; +} +export type SSOOIDCClientConfigType = Partial< + __SmithyConfiguration<__HttpHandlerOptions> +> & + ClientDefaults & + UserAgentInputConfig & + RetryInputConfig & + RegionInputConfig & + HostHeaderInputConfig & + EndpointInputConfig & + HttpAuthSchemeInputConfig & + ClientInputEndpointParameters; +export interface SSOOIDCClientConfig extends SSOOIDCClientConfigType {} +export type SSOOIDCClientResolvedConfigType = + __SmithyResolvedConfiguration<__HttpHandlerOptions> & + Required & + RuntimeExtensionsConfig & + UserAgentResolvedConfig & + RetryResolvedConfig & + RegionResolvedConfig & + HostHeaderResolvedConfig & + EndpointResolvedConfig & + HttpAuthSchemeResolvedConfig & + ClientResolvedEndpointParameters; +export interface SSOOIDCClientResolvedConfig + extends SSOOIDCClientResolvedConfigType {} +export declare class SSOOIDCClient extends __Client< + __HttpHandlerOptions, + ServiceInputTypes, + ServiceOutputTypes, + SSOOIDCClientResolvedConfig +> { + readonly config: SSOOIDCClientResolvedConfig; + constructor( + ...[configuration]: __CheckOptionalClientConfig + ); + destroy(): void; +} diff --git a/amplify/functions/fetchDocuments/node_modules/@aws-sdk/nested-clients/dist-types/ts3.4/submodules/sso-oidc/auth/httpAuthExtensionConfiguration.d.ts b/amplify/functions/fetchDocuments/node_modules/@aws-sdk/nested-clients/dist-types/ts3.4/submodules/sso-oidc/auth/httpAuthExtensionConfiguration.d.ts new file mode 100644 index 0000000..c39ba91 --- /dev/null +++ 
b/amplify/functions/fetchDocuments/node_modules/@aws-sdk/nested-clients/dist-types/ts3.4/submodules/sso-oidc/auth/httpAuthExtensionConfiguration.d.ts @@ -0,0 +1,32 @@ +import { + AwsCredentialIdentity, + AwsCredentialIdentityProvider, + HttpAuthScheme, +} from "@smithy/types"; +import { SSOOIDCHttpAuthSchemeProvider } from "./httpAuthSchemeProvider"; +export interface HttpAuthExtensionConfiguration { + setHttpAuthScheme(httpAuthScheme: HttpAuthScheme): void; + httpAuthSchemes(): HttpAuthScheme[]; + setHttpAuthSchemeProvider( + httpAuthSchemeProvider: SSOOIDCHttpAuthSchemeProvider + ): void; + httpAuthSchemeProvider(): SSOOIDCHttpAuthSchemeProvider; + setCredentials( + credentials: AwsCredentialIdentity | AwsCredentialIdentityProvider + ): void; + credentials(): + | AwsCredentialIdentity + | AwsCredentialIdentityProvider + | undefined; +} +export type HttpAuthRuntimeConfig = Partial<{ + httpAuthSchemes: HttpAuthScheme[]; + httpAuthSchemeProvider: SSOOIDCHttpAuthSchemeProvider; + credentials: AwsCredentialIdentity | AwsCredentialIdentityProvider; +}>; +export declare const getHttpAuthExtensionConfiguration: ( + runtimeConfig: HttpAuthRuntimeConfig +) => HttpAuthExtensionConfiguration; +export declare const resolveHttpAuthRuntimeConfig: ( + config: HttpAuthExtensionConfiguration +) => HttpAuthRuntimeConfig; diff --git a/amplify/functions/fetchDocuments/node_modules/@aws-sdk/nested-clients/dist-types/ts3.4/submodules/sso-oidc/auth/httpAuthSchemeProvider.d.ts b/amplify/functions/fetchDocuments/node_modules/@aws-sdk/nested-clients/dist-types/ts3.4/submodules/sso-oidc/auth/httpAuthSchemeProvider.d.ts new file mode 100644 index 0000000..936b101 --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@aws-sdk/nested-clients/dist-types/ts3.4/submodules/sso-oidc/auth/httpAuthSchemeProvider.d.ts @@ -0,0 +1,47 @@ +import { + AwsSdkSigV4AuthInputConfig, + AwsSdkSigV4AuthResolvedConfig, + AwsSdkSigV4PreviouslyResolved, +} from "@aws-sdk/core"; +import { + 
HandlerExecutionContext, + HttpAuthScheme, + HttpAuthSchemeParameters, + HttpAuthSchemeParametersProvider, + HttpAuthSchemeProvider, + Provider, +} from "@smithy/types"; +import { SSOOIDCClientResolvedConfig } from "../SSOOIDCClient"; +export interface SSOOIDCHttpAuthSchemeParameters + extends HttpAuthSchemeParameters { + region?: string; +} +export interface SSOOIDCHttpAuthSchemeParametersProvider + extends HttpAuthSchemeParametersProvider< + SSOOIDCClientResolvedConfig, + HandlerExecutionContext, + SSOOIDCHttpAuthSchemeParameters, + object + > {} +export declare const defaultSSOOIDCHttpAuthSchemeParametersProvider: ( + config: SSOOIDCClientResolvedConfig, + context: HandlerExecutionContext, + input: object +) => Promise; +export interface SSOOIDCHttpAuthSchemeProvider + extends HttpAuthSchemeProvider {} +export declare const defaultSSOOIDCHttpAuthSchemeProvider: SSOOIDCHttpAuthSchemeProvider; +export interface HttpAuthSchemeInputConfig extends AwsSdkSigV4AuthInputConfig { + authSchemePreference?: string[] | Provider; + httpAuthSchemes?: HttpAuthScheme[]; + httpAuthSchemeProvider?: SSOOIDCHttpAuthSchemeProvider; +} +export interface HttpAuthSchemeResolvedConfig + extends AwsSdkSigV4AuthResolvedConfig { + readonly authSchemePreference: Provider; + readonly httpAuthSchemes: HttpAuthScheme[]; + readonly httpAuthSchemeProvider: SSOOIDCHttpAuthSchemeProvider; +} +export declare const resolveHttpAuthSchemeConfig: ( + config: T & HttpAuthSchemeInputConfig & AwsSdkSigV4PreviouslyResolved +) => T & HttpAuthSchemeResolvedConfig; diff --git a/amplify/functions/fetchDocuments/node_modules/@aws-sdk/nested-clients/dist-types/ts3.4/submodules/sso-oidc/commands/CreateTokenCommand.d.ts b/amplify/functions/fetchDocuments/node_modules/@aws-sdk/nested-clients/dist-types/ts3.4/submodules/sso-oidc/commands/CreateTokenCommand.d.ts new file mode 100644 index 0000000..cb1de8b --- /dev/null +++ 
b/amplify/functions/fetchDocuments/node_modules/@aws-sdk/nested-clients/dist-types/ts3.4/submodules/sso-oidc/commands/CreateTokenCommand.d.ts @@ -0,0 +1,43 @@ +import { Command as $Command } from "@smithy/smithy-client"; +import { MetadataBearer as __MetadataBearer } from "@smithy/types"; +import { CreateTokenRequest, CreateTokenResponse } from "../models/models_0"; +import { SSOOIDCClientResolvedConfig } from "../SSOOIDCClient"; +export { __MetadataBearer }; +export { $Command }; +export interface CreateTokenCommandInput extends CreateTokenRequest {} +export interface CreateTokenCommandOutput + extends CreateTokenResponse, + __MetadataBearer {} +declare const CreateTokenCommand_base: { + new ( + input: CreateTokenCommandInput + ): import("@smithy/smithy-client").CommandImpl< + CreateTokenCommandInput, + CreateTokenCommandOutput, + SSOOIDCClientResolvedConfig, + CreateTokenCommandInput, + CreateTokenCommandOutput + >; + new ( + __0_0: CreateTokenCommandInput + ): import("@smithy/smithy-client").CommandImpl< + CreateTokenCommandInput, + CreateTokenCommandOutput, + SSOOIDCClientResolvedConfig, + CreateTokenCommandInput, + CreateTokenCommandOutput + >; + getEndpointParameterInstructions(): import("@smithy/middleware-endpoint").EndpointParameterInstructions; +}; +export declare class CreateTokenCommand extends CreateTokenCommand_base { + protected static __types: { + api: { + input: CreateTokenRequest; + output: CreateTokenResponse; + }; + sdk: { + input: CreateTokenCommandInput; + output: CreateTokenCommandOutput; + }; + }; +} diff --git a/amplify/functions/fetchDocuments/node_modules/@aws-sdk/nested-clients/dist-types/ts3.4/submodules/sso-oidc/commands/index.d.ts b/amplify/functions/fetchDocuments/node_modules/@aws-sdk/nested-clients/dist-types/ts3.4/submodules/sso-oidc/commands/index.d.ts new file mode 100644 index 0000000..09214ca --- /dev/null +++ 
b/amplify/functions/fetchDocuments/node_modules/@aws-sdk/nested-clients/dist-types/ts3.4/submodules/sso-oidc/commands/index.d.ts @@ -0,0 +1 @@ +export * from "./CreateTokenCommand"; diff --git a/amplify/functions/fetchDocuments/node_modules/@aws-sdk/nested-clients/dist-types/ts3.4/submodules/sso-oidc/endpoint/EndpointParameters.d.ts b/amplify/functions/fetchDocuments/node_modules/@aws-sdk/nested-clients/dist-types/ts3.4/submodules/sso-oidc/endpoint/EndpointParameters.d.ts new file mode 100644 index 0000000..7f24540 --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@aws-sdk/nested-clients/dist-types/ts3.4/submodules/sso-oidc/endpoint/EndpointParameters.d.ts @@ -0,0 +1,51 @@ +import { + Endpoint, + EndpointParameters as __EndpointParameters, + EndpointV2, + Provider, +} from "@smithy/types"; +export interface ClientInputEndpointParameters { + region?: string | Provider; + useDualstackEndpoint?: boolean | Provider; + useFipsEndpoint?: boolean | Provider; + endpoint?: + | string + | Provider + | Endpoint + | Provider + | EndpointV2 + | Provider; +} +export type ClientResolvedEndpointParameters = ClientInputEndpointParameters & { + defaultSigningName: string; +}; +export declare const resolveClientEndpointParameters: ( + options: T & ClientInputEndpointParameters +) => T & + ClientInputEndpointParameters & { + defaultSigningName: string; + }; +export declare const commonParams: { + readonly UseFIPS: { + readonly type: "builtInParams"; + readonly name: "useFipsEndpoint"; + }; + readonly Endpoint: { + readonly type: "builtInParams"; + readonly name: "endpoint"; + }; + readonly Region: { + readonly type: "builtInParams"; + readonly name: "region"; + }; + readonly UseDualStack: { + readonly type: "builtInParams"; + readonly name: "useDualstackEndpoint"; + }; +}; +export interface EndpointParameters extends __EndpointParameters { + Region?: string; + UseDualStack?: boolean; + UseFIPS?: boolean; + Endpoint?: string; +} diff --git 
a/amplify/functions/fetchDocuments/node_modules/@aws-sdk/nested-clients/dist-types/ts3.4/submodules/sso-oidc/endpoint/endpointResolver.d.ts b/amplify/functions/fetchDocuments/node_modules/@aws-sdk/nested-clients/dist-types/ts3.4/submodules/sso-oidc/endpoint/endpointResolver.d.ts new file mode 100644 index 0000000..5909925 --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@aws-sdk/nested-clients/dist-types/ts3.4/submodules/sso-oidc/endpoint/endpointResolver.d.ts @@ -0,0 +1,8 @@ +import { EndpointV2, Logger } from "@smithy/types"; +import { EndpointParameters } from "./EndpointParameters"; +export declare const defaultEndpointResolver: ( + endpointParams: EndpointParameters, + context?: { + logger?: Logger; + } +) => EndpointV2; diff --git a/amplify/functions/fetchDocuments/node_modules/@aws-sdk/nested-clients/dist-types/ts3.4/submodules/sso-oidc/endpoint/ruleset.d.ts b/amplify/functions/fetchDocuments/node_modules/@aws-sdk/nested-clients/dist-types/ts3.4/submodules/sso-oidc/endpoint/ruleset.d.ts new file mode 100644 index 0000000..4b23899 --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@aws-sdk/nested-clients/dist-types/ts3.4/submodules/sso-oidc/endpoint/ruleset.d.ts @@ -0,0 +1,2 @@ +import { RuleSetObject } from "@smithy/types"; +export declare const ruleSet: RuleSetObject; diff --git a/amplify/functions/fetchDocuments/node_modules/@aws-sdk/nested-clients/dist-types/ts3.4/submodules/sso-oidc/extensionConfiguration.d.ts b/amplify/functions/fetchDocuments/node_modules/@aws-sdk/nested-clients/dist-types/ts3.4/submodules/sso-oidc/extensionConfiguration.d.ts new file mode 100644 index 0000000..c208e33 --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@aws-sdk/nested-clients/dist-types/ts3.4/submodules/sso-oidc/extensionConfiguration.d.ts @@ -0,0 +1,9 @@ +import { AwsRegionExtensionConfiguration } from "@aws-sdk/types"; +import { HttpHandlerExtensionConfiguration } from "@smithy/protocol-http"; +import { 
DefaultExtensionConfiguration } from "@smithy/types"; +import { HttpAuthExtensionConfiguration } from "./auth/httpAuthExtensionConfiguration"; +export interface SSOOIDCExtensionConfiguration + extends HttpHandlerExtensionConfiguration, + DefaultExtensionConfiguration, + AwsRegionExtensionConfiguration, + HttpAuthExtensionConfiguration {} diff --git a/amplify/functions/fetchDocuments/node_modules/@aws-sdk/nested-clients/dist-types/ts3.4/submodules/sso-oidc/index.d.ts b/amplify/functions/fetchDocuments/node_modules/@aws-sdk/nested-clients/dist-types/ts3.4/submodules/sso-oidc/index.d.ts new file mode 100644 index 0000000..1e9247f --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@aws-sdk/nested-clients/dist-types/ts3.4/submodules/sso-oidc/index.d.ts @@ -0,0 +1,8 @@ +export * from "./SSOOIDCClient"; +export * from "./SSOOIDC"; +export { ClientInputEndpointParameters } from "./endpoint/EndpointParameters"; +export { RuntimeExtension } from "./runtimeExtensions"; +export { SSOOIDCExtensionConfiguration } from "./extensionConfiguration"; +export * from "./commands"; +export * from "./models"; +export { SSOOIDCServiceException } from "./models/SSOOIDCServiceException"; diff --git a/amplify/functions/fetchDocuments/node_modules/@aws-sdk/nested-clients/dist-types/ts3.4/submodules/sso-oidc/models/SSOOIDCServiceException.d.ts b/amplify/functions/fetchDocuments/node_modules/@aws-sdk/nested-clients/dist-types/ts3.4/submodules/sso-oidc/models/SSOOIDCServiceException.d.ts new file mode 100644 index 0000000..dae636f --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@aws-sdk/nested-clients/dist-types/ts3.4/submodules/sso-oidc/models/SSOOIDCServiceException.d.ts @@ -0,0 +1,9 @@ +import { + ServiceException as __ServiceException, + ServiceExceptionOptions as __ServiceExceptionOptions, +} from "@smithy/smithy-client"; +export { __ServiceExceptionOptions }; +export { __ServiceException }; +export declare class SSOOIDCServiceException extends 
__ServiceException { + constructor(options: __ServiceExceptionOptions); +} diff --git a/amplify/functions/fetchDocuments/node_modules/@aws-sdk/nested-clients/dist-types/ts3.4/submodules/sso-oidc/models/index.d.ts b/amplify/functions/fetchDocuments/node_modules/@aws-sdk/nested-clients/dist-types/ts3.4/submodules/sso-oidc/models/index.d.ts new file mode 100644 index 0000000..09c5d6e --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@aws-sdk/nested-clients/dist-types/ts3.4/submodules/sso-oidc/models/index.d.ts @@ -0,0 +1 @@ +export * from "./models_0"; diff --git a/amplify/functions/fetchDocuments/node_modules/@aws-sdk/nested-clients/dist-types/ts3.4/submodules/sso-oidc/models/models_0.d.ts b/amplify/functions/fetchDocuments/node_modules/@aws-sdk/nested-clients/dist-types/ts3.4/submodules/sso-oidc/models/models_0.d.ts new file mode 100644 index 0000000..68de714 --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@aws-sdk/nested-clients/dist-types/ts3.4/submodules/sso-oidc/models/models_0.d.ts @@ -0,0 +1,123 @@ +import { ExceptionOptionType as __ExceptionOptionType } from "@smithy/smithy-client"; +import { SSOOIDCServiceException as __BaseException } from "./SSOOIDCServiceException"; +export declare class AccessDeniedException extends __BaseException { + readonly name: "AccessDeniedException"; + readonly $fault: "client"; + error?: string | undefined; + error_description?: string | undefined; + constructor( + opts: __ExceptionOptionType + ); +} +export declare class AuthorizationPendingException extends __BaseException { + readonly name: "AuthorizationPendingException"; + readonly $fault: "client"; + error?: string | undefined; + error_description?: string | undefined; + constructor( + opts: __ExceptionOptionType + ); +} +export interface CreateTokenRequest { + clientId: string | undefined; + clientSecret: string | undefined; + grantType: string | undefined; + deviceCode?: string | undefined; + code?: string | undefined; + refreshToken?: 
string | undefined; + scope?: string[] | undefined; + redirectUri?: string | undefined; + codeVerifier?: string | undefined; +} +export declare const CreateTokenRequestFilterSensitiveLog: ( + obj: CreateTokenRequest +) => any; +export interface CreateTokenResponse { + accessToken?: string | undefined; + tokenType?: string | undefined; + expiresIn?: number | undefined; + refreshToken?: string | undefined; + idToken?: string | undefined; +} +export declare const CreateTokenResponseFilterSensitiveLog: ( + obj: CreateTokenResponse +) => any; +export declare class ExpiredTokenException extends __BaseException { + readonly name: "ExpiredTokenException"; + readonly $fault: "client"; + error?: string | undefined; + error_description?: string | undefined; + constructor( + opts: __ExceptionOptionType + ); +} +export declare class InternalServerException extends __BaseException { + readonly name: "InternalServerException"; + readonly $fault: "server"; + error?: string | undefined; + error_description?: string | undefined; + constructor( + opts: __ExceptionOptionType + ); +} +export declare class InvalidClientException extends __BaseException { + readonly name: "InvalidClientException"; + readonly $fault: "client"; + error?: string | undefined; + error_description?: string | undefined; + constructor( + opts: __ExceptionOptionType + ); +} +export declare class InvalidGrantException extends __BaseException { + readonly name: "InvalidGrantException"; + readonly $fault: "client"; + error?: string | undefined; + error_description?: string | undefined; + constructor( + opts: __ExceptionOptionType + ); +} +export declare class InvalidRequestException extends __BaseException { + readonly name: "InvalidRequestException"; + readonly $fault: "client"; + error?: string | undefined; + error_description?: string | undefined; + constructor( + opts: __ExceptionOptionType + ); +} +export declare class InvalidScopeException extends __BaseException { + readonly name: "InvalidScopeException"; + 
readonly $fault: "client"; + error?: string | undefined; + error_description?: string | undefined; + constructor( + opts: __ExceptionOptionType + ); +} +export declare class SlowDownException extends __BaseException { + readonly name: "SlowDownException"; + readonly $fault: "client"; + error?: string | undefined; + error_description?: string | undefined; + constructor(opts: __ExceptionOptionType); +} +export declare class UnauthorizedClientException extends __BaseException { + readonly name: "UnauthorizedClientException"; + readonly $fault: "client"; + error?: string | undefined; + error_description?: string | undefined; + constructor( + opts: __ExceptionOptionType + ); +} +export declare class UnsupportedGrantTypeException extends __BaseException { + readonly name: "UnsupportedGrantTypeException"; + readonly $fault: "client"; + error?: string | undefined; + error_description?: string | undefined; + constructor( + opts: __ExceptionOptionType + ); +} diff --git a/amplify/functions/fetchDocuments/node_modules/@aws-sdk/nested-clients/dist-types/ts3.4/submodules/sso-oidc/protocols/Aws_restJson1.d.ts b/amplify/functions/fetchDocuments/node_modules/@aws-sdk/nested-clients/dist-types/ts3.4/submodules/sso-oidc/protocols/Aws_restJson1.d.ts new file mode 100644 index 0000000..d0657b8 --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@aws-sdk/nested-clients/dist-types/ts3.4/submodules/sso-oidc/protocols/Aws_restJson1.d.ts @@ -0,0 +1,17 @@ +import { + HttpRequest as __HttpRequest, + HttpResponse as __HttpResponse, +} from "@smithy/protocol-http"; +import { SerdeContext as __SerdeContext } from "@smithy/types"; +import { + CreateTokenCommandInput, + CreateTokenCommandOutput, +} from "../commands/CreateTokenCommand"; +export declare const se_CreateTokenCommand: ( + input: CreateTokenCommandInput, + context: __SerdeContext +) => Promise<__HttpRequest>; +export declare const de_CreateTokenCommand: ( + output: __HttpResponse, + context: __SerdeContext +) => Promise; 
diff --git a/amplify/functions/fetchDocuments/node_modules/@aws-sdk/nested-clients/dist-types/ts3.4/submodules/sso-oidc/runtimeConfig.browser.d.ts b/amplify/functions/fetchDocuments/node_modules/@aws-sdk/nested-clients/dist-types/ts3.4/submodules/sso-oidc/runtimeConfig.browser.d.ts new file mode 100644 index 0000000..c469a24 --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@aws-sdk/nested-clients/dist-types/ts3.4/submodules/sso-oidc/runtimeConfig.browser.d.ts @@ -0,0 +1,120 @@ +import { FetchHttpHandler as RequestHandler } from "@smithy/fetch-http-handler"; +import { SSOOIDCClientConfig } from "./SSOOIDCClient"; +export declare const getRuntimeConfig: (config: SSOOIDCClientConfig) => { + runtime: string; + defaultsMode: import("@smithy/types").Provider< + import("@smithy/smithy-client").ResolvedDefaultsMode + >; + bodyLengthChecker: import("@smithy/types").BodyLengthCalculator; + defaultUserAgentProvider: ( + config?: + | import("@aws-sdk/util-user-agent-browser").PreviouslyResolved + | undefined + ) => Promise; + maxAttempts: number | import("@smithy/types").Provider; + region: string | import("@smithy/types").Provider; + requestHandler: + | import("@smithy/protocol-http").HttpHandler + | RequestHandler; + retryMode: string | import("@smithy/types").Provider; + sha256: import("@smithy/types").HashConstructor; + streamCollector: import("@smithy/types").StreamCollector; + useDualstackEndpoint: boolean | import("@smithy/types").Provider; + useFipsEndpoint: boolean | import("@smithy/types").Provider; + apiVersion: string; + cacheMiddleware?: boolean | undefined; + urlParser: import("@smithy/types").UrlParser; + base64Decoder: import("@smithy/types").Decoder; + base64Encoder: (_input: string | Uint8Array) => string; + utf8Decoder: import("@smithy/types").Decoder; + utf8Encoder: (input: string | Uint8Array) => string; + disableHostPrefix: boolean; + serviceId: string; + profile?: string | undefined; + logger: import("@smithy/types").Logger; + 
extensions: import("./runtimeExtensions").RuntimeExtension[]; + customUserAgent?: string | import("@smithy/types").UserAgent | undefined; + userAgentAppId?: + | string + | import("@smithy/types").Provider + | undefined; + retryStrategy?: + | import("@smithy/types").RetryStrategy + | import("@smithy/types").RetryStrategyV2 + | undefined; + endpoint?: + | (( + | string + | import("@smithy/types").Endpoint + | import("@smithy/types").Provider + | import("@smithy/types").EndpointV2 + | import("@smithy/types").Provider + ) & + ( + | string + | import("@smithy/types").Provider + | import("@smithy/types").Endpoint + | import("@smithy/types").Provider + | import("@smithy/types").EndpointV2 + | import("@smithy/types").Provider + )) + | undefined; + endpointProvider: ( + endpointParams: import("./endpoint/EndpointParameters").EndpointParameters, + context?: { + logger?: import("@smithy/types").Logger | undefined; + } + ) => import("@smithy/types").EndpointV2; + tls?: boolean | undefined; + serviceConfiguredEndpoint?: undefined; + authSchemePreference?: + | string[] + | import("@smithy/types").Provider + | undefined; + httpAuthSchemes: + | import("@smithy/types").HttpAuthScheme[] + | ( + | { + schemeId: string; + identityProvider: ( + ipc: import("@smithy/types").IdentityProviderConfig + ) => + | import("@smithy/types").IdentityProvider< + import("@smithy/types").Identity + > + | undefined; + signer: import("@aws-sdk/core").AwsSdkSigV4Signer; + } + | { + schemeId: string; + identityProvider: ( + ipc: import("@smithy/types").IdentityProviderConfig + ) => + | import("@smithy/types").IdentityProvider< + import("@smithy/types").Identity + > + | (() => Promise<{}>); + signer: import("@smithy/core").NoAuthSigner; + } + )[]; + httpAuthSchemeProvider: import("./auth/httpAuthSchemeProvider").SSOOIDCHttpAuthSchemeProvider; + credentials?: + | import("@smithy/types").AwsCredentialIdentity + | import("@smithy/types").AwsCredentialIdentityProvider + | undefined; + signer?: + | 
import("@smithy/types").RequestSigner + | (( + authScheme?: import("@smithy/types").AuthScheme | undefined + ) => Promise) + | undefined; + signingEscapePath?: boolean | undefined; + systemClockOffset?: number | undefined; + signingRegion?: string | undefined; + signerConstructor?: + | (new ( + options: import("@smithy/signature-v4").SignatureV4Init & + import("@smithy/signature-v4").SignatureV4CryptoInit + ) => import("@smithy/types").RequestSigner) + | undefined; +}; diff --git a/amplify/functions/fetchDocuments/node_modules/@aws-sdk/nested-clients/dist-types/ts3.4/submodules/sso-oidc/runtimeConfig.d.ts b/amplify/functions/fetchDocuments/node_modules/@aws-sdk/nested-clients/dist-types/ts3.4/submodules/sso-oidc/runtimeConfig.d.ts new file mode 100644 index 0000000..a24c900 --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@aws-sdk/nested-clients/dist-types/ts3.4/submodules/sso-oidc/runtimeConfig.d.ts @@ -0,0 +1,114 @@ +import { NodeHttpHandler as RequestHandler } from "@smithy/node-http-handler"; +import { SSOOIDCClientConfig } from "./SSOOIDCClient"; +export declare const getRuntimeConfig: (config: SSOOIDCClientConfig) => { + runtime: string; + defaultsMode: import("@smithy/types").Provider< + import("@smithy/smithy-client").ResolvedDefaultsMode + >; + authSchemePreference: string[] | import("@smithy/types").Provider; + bodyLengthChecker: import("@smithy/types").BodyLengthCalculator; + defaultUserAgentProvider: ( + config?: + | import("@aws-sdk/util-user-agent-node").PreviouslyResolved + | undefined + ) => Promise; + maxAttempts: number | import("@smithy/types").Provider; + region: string | import("@smithy/types").Provider; + requestHandler: + | RequestHandler + | import("@smithy/protocol-http").HttpHandler; + retryMode: string | import("@smithy/types").Provider; + sha256: import("@smithy/types").HashConstructor; + streamCollector: import("@smithy/types").StreamCollector; + useDualstackEndpoint: boolean | import("@smithy/types").Provider; + 
useFipsEndpoint: boolean | import("@smithy/types").Provider; + userAgentAppId: string | import("@smithy/types").Provider; + apiVersion: string; + cacheMiddleware?: boolean | undefined; + urlParser: import("@smithy/types").UrlParser; + base64Decoder: import("@smithy/types").Decoder; + base64Encoder: (_input: string | Uint8Array) => string; + utf8Decoder: import("@smithy/types").Decoder; + utf8Encoder: (input: string | Uint8Array) => string; + disableHostPrefix: boolean; + serviceId: string; + profile?: string | undefined; + logger: import("@smithy/types").Logger; + extensions: import("./runtimeExtensions").RuntimeExtension[]; + customUserAgent?: string | import("@smithy/types").UserAgent | undefined; + retryStrategy?: + | import("@smithy/types").RetryStrategy + | import("@smithy/types").RetryStrategyV2 + | undefined; + endpoint?: + | (( + | string + | import("@smithy/types").Endpoint + | import("@smithy/types").Provider + | import("@smithy/types").EndpointV2 + | import("@smithy/types").Provider + ) & + ( + | string + | import("@smithy/types").Provider + | import("@smithy/types").Endpoint + | import("@smithy/types").Provider + | import("@smithy/types").EndpointV2 + | import("@smithy/types").Provider + )) + | undefined; + endpointProvider: ( + endpointParams: import("./endpoint/EndpointParameters").EndpointParameters, + context?: { + logger?: import("@smithy/types").Logger | undefined; + } + ) => import("@smithy/types").EndpointV2; + tls?: boolean | undefined; + serviceConfiguredEndpoint?: undefined; + httpAuthSchemes: + | import("@smithy/types").HttpAuthScheme[] + | ( + | { + schemeId: string; + identityProvider: ( + ipc: import("@smithy/types").IdentityProviderConfig + ) => + | import("@smithy/types").IdentityProvider< + import("@smithy/types").Identity + > + | undefined; + signer: import("@aws-sdk/core").AwsSdkSigV4Signer; + } + | { + schemeId: string; + identityProvider: ( + ipc: import("@smithy/types").IdentityProviderConfig + ) => + | 
import("@smithy/types").IdentityProvider< + import("@smithy/types").Identity + > + | (() => Promise<{}>); + signer: import("@smithy/core").NoAuthSigner; + } + )[]; + httpAuthSchemeProvider: import("./auth/httpAuthSchemeProvider").SSOOIDCHttpAuthSchemeProvider; + credentials?: + | import("@smithy/types").AwsCredentialIdentity + | import("@smithy/types").AwsCredentialIdentityProvider + | undefined; + signer?: + | import("@smithy/types").RequestSigner + | (( + authScheme?: import("@smithy/types").AuthScheme | undefined + ) => Promise) + | undefined; + signingEscapePath?: boolean | undefined; + systemClockOffset?: number | undefined; + signingRegion?: string | undefined; + signerConstructor?: + | (new ( + options: import("@smithy/signature-v4").SignatureV4Init & + import("@smithy/signature-v4").SignatureV4CryptoInit + ) => import("@smithy/types").RequestSigner) + | undefined; +}; diff --git a/amplify/functions/fetchDocuments/node_modules/@aws-sdk/nested-clients/dist-types/ts3.4/submodules/sso-oidc/runtimeConfig.native.d.ts b/amplify/functions/fetchDocuments/node_modules/@aws-sdk/nested-clients/dist-types/ts3.4/submodules/sso-oidc/runtimeConfig.native.d.ts new file mode 100644 index 0000000..c3610fd --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@aws-sdk/nested-clients/dist-types/ts3.4/submodules/sso-oidc/runtimeConfig.native.d.ts @@ -0,0 +1,124 @@ +import { SSOOIDCClientConfig } from "./SSOOIDCClient"; +export declare const getRuntimeConfig: (config: SSOOIDCClientConfig) => { + runtime: string; + sha256: import("@smithy/types").HashConstructor; + requestHandler: + | import("@smithy/types").NodeHttpHandlerOptions + | import("@smithy/types").FetchHttpHandlerOptions + | Record + | import("@smithy/protocol-http").HttpHandler + | import("@smithy/fetch-http-handler").FetchHttpHandler; + apiVersion: string; + cacheMiddleware?: boolean | undefined; + urlParser: import("@smithy/types").UrlParser; + bodyLengthChecker: 
import("@smithy/types").BodyLengthCalculator; + streamCollector: import("@smithy/types").StreamCollector; + base64Decoder: import("@smithy/types").Decoder; + base64Encoder: (_input: string | Uint8Array) => string; + utf8Decoder: import("@smithy/types").Decoder; + utf8Encoder: (input: string | Uint8Array) => string; + disableHostPrefix: boolean; + serviceId: string; + useDualstackEndpoint: boolean | import("@smithy/types").Provider; + useFipsEndpoint: boolean | import("@smithy/types").Provider; + region: string | import("@smithy/types").Provider; + profile?: string | undefined; + defaultUserAgentProvider: ( + config?: + | import("@aws-sdk/util-user-agent-browser").PreviouslyResolved + | undefined + ) => Promise; + maxAttempts: number | import("@smithy/types").Provider; + retryMode: string | import("@smithy/types").Provider; + logger: import("@smithy/types").Logger; + extensions: import("./runtimeExtensions").RuntimeExtension[]; + defaultsMode: + | import("@smithy/smithy-client").DefaultsMode + | import("@smithy/types").Provider< + import("@smithy/smithy-client").DefaultsMode + >; + customUserAgent?: string | import("@smithy/types").UserAgent | undefined; + userAgentAppId?: + | string + | import("@smithy/types").Provider + | undefined; + retryStrategy?: + | import("@smithy/types").RetryStrategy + | import("@smithy/types").RetryStrategyV2 + | undefined; + endpoint?: + | (( + | string + | import("@smithy/types").Endpoint + | import("@smithy/types").Provider + | import("@smithy/types").EndpointV2 + | import("@smithy/types").Provider + ) & + ( + | string + | import("@smithy/types").Provider + | import("@smithy/types").Endpoint + | import("@smithy/types").Provider + | import("@smithy/types").EndpointV2 + | import("@smithy/types").Provider + )) + | undefined; + endpointProvider: ( + endpointParams: import("./endpoint/EndpointParameters").EndpointParameters, + context?: { + logger?: import("@smithy/types").Logger | undefined; + } + ) => import("@smithy/types").EndpointV2; + 
tls?: boolean | undefined; + serviceConfiguredEndpoint?: undefined; + authSchemePreference?: + | string[] + | import("@smithy/types").Provider + | undefined; + httpAuthSchemes: + | import("@smithy/types").HttpAuthScheme[] + | ( + | { + schemeId: string; + identityProvider: ( + ipc: import("@smithy/types").IdentityProviderConfig + ) => + | import("@smithy/types").IdentityProvider< + import("@smithy/types").Identity + > + | undefined; + signer: import("@aws-sdk/core").AwsSdkSigV4Signer; + } + | { + schemeId: string; + identityProvider: ( + ipc: import("@smithy/types").IdentityProviderConfig + ) => + | import("@smithy/types").IdentityProvider< + import("@smithy/types").Identity + > + | (() => Promise<{}>); + signer: import("@smithy/core").NoAuthSigner; + } + )[]; + httpAuthSchemeProvider: import("./auth/httpAuthSchemeProvider").SSOOIDCHttpAuthSchemeProvider; + credentials?: + | import("@smithy/types").AwsCredentialIdentity + | import("@smithy/types").AwsCredentialIdentityProvider + | undefined; + signer?: + | import("@smithy/types").RequestSigner + | (( + authScheme?: import("@smithy/types").AuthScheme | undefined + ) => Promise) + | undefined; + signingEscapePath?: boolean | undefined; + systemClockOffset?: number | undefined; + signingRegion?: string | undefined; + signerConstructor?: + | (new ( + options: import("@smithy/signature-v4").SignatureV4Init & + import("@smithy/signature-v4").SignatureV4CryptoInit + ) => import("@smithy/types").RequestSigner) + | undefined; +}; diff --git a/amplify/functions/fetchDocuments/node_modules/@aws-sdk/nested-clients/dist-types/ts3.4/submodules/sso-oidc/runtimeConfig.shared.d.ts b/amplify/functions/fetchDocuments/node_modules/@aws-sdk/nested-clients/dist-types/ts3.4/submodules/sso-oidc/runtimeConfig.shared.d.ts new file mode 100644 index 0000000..130a1e3 --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@aws-sdk/nested-clients/dist-types/ts3.4/submodules/sso-oidc/runtimeConfig.shared.d.ts @@ -0,0 +1,49 @@ +import 
{ AwsSdkSigV4Signer } from "@aws-sdk/core"; +import { NoAuthSigner } from "@smithy/core"; +import { IdentityProviderConfig } from "@smithy/types"; +import { SSOOIDCClientConfig } from "./SSOOIDCClient"; +export declare const getRuntimeConfig: (config: SSOOIDCClientConfig) => { + apiVersion: string; + base64Decoder: import("@smithy/types").Decoder; + base64Encoder: (_input: string | Uint8Array) => string; + disableHostPrefix: boolean; + endpointProvider: ( + endpointParams: import("./endpoint/EndpointParameters").EndpointParameters, + context?: { + logger?: import("@smithy/types").Logger | undefined; + } + ) => import("@smithy/types").EndpointV2; + extensions: import("./runtimeExtensions").RuntimeExtension[]; + httpAuthSchemeProvider: import("./auth/httpAuthSchemeProvider").SSOOIDCHttpAuthSchemeProvider; + httpAuthSchemes: + | import("@smithy/types").HttpAuthScheme[] + | ( + | { + schemeId: string; + identityProvider: ( + ipc: IdentityProviderConfig + ) => + | import("@smithy/types").IdentityProvider< + import("@smithy/types").Identity + > + | undefined; + signer: AwsSdkSigV4Signer; + } + | { + schemeId: string; + identityProvider: ( + ipc: IdentityProviderConfig + ) => + | import("@smithy/types").IdentityProvider< + import("@smithy/types").Identity + > + | (() => Promise<{}>); + signer: NoAuthSigner; + } + )[]; + logger: import("@smithy/types").Logger; + serviceId: string; + urlParser: import("@smithy/types").UrlParser; + utf8Decoder: import("@smithy/types").Decoder; + utf8Encoder: (input: string | Uint8Array) => string; +}; diff --git a/amplify/functions/fetchDocuments/node_modules/@aws-sdk/nested-clients/dist-types/ts3.4/submodules/sso-oidc/runtimeExtensions.d.ts b/amplify/functions/fetchDocuments/node_modules/@aws-sdk/nested-clients/dist-types/ts3.4/submodules/sso-oidc/runtimeExtensions.d.ts new file mode 100644 index 0000000..d226882 --- /dev/null +++ 
b/amplify/functions/fetchDocuments/node_modules/@aws-sdk/nested-clients/dist-types/ts3.4/submodules/sso-oidc/runtimeExtensions.d.ts @@ -0,0 +1,11 @@ +import { SSOOIDCExtensionConfiguration } from "./extensionConfiguration"; +export interface RuntimeExtension { + configure(extensionConfiguration: SSOOIDCExtensionConfiguration): void; +} +export interface RuntimeExtensionsConfig { + extensions: RuntimeExtension[]; +} +export declare const resolveRuntimeExtensions: ( + runtimeConfig: any, + extensions: RuntimeExtension[] +) => any; diff --git a/amplify/functions/fetchDocuments/node_modules/@aws-sdk/nested-clients/dist-types/ts3.4/submodules/sts/STS.d.ts b/amplify/functions/fetchDocuments/node_modules/@aws-sdk/nested-clients/dist-types/ts3.4/submodules/sts/STS.d.ts new file mode 100644 index 0000000..cca9cbb --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@aws-sdk/nested-clients/dist-types/ts3.4/submodules/sts/STS.d.ts @@ -0,0 +1,39 @@ +import { HttpHandlerOptions as __HttpHandlerOptions } from "@smithy/types"; +import { + AssumeRoleCommandInput, + AssumeRoleCommandOutput, +} from "./commands/AssumeRoleCommand"; +import { + AssumeRoleWithWebIdentityCommandInput, + AssumeRoleWithWebIdentityCommandOutput, +} from "./commands/AssumeRoleWithWebIdentityCommand"; +import { STSClient } from "./STSClient"; +export interface STS { + assumeRole( + args: AssumeRoleCommandInput, + options?: __HttpHandlerOptions + ): Promise; + assumeRole( + args: AssumeRoleCommandInput, + cb: (err: any, data?: AssumeRoleCommandOutput) => void + ): void; + assumeRole( + args: AssumeRoleCommandInput, + options: __HttpHandlerOptions, + cb: (err: any, data?: AssumeRoleCommandOutput) => void + ): void; + assumeRoleWithWebIdentity( + args: AssumeRoleWithWebIdentityCommandInput, + options?: __HttpHandlerOptions + ): Promise; + assumeRoleWithWebIdentity( + args: AssumeRoleWithWebIdentityCommandInput, + cb: (err: any, data?: AssumeRoleWithWebIdentityCommandOutput) => void + ): void; + 
assumeRoleWithWebIdentity( + args: AssumeRoleWithWebIdentityCommandInput, + options: __HttpHandlerOptions, + cb: (err: any, data?: AssumeRoleWithWebIdentityCommandOutput) => void + ): void; +} +export declare class STS extends STSClient implements STS {} diff --git a/amplify/functions/fetchDocuments/node_modules/@aws-sdk/nested-clients/dist-types/ts3.4/submodules/sts/STSClient.d.ts b/amplify/functions/fetchDocuments/node_modules/@aws-sdk/nested-clients/dist-types/ts3.4/submodules/sts/STSClient.d.ts new file mode 100644 index 0000000..8bffddf --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@aws-sdk/nested-clients/dist-types/ts3.4/submodules/sts/STSClient.d.ts @@ -0,0 +1,128 @@ +import { + HostHeaderInputConfig, + HostHeaderResolvedConfig, +} from "@aws-sdk/middleware-host-header"; +import { + UserAgentInputConfig, + UserAgentResolvedConfig, +} from "@aws-sdk/middleware-user-agent"; +import { + RegionInputConfig, + RegionResolvedConfig, +} from "@smithy/config-resolver"; +import { + EndpointInputConfig, + EndpointResolvedConfig, +} from "@smithy/middleware-endpoint"; +import { + RetryInputConfig, + RetryResolvedConfig, +} from "@smithy/middleware-retry"; +import { HttpHandlerUserInput as __HttpHandlerUserInput } from "@smithy/protocol-http"; +import { + Client as __Client, + DefaultsMode as __DefaultsMode, + SmithyConfiguration as __SmithyConfiguration, + SmithyResolvedConfiguration as __SmithyResolvedConfiguration, +} from "@smithy/smithy-client"; +import { + AwsCredentialIdentityProvider, + BodyLengthCalculator as __BodyLengthCalculator, + CheckOptionalClientConfig as __CheckOptionalClientConfig, + ChecksumConstructor as __ChecksumConstructor, + Decoder as __Decoder, + Encoder as __Encoder, + HashConstructor as __HashConstructor, + HttpHandlerOptions as __HttpHandlerOptions, + Logger as __Logger, + Provider as __Provider, + Provider, + StreamCollector as __StreamCollector, + UrlParser as __UrlParser, + UserAgent as __UserAgent, +} from 
"@smithy/types"; +import { + HttpAuthSchemeInputConfig, + HttpAuthSchemeResolvedConfig, +} from "./auth/httpAuthSchemeProvider"; +import { + AssumeRoleCommandInput, + AssumeRoleCommandOutput, +} from "./commands/AssumeRoleCommand"; +import { + AssumeRoleWithWebIdentityCommandInput, + AssumeRoleWithWebIdentityCommandOutput, +} from "./commands/AssumeRoleWithWebIdentityCommand"; +import { + ClientInputEndpointParameters, + ClientResolvedEndpointParameters, + EndpointParameters, +} from "./endpoint/EndpointParameters"; +import { RuntimeExtension, RuntimeExtensionsConfig } from "./runtimeExtensions"; +export { __Client }; +export type ServiceInputTypes = + | AssumeRoleCommandInput + | AssumeRoleWithWebIdentityCommandInput; +export type ServiceOutputTypes = + | AssumeRoleCommandOutput + | AssumeRoleWithWebIdentityCommandOutput; +export interface ClientDefaults + extends Partial<__SmithyConfiguration<__HttpHandlerOptions>> { + requestHandler?: __HttpHandlerUserInput; + sha256?: __ChecksumConstructor | __HashConstructor; + urlParser?: __UrlParser; + bodyLengthChecker?: __BodyLengthCalculator; + streamCollector?: __StreamCollector; + base64Decoder?: __Decoder; + base64Encoder?: __Encoder; + utf8Decoder?: __Decoder; + utf8Encoder?: __Encoder; + runtime?: string; + disableHostPrefix?: boolean; + serviceId?: string; + useDualstackEndpoint?: boolean | __Provider; + useFipsEndpoint?: boolean | __Provider; + region?: string | __Provider; + profile?: string; + defaultUserAgentProvider?: Provider<__UserAgent>; + credentialDefaultProvider?: (input: any) => AwsCredentialIdentityProvider; + maxAttempts?: number | __Provider; + retryMode?: string | __Provider; + logger?: __Logger; + extensions?: RuntimeExtension[]; + defaultsMode?: __DefaultsMode | __Provider<__DefaultsMode>; +} +export type STSClientConfigType = Partial< + __SmithyConfiguration<__HttpHandlerOptions> +> & + ClientDefaults & + UserAgentInputConfig & + RetryInputConfig & + RegionInputConfig & + HostHeaderInputConfig & + 
EndpointInputConfig & + HttpAuthSchemeInputConfig & + ClientInputEndpointParameters; +export interface STSClientConfig extends STSClientConfigType {} +export type STSClientResolvedConfigType = + __SmithyResolvedConfiguration<__HttpHandlerOptions> & + Required & + RuntimeExtensionsConfig & + UserAgentResolvedConfig & + RetryResolvedConfig & + RegionResolvedConfig & + HostHeaderResolvedConfig & + EndpointResolvedConfig & + HttpAuthSchemeResolvedConfig & + ClientResolvedEndpointParameters; +export interface STSClientResolvedConfig extends STSClientResolvedConfigType {} +export declare class STSClient extends __Client< + __HttpHandlerOptions, + ServiceInputTypes, + ServiceOutputTypes, + STSClientResolvedConfig +> { + readonly config: STSClientResolvedConfig; + constructor(...[configuration]: __CheckOptionalClientConfig); + destroy(): void; +} diff --git a/amplify/functions/fetchDocuments/node_modules/@aws-sdk/nested-clients/dist-types/ts3.4/submodules/sts/auth/httpAuthExtensionConfiguration.d.ts b/amplify/functions/fetchDocuments/node_modules/@aws-sdk/nested-clients/dist-types/ts3.4/submodules/sts/auth/httpAuthExtensionConfiguration.d.ts new file mode 100644 index 0000000..ef83018 --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@aws-sdk/nested-clients/dist-types/ts3.4/submodules/sts/auth/httpAuthExtensionConfiguration.d.ts @@ -0,0 +1,32 @@ +import { + AwsCredentialIdentity, + AwsCredentialIdentityProvider, + HttpAuthScheme, +} from "@smithy/types"; +import { STSHttpAuthSchemeProvider } from "./httpAuthSchemeProvider"; +export interface HttpAuthExtensionConfiguration { + setHttpAuthScheme(httpAuthScheme: HttpAuthScheme): void; + httpAuthSchemes(): HttpAuthScheme[]; + setHttpAuthSchemeProvider( + httpAuthSchemeProvider: STSHttpAuthSchemeProvider + ): void; + httpAuthSchemeProvider(): STSHttpAuthSchemeProvider; + setCredentials( + credentials: AwsCredentialIdentity | AwsCredentialIdentityProvider + ): void; + credentials(): + | AwsCredentialIdentity + | 
AwsCredentialIdentityProvider + | undefined; +} +export type HttpAuthRuntimeConfig = Partial<{ + httpAuthSchemes: HttpAuthScheme[]; + httpAuthSchemeProvider: STSHttpAuthSchemeProvider; + credentials: AwsCredentialIdentity | AwsCredentialIdentityProvider; +}>; +export declare const getHttpAuthExtensionConfiguration: ( + runtimeConfig: HttpAuthRuntimeConfig +) => HttpAuthExtensionConfiguration; +export declare const resolveHttpAuthRuntimeConfig: ( + config: HttpAuthExtensionConfiguration +) => HttpAuthRuntimeConfig; diff --git a/amplify/functions/fetchDocuments/node_modules/@aws-sdk/nested-clients/dist-types/ts3.4/submodules/sts/auth/httpAuthSchemeProvider.d.ts b/amplify/functions/fetchDocuments/node_modules/@aws-sdk/nested-clients/dist-types/ts3.4/submodules/sts/auth/httpAuthSchemeProvider.d.ts new file mode 100644 index 0000000..0e17e2f --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@aws-sdk/nested-clients/dist-types/ts3.4/submodules/sts/auth/httpAuthSchemeProvider.d.ts @@ -0,0 +1,57 @@ +import { + AwsSdkSigV4AuthInputConfig, + AwsSdkSigV4AuthResolvedConfig, + AwsSdkSigV4PreviouslyResolved, +} from "@aws-sdk/core"; +import { + Client, + HandlerExecutionContext, + HttpAuthScheme, + HttpAuthSchemeParameters, + HttpAuthSchemeParametersProvider, + HttpAuthSchemeProvider, + Provider, +} from "@smithy/types"; +import { STSClientResolvedConfig } from "../STSClient"; +export interface STSHttpAuthSchemeParameters extends HttpAuthSchemeParameters { + region?: string; +} +export interface STSHttpAuthSchemeParametersProvider + extends HttpAuthSchemeParametersProvider< + STSClientResolvedConfig, + HandlerExecutionContext, + STSHttpAuthSchemeParameters, + object + > {} +export declare const defaultSTSHttpAuthSchemeParametersProvider: ( + config: STSClientResolvedConfig, + context: HandlerExecutionContext, + input: object +) => Promise; +export interface STSHttpAuthSchemeProvider + extends HttpAuthSchemeProvider {} +export declare const 
defaultSTSHttpAuthSchemeProvider: STSHttpAuthSchemeProvider; +export interface StsAuthInputConfig {} +export interface StsAuthResolvedConfig { + stsClientCtor: new (clientConfig: any) => Client; +} +export declare const resolveStsAuthConfig: ( + input: T & StsAuthInputConfig +) => T & StsAuthResolvedConfig; +export interface HttpAuthSchemeInputConfig + extends StsAuthInputConfig, + AwsSdkSigV4AuthInputConfig { + authSchemePreference?: string[] | Provider; + httpAuthSchemes?: HttpAuthScheme[]; + httpAuthSchemeProvider?: STSHttpAuthSchemeProvider; +} +export interface HttpAuthSchemeResolvedConfig + extends StsAuthResolvedConfig, + AwsSdkSigV4AuthResolvedConfig { + readonly authSchemePreference: Provider; + readonly httpAuthSchemes: HttpAuthScheme[]; + readonly httpAuthSchemeProvider: STSHttpAuthSchemeProvider; +} +export declare const resolveHttpAuthSchemeConfig: ( + config: T & HttpAuthSchemeInputConfig & AwsSdkSigV4PreviouslyResolved +) => T & HttpAuthSchemeResolvedConfig; diff --git a/amplify/functions/fetchDocuments/node_modules/@aws-sdk/nested-clients/dist-types/ts3.4/submodules/sts/commands/AssumeRoleCommand.d.ts b/amplify/functions/fetchDocuments/node_modules/@aws-sdk/nested-clients/dist-types/ts3.4/submodules/sts/commands/AssumeRoleCommand.d.ts new file mode 100644 index 0000000..9333fbb --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@aws-sdk/nested-clients/dist-types/ts3.4/submodules/sts/commands/AssumeRoleCommand.d.ts @@ -0,0 +1,47 @@ +import { Command as $Command } from "@smithy/smithy-client"; +import { MetadataBearer as __MetadataBearer } from "@smithy/types"; +import { AssumeRoleRequest, AssumeRoleResponse } from "../models/models_0"; +import { + ServiceInputTypes, + ServiceOutputTypes, + STSClientResolvedConfig, +} from "../STSClient"; +export { __MetadataBearer }; +export { $Command }; +export interface AssumeRoleCommandInput extends AssumeRoleRequest {} +export interface AssumeRoleCommandOutput + extends AssumeRoleResponse, + 
__MetadataBearer {} +declare const AssumeRoleCommand_base: { + new ( + input: AssumeRoleCommandInput + ): import("@smithy/smithy-client").CommandImpl< + AssumeRoleCommandInput, + AssumeRoleCommandOutput, + STSClientResolvedConfig, + ServiceInputTypes, + ServiceOutputTypes + >; + new ( + __0_0: AssumeRoleCommandInput + ): import("@smithy/smithy-client").CommandImpl< + AssumeRoleCommandInput, + AssumeRoleCommandOutput, + STSClientResolvedConfig, + ServiceInputTypes, + ServiceOutputTypes + >; + getEndpointParameterInstructions(): import("@smithy/middleware-endpoint").EndpointParameterInstructions; +}; +export declare class AssumeRoleCommand extends AssumeRoleCommand_base { + protected static __types: { + api: { + input: AssumeRoleRequest; + output: AssumeRoleResponse; + }; + sdk: { + input: AssumeRoleCommandInput; + output: AssumeRoleCommandOutput; + }; + }; +} diff --git a/amplify/functions/fetchDocuments/node_modules/@aws-sdk/nested-clients/dist-types/ts3.4/submodules/sts/commands/AssumeRoleWithWebIdentityCommand.d.ts b/amplify/functions/fetchDocuments/node_modules/@aws-sdk/nested-clients/dist-types/ts3.4/submodules/sts/commands/AssumeRoleWithWebIdentityCommand.d.ts new file mode 100644 index 0000000..222e034 --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@aws-sdk/nested-clients/dist-types/ts3.4/submodules/sts/commands/AssumeRoleWithWebIdentityCommand.d.ts @@ -0,0 +1,51 @@ +import { Command as $Command } from "@smithy/smithy-client"; +import { MetadataBearer as __MetadataBearer } from "@smithy/types"; +import { + AssumeRoleWithWebIdentityRequest, + AssumeRoleWithWebIdentityResponse, +} from "../models/models_0"; +import { + ServiceInputTypes, + ServiceOutputTypes, + STSClientResolvedConfig, +} from "../STSClient"; +export { __MetadataBearer }; +export { $Command }; +export interface AssumeRoleWithWebIdentityCommandInput + extends AssumeRoleWithWebIdentityRequest {} +export interface AssumeRoleWithWebIdentityCommandOutput + extends 
AssumeRoleWithWebIdentityResponse, + __MetadataBearer {} +declare const AssumeRoleWithWebIdentityCommand_base: { + new ( + input: AssumeRoleWithWebIdentityCommandInput + ): import("@smithy/smithy-client").CommandImpl< + AssumeRoleWithWebIdentityCommandInput, + AssumeRoleWithWebIdentityCommandOutput, + STSClientResolvedConfig, + ServiceInputTypes, + ServiceOutputTypes + >; + new ( + __0_0: AssumeRoleWithWebIdentityCommandInput + ): import("@smithy/smithy-client").CommandImpl< + AssumeRoleWithWebIdentityCommandInput, + AssumeRoleWithWebIdentityCommandOutput, + STSClientResolvedConfig, + ServiceInputTypes, + ServiceOutputTypes + >; + getEndpointParameterInstructions(): import("@smithy/middleware-endpoint").EndpointParameterInstructions; +}; +export declare class AssumeRoleWithWebIdentityCommand extends AssumeRoleWithWebIdentityCommand_base { + protected static __types: { + api: { + input: AssumeRoleWithWebIdentityRequest; + output: AssumeRoleWithWebIdentityResponse; + }; + sdk: { + input: AssumeRoleWithWebIdentityCommandInput; + output: AssumeRoleWithWebIdentityCommandOutput; + }; + }; +} diff --git a/amplify/functions/fetchDocuments/node_modules/@aws-sdk/nested-clients/dist-types/ts3.4/submodules/sts/commands/index.d.ts b/amplify/functions/fetchDocuments/node_modules/@aws-sdk/nested-clients/dist-types/ts3.4/submodules/sts/commands/index.d.ts new file mode 100644 index 0000000..0f200f5 --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@aws-sdk/nested-clients/dist-types/ts3.4/submodules/sts/commands/index.d.ts @@ -0,0 +1,2 @@ +export * from "./AssumeRoleCommand"; +export * from "./AssumeRoleWithWebIdentityCommand"; diff --git a/amplify/functions/fetchDocuments/node_modules/@aws-sdk/nested-clients/dist-types/ts3.4/submodules/sts/defaultRoleAssumers.d.ts b/amplify/functions/fetchDocuments/node_modules/@aws-sdk/nested-clients/dist-types/ts3.4/submodules/sts/defaultRoleAssumers.d.ts new file mode 100644 index 0000000..b6f22cc --- /dev/null +++ 
b/amplify/functions/fetchDocuments/node_modules/@aws-sdk/nested-clients/dist-types/ts3.4/submodules/sts/defaultRoleAssumers.d.ts @@ -0,0 +1,19 @@ +import { Pluggable } from "@smithy/types"; +import { + DefaultCredentialProvider, + RoleAssumer, + RoleAssumerWithWebIdentity, + STSRoleAssumerOptions, +} from "./defaultStsRoleAssumers"; +import { ServiceInputTypes, ServiceOutputTypes } from "./STSClient"; +export declare const getDefaultRoleAssumer: ( + stsOptions?: STSRoleAssumerOptions, + stsPlugins?: Pluggable[] +) => RoleAssumer; +export declare const getDefaultRoleAssumerWithWebIdentity: ( + stsOptions?: STSRoleAssumerOptions, + stsPlugins?: Pluggable[] +) => RoleAssumerWithWebIdentity; +export declare const decorateDefaultCredentialProvider: ( + provider: DefaultCredentialProvider +) => DefaultCredentialProvider; diff --git a/amplify/functions/fetchDocuments/node_modules/@aws-sdk/nested-clients/dist-types/ts3.4/submodules/sts/defaultStsRoleAssumers.d.ts b/amplify/functions/fetchDocuments/node_modules/@aws-sdk/nested-clients/dist-types/ts3.4/submodules/sts/defaultStsRoleAssumers.d.ts new file mode 100644 index 0000000..3831379 --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@aws-sdk/nested-clients/dist-types/ts3.4/submodules/sts/defaultStsRoleAssumers.d.ts @@ -0,0 +1,33 @@ +import { CredentialProviderOptions } from "@aws-sdk/types"; +import { AwsCredentialIdentity, Logger, Provider } from "@smithy/types"; +import { AssumeRoleCommandInput } from "./commands/AssumeRoleCommand"; +import { AssumeRoleWithWebIdentityCommandInput } from "./commands/AssumeRoleWithWebIdentityCommand"; +import { STSClient, STSClientConfig } from "./STSClient"; +export type STSRoleAssumerOptions = Pick< + STSClientConfig, + "logger" | "region" | "requestHandler" +> & { + credentialProviderLogger?: Logger; + parentClientConfig?: CredentialProviderOptions["parentClientConfig"]; +}; +export type RoleAssumer = ( + sourceCreds: AwsCredentialIdentity, + params: 
AssumeRoleCommandInput +) => Promise; +export declare const getDefaultRoleAssumer: ( + stsOptions: STSRoleAssumerOptions, + STSClient: new (options: STSClientConfig) => STSClient +) => RoleAssumer; +export type RoleAssumerWithWebIdentity = ( + params: AssumeRoleWithWebIdentityCommandInput +) => Promise; +export declare const getDefaultRoleAssumerWithWebIdentity: ( + stsOptions: STSRoleAssumerOptions, + STSClient: new (options: STSClientConfig) => STSClient +) => RoleAssumerWithWebIdentity; +export type DefaultCredentialProvider = ( + input: any +) => Provider; +export declare const decorateDefaultCredentialProvider: ( + provider: DefaultCredentialProvider +) => DefaultCredentialProvider; diff --git a/amplify/functions/fetchDocuments/node_modules/@aws-sdk/nested-clients/dist-types/ts3.4/submodules/sts/endpoint/EndpointParameters.d.ts b/amplify/functions/fetchDocuments/node_modules/@aws-sdk/nested-clients/dist-types/ts3.4/submodules/sts/endpoint/EndpointParameters.d.ts new file mode 100644 index 0000000..33567fd --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@aws-sdk/nested-clients/dist-types/ts3.4/submodules/sts/endpoint/EndpointParameters.d.ts @@ -0,0 +1,57 @@ +import { + Endpoint, + EndpointParameters as __EndpointParameters, + EndpointV2, + Provider, +} from "@smithy/types"; +export interface ClientInputEndpointParameters { + region?: string | Provider; + useDualstackEndpoint?: boolean | Provider; + useFipsEndpoint?: boolean | Provider; + endpoint?: + | string + | Provider + | Endpoint + | Provider + | EndpointV2 + | Provider; + useGlobalEndpoint?: boolean | Provider; +} +export type ClientResolvedEndpointParameters = ClientInputEndpointParameters & { + defaultSigningName: string; +}; +export declare const resolveClientEndpointParameters: ( + options: T & ClientInputEndpointParameters +) => T & + ClientInputEndpointParameters & { + defaultSigningName: string; + }; +export declare const commonParams: { + readonly UseGlobalEndpoint: { + readonly 
type: "builtInParams"; + readonly name: "useGlobalEndpoint"; + }; + readonly UseFIPS: { + readonly type: "builtInParams"; + readonly name: "useFipsEndpoint"; + }; + readonly Endpoint: { + readonly type: "builtInParams"; + readonly name: "endpoint"; + }; + readonly Region: { + readonly type: "builtInParams"; + readonly name: "region"; + }; + readonly UseDualStack: { + readonly type: "builtInParams"; + readonly name: "useDualstackEndpoint"; + }; +}; +export interface EndpointParameters extends __EndpointParameters { + Region?: string; + UseDualStack?: boolean; + UseFIPS?: boolean; + Endpoint?: string; + UseGlobalEndpoint?: boolean; +} diff --git a/amplify/functions/fetchDocuments/node_modules/@aws-sdk/nested-clients/dist-types/ts3.4/submodules/sts/endpoint/endpointResolver.d.ts b/amplify/functions/fetchDocuments/node_modules/@aws-sdk/nested-clients/dist-types/ts3.4/submodules/sts/endpoint/endpointResolver.d.ts new file mode 100644 index 0000000..5909925 --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@aws-sdk/nested-clients/dist-types/ts3.4/submodules/sts/endpoint/endpointResolver.d.ts @@ -0,0 +1,8 @@ +import { EndpointV2, Logger } from "@smithy/types"; +import { EndpointParameters } from "./EndpointParameters"; +export declare const defaultEndpointResolver: ( + endpointParams: EndpointParameters, + context?: { + logger?: Logger; + } +) => EndpointV2; diff --git a/amplify/functions/fetchDocuments/node_modules/@aws-sdk/nested-clients/dist-types/ts3.4/submodules/sts/endpoint/ruleset.d.ts b/amplify/functions/fetchDocuments/node_modules/@aws-sdk/nested-clients/dist-types/ts3.4/submodules/sts/endpoint/ruleset.d.ts new file mode 100644 index 0000000..4b23899 --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@aws-sdk/nested-clients/dist-types/ts3.4/submodules/sts/endpoint/ruleset.d.ts @@ -0,0 +1,2 @@ +import { RuleSetObject } from "@smithy/types"; +export declare const ruleSet: RuleSetObject; diff --git 
a/amplify/functions/fetchDocuments/node_modules/@aws-sdk/nested-clients/dist-types/ts3.4/submodules/sts/extensionConfiguration.d.ts b/amplify/functions/fetchDocuments/node_modules/@aws-sdk/nested-clients/dist-types/ts3.4/submodules/sts/extensionConfiguration.d.ts new file mode 100644 index 0000000..14b124b --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@aws-sdk/nested-clients/dist-types/ts3.4/submodules/sts/extensionConfiguration.d.ts @@ -0,0 +1,9 @@ +import { AwsRegionExtensionConfiguration } from "@aws-sdk/types"; +import { HttpHandlerExtensionConfiguration } from "@smithy/protocol-http"; +import { DefaultExtensionConfiguration } from "@smithy/types"; +import { HttpAuthExtensionConfiguration } from "./auth/httpAuthExtensionConfiguration"; +export interface STSExtensionConfiguration + extends HttpHandlerExtensionConfiguration, + DefaultExtensionConfiguration, + AwsRegionExtensionConfiguration, + HttpAuthExtensionConfiguration {} diff --git a/amplify/functions/fetchDocuments/node_modules/@aws-sdk/nested-clients/dist-types/ts3.4/submodules/sts/index.d.ts b/amplify/functions/fetchDocuments/node_modules/@aws-sdk/nested-clients/dist-types/ts3.4/submodules/sts/index.d.ts new file mode 100644 index 0000000..157a306 --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@aws-sdk/nested-clients/dist-types/ts3.4/submodules/sts/index.d.ts @@ -0,0 +1,9 @@ +export * from "./STSClient"; +export * from "./STS"; +export { ClientInputEndpointParameters } from "./endpoint/EndpointParameters"; +export { RuntimeExtension } from "./runtimeExtensions"; +export { STSExtensionConfiguration } from "./extensionConfiguration"; +export * from "./commands"; +export * from "./models"; +export * from "./defaultRoleAssumers"; +export { STSServiceException } from "./models/STSServiceException"; diff --git a/amplify/functions/fetchDocuments/node_modules/@aws-sdk/nested-clients/dist-types/ts3.4/submodules/sts/models/STSServiceException.d.ts 
b/amplify/functions/fetchDocuments/node_modules/@aws-sdk/nested-clients/dist-types/ts3.4/submodules/sts/models/STSServiceException.d.ts new file mode 100644 index 0000000..95fc485 --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@aws-sdk/nested-clients/dist-types/ts3.4/submodules/sts/models/STSServiceException.d.ts @@ -0,0 +1,9 @@ +import { + ServiceException as __ServiceException, + ServiceExceptionOptions as __ServiceExceptionOptions, +} from "@smithy/smithy-client"; +export { __ServiceExceptionOptions }; +export { __ServiceException }; +export declare class STSServiceException extends __ServiceException { + constructor(options: __ServiceExceptionOptions); +} diff --git a/amplify/functions/fetchDocuments/node_modules/@aws-sdk/nested-clients/dist-types/ts3.4/submodules/sts/models/index.d.ts b/amplify/functions/fetchDocuments/node_modules/@aws-sdk/nested-clients/dist-types/ts3.4/submodules/sts/models/index.d.ts new file mode 100644 index 0000000..09c5d6e --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@aws-sdk/nested-clients/dist-types/ts3.4/submodules/sts/models/index.d.ts @@ -0,0 +1 @@ +export * from "./models_0"; diff --git a/amplify/functions/fetchDocuments/node_modules/@aws-sdk/nested-clients/dist-types/ts3.4/submodules/sts/models/models_0.d.ts b/amplify/functions/fetchDocuments/node_modules/@aws-sdk/nested-clients/dist-types/ts3.4/submodules/sts/models/models_0.d.ts new file mode 100644 index 0000000..1cba371 --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@aws-sdk/nested-clients/dist-types/ts3.4/submodules/sts/models/models_0.d.ts @@ -0,0 +1,123 @@ +import { ExceptionOptionType as __ExceptionOptionType } from "@smithy/smithy-client"; +import { STSServiceException as __BaseException } from "./STSServiceException"; +export interface AssumedRoleUser { + AssumedRoleId: string | undefined; + Arn: string | undefined; +} +export interface PolicyDescriptorType { + arn?: string | undefined; +} +export interface 
ProvidedContext { + ProviderArn?: string | undefined; + ContextAssertion?: string | undefined; +} +export interface Tag { + Key: string | undefined; + Value: string | undefined; +} +export interface AssumeRoleRequest { + RoleArn: string | undefined; + RoleSessionName: string | undefined; + PolicyArns?: PolicyDescriptorType[] | undefined; + Policy?: string | undefined; + DurationSeconds?: number | undefined; + Tags?: Tag[] | undefined; + TransitiveTagKeys?: string[] | undefined; + ExternalId?: string | undefined; + SerialNumber?: string | undefined; + TokenCode?: string | undefined; + SourceIdentity?: string | undefined; + ProvidedContexts?: ProvidedContext[] | undefined; +} +export interface Credentials { + AccessKeyId: string | undefined; + SecretAccessKey: string | undefined; + SessionToken: string | undefined; + Expiration: Date | undefined; +} +export declare const CredentialsFilterSensitiveLog: (obj: Credentials) => any; +export interface AssumeRoleResponse { + Credentials?: Credentials | undefined; + AssumedRoleUser?: AssumedRoleUser | undefined; + PackedPolicySize?: number | undefined; + SourceIdentity?: string | undefined; +} +export declare const AssumeRoleResponseFilterSensitiveLog: ( + obj: AssumeRoleResponse +) => any; +export declare class ExpiredTokenException extends __BaseException { + readonly name: "ExpiredTokenException"; + readonly $fault: "client"; + constructor( + opts: __ExceptionOptionType + ); +} +export declare class MalformedPolicyDocumentException extends __BaseException { + readonly name: "MalformedPolicyDocumentException"; + readonly $fault: "client"; + constructor( + opts: __ExceptionOptionType< + MalformedPolicyDocumentException, + __BaseException + > + ); +} +export declare class PackedPolicyTooLargeException extends __BaseException { + readonly name: "PackedPolicyTooLargeException"; + readonly $fault: "client"; + constructor( + opts: __ExceptionOptionType + ); +} +export declare class RegionDisabledException extends __BaseException 
{ + readonly name: "RegionDisabledException"; + readonly $fault: "client"; + constructor( + opts: __ExceptionOptionType + ); +} +export declare class IDPRejectedClaimException extends __BaseException { + readonly name: "IDPRejectedClaimException"; + readonly $fault: "client"; + constructor( + opts: __ExceptionOptionType + ); +} +export declare class InvalidIdentityTokenException extends __BaseException { + readonly name: "InvalidIdentityTokenException"; + readonly $fault: "client"; + constructor( + opts: __ExceptionOptionType + ); +} +export interface AssumeRoleWithWebIdentityRequest { + RoleArn: string | undefined; + RoleSessionName: string | undefined; + WebIdentityToken: string | undefined; + ProviderId?: string | undefined; + PolicyArns?: PolicyDescriptorType[] | undefined; + Policy?: string | undefined; + DurationSeconds?: number | undefined; +} +export declare const AssumeRoleWithWebIdentityRequestFilterSensitiveLog: ( + obj: AssumeRoleWithWebIdentityRequest +) => any; +export interface AssumeRoleWithWebIdentityResponse { + Credentials?: Credentials | undefined; + SubjectFromWebIdentityToken?: string | undefined; + AssumedRoleUser?: AssumedRoleUser | undefined; + PackedPolicySize?: number | undefined; + Provider?: string | undefined; + Audience?: string | undefined; + SourceIdentity?: string | undefined; +} +export declare const AssumeRoleWithWebIdentityResponseFilterSensitiveLog: ( + obj: AssumeRoleWithWebIdentityResponse +) => any; +export declare class IDPCommunicationErrorException extends __BaseException { + readonly name: "IDPCommunicationErrorException"; + readonly $fault: "client"; + constructor( + opts: __ExceptionOptionType + ); +} diff --git a/amplify/functions/fetchDocuments/node_modules/@aws-sdk/nested-clients/dist-types/ts3.4/submodules/sts/protocols/Aws_query.d.ts b/amplify/functions/fetchDocuments/node_modules/@aws-sdk/nested-clients/dist-types/ts3.4/submodules/sts/protocols/Aws_query.d.ts new file mode 100644 index 0000000..1d03deb --- 
/dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@aws-sdk/nested-clients/dist-types/ts3.4/submodules/sts/protocols/Aws_query.d.ts @@ -0,0 +1,29 @@ +import { + HttpRequest as __HttpRequest, + HttpResponse as __HttpResponse, +} from "@smithy/protocol-http"; +import { SerdeContext as __SerdeContext } from "@smithy/types"; +import { + AssumeRoleCommandInput, + AssumeRoleCommandOutput, +} from "../commands/AssumeRoleCommand"; +import { + AssumeRoleWithWebIdentityCommandInput, + AssumeRoleWithWebIdentityCommandOutput, +} from "../commands/AssumeRoleWithWebIdentityCommand"; +export declare const se_AssumeRoleCommand: ( + input: AssumeRoleCommandInput, + context: __SerdeContext +) => Promise<__HttpRequest>; +export declare const se_AssumeRoleWithWebIdentityCommand: ( + input: AssumeRoleWithWebIdentityCommandInput, + context: __SerdeContext +) => Promise<__HttpRequest>; +export declare const de_AssumeRoleCommand: ( + output: __HttpResponse, + context: __SerdeContext +) => Promise; +export declare const de_AssumeRoleWithWebIdentityCommand: ( + output: __HttpResponse, + context: __SerdeContext +) => Promise; diff --git a/amplify/functions/fetchDocuments/node_modules/@aws-sdk/nested-clients/dist-types/ts3.4/submodules/sts/runtimeConfig.browser.d.ts b/amplify/functions/fetchDocuments/node_modules/@aws-sdk/nested-clients/dist-types/ts3.4/submodules/sts/runtimeConfig.browser.d.ts new file mode 100644 index 0000000..54a4e79 --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@aws-sdk/nested-clients/dist-types/ts3.4/submodules/sts/runtimeConfig.browser.d.ts @@ -0,0 +1,131 @@ +import { FetchHttpHandler as RequestHandler } from "@smithy/fetch-http-handler"; +import { STSClientConfig } from "./STSClient"; +export declare const getRuntimeConfig: (config: STSClientConfig) => { + runtime: string; + defaultsMode: import("@smithy/types").Provider< + import("@smithy/smithy-client").ResolvedDefaultsMode + >; + bodyLengthChecker: 
import("@smithy/types").BodyLengthCalculator; + credentialDefaultProvider: + | ((input: any) => import("@smithy/types").AwsCredentialIdentityProvider) + | (( + _: unknown + ) => () => Promise); + defaultUserAgentProvider: ( + config?: + | import("@aws-sdk/util-user-agent-browser").PreviouslyResolved + | undefined + ) => Promise; + maxAttempts: number | import("@smithy/types").Provider; + region: string | import("@smithy/types").Provider; + requestHandler: + | import("@smithy/protocol-http").HttpHandler + | RequestHandler; + retryMode: string | import("@smithy/types").Provider; + sha256: import("@smithy/types").HashConstructor; + streamCollector: import("@smithy/types").StreamCollector; + useDualstackEndpoint: boolean | import("@smithy/types").Provider; + useFipsEndpoint: boolean | import("@smithy/types").Provider; + apiVersion: string; + cacheMiddleware?: boolean | undefined; + urlParser: import("@smithy/types").UrlParser; + base64Decoder: import("@smithy/types").Decoder; + base64Encoder: (_input: string | Uint8Array) => string; + utf8Decoder: import("@smithy/types").Decoder; + utf8Encoder: (input: string | Uint8Array) => string; + disableHostPrefix: boolean; + serviceId: string; + profile?: string | undefined; + logger: import("@smithy/types").Logger; + extensions: import("./runtimeExtensions").RuntimeExtension[]; + customUserAgent?: string | import("@smithy/types").UserAgent | undefined; + userAgentAppId?: + | string + | import("@smithy/types").Provider + | undefined; + retryStrategy?: + | import("@smithy/types").RetryStrategy + | import("@smithy/types").RetryStrategyV2 + | undefined; + endpoint?: + | (( + | string + | import("@smithy/types").Endpoint + | import("@smithy/types").Provider + | import("@smithy/types").EndpointV2 + | import("@smithy/types").Provider + ) & + ( + | string + | import("@smithy/types").Provider + | import("@smithy/types").Endpoint + | import("@smithy/types").Provider + | import("@smithy/types").EndpointV2 + | 
import("@smithy/types").Provider + )) + | undefined; + endpointProvider: ( + params: import("./endpoint/EndpointParameters").EndpointParameters, + context?: + | { + logger?: import("@smithy/types").Logger | undefined; + } + | undefined + ) => import("@smithy/types").EndpointV2; + tls?: boolean | undefined; + serviceConfiguredEndpoint?: undefined; + authSchemePreference?: + | string[] + | import("@smithy/types").Provider + | undefined; + httpAuthSchemes: + | import("@smithy/types").HttpAuthScheme[] + | ( + | { + schemeId: string; + identityProvider: ( + ipc: import("@smithy/types").IdentityProviderConfig + ) => + | import("@smithy/types").IdentityProvider< + import("@smithy/types").Identity + > + | undefined; + signer: import("@aws-sdk/core").AwsSdkSigV4Signer; + } + | { + schemeId: string; + identityProvider: ( + ipc: import("@smithy/types").IdentityProviderConfig + ) => + | import("@smithy/types").IdentityProvider< + import("@smithy/types").Identity + > + | (() => Promise<{}>); + signer: import("@smithy/core").NoAuthSigner; + } + )[]; + httpAuthSchemeProvider: import("./auth/httpAuthSchemeProvider").STSHttpAuthSchemeProvider; + credentials?: + | import("@smithy/types").AwsCredentialIdentity + | import("@smithy/types").AwsCredentialIdentityProvider + | undefined; + signer?: + | import("@smithy/types").RequestSigner + | (( + authScheme?: import("@smithy/types").AuthScheme | undefined + ) => Promise) + | undefined; + signingEscapePath?: boolean | undefined; + systemClockOffset?: number | undefined; + signingRegion?: string | undefined; + signerConstructor?: + | (new ( + options: import("@smithy/signature-v4").SignatureV4Init & + import("@smithy/signature-v4").SignatureV4CryptoInit + ) => import("@smithy/types").RequestSigner) + | undefined; + useGlobalEndpoint?: + | boolean + | import("@smithy/types").Provider + | undefined; +}; diff --git a/amplify/functions/fetchDocuments/node_modules/@aws-sdk/nested-clients/dist-types/ts3.4/submodules/sts/runtimeConfig.d.ts 
b/amplify/functions/fetchDocuments/node_modules/@aws-sdk/nested-clients/dist-types/ts3.4/submodules/sts/runtimeConfig.d.ts new file mode 100644 index 0000000..50cd2c7 --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@aws-sdk/nested-clients/dist-types/ts3.4/submodules/sts/runtimeConfig.d.ts @@ -0,0 +1,112 @@ +import { NoAuthSigner } from "@smithy/core"; +import { NodeHttpHandler as RequestHandler } from "@smithy/node-http-handler"; +import { IdentityProviderConfig } from "@smithy/types"; +import { STSClientConfig } from "./STSClient"; +export declare const getRuntimeConfig: (config: STSClientConfig) => { + runtime: string; + defaultsMode: import("@smithy/types").Provider< + import("@smithy/smithy-client").ResolvedDefaultsMode + >; + authSchemePreference: string[] | import("@smithy/types").Provider; + bodyLengthChecker: import("@smithy/types").BodyLengthCalculator; + defaultUserAgentProvider: ( + config?: + | import("@aws-sdk/util-user-agent-node").PreviouslyResolved + | undefined + ) => Promise; + httpAuthSchemes: + | import("@smithy/types").HttpAuthScheme[] + | { + schemeId: string; + identityProvider: ( + ipc: IdentityProviderConfig + ) => + | import("@smithy/types").IdentityProvider< + import("@smithy/types").Identity + > + | (() => Promise<{}>); + signer: NoAuthSigner; + }[]; + maxAttempts: number | import("@smithy/types").Provider; + region: string | import("@smithy/types").Provider; + requestHandler: + | RequestHandler + | import("@smithy/protocol-http").HttpHandler; + retryMode: string | import("@smithy/types").Provider; + sha256: import("@smithy/types").HashConstructor; + streamCollector: import("@smithy/types").StreamCollector; + useDualstackEndpoint: boolean | import("@smithy/types").Provider; + useFipsEndpoint: boolean | import("@smithy/types").Provider; + userAgentAppId: string | import("@smithy/types").Provider; + apiVersion: string; + cacheMiddleware?: boolean | undefined; + urlParser: import("@smithy/types").UrlParser; + 
base64Decoder: import("@smithy/types").Decoder; + base64Encoder: (_input: string | Uint8Array) => string; + utf8Decoder: import("@smithy/types").Decoder; + utf8Encoder: (input: string | Uint8Array) => string; + disableHostPrefix: boolean; + serviceId: string; + profile?: string | undefined; + credentialDefaultProvider?: + | ((input: any) => import("@smithy/types").AwsCredentialIdentityProvider) + | undefined; + logger: import("@smithy/types").Logger; + extensions: import("./runtimeExtensions").RuntimeExtension[]; + customUserAgent?: string | import("@smithy/types").UserAgent | undefined; + retryStrategy?: + | import("@smithy/types").RetryStrategy + | import("@smithy/types").RetryStrategyV2 + | undefined; + endpoint?: + | (( + | string + | import("@smithy/types").Endpoint + | import("@smithy/types").Provider + | import("@smithy/types").EndpointV2 + | import("@smithy/types").Provider + ) & + ( + | string + | import("@smithy/types").Provider + | import("@smithy/types").Endpoint + | import("@smithy/types").Provider + | import("@smithy/types").EndpointV2 + | import("@smithy/types").Provider + )) + | undefined; + endpointProvider: ( + params: import("./endpoint/EndpointParameters").EndpointParameters, + context?: + | { + logger?: import("@smithy/types").Logger | undefined; + } + | undefined + ) => import("@smithy/types").EndpointV2; + tls?: boolean | undefined; + serviceConfiguredEndpoint?: undefined; + httpAuthSchemeProvider: import("./auth/httpAuthSchemeProvider").STSHttpAuthSchemeProvider; + credentials?: + | import("@smithy/types").AwsCredentialIdentity + | import("@smithy/types").AwsCredentialIdentityProvider + | undefined; + signer?: + | import("@smithy/types").RequestSigner + | (( + authScheme?: import("@smithy/types").AuthScheme | undefined + ) => Promise) + | undefined; + signingEscapePath?: boolean | undefined; + systemClockOffset?: number | undefined; + signingRegion?: string | undefined; + signerConstructor?: + | (new ( + options: 
import("@smithy/signature-v4").SignatureV4Init & + import("@smithy/signature-v4").SignatureV4CryptoInit + ) => import("@smithy/types").RequestSigner) + | undefined; + useGlobalEndpoint?: + | boolean + | import("@smithy/types").Provider + | undefined; +}; diff --git a/amplify/functions/fetchDocuments/node_modules/@aws-sdk/nested-clients/dist-types/ts3.4/submodules/sts/runtimeConfig.native.d.ts b/amplify/functions/fetchDocuments/node_modules/@aws-sdk/nested-clients/dist-types/ts3.4/submodules/sts/runtimeConfig.native.d.ts new file mode 100644 index 0000000..5eda45e --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@aws-sdk/nested-clients/dist-types/ts3.4/submodules/sts/runtimeConfig.native.d.ts @@ -0,0 +1,135 @@ +import { STSClientConfig } from "./STSClient"; +export declare const getRuntimeConfig: (config: STSClientConfig) => { + runtime: string; + sha256: import("@smithy/types").HashConstructor; + requestHandler: + | import("@smithy/types").NodeHttpHandlerOptions + | import("@smithy/types").FetchHttpHandlerOptions + | Record + | import("@smithy/protocol-http").HttpHandler + | import("@smithy/fetch-http-handler").FetchHttpHandler; + apiVersion: string; + cacheMiddleware?: boolean | undefined; + urlParser: import("@smithy/types").UrlParser; + bodyLengthChecker: import("@smithy/types").BodyLengthCalculator; + streamCollector: import("@smithy/types").StreamCollector; + base64Decoder: import("@smithy/types").Decoder; + base64Encoder: (_input: string | Uint8Array) => string; + utf8Decoder: import("@smithy/types").Decoder; + utf8Encoder: (input: string | Uint8Array) => string; + disableHostPrefix: boolean; + serviceId: string; + useDualstackEndpoint: boolean | import("@smithy/types").Provider; + useFipsEndpoint: boolean | import("@smithy/types").Provider; + region: string | import("@smithy/types").Provider; + profile?: string | undefined; + defaultUserAgentProvider: ( + config?: + | import("@aws-sdk/util-user-agent-browser").PreviouslyResolved + | 
undefined + ) => Promise; + credentialDefaultProvider: + | ((input: any) => import("@smithy/types").AwsCredentialIdentityProvider) + | (( + _: unknown + ) => () => Promise); + maxAttempts: number | import("@smithy/types").Provider; + retryMode: string | import("@smithy/types").Provider; + logger: import("@smithy/types").Logger; + extensions: import("./runtimeExtensions").RuntimeExtension[]; + defaultsMode: + | import("@smithy/smithy-client").DefaultsMode + | import("@smithy/types").Provider< + import("@smithy/smithy-client").DefaultsMode + >; + customUserAgent?: string | import("@smithy/types").UserAgent | undefined; + userAgentAppId?: + | string + | import("@smithy/types").Provider + | undefined; + retryStrategy?: + | import("@smithy/types").RetryStrategy + | import("@smithy/types").RetryStrategyV2 + | undefined; + endpoint?: + | (( + | string + | import("@smithy/types").Endpoint + | import("@smithy/types").Provider + | import("@smithy/types").EndpointV2 + | import("@smithy/types").Provider + ) & + ( + | string + | import("@smithy/types").Provider + | import("@smithy/types").Endpoint + | import("@smithy/types").Provider + | import("@smithy/types").EndpointV2 + | import("@smithy/types").Provider + )) + | undefined; + endpointProvider: ( + params: import("./endpoint/EndpointParameters").EndpointParameters, + context?: + | { + logger?: import("@smithy/types").Logger | undefined; + } + | undefined + ) => import("@smithy/types").EndpointV2; + tls?: boolean | undefined; + serviceConfiguredEndpoint?: undefined; + authSchemePreference?: + | string[] + | import("@smithy/types").Provider + | undefined; + httpAuthSchemes: + | import("@smithy/types").HttpAuthScheme[] + | ( + | { + schemeId: string; + identityProvider: ( + ipc: import("@smithy/types").IdentityProviderConfig + ) => + | import("@smithy/types").IdentityProvider< + import("@smithy/types").Identity + > + | undefined; + signer: import("@aws-sdk/core").AwsSdkSigV4Signer; + } + | { + schemeId: string; + 
identityProvider: ( + ipc: import("@smithy/types").IdentityProviderConfig + ) => + | import("@smithy/types").IdentityProvider< + import("@smithy/types").Identity + > + | (() => Promise<{}>); + signer: import("@smithy/core").NoAuthSigner; + } + )[]; + httpAuthSchemeProvider: import("./auth/httpAuthSchemeProvider").STSHttpAuthSchemeProvider; + credentials?: + | import("@smithy/types").AwsCredentialIdentity + | import("@smithy/types").AwsCredentialIdentityProvider + | undefined; + signer?: + | import("@smithy/types").RequestSigner + | (( + authScheme?: import("@smithy/types").AuthScheme | undefined + ) => Promise) + | undefined; + signingEscapePath?: boolean | undefined; + systemClockOffset?: number | undefined; + signingRegion?: string | undefined; + signerConstructor?: + | (new ( + options: import("@smithy/signature-v4").SignatureV4Init & + import("@smithy/signature-v4").SignatureV4CryptoInit + ) => import("@smithy/types").RequestSigner) + | undefined; + useGlobalEndpoint?: + | boolean + | import("@smithy/types").Provider + | undefined; +}; diff --git a/amplify/functions/fetchDocuments/node_modules/@aws-sdk/nested-clients/dist-types/ts3.4/submodules/sts/runtimeConfig.shared.d.ts b/amplify/functions/fetchDocuments/node_modules/@aws-sdk/nested-clients/dist-types/ts3.4/submodules/sts/runtimeConfig.shared.d.ts new file mode 100644 index 0000000..860b0c8 --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@aws-sdk/nested-clients/dist-types/ts3.4/submodules/sts/runtimeConfig.shared.d.ts @@ -0,0 +1,51 @@ +import { AwsSdkSigV4Signer } from "@aws-sdk/core"; +import { NoAuthSigner } from "@smithy/core"; +import { IdentityProviderConfig } from "@smithy/types"; +import { STSClientConfig } from "./STSClient"; +export declare const getRuntimeConfig: (config: STSClientConfig) => { + apiVersion: string; + base64Decoder: import("@smithy/types").Decoder; + base64Encoder: (_input: string | Uint8Array) => string; + disableHostPrefix: boolean; + endpointProvider: ( + 
params: import("./endpoint/EndpointParameters").EndpointParameters, + context?: + | { + logger?: import("@smithy/types").Logger | undefined; + } + | undefined + ) => import("@smithy/types").EndpointV2; + extensions: import("./runtimeExtensions").RuntimeExtension[]; + httpAuthSchemeProvider: import("./auth/httpAuthSchemeProvider").STSHttpAuthSchemeProvider; + httpAuthSchemes: + | import("@smithy/types").HttpAuthScheme[] + | ( + | { + schemeId: string; + identityProvider: ( + ipc: IdentityProviderConfig + ) => + | import("@smithy/types").IdentityProvider< + import("@smithy/types").Identity + > + | undefined; + signer: AwsSdkSigV4Signer; + } + | { + schemeId: string; + identityProvider: ( + ipc: IdentityProviderConfig + ) => + | import("@smithy/types").IdentityProvider< + import("@smithy/types").Identity + > + | (() => Promise<{}>); + signer: NoAuthSigner; + } + )[]; + logger: import("@smithy/types").Logger; + serviceId: string; + urlParser: import("@smithy/types").UrlParser; + utf8Decoder: import("@smithy/types").Decoder; + utf8Encoder: (input: string | Uint8Array) => string; +}; diff --git a/amplify/functions/fetchDocuments/node_modules/@aws-sdk/nested-clients/dist-types/ts3.4/submodules/sts/runtimeExtensions.d.ts b/amplify/functions/fetchDocuments/node_modules/@aws-sdk/nested-clients/dist-types/ts3.4/submodules/sts/runtimeExtensions.d.ts new file mode 100644 index 0000000..d3cd411 --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@aws-sdk/nested-clients/dist-types/ts3.4/submodules/sts/runtimeExtensions.d.ts @@ -0,0 +1,11 @@ +import { STSExtensionConfiguration } from "./extensionConfiguration"; +export interface RuntimeExtension { + configure(extensionConfiguration: STSExtensionConfiguration): void; +} +export interface RuntimeExtensionsConfig { + extensions: RuntimeExtension[]; +} +export declare const resolveRuntimeExtensions: ( + runtimeConfig: any, + extensions: RuntimeExtension[] +) => any; diff --git 
a/amplify/functions/fetchDocuments/node_modules/@aws-sdk/nested-clients/package.json b/amplify/functions/fetchDocuments/node_modules/@aws-sdk/nested-clients/package.json new file mode 100644 index 0000000..26191ad --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@aws-sdk/nested-clients/package.json @@ -0,0 +1,115 @@ +{ + "name": "@aws-sdk/nested-clients", + "version": "3.803.0", + "description": "Nested clients for AWS SDK packages.", + "main": "./dist-cjs/index.js", + "module": "./dist-es/index.js", + "types": "./dist-types/index.d.ts", + "scripts": { + "build": "yarn lint && concurrently 'yarn:build:cjs' 'yarn:build:es' 'yarn:build:types'", + "build:cjs": "node ../../scripts/compilation/inline nested-clients", + "build:es": "tsc -p tsconfig.es.json", + "build:include:deps": "lerna run --scope $npm_package_name --include-dependencies build", + "build:types": "tsc -p tsconfig.types.json", + "build:types:downlevel": "downlevel-dts dist-types dist-types/ts3.4", + "clean": "rimraf ./dist-* && rimraf *.tsbuildinfo", + "lint": "node ../../scripts/validation/submodules-linter.js --pkg nested-clients", + "test": "yarn g:vitest run", + "test:watch": "yarn g:vitest watch" + }, + "engines": { + "node": ">=18.0.0" + }, + "author": { + "name": "AWS SDK for JavaScript Team", + "url": "https://aws.amazon.com/javascript/" + }, + "license": "Apache-2.0", + "dependencies": { + "@aws-crypto/sha256-browser": "5.2.0", + "@aws-crypto/sha256-js": "5.2.0", + "@aws-sdk/core": "3.799.0", + "@aws-sdk/middleware-host-header": "3.775.0", + "@aws-sdk/middleware-logger": "3.775.0", + "@aws-sdk/middleware-recursion-detection": "3.775.0", + "@aws-sdk/middleware-user-agent": "3.799.0", + "@aws-sdk/region-config-resolver": "3.775.0", + "@aws-sdk/types": "3.775.0", + "@aws-sdk/util-endpoints": "3.787.0", + "@aws-sdk/util-user-agent-browser": "3.775.0", + "@aws-sdk/util-user-agent-node": "3.799.0", + "@smithy/config-resolver": "^4.1.0", + "@smithy/core": "^3.3.0", + 
"@smithy/fetch-http-handler": "^5.0.2", + "@smithy/hash-node": "^4.0.2", + "@smithy/invalid-dependency": "^4.0.2", + "@smithy/middleware-content-length": "^4.0.2", + "@smithy/middleware-endpoint": "^4.1.1", + "@smithy/middleware-retry": "^4.1.2", + "@smithy/middleware-serde": "^4.0.3", + "@smithy/middleware-stack": "^4.0.2", + "@smithy/node-config-provider": "^4.0.2", + "@smithy/node-http-handler": "^4.0.4", + "@smithy/protocol-http": "^5.1.0", + "@smithy/smithy-client": "^4.2.1", + "@smithy/types": "^4.2.0", + "@smithy/url-parser": "^4.0.2", + "@smithy/util-base64": "^4.0.0", + "@smithy/util-body-length-browser": "^4.0.0", + "@smithy/util-body-length-node": "^4.0.0", + "@smithy/util-defaults-mode-browser": "^4.0.9", + "@smithy/util-defaults-mode-node": "^4.0.9", + "@smithy/util-endpoints": "^3.0.2", + "@smithy/util-middleware": "^4.0.2", + "@smithy/util-retry": "^4.0.3", + "@smithy/util-utf8": "^4.0.0", + "tslib": "^2.6.2" + }, + "devDependencies": { + "concurrently": "7.0.0", + "downlevel-dts": "0.10.1", + "rimraf": "3.0.2", + "typescript": "~5.2.2" + }, + "typesVersions": { + "<4.0": { + "dist-types/*": [ + "dist-types/ts3.4/*" + ] + } + }, + "files": [ + "./sso-oidc.d.ts", + "./sso-oidc.js", + "./sts.d.ts", + "./sts.js", + "dist-*/**" + ], + "browser": { + "./dist-es/submodules/sso-oidc/runtimeConfig": "./dist-es/submodules/sso-oidc/runtimeConfig.browser", + "./dist-es/submodules/sts/runtimeConfig": "./dist-es/submodules/sts/runtimeConfig.browser" + }, + "react-native": {}, + "homepage": "https://github.com/aws/aws-sdk-js-v3/tree/main/packages/nested-clients", + "repository": { + "type": "git", + "url": "https://github.com/aws/aws-sdk-js-v3.git", + "directory": "packages/nested-clients" + }, + "exports": { + "./sso-oidc": { + "types": "./dist-types/submodules/sso-oidc/index.d.ts", + "module": "./dist-es/submodules/sso-oidc/index.js", + "node": "./dist-cjs/submodules/sso-oidc/index.js", + "import": "./dist-es/submodules/sso-oidc/index.js", + "require": 
"./dist-cjs/submodules/sso-oidc/index.js" + }, + "./sts": { + "types": "./dist-types/submodules/sts/index.d.ts", + "module": "./dist-es/submodules/sts/index.js", + "node": "./dist-cjs/submodules/sts/index.js", + "import": "./dist-es/submodules/sts/index.js", + "require": "./dist-cjs/submodules/sts/index.js" + } + } +} diff --git a/amplify/functions/fetchDocuments/node_modules/@aws-sdk/nested-clients/sso-oidc.d.ts b/amplify/functions/fetchDocuments/node_modules/@aws-sdk/nested-clients/sso-oidc.d.ts new file mode 100644 index 0000000..ab47282 --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@aws-sdk/nested-clients/sso-oidc.d.ts @@ -0,0 +1,7 @@ +/** + * Do not edit: + * This is a compatibility redirect for contexts that do not understand package.json exports field. + */ +declare module "@aws-sdk/nested-clients/sso-oidc" { + export * from "@aws-sdk/nested-clients/dist-types/submodules/sso-oidc/index.d"; +} diff --git a/amplify/functions/fetchDocuments/node_modules/@aws-sdk/nested-clients/sso-oidc.js b/amplify/functions/fetchDocuments/node_modules/@aws-sdk/nested-clients/sso-oidc.js new file mode 100644 index 0000000..896865c --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@aws-sdk/nested-clients/sso-oidc.js @@ -0,0 +1,5 @@ +/** + * Do not edit: + * This is a compatibility redirect for contexts that do not understand package.json exports field. + */ +module.exports = require("./dist-cjs/submodules/sso-oidc/index.js"); diff --git a/amplify/functions/fetchDocuments/node_modules/@aws-sdk/nested-clients/sts.d.ts b/amplify/functions/fetchDocuments/node_modules/@aws-sdk/nested-clients/sts.d.ts new file mode 100644 index 0000000..03b8e68 --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@aws-sdk/nested-clients/sts.d.ts @@ -0,0 +1,7 @@ +/** + * Do not edit: + * This is a compatibility redirect for contexts that do not understand package.json exports field. 
+ */ +declare module "@aws-sdk/nested-clients/sts" { + export * from "@aws-sdk/nested-clients/dist-types/submodules/sts/index.d"; +} diff --git a/amplify/functions/fetchDocuments/node_modules/@aws-sdk/nested-clients/sts.js b/amplify/functions/fetchDocuments/node_modules/@aws-sdk/nested-clients/sts.js new file mode 100644 index 0000000..8976f12 --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@aws-sdk/nested-clients/sts.js @@ -0,0 +1,5 @@ +/** + * Do not edit: + * This is a compatibility redirect for contexts that do not understand package.json exports field. + */ +module.exports = require("./dist-cjs/submodules/sts/index.js"); diff --git a/amplify/functions/fetchDocuments/node_modules/@aws-sdk/region-config-resolver/LICENSE b/amplify/functions/fetchDocuments/node_modules/@aws-sdk/region-config-resolver/LICENSE new file mode 100644 index 0000000..7b6491b --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@aws-sdk/region-config-resolver/LICENSE @@ -0,0 +1,201 @@ +Apache License + Version 2.0, January 2004 + http://www.apache.org/licenses/ + + TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION + + 1. Definitions. + + "License" shall mean the terms and conditions for use, reproduction, + and distribution as defined by Sections 1 through 9 of this document. + + "Licensor" shall mean the copyright owner or entity authorized by + the copyright owner that is granting the License. + + "Legal Entity" shall mean the union of the acting entity and all + other entities that control, are controlled by, or are under common + control with that entity. For the purposes of this definition, + "control" means (i) the power, direct or indirect, to cause the + direction or management of such entity, whether by contract or + otherwise, or (ii) ownership of fifty percent (50%) or more of the + outstanding shares, or (iii) beneficial ownership of such entity. 
+ + "You" (or "Your") shall mean an individual or Legal Entity + exercising permissions granted by this License. + + "Source" form shall mean the preferred form for making modifications, + including but not limited to software source code, documentation + source, and configuration files. + + "Object" form shall mean any form resulting from mechanical + transformation or translation of a Source form, including but + not limited to compiled object code, generated documentation, + and conversions to other media types. + + "Work" shall mean the work of authorship, whether in Source or + Object form, made available under the License, as indicated by a + copyright notice that is included in or attached to the work + (an example is provided in the Appendix below). + + "Derivative Works" shall mean any work, whether in Source or Object + form, that is based on (or derived from) the Work and for which the + editorial revisions, annotations, elaborations, or other modifications + represent, as a whole, an original work of authorship. For the purposes + of this License, Derivative Works shall not include works that remain + separable from, or merely link (or bind by name) to the interfaces of, + the Work and Derivative Works thereof. + + "Contribution" shall mean any work of authorship, including + the original version of the Work and any modifications or additions + to that Work or Derivative Works thereof, that is intentionally + submitted to Licensor for inclusion in the Work by the copyright owner + or by an individual or Legal Entity authorized to submit on behalf of + the copyright owner. 
For the purposes of this definition, "submitted" + means any form of electronic, verbal, or written communication sent + to the Licensor or its representatives, including but not limited to + communication on electronic mailing lists, source code control systems, + and issue tracking systems that are managed by, or on behalf of, the + Licensor for the purpose of discussing and improving the Work, but + excluding communication that is conspicuously marked or otherwise + designated in writing by the copyright owner as "Not a Contribution." + + "Contributor" shall mean Licensor and any individual or Legal Entity + on behalf of whom a Contribution has been received by Licensor and + subsequently incorporated within the Work. + + 2. Grant of Copyright License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + copyright license to reproduce, prepare Derivative Works of, + publicly display, publicly perform, sublicense, and distribute the + Work and such Derivative Works in Source or Object form. + + 3. Grant of Patent License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + (except as stated in this section) patent license to make, have made, + use, offer to sell, sell, import, and otherwise transfer the Work, + where such license applies only to those patent claims licensable + by such Contributor that are necessarily infringed by their + Contribution(s) alone or by combination of their Contribution(s) + with the Work to which such Contribution(s) was submitted. 
If You + institute patent litigation against any entity (including a + cross-claim or counterclaim in a lawsuit) alleging that the Work + or a Contribution incorporated within the Work constitutes direct + or contributory patent infringement, then any patent licenses + granted to You under this License for that Work shall terminate + as of the date such litigation is filed. + + 4. Redistribution. You may reproduce and distribute copies of the + Work or Derivative Works thereof in any medium, with or without + modifications, and in Source or Object form, provided that You + meet the following conditions: + + (a) You must give any other recipients of the Work or + Derivative Works a copy of this License; and + + (b) You must cause any modified files to carry prominent notices + stating that You changed the files; and + + (c) You must retain, in the Source form of any Derivative Works + that You distribute, all copyright, patent, trademark, and + attribution notices from the Source form of the Work, + excluding those notices that do not pertain to any part of + the Derivative Works; and + + (d) If the Work includes a "NOTICE" text file as part of its + distribution, then any Derivative Works that You distribute must + include a readable copy of the attribution notices contained + within such NOTICE file, excluding those notices that do not + pertain to any part of the Derivative Works, in at least one + of the following places: within a NOTICE text file distributed + as part of the Derivative Works; within the Source form or + documentation, if provided along with the Derivative Works; or, + within a display generated by the Derivative Works, if and + wherever such third-party notices normally appear. The contents + of the NOTICE file are for informational purposes only and + do not modify the License. 
You may add Your own attribution + notices within Derivative Works that You distribute, alongside + or as an addendum to the NOTICE text from the Work, provided + that such additional attribution notices cannot be construed + as modifying the License. + + You may add Your own copyright statement to Your modifications and + may provide additional or different license terms and conditions + for use, reproduction, or distribution of Your modifications, or + for any such Derivative Works as a whole, provided Your use, + reproduction, and distribution of the Work otherwise complies with + the conditions stated in this License. + + 5. Submission of Contributions. Unless You explicitly state otherwise, + any Contribution intentionally submitted for inclusion in the Work + by You to the Licensor shall be under the terms and conditions of + this License, without any additional terms or conditions. + Notwithstanding the above, nothing herein shall supersede or modify + the terms of any separate license agreement you may have executed + with Licensor regarding such Contributions. + + 6. Trademarks. This License does not grant permission to use the trade + names, trademarks, service marks, or product names of the Licensor, + except as required for reasonable and customary use in describing the + origin of the Work and reproducing the content of the NOTICE file. + + 7. Disclaimer of Warranty. Unless required by applicable law or + agreed to in writing, Licensor provides the Work (and each + Contributor provides its Contributions) on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or + implied, including, without limitation, any warranties or conditions + of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A + PARTICULAR PURPOSE. You are solely responsible for determining the + appropriateness of using or redistributing the Work and assume any + risks associated with Your exercise of permissions under this License. + + 8. 
Limitation of Liability. In no event and under no legal theory, + whether in tort (including negligence), contract, or otherwise, + unless required by applicable law (such as deliberate and grossly + negligent acts) or agreed to in writing, shall any Contributor be + liable to You for damages, including any direct, indirect, special, + incidental, or consequential damages of any character arising as a + result of this License or out of the use or inability to use the + Work (including but not limited to damages for loss of goodwill, + work stoppage, computer failure or malfunction, or any and all + other commercial damages or losses), even if such Contributor + has been advised of the possibility of such damages. + + 9. Accepting Warranty or Additional Liability. While redistributing + the Work or Derivative Works thereof, You may choose to offer, + and charge a fee for, acceptance of support, warranty, indemnity, + or other liability obligations and/or rights consistent with this + License. However, in accepting such obligations, You may act only + on Your own behalf and on Your sole responsibility, not on behalf + of any other Contributor, and only if You agree to indemnify, + defend, and hold each Contributor harmless for any liability + incurred by, or claims asserted against, such Contributor by reason + of your accepting any such warranty or additional liability. + + END OF TERMS AND CONDITIONS + + APPENDIX: How to apply the Apache License to your work. + + To apply the Apache License to your work, attach the following + boilerplate notice, with the fields enclosed by brackets "{}" + replaced with your own identifying information. (Don't include + the brackets!) The text should be enclosed in the appropriate + comment syntax for the file format. We also recommend that a + file or class name and description of purpose be included on the + same "printed page" as the copyright notice for easier + identification within third-party archives. 
+ + Copyright 2018-2020 Amazon.com, Inc. or its affiliates. All Rights Reserved. + + Licensed under the Apache License, Version 2.0 (the "License"); + you may not use this file except in compliance with the License. + You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + + Unless required by applicable law or agreed to in writing, software + distributed under the License is distributed on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + See the License for the specific language governing permissions and + limitations under the License. \ No newline at end of file diff --git a/amplify/functions/fetchDocuments/node_modules/@aws-sdk/region-config-resolver/README.md b/amplify/functions/fetchDocuments/node_modules/@aws-sdk/region-config-resolver/README.md new file mode 100644 index 0000000..389b765 --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@aws-sdk/region-config-resolver/README.md @@ -0,0 +1,12 @@ +# @aws-sdk/region-config-resolver + +[![NPM version](https://img.shields.io/npm/v/@aws-sdk/region-config-resolver/latest.svg)](https://www.npmjs.com/package/@aws-sdk/region-config-resolver) +[![NPM downloads](https://img.shields.io/npm/dm/@aws-sdk/region-config-resolver.svg)](https://www.npmjs.com/package/@aws-sdk/region-config-resolver) + +> An internal package + +This package provides utilities for AWS region config resolvers. + +## Usage + +You probably shouldn't, at least directly. 
diff --git a/amplify/functions/fetchDocuments/node_modules/@aws-sdk/region-config-resolver/dist-cjs/index.js b/amplify/functions/fetchDocuments/node_modules/@aws-sdk/region-config-resolver/dist-cjs/index.js new file mode 100644 index 0000000..ddc184f --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@aws-sdk/region-config-resolver/dist-cjs/index.js @@ -0,0 +1,105 @@ +"use strict"; +var __defProp = Object.defineProperty; +var __getOwnPropDesc = Object.getOwnPropertyDescriptor; +var __getOwnPropNames = Object.getOwnPropertyNames; +var __hasOwnProp = Object.prototype.hasOwnProperty; +var __name = (target, value) => __defProp(target, "name", { value, configurable: true }); +var __export = (target, all) => { + for (var name in all) + __defProp(target, name, { get: all[name], enumerable: true }); +}; +var __copyProps = (to, from, except, desc) => { + if (from && typeof from === "object" || typeof from === "function") { + for (let key of __getOwnPropNames(from)) + if (!__hasOwnProp.call(to, key) && key !== except) + __defProp(to, key, { get: () => from[key], enumerable: !(desc = __getOwnPropDesc(from, key)) || desc.enumerable }); + } + return to; +}; +var __toCommonJS = (mod) => __copyProps(__defProp({}, "__esModule", { value: true }), mod); + +// src/index.ts +var index_exports = {}; +__export(index_exports, { + NODE_REGION_CONFIG_FILE_OPTIONS: () => NODE_REGION_CONFIG_FILE_OPTIONS, + NODE_REGION_CONFIG_OPTIONS: () => NODE_REGION_CONFIG_OPTIONS, + REGION_ENV_NAME: () => REGION_ENV_NAME, + REGION_INI_NAME: () => REGION_INI_NAME, + getAwsRegionExtensionConfiguration: () => getAwsRegionExtensionConfiguration, + resolveAwsRegionExtensionConfiguration: () => resolveAwsRegionExtensionConfiguration, + resolveRegionConfig: () => resolveRegionConfig +}); +module.exports = __toCommonJS(index_exports); + +// src/extensions/index.ts +var getAwsRegionExtensionConfiguration = /* @__PURE__ */ __name((runtimeConfig) => { + return { + setRegion(region) { + 
runtimeConfig.region = region; + }, + region() { + return runtimeConfig.region; + } + }; +}, "getAwsRegionExtensionConfiguration"); +var resolveAwsRegionExtensionConfiguration = /* @__PURE__ */ __name((awsRegionExtensionConfiguration) => { + return { + region: awsRegionExtensionConfiguration.region() + }; +}, "resolveAwsRegionExtensionConfiguration"); + +// src/regionConfig/config.ts +var REGION_ENV_NAME = "AWS_REGION"; +var REGION_INI_NAME = "region"; +var NODE_REGION_CONFIG_OPTIONS = { + environmentVariableSelector: /* @__PURE__ */ __name((env) => env[REGION_ENV_NAME], "environmentVariableSelector"), + configFileSelector: /* @__PURE__ */ __name((profile) => profile[REGION_INI_NAME], "configFileSelector"), + default: /* @__PURE__ */ __name(() => { + throw new Error("Region is missing"); + }, "default") +}; +var NODE_REGION_CONFIG_FILE_OPTIONS = { + preferredFile: "credentials" +}; + +// src/regionConfig/isFipsRegion.ts +var isFipsRegion = /* @__PURE__ */ __name((region) => typeof region === "string" && (region.startsWith("fips-") || region.endsWith("-fips")), "isFipsRegion"); + +// src/regionConfig/getRealRegion.ts +var getRealRegion = /* @__PURE__ */ __name((region) => isFipsRegion(region) ? ["fips-aws-global", "aws-fips"].includes(region) ? "us-east-1" : region.replace(/fips-(dkr-|prod-)?|-fips/, "") : region, "getRealRegion"); + +// src/regionConfig/resolveRegionConfig.ts +var resolveRegionConfig = /* @__PURE__ */ __name((input) => { + const { region, useFipsEndpoint } = input; + if (!region) { + throw new Error("Region is missing"); + } + return Object.assign(input, { + region: /* @__PURE__ */ __name(async () => { + if (typeof region === "string") { + return getRealRegion(region); + } + const providedRegion = await region(); + return getRealRegion(providedRegion); + }, "region"), + useFipsEndpoint: /* @__PURE__ */ __name(async () => { + const providedRegion = typeof region === "string" ? 
region : await region(); + if (isFipsRegion(providedRegion)) { + return true; + } + return typeof useFipsEndpoint !== "function" ? Promise.resolve(!!useFipsEndpoint) : useFipsEndpoint(); + }, "useFipsEndpoint") + }); +}, "resolveRegionConfig"); +// Annotate the CommonJS export names for ESM import in node: + +0 && (module.exports = { + getAwsRegionExtensionConfiguration, + resolveAwsRegionExtensionConfiguration, + REGION_ENV_NAME, + REGION_INI_NAME, + NODE_REGION_CONFIG_OPTIONS, + NODE_REGION_CONFIG_FILE_OPTIONS, + resolveRegionConfig +}); + diff --git a/amplify/functions/fetchDocuments/node_modules/@aws-sdk/region-config-resolver/dist-es/extensions/index.js b/amplify/functions/fetchDocuments/node_modules/@aws-sdk/region-config-resolver/dist-es/extensions/index.js new file mode 100644 index 0000000..eb03314 --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@aws-sdk/region-config-resolver/dist-es/extensions/index.js @@ -0,0 +1,15 @@ +export const getAwsRegionExtensionConfiguration = (runtimeConfig) => { + return { + setRegion(region) { + runtimeConfig.region = region; + }, + region() { + return runtimeConfig.region; + }, + }; +}; +export const resolveAwsRegionExtensionConfiguration = (awsRegionExtensionConfiguration) => { + return { + region: awsRegionExtensionConfiguration.region(), + }; +}; diff --git a/amplify/functions/fetchDocuments/node_modules/@aws-sdk/region-config-resolver/dist-es/index.js b/amplify/functions/fetchDocuments/node_modules/@aws-sdk/region-config-resolver/dist-es/index.js new file mode 100644 index 0000000..6f4e482 --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@aws-sdk/region-config-resolver/dist-es/index.js @@ -0,0 +1,2 @@ +export * from "./extensions"; +export * from "./regionConfig"; diff --git a/amplify/functions/fetchDocuments/node_modules/@aws-sdk/region-config-resolver/dist-es/regionConfig/config.js 
b/amplify/functions/fetchDocuments/node_modules/@aws-sdk/region-config-resolver/dist-es/regionConfig/config.js new file mode 100644 index 0000000..7db9896 --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@aws-sdk/region-config-resolver/dist-es/regionConfig/config.js @@ -0,0 +1,12 @@ +export const REGION_ENV_NAME = "AWS_REGION"; +export const REGION_INI_NAME = "region"; +export const NODE_REGION_CONFIG_OPTIONS = { + environmentVariableSelector: (env) => env[REGION_ENV_NAME], + configFileSelector: (profile) => profile[REGION_INI_NAME], + default: () => { + throw new Error("Region is missing"); + }, +}; +export const NODE_REGION_CONFIG_FILE_OPTIONS = { + preferredFile: "credentials", +}; diff --git a/amplify/functions/fetchDocuments/node_modules/@aws-sdk/region-config-resolver/dist-es/regionConfig/getRealRegion.js b/amplify/functions/fetchDocuments/node_modules/@aws-sdk/region-config-resolver/dist-es/regionConfig/getRealRegion.js new file mode 100644 index 0000000..8d1246b --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@aws-sdk/region-config-resolver/dist-es/regionConfig/getRealRegion.js @@ -0,0 +1,6 @@ +import { isFipsRegion } from "./isFipsRegion"; +export const getRealRegion = (region) => isFipsRegion(region) + ? ["fips-aws-global", "aws-fips"].includes(region) + ? 
"us-east-1" + : region.replace(/fips-(dkr-|prod-)?|-fips/, "") + : region; diff --git a/amplify/functions/fetchDocuments/node_modules/@aws-sdk/region-config-resolver/dist-es/regionConfig/index.js b/amplify/functions/fetchDocuments/node_modules/@aws-sdk/region-config-resolver/dist-es/regionConfig/index.js new file mode 100644 index 0000000..83675f7 --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@aws-sdk/region-config-resolver/dist-es/regionConfig/index.js @@ -0,0 +1,2 @@ +export * from "./config"; +export * from "./resolveRegionConfig"; diff --git a/amplify/functions/fetchDocuments/node_modules/@aws-sdk/region-config-resolver/dist-es/regionConfig/isFipsRegion.js b/amplify/functions/fetchDocuments/node_modules/@aws-sdk/region-config-resolver/dist-es/regionConfig/isFipsRegion.js new file mode 100644 index 0000000..d758967 --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@aws-sdk/region-config-resolver/dist-es/regionConfig/isFipsRegion.js @@ -0,0 +1 @@ +export const isFipsRegion = (region) => typeof region === "string" && (region.startsWith("fips-") || region.endsWith("-fips")); diff --git a/amplify/functions/fetchDocuments/node_modules/@aws-sdk/region-config-resolver/dist-es/regionConfig/resolveRegionConfig.js b/amplify/functions/fetchDocuments/node_modules/@aws-sdk/region-config-resolver/dist-es/regionConfig/resolveRegionConfig.js new file mode 100644 index 0000000..f88e00f --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@aws-sdk/region-config-resolver/dist-es/regionConfig/resolveRegionConfig.js @@ -0,0 +1,24 @@ +import { getRealRegion } from "./getRealRegion"; +import { isFipsRegion } from "./isFipsRegion"; +export const resolveRegionConfig = (input) => { + const { region, useFipsEndpoint } = input; + if (!region) { + throw new Error("Region is missing"); + } + return Object.assign(input, { + region: async () => { + if (typeof region === "string") { + return getRealRegion(region); + } + const providedRegion = 
await region(); + return getRealRegion(providedRegion); + }, + useFipsEndpoint: async () => { + const providedRegion = typeof region === "string" ? region : await region(); + if (isFipsRegion(providedRegion)) { + return true; + } + return typeof useFipsEndpoint !== "function" ? Promise.resolve(!!useFipsEndpoint) : useFipsEndpoint(); + }, + }); +}; diff --git a/amplify/functions/fetchDocuments/node_modules/@aws-sdk/region-config-resolver/dist-types/extensions/index.d.ts b/amplify/functions/fetchDocuments/node_modules/@aws-sdk/region-config-resolver/dist-types/extensions/index.d.ts new file mode 100644 index 0000000..7756bad --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@aws-sdk/region-config-resolver/dist-types/extensions/index.d.ts @@ -0,0 +1,16 @@ +import { AwsRegionExtensionConfiguration } from "@aws-sdk/types"; +import { Provider } from "@smithy/types"; +export type RegionExtensionRuntimeConfigType = Partial<{ + region: string | Provider; +}>; +/** + * @internal + */ +export declare const getAwsRegionExtensionConfiguration: (runtimeConfig: RegionExtensionRuntimeConfigType) => { + setRegion(region: Provider): void; + region(): Provider; +}; +/** + * @internal + */ +export declare const resolveAwsRegionExtensionConfiguration: (awsRegionExtensionConfiguration: AwsRegionExtensionConfiguration) => RegionExtensionRuntimeConfigType; diff --git a/amplify/functions/fetchDocuments/node_modules/@aws-sdk/region-config-resolver/dist-types/index.d.ts b/amplify/functions/fetchDocuments/node_modules/@aws-sdk/region-config-resolver/dist-types/index.d.ts new file mode 100644 index 0000000..6f4e482 --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@aws-sdk/region-config-resolver/dist-types/index.d.ts @@ -0,0 +1,2 @@ +export * from "./extensions"; +export * from "./regionConfig"; diff --git a/amplify/functions/fetchDocuments/node_modules/@aws-sdk/region-config-resolver/dist-types/regionConfig/config.d.ts 
b/amplify/functions/fetchDocuments/node_modules/@aws-sdk/region-config-resolver/dist-types/regionConfig/config.d.ts new file mode 100644 index 0000000..d203bb0 --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@aws-sdk/region-config-resolver/dist-types/regionConfig/config.d.ts @@ -0,0 +1,17 @@ +import { LoadedConfigSelectors, LocalConfigOptions } from "@smithy/node-config-provider"; +/** + * @internal + */ +export declare const REGION_ENV_NAME = "AWS_REGION"; +/** + * @internal + */ +export declare const REGION_INI_NAME = "region"; +/** + * @internal + */ +export declare const NODE_REGION_CONFIG_OPTIONS: LoadedConfigSelectors; +/** + * @internal + */ +export declare const NODE_REGION_CONFIG_FILE_OPTIONS: LocalConfigOptions; diff --git a/amplify/functions/fetchDocuments/node_modules/@aws-sdk/region-config-resolver/dist-types/regionConfig/getRealRegion.d.ts b/amplify/functions/fetchDocuments/node_modules/@aws-sdk/region-config-resolver/dist-types/regionConfig/getRealRegion.d.ts new file mode 100644 index 0000000..c70fb5b --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@aws-sdk/region-config-resolver/dist-types/regionConfig/getRealRegion.d.ts @@ -0,0 +1,4 @@ +/** + * @internal + */ +export declare const getRealRegion: (region: string) => string; diff --git a/amplify/functions/fetchDocuments/node_modules/@aws-sdk/region-config-resolver/dist-types/regionConfig/index.d.ts b/amplify/functions/fetchDocuments/node_modules/@aws-sdk/region-config-resolver/dist-types/regionConfig/index.d.ts new file mode 100644 index 0000000..6dcf5e5 --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@aws-sdk/region-config-resolver/dist-types/regionConfig/index.d.ts @@ -0,0 +1,8 @@ +/** + * @internal + */ +export * from "./config"; +/** + * @internal + */ +export * from "./resolveRegionConfig"; diff --git a/amplify/functions/fetchDocuments/node_modules/@aws-sdk/region-config-resolver/dist-types/regionConfig/isFipsRegion.d.ts 
b/amplify/functions/fetchDocuments/node_modules/@aws-sdk/region-config-resolver/dist-types/regionConfig/isFipsRegion.d.ts new file mode 100644 index 0000000..b42cee7 --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@aws-sdk/region-config-resolver/dist-types/regionConfig/isFipsRegion.d.ts @@ -0,0 +1,4 @@ +/** + * @internal + */ +export declare const isFipsRegion: (region: string) => boolean; diff --git a/amplify/functions/fetchDocuments/node_modules/@aws-sdk/region-config-resolver/dist-types/regionConfig/resolveRegionConfig.d.ts b/amplify/functions/fetchDocuments/node_modules/@aws-sdk/region-config-resolver/dist-types/regionConfig/resolveRegionConfig.d.ts new file mode 100644 index 0000000..84ed4d0 --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@aws-sdk/region-config-resolver/dist-types/regionConfig/resolveRegionConfig.d.ts @@ -0,0 +1,37 @@ +import { Provider } from "@smithy/types"; +/** + * @public + */ +export interface RegionInputConfig { + /** + * The AWS region to which this client will send requests + */ + region?: string | Provider; + /** + * Enables FIPS compatible endpoints. 
+ */ + useFipsEndpoint?: boolean | Provider; +} +/** + * @internal + */ +interface PreviouslyResolved { +} +/** + * @internal + */ +export interface RegionResolvedConfig { + /** + * Resolved value for input config {@link RegionInputConfig.region} + */ + region: Provider; + /** + * Resolved value for input {@link RegionInputConfig.useFipsEndpoint} + */ + useFipsEndpoint: Provider; +} +/** + * @internal + */ +export declare const resolveRegionConfig: (input: T & RegionInputConfig & PreviouslyResolved) => T & RegionResolvedConfig; +export {}; diff --git a/amplify/functions/fetchDocuments/node_modules/@aws-sdk/region-config-resolver/dist-types/ts3.4/extensions/index.d.ts b/amplify/functions/fetchDocuments/node_modules/@aws-sdk/region-config-resolver/dist-types/ts3.4/extensions/index.d.ts new file mode 100644 index 0000000..c1328e3 --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@aws-sdk/region-config-resolver/dist-types/ts3.4/extensions/index.d.ts @@ -0,0 +1,14 @@ +import { AwsRegionExtensionConfiguration } from "@aws-sdk/types"; +import { Provider } from "@smithy/types"; +export type RegionExtensionRuntimeConfigType = Partial<{ + region: string | Provider; +}>; +export declare const getAwsRegionExtensionConfiguration: ( + runtimeConfig: RegionExtensionRuntimeConfigType +) => { + setRegion(region: Provider): void; + region(): Provider; +}; +export declare const resolveAwsRegionExtensionConfiguration: ( + awsRegionExtensionConfiguration: AwsRegionExtensionConfiguration +) => RegionExtensionRuntimeConfigType; diff --git a/amplify/functions/fetchDocuments/node_modules/@aws-sdk/region-config-resolver/dist-types/ts3.4/index.d.ts b/amplify/functions/fetchDocuments/node_modules/@aws-sdk/region-config-resolver/dist-types/ts3.4/index.d.ts new file mode 100644 index 0000000..6f4e482 --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@aws-sdk/region-config-resolver/dist-types/ts3.4/index.d.ts @@ -0,0 +1,2 @@ +export * from "./extensions"; +export 
* from "./regionConfig"; diff --git a/amplify/functions/fetchDocuments/node_modules/@aws-sdk/region-config-resolver/dist-types/ts3.4/regionConfig/config.d.ts b/amplify/functions/fetchDocuments/node_modules/@aws-sdk/region-config-resolver/dist-types/ts3.4/regionConfig/config.d.ts new file mode 100644 index 0000000..ceb3e02 --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@aws-sdk/region-config-resolver/dist-types/ts3.4/regionConfig/config.d.ts @@ -0,0 +1,8 @@ +import { + LoadedConfigSelectors, + LocalConfigOptions, +} from "@smithy/node-config-provider"; +export declare const REGION_ENV_NAME = "AWS_REGION"; +export declare const REGION_INI_NAME = "region"; +export declare const NODE_REGION_CONFIG_OPTIONS: LoadedConfigSelectors; +export declare const NODE_REGION_CONFIG_FILE_OPTIONS: LocalConfigOptions; diff --git a/amplify/functions/fetchDocuments/node_modules/@aws-sdk/region-config-resolver/dist-types/ts3.4/regionConfig/getRealRegion.d.ts b/amplify/functions/fetchDocuments/node_modules/@aws-sdk/region-config-resolver/dist-types/ts3.4/regionConfig/getRealRegion.d.ts new file mode 100644 index 0000000..f06119b --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@aws-sdk/region-config-resolver/dist-types/ts3.4/regionConfig/getRealRegion.d.ts @@ -0,0 +1 @@ +export declare const getRealRegion: (region: string) => string; diff --git a/amplify/functions/fetchDocuments/node_modules/@aws-sdk/region-config-resolver/dist-types/ts3.4/regionConfig/index.d.ts b/amplify/functions/fetchDocuments/node_modules/@aws-sdk/region-config-resolver/dist-types/ts3.4/regionConfig/index.d.ts new file mode 100644 index 0000000..83675f7 --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@aws-sdk/region-config-resolver/dist-types/ts3.4/regionConfig/index.d.ts @@ -0,0 +1,2 @@ +export * from "./config"; +export * from "./resolveRegionConfig"; diff --git 
a/amplify/functions/fetchDocuments/node_modules/@aws-sdk/region-config-resolver/dist-types/ts3.4/regionConfig/isFipsRegion.d.ts b/amplify/functions/fetchDocuments/node_modules/@aws-sdk/region-config-resolver/dist-types/ts3.4/regionConfig/isFipsRegion.d.ts new file mode 100644 index 0000000..13d34f2 --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@aws-sdk/region-config-resolver/dist-types/ts3.4/regionConfig/isFipsRegion.d.ts @@ -0,0 +1 @@ +export declare const isFipsRegion: (region: string) => boolean; diff --git a/amplify/functions/fetchDocuments/node_modules/@aws-sdk/region-config-resolver/dist-types/ts3.4/regionConfig/resolveRegionConfig.d.ts b/amplify/functions/fetchDocuments/node_modules/@aws-sdk/region-config-resolver/dist-types/ts3.4/regionConfig/resolveRegionConfig.d.ts new file mode 100644 index 0000000..86b8364 --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@aws-sdk/region-config-resolver/dist-types/ts3.4/regionConfig/resolveRegionConfig.d.ts @@ -0,0 +1,14 @@ +import { Provider } from "@smithy/types"; +export interface RegionInputConfig { + region?: string | Provider; + useFipsEndpoint?: boolean | Provider; +} +interface PreviouslyResolved {} +export interface RegionResolvedConfig { + region: Provider; + useFipsEndpoint: Provider; +} +export declare const resolveRegionConfig: ( + input: T & RegionInputConfig & PreviouslyResolved +) => T & RegionResolvedConfig; +export {}; diff --git a/amplify/functions/fetchDocuments/node_modules/@aws-sdk/region-config-resolver/package.json b/amplify/functions/fetchDocuments/node_modules/@aws-sdk/region-config-resolver/package.json new file mode 100644 index 0000000..605f530 --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@aws-sdk/region-config-resolver/package.json @@ -0,0 +1,59 @@ +{ + "name": "@aws-sdk/region-config-resolver", + "version": "3.775.0", + "scripts": { + "build": "concurrently 'yarn:build:cjs' 'yarn:build:es' 'yarn:build:types'", + "build:cjs": "node 
../../scripts/compilation/inline region-config-resolver", + "build:es": "tsc -p tsconfig.es.json", + "build:include:deps": "lerna run --scope $npm_package_name --include-dependencies build", + "build:types": "tsc -p tsconfig.types.json", + "build:types:downlevel": "downlevel-dts dist-types dist-types/ts3.4", + "clean": "rimraf ./dist-* && rimraf *.tsbuildinfo", + "extract:docs": "api-extractor run --local", + "test": "yarn g:vitest run", + "test:watch": "yarn g:vitest watch" + }, + "main": "./dist-cjs/index.js", + "module": "./dist-es/index.js", + "types": "./dist-types/index.d.ts", + "author": { + "name": "AWS SDK for JavaScript Team", + "url": "https://aws.amazon.com/javascript/" + }, + "license": "Apache-2.0", + "dependencies": { + "@aws-sdk/types": "3.775.0", + "@smithy/node-config-provider": "^4.0.2", + "@smithy/types": "^4.2.0", + "@smithy/util-config-provider": "^4.0.0", + "@smithy/util-middleware": "^4.0.2", + "tslib": "^2.6.2" + }, + "devDependencies": { + "@tsconfig/recommended": "1.0.1", + "concurrently": "7.0.0", + "downlevel-dts": "0.10.1", + "jest": "28.1.1", + "rimraf": "3.0.2", + "typescript": "~5.2.2" + }, + "engines": { + "node": ">=18.0.0" + }, + "typesVersions": { + "<4.0": { + "dist-types/*": [ + "dist-types/ts3.4/*" + ] + } + }, + "files": [ + "dist-*/**" + ], + "homepage": "https://github.com/awslabs/smithy-typescript/tree/main/packages/region-config-resolver", + "repository": { + "type": "git", + "url": "https://github.com/aws/aws-sdk-js-v3.git", + "directory": "packages/region-config-resolver" + } +} diff --git a/amplify/functions/fetchDocuments/node_modules/@aws-sdk/token-providers/LICENSE b/amplify/functions/fetchDocuments/node_modules/@aws-sdk/token-providers/LICENSE new file mode 100644 index 0000000..7b6491b --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@aws-sdk/token-providers/LICENSE @@ -0,0 +1,201 @@ +Apache License + Version 2.0, January 2004 + http://www.apache.org/licenses/ + + TERMS AND CONDITIONS FOR USE, 
REPRODUCTION, AND DISTRIBUTION + + 1. Definitions. + + "License" shall mean the terms and conditions for use, reproduction, + and distribution as defined by Sections 1 through 9 of this document. + + "Licensor" shall mean the copyright owner or entity authorized by + the copyright owner that is granting the License. + + "Legal Entity" shall mean the union of the acting entity and all + other entities that control, are controlled by, or are under common + control with that entity. For the purposes of this definition, + "control" means (i) the power, direct or indirect, to cause the + direction or management of such entity, whether by contract or + otherwise, or (ii) ownership of fifty percent (50%) or more of the + outstanding shares, or (iii) beneficial ownership of such entity. + + "You" (or "Your") shall mean an individual or Legal Entity + exercising permissions granted by this License. + + "Source" form shall mean the preferred form for making modifications, + including but not limited to software source code, documentation + source, and configuration files. + + "Object" form shall mean any form resulting from mechanical + transformation or translation of a Source form, including but + not limited to compiled object code, generated documentation, + and conversions to other media types. + + "Work" shall mean the work of authorship, whether in Source or + Object form, made available under the License, as indicated by a + copyright notice that is included in or attached to the work + (an example is provided in the Appendix below). + + "Derivative Works" shall mean any work, whether in Source or Object + form, that is based on (or derived from) the Work and for which the + editorial revisions, annotations, elaborations, or other modifications + represent, as a whole, an original work of authorship. 
For the purposes + of this License, Derivative Works shall not include works that remain + separable from, or merely link (or bind by name) to the interfaces of, + the Work and Derivative Works thereof. + + "Contribution" shall mean any work of authorship, including + the original version of the Work and any modifications or additions + to that Work or Derivative Works thereof, that is intentionally + submitted to Licensor for inclusion in the Work by the copyright owner + or by an individual or Legal Entity authorized to submit on behalf of + the copyright owner. For the purposes of this definition, "submitted" + means any form of electronic, verbal, or written communication sent + to the Licensor or its representatives, including but not limited to + communication on electronic mailing lists, source code control systems, + and issue tracking systems that are managed by, or on behalf of, the + Licensor for the purpose of discussing and improving the Work, but + excluding communication that is conspicuously marked or otherwise + designated in writing by the copyright owner as "Not a Contribution." + + "Contributor" shall mean Licensor and any individual or Legal Entity + on behalf of whom a Contribution has been received by Licensor and + subsequently incorporated within the Work. + + 2. Grant of Copyright License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + copyright license to reproduce, prepare Derivative Works of, + publicly display, publicly perform, sublicense, and distribute the + Work and such Derivative Works in Source or Object form. + + 3. Grant of Patent License. 
Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + (except as stated in this section) patent license to make, have made, + use, offer to sell, sell, import, and otherwise transfer the Work, + where such license applies only to those patent claims licensable + by such Contributor that are necessarily infringed by their + Contribution(s) alone or by combination of their Contribution(s) + with the Work to which such Contribution(s) was submitted. If You + institute patent litigation against any entity (including a + cross-claim or counterclaim in a lawsuit) alleging that the Work + or a Contribution incorporated within the Work constitutes direct + or contributory patent infringement, then any patent licenses + granted to You under this License for that Work shall terminate + as of the date such litigation is filed. + + 4. Redistribution. You may reproduce and distribute copies of the + Work or Derivative Works thereof in any medium, with or without + modifications, and in Source or Object form, provided that You + meet the following conditions: + + (a) You must give any other recipients of the Work or + Derivative Works a copy of this License; and + + (b) You must cause any modified files to carry prominent notices + stating that You changed the files; and + + (c) You must retain, in the Source form of any Derivative Works + that You distribute, all copyright, patent, trademark, and + attribution notices from the Source form of the Work, + excluding those notices that do not pertain to any part of + the Derivative Works; and + + (d) If the Work includes a "NOTICE" text file as part of its + distribution, then any Derivative Works that You distribute must + include a readable copy of the attribution notices contained + within such NOTICE file, excluding those notices that do not + pertain to any part of the Derivative Works, in at least one + of 
the following places: within a NOTICE text file distributed + as part of the Derivative Works; within the Source form or + documentation, if provided along with the Derivative Works; or, + within a display generated by the Derivative Works, if and + wherever such third-party notices normally appear. The contents + of the NOTICE file are for informational purposes only and + do not modify the License. You may add Your own attribution + notices within Derivative Works that You distribute, alongside + or as an addendum to the NOTICE text from the Work, provided + that such additional attribution notices cannot be construed + as modifying the License. + + You may add Your own copyright statement to Your modifications and + may provide additional or different license terms and conditions + for use, reproduction, or distribution of Your modifications, or + for any such Derivative Works as a whole, provided Your use, + reproduction, and distribution of the Work otherwise complies with + the conditions stated in this License. + + 5. Submission of Contributions. Unless You explicitly state otherwise, + any Contribution intentionally submitted for inclusion in the Work + by You to the Licensor shall be under the terms and conditions of + this License, without any additional terms or conditions. + Notwithstanding the above, nothing herein shall supersede or modify + the terms of any separate license agreement you may have executed + with Licensor regarding such Contributions. + + 6. Trademarks. This License does not grant permission to use the trade + names, trademarks, service marks, or product names of the Licensor, + except as required for reasonable and customary use in describing the + origin of the Work and reproducing the content of the NOTICE file. + + 7. Disclaimer of Warranty. 
Unless required by applicable law or + agreed to in writing, Licensor provides the Work (and each + Contributor provides its Contributions) on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or + implied, including, without limitation, any warranties or conditions + of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A + PARTICULAR PURPOSE. You are solely responsible for determining the + appropriateness of using or redistributing the Work and assume any + risks associated with Your exercise of permissions under this License. + + 8. Limitation of Liability. In no event and under no legal theory, + whether in tort (including negligence), contract, or otherwise, + unless required by applicable law (such as deliberate and grossly + negligent acts) or agreed to in writing, shall any Contributor be + liable to You for damages, including any direct, indirect, special, + incidental, or consequential damages of any character arising as a + result of this License or out of the use or inability to use the + Work (including but not limited to damages for loss of goodwill, + work stoppage, computer failure or malfunction, or any and all + other commercial damages or losses), even if such Contributor + has been advised of the possibility of such damages. + + 9. Accepting Warranty or Additional Liability. While redistributing + the Work or Derivative Works thereof, You may choose to offer, + and charge a fee for, acceptance of support, warranty, indemnity, + or other liability obligations and/or rights consistent with this + License. However, in accepting such obligations, You may act only + on Your own behalf and on Your sole responsibility, not on behalf + of any other Contributor, and only if You agree to indemnify, + defend, and hold each Contributor harmless for any liability + incurred by, or claims asserted against, such Contributor by reason + of your accepting any such warranty or additional liability. 
+ + END OF TERMS AND CONDITIONS + + APPENDIX: How to apply the Apache License to your work. + + To apply the Apache License to your work, attach the following + boilerplate notice, with the fields enclosed by brackets "{}" + replaced with your own identifying information. (Don't include + the brackets!) The text should be enclosed in the appropriate + comment syntax for the file format. We also recommend that a + file or class name and description of purpose be included on the + same "printed page" as the copyright notice for easier + identification within third-party archives. + + Copyright 2018-2020 Amazon.com, Inc. or its affiliates. All Rights Reserved. + + Licensed under the Apache License, Version 2.0 (the "License"); + you may not use this file except in compliance with the License. + You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + + Unless required by applicable law or agreed to in writing, software + distributed under the License is distributed on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + See the License for the specific language governing permissions and + limitations under the License. \ No newline at end of file diff --git a/amplify/functions/fetchDocuments/node_modules/@aws-sdk/token-providers/README.md b/amplify/functions/fetchDocuments/node_modules/@aws-sdk/token-providers/README.md new file mode 100644 index 0000000..9078019 --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@aws-sdk/token-providers/README.md @@ -0,0 +1,53 @@ +# @aws-sdk/token-providers + +[![NPM version](https://img.shields.io/npm/v/@aws-sdk/token-providers/latest.svg)](https://www.npmjs.com/package/@aws-sdk/token-providers) +[![NPM downloads](https://img.shields.io/npm/dm/@aws-sdk/token-providers.svg)](https://www.npmjs.com/package/@aws-sdk/token-providers) + +A collection of all token providers. 
The token providers should be used when the authorization +type is going to be token based. For example, the `bearer` authorization type set using +[httpBearerAuth trait][http-bearer-auth-trait] in Smithy. + +## Static Token Provider + +```ts +import { fromStatic } from "@aws-sdk/token-providers"; + +const token = { token: "TOKEN" }; +const staticTokenProvider = fromStatic(token); + +const staticToken = await staticTokenProvider(); // returns { token: "TOKEN" } +``` + +## SSO Token Provider + +```ts +import { fromSso } from "@aws-sdk/token-providers"; + +// returns token from SSO token cache or ssoOidc.createToken() call. +const ssoToken = await fromSso(); +``` + +## Token Provider Chain + +```ts +import { nodeProvider } from "@aws-sdk/token-providers"; + +// returns token from default providers. +const token = await nodeProvider(); +``` + +[http-bearer-auth-trait]: https://smithy.io/2.0/spec/authentication-traits.html#smithy-api-httpbearerauth-trait + +--- + +### Development + +This package contains a minimal copy of the SSO OIDC client, instead of relying on the full client, which +would cause a circular dependency. + +When regenerating the bundled version of the SSO OIDC client, run the esbuild.js script and then make the following changes: + +- Remove any dependency of the generated client on the credential chain such that it would create + a circular dependency back to this package. Because we only need the `CreateTokenCommand`, the client, and this command's + associated `Exception`s, it is possible to remove auth dependencies. +- Ensure all required packages are declared in the `package.json` of token-providers. 
diff --git a/amplify/functions/fetchDocuments/node_modules/@aws-sdk/token-providers/dist-cjs/index.js b/amplify/functions/fetchDocuments/node_modules/@aws-sdk/token-providers/dist-cjs/index.js new file mode 100644 index 0000000..51a38df --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@aws-sdk/token-providers/dist-cjs/index.js @@ -0,0 +1,217 @@ +"use strict"; +var __create = Object.create; +var __defProp = Object.defineProperty; +var __getOwnPropDesc = Object.getOwnPropertyDescriptor; +var __getOwnPropNames = Object.getOwnPropertyNames; +var __getProtoOf = Object.getPrototypeOf; +var __hasOwnProp = Object.prototype.hasOwnProperty; +var __name = (target, value) => __defProp(target, "name", { value, configurable: true }); +var __export = (target, all) => { + for (var name in all) + __defProp(target, name, { get: all[name], enumerable: true }); +}; +var __copyProps = (to, from, except, desc) => { + if (from && typeof from === "object" || typeof from === "function") { + for (let key of __getOwnPropNames(from)) + if (!__hasOwnProp.call(to, key) && key !== except) + __defProp(to, key, { get: () => from[key], enumerable: !(desc = __getOwnPropDesc(from, key)) || desc.enumerable }); + } + return to; +}; +var __toESM = (mod, isNodeMode, target) => (target = mod != null ? __create(__getProtoOf(mod)) : {}, __copyProps( + // If the importer is in node compatibility mode or this is not an ESM + // file that has been converted to a CommonJS file using a Babel- + // compatible transform (i.e. "__esModule" has not been set), then set + // "default" to the CommonJS "module.exports" for node compatibility. + isNodeMode || !mod || !mod.__esModule ? 
__defProp(target, "default", { value: mod, enumerable: true }) : target, + mod +)); +var __toCommonJS = (mod) => __copyProps(__defProp({}, "__esModule", { value: true }), mod); + +// src/index.ts +var index_exports = {}; +__export(index_exports, { + fromSso: () => fromSso, + fromStatic: () => fromStatic, + nodeProvider: () => nodeProvider +}); +module.exports = __toCommonJS(index_exports); + +// src/fromSso.ts + + + +// src/constants.ts +var EXPIRE_WINDOW_MS = 5 * 60 * 1e3; +var REFRESH_MESSAGE = `To refresh this SSO session run 'aws sso login' with the corresponding profile.`; + +// src/getSsoOidcClient.ts +var getSsoOidcClient = /* @__PURE__ */ __name(async (ssoRegion, init = {}) => { + const { SSOOIDCClient } = await Promise.resolve().then(() => __toESM(require("@aws-sdk/nested-clients/sso-oidc"))); + const ssoOidcClient = new SSOOIDCClient( + Object.assign({}, init.clientConfig ?? {}, { + region: ssoRegion ?? init.clientConfig?.region, + logger: init.clientConfig?.logger ?? init.parentClientConfig?.logger + }) + ); + return ssoOidcClient; +}, "getSsoOidcClient"); + +// src/getNewSsoOidcToken.ts +var getNewSsoOidcToken = /* @__PURE__ */ __name(async (ssoToken, ssoRegion, init = {}) => { + const { CreateTokenCommand } = await Promise.resolve().then(() => __toESM(require("@aws-sdk/nested-clients/sso-oidc"))); + const ssoOidcClient = await getSsoOidcClient(ssoRegion, init); + return ssoOidcClient.send( + new CreateTokenCommand({ + clientId: ssoToken.clientId, + clientSecret: ssoToken.clientSecret, + refreshToken: ssoToken.refreshToken, + grantType: "refresh_token" + }) + ); +}, "getNewSsoOidcToken"); + +// src/validateTokenExpiry.ts +var import_property_provider = require("@smithy/property-provider"); +var validateTokenExpiry = /* @__PURE__ */ __name((token) => { + if (token.expiration && token.expiration.getTime() < Date.now()) { + throw new import_property_provider.TokenProviderError(`Token is expired. 
${REFRESH_MESSAGE}`, false); + } +}, "validateTokenExpiry"); + +// src/validateTokenKey.ts + +var validateTokenKey = /* @__PURE__ */ __name((key, value, forRefresh = false) => { + if (typeof value === "undefined") { + throw new import_property_provider.TokenProviderError( + `Value not present for '${key}' in SSO Token${forRefresh ? ". Cannot refresh" : ""}. ${REFRESH_MESSAGE}`, + false + ); + } +}, "validateTokenKey"); + +// src/writeSSOTokenToFile.ts +var import_shared_ini_file_loader = require("@smithy/shared-ini-file-loader"); +var import_fs = require("fs"); +var { writeFile } = import_fs.promises; +var writeSSOTokenToFile = /* @__PURE__ */ __name((id, ssoToken) => { + const tokenFilepath = (0, import_shared_ini_file_loader.getSSOTokenFilepath)(id); + const tokenString = JSON.stringify(ssoToken, null, 2); + return writeFile(tokenFilepath, tokenString); +}, "writeSSOTokenToFile"); + +// src/fromSso.ts +var lastRefreshAttemptTime = /* @__PURE__ */ new Date(0); +var fromSso = /* @__PURE__ */ __name((_init = {}) => async ({ callerClientConfig } = {}) => { + const init = { + ..._init, + parentClientConfig: { + ...callerClientConfig, + ..._init.parentClientConfig + } + }; + init.logger?.debug("@aws-sdk/token-providers - fromSso"); + const profiles = await (0, import_shared_ini_file_loader.parseKnownFiles)(init); + const profileName = (0, import_shared_ini_file_loader.getProfileName)({ + profile: init.profile ?? 
callerClientConfig?.profile + }); + const profile = profiles[profileName]; + if (!profile) { + throw new import_property_provider.TokenProviderError(`Profile '${profileName}' could not be found in shared credentials file.`, false); + } else if (!profile["sso_session"]) { + throw new import_property_provider.TokenProviderError(`Profile '${profileName}' is missing required property 'sso_session'.`); + } + const ssoSessionName = profile["sso_session"]; + const ssoSessions = await (0, import_shared_ini_file_loader.loadSsoSessionData)(init); + const ssoSession = ssoSessions[ssoSessionName]; + if (!ssoSession) { + throw new import_property_provider.TokenProviderError( + `Sso session '${ssoSessionName}' could not be found in shared credentials file.`, + false + ); + } + for (const ssoSessionRequiredKey of ["sso_start_url", "sso_region"]) { + if (!ssoSession[ssoSessionRequiredKey]) { + throw new import_property_provider.TokenProviderError( + `Sso session '${ssoSessionName}' is missing required property '${ssoSessionRequiredKey}'.`, + false + ); + } + } + const ssoStartUrl = ssoSession["sso_start_url"]; + const ssoRegion = ssoSession["sso_region"]; + let ssoToken; + try { + ssoToken = await (0, import_shared_ini_file_loader.getSSOTokenFromFile)(ssoSessionName); + } catch (e) { + throw new import_property_provider.TokenProviderError( + `The SSO session token associated with profile=${profileName} was not found or is invalid. 
${REFRESH_MESSAGE}`, + false + ); + } + validateTokenKey("accessToken", ssoToken.accessToken); + validateTokenKey("expiresAt", ssoToken.expiresAt); + const { accessToken, expiresAt } = ssoToken; + const existingToken = { token: accessToken, expiration: new Date(expiresAt) }; + if (existingToken.expiration.getTime() - Date.now() > EXPIRE_WINDOW_MS) { + return existingToken; + } + if (Date.now() - lastRefreshAttemptTime.getTime() < 30 * 1e3) { + validateTokenExpiry(existingToken); + return existingToken; + } + validateTokenKey("clientId", ssoToken.clientId, true); + validateTokenKey("clientSecret", ssoToken.clientSecret, true); + validateTokenKey("refreshToken", ssoToken.refreshToken, true); + try { + lastRefreshAttemptTime.setTime(Date.now()); + const newSsoOidcToken = await getNewSsoOidcToken(ssoToken, ssoRegion, init); + validateTokenKey("accessToken", newSsoOidcToken.accessToken); + validateTokenKey("expiresIn", newSsoOidcToken.expiresIn); + const newTokenExpiration = new Date(Date.now() + newSsoOidcToken.expiresIn * 1e3); + try { + await writeSSOTokenToFile(ssoSessionName, { + ...ssoToken, + accessToken: newSsoOidcToken.accessToken, + expiresAt: newTokenExpiration.toISOString(), + refreshToken: newSsoOidcToken.refreshToken + }); + } catch (error) { + } + return { + token: newSsoOidcToken.accessToken, + expiration: newTokenExpiration + }; + } catch (error) { + validateTokenExpiry(existingToken); + return existingToken; + } +}, "fromSso"); + +// src/fromStatic.ts + +var fromStatic = /* @__PURE__ */ __name(({ token, logger }) => async () => { + logger?.debug("@aws-sdk/token-providers - fromStatic"); + if (!token || !token.token) { + throw new import_property_provider.TokenProviderError(`Please pass a valid token to fromStatic`, false); + } + return token; +}, "fromStatic"); + +// src/nodeProvider.ts + +var nodeProvider = /* @__PURE__ */ __name((init = {}) => (0, import_property_provider.memoize)( + (0, import_property_provider.chain)(fromSso(init), async () => { + 
throw new import_property_provider.TokenProviderError("Could not load token from any providers", false); + }), + (token) => token.expiration !== void 0 && token.expiration.getTime() - Date.now() < 3e5, + (token) => token.expiration !== void 0 +), "nodeProvider"); +// Annotate the CommonJS export names for ESM import in node: + +0 && (module.exports = { + fromSso, + fromStatic, + nodeProvider +}); + diff --git a/amplify/functions/fetchDocuments/node_modules/@aws-sdk/token-providers/dist-es/constants.js b/amplify/functions/fetchDocuments/node_modules/@aws-sdk/token-providers/dist-es/constants.js new file mode 100644 index 0000000..b84a126 --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@aws-sdk/token-providers/dist-es/constants.js @@ -0,0 +1,2 @@ +export const EXPIRE_WINDOW_MS = 5 * 60 * 1000; +export const REFRESH_MESSAGE = `To refresh this SSO session run 'aws sso login' with the corresponding profile.`; diff --git a/amplify/functions/fetchDocuments/node_modules/@aws-sdk/token-providers/dist-es/fromSso.js b/amplify/functions/fetchDocuments/node_modules/@aws-sdk/token-providers/dist-es/fromSso.js new file mode 100644 index 0000000..61d2075 --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@aws-sdk/token-providers/dist-es/fromSso.js @@ -0,0 +1,88 @@ +import { TokenProviderError } from "@smithy/property-provider"; +import { getProfileName, getSSOTokenFromFile, loadSsoSessionData, parseKnownFiles, } from "@smithy/shared-ini-file-loader"; +import { EXPIRE_WINDOW_MS, REFRESH_MESSAGE } from "./constants"; +import { getNewSsoOidcToken } from "./getNewSsoOidcToken"; +import { validateTokenExpiry } from "./validateTokenExpiry"; +import { validateTokenKey } from "./validateTokenKey"; +import { writeSSOTokenToFile } from "./writeSSOTokenToFile"; +const lastRefreshAttemptTime = new Date(0); +export const fromSso = (_init = {}) => async ({ callerClientConfig } = {}) => { + const init = { + ..._init, + parentClientConfig: { + 
...callerClientConfig, + ..._init.parentClientConfig, + }, + }; + init.logger?.debug("@aws-sdk/token-providers - fromSso"); + const profiles = await parseKnownFiles(init); + const profileName = getProfileName({ + profile: init.profile ?? callerClientConfig?.profile, + }); + const profile = profiles[profileName]; + if (!profile) { + throw new TokenProviderError(`Profile '${profileName}' could not be found in shared credentials file.`, false); + } + else if (!profile["sso_session"]) { + throw new TokenProviderError(`Profile '${profileName}' is missing required property 'sso_session'.`); + } + const ssoSessionName = profile["sso_session"]; + const ssoSessions = await loadSsoSessionData(init); + const ssoSession = ssoSessions[ssoSessionName]; + if (!ssoSession) { + throw new TokenProviderError(`Sso session '${ssoSessionName}' could not be found in shared credentials file.`, false); + } + for (const ssoSessionRequiredKey of ["sso_start_url", "sso_region"]) { + if (!ssoSession[ssoSessionRequiredKey]) { + throw new TokenProviderError(`Sso session '${ssoSessionName}' is missing required property '${ssoSessionRequiredKey}'.`, false); + } + } + const ssoStartUrl = ssoSession["sso_start_url"]; + const ssoRegion = ssoSession["sso_region"]; + let ssoToken; + try { + ssoToken = await getSSOTokenFromFile(ssoSessionName); + } + catch (e) { + throw new TokenProviderError(`The SSO session token associated with profile=${profileName} was not found or is invalid. 
${REFRESH_MESSAGE}`, false); + } + validateTokenKey("accessToken", ssoToken.accessToken); + validateTokenKey("expiresAt", ssoToken.expiresAt); + const { accessToken, expiresAt } = ssoToken; + const existingToken = { token: accessToken, expiration: new Date(expiresAt) }; + if (existingToken.expiration.getTime() - Date.now() > EXPIRE_WINDOW_MS) { + return existingToken; + } + if (Date.now() - lastRefreshAttemptTime.getTime() < 30 * 1000) { + validateTokenExpiry(existingToken); + return existingToken; + } + validateTokenKey("clientId", ssoToken.clientId, true); + validateTokenKey("clientSecret", ssoToken.clientSecret, true); + validateTokenKey("refreshToken", ssoToken.refreshToken, true); + try { + lastRefreshAttemptTime.setTime(Date.now()); + const newSsoOidcToken = await getNewSsoOidcToken(ssoToken, ssoRegion, init); + validateTokenKey("accessToken", newSsoOidcToken.accessToken); + validateTokenKey("expiresIn", newSsoOidcToken.expiresIn); + const newTokenExpiration = new Date(Date.now() + newSsoOidcToken.expiresIn * 1000); + try { + await writeSSOTokenToFile(ssoSessionName, { + ...ssoToken, + accessToken: newSsoOidcToken.accessToken, + expiresAt: newTokenExpiration.toISOString(), + refreshToken: newSsoOidcToken.refreshToken, + }); + } + catch (error) { + } + return { + token: newSsoOidcToken.accessToken, + expiration: newTokenExpiration, + }; + } + catch (error) { + validateTokenExpiry(existingToken); + return existingToken; + } +}; diff --git a/amplify/functions/fetchDocuments/node_modules/@aws-sdk/token-providers/dist-es/fromStatic.js b/amplify/functions/fetchDocuments/node_modules/@aws-sdk/token-providers/dist-es/fromStatic.js new file mode 100644 index 0000000..0704ae0 --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@aws-sdk/token-providers/dist-es/fromStatic.js @@ -0,0 +1,8 @@ +import { TokenProviderError } from "@smithy/property-provider"; +export const fromStatic = ({ token, logger }) => async () => { + 
logger?.debug("@aws-sdk/token-providers - fromStatic"); + if (!token || !token.token) { + throw new TokenProviderError(`Please pass a valid token to fromStatic`, false); + } + return token; +}; diff --git a/amplify/functions/fetchDocuments/node_modules/@aws-sdk/token-providers/dist-es/getNewSsoOidcToken.js b/amplify/functions/fetchDocuments/node_modules/@aws-sdk/token-providers/dist-es/getNewSsoOidcToken.js new file mode 100644 index 0000000..00f7b2c --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@aws-sdk/token-providers/dist-es/getNewSsoOidcToken.js @@ -0,0 +1,11 @@ +import { getSsoOidcClient } from "./getSsoOidcClient"; +export const getNewSsoOidcToken = async (ssoToken, ssoRegion, init = {}) => { + const { CreateTokenCommand } = await import("@aws-sdk/nested-clients/sso-oidc"); + const ssoOidcClient = await getSsoOidcClient(ssoRegion, init); + return ssoOidcClient.send(new CreateTokenCommand({ + clientId: ssoToken.clientId, + clientSecret: ssoToken.clientSecret, + refreshToken: ssoToken.refreshToken, + grantType: "refresh_token", + })); +}; diff --git a/amplify/functions/fetchDocuments/node_modules/@aws-sdk/token-providers/dist-es/getSsoOidcClient.js b/amplify/functions/fetchDocuments/node_modules/@aws-sdk/token-providers/dist-es/getSsoOidcClient.js new file mode 100644 index 0000000..689be72 --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@aws-sdk/token-providers/dist-es/getSsoOidcClient.js @@ -0,0 +1,8 @@ +export const getSsoOidcClient = async (ssoRegion, init = {}) => { + const { SSOOIDCClient } = await import("@aws-sdk/nested-clients/sso-oidc"); + const ssoOidcClient = new SSOOIDCClient(Object.assign({}, init.clientConfig ?? {}, { + region: ssoRegion ?? init.clientConfig?.region, + logger: init.clientConfig?.logger ?? 
init.parentClientConfig?.logger, + })); + return ssoOidcClient; +}; diff --git a/amplify/functions/fetchDocuments/node_modules/@aws-sdk/token-providers/dist-es/index.js b/amplify/functions/fetchDocuments/node_modules/@aws-sdk/token-providers/dist-es/index.js new file mode 100644 index 0000000..a0b176b --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@aws-sdk/token-providers/dist-es/index.js @@ -0,0 +1,3 @@ +export * from "./fromSso"; +export * from "./fromStatic"; +export * from "./nodeProvider"; diff --git a/amplify/functions/fetchDocuments/node_modules/@aws-sdk/token-providers/dist-es/nodeProvider.js b/amplify/functions/fetchDocuments/node_modules/@aws-sdk/token-providers/dist-es/nodeProvider.js new file mode 100644 index 0000000..a0c7b52 --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@aws-sdk/token-providers/dist-es/nodeProvider.js @@ -0,0 +1,5 @@ +import { chain, memoize, TokenProviderError } from "@smithy/property-provider"; +import { fromSso } from "./fromSso"; +export const nodeProvider = (init = {}) => memoize(chain(fromSso(init), async () => { + throw new TokenProviderError("Could not load token from any providers", false); +}), (token) => token.expiration !== undefined && token.expiration.getTime() - Date.now() < 300000, (token) => token.expiration !== undefined); diff --git a/amplify/functions/fetchDocuments/node_modules/@aws-sdk/token-providers/dist-es/validateTokenExpiry.js b/amplify/functions/fetchDocuments/node_modules/@aws-sdk/token-providers/dist-es/validateTokenExpiry.js new file mode 100644 index 0000000..8118d7c --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@aws-sdk/token-providers/dist-es/validateTokenExpiry.js @@ -0,0 +1,7 @@ +import { TokenProviderError } from "@smithy/property-provider"; +import { REFRESH_MESSAGE } from "./constants"; +export const validateTokenExpiry = (token) => { + if (token.expiration && token.expiration.getTime() < Date.now()) { + throw new 
TokenProviderError(`Token is expired. ${REFRESH_MESSAGE}`, false); + } +}; diff --git a/amplify/functions/fetchDocuments/node_modules/@aws-sdk/token-providers/dist-es/validateTokenKey.js b/amplify/functions/fetchDocuments/node_modules/@aws-sdk/token-providers/dist-es/validateTokenKey.js new file mode 100644 index 0000000..4979638 --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@aws-sdk/token-providers/dist-es/validateTokenKey.js @@ -0,0 +1,7 @@ +import { TokenProviderError } from "@smithy/property-provider"; +import { REFRESH_MESSAGE } from "./constants"; +export const validateTokenKey = (key, value, forRefresh = false) => { + if (typeof value === "undefined") { + throw new TokenProviderError(`Value not present for '${key}' in SSO Token${forRefresh ? ". Cannot refresh" : ""}. ${REFRESH_MESSAGE}`, false); + } +}; diff --git a/amplify/functions/fetchDocuments/node_modules/@aws-sdk/token-providers/dist-es/writeSSOTokenToFile.js b/amplify/functions/fetchDocuments/node_modules/@aws-sdk/token-providers/dist-es/writeSSOTokenToFile.js new file mode 100644 index 0000000..6da2c9b --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@aws-sdk/token-providers/dist-es/writeSSOTokenToFile.js @@ -0,0 +1,8 @@ +import { getSSOTokenFilepath } from "@smithy/shared-ini-file-loader"; +import { promises as fsPromises } from "fs"; +const { writeFile } = fsPromises; +export const writeSSOTokenToFile = (id, ssoToken) => { + const tokenFilepath = getSSOTokenFilepath(id); + const tokenString = JSON.stringify(ssoToken, null, 2); + return writeFile(tokenFilepath, tokenString); +}; diff --git a/amplify/functions/fetchDocuments/node_modules/@aws-sdk/token-providers/dist-types/constants.d.ts b/amplify/functions/fetchDocuments/node_modules/@aws-sdk/token-providers/dist-types/constants.d.ts new file mode 100644 index 0000000..de28cde --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@aws-sdk/token-providers/dist-types/constants.d.ts @@ -0,0 +1,8 @@ +/** 
+ * The time window (5 mins) that SDK will treat the SSO token expires in before the defined expiration date in token. + * This is needed because server side may have invalidated the token before the defined expiration date. + * + * @internal + */ +export declare const EXPIRE_WINDOW_MS: number; +export declare const REFRESH_MESSAGE = "To refresh this SSO session run 'aws sso login' with the corresponding profile."; diff --git a/amplify/functions/fetchDocuments/node_modules/@aws-sdk/token-providers/dist-types/fromSso.d.ts b/amplify/functions/fetchDocuments/node_modules/@aws-sdk/token-providers/dist-types/fromSso.d.ts new file mode 100644 index 0000000..03f5359 --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@aws-sdk/token-providers/dist-types/fromSso.d.ts @@ -0,0 +1,12 @@ +import { CredentialProviderOptions, RuntimeConfigIdentityProvider, TokenIdentity } from "@aws-sdk/types"; +import { SourceProfileInit } from "@smithy/shared-ini-file-loader"; +export interface FromSsoInit extends SourceProfileInit, CredentialProviderOptions { + /** + * @see SSOOIDCClientConfig in \@aws-sdk/client-sso-oidc. + */ + clientConfig?: any; +} +/** + * Creates a token provider that will read from SSO token cache or ssoOidc.createToken() call. 
+ */ +export declare const fromSso: (_init?: FromSsoInit) => RuntimeConfigIdentityProvider; diff --git a/amplify/functions/fetchDocuments/node_modules/@aws-sdk/token-providers/dist-types/fromStatic.d.ts b/amplify/functions/fetchDocuments/node_modules/@aws-sdk/token-providers/dist-types/fromStatic.d.ts new file mode 100644 index 0000000..d496172 --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@aws-sdk/token-providers/dist-types/fromStatic.d.ts @@ -0,0 +1,9 @@ +import { CredentialProviderOptions, TokenIdentity, TokenIdentityProvider } from "@aws-sdk/types"; +export interface FromStaticInit extends CredentialProviderOptions { + token?: TokenIdentity; +} +/** + * Creates a token provider that will read from static token. + * @public + */ +export declare const fromStatic: ({ token, logger }: FromStaticInit) => TokenIdentityProvider; diff --git a/amplify/functions/fetchDocuments/node_modules/@aws-sdk/token-providers/dist-types/getNewSsoOidcToken.d.ts b/amplify/functions/fetchDocuments/node_modules/@aws-sdk/token-providers/dist-types/getNewSsoOidcToken.d.ts new file mode 100644 index 0000000..75c6322 --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@aws-sdk/token-providers/dist-types/getNewSsoOidcToken.d.ts @@ -0,0 +1,8 @@ +/// +import { SSOToken } from "@smithy/shared-ini-file-loader"; +import { FromSsoInit } from "./fromSso"; +/** + * Returns a new SSO OIDC token from ssoOids.createToken() API call. 
+ * @internal + */ +export declare const getNewSsoOidcToken: (ssoToken: SSOToken, ssoRegion: string, init?: FromSsoInit) => Promise; diff --git a/amplify/functions/fetchDocuments/node_modules/@aws-sdk/token-providers/dist-types/getSsoOidcClient.d.ts b/amplify/functions/fetchDocuments/node_modules/@aws-sdk/token-providers/dist-types/getSsoOidcClient.d.ts new file mode 100644 index 0000000..5c9dcb4 --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@aws-sdk/token-providers/dist-types/getSsoOidcClient.d.ts @@ -0,0 +1,7 @@ +/// +import { FromSsoInit } from "./fromSso"; +/** + * Returns a SSOOIDC client for the given region. + * @internal + */ +export declare const getSsoOidcClient: (ssoRegion: string, init?: FromSsoInit) => Promise; diff --git a/amplify/functions/fetchDocuments/node_modules/@aws-sdk/token-providers/dist-types/index.d.ts b/amplify/functions/fetchDocuments/node_modules/@aws-sdk/token-providers/dist-types/index.d.ts new file mode 100644 index 0000000..a0b176b --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@aws-sdk/token-providers/dist-types/index.d.ts @@ -0,0 +1,3 @@ +export * from "./fromSso"; +export * from "./fromStatic"; +export * from "./nodeProvider"; diff --git a/amplify/functions/fetchDocuments/node_modules/@aws-sdk/token-providers/dist-types/nodeProvider.d.ts b/amplify/functions/fetchDocuments/node_modules/@aws-sdk/token-providers/dist-types/nodeProvider.d.ts new file mode 100644 index 0000000..e4846ec --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@aws-sdk/token-providers/dist-types/nodeProvider.d.ts @@ -0,0 +1,18 @@ +import { TokenIdentityProvider } from "@aws-sdk/types"; +import { FromSsoInit } from "./fromSso"; +/** + * Creates a token provider that will attempt to find token from the + * following sources (listed in order of precedence): + * * SSO token from SSO cache or ssoOidc.createToken() call + * + * The default token provider is designed to invoke one provider at a time and only + * 
continue to the next if no token has been located. It currently has only SSO + * Token Provider in the chain. + * + * @param init Configuration that is passed to each individual + * provider + * + * @see fromSso The function used to source credentials from + * SSO cache or ssoOidc.createToken() call + */ +export declare const nodeProvider: (init?: FromSsoInit) => TokenIdentityProvider; diff --git a/amplify/functions/fetchDocuments/node_modules/@aws-sdk/token-providers/dist-types/ts3.4/constants.d.ts b/amplify/functions/fetchDocuments/node_modules/@aws-sdk/token-providers/dist-types/ts3.4/constants.d.ts new file mode 100644 index 0000000..d7e7577 --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@aws-sdk/token-providers/dist-types/ts3.4/constants.d.ts @@ -0,0 +1,3 @@ +export declare const EXPIRE_WINDOW_MS: number; +export declare const REFRESH_MESSAGE = + "To refresh this SSO session run 'aws sso login' with the corresponding profile."; diff --git a/amplify/functions/fetchDocuments/node_modules/@aws-sdk/token-providers/dist-types/ts3.4/fromSso.d.ts b/amplify/functions/fetchDocuments/node_modules/@aws-sdk/token-providers/dist-types/ts3.4/fromSso.d.ts new file mode 100644 index 0000000..3b5bb60 --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@aws-sdk/token-providers/dist-types/ts3.4/fromSso.d.ts @@ -0,0 +1,14 @@ +import { + CredentialProviderOptions, + RuntimeConfigIdentityProvider, + TokenIdentity, +} from "@aws-sdk/types"; +import { SourceProfileInit } from "@smithy/shared-ini-file-loader"; +export interface FromSsoInit + extends SourceProfileInit, + CredentialProviderOptions { + clientConfig?: any; +} +export declare const fromSso: ( + _init?: FromSsoInit +) => RuntimeConfigIdentityProvider; diff --git a/amplify/functions/fetchDocuments/node_modules/@aws-sdk/token-providers/dist-types/ts3.4/fromStatic.d.ts b/amplify/functions/fetchDocuments/node_modules/@aws-sdk/token-providers/dist-types/ts3.4/fromStatic.d.ts new file mode 100644 
index 0000000..e680012 --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@aws-sdk/token-providers/dist-types/ts3.4/fromStatic.d.ts @@ -0,0 +1,12 @@ +import { + CredentialProviderOptions, + TokenIdentity, + TokenIdentityProvider, +} from "@aws-sdk/types"; +export interface FromStaticInit extends CredentialProviderOptions { + token?: TokenIdentity; +} +export declare const fromStatic: ({ + token, + logger, +}: FromStaticInit) => TokenIdentityProvider; diff --git a/amplify/functions/fetchDocuments/node_modules/@aws-sdk/token-providers/dist-types/ts3.4/getNewSsoOidcToken.d.ts b/amplify/functions/fetchDocuments/node_modules/@aws-sdk/token-providers/dist-types/ts3.4/getNewSsoOidcToken.d.ts new file mode 100644 index 0000000..6bcd71d --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@aws-sdk/token-providers/dist-types/ts3.4/getNewSsoOidcToken.d.ts @@ -0,0 +1,9 @@ +import { SSOToken } from "@smithy/shared-ini-file-loader"; +import { FromSsoInit } from "./fromSso"; +export declare const getNewSsoOidcToken: ( + ssoToken: SSOToken, + ssoRegion: string, + init?: FromSsoInit +) => Promise< + import("@aws-sdk/nested-clients/sso-oidc").CreateTokenCommandOutput +>; diff --git a/amplify/functions/fetchDocuments/node_modules/@aws-sdk/token-providers/dist-types/ts3.4/getSsoOidcClient.d.ts b/amplify/functions/fetchDocuments/node_modules/@aws-sdk/token-providers/dist-types/ts3.4/getSsoOidcClient.d.ts new file mode 100644 index 0000000..c07dc69 --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@aws-sdk/token-providers/dist-types/ts3.4/getSsoOidcClient.d.ts @@ -0,0 +1,5 @@ +import { FromSsoInit } from "./fromSso"; +export declare const getSsoOidcClient: ( + ssoRegion: string, + init?: FromSsoInit +) => Promise; diff --git a/amplify/functions/fetchDocuments/node_modules/@aws-sdk/token-providers/dist-types/ts3.4/index.d.ts b/amplify/functions/fetchDocuments/node_modules/@aws-sdk/token-providers/dist-types/ts3.4/index.d.ts new file mode 100644 
index 0000000..a0b176b --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@aws-sdk/token-providers/dist-types/ts3.4/index.d.ts @@ -0,0 +1,3 @@ +export * from "./fromSso"; +export * from "./fromStatic"; +export * from "./nodeProvider"; diff --git a/amplify/functions/fetchDocuments/node_modules/@aws-sdk/token-providers/dist-types/ts3.4/nodeProvider.d.ts b/amplify/functions/fetchDocuments/node_modules/@aws-sdk/token-providers/dist-types/ts3.4/nodeProvider.d.ts new file mode 100644 index 0000000..11a9bd4 --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@aws-sdk/token-providers/dist-types/ts3.4/nodeProvider.d.ts @@ -0,0 +1,5 @@ +import { TokenIdentityProvider } from "@aws-sdk/types"; +import { FromSsoInit } from "./fromSso"; +export declare const nodeProvider: ( + init?: FromSsoInit +) => TokenIdentityProvider; diff --git a/amplify/functions/fetchDocuments/node_modules/@aws-sdk/token-providers/dist-types/ts3.4/validateTokenExpiry.d.ts b/amplify/functions/fetchDocuments/node_modules/@aws-sdk/token-providers/dist-types/ts3.4/validateTokenExpiry.d.ts new file mode 100644 index 0000000..9003605 --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@aws-sdk/token-providers/dist-types/ts3.4/validateTokenExpiry.d.ts @@ -0,0 +1,2 @@ +import { TokenIdentity } from "@aws-sdk/types"; +export declare const validateTokenExpiry: (token: TokenIdentity) => void; diff --git a/amplify/functions/fetchDocuments/node_modules/@aws-sdk/token-providers/dist-types/ts3.4/validateTokenKey.d.ts b/amplify/functions/fetchDocuments/node_modules/@aws-sdk/token-providers/dist-types/ts3.4/validateTokenKey.d.ts new file mode 100644 index 0000000..105b2b4 --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@aws-sdk/token-providers/dist-types/ts3.4/validateTokenKey.d.ts @@ -0,0 +1,5 @@ +export declare const validateTokenKey: ( + key: string, + value: unknown, + forRefresh?: boolean +) => void; diff --git 
a/amplify/functions/fetchDocuments/node_modules/@aws-sdk/token-providers/dist-types/ts3.4/writeSSOTokenToFile.d.ts b/amplify/functions/fetchDocuments/node_modules/@aws-sdk/token-providers/dist-types/ts3.4/writeSSOTokenToFile.d.ts new file mode 100644 index 0000000..a6d025f --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@aws-sdk/token-providers/dist-types/ts3.4/writeSSOTokenToFile.d.ts @@ -0,0 +1,5 @@ +import { SSOToken } from "@smithy/shared-ini-file-loader"; +export declare const writeSSOTokenToFile: ( + id: string, + ssoToken: SSOToken +) => Promise; diff --git a/amplify/functions/fetchDocuments/node_modules/@aws-sdk/token-providers/dist-types/validateTokenExpiry.d.ts b/amplify/functions/fetchDocuments/node_modules/@aws-sdk/token-providers/dist-types/validateTokenExpiry.d.ts new file mode 100644 index 0000000..1253784 --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@aws-sdk/token-providers/dist-types/validateTokenExpiry.d.ts @@ -0,0 +1,5 @@ +import { TokenIdentity } from "@aws-sdk/types"; +/** + * Throws TokenProviderError is token is expired. + */ +export declare const validateTokenExpiry: (token: TokenIdentity) => void; diff --git a/amplify/functions/fetchDocuments/node_modules/@aws-sdk/token-providers/dist-types/validateTokenKey.d.ts b/amplify/functions/fetchDocuments/node_modules/@aws-sdk/token-providers/dist-types/validateTokenKey.d.ts new file mode 100644 index 0000000..a9618fd --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@aws-sdk/token-providers/dist-types/validateTokenKey.d.ts @@ -0,0 +1,4 @@ +/** + * Throws TokenProviderError if value is undefined for key. 
+ */ +export declare const validateTokenKey: (key: string, value: unknown, forRefresh?: boolean) => void; diff --git a/amplify/functions/fetchDocuments/node_modules/@aws-sdk/token-providers/dist-types/writeSSOTokenToFile.d.ts b/amplify/functions/fetchDocuments/node_modules/@aws-sdk/token-providers/dist-types/writeSSOTokenToFile.d.ts new file mode 100644 index 0000000..a1e17e8 --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@aws-sdk/token-providers/dist-types/writeSSOTokenToFile.d.ts @@ -0,0 +1,5 @@ +import { SSOToken } from "@smithy/shared-ini-file-loader"; +/** + * Writes SSO token to file based on filepath computed from ssoStartUrl or session name. + */ +export declare const writeSSOTokenToFile: (id: string, ssoToken: SSOToken) => Promise; diff --git a/amplify/functions/fetchDocuments/node_modules/@aws-sdk/token-providers/package.json b/amplify/functions/fetchDocuments/node_modules/@aws-sdk/token-providers/package.json new file mode 100644 index 0000000..d035426 --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@aws-sdk/token-providers/package.json @@ -0,0 +1,67 @@ +{ + "name": "@aws-sdk/token-providers", + "version": "3.803.0", + "description": "A collection of token providers", + "main": "./dist-cjs/index.js", + "module": "./dist-es/index.js", + "sideEffects": false, + "scripts": { + "build": "concurrently 'yarn:build:cjs' 'yarn:build:es' 'yarn:build:types'", + "build:cjs": "node ../../scripts/compilation/inline token-providers", + "build:es": "tsc -p tsconfig.es.json", + "build:include:deps": "lerna run --scope $npm_package_name --include-dependencies build", + "build:types": "tsc -p tsconfig.types.json", + "build:types:downlevel": "downlevel-dts dist-types dist-types/ts3.4", + "clean": "rimraf ./dist-* && rimraf *.tsbuildinfo", + "extract:docs": "api-extractor run --local", + "test": "yarn g:vitest run", + "test:watch": "yarn g:vitest watch" + }, + "keywords": [ + "aws", + "token" + ], + "author": { + "name": "AWS SDK for 
JavaScript Team", + "url": "https://aws.amazon.com/javascript/" + }, + "license": "Apache-2.0", + "dependencies": { + "@aws-sdk/nested-clients": "3.803.0", + "@aws-sdk/types": "3.775.0", + "@smithy/property-provider": "^4.0.2", + "@smithy/shared-ini-file-loader": "^4.0.2", + "@smithy/types": "^4.2.0", + "tslib": "^2.6.2" + }, + "devDependencies": { + "@tsconfig/recommended": "1.0.1", + "@types/node": "^18.19.69", + "concurrently": "7.0.0", + "downlevel-dts": "0.10.1", + "rimraf": "3.0.2", + "typescript": "~5.2.2" + }, + "types": "./dist-types/index.d.ts", + "engines": { + "node": ">=18.0.0" + }, + "typesVersions": { + "<4.0": { + "dist-types/*": [ + "dist-types/ts3.4/*" + ] + } + }, + "files": [ + "dist-*/**" + ], + "browser": {}, + "react-native": {}, + "homepage": "https://github.com/aws/aws-sdk-js-v3/tree/main/packages/token-providers", + "repository": { + "type": "git", + "url": "https://github.com/aws/aws-sdk-js-v3.git", + "directory": "packages/token-providers" + } +} diff --git a/amplify/functions/fetchDocuments/node_modules/@aws-sdk/types/LICENSE b/amplify/functions/fetchDocuments/node_modules/@aws-sdk/types/LICENSE new file mode 100644 index 0000000..7b6491b --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@aws-sdk/types/LICENSE @@ -0,0 +1,201 @@ +Apache License + Version 2.0, January 2004 + http://www.apache.org/licenses/ + + TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION + + 1. Definitions. + + "License" shall mean the terms and conditions for use, reproduction, + and distribution as defined by Sections 1 through 9 of this document. + + "Licensor" shall mean the copyright owner or entity authorized by + the copyright owner that is granting the License. + + "Legal Entity" shall mean the union of the acting entity and all + other entities that control, are controlled by, or are under common + control with that entity. 
For the purposes of this definition, + "control" means (i) the power, direct or indirect, to cause the + direction or management of such entity, whether by contract or + otherwise, or (ii) ownership of fifty percent (50%) or more of the + outstanding shares, or (iii) beneficial ownership of such entity. + + "You" (or "Your") shall mean an individual or Legal Entity + exercising permissions granted by this License. + + "Source" form shall mean the preferred form for making modifications, + including but not limited to software source code, documentation + source, and configuration files. + + "Object" form shall mean any form resulting from mechanical + transformation or translation of a Source form, including but + not limited to compiled object code, generated documentation, + and conversions to other media types. + + "Work" shall mean the work of authorship, whether in Source or + Object form, made available under the License, as indicated by a + copyright notice that is included in or attached to the work + (an example is provided in the Appendix below). + + "Derivative Works" shall mean any work, whether in Source or Object + form, that is based on (or derived from) the Work and for which the + editorial revisions, annotations, elaborations, or other modifications + represent, as a whole, an original work of authorship. For the purposes + of this License, Derivative Works shall not include works that remain + separable from, or merely link (or bind by name) to the interfaces of, + the Work and Derivative Works thereof. + + "Contribution" shall mean any work of authorship, including + the original version of the Work and any modifications or additions + to that Work or Derivative Works thereof, that is intentionally + submitted to Licensor for inclusion in the Work by the copyright owner + or by an individual or Legal Entity authorized to submit on behalf of + the copyright owner. 
For the purposes of this definition, "submitted" + means any form of electronic, verbal, or written communication sent + to the Licensor or its representatives, including but not limited to + communication on electronic mailing lists, source code control systems, + and issue tracking systems that are managed by, or on behalf of, the + Licensor for the purpose of discussing and improving the Work, but + excluding communication that is conspicuously marked or otherwise + designated in writing by the copyright owner as "Not a Contribution." + + "Contributor" shall mean Licensor and any individual or Legal Entity + on behalf of whom a Contribution has been received by Licensor and + subsequently incorporated within the Work. + + 2. Grant of Copyright License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + copyright license to reproduce, prepare Derivative Works of, + publicly display, publicly perform, sublicense, and distribute the + Work and such Derivative Works in Source or Object form. + + 3. Grant of Patent License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + (except as stated in this section) patent license to make, have made, + use, offer to sell, sell, import, and otherwise transfer the Work, + where such license applies only to those patent claims licensable + by such Contributor that are necessarily infringed by their + Contribution(s) alone or by combination of their Contribution(s) + with the Work to which such Contribution(s) was submitted. 
If You + institute patent litigation against any entity (including a + cross-claim or counterclaim in a lawsuit) alleging that the Work + or a Contribution incorporated within the Work constitutes direct + or contributory patent infringement, then any patent licenses + granted to You under this License for that Work shall terminate + as of the date such litigation is filed. + + 4. Redistribution. You may reproduce and distribute copies of the + Work or Derivative Works thereof in any medium, with or without + modifications, and in Source or Object form, provided that You + meet the following conditions: + + (a) You must give any other recipients of the Work or + Derivative Works a copy of this License; and + + (b) You must cause any modified files to carry prominent notices + stating that You changed the files; and + + (c) You must retain, in the Source form of any Derivative Works + that You distribute, all copyright, patent, trademark, and + attribution notices from the Source form of the Work, + excluding those notices that do not pertain to any part of + the Derivative Works; and + + (d) If the Work includes a "NOTICE" text file as part of its + distribution, then any Derivative Works that You distribute must + include a readable copy of the attribution notices contained + within such NOTICE file, excluding those notices that do not + pertain to any part of the Derivative Works, in at least one + of the following places: within a NOTICE text file distributed + as part of the Derivative Works; within the Source form or + documentation, if provided along with the Derivative Works; or, + within a display generated by the Derivative Works, if and + wherever such third-party notices normally appear. The contents + of the NOTICE file are for informational purposes only and + do not modify the License. 
You may add Your own attribution + notices within Derivative Works that You distribute, alongside + or as an addendum to the NOTICE text from the Work, provided + that such additional attribution notices cannot be construed + as modifying the License. + + You may add Your own copyright statement to Your modifications and + may provide additional or different license terms and conditions + for use, reproduction, or distribution of Your modifications, or + for any such Derivative Works as a whole, provided Your use, + reproduction, and distribution of the Work otherwise complies with + the conditions stated in this License. + + 5. Submission of Contributions. Unless You explicitly state otherwise, + any Contribution intentionally submitted for inclusion in the Work + by You to the Licensor shall be under the terms and conditions of + this License, without any additional terms or conditions. + Notwithstanding the above, nothing herein shall supersede or modify + the terms of any separate license agreement you may have executed + with Licensor regarding such Contributions. + + 6. Trademarks. This License does not grant permission to use the trade + names, trademarks, service marks, or product names of the Licensor, + except as required for reasonable and customary use in describing the + origin of the Work and reproducing the content of the NOTICE file. + + 7. Disclaimer of Warranty. Unless required by applicable law or + agreed to in writing, Licensor provides the Work (and each + Contributor provides its Contributions) on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or + implied, including, without limitation, any warranties or conditions + of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A + PARTICULAR PURPOSE. You are solely responsible for determining the + appropriateness of using or redistributing the Work and assume any + risks associated with Your exercise of permissions under this License. + + 8. 
Limitation of Liability. In no event and under no legal theory, + whether in tort (including negligence), contract, or otherwise, + unless required by applicable law (such as deliberate and grossly + negligent acts) or agreed to in writing, shall any Contributor be + liable to You for damages, including any direct, indirect, special, + incidental, or consequential damages of any character arising as a + result of this License or out of the use or inability to use the + Work (including but not limited to damages for loss of goodwill, + work stoppage, computer failure or malfunction, or any and all + other commercial damages or losses), even if such Contributor + has been advised of the possibility of such damages. + + 9. Accepting Warranty or Additional Liability. While redistributing + the Work or Derivative Works thereof, You may choose to offer, + and charge a fee for, acceptance of support, warranty, indemnity, + or other liability obligations and/or rights consistent with this + License. However, in accepting such obligations, You may act only + on Your own behalf and on Your sole responsibility, not on behalf + of any other Contributor, and only if You agree to indemnify, + defend, and hold each Contributor harmless for any liability + incurred by, or claims asserted against, such Contributor by reason + of your accepting any such warranty or additional liability. + + END OF TERMS AND CONDITIONS + + APPENDIX: How to apply the Apache License to your work. + + To apply the Apache License to your work, attach the following + boilerplate notice, with the fields enclosed by brackets "{}" + replaced with your own identifying information. (Don't include + the brackets!) The text should be enclosed in the appropriate + comment syntax for the file format. We also recommend that a + file or class name and description of purpose be included on the + same "printed page" as the copyright notice for easier + identification within third-party archives. 
+ + Copyright 2018-2020 Amazon.com, Inc. or its affiliates. All Rights Reserved. + + Licensed under the Apache License, Version 2.0 (the "License"); + you may not use this file except in compliance with the License. + You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + + Unless required by applicable law or agreed to in writing, software + distributed under the License is distributed on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + See the License for the specific language governing permissions and + limitations under the License. \ No newline at end of file diff --git a/amplify/functions/fetchDocuments/node_modules/@aws-sdk/types/README.md b/amplify/functions/fetchDocuments/node_modules/@aws-sdk/types/README.md new file mode 100644 index 0000000..a5658db --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@aws-sdk/types/README.md @@ -0,0 +1,4 @@ +# @aws-sdk/types + +[![NPM version](https://img.shields.io/npm/v/@aws-sdk/types/latest.svg)](https://www.npmjs.com/package/@aws-sdk/types) +[![NPM downloads](https://img.shields.io/npm/dm/@aws-sdk/types.svg)](https://www.npmjs.com/package/@aws-sdk/types) diff --git a/amplify/functions/fetchDocuments/node_modules/@aws-sdk/types/dist-cjs/index.js b/amplify/functions/fetchDocuments/node_modules/@aws-sdk/types/dist-cjs/index.js new file mode 100644 index 0000000..8114db0 --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@aws-sdk/types/dist-cjs/index.js @@ -0,0 +1,294 @@ +"use strict"; +var __defProp = Object.defineProperty; +var __getOwnPropDesc = Object.getOwnPropertyDescriptor; +var __getOwnPropNames = Object.getOwnPropertyNames; +var __hasOwnProp = Object.prototype.hasOwnProperty; +var __export = (target, all) => { + for (var name in all) + __defProp(target, name, { get: all[name], enumerable: true }); +}; +var __copyProps = (to, from, except, desc) => { + if (from && typeof from === "object" || typeof from 
=== "function") { + for (let key of __getOwnPropNames(from)) + if (!__hasOwnProp.call(to, key) && key !== except) + __defProp(to, key, { get: () => from[key], enumerable: !(desc = __getOwnPropDesc(from, key)) || desc.enumerable }); + } + return to; +}; +var __toCommonJS = (mod) => __copyProps(__defProp({}, "__esModule", { value: true }), mod); + +// src/index.ts +var index_exports = {}; +__export(index_exports, { + AbortController: () => import_types.AbortController, + AbortHandler: () => import_types.AbortHandler, + AbortSignal: () => import_types.AbortSignal, + AbsoluteLocation: () => import_types.AbsoluteLocation, + AuthScheme: () => import_types.AuthScheme, + AvailableMessage: () => import_types.AvailableMessage, + AvailableMessages: () => import_types.AvailableMessages, + AwsCredentialIdentity: () => import_types.AwsCredentialIdentity, + AwsCredentialIdentityProvider: () => import_types.AwsCredentialIdentityProvider, + BinaryHeaderValue: () => import_types.BinaryHeaderValue, + BlobTypes: () => import_types.BlobTypes, + BodyLengthCalculator: () => import_types.BodyLengthCalculator, + BooleanHeaderValue: () => import_types.BooleanHeaderValue, + BuildHandler: () => import_types.BuildHandler, + BuildHandlerArguments: () => import_types.BuildHandlerArguments, + BuildHandlerOptions: () => import_types.BuildHandlerOptions, + BuildHandlerOutput: () => import_types.BuildHandlerOutput, + BuildMiddleware: () => import_types.BuildMiddleware, + ByteHeaderValue: () => import_types.ByteHeaderValue, + Checksum: () => import_types.Checksum, + ChecksumConstructor: () => import_types.ChecksumConstructor, + Client: () => import_types.Client, + Command: () => import_types.Command, + ConnectConfiguration: () => import_types.ConnectConfiguration, + ConnectionManager: () => import_types.ConnectionManager, + ConnectionManagerConfiguration: () => import_types.ConnectionManagerConfiguration, + ConnectionPool: () => import_types.ConnectionPool, + DateInput: () => import_types.DateInput, 
+ Decoder: () => import_types.Decoder, + DeserializeHandler: () => import_types.DeserializeHandler, + DeserializeHandlerArguments: () => import_types.DeserializeHandlerArguments, + DeserializeHandlerOptions: () => import_types.DeserializeHandlerOptions, + DeserializeHandlerOutput: () => import_types.DeserializeHandlerOutput, + DeserializeMiddleware: () => import_types.DeserializeMiddleware, + DocumentType: () => import_types.DocumentType, + Encoder: () => import_types.Encoder, + Endpoint: () => import_types.Endpoint, + EndpointARN: () => import_types.EndpointARN, + EndpointBearer: () => import_types.EndpointBearer, + EndpointObjectProperty: () => import_types.EndpointObjectProperty, + EndpointParameters: () => import_types.EndpointParameters, + EndpointPartition: () => import_types.EndpointPartition, + EndpointURL: () => import_types.EndpointURL, + EndpointURLScheme: () => import_types.EndpointURLScheme, + EndpointV2: () => import_types.EndpointV2, + EventSigner: () => import_types.EventSigner, + EventSigningArguments: () => import_types.EventSigningArguments, + EventStreamMarshaller: () => import_types.EventStreamMarshaller, + EventStreamMarshallerDeserFn: () => import_types.EventStreamMarshallerDeserFn, + EventStreamMarshallerSerFn: () => import_types.EventStreamMarshallerSerFn, + EventStreamPayloadHandler: () => import_types.EventStreamPayloadHandler, + EventStreamPayloadHandlerProvider: () => import_types.EventStreamPayloadHandlerProvider, + EventStreamRequestSigner: () => import_types.EventStreamRequestSigner, + EventStreamSerdeContext: () => import_types.EventStreamSerdeContext, + EventStreamSerdeProvider: () => import_types.EventStreamSerdeProvider, + EventStreamSignerProvider: () => import_types.EventStreamSignerProvider, + ExponentialBackoffJitterType: () => import_types.ExponentialBackoffJitterType, + ExponentialBackoffStrategyOptions: () => import_types.ExponentialBackoffStrategyOptions, + FinalizeHandler: () => import_types.FinalizeHandler, + 
FinalizeHandlerArguments: () => import_types.FinalizeHandlerArguments, + FinalizeHandlerOutput: () => import_types.FinalizeHandlerOutput, + FinalizeRequestHandlerOptions: () => import_types.FinalizeRequestHandlerOptions, + FinalizeRequestMiddleware: () => import_types.FinalizeRequestMiddleware, + FormattedEvent: () => import_types.FormattedEvent, + GetAwsChunkedEncodingStream: () => import_types.GetAwsChunkedEncodingStream, + GetAwsChunkedEncodingStreamOptions: () => import_types.GetAwsChunkedEncodingStreamOptions, + Handler: () => import_types.Handler, + HandlerExecutionContext: () => import_types.HandlerExecutionContext, + HandlerOptions: () => import_types.HandlerOptions, + Hash: () => import_types.Hash, + HashConstructor: () => import_types.HashConstructor, + HeaderBag: () => import_types.HeaderBag, + HostAddressType: () => HostAddressType, + HttpAuthDefinition: () => import_types.HttpAuthDefinition, + HttpAuthLocation: () => import_types.HttpAuthLocation, + HttpHandlerOptions: () => import_types.HttpHandlerOptions, + HttpMessage: () => import_types.HttpMessage, + HttpRequest: () => import_types.HttpRequest, + HttpResponse: () => import_types.HttpResponse, + Identity: () => import_types.Identity, + IniSection: () => import_types.IniSection, + InitializeHandler: () => import_types.InitializeHandler, + InitializeHandlerArguments: () => import_types.InitializeHandlerArguments, + InitializeHandlerOptions: () => import_types.InitializeHandlerOptions, + InitializeHandlerOutput: () => import_types.InitializeHandlerOutput, + InitializeMiddleware: () => import_types.InitializeMiddleware, + Int64: () => import_types.Int64, + IntegerHeaderValue: () => import_types.IntegerHeaderValue, + LongHeaderValue: () => import_types.LongHeaderValue, + MemoizedProvider: () => import_types.MemoizedProvider, + Message: () => import_types.Message, + MessageDecoder: () => import_types.MessageDecoder, + MessageEncoder: () => import_types.MessageEncoder, + MessageHeaderValue: () => 
import_types.MessageHeaderValue, + MessageHeaders: () => import_types.MessageHeaders, + MessageSigner: () => import_types.MessageSigner, + MetadataBearer: () => import_types.MetadataBearer, + MiddlewareStack: () => import_types.MiddlewareStack, + MiddlewareType: () => import_types.MiddlewareType, + PaginationConfiguration: () => import_types.PaginationConfiguration, + Paginator: () => import_types.Paginator, + ParsedIniData: () => import_types.ParsedIniData, + Pluggable: () => import_types.Pluggable, + Priority: () => import_types.Priority, + Profile: () => import_types.Profile, + Provider: () => import_types.Provider, + QueryParameterBag: () => import_types.QueryParameterBag, + RegionInfo: () => import_types.RegionInfo, + RegionInfoProvider: () => import_types.RegionInfoProvider, + RegionInfoProviderOptions: () => import_types.RegionInfoProviderOptions, + Relation: () => import_types.Relation, + RelativeLocation: () => import_types.RelativeLocation, + RelativeMiddlewareOptions: () => import_types.RelativeMiddlewareOptions, + RequestContext: () => import_types.RequestContext, + RequestHandler: () => import_types.RequestHandler, + RequestHandlerMetadata: () => import_types.RequestHandlerMetadata, + RequestHandlerOutput: () => import_types.RequestHandlerOutput, + RequestHandlerProtocol: () => import_types.RequestHandlerProtocol, + RequestPresigner: () => import_types.RequestPresigner, + RequestPresigningArguments: () => import_types.RequestPresigningArguments, + RequestSerializer: () => import_types.RequestSerializer, + RequestSigner: () => import_types.RequestSigner, + RequestSigningArguments: () => import_types.RequestSigningArguments, + ResponseDeserializer: () => import_types.ResponseDeserializer, + ResponseMetadata: () => import_types.ResponseMetadata, + RetryBackoffStrategy: () => import_types.RetryBackoffStrategy, + RetryErrorInfo: () => import_types.RetryErrorInfo, + RetryErrorType: () => import_types.RetryErrorType, + RetryStrategy: () => 
import_types.RetryStrategy, + RetryStrategyOptions: () => import_types.RetryStrategyOptions, + RetryStrategyV2: () => import_types.RetryStrategyV2, + RetryToken: () => import_types.RetryToken, + RetryableTrait: () => import_types.RetryableTrait, + SdkError: () => import_types.SdkError, + SdkStream: () => import_types.SdkStream, + SdkStreamMixin: () => import_types.SdkStreamMixin, + SdkStreamMixinInjector: () => import_types.SdkStreamMixinInjector, + SdkStreamSerdeContext: () => import_types.SdkStreamSerdeContext, + SerdeContext: () => import_types.SerdeContext, + SerializeHandler: () => import_types.SerializeHandler, + SerializeHandlerArguments: () => import_types.SerializeHandlerArguments, + SerializeHandlerOptions: () => import_types.SerializeHandlerOptions, + SerializeHandlerOutput: () => import_types.SerializeHandlerOutput, + SerializeMiddleware: () => import_types.SerializeMiddleware, + SharedConfigFiles: () => import_types.SharedConfigFiles, + ShortHeaderValue: () => import_types.ShortHeaderValue, + SignableMessage: () => import_types.SignableMessage, + SignedMessage: () => import_types.SignedMessage, + SigningArguments: () => import_types.SigningArguments, + SmithyException: () => import_types.SmithyException, + SourceData: () => import_types.SourceData, + StandardRetryBackoffStrategy: () => import_types.StandardRetryBackoffStrategy, + StandardRetryToken: () => import_types.StandardRetryToken, + Step: () => import_types.Step, + StreamCollector: () => import_types.StreamCollector, + StreamHasher: () => import_types.StreamHasher, + StringHeaderValue: () => import_types.StringHeaderValue, + StringSigner: () => import_types.StringSigner, + Terminalware: () => import_types.Terminalware, + TimestampHeaderValue: () => import_types.TimestampHeaderValue, + TokenIdentity: () => import_types.TokenIdentity, + TokenIdentityProvider: () => import_types.TokenIdentityProvider, + URI: () => import_types.URI, + UrlParser: () => import_types.UrlParser, + UserAgent: () => 
import_types.UserAgent, + UserAgentPair: () => import_types.UserAgentPair, + UuidHeaderValue: () => import_types.UuidHeaderValue, + WaiterConfiguration: () => import_types.WaiterConfiguration, + WithSdkStreamMixin: () => import_types.WithSdkStreamMixin, + randomValues: () => import_types.randomValues +}); +module.exports = __toCommonJS(index_exports); + +// src/abort.ts +var import_types = require("@smithy/types"); + +// src/auth.ts + + +// src/blob/blob-types.ts + + +// src/checksum.ts + + +// src/client.ts + + +// src/command.ts + + +// src/connection.ts + + +// src/crypto.ts + + +// src/dns.ts +var HostAddressType = /* @__PURE__ */ ((HostAddressType2) => { + HostAddressType2["AAAA"] = "AAAA"; + HostAddressType2["A"] = "A"; + return HostAddressType2; +})(HostAddressType || {}); + +// src/encode.ts + + +// src/endpoint.ts + + +// src/eventStream.ts + + +// src/http.ts + + +// src/identity/AwsCredentialIdentity.ts + + +// src/identity/Identity.ts + + +// src/identity/TokenIdentity.ts + + +// src/middleware.ts + + +// src/pagination.ts + + +// src/profile.ts + + +// src/response.ts + + +// src/retry.ts + + +// src/serde.ts + + +// src/shapes.ts + + +// src/signature.ts + + +// src/stream.ts + + +// src/transfer.ts + + +// src/uri.ts + + +// src/util.ts + + +// src/waiter.ts + +// Annotate the CommonJS export names for ESM import in node: + +0 && (module.exports = { + HttpAuthLocation, + HostAddressType, + EndpointURLScheme, + RequestHandlerProtocol +}); + diff --git a/amplify/functions/fetchDocuments/node_modules/@aws-sdk/types/dist-es/abort.js b/amplify/functions/fetchDocuments/node_modules/@aws-sdk/types/dist-es/abort.js new file mode 100644 index 0000000..cb0ff5c --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@aws-sdk/types/dist-es/abort.js @@ -0,0 +1 @@ +export {}; diff --git a/amplify/functions/fetchDocuments/node_modules/@aws-sdk/types/dist-es/auth.js b/amplify/functions/fetchDocuments/node_modules/@aws-sdk/types/dist-es/auth.js new file 
mode 100644 index 0000000..81f903b --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@aws-sdk/types/dist-es/auth.js @@ -0,0 +1 @@ +export { HttpAuthLocation } from "@smithy/types"; diff --git a/amplify/functions/fetchDocuments/node_modules/@aws-sdk/types/dist-es/blob/blob-types.js b/amplify/functions/fetchDocuments/node_modules/@aws-sdk/types/dist-es/blob/blob-types.js new file mode 100644 index 0000000..cb0ff5c --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@aws-sdk/types/dist-es/blob/blob-types.js @@ -0,0 +1 @@ +export {}; diff --git a/amplify/functions/fetchDocuments/node_modules/@aws-sdk/types/dist-es/checksum.js b/amplify/functions/fetchDocuments/node_modules/@aws-sdk/types/dist-es/checksum.js new file mode 100644 index 0000000..cb0ff5c --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@aws-sdk/types/dist-es/checksum.js @@ -0,0 +1 @@ +export {}; diff --git a/amplify/functions/fetchDocuments/node_modules/@aws-sdk/types/dist-es/client.js b/amplify/functions/fetchDocuments/node_modules/@aws-sdk/types/dist-es/client.js new file mode 100644 index 0000000..cb0ff5c --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@aws-sdk/types/dist-es/client.js @@ -0,0 +1 @@ +export {}; diff --git a/amplify/functions/fetchDocuments/node_modules/@aws-sdk/types/dist-es/command.js b/amplify/functions/fetchDocuments/node_modules/@aws-sdk/types/dist-es/command.js new file mode 100644 index 0000000..cb0ff5c --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@aws-sdk/types/dist-es/command.js @@ -0,0 +1 @@ +export {}; diff --git a/amplify/functions/fetchDocuments/node_modules/@aws-sdk/types/dist-es/connection.js b/amplify/functions/fetchDocuments/node_modules/@aws-sdk/types/dist-es/connection.js new file mode 100644 index 0000000..cb0ff5c --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@aws-sdk/types/dist-es/connection.js @@ -0,0 +1 @@ +export {}; diff --git 
a/amplify/functions/fetchDocuments/node_modules/@aws-sdk/types/dist-es/credentials.js b/amplify/functions/fetchDocuments/node_modules/@aws-sdk/types/dist-es/credentials.js new file mode 100644 index 0000000..cb0ff5c --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@aws-sdk/types/dist-es/credentials.js @@ -0,0 +1 @@ +export {}; diff --git a/amplify/functions/fetchDocuments/node_modules/@aws-sdk/types/dist-es/crypto.js b/amplify/functions/fetchDocuments/node_modules/@aws-sdk/types/dist-es/crypto.js new file mode 100644 index 0000000..cb0ff5c --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@aws-sdk/types/dist-es/crypto.js @@ -0,0 +1 @@ +export {}; diff --git a/amplify/functions/fetchDocuments/node_modules/@aws-sdk/types/dist-es/dns.js b/amplify/functions/fetchDocuments/node_modules/@aws-sdk/types/dist-es/dns.js new file mode 100644 index 0000000..c6a2cd9 --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@aws-sdk/types/dist-es/dns.js @@ -0,0 +1,5 @@ +export var HostAddressType; +(function (HostAddressType) { + HostAddressType["AAAA"] = "AAAA"; + HostAddressType["A"] = "A"; +})(HostAddressType || (HostAddressType = {})); diff --git a/amplify/functions/fetchDocuments/node_modules/@aws-sdk/types/dist-es/encode.js b/amplify/functions/fetchDocuments/node_modules/@aws-sdk/types/dist-es/encode.js new file mode 100644 index 0000000..cb0ff5c --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@aws-sdk/types/dist-es/encode.js @@ -0,0 +1 @@ +export {}; diff --git a/amplify/functions/fetchDocuments/node_modules/@aws-sdk/types/dist-es/endpoint.js b/amplify/functions/fetchDocuments/node_modules/@aws-sdk/types/dist-es/endpoint.js new file mode 100644 index 0000000..ec53acc --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@aws-sdk/types/dist-es/endpoint.js @@ -0,0 +1 @@ +export { EndpointURLScheme, } from "@smithy/types"; diff --git 
a/amplify/functions/fetchDocuments/node_modules/@aws-sdk/types/dist-es/eventStream.js b/amplify/functions/fetchDocuments/node_modules/@aws-sdk/types/dist-es/eventStream.js new file mode 100644 index 0000000..cb0ff5c --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@aws-sdk/types/dist-es/eventStream.js @@ -0,0 +1 @@ +export {}; diff --git a/amplify/functions/fetchDocuments/node_modules/@aws-sdk/types/dist-es/extensions/index.js b/amplify/functions/fetchDocuments/node_modules/@aws-sdk/types/dist-es/extensions/index.js new file mode 100644 index 0000000..cb0ff5c --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@aws-sdk/types/dist-es/extensions/index.js @@ -0,0 +1 @@ +export {}; diff --git a/amplify/functions/fetchDocuments/node_modules/@aws-sdk/types/dist-es/feature-ids.js b/amplify/functions/fetchDocuments/node_modules/@aws-sdk/types/dist-es/feature-ids.js new file mode 100644 index 0000000..cb0ff5c --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@aws-sdk/types/dist-es/feature-ids.js @@ -0,0 +1 @@ +export {}; diff --git a/amplify/functions/fetchDocuments/node_modules/@aws-sdk/types/dist-es/function.js b/amplify/functions/fetchDocuments/node_modules/@aws-sdk/types/dist-es/function.js new file mode 100644 index 0000000..cb0ff5c --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@aws-sdk/types/dist-es/function.js @@ -0,0 +1 @@ +export {}; diff --git a/amplify/functions/fetchDocuments/node_modules/@aws-sdk/types/dist-es/http.js b/amplify/functions/fetchDocuments/node_modules/@aws-sdk/types/dist-es/http.js new file mode 100644 index 0000000..cb0ff5c --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@aws-sdk/types/dist-es/http.js @@ -0,0 +1 @@ +export {}; diff --git a/amplify/functions/fetchDocuments/node_modules/@aws-sdk/types/dist-es/identity/AnonymousIdentity.js b/amplify/functions/fetchDocuments/node_modules/@aws-sdk/types/dist-es/identity/AnonymousIdentity.js new file mode 100644 index 
0000000..cb0ff5c --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@aws-sdk/types/dist-es/identity/AnonymousIdentity.js @@ -0,0 +1 @@ +export {}; diff --git a/amplify/functions/fetchDocuments/node_modules/@aws-sdk/types/dist-es/identity/AwsCredentialIdentity.js b/amplify/functions/fetchDocuments/node_modules/@aws-sdk/types/dist-es/identity/AwsCredentialIdentity.js new file mode 100644 index 0000000..cb0ff5c --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@aws-sdk/types/dist-es/identity/AwsCredentialIdentity.js @@ -0,0 +1 @@ +export {}; diff --git a/amplify/functions/fetchDocuments/node_modules/@aws-sdk/types/dist-es/identity/Identity.js b/amplify/functions/fetchDocuments/node_modules/@aws-sdk/types/dist-es/identity/Identity.js new file mode 100644 index 0000000..cb0ff5c --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@aws-sdk/types/dist-es/identity/Identity.js @@ -0,0 +1 @@ +export {}; diff --git a/amplify/functions/fetchDocuments/node_modules/@aws-sdk/types/dist-es/identity/LoginIdentity.js b/amplify/functions/fetchDocuments/node_modules/@aws-sdk/types/dist-es/identity/LoginIdentity.js new file mode 100644 index 0000000..cb0ff5c --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@aws-sdk/types/dist-es/identity/LoginIdentity.js @@ -0,0 +1 @@ +export {}; diff --git a/amplify/functions/fetchDocuments/node_modules/@aws-sdk/types/dist-es/identity/TokenIdentity.js b/amplify/functions/fetchDocuments/node_modules/@aws-sdk/types/dist-es/identity/TokenIdentity.js new file mode 100644 index 0000000..cb0ff5c --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@aws-sdk/types/dist-es/identity/TokenIdentity.js @@ -0,0 +1 @@ +export {}; diff --git a/amplify/functions/fetchDocuments/node_modules/@aws-sdk/types/dist-es/identity/index.js b/amplify/functions/fetchDocuments/node_modules/@aws-sdk/types/dist-es/identity/index.js new file mode 100644 index 0000000..863e78e --- /dev/null +++ 
b/amplify/functions/fetchDocuments/node_modules/@aws-sdk/types/dist-es/identity/index.js @@ -0,0 +1,5 @@ +export * from "./AnonymousIdentity"; +export * from "./AwsCredentialIdentity"; +export * from "./Identity"; +export * from "./LoginIdentity"; +export * from "./TokenIdentity"; diff --git a/amplify/functions/fetchDocuments/node_modules/@aws-sdk/types/dist-es/index.js b/amplify/functions/fetchDocuments/node_modules/@aws-sdk/types/dist-es/index.js new file mode 100644 index 0000000..a7f99d9 --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@aws-sdk/types/dist-es/index.js @@ -0,0 +1,34 @@ +export * from "./abort"; +export * from "./auth"; +export * from "./blob/blob-types"; +export * from "./checksum"; +export * from "./client"; +export * from "./command"; +export * from "./connection"; +export * from "./credentials"; +export * from "./crypto"; +export * from "./dns"; +export * from "./encode"; +export * from "./endpoint"; +export * from "./eventStream"; +export * from "./extensions"; +export * from "./feature-ids"; +export * from "./function"; +export * from "./http"; +export * from "./identity"; +export * from "./logger"; +export * from "./middleware"; +export * from "./pagination"; +export * from "./profile"; +export * from "./request"; +export * from "./response"; +export * from "./retry"; +export * from "./serde"; +export * from "./shapes"; +export * from "./signature"; +export * from "./stream"; +export * from "./token"; +export * from "./transfer"; +export * from "./uri"; +export * from "./util"; +export * from "./waiter"; diff --git a/amplify/functions/fetchDocuments/node_modules/@aws-sdk/types/dist-es/logger.js b/amplify/functions/fetchDocuments/node_modules/@aws-sdk/types/dist-es/logger.js new file mode 100644 index 0000000..cb0ff5c --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@aws-sdk/types/dist-es/logger.js @@ -0,0 +1 @@ +export {}; diff --git 
a/amplify/functions/fetchDocuments/node_modules/@aws-sdk/types/dist-es/middleware.js b/amplify/functions/fetchDocuments/node_modules/@aws-sdk/types/dist-es/middleware.js new file mode 100644 index 0000000..cb0ff5c --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@aws-sdk/types/dist-es/middleware.js @@ -0,0 +1 @@ +export {}; diff --git a/amplify/functions/fetchDocuments/node_modules/@aws-sdk/types/dist-es/pagination.js b/amplify/functions/fetchDocuments/node_modules/@aws-sdk/types/dist-es/pagination.js new file mode 100644 index 0000000..cb0ff5c --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@aws-sdk/types/dist-es/pagination.js @@ -0,0 +1 @@ +export {}; diff --git a/amplify/functions/fetchDocuments/node_modules/@aws-sdk/types/dist-es/profile.js b/amplify/functions/fetchDocuments/node_modules/@aws-sdk/types/dist-es/profile.js new file mode 100644 index 0000000..cb0ff5c --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@aws-sdk/types/dist-es/profile.js @@ -0,0 +1 @@ +export {}; diff --git a/amplify/functions/fetchDocuments/node_modules/@aws-sdk/types/dist-es/request.js b/amplify/functions/fetchDocuments/node_modules/@aws-sdk/types/dist-es/request.js new file mode 100644 index 0000000..cb0ff5c --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@aws-sdk/types/dist-es/request.js @@ -0,0 +1 @@ +export {}; diff --git a/amplify/functions/fetchDocuments/node_modules/@aws-sdk/types/dist-es/response.js b/amplify/functions/fetchDocuments/node_modules/@aws-sdk/types/dist-es/response.js new file mode 100644 index 0000000..cb0ff5c --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@aws-sdk/types/dist-es/response.js @@ -0,0 +1 @@ +export {}; diff --git a/amplify/functions/fetchDocuments/node_modules/@aws-sdk/types/dist-es/retry.js b/amplify/functions/fetchDocuments/node_modules/@aws-sdk/types/dist-es/retry.js new file mode 100644 index 0000000..cb0ff5c --- /dev/null +++ 
b/amplify/functions/fetchDocuments/node_modules/@aws-sdk/types/dist-es/retry.js @@ -0,0 +1 @@ +export {}; diff --git a/amplify/functions/fetchDocuments/node_modules/@aws-sdk/types/dist-es/serde.js b/amplify/functions/fetchDocuments/node_modules/@aws-sdk/types/dist-es/serde.js new file mode 100644 index 0000000..cb0ff5c --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@aws-sdk/types/dist-es/serde.js @@ -0,0 +1 @@ +export {}; diff --git a/amplify/functions/fetchDocuments/node_modules/@aws-sdk/types/dist-es/shapes.js b/amplify/functions/fetchDocuments/node_modules/@aws-sdk/types/dist-es/shapes.js new file mode 100644 index 0000000..cb0ff5c --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@aws-sdk/types/dist-es/shapes.js @@ -0,0 +1 @@ +export {}; diff --git a/amplify/functions/fetchDocuments/node_modules/@aws-sdk/types/dist-es/signature.js b/amplify/functions/fetchDocuments/node_modules/@aws-sdk/types/dist-es/signature.js new file mode 100644 index 0000000..cb0ff5c --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@aws-sdk/types/dist-es/signature.js @@ -0,0 +1 @@ +export {}; diff --git a/amplify/functions/fetchDocuments/node_modules/@aws-sdk/types/dist-es/stream.js b/amplify/functions/fetchDocuments/node_modules/@aws-sdk/types/dist-es/stream.js new file mode 100644 index 0000000..cb0ff5c --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@aws-sdk/types/dist-es/stream.js @@ -0,0 +1 @@ +export {}; diff --git a/amplify/functions/fetchDocuments/node_modules/@aws-sdk/types/dist-es/token.js b/amplify/functions/fetchDocuments/node_modules/@aws-sdk/types/dist-es/token.js new file mode 100644 index 0000000..cb0ff5c --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@aws-sdk/types/dist-es/token.js @@ -0,0 +1 @@ +export {}; diff --git a/amplify/functions/fetchDocuments/node_modules/@aws-sdk/types/dist-es/transfer.js b/amplify/functions/fetchDocuments/node_modules/@aws-sdk/types/dist-es/transfer.js new 
file mode 100644 index 0000000..ba57589 --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@aws-sdk/types/dist-es/transfer.js @@ -0,0 +1 @@ +export { RequestHandlerProtocol, } from "@smithy/types"; diff --git a/amplify/functions/fetchDocuments/node_modules/@aws-sdk/types/dist-es/uri.js b/amplify/functions/fetchDocuments/node_modules/@aws-sdk/types/dist-es/uri.js new file mode 100644 index 0000000..cb0ff5c --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@aws-sdk/types/dist-es/uri.js @@ -0,0 +1 @@ +export {}; diff --git a/amplify/functions/fetchDocuments/node_modules/@aws-sdk/types/dist-es/util.js b/amplify/functions/fetchDocuments/node_modules/@aws-sdk/types/dist-es/util.js new file mode 100644 index 0000000..cb0ff5c --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@aws-sdk/types/dist-es/util.js @@ -0,0 +1 @@ +export {}; diff --git a/amplify/functions/fetchDocuments/node_modules/@aws-sdk/types/dist-es/waiter.js b/amplify/functions/fetchDocuments/node_modules/@aws-sdk/types/dist-es/waiter.js new file mode 100644 index 0000000..cb0ff5c --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@aws-sdk/types/dist-es/waiter.js @@ -0,0 +1 @@ +export {}; diff --git a/amplify/functions/fetchDocuments/node_modules/@aws-sdk/types/dist-types/abort.d.ts b/amplify/functions/fetchDocuments/node_modules/@aws-sdk/types/dist-types/abort.d.ts new file mode 100644 index 0000000..dad6079 --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@aws-sdk/types/dist-types/abort.d.ts @@ -0,0 +1 @@ +export { AbortController, AbortHandler, AbortSignal } from "@smithy/types"; diff --git a/amplify/functions/fetchDocuments/node_modules/@aws-sdk/types/dist-types/auth.d.ts b/amplify/functions/fetchDocuments/node_modules/@aws-sdk/types/dist-types/auth.d.ts new file mode 100644 index 0000000..6626c16 --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@aws-sdk/types/dist-types/auth.d.ts @@ -0,0 +1 @@ +export { 
AuthScheme, HttpAuthDefinition, HttpAuthLocation } from "@smithy/types"; diff --git a/amplify/functions/fetchDocuments/node_modules/@aws-sdk/types/dist-types/blob/blob-types.d.ts b/amplify/functions/fetchDocuments/node_modules/@aws-sdk/types/dist-types/blob/blob-types.d.ts new file mode 100644 index 0000000..fedb3d5 --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@aws-sdk/types/dist-types/blob/blob-types.d.ts @@ -0,0 +1,2 @@ +import { BlobTypes } from '@smithy/types'; +export { BlobTypes }; diff --git a/amplify/functions/fetchDocuments/node_modules/@aws-sdk/types/dist-types/checksum.d.ts b/amplify/functions/fetchDocuments/node_modules/@aws-sdk/types/dist-types/checksum.d.ts new file mode 100644 index 0000000..f805d72 --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@aws-sdk/types/dist-types/checksum.d.ts @@ -0,0 +1 @@ +export { Checksum, ChecksumConstructor } from "@smithy/types"; diff --git a/amplify/functions/fetchDocuments/node_modules/@aws-sdk/types/dist-types/client.d.ts b/amplify/functions/fetchDocuments/node_modules/@aws-sdk/types/dist-types/client.d.ts new file mode 100644 index 0000000..d6b3dcf --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@aws-sdk/types/dist-types/client.d.ts @@ -0,0 +1 @@ +export { Client } from "@smithy/types"; diff --git a/amplify/functions/fetchDocuments/node_modules/@aws-sdk/types/dist-types/command.d.ts b/amplify/functions/fetchDocuments/node_modules/@aws-sdk/types/dist-types/command.d.ts new file mode 100644 index 0000000..3887267 --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@aws-sdk/types/dist-types/command.d.ts @@ -0,0 +1 @@ +export { Command } from "@smithy/types"; diff --git a/amplify/functions/fetchDocuments/node_modules/@aws-sdk/types/dist-types/connection.d.ts b/amplify/functions/fetchDocuments/node_modules/@aws-sdk/types/dist-types/connection.d.ts new file mode 100644 index 0000000..efcb4d7 --- /dev/null +++ 
b/amplify/functions/fetchDocuments/node_modules/@aws-sdk/types/dist-types/connection.d.ts @@ -0,0 +1 @@ +export { ConnectConfiguration, ConnectionManager, ConnectionManagerConfiguration, ConnectionPool } from "@smithy/types"; diff --git a/amplify/functions/fetchDocuments/node_modules/@aws-sdk/types/dist-types/credentials.d.ts b/amplify/functions/fetchDocuments/node_modules/@aws-sdk/types/dist-types/credentials.d.ts new file mode 100644 index 0000000..181bf8b --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@aws-sdk/types/dist-types/credentials.d.ts @@ -0,0 +1,50 @@ +import { Logger } from "@smithy/types"; +import { AwsCredentialIdentity } from "./identity"; +import { Provider } from "./util"; +/** + * @public + * + * An object representing temporary or permanent AWS credentials. + * + * @deprecated Use {@link AwsCredentialIdentity} + */ +export interface Credentials extends AwsCredentialIdentity { +} +/** + * @public + * + * @deprecated Use {@link AwsCredentialIdentityProvider} + */ +export type CredentialProvider = Provider; +/** + * @public + * + * Common options for credential providers. + */ +export type CredentialProviderOptions = { + /** + * This logger is only used to provide information + * on what credential providers were used during resolution. + * + * It does not log credentials. + */ + logger?: Logger; + /** + * Present if the credential provider was created by calling + * the defaultCredentialProvider in a client's middleware, having + * access to the client's config. + * + * The region of that parent or outer client is important because + * an inner client used by the credential provider may need + * to match its default partition or region with that of + * the outer client. + * + * @internal + * @deprecated - not truly deprecated, marked as a warning to not use this. 
+ */ + parentClientConfig?: { + region?: string | Provider; + profile?: string; + [key: string]: unknown; + }; +}; diff --git a/amplify/functions/fetchDocuments/node_modules/@aws-sdk/types/dist-types/crypto.d.ts b/amplify/functions/fetchDocuments/node_modules/@aws-sdk/types/dist-types/crypto.d.ts new file mode 100644 index 0000000..aeeea50 --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@aws-sdk/types/dist-types/crypto.d.ts @@ -0,0 +1 @@ +export { Hash, HashConstructor, StreamHasher, randomValues, SourceData } from "@smithy/types"; diff --git a/amplify/functions/fetchDocuments/node_modules/@aws-sdk/types/dist-types/dns.d.ts b/amplify/functions/fetchDocuments/node_modules/@aws-sdk/types/dist-types/dns.d.ts new file mode 100644 index 0000000..8348cc4 --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@aws-sdk/types/dist-types/dns.d.ts @@ -0,0 +1,85 @@ +/** + * @public + * + * DNS record types + */ +export declare enum HostAddressType { + /** + * IPv6 + */ + AAAA = "AAAA", + /** + * IPv4 + */ + A = "A" +} +/** + * @public + */ +export interface HostAddress { + /** + * The {@link HostAddressType} of the host address. + */ + addressType: HostAddressType; + /** + * The resolved numerical address represented as a + * string. + */ + address: string; + /** + * The host name the {@link address} was resolved from. + */ + hostName: string; + /** + * The service record of {@link hostName}. + */ + service?: string; +} +/** + * @public + */ +export interface HostResolverArguments { + /** + * The host name to resolve. + */ + hostName: string; + /** + * The service record of {@link hostName}. + */ + service?: string; +} +/** + * @public + * + * Host Resolver interface for DNS queries + */ +export interface HostResolver { + /** + * Resolves the address(es) for {@link HostResolverArguments} and returns a + * list of addresses with (most likely) two addresses, one {@link HostAddressType.AAAA} + * and one {@link HostAddressType.A}. 
Calls to this function will likely alter + * the cache (if implemented) so that if there's multiple addresses, a different + * set will be returned on the next call. + * In the case of multi-answer, still only a maximum of two records should be + * returned. The resolver implementation is responsible for caching and rotation + * of the multiple addresses that get returned. + * Implementations don't have to explictly call getaddrinfo(), they can use + * high level abstractions provided in their language runtimes/libraries. + * @param args - arguments with host name query addresses for + * @returns promise with a list of {@link HostAddress} + */ + resolveAddress(args: HostResolverArguments): Promise; + /** + * Reports a failure on a {@link HostAddress} so that the cache (if implemented) + * can accomodate the failure and likely not return the address until it recovers. + * @param addr - host address to report a failure on + */ + reportFailureOnAddress(addr: HostAddress): void; + /** + * Empties the cache (if implemented) for a {@link HostResolverArguments.hostName}. + * If {@link HostResolverArguments.hostName} is not provided, the cache (if + * implemented) is emptied for all host names. 
+ * @param args - optional arguments to empty the cache for + */ + purgeCache(args?: HostResolverArguments): void; +} diff --git a/amplify/functions/fetchDocuments/node_modules/@aws-sdk/types/dist-types/encode.d.ts b/amplify/functions/fetchDocuments/node_modules/@aws-sdk/types/dist-types/encode.d.ts new file mode 100644 index 0000000..128ee57 --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@aws-sdk/types/dist-types/encode.d.ts @@ -0,0 +1 @@ +export { MessageDecoder, MessageEncoder, AvailableMessage, AvailableMessages } from "@smithy/types"; diff --git a/amplify/functions/fetchDocuments/node_modules/@aws-sdk/types/dist-types/endpoint.d.ts b/amplify/functions/fetchDocuments/node_modules/@aws-sdk/types/dist-types/endpoint.d.ts new file mode 100644 index 0000000..f2ffaf5 --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@aws-sdk/types/dist-types/endpoint.d.ts @@ -0,0 +1 @@ +export { EndpointARN, EndpointPartition, EndpointURLScheme, EndpointURL, EndpointObjectProperty, EndpointV2, EndpointParameters, } from "@smithy/types"; diff --git a/amplify/functions/fetchDocuments/node_modules/@aws-sdk/types/dist-types/eventStream.d.ts b/amplify/functions/fetchDocuments/node_modules/@aws-sdk/types/dist-types/eventStream.d.ts new file mode 100644 index 0000000..cee02f7 --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@aws-sdk/types/dist-types/eventStream.d.ts @@ -0,0 +1 @@ +export { Message, MessageHeaders, BooleanHeaderValue, ByteHeaderValue, ShortHeaderValue, IntegerHeaderValue, LongHeaderValue, BinaryHeaderValue, StringHeaderValue, TimestampHeaderValue, UuidHeaderValue, MessageHeaderValue, Int64, EventStreamSerdeContext, EventStreamMarshaller, EventStreamMarshallerDeserFn, EventStreamMarshallerSerFn, EventStreamPayloadHandler, EventStreamPayloadHandlerProvider, EventStreamRequestSigner, EventStreamSerdeProvider, EventStreamSignerProvider, } from "@smithy/types"; diff --git 
a/amplify/functions/fetchDocuments/node_modules/@aws-sdk/types/dist-types/extensions/index.d.ts b/amplify/functions/fetchDocuments/node_modules/@aws-sdk/types/dist-types/extensions/index.d.ts new file mode 100644 index 0000000..5a45bcb --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@aws-sdk/types/dist-types/extensions/index.d.ts @@ -0,0 +1,8 @@ +import { Provider } from "@smithy/types"; +/** + * @internal + */ +export interface AwsRegionExtensionConfiguration { + setRegion(region: Provider): void; + region(): Provider; +} diff --git a/amplify/functions/fetchDocuments/node_modules/@aws-sdk/types/dist-types/feature-ids.d.ts b/amplify/functions/fetchDocuments/node_modules/@aws-sdk/types/dist-types/feature-ids.d.ts new file mode 100644 index 0000000..f1679fa --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@aws-sdk/types/dist-types/feature-ids.d.ts @@ -0,0 +1,59 @@ +/** + * @internal + */ +export type AwsSdkFeatures = Partial<{ + RESOURCE_MODEL: "A"; + WAITER: "B"; + PAGINATOR: "C"; + RETRY_MODE_LEGACY: "D"; + RETRY_MODE_STANDARD: "E"; + RETRY_MODE_ADAPTIVE: "F"; + S3_EXPRESS_BUCKET: "J"; + S3_ACCESS_GRANTS: "K"; + GZIP_REQUEST_COMPRESSION: "L"; + PROTOCOL_RPC_V2_CBOR: "M"; + ENDPOINT_OVERRIDE: "N"; + ACCOUNT_ID_ENDPOINT: "O"; + ACCOUNT_ID_MODE_PREFERRED: "P"; + ACCOUNT_ID_MODE_DISABLED: "Q"; + ACCOUNT_ID_MODE_REQUIRED: "R"; + SIGV4A_SIGNING: "S"; + FLEXIBLE_CHECKSUMS_REQ_CRC32: "U"; + FLEXIBLE_CHECKSUMS_REQ_CRC32C: "V"; + FLEXIBLE_CHECKSUMS_REQ_CRC64: "W"; + FLEXIBLE_CHECKSUMS_REQ_SHA1: "X"; + FLEXIBLE_CHECKSUMS_REQ_SHA256: "Y"; + FLEXIBLE_CHECKSUMS_REQ_WHEN_SUPPORTED: "Z"; + FLEXIBLE_CHECKSUMS_REQ_WHEN_REQUIRED: "a"; + FLEXIBLE_CHECKSUMS_RES_WHEN_SUPPORTED: "b"; + FLEXIBLE_CHECKSUMS_RES_WHEN_REQUIRED: "c"; + DDB_MAPPER: "d"; +}> & AwsSdkCredentialsFeatures; +/** + * @internal + */ +export type AwsSdkCredentialsFeatures = Partial<{ + RESOLVED_ACCOUNT_ID: "T"; + CREDENTIALS_CODE: "e"; + CREDENTIALS_ENV_VARS: "g"; + 
CREDENTIALS_ENV_VARS_STS_WEB_ID_TOKEN: "h"; + CREDENTIALS_STS_ASSUME_ROLE: "i"; + CREDENTIALS_STS_ASSUME_ROLE_SAML: "j"; + CREDENTIALS_STS_ASSUME_ROLE_WEB_ID: "k"; + CREDENTIALS_STS_FEDERATION_TOKEN: "l"; + CREDENTIALS_STS_SESSION_TOKEN: "m"; + CREDENTIALS_PROFILE: "n"; + CREDENTIALS_PROFILE_SOURCE_PROFILE: "o"; + CREDENTIALS_PROFILE_NAMED_PROVIDER: "p"; + CREDENTIALS_PROFILE_STS_WEB_ID_TOKEN: "q"; + CREDENTIALS_PROFILE_SSO: "r"; + CREDENTIALS_SSO: "s"; + CREDENTIALS_PROFILE_SSO_LEGACY: "t"; + CREDENTIALS_SSO_LEGACY: "u"; + CREDENTIALS_PROFILE_PROCESS: "v"; + CREDENTIALS_PROCESS: "w"; + CREDENTIALS_BOTO2_CONFIG_FILE: "x"; + CREDENTIALS_AWS_SDK_STORE: "y"; + CREDENTIALS_HTTP: "z"; + CREDENTIALS_IMDS: "0"; +}>; diff --git a/amplify/functions/fetchDocuments/node_modules/@aws-sdk/types/dist-types/function.d.ts b/amplify/functions/fetchDocuments/node_modules/@aws-sdk/types/dist-types/function.d.ts new file mode 100644 index 0000000..3c777fa --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@aws-sdk/types/dist-types/function.d.ts @@ -0,0 +1,7 @@ +/** + * Resolves a function that accepts both the object argument fields of F1 and F2. + * The function returns an intersection of what F1 and F2 return. + * + * @public + */ +export type MergeFunctions = F1 extends (arg: infer A1) => infer R1 ? F2 extends (arg: infer A2) => infer R2 ? R1 extends Promise ? 
(arg?: A1 & A2) => Promise & Awaited> : (arg?: A1 & A2) => R1 & R2 : never : never; diff --git a/amplify/functions/fetchDocuments/node_modules/@aws-sdk/types/dist-types/http.d.ts b/amplify/functions/fetchDocuments/node_modules/@aws-sdk/types/dist-types/http.d.ts new file mode 100644 index 0000000..7594b5a --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@aws-sdk/types/dist-types/http.d.ts @@ -0,0 +1,33 @@ +import { HttpResponse } from "@smithy/types"; +export { Endpoint, HeaderBag, HttpHandlerOptions, HttpMessage, HttpRequest, HttpResponse, QueryParameterBag, } from "@smithy/types"; +/** + * @public + * + * A collection of key/value pairs with case-insensitive keys. + */ +export interface Headers extends Map { + /** + * Returns a new instance of Headers with the specified header set to the + * provided value. Does not modify the original Headers instance. + * + * @param headerName - The name of the header to add or overwrite + * @param headerValue - The value to which the header should be set + */ + withHeader(headerName: string, headerValue: string): Headers; + /** + * Returns a new instance of Headers without the specified header. Does not + * modify the original Headers instance. + * + * @param headerName - The name of the header to remove + */ + withoutHeader(headerName: string): Headers; +} +/** + * @public + * + * Represents HTTP message whose body has been resolved to a string. This is + * used in parsing http message. 
+ */ +export interface ResolvedHttpResponse extends HttpResponse { + body: string; +} diff --git a/amplify/functions/fetchDocuments/node_modules/@aws-sdk/types/dist-types/identity/AnonymousIdentity.d.ts b/amplify/functions/fetchDocuments/node_modules/@aws-sdk/types/dist-types/identity/AnonymousIdentity.d.ts new file mode 100644 index 0000000..c7006e3 --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@aws-sdk/types/dist-types/identity/AnonymousIdentity.d.ts @@ -0,0 +1,6 @@ +import { Identity } from "./Identity"; +/** + * @public + */ +export interface AnonymousIdentity extends Identity { +} diff --git a/amplify/functions/fetchDocuments/node_modules/@aws-sdk/types/dist-types/identity/AwsCredentialIdentity.d.ts b/amplify/functions/fetchDocuments/node_modules/@aws-sdk/types/dist-types/identity/AwsCredentialIdentity.d.ts new file mode 100644 index 0000000..c94b6c4 --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@aws-sdk/types/dist-types/identity/AwsCredentialIdentity.d.ts @@ -0,0 +1,60 @@ +import type { AwsCredentialIdentity, AwsCredentialIdentityProvider, Logger, RequestHandler } from "@smithy/types"; +import type { AwsSdkCredentialsFeatures } from "../feature-ids"; +export { AwsCredentialIdentity, AwsCredentialIdentityProvider, IdentityProvider } from "@smithy/types"; +/** + * @public + */ +export interface AwsIdentityProperties { + /** + * These are resolved client config values, and may be async providers. + */ + callerClientConfig?: { + /** + * It is likely a programming error if you use + * the caller client config credentials in a credential provider, since + * it will recurse. + * + * @deprecated do not use. + */ + credentials?: AwsCredentialIdentity | AwsCredentialIdentityProvider; + /** + * @internal + * @deprecated minimize use. 
+ */ + credentialDefaultProvider?: (input?: any) => AwsCredentialIdentityProvider; + logger?: Logger; + profile?: string; + region(): Promise; + requestHandler?: RequestHandler; + }; +} +/** + * @public + * + * Variation of {@link IdentityProvider} which accepts a contextual + * client configuration that includes an AWS region and potentially other + * configurable fields. + * + * Used to link a credential provider to a client if it is being called + * in the context of a client. + */ +export type RuntimeConfigIdentityProvider = (awsIdentityProperties?: AwsIdentityProperties) => Promise; +/** + * @public + * + * Variation of {@link AwsCredentialIdentityProvider} which accepts a contextual + * client configuration that includes an AWS region and potentially other + * configurable fields. + * + * Used to link a credential provider to a client if it is being called + * in the context of a client. + */ +export type RuntimeConfigAwsCredentialIdentityProvider = RuntimeConfigIdentityProvider; +/** + * @public + * + * AwsCredentialIdentity with source attribution metadata. 
+ */ +export type AttributedAwsCredentialIdentity = AwsCredentialIdentity & { + $source?: AwsSdkCredentialsFeatures; +}; diff --git a/amplify/functions/fetchDocuments/node_modules/@aws-sdk/types/dist-types/identity/Identity.d.ts b/amplify/functions/fetchDocuments/node_modules/@aws-sdk/types/dist-types/identity/Identity.d.ts new file mode 100644 index 0000000..4175fd3 --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@aws-sdk/types/dist-types/identity/Identity.d.ts @@ -0,0 +1 @@ +export { Identity, IdentityProvider } from "@smithy/types"; diff --git a/amplify/functions/fetchDocuments/node_modules/@aws-sdk/types/dist-types/identity/LoginIdentity.d.ts b/amplify/functions/fetchDocuments/node_modules/@aws-sdk/types/dist-types/identity/LoginIdentity.d.ts new file mode 100644 index 0000000..13793f9 --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@aws-sdk/types/dist-types/identity/LoginIdentity.d.ts @@ -0,0 +1,18 @@ +import { Identity, IdentityProvider } from "./Identity"; +/** + * @public + */ +export interface LoginIdentity extends Identity { + /** + * Identity username + */ + readonly username: string; + /** + * Identity password + */ + readonly password: string; +} +/** + * @public + */ +export type LoginIdentityProvider = IdentityProvider; diff --git a/amplify/functions/fetchDocuments/node_modules/@aws-sdk/types/dist-types/identity/TokenIdentity.d.ts b/amplify/functions/fetchDocuments/node_modules/@aws-sdk/types/dist-types/identity/TokenIdentity.d.ts new file mode 100644 index 0000000..66301bc --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@aws-sdk/types/dist-types/identity/TokenIdentity.d.ts @@ -0,0 +1 @@ +export { TokenIdentity, TokenIdentityProvider } from "@smithy/types"; diff --git a/amplify/functions/fetchDocuments/node_modules/@aws-sdk/types/dist-types/identity/index.d.ts b/amplify/functions/fetchDocuments/node_modules/@aws-sdk/types/dist-types/identity/index.d.ts new file mode 100644 index 0000000..863e78e 
--- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@aws-sdk/types/dist-types/identity/index.d.ts @@ -0,0 +1,5 @@ +export * from "./AnonymousIdentity"; +export * from "./AwsCredentialIdentity"; +export * from "./Identity"; +export * from "./LoginIdentity"; +export * from "./TokenIdentity"; diff --git a/amplify/functions/fetchDocuments/node_modules/@aws-sdk/types/dist-types/index.d.ts b/amplify/functions/fetchDocuments/node_modules/@aws-sdk/types/dist-types/index.d.ts new file mode 100644 index 0000000..a7f99d9 --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@aws-sdk/types/dist-types/index.d.ts @@ -0,0 +1,34 @@ +export * from "./abort"; +export * from "./auth"; +export * from "./blob/blob-types"; +export * from "./checksum"; +export * from "./client"; +export * from "./command"; +export * from "./connection"; +export * from "./credentials"; +export * from "./crypto"; +export * from "./dns"; +export * from "./encode"; +export * from "./endpoint"; +export * from "./eventStream"; +export * from "./extensions"; +export * from "./feature-ids"; +export * from "./function"; +export * from "./http"; +export * from "./identity"; +export * from "./logger"; +export * from "./middleware"; +export * from "./pagination"; +export * from "./profile"; +export * from "./request"; +export * from "./response"; +export * from "./retry"; +export * from "./serde"; +export * from "./shapes"; +export * from "./signature"; +export * from "./stream"; +export * from "./token"; +export * from "./transfer"; +export * from "./uri"; +export * from "./util"; +export * from "./waiter"; diff --git a/amplify/functions/fetchDocuments/node_modules/@aws-sdk/types/dist-types/logger.d.ts b/amplify/functions/fetchDocuments/node_modules/@aws-sdk/types/dist-types/logger.d.ts new file mode 100644 index 0000000..11a33c6 --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@aws-sdk/types/dist-types/logger.d.ts @@ -0,0 +1,22 @@ +import type { Logger } from 
"@smithy/types"; +export type { Logger } from "@smithy/types"; +/** + * @public + * + * A list of logger's log level. These levels are sorted in + * order of increasing severity. Each log level includes itself and all + * the levels behind itself. + * + * @example `new Logger({logLevel: 'warn'})` will print all the warn and error + * message. + */ +export type LogLevel = "all" | "trace" | "debug" | "log" | "info" | "warn" | "error" | "off"; +/** + * @public + * + * An object consumed by Logger constructor to initiate a logger object. + */ +export interface LoggerOptions { + logger?: Logger; + logLevel?: LogLevel; +} diff --git a/amplify/functions/fetchDocuments/node_modules/@aws-sdk/types/dist-types/middleware.d.ts b/amplify/functions/fetchDocuments/node_modules/@aws-sdk/types/dist-types/middleware.d.ts new file mode 100644 index 0000000..06ba3e2 --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@aws-sdk/types/dist-types/middleware.d.ts @@ -0,0 +1,13 @@ +import { HandlerExecutionContext } from "@smithy/types"; +import { AwsSdkFeatures } from "./feature-ids"; +export { AbsoluteLocation, BuildHandler, BuildHandlerArguments, BuildHandlerOptions, BuildHandlerOutput, BuildMiddleware, DeserializeHandler, DeserializeHandlerArguments, DeserializeHandlerOptions, DeserializeHandlerOutput, DeserializeMiddleware, FinalizeHandler, FinalizeHandlerArguments, FinalizeHandlerOutput, FinalizeRequestHandlerOptions, FinalizeRequestMiddleware, Handler, HandlerExecutionContext, HandlerOptions, InitializeHandler, InitializeHandlerArguments, InitializeHandlerOptions, InitializeHandlerOutput, InitializeMiddleware, MiddlewareStack, MiddlewareType, Pluggable, Priority, Relation, RelativeLocation, RelativeMiddlewareOptions, SerializeHandler, SerializeHandlerArguments, SerializeHandlerOptions, SerializeHandlerOutput, SerializeMiddleware, Step, Terminalware, } from "@smithy/types"; +/** + * @internal + * Contains reserved keys for AWS SDK internal usage of the + * handler 
execution context object. + */ +export interface AwsHandlerExecutionContext extends HandlerExecutionContext { + __aws_sdk_context?: { + features?: AwsSdkFeatures; + }; +} diff --git a/amplify/functions/fetchDocuments/node_modules/@aws-sdk/types/dist-types/pagination.d.ts b/amplify/functions/fetchDocuments/node_modules/@aws-sdk/types/dist-types/pagination.d.ts new file mode 100644 index 0000000..af791b0 --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@aws-sdk/types/dist-types/pagination.d.ts @@ -0,0 +1 @@ +export { PaginationConfiguration, Paginator } from "@smithy/types"; diff --git a/amplify/functions/fetchDocuments/node_modules/@aws-sdk/types/dist-types/profile.d.ts b/amplify/functions/fetchDocuments/node_modules/@aws-sdk/types/dist-types/profile.d.ts new file mode 100644 index 0000000..9916f3b --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@aws-sdk/types/dist-types/profile.d.ts @@ -0,0 +1 @@ +export { IniSection, Profile, ParsedIniData, SharedConfigFiles } from "@smithy/types"; diff --git a/amplify/functions/fetchDocuments/node_modules/@aws-sdk/types/dist-types/request.d.ts b/amplify/functions/fetchDocuments/node_modules/@aws-sdk/types/dist-types/request.d.ts new file mode 100644 index 0000000..95405d1 --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@aws-sdk/types/dist-types/request.d.ts @@ -0,0 +1,7 @@ +/** + * @internal + */ +export interface Request { + destination: URL; + body?: any; +} diff --git a/amplify/functions/fetchDocuments/node_modules/@aws-sdk/types/dist-types/response.d.ts b/amplify/functions/fetchDocuments/node_modules/@aws-sdk/types/dist-types/response.d.ts new file mode 100644 index 0000000..8d99350 --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@aws-sdk/types/dist-types/response.d.ts @@ -0,0 +1,7 @@ +export { MetadataBearer, ResponseMetadata } from "@smithy/types"; +/** + * @internal + */ +export interface Response { + body: any; +} diff --git 
a/amplify/functions/fetchDocuments/node_modules/@aws-sdk/types/dist-types/retry.d.ts b/amplify/functions/fetchDocuments/node_modules/@aws-sdk/types/dist-types/retry.d.ts new file mode 100644 index 0000000..4b7eb98 --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@aws-sdk/types/dist-types/retry.d.ts @@ -0,0 +1 @@ +export { ExponentialBackoffJitterType, ExponentialBackoffStrategyOptions, RetryBackoffStrategy, RetryErrorInfo, RetryErrorType, RetryStrategyOptions, RetryStrategyV2, RetryToken, StandardRetryBackoffStrategy, StandardRetryToken, } from "@smithy/types"; diff --git a/amplify/functions/fetchDocuments/node_modules/@aws-sdk/types/dist-types/serde.d.ts b/amplify/functions/fetchDocuments/node_modules/@aws-sdk/types/dist-types/serde.d.ts new file mode 100644 index 0000000..c4cab79 --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@aws-sdk/types/dist-types/serde.d.ts @@ -0,0 +1,24 @@ +export { EndpointBearer, StreamCollector, SerdeContext, ResponseDeserializer, RequestSerializer, SdkStreamMixin, SdkStream, WithSdkStreamMixin, SdkStreamMixinInjector, SdkStreamSerdeContext, } from "@smithy/types"; +/** + * @public + * + * Declare DOM interfaces in case dom.d.ts is not added to the tsconfig lib, causing + * interfaces to not be defined. For developers with dom.d.ts added, the interfaces will + * be merged correctly. + * + * This is also required for any clients with streaming interfaces where the corresponding + * types are also referred. The type is only declared here once since this `@aws-sdk/types` + * is depended by all `@aws-sdk` packages. 
+ */ +declare global { + /** + * @public + */ + export interface ReadableStream { + } + /** + * @public + */ + export interface Blob { + } +} diff --git a/amplify/functions/fetchDocuments/node_modules/@aws-sdk/types/dist-types/shapes.d.ts b/amplify/functions/fetchDocuments/node_modules/@aws-sdk/types/dist-types/shapes.d.ts new file mode 100644 index 0000000..bc19cc7 --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@aws-sdk/types/dist-types/shapes.d.ts @@ -0,0 +1 @@ +export { DocumentType, RetryableTrait, SmithyException, SdkError } from "@smithy/types"; diff --git a/amplify/functions/fetchDocuments/node_modules/@aws-sdk/types/dist-types/signature.d.ts b/amplify/functions/fetchDocuments/node_modules/@aws-sdk/types/dist-types/signature.d.ts new file mode 100644 index 0000000..23cbe97 --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@aws-sdk/types/dist-types/signature.d.ts @@ -0,0 +1 @@ +export { DateInput, EventSigner, EventSigningArguments, FormattedEvent, MessageSigner, RequestSigningArguments, RequestPresigner, RequestPresigningArguments, RequestSigner, SignableMessage, SignedMessage, SigningArguments, StringSigner, } from "@smithy/types"; diff --git a/amplify/functions/fetchDocuments/node_modules/@aws-sdk/types/dist-types/stream.d.ts b/amplify/functions/fetchDocuments/node_modules/@aws-sdk/types/dist-types/stream.d.ts new file mode 100644 index 0000000..9092844 --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@aws-sdk/types/dist-types/stream.d.ts @@ -0,0 +1 @@ +export { GetAwsChunkedEncodingStream, GetAwsChunkedEncodingStreamOptions } from "@smithy/types"; diff --git a/amplify/functions/fetchDocuments/node_modules/@aws-sdk/types/dist-types/token.d.ts b/amplify/functions/fetchDocuments/node_modules/@aws-sdk/types/dist-types/token.d.ts new file mode 100644 index 0000000..a68d58f --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@aws-sdk/types/dist-types/token.d.ts @@ -0,0 +1,17 @@ +import { 
TokenIdentity } from "./identity"; +import { Provider } from "./util"; +/** + * @public + * + * An object representing temporary or permanent AWS token. + * + * @deprecated Use {@link TokenIdentity} + */ +export interface Token extends TokenIdentity { +} +/** + * @public + * + * @deprecated Use {@link TokenIdentityProvider} + */ +export type TokenProvider = Provider; diff --git a/amplify/functions/fetchDocuments/node_modules/@aws-sdk/types/dist-types/transfer.d.ts b/amplify/functions/fetchDocuments/node_modules/@aws-sdk/types/dist-types/transfer.d.ts new file mode 100644 index 0000000..ba78190 --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@aws-sdk/types/dist-types/transfer.d.ts @@ -0,0 +1 @@ +export { RequestContext, RequestHandler, RequestHandlerMetadata, RequestHandlerOutput, RequestHandlerProtocol, } from "@smithy/types"; diff --git a/amplify/functions/fetchDocuments/node_modules/@aws-sdk/types/dist-types/ts3.4/abort.d.ts b/amplify/functions/fetchDocuments/node_modules/@aws-sdk/types/dist-types/ts3.4/abort.d.ts new file mode 100644 index 0000000..dad6079 --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@aws-sdk/types/dist-types/ts3.4/abort.d.ts @@ -0,0 +1 @@ +export { AbortController, AbortHandler, AbortSignal } from "@smithy/types"; diff --git a/amplify/functions/fetchDocuments/node_modules/@aws-sdk/types/dist-types/ts3.4/auth.d.ts b/amplify/functions/fetchDocuments/node_modules/@aws-sdk/types/dist-types/ts3.4/auth.d.ts new file mode 100644 index 0000000..8a02dbc --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@aws-sdk/types/dist-types/ts3.4/auth.d.ts @@ -0,0 +1,5 @@ +export { + AuthScheme, + HttpAuthDefinition, + HttpAuthLocation, +} from "@smithy/types"; diff --git a/amplify/functions/fetchDocuments/node_modules/@aws-sdk/types/dist-types/ts3.4/blob/blob-types.d.ts b/amplify/functions/fetchDocuments/node_modules/@aws-sdk/types/dist-types/ts3.4/blob/blob-types.d.ts new file mode 100644 index 
0000000..df39efe --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@aws-sdk/types/dist-types/ts3.4/blob/blob-types.d.ts @@ -0,0 +1,2 @@ +import { BlobTypes } from "@smithy/types"; +export { BlobTypes }; diff --git a/amplify/functions/fetchDocuments/node_modules/@aws-sdk/types/dist-types/ts3.4/checksum.d.ts b/amplify/functions/fetchDocuments/node_modules/@aws-sdk/types/dist-types/ts3.4/checksum.d.ts new file mode 100644 index 0000000..f805d72 --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@aws-sdk/types/dist-types/ts3.4/checksum.d.ts @@ -0,0 +1 @@ +export { Checksum, ChecksumConstructor } from "@smithy/types"; diff --git a/amplify/functions/fetchDocuments/node_modules/@aws-sdk/types/dist-types/ts3.4/client.d.ts b/amplify/functions/fetchDocuments/node_modules/@aws-sdk/types/dist-types/ts3.4/client.d.ts new file mode 100644 index 0000000..d6b3dcf --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@aws-sdk/types/dist-types/ts3.4/client.d.ts @@ -0,0 +1 @@ +export { Client } from "@smithy/types"; diff --git a/amplify/functions/fetchDocuments/node_modules/@aws-sdk/types/dist-types/ts3.4/command.d.ts b/amplify/functions/fetchDocuments/node_modules/@aws-sdk/types/dist-types/ts3.4/command.d.ts new file mode 100644 index 0000000..3887267 --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@aws-sdk/types/dist-types/ts3.4/command.d.ts @@ -0,0 +1 @@ +export { Command } from "@smithy/types"; diff --git a/amplify/functions/fetchDocuments/node_modules/@aws-sdk/types/dist-types/ts3.4/connection.d.ts b/amplify/functions/fetchDocuments/node_modules/@aws-sdk/types/dist-types/ts3.4/connection.d.ts new file mode 100644 index 0000000..36ebd00 --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@aws-sdk/types/dist-types/ts3.4/connection.d.ts @@ -0,0 +1,6 @@ +export { + ConnectConfiguration, + ConnectionManager, + ConnectionManagerConfiguration, + ConnectionPool, +} from "@smithy/types"; diff --git 
a/amplify/functions/fetchDocuments/node_modules/@aws-sdk/types/dist-types/ts3.4/credentials.d.ts b/amplify/functions/fetchDocuments/node_modules/@aws-sdk/types/dist-types/ts3.4/credentials.d.ts new file mode 100644 index 0000000..6c91a35 --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@aws-sdk/types/dist-types/ts3.4/credentials.d.ts @@ -0,0 +1,13 @@ +import { Logger } from "@smithy/types"; +import { AwsCredentialIdentity } from "./identity"; +import { Provider } from "./util"; +export interface Credentials extends AwsCredentialIdentity {} +export type CredentialProvider = Provider; +export type CredentialProviderOptions = { + logger?: Logger; + parentClientConfig?: { + region?: string | Provider; + profile?: string; + [key: string]: unknown; + }; +}; diff --git a/amplify/functions/fetchDocuments/node_modules/@aws-sdk/types/dist-types/ts3.4/crypto.d.ts b/amplify/functions/fetchDocuments/node_modules/@aws-sdk/types/dist-types/ts3.4/crypto.d.ts new file mode 100644 index 0000000..dfe61bf --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@aws-sdk/types/dist-types/ts3.4/crypto.d.ts @@ -0,0 +1,7 @@ +export { + Hash, + HashConstructor, + StreamHasher, + randomValues, + SourceData, +} from "@smithy/types"; diff --git a/amplify/functions/fetchDocuments/node_modules/@aws-sdk/types/dist-types/ts3.4/dns.d.ts b/amplify/functions/fetchDocuments/node_modules/@aws-sdk/types/dist-types/ts3.4/dns.d.ts new file mode 100644 index 0000000..d899949 --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@aws-sdk/types/dist-types/ts3.4/dns.d.ts @@ -0,0 +1,19 @@ +export declare enum HostAddressType { + AAAA = "AAAA", + A = "A", +} +export interface HostAddress { + addressType: HostAddressType; + address: string; + hostName: string; + service?: string; +} +export interface HostResolverArguments { + hostName: string; + service?: string; +} +export interface HostResolver { + resolveAddress(args: HostResolverArguments): Promise; + 
reportFailureOnAddress(addr: HostAddress): void; + purgeCache(args?: HostResolverArguments): void; +} diff --git a/amplify/functions/fetchDocuments/node_modules/@aws-sdk/types/dist-types/ts3.4/encode.d.ts b/amplify/functions/fetchDocuments/node_modules/@aws-sdk/types/dist-types/ts3.4/encode.d.ts new file mode 100644 index 0000000..76966f9 --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@aws-sdk/types/dist-types/ts3.4/encode.d.ts @@ -0,0 +1,6 @@ +export { + MessageDecoder, + MessageEncoder, + AvailableMessage, + AvailableMessages, +} from "@smithy/types"; diff --git a/amplify/functions/fetchDocuments/node_modules/@aws-sdk/types/dist-types/ts3.4/endpoint.d.ts b/amplify/functions/fetchDocuments/node_modules/@aws-sdk/types/dist-types/ts3.4/endpoint.d.ts new file mode 100644 index 0000000..ff3c7de --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@aws-sdk/types/dist-types/ts3.4/endpoint.d.ts @@ -0,0 +1,9 @@ +export { + EndpointARN, + EndpointPartition, + EndpointURLScheme, + EndpointURL, + EndpointObjectProperty, + EndpointV2, + EndpointParameters, +} from "@smithy/types"; diff --git a/amplify/functions/fetchDocuments/node_modules/@aws-sdk/types/dist-types/ts3.4/eventStream.d.ts b/amplify/functions/fetchDocuments/node_modules/@aws-sdk/types/dist-types/ts3.4/eventStream.d.ts new file mode 100644 index 0000000..e4c04a9 --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@aws-sdk/types/dist-types/ts3.4/eventStream.d.ts @@ -0,0 +1,24 @@ +export { + Message, + MessageHeaders, + BooleanHeaderValue, + ByteHeaderValue, + ShortHeaderValue, + IntegerHeaderValue, + LongHeaderValue, + BinaryHeaderValue, + StringHeaderValue, + TimestampHeaderValue, + UuidHeaderValue, + MessageHeaderValue, + Int64, + EventStreamSerdeContext, + EventStreamMarshaller, + EventStreamMarshallerDeserFn, + EventStreamMarshallerSerFn, + EventStreamPayloadHandler, + EventStreamPayloadHandlerProvider, + EventStreamRequestSigner, + EventStreamSerdeProvider, + 
EventStreamSignerProvider, +} from "@smithy/types"; diff --git a/amplify/functions/fetchDocuments/node_modules/@aws-sdk/types/dist-types/ts3.4/extensions/index.d.ts b/amplify/functions/fetchDocuments/node_modules/@aws-sdk/types/dist-types/ts3.4/extensions/index.d.ts new file mode 100644 index 0000000..accf5ec --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@aws-sdk/types/dist-types/ts3.4/extensions/index.d.ts @@ -0,0 +1,5 @@ +import { Provider } from "@smithy/types"; +export interface AwsRegionExtensionConfiguration { + setRegion(region: Provider): void; + region(): Provider; +} diff --git a/amplify/functions/fetchDocuments/node_modules/@aws-sdk/types/dist-types/ts3.4/feature-ids.d.ts b/amplify/functions/fetchDocuments/node_modules/@aws-sdk/types/dist-types/ts3.4/feature-ids.d.ts new file mode 100644 index 0000000..6d57509 --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@aws-sdk/types/dist-types/ts3.4/feature-ids.d.ts @@ -0,0 +1,54 @@ +export type AwsSdkFeatures = Partial<{ + RESOURCE_MODEL: "A"; + WAITER: "B"; + PAGINATOR: "C"; + RETRY_MODE_LEGACY: "D"; + RETRY_MODE_STANDARD: "E"; + RETRY_MODE_ADAPTIVE: "F"; + S3_EXPRESS_BUCKET: "J"; + S3_ACCESS_GRANTS: "K"; + GZIP_REQUEST_COMPRESSION: "L"; + PROTOCOL_RPC_V2_CBOR: "M"; + ENDPOINT_OVERRIDE: "N"; + ACCOUNT_ID_ENDPOINT: "O"; + ACCOUNT_ID_MODE_PREFERRED: "P"; + ACCOUNT_ID_MODE_DISABLED: "Q"; + ACCOUNT_ID_MODE_REQUIRED: "R"; + SIGV4A_SIGNING: "S"; + FLEXIBLE_CHECKSUMS_REQ_CRC32: "U"; + FLEXIBLE_CHECKSUMS_REQ_CRC32C: "V"; + FLEXIBLE_CHECKSUMS_REQ_CRC64: "W"; + FLEXIBLE_CHECKSUMS_REQ_SHA1: "X"; + FLEXIBLE_CHECKSUMS_REQ_SHA256: "Y"; + FLEXIBLE_CHECKSUMS_REQ_WHEN_SUPPORTED: "Z"; + FLEXIBLE_CHECKSUMS_REQ_WHEN_REQUIRED: "a"; + FLEXIBLE_CHECKSUMS_RES_WHEN_SUPPORTED: "b"; + FLEXIBLE_CHECKSUMS_RES_WHEN_REQUIRED: "c"; + DDB_MAPPER: "d"; +}> & + AwsSdkCredentialsFeatures; +export type AwsSdkCredentialsFeatures = Partial<{ + RESOLVED_ACCOUNT_ID: "T"; + CREDENTIALS_CODE: "e"; + 
CREDENTIALS_ENV_VARS: "g"; + CREDENTIALS_ENV_VARS_STS_WEB_ID_TOKEN: "h"; + CREDENTIALS_STS_ASSUME_ROLE: "i"; + CREDENTIALS_STS_ASSUME_ROLE_SAML: "j"; + CREDENTIALS_STS_ASSUME_ROLE_WEB_ID: "k"; + CREDENTIALS_STS_FEDERATION_TOKEN: "l"; + CREDENTIALS_STS_SESSION_TOKEN: "m"; + CREDENTIALS_PROFILE: "n"; + CREDENTIALS_PROFILE_SOURCE_PROFILE: "o"; + CREDENTIALS_PROFILE_NAMED_PROVIDER: "p"; + CREDENTIALS_PROFILE_STS_WEB_ID_TOKEN: "q"; + CREDENTIALS_PROFILE_SSO: "r"; + CREDENTIALS_SSO: "s"; + CREDENTIALS_PROFILE_SSO_LEGACY: "t"; + CREDENTIALS_SSO_LEGACY: "u"; + CREDENTIALS_PROFILE_PROCESS: "v"; + CREDENTIALS_PROCESS: "w"; + CREDENTIALS_BOTO2_CONFIG_FILE: "x"; + CREDENTIALS_AWS_SDK_STORE: "y"; + CREDENTIALS_HTTP: "z"; + CREDENTIALS_IMDS: "0"; +}>; diff --git a/amplify/functions/fetchDocuments/node_modules/@aws-sdk/types/dist-types/ts3.4/function.d.ts b/amplify/functions/fetchDocuments/node_modules/@aws-sdk/types/dist-types/ts3.4/function.d.ts new file mode 100644 index 0000000..d6efac5 --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@aws-sdk/types/dist-types/ts3.4/function.d.ts @@ -0,0 +1,7 @@ +export type MergeFunctions = F1 extends (arg: infer A1) => infer R1 + ? F2 extends (arg: infer A2) => infer R2 + ? R1 extends Promise + ? 
(arg?: A1 & A2) => Promise & Awaited> + : (arg?: A1 & A2) => R1 & R2 + : never + : never; diff --git a/amplify/functions/fetchDocuments/node_modules/@aws-sdk/types/dist-types/ts3.4/http.d.ts b/amplify/functions/fetchDocuments/node_modules/@aws-sdk/types/dist-types/ts3.4/http.d.ts new file mode 100644 index 0000000..d8e0eab --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@aws-sdk/types/dist-types/ts3.4/http.d.ts @@ -0,0 +1,17 @@ +import { HttpResponse } from "@smithy/types"; +export { + Endpoint, + HeaderBag, + HttpHandlerOptions, + HttpMessage, + HttpRequest, + HttpResponse, + QueryParameterBag, +} from "@smithy/types"; +export interface Headers extends Map { + withHeader(headerName: string, headerValue: string): Headers; + withoutHeader(headerName: string): Headers; +} +export interface ResolvedHttpResponse extends HttpResponse { + body: string; +} diff --git a/amplify/functions/fetchDocuments/node_modules/@aws-sdk/types/dist-types/ts3.4/identity/AnonymousIdentity.d.ts b/amplify/functions/fetchDocuments/node_modules/@aws-sdk/types/dist-types/ts3.4/identity/AnonymousIdentity.d.ts new file mode 100644 index 0000000..5b175f6 --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@aws-sdk/types/dist-types/ts3.4/identity/AnonymousIdentity.d.ts @@ -0,0 +1,2 @@ +import { Identity } from "./Identity"; +export interface AnonymousIdentity extends Identity {} diff --git a/amplify/functions/fetchDocuments/node_modules/@aws-sdk/types/dist-types/ts3.4/identity/AwsCredentialIdentity.d.ts b/amplify/functions/fetchDocuments/node_modules/@aws-sdk/types/dist-types/ts3.4/identity/AwsCredentialIdentity.d.ts new file mode 100644 index 0000000..aaec358 --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@aws-sdk/types/dist-types/ts3.4/identity/AwsCredentialIdentity.d.ts @@ -0,0 +1,30 @@ +import { + AwsCredentialIdentity, + AwsCredentialIdentityProvider, + Logger, + RequestHandler, +} from "@smithy/types"; +import { AwsSdkCredentialsFeatures } 
from "../feature-ids"; +export { + AwsCredentialIdentity, + AwsCredentialIdentityProvider, + IdentityProvider, +} from "@smithy/types"; +export interface AwsIdentityProperties { + callerClientConfig?: { + credentials?: AwsCredentialIdentity | AwsCredentialIdentityProvider; + credentialDefaultProvider?: (input?: any) => AwsCredentialIdentityProvider; + logger?: Logger; + profile?: string; + region(): Promise; + requestHandler?: RequestHandler; + }; +} +export type RuntimeConfigIdentityProvider = ( + awsIdentityProperties?: AwsIdentityProperties +) => Promise; +export type RuntimeConfigAwsCredentialIdentityProvider = + RuntimeConfigIdentityProvider; +export type AttributedAwsCredentialIdentity = AwsCredentialIdentity & { + $source?: AwsSdkCredentialsFeatures; +}; diff --git a/amplify/functions/fetchDocuments/node_modules/@aws-sdk/types/dist-types/ts3.4/identity/Identity.d.ts b/amplify/functions/fetchDocuments/node_modules/@aws-sdk/types/dist-types/ts3.4/identity/Identity.d.ts new file mode 100644 index 0000000..4175fd3 --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@aws-sdk/types/dist-types/ts3.4/identity/Identity.d.ts @@ -0,0 +1 @@ +export { Identity, IdentityProvider } from "@smithy/types"; diff --git a/amplify/functions/fetchDocuments/node_modules/@aws-sdk/types/dist-types/ts3.4/identity/LoginIdentity.d.ts b/amplify/functions/fetchDocuments/node_modules/@aws-sdk/types/dist-types/ts3.4/identity/LoginIdentity.d.ts new file mode 100644 index 0000000..3258bbb --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@aws-sdk/types/dist-types/ts3.4/identity/LoginIdentity.d.ts @@ -0,0 +1,6 @@ +import { Identity, IdentityProvider } from "./Identity"; +export interface LoginIdentity extends Identity { + readonly username: string; + readonly password: string; +} +export type LoginIdentityProvider = IdentityProvider; diff --git a/amplify/functions/fetchDocuments/node_modules/@aws-sdk/types/dist-types/ts3.4/identity/TokenIdentity.d.ts 
b/amplify/functions/fetchDocuments/node_modules/@aws-sdk/types/dist-types/ts3.4/identity/TokenIdentity.d.ts new file mode 100644 index 0000000..66301bc --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@aws-sdk/types/dist-types/ts3.4/identity/TokenIdentity.d.ts @@ -0,0 +1 @@ +export { TokenIdentity, TokenIdentityProvider } from "@smithy/types"; diff --git a/amplify/functions/fetchDocuments/node_modules/@aws-sdk/types/dist-types/ts3.4/identity/index.d.ts b/amplify/functions/fetchDocuments/node_modules/@aws-sdk/types/dist-types/ts3.4/identity/index.d.ts new file mode 100644 index 0000000..863e78e --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@aws-sdk/types/dist-types/ts3.4/identity/index.d.ts @@ -0,0 +1,5 @@ +export * from "./AnonymousIdentity"; +export * from "./AwsCredentialIdentity"; +export * from "./Identity"; +export * from "./LoginIdentity"; +export * from "./TokenIdentity"; diff --git a/amplify/functions/fetchDocuments/node_modules/@aws-sdk/types/dist-types/ts3.4/index.d.ts b/amplify/functions/fetchDocuments/node_modules/@aws-sdk/types/dist-types/ts3.4/index.d.ts new file mode 100644 index 0000000..a7f99d9 --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@aws-sdk/types/dist-types/ts3.4/index.d.ts @@ -0,0 +1,34 @@ +export * from "./abort"; +export * from "./auth"; +export * from "./blob/blob-types"; +export * from "./checksum"; +export * from "./client"; +export * from "./command"; +export * from "./connection"; +export * from "./credentials"; +export * from "./crypto"; +export * from "./dns"; +export * from "./encode"; +export * from "./endpoint"; +export * from "./eventStream"; +export * from "./extensions"; +export * from "./feature-ids"; +export * from "./function"; +export * from "./http"; +export * from "./identity"; +export * from "./logger"; +export * from "./middleware"; +export * from "./pagination"; +export * from "./profile"; +export * from "./request"; +export * from "./response"; +export * from 
"./retry"; +export * from "./serde"; +export * from "./shapes"; +export * from "./signature"; +export * from "./stream"; +export * from "./token"; +export * from "./transfer"; +export * from "./uri"; +export * from "./util"; +export * from "./waiter"; diff --git a/amplify/functions/fetchDocuments/node_modules/@aws-sdk/types/dist-types/ts3.4/logger.d.ts b/amplify/functions/fetchDocuments/node_modules/@aws-sdk/types/dist-types/ts3.4/logger.d.ts new file mode 100644 index 0000000..c714915 --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@aws-sdk/types/dist-types/ts3.4/logger.d.ts @@ -0,0 +1,15 @@ +import { Logger } from "@smithy/types"; +export { Logger } from "@smithy/types"; +export type LogLevel = + | "all" + | "trace" + | "debug" + | "log" + | "info" + | "warn" + | "error" + | "off"; +export interface LoggerOptions { + logger?: Logger; + logLevel?: LogLevel; +} diff --git a/amplify/functions/fetchDocuments/node_modules/@aws-sdk/types/dist-types/ts3.4/middleware.d.ts b/amplify/functions/fetchDocuments/node_modules/@aws-sdk/types/dist-types/ts3.4/middleware.d.ts new file mode 100644 index 0000000..e101e9b --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@aws-sdk/types/dist-types/ts3.4/middleware.d.ts @@ -0,0 +1,47 @@ +import { HandlerExecutionContext } from "@smithy/types"; +import { AwsSdkFeatures } from "./feature-ids"; +export { + AbsoluteLocation, + BuildHandler, + BuildHandlerArguments, + BuildHandlerOptions, + BuildHandlerOutput, + BuildMiddleware, + DeserializeHandler, + DeserializeHandlerArguments, + DeserializeHandlerOptions, + DeserializeHandlerOutput, + DeserializeMiddleware, + FinalizeHandler, + FinalizeHandlerArguments, + FinalizeHandlerOutput, + FinalizeRequestHandlerOptions, + FinalizeRequestMiddleware, + Handler, + HandlerExecutionContext, + HandlerOptions, + InitializeHandler, + InitializeHandlerArguments, + InitializeHandlerOptions, + InitializeHandlerOutput, + InitializeMiddleware, + MiddlewareStack, + 
MiddlewareType, + Pluggable, + Priority, + Relation, + RelativeLocation, + RelativeMiddlewareOptions, + SerializeHandler, + SerializeHandlerArguments, + SerializeHandlerOptions, + SerializeHandlerOutput, + SerializeMiddleware, + Step, + Terminalware, +} from "@smithy/types"; +export interface AwsHandlerExecutionContext extends HandlerExecutionContext { + __aws_sdk_context?: { + features?: AwsSdkFeatures; + }; +} diff --git a/amplify/functions/fetchDocuments/node_modules/@aws-sdk/types/dist-types/ts3.4/pagination.d.ts b/amplify/functions/fetchDocuments/node_modules/@aws-sdk/types/dist-types/ts3.4/pagination.d.ts new file mode 100644 index 0000000..af791b0 --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@aws-sdk/types/dist-types/ts3.4/pagination.d.ts @@ -0,0 +1 @@ +export { PaginationConfiguration, Paginator } from "@smithy/types"; diff --git a/amplify/functions/fetchDocuments/node_modules/@aws-sdk/types/dist-types/ts3.4/profile.d.ts b/amplify/functions/fetchDocuments/node_modules/@aws-sdk/types/dist-types/ts3.4/profile.d.ts new file mode 100644 index 0000000..b3813d8 --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@aws-sdk/types/dist-types/ts3.4/profile.d.ts @@ -0,0 +1,6 @@ +export { + IniSection, + Profile, + ParsedIniData, + SharedConfigFiles, +} from "@smithy/types"; diff --git a/amplify/functions/fetchDocuments/node_modules/@aws-sdk/types/dist-types/ts3.4/request.d.ts b/amplify/functions/fetchDocuments/node_modules/@aws-sdk/types/dist-types/ts3.4/request.d.ts new file mode 100644 index 0000000..5c6e793 --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@aws-sdk/types/dist-types/ts3.4/request.d.ts @@ -0,0 +1,4 @@ +export interface Request { + destination: URL; + body?: any; +} diff --git a/amplify/functions/fetchDocuments/node_modules/@aws-sdk/types/dist-types/ts3.4/response.d.ts b/amplify/functions/fetchDocuments/node_modules/@aws-sdk/types/dist-types/ts3.4/response.d.ts new file mode 100644 index 
0000000..4e5fcd0 --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@aws-sdk/types/dist-types/ts3.4/response.d.ts @@ -0,0 +1,4 @@ +export { MetadataBearer, ResponseMetadata } from "@smithy/types"; +export interface Response { + body: any; +} diff --git a/amplify/functions/fetchDocuments/node_modules/@aws-sdk/types/dist-types/ts3.4/retry.d.ts b/amplify/functions/fetchDocuments/node_modules/@aws-sdk/types/dist-types/ts3.4/retry.d.ts new file mode 100644 index 0000000..8fc946a --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@aws-sdk/types/dist-types/ts3.4/retry.d.ts @@ -0,0 +1,12 @@ +export { + ExponentialBackoffJitterType, + ExponentialBackoffStrategyOptions, + RetryBackoffStrategy, + RetryErrorInfo, + RetryErrorType, + RetryStrategyOptions, + RetryStrategyV2, + RetryToken, + StandardRetryBackoffStrategy, + StandardRetryToken, +} from "@smithy/types"; diff --git a/amplify/functions/fetchDocuments/node_modules/@aws-sdk/types/dist-types/ts3.4/serde.d.ts b/amplify/functions/fetchDocuments/node_modules/@aws-sdk/types/dist-types/ts3.4/serde.d.ts new file mode 100644 index 0000000..a7ed76f --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@aws-sdk/types/dist-types/ts3.4/serde.d.ts @@ -0,0 +1,16 @@ +export { + EndpointBearer, + StreamCollector, + SerdeContext, + ResponseDeserializer, + RequestSerializer, + SdkStreamMixin, + SdkStream, + WithSdkStreamMixin, + SdkStreamMixinInjector, + SdkStreamSerdeContext, +} from "@smithy/types"; +declare global { + export interface ReadableStream {} + export interface Blob {} +} diff --git a/amplify/functions/fetchDocuments/node_modules/@aws-sdk/types/dist-types/ts3.4/shapes.d.ts b/amplify/functions/fetchDocuments/node_modules/@aws-sdk/types/dist-types/ts3.4/shapes.d.ts new file mode 100644 index 0000000..d1efa9a --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@aws-sdk/types/dist-types/ts3.4/shapes.d.ts @@ -0,0 +1,6 @@ +export { + DocumentType, + RetryableTrait, + 
SmithyException, + SdkError, +} from "@smithy/types"; diff --git a/amplify/functions/fetchDocuments/node_modules/@aws-sdk/types/dist-types/ts3.4/signature.d.ts b/amplify/functions/fetchDocuments/node_modules/@aws-sdk/types/dist-types/ts3.4/signature.d.ts new file mode 100644 index 0000000..cbabd75 --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@aws-sdk/types/dist-types/ts3.4/signature.d.ts @@ -0,0 +1,15 @@ +export { + DateInput, + EventSigner, + EventSigningArguments, + FormattedEvent, + MessageSigner, + RequestSigningArguments, + RequestPresigner, + RequestPresigningArguments, + RequestSigner, + SignableMessage, + SignedMessage, + SigningArguments, + StringSigner, +} from "@smithy/types"; diff --git a/amplify/functions/fetchDocuments/node_modules/@aws-sdk/types/dist-types/ts3.4/stream.d.ts b/amplify/functions/fetchDocuments/node_modules/@aws-sdk/types/dist-types/ts3.4/stream.d.ts new file mode 100644 index 0000000..1b79413 --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@aws-sdk/types/dist-types/ts3.4/stream.d.ts @@ -0,0 +1,4 @@ +export { + GetAwsChunkedEncodingStream, + GetAwsChunkedEncodingStreamOptions, +} from "@smithy/types"; diff --git a/amplify/functions/fetchDocuments/node_modules/@aws-sdk/types/dist-types/ts3.4/token.d.ts b/amplify/functions/fetchDocuments/node_modules/@aws-sdk/types/dist-types/ts3.4/token.d.ts new file mode 100644 index 0000000..c33e506 --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@aws-sdk/types/dist-types/ts3.4/token.d.ts @@ -0,0 +1,4 @@ +import { TokenIdentity } from "./identity"; +import { Provider } from "./util"; +export interface Token extends TokenIdentity {} +export type TokenProvider = Provider; diff --git a/amplify/functions/fetchDocuments/node_modules/@aws-sdk/types/dist-types/ts3.4/transfer.d.ts b/amplify/functions/fetchDocuments/node_modules/@aws-sdk/types/dist-types/ts3.4/transfer.d.ts new file mode 100644 index 0000000..04a7f87 --- /dev/null +++ 
b/amplify/functions/fetchDocuments/node_modules/@aws-sdk/types/dist-types/ts3.4/transfer.d.ts @@ -0,0 +1,7 @@ +export { + RequestContext, + RequestHandler, + RequestHandlerMetadata, + RequestHandlerOutput, + RequestHandlerProtocol, +} from "@smithy/types"; diff --git a/amplify/functions/fetchDocuments/node_modules/@aws-sdk/types/dist-types/ts3.4/uri.d.ts b/amplify/functions/fetchDocuments/node_modules/@aws-sdk/types/dist-types/ts3.4/uri.d.ts new file mode 100644 index 0000000..297dfe4 --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@aws-sdk/types/dist-types/ts3.4/uri.d.ts @@ -0,0 +1 @@ +export { URI } from "@smithy/types"; diff --git a/amplify/functions/fetchDocuments/node_modules/@aws-sdk/types/dist-types/ts3.4/util.d.ts b/amplify/functions/fetchDocuments/node_modules/@aws-sdk/types/dist-types/ts3.4/util.d.ts new file mode 100644 index 0000000..e7e43e6 --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@aws-sdk/types/dist-types/ts3.4/util.d.ts @@ -0,0 +1,14 @@ +export { + Encoder, + Decoder, + Provider, + UserAgentPair, + UserAgent, + UrlParser, + MemoizedProvider, + BodyLengthCalculator, + RegionInfo, + RegionInfoProviderOptions, + RegionInfoProvider, + RetryStrategy, +} from "@smithy/types"; diff --git a/amplify/functions/fetchDocuments/node_modules/@aws-sdk/types/dist-types/ts3.4/waiter.d.ts b/amplify/functions/fetchDocuments/node_modules/@aws-sdk/types/dist-types/ts3.4/waiter.d.ts new file mode 100644 index 0000000..bb98020 --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@aws-sdk/types/dist-types/ts3.4/waiter.d.ts @@ -0,0 +1 @@ +export { WaiterConfiguration } from "@smithy/types"; diff --git a/amplify/functions/fetchDocuments/node_modules/@aws-sdk/types/dist-types/uri.d.ts b/amplify/functions/fetchDocuments/node_modules/@aws-sdk/types/dist-types/uri.d.ts new file mode 100644 index 0000000..297dfe4 --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@aws-sdk/types/dist-types/uri.d.ts @@ -0,0 +1 @@ 
+export { URI } from "@smithy/types"; diff --git a/amplify/functions/fetchDocuments/node_modules/@aws-sdk/types/dist-types/util.d.ts b/amplify/functions/fetchDocuments/node_modules/@aws-sdk/types/dist-types/util.d.ts new file mode 100644 index 0000000..fd059b6 --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@aws-sdk/types/dist-types/util.d.ts @@ -0,0 +1 @@ +export { Encoder, Decoder, Provider, UserAgentPair, UserAgent, UrlParser, MemoizedProvider, BodyLengthCalculator, RegionInfo, RegionInfoProviderOptions, RegionInfoProvider, RetryStrategy, } from "@smithy/types"; diff --git a/amplify/functions/fetchDocuments/node_modules/@aws-sdk/types/dist-types/waiter.d.ts b/amplify/functions/fetchDocuments/node_modules/@aws-sdk/types/dist-types/waiter.d.ts new file mode 100644 index 0000000..bb98020 --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@aws-sdk/types/dist-types/waiter.d.ts @@ -0,0 +1 @@ +export { WaiterConfiguration } from "@smithy/types"; diff --git a/amplify/functions/fetchDocuments/node_modules/@aws-sdk/types/package.json b/amplify/functions/fetchDocuments/node_modules/@aws-sdk/types/package.json new file mode 100755 index 0000000..eaf5c44 --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@aws-sdk/types/package.json @@ -0,0 +1,56 @@ +{ + "name": "@aws-sdk/types", + "version": "3.775.0", + "main": "./dist-cjs/index.js", + "module": "./dist-es/index.js", + "types": "./dist-types/index.d.ts", + "description": "Types for the AWS SDK", + "scripts": { + "build": "concurrently 'yarn:build:cjs' 'yarn:build:es' 'yarn:build:types'", + "build:cjs": "node ../../scripts/compilation/inline types", + "build:es": "tsc -p tsconfig.es.json", + "build:include:deps": "lerna run --scope $npm_package_name --include-dependencies build", + "build:types": "tsc -p tsconfig.types.json", + "build:types:downlevel": "downlevel-dts dist-types dist-types/ts3.4", + "clean": "rimraf ./dist-* && rimraf *.tsbuildinfo", + "extract:docs": 
"api-extractor run --local", + "test": "tsc -p tsconfig.test.json" + }, + "author": { + "name": "AWS SDK for JavaScript Team", + "url": "https://aws.amazon.com/javascript/" + }, + "license": "Apache-2.0", + "engines": { + "node": ">=18.0.0" + }, + "typesVersions": { + "<4.0": { + "dist-types/*": [ + "dist-types/ts3.4/*" + ] + } + }, + "files": [ + "dist-*/**" + ], + "homepage": "https://github.com/aws/aws-sdk-js-v3/tree/main/packages/types", + "repository": { + "type": "git", + "url": "https://github.com/aws/aws-sdk-js-v3.git", + "directory": "packages/types" + }, + "dependencies": { + "@smithy/types": "^4.2.0", + "tslib": "^2.6.2" + }, + "devDependencies": { + "@tsconfig/recommended": "1.0.1", + "concurrently": "7.0.0", + "downlevel-dts": "0.10.1", + "rimraf": "3.0.2", + "typescript": "~5.2.2" + }, + "browser": {}, + "react-native": {} +} diff --git a/amplify/functions/fetchDocuments/node_modules/@aws-sdk/util-dynamodb/LICENSE b/amplify/functions/fetchDocuments/node_modules/@aws-sdk/util-dynamodb/LICENSE new file mode 100644 index 0000000..7b6491b --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@aws-sdk/util-dynamodb/LICENSE @@ -0,0 +1,201 @@ +Apache License + Version 2.0, January 2004 + http://www.apache.org/licenses/ + + TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION + + 1. Definitions. + + "License" shall mean the terms and conditions for use, reproduction, + and distribution as defined by Sections 1 through 9 of this document. + + "Licensor" shall mean the copyright owner or entity authorized by + the copyright owner that is granting the License. + + "Legal Entity" shall mean the union of the acting entity and all + other entities that control, are controlled by, or are under common + control with that entity. 
For the purposes of this definition, + "control" means (i) the power, direct or indirect, to cause the + direction or management of such entity, whether by contract or + otherwise, or (ii) ownership of fifty percent (50%) or more of the + outstanding shares, or (iii) beneficial ownership of such entity. + + "You" (or "Your") shall mean an individual or Legal Entity + exercising permissions granted by this License. + + "Source" form shall mean the preferred form for making modifications, + including but not limited to software source code, documentation + source, and configuration files. + + "Object" form shall mean any form resulting from mechanical + transformation or translation of a Source form, including but + not limited to compiled object code, generated documentation, + and conversions to other media types. + + "Work" shall mean the work of authorship, whether in Source or + Object form, made available under the License, as indicated by a + copyright notice that is included in or attached to the work + (an example is provided in the Appendix below). + + "Derivative Works" shall mean any work, whether in Source or Object + form, that is based on (or derived from) the Work and for which the + editorial revisions, annotations, elaborations, or other modifications + represent, as a whole, an original work of authorship. For the purposes + of this License, Derivative Works shall not include works that remain + separable from, or merely link (or bind by name) to the interfaces of, + the Work and Derivative Works thereof. + + "Contribution" shall mean any work of authorship, including + the original version of the Work and any modifications or additions + to that Work or Derivative Works thereof, that is intentionally + submitted to Licensor for inclusion in the Work by the copyright owner + or by an individual or Legal Entity authorized to submit on behalf of + the copyright owner. 
For the purposes of this definition, "submitted" + means any form of electronic, verbal, or written communication sent + to the Licensor or its representatives, including but not limited to + communication on electronic mailing lists, source code control systems, + and issue tracking systems that are managed by, or on behalf of, the + Licensor for the purpose of discussing and improving the Work, but + excluding communication that is conspicuously marked or otherwise + designated in writing by the copyright owner as "Not a Contribution." + + "Contributor" shall mean Licensor and any individual or Legal Entity + on behalf of whom a Contribution has been received by Licensor and + subsequently incorporated within the Work. + + 2. Grant of Copyright License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + copyright license to reproduce, prepare Derivative Works of, + publicly display, publicly perform, sublicense, and distribute the + Work and such Derivative Works in Source or Object form. + + 3. Grant of Patent License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + (except as stated in this section) patent license to make, have made, + use, offer to sell, sell, import, and otherwise transfer the Work, + where such license applies only to those patent claims licensable + by such Contributor that are necessarily infringed by their + Contribution(s) alone or by combination of their Contribution(s) + with the Work to which such Contribution(s) was submitted. 
If You + institute patent litigation against any entity (including a + cross-claim or counterclaim in a lawsuit) alleging that the Work + or a Contribution incorporated within the Work constitutes direct + or contributory patent infringement, then any patent licenses + granted to You under this License for that Work shall terminate + as of the date such litigation is filed. + + 4. Redistribution. You may reproduce and distribute copies of the + Work or Derivative Works thereof in any medium, with or without + modifications, and in Source or Object form, provided that You + meet the following conditions: + + (a) You must give any other recipients of the Work or + Derivative Works a copy of this License; and + + (b) You must cause any modified files to carry prominent notices + stating that You changed the files; and + + (c) You must retain, in the Source form of any Derivative Works + that You distribute, all copyright, patent, trademark, and + attribution notices from the Source form of the Work, + excluding those notices that do not pertain to any part of + the Derivative Works; and + + (d) If the Work includes a "NOTICE" text file as part of its + distribution, then any Derivative Works that You distribute must + include a readable copy of the attribution notices contained + within such NOTICE file, excluding those notices that do not + pertain to any part of the Derivative Works, in at least one + of the following places: within a NOTICE text file distributed + as part of the Derivative Works; within the Source form or + documentation, if provided along with the Derivative Works; or, + within a display generated by the Derivative Works, if and + wherever such third-party notices normally appear. The contents + of the NOTICE file are for informational purposes only and + do not modify the License. 
You may add Your own attribution + notices within Derivative Works that You distribute, alongside + or as an addendum to the NOTICE text from the Work, provided + that such additional attribution notices cannot be construed + as modifying the License. + + You may add Your own copyright statement to Your modifications and + may provide additional or different license terms and conditions + for use, reproduction, or distribution of Your modifications, or + for any such Derivative Works as a whole, provided Your use, + reproduction, and distribution of the Work otherwise complies with + the conditions stated in this License. + + 5. Submission of Contributions. Unless You explicitly state otherwise, + any Contribution intentionally submitted for inclusion in the Work + by You to the Licensor shall be under the terms and conditions of + this License, without any additional terms or conditions. + Notwithstanding the above, nothing herein shall supersede or modify + the terms of any separate license agreement you may have executed + with Licensor regarding such Contributions. + + 6. Trademarks. This License does not grant permission to use the trade + names, trademarks, service marks, or product names of the Licensor, + except as required for reasonable and customary use in describing the + origin of the Work and reproducing the content of the NOTICE file. + + 7. Disclaimer of Warranty. Unless required by applicable law or + agreed to in writing, Licensor provides the Work (and each + Contributor provides its Contributions) on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or + implied, including, without limitation, any warranties or conditions + of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A + PARTICULAR PURPOSE. You are solely responsible for determining the + appropriateness of using or redistributing the Work and assume any + risks associated with Your exercise of permissions under this License. + + 8. 
Limitation of Liability. In no event and under no legal theory, + whether in tort (including negligence), contract, or otherwise, + unless required by applicable law (such as deliberate and grossly + negligent acts) or agreed to in writing, shall any Contributor be + liable to You for damages, including any direct, indirect, special, + incidental, or consequential damages of any character arising as a + result of this License or out of the use or inability to use the + Work (including but not limited to damages for loss of goodwill, + work stoppage, computer failure or malfunction, or any and all + other commercial damages or losses), even if such Contributor + has been advised of the possibility of such damages. + + 9. Accepting Warranty or Additional Liability. While redistributing + the Work or Derivative Works thereof, You may choose to offer, + and charge a fee for, acceptance of support, warranty, indemnity, + or other liability obligations and/or rights consistent with this + License. However, in accepting such obligations, You may act only + on Your own behalf and on Your sole responsibility, not on behalf + of any other Contributor, and only if You agree to indemnify, + defend, and hold each Contributor harmless for any liability + incurred by, or claims asserted against, such Contributor by reason + of your accepting any such warranty or additional liability. + + END OF TERMS AND CONDITIONS + + APPENDIX: How to apply the Apache License to your work. + + To apply the Apache License to your work, attach the following + boilerplate notice, with the fields enclosed by brackets "{}" + replaced with your own identifying information. (Don't include + the brackets!) The text should be enclosed in the appropriate + comment syntax for the file format. We also recommend that a + file or class name and description of purpose be included on the + same "printed page" as the copyright notice for easier + identification within third-party archives. 
+ + Copyright 2018-2020 Amazon.com, Inc. or its affiliates. All Rights Reserved. + + Licensed under the Apache License, Version 2.0 (the "License"); + you may not use this file except in compliance with the License. + You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + + Unless required by applicable law or agreed to in writing, software + distributed under the License is distributed on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + See the License for the specific language governing permissions and + limitations under the License. \ No newline at end of file diff --git a/amplify/functions/fetchDocuments/node_modules/@aws-sdk/util-dynamodb/README.md b/amplify/functions/fetchDocuments/node_modules/@aws-sdk/util-dynamodb/README.md new file mode 100644 index 0000000..1d6d61a --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@aws-sdk/util-dynamodb/README.md @@ -0,0 +1,50 @@ +# @aws-sdk/util-dynamodb + +[![NPM version](https://img.shields.io/npm/v/@aws-sdk/util-dynamodb/latest.svg)](https://www.npmjs.com/package/@aws-sdk/util-dynamodb) +[![NPM downloads](https://img.shields.io/npm/dm/@aws-sdk/util-dynamodb.svg)](https://www.npmjs.com/package/@aws-sdk/util-dynamodb) + +This package provides utilities to be used with `@aws-sdk/client-dynamodb` + +If you are looking for DynamoDB Document client, please check +[@aws-sdk/lib-dynamodb](https://www.npmjs.com/package/@aws-sdk/lib-dynamodb) +which automatically performs the necessary marshalling and unmarshalling. 
+ +## Convert JavaScript object into DynamoDB Record + +```js +const { DynamoDB } = require("@aws-sdk/client-dynamodb"); +const { marshall } = require("@aws-sdk/util-dynamodb"); + +const client = new DynamoDB(clientParams); +const params = { + TableName: "Table", + Item: marshall({ + HashKey: "hashKey", + NumAttribute: 1, + BoolAttribute: true, + ListAttribute: [1, "two", false], + MapAttribute: { foo: "bar" }, + NullAttribute: null, + }), +}; + +await client.putItem(params); +``` + +## Convert DynamoDB Record into JavaScript object + +```js +const { DynamoDB } = require("@aws-sdk/client-dynamodb"); +const { marshall, unmarshall } = require("@aws-sdk/util-dynamodb"); + +const client = new DynamoDB(clientParams); +const params = { + TableName: "Table", + Key: marshall({ + HashKey: "hashKey", + }), +}; + +const { Item } = await client.getItem(params); +unmarshall(Item); +``` diff --git a/amplify/functions/fetchDocuments/node_modules/@aws-sdk/util-dynamodb/dist-cjs/index.js b/amplify/functions/fetchDocuments/node_modules/@aws-sdk/util-dynamodb/dist-cjs/index.js new file mode 100644 index 0000000..955685c --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@aws-sdk/util-dynamodb/dist-cjs/index.js @@ -0,0 +1,350 @@ +"use strict"; +var __defProp = Object.defineProperty; +var __getOwnPropDesc = Object.getOwnPropertyDescriptor; +var __getOwnPropNames = Object.getOwnPropertyNames; +var __hasOwnProp = Object.prototype.hasOwnProperty; +var __name = (target, value) => __defProp(target, "name", { value, configurable: true }); +var __export = (target, all) => { + for (var name in all) + __defProp(target, name, { get: all[name], enumerable: true }); +}; +var __copyProps = (to, from, except, desc) => { + if (from && typeof from === "object" || typeof from === "function") { + for (let key of __getOwnPropNames(from)) + if (!__hasOwnProp.call(to, key) && key !== except) + __defProp(to, key, { get: () => from[key], enumerable: !(desc = __getOwnPropDesc(from, key)) || 
desc.enumerable }); + } + return to; +}; +var __toCommonJS = (mod) => __copyProps(__defProp({}, "__esModule", { value: true }), mod); + +// src/index.ts +var index_exports = {}; +__export(index_exports, { + NumberValueImpl: () => NumberValue, + convertToAttr: () => convertToAttr, + convertToNative: () => convertToNative, + marshall: () => marshall, + unmarshall: () => unmarshall +}); +module.exports = __toCommonJS(index_exports); + +// src/NumberValue.ts +var NumberValue = class _NumberValue { + static { + __name(this, "NumberValue"); + } + value; + /** + * This class does not validate that your string input is a valid number. + * + * @param value - a precise number, or any BigInt or string, or AttributeValue. + */ + constructor(value) { + if (typeof value === "object" && "N" in value) { + this.value = String(value.N); + } else { + this.value = String(value); + } + const valueOf = typeof value.valueOf() === "number" ? value.valueOf() : 0; + const imprecise = valueOf > Number.MAX_SAFE_INTEGER || valueOf < Number.MIN_SAFE_INTEGER || Math.abs(valueOf) === Infinity || Number.isNaN(valueOf); + if (imprecise) { + throw new Error( + `NumberValue should not be initialized with an imprecise number=${valueOf}. Use a string instead.` + ); + } + } + /** + * This class does not validate that your string input is a valid number. + * + * @param value - a precise number, or any BigInt or string, or AttributeValue. + */ + static from(value) { + return new _NumberValue(value); + } + /** + * @returns the AttributeValue form for DynamoDB. + */ + toAttributeValue() { + return { + N: this.toString() + }; + } + /** + * @returns BigInt representation. + * + * @throws SyntaxError if the string representation is not convertable to a BigInt. + */ + toBigInt() { + const stringValue = this.toString(); + return BigInt(stringValue); + } + /** + * @override + * + * @returns string representation. This is the canonical format in DynamoDB. 
+ */ + toString() { + return String(this.value); + } + /** + * @override + */ + valueOf() { + return this.toString(); + } +}; + +// src/convertToAttr.ts +var convertToAttr = /* @__PURE__ */ __name((data, options) => { + if (data === void 0) { + throw new Error(`Pass options.removeUndefinedValues=true to remove undefined values from map/array/set.`); + } else if (data === null && typeof data === "object") { + return convertToNullAttr(); + } else if (Array.isArray(data)) { + return convertToListAttr(data, options); + } else if (data?.constructor?.name === "Set") { + return convertToSetAttr(data, options); + } else if (data?.constructor?.name === "Map") { + return convertToMapAttrFromIterable(data, options); + } else if (data?.constructor?.name === "Object" || // for object which is result of Object.create(null), which doesn't have constructor defined + !data.constructor && typeof data === "object") { + return convertToMapAttrFromEnumerableProps(data, options); + } else if (isBinary(data)) { + if (data.length === 0 && options?.convertEmptyValues) { + return convertToNullAttr(); + } + return convertToBinaryAttr(data); + } else if (typeof data === "boolean" || data?.constructor?.name === "Boolean") { + return { BOOL: data.valueOf() }; + } else if (typeof data === "number" || data?.constructor?.name === "Number") { + return convertToNumberAttr(data, options); + } else if (data instanceof NumberValue) { + return data.toAttributeValue(); + } else if (typeof data === "bigint") { + return convertToBigIntAttr(data); + } else if (typeof data === "string" || data?.constructor?.name === "String") { + if (data.length === 0 && options?.convertEmptyValues) { + return convertToNullAttr(); + } + return convertToStringAttr(data); + } else if (options?.convertClassInstanceToMap && typeof data === "object") { + return convertToMapAttrFromEnumerableProps(data, options); + } + throw new Error( + `Unsupported type passed: ${data}. 
Pass options.convertClassInstanceToMap=true to marshall typeof object as map attribute.` + ); +}, "convertToAttr"); +var convertToListAttr = /* @__PURE__ */ __name((data, options) => ({ + L: data.filter( + (item) => typeof item !== "function" && (!options?.removeUndefinedValues || options?.removeUndefinedValues && item !== void 0) + ).map((item) => convertToAttr(item, options)) +}), "convertToListAttr"); +var convertToSetAttr = /* @__PURE__ */ __name((set, options) => { + const setToOperate = options?.removeUndefinedValues ? new Set([...set].filter((value) => value !== void 0)) : set; + if (!options?.removeUndefinedValues && setToOperate.has(void 0)) { + throw new Error(`Pass options.removeUndefinedValues=true to remove undefined values from map/array/set.`); + } + if (setToOperate.size === 0) { + if (options?.convertEmptyValues) { + return convertToNullAttr(); + } + throw new Error(`Pass a non-empty set, or options.convertEmptyValues=true.`); + } + const item = setToOperate.values().next().value; + if (item instanceof NumberValue) { + return { + NS: Array.from(setToOperate).map((_) => _.toString()) + }; + } else if (typeof item === "number") { + return { + NS: Array.from(setToOperate).map((num) => convertToNumberAttr(num, options)).map((item2) => item2.N) + }; + } else if (typeof item === "bigint") { + return { + NS: Array.from(setToOperate).map(convertToBigIntAttr).map((item2) => item2.N) + }; + } else if (typeof item === "string") { + return { + SS: Array.from(setToOperate).map(convertToStringAttr).map((item2) => item2.S) + }; + } else if (isBinary(item)) { + return { + // Do not alter binary data passed https://github.com/aws/aws-sdk-js-v3/issues/1530 + // @ts-expect-error Type 'ArrayBuffer' is not assignable to type 'Uint8Array' + BS: Array.from(setToOperate).map(convertToBinaryAttr).map((item2) => item2.B) + }; + } else { + throw new Error(`Only Number Set (NS), Binary Set (BS) or String Set (SS) are allowed.`); + } +}, "convertToSetAttr"); +var 
convertToMapAttrFromIterable = /* @__PURE__ */ __name((data, options) => ({ + M: ((data2) => { + const map = {}; + for (const [key, value] of data2) { + if (typeof value !== "function" && (value !== void 0 || !options?.removeUndefinedValues)) { + map[key] = convertToAttr(value, options); + } + } + return map; + })(data) +}), "convertToMapAttrFromIterable"); +var convertToMapAttrFromEnumerableProps = /* @__PURE__ */ __name((data, options) => ({ + M: ((data2) => { + const map = {}; + for (const key in data2) { + const value = data2[key]; + if (typeof value !== "function" && (value !== void 0 || !options?.removeUndefinedValues)) { + map[key] = convertToAttr(value, options); + } + } + return map; + })(data) +}), "convertToMapAttrFromEnumerableProps"); +var convertToNullAttr = /* @__PURE__ */ __name(() => ({ NULL: true }), "convertToNullAttr"); +var convertToBinaryAttr = /* @__PURE__ */ __name((data) => ({ B: data }), "convertToBinaryAttr"); +var convertToStringAttr = /* @__PURE__ */ __name((data) => ({ S: data.toString() }), "convertToStringAttr"); +var convertToBigIntAttr = /* @__PURE__ */ __name((data) => ({ N: data.toString() }), "convertToBigIntAttr"); +var validateBigIntAndThrow = /* @__PURE__ */ __name((errorPrefix) => { + throw new Error(`${errorPrefix} Use NumberValue from @aws-sdk/lib-dynamodb.`); +}, "validateBigIntAndThrow"); +var convertToNumberAttr = /* @__PURE__ */ __name((num, options) => { + if ([Number.NaN, Number.POSITIVE_INFINITY, Number.NEGATIVE_INFINITY].map((val) => val.toString()).includes(num.toString())) { + throw new Error(`Special numeric value ${num.toString()} is not allowed`); + } else if (!options?.allowImpreciseNumbers) { + if (Number(num) > Number.MAX_SAFE_INTEGER) { + validateBigIntAndThrow(`Number ${num.toString()} is greater than Number.MAX_SAFE_INTEGER.`); + } else if (Number(num) < Number.MIN_SAFE_INTEGER) { + validateBigIntAndThrow(`Number ${num.toString()} is lesser than Number.MIN_SAFE_INTEGER.`); + } + } + return { N: 
num.toString() }; +}, "convertToNumberAttr"); +var isBinary = /* @__PURE__ */ __name((data) => { + const binaryTypes = [ + "ArrayBuffer", + "Blob", + "Buffer", + "DataView", + "File", + "Int8Array", + "Uint8Array", + "Uint8ClampedArray", + "Int16Array", + "Uint16Array", + "Int32Array", + "Uint32Array", + "Float32Array", + "Float64Array", + "BigInt64Array", + "BigUint64Array" + ]; + if (data?.constructor) { + return binaryTypes.includes(data.constructor.name); + } + return false; +}, "isBinary"); + +// src/convertToNative.ts +var convertToNative = /* @__PURE__ */ __name((data, options) => { + for (const [key, value] of Object.entries(data)) { + if (value !== void 0) { + switch (key) { + case "NULL": + return null; + case "BOOL": + return Boolean(value); + case "N": + return convertNumber(value, options); + case "B": + return convertBinary(value); + case "S": + return convertString(value); + case "L": + return convertList(value, options); + case "M": + return convertMap(value, options); + case "NS": + return new Set(value.map((item) => convertNumber(item, options))); + case "BS": + return new Set(value.map(convertBinary)); + case "SS": + return new Set(value.map(convertString)); + default: + throw new Error(`Unsupported type passed: ${key}`); + } + } + } + throw new Error(`No value defined: ${JSON.stringify(data)}`); +}, "convertToNative"); +var convertNumber = /* @__PURE__ */ __name((numString, options) => { + if (typeof options?.wrapNumbers === "function") { + return options?.wrapNumbers(numString); + } + if (options?.wrapNumbers) { + return NumberValue.from(numString); + } + const num = Number(numString); + const infinityValues = [Number.POSITIVE_INFINITY, Number.NEGATIVE_INFINITY]; + const isLargeFiniteNumber = (num > Number.MAX_SAFE_INTEGER || num < Number.MIN_SAFE_INTEGER) && !infinityValues.includes(num); + if (isLargeFiniteNumber) { + if (typeof BigInt === "function") { + try { + return BigInt(numString); + } catch (error) { + throw new Error(`${numString} 
can't be converted to BigInt. Set options.wrapNumbers to get string value.`); + } + } else { + throw new Error(`${numString} is outside SAFE_INTEGER bounds. Set options.wrapNumbers to get string value.`); + } + } + return num; +}, "convertNumber"); +var convertString = /* @__PURE__ */ __name((stringValue) => stringValue, "convertString"); +var convertBinary = /* @__PURE__ */ __name((binaryValue) => binaryValue, "convertBinary"); +var convertList = /* @__PURE__ */ __name((list, options) => list.map((item) => convertToNative(item, options)), "convertList"); +var convertMap = /* @__PURE__ */ __name((map, options) => Object.entries(map).reduce( + (acc, [key, value]) => (acc[key] = convertToNative(value, options), acc), + {} +), "convertMap"); + +// src/marshall.ts +function marshall(data, options) { + const attributeValue = convertToAttr(data, options); + const [key, value] = Object.entries(attributeValue)[0]; + switch (key) { + case "M": + case "L": + return options?.convertTopLevelContainer ? 
attributeValue : value; + case "SS": + case "NS": + case "BS": + case "S": + case "N": + case "B": + case "NULL": + case "BOOL": + case "$unknown": + default: + return attributeValue; + } +} +__name(marshall, "marshall"); + +// src/unmarshall.ts +var unmarshall = /* @__PURE__ */ __name((data, options) => { + if (options?.convertWithoutMapWrapper) { + return convertToNative(data, options); + } + return convertToNative({ M: data }, options); +}, "unmarshall"); +// Annotate the CommonJS export names for ESM import in node: + +0 && (module.exports = { + NumberValueImpl, + convertToAttr, + convertToNative, + marshall, + unmarshall +}); + diff --git a/amplify/functions/fetchDocuments/node_modules/@aws-sdk/util-dynamodb/dist-es/NumberValue.js b/amplify/functions/fetchDocuments/node_modules/@aws-sdk/util-dynamodb/dist-es/NumberValue.js new file mode 100644 index 0000000..a9df9f8 --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@aws-sdk/util-dynamodb/dist-es/NumberValue.js @@ -0,0 +1,37 @@ +export class NumberValue { + value; + constructor(value) { + if (typeof value === "object" && "N" in value) { + this.value = String(value.N); + } + else { + this.value = String(value); + } + const valueOf = typeof value.valueOf() === "number" ? value.valueOf() : 0; + const imprecise = valueOf > Number.MAX_SAFE_INTEGER || + valueOf < Number.MIN_SAFE_INTEGER || + Math.abs(valueOf) === Infinity || + Number.isNaN(valueOf); + if (imprecise) { + throw new Error(`NumberValue should not be initialized with an imprecise number=${valueOf}. 
Use a string instead.`); + } + } + static from(value) { + return new NumberValue(value); + } + toAttributeValue() { + return { + N: this.toString(), + }; + } + toBigInt() { + const stringValue = this.toString(); + return BigInt(stringValue); + } + toString() { + return String(this.value); + } + valueOf() { + return this.toString(); + } +} diff --git a/amplify/functions/fetchDocuments/node_modules/@aws-sdk/util-dynamodb/dist-es/convertToAttr.js b/amplify/functions/fetchDocuments/node_modules/@aws-sdk/util-dynamodb/dist-es/convertToAttr.js new file mode 100644 index 0000000..62a888f --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@aws-sdk/util-dynamodb/dist-es/convertToAttr.js @@ -0,0 +1,175 @@ +import { NumberValue } from "./NumberValue"; +export const convertToAttr = (data, options) => { + if (data === undefined) { + throw new Error(`Pass options.removeUndefinedValues=true to remove undefined values from map/array/set.`); + } + else if (data === null && typeof data === "object") { + return convertToNullAttr(); + } + else if (Array.isArray(data)) { + return convertToListAttr(data, options); + } + else if (data?.constructor?.name === "Set") { + return convertToSetAttr(data, options); + } + else if (data?.constructor?.name === "Map") { + return convertToMapAttrFromIterable(data, options); + } + else if (data?.constructor?.name === "Object" || + (!data.constructor && typeof data === "object")) { + return convertToMapAttrFromEnumerableProps(data, options); + } + else if (isBinary(data)) { + if (data.length === 0 && options?.convertEmptyValues) { + return convertToNullAttr(); + } + return convertToBinaryAttr(data); + } + else if (typeof data === "boolean" || data?.constructor?.name === "Boolean") { + return { BOOL: data.valueOf() }; + } + else if (typeof data === "number" || data?.constructor?.name === "Number") { + return convertToNumberAttr(data, options); + } + else if (data instanceof NumberValue) { + return data.toAttributeValue(); + } + else if 
(typeof data === "bigint") { + return convertToBigIntAttr(data); + } + else if (typeof data === "string" || data?.constructor?.name === "String") { + if (data.length === 0 && options?.convertEmptyValues) { + return convertToNullAttr(); + } + return convertToStringAttr(data); + } + else if (options?.convertClassInstanceToMap && typeof data === "object") { + return convertToMapAttrFromEnumerableProps(data, options); + } + throw new Error(`Unsupported type passed: ${data}. Pass options.convertClassInstanceToMap=true to marshall typeof object as map attribute.`); +}; +const convertToListAttr = (data, options) => ({ + L: data + .filter((item) => typeof item !== "function" && + (!options?.removeUndefinedValues || (options?.removeUndefinedValues && item !== undefined))) + .map((item) => convertToAttr(item, options)), +}); +const convertToSetAttr = (set, options) => { + const setToOperate = options?.removeUndefinedValues ? new Set([...set].filter((value) => value !== undefined)) : set; + if (!options?.removeUndefinedValues && setToOperate.has(undefined)) { + throw new Error(`Pass options.removeUndefinedValues=true to remove undefined values from map/array/set.`); + } + if (setToOperate.size === 0) { + if (options?.convertEmptyValues) { + return convertToNullAttr(); + } + throw new Error(`Pass a non-empty set, or options.convertEmptyValues=true.`); + } + const item = setToOperate.values().next().value; + if (item instanceof NumberValue) { + return { + NS: Array.from(setToOperate).map((_) => _.toString()), + }; + } + else if (typeof item === "number") { + return { + NS: Array.from(setToOperate) + .map((num) => convertToNumberAttr(num, options)) + .map((item) => item.N), + }; + } + else if (typeof item === "bigint") { + return { + NS: Array.from(setToOperate) + .map(convertToBigIntAttr) + .map((item) => item.N), + }; + } + else if (typeof item === "string") { + return { + SS: Array.from(setToOperate) + .map(convertToStringAttr) + .map((item) => item.S), + }; + } + else if 
(isBinary(item)) { + return { + BS: Array.from(setToOperate) + .map(convertToBinaryAttr) + .map((item) => item.B), + }; + } + else { + throw new Error(`Only Number Set (NS), Binary Set (BS) or String Set (SS) are allowed.`); + } +}; +const convertToMapAttrFromIterable = (data, options) => ({ + M: ((data) => { + const map = {}; + for (const [key, value] of data) { + if (typeof value !== "function" && (value !== undefined || !options?.removeUndefinedValues)) { + map[key] = convertToAttr(value, options); + } + } + return map; + })(data), +}); +const convertToMapAttrFromEnumerableProps = (data, options) => ({ + M: ((data) => { + const map = {}; + for (const key in data) { + const value = data[key]; + if (typeof value !== "function" && (value !== undefined || !options?.removeUndefinedValues)) { + map[key] = convertToAttr(value, options); + } + } + return map; + })(data), +}); +const convertToNullAttr = () => ({ NULL: true }); +const convertToBinaryAttr = (data) => ({ B: data }); +const convertToStringAttr = (data) => ({ S: data.toString() }); +const convertToBigIntAttr = (data) => ({ N: data.toString() }); +const validateBigIntAndThrow = (errorPrefix) => { + throw new Error(`${errorPrefix} Use NumberValue from @aws-sdk/lib-dynamodb.`); +}; +const convertToNumberAttr = (num, options) => { + if ([Number.NaN, Number.POSITIVE_INFINITY, Number.NEGATIVE_INFINITY] + .map((val) => val.toString()) + .includes(num.toString())) { + throw new Error(`Special numeric value ${num.toString()} is not allowed`); + } + else if (!options?.allowImpreciseNumbers) { + if (Number(num) > Number.MAX_SAFE_INTEGER) { + validateBigIntAndThrow(`Number ${num.toString()} is greater than Number.MAX_SAFE_INTEGER.`); + } + else if (Number(num) < Number.MIN_SAFE_INTEGER) { + validateBigIntAndThrow(`Number ${num.toString()} is lesser than Number.MIN_SAFE_INTEGER.`); + } + } + return { N: num.toString() }; +}; +const isBinary = (data) => { + const binaryTypes = [ + "ArrayBuffer", + "Blob", + "Buffer", + 
"DataView", + "File", + "Int8Array", + "Uint8Array", + "Uint8ClampedArray", + "Int16Array", + "Uint16Array", + "Int32Array", + "Uint32Array", + "Float32Array", + "Float64Array", + "BigInt64Array", + "BigUint64Array", + ]; + if (data?.constructor) { + return binaryTypes.includes(data.constructor.name); + } + return false; +}; diff --git a/amplify/functions/fetchDocuments/node_modules/@aws-sdk/util-dynamodb/dist-es/convertToNative.js b/amplify/functions/fetchDocuments/node_modules/@aws-sdk/util-dynamodb/dist-es/convertToNative.js new file mode 100644 index 0000000..3e7b2c2 --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@aws-sdk/util-dynamodb/dist-es/convertToNative.js @@ -0,0 +1,61 @@ +import { NumberValue } from "./NumberValue"; +export const convertToNative = (data, options) => { + for (const [key, value] of Object.entries(data)) { + if (value !== undefined) { + switch (key) { + case "NULL": + return null; + case "BOOL": + return Boolean(value); + case "N": + return convertNumber(value, options); + case "B": + return convertBinary(value); + case "S": + return convertString(value); + case "L": + return convertList(value, options); + case "M": + return convertMap(value, options); + case "NS": + return new Set(value.map((item) => convertNumber(item, options))); + case "BS": + return new Set(value.map(convertBinary)); + case "SS": + return new Set(value.map(convertString)); + default: + throw new Error(`Unsupported type passed: ${key}`); + } + } + } + throw new Error(`No value defined: ${JSON.stringify(data)}`); +}; +const convertNumber = (numString, options) => { + if (typeof options?.wrapNumbers === "function") { + return options?.wrapNumbers(numString); + } + if (options?.wrapNumbers) { + return NumberValue.from(numString); + } + const num = Number(numString); + const infinityValues = [Number.POSITIVE_INFINITY, Number.NEGATIVE_INFINITY]; + const isLargeFiniteNumber = (num > Number.MAX_SAFE_INTEGER || num < Number.MIN_SAFE_INTEGER) && 
!infinityValues.includes(num); + if (isLargeFiniteNumber) { + if (typeof BigInt === "function") { + try { + return BigInt(numString); + } + catch (error) { + throw new Error(`${numString} can't be converted to BigInt. Set options.wrapNumbers to get string value.`); + } + } + else { + throw new Error(`${numString} is outside SAFE_INTEGER bounds. Set options.wrapNumbers to get string value.`); + } + } + return num; +}; +const convertString = (stringValue) => stringValue; +const convertBinary = (binaryValue) => binaryValue; +const convertList = (list, options) => list.map((item) => convertToNative(item, options)); +const convertMap = (map, options) => Object.entries(map).reduce((acc, [key, value]) => ((acc[key] = convertToNative(value, options)), acc), {}); diff --git a/amplify/functions/fetchDocuments/node_modules/@aws-sdk/util-dynamodb/dist-es/index.js b/amplify/functions/fetchDocuments/node_modules/@aws-sdk/util-dynamodb/dist-es/index.js new file mode 100644 index 0000000..6087756 --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@aws-sdk/util-dynamodb/dist-es/index.js @@ -0,0 +1,6 @@ +export { NumberValue as NumberValueImpl } from "./NumberValue"; +export * from "./convertToAttr"; +export * from "./convertToNative"; +export * from "./marshall"; +export * from "./models"; +export * from "./unmarshall"; diff --git a/amplify/functions/fetchDocuments/node_modules/@aws-sdk/util-dynamodb/dist-es/marshall.js b/amplify/functions/fetchDocuments/node_modules/@aws-sdk/util-dynamodb/dist-es/marshall.js new file mode 100644 index 0000000..9899c8a --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@aws-sdk/util-dynamodb/dist-es/marshall.js @@ -0,0 +1,21 @@ +import { convertToAttr } from "./convertToAttr"; +export function marshall(data, options) { + const attributeValue = convertToAttr(data, options); + const [key, value] = Object.entries(attributeValue)[0]; + switch (key) { + case "M": + case "L": + return options?.convertTopLevelContainer ? 
attributeValue : value; + case "SS": + case "NS": + case "BS": + case "S": + case "N": + case "B": + case "NULL": + case "BOOL": + case "$unknown": + default: + return attributeValue; + } +} diff --git a/amplify/functions/fetchDocuments/node_modules/@aws-sdk/util-dynamodb/dist-es/models.js b/amplify/functions/fetchDocuments/node_modules/@aws-sdk/util-dynamodb/dist-es/models.js new file mode 100644 index 0000000..cb0ff5c --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@aws-sdk/util-dynamodb/dist-es/models.js @@ -0,0 +1 @@ +export {}; diff --git a/amplify/functions/fetchDocuments/node_modules/@aws-sdk/util-dynamodb/dist-es/unmarshall.js b/amplify/functions/fetchDocuments/node_modules/@aws-sdk/util-dynamodb/dist-es/unmarshall.js new file mode 100644 index 0000000..6028656 --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@aws-sdk/util-dynamodb/dist-es/unmarshall.js @@ -0,0 +1,7 @@ +import { convertToNative } from "./convertToNative"; +export const unmarshall = (data, options) => { + if (options?.convertWithoutMapWrapper) { + return convertToNative(data, options); + } + return convertToNative({ M: data }, options); +}; diff --git a/amplify/functions/fetchDocuments/node_modules/@aws-sdk/util-dynamodb/dist-types/NumberValue.d.ts b/amplify/functions/fetchDocuments/node_modules/@aws-sdk/util-dynamodb/dist-types/NumberValue.d.ts new file mode 100644 index 0000000..c444ff8 --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@aws-sdk/util-dynamodb/dist-types/NumberValue.d.ts @@ -0,0 +1,55 @@ +import { NumberValue as INumberValue } from "./models"; +/** + * + * Class for storing DynamoDB numbers that exceed the scale of + * JavaScript's MAX_SAFE_INTEGER and MIN_SAFE_INTEGER, or the + * decimal precision limit. + * + * This class does not support mathematical operations in JavaScript. + * Convert the contained string value to your application-specific + * large number implementation to perform mathematical operations. 
+ * + * @public + * + */ +export declare class NumberValue implements INumberValue { + value: string; + /** + * This class does not validate that your string input is a valid number. + * + * @param value - a precise number, or any BigInt or string, or AttributeValue. + */ + constructor(value: number | Number | BigInt | string | { + N: string; + }); + /** + * This class does not validate that your string input is a valid number. + * + * @param value - a precise number, or any BigInt or string, or AttributeValue. + */ + static from(value: number | Number | BigInt | string | { + N: string; + }): NumberValue; + /** + * @returns the AttributeValue form for DynamoDB. + */ + toAttributeValue(): { + N: string; + }; + /** + * @returns BigInt representation. + * + * @throws SyntaxError if the string representation is not convertable to a BigInt. + */ + toBigInt(): bigint; + /** + * @override + * + * @returns string representation. This is the canonical format in DynamoDB. + */ + toString(): string; + /** + * @override + */ + valueOf(): string; +} diff --git a/amplify/functions/fetchDocuments/node_modules/@aws-sdk/util-dynamodb/dist-types/convertToAttr.d.ts b/amplify/functions/fetchDocuments/node_modules/@aws-sdk/util-dynamodb/dist-types/convertToAttr.d.ts new file mode 100644 index 0000000..7b0eae7 --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@aws-sdk/util-dynamodb/dist-types/convertToAttr.d.ts @@ -0,0 +1,10 @@ +import { AttributeValue } from "@aws-sdk/client-dynamodb"; +import { marshallOptions } from "./marshall"; +import { NativeAttributeValue } from "./models"; +/** + * Convert a JavaScript value to its equivalent DynamoDB AttributeValue type. + * + * @param data - The data to convert to a DynamoDB AttributeValue. + * @param options - An optional configuration object for `convertToAttr`. 
+ */ +export declare const convertToAttr: (data: NativeAttributeValue, options?: marshallOptions) => AttributeValue; diff --git a/amplify/functions/fetchDocuments/node_modules/@aws-sdk/util-dynamodb/dist-types/convertToNative.d.ts b/amplify/functions/fetchDocuments/node_modules/@aws-sdk/util-dynamodb/dist-types/convertToNative.d.ts new file mode 100644 index 0000000..4cbac6e --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@aws-sdk/util-dynamodb/dist-types/convertToNative.d.ts @@ -0,0 +1,10 @@ +import { AttributeValue } from "@aws-sdk/client-dynamodb"; +import type { NativeAttributeValue } from "./models"; +import { unmarshallOptions } from "./unmarshall"; +/** + * Convert a DynamoDB AttributeValue object to its equivalent JavaScript type. + * + * @param data - The DynamoDB record to convert to JavaScript type. + * @param options - An optional configuration object for `convertToNative`. + */ +export declare const convertToNative: (data: AttributeValue, options?: unmarshallOptions) => NativeAttributeValue; diff --git a/amplify/functions/fetchDocuments/node_modules/@aws-sdk/util-dynamodb/dist-types/index.d.ts b/amplify/functions/fetchDocuments/node_modules/@aws-sdk/util-dynamodb/dist-types/index.d.ts new file mode 100644 index 0000000..6087756 --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@aws-sdk/util-dynamodb/dist-types/index.d.ts @@ -0,0 +1,6 @@ +export { NumberValue as NumberValueImpl } from "./NumberValue"; +export * from "./convertToAttr"; +export * from "./convertToNative"; +export * from "./marshall"; +export * from "./models"; +export * from "./unmarshall"; diff --git a/amplify/functions/fetchDocuments/node_modules/@aws-sdk/util-dynamodb/dist-types/marshall.d.ts b/amplify/functions/fetchDocuments/node_modules/@aws-sdk/util-dynamodb/dist-types/marshall.d.ts new file mode 100644 index 0000000..a949240 --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@aws-sdk/util-dynamodb/dist-types/marshall.d.ts @@ -0,0 
+1,81 @@ +import { AttributeValue } from "@aws-sdk/client-dynamodb"; +import { NativeAttributeBinary, NativeAttributeValue } from "./models"; +import { NumberValue } from "./NumberValue"; +/** + * An optional configuration object for `marshall` + */ +export interface marshallOptions { + /** + * Whether to automatically convert empty strings, blobs, and sets to `null` + */ + convertEmptyValues?: boolean; + /** + * Whether to remove undefined values from JS arrays/Sets/objects + * when marshalling to DynamoDB lists/sets/maps respectively. + * + * A DynamoDB item is not itself considered a map. Only + * attributes of an item are examined. + */ + removeUndefinedValues?: boolean; + /** + * Whether to convert typeof object to map attribute. + */ + convertClassInstanceToMap?: boolean; + /** + * Whether to convert the top level container + * if it is a map or list. + * + * Default is true when using the DynamoDBDocumentClient, + * but false if directly using the marshall function (backwards compatibility). + */ + convertTopLevelContainer?: boolean; + /** + * Whether to allow numbers beyond Number.MAX_SAFE_INTEGER during marshalling. + * When set to true, allows numbers that may lose precision when converted to JavaScript numbers. + * When false (default), throws an error if a number exceeds Number.MAX_SAFE_INTEGER to prevent + * unintended loss of precision. Consider using the NumberValue type from @aws-sdk/lib-dynamodb + * for precise handling of large numbers. + */ + allowImpreciseNumbers?: boolean; +} +/** + * Convert a JavaScript object into a DynamoDB record. 
+ * + * @param data - The data to convert to a DynamoDB record + * @param options - An optional configuration object for `marshall` + * + */ +export declare function marshall(data: null, options?: marshallOptions): AttributeValue.NULLMember; +export declare function marshall(data: Set | Set | Set, options?: marshallOptions): AttributeValue.NSMember; +export declare function marshall(data: Set, options?: marshallOptions): AttributeValue.SSMember; +export declare function marshall(data: Set, options?: marshallOptions): AttributeValue.BSMember; +export declare function marshall(data: NativeAttributeBinary, options?: marshallOptions): AttributeValue.BMember; +export declare function marshall(data: boolean, options?: marshallOptions): AttributeValue.BOOLMember; +export declare function marshall(data: number | NumberValue | bigint, options?: marshallOptions): AttributeValue.NMember; +export declare function marshall(data: string, options?: marshallOptions): AttributeValue.SMember; +export declare function marshall(data: boolean, options?: marshallOptions): AttributeValue.BOOLMember; +export declare function marshall(data: NativeAttributeValue[], options: marshallOptions & O): AttributeValue.LMember; +export declare function marshall(data: NativeAttributeValue[], options: marshallOptions & O): AttributeValue[]; +export declare function marshall(data: NativeAttributeValue[], options: marshallOptions & O): AttributeValue[] | AttributeValue.LMember; +export declare function marshall(data: NativeAttributeValue[], options?: marshallOptions): AttributeValue[]; +export declare function marshall(data: Map | Record, options: marshallOptions & O): AttributeValue.MMember; +export declare function marshall(data: Map | Record, options: marshallOptions & O): Record; +export declare function marshall(data: Map | Record, options: marshallOptions & O): Record | AttributeValue.MMember; +export declare function marshall(data: Map | Record, options?: marshallOptions): Record; +export declare 
function marshall(data: any, options?: marshallOptions): any; +/** + * This signature will be unmatchable but is included for information. + */ +export declare function marshall(data: unknown, options?: marshallOptions): AttributeValue.$UnknownMember; diff --git a/amplify/functions/fetchDocuments/node_modules/@aws-sdk/util-dynamodb/dist-types/models.d.ts b/amplify/functions/fetchDocuments/node_modules/@aws-sdk/util-dynamodb/dist-types/models.d.ts new file mode 100644 index 0000000..7f0a963 --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@aws-sdk/util-dynamodb/dist-types/models.d.ts @@ -0,0 +1,40 @@ +/// +/// +/** + * A interface recognizable as a numeric value that stores the underlying number + * as a string. + * + * Intended to be a deserialization target for the DynamoDB Document Client when + * the `wrapNumbers` flag is set. This allows for numeric values that lose + * precision when converted to JavaScript's `number` type. + */ +export interface NumberValue { + readonly value: string; +} +/** + * @public + */ +export type NativeAttributeValue = NativeScalarAttributeValue | { + [key: string]: NativeAttributeValue; +} | NativeAttributeValue[] | Set | InstanceType<{ + new (...args: any[]): any; +}>; +/** + * @public + */ +export type NativeScalarAttributeValue = null | undefined | boolean | number | NumberValue | bigint | NativeAttributeBinary | string; +/** + * Declare File in case DOM is not added to the tsconfig lib causing + * File interface is not defined. For developers with DOM lib added, + * the File interface will be merged correctly. + */ +declare global { + interface File { + } +} +type IfDefined = {} extends T ? 
never : T; +/** + * @public + */ +export type NativeAttributeBinary = ArrayBuffer | IfDefined | IfDefined | DataView | IfDefined | Int8Array | Uint8Array | Uint8ClampedArray | Int16Array | Uint16Array | Int32Array | Uint32Array | Float32Array | Float64Array | BigInt64Array | BigUint64Array; +export {}; diff --git a/amplify/functions/fetchDocuments/node_modules/@aws-sdk/util-dynamodb/dist-types/ts3.4/NumberValue.d.ts b/amplify/functions/fetchDocuments/node_modules/@aws-sdk/util-dynamodb/dist-types/ts3.4/NumberValue.d.ts new file mode 100644 index 0000000..8180624 --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@aws-sdk/util-dynamodb/dist-types/ts3.4/NumberValue.d.ts @@ -0,0 +1,30 @@ +import { NumberValue as INumberValue } from "./models"; +export declare class NumberValue implements INumberValue { + value: string; + constructor( + value: + | number + | Number + | BigInt + | string + | { + N: string; + } + ); + static from( + value: + | number + | Number + | BigInt + | string + | { + N: string; + } + ): NumberValue; + toAttributeValue(): { + N: string; + }; + toBigInt(): bigint; + toString(): string; + valueOf(): string; +} diff --git a/amplify/functions/fetchDocuments/node_modules/@aws-sdk/util-dynamodb/dist-types/ts3.4/convertToAttr.d.ts b/amplify/functions/fetchDocuments/node_modules/@aws-sdk/util-dynamodb/dist-types/ts3.4/convertToAttr.d.ts new file mode 100644 index 0000000..d148d57 --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@aws-sdk/util-dynamodb/dist-types/ts3.4/convertToAttr.d.ts @@ -0,0 +1,7 @@ +import { AttributeValue } from "@aws-sdk/client-dynamodb"; +import { marshallOptions } from "./marshall"; +import { NativeAttributeValue } from "./models"; +export declare const convertToAttr: ( + data: NativeAttributeValue, + options?: marshallOptions +) => AttributeValue; diff --git a/amplify/functions/fetchDocuments/node_modules/@aws-sdk/util-dynamodb/dist-types/ts3.4/convertToNative.d.ts 
b/amplify/functions/fetchDocuments/node_modules/@aws-sdk/util-dynamodb/dist-types/ts3.4/convertToNative.d.ts new file mode 100644 index 0000000..c1a03f4 --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@aws-sdk/util-dynamodb/dist-types/ts3.4/convertToNative.d.ts @@ -0,0 +1,7 @@ +import { AttributeValue } from "@aws-sdk/client-dynamodb"; +import { NativeAttributeValue } from "./models"; +import { unmarshallOptions } from "./unmarshall"; +export declare const convertToNative: ( + data: AttributeValue, + options?: unmarshallOptions +) => NativeAttributeValue; diff --git a/amplify/functions/fetchDocuments/node_modules/@aws-sdk/util-dynamodb/dist-types/ts3.4/index.d.ts b/amplify/functions/fetchDocuments/node_modules/@aws-sdk/util-dynamodb/dist-types/ts3.4/index.d.ts new file mode 100644 index 0000000..6087756 --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@aws-sdk/util-dynamodb/dist-types/ts3.4/index.d.ts @@ -0,0 +1,6 @@ +export { NumberValue as NumberValueImpl } from "./NumberValue"; +export * from "./convertToAttr"; +export * from "./convertToNative"; +export * from "./marshall"; +export * from "./models"; +export * from "./unmarshall"; diff --git a/amplify/functions/fetchDocuments/node_modules/@aws-sdk/util-dynamodb/dist-types/ts3.4/marshall.d.ts b/amplify/functions/fetchDocuments/node_modules/@aws-sdk/util-dynamodb/dist-types/ts3.4/marshall.d.ts new file mode 100644 index 0000000..f81b876 --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@aws-sdk/util-dynamodb/dist-types/ts3.4/marshall.d.ts @@ -0,0 +1,112 @@ +import { AttributeValue } from "@aws-sdk/client-dynamodb"; +import { NativeAttributeBinary, NativeAttributeValue } from "./models"; +import { NumberValue } from "./NumberValue"; +export interface marshallOptions { + convertEmptyValues?: boolean; + removeUndefinedValues?: boolean; + convertClassInstanceToMap?: boolean; + convertTopLevelContainer?: boolean; + allowImpreciseNumbers?: boolean; +} +export declare 
function marshall( + data: null, + options?: marshallOptions +): AttributeValue.NULLMember; +export declare function marshall( + data: Set | Set | Set, + options?: marshallOptions +): AttributeValue.NSMember; +export declare function marshall( + data: Set, + options?: marshallOptions +): AttributeValue.SSMember; +export declare function marshall( + data: Set, + options?: marshallOptions +): AttributeValue.BSMember; +export declare function marshall( + data: NativeAttributeBinary, + options?: marshallOptions +): AttributeValue.BMember; +export declare function marshall( + data: boolean, + options?: marshallOptions +): AttributeValue.BOOLMember; +export declare function marshall( + data: number | NumberValue | bigint, + options?: marshallOptions +): AttributeValue.NMember; +export declare function marshall( + data: string, + options?: marshallOptions +): AttributeValue.SMember; +export declare function marshall( + data: boolean, + options?: marshallOptions +): AttributeValue.BOOLMember; +export declare function marshall< + O extends { + convertTopLevelContainer: true; + } +>( + data: NativeAttributeValue[], + options: marshallOptions & O +): AttributeValue.LMember; +export declare function marshall< + O extends { + convertTopLevelContainer: false; + } +>(data: NativeAttributeValue[], options: marshallOptions & O): AttributeValue[]; +export declare function marshall< + O extends { + convertTopLevelContainer: boolean; + } +>( + data: NativeAttributeValue[], + options: marshallOptions & O +): AttributeValue[] | AttributeValue.LMember; +export declare function marshall( + data: NativeAttributeValue[], + options?: marshallOptions +): AttributeValue[]; +export declare function marshall< + O extends { + convertTopLevelContainer: true; + } +>( + data: + | Map + | Record, + options: marshallOptions & O +): AttributeValue.MMember; +export declare function marshall< + O extends { + convertTopLevelContainer: false; + } +>( + data: + | Map + | Record, + options: marshallOptions & 
O +): Record; +export declare function marshall< + O extends { + convertTopLevelContainer: boolean; + } +>( + data: + | Map + | Record, + options: marshallOptions & O +): Record | AttributeValue.MMember; +export declare function marshall( + data: + | Map + | Record, + options?: marshallOptions +): Record; +export declare function marshall(data: any, options?: marshallOptions): any; +export declare function marshall( + data: unknown, + options?: marshallOptions +): AttributeValue.$UnknownMember; diff --git a/amplify/functions/fetchDocuments/node_modules/@aws-sdk/util-dynamodb/dist-types/ts3.4/models.d.ts b/amplify/functions/fetchDocuments/node_modules/@aws-sdk/util-dynamodb/dist-types/ts3.4/models.d.ts new file mode 100644 index 0000000..f2939b2 --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@aws-sdk/util-dynamodb/dist-types/ts3.4/models.d.ts @@ -0,0 +1,46 @@ +export interface NumberValue { + readonly value: string; +} +export type NativeAttributeValue = + | NativeScalarAttributeValue + | { + [key: string]: NativeAttributeValue; + } + | NativeAttributeValue[] + | Set< + number | bigint | NumberValue | string | NativeAttributeBinary | undefined + > + | InstanceType<{ + new (...args: any[]): any; + }>; +export type NativeScalarAttributeValue = + | null + | undefined + | boolean + | number + | NumberValue + | bigint + | NativeAttributeBinary + | string; +declare global { + interface File {} +} +type IfDefined = {} extends T ? 
never : T; +export type NativeAttributeBinary = + | ArrayBuffer + | IfDefined + | IfDefined + | DataView + | IfDefined + | Int8Array + | Uint8Array + | Uint8ClampedArray + | Int16Array + | Uint16Array + | Int32Array + | Uint32Array + | Float32Array + | Float64Array + | BigInt64Array + | BigUint64Array; +export {}; diff --git a/amplify/functions/fetchDocuments/node_modules/@aws-sdk/util-dynamodb/dist-types/ts3.4/unmarshall.d.ts b/amplify/functions/fetchDocuments/node_modules/@aws-sdk/util-dynamodb/dist-types/ts3.4/unmarshall.d.ts new file mode 100644 index 0000000..9d511e0 --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@aws-sdk/util-dynamodb/dist-types/ts3.4/unmarshall.d.ts @@ -0,0 +1,13 @@ +import { AttributeValue } from "@aws-sdk/client-dynamodb"; +import { NativeAttributeValue } from "./models"; +import { NumberValue } from "./NumberValue"; +export interface unmarshallOptions { + wrapNumbers?: + | boolean + | ((value: string) => number | bigint | NumberValue | any); + convertWithoutMapWrapper?: boolean; +} +export declare const unmarshall: ( + data: Record | AttributeValue, + options?: unmarshallOptions +) => Record; diff --git a/amplify/functions/fetchDocuments/node_modules/@aws-sdk/util-dynamodb/dist-types/unmarshall.d.ts b/amplify/functions/fetchDocuments/node_modules/@aws-sdk/util-dynamodb/dist-types/unmarshall.d.ts new file mode 100644 index 0000000..c477e32 --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@aws-sdk/util-dynamodb/dist-types/unmarshall.d.ts @@ -0,0 +1,31 @@ +import { AttributeValue } from "@aws-sdk/client-dynamodb"; +import { NativeAttributeValue } from "./models"; +import { NumberValue } from "./NumberValue"; +/** + * An optional configuration object for `convertToNative` + */ +export interface unmarshallOptions { + /** + * Whether to modify how numbers are unmarshalled from DynamoDB. + * When set to true, returns numbers as NumberValue instances instead of native JavaScript numbers. 
+ * This allows for the safe round-trip transport of numbers of arbitrary size. + * + * If a function is provided, it will be called with the string representation of numbers to handle + * custom conversions (e.g., using BigInt or decimal libraries). + */ + wrapNumbers?: boolean | ((value: string) => number | bigint | NumberValue | any); + /** + * When true, skip wrapping the data in `{ M: data }` before converting. + * + * Default is true when using the DynamoDBDocumentClient, + * but false if directly using the unmarshall function (backwards compatibility). + */ + convertWithoutMapWrapper?: boolean; +} +/** + * Convert a DynamoDB record into a JavaScript object. + * + * @param data - The DynamoDB record + * @param options - An optional configuration object for `unmarshall` + */ +export declare const unmarshall: (data: Record | AttributeValue, options?: unmarshallOptions) => Record; diff --git a/amplify/functions/fetchDocuments/node_modules/@aws-sdk/util-dynamodb/package.json b/amplify/functions/fetchDocuments/node_modules/@aws-sdk/util-dynamodb/package.json new file mode 100644 index 0000000..db385c7 --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@aws-sdk/util-dynamodb/package.json @@ -0,0 +1,57 @@ +{ + "name": "@aws-sdk/util-dynamodb", + "version": "3.803.0", + "scripts": { + "build": "concurrently 'yarn:build:cjs' 'yarn:build:es' 'yarn:build:types'", + "build:cjs": "node ../../scripts/compilation/inline util-dynamodb", + "build:es": "tsc -p tsconfig.es.json", + "build:include:deps": "lerna run --scope $npm_package_name --include-dependencies build", + "build:types": "tsc -p tsconfig.types.json", + "build:types:downlevel": "downlevel-dts dist-types dist-types/ts3.4", + "clean": "rimraf ./dist-* && rimraf *.tsbuildinfo", + "extract:docs": "api-extractor run --local", + "test": "yarn g:vitest run", + "test:watch": "yarn g:vitest watch" + }, + "main": "./dist-cjs/index.js", + "module": "./dist-es/index.js", + "types": "./dist-types/index.d.ts", + 
"author": { + "name": "AWS SDK for JavaScript Team", + "url": "https://aws.amazon.com/javascript/" + }, + "license": "Apache-2.0", + "dependencies": { + "tslib": "^2.6.2" + }, + "devDependencies": { + "@aws-sdk/client-dynamodb": "3.803.0", + "@tsconfig/recommended": "1.0.1", + "concurrently": "7.0.0", + "downlevel-dts": "0.10.1", + "rimraf": "3.0.2", + "typescript": "~5.2.2" + }, + "peerDependencies": { + "@aws-sdk/client-dynamodb": "^3.803.0" + }, + "engines": { + "node": ">=18.0.0" + }, + "typesVersions": { + "<4.0": { + "dist-types/*": [ + "dist-types/ts3.4/*" + ] + } + }, + "files": [ + "dist-*/**" + ], + "homepage": "https://github.com/aws/aws-sdk-js-v3/tree/main/packages/util-dynamodb", + "repository": { + "type": "git", + "url": "https://github.com/aws/aws-sdk-js-v3.git", + "directory": "packages/util-dynamodb" + } +} diff --git a/amplify/functions/fetchDocuments/node_modules/@aws-sdk/util-endpoints/LICENSE b/amplify/functions/fetchDocuments/node_modules/@aws-sdk/util-endpoints/LICENSE new file mode 100644 index 0000000..7b6491b --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@aws-sdk/util-endpoints/LICENSE @@ -0,0 +1,201 @@ +Apache License + Version 2.0, January 2004 + http://www.apache.org/licenses/ + + TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION + + 1. Definitions. + + "License" shall mean the terms and conditions for use, reproduction, + and distribution as defined by Sections 1 through 9 of this document. + + "Licensor" shall mean the copyright owner or entity authorized by + the copyright owner that is granting the License. + + "Legal Entity" shall mean the union of the acting entity and all + other entities that control, are controlled by, or are under common + control with that entity. 
For the purposes of this definition, + "control" means (i) the power, direct or indirect, to cause the + direction or management of such entity, whether by contract or + otherwise, or (ii) ownership of fifty percent (50%) or more of the + outstanding shares, or (iii) beneficial ownership of such entity. + + "You" (or "Your") shall mean an individual or Legal Entity + exercising permissions granted by this License. + + "Source" form shall mean the preferred form for making modifications, + including but not limited to software source code, documentation + source, and configuration files. + + "Object" form shall mean any form resulting from mechanical + transformation or translation of a Source form, including but + not limited to compiled object code, generated documentation, + and conversions to other media types. + + "Work" shall mean the work of authorship, whether in Source or + Object form, made available under the License, as indicated by a + copyright notice that is included in or attached to the work + (an example is provided in the Appendix below). + + "Derivative Works" shall mean any work, whether in Source or Object + form, that is based on (or derived from) the Work and for which the + editorial revisions, annotations, elaborations, or other modifications + represent, as a whole, an original work of authorship. For the purposes + of this License, Derivative Works shall not include works that remain + separable from, or merely link (or bind by name) to the interfaces of, + the Work and Derivative Works thereof. + + "Contribution" shall mean any work of authorship, including + the original version of the Work and any modifications or additions + to that Work or Derivative Works thereof, that is intentionally + submitted to Licensor for inclusion in the Work by the copyright owner + or by an individual or Legal Entity authorized to submit on behalf of + the copyright owner. 
For the purposes of this definition, "submitted" + means any form of electronic, verbal, or written communication sent + to the Licensor or its representatives, including but not limited to + communication on electronic mailing lists, source code control systems, + and issue tracking systems that are managed by, or on behalf of, the + Licensor for the purpose of discussing and improving the Work, but + excluding communication that is conspicuously marked or otherwise + designated in writing by the copyright owner as "Not a Contribution." + + "Contributor" shall mean Licensor and any individual or Legal Entity + on behalf of whom a Contribution has been received by Licensor and + subsequently incorporated within the Work. + + 2. Grant of Copyright License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + copyright license to reproduce, prepare Derivative Works of, + publicly display, publicly perform, sublicense, and distribute the + Work and such Derivative Works in Source or Object form. + + 3. Grant of Patent License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + (except as stated in this section) patent license to make, have made, + use, offer to sell, sell, import, and otherwise transfer the Work, + where such license applies only to those patent claims licensable + by such Contributor that are necessarily infringed by their + Contribution(s) alone or by combination of their Contribution(s) + with the Work to which such Contribution(s) was submitted. 
If You + institute patent litigation against any entity (including a + cross-claim or counterclaim in a lawsuit) alleging that the Work + or a Contribution incorporated within the Work constitutes direct + or contributory patent infringement, then any patent licenses + granted to You under this License for that Work shall terminate + as of the date such litigation is filed. + + 4. Redistribution. You may reproduce and distribute copies of the + Work or Derivative Works thereof in any medium, with or without + modifications, and in Source or Object form, provided that You + meet the following conditions: + + (a) You must give any other recipients of the Work or + Derivative Works a copy of this License; and + + (b) You must cause any modified files to carry prominent notices + stating that You changed the files; and + + (c) You must retain, in the Source form of any Derivative Works + that You distribute, all copyright, patent, trademark, and + attribution notices from the Source form of the Work, + excluding those notices that do not pertain to any part of + the Derivative Works; and + + (d) If the Work includes a "NOTICE" text file as part of its + distribution, then any Derivative Works that You distribute must + include a readable copy of the attribution notices contained + within such NOTICE file, excluding those notices that do not + pertain to any part of the Derivative Works, in at least one + of the following places: within a NOTICE text file distributed + as part of the Derivative Works; within the Source form or + documentation, if provided along with the Derivative Works; or, + within a display generated by the Derivative Works, if and + wherever such third-party notices normally appear. The contents + of the NOTICE file are for informational purposes only and + do not modify the License. 
You may add Your own attribution + notices within Derivative Works that You distribute, alongside + or as an addendum to the NOTICE text from the Work, provided + that such additional attribution notices cannot be construed + as modifying the License. + + You may add Your own copyright statement to Your modifications and + may provide additional or different license terms and conditions + for use, reproduction, or distribution of Your modifications, or + for any such Derivative Works as a whole, provided Your use, + reproduction, and distribution of the Work otherwise complies with + the conditions stated in this License. + + 5. Submission of Contributions. Unless You explicitly state otherwise, + any Contribution intentionally submitted for inclusion in the Work + by You to the Licensor shall be under the terms and conditions of + this License, without any additional terms or conditions. + Notwithstanding the above, nothing herein shall supersede or modify + the terms of any separate license agreement you may have executed + with Licensor regarding such Contributions. + + 6. Trademarks. This License does not grant permission to use the trade + names, trademarks, service marks, or product names of the Licensor, + except as required for reasonable and customary use in describing the + origin of the Work and reproducing the content of the NOTICE file. + + 7. Disclaimer of Warranty. Unless required by applicable law or + agreed to in writing, Licensor provides the Work (and each + Contributor provides its Contributions) on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or + implied, including, without limitation, any warranties or conditions + of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A + PARTICULAR PURPOSE. You are solely responsible for determining the + appropriateness of using or redistributing the Work and assume any + risks associated with Your exercise of permissions under this License. + + 8. 
Limitation of Liability. In no event and under no legal theory, + whether in tort (including negligence), contract, or otherwise, + unless required by applicable law (such as deliberate and grossly + negligent acts) or agreed to in writing, shall any Contributor be + liable to You for damages, including any direct, indirect, special, + incidental, or consequential damages of any character arising as a + result of this License or out of the use or inability to use the + Work (including but not limited to damages for loss of goodwill, + work stoppage, computer failure or malfunction, or any and all + other commercial damages or losses), even if such Contributor + has been advised of the possibility of such damages. + + 9. Accepting Warranty or Additional Liability. While redistributing + the Work or Derivative Works thereof, You may choose to offer, + and charge a fee for, acceptance of support, warranty, indemnity, + or other liability obligations and/or rights consistent with this + License. However, in accepting such obligations, You may act only + on Your own behalf and on Your sole responsibility, not on behalf + of any other Contributor, and only if You agree to indemnify, + defend, and hold each Contributor harmless for any liability + incurred by, or claims asserted against, such Contributor by reason + of your accepting any such warranty or additional liability. + + END OF TERMS AND CONDITIONS + + APPENDIX: How to apply the Apache License to your work. + + To apply the Apache License to your work, attach the following + boilerplate notice, with the fields enclosed by brackets "{}" + replaced with your own identifying information. (Don't include + the brackets!) The text should be enclosed in the appropriate + comment syntax for the file format. We also recommend that a + file or class name and description of purpose be included on the + same "printed page" as the copyright notice for easier + identification within third-party archives. 
+ + Copyright 2018-2020 Amazon.com, Inc. or its affiliates. All Rights Reserved. + + Licensed under the Apache License, Version 2.0 (the "License"); + you may not use this file except in compliance with the License. + You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + + Unless required by applicable law or agreed to in writing, software + distributed under the License is distributed on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + See the License for the specific language governing permissions and + limitations under the License. \ No newline at end of file diff --git a/amplify/functions/fetchDocuments/node_modules/@aws-sdk/util-endpoints/README.md b/amplify/functions/fetchDocuments/node_modules/@aws-sdk/util-endpoints/README.md new file mode 100644 index 0000000..641f54a --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@aws-sdk/util-endpoints/README.md @@ -0,0 +1,6 @@ +# @aws-sdk/util-endpoints + +[![NPM version](https://img.shields.io/npm/v/@aws-sdk/util-endpoints/latest.svg)](https://www.npmjs.com/package/@aws-sdk/util-endpoints) +[![NPM downloads](https://img.shields.io/npm/dm/@aws-sdk/util-endpoints.svg)](https://www.npmjs.com/package/@aws-sdk/util-endpoints) + +> An internal package diff --git a/amplify/functions/fetchDocuments/node_modules/@aws-sdk/util-endpoints/dist-cjs/index.js b/amplify/functions/fetchDocuments/node_modules/@aws-sdk/util-endpoints/dist-cjs/index.js new file mode 100644 index 0000000..ee0a932 --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@aws-sdk/util-endpoints/dist-cjs/index.js @@ -0,0 +1,450 @@ +"use strict"; +var __defProp = Object.defineProperty; +var __getOwnPropDesc = Object.getOwnPropertyDescriptor; +var __getOwnPropNames = Object.getOwnPropertyNames; +var __hasOwnProp = Object.prototype.hasOwnProperty; +var __name = (target, value) => __defProp(target, "name", { value, configurable: true }); +var __export = 
(target, all) => { + for (var name in all) + __defProp(target, name, { get: all[name], enumerable: true }); +}; +var __copyProps = (to, from, except, desc) => { + if (from && typeof from === "object" || typeof from === "function") { + for (let key of __getOwnPropNames(from)) + if (!__hasOwnProp.call(to, key) && key !== except) + __defProp(to, key, { get: () => from[key], enumerable: !(desc = __getOwnPropDesc(from, key)) || desc.enumerable }); + } + return to; +}; +var __toCommonJS = (mod) => __copyProps(__defProp({}, "__esModule", { value: true }), mod); + +// src/index.ts +var index_exports = {}; +__export(index_exports, { + ConditionObject: () => import_util_endpoints.ConditionObject, + DeprecatedObject: () => import_util_endpoints.DeprecatedObject, + EndpointError: () => import_util_endpoints.EndpointError, + EndpointObject: () => import_util_endpoints.EndpointObject, + EndpointObjectHeaders: () => import_util_endpoints.EndpointObjectHeaders, + EndpointObjectProperties: () => import_util_endpoints.EndpointObjectProperties, + EndpointParams: () => import_util_endpoints.EndpointParams, + EndpointResolverOptions: () => import_util_endpoints.EndpointResolverOptions, + EndpointRuleObject: () => import_util_endpoints.EndpointRuleObject, + ErrorRuleObject: () => import_util_endpoints.ErrorRuleObject, + EvaluateOptions: () => import_util_endpoints.EvaluateOptions, + Expression: () => import_util_endpoints.Expression, + FunctionArgv: () => import_util_endpoints.FunctionArgv, + FunctionObject: () => import_util_endpoints.FunctionObject, + FunctionReturn: () => import_util_endpoints.FunctionReturn, + ParameterObject: () => import_util_endpoints.ParameterObject, + ReferenceObject: () => import_util_endpoints.ReferenceObject, + ReferenceRecord: () => import_util_endpoints.ReferenceRecord, + RuleSetObject: () => import_util_endpoints.RuleSetObject, + RuleSetRules: () => import_util_endpoints.RuleSetRules, + TreeRuleObject: () => import_util_endpoints.TreeRuleObject, + 
awsEndpointFunctions: () => awsEndpointFunctions, + getUserAgentPrefix: () => getUserAgentPrefix, + isIpAddress: () => import_util_endpoints.isIpAddress, + partition: () => partition, + resolveEndpoint: () => import_util_endpoints.resolveEndpoint, + setPartitionInfo: () => setPartitionInfo, + useDefaultPartitionInfo: () => useDefaultPartitionInfo +}); +module.exports = __toCommonJS(index_exports); + +// src/aws.ts + + +// src/lib/aws/isVirtualHostableS3Bucket.ts + + +// src/lib/isIpAddress.ts +var import_util_endpoints = require("@smithy/util-endpoints"); + +// src/lib/aws/isVirtualHostableS3Bucket.ts +var isVirtualHostableS3Bucket = /* @__PURE__ */ __name((value, allowSubDomains = false) => { + if (allowSubDomains) { + for (const label of value.split(".")) { + if (!isVirtualHostableS3Bucket(label)) { + return false; + } + } + return true; + } + if (!(0, import_util_endpoints.isValidHostLabel)(value)) { + return false; + } + if (value.length < 3 || value.length > 63) { + return false; + } + if (value !== value.toLowerCase()) { + return false; + } + if ((0, import_util_endpoints.isIpAddress)(value)) { + return false; + } + return true; +}, "isVirtualHostableS3Bucket"); + +// src/lib/aws/parseArn.ts +var ARN_DELIMITER = ":"; +var RESOURCE_DELIMITER = "/"; +var parseArn = /* @__PURE__ */ __name((value) => { + const segments = value.split(ARN_DELIMITER); + if (segments.length < 6) return null; + const [arn, partition2, service, region, accountId, ...resourcePath] = segments; + if (arn !== "arn" || partition2 === "" || service === "" || resourcePath.join(ARN_DELIMITER) === "") return null; + const resourceId = resourcePath.map((resource) => resource.split(RESOURCE_DELIMITER)).flat(); + return { + partition: partition2, + service, + region, + accountId, + resourceId + }; +}, "parseArn"); + +// src/lib/aws/partitions.json +var partitions_default = { + partitions: [{ + id: "aws", + outputs: { + dnsSuffix: "amazonaws.com", + dualStackDnsSuffix: "api.aws", + 
implicitGlobalRegion: "us-east-1", + name: "aws", + supportsDualStack: true, + supportsFIPS: true + }, + regionRegex: "^(us|eu|ap|sa|ca|me|af|il|mx)\\-\\w+\\-\\d+$", + regions: { + "af-south-1": { + description: "Africa (Cape Town)" + }, + "ap-east-1": { + description: "Asia Pacific (Hong Kong)" + }, + "ap-northeast-1": { + description: "Asia Pacific (Tokyo)" + }, + "ap-northeast-2": { + description: "Asia Pacific (Seoul)" + }, + "ap-northeast-3": { + description: "Asia Pacific (Osaka)" + }, + "ap-south-1": { + description: "Asia Pacific (Mumbai)" + }, + "ap-south-2": { + description: "Asia Pacific (Hyderabad)" + }, + "ap-southeast-1": { + description: "Asia Pacific (Singapore)" + }, + "ap-southeast-2": { + description: "Asia Pacific (Sydney)" + }, + "ap-southeast-3": { + description: "Asia Pacific (Jakarta)" + }, + "ap-southeast-4": { + description: "Asia Pacific (Melbourne)" + }, + "ap-southeast-5": { + description: "Asia Pacific (Malaysia)" + }, + "ap-southeast-7": { + description: "Asia Pacific (Thailand)" + }, + "aws-global": { + description: "AWS Standard global region" + }, + "ca-central-1": { + description: "Canada (Central)" + }, + "ca-west-1": { + description: "Canada West (Calgary)" + }, + "eu-central-1": { + description: "Europe (Frankfurt)" + }, + "eu-central-2": { + description: "Europe (Zurich)" + }, + "eu-north-1": { + description: "Europe (Stockholm)" + }, + "eu-south-1": { + description: "Europe (Milan)" + }, + "eu-south-2": { + description: "Europe (Spain)" + }, + "eu-west-1": { + description: "Europe (Ireland)" + }, + "eu-west-2": { + description: "Europe (London)" + }, + "eu-west-3": { + description: "Europe (Paris)" + }, + "il-central-1": { + description: "Israel (Tel Aviv)" + }, + "me-central-1": { + description: "Middle East (UAE)" + }, + "me-south-1": { + description: "Middle East (Bahrain)" + }, + "mx-central-1": { + description: "Mexico (Central)" + }, + "sa-east-1": { + description: "South America (Sao Paulo)" + }, + "us-east-1": { + 
description: "US East (N. Virginia)" + }, + "us-east-2": { + description: "US East (Ohio)" + }, + "us-west-1": { + description: "US West (N. California)" + }, + "us-west-2": { + description: "US West (Oregon)" + } + } + }, { + id: "aws-cn", + outputs: { + dnsSuffix: "amazonaws.com.cn", + dualStackDnsSuffix: "api.amazonwebservices.com.cn", + implicitGlobalRegion: "cn-northwest-1", + name: "aws-cn", + supportsDualStack: true, + supportsFIPS: true + }, + regionRegex: "^cn\\-\\w+\\-\\d+$", + regions: { + "aws-cn-global": { + description: "AWS China global region" + }, + "cn-north-1": { + description: "China (Beijing)" + }, + "cn-northwest-1": { + description: "China (Ningxia)" + } + } + }, { + id: "aws-us-gov", + outputs: { + dnsSuffix: "amazonaws.com", + dualStackDnsSuffix: "api.aws", + implicitGlobalRegion: "us-gov-west-1", + name: "aws-us-gov", + supportsDualStack: true, + supportsFIPS: true + }, + regionRegex: "^us\\-gov\\-\\w+\\-\\d+$", + regions: { + "aws-us-gov-global": { + description: "AWS GovCloud (US) global region" + }, + "us-gov-east-1": { + description: "AWS GovCloud (US-East)" + }, + "us-gov-west-1": { + description: "AWS GovCloud (US-West)" + } + } + }, { + id: "aws-iso", + outputs: { + dnsSuffix: "c2s.ic.gov", + dualStackDnsSuffix: "c2s.ic.gov", + implicitGlobalRegion: "us-iso-east-1", + name: "aws-iso", + supportsDualStack: false, + supportsFIPS: true + }, + regionRegex: "^us\\-iso\\-\\w+\\-\\d+$", + regions: { + "aws-iso-global": { + description: "AWS ISO (US) global region" + }, + "us-iso-east-1": { + description: "US ISO East" + }, + "us-iso-west-1": { + description: "US ISO WEST" + } + } + }, { + id: "aws-iso-b", + outputs: { + dnsSuffix: "sc2s.sgov.gov", + dualStackDnsSuffix: "sc2s.sgov.gov", + implicitGlobalRegion: "us-isob-east-1", + name: "aws-iso-b", + supportsDualStack: false, + supportsFIPS: true + }, + regionRegex: "^us\\-isob\\-\\w+\\-\\d+$", + regions: { + "aws-iso-b-global": { + description: "AWS ISOB (US) global region" + }, + 
"us-isob-east-1": { + description: "US ISOB East (Ohio)" + } + } + }, { + id: "aws-iso-e", + outputs: { + dnsSuffix: "cloud.adc-e.uk", + dualStackDnsSuffix: "cloud.adc-e.uk", + implicitGlobalRegion: "eu-isoe-west-1", + name: "aws-iso-e", + supportsDualStack: false, + supportsFIPS: true + }, + regionRegex: "^eu\\-isoe\\-\\w+\\-\\d+$", + regions: { + "aws-iso-e-global": { + description: "AWS ISOE (Europe) global region" + }, + "eu-isoe-west-1": { + description: "EU ISOE West" + } + } + }, { + id: "aws-iso-f", + outputs: { + dnsSuffix: "csp.hci.ic.gov", + dualStackDnsSuffix: "csp.hci.ic.gov", + implicitGlobalRegion: "us-isof-south-1", + name: "aws-iso-f", + supportsDualStack: false, + supportsFIPS: true + }, + regionRegex: "^us\\-isof\\-\\w+\\-\\d+$", + regions: { + "aws-iso-f-global": { + description: "AWS ISOF global region" + }, + "us-isof-east-1": { + description: "US ISOF EAST" + }, + "us-isof-south-1": { + description: "US ISOF SOUTH" + } + } + }, { + id: "aws-eusc", + outputs: { + dnsSuffix: "amazonaws.eu", + dualStackDnsSuffix: "amazonaws.eu", + implicitGlobalRegion: "eusc-de-east-1", + name: "aws-eusc", + supportsDualStack: false, + supportsFIPS: true + }, + regionRegex: "^eusc\\-(de)\\-\\w+\\-\\d+$", + regions: { + "eusc-de-east-1": { + description: "EU (Germany)" + } + } + }], + version: "1.1" +}; + +// src/lib/aws/partition.ts +var selectedPartitionsInfo = partitions_default; +var selectedUserAgentPrefix = ""; +var partition = /* @__PURE__ */ __name((value) => { + const { partitions } = selectedPartitionsInfo; + for (const partition2 of partitions) { + const { regions, outputs } = partition2; + for (const [region, regionData] of Object.entries(regions)) { + if (region === value) { + return { + ...outputs, + ...regionData + }; + } + } + } + for (const partition2 of partitions) { + const { regionRegex, outputs } = partition2; + if (new RegExp(regionRegex).test(value)) { + return { + ...outputs + }; + } + } + const DEFAULT_PARTITION = 
partitions.find((partition2) => partition2.id === "aws"); + if (!DEFAULT_PARTITION) { + throw new Error( + "Provided region was not found in the partition array or regex, and default partition with id 'aws' doesn't exist." + ); + } + return { + ...DEFAULT_PARTITION.outputs + }; +}, "partition"); +var setPartitionInfo = /* @__PURE__ */ __name((partitionsInfo, userAgentPrefix = "") => { + selectedPartitionsInfo = partitionsInfo; + selectedUserAgentPrefix = userAgentPrefix; +}, "setPartitionInfo"); +var useDefaultPartitionInfo = /* @__PURE__ */ __name(() => { + setPartitionInfo(partitions_default, ""); +}, "useDefaultPartitionInfo"); +var getUserAgentPrefix = /* @__PURE__ */ __name(() => selectedUserAgentPrefix, "getUserAgentPrefix"); + +// src/aws.ts +var awsEndpointFunctions = { + isVirtualHostableS3Bucket, + parseArn, + partition +}; +import_util_endpoints.customEndpointFunctions.aws = awsEndpointFunctions; + +// src/resolveEndpoint.ts + + +// src/types/EndpointError.ts + + +// src/types/EndpointRuleObject.ts + + +// src/types/ErrorRuleObject.ts + + +// src/types/RuleSetObject.ts + + +// src/types/TreeRuleObject.ts + + +// src/types/shared.ts + +// Annotate the CommonJS export names for ESM import in node: + +0 && (module.exports = { + awsEndpointFunctions, + partition, + setPartitionInfo, + useDefaultPartitionInfo, + getUserAgentPrefix, + isIpAddress, + resolveEndpoint, + EndpointError +}); + diff --git a/amplify/functions/fetchDocuments/node_modules/@aws-sdk/util-endpoints/dist-cjs/lib/aws/partitions.json b/amplify/functions/fetchDocuments/node_modules/@aws-sdk/util-endpoints/dist-cjs/lib/aws/partitions.json new file mode 100644 index 0000000..a11705a --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@aws-sdk/util-endpoints/dist-cjs/lib/aws/partitions.json @@ -0,0 +1,258 @@ +{ + "partitions": [{ + "id": "aws", + "outputs": { + "dnsSuffix": "amazonaws.com", + "dualStackDnsSuffix": "api.aws", + "implicitGlobalRegion": "us-east-1", + "name": "aws", + 
"supportsDualStack": true, + "supportsFIPS": true + }, + "regionRegex": "^(us|eu|ap|sa|ca|me|af|il|mx)\\-\\w+\\-\\d+$", + "regions": { + "af-south-1": { + "description": "Africa (Cape Town)" + }, + "ap-east-1": { + "description": "Asia Pacific (Hong Kong)" + }, + "ap-northeast-1": { + "description": "Asia Pacific (Tokyo)" + }, + "ap-northeast-2": { + "description": "Asia Pacific (Seoul)" + }, + "ap-northeast-3": { + "description": "Asia Pacific (Osaka)" + }, + "ap-south-1": { + "description": "Asia Pacific (Mumbai)" + }, + "ap-south-2": { + "description": "Asia Pacific (Hyderabad)" + }, + "ap-southeast-1": { + "description": "Asia Pacific (Singapore)" + }, + "ap-southeast-2": { + "description": "Asia Pacific (Sydney)" + }, + "ap-southeast-3": { + "description": "Asia Pacific (Jakarta)" + }, + "ap-southeast-4": { + "description": "Asia Pacific (Melbourne)" + }, + "ap-southeast-5": { + "description": "Asia Pacific (Malaysia)" + }, + "ap-southeast-7": { + "description": "Asia Pacific (Thailand)" + }, + "aws-global": { + "description": "AWS Standard global region" + }, + "ca-central-1": { + "description": "Canada (Central)" + }, + "ca-west-1": { + "description": "Canada West (Calgary)" + }, + "eu-central-1": { + "description": "Europe (Frankfurt)" + }, + "eu-central-2": { + "description": "Europe (Zurich)" + }, + "eu-north-1": { + "description": "Europe (Stockholm)" + }, + "eu-south-1": { + "description": "Europe (Milan)" + }, + "eu-south-2": { + "description": "Europe (Spain)" + }, + "eu-west-1": { + "description": "Europe (Ireland)" + }, + "eu-west-2": { + "description": "Europe (London)" + }, + "eu-west-3": { + "description": "Europe (Paris)" + }, + "il-central-1": { + "description": "Israel (Tel Aviv)" + }, + "me-central-1": { + "description": "Middle East (UAE)" + }, + "me-south-1": { + "description": "Middle East (Bahrain)" + }, + "mx-central-1": { + "description": "Mexico (Central)" + }, + "sa-east-1": { + "description": "South America (Sao Paulo)" + }, + 
"us-east-1": { + "description": "US East (N. Virginia)" + }, + "us-east-2": { + "description": "US East (Ohio)" + }, + "us-west-1": { + "description": "US West (N. California)" + }, + "us-west-2": { + "description": "US West (Oregon)" + } + } + }, { + "id": "aws-cn", + "outputs": { + "dnsSuffix": "amazonaws.com.cn", + "dualStackDnsSuffix": "api.amazonwebservices.com.cn", + "implicitGlobalRegion": "cn-northwest-1", + "name": "aws-cn", + "supportsDualStack": true, + "supportsFIPS": true + }, + "regionRegex": "^cn\\-\\w+\\-\\d+$", + "regions": { + "aws-cn-global": { + "description": "AWS China global region" + }, + "cn-north-1": { + "description": "China (Beijing)" + }, + "cn-northwest-1": { + "description": "China (Ningxia)" + } + } + }, { + "id": "aws-us-gov", + "outputs": { + "dnsSuffix": "amazonaws.com", + "dualStackDnsSuffix": "api.aws", + "implicitGlobalRegion": "us-gov-west-1", + "name": "aws-us-gov", + "supportsDualStack": true, + "supportsFIPS": true + }, + "regionRegex": "^us\\-gov\\-\\w+\\-\\d+$", + "regions": { + "aws-us-gov-global": { + "description": "AWS GovCloud (US) global region" + }, + "us-gov-east-1": { + "description": "AWS GovCloud (US-East)" + }, + "us-gov-west-1": { + "description": "AWS GovCloud (US-West)" + } + } + }, { + "id": "aws-iso", + "outputs": { + "dnsSuffix": "c2s.ic.gov", + "dualStackDnsSuffix": "c2s.ic.gov", + "implicitGlobalRegion": "us-iso-east-1", + "name": "aws-iso", + "supportsDualStack": false, + "supportsFIPS": true + }, + "regionRegex": "^us\\-iso\\-\\w+\\-\\d+$", + "regions": { + "aws-iso-global": { + "description": "AWS ISO (US) global region" + }, + "us-iso-east-1": { + "description": "US ISO East" + }, + "us-iso-west-1": { + "description": "US ISO WEST" + } + } + }, { + "id": "aws-iso-b", + "outputs": { + "dnsSuffix": "sc2s.sgov.gov", + "dualStackDnsSuffix": "sc2s.sgov.gov", + "implicitGlobalRegion": "us-isob-east-1", + "name": "aws-iso-b", + "supportsDualStack": false, + "supportsFIPS": true + }, + "regionRegex": 
"^us\\-isob\\-\\w+\\-\\d+$", + "regions": { + "aws-iso-b-global": { + "description": "AWS ISOB (US) global region" + }, + "us-isob-east-1": { + "description": "US ISOB East (Ohio)" + } + } + }, { + "id": "aws-iso-e", + "outputs": { + "dnsSuffix": "cloud.adc-e.uk", + "dualStackDnsSuffix": "cloud.adc-e.uk", + "implicitGlobalRegion": "eu-isoe-west-1", + "name": "aws-iso-e", + "supportsDualStack": false, + "supportsFIPS": true + }, + "regionRegex": "^eu\\-isoe\\-\\w+\\-\\d+$", + "regions": { + "aws-iso-e-global": { + "description": "AWS ISOE (Europe) global region" + }, + "eu-isoe-west-1": { + "description": "EU ISOE West" + } + } + }, { + "id": "aws-iso-f", + "outputs": { + "dnsSuffix": "csp.hci.ic.gov", + "dualStackDnsSuffix": "csp.hci.ic.gov", + "implicitGlobalRegion": "us-isof-south-1", + "name": "aws-iso-f", + "supportsDualStack": false, + "supportsFIPS": true + }, + "regionRegex": "^us\\-isof\\-\\w+\\-\\d+$", + "regions": { + "aws-iso-f-global": { + "description": "AWS ISOF global region" + }, + "us-isof-east-1": { + "description": "US ISOF EAST" + }, + "us-isof-south-1": { + "description": "US ISOF SOUTH" + } + } + }, { + "id": "aws-eusc", + "outputs": { + "dnsSuffix": "amazonaws.eu", + "dualStackDnsSuffix": "amazonaws.eu", + "implicitGlobalRegion": "eusc-de-east-1", + "name": "aws-eusc", + "supportsDualStack": false, + "supportsFIPS": true + }, + "regionRegex": "^eusc\\-(de)\\-\\w+\\-\\d+$", + "regions": { + "eusc-de-east-1": { + "description": "EU (Germany)" + } + } + }], + "version": "1.1" +} diff --git a/amplify/functions/fetchDocuments/node_modules/@aws-sdk/util-endpoints/dist-es/aws.js b/amplify/functions/fetchDocuments/node_modules/@aws-sdk/util-endpoints/dist-es/aws.js new file mode 100644 index 0000000..49a408e --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@aws-sdk/util-endpoints/dist-es/aws.js @@ -0,0 +1,10 @@ +import { customEndpointFunctions } from "@smithy/util-endpoints"; +import { isVirtualHostableS3Bucket } from 
"./lib/aws/isVirtualHostableS3Bucket"; +import { parseArn } from "./lib/aws/parseArn"; +import { partition } from "./lib/aws/partition"; +export const awsEndpointFunctions = { + isVirtualHostableS3Bucket: isVirtualHostableS3Bucket, + parseArn: parseArn, + partition: partition, +}; +customEndpointFunctions.aws = awsEndpointFunctions; diff --git a/amplify/functions/fetchDocuments/node_modules/@aws-sdk/util-endpoints/dist-es/index.js b/amplify/functions/fetchDocuments/node_modules/@aws-sdk/util-endpoints/dist-es/index.js new file mode 100644 index 0000000..d046d90 --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@aws-sdk/util-endpoints/dist-es/index.js @@ -0,0 +1,5 @@ +export * from "./aws"; +export * from "./lib/aws/partition"; +export * from "./lib/isIpAddress"; +export * from "./resolveEndpoint"; +export * from "./types"; diff --git a/amplify/functions/fetchDocuments/node_modules/@aws-sdk/util-endpoints/dist-es/lib/aws/index.js b/amplify/functions/fetchDocuments/node_modules/@aws-sdk/util-endpoints/dist-es/lib/aws/index.js new file mode 100644 index 0000000..03be049 --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@aws-sdk/util-endpoints/dist-es/lib/aws/index.js @@ -0,0 +1,3 @@ +export * from "./isVirtualHostableS3Bucket"; +export * from "./parseArn"; +export * from "./partition"; diff --git a/amplify/functions/fetchDocuments/node_modules/@aws-sdk/util-endpoints/dist-es/lib/aws/isVirtualHostableS3Bucket.js b/amplify/functions/fetchDocuments/node_modules/@aws-sdk/util-endpoints/dist-es/lib/aws/isVirtualHostableS3Bucket.js new file mode 100644 index 0000000..f2bacc0 --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@aws-sdk/util-endpoints/dist-es/lib/aws/isVirtualHostableS3Bucket.js @@ -0,0 +1,25 @@ +import { isValidHostLabel } from "@smithy/util-endpoints"; +import { isIpAddress } from "../isIpAddress"; +export const isVirtualHostableS3Bucket = (value, allowSubDomains = false) => { + if (allowSubDomains) { + for 
(const label of value.split(".")) { + if (!isVirtualHostableS3Bucket(label)) { + return false; + } + } + return true; + } + if (!isValidHostLabel(value)) { + return false; + } + if (value.length < 3 || value.length > 63) { + return false; + } + if (value !== value.toLowerCase()) { + return false; + } + if (isIpAddress(value)) { + return false; + } + return true; +}; diff --git a/amplify/functions/fetchDocuments/node_modules/@aws-sdk/util-endpoints/dist-es/lib/aws/parseArn.js b/amplify/functions/fetchDocuments/node_modules/@aws-sdk/util-endpoints/dist-es/lib/aws/parseArn.js new file mode 100644 index 0000000..6b12887 --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@aws-sdk/util-endpoints/dist-es/lib/aws/parseArn.js @@ -0,0 +1,18 @@ +const ARN_DELIMITER = ":"; +const RESOURCE_DELIMITER = "/"; +export const parseArn = (value) => { + const segments = value.split(ARN_DELIMITER); + if (segments.length < 6) + return null; + const [arn, partition, service, region, accountId, ...resourcePath] = segments; + if (arn !== "arn" || partition === "" || service === "" || resourcePath.join(ARN_DELIMITER) === "") + return null; + const resourceId = resourcePath.map((resource) => resource.split(RESOURCE_DELIMITER)).flat(); + return { + partition, + service, + region, + accountId, + resourceId, + }; +}; diff --git a/amplify/functions/fetchDocuments/node_modules/@aws-sdk/util-endpoints/dist-es/lib/aws/partition.js b/amplify/functions/fetchDocuments/node_modules/@aws-sdk/util-endpoints/dist-es/lib/aws/partition.js new file mode 100644 index 0000000..8d39d81 --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@aws-sdk/util-endpoints/dist-es/lib/aws/partition.js @@ -0,0 +1,41 @@ +import partitionsInfo from "./partitions.json"; +let selectedPartitionsInfo = partitionsInfo; +let selectedUserAgentPrefix = ""; +export const partition = (value) => { + const { partitions } = selectedPartitionsInfo; + for (const partition of partitions) { + const { regions, 
outputs } = partition; + for (const [region, regionData] of Object.entries(regions)) { + if (region === value) { + return { + ...outputs, + ...regionData, + }; + } + } + } + for (const partition of partitions) { + const { regionRegex, outputs } = partition; + if (new RegExp(regionRegex).test(value)) { + return { + ...outputs, + }; + } + } + const DEFAULT_PARTITION = partitions.find((partition) => partition.id === "aws"); + if (!DEFAULT_PARTITION) { + throw new Error("Provided region was not found in the partition array or regex," + + " and default partition with id 'aws' doesn't exist."); + } + return { + ...DEFAULT_PARTITION.outputs, + }; +}; +export const setPartitionInfo = (partitionsInfo, userAgentPrefix = "") => { + selectedPartitionsInfo = partitionsInfo; + selectedUserAgentPrefix = userAgentPrefix; +}; +export const useDefaultPartitionInfo = () => { + setPartitionInfo(partitionsInfo, ""); +}; +export const getUserAgentPrefix = () => selectedUserAgentPrefix; diff --git a/amplify/functions/fetchDocuments/node_modules/@aws-sdk/util-endpoints/dist-es/lib/aws/partitions.json b/amplify/functions/fetchDocuments/node_modules/@aws-sdk/util-endpoints/dist-es/lib/aws/partitions.json new file mode 100644 index 0000000..a11705a --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@aws-sdk/util-endpoints/dist-es/lib/aws/partitions.json @@ -0,0 +1,258 @@ +{ + "partitions": [{ + "id": "aws", + "outputs": { + "dnsSuffix": "amazonaws.com", + "dualStackDnsSuffix": "api.aws", + "implicitGlobalRegion": "us-east-1", + "name": "aws", + "supportsDualStack": true, + "supportsFIPS": true + }, + "regionRegex": "^(us|eu|ap|sa|ca|me|af|il|mx)\\-\\w+\\-\\d+$", + "regions": { + "af-south-1": { + "description": "Africa (Cape Town)" + }, + "ap-east-1": { + "description": "Asia Pacific (Hong Kong)" + }, + "ap-northeast-1": { + "description": "Asia Pacific (Tokyo)" + }, + "ap-northeast-2": { + "description": "Asia Pacific (Seoul)" + }, + "ap-northeast-3": { + "description": "Asia 
Pacific (Osaka)" + }, + "ap-south-1": { + "description": "Asia Pacific (Mumbai)" + }, + "ap-south-2": { + "description": "Asia Pacific (Hyderabad)" + }, + "ap-southeast-1": { + "description": "Asia Pacific (Singapore)" + }, + "ap-southeast-2": { + "description": "Asia Pacific (Sydney)" + }, + "ap-southeast-3": { + "description": "Asia Pacific (Jakarta)" + }, + "ap-southeast-4": { + "description": "Asia Pacific (Melbourne)" + }, + "ap-southeast-5": { + "description": "Asia Pacific (Malaysia)" + }, + "ap-southeast-7": { + "description": "Asia Pacific (Thailand)" + }, + "aws-global": { + "description": "AWS Standard global region" + }, + "ca-central-1": { + "description": "Canada (Central)" + }, + "ca-west-1": { + "description": "Canada West (Calgary)" + }, + "eu-central-1": { + "description": "Europe (Frankfurt)" + }, + "eu-central-2": { + "description": "Europe (Zurich)" + }, + "eu-north-1": { + "description": "Europe (Stockholm)" + }, + "eu-south-1": { + "description": "Europe (Milan)" + }, + "eu-south-2": { + "description": "Europe (Spain)" + }, + "eu-west-1": { + "description": "Europe (Ireland)" + }, + "eu-west-2": { + "description": "Europe (London)" + }, + "eu-west-3": { + "description": "Europe (Paris)" + }, + "il-central-1": { + "description": "Israel (Tel Aviv)" + }, + "me-central-1": { + "description": "Middle East (UAE)" + }, + "me-south-1": { + "description": "Middle East (Bahrain)" + }, + "mx-central-1": { + "description": "Mexico (Central)" + }, + "sa-east-1": { + "description": "South America (Sao Paulo)" + }, + "us-east-1": { + "description": "US East (N. Virginia)" + }, + "us-east-2": { + "description": "US East (Ohio)" + }, + "us-west-1": { + "description": "US West (N. 
California)" + }, + "us-west-2": { + "description": "US West (Oregon)" + } + } + }, { + "id": "aws-cn", + "outputs": { + "dnsSuffix": "amazonaws.com.cn", + "dualStackDnsSuffix": "api.amazonwebservices.com.cn", + "implicitGlobalRegion": "cn-northwest-1", + "name": "aws-cn", + "supportsDualStack": true, + "supportsFIPS": true + }, + "regionRegex": "^cn\\-\\w+\\-\\d+$", + "regions": { + "aws-cn-global": { + "description": "AWS China global region" + }, + "cn-north-1": { + "description": "China (Beijing)" + }, + "cn-northwest-1": { + "description": "China (Ningxia)" + } + } + }, { + "id": "aws-us-gov", + "outputs": { + "dnsSuffix": "amazonaws.com", + "dualStackDnsSuffix": "api.aws", + "implicitGlobalRegion": "us-gov-west-1", + "name": "aws-us-gov", + "supportsDualStack": true, + "supportsFIPS": true + }, + "regionRegex": "^us\\-gov\\-\\w+\\-\\d+$", + "regions": { + "aws-us-gov-global": { + "description": "AWS GovCloud (US) global region" + }, + "us-gov-east-1": { + "description": "AWS GovCloud (US-East)" + }, + "us-gov-west-1": { + "description": "AWS GovCloud (US-West)" + } + } + }, { + "id": "aws-iso", + "outputs": { + "dnsSuffix": "c2s.ic.gov", + "dualStackDnsSuffix": "c2s.ic.gov", + "implicitGlobalRegion": "us-iso-east-1", + "name": "aws-iso", + "supportsDualStack": false, + "supportsFIPS": true + }, + "regionRegex": "^us\\-iso\\-\\w+\\-\\d+$", + "regions": { + "aws-iso-global": { + "description": "AWS ISO (US) global region" + }, + "us-iso-east-1": { + "description": "US ISO East" + }, + "us-iso-west-1": { + "description": "US ISO WEST" + } + } + }, { + "id": "aws-iso-b", + "outputs": { + "dnsSuffix": "sc2s.sgov.gov", + "dualStackDnsSuffix": "sc2s.sgov.gov", + "implicitGlobalRegion": "us-isob-east-1", + "name": "aws-iso-b", + "supportsDualStack": false, + "supportsFIPS": true + }, + "regionRegex": "^us\\-isob\\-\\w+\\-\\d+$", + "regions": { + "aws-iso-b-global": { + "description": "AWS ISOB (US) global region" + }, + "us-isob-east-1": { + "description": "US ISOB 
East (Ohio)" + } + } + }, { + "id": "aws-iso-e", + "outputs": { + "dnsSuffix": "cloud.adc-e.uk", + "dualStackDnsSuffix": "cloud.adc-e.uk", + "implicitGlobalRegion": "eu-isoe-west-1", + "name": "aws-iso-e", + "supportsDualStack": false, + "supportsFIPS": true + }, + "regionRegex": "^eu\\-isoe\\-\\w+\\-\\d+$", + "regions": { + "aws-iso-e-global": { + "description": "AWS ISOE (Europe) global region" + }, + "eu-isoe-west-1": { + "description": "EU ISOE West" + } + } + }, { + "id": "aws-iso-f", + "outputs": { + "dnsSuffix": "csp.hci.ic.gov", + "dualStackDnsSuffix": "csp.hci.ic.gov", + "implicitGlobalRegion": "us-isof-south-1", + "name": "aws-iso-f", + "supportsDualStack": false, + "supportsFIPS": true + }, + "regionRegex": "^us\\-isof\\-\\w+\\-\\d+$", + "regions": { + "aws-iso-f-global": { + "description": "AWS ISOF global region" + }, + "us-isof-east-1": { + "description": "US ISOF EAST" + }, + "us-isof-south-1": { + "description": "US ISOF SOUTH" + } + } + }, { + "id": "aws-eusc", + "outputs": { + "dnsSuffix": "amazonaws.eu", + "dualStackDnsSuffix": "amazonaws.eu", + "implicitGlobalRegion": "eusc-de-east-1", + "name": "aws-eusc", + "supportsDualStack": false, + "supportsFIPS": true + }, + "regionRegex": "^eusc\\-(de)\\-\\w+\\-\\d+$", + "regions": { + "eusc-de-east-1": { + "description": "EU (Germany)" + } + } + }], + "version": "1.1" +} diff --git a/amplify/functions/fetchDocuments/node_modules/@aws-sdk/util-endpoints/dist-es/lib/isIpAddress.js b/amplify/functions/fetchDocuments/node_modules/@aws-sdk/util-endpoints/dist-es/lib/isIpAddress.js new file mode 100644 index 0000000..59bfcd8 --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@aws-sdk/util-endpoints/dist-es/lib/isIpAddress.js @@ -0,0 +1 @@ +export { isIpAddress } from "@smithy/util-endpoints"; diff --git a/amplify/functions/fetchDocuments/node_modules/@aws-sdk/util-endpoints/dist-es/resolveEndpoint.js 
b/amplify/functions/fetchDocuments/node_modules/@aws-sdk/util-endpoints/dist-es/resolveEndpoint.js new file mode 100644 index 0000000..e2453f7 --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@aws-sdk/util-endpoints/dist-es/resolveEndpoint.js @@ -0,0 +1 @@ +export { resolveEndpoint } from "@smithy/util-endpoints"; diff --git a/amplify/functions/fetchDocuments/node_modules/@aws-sdk/util-endpoints/dist-es/types/EndpointError.js b/amplify/functions/fetchDocuments/node_modules/@aws-sdk/util-endpoints/dist-es/types/EndpointError.js new file mode 100644 index 0000000..521e688 --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@aws-sdk/util-endpoints/dist-es/types/EndpointError.js @@ -0,0 +1 @@ +export { EndpointError } from "@smithy/util-endpoints"; diff --git a/amplify/functions/fetchDocuments/node_modules/@aws-sdk/util-endpoints/dist-es/types/EndpointRuleObject.js b/amplify/functions/fetchDocuments/node_modules/@aws-sdk/util-endpoints/dist-es/types/EndpointRuleObject.js new file mode 100644 index 0000000..cb0ff5c --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@aws-sdk/util-endpoints/dist-es/types/EndpointRuleObject.js @@ -0,0 +1 @@ +export {}; diff --git a/amplify/functions/fetchDocuments/node_modules/@aws-sdk/util-endpoints/dist-es/types/ErrorRuleObject.js b/amplify/functions/fetchDocuments/node_modules/@aws-sdk/util-endpoints/dist-es/types/ErrorRuleObject.js new file mode 100644 index 0000000..cb0ff5c --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@aws-sdk/util-endpoints/dist-es/types/ErrorRuleObject.js @@ -0,0 +1 @@ +export {}; diff --git a/amplify/functions/fetchDocuments/node_modules/@aws-sdk/util-endpoints/dist-es/types/RuleSetObject.js b/amplify/functions/fetchDocuments/node_modules/@aws-sdk/util-endpoints/dist-es/types/RuleSetObject.js new file mode 100644 index 0000000..cb0ff5c --- /dev/null +++ 
b/amplify/functions/fetchDocuments/node_modules/@aws-sdk/util-endpoints/dist-es/types/RuleSetObject.js @@ -0,0 +1 @@ +export {}; diff --git a/amplify/functions/fetchDocuments/node_modules/@aws-sdk/util-endpoints/dist-es/types/TreeRuleObject.js b/amplify/functions/fetchDocuments/node_modules/@aws-sdk/util-endpoints/dist-es/types/TreeRuleObject.js new file mode 100644 index 0000000..cb0ff5c --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@aws-sdk/util-endpoints/dist-es/types/TreeRuleObject.js @@ -0,0 +1 @@ +export {}; diff --git a/amplify/functions/fetchDocuments/node_modules/@aws-sdk/util-endpoints/dist-es/types/index.js b/amplify/functions/fetchDocuments/node_modules/@aws-sdk/util-endpoints/dist-es/types/index.js new file mode 100644 index 0000000..daba501 --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@aws-sdk/util-endpoints/dist-es/types/index.js @@ -0,0 +1,6 @@ +export * from "./EndpointError"; +export * from "./EndpointRuleObject"; +export * from "./ErrorRuleObject"; +export * from "./RuleSetObject"; +export * from "./TreeRuleObject"; +export * from "./shared"; diff --git a/amplify/functions/fetchDocuments/node_modules/@aws-sdk/util-endpoints/dist-es/types/shared.js b/amplify/functions/fetchDocuments/node_modules/@aws-sdk/util-endpoints/dist-es/types/shared.js new file mode 100644 index 0000000..cb0ff5c --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@aws-sdk/util-endpoints/dist-es/types/shared.js @@ -0,0 +1 @@ +export {}; diff --git a/amplify/functions/fetchDocuments/node_modules/@aws-sdk/util-endpoints/dist-types/aws.d.ts b/amplify/functions/fetchDocuments/node_modules/@aws-sdk/util-endpoints/dist-types/aws.d.ts new file mode 100644 index 0000000..13c64a9 --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@aws-sdk/util-endpoints/dist-types/aws.d.ts @@ -0,0 +1,2 @@ +import { EndpointFunctions } from "@smithy/util-endpoints"; +export declare const awsEndpointFunctions: EndpointFunctions; diff 
--git a/amplify/functions/fetchDocuments/node_modules/@aws-sdk/util-endpoints/dist-types/index.d.ts b/amplify/functions/fetchDocuments/node_modules/@aws-sdk/util-endpoints/dist-types/index.d.ts new file mode 100644 index 0000000..d046d90 --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@aws-sdk/util-endpoints/dist-types/index.d.ts @@ -0,0 +1,5 @@ +export * from "./aws"; +export * from "./lib/aws/partition"; +export * from "./lib/isIpAddress"; +export * from "./resolveEndpoint"; +export * from "./types"; diff --git a/amplify/functions/fetchDocuments/node_modules/@aws-sdk/util-endpoints/dist-types/lib/aws/index.d.ts b/amplify/functions/fetchDocuments/node_modules/@aws-sdk/util-endpoints/dist-types/lib/aws/index.d.ts new file mode 100644 index 0000000..03be049 --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@aws-sdk/util-endpoints/dist-types/lib/aws/index.d.ts @@ -0,0 +1,3 @@ +export * from "./isVirtualHostableS3Bucket"; +export * from "./parseArn"; +export * from "./partition"; diff --git a/amplify/functions/fetchDocuments/node_modules/@aws-sdk/util-endpoints/dist-types/lib/aws/isVirtualHostableS3Bucket.d.ts b/amplify/functions/fetchDocuments/node_modules/@aws-sdk/util-endpoints/dist-types/lib/aws/isVirtualHostableS3Bucket.d.ts new file mode 100644 index 0000000..25d46e4 --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@aws-sdk/util-endpoints/dist-types/lib/aws/isVirtualHostableS3Bucket.d.ts @@ -0,0 +1,5 @@ +/** + * Evaluates whether a string is a DNS compatible bucket name and can be used with + * virtual hosted style addressing. 
+ */ +export declare const isVirtualHostableS3Bucket: (value: string, allowSubDomains?: boolean) => boolean; diff --git a/amplify/functions/fetchDocuments/node_modules/@aws-sdk/util-endpoints/dist-types/lib/aws/parseArn.d.ts b/amplify/functions/fetchDocuments/node_modules/@aws-sdk/util-endpoints/dist-types/lib/aws/parseArn.d.ts new file mode 100644 index 0000000..fa5af83 --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@aws-sdk/util-endpoints/dist-types/lib/aws/parseArn.d.ts @@ -0,0 +1,7 @@ +import { EndpointARN } from "@smithy/types"; +/** + * Evaluates a single string argument value, and returns an object containing + * details about the parsed ARN. + * If the input was not a valid ARN, the function returns null. + */ +export declare const parseArn: (value: string) => EndpointARN | null; diff --git a/amplify/functions/fetchDocuments/node_modules/@aws-sdk/util-endpoints/dist-types/lib/aws/partition.d.ts b/amplify/functions/fetchDocuments/node_modules/@aws-sdk/util-endpoints/dist-types/lib/aws/partition.d.ts new file mode 100644 index 0000000..96d14e4 --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@aws-sdk/util-endpoints/dist-types/lib/aws/partition.d.ts @@ -0,0 +1,38 @@ +import { EndpointPartition } from "@smithy/types"; +export type PartitionsInfo = { + partitions: Array<{ + id: string; + outputs: { + dnsSuffix: string; + dualStackDnsSuffix: string; + name: string; + supportsDualStack: boolean; + supportsFIPS: boolean; + }; + regionRegex: string; + regions: Record; + }>; +}; +/** + * Evaluates a single string argument value as a region, and matches the + * string value to an AWS partition. + * The matcher MUST always return a successful object describing the partition + * that the region has been determined to be a part of. + */ +export declare const partition: (value: string) => EndpointPartition; +/** + * Set custom partitions.json data. 
+ * @internal + */ +export declare const setPartitionInfo: (partitionsInfo: PartitionsInfo, userAgentPrefix?: string) => void; +/** + * Reset to the default partitions.json data. + * @internal + */ +export declare const useDefaultPartitionInfo: () => void; +/** + * @internal + */ +export declare const getUserAgentPrefix: () => string; diff --git a/amplify/functions/fetchDocuments/node_modules/@aws-sdk/util-endpoints/dist-types/lib/isIpAddress.d.ts b/amplify/functions/fetchDocuments/node_modules/@aws-sdk/util-endpoints/dist-types/lib/isIpAddress.d.ts new file mode 100644 index 0000000..59bfcd8 --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@aws-sdk/util-endpoints/dist-types/lib/isIpAddress.d.ts @@ -0,0 +1 @@ +export { isIpAddress } from "@smithy/util-endpoints"; diff --git a/amplify/functions/fetchDocuments/node_modules/@aws-sdk/util-endpoints/dist-types/resolveEndpoint.d.ts b/amplify/functions/fetchDocuments/node_modules/@aws-sdk/util-endpoints/dist-types/resolveEndpoint.d.ts new file mode 100644 index 0000000..e2453f7 --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@aws-sdk/util-endpoints/dist-types/resolveEndpoint.d.ts @@ -0,0 +1 @@ +export { resolveEndpoint } from "@smithy/util-endpoints"; diff --git a/amplify/functions/fetchDocuments/node_modules/@aws-sdk/util-endpoints/dist-types/ts3.4/aws.d.ts b/amplify/functions/fetchDocuments/node_modules/@aws-sdk/util-endpoints/dist-types/ts3.4/aws.d.ts new file mode 100644 index 0000000..13c64a9 --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@aws-sdk/util-endpoints/dist-types/ts3.4/aws.d.ts @@ -0,0 +1,2 @@ +import { EndpointFunctions } from "@smithy/util-endpoints"; +export declare const awsEndpointFunctions: EndpointFunctions; diff --git a/amplify/functions/fetchDocuments/node_modules/@aws-sdk/util-endpoints/dist-types/ts3.4/index.d.ts b/amplify/functions/fetchDocuments/node_modules/@aws-sdk/util-endpoints/dist-types/ts3.4/index.d.ts new file mode 100644 index 
0000000..d046d90 --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@aws-sdk/util-endpoints/dist-types/ts3.4/index.d.ts @@ -0,0 +1,5 @@ +export * from "./aws"; +export * from "./lib/aws/partition"; +export * from "./lib/isIpAddress"; +export * from "./resolveEndpoint"; +export * from "./types"; diff --git a/amplify/functions/fetchDocuments/node_modules/@aws-sdk/util-endpoints/dist-types/ts3.4/lib/aws/index.d.ts b/amplify/functions/fetchDocuments/node_modules/@aws-sdk/util-endpoints/dist-types/ts3.4/lib/aws/index.d.ts new file mode 100644 index 0000000..03be049 --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@aws-sdk/util-endpoints/dist-types/ts3.4/lib/aws/index.d.ts @@ -0,0 +1,3 @@ +export * from "./isVirtualHostableS3Bucket"; +export * from "./parseArn"; +export * from "./partition"; diff --git a/amplify/functions/fetchDocuments/node_modules/@aws-sdk/util-endpoints/dist-types/ts3.4/lib/aws/isVirtualHostableS3Bucket.d.ts b/amplify/functions/fetchDocuments/node_modules/@aws-sdk/util-endpoints/dist-types/ts3.4/lib/aws/isVirtualHostableS3Bucket.d.ts new file mode 100644 index 0000000..5ef3296 --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@aws-sdk/util-endpoints/dist-types/ts3.4/lib/aws/isVirtualHostableS3Bucket.d.ts @@ -0,0 +1,4 @@ +export declare const isVirtualHostableS3Bucket: ( + value: string, + allowSubDomains?: boolean +) => boolean; diff --git a/amplify/functions/fetchDocuments/node_modules/@aws-sdk/util-endpoints/dist-types/ts3.4/lib/aws/parseArn.d.ts b/amplify/functions/fetchDocuments/node_modules/@aws-sdk/util-endpoints/dist-types/ts3.4/lib/aws/parseArn.d.ts new file mode 100644 index 0000000..690d459 --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@aws-sdk/util-endpoints/dist-types/ts3.4/lib/aws/parseArn.d.ts @@ -0,0 +1,2 @@ +import { EndpointARN } from "@smithy/types"; +export declare const parseArn: (value: string) => EndpointARN | null; diff --git 
a/amplify/functions/fetchDocuments/node_modules/@aws-sdk/util-endpoints/dist-types/ts3.4/lib/aws/partition.d.ts b/amplify/functions/fetchDocuments/node_modules/@aws-sdk/util-endpoints/dist-types/ts3.4/lib/aws/partition.d.ts new file mode 100644 index 0000000..0683113 --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@aws-sdk/util-endpoints/dist-types/ts3.4/lib/aws/partition.d.ts @@ -0,0 +1,28 @@ +import { EndpointPartition } from "@smithy/types"; +export type PartitionsInfo = { + partitions: Array<{ + id: string; + outputs: { + dnsSuffix: string; + dualStackDnsSuffix: string; + name: string; + supportsDualStack: boolean; + supportsFIPS: boolean; + }; + regionRegex: string; + regions: Record< + string, + | { + description?: string; + } + | undefined + >; + }>; +}; +export declare const partition: (value: string) => EndpointPartition; +export declare const setPartitionInfo: ( + partitionsInfo: PartitionsInfo, + userAgentPrefix?: string +) => void; +export declare const useDefaultPartitionInfo: () => void; +export declare const getUserAgentPrefix: () => string; diff --git a/amplify/functions/fetchDocuments/node_modules/@aws-sdk/util-endpoints/dist-types/ts3.4/lib/isIpAddress.d.ts b/amplify/functions/fetchDocuments/node_modules/@aws-sdk/util-endpoints/dist-types/ts3.4/lib/isIpAddress.d.ts new file mode 100644 index 0000000..59bfcd8 --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@aws-sdk/util-endpoints/dist-types/ts3.4/lib/isIpAddress.d.ts @@ -0,0 +1 @@ +export { isIpAddress } from "@smithy/util-endpoints"; diff --git a/amplify/functions/fetchDocuments/node_modules/@aws-sdk/util-endpoints/dist-types/ts3.4/resolveEndpoint.d.ts b/amplify/functions/fetchDocuments/node_modules/@aws-sdk/util-endpoints/dist-types/ts3.4/resolveEndpoint.d.ts new file mode 100644 index 0000000..e2453f7 --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@aws-sdk/util-endpoints/dist-types/ts3.4/resolveEndpoint.d.ts @@ -0,0 +1 @@ +export { 
resolveEndpoint } from "@smithy/util-endpoints"; diff --git a/amplify/functions/fetchDocuments/node_modules/@aws-sdk/util-endpoints/dist-types/ts3.4/types/EndpointError.d.ts b/amplify/functions/fetchDocuments/node_modules/@aws-sdk/util-endpoints/dist-types/ts3.4/types/EndpointError.d.ts new file mode 100644 index 0000000..521e688 --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@aws-sdk/util-endpoints/dist-types/ts3.4/types/EndpointError.d.ts @@ -0,0 +1 @@ +export { EndpointError } from "@smithy/util-endpoints"; diff --git a/amplify/functions/fetchDocuments/node_modules/@aws-sdk/util-endpoints/dist-types/ts3.4/types/EndpointRuleObject.d.ts b/amplify/functions/fetchDocuments/node_modules/@aws-sdk/util-endpoints/dist-types/ts3.4/types/EndpointRuleObject.d.ts new file mode 100644 index 0000000..b48af7f --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@aws-sdk/util-endpoints/dist-types/ts3.4/types/EndpointRuleObject.d.ts @@ -0,0 +1,6 @@ +export { + EndpointObjectProperties, + EndpointObjectHeaders, + EndpointObject, + EndpointRuleObject, +} from "@smithy/util-endpoints"; diff --git a/amplify/functions/fetchDocuments/node_modules/@aws-sdk/util-endpoints/dist-types/ts3.4/types/ErrorRuleObject.d.ts b/amplify/functions/fetchDocuments/node_modules/@aws-sdk/util-endpoints/dist-types/ts3.4/types/ErrorRuleObject.d.ts new file mode 100644 index 0000000..e7b8881 --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@aws-sdk/util-endpoints/dist-types/ts3.4/types/ErrorRuleObject.d.ts @@ -0,0 +1 @@ +export { ErrorRuleObject } from "@smithy/util-endpoints"; diff --git a/amplify/functions/fetchDocuments/node_modules/@aws-sdk/util-endpoints/dist-types/ts3.4/types/RuleSetObject.d.ts b/amplify/functions/fetchDocuments/node_modules/@aws-sdk/util-endpoints/dist-types/ts3.4/types/RuleSetObject.d.ts new file mode 100644 index 0000000..2a489c6 --- /dev/null +++ 
b/amplify/functions/fetchDocuments/node_modules/@aws-sdk/util-endpoints/dist-types/ts3.4/types/RuleSetObject.d.ts @@ -0,0 +1,5 @@ +export { + DeprecatedObject, + ParameterObject, + RuleSetObject, +} from "@smithy/util-endpoints"; diff --git a/amplify/functions/fetchDocuments/node_modules/@aws-sdk/util-endpoints/dist-types/ts3.4/types/TreeRuleObject.d.ts b/amplify/functions/fetchDocuments/node_modules/@aws-sdk/util-endpoints/dist-types/ts3.4/types/TreeRuleObject.d.ts new file mode 100644 index 0000000..716ddcf --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@aws-sdk/util-endpoints/dist-types/ts3.4/types/TreeRuleObject.d.ts @@ -0,0 +1 @@ +export { RuleSetRules, TreeRuleObject } from "@smithy/util-endpoints"; diff --git a/amplify/functions/fetchDocuments/node_modules/@aws-sdk/util-endpoints/dist-types/ts3.4/types/index.d.ts b/amplify/functions/fetchDocuments/node_modules/@aws-sdk/util-endpoints/dist-types/ts3.4/types/index.d.ts new file mode 100644 index 0000000..daba501 --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@aws-sdk/util-endpoints/dist-types/ts3.4/types/index.d.ts @@ -0,0 +1,6 @@ +export * from "./EndpointError"; +export * from "./EndpointRuleObject"; +export * from "./ErrorRuleObject"; +export * from "./RuleSetObject"; +export * from "./TreeRuleObject"; +export * from "./shared"; diff --git a/amplify/functions/fetchDocuments/node_modules/@aws-sdk/util-endpoints/dist-types/ts3.4/types/shared.d.ts b/amplify/functions/fetchDocuments/node_modules/@aws-sdk/util-endpoints/dist-types/ts3.4/types/shared.d.ts new file mode 100644 index 0000000..cfd2248 --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@aws-sdk/util-endpoints/dist-types/ts3.4/types/shared.d.ts @@ -0,0 +1,12 @@ +export { + ReferenceObject, + FunctionObject, + FunctionArgv, + FunctionReturn, + ConditionObject, + Expression, + EndpointParams, + EndpointResolverOptions, + ReferenceRecord, + EvaluateOptions, +} from "@smithy/util-endpoints"; diff --git 
a/amplify/functions/fetchDocuments/node_modules/@aws-sdk/util-endpoints/dist-types/types/EndpointError.d.ts b/amplify/functions/fetchDocuments/node_modules/@aws-sdk/util-endpoints/dist-types/types/EndpointError.d.ts new file mode 100644 index 0000000..521e688 --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@aws-sdk/util-endpoints/dist-types/types/EndpointError.d.ts @@ -0,0 +1 @@ +export { EndpointError } from "@smithy/util-endpoints"; diff --git a/amplify/functions/fetchDocuments/node_modules/@aws-sdk/util-endpoints/dist-types/types/EndpointRuleObject.d.ts b/amplify/functions/fetchDocuments/node_modules/@aws-sdk/util-endpoints/dist-types/types/EndpointRuleObject.d.ts new file mode 100644 index 0000000..ef666fe --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@aws-sdk/util-endpoints/dist-types/types/EndpointRuleObject.d.ts @@ -0,0 +1 @@ +export { EndpointObjectProperties, EndpointObjectHeaders, EndpointObject, EndpointRuleObject, } from "@smithy/util-endpoints"; diff --git a/amplify/functions/fetchDocuments/node_modules/@aws-sdk/util-endpoints/dist-types/types/ErrorRuleObject.d.ts b/amplify/functions/fetchDocuments/node_modules/@aws-sdk/util-endpoints/dist-types/types/ErrorRuleObject.d.ts new file mode 100644 index 0000000..e7b8881 --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@aws-sdk/util-endpoints/dist-types/types/ErrorRuleObject.d.ts @@ -0,0 +1 @@ +export { ErrorRuleObject } from "@smithy/util-endpoints"; diff --git a/amplify/functions/fetchDocuments/node_modules/@aws-sdk/util-endpoints/dist-types/types/RuleSetObject.d.ts b/amplify/functions/fetchDocuments/node_modules/@aws-sdk/util-endpoints/dist-types/types/RuleSetObject.d.ts new file mode 100644 index 0000000..c052af0 --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@aws-sdk/util-endpoints/dist-types/types/RuleSetObject.d.ts @@ -0,0 +1 @@ +export { DeprecatedObject, ParameterObject, RuleSetObject } from "@smithy/util-endpoints"; diff 
--git a/amplify/functions/fetchDocuments/node_modules/@aws-sdk/util-endpoints/dist-types/types/TreeRuleObject.d.ts b/amplify/functions/fetchDocuments/node_modules/@aws-sdk/util-endpoints/dist-types/types/TreeRuleObject.d.ts new file mode 100644 index 0000000..716ddcf --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@aws-sdk/util-endpoints/dist-types/types/TreeRuleObject.d.ts @@ -0,0 +1 @@ +export { RuleSetRules, TreeRuleObject } from "@smithy/util-endpoints"; diff --git a/amplify/functions/fetchDocuments/node_modules/@aws-sdk/util-endpoints/dist-types/types/index.d.ts b/amplify/functions/fetchDocuments/node_modules/@aws-sdk/util-endpoints/dist-types/types/index.d.ts new file mode 100644 index 0000000..daba501 --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@aws-sdk/util-endpoints/dist-types/types/index.d.ts @@ -0,0 +1,6 @@ +export * from "./EndpointError"; +export * from "./EndpointRuleObject"; +export * from "./ErrorRuleObject"; +export * from "./RuleSetObject"; +export * from "./TreeRuleObject"; +export * from "./shared"; diff --git a/amplify/functions/fetchDocuments/node_modules/@aws-sdk/util-endpoints/dist-types/types/shared.d.ts b/amplify/functions/fetchDocuments/node_modules/@aws-sdk/util-endpoints/dist-types/types/shared.d.ts new file mode 100644 index 0000000..af7cc53 --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@aws-sdk/util-endpoints/dist-types/types/shared.d.ts @@ -0,0 +1 @@ +export { ReferenceObject, FunctionObject, FunctionArgv, FunctionReturn, ConditionObject, Expression, EndpointParams, EndpointResolverOptions, ReferenceRecord, EvaluateOptions, } from "@smithy/util-endpoints"; diff --git a/amplify/functions/fetchDocuments/node_modules/@aws-sdk/util-endpoints/package.json b/amplify/functions/fetchDocuments/node_modules/@aws-sdk/util-endpoints/package.json new file mode 100644 index 0000000..36d8d2a --- /dev/null +++ 
b/amplify/functions/fetchDocuments/node_modules/@aws-sdk/util-endpoints/package.json @@ -0,0 +1,58 @@ +{ + "name": "@aws-sdk/util-endpoints", + "version": "3.787.0", + "description": "Utilities to help with endpoint resolution", + "main": "./dist-cjs/index.js", + "module": "./dist-es/index.js", + "types": "./dist-types/index.d.ts", + "scripts": { + "build": "concurrently 'yarn:build:cjs' 'yarn:build:es' 'yarn:build:types'", + "build:cjs": "node ../../scripts/compilation/inline util-endpoints", + "build:es": "tsc -p tsconfig.es.json", + "build:include:deps": "lerna run --scope $npm_package_name --include-dependencies build", + "build:types": "tsc -p tsconfig.types.json", + "build:types:downlevel": "downlevel-dts dist-types dist-types/ts3.4", + "clean": "rimraf ./dist-* && rimraf *.tsbuildinfo", + "test": "yarn g:vitest run", + "test:integration": "yarn g:vitest run -c vitest.config.integ.ts", + "test:watch": "yarn g:vitest watch", + "test:integration:watch": "yarn g:vitest watch -c vitest.config.integ.ts" + }, + "author": { + "name": "AWS SDK for JavaScript Team", + "url": "https://aws.amazon.com/javascript/" + }, + "license": "Apache-2.0", + "dependencies": { + "@aws-sdk/types": "3.775.0", + "@smithy/types": "^4.2.0", + "@smithy/util-endpoints": "^3.0.2", + "tslib": "^2.6.2" + }, + "engines": { + "node": ">=18.0.0" + }, + "typesVersions": { + "<4.0": { + "dist-types/*": [ + "dist-types/ts3.4/*" + ] + } + }, + "files": [ + "dist-*/**" + ], + "homepage": "https://github.com/aws/aws-sdk-js-v3/tree/main/packages/util-endpoints", + "repository": { + "type": "git", + "url": "https://github.com/aws/aws-sdk-js-v3.git", + "directory": "packages/util-endpoints" + }, + "devDependencies": { + "@tsconfig/recommended": "1.0.1", + "concurrently": "7.0.0", + "downlevel-dts": "0.10.1", + "rimraf": "3.0.2", + "typescript": "~5.2.2" + } +} diff --git a/amplify/functions/fetchDocuments/node_modules/@aws-sdk/util-locate-window/LICENSE 
b/amplify/functions/fetchDocuments/node_modules/@aws-sdk/util-locate-window/LICENSE new file mode 100644 index 0000000..7b6491b --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@aws-sdk/util-locate-window/LICENSE @@ -0,0 +1,201 @@ +Apache License + Version 2.0, January 2004 + http://www.apache.org/licenses/ + + TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION + + 1. Definitions. + + "License" shall mean the terms and conditions for use, reproduction, + and distribution as defined by Sections 1 through 9 of this document. + + "Licensor" shall mean the copyright owner or entity authorized by + the copyright owner that is granting the License. + + "Legal Entity" shall mean the union of the acting entity and all + other entities that control, are controlled by, or are under common + control with that entity. For the purposes of this definition, + "control" means (i) the power, direct or indirect, to cause the + direction or management of such entity, whether by contract or + otherwise, or (ii) ownership of fifty percent (50%) or more of the + outstanding shares, or (iii) beneficial ownership of such entity. + + "You" (or "Your") shall mean an individual or Legal Entity + exercising permissions granted by this License. + + "Source" form shall mean the preferred form for making modifications, + including but not limited to software source code, documentation + source, and configuration files. + + "Object" form shall mean any form resulting from mechanical + transformation or translation of a Source form, including but + not limited to compiled object code, generated documentation, + and conversions to other media types. + + "Work" shall mean the work of authorship, whether in Source or + Object form, made available under the License, as indicated by a + copyright notice that is included in or attached to the work + (an example is provided in the Appendix below). 
+ + "Derivative Works" shall mean any work, whether in Source or Object + form, that is based on (or derived from) the Work and for which the + editorial revisions, annotations, elaborations, or other modifications + represent, as a whole, an original work of authorship. For the purposes + of this License, Derivative Works shall not include works that remain + separable from, or merely link (or bind by name) to the interfaces of, + the Work and Derivative Works thereof. + + "Contribution" shall mean any work of authorship, including + the original version of the Work and any modifications or additions + to that Work or Derivative Works thereof, that is intentionally + submitted to Licensor for inclusion in the Work by the copyright owner + or by an individual or Legal Entity authorized to submit on behalf of + the copyright owner. For the purposes of this definition, "submitted" + means any form of electronic, verbal, or written communication sent + to the Licensor or its representatives, including but not limited to + communication on electronic mailing lists, source code control systems, + and issue tracking systems that are managed by, or on behalf of, the + Licensor for the purpose of discussing and improving the Work, but + excluding communication that is conspicuously marked or otherwise + designated in writing by the copyright owner as "Not a Contribution." + + "Contributor" shall mean Licensor and any individual or Legal Entity + on behalf of whom a Contribution has been received by Licensor and + subsequently incorporated within the Work. + + 2. Grant of Copyright License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + copyright license to reproduce, prepare Derivative Works of, + publicly display, publicly perform, sublicense, and distribute the + Work and such Derivative Works in Source or Object form. + + 3. 
Grant of Patent License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + (except as stated in this section) patent license to make, have made, + use, offer to sell, sell, import, and otherwise transfer the Work, + where such license applies only to those patent claims licensable + by such Contributor that are necessarily infringed by their + Contribution(s) alone or by combination of their Contribution(s) + with the Work to which such Contribution(s) was submitted. If You + institute patent litigation against any entity (including a + cross-claim or counterclaim in a lawsuit) alleging that the Work + or a Contribution incorporated within the Work constitutes direct + or contributory patent infringement, then any patent licenses + granted to You under this License for that Work shall terminate + as of the date such litigation is filed. + + 4. Redistribution. You may reproduce and distribute copies of the + Work or Derivative Works thereof in any medium, with or without + modifications, and in Source or Object form, provided that You + meet the following conditions: + + (a) You must give any other recipients of the Work or + Derivative Works a copy of this License; and + + (b) You must cause any modified files to carry prominent notices + stating that You changed the files; and + + (c) You must retain, in the Source form of any Derivative Works + that You distribute, all copyright, patent, trademark, and + attribution notices from the Source form of the Work, + excluding those notices that do not pertain to any part of + the Derivative Works; and + + (d) If the Work includes a "NOTICE" text file as part of its + distribution, then any Derivative Works that You distribute must + include a readable copy of the attribution notices contained + within such NOTICE file, excluding those notices that do not + pertain to any part of the Derivative 
Works, in at least one + of the following places: within a NOTICE text file distributed + as part of the Derivative Works; within the Source form or + documentation, if provided along with the Derivative Works; or, + within a display generated by the Derivative Works, if and + wherever such third-party notices normally appear. The contents + of the NOTICE file are for informational purposes only and + do not modify the License. You may add Your own attribution + notices within Derivative Works that You distribute, alongside + or as an addendum to the NOTICE text from the Work, provided + that such additional attribution notices cannot be construed + as modifying the License. + + You may add Your own copyright statement to Your modifications and + may provide additional or different license terms and conditions + for use, reproduction, or distribution of Your modifications, or + for any such Derivative Works as a whole, provided Your use, + reproduction, and distribution of the Work otherwise complies with + the conditions stated in this License. + + 5. Submission of Contributions. Unless You explicitly state otherwise, + any Contribution intentionally submitted for inclusion in the Work + by You to the Licensor shall be under the terms and conditions of + this License, without any additional terms or conditions. + Notwithstanding the above, nothing herein shall supersede or modify + the terms of any separate license agreement you may have executed + with Licensor regarding such Contributions. + + 6. Trademarks. This License does not grant permission to use the trade + names, trademarks, service marks, or product names of the Licensor, + except as required for reasonable and customary use in describing the + origin of the Work and reproducing the content of the NOTICE file. + + 7. Disclaimer of Warranty. 
Unless required by applicable law or + agreed to in writing, Licensor provides the Work (and each + Contributor provides its Contributions) on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or + implied, including, without limitation, any warranties or conditions + of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A + PARTICULAR PURPOSE. You are solely responsible for determining the + appropriateness of using or redistributing the Work and assume any + risks associated with Your exercise of permissions under this License. + + 8. Limitation of Liability. In no event and under no legal theory, + whether in tort (including negligence), contract, or otherwise, + unless required by applicable law (such as deliberate and grossly + negligent acts) or agreed to in writing, shall any Contributor be + liable to You for damages, including any direct, indirect, special, + incidental, or consequential damages of any character arising as a + result of this License or out of the use or inability to use the + Work (including but not limited to damages for loss of goodwill, + work stoppage, computer failure or malfunction, or any and all + other commercial damages or losses), even if such Contributor + has been advised of the possibility of such damages. + + 9. Accepting Warranty or Additional Liability. While redistributing + the Work or Derivative Works thereof, You may choose to offer, + and charge a fee for, acceptance of support, warranty, indemnity, + or other liability obligations and/or rights consistent with this + License. However, in accepting such obligations, You may act only + on Your own behalf and on Your sole responsibility, not on behalf + of any other Contributor, and only if You agree to indemnify, + defend, and hold each Contributor harmless for any liability + incurred by, or claims asserted against, such Contributor by reason + of your accepting any such warranty or additional liability. 
+ + END OF TERMS AND CONDITIONS + + APPENDIX: How to apply the Apache License to your work. + + To apply the Apache License to your work, attach the following + boilerplate notice, with the fields enclosed by brackets "{}" + replaced with your own identifying information. (Don't include + the brackets!) The text should be enclosed in the appropriate + comment syntax for the file format. We also recommend that a + file or class name and description of purpose be included on the + same "printed page" as the copyright notice for easier + identification within third-party archives. + + Copyright 2018-2020 Amazon.com, Inc. or its affiliates. All Rights Reserved. + + Licensed under the Apache License, Version 2.0 (the "License"); + you may not use this file except in compliance with the License. + You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + + Unless required by applicable law or agreed to in writing, software + distributed under the License is distributed on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + See the License for the specific language governing permissions and + limitations under the License. 
\ No newline at end of file diff --git a/amplify/functions/fetchDocuments/node_modules/@aws-sdk/util-locate-window/README.md b/amplify/functions/fetchDocuments/node_modules/@aws-sdk/util-locate-window/README.md new file mode 100644 index 0000000..cac53d3 --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@aws-sdk/util-locate-window/README.md @@ -0,0 +1,4 @@ +# @aws-sdk/util-locate-window + +[![NPM version](https://img.shields.io/npm/v/@aws-sdk/util-locate-window/latest.svg)](https://www.npmjs.com/package/@aws-sdk/util-locate-window) +[![NPM downloads](https://img.shields.io/npm/dm/@aws-sdk/util-locate-window.svg)](https://www.npmjs.com/package/@aws-sdk/util-locate-window) diff --git a/amplify/functions/fetchDocuments/node_modules/@aws-sdk/util-locate-window/dist-cjs/index.js b/amplify/functions/fetchDocuments/node_modules/@aws-sdk/util-locate-window/dist-cjs/index.js new file mode 100644 index 0000000..95a6423 --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@aws-sdk/util-locate-window/dist-cjs/index.js @@ -0,0 +1,42 @@ +"use strict"; +var __defProp = Object.defineProperty; +var __getOwnPropDesc = Object.getOwnPropertyDescriptor; +var __getOwnPropNames = Object.getOwnPropertyNames; +var __hasOwnProp = Object.prototype.hasOwnProperty; +var __name = (target, value) => __defProp(target, "name", { value, configurable: true }); +var __export = (target, all) => { + for (var name in all) + __defProp(target, name, { get: all[name], enumerable: true }); +}; +var __copyProps = (to, from, except, desc) => { + if (from && typeof from === "object" || typeof from === "function") { + for (let key of __getOwnPropNames(from)) + if (!__hasOwnProp.call(to, key) && key !== except) + __defProp(to, key, { get: () => from[key], enumerable: !(desc = __getOwnPropDesc(from, key)) || desc.enumerable }); + } + return to; +}; +var __toCommonJS = (mod) => __copyProps(__defProp({}, "__esModule", { value: true }), mod); + +// src/index.ts +var src_exports = {}; 
+__export(src_exports, { + locateWindow: () => locateWindow +}); +module.exports = __toCommonJS(src_exports); +var fallbackWindow = {}; +function locateWindow() { + if (typeof window !== "undefined") { + return window; + } else if (typeof self !== "undefined") { + return self; + } + return fallbackWindow; +} +__name(locateWindow, "locateWindow"); +// Annotate the CommonJS export names for ESM import in node: + +0 && (module.exports = { + locateWindow +}); + diff --git a/amplify/functions/fetchDocuments/node_modules/@aws-sdk/util-locate-window/dist-es/index.js b/amplify/functions/fetchDocuments/node_modules/@aws-sdk/util-locate-window/dist-es/index.js new file mode 100644 index 0000000..a51e644 --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@aws-sdk/util-locate-window/dist-es/index.js @@ -0,0 +1,10 @@ +const fallbackWindow = {}; +export function locateWindow() { + if (typeof window !== "undefined") { + return window; + } + else if (typeof self !== "undefined") { + return self; + } + return fallbackWindow; +} diff --git a/amplify/functions/fetchDocuments/node_modules/@aws-sdk/util-locate-window/dist-types/index.d.ts b/amplify/functions/fetchDocuments/node_modules/@aws-sdk/util-locate-window/dist-types/index.d.ts new file mode 100644 index 0000000..2b02d7f --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@aws-sdk/util-locate-window/dist-types/index.d.ts @@ -0,0 +1,6 @@ +/** + * Locates the global scope for a browser or browser-like environment. If + * neither `window` nor `self` is defined by the environment, the same object + * will be returned on each invocation. 
+ */ +export declare function locateWindow(): Window; diff --git a/amplify/functions/fetchDocuments/node_modules/@aws-sdk/util-locate-window/dist-types/ts3.4/index.d.ts b/amplify/functions/fetchDocuments/node_modules/@aws-sdk/util-locate-window/dist-types/ts3.4/index.d.ts new file mode 100644 index 0000000..a5bbba3 --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@aws-sdk/util-locate-window/dist-types/ts3.4/index.d.ts @@ -0,0 +1 @@ +export declare function locateWindow(): Window; diff --git a/amplify/functions/fetchDocuments/node_modules/@aws-sdk/util-locate-window/package.json b/amplify/functions/fetchDocuments/node_modules/@aws-sdk/util-locate-window/package.json new file mode 100644 index 0000000..2835b09 --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@aws-sdk/util-locate-window/package.json @@ -0,0 +1,53 @@ +{ + "name": "@aws-sdk/util-locate-window", + "version": "3.723.0", + "scripts": { + "build": "concurrently 'yarn:build:cjs' 'yarn:build:es' 'yarn:build:types'", + "build:cjs": "node ../../scripts/compilation/inline util-locate-window", + "build:es": "tsc -p tsconfig.es.json", + "build:include:deps": "lerna run --scope $npm_package_name --include-dependencies build", + "build:types": "tsc -p tsconfig.types.json", + "build:types:downlevel": "downlevel-dts dist-types dist-types/ts3.4", + "clean": "rimraf ./dist-* && rimraf *.tsbuildinfo", + "test": "yarn g:vitest run", + "test:watch": "yarn g:vitest watch" + }, + "author": { + "name": "AWS SDK for JavaScript Team", + "url": "https://aws.amazon.com/javascript/" + }, + "license": "Apache-2.0", + "dependencies": { + "tslib": "^2.6.2" + }, + "devDependencies": { + "@tsconfig/recommended": "1.0.1", + "@types/node": "^18.19.69", + "concurrently": "7.0.0", + "downlevel-dts": "0.10.1", + "rimraf": "3.0.2", + "typescript": "~5.2.2" + }, + "main": "./dist-cjs/index.js", + "module": "./dist-es/index.js", + "types": "./dist-types/index.d.ts", + "engines": { + "node": ">=18.0.0" + }, + 
"typesVersions": { + "<4.0": { + "dist-types/*": [ + "dist-types/ts3.4/*" + ] + } + }, + "files": [ + "dist-*/**" + ], + "homepage": "https://github.com/aws/aws-sdk-js-v3/tree/main/packages/util-locate-window", + "repository": { + "type": "git", + "url": "https://github.com/aws/aws-sdk-js-v3.git", + "directory": "packages/util-locate-window" + } +} diff --git a/amplify/functions/fetchDocuments/node_modules/@aws-sdk/util-user-agent-browser/LICENSE b/amplify/functions/fetchDocuments/node_modules/@aws-sdk/util-user-agent-browser/LICENSE new file mode 100644 index 0000000..dd65ae0 --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@aws-sdk/util-user-agent-browser/LICENSE @@ -0,0 +1,201 @@ + Apache License + Version 2.0, January 2004 + http://www.apache.org/licenses/ + + TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION + + 1. Definitions. + + "License" shall mean the terms and conditions for use, reproduction, + and distribution as defined by Sections 1 through 9 of this document. + + "Licensor" shall mean the copyright owner or entity authorized by + the copyright owner that is granting the License. + + "Legal Entity" shall mean the union of the acting entity and all + other entities that control, are controlled by, or are under common + control with that entity. For the purposes of this definition, + "control" means (i) the power, direct or indirect, to cause the + direction or management of such entity, whether by contract or + otherwise, or (ii) ownership of fifty percent (50%) or more of the + outstanding shares, or (iii) beneficial ownership of such entity. + + "You" (or "Your") shall mean an individual or Legal Entity + exercising permissions granted by this License. + + "Source" form shall mean the preferred form for making modifications, + including but not limited to software source code, documentation + source, and configuration files. 
+ + "Object" form shall mean any form resulting from mechanical + transformation or translation of a Source form, including but + not limited to compiled object code, generated documentation, + and conversions to other media types. + + "Work" shall mean the work of authorship, whether in Source or + Object form, made available under the License, as indicated by a + copyright notice that is included in or attached to the work + (an example is provided in the Appendix below). + + "Derivative Works" shall mean any work, whether in Source or Object + form, that is based on (or derived from) the Work and for which the + editorial revisions, annotations, elaborations, or other modifications + represent, as a whole, an original work of authorship. For the purposes + of this License, Derivative Works shall not include works that remain + separable from, or merely link (or bind by name) to the interfaces of, + the Work and Derivative Works thereof. + + "Contribution" shall mean any work of authorship, including + the original version of the Work and any modifications or additions + to that Work or Derivative Works thereof, that is intentionally + submitted to Licensor for inclusion in the Work by the copyright owner + or by an individual or Legal Entity authorized to submit on behalf of + the copyright owner. For the purposes of this definition, "submitted" + means any form of electronic, verbal, or written communication sent + to the Licensor or its representatives, including but not limited to + communication on electronic mailing lists, source code control systems, + and issue tracking systems that are managed by, or on behalf of, the + Licensor for the purpose of discussing and improving the Work, but + excluding communication that is conspicuously marked or otherwise + designated in writing by the copyright owner as "Not a Contribution." 
+ + "Contributor" shall mean Licensor and any individual or Legal Entity + on behalf of whom a Contribution has been received by Licensor and + subsequently incorporated within the Work. + + 2. Grant of Copyright License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + copyright license to reproduce, prepare Derivative Works of, + publicly display, publicly perform, sublicense, and distribute the + Work and such Derivative Works in Source or Object form. + + 3. Grant of Patent License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + (except as stated in this section) patent license to make, have made, + use, offer to sell, sell, import, and otherwise transfer the Work, + where such license applies only to those patent claims licensable + by such Contributor that are necessarily infringed by their + Contribution(s) alone or by combination of their Contribution(s) + with the Work to which such Contribution(s) was submitted. If You + institute patent litigation against any entity (including a + cross-claim or counterclaim in a lawsuit) alleging that the Work + or a Contribution incorporated within the Work constitutes direct + or contributory patent infringement, then any patent licenses + granted to You under this License for that Work shall terminate + as of the date such litigation is filed. + + 4. Redistribution. 
You may reproduce and distribute copies of the + Work or Derivative Works thereof in any medium, with or without + modifications, and in Source or Object form, provided that You + meet the following conditions: + + (a) You must give any other recipients of the Work or + Derivative Works a copy of this License; and + + (b) You must cause any modified files to carry prominent notices + stating that You changed the files; and + + (c) You must retain, in the Source form of any Derivative Works + that You distribute, all copyright, patent, trademark, and + attribution notices from the Source form of the Work, + excluding those notices that do not pertain to any part of + the Derivative Works; and + + (d) If the Work includes a "NOTICE" text file as part of its + distribution, then any Derivative Works that You distribute must + include a readable copy of the attribution notices contained + within such NOTICE file, excluding those notices that do not + pertain to any part of the Derivative Works, in at least one + of the following places: within a NOTICE text file distributed + as part of the Derivative Works; within the Source form or + documentation, if provided along with the Derivative Works; or, + within a display generated by the Derivative Works, if and + wherever such third-party notices normally appear. The contents + of the NOTICE file are for informational purposes only and + do not modify the License. You may add Your own attribution + notices within Derivative Works that You distribute, alongside + or as an addendum to the NOTICE text from the Work, provided + that such additional attribution notices cannot be construed + as modifying the License. 
+ + You may add Your own copyright statement to Your modifications and + may provide additional or different license terms and conditions + for use, reproduction, or distribution of Your modifications, or + for any such Derivative Works as a whole, provided Your use, + reproduction, and distribution of the Work otherwise complies with + the conditions stated in this License. + + 5. Submission of Contributions. Unless You explicitly state otherwise, + any Contribution intentionally submitted for inclusion in the Work + by You to the Licensor shall be under the terms and conditions of + this License, without any additional terms or conditions. + Notwithstanding the above, nothing herein shall supersede or modify + the terms of any separate license agreement you may have executed + with Licensor regarding such Contributions. + + 6. Trademarks. This License does not grant permission to use the trade + names, trademarks, service marks, or product names of the Licensor, + except as required for reasonable and customary use in describing the + origin of the Work and reproducing the content of the NOTICE file. + + 7. Disclaimer of Warranty. Unless required by applicable law or + agreed to in writing, Licensor provides the Work (and each + Contributor provides its Contributions) on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or + implied, including, without limitation, any warranties or conditions + of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A + PARTICULAR PURPOSE. You are solely responsible for determining the + appropriateness of using or redistributing the Work and assume any + risks associated with Your exercise of permissions under this License. + + 8. Limitation of Liability. 
In no event and under no legal theory, + whether in tort (including negligence), contract, or otherwise, + unless required by applicable law (such as deliberate and grossly + negligent acts) or agreed to in writing, shall any Contributor be + liable to You for damages, including any direct, indirect, special, + incidental, or consequential damages of any character arising as a + result of this License or out of the use or inability to use the + Work (including but not limited to damages for loss of goodwill, + work stoppage, computer failure or malfunction, or any and all + other commercial damages or losses), even if such Contributor + has been advised of the possibility of such damages. + + 9. Accepting Warranty or Additional Liability. While redistributing + the Work or Derivative Works thereof, You may choose to offer, + and charge a fee for, acceptance of support, warranty, indemnity, + or other liability obligations and/or rights consistent with this + License. However, in accepting such obligations, You may act only + on Your own behalf and on Your sole responsibility, not on behalf + of any other Contributor, and only if You agree to indemnify, + defend, and hold each Contributor harmless for any liability + incurred by, or claims asserted against, such Contributor by reason + of your accepting any such warranty or additional liability. + + END OF TERMS AND CONDITIONS + + APPENDIX: How to apply the Apache License to your work. + + To apply the Apache License to your work, attach the following + boilerplate notice, with the fields enclosed by brackets "{}" + replaced with your own identifying information. (Don't include + the brackets!) The text should be enclosed in the appropriate + comment syntax for the file format. We also recommend that a + file or class name and description of purpose be included on the + same "printed page" as the copyright notice for easier + identification within third-party archives. + + Copyright 2018-2020 Amazon.com, Inc. 
or its affiliates. All Rights Reserved. + + Licensed under the Apache License, Version 2.0 (the "License"); + you may not use this file except in compliance with the License. + You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + + Unless required by applicable law or agreed to in writing, software + distributed under the License is distributed on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + See the License for the specific language governing permissions and + limitations under the License. diff --git a/amplify/functions/fetchDocuments/node_modules/@aws-sdk/util-user-agent-browser/README.md b/amplify/functions/fetchDocuments/node_modules/@aws-sdk/util-user-agent-browser/README.md new file mode 100644 index 0000000..f2b6c62 --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@aws-sdk/util-user-agent-browser/README.md @@ -0,0 +1,10 @@ +# @aws-sdk/util-user-agent-browser + +[![NPM version](https://img.shields.io/npm/v/@aws-sdk/util-user-agent-browser/latest.svg)](https://www.npmjs.com/package/@aws-sdk/util-user-agent-browser) +[![NPM downloads](https://img.shields.io/npm/dm/@aws-sdk/util-user-agent-browser.svg)](https://www.npmjs.com/package/@aws-sdk/util-user-agent-browser) + +> An internal package + +## Usage + +You probably shouldn't, at least directly. 
diff --git a/amplify/functions/fetchDocuments/node_modules/@aws-sdk/util-user-agent-browser/dist-cjs/configurations.js b/amplify/functions/fetchDocuments/node_modules/@aws-sdk/util-user-agent-browser/dist-cjs/configurations.js new file mode 100644 index 0000000..c8ad2e5 --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@aws-sdk/util-user-agent-browser/dist-cjs/configurations.js @@ -0,0 +1,2 @@ +"use strict"; +Object.defineProperty(exports, "__esModule", { value: true }); diff --git a/amplify/functions/fetchDocuments/node_modules/@aws-sdk/util-user-agent-browser/dist-cjs/index.js b/amplify/functions/fetchDocuments/node_modules/@aws-sdk/util-user-agent-browser/dist-cjs/index.js new file mode 100644 index 0000000..aaf7621 --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@aws-sdk/util-user-agent-browser/dist-cjs/index.js @@ -0,0 +1,27 @@ +"use strict"; +Object.defineProperty(exports, "__esModule", { value: true }); +exports.defaultUserAgent = exports.createDefaultUserAgentProvider = void 0; +const tslib_1 = require("tslib"); +const bowser_1 = tslib_1.__importDefault(require("bowser")); +const createDefaultUserAgentProvider = ({ serviceId, clientVersion }) => async (config) => { + const parsedUA = typeof window !== "undefined" && window?.navigator?.userAgent + ? bowser_1.default.parse(window.navigator.userAgent) + : undefined; + const sections = [ + ["aws-sdk-js", clientVersion], + ["ua", "2.1"], + [`os/${parsedUA?.os?.name || "other"}`, parsedUA?.os?.version], + ["lang/js"], + ["md/browser", `${parsedUA?.browser?.name ?? "unknown"}_${parsedUA?.browser?.version ?? 
"unknown"}`], + ]; + if (serviceId) { + sections.push([`api/${serviceId}`, clientVersion]); + } + const appId = await config?.userAgentAppId?.(); + if (appId) { + sections.push([`app/${appId}`]); + } + return sections; +}; +exports.createDefaultUserAgentProvider = createDefaultUserAgentProvider; +exports.defaultUserAgent = exports.createDefaultUserAgentProvider; diff --git a/amplify/functions/fetchDocuments/node_modules/@aws-sdk/util-user-agent-browser/dist-cjs/index.native.js b/amplify/functions/fetchDocuments/node_modules/@aws-sdk/util-user-agent-browser/dist-cjs/index.native.js new file mode 100644 index 0000000..4d06e36 --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@aws-sdk/util-user-agent-browser/dist-cjs/index.native.js @@ -0,0 +1,22 @@ +"use strict"; +Object.defineProperty(exports, "__esModule", { value: true }); +exports.defaultUserAgent = exports.createDefaultUserAgentProvider = void 0; +const createDefaultUserAgentProvider = ({ serviceId, clientVersion }) => async (config) => { + const sections = [ + ["aws-sdk-js", clientVersion], + ["ua", "2.1"], + ["os/other"], + ["lang/js"], + ["md/rn"], + ]; + if (serviceId) { + sections.push([`api/${serviceId}`, clientVersion]); + } + const appId = await config?.userAgentAppId?.(); + if (appId) { + sections.push([`app/${appId}`]); + } + return sections; +}; +exports.createDefaultUserAgentProvider = createDefaultUserAgentProvider; +exports.defaultUserAgent = exports.createDefaultUserAgentProvider; diff --git a/amplify/functions/fetchDocuments/node_modules/@aws-sdk/util-user-agent-browser/dist-es/configurations.js b/amplify/functions/fetchDocuments/node_modules/@aws-sdk/util-user-agent-browser/dist-es/configurations.js new file mode 100644 index 0000000..cb0ff5c --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@aws-sdk/util-user-agent-browser/dist-es/configurations.js @@ -0,0 +1 @@ +export {}; diff --git 
a/amplify/functions/fetchDocuments/node_modules/@aws-sdk/util-user-agent-browser/dist-es/index.js b/amplify/functions/fetchDocuments/node_modules/@aws-sdk/util-user-agent-browser/dist-es/index.js new file mode 100644 index 0000000..1584d7e --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@aws-sdk/util-user-agent-browser/dist-es/index.js @@ -0,0 +1,22 @@ +import bowser from "bowser"; +export const createDefaultUserAgentProvider = ({ serviceId, clientVersion }) => async (config) => { + const parsedUA = typeof window !== "undefined" && window?.navigator?.userAgent + ? bowser.parse(window.navigator.userAgent) + : undefined; + const sections = [ + ["aws-sdk-js", clientVersion], + ["ua", "2.1"], + [`os/${parsedUA?.os?.name || "other"}`, parsedUA?.os?.version], + ["lang/js"], + ["md/browser", `${parsedUA?.browser?.name ?? "unknown"}_${parsedUA?.browser?.version ?? "unknown"}`], + ]; + if (serviceId) { + sections.push([`api/${serviceId}`, clientVersion]); + } + const appId = await config?.userAgentAppId?.(); + if (appId) { + sections.push([`app/${appId}`]); + } + return sections; +}; +export const defaultUserAgent = createDefaultUserAgentProvider; diff --git a/amplify/functions/fetchDocuments/node_modules/@aws-sdk/util-user-agent-browser/dist-es/index.native.js b/amplify/functions/fetchDocuments/node_modules/@aws-sdk/util-user-agent-browser/dist-es/index.native.js new file mode 100644 index 0000000..04c7ae5 --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@aws-sdk/util-user-agent-browser/dist-es/index.native.js @@ -0,0 +1,18 @@ +export const createDefaultUserAgentProvider = ({ serviceId, clientVersion }) => async (config) => { + const sections = [ + ["aws-sdk-js", clientVersion], + ["ua", "2.1"], + ["os/other"], + ["lang/js"], + ["md/rn"], + ]; + if (serviceId) { + sections.push([`api/${serviceId}`, clientVersion]); + } + const appId = await config?.userAgentAppId?.(); + if (appId) { + sections.push([`app/${appId}`]); + } + return 
sections; +}; +export const defaultUserAgent = createDefaultUserAgentProvider; diff --git a/amplify/functions/fetchDocuments/node_modules/@aws-sdk/util-user-agent-browser/dist-types/configurations.d.ts b/amplify/functions/fetchDocuments/node_modules/@aws-sdk/util-user-agent-browser/dist-types/configurations.d.ts new file mode 100644 index 0000000..00537a9 --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@aws-sdk/util-user-agent-browser/dist-types/configurations.d.ts @@ -0,0 +1,7 @@ +/** + * @internal + */ +export interface DefaultUserAgentOptions { + serviceId?: string; + clientVersion: string; +} diff --git a/amplify/functions/fetchDocuments/node_modules/@aws-sdk/util-user-agent-browser/dist-types/index.d.ts b/amplify/functions/fetchDocuments/node_modules/@aws-sdk/util-user-agent-browser/dist-types/index.d.ts new file mode 100644 index 0000000..fb107d4 --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@aws-sdk/util-user-agent-browser/dist-types/index.d.ts @@ -0,0 +1,17 @@ +import { Provider, UserAgent } from "@smithy/types"; +import { DefaultUserAgentOptions } from "./configurations"; +export interface PreviouslyResolved { + userAgentAppId: Provider; +} +/** + * @internal + * + * Default provider to the user agent in browsers. It's a best effort to infer + * the device information. 
It uses bowser library to detect the browser and version + */ +export declare const createDefaultUserAgentProvider: ({ serviceId, clientVersion }: DefaultUserAgentOptions) => (config?: PreviouslyResolved) => Promise; +/** + * @internal + * @deprecated use createDefaultUserAgentProvider + */ +export declare const defaultUserAgent: ({ serviceId, clientVersion }: DefaultUserAgentOptions) => (config?: PreviouslyResolved) => Promise; diff --git a/amplify/functions/fetchDocuments/node_modules/@aws-sdk/util-user-agent-browser/dist-types/index.native.d.ts b/amplify/functions/fetchDocuments/node_modules/@aws-sdk/util-user-agent-browser/dist-types/index.native.d.ts new file mode 100644 index 0000000..5b4926b --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@aws-sdk/util-user-agent-browser/dist-types/index.native.d.ts @@ -0,0 +1,17 @@ +import { Provider, UserAgent } from "@smithy/types"; +import { DefaultUserAgentOptions } from "./configurations"; +export interface PreviouslyResolved { + userAgentAppId: Provider; +} +/** + * @internal + * + * Default provider to the user agent in ReactNative. It's a best effort to infer + * the device information. 
It uses bowser library to detect the browser and virsion + */ +export declare const createDefaultUserAgentProvider: ({ serviceId, clientVersion }: DefaultUserAgentOptions) => (config?: PreviouslyResolved) => Promise; +/** + * @internal + * @deprecated use createDefaultUserAgentProvider + */ +export declare const defaultUserAgent: ({ serviceId, clientVersion }: DefaultUserAgentOptions) => (config?: PreviouslyResolved) => Promise; diff --git a/amplify/functions/fetchDocuments/node_modules/@aws-sdk/util-user-agent-browser/dist-types/ts3.4/configurations.d.ts b/amplify/functions/fetchDocuments/node_modules/@aws-sdk/util-user-agent-browser/dist-types/ts3.4/configurations.d.ts new file mode 100644 index 0000000..1428231 --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@aws-sdk/util-user-agent-browser/dist-types/ts3.4/configurations.d.ts @@ -0,0 +1,4 @@ +export interface DefaultUserAgentOptions { + serviceId?: string; + clientVersion: string; +} diff --git a/amplify/functions/fetchDocuments/node_modules/@aws-sdk/util-user-agent-browser/dist-types/ts3.4/index.d.ts b/amplify/functions/fetchDocuments/node_modules/@aws-sdk/util-user-agent-browser/dist-types/ts3.4/index.d.ts new file mode 100644 index 0000000..32e643a --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@aws-sdk/util-user-agent-browser/dist-types/ts3.4/index.d.ts @@ -0,0 +1,17 @@ +import { Provider, UserAgent } from "@smithy/types"; +import { DefaultUserAgentOptions } from "./configurations"; +export interface PreviouslyResolved { + userAgentAppId: Provider; +} +export declare const createDefaultUserAgentProvider: ({ + serviceId, + clientVersion, +}: DefaultUserAgentOptions) => ( + config?: PreviouslyResolved +) => Promise; +export declare const defaultUserAgent: ({ + serviceId, + clientVersion, +}: DefaultUserAgentOptions) => ( + config?: PreviouslyResolved +) => Promise; diff --git 
a/amplify/functions/fetchDocuments/node_modules/@aws-sdk/util-user-agent-browser/dist-types/ts3.4/index.native.d.ts b/amplify/functions/fetchDocuments/node_modules/@aws-sdk/util-user-agent-browser/dist-types/ts3.4/index.native.d.ts new file mode 100644 index 0000000..32e643a --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@aws-sdk/util-user-agent-browser/dist-types/ts3.4/index.native.d.ts @@ -0,0 +1,17 @@ +import { Provider, UserAgent } from "@smithy/types"; +import { DefaultUserAgentOptions } from "./configurations"; +export interface PreviouslyResolved { + userAgentAppId: Provider; +} +export declare const createDefaultUserAgentProvider: ({ + serviceId, + clientVersion, +}: DefaultUserAgentOptions) => ( + config?: PreviouslyResolved +) => Promise; +export declare const defaultUserAgent: ({ + serviceId, + clientVersion, +}: DefaultUserAgentOptions) => ( + config?: PreviouslyResolved +) => Promise; diff --git a/amplify/functions/fetchDocuments/node_modules/@aws-sdk/util-user-agent-browser/package.json b/amplify/functions/fetchDocuments/node_modules/@aws-sdk/util-user-agent-browser/package.json new file mode 100644 index 0000000..4065f6d --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@aws-sdk/util-user-agent-browser/package.json @@ -0,0 +1,54 @@ +{ + "name": "@aws-sdk/util-user-agent-browser", + "version": "3.775.0", + "scripts": { + "build": "concurrently 'yarn:build:cjs' 'yarn:build:es' 'yarn:build:types'", + "build:cjs": "node ../../scripts/compilation/inline util-user-agent-browser", + "build:es": "tsc -p tsconfig.es.json", + "build:include:deps": "lerna run --scope $npm_package_name --include-dependencies build", + "build:types": "tsc -p tsconfig.types.json", + "build:types:downlevel": "downlevel-dts dist-types dist-types/ts3.4", + "clean": "rimraf ./dist-* && rimraf *.tsbuildinfo", + "test": "yarn g:vitest run", + "test:watch": "yarn g:vitest watch" + }, + "main": "./dist-cjs/index.js", + "module": "./dist-es/index.js", + 
"browser": "./dist-es/index.js", + "types": "./dist-types/index.d.ts", + "author": { + "name": "AWS SDK for JavaScript Team", + "url": "https://aws.amazon.com/javascript/" + }, + "license": "Apache-2.0", + "react-native": "dist-es/index.native.js", + "dependencies": { + "@aws-sdk/types": "3.775.0", + "@smithy/types": "^4.2.0", + "bowser": "^2.11.0", + "tslib": "^2.6.2" + }, + "devDependencies": { + "@tsconfig/recommended": "1.0.1", + "concurrently": "7.0.0", + "downlevel-dts": "0.10.1", + "rimraf": "3.0.2", + "typescript": "~5.2.2" + }, + "typesVersions": { + "<4.0": { + "dist-types/*": [ + "dist-types/ts3.4/*" + ] + } + }, + "files": [ + "dist-*/**" + ], + "homepage": "https://github.com/aws/aws-sdk-js-v3/tree/main/packages/util-user-agent-browser", + "repository": { + "type": "git", + "url": "https://github.com/aws/aws-sdk-js-v3.git", + "directory": "packages/util-user-agent-browser" + } +} diff --git a/amplify/functions/fetchDocuments/node_modules/@aws-sdk/util-user-agent-node/LICENSE b/amplify/functions/fetchDocuments/node_modules/@aws-sdk/util-user-agent-node/LICENSE new file mode 100644 index 0000000..dd65ae0 --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@aws-sdk/util-user-agent-node/LICENSE @@ -0,0 +1,201 @@ + Apache License + Version 2.0, January 2004 + http://www.apache.org/licenses/ + + TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION + + 1. Definitions. + + "License" shall mean the terms and conditions for use, reproduction, + and distribution as defined by Sections 1 through 9 of this document. + + "Licensor" shall mean the copyright owner or entity authorized by + the copyright owner that is granting the License. + + "Legal Entity" shall mean the union of the acting entity and all + other entities that control, are controlled by, or are under common + control with that entity. 
For the purposes of this definition, + "control" means (i) the power, direct or indirect, to cause the + direction or management of such entity, whether by contract or + otherwise, or (ii) ownership of fifty percent (50%) or more of the + outstanding shares, or (iii) beneficial ownership of such entity. + + "You" (or "Your") shall mean an individual or Legal Entity + exercising permissions granted by this License. + + "Source" form shall mean the preferred form for making modifications, + including but not limited to software source code, documentation + source, and configuration files. + + "Object" form shall mean any form resulting from mechanical + transformation or translation of a Source form, including but + not limited to compiled object code, generated documentation, + and conversions to other media types. + + "Work" shall mean the work of authorship, whether in Source or + Object form, made available under the License, as indicated by a + copyright notice that is included in or attached to the work + (an example is provided in the Appendix below). + + "Derivative Works" shall mean any work, whether in Source or Object + form, that is based on (or derived from) the Work and for which the + editorial revisions, annotations, elaborations, or other modifications + represent, as a whole, an original work of authorship. For the purposes + of this License, Derivative Works shall not include works that remain + separable from, or merely link (or bind by name) to the interfaces of, + the Work and Derivative Works thereof. + + "Contribution" shall mean any work of authorship, including + the original version of the Work and any modifications or additions + to that Work or Derivative Works thereof, that is intentionally + submitted to Licensor for inclusion in the Work by the copyright owner + or by an individual or Legal Entity authorized to submit on behalf of + the copyright owner. 
For the purposes of this definition, "submitted" + means any form of electronic, verbal, or written communication sent + to the Licensor or its representatives, including but not limited to + communication on electronic mailing lists, source code control systems, + and issue tracking systems that are managed by, or on behalf of, the + Licensor for the purpose of discussing and improving the Work, but + excluding communication that is conspicuously marked or otherwise + designated in writing by the copyright owner as "Not a Contribution." + + "Contributor" shall mean Licensor and any individual or Legal Entity + on behalf of whom a Contribution has been received by Licensor and + subsequently incorporated within the Work. + + 2. Grant of Copyright License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + copyright license to reproduce, prepare Derivative Works of, + publicly display, publicly perform, sublicense, and distribute the + Work and such Derivative Works in Source or Object form. + + 3. Grant of Patent License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + (except as stated in this section) patent license to make, have made, + use, offer to sell, sell, import, and otherwise transfer the Work, + where such license applies only to those patent claims licensable + by such Contributor that are necessarily infringed by their + Contribution(s) alone or by combination of their Contribution(s) + with the Work to which such Contribution(s) was submitted. 
If You + institute patent litigation against any entity (including a + cross-claim or counterclaim in a lawsuit) alleging that the Work + or a Contribution incorporated within the Work constitutes direct + or contributory patent infringement, then any patent licenses + granted to You under this License for that Work shall terminate + as of the date such litigation is filed. + + 4. Redistribution. You may reproduce and distribute copies of the + Work or Derivative Works thereof in any medium, with or without + modifications, and in Source or Object form, provided that You + meet the following conditions: + + (a) You must give any other recipients of the Work or + Derivative Works a copy of this License; and + + (b) You must cause any modified files to carry prominent notices + stating that You changed the files; and + + (c) You must retain, in the Source form of any Derivative Works + that You distribute, all copyright, patent, trademark, and + attribution notices from the Source form of the Work, + excluding those notices that do not pertain to any part of + the Derivative Works; and + + (d) If the Work includes a "NOTICE" text file as part of its + distribution, then any Derivative Works that You distribute must + include a readable copy of the attribution notices contained + within such NOTICE file, excluding those notices that do not + pertain to any part of the Derivative Works, in at least one + of the following places: within a NOTICE text file distributed + as part of the Derivative Works; within the Source form or + documentation, if provided along with the Derivative Works; or, + within a display generated by the Derivative Works, if and + wherever such third-party notices normally appear. The contents + of the NOTICE file are for informational purposes only and + do not modify the License. 
You may add Your own attribution + notices within Derivative Works that You distribute, alongside + or as an addendum to the NOTICE text from the Work, provided + that such additional attribution notices cannot be construed + as modifying the License. + + You may add Your own copyright statement to Your modifications and + may provide additional or different license terms and conditions + for use, reproduction, or distribution of Your modifications, or + for any such Derivative Works as a whole, provided Your use, + reproduction, and distribution of the Work otherwise complies with + the conditions stated in this License. + + 5. Submission of Contributions. Unless You explicitly state otherwise, + any Contribution intentionally submitted for inclusion in the Work + by You to the Licensor shall be under the terms and conditions of + this License, without any additional terms or conditions. + Notwithstanding the above, nothing herein shall supersede or modify + the terms of any separate license agreement you may have executed + with Licensor regarding such Contributions. + + 6. Trademarks. This License does not grant permission to use the trade + names, trademarks, service marks, or product names of the Licensor, + except as required for reasonable and customary use in describing the + origin of the Work and reproducing the content of the NOTICE file. + + 7. Disclaimer of Warranty. Unless required by applicable law or + agreed to in writing, Licensor provides the Work (and each + Contributor provides its Contributions) on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or + implied, including, without limitation, any warranties or conditions + of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A + PARTICULAR PURPOSE. You are solely responsible for determining the + appropriateness of using or redistributing the Work and assume any + risks associated with Your exercise of permissions under this License. + + 8. 
Limitation of Liability. In no event and under no legal theory, + whether in tort (including negligence), contract, or otherwise, + unless required by applicable law (such as deliberate and grossly + negligent acts) or agreed to in writing, shall any Contributor be + liable to You for damages, including any direct, indirect, special, + incidental, or consequential damages of any character arising as a + result of this License or out of the use or inability to use the + Work (including but not limited to damages for loss of goodwill, + work stoppage, computer failure or malfunction, or any and all + other commercial damages or losses), even if such Contributor + has been advised of the possibility of such damages. + + 9. Accepting Warranty or Additional Liability. While redistributing + the Work or Derivative Works thereof, You may choose to offer, + and charge a fee for, acceptance of support, warranty, indemnity, + or other liability obligations and/or rights consistent with this + License. However, in accepting such obligations, You may act only + on Your own behalf and on Your sole responsibility, not on behalf + of any other Contributor, and only if You agree to indemnify, + defend, and hold each Contributor harmless for any liability + incurred by, or claims asserted against, such Contributor by reason + of your accepting any such warranty or additional liability. + + END OF TERMS AND CONDITIONS + + APPENDIX: How to apply the Apache License to your work. + + To apply the Apache License to your work, attach the following + boilerplate notice, with the fields enclosed by brackets "{}" + replaced with your own identifying information. (Don't include + the brackets!) The text should be enclosed in the appropriate + comment syntax for the file format. We also recommend that a + file or class name and description of purpose be included on the + same "printed page" as the copyright notice for easier + identification within third-party archives. 
+ + Copyright 2018-2020 Amazon.com, Inc. or its affiliates. All Rights Reserved. + + Licensed under the Apache License, Version 2.0 (the "License"); + you may not use this file except in compliance with the License. + You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + + Unless required by applicable law or agreed to in writing, software + distributed under the License is distributed on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + See the License for the specific language governing permissions and + limitations under the License. diff --git a/amplify/functions/fetchDocuments/node_modules/@aws-sdk/util-user-agent-node/README.md b/amplify/functions/fetchDocuments/node_modules/@aws-sdk/util-user-agent-node/README.md new file mode 100644 index 0000000..fccfbb5 --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@aws-sdk/util-user-agent-node/README.md @@ -0,0 +1,10 @@ +# @aws-sdk/util-user-agent-node + +[![NPM version](https://img.shields.io/npm/v/@aws-sdk/util-user-agent-node/latest.svg)](https://www.npmjs.com/package/@aws-sdk/util-user-agent-node) +[![NPM downloads](https://img.shields.io/npm/dm/@aws-sdk/util-user-agent-node.svg)](https://www.npmjs.com/package/@aws-sdk/util-user-agent-node) + +> An internal package + +## Usage + +You probably shouldn't, at least directly. 
diff --git a/amplify/functions/fetchDocuments/node_modules/@aws-sdk/util-user-agent-node/dist-cjs/index.js b/amplify/functions/fetchDocuments/node_modules/@aws-sdk/util-user-agent-node/dist-cjs/index.js new file mode 100644 index 0000000..083dccb --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@aws-sdk/util-user-agent-node/dist-cjs/index.js @@ -0,0 +1,102 @@ +"use strict"; +var __defProp = Object.defineProperty; +var __getOwnPropDesc = Object.getOwnPropertyDescriptor; +var __getOwnPropNames = Object.getOwnPropertyNames; +var __hasOwnProp = Object.prototype.hasOwnProperty; +var __name = (target, value) => __defProp(target, "name", { value, configurable: true }); +var __export = (target, all) => { + for (var name in all) + __defProp(target, name, { get: all[name], enumerable: true }); +}; +var __copyProps = (to, from, except, desc) => { + if (from && typeof from === "object" || typeof from === "function") { + for (let key of __getOwnPropNames(from)) + if (!__hasOwnProp.call(to, key) && key !== except) + __defProp(to, key, { get: () => from[key], enumerable: !(desc = __getOwnPropDesc(from, key)) || desc.enumerable }); + } + return to; +}; +var __toCommonJS = (mod) => __copyProps(__defProp({}, "__esModule", { value: true }), mod); + +// src/index.ts +var index_exports = {}; +__export(index_exports, { + NODE_APP_ID_CONFIG_OPTIONS: () => NODE_APP_ID_CONFIG_OPTIONS, + UA_APP_ID_ENV_NAME: () => UA_APP_ID_ENV_NAME, + UA_APP_ID_INI_NAME: () => UA_APP_ID_INI_NAME, + createDefaultUserAgentProvider: () => createDefaultUserAgentProvider, + crtAvailability: () => crtAvailability, + defaultUserAgent: () => defaultUserAgent +}); +module.exports = __toCommonJS(index_exports); + +// src/defaultUserAgent.ts +var import_os = require("os"); +var import_process = require("process"); + +// src/crt-availability.ts +var crtAvailability = { + isCrtAvailable: false +}; + +// src/is-crt-available.ts +var isCrtAvailable = /* @__PURE__ */ __name(() => { + if 
(crtAvailability.isCrtAvailable) { + return ["md/crt-avail"]; + } + return null; +}, "isCrtAvailable"); + +// src/defaultUserAgent.ts +var createDefaultUserAgentProvider = /* @__PURE__ */ __name(({ serviceId, clientVersion }) => { + return async (config) => { + const sections = [ + // sdk-metadata + ["aws-sdk-js", clientVersion], + // ua-metadata + ["ua", "2.1"], + // os-metadata + [`os/${(0, import_os.platform)()}`, (0, import_os.release)()], + // language-metadata + // ECMAScript edition doesn't matter in JS, so no version needed. + ["lang/js"], + ["md/nodejs", `${import_process.versions.node}`] + ]; + const crtAvailable = isCrtAvailable(); + if (crtAvailable) { + sections.push(crtAvailable); + } + if (serviceId) { + sections.push([`api/${serviceId}`, clientVersion]); + } + if (import_process.env.AWS_EXECUTION_ENV) { + sections.push([`exec-env/${import_process.env.AWS_EXECUTION_ENV}`]); + } + const appId = await config?.userAgentAppId?.(); + const resolvedUserAgent = appId ? [...sections, [`app/${appId}`]] : [...sections]; + return resolvedUserAgent; + }; +}, "createDefaultUserAgentProvider"); +var defaultUserAgent = createDefaultUserAgentProvider; + +// src/nodeAppIdConfigOptions.ts +var import_middleware_user_agent = require("@aws-sdk/middleware-user-agent"); +var UA_APP_ID_ENV_NAME = "AWS_SDK_UA_APP_ID"; +var UA_APP_ID_INI_NAME = "sdk_ua_app_id"; +var UA_APP_ID_INI_NAME_DEPRECATED = "sdk-ua-app-id"; +var NODE_APP_ID_CONFIG_OPTIONS = { + environmentVariableSelector: /* @__PURE__ */ __name((env2) => env2[UA_APP_ID_ENV_NAME], "environmentVariableSelector"), + configFileSelector: /* @__PURE__ */ __name((profile) => profile[UA_APP_ID_INI_NAME] ?? 
profile[UA_APP_ID_INI_NAME_DEPRECATED], "configFileSelector"), + default: import_middleware_user_agent.DEFAULT_UA_APP_ID +}; +// Annotate the CommonJS export names for ESM import in node: + +0 && (module.exports = { + crtAvailability, + createDefaultUserAgentProvider, + defaultUserAgent, + UA_APP_ID_ENV_NAME, + UA_APP_ID_INI_NAME, + NODE_APP_ID_CONFIG_OPTIONS +}); + diff --git a/amplify/functions/fetchDocuments/node_modules/@aws-sdk/util-user-agent-node/dist-es/crt-availability.js b/amplify/functions/fetchDocuments/node_modules/@aws-sdk/util-user-agent-node/dist-es/crt-availability.js new file mode 100644 index 0000000..99ebeb9 --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@aws-sdk/util-user-agent-node/dist-es/crt-availability.js @@ -0,0 +1,3 @@ +export const crtAvailability = { + isCrtAvailable: false, +}; diff --git a/amplify/functions/fetchDocuments/node_modules/@aws-sdk/util-user-agent-node/dist-es/defaultUserAgent.js b/amplify/functions/fetchDocuments/node_modules/@aws-sdk/util-user-agent-node/dist-es/defaultUserAgent.js new file mode 100644 index 0000000..d92681d --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@aws-sdk/util-user-agent-node/dist-es/defaultUserAgent.js @@ -0,0 +1,29 @@ +import { platform, release } from "os"; +import { env, versions } from "process"; +import { isCrtAvailable } from "./is-crt-available"; +export { crtAvailability } from "./crt-availability"; +export const createDefaultUserAgentProvider = ({ serviceId, clientVersion }) => { + return async (config) => { + const sections = [ + ["aws-sdk-js", clientVersion], + ["ua", "2.1"], + [`os/${platform()}`, release()], + ["lang/js"], + ["md/nodejs", `${versions.node}`], + ]; + const crtAvailable = isCrtAvailable(); + if (crtAvailable) { + sections.push(crtAvailable); + } + if (serviceId) { + sections.push([`api/${serviceId}`, clientVersion]); + } + if (env.AWS_EXECUTION_ENV) { + sections.push([`exec-env/${env.AWS_EXECUTION_ENV}`]); + } + const appId = 
await config?.userAgentAppId?.(); + const resolvedUserAgent = appId ? [...sections, [`app/${appId}`]] : [...sections]; + return resolvedUserAgent; + }; +}; +export const defaultUserAgent = createDefaultUserAgentProvider; diff --git a/amplify/functions/fetchDocuments/node_modules/@aws-sdk/util-user-agent-node/dist-es/index.js b/amplify/functions/fetchDocuments/node_modules/@aws-sdk/util-user-agent-node/dist-es/index.js new file mode 100644 index 0000000..cbf37f2 --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@aws-sdk/util-user-agent-node/dist-es/index.js @@ -0,0 +1,2 @@ +export * from "./defaultUserAgent"; +export * from "./nodeAppIdConfigOptions"; diff --git a/amplify/functions/fetchDocuments/node_modules/@aws-sdk/util-user-agent-node/dist-es/is-crt-available.js b/amplify/functions/fetchDocuments/node_modules/@aws-sdk/util-user-agent-node/dist-es/is-crt-available.js new file mode 100644 index 0000000..e9f8b0d --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@aws-sdk/util-user-agent-node/dist-es/is-crt-available.js @@ -0,0 +1,7 @@ +import { crtAvailability } from "./crt-availability"; +export const isCrtAvailable = () => { + if (crtAvailability.isCrtAvailable) { + return ["md/crt-avail"]; + } + return null; +}; diff --git a/amplify/functions/fetchDocuments/node_modules/@aws-sdk/util-user-agent-node/dist-es/nodeAppIdConfigOptions.js b/amplify/functions/fetchDocuments/node_modules/@aws-sdk/util-user-agent-node/dist-es/nodeAppIdConfigOptions.js new file mode 100644 index 0000000..f270db9 --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@aws-sdk/util-user-agent-node/dist-es/nodeAppIdConfigOptions.js @@ -0,0 +1,9 @@ +import { DEFAULT_UA_APP_ID } from "@aws-sdk/middleware-user-agent"; +export const UA_APP_ID_ENV_NAME = "AWS_SDK_UA_APP_ID"; +export const UA_APP_ID_INI_NAME = "sdk_ua_app_id"; +const UA_APP_ID_INI_NAME_DEPRECATED = "sdk-ua-app-id"; +export const NODE_APP_ID_CONFIG_OPTIONS = { + environmentVariableSelector: 
(env) => env[UA_APP_ID_ENV_NAME], + configFileSelector: (profile) => profile[UA_APP_ID_INI_NAME] ?? profile[UA_APP_ID_INI_NAME_DEPRECATED], + default: DEFAULT_UA_APP_ID, +}; diff --git a/amplify/functions/fetchDocuments/node_modules/@aws-sdk/util-user-agent-node/dist-types/crt-availability.d.ts b/amplify/functions/fetchDocuments/node_modules/@aws-sdk/util-user-agent-node/dist-types/crt-availability.d.ts new file mode 100644 index 0000000..c2033a0 --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@aws-sdk/util-user-agent-node/dist-types/crt-availability.d.ts @@ -0,0 +1,9 @@ +/** + * @internal + * + * If \@aws-sdk/signature-v4-crt is installed and loaded, it will register + * this value to true. + */ +export declare const crtAvailability: { + isCrtAvailable: boolean; +}; diff --git a/amplify/functions/fetchDocuments/node_modules/@aws-sdk/util-user-agent-node/dist-types/defaultUserAgent.d.ts b/amplify/functions/fetchDocuments/node_modules/@aws-sdk/util-user-agent-node/dist-types/defaultUserAgent.d.ts new file mode 100644 index 0000000..28537a6 --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@aws-sdk/util-user-agent-node/dist-types/defaultUserAgent.d.ts @@ -0,0 +1,23 @@ +import { Provider, UserAgent } from "@smithy/types"; +export { crtAvailability } from "./crt-availability"; +export interface DefaultUserAgentOptions { + serviceId?: string; + clientVersion: string; +} +export interface PreviouslyResolved { + userAgentAppId: Provider; +} +/** + * @internal + * + * Collect metrics from runtime to put into user agent. 
+ */ +export declare const createDefaultUserAgentProvider: ({ serviceId, clientVersion }: DefaultUserAgentOptions) => (config?: PreviouslyResolved) => Promise; +/** + * + * @internal + * + * @deprecated use createDefaultUserAgentProvider + * + */ +export declare const defaultUserAgent: ({ serviceId, clientVersion }: DefaultUserAgentOptions) => (config?: PreviouslyResolved) => Promise; diff --git a/amplify/functions/fetchDocuments/node_modules/@aws-sdk/util-user-agent-node/dist-types/index.d.ts b/amplify/functions/fetchDocuments/node_modules/@aws-sdk/util-user-agent-node/dist-types/index.d.ts new file mode 100644 index 0000000..cbf37f2 --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@aws-sdk/util-user-agent-node/dist-types/index.d.ts @@ -0,0 +1,2 @@ +export * from "./defaultUserAgent"; +export * from "./nodeAppIdConfigOptions"; diff --git a/amplify/functions/fetchDocuments/node_modules/@aws-sdk/util-user-agent-node/dist-types/is-crt-available.d.ts b/amplify/functions/fetchDocuments/node_modules/@aws-sdk/util-user-agent-node/dist-types/is-crt-available.d.ts new file mode 100644 index 0000000..675ffa8 --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@aws-sdk/util-user-agent-node/dist-types/is-crt-available.d.ts @@ -0,0 +1,5 @@ +import { UserAgentPair } from "@smithy/types"; +/** + * @internal + */ +export declare const isCrtAvailable: () => UserAgentPair | null; diff --git a/amplify/functions/fetchDocuments/node_modules/@aws-sdk/util-user-agent-node/dist-types/nodeAppIdConfigOptions.d.ts b/amplify/functions/fetchDocuments/node_modules/@aws-sdk/util-user-agent-node/dist-types/nodeAppIdConfigOptions.d.ts new file mode 100644 index 0000000..92a8edc --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@aws-sdk/util-user-agent-node/dist-types/nodeAppIdConfigOptions.d.ts @@ -0,0 +1,13 @@ +import { LoadedConfigSelectors } from "@smithy/node-config-provider"; +/** + * @internal + */ +export declare const UA_APP_ID_ENV_NAME = 
"AWS_SDK_UA_APP_ID"; +/** + * @internal + */ +export declare const UA_APP_ID_INI_NAME = "sdk_ua_app_id"; +/** + * @internal + */ +export declare const NODE_APP_ID_CONFIG_OPTIONS: LoadedConfigSelectors; diff --git a/amplify/functions/fetchDocuments/node_modules/@aws-sdk/util-user-agent-node/dist-types/ts3.4/crt-availability.d.ts b/amplify/functions/fetchDocuments/node_modules/@aws-sdk/util-user-agent-node/dist-types/ts3.4/crt-availability.d.ts new file mode 100644 index 0000000..9dccfb0 --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@aws-sdk/util-user-agent-node/dist-types/ts3.4/crt-availability.d.ts @@ -0,0 +1,3 @@ +export declare const crtAvailability: { + isCrtAvailable: boolean; +}; diff --git a/amplify/functions/fetchDocuments/node_modules/@aws-sdk/util-user-agent-node/dist-types/ts3.4/defaultUserAgent.d.ts b/amplify/functions/fetchDocuments/node_modules/@aws-sdk/util-user-agent-node/dist-types/ts3.4/defaultUserAgent.d.ts new file mode 100644 index 0000000..6e4884f --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@aws-sdk/util-user-agent-node/dist-types/ts3.4/defaultUserAgent.d.ts @@ -0,0 +1,21 @@ +import { Provider, UserAgent } from "@smithy/types"; +export { crtAvailability } from "./crt-availability"; +export interface DefaultUserAgentOptions { + serviceId?: string; + clientVersion: string; +} +export interface PreviouslyResolved { + userAgentAppId: Provider; +} +export declare const createDefaultUserAgentProvider: ({ + serviceId, + clientVersion, +}: DefaultUserAgentOptions) => ( + config?: PreviouslyResolved +) => Promise; +export declare const defaultUserAgent: ({ + serviceId, + clientVersion, +}: DefaultUserAgentOptions) => ( + config?: PreviouslyResolved +) => Promise; diff --git a/amplify/functions/fetchDocuments/node_modules/@aws-sdk/util-user-agent-node/dist-types/ts3.4/index.d.ts b/amplify/functions/fetchDocuments/node_modules/@aws-sdk/util-user-agent-node/dist-types/ts3.4/index.d.ts new file mode 100644 index 
0000000..cbf37f2 --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@aws-sdk/util-user-agent-node/dist-types/ts3.4/index.d.ts @@ -0,0 +1,2 @@ +export * from "./defaultUserAgent"; +export * from "./nodeAppIdConfigOptions"; diff --git a/amplify/functions/fetchDocuments/node_modules/@aws-sdk/util-user-agent-node/dist-types/ts3.4/is-crt-available.d.ts b/amplify/functions/fetchDocuments/node_modules/@aws-sdk/util-user-agent-node/dist-types/ts3.4/is-crt-available.d.ts new file mode 100644 index 0000000..d28355c --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@aws-sdk/util-user-agent-node/dist-types/ts3.4/is-crt-available.d.ts @@ -0,0 +1,2 @@ +import { UserAgentPair } from "@smithy/types"; +export declare const isCrtAvailable: () => UserAgentPair | null; diff --git a/amplify/functions/fetchDocuments/node_modules/@aws-sdk/util-user-agent-node/dist-types/ts3.4/nodeAppIdConfigOptions.d.ts b/amplify/functions/fetchDocuments/node_modules/@aws-sdk/util-user-agent-node/dist-types/ts3.4/nodeAppIdConfigOptions.d.ts new file mode 100644 index 0000000..b9fa123 --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@aws-sdk/util-user-agent-node/dist-types/ts3.4/nodeAppIdConfigOptions.d.ts @@ -0,0 +1,6 @@ +import { LoadedConfigSelectors } from "@smithy/node-config-provider"; +export declare const UA_APP_ID_ENV_NAME = "AWS_SDK_UA_APP_ID"; +export declare const UA_APP_ID_INI_NAME = "sdk_ua_app_id"; +export declare const NODE_APP_ID_CONFIG_OPTIONS: LoadedConfigSelectors< + string | undefined +>; diff --git a/amplify/functions/fetchDocuments/node_modules/@aws-sdk/util-user-agent-node/package.json b/amplify/functions/fetchDocuments/node_modules/@aws-sdk/util-user-agent-node/package.json new file mode 100644 index 0000000..14742a5 --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@aws-sdk/util-user-agent-node/package.json @@ -0,0 +1,65 @@ +{ + "name": "@aws-sdk/util-user-agent-node", + "version": "3.799.0", + "scripts": { + 
"build": "concurrently 'yarn:build:cjs' 'yarn:build:es' 'yarn:build:types'", + "build:cjs": "node ../../scripts/compilation/inline util-user-agent-node", + "build:es": "tsc -p tsconfig.es.json", + "build:include:deps": "lerna run --scope $npm_package_name --include-dependencies build", + "build:types": "tsc -p tsconfig.types.json", + "build:types:downlevel": "downlevel-dts dist-types dist-types/ts3.4", + "clean": "rimraf ./dist-* && rimraf *.tsbuildinfo", + "test": "yarn g:vitest run", + "test:watch": "yarn g:vitest watch" + }, + "main": "./dist-cjs/index.js", + "module": "./dist-es/index.js", + "types": "./dist-types/index.d.ts", + "author": { + "name": "AWS SDK for JavaScript Team", + "url": "https://aws.amazon.com/javascript/" + }, + "license": "Apache-2.0", + "dependencies": { + "@aws-sdk/middleware-user-agent": "3.799.0", + "@aws-sdk/types": "3.775.0", + "@smithy/node-config-provider": "^4.0.2", + "@smithy/types": "^4.2.0", + "tslib": "^2.6.2" + }, + "devDependencies": { + "@tsconfig/recommended": "1.0.1", + "@types/node": "^18.19.69", + "concurrently": "7.0.0", + "downlevel-dts": "0.10.1", + "rimraf": "3.0.2", + "typescript": "~5.2.2" + }, + "peerDependencies": { + "aws-crt": ">=1.0.0" + }, + "peerDependenciesMeta": { + "aws-crt": { + "optional": true + } + }, + "engines": { + "node": ">=18.0.0" + }, + "typesVersions": { + "<4.0": { + "dist-types/*": [ + "dist-types/ts3.4/*" + ] + } + }, + "files": [ + "dist-*/**" + ], + "homepage": "https://github.com/aws/aws-sdk-js-v3/tree/main/packages/util-user-agent-node", + "repository": { + "type": "git", + "url": "https://github.com/aws/aws-sdk-js-v3.git", + "directory": "packages/util-user-agent-node" + } +} diff --git a/amplify/functions/fetchDocuments/node_modules/@smithy/abort-controller/LICENSE b/amplify/functions/fetchDocuments/node_modules/@smithy/abort-controller/LICENSE new file mode 100644 index 0000000..7b6491b --- /dev/null +++ 
b/amplify/functions/fetchDocuments/node_modules/@smithy/abort-controller/LICENSE @@ -0,0 +1,201 @@ +Apache License + Version 2.0, January 2004 + http://www.apache.org/licenses/ + + TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION + + 1. Definitions. + + "License" shall mean the terms and conditions for use, reproduction, + and distribution as defined by Sections 1 through 9 of this document. + + "Licensor" shall mean the copyright owner or entity authorized by + the copyright owner that is granting the License. + + "Legal Entity" shall mean the union of the acting entity and all + other entities that control, are controlled by, or are under common + control with that entity. For the purposes of this definition, + "control" means (i) the power, direct or indirect, to cause the + direction or management of such entity, whether by contract or + otherwise, or (ii) ownership of fifty percent (50%) or more of the + outstanding shares, or (iii) beneficial ownership of such entity. + + "You" (or "Your") shall mean an individual or Legal Entity + exercising permissions granted by this License. + + "Source" form shall mean the preferred form for making modifications, + including but not limited to software source code, documentation + source, and configuration files. + + "Object" form shall mean any form resulting from mechanical + transformation or translation of a Source form, including but + not limited to compiled object code, generated documentation, + and conversions to other media types. + + "Work" shall mean the work of authorship, whether in Source or + Object form, made available under the License, as indicated by a + copyright notice that is included in or attached to the work + (an example is provided in the Appendix below). 
+ + "Derivative Works" shall mean any work, whether in Source or Object + form, that is based on (or derived from) the Work and for which the + editorial revisions, annotations, elaborations, or other modifications + represent, as a whole, an original work of authorship. For the purposes + of this License, Derivative Works shall not include works that remain + separable from, or merely link (or bind by name) to the interfaces of, + the Work and Derivative Works thereof. + + "Contribution" shall mean any work of authorship, including + the original version of the Work and any modifications or additions + to that Work or Derivative Works thereof, that is intentionally + submitted to Licensor for inclusion in the Work by the copyright owner + or by an individual or Legal Entity authorized to submit on behalf of + the copyright owner. For the purposes of this definition, "submitted" + means any form of electronic, verbal, or written communication sent + to the Licensor or its representatives, including but not limited to + communication on electronic mailing lists, source code control systems, + and issue tracking systems that are managed by, or on behalf of, the + Licensor for the purpose of discussing and improving the Work, but + excluding communication that is conspicuously marked or otherwise + designated in writing by the copyright owner as "Not a Contribution." + + "Contributor" shall mean Licensor and any individual or Legal Entity + on behalf of whom a Contribution has been received by Licensor and + subsequently incorporated within the Work. + + 2. Grant of Copyright License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + copyright license to reproduce, prepare Derivative Works of, + publicly display, publicly perform, sublicense, and distribute the + Work and such Derivative Works in Source or Object form. + + 3. 
Grant of Patent License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + (except as stated in this section) patent license to make, have made, + use, offer to sell, sell, import, and otherwise transfer the Work, + where such license applies only to those patent claims licensable + by such Contributor that are necessarily infringed by their + Contribution(s) alone or by combination of their Contribution(s) + with the Work to which such Contribution(s) was submitted. If You + institute patent litigation against any entity (including a + cross-claim or counterclaim in a lawsuit) alleging that the Work + or a Contribution incorporated within the Work constitutes direct + or contributory patent infringement, then any patent licenses + granted to You under this License for that Work shall terminate + as of the date such litigation is filed. + + 4. Redistribution. You may reproduce and distribute copies of the + Work or Derivative Works thereof in any medium, with or without + modifications, and in Source or Object form, provided that You + meet the following conditions: + + (a) You must give any other recipients of the Work or + Derivative Works a copy of this License; and + + (b) You must cause any modified files to carry prominent notices + stating that You changed the files; and + + (c) You must retain, in the Source form of any Derivative Works + that You distribute, all copyright, patent, trademark, and + attribution notices from the Source form of the Work, + excluding those notices that do not pertain to any part of + the Derivative Works; and + + (d) If the Work includes a "NOTICE" text file as part of its + distribution, then any Derivative Works that You distribute must + include a readable copy of the attribution notices contained + within such NOTICE file, excluding those notices that do not + pertain to any part of the Derivative 
Works, in at least one + of the following places: within a NOTICE text file distributed + as part of the Derivative Works; within the Source form or + documentation, if provided along with the Derivative Works; or, + within a display generated by the Derivative Works, if and + wherever such third-party notices normally appear. The contents + of the NOTICE file are for informational purposes only and + do not modify the License. You may add Your own attribution + notices within Derivative Works that You distribute, alongside + or as an addendum to the NOTICE text from the Work, provided + that such additional attribution notices cannot be construed + as modifying the License. + + You may add Your own copyright statement to Your modifications and + may provide additional or different license terms and conditions + for use, reproduction, or distribution of Your modifications, or + for any such Derivative Works as a whole, provided Your use, + reproduction, and distribution of the Work otherwise complies with + the conditions stated in this License. + + 5. Submission of Contributions. Unless You explicitly state otherwise, + any Contribution intentionally submitted for inclusion in the Work + by You to the Licensor shall be under the terms and conditions of + this License, without any additional terms or conditions. + Notwithstanding the above, nothing herein shall supersede or modify + the terms of any separate license agreement you may have executed + with Licensor regarding such Contributions. + + 6. Trademarks. This License does not grant permission to use the trade + names, trademarks, service marks, or product names of the Licensor, + except as required for reasonable and customary use in describing the + origin of the Work and reproducing the content of the NOTICE file. + + 7. Disclaimer of Warranty. 
Unless required by applicable law or + agreed to in writing, Licensor provides the Work (and each + Contributor provides its Contributions) on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or + implied, including, without limitation, any warranties or conditions + of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A + PARTICULAR PURPOSE. You are solely responsible for determining the + appropriateness of using or redistributing the Work and assume any + risks associated with Your exercise of permissions under this License. + + 8. Limitation of Liability. In no event and under no legal theory, + whether in tort (including negligence), contract, or otherwise, + unless required by applicable law (such as deliberate and grossly + negligent acts) or agreed to in writing, shall any Contributor be + liable to You for damages, including any direct, indirect, special, + incidental, or consequential damages of any character arising as a + result of this License or out of the use or inability to use the + Work (including but not limited to damages for loss of goodwill, + work stoppage, computer failure or malfunction, or any and all + other commercial damages or losses), even if such Contributor + has been advised of the possibility of such damages. + + 9. Accepting Warranty or Additional Liability. While redistributing + the Work or Derivative Works thereof, You may choose to offer, + and charge a fee for, acceptance of support, warranty, indemnity, + or other liability obligations and/or rights consistent with this + License. However, in accepting such obligations, You may act only + on Your own behalf and on Your sole responsibility, not on behalf + of any other Contributor, and only if You agree to indemnify, + defend, and hold each Contributor harmless for any liability + incurred by, or claims asserted against, such Contributor by reason + of your accepting any such warranty or additional liability. 
+ + END OF TERMS AND CONDITIONS + + APPENDIX: How to apply the Apache License to your work. + + To apply the Apache License to your work, attach the following + boilerplate notice, with the fields enclosed by brackets "{}" + replaced with your own identifying information. (Don't include + the brackets!) The text should be enclosed in the appropriate + comment syntax for the file format. We also recommend that a + file or class name and description of purpose be included on the + same "printed page" as the copyright notice for easier + identification within third-party archives. + + Copyright 2018-2020 Amazon.com, Inc. or its affiliates. All Rights Reserved. + + Licensed under the Apache License, Version 2.0 (the "License"); + you may not use this file except in compliance with the License. + You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + + Unless required by applicable law or agreed to in writing, software + distributed under the License is distributed on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + See the License for the specific language governing permissions and + limitations under the License. 
\ No newline at end of file diff --git a/amplify/functions/fetchDocuments/node_modules/@smithy/abort-controller/README.md b/amplify/functions/fetchDocuments/node_modules/@smithy/abort-controller/README.md new file mode 100644 index 0000000..175bc37 --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@smithy/abort-controller/README.md @@ -0,0 +1,4 @@ +# @smithy/abort-controller + +[![NPM version](https://img.shields.io/npm/v/@smithy/abort-controller/latest.svg)](https://www.npmjs.com/package/@smithy/abort-controller) +[![NPM downloads](https://img.shields.io/npm/dm/@smithy/abort-controller.svg)](https://www.npmjs.com/package/@smithy/abort-controller) diff --git a/amplify/functions/fetchDocuments/node_modules/@smithy/abort-controller/dist-cjs/AbortController.js b/amplify/functions/fetchDocuments/node_modules/@smithy/abort-controller/dist-cjs/AbortController.js new file mode 100644 index 0000000..532e610 --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@smithy/abort-controller/dist-cjs/AbortController.js @@ -0,0 +1 @@ +module.exports = require("./index.js"); \ No newline at end of file diff --git a/amplify/functions/fetchDocuments/node_modules/@smithy/abort-controller/dist-cjs/AbortSignal.js b/amplify/functions/fetchDocuments/node_modules/@smithy/abort-controller/dist-cjs/AbortSignal.js new file mode 100644 index 0000000..532e610 --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@smithy/abort-controller/dist-cjs/AbortSignal.js @@ -0,0 +1 @@ +module.exports = require("./index.js"); \ No newline at end of file diff --git a/amplify/functions/fetchDocuments/node_modules/@smithy/abort-controller/dist-cjs/index.js b/amplify/functions/fetchDocuments/node_modules/@smithy/abort-controller/dist-cjs/index.js new file mode 100644 index 0000000..e2f7caa --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@smithy/abort-controller/dist-cjs/index.js @@ -0,0 +1,84 @@ +var __defProp = Object.defineProperty; +var 
__getOwnPropDesc = Object.getOwnPropertyDescriptor; +var __getOwnPropNames = Object.getOwnPropertyNames; +var __hasOwnProp = Object.prototype.hasOwnProperty; +var __name = (target, value) => __defProp(target, "name", { value, configurable: true }); +var __export = (target, all) => { + for (var name in all) + __defProp(target, name, { get: all[name], enumerable: true }); +}; +var __copyProps = (to, from, except, desc) => { + if (from && typeof from === "object" || typeof from === "function") { + for (let key of __getOwnPropNames(from)) + if (!__hasOwnProp.call(to, key) && key !== except) + __defProp(to, key, { get: () => from[key], enumerable: !(desc = __getOwnPropDesc(from, key)) || desc.enumerable }); + } + return to; +}; +var __toCommonJS = (mod) => __copyProps(__defProp({}, "__esModule", { value: true }), mod); + +// src/index.ts +var src_exports = {}; +__export(src_exports, { + AbortController: () => AbortController, + AbortHandler: () => import_types.AbortHandler, + AbortSignal: () => AbortSignal, + IAbortController: () => import_types.AbortController, + IAbortSignal: () => import_types.AbortSignal +}); +module.exports = __toCommonJS(src_exports); + +// src/AbortController.ts + + +// src/AbortSignal.ts +var import_types = require("@smithy/types"); +var AbortSignal = class { + constructor() { + this.onabort = null; + this._aborted = false; + Object.defineProperty(this, "_aborted", { + value: false, + writable: true + }); + } + static { + __name(this, "AbortSignal"); + } + /** + * Whether the associated operation has already been cancelled. 
+ */ + get aborted() { + return this._aborted; + } + /** + * @internal + */ + abort() { + this._aborted = true; + if (this.onabort) { + this.onabort(this); + this.onabort = null; + } + } +}; + +// src/AbortController.ts +var AbortController = class { + constructor() { + this.signal = new AbortSignal(); + } + static { + __name(this, "AbortController"); + } + abort() { + this.signal.abort(); + } +}; +// Annotate the CommonJS export names for ESM import in node: + +0 && (module.exports = { + AbortController, + AbortSignal +}); + diff --git a/amplify/functions/fetchDocuments/node_modules/@smithy/abort-controller/dist-es/AbortController.js b/amplify/functions/fetchDocuments/node_modules/@smithy/abort-controller/dist-es/AbortController.js new file mode 100644 index 0000000..696f137 --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@smithy/abort-controller/dist-es/AbortController.js @@ -0,0 +1,9 @@ +import { AbortSignal } from "./AbortSignal"; +export class AbortController { + constructor() { + this.signal = new AbortSignal(); + } + abort() { + this.signal.abort(); + } +} diff --git a/amplify/functions/fetchDocuments/node_modules/@smithy/abort-controller/dist-es/AbortSignal.js b/amplify/functions/fetchDocuments/node_modules/@smithy/abort-controller/dist-es/AbortSignal.js new file mode 100644 index 0000000..9fc0813 --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@smithy/abort-controller/dist-es/AbortSignal.js @@ -0,0 +1,20 @@ +export class AbortSignal { + constructor() { + this.onabort = null; + this._aborted = false; + Object.defineProperty(this, "_aborted", { + value: false, + writable: true, + }); + } + get aborted() { + return this._aborted; + } + abort() { + this._aborted = true; + if (this.onabort) { + this.onabort(this); + this.onabort = null; + } + } +} diff --git a/amplify/functions/fetchDocuments/node_modules/@smithy/abort-controller/dist-es/index.js 
b/amplify/functions/fetchDocuments/node_modules/@smithy/abort-controller/dist-es/index.js new file mode 100644 index 0000000..a0f47f7 --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@smithy/abort-controller/dist-es/index.js @@ -0,0 +1,2 @@ +export * from "./AbortController"; +export * from "./AbortSignal"; diff --git a/amplify/functions/fetchDocuments/node_modules/@smithy/abort-controller/dist-types/AbortController.d.ts b/amplify/functions/fetchDocuments/node_modules/@smithy/abort-controller/dist-types/AbortController.d.ts new file mode 100644 index 0000000..007f0f6 --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@smithy/abort-controller/dist-types/AbortController.d.ts @@ -0,0 +1,16 @@ +import { AbortController as DeprecatedAbortController } from "@smithy/types"; +import { AbortSignal } from "./AbortSignal"; +/** + * @public + */ +export { DeprecatedAbortController as IAbortController }; +/** + * @deprecated This implementation was added as Node.js didn't support AbortController prior to 15.x + * Use native implementation in browsers or Node.js \>=15.4.0. 
+ * + * @public + */ +export declare class AbortController implements DeprecatedAbortController { + readonly signal: AbortSignal; + abort(): void; +} diff --git a/amplify/functions/fetchDocuments/node_modules/@smithy/abort-controller/dist-types/AbortSignal.d.ts b/amplify/functions/fetchDocuments/node_modules/@smithy/abort-controller/dist-types/AbortSignal.d.ts new file mode 100644 index 0000000..a97c3dc --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@smithy/abort-controller/dist-types/AbortSignal.d.ts @@ -0,0 +1,21 @@ +import { AbortHandler, AbortSignal as DeprecatedAbortSignal } from "@smithy/types"; +/** + * @public + */ +export { AbortHandler, DeprecatedAbortSignal as IAbortSignal }; +/** + * @public + */ +export declare class AbortSignal implements DeprecatedAbortSignal { + onabort: AbortHandler | null; + private _aborted; + constructor(); + /** + * Whether the associated operation has already been cancelled. + */ + get aborted(): boolean; + /** + * @internal + */ + abort(): void; +} diff --git a/amplify/functions/fetchDocuments/node_modules/@smithy/abort-controller/dist-types/index.d.ts b/amplify/functions/fetchDocuments/node_modules/@smithy/abort-controller/dist-types/index.d.ts new file mode 100644 index 0000000..8788e2f --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@smithy/abort-controller/dist-types/index.d.ts @@ -0,0 +1,9 @@ +/** + * This implementation was added as Node.js didn't support AbortController prior to 15.x + * Use native implementation in browsers or Node.js \>=15.4.0. 
+ * + * @deprecated Use standard implementations in [Browsers](https://developer.mozilla.org/en-US/docs/Web/API/AbortController) and [Node.js](https://nodejs.org/docs/latest/api/globals.html#class-abortcontroller) + * @packageDocumentation + */ +export * from "./AbortController"; +export * from "./AbortSignal"; diff --git a/amplify/functions/fetchDocuments/node_modules/@smithy/abort-controller/dist-types/ts3.4/AbortController.d.ts b/amplify/functions/fetchDocuments/node_modules/@smithy/abort-controller/dist-types/ts3.4/AbortController.d.ts new file mode 100644 index 0000000..89457d4 --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@smithy/abort-controller/dist-types/ts3.4/AbortController.d.ts @@ -0,0 +1,16 @@ +import { AbortController as DeprecatedAbortController } from "@smithy/types"; +import { AbortSignal } from "./AbortSignal"; +/** + * @public + */ +export { DeprecatedAbortController as IAbortController }; +/** + * @deprecated This implementation was added as Node.js didn't support AbortController prior to 15.x + * Use native implementation in browsers or Node.js \>=15.4.0. 
+ * + * @public + */ +export declare class AbortController implements DeprecatedAbortController { + readonly signal: AbortSignal; + abort(): void; +} diff --git a/amplify/functions/fetchDocuments/node_modules/@smithy/abort-controller/dist-types/ts3.4/AbortSignal.d.ts b/amplify/functions/fetchDocuments/node_modules/@smithy/abort-controller/dist-types/ts3.4/AbortSignal.d.ts new file mode 100644 index 0000000..92130a3 --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@smithy/abort-controller/dist-types/ts3.4/AbortSignal.d.ts @@ -0,0 +1,21 @@ +import { AbortHandler, AbortSignal as DeprecatedAbortSignal } from "@smithy/types"; +/** + * @public + */ +export { AbortHandler, DeprecatedAbortSignal as IAbortSignal }; +/** + * @public + */ +export declare class AbortSignal implements DeprecatedAbortSignal { + onabort: AbortHandler | null; + private _aborted; + constructor(); + /* + * Whether the associated operation has already been cancelled. + */ + readonly aborted: boolean; + /** + * @internal + */ + abort(): void; +} diff --git a/amplify/functions/fetchDocuments/node_modules/@smithy/abort-controller/dist-types/ts3.4/index.d.ts b/amplify/functions/fetchDocuments/node_modules/@smithy/abort-controller/dist-types/ts3.4/index.d.ts new file mode 100644 index 0000000..5a907b0 --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@smithy/abort-controller/dist-types/ts3.4/index.d.ts @@ -0,0 +1,9 @@ +/** + * This implementation was added as Node.js didn't support AbortController prior to 15.x + * Use native implementation in browsers or Node.js \>=15.4.0. 
+ * + * @deprecated Use standard implementations in [Browsers](https://developer.mozilla.org/en-US/docs/Web/API/AbortController) and [Node.js](https://nodejs.org/docs/latest/api/globals.html#class-abortcontroller) + * @packageDocumentation + */ +export * from "./AbortController"; +export * from "./AbortSignal"; diff --git a/amplify/functions/fetchDocuments/node_modules/@smithy/abort-controller/package.json b/amplify/functions/fetchDocuments/node_modules/@smithy/abort-controller/package.json new file mode 100644 index 0000000..b7e5769 --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@smithy/abort-controller/package.json @@ -0,0 +1,62 @@ +{ + "name": "@smithy/abort-controller", + "version": "4.0.2", + "description": "A simple abort controller library", + "main": "./dist-cjs/index.js", + "module": "./dist-es/index.js", + "types": "./dist-types/index.d.ts", + "scripts": { + "build": "concurrently 'yarn:build:cjs' 'yarn:build:es' 'yarn:build:types && yarn build:types:downlevel'", + "build:cjs": "node ../../scripts/inline abort-controller", + "build:es": "yarn g:tsc -p tsconfig.es.json", + "build:types": "yarn g:tsc -p tsconfig.types.json", + "build:types:downlevel": "rimraf dist-types/ts3.4 && downlevel-dts dist-types dist-types/ts3.4", + "stage-release": "rimraf ./.release && yarn pack && mkdir ./.release && tar zxvf ./package.tgz --directory ./.release && rm ./package.tgz", + "clean": "rimraf ./dist-* && rimraf *.tsbuildinfo || exit 0", + "lint": "eslint -c ../../.eslintrc.js \"src/**/*.ts\"", + "format": "prettier --config ../../prettier.config.js --ignore-path ../../.prettierignore --write \"**/*.{ts,md,json}\"", + "extract:docs": "api-extractor run --local", + "test": "yarn g:vitest run", + "test:watch": "yarn g:vitest watch" + }, + "author": { + "name": "AWS SDK for JavaScript Team", + "url": "https://aws.amazon.com/javascript/" + }, + "license": "Apache-2.0", + "dependencies": { + "@smithy/types": "^4.2.0", + "tslib": "^2.6.2" + }, + "engines": { 
+ "node": ">=18.0.0" + }, + "typesVersions": { + "<4.0": { + "dist-types/*": [ + "dist-types/ts3.4/*" + ] + } + }, + "files": [ + "dist-*/**" + ], + "homepage": "https://github.com/smithy-lang/smithy-typescript/tree/main/packages/abort-controller", + "repository": { + "type": "git", + "url": "https://github.com/smithy-lang/smithy-typescript.git", + "directory": "packages/abort-controller" + }, + "devDependencies": { + "concurrently": "7.0.0", + "downlevel-dts": "0.10.1", + "rimraf": "3.0.2", + "typedoc": "0.23.23" + }, + "typedoc": { + "entryPoint": "src/index.ts" + }, + "publishConfig": { + "directory": ".release/package" + } +} \ No newline at end of file diff --git a/amplify/functions/fetchDocuments/node_modules/@smithy/config-resolver/LICENSE b/amplify/functions/fetchDocuments/node_modules/@smithy/config-resolver/LICENSE new file mode 100644 index 0000000..7b6491b --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@smithy/config-resolver/LICENSE @@ -0,0 +1,201 @@ +Apache License + Version 2.0, January 2004 + http://www.apache.org/licenses/ + + TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION + + 1. Definitions. + + "License" shall mean the terms and conditions for use, reproduction, + and distribution as defined by Sections 1 through 9 of this document. + + "Licensor" shall mean the copyright owner or entity authorized by + the copyright owner that is granting the License. + + "Legal Entity" shall mean the union of the acting entity and all + other entities that control, are controlled by, or are under common + control with that entity. For the purposes of this definition, + "control" means (i) the power, direct or indirect, to cause the + direction or management of such entity, whether by contract or + otherwise, or (ii) ownership of fifty percent (50%) or more of the + outstanding shares, or (iii) beneficial ownership of such entity. 
+ + "You" (or "Your") shall mean an individual or Legal Entity + exercising permissions granted by this License. + + "Source" form shall mean the preferred form for making modifications, + including but not limited to software source code, documentation + source, and configuration files. + + "Object" form shall mean any form resulting from mechanical + transformation or translation of a Source form, including but + not limited to compiled object code, generated documentation, + and conversions to other media types. + + "Work" shall mean the work of authorship, whether in Source or + Object form, made available under the License, as indicated by a + copyright notice that is included in or attached to the work + (an example is provided in the Appendix below). + + "Derivative Works" shall mean any work, whether in Source or Object + form, that is based on (or derived from) the Work and for which the + editorial revisions, annotations, elaborations, or other modifications + represent, as a whole, an original work of authorship. For the purposes + of this License, Derivative Works shall not include works that remain + separable from, or merely link (or bind by name) to the interfaces of, + the Work and Derivative Works thereof. + + "Contribution" shall mean any work of authorship, including + the original version of the Work and any modifications or additions + to that Work or Derivative Works thereof, that is intentionally + submitted to Licensor for inclusion in the Work by the copyright owner + or by an individual or Legal Entity authorized to submit on behalf of + the copyright owner. 
For the purposes of this definition, "submitted" + means any form of electronic, verbal, or written communication sent + to the Licensor or its representatives, including but not limited to + communication on electronic mailing lists, source code control systems, + and issue tracking systems that are managed by, or on behalf of, the + Licensor for the purpose of discussing and improving the Work, but + excluding communication that is conspicuously marked or otherwise + designated in writing by the copyright owner as "Not a Contribution." + + "Contributor" shall mean Licensor and any individual or Legal Entity + on behalf of whom a Contribution has been received by Licensor and + subsequently incorporated within the Work. + + 2. Grant of Copyright License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + copyright license to reproduce, prepare Derivative Works of, + publicly display, publicly perform, sublicense, and distribute the + Work and such Derivative Works in Source or Object form. + + 3. Grant of Patent License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + (except as stated in this section) patent license to make, have made, + use, offer to sell, sell, import, and otherwise transfer the Work, + where such license applies only to those patent claims licensable + by such Contributor that are necessarily infringed by their + Contribution(s) alone or by combination of their Contribution(s) + with the Work to which such Contribution(s) was submitted. 
If You + institute patent litigation against any entity (including a + cross-claim or counterclaim in a lawsuit) alleging that the Work + or a Contribution incorporated within the Work constitutes direct + or contributory patent infringement, then any patent licenses + granted to You under this License for that Work shall terminate + as of the date such litigation is filed. + + 4. Redistribution. You may reproduce and distribute copies of the + Work or Derivative Works thereof in any medium, with or without + modifications, and in Source or Object form, provided that You + meet the following conditions: + + (a) You must give any other recipients of the Work or + Derivative Works a copy of this License; and + + (b) You must cause any modified files to carry prominent notices + stating that You changed the files; and + + (c) You must retain, in the Source form of any Derivative Works + that You distribute, all copyright, patent, trademark, and + attribution notices from the Source form of the Work, + excluding those notices that do not pertain to any part of + the Derivative Works; and + + (d) If the Work includes a "NOTICE" text file as part of its + distribution, then any Derivative Works that You distribute must + include a readable copy of the attribution notices contained + within such NOTICE file, excluding those notices that do not + pertain to any part of the Derivative Works, in at least one + of the following places: within a NOTICE text file distributed + as part of the Derivative Works; within the Source form or + documentation, if provided along with the Derivative Works; or, + within a display generated by the Derivative Works, if and + wherever such third-party notices normally appear. The contents + of the NOTICE file are for informational purposes only and + do not modify the License. 
You may add Your own attribution + notices within Derivative Works that You distribute, alongside + or as an addendum to the NOTICE text from the Work, provided + that such additional attribution notices cannot be construed + as modifying the License. + + You may add Your own copyright statement to Your modifications and + may provide additional or different license terms and conditions + for use, reproduction, or distribution of Your modifications, or + for any such Derivative Works as a whole, provided Your use, + reproduction, and distribution of the Work otherwise complies with + the conditions stated in this License. + + 5. Submission of Contributions. Unless You explicitly state otherwise, + any Contribution intentionally submitted for inclusion in the Work + by You to the Licensor shall be under the terms and conditions of + this License, without any additional terms or conditions. + Notwithstanding the above, nothing herein shall supersede or modify + the terms of any separate license agreement you may have executed + with Licensor regarding such Contributions. + + 6. Trademarks. This License does not grant permission to use the trade + names, trademarks, service marks, or product names of the Licensor, + except as required for reasonable and customary use in describing the + origin of the Work and reproducing the content of the NOTICE file. + + 7. Disclaimer of Warranty. Unless required by applicable law or + agreed to in writing, Licensor provides the Work (and each + Contributor provides its Contributions) on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or + implied, including, without limitation, any warranties or conditions + of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A + PARTICULAR PURPOSE. You are solely responsible for determining the + appropriateness of using or redistributing the Work and assume any + risks associated with Your exercise of permissions under this License. + + 8. 
Limitation of Liability. In no event and under no legal theory, + whether in tort (including negligence), contract, or otherwise, + unless required by applicable law (such as deliberate and grossly + negligent acts) or agreed to in writing, shall any Contributor be + liable to You for damages, including any direct, indirect, special, + incidental, or consequential damages of any character arising as a + result of this License or out of the use or inability to use the + Work (including but not limited to damages for loss of goodwill, + work stoppage, computer failure or malfunction, or any and all + other commercial damages or losses), even if such Contributor + has been advised of the possibility of such damages. + + 9. Accepting Warranty or Additional Liability. While redistributing + the Work or Derivative Works thereof, You may choose to offer, + and charge a fee for, acceptance of support, warranty, indemnity, + or other liability obligations and/or rights consistent with this + License. However, in accepting such obligations, You may act only + on Your own behalf and on Your sole responsibility, not on behalf + of any other Contributor, and only if You agree to indemnify, + defend, and hold each Contributor harmless for any liability + incurred by, or claims asserted against, such Contributor by reason + of your accepting any such warranty or additional liability. + + END OF TERMS AND CONDITIONS + + APPENDIX: How to apply the Apache License to your work. + + To apply the Apache License to your work, attach the following + boilerplate notice, with the fields enclosed by brackets "{}" + replaced with your own identifying information. (Don't include + the brackets!) The text should be enclosed in the appropriate + comment syntax for the file format. We also recommend that a + file or class name and description of purpose be included on the + same "printed page" as the copyright notice for easier + identification within third-party archives. 
+ + Copyright 2018-2020 Amazon.com, Inc. or its affiliates. All Rights Reserved. + + Licensed under the Apache License, Version 2.0 (the "License"); + you may not use this file except in compliance with the License. + You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + + Unless required by applicable law or agreed to in writing, software + distributed under the License is distributed on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + See the License for the specific language governing permissions and + limitations under the License. \ No newline at end of file diff --git a/amplify/functions/fetchDocuments/node_modules/@smithy/config-resolver/README.md b/amplify/functions/fetchDocuments/node_modules/@smithy/config-resolver/README.md new file mode 100644 index 0000000..2a25da2 --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@smithy/config-resolver/README.md @@ -0,0 +1,10 @@ +# @smithy/config-resolver + +[![NPM version](https://img.shields.io/npm/v/@smithy/config-resolver/latest.svg)](https://www.npmjs.com/package/@smithy/config-resolver) +[![NPM downloads](https://img.shields.io/npm/dm/@smithy/config-resolver.svg)](https://www.npmjs.com/package/@smithy/config-resolver) + +> An internal package + +## Usage + +You probably shouldn't, at least directly. 
diff --git a/amplify/functions/fetchDocuments/node_modules/@smithy/config-resolver/dist-cjs/endpointsConfig/NodeUseDualstackEndpointConfigOptions.js b/amplify/functions/fetchDocuments/node_modules/@smithy/config-resolver/dist-cjs/endpointsConfig/NodeUseDualstackEndpointConfigOptions.js new file mode 100644 index 0000000..0440577 --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@smithy/config-resolver/dist-cjs/endpointsConfig/NodeUseDualstackEndpointConfigOptions.js @@ -0,0 +1 @@ +module.exports = require("../index.js"); \ No newline at end of file diff --git a/amplify/functions/fetchDocuments/node_modules/@smithy/config-resolver/dist-cjs/endpointsConfig/NodeUseFipsEndpointConfigOptions.js b/amplify/functions/fetchDocuments/node_modules/@smithy/config-resolver/dist-cjs/endpointsConfig/NodeUseFipsEndpointConfigOptions.js new file mode 100644 index 0000000..0440577 --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@smithy/config-resolver/dist-cjs/endpointsConfig/NodeUseFipsEndpointConfigOptions.js @@ -0,0 +1 @@ +module.exports = require("../index.js"); \ No newline at end of file diff --git a/amplify/functions/fetchDocuments/node_modules/@smithy/config-resolver/dist-cjs/endpointsConfig/index.js b/amplify/functions/fetchDocuments/node_modules/@smithy/config-resolver/dist-cjs/endpointsConfig/index.js new file mode 100644 index 0000000..0440577 --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@smithy/config-resolver/dist-cjs/endpointsConfig/index.js @@ -0,0 +1 @@ +module.exports = require("../index.js"); \ No newline at end of file diff --git a/amplify/functions/fetchDocuments/node_modules/@smithy/config-resolver/dist-cjs/endpointsConfig/resolveCustomEndpointsConfig.js b/amplify/functions/fetchDocuments/node_modules/@smithy/config-resolver/dist-cjs/endpointsConfig/resolveCustomEndpointsConfig.js new file mode 100644 index 0000000..0440577 --- /dev/null +++ 
b/amplify/functions/fetchDocuments/node_modules/@smithy/config-resolver/dist-cjs/endpointsConfig/resolveCustomEndpointsConfig.js @@ -0,0 +1 @@ +module.exports = require("../index.js"); \ No newline at end of file diff --git a/amplify/functions/fetchDocuments/node_modules/@smithy/config-resolver/dist-cjs/endpointsConfig/resolveEndpointsConfig.js b/amplify/functions/fetchDocuments/node_modules/@smithy/config-resolver/dist-cjs/endpointsConfig/resolveEndpointsConfig.js new file mode 100644 index 0000000..0440577 --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@smithy/config-resolver/dist-cjs/endpointsConfig/resolveEndpointsConfig.js @@ -0,0 +1 @@ +module.exports = require("../index.js"); \ No newline at end of file diff --git a/amplify/functions/fetchDocuments/node_modules/@smithy/config-resolver/dist-cjs/endpointsConfig/utils/getEndpointFromRegion.js b/amplify/functions/fetchDocuments/node_modules/@smithy/config-resolver/dist-cjs/endpointsConfig/utils/getEndpointFromRegion.js new file mode 100644 index 0000000..8817412 --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@smithy/config-resolver/dist-cjs/endpointsConfig/utils/getEndpointFromRegion.js @@ -0,0 +1 @@ +module.exports = require("../../index.js"); \ No newline at end of file diff --git a/amplify/functions/fetchDocuments/node_modules/@smithy/config-resolver/dist-cjs/index.js b/amplify/functions/fetchDocuments/node_modules/@smithy/config-resolver/dist-cjs/index.js new file mode 100644 index 0000000..42f7a4c --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@smithy/config-resolver/dist-cjs/index.js @@ -0,0 +1,228 @@ +var __defProp = Object.defineProperty; +var __getOwnPropDesc = Object.getOwnPropertyDescriptor; +var __getOwnPropNames = Object.getOwnPropertyNames; +var __hasOwnProp = Object.prototype.hasOwnProperty; +var __name = (target, value) => __defProp(target, "name", { value, configurable: true }); +var __export = (target, all) => { + for (var name in all) + 
__defProp(target, name, { get: all[name], enumerable: true }); +}; +var __copyProps = (to, from, except, desc) => { + if (from && typeof from === "object" || typeof from === "function") { + for (let key of __getOwnPropNames(from)) + if (!__hasOwnProp.call(to, key) && key !== except) + __defProp(to, key, { get: () => from[key], enumerable: !(desc = __getOwnPropDesc(from, key)) || desc.enumerable }); + } + return to; +}; +var __toCommonJS = (mod) => __copyProps(__defProp({}, "__esModule", { value: true }), mod); + +// src/index.ts +var src_exports = {}; +__export(src_exports, { + CONFIG_USE_DUALSTACK_ENDPOINT: () => CONFIG_USE_DUALSTACK_ENDPOINT, + CONFIG_USE_FIPS_ENDPOINT: () => CONFIG_USE_FIPS_ENDPOINT, + DEFAULT_USE_DUALSTACK_ENDPOINT: () => DEFAULT_USE_DUALSTACK_ENDPOINT, + DEFAULT_USE_FIPS_ENDPOINT: () => DEFAULT_USE_FIPS_ENDPOINT, + ENV_USE_DUALSTACK_ENDPOINT: () => ENV_USE_DUALSTACK_ENDPOINT, + ENV_USE_FIPS_ENDPOINT: () => ENV_USE_FIPS_ENDPOINT, + NODE_REGION_CONFIG_FILE_OPTIONS: () => NODE_REGION_CONFIG_FILE_OPTIONS, + NODE_REGION_CONFIG_OPTIONS: () => NODE_REGION_CONFIG_OPTIONS, + NODE_USE_DUALSTACK_ENDPOINT_CONFIG_OPTIONS: () => NODE_USE_DUALSTACK_ENDPOINT_CONFIG_OPTIONS, + NODE_USE_FIPS_ENDPOINT_CONFIG_OPTIONS: () => NODE_USE_FIPS_ENDPOINT_CONFIG_OPTIONS, + REGION_ENV_NAME: () => REGION_ENV_NAME, + REGION_INI_NAME: () => REGION_INI_NAME, + getRegionInfo: () => getRegionInfo, + resolveCustomEndpointsConfig: () => resolveCustomEndpointsConfig, + resolveEndpointsConfig: () => resolveEndpointsConfig, + resolveRegionConfig: () => resolveRegionConfig +}); +module.exports = __toCommonJS(src_exports); + +// src/endpointsConfig/NodeUseDualstackEndpointConfigOptions.ts +var import_util_config_provider = require("@smithy/util-config-provider"); +var ENV_USE_DUALSTACK_ENDPOINT = "AWS_USE_DUALSTACK_ENDPOINT"; +var CONFIG_USE_DUALSTACK_ENDPOINT = "use_dualstack_endpoint"; +var DEFAULT_USE_DUALSTACK_ENDPOINT = false; +var NODE_USE_DUALSTACK_ENDPOINT_CONFIG_OPTIONS = { + 
environmentVariableSelector: (env) => (0, import_util_config_provider.booleanSelector)(env, ENV_USE_DUALSTACK_ENDPOINT, import_util_config_provider.SelectorType.ENV), + configFileSelector: (profile) => (0, import_util_config_provider.booleanSelector)(profile, CONFIG_USE_DUALSTACK_ENDPOINT, import_util_config_provider.SelectorType.CONFIG), + default: false +}; + +// src/endpointsConfig/NodeUseFipsEndpointConfigOptions.ts + +var ENV_USE_FIPS_ENDPOINT = "AWS_USE_FIPS_ENDPOINT"; +var CONFIG_USE_FIPS_ENDPOINT = "use_fips_endpoint"; +var DEFAULT_USE_FIPS_ENDPOINT = false; +var NODE_USE_FIPS_ENDPOINT_CONFIG_OPTIONS = { + environmentVariableSelector: (env) => (0, import_util_config_provider.booleanSelector)(env, ENV_USE_FIPS_ENDPOINT, import_util_config_provider.SelectorType.ENV), + configFileSelector: (profile) => (0, import_util_config_provider.booleanSelector)(profile, CONFIG_USE_FIPS_ENDPOINT, import_util_config_provider.SelectorType.CONFIG), + default: false +}; + +// src/endpointsConfig/resolveCustomEndpointsConfig.ts +var import_util_middleware = require("@smithy/util-middleware"); +var resolveCustomEndpointsConfig = /* @__PURE__ */ __name((input) => { + const { tls, endpoint, urlParser, useDualstackEndpoint } = input; + return Object.assign(input, { + tls: tls ?? true, + endpoint: (0, import_util_middleware.normalizeProvider)(typeof endpoint === "string" ? urlParser(endpoint) : endpoint), + isCustomEndpoint: true, + useDualstackEndpoint: (0, import_util_middleware.normalizeProvider)(useDualstackEndpoint ?? 
false) + }); +}, "resolveCustomEndpointsConfig"); + +// src/endpointsConfig/resolveEndpointsConfig.ts + + +// src/endpointsConfig/utils/getEndpointFromRegion.ts +var getEndpointFromRegion = /* @__PURE__ */ __name(async (input) => { + const { tls = true } = input; + const region = await input.region(); + const dnsHostRegex = new RegExp(/^([a-zA-Z0-9]|[a-zA-Z0-9][a-zA-Z0-9-]{0,61}[a-zA-Z0-9])$/); + if (!dnsHostRegex.test(region)) { + throw new Error("Invalid region in client config"); + } + const useDualstackEndpoint = await input.useDualstackEndpoint(); + const useFipsEndpoint = await input.useFipsEndpoint(); + const { hostname } = await input.regionInfoProvider(region, { useDualstackEndpoint, useFipsEndpoint }) ?? {}; + if (!hostname) { + throw new Error("Cannot resolve hostname from client config"); + } + return input.urlParser(`${tls ? "https:" : "http:"}//${hostname}`); +}, "getEndpointFromRegion"); + +// src/endpointsConfig/resolveEndpointsConfig.ts +var resolveEndpointsConfig = /* @__PURE__ */ __name((input) => { + const useDualstackEndpoint = (0, import_util_middleware.normalizeProvider)(input.useDualstackEndpoint ?? false); + const { endpoint, useFipsEndpoint, urlParser, tls } = input; + return Object.assign(input, { + tls: tls ?? true, + endpoint: endpoint ? (0, import_util_middleware.normalizeProvider)(typeof endpoint === "string" ? 
urlParser(endpoint) : endpoint) : () => getEndpointFromRegion({ ...input, useDualstackEndpoint, useFipsEndpoint }), + isCustomEndpoint: !!endpoint, + useDualstackEndpoint + }); +}, "resolveEndpointsConfig"); + +// src/regionConfig/config.ts +var REGION_ENV_NAME = "AWS_REGION"; +var REGION_INI_NAME = "region"; +var NODE_REGION_CONFIG_OPTIONS = { + environmentVariableSelector: (env) => env[REGION_ENV_NAME], + configFileSelector: (profile) => profile[REGION_INI_NAME], + default: () => { + throw new Error("Region is missing"); + } +}; +var NODE_REGION_CONFIG_FILE_OPTIONS = { + preferredFile: "credentials" +}; + +// src/regionConfig/isFipsRegion.ts +var isFipsRegion = /* @__PURE__ */ __name((region) => typeof region === "string" && (region.startsWith("fips-") || region.endsWith("-fips")), "isFipsRegion"); + +// src/regionConfig/getRealRegion.ts +var getRealRegion = /* @__PURE__ */ __name((region) => isFipsRegion(region) ? ["fips-aws-global", "aws-fips"].includes(region) ? "us-east-1" : region.replace(/fips-(dkr-|prod-)?|-fips/, "") : region, "getRealRegion"); + +// src/regionConfig/resolveRegionConfig.ts +var resolveRegionConfig = /* @__PURE__ */ __name((input) => { + const { region, useFipsEndpoint } = input; + if (!region) { + throw new Error("Region is missing"); + } + return Object.assign(input, { + region: async () => { + if (typeof region === "string") { + return getRealRegion(region); + } + const providedRegion = await region(); + return getRealRegion(providedRegion); + }, + useFipsEndpoint: async () => { + const providedRegion = typeof region === "string" ? region : await region(); + if (isFipsRegion(providedRegion)) { + return true; + } + return typeof useFipsEndpoint !== "function" ? 
Promise.resolve(!!useFipsEndpoint) : useFipsEndpoint(); + } + }); +}, "resolveRegionConfig"); + +// src/regionInfo/getHostnameFromVariants.ts +var getHostnameFromVariants = /* @__PURE__ */ __name((variants = [], { useFipsEndpoint, useDualstackEndpoint }) => variants.find( + ({ tags }) => useFipsEndpoint === tags.includes("fips") && useDualstackEndpoint === tags.includes("dualstack") +)?.hostname, "getHostnameFromVariants"); + +// src/regionInfo/getResolvedHostname.ts +var getResolvedHostname = /* @__PURE__ */ __name((resolvedRegion, { regionHostname, partitionHostname }) => regionHostname ? regionHostname : partitionHostname ? partitionHostname.replace("{region}", resolvedRegion) : void 0, "getResolvedHostname"); + +// src/regionInfo/getResolvedPartition.ts +var getResolvedPartition = /* @__PURE__ */ __name((region, { partitionHash }) => Object.keys(partitionHash || {}).find((key) => partitionHash[key].regions.includes(region)) ?? "aws", "getResolvedPartition"); + +// src/regionInfo/getResolvedSigningRegion.ts +var getResolvedSigningRegion = /* @__PURE__ */ __name((hostname, { signingRegion, regionRegex, useFipsEndpoint }) => { + if (signingRegion) { + return signingRegion; + } else if (useFipsEndpoint) { + const regionRegexJs = regionRegex.replace("\\\\", "\\").replace(/^\^/g, "\\.").replace(/\$$/g, "\\."); + const regionRegexmatchArray = hostname.match(regionRegexJs); + if (regionRegexmatchArray) { + return regionRegexmatchArray[0].slice(1, -1); + } + } +}, "getResolvedSigningRegion"); + +// src/regionInfo/getRegionInfo.ts +var getRegionInfo = /* @__PURE__ */ __name((region, { + useFipsEndpoint = false, + useDualstackEndpoint = false, + signingService, + regionHash, + partitionHash +}) => { + const partition = getResolvedPartition(region, { partitionHash }); + const resolvedRegion = region in regionHash ? region : partitionHash[partition]?.endpoint ?? 
region; + const hostnameOptions = { useFipsEndpoint, useDualstackEndpoint }; + const regionHostname = getHostnameFromVariants(regionHash[resolvedRegion]?.variants, hostnameOptions); + const partitionHostname = getHostnameFromVariants(partitionHash[partition]?.variants, hostnameOptions); + const hostname = getResolvedHostname(resolvedRegion, { regionHostname, partitionHostname }); + if (hostname === void 0) { + throw new Error(`Endpoint resolution failed for: ${{ resolvedRegion, useFipsEndpoint, useDualstackEndpoint }}`); + } + const signingRegion = getResolvedSigningRegion(hostname, { + signingRegion: regionHash[resolvedRegion]?.signingRegion, + regionRegex: partitionHash[partition].regionRegex, + useFipsEndpoint + }); + return { + partition, + signingService, + hostname, + ...signingRegion && { signingRegion }, + ...regionHash[resolvedRegion]?.signingService && { + signingService: regionHash[resolvedRegion].signingService + } + }; +}, "getRegionInfo"); +// Annotate the CommonJS export names for ESM import in node: + +0 && (module.exports = { + ENV_USE_DUALSTACK_ENDPOINT, + CONFIG_USE_DUALSTACK_ENDPOINT, + DEFAULT_USE_DUALSTACK_ENDPOINT, + NODE_USE_DUALSTACK_ENDPOINT_CONFIG_OPTIONS, + ENV_USE_FIPS_ENDPOINT, + CONFIG_USE_FIPS_ENDPOINT, + DEFAULT_USE_FIPS_ENDPOINT, + NODE_USE_FIPS_ENDPOINT_CONFIG_OPTIONS, + resolveCustomEndpointsConfig, + resolveEndpointsConfig, + REGION_ENV_NAME, + REGION_INI_NAME, + NODE_REGION_CONFIG_OPTIONS, + NODE_REGION_CONFIG_FILE_OPTIONS, + resolveRegionConfig, + getRegionInfo +}); + diff --git a/amplify/functions/fetchDocuments/node_modules/@smithy/config-resolver/dist-cjs/regionConfig/config.js b/amplify/functions/fetchDocuments/node_modules/@smithy/config-resolver/dist-cjs/regionConfig/config.js new file mode 100644 index 0000000..0440577 --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@smithy/config-resolver/dist-cjs/regionConfig/config.js @@ -0,0 +1 @@ +module.exports = require("../index.js"); \ No newline at end of 
file diff --git a/amplify/functions/fetchDocuments/node_modules/@smithy/config-resolver/dist-cjs/regionConfig/getRealRegion.js b/amplify/functions/fetchDocuments/node_modules/@smithy/config-resolver/dist-cjs/regionConfig/getRealRegion.js new file mode 100644 index 0000000..0440577 --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@smithy/config-resolver/dist-cjs/regionConfig/getRealRegion.js @@ -0,0 +1 @@ +module.exports = require("../index.js"); \ No newline at end of file diff --git a/amplify/functions/fetchDocuments/node_modules/@smithy/config-resolver/dist-cjs/regionConfig/index.js b/amplify/functions/fetchDocuments/node_modules/@smithy/config-resolver/dist-cjs/regionConfig/index.js new file mode 100644 index 0000000..0440577 --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@smithy/config-resolver/dist-cjs/regionConfig/index.js @@ -0,0 +1 @@ +module.exports = require("../index.js"); \ No newline at end of file diff --git a/amplify/functions/fetchDocuments/node_modules/@smithy/config-resolver/dist-cjs/regionConfig/isFipsRegion.js b/amplify/functions/fetchDocuments/node_modules/@smithy/config-resolver/dist-cjs/regionConfig/isFipsRegion.js new file mode 100644 index 0000000..0440577 --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@smithy/config-resolver/dist-cjs/regionConfig/isFipsRegion.js @@ -0,0 +1 @@ +module.exports = require("../index.js"); \ No newline at end of file diff --git a/amplify/functions/fetchDocuments/node_modules/@smithy/config-resolver/dist-cjs/regionConfig/resolveRegionConfig.js b/amplify/functions/fetchDocuments/node_modules/@smithy/config-resolver/dist-cjs/regionConfig/resolveRegionConfig.js new file mode 100644 index 0000000..0440577 --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@smithy/config-resolver/dist-cjs/regionConfig/resolveRegionConfig.js @@ -0,0 +1 @@ +module.exports = require("../index.js"); \ No newline at end of file diff --git 
a/amplify/functions/fetchDocuments/node_modules/@smithy/config-resolver/dist-cjs/regionInfo/EndpointVariant.js b/amplify/functions/fetchDocuments/node_modules/@smithy/config-resolver/dist-cjs/regionInfo/EndpointVariant.js new file mode 100644 index 0000000..0440577 --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@smithy/config-resolver/dist-cjs/regionInfo/EndpointVariant.js @@ -0,0 +1 @@ +module.exports = require("../index.js"); \ No newline at end of file diff --git a/amplify/functions/fetchDocuments/node_modules/@smithy/config-resolver/dist-cjs/regionInfo/EndpointVariantTag.js b/amplify/functions/fetchDocuments/node_modules/@smithy/config-resolver/dist-cjs/regionInfo/EndpointVariantTag.js new file mode 100644 index 0000000..0440577 --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@smithy/config-resolver/dist-cjs/regionInfo/EndpointVariantTag.js @@ -0,0 +1 @@ +module.exports = require("../index.js"); \ No newline at end of file diff --git a/amplify/functions/fetchDocuments/node_modules/@smithy/config-resolver/dist-cjs/regionInfo/PartitionHash.js b/amplify/functions/fetchDocuments/node_modules/@smithy/config-resolver/dist-cjs/regionInfo/PartitionHash.js new file mode 100644 index 0000000..0440577 --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@smithy/config-resolver/dist-cjs/regionInfo/PartitionHash.js @@ -0,0 +1 @@ +module.exports = require("../index.js"); \ No newline at end of file diff --git a/amplify/functions/fetchDocuments/node_modules/@smithy/config-resolver/dist-cjs/regionInfo/RegionHash.js b/amplify/functions/fetchDocuments/node_modules/@smithy/config-resolver/dist-cjs/regionInfo/RegionHash.js new file mode 100644 index 0000000..0440577 --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@smithy/config-resolver/dist-cjs/regionInfo/RegionHash.js @@ -0,0 +1 @@ +module.exports = require("../index.js"); \ No newline at end of file diff --git 
a/amplify/functions/fetchDocuments/node_modules/@smithy/config-resolver/dist-cjs/regionInfo/getHostnameFromVariants.js b/amplify/functions/fetchDocuments/node_modules/@smithy/config-resolver/dist-cjs/regionInfo/getHostnameFromVariants.js new file mode 100644 index 0000000..0440577 --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@smithy/config-resolver/dist-cjs/regionInfo/getHostnameFromVariants.js @@ -0,0 +1 @@ +module.exports = require("../index.js"); \ No newline at end of file diff --git a/amplify/functions/fetchDocuments/node_modules/@smithy/config-resolver/dist-cjs/regionInfo/getRegionInfo.js b/amplify/functions/fetchDocuments/node_modules/@smithy/config-resolver/dist-cjs/regionInfo/getRegionInfo.js new file mode 100644 index 0000000..0440577 --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@smithy/config-resolver/dist-cjs/regionInfo/getRegionInfo.js @@ -0,0 +1 @@ +module.exports = require("../index.js"); \ No newline at end of file diff --git a/amplify/functions/fetchDocuments/node_modules/@smithy/config-resolver/dist-cjs/regionInfo/getResolvedHostname.js b/amplify/functions/fetchDocuments/node_modules/@smithy/config-resolver/dist-cjs/regionInfo/getResolvedHostname.js new file mode 100644 index 0000000..0440577 --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@smithy/config-resolver/dist-cjs/regionInfo/getResolvedHostname.js @@ -0,0 +1 @@ +module.exports = require("../index.js"); \ No newline at end of file diff --git a/amplify/functions/fetchDocuments/node_modules/@smithy/config-resolver/dist-cjs/regionInfo/getResolvedPartition.js b/amplify/functions/fetchDocuments/node_modules/@smithy/config-resolver/dist-cjs/regionInfo/getResolvedPartition.js new file mode 100644 index 0000000..0440577 --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@smithy/config-resolver/dist-cjs/regionInfo/getResolvedPartition.js @@ -0,0 +1 @@ +module.exports = require("../index.js"); \ No newline at end of file diff 
--git a/amplify/functions/fetchDocuments/node_modules/@smithy/config-resolver/dist-cjs/regionInfo/getResolvedSigningRegion.js b/amplify/functions/fetchDocuments/node_modules/@smithy/config-resolver/dist-cjs/regionInfo/getResolvedSigningRegion.js new file mode 100644 index 0000000..0440577 --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@smithy/config-resolver/dist-cjs/regionInfo/getResolvedSigningRegion.js @@ -0,0 +1 @@ +module.exports = require("../index.js"); \ No newline at end of file diff --git a/amplify/functions/fetchDocuments/node_modules/@smithy/config-resolver/dist-cjs/regionInfo/index.js b/amplify/functions/fetchDocuments/node_modules/@smithy/config-resolver/dist-cjs/regionInfo/index.js new file mode 100644 index 0000000..0440577 --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@smithy/config-resolver/dist-cjs/regionInfo/index.js @@ -0,0 +1 @@ +module.exports = require("../index.js"); \ No newline at end of file diff --git a/amplify/functions/fetchDocuments/node_modules/@smithy/config-resolver/dist-es/endpointsConfig/NodeUseDualstackEndpointConfigOptions.js b/amplify/functions/fetchDocuments/node_modules/@smithy/config-resolver/dist-es/endpointsConfig/NodeUseDualstackEndpointConfigOptions.js new file mode 100644 index 0000000..d061567 --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@smithy/config-resolver/dist-es/endpointsConfig/NodeUseDualstackEndpointConfigOptions.js @@ -0,0 +1,9 @@ +import { booleanSelector, SelectorType } from "@smithy/util-config-provider"; +export const ENV_USE_DUALSTACK_ENDPOINT = "AWS_USE_DUALSTACK_ENDPOINT"; +export const CONFIG_USE_DUALSTACK_ENDPOINT = "use_dualstack_endpoint"; +export const DEFAULT_USE_DUALSTACK_ENDPOINT = false; +export const NODE_USE_DUALSTACK_ENDPOINT_CONFIG_OPTIONS = { + environmentVariableSelector: (env) => booleanSelector(env, ENV_USE_DUALSTACK_ENDPOINT, SelectorType.ENV), + configFileSelector: (profile) => booleanSelector(profile, 
CONFIG_USE_DUALSTACK_ENDPOINT, SelectorType.CONFIG), + default: false, +}; diff --git a/amplify/functions/fetchDocuments/node_modules/@smithy/config-resolver/dist-es/endpointsConfig/NodeUseFipsEndpointConfigOptions.js b/amplify/functions/fetchDocuments/node_modules/@smithy/config-resolver/dist-es/endpointsConfig/NodeUseFipsEndpointConfigOptions.js new file mode 100644 index 0000000..8cac1e9 --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@smithy/config-resolver/dist-es/endpointsConfig/NodeUseFipsEndpointConfigOptions.js @@ -0,0 +1,9 @@ +import { booleanSelector, SelectorType } from "@smithy/util-config-provider"; +export const ENV_USE_FIPS_ENDPOINT = "AWS_USE_FIPS_ENDPOINT"; +export const CONFIG_USE_FIPS_ENDPOINT = "use_fips_endpoint"; +export const DEFAULT_USE_FIPS_ENDPOINT = false; +export const NODE_USE_FIPS_ENDPOINT_CONFIG_OPTIONS = { + environmentVariableSelector: (env) => booleanSelector(env, ENV_USE_FIPS_ENDPOINT, SelectorType.ENV), + configFileSelector: (profile) => booleanSelector(profile, CONFIG_USE_FIPS_ENDPOINT, SelectorType.CONFIG), + default: false, +}; diff --git a/amplify/functions/fetchDocuments/node_modules/@smithy/config-resolver/dist-es/endpointsConfig/index.js b/amplify/functions/fetchDocuments/node_modules/@smithy/config-resolver/dist-es/endpointsConfig/index.js new file mode 100644 index 0000000..1424c22 --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@smithy/config-resolver/dist-es/endpointsConfig/index.js @@ -0,0 +1,4 @@ +export * from "./NodeUseDualstackEndpointConfigOptions"; +export * from "./NodeUseFipsEndpointConfigOptions"; +export * from "./resolveCustomEndpointsConfig"; +export * from "./resolveEndpointsConfig"; diff --git a/amplify/functions/fetchDocuments/node_modules/@smithy/config-resolver/dist-es/endpointsConfig/resolveCustomEndpointsConfig.js b/amplify/functions/fetchDocuments/node_modules/@smithy/config-resolver/dist-es/endpointsConfig/resolveCustomEndpointsConfig.js new file mode 100644 
index 0000000..7f9a953 --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@smithy/config-resolver/dist-es/endpointsConfig/resolveCustomEndpointsConfig.js @@ -0,0 +1,10 @@ +import { normalizeProvider } from "@smithy/util-middleware"; +export const resolveCustomEndpointsConfig = (input) => { + const { tls, endpoint, urlParser, useDualstackEndpoint } = input; + return Object.assign(input, { + tls: tls ?? true, + endpoint: normalizeProvider(typeof endpoint === "string" ? urlParser(endpoint) : endpoint), + isCustomEndpoint: true, + useDualstackEndpoint: normalizeProvider(useDualstackEndpoint ?? false), + }); +}; diff --git a/amplify/functions/fetchDocuments/node_modules/@smithy/config-resolver/dist-es/endpointsConfig/resolveEndpointsConfig.js b/amplify/functions/fetchDocuments/node_modules/@smithy/config-resolver/dist-es/endpointsConfig/resolveEndpointsConfig.js new file mode 100644 index 0000000..440657d --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@smithy/config-resolver/dist-es/endpointsConfig/resolveEndpointsConfig.js @@ -0,0 +1,14 @@ +import { normalizeProvider } from "@smithy/util-middleware"; +import { getEndpointFromRegion } from "./utils/getEndpointFromRegion"; +export const resolveEndpointsConfig = (input) => { + const useDualstackEndpoint = normalizeProvider(input.useDualstackEndpoint ?? false); + const { endpoint, useFipsEndpoint, urlParser, tls } = input; + return Object.assign(input, { + tls: tls ?? true, + endpoint: endpoint + ? normalizeProvider(typeof endpoint === "string" ? 
urlParser(endpoint) : endpoint) + : () => getEndpointFromRegion({ ...input, useDualstackEndpoint, useFipsEndpoint }), + isCustomEndpoint: !!endpoint, + useDualstackEndpoint, + }); +}; diff --git a/amplify/functions/fetchDocuments/node_modules/@smithy/config-resolver/dist-es/endpointsConfig/utils/getEndpointFromRegion.js b/amplify/functions/fetchDocuments/node_modules/@smithy/config-resolver/dist-es/endpointsConfig/utils/getEndpointFromRegion.js new file mode 100644 index 0000000..5627c32 --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@smithy/config-resolver/dist-es/endpointsConfig/utils/getEndpointFromRegion.js @@ -0,0 +1,15 @@ +export const getEndpointFromRegion = async (input) => { + const { tls = true } = input; + const region = await input.region(); + const dnsHostRegex = new RegExp(/^([a-zA-Z0-9]|[a-zA-Z0-9][a-zA-Z0-9-]{0,61}[a-zA-Z0-9])$/); + if (!dnsHostRegex.test(region)) { + throw new Error("Invalid region in client config"); + } + const useDualstackEndpoint = await input.useDualstackEndpoint(); + const useFipsEndpoint = await input.useFipsEndpoint(); + const { hostname } = (await input.regionInfoProvider(region, { useDualstackEndpoint, useFipsEndpoint })) ?? {}; + if (!hostname) { + throw new Error("Cannot resolve hostname from client config"); + } + return input.urlParser(`${tls ? 
"https:" : "http:"}//${hostname}`); +}; diff --git a/amplify/functions/fetchDocuments/node_modules/@smithy/config-resolver/dist-es/index.js b/amplify/functions/fetchDocuments/node_modules/@smithy/config-resolver/dist-es/index.js new file mode 100644 index 0000000..61456a7 --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@smithy/config-resolver/dist-es/index.js @@ -0,0 +1,3 @@ +export * from "./endpointsConfig"; +export * from "./regionConfig"; +export * from "./regionInfo"; diff --git a/amplify/functions/fetchDocuments/node_modules/@smithy/config-resolver/dist-es/regionConfig/config.js b/amplify/functions/fetchDocuments/node_modules/@smithy/config-resolver/dist-es/regionConfig/config.js new file mode 100644 index 0000000..7db9896 --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@smithy/config-resolver/dist-es/regionConfig/config.js @@ -0,0 +1,12 @@ +export const REGION_ENV_NAME = "AWS_REGION"; +export const REGION_INI_NAME = "region"; +export const NODE_REGION_CONFIG_OPTIONS = { + environmentVariableSelector: (env) => env[REGION_ENV_NAME], + configFileSelector: (profile) => profile[REGION_INI_NAME], + default: () => { + throw new Error("Region is missing"); + }, +}; +export const NODE_REGION_CONFIG_FILE_OPTIONS = { + preferredFile: "credentials", +}; diff --git a/amplify/functions/fetchDocuments/node_modules/@smithy/config-resolver/dist-es/regionConfig/getRealRegion.js b/amplify/functions/fetchDocuments/node_modules/@smithy/config-resolver/dist-es/regionConfig/getRealRegion.js new file mode 100644 index 0000000..8d1246b --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@smithy/config-resolver/dist-es/regionConfig/getRealRegion.js @@ -0,0 +1,6 @@ +import { isFipsRegion } from "./isFipsRegion"; +export const getRealRegion = (region) => isFipsRegion(region) + ? ["fips-aws-global", "aws-fips"].includes(region) + ? 
"us-east-1" + : region.replace(/fips-(dkr-|prod-)?|-fips/, "") + : region; diff --git a/amplify/functions/fetchDocuments/node_modules/@smithy/config-resolver/dist-es/regionConfig/index.js b/amplify/functions/fetchDocuments/node_modules/@smithy/config-resolver/dist-es/regionConfig/index.js new file mode 100644 index 0000000..83675f7 --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@smithy/config-resolver/dist-es/regionConfig/index.js @@ -0,0 +1,2 @@ +export * from "./config"; +export * from "./resolveRegionConfig"; diff --git a/amplify/functions/fetchDocuments/node_modules/@smithy/config-resolver/dist-es/regionConfig/isFipsRegion.js b/amplify/functions/fetchDocuments/node_modules/@smithy/config-resolver/dist-es/regionConfig/isFipsRegion.js new file mode 100644 index 0000000..d758967 --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@smithy/config-resolver/dist-es/regionConfig/isFipsRegion.js @@ -0,0 +1 @@ +export const isFipsRegion = (region) => typeof region === "string" && (region.startsWith("fips-") || region.endsWith("-fips")); diff --git a/amplify/functions/fetchDocuments/node_modules/@smithy/config-resolver/dist-es/regionConfig/resolveRegionConfig.js b/amplify/functions/fetchDocuments/node_modules/@smithy/config-resolver/dist-es/regionConfig/resolveRegionConfig.js new file mode 100644 index 0000000..f88e00f --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@smithy/config-resolver/dist-es/regionConfig/resolveRegionConfig.js @@ -0,0 +1,24 @@ +import { getRealRegion } from "./getRealRegion"; +import { isFipsRegion } from "./isFipsRegion"; +export const resolveRegionConfig = (input) => { + const { region, useFipsEndpoint } = input; + if (!region) { + throw new Error("Region is missing"); + } + return Object.assign(input, { + region: async () => { + if (typeof region === "string") { + return getRealRegion(region); + } + const providedRegion = await region(); + return getRealRegion(providedRegion); + }, + 
useFipsEndpoint: async () => { + const providedRegion = typeof region === "string" ? region : await region(); + if (isFipsRegion(providedRegion)) { + return true; + } + return typeof useFipsEndpoint !== "function" ? Promise.resolve(!!useFipsEndpoint) : useFipsEndpoint(); + }, + }); +}; diff --git a/amplify/functions/fetchDocuments/node_modules/@smithy/config-resolver/dist-es/regionInfo/EndpointVariant.js b/amplify/functions/fetchDocuments/node_modules/@smithy/config-resolver/dist-es/regionInfo/EndpointVariant.js new file mode 100644 index 0000000..cb0ff5c --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@smithy/config-resolver/dist-es/regionInfo/EndpointVariant.js @@ -0,0 +1 @@ +export {}; diff --git a/amplify/functions/fetchDocuments/node_modules/@smithy/config-resolver/dist-es/regionInfo/EndpointVariantTag.js b/amplify/functions/fetchDocuments/node_modules/@smithy/config-resolver/dist-es/regionInfo/EndpointVariantTag.js new file mode 100644 index 0000000..cb0ff5c --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@smithy/config-resolver/dist-es/regionInfo/EndpointVariantTag.js @@ -0,0 +1 @@ +export {}; diff --git a/amplify/functions/fetchDocuments/node_modules/@smithy/config-resolver/dist-es/regionInfo/PartitionHash.js b/amplify/functions/fetchDocuments/node_modules/@smithy/config-resolver/dist-es/regionInfo/PartitionHash.js new file mode 100644 index 0000000..cb0ff5c --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@smithy/config-resolver/dist-es/regionInfo/PartitionHash.js @@ -0,0 +1 @@ +export {}; diff --git a/amplify/functions/fetchDocuments/node_modules/@smithy/config-resolver/dist-es/regionInfo/RegionHash.js b/amplify/functions/fetchDocuments/node_modules/@smithy/config-resolver/dist-es/regionInfo/RegionHash.js new file mode 100644 index 0000000..cb0ff5c --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@smithy/config-resolver/dist-es/regionInfo/RegionHash.js @@ -0,0 +1 @@ +export {}; diff 
--git a/amplify/functions/fetchDocuments/node_modules/@smithy/config-resolver/dist-es/regionInfo/getHostnameFromVariants.js b/amplify/functions/fetchDocuments/node_modules/@smithy/config-resolver/dist-es/regionInfo/getHostnameFromVariants.js new file mode 100644 index 0000000..84fc50e --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@smithy/config-resolver/dist-es/regionInfo/getHostnameFromVariants.js @@ -0,0 +1 @@ +export const getHostnameFromVariants = (variants = [], { useFipsEndpoint, useDualstackEndpoint }) => variants.find(({ tags }) => useFipsEndpoint === tags.includes("fips") && useDualstackEndpoint === tags.includes("dualstack"))?.hostname; diff --git a/amplify/functions/fetchDocuments/node_modules/@smithy/config-resolver/dist-es/regionInfo/getRegionInfo.js b/amplify/functions/fetchDocuments/node_modules/@smithy/config-resolver/dist-es/regionInfo/getRegionInfo.js new file mode 100644 index 0000000..c39e2f7 --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@smithy/config-resolver/dist-es/regionInfo/getRegionInfo.js @@ -0,0 +1,29 @@ +import { getHostnameFromVariants } from "./getHostnameFromVariants"; +import { getResolvedHostname } from "./getResolvedHostname"; +import { getResolvedPartition } from "./getResolvedPartition"; +import { getResolvedSigningRegion } from "./getResolvedSigningRegion"; +export const getRegionInfo = (region, { useFipsEndpoint = false, useDualstackEndpoint = false, signingService, regionHash, partitionHash, }) => { + const partition = getResolvedPartition(region, { partitionHash }); + const resolvedRegion = region in regionHash ? region : partitionHash[partition]?.endpoint ?? 
region; + const hostnameOptions = { useFipsEndpoint, useDualstackEndpoint }; + const regionHostname = getHostnameFromVariants(regionHash[resolvedRegion]?.variants, hostnameOptions); + const partitionHostname = getHostnameFromVariants(partitionHash[partition]?.variants, hostnameOptions); + const hostname = getResolvedHostname(resolvedRegion, { regionHostname, partitionHostname }); + if (hostname === undefined) { + throw new Error(`Endpoint resolution failed for: ${{ resolvedRegion, useFipsEndpoint, useDualstackEndpoint }}`); + } + const signingRegion = getResolvedSigningRegion(hostname, { + signingRegion: regionHash[resolvedRegion]?.signingRegion, + regionRegex: partitionHash[partition].regionRegex, + useFipsEndpoint, + }); + return { + partition, + signingService, + hostname, + ...(signingRegion && { signingRegion }), + ...(regionHash[resolvedRegion]?.signingService && { + signingService: regionHash[resolvedRegion].signingService, + }), + }; +}; diff --git a/amplify/functions/fetchDocuments/node_modules/@smithy/config-resolver/dist-es/regionInfo/getResolvedHostname.js b/amplify/functions/fetchDocuments/node_modules/@smithy/config-resolver/dist-es/regionInfo/getResolvedHostname.js new file mode 100644 index 0000000..35fb988 --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@smithy/config-resolver/dist-es/regionInfo/getResolvedHostname.js @@ -0,0 +1,5 @@ +export const getResolvedHostname = (resolvedRegion, { regionHostname, partitionHostname }) => regionHostname + ? regionHostname + : partitionHostname + ? 
partitionHostname.replace("{region}", resolvedRegion) + : undefined; diff --git a/amplify/functions/fetchDocuments/node_modules/@smithy/config-resolver/dist-es/regionInfo/getResolvedPartition.js b/amplify/functions/fetchDocuments/node_modules/@smithy/config-resolver/dist-es/regionInfo/getResolvedPartition.js new file mode 100644 index 0000000..3d7bc55 --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@smithy/config-resolver/dist-es/regionInfo/getResolvedPartition.js @@ -0,0 +1 @@ +export const getResolvedPartition = (region, { partitionHash }) => Object.keys(partitionHash || {}).find((key) => partitionHash[key].regions.includes(region)) ?? "aws"; diff --git a/amplify/functions/fetchDocuments/node_modules/@smithy/config-resolver/dist-es/regionInfo/getResolvedSigningRegion.js b/amplify/functions/fetchDocuments/node_modules/@smithy/config-resolver/dist-es/regionInfo/getResolvedSigningRegion.js new file mode 100644 index 0000000..7977e00 --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@smithy/config-resolver/dist-es/regionInfo/getResolvedSigningRegion.js @@ -0,0 +1,12 @@ +export const getResolvedSigningRegion = (hostname, { signingRegion, regionRegex, useFipsEndpoint }) => { + if (signingRegion) { + return signingRegion; + } + else if (useFipsEndpoint) { + const regionRegexJs = regionRegex.replace("\\\\", "\\").replace(/^\^/g, "\\.").replace(/\$$/g, "\\."); + const regionRegexmatchArray = hostname.match(regionRegexJs); + if (regionRegexmatchArray) { + return regionRegexmatchArray[0].slice(1, -1); + } + } +}; diff --git a/amplify/functions/fetchDocuments/node_modules/@smithy/config-resolver/dist-es/regionInfo/index.js b/amplify/functions/fetchDocuments/node_modules/@smithy/config-resolver/dist-es/regionInfo/index.js new file mode 100644 index 0000000..e29686a --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@smithy/config-resolver/dist-es/regionInfo/index.js @@ -0,0 +1,3 @@ +export * from "./PartitionHash"; +export * 
from "./RegionHash"; +export * from "./getRegionInfo"; diff --git a/amplify/functions/fetchDocuments/node_modules/@smithy/config-resolver/dist-types/endpointsConfig/NodeUseDualstackEndpointConfigOptions.d.ts b/amplify/functions/fetchDocuments/node_modules/@smithy/config-resolver/dist-types/endpointsConfig/NodeUseDualstackEndpointConfigOptions.d.ts new file mode 100644 index 0000000..172d8c1 --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@smithy/config-resolver/dist-types/endpointsConfig/NodeUseDualstackEndpointConfigOptions.d.ts @@ -0,0 +1,17 @@ +import { LoadedConfigSelectors } from "@smithy/node-config-provider"; +/** + * @internal + */ +export declare const ENV_USE_DUALSTACK_ENDPOINT = "AWS_USE_DUALSTACK_ENDPOINT"; +/** + * @internal + */ +export declare const CONFIG_USE_DUALSTACK_ENDPOINT = "use_dualstack_endpoint"; +/** + * @internal + */ +export declare const DEFAULT_USE_DUALSTACK_ENDPOINT = false; +/** + * @internal + */ +export declare const NODE_USE_DUALSTACK_ENDPOINT_CONFIG_OPTIONS: LoadedConfigSelectors; diff --git a/amplify/functions/fetchDocuments/node_modules/@smithy/config-resolver/dist-types/endpointsConfig/NodeUseFipsEndpointConfigOptions.d.ts b/amplify/functions/fetchDocuments/node_modules/@smithy/config-resolver/dist-types/endpointsConfig/NodeUseFipsEndpointConfigOptions.d.ts new file mode 100644 index 0000000..106bbdb --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@smithy/config-resolver/dist-types/endpointsConfig/NodeUseFipsEndpointConfigOptions.d.ts @@ -0,0 +1,17 @@ +import { LoadedConfigSelectors } from "@smithy/node-config-provider"; +/** + * @internal + */ +export declare const ENV_USE_FIPS_ENDPOINT = "AWS_USE_FIPS_ENDPOINT"; +/** + * @internal + */ +export declare const CONFIG_USE_FIPS_ENDPOINT = "use_fips_endpoint"; +/** + * @internal + */ +export declare const DEFAULT_USE_FIPS_ENDPOINT = false; +/** + * @internal + */ +export declare const NODE_USE_FIPS_ENDPOINT_CONFIG_OPTIONS: LoadedConfigSelectors; 
diff --git a/amplify/functions/fetchDocuments/node_modules/@smithy/config-resolver/dist-types/endpointsConfig/index.d.ts b/amplify/functions/fetchDocuments/node_modules/@smithy/config-resolver/dist-types/endpointsConfig/index.d.ts new file mode 100644 index 0000000..ea1cf59 --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@smithy/config-resolver/dist-types/endpointsConfig/index.d.ts @@ -0,0 +1,16 @@ +/** + * @internal + */ +export * from "./NodeUseDualstackEndpointConfigOptions"; +/** + * @internal + */ +export * from "./NodeUseFipsEndpointConfigOptions"; +/** + * @internal + */ +export * from "./resolveCustomEndpointsConfig"; +/** + * @internal + */ +export * from "./resolveEndpointsConfig"; diff --git a/amplify/functions/fetchDocuments/node_modules/@smithy/config-resolver/dist-types/endpointsConfig/resolveCustomEndpointsConfig.d.ts b/amplify/functions/fetchDocuments/node_modules/@smithy/config-resolver/dist-types/endpointsConfig/resolveCustomEndpointsConfig.d.ts new file mode 100644 index 0000000..477afbc --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@smithy/config-resolver/dist-types/endpointsConfig/resolveCustomEndpointsConfig.d.ts @@ -0,0 +1,32 @@ +import { Endpoint, Provider, UrlParser } from "@smithy/types"; +import { EndpointsInputConfig, EndpointsResolvedConfig } from "./resolveEndpointsConfig"; +/** + * @public + */ +export interface CustomEndpointsInputConfig extends EndpointsInputConfig { + /** + * The fully qualified endpoint of the webservice. + */ + endpoint: string | Endpoint | Provider; +} +/** + * @internal + */ +interface PreviouslyResolved { + urlParser: UrlParser; +} +/** + * @internal + */ +export interface CustomEndpointsResolvedConfig extends EndpointsResolvedConfig { + /** + * Whether the endpoint is specified by caller. 
+ * @internal + */ + isCustomEndpoint: true; +} +/** + * @internal + */ +export declare const resolveCustomEndpointsConfig: (input: T & CustomEndpointsInputConfig & PreviouslyResolved) => T & CustomEndpointsResolvedConfig; +export {}; diff --git a/amplify/functions/fetchDocuments/node_modules/@smithy/config-resolver/dist-types/endpointsConfig/resolveEndpointsConfig.d.ts b/amplify/functions/fetchDocuments/node_modules/@smithy/config-resolver/dist-types/endpointsConfig/resolveEndpointsConfig.d.ts new file mode 100644 index 0000000..4cd1d8f --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@smithy/config-resolver/dist-types/endpointsConfig/resolveEndpointsConfig.d.ts @@ -0,0 +1,54 @@ +import { Endpoint, Provider, RegionInfoProvider, UrlParser } from "@smithy/types"; +/** + * @public + */ +export interface EndpointsInputConfig { + /** + * The fully qualified endpoint of the webservice. This is only required when using + * a custom endpoint (for example, when using a local version of S3). + */ + endpoint?: string | Endpoint | Provider; + /** + * Whether TLS is enabled for requests. + */ + tls?: boolean; + /** + * Enables IPv6/IPv4 dualstack endpoint. + */ + useDualstackEndpoint?: boolean | Provider; +} +/** + * @internal + */ +interface PreviouslyResolved { + regionInfoProvider: RegionInfoProvider; + urlParser: UrlParser; + region: Provider; + useFipsEndpoint: Provider; +} +/** + * @internal + */ +export interface EndpointsResolvedConfig extends Required { + /** + * Resolved value for input {@link EndpointsInputConfig.endpoint} + */ + endpoint: Provider; + /** + * Whether the endpoint is specified by caller. + * @internal + */ + isCustomEndpoint?: boolean; + /** + * Resolved value for input {@link EndpointsInputConfig.useDualstackEndpoint} + */ + useDualstackEndpoint: Provider; +} +/** + * @internal + * + * @deprecated endpoints rulesets use \@smithy/middleware-endpoint resolveEndpointConfig. 
+ * All generated clients should migrate to Endpoints 2.0 endpointRuleSet traits. + */ +export declare const resolveEndpointsConfig: (input: T & EndpointsInputConfig & PreviouslyResolved) => T & EndpointsResolvedConfig; +export {}; diff --git a/amplify/functions/fetchDocuments/node_modules/@smithy/config-resolver/dist-types/endpointsConfig/utils/getEndpointFromRegion.d.ts b/amplify/functions/fetchDocuments/node_modules/@smithy/config-resolver/dist-types/endpointsConfig/utils/getEndpointFromRegion.d.ts new file mode 100644 index 0000000..5ded732 --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@smithy/config-resolver/dist-types/endpointsConfig/utils/getEndpointFromRegion.d.ts @@ -0,0 +1,11 @@ +import { Provider, RegionInfoProvider, UrlParser } from "@smithy/types"; +interface GetEndpointFromRegionOptions { + region: Provider; + tls?: boolean; + regionInfoProvider: RegionInfoProvider; + urlParser: UrlParser; + useDualstackEndpoint: Provider; + useFipsEndpoint: Provider; +} +export declare const getEndpointFromRegion: (input: GetEndpointFromRegionOptions) => Promise; +export {}; diff --git a/amplify/functions/fetchDocuments/node_modules/@smithy/config-resolver/dist-types/index.d.ts b/amplify/functions/fetchDocuments/node_modules/@smithy/config-resolver/dist-types/index.d.ts new file mode 100644 index 0000000..fde7086 --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@smithy/config-resolver/dist-types/index.d.ts @@ -0,0 +1,12 @@ +/** + * @internal + */ +export * from "./endpointsConfig"; +/** + * @internal + */ +export * from "./regionConfig"; +/** + * @internal + */ +export * from "./regionInfo"; diff --git a/amplify/functions/fetchDocuments/node_modules/@smithy/config-resolver/dist-types/regionConfig/config.d.ts b/amplify/functions/fetchDocuments/node_modules/@smithy/config-resolver/dist-types/regionConfig/config.d.ts new file mode 100644 index 0000000..d203bb0 --- /dev/null +++ 
b/amplify/functions/fetchDocuments/node_modules/@smithy/config-resolver/dist-types/regionConfig/config.d.ts @@ -0,0 +1,17 @@ +import { LoadedConfigSelectors, LocalConfigOptions } from "@smithy/node-config-provider"; +/** + * @internal + */ +export declare const REGION_ENV_NAME = "AWS_REGION"; +/** + * @internal + */ +export declare const REGION_INI_NAME = "region"; +/** + * @internal + */ +export declare const NODE_REGION_CONFIG_OPTIONS: LoadedConfigSelectors; +/** + * @internal + */ +export declare const NODE_REGION_CONFIG_FILE_OPTIONS: LocalConfigOptions; diff --git a/amplify/functions/fetchDocuments/node_modules/@smithy/config-resolver/dist-types/regionConfig/getRealRegion.d.ts b/amplify/functions/fetchDocuments/node_modules/@smithy/config-resolver/dist-types/regionConfig/getRealRegion.d.ts new file mode 100644 index 0000000..c70fb5b --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@smithy/config-resolver/dist-types/regionConfig/getRealRegion.d.ts @@ -0,0 +1,4 @@ +/** + * @internal + */ +export declare const getRealRegion: (region: string) => string; diff --git a/amplify/functions/fetchDocuments/node_modules/@smithy/config-resolver/dist-types/regionConfig/index.d.ts b/amplify/functions/fetchDocuments/node_modules/@smithy/config-resolver/dist-types/regionConfig/index.d.ts new file mode 100644 index 0000000..6dcf5e5 --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@smithy/config-resolver/dist-types/regionConfig/index.d.ts @@ -0,0 +1,8 @@ +/** + * @internal + */ +export * from "./config"; +/** + * @internal + */ +export * from "./resolveRegionConfig"; diff --git a/amplify/functions/fetchDocuments/node_modules/@smithy/config-resolver/dist-types/regionConfig/isFipsRegion.d.ts b/amplify/functions/fetchDocuments/node_modules/@smithy/config-resolver/dist-types/regionConfig/isFipsRegion.d.ts new file mode 100644 index 0000000..b42cee7 --- /dev/null +++ 
b/amplify/functions/fetchDocuments/node_modules/@smithy/config-resolver/dist-types/regionConfig/isFipsRegion.d.ts @@ -0,0 +1,4 @@ +/** + * @internal + */ +export declare const isFipsRegion: (region: string) => boolean; diff --git a/amplify/functions/fetchDocuments/node_modules/@smithy/config-resolver/dist-types/regionConfig/resolveRegionConfig.d.ts b/amplify/functions/fetchDocuments/node_modules/@smithy/config-resolver/dist-types/regionConfig/resolveRegionConfig.d.ts new file mode 100644 index 0000000..c06c9d4 --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@smithy/config-resolver/dist-types/regionConfig/resolveRegionConfig.d.ts @@ -0,0 +1,34 @@ +import { Provider } from "@smithy/types"; +/** + * @public + */ +export interface RegionInputConfig { + /** + * The AWS region to which this client will send requests + */ + region?: string | Provider; + /** + * Enables FIPS compatible endpoints. + */ + useFipsEndpoint?: boolean | Provider; +} +interface PreviouslyResolved { +} +/** + * @internal + */ +export interface RegionResolvedConfig { + /** + * Resolved value for input config {@link RegionInputConfig.region} + */ + region: Provider; + /** + * Resolved value for input {@link RegionInputConfig.useFipsEndpoint} + */ + useFipsEndpoint: Provider; +} +/** + * @internal + */ +export declare const resolveRegionConfig: (input: T & RegionInputConfig & PreviouslyResolved) => T & RegionResolvedConfig; +export {}; diff --git a/amplify/functions/fetchDocuments/node_modules/@smithy/config-resolver/dist-types/regionInfo/EndpointVariant.d.ts b/amplify/functions/fetchDocuments/node_modules/@smithy/config-resolver/dist-types/regionInfo/EndpointVariant.d.ts new file mode 100644 index 0000000..9b68e93 --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@smithy/config-resolver/dist-types/regionInfo/EndpointVariant.d.ts @@ -0,0 +1,10 @@ +import { EndpointVariantTag } from "./EndpointVariantTag"; +/** + * @internal + * + * Provides hostname information for 
specific host label. + */ +export type EndpointVariant = { + hostname: string; + tags: EndpointVariantTag[]; +}; diff --git a/amplify/functions/fetchDocuments/node_modules/@smithy/config-resolver/dist-types/regionInfo/EndpointVariantTag.d.ts b/amplify/functions/fetchDocuments/node_modules/@smithy/config-resolver/dist-types/regionInfo/EndpointVariantTag.d.ts new file mode 100644 index 0000000..ca50e1f --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@smithy/config-resolver/dist-types/regionInfo/EndpointVariantTag.d.ts @@ -0,0 +1,7 @@ +/** + * @internal + * + * The tag which mentions which area variant is providing information for. + * Can be either "fips" or "dualstack". + */ +export type EndpointVariantTag = "fips" | "dualstack"; diff --git a/amplify/functions/fetchDocuments/node_modules/@smithy/config-resolver/dist-types/regionInfo/PartitionHash.d.ts b/amplify/functions/fetchDocuments/node_modules/@smithy/config-resolver/dist-types/regionInfo/PartitionHash.d.ts new file mode 100644 index 0000000..0a5be17 --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@smithy/config-resolver/dist-types/regionInfo/PartitionHash.d.ts @@ -0,0 +1,14 @@ +import { EndpointVariant } from "./EndpointVariant"; +/** + * @internal + * + * The hash of partition with the information specific to that partition. + * The information includes the list of regions belonging to that partition, + * and the hostname to be used for the partition. 
+ */ +export type PartitionHash = Record; diff --git a/amplify/functions/fetchDocuments/node_modules/@smithy/config-resolver/dist-types/regionInfo/RegionHash.d.ts b/amplify/functions/fetchDocuments/node_modules/@smithy/config-resolver/dist-types/regionInfo/RegionHash.d.ts new file mode 100644 index 0000000..01cd843 --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@smithy/config-resolver/dist-types/regionInfo/RegionHash.d.ts @@ -0,0 +1,12 @@ +import { EndpointVariant } from "./EndpointVariant"; +/** + * @internal + * + * The hash of region with the information specific to that region. + * The information can include hostname, signingService and signingRegion. + */ +export type RegionHash = Record; diff --git a/amplify/functions/fetchDocuments/node_modules/@smithy/config-resolver/dist-types/regionInfo/getHostnameFromVariants.d.ts b/amplify/functions/fetchDocuments/node_modules/@smithy/config-resolver/dist-types/regionInfo/getHostnameFromVariants.d.ts new file mode 100644 index 0000000..47bcf70 --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@smithy/config-resolver/dist-types/regionInfo/getHostnameFromVariants.d.ts @@ -0,0 +1,12 @@ +import { EndpointVariant } from "./EndpointVariant"; +/** + * @internal + */ +export interface GetHostnameFromVariantsOptions { + useFipsEndpoint: boolean; + useDualstackEndpoint: boolean; +} +/** + * @internal + */ +export declare const getHostnameFromVariants: (variants: EndpointVariant[] | undefined, { useFipsEndpoint, useDualstackEndpoint }: GetHostnameFromVariantsOptions) => string | undefined; diff --git a/amplify/functions/fetchDocuments/node_modules/@smithy/config-resolver/dist-types/regionInfo/getRegionInfo.d.ts b/amplify/functions/fetchDocuments/node_modules/@smithy/config-resolver/dist-types/regionInfo/getRegionInfo.d.ts new file mode 100644 index 0000000..0aaae08 --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@smithy/config-resolver/dist-types/regionInfo/getRegionInfo.d.ts 
@@ -0,0 +1,17 @@ +import { RegionInfo } from "@smithy/types"; +import { PartitionHash } from "./PartitionHash"; +import { RegionHash } from "./RegionHash"; +/** + * @internal + */ +export interface GetRegionInfoOptions { + useFipsEndpoint?: boolean; + useDualstackEndpoint?: boolean; + signingService: string; + regionHash: RegionHash; + partitionHash: PartitionHash; +} +/** + * @internal + */ +export declare const getRegionInfo: (region: string, { useFipsEndpoint, useDualstackEndpoint, signingService, regionHash, partitionHash, }: GetRegionInfoOptions) => RegionInfo; diff --git a/amplify/functions/fetchDocuments/node_modules/@smithy/config-resolver/dist-types/regionInfo/getResolvedHostname.d.ts b/amplify/functions/fetchDocuments/node_modules/@smithy/config-resolver/dist-types/regionInfo/getResolvedHostname.d.ts new file mode 100644 index 0000000..bf7a2b3 --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@smithy/config-resolver/dist-types/regionInfo/getResolvedHostname.d.ts @@ -0,0 +1,11 @@ +/** + * @internal + */ +export interface GetResolvedHostnameOptions { + regionHostname?: string; + partitionHostname?: string; +} +/** + * @internal + */ +export declare const getResolvedHostname: (resolvedRegion: string, { regionHostname, partitionHostname }: GetResolvedHostnameOptions) => string | undefined; diff --git a/amplify/functions/fetchDocuments/node_modules/@smithy/config-resolver/dist-types/regionInfo/getResolvedPartition.d.ts b/amplify/functions/fetchDocuments/node_modules/@smithy/config-resolver/dist-types/regionInfo/getResolvedPartition.d.ts new file mode 100644 index 0000000..587b4fc --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@smithy/config-resolver/dist-types/regionInfo/getResolvedPartition.d.ts @@ -0,0 +1,11 @@ +import { PartitionHash } from "./PartitionHash"; +/** + * @internal + */ +export interface GetResolvedPartitionOptions { + partitionHash: PartitionHash; +} +/** + * @internal + */ +export declare const 
getResolvedPartition: (region: string, { partitionHash }: GetResolvedPartitionOptions) => string; diff --git a/amplify/functions/fetchDocuments/node_modules/@smithy/config-resolver/dist-types/regionInfo/getResolvedSigningRegion.d.ts b/amplify/functions/fetchDocuments/node_modules/@smithy/config-resolver/dist-types/regionInfo/getResolvedSigningRegion.d.ts new file mode 100644 index 0000000..3f5f7af --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@smithy/config-resolver/dist-types/regionInfo/getResolvedSigningRegion.d.ts @@ -0,0 +1,12 @@ +/** + * @internal + */ +export interface GetResolvedSigningRegionOptions { + regionRegex: string; + signingRegion?: string; + useFipsEndpoint: boolean; +} +/** + * @internal + */ +export declare const getResolvedSigningRegion: (hostname: string, { signingRegion, regionRegex, useFipsEndpoint }: GetResolvedSigningRegionOptions) => string | undefined; diff --git a/amplify/functions/fetchDocuments/node_modules/@smithy/config-resolver/dist-types/regionInfo/index.d.ts b/amplify/functions/fetchDocuments/node_modules/@smithy/config-resolver/dist-types/regionInfo/index.d.ts new file mode 100644 index 0000000..64ef0d5 --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@smithy/config-resolver/dist-types/regionInfo/index.d.ts @@ -0,0 +1,12 @@ +/** + * @internal + */ +export * from "./PartitionHash"; +/** + * @internal + */ +export * from "./RegionHash"; +/** + * @internal + */ +export * from "./getRegionInfo"; diff --git a/amplify/functions/fetchDocuments/node_modules/@smithy/config-resolver/dist-types/ts3.4/endpointsConfig/NodeUseDualstackEndpointConfigOptions.d.ts b/amplify/functions/fetchDocuments/node_modules/@smithy/config-resolver/dist-types/ts3.4/endpointsConfig/NodeUseDualstackEndpointConfigOptions.d.ts new file mode 100644 index 0000000..169720a --- /dev/null +++ 
b/amplify/functions/fetchDocuments/node_modules/@smithy/config-resolver/dist-types/ts3.4/endpointsConfig/NodeUseDualstackEndpointConfigOptions.d.ts @@ -0,0 +1,17 @@ +import { LoadedConfigSelectors } from "@smithy/node-config-provider"; +/** + * @internal + */ +export declare const ENV_USE_DUALSTACK_ENDPOINT = "AWS_USE_DUALSTACK_ENDPOINT"; +/** + * @internal + */ +export declare const CONFIG_USE_DUALSTACK_ENDPOINT = "use_dualstack_endpoint"; +/** + * @internal + */ +export declare const DEFAULT_USE_DUALSTACK_ENDPOINT = false; +/** + * @internal + */ +export declare const NODE_USE_DUALSTACK_ENDPOINT_CONFIG_OPTIONS: LoadedConfigSelectors; diff --git a/amplify/functions/fetchDocuments/node_modules/@smithy/config-resolver/dist-types/ts3.4/endpointsConfig/NodeUseFipsEndpointConfigOptions.d.ts b/amplify/functions/fetchDocuments/node_modules/@smithy/config-resolver/dist-types/ts3.4/endpointsConfig/NodeUseFipsEndpointConfigOptions.d.ts new file mode 100644 index 0000000..b17417e --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@smithy/config-resolver/dist-types/ts3.4/endpointsConfig/NodeUseFipsEndpointConfigOptions.d.ts @@ -0,0 +1,17 @@ +import { LoadedConfigSelectors } from "@smithy/node-config-provider"; +/** + * @internal + */ +export declare const ENV_USE_FIPS_ENDPOINT = "AWS_USE_FIPS_ENDPOINT"; +/** + * @internal + */ +export declare const CONFIG_USE_FIPS_ENDPOINT = "use_fips_endpoint"; +/** + * @internal + */ +export declare const DEFAULT_USE_FIPS_ENDPOINT = false; +/** + * @internal + */ +export declare const NODE_USE_FIPS_ENDPOINT_CONFIG_OPTIONS: LoadedConfigSelectors; diff --git a/amplify/functions/fetchDocuments/node_modules/@smithy/config-resolver/dist-types/ts3.4/endpointsConfig/index.d.ts b/amplify/functions/fetchDocuments/node_modules/@smithy/config-resolver/dist-types/ts3.4/endpointsConfig/index.d.ts new file mode 100644 index 0000000..cbabe5b --- /dev/null +++ 
b/amplify/functions/fetchDocuments/node_modules/@smithy/config-resolver/dist-types/ts3.4/endpointsConfig/index.d.ts @@ -0,0 +1,16 @@ +/** + * @internal + */ +export * from "./NodeUseDualstackEndpointConfigOptions"; +/** + * @internal + */ +export * from "./NodeUseFipsEndpointConfigOptions"; +/** + * @internal + */ +export * from "./resolveCustomEndpointsConfig"; +/** + * @internal + */ +export * from "./resolveEndpointsConfig"; diff --git a/amplify/functions/fetchDocuments/node_modules/@smithy/config-resolver/dist-types/ts3.4/endpointsConfig/resolveCustomEndpointsConfig.d.ts b/amplify/functions/fetchDocuments/node_modules/@smithy/config-resolver/dist-types/ts3.4/endpointsConfig/resolveCustomEndpointsConfig.d.ts new file mode 100644 index 0000000..f49306e --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@smithy/config-resolver/dist-types/ts3.4/endpointsConfig/resolveCustomEndpointsConfig.d.ts @@ -0,0 +1,32 @@ +import { Endpoint, Provider, UrlParser } from "@smithy/types"; +import { EndpointsInputConfig, EndpointsResolvedConfig } from "./resolveEndpointsConfig"; +/** + * @public + */ +export interface CustomEndpointsInputConfig extends EndpointsInputConfig { + /** + * The fully qualified endpoint of the webservice. + */ + endpoint: string | Endpoint | Provider; +} +/** + * @internal + */ +interface PreviouslyResolved { + urlParser: UrlParser; +} +/** + * @internal + */ +export interface CustomEndpointsResolvedConfig extends EndpointsResolvedConfig { + /** + * Whether the endpoint is specified by caller. 
+ * @internal + */ + isCustomEndpoint: true; +} +/** + * @internal + */ +export declare const resolveCustomEndpointsConfig: (input: T & CustomEndpointsInputConfig & PreviouslyResolved) => T & CustomEndpointsResolvedConfig; +export {}; diff --git a/amplify/functions/fetchDocuments/node_modules/@smithy/config-resolver/dist-types/ts3.4/endpointsConfig/resolveEndpointsConfig.d.ts b/amplify/functions/fetchDocuments/node_modules/@smithy/config-resolver/dist-types/ts3.4/endpointsConfig/resolveEndpointsConfig.d.ts new file mode 100644 index 0000000..388819d --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@smithy/config-resolver/dist-types/ts3.4/endpointsConfig/resolveEndpointsConfig.d.ts @@ -0,0 +1,54 @@ +import { Endpoint, Provider, RegionInfoProvider, UrlParser } from "@smithy/types"; +/** + * @public + */ +export interface EndpointsInputConfig { + /** + * The fully qualified endpoint of the webservice. This is only required when using + * a custom endpoint (for example, when using a local version of S3). + */ + endpoint?: string | Endpoint | Provider; + /** + * Whether TLS is enabled for requests. + */ + tls?: boolean; + /** + * Enables IPv6/IPv4 dualstack endpoint. + */ + useDualstackEndpoint?: boolean | Provider; +} +/** + * @internal + */ +interface PreviouslyResolved { + regionInfoProvider: RegionInfoProvider; + urlParser: UrlParser; + region: Provider; + useFipsEndpoint: Provider; +} +/** + * @internal + */ +export interface EndpointsResolvedConfig extends Required { + /** + * Resolved value for input {@link EndpointsInputConfig.endpoint} + */ + endpoint: Provider; + /** + * Whether the endpoint is specified by caller. + * @internal + */ + isCustomEndpoint?: boolean; + /** + * Resolved value for input {@link EndpointsInputConfig.useDualstackEndpoint} + */ + useDualstackEndpoint: Provider; +} +/** + * @internal + * + * @deprecated endpoints rulesets use \@smithy/middleware-endpoint resolveEndpointConfig. 
+ * All generated clients should migrate to Endpoints 2.0 endpointRuleSet traits. + */ +export declare const resolveEndpointsConfig: (input: T & EndpointsInputConfig & PreviouslyResolved) => T & EndpointsResolvedConfig; +export {}; diff --git a/amplify/functions/fetchDocuments/node_modules/@smithy/config-resolver/dist-types/ts3.4/endpointsConfig/utils/getEndpointFromRegion.d.ts b/amplify/functions/fetchDocuments/node_modules/@smithy/config-resolver/dist-types/ts3.4/endpointsConfig/utils/getEndpointFromRegion.d.ts new file mode 100644 index 0000000..83d4635 --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@smithy/config-resolver/dist-types/ts3.4/endpointsConfig/utils/getEndpointFromRegion.d.ts @@ -0,0 +1,11 @@ +import { Provider, RegionInfoProvider, UrlParser } from "@smithy/types"; +interface GetEndpointFromRegionOptions { + region: Provider; + tls?: boolean; + regionInfoProvider: RegionInfoProvider; + urlParser: UrlParser; + useDualstackEndpoint: Provider; + useFipsEndpoint: Provider; +} +export declare const getEndpointFromRegion: (input: GetEndpointFromRegionOptions) => Promise; +export {}; diff --git a/amplify/functions/fetchDocuments/node_modules/@smithy/config-resolver/dist-types/ts3.4/index.d.ts b/amplify/functions/fetchDocuments/node_modules/@smithy/config-resolver/dist-types/ts3.4/index.d.ts new file mode 100644 index 0000000..e205411 --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@smithy/config-resolver/dist-types/ts3.4/index.d.ts @@ -0,0 +1,12 @@ +/** + * @internal + */ +export * from "./endpointsConfig"; +/** + * @internal + */ +export * from "./regionConfig"; +/** + * @internal + */ +export * from "./regionInfo"; diff --git a/amplify/functions/fetchDocuments/node_modules/@smithy/config-resolver/dist-types/ts3.4/regionConfig/config.d.ts b/amplify/functions/fetchDocuments/node_modules/@smithy/config-resolver/dist-types/ts3.4/regionConfig/config.d.ts new file mode 100644 index 0000000..8f3a9b2 --- /dev/null +++ 
b/amplify/functions/fetchDocuments/node_modules/@smithy/config-resolver/dist-types/ts3.4/regionConfig/config.d.ts @@ -0,0 +1,17 @@ +import { LoadedConfigSelectors, LocalConfigOptions } from "@smithy/node-config-provider"; +/** + * @internal + */ +export declare const REGION_ENV_NAME = "AWS_REGION"; +/** + * @internal + */ +export declare const REGION_INI_NAME = "region"; +/** + * @internal + */ +export declare const NODE_REGION_CONFIG_OPTIONS: LoadedConfigSelectors; +/** + * @internal + */ +export declare const NODE_REGION_CONFIG_FILE_OPTIONS: LocalConfigOptions; diff --git a/amplify/functions/fetchDocuments/node_modules/@smithy/config-resolver/dist-types/ts3.4/regionConfig/getRealRegion.d.ts b/amplify/functions/fetchDocuments/node_modules/@smithy/config-resolver/dist-types/ts3.4/regionConfig/getRealRegion.d.ts new file mode 100644 index 0000000..6c11d4d --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@smithy/config-resolver/dist-types/ts3.4/regionConfig/getRealRegion.d.ts @@ -0,0 +1,4 @@ +/** + * @internal + */ +export declare const getRealRegion: (region: string) => string; diff --git a/amplify/functions/fetchDocuments/node_modules/@smithy/config-resolver/dist-types/ts3.4/regionConfig/index.d.ts b/amplify/functions/fetchDocuments/node_modules/@smithy/config-resolver/dist-types/ts3.4/regionConfig/index.d.ts new file mode 100644 index 0000000..0e6f55d --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@smithy/config-resolver/dist-types/ts3.4/regionConfig/index.d.ts @@ -0,0 +1,8 @@ +/** + * @internal + */ +export * from "./config"; +/** + * @internal + */ +export * from "./resolveRegionConfig"; diff --git a/amplify/functions/fetchDocuments/node_modules/@smithy/config-resolver/dist-types/ts3.4/regionConfig/isFipsRegion.d.ts b/amplify/functions/fetchDocuments/node_modules/@smithy/config-resolver/dist-types/ts3.4/regionConfig/isFipsRegion.d.ts new file mode 100644 index 0000000..1ee8bd4 --- /dev/null +++ 
b/amplify/functions/fetchDocuments/node_modules/@smithy/config-resolver/dist-types/ts3.4/regionConfig/isFipsRegion.d.ts @@ -0,0 +1,4 @@ +/** + * @internal + */ +export declare const isFipsRegion: (region: string) => boolean; diff --git a/amplify/functions/fetchDocuments/node_modules/@smithy/config-resolver/dist-types/ts3.4/regionConfig/resolveRegionConfig.d.ts b/amplify/functions/fetchDocuments/node_modules/@smithy/config-resolver/dist-types/ts3.4/regionConfig/resolveRegionConfig.d.ts new file mode 100644 index 0000000..7aaf9e1 --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@smithy/config-resolver/dist-types/ts3.4/regionConfig/resolveRegionConfig.d.ts @@ -0,0 +1,34 @@ +import { Provider } from "@smithy/types"; +/** + * @public + */ +export interface RegionInputConfig { + /** + * The AWS region to which this client will send requests + */ + region?: string | Provider; + /** + * Enables FIPS compatible endpoints. + */ + useFipsEndpoint?: boolean | Provider; +} +interface PreviouslyResolved { +} +/** + * @internal + */ +export interface RegionResolvedConfig { + /** + * Resolved value for input config {@link RegionInputConfig.region} + */ + region: Provider; + /** + * Resolved value for input {@link RegionInputConfig.useFipsEndpoint} + */ + useFipsEndpoint: Provider; +} +/** + * @internal + */ +export declare const resolveRegionConfig: (input: T & RegionInputConfig & PreviouslyResolved) => T & RegionResolvedConfig; +export {}; diff --git a/amplify/functions/fetchDocuments/node_modules/@smithy/config-resolver/dist-types/ts3.4/regionInfo/EndpointVariant.d.ts b/amplify/functions/fetchDocuments/node_modules/@smithy/config-resolver/dist-types/ts3.4/regionInfo/EndpointVariant.d.ts new file mode 100644 index 0000000..e533cc7 --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@smithy/config-resolver/dist-types/ts3.4/regionInfo/EndpointVariant.d.ts @@ -0,0 +1,10 @@ +import { EndpointVariantTag } from "./EndpointVariantTag"; +/** + * @internal + 
* + * Provides hostname information for specific host label. + */ +export type EndpointVariant = { + hostname: string; + tags: EndpointVariantTag[]; +}; diff --git a/amplify/functions/fetchDocuments/node_modules/@smithy/config-resolver/dist-types/ts3.4/regionInfo/EndpointVariantTag.d.ts b/amplify/functions/fetchDocuments/node_modules/@smithy/config-resolver/dist-types/ts3.4/regionInfo/EndpointVariantTag.d.ts new file mode 100644 index 0000000..755bbe5 --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@smithy/config-resolver/dist-types/ts3.4/regionInfo/EndpointVariantTag.d.ts @@ -0,0 +1,7 @@ +/** + * @internal + * + * The tag which mentions which area variant is providing information for. + * Can be either "fips" or "dualstack". + */ +export type EndpointVariantTag = "fips" | "dualstack"; diff --git a/amplify/functions/fetchDocuments/node_modules/@smithy/config-resolver/dist-types/ts3.4/regionInfo/PartitionHash.d.ts b/amplify/functions/fetchDocuments/node_modules/@smithy/config-resolver/dist-types/ts3.4/regionInfo/PartitionHash.d.ts new file mode 100644 index 0000000..6fed65e --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@smithy/config-resolver/dist-types/ts3.4/regionInfo/PartitionHash.d.ts @@ -0,0 +1,14 @@ +import { EndpointVariant } from "./EndpointVariant"; +/** + * @internal + * + * The hash of partition with the information specific to that partition. + * The information includes the list of regions belonging to that partition, + * and the hostname to be used for the partition. 
+ */ +export type PartitionHash = Record; diff --git a/amplify/functions/fetchDocuments/node_modules/@smithy/config-resolver/dist-types/ts3.4/regionInfo/RegionHash.d.ts b/amplify/functions/fetchDocuments/node_modules/@smithy/config-resolver/dist-types/ts3.4/regionInfo/RegionHash.d.ts new file mode 100644 index 0000000..cd90c70 --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@smithy/config-resolver/dist-types/ts3.4/regionInfo/RegionHash.d.ts @@ -0,0 +1,12 @@ +import { EndpointVariant } from "./EndpointVariant"; +/** + * @internal + * + * The hash of region with the information specific to that region. + * The information can include hostname, signingService and signingRegion. + */ +export type RegionHash = Record; diff --git a/amplify/functions/fetchDocuments/node_modules/@smithy/config-resolver/dist-types/ts3.4/regionInfo/getHostnameFromVariants.d.ts b/amplify/functions/fetchDocuments/node_modules/@smithy/config-resolver/dist-types/ts3.4/regionInfo/getHostnameFromVariants.d.ts new file mode 100644 index 0000000..3d61daa --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@smithy/config-resolver/dist-types/ts3.4/regionInfo/getHostnameFromVariants.d.ts @@ -0,0 +1,12 @@ +import { EndpointVariant } from "./EndpointVariant"; +/** + * @internal + */ +export interface GetHostnameFromVariantsOptions { + useFipsEndpoint: boolean; + useDualstackEndpoint: boolean; +} +/** + * @internal + */ +export declare const getHostnameFromVariants: (variants: EndpointVariant[] | undefined, { useFipsEndpoint, useDualstackEndpoint }: GetHostnameFromVariantsOptions) => string | undefined; diff --git a/amplify/functions/fetchDocuments/node_modules/@smithy/config-resolver/dist-types/ts3.4/regionInfo/getRegionInfo.d.ts b/amplify/functions/fetchDocuments/node_modules/@smithy/config-resolver/dist-types/ts3.4/regionInfo/getRegionInfo.d.ts new file mode 100644 index 0000000..820a548 --- /dev/null +++ 
b/amplify/functions/fetchDocuments/node_modules/@smithy/config-resolver/dist-types/ts3.4/regionInfo/getRegionInfo.d.ts @@ -0,0 +1,17 @@ +import { RegionInfo } from "@smithy/types"; +import { PartitionHash } from "./PartitionHash"; +import { RegionHash } from "./RegionHash"; +/** + * @internal + */ +export interface GetRegionInfoOptions { + useFipsEndpoint?: boolean; + useDualstackEndpoint?: boolean; + signingService: string; + regionHash: RegionHash; + partitionHash: PartitionHash; +} +/** + * @internal + */ +export declare const getRegionInfo: (region: string, { useFipsEndpoint, useDualstackEndpoint, signingService, regionHash, partitionHash, }: GetRegionInfoOptions) => RegionInfo; diff --git a/amplify/functions/fetchDocuments/node_modules/@smithy/config-resolver/dist-types/ts3.4/regionInfo/getResolvedHostname.d.ts b/amplify/functions/fetchDocuments/node_modules/@smithy/config-resolver/dist-types/ts3.4/regionInfo/getResolvedHostname.d.ts new file mode 100644 index 0000000..6aae405 --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@smithy/config-resolver/dist-types/ts3.4/regionInfo/getResolvedHostname.d.ts @@ -0,0 +1,11 @@ +/** + * @internal + */ +export interface GetResolvedHostnameOptions { + regionHostname?: string; + partitionHostname?: string; +} +/** + * @internal + */ +export declare const getResolvedHostname: (resolvedRegion: string, { regionHostname, partitionHostname }: GetResolvedHostnameOptions) => string | undefined; diff --git a/amplify/functions/fetchDocuments/node_modules/@smithy/config-resolver/dist-types/ts3.4/regionInfo/getResolvedPartition.d.ts b/amplify/functions/fetchDocuments/node_modules/@smithy/config-resolver/dist-types/ts3.4/regionInfo/getResolvedPartition.d.ts new file mode 100644 index 0000000..355c318 --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@smithy/config-resolver/dist-types/ts3.4/regionInfo/getResolvedPartition.d.ts @@ -0,0 +1,11 @@ +import { PartitionHash } from "./PartitionHash"; +/** + * 
@internal + */ +export interface GetResolvedPartitionOptions { + partitionHash: PartitionHash; +} +/** + * @internal + */ +export declare const getResolvedPartition: (region: string, { partitionHash }: GetResolvedPartitionOptions) => string; diff --git a/amplify/functions/fetchDocuments/node_modules/@smithy/config-resolver/dist-types/ts3.4/regionInfo/getResolvedSigningRegion.d.ts b/amplify/functions/fetchDocuments/node_modules/@smithy/config-resolver/dist-types/ts3.4/regionInfo/getResolvedSigningRegion.d.ts new file mode 100644 index 0000000..a7b1db6 --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@smithy/config-resolver/dist-types/ts3.4/regionInfo/getResolvedSigningRegion.d.ts @@ -0,0 +1,12 @@ +/** + * @internal + */ +export interface GetResolvedSigningRegionOptions { + regionRegex: string; + signingRegion?: string; + useFipsEndpoint: boolean; +} +/** + * @internal + */ +export declare const getResolvedSigningRegion: (hostname: string, { signingRegion, regionRegex, useFipsEndpoint }: GetResolvedSigningRegionOptions) => string | undefined; diff --git a/amplify/functions/fetchDocuments/node_modules/@smithy/config-resolver/dist-types/ts3.4/regionInfo/index.d.ts b/amplify/functions/fetchDocuments/node_modules/@smithy/config-resolver/dist-types/ts3.4/regionInfo/index.d.ts new file mode 100644 index 0000000..5826308 --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@smithy/config-resolver/dist-types/ts3.4/regionInfo/index.d.ts @@ -0,0 +1,12 @@ +/** + * @internal + */ +export * from "./PartitionHash"; +/** + * @internal + */ +export * from "./RegionHash"; +/** + * @internal + */ +export * from "./getRegionInfo"; diff --git a/amplify/functions/fetchDocuments/node_modules/@smithy/config-resolver/package.json b/amplify/functions/fetchDocuments/node_modules/@smithy/config-resolver/package.json new file mode 100644 index 0000000..2c4927f --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@smithy/config-resolver/package.json @@ 
-0,0 +1,64 @@ +{ + "name": "@smithy/config-resolver", + "version": "4.1.0", + "scripts": { + "build": "concurrently 'yarn:build:cjs' 'yarn:build:es' 'yarn:build:types && yarn build:types:downlevel'", + "build:cjs": "node ../../scripts/inline config-resolver", + "build:es": "yarn g:tsc -p tsconfig.es.json", + "build:types": "yarn g:tsc -p tsconfig.types.json", + "build:types:downlevel": "rimraf dist-types/ts3.4 && downlevel-dts dist-types dist-types/ts3.4", + "stage-release": "rimraf ./.release && yarn pack && mkdir ./.release && tar zxvf ./package.tgz --directory ./.release && rm ./package.tgz", + "clean": "rimraf ./dist-* && rimraf *.tsbuildinfo || exit 0", + "lint": "eslint -c ../../.eslintrc.js \"src/**/*.ts\"", + "format": "prettier --config ../../prettier.config.js --ignore-path ../../.prettierignore --write \"**/*.{ts,md,json}\"", + "test": "yarn g:vitest run", + "extract:docs": "api-extractor run --local", + "test:watch": "yarn g:vitest watch" + }, + "main": "./dist-cjs/index.js", + "module": "./dist-es/index.js", + "types": "./dist-types/index.d.ts", + "author": { + "name": "AWS SDK for JavaScript Team", + "url": "https://aws.amazon.com/javascript/" + }, + "license": "Apache-2.0", + "dependencies": { + "@smithy/node-config-provider": "^4.0.2", + "@smithy/types": "^4.2.0", + "@smithy/util-config-provider": "^4.0.0", + "@smithy/util-middleware": "^4.0.2", + "tslib": "^2.6.2" + }, + "devDependencies": { + "concurrently": "7.0.0", + "downlevel-dts": "0.10.1", + "rimraf": "3.0.2", + "typedoc": "0.23.23" + }, + "engines": { + "node": ">=18.0.0" + }, + "typesVersions": { + "<4.0": { + "dist-types/*": [ + "dist-types/ts3.4/*" + ] + } + }, + "files": [ + "dist-*/**" + ], + "homepage": "https://github.com/smithy-lang/smithy-typescript/tree/main/packages/config-resolver", + "repository": { + "type": "git", + "url": "https://github.com/smithy-lang/smithy-typescript.git", + "directory": "packages/config-resolver" + }, + "typedoc": { + "entryPoint": "src/index.ts" + }, + 
"publishConfig": { + "directory": ".release/package" + } +} \ No newline at end of file diff --git a/amplify/functions/fetchDocuments/node_modules/@smithy/core/LICENSE b/amplify/functions/fetchDocuments/node_modules/@smithy/core/LICENSE new file mode 100644 index 0000000..e907b58 --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@smithy/core/LICENSE @@ -0,0 +1,201 @@ + Apache License + Version 2.0, January 2004 + http://www.apache.org/licenses/ + + TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION + + 1. Definitions. + + "License" shall mean the terms and conditions for use, reproduction, + and distribution as defined by Sections 1 through 9 of this document. + + "Licensor" shall mean the copyright owner or entity authorized by + the copyright owner that is granting the License. + + "Legal Entity" shall mean the union of the acting entity and all + other entities that control, are controlled by, or are under common + control with that entity. For the purposes of this definition, + "control" means (i) the power, direct or indirect, to cause the + direction or management of such entity, whether by contract or + otherwise, or (ii) ownership of fifty percent (50%) or more of the + outstanding shares, or (iii) beneficial ownership of such entity. + + "You" (or "Your") shall mean an individual or Legal Entity + exercising permissions granted by this License. + + "Source" form shall mean the preferred form for making modifications, + including but not limited to software source code, documentation + source, and configuration files. + + "Object" form shall mean any form resulting from mechanical + transformation or translation of a Source form, including but + not limited to compiled object code, generated documentation, + and conversions to other media types. 
+ + "Work" shall mean the work of authorship, whether in Source or + Object form, made available under the License, as indicated by a + copyright notice that is included in or attached to the work + (an example is provided in the Appendix below). + + "Derivative Works" shall mean any work, whether in Source or Object + form, that is based on (or derived from) the Work and for which the + editorial revisions, annotations, elaborations, or other modifications + represent, as a whole, an original work of authorship. For the purposes + of this License, Derivative Works shall not include works that remain + separable from, or merely link (or bind by name) to the interfaces of, + the Work and Derivative Works thereof. + + "Contribution" shall mean any work of authorship, including + the original version of the Work and any modifications or additions + to that Work or Derivative Works thereof, that is intentionally + submitted to Licensor for inclusion in the Work by the copyright owner + or by an individual or Legal Entity authorized to submit on behalf of + the copyright owner. For the purposes of this definition, "submitted" + means any form of electronic, verbal, or written communication sent + to the Licensor or its representatives, including but not limited to + communication on electronic mailing lists, source code control systems, + and issue tracking systems that are managed by, or on behalf of, the + Licensor for the purpose of discussing and improving the Work, but + excluding communication that is conspicuously marked or otherwise + designated in writing by the copyright owner as "Not a Contribution." + + "Contributor" shall mean Licensor and any individual or Legal Entity + on behalf of whom a Contribution has been received by Licensor and + subsequently incorporated within the Work. + + 2. Grant of Copyright License. 
Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + copyright license to reproduce, prepare Derivative Works of, + publicly display, publicly perform, sublicense, and distribute the + Work and such Derivative Works in Source or Object form. + + 3. Grant of Patent License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + (except as stated in this section) patent license to make, have made, + use, offer to sell, sell, import, and otherwise transfer the Work, + where such license applies only to those patent claims licensable + by such Contributor that are necessarily infringed by their + Contribution(s) alone or by combination of their Contribution(s) + with the Work to which such Contribution(s) was submitted. If You + institute patent litigation against any entity (including a + cross-claim or counterclaim in a lawsuit) alleging that the Work + or a Contribution incorporated within the Work constitutes direct + or contributory patent infringement, then any patent licenses + granted to You under this License for that Work shall terminate + as of the date such litigation is filed. + + 4. Redistribution. 
You may reproduce and distribute copies of the + Work or Derivative Works thereof in any medium, with or without + modifications, and in Source or Object form, provided that You + meet the following conditions: + + (a) You must give any other recipients of the Work or + Derivative Works a copy of this License; and + + (b) You must cause any modified files to carry prominent notices + stating that You changed the files; and + + (c) You must retain, in the Source form of any Derivative Works + that You distribute, all copyright, patent, trademark, and + attribution notices from the Source form of the Work, + excluding those notices that do not pertain to any part of + the Derivative Works; and + + (d) If the Work includes a "NOTICE" text file as part of its + distribution, then any Derivative Works that You distribute must + include a readable copy of the attribution notices contained + within such NOTICE file, excluding those notices that do not + pertain to any part of the Derivative Works, in at least one + of the following places: within a NOTICE text file distributed + as part of the Derivative Works; within the Source form or + documentation, if provided along with the Derivative Works; or, + within a display generated by the Derivative Works, if and + wherever such third-party notices normally appear. The contents + of the NOTICE file are for informational purposes only and + do not modify the License. You may add Your own attribution + notices within Derivative Works that You distribute, alongside + or as an addendum to the NOTICE text from the Work, provided + that such additional attribution notices cannot be construed + as modifying the License. 
+ + You may add Your own copyright statement to Your modifications and + may provide additional or different license terms and conditions + for use, reproduction, or distribution of Your modifications, or + for any such Derivative Works as a whole, provided Your use, + reproduction, and distribution of the Work otherwise complies with + the conditions stated in this License. + + 5. Submission of Contributions. Unless You explicitly state otherwise, + any Contribution intentionally submitted for inclusion in the Work + by You to the Licensor shall be under the terms and conditions of + this License, without any additional terms or conditions. + Notwithstanding the above, nothing herein shall supersede or modify + the terms of any separate license agreement you may have executed + with Licensor regarding such Contributions. + + 6. Trademarks. This License does not grant permission to use the trade + names, trademarks, service marks, or product names of the Licensor, + except as required for reasonable and customary use in describing the + origin of the Work and reproducing the content of the NOTICE file. + + 7. Disclaimer of Warranty. Unless required by applicable law or + agreed to in writing, Licensor provides the Work (and each + Contributor provides its Contributions) on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or + implied, including, without limitation, any warranties or conditions + of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A + PARTICULAR PURPOSE. You are solely responsible for determining the + appropriateness of using or redistributing the Work and assume any + risks associated with Your exercise of permissions under this License. + + 8. Limitation of Liability. 
In no event and under no legal theory, + whether in tort (including negligence), contract, or otherwise, + unless required by applicable law (such as deliberate and grossly + negligent acts) or agreed to in writing, shall any Contributor be + liable to You for damages, including any direct, indirect, special, + incidental, or consequential damages of any character arising as a + result of this License or out of the use or inability to use the + Work (including but not limited to damages for loss of goodwill, + work stoppage, computer failure or malfunction, or any and all + other commercial damages or losses), even if such Contributor + has been advised of the possibility of such damages. + + 9. Accepting Warranty or Additional Liability. While redistributing + the Work or Derivative Works thereof, You may choose to offer, + and charge a fee for, acceptance of support, warranty, indemnity, + or other liability obligations and/or rights consistent with this + License. However, in accepting such obligations, You may act only + on Your own behalf and on Your sole responsibility, not on behalf + of any other Contributor, and only if You agree to indemnify, + defend, and hold each Contributor harmless for any liability + incurred by, or claims asserted against, such Contributor by reason + of your accepting any such warranty or additional liability. + + END OF TERMS AND CONDITIONS + + APPENDIX: How to apply the Apache License to your work. + + To apply the Apache License to your work, attach the following + boilerplate notice, with the fields enclosed by brackets "{}" + replaced with your own identifying information. (Don't include + the brackets!) The text should be enclosed in the appropriate + comment syntax for the file format. We also recommend that a + file or class name and description of purpose be included on the + same "printed page" as the copyright notice for easier + identification within third-party archives. + + Copyright 2019 Amazon.com, Inc. 
or its affiliates. All Rights Reserved. + + Licensed under the Apache License, Version 2.0 (the "License"); + you may not use this file except in compliance with the License. + You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + + Unless required by applicable law or agreed to in writing, software + distributed under the License is distributed on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + See the License for the specific language governing permissions and + limitations under the License. diff --git a/amplify/functions/fetchDocuments/node_modules/@smithy/core/README.md b/amplify/functions/fetchDocuments/node_modules/@smithy/core/README.md new file mode 100644 index 0000000..51f8922 --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@smithy/core/README.md @@ -0,0 +1,45 @@ +# @smithy/core + +[![NPM version](https://img.shields.io/npm/v/@smithy/core/latest.svg)](https://www.npmjs.com/package/@smithy/core) +[![NPM downloads](https://img.shields.io/npm/dm/@smithy/core.svg)](https://www.npmjs.com/package/@smithy/core) + +> An internal package. You probably shouldn't use this package, at least directly. + +This package provides common or core functionality for generic Smithy clients. + +You do not need to explicitly install this package, since it will be installed during code generation if used. + +## Development of `@smithy/core` submodules + +Core submodules are organized for distribution via the `package.json` `exports` field. + +`exports` is supported by default by the latest Node.js, webpack, and esbuild. For react-native, it can be +enabled via instructions found at [reactnative.dev/blog](https://reactnative.dev/blog/2023/06/21/package-exports-support), but we also provide a compatibility redirect. + +Think of `@smithy/core` as a mono-package within the monorepo. 
+It preserves the benefits of modularization, for example to optimize Node.js initialization speed, +while making it easier to have a consistent version of core dependencies, reducing package sprawl when +installing a Smithy runtime client. + +### Guide for submodules + +- Each `index.ts` file corresponding to the pattern `./src/submodules//index.ts` will be + published as a separate `dist-cjs` bundled submodule index using the `Inliner.js` build script. +- create a folder as `./src/submodules/` including an `index.ts` file and a `README.md` file. + - The linter will throw an error on missing submodule metadata in `package.json` and the various `tsconfig.json` files, but it will automatically fix them if possible. +- a submodule is equivalent to a standalone `@smithy/` package in that importing it in Node.js will resolve a separate bundle. +- submodules may not relatively import files from other submodules. Instead, directly use the `@scope/pkg/submodule` name as the import. + - The linter will check for this and throw an error. +- To the extent possible, correctly declaring submodule metadata is validated by the linter in `@smithy/core`. + The linter runs during `yarn build` and also as `yarn lint`. + +### When should I create an `@smithy/core/submodule` vs. `@smithy/new-package`? + +Keep in mind that the core package is installed by all downstream clients. + +If the component functionality is upstream of multiple clients, it is +a good candidate for a core submodule. For example, if `middleware-retry` had been written +after the support for submodules was added, it would have been a submodule. + +If the component's functionality is downstream of a client (rare), or only expected to be used by a very small +subset of clients, it could be written as a standalone package. 
diff --git a/amplify/functions/fetchDocuments/node_modules/@smithy/core/cbor.d.ts b/amplify/functions/fetchDocuments/node_modules/@smithy/core/cbor.d.ts new file mode 100644 index 0000000..c44b707 --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@smithy/core/cbor.d.ts @@ -0,0 +1,7 @@ +/** + * Do not edit: + * This is a compatibility redirect for contexts that do not understand package.json exports field. + */ +declare module "@smithy/core/cbor" { + export * from "@smithy/core/dist-types/submodules/cbor/index.d"; +} diff --git a/amplify/functions/fetchDocuments/node_modules/@smithy/core/cbor.js b/amplify/functions/fetchDocuments/node_modules/@smithy/core/cbor.js new file mode 100644 index 0000000..710fb79 --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@smithy/core/cbor.js @@ -0,0 +1,6 @@ + +/** + * Do not edit: + * This is a compatibility redirect for contexts that do not understand package.json exports field. + */ +module.exports = require("./dist-cjs/submodules/cbor/index.js"); diff --git a/amplify/functions/fetchDocuments/node_modules/@smithy/core/dist-cjs/getSmithyContext.js b/amplify/functions/fetchDocuments/node_modules/@smithy/core/dist-cjs/getSmithyContext.js new file mode 100644 index 0000000..532e610 --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@smithy/core/dist-cjs/getSmithyContext.js @@ -0,0 +1 @@ +module.exports = require("./index.js"); \ No newline at end of file diff --git a/amplify/functions/fetchDocuments/node_modules/@smithy/core/dist-cjs/index.js b/amplify/functions/fetchDocuments/node_modules/@smithy/core/dist-cjs/index.js new file mode 100644 index 0000000..a3735f6 --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@smithy/core/dist-cjs/index.js @@ -0,0 +1,454 @@ +var __defProp = Object.defineProperty; +var __getOwnPropDesc = Object.getOwnPropertyDescriptor; +var __getOwnPropNames = Object.getOwnPropertyNames; +var __hasOwnProp = Object.prototype.hasOwnProperty; +var __name 
= (target, value) => __defProp(target, "name", { value, configurable: true }); +var __export = (target, all) => { + for (var name in all) + __defProp(target, name, { get: all[name], enumerable: true }); +}; +var __copyProps = (to, from, except, desc) => { + if (from && typeof from === "object" || typeof from === "function") { + for (let key of __getOwnPropNames(from)) + if (!__hasOwnProp.call(to, key) && key !== except) + __defProp(to, key, { get: () => from[key], enumerable: !(desc = __getOwnPropDesc(from, key)) || desc.enumerable }); + } + return to; +}; +var __toCommonJS = (mod) => __copyProps(__defProp({}, "__esModule", { value: true }), mod); + +// src/index.ts +var src_exports = {}; +__export(src_exports, { + DefaultIdentityProviderConfig: () => DefaultIdentityProviderConfig, + EXPIRATION_MS: () => EXPIRATION_MS, + HttpApiKeyAuthSigner: () => HttpApiKeyAuthSigner, + HttpBearerAuthSigner: () => HttpBearerAuthSigner, + NoAuthSigner: () => NoAuthSigner, + createIsIdentityExpiredFunction: () => createIsIdentityExpiredFunction, + createPaginator: () => createPaginator, + doesIdentityRequireRefresh: () => doesIdentityRequireRefresh, + getHttpAuthSchemeEndpointRuleSetPlugin: () => getHttpAuthSchemeEndpointRuleSetPlugin, + getHttpAuthSchemePlugin: () => getHttpAuthSchemePlugin, + getHttpSigningPlugin: () => getHttpSigningPlugin, + getSmithyContext: () => getSmithyContext, + httpAuthSchemeEndpointRuleSetMiddlewareOptions: () => httpAuthSchemeEndpointRuleSetMiddlewareOptions, + httpAuthSchemeMiddleware: () => httpAuthSchemeMiddleware, + httpAuthSchemeMiddlewareOptions: () => httpAuthSchemeMiddlewareOptions, + httpSigningMiddleware: () => httpSigningMiddleware, + httpSigningMiddlewareOptions: () => httpSigningMiddlewareOptions, + isIdentityExpired: () => isIdentityExpired, + memoizeIdentityProvider: () => memoizeIdentityProvider, + normalizeProvider: () => normalizeProvider, + requestBuilder: () => import_protocols.requestBuilder, + setFeature: () => setFeature +}); 
+module.exports = __toCommonJS(src_exports); + +// src/getSmithyContext.ts +var import_types = require("@smithy/types"); +var getSmithyContext = /* @__PURE__ */ __name((context) => context[import_types.SMITHY_CONTEXT_KEY] || (context[import_types.SMITHY_CONTEXT_KEY] = {}), "getSmithyContext"); + +// src/middleware-http-auth-scheme/httpAuthSchemeMiddleware.ts +var import_util_middleware = require("@smithy/util-middleware"); + +// src/middleware-http-auth-scheme/resolveAuthOptions.ts +var resolveAuthOptions = /* @__PURE__ */ __name((candidateAuthOptions, authSchemePreference) => { + if (!authSchemePreference || authSchemePreference.length === 0) { + return candidateAuthOptions; + } + const preferredAuthOptions = []; + for (const preferredSchemeName of authSchemePreference) { + for (const candidateAuthOption of candidateAuthOptions) { + const candidateAuthSchemeName = candidateAuthOption.schemeId.split("#")[1]; + if (candidateAuthSchemeName === preferredSchemeName) { + preferredAuthOptions.push(candidateAuthOption); + } + } + } + for (const candidateAuthOption of candidateAuthOptions) { + if (!preferredAuthOptions.find(({ schemeId }) => schemeId === candidateAuthOption.schemeId)) { + preferredAuthOptions.push(candidateAuthOption); + } + } + return preferredAuthOptions; +}, "resolveAuthOptions"); + +// src/middleware-http-auth-scheme/httpAuthSchemeMiddleware.ts +function convertHttpAuthSchemesToMap(httpAuthSchemes) { + const map = /* @__PURE__ */ new Map(); + for (const scheme of httpAuthSchemes) { + map.set(scheme.schemeId, scheme); + } + return map; +} +__name(convertHttpAuthSchemesToMap, "convertHttpAuthSchemesToMap"); +var httpAuthSchemeMiddleware = /* @__PURE__ */ __name((config, mwOptions) => (next, context) => async (args) => { + const options = config.httpAuthSchemeProvider( + await mwOptions.httpAuthSchemeParametersProvider(config, context, args.input) + ); + const authSchemePreference = config.authSchemePreference ? 
await config.authSchemePreference() : []; + const resolvedOptions = resolveAuthOptions(options, authSchemePreference); + const authSchemes = convertHttpAuthSchemesToMap(config.httpAuthSchemes); + const smithyContext = (0, import_util_middleware.getSmithyContext)(context); + const failureReasons = []; + for (const option of resolvedOptions) { + const scheme = authSchemes.get(option.schemeId); + if (!scheme) { + failureReasons.push(`HttpAuthScheme \`${option.schemeId}\` was not enabled for this service.`); + continue; + } + const identityProvider = scheme.identityProvider(await mwOptions.identityProviderConfigProvider(config)); + if (!identityProvider) { + failureReasons.push(`HttpAuthScheme \`${option.schemeId}\` did not have an IdentityProvider configured.`); + continue; + } + const { identityProperties = {}, signingProperties = {} } = option.propertiesExtractor?.(config, context) || {}; + option.identityProperties = Object.assign(option.identityProperties || {}, identityProperties); + option.signingProperties = Object.assign(option.signingProperties || {}, signingProperties); + smithyContext.selectedHttpAuthScheme = { + httpAuthOption: option, + identity: await identityProvider(option.identityProperties), + signer: scheme.signer + }; + break; + } + if (!smithyContext.selectedHttpAuthScheme) { + throw new Error(failureReasons.join("\n")); + } + return next(args); +}, "httpAuthSchemeMiddleware"); + +// src/middleware-http-auth-scheme/getHttpAuthSchemeEndpointRuleSetPlugin.ts +var httpAuthSchemeEndpointRuleSetMiddlewareOptions = { + step: "serialize", + tags: ["HTTP_AUTH_SCHEME"], + name: "httpAuthSchemeMiddleware", + override: true, + relation: "before", + toMiddleware: "endpointV2Middleware" +}; +var getHttpAuthSchemeEndpointRuleSetPlugin = /* @__PURE__ */ __name((config, { + httpAuthSchemeParametersProvider, + identityProviderConfigProvider +}) => ({ + applyToStack: (clientStack) => { + clientStack.addRelativeTo( + httpAuthSchemeMiddleware(config, { + 
httpAuthSchemeParametersProvider, + identityProviderConfigProvider + }), + httpAuthSchemeEndpointRuleSetMiddlewareOptions + ); + } +}), "getHttpAuthSchemeEndpointRuleSetPlugin"); + +// src/middleware-http-auth-scheme/getHttpAuthSchemePlugin.ts +var import_middleware_serde = require("@smithy/middleware-serde"); +var httpAuthSchemeMiddlewareOptions = { + step: "serialize", + tags: ["HTTP_AUTH_SCHEME"], + name: "httpAuthSchemeMiddleware", + override: true, + relation: "before", + toMiddleware: import_middleware_serde.serializerMiddlewareOption.name +}; +var getHttpAuthSchemePlugin = /* @__PURE__ */ __name((config, { + httpAuthSchemeParametersProvider, + identityProviderConfigProvider +}) => ({ + applyToStack: (clientStack) => { + clientStack.addRelativeTo( + httpAuthSchemeMiddleware(config, { + httpAuthSchemeParametersProvider, + identityProviderConfigProvider + }), + httpAuthSchemeMiddlewareOptions + ); + } +}), "getHttpAuthSchemePlugin"); + +// src/middleware-http-signing/httpSigningMiddleware.ts +var import_protocol_http = require("@smithy/protocol-http"); + +var defaultErrorHandler = /* @__PURE__ */ __name((signingProperties) => (error) => { + throw error; +}, "defaultErrorHandler"); +var defaultSuccessHandler = /* @__PURE__ */ __name((httpResponse, signingProperties) => { +}, "defaultSuccessHandler"); +var httpSigningMiddleware = /* @__PURE__ */ __name((config) => (next, context) => async (args) => { + if (!import_protocol_http.HttpRequest.isInstance(args.request)) { + return next(args); + } + const smithyContext = (0, import_util_middleware.getSmithyContext)(context); + const scheme = smithyContext.selectedHttpAuthScheme; + if (!scheme) { + throw new Error(`No HttpAuthScheme was selected: unable to sign request`); + } + const { + httpAuthOption: { signingProperties = {} }, + identity, + signer + } = scheme; + const output = await next({ + ...args, + request: await signer.sign(args.request, identity, signingProperties) + }).catch((signer.errorHandler || 
defaultErrorHandler)(signingProperties)); + (signer.successHandler || defaultSuccessHandler)(output.response, signingProperties); + return output; +}, "httpSigningMiddleware"); + +// src/middleware-http-signing/getHttpSigningMiddleware.ts +var httpSigningMiddlewareOptions = { + step: "finalizeRequest", + tags: ["HTTP_SIGNING"], + name: "httpSigningMiddleware", + aliases: ["apiKeyMiddleware", "tokenMiddleware", "awsAuthMiddleware"], + override: true, + relation: "after", + toMiddleware: "retryMiddleware" +}; +var getHttpSigningPlugin = /* @__PURE__ */ __name((config) => ({ + applyToStack: (clientStack) => { + clientStack.addRelativeTo(httpSigningMiddleware(config), httpSigningMiddlewareOptions); + } +}), "getHttpSigningPlugin"); + +// src/normalizeProvider.ts +var normalizeProvider = /* @__PURE__ */ __name((input) => { + if (typeof input === "function") + return input; + const promisified = Promise.resolve(input); + return () => promisified; +}, "normalizeProvider"); + +// src/pagination/createPaginator.ts +var makePagedClientRequest = /* @__PURE__ */ __name(async (CommandCtor, client, input, withCommand = (_) => _, ...args) => { + let command = new CommandCtor(input); + command = withCommand(command) ?? command; + return await client.send(command, ...args); +}, "makePagedClientRequest"); +function createPaginator(ClientCtor, CommandCtor, inputTokenName, outputTokenName, pageSizeTokenName) { + return /* @__PURE__ */ __name(async function* paginateOperation(config, input, ...additionalArguments) { + const _input = input; + let token = config.startingToken ?? _input[inputTokenName]; + let hasNext = true; + let page; + while (hasNext) { + _input[inputTokenName] = token; + if (pageSizeTokenName) { + _input[pageSizeTokenName] = _input[pageSizeTokenName] ?? 
config.pageSize; + } + if (config.client instanceof ClientCtor) { + page = await makePagedClientRequest( + CommandCtor, + config.client, + input, + config.withCommand, + ...additionalArguments + ); + } else { + throw new Error(`Invalid client, expected instance of ${ClientCtor.name}`); + } + yield page; + const prevToken = token; + token = get(page, outputTokenName); + hasNext = !!(token && (!config.stopOnSameToken || token !== prevToken)); + } + return void 0; + }, "paginateOperation"); +} +__name(createPaginator, "createPaginator"); +var get = /* @__PURE__ */ __name((fromObject, path) => { + let cursor = fromObject; + const pathComponents = path.split("."); + for (const step of pathComponents) { + if (!cursor || typeof cursor !== "object") { + return void 0; + } + cursor = cursor[step]; + } + return cursor; +}, "get"); + +// src/protocols/requestBuilder.ts +var import_protocols = require("@smithy/core/protocols"); + +// src/setFeature.ts +function setFeature(context, feature, value) { + if (!context.__smithy_context) { + context.__smithy_context = { + features: {} + }; + } else if (!context.__smithy_context.features) { + context.__smithy_context.features = {}; + } + context.__smithy_context.features[feature] = value; +} +__name(setFeature, "setFeature"); + +// src/util-identity-and-auth/DefaultIdentityProviderConfig.ts +var DefaultIdentityProviderConfig = class { + /** + * Creates an IdentityProviderConfig with a record of scheme IDs to identity providers. 
+ * + * @param config scheme IDs and identity providers to configure + */ + constructor(config) { + this.authSchemes = /* @__PURE__ */ new Map(); + for (const [key, value] of Object.entries(config)) { + if (value !== void 0) { + this.authSchemes.set(key, value); + } + } + } + static { + __name(this, "DefaultIdentityProviderConfig"); + } + getIdentityProvider(schemeId) { + return this.authSchemes.get(schemeId); + } +}; + +// src/util-identity-and-auth/httpAuthSchemes/httpApiKeyAuth.ts + + +var HttpApiKeyAuthSigner = class { + static { + __name(this, "HttpApiKeyAuthSigner"); + } + async sign(httpRequest, identity, signingProperties) { + if (!signingProperties) { + throw new Error( + "request could not be signed with `apiKey` since the `name` and `in` signer properties are missing" + ); + } + if (!signingProperties.name) { + throw new Error("request could not be signed with `apiKey` since the `name` signer property is missing"); + } + if (!signingProperties.in) { + throw new Error("request could not be signed with `apiKey` since the `in` signer property is missing"); + } + if (!identity.apiKey) { + throw new Error("request could not be signed with `apiKey` since the `apiKey` is not defined"); + } + const clonedRequest = import_protocol_http.HttpRequest.clone(httpRequest); + if (signingProperties.in === import_types.HttpApiKeyAuthLocation.QUERY) { + clonedRequest.query[signingProperties.name] = identity.apiKey; + } else if (signingProperties.in === import_types.HttpApiKeyAuthLocation.HEADER) { + clonedRequest.headers[signingProperties.name] = signingProperties.scheme ? 
`${signingProperties.scheme} ${identity.apiKey}` : identity.apiKey; + } else { + throw new Error( + "request can only be signed with `apiKey` locations `query` or `header`, but found: `" + signingProperties.in + "`" + ); + } + return clonedRequest; + } +}; + +// src/util-identity-and-auth/httpAuthSchemes/httpBearerAuth.ts + +var HttpBearerAuthSigner = class { + static { + __name(this, "HttpBearerAuthSigner"); + } + async sign(httpRequest, identity, signingProperties) { + const clonedRequest = import_protocol_http.HttpRequest.clone(httpRequest); + if (!identity.token) { + throw new Error("request could not be signed with `token` since the `token` is not defined"); + } + clonedRequest.headers["Authorization"] = `Bearer ${identity.token}`; + return clonedRequest; + } +}; + +// src/util-identity-and-auth/httpAuthSchemes/noAuth.ts +var NoAuthSigner = class { + static { + __name(this, "NoAuthSigner"); + } + async sign(httpRequest, identity, signingProperties) { + return httpRequest; + } +}; + +// src/util-identity-and-auth/memoizeIdentityProvider.ts +var createIsIdentityExpiredFunction = /* @__PURE__ */ __name((expirationMs) => (identity) => doesIdentityRequireRefresh(identity) && identity.expiration.getTime() - Date.now() < expirationMs, "createIsIdentityExpiredFunction"); +var EXPIRATION_MS = 3e5; +var isIdentityExpired = createIsIdentityExpiredFunction(EXPIRATION_MS); +var doesIdentityRequireRefresh = /* @__PURE__ */ __name((identity) => identity.expiration !== void 0, "doesIdentityRequireRefresh"); +var memoizeIdentityProvider = /* @__PURE__ */ __name((provider, isExpired, requiresRefresh) => { + if (provider === void 0) { + return void 0; + } + const normalizedProvider = typeof provider !== "function" ? 
async () => Promise.resolve(provider) : provider; + let resolved; + let pending; + let hasResult; + let isConstant = false; + const coalesceProvider = /* @__PURE__ */ __name(async (options) => { + if (!pending) { + pending = normalizedProvider(options); + } + try { + resolved = await pending; + hasResult = true; + isConstant = false; + } finally { + pending = void 0; + } + return resolved; + }, "coalesceProvider"); + if (isExpired === void 0) { + return async (options) => { + if (!hasResult || options?.forceRefresh) { + resolved = await coalesceProvider(options); + } + return resolved; + }; + } + return async (options) => { + if (!hasResult || options?.forceRefresh) { + resolved = await coalesceProvider(options); + } + if (isConstant) { + return resolved; + } + if (!requiresRefresh(resolved)) { + isConstant = true; + return resolved; + } + if (isExpired(resolved)) { + await coalesceProvider(options); + return resolved; + } + return resolved; + }; +}, "memoizeIdentityProvider"); +// Annotate the CommonJS export names for ESM import in node: + +0 && (module.exports = { + createPaginator, + getSmithyContext, + httpAuthSchemeMiddleware, + httpAuthSchemeEndpointRuleSetMiddlewareOptions, + getHttpAuthSchemeEndpointRuleSetPlugin, + httpAuthSchemeMiddlewareOptions, + getHttpAuthSchemePlugin, + httpSigningMiddleware, + httpSigningMiddlewareOptions, + getHttpSigningPlugin, + normalizeProvider, + requestBuilder, + setFeature, + DefaultIdentityProviderConfig, + HttpApiKeyAuthSigner, + HttpBearerAuthSigner, + NoAuthSigner, + createIsIdentityExpiredFunction, + EXPIRATION_MS, + isIdentityExpired, + doesIdentityRequireRefresh, + memoizeIdentityProvider +}); + diff --git a/amplify/functions/fetchDocuments/node_modules/@smithy/core/dist-cjs/middleware-http-auth-scheme/getHttpAuthSchemeEndpointRuleSetPlugin.js b/amplify/functions/fetchDocuments/node_modules/@smithy/core/dist-cjs/middleware-http-auth-scheme/getHttpAuthSchemeEndpointRuleSetPlugin.js new file mode 100644 index 
0000000..0440577 --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@smithy/core/dist-cjs/middleware-http-auth-scheme/getHttpAuthSchemeEndpointRuleSetPlugin.js @@ -0,0 +1 @@ +module.exports = require("../index.js"); \ No newline at end of file diff --git a/amplify/functions/fetchDocuments/node_modules/@smithy/core/dist-cjs/middleware-http-auth-scheme/getHttpAuthSchemePlugin.js b/amplify/functions/fetchDocuments/node_modules/@smithy/core/dist-cjs/middleware-http-auth-scheme/getHttpAuthSchemePlugin.js new file mode 100644 index 0000000..0440577 --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@smithy/core/dist-cjs/middleware-http-auth-scheme/getHttpAuthSchemePlugin.js @@ -0,0 +1 @@ +module.exports = require("../index.js"); \ No newline at end of file diff --git a/amplify/functions/fetchDocuments/node_modules/@smithy/core/dist-cjs/middleware-http-auth-scheme/httpAuthSchemeMiddleware.js b/amplify/functions/fetchDocuments/node_modules/@smithy/core/dist-cjs/middleware-http-auth-scheme/httpAuthSchemeMiddleware.js new file mode 100644 index 0000000..0440577 --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@smithy/core/dist-cjs/middleware-http-auth-scheme/httpAuthSchemeMiddleware.js @@ -0,0 +1 @@ +module.exports = require("../index.js"); \ No newline at end of file diff --git a/amplify/functions/fetchDocuments/node_modules/@smithy/core/dist-cjs/middleware-http-auth-scheme/index.js b/amplify/functions/fetchDocuments/node_modules/@smithy/core/dist-cjs/middleware-http-auth-scheme/index.js new file mode 100644 index 0000000..0440577 --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@smithy/core/dist-cjs/middleware-http-auth-scheme/index.js @@ -0,0 +1 @@ +module.exports = require("../index.js"); \ No newline at end of file diff --git a/amplify/functions/fetchDocuments/node_modules/@smithy/core/dist-cjs/middleware-http-auth-scheme/resolveAuthOptions.js 
b/amplify/functions/fetchDocuments/node_modules/@smithy/core/dist-cjs/middleware-http-auth-scheme/resolveAuthOptions.js new file mode 100644 index 0000000..0440577 --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@smithy/core/dist-cjs/middleware-http-auth-scheme/resolveAuthOptions.js @@ -0,0 +1 @@ +module.exports = require("../index.js"); \ No newline at end of file diff --git a/amplify/functions/fetchDocuments/node_modules/@smithy/core/dist-cjs/middleware-http-signing/getHttpSigningMiddleware.js b/amplify/functions/fetchDocuments/node_modules/@smithy/core/dist-cjs/middleware-http-signing/getHttpSigningMiddleware.js new file mode 100644 index 0000000..0440577 --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@smithy/core/dist-cjs/middleware-http-signing/getHttpSigningMiddleware.js @@ -0,0 +1 @@ +module.exports = require("../index.js"); \ No newline at end of file diff --git a/amplify/functions/fetchDocuments/node_modules/@smithy/core/dist-cjs/middleware-http-signing/httpSigningMiddleware.js b/amplify/functions/fetchDocuments/node_modules/@smithy/core/dist-cjs/middleware-http-signing/httpSigningMiddleware.js new file mode 100644 index 0000000..0440577 --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@smithy/core/dist-cjs/middleware-http-signing/httpSigningMiddleware.js @@ -0,0 +1 @@ +module.exports = require("../index.js"); \ No newline at end of file diff --git a/amplify/functions/fetchDocuments/node_modules/@smithy/core/dist-cjs/middleware-http-signing/index.js b/amplify/functions/fetchDocuments/node_modules/@smithy/core/dist-cjs/middleware-http-signing/index.js new file mode 100644 index 0000000..0440577 --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@smithy/core/dist-cjs/middleware-http-signing/index.js @@ -0,0 +1 @@ +module.exports = require("../index.js"); \ No newline at end of file diff --git a/amplify/functions/fetchDocuments/node_modules/@smithy/core/dist-cjs/normalizeProvider.js 
b/amplify/functions/fetchDocuments/node_modules/@smithy/core/dist-cjs/normalizeProvider.js new file mode 100644 index 0000000..532e610 --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@smithy/core/dist-cjs/normalizeProvider.js @@ -0,0 +1 @@ +module.exports = require("./index.js"); \ No newline at end of file diff --git a/amplify/functions/fetchDocuments/node_modules/@smithy/core/dist-cjs/pagination/createPaginator.js b/amplify/functions/fetchDocuments/node_modules/@smithy/core/dist-cjs/pagination/createPaginator.js new file mode 100644 index 0000000..0440577 --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@smithy/core/dist-cjs/pagination/createPaginator.js @@ -0,0 +1 @@ +module.exports = require("../index.js"); \ No newline at end of file diff --git a/amplify/functions/fetchDocuments/node_modules/@smithy/core/dist-cjs/protocols/requestBuilder.js b/amplify/functions/fetchDocuments/node_modules/@smithy/core/dist-cjs/protocols/requestBuilder.js new file mode 100644 index 0000000..0440577 --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@smithy/core/dist-cjs/protocols/requestBuilder.js @@ -0,0 +1 @@ +module.exports = require("../index.js"); \ No newline at end of file diff --git a/amplify/functions/fetchDocuments/node_modules/@smithy/core/dist-cjs/setFeature.js b/amplify/functions/fetchDocuments/node_modules/@smithy/core/dist-cjs/setFeature.js new file mode 100644 index 0000000..532e610 --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@smithy/core/dist-cjs/setFeature.js @@ -0,0 +1 @@ +module.exports = require("./index.js"); \ No newline at end of file diff --git a/amplify/functions/fetchDocuments/node_modules/@smithy/core/dist-cjs/submodules/cbor/index.js b/amplify/functions/fetchDocuments/node_modules/@smithy/core/dist-cjs/submodules/cbor/index.js new file mode 100644 index 0000000..0f69723 --- /dev/null +++ 
b/amplify/functions/fetchDocuments/node_modules/@smithy/core/dist-cjs/submodules/cbor/index.js @@ -0,0 +1,733 @@ +var __defProp = Object.defineProperty; +var __getOwnPropDesc = Object.getOwnPropertyDescriptor; +var __getOwnPropNames = Object.getOwnPropertyNames; +var __hasOwnProp = Object.prototype.hasOwnProperty; +var __export = (target, all) => { + for (var name in all) + __defProp(target, name, { get: all[name], enumerable: true }); +}; +var __copyProps = (to, from, except, desc) => { + if (from && typeof from === "object" || typeof from === "function") { + for (let key of __getOwnPropNames(from)) + if (!__hasOwnProp.call(to, key) && key !== except) + __defProp(to, key, { get: () => from[key], enumerable: !(desc = __getOwnPropDesc(from, key)) || desc.enumerable }); + } + return to; +}; +var __toCommonJS = (mod) => __copyProps(__defProp({}, "__esModule", { value: true }), mod); + +// src/submodules/cbor/index.ts +var cbor_exports = {}; +__export(cbor_exports, { + buildHttpRpcRequest: () => buildHttpRpcRequest, + cbor: () => cbor, + checkCborResponse: () => checkCborResponse, + dateToTag: () => dateToTag, + loadSmithyRpcV2CborErrorCode: () => loadSmithyRpcV2CborErrorCode, + parseCborBody: () => parseCborBody, + parseCborErrorBody: () => parseCborErrorBody, + tag: () => tag, + tagSymbol: () => tagSymbol +}); +module.exports = __toCommonJS(cbor_exports); + +// src/submodules/cbor/cbor-decode.ts +var import_util_utf8 = require("@smithy/util-utf8"); + +// src/submodules/cbor/cbor-types.ts +var majorUint64 = 0; +var majorNegativeInt64 = 1; +var majorUnstructuredByteString = 2; +var majorUtf8String = 3; +var majorList = 4; +var majorMap = 5; +var majorTag = 6; +var majorSpecial = 7; +var specialFalse = 20; +var specialTrue = 21; +var specialNull = 22; +var specialUndefined = 23; +var extendedOneByte = 24; +var extendedFloat16 = 25; +var extendedFloat32 = 26; +var extendedFloat64 = 27; +var minorIndefinite = 31; +function alloc(size) { + return typeof Buffer !== 
"undefined" ? Buffer.alloc(size) : new Uint8Array(size); +} +var tagSymbol = Symbol("@smithy/core/cbor::tagSymbol"); +function tag(data2) { + data2[tagSymbol] = true; + return data2; +} + +// src/submodules/cbor/cbor-decode.ts +var USE_TEXT_DECODER = typeof TextDecoder !== "undefined"; +var USE_BUFFER = typeof Buffer !== "undefined"; +var payload = alloc(0); +var dataView = new DataView(payload.buffer, payload.byteOffset, payload.byteLength); +var textDecoder = USE_TEXT_DECODER ? new TextDecoder() : null; +var _offset = 0; +function setPayload(bytes) { + payload = bytes; + dataView = new DataView(payload.buffer, payload.byteOffset, payload.byteLength); +} +function decode(at, to) { + if (at >= to) { + throw new Error("unexpected end of (decode) payload."); + } + const major = (payload[at] & 224) >> 5; + const minor = payload[at] & 31; + switch (major) { + case majorUint64: + case majorNegativeInt64: + case majorTag: + let unsignedInt; + let offset; + if (minor < 24) { + unsignedInt = minor; + offset = 1; + } else { + switch (minor) { + case extendedOneByte: + case extendedFloat16: + case extendedFloat32: + case extendedFloat64: + const countLength = minorValueToArgumentLength[minor]; + const countOffset = countLength + 1; + offset = countOffset; + if (to - at < countOffset) { + throw new Error(`countLength ${countLength} greater than remaining buf len.`); + } + const countIndex = at + 1; + if (countLength === 1) { + unsignedInt = payload[countIndex]; + } else if (countLength === 2) { + unsignedInt = dataView.getUint16(countIndex); + } else if (countLength === 4) { + unsignedInt = dataView.getUint32(countIndex); + } else { + unsignedInt = dataView.getBigUint64(countIndex); + } + break; + default: + throw new Error(`unexpected minor value ${minor}.`); + } + } + if (major === majorUint64) { + _offset = offset; + return castBigInt(unsignedInt); + } else if (major === majorNegativeInt64) { + let negativeInt; + if (typeof unsignedInt === "bigint") { + negativeInt = 
BigInt(-1) - unsignedInt; + } else { + negativeInt = -1 - unsignedInt; + } + _offset = offset; + return castBigInt(negativeInt); + } else { + const value = decode(at + offset, to); + const valueOffset = _offset; + _offset = offset + valueOffset; + return tag({ tag: castBigInt(unsignedInt), value }); + } + case majorUtf8String: + case majorMap: + case majorList: + case majorUnstructuredByteString: + if (minor === minorIndefinite) { + switch (major) { + case majorUtf8String: + return decodeUtf8StringIndefinite(at, to); + case majorMap: + return decodeMapIndefinite(at, to); + case majorList: + return decodeListIndefinite(at, to); + case majorUnstructuredByteString: + return decodeUnstructuredByteStringIndefinite(at, to); + } + } else { + switch (major) { + case majorUtf8String: + return decodeUtf8String(at, to); + case majorMap: + return decodeMap(at, to); + case majorList: + return decodeList(at, to); + case majorUnstructuredByteString: + return decodeUnstructuredByteString(at, to); + } + } + default: + return decodeSpecial(at, to); + } +} +function bytesToUtf8(bytes, at, to) { + if (USE_BUFFER && bytes.constructor?.name === "Buffer") { + return bytes.toString("utf-8", at, to); + } + if (textDecoder) { + return textDecoder.decode(bytes.subarray(at, to)); + } + return (0, import_util_utf8.toUtf8)(bytes.subarray(at, to)); +} +function demote(bigInteger) { + const num = Number(bigInteger); + if (num < Number.MIN_SAFE_INTEGER || Number.MAX_SAFE_INTEGER < num) { + console.warn(new Error(`@smithy/core/cbor - truncating BigInt(${bigInteger}) to ${num} with loss of precision.`)); + } + return num; +} +var minorValueToArgumentLength = { + [extendedOneByte]: 1, + [extendedFloat16]: 2, + [extendedFloat32]: 4, + [extendedFloat64]: 8 +}; +function bytesToFloat16(a, b) { + const sign = a >> 7; + const exponent = (a & 124) >> 2; + const fraction = (a & 3) << 8 | b; + const scalar = sign === 0 ? 
1 : -1; + let exponentComponent; + let summation; + if (exponent === 0) { + if (fraction === 0) { + return 0; + } else { + exponentComponent = Math.pow(2, 1 - 15); + summation = 0; + } + } else if (exponent === 31) { + if (fraction === 0) { + return scalar * Infinity; + } else { + return NaN; + } + } else { + exponentComponent = Math.pow(2, exponent - 15); + summation = 1; + } + summation += fraction / 1024; + return scalar * (exponentComponent * summation); +} +function decodeCount(at, to) { + const minor = payload[at] & 31; + if (minor < 24) { + _offset = 1; + return minor; + } + if (minor === extendedOneByte || minor === extendedFloat16 || minor === extendedFloat32 || minor === extendedFloat64) { + const countLength = minorValueToArgumentLength[minor]; + _offset = countLength + 1; + if (to - at < _offset) { + throw new Error(`countLength ${countLength} greater than remaining buf len.`); + } + const countIndex = at + 1; + if (countLength === 1) { + return payload[countIndex]; + } else if (countLength === 2) { + return dataView.getUint16(countIndex); + } else if (countLength === 4) { + return dataView.getUint32(countIndex); + } + return demote(dataView.getBigUint64(countIndex)); + } + throw new Error(`unexpected minor value ${minor}.`); +} +function decodeUtf8String(at, to) { + const length = decodeCount(at, to); + const offset = _offset; + at += offset; + if (to - at < length) { + throw new Error(`string len ${length} greater than remaining buf len.`); + } + const value = bytesToUtf8(payload, at, at + length); + _offset = offset + length; + return value; +} +function decodeUtf8StringIndefinite(at, to) { + at += 1; + const vector = []; + for (const base = at; at < to; ) { + if (payload[at] === 255) { + const data2 = alloc(vector.length); + data2.set(vector, 0); + _offset = at - base + 2; + return bytesToUtf8(data2, 0, data2.length); + } + const major = (payload[at] & 224) >> 5; + const minor = payload[at] & 31; + if (major !== majorUtf8String) { + throw new 
Error(`unexpected major type ${major} in indefinite string.`); + } + if (minor === minorIndefinite) { + throw new Error("nested indefinite string."); + } + const bytes = decodeUnstructuredByteString(at, to); + const length = _offset; + at += length; + for (let i = 0; i < bytes.length; ++i) { + vector.push(bytes[i]); + } + } + throw new Error("expected break marker."); +} +function decodeUnstructuredByteString(at, to) { + const length = decodeCount(at, to); + const offset = _offset; + at += offset; + if (to - at < length) { + throw new Error(`unstructured byte string len ${length} greater than remaining buf len.`); + } + const value = payload.subarray(at, at + length); + _offset = offset + length; + return value; +} +function decodeUnstructuredByteStringIndefinite(at, to) { + at += 1; + const vector = []; + for (const base = at; at < to; ) { + if (payload[at] === 255) { + const data2 = alloc(vector.length); + data2.set(vector, 0); + _offset = at - base + 2; + return data2; + } + const major = (payload[at] & 224) >> 5; + const minor = payload[at] & 31; + if (major !== majorUnstructuredByteString) { + throw new Error(`unexpected major type ${major} in indefinite string.`); + } + if (minor === minorIndefinite) { + throw new Error("nested indefinite string."); + } + const bytes = decodeUnstructuredByteString(at, to); + const length = _offset; + at += length; + for (let i = 0; i < bytes.length; ++i) { + vector.push(bytes[i]); + } + } + throw new Error("expected break marker."); +} +function decodeList(at, to) { + const listDataLength = decodeCount(at, to); + const offset = _offset; + at += offset; + const base = at; + const list = Array(listDataLength); + for (let i = 0; i < listDataLength; ++i) { + const item = decode(at, to); + const itemOffset = _offset; + list[i] = item; + at += itemOffset; + } + _offset = offset + (at - base); + return list; +} +function decodeListIndefinite(at, to) { + at += 1; + const list = []; + for (const base = at; at < to; ) { + if 
(payload[at] === 255) { + _offset = at - base + 2; + return list; + } + const item = decode(at, to); + const n = _offset; + at += n; + list.push(item); + } + throw new Error("expected break marker."); +} +function decodeMap(at, to) { + const mapDataLength = decodeCount(at, to); + const offset = _offset; + at += offset; + const base = at; + const map = {}; + for (let i = 0; i < mapDataLength; ++i) { + if (at >= to) { + throw new Error("unexpected end of map payload."); + } + const major = (payload[at] & 224) >> 5; + if (major !== majorUtf8String) { + throw new Error(`unexpected major type ${major} for map key at index ${at}.`); + } + const key = decode(at, to); + at += _offset; + const value = decode(at, to); + at += _offset; + map[key] = value; + } + _offset = offset + (at - base); + return map; +} +function decodeMapIndefinite(at, to) { + at += 1; + const base = at; + const map = {}; + for (; at < to; ) { + if (at >= to) { + throw new Error("unexpected end of map payload."); + } + if (payload[at] === 255) { + _offset = at - base + 2; + return map; + } + const major = (payload[at] & 224) >> 5; + if (major !== majorUtf8String) { + throw new Error(`unexpected major type ${major} for map key.`); + } + const key = decode(at, to); + at += _offset; + const value = decode(at, to); + at += _offset; + map[key] = value; + } + throw new Error("expected break marker."); +} +function decodeSpecial(at, to) { + const minor = payload[at] & 31; + switch (minor) { + case specialTrue: + case specialFalse: + _offset = 1; + return minor === specialTrue; + case specialNull: + _offset = 1; + return null; + case specialUndefined: + _offset = 1; + return null; + case extendedFloat16: + if (to - at < 3) { + throw new Error("incomplete float16 at end of buf."); + } + _offset = 3; + return bytesToFloat16(payload[at + 1], payload[at + 2]); + case extendedFloat32: + if (to - at < 5) { + throw new Error("incomplete float32 at end of buf."); + } + _offset = 5; + return dataView.getFloat32(at + 
1); + case extendedFloat64: + if (to - at < 9) { + throw new Error("incomplete float64 at end of buf."); + } + _offset = 9; + return dataView.getFloat64(at + 1); + default: + throw new Error(`unexpected minor value ${minor}.`); + } +} +function castBigInt(bigInt) { + if (typeof bigInt === "number") { + return bigInt; + } + const num = Number(bigInt); + if (Number.MIN_SAFE_INTEGER <= num && num <= Number.MAX_SAFE_INTEGER) { + return num; + } + return bigInt; +} + +// src/submodules/cbor/cbor-encode.ts +var import_util_utf82 = require("@smithy/util-utf8"); +var USE_BUFFER2 = typeof Buffer !== "undefined"; +var initialSize = 2048; +var data = alloc(initialSize); +var dataView2 = new DataView(data.buffer, data.byteOffset, data.byteLength); +var cursor = 0; +function ensureSpace(bytes) { + const remaining = data.byteLength - cursor; + if (remaining < bytes) { + if (cursor < 16e6) { + resize(Math.max(data.byteLength * 4, data.byteLength + bytes)); + } else { + resize(data.byteLength + bytes + 16e6); + } + } +} +function toUint8Array() { + const out = alloc(cursor); + out.set(data.subarray(0, cursor), 0); + cursor = 0; + return out; +} +function resize(size) { + const old = data; + data = alloc(size); + if (old) { + if (old.copy) { + old.copy(data, 0, 0, old.byteLength); + } else { + data.set(old, 0); + } + } + dataView2 = new DataView(data.buffer, data.byteOffset, data.byteLength); +} +function encodeHeader(major, value) { + if (value < 24) { + data[cursor++] = major << 5 | value; + } else if (value < 1 << 8) { + data[cursor++] = major << 5 | 24; + data[cursor++] = value; + } else if (value < 1 << 16) { + data[cursor++] = major << 5 | extendedFloat16; + dataView2.setUint16(cursor, value); + cursor += 2; + } else if (value < 2 ** 32) { + data[cursor++] = major << 5 | extendedFloat32; + dataView2.setUint32(cursor, value); + cursor += 4; + } else { + data[cursor++] = major << 5 | extendedFloat64; + dataView2.setBigUint64(cursor, typeof value === "bigint" ? 
value : BigInt(value)); + cursor += 8; + } +} +function encode(_input) { + const encodeStack = [_input]; + while (encodeStack.length) { + const input = encodeStack.pop(); + ensureSpace(typeof input === "string" ? input.length * 4 : 64); + if (typeof input === "string") { + if (USE_BUFFER2) { + encodeHeader(majorUtf8String, Buffer.byteLength(input)); + cursor += data.write(input, cursor); + } else { + const bytes = (0, import_util_utf82.fromUtf8)(input); + encodeHeader(majorUtf8String, bytes.byteLength); + data.set(bytes, cursor); + cursor += bytes.byteLength; + } + continue; + } else if (typeof input === "number") { + if (Number.isInteger(input)) { + const nonNegative = input >= 0; + const major = nonNegative ? majorUint64 : majorNegativeInt64; + const value = nonNegative ? input : -input - 1; + if (value < 24) { + data[cursor++] = major << 5 | value; + } else if (value < 256) { + data[cursor++] = major << 5 | 24; + data[cursor++] = value; + } else if (value < 65536) { + data[cursor++] = major << 5 | extendedFloat16; + data[cursor++] = value >> 8; + data[cursor++] = value; + } else if (value < 4294967296) { + data[cursor++] = major << 5 | extendedFloat32; + dataView2.setUint32(cursor, value); + cursor += 4; + } else { + data[cursor++] = major << 5 | extendedFloat64; + dataView2.setBigUint64(cursor, BigInt(value)); + cursor += 8; + } + continue; + } + data[cursor++] = majorSpecial << 5 | extendedFloat64; + dataView2.setFloat64(cursor, input); + cursor += 8; + continue; + } else if (typeof input === "bigint") { + const nonNegative = input >= 0; + const major = nonNegative ? majorUint64 : majorNegativeInt64; + const value = nonNegative ? 
input : -input - BigInt(1); + const n = Number(value); + if (n < 24) { + data[cursor++] = major << 5 | n; + } else if (n < 256) { + data[cursor++] = major << 5 | 24; + data[cursor++] = n; + } else if (n < 65536) { + data[cursor++] = major << 5 | extendedFloat16; + data[cursor++] = n >> 8; + data[cursor++] = n & 255; + } else if (n < 4294967296) { + data[cursor++] = major << 5 | extendedFloat32; + dataView2.setUint32(cursor, n); + cursor += 4; + } else { + data[cursor++] = major << 5 | extendedFloat64; + dataView2.setBigUint64(cursor, value); + cursor += 8; + } + continue; + } else if (input === null) { + data[cursor++] = majorSpecial << 5 | specialNull; + continue; + } else if (typeof input === "boolean") { + data[cursor++] = majorSpecial << 5 | (input ? specialTrue : specialFalse); + continue; + } else if (typeof input === "undefined") { + throw new Error("@smithy/core/cbor: client may not serialize undefined value."); + } else if (Array.isArray(input)) { + for (let i = input.length - 1; i >= 0; --i) { + encodeStack.push(input[i]); + } + encodeHeader(majorList, input.length); + continue; + } else if (typeof input.byteLength === "number") { + ensureSpace(input.length * 2); + encodeHeader(majorUnstructuredByteString, input.length); + data.set(input, cursor); + cursor += input.byteLength; + continue; + } else if (typeof input === "object") { + if (input[tagSymbol]) { + if ("tag" in input && "value" in input) { + encodeStack.push(input.value); + encodeHeader(majorTag, input.tag); + continue; + } else { + throw new Error( + "tag encountered with missing fields, need 'tag' and 'value', found: " + JSON.stringify(input) + ); + } + } + const keys = Object.keys(input); + for (let i = keys.length - 1; i >= 0; --i) { + const key = keys[i]; + encodeStack.push(input[key]); + encodeStack.push(key); + } + encodeHeader(majorMap, keys.length); + continue; + } + throw new Error(`data type ${input?.constructor?.name ?? 
typeof input} not compatible for encoding.`); + } +} + +// src/submodules/cbor/cbor.ts +var cbor = { + deserialize(payload2) { + setPayload(payload2); + return decode(0, payload2.length); + }, + serialize(input) { + try { + encode(input); + return toUint8Array(); + } catch (e) { + toUint8Array(); + throw e; + } + }, + /** + * @public + * @param size - byte length to allocate. + * + * This may be used to garbage collect the CBOR + * shared encoding buffer space, + * e.g. resizeEncodingBuffer(0); + * + * This may also be used to pre-allocate more space for + * CBOR encoding, e.g. resizeEncodingBuffer(100_000_000); + */ + resizeEncodingBuffer(size) { + resize(size); + } +}; + +// src/submodules/cbor/parseCborBody.ts +var import_protocols = require("@smithy/core/protocols"); +var import_protocol_http = require("@smithy/protocol-http"); +var import_util_body_length_browser = require("@smithy/util-body-length-browser"); +var parseCborBody = (streamBody, context) => { + return (0, import_protocols.collectBody)(streamBody, context).then(async (bytes) => { + if (bytes.length) { + try { + return cbor.deserialize(bytes); + } catch (e) { + Object.defineProperty(e, "$responseBodyText", { + value: context.utf8Encoder(bytes) + }); + throw e; + } + } + return {}; + }); +}; +var dateToTag = (date) => { + return tag({ + tag: 1, + value: date.getTime() / 1e3 + }); +}; +var parseCborErrorBody = async (errorBody, context) => { + const value = await parseCborBody(errorBody, context); + value.message = value.message ?? 
value.Message; + return value; +}; +var loadSmithyRpcV2CborErrorCode = (output, data2) => { + const sanitizeErrorCode = (rawValue) => { + let cleanValue = rawValue; + if (typeof cleanValue === "number") { + cleanValue = cleanValue.toString(); + } + if (cleanValue.indexOf(",") >= 0) { + cleanValue = cleanValue.split(",")[0]; + } + if (cleanValue.indexOf(":") >= 0) { + cleanValue = cleanValue.split(":")[0]; + } + if (cleanValue.indexOf("#") >= 0) { + cleanValue = cleanValue.split("#")[1]; + } + return cleanValue; + }; + if (data2["__type"] !== void 0) { + return sanitizeErrorCode(data2["__type"]); + } + if (data2.code !== void 0) { + return sanitizeErrorCode(data2.code); + } +}; +var checkCborResponse = (response) => { + if (String(response.headers["smithy-protocol"]).toLowerCase() !== "rpc-v2-cbor") { + throw new Error("Malformed RPCv2 CBOR response, status: " + response.statusCode); + } +}; +var buildHttpRpcRequest = async (context, headers, path, resolvedHostname, body) => { + const { hostname, protocol = "https", port, path: basePath } = await context.endpoint(); + const contents = { + protocol, + hostname, + port, + method: "POST", + path: basePath.endsWith("/") ? basePath.slice(0, -1) + path : basePath + path, + headers: { + // intentional copy. 
+ ...headers + } + }; + if (resolvedHostname !== void 0) { + contents.hostname = resolvedHostname; + } + if (body !== void 0) { + contents.body = body; + try { + contents.headers["content-length"] = String((0, import_util_body_length_browser.calculateBodyLength)(body)); + } catch (e) { + } + } + return new import_protocol_http.HttpRequest(contents); +}; +// Annotate the CommonJS export names for ESM import in node: +0 && (module.exports = { + buildHttpRpcRequest, + cbor, + checkCborResponse, + dateToTag, + loadSmithyRpcV2CborErrorCode, + parseCborBody, + parseCborErrorBody, + tag, + tagSymbol +}); diff --git a/amplify/functions/fetchDocuments/node_modules/@smithy/core/dist-cjs/submodules/protocols/index.js b/amplify/functions/fetchDocuments/node_modules/@smithy/core/dist-cjs/submodules/protocols/index.js new file mode 100644 index 0000000..455a5de --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@smithy/core/dist-cjs/submodules/protocols/index.js @@ -0,0 +1,164 @@ +var __defProp = Object.defineProperty; +var __getOwnPropDesc = Object.getOwnPropertyDescriptor; +var __getOwnPropNames = Object.getOwnPropertyNames; +var __hasOwnProp = Object.prototype.hasOwnProperty; +var __export = (target, all) => { + for (var name in all) + __defProp(target, name, { get: all[name], enumerable: true }); +}; +var __copyProps = (to, from, except, desc) => { + if (from && typeof from === "object" || typeof from === "function") { + for (let key of __getOwnPropNames(from)) + if (!__hasOwnProp.call(to, key) && key !== except) + __defProp(to, key, { get: () => from[key], enumerable: !(desc = __getOwnPropDesc(from, key)) || desc.enumerable }); + } + return to; +}; +var __toCommonJS = (mod) => __copyProps(__defProp({}, "__esModule", { value: true }), mod); + +// src/submodules/protocols/index.ts +var protocols_exports = {}; +__export(protocols_exports, { + RequestBuilder: () => RequestBuilder, + collectBody: () => collectBody, + extendedEncodeURIComponent: () => 
extendedEncodeURIComponent, + requestBuilder: () => requestBuilder, + resolvedPath: () => resolvedPath +}); +module.exports = __toCommonJS(protocols_exports); + +// src/submodules/protocols/collect-stream-body.ts +var import_util_stream = require("@smithy/util-stream"); +var collectBody = async (streamBody = new Uint8Array(), context) => { + if (streamBody instanceof Uint8Array) { + return import_util_stream.Uint8ArrayBlobAdapter.mutate(streamBody); + } + if (!streamBody) { + return import_util_stream.Uint8ArrayBlobAdapter.mutate(new Uint8Array()); + } + const fromContext = context.streamCollector(streamBody); + return import_util_stream.Uint8ArrayBlobAdapter.mutate(await fromContext); +}; + +// src/submodules/protocols/extended-encode-uri-component.ts +function extendedEncodeURIComponent(str) { + return encodeURIComponent(str).replace(/[!'()*]/g, function(c) { + return "%" + c.charCodeAt(0).toString(16).toUpperCase(); + }); +} + +// src/submodules/protocols/requestBuilder.ts +var import_protocol_http = require("@smithy/protocol-http"); + +// src/submodules/protocols/resolve-path.ts +var resolvedPath = (resolvedPath2, input, memberName, labelValueProvider, uriLabel, isGreedyLabel) => { + if (input != null && input[memberName] !== void 0) { + const labelValue = labelValueProvider(); + if (labelValue.length <= 0) { + throw new Error("Empty value provided for input HTTP label: " + memberName + "."); + } + resolvedPath2 = resolvedPath2.replace( + uriLabel, + isGreedyLabel ? 
labelValue.split("/").map((segment) => extendedEncodeURIComponent(segment)).join("/") : extendedEncodeURIComponent(labelValue) + ); + } else { + throw new Error("No value provided for input HTTP label: " + memberName + "."); + } + return resolvedPath2; +}; + +// src/submodules/protocols/requestBuilder.ts +function requestBuilder(input, context) { + return new RequestBuilder(input, context); +} +var RequestBuilder = class { + constructor(input, context) { + this.input = input; + this.context = context; + this.query = {}; + this.method = ""; + this.headers = {}; + this.path = ""; + this.body = null; + this.hostname = ""; + this.resolvePathStack = []; + } + async build() { + const { hostname, protocol = "https", port, path: basePath } = await this.context.endpoint(); + this.path = basePath; + for (const resolvePath of this.resolvePathStack) { + resolvePath(this.path); + } + return new import_protocol_http.HttpRequest({ + protocol, + hostname: this.hostname || hostname, + port, + method: this.method, + path: this.path, + query: this.query, + body: this.body, + headers: this.headers + }); + } + /** + * Brevity setter for "hostname". + */ + hn(hostname) { + this.hostname = hostname; + return this; + } + /** + * Brevity initial builder for "basepath". + */ + bp(uriLabel) { + this.resolvePathStack.push((basePath) => { + this.path = `${basePath?.endsWith("/") ? basePath.slice(0, -1) : basePath || ""}` + uriLabel; + }); + return this; + } + /** + * Brevity incremental builder for "path". + */ + p(memberName, labelValueProvider, uriLabel, isGreedyLabel) { + this.resolvePathStack.push((path) => { + this.path = resolvedPath(path, this.input, memberName, labelValueProvider, uriLabel, isGreedyLabel); + }); + return this; + } + /** + * Brevity setter for "headers". + */ + h(headers) { + this.headers = headers; + return this; + } + /** + * Brevity setter for "query". + */ + q(query) { + this.query = query; + return this; + } + /** + * Brevity setter for "body". 
+ */ + b(body) { + this.body = body; + return this; + } + /** + * Brevity setter for "method". + */ + m(method) { + this.method = method; + return this; + } +}; +// Annotate the CommonJS export names for ESM import in node: +0 && (module.exports = { + RequestBuilder, + collectBody, + extendedEncodeURIComponent, + requestBuilder, + resolvedPath +}); diff --git a/amplify/functions/fetchDocuments/node_modules/@smithy/core/dist-cjs/submodules/serde/index.js b/amplify/functions/fetchDocuments/node_modules/@smithy/core/dist-cjs/submodules/serde/index.js new file mode 100644 index 0000000..047fb9b --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@smithy/core/dist-cjs/submodules/serde/index.js @@ -0,0 +1,41 @@ +var __defProp = Object.defineProperty; +var __getOwnPropDesc = Object.getOwnPropertyDescriptor; +var __getOwnPropNames = Object.getOwnPropertyNames; +var __hasOwnProp = Object.prototype.hasOwnProperty; +var __export = (target, all) => { + for (var name in all) + __defProp(target, name, { get: all[name], enumerable: true }); +}; +var __copyProps = (to, from, except, desc) => { + if (from && typeof from === "object" || typeof from === "function") { + for (let key of __getOwnPropNames(from)) + if (!__hasOwnProp.call(to, key) && key !== except) + __defProp(to, key, { get: () => from[key], enumerable: !(desc = __getOwnPropDesc(from, key)) || desc.enumerable }); + } + return to; +}; +var __toCommonJS = (mod) => __copyProps(__defProp({}, "__esModule", { value: true }), mod); + +// src/submodules/serde/index.ts +var serde_exports = {}; +__export(serde_exports, { + NumericValue: () => NumericValue, + nv: () => nv +}); +module.exports = __toCommonJS(serde_exports); + +// src/submodules/serde/value/NumericValue.ts +var NumericValue = class { + constructor(string, type) { + this.string = string; + this.type = type; + } +}; +function nv(string) { + return new NumericValue(string, "bigDecimal"); +} +// Annotate the CommonJS export names for ESM import in node: +0 
&& (module.exports = { + NumericValue, + nv +}); diff --git a/amplify/functions/fetchDocuments/node_modules/@smithy/core/dist-cjs/util-identity-and-auth/DefaultIdentityProviderConfig.js b/amplify/functions/fetchDocuments/node_modules/@smithy/core/dist-cjs/util-identity-and-auth/DefaultIdentityProviderConfig.js new file mode 100644 index 0000000..0440577 --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@smithy/core/dist-cjs/util-identity-and-auth/DefaultIdentityProviderConfig.js @@ -0,0 +1 @@ +module.exports = require("../index.js"); \ No newline at end of file diff --git a/amplify/functions/fetchDocuments/node_modules/@smithy/core/dist-cjs/util-identity-and-auth/httpAuthSchemes/httpApiKeyAuth.js b/amplify/functions/fetchDocuments/node_modules/@smithy/core/dist-cjs/util-identity-and-auth/httpAuthSchemes/httpApiKeyAuth.js new file mode 100644 index 0000000..8817412 --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@smithy/core/dist-cjs/util-identity-and-auth/httpAuthSchemes/httpApiKeyAuth.js @@ -0,0 +1 @@ +module.exports = require("../../index.js"); \ No newline at end of file diff --git a/amplify/functions/fetchDocuments/node_modules/@smithy/core/dist-cjs/util-identity-and-auth/httpAuthSchemes/httpBearerAuth.js b/amplify/functions/fetchDocuments/node_modules/@smithy/core/dist-cjs/util-identity-and-auth/httpAuthSchemes/httpBearerAuth.js new file mode 100644 index 0000000..8817412 --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@smithy/core/dist-cjs/util-identity-and-auth/httpAuthSchemes/httpBearerAuth.js @@ -0,0 +1 @@ +module.exports = require("../../index.js"); \ No newline at end of file diff --git a/amplify/functions/fetchDocuments/node_modules/@smithy/core/dist-cjs/util-identity-and-auth/httpAuthSchemes/index.js b/amplify/functions/fetchDocuments/node_modules/@smithy/core/dist-cjs/util-identity-and-auth/httpAuthSchemes/index.js new file mode 100644 index 0000000..8817412 --- /dev/null +++ 
b/amplify/functions/fetchDocuments/node_modules/@smithy/core/dist-cjs/util-identity-and-auth/httpAuthSchemes/index.js @@ -0,0 +1 @@ +module.exports = require("../../index.js"); \ No newline at end of file diff --git a/amplify/functions/fetchDocuments/node_modules/@smithy/core/dist-cjs/util-identity-and-auth/httpAuthSchemes/noAuth.js b/amplify/functions/fetchDocuments/node_modules/@smithy/core/dist-cjs/util-identity-and-auth/httpAuthSchemes/noAuth.js new file mode 100644 index 0000000..8817412 --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@smithy/core/dist-cjs/util-identity-and-auth/httpAuthSchemes/noAuth.js @@ -0,0 +1 @@ +module.exports = require("../../index.js"); \ No newline at end of file diff --git a/amplify/functions/fetchDocuments/node_modules/@smithy/core/dist-cjs/util-identity-and-auth/index.js b/amplify/functions/fetchDocuments/node_modules/@smithy/core/dist-cjs/util-identity-and-auth/index.js new file mode 100644 index 0000000..0440577 --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@smithy/core/dist-cjs/util-identity-and-auth/index.js @@ -0,0 +1 @@ +module.exports = require("../index.js"); \ No newline at end of file diff --git a/amplify/functions/fetchDocuments/node_modules/@smithy/core/dist-cjs/util-identity-and-auth/memoizeIdentityProvider.js b/amplify/functions/fetchDocuments/node_modules/@smithy/core/dist-cjs/util-identity-and-auth/memoizeIdentityProvider.js new file mode 100644 index 0000000..0440577 --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@smithy/core/dist-cjs/util-identity-and-auth/memoizeIdentityProvider.js @@ -0,0 +1 @@ +module.exports = require("../index.js"); \ No newline at end of file diff --git a/amplify/functions/fetchDocuments/node_modules/@smithy/core/dist-es/getSmithyContext.js b/amplify/functions/fetchDocuments/node_modules/@smithy/core/dist-es/getSmithyContext.js new file mode 100644 index 0000000..3848a0c --- /dev/null +++ 
b/amplify/functions/fetchDocuments/node_modules/@smithy/core/dist-es/getSmithyContext.js @@ -0,0 +1,2 @@ +import { SMITHY_CONTEXT_KEY } from "@smithy/types"; +export const getSmithyContext = (context) => context[SMITHY_CONTEXT_KEY] || (context[SMITHY_CONTEXT_KEY] = {}); diff --git a/amplify/functions/fetchDocuments/node_modules/@smithy/core/dist-es/index.js b/amplify/functions/fetchDocuments/node_modules/@smithy/core/dist-es/index.js new file mode 100644 index 0000000..1dcdba1 --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@smithy/core/dist-es/index.js @@ -0,0 +1,8 @@ +export * from "./getSmithyContext"; +export * from "./middleware-http-auth-scheme"; +export * from "./middleware-http-signing"; +export * from "./normalizeProvider"; +export { createPaginator } from "./pagination/createPaginator"; +export * from "./protocols/requestBuilder"; +export * from "./setFeature"; +export * from "./util-identity-and-auth"; diff --git a/amplify/functions/fetchDocuments/node_modules/@smithy/core/dist-es/middleware-http-auth-scheme/getHttpAuthSchemeEndpointRuleSetPlugin.js b/amplify/functions/fetchDocuments/node_modules/@smithy/core/dist-es/middleware-http-auth-scheme/getHttpAuthSchemeEndpointRuleSetPlugin.js new file mode 100644 index 0000000..d0aaae6 --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@smithy/core/dist-es/middleware-http-auth-scheme/getHttpAuthSchemeEndpointRuleSetPlugin.js @@ -0,0 +1,17 @@ +import { httpAuthSchemeMiddleware } from "./httpAuthSchemeMiddleware"; +export const httpAuthSchemeEndpointRuleSetMiddlewareOptions = { + step: "serialize", + tags: ["HTTP_AUTH_SCHEME"], + name: "httpAuthSchemeMiddleware", + override: true, + relation: "before", + toMiddleware: "endpointV2Middleware", +}; +export const getHttpAuthSchemeEndpointRuleSetPlugin = (config, { httpAuthSchemeParametersProvider, identityProviderConfigProvider, }) => ({ + applyToStack: (clientStack) => { + clientStack.addRelativeTo(httpAuthSchemeMiddleware(config, { 
+ httpAuthSchemeParametersProvider, + identityProviderConfigProvider, + }), httpAuthSchemeEndpointRuleSetMiddlewareOptions); + }, +}); diff --git a/amplify/functions/fetchDocuments/node_modules/@smithy/core/dist-es/middleware-http-auth-scheme/getHttpAuthSchemePlugin.js b/amplify/functions/fetchDocuments/node_modules/@smithy/core/dist-es/middleware-http-auth-scheme/getHttpAuthSchemePlugin.js new file mode 100644 index 0000000..3fe03c5 --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@smithy/core/dist-es/middleware-http-auth-scheme/getHttpAuthSchemePlugin.js @@ -0,0 +1,18 @@ +import { serializerMiddlewareOption } from "@smithy/middleware-serde"; +import { httpAuthSchemeMiddleware } from "./httpAuthSchemeMiddleware"; +export const httpAuthSchemeMiddlewareOptions = { + step: "serialize", + tags: ["HTTP_AUTH_SCHEME"], + name: "httpAuthSchemeMiddleware", + override: true, + relation: "before", + toMiddleware: serializerMiddlewareOption.name, +}; +export const getHttpAuthSchemePlugin = (config, { httpAuthSchemeParametersProvider, identityProviderConfigProvider, }) => ({ + applyToStack: (clientStack) => { + clientStack.addRelativeTo(httpAuthSchemeMiddleware(config, { + httpAuthSchemeParametersProvider, + identityProviderConfigProvider, + }), httpAuthSchemeMiddlewareOptions); + }, +}); diff --git a/amplify/functions/fetchDocuments/node_modules/@smithy/core/dist-es/middleware-http-auth-scheme/httpAuthSchemeMiddleware.js b/amplify/functions/fetchDocuments/node_modules/@smithy/core/dist-es/middleware-http-auth-scheme/httpAuthSchemeMiddleware.js new file mode 100644 index 0000000..9869f65 --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@smithy/core/dist-es/middleware-http-auth-scheme/httpAuthSchemeMiddleware.js @@ -0,0 +1,43 @@ +import { SMITHY_CONTEXT_KEY, } from "@smithy/types"; +import { getSmithyContext } from "@smithy/util-middleware"; +import { resolveAuthOptions } from "./resolveAuthOptions"; +function 
convertHttpAuthSchemesToMap(httpAuthSchemes) { + const map = new Map(); + for (const scheme of httpAuthSchemes) { + map.set(scheme.schemeId, scheme); + } + return map; +} +export const httpAuthSchemeMiddleware = (config, mwOptions) => (next, context) => async (args) => { + const options = config.httpAuthSchemeProvider(await mwOptions.httpAuthSchemeParametersProvider(config, context, args.input)); + const authSchemePreference = config.authSchemePreference ? await config.authSchemePreference() : []; + const resolvedOptions = resolveAuthOptions(options, authSchemePreference); + const authSchemes = convertHttpAuthSchemesToMap(config.httpAuthSchemes); + const smithyContext = getSmithyContext(context); + const failureReasons = []; + for (const option of resolvedOptions) { + const scheme = authSchemes.get(option.schemeId); + if (!scheme) { + failureReasons.push(`HttpAuthScheme \`${option.schemeId}\` was not enabled for this service.`); + continue; + } + const identityProvider = scheme.identityProvider(await mwOptions.identityProviderConfigProvider(config)); + if (!identityProvider) { + failureReasons.push(`HttpAuthScheme \`${option.schemeId}\` did not have an IdentityProvider configured.`); + continue; + } + const { identityProperties = {}, signingProperties = {} } = option.propertiesExtractor?.(config, context) || {}; + option.identityProperties = Object.assign(option.identityProperties || {}, identityProperties); + option.signingProperties = Object.assign(option.signingProperties || {}, signingProperties); + smithyContext.selectedHttpAuthScheme = { + httpAuthOption: option, + identity: await identityProvider(option.identityProperties), + signer: scheme.signer, + }; + break; + } + if (!smithyContext.selectedHttpAuthScheme) { + throw new Error(failureReasons.join("\n")); + } + return next(args); +}; diff --git a/amplify/functions/fetchDocuments/node_modules/@smithy/core/dist-es/middleware-http-auth-scheme/index.js 
b/amplify/functions/fetchDocuments/node_modules/@smithy/core/dist-es/middleware-http-auth-scheme/index.js new file mode 100644 index 0000000..5042e7d --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@smithy/core/dist-es/middleware-http-auth-scheme/index.js @@ -0,0 +1,3 @@ +export * from "./httpAuthSchemeMiddleware"; +export * from "./getHttpAuthSchemeEndpointRuleSetPlugin"; +export * from "./getHttpAuthSchemePlugin"; diff --git a/amplify/functions/fetchDocuments/node_modules/@smithy/core/dist-es/middleware-http-auth-scheme/resolveAuthOptions.js b/amplify/functions/fetchDocuments/node_modules/@smithy/core/dist-es/middleware-http-auth-scheme/resolveAuthOptions.js new file mode 100644 index 0000000..8260757 --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@smithy/core/dist-es/middleware-http-auth-scheme/resolveAuthOptions.js @@ -0,0 +1,20 @@ +export const resolveAuthOptions = (candidateAuthOptions, authSchemePreference) => { + if (!authSchemePreference || authSchemePreference.length === 0) { + return candidateAuthOptions; + } + const preferredAuthOptions = []; + for (const preferredSchemeName of authSchemePreference) { + for (const candidateAuthOption of candidateAuthOptions) { + const candidateAuthSchemeName = candidateAuthOption.schemeId.split("#")[1]; + if (candidateAuthSchemeName === preferredSchemeName) { + preferredAuthOptions.push(candidateAuthOption); + } + } + } + for (const candidateAuthOption of candidateAuthOptions) { + if (!preferredAuthOptions.find(({ schemeId }) => schemeId === candidateAuthOption.schemeId)) { + preferredAuthOptions.push(candidateAuthOption); + } + } + return preferredAuthOptions; +}; diff --git a/amplify/functions/fetchDocuments/node_modules/@smithy/core/dist-es/middleware-http-signing/getHttpSigningMiddleware.js b/amplify/functions/fetchDocuments/node_modules/@smithy/core/dist-es/middleware-http-signing/getHttpSigningMiddleware.js new file mode 100644 index 0000000..e199712 --- /dev/null +++ 
b/amplify/functions/fetchDocuments/node_modules/@smithy/core/dist-es/middleware-http-signing/getHttpSigningMiddleware.js @@ -0,0 +1,15 @@ +import { httpSigningMiddleware } from "./httpSigningMiddleware"; +export const httpSigningMiddlewareOptions = { + step: "finalizeRequest", + tags: ["HTTP_SIGNING"], + name: "httpSigningMiddleware", + aliases: ["apiKeyMiddleware", "tokenMiddleware", "awsAuthMiddleware"], + override: true, + relation: "after", + toMiddleware: "retryMiddleware", +}; +export const getHttpSigningPlugin = (config) => ({ + applyToStack: (clientStack) => { + clientStack.addRelativeTo(httpSigningMiddleware(config), httpSigningMiddlewareOptions); + }, +}); diff --git a/amplify/functions/fetchDocuments/node_modules/@smithy/core/dist-es/middleware-http-signing/httpSigningMiddleware.js b/amplify/functions/fetchDocuments/node_modules/@smithy/core/dist-es/middleware-http-signing/httpSigningMiddleware.js new file mode 100644 index 0000000..dbc1b28 --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@smithy/core/dist-es/middleware-http-signing/httpSigningMiddleware.js @@ -0,0 +1,24 @@ +import { HttpRequest } from "@smithy/protocol-http"; +import { SMITHY_CONTEXT_KEY, } from "@smithy/types"; +import { getSmithyContext } from "@smithy/util-middleware"; +const defaultErrorHandler = (signingProperties) => (error) => { + throw error; +}; +const defaultSuccessHandler = (httpResponse, signingProperties) => { }; +export const httpSigningMiddleware = (config) => (next, context) => async (args) => { + if (!HttpRequest.isInstance(args.request)) { + return next(args); + } + const smithyContext = getSmithyContext(context); + const scheme = smithyContext.selectedHttpAuthScheme; + if (!scheme) { + throw new Error(`No HttpAuthScheme was selected: unable to sign request`); + } + const { httpAuthOption: { signingProperties = {} }, identity, signer, } = scheme; + const output = await next({ + ...args, + request: await signer.sign(args.request, identity, 
signingProperties), + }).catch((signer.errorHandler || defaultErrorHandler)(signingProperties)); + (signer.successHandler || defaultSuccessHandler)(output.response, signingProperties); + return output; +}; diff --git a/amplify/functions/fetchDocuments/node_modules/@smithy/core/dist-es/middleware-http-signing/index.js b/amplify/functions/fetchDocuments/node_modules/@smithy/core/dist-es/middleware-http-signing/index.js new file mode 100644 index 0000000..7bc6cfe --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@smithy/core/dist-es/middleware-http-signing/index.js @@ -0,0 +1,2 @@ +export * from "./httpSigningMiddleware"; +export * from "./getHttpSigningMiddleware"; diff --git a/amplify/functions/fetchDocuments/node_modules/@smithy/core/dist-es/normalizeProvider.js b/amplify/functions/fetchDocuments/node_modules/@smithy/core/dist-es/normalizeProvider.js new file mode 100644 index 0000000..a83ea99 --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@smithy/core/dist-es/normalizeProvider.js @@ -0,0 +1,6 @@ +export const normalizeProvider = (input) => { + if (typeof input === "function") + return input; + const promisified = Promise.resolve(input); + return () => promisified; +}; diff --git a/amplify/functions/fetchDocuments/node_modules/@smithy/core/dist-es/pagination/createPaginator.js b/amplify/functions/fetchDocuments/node_modules/@smithy/core/dist-es/pagination/createPaginator.js new file mode 100644 index 0000000..4e8f889 --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@smithy/core/dist-es/pagination/createPaginator.js @@ -0,0 +1,41 @@ +const makePagedClientRequest = async (CommandCtor, client, input, withCommand = (_) => _, ...args) => { + let command = new CommandCtor(input); + command = withCommand(command) ?? 
command; + return await client.send(command, ...args); +}; +export function createPaginator(ClientCtor, CommandCtor, inputTokenName, outputTokenName, pageSizeTokenName) { + return async function* paginateOperation(config, input, ...additionalArguments) { + const _input = input; + let token = config.startingToken ?? _input[inputTokenName]; + let hasNext = true; + let page; + while (hasNext) { + _input[inputTokenName] = token; + if (pageSizeTokenName) { + _input[pageSizeTokenName] = _input[pageSizeTokenName] ?? config.pageSize; + } + if (config.client instanceof ClientCtor) { + page = await makePagedClientRequest(CommandCtor, config.client, input, config.withCommand, ...additionalArguments); + } + else { + throw new Error(`Invalid client, expected instance of ${ClientCtor.name}`); + } + yield page; + const prevToken = token; + token = get(page, outputTokenName); + hasNext = !!(token && (!config.stopOnSameToken || token !== prevToken)); + } + return undefined; + }; +} +const get = (fromObject, path) => { + let cursor = fromObject; + const pathComponents = path.split("."); + for (const step of pathComponents) { + if (!cursor || typeof cursor !== "object") { + return undefined; + } + cursor = cursor[step]; + } + return cursor; +}; diff --git a/amplify/functions/fetchDocuments/node_modules/@smithy/core/dist-es/protocols/requestBuilder.js b/amplify/functions/fetchDocuments/node_modules/@smithy/core/dist-es/protocols/requestBuilder.js new file mode 100644 index 0000000..5b790a7 --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@smithy/core/dist-es/protocols/requestBuilder.js @@ -0,0 +1 @@ +export { requestBuilder } from "@smithy/core/protocols"; diff --git a/amplify/functions/fetchDocuments/node_modules/@smithy/core/dist-es/setFeature.js b/amplify/functions/fetchDocuments/node_modules/@smithy/core/dist-es/setFeature.js new file mode 100644 index 0000000..a3a0303 --- /dev/null +++ 
b/amplify/functions/fetchDocuments/node_modules/@smithy/core/dist-es/setFeature.js @@ -0,0 +1,11 @@ +export function setFeature(context, feature, value) { + if (!context.__smithy_context) { + context.__smithy_context = { + features: {}, + }; + } + else if (!context.__smithy_context.features) { + context.__smithy_context.features = {}; + } + context.__smithy_context.features[feature] = value; +} diff --git a/amplify/functions/fetchDocuments/node_modules/@smithy/core/dist-es/submodules/cbor/cbor-decode.js b/amplify/functions/fetchDocuments/node_modules/@smithy/core/dist-es/submodules/cbor/cbor-decode.js new file mode 100644 index 0000000..dca1c63 --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@smithy/core/dist-es/submodules/cbor/cbor-decode.js @@ -0,0 +1,391 @@ +import { toUtf8 } from "@smithy/util-utf8"; +import { alloc, extendedFloat16, extendedFloat32, extendedFloat64, extendedOneByte, majorList, majorMap, majorNegativeInt64, majorTag, majorUint64, majorUnstructuredByteString, majorUtf8String, minorIndefinite, specialFalse, specialNull, specialTrue, specialUndefined, tag, } from "./cbor-types"; +const USE_TEXT_DECODER = typeof TextDecoder !== "undefined"; +const USE_BUFFER = typeof Buffer !== "undefined"; +let payload = alloc(0); +let dataView = new DataView(payload.buffer, payload.byteOffset, payload.byteLength); +const textDecoder = USE_TEXT_DECODER ? 
new TextDecoder() : null; +let _offset = 0; +export function setPayload(bytes) { + payload = bytes; + dataView = new DataView(payload.buffer, payload.byteOffset, payload.byteLength); +} +export function decode(at, to) { + if (at >= to) { + throw new Error("unexpected end of (decode) payload."); + } + const major = (payload[at] & 224) >> 5; + const minor = payload[at] & 31; + switch (major) { + case majorUint64: + case majorNegativeInt64: + case majorTag: + let unsignedInt; + let offset; + if (minor < 24) { + unsignedInt = minor; + offset = 1; + } + else { + switch (minor) { + case extendedOneByte: + case extendedFloat16: + case extendedFloat32: + case extendedFloat64: + const countLength = minorValueToArgumentLength[minor]; + const countOffset = (countLength + 1); + offset = countOffset; + if (to - at < countOffset) { + throw new Error(`countLength ${countLength} greater than remaining buf len.`); + } + const countIndex = at + 1; + if (countLength === 1) { + unsignedInt = payload[countIndex]; + } + else if (countLength === 2) { + unsignedInt = dataView.getUint16(countIndex); + } + else if (countLength === 4) { + unsignedInt = dataView.getUint32(countIndex); + } + else { + unsignedInt = dataView.getBigUint64(countIndex); + } + break; + default: + throw new Error(`unexpected minor value ${minor}.`); + } + } + if (major === majorUint64) { + _offset = offset; + return castBigInt(unsignedInt); + } + else if (major === majorNegativeInt64) { + let negativeInt; + if (typeof unsignedInt === "bigint") { + negativeInt = BigInt(-1) - unsignedInt; + } + else { + negativeInt = -1 - unsignedInt; + } + _offset = offset; + return castBigInt(negativeInt); + } + else { + const value = decode(at + offset, to); + const valueOffset = _offset; + _offset = offset + valueOffset; + return tag({ tag: castBigInt(unsignedInt), value }); + } + case majorUtf8String: + case majorMap: + case majorList: + case majorUnstructuredByteString: + if (minor === minorIndefinite) { + switch (major) { + case 
majorUtf8String: + return decodeUtf8StringIndefinite(at, to); + case majorMap: + return decodeMapIndefinite(at, to); + case majorList: + return decodeListIndefinite(at, to); + case majorUnstructuredByteString: + return decodeUnstructuredByteStringIndefinite(at, to); + } + } + else { + switch (major) { + case majorUtf8String: + return decodeUtf8String(at, to); + case majorMap: + return decodeMap(at, to); + case majorList: + return decodeList(at, to); + case majorUnstructuredByteString: + return decodeUnstructuredByteString(at, to); + } + } + default: + return decodeSpecial(at, to); + } +} +function bytesToUtf8(bytes, at, to) { + if (USE_BUFFER && bytes.constructor?.name === "Buffer") { + return bytes.toString("utf-8", at, to); + } + if (textDecoder) { + return textDecoder.decode(bytes.subarray(at, to)); + } + return toUtf8(bytes.subarray(at, to)); +} +function demote(bigInteger) { + const num = Number(bigInteger); + if (num < Number.MIN_SAFE_INTEGER || Number.MAX_SAFE_INTEGER < num) { + console.warn(new Error(`@smithy/core/cbor - truncating BigInt(${bigInteger}) to ${num} with loss of precision.`)); + } + return num; +} +const minorValueToArgumentLength = { + [extendedOneByte]: 1, + [extendedFloat16]: 2, + [extendedFloat32]: 4, + [extendedFloat64]: 8, +}; +export function bytesToFloat16(a, b) { + const sign = a >> 7; + const exponent = (a & 124) >> 2; + const fraction = ((a & 3) << 8) | b; + const scalar = sign === 0 ? 
1 : -1; + let exponentComponent; + let summation; + if (exponent === 0b00000) { + if (fraction === 0) { + return 0; + } + else { + exponentComponent = Math.pow(2, 1 - 15); + summation = 0; + } + } + else if (exponent === 0b11111) { + if (fraction === 0) { + return scalar * Infinity; + } + else { + return NaN; + } + } + else { + exponentComponent = Math.pow(2, exponent - 15); + summation = 1; + } + summation += fraction / 1024; + return scalar * (exponentComponent * summation); +} +function decodeCount(at, to) { + const minor = payload[at] & 31; + if (minor < 24) { + _offset = 1; + return minor; + } + if (minor === extendedOneByte || + minor === extendedFloat16 || + minor === extendedFloat32 || + minor === extendedFloat64) { + const countLength = minorValueToArgumentLength[minor]; + _offset = (countLength + 1); + if (to - at < _offset) { + throw new Error(`countLength ${countLength} greater than remaining buf len.`); + } + const countIndex = at + 1; + if (countLength === 1) { + return payload[countIndex]; + } + else if (countLength === 2) { + return dataView.getUint16(countIndex); + } + else if (countLength === 4) { + return dataView.getUint32(countIndex); + } + return demote(dataView.getBigUint64(countIndex)); + } + throw new Error(`unexpected minor value ${minor}.`); +} +function decodeUtf8String(at, to) { + const length = decodeCount(at, to); + const offset = _offset; + at += offset; + if (to - at < length) { + throw new Error(`string len ${length} greater than remaining buf len.`); + } + const value = bytesToUtf8(payload, at, at + length); + _offset = offset + length; + return value; +} +function decodeUtf8StringIndefinite(at, to) { + at += 1; + const vector = []; + for (const base = at; at < to;) { + if (payload[at] === 255) { + const data = alloc(vector.length); + data.set(vector, 0); + _offset = at - base + 2; + return bytesToUtf8(data, 0, data.length); + } + const major = (payload[at] & 224) >> 5; + const minor = payload[at] & 31; + if (major !== 
majorUtf8String) { + throw new Error(`unexpected major type ${major} in indefinite string.`); + } + if (minor === minorIndefinite) { + throw new Error("nested indefinite string."); + } + const bytes = decodeUnstructuredByteString(at, to); + const length = _offset; + at += length; + for (let i = 0; i < bytes.length; ++i) { + vector.push(bytes[i]); + } + } + throw new Error("expected break marker."); +} +function decodeUnstructuredByteString(at, to) { + const length = decodeCount(at, to); + const offset = _offset; + at += offset; + if (to - at < length) { + throw new Error(`unstructured byte string len ${length} greater than remaining buf len.`); + } + const value = payload.subarray(at, at + length); + _offset = offset + length; + return value; +} +function decodeUnstructuredByteStringIndefinite(at, to) { + at += 1; + const vector = []; + for (const base = at; at < to;) { + if (payload[at] === 255) { + const data = alloc(vector.length); + data.set(vector, 0); + _offset = at - base + 2; + return data; + } + const major = (payload[at] & 224) >> 5; + const minor = payload[at] & 31; + if (major !== majorUnstructuredByteString) { + throw new Error(`unexpected major type ${major} in indefinite string.`); + } + if (minor === minorIndefinite) { + throw new Error("nested indefinite string."); + } + const bytes = decodeUnstructuredByteString(at, to); + const length = _offset; + at += length; + for (let i = 0; i < bytes.length; ++i) { + vector.push(bytes[i]); + } + } + throw new Error("expected break marker."); +} +function decodeList(at, to) { + const listDataLength = decodeCount(at, to); + const offset = _offset; + at += offset; + const base = at; + const list = Array(listDataLength); + for (let i = 0; i < listDataLength; ++i) { + const item = decode(at, to); + const itemOffset = _offset; + list[i] = item; + at += itemOffset; + } + _offset = offset + (at - base); + return list; +} +function decodeListIndefinite(at, to) { + at += 1; + const list = []; + for (const base = at; 
at < to;) { + if (payload[at] === 255) { + _offset = at - base + 2; + return list; + } + const item = decode(at, to); + const n = _offset; + at += n; + list.push(item); + } + throw new Error("expected break marker."); +} +function decodeMap(at, to) { + const mapDataLength = decodeCount(at, to); + const offset = _offset; + at += offset; + const base = at; + const map = {}; + for (let i = 0; i < mapDataLength; ++i) { + if (at >= to) { + throw new Error("unexpected end of map payload."); + } + const major = (payload[at] & 224) >> 5; + if (major !== majorUtf8String) { + throw new Error(`unexpected major type ${major} for map key at index ${at}.`); + } + const key = decode(at, to); + at += _offset; + const value = decode(at, to); + at += _offset; + map[key] = value; + } + _offset = offset + (at - base); + return map; +} +function decodeMapIndefinite(at, to) { + at += 1; + const base = at; + const map = {}; + for (; at < to;) { + if (at >= to) { + throw new Error("unexpected end of map payload."); + } + if (payload[at] === 255) { + _offset = at - base + 2; + return map; + } + const major = (payload[at] & 224) >> 5; + if (major !== majorUtf8String) { + throw new Error(`unexpected major type ${major} for map key.`); + } + const key = decode(at, to); + at += _offset; + const value = decode(at, to); + at += _offset; + map[key] = value; + } + throw new Error("expected break marker."); +} +function decodeSpecial(at, to) { + const minor = payload[at] & 31; + switch (minor) { + case specialTrue: + case specialFalse: + _offset = 1; + return minor === specialTrue; + case specialNull: + _offset = 1; + return null; + case specialUndefined: + _offset = 1; + return null; + case extendedFloat16: + if (to - at < 3) { + throw new Error("incomplete float16 at end of buf."); + } + _offset = 3; + return bytesToFloat16(payload[at + 1], payload[at + 2]); + case extendedFloat32: + if (to - at < 5) { + throw new Error("incomplete float32 at end of buf."); + } + _offset = 5; + return 
dataView.getFloat32(at + 1); + case extendedFloat64: + if (to - at < 9) { + throw new Error("incomplete float64 at end of buf."); + } + _offset = 9; + return dataView.getFloat64(at + 1); + default: + throw new Error(`unexpected minor value ${minor}.`); + } +} +function castBigInt(bigInt) { + if (typeof bigInt === "number") { + return bigInt; + } + const num = Number(bigInt); + if (Number.MIN_SAFE_INTEGER <= num && num <= Number.MAX_SAFE_INTEGER) { + return num; + } + return bigInt; +} diff --git a/amplify/functions/fetchDocuments/node_modules/@smithy/core/dist-es/submodules/cbor/cbor-encode.js b/amplify/functions/fetchDocuments/node_modules/@smithy/core/dist-es/submodules/cbor/cbor-encode.js new file mode 100644 index 0000000..17af4e2 --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@smithy/core/dist-es/submodules/cbor/cbor-encode.js @@ -0,0 +1,191 @@ +import { fromUtf8 } from "@smithy/util-utf8"; +import { extendedFloat16, extendedFloat32, extendedFloat64, majorList, majorMap, majorNegativeInt64, majorSpecial, majorTag, majorUint64, majorUnstructuredByteString, majorUtf8String, specialFalse, specialNull, specialTrue, tagSymbol, } from "./cbor-types"; +import { alloc } from "./cbor-types"; +const USE_BUFFER = typeof Buffer !== "undefined"; +const initialSize = 2048; +let data = alloc(initialSize); +let dataView = new DataView(data.buffer, data.byteOffset, data.byteLength); +let cursor = 0; +function ensureSpace(bytes) { + const remaining = data.byteLength - cursor; + if (remaining < bytes) { + if (cursor < 16000000) { + resize(Math.max(data.byteLength * 4, data.byteLength + bytes)); + } + else { + resize(data.byteLength + bytes + 16000000); + } + } +} +export function toUint8Array() { + const out = alloc(cursor); + out.set(data.subarray(0, cursor), 0); + cursor = 0; + return out; +} +export function resize(size) { + const old = data; + data = alloc(size); + if (old) { + if (old.copy) { + old.copy(data, 0, 0, old.byteLength); + } + else { + 
data.set(old, 0); + } + } + dataView = new DataView(data.buffer, data.byteOffset, data.byteLength); +} +function encodeHeader(major, value) { + if (value < 24) { + data[cursor++] = (major << 5) | value; + } + else if (value < 1 << 8) { + data[cursor++] = (major << 5) | 24; + data[cursor++] = value; + } + else if (value < 1 << 16) { + data[cursor++] = (major << 5) | extendedFloat16; + dataView.setUint16(cursor, value); + cursor += 2; + } + else if (value < 2 ** 32) { + data[cursor++] = (major << 5) | extendedFloat32; + dataView.setUint32(cursor, value); + cursor += 4; + } + else { + data[cursor++] = (major << 5) | extendedFloat64; + dataView.setBigUint64(cursor, typeof value === "bigint" ? value : BigInt(value)); + cursor += 8; + } +} +export function encode(_input) { + const encodeStack = [_input]; + while (encodeStack.length) { + const input = encodeStack.pop(); + ensureSpace(typeof input === "string" ? input.length * 4 : 64); + if (typeof input === "string") { + if (USE_BUFFER) { + encodeHeader(majorUtf8String, Buffer.byteLength(input)); + cursor += data.write(input, cursor); + } + else { + const bytes = fromUtf8(input); + encodeHeader(majorUtf8String, bytes.byteLength); + data.set(bytes, cursor); + cursor += bytes.byteLength; + } + continue; + } + else if (typeof input === "number") { + if (Number.isInteger(input)) { + const nonNegative = input >= 0; + const major = nonNegative ? majorUint64 : majorNegativeInt64; + const value = nonNegative ? 
input : -input - 1; + if (value < 24) { + data[cursor++] = (major << 5) | value; + } + else if (value < 256) { + data[cursor++] = (major << 5) | 24; + data[cursor++] = value; + } + else if (value < 65536) { + data[cursor++] = (major << 5) | extendedFloat16; + data[cursor++] = value >> 8; + data[cursor++] = value; + } + else if (value < 4294967296) { + data[cursor++] = (major << 5) | extendedFloat32; + dataView.setUint32(cursor, value); + cursor += 4; + } + else { + data[cursor++] = (major << 5) | extendedFloat64; + dataView.setBigUint64(cursor, BigInt(value)); + cursor += 8; + } + continue; + } + data[cursor++] = (majorSpecial << 5) | extendedFloat64; + dataView.setFloat64(cursor, input); + cursor += 8; + continue; + } + else if (typeof input === "bigint") { + const nonNegative = input >= 0; + const major = nonNegative ? majorUint64 : majorNegativeInt64; + const value = nonNegative ? input : -input - BigInt(1); + const n = Number(value); + if (n < 24) { + data[cursor++] = (major << 5) | n; + } + else if (n < 256) { + data[cursor++] = (major << 5) | 24; + data[cursor++] = n; + } + else if (n < 65536) { + data[cursor++] = (major << 5) | extendedFloat16; + data[cursor++] = n >> 8; + data[cursor++] = n & 255; + } + else if (n < 4294967296) { + data[cursor++] = (major << 5) | extendedFloat32; + dataView.setUint32(cursor, n); + cursor += 4; + } + else { + data[cursor++] = (major << 5) | extendedFloat64; + dataView.setBigUint64(cursor, value); + cursor += 8; + } + continue; + } + else if (input === null) { + data[cursor++] = (majorSpecial << 5) | specialNull; + continue; + } + else if (typeof input === "boolean") { + data[cursor++] = (majorSpecial << 5) | (input ? 
specialTrue : specialFalse); + continue; + } + else if (typeof input === "undefined") { + throw new Error("@smithy/core/cbor: client may not serialize undefined value."); + } + else if (Array.isArray(input)) { + for (let i = input.length - 1; i >= 0; --i) { + encodeStack.push(input[i]); + } + encodeHeader(majorList, input.length); + continue; + } + else if (typeof input.byteLength === "number") { + ensureSpace(input.length * 2); + encodeHeader(majorUnstructuredByteString, input.length); + data.set(input, cursor); + cursor += input.byteLength; + continue; + } + else if (typeof input === "object") { + if (input[tagSymbol]) { + if ("tag" in input && "value" in input) { + encodeStack.push(input.value); + encodeHeader(majorTag, input.tag); + continue; + } + else { + throw new Error("tag encountered with missing fields, need 'tag' and 'value', found: " + JSON.stringify(input)); + } + } + const keys = Object.keys(input); + for (let i = keys.length - 1; i >= 0; --i) { + const key = keys[i]; + encodeStack.push(input[key]); + encodeStack.push(key); + } + encodeHeader(majorMap, keys.length); + continue; + } + throw new Error(`data type ${input?.constructor?.name ?? 
typeof input} not compatible for encoding.`); + } +} diff --git a/amplify/functions/fetchDocuments/node_modules/@smithy/core/dist-es/submodules/cbor/cbor-types.js b/amplify/functions/fetchDocuments/node_modules/@smithy/core/dist-es/submodules/cbor/cbor-types.js new file mode 100644 index 0000000..a720eb7 --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@smithy/core/dist-es/submodules/cbor/cbor-types.js @@ -0,0 +1,25 @@ +export const majorUint64 = 0; +export const majorNegativeInt64 = 1; +export const majorUnstructuredByteString = 2; +export const majorUtf8String = 3; +export const majorList = 4; +export const majorMap = 5; +export const majorTag = 6; +export const majorSpecial = 7; +export const specialFalse = 20; +export const specialTrue = 21; +export const specialNull = 22; +export const specialUndefined = 23; +export const extendedOneByte = 24; +export const extendedFloat16 = 25; +export const extendedFloat32 = 26; +export const extendedFloat64 = 27; +export const minorIndefinite = 31; +export function alloc(size) { + return typeof Buffer !== "undefined" ? 
Buffer.alloc(size) : new Uint8Array(size); +} +export const tagSymbol = Symbol("@smithy/core/cbor::tagSymbol"); +export function tag(data) { + data[tagSymbol] = true; + return data; +} diff --git a/amplify/functions/fetchDocuments/node_modules/@smithy/core/dist-es/submodules/cbor/cbor.js b/amplify/functions/fetchDocuments/node_modules/@smithy/core/dist-es/submodules/cbor/cbor.js new file mode 100644 index 0000000..8df975f --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@smithy/core/dist-es/submodules/cbor/cbor.js @@ -0,0 +1,21 @@ +import { decode, setPayload } from "./cbor-decode"; +import { encode, resize, toUint8Array } from "./cbor-encode"; +export const cbor = { + deserialize(payload) { + setPayload(payload); + return decode(0, payload.length); + }, + serialize(input) { + try { + encode(input); + return toUint8Array(); + } + catch (e) { + toUint8Array(); + throw e; + } + }, + resizeEncodingBuffer(size) { + resize(size); + }, +}; diff --git a/amplify/functions/fetchDocuments/node_modules/@smithy/core/dist-es/submodules/cbor/index.js b/amplify/functions/fetchDocuments/node_modules/@smithy/core/dist-es/submodules/cbor/index.js new file mode 100644 index 0000000..0910d27 --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@smithy/core/dist-es/submodules/cbor/index.js @@ -0,0 +1,3 @@ +export { cbor } from "./cbor"; +export * from "./parseCborBody"; +export { tagSymbol, tag } from "./cbor-types"; diff --git a/amplify/functions/fetchDocuments/node_modules/@smithy/core/dist-es/submodules/cbor/parseCborBody.js b/amplify/functions/fetchDocuments/node_modules/@smithy/core/dist-es/submodules/cbor/parseCborBody.js new file mode 100644 index 0000000..03eeae6 --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@smithy/core/dist-es/submodules/cbor/parseCborBody.js @@ -0,0 +1,85 @@ +import { collectBody } from "@smithy/core/protocols"; +import { HttpRequest as __HttpRequest } from "@smithy/protocol-http"; +import { 
calculateBodyLength } from "@smithy/util-body-length-browser"; +import { cbor } from "./cbor"; +import { tag, tagSymbol } from "./cbor-types"; +export const parseCborBody = (streamBody, context) => { + return collectBody(streamBody, context).then(async (bytes) => { + if (bytes.length) { + try { + return cbor.deserialize(bytes); + } + catch (e) { + Object.defineProperty(e, "$responseBodyText", { + value: context.utf8Encoder(bytes), + }); + throw e; + } + } + return {}; + }); +}; +export const dateToTag = (date) => { + return tag({ + tag: 1, + value: date.getTime() / 1000, + }); +}; +export const parseCborErrorBody = async (errorBody, context) => { + const value = await parseCborBody(errorBody, context); + value.message = value.message ?? value.Message; + return value; +}; +export const loadSmithyRpcV2CborErrorCode = (output, data) => { + const sanitizeErrorCode = (rawValue) => { + let cleanValue = rawValue; + if (typeof cleanValue === "number") { + cleanValue = cleanValue.toString(); + } + if (cleanValue.indexOf(",") >= 0) { + cleanValue = cleanValue.split(",")[0]; + } + if (cleanValue.indexOf(":") >= 0) { + cleanValue = cleanValue.split(":")[0]; + } + if (cleanValue.indexOf("#") >= 0) { + cleanValue = cleanValue.split("#")[1]; + } + return cleanValue; + }; + if (data["__type"] !== undefined) { + return sanitizeErrorCode(data["__type"]); + } + if (data.code !== undefined) { + return sanitizeErrorCode(data.code); + } +}; +export const checkCborResponse = (response) => { + if (String(response.headers["smithy-protocol"]).toLowerCase() !== "rpc-v2-cbor") { + throw new Error("Malformed RPCv2 CBOR response, status: " + response.statusCode); + } +}; +export const buildHttpRpcRequest = async (context, headers, path, resolvedHostname, body) => { + const { hostname, protocol = "https", port, path: basePath } = await context.endpoint(); + const contents = { + protocol, + hostname, + port, + method: "POST", + path: basePath.endsWith("/") ? 
basePath.slice(0, -1) + path : basePath + path, + headers: { + ...headers, + }, + }; + if (resolvedHostname !== undefined) { + contents.hostname = resolvedHostname; + } + if (body !== undefined) { + contents.body = body; + try { + contents.headers["content-length"] = String(calculateBodyLength(body)); + } + catch (e) { } + } + return new __HttpRequest(contents); +}; diff --git a/amplify/functions/fetchDocuments/node_modules/@smithy/core/dist-es/submodules/protocols/collect-stream-body.js b/amplify/functions/fetchDocuments/node_modules/@smithy/core/dist-es/submodules/protocols/collect-stream-body.js new file mode 100644 index 0000000..b6a5c0b --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@smithy/core/dist-es/submodules/protocols/collect-stream-body.js @@ -0,0 +1,11 @@ +import { Uint8ArrayBlobAdapter } from "@smithy/util-stream"; +export const collectBody = async (streamBody = new Uint8Array(), context) => { + if (streamBody instanceof Uint8Array) { + return Uint8ArrayBlobAdapter.mutate(streamBody); + } + if (!streamBody) { + return Uint8ArrayBlobAdapter.mutate(new Uint8Array()); + } + const fromContext = context.streamCollector(streamBody); + return Uint8ArrayBlobAdapter.mutate(await fromContext); +}; diff --git a/amplify/functions/fetchDocuments/node_modules/@smithy/core/dist-es/submodules/protocols/extended-encode-uri-component.js b/amplify/functions/fetchDocuments/node_modules/@smithy/core/dist-es/submodules/protocols/extended-encode-uri-component.js new file mode 100644 index 0000000..5baeaf5 --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@smithy/core/dist-es/submodules/protocols/extended-encode-uri-component.js @@ -0,0 +1,5 @@ +export function extendedEncodeURIComponent(str) { + return encodeURIComponent(str).replace(/[!'()*]/g, function (c) { + return "%" + c.charCodeAt(0).toString(16).toUpperCase(); + }); +} diff --git a/amplify/functions/fetchDocuments/node_modules/@smithy/core/dist-es/submodules/protocols/index.js 
b/amplify/functions/fetchDocuments/node_modules/@smithy/core/dist-es/submodules/protocols/index.js new file mode 100644 index 0000000..a5de22f --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@smithy/core/dist-es/submodules/protocols/index.js @@ -0,0 +1,4 @@ +export * from "./collect-stream-body"; +export * from "./extended-encode-uri-component"; +export * from "./requestBuilder"; +export * from "./resolve-path"; diff --git a/amplify/functions/fetchDocuments/node_modules/@smithy/core/dist-es/submodules/protocols/requestBuilder.js b/amplify/functions/fetchDocuments/node_modules/@smithy/core/dist-es/submodules/protocols/requestBuilder.js new file mode 100644 index 0000000..3391ef2 --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@smithy/core/dist-es/submodules/protocols/requestBuilder.js @@ -0,0 +1,67 @@ +import { HttpRequest } from "@smithy/protocol-http"; +import { resolvedPath } from "./resolve-path"; +export function requestBuilder(input, context) { + return new RequestBuilder(input, context); +} +export class RequestBuilder { + constructor(input, context) { + this.input = input; + this.context = context; + this.query = {}; + this.method = ""; + this.headers = {}; + this.path = ""; + this.body = null; + this.hostname = ""; + this.resolvePathStack = []; + } + async build() { + const { hostname, protocol = "https", port, path: basePath } = await this.context.endpoint(); + this.path = basePath; + for (const resolvePath of this.resolvePathStack) { + resolvePath(this.path); + } + return new HttpRequest({ + protocol, + hostname: this.hostname || hostname, + port, + method: this.method, + path: this.path, + query: this.query, + body: this.body, + headers: this.headers, + }); + } + hn(hostname) { + this.hostname = hostname; + return this; + } + bp(uriLabel) { + this.resolvePathStack.push((basePath) => { + this.path = `${basePath?.endsWith("/") ? 
basePath.slice(0, -1) : basePath || ""}` + uriLabel; + }); + return this; + } + p(memberName, labelValueProvider, uriLabel, isGreedyLabel) { + this.resolvePathStack.push((path) => { + this.path = resolvedPath(path, this.input, memberName, labelValueProvider, uriLabel, isGreedyLabel); + }); + return this; + } + h(headers) { + this.headers = headers; + return this; + } + q(query) { + this.query = query; + return this; + } + b(body) { + this.body = body; + return this; + } + m(method) { + this.method = method; + return this; + } +} diff --git a/amplify/functions/fetchDocuments/node_modules/@smithy/core/dist-es/submodules/protocols/resolve-path.js b/amplify/functions/fetchDocuments/node_modules/@smithy/core/dist-es/submodules/protocols/resolve-path.js new file mode 100644 index 0000000..8483e01 --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@smithy/core/dist-es/submodules/protocols/resolve-path.js @@ -0,0 +1,19 @@ +import { extendedEncodeURIComponent } from "./extended-encode-uri-component"; +export const resolvedPath = (resolvedPath, input, memberName, labelValueProvider, uriLabel, isGreedyLabel) => { + if (input != null && input[memberName] !== undefined) { + const labelValue = labelValueProvider(); + if (labelValue.length <= 0) { + throw new Error("Empty value provided for input HTTP label: " + memberName + "."); + } + resolvedPath = resolvedPath.replace(uriLabel, isGreedyLabel + ? 
labelValue + .split("/") + .map((segment) => extendedEncodeURIComponent(segment)) + .join("/") + : extendedEncodeURIComponent(labelValue)); + } + else { + throw new Error("No value provided for input HTTP label: " + memberName + "."); + } + return resolvedPath; +}; diff --git a/amplify/functions/fetchDocuments/node_modules/@smithy/core/dist-es/submodules/serde/index.js b/amplify/functions/fetchDocuments/node_modules/@smithy/core/dist-es/submodules/serde/index.js new file mode 100644 index 0000000..a70d0dd --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@smithy/core/dist-es/submodules/serde/index.js @@ -0,0 +1 @@ +export * from "./value/NumericValue"; diff --git a/amplify/functions/fetchDocuments/node_modules/@smithy/core/dist-es/submodules/serde/value/NumericValue.js b/amplify/functions/fetchDocuments/node_modules/@smithy/core/dist-es/submodules/serde/value/NumericValue.js new file mode 100644 index 0000000..6af270f --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@smithy/core/dist-es/submodules/serde/value/NumericValue.js @@ -0,0 +1,9 @@ +export class NumericValue { + constructor(string, type) { + this.string = string; + this.type = type; + } +} +export function nv(string) { + return new NumericValue(string, "bigDecimal"); +} diff --git a/amplify/functions/fetchDocuments/node_modules/@smithy/core/dist-es/util-identity-and-auth/DefaultIdentityProviderConfig.js b/amplify/functions/fetchDocuments/node_modules/@smithy/core/dist-es/util-identity-and-auth/DefaultIdentityProviderConfig.js new file mode 100644 index 0000000..3bc1016 --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@smithy/core/dist-es/util-identity-and-auth/DefaultIdentityProviderConfig.js @@ -0,0 +1,13 @@ +export class DefaultIdentityProviderConfig { + constructor(config) { + this.authSchemes = new Map(); + for (const [key, value] of Object.entries(config)) { + if (value !== undefined) { + this.authSchemes.set(key, value); + } + } + } + 
getIdentityProvider(schemeId) { + return this.authSchemes.get(schemeId); + } +} diff --git a/amplify/functions/fetchDocuments/node_modules/@smithy/core/dist-es/util-identity-and-auth/httpAuthSchemes/httpApiKeyAuth.js b/amplify/functions/fetchDocuments/node_modules/@smithy/core/dist-es/util-identity-and-auth/httpAuthSchemes/httpApiKeyAuth.js new file mode 100644 index 0000000..8b6f598 --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@smithy/core/dist-es/util-identity-and-auth/httpAuthSchemes/httpApiKeyAuth.js @@ -0,0 +1,34 @@ +import { HttpRequest } from "@smithy/protocol-http"; +import { HttpApiKeyAuthLocation } from "@smithy/types"; +export class HttpApiKeyAuthSigner { + async sign(httpRequest, identity, signingProperties) { + if (!signingProperties) { + throw new Error("request could not be signed with `apiKey` since the `name` and `in` signer properties are missing"); + } + if (!signingProperties.name) { + throw new Error("request could not be signed with `apiKey` since the `name` signer property is missing"); + } + if (!signingProperties.in) { + throw new Error("request could not be signed with `apiKey` since the `in` signer property is missing"); + } + if (!identity.apiKey) { + throw new Error("request could not be signed with `apiKey` since the `apiKey` is not defined"); + } + const clonedRequest = HttpRequest.clone(httpRequest); + if (signingProperties.in === HttpApiKeyAuthLocation.QUERY) { + clonedRequest.query[signingProperties.name] = identity.apiKey; + } + else if (signingProperties.in === HttpApiKeyAuthLocation.HEADER) { + clonedRequest.headers[signingProperties.name] = signingProperties.scheme + ? 
`${signingProperties.scheme} ${identity.apiKey}` + : identity.apiKey; + } + else { + throw new Error("request can only be signed with `apiKey` locations `query` or `header`, " + + "but found: `" + + signingProperties.in + + "`"); + } + return clonedRequest; + } +} diff --git a/amplify/functions/fetchDocuments/node_modules/@smithy/core/dist-es/util-identity-and-auth/httpAuthSchemes/httpBearerAuth.js b/amplify/functions/fetchDocuments/node_modules/@smithy/core/dist-es/util-identity-and-auth/httpAuthSchemes/httpBearerAuth.js new file mode 100644 index 0000000..b92a9c3 --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@smithy/core/dist-es/util-identity-and-auth/httpAuthSchemes/httpBearerAuth.js @@ -0,0 +1,11 @@ +import { HttpRequest } from "@smithy/protocol-http"; +export class HttpBearerAuthSigner { + async sign(httpRequest, identity, signingProperties) { + const clonedRequest = HttpRequest.clone(httpRequest); + if (!identity.token) { + throw new Error("request could not be signed with `token` since the `token` is not defined"); + } + clonedRequest.headers["Authorization"] = `Bearer ${identity.token}`; + return clonedRequest; + } +} diff --git a/amplify/functions/fetchDocuments/node_modules/@smithy/core/dist-es/util-identity-and-auth/httpAuthSchemes/index.js b/amplify/functions/fetchDocuments/node_modules/@smithy/core/dist-es/util-identity-and-auth/httpAuthSchemes/index.js new file mode 100644 index 0000000..9d240fe --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@smithy/core/dist-es/util-identity-and-auth/httpAuthSchemes/index.js @@ -0,0 +1,3 @@ +export * from "./httpApiKeyAuth"; +export * from "./httpBearerAuth"; +export * from "./noAuth"; diff --git a/amplify/functions/fetchDocuments/node_modules/@smithy/core/dist-es/util-identity-and-auth/httpAuthSchemes/noAuth.js b/amplify/functions/fetchDocuments/node_modules/@smithy/core/dist-es/util-identity-and-auth/httpAuthSchemes/noAuth.js new file mode 100644 index 0000000..356193d --- 
/dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@smithy/core/dist-es/util-identity-and-auth/httpAuthSchemes/noAuth.js @@ -0,0 +1,5 @@ +export class NoAuthSigner { + async sign(httpRequest, identity, signingProperties) { + return httpRequest; + } +} diff --git a/amplify/functions/fetchDocuments/node_modules/@smithy/core/dist-es/util-identity-and-auth/index.js b/amplify/functions/fetchDocuments/node_modules/@smithy/core/dist-es/util-identity-and-auth/index.js new file mode 100644 index 0000000..87ba64b --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@smithy/core/dist-es/util-identity-and-auth/index.js @@ -0,0 +1,3 @@ +export * from "./DefaultIdentityProviderConfig"; +export * from "./httpAuthSchemes"; +export * from "./memoizeIdentityProvider"; diff --git a/amplify/functions/fetchDocuments/node_modules/@smithy/core/dist-es/util-identity-and-auth/memoizeIdentityProvider.js b/amplify/functions/fetchDocuments/node_modules/@smithy/core/dist-es/util-identity-and-auth/memoizeIdentityProvider.js new file mode 100644 index 0000000..8050585 --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@smithy/core/dist-es/util-identity-and-auth/memoizeIdentityProvider.js @@ -0,0 +1,53 @@ +export const createIsIdentityExpiredFunction = (expirationMs) => (identity) => doesIdentityRequireRefresh(identity) && identity.expiration.getTime() - Date.now() < expirationMs; +export const EXPIRATION_MS = 300000; +export const isIdentityExpired = createIsIdentityExpiredFunction(EXPIRATION_MS); +export const doesIdentityRequireRefresh = (identity) => identity.expiration !== undefined; +export const memoizeIdentityProvider = (provider, isExpired, requiresRefresh) => { + if (provider === undefined) { + return undefined; + } + const normalizedProvider = typeof provider !== "function" ? 
async () => Promise.resolve(provider) : provider; + let resolved; + let pending; + let hasResult; + let isConstant = false; + const coalesceProvider = async (options) => { + if (!pending) { + pending = normalizedProvider(options); + } + try { + resolved = await pending; + hasResult = true; + isConstant = false; + } + finally { + pending = undefined; + } + return resolved; + }; + if (isExpired === undefined) { + return async (options) => { + if (!hasResult || options?.forceRefresh) { + resolved = await coalesceProvider(options); + } + return resolved; + }; + } + return async (options) => { + if (!hasResult || options?.forceRefresh) { + resolved = await coalesceProvider(options); + } + if (isConstant) { + return resolved; + } + if (!requiresRefresh(resolved)) { + isConstant = true; + return resolved; + } + if (isExpired(resolved)) { + await coalesceProvider(options); + return resolved; + } + return resolved; + }; +}; diff --git a/amplify/functions/fetchDocuments/node_modules/@smithy/core/dist-types/getSmithyContext.d.ts b/amplify/functions/fetchDocuments/node_modules/@smithy/core/dist-types/getSmithyContext.d.ts new file mode 100644 index 0000000..523ee47 --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@smithy/core/dist-types/getSmithyContext.d.ts @@ -0,0 +1,5 @@ +import { HandlerExecutionContext } from "@smithy/types"; +/** + * @internal + */ +export declare const getSmithyContext: (context: HandlerExecutionContext) => Record; diff --git a/amplify/functions/fetchDocuments/node_modules/@smithy/core/dist-types/index.d.ts b/amplify/functions/fetchDocuments/node_modules/@smithy/core/dist-types/index.d.ts new file mode 100644 index 0000000..1dcdba1 --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@smithy/core/dist-types/index.d.ts @@ -0,0 +1,8 @@ +export * from "./getSmithyContext"; +export * from "./middleware-http-auth-scheme"; +export * from "./middleware-http-signing"; +export * from "./normalizeProvider"; +export { createPaginator 
} from "./pagination/createPaginator"; +export * from "./protocols/requestBuilder"; +export * from "./setFeature"; +export * from "./util-identity-and-auth"; diff --git a/amplify/functions/fetchDocuments/node_modules/@smithy/core/dist-types/middleware-http-auth-scheme/getHttpAuthSchemeEndpointRuleSetPlugin.d.ts b/amplify/functions/fetchDocuments/node_modules/@smithy/core/dist-types/middleware-http-auth-scheme/getHttpAuthSchemeEndpointRuleSetPlugin.d.ts new file mode 100644 index 0000000..996b0de --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@smithy/core/dist-types/middleware-http-auth-scheme/getHttpAuthSchemeEndpointRuleSetPlugin.d.ts @@ -0,0 +1,18 @@ +import { HandlerExecutionContext, HttpAuthSchemeParameters, HttpAuthSchemeParametersProvider, IdentityProviderConfig, Pluggable, RelativeMiddlewareOptions, SerializeHandlerOptions } from "@smithy/types"; +import { PreviouslyResolved } from "./httpAuthSchemeMiddleware"; +/** + * @internal + */ +export declare const httpAuthSchemeEndpointRuleSetMiddlewareOptions: SerializeHandlerOptions & RelativeMiddlewareOptions; +/** + * @internal + */ +interface HttpAuthSchemeEndpointRuleSetPluginOptions { + httpAuthSchemeParametersProvider: HttpAuthSchemeParametersProvider; + identityProviderConfigProvider: (config: TConfig) => Promise; +} +/** + * @internal + */ +export declare const getHttpAuthSchemeEndpointRuleSetPlugin: (config: TConfig & PreviouslyResolved, { httpAuthSchemeParametersProvider, identityProviderConfigProvider, }: HttpAuthSchemeEndpointRuleSetPluginOptions) => Pluggable; +export {}; diff --git a/amplify/functions/fetchDocuments/node_modules/@smithy/core/dist-types/middleware-http-auth-scheme/getHttpAuthSchemePlugin.d.ts b/amplify/functions/fetchDocuments/node_modules/@smithy/core/dist-types/middleware-http-auth-scheme/getHttpAuthSchemePlugin.d.ts new file mode 100644 index 0000000..2e57733 --- /dev/null +++ 
b/amplify/functions/fetchDocuments/node_modules/@smithy/core/dist-types/middleware-http-auth-scheme/getHttpAuthSchemePlugin.d.ts @@ -0,0 +1,18 @@ +import { HandlerExecutionContext, HttpAuthSchemeParameters, HttpAuthSchemeParametersProvider, IdentityProviderConfig, Pluggable, RelativeMiddlewareOptions, SerializeHandlerOptions } from "@smithy/types"; +import { PreviouslyResolved } from "./httpAuthSchemeMiddleware"; +/** + * @internal + */ +export declare const httpAuthSchemeMiddlewareOptions: SerializeHandlerOptions & RelativeMiddlewareOptions; +/** + * @internal + */ +interface HttpAuthSchemePluginOptions { + httpAuthSchemeParametersProvider: HttpAuthSchemeParametersProvider; + identityProviderConfigProvider: (config: TConfig) => Promise; +} +/** + * @internal + */ +export declare const getHttpAuthSchemePlugin: (config: TConfig & PreviouslyResolved, { httpAuthSchemeParametersProvider, identityProviderConfigProvider, }: HttpAuthSchemePluginOptions) => Pluggable; +export {}; diff --git a/amplify/functions/fetchDocuments/node_modules/@smithy/core/dist-types/middleware-http-auth-scheme/httpAuthSchemeMiddleware.d.ts b/amplify/functions/fetchDocuments/node_modules/@smithy/core/dist-types/middleware-http-auth-scheme/httpAuthSchemeMiddleware.d.ts new file mode 100644 index 0000000..50f1ea8 --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@smithy/core/dist-types/middleware-http-auth-scheme/httpAuthSchemeMiddleware.d.ts @@ -0,0 +1,33 @@ +import { HandlerExecutionContext, HttpAuthScheme, HttpAuthSchemeParameters, HttpAuthSchemeParametersProvider, HttpAuthSchemeProvider, IdentityProviderConfig, Provider, SelectedHttpAuthScheme, SerializeMiddleware, SMITHY_CONTEXT_KEY } from "@smithy/types"; +/** + * @internal + */ +export interface PreviouslyResolved { + authSchemePreference?: Provider; + httpAuthSchemes: HttpAuthScheme[]; + httpAuthSchemeProvider: HttpAuthSchemeProvider; +} +/** + * @internal + */ +interface HttpAuthSchemeMiddlewareOptions { + 
httpAuthSchemeParametersProvider: HttpAuthSchemeParametersProvider; + identityProviderConfigProvider: (config: TConfig) => Promise; +} +/** + * @internal + */ +interface HttpAuthSchemeMiddlewareSmithyContext extends Record { + selectedHttpAuthScheme?: SelectedHttpAuthScheme; +} +/** + * @internal + */ +interface HttpAuthSchemeMiddlewareHandlerExecutionContext extends HandlerExecutionContext { + [SMITHY_CONTEXT_KEY]?: HttpAuthSchemeMiddlewareSmithyContext; +} +/** + * @internal + */ +export declare const httpAuthSchemeMiddleware: (config: TConfig & PreviouslyResolved, mwOptions: HttpAuthSchemeMiddlewareOptions) => SerializeMiddleware; +export {}; diff --git a/amplify/functions/fetchDocuments/node_modules/@smithy/core/dist-types/middleware-http-auth-scheme/index.d.ts b/amplify/functions/fetchDocuments/node_modules/@smithy/core/dist-types/middleware-http-auth-scheme/index.d.ts new file mode 100644 index 0000000..5042e7d --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@smithy/core/dist-types/middleware-http-auth-scheme/index.d.ts @@ -0,0 +1,3 @@ +export * from "./httpAuthSchemeMiddleware"; +export * from "./getHttpAuthSchemeEndpointRuleSetPlugin"; +export * from "./getHttpAuthSchemePlugin"; diff --git a/amplify/functions/fetchDocuments/node_modules/@smithy/core/dist-types/middleware-http-auth-scheme/resolveAuthOptions.d.ts b/amplify/functions/fetchDocuments/node_modules/@smithy/core/dist-types/middleware-http-auth-scheme/resolveAuthOptions.d.ts new file mode 100644 index 0000000..52fc604 --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@smithy/core/dist-types/middleware-http-auth-scheme/resolveAuthOptions.d.ts @@ -0,0 +1,10 @@ +import { HttpAuthOption } from "@smithy/types"; +/** + * Resolves list of auth options based on the supported ones, vs the preference list. + * + * @param candidateAuthOptions list of supported auth options selected by the standard + * resolution process (model-based, endpoints 2.0, etc.) 
+ * @param authSchemePreference list of auth schemes preferred by user. + * @returns + */ +export declare const resolveAuthOptions: (candidateAuthOptions: HttpAuthOption[], authSchemePreference: string[]) => HttpAuthOption[]; diff --git a/amplify/functions/fetchDocuments/node_modules/@smithy/core/dist-types/middleware-http-signing/getHttpSigningMiddleware.d.ts b/amplify/functions/fetchDocuments/node_modules/@smithy/core/dist-types/middleware-http-signing/getHttpSigningMiddleware.d.ts new file mode 100644 index 0000000..56c89a2 --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@smithy/core/dist-types/middleware-http-signing/getHttpSigningMiddleware.d.ts @@ -0,0 +1,9 @@ +import { FinalizeRequestHandlerOptions, Pluggable, RelativeMiddlewareOptions } from "@smithy/types"; +/** + * @internal + */ +export declare const httpSigningMiddlewareOptions: FinalizeRequestHandlerOptions & RelativeMiddlewareOptions; +/** + * @internal + */ +export declare const getHttpSigningPlugin: (config: object) => Pluggable; diff --git a/amplify/functions/fetchDocuments/node_modules/@smithy/core/dist-types/middleware-http-signing/httpSigningMiddleware.d.ts b/amplify/functions/fetchDocuments/node_modules/@smithy/core/dist-types/middleware-http-signing/httpSigningMiddleware.d.ts new file mode 100644 index 0000000..3b43611 --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@smithy/core/dist-types/middleware-http-signing/httpSigningMiddleware.d.ts @@ -0,0 +1,5 @@ +import { FinalizeRequestMiddleware } from "@smithy/types"; +/** + * @internal + */ +export declare const httpSigningMiddleware: (config: object) => FinalizeRequestMiddleware; diff --git a/amplify/functions/fetchDocuments/node_modules/@smithy/core/dist-types/middleware-http-signing/index.d.ts b/amplify/functions/fetchDocuments/node_modules/@smithy/core/dist-types/middleware-http-signing/index.d.ts new file mode 100644 index 0000000..7bc6cfe --- /dev/null +++ 
b/amplify/functions/fetchDocuments/node_modules/@smithy/core/dist-types/middleware-http-signing/index.d.ts @@ -0,0 +1,2 @@ +export * from "./httpSigningMiddleware"; +export * from "./getHttpSigningMiddleware"; diff --git a/amplify/functions/fetchDocuments/node_modules/@smithy/core/dist-types/normalizeProvider.d.ts b/amplify/functions/fetchDocuments/node_modules/@smithy/core/dist-types/normalizeProvider.d.ts new file mode 100644 index 0000000..4fe2d9a --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@smithy/core/dist-types/normalizeProvider.d.ts @@ -0,0 +1,7 @@ +import { Provider } from "@smithy/types"; +/** + * @internal + * + * @returns a provider function for the input value if it isn't already one. + */ +export declare const normalizeProvider: (input: T | Provider) => Provider; diff --git a/amplify/functions/fetchDocuments/node_modules/@smithy/core/dist-types/pagination/createPaginator.d.ts b/amplify/functions/fetchDocuments/node_modules/@smithy/core/dist-types/pagination/createPaginator.d.ts new file mode 100644 index 0000000..78fcbe0 --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@smithy/core/dist-types/pagination/createPaginator.d.ts @@ -0,0 +1,7 @@ +import type { PaginationConfiguration, Paginator } from "@smithy/types"; +/** + * @internal + * + * Creates a paginator. 
+ */ +export declare function createPaginator(ClientCtor: any, CommandCtor: any, inputTokenName: string, outputTokenName: string, pageSizeTokenName?: string): (config: PaginationConfigType, input: InputType, ...additionalArguments: any[]) => Paginator; diff --git a/amplify/functions/fetchDocuments/node_modules/@smithy/core/dist-types/protocols/requestBuilder.d.ts b/amplify/functions/fetchDocuments/node_modules/@smithy/core/dist-types/protocols/requestBuilder.d.ts new file mode 100644 index 0000000..8e2f2ef --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@smithy/core/dist-types/protocols/requestBuilder.d.ts @@ -0,0 +1,5 @@ +/** + * @internal + * Backwards compatibility re-export. + */ +export { requestBuilder } from "@smithy/core/protocols"; diff --git a/amplify/functions/fetchDocuments/node_modules/@smithy/core/dist-types/setFeature.d.ts b/amplify/functions/fetchDocuments/node_modules/@smithy/core/dist-types/setFeature.d.ts new file mode 100644 index 0000000..279106c --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@smithy/core/dist-types/setFeature.d.ts @@ -0,0 +1,12 @@ +import type { HandlerExecutionContext, SmithyFeatures } from "@smithy/types"; +/** + * @internal + * Indicates to the request context that a given feature is active. + * + * @param context - handler execution context. + * @param feature - readable name of feature. + * @param value - encoding value of feature. This is required because the + * specification asks the library not to include a runtime lookup of all + * the feature identifiers. 
+ */ +export declare function setFeature(context: HandlerExecutionContext, feature: F, value: SmithyFeatures[F]): void; diff --git a/amplify/functions/fetchDocuments/node_modules/@smithy/core/dist-types/submodules/cbor/cbor-decode.d.ts b/amplify/functions/fetchDocuments/node_modules/@smithy/core/dist-types/submodules/cbor/cbor-decode.d.ts new file mode 100644 index 0000000..baf3961 --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@smithy/core/dist-types/submodules/cbor/cbor-decode.d.ts @@ -0,0 +1,17 @@ +import { CborValueType, Float32, Uint8, Uint32 } from "./cbor-types"; +/** + * @internal + * @param bytes - to be set as the decode source. + * + * Sets the decode bytearray source and its data view. + */ +export declare function setPayload(bytes: Uint8Array): void; +/** + * @internal + * Decodes the data between the two indices. + */ +export declare function decode(at: Uint32, to: Uint32): CborValueType; +/** + * @internal + */ +export declare function bytesToFloat16(a: Uint8, b: Uint8): Float32; diff --git a/amplify/functions/fetchDocuments/node_modules/@smithy/core/dist-types/submodules/cbor/cbor-encode.d.ts b/amplify/functions/fetchDocuments/node_modules/@smithy/core/dist-types/submodules/cbor/cbor-encode.d.ts new file mode 100644 index 0000000..bfc3328 --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@smithy/core/dist-types/submodules/cbor/cbor-encode.d.ts @@ -0,0 +1,9 @@ +/** + * @internal + */ +export declare function toUint8Array(): Uint8Array; +export declare function resize(size: number): void; +/** + * @param _input - JS data object. 
+ */ +export declare function encode(_input: any): void; diff --git a/amplify/functions/fetchDocuments/node_modules/@smithy/core/dist-types/submodules/cbor/cbor-types.d.ts b/amplify/functions/fetchDocuments/node_modules/@smithy/core/dist-types/submodules/cbor/cbor-types.d.ts new file mode 100644 index 0000000..dd41338 --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@smithy/core/dist-types/submodules/cbor/cbor-types.d.ts @@ -0,0 +1,64 @@ +/// +/// +export type CborItemType = undefined | boolean | number | bigint | [CborUnstructuredByteStringType, Uint64] | string | CborTagType; +export type CborTagType = { + tag: Uint64 | number; + value: CborValueType; + [tagSymbol]: true; +}; +export type CborUnstructuredByteStringType = Uint8Array; +export type CborListType = Array; +export type CborMapType = Record; +export type CborCollectionType = CborMapType | CborListType; +export type CborValueType = CborItemType | CborCollectionType | any; +export type CborArgumentLength = 1 | 2 | 4 | 8; +export type CborArgumentLengthOffset = 1 | 2 | 3 | 5 | 9; +export type CborOffset = number; +export type Uint8 = number; +export type Uint32 = number; +export type Uint64 = bigint; +export type Float32 = number; +export type Int64 = bigint; +export type Float16Binary = number; +export type Float32Binary = number; +export type CborMajorType = typeof majorUint64 | typeof majorNegativeInt64 | typeof majorUnstructuredByteString | typeof majorUtf8String | typeof majorList | typeof majorMap | typeof majorTag | typeof majorSpecial; +export declare const majorUint64 = 0; +export declare const majorNegativeInt64 = 1; +export declare const majorUnstructuredByteString = 2; +export declare const majorUtf8String = 3; +export declare const majorList = 4; +export declare const majorMap = 5; +export declare const majorTag = 6; +export declare const majorSpecial = 7; +export declare const specialFalse = 20; +export declare const specialTrue = 21; +export declare const specialNull = 22; 
+export declare const specialUndefined = 23; +export declare const extendedOneByte = 24; +export declare const extendedFloat16 = 25; +export declare const extendedFloat32 = 26; +export declare const extendedFloat64 = 27; +export declare const minorIndefinite = 31; +export declare function alloc(size: number): Uint8Array | Buffer; +/** + * @public + * + * The presence of this symbol as an object key indicates it should be considered a tag + * for CBOR serialization purposes. + * + * The object must also have the properties "tag" and "value". + */ +export declare const tagSymbol: unique symbol; +/** + * @public + * Applies the tag symbol to the object. + */ +export declare function tag(data: { + tag: number | bigint; + value: any; + [tagSymbol]?: true; +}): { + tag: number | bigint; + value: any; + [tagSymbol]: true; +}; diff --git a/amplify/functions/fetchDocuments/node_modules/@smithy/core/dist-types/submodules/cbor/cbor.d.ts b/amplify/functions/fetchDocuments/node_modules/@smithy/core/dist-types/submodules/cbor/cbor.d.ts new file mode 100644 index 0000000..7577213 --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@smithy/core/dist-types/submodules/cbor/cbor.d.ts @@ -0,0 +1,26 @@ +/** + * This implementation is synchronous and only implements the parts of CBOR + * specification used by Smithy RPCv2 CBOR protocol. + * + * This cbor serde implementation is derived from AWS SDK for Go's implementation. + * @see https://github.com/aws/smithy-go/tree/main/encoding/cbor + * + * The cbor-x implementation was also instructional: + * @see https://github.com/kriszyp/cbor-x + */ +export declare const cbor: { + deserialize(payload: Uint8Array): any; + serialize(input: any): Uint8Array; + /** + * @public + * @param size - byte length to allocate. + * + * This may be used to garbage collect the CBOR + * shared encoding buffer space, + * e.g. resizeEncodingBuffer(0); + * + * This may also be used to pre-allocate more space for + * CBOR encoding, e.g. 
resizeEncodingBuffer(100_000_000); + */ + resizeEncodingBuffer(size: number): void; +}; diff --git a/amplify/functions/fetchDocuments/node_modules/@smithy/core/dist-types/submodules/cbor/index.d.ts b/amplify/functions/fetchDocuments/node_modules/@smithy/core/dist-types/submodules/cbor/index.d.ts new file mode 100644 index 0000000..0910d27 --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@smithy/core/dist-types/submodules/cbor/index.d.ts @@ -0,0 +1,3 @@ +export { cbor } from "./cbor"; +export * from "./parseCborBody"; +export { tagSymbol, tag } from "./cbor-types"; diff --git a/amplify/functions/fetchDocuments/node_modules/@smithy/core/dist-types/submodules/cbor/parseCborBody.d.ts b/amplify/functions/fetchDocuments/node_modules/@smithy/core/dist-types/submodules/cbor/parseCborBody.d.ts new file mode 100644 index 0000000..8811679 --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@smithy/core/dist-types/submodules/cbor/parseCborBody.d.ts @@ -0,0 +1,31 @@ +import { HttpRequest as __HttpRequest } from "@smithy/protocol-http"; +import { HeaderBag as __HeaderBag, HttpResponse, SerdeContext as __SerdeContext, SerdeContext } from "@smithy/types"; +import { tag, tagSymbol } from "./cbor-types"; +/** + * @internal + */ +export declare const parseCborBody: (streamBody: any, context: SerdeContext) => any; +/** + * @internal + */ +export declare const dateToTag: (date: Date) => { + tag: number | bigint; + value: any; + [tagSymbol]: true; +}; +/** + * @internal + */ +export declare const parseCborErrorBody: (errorBody: any, context: SerdeContext) => Promise; +/** + * @internal + */ +export declare const loadSmithyRpcV2CborErrorCode: (output: HttpResponse, data: any) => string | undefined; +/** + * @internal + */ +export declare const checkCborResponse: (response: HttpResponse) => void; +/** + * @internal + */ +export declare const buildHttpRpcRequest: (context: __SerdeContext, headers: __HeaderBag, path: string, resolvedHostname: string | 
undefined, body: any) => Promise<__HttpRequest>; diff --git a/amplify/functions/fetchDocuments/node_modules/@smithy/core/dist-types/submodules/protocols/collect-stream-body.d.ts b/amplify/functions/fetchDocuments/node_modules/@smithy/core/dist-types/submodules/protocols/collect-stream-body.d.ts new file mode 100644 index 0000000..b555804 --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@smithy/core/dist-types/submodules/protocols/collect-stream-body.d.ts @@ -0,0 +1,10 @@ +import { SerdeContext } from "@smithy/types"; +import { Uint8ArrayBlobAdapter } from "@smithy/util-stream"; +/** + * @internal + * + * Collect low-level response body stream to Uint8Array. + */ +export declare const collectBody: (streamBody: any, context: { + streamCollector: SerdeContext["streamCollector"]; +}) => Promise; diff --git a/amplify/functions/fetchDocuments/node_modules/@smithy/core/dist-types/submodules/protocols/extended-encode-uri-component.d.ts b/amplify/functions/fetchDocuments/node_modules/@smithy/core/dist-types/submodules/protocols/extended-encode-uri-component.d.ts new file mode 100644 index 0000000..403e9ae --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@smithy/core/dist-types/submodules/protocols/extended-encode-uri-component.d.ts @@ -0,0 +1,7 @@ +/** + * @internal + * + * Function that wraps encodeURIComponent to encode additional characters + * to fully adhere to RFC 3986. 
+ */ +export declare function extendedEncodeURIComponent(str: string): string; diff --git a/amplify/functions/fetchDocuments/node_modules/@smithy/core/dist-types/submodules/protocols/index.d.ts b/amplify/functions/fetchDocuments/node_modules/@smithy/core/dist-types/submodules/protocols/index.d.ts new file mode 100644 index 0000000..a5de22f --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@smithy/core/dist-types/submodules/protocols/index.d.ts @@ -0,0 +1,4 @@ +export * from "./collect-stream-body"; +export * from "./extended-encode-uri-component"; +export * from "./requestBuilder"; +export * from "./resolve-path"; diff --git a/amplify/functions/fetchDocuments/node_modules/@smithy/core/dist-types/submodules/protocols/requestBuilder.d.ts b/amplify/functions/fetchDocuments/node_modules/@smithy/core/dist-types/submodules/protocols/requestBuilder.d.ts new file mode 100644 index 0000000..3013d8a --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@smithy/core/dist-types/submodules/protocols/requestBuilder.d.ts @@ -0,0 +1,51 @@ +import { HttpRequest } from "@smithy/protocol-http"; +import type { SerdeContext } from "@smithy/types"; +/** + * @internal + * used in code-generated serde. + */ +export declare function requestBuilder(input: any, context: SerdeContext): RequestBuilder; +/** + * @internal + */ +export declare class RequestBuilder { + private input; + private context; + private query; + private method; + private headers; + private path; + private body; + private hostname; + private resolvePathStack; + constructor(input: any, context: SerdeContext); + build(): Promise; + /** + * Brevity setter for "hostname". + */ + hn(hostname: string): this; + /** + * Brevity initial builder for "basepath". + */ + bp(uriLabel: string): this; + /** + * Brevity incremental builder for "path". 
+ */ + p(memberName: string, labelValueProvider: () => string | undefined, uriLabel: string, isGreedyLabel: boolean): this; + /** + * Brevity setter for "headers". + */ + h(headers: Record): this; + /** + * Brevity setter for "query". + */ + q(query: Record): this; + /** + * Brevity setter for "body". + */ + b(body: any): this; + /** + * Brevity setter for "method". + */ + m(method: string): this; +} diff --git a/amplify/functions/fetchDocuments/node_modules/@smithy/core/dist-types/submodules/protocols/resolve-path.d.ts b/amplify/functions/fetchDocuments/node_modules/@smithy/core/dist-types/submodules/protocols/resolve-path.d.ts new file mode 100644 index 0000000..03386d6 --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@smithy/core/dist-types/submodules/protocols/resolve-path.d.ts @@ -0,0 +1,4 @@ +/** + * @internal + */ +export declare const resolvedPath: (resolvedPath: string, input: unknown, memberName: string, labelValueProvider: () => string | undefined, uriLabel: string, isGreedyLabel: boolean) => string; diff --git a/amplify/functions/fetchDocuments/node_modules/@smithy/core/dist-types/submodules/serde/index.d.ts b/amplify/functions/fetchDocuments/node_modules/@smithy/core/dist-types/submodules/serde/index.d.ts new file mode 100644 index 0000000..a70d0dd --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@smithy/core/dist-types/submodules/serde/index.d.ts @@ -0,0 +1 @@ +export * from "./value/NumericValue"; diff --git a/amplify/functions/fetchDocuments/node_modules/@smithy/core/dist-types/submodules/serde/value/NumericValue.d.ts b/amplify/functions/fetchDocuments/node_modules/@smithy/core/dist-types/submodules/serde/value/NumericValue.d.ts new file mode 100644 index 0000000..c3736fc --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@smithy/core/dist-types/submodules/serde/value/NumericValue.d.ts @@ -0,0 +1,31 @@ +/** + * Types which may be represented by {@link NumericValue}. 
+ * + * There is currently only one option, because BigInteger and Long should + * use JS BigInt directly, and all other numeric types can be contained in JS Number. + * + * @public + */ +export type NumericType = "bigDecimal"; +/** + * Serialization container for Smithy simple types that do not have a + * direct JavaScript runtime representation. + * + * This container does not perform numeric mathematical operations. + * It is a container for discerning a value's true type. + * + * It allows storage of numeric types not representable in JS without + * making a decision on what numeric library to use. + * + * @public + */ +export declare class NumericValue { + readonly string: string; + readonly type: NumericType; + constructor(string: string, type: NumericType); +} +/** + * Serde shortcut. + * @internal + */ +export declare function nv(string: string): NumericValue; diff --git a/amplify/functions/fetchDocuments/node_modules/@smithy/core/dist-types/ts3.4/getSmithyContext.d.ts b/amplify/functions/fetchDocuments/node_modules/@smithy/core/dist-types/ts3.4/getSmithyContext.d.ts new file mode 100644 index 0000000..14cd7c4 --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@smithy/core/dist-types/ts3.4/getSmithyContext.d.ts @@ -0,0 +1,5 @@ +import { HandlerExecutionContext } from "@smithy/types"; +/** + * @internal + */ +export declare const getSmithyContext: (context: HandlerExecutionContext) => Record; diff --git a/amplify/functions/fetchDocuments/node_modules/@smithy/core/dist-types/ts3.4/index.d.ts b/amplify/functions/fetchDocuments/node_modules/@smithy/core/dist-types/ts3.4/index.d.ts new file mode 100644 index 0000000..347898d --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@smithy/core/dist-types/ts3.4/index.d.ts @@ -0,0 +1,8 @@ +export * from "./getSmithyContext"; +export * from "./middleware-http-auth-scheme"; +export * from "./middleware-http-signing"; +export * from "./normalizeProvider"; +export { createPaginator } from 
"./pagination/createPaginator"; +export * from "./protocols/requestBuilder"; +export * from "./setFeature"; +export * from "./util-identity-and-auth"; diff --git a/amplify/functions/fetchDocuments/node_modules/@smithy/core/dist-types/ts3.4/middleware-http-auth-scheme/getHttpAuthSchemeEndpointRuleSetPlugin.d.ts b/amplify/functions/fetchDocuments/node_modules/@smithy/core/dist-types/ts3.4/middleware-http-auth-scheme/getHttpAuthSchemeEndpointRuleSetPlugin.d.ts new file mode 100644 index 0000000..27e2e26 --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@smithy/core/dist-types/ts3.4/middleware-http-auth-scheme/getHttpAuthSchemeEndpointRuleSetPlugin.d.ts @@ -0,0 +1,18 @@ +import { HandlerExecutionContext, HttpAuthSchemeParameters, HttpAuthSchemeParametersProvider, IdentityProviderConfig, Pluggable, RelativeMiddlewareOptions, SerializeHandlerOptions } from "@smithy/types"; +import { PreviouslyResolved } from "./httpAuthSchemeMiddleware"; +/** + * @internal + */ +export declare const httpAuthSchemeEndpointRuleSetMiddlewareOptions: SerializeHandlerOptions & RelativeMiddlewareOptions; +/** + * @internal + */ +interface HttpAuthSchemeEndpointRuleSetPluginOptions { + httpAuthSchemeParametersProvider: HttpAuthSchemeParametersProvider; + identityProviderConfigProvider: (config: TConfig) => Promise; +} +/** + * @internal + */ +export declare const getHttpAuthSchemeEndpointRuleSetPlugin: (config: TConfig & PreviouslyResolved, { httpAuthSchemeParametersProvider, identityProviderConfigProvider, }: HttpAuthSchemeEndpointRuleSetPluginOptions) => Pluggable; +export {}; diff --git a/amplify/functions/fetchDocuments/node_modules/@smithy/core/dist-types/ts3.4/middleware-http-auth-scheme/getHttpAuthSchemePlugin.d.ts b/amplify/functions/fetchDocuments/node_modules/@smithy/core/dist-types/ts3.4/middleware-http-auth-scheme/getHttpAuthSchemePlugin.d.ts new file mode 100644 index 0000000..531e6ec --- /dev/null +++ 
b/amplify/functions/fetchDocuments/node_modules/@smithy/core/dist-types/ts3.4/middleware-http-auth-scheme/getHttpAuthSchemePlugin.d.ts @@ -0,0 +1,18 @@ +import { HandlerExecutionContext, HttpAuthSchemeParameters, HttpAuthSchemeParametersProvider, IdentityProviderConfig, Pluggable, RelativeMiddlewareOptions, SerializeHandlerOptions } from "@smithy/types"; +import { PreviouslyResolved } from "./httpAuthSchemeMiddleware"; +/** + * @internal + */ +export declare const httpAuthSchemeMiddlewareOptions: SerializeHandlerOptions & RelativeMiddlewareOptions; +/** + * @internal + */ +interface HttpAuthSchemePluginOptions { + httpAuthSchemeParametersProvider: HttpAuthSchemeParametersProvider; + identityProviderConfigProvider: (config: TConfig) => Promise; +} +/** + * @internal + */ +export declare const getHttpAuthSchemePlugin: (config: TConfig & PreviouslyResolved, { httpAuthSchemeParametersProvider, identityProviderConfigProvider, }: HttpAuthSchemePluginOptions) => Pluggable; +export {}; diff --git a/amplify/functions/fetchDocuments/node_modules/@smithy/core/dist-types/ts3.4/middleware-http-auth-scheme/httpAuthSchemeMiddleware.d.ts b/amplify/functions/fetchDocuments/node_modules/@smithy/core/dist-types/ts3.4/middleware-http-auth-scheme/httpAuthSchemeMiddleware.d.ts new file mode 100644 index 0000000..bbeaf5f --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@smithy/core/dist-types/ts3.4/middleware-http-auth-scheme/httpAuthSchemeMiddleware.d.ts @@ -0,0 +1,33 @@ +import { HandlerExecutionContext, HttpAuthScheme, HttpAuthSchemeParameters, HttpAuthSchemeParametersProvider, HttpAuthSchemeProvider, IdentityProviderConfig, Provider, SelectedHttpAuthScheme, SerializeMiddleware, SMITHY_CONTEXT_KEY } from "@smithy/types"; +/** + * @internal + */ +export interface PreviouslyResolved { + authSchemePreference?: Provider; + httpAuthSchemes: HttpAuthScheme[]; + httpAuthSchemeProvider: HttpAuthSchemeProvider; +} +/** + * @internal + */ +interface 
HttpAuthSchemeMiddlewareOptions { + httpAuthSchemeParametersProvider: HttpAuthSchemeParametersProvider; + identityProviderConfigProvider: (config: TConfig) => Promise; +} +/** + * @internal + */ +interface HttpAuthSchemeMiddlewareSmithyContext extends Record { + selectedHttpAuthScheme?: SelectedHttpAuthScheme; +} +/** + * @internal + */ +interface HttpAuthSchemeMiddlewareHandlerExecutionContext extends HandlerExecutionContext { + [SMITHY_CONTEXT_KEY]?: HttpAuthSchemeMiddlewareSmithyContext; +} +/** + * @internal + */ +export declare const httpAuthSchemeMiddleware: (config: TConfig & PreviouslyResolved, mwOptions: HttpAuthSchemeMiddlewareOptions) => SerializeMiddleware; +export {}; diff --git a/amplify/functions/fetchDocuments/node_modules/@smithy/core/dist-types/ts3.4/middleware-http-auth-scheme/index.d.ts b/amplify/functions/fetchDocuments/node_modules/@smithy/core/dist-types/ts3.4/middleware-http-auth-scheme/index.d.ts new file mode 100644 index 0000000..2f275c5 --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@smithy/core/dist-types/ts3.4/middleware-http-auth-scheme/index.d.ts @@ -0,0 +1,3 @@ +export * from "./httpAuthSchemeMiddleware"; +export * from "./getHttpAuthSchemeEndpointRuleSetPlugin"; +export * from "./getHttpAuthSchemePlugin"; diff --git a/amplify/functions/fetchDocuments/node_modules/@smithy/core/dist-types/ts3.4/middleware-http-auth-scheme/resolveAuthOptions.d.ts b/amplify/functions/fetchDocuments/node_modules/@smithy/core/dist-types/ts3.4/middleware-http-auth-scheme/resolveAuthOptions.d.ts new file mode 100644 index 0000000..8088683 --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@smithy/core/dist-types/ts3.4/middleware-http-auth-scheme/resolveAuthOptions.d.ts @@ -0,0 +1,10 @@ +import { HttpAuthOption } from "@smithy/types"; +/** + * Resolves list of auth options based on the supported ones, vs the preference list. 
+ * + * @param candidateAuthOptions list of supported auth options selected by the standard + * resolution process (model-based, endpoints 2.0, etc.) + * @param authSchemePreference list of auth schemes preferred by user. + * @returns + */ +export declare const resolveAuthOptions: (candidateAuthOptions: HttpAuthOption[], authSchemePreference: string[]) => HttpAuthOption[]; diff --git a/amplify/functions/fetchDocuments/node_modules/@smithy/core/dist-types/ts3.4/middleware-http-signing/getHttpSigningMiddleware.d.ts b/amplify/functions/fetchDocuments/node_modules/@smithy/core/dist-types/ts3.4/middleware-http-signing/getHttpSigningMiddleware.d.ts new file mode 100644 index 0000000..a01bb31 --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@smithy/core/dist-types/ts3.4/middleware-http-signing/getHttpSigningMiddleware.d.ts @@ -0,0 +1,9 @@ +import { FinalizeRequestHandlerOptions, Pluggable, RelativeMiddlewareOptions } from "@smithy/types"; +/** + * @internal + */ +export declare const httpSigningMiddlewareOptions: FinalizeRequestHandlerOptions & RelativeMiddlewareOptions; +/** + * @internal + */ +export declare const getHttpSigningPlugin: (config: object) => Pluggable; diff --git a/amplify/functions/fetchDocuments/node_modules/@smithy/core/dist-types/ts3.4/middleware-http-signing/httpSigningMiddleware.d.ts b/amplify/functions/fetchDocuments/node_modules/@smithy/core/dist-types/ts3.4/middleware-http-signing/httpSigningMiddleware.d.ts new file mode 100644 index 0000000..7a86b0b --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@smithy/core/dist-types/ts3.4/middleware-http-signing/httpSigningMiddleware.d.ts @@ -0,0 +1,5 @@ +import { FinalizeRequestMiddleware } from "@smithy/types"; +/** + * @internal + */ +export declare const httpSigningMiddleware: (config: object) => FinalizeRequestMiddleware; diff --git a/amplify/functions/fetchDocuments/node_modules/@smithy/core/dist-types/ts3.4/middleware-http-signing/index.d.ts 
b/amplify/functions/fetchDocuments/node_modules/@smithy/core/dist-types/ts3.4/middleware-http-signing/index.d.ts new file mode 100644 index 0000000..578f26d --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@smithy/core/dist-types/ts3.4/middleware-http-signing/index.d.ts @@ -0,0 +1,2 @@ +export * from "./httpSigningMiddleware"; +export * from "./getHttpSigningMiddleware"; diff --git a/amplify/functions/fetchDocuments/node_modules/@smithy/core/dist-types/ts3.4/normalizeProvider.d.ts b/amplify/functions/fetchDocuments/node_modules/@smithy/core/dist-types/ts3.4/normalizeProvider.d.ts new file mode 100644 index 0000000..594e8fa --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@smithy/core/dist-types/ts3.4/normalizeProvider.d.ts @@ -0,0 +1,7 @@ +import { Provider } from "@smithy/types"; +/** + * @internal + * + * @returns a provider function for the input value if it isn't already one. + */ +export declare const normalizeProvider: (input: T | Provider) => Provider; diff --git a/amplify/functions/fetchDocuments/node_modules/@smithy/core/dist-types/ts3.4/pagination/createPaginator.d.ts b/amplify/functions/fetchDocuments/node_modules/@smithy/core/dist-types/ts3.4/pagination/createPaginator.d.ts new file mode 100644 index 0000000..50400d8 --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@smithy/core/dist-types/ts3.4/pagination/createPaginator.d.ts @@ -0,0 +1,7 @@ +import { PaginationConfiguration, Paginator } from "@smithy/types"; +/** + * @internal + * + * Creates a paginator. 
+ */ +export declare function createPaginator(ClientCtor: any, CommandCtor: any, inputTokenName: string, outputTokenName: string, pageSizeTokenName?: string): (config: PaginationConfigType, input: InputType, ...additionalArguments: any[]) => Paginator; diff --git a/amplify/functions/fetchDocuments/node_modules/@smithy/core/dist-types/ts3.4/protocols/requestBuilder.d.ts b/amplify/functions/fetchDocuments/node_modules/@smithy/core/dist-types/ts3.4/protocols/requestBuilder.d.ts new file mode 100644 index 0000000..25459a8 --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@smithy/core/dist-types/ts3.4/protocols/requestBuilder.d.ts @@ -0,0 +1,5 @@ +/** + * @internal + * Backwards compatibility re-export. + */ +export { requestBuilder } from "@smithy/core/protocols"; diff --git a/amplify/functions/fetchDocuments/node_modules/@smithy/core/dist-types/ts3.4/setFeature.d.ts b/amplify/functions/fetchDocuments/node_modules/@smithy/core/dist-types/ts3.4/setFeature.d.ts new file mode 100644 index 0000000..a1995ab --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@smithy/core/dist-types/ts3.4/setFeature.d.ts @@ -0,0 +1,12 @@ +import { HandlerExecutionContext, SmithyFeatures } from "@smithy/types"; +/** + * @internal + * Indicates to the request context that a given feature is active. + * + * @param context - handler execution context. + * @param feature - readable name of feature. + * @param value - encoding value of feature. This is required because the + * specification asks the library not to include a runtime lookup of all + * the feature identifiers. 
+ */ +export declare function setFeature(context: HandlerExecutionContext, feature: F, value: SmithyFeatures[F]): void; diff --git a/amplify/functions/fetchDocuments/node_modules/@smithy/core/dist-types/ts3.4/submodules/cbor/cbor-decode.d.ts b/amplify/functions/fetchDocuments/node_modules/@smithy/core/dist-types/ts3.4/submodules/cbor/cbor-decode.d.ts new file mode 100644 index 0000000..9ddc992 --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@smithy/core/dist-types/ts3.4/submodules/cbor/cbor-decode.d.ts @@ -0,0 +1,17 @@ +import { CborValueType, Float32, Uint8, Uint32 } from "./cbor-types"; +/** + * @internal + * @param bytes - to be set as the decode source. + * + * Sets the decode bytearray source and its data view. + */ +export declare function setPayload(bytes: Uint8Array): void; +/** + * @internal + * Decodes the data between the two indices. + */ +export declare function decode(at: Uint32, to: Uint32): CborValueType; +/** + * @internal + */ +export declare function bytesToFloat16(a: Uint8, b: Uint8): Float32; diff --git a/amplify/functions/fetchDocuments/node_modules/@smithy/core/dist-types/ts3.4/submodules/cbor/cbor-encode.d.ts b/amplify/functions/fetchDocuments/node_modules/@smithy/core/dist-types/ts3.4/submodules/cbor/cbor-encode.d.ts new file mode 100644 index 0000000..83218b5 --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@smithy/core/dist-types/ts3.4/submodules/cbor/cbor-encode.d.ts @@ -0,0 +1,9 @@ +/** + * @internal + */ +export declare function toUint8Array(): Uint8Array; +export declare function resize(size: number): void; +/** + * @param _input - JS data object. 
+ */ +export declare function encode(_input: any): void; diff --git a/amplify/functions/fetchDocuments/node_modules/@smithy/core/dist-types/ts3.4/submodules/cbor/cbor-types.d.ts b/amplify/functions/fetchDocuments/node_modules/@smithy/core/dist-types/ts3.4/submodules/cbor/cbor-types.d.ts new file mode 100644 index 0000000..e37a6ac --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@smithy/core/dist-types/ts3.4/submodules/cbor/cbor-types.d.ts @@ -0,0 +1,66 @@ +/// +export type CborItemType = undefined | boolean | number | bigint | [ + CborUnstructuredByteStringType, + Uint64 +] | string | CborTagType; +export type CborTagType = { + tag: Uint64 | number; + value: CborValueType; + [tagSymbol]: true; +}; +export type CborUnstructuredByteStringType = Uint8Array; +export type CborListType = Array; +export type CborMapType = Record; +export type CborCollectionType = CborMapType | CborListType; +export type CborValueType = CborItemType | CborCollectionType | any; +export type CborArgumentLength = 1 | 2 | 4 | 8; +export type CborArgumentLengthOffset = 1 | 2 | 3 | 5 | 9; +export type CborOffset = number; +export type Uint8 = number; +export type Uint32 = number; +export type Uint64 = bigint; +export type Float32 = number; +export type Int64 = bigint; +export type Float16Binary = number; +export type Float32Binary = number; +export type CborMajorType = typeof majorUint64 | typeof majorNegativeInt64 | typeof majorUnstructuredByteString | typeof majorUtf8String | typeof majorList | typeof majorMap | typeof majorTag | typeof majorSpecial; +export declare const majorUint64 = 0; +export declare const majorNegativeInt64 = 1; +export declare const majorUnstructuredByteString = 2; +export declare const majorUtf8String = 3; +export declare const majorList = 4; +export declare const majorMap = 5; +export declare const majorTag = 6; +export declare const majorSpecial = 7; +export declare const specialFalse = 20; +export declare const specialTrue = 21; +export declare const 
specialNull = 22; +export declare const specialUndefined = 23; +export declare const extendedOneByte = 24; +export declare const extendedFloat16 = 25; +export declare const extendedFloat32 = 26; +export declare const extendedFloat64 = 27; +export declare const minorIndefinite = 31; +export declare function alloc(size: number): Uint8Array | Buffer; +/** + * @public + * + * The presence of this symbol as an object key indicates it should be considered a tag + * for CBOR serialization purposes. + * + * The object must also have the properties "tag" and "value". + */ +export declare const tagSymbol: unique symbol; +/** + * @public + * Applies the tag symbol to the object. + */ +export declare function tag(data: { + tag: number | bigint; + value: any; + [tagSymbol]?: true; +}): { + tag: number | bigint; + value: any; + [tagSymbol]: true; +}; diff --git a/amplify/functions/fetchDocuments/node_modules/@smithy/core/dist-types/ts3.4/submodules/cbor/cbor.d.ts b/amplify/functions/fetchDocuments/node_modules/@smithy/core/dist-types/ts3.4/submodules/cbor/cbor.d.ts new file mode 100644 index 0000000..d317890 --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@smithy/core/dist-types/ts3.4/submodules/cbor/cbor.d.ts @@ -0,0 +1,26 @@ +/** + * This implementation is synchronous and only implements the parts of CBOR + * specification used by Smithy RPCv2 CBOR protocol. + * + * This cbor serde implementation is derived from AWS SDK for Go's implementation. + * @see https://github.com/aws/smithy-go/tree/main/encoding/cbor + * + * The cbor-x implementation was also instructional: + * @see https://github.com/kriszyp/cbor-x + */ +export declare const cbor: { + deserialize(payload: Uint8Array): any; + serialize(input: any): Uint8Array; + /** + * @public + * @param size - byte length to allocate. + * + * This may be used to garbage collect the CBOR + * shared encoding buffer space, + * e.g. 
resizeEncodingBuffer(0); + * + * This may also be used to pre-allocate more space for + * CBOR encoding, e.g. resizeEncodingBuffer(100_000_000); + */ + resizeEncodingBuffer(size: number): void; +}; diff --git a/amplify/functions/fetchDocuments/node_modules/@smithy/core/dist-types/ts3.4/submodules/cbor/index.d.ts b/amplify/functions/fetchDocuments/node_modules/@smithy/core/dist-types/ts3.4/submodules/cbor/index.d.ts new file mode 100644 index 0000000..63e2787 --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@smithy/core/dist-types/ts3.4/submodules/cbor/index.d.ts @@ -0,0 +1,3 @@ +export { cbor } from "./cbor"; +export * from "./parseCborBody"; +export { tagSymbol, tag } from "./cbor-types"; diff --git a/amplify/functions/fetchDocuments/node_modules/@smithy/core/dist-types/ts3.4/submodules/cbor/parseCborBody.d.ts b/amplify/functions/fetchDocuments/node_modules/@smithy/core/dist-types/ts3.4/submodules/cbor/parseCborBody.d.ts new file mode 100644 index 0000000..90676a2 --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@smithy/core/dist-types/ts3.4/submodules/cbor/parseCborBody.d.ts @@ -0,0 +1,31 @@ +import { HttpRequest as __HttpRequest } from "@smithy/protocol-http"; +import { HeaderBag as __HeaderBag, HttpResponse, SerdeContext as __SerdeContext, SerdeContext } from "@smithy/types"; +import { tag, tagSymbol } from "./cbor-types"; +/** + * @internal + */ +export declare const parseCborBody: (streamBody: any, context: SerdeContext) => any; +/** + * @internal + */ +export declare const dateToTag: (date: Date) => { + tag: number | bigint; + value: any; + [tagSymbol]: true; +}; +/** + * @internal + */ +export declare const parseCborErrorBody: (errorBody: any, context: SerdeContext) => Promise; +/** + * @internal + */ +export declare const loadSmithyRpcV2CborErrorCode: (output: HttpResponse, data: any) => string | undefined; +/** + * @internal + */ +export declare const checkCborResponse: (response: HttpResponse) => void; +/** + * @internal 
+ */ +export declare const buildHttpRpcRequest: (context: __SerdeContext, headers: __HeaderBag, path: string, resolvedHostname: string | undefined, body: any) => Promise<__HttpRequest>; diff --git a/amplify/functions/fetchDocuments/node_modules/@smithy/core/dist-types/ts3.4/submodules/protocols/collect-stream-body.d.ts b/amplify/functions/fetchDocuments/node_modules/@smithy/core/dist-types/ts3.4/submodules/protocols/collect-stream-body.d.ts new file mode 100644 index 0000000..9c5f471 --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@smithy/core/dist-types/ts3.4/submodules/protocols/collect-stream-body.d.ts @@ -0,0 +1,10 @@ +import { SerdeContext } from "@smithy/types"; +import { Uint8ArrayBlobAdapter } from "@smithy/util-stream"; +/** + * @internal + * + * Collect low-level response body stream to Uint8Array. + */ +export declare const collectBody: (streamBody: any, context: { + streamCollector: SerdeContext["streamCollector"]; +}) => Promise; diff --git a/amplify/functions/fetchDocuments/node_modules/@smithy/core/dist-types/ts3.4/submodules/protocols/extended-encode-uri-component.d.ts b/amplify/functions/fetchDocuments/node_modules/@smithy/core/dist-types/ts3.4/submodules/protocols/extended-encode-uri-component.d.ts new file mode 100644 index 0000000..98c3802 --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@smithy/core/dist-types/ts3.4/submodules/protocols/extended-encode-uri-component.d.ts @@ -0,0 +1,7 @@ +/** + * @internal + * + * Function that wraps encodeURIComponent to encode additional characters + * to fully adhere to RFC 3986. 
+ */ +export declare function extendedEncodeURIComponent(str: string): string; diff --git a/amplify/functions/fetchDocuments/node_modules/@smithy/core/dist-types/ts3.4/submodules/protocols/index.d.ts b/amplify/functions/fetchDocuments/node_modules/@smithy/core/dist-types/ts3.4/submodules/protocols/index.d.ts new file mode 100644 index 0000000..4ffc290 --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@smithy/core/dist-types/ts3.4/submodules/protocols/index.d.ts @@ -0,0 +1,4 @@ +export * from "./collect-stream-body"; +export * from "./extended-encode-uri-component"; +export * from "./requestBuilder"; +export * from "./resolve-path"; diff --git a/amplify/functions/fetchDocuments/node_modules/@smithy/core/dist-types/ts3.4/submodules/protocols/requestBuilder.d.ts b/amplify/functions/fetchDocuments/node_modules/@smithy/core/dist-types/ts3.4/submodules/protocols/requestBuilder.d.ts new file mode 100644 index 0000000..0449354 --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@smithy/core/dist-types/ts3.4/submodules/protocols/requestBuilder.d.ts @@ -0,0 +1,51 @@ +import { HttpRequest } from "@smithy/protocol-http"; +import { SerdeContext } from "@smithy/types"; +/** + * @internal + * used in code-generated serde. + */ +export declare function requestBuilder(input: any, context: SerdeContext): RequestBuilder; +/** + * @internal + */ +export declare class RequestBuilder { + private input; + private context; + private query; + private method; + private headers; + private path; + private body; + private hostname; + private resolvePathStack; + constructor(input: any, context: SerdeContext); + build(): Promise; + /** + * Brevity setter for "hostname". + */ + hn(hostname: string): this; + /** + * Brevity initial builder for "basepath". + */ + bp(uriLabel: string): this; + /** + * Brevity incremental builder for "path". 
+ */ + p(memberName: string, labelValueProvider: () => string | undefined, uriLabel: string, isGreedyLabel: boolean): this; + /** + * Brevity setter for "headers". + */ + h(headers: Record): this; + /** + * Brevity setter for "query". + */ + q(query: Record): this; + /** + * Brevity setter for "body". + */ + b(body: any): this; + /** + * Brevity setter for "method". + */ + m(method: string): this; +} diff --git a/amplify/functions/fetchDocuments/node_modules/@smithy/core/dist-types/ts3.4/submodules/protocols/resolve-path.d.ts b/amplify/functions/fetchDocuments/node_modules/@smithy/core/dist-types/ts3.4/submodules/protocols/resolve-path.d.ts new file mode 100644 index 0000000..4c4c443 --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@smithy/core/dist-types/ts3.4/submodules/protocols/resolve-path.d.ts @@ -0,0 +1,4 @@ +/** + * @internal + */ +export declare const resolvedPath: (resolvedPath: string, input: unknown, memberName: string, labelValueProvider: () => string | undefined, uriLabel: string, isGreedyLabel: boolean) => string; diff --git a/amplify/functions/fetchDocuments/node_modules/@smithy/core/dist-types/ts3.4/submodules/serde/index.d.ts b/amplify/functions/fetchDocuments/node_modules/@smithy/core/dist-types/ts3.4/submodules/serde/index.d.ts new file mode 100644 index 0000000..3e78075 --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@smithy/core/dist-types/ts3.4/submodules/serde/index.d.ts @@ -0,0 +1 @@ +export * from "./value/NumericValue"; diff --git a/amplify/functions/fetchDocuments/node_modules/@smithy/core/dist-types/ts3.4/submodules/serde/value/NumericValue.d.ts b/amplify/functions/fetchDocuments/node_modules/@smithy/core/dist-types/ts3.4/submodules/serde/value/NumericValue.d.ts new file mode 100644 index 0000000..00dd3b7 --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@smithy/core/dist-types/ts3.4/submodules/serde/value/NumericValue.d.ts @@ -0,0 +1,31 @@ +/** + * Types which may be represented by 
{@link NumericValue}. + * + * There is currently only one option, because BigInteger and Long should + * use JS BigInt directly, and all other numeric types can be contained in JS Number. + * + * @public + */ +export type NumericType = "bigDecimal"; +/** + * Serialization container for Smithy simple types that do not have a + * direct JavaScript runtime representation. + * + * This container does not perform numeric mathematical operations. + * It is a container for discerning a value's true type. + * + * It allows storage of numeric types not representable in JS without + * making a decision on what numeric library to use. + * + * @public + */ +export declare class NumericValue { + readonly string: string; + readonly type: NumericType; + constructor(string: string, type: NumericType); +} +/** + * Serde shortcut. + * @internal + */ +export declare function nv(string: string): NumericValue; diff --git a/amplify/functions/fetchDocuments/node_modules/@smithy/core/dist-types/ts3.4/util-identity-and-auth/DefaultIdentityProviderConfig.d.ts b/amplify/functions/fetchDocuments/node_modules/@smithy/core/dist-types/ts3.4/util-identity-and-auth/DefaultIdentityProviderConfig.d.ts new file mode 100644 index 0000000..7e80659 --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@smithy/core/dist-types/ts3.4/util-identity-and-auth/DefaultIdentityProviderConfig.d.ts @@ -0,0 +1,15 @@ +import { HttpAuthSchemeId, Identity, IdentityProvider, IdentityProviderConfig } from "@smithy/types"; +/** + * Default implementation of IdentityProviderConfig + * @internal + */ +export declare class DefaultIdentityProviderConfig implements IdentityProviderConfig { + private authSchemes; + /** + * Creates an IdentityProviderConfig with a record of scheme IDs to identity providers. 
+ * + * @param config scheme IDs and identity providers to configure + */ + constructor(config: Record | undefined>); + getIdentityProvider(schemeId: HttpAuthSchemeId): IdentityProvider | undefined; +} diff --git a/amplify/functions/fetchDocuments/node_modules/@smithy/core/dist-types/ts3.4/util-identity-and-auth/httpAuthSchemes/httpApiKeyAuth.d.ts b/amplify/functions/fetchDocuments/node_modules/@smithy/core/dist-types/ts3.4/util-identity-and-auth/httpAuthSchemes/httpApiKeyAuth.d.ts new file mode 100644 index 0000000..3981a1b --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@smithy/core/dist-types/ts3.4/util-identity-and-auth/httpAuthSchemes/httpApiKeyAuth.d.ts @@ -0,0 +1,8 @@ +import { HttpRequest } from "@smithy/protocol-http"; +import { ApiKeyIdentity, HttpRequest as IHttpRequest, HttpSigner } from "@smithy/types"; +/** + * @internal + */ +export declare class HttpApiKeyAuthSigner implements HttpSigner { + sign(httpRequest: HttpRequest, identity: ApiKeyIdentity, signingProperties: Record): Promise; +} diff --git a/amplify/functions/fetchDocuments/node_modules/@smithy/core/dist-types/ts3.4/util-identity-and-auth/httpAuthSchemes/httpBearerAuth.d.ts b/amplify/functions/fetchDocuments/node_modules/@smithy/core/dist-types/ts3.4/util-identity-and-auth/httpAuthSchemes/httpBearerAuth.d.ts new file mode 100644 index 0000000..9c83b1c --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@smithy/core/dist-types/ts3.4/util-identity-and-auth/httpAuthSchemes/httpBearerAuth.d.ts @@ -0,0 +1,8 @@ +import { HttpRequest } from "@smithy/protocol-http"; +import { HttpRequest as IHttpRequest, HttpSigner, TokenIdentity } from "@smithy/types"; +/** + * @internal + */ +export declare class HttpBearerAuthSigner implements HttpSigner { + sign(httpRequest: HttpRequest, identity: TokenIdentity, signingProperties: Record): Promise; +} diff --git 
a/amplify/functions/fetchDocuments/node_modules/@smithy/core/dist-types/ts3.4/util-identity-and-auth/httpAuthSchemes/index.d.ts b/amplify/functions/fetchDocuments/node_modules/@smithy/core/dist-types/ts3.4/util-identity-and-auth/httpAuthSchemes/index.d.ts new file mode 100644 index 0000000..aa5caa8 --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@smithy/core/dist-types/ts3.4/util-identity-and-auth/httpAuthSchemes/index.d.ts @@ -0,0 +1,3 @@ +export * from "./httpApiKeyAuth"; +export * from "./httpBearerAuth"; +export * from "./noAuth"; diff --git a/amplify/functions/fetchDocuments/node_modules/@smithy/core/dist-types/ts3.4/util-identity-and-auth/httpAuthSchemes/noAuth.d.ts b/amplify/functions/fetchDocuments/node_modules/@smithy/core/dist-types/ts3.4/util-identity-and-auth/httpAuthSchemes/noAuth.d.ts new file mode 100644 index 0000000..0d7b612 --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@smithy/core/dist-types/ts3.4/util-identity-and-auth/httpAuthSchemes/noAuth.d.ts @@ -0,0 +1,8 @@ +import { HttpRequest, HttpSigner, Identity } from "@smithy/types"; +/** + * Signer for the synthetic @smithy.api#noAuth auth scheme. 
+ * @internal + */ +export declare class NoAuthSigner implements HttpSigner { + sign(httpRequest: HttpRequest, identity: Identity, signingProperties: Record): Promise; +} diff --git a/amplify/functions/fetchDocuments/node_modules/@smithy/core/dist-types/ts3.4/util-identity-and-auth/index.d.ts b/amplify/functions/fetchDocuments/node_modules/@smithy/core/dist-types/ts3.4/util-identity-and-auth/index.d.ts new file mode 100644 index 0000000..626ade9 --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@smithy/core/dist-types/ts3.4/util-identity-and-auth/index.d.ts @@ -0,0 +1,3 @@ +export * from "./DefaultIdentityProviderConfig"; +export * from "./httpAuthSchemes"; +export * from "./memoizeIdentityProvider"; diff --git a/amplify/functions/fetchDocuments/node_modules/@smithy/core/dist-types/ts3.4/util-identity-and-auth/memoizeIdentityProvider.d.ts b/amplify/functions/fetchDocuments/node_modules/@smithy/core/dist-types/ts3.4/util-identity-and-auth/memoizeIdentityProvider.d.ts new file mode 100644 index 0000000..270aa71 --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@smithy/core/dist-types/ts3.4/util-identity-and-auth/memoizeIdentityProvider.d.ts @@ -0,0 +1,30 @@ +import { Identity, IdentityProvider } from "@smithy/types"; +/** + * @internal + */ +export declare const createIsIdentityExpiredFunction: (expirationMs: number) => (identity: Identity) => boolean; +/** + * @internal + * This may need to be configurable in the future, but for now it is defaulted to 5min. 
+ */ +export declare const EXPIRATION_MS = 300000; +/** + * @internal + */ +export declare const isIdentityExpired: (identity: Identity) => boolean; +/** + * @internal + */ +export declare const doesIdentityRequireRefresh: (identity: Identity) => boolean; +/** + * @internal + */ +export interface MemoizedIdentityProvider { + (options?: Record & { + forceRefresh?: boolean; + }): Promise; +} +/** + * @internal + */ +export declare const memoizeIdentityProvider: (provider: IdentityT | IdentityProvider | undefined, isExpired: (resolved: Identity) => boolean, requiresRefresh: (resolved: Identity) => boolean) => MemoizedIdentityProvider | undefined; diff --git a/amplify/functions/fetchDocuments/node_modules/@smithy/core/dist-types/util-identity-and-auth/DefaultIdentityProviderConfig.d.ts b/amplify/functions/fetchDocuments/node_modules/@smithy/core/dist-types/util-identity-and-auth/DefaultIdentityProviderConfig.d.ts new file mode 100644 index 0000000..0b39204 --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@smithy/core/dist-types/util-identity-and-auth/DefaultIdentityProviderConfig.d.ts @@ -0,0 +1,15 @@ +import { HttpAuthSchemeId, Identity, IdentityProvider, IdentityProviderConfig } from "@smithy/types"; +/** + * Default implementation of IdentityProviderConfig + * @internal + */ +export declare class DefaultIdentityProviderConfig implements IdentityProviderConfig { + private authSchemes; + /** + * Creates an IdentityProviderConfig with a record of scheme IDs to identity providers. 
+ * + * @param config scheme IDs and identity providers to configure + */ + constructor(config: Record | undefined>); + getIdentityProvider(schemeId: HttpAuthSchemeId): IdentityProvider | undefined; +} diff --git a/amplify/functions/fetchDocuments/node_modules/@smithy/core/dist-types/util-identity-and-auth/httpAuthSchemes/httpApiKeyAuth.d.ts b/amplify/functions/fetchDocuments/node_modules/@smithy/core/dist-types/util-identity-and-auth/httpAuthSchemes/httpApiKeyAuth.d.ts new file mode 100644 index 0000000..63de4bc --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@smithy/core/dist-types/util-identity-and-auth/httpAuthSchemes/httpApiKeyAuth.d.ts @@ -0,0 +1,8 @@ +import { HttpRequest } from "@smithy/protocol-http"; +import { ApiKeyIdentity, HttpRequest as IHttpRequest, HttpSigner } from "@smithy/types"; +/** + * @internal + */ +export declare class HttpApiKeyAuthSigner implements HttpSigner { + sign(httpRequest: HttpRequest, identity: ApiKeyIdentity, signingProperties: Record): Promise; +} diff --git a/amplify/functions/fetchDocuments/node_modules/@smithy/core/dist-types/util-identity-and-auth/httpAuthSchemes/httpBearerAuth.d.ts b/amplify/functions/fetchDocuments/node_modules/@smithy/core/dist-types/util-identity-and-auth/httpAuthSchemes/httpBearerAuth.d.ts new file mode 100644 index 0000000..0e31e7d --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@smithy/core/dist-types/util-identity-and-auth/httpAuthSchemes/httpBearerAuth.d.ts @@ -0,0 +1,8 @@ +import { HttpRequest } from "@smithy/protocol-http"; +import { HttpRequest as IHttpRequest, HttpSigner, TokenIdentity } from "@smithy/types"; +/** + * @internal + */ +export declare class HttpBearerAuthSigner implements HttpSigner { + sign(httpRequest: HttpRequest, identity: TokenIdentity, signingProperties: Record): Promise; +} diff --git a/amplify/functions/fetchDocuments/node_modules/@smithy/core/dist-types/util-identity-and-auth/httpAuthSchemes/index.d.ts 
b/amplify/functions/fetchDocuments/node_modules/@smithy/core/dist-types/util-identity-and-auth/httpAuthSchemes/index.d.ts new file mode 100644 index 0000000..9d240fe --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@smithy/core/dist-types/util-identity-and-auth/httpAuthSchemes/index.d.ts @@ -0,0 +1,3 @@ +export * from "./httpApiKeyAuth"; +export * from "./httpBearerAuth"; +export * from "./noAuth"; diff --git a/amplify/functions/fetchDocuments/node_modules/@smithy/core/dist-types/util-identity-and-auth/httpAuthSchemes/noAuth.d.ts b/amplify/functions/fetchDocuments/node_modules/@smithy/core/dist-types/util-identity-and-auth/httpAuthSchemes/noAuth.d.ts new file mode 100644 index 0000000..fc8d6b1 --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@smithy/core/dist-types/util-identity-and-auth/httpAuthSchemes/noAuth.d.ts @@ -0,0 +1,8 @@ +import { HttpRequest, HttpSigner, Identity } from "@smithy/types"; +/** + * Signer for the synthetic @smithy.api#noAuth auth scheme. 
+ * @internal + */ +export declare class NoAuthSigner implements HttpSigner { + sign(httpRequest: HttpRequest, identity: Identity, signingProperties: Record): Promise; +} diff --git a/amplify/functions/fetchDocuments/node_modules/@smithy/core/dist-types/util-identity-and-auth/index.d.ts b/amplify/functions/fetchDocuments/node_modules/@smithy/core/dist-types/util-identity-and-auth/index.d.ts new file mode 100644 index 0000000..87ba64b --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@smithy/core/dist-types/util-identity-and-auth/index.d.ts @@ -0,0 +1,3 @@ +export * from "./DefaultIdentityProviderConfig"; +export * from "./httpAuthSchemes"; +export * from "./memoizeIdentityProvider"; diff --git a/amplify/functions/fetchDocuments/node_modules/@smithy/core/dist-types/util-identity-and-auth/memoizeIdentityProvider.d.ts b/amplify/functions/fetchDocuments/node_modules/@smithy/core/dist-types/util-identity-and-auth/memoizeIdentityProvider.d.ts new file mode 100644 index 0000000..67b3be8 --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@smithy/core/dist-types/util-identity-and-auth/memoizeIdentityProvider.d.ts @@ -0,0 +1,30 @@ +import { Identity, IdentityProvider } from "@smithy/types"; +/** + * @internal + */ +export declare const createIsIdentityExpiredFunction: (expirationMs: number) => (identity: Identity) => boolean; +/** + * @internal + * This may need to be configurable in the future, but for now it is defaulted to 5min. 
+ */ +export declare const EXPIRATION_MS = 300000; +/** + * @internal + */ +export declare const isIdentityExpired: (identity: Identity) => boolean; +/** + * @internal + */ +export declare const doesIdentityRequireRefresh: (identity: Identity) => boolean; +/** + * @internal + */ +export interface MemoizedIdentityProvider { + (options?: Record & { + forceRefresh?: boolean; + }): Promise; +} +/** + * @internal + */ +export declare const memoizeIdentityProvider: (provider: IdentityT | IdentityProvider | undefined, isExpired: (resolved: Identity) => boolean, requiresRefresh: (resolved: Identity) => boolean) => MemoizedIdentityProvider | undefined; diff --git a/amplify/functions/fetchDocuments/node_modules/@smithy/core/package.json b/amplify/functions/fetchDocuments/node_modules/@smithy/core/package.json new file mode 100644 index 0000000..d3776e9 --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@smithy/core/package.json @@ -0,0 +1,112 @@ +{ + "name": "@smithy/core", + "version": "3.3.1", + "scripts": { + "build": "yarn lint && concurrently 'yarn:build:cjs' 'yarn:build:es' 'yarn:build:types && yarn build:types:downlevel'", + "build:cjs": "node ../../scripts/inline core", + "build:es": "yarn g:tsc -p tsconfig.es.json", + "build:types": "yarn g:tsc -p tsconfig.types.json", + "build:types:downlevel": "rimraf dist-types/ts3.4 && downlevel-dts dist-types dist-types/ts3.4", + "stage-release": "rimraf ./.release && yarn pack && mkdir ./.release && tar zxvf ./package.tgz --directory ./.release && rm ./package.tgz", + "clean": "rimraf ./dist-* && rimraf *.tsbuildinfo || exit 0", + "lint": "npx eslint -c ../../.eslintrc.js \"src/**/*.ts\" --fix && node ./scripts/lint", + "format": "prettier --config ../../prettier.config.js --ignore-path ../../.prettierignore --write \"**/*.{ts,md,json}\"", + "test": "yarn g:vitest run", + "test:cbor:perf": "node ./scripts/cbor-perf.mjs", + "test:watch": "yarn g:vitest watch" + }, + "main": "./dist-cjs/index.js", + "module": 
"./dist-es/index.js", + "types": "./dist-types/index.d.ts", + "exports": { + ".": { + "module": "./dist-es/index.js", + "node": "./dist-cjs/index.js", + "import": "./dist-es/index.js", + "require": "./dist-cjs/index.js", + "types": "./dist-types/index.d.ts" + }, + "./package.json": { + "module": "./package.json", + "node": "./package.json", + "import": "./package.json", + "require": "./package.json" + }, + "./cbor": { + "module": "./dist-es/submodules/cbor/index.js", + "node": "./dist-cjs/submodules/cbor/index.js", + "import": "./dist-es/submodules/cbor/index.js", + "require": "./dist-cjs/submodules/cbor/index.js", + "types": "./dist-types/submodules/cbor/index.d.ts" + }, + "./protocols": { + "module": "./dist-es/submodules/protocols/index.js", + "node": "./dist-cjs/submodules/protocols/index.js", + "import": "./dist-es/submodules/protocols/index.js", + "require": "./dist-cjs/submodules/protocols/index.js", + "types": "./dist-types/submodules/protocols/index.d.ts" + }, + "./serde": { + "module": "./dist-es/submodules/serde/index.js", + "node": "./dist-cjs/submodules/serde/index.js", + "import": "./dist-es/submodules/serde/index.js", + "require": "./dist-cjs/submodules/serde/index.js", + "types": "./dist-types/submodules/serde/index.d.ts" + } + }, + "author": { + "name": "AWS Smithy Team", + "email": "", + "url": "https://smithy.io" + }, + "license": "Apache-2.0", + "dependencies": { + "@smithy/middleware-serde": "^4.0.3", + "@smithy/protocol-http": "^5.1.0", + "@smithy/types": "^4.2.0", + "@smithy/util-body-length-browser": "^4.0.0", + "@smithy/util-middleware": "^4.0.2", + "@smithy/util-stream": "^4.2.0", + "@smithy/util-utf8": "^4.0.0", + "tslib": "^2.6.2" + }, + "engines": { + "node": ">=18.0.0" + }, + "typesVersions": { + "<4.0": { + "dist-types/*": [ + "dist-types/ts3.4/*" + ] + } + }, + "files": [ + "./cbor.d.ts", + "./cbor.js", + "./protocols.d.ts", + "./protocols.js", + "./serde.d.ts", + "./serde.js", + "dist-*/**" + ], + "homepage": 
"https://github.com/smithy-lang/smithy-typescript/tree/main/packages/core", + "repository": { + "type": "git", + "url": "https://github.com/smithy-lang/smithy-typescript.git", + "directory": "packages/core" + }, + "devDependencies": { + "@types/node": "^18.11.9", + "concurrently": "7.0.0", + "downlevel-dts": "0.10.1", + "json-bigint": "^1.0.0", + "rimraf": "3.0.2", + "typedoc": "0.23.23" + }, + "typedoc": { + "entryPoint": "src/index.ts" + }, + "publishConfig": { + "directory": ".release/package" + } +} \ No newline at end of file diff --git a/amplify/functions/fetchDocuments/node_modules/@smithy/core/protocols.d.ts b/amplify/functions/fetchDocuments/node_modules/@smithy/core/protocols.d.ts new file mode 100644 index 0000000..e0afd4e --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@smithy/core/protocols.d.ts @@ -0,0 +1,7 @@ +/** + * Do not edit: + * This is a compatibility redirect for contexts that do not understand package.json exports field. + */ +declare module "@smithy/core/protocols" { + export * from "@smithy/core/dist-types/submodules/protocols/index.d"; +} diff --git a/amplify/functions/fetchDocuments/node_modules/@smithy/core/protocols.js b/amplify/functions/fetchDocuments/node_modules/@smithy/core/protocols.js new file mode 100644 index 0000000..43e0c42 --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@smithy/core/protocols.js @@ -0,0 +1,6 @@ + +/** + * Do not edit: + * This is a compatibility redirect for contexts that do not understand package.json exports field. 
+ */ +module.exports = require("./dist-cjs/submodules/protocols/index.js"); diff --git a/amplify/functions/fetchDocuments/node_modules/@smithy/core/serde.d.ts b/amplify/functions/fetchDocuments/node_modules/@smithy/core/serde.d.ts new file mode 100644 index 0000000..9906bb0 --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@smithy/core/serde.d.ts @@ -0,0 +1,7 @@ +/** + * Do not edit: + * This is a compatibility redirect for contexts that do not understand package.json exports field. + */ +declare module "@smithy/core/serde" { + export * from "@smithy/core/dist-types/submodules/serde/index.d"; +} diff --git a/amplify/functions/fetchDocuments/node_modules/@smithy/core/serde.js b/amplify/functions/fetchDocuments/node_modules/@smithy/core/serde.js new file mode 100644 index 0000000..b2d727f --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@smithy/core/serde.js @@ -0,0 +1,6 @@ + +/** + * Do not edit: + * This is a compatibility redirect for contexts that do not understand package.json exports field. + */ +module.exports = require("./dist-cjs/submodules/serde/index.js"); diff --git a/amplify/functions/fetchDocuments/node_modules/@smithy/credential-provider-imds/LICENSE b/amplify/functions/fetchDocuments/node_modules/@smithy/credential-provider-imds/LICENSE new file mode 100644 index 0000000..7b6491b --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@smithy/credential-provider-imds/LICENSE @@ -0,0 +1,201 @@ +Apache License + Version 2.0, January 2004 + http://www.apache.org/licenses/ + + TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION + + 1. Definitions. + + "License" shall mean the terms and conditions for use, reproduction, + and distribution as defined by Sections 1 through 9 of this document. + + "Licensor" shall mean the copyright owner or entity authorized by + the copyright owner that is granting the License. 
+ + "Legal Entity" shall mean the union of the acting entity and all + other entities that control, are controlled by, or are under common + control with that entity. For the purposes of this definition, + "control" means (i) the power, direct or indirect, to cause the + direction or management of such entity, whether by contract or + otherwise, or (ii) ownership of fifty percent (50%) or more of the + outstanding shares, or (iii) beneficial ownership of such entity. + + "You" (or "Your") shall mean an individual or Legal Entity + exercising permissions granted by this License. + + "Source" form shall mean the preferred form for making modifications, + including but not limited to software source code, documentation + source, and configuration files. + + "Object" form shall mean any form resulting from mechanical + transformation or translation of a Source form, including but + not limited to compiled object code, generated documentation, + and conversions to other media types. + + "Work" shall mean the work of authorship, whether in Source or + Object form, made available under the License, as indicated by a + copyright notice that is included in or attached to the work + (an example is provided in the Appendix below). + + "Derivative Works" shall mean any work, whether in Source or Object + form, that is based on (or derived from) the Work and for which the + editorial revisions, annotations, elaborations, or other modifications + represent, as a whole, an original work of authorship. For the purposes + of this License, Derivative Works shall not include works that remain + separable from, or merely link (or bind by name) to the interfaces of, + the Work and Derivative Works thereof. 
+ + "Contribution" shall mean any work of authorship, including + the original version of the Work and any modifications or additions + to that Work or Derivative Works thereof, that is intentionally + submitted to Licensor for inclusion in the Work by the copyright owner + or by an individual or Legal Entity authorized to submit on behalf of + the copyright owner. For the purposes of this definition, "submitted" + means any form of electronic, verbal, or written communication sent + to the Licensor or its representatives, including but not limited to + communication on electronic mailing lists, source code control systems, + and issue tracking systems that are managed by, or on behalf of, the + Licensor for the purpose of discussing and improving the Work, but + excluding communication that is conspicuously marked or otherwise + designated in writing by the copyright owner as "Not a Contribution." + + "Contributor" shall mean Licensor and any individual or Legal Entity + on behalf of whom a Contribution has been received by Licensor and + subsequently incorporated within the Work. + + 2. Grant of Copyright License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + copyright license to reproduce, prepare Derivative Works of, + publicly display, publicly perform, sublicense, and distribute the + Work and such Derivative Works in Source or Object form. + + 3. Grant of Patent License. 
Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + (except as stated in this section) patent license to make, have made, + use, offer to sell, sell, import, and otherwise transfer the Work, + where such license applies only to those patent claims licensable + by such Contributor that are necessarily infringed by their + Contribution(s) alone or by combination of their Contribution(s) + with the Work to which such Contribution(s) was submitted. If You + institute patent litigation against any entity (including a + cross-claim or counterclaim in a lawsuit) alleging that the Work + or a Contribution incorporated within the Work constitutes direct + or contributory patent infringement, then any patent licenses + granted to You under this License for that Work shall terminate + as of the date such litigation is filed. + + 4. Redistribution. You may reproduce and distribute copies of the + Work or Derivative Works thereof in any medium, with or without + modifications, and in Source or Object form, provided that You + meet the following conditions: + + (a) You must give any other recipients of the Work or + Derivative Works a copy of this License; and + + (b) You must cause any modified files to carry prominent notices + stating that You changed the files; and + + (c) You must retain, in the Source form of any Derivative Works + that You distribute, all copyright, patent, trademark, and + attribution notices from the Source form of the Work, + excluding those notices that do not pertain to any part of + the Derivative Works; and + + (d) If the Work includes a "NOTICE" text file as part of its + distribution, then any Derivative Works that You distribute must + include a readable copy of the attribution notices contained + within such NOTICE file, excluding those notices that do not + pertain to any part of the Derivative Works, in at least one + of 
the following places: within a NOTICE text file distributed + as part of the Derivative Works; within the Source form or + documentation, if provided along with the Derivative Works; or, + within a display generated by the Derivative Works, if and + wherever such third-party notices normally appear. The contents + of the NOTICE file are for informational purposes only and + do not modify the License. You may add Your own attribution + notices within Derivative Works that You distribute, alongside + or as an addendum to the NOTICE text from the Work, provided + that such additional attribution notices cannot be construed + as modifying the License. + + You may add Your own copyright statement to Your modifications and + may provide additional or different license terms and conditions + for use, reproduction, or distribution of Your modifications, or + for any such Derivative Works as a whole, provided Your use, + reproduction, and distribution of the Work otherwise complies with + the conditions stated in this License. + + 5. Submission of Contributions. Unless You explicitly state otherwise, + any Contribution intentionally submitted for inclusion in the Work + by You to the Licensor shall be under the terms and conditions of + this License, without any additional terms or conditions. + Notwithstanding the above, nothing herein shall supersede or modify + the terms of any separate license agreement you may have executed + with Licensor regarding such Contributions. + + 6. Trademarks. This License does not grant permission to use the trade + names, trademarks, service marks, or product names of the Licensor, + except as required for reasonable and customary use in describing the + origin of the Work and reproducing the content of the NOTICE file. + + 7. Disclaimer of Warranty. 
Unless required by applicable law or + agreed to in writing, Licensor provides the Work (and each + Contributor provides its Contributions) on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or + implied, including, without limitation, any warranties or conditions + of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A + PARTICULAR PURPOSE. You are solely responsible for determining the + appropriateness of using or redistributing the Work and assume any + risks associated with Your exercise of permissions under this License. + + 8. Limitation of Liability. In no event and under no legal theory, + whether in tort (including negligence), contract, or otherwise, + unless required by applicable law (such as deliberate and grossly + negligent acts) or agreed to in writing, shall any Contributor be + liable to You for damages, including any direct, indirect, special, + incidental, or consequential damages of any character arising as a + result of this License or out of the use or inability to use the + Work (including but not limited to damages for loss of goodwill, + work stoppage, computer failure or malfunction, or any and all + other commercial damages or losses), even if such Contributor + has been advised of the possibility of such damages. + + 9. Accepting Warranty or Additional Liability. While redistributing + the Work or Derivative Works thereof, You may choose to offer, + and charge a fee for, acceptance of support, warranty, indemnity, + or other liability obligations and/or rights consistent with this + License. However, in accepting such obligations, You may act only + on Your own behalf and on Your sole responsibility, not on behalf + of any other Contributor, and only if You agree to indemnify, + defend, and hold each Contributor harmless for any liability + incurred by, or claims asserted against, such Contributor by reason + of your accepting any such warranty or additional liability. 
+ + END OF TERMS AND CONDITIONS + + APPENDIX: How to apply the Apache License to your work. + + To apply the Apache License to your work, attach the following + boilerplate notice, with the fields enclosed by brackets "{}" + replaced with your own identifying information. (Don't include + the brackets!) The text should be enclosed in the appropriate + comment syntax for the file format. We also recommend that a + file or class name and description of purpose be included on the + same "printed page" as the copyright notice for easier + identification within third-party archives. + + Copyright 2018-2020 Amazon.com, Inc. or its affiliates. All Rights Reserved. + + Licensed under the Apache License, Version 2.0 (the "License"); + you may not use this file except in compliance with the License. + You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + + Unless required by applicable law or agreed to in writing, software + distributed under the License is distributed on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + See the License for the specific language governing permissions and + limitations under the License. 
\ No newline at end of file diff --git a/amplify/functions/fetchDocuments/node_modules/@smithy/credential-provider-imds/README.md b/amplify/functions/fetchDocuments/node_modules/@smithy/credential-provider-imds/README.md new file mode 100644 index 0000000..9a8f8a5 --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@smithy/credential-provider-imds/README.md @@ -0,0 +1,11 @@ +# @smithy/credential-provider-imds + +[![NPM version](https://img.shields.io/npm/v/@smithy/credential-provider-imds/latest.svg)](https://www.npmjs.com/package/@smithy/credential-provider-imds) +[![NPM downloads](https://img.shields.io/npm/dm/@smithy/credential-provider-imds.svg)](https://www.npmjs.com/package/@smithy/credential-provider-imds) + +> An internal package + +## Usage + +You probably shouldn't, at least directly. Please use [@smithy/credential-providers](https://www.npmjs.com/package/@smithy/credential-providers) +instead. diff --git a/amplify/functions/fetchDocuments/node_modules/@smithy/credential-provider-imds/dist-cjs/config/Endpoint.js b/amplify/functions/fetchDocuments/node_modules/@smithy/credential-provider-imds/dist-cjs/config/Endpoint.js new file mode 100644 index 0000000..0440577 --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@smithy/credential-provider-imds/dist-cjs/config/Endpoint.js @@ -0,0 +1 @@ +module.exports = require("../index.js"); \ No newline at end of file diff --git a/amplify/functions/fetchDocuments/node_modules/@smithy/credential-provider-imds/dist-cjs/config/EndpointConfigOptions.js b/amplify/functions/fetchDocuments/node_modules/@smithy/credential-provider-imds/dist-cjs/config/EndpointConfigOptions.js new file mode 100644 index 0000000..0440577 --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@smithy/credential-provider-imds/dist-cjs/config/EndpointConfigOptions.js @@ -0,0 +1 @@ +module.exports = require("../index.js"); \ No newline at end of file diff --git 
a/amplify/functions/fetchDocuments/node_modules/@smithy/credential-provider-imds/dist-cjs/config/EndpointMode.js b/amplify/functions/fetchDocuments/node_modules/@smithy/credential-provider-imds/dist-cjs/config/EndpointMode.js new file mode 100644 index 0000000..0440577 --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@smithy/credential-provider-imds/dist-cjs/config/EndpointMode.js @@ -0,0 +1 @@ +module.exports = require("../index.js"); \ No newline at end of file diff --git a/amplify/functions/fetchDocuments/node_modules/@smithy/credential-provider-imds/dist-cjs/config/EndpointModeConfigOptions.js b/amplify/functions/fetchDocuments/node_modules/@smithy/credential-provider-imds/dist-cjs/config/EndpointModeConfigOptions.js new file mode 100644 index 0000000..0440577 --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@smithy/credential-provider-imds/dist-cjs/config/EndpointModeConfigOptions.js @@ -0,0 +1 @@ +module.exports = require("../index.js"); \ No newline at end of file diff --git a/amplify/functions/fetchDocuments/node_modules/@smithy/credential-provider-imds/dist-cjs/error/InstanceMetadataV1FallbackError.js b/amplify/functions/fetchDocuments/node_modules/@smithy/credential-provider-imds/dist-cjs/error/InstanceMetadataV1FallbackError.js new file mode 100644 index 0000000..0440577 --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@smithy/credential-provider-imds/dist-cjs/error/InstanceMetadataV1FallbackError.js @@ -0,0 +1 @@ +module.exports = require("../index.js"); \ No newline at end of file diff --git a/amplify/functions/fetchDocuments/node_modules/@smithy/credential-provider-imds/dist-cjs/fromContainerMetadata.js b/amplify/functions/fetchDocuments/node_modules/@smithy/credential-provider-imds/dist-cjs/fromContainerMetadata.js new file mode 100644 index 0000000..532e610 --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@smithy/credential-provider-imds/dist-cjs/fromContainerMetadata.js @@ -0,0 +1 
@@ +module.exports = require("./index.js"); \ No newline at end of file diff --git a/amplify/functions/fetchDocuments/node_modules/@smithy/credential-provider-imds/dist-cjs/fromInstanceMetadata.js b/amplify/functions/fetchDocuments/node_modules/@smithy/credential-provider-imds/dist-cjs/fromInstanceMetadata.js new file mode 100644 index 0000000..532e610 --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@smithy/credential-provider-imds/dist-cjs/fromInstanceMetadata.js @@ -0,0 +1 @@ +module.exports = require("./index.js"); \ No newline at end of file diff --git a/amplify/functions/fetchDocuments/node_modules/@smithy/credential-provider-imds/dist-cjs/index.js b/amplify/functions/fetchDocuments/node_modules/@smithy/credential-provider-imds/dist-cjs/index.js new file mode 100644 index 0000000..21b3423 --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@smithy/credential-provider-imds/dist-cjs/index.js @@ -0,0 +1,445 @@ +var __defProp = Object.defineProperty; +var __getOwnPropDesc = Object.getOwnPropertyDescriptor; +var __getOwnPropNames = Object.getOwnPropertyNames; +var __hasOwnProp = Object.prototype.hasOwnProperty; +var __name = (target, value) => __defProp(target, "name", { value, configurable: true }); +var __export = (target, all) => { + for (var name in all) + __defProp(target, name, { get: all[name], enumerable: true }); +}; +var __copyProps = (to, from, except, desc) => { + if (from && typeof from === "object" || typeof from === "function") { + for (let key of __getOwnPropNames(from)) + if (!__hasOwnProp.call(to, key) && key !== except) + __defProp(to, key, { get: () => from[key], enumerable: !(desc = __getOwnPropDesc(from, key)) || desc.enumerable }); + } + return to; +}; +var __toCommonJS = (mod) => __copyProps(__defProp({}, "__esModule", { value: true }), mod); + +// src/index.ts +var src_exports = {}; +__export(src_exports, { + DEFAULT_MAX_RETRIES: () => DEFAULT_MAX_RETRIES, + DEFAULT_TIMEOUT: () => DEFAULT_TIMEOUT, + 
ENV_CMDS_AUTH_TOKEN: () => ENV_CMDS_AUTH_TOKEN, + ENV_CMDS_FULL_URI: () => ENV_CMDS_FULL_URI, + ENV_CMDS_RELATIVE_URI: () => ENV_CMDS_RELATIVE_URI, + Endpoint: () => Endpoint, + fromContainerMetadata: () => fromContainerMetadata, + fromInstanceMetadata: () => fromInstanceMetadata, + getInstanceMetadataEndpoint: () => getInstanceMetadataEndpoint, + httpRequest: () => httpRequest, + providerConfigFromInit: () => providerConfigFromInit +}); +module.exports = __toCommonJS(src_exports); + +// src/fromContainerMetadata.ts + +var import_url = require("url"); + +// src/remoteProvider/httpRequest.ts +var import_property_provider = require("@smithy/property-provider"); +var import_buffer = require("buffer"); +var import_http = require("http"); +function httpRequest(options) { + return new Promise((resolve, reject) => { + const req = (0, import_http.request)({ + method: "GET", + ...options, + // Node.js http module doesn't accept hostname with square brackets + // Refs: https://github.com/nodejs/node/issues/39738 + hostname: options.hostname?.replace(/^\[(.+)\]$/, "$1") + }); + req.on("error", (err) => { + reject(Object.assign(new import_property_provider.ProviderError("Unable to connect to instance metadata service"), err)); + req.destroy(); + }); + req.on("timeout", () => { + reject(new import_property_provider.ProviderError("TimeoutError from instance metadata service")); + req.destroy(); + }); + req.on("response", (res) => { + const { statusCode = 400 } = res; + if (statusCode < 200 || 300 <= statusCode) { + reject( + Object.assign(new import_property_provider.ProviderError("Error response received from instance metadata service"), { statusCode }) + ); + req.destroy(); + } + const chunks = []; + res.on("data", (chunk) => { + chunks.push(chunk); + }); + res.on("end", () => { + resolve(import_buffer.Buffer.concat(chunks)); + req.destroy(); + }); + }); + req.end(); + }); +} +__name(httpRequest, "httpRequest"); + +// src/remoteProvider/ImdsCredentials.ts +var 
isImdsCredentials = /* @__PURE__ */ __name((arg) => Boolean(arg) && typeof arg === "object" && typeof arg.AccessKeyId === "string" && typeof arg.SecretAccessKey === "string" && typeof arg.Token === "string" && typeof arg.Expiration === "string", "isImdsCredentials"); +var fromImdsCredentials = /* @__PURE__ */ __name((creds) => ({ + accessKeyId: creds.AccessKeyId, + secretAccessKey: creds.SecretAccessKey, + sessionToken: creds.Token, + expiration: new Date(creds.Expiration), + ...creds.AccountId && { accountId: creds.AccountId } +}), "fromImdsCredentials"); + +// src/remoteProvider/RemoteProviderInit.ts +var DEFAULT_TIMEOUT = 1e3; +var DEFAULT_MAX_RETRIES = 0; +var providerConfigFromInit = /* @__PURE__ */ __name(({ + maxRetries = DEFAULT_MAX_RETRIES, + timeout = DEFAULT_TIMEOUT +}) => ({ maxRetries, timeout }), "providerConfigFromInit"); + +// src/remoteProvider/retry.ts +var retry = /* @__PURE__ */ __name((toRetry, maxRetries) => { + let promise = toRetry(); + for (let i = 0; i < maxRetries; i++) { + promise = promise.catch(toRetry); + } + return promise; +}, "retry"); + +// src/fromContainerMetadata.ts +var ENV_CMDS_FULL_URI = "AWS_CONTAINER_CREDENTIALS_FULL_URI"; +var ENV_CMDS_RELATIVE_URI = "AWS_CONTAINER_CREDENTIALS_RELATIVE_URI"; +var ENV_CMDS_AUTH_TOKEN = "AWS_CONTAINER_AUTHORIZATION_TOKEN"; +var fromContainerMetadata = /* @__PURE__ */ __name((init = {}) => { + const { timeout, maxRetries } = providerConfigFromInit(init); + return () => retry(async () => { + const requestOptions = await getCmdsUri({ logger: init.logger }); + const credsResponse = JSON.parse(await requestFromEcsImds(timeout, requestOptions)); + if (!isImdsCredentials(credsResponse)) { + throw new import_property_provider.CredentialsProviderError("Invalid response received from instance metadata service.", { + logger: init.logger + }); + } + return fromImdsCredentials(credsResponse); + }, maxRetries); +}, "fromContainerMetadata"); +var requestFromEcsImds = /* @__PURE__ */ __name(async (timeout, 
options) => { + if (process.env[ENV_CMDS_AUTH_TOKEN]) { + options.headers = { + ...options.headers, + Authorization: process.env[ENV_CMDS_AUTH_TOKEN] + }; + } + const buffer = await httpRequest({ + ...options, + timeout + }); + return buffer.toString(); +}, "requestFromEcsImds"); +var CMDS_IP = "169.254.170.2"; +var GREENGRASS_HOSTS = { + localhost: true, + "127.0.0.1": true +}; +var GREENGRASS_PROTOCOLS = { + "http:": true, + "https:": true +}; +var getCmdsUri = /* @__PURE__ */ __name(async ({ logger }) => { + if (process.env[ENV_CMDS_RELATIVE_URI]) { + return { + hostname: CMDS_IP, + path: process.env[ENV_CMDS_RELATIVE_URI] + }; + } + if (process.env[ENV_CMDS_FULL_URI]) { + const parsed = (0, import_url.parse)(process.env[ENV_CMDS_FULL_URI]); + if (!parsed.hostname || !(parsed.hostname in GREENGRASS_HOSTS)) { + throw new import_property_provider.CredentialsProviderError(`${parsed.hostname} is not a valid container metadata service hostname`, { + tryNextLink: false, + logger + }); + } + if (!parsed.protocol || !(parsed.protocol in GREENGRASS_PROTOCOLS)) { + throw new import_property_provider.CredentialsProviderError(`${parsed.protocol} is not a valid container metadata service protocol`, { + tryNextLink: false, + logger + }); + } + return { + ...parsed, + port: parsed.port ? 
parseInt(parsed.port, 10) : void 0 + }; + } + throw new import_property_provider.CredentialsProviderError( + `The container metadata credential provider cannot be used unless the ${ENV_CMDS_RELATIVE_URI} or ${ENV_CMDS_FULL_URI} environment variable is set`, + { + tryNextLink: false, + logger + } + ); +}, "getCmdsUri"); + +// src/fromInstanceMetadata.ts + + + +// src/error/InstanceMetadataV1FallbackError.ts + +var InstanceMetadataV1FallbackError = class _InstanceMetadataV1FallbackError extends import_property_provider.CredentialsProviderError { + constructor(message, tryNextLink = true) { + super(message, tryNextLink); + this.tryNextLink = tryNextLink; + this.name = "InstanceMetadataV1FallbackError"; + Object.setPrototypeOf(this, _InstanceMetadataV1FallbackError.prototype); + } + static { + __name(this, "InstanceMetadataV1FallbackError"); + } +}; + +// src/utils/getInstanceMetadataEndpoint.ts +var import_node_config_provider = require("@smithy/node-config-provider"); +var import_url_parser = require("@smithy/url-parser"); + +// src/config/Endpoint.ts +var Endpoint = /* @__PURE__ */ ((Endpoint2) => { + Endpoint2["IPv4"] = "http://169.254.169.254"; + Endpoint2["IPv6"] = "http://[fd00:ec2::254]"; + return Endpoint2; +})(Endpoint || {}); + +// src/config/EndpointConfigOptions.ts +var ENV_ENDPOINT_NAME = "AWS_EC2_METADATA_SERVICE_ENDPOINT"; +var CONFIG_ENDPOINT_NAME = "ec2_metadata_service_endpoint"; +var ENDPOINT_CONFIG_OPTIONS = { + environmentVariableSelector: (env) => env[ENV_ENDPOINT_NAME], + configFileSelector: (profile) => profile[CONFIG_ENDPOINT_NAME], + default: void 0 +}; + +// src/config/EndpointMode.ts +var EndpointMode = /* @__PURE__ */ ((EndpointMode2) => { + EndpointMode2["IPv4"] = "IPv4"; + EndpointMode2["IPv6"] = "IPv6"; + return EndpointMode2; +})(EndpointMode || {}); + +// src/config/EndpointModeConfigOptions.ts +var ENV_ENDPOINT_MODE_NAME = "AWS_EC2_METADATA_SERVICE_ENDPOINT_MODE"; +var CONFIG_ENDPOINT_MODE_NAME = "ec2_metadata_service_endpoint_mode"; 
+var ENDPOINT_MODE_CONFIG_OPTIONS = { + environmentVariableSelector: (env) => env[ENV_ENDPOINT_MODE_NAME], + configFileSelector: (profile) => profile[CONFIG_ENDPOINT_MODE_NAME], + default: "IPv4" /* IPv4 */ +}; + +// src/utils/getInstanceMetadataEndpoint.ts +var getInstanceMetadataEndpoint = /* @__PURE__ */ __name(async () => (0, import_url_parser.parseUrl)(await getFromEndpointConfig() || await getFromEndpointModeConfig()), "getInstanceMetadataEndpoint"); +var getFromEndpointConfig = /* @__PURE__ */ __name(async () => (0, import_node_config_provider.loadConfig)(ENDPOINT_CONFIG_OPTIONS)(), "getFromEndpointConfig"); +var getFromEndpointModeConfig = /* @__PURE__ */ __name(async () => { + const endpointMode = await (0, import_node_config_provider.loadConfig)(ENDPOINT_MODE_CONFIG_OPTIONS)(); + switch (endpointMode) { + case "IPv4" /* IPv4 */: + return "http://169.254.169.254" /* IPv4 */; + case "IPv6" /* IPv6 */: + return "http://[fd00:ec2::254]" /* IPv6 */; + default: + throw new Error(`Unsupported endpoint mode: ${endpointMode}. Select from ${Object.values(EndpointMode)}`); + } +}, "getFromEndpointModeConfig"); + +// src/utils/getExtendedInstanceMetadataCredentials.ts +var STATIC_STABILITY_REFRESH_INTERVAL_SECONDS = 5 * 60; +var STATIC_STABILITY_REFRESH_INTERVAL_JITTER_WINDOW_SECONDS = 5 * 60; +var STATIC_STABILITY_DOC_URL = "https://docs.aws.amazon.com/sdkref/latest/guide/feature-static-credentials.html"; +var getExtendedInstanceMetadataCredentials = /* @__PURE__ */ __name((credentials, logger) => { + const refreshInterval = STATIC_STABILITY_REFRESH_INTERVAL_SECONDS + Math.floor(Math.random() * STATIC_STABILITY_REFRESH_INTERVAL_JITTER_WINDOW_SECONDS); + const newExpiration = new Date(Date.now() + refreshInterval * 1e3); + logger.warn( + `Attempting credential expiration extension due to a credential service availability issue. A refresh of these credentials will be attempted after ${new Date(newExpiration)}. 
+For more information, please visit: ` + STATIC_STABILITY_DOC_URL + ); + const originalExpiration = credentials.originalExpiration ?? credentials.expiration; + return { + ...credentials, + ...originalExpiration ? { originalExpiration } : {}, + expiration: newExpiration + }; +}, "getExtendedInstanceMetadataCredentials"); + +// src/utils/staticStabilityProvider.ts +var staticStabilityProvider = /* @__PURE__ */ __name((provider, options = {}) => { + const logger = options?.logger || console; + let pastCredentials; + return async () => { + let credentials; + try { + credentials = await provider(); + if (credentials.expiration && credentials.expiration.getTime() < Date.now()) { + credentials = getExtendedInstanceMetadataCredentials(credentials, logger); + } + } catch (e) { + if (pastCredentials) { + logger.warn("Credential renew failed: ", e); + credentials = getExtendedInstanceMetadataCredentials(pastCredentials, logger); + } else { + throw e; + } + } + pastCredentials = credentials; + return credentials; + }; +}, "staticStabilityProvider"); + +// src/fromInstanceMetadata.ts +var IMDS_PATH = "/latest/meta-data/iam/security-credentials/"; +var IMDS_TOKEN_PATH = "/latest/api/token"; +var AWS_EC2_METADATA_V1_DISABLED = "AWS_EC2_METADATA_V1_DISABLED"; +var PROFILE_AWS_EC2_METADATA_V1_DISABLED = "ec2_metadata_v1_disabled"; +var X_AWS_EC2_METADATA_TOKEN = "x-aws-ec2-metadata-token"; +var fromInstanceMetadata = /* @__PURE__ */ __name((init = {}) => staticStabilityProvider(getInstanceMetadataProvider(init), { logger: init.logger }), "fromInstanceMetadata"); +var getInstanceMetadataProvider = /* @__PURE__ */ __name((init = {}) => { + let disableFetchToken = false; + const { logger, profile } = init; + const { timeout, maxRetries } = providerConfigFromInit(init); + const getCredentials = /* @__PURE__ */ __name(async (maxRetries2, options) => { + const isImdsV1Fallback = disableFetchToken || options.headers?.[X_AWS_EC2_METADATA_TOKEN] == null; + if (isImdsV1Fallback) { + let 
fallbackBlockedFromProfile = false; + let fallbackBlockedFromProcessEnv = false; + const configValue = await (0, import_node_config_provider.loadConfig)( + { + environmentVariableSelector: (env) => { + const envValue = env[AWS_EC2_METADATA_V1_DISABLED]; + fallbackBlockedFromProcessEnv = !!envValue && envValue !== "false"; + if (envValue === void 0) { + throw new import_property_provider.CredentialsProviderError( + `${AWS_EC2_METADATA_V1_DISABLED} not set in env, checking config file next.`, + { logger: init.logger } + ); + } + return fallbackBlockedFromProcessEnv; + }, + configFileSelector: (profile2) => { + const profileValue = profile2[PROFILE_AWS_EC2_METADATA_V1_DISABLED]; + fallbackBlockedFromProfile = !!profileValue && profileValue !== "false"; + return fallbackBlockedFromProfile; + }, + default: false + }, + { + profile + } + )(); + if (init.ec2MetadataV1Disabled || configValue) { + const causes = []; + if (init.ec2MetadataV1Disabled) + causes.push("credential provider initialization (runtime option ec2MetadataV1Disabled)"); + if (fallbackBlockedFromProfile) + causes.push(`config file profile (${PROFILE_AWS_EC2_METADATA_V1_DISABLED})`); + if (fallbackBlockedFromProcessEnv) + causes.push(`process environment variable (${AWS_EC2_METADATA_V1_DISABLED})`); + throw new InstanceMetadataV1FallbackError( + `AWS EC2 Metadata v1 fallback has been blocked by AWS SDK configuration in the following: [${causes.join( + ", " + )}].` + ); + } + } + const imdsProfile = (await retry(async () => { + let profile2; + try { + profile2 = await getProfile(options); + } catch (err) { + if (err.statusCode === 401) { + disableFetchToken = false; + } + throw err; + } + return profile2; + }, maxRetries2)).trim(); + return retry(async () => { + let creds; + try { + creds = await getCredentialsFromProfile(imdsProfile, options, init); + } catch (err) { + if (err.statusCode === 401) { + disableFetchToken = false; + } + throw err; + } + return creds; + }, maxRetries2); + }, "getCredentials"); 
+ return async () => { + const endpoint = await getInstanceMetadataEndpoint(); + if (disableFetchToken) { + logger?.debug("AWS SDK Instance Metadata", "using v1 fallback (no token fetch)"); + return getCredentials(maxRetries, { ...endpoint, timeout }); + } else { + let token; + try { + token = (await getMetadataToken({ ...endpoint, timeout })).toString(); + } catch (error) { + if (error?.statusCode === 400) { + throw Object.assign(error, { + message: "EC2 Metadata token request returned error" + }); + } else if (error.message === "TimeoutError" || [403, 404, 405].includes(error.statusCode)) { + disableFetchToken = true; + } + logger?.debug("AWS SDK Instance Metadata", "using v1 fallback (initial)"); + return getCredentials(maxRetries, { ...endpoint, timeout }); + } + return getCredentials(maxRetries, { + ...endpoint, + headers: { + [X_AWS_EC2_METADATA_TOKEN]: token + }, + timeout + }); + } + }; +}, "getInstanceMetadataProvider"); +var getMetadataToken = /* @__PURE__ */ __name(async (options) => httpRequest({ + ...options, + path: IMDS_TOKEN_PATH, + method: "PUT", + headers: { + "x-aws-ec2-metadata-token-ttl-seconds": "21600" + } +}), "getMetadataToken"); +var getProfile = /* @__PURE__ */ __name(async (options) => (await httpRequest({ ...options, path: IMDS_PATH })).toString(), "getProfile"); +var getCredentialsFromProfile = /* @__PURE__ */ __name(async (profile, options, init) => { + const credentialsResponse = JSON.parse( + (await httpRequest({ + ...options, + path: IMDS_PATH + profile + })).toString() + ); + if (!isImdsCredentials(credentialsResponse)) { + throw new import_property_provider.CredentialsProviderError("Invalid response received from instance metadata service.", { + logger: init.logger + }); + } + return fromImdsCredentials(credentialsResponse); +}, "getCredentialsFromProfile"); +// Annotate the CommonJS export names for ESM import in node: + +0 && (module.exports = { + httpRequest, + getInstanceMetadataEndpoint, + Endpoint, + ENV_CMDS_FULL_URI, + 
ENV_CMDS_RELATIVE_URI, + ENV_CMDS_AUTH_TOKEN, + fromContainerMetadata, + fromInstanceMetadata, + DEFAULT_TIMEOUT, + DEFAULT_MAX_RETRIES, + providerConfigFromInit +}); + diff --git a/amplify/functions/fetchDocuments/node_modules/@smithy/credential-provider-imds/dist-cjs/remoteProvider/ImdsCredentials.js b/amplify/functions/fetchDocuments/node_modules/@smithy/credential-provider-imds/dist-cjs/remoteProvider/ImdsCredentials.js new file mode 100644 index 0000000..0440577 --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@smithy/credential-provider-imds/dist-cjs/remoteProvider/ImdsCredentials.js @@ -0,0 +1 @@ +module.exports = require("../index.js"); \ No newline at end of file diff --git a/amplify/functions/fetchDocuments/node_modules/@smithy/credential-provider-imds/dist-cjs/remoteProvider/RemoteProviderInit.js b/amplify/functions/fetchDocuments/node_modules/@smithy/credential-provider-imds/dist-cjs/remoteProvider/RemoteProviderInit.js new file mode 100644 index 0000000..0440577 --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@smithy/credential-provider-imds/dist-cjs/remoteProvider/RemoteProviderInit.js @@ -0,0 +1 @@ +module.exports = require("../index.js"); \ No newline at end of file diff --git a/amplify/functions/fetchDocuments/node_modules/@smithy/credential-provider-imds/dist-cjs/remoteProvider/httpRequest.js b/amplify/functions/fetchDocuments/node_modules/@smithy/credential-provider-imds/dist-cjs/remoteProvider/httpRequest.js new file mode 100644 index 0000000..0440577 --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@smithy/credential-provider-imds/dist-cjs/remoteProvider/httpRequest.js @@ -0,0 +1 @@ +module.exports = require("../index.js"); \ No newline at end of file diff --git a/amplify/functions/fetchDocuments/node_modules/@smithy/credential-provider-imds/dist-cjs/remoteProvider/index.js b/amplify/functions/fetchDocuments/node_modules/@smithy/credential-provider-imds/dist-cjs/remoteProvider/index.js new 
file mode 100644 index 0000000..0440577 --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@smithy/credential-provider-imds/dist-cjs/remoteProvider/index.js @@ -0,0 +1 @@ +module.exports = require("../index.js"); \ No newline at end of file diff --git a/amplify/functions/fetchDocuments/node_modules/@smithy/credential-provider-imds/dist-cjs/remoteProvider/retry.js b/amplify/functions/fetchDocuments/node_modules/@smithy/credential-provider-imds/dist-cjs/remoteProvider/retry.js new file mode 100644 index 0000000..0440577 --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@smithy/credential-provider-imds/dist-cjs/remoteProvider/retry.js @@ -0,0 +1 @@ +module.exports = require("../index.js"); \ No newline at end of file diff --git a/amplify/functions/fetchDocuments/node_modules/@smithy/credential-provider-imds/dist-cjs/types.js b/amplify/functions/fetchDocuments/node_modules/@smithy/credential-provider-imds/dist-cjs/types.js new file mode 100644 index 0000000..532e610 --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@smithy/credential-provider-imds/dist-cjs/types.js @@ -0,0 +1 @@ +module.exports = require("./index.js"); \ No newline at end of file diff --git a/amplify/functions/fetchDocuments/node_modules/@smithy/credential-provider-imds/dist-cjs/utils/getExtendedInstanceMetadataCredentials.js b/amplify/functions/fetchDocuments/node_modules/@smithy/credential-provider-imds/dist-cjs/utils/getExtendedInstanceMetadataCredentials.js new file mode 100644 index 0000000..0440577 --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@smithy/credential-provider-imds/dist-cjs/utils/getExtendedInstanceMetadataCredentials.js @@ -0,0 +1 @@ +module.exports = require("../index.js"); \ No newline at end of file diff --git a/amplify/functions/fetchDocuments/node_modules/@smithy/credential-provider-imds/dist-cjs/utils/getInstanceMetadataEndpoint.js 
b/amplify/functions/fetchDocuments/node_modules/@smithy/credential-provider-imds/dist-cjs/utils/getInstanceMetadataEndpoint.js new file mode 100644 index 0000000..0440577 --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@smithy/credential-provider-imds/dist-cjs/utils/getInstanceMetadataEndpoint.js @@ -0,0 +1 @@ +module.exports = require("../index.js"); \ No newline at end of file diff --git a/amplify/functions/fetchDocuments/node_modules/@smithy/credential-provider-imds/dist-cjs/utils/staticStabilityProvider.js b/amplify/functions/fetchDocuments/node_modules/@smithy/credential-provider-imds/dist-cjs/utils/staticStabilityProvider.js new file mode 100644 index 0000000..0440577 --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@smithy/credential-provider-imds/dist-cjs/utils/staticStabilityProvider.js @@ -0,0 +1 @@ +module.exports = require("../index.js"); \ No newline at end of file diff --git a/amplify/functions/fetchDocuments/node_modules/@smithy/credential-provider-imds/dist-es/config/Endpoint.js b/amplify/functions/fetchDocuments/node_modules/@smithy/credential-provider-imds/dist-es/config/Endpoint.js new file mode 100644 index 0000000..b088eb0 --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@smithy/credential-provider-imds/dist-es/config/Endpoint.js @@ -0,0 +1,5 @@ +export var Endpoint; +(function (Endpoint) { + Endpoint["IPv4"] = "http://169.254.169.254"; + Endpoint["IPv6"] = "http://[fd00:ec2::254]"; +})(Endpoint || (Endpoint = {})); diff --git a/amplify/functions/fetchDocuments/node_modules/@smithy/credential-provider-imds/dist-es/config/EndpointConfigOptions.js b/amplify/functions/fetchDocuments/node_modules/@smithy/credential-provider-imds/dist-es/config/EndpointConfigOptions.js new file mode 100644 index 0000000..f043de9 --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@smithy/credential-provider-imds/dist-es/config/EndpointConfigOptions.js @@ -0,0 +1,7 @@ +export const ENV_ENDPOINT_NAME = 
"AWS_EC2_METADATA_SERVICE_ENDPOINT"; +export const CONFIG_ENDPOINT_NAME = "ec2_metadata_service_endpoint"; +export const ENDPOINT_CONFIG_OPTIONS = { + environmentVariableSelector: (env) => env[ENV_ENDPOINT_NAME], + configFileSelector: (profile) => profile[CONFIG_ENDPOINT_NAME], + default: undefined, +}; diff --git a/amplify/functions/fetchDocuments/node_modules/@smithy/credential-provider-imds/dist-es/config/EndpointMode.js b/amplify/functions/fetchDocuments/node_modules/@smithy/credential-provider-imds/dist-es/config/EndpointMode.js new file mode 100644 index 0000000..bace819 --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@smithy/credential-provider-imds/dist-es/config/EndpointMode.js @@ -0,0 +1,5 @@ +export var EndpointMode; +(function (EndpointMode) { + EndpointMode["IPv4"] = "IPv4"; + EndpointMode["IPv6"] = "IPv6"; +})(EndpointMode || (EndpointMode = {})); diff --git a/amplify/functions/fetchDocuments/node_modules/@smithy/credential-provider-imds/dist-es/config/EndpointModeConfigOptions.js b/amplify/functions/fetchDocuments/node_modules/@smithy/credential-provider-imds/dist-es/config/EndpointModeConfigOptions.js new file mode 100644 index 0000000..15b19d0 --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@smithy/credential-provider-imds/dist-es/config/EndpointModeConfigOptions.js @@ -0,0 +1,8 @@ +import { EndpointMode } from "./EndpointMode"; +export const ENV_ENDPOINT_MODE_NAME = "AWS_EC2_METADATA_SERVICE_ENDPOINT_MODE"; +export const CONFIG_ENDPOINT_MODE_NAME = "ec2_metadata_service_endpoint_mode"; +export const ENDPOINT_MODE_CONFIG_OPTIONS = { + environmentVariableSelector: (env) => env[ENV_ENDPOINT_MODE_NAME], + configFileSelector: (profile) => profile[CONFIG_ENDPOINT_MODE_NAME], + default: EndpointMode.IPv4, +}; diff --git a/amplify/functions/fetchDocuments/node_modules/@smithy/credential-provider-imds/dist-es/error/InstanceMetadataV1FallbackError.js 
b/amplify/functions/fetchDocuments/node_modules/@smithy/credential-provider-imds/dist-es/error/InstanceMetadataV1FallbackError.js new file mode 100644 index 0000000..29aaf50 --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@smithy/credential-provider-imds/dist-es/error/InstanceMetadataV1FallbackError.js @@ -0,0 +1,9 @@ +import { CredentialsProviderError } from "@smithy/property-provider"; +export class InstanceMetadataV1FallbackError extends CredentialsProviderError { + constructor(message, tryNextLink = true) { + super(message, tryNextLink); + this.tryNextLink = tryNextLink; + this.name = "InstanceMetadataV1FallbackError"; + Object.setPrototypeOf(this, InstanceMetadataV1FallbackError.prototype); + } +} diff --git a/amplify/functions/fetchDocuments/node_modules/@smithy/credential-provider-imds/dist-es/fromContainerMetadata.js b/amplify/functions/fetchDocuments/node_modules/@smithy/credential-provider-imds/dist-es/fromContainerMetadata.js new file mode 100644 index 0000000..4340e3e --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@smithy/credential-provider-imds/dist-es/fromContainerMetadata.js @@ -0,0 +1,77 @@ +import { CredentialsProviderError } from "@smithy/property-provider"; +import { parse } from "url"; +import { httpRequest } from "./remoteProvider/httpRequest"; +import { fromImdsCredentials, isImdsCredentials } from "./remoteProvider/ImdsCredentials"; +import { providerConfigFromInit } from "./remoteProvider/RemoteProviderInit"; +import { retry } from "./remoteProvider/retry"; +export const ENV_CMDS_FULL_URI = "AWS_CONTAINER_CREDENTIALS_FULL_URI"; +export const ENV_CMDS_RELATIVE_URI = "AWS_CONTAINER_CREDENTIALS_RELATIVE_URI"; +export const ENV_CMDS_AUTH_TOKEN = "AWS_CONTAINER_AUTHORIZATION_TOKEN"; +export const fromContainerMetadata = (init = {}) => { + const { timeout, maxRetries } = providerConfigFromInit(init); + return () => retry(async () => { + const requestOptions = await getCmdsUri({ logger: init.logger }); + const 
credsResponse = JSON.parse(await requestFromEcsImds(timeout, requestOptions)); + if (!isImdsCredentials(credsResponse)) { + throw new CredentialsProviderError("Invalid response received from instance metadata service.", { + logger: init.logger, + }); + } + return fromImdsCredentials(credsResponse); + }, maxRetries); +}; +const requestFromEcsImds = async (timeout, options) => { + if (process.env[ENV_CMDS_AUTH_TOKEN]) { + options.headers = { + ...options.headers, + Authorization: process.env[ENV_CMDS_AUTH_TOKEN], + }; + } + const buffer = await httpRequest({ + ...options, + timeout, + }); + return buffer.toString(); +}; +const CMDS_IP = "169.254.170.2"; +const GREENGRASS_HOSTS = { + localhost: true, + "127.0.0.1": true, +}; +const GREENGRASS_PROTOCOLS = { + "http:": true, + "https:": true, +}; +const getCmdsUri = async ({ logger }) => { + if (process.env[ENV_CMDS_RELATIVE_URI]) { + return { + hostname: CMDS_IP, + path: process.env[ENV_CMDS_RELATIVE_URI], + }; + } + if (process.env[ENV_CMDS_FULL_URI]) { + const parsed = parse(process.env[ENV_CMDS_FULL_URI]); + if (!parsed.hostname || !(parsed.hostname in GREENGRASS_HOSTS)) { + throw new CredentialsProviderError(`${parsed.hostname} is not a valid container metadata service hostname`, { + tryNextLink: false, + logger, + }); + } + if (!parsed.protocol || !(parsed.protocol in GREENGRASS_PROTOCOLS)) { + throw new CredentialsProviderError(`${parsed.protocol} is not a valid container metadata service protocol`, { + tryNextLink: false, + logger, + }); + } + return { + ...parsed, + port: parsed.port ? 
parseInt(parsed.port, 10) : undefined, + }; + } + throw new CredentialsProviderError("The container metadata credential provider cannot be used unless" + + ` the ${ENV_CMDS_RELATIVE_URI} or ${ENV_CMDS_FULL_URI} environment` + + " variable is set", { + tryNextLink: false, + logger, + }); +}; diff --git a/amplify/functions/fetchDocuments/node_modules/@smithy/credential-provider-imds/dist-es/fromInstanceMetadata.js b/amplify/functions/fetchDocuments/node_modules/@smithy/credential-provider-imds/dist-es/fromInstanceMetadata.js new file mode 100644 index 0000000..24ecbfd --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@smithy/credential-provider-imds/dist-es/fromInstanceMetadata.js @@ -0,0 +1,134 @@ +import { loadConfig } from "@smithy/node-config-provider"; +import { CredentialsProviderError } from "@smithy/property-provider"; +import { InstanceMetadataV1FallbackError } from "./error/InstanceMetadataV1FallbackError"; +import { httpRequest } from "./remoteProvider/httpRequest"; +import { fromImdsCredentials, isImdsCredentials } from "./remoteProvider/ImdsCredentials"; +import { providerConfigFromInit } from "./remoteProvider/RemoteProviderInit"; +import { retry } from "./remoteProvider/retry"; +import { getInstanceMetadataEndpoint } from "./utils/getInstanceMetadataEndpoint"; +import { staticStabilityProvider } from "./utils/staticStabilityProvider"; +const IMDS_PATH = "/latest/meta-data/iam/security-credentials/"; +const IMDS_TOKEN_PATH = "/latest/api/token"; +const AWS_EC2_METADATA_V1_DISABLED = "AWS_EC2_METADATA_V1_DISABLED"; +const PROFILE_AWS_EC2_METADATA_V1_DISABLED = "ec2_metadata_v1_disabled"; +const X_AWS_EC2_METADATA_TOKEN = "x-aws-ec2-metadata-token"; +export const fromInstanceMetadata = (init = {}) => staticStabilityProvider(getInstanceMetadataProvider(init), { logger: init.logger }); +const getInstanceMetadataProvider = (init = {}) => { + let disableFetchToken = false; + const { logger, profile } = init; + const { timeout, maxRetries } = 
providerConfigFromInit(init); + const getCredentials = async (maxRetries, options) => { + const isImdsV1Fallback = disableFetchToken || options.headers?.[X_AWS_EC2_METADATA_TOKEN] == null; + if (isImdsV1Fallback) { + let fallbackBlockedFromProfile = false; + let fallbackBlockedFromProcessEnv = false; + const configValue = await loadConfig({ + environmentVariableSelector: (env) => { + const envValue = env[AWS_EC2_METADATA_V1_DISABLED]; + fallbackBlockedFromProcessEnv = !!envValue && envValue !== "false"; + if (envValue === undefined) { + throw new CredentialsProviderError(`${AWS_EC2_METADATA_V1_DISABLED} not set in env, checking config file next.`, { logger: init.logger }); + } + return fallbackBlockedFromProcessEnv; + }, + configFileSelector: (profile) => { + const profileValue = profile[PROFILE_AWS_EC2_METADATA_V1_DISABLED]; + fallbackBlockedFromProfile = !!profileValue && profileValue !== "false"; + return fallbackBlockedFromProfile; + }, + default: false, + }, { + profile, + })(); + if (init.ec2MetadataV1Disabled || configValue) { + const causes = []; + if (init.ec2MetadataV1Disabled) + causes.push("credential provider initialization (runtime option ec2MetadataV1Disabled)"); + if (fallbackBlockedFromProfile) + causes.push(`config file profile (${PROFILE_AWS_EC2_METADATA_V1_DISABLED})`); + if (fallbackBlockedFromProcessEnv) + causes.push(`process environment variable (${AWS_EC2_METADATA_V1_DISABLED})`); + throw new InstanceMetadataV1FallbackError(`AWS EC2 Metadata v1 fallback has been blocked by AWS SDK configuration in the following: [${causes.join(", ")}].`); + } + } + const imdsProfile = (await retry(async () => { + let profile; + try { + profile = await getProfile(options); + } + catch (err) { + if (err.statusCode === 401) { + disableFetchToken = false; + } + throw err; + } + return profile; + }, maxRetries)).trim(); + return retry(async () => { + let creds; + try { + creds = await getCredentialsFromProfile(imdsProfile, options, init); + } + catch (err) { + 
if (err.statusCode === 401) { + disableFetchToken = false; + } + throw err; + } + return creds; + }, maxRetries); + }; + return async () => { + const endpoint = await getInstanceMetadataEndpoint(); + if (disableFetchToken) { + logger?.debug("AWS SDK Instance Metadata", "using v1 fallback (no token fetch)"); + return getCredentials(maxRetries, { ...endpoint, timeout }); + } + else { + let token; + try { + token = (await getMetadataToken({ ...endpoint, timeout })).toString(); + } + catch (error) { + if (error?.statusCode === 400) { + throw Object.assign(error, { + message: "EC2 Metadata token request returned error", + }); + } + else if (error.message === "TimeoutError" || [403, 404, 405].includes(error.statusCode)) { + disableFetchToken = true; + } + logger?.debug("AWS SDK Instance Metadata", "using v1 fallback (initial)"); + return getCredentials(maxRetries, { ...endpoint, timeout }); + } + return getCredentials(maxRetries, { + ...endpoint, + headers: { + [X_AWS_EC2_METADATA_TOKEN]: token, + }, + timeout, + }); + } + }; +}; +const getMetadataToken = async (options) => httpRequest({ + ...options, + path: IMDS_TOKEN_PATH, + method: "PUT", + headers: { + "x-aws-ec2-metadata-token-ttl-seconds": "21600", + }, +}); +const getProfile = async (options) => (await httpRequest({ ...options, path: IMDS_PATH })).toString(); +const getCredentialsFromProfile = async (profile, options, init) => { + const credentialsResponse = JSON.parse((await httpRequest({ + ...options, + path: IMDS_PATH + profile, + })).toString()); + if (!isImdsCredentials(credentialsResponse)) { + throw new CredentialsProviderError("Invalid response received from instance metadata service.", { + logger: init.logger, + }); + } + return fromImdsCredentials(credentialsResponse); +}; diff --git a/amplify/functions/fetchDocuments/node_modules/@smithy/credential-provider-imds/dist-es/index.js b/amplify/functions/fetchDocuments/node_modules/@smithy/credential-provider-imds/dist-es/index.js new file mode 100644 index 
0000000..5362760 --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@smithy/credential-provider-imds/dist-es/index.js @@ -0,0 +1,7 @@ +export * from "./fromContainerMetadata"; +export * from "./fromInstanceMetadata"; +export * from "./remoteProvider/RemoteProviderInit"; +export * from "./types"; +export { httpRequest } from "./remoteProvider/httpRequest"; +export { getInstanceMetadataEndpoint } from "./utils/getInstanceMetadataEndpoint"; +export { Endpoint } from "./config/Endpoint"; diff --git a/amplify/functions/fetchDocuments/node_modules/@smithy/credential-provider-imds/dist-es/remoteProvider/ImdsCredentials.js b/amplify/functions/fetchDocuments/node_modules/@smithy/credential-provider-imds/dist-es/remoteProvider/ImdsCredentials.js new file mode 100644 index 0000000..c559c4f --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@smithy/credential-provider-imds/dist-es/remoteProvider/ImdsCredentials.js @@ -0,0 +1,13 @@ +export const isImdsCredentials = (arg) => Boolean(arg) && + typeof arg === "object" && + typeof arg.AccessKeyId === "string" && + typeof arg.SecretAccessKey === "string" && + typeof arg.Token === "string" && + typeof arg.Expiration === "string"; +export const fromImdsCredentials = (creds) => ({ + accessKeyId: creds.AccessKeyId, + secretAccessKey: creds.SecretAccessKey, + sessionToken: creds.Token, + expiration: new Date(creds.Expiration), + ...(creds.AccountId && { accountId: creds.AccountId }), +}); diff --git a/amplify/functions/fetchDocuments/node_modules/@smithy/credential-provider-imds/dist-es/remoteProvider/RemoteProviderInit.js b/amplify/functions/fetchDocuments/node_modules/@smithy/credential-provider-imds/dist-es/remoteProvider/RemoteProviderInit.js new file mode 100644 index 0000000..39ace38 --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@smithy/credential-provider-imds/dist-es/remoteProvider/RemoteProviderInit.js @@ -0,0 +1,3 @@ +export const DEFAULT_TIMEOUT = 1000; +export const 
DEFAULT_MAX_RETRIES = 0; +export const providerConfigFromInit = ({ maxRetries = DEFAULT_MAX_RETRIES, timeout = DEFAULT_TIMEOUT, }) => ({ maxRetries, timeout }); diff --git a/amplify/functions/fetchDocuments/node_modules/@smithy/credential-provider-imds/dist-es/remoteProvider/httpRequest.js b/amplify/functions/fetchDocuments/node_modules/@smithy/credential-provider-imds/dist-es/remoteProvider/httpRequest.js new file mode 100644 index 0000000..91742d0 --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@smithy/credential-provider-imds/dist-es/remoteProvider/httpRequest.js @@ -0,0 +1,36 @@ +import { ProviderError } from "@smithy/property-provider"; +import { Buffer } from "buffer"; +import { request } from "http"; +export function httpRequest(options) { + return new Promise((resolve, reject) => { + const req = request({ + method: "GET", + ...options, + hostname: options.hostname?.replace(/^\[(.+)\]$/, "$1"), + }); + req.on("error", (err) => { + reject(Object.assign(new ProviderError("Unable to connect to instance metadata service"), err)); + req.destroy(); + }); + req.on("timeout", () => { + reject(new ProviderError("TimeoutError from instance metadata service")); + req.destroy(); + }); + req.on("response", (res) => { + const { statusCode = 400 } = res; + if (statusCode < 200 || 300 <= statusCode) { + reject(Object.assign(new ProviderError("Error response received from instance metadata service"), { statusCode })); + req.destroy(); + } + const chunks = []; + res.on("data", (chunk) => { + chunks.push(chunk); + }); + res.on("end", () => { + resolve(Buffer.concat(chunks)); + req.destroy(); + }); + }); + req.end(); + }); +} diff --git a/amplify/functions/fetchDocuments/node_modules/@smithy/credential-provider-imds/dist-es/remoteProvider/index.js b/amplify/functions/fetchDocuments/node_modules/@smithy/credential-provider-imds/dist-es/remoteProvider/index.js new file mode 100644 index 0000000..d4ad601 --- /dev/null +++ 
b/amplify/functions/fetchDocuments/node_modules/@smithy/credential-provider-imds/dist-es/remoteProvider/index.js @@ -0,0 +1,2 @@ +export * from "./ImdsCredentials"; +export * from "./RemoteProviderInit"; diff --git a/amplify/functions/fetchDocuments/node_modules/@smithy/credential-provider-imds/dist-es/remoteProvider/retry.js b/amplify/functions/fetchDocuments/node_modules/@smithy/credential-provider-imds/dist-es/remoteProvider/retry.js new file mode 100644 index 0000000..22b79bb --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@smithy/credential-provider-imds/dist-es/remoteProvider/retry.js @@ -0,0 +1,7 @@ +export const retry = (toRetry, maxRetries) => { + let promise = toRetry(); + for (let i = 0; i < maxRetries; i++) { + promise = promise.catch(toRetry); + } + return promise; +}; diff --git a/amplify/functions/fetchDocuments/node_modules/@smithy/credential-provider-imds/dist-es/types.js b/amplify/functions/fetchDocuments/node_modules/@smithy/credential-provider-imds/dist-es/types.js new file mode 100644 index 0000000..cb0ff5c --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@smithy/credential-provider-imds/dist-es/types.js @@ -0,0 +1 @@ +export {}; diff --git a/amplify/functions/fetchDocuments/node_modules/@smithy/credential-provider-imds/dist-es/utils/getExtendedInstanceMetadataCredentials.js b/amplify/functions/fetchDocuments/node_modules/@smithy/credential-provider-imds/dist-es/utils/getExtendedInstanceMetadataCredentials.js new file mode 100644 index 0000000..5614692 --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@smithy/credential-provider-imds/dist-es/utils/getExtendedInstanceMetadataCredentials.js @@ -0,0 +1,17 @@ +const STATIC_STABILITY_REFRESH_INTERVAL_SECONDS = 5 * 60; +const STATIC_STABILITY_REFRESH_INTERVAL_JITTER_WINDOW_SECONDS = 5 * 60; +const STATIC_STABILITY_DOC_URL = "https://docs.aws.amazon.com/sdkref/latest/guide/feature-static-credentials.html"; +export const 
getExtendedInstanceMetadataCredentials = (credentials, logger) => { + const refreshInterval = STATIC_STABILITY_REFRESH_INTERVAL_SECONDS + + Math.floor(Math.random() * STATIC_STABILITY_REFRESH_INTERVAL_JITTER_WINDOW_SECONDS); + const newExpiration = new Date(Date.now() + refreshInterval * 1000); + logger.warn("Attempting credential expiration extension due to a credential service availability issue. A refresh of these " + + `credentials will be attempted after ${new Date(newExpiration)}.\nFor more information, please visit: ` + + STATIC_STABILITY_DOC_URL); + const originalExpiration = credentials.originalExpiration ?? credentials.expiration; + return { + ...credentials, + ...(originalExpiration ? { originalExpiration } : {}), + expiration: newExpiration, + }; +}; diff --git a/amplify/functions/fetchDocuments/node_modules/@smithy/credential-provider-imds/dist-es/utils/getInstanceMetadataEndpoint.js b/amplify/functions/fetchDocuments/node_modules/@smithy/credential-provider-imds/dist-es/utils/getInstanceMetadataEndpoint.js new file mode 100644 index 0000000..4c611ad --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@smithy/credential-provider-imds/dist-es/utils/getInstanceMetadataEndpoint.js @@ -0,0 +1,19 @@ +import { loadConfig } from "@smithy/node-config-provider"; +import { parseUrl } from "@smithy/url-parser"; +import { Endpoint as InstanceMetadataEndpoint } from "../config/Endpoint"; +import { ENDPOINT_CONFIG_OPTIONS } from "../config/EndpointConfigOptions"; +import { EndpointMode } from "../config/EndpointMode"; +import { ENDPOINT_MODE_CONFIG_OPTIONS, } from "../config/EndpointModeConfigOptions"; +export const getInstanceMetadataEndpoint = async () => parseUrl((await getFromEndpointConfig()) || (await getFromEndpointModeConfig())); +const getFromEndpointConfig = async () => loadConfig(ENDPOINT_CONFIG_OPTIONS)(); +const getFromEndpointModeConfig = async () => { + const endpointMode = await loadConfig(ENDPOINT_MODE_CONFIG_OPTIONS)(); + switch 
(endpointMode) { + case EndpointMode.IPv4: + return InstanceMetadataEndpoint.IPv4; + case EndpointMode.IPv6: + return InstanceMetadataEndpoint.IPv6; + default: + throw new Error(`Unsupported endpoint mode: ${endpointMode}.` + ` Select from ${Object.values(EndpointMode)}`); + } +}; diff --git a/amplify/functions/fetchDocuments/node_modules/@smithy/credential-provider-imds/dist-es/utils/staticStabilityProvider.js b/amplify/functions/fetchDocuments/node_modules/@smithy/credential-provider-imds/dist-es/utils/staticStabilityProvider.js new file mode 100644 index 0000000..9a1e742 --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@smithy/credential-provider-imds/dist-es/utils/staticStabilityProvider.js @@ -0,0 +1,25 @@ +import { getExtendedInstanceMetadataCredentials } from "./getExtendedInstanceMetadataCredentials"; +export const staticStabilityProvider = (provider, options = {}) => { + const logger = options?.logger || console; + let pastCredentials; + return async () => { + let credentials; + try { + credentials = await provider(); + if (credentials.expiration && credentials.expiration.getTime() < Date.now()) { + credentials = getExtendedInstanceMetadataCredentials(credentials, logger); + } + } + catch (e) { + if (pastCredentials) { + logger.warn("Credential renew failed: ", e); + credentials = getExtendedInstanceMetadataCredentials(pastCredentials, logger); + } + else { + throw e; + } + } + pastCredentials = credentials; + return credentials; + }; +}; diff --git a/amplify/functions/fetchDocuments/node_modules/@smithy/credential-provider-imds/dist-types/config/Endpoint.d.ts b/amplify/functions/fetchDocuments/node_modules/@smithy/credential-provider-imds/dist-types/config/Endpoint.d.ts new file mode 100644 index 0000000..000e313 --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@smithy/credential-provider-imds/dist-types/config/Endpoint.d.ts @@ -0,0 +1,7 @@ +/** + * @internal + */ +export declare enum Endpoint { + IPv4 = 
"http://169.254.169.254", + IPv6 = "http://[fd00:ec2::254]" +} diff --git a/amplify/functions/fetchDocuments/node_modules/@smithy/credential-provider-imds/dist-types/config/EndpointConfigOptions.d.ts b/amplify/functions/fetchDocuments/node_modules/@smithy/credential-provider-imds/dist-types/config/EndpointConfigOptions.d.ts new file mode 100644 index 0000000..c03e22c --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@smithy/credential-provider-imds/dist-types/config/EndpointConfigOptions.d.ts @@ -0,0 +1,13 @@ +import { LoadedConfigSelectors } from "@smithy/node-config-provider"; +/** + * @internal + */ +export declare const ENV_ENDPOINT_NAME = "AWS_EC2_METADATA_SERVICE_ENDPOINT"; +/** + * @internal + */ +export declare const CONFIG_ENDPOINT_NAME = "ec2_metadata_service_endpoint"; +/** + * @internal + */ +export declare const ENDPOINT_CONFIG_OPTIONS: LoadedConfigSelectors; diff --git a/amplify/functions/fetchDocuments/node_modules/@smithy/credential-provider-imds/dist-types/config/EndpointMode.d.ts b/amplify/functions/fetchDocuments/node_modules/@smithy/credential-provider-imds/dist-types/config/EndpointMode.d.ts new file mode 100644 index 0000000..db70619 --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@smithy/credential-provider-imds/dist-types/config/EndpointMode.d.ts @@ -0,0 +1,7 @@ +/** + * @internal + */ +export declare enum EndpointMode { + IPv4 = "IPv4", + IPv6 = "IPv6" +} diff --git a/amplify/functions/fetchDocuments/node_modules/@smithy/credential-provider-imds/dist-types/config/EndpointModeConfigOptions.d.ts b/amplify/functions/fetchDocuments/node_modules/@smithy/credential-provider-imds/dist-types/config/EndpointModeConfigOptions.d.ts new file mode 100644 index 0000000..c743199 --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@smithy/credential-provider-imds/dist-types/config/EndpointModeConfigOptions.d.ts @@ -0,0 +1,13 @@ +import { LoadedConfigSelectors } from "@smithy/node-config-provider"; +/** + * 
@internal + */ +export declare const ENV_ENDPOINT_MODE_NAME = "AWS_EC2_METADATA_SERVICE_ENDPOINT_MODE"; +/** + * @internal + */ +export declare const CONFIG_ENDPOINT_MODE_NAME = "ec2_metadata_service_endpoint_mode"; +/** + * @internal + */ +export declare const ENDPOINT_MODE_CONFIG_OPTIONS: LoadedConfigSelectors; diff --git a/amplify/functions/fetchDocuments/node_modules/@smithy/credential-provider-imds/dist-types/error/InstanceMetadataV1FallbackError.d.ts b/amplify/functions/fetchDocuments/node_modules/@smithy/credential-provider-imds/dist-types/error/InstanceMetadataV1FallbackError.d.ts new file mode 100644 index 0000000..8338ccb --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@smithy/credential-provider-imds/dist-types/error/InstanceMetadataV1FallbackError.d.ts @@ -0,0 +1,12 @@ +import { CredentialsProviderError } from "@smithy/property-provider"; +/** + * @public + * + * A specific sub-case of CredentialsProviderError, when the IMDSv1 fallback + * has been attempted but shut off by SDK configuration. 
+ */ +export declare class InstanceMetadataV1FallbackError extends CredentialsProviderError { + readonly tryNextLink: boolean; + name: string; + constructor(message: string, tryNextLink?: boolean); +} diff --git a/amplify/functions/fetchDocuments/node_modules/@smithy/credential-provider-imds/dist-types/fromContainerMetadata.d.ts b/amplify/functions/fetchDocuments/node_modules/@smithy/credential-provider-imds/dist-types/fromContainerMetadata.d.ts new file mode 100644 index 0000000..f6f28f0 --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@smithy/credential-provider-imds/dist-types/fromContainerMetadata.d.ts @@ -0,0 +1,21 @@ +import { AwsCredentialIdentityProvider } from "@smithy/types"; +import { RemoteProviderInit } from "./remoteProvider/RemoteProviderInit"; +/** + * @internal + */ +export declare const ENV_CMDS_FULL_URI = "AWS_CONTAINER_CREDENTIALS_FULL_URI"; +/** + * @internal + */ +export declare const ENV_CMDS_RELATIVE_URI = "AWS_CONTAINER_CREDENTIALS_RELATIVE_URI"; +/** + * @internal + */ +export declare const ENV_CMDS_AUTH_TOKEN = "AWS_CONTAINER_AUTHORIZATION_TOKEN"; +/** + * @internal + * + * Creates a credential provider that will source credentials from the ECS + * Container Metadata Service + */ +export declare const fromContainerMetadata: (init?: RemoteProviderInit) => AwsCredentialIdentityProvider; diff --git a/amplify/functions/fetchDocuments/node_modules/@smithy/credential-provider-imds/dist-types/fromInstanceMetadata.d.ts b/amplify/functions/fetchDocuments/node_modules/@smithy/credential-provider-imds/dist-types/fromInstanceMetadata.d.ts new file mode 100644 index 0000000..24db95a --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@smithy/credential-provider-imds/dist-types/fromInstanceMetadata.d.ts @@ -0,0 +1,10 @@ +import { Provider } from "@smithy/types"; +import { RemoteProviderInit } from "./remoteProvider/RemoteProviderInit"; +import { InstanceMetadataCredentials } from "./types"; +/** + * @internal + * + * 
Creates a credential provider that will source credentials from the EC2 + * Instance Metadata Service + */ +export declare const fromInstanceMetadata: (init?: RemoteProviderInit) => Provider; diff --git a/amplify/functions/fetchDocuments/node_modules/@smithy/credential-provider-imds/dist-types/index.d.ts b/amplify/functions/fetchDocuments/node_modules/@smithy/credential-provider-imds/dist-types/index.d.ts new file mode 100644 index 0000000..5a87b2f --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@smithy/credential-provider-imds/dist-types/index.d.ts @@ -0,0 +1,28 @@ +/** + * @internal + */ +export * from "./fromContainerMetadata"; +/** + * @internal + */ +export * from "./fromInstanceMetadata"; +/** + * @internal + */ +export * from "./remoteProvider/RemoteProviderInit"; +/** + * @internal + */ +export * from "./types"; +/** + * @internal + */ +export { httpRequest } from "./remoteProvider/httpRequest"; +/** + * @internal + */ +export { getInstanceMetadataEndpoint } from "./utils/getInstanceMetadataEndpoint"; +/** + * @internal + */ +export { Endpoint } from "./config/Endpoint"; diff --git a/amplify/functions/fetchDocuments/node_modules/@smithy/credential-provider-imds/dist-types/remoteProvider/ImdsCredentials.d.ts b/amplify/functions/fetchDocuments/node_modules/@smithy/credential-provider-imds/dist-types/remoteProvider/ImdsCredentials.d.ts new file mode 100644 index 0000000..c2c7d51 --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@smithy/credential-provider-imds/dist-types/remoteProvider/ImdsCredentials.d.ts @@ -0,0 +1,19 @@ +import { AwsCredentialIdentity } from "@smithy/types"; +/** + * @internal + */ +export interface ImdsCredentials { + AccessKeyId: string; + SecretAccessKey: string; + Token: string; + Expiration: string; + AccountId?: string; +} +/** + * @internal + */ +export declare const isImdsCredentials: (arg: any) => arg is ImdsCredentials; +/** + * @internal + */ +export declare const fromImdsCredentials: (creds: 
ImdsCredentials) => AwsCredentialIdentity; diff --git a/amplify/functions/fetchDocuments/node_modules/@smithy/credential-provider-imds/dist-types/remoteProvider/RemoteProviderInit.d.ts b/amplify/functions/fetchDocuments/node_modules/@smithy/credential-provider-imds/dist-types/remoteProvider/RemoteProviderInit.d.ts new file mode 100644 index 0000000..df9eff7 --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@smithy/credential-provider-imds/dist-types/remoteProvider/RemoteProviderInit.d.ts @@ -0,0 +1,40 @@ +import { Logger } from "@smithy/types"; +/** + * @internal + */ +export declare const DEFAULT_TIMEOUT = 1000; +/** + * @internal + */ +export declare const DEFAULT_MAX_RETRIES = 0; +/** + * @public + */ +export interface RemoteProviderConfig { + /** + * The connection timeout (in milliseconds) + */ + timeout: number; + /** + * The maximum number of times the HTTP connection should be retried + */ + maxRetries: number; +} +/** + * @public + */ +export interface RemoteProviderInit extends Partial { + logger?: Logger; + /** + * Only used in the IMDS credential provider. + */ + ec2MetadataV1Disabled?: boolean; + /** + * AWS_PROFILE. 
+ */ + profile?: string; +} +/** + * @internal + */ +export declare const providerConfigFromInit: ({ maxRetries, timeout, }: RemoteProviderInit) => RemoteProviderConfig; diff --git a/amplify/functions/fetchDocuments/node_modules/@smithy/credential-provider-imds/dist-types/remoteProvider/httpRequest.d.ts b/amplify/functions/fetchDocuments/node_modules/@smithy/credential-provider-imds/dist-types/remoteProvider/httpRequest.d.ts new file mode 100644 index 0000000..87c7d0d --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@smithy/credential-provider-imds/dist-types/remoteProvider/httpRequest.d.ts @@ -0,0 +1,9 @@ +/// +/// +/// +import { Buffer } from "buffer"; +import { RequestOptions } from "http"; +/** + * @internal + */ +export declare function httpRequest(options: RequestOptions): Promise; diff --git a/amplify/functions/fetchDocuments/node_modules/@smithy/credential-provider-imds/dist-types/remoteProvider/index.d.ts b/amplify/functions/fetchDocuments/node_modules/@smithy/credential-provider-imds/dist-types/remoteProvider/index.d.ts new file mode 100644 index 0000000..ed18a70 --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@smithy/credential-provider-imds/dist-types/remoteProvider/index.d.ts @@ -0,0 +1,8 @@ +/** + * @internal + */ +export * from "./ImdsCredentials"; +/** + * @internal + */ +export * from "./RemoteProviderInit"; diff --git a/amplify/functions/fetchDocuments/node_modules/@smithy/credential-provider-imds/dist-types/remoteProvider/retry.d.ts b/amplify/functions/fetchDocuments/node_modules/@smithy/credential-provider-imds/dist-types/remoteProvider/retry.d.ts new file mode 100644 index 0000000..4e8abc0 --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@smithy/credential-provider-imds/dist-types/remoteProvider/retry.d.ts @@ -0,0 +1,10 @@ +/** + * @internal + */ +export interface RetryableProvider { + (): Promise; +} +/** + * @internal + */ +export declare const retry: (toRetry: RetryableProvider, maxRetries: 
number) => Promise; diff --git a/amplify/functions/fetchDocuments/node_modules/@smithy/credential-provider-imds/dist-types/ts3.4/config/Endpoint.d.ts b/amplify/functions/fetchDocuments/node_modules/@smithy/credential-provider-imds/dist-types/ts3.4/config/Endpoint.d.ts new file mode 100644 index 0000000..b700953 --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@smithy/credential-provider-imds/dist-types/ts3.4/config/Endpoint.d.ts @@ -0,0 +1,7 @@ +/** + * @internal + */ +export declare enum Endpoint { + IPv4 = "http://169.254.169.254", + IPv6 = "http://[fd00:ec2::254]" +} diff --git a/amplify/functions/fetchDocuments/node_modules/@smithy/credential-provider-imds/dist-types/ts3.4/config/EndpointConfigOptions.d.ts b/amplify/functions/fetchDocuments/node_modules/@smithy/credential-provider-imds/dist-types/ts3.4/config/EndpointConfigOptions.d.ts new file mode 100644 index 0000000..dbcb243 --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@smithy/credential-provider-imds/dist-types/ts3.4/config/EndpointConfigOptions.d.ts @@ -0,0 +1,13 @@ +import { LoadedConfigSelectors } from "@smithy/node-config-provider"; +/** + * @internal + */ +export declare const ENV_ENDPOINT_NAME = "AWS_EC2_METADATA_SERVICE_ENDPOINT"; +/** + * @internal + */ +export declare const CONFIG_ENDPOINT_NAME = "ec2_metadata_service_endpoint"; +/** + * @internal + */ +export declare const ENDPOINT_CONFIG_OPTIONS: LoadedConfigSelectors; diff --git a/amplify/functions/fetchDocuments/node_modules/@smithy/credential-provider-imds/dist-types/ts3.4/config/EndpointMode.d.ts b/amplify/functions/fetchDocuments/node_modules/@smithy/credential-provider-imds/dist-types/ts3.4/config/EndpointMode.d.ts new file mode 100644 index 0000000..7dee86e --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@smithy/credential-provider-imds/dist-types/ts3.4/config/EndpointMode.d.ts @@ -0,0 +1,7 @@ +/** + * @internal + */ +export declare enum EndpointMode { + IPv4 = "IPv4", + IPv6 = 
"IPv6" +} diff --git a/amplify/functions/fetchDocuments/node_modules/@smithy/credential-provider-imds/dist-types/ts3.4/config/EndpointModeConfigOptions.d.ts b/amplify/functions/fetchDocuments/node_modules/@smithy/credential-provider-imds/dist-types/ts3.4/config/EndpointModeConfigOptions.d.ts new file mode 100644 index 0000000..1d5e458 --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@smithy/credential-provider-imds/dist-types/ts3.4/config/EndpointModeConfigOptions.d.ts @@ -0,0 +1,13 @@ +import { LoadedConfigSelectors } from "@smithy/node-config-provider"; +/** + * @internal + */ +export declare const ENV_ENDPOINT_MODE_NAME = "AWS_EC2_METADATA_SERVICE_ENDPOINT_MODE"; +/** + * @internal + */ +export declare const CONFIG_ENDPOINT_MODE_NAME = "ec2_metadata_service_endpoint_mode"; +/** + * @internal + */ +export declare const ENDPOINT_MODE_CONFIG_OPTIONS: LoadedConfigSelectors; diff --git a/amplify/functions/fetchDocuments/node_modules/@smithy/credential-provider-imds/dist-types/ts3.4/error/InstanceMetadataV1FallbackError.d.ts b/amplify/functions/fetchDocuments/node_modules/@smithy/credential-provider-imds/dist-types/ts3.4/error/InstanceMetadataV1FallbackError.d.ts new file mode 100644 index 0000000..93ac220 --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@smithy/credential-provider-imds/dist-types/ts3.4/error/InstanceMetadataV1FallbackError.d.ts @@ -0,0 +1,12 @@ +import { CredentialsProviderError } from "@smithy/property-provider"; +/** + * @public + * + * A specific sub-case of CredentialsProviderError, when the IMDSv1 fallback + * has been attempted but shut off by SDK configuration. 
+ */ +export declare class InstanceMetadataV1FallbackError extends CredentialsProviderError { + readonly tryNextLink: boolean; + name: string; + constructor(message: string, tryNextLink?: boolean); +} diff --git a/amplify/functions/fetchDocuments/node_modules/@smithy/credential-provider-imds/dist-types/ts3.4/fromContainerMetadata.d.ts b/amplify/functions/fetchDocuments/node_modules/@smithy/credential-provider-imds/dist-types/ts3.4/fromContainerMetadata.d.ts new file mode 100644 index 0000000..deb48fd --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@smithy/credential-provider-imds/dist-types/ts3.4/fromContainerMetadata.d.ts @@ -0,0 +1,21 @@ +import { AwsCredentialIdentityProvider } from "@smithy/types"; +import { RemoteProviderInit } from "./remoteProvider/RemoteProviderInit"; +/** + * @internal + */ +export declare const ENV_CMDS_FULL_URI = "AWS_CONTAINER_CREDENTIALS_FULL_URI"; +/** + * @internal + */ +export declare const ENV_CMDS_RELATIVE_URI = "AWS_CONTAINER_CREDENTIALS_RELATIVE_URI"; +/** + * @internal + */ +export declare const ENV_CMDS_AUTH_TOKEN = "AWS_CONTAINER_AUTHORIZATION_TOKEN"; +/** + * @internal + * + * Creates a credential provider that will source credentials from the ECS + * Container Metadata Service + */ +export declare const fromContainerMetadata: (init?: RemoteProviderInit) => AwsCredentialIdentityProvider; diff --git a/amplify/functions/fetchDocuments/node_modules/@smithy/credential-provider-imds/dist-types/ts3.4/fromInstanceMetadata.d.ts b/amplify/functions/fetchDocuments/node_modules/@smithy/credential-provider-imds/dist-types/ts3.4/fromInstanceMetadata.d.ts new file mode 100644 index 0000000..8a533f2 --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@smithy/credential-provider-imds/dist-types/ts3.4/fromInstanceMetadata.d.ts @@ -0,0 +1,10 @@ +import { Provider } from "@smithy/types"; +import { RemoteProviderInit } from "./remoteProvider/RemoteProviderInit"; +import { InstanceMetadataCredentials } from 
"./types"; +/** + * @internal + * + * Creates a credential provider that will source credentials from the EC2 + * Instance Metadata Service + */ +export declare const fromInstanceMetadata: (init?: RemoteProviderInit) => Provider; diff --git a/amplify/functions/fetchDocuments/node_modules/@smithy/credential-provider-imds/dist-types/ts3.4/index.d.ts b/amplify/functions/fetchDocuments/node_modules/@smithy/credential-provider-imds/dist-types/ts3.4/index.d.ts new file mode 100644 index 0000000..c0bc7e4 --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@smithy/credential-provider-imds/dist-types/ts3.4/index.d.ts @@ -0,0 +1,28 @@ +/** + * @internal + */ +export * from "./fromContainerMetadata"; +/** + * @internal + */ +export * from "./fromInstanceMetadata"; +/** + * @internal + */ +export * from "./remoteProvider/RemoteProviderInit"; +/** + * @internal + */ +export * from "./types"; +/** + * @internal + */ +export { httpRequest } from "./remoteProvider/httpRequest"; +/** + * @internal + */ +export { getInstanceMetadataEndpoint } from "./utils/getInstanceMetadataEndpoint"; +/** + * @internal + */ +export { Endpoint } from "./config/Endpoint"; diff --git a/amplify/functions/fetchDocuments/node_modules/@smithy/credential-provider-imds/dist-types/ts3.4/remoteProvider/ImdsCredentials.d.ts b/amplify/functions/fetchDocuments/node_modules/@smithy/credential-provider-imds/dist-types/ts3.4/remoteProvider/ImdsCredentials.d.ts new file mode 100644 index 0000000..c621e0a --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@smithy/credential-provider-imds/dist-types/ts3.4/remoteProvider/ImdsCredentials.d.ts @@ -0,0 +1,19 @@ +import { AwsCredentialIdentity } from "@smithy/types"; +/** + * @internal + */ +export interface ImdsCredentials { + AccessKeyId: string; + SecretAccessKey: string; + Token: string; + Expiration: string; + AccountId?: string; +} +/** + * @internal + */ +export declare const isImdsCredentials: (arg: any) => arg is ImdsCredentials; +/** 
+ * @internal + */ +export declare const fromImdsCredentials: (creds: ImdsCredentials) => AwsCredentialIdentity; diff --git a/amplify/functions/fetchDocuments/node_modules/@smithy/credential-provider-imds/dist-types/ts3.4/remoteProvider/RemoteProviderInit.d.ts b/amplify/functions/fetchDocuments/node_modules/@smithy/credential-provider-imds/dist-types/ts3.4/remoteProvider/RemoteProviderInit.d.ts new file mode 100644 index 0000000..4fe25f1 --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@smithy/credential-provider-imds/dist-types/ts3.4/remoteProvider/RemoteProviderInit.d.ts @@ -0,0 +1,40 @@ +import { Logger } from "@smithy/types"; +/** + * @internal + */ +export declare const DEFAULT_TIMEOUT = 1000; +/** + * @internal + */ +export declare const DEFAULT_MAX_RETRIES = 0; +/** + * @public + */ +export interface RemoteProviderConfig { + /** + * The connection timeout (in milliseconds) + */ + timeout: number; + /** + * The maximum number of times the HTTP connection should be retried + */ + maxRetries: number; +} +/** + * @public + */ +export interface RemoteProviderInit extends Partial { + logger?: Logger; + /** + * Only used in the IMDS credential provider. + */ + ec2MetadataV1Disabled?: boolean; + /** + * AWS_PROFILE. 
+ */ + profile?: string; +} +/** + * @internal + */ +export declare const providerConfigFromInit: ({ maxRetries, timeout, }: RemoteProviderInit) => RemoteProviderConfig; diff --git a/amplify/functions/fetchDocuments/node_modules/@smithy/credential-provider-imds/dist-types/ts3.4/remoteProvider/httpRequest.d.ts b/amplify/functions/fetchDocuments/node_modules/@smithy/credential-provider-imds/dist-types/ts3.4/remoteProvider/httpRequest.d.ts new file mode 100644 index 0000000..b514fef --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@smithy/credential-provider-imds/dist-types/ts3.4/remoteProvider/httpRequest.d.ts @@ -0,0 +1,7 @@ +/// +import { Buffer } from "buffer"; +import { RequestOptions } from "http"; +/** + * @internal + */ +export declare function httpRequest(options: RequestOptions): Promise; diff --git a/amplify/functions/fetchDocuments/node_modules/@smithy/credential-provider-imds/dist-types/ts3.4/remoteProvider/index.d.ts b/amplify/functions/fetchDocuments/node_modules/@smithy/credential-provider-imds/dist-types/ts3.4/remoteProvider/index.d.ts new file mode 100644 index 0000000..a9d6094 --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@smithy/credential-provider-imds/dist-types/ts3.4/remoteProvider/index.d.ts @@ -0,0 +1,8 @@ +/** + * @internal + */ +export * from "./ImdsCredentials"; +/** + * @internal + */ +export * from "./RemoteProviderInit"; diff --git a/amplify/functions/fetchDocuments/node_modules/@smithy/credential-provider-imds/dist-types/ts3.4/remoteProvider/retry.d.ts b/amplify/functions/fetchDocuments/node_modules/@smithy/credential-provider-imds/dist-types/ts3.4/remoteProvider/retry.d.ts new file mode 100644 index 0000000..d72d604 --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@smithy/credential-provider-imds/dist-types/ts3.4/remoteProvider/retry.d.ts @@ -0,0 +1,10 @@ +/** + * @internal + */ +export interface RetryableProvider { + (): Promise; +} +/** + * @internal + */ +export declare const 
retry: (toRetry: RetryableProvider, maxRetries: number) => Promise; diff --git a/amplify/functions/fetchDocuments/node_modules/@smithy/credential-provider-imds/dist-types/ts3.4/types.d.ts b/amplify/functions/fetchDocuments/node_modules/@smithy/credential-provider-imds/dist-types/ts3.4/types.d.ts new file mode 100644 index 0000000..2e9592b --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@smithy/credential-provider-imds/dist-types/ts3.4/types.d.ts @@ -0,0 +1,7 @@ +import { AwsCredentialIdentity } from "@smithy/types"; +/** + * @internal + */ +export interface InstanceMetadataCredentials extends AwsCredentialIdentity { + readonly originalExpiration?: Date; +} diff --git a/amplify/functions/fetchDocuments/node_modules/@smithy/credential-provider-imds/dist-types/ts3.4/utils/getExtendedInstanceMetadataCredentials.d.ts b/amplify/functions/fetchDocuments/node_modules/@smithy/credential-provider-imds/dist-types/ts3.4/utils/getExtendedInstanceMetadataCredentials.d.ts new file mode 100644 index 0000000..67edd2c --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@smithy/credential-provider-imds/dist-types/ts3.4/utils/getExtendedInstanceMetadataCredentials.d.ts @@ -0,0 +1,6 @@ +import { Logger } from "@smithy/types"; +import { InstanceMetadataCredentials } from "../types"; +/** + * @internal + */ +export declare const getExtendedInstanceMetadataCredentials: (credentials: InstanceMetadataCredentials, logger: Logger) => InstanceMetadataCredentials; diff --git a/amplify/functions/fetchDocuments/node_modules/@smithy/credential-provider-imds/dist-types/ts3.4/utils/getInstanceMetadataEndpoint.d.ts b/amplify/functions/fetchDocuments/node_modules/@smithy/credential-provider-imds/dist-types/ts3.4/utils/getInstanceMetadataEndpoint.d.ts new file mode 100644 index 0000000..1ad772d --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@smithy/credential-provider-imds/dist-types/ts3.4/utils/getInstanceMetadataEndpoint.d.ts @@ -0,0 +1,21 @@ +import 
{ Endpoint } from "@smithy/types"; +/** + * Returns the host to use for instance metadata service call. + * + * The host is read from endpoint which can be set either in + * {@link ENV_ENDPOINT_NAME} environment variable or {@link CONFIG_ENDPOINT_NAME} + * configuration property. + * + * If endpoint is not set, then endpoint mode is read either from + * {@link ENV_ENDPOINT_MODE_NAME} environment variable or {@link CONFIG_ENDPOINT_MODE_NAME} + * configuration property. If endpoint mode is not set, then default endpoint mode + * {@link EndpointMode.IPv4} is used. + * + * If endpoint mode is set to {@link EndpointMode.IPv4}, then the host is {@link Endpoint.IPv4}. + * If endpoint mode is set to {@link EndpointMode.IPv6}, then the host is {@link Endpoint.IPv6}. + * + * @returns Host to use for instance metadata service call. + * + * @internal + */ +export declare const getInstanceMetadataEndpoint: () => Promise; diff --git a/amplify/functions/fetchDocuments/node_modules/@smithy/credential-provider-imds/dist-types/ts3.4/utils/staticStabilityProvider.d.ts b/amplify/functions/fetchDocuments/node_modules/@smithy/credential-provider-imds/dist-types/ts3.4/utils/staticStabilityProvider.d.ts new file mode 100644 index 0000000..337091e --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@smithy/credential-provider-imds/dist-types/ts3.4/utils/staticStabilityProvider.d.ts @@ -0,0 +1,16 @@ +import { Logger, Provider } from "@smithy/types"; +import { InstanceMetadataCredentials } from "../types"; +/** + * @internal + * + * IMDS credential supports static stability feature. When used, the expiration + * of recently issued credentials is extended. The server side allows using + * the recently expired credentials. This mitigates impact when clients using + * refreshable credentials are unable to retrieve updates. 
+ * + * @param provider Credential provider + * @returns A credential provider that supports static stability + */ +export declare const staticStabilityProvider: (provider: Provider, options?: { + logger?: Logger | undefined; +}) => Provider; diff --git a/amplify/functions/fetchDocuments/node_modules/@smithy/credential-provider-imds/dist-types/types.d.ts b/amplify/functions/fetchDocuments/node_modules/@smithy/credential-provider-imds/dist-types/types.d.ts new file mode 100644 index 0000000..e74ec99 --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@smithy/credential-provider-imds/dist-types/types.d.ts @@ -0,0 +1,7 @@ +import { AwsCredentialIdentity } from "@smithy/types"; +/** + * @internal + */ +export interface InstanceMetadataCredentials extends AwsCredentialIdentity { + readonly originalExpiration?: Date; +} diff --git a/amplify/functions/fetchDocuments/node_modules/@smithy/credential-provider-imds/dist-types/utils/getExtendedInstanceMetadataCredentials.d.ts b/amplify/functions/fetchDocuments/node_modules/@smithy/credential-provider-imds/dist-types/utils/getExtendedInstanceMetadataCredentials.d.ts new file mode 100644 index 0000000..f0ed41b --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@smithy/credential-provider-imds/dist-types/utils/getExtendedInstanceMetadataCredentials.d.ts @@ -0,0 +1,6 @@ +import { Logger } from "@smithy/types"; +import { InstanceMetadataCredentials } from "../types"; +/** + * @internal + */ +export declare const getExtendedInstanceMetadataCredentials: (credentials: InstanceMetadataCredentials, logger: Logger) => InstanceMetadataCredentials; diff --git a/amplify/functions/fetchDocuments/node_modules/@smithy/credential-provider-imds/dist-types/utils/getInstanceMetadataEndpoint.d.ts b/amplify/functions/fetchDocuments/node_modules/@smithy/credential-provider-imds/dist-types/utils/getInstanceMetadataEndpoint.d.ts new file mode 100644 index 0000000..db6b6da --- /dev/null +++ 
b/amplify/functions/fetchDocuments/node_modules/@smithy/credential-provider-imds/dist-types/utils/getInstanceMetadataEndpoint.d.ts @@ -0,0 +1,21 @@ +import { Endpoint } from "@smithy/types"; +/** + * Returns the host to use for instance metadata service call. + * + * The host is read from endpoint which can be set either in + * {@link ENV_ENDPOINT_NAME} environment variable or {@link CONFIG_ENDPOINT_NAME} + * configuration property. + * + * If endpoint is not set, then endpoint mode is read either from + * {@link ENV_ENDPOINT_MODE_NAME} environment variable or {@link CONFIG_ENDPOINT_MODE_NAME} + * configuration property. If endpoint mode is not set, then default endpoint mode + * {@link EndpointMode.IPv4} is used. + * + * If endpoint mode is set to {@link EndpointMode.IPv4}, then the host is {@link Endpoint.IPv4}. + * If endpoint mode is set to {@link EndpointMode.IPv6}, then the host is {@link Endpoint.IPv6}. + * + * @returns Host to use for instance metadata service call. + * + * @internal + */ +export declare const getInstanceMetadataEndpoint: () => Promise; diff --git a/amplify/functions/fetchDocuments/node_modules/@smithy/credential-provider-imds/dist-types/utils/staticStabilityProvider.d.ts b/amplify/functions/fetchDocuments/node_modules/@smithy/credential-provider-imds/dist-types/utils/staticStabilityProvider.d.ts new file mode 100644 index 0000000..6bfcb69 --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@smithy/credential-provider-imds/dist-types/utils/staticStabilityProvider.d.ts @@ -0,0 +1,16 @@ +import { Logger, Provider } from "@smithy/types"; +import { InstanceMetadataCredentials } from "../types"; +/** + * @internal + * + * IMDS credential supports static stability feature. When used, the expiration + * of recently issued credentials is extended. The server side allows using + * the recently expired credentials. This mitigates impact when clients using + * refreshable credentials are unable to retrieve updates. 
+ * + * @param provider Credential provider + * @returns A credential provider that supports static stability + */ +export declare const staticStabilityProvider: (provider: Provider, options?: { + logger?: Logger | undefined; +}) => Provider; diff --git a/amplify/functions/fetchDocuments/node_modules/@smithy/credential-provider-imds/package.json b/amplify/functions/fetchDocuments/node_modules/@smithy/credential-provider-imds/package.json new file mode 100644 index 0000000..8fd0824 --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@smithy/credential-provider-imds/package.json @@ -0,0 +1,69 @@ +{ + "name": "@smithy/credential-provider-imds", + "version": "4.0.2", + "description": "AWS credential provider that sources credentials from the EC2 instance metadata service and ECS container metadata service", + "main": "./dist-cjs/index.js", + "module": "./dist-es/index.js", + "scripts": { + "build": "concurrently 'yarn:build:cjs' 'yarn:build:es' 'yarn:build:types && yarn build:types:downlevel'", + "build:cjs": "node ../../scripts/inline credential-provider-imds", + "build:es": "yarn g:tsc -p tsconfig.es.json", + "build:types": "yarn g:tsc -p tsconfig.types.json", + "build:types:downlevel": "rimraf dist-types/ts3.4 && downlevel-dts dist-types dist-types/ts3.4", + "stage-release": "rimraf ./.release && yarn pack && mkdir ./.release && tar zxvf ./package.tgz --directory ./.release && rm ./package.tgz", + "clean": "rimraf ./dist-* && rimraf *.tsbuildinfo || exit 0", + "lint": "eslint -c ../../.eslintrc.js \"src/**/*.ts\"", + "format": "prettier --config ../../prettier.config.js --ignore-path ../../.prettierignore --write \"**/*.{ts,md,json}\"", + "test": "yarn g:vitest run", + "test:watch": "yarn g:vitest watch" + }, + "keywords": [ + "aws", + "credentials" + ], + "author": { + "name": "AWS SDK for JavaScript Team", + "url": "https://aws.amazon.com/javascript/" + }, + "license": "Apache-2.0", + "dependencies": { + "@smithy/node-config-provider": "^4.0.2", + 
"@smithy/property-provider": "^4.0.2", + "@smithy/types": "^4.2.0", + "@smithy/url-parser": "^4.0.2", + "tslib": "^2.6.2" + }, + "devDependencies": { + "@types/node": "^18.11.9", + "concurrently": "7.0.0", + "downlevel-dts": "0.10.1", + "rimraf": "3.0.2", + "typedoc": "0.23.23" + }, + "types": "./dist-types/index.d.ts", + "engines": { + "node": ">=18.0.0" + }, + "typesVersions": { + "<4.0": { + "dist-types/*": [ + "dist-types/ts3.4/*" + ] + } + }, + "files": [ + "dist-*/**" + ], + "homepage": "https://github.com/smithy-lang/smithy-typescript/tree/main/packages/credential-provider-imds", + "repository": { + "type": "git", + "url": "https://github.com/smithy-lang/smithy-typescript.git", + "directory": "packages/credential-provider-imds" + }, + "typedoc": { + "entryPoint": "src/index.ts" + }, + "publishConfig": { + "directory": ".release/package" + } +} \ No newline at end of file diff --git a/amplify/functions/fetchDocuments/node_modules/@smithy/fetch-http-handler/LICENSE b/amplify/functions/fetchDocuments/node_modules/@smithy/fetch-http-handler/LICENSE new file mode 100644 index 0000000..7b6491b --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@smithy/fetch-http-handler/LICENSE @@ -0,0 +1,201 @@ +Apache License + Version 2.0, January 2004 + http://www.apache.org/licenses/ + + TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION + + 1. Definitions. + + "License" shall mean the terms and conditions for use, reproduction, + and distribution as defined by Sections 1 through 9 of this document. + + "Licensor" shall mean the copyright owner or entity authorized by + the copyright owner that is granting the License. + + "Legal Entity" shall mean the union of the acting entity and all + other entities that control, are controlled by, or are under common + control with that entity. 
For the purposes of this definition, + "control" means (i) the power, direct or indirect, to cause the + direction or management of such entity, whether by contract or + otherwise, or (ii) ownership of fifty percent (50%) or more of the + outstanding shares, or (iii) beneficial ownership of such entity. + + "You" (or "Your") shall mean an individual or Legal Entity + exercising permissions granted by this License. + + "Source" form shall mean the preferred form for making modifications, + including but not limited to software source code, documentation + source, and configuration files. + + "Object" form shall mean any form resulting from mechanical + transformation or translation of a Source form, including but + not limited to compiled object code, generated documentation, + and conversions to other media types. + + "Work" shall mean the work of authorship, whether in Source or + Object form, made available under the License, as indicated by a + copyright notice that is included in or attached to the work + (an example is provided in the Appendix below). + + "Derivative Works" shall mean any work, whether in Source or Object + form, that is based on (or derived from) the Work and for which the + editorial revisions, annotations, elaborations, or other modifications + represent, as a whole, an original work of authorship. For the purposes + of this License, Derivative Works shall not include works that remain + separable from, or merely link (or bind by name) to the interfaces of, + the Work and Derivative Works thereof. + + "Contribution" shall mean any work of authorship, including + the original version of the Work and any modifications or additions + to that Work or Derivative Works thereof, that is intentionally + submitted to Licensor for inclusion in the Work by the copyright owner + or by an individual or Legal Entity authorized to submit on behalf of + the copyright owner. 
For the purposes of this definition, "submitted" + means any form of electronic, verbal, or written communication sent + to the Licensor or its representatives, including but not limited to + communication on electronic mailing lists, source code control systems, + and issue tracking systems that are managed by, or on behalf of, the + Licensor for the purpose of discussing and improving the Work, but + excluding communication that is conspicuously marked or otherwise + designated in writing by the copyright owner as "Not a Contribution." + + "Contributor" shall mean Licensor and any individual or Legal Entity + on behalf of whom a Contribution has been received by Licensor and + subsequently incorporated within the Work. + + 2. Grant of Copyright License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + copyright license to reproduce, prepare Derivative Works of, + publicly display, publicly perform, sublicense, and distribute the + Work and such Derivative Works in Source or Object form. + + 3. Grant of Patent License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + (except as stated in this section) patent license to make, have made, + use, offer to sell, sell, import, and otherwise transfer the Work, + where such license applies only to those patent claims licensable + by such Contributor that are necessarily infringed by their + Contribution(s) alone or by combination of their Contribution(s) + with the Work to which such Contribution(s) was submitted. 
If You + institute patent litigation against any entity (including a + cross-claim or counterclaim in a lawsuit) alleging that the Work + or a Contribution incorporated within the Work constitutes direct + or contributory patent infringement, then any patent licenses + granted to You under this License for that Work shall terminate + as of the date such litigation is filed. + + 4. Redistribution. You may reproduce and distribute copies of the + Work or Derivative Works thereof in any medium, with or without + modifications, and in Source or Object form, provided that You + meet the following conditions: + + (a) You must give any other recipients of the Work or + Derivative Works a copy of this License; and + + (b) You must cause any modified files to carry prominent notices + stating that You changed the files; and + + (c) You must retain, in the Source form of any Derivative Works + that You distribute, all copyright, patent, trademark, and + attribution notices from the Source form of the Work, + excluding those notices that do not pertain to any part of + the Derivative Works; and + + (d) If the Work includes a "NOTICE" text file as part of its + distribution, then any Derivative Works that You distribute must + include a readable copy of the attribution notices contained + within such NOTICE file, excluding those notices that do not + pertain to any part of the Derivative Works, in at least one + of the following places: within a NOTICE text file distributed + as part of the Derivative Works; within the Source form or + documentation, if provided along with the Derivative Works; or, + within a display generated by the Derivative Works, if and + wherever such third-party notices normally appear. The contents + of the NOTICE file are for informational purposes only and + do not modify the License. 
You may add Your own attribution + notices within Derivative Works that You distribute, alongside + or as an addendum to the NOTICE text from the Work, provided + that such additional attribution notices cannot be construed + as modifying the License. + + You may add Your own copyright statement to Your modifications and + may provide additional or different license terms and conditions + for use, reproduction, or distribution of Your modifications, or + for any such Derivative Works as a whole, provided Your use, + reproduction, and distribution of the Work otherwise complies with + the conditions stated in this License. + + 5. Submission of Contributions. Unless You explicitly state otherwise, + any Contribution intentionally submitted for inclusion in the Work + by You to the Licensor shall be under the terms and conditions of + this License, without any additional terms or conditions. + Notwithstanding the above, nothing herein shall supersede or modify + the terms of any separate license agreement you may have executed + with Licensor regarding such Contributions. + + 6. Trademarks. This License does not grant permission to use the trade + names, trademarks, service marks, or product names of the Licensor, + except as required for reasonable and customary use in describing the + origin of the Work and reproducing the content of the NOTICE file. + + 7. Disclaimer of Warranty. Unless required by applicable law or + agreed to in writing, Licensor provides the Work (and each + Contributor provides its Contributions) on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or + implied, including, without limitation, any warranties or conditions + of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A + PARTICULAR PURPOSE. You are solely responsible for determining the + appropriateness of using or redistributing the Work and assume any + risks associated with Your exercise of permissions under this License. + + 8. 
Limitation of Liability. In no event and under no legal theory, + whether in tort (including negligence), contract, or otherwise, + unless required by applicable law (such as deliberate and grossly + negligent acts) or agreed to in writing, shall any Contributor be + liable to You for damages, including any direct, indirect, special, + incidental, or consequential damages of any character arising as a + result of this License or out of the use or inability to use the + Work (including but not limited to damages for loss of goodwill, + work stoppage, computer failure or malfunction, or any and all + other commercial damages or losses), even if such Contributor + has been advised of the possibility of such damages. + + 9. Accepting Warranty or Additional Liability. While redistributing + the Work or Derivative Works thereof, You may choose to offer, + and charge a fee for, acceptance of support, warranty, indemnity, + or other liability obligations and/or rights consistent with this + License. However, in accepting such obligations, You may act only + on Your own behalf and on Your sole responsibility, not on behalf + of any other Contributor, and only if You agree to indemnify, + defend, and hold each Contributor harmless for any liability + incurred by, or claims asserted against, such Contributor by reason + of your accepting any such warranty or additional liability. + + END OF TERMS AND CONDITIONS + + APPENDIX: How to apply the Apache License to your work. + + To apply the Apache License to your work, attach the following + boilerplate notice, with the fields enclosed by brackets "{}" + replaced with your own identifying information. (Don't include + the brackets!) The text should be enclosed in the appropriate + comment syntax for the file format. We also recommend that a + file or class name and description of purpose be included on the + same "printed page" as the copyright notice for easier + identification within third-party archives. 
+ + Copyright 2018-2020 Amazon.com, Inc. or its affiliates. All Rights Reserved. + + Licensed under the Apache License, Version 2.0 (the "License"); + you may not use this file except in compliance with the License. + You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + + Unless required by applicable law or agreed to in writing, software + distributed under the License is distributed on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + See the License for the specific language governing permissions and + limitations under the License. \ No newline at end of file diff --git a/amplify/functions/fetchDocuments/node_modules/@smithy/fetch-http-handler/README.md b/amplify/functions/fetchDocuments/node_modules/@smithy/fetch-http-handler/README.md new file mode 100644 index 0000000..e52e8f1 --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@smithy/fetch-http-handler/README.md @@ -0,0 +1,11 @@ +# @smithy/fetch-http-handler + +[![NPM version](https://img.shields.io/npm/v/@smithy/fetch-http-handler/latest.svg)](https://www.npmjs.com/package/@smithy/fetch-http-handler) +[![NPM downloads](https://img.shields.io/npm/dm/@smithy/fetch-http-handler.svg)](https://www.npmjs.com/package/@smithy/fetch-http-handler) + +This is the default `requestHandler` used for browser applications. +Since Node.js introduced experimental Web Streams API in v16.5.0 and made it stable in v21.0.0, +you can consider using `fetch-http-handler` in Node.js, although it's not recommended. + +For the Node.js default `requestHandler` implementation, see instead +[`@smithy/node-http-handler`](https://www.npmjs.com/package/@smithy/node-http-handler). 
diff --git a/amplify/functions/fetchDocuments/node_modules/@smithy/fetch-http-handler/dist-cjs/create-request.js b/amplify/functions/fetchDocuments/node_modules/@smithy/fetch-http-handler/dist-cjs/create-request.js new file mode 100644 index 0000000..532e610 --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@smithy/fetch-http-handler/dist-cjs/create-request.js @@ -0,0 +1 @@ +module.exports = require("./index.js"); \ No newline at end of file diff --git a/amplify/functions/fetchDocuments/node_modules/@smithy/fetch-http-handler/dist-cjs/fetch-http-handler.js b/amplify/functions/fetchDocuments/node_modules/@smithy/fetch-http-handler/dist-cjs/fetch-http-handler.js new file mode 100644 index 0000000..532e610 --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@smithy/fetch-http-handler/dist-cjs/fetch-http-handler.js @@ -0,0 +1 @@ +module.exports = require("./index.js"); \ No newline at end of file diff --git a/amplify/functions/fetchDocuments/node_modules/@smithy/fetch-http-handler/dist-cjs/index.js b/amplify/functions/fetchDocuments/node_modules/@smithy/fetch-http-handler/dist-cjs/index.js new file mode 100644 index 0000000..9c9c44b --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@smithy/fetch-http-handler/dist-cjs/index.js @@ -0,0 +1,264 @@ +var __defProp = Object.defineProperty; +var __getOwnPropDesc = Object.getOwnPropertyDescriptor; +var __getOwnPropNames = Object.getOwnPropertyNames; +var __hasOwnProp = Object.prototype.hasOwnProperty; +var __name = (target, value) => __defProp(target, "name", { value, configurable: true }); +var __export = (target, all) => { + for (var name in all) + __defProp(target, name, { get: all[name], enumerable: true }); +}; +var __copyProps = (to, from, except, desc) => { + if (from && typeof from === "object" || typeof from === "function") { + for (let key of __getOwnPropNames(from)) + if (!__hasOwnProp.call(to, key) && key !== except) + __defProp(to, key, { get: () => from[key], 
enumerable: !(desc = __getOwnPropDesc(from, key)) || desc.enumerable }); + } + return to; +}; +var __toCommonJS = (mod) => __copyProps(__defProp({}, "__esModule", { value: true }), mod); + +// src/index.ts +var src_exports = {}; +__export(src_exports, { + FetchHttpHandler: () => FetchHttpHandler, + keepAliveSupport: () => keepAliveSupport, + streamCollector: () => streamCollector +}); +module.exports = __toCommonJS(src_exports); + +// src/fetch-http-handler.ts +var import_protocol_http = require("@smithy/protocol-http"); +var import_querystring_builder = require("@smithy/querystring-builder"); + +// src/create-request.ts +function createRequest(url, requestOptions) { + return new Request(url, requestOptions); +} +__name(createRequest, "createRequest"); + +// src/request-timeout.ts +function requestTimeout(timeoutInMs = 0) { + return new Promise((resolve, reject) => { + if (timeoutInMs) { + setTimeout(() => { + const timeoutError = new Error(`Request did not complete within ${timeoutInMs} ms`); + timeoutError.name = "TimeoutError"; + reject(timeoutError); + }, timeoutInMs); + } + }); +} +__name(requestTimeout, "requestTimeout"); + +// src/fetch-http-handler.ts +var keepAliveSupport = { + supported: void 0 +}; +var FetchHttpHandler = class _FetchHttpHandler { + static { + __name(this, "FetchHttpHandler"); + } + /** + * @returns the input if it is an HttpHandler of any class, + * or instantiates a new instance of this handler. + */ + static create(instanceOrOptions) { + if (typeof instanceOrOptions?.handle === "function") { + return instanceOrOptions; + } + return new _FetchHttpHandler(instanceOrOptions); + } + constructor(options) { + if (typeof options === "function") { + this.configProvider = options().then((opts) => opts || {}); + } else { + this.config = options ?? 
{}; + this.configProvider = Promise.resolve(this.config); + } + if (keepAliveSupport.supported === void 0) { + keepAliveSupport.supported = Boolean( + typeof Request !== "undefined" && "keepalive" in createRequest("https://[::1]") + ); + } + } + destroy() { + } + async handle(request, { abortSignal } = {}) { + if (!this.config) { + this.config = await this.configProvider; + } + const requestTimeoutInMs = this.config.requestTimeout; + const keepAlive = this.config.keepAlive === true; + const credentials = this.config.credentials; + if (abortSignal?.aborted) { + const abortError = new Error("Request aborted"); + abortError.name = "AbortError"; + return Promise.reject(abortError); + } + let path = request.path; + const queryString = (0, import_querystring_builder.buildQueryString)(request.query || {}); + if (queryString) { + path += `?${queryString}`; + } + if (request.fragment) { + path += `#${request.fragment}`; + } + let auth = ""; + if (request.username != null || request.password != null) { + const username = request.username ?? ""; + const password = request.password ?? ""; + auth = `${username}:${password}@`; + } + const { port, method } = request; + const url = `${request.protocol}//${auth}${request.hostname}${port ? `:${port}` : ""}${path}`; + const body = method === "GET" || method === "HEAD" ? 
void 0 : request.body; + const requestOptions = { + body, + headers: new Headers(request.headers), + method, + credentials + }; + if (this.config?.cache) { + requestOptions.cache = this.config.cache; + } + if (body) { + requestOptions.duplex = "half"; + } + if (typeof AbortController !== "undefined") { + requestOptions.signal = abortSignal; + } + if (keepAliveSupport.supported) { + requestOptions.keepalive = keepAlive; + } + if (typeof this.config.requestInit === "function") { + Object.assign(requestOptions, this.config.requestInit(request)); + } + let removeSignalEventListener = /* @__PURE__ */ __name(() => { + }, "removeSignalEventListener"); + const fetchRequest = createRequest(url, requestOptions); + const raceOfPromises = [ + fetch(fetchRequest).then((response) => { + const fetchHeaders = response.headers; + const transformedHeaders = {}; + for (const pair of fetchHeaders.entries()) { + transformedHeaders[pair[0]] = pair[1]; + } + const hasReadableStream = response.body != void 0; + if (!hasReadableStream) { + return response.blob().then((body2) => ({ + response: new import_protocol_http.HttpResponse({ + headers: transformedHeaders, + reason: response.statusText, + statusCode: response.status, + body: body2 + }) + })); + } + return { + response: new import_protocol_http.HttpResponse({ + headers: transformedHeaders, + reason: response.statusText, + statusCode: response.status, + body: response.body + }) + }; + }), + requestTimeout(requestTimeoutInMs) + ]; + if (abortSignal) { + raceOfPromises.push( + new Promise((resolve, reject) => { + const onAbort = /* @__PURE__ */ __name(() => { + const abortError = new Error("Request aborted"); + abortError.name = "AbortError"; + reject(abortError); + }, "onAbort"); + if (typeof abortSignal.addEventListener === "function") { + const signal = abortSignal; + signal.addEventListener("abort", onAbort, { once: true }); + removeSignalEventListener = /* @__PURE__ */ __name(() => signal.removeEventListener("abort", onAbort), 
"removeSignalEventListener"); + } else { + abortSignal.onabort = onAbort; + } + }) + ); + } + return Promise.race(raceOfPromises).finally(removeSignalEventListener); + } + updateHttpClientConfig(key, value) { + this.config = void 0; + this.configProvider = this.configProvider.then((config) => { + config[key] = value; + return config; + }); + } + httpHandlerConfigs() { + return this.config ?? {}; + } +}; + +// src/stream-collector.ts +var import_util_base64 = require("@smithy/util-base64"); +var streamCollector = /* @__PURE__ */ __name(async (stream) => { + if (typeof Blob === "function" && stream instanceof Blob || stream.constructor?.name === "Blob") { + if (Blob.prototype.arrayBuffer !== void 0) { + return new Uint8Array(await stream.arrayBuffer()); + } + return collectBlob(stream); + } + return collectStream(stream); +}, "streamCollector"); +async function collectBlob(blob) { + const base64 = await readToBase64(blob); + const arrayBuffer = (0, import_util_base64.fromBase64)(base64); + return new Uint8Array(arrayBuffer); +} +__name(collectBlob, "collectBlob"); +async function collectStream(stream) { + const chunks = []; + const reader = stream.getReader(); + let isDone = false; + let length = 0; + while (!isDone) { + const { done, value } = await reader.read(); + if (value) { + chunks.push(value); + length += value.length; + } + isDone = done; + } + const collected = new Uint8Array(length); + let offset = 0; + for (const chunk of chunks) { + collected.set(chunk, offset); + offset += chunk.length; + } + return collected; +} +__name(collectStream, "collectStream"); +function readToBase64(blob) { + return new Promise((resolve, reject) => { + const reader = new FileReader(); + reader.onloadend = () => { + if (reader.readyState !== 2) { + return reject(new Error("Reader aborted too early")); + } + const result = reader.result ?? ""; + const commaIndex = result.indexOf(","); + const dataOffset = commaIndex > -1 ? 
commaIndex + 1 : result.length; + resolve(result.substring(dataOffset)); + }; + reader.onabort = () => reject(new Error("Read aborted")); + reader.onerror = () => reject(reader.error); + reader.readAsDataURL(blob); + }); +} +__name(readToBase64, "readToBase64"); +// Annotate the CommonJS export names for ESM import in node: + +0 && (module.exports = { + keepAliveSupport, + FetchHttpHandler, + streamCollector +}); + diff --git a/amplify/functions/fetchDocuments/node_modules/@smithy/fetch-http-handler/dist-cjs/request-timeout.js b/amplify/functions/fetchDocuments/node_modules/@smithy/fetch-http-handler/dist-cjs/request-timeout.js new file mode 100644 index 0000000..532e610 --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@smithy/fetch-http-handler/dist-cjs/request-timeout.js @@ -0,0 +1 @@ +module.exports = require("./index.js"); \ No newline at end of file diff --git a/amplify/functions/fetchDocuments/node_modules/@smithy/fetch-http-handler/dist-cjs/stream-collector.js b/amplify/functions/fetchDocuments/node_modules/@smithy/fetch-http-handler/dist-cjs/stream-collector.js new file mode 100644 index 0000000..532e610 --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@smithy/fetch-http-handler/dist-cjs/stream-collector.js @@ -0,0 +1 @@ +module.exports = require("./index.js"); \ No newline at end of file diff --git a/amplify/functions/fetchDocuments/node_modules/@smithy/fetch-http-handler/dist-es/create-request.js b/amplify/functions/fetchDocuments/node_modules/@smithy/fetch-http-handler/dist-es/create-request.js new file mode 100644 index 0000000..b6f1816 --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@smithy/fetch-http-handler/dist-es/create-request.js @@ -0,0 +1,3 @@ +export function createRequest(url, requestOptions) { + return new Request(url, requestOptions); +} diff --git a/amplify/functions/fetchDocuments/node_modules/@smithy/fetch-http-handler/dist-es/fetch-http-handler.js 
b/amplify/functions/fetchDocuments/node_modules/@smithy/fetch-http-handler/dist-es/fetch-http-handler.js new file mode 100644 index 0000000..dd56e37 --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@smithy/fetch-http-handler/dist-es/fetch-http-handler.js @@ -0,0 +1,139 @@ +import { HttpResponse } from "@smithy/protocol-http"; +import { buildQueryString } from "@smithy/querystring-builder"; +import { createRequest } from "./create-request"; +import { requestTimeout } from "./request-timeout"; +export const keepAliveSupport = { + supported: undefined, +}; +export class FetchHttpHandler { + static create(instanceOrOptions) { + if (typeof instanceOrOptions?.handle === "function") { + return instanceOrOptions; + } + return new FetchHttpHandler(instanceOrOptions); + } + constructor(options) { + if (typeof options === "function") { + this.configProvider = options().then((opts) => opts || {}); + } + else { + this.config = options ?? {}; + this.configProvider = Promise.resolve(this.config); + } + if (keepAliveSupport.supported === undefined) { + keepAliveSupport.supported = Boolean(typeof Request !== "undefined" && "keepalive" in createRequest("https://[::1]")); + } + } + destroy() { + } + async handle(request, { abortSignal } = {}) { + if (!this.config) { + this.config = await this.configProvider; + } + const requestTimeoutInMs = this.config.requestTimeout; + const keepAlive = this.config.keepAlive === true; + const credentials = this.config.credentials; + if (abortSignal?.aborted) { + const abortError = new Error("Request aborted"); + abortError.name = "AbortError"; + return Promise.reject(abortError); + } + let path = request.path; + const queryString = buildQueryString(request.query || {}); + if (queryString) { + path += `?${queryString}`; + } + if (request.fragment) { + path += `#${request.fragment}`; + } + let auth = ""; + if (request.username != null || request.password != null) { + const username = request.username ?? 
""; + const password = request.password ?? ""; + auth = `${username}:${password}@`; + } + const { port, method } = request; + const url = `${request.protocol}//${auth}${request.hostname}${port ? `:${port}` : ""}${path}`; + const body = method === "GET" || method === "HEAD" ? undefined : request.body; + const requestOptions = { + body, + headers: new Headers(request.headers), + method: method, + credentials, + }; + if (this.config?.cache) { + requestOptions.cache = this.config.cache; + } + if (body) { + requestOptions.duplex = "half"; + } + if (typeof AbortController !== "undefined") { + requestOptions.signal = abortSignal; + } + if (keepAliveSupport.supported) { + requestOptions.keepalive = keepAlive; + } + if (typeof this.config.requestInit === "function") { + Object.assign(requestOptions, this.config.requestInit(request)); + } + let removeSignalEventListener = () => { }; + const fetchRequest = createRequest(url, requestOptions); + const raceOfPromises = [ + fetch(fetchRequest).then((response) => { + const fetchHeaders = response.headers; + const transformedHeaders = {}; + for (const pair of fetchHeaders.entries()) { + transformedHeaders[pair[0]] = pair[1]; + } + const hasReadableStream = response.body != undefined; + if (!hasReadableStream) { + return response.blob().then((body) => ({ + response: new HttpResponse({ + headers: transformedHeaders, + reason: response.statusText, + statusCode: response.status, + body, + }), + })); + } + return { + response: new HttpResponse({ + headers: transformedHeaders, + reason: response.statusText, + statusCode: response.status, + body: response.body, + }), + }; + }), + requestTimeout(requestTimeoutInMs), + ]; + if (abortSignal) { + raceOfPromises.push(new Promise((resolve, reject) => { + const onAbort = () => { + const abortError = new Error("Request aborted"); + abortError.name = "AbortError"; + reject(abortError); + }; + if (typeof abortSignal.addEventListener === "function") { + const signal = abortSignal; + 
signal.addEventListener("abort", onAbort, { once: true }); + removeSignalEventListener = () => signal.removeEventListener("abort", onAbort); + } + else { + abortSignal.onabort = onAbort; + } + })); + } + return Promise.race(raceOfPromises).finally(removeSignalEventListener); + } + updateHttpClientConfig(key, value) { + this.config = undefined; + this.configProvider = this.configProvider.then((config) => { + config[key] = value; + return config; + }); + } + httpHandlerConfigs() { + return this.config ?? {}; + } +} diff --git a/amplify/functions/fetchDocuments/node_modules/@smithy/fetch-http-handler/dist-es/index.js b/amplify/functions/fetchDocuments/node_modules/@smithy/fetch-http-handler/dist-es/index.js new file mode 100644 index 0000000..a0c61f1 --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@smithy/fetch-http-handler/dist-es/index.js @@ -0,0 +1,2 @@ +export * from "./fetch-http-handler"; +export * from "./stream-collector"; diff --git a/amplify/functions/fetchDocuments/node_modules/@smithy/fetch-http-handler/dist-es/request-timeout.js b/amplify/functions/fetchDocuments/node_modules/@smithy/fetch-http-handler/dist-es/request-timeout.js new file mode 100644 index 0000000..66b09b2 --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@smithy/fetch-http-handler/dist-es/request-timeout.js @@ -0,0 +1,11 @@ +export function requestTimeout(timeoutInMs = 0) { + return new Promise((resolve, reject) => { + if (timeoutInMs) { + setTimeout(() => { + const timeoutError = new Error(`Request did not complete within ${timeoutInMs} ms`); + timeoutError.name = "TimeoutError"; + reject(timeoutError); + }, timeoutInMs); + } + }); +} diff --git a/amplify/functions/fetchDocuments/node_modules/@smithy/fetch-http-handler/dist-es/stream-collector.js b/amplify/functions/fetchDocuments/node_modules/@smithy/fetch-http-handler/dist-es/stream-collector.js new file mode 100644 index 0000000..a400d9b --- /dev/null +++ 
b/amplify/functions/fetchDocuments/node_modules/@smithy/fetch-http-handler/dist-es/stream-collector.js @@ -0,0 +1,53 @@ +import { fromBase64 } from "@smithy/util-base64"; +export const streamCollector = async (stream) => { + if ((typeof Blob === "function" && stream instanceof Blob) || stream.constructor?.name === "Blob") { + if (Blob.prototype.arrayBuffer !== undefined) { + return new Uint8Array(await stream.arrayBuffer()); + } + return collectBlob(stream); + } + return collectStream(stream); +}; +async function collectBlob(blob) { + const base64 = await readToBase64(blob); + const arrayBuffer = fromBase64(base64); + return new Uint8Array(arrayBuffer); +} +async function collectStream(stream) { + const chunks = []; + const reader = stream.getReader(); + let isDone = false; + let length = 0; + while (!isDone) { + const { done, value } = await reader.read(); + if (value) { + chunks.push(value); + length += value.length; + } + isDone = done; + } + const collected = new Uint8Array(length); + let offset = 0; + for (const chunk of chunks) { + collected.set(chunk, offset); + offset += chunk.length; + } + return collected; +} +function readToBase64(blob) { + return new Promise((resolve, reject) => { + const reader = new FileReader(); + reader.onloadend = () => { + if (reader.readyState !== 2) { + return reject(new Error("Reader aborted too early")); + } + const result = (reader.result ?? ""); + const commaIndex = result.indexOf(","); + const dataOffset = commaIndex > -1 ? 
commaIndex + 1 : result.length; + resolve(result.substring(dataOffset)); + }; + reader.onabort = () => reject(new Error("Read aborted")); + reader.onerror = () => reject(reader.error); + reader.readAsDataURL(blob); + }); +} diff --git a/amplify/functions/fetchDocuments/node_modules/@smithy/fetch-http-handler/dist-types/create-request.d.ts b/amplify/functions/fetchDocuments/node_modules/@smithy/fetch-http-handler/dist-types/create-request.d.ts new file mode 100644 index 0000000..d668b06 --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@smithy/fetch-http-handler/dist-types/create-request.d.ts @@ -0,0 +1,6 @@ +import { AdditionalRequestParameters } from "./fetch-http-handler"; +/** + * @internal + * For mocking/interception. + */ +export declare function createRequest(url: string, requestOptions?: RequestInit & AdditionalRequestParameters): Request; diff --git a/amplify/functions/fetchDocuments/node_modules/@smithy/fetch-http-handler/dist-types/fetch-http-handler.d.ts b/amplify/functions/fetchDocuments/node_modules/@smithy/fetch-http-handler/dist-types/fetch-http-handler.d.ts new file mode 100644 index 0000000..446301c --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@smithy/fetch-http-handler/dist-types/fetch-http-handler.d.ts @@ -0,0 +1,41 @@ +import { HttpHandler, HttpRequest, HttpResponse } from "@smithy/protocol-http"; +import type { FetchHttpHandlerOptions } from "@smithy/types"; +import { HttpHandlerOptions, Provider } from "@smithy/types"; +/** + * @public + */ +export { FetchHttpHandlerOptions }; +/** + * @internal + * Detection of keepalive support. Can be overridden for testing. + */ +export declare const keepAliveSupport: { + supported: boolean | undefined; +}; +/** + * @internal + */ +export type AdditionalRequestParameters = { + duplex?: "half"; +}; +/** + * @public + * + * HttpHandler implementation using browsers' `fetch` global function. 
+ */ +export declare class FetchHttpHandler implements HttpHandler { + private config?; + private configProvider; + /** + * @returns the input if it is an HttpHandler of any class, + * or instantiates a new instance of this handler. + */ + static create(instanceOrOptions?: HttpHandler | FetchHttpHandlerOptions | Provider): FetchHttpHandler | HttpHandler; + constructor(options?: FetchHttpHandlerOptions | Provider); + destroy(): void; + handle(request: HttpRequest, { abortSignal }?: HttpHandlerOptions): Promise<{ + response: HttpResponse; + }>; + updateHttpClientConfig(key: keyof FetchHttpHandlerOptions, value: FetchHttpHandlerOptions[typeof key]): void; + httpHandlerConfigs(): FetchHttpHandlerOptions; +} diff --git a/amplify/functions/fetchDocuments/node_modules/@smithy/fetch-http-handler/dist-types/index.d.ts b/amplify/functions/fetchDocuments/node_modules/@smithy/fetch-http-handler/dist-types/index.d.ts new file mode 100644 index 0000000..a0c61f1 --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@smithy/fetch-http-handler/dist-types/index.d.ts @@ -0,0 +1,2 @@ +export * from "./fetch-http-handler"; +export * from "./stream-collector"; diff --git a/amplify/functions/fetchDocuments/node_modules/@smithy/fetch-http-handler/dist-types/request-timeout.d.ts b/amplify/functions/fetchDocuments/node_modules/@smithy/fetch-http-handler/dist-types/request-timeout.d.ts new file mode 100644 index 0000000..28d784b --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@smithy/fetch-http-handler/dist-types/request-timeout.d.ts @@ -0,0 +1 @@ +export declare function requestTimeout(timeoutInMs?: number): Promise; diff --git a/amplify/functions/fetchDocuments/node_modules/@smithy/fetch-http-handler/dist-types/stream-collector.d.ts b/amplify/functions/fetchDocuments/node_modules/@smithy/fetch-http-handler/dist-types/stream-collector.d.ts new file mode 100644 index 0000000..b2ca812 --- /dev/null +++ 
b/amplify/functions/fetchDocuments/node_modules/@smithy/fetch-http-handler/dist-types/stream-collector.d.ts @@ -0,0 +1,2 @@ +import { StreamCollector } from "@smithy/types"; +export declare const streamCollector: StreamCollector; diff --git a/amplify/functions/fetchDocuments/node_modules/@smithy/fetch-http-handler/dist-types/ts3.4/create-request.d.ts b/amplify/functions/fetchDocuments/node_modules/@smithy/fetch-http-handler/dist-types/ts3.4/create-request.d.ts new file mode 100644 index 0000000..5f0b074 --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@smithy/fetch-http-handler/dist-types/ts3.4/create-request.d.ts @@ -0,0 +1,6 @@ +import { AdditionalRequestParameters } from "./fetch-http-handler"; +/** + * @internal + * For mocking/interception. + */ +export declare function createRequest(url: string, requestOptions?: RequestInit & AdditionalRequestParameters): Request; diff --git a/amplify/functions/fetchDocuments/node_modules/@smithy/fetch-http-handler/dist-types/ts3.4/fetch-http-handler.d.ts b/amplify/functions/fetchDocuments/node_modules/@smithy/fetch-http-handler/dist-types/ts3.4/fetch-http-handler.d.ts new file mode 100644 index 0000000..19a2943 --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@smithy/fetch-http-handler/dist-types/ts3.4/fetch-http-handler.d.ts @@ -0,0 +1,41 @@ +import { HttpHandler, HttpRequest, HttpResponse } from "@smithy/protocol-http"; +import { FetchHttpHandlerOptions } from "@smithy/types"; +import { HttpHandlerOptions, Provider } from "@smithy/types"; +/** + * @public + */ +export { FetchHttpHandlerOptions }; +/** + * @internal + * Detection of keepalive support. Can be overridden for testing. + */ +export declare const keepAliveSupport: { + supported: boolean | undefined; +}; +/** + * @internal + */ +export type AdditionalRequestParameters = { + duplex?: "half"; +}; +/** + * @public + * + * HttpHandler implementation using browsers' `fetch` global function. 
+ */ +export declare class FetchHttpHandler implements HttpHandler { + private config?; + private configProvider; + /** + * @returns the input if it is an HttpHandler of any class, + * or instantiates a new instance of this handler. + */ + static create(instanceOrOptions?: HttpHandler | FetchHttpHandlerOptions | Provider): FetchHttpHandler | HttpHandler; + constructor(options?: FetchHttpHandlerOptions | Provider); + destroy(): void; + handle(request: HttpRequest, { abortSignal }?: HttpHandlerOptions): Promise<{ + response: HttpResponse; + }>; + updateHttpClientConfig(key: keyof FetchHttpHandlerOptions, value: FetchHttpHandlerOptions[typeof key]): void; + httpHandlerConfigs(): FetchHttpHandlerOptions; +} diff --git a/amplify/functions/fetchDocuments/node_modules/@smithy/fetch-http-handler/dist-types/ts3.4/index.d.ts b/amplify/functions/fetchDocuments/node_modules/@smithy/fetch-http-handler/dist-types/ts3.4/index.d.ts new file mode 100644 index 0000000..d30edab --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@smithy/fetch-http-handler/dist-types/ts3.4/index.d.ts @@ -0,0 +1,2 @@ +export * from "./fetch-http-handler"; +export * from "./stream-collector"; diff --git a/amplify/functions/fetchDocuments/node_modules/@smithy/fetch-http-handler/dist-types/ts3.4/request-timeout.d.ts b/amplify/functions/fetchDocuments/node_modules/@smithy/fetch-http-handler/dist-types/ts3.4/request-timeout.d.ts new file mode 100644 index 0000000..ca24128 --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@smithy/fetch-http-handler/dist-types/ts3.4/request-timeout.d.ts @@ -0,0 +1 @@ +export declare function requestTimeout(timeoutInMs?: number): Promise; diff --git a/amplify/functions/fetchDocuments/node_modules/@smithy/fetch-http-handler/dist-types/ts3.4/stream-collector.d.ts b/amplify/functions/fetchDocuments/node_modules/@smithy/fetch-http-handler/dist-types/ts3.4/stream-collector.d.ts new file mode 100644 index 0000000..8259097 --- /dev/null +++ 
b/amplify/functions/fetchDocuments/node_modules/@smithy/fetch-http-handler/dist-types/ts3.4/stream-collector.d.ts @@ -0,0 +1,2 @@ +import { StreamCollector } from "@smithy/types"; +export declare const streamCollector: StreamCollector; diff --git a/amplify/functions/fetchDocuments/node_modules/@smithy/fetch-http-handler/package.json b/amplify/functions/fetchDocuments/node_modules/@smithy/fetch-http-handler/package.json new file mode 100644 index 0000000..8ebcaa1 --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@smithy/fetch-http-handler/package.json @@ -0,0 +1,68 @@ +{ + "name": "@smithy/fetch-http-handler", + "version": "5.0.2", + "description": "Provides a way to make requests", + "scripts": { + "build": "concurrently 'yarn:build:cjs' 'yarn:build:es' 'yarn:build:types && yarn build:types:downlevel'", + "build:cjs": "node ../../scripts/inline fetch-http-handler", + "build:es": "yarn g:tsc -p tsconfig.es.json", + "build:types": "yarn g:tsc -p tsconfig.types.json", + "build:types:downlevel": "rimraf dist-types/ts3.4 && downlevel-dts dist-types dist-types/ts3.4", + "stage-release": "rimraf ./.release && yarn pack && mkdir ./.release && tar zxvf ./package.tgz --directory ./.release && rm ./package.tgz", + "clean": "rimraf ./dist-* && rimraf *.tsbuildinfo || exit 0", + "lint": "eslint -c ../../.eslintrc.js \"src/**/*.ts\"", + "format": "prettier --config ../../prettier.config.js --ignore-path ../../.prettierignore --write \"**/*.{ts,md,json}\"", + "extract:docs": "api-extractor run --local", + "test": "yarn g:vitest run && yarn test:browser", + "test:watch": "yarn g:vitest watch", + "test:browser": "yarn g:vitest run -c vitest.config.browser.ts", + "test:browser:watch": "yarn g:vitest watch -c vitest.config.browser.ts" + }, + "author": { + "name": "AWS SDK for JavaScript Team", + "url": "https://aws.amazon.com/javascript/" + }, + "license": "Apache-2.0", + "main": "./dist-cjs/index.js", + "module": "./dist-es/index.js", + "types": 
"./dist-types/index.d.ts", + "dependencies": { + "@smithy/protocol-http": "^5.1.0", + "@smithy/querystring-builder": "^4.0.2", + "@smithy/types": "^4.2.0", + "@smithy/util-base64": "^4.0.0", + "tslib": "^2.6.2" + }, + "devDependencies": { + "@smithy/abort-controller": "^4.0.2", + "concurrently": "7.0.0", + "downlevel-dts": "0.10.1", + "rimraf": "3.0.2", + "typedoc": "0.23.23" + }, + "typesVersions": { + "<4.0": { + "dist-types/*": [ + "dist-types/ts3.4/*" + ] + } + }, + "files": [ + "dist-*/**" + ], + "homepage": "https://github.com/smithy-lang/smithy-typescript/tree/main/packages/fetch-http-handler", + "repository": { + "type": "git", + "url": "https://github.com/smithy-lang/smithy-typescript.git", + "directory": "packages/fetch-http-handler" + }, + "typedoc": { + "entryPoint": "src/index.ts" + }, + "publishConfig": { + "directory": ".release/package" + }, + "engines": { + "node": ">=18.0.0" + } +} \ No newline at end of file diff --git a/amplify/functions/fetchDocuments/node_modules/@smithy/hash-node/LICENSE b/amplify/functions/fetchDocuments/node_modules/@smithy/hash-node/LICENSE new file mode 100644 index 0000000..7b6491b --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@smithy/hash-node/LICENSE @@ -0,0 +1,201 @@ +Apache License + Version 2.0, January 2004 + http://www.apache.org/licenses/ + + TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION + + 1. Definitions. + + "License" shall mean the terms and conditions for use, reproduction, + and distribution as defined by Sections 1 through 9 of this document. + + "Licensor" shall mean the copyright owner or entity authorized by + the copyright owner that is granting the License. + + "Legal Entity" shall mean the union of the acting entity and all + other entities that control, are controlled by, or are under common + control with that entity. 
For the purposes of this definition, + "control" means (i) the power, direct or indirect, to cause the + direction or management of such entity, whether by contract or + otherwise, or (ii) ownership of fifty percent (50%) or more of the + outstanding shares, or (iii) beneficial ownership of such entity. + + "You" (or "Your") shall mean an individual or Legal Entity + exercising permissions granted by this License. + + "Source" form shall mean the preferred form for making modifications, + including but not limited to software source code, documentation + source, and configuration files. + + "Object" form shall mean any form resulting from mechanical + transformation or translation of a Source form, including but + not limited to compiled object code, generated documentation, + and conversions to other media types. + + "Work" shall mean the work of authorship, whether in Source or + Object form, made available under the License, as indicated by a + copyright notice that is included in or attached to the work + (an example is provided in the Appendix below). + + "Derivative Works" shall mean any work, whether in Source or Object + form, that is based on (or derived from) the Work and for which the + editorial revisions, annotations, elaborations, or other modifications + represent, as a whole, an original work of authorship. For the purposes + of this License, Derivative Works shall not include works that remain + separable from, or merely link (or bind by name) to the interfaces of, + the Work and Derivative Works thereof. + + "Contribution" shall mean any work of authorship, including + the original version of the Work and any modifications or additions + to that Work or Derivative Works thereof, that is intentionally + submitted to Licensor for inclusion in the Work by the copyright owner + or by an individual or Legal Entity authorized to submit on behalf of + the copyright owner. 
For the purposes of this definition, "submitted" + means any form of electronic, verbal, or written communication sent + to the Licensor or its representatives, including but not limited to + communication on electronic mailing lists, source code control systems, + and issue tracking systems that are managed by, or on behalf of, the + Licensor for the purpose of discussing and improving the Work, but + excluding communication that is conspicuously marked or otherwise + designated in writing by the copyright owner as "Not a Contribution." + + "Contributor" shall mean Licensor and any individual or Legal Entity + on behalf of whom a Contribution has been received by Licensor and + subsequently incorporated within the Work. + + 2. Grant of Copyright License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + copyright license to reproduce, prepare Derivative Works of, + publicly display, publicly perform, sublicense, and distribute the + Work and such Derivative Works in Source or Object form. + + 3. Grant of Patent License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + (except as stated in this section) patent license to make, have made, + use, offer to sell, sell, import, and otherwise transfer the Work, + where such license applies only to those patent claims licensable + by such Contributor that are necessarily infringed by their + Contribution(s) alone or by combination of their Contribution(s) + with the Work to which such Contribution(s) was submitted. 
If You + institute patent litigation against any entity (including a + cross-claim or counterclaim in a lawsuit) alleging that the Work + or a Contribution incorporated within the Work constitutes direct + or contributory patent infringement, then any patent licenses + granted to You under this License for that Work shall terminate + as of the date such litigation is filed. + + 4. Redistribution. You may reproduce and distribute copies of the + Work or Derivative Works thereof in any medium, with or without + modifications, and in Source or Object form, provided that You + meet the following conditions: + + (a) You must give any other recipients of the Work or + Derivative Works a copy of this License; and + + (b) You must cause any modified files to carry prominent notices + stating that You changed the files; and + + (c) You must retain, in the Source form of any Derivative Works + that You distribute, all copyright, patent, trademark, and + attribution notices from the Source form of the Work, + excluding those notices that do not pertain to any part of + the Derivative Works; and + + (d) If the Work includes a "NOTICE" text file as part of its + distribution, then any Derivative Works that You distribute must + include a readable copy of the attribution notices contained + within such NOTICE file, excluding those notices that do not + pertain to any part of the Derivative Works, in at least one + of the following places: within a NOTICE text file distributed + as part of the Derivative Works; within the Source form or + documentation, if provided along with the Derivative Works; or, + within a display generated by the Derivative Works, if and + wherever such third-party notices normally appear. The contents + of the NOTICE file are for informational purposes only and + do not modify the License. 
You may add Your own attribution + notices within Derivative Works that You distribute, alongside + or as an addendum to the NOTICE text from the Work, provided + that such additional attribution notices cannot be construed + as modifying the License. + + You may add Your own copyright statement to Your modifications and + may provide additional or different license terms and conditions + for use, reproduction, or distribution of Your modifications, or + for any such Derivative Works as a whole, provided Your use, + reproduction, and distribution of the Work otherwise complies with + the conditions stated in this License. + + 5. Submission of Contributions. Unless You explicitly state otherwise, + any Contribution intentionally submitted for inclusion in the Work + by You to the Licensor shall be under the terms and conditions of + this License, without any additional terms or conditions. + Notwithstanding the above, nothing herein shall supersede or modify + the terms of any separate license agreement you may have executed + with Licensor regarding such Contributions. + + 6. Trademarks. This License does not grant permission to use the trade + names, trademarks, service marks, or product names of the Licensor, + except as required for reasonable and customary use in describing the + origin of the Work and reproducing the content of the NOTICE file. + + 7. Disclaimer of Warranty. Unless required by applicable law or + agreed to in writing, Licensor provides the Work (and each + Contributor provides its Contributions) on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or + implied, including, without limitation, any warranties or conditions + of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A + PARTICULAR PURPOSE. You are solely responsible for determining the + appropriateness of using or redistributing the Work and assume any + risks associated with Your exercise of permissions under this License. + + 8. 
Limitation of Liability. In no event and under no legal theory, + whether in tort (including negligence), contract, or otherwise, + unless required by applicable law (such as deliberate and grossly + negligent acts) or agreed to in writing, shall any Contributor be + liable to You for damages, including any direct, indirect, special, + incidental, or consequential damages of any character arising as a + result of this License or out of the use or inability to use the + Work (including but not limited to damages for loss of goodwill, + work stoppage, computer failure or malfunction, or any and all + other commercial damages or losses), even if such Contributor + has been advised of the possibility of such damages. + + 9. Accepting Warranty or Additional Liability. While redistributing + the Work or Derivative Works thereof, You may choose to offer, + and charge a fee for, acceptance of support, warranty, indemnity, + or other liability obligations and/or rights consistent with this + License. However, in accepting such obligations, You may act only + on Your own behalf and on Your sole responsibility, not on behalf + of any other Contributor, and only if You agree to indemnify, + defend, and hold each Contributor harmless for any liability + incurred by, or claims asserted against, such Contributor by reason + of your accepting any such warranty or additional liability. + + END OF TERMS AND CONDITIONS + + APPENDIX: How to apply the Apache License to your work. + + To apply the Apache License to your work, attach the following + boilerplate notice, with the fields enclosed by brackets "{}" + replaced with your own identifying information. (Don't include + the brackets!) The text should be enclosed in the appropriate + comment syntax for the file format. We also recommend that a + file or class name and description of purpose be included on the + same "printed page" as the copyright notice for easier + identification within third-party archives. 
+ + Copyright 2018-2020 Amazon.com, Inc. or its affiliates. All Rights Reserved. + + Licensed under the Apache License, Version 2.0 (the "License"); + you may not use this file except in compliance with the License. + You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + + Unless required by applicable law or agreed to in writing, software + distributed under the License is distributed on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + See the License for the specific language governing permissions and + limitations under the License. \ No newline at end of file diff --git a/amplify/functions/fetchDocuments/node_modules/@smithy/hash-node/README.md b/amplify/functions/fetchDocuments/node_modules/@smithy/hash-node/README.md new file mode 100644 index 0000000..a160019 --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@smithy/hash-node/README.md @@ -0,0 +1,10 @@ +# @smithy/md5-node + +[![NPM version](https://img.shields.io/npm/v/@smithy/hash-node/latest.svg)](https://www.npmjs.com/package/@smithy/hash-node) +[![NPM downloads](https://img.shields.io/npm/dm/@smithy/hash-node.svg)](https://www.npmjs.com/package/@smithy/hash-node) + +> An internal package + +## Usage + +You probably shouldn't, at least directly. 
diff --git a/amplify/functions/fetchDocuments/node_modules/@smithy/hash-node/dist-cjs/index.js b/amplify/functions/fetchDocuments/node_modules/@smithy/hash-node/dist-cjs/index.js new file mode 100644 index 0000000..fc7f7de --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@smithy/hash-node/dist-cjs/index.js @@ -0,0 +1,67 @@ +var __defProp = Object.defineProperty; +var __getOwnPropDesc = Object.getOwnPropertyDescriptor; +var __getOwnPropNames = Object.getOwnPropertyNames; +var __hasOwnProp = Object.prototype.hasOwnProperty; +var __name = (target, value) => __defProp(target, "name", { value, configurable: true }); +var __export = (target, all) => { + for (var name in all) + __defProp(target, name, { get: all[name], enumerable: true }); +}; +var __copyProps = (to, from, except, desc) => { + if (from && typeof from === "object" || typeof from === "function") { + for (let key of __getOwnPropNames(from)) + if (!__hasOwnProp.call(to, key) && key !== except) + __defProp(to, key, { get: () => from[key], enumerable: !(desc = __getOwnPropDesc(from, key)) || desc.enumerable }); + } + return to; +}; +var __toCommonJS = (mod) => __copyProps(__defProp({}, "__esModule", { value: true }), mod); + +// src/index.ts +var src_exports = {}; +__export(src_exports, { + Hash: () => Hash +}); +module.exports = __toCommonJS(src_exports); +var import_util_buffer_from = require("@smithy/util-buffer-from"); +var import_util_utf8 = require("@smithy/util-utf8"); +var import_buffer = require("buffer"); +var import_crypto = require("crypto"); +var Hash = class { + static { + __name(this, "Hash"); + } + constructor(algorithmIdentifier, secret) { + this.algorithmIdentifier = algorithmIdentifier; + this.secret = secret; + this.reset(); + } + update(toHash, encoding) { + this.hash.update((0, import_util_utf8.toUint8Array)(castSourceData(toHash, encoding))); + } + digest() { + return Promise.resolve(this.hash.digest()); + } + reset() { + this.hash = this.secret ? 
(0, import_crypto.createHmac)(this.algorithmIdentifier, castSourceData(this.secret)) : (0, import_crypto.createHash)(this.algorithmIdentifier); + } +}; +function castSourceData(toCast, encoding) { + if (import_buffer.Buffer.isBuffer(toCast)) { + return toCast; + } + if (typeof toCast === "string") { + return (0, import_util_buffer_from.fromString)(toCast, encoding); + } + if (ArrayBuffer.isView(toCast)) { + return (0, import_util_buffer_from.fromArrayBuffer)(toCast.buffer, toCast.byteOffset, toCast.byteLength); + } + return (0, import_util_buffer_from.fromArrayBuffer)(toCast); +} +__name(castSourceData, "castSourceData"); +// Annotate the CommonJS export names for ESM import in node: + +0 && (module.exports = { + Hash +}); + diff --git a/amplify/functions/fetchDocuments/node_modules/@smithy/hash-node/dist-es/index.js b/amplify/functions/fetchDocuments/node_modules/@smithy/hash-node/dist-es/index.js new file mode 100644 index 0000000..718d9c6 --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@smithy/hash-node/dist-es/index.js @@ -0,0 +1,34 @@ +import { fromArrayBuffer, fromString } from "@smithy/util-buffer-from"; +import { toUint8Array } from "@smithy/util-utf8"; +import { Buffer } from "buffer"; +import { createHash, createHmac } from "crypto"; +export class Hash { + constructor(algorithmIdentifier, secret) { + this.algorithmIdentifier = algorithmIdentifier; + this.secret = secret; + this.reset(); + } + update(toHash, encoding) { + this.hash.update(toUint8Array(castSourceData(toHash, encoding))); + } + digest() { + return Promise.resolve(this.hash.digest()); + } + reset() { + this.hash = this.secret + ? 
createHmac(this.algorithmIdentifier, castSourceData(this.secret)) + : createHash(this.algorithmIdentifier); + } +} +function castSourceData(toCast, encoding) { + if (Buffer.isBuffer(toCast)) { + return toCast; + } + if (typeof toCast === "string") { + return fromString(toCast, encoding); + } + if (ArrayBuffer.isView(toCast)) { + return fromArrayBuffer(toCast.buffer, toCast.byteOffset, toCast.byteLength); + } + return fromArrayBuffer(toCast); +} diff --git a/amplify/functions/fetchDocuments/node_modules/@smithy/hash-node/dist-types/index.d.ts b/amplify/functions/fetchDocuments/node_modules/@smithy/hash-node/dist-types/index.d.ts new file mode 100644 index 0000000..20ed5ed --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@smithy/hash-node/dist-types/index.d.ts @@ -0,0 +1,13 @@ +import { Checksum, SourceData } from "@smithy/types"; +/** + * @internal + */ +export declare class Hash implements Checksum { + private readonly algorithmIdentifier; + private readonly secret?; + private hash; + constructor(algorithmIdentifier: string, secret?: SourceData); + update(toHash: SourceData, encoding?: "utf8" | "ascii" | "latin1"): void; + digest(): Promise; + reset(): void; +} diff --git a/amplify/functions/fetchDocuments/node_modules/@smithy/hash-node/dist-types/ts3.4/index.d.ts b/amplify/functions/fetchDocuments/node_modules/@smithy/hash-node/dist-types/ts3.4/index.d.ts new file mode 100644 index 0000000..313ab7e --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@smithy/hash-node/dist-types/ts3.4/index.d.ts @@ -0,0 +1,13 @@ +import { Checksum, SourceData } from "@smithy/types"; +/** + * @internal + */ +export declare class Hash implements Checksum { + private readonly algorithmIdentifier; + private readonly secret?; + private hash; + constructor(algorithmIdentifier: string, secret?: SourceData); + update(toHash: SourceData, encoding?: "utf8" | "ascii" | "latin1"): void; + digest(): Promise; + reset(): void; +} diff --git 
a/amplify/functions/fetchDocuments/node_modules/@smithy/hash-node/package.json b/amplify/functions/fetchDocuments/node_modules/@smithy/hash-node/package.json new file mode 100644 index 0000000..527b45a --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@smithy/hash-node/package.json @@ -0,0 +1,64 @@ +{ + "name": "@smithy/hash-node", + "version": "4.0.2", + "scripts": { + "build": "concurrently 'yarn:build:cjs' 'yarn:build:es' 'yarn:build:types && yarn build:types:downlevel'", + "build:cjs": "node ../../scripts/inline hash-node", + "build:es": "yarn g:tsc -p tsconfig.es.json", + "build:types": "yarn g:tsc -p tsconfig.types.json", + "build:types:downlevel": "rimraf dist-types/ts3.4 && downlevel-dts dist-types dist-types/ts3.4", + "stage-release": "rimraf ./.release && yarn pack && mkdir ./.release && tar zxvf ./package.tgz --directory ./.release && rm ./package.tgz", + "clean": "rimraf ./dist-* && rimraf *.tsbuildinfo || exit 0", + "lint": "eslint -c ../../.eslintrc.js \"src/**/*.ts\"", + "format": "prettier --config ../../prettier.config.js --ignore-path ../../.prettierignore --write \"**/*.{ts,md,json}\"", + "test": "yarn g:vitest run", + "test:watch": "yarn g:vitest watch" + }, + "main": "./dist-cjs/index.js", + "module": "./dist-es/index.js", + "types": "./dist-types/index.d.ts", + "author": { + "name": "AWS SDK for JavaScript Team", + "url": "https://aws.amazon.com/javascript/" + }, + "license": "Apache-2.0", + "devDependencies": { + "@types/node": "^18.11.9", + "concurrently": "7.0.0", + "downlevel-dts": "0.10.1", + "hash-test-vectors": "^1.3.2", + "rimraf": "3.0.2", + "typedoc": "0.23.23" + }, + "dependencies": { + "@smithy/types": "^4.2.0", + "@smithy/util-buffer-from": "^4.0.0", + "@smithy/util-utf8": "^4.0.0", + "tslib": "^2.6.2" + }, + "engines": { + "node": ">=18.0.0" + }, + "typesVersions": { + "<4.0": { + "dist-types/*": [ + "dist-types/ts3.4/*" + ] + } + }, + "files": [ + "dist-*/**" + ], + "homepage": 
"https://github.com/smithy-lang/smithy-typescript/tree/main/packages/hash-node", + "repository": { + "type": "git", + "url": "https://github.com/smithy-lang/smithy-typescript.git", + "directory": "packages/hash-node" + }, + "typedoc": { + "entryPoint": "src/index.ts" + }, + "publishConfig": { + "directory": ".release/package" + } +} \ No newline at end of file diff --git a/amplify/functions/fetchDocuments/node_modules/@smithy/invalid-dependency/LICENSE b/amplify/functions/fetchDocuments/node_modules/@smithy/invalid-dependency/LICENSE new file mode 100644 index 0000000..e907b58 --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@smithy/invalid-dependency/LICENSE @@ -0,0 +1,201 @@ + Apache License + Version 2.0, January 2004 + http://www.apache.org/licenses/ + + TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION + + 1. Definitions. + + "License" shall mean the terms and conditions for use, reproduction, + and distribution as defined by Sections 1 through 9 of this document. + + "Licensor" shall mean the copyright owner or entity authorized by + the copyright owner that is granting the License. + + "Legal Entity" shall mean the union of the acting entity and all + other entities that control, are controlled by, or are under common + control with that entity. For the purposes of this definition, + "control" means (i) the power, direct or indirect, to cause the + direction or management of such entity, whether by contract or + otherwise, or (ii) ownership of fifty percent (50%) or more of the + outstanding shares, or (iii) beneficial ownership of such entity. + + "You" (or "Your") shall mean an individual or Legal Entity + exercising permissions granted by this License. + + "Source" form shall mean the preferred form for making modifications, + including but not limited to software source code, documentation + source, and configuration files. 
+ + "Object" form shall mean any form resulting from mechanical + transformation or translation of a Source form, including but + not limited to compiled object code, generated documentation, + and conversions to other media types. + + "Work" shall mean the work of authorship, whether in Source or + Object form, made available under the License, as indicated by a + copyright notice that is included in or attached to the work + (an example is provided in the Appendix below). + + "Derivative Works" shall mean any work, whether in Source or Object + form, that is based on (or derived from) the Work and for which the + editorial revisions, annotations, elaborations, or other modifications + represent, as a whole, an original work of authorship. For the purposes + of this License, Derivative Works shall not include works that remain + separable from, or merely link (or bind by name) to the interfaces of, + the Work and Derivative Works thereof. + + "Contribution" shall mean any work of authorship, including + the original version of the Work and any modifications or additions + to that Work or Derivative Works thereof, that is intentionally + submitted to Licensor for inclusion in the Work by the copyright owner + or by an individual or Legal Entity authorized to submit on behalf of + the copyright owner. For the purposes of this definition, "submitted" + means any form of electronic, verbal, or written communication sent + to the Licensor or its representatives, including but not limited to + communication on electronic mailing lists, source code control systems, + and issue tracking systems that are managed by, or on behalf of, the + Licensor for the purpose of discussing and improving the Work, but + excluding communication that is conspicuously marked or otherwise + designated in writing by the copyright owner as "Not a Contribution." 
+ + "Contributor" shall mean Licensor and any individual or Legal Entity + on behalf of whom a Contribution has been received by Licensor and + subsequently incorporated within the Work. + + 2. Grant of Copyright License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + copyright license to reproduce, prepare Derivative Works of, + publicly display, publicly perform, sublicense, and distribute the + Work and such Derivative Works in Source or Object form. + + 3. Grant of Patent License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + (except as stated in this section) patent license to make, have made, + use, offer to sell, sell, import, and otherwise transfer the Work, + where such license applies only to those patent claims licensable + by such Contributor that are necessarily infringed by their + Contribution(s) alone or by combination of their Contribution(s) + with the Work to which such Contribution(s) was submitted. If You + institute patent litigation against any entity (including a + cross-claim or counterclaim in a lawsuit) alleging that the Work + or a Contribution incorporated within the Work constitutes direct + or contributory patent infringement, then any patent licenses + granted to You under this License for that Work shall terminate + as of the date such litigation is filed. + + 4. Redistribution. 
You may reproduce and distribute copies of the + Work or Derivative Works thereof in any medium, with or without + modifications, and in Source or Object form, provided that You + meet the following conditions: + + (a) You must give any other recipients of the Work or + Derivative Works a copy of this License; and + + (b) You must cause any modified files to carry prominent notices + stating that You changed the files; and + + (c) You must retain, in the Source form of any Derivative Works + that You distribute, all copyright, patent, trademark, and + attribution notices from the Source form of the Work, + excluding those notices that do not pertain to any part of + the Derivative Works; and + + (d) If the Work includes a "NOTICE" text file as part of its + distribution, then any Derivative Works that You distribute must + include a readable copy of the attribution notices contained + within such NOTICE file, excluding those notices that do not + pertain to any part of the Derivative Works, in at least one + of the following places: within a NOTICE text file distributed + as part of the Derivative Works; within the Source form or + documentation, if provided along with the Derivative Works; or, + within a display generated by the Derivative Works, if and + wherever such third-party notices normally appear. The contents + of the NOTICE file are for informational purposes only and + do not modify the License. You may add Your own attribution + notices within Derivative Works that You distribute, alongside + or as an addendum to the NOTICE text from the Work, provided + that such additional attribution notices cannot be construed + as modifying the License. 
+ + You may add Your own copyright statement to Your modifications and + may provide additional or different license terms and conditions + for use, reproduction, or distribution of Your modifications, or + for any such Derivative Works as a whole, provided Your use, + reproduction, and distribution of the Work otherwise complies with + the conditions stated in this License. + + 5. Submission of Contributions. Unless You explicitly state otherwise, + any Contribution intentionally submitted for inclusion in the Work + by You to the Licensor shall be under the terms and conditions of + this License, without any additional terms or conditions. + Notwithstanding the above, nothing herein shall supersede or modify + the terms of any separate license agreement you may have executed + with Licensor regarding such Contributions. + + 6. Trademarks. This License does not grant permission to use the trade + names, trademarks, service marks, or product names of the Licensor, + except as required for reasonable and customary use in describing the + origin of the Work and reproducing the content of the NOTICE file. + + 7. Disclaimer of Warranty. Unless required by applicable law or + agreed to in writing, Licensor provides the Work (and each + Contributor provides its Contributions) on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or + implied, including, without limitation, any warranties or conditions + of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A + PARTICULAR PURPOSE. You are solely responsible for determining the + appropriateness of using or redistributing the Work and assume any + risks associated with Your exercise of permissions under this License. + + 8. Limitation of Liability. 
In no event and under no legal theory, + whether in tort (including negligence), contract, or otherwise, + unless required by applicable law (such as deliberate and grossly + negligent acts) or agreed to in writing, shall any Contributor be + liable to You for damages, including any direct, indirect, special, + incidental, or consequential damages of any character arising as a + result of this License or out of the use or inability to use the + Work (including but not limited to damages for loss of goodwill, + work stoppage, computer failure or malfunction, or any and all + other commercial damages or losses), even if such Contributor + has been advised of the possibility of such damages. + + 9. Accepting Warranty or Additional Liability. While redistributing + the Work or Derivative Works thereof, You may choose to offer, + and charge a fee for, acceptance of support, warranty, indemnity, + or other liability obligations and/or rights consistent with this + License. However, in accepting such obligations, You may act only + on Your own behalf and on Your sole responsibility, not on behalf + of any other Contributor, and only if You agree to indemnify, + defend, and hold each Contributor harmless for any liability + incurred by, or claims asserted against, such Contributor by reason + of your accepting any such warranty or additional liability. + + END OF TERMS AND CONDITIONS + + APPENDIX: How to apply the Apache License to your work. + + To apply the Apache License to your work, attach the following + boilerplate notice, with the fields enclosed by brackets "{}" + replaced with your own identifying information. (Don't include + the brackets!) The text should be enclosed in the appropriate + comment syntax for the file format. We also recommend that a + file or class name and description of purpose be included on the + same "printed page" as the copyright notice for easier + identification within third-party archives. + + Copyright 2019 Amazon.com, Inc. 
or its affiliates. All Rights Reserved. + + Licensed under the Apache License, Version 2.0 (the "License"); + you may not use this file except in compliance with the License. + You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + + Unless required by applicable law or agreed to in writing, software + distributed under the License is distributed on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + See the License for the specific language governing permissions and + limitations under the License. diff --git a/amplify/functions/fetchDocuments/node_modules/@smithy/invalid-dependency/README.md b/amplify/functions/fetchDocuments/node_modules/@smithy/invalid-dependency/README.md new file mode 100644 index 0000000..9110465 --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@smithy/invalid-dependency/README.md @@ -0,0 +1,10 @@ +# @smithy/invalid-dependency + +[![NPM version](https://img.shields.io/npm/v/@smithy/invalid-dependency/latest.svg)](https://www.npmjs.com/package/@smithy/invalid-dependency) +[![NPM downloads](https://img.shields.io/npm/dm/@smithy/invalid-dependency.svg)](https://www.npmjs.com/package/@smithy/invalid-dependency) + +> An internal package + +## Usage + +You probably shouldn't, at least directly. 
diff --git a/amplify/functions/fetchDocuments/node_modules/@smithy/invalid-dependency/dist-cjs/index.js b/amplify/functions/fetchDocuments/node_modules/@smithy/invalid-dependency/dist-cjs/index.js new file mode 100644 index 0000000..8eeb1d4 --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@smithy/invalid-dependency/dist-cjs/index.js @@ -0,0 +1,41 @@ +var __defProp = Object.defineProperty; +var __getOwnPropDesc = Object.getOwnPropertyDescriptor; +var __getOwnPropNames = Object.getOwnPropertyNames; +var __hasOwnProp = Object.prototype.hasOwnProperty; +var __name = (target, value) => __defProp(target, "name", { value, configurable: true }); +var __export = (target, all) => { + for (var name in all) + __defProp(target, name, { get: all[name], enumerable: true }); +}; +var __copyProps = (to, from, except, desc) => { + if (from && typeof from === "object" || typeof from === "function") { + for (let key of __getOwnPropNames(from)) + if (!__hasOwnProp.call(to, key) && key !== except) + __defProp(to, key, { get: () => from[key], enumerable: !(desc = __getOwnPropDesc(from, key)) || desc.enumerable }); + } + return to; +}; +var __toCommonJS = (mod) => __copyProps(__defProp({}, "__esModule", { value: true }), mod); + +// src/index.ts +var src_exports = {}; +__export(src_exports, { + invalidFunction: () => invalidFunction, + invalidProvider: () => invalidProvider +}); +module.exports = __toCommonJS(src_exports); + +// src/invalidFunction.ts +var invalidFunction = /* @__PURE__ */ __name((message) => () => { + throw new Error(message); +}, "invalidFunction"); + +// src/invalidProvider.ts +var invalidProvider = /* @__PURE__ */ __name((message) => () => Promise.reject(message), "invalidProvider"); +// Annotate the CommonJS export names for ESM import in node: + +0 && (module.exports = { + invalidFunction, + invalidProvider +}); + diff --git a/amplify/functions/fetchDocuments/node_modules/@smithy/invalid-dependency/dist-cjs/invalidFunction.js 
b/amplify/functions/fetchDocuments/node_modules/@smithy/invalid-dependency/dist-cjs/invalidFunction.js new file mode 100644 index 0000000..532e610 --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@smithy/invalid-dependency/dist-cjs/invalidFunction.js @@ -0,0 +1 @@ +module.exports = require("./index.js"); \ No newline at end of file diff --git a/amplify/functions/fetchDocuments/node_modules/@smithy/invalid-dependency/dist-cjs/invalidProvider.js b/amplify/functions/fetchDocuments/node_modules/@smithy/invalid-dependency/dist-cjs/invalidProvider.js new file mode 100644 index 0000000..532e610 --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@smithy/invalid-dependency/dist-cjs/invalidProvider.js @@ -0,0 +1 @@ +module.exports = require("./index.js"); \ No newline at end of file diff --git a/amplify/functions/fetchDocuments/node_modules/@smithy/invalid-dependency/dist-es/index.js b/amplify/functions/fetchDocuments/node_modules/@smithy/invalid-dependency/dist-es/index.js new file mode 100644 index 0000000..fa0f1a6 --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@smithy/invalid-dependency/dist-es/index.js @@ -0,0 +1,2 @@ +export * from "./invalidFunction"; +export * from "./invalidProvider"; diff --git a/amplify/functions/fetchDocuments/node_modules/@smithy/invalid-dependency/dist-es/invalidFunction.js b/amplify/functions/fetchDocuments/node_modules/@smithy/invalid-dependency/dist-es/invalidFunction.js new file mode 100644 index 0000000..676f9cb --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@smithy/invalid-dependency/dist-es/invalidFunction.js @@ -0,0 +1,3 @@ +export const invalidFunction = (message) => () => { + throw new Error(message); +}; diff --git a/amplify/functions/fetchDocuments/node_modules/@smithy/invalid-dependency/dist-es/invalidProvider.js b/amplify/functions/fetchDocuments/node_modules/@smithy/invalid-dependency/dist-es/invalidProvider.js new file mode 100644 index 0000000..5305a0b --- 
/dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@smithy/invalid-dependency/dist-es/invalidProvider.js @@ -0,0 +1 @@ +export const invalidProvider = (message) => () => Promise.reject(message); diff --git a/amplify/functions/fetchDocuments/node_modules/@smithy/invalid-dependency/dist-types/index.d.ts b/amplify/functions/fetchDocuments/node_modules/@smithy/invalid-dependency/dist-types/index.d.ts new file mode 100644 index 0000000..1c99a56 --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@smithy/invalid-dependency/dist-types/index.d.ts @@ -0,0 +1,8 @@ +/** + * @internal + */ +export * from "./invalidFunction"; +/** + * @internal + */ +export * from "./invalidProvider"; diff --git a/amplify/functions/fetchDocuments/node_modules/@smithy/invalid-dependency/dist-types/invalidFunction.d.ts b/amplify/functions/fetchDocuments/node_modules/@smithy/invalid-dependency/dist-types/invalidFunction.d.ts new file mode 100644 index 0000000..2118b32 --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@smithy/invalid-dependency/dist-types/invalidFunction.d.ts @@ -0,0 +1,4 @@ +/** + * @internal + */ +export declare const invalidFunction: (message: string) => () => never; diff --git a/amplify/functions/fetchDocuments/node_modules/@smithy/invalid-dependency/dist-types/invalidProvider.d.ts b/amplify/functions/fetchDocuments/node_modules/@smithy/invalid-dependency/dist-types/invalidProvider.d.ts new file mode 100644 index 0000000..3e9c28c --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@smithy/invalid-dependency/dist-types/invalidProvider.d.ts @@ -0,0 +1,5 @@ +import { Provider } from "@smithy/types"; +/** + * @internal + */ +export declare const invalidProvider: (message: string) => Provider; diff --git a/amplify/functions/fetchDocuments/node_modules/@smithy/invalid-dependency/dist-types/ts3.4/index.d.ts b/amplify/functions/fetchDocuments/node_modules/@smithy/invalid-dependency/dist-types/ts3.4/index.d.ts new file mode 100644 
index 0000000..6818f1c --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@smithy/invalid-dependency/dist-types/ts3.4/index.d.ts @@ -0,0 +1,8 @@ +/** + * @internal + */ +export * from "./invalidFunction"; +/** + * @internal + */ +export * from "./invalidProvider"; diff --git a/amplify/functions/fetchDocuments/node_modules/@smithy/invalid-dependency/dist-types/ts3.4/invalidFunction.d.ts b/amplify/functions/fetchDocuments/node_modules/@smithy/invalid-dependency/dist-types/ts3.4/invalidFunction.d.ts new file mode 100644 index 0000000..b0e8f32 --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@smithy/invalid-dependency/dist-types/ts3.4/invalidFunction.d.ts @@ -0,0 +1,4 @@ +/** + * @internal + */ +export declare const invalidFunction: (message: string) => () => never; diff --git a/amplify/functions/fetchDocuments/node_modules/@smithy/invalid-dependency/dist-types/ts3.4/invalidProvider.d.ts b/amplify/functions/fetchDocuments/node_modules/@smithy/invalid-dependency/dist-types/ts3.4/invalidProvider.d.ts new file mode 100644 index 0000000..765ee5a --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@smithy/invalid-dependency/dist-types/ts3.4/invalidProvider.d.ts @@ -0,0 +1,5 @@ +import { Provider } from "@smithy/types"; +/** + * @internal + */ +export declare const invalidProvider: (message: string) => Provider; diff --git a/amplify/functions/fetchDocuments/node_modules/@smithy/invalid-dependency/package.json b/amplify/functions/fetchDocuments/node_modules/@smithy/invalid-dependency/package.json new file mode 100644 index 0000000..4782ea4 --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@smithy/invalid-dependency/package.json @@ -0,0 +1,60 @@ +{ + "name": "@smithy/invalid-dependency", + "version": "4.0.2", + "scripts": { + "build": "concurrently 'yarn:build:cjs' 'yarn:build:es' 'yarn:build:types && yarn build:types:downlevel'", + "build:cjs": "node ../../scripts/inline invalid-dependency", + "build:es": "yarn 
g:tsc -p tsconfig.es.json", + "build:types": "yarn g:tsc -p tsconfig.types.json", + "build:types:downlevel": "rimraf dist-types/ts3.4 && downlevel-dts dist-types dist-types/ts3.4", + "stage-release": "rimraf ./.release && yarn pack && mkdir ./.release && tar zxvf ./package.tgz --directory ./.release && rm ./package.tgz", + "clean": "rimraf ./dist-* && rimraf *.tsbuildinfo || exit 0", + "lint": "eslint -c ../../.eslintrc.js \"src/**/*.ts\"", + "format": "prettier --config ../../prettier.config.js --ignore-path ../../.prettierignore --write \"**/*.{ts,md,json}\"", + "test": "yarn g:vitest run", + "test:watch": "yarn g:vitest watch" + }, + "main": "./dist-cjs/index.js", + "module": "./dist-es/index.js", + "types": "./dist-types/index.d.ts", + "author": { + "name": "AWS SDK for JavaScript Team", + "url": "https://aws.amazon.com/javascript/" + }, + "license": "Apache-2.0", + "dependencies": { + "@smithy/types": "^4.2.0", + "tslib": "^2.6.2" + }, + "typesVersions": { + "<4.0": { + "dist-types/*": [ + "dist-types/ts3.4/*" + ] + } + }, + "files": [ + "dist-*/**" + ], + "homepage": "https://github.com/smithy-lang/smithy-typescript/tree/main/packages/invalid-dependency", + "repository": { + "type": "git", + "url": "https://github.com/smithy-lang/smithy-typescript.git", + "directory": "packages/invalid-dependency" + }, + "devDependencies": { + "concurrently": "7.0.0", + "downlevel-dts": "0.10.1", + "rimraf": "3.0.2", + "typedoc": "0.23.23" + }, + "typedoc": { + "entryPoint": "src/index.ts" + }, + "publishConfig": { + "directory": ".release/package" + }, + "engines": { + "node": ">=18.0.0" + } +} \ No newline at end of file diff --git a/amplify/functions/fetchDocuments/node_modules/@smithy/is-array-buffer/LICENSE b/amplify/functions/fetchDocuments/node_modules/@smithy/is-array-buffer/LICENSE new file mode 100644 index 0000000..7b6491b --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@smithy/is-array-buffer/LICENSE @@ -0,0 +1,201 @@ +Apache License + Version 
2.0, January 2004 + http://www.apache.org/licenses/ + + TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION + + 1. Definitions. + + "License" shall mean the terms and conditions for use, reproduction, + and distribution as defined by Sections 1 through 9 of this document. + + "Licensor" shall mean the copyright owner or entity authorized by + the copyright owner that is granting the License. + + "Legal Entity" shall mean the union of the acting entity and all + other entities that control, are controlled by, or are under common + control with that entity. For the purposes of this definition, + "control" means (i) the power, direct or indirect, to cause the + direction or management of such entity, whether by contract or + otherwise, or (ii) ownership of fifty percent (50%) or more of the + outstanding shares, or (iii) beneficial ownership of such entity. + + "You" (or "Your") shall mean an individual or Legal Entity + exercising permissions granted by this License. + + "Source" form shall mean the preferred form for making modifications, + including but not limited to software source code, documentation + source, and configuration files. + + "Object" form shall mean any form resulting from mechanical + transformation or translation of a Source form, including but + not limited to compiled object code, generated documentation, + and conversions to other media types. + + "Work" shall mean the work of authorship, whether in Source or + Object form, made available under the License, as indicated by a + copyright notice that is included in or attached to the work + (an example is provided in the Appendix below). + + "Derivative Works" shall mean any work, whether in Source or Object + form, that is based on (or derived from) the Work and for which the + editorial revisions, annotations, elaborations, or other modifications + represent, as a whole, an original work of authorship. 
For the purposes + of this License, Derivative Works shall not include works that remain + separable from, or merely link (or bind by name) to the interfaces of, + the Work and Derivative Works thereof. + + "Contribution" shall mean any work of authorship, including + the original version of the Work and any modifications or additions + to that Work or Derivative Works thereof, that is intentionally + submitted to Licensor for inclusion in the Work by the copyright owner + or by an individual or Legal Entity authorized to submit on behalf of + the copyright owner. For the purposes of this definition, "submitted" + means any form of electronic, verbal, or written communication sent + to the Licensor or its representatives, including but not limited to + communication on electronic mailing lists, source code control systems, + and issue tracking systems that are managed by, or on behalf of, the + Licensor for the purpose of discussing and improving the Work, but + excluding communication that is conspicuously marked or otherwise + designated in writing by the copyright owner as "Not a Contribution." + + "Contributor" shall mean Licensor and any individual or Legal Entity + on behalf of whom a Contribution has been received by Licensor and + subsequently incorporated within the Work. + + 2. Grant of Copyright License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + copyright license to reproduce, prepare Derivative Works of, + publicly display, publicly perform, sublicense, and distribute the + Work and such Derivative Works in Source or Object form. + + 3. Grant of Patent License. 
Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + (except as stated in this section) patent license to make, have made, + use, offer to sell, sell, import, and otherwise transfer the Work, + where such license applies only to those patent claims licensable + by such Contributor that are necessarily infringed by their + Contribution(s) alone or by combination of their Contribution(s) + with the Work to which such Contribution(s) was submitted. If You + institute patent litigation against any entity (including a + cross-claim or counterclaim in a lawsuit) alleging that the Work + or a Contribution incorporated within the Work constitutes direct + or contributory patent infringement, then any patent licenses + granted to You under this License for that Work shall terminate + as of the date such litigation is filed. + + 4. Redistribution. You may reproduce and distribute copies of the + Work or Derivative Works thereof in any medium, with or without + modifications, and in Source or Object form, provided that You + meet the following conditions: + + (a) You must give any other recipients of the Work or + Derivative Works a copy of this License; and + + (b) You must cause any modified files to carry prominent notices + stating that You changed the files; and + + (c) You must retain, in the Source form of any Derivative Works + that You distribute, all copyright, patent, trademark, and + attribution notices from the Source form of the Work, + excluding those notices that do not pertain to any part of + the Derivative Works; and + + (d) If the Work includes a "NOTICE" text file as part of its + distribution, then any Derivative Works that You distribute must + include a readable copy of the attribution notices contained + within such NOTICE file, excluding those notices that do not + pertain to any part of the Derivative Works, in at least one + of 
the following places: within a NOTICE text file distributed + as part of the Derivative Works; within the Source form or + documentation, if provided along with the Derivative Works; or, + within a display generated by the Derivative Works, if and + wherever such third-party notices normally appear. The contents + of the NOTICE file are for informational purposes only and + do not modify the License. You may add Your own attribution + notices within Derivative Works that You distribute, alongside + or as an addendum to the NOTICE text from the Work, provided + that such additional attribution notices cannot be construed + as modifying the License. + + You may add Your own copyright statement to Your modifications and + may provide additional or different license terms and conditions + for use, reproduction, or distribution of Your modifications, or + for any such Derivative Works as a whole, provided Your use, + reproduction, and distribution of the Work otherwise complies with + the conditions stated in this License. + + 5. Submission of Contributions. Unless You explicitly state otherwise, + any Contribution intentionally submitted for inclusion in the Work + by You to the Licensor shall be under the terms and conditions of + this License, without any additional terms or conditions. + Notwithstanding the above, nothing herein shall supersede or modify + the terms of any separate license agreement you may have executed + with Licensor regarding such Contributions. + + 6. Trademarks. This License does not grant permission to use the trade + names, trademarks, service marks, or product names of the Licensor, + except as required for reasonable and customary use in describing the + origin of the Work and reproducing the content of the NOTICE file. + + 7. Disclaimer of Warranty. 
Unless required by applicable law or + agreed to in writing, Licensor provides the Work (and each + Contributor provides its Contributions) on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or + implied, including, without limitation, any warranties or conditions + of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A + PARTICULAR PURPOSE. You are solely responsible for determining the + appropriateness of using or redistributing the Work and assume any + risks associated with Your exercise of permissions under this License. + + 8. Limitation of Liability. In no event and under no legal theory, + whether in tort (including negligence), contract, or otherwise, + unless required by applicable law (such as deliberate and grossly + negligent acts) or agreed to in writing, shall any Contributor be + liable to You for damages, including any direct, indirect, special, + incidental, or consequential damages of any character arising as a + result of this License or out of the use or inability to use the + Work (including but not limited to damages for loss of goodwill, + work stoppage, computer failure or malfunction, or any and all + other commercial damages or losses), even if such Contributor + has been advised of the possibility of such damages. + + 9. Accepting Warranty or Additional Liability. While redistributing + the Work or Derivative Works thereof, You may choose to offer, + and charge a fee for, acceptance of support, warranty, indemnity, + or other liability obligations and/or rights consistent with this + License. However, in accepting such obligations, You may act only + on Your own behalf and on Your sole responsibility, not on behalf + of any other Contributor, and only if You agree to indemnify, + defend, and hold each Contributor harmless for any liability + incurred by, or claims asserted against, such Contributor by reason + of your accepting any such warranty or additional liability. 
+ + END OF TERMS AND CONDITIONS + + APPENDIX: How to apply the Apache License to your work. + + To apply the Apache License to your work, attach the following + boilerplate notice, with the fields enclosed by brackets "{}" + replaced with your own identifying information. (Don't include + the brackets!) The text should be enclosed in the appropriate + comment syntax for the file format. We also recommend that a + file or class name and description of purpose be included on the + same "printed page" as the copyright notice for easier + identification within third-party archives. + + Copyright 2018-2020 Amazon.com, Inc. or its affiliates. All Rights Reserved. + + Licensed under the Apache License, Version 2.0 (the "License"); + you may not use this file except in compliance with the License. + You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + + Unless required by applicable law or agreed to in writing, software + distributed under the License is distributed on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + See the License for the specific language governing permissions and + limitations under the License. \ No newline at end of file diff --git a/amplify/functions/fetchDocuments/node_modules/@smithy/is-array-buffer/README.md b/amplify/functions/fetchDocuments/node_modules/@smithy/is-array-buffer/README.md new file mode 100644 index 0000000..31853f2 --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@smithy/is-array-buffer/README.md @@ -0,0 +1,10 @@ +# @smithy/is-array-buffer + +[![NPM version](https://img.shields.io/npm/v/@smithy/is-array-buffer/latest.svg)](https://www.npmjs.com/package/@smithy/is-array-buffer) +[![NPM downloads](https://img.shields.io/npm/dm/@smithy/is-array-buffer.svg)](https://www.npmjs.com/package/@smithy/is-array-buffer) + +> An internal package + +## Usage + +You probably shouldn't, at least directly. 
diff --git a/amplify/functions/fetchDocuments/node_modules/@smithy/is-array-buffer/dist-cjs/index.js b/amplify/functions/fetchDocuments/node_modules/@smithy/is-array-buffer/dist-cjs/index.js new file mode 100644 index 0000000..5d792e7 --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@smithy/is-array-buffer/dist-cjs/index.js @@ -0,0 +1,32 @@ +var __defProp = Object.defineProperty; +var __getOwnPropDesc = Object.getOwnPropertyDescriptor; +var __getOwnPropNames = Object.getOwnPropertyNames; +var __hasOwnProp = Object.prototype.hasOwnProperty; +var __name = (target, value) => __defProp(target, "name", { value, configurable: true }); +var __export = (target, all) => { + for (var name in all) + __defProp(target, name, { get: all[name], enumerable: true }); +}; +var __copyProps = (to, from, except, desc) => { + if (from && typeof from === "object" || typeof from === "function") { + for (let key of __getOwnPropNames(from)) + if (!__hasOwnProp.call(to, key) && key !== except) + __defProp(to, key, { get: () => from[key], enumerable: !(desc = __getOwnPropDesc(from, key)) || desc.enumerable }); + } + return to; +}; +var __toCommonJS = (mod) => __copyProps(__defProp({}, "__esModule", { value: true }), mod); + +// src/index.ts +var src_exports = {}; +__export(src_exports, { + isArrayBuffer: () => isArrayBuffer +}); +module.exports = __toCommonJS(src_exports); +var isArrayBuffer = /* @__PURE__ */ __name((arg) => typeof ArrayBuffer === "function" && arg instanceof ArrayBuffer || Object.prototype.toString.call(arg) === "[object ArrayBuffer]", "isArrayBuffer"); +// Annotate the CommonJS export names for ESM import in node: + +0 && (module.exports = { + isArrayBuffer +}); + diff --git a/amplify/functions/fetchDocuments/node_modules/@smithy/is-array-buffer/dist-es/index.js b/amplify/functions/fetchDocuments/node_modules/@smithy/is-array-buffer/dist-es/index.js new file mode 100644 index 0000000..8096cca --- /dev/null +++ 
b/amplify/functions/fetchDocuments/node_modules/@smithy/is-array-buffer/dist-es/index.js @@ -0,0 +1,2 @@ +export const isArrayBuffer = (arg) => (typeof ArrayBuffer === "function" && arg instanceof ArrayBuffer) || + Object.prototype.toString.call(arg) === "[object ArrayBuffer]"; diff --git a/amplify/functions/fetchDocuments/node_modules/@smithy/is-array-buffer/dist-types/index.d.ts b/amplify/functions/fetchDocuments/node_modules/@smithy/is-array-buffer/dist-types/index.d.ts new file mode 100644 index 0000000..64f452e --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@smithy/is-array-buffer/dist-types/index.d.ts @@ -0,0 +1,4 @@ +/** + * @internal + */ +export declare const isArrayBuffer: (arg: any) => arg is ArrayBuffer; diff --git a/amplify/functions/fetchDocuments/node_modules/@smithy/is-array-buffer/dist-types/ts3.4/index.d.ts b/amplify/functions/fetchDocuments/node_modules/@smithy/is-array-buffer/dist-types/ts3.4/index.d.ts new file mode 100644 index 0000000..ca8fd6b --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@smithy/is-array-buffer/dist-types/ts3.4/index.d.ts @@ -0,0 +1,4 @@ +/** + * @internal + */ +export declare const isArrayBuffer: (arg: any) => arg is ArrayBuffer; diff --git a/amplify/functions/fetchDocuments/node_modules/@smithy/is-array-buffer/package.json b/amplify/functions/fetchDocuments/node_modules/@smithy/is-array-buffer/package.json new file mode 100644 index 0000000..93a468c --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@smithy/is-array-buffer/package.json @@ -0,0 +1,60 @@ +{ + "name": "@smithy/is-array-buffer", + "version": "4.0.0", + "description": "Provides a function for detecting if an argument is an ArrayBuffer", + "scripts": { + "build": "concurrently 'yarn:build:cjs' 'yarn:build:es' 'yarn:build:types && yarn build:types:downlevel'", + "build:cjs": "node ../../scripts/inline is-array-buffer", + "build:es": "yarn g:tsc -p tsconfig.es.json", + "build:types": "yarn g:tsc -p 
tsconfig.types.json", + "build:types:downlevel": "rimraf dist-types/ts3.4 && downlevel-dts dist-types dist-types/ts3.4", + "stage-release": "rimraf ./.release && yarn pack && mkdir ./.release && tar zxvf ./package.tgz --directory ./.release && rm ./package.tgz", + "clean": "rimraf ./dist-* && rimraf *.tsbuildinfo || exit 0", + "lint": "eslint -c ../../.eslintrc.js \"src/**/*.ts\"", + "format": "prettier --config ../../prettier.config.js --ignore-path ../.prettierignore --write \"**/*.{ts,md,json}\"", + "test": "yarn g:vitest run", + "test:watch": "yarn g:vitest watch" + }, + "author": { + "name": "AWS SDK for JavaScript Team", + "url": "https://aws.amazon.com/javascript/" + }, + "license": "Apache-2.0", + "main": "./dist-cjs/index.js", + "module": "./dist-es/index.js", + "types": "./dist-types/index.d.ts", + "dependencies": { + "tslib": "^2.6.2" + }, + "engines": { + "node": ">=18.0.0" + }, + "typesVersions": { + "<4.0": { + "dist-types/*": [ + "dist-types/ts3.4/*" + ] + } + }, + "files": [ + "dist-*/**" + ], + "homepage": "https://github.com/awslabs/smithy-typescript/tree/main/packages/is-array-buffer", + "repository": { + "type": "git", + "url": "https://github.com/awslabs/smithy-typescript.git", + "directory": "packages/is-array-buffer" + }, + "devDependencies": { + "concurrently": "7.0.0", + "downlevel-dts": "0.10.1", + "rimraf": "3.0.2", + "typedoc": "0.23.23" + }, + "typedoc": { + "entryPoint": "src/index.ts" + }, + "publishConfig": { + "directory": ".release/package" + } +} \ No newline at end of file diff --git a/amplify/functions/fetchDocuments/node_modules/@smithy/middleware-content-length/LICENSE b/amplify/functions/fetchDocuments/node_modules/@smithy/middleware-content-length/LICENSE new file mode 100644 index 0000000..7b6491b --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@smithy/middleware-content-length/LICENSE @@ -0,0 +1,201 @@ +Apache License + Version 2.0, January 2004 + http://www.apache.org/licenses/ + + TERMS AND CONDITIONS 
FOR USE, REPRODUCTION, AND DISTRIBUTION + + 1. Definitions. + + "License" shall mean the terms and conditions for use, reproduction, + and distribution as defined by Sections 1 through 9 of this document. + + "Licensor" shall mean the copyright owner or entity authorized by + the copyright owner that is granting the License. + + "Legal Entity" shall mean the union of the acting entity and all + other entities that control, are controlled by, or are under common + control with that entity. For the purposes of this definition, + "control" means (i) the power, direct or indirect, to cause the + direction or management of such entity, whether by contract or + otherwise, or (ii) ownership of fifty percent (50%) or more of the + outstanding shares, or (iii) beneficial ownership of such entity. + + "You" (or "Your") shall mean an individual or Legal Entity + exercising permissions granted by this License. + + "Source" form shall mean the preferred form for making modifications, + including but not limited to software source code, documentation + source, and configuration files. + + "Object" form shall mean any form resulting from mechanical + transformation or translation of a Source form, including but + not limited to compiled object code, generated documentation, + and conversions to other media types. + + "Work" shall mean the work of authorship, whether in Source or + Object form, made available under the License, as indicated by a + copyright notice that is included in or attached to the work + (an example is provided in the Appendix below). + + "Derivative Works" shall mean any work, whether in Source or Object + form, that is based on (or derived from) the Work and for which the + editorial revisions, annotations, elaborations, or other modifications + represent, as a whole, an original work of authorship. 
For the purposes + of this License, Derivative Works shall not include works that remain + separable from, or merely link (or bind by name) to the interfaces of, + the Work and Derivative Works thereof. + + "Contribution" shall mean any work of authorship, including + the original version of the Work and any modifications or additions + to that Work or Derivative Works thereof, that is intentionally + submitted to Licensor for inclusion in the Work by the copyright owner + or by an individual or Legal Entity authorized to submit on behalf of + the copyright owner. For the purposes of this definition, "submitted" + means any form of electronic, verbal, or written communication sent + to the Licensor or its representatives, including but not limited to + communication on electronic mailing lists, source code control systems, + and issue tracking systems that are managed by, or on behalf of, the + Licensor for the purpose of discussing and improving the Work, but + excluding communication that is conspicuously marked or otherwise + designated in writing by the copyright owner as "Not a Contribution." + + "Contributor" shall mean Licensor and any individual or Legal Entity + on behalf of whom a Contribution has been received by Licensor and + subsequently incorporated within the Work. + + 2. Grant of Copyright License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + copyright license to reproduce, prepare Derivative Works of, + publicly display, publicly perform, sublicense, and distribute the + Work and such Derivative Works in Source or Object form. + + 3. Grant of Patent License. 
Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + (except as stated in this section) patent license to make, have made, + use, offer to sell, sell, import, and otherwise transfer the Work, + where such license applies only to those patent claims licensable + by such Contributor that are necessarily infringed by their + Contribution(s) alone or by combination of their Contribution(s) + with the Work to which such Contribution(s) was submitted. If You + institute patent litigation against any entity (including a + cross-claim or counterclaim in a lawsuit) alleging that the Work + or a Contribution incorporated within the Work constitutes direct + or contributory patent infringement, then any patent licenses + granted to You under this License for that Work shall terminate + as of the date such litigation is filed. + + 4. Redistribution. You may reproduce and distribute copies of the + Work or Derivative Works thereof in any medium, with or without + modifications, and in Source or Object form, provided that You + meet the following conditions: + + (a) You must give any other recipients of the Work or + Derivative Works a copy of this License; and + + (b) You must cause any modified files to carry prominent notices + stating that You changed the files; and + + (c) You must retain, in the Source form of any Derivative Works + that You distribute, all copyright, patent, trademark, and + attribution notices from the Source form of the Work, + excluding those notices that do not pertain to any part of + the Derivative Works; and + + (d) If the Work includes a "NOTICE" text file as part of its + distribution, then any Derivative Works that You distribute must + include a readable copy of the attribution notices contained + within such NOTICE file, excluding those notices that do not + pertain to any part of the Derivative Works, in at least one + of 
the following places: within a NOTICE text file distributed + as part of the Derivative Works; within the Source form or + documentation, if provided along with the Derivative Works; or, + within a display generated by the Derivative Works, if and + wherever such third-party notices normally appear. The contents + of the NOTICE file are for informational purposes only and + do not modify the License. You may add Your own attribution + notices within Derivative Works that You distribute, alongside + or as an addendum to the NOTICE text from the Work, provided + that such additional attribution notices cannot be construed + as modifying the License. + + You may add Your own copyright statement to Your modifications and + may provide additional or different license terms and conditions + for use, reproduction, or distribution of Your modifications, or + for any such Derivative Works as a whole, provided Your use, + reproduction, and distribution of the Work otherwise complies with + the conditions stated in this License. + + 5. Submission of Contributions. Unless You explicitly state otherwise, + any Contribution intentionally submitted for inclusion in the Work + by You to the Licensor shall be under the terms and conditions of + this License, without any additional terms or conditions. + Notwithstanding the above, nothing herein shall supersede or modify + the terms of any separate license agreement you may have executed + with Licensor regarding such Contributions. + + 6. Trademarks. This License does not grant permission to use the trade + names, trademarks, service marks, or product names of the Licensor, + except as required for reasonable and customary use in describing the + origin of the Work and reproducing the content of the NOTICE file. + + 7. Disclaimer of Warranty. 
Unless required by applicable law or + agreed to in writing, Licensor provides the Work (and each + Contributor provides its Contributions) on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or + implied, including, without limitation, any warranties or conditions + of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A + PARTICULAR PURPOSE. You are solely responsible for determining the + appropriateness of using or redistributing the Work and assume any + risks associated with Your exercise of permissions under this License. + + 8. Limitation of Liability. In no event and under no legal theory, + whether in tort (including negligence), contract, or otherwise, + unless required by applicable law (such as deliberate and grossly + negligent acts) or agreed to in writing, shall any Contributor be + liable to You for damages, including any direct, indirect, special, + incidental, or consequential damages of any character arising as a + result of this License or out of the use or inability to use the + Work (including but not limited to damages for loss of goodwill, + work stoppage, computer failure or malfunction, or any and all + other commercial damages or losses), even if such Contributor + has been advised of the possibility of such damages. + + 9. Accepting Warranty or Additional Liability. While redistributing + the Work or Derivative Works thereof, You may choose to offer, + and charge a fee for, acceptance of support, warranty, indemnity, + or other liability obligations and/or rights consistent with this + License. However, in accepting such obligations, You may act only + on Your own behalf and on Your sole responsibility, not on behalf + of any other Contributor, and only if You agree to indemnify, + defend, and hold each Contributor harmless for any liability + incurred by, or claims asserted against, such Contributor by reason + of your accepting any such warranty or additional liability. 
+ + END OF TERMS AND CONDITIONS + + APPENDIX: How to apply the Apache License to your work. + + To apply the Apache License to your work, attach the following + boilerplate notice, with the fields enclosed by brackets "{}" + replaced with your own identifying information. (Don't include + the brackets!) The text should be enclosed in the appropriate + comment syntax for the file format. We also recommend that a + file or class name and description of purpose be included on the + same "printed page" as the copyright notice for easier + identification within third-party archives. + + Copyright 2018-2020 Amazon.com, Inc. or its affiliates. All Rights Reserved. + + Licensed under the Apache License, Version 2.0 (the "License"); + you may not use this file except in compliance with the License. + You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + + Unless required by applicable law or agreed to in writing, software + distributed under the License is distributed on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + See the License for the specific language governing permissions and + limitations under the License. 
\ No newline at end of file diff --git a/amplify/functions/fetchDocuments/node_modules/@smithy/middleware-content-length/README.md b/amplify/functions/fetchDocuments/node_modules/@smithy/middleware-content-length/README.md new file mode 100644 index 0000000..2d40d92 --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@smithy/middleware-content-length/README.md @@ -0,0 +1,4 @@ +# @smithy/middleware-content-length + +[![NPM version](https://img.shields.io/npm/v/@smithy/middleware-content-length/latest.svg)](https://www.npmjs.com/package/@smithy/middleware-content-length) +[![NPM downloads](https://img.shields.io/npm/dm/@smithy/middleware-content-length.svg)](https://www.npmjs.com/package/@smithy/middleware-content-length) diff --git a/amplify/functions/fetchDocuments/node_modules/@smithy/middleware-content-length/dist-cjs/index.js b/amplify/functions/fetchDocuments/node_modules/@smithy/middleware-content-length/dist-cjs/index.js new file mode 100644 index 0000000..9585153 --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@smithy/middleware-content-length/dist-cjs/index.js @@ -0,0 +1,71 @@ +var __defProp = Object.defineProperty; +var __getOwnPropDesc = Object.getOwnPropertyDescriptor; +var __getOwnPropNames = Object.getOwnPropertyNames; +var __hasOwnProp = Object.prototype.hasOwnProperty; +var __name = (target, value) => __defProp(target, "name", { value, configurable: true }); +var __export = (target, all) => { + for (var name in all) + __defProp(target, name, { get: all[name], enumerable: true }); +}; +var __copyProps = (to, from, except, desc) => { + if (from && typeof from === "object" || typeof from === "function") { + for (let key of __getOwnPropNames(from)) + if (!__hasOwnProp.call(to, key) && key !== except) + __defProp(to, key, { get: () => from[key], enumerable: !(desc = __getOwnPropDesc(from, key)) || desc.enumerable }); + } + return to; +}; +var __toCommonJS = (mod) => __copyProps(__defProp({}, "__esModule", { value: true }), 
mod); + +// src/index.ts +var src_exports = {}; +__export(src_exports, { + contentLengthMiddleware: () => contentLengthMiddleware, + contentLengthMiddlewareOptions: () => contentLengthMiddlewareOptions, + getContentLengthPlugin: () => getContentLengthPlugin +}); +module.exports = __toCommonJS(src_exports); +var import_protocol_http = require("@smithy/protocol-http"); +var CONTENT_LENGTH_HEADER = "content-length"; +function contentLengthMiddleware(bodyLengthChecker) { + return (next) => async (args) => { + const request = args.request; + if (import_protocol_http.HttpRequest.isInstance(request)) { + const { body, headers } = request; + if (body && Object.keys(headers).map((str) => str.toLowerCase()).indexOf(CONTENT_LENGTH_HEADER) === -1) { + try { + const length = bodyLengthChecker(body); + request.headers = { + ...request.headers, + [CONTENT_LENGTH_HEADER]: String(length) + }; + } catch (error) { + } + } + } + return next({ + ...args, + request + }); + }; +} +__name(contentLengthMiddleware, "contentLengthMiddleware"); +var contentLengthMiddlewareOptions = { + step: "build", + tags: ["SET_CONTENT_LENGTH", "CONTENT_LENGTH"], + name: "contentLengthMiddleware", + override: true +}; +var getContentLengthPlugin = /* @__PURE__ */ __name((options) => ({ + applyToStack: (clientStack) => { + clientStack.add(contentLengthMiddleware(options.bodyLengthChecker), contentLengthMiddlewareOptions); + } +}), "getContentLengthPlugin"); +// Annotate the CommonJS export names for ESM import in node: + +0 && (module.exports = { + contentLengthMiddleware, + contentLengthMiddlewareOptions, + getContentLengthPlugin +}); + diff --git a/amplify/functions/fetchDocuments/node_modules/@smithy/middleware-content-length/dist-es/index.js b/amplify/functions/fetchDocuments/node_modules/@smithy/middleware-content-length/dist-es/index.js new file mode 100644 index 0000000..fa18e71 --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@smithy/middleware-content-length/dist-es/index.js @@ 
-0,0 +1,39 @@ +import { HttpRequest } from "@smithy/protocol-http"; +const CONTENT_LENGTH_HEADER = "content-length"; +export function contentLengthMiddleware(bodyLengthChecker) { + return (next) => async (args) => { + const request = args.request; + if (HttpRequest.isInstance(request)) { + const { body, headers } = request; + if (body && + Object.keys(headers) + .map((str) => str.toLowerCase()) + .indexOf(CONTENT_LENGTH_HEADER) === -1) { + try { + const length = bodyLengthChecker(body); + request.headers = { + ...request.headers, + [CONTENT_LENGTH_HEADER]: String(length), + }; + } + catch (error) { + } + } + } + return next({ + ...args, + request, + }); + }; +} +export const contentLengthMiddlewareOptions = { + step: "build", + tags: ["SET_CONTENT_LENGTH", "CONTENT_LENGTH"], + name: "contentLengthMiddleware", + override: true, +}; +export const getContentLengthPlugin = (options) => ({ + applyToStack: (clientStack) => { + clientStack.add(contentLengthMiddleware(options.bodyLengthChecker), contentLengthMiddlewareOptions); + }, +}); diff --git a/amplify/functions/fetchDocuments/node_modules/@smithy/middleware-content-length/dist-types/index.d.ts b/amplify/functions/fetchDocuments/node_modules/@smithy/middleware-content-length/dist-types/index.d.ts new file mode 100644 index 0000000..91a7000 --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@smithy/middleware-content-length/dist-types/index.d.ts @@ -0,0 +1,6 @@ +import { BodyLengthCalculator, BuildHandlerOptions, BuildMiddleware, Pluggable } from "@smithy/types"; +export declare function contentLengthMiddleware(bodyLengthChecker: BodyLengthCalculator): BuildMiddleware; +export declare const contentLengthMiddlewareOptions: BuildHandlerOptions; +export declare const getContentLengthPlugin: (options: { + bodyLengthChecker: BodyLengthCalculator; +}) => Pluggable; diff --git a/amplify/functions/fetchDocuments/node_modules/@smithy/middleware-content-length/dist-types/ts3.4/index.d.ts 
b/amplify/functions/fetchDocuments/node_modules/@smithy/middleware-content-length/dist-types/ts3.4/index.d.ts new file mode 100644 index 0000000..10e1e18 --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@smithy/middleware-content-length/dist-types/ts3.4/index.d.ts @@ -0,0 +1,6 @@ +import { BodyLengthCalculator, BuildHandlerOptions, BuildMiddleware, Pluggable } from "@smithy/types"; +export declare function contentLengthMiddleware(bodyLengthChecker: BodyLengthCalculator): BuildMiddleware; +export declare const contentLengthMiddlewareOptions: BuildHandlerOptions; +export declare const getContentLengthPlugin: (options: { + bodyLengthChecker: BodyLengthCalculator; +}) => Pluggable; diff --git a/amplify/functions/fetchDocuments/node_modules/@smithy/middleware-content-length/package.json b/amplify/functions/fetchDocuments/node_modules/@smithy/middleware-content-length/package.json new file mode 100644 index 0000000..807c95b --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@smithy/middleware-content-length/package.json @@ -0,0 +1,63 @@ +{ + "name": "@smithy/middleware-content-length", + "version": "4.0.2", + "scripts": { + "build": "concurrently 'yarn:build:cjs' 'yarn:build:es' 'yarn:build:types && yarn build:types:downlevel'", + "build:cjs": "node ../../scripts/inline middleware-content-length", + "build:es": "yarn g:tsc -p tsconfig.es.json", + "build:types": "yarn g:tsc -p tsconfig.types.json", + "build:types:downlevel": "rimraf dist-types/ts3.4 && downlevel-dts dist-types dist-types/ts3.4", + "stage-release": "rimraf ./.release && yarn pack && mkdir ./.release && tar zxvf ./package.tgz --directory ./.release && rm ./package.tgz", + "clean": "rimraf ./dist-* && rimraf *.tsbuildinfo || exit 0", + "lint": "eslint -c ../../.eslintrc.js \"src/**/*.ts\"", + "format": "prettier --config ../../prettier.config.js --ignore-path ../../.prettierignore --write \"**/*.{ts,md,json}\"", + "test": "exit 0", + "test:integration": "yarn g:vitest run -c 
vitest.config.integ.ts", + "test:integration:watch": "yarn g:vitest watch -c vitest.config.integ.ts" + }, + "main": "./dist-cjs/index.js", + "module": "./dist-es/index.js", + "types": "./dist-types/index.d.ts", + "author": { + "name": "AWS SDK for JavaScript Team", + "url": "https://aws.amazon.com/javascript/" + }, + "license": "Apache-2.0", + "dependencies": { + "@smithy/protocol-http": "^5.1.0", + "@smithy/types": "^4.2.0", + "tslib": "^2.6.2" + }, + "engines": { + "node": ">=18.0.0" + }, + "typesVersions": { + "<4.0": { + "dist-types/*": [ + "dist-types/ts3.4/*" + ] + } + }, + "files": [ + "dist-*/**" + ], + "homepage": "https://github.com/smithy-lang/smithy-typescript/tree/main/packages/middleware-content-length", + "repository": { + "type": "git", + "url": "https://github.com/smithy-lang/smithy-typescript.git", + "directory": "packages/middleware-content-length" + }, + "devDependencies": { + "@smithy/util-test": "^0.2.8", + "concurrently": "7.0.0", + "downlevel-dts": "0.10.1", + "rimraf": "3.0.2", + "typedoc": "0.23.23" + }, + "typedoc": { + "entryPoint": "src/index.ts" + }, + "publishConfig": { + "directory": ".release/package" + } +} \ No newline at end of file diff --git a/amplify/functions/fetchDocuments/node_modules/@smithy/middleware-endpoint/LICENSE b/amplify/functions/fetchDocuments/node_modules/@smithy/middleware-endpoint/LICENSE new file mode 100644 index 0000000..7b6491b --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@smithy/middleware-endpoint/LICENSE @@ -0,0 +1,201 @@ +Apache License + Version 2.0, January 2004 + http://www.apache.org/licenses/ + + TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION + + 1. Definitions. + + "License" shall mean the terms and conditions for use, reproduction, + and distribution as defined by Sections 1 through 9 of this document. + + "Licensor" shall mean the copyright owner or entity authorized by + the copyright owner that is granting the License. 
+ + "Legal Entity" shall mean the union of the acting entity and all + other entities that control, are controlled by, or are under common + control with that entity. For the purposes of this definition, + "control" means (i) the power, direct or indirect, to cause the + direction or management of such entity, whether by contract or + otherwise, or (ii) ownership of fifty percent (50%) or more of the + outstanding shares, or (iii) beneficial ownership of such entity. + + "You" (or "Your") shall mean an individual or Legal Entity + exercising permissions granted by this License. + + "Source" form shall mean the preferred form for making modifications, + including but not limited to software source code, documentation + source, and configuration files. + + "Object" form shall mean any form resulting from mechanical + transformation or translation of a Source form, including but + not limited to compiled object code, generated documentation, + and conversions to other media types. + + "Work" shall mean the work of authorship, whether in Source or + Object form, made available under the License, as indicated by a + copyright notice that is included in or attached to the work + (an example is provided in the Appendix below). + + "Derivative Works" shall mean any work, whether in Source or Object + form, that is based on (or derived from) the Work and for which the + editorial revisions, annotations, elaborations, or other modifications + represent, as a whole, an original work of authorship. For the purposes + of this License, Derivative Works shall not include works that remain + separable from, or merely link (or bind by name) to the interfaces of, + the Work and Derivative Works thereof. 
+ + "Contribution" shall mean any work of authorship, including + the original version of the Work and any modifications or additions + to that Work or Derivative Works thereof, that is intentionally + submitted to Licensor for inclusion in the Work by the copyright owner + or by an individual or Legal Entity authorized to submit on behalf of + the copyright owner. For the purposes of this definition, "submitted" + means any form of electronic, verbal, or written communication sent + to the Licensor or its representatives, including but not limited to + communication on electronic mailing lists, source code control systems, + and issue tracking systems that are managed by, or on behalf of, the + Licensor for the purpose of discussing and improving the Work, but + excluding communication that is conspicuously marked or otherwise + designated in writing by the copyright owner as "Not a Contribution." + + "Contributor" shall mean Licensor and any individual or Legal Entity + on behalf of whom a Contribution has been received by Licensor and + subsequently incorporated within the Work. + + 2. Grant of Copyright License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + copyright license to reproduce, prepare Derivative Works of, + publicly display, publicly perform, sublicense, and distribute the + Work and such Derivative Works in Source or Object form. + + 3. Grant of Patent License. 
Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + (except as stated in this section) patent license to make, have made, + use, offer to sell, sell, import, and otherwise transfer the Work, + where such license applies only to those patent claims licensable + by such Contributor that are necessarily infringed by their + Contribution(s) alone or by combination of their Contribution(s) + with the Work to which such Contribution(s) was submitted. If You + institute patent litigation against any entity (including a + cross-claim or counterclaim in a lawsuit) alleging that the Work + or a Contribution incorporated within the Work constitutes direct + or contributory patent infringement, then any patent licenses + granted to You under this License for that Work shall terminate + as of the date such litigation is filed. + + 4. Redistribution. You may reproduce and distribute copies of the + Work or Derivative Works thereof in any medium, with or without + modifications, and in Source or Object form, provided that You + meet the following conditions: + + (a) You must give any other recipients of the Work or + Derivative Works a copy of this License; and + + (b) You must cause any modified files to carry prominent notices + stating that You changed the files; and + + (c) You must retain, in the Source form of any Derivative Works + that You distribute, all copyright, patent, trademark, and + attribution notices from the Source form of the Work, + excluding those notices that do not pertain to any part of + the Derivative Works; and + + (d) If the Work includes a "NOTICE" text file as part of its + distribution, then any Derivative Works that You distribute must + include a readable copy of the attribution notices contained + within such NOTICE file, excluding those notices that do not + pertain to any part of the Derivative Works, in at least one + of 
the following places: within a NOTICE text file distributed + as part of the Derivative Works; within the Source form or + documentation, if provided along with the Derivative Works; or, + within a display generated by the Derivative Works, if and + wherever such third-party notices normally appear. The contents + of the NOTICE file are for informational purposes only and + do not modify the License. You may add Your own attribution + notices within Derivative Works that You distribute, alongside + or as an addendum to the NOTICE text from the Work, provided + that such additional attribution notices cannot be construed + as modifying the License. + + You may add Your own copyright statement to Your modifications and + may provide additional or different license terms and conditions + for use, reproduction, or distribution of Your modifications, or + for any such Derivative Works as a whole, provided Your use, + reproduction, and distribution of the Work otherwise complies with + the conditions stated in this License. + + 5. Submission of Contributions. Unless You explicitly state otherwise, + any Contribution intentionally submitted for inclusion in the Work + by You to the Licensor shall be under the terms and conditions of + this License, without any additional terms or conditions. + Notwithstanding the above, nothing herein shall supersede or modify + the terms of any separate license agreement you may have executed + with Licensor regarding such Contributions. + + 6. Trademarks. This License does not grant permission to use the trade + names, trademarks, service marks, or product names of the Licensor, + except as required for reasonable and customary use in describing the + origin of the Work and reproducing the content of the NOTICE file. + + 7. Disclaimer of Warranty. 
Unless required by applicable law or + agreed to in writing, Licensor provides the Work (and each + Contributor provides its Contributions) on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or + implied, including, without limitation, any warranties or conditions + of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A + PARTICULAR PURPOSE. You are solely responsible for determining the + appropriateness of using or redistributing the Work and assume any + risks associated with Your exercise of permissions under this License. + + 8. Limitation of Liability. In no event and under no legal theory, + whether in tort (including negligence), contract, or otherwise, + unless required by applicable law (such as deliberate and grossly + negligent acts) or agreed to in writing, shall any Contributor be + liable to You for damages, including any direct, indirect, special, + incidental, or consequential damages of any character arising as a + result of this License or out of the use or inability to use the + Work (including but not limited to damages for loss of goodwill, + work stoppage, computer failure or malfunction, or any and all + other commercial damages or losses), even if such Contributor + has been advised of the possibility of such damages. + + 9. Accepting Warranty or Additional Liability. While redistributing + the Work or Derivative Works thereof, You may choose to offer, + and charge a fee for, acceptance of support, warranty, indemnity, + or other liability obligations and/or rights consistent with this + License. However, in accepting such obligations, You may act only + on Your own behalf and on Your sole responsibility, not on behalf + of any other Contributor, and only if You agree to indemnify, + defend, and hold each Contributor harmless for any liability + incurred by, or claims asserted against, such Contributor by reason + of your accepting any such warranty or additional liability. 
+ + END OF TERMS AND CONDITIONS + + APPENDIX: How to apply the Apache License to your work. + + To apply the Apache License to your work, attach the following + boilerplate notice, with the fields enclosed by brackets "{}" + replaced with your own identifying information. (Don't include + the brackets!) The text should be enclosed in the appropriate + comment syntax for the file format. We also recommend that a + file or class name and description of purpose be included on the + same "printed page" as the copyright notice for easier + identification within third-party archives. + + Copyright 2018-2020 Amazon.com, Inc. or its affiliates. All Rights Reserved. + + Licensed under the Apache License, Version 2.0 (the "License"); + you may not use this file except in compliance with the License. + You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + + Unless required by applicable law or agreed to in writing, software + distributed under the License is distributed on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + See the License for the specific language governing permissions and + limitations under the License. \ No newline at end of file diff --git a/amplify/functions/fetchDocuments/node_modules/@smithy/middleware-endpoint/README.md b/amplify/functions/fetchDocuments/node_modules/@smithy/middleware-endpoint/README.md new file mode 100644 index 0000000..e03cbb2 --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@smithy/middleware-endpoint/README.md @@ -0,0 +1,10 @@ +# @smithy/middleware-endpoint + +[![NPM version](https://img.shields.io/npm/v/@smithy/middleware-endpoint/latest.svg)](https://www.npmjs.com/package/@smithy/middleware-endpoint) +[![NPM downloads](https://img.shields.io/npm/dm/@smithy/middleware-endpoint.svg)](https://www.npmjs.com/package/@smithy/middleware-endpoint) + +> An internal package + +## Usage + +You probably shouldn't, at least directly. 
diff --git a/amplify/functions/fetchDocuments/node_modules/@smithy/middleware-endpoint/dist-cjs/adaptors/createConfigValueProvider.js b/amplify/functions/fetchDocuments/node_modules/@smithy/middleware-endpoint/dist-cjs/adaptors/createConfigValueProvider.js new file mode 100644 index 0000000..0440577 --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@smithy/middleware-endpoint/dist-cjs/adaptors/createConfigValueProvider.js @@ -0,0 +1 @@ +module.exports = require("../index.js"); \ No newline at end of file diff --git a/amplify/functions/fetchDocuments/node_modules/@smithy/middleware-endpoint/dist-cjs/adaptors/getEndpointFromConfig.browser.js b/amplify/functions/fetchDocuments/node_modules/@smithy/middleware-endpoint/dist-cjs/adaptors/getEndpointFromConfig.browser.js new file mode 100644 index 0000000..9b578a7 --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@smithy/middleware-endpoint/dist-cjs/adaptors/getEndpointFromConfig.browser.js @@ -0,0 +1,5 @@ +"use strict"; +Object.defineProperty(exports, "__esModule", { value: true }); +exports.getEndpointFromConfig = void 0; +const getEndpointFromConfig = async (serviceId) => undefined; +exports.getEndpointFromConfig = getEndpointFromConfig; diff --git a/amplify/functions/fetchDocuments/node_modules/@smithy/middleware-endpoint/dist-cjs/adaptors/getEndpointFromConfig.js b/amplify/functions/fetchDocuments/node_modules/@smithy/middleware-endpoint/dist-cjs/adaptors/getEndpointFromConfig.js new file mode 100644 index 0000000..c7c302b --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@smithy/middleware-endpoint/dist-cjs/adaptors/getEndpointFromConfig.js @@ -0,0 +1,7 @@ +"use strict"; +Object.defineProperty(exports, "__esModule", { value: true }); +exports.getEndpointFromConfig = void 0; +const node_config_provider_1 = require("@smithy/node-config-provider"); +const getEndpointUrlConfig_1 = require("./getEndpointUrlConfig"); +const getEndpointFromConfig = async (serviceId) => (0, 
node_config_provider_1.loadConfig)((0, getEndpointUrlConfig_1.getEndpointUrlConfig)(serviceId !== null && serviceId !== void 0 ? serviceId : ""))(); +exports.getEndpointFromConfig = getEndpointFromConfig; diff --git a/amplify/functions/fetchDocuments/node_modules/@smithy/middleware-endpoint/dist-cjs/adaptors/getEndpointFromInstructions.js b/amplify/functions/fetchDocuments/node_modules/@smithy/middleware-endpoint/dist-cjs/adaptors/getEndpointFromInstructions.js new file mode 100644 index 0000000..0440577 --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@smithy/middleware-endpoint/dist-cjs/adaptors/getEndpointFromInstructions.js @@ -0,0 +1 @@ +module.exports = require("../index.js"); \ No newline at end of file diff --git a/amplify/functions/fetchDocuments/node_modules/@smithy/middleware-endpoint/dist-cjs/adaptors/getEndpointUrlConfig.js b/amplify/functions/fetchDocuments/node_modules/@smithy/middleware-endpoint/dist-cjs/adaptors/getEndpointUrlConfig.js new file mode 100644 index 0000000..fe5c010 --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@smithy/middleware-endpoint/dist-cjs/adaptors/getEndpointUrlConfig.js @@ -0,0 +1,35 @@ +"use strict"; +Object.defineProperty(exports, "__esModule", { value: true }); +exports.getEndpointUrlConfig = void 0; +const shared_ini_file_loader_1 = require("@smithy/shared-ini-file-loader"); +const ENV_ENDPOINT_URL = "AWS_ENDPOINT_URL"; +const CONFIG_ENDPOINT_URL = "endpoint_url"; +const getEndpointUrlConfig = (serviceId) => ({ + environmentVariableSelector: (env) => { + const serviceSuffixParts = serviceId.split(" ").map((w) => w.toUpperCase()); + const serviceEndpointUrl = env[[ENV_ENDPOINT_URL, ...serviceSuffixParts].join("_")]; + if (serviceEndpointUrl) + return serviceEndpointUrl; + const endpointUrl = env[ENV_ENDPOINT_URL]; + if (endpointUrl) + return endpointUrl; + return undefined; + }, + configFileSelector: (profile, config) => { + if (config && profile.services) { + const servicesSection = 
config[["services", profile.services].join(shared_ini_file_loader_1.CONFIG_PREFIX_SEPARATOR)]; + if (servicesSection) { + const servicePrefixParts = serviceId.split(" ").map((w) => w.toLowerCase()); + const endpointUrl = servicesSection[[servicePrefixParts.join("_"), CONFIG_ENDPOINT_URL].join(shared_ini_file_loader_1.CONFIG_PREFIX_SEPARATOR)]; + if (endpointUrl) + return endpointUrl; + } + } + const endpointUrl = profile[CONFIG_ENDPOINT_URL]; + if (endpointUrl) + return endpointUrl; + return undefined; + }, + default: undefined, +}); +exports.getEndpointUrlConfig = getEndpointUrlConfig; diff --git a/amplify/functions/fetchDocuments/node_modules/@smithy/middleware-endpoint/dist-cjs/adaptors/index.js b/amplify/functions/fetchDocuments/node_modules/@smithy/middleware-endpoint/dist-cjs/adaptors/index.js new file mode 100644 index 0000000..0440577 --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@smithy/middleware-endpoint/dist-cjs/adaptors/index.js @@ -0,0 +1 @@ +module.exports = require("../index.js"); \ No newline at end of file diff --git a/amplify/functions/fetchDocuments/node_modules/@smithy/middleware-endpoint/dist-cjs/adaptors/toEndpointV1.js b/amplify/functions/fetchDocuments/node_modules/@smithy/middleware-endpoint/dist-cjs/adaptors/toEndpointV1.js new file mode 100644 index 0000000..0440577 --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@smithy/middleware-endpoint/dist-cjs/adaptors/toEndpointV1.js @@ -0,0 +1 @@ +module.exports = require("../index.js"); \ No newline at end of file diff --git a/amplify/functions/fetchDocuments/node_modules/@smithy/middleware-endpoint/dist-cjs/endpointMiddleware.js b/amplify/functions/fetchDocuments/node_modules/@smithy/middleware-endpoint/dist-cjs/endpointMiddleware.js new file mode 100644 index 0000000..532e610 --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@smithy/middleware-endpoint/dist-cjs/endpointMiddleware.js @@ -0,0 +1 @@ +module.exports = require("./index.js"); \ 
No newline at end of file diff --git a/amplify/functions/fetchDocuments/node_modules/@smithy/middleware-endpoint/dist-cjs/getEndpointPlugin.js b/amplify/functions/fetchDocuments/node_modules/@smithy/middleware-endpoint/dist-cjs/getEndpointPlugin.js new file mode 100644 index 0000000..532e610 --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@smithy/middleware-endpoint/dist-cjs/getEndpointPlugin.js @@ -0,0 +1 @@ +module.exports = require("./index.js"); \ No newline at end of file diff --git a/amplify/functions/fetchDocuments/node_modules/@smithy/middleware-endpoint/dist-cjs/index.js b/amplify/functions/fetchDocuments/node_modules/@smithy/middleware-endpoint/dist-cjs/index.js new file mode 100644 index 0000000..177fdc3 --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@smithy/middleware-endpoint/dist-cjs/index.js @@ -0,0 +1,279 @@ +var __defProp = Object.defineProperty; +var __getOwnPropDesc = Object.getOwnPropertyDescriptor; +var __getOwnPropNames = Object.getOwnPropertyNames; +var __hasOwnProp = Object.prototype.hasOwnProperty; +var __name = (target, value) => __defProp(target, "name", { value, configurable: true }); +var __export = (target, all) => { + for (var name in all) + __defProp(target, name, { get: all[name], enumerable: true }); +}; +var __copyProps = (to, from, except, desc) => { + if (from && typeof from === "object" || typeof from === "function") { + for (let key of __getOwnPropNames(from)) + if (!__hasOwnProp.call(to, key) && key !== except) + __defProp(to, key, { get: () => from[key], enumerable: !(desc = __getOwnPropDesc(from, key)) || desc.enumerable }); + } + return to; +}; +var __toCommonJS = (mod) => __copyProps(__defProp({}, "__esModule", { value: true }), mod); + +// src/index.ts +var src_exports = {}; +__export(src_exports, { + endpointMiddleware: () => endpointMiddleware, + endpointMiddlewareOptions: () => endpointMiddlewareOptions, + getEndpointFromInstructions: () => getEndpointFromInstructions, + 
getEndpointPlugin: () => getEndpointPlugin, + resolveEndpointConfig: () => resolveEndpointConfig, + resolveParams: () => resolveParams, + toEndpointV1: () => toEndpointV1 +}); +module.exports = __toCommonJS(src_exports); + +// src/service-customizations/s3.ts +var resolveParamsForS3 = /* @__PURE__ */ __name(async (endpointParams) => { + const bucket = endpointParams?.Bucket || ""; + if (typeof endpointParams.Bucket === "string") { + endpointParams.Bucket = bucket.replace(/#/g, encodeURIComponent("#")).replace(/\?/g, encodeURIComponent("?")); + } + if (isArnBucketName(bucket)) { + if (endpointParams.ForcePathStyle === true) { + throw new Error("Path-style addressing cannot be used with ARN buckets"); + } + } else if (!isDnsCompatibleBucketName(bucket) || bucket.indexOf(".") !== -1 && !String(endpointParams.Endpoint).startsWith("http:") || bucket.toLowerCase() !== bucket || bucket.length < 3) { + endpointParams.ForcePathStyle = true; + } + if (endpointParams.DisableMultiRegionAccessPoints) { + endpointParams.disableMultiRegionAccessPoints = true; + endpointParams.DisableMRAP = true; + } + return endpointParams; +}, "resolveParamsForS3"); +var DOMAIN_PATTERN = /^[a-z0-9][a-z0-9\.\-]{1,61}[a-z0-9]$/; +var IP_ADDRESS_PATTERN = /(\d+\.){3}\d+/; +var DOTS_PATTERN = /\.\./; +var isDnsCompatibleBucketName = /* @__PURE__ */ __name((bucketName) => DOMAIN_PATTERN.test(bucketName) && !IP_ADDRESS_PATTERN.test(bucketName) && !DOTS_PATTERN.test(bucketName), "isDnsCompatibleBucketName"); +var isArnBucketName = /* @__PURE__ */ __name((bucketName) => { + const [arn, partition, service, , , bucket] = bucketName.split(":"); + const isArn = arn === "arn" && bucketName.split(":").length >= 6; + const isValidArn = Boolean(isArn && partition && service && bucket); + if (isArn && !isValidArn) { + throw new Error(`Invalid ARN: ${bucketName} was an invalid ARN.`); + } + return isValidArn; +}, "isArnBucketName"); + +// src/adaptors/createConfigValueProvider.ts +var createConfigValueProvider = 
/* @__PURE__ */ __name((configKey, canonicalEndpointParamKey, config) => { + const configProvider = /* @__PURE__ */ __name(async () => { + const configValue = config[configKey] ?? config[canonicalEndpointParamKey]; + if (typeof configValue === "function") { + return configValue(); + } + return configValue; + }, "configProvider"); + if (configKey === "credentialScope" || canonicalEndpointParamKey === "CredentialScope") { + return async () => { + const credentials = typeof config.credentials === "function" ? await config.credentials() : config.credentials; + const configValue = credentials?.credentialScope ?? credentials?.CredentialScope; + return configValue; + }; + } + if (configKey === "accountId" || canonicalEndpointParamKey === "AccountId") { + return async () => { + const credentials = typeof config.credentials === "function" ? await config.credentials() : config.credentials; + const configValue = credentials?.accountId ?? credentials?.AccountId; + return configValue; + }; + } + if (configKey === "endpoint" || canonicalEndpointParamKey === "endpoint") { + return async () => { + const endpoint = await configProvider(); + if (endpoint && typeof endpoint === "object") { + if ("url" in endpoint) { + return endpoint.url.href; + } + if ("hostname" in endpoint) { + const { protocol, hostname, port, path } = endpoint; + return `${protocol}//${hostname}${port ? 
":" + port : ""}${path}`; + } + } + return endpoint; + }; + } + return configProvider; +}, "createConfigValueProvider"); + +// src/adaptors/getEndpointFromInstructions.ts +var import_getEndpointFromConfig = require("./adaptors/getEndpointFromConfig"); + +// src/adaptors/toEndpointV1.ts +var import_url_parser = require("@smithy/url-parser"); +var toEndpointV1 = /* @__PURE__ */ __name((endpoint) => { + if (typeof endpoint === "object") { + if ("url" in endpoint) { + return (0, import_url_parser.parseUrl)(endpoint.url); + } + return endpoint; + } + return (0, import_url_parser.parseUrl)(endpoint); +}, "toEndpointV1"); + +// src/adaptors/getEndpointFromInstructions.ts +var getEndpointFromInstructions = /* @__PURE__ */ __name(async (commandInput, instructionsSupplier, clientConfig, context) => { + if (!clientConfig.endpoint) { + let endpointFromConfig; + if (clientConfig.serviceConfiguredEndpoint) { + endpointFromConfig = await clientConfig.serviceConfiguredEndpoint(); + } else { + endpointFromConfig = await (0, import_getEndpointFromConfig.getEndpointFromConfig)(clientConfig.serviceId); + } + if (endpointFromConfig) { + clientConfig.endpoint = () => Promise.resolve(toEndpointV1(endpointFromConfig)); + } + } + const endpointParams = await resolveParams(commandInput, instructionsSupplier, clientConfig); + if (typeof clientConfig.endpointProvider !== "function") { + throw new Error("config.endpointProvider is not set."); + } + const endpoint = clientConfig.endpointProvider(endpointParams, context); + return endpoint; +}, "getEndpointFromInstructions"); +var resolveParams = /* @__PURE__ */ __name(async (commandInput, instructionsSupplier, clientConfig) => { + const endpointParams = {}; + const instructions = instructionsSupplier?.getEndpointParameterInstructions?.() || {}; + for (const [name, instruction] of Object.entries(instructions)) { + switch (instruction.type) { + case "staticContextParams": + endpointParams[name] = instruction.value; + break; + case 
"contextParams": + endpointParams[name] = commandInput[instruction.name]; + break; + case "clientContextParams": + case "builtInParams": + endpointParams[name] = await createConfigValueProvider(instruction.name, name, clientConfig)(); + break; + case "operationContextParams": + endpointParams[name] = instruction.get(commandInput); + break; + default: + throw new Error("Unrecognized endpoint parameter instruction: " + JSON.stringify(instruction)); + } + } + if (Object.keys(instructions).length === 0) { + Object.assign(endpointParams, clientConfig); + } + if (String(clientConfig.serviceId).toLowerCase() === "s3") { + await resolveParamsForS3(endpointParams); + } + return endpointParams; +}, "resolveParams"); + +// src/endpointMiddleware.ts +var import_core = require("@smithy/core"); +var import_util_middleware = require("@smithy/util-middleware"); +var endpointMiddleware = /* @__PURE__ */ __name(({ + config, + instructions +}) => { + return (next, context) => async (args) => { + if (config.endpoint) { + (0, import_core.setFeature)(context, "ENDPOINT_OVERRIDE", "N"); + } + const endpoint = await getEndpointFromInstructions( + args.input, + { + getEndpointParameterInstructions() { + return instructions; + } + }, + { ...config }, + context + ); + context.endpointV2 = endpoint; + context.authSchemes = endpoint.properties?.authSchemes; + const authScheme = context.authSchemes?.[0]; + if (authScheme) { + context["signing_region"] = authScheme.signingRegion; + context["signing_service"] = authScheme.signingName; + const smithyContext = (0, import_util_middleware.getSmithyContext)(context); + const httpAuthOption = smithyContext?.selectedHttpAuthScheme?.httpAuthOption; + if (httpAuthOption) { + httpAuthOption.signingProperties = Object.assign( + httpAuthOption.signingProperties || {}, + { + signing_region: authScheme.signingRegion, + signingRegion: authScheme.signingRegion, + signing_service: authScheme.signingName, + signingName: authScheme.signingName, + signingRegionSet: 
authScheme.signingRegionSet + }, + authScheme.properties + ); + } + } + return next({ + ...args + }); + }; +}, "endpointMiddleware"); + +// src/getEndpointPlugin.ts +var import_middleware_serde = require("@smithy/middleware-serde"); +var endpointMiddlewareOptions = { + step: "serialize", + tags: ["ENDPOINT_PARAMETERS", "ENDPOINT_V2", "ENDPOINT"], + name: "endpointV2Middleware", + override: true, + relation: "before", + toMiddleware: import_middleware_serde.serializerMiddlewareOption.name +}; +var getEndpointPlugin = /* @__PURE__ */ __name((config, instructions) => ({ + applyToStack: (clientStack) => { + clientStack.addRelativeTo( + endpointMiddleware({ + config, + instructions + }), + endpointMiddlewareOptions + ); + } +}), "getEndpointPlugin"); + +// src/resolveEndpointConfig.ts + +var import_getEndpointFromConfig2 = require("./adaptors/getEndpointFromConfig"); +var resolveEndpointConfig = /* @__PURE__ */ __name((input) => { + const tls = input.tls ?? true; + const { endpoint, useDualstackEndpoint, useFipsEndpoint } = input; + const customEndpointProvider = endpoint != null ? async () => toEndpointV1(await (0, import_util_middleware.normalizeProvider)(endpoint)()) : void 0; + const isCustomEndpoint = !!endpoint; + const resolvedConfig = Object.assign(input, { + endpoint: customEndpointProvider, + tls, + isCustomEndpoint, + useDualstackEndpoint: (0, import_util_middleware.normalizeProvider)(useDualstackEndpoint ?? false), + useFipsEndpoint: (0, import_util_middleware.normalizeProvider)(useFipsEndpoint ?? 
false) + }); + let configuredEndpointPromise = void 0; + resolvedConfig.serviceConfiguredEndpoint = async () => { + if (input.serviceId && !configuredEndpointPromise) { + configuredEndpointPromise = (0, import_getEndpointFromConfig2.getEndpointFromConfig)(input.serviceId); + } + return configuredEndpointPromise; + }; + return resolvedConfig; +}, "resolveEndpointConfig"); +// Annotate the CommonJS export names for ESM import in node: + +0 && (module.exports = { + getEndpointFromInstructions, + resolveParams, + toEndpointV1, + endpointMiddleware, + endpointMiddlewareOptions, + getEndpointPlugin, + resolveEndpointConfig +}); + diff --git a/amplify/functions/fetchDocuments/node_modules/@smithy/middleware-endpoint/dist-cjs/resolveEndpointConfig.js b/amplify/functions/fetchDocuments/node_modules/@smithy/middleware-endpoint/dist-cjs/resolveEndpointConfig.js new file mode 100644 index 0000000..532e610 --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@smithy/middleware-endpoint/dist-cjs/resolveEndpointConfig.js @@ -0,0 +1 @@ +module.exports = require("./index.js"); \ No newline at end of file diff --git a/amplify/functions/fetchDocuments/node_modules/@smithy/middleware-endpoint/dist-cjs/service-customizations/index.js b/amplify/functions/fetchDocuments/node_modules/@smithy/middleware-endpoint/dist-cjs/service-customizations/index.js new file mode 100644 index 0000000..0440577 --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@smithy/middleware-endpoint/dist-cjs/service-customizations/index.js @@ -0,0 +1 @@ +module.exports = require("../index.js"); \ No newline at end of file diff --git a/amplify/functions/fetchDocuments/node_modules/@smithy/middleware-endpoint/dist-cjs/service-customizations/s3.js b/amplify/functions/fetchDocuments/node_modules/@smithy/middleware-endpoint/dist-cjs/service-customizations/s3.js new file mode 100644 index 0000000..0440577 --- /dev/null +++ 
b/amplify/functions/fetchDocuments/node_modules/@smithy/middleware-endpoint/dist-cjs/service-customizations/s3.js @@ -0,0 +1 @@ +module.exports = require("../index.js"); \ No newline at end of file diff --git a/amplify/functions/fetchDocuments/node_modules/@smithy/middleware-endpoint/dist-cjs/types.js b/amplify/functions/fetchDocuments/node_modules/@smithy/middleware-endpoint/dist-cjs/types.js new file mode 100644 index 0000000..532e610 --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@smithy/middleware-endpoint/dist-cjs/types.js @@ -0,0 +1 @@ +module.exports = require("./index.js"); \ No newline at end of file diff --git a/amplify/functions/fetchDocuments/node_modules/@smithy/middleware-endpoint/dist-es/adaptors/createConfigValueProvider.js b/amplify/functions/fetchDocuments/node_modules/@smithy/middleware-endpoint/dist-es/adaptors/createConfigValueProvider.js new file mode 100644 index 0000000..b468b83 --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@smithy/middleware-endpoint/dist-es/adaptors/createConfigValueProvider.js @@ -0,0 +1,39 @@ +export const createConfigValueProvider = (configKey, canonicalEndpointParamKey, config) => { + const configProvider = async () => { + const configValue = config[configKey] ?? config[canonicalEndpointParamKey]; + if (typeof configValue === "function") { + return configValue(); + } + return configValue; + }; + if (configKey === "credentialScope" || canonicalEndpointParamKey === "CredentialScope") { + return async () => { + const credentials = typeof config.credentials === "function" ? await config.credentials() : config.credentials; + const configValue = credentials?.credentialScope ?? credentials?.CredentialScope; + return configValue; + }; + } + if (configKey === "accountId" || canonicalEndpointParamKey === "AccountId") { + return async () => { + const credentials = typeof config.credentials === "function" ? 
await config.credentials() : config.credentials; + const configValue = credentials?.accountId ?? credentials?.AccountId; + return configValue; + }; + } + if (configKey === "endpoint" || canonicalEndpointParamKey === "endpoint") { + return async () => { + const endpoint = await configProvider(); + if (endpoint && typeof endpoint === "object") { + if ("url" in endpoint) { + return endpoint.url.href; + } + if ("hostname" in endpoint) { + const { protocol, hostname, port, path } = endpoint; + return `${protocol}//${hostname}${port ? ":" + port : ""}${path}`; + } + } + return endpoint; + }; + } + return configProvider; +}; diff --git a/amplify/functions/fetchDocuments/node_modules/@smithy/middleware-endpoint/dist-es/adaptors/getEndpointFromConfig.browser.js b/amplify/functions/fetchDocuments/node_modules/@smithy/middleware-endpoint/dist-es/adaptors/getEndpointFromConfig.browser.js new file mode 100644 index 0000000..75fc136 --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@smithy/middleware-endpoint/dist-es/adaptors/getEndpointFromConfig.browser.js @@ -0,0 +1 @@ +export const getEndpointFromConfig = async (serviceId) => undefined; diff --git a/amplify/functions/fetchDocuments/node_modules/@smithy/middleware-endpoint/dist-es/adaptors/getEndpointFromConfig.js b/amplify/functions/fetchDocuments/node_modules/@smithy/middleware-endpoint/dist-es/adaptors/getEndpointFromConfig.js new file mode 100644 index 0000000..33c1d45 --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@smithy/middleware-endpoint/dist-es/adaptors/getEndpointFromConfig.js @@ -0,0 +1,3 @@ +import { loadConfig } from "@smithy/node-config-provider"; +import { getEndpointUrlConfig } from "./getEndpointUrlConfig"; +export const getEndpointFromConfig = async (serviceId) => loadConfig(getEndpointUrlConfig(serviceId ?? 
""))(); diff --git a/amplify/functions/fetchDocuments/node_modules/@smithy/middleware-endpoint/dist-es/adaptors/getEndpointFromInstructions.js b/amplify/functions/fetchDocuments/node_modules/@smithy/middleware-endpoint/dist-es/adaptors/getEndpointFromInstructions.js new file mode 100644 index 0000000..e445646 --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@smithy/middleware-endpoint/dist-es/adaptors/getEndpointFromInstructions.js @@ -0,0 +1,54 @@ +import { resolveParamsForS3 } from "../service-customizations"; +import { createConfigValueProvider } from "./createConfigValueProvider"; +import { getEndpointFromConfig } from "./getEndpointFromConfig"; +import { toEndpointV1 } from "./toEndpointV1"; +export const getEndpointFromInstructions = async (commandInput, instructionsSupplier, clientConfig, context) => { + if (!clientConfig.endpoint) { + let endpointFromConfig; + if (clientConfig.serviceConfiguredEndpoint) { + endpointFromConfig = await clientConfig.serviceConfiguredEndpoint(); + } + else { + endpointFromConfig = await getEndpointFromConfig(clientConfig.serviceId); + } + if (endpointFromConfig) { + clientConfig.endpoint = () => Promise.resolve(toEndpointV1(endpointFromConfig)); + } + } + const endpointParams = await resolveParams(commandInput, instructionsSupplier, clientConfig); + if (typeof clientConfig.endpointProvider !== "function") { + throw new Error("config.endpointProvider is not set."); + } + const endpoint = clientConfig.endpointProvider(endpointParams, context); + return endpoint; +}; +export const resolveParams = async (commandInput, instructionsSupplier, clientConfig) => { + const endpointParams = {}; + const instructions = instructionsSupplier?.getEndpointParameterInstructions?.() || {}; + for (const [name, instruction] of Object.entries(instructions)) { + switch (instruction.type) { + case "staticContextParams": + endpointParams[name] = instruction.value; + break; + case "contextParams": + endpointParams[name] = 
commandInput[instruction.name]; + break; + case "clientContextParams": + case "builtInParams": + endpointParams[name] = await createConfigValueProvider(instruction.name, name, clientConfig)(); + break; + case "operationContextParams": + endpointParams[name] = instruction.get(commandInput); + break; + default: + throw new Error("Unrecognized endpoint parameter instruction: " + JSON.stringify(instruction)); + } + } + if (Object.keys(instructions).length === 0) { + Object.assign(endpointParams, clientConfig); + } + if (String(clientConfig.serviceId).toLowerCase() === "s3") { + await resolveParamsForS3(endpointParams); + } + return endpointParams; +}; diff --git a/amplify/functions/fetchDocuments/node_modules/@smithy/middleware-endpoint/dist-es/adaptors/getEndpointUrlConfig.js b/amplify/functions/fetchDocuments/node_modules/@smithy/middleware-endpoint/dist-es/adaptors/getEndpointUrlConfig.js new file mode 100644 index 0000000..82a1519 --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@smithy/middleware-endpoint/dist-es/adaptors/getEndpointUrlConfig.js @@ -0,0 +1,31 @@ +import { CONFIG_PREFIX_SEPARATOR } from "@smithy/shared-ini-file-loader"; +const ENV_ENDPOINT_URL = "AWS_ENDPOINT_URL"; +const CONFIG_ENDPOINT_URL = "endpoint_url"; +export const getEndpointUrlConfig = (serviceId) => ({ + environmentVariableSelector: (env) => { + const serviceSuffixParts = serviceId.split(" ").map((w) => w.toUpperCase()); + const serviceEndpointUrl = env[[ENV_ENDPOINT_URL, ...serviceSuffixParts].join("_")]; + if (serviceEndpointUrl) + return serviceEndpointUrl; + const endpointUrl = env[ENV_ENDPOINT_URL]; + if (endpointUrl) + return endpointUrl; + return undefined; + }, + configFileSelector: (profile, config) => { + if (config && profile.services) { + const servicesSection = config[["services", profile.services].join(CONFIG_PREFIX_SEPARATOR)]; + if (servicesSection) { + const servicePrefixParts = serviceId.split(" ").map((w) => w.toLowerCase()); + const endpointUrl = 
servicesSection[[servicePrefixParts.join("_"), CONFIG_ENDPOINT_URL].join(CONFIG_PREFIX_SEPARATOR)]; + if (endpointUrl) + return endpointUrl; + } + } + const endpointUrl = profile[CONFIG_ENDPOINT_URL]; + if (endpointUrl) + return endpointUrl; + return undefined; + }, + default: undefined, +}); diff --git a/amplify/functions/fetchDocuments/node_modules/@smithy/middleware-endpoint/dist-es/adaptors/index.js b/amplify/functions/fetchDocuments/node_modules/@smithy/middleware-endpoint/dist-es/adaptors/index.js new file mode 100644 index 0000000..17752da --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@smithy/middleware-endpoint/dist-es/adaptors/index.js @@ -0,0 +1,2 @@ +export * from "./getEndpointFromInstructions"; +export * from "./toEndpointV1"; diff --git a/amplify/functions/fetchDocuments/node_modules/@smithy/middleware-endpoint/dist-es/adaptors/toEndpointV1.js b/amplify/functions/fetchDocuments/node_modules/@smithy/middleware-endpoint/dist-es/adaptors/toEndpointV1.js new file mode 100644 index 0000000..83f4324 --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@smithy/middleware-endpoint/dist-es/adaptors/toEndpointV1.js @@ -0,0 +1,10 @@ +import { parseUrl } from "@smithy/url-parser"; +export const toEndpointV1 = (endpoint) => { + if (typeof endpoint === "object") { + if ("url" in endpoint) { + return parseUrl(endpoint.url); + } + return endpoint; + } + return parseUrl(endpoint); +}; diff --git a/amplify/functions/fetchDocuments/node_modules/@smithy/middleware-endpoint/dist-es/endpointMiddleware.js b/amplify/functions/fetchDocuments/node_modules/@smithy/middleware-endpoint/dist-es/endpointMiddleware.js new file mode 100644 index 0000000..df25795 --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@smithy/middleware-endpoint/dist-es/endpointMiddleware.js @@ -0,0 +1,36 @@ +import { setFeature } from "@smithy/core"; +import { getSmithyContext } from "@smithy/util-middleware"; +import { getEndpointFromInstructions } from 
"./adaptors/getEndpointFromInstructions"; +export const endpointMiddleware = ({ config, instructions, }) => { + return (next, context) => async (args) => { + if (config.endpoint) { + setFeature(context, "ENDPOINT_OVERRIDE", "N"); + } + const endpoint = await getEndpointFromInstructions(args.input, { + getEndpointParameterInstructions() { + return instructions; + }, + }, { ...config }, context); + context.endpointV2 = endpoint; + context.authSchemes = endpoint.properties?.authSchemes; + const authScheme = context.authSchemes?.[0]; + if (authScheme) { + context["signing_region"] = authScheme.signingRegion; + context["signing_service"] = authScheme.signingName; + const smithyContext = getSmithyContext(context); + const httpAuthOption = smithyContext?.selectedHttpAuthScheme?.httpAuthOption; + if (httpAuthOption) { + httpAuthOption.signingProperties = Object.assign(httpAuthOption.signingProperties || {}, { + signing_region: authScheme.signingRegion, + signingRegion: authScheme.signingRegion, + signing_service: authScheme.signingName, + signingName: authScheme.signingName, + signingRegionSet: authScheme.signingRegionSet, + }, authScheme.properties); + } + } + return next({ + ...args, + }); + }; +}; diff --git a/amplify/functions/fetchDocuments/node_modules/@smithy/middleware-endpoint/dist-es/getEndpointPlugin.js b/amplify/functions/fetchDocuments/node_modules/@smithy/middleware-endpoint/dist-es/getEndpointPlugin.js new file mode 100644 index 0000000..e2335f4 --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@smithy/middleware-endpoint/dist-es/getEndpointPlugin.js @@ -0,0 +1,18 @@ +import { serializerMiddlewareOption } from "@smithy/middleware-serde"; +import { endpointMiddleware } from "./endpointMiddleware"; +export const endpointMiddlewareOptions = { + step: "serialize", + tags: ["ENDPOINT_PARAMETERS", "ENDPOINT_V2", "ENDPOINT"], + name: "endpointV2Middleware", + override: true, + relation: "before", + toMiddleware: serializerMiddlewareOption.name, +}; 
+export const getEndpointPlugin = (config, instructions) => ({ + applyToStack: (clientStack) => { + clientStack.addRelativeTo(endpointMiddleware({ + config, + instructions, + }), endpointMiddlewareOptions); + }, +}); diff --git a/amplify/functions/fetchDocuments/node_modules/@smithy/middleware-endpoint/dist-es/index.js b/amplify/functions/fetchDocuments/node_modules/@smithy/middleware-endpoint/dist-es/index.js new file mode 100644 index 0000000..f89653e --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@smithy/middleware-endpoint/dist-es/index.js @@ -0,0 +1,5 @@ +export * from "./adaptors"; +export * from "./endpointMiddleware"; +export * from "./getEndpointPlugin"; +export * from "./resolveEndpointConfig"; +export * from "./types"; diff --git a/amplify/functions/fetchDocuments/node_modules/@smithy/middleware-endpoint/dist-es/resolveEndpointConfig.js b/amplify/functions/fetchDocuments/node_modules/@smithy/middleware-endpoint/dist-es/resolveEndpointConfig.js new file mode 100644 index 0000000..c3a0eea --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@smithy/middleware-endpoint/dist-es/resolveEndpointConfig.js @@ -0,0 +1,24 @@ +import { normalizeProvider } from "@smithy/util-middleware"; +import { getEndpointFromConfig } from "./adaptors/getEndpointFromConfig"; +import { toEndpointV1 } from "./adaptors/toEndpointV1"; +export const resolveEndpointConfig = (input) => { + const tls = input.tls ?? true; + const { endpoint, useDualstackEndpoint, useFipsEndpoint } = input; + const customEndpointProvider = endpoint != null ? async () => toEndpointV1(await normalizeProvider(endpoint)()) : undefined; + const isCustomEndpoint = !!endpoint; + const resolvedConfig = Object.assign(input, { + endpoint: customEndpointProvider, + tls, + isCustomEndpoint, + useDualstackEndpoint: normalizeProvider(useDualstackEndpoint ?? false), + useFipsEndpoint: normalizeProvider(useFipsEndpoint ?? 
false), + }); + let configuredEndpointPromise = undefined; + resolvedConfig.serviceConfiguredEndpoint = async () => { + if (input.serviceId && !configuredEndpointPromise) { + configuredEndpointPromise = getEndpointFromConfig(input.serviceId); + } + return configuredEndpointPromise; + }; + return resolvedConfig; +}; diff --git a/amplify/functions/fetchDocuments/node_modules/@smithy/middleware-endpoint/dist-es/service-customizations/index.js b/amplify/functions/fetchDocuments/node_modules/@smithy/middleware-endpoint/dist-es/service-customizations/index.js new file mode 100644 index 0000000..e50e107 --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@smithy/middleware-endpoint/dist-es/service-customizations/index.js @@ -0,0 +1 @@ +export * from "./s3"; diff --git a/amplify/functions/fetchDocuments/node_modules/@smithy/middleware-endpoint/dist-es/service-customizations/s3.js b/amplify/functions/fetchDocuments/node_modules/@smithy/middleware-endpoint/dist-es/service-customizations/s3.js new file mode 100644 index 0000000..e993fc7 --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@smithy/middleware-endpoint/dist-es/service-customizations/s3.js @@ -0,0 +1,37 @@ +export const resolveParamsForS3 = async (endpointParams) => { + const bucket = endpointParams?.Bucket || ""; + if (typeof endpointParams.Bucket === "string") { + endpointParams.Bucket = bucket.replace(/#/g, encodeURIComponent("#")).replace(/\?/g, encodeURIComponent("?")); + } + if (isArnBucketName(bucket)) { + if (endpointParams.ForcePathStyle === true) { + throw new Error("Path-style addressing cannot be used with ARN buckets"); + } + } + else if (!isDnsCompatibleBucketName(bucket) || + (bucket.indexOf(".") !== -1 && !String(endpointParams.Endpoint).startsWith("http:")) || + bucket.toLowerCase() !== bucket || + bucket.length < 3) { + endpointParams.ForcePathStyle = true; + } + if (endpointParams.DisableMultiRegionAccessPoints) { + endpointParams.disableMultiRegionAccessPoints = 
true; + endpointParams.DisableMRAP = true; + } + return endpointParams; +}; +const DOMAIN_PATTERN = /^[a-z0-9][a-z0-9\.\-]{1,61}[a-z0-9]$/; +const IP_ADDRESS_PATTERN = /(\d+\.){3}\d+/; +const DOTS_PATTERN = /\.\./; +export const DOT_PATTERN = /\./; +export const S3_HOSTNAME_PATTERN = /^(.+\.)?s3(-fips)?(\.dualstack)?[.-]([a-z0-9-]+)\./; +export const isDnsCompatibleBucketName = (bucketName) => DOMAIN_PATTERN.test(bucketName) && !IP_ADDRESS_PATTERN.test(bucketName) && !DOTS_PATTERN.test(bucketName); +export const isArnBucketName = (bucketName) => { + const [arn, partition, service, , , bucket] = bucketName.split(":"); + const isArn = arn === "arn" && bucketName.split(":").length >= 6; + const isValidArn = Boolean(isArn && partition && service && bucket); + if (isArn && !isValidArn) { + throw new Error(`Invalid ARN: ${bucketName} was an invalid ARN.`); + } + return isValidArn; +}; diff --git a/amplify/functions/fetchDocuments/node_modules/@smithy/middleware-endpoint/dist-es/types.js b/amplify/functions/fetchDocuments/node_modules/@smithy/middleware-endpoint/dist-es/types.js new file mode 100644 index 0000000..cb0ff5c --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@smithy/middleware-endpoint/dist-es/types.js @@ -0,0 +1 @@ +export {}; diff --git a/amplify/functions/fetchDocuments/node_modules/@smithy/middleware-endpoint/dist-types/adaptors/createConfigValueProvider.d.ts b/amplify/functions/fetchDocuments/node_modules/@smithy/middleware-endpoint/dist-types/adaptors/createConfigValueProvider.d.ts new file mode 100644 index 0000000..df65914 --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@smithy/middleware-endpoint/dist-types/adaptors/createConfigValueProvider.d.ts @@ -0,0 +1,13 @@ +/** + * Normalize some key of the client config to an async provider. + * @internal + * + * @param configKey - the key to look up in config. + * @param canonicalEndpointParamKey - this is the name the EndpointRuleSet uses. 
+ * it will most likely not contain the config + * value, but we use it as a fallback. + * @param config - container of the config values. + * + * @returns async function that will resolve with the value. + */ +export declare const createConfigValueProvider: >(configKey: string, canonicalEndpointParamKey: string, config: Config) => () => Promise; diff --git a/amplify/functions/fetchDocuments/node_modules/@smithy/middleware-endpoint/dist-types/adaptors/getEndpointFromConfig.browser.d.ts b/amplify/functions/fetchDocuments/node_modules/@smithy/middleware-endpoint/dist-types/adaptors/getEndpointFromConfig.browser.d.ts new file mode 100644 index 0000000..de05fa5 --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@smithy/middleware-endpoint/dist-types/adaptors/getEndpointFromConfig.browser.d.ts @@ -0,0 +1 @@ +export declare const getEndpointFromConfig: (serviceId: string) => Promise; diff --git a/amplify/functions/fetchDocuments/node_modules/@smithy/middleware-endpoint/dist-types/adaptors/getEndpointFromConfig.d.ts b/amplify/functions/fetchDocuments/node_modules/@smithy/middleware-endpoint/dist-types/adaptors/getEndpointFromConfig.d.ts new file mode 100644 index 0000000..42a3566 --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@smithy/middleware-endpoint/dist-types/adaptors/getEndpointFromConfig.d.ts @@ -0,0 +1,4 @@ +/** + * @internal + */ +export declare const getEndpointFromConfig: (serviceId?: string) => Promise; diff --git a/amplify/functions/fetchDocuments/node_modules/@smithy/middleware-endpoint/dist-types/adaptors/getEndpointFromInstructions.d.ts b/amplify/functions/fetchDocuments/node_modules/@smithy/middleware-endpoint/dist-types/adaptors/getEndpointFromInstructions.d.ts new file mode 100644 index 0000000..49cef2a --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@smithy/middleware-endpoint/dist-types/adaptors/getEndpointFromInstructions.d.ts @@ -0,0 +1,28 @@ +import { EndpointParameters, EndpointV2, 
HandlerExecutionContext } from "@smithy/types"; +import { EndpointResolvedConfig } from "../resolveEndpointConfig"; +import { EndpointParameterInstructions } from "../types"; +/** + * @internal + */ +export type EndpointParameterInstructionsSupplier = Partial<{ + getEndpointParameterInstructions(): EndpointParameterInstructions; +}>; +/** + * This step in the endpoint resolution process is exposed as a function + * to allow packages such as signers, lib-upload, etc. to get + * the V2 Endpoint associated to an instance of some api operation command + * without needing to send it or resolve its middleware stack. + * + * @internal + * @param commandInput - the input of the Command in question. + * @param instructionsSupplier - this is typically a Command constructor. A static function supplying the + * endpoint parameter instructions will exist for commands in services + * having an endpoints ruleset trait. + * @param clientConfig - config of the service client. + * @param context - optional context. 
+ */ +export declare const getEndpointFromInstructions: , Config extends Record>(commandInput: CommandInput, instructionsSupplier: EndpointParameterInstructionsSupplier, clientConfig: Partial> & Config, context?: HandlerExecutionContext) => Promise; +/** + * @internal + */ +export declare const resolveParams: , Config extends Record>(commandInput: CommandInput, instructionsSupplier: EndpointParameterInstructionsSupplier, clientConfig: Partial> & Config) => Promise; diff --git a/amplify/functions/fetchDocuments/node_modules/@smithy/middleware-endpoint/dist-types/adaptors/getEndpointUrlConfig.d.ts b/amplify/functions/fetchDocuments/node_modules/@smithy/middleware-endpoint/dist-types/adaptors/getEndpointUrlConfig.d.ts new file mode 100644 index 0000000..0971010 --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@smithy/middleware-endpoint/dist-types/adaptors/getEndpointUrlConfig.d.ts @@ -0,0 +1,2 @@ +import { LoadedConfigSelectors } from "@smithy/node-config-provider"; +export declare const getEndpointUrlConfig: (serviceId: string) => LoadedConfigSelectors; diff --git a/amplify/functions/fetchDocuments/node_modules/@smithy/middleware-endpoint/dist-types/adaptors/index.d.ts b/amplify/functions/fetchDocuments/node_modules/@smithy/middleware-endpoint/dist-types/adaptors/index.d.ts new file mode 100644 index 0000000..cc13488 --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@smithy/middleware-endpoint/dist-types/adaptors/index.d.ts @@ -0,0 +1,8 @@ +/** + * @internal + */ +export * from "./getEndpointFromInstructions"; +/** + * @internal + */ +export * from "./toEndpointV1"; diff --git a/amplify/functions/fetchDocuments/node_modules/@smithy/middleware-endpoint/dist-types/adaptors/toEndpointV1.d.ts b/amplify/functions/fetchDocuments/node_modules/@smithy/middleware-endpoint/dist-types/adaptors/toEndpointV1.d.ts new file mode 100644 index 0000000..834aabb --- /dev/null +++ 
b/amplify/functions/fetchDocuments/node_modules/@smithy/middleware-endpoint/dist-types/adaptors/toEndpointV1.d.ts @@ -0,0 +1,5 @@ +import { Endpoint, EndpointV2 } from "@smithy/types"; +/** + * @internal + */ +export declare const toEndpointV1: (endpoint: string | Endpoint | EndpointV2) => Endpoint; diff --git a/amplify/functions/fetchDocuments/node_modules/@smithy/middleware-endpoint/dist-types/endpointMiddleware.d.ts b/amplify/functions/fetchDocuments/node_modules/@smithy/middleware-endpoint/dist-types/endpointMiddleware.d.ts new file mode 100644 index 0000000..67cee64 --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@smithy/middleware-endpoint/dist-types/endpointMiddleware.d.ts @@ -0,0 +1,10 @@ +import { EndpointParameters, SerializeMiddleware } from "@smithy/types"; +import { EndpointResolvedConfig } from "./resolveEndpointConfig"; +import { EndpointParameterInstructions } from "./types"; +/** + * @internal + */ +export declare const endpointMiddleware: ({ config, instructions, }: { + config: EndpointResolvedConfig; + instructions: EndpointParameterInstructions; +}) => SerializeMiddleware; diff --git a/amplify/functions/fetchDocuments/node_modules/@smithy/middleware-endpoint/dist-types/getEndpointPlugin.d.ts b/amplify/functions/fetchDocuments/node_modules/@smithy/middleware-endpoint/dist-types/getEndpointPlugin.d.ts new file mode 100644 index 0000000..910f44d --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@smithy/middleware-endpoint/dist-types/getEndpointPlugin.d.ts @@ -0,0 +1,11 @@ +import { EndpointParameters, Pluggable, RelativeMiddlewareOptions, SerializeHandlerOptions } from "@smithy/types"; +import { EndpointResolvedConfig } from "./resolveEndpointConfig"; +import { EndpointParameterInstructions } from "./types"; +/** + * @internal + */ +export declare const endpointMiddlewareOptions: SerializeHandlerOptions & RelativeMiddlewareOptions; +/** + * @internal + */ +export declare const getEndpointPlugin: (config: 
EndpointResolvedConfig, instructions: EndpointParameterInstructions) => Pluggable; diff --git a/amplify/functions/fetchDocuments/node_modules/@smithy/middleware-endpoint/dist-types/index.d.ts b/amplify/functions/fetchDocuments/node_modules/@smithy/middleware-endpoint/dist-types/index.d.ts new file mode 100644 index 0000000..bea06cf --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@smithy/middleware-endpoint/dist-types/index.d.ts @@ -0,0 +1,17 @@ +/** + * @internal + */ +export * from "./adaptors"; +/** + * @internal + */ +export * from "./endpointMiddleware"; +/** + * @internal + */ +export * from "./getEndpointPlugin"; +export * from "./resolveEndpointConfig"; +/** + * @internal + */ +export * from "./types"; diff --git a/amplify/functions/fetchDocuments/node_modules/@smithy/middleware-endpoint/dist-types/resolveEndpointConfig.d.ts b/amplify/functions/fetchDocuments/node_modules/@smithy/middleware-endpoint/dist-types/resolveEndpointConfig.d.ts new file mode 100644 index 0000000..ec7dc70 --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@smithy/middleware-endpoint/dist-types/resolveEndpointConfig.d.ts @@ -0,0 +1,107 @@ +import { Endpoint, EndpointParameters, EndpointV2, Logger, Provider, UrlParser } from "@smithy/types"; +/** + * @public + * + * Endpoint config interfaces and resolver for Endpoint v2. They live in separate package to allow per-service onboarding. + * When all services onboard Endpoint v2, the resolver in config-resolver package can be removed. + * This interface includes all the endpoint parameters with built-in bindings of "AWS::*" and "SDK::*" + */ +export interface EndpointInputConfig { + /** + * The fully qualified endpoint of the webservice. This is only for using + * a custom endpoint (for example, when using a local version of S3). + * + * Endpoint transformations such as S3 applying a bucket to the hostname are + * still applicable to this custom endpoint. 
+ */ + endpoint?: string | Endpoint | Provider | EndpointV2 | Provider; + /** + * Providing a custom endpointProvider will override + * built-in transformations of the endpoint such as S3 adding the bucket + * name to the hostname, since they are part of the default endpointProvider. + */ + endpointProvider?: (params: T, context?: { + logger?: Logger; + }) => EndpointV2; + /** + * Whether TLS is enabled for requests. + * @deprecated + */ + tls?: boolean; + /** + * Enables IPv6/IPv4 dualstack endpoint. + */ + useDualstackEndpoint?: boolean | Provider; + /** + * Enables FIPS compatible endpoints. + */ + useFipsEndpoint?: boolean | Provider; + /** + * @internal + * This field is used internally so you should not fill any value to this field. + */ + serviceConfiguredEndpoint?: never; +} +/** + * @internal + */ +interface PreviouslyResolved { + urlParser: UrlParser; + region: Provider; + endpointProvider: (params: T, context?: { + logger?: Logger; + }) => EndpointV2; + logger?: Logger; + serviceId?: string; +} +/** + * @internal + * + * This supercedes the similarly named EndpointsResolvedConfig (no parametric types) + * from resolveEndpointsConfig.ts in \@smithy/config-resolver. + */ +export interface EndpointResolvedConfig { + /** + * Custom endpoint provided by the user. + * This is normalized to a single interface from the various acceptable types. + * This field will be undefined if a custom endpoint is not provided. + */ + endpoint?: Provider; + endpointProvider: (params: T, context?: { + logger?: Logger; + }) => EndpointV2; + /** + * Whether TLS is enabled for requests. + * @deprecated + */ + tls: boolean; + /** + * Whether the endpoint is specified by caller. 
+ * @internal + * @deprecated + */ + isCustomEndpoint?: boolean; + /** + * Resolved value for input {@link EndpointsInputConfig.useDualstackEndpoint} + */ + useDualstackEndpoint: Provider; + /** + * Resolved value for input {@link EndpointsInputConfig.useFipsEndpoint} + */ + useFipsEndpoint: Provider; + /** + * Unique service identifier. + * @internal + */ + serviceId?: string; + /** + * A configured endpoint global or specific to the service from ENV or AWS SDK configuration files. + * @internal + */ + serviceConfiguredEndpoint?: Provider; +} +/** + * @internal + */ +export declare const resolveEndpointConfig: (input: T & EndpointInputConfig

& PreviouslyResolved

) => T & EndpointResolvedConfig

; +export {}; diff --git a/amplify/functions/fetchDocuments/node_modules/@smithy/middleware-endpoint/dist-types/service-customizations/index.d.ts b/amplify/functions/fetchDocuments/node_modules/@smithy/middleware-endpoint/dist-types/service-customizations/index.d.ts new file mode 100644 index 0000000..716a15d --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@smithy/middleware-endpoint/dist-types/service-customizations/index.d.ts @@ -0,0 +1,4 @@ +/** + * @internal + */ +export * from "./s3"; diff --git a/amplify/functions/fetchDocuments/node_modules/@smithy/middleware-endpoint/dist-types/service-customizations/s3.d.ts b/amplify/functions/fetchDocuments/node_modules/@smithy/middleware-endpoint/dist-types/service-customizations/s3.d.ts new file mode 100644 index 0000000..80b2e6a --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@smithy/middleware-endpoint/dist-types/service-customizations/s3.d.ts @@ -0,0 +1,26 @@ +import { EndpointParameters } from "@smithy/types"; +/** + * @internal + */ +export declare const resolveParamsForS3: (endpointParams: EndpointParameters) => Promise; +/** + * @internal + */ +export declare const DOT_PATTERN: RegExp; +/** + * @internal + */ +export declare const S3_HOSTNAME_PATTERN: RegExp; +/** + * Determines whether a given string is DNS compliant per the rules outlined by + * S3. Length, capitaization, and leading dot restrictions are enforced by the + * DOMAIN_PATTERN regular expression. 
+ * @internal + * + * @see https://docs.aws.amazon.com/AmazonS3/latest/dev/BucketRestrictions.html + */ +export declare const isDnsCompatibleBucketName: (bucketName: string) => boolean; +/** + * @internal + */ +export declare const isArnBucketName: (bucketName: string) => boolean; diff --git a/amplify/functions/fetchDocuments/node_modules/@smithy/middleware-endpoint/dist-types/ts3.4/adaptors/createConfigValueProvider.d.ts b/amplify/functions/fetchDocuments/node_modules/@smithy/middleware-endpoint/dist-types/ts3.4/adaptors/createConfigValueProvider.d.ts new file mode 100644 index 0000000..842f8fa --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@smithy/middleware-endpoint/dist-types/ts3.4/adaptors/createConfigValueProvider.d.ts @@ -0,0 +1,13 @@ +/** + * Normalize some key of the client config to an async provider. + * @internal + * + * @param configKey - the key to look up in config. + * @param canonicalEndpointParamKey - this is the name the EndpointRuleSet uses. + * it will most likely not contain the config + * value, but we use it as a fallback. + * @param config - container of the config values. + * + * @returns async function that will resolve with the value. 
+ */ +export declare const createConfigValueProvider: >(configKey: string, canonicalEndpointParamKey: string, config: Config) => () => Promise; diff --git a/amplify/functions/fetchDocuments/node_modules/@smithy/middleware-endpoint/dist-types/ts3.4/adaptors/getEndpointFromConfig.browser.d.ts b/amplify/functions/fetchDocuments/node_modules/@smithy/middleware-endpoint/dist-types/ts3.4/adaptors/getEndpointFromConfig.browser.d.ts new file mode 100644 index 0000000..1a4f6ba --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@smithy/middleware-endpoint/dist-types/ts3.4/adaptors/getEndpointFromConfig.browser.d.ts @@ -0,0 +1 @@ +export declare const getEndpointFromConfig: (serviceId: string) => Promise; diff --git a/amplify/functions/fetchDocuments/node_modules/@smithy/middleware-endpoint/dist-types/ts3.4/adaptors/getEndpointFromConfig.d.ts b/amplify/functions/fetchDocuments/node_modules/@smithy/middleware-endpoint/dist-types/ts3.4/adaptors/getEndpointFromConfig.d.ts new file mode 100644 index 0000000..641570c --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@smithy/middleware-endpoint/dist-types/ts3.4/adaptors/getEndpointFromConfig.d.ts @@ -0,0 +1,4 @@ +/** + * @internal + */ +export declare const getEndpointFromConfig: (serviceId?: string) => Promise; diff --git a/amplify/functions/fetchDocuments/node_modules/@smithy/middleware-endpoint/dist-types/ts3.4/adaptors/getEndpointFromInstructions.d.ts b/amplify/functions/fetchDocuments/node_modules/@smithy/middleware-endpoint/dist-types/ts3.4/adaptors/getEndpointFromInstructions.d.ts new file mode 100644 index 0000000..82dc8df --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@smithy/middleware-endpoint/dist-types/ts3.4/adaptors/getEndpointFromInstructions.d.ts @@ -0,0 +1,28 @@ +import { EndpointParameters, EndpointV2, HandlerExecutionContext } from "@smithy/types"; +import { EndpointResolvedConfig } from "../resolveEndpointConfig"; +import { EndpointParameterInstructions } from 
"../types"; +/** + * @internal + */ +export type EndpointParameterInstructionsSupplier = Partial<{ + getEndpointParameterInstructions(): EndpointParameterInstructions; +}>; +/** + * This step in the endpoint resolution process is exposed as a function + * to allow packages such as signers, lib-upload, etc. to get + * the V2 Endpoint associated to an instance of some api operation command + * without needing to send it or resolve its middleware stack. + * + * @internal + * @param commandInput - the input of the Command in question. + * @param instructionsSupplier - this is typically a Command constructor. A static function supplying the + * endpoint parameter instructions will exist for commands in services + * having an endpoints ruleset trait. + * @param clientConfig - config of the service client. + * @param context - optional context. + */ +export declare const getEndpointFromInstructions: , Config extends Record>(commandInput: CommandInput, instructionsSupplier: EndpointParameterInstructionsSupplier, clientConfig: Partial> & Config, context?: HandlerExecutionContext) => Promise; +/** + * @internal + */ +export declare const resolveParams: , Config extends Record>(commandInput: CommandInput, instructionsSupplier: EndpointParameterInstructionsSupplier, clientConfig: Partial> & Config) => Promise; diff --git a/amplify/functions/fetchDocuments/node_modules/@smithy/middleware-endpoint/dist-types/ts3.4/adaptors/getEndpointUrlConfig.d.ts b/amplify/functions/fetchDocuments/node_modules/@smithy/middleware-endpoint/dist-types/ts3.4/adaptors/getEndpointUrlConfig.d.ts new file mode 100644 index 0000000..7b9d068 --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@smithy/middleware-endpoint/dist-types/ts3.4/adaptors/getEndpointUrlConfig.d.ts @@ -0,0 +1,2 @@ +import { LoadedConfigSelectors } from "@smithy/node-config-provider"; +export declare const getEndpointUrlConfig: (serviceId: string) => LoadedConfigSelectors; diff --git 
a/amplify/functions/fetchDocuments/node_modules/@smithy/middleware-endpoint/dist-types/ts3.4/adaptors/index.d.ts b/amplify/functions/fetchDocuments/node_modules/@smithy/middleware-endpoint/dist-types/ts3.4/adaptors/index.d.ts new file mode 100644 index 0000000..ced0520 --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@smithy/middleware-endpoint/dist-types/ts3.4/adaptors/index.d.ts @@ -0,0 +1,8 @@ +/** + * @internal + */ +export * from "./getEndpointFromInstructions"; +/** + * @internal + */ +export * from "./toEndpointV1"; diff --git a/amplify/functions/fetchDocuments/node_modules/@smithy/middleware-endpoint/dist-types/ts3.4/adaptors/toEndpointV1.d.ts b/amplify/functions/fetchDocuments/node_modules/@smithy/middleware-endpoint/dist-types/ts3.4/adaptors/toEndpointV1.d.ts new file mode 100644 index 0000000..047ded8 --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@smithy/middleware-endpoint/dist-types/ts3.4/adaptors/toEndpointV1.d.ts @@ -0,0 +1,5 @@ +import { Endpoint, EndpointV2 } from "@smithy/types"; +/** + * @internal + */ +export declare const toEndpointV1: (endpoint: string | Endpoint | EndpointV2) => Endpoint; diff --git a/amplify/functions/fetchDocuments/node_modules/@smithy/middleware-endpoint/dist-types/ts3.4/endpointMiddleware.d.ts b/amplify/functions/fetchDocuments/node_modules/@smithy/middleware-endpoint/dist-types/ts3.4/endpointMiddleware.d.ts new file mode 100644 index 0000000..3f7e40a --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@smithy/middleware-endpoint/dist-types/ts3.4/endpointMiddleware.d.ts @@ -0,0 +1,10 @@ +import { EndpointParameters, SerializeMiddleware } from "@smithy/types"; +import { EndpointResolvedConfig } from "./resolveEndpointConfig"; +import { EndpointParameterInstructions } from "./types"; +/** + * @internal + */ +export declare const endpointMiddleware: ({ config, instructions, }: { + config: EndpointResolvedConfig; + instructions: EndpointParameterInstructions; +}) => 
SerializeMiddleware; diff --git a/amplify/functions/fetchDocuments/node_modules/@smithy/middleware-endpoint/dist-types/ts3.4/getEndpointPlugin.d.ts b/amplify/functions/fetchDocuments/node_modules/@smithy/middleware-endpoint/dist-types/ts3.4/getEndpointPlugin.d.ts new file mode 100644 index 0000000..39f93a9 --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@smithy/middleware-endpoint/dist-types/ts3.4/getEndpointPlugin.d.ts @@ -0,0 +1,11 @@ +import { EndpointParameters, Pluggable, RelativeMiddlewareOptions, SerializeHandlerOptions } from "@smithy/types"; +import { EndpointResolvedConfig } from "./resolveEndpointConfig"; +import { EndpointParameterInstructions } from "./types"; +/** + * @internal + */ +export declare const endpointMiddlewareOptions: SerializeHandlerOptions & RelativeMiddlewareOptions; +/** + * @internal + */ +export declare const getEndpointPlugin: (config: EndpointResolvedConfig, instructions: EndpointParameterInstructions) => Pluggable; diff --git a/amplify/functions/fetchDocuments/node_modules/@smithy/middleware-endpoint/dist-types/ts3.4/index.d.ts b/amplify/functions/fetchDocuments/node_modules/@smithy/middleware-endpoint/dist-types/ts3.4/index.d.ts new file mode 100644 index 0000000..2ad75b9 --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@smithy/middleware-endpoint/dist-types/ts3.4/index.d.ts @@ -0,0 +1,17 @@ +/** + * @internal + */ +export * from "./adaptors"; +/** + * @internal + */ +export * from "./endpointMiddleware"; +/** + * @internal + */ +export * from "./getEndpointPlugin"; +export * from "./resolveEndpointConfig"; +/** + * @internal + */ +export * from "./types"; diff --git a/amplify/functions/fetchDocuments/node_modules/@smithy/middleware-endpoint/dist-types/ts3.4/resolveEndpointConfig.d.ts b/amplify/functions/fetchDocuments/node_modules/@smithy/middleware-endpoint/dist-types/ts3.4/resolveEndpointConfig.d.ts new file mode 100644 index 0000000..875c9fc --- /dev/null +++ 
b/amplify/functions/fetchDocuments/node_modules/@smithy/middleware-endpoint/dist-types/ts3.4/resolveEndpointConfig.d.ts @@ -0,0 +1,107 @@ +import { Endpoint, EndpointParameters, EndpointV2, Logger, Provider, UrlParser } from "@smithy/types"; +/** + * @public + * + * Endpoint config interfaces and resolver for Endpoint v2. They live in separate package to allow per-service onboarding. + * When all services onboard Endpoint v2, the resolver in config-resolver package can be removed. + * This interface includes all the endpoint parameters with built-in bindings of "AWS::*" and "SDK::*" + */ +export interface EndpointInputConfig { + /** + * The fully qualified endpoint of the webservice. This is only for using + * a custom endpoint (for example, when using a local version of S3). + * + * Endpoint transformations such as S3 applying a bucket to the hostname are + * still applicable to this custom endpoint. + */ + endpoint?: string | Endpoint | Provider | EndpointV2 | Provider; + /** + * Providing a custom endpointProvider will override + * built-in transformations of the endpoint such as S3 adding the bucket + * name to the hostname, since they are part of the default endpointProvider. + */ + endpointProvider?: (params: T, context?: { + logger?: Logger; + }) => EndpointV2; + /** + * Whether TLS is enabled for requests. + * @deprecated + */ + tls?: boolean; + /** + * Enables IPv6/IPv4 dualstack endpoint. + */ + useDualstackEndpoint?: boolean | Provider; + /** + * Enables FIPS compatible endpoints. + */ + useFipsEndpoint?: boolean | Provider; + /** + * @internal + * This field is used internally so you should not fill any value to this field. 
+ */ + serviceConfiguredEndpoint?: never; +} +/** + * @internal + */ +interface PreviouslyResolved { + urlParser: UrlParser; + region: Provider; + endpointProvider: (params: T, context?: { + logger?: Logger; + }) => EndpointV2; + logger?: Logger; + serviceId?: string; +} +/** + * @internal + * + * This supercedes the similarly named EndpointsResolvedConfig (no parametric types) + * from resolveEndpointsConfig.ts in \@smithy/config-resolver. + */ +export interface EndpointResolvedConfig { + /** + * Custom endpoint provided by the user. + * This is normalized to a single interface from the various acceptable types. + * This field will be undefined if a custom endpoint is not provided. + */ + endpoint?: Provider; + endpointProvider: (params: T, context?: { + logger?: Logger; + }) => EndpointV2; + /** + * Whether TLS is enabled for requests. + * @deprecated + */ + tls: boolean; + /** + * Whether the endpoint is specified by caller. + * @internal + * @deprecated + */ + isCustomEndpoint?: boolean; + /** + * Resolved value for input {@link EndpointsInputConfig.useDualstackEndpoint} + */ + useDualstackEndpoint: Provider; + /** + * Resolved value for input {@link EndpointsInputConfig.useFipsEndpoint} + */ + useFipsEndpoint: Provider; + /** + * Unique service identifier. + * @internal + */ + serviceId?: string; + /** + * A configured endpoint global or specific to the service from ENV or AWS SDK configuration files. + * @internal + */ + serviceConfiguredEndpoint?: Provider; +} +/** + * @internal + */ +export declare const resolveEndpointConfig: (input: T & EndpointInputConfig

& PreviouslyResolved

) => T & EndpointResolvedConfig

; +export {}; diff --git a/amplify/functions/fetchDocuments/node_modules/@smithy/middleware-endpoint/dist-types/ts3.4/service-customizations/index.d.ts b/amplify/functions/fetchDocuments/node_modules/@smithy/middleware-endpoint/dist-types/ts3.4/service-customizations/index.d.ts new file mode 100644 index 0000000..6529752 --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@smithy/middleware-endpoint/dist-types/ts3.4/service-customizations/index.d.ts @@ -0,0 +1,4 @@ +/** + * @internal + */ +export * from "./s3"; diff --git a/amplify/functions/fetchDocuments/node_modules/@smithy/middleware-endpoint/dist-types/ts3.4/service-customizations/s3.d.ts b/amplify/functions/fetchDocuments/node_modules/@smithy/middleware-endpoint/dist-types/ts3.4/service-customizations/s3.d.ts new file mode 100644 index 0000000..cace227 --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@smithy/middleware-endpoint/dist-types/ts3.4/service-customizations/s3.d.ts @@ -0,0 +1,26 @@ +import { EndpointParameters } from "@smithy/types"; +/** + * @internal + */ +export declare const resolveParamsForS3: (endpointParams: EndpointParameters) => Promise; +/** + * @internal + */ +export declare const DOT_PATTERN: RegExp; +/** + * @internal + */ +export declare const S3_HOSTNAME_PATTERN: RegExp; +/** + * Determines whether a given string is DNS compliant per the rules outlined by + * S3. Length, capitaization, and leading dot restrictions are enforced by the + * DOMAIN_PATTERN regular expression. 
+ * @internal + * + * @see https://docs.aws.amazon.com/AmazonS3/latest/dev/BucketRestrictions.html + */ +export declare const isDnsCompatibleBucketName: (bucketName: string) => boolean; +/** + * @internal + */ +export declare const isArnBucketName: (bucketName: string) => boolean; diff --git a/amplify/functions/fetchDocuments/node_modules/@smithy/middleware-endpoint/dist-types/ts3.4/types.d.ts b/amplify/functions/fetchDocuments/node_modules/@smithy/middleware-endpoint/dist-types/ts3.4/types.d.ts new file mode 100644 index 0000000..a6084c8 --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@smithy/middleware-endpoint/dist-types/ts3.4/types.d.ts @@ -0,0 +1,41 @@ +/** + * @internal + */ +export interface EndpointParameterInstructions { + [name: string]: BuiltInParamInstruction | ClientContextParamInstruction | StaticContextParamInstruction | ContextParamInstruction | OperationContextParamInstruction; +} +/** + * @internal + */ +export interface BuiltInParamInstruction { + type: "builtInParams"; + name: string; +} +/** + * @internal + */ +export interface ClientContextParamInstruction { + type: "clientContextParams"; + name: string; +} +/** + * @internal + */ +export interface StaticContextParamInstruction { + type: "staticContextParams"; + value: string | boolean; +} +/** + * @internal + */ +export interface ContextParamInstruction { + type: "contextParams"; + name: string; +} +/** + * @internal + */ +export interface OperationContextParamInstruction { + type: "operationContextParams"; + get(input: any): any; +} diff --git a/amplify/functions/fetchDocuments/node_modules/@smithy/middleware-endpoint/dist-types/types.d.ts b/amplify/functions/fetchDocuments/node_modules/@smithy/middleware-endpoint/dist-types/types.d.ts new file mode 100644 index 0000000..0d1d9e9 --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@smithy/middleware-endpoint/dist-types/types.d.ts @@ -0,0 +1,41 @@ +/** + * @internal + */ +export interface 
EndpointParameterInstructions { + [name: string]: BuiltInParamInstruction | ClientContextParamInstruction | StaticContextParamInstruction | ContextParamInstruction | OperationContextParamInstruction; +} +/** + * @internal + */ +export interface BuiltInParamInstruction { + type: "builtInParams"; + name: string; +} +/** + * @internal + */ +export interface ClientContextParamInstruction { + type: "clientContextParams"; + name: string; +} +/** + * @internal + */ +export interface StaticContextParamInstruction { + type: "staticContextParams"; + value: string | boolean; +} +/** + * @internal + */ +export interface ContextParamInstruction { + type: "contextParams"; + name: string; +} +/** + * @internal + */ +export interface OperationContextParamInstruction { + type: "operationContextParams"; + get(input: any): any; +} diff --git a/amplify/functions/fetchDocuments/node_modules/@smithy/middleware-endpoint/package.json b/amplify/functions/fetchDocuments/node_modules/@smithy/middleware-endpoint/package.json new file mode 100644 index 0000000..e95e228 --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@smithy/middleware-endpoint/package.json @@ -0,0 +1,74 @@ +{ + "name": "@smithy/middleware-endpoint", + "version": "4.1.2", + "scripts": { + "build": "concurrently 'yarn:build:cjs' 'yarn:build:es' 'yarn:build:types && yarn build:types:downlevel'", + "build:cjs": "node ../../scripts/inline middleware-endpoint", + "build:es": "yarn g:tsc -p tsconfig.es.json", + "build:types": "yarn g:tsc -p tsconfig.types.json", + "build:types:downlevel": "rimraf dist-types/ts3.4 && downlevel-dts dist-types dist-types/ts3.4", + "stage-release": "rimraf ./.release && yarn pack && mkdir ./.release && tar zxvf ./package.tgz --directory ./.release && rm ./package.tgz", + "clean": "rimraf ./dist-* && rimraf *.tsbuildinfo || exit 0", + "lint": "eslint -c ../../.eslintrc.js \"src/**/*.ts\"", + "format": "prettier --config ../../prettier.config.js --ignore-path ../../.prettierignore --write 
\"**/*.{ts,md,json}\"", + "test": "yarn g:vitest run", + "extract:docs": "api-extractor run --local", + "test:watch": "yarn g:vitest watch" + }, + "main": "./dist-cjs/index.js", + "module": "./dist-es/index.js", + "types": "./dist-types/index.d.ts", + "author": { + "name": "AWS SDK for JavaScript Team", + "url": "https://aws.amazon.com/javascript/" + }, + "license": "Apache-2.0", + "dependencies": { + "@smithy/core": "^3.3.1", + "@smithy/middleware-serde": "^4.0.3", + "@smithy/node-config-provider": "^4.0.2", + "@smithy/shared-ini-file-loader": "^4.0.2", + "@smithy/types": "^4.2.0", + "@smithy/url-parser": "^4.0.2", + "@smithy/util-middleware": "^4.0.2", + "tslib": "^2.6.2" + }, + "devDependencies": { + "concurrently": "7.0.0", + "downlevel-dts": "0.10.1", + "rimraf": "3.0.2", + "typedoc": "0.23.23" + }, + "engines": { + "node": ">=18.0.0" + }, + "typesVersions": { + "<4.0": { + "dist-types/*": [ + "dist-types/ts3.4/*" + ] + } + }, + "files": [ + "dist-*/**" + ], + "browser": { + "./dist-es/adaptors/getEndpointFromConfig": "./dist-es/adaptors/getEndpointFromConfig.browser" + }, + "react-native": { + "./dist-es/adaptors/getEndpointFromConfig": "./dist-es/adaptors/getEndpointFromConfig.browser", + "./dist-cjs/adaptors/getEndpointFromConfig": "./dist-cjs/adaptors/getEndpointFromConfig.browser" + }, + "homepage": "https://github.com/smithy-lang/smithy-typescript/tree/main/packages/middleware-endpoint", + "repository": { + "type": "git", + "url": "https://github.com/smithy-lang/smithy-typescript.git", + "directory": "packages/middleware-endpoint" + }, + "typedoc": { + "entryPoint": "src/index.ts" + }, + "publishConfig": { + "directory": ".release/package" + } +} \ No newline at end of file diff --git a/amplify/functions/fetchDocuments/node_modules/@smithy/middleware-retry/LICENSE b/amplify/functions/fetchDocuments/node_modules/@smithy/middleware-retry/LICENSE new file mode 100644 index 0000000..dd65ae0 --- /dev/null +++ 
b/amplify/functions/fetchDocuments/node_modules/@smithy/middleware-retry/LICENSE @@ -0,0 +1,201 @@ + Apache License + Version 2.0, January 2004 + http://www.apache.org/licenses/ + + TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION + + 1. Definitions. + + "License" shall mean the terms and conditions for use, reproduction, + and distribution as defined by Sections 1 through 9 of this document. + + "Licensor" shall mean the copyright owner or entity authorized by + the copyright owner that is granting the License. + + "Legal Entity" shall mean the union of the acting entity and all + other entities that control, are controlled by, or are under common + control with that entity. For the purposes of this definition, + "control" means (i) the power, direct or indirect, to cause the + direction or management of such entity, whether by contract or + otherwise, or (ii) ownership of fifty percent (50%) or more of the + outstanding shares, or (iii) beneficial ownership of such entity. + + "You" (or "Your") shall mean an individual or Legal Entity + exercising permissions granted by this License. + + "Source" form shall mean the preferred form for making modifications, + including but not limited to software source code, documentation + source, and configuration files. + + "Object" form shall mean any form resulting from mechanical + transformation or translation of a Source form, including but + not limited to compiled object code, generated documentation, + and conversions to other media types. + + "Work" shall mean the work of authorship, whether in Source or + Object form, made available under the License, as indicated by a + copyright notice that is included in or attached to the work + (an example is provided in the Appendix below). 
+ + "Derivative Works" shall mean any work, whether in Source or Object + form, that is based on (or derived from) the Work and for which the + editorial revisions, annotations, elaborations, or other modifications + represent, as a whole, an original work of authorship. For the purposes + of this License, Derivative Works shall not include works that remain + separable from, or merely link (or bind by name) to the interfaces of, + the Work and Derivative Works thereof. + + "Contribution" shall mean any work of authorship, including + the original version of the Work and any modifications or additions + to that Work or Derivative Works thereof, that is intentionally + submitted to Licensor for inclusion in the Work by the copyright owner + or by an individual or Legal Entity authorized to submit on behalf of + the copyright owner. For the purposes of this definition, "submitted" + means any form of electronic, verbal, or written communication sent + to the Licensor or its representatives, including but not limited to + communication on electronic mailing lists, source code control systems, + and issue tracking systems that are managed by, or on behalf of, the + Licensor for the purpose of discussing and improving the Work, but + excluding communication that is conspicuously marked or otherwise + designated in writing by the copyright owner as "Not a Contribution." + + "Contributor" shall mean Licensor and any individual or Legal Entity + on behalf of whom a Contribution has been received by Licensor and + subsequently incorporated within the Work. + + 2. Grant of Copyright License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + copyright license to reproduce, prepare Derivative Works of, + publicly display, publicly perform, sublicense, and distribute the + Work and such Derivative Works in Source or Object form. + + 3. 
Grant of Patent License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + (except as stated in this section) patent license to make, have made, + use, offer to sell, sell, import, and otherwise transfer the Work, + where such license applies only to those patent claims licensable + by such Contributor that are necessarily infringed by their + Contribution(s) alone or by combination of their Contribution(s) + with the Work to which such Contribution(s) was submitted. If You + institute patent litigation against any entity (including a + cross-claim or counterclaim in a lawsuit) alleging that the Work + or a Contribution incorporated within the Work constitutes direct + or contributory patent infringement, then any patent licenses + granted to You under this License for that Work shall terminate + as of the date such litigation is filed. + + 4. Redistribution. You may reproduce and distribute copies of the + Work or Derivative Works thereof in any medium, with or without + modifications, and in Source or Object form, provided that You + meet the following conditions: + + (a) You must give any other recipients of the Work or + Derivative Works a copy of this License; and + + (b) You must cause any modified files to carry prominent notices + stating that You changed the files; and + + (c) You must retain, in the Source form of any Derivative Works + that You distribute, all copyright, patent, trademark, and + attribution notices from the Source form of the Work, + excluding those notices that do not pertain to any part of + the Derivative Works; and + + (d) If the Work includes a "NOTICE" text file as part of its + distribution, then any Derivative Works that You distribute must + include a readable copy of the attribution notices contained + within such NOTICE file, excluding those notices that do not + pertain to any part of the Derivative 
Works, in at least one + of the following places: within a NOTICE text file distributed + as part of the Derivative Works; within the Source form or + documentation, if provided along with the Derivative Works; or, + within a display generated by the Derivative Works, if and + wherever such third-party notices normally appear. The contents + of the NOTICE file are for informational purposes only and + do not modify the License. You may add Your own attribution + notices within Derivative Works that You distribute, alongside + or as an addendum to the NOTICE text from the Work, provided + that such additional attribution notices cannot be construed + as modifying the License. + + You may add Your own copyright statement to Your modifications and + may provide additional or different license terms and conditions + for use, reproduction, or distribution of Your modifications, or + for any such Derivative Works as a whole, provided Your use, + reproduction, and distribution of the Work otherwise complies with + the conditions stated in this License. + + 5. Submission of Contributions. Unless You explicitly state otherwise, + any Contribution intentionally submitted for inclusion in the Work + by You to the Licensor shall be under the terms and conditions of + this License, without any additional terms or conditions. + Notwithstanding the above, nothing herein shall supersede or modify + the terms of any separate license agreement you may have executed + with Licensor regarding such Contributions. + + 6. Trademarks. This License does not grant permission to use the trade + names, trademarks, service marks, or product names of the Licensor, + except as required for reasonable and customary use in describing the + origin of the Work and reproducing the content of the NOTICE file. + + 7. Disclaimer of Warranty. 
Unless required by applicable law or + agreed to in writing, Licensor provides the Work (and each + Contributor provides its Contributions) on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or + implied, including, without limitation, any warranties or conditions + of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A + PARTICULAR PURPOSE. You are solely responsible for determining the + appropriateness of using or redistributing the Work and assume any + risks associated with Your exercise of permissions under this License. + + 8. Limitation of Liability. In no event and under no legal theory, + whether in tort (including negligence), contract, or otherwise, + unless required by applicable law (such as deliberate and grossly + negligent acts) or agreed to in writing, shall any Contributor be + liable to You for damages, including any direct, indirect, special, + incidental, or consequential damages of any character arising as a + result of this License or out of the use or inability to use the + Work (including but not limited to damages for loss of goodwill, + work stoppage, computer failure or malfunction, or any and all + other commercial damages or losses), even if such Contributor + has been advised of the possibility of such damages. + + 9. Accepting Warranty or Additional Liability. While redistributing + the Work or Derivative Works thereof, You may choose to offer, + and charge a fee for, acceptance of support, warranty, indemnity, + or other liability obligations and/or rights consistent with this + License. However, in accepting such obligations, You may act only + on Your own behalf and on Your sole responsibility, not on behalf + of any other Contributor, and only if You agree to indemnify, + defend, and hold each Contributor harmless for any liability + incurred by, or claims asserted against, such Contributor by reason + of your accepting any such warranty or additional liability. 
+ + END OF TERMS AND CONDITIONS + + APPENDIX: How to apply the Apache License to your work. + + To apply the Apache License to your work, attach the following + boilerplate notice, with the fields enclosed by brackets "{}" + replaced with your own identifying information. (Don't include + the brackets!) The text should be enclosed in the appropriate + comment syntax for the file format. We also recommend that a + file or class name and description of purpose be included on the + same "printed page" as the copyright notice for easier + identification within third-party archives. + + Copyright 2018-2020 Amazon.com, Inc. or its affiliates. All Rights Reserved. + + Licensed under the Apache License, Version 2.0 (the "License"); + you may not use this file except in compliance with the License. + You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + + Unless required by applicable law or agreed to in writing, software + distributed under the License is distributed on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + See the License for the specific language governing permissions and + limitations under the License. 
diff --git a/amplify/functions/fetchDocuments/node_modules/@smithy/middleware-retry/README.md b/amplify/functions/fetchDocuments/node_modules/@smithy/middleware-retry/README.md new file mode 100644 index 0000000..21ce947 --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@smithy/middleware-retry/README.md @@ -0,0 +1,11 @@ +# @smithy/middleware-retry + +[![NPM version](https://img.shields.io/npm/v/@smithy/middleware-retry/latest.svg)](https://www.npmjs.com/package/@smithy/middleware-retry) +[![NPM downloads](https://img.shields.io/npm/dm/@smithy/middleware-retry.svg)](https://www.npmjs.com/package/@smithy/middleware-retry) + +## Usage + +See [@smithy/util-retry](https://github.com/smithy-lang/smithy-typescript/tree/main/packages/util-retry) +for retry behavior and configuration. + +See also: [AWS Documentation: Retry behavior](https://docs.aws.amazon.com/sdkref/latest/guide/feature-retry-behavior.html). diff --git a/amplify/functions/fetchDocuments/node_modules/@smithy/middleware-retry/dist-cjs/AdaptiveRetryStrategy.js b/amplify/functions/fetchDocuments/node_modules/@smithy/middleware-retry/dist-cjs/AdaptiveRetryStrategy.js new file mode 100644 index 0000000..532e610 --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@smithy/middleware-retry/dist-cjs/AdaptiveRetryStrategy.js @@ -0,0 +1 @@ +module.exports = require("./index.js"); \ No newline at end of file diff --git a/amplify/functions/fetchDocuments/node_modules/@smithy/middleware-retry/dist-cjs/StandardRetryStrategy.js b/amplify/functions/fetchDocuments/node_modules/@smithy/middleware-retry/dist-cjs/StandardRetryStrategy.js new file mode 100644 index 0000000..532e610 --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@smithy/middleware-retry/dist-cjs/StandardRetryStrategy.js @@ -0,0 +1 @@ +module.exports = require("./index.js"); \ No newline at end of file diff --git a/amplify/functions/fetchDocuments/node_modules/@smithy/middleware-retry/dist-cjs/configurations.js 
b/amplify/functions/fetchDocuments/node_modules/@smithy/middleware-retry/dist-cjs/configurations.js new file mode 100644 index 0000000..532e610 --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@smithy/middleware-retry/dist-cjs/configurations.js @@ -0,0 +1 @@ +module.exports = require("./index.js"); \ No newline at end of file diff --git a/amplify/functions/fetchDocuments/node_modules/@smithy/middleware-retry/dist-cjs/defaultRetryQuota.js b/amplify/functions/fetchDocuments/node_modules/@smithy/middleware-retry/dist-cjs/defaultRetryQuota.js new file mode 100644 index 0000000..532e610 --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@smithy/middleware-retry/dist-cjs/defaultRetryQuota.js @@ -0,0 +1 @@ +module.exports = require("./index.js"); \ No newline at end of file diff --git a/amplify/functions/fetchDocuments/node_modules/@smithy/middleware-retry/dist-cjs/delayDecider.js b/amplify/functions/fetchDocuments/node_modules/@smithy/middleware-retry/dist-cjs/delayDecider.js new file mode 100644 index 0000000..532e610 --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@smithy/middleware-retry/dist-cjs/delayDecider.js @@ -0,0 +1 @@ +module.exports = require("./index.js"); \ No newline at end of file diff --git a/amplify/functions/fetchDocuments/node_modules/@smithy/middleware-retry/dist-cjs/index.js b/amplify/functions/fetchDocuments/node_modules/@smithy/middleware-retry/dist-cjs/index.js new file mode 100644 index 0000000..c8375f0 --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@smithy/middleware-retry/dist-cjs/index.js @@ -0,0 +1,425 @@ +var __defProp = Object.defineProperty; +var __getOwnPropDesc = Object.getOwnPropertyDescriptor; +var __getOwnPropNames = Object.getOwnPropertyNames; +var __hasOwnProp = Object.prototype.hasOwnProperty; +var __name = (target, value) => __defProp(target, "name", { value, configurable: true }); +var __export = (target, all) => { + for (var name in all) + __defProp(target, 
name, { get: all[name], enumerable: true }); +}; +var __copyProps = (to, from, except, desc) => { + if (from && typeof from === "object" || typeof from === "function") { + for (let key of __getOwnPropNames(from)) + if (!__hasOwnProp.call(to, key) && key !== except) + __defProp(to, key, { get: () => from[key], enumerable: !(desc = __getOwnPropDesc(from, key)) || desc.enumerable }); + } + return to; +}; +var __toCommonJS = (mod) => __copyProps(__defProp({}, "__esModule", { value: true }), mod); + +// src/index.ts +var src_exports = {}; +__export(src_exports, { + AdaptiveRetryStrategy: () => AdaptiveRetryStrategy, + CONFIG_MAX_ATTEMPTS: () => CONFIG_MAX_ATTEMPTS, + CONFIG_RETRY_MODE: () => CONFIG_RETRY_MODE, + ENV_MAX_ATTEMPTS: () => ENV_MAX_ATTEMPTS, + ENV_RETRY_MODE: () => ENV_RETRY_MODE, + NODE_MAX_ATTEMPT_CONFIG_OPTIONS: () => NODE_MAX_ATTEMPT_CONFIG_OPTIONS, + NODE_RETRY_MODE_CONFIG_OPTIONS: () => NODE_RETRY_MODE_CONFIG_OPTIONS, + StandardRetryStrategy: () => StandardRetryStrategy, + defaultDelayDecider: () => defaultDelayDecider, + defaultRetryDecider: () => defaultRetryDecider, + getOmitRetryHeadersPlugin: () => getOmitRetryHeadersPlugin, + getRetryAfterHint: () => getRetryAfterHint, + getRetryPlugin: () => getRetryPlugin, + omitRetryHeadersMiddleware: () => omitRetryHeadersMiddleware, + omitRetryHeadersMiddlewareOptions: () => omitRetryHeadersMiddlewareOptions, + resolveRetryConfig: () => resolveRetryConfig, + retryMiddleware: () => retryMiddleware, + retryMiddlewareOptions: () => retryMiddlewareOptions +}); +module.exports = __toCommonJS(src_exports); + +// src/AdaptiveRetryStrategy.ts + + +// src/StandardRetryStrategy.ts +var import_protocol_http = require("@smithy/protocol-http"); + + +var import_uuid = require("uuid"); + +// src/defaultRetryQuota.ts +var import_util_retry = require("@smithy/util-retry"); +var getDefaultRetryQuota = /* @__PURE__ */ __name((initialRetryTokens, options) => { + const MAX_CAPACITY = initialRetryTokens; + const noRetryIncrement 
= options?.noRetryIncrement ?? import_util_retry.NO_RETRY_INCREMENT; + const retryCost = options?.retryCost ?? import_util_retry.RETRY_COST; + const timeoutRetryCost = options?.timeoutRetryCost ?? import_util_retry.TIMEOUT_RETRY_COST; + let availableCapacity = initialRetryTokens; + const getCapacityAmount = /* @__PURE__ */ __name((error) => error.name === "TimeoutError" ? timeoutRetryCost : retryCost, "getCapacityAmount"); + const hasRetryTokens = /* @__PURE__ */ __name((error) => getCapacityAmount(error) <= availableCapacity, "hasRetryTokens"); + const retrieveRetryTokens = /* @__PURE__ */ __name((error) => { + if (!hasRetryTokens(error)) { + throw new Error("No retry token available"); + } + const capacityAmount = getCapacityAmount(error); + availableCapacity -= capacityAmount; + return capacityAmount; + }, "retrieveRetryTokens"); + const releaseRetryTokens = /* @__PURE__ */ __name((capacityReleaseAmount) => { + availableCapacity += capacityReleaseAmount ?? noRetryIncrement; + availableCapacity = Math.min(availableCapacity, MAX_CAPACITY); + }, "releaseRetryTokens"); + return Object.freeze({ + hasRetryTokens, + retrieveRetryTokens, + releaseRetryTokens + }); +}, "getDefaultRetryQuota"); + +// src/delayDecider.ts + +var defaultDelayDecider = /* @__PURE__ */ __name((delayBase, attempts) => Math.floor(Math.min(import_util_retry.MAXIMUM_RETRY_DELAY, Math.random() * 2 ** attempts * delayBase)), "defaultDelayDecider"); + +// src/retryDecider.ts +var import_service_error_classification = require("@smithy/service-error-classification"); +var defaultRetryDecider = /* @__PURE__ */ __name((error) => { + if (!error) { + return false; + } + return (0, import_service_error_classification.isRetryableByTrait)(error) || (0, import_service_error_classification.isClockSkewError)(error) || (0, import_service_error_classification.isThrottlingError)(error) || (0, import_service_error_classification.isTransientError)(error); +}, "defaultRetryDecider"); + +// src/util.ts +var asSdkError 
= /* @__PURE__ */ __name((error) => { + if (error instanceof Error) + return error; + if (error instanceof Object) + return Object.assign(new Error(), error); + if (typeof error === "string") + return new Error(error); + return new Error(`AWS SDK error wrapper for ${error}`); +}, "asSdkError"); + +// src/StandardRetryStrategy.ts +var StandardRetryStrategy = class { + constructor(maxAttemptsProvider, options) { + this.maxAttemptsProvider = maxAttemptsProvider; + this.mode = import_util_retry.RETRY_MODES.STANDARD; + this.retryDecider = options?.retryDecider ?? defaultRetryDecider; + this.delayDecider = options?.delayDecider ?? defaultDelayDecider; + this.retryQuota = options?.retryQuota ?? getDefaultRetryQuota(import_util_retry.INITIAL_RETRY_TOKENS); + } + static { + __name(this, "StandardRetryStrategy"); + } + shouldRetry(error, attempts, maxAttempts) { + return attempts < maxAttempts && this.retryDecider(error) && this.retryQuota.hasRetryTokens(error); + } + async getMaxAttempts() { + let maxAttempts; + try { + maxAttempts = await this.maxAttemptsProvider(); + } catch (error) { + maxAttempts = import_util_retry.DEFAULT_MAX_ATTEMPTS; + } + return maxAttempts; + } + async retry(next, args, options) { + let retryTokenAmount; + let attempts = 0; + let totalDelay = 0; + const maxAttempts = await this.getMaxAttempts(); + const { request } = args; + if (import_protocol_http.HttpRequest.isInstance(request)) { + request.headers[import_util_retry.INVOCATION_ID_HEADER] = (0, import_uuid.v4)(); + } + while (true) { + try { + if (import_protocol_http.HttpRequest.isInstance(request)) { + request.headers[import_util_retry.REQUEST_HEADER] = `attempt=${attempts + 1}; max=${maxAttempts}`; + } + if (options?.beforeRequest) { + await options.beforeRequest(); + } + const { response, output } = await next(args); + if (options?.afterRequest) { + options.afterRequest(response); + } + this.retryQuota.releaseRetryTokens(retryTokenAmount); + output.$metadata.attempts = attempts + 1; + 
output.$metadata.totalRetryDelay = totalDelay; + return { response, output }; + } catch (e) { + const err = asSdkError(e); + attempts++; + if (this.shouldRetry(err, attempts, maxAttempts)) { + retryTokenAmount = this.retryQuota.retrieveRetryTokens(err); + const delayFromDecider = this.delayDecider( + (0, import_service_error_classification.isThrottlingError)(err) ? import_util_retry.THROTTLING_RETRY_DELAY_BASE : import_util_retry.DEFAULT_RETRY_DELAY_BASE, + attempts + ); + const delayFromResponse = getDelayFromRetryAfterHeader(err.$response); + const delay = Math.max(delayFromResponse || 0, delayFromDecider); + totalDelay += delay; + await new Promise((resolve) => setTimeout(resolve, delay)); + continue; + } + if (!err.$metadata) { + err.$metadata = {}; + } + err.$metadata.attempts = attempts; + err.$metadata.totalRetryDelay = totalDelay; + throw err; + } + } + } +}; +var getDelayFromRetryAfterHeader = /* @__PURE__ */ __name((response) => { + if (!import_protocol_http.HttpResponse.isInstance(response)) + return; + const retryAfterHeaderName = Object.keys(response.headers).find((key) => key.toLowerCase() === "retry-after"); + if (!retryAfterHeaderName) + return; + const retryAfter = response.headers[retryAfterHeaderName]; + const retryAfterSeconds = Number(retryAfter); + if (!Number.isNaN(retryAfterSeconds)) + return retryAfterSeconds * 1e3; + const retryAfterDate = new Date(retryAfter); + return retryAfterDate.getTime() - Date.now(); +}, "getDelayFromRetryAfterHeader"); + +// src/AdaptiveRetryStrategy.ts +var AdaptiveRetryStrategy = class extends StandardRetryStrategy { + static { + __name(this, "AdaptiveRetryStrategy"); + } + constructor(maxAttemptsProvider, options) { + const { rateLimiter, ...superOptions } = options ?? {}; + super(maxAttemptsProvider, superOptions); + this.rateLimiter = rateLimiter ?? 
new import_util_retry.DefaultRateLimiter(); + this.mode = import_util_retry.RETRY_MODES.ADAPTIVE; + } + async retry(next, args) { + return super.retry(next, args, { + beforeRequest: async () => { + return this.rateLimiter.getSendToken(); + }, + afterRequest: (response) => { + this.rateLimiter.updateClientSendingRate(response); + } + }); + } +}; + +// src/configurations.ts +var import_util_middleware = require("@smithy/util-middleware"); + +var ENV_MAX_ATTEMPTS = "AWS_MAX_ATTEMPTS"; +var CONFIG_MAX_ATTEMPTS = "max_attempts"; +var NODE_MAX_ATTEMPT_CONFIG_OPTIONS = { + environmentVariableSelector: (env) => { + const value = env[ENV_MAX_ATTEMPTS]; + if (!value) + return void 0; + const maxAttempt = parseInt(value); + if (Number.isNaN(maxAttempt)) { + throw new Error(`Environment variable ${ENV_MAX_ATTEMPTS} mast be a number, got "${value}"`); + } + return maxAttempt; + }, + configFileSelector: (profile) => { + const value = profile[CONFIG_MAX_ATTEMPTS]; + if (!value) + return void 0; + const maxAttempt = parseInt(value); + if (Number.isNaN(maxAttempt)) { + throw new Error(`Shared config file entry ${CONFIG_MAX_ATTEMPTS} mast be a number, got "${value}"`); + } + return maxAttempt; + }, + default: import_util_retry.DEFAULT_MAX_ATTEMPTS +}; +var resolveRetryConfig = /* @__PURE__ */ __name((input) => { + const { retryStrategy, retryMode: _retryMode, maxAttempts: _maxAttempts } = input; + const maxAttempts = (0, import_util_middleware.normalizeProvider)(_maxAttempts ?? 
import_util_retry.DEFAULT_MAX_ATTEMPTS); + return Object.assign(input, { + maxAttempts, + retryStrategy: async () => { + if (retryStrategy) { + return retryStrategy; + } + const retryMode = await (0, import_util_middleware.normalizeProvider)(_retryMode)(); + if (retryMode === import_util_retry.RETRY_MODES.ADAPTIVE) { + return new import_util_retry.AdaptiveRetryStrategy(maxAttempts); + } + return new import_util_retry.StandardRetryStrategy(maxAttempts); + } + }); +}, "resolveRetryConfig"); +var ENV_RETRY_MODE = "AWS_RETRY_MODE"; +var CONFIG_RETRY_MODE = "retry_mode"; +var NODE_RETRY_MODE_CONFIG_OPTIONS = { + environmentVariableSelector: (env) => env[ENV_RETRY_MODE], + configFileSelector: (profile) => profile[CONFIG_RETRY_MODE], + default: import_util_retry.DEFAULT_RETRY_MODE +}; + +// src/omitRetryHeadersMiddleware.ts + + +var omitRetryHeadersMiddleware = /* @__PURE__ */ __name(() => (next) => async (args) => { + const { request } = args; + if (import_protocol_http.HttpRequest.isInstance(request)) { + delete request.headers[import_util_retry.INVOCATION_ID_HEADER]; + delete request.headers[import_util_retry.REQUEST_HEADER]; + } + return next(args); +}, "omitRetryHeadersMiddleware"); +var omitRetryHeadersMiddlewareOptions = { + name: "omitRetryHeadersMiddleware", + tags: ["RETRY", "HEADERS", "OMIT_RETRY_HEADERS"], + relation: "before", + toMiddleware: "awsAuthMiddleware", + override: true +}; +var getOmitRetryHeadersPlugin = /* @__PURE__ */ __name((options) => ({ + applyToStack: (clientStack) => { + clientStack.addRelativeTo(omitRetryHeadersMiddleware(), omitRetryHeadersMiddlewareOptions); + } +}), "getOmitRetryHeadersPlugin"); + +// src/retryMiddleware.ts + + +var import_smithy_client = require("@smithy/smithy-client"); + + +var import_isStreamingPayload = require("./isStreamingPayload/isStreamingPayload"); +var retryMiddleware = /* @__PURE__ */ __name((options) => (next, context) => async (args) => { + let retryStrategy = await options.retryStrategy(); + const 
maxAttempts = await options.maxAttempts(); + if (isRetryStrategyV2(retryStrategy)) { + retryStrategy = retryStrategy; + let retryToken = await retryStrategy.acquireInitialRetryToken(context["partition_id"]); + let lastError = new Error(); + let attempts = 0; + let totalRetryDelay = 0; + const { request } = args; + const isRequest = import_protocol_http.HttpRequest.isInstance(request); + if (isRequest) { + request.headers[import_util_retry.INVOCATION_ID_HEADER] = (0, import_uuid.v4)(); + } + while (true) { + try { + if (isRequest) { + request.headers[import_util_retry.REQUEST_HEADER] = `attempt=${attempts + 1}; max=${maxAttempts}`; + } + const { response, output } = await next(args); + retryStrategy.recordSuccess(retryToken); + output.$metadata.attempts = attempts + 1; + output.$metadata.totalRetryDelay = totalRetryDelay; + return { response, output }; + } catch (e) { + const retryErrorInfo = getRetryErrorInfo(e); + lastError = asSdkError(e); + if (isRequest && (0, import_isStreamingPayload.isStreamingPayload)(request)) { + (context.logger instanceof import_smithy_client.NoOpLogger ? console : context.logger)?.warn( + "An error was encountered in a non-retryable streaming request." 
+ ); + throw lastError; + } + try { + retryToken = await retryStrategy.refreshRetryTokenForRetry(retryToken, retryErrorInfo); + } catch (refreshError) { + if (!lastError.$metadata) { + lastError.$metadata = {}; + } + lastError.$metadata.attempts = attempts + 1; + lastError.$metadata.totalRetryDelay = totalRetryDelay; + throw lastError; + } + attempts = retryToken.getRetryCount(); + const delay = retryToken.getRetryDelay(); + totalRetryDelay += delay; + await new Promise((resolve) => setTimeout(resolve, delay)); + } + } + } else { + retryStrategy = retryStrategy; + if (retryStrategy?.mode) + context.userAgent = [...context.userAgent || [], ["cfg/retry-mode", retryStrategy.mode]]; + return retryStrategy.retry(next, args); + } +}, "retryMiddleware"); +var isRetryStrategyV2 = /* @__PURE__ */ __name((retryStrategy) => typeof retryStrategy.acquireInitialRetryToken !== "undefined" && typeof retryStrategy.refreshRetryTokenForRetry !== "undefined" && typeof retryStrategy.recordSuccess !== "undefined", "isRetryStrategyV2"); +var getRetryErrorInfo = /* @__PURE__ */ __name((error) => { + const errorInfo = { + error, + errorType: getRetryErrorType(error) + }; + const retryAfterHint = getRetryAfterHint(error.$response); + if (retryAfterHint) { + errorInfo.retryAfterHint = retryAfterHint; + } + return errorInfo; +}, "getRetryErrorInfo"); +var getRetryErrorType = /* @__PURE__ */ __name((error) => { + if ((0, import_service_error_classification.isThrottlingError)(error)) + return "THROTTLING"; + if ((0, import_service_error_classification.isTransientError)(error)) + return "TRANSIENT"; + if ((0, import_service_error_classification.isServerError)(error)) + return "SERVER_ERROR"; + return "CLIENT_ERROR"; +}, "getRetryErrorType"); +var retryMiddlewareOptions = { + name: "retryMiddleware", + tags: ["RETRY"], + step: "finalizeRequest", + priority: "high", + override: true +}; +var getRetryPlugin = /* @__PURE__ */ __name((options) => ({ + applyToStack: (clientStack) => { + 
clientStack.add(retryMiddleware(options), retryMiddlewareOptions); + } +}), "getRetryPlugin"); +var getRetryAfterHint = /* @__PURE__ */ __name((response) => { + if (!import_protocol_http.HttpResponse.isInstance(response)) + return; + const retryAfterHeaderName = Object.keys(response.headers).find((key) => key.toLowerCase() === "retry-after"); + if (!retryAfterHeaderName) + return; + const retryAfter = response.headers[retryAfterHeaderName]; + const retryAfterSeconds = Number(retryAfter); + if (!Number.isNaN(retryAfterSeconds)) + return new Date(retryAfterSeconds * 1e3); + const retryAfterDate = new Date(retryAfter); + return retryAfterDate; +}, "getRetryAfterHint"); +// Annotate the CommonJS export names for ESM import in node: + +0 && (module.exports = { + AdaptiveRetryStrategy, + StandardRetryStrategy, + ENV_MAX_ATTEMPTS, + CONFIG_MAX_ATTEMPTS, + NODE_MAX_ATTEMPT_CONFIG_OPTIONS, + resolveRetryConfig, + ENV_RETRY_MODE, + CONFIG_RETRY_MODE, + NODE_RETRY_MODE_CONFIG_OPTIONS, + defaultDelayDecider, + omitRetryHeadersMiddleware, + omitRetryHeadersMiddlewareOptions, + getOmitRetryHeadersPlugin, + defaultRetryDecider, + retryMiddleware, + retryMiddlewareOptions, + getRetryPlugin, + getRetryAfterHint +}); + diff --git a/amplify/functions/fetchDocuments/node_modules/@smithy/middleware-retry/dist-cjs/isStreamingPayload/isStreamingPayload.browser.js b/amplify/functions/fetchDocuments/node_modules/@smithy/middleware-retry/dist-cjs/isStreamingPayload/isStreamingPayload.browser.js new file mode 100644 index 0000000..21fc19a --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@smithy/middleware-retry/dist-cjs/isStreamingPayload/isStreamingPayload.browser.js @@ -0,0 +1,5 @@ +"use strict"; +Object.defineProperty(exports, "__esModule", { value: true }); +exports.isStreamingPayload = void 0; +const isStreamingPayload = (request) => (request === null || request === void 0 ? 
void 0 : request.body) instanceof ReadableStream; +exports.isStreamingPayload = isStreamingPayload; diff --git a/amplify/functions/fetchDocuments/node_modules/@smithy/middleware-retry/dist-cjs/isStreamingPayload/isStreamingPayload.js b/amplify/functions/fetchDocuments/node_modules/@smithy/middleware-retry/dist-cjs/isStreamingPayload/isStreamingPayload.js new file mode 100644 index 0000000..06f420b --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@smithy/middleware-retry/dist-cjs/isStreamingPayload/isStreamingPayload.js @@ -0,0 +1,7 @@ +"use strict"; +Object.defineProperty(exports, "__esModule", { value: true }); +exports.isStreamingPayload = void 0; +const stream_1 = require("stream"); +const isStreamingPayload = (request) => (request === null || request === void 0 ? void 0 : request.body) instanceof stream_1.Readable || + (typeof ReadableStream !== "undefined" && (request === null || request === void 0 ? void 0 : request.body) instanceof ReadableStream); +exports.isStreamingPayload = isStreamingPayload; diff --git a/amplify/functions/fetchDocuments/node_modules/@smithy/middleware-retry/dist-cjs/omitRetryHeadersMiddleware.js b/amplify/functions/fetchDocuments/node_modules/@smithy/middleware-retry/dist-cjs/omitRetryHeadersMiddleware.js new file mode 100644 index 0000000..532e610 --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@smithy/middleware-retry/dist-cjs/omitRetryHeadersMiddleware.js @@ -0,0 +1 @@ +module.exports = require("./index.js"); \ No newline at end of file diff --git a/amplify/functions/fetchDocuments/node_modules/@smithy/middleware-retry/dist-cjs/retryDecider.js b/amplify/functions/fetchDocuments/node_modules/@smithy/middleware-retry/dist-cjs/retryDecider.js new file mode 100644 index 0000000..532e610 --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@smithy/middleware-retry/dist-cjs/retryDecider.js @@ -0,0 +1 @@ +module.exports = require("./index.js"); \ No newline at end of file diff --git 
a/amplify/functions/fetchDocuments/node_modules/@smithy/middleware-retry/dist-cjs/retryMiddleware.js b/amplify/functions/fetchDocuments/node_modules/@smithy/middleware-retry/dist-cjs/retryMiddleware.js new file mode 100644 index 0000000..532e610 --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@smithy/middleware-retry/dist-cjs/retryMiddleware.js @@ -0,0 +1 @@ +module.exports = require("./index.js"); \ No newline at end of file diff --git a/amplify/functions/fetchDocuments/node_modules/@smithy/middleware-retry/dist-cjs/types.js b/amplify/functions/fetchDocuments/node_modules/@smithy/middleware-retry/dist-cjs/types.js new file mode 100644 index 0000000..532e610 --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@smithy/middleware-retry/dist-cjs/types.js @@ -0,0 +1 @@ +module.exports = require("./index.js"); \ No newline at end of file diff --git a/amplify/functions/fetchDocuments/node_modules/@smithy/middleware-retry/dist-cjs/util.js b/amplify/functions/fetchDocuments/node_modules/@smithy/middleware-retry/dist-cjs/util.js new file mode 100644 index 0000000..532e610 --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@smithy/middleware-retry/dist-cjs/util.js @@ -0,0 +1 @@ +module.exports = require("./index.js"); \ No newline at end of file diff --git a/amplify/functions/fetchDocuments/node_modules/@smithy/middleware-retry/dist-es/AdaptiveRetryStrategy.js b/amplify/functions/fetchDocuments/node_modules/@smithy/middleware-retry/dist-es/AdaptiveRetryStrategy.js new file mode 100644 index 0000000..d349451 --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@smithy/middleware-retry/dist-es/AdaptiveRetryStrategy.js @@ -0,0 +1,20 @@ +import { DefaultRateLimiter, RETRY_MODES } from "@smithy/util-retry"; +import { StandardRetryStrategy } from "./StandardRetryStrategy"; +export class AdaptiveRetryStrategy extends StandardRetryStrategy { + constructor(maxAttemptsProvider, options) { + const { rateLimiter, 
...superOptions } = options ?? {}; + super(maxAttemptsProvider, superOptions); + this.rateLimiter = rateLimiter ?? new DefaultRateLimiter(); + this.mode = RETRY_MODES.ADAPTIVE; + } + async retry(next, args) { + return super.retry(next, args, { + beforeRequest: async () => { + return this.rateLimiter.getSendToken(); + }, + afterRequest: (response) => { + this.rateLimiter.updateClientSendingRate(response); + }, + }); + } +} diff --git a/amplify/functions/fetchDocuments/node_modules/@smithy/middleware-retry/dist-es/StandardRetryStrategy.js b/amplify/functions/fetchDocuments/node_modules/@smithy/middleware-retry/dist-es/StandardRetryStrategy.js new file mode 100644 index 0000000..e718ad6 --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@smithy/middleware-retry/dist-es/StandardRetryStrategy.js @@ -0,0 +1,90 @@ +import { HttpRequest, HttpResponse } from "@smithy/protocol-http"; +import { isThrottlingError } from "@smithy/service-error-classification"; +import { DEFAULT_MAX_ATTEMPTS, DEFAULT_RETRY_DELAY_BASE, INITIAL_RETRY_TOKENS, INVOCATION_ID_HEADER, REQUEST_HEADER, RETRY_MODES, THROTTLING_RETRY_DELAY_BASE, } from "@smithy/util-retry"; +import { v4 } from "uuid"; +import { getDefaultRetryQuota } from "./defaultRetryQuota"; +import { defaultDelayDecider } from "./delayDecider"; +import { defaultRetryDecider } from "./retryDecider"; +import { asSdkError } from "./util"; +export class StandardRetryStrategy { + constructor(maxAttemptsProvider, options) { + this.maxAttemptsProvider = maxAttemptsProvider; + this.mode = RETRY_MODES.STANDARD; + this.retryDecider = options?.retryDecider ?? defaultRetryDecider; + this.delayDecider = options?.delayDecider ?? defaultDelayDecider; + this.retryQuota = options?.retryQuota ?? 
getDefaultRetryQuota(INITIAL_RETRY_TOKENS); + } + shouldRetry(error, attempts, maxAttempts) { + return attempts < maxAttempts && this.retryDecider(error) && this.retryQuota.hasRetryTokens(error); + } + async getMaxAttempts() { + let maxAttempts; + try { + maxAttempts = await this.maxAttemptsProvider(); + } + catch (error) { + maxAttempts = DEFAULT_MAX_ATTEMPTS; + } + return maxAttempts; + } + async retry(next, args, options) { + let retryTokenAmount; + let attempts = 0; + let totalDelay = 0; + const maxAttempts = await this.getMaxAttempts(); + const { request } = args; + if (HttpRequest.isInstance(request)) { + request.headers[INVOCATION_ID_HEADER] = v4(); + } + while (true) { + try { + if (HttpRequest.isInstance(request)) { + request.headers[REQUEST_HEADER] = `attempt=${attempts + 1}; max=${maxAttempts}`; + } + if (options?.beforeRequest) { + await options.beforeRequest(); + } + const { response, output } = await next(args); + if (options?.afterRequest) { + options.afterRequest(response); + } + this.retryQuota.releaseRetryTokens(retryTokenAmount); + output.$metadata.attempts = attempts + 1; + output.$metadata.totalRetryDelay = totalDelay; + return { response, output }; + } + catch (e) { + const err = asSdkError(e); + attempts++; + if (this.shouldRetry(err, attempts, maxAttempts)) { + retryTokenAmount = this.retryQuota.retrieveRetryTokens(err); + const delayFromDecider = this.delayDecider(isThrottlingError(err) ? 
THROTTLING_RETRY_DELAY_BASE : DEFAULT_RETRY_DELAY_BASE, attempts); + const delayFromResponse = getDelayFromRetryAfterHeader(err.$response); + const delay = Math.max(delayFromResponse || 0, delayFromDecider); + totalDelay += delay; + await new Promise((resolve) => setTimeout(resolve, delay)); + continue; + } + if (!err.$metadata) { + err.$metadata = {}; + } + err.$metadata.attempts = attempts; + err.$metadata.totalRetryDelay = totalDelay; + throw err; + } + } + } +} +const getDelayFromRetryAfterHeader = (response) => { + if (!HttpResponse.isInstance(response)) + return; + const retryAfterHeaderName = Object.keys(response.headers).find((key) => key.toLowerCase() === "retry-after"); + if (!retryAfterHeaderName) + return; + const retryAfter = response.headers[retryAfterHeaderName]; + const retryAfterSeconds = Number(retryAfter); + if (!Number.isNaN(retryAfterSeconds)) + return retryAfterSeconds * 1000; + const retryAfterDate = new Date(retryAfter); + return retryAfterDate.getTime() - Date.now(); +}; diff --git a/amplify/functions/fetchDocuments/node_modules/@smithy/middleware-retry/dist-es/configurations.js b/amplify/functions/fetchDocuments/node_modules/@smithy/middleware-retry/dist-es/configurations.js new file mode 100644 index 0000000..ec375e3 --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@smithy/middleware-retry/dist-es/configurations.js @@ -0,0 +1,51 @@ +import { normalizeProvider } from "@smithy/util-middleware"; +import { AdaptiveRetryStrategy, DEFAULT_MAX_ATTEMPTS, DEFAULT_RETRY_MODE, RETRY_MODES, StandardRetryStrategy, } from "@smithy/util-retry"; +export const ENV_MAX_ATTEMPTS = "AWS_MAX_ATTEMPTS"; +export const CONFIG_MAX_ATTEMPTS = "max_attempts"; +export const NODE_MAX_ATTEMPT_CONFIG_OPTIONS = { + environmentVariableSelector: (env) => { + const value = env[ENV_MAX_ATTEMPTS]; + if (!value) + return undefined; + const maxAttempt = parseInt(value); + if (Number.isNaN(maxAttempt)) { + throw new Error(`Environment variable 
${ENV_MAX_ATTEMPTS} mast be a number, got "${value}"`); + } + return maxAttempt; + }, + configFileSelector: (profile) => { + const value = profile[CONFIG_MAX_ATTEMPTS]; + if (!value) + return undefined; + const maxAttempt = parseInt(value); + if (Number.isNaN(maxAttempt)) { + throw new Error(`Shared config file entry ${CONFIG_MAX_ATTEMPTS} mast be a number, got "${value}"`); + } + return maxAttempt; + }, + default: DEFAULT_MAX_ATTEMPTS, +}; +export const resolveRetryConfig = (input) => { + const { retryStrategy, retryMode: _retryMode, maxAttempts: _maxAttempts } = input; + const maxAttempts = normalizeProvider(_maxAttempts ?? DEFAULT_MAX_ATTEMPTS); + return Object.assign(input, { + maxAttempts, + retryStrategy: async () => { + if (retryStrategy) { + return retryStrategy; + } + const retryMode = await normalizeProvider(_retryMode)(); + if (retryMode === RETRY_MODES.ADAPTIVE) { + return new AdaptiveRetryStrategy(maxAttempts); + } + return new StandardRetryStrategy(maxAttempts); + }, + }); +}; +export const ENV_RETRY_MODE = "AWS_RETRY_MODE"; +export const CONFIG_RETRY_MODE = "retry_mode"; +export const NODE_RETRY_MODE_CONFIG_OPTIONS = { + environmentVariableSelector: (env) => env[ENV_RETRY_MODE], + configFileSelector: (profile) => profile[CONFIG_RETRY_MODE], + default: DEFAULT_RETRY_MODE, +}; diff --git a/amplify/functions/fetchDocuments/node_modules/@smithy/middleware-retry/dist-es/defaultRetryQuota.js b/amplify/functions/fetchDocuments/node_modules/@smithy/middleware-retry/dist-es/defaultRetryQuota.js new file mode 100644 index 0000000..4bf6771 --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@smithy/middleware-retry/dist-es/defaultRetryQuota.js @@ -0,0 +1,27 @@ +import { NO_RETRY_INCREMENT, RETRY_COST, TIMEOUT_RETRY_COST } from "@smithy/util-retry"; +export const getDefaultRetryQuota = (initialRetryTokens, options) => { + const MAX_CAPACITY = initialRetryTokens; + const noRetryIncrement = options?.noRetryIncrement ?? 
NO_RETRY_INCREMENT; + const retryCost = options?.retryCost ?? RETRY_COST; + const timeoutRetryCost = options?.timeoutRetryCost ?? TIMEOUT_RETRY_COST; + let availableCapacity = initialRetryTokens; + const getCapacityAmount = (error) => (error.name === "TimeoutError" ? timeoutRetryCost : retryCost); + const hasRetryTokens = (error) => getCapacityAmount(error) <= availableCapacity; + const retrieveRetryTokens = (error) => { + if (!hasRetryTokens(error)) { + throw new Error("No retry token available"); + } + const capacityAmount = getCapacityAmount(error); + availableCapacity -= capacityAmount; + return capacityAmount; + }; + const releaseRetryTokens = (capacityReleaseAmount) => { + availableCapacity += capacityReleaseAmount ?? noRetryIncrement; + availableCapacity = Math.min(availableCapacity, MAX_CAPACITY); + }; + return Object.freeze({ + hasRetryTokens, + retrieveRetryTokens, + releaseRetryTokens, + }); +}; diff --git a/amplify/functions/fetchDocuments/node_modules/@smithy/middleware-retry/dist-es/delayDecider.js b/amplify/functions/fetchDocuments/node_modules/@smithy/middleware-retry/dist-es/delayDecider.js new file mode 100644 index 0000000..2928506 --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@smithy/middleware-retry/dist-es/delayDecider.js @@ -0,0 +1,2 @@ +import { MAXIMUM_RETRY_DELAY } from "@smithy/util-retry"; +export const defaultDelayDecider = (delayBase, attempts) => Math.floor(Math.min(MAXIMUM_RETRY_DELAY, Math.random() * 2 ** attempts * delayBase)); diff --git a/amplify/functions/fetchDocuments/node_modules/@smithy/middleware-retry/dist-es/index.js b/amplify/functions/fetchDocuments/node_modules/@smithy/middleware-retry/dist-es/index.js new file mode 100644 index 0000000..9ebe326 --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@smithy/middleware-retry/dist-es/index.js @@ -0,0 +1,7 @@ +export * from "./AdaptiveRetryStrategy"; +export * from "./StandardRetryStrategy"; +export * from "./configurations"; +export * from 
"./delayDecider"; +export * from "./omitRetryHeadersMiddleware"; +export * from "./retryDecider"; +export * from "./retryMiddleware"; diff --git a/amplify/functions/fetchDocuments/node_modules/@smithy/middleware-retry/dist-es/isStreamingPayload/isStreamingPayload.browser.js b/amplify/functions/fetchDocuments/node_modules/@smithy/middleware-retry/dist-es/isStreamingPayload/isStreamingPayload.browser.js new file mode 100644 index 0000000..9569e92 --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@smithy/middleware-retry/dist-es/isStreamingPayload/isStreamingPayload.browser.js @@ -0,0 +1 @@ +export const isStreamingPayload = (request) => request?.body instanceof ReadableStream; diff --git a/amplify/functions/fetchDocuments/node_modules/@smithy/middleware-retry/dist-es/isStreamingPayload/isStreamingPayload.js b/amplify/functions/fetchDocuments/node_modules/@smithy/middleware-retry/dist-es/isStreamingPayload/isStreamingPayload.js new file mode 100644 index 0000000..7dcc687 --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@smithy/middleware-retry/dist-es/isStreamingPayload/isStreamingPayload.js @@ -0,0 +1,3 @@ +import { Readable } from "stream"; +export const isStreamingPayload = (request) => request?.body instanceof Readable || + (typeof ReadableStream !== "undefined" && request?.body instanceof ReadableStream); diff --git a/amplify/functions/fetchDocuments/node_modules/@smithy/middleware-retry/dist-es/omitRetryHeadersMiddleware.js b/amplify/functions/fetchDocuments/node_modules/@smithy/middleware-retry/dist-es/omitRetryHeadersMiddleware.js new file mode 100644 index 0000000..cb3c372 --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@smithy/middleware-retry/dist-es/omitRetryHeadersMiddleware.js @@ -0,0 +1,22 @@ +import { HttpRequest } from "@smithy/protocol-http"; +import { INVOCATION_ID_HEADER, REQUEST_HEADER } from "@smithy/util-retry"; +export const omitRetryHeadersMiddleware = () => (next) => async (args) => { + const 
{ request } = args; + if (HttpRequest.isInstance(request)) { + delete request.headers[INVOCATION_ID_HEADER]; + delete request.headers[REQUEST_HEADER]; + } + return next(args); +}; +export const omitRetryHeadersMiddlewareOptions = { + name: "omitRetryHeadersMiddleware", + tags: ["RETRY", "HEADERS", "OMIT_RETRY_HEADERS"], + relation: "before", + toMiddleware: "awsAuthMiddleware", + override: true, +}; +export const getOmitRetryHeadersPlugin = (options) => ({ + applyToStack: (clientStack) => { + clientStack.addRelativeTo(omitRetryHeadersMiddleware(), omitRetryHeadersMiddlewareOptions); + }, +}); diff --git a/amplify/functions/fetchDocuments/node_modules/@smithy/middleware-retry/dist-es/retryDecider.js b/amplify/functions/fetchDocuments/node_modules/@smithy/middleware-retry/dist-es/retryDecider.js new file mode 100644 index 0000000..b965fba --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@smithy/middleware-retry/dist-es/retryDecider.js @@ -0,0 +1,7 @@ +import { isClockSkewError, isRetryableByTrait, isThrottlingError, isTransientError, } from "@smithy/service-error-classification"; +export const defaultRetryDecider = (error) => { + if (!error) { + return false; + } + return isRetryableByTrait(error) || isClockSkewError(error) || isThrottlingError(error) || isTransientError(error); +}; diff --git a/amplify/functions/fetchDocuments/node_modules/@smithy/middleware-retry/dist-es/retryMiddleware.js b/amplify/functions/fetchDocuments/node_modules/@smithy/middleware-retry/dist-es/retryMiddleware.js new file mode 100644 index 0000000..a897735 --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@smithy/middleware-retry/dist-es/retryMiddleware.js @@ -0,0 +1,112 @@ +import { HttpRequest, HttpResponse } from "@smithy/protocol-http"; +import { isServerError, isThrottlingError, isTransientError } from "@smithy/service-error-classification"; +import { NoOpLogger } from "@smithy/smithy-client"; +import { INVOCATION_ID_HEADER, REQUEST_HEADER } from 
"@smithy/util-retry"; +import { v4 } from "uuid"; +import { isStreamingPayload } from "./isStreamingPayload/isStreamingPayload"; +import { asSdkError } from "./util"; +export const retryMiddleware = (options) => (next, context) => async (args) => { + let retryStrategy = await options.retryStrategy(); + const maxAttempts = await options.maxAttempts(); + if (isRetryStrategyV2(retryStrategy)) { + retryStrategy = retryStrategy; + let retryToken = await retryStrategy.acquireInitialRetryToken(context["partition_id"]); + let lastError = new Error(); + let attempts = 0; + let totalRetryDelay = 0; + const { request } = args; + const isRequest = HttpRequest.isInstance(request); + if (isRequest) { + request.headers[INVOCATION_ID_HEADER] = v4(); + } + while (true) { + try { + if (isRequest) { + request.headers[REQUEST_HEADER] = `attempt=${attempts + 1}; max=${maxAttempts}`; + } + const { response, output } = await next(args); + retryStrategy.recordSuccess(retryToken); + output.$metadata.attempts = attempts + 1; + output.$metadata.totalRetryDelay = totalRetryDelay; + return { response, output }; + } + catch (e) { + const retryErrorInfo = getRetryErrorInfo(e); + lastError = asSdkError(e); + if (isRequest && isStreamingPayload(request)) { + (context.logger instanceof NoOpLogger ? 
console : context.logger)?.warn("An error was encountered in a non-retryable streaming request."); + throw lastError; + } + try { + retryToken = await retryStrategy.refreshRetryTokenForRetry(retryToken, retryErrorInfo); + } + catch (refreshError) { + if (!lastError.$metadata) { + lastError.$metadata = {}; + } + lastError.$metadata.attempts = attempts + 1; + lastError.$metadata.totalRetryDelay = totalRetryDelay; + throw lastError; + } + attempts = retryToken.getRetryCount(); + const delay = retryToken.getRetryDelay(); + totalRetryDelay += delay; + await new Promise((resolve) => setTimeout(resolve, delay)); + } + } + } + else { + retryStrategy = retryStrategy; + if (retryStrategy?.mode) + context.userAgent = [...(context.userAgent || []), ["cfg/retry-mode", retryStrategy.mode]]; + return retryStrategy.retry(next, args); + } +}; +const isRetryStrategyV2 = (retryStrategy) => typeof retryStrategy.acquireInitialRetryToken !== "undefined" && + typeof retryStrategy.refreshRetryTokenForRetry !== "undefined" && + typeof retryStrategy.recordSuccess !== "undefined"; +const getRetryErrorInfo = (error) => { + const errorInfo = { + error, + errorType: getRetryErrorType(error), + }; + const retryAfterHint = getRetryAfterHint(error.$response); + if (retryAfterHint) { + errorInfo.retryAfterHint = retryAfterHint; + } + return errorInfo; +}; +const getRetryErrorType = (error) => { + if (isThrottlingError(error)) + return "THROTTLING"; + if (isTransientError(error)) + return "TRANSIENT"; + if (isServerError(error)) + return "SERVER_ERROR"; + return "CLIENT_ERROR"; +}; +export const retryMiddlewareOptions = { + name: "retryMiddleware", + tags: ["RETRY"], + step: "finalizeRequest", + priority: "high", + override: true, +}; +export const getRetryPlugin = (options) => ({ + applyToStack: (clientStack) => { + clientStack.add(retryMiddleware(options), retryMiddlewareOptions); + }, +}); +export const getRetryAfterHint = (response) => { + if (!HttpResponse.isInstance(response)) + return; + 
const retryAfterHeaderName = Object.keys(response.headers).find((key) => key.toLowerCase() === "retry-after"); + if (!retryAfterHeaderName) + return; + const retryAfter = response.headers[retryAfterHeaderName]; + const retryAfterSeconds = Number(retryAfter); + if (!Number.isNaN(retryAfterSeconds)) + return new Date(retryAfterSeconds * 1000); + const retryAfterDate = new Date(retryAfter); + return retryAfterDate; +}; diff --git a/amplify/functions/fetchDocuments/node_modules/@smithy/middleware-retry/dist-es/types.js b/amplify/functions/fetchDocuments/node_modules/@smithy/middleware-retry/dist-es/types.js new file mode 100644 index 0000000..cb0ff5c --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@smithy/middleware-retry/dist-es/types.js @@ -0,0 +1 @@ +export {}; diff --git a/amplify/functions/fetchDocuments/node_modules/@smithy/middleware-retry/dist-es/util.js b/amplify/functions/fetchDocuments/node_modules/@smithy/middleware-retry/dist-es/util.js new file mode 100644 index 0000000..f45e6b4 --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@smithy/middleware-retry/dist-es/util.js @@ -0,0 +1,9 @@ +export const asSdkError = (error) => { + if (error instanceof Error) + return error; + if (error instanceof Object) + return Object.assign(new Error(), error); + if (typeof error === "string") + return new Error(error); + return new Error(`AWS SDK error wrapper for ${error}`); +}; diff --git a/amplify/functions/fetchDocuments/node_modules/@smithy/middleware-retry/dist-types/AdaptiveRetryStrategy.d.ts b/amplify/functions/fetchDocuments/node_modules/@smithy/middleware-retry/dist-types/AdaptiveRetryStrategy.d.ts new file mode 100644 index 0000000..98a6a1d --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@smithy/middleware-retry/dist-types/AdaptiveRetryStrategy.d.ts @@ -0,0 +1,22 @@ +import { FinalizeHandler, FinalizeHandlerArguments, MetadataBearer, Provider } from "@smithy/types"; +import { RateLimiter } from 
"@smithy/util-retry"; +import { StandardRetryStrategy, StandardRetryStrategyOptions } from "./StandardRetryStrategy"; +/** + * @public + * Strategy options to be passed to AdaptiveRetryStrategy + */ +export interface AdaptiveRetryStrategyOptions extends StandardRetryStrategyOptions { + rateLimiter?: RateLimiter; +} +/** + * @public + * @deprecated use AdaptiveRetryStrategy from @smithy/util-retry + */ +export declare class AdaptiveRetryStrategy extends StandardRetryStrategy { + private rateLimiter; + constructor(maxAttemptsProvider: Provider, options?: AdaptiveRetryStrategyOptions); + retry(next: FinalizeHandler, args: FinalizeHandlerArguments): Promise<{ + response: unknown; + output: Ouput; + }>; +} diff --git a/amplify/functions/fetchDocuments/node_modules/@smithy/middleware-retry/dist-types/StandardRetryStrategy.d.ts b/amplify/functions/fetchDocuments/node_modules/@smithy/middleware-retry/dist-types/StandardRetryStrategy.d.ts new file mode 100644 index 0000000..7007ac3 --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@smithy/middleware-retry/dist-types/StandardRetryStrategy.d.ts @@ -0,0 +1,33 @@ +import { FinalizeHandler, FinalizeHandlerArguments, MetadataBearer, Provider, RetryStrategy } from "@smithy/types"; +import { DelayDecider, RetryDecider, RetryQuota } from "./types"; +/** + * Strategy options to be passed to StandardRetryStrategy + * @public + * @deprecated use StandardRetryStrategy from @smithy/util-retry + */ +export interface StandardRetryStrategyOptions { + retryDecider?: RetryDecider; + delayDecider?: DelayDecider; + retryQuota?: RetryQuota; +} +/** + * @public + * @deprecated use StandardRetryStrategy from @smithy/util-retry + */ +export declare class StandardRetryStrategy implements RetryStrategy { + private readonly maxAttemptsProvider; + private retryDecider; + private delayDecider; + private retryQuota; + mode: string; + constructor(maxAttemptsProvider: Provider, options?: StandardRetryStrategyOptions); + private shouldRetry; 
+ private getMaxAttempts; + retry(next: FinalizeHandler, args: FinalizeHandlerArguments, options?: { + beforeRequest: Function; + afterRequest: Function; + }): Promise<{ + response: unknown; + output: Ouput; + }>; +} diff --git a/amplify/functions/fetchDocuments/node_modules/@smithy/middleware-retry/dist-types/configurations.d.ts b/amplify/functions/fetchDocuments/node_modules/@smithy/middleware-retry/dist-types/configurations.d.ts new file mode 100644 index 0000000..150c2a3 --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@smithy/middleware-retry/dist-types/configurations.d.ts @@ -0,0 +1,66 @@ +import { LoadedConfigSelectors } from "@smithy/node-config-provider"; +import { Provider, RetryStrategy, RetryStrategyV2 } from "@smithy/types"; +/** + * @internal + */ +export declare const ENV_MAX_ATTEMPTS = "AWS_MAX_ATTEMPTS"; +/** + * @internal + */ +export declare const CONFIG_MAX_ATTEMPTS = "max_attempts"; +/** + * @internal + */ +export declare const NODE_MAX_ATTEMPT_CONFIG_OPTIONS: LoadedConfigSelectors; +/** + * @public + */ +export interface RetryInputConfig { + /** + * The maximum number of times requests that encounter retryable failures should be attempted. + */ + maxAttempts?: number | Provider; + /** + * The strategy to retry the request. Using built-in exponential backoff strategy by default. + */ + retryStrategy?: RetryStrategy | RetryStrategyV2; +} +/** + * @internal + */ +export interface PreviouslyResolved { + /** + * Specifies provider for retry algorithm to use. 
+ * @internal + */ + retryMode: string | Provider; +} +/** + * @internal + */ +export interface RetryResolvedConfig { + /** + * Resolved value for input config {@link RetryInputConfig.maxAttempts} + */ + maxAttempts: Provider; + /** + * Resolved value for input config {@link RetryInputConfig.retryStrategy} + */ + retryStrategy: Provider; +} +/** + * @internal + */ +export declare const resolveRetryConfig: (input: T & PreviouslyResolved & RetryInputConfig) => T & RetryResolvedConfig; +/** + * @internal + */ +export declare const ENV_RETRY_MODE = "AWS_RETRY_MODE"; +/** + * @internal + */ +export declare const CONFIG_RETRY_MODE = "retry_mode"; +/** + * @internal + */ +export declare const NODE_RETRY_MODE_CONFIG_OPTIONS: LoadedConfigSelectors; diff --git a/amplify/functions/fetchDocuments/node_modules/@smithy/middleware-retry/dist-types/defaultRetryQuota.d.ts b/amplify/functions/fetchDocuments/node_modules/@smithy/middleware-retry/dist-types/defaultRetryQuota.d.ts new file mode 100644 index 0000000..332a494 --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@smithy/middleware-retry/dist-types/defaultRetryQuota.d.ts @@ -0,0 +1,24 @@ +import { RetryQuota } from "./types"; +/** + * @internal + */ +export interface DefaultRetryQuotaOptions { + /** + * The total amount of retry token to be incremented from retry token balance + * if an SDK operation invocation succeeds without requiring a retry request. + */ + noRetryIncrement?: number; + /** + * The total amount of retry tokens to be decremented from retry token balance. + */ + retryCost?: number; + /** + * The total amount of retry tokens to be decremented from retry token balance + * when a throttling error is encountered. 
+ */ + timeoutRetryCost?: number; +} +/** + * @internal + */ +export declare const getDefaultRetryQuota: (initialRetryTokens: number, options?: DefaultRetryQuotaOptions) => RetryQuota; diff --git a/amplify/functions/fetchDocuments/node_modules/@smithy/middleware-retry/dist-types/delayDecider.d.ts b/amplify/functions/fetchDocuments/node_modules/@smithy/middleware-retry/dist-types/delayDecider.d.ts new file mode 100644 index 0000000..986ff42 --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@smithy/middleware-retry/dist-types/delayDecider.d.ts @@ -0,0 +1,5 @@ +/** + * @internal + * Calculate a capped, fully-jittered exponential backoff time. + */ +export declare const defaultDelayDecider: (delayBase: number, attempts: number) => number; diff --git a/amplify/functions/fetchDocuments/node_modules/@smithy/middleware-retry/dist-types/index.d.ts b/amplify/functions/fetchDocuments/node_modules/@smithy/middleware-retry/dist-types/index.d.ts new file mode 100644 index 0000000..9ebe326 --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@smithy/middleware-retry/dist-types/index.d.ts @@ -0,0 +1,7 @@ +export * from "./AdaptiveRetryStrategy"; +export * from "./StandardRetryStrategy"; +export * from "./configurations"; +export * from "./delayDecider"; +export * from "./omitRetryHeadersMiddleware"; +export * from "./retryDecider"; +export * from "./retryMiddleware"; diff --git a/amplify/functions/fetchDocuments/node_modules/@smithy/middleware-retry/dist-types/isStreamingPayload/isStreamingPayload.browser.d.ts b/amplify/functions/fetchDocuments/node_modules/@smithy/middleware-retry/dist-types/isStreamingPayload/isStreamingPayload.browser.d.ts new file mode 100644 index 0000000..48d70ba --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@smithy/middleware-retry/dist-types/isStreamingPayload/isStreamingPayload.browser.d.ts @@ -0,0 +1,5 @@ +import type { HttpRequest } from "@smithy/protocol-http"; +/** + * @internal + */ +export declare 
const isStreamingPayload: (request: HttpRequest) => boolean; diff --git a/amplify/functions/fetchDocuments/node_modules/@smithy/middleware-retry/dist-types/isStreamingPayload/isStreamingPayload.d.ts b/amplify/functions/fetchDocuments/node_modules/@smithy/middleware-retry/dist-types/isStreamingPayload/isStreamingPayload.d.ts new file mode 100644 index 0000000..48d70ba --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@smithy/middleware-retry/dist-types/isStreamingPayload/isStreamingPayload.d.ts @@ -0,0 +1,5 @@ +import type { HttpRequest } from "@smithy/protocol-http"; +/** + * @internal + */ +export declare const isStreamingPayload: (request: HttpRequest) => boolean; diff --git a/amplify/functions/fetchDocuments/node_modules/@smithy/middleware-retry/dist-types/omitRetryHeadersMiddleware.d.ts b/amplify/functions/fetchDocuments/node_modules/@smithy/middleware-retry/dist-types/omitRetryHeadersMiddleware.d.ts new file mode 100644 index 0000000..50c1ab6 --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@smithy/middleware-retry/dist-types/omitRetryHeadersMiddleware.d.ts @@ -0,0 +1,13 @@ +import { FinalizeHandler, MetadataBearer, Pluggable, RelativeMiddlewareOptions } from "@smithy/types"; +/** + * @internal + */ +export declare const omitRetryHeadersMiddleware: () => (next: FinalizeHandler) => FinalizeHandler; +/** + * @internal + */ +export declare const omitRetryHeadersMiddlewareOptions: RelativeMiddlewareOptions; +/** + * @internal + */ +export declare const getOmitRetryHeadersPlugin: (options: unknown) => Pluggable; diff --git a/amplify/functions/fetchDocuments/node_modules/@smithy/middleware-retry/dist-types/retryDecider.d.ts b/amplify/functions/fetchDocuments/node_modules/@smithy/middleware-retry/dist-types/retryDecider.d.ts new file mode 100644 index 0000000..11a4a9c --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@smithy/middleware-retry/dist-types/retryDecider.d.ts @@ -0,0 +1,6 @@ +import { SdkError } from 
"@smithy/types"; +/** + * @internal + * @deprecated this is only used in the deprecated StandardRetryStrategy. Do not use in new code. + */ +export declare const defaultRetryDecider: (error: SdkError) => boolean; diff --git a/amplify/functions/fetchDocuments/node_modules/@smithy/middleware-retry/dist-types/retryMiddleware.d.ts b/amplify/functions/fetchDocuments/node_modules/@smithy/middleware-retry/dist-types/retryMiddleware.d.ts new file mode 100644 index 0000000..9310301 --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@smithy/middleware-retry/dist-types/retryMiddleware.d.ts @@ -0,0 +1,18 @@ +import { AbsoluteLocation, FinalizeHandler, FinalizeRequestHandlerOptions, HandlerExecutionContext, MetadataBearer, Pluggable } from "@smithy/types"; +import { RetryResolvedConfig } from "./configurations"; +/** + * @internal + */ +export declare const retryMiddleware: (options: RetryResolvedConfig) => (next: FinalizeHandler, context: HandlerExecutionContext) => FinalizeHandler; +/** + * @internal + */ +export declare const retryMiddlewareOptions: FinalizeRequestHandlerOptions & AbsoluteLocation; +/** + * @internal + */ +export declare const getRetryPlugin: (options: RetryResolvedConfig) => Pluggable; +/** + * @internal + */ +export declare const getRetryAfterHint: (response: unknown) => Date | undefined; diff --git a/amplify/functions/fetchDocuments/node_modules/@smithy/middleware-retry/dist-types/ts3.4/AdaptiveRetryStrategy.d.ts b/amplify/functions/fetchDocuments/node_modules/@smithy/middleware-retry/dist-types/ts3.4/AdaptiveRetryStrategy.d.ts new file mode 100644 index 0000000..33f0416 --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@smithy/middleware-retry/dist-types/ts3.4/AdaptiveRetryStrategy.d.ts @@ -0,0 +1,22 @@ +import { FinalizeHandler, FinalizeHandlerArguments, MetadataBearer, Provider } from "@smithy/types"; +import { RateLimiter } from "@smithy/util-retry"; +import { StandardRetryStrategy, StandardRetryStrategyOptions } from 
"./StandardRetryStrategy"; +/** + * @public + * Strategy options to be passed to AdaptiveRetryStrategy + */ +export interface AdaptiveRetryStrategyOptions extends StandardRetryStrategyOptions { + rateLimiter?: RateLimiter; +} +/** + * @public + * @deprecated use AdaptiveRetryStrategy from @smithy/util-retry + */ +export declare class AdaptiveRetryStrategy extends StandardRetryStrategy { + private rateLimiter; + constructor(maxAttemptsProvider: Provider, options?: AdaptiveRetryStrategyOptions); + retry(next: FinalizeHandler, args: FinalizeHandlerArguments): Promise<{ + response: unknown; + output: Ouput; + }>; +} diff --git a/amplify/functions/fetchDocuments/node_modules/@smithy/middleware-retry/dist-types/ts3.4/StandardRetryStrategy.d.ts b/amplify/functions/fetchDocuments/node_modules/@smithy/middleware-retry/dist-types/ts3.4/StandardRetryStrategy.d.ts new file mode 100644 index 0000000..b4656d2 --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@smithy/middleware-retry/dist-types/ts3.4/StandardRetryStrategy.d.ts @@ -0,0 +1,33 @@ +import { FinalizeHandler, FinalizeHandlerArguments, MetadataBearer, Provider, RetryStrategy } from "@smithy/types"; +import { DelayDecider, RetryDecider, RetryQuota } from "./types"; +/** + * Strategy options to be passed to StandardRetryStrategy + * @public + * @deprecated use StandardRetryStrategy from @smithy/util-retry + */ +export interface StandardRetryStrategyOptions { + retryDecider?: RetryDecider; + delayDecider?: DelayDecider; + retryQuota?: RetryQuota; +} +/** + * @public + * @deprecated use StandardRetryStrategy from @smithy/util-retry + */ +export declare class StandardRetryStrategy implements RetryStrategy { + private readonly maxAttemptsProvider; + private retryDecider; + private delayDecider; + private retryQuota; + mode: string; + constructor(maxAttemptsProvider: Provider, options?: StandardRetryStrategyOptions); + private shouldRetry; + private getMaxAttempts; + retry(next: FinalizeHandler, args: 
FinalizeHandlerArguments, options?: { + beforeRequest: Function; + afterRequest: Function; + }): Promise<{ + response: unknown; + output: Ouput; + }>; +} diff --git a/amplify/functions/fetchDocuments/node_modules/@smithy/middleware-retry/dist-types/ts3.4/configurations.d.ts b/amplify/functions/fetchDocuments/node_modules/@smithy/middleware-retry/dist-types/ts3.4/configurations.d.ts new file mode 100644 index 0000000..79f8646 --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@smithy/middleware-retry/dist-types/ts3.4/configurations.d.ts @@ -0,0 +1,66 @@ +import { LoadedConfigSelectors } from "@smithy/node-config-provider"; +import { Provider, RetryStrategy, RetryStrategyV2 } from "@smithy/types"; +/** + * @internal + */ +export declare const ENV_MAX_ATTEMPTS = "AWS_MAX_ATTEMPTS"; +/** + * @internal + */ +export declare const CONFIG_MAX_ATTEMPTS = "max_attempts"; +/** + * @internal + */ +export declare const NODE_MAX_ATTEMPT_CONFIG_OPTIONS: LoadedConfigSelectors; +/** + * @public + */ +export interface RetryInputConfig { + /** + * The maximum number of times requests that encounter retryable failures should be attempted. + */ + maxAttempts?: number | Provider; + /** + * The strategy to retry the request. Using built-in exponential backoff strategy by default. + */ + retryStrategy?: RetryStrategy | RetryStrategyV2; +} +/** + * @internal + */ +export interface PreviouslyResolved { + /** + * Specifies provider for retry algorithm to use. 
+ * @internal + */ + retryMode: string | Provider; +} +/** + * @internal + */ +export interface RetryResolvedConfig { + /** + * Resolved value for input config {@link RetryInputConfig.maxAttempts} + */ + maxAttempts: Provider; + /** + * Resolved value for input config {@link RetryInputConfig.retryStrategy} + */ + retryStrategy: Provider; +} +/** + * @internal + */ +export declare const resolveRetryConfig: (input: T & PreviouslyResolved & RetryInputConfig) => T & RetryResolvedConfig; +/** + * @internal + */ +export declare const ENV_RETRY_MODE = "AWS_RETRY_MODE"; +/** + * @internal + */ +export declare const CONFIG_RETRY_MODE = "retry_mode"; +/** + * @internal + */ +export declare const NODE_RETRY_MODE_CONFIG_OPTIONS: LoadedConfigSelectors; diff --git a/amplify/functions/fetchDocuments/node_modules/@smithy/middleware-retry/dist-types/ts3.4/defaultRetryQuota.d.ts b/amplify/functions/fetchDocuments/node_modules/@smithy/middleware-retry/dist-types/ts3.4/defaultRetryQuota.d.ts new file mode 100644 index 0000000..704b5af --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@smithy/middleware-retry/dist-types/ts3.4/defaultRetryQuota.d.ts @@ -0,0 +1,24 @@ +import { RetryQuota } from "./types"; +/** + * @internal + */ +export interface DefaultRetryQuotaOptions { + /** + * The total amount of retry token to be incremented from retry token balance + * if an SDK operation invocation succeeds without requiring a retry request. + */ + noRetryIncrement?: number; + /** + * The total amount of retry tokens to be decremented from retry token balance. + */ + retryCost?: number; + /** + * The total amount of retry tokens to be decremented from retry token balance + * when a throttling error is encountered. 
+ */ + timeoutRetryCost?: number; +} +/** + * @internal + */ +export declare const getDefaultRetryQuota: (initialRetryTokens: number, options?: DefaultRetryQuotaOptions) => RetryQuota; diff --git a/amplify/functions/fetchDocuments/node_modules/@smithy/middleware-retry/dist-types/ts3.4/delayDecider.d.ts b/amplify/functions/fetchDocuments/node_modules/@smithy/middleware-retry/dist-types/ts3.4/delayDecider.d.ts new file mode 100644 index 0000000..7fa73ec --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@smithy/middleware-retry/dist-types/ts3.4/delayDecider.d.ts @@ -0,0 +1,5 @@ +/** + * @internal + * Calculate a capped, fully-jittered exponential backoff time. + */ +export declare const defaultDelayDecider: (delayBase: number, attempts: number) => number; diff --git a/amplify/functions/fetchDocuments/node_modules/@smithy/middleware-retry/dist-types/ts3.4/index.d.ts b/amplify/functions/fetchDocuments/node_modules/@smithy/middleware-retry/dist-types/ts3.4/index.d.ts new file mode 100644 index 0000000..e366bbb --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@smithy/middleware-retry/dist-types/ts3.4/index.d.ts @@ -0,0 +1,7 @@ +export * from "./AdaptiveRetryStrategy"; +export * from "./StandardRetryStrategy"; +export * from "./configurations"; +export * from "./delayDecider"; +export * from "./omitRetryHeadersMiddleware"; +export * from "./retryDecider"; +export * from "./retryMiddleware"; diff --git a/amplify/functions/fetchDocuments/node_modules/@smithy/middleware-retry/dist-types/ts3.4/isStreamingPayload/isStreamingPayload.browser.d.ts b/amplify/functions/fetchDocuments/node_modules/@smithy/middleware-retry/dist-types/ts3.4/isStreamingPayload/isStreamingPayload.browser.d.ts new file mode 100644 index 0000000..2a4d542 --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@smithy/middleware-retry/dist-types/ts3.4/isStreamingPayload/isStreamingPayload.browser.d.ts @@ -0,0 +1,5 @@ +import { HttpRequest } from 
"@smithy/protocol-http"; +/** + * @internal + */ +export declare const isStreamingPayload: (request: HttpRequest) => boolean; diff --git a/amplify/functions/fetchDocuments/node_modules/@smithy/middleware-retry/dist-types/ts3.4/isStreamingPayload/isStreamingPayload.d.ts b/amplify/functions/fetchDocuments/node_modules/@smithy/middleware-retry/dist-types/ts3.4/isStreamingPayload/isStreamingPayload.d.ts new file mode 100644 index 0000000..2a4d542 --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@smithy/middleware-retry/dist-types/ts3.4/isStreamingPayload/isStreamingPayload.d.ts @@ -0,0 +1,5 @@ +import { HttpRequest } from "@smithy/protocol-http"; +/** + * @internal + */ +export declare const isStreamingPayload: (request: HttpRequest) => boolean; diff --git a/amplify/functions/fetchDocuments/node_modules/@smithy/middleware-retry/dist-types/ts3.4/omitRetryHeadersMiddleware.d.ts b/amplify/functions/fetchDocuments/node_modules/@smithy/middleware-retry/dist-types/ts3.4/omitRetryHeadersMiddleware.d.ts new file mode 100644 index 0000000..abd8f71 --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@smithy/middleware-retry/dist-types/ts3.4/omitRetryHeadersMiddleware.d.ts @@ -0,0 +1,13 @@ +import { FinalizeHandler, MetadataBearer, Pluggable, RelativeMiddlewareOptions } from "@smithy/types"; +/** + * @internal + */ +export declare const omitRetryHeadersMiddleware: () => (next: FinalizeHandler) => FinalizeHandler; +/** + * @internal + */ +export declare const omitRetryHeadersMiddlewareOptions: RelativeMiddlewareOptions; +/** + * @internal + */ +export declare const getOmitRetryHeadersPlugin: (options: unknown) => Pluggable; diff --git a/amplify/functions/fetchDocuments/node_modules/@smithy/middleware-retry/dist-types/ts3.4/retryDecider.d.ts b/amplify/functions/fetchDocuments/node_modules/@smithy/middleware-retry/dist-types/ts3.4/retryDecider.d.ts new file mode 100644 index 0000000..c00661a --- /dev/null +++ 
b/amplify/functions/fetchDocuments/node_modules/@smithy/middleware-retry/dist-types/ts3.4/retryDecider.d.ts @@ -0,0 +1,6 @@ +import { SdkError } from "@smithy/types"; +/** + * @internal + * @deprecated this is only used in the deprecated StandardRetryStrategy. Do not use in new code. + */ +export declare const defaultRetryDecider: (error: SdkError) => boolean; diff --git a/amplify/functions/fetchDocuments/node_modules/@smithy/middleware-retry/dist-types/ts3.4/retryMiddleware.d.ts b/amplify/functions/fetchDocuments/node_modules/@smithy/middleware-retry/dist-types/ts3.4/retryMiddleware.d.ts new file mode 100644 index 0000000..137dbf1 --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@smithy/middleware-retry/dist-types/ts3.4/retryMiddleware.d.ts @@ -0,0 +1,18 @@ +import { AbsoluteLocation, FinalizeHandler, FinalizeRequestHandlerOptions, HandlerExecutionContext, MetadataBearer, Pluggable } from "@smithy/types"; +import { RetryResolvedConfig } from "./configurations"; +/** + * @internal + */ +export declare const retryMiddleware: (options: RetryResolvedConfig) => (next: FinalizeHandler, context: HandlerExecutionContext) => FinalizeHandler; +/** + * @internal + */ +export declare const retryMiddlewareOptions: FinalizeRequestHandlerOptions & AbsoluteLocation; +/** + * @internal + */ +export declare const getRetryPlugin: (options: RetryResolvedConfig) => Pluggable; +/** + * @internal + */ +export declare const getRetryAfterHint: (response: unknown) => Date | undefined; diff --git a/amplify/functions/fetchDocuments/node_modules/@smithy/middleware-retry/dist-types/ts3.4/types.d.ts b/amplify/functions/fetchDocuments/node_modules/@smithy/middleware-retry/dist-types/ts3.4/types.d.ts new file mode 100644 index 0000000..06775c6 --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@smithy/middleware-retry/dist-types/ts3.4/types.d.ts @@ -0,0 +1,65 @@ +import { SdkError } from "@smithy/types"; +/** + * Determines whether an error is retryable based on 
the number of retries + * already attempted, the HTTP status code, and the error received (if any). + * + * @param error - The error encountered. + * + * @deprecated + * @internal + */ +export interface RetryDecider { + (error: SdkError): boolean; +} +/** + * Determines the number of milliseconds to wait before retrying an action. + * + * @param delayBase - The base delay (in milliseconds). + * @param attempts - The number of times the action has already been tried. + * + * @deprecated + * @internal + */ +export interface DelayDecider { + (delayBase: number, attempts: number): number; +} +/** + * Interface that specifies the retry quota behavior. + * @deprecated + * @internal + */ +export interface RetryQuota { + /** + * returns true if retry tokens are available from the retry quota bucket. + */ + hasRetryTokens: (error: SdkError) => boolean; + /** + * returns token amount from the retry quota bucket. + * throws error is retry tokens are not available. + */ + retrieveRetryTokens: (error: SdkError) => number; + /** + * releases tokens back to the retry quota. + */ + releaseRetryTokens: (releaseCapacityAmount?: number) => void; +} +/** + * @deprecated + * @internal + */ +export interface RateLimiter { + /** + * If there is sufficient capacity (tokens) available, it immediately returns. + * If there is not sufficient capacity, it will either sleep a certain amount + * of time until the rate limiter can retrieve a token from its token bucket + * or raise an exception indicating there is insufficient capacity. + */ + getSendToken: () => Promise; + /** + * Updates the client sending rate based on response. + * If the response was successful, the capacity and fill rate are increased. + * If the response was a throttling response, the capacity and fill rate are + * decreased. Transient errors do not affect the rate limiter. 
+ */ + updateClientSendingRate: (response: any) => void; +} diff --git a/amplify/functions/fetchDocuments/node_modules/@smithy/middleware-retry/dist-types/ts3.4/util.d.ts b/amplify/functions/fetchDocuments/node_modules/@smithy/middleware-retry/dist-types/ts3.4/util.d.ts new file mode 100644 index 0000000..7684a9f --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@smithy/middleware-retry/dist-types/ts3.4/util.d.ts @@ -0,0 +1,2 @@ +import { SdkError } from "@smithy/types"; +export declare const asSdkError: (error: unknown) => SdkError; diff --git a/amplify/functions/fetchDocuments/node_modules/@smithy/middleware-retry/dist-types/types.d.ts b/amplify/functions/fetchDocuments/node_modules/@smithy/middleware-retry/dist-types/types.d.ts new file mode 100644 index 0000000..8f22712 --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@smithy/middleware-retry/dist-types/types.d.ts @@ -0,0 +1,65 @@ +import { SdkError } from "@smithy/types"; +/** + * Determines whether an error is retryable based on the number of retries + * already attempted, the HTTP status code, and the error received (if any). + * + * @param error - The error encountered. + * + * @deprecated + * @internal + */ +export interface RetryDecider { + (error: SdkError): boolean; +} +/** + * Determines the number of milliseconds to wait before retrying an action. + * + * @param delayBase - The base delay (in milliseconds). + * @param attempts - The number of times the action has already been tried. + * + * @deprecated + * @internal + */ +export interface DelayDecider { + (delayBase: number, attempts: number): number; +} +/** + * Interface that specifies the retry quota behavior. + * @deprecated + * @internal + */ +export interface RetryQuota { + /** + * returns true if retry tokens are available from the retry quota bucket. + */ + hasRetryTokens: (error: SdkError) => boolean; + /** + * returns token amount from the retry quota bucket. 
+ * throws error is retry tokens are not available. + */ + retrieveRetryTokens: (error: SdkError) => number; + /** + * releases tokens back to the retry quota. + */ + releaseRetryTokens: (releaseCapacityAmount?: number) => void; +} +/** + * @deprecated + * @internal + */ +export interface RateLimiter { + /** + * If there is sufficient capacity (tokens) available, it immediately returns. + * If there is not sufficient capacity, it will either sleep a certain amount + * of time until the rate limiter can retrieve a token from its token bucket + * or raise an exception indicating there is insufficient capacity. + */ + getSendToken: () => Promise; + /** + * Updates the client sending rate based on response. + * If the response was successful, the capacity and fill rate are increased. + * If the response was a throttling response, the capacity and fill rate are + * decreased. Transient errors do not affect the rate limiter. + */ + updateClientSendingRate: (response: any) => void; +} diff --git a/amplify/functions/fetchDocuments/node_modules/@smithy/middleware-retry/dist-types/util.d.ts b/amplify/functions/fetchDocuments/node_modules/@smithy/middleware-retry/dist-types/util.d.ts new file mode 100644 index 0000000..00939b8 --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@smithy/middleware-retry/dist-types/util.d.ts @@ -0,0 +1,2 @@ +import { SdkError } from "@smithy/types"; +export declare const asSdkError: (error: unknown) => SdkError; diff --git a/amplify/functions/fetchDocuments/node_modules/@smithy/middleware-retry/package.json b/amplify/functions/fetchDocuments/node_modules/@smithy/middleware-retry/package.json new file mode 100644 index 0000000..b029e53 --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@smithy/middleware-retry/package.json @@ -0,0 +1,79 @@ +{ + "name": "@smithy/middleware-retry", + "version": "4.1.3", + "scripts": { + "build": "concurrently 'yarn:build:cjs' 'yarn:build:es' 'yarn:build:types && yarn 
build:types:downlevel'", + "build:cjs": "node ../../scripts/inline middleware-retry", + "build:es": "yarn g:tsc -p tsconfig.es.json", + "build:types": "yarn g:tsc -p tsconfig.types.json", + "build:types:downlevel": "rimraf dist-types/ts3.4 && downlevel-dts dist-types dist-types/ts3.4", + "stage-release": "rimraf ./.release && yarn pack && mkdir ./.release && tar zxvf ./package.tgz --directory ./.release && rm ./package.tgz", + "clean": "rimraf ./dist-* && rimraf *.tsbuildinfo || exit 0", + "lint": "eslint -c ../../.eslintrc.js \"src/**/*.ts\"", + "format": "prettier --config ../../prettier.config.js --ignore-path ../../.prettierignore --write \"**/*.{ts,md,json}\"", + "extract:docs": "api-extractor run --local", + "test": "yarn g:vitest run", + "test:integration": "yarn g:vitest run -c vitest.config.integ.ts", + "test:watch": "yarn g:vitest watch", + "test:integration:watch": "yarn g:vitest watch -c vitest.config.integ.ts" + }, + "main": "./dist-cjs/index.js", + "module": "./dist-es/index.js", + "types": "./dist-types/index.d.ts", + "browser": { + "./dist-es/isStreamingPayload/isStreamingPayload": "./dist-es/isStreamingPayload/isStreamingPayload.browser" + }, + "react-native": { + "./dist-cjs/isStreamingPayload/isStreamingPayload": "./dist-cjs/isStreamingPayload/isStreamingPayload.browser", + "./dist-es/isStreamingPayload/isStreamingPayload": "./dist-es/isStreamingPayload/isStreamingPayload.browser" + }, + "author": { + "name": "AWS SDK for JavaScript Team", + "url": "https://aws.amazon.com/javascript/" + }, + "license": "Apache-2.0", + "dependencies": { + "@smithy/node-config-provider": "^4.0.2", + "@smithy/protocol-http": "^5.1.0", + "@smithy/service-error-classification": "^4.0.3", + "@smithy/smithy-client": "^4.2.2", + "@smithy/types": "^4.2.0", + "@smithy/util-middleware": "^4.0.2", + "@smithy/util-retry": "^4.0.3", + "tslib": "^2.6.2", + "uuid": "^9.0.1" + }, + "devDependencies": { + "@smithy/util-test": "^0.2.8", + "@types/uuid": "^8.3.0", + "concurrently": 
"7.0.0", + "downlevel-dts": "0.10.1", + "rimraf": "3.0.2", + "typedoc": "0.23.23" + }, + "engines": { + "node": ">=18.0.0" + }, + "typesVersions": { + "<4.0": { + "dist-types/*": [ + "dist-types/ts3.4/*" + ] + } + }, + "files": [ + "dist-*/**" + ], + "homepage": "https://github.com/smithy-lang/smithy-typescript/tree/main/packages/middleware-retry", + "repository": { + "type": "git", + "url": "https://github.com/smithy-lang/smithy-typescript.git", + "directory": "packages/middleware-retry" + }, + "typedoc": { + "entryPoint": "src/index.ts" + }, + "publishConfig": { + "directory": ".release/package" + } +} \ No newline at end of file diff --git a/amplify/functions/fetchDocuments/node_modules/@smithy/middleware-serde/LICENSE b/amplify/functions/fetchDocuments/node_modules/@smithy/middleware-serde/LICENSE new file mode 100644 index 0000000..e907b58 --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@smithy/middleware-serde/LICENSE @@ -0,0 +1,201 @@ + Apache License + Version 2.0, January 2004 + http://www.apache.org/licenses/ + + TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION + + 1. Definitions. + + "License" shall mean the terms and conditions for use, reproduction, + and distribution as defined by Sections 1 through 9 of this document. + + "Licensor" shall mean the copyright owner or entity authorized by + the copyright owner that is granting the License. + + "Legal Entity" shall mean the union of the acting entity and all + other entities that control, are controlled by, or are under common + control with that entity. For the purposes of this definition, + "control" means (i) the power, direct or indirect, to cause the + direction or management of such entity, whether by contract or + otherwise, or (ii) ownership of fifty percent (50%) or more of the + outstanding shares, or (iii) beneficial ownership of such entity. + + "You" (or "Your") shall mean an individual or Legal Entity + exercising permissions granted by this License. 
+ + "Source" form shall mean the preferred form for making modifications, + including but not limited to software source code, documentation + source, and configuration files. + + "Object" form shall mean any form resulting from mechanical + transformation or translation of a Source form, including but + not limited to compiled object code, generated documentation, + and conversions to other media types. + + "Work" shall mean the work of authorship, whether in Source or + Object form, made available under the License, as indicated by a + copyright notice that is included in or attached to the work + (an example is provided in the Appendix below). + + "Derivative Works" shall mean any work, whether in Source or Object + form, that is based on (or derived from) the Work and for which the + editorial revisions, annotations, elaborations, or other modifications + represent, as a whole, an original work of authorship. For the purposes + of this License, Derivative Works shall not include works that remain + separable from, or merely link (or bind by name) to the interfaces of, + the Work and Derivative Works thereof. + + "Contribution" shall mean any work of authorship, including + the original version of the Work and any modifications or additions + to that Work or Derivative Works thereof, that is intentionally + submitted to Licensor for inclusion in the Work by the copyright owner + or by an individual or Legal Entity authorized to submit on behalf of + the copyright owner. 
For the purposes of this definition, "submitted" + means any form of electronic, verbal, or written communication sent + to the Licensor or its representatives, including but not limited to + communication on electronic mailing lists, source code control systems, + and issue tracking systems that are managed by, or on behalf of, the + Licensor for the purpose of discussing and improving the Work, but + excluding communication that is conspicuously marked or otherwise + designated in writing by the copyright owner as "Not a Contribution." + + "Contributor" shall mean Licensor and any individual or Legal Entity + on behalf of whom a Contribution has been received by Licensor and + subsequently incorporated within the Work. + + 2. Grant of Copyright License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + copyright license to reproduce, prepare Derivative Works of, + publicly display, publicly perform, sublicense, and distribute the + Work and such Derivative Works in Source or Object form. + + 3. Grant of Patent License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + (except as stated in this section) patent license to make, have made, + use, offer to sell, sell, import, and otherwise transfer the Work, + where such license applies only to those patent claims licensable + by such Contributor that are necessarily infringed by their + Contribution(s) alone or by combination of their Contribution(s) + with the Work to which such Contribution(s) was submitted. 
If You + institute patent litigation against any entity (including a + cross-claim or counterclaim in a lawsuit) alleging that the Work + or a Contribution incorporated within the Work constitutes direct + or contributory patent infringement, then any patent licenses + granted to You under this License for that Work shall terminate + as of the date such litigation is filed. + + 4. Redistribution. You may reproduce and distribute copies of the + Work or Derivative Works thereof in any medium, with or without + modifications, and in Source or Object form, provided that You + meet the following conditions: + + (a) You must give any other recipients of the Work or + Derivative Works a copy of this License; and + + (b) You must cause any modified files to carry prominent notices + stating that You changed the files; and + + (c) You must retain, in the Source form of any Derivative Works + that You distribute, all copyright, patent, trademark, and + attribution notices from the Source form of the Work, + excluding those notices that do not pertain to any part of + the Derivative Works; and + + (d) If the Work includes a "NOTICE" text file as part of its + distribution, then any Derivative Works that You distribute must + include a readable copy of the attribution notices contained + within such NOTICE file, excluding those notices that do not + pertain to any part of the Derivative Works, in at least one + of the following places: within a NOTICE text file distributed + as part of the Derivative Works; within the Source form or + documentation, if provided along with the Derivative Works; or, + within a display generated by the Derivative Works, if and + wherever such third-party notices normally appear. The contents + of the NOTICE file are for informational purposes only and + do not modify the License. 
You may add Your own attribution + notices within Derivative Works that You distribute, alongside + or as an addendum to the NOTICE text from the Work, provided + that such additional attribution notices cannot be construed + as modifying the License. + + You may add Your own copyright statement to Your modifications and + may provide additional or different license terms and conditions + for use, reproduction, or distribution of Your modifications, or + for any such Derivative Works as a whole, provided Your use, + reproduction, and distribution of the Work otherwise complies with + the conditions stated in this License. + + 5. Submission of Contributions. Unless You explicitly state otherwise, + any Contribution intentionally submitted for inclusion in the Work + by You to the Licensor shall be under the terms and conditions of + this License, without any additional terms or conditions. + Notwithstanding the above, nothing herein shall supersede or modify + the terms of any separate license agreement you may have executed + with Licensor regarding such Contributions. + + 6. Trademarks. This License does not grant permission to use the trade + names, trademarks, service marks, or product names of the Licensor, + except as required for reasonable and customary use in describing the + origin of the Work and reproducing the content of the NOTICE file. + + 7. Disclaimer of Warranty. Unless required by applicable law or + agreed to in writing, Licensor provides the Work (and each + Contributor provides its Contributions) on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or + implied, including, without limitation, any warranties or conditions + of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A + PARTICULAR PURPOSE. You are solely responsible for determining the + appropriateness of using or redistributing the Work and assume any + risks associated with Your exercise of permissions under this License. + + 8. 
Limitation of Liability. In no event and under no legal theory, + whether in tort (including negligence), contract, or otherwise, + unless required by applicable law (such as deliberate and grossly + negligent acts) or agreed to in writing, shall any Contributor be + liable to You for damages, including any direct, indirect, special, + incidental, or consequential damages of any character arising as a + result of this License or out of the use or inability to use the + Work (including but not limited to damages for loss of goodwill, + work stoppage, computer failure or malfunction, or any and all + other commercial damages or losses), even if such Contributor + has been advised of the possibility of such damages. + + 9. Accepting Warranty or Additional Liability. While redistributing + the Work or Derivative Works thereof, You may choose to offer, + and charge a fee for, acceptance of support, warranty, indemnity, + or other liability obligations and/or rights consistent with this + License. However, in accepting such obligations, You may act only + on Your own behalf and on Your sole responsibility, not on behalf + of any other Contributor, and only if You agree to indemnify, + defend, and hold each Contributor harmless for any liability + incurred by, or claims asserted against, such Contributor by reason + of your accepting any such warranty or additional liability. + + END OF TERMS AND CONDITIONS + + APPENDIX: How to apply the Apache License to your work. + + To apply the Apache License to your work, attach the following + boilerplate notice, with the fields enclosed by brackets "{}" + replaced with your own identifying information. (Don't include + the brackets!) The text should be enclosed in the appropriate + comment syntax for the file format. We also recommend that a + file or class name and description of purpose be included on the + same "printed page" as the copyright notice for easier + identification within third-party archives. 
+ + Copyright 2019 Amazon.com, Inc. or its affiliates. All Rights Reserved. + + Licensed under the Apache License, Version 2.0 (the "License"); + you may not use this file except in compliance with the License. + You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + + Unless required by applicable law or agreed to in writing, software + distributed under the License is distributed on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + See the License for the specific language governing permissions and + limitations under the License. diff --git a/amplify/functions/fetchDocuments/node_modules/@smithy/middleware-serde/README.md b/amplify/functions/fetchDocuments/node_modules/@smithy/middleware-serde/README.md new file mode 100644 index 0000000..d2bbfa6 --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@smithy/middleware-serde/README.md @@ -0,0 +1,4 @@ +# @smithy/middleware-serde + +[![NPM version](https://img.shields.io/npm/v/@smithy/middleware-serde/latest.svg)](https://www.npmjs.com/package/@smithy/middleware-serde) +[![NPM downloads](https://img.shields.io/npm/dm/@smithy/middleware-serde.svg)](https://www.npmjs.com/package/@smithy/middleware-serde) diff --git a/amplify/functions/fetchDocuments/node_modules/@smithy/middleware-serde/dist-cjs/deserializerMiddleware.js b/amplify/functions/fetchDocuments/node_modules/@smithy/middleware-serde/dist-cjs/deserializerMiddleware.js new file mode 100644 index 0000000..532e610 --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@smithy/middleware-serde/dist-cjs/deserializerMiddleware.js @@ -0,0 +1 @@ +module.exports = require("./index.js"); \ No newline at end of file diff --git a/amplify/functions/fetchDocuments/node_modules/@smithy/middleware-serde/dist-cjs/index.js b/amplify/functions/fetchDocuments/node_modules/@smithy/middleware-serde/dist-cjs/index.js new file mode 100644 index 0000000..04fa6f3 --- /dev/null +++ 
b/amplify/functions/fetchDocuments/node_modules/@smithy/middleware-serde/dist-cjs/index.js @@ -0,0 +1,109 @@ +var __defProp = Object.defineProperty; +var __getOwnPropDesc = Object.getOwnPropertyDescriptor; +var __getOwnPropNames = Object.getOwnPropertyNames; +var __hasOwnProp = Object.prototype.hasOwnProperty; +var __name = (target, value) => __defProp(target, "name", { value, configurable: true }); +var __export = (target, all) => { + for (var name in all) + __defProp(target, name, { get: all[name], enumerable: true }); +}; +var __copyProps = (to, from, except, desc) => { + if (from && typeof from === "object" || typeof from === "function") { + for (let key of __getOwnPropNames(from)) + if (!__hasOwnProp.call(to, key) && key !== except) + __defProp(to, key, { get: () => from[key], enumerable: !(desc = __getOwnPropDesc(from, key)) || desc.enumerable }); + } + return to; +}; +var __toCommonJS = (mod) => __copyProps(__defProp({}, "__esModule", { value: true }), mod); + +// src/index.ts +var src_exports = {}; +__export(src_exports, { + deserializerMiddleware: () => deserializerMiddleware, + deserializerMiddlewareOption: () => deserializerMiddlewareOption, + getSerdePlugin: () => getSerdePlugin, + serializerMiddleware: () => serializerMiddleware, + serializerMiddlewareOption: () => serializerMiddlewareOption +}); +module.exports = __toCommonJS(src_exports); + +// src/deserializerMiddleware.ts +var deserializerMiddleware = /* @__PURE__ */ __name((options, deserializer) => (next, context) => async (args) => { + const { response } = await next(args); + try { + const parsed = await deserializer(response, options); + return { + response, + output: parsed + }; + } catch (error) { + Object.defineProperty(error, "$response", { + value: response + }); + if (!("$metadata" in error)) { + const hint = `Deserialization error: to see the raw response, inspect the hidden field {error}.$response on this object.`; + try { + error.message += "\n " + hint; + } catch (e) { + if 
(!context.logger || context.logger?.constructor?.name === "NoOpLogger") { + console.warn(hint); + } else { + context.logger?.warn?.(hint); + } + } + if (typeof error.$responseBodyText !== "undefined") { + if (error.$response) { + error.$response.body = error.$responseBodyText; + } + } + } + throw error; + } +}, "deserializerMiddleware"); + +// src/serializerMiddleware.ts +var serializerMiddleware = /* @__PURE__ */ __name((options, serializer) => (next, context) => async (args) => { + const endpoint = context.endpointV2?.url && options.urlParser ? async () => options.urlParser(context.endpointV2.url) : options.endpoint; + if (!endpoint) { + throw new Error("No valid endpoint provider available."); + } + const request = await serializer(args.input, { ...options, endpoint }); + return next({ + ...args, + request + }); +}, "serializerMiddleware"); + +// src/serdePlugin.ts +var deserializerMiddlewareOption = { + name: "deserializerMiddleware", + step: "deserialize", + tags: ["DESERIALIZER"], + override: true +}; +var serializerMiddlewareOption = { + name: "serializerMiddleware", + step: "serialize", + tags: ["SERIALIZER"], + override: true +}; +function getSerdePlugin(config, serializer, deserializer) { + return { + applyToStack: (commandStack) => { + commandStack.add(deserializerMiddleware(config, deserializer), deserializerMiddlewareOption); + commandStack.add(serializerMiddleware(config, serializer), serializerMiddlewareOption); + } + }; +} +__name(getSerdePlugin, "getSerdePlugin"); +// Annotate the CommonJS export names for ESM import in node: + +0 && (module.exports = { + deserializerMiddleware, + deserializerMiddlewareOption, + serializerMiddlewareOption, + getSerdePlugin, + serializerMiddleware +}); + diff --git a/amplify/functions/fetchDocuments/node_modules/@smithy/middleware-serde/dist-cjs/serdePlugin.js b/amplify/functions/fetchDocuments/node_modules/@smithy/middleware-serde/dist-cjs/serdePlugin.js new file mode 100644 index 0000000..532e610 --- /dev/null +++ 
b/amplify/functions/fetchDocuments/node_modules/@smithy/middleware-serde/dist-cjs/serdePlugin.js @@ -0,0 +1 @@ +module.exports = require("./index.js"); \ No newline at end of file diff --git a/amplify/functions/fetchDocuments/node_modules/@smithy/middleware-serde/dist-cjs/serializerMiddleware.js b/amplify/functions/fetchDocuments/node_modules/@smithy/middleware-serde/dist-cjs/serializerMiddleware.js new file mode 100644 index 0000000..532e610 --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@smithy/middleware-serde/dist-cjs/serializerMiddleware.js @@ -0,0 +1 @@ +module.exports = require("./index.js"); \ No newline at end of file diff --git a/amplify/functions/fetchDocuments/node_modules/@smithy/middleware-serde/dist-es/deserializerMiddleware.js b/amplify/functions/fetchDocuments/node_modules/@smithy/middleware-serde/dist-es/deserializerMiddleware.js new file mode 100644 index 0000000..19c0c27 --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@smithy/middleware-serde/dist-es/deserializerMiddleware.js @@ -0,0 +1,35 @@ +export const deserializerMiddleware = (options, deserializer) => (next, context) => async (args) => { + const { response } = await next(args); + try { + const parsed = await deserializer(response, options); + return { + response, + output: parsed, + }; + } + catch (error) { + Object.defineProperty(error, "$response", { + value: response, + }); + if (!("$metadata" in error)) { + const hint = `Deserialization error: to see the raw response, inspect the hidden field {error}.$response on this object.`; + try { + error.message += "\n " + hint; + } + catch (e) { + if (!context.logger || context.logger?.constructor?.name === "NoOpLogger") { + console.warn(hint); + } + else { + context.logger?.warn?.(hint); + } + } + if (typeof error.$responseBodyText !== "undefined") { + if (error.$response) { + error.$response.body = error.$responseBodyText; + } + } + } + throw error; + } +}; diff --git 
a/amplify/functions/fetchDocuments/node_modules/@smithy/middleware-serde/dist-es/index.js b/amplify/functions/fetchDocuments/node_modules/@smithy/middleware-serde/dist-es/index.js new file mode 100644 index 0000000..166a2be --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@smithy/middleware-serde/dist-es/index.js @@ -0,0 +1,3 @@ +export * from "./deserializerMiddleware"; +export * from "./serdePlugin"; +export * from "./serializerMiddleware"; diff --git a/amplify/functions/fetchDocuments/node_modules/@smithy/middleware-serde/dist-es/serdePlugin.js b/amplify/functions/fetchDocuments/node_modules/@smithy/middleware-serde/dist-es/serdePlugin.js new file mode 100644 index 0000000..be2a06e --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@smithy/middleware-serde/dist-es/serdePlugin.js @@ -0,0 +1,22 @@ +import { deserializerMiddleware } from "./deserializerMiddleware"; +import { serializerMiddleware } from "./serializerMiddleware"; +export const deserializerMiddlewareOption = { + name: "deserializerMiddleware", + step: "deserialize", + tags: ["DESERIALIZER"], + override: true, +}; +export const serializerMiddlewareOption = { + name: "serializerMiddleware", + step: "serialize", + tags: ["SERIALIZER"], + override: true, +}; +export function getSerdePlugin(config, serializer, deserializer) { + return { + applyToStack: (commandStack) => { + commandStack.add(deserializerMiddleware(config, deserializer), deserializerMiddlewareOption); + commandStack.add(serializerMiddleware(config, serializer), serializerMiddlewareOption); + }, + }; +} diff --git a/amplify/functions/fetchDocuments/node_modules/@smithy/middleware-serde/dist-es/serializerMiddleware.js b/amplify/functions/fetchDocuments/node_modules/@smithy/middleware-serde/dist-es/serializerMiddleware.js new file mode 100644 index 0000000..b02b93d --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@smithy/middleware-serde/dist-es/serializerMiddleware.js @@ -0,0 +1,13 @@ +export 
const serializerMiddleware = (options, serializer) => (next, context) => async (args) => { + const endpoint = context.endpointV2?.url && options.urlParser + ? async () => options.urlParser(context.endpointV2.url) + : options.endpoint; + if (!endpoint) { + throw new Error("No valid endpoint provider available."); + } + const request = await serializer(args.input, { ...options, endpoint }); + return next({ + ...args, + request, + }); +}; diff --git a/amplify/functions/fetchDocuments/node_modules/@smithy/middleware-serde/dist-types/deserializerMiddleware.d.ts b/amplify/functions/fetchDocuments/node_modules/@smithy/middleware-serde/dist-types/deserializerMiddleware.d.ts new file mode 100644 index 0000000..4d81141 --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@smithy/middleware-serde/dist-types/deserializerMiddleware.d.ts @@ -0,0 +1,5 @@ +import { DeserializeMiddleware, ResponseDeserializer, SerdeContext, SerdeFunctions } from "@smithy/types"; +/** + * @internal + */ +export declare const deserializerMiddleware: (options: SerdeFunctions, deserializer: ResponseDeserializer) => DeserializeMiddleware; diff --git a/amplify/functions/fetchDocuments/node_modules/@smithy/middleware-serde/dist-types/index.d.ts b/amplify/functions/fetchDocuments/node_modules/@smithy/middleware-serde/dist-types/index.d.ts new file mode 100644 index 0000000..166a2be --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@smithy/middleware-serde/dist-types/index.d.ts @@ -0,0 +1,3 @@ +export * from "./deserializerMiddleware"; +export * from "./serdePlugin"; +export * from "./serializerMiddleware"; diff --git a/amplify/functions/fetchDocuments/node_modules/@smithy/middleware-serde/dist-types/serdePlugin.d.ts b/amplify/functions/fetchDocuments/node_modules/@smithy/middleware-serde/dist-types/serdePlugin.d.ts new file mode 100644 index 0000000..bf1091a --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@smithy/middleware-serde/dist-types/serdePlugin.d.ts @@ 
-0,0 +1,12 @@ +import { DeserializeHandlerOptions, Endpoint, MetadataBearer, Pluggable, Provider, RequestSerializer, ResponseDeserializer, SerdeContext, SerdeFunctions, SerializeHandlerOptions, UrlParser } from "@smithy/types"; +export declare const deserializerMiddlewareOption: DeserializeHandlerOptions; +export declare const serializerMiddlewareOption: SerializeHandlerOptions; +export type V1OrV2Endpoint = { + urlParser?: UrlParser; + endpoint?: Provider; +}; +/** + * @internal + * + */ +export declare function getSerdePlugin(config: V1OrV2Endpoint & SerdeFunctions, serializer: RequestSerializer, deserializer: ResponseDeserializer): Pluggable; diff --git a/amplify/functions/fetchDocuments/node_modules/@smithy/middleware-serde/dist-types/serializerMiddleware.d.ts b/amplify/functions/fetchDocuments/node_modules/@smithy/middleware-serde/dist-types/serializerMiddleware.d.ts new file mode 100644 index 0000000..5437298 --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@smithy/middleware-serde/dist-types/serializerMiddleware.d.ts @@ -0,0 +1,6 @@ +import { RequestSerializer, SerdeContext, SerdeFunctions, SerializeMiddleware } from "@smithy/types"; +import type { V1OrV2Endpoint } from "./serdePlugin"; +/** + * @internal + */ +export declare const serializerMiddleware: (options: V1OrV2Endpoint & SerdeFunctions, serializer: RequestSerializer) => SerializeMiddleware; diff --git a/amplify/functions/fetchDocuments/node_modules/@smithy/middleware-serde/dist-types/ts3.4/deserializerMiddleware.d.ts b/amplify/functions/fetchDocuments/node_modules/@smithy/middleware-serde/dist-types/ts3.4/deserializerMiddleware.d.ts new file mode 100644 index 0000000..b0ed492 --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@smithy/middleware-serde/dist-types/ts3.4/deserializerMiddleware.d.ts @@ -0,0 +1,5 @@ +import { DeserializeMiddleware, ResponseDeserializer, SerdeContext, SerdeFunctions } from "@smithy/types"; +/** + * @internal + */ +export declare const 
deserializerMiddleware: (options: SerdeFunctions, deserializer: ResponseDeserializer) => DeserializeMiddleware; diff --git a/amplify/functions/fetchDocuments/node_modules/@smithy/middleware-serde/dist-types/ts3.4/index.d.ts b/amplify/functions/fetchDocuments/node_modules/@smithy/middleware-serde/dist-types/ts3.4/index.d.ts new file mode 100644 index 0000000..ec66df4 --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@smithy/middleware-serde/dist-types/ts3.4/index.d.ts @@ -0,0 +1,3 @@ +export * from "./deserializerMiddleware"; +export * from "./serdePlugin"; +export * from "./serializerMiddleware"; diff --git a/amplify/functions/fetchDocuments/node_modules/@smithy/middleware-serde/dist-types/ts3.4/serdePlugin.d.ts b/amplify/functions/fetchDocuments/node_modules/@smithy/middleware-serde/dist-types/ts3.4/serdePlugin.d.ts new file mode 100644 index 0000000..c381721 --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@smithy/middleware-serde/dist-types/ts3.4/serdePlugin.d.ts @@ -0,0 +1,12 @@ +import { DeserializeHandlerOptions, Endpoint, MetadataBearer, Pluggable, Provider, RequestSerializer, ResponseDeserializer, SerdeContext, SerdeFunctions, SerializeHandlerOptions, UrlParser } from "@smithy/types"; +export declare const deserializerMiddlewareOption: DeserializeHandlerOptions; +export declare const serializerMiddlewareOption: SerializeHandlerOptions; +export type V1OrV2Endpoint = { + urlParser?: UrlParser; + endpoint?: Provider; +}; +/** + * @internal + * + */ +export declare function getSerdePlugin(config: V1OrV2Endpoint & SerdeFunctions, serializer: RequestSerializer, deserializer: ResponseDeserializer): Pluggable; diff --git a/amplify/functions/fetchDocuments/node_modules/@smithy/middleware-serde/dist-types/ts3.4/serializerMiddleware.d.ts b/amplify/functions/fetchDocuments/node_modules/@smithy/middleware-serde/dist-types/ts3.4/serializerMiddleware.d.ts new file mode 100644 index 0000000..914b3b6 --- /dev/null +++ 
b/amplify/functions/fetchDocuments/node_modules/@smithy/middleware-serde/dist-types/ts3.4/serializerMiddleware.d.ts @@ -0,0 +1,6 @@ +import { RequestSerializer, SerdeContext, SerdeFunctions, SerializeMiddleware } from "@smithy/types"; +import { V1OrV2Endpoint } from "./serdePlugin"; +/** + * @internal + */ +export declare const serializerMiddleware: (options: V1OrV2Endpoint & SerdeFunctions, serializer: RequestSerializer) => SerializeMiddleware; diff --git a/amplify/functions/fetchDocuments/node_modules/@smithy/middleware-serde/package.json b/amplify/functions/fetchDocuments/node_modules/@smithy/middleware-serde/package.json new file mode 100644 index 0000000..042be08 --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@smithy/middleware-serde/package.json @@ -0,0 +1,63 @@ +{ + "name": "@smithy/middleware-serde", + "version": "4.0.3", + "scripts": { + "build": "concurrently 'yarn:build:cjs' 'yarn:build:es' 'yarn:build:types && yarn build:types:downlevel'", + "build:cjs": "node ../../scripts/inline middleware-serde", + "build:es": "yarn g:tsc -p tsconfig.es.json", + "build:types": "yarn g:tsc -p tsconfig.types.json", + "build:types:downlevel": "rimraf dist-types/ts3.4 && downlevel-dts dist-types dist-types/ts3.4", + "stage-release": "rimraf ./.release && yarn pack && mkdir ./.release && tar zxvf ./package.tgz --directory ./.release && rm ./package.tgz", + "clean": "rimraf ./dist-* && rimraf *.tsbuildinfo || exit 0", + "lint": "eslint -c ../../.eslintrc.js \"src/**/*.ts\"", + "format": "prettier --config ../../prettier.config.js --ignore-path ../../.prettierignore --write \"**/*.{ts,md,json}\"", + "test": "yarn g:vitest run", + "test:integration": "yarn g:vitest run -c vitest.config.integ.ts", + "test:watch": "yarn g:vitest watch", + "test:integration:watch": "yarn g:vitest watch -c vitest.config.integ.ts" + }, + "main": "./dist-cjs/index.js", + "module": "./dist-es/index.js", + "types": "./dist-types/index.d.ts", + "author": { + "name": "AWS SDK for 
JavaScript Team", + "url": "https://aws.amazon.com/javascript/" + }, + "license": "Apache-2.0", + "dependencies": { + "@smithy/types": "^4.2.0", + "tslib": "^2.6.2" + }, + "engines": { + "node": ">=18.0.0" + }, + "typesVersions": { + "<4.0": { + "dist-types/*": [ + "dist-types/ts3.4/*" + ] + } + }, + "files": [ + "dist-*/**" + ], + "homepage": "https://github.com/smithy-lang/smithy-typescript/tree/main/packages/middleware-serde", + "repository": { + "type": "git", + "url": "https://github.com/smithy-lang/smithy-typescript.git", + "directory": "packages/middleware-serde" + }, + "devDependencies": { + "@smithy/util-test": "^0.2.8", + "concurrently": "7.0.0", + "downlevel-dts": "0.10.1", + "rimraf": "3.0.2", + "typedoc": "0.23.23" + }, + "typedoc": { + "entryPoint": "src/index.ts" + }, + "publishConfig": { + "directory": ".release/package" + } +} \ No newline at end of file diff --git a/amplify/functions/fetchDocuments/node_modules/@smithy/middleware-stack/LICENSE b/amplify/functions/fetchDocuments/node_modules/@smithy/middleware-stack/LICENSE new file mode 100644 index 0000000..7b6491b --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@smithy/middleware-stack/LICENSE @@ -0,0 +1,201 @@ +Apache License + Version 2.0, January 2004 + http://www.apache.org/licenses/ + + TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION + + 1. Definitions. + + "License" shall mean the terms and conditions for use, reproduction, + and distribution as defined by Sections 1 through 9 of this document. + + "Licensor" shall mean the copyright owner or entity authorized by + the copyright owner that is granting the License. + + "Legal Entity" shall mean the union of the acting entity and all + other entities that control, are controlled by, or are under common + control with that entity. 
For the purposes of this definition, + "control" means (i) the power, direct or indirect, to cause the + direction or management of such entity, whether by contract or + otherwise, or (ii) ownership of fifty percent (50%) or more of the + outstanding shares, or (iii) beneficial ownership of such entity. + + "You" (or "Your") shall mean an individual or Legal Entity + exercising permissions granted by this License. + + "Source" form shall mean the preferred form for making modifications, + including but not limited to software source code, documentation + source, and configuration files. + + "Object" form shall mean any form resulting from mechanical + transformation or translation of a Source form, including but + not limited to compiled object code, generated documentation, + and conversions to other media types. + + "Work" shall mean the work of authorship, whether in Source or + Object form, made available under the License, as indicated by a + copyright notice that is included in or attached to the work + (an example is provided in the Appendix below). + + "Derivative Works" shall mean any work, whether in Source or Object + form, that is based on (or derived from) the Work and for which the + editorial revisions, annotations, elaborations, or other modifications + represent, as a whole, an original work of authorship. For the purposes + of this License, Derivative Works shall not include works that remain + separable from, or merely link (or bind by name) to the interfaces of, + the Work and Derivative Works thereof. + + "Contribution" shall mean any work of authorship, including + the original version of the Work and any modifications or additions + to that Work or Derivative Works thereof, that is intentionally + submitted to Licensor for inclusion in the Work by the copyright owner + or by an individual or Legal Entity authorized to submit on behalf of + the copyright owner. 
For the purposes of this definition, "submitted" + means any form of electronic, verbal, or written communication sent + to the Licensor or its representatives, including but not limited to + communication on electronic mailing lists, source code control systems, + and issue tracking systems that are managed by, or on behalf of, the + Licensor for the purpose of discussing and improving the Work, but + excluding communication that is conspicuously marked or otherwise + designated in writing by the copyright owner as "Not a Contribution." + + "Contributor" shall mean Licensor and any individual or Legal Entity + on behalf of whom a Contribution has been received by Licensor and + subsequently incorporated within the Work. + + 2. Grant of Copyright License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + copyright license to reproduce, prepare Derivative Works of, + publicly display, publicly perform, sublicense, and distribute the + Work and such Derivative Works in Source or Object form. + + 3. Grant of Patent License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + (except as stated in this section) patent license to make, have made, + use, offer to sell, sell, import, and otherwise transfer the Work, + where such license applies only to those patent claims licensable + by such Contributor that are necessarily infringed by their + Contribution(s) alone or by combination of their Contribution(s) + with the Work to which such Contribution(s) was submitted. 
If You + institute patent litigation against any entity (including a + cross-claim or counterclaim in a lawsuit) alleging that the Work + or a Contribution incorporated within the Work constitutes direct + or contributory patent infringement, then any patent licenses + granted to You under this License for that Work shall terminate + as of the date such litigation is filed. + + 4. Redistribution. You may reproduce and distribute copies of the + Work or Derivative Works thereof in any medium, with or without + modifications, and in Source or Object form, provided that You + meet the following conditions: + + (a) You must give any other recipients of the Work or + Derivative Works a copy of this License; and + + (b) You must cause any modified files to carry prominent notices + stating that You changed the files; and + + (c) You must retain, in the Source form of any Derivative Works + that You distribute, all copyright, patent, trademark, and + attribution notices from the Source form of the Work, + excluding those notices that do not pertain to any part of + the Derivative Works; and + + (d) If the Work includes a "NOTICE" text file as part of its + distribution, then any Derivative Works that You distribute must + include a readable copy of the attribution notices contained + within such NOTICE file, excluding those notices that do not + pertain to any part of the Derivative Works, in at least one + of the following places: within a NOTICE text file distributed + as part of the Derivative Works; within the Source form or + documentation, if provided along with the Derivative Works; or, + within a display generated by the Derivative Works, if and + wherever such third-party notices normally appear. The contents + of the NOTICE file are for informational purposes only and + do not modify the License. 
You may add Your own attribution + notices within Derivative Works that You distribute, alongside + or as an addendum to the NOTICE text from the Work, provided + that such additional attribution notices cannot be construed + as modifying the License. + + You may add Your own copyright statement to Your modifications and + may provide additional or different license terms and conditions + for use, reproduction, or distribution of Your modifications, or + for any such Derivative Works as a whole, provided Your use, + reproduction, and distribution of the Work otherwise complies with + the conditions stated in this License. + + 5. Submission of Contributions. Unless You explicitly state otherwise, + any Contribution intentionally submitted for inclusion in the Work + by You to the Licensor shall be under the terms and conditions of + this License, without any additional terms or conditions. + Notwithstanding the above, nothing herein shall supersede or modify + the terms of any separate license agreement you may have executed + with Licensor regarding such Contributions. + + 6. Trademarks. This License does not grant permission to use the trade + names, trademarks, service marks, or product names of the Licensor, + except as required for reasonable and customary use in describing the + origin of the Work and reproducing the content of the NOTICE file. + + 7. Disclaimer of Warranty. Unless required by applicable law or + agreed to in writing, Licensor provides the Work (and each + Contributor provides its Contributions) on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or + implied, including, without limitation, any warranties or conditions + of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A + PARTICULAR PURPOSE. You are solely responsible for determining the + appropriateness of using or redistributing the Work and assume any + risks associated with Your exercise of permissions under this License. + + 8. 
Limitation of Liability. In no event and under no legal theory, + whether in tort (including negligence), contract, or otherwise, + unless required by applicable law (such as deliberate and grossly + negligent acts) or agreed to in writing, shall any Contributor be + liable to You for damages, including any direct, indirect, special, + incidental, or consequential damages of any character arising as a + result of this License or out of the use or inability to use the + Work (including but not limited to damages for loss of goodwill, + work stoppage, computer failure or malfunction, or any and all + other commercial damages or losses), even if such Contributor + has been advised of the possibility of such damages. + + 9. Accepting Warranty or Additional Liability. While redistributing + the Work or Derivative Works thereof, You may choose to offer, + and charge a fee for, acceptance of support, warranty, indemnity, + or other liability obligations and/or rights consistent with this + License. However, in accepting such obligations, You may act only + on Your own behalf and on Your sole responsibility, not on behalf + of any other Contributor, and only if You agree to indemnify, + defend, and hold each Contributor harmless for any liability + incurred by, or claims asserted against, such Contributor by reason + of your accepting any such warranty or additional liability. + + END OF TERMS AND CONDITIONS + + APPENDIX: How to apply the Apache License to your work. + + To apply the Apache License to your work, attach the following + boilerplate notice, with the fields enclosed by brackets "{}" + replaced with your own identifying information. (Don't include + the brackets!) The text should be enclosed in the appropriate + comment syntax for the file format. We also recommend that a + file or class name and description of purpose be included on the + same "printed page" as the copyright notice for easier + identification within third-party archives. 
+ + Copyright 2018-2020 Amazon.com, Inc. or its affiliates. All Rights Reserved. + + Licensed under the Apache License, Version 2.0 (the "License"); + you may not use this file except in compliance with the License. + You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + + Unless required by applicable law or agreed to in writing, software + distributed under the License is distributed on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + See the License for the specific language governing permissions and + limitations under the License. \ No newline at end of file diff --git a/amplify/functions/fetchDocuments/node_modules/@smithy/middleware-stack/README.md b/amplify/functions/fetchDocuments/node_modules/@smithy/middleware-stack/README.md new file mode 100644 index 0000000..c09d4d3 --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@smithy/middleware-stack/README.md @@ -0,0 +1,78 @@ +# @smithy/middleware-stack + +[![NPM version](https://img.shields.io/npm/v/@smithy/middleware-stack/latest.svg)](https://www.npmjs.com/package/@smithy/middleware-stack) +[![NPM downloads](https://img.shields.io/npm/dm/@smithy/middleware-stack.svg)](https://www.npmjs.com/package/@smithy/middleware-stack) + +The package contains an implementation of middleware stack interface. Middleware +stack is a structure storing middleware in specified order and resolve these +middleware into a single handler. + +A middleware stack has five `Step`s, each of them represents a specific request life cycle: + +- **initialize**: The input is being prepared. Examples of typical initialization tasks include injecting default options computing derived parameters. + +- **serialize**: The input is complete and ready to be serialized. Examples of typical serialization tasks include input validation and building an HTTP request from user input. 
+ +- **build**: The input has been serialized into an HTTP request, but that request may require further modification. Any request alterations will be applied to all retries. Examples of typical build tasks include injecting HTTP headers that describe a stable aspect of the request, such as `Content-Length` or a body checksum. + +- **finalizeRequest**: The request is being prepared to be sent over the wire. The request in this stage should already be semantically complete and should therefore only be altered to match the recipient's expectations. Examples of typical finalization tasks include request signing and injecting hop-by-hop headers. + +- **deserialize**: The response has arrived, the middleware here will deserialize the raw response object to structured response + +## Adding Middleware + +There are two ways to add middleware to a middleware stack. They both add middleware to specified `Step` but they provide fine-grained location control differently. + +### Absolute Location + +You can add middleware to specified step with: + +```javascript +stack.add(middleware, { + step: "finalizeRequest", +}); +``` + +This approach works for most cases. Sometimes you want your middleware to be executed in the front of the `Step`, you can set the `Priority` to `high`. Set the `Priority` to `low` then this middleware will be executed at the end of `Step`: + +```javascript +stack.add(middleware, { + step: "finalizeRequest", + priority: "high", +}); +``` + +If multiple middleware is added to same `step` with same `priority`, the order of them is determined by the order of adding them. 
+ +### Relative Location + +In some cases, you might want to execute your middleware before some other known middleware, then you can use `addRelativeTo()`: + +```javascript +stack.add(middleware, { + step: "finalizeRequest", + name: "myMiddleware", +}); +stack.addRelativeTo(anotherMiddleware, { + relation: "before", //or 'after' + toMiddleware: "myMiddleware", +}); +``` + +## Removing Middleware + +You can remove middleware by name one at a time: + +```javascript +stack.remove("Middleware1"); +``` + +If you specify tags for middleware, you can remove multiple middleware at a time according to tag: + +```javascript +stack.add(middleware, { + step: "finalizeRequest", + tags: ["final"], +}); +stack.removeByTag("final"); +``` diff --git a/amplify/functions/fetchDocuments/node_modules/@smithy/middleware-stack/dist-cjs/MiddlewareStack.js b/amplify/functions/fetchDocuments/node_modules/@smithy/middleware-stack/dist-cjs/MiddlewareStack.js new file mode 100644 index 0000000..532e610 --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@smithy/middleware-stack/dist-cjs/MiddlewareStack.js @@ -0,0 +1 @@ +module.exports = require("./index.js"); \ No newline at end of file diff --git a/amplify/functions/fetchDocuments/node_modules/@smithy/middleware-stack/dist-cjs/index.js b/amplify/functions/fetchDocuments/node_modules/@smithy/middleware-stack/dist-cjs/index.js new file mode 100644 index 0000000..4c78597 --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@smithy/middleware-stack/dist-cjs/index.js @@ -0,0 +1,313 @@ +var __defProp = Object.defineProperty; +var __getOwnPropDesc = Object.getOwnPropertyDescriptor; +var __getOwnPropNames = Object.getOwnPropertyNames; +var __hasOwnProp = Object.prototype.hasOwnProperty; +var __name = (target, value) => __defProp(target, "name", { value, configurable: true }); +var __export = (target, all) => { + for (var name in all) + __defProp(target, name, { get: all[name], enumerable: true }); +}; +var __copyProps = 
(to, from, except, desc) => { + if (from && typeof from === "object" || typeof from === "function") { + for (let key of __getOwnPropNames(from)) + if (!__hasOwnProp.call(to, key) && key !== except) + __defProp(to, key, { get: () => from[key], enumerable: !(desc = __getOwnPropDesc(from, key)) || desc.enumerable }); + } + return to; +}; +var __toCommonJS = (mod) => __copyProps(__defProp({}, "__esModule", { value: true }), mod); + +// src/index.ts +var src_exports = {}; +__export(src_exports, { + constructStack: () => constructStack +}); +module.exports = __toCommonJS(src_exports); + +// src/MiddlewareStack.ts +var getAllAliases = /* @__PURE__ */ __name((name, aliases) => { + const _aliases = []; + if (name) { + _aliases.push(name); + } + if (aliases) { + for (const alias of aliases) { + _aliases.push(alias); + } + } + return _aliases; +}, "getAllAliases"); +var getMiddlewareNameWithAliases = /* @__PURE__ */ __name((name, aliases) => { + return `${name || "anonymous"}${aliases && aliases.length > 0 ? ` (a.k.a. 
${aliases.join(",")})` : ""}`; +}, "getMiddlewareNameWithAliases"); +var constructStack = /* @__PURE__ */ __name(() => { + let absoluteEntries = []; + let relativeEntries = []; + let identifyOnResolve = false; + const entriesNameSet = /* @__PURE__ */ new Set(); + const sort = /* @__PURE__ */ __name((entries) => entries.sort( + (a, b) => stepWeights[b.step] - stepWeights[a.step] || priorityWeights[b.priority || "normal"] - priorityWeights[a.priority || "normal"] + ), "sort"); + const removeByName = /* @__PURE__ */ __name((toRemove) => { + let isRemoved = false; + const filterCb = /* @__PURE__ */ __name((entry) => { + const aliases = getAllAliases(entry.name, entry.aliases); + if (aliases.includes(toRemove)) { + isRemoved = true; + for (const alias of aliases) { + entriesNameSet.delete(alias); + } + return false; + } + return true; + }, "filterCb"); + absoluteEntries = absoluteEntries.filter(filterCb); + relativeEntries = relativeEntries.filter(filterCb); + return isRemoved; + }, "removeByName"); + const removeByReference = /* @__PURE__ */ __name((toRemove) => { + let isRemoved = false; + const filterCb = /* @__PURE__ */ __name((entry) => { + if (entry.middleware === toRemove) { + isRemoved = true; + for (const alias of getAllAliases(entry.name, entry.aliases)) { + entriesNameSet.delete(alias); + } + return false; + } + return true; + }, "filterCb"); + absoluteEntries = absoluteEntries.filter(filterCb); + relativeEntries = relativeEntries.filter(filterCb); + return isRemoved; + }, "removeByReference"); + const cloneTo = /* @__PURE__ */ __name((toStack) => { + absoluteEntries.forEach((entry) => { + toStack.add(entry.middleware, { ...entry }); + }); + relativeEntries.forEach((entry) => { + toStack.addRelativeTo(entry.middleware, { ...entry }); + }); + toStack.identifyOnResolve?.(stack.identifyOnResolve()); + return toStack; + }, "cloneTo"); + const expandRelativeMiddlewareList = /* @__PURE__ */ __name((from) => { + const expandedMiddlewareList = []; + 
from.before.forEach((entry) => { + if (entry.before.length === 0 && entry.after.length === 0) { + expandedMiddlewareList.push(entry); + } else { + expandedMiddlewareList.push(...expandRelativeMiddlewareList(entry)); + } + }); + expandedMiddlewareList.push(from); + from.after.reverse().forEach((entry) => { + if (entry.before.length === 0 && entry.after.length === 0) { + expandedMiddlewareList.push(entry); + } else { + expandedMiddlewareList.push(...expandRelativeMiddlewareList(entry)); + } + }); + return expandedMiddlewareList; + }, "expandRelativeMiddlewareList"); + const getMiddlewareList = /* @__PURE__ */ __name((debug = false) => { + const normalizedAbsoluteEntries = []; + const normalizedRelativeEntries = []; + const normalizedEntriesNameMap = {}; + absoluteEntries.forEach((entry) => { + const normalizedEntry = { + ...entry, + before: [], + after: [] + }; + for (const alias of getAllAliases(normalizedEntry.name, normalizedEntry.aliases)) { + normalizedEntriesNameMap[alias] = normalizedEntry; + } + normalizedAbsoluteEntries.push(normalizedEntry); + }); + relativeEntries.forEach((entry) => { + const normalizedEntry = { + ...entry, + before: [], + after: [] + }; + for (const alias of getAllAliases(normalizedEntry.name, normalizedEntry.aliases)) { + normalizedEntriesNameMap[alias] = normalizedEntry; + } + normalizedRelativeEntries.push(normalizedEntry); + }); + normalizedRelativeEntries.forEach((entry) => { + if (entry.toMiddleware) { + const toMiddleware = normalizedEntriesNameMap[entry.toMiddleware]; + if (toMiddleware === void 0) { + if (debug) { + return; + } + throw new Error( + `${entry.toMiddleware} is not found when adding ${getMiddlewareNameWithAliases(entry.name, entry.aliases)} middleware ${entry.relation} ${entry.toMiddleware}` + ); + } + if (entry.relation === "after") { + toMiddleware.after.push(entry); + } + if (entry.relation === "before") { + toMiddleware.before.push(entry); + } + } + }); + const mainChain = 
sort(normalizedAbsoluteEntries).map(expandRelativeMiddlewareList).reduce( + (wholeList, expandedMiddlewareList) => { + wholeList.push(...expandedMiddlewareList); + return wholeList; + }, + [] + ); + return mainChain; + }, "getMiddlewareList"); + const stack = { + add: (middleware, options = {}) => { + const { name, override, aliases: _aliases } = options; + const entry = { + step: "initialize", + priority: "normal", + middleware, + ...options + }; + const aliases = getAllAliases(name, _aliases); + if (aliases.length > 0) { + if (aliases.some((alias) => entriesNameSet.has(alias))) { + if (!override) + throw new Error(`Duplicate middleware name '${getMiddlewareNameWithAliases(name, _aliases)}'`); + for (const alias of aliases) { + const toOverrideIndex = absoluteEntries.findIndex( + (entry2) => entry2.name === alias || entry2.aliases?.some((a) => a === alias) + ); + if (toOverrideIndex === -1) { + continue; + } + const toOverride = absoluteEntries[toOverrideIndex]; + if (toOverride.step !== entry.step || entry.priority !== toOverride.priority) { + throw new Error( + `"${getMiddlewareNameWithAliases(toOverride.name, toOverride.aliases)}" middleware with ${toOverride.priority} priority in ${toOverride.step} step cannot be overridden by "${getMiddlewareNameWithAliases(name, _aliases)}" middleware with ${entry.priority} priority in ${entry.step} step.` + ); + } + absoluteEntries.splice(toOverrideIndex, 1); + } + } + for (const alias of aliases) { + entriesNameSet.add(alias); + } + } + absoluteEntries.push(entry); + }, + addRelativeTo: (middleware, options) => { + const { name, override, aliases: _aliases } = options; + const entry = { + middleware, + ...options + }; + const aliases = getAllAliases(name, _aliases); + if (aliases.length > 0) { + if (aliases.some((alias) => entriesNameSet.has(alias))) { + if (!override) + throw new Error(`Duplicate middleware name '${getMiddlewareNameWithAliases(name, _aliases)}'`); + for (const alias of aliases) { + const toOverrideIndex = 
relativeEntries.findIndex( + (entry2) => entry2.name === alias || entry2.aliases?.some((a) => a === alias) + ); + if (toOverrideIndex === -1) { + continue; + } + const toOverride = relativeEntries[toOverrideIndex]; + if (toOverride.toMiddleware !== entry.toMiddleware || toOverride.relation !== entry.relation) { + throw new Error( + `"${getMiddlewareNameWithAliases(toOverride.name, toOverride.aliases)}" middleware ${toOverride.relation} "${toOverride.toMiddleware}" middleware cannot be overridden by "${getMiddlewareNameWithAliases(name, _aliases)}" middleware ${entry.relation} "${entry.toMiddleware}" middleware.` + ); + } + relativeEntries.splice(toOverrideIndex, 1); + } + } + for (const alias of aliases) { + entriesNameSet.add(alias); + } + } + relativeEntries.push(entry); + }, + clone: () => cloneTo(constructStack()), + use: (plugin) => { + plugin.applyToStack(stack); + }, + remove: (toRemove) => { + if (typeof toRemove === "string") + return removeByName(toRemove); + else + return removeByReference(toRemove); + }, + removeByTag: (toRemove) => { + let isRemoved = false; + const filterCb = /* @__PURE__ */ __name((entry) => { + const { tags, name, aliases: _aliases } = entry; + if (tags && tags.includes(toRemove)) { + const aliases = getAllAliases(name, _aliases); + for (const alias of aliases) { + entriesNameSet.delete(alias); + } + isRemoved = true; + return false; + } + return true; + }, "filterCb"); + absoluteEntries = absoluteEntries.filter(filterCb); + relativeEntries = relativeEntries.filter(filterCb); + return isRemoved; + }, + concat: (from) => { + const cloned = cloneTo(constructStack()); + cloned.use(from); + cloned.identifyOnResolve( + identifyOnResolve || cloned.identifyOnResolve() || (from.identifyOnResolve?.() ?? false) + ); + return cloned; + }, + applyToStack: cloneTo, + identify: () => { + return getMiddlewareList(true).map((mw) => { + const step = mw.step ?? 
mw.relation + " " + mw.toMiddleware; + return getMiddlewareNameWithAliases(mw.name, mw.aliases) + " - " + step; + }); + }, + identifyOnResolve(toggle) { + if (typeof toggle === "boolean") + identifyOnResolve = toggle; + return identifyOnResolve; + }, + resolve: (handler, context) => { + for (const middleware of getMiddlewareList().map((entry) => entry.middleware).reverse()) { + handler = middleware(handler, context); + } + if (identifyOnResolve) { + console.log(stack.identify()); + } + return handler; + } + }; + return stack; +}, "constructStack"); +var stepWeights = { + initialize: 5, + serialize: 4, + build: 3, + finalizeRequest: 2, + deserialize: 1 +}; +var priorityWeights = { + high: 3, + normal: 2, + low: 1 +}; +// Annotate the CommonJS export names for ESM import in node: + +0 && (module.exports = { + constructStack +}); + diff --git a/amplify/functions/fetchDocuments/node_modules/@smithy/middleware-stack/dist-cjs/types.js b/amplify/functions/fetchDocuments/node_modules/@smithy/middleware-stack/dist-cjs/types.js new file mode 100644 index 0000000..532e610 --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@smithy/middleware-stack/dist-cjs/types.js @@ -0,0 +1 @@ +module.exports = require("./index.js"); \ No newline at end of file diff --git a/amplify/functions/fetchDocuments/node_modules/@smithy/middleware-stack/dist-es/MiddlewareStack.js b/amplify/functions/fetchDocuments/node_modules/@smithy/middleware-stack/dist-es/MiddlewareStack.js new file mode 100644 index 0000000..2e02c73 --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@smithy/middleware-stack/dist-es/MiddlewareStack.js @@ -0,0 +1,281 @@ +const getAllAliases = (name, aliases) => { + const _aliases = []; + if (name) { + _aliases.push(name); + } + if (aliases) { + for (const alias of aliases) { + _aliases.push(alias); + } + } + return _aliases; +}; +const getMiddlewareNameWithAliases = (name, aliases) => { + return `${name || "anonymous"}${aliases && aliases.length > 0 ? 
` (a.k.a. ${aliases.join(",")})` : ""}`; +}; +export const constructStack = () => { + let absoluteEntries = []; + let relativeEntries = []; + let identifyOnResolve = false; + const entriesNameSet = new Set(); + const sort = (entries) => entries.sort((a, b) => stepWeights[b.step] - stepWeights[a.step] || + priorityWeights[b.priority || "normal"] - priorityWeights[a.priority || "normal"]); + const removeByName = (toRemove) => { + let isRemoved = false; + const filterCb = (entry) => { + const aliases = getAllAliases(entry.name, entry.aliases); + if (aliases.includes(toRemove)) { + isRemoved = true; + for (const alias of aliases) { + entriesNameSet.delete(alias); + } + return false; + } + return true; + }; + absoluteEntries = absoluteEntries.filter(filterCb); + relativeEntries = relativeEntries.filter(filterCb); + return isRemoved; + }; + const removeByReference = (toRemove) => { + let isRemoved = false; + const filterCb = (entry) => { + if (entry.middleware === toRemove) { + isRemoved = true; + for (const alias of getAllAliases(entry.name, entry.aliases)) { + entriesNameSet.delete(alias); + } + return false; + } + return true; + }; + absoluteEntries = absoluteEntries.filter(filterCb); + relativeEntries = relativeEntries.filter(filterCb); + return isRemoved; + }; + const cloneTo = (toStack) => { + absoluteEntries.forEach((entry) => { + toStack.add(entry.middleware, { ...entry }); + }); + relativeEntries.forEach((entry) => { + toStack.addRelativeTo(entry.middleware, { ...entry }); + }); + toStack.identifyOnResolve?.(stack.identifyOnResolve()); + return toStack; + }; + const expandRelativeMiddlewareList = (from) => { + const expandedMiddlewareList = []; + from.before.forEach((entry) => { + if (entry.before.length === 0 && entry.after.length === 0) { + expandedMiddlewareList.push(entry); + } + else { + expandedMiddlewareList.push(...expandRelativeMiddlewareList(entry)); + } + }); + expandedMiddlewareList.push(from); + from.after.reverse().forEach((entry) => { + if 
(entry.before.length === 0 && entry.after.length === 0) { + expandedMiddlewareList.push(entry); + } + else { + expandedMiddlewareList.push(...expandRelativeMiddlewareList(entry)); + } + }); + return expandedMiddlewareList; + }; + const getMiddlewareList = (debug = false) => { + const normalizedAbsoluteEntries = []; + const normalizedRelativeEntries = []; + const normalizedEntriesNameMap = {}; + absoluteEntries.forEach((entry) => { + const normalizedEntry = { + ...entry, + before: [], + after: [], + }; + for (const alias of getAllAliases(normalizedEntry.name, normalizedEntry.aliases)) { + normalizedEntriesNameMap[alias] = normalizedEntry; + } + normalizedAbsoluteEntries.push(normalizedEntry); + }); + relativeEntries.forEach((entry) => { + const normalizedEntry = { + ...entry, + before: [], + after: [], + }; + for (const alias of getAllAliases(normalizedEntry.name, normalizedEntry.aliases)) { + normalizedEntriesNameMap[alias] = normalizedEntry; + } + normalizedRelativeEntries.push(normalizedEntry); + }); + normalizedRelativeEntries.forEach((entry) => { + if (entry.toMiddleware) { + const toMiddleware = normalizedEntriesNameMap[entry.toMiddleware]; + if (toMiddleware === undefined) { + if (debug) { + return; + } + throw new Error(`${entry.toMiddleware} is not found when adding ` + + `${getMiddlewareNameWithAliases(entry.name, entry.aliases)} ` + + `middleware ${entry.relation} ${entry.toMiddleware}`); + } + if (entry.relation === "after") { + toMiddleware.after.push(entry); + } + if (entry.relation === "before") { + toMiddleware.before.push(entry); + } + } + }); + const mainChain = sort(normalizedAbsoluteEntries) + .map(expandRelativeMiddlewareList) + .reduce((wholeList, expandedMiddlewareList) => { + wholeList.push(...expandedMiddlewareList); + return wholeList; + }, []); + return mainChain; + }; + const stack = { + add: (middleware, options = {}) => { + const { name, override, aliases: _aliases } = options; + const entry = { + step: "initialize", + priority: 
"normal", + middleware, + ...options, + }; + const aliases = getAllAliases(name, _aliases); + if (aliases.length > 0) { + if (aliases.some((alias) => entriesNameSet.has(alias))) { + if (!override) + throw new Error(`Duplicate middleware name '${getMiddlewareNameWithAliases(name, _aliases)}'`); + for (const alias of aliases) { + const toOverrideIndex = absoluteEntries.findIndex((entry) => entry.name === alias || entry.aliases?.some((a) => a === alias)); + if (toOverrideIndex === -1) { + continue; + } + const toOverride = absoluteEntries[toOverrideIndex]; + if (toOverride.step !== entry.step || entry.priority !== toOverride.priority) { + throw new Error(`"${getMiddlewareNameWithAliases(toOverride.name, toOverride.aliases)}" middleware with ` + + `${toOverride.priority} priority in ${toOverride.step} step cannot ` + + `be overridden by "${getMiddlewareNameWithAliases(name, _aliases)}" middleware with ` + + `${entry.priority} priority in ${entry.step} step.`); + } + absoluteEntries.splice(toOverrideIndex, 1); + } + } + for (const alias of aliases) { + entriesNameSet.add(alias); + } + } + absoluteEntries.push(entry); + }, + addRelativeTo: (middleware, options) => { + const { name, override, aliases: _aliases } = options; + const entry = { + middleware, + ...options, + }; + const aliases = getAllAliases(name, _aliases); + if (aliases.length > 0) { + if (aliases.some((alias) => entriesNameSet.has(alias))) { + if (!override) + throw new Error(`Duplicate middleware name '${getMiddlewareNameWithAliases(name, _aliases)}'`); + for (const alias of aliases) { + const toOverrideIndex = relativeEntries.findIndex((entry) => entry.name === alias || entry.aliases?.some((a) => a === alias)); + if (toOverrideIndex === -1) { + continue; + } + const toOverride = relativeEntries[toOverrideIndex]; + if (toOverride.toMiddleware !== entry.toMiddleware || toOverride.relation !== entry.relation) { + throw new Error(`"${getMiddlewareNameWithAliases(toOverride.name, toOverride.aliases)}" 
middleware ` + + `${toOverride.relation} "${toOverride.toMiddleware}" middleware cannot be overridden ` + + `by "${getMiddlewareNameWithAliases(name, _aliases)}" middleware ${entry.relation} ` + + `"${entry.toMiddleware}" middleware.`); + } + relativeEntries.splice(toOverrideIndex, 1); + } + } + for (const alias of aliases) { + entriesNameSet.add(alias); + } + } + relativeEntries.push(entry); + }, + clone: () => cloneTo(constructStack()), + use: (plugin) => { + plugin.applyToStack(stack); + }, + remove: (toRemove) => { + if (typeof toRemove === "string") + return removeByName(toRemove); + else + return removeByReference(toRemove); + }, + removeByTag: (toRemove) => { + let isRemoved = false; + const filterCb = (entry) => { + const { tags, name, aliases: _aliases } = entry; + if (tags && tags.includes(toRemove)) { + const aliases = getAllAliases(name, _aliases); + for (const alias of aliases) { + entriesNameSet.delete(alias); + } + isRemoved = true; + return false; + } + return true; + }; + absoluteEntries = absoluteEntries.filter(filterCb); + relativeEntries = relativeEntries.filter(filterCb); + return isRemoved; + }, + concat: (from) => { + const cloned = cloneTo(constructStack()); + cloned.use(from); + cloned.identifyOnResolve(identifyOnResolve || cloned.identifyOnResolve() || (from.identifyOnResolve?.() ?? false)); + return cloned; + }, + applyToStack: cloneTo, + identify: () => { + return getMiddlewareList(true).map((mw) => { + const step = mw.step ?? 
+ mw.relation + + " " + + mw.toMiddleware; + return getMiddlewareNameWithAliases(mw.name, mw.aliases) + " - " + step; + }); + }, + identifyOnResolve(toggle) { + if (typeof toggle === "boolean") + identifyOnResolve = toggle; + return identifyOnResolve; + }, + resolve: (handler, context) => { + for (const middleware of getMiddlewareList() + .map((entry) => entry.middleware) + .reverse()) { + handler = middleware(handler, context); + } + if (identifyOnResolve) { + console.log(stack.identify()); + } + return handler; + }, + }; + return stack; +}; +const stepWeights = { + initialize: 5, + serialize: 4, + build: 3, + finalizeRequest: 2, + deserialize: 1, +}; +const priorityWeights = { + high: 3, + normal: 2, + low: 1, +}; diff --git a/amplify/functions/fetchDocuments/node_modules/@smithy/middleware-stack/dist-es/index.js b/amplify/functions/fetchDocuments/node_modules/@smithy/middleware-stack/dist-es/index.js new file mode 100644 index 0000000..16f56ce --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@smithy/middleware-stack/dist-es/index.js @@ -0,0 +1 @@ +export * from "./MiddlewareStack"; diff --git a/amplify/functions/fetchDocuments/node_modules/@smithy/middleware-stack/dist-es/types.js b/amplify/functions/fetchDocuments/node_modules/@smithy/middleware-stack/dist-es/types.js new file mode 100644 index 0000000..cb0ff5c --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@smithy/middleware-stack/dist-es/types.js @@ -0,0 +1 @@ +export {}; diff --git a/amplify/functions/fetchDocuments/node_modules/@smithy/middleware-stack/dist-types/MiddlewareStack.d.ts b/amplify/functions/fetchDocuments/node_modules/@smithy/middleware-stack/dist-types/MiddlewareStack.d.ts new file mode 100644 index 0000000..2aa088b --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@smithy/middleware-stack/dist-types/MiddlewareStack.d.ts @@ -0,0 +1,5 @@ +import { MiddlewareStack } from "@smithy/types"; +/** + * @internal + */ +export declare const 
constructStack: () => MiddlewareStack; diff --git a/amplify/functions/fetchDocuments/node_modules/@smithy/middleware-stack/dist-types/index.d.ts b/amplify/functions/fetchDocuments/node_modules/@smithy/middleware-stack/dist-types/index.d.ts new file mode 100644 index 0000000..16f56ce --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@smithy/middleware-stack/dist-types/index.d.ts @@ -0,0 +1 @@ +export * from "./MiddlewareStack"; diff --git a/amplify/functions/fetchDocuments/node_modules/@smithy/middleware-stack/dist-types/ts3.4/MiddlewareStack.d.ts b/amplify/functions/fetchDocuments/node_modules/@smithy/middleware-stack/dist-types/ts3.4/MiddlewareStack.d.ts new file mode 100644 index 0000000..d93ce93 --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@smithy/middleware-stack/dist-types/ts3.4/MiddlewareStack.d.ts @@ -0,0 +1,5 @@ +import { MiddlewareStack } from "@smithy/types"; +/** + * @internal + */ +export declare const constructStack: () => MiddlewareStack; diff --git a/amplify/functions/fetchDocuments/node_modules/@smithy/middleware-stack/dist-types/ts3.4/index.d.ts b/amplify/functions/fetchDocuments/node_modules/@smithy/middleware-stack/dist-types/ts3.4/index.d.ts new file mode 100644 index 0000000..d906b7d --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@smithy/middleware-stack/dist-types/ts3.4/index.d.ts @@ -0,0 +1 @@ +export * from "./MiddlewareStack"; diff --git a/amplify/functions/fetchDocuments/node_modules/@smithy/middleware-stack/dist-types/ts3.4/types.d.ts b/amplify/functions/fetchDocuments/node_modules/@smithy/middleware-stack/dist-types/ts3.4/types.d.ts new file mode 100644 index 0000000..38eb54c --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@smithy/middleware-stack/dist-types/ts3.4/types.d.ts @@ -0,0 +1,22 @@ +import { AbsoluteLocation, HandlerOptions, MiddlewareType, Priority, RelativeLocation, Step } from "@smithy/types"; +export interface MiddlewareEntry extends HandlerOptions { 
+ middleware: MiddlewareType; +} +export interface AbsoluteMiddlewareEntry extends MiddlewareEntry, AbsoluteLocation { + step: Step; + priority: Priority; +} +export interface RelativeMiddlewareEntry extends MiddlewareEntry, RelativeLocation { +} +export type Normalized, Input extends object = {}, Output extends object = {}> = T & { + after: Normalized, Input, Output>[]; + before: Normalized, Input, Output>[]; +}; +export interface NormalizedRelativeEntry extends HandlerOptions { + step: Step; + middleware: MiddlewareType; + next?: NormalizedRelativeEntry; + prev?: NormalizedRelativeEntry; + priority: null; +} +export type NamedMiddlewareEntriesMap = Record>; diff --git a/amplify/functions/fetchDocuments/node_modules/@smithy/middleware-stack/dist-types/types.d.ts b/amplify/functions/fetchDocuments/node_modules/@smithy/middleware-stack/dist-types/types.d.ts new file mode 100644 index 0000000..4aa5fc6 --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@smithy/middleware-stack/dist-types/types.d.ts @@ -0,0 +1,22 @@ +import { AbsoluteLocation, HandlerOptions, MiddlewareType, Priority, RelativeLocation, Step } from "@smithy/types"; +export interface MiddlewareEntry extends HandlerOptions { + middleware: MiddlewareType; +} +export interface AbsoluteMiddlewareEntry extends MiddlewareEntry, AbsoluteLocation { + step: Step; + priority: Priority; +} +export interface RelativeMiddlewareEntry extends MiddlewareEntry, RelativeLocation { +} +export type Normalized, Input extends object = {}, Output extends object = {}> = T & { + after: Normalized, Input, Output>[]; + before: Normalized, Input, Output>[]; +}; +export interface NormalizedRelativeEntry extends HandlerOptions { + step: Step; + middleware: MiddlewareType; + next?: NormalizedRelativeEntry; + prev?: NormalizedRelativeEntry; + priority: null; +} +export type NamedMiddlewareEntriesMap = Record>; diff --git a/amplify/functions/fetchDocuments/node_modules/@smithy/middleware-stack/package.json 
b/amplify/functions/fetchDocuments/node_modules/@smithy/middleware-stack/package.json new file mode 100644 index 0000000..57077ab --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@smithy/middleware-stack/package.json @@ -0,0 +1,63 @@ +{ + "name": "@smithy/middleware-stack", + "version": "4.0.2", + "description": "Provides a means for composing multiple middleware functions into a single handler", + "scripts": { + "build": "concurrently 'yarn:build:cjs' 'yarn:build:es' 'yarn:build:types && yarn build:types:downlevel'", + "build:cjs": "node ../../scripts/inline middleware-stack", + "build:es": "yarn g:tsc -p tsconfig.es.json", + "build:types": "yarn g:tsc -p tsconfig.types.json", + "build:types:downlevel": "rimraf dist-types/ts3.4 && downlevel-dts dist-types dist-types/ts3.4", + "stage-release": "rimraf ./.release && yarn pack && mkdir ./.release && tar zxvf ./package.tgz --directory ./.release && rm ./package.tgz", + "clean": "rimraf ./dist-* && rimraf *.tsbuildinfo || exit 0", + "lint": "eslint -c ../../.eslintrc.js \"src/**/*.ts\"", + "format": "prettier --config ../../prettier.config.js --ignore-path ../../.prettierignore --write \"**/*.{ts,md,json}\"", + "extract:docs": "api-extractor run --local", + "test": "yarn g:vitest run", + "test:watch": "yarn g:vitest watch" + }, + "author": { + "name": "AWS SDK for JavaScript Team", + "email": "", + "url": "https://aws.amazon.com/javascript/" + }, + "license": "Apache-2.0", + "main": "./dist-cjs/index.js", + "module": "./dist-es/index.js", + "types": "./dist-types/index.d.ts", + "dependencies": { + "@smithy/types": "^4.2.0", + "tslib": "^2.6.2" + }, + "devDependencies": { + "concurrently": "7.0.0", + "downlevel-dts": "0.10.1", + "rimraf": "3.0.2", + "typedoc": "0.23.23" + }, + "engines": { + "node": ">=18.0.0" + }, + "typesVersions": { + "<4.0": { + "dist-types/*": [ + "dist-types/ts3.4/*" + ] + } + }, + "files": [ + "dist-*/**" + ], + "homepage": 
"https://github.com/smithy-lang/smithy-typescript/tree/main/packages/middleware-stack", + "repository": { + "type": "git", + "url": "https://github.com/smithy-lang/smithy-typescript.git", + "directory": "packages/middleware-stack" + }, + "typedoc": { + "entryPoint": "src/index.ts" + }, + "publishConfig": { + "directory": ".release/package" + } +} \ No newline at end of file diff --git a/amplify/functions/fetchDocuments/node_modules/@smithy/node-config-provider/LICENSE b/amplify/functions/fetchDocuments/node_modules/@smithy/node-config-provider/LICENSE new file mode 100644 index 0000000..74d4e5c --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@smithy/node-config-provider/LICENSE @@ -0,0 +1,201 @@ +Apache License + Version 2.0, January 2004 + http://www.apache.org/licenses/ + + TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION + + 1. Definitions. + + "License" shall mean the terms and conditions for use, reproduction, + and distribution as defined by Sections 1 through 9 of this document. + + "Licensor" shall mean the copyright owner or entity authorized by + the copyright owner that is granting the License. + + "Legal Entity" shall mean the union of the acting entity and all + other entities that control, are controlled by, or are under common + control with that entity. For the purposes of this definition, + "control" means (i) the power, direct or indirect, to cause the + direction or management of such entity, whether by contract or + otherwise, or (ii) ownership of fifty percent (50%) or more of the + outstanding shares, or (iii) beneficial ownership of such entity. + + "You" (or "Your") shall mean an individual or Legal Entity + exercising permissions granted by this License. + + "Source" form shall mean the preferred form for making modifications, + including but not limited to software source code, documentation + source, and configuration files. 
+ + "Object" form shall mean any form resulting from mechanical + transformation or translation of a Source form, including but + not limited to compiled object code, generated documentation, + and conversions to other media types. + + "Work" shall mean the work of authorship, whether in Source or + Object form, made available under the License, as indicated by a + copyright notice that is included in or attached to the work + (an example is provided in the Appendix below). + + "Derivative Works" shall mean any work, whether in Source or Object + form, that is based on (or derived from) the Work and for which the + editorial revisions, annotations, elaborations, or other modifications + represent, as a whole, an original work of authorship. For the purposes + of this License, Derivative Works shall not include works that remain + separable from, or merely link (or bind by name) to the interfaces of, + the Work and Derivative Works thereof. + + "Contribution" shall mean any work of authorship, including + the original version of the Work and any modifications or additions + to that Work or Derivative Works thereof, that is intentionally + submitted to Licensor for inclusion in the Work by the copyright owner + or by an individual or Legal Entity authorized to submit on behalf of + the copyright owner. For the purposes of this definition, "submitted" + means any form of electronic, verbal, or written communication sent + to the Licensor or its representatives, including but not limited to + communication on electronic mailing lists, source code control systems, + and issue tracking systems that are managed by, or on behalf of, the + Licensor for the purpose of discussing and improving the Work, but + excluding communication that is conspicuously marked or otherwise + designated in writing by the copyright owner as "Not a Contribution." 
+ + "Contributor" shall mean Licensor and any individual or Legal Entity + on behalf of whom a Contribution has been received by Licensor and + subsequently incorporated within the Work. + + 2. Grant of Copyright License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + copyright license to reproduce, prepare Derivative Works of, + publicly display, publicly perform, sublicense, and distribute the + Work and such Derivative Works in Source or Object form. + + 3. Grant of Patent License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + (except as stated in this section) patent license to make, have made, + use, offer to sell, sell, import, and otherwise transfer the Work, + where such license applies only to those patent claims licensable + by such Contributor that are necessarily infringed by their + Contribution(s) alone or by combination of their Contribution(s) + with the Work to which such Contribution(s) was submitted. If You + institute patent litigation against any entity (including a + cross-claim or counterclaim in a lawsuit) alleging that the Work + or a Contribution incorporated within the Work constitutes direct + or contributory patent infringement, then any patent licenses + granted to You under this License for that Work shall terminate + as of the date such litigation is filed. + + 4. Redistribution. 
You may reproduce and distribute copies of the + Work or Derivative Works thereof in any medium, with or without + modifications, and in Source or Object form, provided that You + meet the following conditions: + + (a) You must give any other recipients of the Work or + Derivative Works a copy of this License; and + + (b) You must cause any modified files to carry prominent notices + stating that You changed the files; and + + (c) You must retain, in the Source form of any Derivative Works + that You distribute, all copyright, patent, trademark, and + attribution notices from the Source form of the Work, + excluding those notices that do not pertain to any part of + the Derivative Works; and + + (d) If the Work includes a "NOTICE" text file as part of its + distribution, then any Derivative Works that You distribute must + include a readable copy of the attribution notices contained + within such NOTICE file, excluding those notices that do not + pertain to any part of the Derivative Works, in at least one + of the following places: within a NOTICE text file distributed + as part of the Derivative Works; within the Source form or + documentation, if provided along with the Derivative Works; or, + within a display generated by the Derivative Works, if and + wherever such third-party notices normally appear. The contents + of the NOTICE file are for informational purposes only and + do not modify the License. You may add Your own attribution + notices within Derivative Works that You distribute, alongside + or as an addendum to the NOTICE text from the Work, provided + that such additional attribution notices cannot be construed + as modifying the License. 
+ + You may add Your own copyright statement to Your modifications and + may provide additional or different license terms and conditions + for use, reproduction, or distribution of Your modifications, or + for any such Derivative Works as a whole, provided Your use, + reproduction, and distribution of the Work otherwise complies with + the conditions stated in this License. + + 5. Submission of Contributions. Unless You explicitly state otherwise, + any Contribution intentionally submitted for inclusion in the Work + by You to the Licensor shall be under the terms and conditions of + this License, without any additional terms or conditions. + Notwithstanding the above, nothing herein shall supersede or modify + the terms of any separate license agreement you may have executed + with Licensor regarding such Contributions. + + 6. Trademarks. This License does not grant permission to use the trade + names, trademarks, service marks, or product names of the Licensor, + except as required for reasonable and customary use in describing the + origin of the Work and reproducing the content of the NOTICE file. + + 7. Disclaimer of Warranty. Unless required by applicable law or + agreed to in writing, Licensor provides the Work (and each + Contributor provides its Contributions) on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or + implied, including, without limitation, any warranties or conditions + of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A + PARTICULAR PURPOSE. You are solely responsible for determining the + appropriateness of using or redistributing the Work and assume any + risks associated with Your exercise of permissions under this License. + + 8. Limitation of Liability. 
In no event and under no legal theory, + whether in tort (including negligence), contract, or otherwise, + unless required by applicable law (such as deliberate and grossly + negligent acts) or agreed to in writing, shall any Contributor be + liable to You for damages, including any direct, indirect, special, + incidental, or consequential damages of any character arising as a + result of this License or out of the use or inability to use the + Work (including but not limited to damages for loss of goodwill, + work stoppage, computer failure or malfunction, or any and all + other commercial damages or losses), even if such Contributor + has been advised of the possibility of such damages. + + 9. Accepting Warranty or Additional Liability. While redistributing + the Work or Derivative Works thereof, You may choose to offer, + and charge a fee for, acceptance of support, warranty, indemnity, + or other liability obligations and/or rights consistent with this + License. However, in accepting such obligations, You may act only + on Your own behalf and on Your sole responsibility, not on behalf + of any other Contributor, and only if You agree to indemnify, + defend, and hold each Contributor harmless for any liability + incurred by, or claims asserted against, such Contributor by reason + of your accepting any such warranty or additional liability. + + END OF TERMS AND CONDITIONS + + APPENDIX: How to apply the Apache License to your work. + + To apply the Apache License to your work, attach the following + boilerplate notice, with the fields enclosed by brackets "{}" + replaced with your own identifying information. (Don't include + the brackets!) The text should be enclosed in the appropriate + comment syntax for the file format. We also recommend that a + file or class name and description of purpose be included on the + same "printed page" as the copyright notice for easier + identification within third-party archives. + + Copyright 2020 Amazon.com, Inc. 
or its affiliates. All Rights Reserved. + + Licensed under the Apache License, Version 2.0 (the "License"); + you may not use this file except in compliance with the License. + You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + + Unless required by applicable law or agreed to in writing, software + distributed under the License is distributed on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + See the License for the specific language governing permissions and + limitations under the License. \ No newline at end of file diff --git a/amplify/functions/fetchDocuments/node_modules/@smithy/node-config-provider/README.md b/amplify/functions/fetchDocuments/node_modules/@smithy/node-config-provider/README.md new file mode 100644 index 0000000..af591d2 --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@smithy/node-config-provider/README.md @@ -0,0 +1,4 @@ +# @smithy/node-config-provider + +[![NPM version](https://img.shields.io/npm/v/@smithy/node-config-provider/latest.svg)](https://www.npmjs.com/package/@smithy/node-config-provider) +[![NPM downloads](https://img.shields.io/npm/dm/@smithy/node-config-provider.svg)](https://www.npmjs.com/package/@smithy/node-config-provider) diff --git a/amplify/functions/fetchDocuments/node_modules/@smithy/node-config-provider/dist-cjs/configLoader.js b/amplify/functions/fetchDocuments/node_modules/@smithy/node-config-provider/dist-cjs/configLoader.js new file mode 100644 index 0000000..532e610 --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@smithy/node-config-provider/dist-cjs/configLoader.js @@ -0,0 +1 @@ +module.exports = require("./index.js"); \ No newline at end of file diff --git a/amplify/functions/fetchDocuments/node_modules/@smithy/node-config-provider/dist-cjs/fromEnv.js b/amplify/functions/fetchDocuments/node_modules/@smithy/node-config-provider/dist-cjs/fromEnv.js new file mode 100644 index 0000000..532e610 
--- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@smithy/node-config-provider/dist-cjs/fromEnv.js @@ -0,0 +1 @@ +module.exports = require("./index.js"); \ No newline at end of file diff --git a/amplify/functions/fetchDocuments/node_modules/@smithy/node-config-provider/dist-cjs/fromSharedConfigFiles.js b/amplify/functions/fetchDocuments/node_modules/@smithy/node-config-provider/dist-cjs/fromSharedConfigFiles.js new file mode 100644 index 0000000..532e610 --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@smithy/node-config-provider/dist-cjs/fromSharedConfigFiles.js @@ -0,0 +1 @@ +module.exports = require("./index.js"); \ No newline at end of file diff --git a/amplify/functions/fetchDocuments/node_modules/@smithy/node-config-provider/dist-cjs/fromStatic.js b/amplify/functions/fetchDocuments/node_modules/@smithy/node-config-provider/dist-cjs/fromStatic.js new file mode 100644 index 0000000..532e610 --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@smithy/node-config-provider/dist-cjs/fromStatic.js @@ -0,0 +1 @@ +module.exports = require("./index.js"); \ No newline at end of file diff --git a/amplify/functions/fetchDocuments/node_modules/@smithy/node-config-provider/dist-cjs/getSelectorName.js b/amplify/functions/fetchDocuments/node_modules/@smithy/node-config-provider/dist-cjs/getSelectorName.js new file mode 100644 index 0000000..532e610 --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@smithy/node-config-provider/dist-cjs/getSelectorName.js @@ -0,0 +1 @@ +module.exports = require("./index.js"); \ No newline at end of file diff --git a/amplify/functions/fetchDocuments/node_modules/@smithy/node-config-provider/dist-cjs/index.js b/amplify/functions/fetchDocuments/node_modules/@smithy/node-config-provider/dist-cjs/index.js new file mode 100644 index 0000000..8a98b1b --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@smithy/node-config-provider/dist-cjs/index.js @@ -0,0 +1,105 @@ +var 
__defProp = Object.defineProperty; +var __getOwnPropDesc = Object.getOwnPropertyDescriptor; +var __getOwnPropNames = Object.getOwnPropertyNames; +var __hasOwnProp = Object.prototype.hasOwnProperty; +var __name = (target, value) => __defProp(target, "name", { value, configurable: true }); +var __export = (target, all) => { + for (var name in all) + __defProp(target, name, { get: all[name], enumerable: true }); +}; +var __copyProps = (to, from, except, desc) => { + if (from && typeof from === "object" || typeof from === "function") { + for (let key of __getOwnPropNames(from)) + if (!__hasOwnProp.call(to, key) && key !== except) + __defProp(to, key, { get: () => from[key], enumerable: !(desc = __getOwnPropDesc(from, key)) || desc.enumerable }); + } + return to; +}; +var __toCommonJS = (mod) => __copyProps(__defProp({}, "__esModule", { value: true }), mod); + +// src/index.ts +var src_exports = {}; +__export(src_exports, { + loadConfig: () => loadConfig +}); +module.exports = __toCommonJS(src_exports); + +// src/configLoader.ts + + +// src/fromEnv.ts +var import_property_provider = require("@smithy/property-provider"); + +// src/getSelectorName.ts +function getSelectorName(functionString) { + try { + const constants = new Set(Array.from(functionString.match(/([A-Z_]){3,}/g) ?? 
[])); + constants.delete("CONFIG"); + constants.delete("CONFIG_PREFIX_SEPARATOR"); + constants.delete("ENV"); + return [...constants].join(", "); + } catch (e) { + return functionString; + } +} +__name(getSelectorName, "getSelectorName"); + +// src/fromEnv.ts +var fromEnv = /* @__PURE__ */ __name((envVarSelector, logger) => async () => { + try { + const config = envVarSelector(process.env); + if (config === void 0) { + throw new Error(); + } + return config; + } catch (e) { + throw new import_property_provider.CredentialsProviderError( + e.message || `Not found in ENV: ${getSelectorName(envVarSelector.toString())}`, + { logger } + ); + } +}, "fromEnv"); + +// src/fromSharedConfigFiles.ts + +var import_shared_ini_file_loader = require("@smithy/shared-ini-file-loader"); +var fromSharedConfigFiles = /* @__PURE__ */ __name((configSelector, { preferredFile = "config", ...init } = {}) => async () => { + const profile = (0, import_shared_ini_file_loader.getProfileName)(init); + const { configFile, credentialsFile } = await (0, import_shared_ini_file_loader.loadSharedConfigFiles)(init); + const profileFromCredentials = credentialsFile[profile] || {}; + const profileFromConfig = configFile[profile] || {}; + const mergedProfile = preferredFile === "config" ? { ...profileFromCredentials, ...profileFromConfig } : { ...profileFromConfig, ...profileFromCredentials }; + try { + const cfgFile = preferredFile === "config" ? 
configFile : credentialsFile; + const configValue = configSelector(mergedProfile, cfgFile); + if (configValue === void 0) { + throw new Error(); + } + return configValue; + } catch (e) { + throw new import_property_provider.CredentialsProviderError( + e.message || `Not found in config files w/ profile [${profile}]: ${getSelectorName(configSelector.toString())}`, + { logger: init.logger } + ); + } +}, "fromSharedConfigFiles"); + +// src/fromStatic.ts + +var isFunction = /* @__PURE__ */ __name((func) => typeof func === "function", "isFunction"); +var fromStatic = /* @__PURE__ */ __name((defaultValue) => isFunction(defaultValue) ? async () => await defaultValue() : (0, import_property_provider.fromStatic)(defaultValue), "fromStatic"); + +// src/configLoader.ts +var loadConfig = /* @__PURE__ */ __name(({ environmentVariableSelector, configFileSelector, default: defaultValue }, configuration = {}) => (0, import_property_provider.memoize)( + (0, import_property_provider.chain)( + fromEnv(environmentVariableSelector), + fromSharedConfigFiles(configFileSelector, configuration), + fromStatic(defaultValue) + ) +), "loadConfig"); +// Annotate the CommonJS export names for ESM import in node: + +0 && (module.exports = { + loadConfig +}); + diff --git a/amplify/functions/fetchDocuments/node_modules/@smithy/node-config-provider/dist-es/configLoader.js b/amplify/functions/fetchDocuments/node_modules/@smithy/node-config-provider/dist-es/configLoader.js new file mode 100644 index 0000000..db044dd --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@smithy/node-config-provider/dist-es/configLoader.js @@ -0,0 +1,5 @@ +import { chain, memoize } from "@smithy/property-provider"; +import { fromEnv } from "./fromEnv"; +import { fromSharedConfigFiles } from "./fromSharedConfigFiles"; +import { fromStatic } from "./fromStatic"; +export const loadConfig = ({ environmentVariableSelector, configFileSelector, default: defaultValue }, configuration = {}) => 
memoize(chain(fromEnv(environmentVariableSelector), fromSharedConfigFiles(configFileSelector, configuration), fromStatic(defaultValue))); diff --git a/amplify/functions/fetchDocuments/node_modules/@smithy/node-config-provider/dist-es/fromEnv.js b/amplify/functions/fetchDocuments/node_modules/@smithy/node-config-provider/dist-es/fromEnv.js new file mode 100644 index 0000000..d43edbd --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@smithy/node-config-provider/dist-es/fromEnv.js @@ -0,0 +1,14 @@ +import { CredentialsProviderError } from "@smithy/property-provider"; +import { getSelectorName } from "./getSelectorName"; +export const fromEnv = (envVarSelector, logger) => async () => { + try { + const config = envVarSelector(process.env); + if (config === undefined) { + throw new Error(); + } + return config; + } + catch (e) { + throw new CredentialsProviderError(e.message || `Not found in ENV: ${getSelectorName(envVarSelector.toString())}`, { logger }); + } +}; diff --git a/amplify/functions/fetchDocuments/node_modules/@smithy/node-config-provider/dist-es/fromSharedConfigFiles.js b/amplify/functions/fetchDocuments/node_modules/@smithy/node-config-provider/dist-es/fromSharedConfigFiles.js new file mode 100644 index 0000000..b6435ed --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@smithy/node-config-provider/dist-es/fromSharedConfigFiles.js @@ -0,0 +1,23 @@ +import { CredentialsProviderError } from "@smithy/property-provider"; +import { getProfileName, loadSharedConfigFiles } from "@smithy/shared-ini-file-loader"; +import { getSelectorName } from "./getSelectorName"; +export const fromSharedConfigFiles = (configSelector, { preferredFile = "config", ...init } = {}) => async () => { + const profile = getProfileName(init); + const { configFile, credentialsFile } = await loadSharedConfigFiles(init); + const profileFromCredentials = credentialsFile[profile] || {}; + const profileFromConfig = configFile[profile] || {}; + const mergedProfile = 
preferredFile === "config" + ? { ...profileFromCredentials, ...profileFromConfig } + : { ...profileFromConfig, ...profileFromCredentials }; + try { + const cfgFile = preferredFile === "config" ? configFile : credentialsFile; + const configValue = configSelector(mergedProfile, cfgFile); + if (configValue === undefined) { + throw new Error(); + } + return configValue; + } + catch (e) { + throw new CredentialsProviderError(e.message || `Not found in config files w/ profile [${profile}]: ${getSelectorName(configSelector.toString())}`, { logger: init.logger }); + } +}; diff --git a/amplify/functions/fetchDocuments/node_modules/@smithy/node-config-provider/dist-es/fromStatic.js b/amplify/functions/fetchDocuments/node_modules/@smithy/node-config-provider/dist-es/fromStatic.js new file mode 100644 index 0000000..c9f91ff --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@smithy/node-config-provider/dist-es/fromStatic.js @@ -0,0 +1,3 @@ +import { fromStatic as convertToProvider } from "@smithy/property-provider"; +const isFunction = (func) => typeof func === "function"; +export const fromStatic = (defaultValue) => isFunction(defaultValue) ? async () => await defaultValue() : convertToProvider(defaultValue); diff --git a/amplify/functions/fetchDocuments/node_modules/@smithy/node-config-provider/dist-es/getSelectorName.js b/amplify/functions/fetchDocuments/node_modules/@smithy/node-config-provider/dist-es/getSelectorName.js new file mode 100644 index 0000000..d5e0f78 --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@smithy/node-config-provider/dist-es/getSelectorName.js @@ -0,0 +1,12 @@ +export function getSelectorName(functionString) { + try { + const constants = new Set(Array.from(functionString.match(/([A-Z_]){3,}/g) ?? 
[])); + constants.delete("CONFIG"); + constants.delete("CONFIG_PREFIX_SEPARATOR"); + constants.delete("ENV"); + return [...constants].join(", "); + } + catch (e) { + return functionString; + } +} diff --git a/amplify/functions/fetchDocuments/node_modules/@smithy/node-config-provider/dist-es/index.js b/amplify/functions/fetchDocuments/node_modules/@smithy/node-config-provider/dist-es/index.js new file mode 100644 index 0000000..2d035d9 --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@smithy/node-config-provider/dist-es/index.js @@ -0,0 +1 @@ +export * from "./configLoader"; diff --git a/amplify/functions/fetchDocuments/node_modules/@smithy/node-config-provider/dist-types/configLoader.d.ts b/amplify/functions/fetchDocuments/node_modules/@smithy/node-config-provider/dist-types/configLoader.d.ts new file mode 100644 index 0000000..0d0b232 --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@smithy/node-config-provider/dist-types/configLoader.d.ts @@ -0,0 +1,31 @@ +import { Provider } from "@smithy/types"; +import { GetterFromEnv } from "./fromEnv"; +import { GetterFromConfig, SharedConfigInit } from "./fromSharedConfigFiles"; +import { FromStaticConfig } from "./fromStatic"; +/** + * @internal + */ +export type LocalConfigOptions = SharedConfigInit; +/** + * @internal + */ +export interface LoadedConfigSelectors { + /** + * A getter function getting the config values from all the environment + * variables. 
+ */ + environmentVariableSelector: GetterFromEnv; + /** + * A getter function getting config values associated with the inferred + * profile from shared INI files + */ + configFileSelector: GetterFromConfig; + /** + * Default value or getter + */ + default: FromStaticConfig; +} +/** + * @internal + */ +export declare const loadConfig: ({ environmentVariableSelector, configFileSelector, default: defaultValue }: LoadedConfigSelectors, configuration?: LocalConfigOptions) => Provider; diff --git a/amplify/functions/fetchDocuments/node_modules/@smithy/node-config-provider/dist-types/fromEnv.d.ts b/amplify/functions/fetchDocuments/node_modules/@smithy/node-config-provider/dist-types/fromEnv.d.ts new file mode 100644 index 0000000..b2454c6 --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@smithy/node-config-provider/dist-types/fromEnv.d.ts @@ -0,0 +1,7 @@ +import { Logger, Provider } from "@smithy/types"; +export type GetterFromEnv = (env: Record) => T | undefined; +/** + * Get config value given the environment variable name or getter from + * environment variable. + */ +export declare const fromEnv: (envVarSelector: GetterFromEnv, logger?: Logger) => Provider; diff --git a/amplify/functions/fetchDocuments/node_modules/@smithy/node-config-provider/dist-types/fromSharedConfigFiles.d.ts b/amplify/functions/fetchDocuments/node_modules/@smithy/node-config-provider/dist-types/fromSharedConfigFiles.d.ts new file mode 100644 index 0000000..89a8eac --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@smithy/node-config-provider/dist-types/fromSharedConfigFiles.d.ts @@ -0,0 +1,22 @@ +import { SourceProfileInit } from "@smithy/shared-ini-file-loader"; +import { ParsedIniData, Profile, Provider } from "@smithy/types"; +/** + * @internal + */ +export interface SharedConfigInit extends SourceProfileInit { + /** + * The preferred shared ini file to load the config. "config" option refers to + * the shared config file(defaults to `~/.aws/config`). 
"credentials" option + * refers to the shared credentials file(defaults to `~/.aws/credentials`) + */ + preferredFile?: "config" | "credentials"; +} +/** + * @internal + */ +export type GetterFromConfig = (profile: Profile, configFile?: ParsedIniData) => T | undefined; +/** + * Get config value from the shared config files with inferred profile name. + * @internal + */ +export declare const fromSharedConfigFiles: (configSelector: GetterFromConfig, { preferredFile, ...init }?: SharedConfigInit) => Provider; diff --git a/amplify/functions/fetchDocuments/node_modules/@smithy/node-config-provider/dist-types/fromStatic.d.ts b/amplify/functions/fetchDocuments/node_modules/@smithy/node-config-provider/dist-types/fromStatic.d.ts new file mode 100644 index 0000000..d2c32a4 --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@smithy/node-config-provider/dist-types/fromStatic.d.ts @@ -0,0 +1,9 @@ +import { Provider } from "@smithy/types"; +/** + * @internal + */ +export type FromStaticConfig = T | (() => T) | Provider; +/** + * @internal + */ +export declare const fromStatic: (defaultValue: FromStaticConfig) => Provider; diff --git a/amplify/functions/fetchDocuments/node_modules/@smithy/node-config-provider/dist-types/getSelectorName.d.ts b/amplify/functions/fetchDocuments/node_modules/@smithy/node-config-provider/dist-types/getSelectorName.d.ts new file mode 100644 index 0000000..b5f1a1b --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@smithy/node-config-provider/dist-types/getSelectorName.d.ts @@ -0,0 +1,9 @@ +/** + * Attempts to extract the name of the variable that the functional selector is looking for. + * Improves readability over the raw Function.toString() value. + * @internal + * @param functionString - function's string representation. + * + * @returns constant value used within the function. 
+ */ +export declare function getSelectorName(functionString: string): string; diff --git a/amplify/functions/fetchDocuments/node_modules/@smithy/node-config-provider/dist-types/index.d.ts b/amplify/functions/fetchDocuments/node_modules/@smithy/node-config-provider/dist-types/index.d.ts new file mode 100644 index 0000000..2d035d9 --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@smithy/node-config-provider/dist-types/index.d.ts @@ -0,0 +1 @@ +export * from "./configLoader"; diff --git a/amplify/functions/fetchDocuments/node_modules/@smithy/node-config-provider/dist-types/ts3.4/configLoader.d.ts b/amplify/functions/fetchDocuments/node_modules/@smithy/node-config-provider/dist-types/ts3.4/configLoader.d.ts new file mode 100644 index 0000000..e877731 --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@smithy/node-config-provider/dist-types/ts3.4/configLoader.d.ts @@ -0,0 +1,31 @@ +import { Provider } from "@smithy/types"; +import { GetterFromEnv } from "./fromEnv"; +import { GetterFromConfig, SharedConfigInit } from "./fromSharedConfigFiles"; +import { FromStaticConfig } from "./fromStatic"; +/** + * @internal + */ +export type LocalConfigOptions = SharedConfigInit; +/** + * @internal + */ +export interface LoadedConfigSelectors { + /** + * A getter function getting the config values from all the environment + * variables. 
+ */ + environmentVariableSelector: GetterFromEnv; + /** + * A getter function getting config values associated with the inferred + * profile from shared INI files + */ + configFileSelector: GetterFromConfig; + /** + * Default value or getter + */ + default: FromStaticConfig; +} +/** + * @internal + */ +export declare const loadConfig: ({ environmentVariableSelector, configFileSelector, default: defaultValue }: LoadedConfigSelectors, configuration?: LocalConfigOptions) => Provider; diff --git a/amplify/functions/fetchDocuments/node_modules/@smithy/node-config-provider/dist-types/ts3.4/fromEnv.d.ts b/amplify/functions/fetchDocuments/node_modules/@smithy/node-config-provider/dist-types/ts3.4/fromEnv.d.ts new file mode 100644 index 0000000..e0a4cc7 --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@smithy/node-config-provider/dist-types/ts3.4/fromEnv.d.ts @@ -0,0 +1,7 @@ +import { Logger, Provider } from "@smithy/types"; +export type GetterFromEnv = (env: Record) => T | undefined; +/** + * Get config value given the environment variable name or getter from + * environment variable. + */ +export declare const fromEnv: (envVarSelector: GetterFromEnv, logger?: Logger) => Provider; diff --git a/amplify/functions/fetchDocuments/node_modules/@smithy/node-config-provider/dist-types/ts3.4/fromSharedConfigFiles.d.ts b/amplify/functions/fetchDocuments/node_modules/@smithy/node-config-provider/dist-types/ts3.4/fromSharedConfigFiles.d.ts new file mode 100644 index 0000000..aa0efa0 --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@smithy/node-config-provider/dist-types/ts3.4/fromSharedConfigFiles.d.ts @@ -0,0 +1,22 @@ +import { SourceProfileInit } from "@smithy/shared-ini-file-loader"; +import { ParsedIniData, Profile, Provider } from "@smithy/types"; +/** + * @internal + */ +export interface SharedConfigInit extends SourceProfileInit { + /** + * The preferred shared ini file to load the config. 
"config" option refers to + * the shared config file(defaults to `~/.aws/config`). "credentials" option + * refers to the shared credentials file(defaults to `~/.aws/credentials`) + */ + preferredFile?: "config" | "credentials"; +} +/** + * @internal + */ +export type GetterFromConfig = (profile: Profile, configFile?: ParsedIniData) => T | undefined; +/** + * Get config value from the shared config files with inferred profile name. + * @internal + */ +export declare const fromSharedConfigFiles: (configSelector: GetterFromConfig, { preferredFile, ...init }?: SharedConfigInit) => Provider; diff --git a/amplify/functions/fetchDocuments/node_modules/@smithy/node-config-provider/dist-types/ts3.4/fromStatic.d.ts b/amplify/functions/fetchDocuments/node_modules/@smithy/node-config-provider/dist-types/ts3.4/fromStatic.d.ts new file mode 100644 index 0000000..a4bab2d --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@smithy/node-config-provider/dist-types/ts3.4/fromStatic.d.ts @@ -0,0 +1,9 @@ +import { Provider } from "@smithy/types"; +/** + * @internal + */ +export type FromStaticConfig = T | (() => T) | Provider; +/** + * @internal + */ +export declare const fromStatic: (defaultValue: FromStaticConfig) => Provider; diff --git a/amplify/functions/fetchDocuments/node_modules/@smithy/node-config-provider/dist-types/ts3.4/getSelectorName.d.ts b/amplify/functions/fetchDocuments/node_modules/@smithy/node-config-provider/dist-types/ts3.4/getSelectorName.d.ts new file mode 100644 index 0000000..11c5da2 --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@smithy/node-config-provider/dist-types/ts3.4/getSelectorName.d.ts @@ -0,0 +1,9 @@ +/** + * Attempts to extract the name of the variable that the functional selector is looking for. + * Improves readability over the raw Function.toString() value. + * @internal + * @param functionString - function's string representation. + * + * @returns constant value used within the function. 
+ */ +export declare function getSelectorName(functionString: string): string; diff --git a/amplify/functions/fetchDocuments/node_modules/@smithy/node-config-provider/dist-types/ts3.4/index.d.ts b/amplify/functions/fetchDocuments/node_modules/@smithy/node-config-provider/dist-types/ts3.4/index.d.ts new file mode 100644 index 0000000..74a76f5 --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@smithy/node-config-provider/dist-types/ts3.4/index.d.ts @@ -0,0 +1 @@ +export * from "./configLoader"; diff --git a/amplify/functions/fetchDocuments/node_modules/@smithy/node-config-provider/package.json b/amplify/functions/fetchDocuments/node_modules/@smithy/node-config-provider/package.json new file mode 100644 index 0000000..3002d8e --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@smithy/node-config-provider/package.json @@ -0,0 +1,65 @@ +{ + "name": "@smithy/node-config-provider", + "version": "4.0.2", + "description": "Load config default values from ini config files and environmental variable", + "scripts": { + "build": "concurrently 'yarn:build:cjs' 'yarn:build:es' 'yarn:build:types && yarn build:types:downlevel'", + "build:cjs": "node ../../scripts/inline node-config-provider", + "build:es": "yarn g:tsc -p tsconfig.es.json", + "build:types": "yarn g:tsc -p tsconfig.types.json", + "build:types:downlevel": "rimraf dist-types/ts3.4 && downlevel-dts dist-types dist-types/ts3.4", + "stage-release": "rimraf ./.release && yarn pack && mkdir ./.release && tar zxvf ./package.tgz --directory ./.release && rm ./package.tgz", + "clean": "rimraf ./dist-* && rimraf *.tsbuildinfo || exit 0", + "lint": "eslint -c ../../.eslintrc.js \"src/**/*.ts\"", + "format": "prettier --config ../../prettier.config.js --ignore-path ../../.prettierignore --write \"**/*.{ts,md,json}\"", + "test": "yarn g:vitest run", + "test:watch": "yarn g:vitest watch" + }, + "author": { + "name": "AWS SDK for JavaScript Team", + "email": "", + "url": 
"https://aws.amazon.com/javascript/" + }, + "license": "Apache-2.0", + "main": "./dist-cjs/index.js", + "module": "./dist-es/index.js", + "types": "./dist-types/index.d.ts", + "dependencies": { + "@smithy/property-provider": "^4.0.2", + "@smithy/shared-ini-file-loader": "^4.0.2", + "@smithy/types": "^4.2.0", + "tslib": "^2.6.2" + }, + "devDependencies": { + "@types/node": "^18.11.9", + "concurrently": "7.0.0", + "downlevel-dts": "0.10.1", + "rimraf": "3.0.2", + "typedoc": "0.23.23" + }, + "engines": { + "node": ">=18.0.0" + }, + "typesVersions": { + "<4.0": { + "dist-types/*": [ + "dist-types/ts3.4/*" + ] + } + }, + "files": [ + "dist-*/**" + ], + "homepage": "https://github.com/smithy-lang/smithy-typescript/tree/main/packages/node-config-provider", + "repository": { + "type": "git", + "url": "https://github.com/smithy-lang/smithy-typescript.git", + "directory": "packages/node-config-provider" + }, + "typedoc": { + "entryPoint": "src/index.ts" + }, + "publishConfig": { + "directory": ".release/package" + } +} \ No newline at end of file diff --git a/amplify/functions/fetchDocuments/node_modules/@smithy/node-http-handler/LICENSE b/amplify/functions/fetchDocuments/node_modules/@smithy/node-http-handler/LICENSE new file mode 100644 index 0000000..7b6491b --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@smithy/node-http-handler/LICENSE @@ -0,0 +1,201 @@ +Apache License + Version 2.0, January 2004 + http://www.apache.org/licenses/ + + TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION + + 1. Definitions. + + "License" shall mean the terms and conditions for use, reproduction, + and distribution as defined by Sections 1 through 9 of this document. + + "Licensor" shall mean the copyright owner or entity authorized by + the copyright owner that is granting the License. + + "Legal Entity" shall mean the union of the acting entity and all + other entities that control, are controlled by, or are under common + control with that entity. 
For the purposes of this definition, + "control" means (i) the power, direct or indirect, to cause the + direction or management of such entity, whether by contract or + otherwise, or (ii) ownership of fifty percent (50%) or more of the + outstanding shares, or (iii) beneficial ownership of such entity. + + "You" (or "Your") shall mean an individual or Legal Entity + exercising permissions granted by this License. + + "Source" form shall mean the preferred form for making modifications, + including but not limited to software source code, documentation + source, and configuration files. + + "Object" form shall mean any form resulting from mechanical + transformation or translation of a Source form, including but + not limited to compiled object code, generated documentation, + and conversions to other media types. + + "Work" shall mean the work of authorship, whether in Source or + Object form, made available under the License, as indicated by a + copyright notice that is included in or attached to the work + (an example is provided in the Appendix below). + + "Derivative Works" shall mean any work, whether in Source or Object + form, that is based on (or derived from) the Work and for which the + editorial revisions, annotations, elaborations, or other modifications + represent, as a whole, an original work of authorship. For the purposes + of this License, Derivative Works shall not include works that remain + separable from, or merely link (or bind by name) to the interfaces of, + the Work and Derivative Works thereof. + + "Contribution" shall mean any work of authorship, including + the original version of the Work and any modifications or additions + to that Work or Derivative Works thereof, that is intentionally + submitted to Licensor for inclusion in the Work by the copyright owner + or by an individual or Legal Entity authorized to submit on behalf of + the copyright owner. 
For the purposes of this definition, "submitted" + means any form of electronic, verbal, or written communication sent + to the Licensor or its representatives, including but not limited to + communication on electronic mailing lists, source code control systems, + and issue tracking systems that are managed by, or on behalf of, the + Licensor for the purpose of discussing and improving the Work, but + excluding communication that is conspicuously marked or otherwise + designated in writing by the copyright owner as "Not a Contribution." + + "Contributor" shall mean Licensor and any individual or Legal Entity + on behalf of whom a Contribution has been received by Licensor and + subsequently incorporated within the Work. + + 2. Grant of Copyright License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + copyright license to reproduce, prepare Derivative Works of, + publicly display, publicly perform, sublicense, and distribute the + Work and such Derivative Works in Source or Object form. + + 3. Grant of Patent License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + (except as stated in this section) patent license to make, have made, + use, offer to sell, sell, import, and otherwise transfer the Work, + where such license applies only to those patent claims licensable + by such Contributor that are necessarily infringed by their + Contribution(s) alone or by combination of their Contribution(s) + with the Work to which such Contribution(s) was submitted. 
If You + institute patent litigation against any entity (including a + cross-claim or counterclaim in a lawsuit) alleging that the Work + or a Contribution incorporated within the Work constitutes direct + or contributory patent infringement, then any patent licenses + granted to You under this License for that Work shall terminate + as of the date such litigation is filed. + + 4. Redistribution. You may reproduce and distribute copies of the + Work or Derivative Works thereof in any medium, with or without + modifications, and in Source or Object form, provided that You + meet the following conditions: + + (a) You must give any other recipients of the Work or + Derivative Works a copy of this License; and + + (b) You must cause any modified files to carry prominent notices + stating that You changed the files; and + + (c) You must retain, in the Source form of any Derivative Works + that You distribute, all copyright, patent, trademark, and + attribution notices from the Source form of the Work, + excluding those notices that do not pertain to any part of + the Derivative Works; and + + (d) If the Work includes a "NOTICE" text file as part of its + distribution, then any Derivative Works that You distribute must + include a readable copy of the attribution notices contained + within such NOTICE file, excluding those notices that do not + pertain to any part of the Derivative Works, in at least one + of the following places: within a NOTICE text file distributed + as part of the Derivative Works; within the Source form or + documentation, if provided along with the Derivative Works; or, + within a display generated by the Derivative Works, if and + wherever such third-party notices normally appear. The contents + of the NOTICE file are for informational purposes only and + do not modify the License. 
You may add Your own attribution + notices within Derivative Works that You distribute, alongside + or as an addendum to the NOTICE text from the Work, provided + that such additional attribution notices cannot be construed + as modifying the License. + + You may add Your own copyright statement to Your modifications and + may provide additional or different license terms and conditions + for use, reproduction, or distribution of Your modifications, or + for any such Derivative Works as a whole, provided Your use, + reproduction, and distribution of the Work otherwise complies with + the conditions stated in this License. + + 5. Submission of Contributions. Unless You explicitly state otherwise, + any Contribution intentionally submitted for inclusion in the Work + by You to the Licensor shall be under the terms and conditions of + this License, without any additional terms or conditions. + Notwithstanding the above, nothing herein shall supersede or modify + the terms of any separate license agreement you may have executed + with Licensor regarding such Contributions. + + 6. Trademarks. This License does not grant permission to use the trade + names, trademarks, service marks, or product names of the Licensor, + except as required for reasonable and customary use in describing the + origin of the Work and reproducing the content of the NOTICE file. + + 7. Disclaimer of Warranty. Unless required by applicable law or + agreed to in writing, Licensor provides the Work (and each + Contributor provides its Contributions) on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or + implied, including, without limitation, any warranties or conditions + of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A + PARTICULAR PURPOSE. You are solely responsible for determining the + appropriateness of using or redistributing the Work and assume any + risks associated with Your exercise of permissions under this License. + + 8. 
Limitation of Liability. In no event and under no legal theory, + whether in tort (including negligence), contract, or otherwise, + unless required by applicable law (such as deliberate and grossly + negligent acts) or agreed to in writing, shall any Contributor be + liable to You for damages, including any direct, indirect, special, + incidental, or consequential damages of any character arising as a + result of this License or out of the use or inability to use the + Work (including but not limited to damages for loss of goodwill, + work stoppage, computer failure or malfunction, or any and all + other commercial damages or losses), even if such Contributor + has been advised of the possibility of such damages. + + 9. Accepting Warranty or Additional Liability. While redistributing + the Work or Derivative Works thereof, You may choose to offer, + and charge a fee for, acceptance of support, warranty, indemnity, + or other liability obligations and/or rights consistent with this + License. However, in accepting such obligations, You may act only + on Your own behalf and on Your sole responsibility, not on behalf + of any other Contributor, and only if You agree to indemnify, + defend, and hold each Contributor harmless for any liability + incurred by, or claims asserted against, such Contributor by reason + of your accepting any such warranty or additional liability. + + END OF TERMS AND CONDITIONS + + APPENDIX: How to apply the Apache License to your work. + + To apply the Apache License to your work, attach the following + boilerplate notice, with the fields enclosed by brackets "{}" + replaced with your own identifying information. (Don't include + the brackets!) The text should be enclosed in the appropriate + comment syntax for the file format. We also recommend that a + file or class name and description of purpose be included on the + same "printed page" as the copyright notice for easier + identification within third-party archives. 
+ + Copyright 2018-2020 Amazon.com, Inc. or its affiliates. All Rights Reserved. + + Licensed under the Apache License, Version 2.0 (the "License"); + you may not use this file except in compliance with the License. + You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + + Unless required by applicable law or agreed to in writing, software + distributed under the License is distributed on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + See the License for the specific language governing permissions and + limitations under the License. \ No newline at end of file diff --git a/amplify/functions/fetchDocuments/node_modules/@smithy/node-http-handler/README.md b/amplify/functions/fetchDocuments/node_modules/@smithy/node-http-handler/README.md new file mode 100644 index 0000000..214719f --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@smithy/node-http-handler/README.md @@ -0,0 +1,9 @@ +# @smithy/node-http-handler + +[![NPM version](https://img.shields.io/npm/v/@smithy/node-http-handler/latest.svg)](https://www.npmjs.com/package/@smithy/node-http-handler) +[![NPM downloads](https://img.shields.io/npm/dm/@smithy/node-http-handler.svg)](https://www.npmjs.com/package/@smithy/node-http-handler) + +This package implements the default `requestHandler` for Node.js using `node:http`, `node:https`, and `node:http2`. + +For an example on how `requestHandler`s are used by Smithy generated SDK clients, refer to +the [AWS SDK for JavaScript (v3) supplemental docs](https://github.com/aws/aws-sdk-js-v3/blob/main/supplemental-docs/CLIENTS.md#request-handler-requesthandler). 
diff --git a/amplify/functions/fetchDocuments/node_modules/@smithy/node-http-handler/dist-cjs/constants.js b/amplify/functions/fetchDocuments/node_modules/@smithy/node-http-handler/dist-cjs/constants.js new file mode 100644 index 0000000..532e610 --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@smithy/node-http-handler/dist-cjs/constants.js @@ -0,0 +1 @@ +module.exports = require("./index.js"); \ No newline at end of file diff --git a/amplify/functions/fetchDocuments/node_modules/@smithy/node-http-handler/dist-cjs/get-transformed-headers.js b/amplify/functions/fetchDocuments/node_modules/@smithy/node-http-handler/dist-cjs/get-transformed-headers.js new file mode 100644 index 0000000..532e610 --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@smithy/node-http-handler/dist-cjs/get-transformed-headers.js @@ -0,0 +1 @@ +module.exports = require("./index.js"); \ No newline at end of file diff --git a/amplify/functions/fetchDocuments/node_modules/@smithy/node-http-handler/dist-cjs/index.js b/amplify/functions/fetchDocuments/node_modules/@smithy/node-http-handler/dist-cjs/index.js new file mode 100644 index 0000000..e31976f --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@smithy/node-http-handler/dist-cjs/index.js @@ -0,0 +1,806 @@ +var __create = Object.create; +var __defProp = Object.defineProperty; +var __getOwnPropDesc = Object.getOwnPropertyDescriptor; +var __getOwnPropNames = Object.getOwnPropertyNames; +var __getProtoOf = Object.getPrototypeOf; +var __hasOwnProp = Object.prototype.hasOwnProperty; +var __name = (target, value) => __defProp(target, "name", { value, configurable: true }); +var __export = (target, all) => { + for (var name in all) + __defProp(target, name, { get: all[name], enumerable: true }); +}; +var __copyProps = (to, from, except, desc) => { + if (from && typeof from === "object" || typeof from === "function") { + for (let key of __getOwnPropNames(from)) + if (!__hasOwnProp.call(to, key) && key 
!== except) + __defProp(to, key, { get: () => from[key], enumerable: !(desc = __getOwnPropDesc(from, key)) || desc.enumerable }); + } + return to; +}; +var __toESM = (mod, isNodeMode, target) => (target = mod != null ? __create(__getProtoOf(mod)) : {}, __copyProps( + // If the importer is in node compatibility mode or this is not an ESM + // file that has been converted to a CommonJS file using a Babel- + // compatible transform (i.e. "__esModule" has not been set), then set + // "default" to the CommonJS "module.exports" for node compatibility. + isNodeMode || !mod || !mod.__esModule ? __defProp(target, "default", { value: mod, enumerable: true }) : target, + mod +)); +var __toCommonJS = (mod) => __copyProps(__defProp({}, "__esModule", { value: true }), mod); + +// src/index.ts +var src_exports = {}; +__export(src_exports, { + DEFAULT_REQUEST_TIMEOUT: () => DEFAULT_REQUEST_TIMEOUT, + NodeHttp2Handler: () => NodeHttp2Handler, + NodeHttpHandler: () => NodeHttpHandler, + streamCollector: () => streamCollector +}); +module.exports = __toCommonJS(src_exports); + +// src/node-http-handler.ts +var import_protocol_http = require("@smithy/protocol-http"); +var import_querystring_builder = require("@smithy/querystring-builder"); +var import_http = require("http"); +var import_https = require("https"); + +// src/constants.ts +var NODEJS_TIMEOUT_ERROR_CODES = ["ECONNRESET", "EPIPE", "ETIMEDOUT"]; + +// src/get-transformed-headers.ts +var getTransformedHeaders = /* @__PURE__ */ __name((headers) => { + const transformedHeaders = {}; + for (const name of Object.keys(headers)) { + const headerValues = headers[name]; + transformedHeaders[name] = Array.isArray(headerValues) ? 
headerValues.join(",") : headerValues; + } + return transformedHeaders; +}, "getTransformedHeaders"); + +// src/timing.ts +var timing = { + setTimeout: (cb, ms) => setTimeout(cb, ms), + clearTimeout: (timeoutId) => clearTimeout(timeoutId) +}; + +// src/set-connection-timeout.ts +var DEFER_EVENT_LISTENER_TIME = 1e3; +var setConnectionTimeout = /* @__PURE__ */ __name((request, reject, timeoutInMs = 0) => { + if (!timeoutInMs) { + return -1; + } + const registerTimeout = /* @__PURE__ */ __name((offset) => { + const timeoutId = timing.setTimeout(() => { + request.destroy(); + reject( + Object.assign(new Error(`Socket timed out without establishing a connection within ${timeoutInMs} ms`), { + name: "TimeoutError" + }) + ); + }, timeoutInMs - offset); + const doWithSocket = /* @__PURE__ */ __name((socket) => { + if (socket?.connecting) { + socket.on("connect", () => { + timing.clearTimeout(timeoutId); + }); + } else { + timing.clearTimeout(timeoutId); + } + }, "doWithSocket"); + if (request.socket) { + doWithSocket(request.socket); + } else { + request.on("socket", doWithSocket); + } + }, "registerTimeout"); + if (timeoutInMs < 2e3) { + registerTimeout(0); + return 0; + } + return timing.setTimeout(registerTimeout.bind(null, DEFER_EVENT_LISTENER_TIME), DEFER_EVENT_LISTENER_TIME); +}, "setConnectionTimeout"); + +// src/set-socket-keep-alive.ts +var DEFER_EVENT_LISTENER_TIME2 = 3e3; +var setSocketKeepAlive = /* @__PURE__ */ __name((request, { keepAlive, keepAliveMsecs }, deferTimeMs = DEFER_EVENT_LISTENER_TIME2) => { + if (keepAlive !== true) { + return -1; + } + const registerListener = /* @__PURE__ */ __name(() => { + if (request.socket) { + request.socket.setKeepAlive(keepAlive, keepAliveMsecs || 0); + } else { + request.on("socket", (socket) => { + socket.setKeepAlive(keepAlive, keepAliveMsecs || 0); + }); + } + }, "registerListener"); + if (deferTimeMs === 0) { + registerListener(); + return 0; + } + return timing.setTimeout(registerListener, deferTimeMs); +}, 
"setSocketKeepAlive"); + +// src/set-socket-timeout.ts +var DEFER_EVENT_LISTENER_TIME3 = 3e3; +var setSocketTimeout = /* @__PURE__ */ __name((request, reject, timeoutInMs = DEFAULT_REQUEST_TIMEOUT) => { + const registerTimeout = /* @__PURE__ */ __name((offset) => { + const timeout = timeoutInMs - offset; + const onTimeout = /* @__PURE__ */ __name(() => { + request.destroy(); + reject(Object.assign(new Error(`Connection timed out after ${timeoutInMs} ms`), { name: "TimeoutError" })); + }, "onTimeout"); + if (request.socket) { + request.socket.setTimeout(timeout, onTimeout); + request.on("close", () => request.socket?.removeListener("timeout", onTimeout)); + } else { + request.setTimeout(timeout, onTimeout); + } + }, "registerTimeout"); + if (0 < timeoutInMs && timeoutInMs < 6e3) { + registerTimeout(0); + return 0; + } + return timing.setTimeout( + registerTimeout.bind(null, timeoutInMs === 0 ? 0 : DEFER_EVENT_LISTENER_TIME3), + DEFER_EVENT_LISTENER_TIME3 + ); +}, "setSocketTimeout"); + +// src/write-request-body.ts +var import_stream = require("stream"); +var MIN_WAIT_TIME = 6e3; +async function writeRequestBody(httpRequest, request, maxContinueTimeoutMs = MIN_WAIT_TIME) { + const headers = request.headers ?? 
{}; + const expect = headers["Expect"] || headers["expect"]; + let timeoutId = -1; + let sendBody = true; + if (expect === "100-continue") { + sendBody = await Promise.race([ + new Promise((resolve) => { + timeoutId = Number(timing.setTimeout(() => resolve(true), Math.max(MIN_WAIT_TIME, maxContinueTimeoutMs))); + }), + new Promise((resolve) => { + httpRequest.on("continue", () => { + timing.clearTimeout(timeoutId); + resolve(true); + }); + httpRequest.on("response", () => { + timing.clearTimeout(timeoutId); + resolve(false); + }); + httpRequest.on("error", () => { + timing.clearTimeout(timeoutId); + resolve(false); + }); + }) + ]); + } + if (sendBody) { + writeBody(httpRequest, request.body); + } +} +__name(writeRequestBody, "writeRequestBody"); +function writeBody(httpRequest, body) { + if (body instanceof import_stream.Readable) { + body.pipe(httpRequest); + return; + } + if (body) { + if (Buffer.isBuffer(body) || typeof body === "string") { + httpRequest.end(body); + return; + } + const uint8 = body; + if (typeof uint8 === "object" && uint8.buffer && typeof uint8.byteOffset === "number" && typeof uint8.byteLength === "number") { + httpRequest.end(Buffer.from(uint8.buffer, uint8.byteOffset, uint8.byteLength)); + return; + } + httpRequest.end(Buffer.from(body)); + return; + } + httpRequest.end(); +} +__name(writeBody, "writeBody"); + +// src/node-http-handler.ts +var DEFAULT_REQUEST_TIMEOUT = 0; +var NodeHttpHandler = class _NodeHttpHandler { + constructor(options) { + this.socketWarningTimestamp = 0; + // Node http handler is hard-coded to http/1.1: https://github.com/nodejs/node/blob/ff5664b83b89c55e4ab5d5f60068fb457f1f5872/lib/_http_server.js#L286 + this.metadata = { handlerProtocol: "http/1.1" }; + this.configProvider = new Promise((resolve, reject) => { + if (typeof options === "function") { + options().then((_options) => { + resolve(this.resolveDefaultConfig(_options)); + }).catch(reject); + } else { + resolve(this.resolveDefaultConfig(options)); + } + }); + 
} + static { + __name(this, "NodeHttpHandler"); + } + /** + * @returns the input if it is an HttpHandler of any class, + * or instantiates a new instance of this handler. + */ + static create(instanceOrOptions) { + if (typeof instanceOrOptions?.handle === "function") { + return instanceOrOptions; + } + return new _NodeHttpHandler(instanceOrOptions); + } + /** + * @internal + * + * @param agent - http(s) agent in use by the NodeHttpHandler instance. + * @param socketWarningTimestamp - last socket usage check timestamp. + * @param logger - channel for the warning. + * @returns timestamp of last emitted warning. + */ + static checkSocketUsage(agent, socketWarningTimestamp, logger = console) { + const { sockets, requests, maxSockets } = agent; + if (typeof maxSockets !== "number" || maxSockets === Infinity) { + return socketWarningTimestamp; + } + const interval = 15e3; + if (Date.now() - interval < socketWarningTimestamp) { + return socketWarningTimestamp; + } + if (sockets && requests) { + for (const origin in sockets) { + const socketsInUse = sockets[origin]?.length ?? 0; + const requestsEnqueued = requests[origin]?.length ?? 0; + if (socketsInUse >= maxSockets && requestsEnqueued >= 2 * maxSockets) { + logger?.warn?.( + `@smithy/node-http-handler:WARN - socket usage at capacity=${socketsInUse} and ${requestsEnqueued} additional requests are enqueued. +See https://docs.aws.amazon.com/sdk-for-javascript/v3/developer-guide/node-configuring-maxsockets.html +or increase socketAcquisitionWarningTimeout=(millis) in the NodeHttpHandler config.` + ); + return Date.now(); + } + } + } + return socketWarningTimestamp; + } + resolveDefaultConfig(options) { + const { requestTimeout, connectionTimeout, socketTimeout, socketAcquisitionWarningTimeout, httpAgent, httpsAgent } = options || {}; + const keepAlive = true; + const maxSockets = 50; + return { + connectionTimeout, + requestTimeout: requestTimeout ?? 
socketTimeout, + socketAcquisitionWarningTimeout, + httpAgent: (() => { + if (httpAgent instanceof import_http.Agent || typeof httpAgent?.destroy === "function") { + return httpAgent; + } + return new import_http.Agent({ keepAlive, maxSockets, ...httpAgent }); + })(), + httpsAgent: (() => { + if (httpsAgent instanceof import_https.Agent || typeof httpsAgent?.destroy === "function") { + return httpsAgent; + } + return new import_https.Agent({ keepAlive, maxSockets, ...httpsAgent }); + })(), + logger: console + }; + } + destroy() { + this.config?.httpAgent?.destroy(); + this.config?.httpsAgent?.destroy(); + } + async handle(request, { abortSignal } = {}) { + if (!this.config) { + this.config = await this.configProvider; + } + return new Promise((_resolve, _reject) => { + let writeRequestBodyPromise = void 0; + const timeouts = []; + const resolve = /* @__PURE__ */ __name(async (arg) => { + await writeRequestBodyPromise; + timeouts.forEach(timing.clearTimeout); + _resolve(arg); + }, "resolve"); + const reject = /* @__PURE__ */ __name(async (arg) => { + await writeRequestBodyPromise; + timeouts.forEach(timing.clearTimeout); + _reject(arg); + }, "reject"); + if (!this.config) { + throw new Error("Node HTTP request handler config is not resolved"); + } + if (abortSignal?.aborted) { + const abortError = new Error("Request aborted"); + abortError.name = "AbortError"; + reject(abortError); + return; + } + const isSSL = request.protocol === "https:"; + const agent = isSSL ? this.config.httpsAgent : this.config.httpAgent; + timeouts.push( + timing.setTimeout( + () => { + this.socketWarningTimestamp = _NodeHttpHandler.checkSocketUsage( + agent, + this.socketWarningTimestamp, + this.config.logger + ); + }, + this.config.socketAcquisitionWarningTimeout ?? (this.config.requestTimeout ?? 2e3) + (this.config.connectionTimeout ?? 
1e3) + ) + ); + const queryString = (0, import_querystring_builder.buildQueryString)(request.query || {}); + let auth = void 0; + if (request.username != null || request.password != null) { + const username = request.username ?? ""; + const password = request.password ?? ""; + auth = `${username}:${password}`; + } + let path = request.path; + if (queryString) { + path += `?${queryString}`; + } + if (request.fragment) { + path += `#${request.fragment}`; + } + let hostname = request.hostname ?? ""; + if (hostname[0] === "[" && hostname.endsWith("]")) { + hostname = request.hostname.slice(1, -1); + } else { + hostname = request.hostname; + } + const nodeHttpsOptions = { + headers: request.headers, + host: hostname, + method: request.method, + path, + port: request.port, + agent, + auth + }; + const requestFunc = isSSL ? import_https.request : import_http.request; + const req = requestFunc(nodeHttpsOptions, (res) => { + const httpResponse = new import_protocol_http.HttpResponse({ + statusCode: res.statusCode || -1, + reason: res.statusMessage, + headers: getTransformedHeaders(res.headers), + body: res + }); + resolve({ response: httpResponse }); + }); + req.on("error", (err) => { + if (NODEJS_TIMEOUT_ERROR_CODES.includes(err.code)) { + reject(Object.assign(err, { name: "TimeoutError" })); + } else { + reject(err); + } + }); + if (abortSignal) { + const onAbort = /* @__PURE__ */ __name(() => { + req.destroy(); + const abortError = new Error("Request aborted"); + abortError.name = "AbortError"; + reject(abortError); + }, "onAbort"); + if (typeof abortSignal.addEventListener === "function") { + const signal = abortSignal; + signal.addEventListener("abort", onAbort, { once: true }); + req.once("close", () => signal.removeEventListener("abort", onAbort)); + } else { + abortSignal.onabort = onAbort; + } + } + timeouts.push(setConnectionTimeout(req, reject, this.config.connectionTimeout)); + timeouts.push(setSocketTimeout(req, reject, this.config.requestTimeout)); + const 
httpAgent = nodeHttpsOptions.agent; + if (typeof httpAgent === "object" && "keepAlive" in httpAgent) { + timeouts.push( + setSocketKeepAlive(req, { + // @ts-expect-error keepAlive is not public on httpAgent. + keepAlive: httpAgent.keepAlive, + // @ts-expect-error keepAliveMsecs is not public on httpAgent. + keepAliveMsecs: httpAgent.keepAliveMsecs + }) + ); + } + writeRequestBodyPromise = writeRequestBody(req, request, this.config.requestTimeout).catch((e) => { + timeouts.forEach(timing.clearTimeout); + return _reject(e); + }); + }); + } + updateHttpClientConfig(key, value) { + this.config = void 0; + this.configProvider = this.configProvider.then((config) => { + return { + ...config, + [key]: value + }; + }); + } + httpHandlerConfigs() { + return this.config ?? {}; + } +}; + +// src/node-http2-handler.ts + + +var import_http22 = require("http2"); + +// src/node-http2-connection-manager.ts +var import_http2 = __toESM(require("http2")); + +// src/node-http2-connection-pool.ts +var NodeHttp2ConnectionPool = class { + constructor(sessions) { + this.sessions = []; + this.sessions = sessions ?? 
[]; + } + static { + __name(this, "NodeHttp2ConnectionPool"); + } + poll() { + if (this.sessions.length > 0) { + return this.sessions.shift(); + } + } + offerLast(session) { + this.sessions.push(session); + } + contains(session) { + return this.sessions.includes(session); + } + remove(session) { + this.sessions = this.sessions.filter((s) => s !== session); + } + [Symbol.iterator]() { + return this.sessions[Symbol.iterator](); + } + destroy(connection) { + for (const session of this.sessions) { + if (session === connection) { + if (!session.destroyed) { + session.destroy(); + } + } + } + } +}; + +// src/node-http2-connection-manager.ts +var NodeHttp2ConnectionManager = class { + constructor(config) { + this.sessionCache = /* @__PURE__ */ new Map(); + this.config = config; + if (this.config.maxConcurrency && this.config.maxConcurrency <= 0) { + throw new RangeError("maxConcurrency must be greater than zero."); + } + } + static { + __name(this, "NodeHttp2ConnectionManager"); + } + lease(requestContext, connectionConfiguration) { + const url = this.getUrlString(requestContext); + const existingPool = this.sessionCache.get(url); + if (existingPool) { + const existingSession = existingPool.poll(); + if (existingSession && !this.config.disableConcurrency) { + return existingSession; + } + } + const session = import_http2.default.connect(url); + if (this.config.maxConcurrency) { + session.settings({ maxConcurrentStreams: this.config.maxConcurrency }, (err) => { + if (err) { + throw new Error( + "Fail to set maxConcurrentStreams to " + this.config.maxConcurrency + "when creating new session for " + requestContext.destination.toString() + ); + } + }); + } + session.unref(); + const destroySessionCb = /* @__PURE__ */ __name(() => { + session.destroy(); + this.deleteSession(url, session); + }, "destroySessionCb"); + session.on("goaway", destroySessionCb); + session.on("error", destroySessionCb); + session.on("frameError", destroySessionCb); + session.on("close", () => 
this.deleteSession(url, session)); + if (connectionConfiguration.requestTimeout) { + session.setTimeout(connectionConfiguration.requestTimeout, destroySessionCb); + } + const connectionPool = this.sessionCache.get(url) || new NodeHttp2ConnectionPool(); + connectionPool.offerLast(session); + this.sessionCache.set(url, connectionPool); + return session; + } + /** + * Delete a session from the connection pool. + * @param authority The authority of the session to delete. + * @param session The session to delete. + */ + deleteSession(authority, session) { + const existingConnectionPool = this.sessionCache.get(authority); + if (!existingConnectionPool) { + return; + } + if (!existingConnectionPool.contains(session)) { + return; + } + existingConnectionPool.remove(session); + this.sessionCache.set(authority, existingConnectionPool); + } + release(requestContext, session) { + const cacheKey = this.getUrlString(requestContext); + this.sessionCache.get(cacheKey)?.offerLast(session); + } + destroy() { + for (const [key, connectionPool] of this.sessionCache) { + for (const session of connectionPool) { + if (!session.destroyed) { + session.destroy(); + } + connectionPool.remove(session); + } + this.sessionCache.delete(key); + } + } + setMaxConcurrentStreams(maxConcurrentStreams) { + if (maxConcurrentStreams && maxConcurrentStreams <= 0) { + throw new RangeError("maxConcurrentStreams must be greater than zero."); + } + this.config.maxConcurrency = maxConcurrentStreams; + } + setDisableConcurrentStreams(disableConcurrentStreams) { + this.config.disableConcurrency = disableConcurrentStreams; + } + getUrlString(request) { + return request.destination.toString(); + } +}; + +// src/node-http2-handler.ts +var NodeHttp2Handler = class _NodeHttp2Handler { + constructor(options) { + this.metadata = { handlerProtocol: "h2" }; + this.connectionManager = new NodeHttp2ConnectionManager({}); + this.configProvider = new Promise((resolve, reject) => { + if (typeof options === "function") { + 
options().then((opts) => { + resolve(opts || {}); + }).catch(reject); + } else { + resolve(options || {}); + } + }); + } + static { + __name(this, "NodeHttp2Handler"); + } + /** + * @returns the input if it is an HttpHandler of any class, + * or instantiates a new instance of this handler. + */ + static create(instanceOrOptions) { + if (typeof instanceOrOptions?.handle === "function") { + return instanceOrOptions; + } + return new _NodeHttp2Handler(instanceOrOptions); + } + destroy() { + this.connectionManager.destroy(); + } + async handle(request, { abortSignal } = {}) { + if (!this.config) { + this.config = await this.configProvider; + this.connectionManager.setDisableConcurrentStreams(this.config.disableConcurrentStreams || false); + if (this.config.maxConcurrentStreams) { + this.connectionManager.setMaxConcurrentStreams(this.config.maxConcurrentStreams); + } + } + const { requestTimeout, disableConcurrentStreams } = this.config; + return new Promise((_resolve, _reject) => { + let fulfilled = false; + let writeRequestBodyPromise = void 0; + const resolve = /* @__PURE__ */ __name(async (arg) => { + await writeRequestBodyPromise; + _resolve(arg); + }, "resolve"); + const reject = /* @__PURE__ */ __name(async (arg) => { + await writeRequestBodyPromise; + _reject(arg); + }, "reject"); + if (abortSignal?.aborted) { + fulfilled = true; + const abortError = new Error("Request aborted"); + abortError.name = "AbortError"; + reject(abortError); + return; + } + const { hostname, method, port, protocol, query } = request; + let auth = ""; + if (request.username != null || request.password != null) { + const username = request.username ?? ""; + const password = request.password ?? ""; + auth = `${username}:${password}@`; + } + const authority = `${protocol}//${auth}${hostname}${port ? 
`:${port}` : ""}`; + const requestContext = { destination: new URL(authority) }; + const session = this.connectionManager.lease(requestContext, { + requestTimeout: this.config?.sessionTimeout, + disableConcurrentStreams: disableConcurrentStreams || false + }); + const rejectWithDestroy = /* @__PURE__ */ __name((err) => { + if (disableConcurrentStreams) { + this.destroySession(session); + } + fulfilled = true; + reject(err); + }, "rejectWithDestroy"); + const queryString = (0, import_querystring_builder.buildQueryString)(query || {}); + let path = request.path; + if (queryString) { + path += `?${queryString}`; + } + if (request.fragment) { + path += `#${request.fragment}`; + } + const req = session.request({ + ...request.headers, + [import_http22.constants.HTTP2_HEADER_PATH]: path, + [import_http22.constants.HTTP2_HEADER_METHOD]: method + }); + session.ref(); + req.on("response", (headers) => { + const httpResponse = new import_protocol_http.HttpResponse({ + statusCode: headers[":status"] || -1, + headers: getTransformedHeaders(headers), + body: req + }); + fulfilled = true; + resolve({ response: httpResponse }); + if (disableConcurrentStreams) { + session.close(); + this.connectionManager.deleteSession(authority, session); + } + }); + if (requestTimeout) { + req.setTimeout(requestTimeout, () => { + req.close(); + const timeoutError = new Error(`Stream timed out because of no activity for ${requestTimeout} ms`); + timeoutError.name = "TimeoutError"; + rejectWithDestroy(timeoutError); + }); + } + if (abortSignal) { + const onAbort = /* @__PURE__ */ __name(() => { + req.close(); + const abortError = new Error("Request aborted"); + abortError.name = "AbortError"; + rejectWithDestroy(abortError); + }, "onAbort"); + if (typeof abortSignal.addEventListener === "function") { + const signal = abortSignal; + signal.addEventListener("abort", onAbort, { once: true }); + req.once("close", () => signal.removeEventListener("abort", onAbort)); + } else { + abortSignal.onabort = 
onAbort; + } + } + req.on("frameError", (type, code, id) => { + rejectWithDestroy(new Error(`Frame type id ${type} in stream id ${id} has failed with code ${code}.`)); + }); + req.on("error", rejectWithDestroy); + req.on("aborted", () => { + rejectWithDestroy( + new Error(`HTTP/2 stream is abnormally aborted in mid-communication with result code ${req.rstCode}.`) + ); + }); + req.on("close", () => { + session.unref(); + if (disableConcurrentStreams) { + session.destroy(); + } + if (!fulfilled) { + rejectWithDestroy(new Error("Unexpected error: http2 request did not get a response")); + } + }); + writeRequestBodyPromise = writeRequestBody(req, request, requestTimeout); + }); + } + updateHttpClientConfig(key, value) { + this.config = void 0; + this.configProvider = this.configProvider.then((config) => { + return { + ...config, + [key]: value + }; + }); + } + httpHandlerConfigs() { + return this.config ?? {}; + } + /** + * Destroys a session. + * @param session - the session to destroy. + */ + destroySession(session) { + if (!session.destroyed) { + session.destroy(); + } + } +}; + +// src/stream-collector/collector.ts + +var Collector = class extends import_stream.Writable { + constructor() { + super(...arguments); + this.bufferedBytes = []; + } + static { + __name(this, "Collector"); + } + _write(chunk, encoding, callback) { + this.bufferedBytes.push(chunk); + callback(); + } +}; + +// src/stream-collector/index.ts +var streamCollector = /* @__PURE__ */ __name((stream) => { + if (isReadableStreamInstance(stream)) { + return collectReadableStream(stream); + } + return new Promise((resolve, reject) => { + const collector = new Collector(); + stream.pipe(collector); + stream.on("error", (err) => { + collector.end(); + reject(err); + }); + collector.on("error", reject); + collector.on("finish", function() { + const bytes = new Uint8Array(Buffer.concat(this.bufferedBytes)); + resolve(bytes); + }); + }); +}, "streamCollector"); +var isReadableStreamInstance = /* @__PURE__ 
*/ __name((stream) => typeof ReadableStream === "function" && stream instanceof ReadableStream, "isReadableStreamInstance"); +async function collectReadableStream(stream) { + const chunks = []; + const reader = stream.getReader(); + let isDone = false; + let length = 0; + while (!isDone) { + const { done, value } = await reader.read(); + if (value) { + chunks.push(value); + length += value.length; + } + isDone = done; + } + const collected = new Uint8Array(length); + let offset = 0; + for (const chunk of chunks) { + collected.set(chunk, offset); + offset += chunk.length; + } + return collected; +} +__name(collectReadableStream, "collectReadableStream"); +// Annotate the CommonJS export names for ESM import in node: + +0 && (module.exports = { + DEFAULT_REQUEST_TIMEOUT, + NodeHttpHandler, + NodeHttp2Handler, + streamCollector +}); + diff --git a/amplify/functions/fetchDocuments/node_modules/@smithy/node-http-handler/dist-cjs/node-http-handler.js b/amplify/functions/fetchDocuments/node_modules/@smithy/node-http-handler/dist-cjs/node-http-handler.js new file mode 100644 index 0000000..532e610 --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@smithy/node-http-handler/dist-cjs/node-http-handler.js @@ -0,0 +1 @@ +module.exports = require("./index.js"); \ No newline at end of file diff --git a/amplify/functions/fetchDocuments/node_modules/@smithy/node-http-handler/dist-cjs/node-http2-connection-manager.js b/amplify/functions/fetchDocuments/node_modules/@smithy/node-http-handler/dist-cjs/node-http2-connection-manager.js new file mode 100644 index 0000000..532e610 --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@smithy/node-http-handler/dist-cjs/node-http2-connection-manager.js @@ -0,0 +1 @@ +module.exports = require("./index.js"); \ No newline at end of file diff --git a/amplify/functions/fetchDocuments/node_modules/@smithy/node-http-handler/dist-cjs/node-http2-connection-pool.js 
b/amplify/functions/fetchDocuments/node_modules/@smithy/node-http-handler/dist-cjs/node-http2-connection-pool.js new file mode 100644 index 0000000..532e610 --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@smithy/node-http-handler/dist-cjs/node-http2-connection-pool.js @@ -0,0 +1 @@ +module.exports = require("./index.js"); \ No newline at end of file diff --git a/amplify/functions/fetchDocuments/node_modules/@smithy/node-http-handler/dist-cjs/node-http2-handler.js b/amplify/functions/fetchDocuments/node_modules/@smithy/node-http-handler/dist-cjs/node-http2-handler.js new file mode 100644 index 0000000..532e610 --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@smithy/node-http-handler/dist-cjs/node-http2-handler.js @@ -0,0 +1 @@ +module.exports = require("./index.js"); \ No newline at end of file diff --git a/amplify/functions/fetchDocuments/node_modules/@smithy/node-http-handler/dist-cjs/readable.mock.js b/amplify/functions/fetchDocuments/node_modules/@smithy/node-http-handler/dist-cjs/readable.mock.js new file mode 100644 index 0000000..532e610 --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@smithy/node-http-handler/dist-cjs/readable.mock.js @@ -0,0 +1 @@ +module.exports = require("./index.js"); \ No newline at end of file diff --git a/amplify/functions/fetchDocuments/node_modules/@smithy/node-http-handler/dist-cjs/server.mock.js b/amplify/functions/fetchDocuments/node_modules/@smithy/node-http-handler/dist-cjs/server.mock.js new file mode 100644 index 0000000..532e610 --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@smithy/node-http-handler/dist-cjs/server.mock.js @@ -0,0 +1 @@ +module.exports = require("./index.js"); \ No newline at end of file diff --git a/amplify/functions/fetchDocuments/node_modules/@smithy/node-http-handler/dist-cjs/set-connection-timeout.js b/amplify/functions/fetchDocuments/node_modules/@smithy/node-http-handler/dist-cjs/set-connection-timeout.js new file mode 100644 
index 0000000..532e610 --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@smithy/node-http-handler/dist-cjs/set-connection-timeout.js @@ -0,0 +1 @@ +module.exports = require("./index.js"); \ No newline at end of file diff --git a/amplify/functions/fetchDocuments/node_modules/@smithy/node-http-handler/dist-cjs/set-socket-keep-alive.js b/amplify/functions/fetchDocuments/node_modules/@smithy/node-http-handler/dist-cjs/set-socket-keep-alive.js new file mode 100644 index 0000000..532e610 --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@smithy/node-http-handler/dist-cjs/set-socket-keep-alive.js @@ -0,0 +1 @@ +module.exports = require("./index.js"); \ No newline at end of file diff --git a/amplify/functions/fetchDocuments/node_modules/@smithy/node-http-handler/dist-cjs/set-socket-timeout.js b/amplify/functions/fetchDocuments/node_modules/@smithy/node-http-handler/dist-cjs/set-socket-timeout.js new file mode 100644 index 0000000..532e610 --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@smithy/node-http-handler/dist-cjs/set-socket-timeout.js @@ -0,0 +1 @@ +module.exports = require("./index.js"); \ No newline at end of file diff --git a/amplify/functions/fetchDocuments/node_modules/@smithy/node-http-handler/dist-cjs/stream-collector/collector.js b/amplify/functions/fetchDocuments/node_modules/@smithy/node-http-handler/dist-cjs/stream-collector/collector.js new file mode 100644 index 0000000..0440577 --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@smithy/node-http-handler/dist-cjs/stream-collector/collector.js @@ -0,0 +1 @@ +module.exports = require("../index.js"); \ No newline at end of file diff --git a/amplify/functions/fetchDocuments/node_modules/@smithy/node-http-handler/dist-cjs/stream-collector/index.js b/amplify/functions/fetchDocuments/node_modules/@smithy/node-http-handler/dist-cjs/stream-collector/index.js new file mode 100644 index 0000000..0440577 --- /dev/null +++ 
b/amplify/functions/fetchDocuments/node_modules/@smithy/node-http-handler/dist-cjs/stream-collector/index.js @@ -0,0 +1 @@ +module.exports = require("../index.js"); \ No newline at end of file diff --git a/amplify/functions/fetchDocuments/node_modules/@smithy/node-http-handler/dist-cjs/stream-collector/readable.mock.js b/amplify/functions/fetchDocuments/node_modules/@smithy/node-http-handler/dist-cjs/stream-collector/readable.mock.js new file mode 100644 index 0000000..0440577 --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@smithy/node-http-handler/dist-cjs/stream-collector/readable.mock.js @@ -0,0 +1 @@ +module.exports = require("../index.js"); \ No newline at end of file diff --git a/amplify/functions/fetchDocuments/node_modules/@smithy/node-http-handler/dist-cjs/timing.js b/amplify/functions/fetchDocuments/node_modules/@smithy/node-http-handler/dist-cjs/timing.js new file mode 100644 index 0000000..532e610 --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@smithy/node-http-handler/dist-cjs/timing.js @@ -0,0 +1 @@ +module.exports = require("./index.js"); \ No newline at end of file diff --git a/amplify/functions/fetchDocuments/node_modules/@smithy/node-http-handler/dist-cjs/write-request-body.js b/amplify/functions/fetchDocuments/node_modules/@smithy/node-http-handler/dist-cjs/write-request-body.js new file mode 100644 index 0000000..532e610 --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@smithy/node-http-handler/dist-cjs/write-request-body.js @@ -0,0 +1 @@ +module.exports = require("./index.js"); \ No newline at end of file diff --git a/amplify/functions/fetchDocuments/node_modules/@smithy/node-http-handler/dist-es/constants.js b/amplify/functions/fetchDocuments/node_modules/@smithy/node-http-handler/dist-es/constants.js new file mode 100644 index 0000000..0619d28 --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@smithy/node-http-handler/dist-es/constants.js @@ -0,0 +1 @@ +export const 
NODEJS_TIMEOUT_ERROR_CODES = ["ECONNRESET", "EPIPE", "ETIMEDOUT"]; diff --git a/amplify/functions/fetchDocuments/node_modules/@smithy/node-http-handler/dist-es/get-transformed-headers.js b/amplify/functions/fetchDocuments/node_modules/@smithy/node-http-handler/dist-es/get-transformed-headers.js new file mode 100644 index 0000000..562883c --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@smithy/node-http-handler/dist-es/get-transformed-headers.js @@ -0,0 +1,9 @@ +const getTransformedHeaders = (headers) => { + const transformedHeaders = {}; + for (const name of Object.keys(headers)) { + const headerValues = headers[name]; + transformedHeaders[name] = Array.isArray(headerValues) ? headerValues.join(",") : headerValues; + } + return transformedHeaders; +}; +export { getTransformedHeaders }; diff --git a/amplify/functions/fetchDocuments/node_modules/@smithy/node-http-handler/dist-es/index.js b/amplify/functions/fetchDocuments/node_modules/@smithy/node-http-handler/dist-es/index.js new file mode 100644 index 0000000..09c0b9a --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@smithy/node-http-handler/dist-es/index.js @@ -0,0 +1,3 @@ +export * from "./node-http-handler"; +export * from "./node-http2-handler"; +export * from "./stream-collector"; diff --git a/amplify/functions/fetchDocuments/node_modules/@smithy/node-http-handler/dist-es/node-http-handler.js b/amplify/functions/fetchDocuments/node_modules/@smithy/node-http-handler/dist-es/node-http-handler.js new file mode 100644 index 0000000..f0ca1e7 --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@smithy/node-http-handler/dist-es/node-http-handler.js @@ -0,0 +1,209 @@ +import { HttpResponse } from "@smithy/protocol-http"; +import { buildQueryString } from "@smithy/querystring-builder"; +import { Agent as hAgent, request as hRequest } from "http"; +import { Agent as hsAgent, request as hsRequest } from "https"; +import { NODEJS_TIMEOUT_ERROR_CODES } from "./constants"; 
+import { getTransformedHeaders } from "./get-transformed-headers"; +import { setConnectionTimeout } from "./set-connection-timeout"; +import { setSocketKeepAlive } from "./set-socket-keep-alive"; +import { setSocketTimeout } from "./set-socket-timeout"; +import { timing } from "./timing"; +import { writeRequestBody } from "./write-request-body"; +export const DEFAULT_REQUEST_TIMEOUT = 0; +export class NodeHttpHandler { + static create(instanceOrOptions) { + if (typeof instanceOrOptions?.handle === "function") { + return instanceOrOptions; + } + return new NodeHttpHandler(instanceOrOptions); + } + static checkSocketUsage(agent, socketWarningTimestamp, logger = console) { + const { sockets, requests, maxSockets } = agent; + if (typeof maxSockets !== "number" || maxSockets === Infinity) { + return socketWarningTimestamp; + } + const interval = 15000; + if (Date.now() - interval < socketWarningTimestamp) { + return socketWarningTimestamp; + } + if (sockets && requests) { + for (const origin in sockets) { + const socketsInUse = sockets[origin]?.length ?? 0; + const requestsEnqueued = requests[origin]?.length ?? 0; + if (socketsInUse >= maxSockets && requestsEnqueued >= 2 * maxSockets) { + logger?.warn?.(`@smithy/node-http-handler:WARN - socket usage at capacity=${socketsInUse} and ${requestsEnqueued} additional requests are enqueued. 
+See https://docs.aws.amazon.com/sdk-for-javascript/v3/developer-guide/node-configuring-maxsockets.html +or increase socketAcquisitionWarningTimeout=(millis) in the NodeHttpHandler config.`); + return Date.now(); + } + } + } + return socketWarningTimestamp; + } + constructor(options) { + this.socketWarningTimestamp = 0; + this.metadata = { handlerProtocol: "http/1.1" }; + this.configProvider = new Promise((resolve, reject) => { + if (typeof options === "function") { + options() + .then((_options) => { + resolve(this.resolveDefaultConfig(_options)); + }) + .catch(reject); + } + else { + resolve(this.resolveDefaultConfig(options)); + } + }); + } + resolveDefaultConfig(options) { + const { requestTimeout, connectionTimeout, socketTimeout, socketAcquisitionWarningTimeout, httpAgent, httpsAgent } = options || {}; + const keepAlive = true; + const maxSockets = 50; + return { + connectionTimeout, + requestTimeout: requestTimeout ?? socketTimeout, + socketAcquisitionWarningTimeout, + httpAgent: (() => { + if (httpAgent instanceof hAgent || typeof httpAgent?.destroy === "function") { + return httpAgent; + } + return new hAgent({ keepAlive, maxSockets, ...httpAgent }); + })(), + httpsAgent: (() => { + if (httpsAgent instanceof hsAgent || typeof httpsAgent?.destroy === "function") { + return httpsAgent; + } + return new hsAgent({ keepAlive, maxSockets, ...httpsAgent }); + })(), + logger: console, + }; + } + destroy() { + this.config?.httpAgent?.destroy(); + this.config?.httpsAgent?.destroy(); + } + async handle(request, { abortSignal } = {}) { + if (!this.config) { + this.config = await this.configProvider; + } + return new Promise((_resolve, _reject) => { + let writeRequestBodyPromise = undefined; + const timeouts = []; + const resolve = async (arg) => { + await writeRequestBodyPromise; + timeouts.forEach(timing.clearTimeout); + _resolve(arg); + }; + const reject = async (arg) => { + await writeRequestBodyPromise; + timeouts.forEach(timing.clearTimeout); + _reject(arg); + }; 
+ if (!this.config) { + throw new Error("Node HTTP request handler config is not resolved"); + } + if (abortSignal?.aborted) { + const abortError = new Error("Request aborted"); + abortError.name = "AbortError"; + reject(abortError); + return; + } + const isSSL = request.protocol === "https:"; + const agent = isSSL ? this.config.httpsAgent : this.config.httpAgent; + timeouts.push(timing.setTimeout(() => { + this.socketWarningTimestamp = NodeHttpHandler.checkSocketUsage(agent, this.socketWarningTimestamp, this.config.logger); + }, this.config.socketAcquisitionWarningTimeout ?? + (this.config.requestTimeout ?? 2000) + (this.config.connectionTimeout ?? 1000))); + const queryString = buildQueryString(request.query || {}); + let auth = undefined; + if (request.username != null || request.password != null) { + const username = request.username ?? ""; + const password = request.password ?? ""; + auth = `${username}:${password}`; + } + let path = request.path; + if (queryString) { + path += `?${queryString}`; + } + if (request.fragment) { + path += `#${request.fragment}`; + } + let hostname = request.hostname ?? ""; + if (hostname[0] === "[" && hostname.endsWith("]")) { + hostname = request.hostname.slice(1, -1); + } + else { + hostname = request.hostname; + } + const nodeHttpsOptions = { + headers: request.headers, + host: hostname, + method: request.method, + path, + port: request.port, + agent, + auth, + }; + const requestFunc = isSSL ? 
hsRequest : hRequest; + const req = requestFunc(nodeHttpsOptions, (res) => { + const httpResponse = new HttpResponse({ + statusCode: res.statusCode || -1, + reason: res.statusMessage, + headers: getTransformedHeaders(res.headers), + body: res, + }); + resolve({ response: httpResponse }); + }); + req.on("error", (err) => { + if (NODEJS_TIMEOUT_ERROR_CODES.includes(err.code)) { + reject(Object.assign(err, { name: "TimeoutError" })); + } + else { + reject(err); + } + }); + if (abortSignal) { + const onAbort = () => { + req.destroy(); + const abortError = new Error("Request aborted"); + abortError.name = "AbortError"; + reject(abortError); + }; + if (typeof abortSignal.addEventListener === "function") { + const signal = abortSignal; + signal.addEventListener("abort", onAbort, { once: true }); + req.once("close", () => signal.removeEventListener("abort", onAbort)); + } + else { + abortSignal.onabort = onAbort; + } + } + timeouts.push(setConnectionTimeout(req, reject, this.config.connectionTimeout)); + timeouts.push(setSocketTimeout(req, reject, this.config.requestTimeout)); + const httpAgent = nodeHttpsOptions.agent; + if (typeof httpAgent === "object" && "keepAlive" in httpAgent) { + timeouts.push(setSocketKeepAlive(req, { + keepAlive: httpAgent.keepAlive, + keepAliveMsecs: httpAgent.keepAliveMsecs, + })); + } + writeRequestBodyPromise = writeRequestBody(req, request, this.config.requestTimeout).catch((e) => { + timeouts.forEach(timing.clearTimeout); + return _reject(e); + }); + }); + } + updateHttpClientConfig(key, value) { + this.config = undefined; + this.configProvider = this.configProvider.then((config) => { + return { + ...config, + [key]: value, + }; + }); + } + httpHandlerConfigs() { + return this.config ?? 
{}; + } +} diff --git a/amplify/functions/fetchDocuments/node_modules/@smithy/node-http-handler/dist-es/node-http2-connection-manager.js b/amplify/functions/fetchDocuments/node_modules/@smithy/node-http-handler/dist-es/node-http2-connection-manager.js new file mode 100644 index 0000000..206d94f --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@smithy/node-http-handler/dist-es/node-http2-connection-manager.js @@ -0,0 +1,86 @@ +import http2 from "http2"; +import { NodeHttp2ConnectionPool } from "./node-http2-connection-pool"; +export class NodeHttp2ConnectionManager { + constructor(config) { + this.sessionCache = new Map(); + this.config = config; + if (this.config.maxConcurrency && this.config.maxConcurrency <= 0) { + throw new RangeError("maxConcurrency must be greater than zero."); + } + } + lease(requestContext, connectionConfiguration) { + const url = this.getUrlString(requestContext); + const existingPool = this.sessionCache.get(url); + if (existingPool) { + const existingSession = existingPool.poll(); + if (existingSession && !this.config.disableConcurrency) { + return existingSession; + } + } + const session = http2.connect(url); + if (this.config.maxConcurrency) { + session.settings({ maxConcurrentStreams: this.config.maxConcurrency }, (err) => { + if (err) { + throw new Error("Fail to set maxConcurrentStreams to " + + this.config.maxConcurrency + + "when creating new session for " + + requestContext.destination.toString()); + } + }); + } + session.unref(); + const destroySessionCb = () => { + session.destroy(); + this.deleteSession(url, session); + }; + session.on("goaway", destroySessionCb); + session.on("error", destroySessionCb); + session.on("frameError", destroySessionCb); + session.on("close", () => this.deleteSession(url, session)); + if (connectionConfiguration.requestTimeout) { + session.setTimeout(connectionConfiguration.requestTimeout, destroySessionCb); + } + const connectionPool = this.sessionCache.get(url) || new 
NodeHttp2ConnectionPool(); + connectionPool.offerLast(session); + this.sessionCache.set(url, connectionPool); + return session; + } + deleteSession(authority, session) { + const existingConnectionPool = this.sessionCache.get(authority); + if (!existingConnectionPool) { + return; + } + if (!existingConnectionPool.contains(session)) { + return; + } + existingConnectionPool.remove(session); + this.sessionCache.set(authority, existingConnectionPool); + } + release(requestContext, session) { + const cacheKey = this.getUrlString(requestContext); + this.sessionCache.get(cacheKey)?.offerLast(session); + } + destroy() { + for (const [key, connectionPool] of this.sessionCache) { + for (const session of connectionPool) { + if (!session.destroyed) { + session.destroy(); + } + connectionPool.remove(session); + } + this.sessionCache.delete(key); + } + } + setMaxConcurrentStreams(maxConcurrentStreams) { + if (maxConcurrentStreams && maxConcurrentStreams <= 0) { + throw new RangeError("maxConcurrentStreams must be greater than zero."); + } + this.config.maxConcurrency = maxConcurrentStreams; + } + setDisableConcurrentStreams(disableConcurrentStreams) { + this.config.disableConcurrency = disableConcurrentStreams; + } + getUrlString(request) { + return request.destination.toString(); + } +} diff --git a/amplify/functions/fetchDocuments/node_modules/@smithy/node-http-handler/dist-es/node-http2-connection-pool.js b/amplify/functions/fetchDocuments/node_modules/@smithy/node-http-handler/dist-es/node-http2-connection-pool.js new file mode 100644 index 0000000..429eb49 --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@smithy/node-http-handler/dist-es/node-http2-connection-pool.js @@ -0,0 +1,32 @@ +export class NodeHttp2ConnectionPool { + constructor(sessions) { + this.sessions = []; + this.sessions = sessions ?? 
[]; + } + poll() { + if (this.sessions.length > 0) { + return this.sessions.shift(); + } + } + offerLast(session) { + this.sessions.push(session); + } + contains(session) { + return this.sessions.includes(session); + } + remove(session) { + this.sessions = this.sessions.filter((s) => s !== session); + } + [Symbol.iterator]() { + return this.sessions[Symbol.iterator](); + } + destroy(connection) { + for (const session of this.sessions) { + if (session === connection) { + if (!session.destroyed) { + session.destroy(); + } + } + } + } +} diff --git a/amplify/functions/fetchDocuments/node_modules/@smithy/node-http-handler/dist-es/node-http2-handler.js b/amplify/functions/fetchDocuments/node_modules/@smithy/node-http-handler/dist-es/node-http2-handler.js new file mode 100644 index 0000000..b68601e --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@smithy/node-http-handler/dist-es/node-http2-handler.js @@ -0,0 +1,167 @@ +import { HttpResponse } from "@smithy/protocol-http"; +import { buildQueryString } from "@smithy/querystring-builder"; +import { constants } from "http2"; +import { getTransformedHeaders } from "./get-transformed-headers"; +import { NodeHttp2ConnectionManager } from "./node-http2-connection-manager"; +import { writeRequestBody } from "./write-request-body"; +export class NodeHttp2Handler { + static create(instanceOrOptions) { + if (typeof instanceOrOptions?.handle === "function") { + return instanceOrOptions; + } + return new NodeHttp2Handler(instanceOrOptions); + } + constructor(options) { + this.metadata = { handlerProtocol: "h2" }; + this.connectionManager = new NodeHttp2ConnectionManager({}); + this.configProvider = new Promise((resolve, reject) => { + if (typeof options === "function") { + options() + .then((opts) => { + resolve(opts || {}); + }) + .catch(reject); + } + else { + resolve(options || {}); + } + }); + } + destroy() { + this.connectionManager.destroy(); + } + async handle(request, { abortSignal } = {}) { + if 
(!this.config) { + this.config = await this.configProvider; + this.connectionManager.setDisableConcurrentStreams(this.config.disableConcurrentStreams || false); + if (this.config.maxConcurrentStreams) { + this.connectionManager.setMaxConcurrentStreams(this.config.maxConcurrentStreams); + } + } + const { requestTimeout, disableConcurrentStreams } = this.config; + return new Promise((_resolve, _reject) => { + let fulfilled = false; + let writeRequestBodyPromise = undefined; + const resolve = async (arg) => { + await writeRequestBodyPromise; + _resolve(arg); + }; + const reject = async (arg) => { + await writeRequestBodyPromise; + _reject(arg); + }; + if (abortSignal?.aborted) { + fulfilled = true; + const abortError = new Error("Request aborted"); + abortError.name = "AbortError"; + reject(abortError); + return; + } + const { hostname, method, port, protocol, query } = request; + let auth = ""; + if (request.username != null || request.password != null) { + const username = request.username ?? ""; + const password = request.password ?? ""; + auth = `${username}:${password}@`; + } + const authority = `${protocol}//${auth}${hostname}${port ? 
`:${port}` : ""}`; + const requestContext = { destination: new URL(authority) }; + const session = this.connectionManager.lease(requestContext, { + requestTimeout: this.config?.sessionTimeout, + disableConcurrentStreams: disableConcurrentStreams || false, + }); + const rejectWithDestroy = (err) => { + if (disableConcurrentStreams) { + this.destroySession(session); + } + fulfilled = true; + reject(err); + }; + const queryString = buildQueryString(query || {}); + let path = request.path; + if (queryString) { + path += `?${queryString}`; + } + if (request.fragment) { + path += `#${request.fragment}`; + } + const req = session.request({ + ...request.headers, + [constants.HTTP2_HEADER_PATH]: path, + [constants.HTTP2_HEADER_METHOD]: method, + }); + session.ref(); + req.on("response", (headers) => { + const httpResponse = new HttpResponse({ + statusCode: headers[":status"] || -1, + headers: getTransformedHeaders(headers), + body: req, + }); + fulfilled = true; + resolve({ response: httpResponse }); + if (disableConcurrentStreams) { + session.close(); + this.connectionManager.deleteSession(authority, session); + } + }); + if (requestTimeout) { + req.setTimeout(requestTimeout, () => { + req.close(); + const timeoutError = new Error(`Stream timed out because of no activity for ${requestTimeout} ms`); + timeoutError.name = "TimeoutError"; + rejectWithDestroy(timeoutError); + }); + } + if (abortSignal) { + const onAbort = () => { + req.close(); + const abortError = new Error("Request aborted"); + abortError.name = "AbortError"; + rejectWithDestroy(abortError); + }; + if (typeof abortSignal.addEventListener === "function") { + const signal = abortSignal; + signal.addEventListener("abort", onAbort, { once: true }); + req.once("close", () => signal.removeEventListener("abort", onAbort)); + } + else { + abortSignal.onabort = onAbort; + } + } + req.on("frameError", (type, code, id) => { + rejectWithDestroy(new Error(`Frame type id ${type} in stream id ${id} has failed with code 
${code}.`)); + }); + req.on("error", rejectWithDestroy); + req.on("aborted", () => { + rejectWithDestroy(new Error(`HTTP/2 stream is abnormally aborted in mid-communication with result code ${req.rstCode}.`)); + }); + req.on("close", () => { + session.unref(); + if (disableConcurrentStreams) { + session.destroy(); + } + if (!fulfilled) { + rejectWithDestroy(new Error("Unexpected error: http2 request did not get a response")); + } + }); + writeRequestBodyPromise = writeRequestBody(req, request, requestTimeout); + }); + } + updateHttpClientConfig(key, value) { + this.config = undefined; + this.configProvider = this.configProvider.then((config) => { + return { + ...config, + [key]: value, + }; + }); + } + httpHandlerConfigs() { + return this.config ?? {}; + } + destroySession(session) { + if (!session.destroyed) { + session.destroy(); + } + } +} diff --git a/amplify/functions/fetchDocuments/node_modules/@smithy/node-http-handler/dist-es/readable.mock.js b/amplify/functions/fetchDocuments/node_modules/@smithy/node-http-handler/dist-es/readable.mock.js new file mode 100644 index 0000000..41fb0b6 --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@smithy/node-http-handler/dist-es/readable.mock.js @@ -0,0 +1,19 @@ +import { Readable } from "stream"; +export class ReadFromBuffers extends Readable { + constructor(options) { + super(options); + this.numBuffersRead = 0; + this.buffersToRead = options.buffers; + this.errorAfter = typeof options.errorAfter === "number" ? 
options.errorAfter : -1; + } + _read() { + if (this.errorAfter !== -1 && this.errorAfter === this.numBuffersRead) { + this.emit("error", new Error("Mock Error")); + return; + } + if (this.numBuffersRead >= this.buffersToRead.length) { + return this.push(null); + } + return this.push(this.buffersToRead[this.numBuffersRead++]); + } +} diff --git a/amplify/functions/fetchDocuments/node_modules/@smithy/node-http-handler/dist-es/server.mock.js b/amplify/functions/fetchDocuments/node_modules/@smithy/node-http-handler/dist-es/server.mock.js new file mode 100644 index 0000000..6a31adf --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@smithy/node-http-handler/dist-es/server.mock.js @@ -0,0 +1,88 @@ +import { readFileSync } from "fs"; +import { createServer as createHttpServer } from "http"; +import { createServer as createHttp2Server } from "http2"; +import { createServer as createHttpsServer } from "https"; +import { join } from "path"; +import { Readable } from "stream"; +import { timing } from "./timing"; +const fixturesDir = join(__dirname, "..", "fixtures"); +const setResponseHeaders = (response, headers) => { + for (const [key, value] of Object.entries(headers)) { + response.setHeader(key, value); + } +}; +const setResponseBody = (response, body) => { + if (body instanceof Readable) { + body.pipe(response); + } + else { + response.end(body); + } +}; +export const createResponseFunction = (httpResp) => (request, response) => { + response.statusCode = httpResp.statusCode; + if (httpResp.reason) { + response.statusMessage = httpResp.reason; + } + setResponseHeaders(response, httpResp.headers); + setResponseBody(response, httpResp.body); +}; +export const createResponseFunctionWithDelay = (httpResp, delay) => (request, response) => { + response.statusCode = httpResp.statusCode; + if (httpResp.reason) { + response.statusMessage = httpResp.reason; + } + setResponseHeaders(response, httpResp.headers); + timing.setTimeout(() => setResponseBody(response, 
httpResp.body), delay); +}; +export const createContinueResponseFunction = (httpResp) => (request, response) => { + response.writeContinue(); + timing.setTimeout(() => { + createResponseFunction(httpResp)(request, response); + }, 100); +}; +export const createMockHttpsServer = () => { + const server = createHttpsServer({ + key: readFileSync(join(fixturesDir, "test-server-key.pem")), + cert: readFileSync(join(fixturesDir, "test-server-cert.pem")), + }); + return server; +}; +export const createMockHttpServer = () => { + const server = createHttpServer(); + return server; +}; +export const createMockHttp2Server = () => { + const server = createHttp2Server(); + return server; +}; +export const createMirrorResponseFunction = (httpResp) => (request, response) => { + const bufs = []; + request.on("data", (chunk) => { + bufs.push(chunk); + }); + request.on("end", () => { + response.statusCode = httpResp.statusCode; + setResponseHeaders(response, httpResp.headers); + setResponseBody(response, Buffer.concat(bufs)); + }); + request.on("error", (err) => { + response.statusCode = 500; + setResponseHeaders(response, httpResp.headers); + setResponseBody(response, err.message); + }); +}; +export const getResponseBody = (response) => { + return new Promise((resolve, reject) => { + const bufs = []; + response.body.on("data", function (d) { + bufs.push(d); + }); + response.body.on("end", function () { + resolve(Buffer.concat(bufs).toString()); + }); + response.body.on("error", (err) => { + reject(err); + }); + }); +}; diff --git a/amplify/functions/fetchDocuments/node_modules/@smithy/node-http-handler/dist-es/set-connection-timeout.js b/amplify/functions/fetchDocuments/node_modules/@smithy/node-http-handler/dist-es/set-connection-timeout.js new file mode 100644 index 0000000..587532e --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@smithy/node-http-handler/dist-es/set-connection-timeout.js @@ -0,0 +1,36 @@ +import { timing } from "./timing"; +const 
DEFER_EVENT_LISTENER_TIME = 1000; +export const setConnectionTimeout = (request, reject, timeoutInMs = 0) => { + if (!timeoutInMs) { + return -1; + } + const registerTimeout = (offset) => { + const timeoutId = timing.setTimeout(() => { + request.destroy(); + reject(Object.assign(new Error(`Socket timed out without establishing a connection within ${timeoutInMs} ms`), { + name: "TimeoutError", + })); + }, timeoutInMs - offset); + const doWithSocket = (socket) => { + if (socket?.connecting) { + socket.on("connect", () => { + timing.clearTimeout(timeoutId); + }); + } + else { + timing.clearTimeout(timeoutId); + } + }; + if (request.socket) { + doWithSocket(request.socket); + } + else { + request.on("socket", doWithSocket); + } + }; + if (timeoutInMs < 2000) { + registerTimeout(0); + return 0; + } + return timing.setTimeout(registerTimeout.bind(null, DEFER_EVENT_LISTENER_TIME), DEFER_EVENT_LISTENER_TIME); +}; diff --git a/amplify/functions/fetchDocuments/node_modules/@smithy/node-http-handler/dist-es/set-socket-keep-alive.js b/amplify/functions/fetchDocuments/node_modules/@smithy/node-http-handler/dist-es/set-socket-keep-alive.js new file mode 100644 index 0000000..18391a8 --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@smithy/node-http-handler/dist-es/set-socket-keep-alive.js @@ -0,0 +1,22 @@ +import { timing } from "./timing"; +const DEFER_EVENT_LISTENER_TIME = 3000; +export const setSocketKeepAlive = (request, { keepAlive, keepAliveMsecs }, deferTimeMs = DEFER_EVENT_LISTENER_TIME) => { + if (keepAlive !== true) { + return -1; + } + const registerListener = () => { + if (request.socket) { + request.socket.setKeepAlive(keepAlive, keepAliveMsecs || 0); + } + else { + request.on("socket", (socket) => { + socket.setKeepAlive(keepAlive, keepAliveMsecs || 0); + }); + } + }; + if (deferTimeMs === 0) { + registerListener(); + return 0; + } + return timing.setTimeout(registerListener, deferTimeMs); +}; diff --git 
a/amplify/functions/fetchDocuments/node_modules/@smithy/node-http-handler/dist-es/set-socket-timeout.js b/amplify/functions/fetchDocuments/node_modules/@smithy/node-http-handler/dist-es/set-socket-timeout.js new file mode 100644 index 0000000..5c4456c --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@smithy/node-http-handler/dist-es/set-socket-timeout.js @@ -0,0 +1,24 @@ +import { DEFAULT_REQUEST_TIMEOUT } from "./node-http-handler"; +import { timing } from "./timing"; +const DEFER_EVENT_LISTENER_TIME = 3000; +export const setSocketTimeout = (request, reject, timeoutInMs = DEFAULT_REQUEST_TIMEOUT) => { + const registerTimeout = (offset) => { + const timeout = timeoutInMs - offset; + const onTimeout = () => { + request.destroy(); + reject(Object.assign(new Error(`Connection timed out after ${timeoutInMs} ms`), { name: "TimeoutError" })); + }; + if (request.socket) { + request.socket.setTimeout(timeout, onTimeout); + request.on("close", () => request.socket?.removeListener("timeout", onTimeout)); + } + else { + request.setTimeout(timeout, onTimeout); + } + }; + if (0 < timeoutInMs && timeoutInMs < 6000) { + registerTimeout(0); + return 0; + } + return timing.setTimeout(registerTimeout.bind(null, timeoutInMs === 0 ? 
0 : DEFER_EVENT_LISTENER_TIME), DEFER_EVENT_LISTENER_TIME); +}; diff --git a/amplify/functions/fetchDocuments/node_modules/@smithy/node-http-handler/dist-es/stream-collector/collector.js b/amplify/functions/fetchDocuments/node_modules/@smithy/node-http-handler/dist-es/stream-collector/collector.js new file mode 100644 index 0000000..c3737e9 --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@smithy/node-http-handler/dist-es/stream-collector/collector.js @@ -0,0 +1,11 @@ +import { Writable } from "stream"; +export class Collector extends Writable { + constructor() { + super(...arguments); + this.bufferedBytes = []; + } + _write(chunk, encoding, callback) { + this.bufferedBytes.push(chunk); + callback(); + } +} diff --git a/amplify/functions/fetchDocuments/node_modules/@smithy/node-http-handler/dist-es/stream-collector/index.js b/amplify/functions/fetchDocuments/node_modules/@smithy/node-http-handler/dist-es/stream-collector/index.js new file mode 100644 index 0000000..8ff09c0 --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@smithy/node-http-handler/dist-es/stream-collector/index.js @@ -0,0 +1,41 @@ +import { Collector } from "./collector"; +export const streamCollector = (stream) => { + if (isReadableStreamInstance(stream)) { + return collectReadableStream(stream); + } + return new Promise((resolve, reject) => { + const collector = new Collector(); + stream.pipe(collector); + stream.on("error", (err) => { + collector.end(); + reject(err); + }); + collector.on("error", reject); + collector.on("finish", function () { + const bytes = new Uint8Array(Buffer.concat(this.bufferedBytes)); + resolve(bytes); + }); + }); +}; +const isReadableStreamInstance = (stream) => typeof ReadableStream === "function" && stream instanceof ReadableStream; +async function collectReadableStream(stream) { + const chunks = []; + const reader = stream.getReader(); + let isDone = false; + let length = 0; + while (!isDone) { + const { done, value } = await 
reader.read(); + if (value) { + chunks.push(value); + length += value.length; + } + isDone = done; + } + const collected = new Uint8Array(length); + let offset = 0; + for (const chunk of chunks) { + collected.set(chunk, offset); + offset += chunk.length; + } + return collected; +} diff --git a/amplify/functions/fetchDocuments/node_modules/@smithy/node-http-handler/dist-es/stream-collector/readable.mock.js b/amplify/functions/fetchDocuments/node_modules/@smithy/node-http-handler/dist-es/stream-collector/readable.mock.js new file mode 100644 index 0000000..2f653c5 --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@smithy/node-http-handler/dist-es/stream-collector/readable.mock.js @@ -0,0 +1,19 @@ +import { Readable } from "stream"; +export class ReadFromBuffers extends Readable { + constructor(options) { + super(options); + this.numBuffersRead = 0; + this.buffersToRead = options.buffers; + this.errorAfter = typeof options.errorAfter === "number" ? options.errorAfter : -1; + } + _read(size) { + if (this.errorAfter !== -1 && this.errorAfter === this.numBuffersRead) { + this.emit("error", new Error("Mock Error")); + return; + } + if (this.numBuffersRead >= this.buffersToRead.length) { + return this.push(null); + } + return this.push(this.buffersToRead[this.numBuffersRead++]); + } +} diff --git a/amplify/functions/fetchDocuments/node_modules/@smithy/node-http-handler/dist-es/timing.js b/amplify/functions/fetchDocuments/node_modules/@smithy/node-http-handler/dist-es/timing.js new file mode 100644 index 0000000..792ba48 --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@smithy/node-http-handler/dist-es/timing.js @@ -0,0 +1,4 @@ +export const timing = { + setTimeout: (cb, ms) => setTimeout(cb, ms), + clearTimeout: (timeoutId) => clearTimeout(timeoutId), +}; diff --git a/amplify/functions/fetchDocuments/node_modules/@smithy/node-http-handler/dist-es/write-request-body.js 
b/amplify/functions/fetchDocuments/node_modules/@smithy/node-http-handler/dist-es/write-request-body.js new file mode 100644 index 0000000..36e15f9 --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@smithy/node-http-handler/dist-es/write-request-body.js @@ -0,0 +1,56 @@ +import { Readable } from "stream"; +import { timing } from "./timing"; +const MIN_WAIT_TIME = 6000; +export async function writeRequestBody(httpRequest, request, maxContinueTimeoutMs = MIN_WAIT_TIME) { + const headers = request.headers ?? {}; + const expect = headers["Expect"] || headers["expect"]; + let timeoutId = -1; + let sendBody = true; + if (expect === "100-continue") { + sendBody = await Promise.race([ + new Promise((resolve) => { + timeoutId = Number(timing.setTimeout(() => resolve(true), Math.max(MIN_WAIT_TIME, maxContinueTimeoutMs))); + }), + new Promise((resolve) => { + httpRequest.on("continue", () => { + timing.clearTimeout(timeoutId); + resolve(true); + }); + httpRequest.on("response", () => { + timing.clearTimeout(timeoutId); + resolve(false); + }); + httpRequest.on("error", () => { + timing.clearTimeout(timeoutId); + resolve(false); + }); + }), + ]); + } + if (sendBody) { + writeBody(httpRequest, request.body); + } +} +function writeBody(httpRequest, body) { + if (body instanceof Readable) { + body.pipe(httpRequest); + return; + } + if (body) { + if (Buffer.isBuffer(body) || typeof body === "string") { + httpRequest.end(body); + return; + } + const uint8 = body; + if (typeof uint8 === "object" && + uint8.buffer && + typeof uint8.byteOffset === "number" && + typeof uint8.byteLength === "number") { + httpRequest.end(Buffer.from(uint8.buffer, uint8.byteOffset, uint8.byteLength)); + return; + } + httpRequest.end(Buffer.from(body)); + return; + } + httpRequest.end(); +} diff --git a/amplify/functions/fetchDocuments/node_modules/@smithy/node-http-handler/dist-types/constants.d.ts 
b/amplify/functions/fetchDocuments/node_modules/@smithy/node-http-handler/dist-types/constants.d.ts new file mode 100644 index 0000000..3540461 --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@smithy/node-http-handler/dist-types/constants.d.ts @@ -0,0 +1,5 @@ +/** + * Node.js system error codes that indicate timeout. + * @deprecated use NODEJS_TIMEOUT_ERROR_CODES from @smithy/service-error-classification/constants + */ +export declare const NODEJS_TIMEOUT_ERROR_CODES: string[]; diff --git a/amplify/functions/fetchDocuments/node_modules/@smithy/node-http-handler/dist-types/get-transformed-headers.d.ts b/amplify/functions/fetchDocuments/node_modules/@smithy/node-http-handler/dist-types/get-transformed-headers.d.ts new file mode 100644 index 0000000..bb7cd4e --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@smithy/node-http-handler/dist-types/get-transformed-headers.d.ts @@ -0,0 +1,4 @@ +import { HeaderBag } from "@smithy/types"; +import { IncomingHttpHeaders } from "http2"; +declare const getTransformedHeaders: (headers: IncomingHttpHeaders) => HeaderBag; +export { getTransformedHeaders }; diff --git a/amplify/functions/fetchDocuments/node_modules/@smithy/node-http-handler/dist-types/index.d.ts b/amplify/functions/fetchDocuments/node_modules/@smithy/node-http-handler/dist-types/index.d.ts new file mode 100644 index 0000000..09c0b9a --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@smithy/node-http-handler/dist-types/index.d.ts @@ -0,0 +1,3 @@ +export * from "./node-http-handler"; +export * from "./node-http2-handler"; +export * from "./stream-collector"; diff --git a/amplify/functions/fetchDocuments/node_modules/@smithy/node-http-handler/dist-types/node-http-handler.d.ts b/amplify/functions/fetchDocuments/node_modules/@smithy/node-http-handler/dist-types/node-http-handler.d.ts new file mode 100644 index 0000000..b120313 --- /dev/null +++ 
b/amplify/functions/fetchDocuments/node_modules/@smithy/node-http-handler/dist-types/node-http-handler.d.ts @@ -0,0 +1,47 @@ +/// +/// +import { HttpHandler, HttpRequest, HttpResponse } from "@smithy/protocol-http"; +import type { Logger, NodeHttpHandlerOptions } from "@smithy/types"; +import { HttpHandlerOptions, Provider } from "@smithy/types"; +import { Agent as hAgent } from "http"; +import { Agent as hsAgent } from "https"; +export { NodeHttpHandlerOptions }; +/** + * @public + * A default of 0 means no timeout. + */ +export declare const DEFAULT_REQUEST_TIMEOUT = 0; +/** + * @public + * A request handler that uses the Node.js http and https modules. + */ +export declare class NodeHttpHandler implements HttpHandler { + private config?; + private configProvider; + private socketWarningTimestamp; + readonly metadata: { + handlerProtocol: string; + }; + /** + * @returns the input if it is an HttpHandler of any class, + * or instantiates a new instance of this handler. + */ + static create(instanceOrOptions?: HttpHandler | NodeHttpHandlerOptions | Provider): NodeHttpHandler | HttpHandler; + /** + * @internal + * + * @param agent - http(s) agent in use by the NodeHttpHandler instance. + * @param socketWarningTimestamp - last socket usage check timestamp. + * @param logger - channel for the warning. + * @returns timestamp of last emitted warning. 
+ */ + static checkSocketUsage(agent: hAgent | hsAgent, socketWarningTimestamp: number, logger?: Logger): number; + constructor(options?: NodeHttpHandlerOptions | Provider); + private resolveDefaultConfig; + destroy(): void; + handle(request: HttpRequest, { abortSignal }?: HttpHandlerOptions): Promise<{ + response: HttpResponse; + }>; + updateHttpClientConfig(key: keyof NodeHttpHandlerOptions, value: NodeHttpHandlerOptions[typeof key]): void; + httpHandlerConfigs(): NodeHttpHandlerOptions; +} diff --git a/amplify/functions/fetchDocuments/node_modules/@smithy/node-http-handler/dist-types/node-http2-connection-manager.d.ts b/amplify/functions/fetchDocuments/node_modules/@smithy/node-http-handler/dist-types/node-http2-connection-manager.d.ts new file mode 100644 index 0000000..24bc3b5 --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@smithy/node-http-handler/dist-types/node-http2-connection-manager.d.ts @@ -0,0 +1,25 @@ +/// +import { RequestContext } from "@smithy/types"; +import { ConnectConfiguration } from "@smithy/types"; +import { ConnectionManager, ConnectionManagerConfiguration } from "@smithy/types"; +import { ClientHttp2Session } from "http2"; +/** + * @public + */ +export declare class NodeHttp2ConnectionManager implements ConnectionManager { + constructor(config: ConnectionManagerConfiguration); + private config; + private readonly sessionCache; + lease(requestContext: RequestContext, connectionConfiguration: ConnectConfiguration): ClientHttp2Session; + /** + * Delete a session from the connection pool. + * @param authority The authority of the session to delete. + * @param session The session to delete. 
+ */ + deleteSession(authority: string, session: ClientHttp2Session): void; + release(requestContext: RequestContext, session: ClientHttp2Session): void; + destroy(): void; + setMaxConcurrentStreams(maxConcurrentStreams: number): void; + setDisableConcurrentStreams(disableConcurrentStreams: boolean): void; + private getUrlString; +} diff --git a/amplify/functions/fetchDocuments/node_modules/@smithy/node-http-handler/dist-types/node-http2-connection-pool.d.ts b/amplify/functions/fetchDocuments/node_modules/@smithy/node-http-handler/dist-types/node-http2-connection-pool.d.ts new file mode 100644 index 0000000..6695893 --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@smithy/node-http-handler/dist-types/node-http2-connection-pool.d.ts @@ -0,0 +1,13 @@ +/// +import { ConnectionPool } from "@smithy/types"; +import { ClientHttp2Session } from "http2"; +export declare class NodeHttp2ConnectionPool implements ConnectionPool { + private sessions; + constructor(sessions?: ClientHttp2Session[]); + poll(): ClientHttp2Session | void; + offerLast(session: ClientHttp2Session): void; + contains(session: ClientHttp2Session): boolean; + remove(session: ClientHttp2Session): void; + [Symbol.iterator](): IterableIterator; + destroy(connection: ClientHttp2Session): void; +} diff --git a/amplify/functions/fetchDocuments/node_modules/@smithy/node-http-handler/dist-types/node-http2-handler.d.ts b/amplify/functions/fetchDocuments/node_modules/@smithy/node-http-handler/dist-types/node-http2-handler.d.ts new file mode 100644 index 0000000..68610a9 --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@smithy/node-http-handler/dist-types/node-http2-handler.d.ts @@ -0,0 +1,62 @@ +import { HttpHandler, HttpRequest, HttpResponse } from "@smithy/protocol-http"; +import { HttpHandlerOptions, Provider } from "@smithy/types"; +/** + * Represents the http2 options that can be passed to a node http2 client. 
+ * @public + */ +export interface NodeHttp2HandlerOptions { + /** + * The maximum time in milliseconds that a stream may remain idle before it + * is closed. + */ + requestTimeout?: number; + /** + * The maximum time in milliseconds that a session or socket may remain idle + * before it is closed. + * https://nodejs.org/docs/latest-v12.x/api/http2.html#http2_http2session_and_sockets + */ + sessionTimeout?: number; + /** + * Disables processing concurrent streams on a ClientHttp2Session instance. When set + * to true, a new session instance is created for each request to a URL. + * **Default:** false. + * https://nodejs.org/api/http2.html#http2_class_clienthttp2session + */ + disableConcurrentStreams?: boolean; + /** + * Maximum number of concurrent Http2Stream instances per ClientHttp2Session. Each session + * may have up to 2^31-1 Http2Stream instances over its lifetime. + * This value must be greater than or equal to 0. + * https://nodejs.org/api/http2.html#class-http2stream + */ + maxConcurrentStreams?: number; +} +/** + * A request handler using the node:http2 package. + * @public + */ +export declare class NodeHttp2Handler implements HttpHandler { + private config?; + private configProvider; + readonly metadata: { + handlerProtocol: string; + }; + private readonly connectionManager; + /** + * @returns the input if it is an HttpHandler of any class, + * or instantiates a new instance of this handler. + */ + static create(instanceOrOptions?: HttpHandler | NodeHttp2HandlerOptions | Provider): HttpHandler | NodeHttp2Handler; + constructor(options?: NodeHttp2HandlerOptions | Provider); + destroy(): void; + handle(request: HttpRequest, { abortSignal }?: HttpHandlerOptions): Promise<{ + response: HttpResponse; + }>; + updateHttpClientConfig(key: keyof NodeHttp2HandlerOptions, value: NodeHttp2HandlerOptions[typeof key]): void; + httpHandlerConfigs(): NodeHttp2HandlerOptions; + /** + * Destroys a session. + * @param session - the session to destroy. 
+ */ + private destroySession; +} diff --git a/amplify/functions/fetchDocuments/node_modules/@smithy/node-http-handler/dist-types/readable.mock.d.ts b/amplify/functions/fetchDocuments/node_modules/@smithy/node-http-handler/dist-types/readable.mock.d.ts new file mode 100644 index 0000000..cd7e77f --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@smithy/node-http-handler/dist-types/readable.mock.d.ts @@ -0,0 +1,15 @@ +/// +/// +/// +import { Readable, ReadableOptions } from "stream"; +export interface ReadFromBuffersOptions extends ReadableOptions { + buffers: Buffer[]; + errorAfter?: number; +} +export declare class ReadFromBuffers extends Readable { + private buffersToRead; + private numBuffersRead; + private errorAfter; + constructor(options: ReadFromBuffersOptions); + _read(): boolean | undefined; +} diff --git a/amplify/functions/fetchDocuments/node_modules/@smithy/node-http-handler/dist-types/server.mock.d.ts b/amplify/functions/fetchDocuments/node_modules/@smithy/node-http-handler/dist-types/server.mock.d.ts new file mode 100644 index 0000000..585a677 --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@smithy/node-http-handler/dist-types/server.mock.d.ts @@ -0,0 +1,12 @@ +import { HttpResponse } from "@smithy/types"; +import { IncomingMessage, Server as HttpServer, ServerResponse } from "http"; +import { Http2Server } from "http2"; +import { Server as HttpsServer } from "https"; +export declare const createResponseFunction: (httpResp: HttpResponse) => (request: IncomingMessage, response: ServerResponse) => void; +export declare const createResponseFunctionWithDelay: (httpResp: HttpResponse, delay: number) => (request: IncomingMessage, response: ServerResponse) => void; +export declare const createContinueResponseFunction: (httpResp: HttpResponse) => (request: IncomingMessage, response: ServerResponse) => void; +export declare const createMockHttpsServer: () => HttpsServer; +export declare const createMockHttpServer: () => 
HttpServer; +export declare const createMockHttp2Server: () => Http2Server; +export declare const createMirrorResponseFunction: (httpResp: HttpResponse) => (request: IncomingMessage, response: ServerResponse) => void; +export declare const getResponseBody: (response: HttpResponse) => Promise; diff --git a/amplify/functions/fetchDocuments/node_modules/@smithy/node-http-handler/dist-types/set-connection-timeout.d.ts b/amplify/functions/fetchDocuments/node_modules/@smithy/node-http-handler/dist-types/set-connection-timeout.d.ts new file mode 100644 index 0000000..57b811f --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@smithy/node-http-handler/dist-types/set-connection-timeout.d.ts @@ -0,0 +1,9 @@ +/// +/// +/// +/// +/// +/// +/// +import { ClientRequest } from "http"; +export declare const setConnectionTimeout: (request: ClientRequest, reject: (err: Error) => void, timeoutInMs?: number) => NodeJS.Timeout | number; diff --git a/amplify/functions/fetchDocuments/node_modules/@smithy/node-http-handler/dist-types/set-socket-keep-alive.d.ts b/amplify/functions/fetchDocuments/node_modules/@smithy/node-http-handler/dist-types/set-socket-keep-alive.d.ts new file mode 100644 index 0000000..80507d8 --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@smithy/node-http-handler/dist-types/set-socket-keep-alive.d.ts @@ -0,0 +1,13 @@ +/// +/// +/// +/// +/// +/// +/// +import { ClientRequest } from "http"; +export interface SocketKeepAliveOptions { + keepAlive: boolean; + keepAliveMsecs?: number; +} +export declare const setSocketKeepAlive: (request: ClientRequest, { keepAlive, keepAliveMsecs }: SocketKeepAliveOptions, deferTimeMs?: number) => NodeJS.Timeout | number; diff --git a/amplify/functions/fetchDocuments/node_modules/@smithy/node-http-handler/dist-types/set-socket-timeout.d.ts b/amplify/functions/fetchDocuments/node_modules/@smithy/node-http-handler/dist-types/set-socket-timeout.d.ts new file mode 100644 index 0000000..019a62b --- /dev/null 
+++ b/amplify/functions/fetchDocuments/node_modules/@smithy/node-http-handler/dist-types/set-socket-timeout.d.ts @@ -0,0 +1,9 @@ +/// +/// +/// +/// +/// +/// +/// +import { ClientRequest } from "http"; +export declare const setSocketTimeout: (request: ClientRequest, reject: (err: Error) => void, timeoutInMs?: number) => NodeJS.Timeout | number; diff --git a/amplify/functions/fetchDocuments/node_modules/@smithy/node-http-handler/dist-types/stream-collector/collector.d.ts b/amplify/functions/fetchDocuments/node_modules/@smithy/node-http-handler/dist-types/stream-collector/collector.d.ts new file mode 100644 index 0000000..b7d4d12 --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@smithy/node-http-handler/dist-types/stream-collector/collector.d.ts @@ -0,0 +1,8 @@ +/// +/// +/// +import { Writable } from "stream"; +export declare class Collector extends Writable { + readonly bufferedBytes: Buffer[]; + _write(chunk: Buffer, encoding: string, callback: (err?: Error) => void): void; +} diff --git a/amplify/functions/fetchDocuments/node_modules/@smithy/node-http-handler/dist-types/stream-collector/index.d.ts b/amplify/functions/fetchDocuments/node_modules/@smithy/node-http-handler/dist-types/stream-collector/index.d.ts new file mode 100644 index 0000000..a9a9498 --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@smithy/node-http-handler/dist-types/stream-collector/index.d.ts @@ -0,0 +1,6 @@ +import { StreamCollector } from "@smithy/types"; +/** + * @internal + * Converts a stream to a byte array. 
+ */ +export declare const streamCollector: StreamCollector; diff --git a/amplify/functions/fetchDocuments/node_modules/@smithy/node-http-handler/dist-types/stream-collector/readable.mock.d.ts b/amplify/functions/fetchDocuments/node_modules/@smithy/node-http-handler/dist-types/stream-collector/readable.mock.d.ts new file mode 100644 index 0000000..2543a28 --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@smithy/node-http-handler/dist-types/stream-collector/readable.mock.d.ts @@ -0,0 +1,15 @@ +/// +/// +/// +import { Readable, ReadableOptions } from "stream"; +export interface ReadFromBuffersOptions extends ReadableOptions { + buffers: Buffer[]; + errorAfter?: number; +} +export declare class ReadFromBuffers extends Readable { + private buffersToRead; + private numBuffersRead; + private errorAfter; + constructor(options: ReadFromBuffersOptions); + _read(size: number): boolean | undefined; +} diff --git a/amplify/functions/fetchDocuments/node_modules/@smithy/node-http-handler/dist-types/timing.d.ts b/amplify/functions/fetchDocuments/node_modules/@smithy/node-http-handler/dist-types/timing.d.ts new file mode 100644 index 0000000..de5b695 --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@smithy/node-http-handler/dist-types/timing.d.ts @@ -0,0 +1,8 @@ +/** + * @internal + * For test spies. 
+ */ +export declare const timing: { + setTimeout: (cb: (...ignored: any[]) => void | unknown, ms?: number) => number; + clearTimeout: (timeoutId: string | number | undefined | unknown) => void; +}; diff --git a/amplify/functions/fetchDocuments/node_modules/@smithy/node-http-handler/dist-types/ts3.4/constants.d.ts b/amplify/functions/fetchDocuments/node_modules/@smithy/node-http-handler/dist-types/ts3.4/constants.d.ts new file mode 100644 index 0000000..b02b0b6 --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@smithy/node-http-handler/dist-types/ts3.4/constants.d.ts @@ -0,0 +1,5 @@ +/** + * Node.js system error codes that indicate timeout. + * @deprecated use NODEJS_TIMEOUT_ERROR_CODES from @smithy/service-error-classification/constants + */ +export declare const NODEJS_TIMEOUT_ERROR_CODES: string[]; diff --git a/amplify/functions/fetchDocuments/node_modules/@smithy/node-http-handler/dist-types/ts3.4/get-transformed-headers.d.ts b/amplify/functions/fetchDocuments/node_modules/@smithy/node-http-handler/dist-types/ts3.4/get-transformed-headers.d.ts new file mode 100644 index 0000000..c6f5a8b --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@smithy/node-http-handler/dist-types/ts3.4/get-transformed-headers.d.ts @@ -0,0 +1,4 @@ +import { HeaderBag } from "@smithy/types"; +import { IncomingHttpHeaders } from "http2"; +declare const getTransformedHeaders: (headers: IncomingHttpHeaders) => HeaderBag; +export { getTransformedHeaders }; diff --git a/amplify/functions/fetchDocuments/node_modules/@smithy/node-http-handler/dist-types/ts3.4/index.d.ts b/amplify/functions/fetchDocuments/node_modules/@smithy/node-http-handler/dist-types/ts3.4/index.d.ts new file mode 100644 index 0000000..055c48c --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@smithy/node-http-handler/dist-types/ts3.4/index.d.ts @@ -0,0 +1,3 @@ +export * from "./node-http-handler"; +export * from "./node-http2-handler"; +export * from "./stream-collector"; diff 
--git a/amplify/functions/fetchDocuments/node_modules/@smithy/node-http-handler/dist-types/ts3.4/node-http-handler.d.ts b/amplify/functions/fetchDocuments/node_modules/@smithy/node-http-handler/dist-types/ts3.4/node-http-handler.d.ts new file mode 100644 index 0000000..eb1da7b --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@smithy/node-http-handler/dist-types/ts3.4/node-http-handler.d.ts @@ -0,0 +1,46 @@ +/// +import { HttpHandler, HttpRequest, HttpResponse } from "@smithy/protocol-http"; +import { Logger, NodeHttpHandlerOptions } from "@smithy/types"; +import { HttpHandlerOptions, Provider } from "@smithy/types"; +import { Agent as hAgent } from "http"; +import { Agent as hsAgent } from "https"; +export { NodeHttpHandlerOptions }; +/** + * @public + * A default of 0 means no timeout. + */ +export declare const DEFAULT_REQUEST_TIMEOUT = 0; +/** + * @public + * A request handler that uses the Node.js http and https modules. + */ +export declare class NodeHttpHandler implements HttpHandler { + private config?; + private configProvider; + private socketWarningTimestamp; + readonly metadata: { + handlerProtocol: string; + }; + /** + * @returns the input if it is an HttpHandler of any class, + * or instantiates a new instance of this handler. + */ + static create(instanceOrOptions?: HttpHandler | NodeHttpHandlerOptions | Provider): NodeHttpHandler | HttpHandler; + /** + * @internal + * + * @param agent - http(s) agent in use by the NodeHttpHandler instance. + * @param socketWarningTimestamp - last socket usage check timestamp. + * @param logger - channel for the warning. + * @returns timestamp of last emitted warning. 
+ */ + static checkSocketUsage(agent: hAgent | hsAgent, socketWarningTimestamp: number, logger?: Logger): number; + constructor(options?: NodeHttpHandlerOptions | Provider); + private resolveDefaultConfig; + destroy(): void; + handle(request: HttpRequest, { abortSignal }?: HttpHandlerOptions): Promise<{ + response: HttpResponse; + }>; + updateHttpClientConfig(key: keyof NodeHttpHandlerOptions, value: NodeHttpHandlerOptions[typeof key]): void; + httpHandlerConfigs(): NodeHttpHandlerOptions; +} diff --git a/amplify/functions/fetchDocuments/node_modules/@smithy/node-http-handler/dist-types/ts3.4/node-http2-connection-manager.d.ts b/amplify/functions/fetchDocuments/node_modules/@smithy/node-http-handler/dist-types/ts3.4/node-http2-connection-manager.d.ts new file mode 100644 index 0000000..8aa87c1 --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@smithy/node-http-handler/dist-types/ts3.4/node-http2-connection-manager.d.ts @@ -0,0 +1,25 @@ +/// +import { RequestContext } from "@smithy/types"; +import { ConnectConfiguration } from "@smithy/types"; +import { ConnectionManager, ConnectionManagerConfiguration } from "@smithy/types"; +import { ClientHttp2Session } from "http2"; +/** + * @public + */ +export declare class NodeHttp2ConnectionManager implements ConnectionManager { + constructor(config: ConnectionManagerConfiguration); + private config; + private readonly sessionCache; + lease(requestContext: RequestContext, connectionConfiguration: ConnectConfiguration): ClientHttp2Session; + /** + * Delete a session from the connection pool. + * @param authority The authority of the session to delete. + * @param session The session to delete. 
+ */ + deleteSession(authority: string, session: ClientHttp2Session): void; + release(requestContext: RequestContext, session: ClientHttp2Session): void; + destroy(): void; + setMaxConcurrentStreams(maxConcurrentStreams: number): void; + setDisableConcurrentStreams(disableConcurrentStreams: boolean): void; + private getUrlString; +} diff --git a/amplify/functions/fetchDocuments/node_modules/@smithy/node-http-handler/dist-types/ts3.4/node-http2-connection-pool.d.ts b/amplify/functions/fetchDocuments/node_modules/@smithy/node-http-handler/dist-types/ts3.4/node-http2-connection-pool.d.ts new file mode 100644 index 0000000..e9116cb --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@smithy/node-http-handler/dist-types/ts3.4/node-http2-connection-pool.d.ts @@ -0,0 +1,13 @@ +/// +import { ConnectionPool } from "@smithy/types"; +import { ClientHttp2Session } from "http2"; +export declare class NodeHttp2ConnectionPool implements ConnectionPool { + private sessions; + constructor(sessions?: ClientHttp2Session[]); + poll(): ClientHttp2Session | void; + offerLast(session: ClientHttp2Session): void; + contains(session: ClientHttp2Session): boolean; + remove(session: ClientHttp2Session): void; + [Symbol.iterator](): IterableIterator; + destroy(connection: ClientHttp2Session): void; +} diff --git a/amplify/functions/fetchDocuments/node_modules/@smithy/node-http-handler/dist-types/ts3.4/node-http2-handler.d.ts b/amplify/functions/fetchDocuments/node_modules/@smithy/node-http-handler/dist-types/ts3.4/node-http2-handler.d.ts new file mode 100644 index 0000000..eaa24bd --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@smithy/node-http-handler/dist-types/ts3.4/node-http2-handler.d.ts @@ -0,0 +1,62 @@ +import { HttpHandler, HttpRequest, HttpResponse } from "@smithy/protocol-http"; +import { HttpHandlerOptions, Provider } from "@smithy/types"; +/** + * Represents the http2 options that can be passed to a node http2 client. 
+ * @public + */ +export interface NodeHttp2HandlerOptions { + /** + * The maximum time in milliseconds that a stream may remain idle before it + * is closed. + */ + requestTimeout?: number; + /** + * The maximum time in milliseconds that a session or socket may remain idle + * before it is closed. + * https://nodejs.org/docs/latest-v12.x/api/http2.html#http2_http2session_and_sockets + */ + sessionTimeout?: number; + /** + * Disables processing concurrent streams on a ClientHttp2Session instance. When set + * to true, a new session instance is created for each request to a URL. + * **Default:** false. + * https://nodejs.org/api/http2.html#http2_class_clienthttp2session + */ + disableConcurrentStreams?: boolean; + /** + * Maximum number of concurrent Http2Stream instances per ClientHttp2Session. Each session + * may have up to 2^31-1 Http2Stream instances over its lifetime. + * This value must be greater than or equal to 0. + * https://nodejs.org/api/http2.html#class-http2stream + */ + maxConcurrentStreams?: number; +} +/** + * A request handler using the node:http2 package. + * @public + */ +export declare class NodeHttp2Handler implements HttpHandler { + private config?; + private configProvider; + readonly metadata: { + handlerProtocol: string; + }; + private readonly connectionManager; + /** + * @returns the input if it is an HttpHandler of any class, + * or instantiates a new instance of this handler. + */ + static create(instanceOrOptions?: HttpHandler | NodeHttp2HandlerOptions | Provider): HttpHandler | NodeHttp2Handler; + constructor(options?: NodeHttp2HandlerOptions | Provider); + destroy(): void; + handle(request: HttpRequest, { abortSignal }?: HttpHandlerOptions): Promise<{ + response: HttpResponse; + }>; + updateHttpClientConfig(key: keyof NodeHttp2HandlerOptions, value: NodeHttp2HandlerOptions[typeof key]): void; + httpHandlerConfigs(): NodeHttp2HandlerOptions; + /** + * Destroys a session. + * @param session - the session to destroy. 
+ */ + private destroySession; +} diff --git a/amplify/functions/fetchDocuments/node_modules/@smithy/node-http-handler/dist-types/ts3.4/readable.mock.d.ts b/amplify/functions/fetchDocuments/node_modules/@smithy/node-http-handler/dist-types/ts3.4/readable.mock.d.ts new file mode 100644 index 0000000..f0492d4 --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@smithy/node-http-handler/dist-types/ts3.4/readable.mock.d.ts @@ -0,0 +1,13 @@ +/// +import { Readable, ReadableOptions } from "stream"; +export interface ReadFromBuffersOptions extends ReadableOptions { + buffers: Buffer[]; + errorAfter?: number; +} +export declare class ReadFromBuffers extends Readable { + private buffersToRead; + private numBuffersRead; + private errorAfter; + constructor(options: ReadFromBuffersOptions); + _read(): boolean | undefined; +} diff --git a/amplify/functions/fetchDocuments/node_modules/@smithy/node-http-handler/dist-types/ts3.4/server.mock.d.ts b/amplify/functions/fetchDocuments/node_modules/@smithy/node-http-handler/dist-types/ts3.4/server.mock.d.ts new file mode 100644 index 0000000..6a7e350 --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@smithy/node-http-handler/dist-types/ts3.4/server.mock.d.ts @@ -0,0 +1,12 @@ +import { HttpResponse } from "@smithy/types"; +import { IncomingMessage, Server as HttpServer, ServerResponse } from "http"; +import { Http2Server } from "http2"; +import { Server as HttpsServer } from "https"; +export declare const createResponseFunction: (httpResp: HttpResponse) => (request: IncomingMessage, response: ServerResponse) => void; +export declare const createResponseFunctionWithDelay: (httpResp: HttpResponse, delay: number) => (request: IncomingMessage, response: ServerResponse) => void; +export declare const createContinueResponseFunction: (httpResp: HttpResponse) => (request: IncomingMessage, response: ServerResponse) => void; +export declare const createMockHttpsServer: () => HttpsServer; +export declare const 
createMockHttpServer: () => HttpServer; +export declare const createMockHttp2Server: () => Http2Server; +export declare const createMirrorResponseFunction: (httpResp: HttpResponse) => (request: IncomingMessage, response: ServerResponse) => void; +export declare const getResponseBody: (response: HttpResponse) => Promise; diff --git a/amplify/functions/fetchDocuments/node_modules/@smithy/node-http-handler/dist-types/ts3.4/set-connection-timeout.d.ts b/amplify/functions/fetchDocuments/node_modules/@smithy/node-http-handler/dist-types/ts3.4/set-connection-timeout.d.ts new file mode 100644 index 0000000..96cdb66 --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@smithy/node-http-handler/dist-types/ts3.4/set-connection-timeout.d.ts @@ -0,0 +1,3 @@ +/// +import { ClientRequest } from "http"; +export declare const setConnectionTimeout: (request: ClientRequest, reject: (err: Error) => void, timeoutInMs?: number) => NodeJS.Timeout | number; diff --git a/amplify/functions/fetchDocuments/node_modules/@smithy/node-http-handler/dist-types/ts3.4/set-socket-keep-alive.d.ts b/amplify/functions/fetchDocuments/node_modules/@smithy/node-http-handler/dist-types/ts3.4/set-socket-keep-alive.d.ts new file mode 100644 index 0000000..3bb6ec0 --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@smithy/node-http-handler/dist-types/ts3.4/set-socket-keep-alive.d.ts @@ -0,0 +1,7 @@ +/// +import { ClientRequest } from "http"; +export interface SocketKeepAliveOptions { + keepAlive: boolean; + keepAliveMsecs?: number; +} +export declare const setSocketKeepAlive: (request: ClientRequest, { keepAlive, keepAliveMsecs }: SocketKeepAliveOptions, deferTimeMs?: number) => NodeJS.Timeout | number; diff --git a/amplify/functions/fetchDocuments/node_modules/@smithy/node-http-handler/dist-types/ts3.4/set-socket-timeout.d.ts b/amplify/functions/fetchDocuments/node_modules/@smithy/node-http-handler/dist-types/ts3.4/set-socket-timeout.d.ts new file mode 100644 index 0000000..57f8743 
--- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@smithy/node-http-handler/dist-types/ts3.4/set-socket-timeout.d.ts @@ -0,0 +1,3 @@ +/// +import { ClientRequest } from "http"; +export declare const setSocketTimeout: (request: ClientRequest, reject: (err: Error) => void, timeoutInMs?: number) => NodeJS.Timeout | number; diff --git a/amplify/functions/fetchDocuments/node_modules/@smithy/node-http-handler/dist-types/ts3.4/stream-collector/collector.d.ts b/amplify/functions/fetchDocuments/node_modules/@smithy/node-http-handler/dist-types/ts3.4/stream-collector/collector.d.ts new file mode 100644 index 0000000..c329bd4 --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@smithy/node-http-handler/dist-types/ts3.4/stream-collector/collector.d.ts @@ -0,0 +1,6 @@ +/// +import { Writable } from "stream"; +export declare class Collector extends Writable { + readonly bufferedBytes: Buffer[]; + _write(chunk: Buffer, encoding: string, callback: (err?: Error) => void): void; +} diff --git a/amplify/functions/fetchDocuments/node_modules/@smithy/node-http-handler/dist-types/ts3.4/stream-collector/index.d.ts b/amplify/functions/fetchDocuments/node_modules/@smithy/node-http-handler/dist-types/ts3.4/stream-collector/index.d.ts new file mode 100644 index 0000000..1022a17 --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@smithy/node-http-handler/dist-types/ts3.4/stream-collector/index.d.ts @@ -0,0 +1,6 @@ +import { StreamCollector } from "@smithy/types"; +/** + * @internal + * Converts a stream to a byte array. 
+ */ +export declare const streamCollector: StreamCollector; diff --git a/amplify/functions/fetchDocuments/node_modules/@smithy/node-http-handler/dist-types/ts3.4/stream-collector/readable.mock.d.ts b/amplify/functions/fetchDocuments/node_modules/@smithy/node-http-handler/dist-types/ts3.4/stream-collector/readable.mock.d.ts new file mode 100644 index 0000000..e2c0a4c --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@smithy/node-http-handler/dist-types/ts3.4/stream-collector/readable.mock.d.ts @@ -0,0 +1,13 @@ +/// +import { Readable, ReadableOptions } from "stream"; +export interface ReadFromBuffersOptions extends ReadableOptions { + buffers: Buffer[]; + errorAfter?: number; +} +export declare class ReadFromBuffers extends Readable { + private buffersToRead; + private numBuffersRead; + private errorAfter; + constructor(options: ReadFromBuffersOptions); + _read(size: number): boolean | undefined; +} diff --git a/amplify/functions/fetchDocuments/node_modules/@smithy/node-http-handler/dist-types/ts3.4/timing.d.ts b/amplify/functions/fetchDocuments/node_modules/@smithy/node-http-handler/dist-types/ts3.4/timing.d.ts new file mode 100644 index 0000000..c88dd2f --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@smithy/node-http-handler/dist-types/ts3.4/timing.d.ts @@ -0,0 +1,8 @@ +/** + * @internal + * For test spies. 
+ */ +export declare const timing: { + setTimeout: (cb: (...ignored: any[]) => void | unknown, ms?: number) => number; + clearTimeout: (timeoutId: string | number | undefined | unknown) => void; +}; diff --git a/amplify/functions/fetchDocuments/node_modules/@smithy/node-http-handler/dist-types/ts3.4/write-request-body.d.ts b/amplify/functions/fetchDocuments/node_modules/@smithy/node-http-handler/dist-types/ts3.4/write-request-body.d.ts new file mode 100644 index 0000000..0f13e96 --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@smithy/node-http-handler/dist-types/ts3.4/write-request-body.d.ts @@ -0,0 +1,12 @@ +/// +import { HttpRequest } from "@smithy/types"; +import { ClientRequest } from "http"; +import { ClientHttp2Stream } from "http2"; +/** + * This resolves when writeBody has been called. + * + * @param httpRequest - opened Node.js request. + * @param request - container with the request body. + * @param maxContinueTimeoutMs - time to wait for the continue event. + */ +export declare function writeRequestBody(httpRequest: ClientRequest | ClientHttp2Stream, request: HttpRequest, maxContinueTimeoutMs?: number): Promise; diff --git a/amplify/functions/fetchDocuments/node_modules/@smithy/node-http-handler/dist-types/write-request-body.d.ts b/amplify/functions/fetchDocuments/node_modules/@smithy/node-http-handler/dist-types/write-request-body.d.ts new file mode 100644 index 0000000..0c49e32 --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@smithy/node-http-handler/dist-types/write-request-body.d.ts @@ -0,0 +1,13 @@ +/// +/// +import { HttpRequest } from "@smithy/types"; +import { ClientRequest } from "http"; +import { ClientHttp2Stream } from "http2"; +/** + * This resolves when writeBody has been called. + * + * @param httpRequest - opened Node.js request. + * @param request - container with the request body. + * @param maxContinueTimeoutMs - time to wait for the continue event. 
+ */ +export declare function writeRequestBody(httpRequest: ClientRequest | ClientHttp2Stream, request: HttpRequest, maxContinueTimeoutMs?: number): Promise; diff --git a/amplify/functions/fetchDocuments/node_modules/@smithy/node-http-handler/package.json b/amplify/functions/fetchDocuments/node_modules/@smithy/node-http-handler/package.json new file mode 100644 index 0000000..2e4e1e3 --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@smithy/node-http-handler/package.json @@ -0,0 +1,67 @@ +{ + "name": "@smithy/node-http-handler", + "version": "4.0.4", + "description": "Provides a way to make requests", + "scripts": { + "build": "concurrently 'yarn:build:cjs' 'yarn:build:es' 'yarn:build:types && yarn build:types:downlevel'", + "build:cjs": "node ../../scripts/inline node-http-handler", + "build:es": "yarn g:tsc -p tsconfig.es.json", + "build:types": "yarn g:tsc -p tsconfig.types.json", + "build:types:downlevel": "rimraf dist-types/ts3.4 && downlevel-dts dist-types dist-types/ts3.4", + "stage-release": "rimraf ./.release && yarn pack && mkdir ./.release && tar zxvf ./package.tgz --directory ./.release && rm ./package.tgz", + "clean": "rimraf ./dist-* && rimraf *.tsbuildinfo || exit 0", + "lint": "eslint -c ../../.eslintrc.js \"src/**/*.ts\"", + "format": "prettier --config ../../prettier.config.js --ignore-path ../../.prettierignore --write \"**/*.{ts,md,json}\"", + "extract:docs": "api-extractor run --local", + "test": "yarn g:vitest run", + "test:watch": "yarn g:vitest watch" + }, + "author": { + "name": "AWS SDK for JavaScript Team", + "email": "", + "url": "https://aws.amazon.com/javascript/" + }, + "license": "Apache-2.0", + "main": "./dist-cjs/index.js", + "module": "./dist-es/index.js", + "types": "./dist-types/index.d.ts", + "dependencies": { + "@smithy/abort-controller": "^4.0.2", + "@smithy/protocol-http": "^5.1.0", + "@smithy/querystring-builder": "^4.0.2", + "@smithy/types": "^4.2.0", + "tslib": "^2.6.2" + }, + "devDependencies": { + 
"@types/node": "^18.11.9", + "concurrently": "7.0.0", + "downlevel-dts": "0.10.1", + "rimraf": "3.0.2", + "typedoc": "0.23.23" + }, + "engines": { + "node": ">=18.0.0" + }, + "typesVersions": { + "<4.0": { + "dist-types/*": [ + "dist-types/ts3.4/*" + ] + } + }, + "files": [ + "dist-*/**" + ], + "homepage": "https://github.com/smithy-lang/smithy-typescript/tree/main/packages/node-http-handler", + "repository": { + "type": "git", + "url": "https://github.com/smithy-lang/smithy-typescript.git", + "directory": "packages/node-http-handler" + }, + "typedoc": { + "entryPoint": "src/index.ts" + }, + "publishConfig": { + "directory": ".release/package" + } +} \ No newline at end of file diff --git a/amplify/functions/fetchDocuments/node_modules/@smithy/property-provider/LICENSE b/amplify/functions/fetchDocuments/node_modules/@smithy/property-provider/LICENSE new file mode 100644 index 0000000..7b6491b --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@smithy/property-provider/LICENSE @@ -0,0 +1,201 @@ +Apache License + Version 2.0, January 2004 + http://www.apache.org/licenses/ + + TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION + + 1. Definitions. + + "License" shall mean the terms and conditions for use, reproduction, + and distribution as defined by Sections 1 through 9 of this document. + + "Licensor" shall mean the copyright owner or entity authorized by + the copyright owner that is granting the License. + + "Legal Entity" shall mean the union of the acting entity and all + other entities that control, are controlled by, or are under common + control with that entity. For the purposes of this definition, + "control" means (i) the power, direct or indirect, to cause the + direction or management of such entity, whether by contract or + otherwise, or (ii) ownership of fifty percent (50%) or more of the + outstanding shares, or (iii) beneficial ownership of such entity. 
+ + "You" (or "Your") shall mean an individual or Legal Entity + exercising permissions granted by this License. + + "Source" form shall mean the preferred form for making modifications, + including but not limited to software source code, documentation + source, and configuration files. + + "Object" form shall mean any form resulting from mechanical + transformation or translation of a Source form, including but + not limited to compiled object code, generated documentation, + and conversions to other media types. + + "Work" shall mean the work of authorship, whether in Source or + Object form, made available under the License, as indicated by a + copyright notice that is included in or attached to the work + (an example is provided in the Appendix below). + + "Derivative Works" shall mean any work, whether in Source or Object + form, that is based on (or derived from) the Work and for which the + editorial revisions, annotations, elaborations, or other modifications + represent, as a whole, an original work of authorship. For the purposes + of this License, Derivative Works shall not include works that remain + separable from, or merely link (or bind by name) to the interfaces of, + the Work and Derivative Works thereof. + + "Contribution" shall mean any work of authorship, including + the original version of the Work and any modifications or additions + to that Work or Derivative Works thereof, that is intentionally + submitted to Licensor for inclusion in the Work by the copyright owner + or by an individual or Legal Entity authorized to submit on behalf of + the copyright owner. 
For the purposes of this definition, "submitted" + means any form of electronic, verbal, or written communication sent + to the Licensor or its representatives, including but not limited to + communication on electronic mailing lists, source code control systems, + and issue tracking systems that are managed by, or on behalf of, the + Licensor for the purpose of discussing and improving the Work, but + excluding communication that is conspicuously marked or otherwise + designated in writing by the copyright owner as "Not a Contribution." + + "Contributor" shall mean Licensor and any individual or Legal Entity + on behalf of whom a Contribution has been received by Licensor and + subsequently incorporated within the Work. + + 2. Grant of Copyright License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + copyright license to reproduce, prepare Derivative Works of, + publicly display, publicly perform, sublicense, and distribute the + Work and such Derivative Works in Source or Object form. + + 3. Grant of Patent License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + (except as stated in this section) patent license to make, have made, + use, offer to sell, sell, import, and otherwise transfer the Work, + where such license applies only to those patent claims licensable + by such Contributor that are necessarily infringed by their + Contribution(s) alone or by combination of their Contribution(s) + with the Work to which such Contribution(s) was submitted. 
If You + institute patent litigation against any entity (including a + cross-claim or counterclaim in a lawsuit) alleging that the Work + or a Contribution incorporated within the Work constitutes direct + or contributory patent infringement, then any patent licenses + granted to You under this License for that Work shall terminate + as of the date such litigation is filed. + + 4. Redistribution. You may reproduce and distribute copies of the + Work or Derivative Works thereof in any medium, with or without + modifications, and in Source or Object form, provided that You + meet the following conditions: + + (a) You must give any other recipients of the Work or + Derivative Works a copy of this License; and + + (b) You must cause any modified files to carry prominent notices + stating that You changed the files; and + + (c) You must retain, in the Source form of any Derivative Works + that You distribute, all copyright, patent, trademark, and + attribution notices from the Source form of the Work, + excluding those notices that do not pertain to any part of + the Derivative Works; and + + (d) If the Work includes a "NOTICE" text file as part of its + distribution, then any Derivative Works that You distribute must + include a readable copy of the attribution notices contained + within such NOTICE file, excluding those notices that do not + pertain to any part of the Derivative Works, in at least one + of the following places: within a NOTICE text file distributed + as part of the Derivative Works; within the Source form or + documentation, if provided along with the Derivative Works; or, + within a display generated by the Derivative Works, if and + wherever such third-party notices normally appear. The contents + of the NOTICE file are for informational purposes only and + do not modify the License. 
You may add Your own attribution + notices within Derivative Works that You distribute, alongside + or as an addendum to the NOTICE text from the Work, provided + that such additional attribution notices cannot be construed + as modifying the License. + + You may add Your own copyright statement to Your modifications and + may provide additional or different license terms and conditions + for use, reproduction, or distribution of Your modifications, or + for any such Derivative Works as a whole, provided Your use, + reproduction, and distribution of the Work otherwise complies with + the conditions stated in this License. + + 5. Submission of Contributions. Unless You explicitly state otherwise, + any Contribution intentionally submitted for inclusion in the Work + by You to the Licensor shall be under the terms and conditions of + this License, without any additional terms or conditions. + Notwithstanding the above, nothing herein shall supersede or modify + the terms of any separate license agreement you may have executed + with Licensor regarding such Contributions. + + 6. Trademarks. This License does not grant permission to use the trade + names, trademarks, service marks, or product names of the Licensor, + except as required for reasonable and customary use in describing the + origin of the Work and reproducing the content of the NOTICE file. + + 7. Disclaimer of Warranty. Unless required by applicable law or + agreed to in writing, Licensor provides the Work (and each + Contributor provides its Contributions) on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or + implied, including, without limitation, any warranties or conditions + of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A + PARTICULAR PURPOSE. You are solely responsible for determining the + appropriateness of using or redistributing the Work and assume any + risks associated with Your exercise of permissions under this License. + + 8. 
Limitation of Liability. In no event and under no legal theory, + whether in tort (including negligence), contract, or otherwise, + unless required by applicable law (such as deliberate and grossly + negligent acts) or agreed to in writing, shall any Contributor be + liable to You for damages, including any direct, indirect, special, + incidental, or consequential damages of any character arising as a + result of this License or out of the use or inability to use the + Work (including but not limited to damages for loss of goodwill, + work stoppage, computer failure or malfunction, or any and all + other commercial damages or losses), even if such Contributor + has been advised of the possibility of such damages. + + 9. Accepting Warranty or Additional Liability. While redistributing + the Work or Derivative Works thereof, You may choose to offer, + and charge a fee for, acceptance of support, warranty, indemnity, + or other liability obligations and/or rights consistent with this + License. However, in accepting such obligations, You may act only + on Your own behalf and on Your sole responsibility, not on behalf + of any other Contributor, and only if You agree to indemnify, + defend, and hold each Contributor harmless for any liability + incurred by, or claims asserted against, such Contributor by reason + of your accepting any such warranty or additional liability. + + END OF TERMS AND CONDITIONS + + APPENDIX: How to apply the Apache License to your work. + + To apply the Apache License to your work, attach the following + boilerplate notice, with the fields enclosed by brackets "{}" + replaced with your own identifying information. (Don't include + the brackets!) The text should be enclosed in the appropriate + comment syntax for the file format. We also recommend that a + file or class name and description of purpose be included on the + same "printed page" as the copyright notice for easier + identification within third-party archives. 
+ + Copyright 2018-2020 Amazon.com, Inc. or its affiliates. All Rights Reserved. + + Licensed under the Apache License, Version 2.0 (the "License"); + you may not use this file except in compliance with the License. + You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + + Unless required by applicable law or agreed to in writing, software + distributed under the License is distributed on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + See the License for the specific language governing permissions and + limitations under the License. \ No newline at end of file diff --git a/amplify/functions/fetchDocuments/node_modules/@smithy/property-provider/README.md b/amplify/functions/fetchDocuments/node_modules/@smithy/property-provider/README.md new file mode 100644 index 0000000..b35fafb --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@smithy/property-provider/README.md @@ -0,0 +1,10 @@ +# @smithy/property-provider + +[![NPM version](https://img.shields.io/npm/v/@smithy/property-provider/latest.svg)](https://www.npmjs.com/package/@smithy/property-provider) +[![NPM downloads](https://img.shields.io/npm/dm/@smithy/property-provider.svg)](https://www.npmjs.com/package/@smithy/property-provider) + +> An internal package + +## Usage + +You probably shouldn't, at least directly. 
diff --git a/amplify/functions/fetchDocuments/node_modules/@smithy/property-provider/dist-cjs/CredentialsProviderError.js b/amplify/functions/fetchDocuments/node_modules/@smithy/property-provider/dist-cjs/CredentialsProviderError.js new file mode 100644 index 0000000..532e610 --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@smithy/property-provider/dist-cjs/CredentialsProviderError.js @@ -0,0 +1 @@ +module.exports = require("./index.js"); \ No newline at end of file diff --git a/amplify/functions/fetchDocuments/node_modules/@smithy/property-provider/dist-cjs/ProviderError.js b/amplify/functions/fetchDocuments/node_modules/@smithy/property-provider/dist-cjs/ProviderError.js new file mode 100644 index 0000000..532e610 --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@smithy/property-provider/dist-cjs/ProviderError.js @@ -0,0 +1 @@ +module.exports = require("./index.js"); \ No newline at end of file diff --git a/amplify/functions/fetchDocuments/node_modules/@smithy/property-provider/dist-cjs/TokenProviderError.js b/amplify/functions/fetchDocuments/node_modules/@smithy/property-provider/dist-cjs/TokenProviderError.js new file mode 100644 index 0000000..532e610 --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@smithy/property-provider/dist-cjs/TokenProviderError.js @@ -0,0 +1 @@ +module.exports = require("./index.js"); \ No newline at end of file diff --git a/amplify/functions/fetchDocuments/node_modules/@smithy/property-provider/dist-cjs/chain.js b/amplify/functions/fetchDocuments/node_modules/@smithy/property-provider/dist-cjs/chain.js new file mode 100644 index 0000000..532e610 --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@smithy/property-provider/dist-cjs/chain.js @@ -0,0 +1 @@ +module.exports = require("./index.js"); \ No newline at end of file diff --git a/amplify/functions/fetchDocuments/node_modules/@smithy/property-provider/dist-cjs/fromStatic.js 
b/amplify/functions/fetchDocuments/node_modules/@smithy/property-provider/dist-cjs/fromStatic.js new file mode 100644 index 0000000..532e610 --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@smithy/property-provider/dist-cjs/fromStatic.js @@ -0,0 +1 @@ +module.exports = require("./index.js"); \ No newline at end of file diff --git a/amplify/functions/fetchDocuments/node_modules/@smithy/property-provider/dist-cjs/index.js b/amplify/functions/fetchDocuments/node_modules/@smithy/property-provider/dist-cjs/index.js new file mode 100644 index 0000000..b0fa627 --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@smithy/property-provider/dist-cjs/index.js @@ -0,0 +1,170 @@ +var __defProp = Object.defineProperty; +var __getOwnPropDesc = Object.getOwnPropertyDescriptor; +var __getOwnPropNames = Object.getOwnPropertyNames; +var __hasOwnProp = Object.prototype.hasOwnProperty; +var __name = (target, value) => __defProp(target, "name", { value, configurable: true }); +var __export = (target, all) => { + for (var name in all) + __defProp(target, name, { get: all[name], enumerable: true }); +}; +var __copyProps = (to, from, except, desc) => { + if (from && typeof from === "object" || typeof from === "function") { + for (let key of __getOwnPropNames(from)) + if (!__hasOwnProp.call(to, key) && key !== except) + __defProp(to, key, { get: () => from[key], enumerable: !(desc = __getOwnPropDesc(from, key)) || desc.enumerable }); + } + return to; +}; +var __toCommonJS = (mod) => __copyProps(__defProp({}, "__esModule", { value: true }), mod); + +// src/index.ts +var src_exports = {}; +__export(src_exports, { + CredentialsProviderError: () => CredentialsProviderError, + ProviderError: () => ProviderError, + TokenProviderError: () => TokenProviderError, + chain: () => chain, + fromStatic: () => fromStatic, + memoize: () => memoize +}); +module.exports = __toCommonJS(src_exports); + +// src/ProviderError.ts +var ProviderError = class _ProviderError extends 
Error { + constructor(message, options = true) { + let logger; + let tryNextLink = true; + if (typeof options === "boolean") { + logger = void 0; + tryNextLink = options; + } else if (options != null && typeof options === "object") { + logger = options.logger; + tryNextLink = options.tryNextLink ?? true; + } + super(message); + this.name = "ProviderError"; + this.tryNextLink = tryNextLink; + Object.setPrototypeOf(this, _ProviderError.prototype); + logger?.debug?.(`@smithy/property-provider ${tryNextLink ? "->" : "(!)"} ${message}`); + } + static { + __name(this, "ProviderError"); + } + /** + * @deprecated use new operator. + */ + static from(error, options = true) { + return Object.assign(new this(error.message, options), error); + } +}; + +// src/CredentialsProviderError.ts +var CredentialsProviderError = class _CredentialsProviderError extends ProviderError { + /** + * @override + */ + constructor(message, options = true) { + super(message, options); + this.name = "CredentialsProviderError"; + Object.setPrototypeOf(this, _CredentialsProviderError.prototype); + } + static { + __name(this, "CredentialsProviderError"); + } +}; + +// src/TokenProviderError.ts +var TokenProviderError = class _TokenProviderError extends ProviderError { + /** + * @override + */ + constructor(message, options = true) { + super(message, options); + this.name = "TokenProviderError"; + Object.setPrototypeOf(this, _TokenProviderError.prototype); + } + static { + __name(this, "TokenProviderError"); + } +}; + +// src/chain.ts +var chain = /* @__PURE__ */ __name((...providers) => async () => { + if (providers.length === 0) { + throw new ProviderError("No providers in chain"); + } + let lastProviderError; + for (const provider of providers) { + try { + const credentials = await provider(); + return credentials; + } catch (err) { + lastProviderError = err; + if (err?.tryNextLink) { + continue; + } + throw err; + } + } + throw lastProviderError; +}, "chain"); + +// src/fromStatic.ts +var 
fromStatic = /* @__PURE__ */ __name((staticValue) => () => Promise.resolve(staticValue), "fromStatic"); + +// src/memoize.ts +var memoize = /* @__PURE__ */ __name((provider, isExpired, requiresRefresh) => { + let resolved; + let pending; + let hasResult; + let isConstant = false; + const coalesceProvider = /* @__PURE__ */ __name(async () => { + if (!pending) { + pending = provider(); + } + try { + resolved = await pending; + hasResult = true; + isConstant = false; + } finally { + pending = void 0; + } + return resolved; + }, "coalesceProvider"); + if (isExpired === void 0) { + return async (options) => { + if (!hasResult || options?.forceRefresh) { + resolved = await coalesceProvider(); + } + return resolved; + }; + } + return async (options) => { + if (!hasResult || options?.forceRefresh) { + resolved = await coalesceProvider(); + } + if (isConstant) { + return resolved; + } + if (requiresRefresh && !requiresRefresh(resolved)) { + isConstant = true; + return resolved; + } + if (isExpired(resolved)) { + await coalesceProvider(); + return resolved; + } + return resolved; + }; +}, "memoize"); +// Annotate the CommonJS export names for ESM import in node: + +0 && (module.exports = { + CredentialsProviderError, + ProviderError, + TokenProviderError, + chain, + fromStatic, + memoize +}); + diff --git a/amplify/functions/fetchDocuments/node_modules/@smithy/property-provider/dist-cjs/memoize.js b/amplify/functions/fetchDocuments/node_modules/@smithy/property-provider/dist-cjs/memoize.js new file mode 100644 index 0000000..532e610 --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@smithy/property-provider/dist-cjs/memoize.js @@ -0,0 +1 @@ +module.exports = require("./index.js"); \ No newline at end of file diff --git a/amplify/functions/fetchDocuments/node_modules/@smithy/property-provider/dist-es/CredentialsProviderError.js b/amplify/functions/fetchDocuments/node_modules/@smithy/property-provider/dist-es/CredentialsProviderError.js new file mode 100644 
index 0000000..cec1f9e --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@smithy/property-provider/dist-es/CredentialsProviderError.js @@ -0,0 +1,8 @@ +import { ProviderError } from "./ProviderError"; +export class CredentialsProviderError extends ProviderError { + constructor(message, options = true) { + super(message, options); + this.name = "CredentialsProviderError"; + Object.setPrototypeOf(this, CredentialsProviderError.prototype); + } +} diff --git a/amplify/functions/fetchDocuments/node_modules/@smithy/property-provider/dist-es/ProviderError.js b/amplify/functions/fetchDocuments/node_modules/@smithy/property-provider/dist-es/ProviderError.js new file mode 100644 index 0000000..e0db2b8 --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@smithy/property-provider/dist-es/ProviderError.js @@ -0,0 +1,22 @@ +export class ProviderError extends Error { + constructor(message, options = true) { + let logger; + let tryNextLink = true; + if (typeof options === "boolean") { + logger = undefined; + tryNextLink = options; + } + else if (options != null && typeof options === "object") { + logger = options.logger; + tryNextLink = options.tryNextLink ?? true; + } + super(message); + this.name = "ProviderError"; + this.tryNextLink = tryNextLink; + Object.setPrototypeOf(this, ProviderError.prototype); + logger?.debug?.(`@smithy/property-provider ${tryNextLink ? 
"->" : "(!)"} ${message}`); + } + static from(error, options = true) { + return Object.assign(new this(error.message, options), error); + } +} diff --git a/amplify/functions/fetchDocuments/node_modules/@smithy/property-provider/dist-es/TokenProviderError.js b/amplify/functions/fetchDocuments/node_modules/@smithy/property-provider/dist-es/TokenProviderError.js new file mode 100644 index 0000000..f0e75b1 --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@smithy/property-provider/dist-es/TokenProviderError.js @@ -0,0 +1,8 @@ +import { ProviderError } from "./ProviderError"; +export class TokenProviderError extends ProviderError { + constructor(message, options = true) { + super(message, options); + this.name = "TokenProviderError"; + Object.setPrototypeOf(this, TokenProviderError.prototype); + } +} diff --git a/amplify/functions/fetchDocuments/node_modules/@smithy/property-provider/dist-es/chain.js b/amplify/functions/fetchDocuments/node_modules/@smithy/property-provider/dist-es/chain.js new file mode 100644 index 0000000..c389f7f --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@smithy/property-provider/dist-es/chain.js @@ -0,0 +1,21 @@ +import { ProviderError } from "./ProviderError"; +export const chain = (...providers) => async () => { + if (providers.length === 0) { + throw new ProviderError("No providers in chain"); + } + let lastProviderError; + for (const provider of providers) { + try { + const credentials = await provider(); + return credentials; + } + catch (err) { + lastProviderError = err; + if (err?.tryNextLink) { + continue; + } + throw err; + } + } + throw lastProviderError; +}; diff --git a/amplify/functions/fetchDocuments/node_modules/@smithy/property-provider/dist-es/fromStatic.js b/amplify/functions/fetchDocuments/node_modules/@smithy/property-provider/dist-es/fromStatic.js new file mode 100644 index 0000000..67da7a7 --- /dev/null +++ 
b/amplify/functions/fetchDocuments/node_modules/@smithy/property-provider/dist-es/fromStatic.js @@ -0,0 +1 @@ +export const fromStatic = (staticValue) => () => Promise.resolve(staticValue); diff --git a/amplify/functions/fetchDocuments/node_modules/@smithy/property-provider/dist-es/index.js b/amplify/functions/fetchDocuments/node_modules/@smithy/property-provider/dist-es/index.js new file mode 100644 index 0000000..15d14e5 --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@smithy/property-provider/dist-es/index.js @@ -0,0 +1,6 @@ +export * from "./CredentialsProviderError"; +export * from "./ProviderError"; +export * from "./TokenProviderError"; +export * from "./chain"; +export * from "./fromStatic"; +export * from "./memoize"; diff --git a/amplify/functions/fetchDocuments/node_modules/@smithy/property-provider/dist-es/memoize.js b/amplify/functions/fetchDocuments/node_modules/@smithy/property-provider/dist-es/memoize.js new file mode 100644 index 0000000..e04839a --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@smithy/property-provider/dist-es/memoize.js @@ -0,0 +1,45 @@ +export const memoize = (provider, isExpired, requiresRefresh) => { + let resolved; + let pending; + let hasResult; + let isConstant = false; + const coalesceProvider = async () => { + if (!pending) { + pending = provider(); + } + try { + resolved = await pending; + hasResult = true; + isConstant = false; + } + finally { + pending = undefined; + } + return resolved; + }; + if (isExpired === undefined) { + return async (options) => { + if (!hasResult || options?.forceRefresh) { + resolved = await coalesceProvider(); + } + return resolved; + }; + } + return async (options) => { + if (!hasResult || options?.forceRefresh) { + resolved = await coalesceProvider(); + } + if (isConstant) { + return resolved; + } + if (requiresRefresh && !requiresRefresh(resolved)) { + isConstant = true; + return resolved; + } + if (isExpired(resolved)) { + await coalesceProvider(); + 
return resolved; + } + return resolved; + }; +}; diff --git a/amplify/functions/fetchDocuments/node_modules/@smithy/property-provider/dist-types/CredentialsProviderError.d.ts b/amplify/functions/fetchDocuments/node_modules/@smithy/property-provider/dist-types/CredentialsProviderError.d.ts new file mode 100644 index 0000000..7955dc1 --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@smithy/property-provider/dist-types/CredentialsProviderError.d.ts @@ -0,0 +1,30 @@ +import { ProviderError, ProviderErrorOptionsType } from "./ProviderError"; +/** + * @public + * + * An error representing a failure of an individual credential provider. + * + * This error class has special meaning to the {@link chain} method. If a + * provider in the chain is rejected with an error, the chain will only proceed + * to the next provider if the value of the `tryNextLink` property on the error + * is truthy. This allows individual providers to halt the chain and also + * ensures the chain will stop if an entirely unexpected error is encountered. + */ +export declare class CredentialsProviderError extends ProviderError { + name: string; + /** + * @override + * @deprecated constructor should be given a logger. + */ + constructor(message: string); + /** + * @override + * @deprecated constructor should be given a logger. + */ + constructor(message: string, tryNextLink: boolean | undefined); + /** + * @override + * This signature is preferred for logging capability. 
+ */ + constructor(message: string, options: ProviderErrorOptionsType); +} diff --git a/amplify/functions/fetchDocuments/node_modules/@smithy/property-provider/dist-types/ProviderError.d.ts b/amplify/functions/fetchDocuments/node_modules/@smithy/property-provider/dist-types/ProviderError.d.ts new file mode 100644 index 0000000..b87b014 --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@smithy/property-provider/dist-types/ProviderError.d.ts @@ -0,0 +1,39 @@ +import { Logger } from "@smithy/types"; +/** + * @public + */ +export type ProviderErrorOptionsType = { + tryNextLink?: boolean | undefined; + logger?: Logger; +}; +/** + * @public + * + * An error representing a failure of an individual provider. + * + * This error class has special meaning to the {@link chain} method. If a + * provider in the chain is rejected with an error, the chain will only proceed + * to the next provider if the value of the `tryNextLink` property on the error + * is truthy. This allows individual providers to halt the chain and also + * ensures the chain will stop if an entirely unexpected error is encountered. + */ +export declare class ProviderError extends Error { + name: string; + readonly tryNextLink: boolean; + /** + * @deprecated constructor should be given a logger. + */ + constructor(message: string); + /** + * @deprecated constructor should be given a logger. + */ + constructor(message: string, tryNextLink: boolean | undefined); + /** + * This signature is preferred for logging capability. + */ + constructor(message: string, options: ProviderErrorOptionsType); + /** + * @deprecated use new operator. 
+ */ + static from(error: Error, options?: boolean | ProviderErrorOptionsType): ProviderError; +} diff --git a/amplify/functions/fetchDocuments/node_modules/@smithy/property-provider/dist-types/TokenProviderError.d.ts b/amplify/functions/fetchDocuments/node_modules/@smithy/property-provider/dist-types/TokenProviderError.d.ts new file mode 100644 index 0000000..a2f9dd6 --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@smithy/property-provider/dist-types/TokenProviderError.d.ts @@ -0,0 +1,30 @@ +import { ProviderError, ProviderErrorOptionsType } from "./ProviderError"; +/** + * @public + * + * An error representing a failure of an individual token provider. + * + * This error class has special meaning to the {@link chain} method. If a + * provider in the chain is rejected with an error, the chain will only proceed + * to the next provider if the value of the `tryNextLink` property on the error + * is truthy. This allows individual providers to halt the chain and also + * ensures the chain will stop if an entirely unexpected error is encountered. + */ +export declare class TokenProviderError extends ProviderError { + name: string; + /** + * @override + * @deprecated constructor should be given a logger. + */ + constructor(message: string); + /** + * @override + * @deprecated constructor should be given a logger. + */ + constructor(message: string, tryNextLink: boolean | undefined); + /** + * @override + * This signature is preferred for logging capability. 
+ */ + constructor(message: string, options: ProviderErrorOptionsType); +} diff --git a/amplify/functions/fetchDocuments/node_modules/@smithy/property-provider/dist-types/chain.d.ts b/amplify/functions/fetchDocuments/node_modules/@smithy/property-provider/dist-types/chain.d.ts new file mode 100644 index 0000000..168df5c --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@smithy/property-provider/dist-types/chain.d.ts @@ -0,0 +1,13 @@ +import { Provider } from "@smithy/types"; +/** + * @internal + * + * Compose a single credential provider function from multiple credential + * providers. The first provider in the argument list will always be invoked; + * subsequent providers in the list will be invoked in the order in which the + * were received if the preceding provider did not successfully resolve. + * + * If no providers were received or no provider resolves successfully, the + * returned promise will be rejected. + */ +export declare const chain: (...providers: Provider[]) => Provider; diff --git a/amplify/functions/fetchDocuments/node_modules/@smithy/property-provider/dist-types/fromStatic.d.ts b/amplify/functions/fetchDocuments/node_modules/@smithy/property-provider/dist-types/fromStatic.d.ts new file mode 100644 index 0000000..f58bece --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@smithy/property-provider/dist-types/fromStatic.d.ts @@ -0,0 +1,5 @@ +import { Provider } from "@smithy/types"; +/** + * @internal + */ +export declare const fromStatic: (staticValue: T) => Provider; diff --git a/amplify/functions/fetchDocuments/node_modules/@smithy/property-provider/dist-types/index.d.ts b/amplify/functions/fetchDocuments/node_modules/@smithy/property-provider/dist-types/index.d.ts new file mode 100644 index 0000000..6326994 --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@smithy/property-provider/dist-types/index.d.ts @@ -0,0 +1,24 @@ +/** + * @internal + */ +export * from "./CredentialsProviderError"; +/** + * 
@internal + */ +export * from "./ProviderError"; +/** + * @internal + */ +export * from "./TokenProviderError"; +/** + * @internal + */ +export * from "./chain"; +/** + * @internal + */ +export * from "./fromStatic"; +/** + * @internal + */ +export * from "./memoize"; diff --git a/amplify/functions/fetchDocuments/node_modules/@smithy/property-provider/dist-types/memoize.d.ts b/amplify/functions/fetchDocuments/node_modules/@smithy/property-provider/dist-types/memoize.d.ts new file mode 100644 index 0000000..ce197c0 --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@smithy/property-provider/dist-types/memoize.d.ts @@ -0,0 +1,40 @@ +import { MemoizedProvider, Provider } from "@smithy/types"; +interface MemoizeOverload { + /** + * + * Decorates a provider function with either static memoization. + * + * To create a statically memoized provider, supply a provider as the only + * argument to this function. The provider will be invoked once, and all + * invocations of the provider returned by `memoize` will return the same + * promise object. + * + * @param provider The provider whose result should be cached indefinitely. + */ + (provider: Provider): MemoizedProvider; + /** + * Decorates a provider function with refreshing memoization. + * + * @param provider The provider whose result should be cached. + * @param isExpired A function that will evaluate the resolved value and + * determine if it is expired. For example, when + * memoizing AWS credential providers, this function + * should return `true` when the credential's + * expiration is in the past (or very near future) and + * `false` otherwise. + * @param requiresRefresh A function that will evaluate the resolved value and + * determine if it represents static value or one that + * will eventually need to be refreshed. 
For example, + * AWS credentials that have no defined expiration will + * never need to be refreshed, so this function would + * return `true` if the credentials resolved by the + * underlying provider had an expiration and `false` + * otherwise. + */ + (provider: Provider, isExpired: (resolved: T) => boolean, requiresRefresh?: (resolved: T) => boolean): MemoizedProvider; +} +/** + * @internal + */ +export declare const memoize: MemoizeOverload; +export {}; diff --git a/amplify/functions/fetchDocuments/node_modules/@smithy/property-provider/dist-types/ts3.4/CredentialsProviderError.d.ts b/amplify/functions/fetchDocuments/node_modules/@smithy/property-provider/dist-types/ts3.4/CredentialsProviderError.d.ts new file mode 100644 index 0000000..11e4aea --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@smithy/property-provider/dist-types/ts3.4/CredentialsProviderError.d.ts @@ -0,0 +1,30 @@ +import { ProviderError, ProviderErrorOptionsType } from "./ProviderError"; +/** + * @public + * + * An error representing a failure of an individual credential provider. + * + * This error class has special meaning to the {@link chain} method. If a + * provider in the chain is rejected with an error, the chain will only proceed + * to the next provider if the value of the `tryNextLink` property on the error + * is truthy. This allows individual providers to halt the chain and also + * ensures the chain will stop if an entirely unexpected error is encountered. + */ +export declare class CredentialsProviderError extends ProviderError { + name: string; + /** + * @override + * @deprecated constructor should be given a logger. + */ + constructor(message: string); + /** + * @override + * @deprecated constructor should be given a logger. + */ + constructor(message: string, tryNextLink: boolean | undefined); + /** + * @override + * This signature is preferred for logging capability. 
+ */ + constructor(message: string, options: ProviderErrorOptionsType); +} diff --git a/amplify/functions/fetchDocuments/node_modules/@smithy/property-provider/dist-types/ts3.4/ProviderError.d.ts b/amplify/functions/fetchDocuments/node_modules/@smithy/property-provider/dist-types/ts3.4/ProviderError.d.ts new file mode 100644 index 0000000..daf499c --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@smithy/property-provider/dist-types/ts3.4/ProviderError.d.ts @@ -0,0 +1,39 @@ +import { Logger } from "@smithy/types"; +/** + * @public + */ +export type ProviderErrorOptionsType = { + tryNextLink?: boolean | undefined; + logger?: Logger; +}; +/** + * @public + * + * An error representing a failure of an individual provider. + * + * This error class has special meaning to the {@link chain} method. If a + * provider in the chain is rejected with an error, the chain will only proceed + * to the next provider if the value of the `tryNextLink` property on the error + * is truthy. This allows individual providers to halt the chain and also + * ensures the chain will stop if an entirely unexpected error is encountered. + */ +export declare class ProviderError extends Error { + name: string; + readonly tryNextLink: boolean; + /** + * @deprecated constructor should be given a logger. + */ + constructor(message: string); + /** + * @deprecated constructor should be given a logger. + */ + constructor(message: string, tryNextLink: boolean | undefined); + /** + * This signature is preferred for logging capability. + */ + constructor(message: string, options: ProviderErrorOptionsType); + /** + * @deprecated use new operator. 
+ */ + static from(error: Error, options?: boolean | ProviderErrorOptionsType): ProviderError; +} diff --git a/amplify/functions/fetchDocuments/node_modules/@smithy/property-provider/dist-types/ts3.4/TokenProviderError.d.ts b/amplify/functions/fetchDocuments/node_modules/@smithy/property-provider/dist-types/ts3.4/TokenProviderError.d.ts new file mode 100644 index 0000000..6f67fd5 --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@smithy/property-provider/dist-types/ts3.4/TokenProviderError.d.ts @@ -0,0 +1,30 @@ +import { ProviderError, ProviderErrorOptionsType } from "./ProviderError"; +/** + * @public + * + * An error representing a failure of an individual token provider. + * + * This error class has special meaning to the {@link chain} method. If a + * provider in the chain is rejected with an error, the chain will only proceed + * to the next provider if the value of the `tryNextLink` property on the error + * is truthy. This allows individual providers to halt the chain and also + * ensures the chain will stop if an entirely unexpected error is encountered. + */ +export declare class TokenProviderError extends ProviderError { + name: string; + /** + * @override + * @deprecated constructor should be given a logger. + */ + constructor(message: string); + /** + * @override + * @deprecated constructor should be given a logger. + */ + constructor(message: string, tryNextLink: boolean | undefined); + /** + * @override + * This signature is preferred for logging capability. 
+ */ + constructor(message: string, options: ProviderErrorOptionsType); +} diff --git a/amplify/functions/fetchDocuments/node_modules/@smithy/property-provider/dist-types/ts3.4/chain.d.ts b/amplify/functions/fetchDocuments/node_modules/@smithy/property-provider/dist-types/ts3.4/chain.d.ts new file mode 100644 index 0000000..44390b8 --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@smithy/property-provider/dist-types/ts3.4/chain.d.ts @@ -0,0 +1,13 @@ +import { Provider } from "@smithy/types"; +/** + * @internal + * + * Compose a single credential provider function from multiple credential + * providers. The first provider in the argument list will always be invoked; + * subsequent providers in the list will be invoked in the order in which the + * were received if the preceding provider did not successfully resolve. + * + * If no providers were received or no provider resolves successfully, the + * returned promise will be rejected. + */ +export declare const chain: (...providers: Provider[]) => Provider; diff --git a/amplify/functions/fetchDocuments/node_modules/@smithy/property-provider/dist-types/ts3.4/fromStatic.d.ts b/amplify/functions/fetchDocuments/node_modules/@smithy/property-provider/dist-types/ts3.4/fromStatic.d.ts new file mode 100644 index 0000000..0df6309 --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@smithy/property-provider/dist-types/ts3.4/fromStatic.d.ts @@ -0,0 +1,5 @@ +import { Provider } from "@smithy/types"; +/** + * @internal + */ +export declare const fromStatic: (staticValue: T) => Provider; diff --git a/amplify/functions/fetchDocuments/node_modules/@smithy/property-provider/dist-types/ts3.4/index.d.ts b/amplify/functions/fetchDocuments/node_modules/@smithy/property-provider/dist-types/ts3.4/index.d.ts new file mode 100644 index 0000000..e28099d --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@smithy/property-provider/dist-types/ts3.4/index.d.ts @@ -0,0 +1,24 @@ +/** + * @internal + */ 
+export * from "./CredentialsProviderError"; +/** + * @internal + */ +export * from "./ProviderError"; +/** + * @internal + */ +export * from "./TokenProviderError"; +/** + * @internal + */ +export * from "./chain"; +/** + * @internal + */ +export * from "./fromStatic"; +/** + * @internal + */ +export * from "./memoize"; diff --git a/amplify/functions/fetchDocuments/node_modules/@smithy/property-provider/dist-types/ts3.4/memoize.d.ts b/amplify/functions/fetchDocuments/node_modules/@smithy/property-provider/dist-types/ts3.4/memoize.d.ts new file mode 100644 index 0000000..29ce53d --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@smithy/property-provider/dist-types/ts3.4/memoize.d.ts @@ -0,0 +1,40 @@ +import { MemoizedProvider, Provider } from "@smithy/types"; +interface MemoizeOverload { + /** + * + * Decorates a provider function with either static memoization. + * + * To create a statically memoized provider, supply a provider as the only + * argument to this function. The provider will be invoked once, and all + * invocations of the provider returned by `memoize` will return the same + * promise object. + * + * @param provider The provider whose result should be cached indefinitely. + */ + (provider: Provider): MemoizedProvider; + /** + * Decorates a provider function with refreshing memoization. + * + * @param provider The provider whose result should be cached. + * @param isExpired A function that will evaluate the resolved value and + * determine if it is expired. For example, when + * memoizing AWS credential providers, this function + * should return `true` when the credential's + * expiration is in the past (or very near future) and + * `false` otherwise. + * @param requiresRefresh A function that will evaluate the resolved value and + * determine if it represents static value or one that + * will eventually need to be refreshed. 
For example, + * AWS credentials that have no defined expiration will + * never need to be refreshed, so this function would + * return `true` if the credentials resolved by the + * underlying provider had an expiration and `false` + * otherwise. + */ + (provider: Provider, isExpired: (resolved: T) => boolean, requiresRefresh?: (resolved: T) => boolean): MemoizedProvider; +} +/** + * @internal + */ +export declare const memoize: MemoizeOverload; +export {}; diff --git a/amplify/functions/fetchDocuments/node_modules/@smithy/property-provider/package.json b/amplify/functions/fetchDocuments/node_modules/@smithy/property-provider/package.json new file mode 100644 index 0000000..b2e7fc6 --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@smithy/property-provider/package.json @@ -0,0 +1,60 @@ +{ + "name": "@smithy/property-provider", + "version": "4.0.2", + "scripts": { + "build": "concurrently 'yarn:build:cjs' 'yarn:build:es' 'yarn:build:types && yarn build:types:downlevel'", + "build:cjs": "node ../../scripts/inline property-provider", + "build:es": "yarn g:tsc -p tsconfig.es.json", + "build:types": "yarn g:tsc -p tsconfig.types.json", + "build:types:downlevel": "rimraf dist-types/ts3.4 && downlevel-dts dist-types dist-types/ts3.4", + "stage-release": "rimraf ./.release && yarn pack && mkdir ./.release && tar zxvf ./package.tgz --directory ./.release && rm ./package.tgz", + "clean": "rimraf ./dist-* && rimraf *.tsbuildinfo || exit 0", + "lint": "eslint -c ../../.eslintrc.js \"src/**/*.ts\"", + "format": "prettier --config ../../prettier.config.js --ignore-path ../../.prettierignore --write \"**/*.{ts,md,json}\"", + "test": "yarn g:vitest run", + "test:watch": "yarn g:vitest watch" + }, + "main": "./dist-cjs/index.js", + "module": "./dist-es/index.js", + "types": "./dist-types/index.d.ts", + "author": { + "name": "AWS SDK for JavaScript Team", + "url": "https://aws.amazon.com/javascript/" + }, + "license": "Apache-2.0", + "dependencies": { + 
"@smithy/types": "^4.2.0", + "tslib": "^2.6.2" + }, + "engines": { + "node": ">=18.0.0" + }, + "typesVersions": { + "<4.0": { + "dist-types/*": [ + "dist-types/ts3.4/*" + ] + } + }, + "files": [ + "dist-*/**" + ], + "homepage": "https://github.com/smithy-lang/smithy-typescript/tree/main/packages/property-provider", + "repository": { + "type": "git", + "url": "https://github.com/smithy-lang/smithy-typescript.git", + "directory": "packages/property-provider" + }, + "devDependencies": { + "concurrently": "7.0.0", + "downlevel-dts": "0.10.1", + "rimraf": "3.0.2", + "typedoc": "0.23.23" + }, + "typedoc": { + "entryPoint": "src/index.ts" + }, + "publishConfig": { + "directory": ".release/package" + } +} \ No newline at end of file diff --git a/amplify/functions/fetchDocuments/node_modules/@smithy/protocol-http/LICENSE b/amplify/functions/fetchDocuments/node_modules/@smithy/protocol-http/LICENSE new file mode 100644 index 0000000..e907b58 --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@smithy/protocol-http/LICENSE @@ -0,0 +1,201 @@ + Apache License + Version 2.0, January 2004 + http://www.apache.org/licenses/ + + TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION + + 1. Definitions. + + "License" shall mean the terms and conditions for use, reproduction, + and distribution as defined by Sections 1 through 9 of this document. + + "Licensor" shall mean the copyright owner or entity authorized by + the copyright owner that is granting the License. + + "Legal Entity" shall mean the union of the acting entity and all + other entities that control, are controlled by, or are under common + control with that entity. For the purposes of this definition, + "control" means (i) the power, direct or indirect, to cause the + direction or management of such entity, whether by contract or + otherwise, or (ii) ownership of fifty percent (50%) or more of the + outstanding shares, or (iii) beneficial ownership of such entity. 
+ + "You" (or "Your") shall mean an individual or Legal Entity + exercising permissions granted by this License. + + "Source" form shall mean the preferred form for making modifications, + including but not limited to software source code, documentation + source, and configuration files. + + "Object" form shall mean any form resulting from mechanical + transformation or translation of a Source form, including but + not limited to compiled object code, generated documentation, + and conversions to other media types. + + "Work" shall mean the work of authorship, whether in Source or + Object form, made available under the License, as indicated by a + copyright notice that is included in or attached to the work + (an example is provided in the Appendix below). + + "Derivative Works" shall mean any work, whether in Source or Object + form, that is based on (or derived from) the Work and for which the + editorial revisions, annotations, elaborations, or other modifications + represent, as a whole, an original work of authorship. For the purposes + of this License, Derivative Works shall not include works that remain + separable from, or merely link (or bind by name) to the interfaces of, + the Work and Derivative Works thereof. + + "Contribution" shall mean any work of authorship, including + the original version of the Work and any modifications or additions + to that Work or Derivative Works thereof, that is intentionally + submitted to Licensor for inclusion in the Work by the copyright owner + or by an individual or Legal Entity authorized to submit on behalf of + the copyright owner. 
For the purposes of this definition, "submitted" + means any form of electronic, verbal, or written communication sent + to the Licensor or its representatives, including but not limited to + communication on electronic mailing lists, source code control systems, + and issue tracking systems that are managed by, or on behalf of, the + Licensor for the purpose of discussing and improving the Work, but + excluding communication that is conspicuously marked or otherwise + designated in writing by the copyright owner as "Not a Contribution." + + "Contributor" shall mean Licensor and any individual or Legal Entity + on behalf of whom a Contribution has been received by Licensor and + subsequently incorporated within the Work. + + 2. Grant of Copyright License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + copyright license to reproduce, prepare Derivative Works of, + publicly display, publicly perform, sublicense, and distribute the + Work and such Derivative Works in Source or Object form. + + 3. Grant of Patent License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + (except as stated in this section) patent license to make, have made, + use, offer to sell, sell, import, and otherwise transfer the Work, + where such license applies only to those patent claims licensable + by such Contributor that are necessarily infringed by their + Contribution(s) alone or by combination of their Contribution(s) + with the Work to which such Contribution(s) was submitted. 
If You + institute patent litigation against any entity (including a + cross-claim or counterclaim in a lawsuit) alleging that the Work + or a Contribution incorporated within the Work constitutes direct + or contributory patent infringement, then any patent licenses + granted to You under this License for that Work shall terminate + as of the date such litigation is filed. + + 4. Redistribution. You may reproduce and distribute copies of the + Work or Derivative Works thereof in any medium, with or without + modifications, and in Source or Object form, provided that You + meet the following conditions: + + (a) You must give any other recipients of the Work or + Derivative Works a copy of this License; and + + (b) You must cause any modified files to carry prominent notices + stating that You changed the files; and + + (c) You must retain, in the Source form of any Derivative Works + that You distribute, all copyright, patent, trademark, and + attribution notices from the Source form of the Work, + excluding those notices that do not pertain to any part of + the Derivative Works; and + + (d) If the Work includes a "NOTICE" text file as part of its + distribution, then any Derivative Works that You distribute must + include a readable copy of the attribution notices contained + within such NOTICE file, excluding those notices that do not + pertain to any part of the Derivative Works, in at least one + of the following places: within a NOTICE text file distributed + as part of the Derivative Works; within the Source form or + documentation, if provided along with the Derivative Works; or, + within a display generated by the Derivative Works, if and + wherever such third-party notices normally appear. The contents + of the NOTICE file are for informational purposes only and + do not modify the License. 
You may add Your own attribution + notices within Derivative Works that You distribute, alongside + or as an addendum to the NOTICE text from the Work, provided + that such additional attribution notices cannot be construed + as modifying the License. + + You may add Your own copyright statement to Your modifications and + may provide additional or different license terms and conditions + for use, reproduction, or distribution of Your modifications, or + for any such Derivative Works as a whole, provided Your use, + reproduction, and distribution of the Work otherwise complies with + the conditions stated in this License. + + 5. Submission of Contributions. Unless You explicitly state otherwise, + any Contribution intentionally submitted for inclusion in the Work + by You to the Licensor shall be under the terms and conditions of + this License, without any additional terms or conditions. + Notwithstanding the above, nothing herein shall supersede or modify + the terms of any separate license agreement you may have executed + with Licensor regarding such Contributions. + + 6. Trademarks. This License does not grant permission to use the trade + names, trademarks, service marks, or product names of the Licensor, + except as required for reasonable and customary use in describing the + origin of the Work and reproducing the content of the NOTICE file. + + 7. Disclaimer of Warranty. Unless required by applicable law or + agreed to in writing, Licensor provides the Work (and each + Contributor provides its Contributions) on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or + implied, including, without limitation, any warranties or conditions + of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A + PARTICULAR PURPOSE. You are solely responsible for determining the + appropriateness of using or redistributing the Work and assume any + risks associated with Your exercise of permissions under this License. + + 8. 
Limitation of Liability. In no event and under no legal theory, + whether in tort (including negligence), contract, or otherwise, + unless required by applicable law (such as deliberate and grossly + negligent acts) or agreed to in writing, shall any Contributor be + liable to You for damages, including any direct, indirect, special, + incidental, or consequential damages of any character arising as a + result of this License or out of the use or inability to use the + Work (including but not limited to damages for loss of goodwill, + work stoppage, computer failure or malfunction, or any and all + other commercial damages or losses), even if such Contributor + has been advised of the possibility of such damages. + + 9. Accepting Warranty or Additional Liability. While redistributing + the Work or Derivative Works thereof, You may choose to offer, + and charge a fee for, acceptance of support, warranty, indemnity, + or other liability obligations and/or rights consistent with this + License. However, in accepting such obligations, You may act only + on Your own behalf and on Your sole responsibility, not on behalf + of any other Contributor, and only if You agree to indemnify, + defend, and hold each Contributor harmless for any liability + incurred by, or claims asserted against, such Contributor by reason + of your accepting any such warranty or additional liability. + + END OF TERMS AND CONDITIONS + + APPENDIX: How to apply the Apache License to your work. + + To apply the Apache License to your work, attach the following + boilerplate notice, with the fields enclosed by brackets "{}" + replaced with your own identifying information. (Don't include + the brackets!) The text should be enclosed in the appropriate + comment syntax for the file format. We also recommend that a + file or class name and description of purpose be included on the + same "printed page" as the copyright notice for easier + identification within third-party archives. 
+ + Copyright 2019 Amazon.com, Inc. or its affiliates. All Rights Reserved. + + Licensed under the Apache License, Version 2.0 (the "License"); + you may not use this file except in compliance with the License. + You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + + Unless required by applicable law or agreed to in writing, software + distributed under the License is distributed on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + See the License for the specific language governing permissions and + limitations under the License. diff --git a/amplify/functions/fetchDocuments/node_modules/@smithy/protocol-http/README.md b/amplify/functions/fetchDocuments/node_modules/@smithy/protocol-http/README.md new file mode 100644 index 0000000..a547ab0 --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@smithy/protocol-http/README.md @@ -0,0 +1,4 @@ +# @smithy/protocol-http + +[![NPM version](https://img.shields.io/npm/v/@smithy/protocol-http/latest.svg)](https://www.npmjs.com/package/@smithy/protocol-http) +[![NPM downloads](https://img.shields.io/npm/dm/@smithy/protocol-http.svg)](https://www.npmjs.com/package/@smithy/protocol-http) diff --git a/amplify/functions/fetchDocuments/node_modules/@smithy/protocol-http/dist-cjs/Field.js b/amplify/functions/fetchDocuments/node_modules/@smithy/protocol-http/dist-cjs/Field.js new file mode 100644 index 0000000..532e610 --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@smithy/protocol-http/dist-cjs/Field.js @@ -0,0 +1 @@ +module.exports = require("./index.js"); \ No newline at end of file diff --git a/amplify/functions/fetchDocuments/node_modules/@smithy/protocol-http/dist-cjs/Fields.js b/amplify/functions/fetchDocuments/node_modules/@smithy/protocol-http/dist-cjs/Fields.js new file mode 100644 index 0000000..532e610 --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@smithy/protocol-http/dist-cjs/Fields.js 
@@ -0,0 +1 @@ +module.exports = require("./index.js"); \ No newline at end of file diff --git a/amplify/functions/fetchDocuments/node_modules/@smithy/protocol-http/dist-cjs/extensions/httpExtensionConfiguration.js b/amplify/functions/fetchDocuments/node_modules/@smithy/protocol-http/dist-cjs/extensions/httpExtensionConfiguration.js new file mode 100644 index 0000000..0440577 --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@smithy/protocol-http/dist-cjs/extensions/httpExtensionConfiguration.js @@ -0,0 +1 @@ +module.exports = require("../index.js"); \ No newline at end of file diff --git a/amplify/functions/fetchDocuments/node_modules/@smithy/protocol-http/dist-cjs/extensions/index.js b/amplify/functions/fetchDocuments/node_modules/@smithy/protocol-http/dist-cjs/extensions/index.js new file mode 100644 index 0000000..0440577 --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@smithy/protocol-http/dist-cjs/extensions/index.js @@ -0,0 +1 @@ +module.exports = require("../index.js"); \ No newline at end of file diff --git a/amplify/functions/fetchDocuments/node_modules/@smithy/protocol-http/dist-cjs/httpHandler.js b/amplify/functions/fetchDocuments/node_modules/@smithy/protocol-http/dist-cjs/httpHandler.js new file mode 100644 index 0000000..532e610 --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@smithy/protocol-http/dist-cjs/httpHandler.js @@ -0,0 +1 @@ +module.exports = require("./index.js"); \ No newline at end of file diff --git a/amplify/functions/fetchDocuments/node_modules/@smithy/protocol-http/dist-cjs/httpRequest.js b/amplify/functions/fetchDocuments/node_modules/@smithy/protocol-http/dist-cjs/httpRequest.js new file mode 100644 index 0000000..532e610 --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@smithy/protocol-http/dist-cjs/httpRequest.js @@ -0,0 +1 @@ +module.exports = require("./index.js"); \ No newline at end of file diff --git 
a/amplify/functions/fetchDocuments/node_modules/@smithy/protocol-http/dist-cjs/httpResponse.js b/amplify/functions/fetchDocuments/node_modules/@smithy/protocol-http/dist-cjs/httpResponse.js new file mode 100644 index 0000000..532e610 --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@smithy/protocol-http/dist-cjs/httpResponse.js @@ -0,0 +1 @@ +module.exports = require("./index.js"); \ No newline at end of file diff --git a/amplify/functions/fetchDocuments/node_modules/@smithy/protocol-http/dist-cjs/index.js b/amplify/functions/fetchDocuments/node_modules/@smithy/protocol-http/dist-cjs/index.js new file mode 100644 index 0000000..df37109 --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@smithy/protocol-http/dist-cjs/index.js @@ -0,0 +1,262 @@ +var __defProp = Object.defineProperty; +var __getOwnPropDesc = Object.getOwnPropertyDescriptor; +var __getOwnPropNames = Object.getOwnPropertyNames; +var __hasOwnProp = Object.prototype.hasOwnProperty; +var __name = (target, value) => __defProp(target, "name", { value, configurable: true }); +var __export = (target, all) => { + for (var name in all) + __defProp(target, name, { get: all[name], enumerable: true }); +}; +var __copyProps = (to, from, except, desc) => { + if (from && typeof from === "object" || typeof from === "function") { + for (let key of __getOwnPropNames(from)) + if (!__hasOwnProp.call(to, key) && key !== except) + __defProp(to, key, { get: () => from[key], enumerable: !(desc = __getOwnPropDesc(from, key)) || desc.enumerable }); + } + return to; +}; +var __toCommonJS = (mod) => __copyProps(__defProp({}, "__esModule", { value: true }), mod); + +// src/index.ts +var src_exports = {}; +__export(src_exports, { + Field: () => Field, + Fields: () => Fields, + HttpRequest: () => HttpRequest, + HttpResponse: () => HttpResponse, + IHttpRequest: () => import_types.HttpRequest, + getHttpHandlerExtensionConfiguration: () => getHttpHandlerExtensionConfiguration, + isValidHostname: () => 
isValidHostname, + resolveHttpHandlerRuntimeConfig: () => resolveHttpHandlerRuntimeConfig +}); +module.exports = __toCommonJS(src_exports); + +// src/extensions/httpExtensionConfiguration.ts +var getHttpHandlerExtensionConfiguration = /* @__PURE__ */ __name((runtimeConfig) => { + return { + setHttpHandler(handler) { + runtimeConfig.httpHandler = handler; + }, + httpHandler() { + return runtimeConfig.httpHandler; + }, + updateHttpClientConfig(key, value) { + runtimeConfig.httpHandler?.updateHttpClientConfig(key, value); + }, + httpHandlerConfigs() { + return runtimeConfig.httpHandler.httpHandlerConfigs(); + } + }; +}, "getHttpHandlerExtensionConfiguration"); +var resolveHttpHandlerRuntimeConfig = /* @__PURE__ */ __name((httpHandlerExtensionConfiguration) => { + return { + httpHandler: httpHandlerExtensionConfiguration.httpHandler() + }; +}, "resolveHttpHandlerRuntimeConfig"); + +// src/Field.ts +var import_types = require("@smithy/types"); +var Field = class { + static { + __name(this, "Field"); + } + constructor({ name, kind = import_types.FieldPosition.HEADER, values = [] }) { + this.name = name; + this.kind = kind; + this.values = values; + } + /** + * Appends a value to the field. + * + * @param value The value to append. + */ + add(value) { + this.values.push(value); + } + /** + * Overwrite existing field values. + * + * @param values The new field values. + */ + set(values) { + this.values = values; + } + /** + * Remove all matching entries from list. + * + * @param value Value to remove. + */ + remove(value) { + this.values = this.values.filter((v) => v !== value); + } + /** + * Get comma-delimited string. + * + * @returns String representation of {@link Field}. + */ + toString() { + return this.values.map((v) => v.includes(",") || v.includes(" ") ? `"${v}"` : v).join(", "); + } + /** + * Get string values as a list + * + * @returns Values in {@link Field} as a list. 
+ */ + get() { + return this.values; + } +}; + +// src/Fields.ts +var Fields = class { + constructor({ fields = [], encoding = "utf-8" }) { + this.entries = {}; + fields.forEach(this.setField.bind(this)); + this.encoding = encoding; + } + static { + __name(this, "Fields"); + } + /** + * Set entry for a {@link Field} name. The `name` + * attribute will be used to key the collection. + * + * @param field The {@link Field} to set. + */ + setField(field) { + this.entries[field.name.toLowerCase()] = field; + } + /** + * Retrieve {@link Field} entry by name. + * + * @param name The name of the {@link Field} entry + * to retrieve + * @returns The {@link Field} if it exists. + */ + getField(name) { + return this.entries[name.toLowerCase()]; + } + /** + * Delete entry from collection. + * + * @param name Name of the entry to delete. + */ + removeField(name) { + delete this.entries[name.toLowerCase()]; + } + /** + * Helper function for retrieving specific types of fields. + * Used to grab all headers or all trailers. + * + * @param kind {@link FieldPosition} of entries to retrieve. + * @returns The {@link Field} entries with the specified + * {@link FieldPosition}. + */ + getByType(kind) { + return Object.values(this.entries).filter((field) => field.kind === kind); + } +}; + +// src/httpRequest.ts + +var HttpRequest = class _HttpRequest { + static { + __name(this, "HttpRequest"); + } + constructor(options) { + this.method = options.method || "GET"; + this.hostname = options.hostname || "localhost"; + this.port = options.port; + this.query = options.query || {}; + this.headers = options.headers || {}; + this.body = options.body; + this.protocol = options.protocol ? options.protocol.slice(-1) !== ":" ? `${options.protocol}:` : options.protocol : "https:"; + this.path = options.path ? options.path.charAt(0) !== "/" ? 
`/${options.path}` : options.path : "/"; + this.username = options.username; + this.password = options.password; + this.fragment = options.fragment; + } + /** + * Note: this does not deep-clone the body. + */ + static clone(request) { + const cloned = new _HttpRequest({ + ...request, + headers: { ...request.headers } + }); + if (cloned.query) { + cloned.query = cloneQuery(cloned.query); + } + return cloned; + } + /** + * This method only actually asserts that request is the interface {@link IHttpRequest}, + * and not necessarily this concrete class. Left in place for API stability. + * + * Do not call instance methods on the input of this function, and + * do not assume it has the HttpRequest prototype. + */ + static isInstance(request) { + if (!request) { + return false; + } + const req = request; + return "method" in req && "protocol" in req && "hostname" in req && "path" in req && typeof req["query"] === "object" && typeof req["headers"] === "object"; + } + /** + * @deprecated use static HttpRequest.clone(request) instead. It's not safe to call + * this method because {@link HttpRequest.isInstance} incorrectly + * asserts that IHttpRequest (interface) objects are of type HttpRequest (class). + */ + clone() { + return _HttpRequest.clone(this); + } +}; +function cloneQuery(query) { + return Object.keys(query).reduce((carry, paramName) => { + const param = query[paramName]; + return { + ...carry, + [paramName]: Array.isArray(param) ? 
[...param] : param + }; + }, {}); +} +__name(cloneQuery, "cloneQuery"); + +// src/httpResponse.ts +var HttpResponse = class { + static { + __name(this, "HttpResponse"); + } + constructor(options) { + this.statusCode = options.statusCode; + this.reason = options.reason; + this.headers = options.headers || {}; + this.body = options.body; + } + static isInstance(response) { + if (!response) + return false; + const resp = response; + return typeof resp.statusCode === "number" && typeof resp.headers === "object"; + } +}; + +// src/isValidHostname.ts +function isValidHostname(hostname) { + const hostPattern = /^[a-z0-9][a-z0-9\.\-]*[a-z0-9]$/; + return hostPattern.test(hostname); +} +__name(isValidHostname, "isValidHostname"); +// Annotate the CommonJS export names for ESM import in node: + +0 && (module.exports = { + getHttpHandlerExtensionConfiguration, + resolveHttpHandlerRuntimeConfig, + Field, + Fields, + HttpRequest, + HttpResponse, + isValidHostname +}); + diff --git a/amplify/functions/fetchDocuments/node_modules/@smithy/protocol-http/dist-cjs/isValidHostname.js b/amplify/functions/fetchDocuments/node_modules/@smithy/protocol-http/dist-cjs/isValidHostname.js new file mode 100644 index 0000000..532e610 --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@smithy/protocol-http/dist-cjs/isValidHostname.js @@ -0,0 +1 @@ +module.exports = require("./index.js"); \ No newline at end of file diff --git a/amplify/functions/fetchDocuments/node_modules/@smithy/protocol-http/dist-cjs/types.js b/amplify/functions/fetchDocuments/node_modules/@smithy/protocol-http/dist-cjs/types.js new file mode 100644 index 0000000..532e610 --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@smithy/protocol-http/dist-cjs/types.js @@ -0,0 +1 @@ +module.exports = require("./index.js"); \ No newline at end of file diff --git a/amplify/functions/fetchDocuments/node_modules/@smithy/protocol-http/dist-es/Field.js 
b/amplify/functions/fetchDocuments/node_modules/@smithy/protocol-http/dist-es/Field.js new file mode 100644 index 0000000..918c883 --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@smithy/protocol-http/dist-es/Field.js @@ -0,0 +1,23 @@ +import { FieldPosition } from "@smithy/types"; +export class Field { + constructor({ name, kind = FieldPosition.HEADER, values = [] }) { + this.name = name; + this.kind = kind; + this.values = values; + } + add(value) { + this.values.push(value); + } + set(values) { + this.values = values; + } + remove(value) { + this.values = this.values.filter((v) => v !== value); + } + toString() { + return this.values.map((v) => (v.includes(",") || v.includes(" ") ? `"${v}"` : v)).join(", "); + } + get() { + return this.values; + } +} diff --git a/amplify/functions/fetchDocuments/node_modules/@smithy/protocol-http/dist-es/Fields.js b/amplify/functions/fetchDocuments/node_modules/@smithy/protocol-http/dist-es/Fields.js new file mode 100644 index 0000000..efa591f --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@smithy/protocol-http/dist-es/Fields.js @@ -0,0 +1,19 @@ +export class Fields { + constructor({ fields = [], encoding = "utf-8" }) { + this.entries = {}; + fields.forEach(this.setField.bind(this)); + this.encoding = encoding; + } + setField(field) { + this.entries[field.name.toLowerCase()] = field; + } + getField(name) { + return this.entries[name.toLowerCase()]; + } + removeField(name) { + delete this.entries[name.toLowerCase()]; + } + getByType(kind) { + return Object.values(this.entries).filter((field) => field.kind === kind); + } +} diff --git a/amplify/functions/fetchDocuments/node_modules/@smithy/protocol-http/dist-es/extensions/httpExtensionConfiguration.js b/amplify/functions/fetchDocuments/node_modules/@smithy/protocol-http/dist-es/extensions/httpExtensionConfiguration.js new file mode 100644 index 0000000..1a5aa0c --- /dev/null +++ 
b/amplify/functions/fetchDocuments/node_modules/@smithy/protocol-http/dist-es/extensions/httpExtensionConfiguration.js @@ -0,0 +1,21 @@ +export const getHttpHandlerExtensionConfiguration = (runtimeConfig) => { + return { + setHttpHandler(handler) { + runtimeConfig.httpHandler = handler; + }, + httpHandler() { + return runtimeConfig.httpHandler; + }, + updateHttpClientConfig(key, value) { + runtimeConfig.httpHandler?.updateHttpClientConfig(key, value); + }, + httpHandlerConfigs() { + return runtimeConfig.httpHandler.httpHandlerConfigs(); + }, + }; +}; +export const resolveHttpHandlerRuntimeConfig = (httpHandlerExtensionConfiguration) => { + return { + httpHandler: httpHandlerExtensionConfiguration.httpHandler(), + }; +}; diff --git a/amplify/functions/fetchDocuments/node_modules/@smithy/protocol-http/dist-es/extensions/index.js b/amplify/functions/fetchDocuments/node_modules/@smithy/protocol-http/dist-es/extensions/index.js new file mode 100644 index 0000000..a215a4a --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@smithy/protocol-http/dist-es/extensions/index.js @@ -0,0 +1 @@ +export * from "./httpExtensionConfiguration"; diff --git a/amplify/functions/fetchDocuments/node_modules/@smithy/protocol-http/dist-es/httpHandler.js b/amplify/functions/fetchDocuments/node_modules/@smithy/protocol-http/dist-es/httpHandler.js new file mode 100644 index 0000000..cb0ff5c --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@smithy/protocol-http/dist-es/httpHandler.js @@ -0,0 +1 @@ +export {}; diff --git a/amplify/functions/fetchDocuments/node_modules/@smithy/protocol-http/dist-es/httpRequest.js b/amplify/functions/fetchDocuments/node_modules/@smithy/protocol-http/dist-es/httpRequest.js new file mode 100644 index 0000000..fd426ab --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@smithy/protocol-http/dist-es/httpRequest.js @@ -0,0 +1,53 @@ +export class HttpRequest { + constructor(options) { + this.method = options.method || "GET"; 
+ this.hostname = options.hostname || "localhost"; + this.port = options.port; + this.query = options.query || {}; + this.headers = options.headers || {}; + this.body = options.body; + this.protocol = options.protocol + ? options.protocol.slice(-1) !== ":" + ? `${options.protocol}:` + : options.protocol + : "https:"; + this.path = options.path ? (options.path.charAt(0) !== "/" ? `/${options.path}` : options.path) : "/"; + this.username = options.username; + this.password = options.password; + this.fragment = options.fragment; + } + static clone(request) { + const cloned = new HttpRequest({ + ...request, + headers: { ...request.headers }, + }); + if (cloned.query) { + cloned.query = cloneQuery(cloned.query); + } + return cloned; + } + static isInstance(request) { + if (!request) { + return false; + } + const req = request; + return ("method" in req && + "protocol" in req && + "hostname" in req && + "path" in req && + typeof req["query"] === "object" && + typeof req["headers"] === "object"); + } + clone() { + return HttpRequest.clone(this); + } +} +function cloneQuery(query) { + return Object.keys(query).reduce((carry, paramName) => { + const param = query[paramName]; + return { + ...carry, + [paramName]: Array.isArray(param) ? 
[...param] : param, + }; + }, {}); +} diff --git a/amplify/functions/fetchDocuments/node_modules/@smithy/protocol-http/dist-es/httpResponse.js b/amplify/functions/fetchDocuments/node_modules/@smithy/protocol-http/dist-es/httpResponse.js new file mode 100644 index 0000000..75f470f --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@smithy/protocol-http/dist-es/httpResponse.js @@ -0,0 +1,14 @@ +export class HttpResponse { + constructor(options) { + this.statusCode = options.statusCode; + this.reason = options.reason; + this.headers = options.headers || {}; + this.body = options.body; + } + static isInstance(response) { + if (!response) + return false; + const resp = response; + return typeof resp.statusCode === "number" && typeof resp.headers === "object"; + } +} diff --git a/amplify/functions/fetchDocuments/node_modules/@smithy/protocol-http/dist-es/index.js b/amplify/functions/fetchDocuments/node_modules/@smithy/protocol-http/dist-es/index.js new file mode 100644 index 0000000..8ff7f26 --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@smithy/protocol-http/dist-es/index.js @@ -0,0 +1,8 @@ +export * from "./extensions"; +export * from "./Field"; +export * from "./Fields"; +export * from "./httpHandler"; +export * from "./httpRequest"; +export * from "./httpResponse"; +export * from "./isValidHostname"; +export * from "./types"; diff --git a/amplify/functions/fetchDocuments/node_modules/@smithy/protocol-http/dist-es/isValidHostname.js b/amplify/functions/fetchDocuments/node_modules/@smithy/protocol-http/dist-es/isValidHostname.js new file mode 100644 index 0000000..464c7db --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@smithy/protocol-http/dist-es/isValidHostname.js @@ -0,0 +1,4 @@ +export function isValidHostname(hostname) { + const hostPattern = /^[a-z0-9][a-z0-9\.\-]*[a-z0-9]$/; + return hostPattern.test(hostname); +} diff --git a/amplify/functions/fetchDocuments/node_modules/@smithy/protocol-http/dist-es/types.js 
b/amplify/functions/fetchDocuments/node_modules/@smithy/protocol-http/dist-es/types.js new file mode 100644 index 0000000..cb0ff5c --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@smithy/protocol-http/dist-es/types.js @@ -0,0 +1 @@ +export {}; diff --git a/amplify/functions/fetchDocuments/node_modules/@smithy/protocol-http/dist-types/Field.d.ts b/amplify/functions/fetchDocuments/node_modules/@smithy/protocol-http/dist-types/Field.d.ts new file mode 100644 index 0000000..2d1613a --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@smithy/protocol-http/dist-types/Field.d.ts @@ -0,0 +1,49 @@ +import { FieldOptions, FieldPosition } from "@smithy/types"; +/** + * A name-value pair representing a single field + * transmitted in an HTTP Request or Response. + * + * The kind will dictate metadata placement within + * an HTTP message. + * + * All field names are case insensitive and + * case-variance must be treated as equivalent. + * Names MAY be normalized but SHOULD be preserved + * for accuracy during transmission. + */ +export declare class Field { + readonly name: string; + readonly kind: FieldPosition; + values: string[]; + constructor({ name, kind, values }: FieldOptions); + /** + * Appends a value to the field. + * + * @param value The value to append. + */ + add(value: string): void; + /** + * Overwrite existing field values. + * + * @param values The new field values. + */ + set(values: string[]): void; + /** + * Remove all matching entries from list. + * + * @param value Value to remove. + */ + remove(value: string): void; + /** + * Get comma-delimited string. + * + * @returns String representation of {@link Field}. + */ + toString(): string; + /** + * Get string values as a list + * + * @returns Values in {@link Field} as a list. 
+ */ + get(): string[]; +} diff --git a/amplify/functions/fetchDocuments/node_modules/@smithy/protocol-http/dist-types/Fields.d.ts b/amplify/functions/fetchDocuments/node_modules/@smithy/protocol-http/dist-types/Fields.d.ts new file mode 100644 index 0000000..8915826 --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@smithy/protocol-http/dist-types/Fields.d.ts @@ -0,0 +1,44 @@ +import { FieldPosition } from "@smithy/types"; +import { Field } from "./Field"; +export type FieldsOptions = { + fields?: Field[]; + encoding?: string; +}; +/** + * Collection of Field entries mapped by name. + */ +export declare class Fields { + private readonly entries; + private readonly encoding; + constructor({ fields, encoding }: FieldsOptions); + /** + * Set entry for a {@link Field} name. The `name` + * attribute will be used to key the collection. + * + * @param field The {@link Field} to set. + */ + setField(field: Field): void; + /** + * Retrieve {@link Field} entry by name. + * + * @param name The name of the {@link Field} entry + * to retrieve + * @returns The {@link Field} if it exists. + */ + getField(name: string): Field | undefined; + /** + * Delete entry from collection. + * + * @param name Name of the entry to delete. + */ + removeField(name: string): void; + /** + * Helper function for retrieving specific types of fields. + * Used to grab all headers or all trailers. + * + * @param kind {@link FieldPosition} of entries to retrieve. + * @returns The {@link Field} entries with the specified + * {@link FieldPosition}. 
+ */ + getByType(kind: FieldPosition): Field[]; +} diff --git a/amplify/functions/fetchDocuments/node_modules/@smithy/protocol-http/dist-types/extensions/httpExtensionConfiguration.d.ts b/amplify/functions/fetchDocuments/node_modules/@smithy/protocol-http/dist-types/extensions/httpExtensionConfiguration.d.ts new file mode 100644 index 0000000..bfe452d --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@smithy/protocol-http/dist-types/extensions/httpExtensionConfiguration.d.ts @@ -0,0 +1,37 @@ +import { HttpHandler } from "../httpHandler"; +/** + * @internal + */ +export interface HttpHandlerExtensionConfiguration { + setHttpHandler(handler: HttpHandler): void; + httpHandler(): HttpHandler; + updateHttpClientConfig(key: keyof HandlerConfig, value: HandlerConfig[typeof key]): void; + httpHandlerConfigs(): HandlerConfig; +} +/** + * @internal + */ +export type HttpHandlerExtensionConfigType = Partial<{ + httpHandler: HttpHandler; +}>; +/** + * @internal + * + * Helper function to resolve default extension configuration from runtime config + */ +export declare const getHttpHandlerExtensionConfiguration: (runtimeConfig: Partial<{ + httpHandler: HttpHandler; +}>) => { + setHttpHandler(handler: HttpHandler): void; + httpHandler(): HttpHandler; + updateHttpClientConfig(key: keyof HandlerConfig, value: HandlerConfig[keyof HandlerConfig]): void; + httpHandlerConfigs(): HandlerConfig; +}; +/** + * @internal + * + * Helper function to resolve runtime config from default extension configuration + */ +export declare const resolveHttpHandlerRuntimeConfig: (httpHandlerExtensionConfiguration: HttpHandlerExtensionConfiguration) => Partial<{ + httpHandler: HttpHandler; +}>; diff --git a/amplify/functions/fetchDocuments/node_modules/@smithy/protocol-http/dist-types/extensions/index.d.ts b/amplify/functions/fetchDocuments/node_modules/@smithy/protocol-http/dist-types/extensions/index.d.ts new file mode 100644 index 0000000..a215a4a --- /dev/null +++ 
b/amplify/functions/fetchDocuments/node_modules/@smithy/protocol-http/dist-types/extensions/index.d.ts @@ -0,0 +1 @@ +export * from "./httpExtensionConfiguration"; diff --git a/amplify/functions/fetchDocuments/node_modules/@smithy/protocol-http/dist-types/httpHandler.d.ts b/amplify/functions/fetchDocuments/node_modules/@smithy/protocol-http/dist-types/httpHandler.d.ts new file mode 100644 index 0000000..8dc8d32 --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@smithy/protocol-http/dist-types/httpHandler.d.ts @@ -0,0 +1,35 @@ +import type { FetchHttpHandlerOptions, HttpHandlerOptions, NodeHttpHandlerOptions, RequestHandler } from "@smithy/types"; +import type { HttpRequest } from "./httpRequest"; +import type { HttpResponse } from "./httpResponse"; +/** + * @internal + */ +export type HttpHandler = RequestHandler & { + /** + * @internal + */ + updateHttpClientConfig(key: keyof HttpHandlerConfig, value: HttpHandlerConfig[typeof key]): void; + /** + * @internal + */ + httpHandlerConfigs(): HttpHandlerConfig; +}; +/** + * @public + * + * A type representing the accepted user inputs for the `requestHandler` field + * of a client's constructor object. + * + * You may provide an instance of an HttpHandler, or alternatively + * provide the constructor arguments as an object which will be passed + * to the constructor of the default request handler. + * + * The default class constructor to which your arguments will be passed + * varies. The Node.js default is the NodeHttpHandler and the browser/react-native + * default is the FetchHttpHandler. In rarer cases specific clients may be + * configured to use other default implementations such as Websocket or HTTP2. + * + * The fallback type Record is part of the union to allow + * passing constructor params to an unknown requestHandler type. 
+ */ +export type HttpHandlerUserInput = HttpHandler | NodeHttpHandlerOptions | FetchHttpHandlerOptions | Record; diff --git a/amplify/functions/fetchDocuments/node_modules/@smithy/protocol-http/dist-types/httpRequest.d.ts b/amplify/functions/fetchDocuments/node_modules/@smithy/protocol-http/dist-types/httpRequest.d.ts new file mode 100644 index 0000000..8b64ff6 --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@smithy/protocol-http/dist-types/httpRequest.d.ts @@ -0,0 +1,55 @@ +import { HeaderBag, HttpMessage, HttpRequest as IHttpRequest, QueryParameterBag, URI } from "@smithy/types"; +type HttpRequestOptions = Partial & Partial & { + method?: string; +}; +/** + * Use the distinct IHttpRequest interface from \@smithy/types instead. + * This should not be used due to + * overlapping with the concrete class' name. + * + * This is not marked deprecated since that would mark the concrete class + * deprecated as well. + * + * @internal + */ +export interface HttpRequest extends IHttpRequest { +} +/** + * @public + */ +export { IHttpRequest }; +/** + * @public + */ +export declare class HttpRequest implements HttpMessage, URI { + method: string; + protocol: string; + hostname: string; + port?: number; + path: string; + query: QueryParameterBag; + headers: HeaderBag; + username?: string; + password?: string; + fragment?: string; + body?: any; + constructor(options: HttpRequestOptions); + /** + * Note: this does not deep-clone the body. + */ + static clone(request: IHttpRequest): HttpRequest; + /** + * This method only actually asserts that request is the interface {@link IHttpRequest}, + * and not necessarily this concrete class. Left in place for API stability. + * + * Do not call instance methods on the input of this function, and + * do not assume it has the HttpRequest prototype. + */ + static isInstance(request: unknown): request is HttpRequest; + /** + * @deprecated use static HttpRequest.clone(request) instead. 
It's not safe to call + * this method because {@link HttpRequest.isInstance} incorrectly + * asserts that IHttpRequest (interface) objects are of type HttpRequest (class). + */ + clone(): HttpRequest; +} diff --git a/amplify/functions/fetchDocuments/node_modules/@smithy/protocol-http/dist-types/httpResponse.d.ts b/amplify/functions/fetchDocuments/node_modules/@smithy/protocol-http/dist-types/httpResponse.d.ts new file mode 100644 index 0000000..e51f18b --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@smithy/protocol-http/dist-types/httpResponse.d.ts @@ -0,0 +1,29 @@ +import { HeaderBag, HttpMessage, HttpResponse as IHttpResponse } from "@smithy/types"; +type HttpResponseOptions = Partial & { + statusCode: number; + reason?: string; +}; +/** + * Use the distinct IHttpResponse interface from \@smithy/types instead. + * This should not be used due to + * overlapping with the concrete class' name. + * + * This is not marked deprecated since that would mark the concrete class + * deprecated as well. 
+ * + * @internal + */ +export interface HttpResponse extends IHttpResponse { +} +/** + * @public + */ +export declare class HttpResponse { + statusCode: number; + reason?: string; + headers: HeaderBag; + body?: any; + constructor(options: HttpResponseOptions); + static isInstance(response: unknown): response is HttpResponse; +} +export {}; diff --git a/amplify/functions/fetchDocuments/node_modules/@smithy/protocol-http/dist-types/index.d.ts b/amplify/functions/fetchDocuments/node_modules/@smithy/protocol-http/dist-types/index.d.ts new file mode 100644 index 0000000..8ff7f26 --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@smithy/protocol-http/dist-types/index.d.ts @@ -0,0 +1,8 @@ +export * from "./extensions"; +export * from "./Field"; +export * from "./Fields"; +export * from "./httpHandler"; +export * from "./httpRequest"; +export * from "./httpResponse"; +export * from "./isValidHostname"; +export * from "./types"; diff --git a/amplify/functions/fetchDocuments/node_modules/@smithy/protocol-http/dist-types/isValidHostname.d.ts b/amplify/functions/fetchDocuments/node_modules/@smithy/protocol-http/dist-types/isValidHostname.d.ts new file mode 100644 index 0000000..6fb5bcb --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@smithy/protocol-http/dist-types/isValidHostname.d.ts @@ -0,0 +1 @@ +export declare function isValidHostname(hostname: string): boolean; diff --git a/amplify/functions/fetchDocuments/node_modules/@smithy/protocol-http/dist-types/ts3.4/Field.d.ts b/amplify/functions/fetchDocuments/node_modules/@smithy/protocol-http/dist-types/ts3.4/Field.d.ts new file mode 100644 index 0000000..faa4b70 --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@smithy/protocol-http/dist-types/ts3.4/Field.d.ts @@ -0,0 +1,49 @@ +import { FieldOptions, FieldPosition } from "@smithy/types"; +/** + * A name-value pair representing a single field + * transmitted in an HTTP Request or Response. 
+ * + * The kind will dictate metadata placement within + * an HTTP message. + * + * All field names are case insensitive and + * case-variance must be treated as equivalent. + * Names MAY be normalized but SHOULD be preserved + * for accuracy during transmission. + */ +export declare class Field { + readonly name: string; + readonly kind: FieldPosition; + values: string[]; + constructor({ name, kind, values }: FieldOptions); + /** + * Appends a value to the field. + * + * @param value The value to append. + */ + add(value: string): void; + /** + * Overwrite existing field values. + * + * @param values The new field values. + */ + set(values: string[]): void; + /** + * Remove all matching entries from list. + * + * @param value Value to remove. + */ + remove(value: string): void; + /** + * Get comma-delimited string. + * + * @returns String representation of {@link Field}. + */ + toString(): string; + /** + * Get string values as a list + * + * @returns Values in {@link Field} as a list. + */ + get(): string[]; +} diff --git a/amplify/functions/fetchDocuments/node_modules/@smithy/protocol-http/dist-types/ts3.4/Fields.d.ts b/amplify/functions/fetchDocuments/node_modules/@smithy/protocol-http/dist-types/ts3.4/Fields.d.ts new file mode 100644 index 0000000..616f55e --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@smithy/protocol-http/dist-types/ts3.4/Fields.d.ts @@ -0,0 +1,44 @@ +import { FieldPosition } from "@smithy/types"; +import { Field } from "./Field"; +export type FieldsOptions = { + fields?: Field[]; + encoding?: string; +}; +/** + * Collection of Field entries mapped by name. + */ +export declare class Fields { + private readonly entries; + private readonly encoding; + constructor({ fields, encoding }: FieldsOptions); + /** + * Set entry for a {@link Field} name. The `name` + * attribute will be used to key the collection. + * + * @param field The {@link Field} to set. 
+ */ + setField(field: Field): void; + /** + * Retrieve {@link Field} entry by name. + * + * @param name The name of the {@link Field} entry + * to retrieve + * @returns The {@link Field} if it exists. + */ + getField(name: string): Field | undefined; + /** + * Delete entry from collection. + * + * @param name Name of the entry to delete. + */ + removeField(name: string): void; + /** + * Helper function for retrieving specific types of fields. + * Used to grab all headers or all trailers. + * + * @param kind {@link FieldPosition} of entries to retrieve. + * @returns The {@link Field} entries with the specified + * {@link FieldPosition}. + */ + getByType(kind: FieldPosition): Field[]; +} diff --git a/amplify/functions/fetchDocuments/node_modules/@smithy/protocol-http/dist-types/ts3.4/extensions/httpExtensionConfiguration.d.ts b/amplify/functions/fetchDocuments/node_modules/@smithy/protocol-http/dist-types/ts3.4/extensions/httpExtensionConfiguration.d.ts new file mode 100644 index 0000000..3cd2cf6 --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@smithy/protocol-http/dist-types/ts3.4/extensions/httpExtensionConfiguration.d.ts @@ -0,0 +1,37 @@ +import { HttpHandler } from "../httpHandler"; +/** + * @internal + */ +export interface HttpHandlerExtensionConfiguration { + setHttpHandler(handler: HttpHandler): void; + httpHandler(): HttpHandler; + updateHttpClientConfig(key: keyof HandlerConfig, value: HandlerConfig[typeof key]): void; + httpHandlerConfigs(): HandlerConfig; +} +/** + * @internal + */ +export type HttpHandlerExtensionConfigType = Partial<{ + httpHandler: HttpHandler; +}>; +/** + * @internal + * + * Helper function to resolve default extension configuration from runtime config + */ +export declare const getHttpHandlerExtensionConfiguration: (runtimeConfig: Partial<{ + httpHandler: HttpHandler; +}>) => { + setHttpHandler(handler: HttpHandler): void; + httpHandler(): HttpHandler; + updateHttpClientConfig(key: keyof HandlerConfig, value: 
HandlerConfig[keyof HandlerConfig]): void; + httpHandlerConfigs(): HandlerConfig; +}; +/** + * @internal + * + * Helper function to resolve runtime config from default extension configuration + */ +export declare const resolveHttpHandlerRuntimeConfig: (httpHandlerExtensionConfiguration: HttpHandlerExtensionConfiguration) => Partial<{ + httpHandler: HttpHandler; +}>; diff --git a/amplify/functions/fetchDocuments/node_modules/@smithy/protocol-http/dist-types/ts3.4/extensions/index.d.ts b/amplify/functions/fetchDocuments/node_modules/@smithy/protocol-http/dist-types/ts3.4/extensions/index.d.ts new file mode 100644 index 0000000..e0f765b --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@smithy/protocol-http/dist-types/ts3.4/extensions/index.d.ts @@ -0,0 +1 @@ +export * from "./httpExtensionConfiguration"; diff --git a/amplify/functions/fetchDocuments/node_modules/@smithy/protocol-http/dist-types/ts3.4/httpHandler.d.ts b/amplify/functions/fetchDocuments/node_modules/@smithy/protocol-http/dist-types/ts3.4/httpHandler.d.ts new file mode 100644 index 0000000..b8f1978 --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@smithy/protocol-http/dist-types/ts3.4/httpHandler.d.ts @@ -0,0 +1,35 @@ +import { FetchHttpHandlerOptions, HttpHandlerOptions, NodeHttpHandlerOptions, RequestHandler } from "@smithy/types"; +import { HttpRequest } from "./httpRequest"; +import { HttpResponse } from "./httpResponse"; +/** + * @internal + */ +export type HttpHandler = RequestHandler & { + /** + * @internal + */ + updateHttpClientConfig(key: keyof HttpHandlerConfig, value: HttpHandlerConfig[typeof key]): void; + /** + * @internal + */ + httpHandlerConfigs(): HttpHandlerConfig; +}; +/** + * @public + * + * A type representing the accepted user inputs for the `requestHandler` field + * of a client's constructor object. 
+ * + * You may provide an instance of an HttpHandler, or alternatively + * provide the constructor arguments as an object which will be passed + * to the constructor of the default request handler. + * + * The default class constructor to which your arguments will be passed + * varies. The Node.js default is the NodeHttpHandler and the browser/react-native + * default is the FetchHttpHandler. In rarer cases specific clients may be + * configured to use other default implementations such as Websocket or HTTP2. + * + * The fallback type Record is part of the union to allow + * passing constructor params to an unknown requestHandler type. + */ +export type HttpHandlerUserInput = HttpHandler | NodeHttpHandlerOptions | FetchHttpHandlerOptions | Record; diff --git a/amplify/functions/fetchDocuments/node_modules/@smithy/protocol-http/dist-types/ts3.4/httpRequest.d.ts b/amplify/functions/fetchDocuments/node_modules/@smithy/protocol-http/dist-types/ts3.4/httpRequest.d.ts new file mode 100644 index 0000000..cdcf38b --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@smithy/protocol-http/dist-types/ts3.4/httpRequest.d.ts @@ -0,0 +1,55 @@ +import { HeaderBag, HttpMessage, HttpRequest as IHttpRequest, QueryParameterBag, URI } from "@smithy/types"; +type HttpRequestOptions = Partial & Partial & { + method?: string; +}; +/** + * Use the distinct IHttpRequest interface from \@smithy/types instead. + * This should not be used due to + * overlapping with the concrete class' name. + * + * This is not marked deprecated since that would mark the concrete class + * deprecated as well. 
+ * + * @internal + */ +export interface HttpRequest extends IHttpRequest { +} +/** + * @public + */ +export { IHttpRequest }; +/** + * @public + */ +export declare class HttpRequest implements HttpMessage, URI { + method: string; + protocol: string; + hostname: string; + port?: number; + path: string; + query: QueryParameterBag; + headers: HeaderBag; + username?: string; + password?: string; + fragment?: string; + body?: any; + constructor(options: HttpRequestOptions); + /** + * Note: this does not deep-clone the body. + */ + static clone(request: IHttpRequest): HttpRequest; + /** + * This method only actually asserts that request is the interface {@link IHttpRequest}, + * and not necessarily this concrete class. Left in place for API stability. + * + * Do not call instance methods on the input of this function, and + * do not assume it has the HttpRequest prototype. + */ + static isInstance(request: unknown): request is HttpRequest; + /** + * @deprecated use static HttpRequest.clone(request) instead. It's not safe to call + * this method because {@link HttpRequest.isInstance} incorrectly + * asserts that IHttpRequest (interface) objects are of type HttpRequest (class). + */ + clone(): HttpRequest; +} diff --git a/amplify/functions/fetchDocuments/node_modules/@smithy/protocol-http/dist-types/ts3.4/httpResponse.d.ts b/amplify/functions/fetchDocuments/node_modules/@smithy/protocol-http/dist-types/ts3.4/httpResponse.d.ts new file mode 100644 index 0000000..8babc91 --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@smithy/protocol-http/dist-types/ts3.4/httpResponse.d.ts @@ -0,0 +1,29 @@ +import { HeaderBag, HttpMessage, HttpResponse as IHttpResponse } from "@smithy/types"; +type HttpResponseOptions = Partial & { + statusCode: number; + reason?: string; +}; +/** + * Use the distinct IHttpResponse interface from \@smithy/types instead. + * This should not be used due to + * overlapping with the concrete class' name. 
+ * + * This is not marked deprecated since that would mark the concrete class + * deprecated as well. + * + * @internal + */ +export interface HttpResponse extends IHttpResponse { +} +/** + * @public + */ +export declare class HttpResponse { + statusCode: number; + reason?: string; + headers: HeaderBag; + body?: any; + constructor(options: HttpResponseOptions); + static isInstance(response: unknown): response is HttpResponse; +} +export {}; diff --git a/amplify/functions/fetchDocuments/node_modules/@smithy/protocol-http/dist-types/ts3.4/index.d.ts b/amplify/functions/fetchDocuments/node_modules/@smithy/protocol-http/dist-types/ts3.4/index.d.ts new file mode 100644 index 0000000..08feffa --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@smithy/protocol-http/dist-types/ts3.4/index.d.ts @@ -0,0 +1,8 @@ +export * from "./extensions"; +export * from "./Field"; +export * from "./Fields"; +export * from "./httpHandler"; +export * from "./httpRequest"; +export * from "./httpResponse"; +export * from "./isValidHostname"; +export * from "./types"; diff --git a/amplify/functions/fetchDocuments/node_modules/@smithy/protocol-http/dist-types/ts3.4/isValidHostname.d.ts b/amplify/functions/fetchDocuments/node_modules/@smithy/protocol-http/dist-types/ts3.4/isValidHostname.d.ts new file mode 100644 index 0000000..7b85b36 --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@smithy/protocol-http/dist-types/ts3.4/isValidHostname.d.ts @@ -0,0 +1 @@ +export declare function isValidHostname(hostname: string): boolean; diff --git a/amplify/functions/fetchDocuments/node_modules/@smithy/protocol-http/dist-types/ts3.4/types.d.ts b/amplify/functions/fetchDocuments/node_modules/@smithy/protocol-http/dist-types/ts3.4/types.d.ts new file mode 100644 index 0000000..42e3c66 --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@smithy/protocol-http/dist-types/ts3.4/types.d.ts @@ -0,0 +1,21 @@ +import { FieldOptions as __FieldOptions, FieldPosition as 
__FieldPosition, HeaderBag as __HeaderBag, HttpHandlerOptions as __HttpHandlerOptions, HttpMessage as __HttpMessage } from "@smithy/types"; +/** + * @deprecated Use FieldOptions from `@smithy/types` instead + */ +export type FieldOptions = __FieldOptions; +/** + * @deprecated Use FieldPosition from `@smithy/types` instead + */ +export type FieldPosition = __FieldPosition; +/** + * @deprecated Use HeaderBag from `@smithy/types` instead + */ +export type HeaderBag = __HeaderBag; +/** + * @deprecated Use HttpMessage from `@smithy/types` instead + */ +export type HttpMessage = __HttpMessage; +/** + * @deprecated Use HttpHandlerOptions from `@smithy/types` instead + */ +export type HttpHandlerOptions = __HttpHandlerOptions; diff --git a/amplify/functions/fetchDocuments/node_modules/@smithy/protocol-http/dist-types/types.d.ts b/amplify/functions/fetchDocuments/node_modules/@smithy/protocol-http/dist-types/types.d.ts new file mode 100644 index 0000000..0d597b9 --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@smithy/protocol-http/dist-types/types.d.ts @@ -0,0 +1,21 @@ +import { FieldOptions as __FieldOptions, FieldPosition as __FieldPosition, HeaderBag as __HeaderBag, HttpHandlerOptions as __HttpHandlerOptions, HttpMessage as __HttpMessage } from "@smithy/types"; +/** + * @deprecated Use FieldOptions from `@smithy/types` instead + */ +export type FieldOptions = __FieldOptions; +/** + * @deprecated Use FieldPosition from `@smithy/types` instead + */ +export type FieldPosition = __FieldPosition; +/** + * @deprecated Use HeaderBag from `@smithy/types` instead + */ +export type HeaderBag = __HeaderBag; +/** + * @deprecated Use HttpMessage from `@smithy/types` instead + */ +export type HttpMessage = __HttpMessage; +/** + * @deprecated Use HttpHandlerOptions from `@smithy/types` instead + */ +export type HttpHandlerOptions = __HttpHandlerOptions; diff --git a/amplify/functions/fetchDocuments/node_modules/@smithy/protocol-http/package.json 
b/amplify/functions/fetchDocuments/node_modules/@smithy/protocol-http/package.json new file mode 100644 index 0000000..549711a --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@smithy/protocol-http/package.json @@ -0,0 +1,61 @@ +{ + "name": "@smithy/protocol-http", + "version": "5.1.0", + "scripts": { + "build": "concurrently 'yarn:build:cjs' 'yarn:build:es' 'yarn:build:types && yarn build:types:downlevel'", + "build:cjs": "node ../../scripts/inline protocol-http", + "build:es": "yarn g:tsc -p tsconfig.es.json", + "build:types": "yarn g:tsc -p tsconfig.types.json", + "build:types:downlevel": "rimraf dist-types/ts3.4 && downlevel-dts dist-types dist-types/ts3.4", + "stage-release": "rimraf ./.release && yarn pack && mkdir ./.release && tar zxvf ./package.tgz --directory ./.release && rm ./package.tgz", + "clean": "rimraf ./dist-* && rimraf *.tsbuildinfo || exit 0", + "lint": "eslint -c ../../.eslintrc.js \"src/**/*.ts\"", + "format": "prettier --config ../../prettier.config.js --ignore-path ../../.prettierignore --write \"**/*.{ts,md,json}\"", + "test": "yarn g:vitest run", + "test:watch": "yarn g:vitest watch" + }, + "main": "./dist-cjs/index.js", + "module": "./dist-es/index.js", + "types": "./dist-types/index.d.ts", + "author": { + "name": "AWS Smithy Team", + "email": "", + "url": "https://smithy.io" + }, + "license": "Apache-2.0", + "dependencies": { + "@smithy/types": "^4.2.0", + "tslib": "^2.6.2" + }, + "engines": { + "node": ">=18.0.0" + }, + "typesVersions": { + "<4.0": { + "dist-types/*": [ + "dist-types/ts3.4/*" + ] + } + }, + "files": [ + "dist-*/**" + ], + "homepage": "https://github.com/smithy-lang/smithy-typescript/tree/main/packages/protocol-http", + "repository": { + "type": "git", + "url": "https://github.com/smithy-lang/smithy-typescript.git", + "directory": "packages/protocol-http" + }, + "devDependencies": { + "concurrently": "7.0.0", + "downlevel-dts": "0.10.1", + "rimraf": "3.0.2", + "typedoc": "0.23.23" + }, + "typedoc": { + 
"entryPoint": "src/index.ts" + }, + "publishConfig": { + "directory": ".release/package" + } +} \ No newline at end of file diff --git a/amplify/functions/fetchDocuments/node_modules/@smithy/querystring-builder/LICENSE b/amplify/functions/fetchDocuments/node_modules/@smithy/querystring-builder/LICENSE new file mode 100644 index 0000000..dd65ae0 --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@smithy/querystring-builder/LICENSE @@ -0,0 +1,201 @@ + Apache License + Version 2.0, January 2004 + http://www.apache.org/licenses/ + + TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION + + 1. Definitions. + + "License" shall mean the terms and conditions for use, reproduction, + and distribution as defined by Sections 1 through 9 of this document. + + "Licensor" shall mean the copyright owner or entity authorized by + the copyright owner that is granting the License. + + "Legal Entity" shall mean the union of the acting entity and all + other entities that control, are controlled by, or are under common + control with that entity. For the purposes of this definition, + "control" means (i) the power, direct or indirect, to cause the + direction or management of such entity, whether by contract or + otherwise, or (ii) ownership of fifty percent (50%) or more of the + outstanding shares, or (iii) beneficial ownership of such entity. + + "You" (or "Your") shall mean an individual or Legal Entity + exercising permissions granted by this License. + + "Source" form shall mean the preferred form for making modifications, + including but not limited to software source code, documentation + source, and configuration files. + + "Object" form shall mean any form resulting from mechanical + transformation or translation of a Source form, including but + not limited to compiled object code, generated documentation, + and conversions to other media types. 
+ + "Work" shall mean the work of authorship, whether in Source or + Object form, made available under the License, as indicated by a + copyright notice that is included in or attached to the work + (an example is provided in the Appendix below). + + "Derivative Works" shall mean any work, whether in Source or Object + form, that is based on (or derived from) the Work and for which the + editorial revisions, annotations, elaborations, or other modifications + represent, as a whole, an original work of authorship. For the purposes + of this License, Derivative Works shall not include works that remain + separable from, or merely link (or bind by name) to the interfaces of, + the Work and Derivative Works thereof. + + "Contribution" shall mean any work of authorship, including + the original version of the Work and any modifications or additions + to that Work or Derivative Works thereof, that is intentionally + submitted to Licensor for inclusion in the Work by the copyright owner + or by an individual or Legal Entity authorized to submit on behalf of + the copyright owner. For the purposes of this definition, "submitted" + means any form of electronic, verbal, or written communication sent + to the Licensor or its representatives, including but not limited to + communication on electronic mailing lists, source code control systems, + and issue tracking systems that are managed by, or on behalf of, the + Licensor for the purpose of discussing and improving the Work, but + excluding communication that is conspicuously marked or otherwise + designated in writing by the copyright owner as "Not a Contribution." + + "Contributor" shall mean Licensor and any individual or Legal Entity + on behalf of whom a Contribution has been received by Licensor and + subsequently incorporated within the Work. + + 2. Grant of Copyright License. 
Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + copyright license to reproduce, prepare Derivative Works of, + publicly display, publicly perform, sublicense, and distribute the + Work and such Derivative Works in Source or Object form. + + 3. Grant of Patent License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + (except as stated in this section) patent license to make, have made, + use, offer to sell, sell, import, and otherwise transfer the Work, + where such license applies only to those patent claims licensable + by such Contributor that are necessarily infringed by their + Contribution(s) alone or by combination of their Contribution(s) + with the Work to which such Contribution(s) was submitted. If You + institute patent litigation against any entity (including a + cross-claim or counterclaim in a lawsuit) alleging that the Work + or a Contribution incorporated within the Work constitutes direct + or contributory patent infringement, then any patent licenses + granted to You under this License for that Work shall terminate + as of the date such litigation is filed. + + 4. Redistribution. 
You may reproduce and distribute copies of the + Work or Derivative Works thereof in any medium, with or without + modifications, and in Source or Object form, provided that You + meet the following conditions: + + (a) You must give any other recipients of the Work or + Derivative Works a copy of this License; and + + (b) You must cause any modified files to carry prominent notices + stating that You changed the files; and + + (c) You must retain, in the Source form of any Derivative Works + that You distribute, all copyright, patent, trademark, and + attribution notices from the Source form of the Work, + excluding those notices that do not pertain to any part of + the Derivative Works; and + + (d) If the Work includes a "NOTICE" text file as part of its + distribution, then any Derivative Works that You distribute must + include a readable copy of the attribution notices contained + within such NOTICE file, excluding those notices that do not + pertain to any part of the Derivative Works, in at least one + of the following places: within a NOTICE text file distributed + as part of the Derivative Works; within the Source form or + documentation, if provided along with the Derivative Works; or, + within a display generated by the Derivative Works, if and + wherever such third-party notices normally appear. The contents + of the NOTICE file are for informational purposes only and + do not modify the License. You may add Your own attribution + notices within Derivative Works that You distribute, alongside + or as an addendum to the NOTICE text from the Work, provided + that such additional attribution notices cannot be construed + as modifying the License. 
+ + You may add Your own copyright statement to Your modifications and + may provide additional or different license terms and conditions + for use, reproduction, or distribution of Your modifications, or + for any such Derivative Works as a whole, provided Your use, + reproduction, and distribution of the Work otherwise complies with + the conditions stated in this License. + + 5. Submission of Contributions. Unless You explicitly state otherwise, + any Contribution intentionally submitted for inclusion in the Work + by You to the Licensor shall be under the terms and conditions of + this License, without any additional terms or conditions. + Notwithstanding the above, nothing herein shall supersede or modify + the terms of any separate license agreement you may have executed + with Licensor regarding such Contributions. + + 6. Trademarks. This License does not grant permission to use the trade + names, trademarks, service marks, or product names of the Licensor, + except as required for reasonable and customary use in describing the + origin of the Work and reproducing the content of the NOTICE file. + + 7. Disclaimer of Warranty. Unless required by applicable law or + agreed to in writing, Licensor provides the Work (and each + Contributor provides its Contributions) on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or + implied, including, without limitation, any warranties or conditions + of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A + PARTICULAR PURPOSE. You are solely responsible for determining the + appropriateness of using or redistributing the Work and assume any + risks associated with Your exercise of permissions under this License. + + 8. Limitation of Liability. 
In no event and under no legal theory, + whether in tort (including negligence), contract, or otherwise, + unless required by applicable law (such as deliberate and grossly + negligent acts) or agreed to in writing, shall any Contributor be + liable to You for damages, including any direct, indirect, special, + incidental, or consequential damages of any character arising as a + result of this License or out of the use or inability to use the + Work (including but not limited to damages for loss of goodwill, + work stoppage, computer failure or malfunction, or any and all + other commercial damages or losses), even if such Contributor + has been advised of the possibility of such damages. + + 9. Accepting Warranty or Additional Liability. While redistributing + the Work or Derivative Works thereof, You may choose to offer, + and charge a fee for, acceptance of support, warranty, indemnity, + or other liability obligations and/or rights consistent with this + License. However, in accepting such obligations, You may act only + on Your own behalf and on Your sole responsibility, not on behalf + of any other Contributor, and only if You agree to indemnify, + defend, and hold each Contributor harmless for any liability + incurred by, or claims asserted against, such Contributor by reason + of your accepting any such warranty or additional liability. + + END OF TERMS AND CONDITIONS + + APPENDIX: How to apply the Apache License to your work. + + To apply the Apache License to your work, attach the following + boilerplate notice, with the fields enclosed by brackets "{}" + replaced with your own identifying information. (Don't include + the brackets!) The text should be enclosed in the appropriate + comment syntax for the file format. We also recommend that a + file or class name and description of purpose be included on the + same "printed page" as the copyright notice for easier + identification within third-party archives. + + Copyright 2018-2020 Amazon.com, Inc. 
or its affiliates. All Rights Reserved. + + Licensed under the Apache License, Version 2.0 (the "License"); + you may not use this file except in compliance with the License. + You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + + Unless required by applicable law or agreed to in writing, software + distributed under the License is distributed on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + See the License for the specific language governing permissions and + limitations under the License. diff --git a/amplify/functions/fetchDocuments/node_modules/@smithy/querystring-builder/README.md b/amplify/functions/fetchDocuments/node_modules/@smithy/querystring-builder/README.md new file mode 100644 index 0000000..00275da --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@smithy/querystring-builder/README.md @@ -0,0 +1,10 @@ +# @smithy/querystring-builder + +[![NPM version](https://img.shields.io/npm/v/@smithy/querystring-builder/latest.svg)](https://www.npmjs.com/package/@smithy/querystring-builder) +[![NPM downloads](https://img.shields.io/npm/dm/@smithy/querystring-builder.svg)](https://www.npmjs.com/package/@smithy/querystring-builder) + +> An internal package + +## Usage + +You probably shouldn't, at least directly. 
diff --git a/amplify/functions/fetchDocuments/node_modules/@smithy/querystring-builder/dist-cjs/index.js b/amplify/functions/fetchDocuments/node_modules/@smithy/querystring-builder/dist-cjs/index.js new file mode 100644 index 0000000..7030242 --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@smithy/querystring-builder/dist-cjs/index.js @@ -0,0 +1,52 @@ +var __defProp = Object.defineProperty; +var __getOwnPropDesc = Object.getOwnPropertyDescriptor; +var __getOwnPropNames = Object.getOwnPropertyNames; +var __hasOwnProp = Object.prototype.hasOwnProperty; +var __name = (target, value) => __defProp(target, "name", { value, configurable: true }); +var __export = (target, all) => { + for (var name in all) + __defProp(target, name, { get: all[name], enumerable: true }); +}; +var __copyProps = (to, from, except, desc) => { + if (from && typeof from === "object" || typeof from === "function") { + for (let key of __getOwnPropNames(from)) + if (!__hasOwnProp.call(to, key) && key !== except) + __defProp(to, key, { get: () => from[key], enumerable: !(desc = __getOwnPropDesc(from, key)) || desc.enumerable }); + } + return to; +}; +var __toCommonJS = (mod) => __copyProps(__defProp({}, "__esModule", { value: true }), mod); + +// src/index.ts +var src_exports = {}; +__export(src_exports, { + buildQueryString: () => buildQueryString +}); +module.exports = __toCommonJS(src_exports); +var import_util_uri_escape = require("@smithy/util-uri-escape"); +function buildQueryString(query) { + const parts = []; + for (let key of Object.keys(query).sort()) { + const value = query[key]; + key = (0, import_util_uri_escape.escapeUri)(key); + if (Array.isArray(value)) { + for (let i = 0, iLen = value.length; i < iLen; i++) { + parts.push(`${key}=${(0, import_util_uri_escape.escapeUri)(value[i])}`); + } + } else { + let qsEntry = key; + if (value || typeof value === "string") { + qsEntry += `=${(0, import_util_uri_escape.escapeUri)(value)}`; + } + parts.push(qsEntry); + } + } + 
return parts.join("&"); +} +__name(buildQueryString, "buildQueryString"); +// Annotate the CommonJS export names for ESM import in node: + +0 && (module.exports = { + buildQueryString +}); + diff --git a/amplify/functions/fetchDocuments/node_modules/@smithy/querystring-builder/dist-es/index.js b/amplify/functions/fetchDocuments/node_modules/@smithy/querystring-builder/dist-es/index.js new file mode 100644 index 0000000..fbc7684 --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@smithy/querystring-builder/dist-es/index.js @@ -0,0 +1,21 @@ +import { escapeUri } from "@smithy/util-uri-escape"; +export function buildQueryString(query) { + const parts = []; + for (let key of Object.keys(query).sort()) { + const value = query[key]; + key = escapeUri(key); + if (Array.isArray(value)) { + for (let i = 0, iLen = value.length; i < iLen; i++) { + parts.push(`${key}=${escapeUri(value[i])}`); + } + } + else { + let qsEntry = key; + if (value || typeof value === "string") { + qsEntry += `=${escapeUri(value)}`; + } + parts.push(qsEntry); + } + } + return parts.join("&"); +} diff --git a/amplify/functions/fetchDocuments/node_modules/@smithy/querystring-builder/dist-types/index.d.ts b/amplify/functions/fetchDocuments/node_modules/@smithy/querystring-builder/dist-types/index.d.ts new file mode 100644 index 0000000..538b1b0 --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@smithy/querystring-builder/dist-types/index.d.ts @@ -0,0 +1,5 @@ +import { QueryParameterBag } from "@smithy/types"; +/** + * @internal + */ +export declare function buildQueryString(query: QueryParameterBag): string; diff --git a/amplify/functions/fetchDocuments/node_modules/@smithy/querystring-builder/dist-types/ts3.4/index.d.ts b/amplify/functions/fetchDocuments/node_modules/@smithy/querystring-builder/dist-types/ts3.4/index.d.ts new file mode 100644 index 0000000..1f866f3 --- /dev/null +++ 
b/amplify/functions/fetchDocuments/node_modules/@smithy/querystring-builder/dist-types/ts3.4/index.d.ts @@ -0,0 +1,5 @@ +import { QueryParameterBag } from "@smithy/types"; +/** + * @internal + */ +export declare function buildQueryString(query: QueryParameterBag): string; diff --git a/amplify/functions/fetchDocuments/node_modules/@smithy/querystring-builder/package.json b/amplify/functions/fetchDocuments/node_modules/@smithy/querystring-builder/package.json new file mode 100644 index 0000000..d144f0a --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@smithy/querystring-builder/package.json @@ -0,0 +1,60 @@ +{ + "name": "@smithy/querystring-builder", + "version": "4.0.2", + "scripts": { + "build": "concurrently 'yarn:build:cjs' 'yarn:build:es' 'yarn:build:types && yarn build:types:downlevel'", + "build:cjs": "node ../../scripts/inline querystring-builder", + "build:es": "yarn g:tsc -p tsconfig.es.json", + "build:types": "yarn g:tsc -p tsconfig.types.json", + "build:types:downlevel": "rimraf dist-types/ts3.4 && downlevel-dts dist-types dist-types/ts3.4", + "stage-release": "rimraf ./.release && yarn pack && mkdir ./.release && tar zxvf ./package.tgz --directory ./.release && rm ./package.tgz", + "clean": "rimraf ./dist-* && rimraf *.tsbuildinfo || exit 0", + "lint": "eslint -c ../../.eslintrc.js \"src/**/*.ts\"", + "format": "prettier --config ../../prettier.config.js --ignore-path ../../.prettierignore --write \"**/*.{ts,md,json}\"", + "test": "exit 0" + }, + "main": "./dist-cjs/index.js", + "module": "./dist-es/index.js", + "types": "./dist-types/index.d.ts", + "author": { + "name": "AWS SDK for JavaScript Team", + "url": "https://aws.amazon.com/javascript/" + }, + "license": "Apache-2.0", + "dependencies": { + "@smithy/types": "^4.2.0", + "@smithy/util-uri-escape": "^4.0.0", + "tslib": "^2.6.2" + }, + "engines": { + "node": ">=18.0.0" + }, + "typesVersions": { + "<4.0": { + "dist-types/*": [ + "dist-types/ts3.4/*" + ] + } + }, + "files": [ + 
"dist-*/**" + ], + "homepage": "https://github.com/smithy-lang/smithy-typescript/tree/main/packages/querystring-builder", + "repository": { + "type": "git", + "url": "https://github.com/smithy-lang/smithy-typescript.git", + "directory": "packages/querystring-builder" + }, + "devDependencies": { + "concurrently": "7.0.0", + "downlevel-dts": "0.10.1", + "rimraf": "3.0.2", + "typedoc": "0.23.23" + }, + "typedoc": { + "entryPoint": "src/index.ts" + }, + "publishConfig": { + "directory": ".release/package" + } +} \ No newline at end of file diff --git a/amplify/functions/fetchDocuments/node_modules/@smithy/querystring-parser/LICENSE b/amplify/functions/fetchDocuments/node_modules/@smithy/querystring-parser/LICENSE new file mode 100644 index 0000000..dd65ae0 --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@smithy/querystring-parser/LICENSE @@ -0,0 +1,201 @@ + Apache License + Version 2.0, January 2004 + http://www.apache.org/licenses/ + + TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION + + 1. Definitions. + + "License" shall mean the terms and conditions for use, reproduction, + and distribution as defined by Sections 1 through 9 of this document. + + "Licensor" shall mean the copyright owner or entity authorized by + the copyright owner that is granting the License. + + "Legal Entity" shall mean the union of the acting entity and all + other entities that control, are controlled by, or are under common + control with that entity. For the purposes of this definition, + "control" means (i) the power, direct or indirect, to cause the + direction or management of such entity, whether by contract or + otherwise, or (ii) ownership of fifty percent (50%) or more of the + outstanding shares, or (iii) beneficial ownership of such entity. + + "You" (or "Your") shall mean an individual or Legal Entity + exercising permissions granted by this License. 
+ + "Source" form shall mean the preferred form for making modifications, + including but not limited to software source code, documentation + source, and configuration files. + + "Object" form shall mean any form resulting from mechanical + transformation or translation of a Source form, including but + not limited to compiled object code, generated documentation, + and conversions to other media types. + + "Work" shall mean the work of authorship, whether in Source or + Object form, made available under the License, as indicated by a + copyright notice that is included in or attached to the work + (an example is provided in the Appendix below). + + "Derivative Works" shall mean any work, whether in Source or Object + form, that is based on (or derived from) the Work and for which the + editorial revisions, annotations, elaborations, or other modifications + represent, as a whole, an original work of authorship. For the purposes + of this License, Derivative Works shall not include works that remain + separable from, or merely link (or bind by name) to the interfaces of, + the Work and Derivative Works thereof. + + "Contribution" shall mean any work of authorship, including + the original version of the Work and any modifications or additions + to that Work or Derivative Works thereof, that is intentionally + submitted to Licensor for inclusion in the Work by the copyright owner + or by an individual or Legal Entity authorized to submit on behalf of + the copyright owner. 
For the purposes of this definition, "submitted" + means any form of electronic, verbal, or written communication sent + to the Licensor or its representatives, including but not limited to + communication on electronic mailing lists, source code control systems, + and issue tracking systems that are managed by, or on behalf of, the + Licensor for the purpose of discussing and improving the Work, but + excluding communication that is conspicuously marked or otherwise + designated in writing by the copyright owner as "Not a Contribution." + + "Contributor" shall mean Licensor and any individual or Legal Entity + on behalf of whom a Contribution has been received by Licensor and + subsequently incorporated within the Work. + + 2. Grant of Copyright License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + copyright license to reproduce, prepare Derivative Works of, + publicly display, publicly perform, sublicense, and distribute the + Work and such Derivative Works in Source or Object form. + + 3. Grant of Patent License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + (except as stated in this section) patent license to make, have made, + use, offer to sell, sell, import, and otherwise transfer the Work, + where such license applies only to those patent claims licensable + by such Contributor that are necessarily infringed by their + Contribution(s) alone or by combination of their Contribution(s) + with the Work to which such Contribution(s) was submitted. 
If You + institute patent litigation against any entity (including a + cross-claim or counterclaim in a lawsuit) alleging that the Work + or a Contribution incorporated within the Work constitutes direct + or contributory patent infringement, then any patent licenses + granted to You under this License for that Work shall terminate + as of the date such litigation is filed. + + 4. Redistribution. You may reproduce and distribute copies of the + Work or Derivative Works thereof in any medium, with or without + modifications, and in Source or Object form, provided that You + meet the following conditions: + + (a) You must give any other recipients of the Work or + Derivative Works a copy of this License; and + + (b) You must cause any modified files to carry prominent notices + stating that You changed the files; and + + (c) You must retain, in the Source form of any Derivative Works + that You distribute, all copyright, patent, trademark, and + attribution notices from the Source form of the Work, + excluding those notices that do not pertain to any part of + the Derivative Works; and + + (d) If the Work includes a "NOTICE" text file as part of its + distribution, then any Derivative Works that You distribute must + include a readable copy of the attribution notices contained + within such NOTICE file, excluding those notices that do not + pertain to any part of the Derivative Works, in at least one + of the following places: within a NOTICE text file distributed + as part of the Derivative Works; within the Source form or + documentation, if provided along with the Derivative Works; or, + within a display generated by the Derivative Works, if and + wherever such third-party notices normally appear. The contents + of the NOTICE file are for informational purposes only and + do not modify the License. 
You may add Your own attribution + notices within Derivative Works that You distribute, alongside + or as an addendum to the NOTICE text from the Work, provided + that such additional attribution notices cannot be construed + as modifying the License. + + You may add Your own copyright statement to Your modifications and + may provide additional or different license terms and conditions + for use, reproduction, or distribution of Your modifications, or + for any such Derivative Works as a whole, provided Your use, + reproduction, and distribution of the Work otherwise complies with + the conditions stated in this License. + + 5. Submission of Contributions. Unless You explicitly state otherwise, + any Contribution intentionally submitted for inclusion in the Work + by You to the Licensor shall be under the terms and conditions of + this License, without any additional terms or conditions. + Notwithstanding the above, nothing herein shall supersede or modify + the terms of any separate license agreement you may have executed + with Licensor regarding such Contributions. + + 6. Trademarks. This License does not grant permission to use the trade + names, trademarks, service marks, or product names of the Licensor, + except as required for reasonable and customary use in describing the + origin of the Work and reproducing the content of the NOTICE file. + + 7. Disclaimer of Warranty. Unless required by applicable law or + agreed to in writing, Licensor provides the Work (and each + Contributor provides its Contributions) on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or + implied, including, without limitation, any warranties or conditions + of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A + PARTICULAR PURPOSE. You are solely responsible for determining the + appropriateness of using or redistributing the Work and assume any + risks associated with Your exercise of permissions under this License. + + 8. 
Limitation of Liability. In no event and under no legal theory, + whether in tort (including negligence), contract, or otherwise, + unless required by applicable law (such as deliberate and grossly + negligent acts) or agreed to in writing, shall any Contributor be + liable to You for damages, including any direct, indirect, special, + incidental, or consequential damages of any character arising as a + result of this License or out of the use or inability to use the + Work (including but not limited to damages for loss of goodwill, + work stoppage, computer failure or malfunction, or any and all + other commercial damages or losses), even if such Contributor + has been advised of the possibility of such damages. + + 9. Accepting Warranty or Additional Liability. While redistributing + the Work or Derivative Works thereof, You may choose to offer, + and charge a fee for, acceptance of support, warranty, indemnity, + or other liability obligations and/or rights consistent with this + License. However, in accepting such obligations, You may act only + on Your own behalf and on Your sole responsibility, not on behalf + of any other Contributor, and only if You agree to indemnify, + defend, and hold each Contributor harmless for any liability + incurred by, or claims asserted against, such Contributor by reason + of your accepting any such warranty or additional liability. + + END OF TERMS AND CONDITIONS + + APPENDIX: How to apply the Apache License to your work. + + To apply the Apache License to your work, attach the following + boilerplate notice, with the fields enclosed by brackets "{}" + replaced with your own identifying information. (Don't include + the brackets!) The text should be enclosed in the appropriate + comment syntax for the file format. We also recommend that a + file or class name and description of purpose be included on the + same "printed page" as the copyright notice for easier + identification within third-party archives. 
+ + Copyright 2018-2020 Amazon.com, Inc. or its affiliates. All Rights Reserved. + + Licensed under the Apache License, Version 2.0 (the "License"); + you may not use this file except in compliance with the License. + You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + + Unless required by applicable law or agreed to in writing, software + distributed under the License is distributed on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + See the License for the specific language governing permissions and + limitations under the License. diff --git a/amplify/functions/fetchDocuments/node_modules/@smithy/querystring-parser/README.md b/amplify/functions/fetchDocuments/node_modules/@smithy/querystring-parser/README.md new file mode 100644 index 0000000..02dcf51 --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@smithy/querystring-parser/README.md @@ -0,0 +1,10 @@ +# @smithy/querystring-parser + +[![NPM version](https://img.shields.io/npm/v/@smithy/querystring-parser/latest.svg)](https://www.npmjs.com/package/@smithy/querystring-parser) +[![NPM downloads](https://img.shields.io/npm/dm/@smithy/querystring-parser.svg)](https://www.npmjs.com/package/@smithy/querystring-parser) + +> An internal package + +## Usage + +You probably shouldn't, at least directly. 
diff --git a/amplify/functions/fetchDocuments/node_modules/@smithy/querystring-parser/dist-cjs/index.js b/amplify/functions/fetchDocuments/node_modules/@smithy/querystring-parser/dist-cjs/index.js new file mode 100644 index 0000000..924647c --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@smithy/querystring-parser/dist-cjs/index.js @@ -0,0 +1,53 @@ +var __defProp = Object.defineProperty; +var __getOwnPropDesc = Object.getOwnPropertyDescriptor; +var __getOwnPropNames = Object.getOwnPropertyNames; +var __hasOwnProp = Object.prototype.hasOwnProperty; +var __name = (target, value) => __defProp(target, "name", { value, configurable: true }); +var __export = (target, all) => { + for (var name in all) + __defProp(target, name, { get: all[name], enumerable: true }); +}; +var __copyProps = (to, from, except, desc) => { + if (from && typeof from === "object" || typeof from === "function") { + for (let key of __getOwnPropNames(from)) + if (!__hasOwnProp.call(to, key) && key !== except) + __defProp(to, key, { get: () => from[key], enumerable: !(desc = __getOwnPropDesc(from, key)) || desc.enumerable }); + } + return to; +}; +var __toCommonJS = (mod) => __copyProps(__defProp({}, "__esModule", { value: true }), mod); + +// src/index.ts +var src_exports = {}; +__export(src_exports, { + parseQueryString: () => parseQueryString +}); +module.exports = __toCommonJS(src_exports); +function parseQueryString(querystring) { + const query = {}; + querystring = querystring.replace(/^\?/, ""); + if (querystring) { + for (const pair of querystring.split("&")) { + let [key, value = null] = pair.split("="); + key = decodeURIComponent(key); + if (value) { + value = decodeURIComponent(value); + } + if (!(key in query)) { + query[key] = value; + } else if (Array.isArray(query[key])) { + query[key].push(value); + } else { + query[key] = [query[key], value]; + } + } + } + return query; +} +__name(parseQueryString, "parseQueryString"); +// Annotate the CommonJS export names for ESM 
import in node: + +0 && (module.exports = { + parseQueryString +}); + diff --git a/amplify/functions/fetchDocuments/node_modules/@smithy/querystring-parser/dist-es/index.js b/amplify/functions/fetchDocuments/node_modules/@smithy/querystring-parser/dist-es/index.js new file mode 100644 index 0000000..bd7bf00 --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@smithy/querystring-parser/dist-es/index.js @@ -0,0 +1,23 @@ +export function parseQueryString(querystring) { + const query = {}; + querystring = querystring.replace(/^\?/, ""); + if (querystring) { + for (const pair of querystring.split("&")) { + let [key, value = null] = pair.split("="); + key = decodeURIComponent(key); + if (value) { + value = decodeURIComponent(value); + } + if (!(key in query)) { + query[key] = value; + } + else if (Array.isArray(query[key])) { + query[key].push(value); + } + else { + query[key] = [query[key], value]; + } + } + } + return query; +} diff --git a/amplify/functions/fetchDocuments/node_modules/@smithy/querystring-parser/dist-types/index.d.ts b/amplify/functions/fetchDocuments/node_modules/@smithy/querystring-parser/dist-types/index.d.ts new file mode 100644 index 0000000..fdc1ba5 --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@smithy/querystring-parser/dist-types/index.d.ts @@ -0,0 +1,5 @@ +import { QueryParameterBag } from "@smithy/types"; +/** + * @internal + */ +export declare function parseQueryString(querystring: string): QueryParameterBag; diff --git a/amplify/functions/fetchDocuments/node_modules/@smithy/querystring-parser/dist-types/ts3.4/index.d.ts b/amplify/functions/fetchDocuments/node_modules/@smithy/querystring-parser/dist-types/ts3.4/index.d.ts new file mode 100644 index 0000000..8bb747d --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@smithy/querystring-parser/dist-types/ts3.4/index.d.ts @@ -0,0 +1,5 @@ +import { QueryParameterBag } from "@smithy/types"; +/** + * @internal + */ +export declare function 
parseQueryString(querystring: string): QueryParameterBag; diff --git a/amplify/functions/fetchDocuments/node_modules/@smithy/querystring-parser/package.json b/amplify/functions/fetchDocuments/node_modules/@smithy/querystring-parser/package.json new file mode 100644 index 0000000..9a27e7e --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@smithy/querystring-parser/package.json @@ -0,0 +1,60 @@ +{ + "name": "@smithy/querystring-parser", + "version": "4.0.2", + "scripts": { + "build": "concurrently 'yarn:build:cjs' 'yarn:build:es' 'yarn:build:types && yarn build:types:downlevel'", + "build:cjs": "node ../../scripts/inline querystring-parser", + "build:es": "yarn g:tsc -p tsconfig.es.json", + "build:types": "yarn g:tsc -p tsconfig.types.json", + "build:types:downlevel": "rimraf dist-types/ts3.4 && downlevel-dts dist-types dist-types/ts3.4", + "stage-release": "rimraf ./.release && yarn pack && mkdir ./.release && tar zxvf ./package.tgz --directory ./.release && rm ./package.tgz", + "clean": "rimraf ./dist-* && rimraf *.tsbuildinfo || exit 0", + "lint": "eslint -c ../../.eslintrc.js \"src/**/*.ts\"", + "format": "prettier --config ../../prettier.config.js --ignore-path ../../.prettierignore --write \"**/*.{ts,md,json}\"", + "test": "yarn g:vitest run", + "test:watch": "yarn g:vitest watch" + }, + "main": "./dist-cjs/index.js", + "module": "./dist-es/index.js", + "types": "./dist-types/index.d.ts", + "author": { + "name": "AWS SDK for JavaScript Team", + "url": "https://aws.amazon.com/javascript/" + }, + "license": "Apache-2.0", + "dependencies": { + "@smithy/types": "^4.2.0", + "tslib": "^2.6.2" + }, + "engines": { + "node": ">=18.0.0" + }, + "typesVersions": { + "<4.0": { + "dist-types/*": [ + "dist-types/ts3.4/*" + ] + } + }, + "files": [ + "dist-*/**" + ], + "homepage": "https://github.com/smithy-lang/smithy-typescript/tree/main/packages/querystring-parser", + "repository": { + "type": "git", + "url": 
"https://github.com/smithy-lang/smithy-typescript.git", + "directory": "packages/querystring-parser" + }, + "devDependencies": { + "concurrently": "7.0.0", + "downlevel-dts": "0.10.1", + "rimraf": "3.0.2", + "typedoc": "0.23.23" + }, + "typedoc": { + "entryPoint": "src/index.ts" + }, + "publishConfig": { + "directory": ".release/package" + } +} \ No newline at end of file diff --git a/amplify/functions/fetchDocuments/node_modules/@smithy/service-error-classification/LICENSE b/amplify/functions/fetchDocuments/node_modules/@smithy/service-error-classification/LICENSE new file mode 100644 index 0000000..dd65ae0 --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@smithy/service-error-classification/LICENSE @@ -0,0 +1,201 @@ + Apache License + Version 2.0, January 2004 + http://www.apache.org/licenses/ + + TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION + + 1. Definitions. + + "License" shall mean the terms and conditions for use, reproduction, + and distribution as defined by Sections 1 through 9 of this document. + + "Licensor" shall mean the copyright owner or entity authorized by + the copyright owner that is granting the License. + + "Legal Entity" shall mean the union of the acting entity and all + other entities that control, are controlled by, or are under common + control with that entity. For the purposes of this definition, + "control" means (i) the power, direct or indirect, to cause the + direction or management of such entity, whether by contract or + otherwise, or (ii) ownership of fifty percent (50%) or more of the + outstanding shares, or (iii) beneficial ownership of such entity. + + "You" (or "Your") shall mean an individual or Legal Entity + exercising permissions granted by this License. + + "Source" form shall mean the preferred form for making modifications, + including but not limited to software source code, documentation + source, and configuration files. 
+ + "Object" form shall mean any form resulting from mechanical + transformation or translation of a Source form, including but + not limited to compiled object code, generated documentation, + and conversions to other media types. + + "Work" shall mean the work of authorship, whether in Source or + Object form, made available under the License, as indicated by a + copyright notice that is included in or attached to the work + (an example is provided in the Appendix below). + + "Derivative Works" shall mean any work, whether in Source or Object + form, that is based on (or derived from) the Work and for which the + editorial revisions, annotations, elaborations, or other modifications + represent, as a whole, an original work of authorship. For the purposes + of this License, Derivative Works shall not include works that remain + separable from, or merely link (or bind by name) to the interfaces of, + the Work and Derivative Works thereof. + + "Contribution" shall mean any work of authorship, including + the original version of the Work and any modifications or additions + to that Work or Derivative Works thereof, that is intentionally + submitted to Licensor for inclusion in the Work by the copyright owner + or by an individual or Legal Entity authorized to submit on behalf of + the copyright owner. For the purposes of this definition, "submitted" + means any form of electronic, verbal, or written communication sent + to the Licensor or its representatives, including but not limited to + communication on electronic mailing lists, source code control systems, + and issue tracking systems that are managed by, or on behalf of, the + Licensor for the purpose of discussing and improving the Work, but + excluding communication that is conspicuously marked or otherwise + designated in writing by the copyright owner as "Not a Contribution." 
+ + "Contributor" shall mean Licensor and any individual or Legal Entity + on behalf of whom a Contribution has been received by Licensor and + subsequently incorporated within the Work. + + 2. Grant of Copyright License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + copyright license to reproduce, prepare Derivative Works of, + publicly display, publicly perform, sublicense, and distribute the + Work and such Derivative Works in Source or Object form. + + 3. Grant of Patent License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + (except as stated in this section) patent license to make, have made, + use, offer to sell, sell, import, and otherwise transfer the Work, + where such license applies only to those patent claims licensable + by such Contributor that are necessarily infringed by their + Contribution(s) alone or by combination of their Contribution(s) + with the Work to which such Contribution(s) was submitted. If You + institute patent litigation against any entity (including a + cross-claim or counterclaim in a lawsuit) alleging that the Work + or a Contribution incorporated within the Work constitutes direct + or contributory patent infringement, then any patent licenses + granted to You under this License for that Work shall terminate + as of the date such litigation is filed. + + 4. Redistribution. 
You may reproduce and distribute copies of the + Work or Derivative Works thereof in any medium, with or without + modifications, and in Source or Object form, provided that You + meet the following conditions: + + (a) You must give any other recipients of the Work or + Derivative Works a copy of this License; and + + (b) You must cause any modified files to carry prominent notices + stating that You changed the files; and + + (c) You must retain, in the Source form of any Derivative Works + that You distribute, all copyright, patent, trademark, and + attribution notices from the Source form of the Work, + excluding those notices that do not pertain to any part of + the Derivative Works; and + + (d) If the Work includes a "NOTICE" text file as part of its + distribution, then any Derivative Works that You distribute must + include a readable copy of the attribution notices contained + within such NOTICE file, excluding those notices that do not + pertain to any part of the Derivative Works, in at least one + of the following places: within a NOTICE text file distributed + as part of the Derivative Works; within the Source form or + documentation, if provided along with the Derivative Works; or, + within a display generated by the Derivative Works, if and + wherever such third-party notices normally appear. The contents + of the NOTICE file are for informational purposes only and + do not modify the License. You may add Your own attribution + notices within Derivative Works that You distribute, alongside + or as an addendum to the NOTICE text from the Work, provided + that such additional attribution notices cannot be construed + as modifying the License. 
+ + You may add Your own copyright statement to Your modifications and + may provide additional or different license terms and conditions + for use, reproduction, or distribution of Your modifications, or + for any such Derivative Works as a whole, provided Your use, + reproduction, and distribution of the Work otherwise complies with + the conditions stated in this License. + + 5. Submission of Contributions. Unless You explicitly state otherwise, + any Contribution intentionally submitted for inclusion in the Work + by You to the Licensor shall be under the terms and conditions of + this License, without any additional terms or conditions. + Notwithstanding the above, nothing herein shall supersede or modify + the terms of any separate license agreement you may have executed + with Licensor regarding such Contributions. + + 6. Trademarks. This License does not grant permission to use the trade + names, trademarks, service marks, or product names of the Licensor, + except as required for reasonable and customary use in describing the + origin of the Work and reproducing the content of the NOTICE file. + + 7. Disclaimer of Warranty. Unless required by applicable law or + agreed to in writing, Licensor provides the Work (and each + Contributor provides its Contributions) on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or + implied, including, without limitation, any warranties or conditions + of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A + PARTICULAR PURPOSE. You are solely responsible for determining the + appropriateness of using or redistributing the Work and assume any + risks associated with Your exercise of permissions under this License. + + 8. Limitation of Liability. 
In no event and under no legal theory, + whether in tort (including negligence), contract, or otherwise, + unless required by applicable law (such as deliberate and grossly + negligent acts) or agreed to in writing, shall any Contributor be + liable to You for damages, including any direct, indirect, special, + incidental, or consequential damages of any character arising as a + result of this License or out of the use or inability to use the + Work (including but not limited to damages for loss of goodwill, + work stoppage, computer failure or malfunction, or any and all + other commercial damages or losses), even if such Contributor + has been advised of the possibility of such damages. + + 9. Accepting Warranty or Additional Liability. While redistributing + the Work or Derivative Works thereof, You may choose to offer, + and charge a fee for, acceptance of support, warranty, indemnity, + or other liability obligations and/or rights consistent with this + License. However, in accepting such obligations, You may act only + on Your own behalf and on Your sole responsibility, not on behalf + of any other Contributor, and only if You agree to indemnify, + defend, and hold each Contributor harmless for any liability + incurred by, or claims asserted against, such Contributor by reason + of your accepting any such warranty or additional liability. + + END OF TERMS AND CONDITIONS + + APPENDIX: How to apply the Apache License to your work. + + To apply the Apache License to your work, attach the following + boilerplate notice, with the fields enclosed by brackets "{}" + replaced with your own identifying information. (Don't include + the brackets!) The text should be enclosed in the appropriate + comment syntax for the file format. We also recommend that a + file or class name and description of purpose be included on the + same "printed page" as the copyright notice for easier + identification within third-party archives. + + Copyright 2018-2020 Amazon.com, Inc. 
or its affiliates. All Rights Reserved. + + Licensed under the Apache License, Version 2.0 (the "License"); + you may not use this file except in compliance with the License. + You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + + Unless required by applicable law or agreed to in writing, software + distributed under the License is distributed on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + See the License for the specific language governing permissions and + limitations under the License. diff --git a/amplify/functions/fetchDocuments/node_modules/@smithy/service-error-classification/README.md b/amplify/functions/fetchDocuments/node_modules/@smithy/service-error-classification/README.md new file mode 100644 index 0000000..902dd43 --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@smithy/service-error-classification/README.md @@ -0,0 +1,4 @@ +# @smithy/service-error-classification + +[![NPM version](https://img.shields.io/npm/v/@smithy/service-error-classification/latest.svg)](https://www.npmjs.com/package/@smithy/service-error-classification) +[![NPM downloads](https://img.shields.io/npm/dm/@smithy/service-error-classification.svg)](https://www.npmjs.com/package/@smithy/service-error-classification) diff --git a/amplify/functions/fetchDocuments/node_modules/@smithy/service-error-classification/dist-cjs/constants.js b/amplify/functions/fetchDocuments/node_modules/@smithy/service-error-classification/dist-cjs/constants.js new file mode 100644 index 0000000..532e610 --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@smithy/service-error-classification/dist-cjs/constants.js @@ -0,0 +1 @@ +module.exports = require("./index.js"); \ No newline at end of file diff --git a/amplify/functions/fetchDocuments/node_modules/@smithy/service-error-classification/dist-cjs/index.js 
b/amplify/functions/fetchDocuments/node_modules/@smithy/service-error-classification/dist-cjs/index.js new file mode 100644 index 0000000..bcca2b3 --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@smithy/service-error-classification/dist-cjs/index.js @@ -0,0 +1,109 @@ +var __defProp = Object.defineProperty; +var __getOwnPropDesc = Object.getOwnPropertyDescriptor; +var __getOwnPropNames = Object.getOwnPropertyNames; +var __hasOwnProp = Object.prototype.hasOwnProperty; +var __name = (target, value) => __defProp(target, "name", { value, configurable: true }); +var __export = (target, all) => { + for (var name in all) + __defProp(target, name, { get: all[name], enumerable: true }); +}; +var __copyProps = (to, from, except, desc) => { + if (from && typeof from === "object" || typeof from === "function") { + for (let key of __getOwnPropNames(from)) + if (!__hasOwnProp.call(to, key) && key !== except) + __defProp(to, key, { get: () => from[key], enumerable: !(desc = __getOwnPropDesc(from, key)) || desc.enumerable }); + } + return to; +}; +var __toCommonJS = (mod) => __copyProps(__defProp({}, "__esModule", { value: true }), mod); + +// src/index.ts +var src_exports = {}; +__export(src_exports, { + isBrowserNetworkError: () => isBrowserNetworkError, + isClockSkewCorrectedError: () => isClockSkewCorrectedError, + isClockSkewError: () => isClockSkewError, + isRetryableByTrait: () => isRetryableByTrait, + isServerError: () => isServerError, + isThrottlingError: () => isThrottlingError, + isTransientError: () => isTransientError +}); +module.exports = __toCommonJS(src_exports); + +// src/constants.ts +var CLOCK_SKEW_ERROR_CODES = [ + "AuthFailure", + "InvalidSignatureException", + "RequestExpired", + "RequestInTheFuture", + "RequestTimeTooSkewed", + "SignatureDoesNotMatch" +]; +var THROTTLING_ERROR_CODES = [ + "BandwidthLimitExceeded", + "EC2ThrottledException", + "LimitExceededException", + "PriorRequestNotComplete", + "ProvisionedThroughputExceededException", 
+ "RequestLimitExceeded", + "RequestThrottled", + "RequestThrottledException", + "SlowDown", + "ThrottledException", + "Throttling", + "ThrottlingException", + "TooManyRequestsException", + "TransactionInProgressException" + // DynamoDB +]; +var TRANSIENT_ERROR_CODES = ["TimeoutError", "RequestTimeout", "RequestTimeoutException"]; +var TRANSIENT_ERROR_STATUS_CODES = [500, 502, 503, 504]; +var NODEJS_TIMEOUT_ERROR_CODES = ["ECONNRESET", "ECONNREFUSED", "EPIPE", "ETIMEDOUT"]; + +// src/index.ts +var isRetryableByTrait = /* @__PURE__ */ __name((error) => error.$retryable !== void 0, "isRetryableByTrait"); +var isClockSkewError = /* @__PURE__ */ __name((error) => CLOCK_SKEW_ERROR_CODES.includes(error.name), "isClockSkewError"); +var isClockSkewCorrectedError = /* @__PURE__ */ __name((error) => error.$metadata?.clockSkewCorrected, "isClockSkewCorrectedError"); +var isBrowserNetworkError = /* @__PURE__ */ __name((error) => { + const errorMessages = /* @__PURE__ */ new Set([ + "Failed to fetch", + // Chrome + "NetworkError when attempting to fetch resource", + // Firefox + "The Internet connection appears to be offline", + // Safari 16 + "Load failed", + // Safari 17+ + "Network request failed" + // `cross-fetch` + ]); + const isValid = error && error instanceof TypeError; + if (!isValid) { + return false; + } + return errorMessages.has(error.message); +}, "isBrowserNetworkError"); +var isThrottlingError = /* @__PURE__ */ __name((error) => error.$metadata?.httpStatusCode === 429 || THROTTLING_ERROR_CODES.includes(error.name) || error.$retryable?.throttling == true, "isThrottlingError"); +var isTransientError = /* @__PURE__ */ __name((error, depth = 0) => isClockSkewCorrectedError(error) || TRANSIENT_ERROR_CODES.includes(error.name) || NODEJS_TIMEOUT_ERROR_CODES.includes(error?.code || "") || TRANSIENT_ERROR_STATUS_CODES.includes(error.$metadata?.httpStatusCode || 0) || isBrowserNetworkError(error) || error.cause !== void 0 && depth <= 10 && isTransientError(error.cause, 
depth + 1), "isTransientError"); +var isServerError = /* @__PURE__ */ __name((error) => { + if (error.$metadata?.httpStatusCode !== void 0) { + const statusCode = error.$metadata.httpStatusCode; + if (500 <= statusCode && statusCode <= 599 && !isTransientError(error)) { + return true; + } + return false; + } + return false; +}, "isServerError"); +// Annotate the CommonJS export names for ESM import in node: + +0 && (module.exports = { + isRetryableByTrait, + isClockSkewError, + isClockSkewCorrectedError, + isBrowserNetworkError, + isThrottlingError, + isTransientError, + isServerError +}); + diff --git a/amplify/functions/fetchDocuments/node_modules/@smithy/service-error-classification/dist-es/constants.js b/amplify/functions/fetchDocuments/node_modules/@smithy/service-error-classification/dist-es/constants.js new file mode 100644 index 0000000..267443b --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@smithy/service-error-classification/dist-es/constants.js @@ -0,0 +1,27 @@ +export const CLOCK_SKEW_ERROR_CODES = [ + "AuthFailure", + "InvalidSignatureException", + "RequestExpired", + "RequestInTheFuture", + "RequestTimeTooSkewed", + "SignatureDoesNotMatch", +]; +export const THROTTLING_ERROR_CODES = [ + "BandwidthLimitExceeded", + "EC2ThrottledException", + "LimitExceededException", + "PriorRequestNotComplete", + "ProvisionedThroughputExceededException", + "RequestLimitExceeded", + "RequestThrottled", + "RequestThrottledException", + "SlowDown", + "ThrottledException", + "Throttling", + "ThrottlingException", + "TooManyRequestsException", + "TransactionInProgressException", +]; +export const TRANSIENT_ERROR_CODES = ["TimeoutError", "RequestTimeout", "RequestTimeoutException"]; +export const TRANSIENT_ERROR_STATUS_CODES = [500, 502, 503, 504]; +export const NODEJS_TIMEOUT_ERROR_CODES = ["ECONNRESET", "ECONNREFUSED", "EPIPE", "ETIMEDOUT"]; diff --git a/amplify/functions/fetchDocuments/node_modules/@smithy/service-error-classification/dist-es/index.js 
b/amplify/functions/fetchDocuments/node_modules/@smithy/service-error-classification/dist-es/index.js new file mode 100644 index 0000000..1da4aa9 --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@smithy/service-error-classification/dist-es/index.js @@ -0,0 +1,37 @@ +import { CLOCK_SKEW_ERROR_CODES, NODEJS_TIMEOUT_ERROR_CODES, THROTTLING_ERROR_CODES, TRANSIENT_ERROR_CODES, TRANSIENT_ERROR_STATUS_CODES, } from "./constants"; +export const isRetryableByTrait = (error) => error.$retryable !== undefined; +export const isClockSkewError = (error) => CLOCK_SKEW_ERROR_CODES.includes(error.name); +export const isClockSkewCorrectedError = (error) => error.$metadata?.clockSkewCorrected; +export const isBrowserNetworkError = (error) => { + const errorMessages = new Set([ + "Failed to fetch", + "NetworkError when attempting to fetch resource", + "The Internet connection appears to be offline", + "Load failed", + "Network request failed", + ]); + const isValid = error && error instanceof TypeError; + if (!isValid) { + return false; + } + return errorMessages.has(error.message); +}; +export const isThrottlingError = (error) => error.$metadata?.httpStatusCode === 429 || + THROTTLING_ERROR_CODES.includes(error.name) || + error.$retryable?.throttling == true; +export const isTransientError = (error, depth = 0) => isClockSkewCorrectedError(error) || + TRANSIENT_ERROR_CODES.includes(error.name) || + NODEJS_TIMEOUT_ERROR_CODES.includes(error?.code || "") || + TRANSIENT_ERROR_STATUS_CODES.includes(error.$metadata?.httpStatusCode || 0) || + isBrowserNetworkError(error) || + (error.cause !== undefined && depth <= 10 && isTransientError(error.cause, depth + 1)); +export const isServerError = (error) => { + if (error.$metadata?.httpStatusCode !== undefined) { + const statusCode = error.$metadata.httpStatusCode; + if (500 <= statusCode && statusCode <= 599 && !isTransientError(error)) { + return true; + } + return false; + } + return false; +}; diff --git 
a/amplify/functions/fetchDocuments/node_modules/@smithy/service-error-classification/dist-types/constants.d.ts b/amplify/functions/fetchDocuments/node_modules/@smithy/service-error-classification/dist-types/constants.d.ts new file mode 100644 index 0000000..f07663b --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@smithy/service-error-classification/dist-types/constants.d.ts @@ -0,0 +1,26 @@ +/** + * Errors encountered when the client clock and server clock cannot agree on the + * current time. + * + * These errors are retryable, assuming the SDK has enabled clock skew + * correction. + */ +export declare const CLOCK_SKEW_ERROR_CODES: string[]; +/** + * Errors that indicate the SDK is being throttled. + * + * These errors are always retryable. + */ +export declare const THROTTLING_ERROR_CODES: string[]; +/** + * Error codes that indicate transient issues + */ +export declare const TRANSIENT_ERROR_CODES: string[]; +/** + * Error codes that indicate transient issues + */ +export declare const TRANSIENT_ERROR_STATUS_CODES: number[]; +/** + * Node.js system error codes that indicate timeout. + */ +export declare const NODEJS_TIMEOUT_ERROR_CODES: string[]; diff --git a/amplify/functions/fetchDocuments/node_modules/@smithy/service-error-classification/dist-types/index.d.ts b/amplify/functions/fetchDocuments/node_modules/@smithy/service-error-classification/dist-types/index.d.ts new file mode 100644 index 0000000..6aad102 --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@smithy/service-error-classification/dist-types/index.d.ts @@ -0,0 +1,24 @@ +import { SdkError } from "@smithy/types"; +export declare const isRetryableByTrait: (error: SdkError) => boolean; +/** + * @deprecated use isClockSkewCorrectedError. This is only used in deprecated code. + */ +export declare const isClockSkewError: (error: SdkError) => boolean; +/** + * @returns whether the error resulted in a systemClockOffset aka clock skew correction. 
+ */ +export declare const isClockSkewCorrectedError: (error: SdkError) => true | undefined; +/** + * + * @internal + */ +export declare const isBrowserNetworkError: (error: SdkError) => boolean; +export declare const isThrottlingError: (error: SdkError) => boolean; +/** + * Though NODEJS_TIMEOUT_ERROR_CODES are platform specific, they are + * included here because there is an error scenario with unknown root + * cause where the NodeHttpHandler does not decorate the Error with + * the name "TimeoutError" to be checked by the TRANSIENT_ERROR_CODES condition. + */ +export declare const isTransientError: (error: SdkError, depth?: number) => boolean; +export declare const isServerError: (error: SdkError) => boolean; diff --git a/amplify/functions/fetchDocuments/node_modules/@smithy/service-error-classification/dist-types/ts3.4/constants.d.ts b/amplify/functions/fetchDocuments/node_modules/@smithy/service-error-classification/dist-types/ts3.4/constants.d.ts new file mode 100644 index 0000000..74c4858 --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@smithy/service-error-classification/dist-types/ts3.4/constants.d.ts @@ -0,0 +1,26 @@ +/** + * Errors encountered when the client clock and server clock cannot agree on the + * current time. + * + * These errors are retryable, assuming the SDK has enabled clock skew + * correction. + */ +export declare const CLOCK_SKEW_ERROR_CODES: string[]; +/** + * Errors that indicate the SDK is being throttled. + * + * These errors are always retryable. + */ +export declare const THROTTLING_ERROR_CODES: string[]; +/** + * Error codes that indicate transient issues + */ +export declare const TRANSIENT_ERROR_CODES: string[]; +/** + * Error codes that indicate transient issues + */ +export declare const TRANSIENT_ERROR_STATUS_CODES: number[]; +/** + * Node.js system error codes that indicate timeout. 
+ */ +export declare const NODEJS_TIMEOUT_ERROR_CODES: string[]; diff --git a/amplify/functions/fetchDocuments/node_modules/@smithy/service-error-classification/dist-types/ts3.4/index.d.ts b/amplify/functions/fetchDocuments/node_modules/@smithy/service-error-classification/dist-types/ts3.4/index.d.ts new file mode 100644 index 0000000..c7909ae --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@smithy/service-error-classification/dist-types/ts3.4/index.d.ts @@ -0,0 +1,24 @@ +import { SdkError } from "@smithy/types"; +export declare const isRetryableByTrait: (error: SdkError) => boolean; +/** + * @deprecated use isClockSkewCorrectedError. This is only used in deprecated code. + */ +export declare const isClockSkewError: (error: SdkError) => boolean; +/** + * @returns whether the error resulted in a systemClockOffset aka clock skew correction. + */ +export declare const isClockSkewCorrectedError: (error: SdkError) => true | undefined; +/** + * + * @internal + */ +export declare const isBrowserNetworkError: (error: SdkError) => boolean; +export declare const isThrottlingError: (error: SdkError) => boolean; +/** + * Though NODEJS_TIMEOUT_ERROR_CODES are platform specific, they are + * included here because there is an error scenario with unknown root + * cause where the NodeHttpHandler does not decorate the Error with + * the name "TimeoutError" to be checked by the TRANSIENT_ERROR_CODES condition. 
+ */ +export declare const isTransientError: (error: SdkError, depth?: number) => boolean; +export declare const isServerError: (error: SdkError) => boolean; diff --git a/amplify/functions/fetchDocuments/node_modules/@smithy/service-error-classification/package.json b/amplify/functions/fetchDocuments/node_modules/@smithy/service-error-classification/package.json new file mode 100644 index 0000000..a568aee --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@smithy/service-error-classification/package.json @@ -0,0 +1,59 @@ +{ + "name": "@smithy/service-error-classification", + "version": "4.0.3", + "scripts": { + "build": "concurrently 'yarn:build:cjs' 'yarn:build:es' 'yarn:build:types && yarn build:types:downlevel'", + "build:cjs": "node ../../scripts/inline service-error-classification", + "build:es": "yarn g:tsc -p tsconfig.es.json", + "build:types": "yarn g:tsc -p tsconfig.types.json", + "build:types:downlevel": "rimraf dist-types/ts3.4 && downlevel-dts dist-types dist-types/ts3.4", + "stage-release": "rimraf ./.release && yarn pack && mkdir ./.release && tar zxvf ./package.tgz --directory ./.release && rm ./package.tgz", + "clean": "rimraf ./dist-* && rimraf *.tsbuildinfo || exit 0", + "lint": "eslint -c ../../.eslintrc.js \"src/**/*.ts\"", + "format": "prettier --config ../../prettier.config.js --ignore-path ../../.prettierignore --write \"**/*.{ts,md,json}\"", + "test": "yarn g:vitest run", + "test:watch": "yarn g:vitest watch" + }, + "main": "./dist-cjs/index.js", + "module": "./dist-es/index.js", + "types": "./dist-types/index.d.ts", + "author": { + "name": "AWS SDK for JavaScript Team", + "url": "https://aws.amazon.com/javascript/" + }, + "license": "Apache-2.0", + "devDependencies": { + "concurrently": "7.0.0", + "downlevel-dts": "0.10.1", + "rimraf": "3.0.2", + "typedoc": "0.23.23" + }, + "engines": { + "node": ">=18.0.0" + }, + "typesVersions": { + "<4.0": { + "dist-types/*": [ + "dist-types/ts3.4/*" + ] + } + }, + "files": [ + "dist-*/**" 
+ ], + "homepage": "https://github.com/smithy-lang/smithy-typescript/tree/main/packages/service-error-classification", + "repository": { + "type": "git", + "url": "https://github.com/smithy-lang/smithy-typescript.git", + "directory": "packages/service-error-classification" + }, + "typedoc": { + "entryPoint": "src/index.ts" + }, + "publishConfig": { + "directory": ".release/package" + }, + "dependencies": { + "@smithy/types": "^4.2.0" + } +} \ No newline at end of file diff --git a/amplify/functions/fetchDocuments/node_modules/@smithy/shared-ini-file-loader/LICENSE b/amplify/functions/fetchDocuments/node_modules/@smithy/shared-ini-file-loader/LICENSE new file mode 100644 index 0000000..7b6491b --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@smithy/shared-ini-file-loader/LICENSE @@ -0,0 +1,201 @@ +Apache License + Version 2.0, January 2004 + http://www.apache.org/licenses/ + + TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION + + 1. Definitions. + + "License" shall mean the terms and conditions for use, reproduction, + and distribution as defined by Sections 1 through 9 of this document. + + "Licensor" shall mean the copyright owner or entity authorized by + the copyright owner that is granting the License. + + "Legal Entity" shall mean the union of the acting entity and all + other entities that control, are controlled by, or are under common + control with that entity. For the purposes of this definition, + "control" means (i) the power, direct or indirect, to cause the + direction or management of such entity, whether by contract or + otherwise, or (ii) ownership of fifty percent (50%) or more of the + outstanding shares, or (iii) beneficial ownership of such entity. + + "You" (or "Your") shall mean an individual or Legal Entity + exercising permissions granted by this License. 
+ + "Source" form shall mean the preferred form for making modifications, + including but not limited to software source code, documentation + source, and configuration files. + + "Object" form shall mean any form resulting from mechanical + transformation or translation of a Source form, including but + not limited to compiled object code, generated documentation, + and conversions to other media types. + + "Work" shall mean the work of authorship, whether in Source or + Object form, made available under the License, as indicated by a + copyright notice that is included in or attached to the work + (an example is provided in the Appendix below). + + "Derivative Works" shall mean any work, whether in Source or Object + form, that is based on (or derived from) the Work and for which the + editorial revisions, annotations, elaborations, or other modifications + represent, as a whole, an original work of authorship. For the purposes + of this License, Derivative Works shall not include works that remain + separable from, or merely link (or bind by name) to the interfaces of, + the Work and Derivative Works thereof. + + "Contribution" shall mean any work of authorship, including + the original version of the Work and any modifications or additions + to that Work or Derivative Works thereof, that is intentionally + submitted to Licensor for inclusion in the Work by the copyright owner + or by an individual or Legal Entity authorized to submit on behalf of + the copyright owner. 
For the purposes of this definition, "submitted" + means any form of electronic, verbal, or written communication sent + to the Licensor or its representatives, including but not limited to + communication on electronic mailing lists, source code control systems, + and issue tracking systems that are managed by, or on behalf of, the + Licensor for the purpose of discussing and improving the Work, but + excluding communication that is conspicuously marked or otherwise + designated in writing by the copyright owner as "Not a Contribution." + + "Contributor" shall mean Licensor and any individual or Legal Entity + on behalf of whom a Contribution has been received by Licensor and + subsequently incorporated within the Work. + + 2. Grant of Copyright License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + copyright license to reproduce, prepare Derivative Works of, + publicly display, publicly perform, sublicense, and distribute the + Work and such Derivative Works in Source or Object form. + + 3. Grant of Patent License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + (except as stated in this section) patent license to make, have made, + use, offer to sell, sell, import, and otherwise transfer the Work, + where such license applies only to those patent claims licensable + by such Contributor that are necessarily infringed by their + Contribution(s) alone or by combination of their Contribution(s) + with the Work to which such Contribution(s) was submitted. 
If You + institute patent litigation against any entity (including a + cross-claim or counterclaim in a lawsuit) alleging that the Work + or a Contribution incorporated within the Work constitutes direct + or contributory patent infringement, then any patent licenses + granted to You under this License for that Work shall terminate + as of the date such litigation is filed. + + 4. Redistribution. You may reproduce and distribute copies of the + Work or Derivative Works thereof in any medium, with or without + modifications, and in Source or Object form, provided that You + meet the following conditions: + + (a) You must give any other recipients of the Work or + Derivative Works a copy of this License; and + + (b) You must cause any modified files to carry prominent notices + stating that You changed the files; and + + (c) You must retain, in the Source form of any Derivative Works + that You distribute, all copyright, patent, trademark, and + attribution notices from the Source form of the Work, + excluding those notices that do not pertain to any part of + the Derivative Works; and + + (d) If the Work includes a "NOTICE" text file as part of its + distribution, then any Derivative Works that You distribute must + include a readable copy of the attribution notices contained + within such NOTICE file, excluding those notices that do not + pertain to any part of the Derivative Works, in at least one + of the following places: within a NOTICE text file distributed + as part of the Derivative Works; within the Source form or + documentation, if provided along with the Derivative Works; or, + within a display generated by the Derivative Works, if and + wherever such third-party notices normally appear. The contents + of the NOTICE file are for informational purposes only and + do not modify the License. 
You may add Your own attribution + notices within Derivative Works that You distribute, alongside + or as an addendum to the NOTICE text from the Work, provided + that such additional attribution notices cannot be construed + as modifying the License. + + You may add Your own copyright statement to Your modifications and + may provide additional or different license terms and conditions + for use, reproduction, or distribution of Your modifications, or + for any such Derivative Works as a whole, provided Your use, + reproduction, and distribution of the Work otherwise complies with + the conditions stated in this License. + + 5. Submission of Contributions. Unless You explicitly state otherwise, + any Contribution intentionally submitted for inclusion in the Work + by You to the Licensor shall be under the terms and conditions of + this License, without any additional terms or conditions. + Notwithstanding the above, nothing herein shall supersede or modify + the terms of any separate license agreement you may have executed + with Licensor regarding such Contributions. + + 6. Trademarks. This License does not grant permission to use the trade + names, trademarks, service marks, or product names of the Licensor, + except as required for reasonable and customary use in describing the + origin of the Work and reproducing the content of the NOTICE file. + + 7. Disclaimer of Warranty. Unless required by applicable law or + agreed to in writing, Licensor provides the Work (and each + Contributor provides its Contributions) on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or + implied, including, without limitation, any warranties or conditions + of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A + PARTICULAR PURPOSE. You are solely responsible for determining the + appropriateness of using or redistributing the Work and assume any + risks associated with Your exercise of permissions under this License. + + 8. 
Limitation of Liability. In no event and under no legal theory, + whether in tort (including negligence), contract, or otherwise, + unless required by applicable law (such as deliberate and grossly + negligent acts) or agreed to in writing, shall any Contributor be + liable to You for damages, including any direct, indirect, special, + incidental, or consequential damages of any character arising as a + result of this License or out of the use or inability to use the + Work (including but not limited to damages for loss of goodwill, + work stoppage, computer failure or malfunction, or any and all + other commercial damages or losses), even if such Contributor + has been advised of the possibility of such damages. + + 9. Accepting Warranty or Additional Liability. While redistributing + the Work or Derivative Works thereof, You may choose to offer, + and charge a fee for, acceptance of support, warranty, indemnity, + or other liability obligations and/or rights consistent with this + License. However, in accepting such obligations, You may act only + on Your own behalf and on Your sole responsibility, not on behalf + of any other Contributor, and only if You agree to indemnify, + defend, and hold each Contributor harmless for any liability + incurred by, or claims asserted against, such Contributor by reason + of your accepting any such warranty or additional liability. + + END OF TERMS AND CONDITIONS + + APPENDIX: How to apply the Apache License to your work. + + To apply the Apache License to your work, attach the following + boilerplate notice, with the fields enclosed by brackets "{}" + replaced with your own identifying information. (Don't include + the brackets!) The text should be enclosed in the appropriate + comment syntax for the file format. We also recommend that a + file or class name and description of purpose be included on the + same "printed page" as the copyright notice for easier + identification within third-party archives. 
+ + Copyright 2018-2020 Amazon.com, Inc. or its affiliates. All Rights Reserved. + + Licensed under the Apache License, Version 2.0 (the "License"); + you may not use this file except in compliance with the License. + You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + + Unless required by applicable law or agreed to in writing, software + distributed under the License is distributed on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + See the License for the specific language governing permissions and + limitations under the License. \ No newline at end of file diff --git a/amplify/functions/fetchDocuments/node_modules/@smithy/shared-ini-file-loader/README.md b/amplify/functions/fetchDocuments/node_modules/@smithy/shared-ini-file-loader/README.md new file mode 100644 index 0000000..45a4b2e --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@smithy/shared-ini-file-loader/README.md @@ -0,0 +1,105 @@ +# @smithy/shared-ini-file-loader + +[![NPM version](https://img.shields.io/npm/v/@smithy/shared-ini-file-loader/latest.svg)](https://www.npmjs.com/package/@smithy/shared-ini-file-loader) +[![NPM downloads](https://img.shields.io/npm/dm/@smithy/shared-ini-file-loader.svg)](https://www.npmjs.com/package/@smithy/shared-ini-file-loader) + +## AWS Shared Configuration File Loader + +This module provides a function that reads from AWS SDK configuration files and +returns a promise that will resolve with a hash of the parsed contents of the +AWS credentials file and of the AWS config file. 
Given the [sample +files](#sample-files) below, the promise returned by `loadSharedConfigFiles` +would resolve with: + +```javascript +{ + configFile: { + 'default': { + aws_access_key_id: 'foo', + aws_secret_access_key: 'bar', + }, + dev: { + aws_access_key_id: 'foo1', + aws_secret_access_key: 'bar1', + }, + prod: { + aws_access_key_id: 'foo2', + aws_secret_access_key: 'bar2', + }, + 'testing host': { + aws_access_key_id: 'foo4', + aws_secret_access_key: 'bar4', + } + }, + credentialsFile: { + 'default': { + aws_access_key_id: 'foo', + aws_secret_access_key: 'bar', + }, + dev: { + aws_access_key_id: 'foo1', + aws_secret_access_key: 'bar1', + }, + prod: { + aws_access_key_id: 'foo2', + aws_secret_access_key: 'bar2', + } + }, +} +``` + +If a file is not found, its key (`configFile` or `credentialsFile`) will instead +have a value of an empty object. + +## Supported configuration + +You may customize how the files are loaded by providing an options hash to the +`loadSharedConfigFiles` function. The following options are supported: + +- `filepath` - The path to the shared credentials file. If not specified, the + provider will use the value in the `AWS_SHARED_CREDENTIALS_FILE` environment + variable or a default of `~/.aws/credentials`. +- `configFilepath` - The path to the shared config file. If not specified, the + provider will use the value in the `AWS_CONFIG_FILE` environment variable or a + default of `~/.aws/config`. +- `ignoreCache` - The provider will normally cache the contents of the files it + loads. This option will force the provider to reload the files from disk. + Defaults to `false`. 
+ +## Sample files + +### `~/.aws/credentials` + +```ini +[default] +aws_access_key_id=foo +aws_secret_access_key=bar + +[dev] +aws_access_key_id=foo2 +aws_secret_access_key=bar2 + +[prod] +aws_access_key_id=foo3 +aws_secret_access_key=bar3 +``` + +### `~/.aws/config` + +```ini +[default] +aws_access_key_id=foo +aws_secret_access_key=bar + +[profile dev] +aws_access_key_id=foo2 +aws_secret_access_key=bar2 + +[profile prod] +aws_access_key_id=foo3 +aws_secret_access_key=bar3 + +[profile "testing host"] +aws_access_key_id=foo4 +aws_secret_access_key=bar4 +``` diff --git a/amplify/functions/fetchDocuments/node_modules/@smithy/shared-ini-file-loader/dist-cjs/getConfigData.js b/amplify/functions/fetchDocuments/node_modules/@smithy/shared-ini-file-loader/dist-cjs/getConfigData.js new file mode 100644 index 0000000..532e610 --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@smithy/shared-ini-file-loader/dist-cjs/getConfigData.js @@ -0,0 +1 @@ +module.exports = require("./index.js"); \ No newline at end of file diff --git a/amplify/functions/fetchDocuments/node_modules/@smithy/shared-ini-file-loader/dist-cjs/getConfigFilepath.js b/amplify/functions/fetchDocuments/node_modules/@smithy/shared-ini-file-loader/dist-cjs/getConfigFilepath.js new file mode 100644 index 0000000..532e610 --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@smithy/shared-ini-file-loader/dist-cjs/getConfigFilepath.js @@ -0,0 +1 @@ +module.exports = require("./index.js"); \ No newline at end of file diff --git a/amplify/functions/fetchDocuments/node_modules/@smithy/shared-ini-file-loader/dist-cjs/getCredentialsFilepath.js b/amplify/functions/fetchDocuments/node_modules/@smithy/shared-ini-file-loader/dist-cjs/getCredentialsFilepath.js new file mode 100644 index 0000000..532e610 --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@smithy/shared-ini-file-loader/dist-cjs/getCredentialsFilepath.js @@ -0,0 +1 @@ +module.exports = require("./index.js"); \ No 
newline at end of file diff --git a/amplify/functions/fetchDocuments/node_modules/@smithy/shared-ini-file-loader/dist-cjs/getHomeDir.js b/amplify/functions/fetchDocuments/node_modules/@smithy/shared-ini-file-loader/dist-cjs/getHomeDir.js new file mode 100644 index 0000000..2a4f737 --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@smithy/shared-ini-file-loader/dist-cjs/getHomeDir.js @@ -0,0 +1,26 @@ +"use strict"; +Object.defineProperty(exports, "__esModule", { value: true }); +exports.getHomeDir = void 0; +const os_1 = require("os"); +const path_1 = require("path"); +const homeDirCache = {}; +const getHomeDirCacheKey = () => { + if (process && process.geteuid) { + return `${process.geteuid()}`; + } + return "DEFAULT"; +}; +const getHomeDir = () => { + const { HOME, USERPROFILE, HOMEPATH, HOMEDRIVE = `C:${path_1.sep}` } = process.env; + if (HOME) + return HOME; + if (USERPROFILE) + return USERPROFILE; + if (HOMEPATH) + return `${HOMEDRIVE}${HOMEPATH}`; + const homeDirCacheKey = getHomeDirCacheKey(); + if (!homeDirCache[homeDirCacheKey]) + homeDirCache[homeDirCacheKey] = (0, os_1.homedir)(); + return homeDirCache[homeDirCacheKey]; +}; +exports.getHomeDir = getHomeDir; diff --git a/amplify/functions/fetchDocuments/node_modules/@smithy/shared-ini-file-loader/dist-cjs/getProfileName.js b/amplify/functions/fetchDocuments/node_modules/@smithy/shared-ini-file-loader/dist-cjs/getProfileName.js new file mode 100644 index 0000000..532e610 --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@smithy/shared-ini-file-loader/dist-cjs/getProfileName.js @@ -0,0 +1 @@ +module.exports = require("./index.js"); \ No newline at end of file diff --git a/amplify/functions/fetchDocuments/node_modules/@smithy/shared-ini-file-loader/dist-cjs/getSSOTokenFilepath.js b/amplify/functions/fetchDocuments/node_modules/@smithy/shared-ini-file-loader/dist-cjs/getSSOTokenFilepath.js new file mode 100644 index 0000000..30d97b3 --- /dev/null +++ 
b/amplify/functions/fetchDocuments/node_modules/@smithy/shared-ini-file-loader/dist-cjs/getSSOTokenFilepath.js @@ -0,0 +1,12 @@ +"use strict"; +Object.defineProperty(exports, "__esModule", { value: true }); +exports.getSSOTokenFilepath = void 0; +const crypto_1 = require("crypto"); +const path_1 = require("path"); +const getHomeDir_1 = require("./getHomeDir"); +const getSSOTokenFilepath = (id) => { + const hasher = (0, crypto_1.createHash)("sha1"); + const cacheName = hasher.update(id).digest("hex"); + return (0, path_1.join)((0, getHomeDir_1.getHomeDir)(), ".aws", "sso", "cache", `${cacheName}.json`); +}; +exports.getSSOTokenFilepath = getSSOTokenFilepath; diff --git a/amplify/functions/fetchDocuments/node_modules/@smithy/shared-ini-file-loader/dist-cjs/getSSOTokenFromFile.js b/amplify/functions/fetchDocuments/node_modules/@smithy/shared-ini-file-loader/dist-cjs/getSSOTokenFromFile.js new file mode 100644 index 0000000..688accb --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@smithy/shared-ini-file-loader/dist-cjs/getSSOTokenFromFile.js @@ -0,0 +1,12 @@ +"use strict"; +Object.defineProperty(exports, "__esModule", { value: true }); +exports.getSSOTokenFromFile = void 0; +const fs_1 = require("fs"); +const getSSOTokenFilepath_1 = require("./getSSOTokenFilepath"); +const { readFile } = fs_1.promises; +const getSSOTokenFromFile = async (id) => { + const ssoTokenFilepath = (0, getSSOTokenFilepath_1.getSSOTokenFilepath)(id); + const ssoTokenText = await readFile(ssoTokenFilepath, "utf8"); + return JSON.parse(ssoTokenText); +}; +exports.getSSOTokenFromFile = getSSOTokenFromFile; diff --git a/amplify/functions/fetchDocuments/node_modules/@smithy/shared-ini-file-loader/dist-cjs/getSsoSessionData.js b/amplify/functions/fetchDocuments/node_modules/@smithy/shared-ini-file-loader/dist-cjs/getSsoSessionData.js new file mode 100644 index 0000000..532e610 --- /dev/null +++ 
b/amplify/functions/fetchDocuments/node_modules/@smithy/shared-ini-file-loader/dist-cjs/getSsoSessionData.js @@ -0,0 +1 @@ +module.exports = require("./index.js"); \ No newline at end of file diff --git a/amplify/functions/fetchDocuments/node_modules/@smithy/shared-ini-file-loader/dist-cjs/index.js b/amplify/functions/fetchDocuments/node_modules/@smithy/shared-ini-file-loader/dist-cjs/index.js new file mode 100644 index 0000000..de59bfa --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@smithy/shared-ini-file-loader/dist-cjs/index.js @@ -0,0 +1,206 @@ +var __defProp = Object.defineProperty; +var __getOwnPropDesc = Object.getOwnPropertyDescriptor; +var __getOwnPropNames = Object.getOwnPropertyNames; +var __hasOwnProp = Object.prototype.hasOwnProperty; +var __name = (target, value) => __defProp(target, "name", { value, configurable: true }); +var __export = (target, all) => { + for (var name in all) + __defProp(target, name, { get: all[name], enumerable: true }); +}; +var __copyProps = (to, from, except, desc) => { + if (from && typeof from === "object" || typeof from === "function") { + for (let key of __getOwnPropNames(from)) + if (!__hasOwnProp.call(to, key) && key !== except) + __defProp(to, key, { get: () => from[key], enumerable: !(desc = __getOwnPropDesc(from, key)) || desc.enumerable }); + } + return to; +}; +var __reExport = (target, mod, secondTarget) => (__copyProps(target, mod, "default"), secondTarget && __copyProps(secondTarget, mod, "default")); +var __toCommonJS = (mod) => __copyProps(__defProp({}, "__esModule", { value: true }), mod); + +// src/index.ts +var src_exports = {}; +__export(src_exports, { + CONFIG_PREFIX_SEPARATOR: () => CONFIG_PREFIX_SEPARATOR, + DEFAULT_PROFILE: () => DEFAULT_PROFILE, + ENV_PROFILE: () => ENV_PROFILE, + getProfileName: () => getProfileName, + loadSharedConfigFiles: () => loadSharedConfigFiles, + loadSsoSessionData: () => loadSsoSessionData, + parseKnownFiles: () => parseKnownFiles +}); +module.exports = 
__toCommonJS(src_exports); +__reExport(src_exports, require("././getHomeDir"), module.exports); + +// src/getProfileName.ts +var ENV_PROFILE = "AWS_PROFILE"; +var DEFAULT_PROFILE = "default"; +var getProfileName = /* @__PURE__ */ __name((init) => init.profile || process.env[ENV_PROFILE] || DEFAULT_PROFILE, "getProfileName"); + +// src/index.ts +__reExport(src_exports, require("././getSSOTokenFilepath"), module.exports); +__reExport(src_exports, require("././getSSOTokenFromFile"), module.exports); + +// src/loadSharedConfigFiles.ts + + +// src/getConfigData.ts +var import_types = require("@smithy/types"); +var getConfigData = /* @__PURE__ */ __name((data) => Object.entries(data).filter(([key]) => { + const indexOfSeparator = key.indexOf(CONFIG_PREFIX_SEPARATOR); + if (indexOfSeparator === -1) { + return false; + } + return Object.values(import_types.IniSectionType).includes(key.substring(0, indexOfSeparator)); +}).reduce( + (acc, [key, value]) => { + const indexOfSeparator = key.indexOf(CONFIG_PREFIX_SEPARATOR); + const updatedKey = key.substring(0, indexOfSeparator) === import_types.IniSectionType.PROFILE ? key.substring(indexOfSeparator + 1) : key; + acc[updatedKey] = value; + return acc; + }, + { + // Populate default profile, if present. 
+ ...data.default && { default: data.default } + } +), "getConfigData"); + +// src/getConfigFilepath.ts +var import_path = require("path"); +var import_getHomeDir = require("././getHomeDir"); +var ENV_CONFIG_PATH = "AWS_CONFIG_FILE"; +var getConfigFilepath = /* @__PURE__ */ __name(() => process.env[ENV_CONFIG_PATH] || (0, import_path.join)((0, import_getHomeDir.getHomeDir)(), ".aws", "config"), "getConfigFilepath"); + +// src/getCredentialsFilepath.ts + +var import_getHomeDir2 = require("././getHomeDir"); +var ENV_CREDENTIALS_PATH = "AWS_SHARED_CREDENTIALS_FILE"; +var getCredentialsFilepath = /* @__PURE__ */ __name(() => process.env[ENV_CREDENTIALS_PATH] || (0, import_path.join)((0, import_getHomeDir2.getHomeDir)(), ".aws", "credentials"), "getCredentialsFilepath"); + +// src/loadSharedConfigFiles.ts +var import_getHomeDir3 = require("././getHomeDir"); + +// src/parseIni.ts + +var prefixKeyRegex = /^([\w-]+)\s(["'])?([\w-@\+\.%:/]+)\2$/; +var profileNameBlockList = ["__proto__", "profile __proto__"]; +var parseIni = /* @__PURE__ */ __name((iniData) => { + const map = {}; + let currentSection; + let currentSubSection; + for (const iniLine of iniData.split(/\r?\n/)) { + const trimmedLine = iniLine.split(/(^|\s)[;#]/)[0].trim(); + const isSection = trimmedLine[0] === "[" && trimmedLine[trimmedLine.length - 1] === "]"; + if (isSection) { + currentSection = void 0; + currentSubSection = void 0; + const sectionName = trimmedLine.substring(1, trimmedLine.length - 1); + const matches = prefixKeyRegex.exec(sectionName); + if (matches) { + const [, prefix, , name] = matches; + if (Object.values(import_types.IniSectionType).includes(prefix)) { + currentSection = [prefix, name].join(CONFIG_PREFIX_SEPARATOR); + } + } else { + currentSection = sectionName; + } + if (profileNameBlockList.includes(sectionName)) { + throw new Error(`Found invalid profile name "${sectionName}"`); + } + } else if (currentSection) { + const indexOfEqualsSign = trimmedLine.indexOf("="); + if (![0, 
-1].includes(indexOfEqualsSign)) { + const [name, value] = [ + trimmedLine.substring(0, indexOfEqualsSign).trim(), + trimmedLine.substring(indexOfEqualsSign + 1).trim() + ]; + if (value === "") { + currentSubSection = name; + } else { + if (currentSubSection && iniLine.trimStart() === iniLine) { + currentSubSection = void 0; + } + map[currentSection] = map[currentSection] || {}; + const key = currentSubSection ? [currentSubSection, name].join(CONFIG_PREFIX_SEPARATOR) : name; + map[currentSection][key] = value; + } + } + } + } + return map; +}, "parseIni"); + +// src/loadSharedConfigFiles.ts +var import_slurpFile = require("././slurpFile"); +var swallowError = /* @__PURE__ */ __name(() => ({}), "swallowError"); +var CONFIG_PREFIX_SEPARATOR = "."; +var loadSharedConfigFiles = /* @__PURE__ */ __name(async (init = {}) => { + const { filepath = getCredentialsFilepath(), configFilepath = getConfigFilepath() } = init; + const homeDir = (0, import_getHomeDir3.getHomeDir)(); + const relativeHomeDirPrefix = "~/"; + let resolvedFilepath = filepath; + if (filepath.startsWith(relativeHomeDirPrefix)) { + resolvedFilepath = (0, import_path.join)(homeDir, filepath.slice(2)); + } + let resolvedConfigFilepath = configFilepath; + if (configFilepath.startsWith(relativeHomeDirPrefix)) { + resolvedConfigFilepath = (0, import_path.join)(homeDir, configFilepath.slice(2)); + } + const parsedFiles = await Promise.all([ + (0, import_slurpFile.slurpFile)(resolvedConfigFilepath, { + ignoreCache: init.ignoreCache + }).then(parseIni).then(getConfigData).catch(swallowError), + (0, import_slurpFile.slurpFile)(resolvedFilepath, { + ignoreCache: init.ignoreCache + }).then(parseIni).catch(swallowError) + ]); + return { + configFile: parsedFiles[0], + credentialsFile: parsedFiles[1] + }; +}, "loadSharedConfigFiles"); + +// src/getSsoSessionData.ts + +var getSsoSessionData = /* @__PURE__ */ __name((data) => Object.entries(data).filter(([key]) => key.startsWith(import_types.IniSectionType.SSO_SESSION + 
CONFIG_PREFIX_SEPARATOR)).reduce((acc, [key, value]) => ({ ...acc, [key.substring(key.indexOf(CONFIG_PREFIX_SEPARATOR) + 1)]: value }), {}), "getSsoSessionData"); + +// src/loadSsoSessionData.ts +var import_slurpFile2 = require("././slurpFile"); +var swallowError2 = /* @__PURE__ */ __name(() => ({}), "swallowError"); +var loadSsoSessionData = /* @__PURE__ */ __name(async (init = {}) => (0, import_slurpFile2.slurpFile)(init.configFilepath ?? getConfigFilepath()).then(parseIni).then(getSsoSessionData).catch(swallowError2), "loadSsoSessionData"); + +// src/mergeConfigFiles.ts +var mergeConfigFiles = /* @__PURE__ */ __name((...files) => { + const merged = {}; + for (const file of files) { + for (const [key, values] of Object.entries(file)) { + if (merged[key] !== void 0) { + Object.assign(merged[key], values); + } else { + merged[key] = values; + } + } + } + return merged; +}, "mergeConfigFiles"); + +// src/parseKnownFiles.ts +var parseKnownFiles = /* @__PURE__ */ __name(async (init) => { + const parsedFiles = await loadSharedConfigFiles(init); + return mergeConfigFiles(parsedFiles.configFile, parsedFiles.credentialsFile); +}, "parseKnownFiles"); +// Annotate the CommonJS export names for ESM import in node: + +0 && (module.exports = { + getHomeDir, + ENV_PROFILE, + DEFAULT_PROFILE, + getProfileName, + getSSOTokenFilepath, + getSSOTokenFromFile, + CONFIG_PREFIX_SEPARATOR, + loadSharedConfigFiles, + loadSsoSessionData, + parseKnownFiles +}); + diff --git a/amplify/functions/fetchDocuments/node_modules/@smithy/shared-ini-file-loader/dist-cjs/loadSharedConfigFiles.js b/amplify/functions/fetchDocuments/node_modules/@smithy/shared-ini-file-loader/dist-cjs/loadSharedConfigFiles.js new file mode 100644 index 0000000..532e610 --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@smithy/shared-ini-file-loader/dist-cjs/loadSharedConfigFiles.js @@ -0,0 +1 @@ +module.exports = require("./index.js"); \ No newline at end of file diff --git 
a/amplify/functions/fetchDocuments/node_modules/@smithy/shared-ini-file-loader/dist-cjs/loadSsoSessionData.js b/amplify/functions/fetchDocuments/node_modules/@smithy/shared-ini-file-loader/dist-cjs/loadSsoSessionData.js new file mode 100644 index 0000000..532e610 --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@smithy/shared-ini-file-loader/dist-cjs/loadSsoSessionData.js @@ -0,0 +1 @@ +module.exports = require("./index.js"); \ No newline at end of file diff --git a/amplify/functions/fetchDocuments/node_modules/@smithy/shared-ini-file-loader/dist-cjs/mergeConfigFiles.js b/amplify/functions/fetchDocuments/node_modules/@smithy/shared-ini-file-loader/dist-cjs/mergeConfigFiles.js new file mode 100644 index 0000000..532e610 --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@smithy/shared-ini-file-loader/dist-cjs/mergeConfigFiles.js @@ -0,0 +1 @@ +module.exports = require("./index.js"); \ No newline at end of file diff --git a/amplify/functions/fetchDocuments/node_modules/@smithy/shared-ini-file-loader/dist-cjs/parseIni.js b/amplify/functions/fetchDocuments/node_modules/@smithy/shared-ini-file-loader/dist-cjs/parseIni.js new file mode 100644 index 0000000..532e610 --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@smithy/shared-ini-file-loader/dist-cjs/parseIni.js @@ -0,0 +1 @@ +module.exports = require("./index.js"); \ No newline at end of file diff --git a/amplify/functions/fetchDocuments/node_modules/@smithy/shared-ini-file-loader/dist-cjs/parseKnownFiles.js b/amplify/functions/fetchDocuments/node_modules/@smithy/shared-ini-file-loader/dist-cjs/parseKnownFiles.js new file mode 100644 index 0000000..532e610 --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@smithy/shared-ini-file-loader/dist-cjs/parseKnownFiles.js @@ -0,0 +1 @@ +module.exports = require("./index.js"); \ No newline at end of file diff --git a/amplify/functions/fetchDocuments/node_modules/@smithy/shared-ini-file-loader/dist-cjs/slurpFile.js 
b/amplify/functions/fetchDocuments/node_modules/@smithy/shared-ini-file-loader/dist-cjs/slurpFile.js new file mode 100644 index 0000000..82d7d65 --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@smithy/shared-ini-file-loader/dist-cjs/slurpFile.js @@ -0,0 +1,13 @@ +"use strict"; +Object.defineProperty(exports, "__esModule", { value: true }); +exports.slurpFile = void 0; +const fs_1 = require("fs"); +const { readFile } = fs_1.promises; +const filePromisesHash = {}; +const slurpFile = (path, options) => { + if (!filePromisesHash[path] || (options === null || options === void 0 ? void 0 : options.ignoreCache)) { + filePromisesHash[path] = readFile(path, "utf8"); + } + return filePromisesHash[path]; +}; +exports.slurpFile = slurpFile; diff --git a/amplify/functions/fetchDocuments/node_modules/@smithy/shared-ini-file-loader/dist-cjs/types.js b/amplify/functions/fetchDocuments/node_modules/@smithy/shared-ini-file-loader/dist-cjs/types.js new file mode 100644 index 0000000..532e610 --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@smithy/shared-ini-file-loader/dist-cjs/types.js @@ -0,0 +1 @@ +module.exports = require("./index.js"); \ No newline at end of file diff --git a/amplify/functions/fetchDocuments/node_modules/@smithy/shared-ini-file-loader/dist-es/getConfigData.js b/amplify/functions/fetchDocuments/node_modules/@smithy/shared-ini-file-loader/dist-es/getConfigData.js new file mode 100644 index 0000000..4579286 --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@smithy/shared-ini-file-loader/dist-es/getConfigData.js @@ -0,0 +1,18 @@ +import { IniSectionType } from "@smithy/types"; +import { CONFIG_PREFIX_SEPARATOR } from "./loadSharedConfigFiles"; +export const getConfigData = (data) => Object.entries(data) + .filter(([key]) => { + const indexOfSeparator = key.indexOf(CONFIG_PREFIX_SEPARATOR); + if (indexOfSeparator === -1) { + return false; + } + return Object.values(IniSectionType).includes(key.substring(0, 
indexOfSeparator)); +}) + .reduce((acc, [key, value]) => { + const indexOfSeparator = key.indexOf(CONFIG_PREFIX_SEPARATOR); + const updatedKey = key.substring(0, indexOfSeparator) === IniSectionType.PROFILE ? key.substring(indexOfSeparator + 1) : key; + acc[updatedKey] = value; + return acc; +}, { + ...(data.default && { default: data.default }), +}); diff --git a/amplify/functions/fetchDocuments/node_modules/@smithy/shared-ini-file-loader/dist-es/getConfigFilepath.js b/amplify/functions/fetchDocuments/node_modules/@smithy/shared-ini-file-loader/dist-es/getConfigFilepath.js new file mode 100644 index 0000000..ca07c2d --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@smithy/shared-ini-file-loader/dist-es/getConfigFilepath.js @@ -0,0 +1,4 @@ +import { join } from "path"; +import { getHomeDir } from "./getHomeDir"; +export const ENV_CONFIG_PATH = "AWS_CONFIG_FILE"; +export const getConfigFilepath = () => process.env[ENV_CONFIG_PATH] || join(getHomeDir(), ".aws", "config"); diff --git a/amplify/functions/fetchDocuments/node_modules/@smithy/shared-ini-file-loader/dist-es/getCredentialsFilepath.js b/amplify/functions/fetchDocuments/node_modules/@smithy/shared-ini-file-loader/dist-es/getCredentialsFilepath.js new file mode 100644 index 0000000..393c0ae --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@smithy/shared-ini-file-loader/dist-es/getCredentialsFilepath.js @@ -0,0 +1,4 @@ +import { join } from "path"; +import { getHomeDir } from "./getHomeDir"; +export const ENV_CREDENTIALS_PATH = "AWS_SHARED_CREDENTIALS_FILE"; +export const getCredentialsFilepath = () => process.env[ENV_CREDENTIALS_PATH] || join(getHomeDir(), ".aws", "credentials"); diff --git a/amplify/functions/fetchDocuments/node_modules/@smithy/shared-ini-file-loader/dist-es/getHomeDir.js b/amplify/functions/fetchDocuments/node_modules/@smithy/shared-ini-file-loader/dist-es/getHomeDir.js new file mode 100644 index 0000000..58772af --- /dev/null +++ 
b/amplify/functions/fetchDocuments/node_modules/@smithy/shared-ini-file-loader/dist-es/getHomeDir.js @@ -0,0 +1,22 @@ +import { homedir } from "os"; +import { sep } from "path"; +const homeDirCache = {}; +const getHomeDirCacheKey = () => { + if (process && process.geteuid) { + return `${process.geteuid()}`; + } + return "DEFAULT"; +}; +export const getHomeDir = () => { + const { HOME, USERPROFILE, HOMEPATH, HOMEDRIVE = `C:${sep}` } = process.env; + if (HOME) + return HOME; + if (USERPROFILE) + return USERPROFILE; + if (HOMEPATH) + return `${HOMEDRIVE}${HOMEPATH}`; + const homeDirCacheKey = getHomeDirCacheKey(); + if (!homeDirCache[homeDirCacheKey]) + homeDirCache[homeDirCacheKey] = homedir(); + return homeDirCache[homeDirCacheKey]; +}; diff --git a/amplify/functions/fetchDocuments/node_modules/@smithy/shared-ini-file-loader/dist-es/getProfileName.js b/amplify/functions/fetchDocuments/node_modules/@smithy/shared-ini-file-loader/dist-es/getProfileName.js new file mode 100644 index 0000000..acc29f0 --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@smithy/shared-ini-file-loader/dist-es/getProfileName.js @@ -0,0 +1,3 @@ +export const ENV_PROFILE = "AWS_PROFILE"; +export const DEFAULT_PROFILE = "default"; +export const getProfileName = (init) => init.profile || process.env[ENV_PROFILE] || DEFAULT_PROFILE; diff --git a/amplify/functions/fetchDocuments/node_modules/@smithy/shared-ini-file-loader/dist-es/getSSOTokenFilepath.js b/amplify/functions/fetchDocuments/node_modules/@smithy/shared-ini-file-loader/dist-es/getSSOTokenFilepath.js new file mode 100644 index 0000000..a44b4ad --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@smithy/shared-ini-file-loader/dist-es/getSSOTokenFilepath.js @@ -0,0 +1,8 @@ +import { createHash } from "crypto"; +import { join } from "path"; +import { getHomeDir } from "./getHomeDir"; +export const getSSOTokenFilepath = (id) => { + const hasher = createHash("sha1"); + const cacheName = 
hasher.update(id).digest("hex"); + return join(getHomeDir(), ".aws", "sso", "cache", `${cacheName}.json`); +}; diff --git a/amplify/functions/fetchDocuments/node_modules/@smithy/shared-ini-file-loader/dist-es/getSSOTokenFromFile.js b/amplify/functions/fetchDocuments/node_modules/@smithy/shared-ini-file-loader/dist-es/getSSOTokenFromFile.js new file mode 100644 index 0000000..42659db --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@smithy/shared-ini-file-loader/dist-es/getSSOTokenFromFile.js @@ -0,0 +1,8 @@ +import { promises as fsPromises } from "fs"; +import { getSSOTokenFilepath } from "./getSSOTokenFilepath"; +const { readFile } = fsPromises; +export const getSSOTokenFromFile = async (id) => { + const ssoTokenFilepath = getSSOTokenFilepath(id); + const ssoTokenText = await readFile(ssoTokenFilepath, "utf8"); + return JSON.parse(ssoTokenText); +}; diff --git a/amplify/functions/fetchDocuments/node_modules/@smithy/shared-ini-file-loader/dist-es/getSsoSessionData.js b/amplify/functions/fetchDocuments/node_modules/@smithy/shared-ini-file-loader/dist-es/getSsoSessionData.js new file mode 100644 index 0000000..f2df194 --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@smithy/shared-ini-file-loader/dist-es/getSsoSessionData.js @@ -0,0 +1,5 @@ +import { IniSectionType } from "@smithy/types"; +import { CONFIG_PREFIX_SEPARATOR } from "./loadSharedConfigFiles"; +export const getSsoSessionData = (data) => Object.entries(data) + .filter(([key]) => key.startsWith(IniSectionType.SSO_SESSION + CONFIG_PREFIX_SEPARATOR)) + .reduce((acc, [key, value]) => ({ ...acc, [key.substring(key.indexOf(CONFIG_PREFIX_SEPARATOR) + 1)]: value }), {}); diff --git a/amplify/functions/fetchDocuments/node_modules/@smithy/shared-ini-file-loader/dist-es/index.js b/amplify/functions/fetchDocuments/node_modules/@smithy/shared-ini-file-loader/dist-es/index.js new file mode 100644 index 0000000..3e8b2c7 --- /dev/null +++ 
b/amplify/functions/fetchDocuments/node_modules/@smithy/shared-ini-file-loader/dist-es/index.js @@ -0,0 +1,8 @@ +export * from "./getHomeDir"; +export * from "./getProfileName"; +export * from "./getSSOTokenFilepath"; +export * from "./getSSOTokenFromFile"; +export * from "./loadSharedConfigFiles"; +export * from "./loadSsoSessionData"; +export * from "./parseKnownFiles"; +export * from "./types"; diff --git a/amplify/functions/fetchDocuments/node_modules/@smithy/shared-ini-file-loader/dist-es/loadSharedConfigFiles.js b/amplify/functions/fetchDocuments/node_modules/@smithy/shared-ini-file-loader/dist-es/loadSharedConfigFiles.js new file mode 100644 index 0000000..77ee32c --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@smithy/shared-ini-file-loader/dist-es/loadSharedConfigFiles.js @@ -0,0 +1,39 @@ +import { join } from "path"; +import { getConfigData } from "./getConfigData"; +import { getConfigFilepath } from "./getConfigFilepath"; +import { getCredentialsFilepath } from "./getCredentialsFilepath"; +import { getHomeDir } from "./getHomeDir"; +import { parseIni } from "./parseIni"; +import { slurpFile } from "./slurpFile"; +const swallowError = () => ({}); +export const CONFIG_PREFIX_SEPARATOR = "."; +export const loadSharedConfigFiles = async (init = {}) => { + const { filepath = getCredentialsFilepath(), configFilepath = getConfigFilepath() } = init; + const homeDir = getHomeDir(); + const relativeHomeDirPrefix = "~/"; + let resolvedFilepath = filepath; + if (filepath.startsWith(relativeHomeDirPrefix)) { + resolvedFilepath = join(homeDir, filepath.slice(2)); + } + let resolvedConfigFilepath = configFilepath; + if (configFilepath.startsWith(relativeHomeDirPrefix)) { + resolvedConfigFilepath = join(homeDir, configFilepath.slice(2)); + } + const parsedFiles = await Promise.all([ + slurpFile(resolvedConfigFilepath, { + ignoreCache: init.ignoreCache, + }) + .then(parseIni) + .then(getConfigData) + .catch(swallowError), + slurpFile(resolvedFilepath, { 
+ ignoreCache: init.ignoreCache, + }) + .then(parseIni) + .catch(swallowError), + ]); + return { + configFile: parsedFiles[0], + credentialsFile: parsedFiles[1], + }; +}; diff --git a/amplify/functions/fetchDocuments/node_modules/@smithy/shared-ini-file-loader/dist-es/loadSsoSessionData.js b/amplify/functions/fetchDocuments/node_modules/@smithy/shared-ini-file-loader/dist-es/loadSsoSessionData.js new file mode 100644 index 0000000..3bd730b --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@smithy/shared-ini-file-loader/dist-es/loadSsoSessionData.js @@ -0,0 +1,9 @@ +import { getConfigFilepath } from "./getConfigFilepath"; +import { getSsoSessionData } from "./getSsoSessionData"; +import { parseIni } from "./parseIni"; +import { slurpFile } from "./slurpFile"; +const swallowError = () => ({}); +export const loadSsoSessionData = async (init = {}) => slurpFile(init.configFilepath ?? getConfigFilepath()) + .then(parseIni) + .then(getSsoSessionData) + .catch(swallowError); diff --git a/amplify/functions/fetchDocuments/node_modules/@smithy/shared-ini-file-loader/dist-es/mergeConfigFiles.js b/amplify/functions/fetchDocuments/node_modules/@smithy/shared-ini-file-loader/dist-es/mergeConfigFiles.js new file mode 100644 index 0000000..58576f7 --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@smithy/shared-ini-file-loader/dist-es/mergeConfigFiles.js @@ -0,0 +1,14 @@ +export const mergeConfigFiles = (...files) => { + const merged = {}; + for (const file of files) { + for (const [key, values] of Object.entries(file)) { + if (merged[key] !== undefined) { + Object.assign(merged[key], values); + } + else { + merged[key] = values; + } + } + } + return merged; +}; diff --git a/amplify/functions/fetchDocuments/node_modules/@smithy/shared-ini-file-loader/dist-es/parseIni.js b/amplify/functions/fetchDocuments/node_modules/@smithy/shared-ini-file-loader/dist-es/parseIni.js new file mode 100644 index 0000000..7af4a6a --- /dev/null +++ 
b/amplify/functions/fetchDocuments/node_modules/@smithy/shared-ini-file-loader/dist-es/parseIni.js @@ -0,0 +1,52 @@ +import { IniSectionType } from "@smithy/types"; +import { CONFIG_PREFIX_SEPARATOR } from "./loadSharedConfigFiles"; +const prefixKeyRegex = /^([\w-]+)\s(["'])?([\w-@\+\.%:/]+)\2$/; +const profileNameBlockList = ["__proto__", "profile __proto__"]; +export const parseIni = (iniData) => { + const map = {}; + let currentSection; + let currentSubSection; + for (const iniLine of iniData.split(/\r?\n/)) { + const trimmedLine = iniLine.split(/(^|\s)[;#]/)[0].trim(); + const isSection = trimmedLine[0] === "[" && trimmedLine[trimmedLine.length - 1] === "]"; + if (isSection) { + currentSection = undefined; + currentSubSection = undefined; + const sectionName = trimmedLine.substring(1, trimmedLine.length - 1); + const matches = prefixKeyRegex.exec(sectionName); + if (matches) { + const [, prefix, , name] = matches; + if (Object.values(IniSectionType).includes(prefix)) { + currentSection = [prefix, name].join(CONFIG_PREFIX_SEPARATOR); + } + } + else { + currentSection = sectionName; + } + if (profileNameBlockList.includes(sectionName)) { + throw new Error(`Found invalid profile name "${sectionName}"`); + } + } + else if (currentSection) { + const indexOfEqualsSign = trimmedLine.indexOf("="); + if (![0, -1].includes(indexOfEqualsSign)) { + const [name, value] = [ + trimmedLine.substring(0, indexOfEqualsSign).trim(), + trimmedLine.substring(indexOfEqualsSign + 1).trim(), + ]; + if (value === "") { + currentSubSection = name; + } + else { + if (currentSubSection && iniLine.trimStart() === iniLine) { + currentSubSection = undefined; + } + map[currentSection] = map[currentSection] || {}; + const key = currentSubSection ? 
[currentSubSection, name].join(CONFIG_PREFIX_SEPARATOR) : name; + map[currentSection][key] = value; + } + } + } + } + return map; +}; diff --git a/amplify/functions/fetchDocuments/node_modules/@smithy/shared-ini-file-loader/dist-es/parseKnownFiles.js b/amplify/functions/fetchDocuments/node_modules/@smithy/shared-ini-file-loader/dist-es/parseKnownFiles.js new file mode 100644 index 0000000..4920e28 --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@smithy/shared-ini-file-loader/dist-es/parseKnownFiles.js @@ -0,0 +1,6 @@ +import { loadSharedConfigFiles } from "./loadSharedConfigFiles"; +import { mergeConfigFiles } from "./mergeConfigFiles"; +export const parseKnownFiles = async (init) => { + const parsedFiles = await loadSharedConfigFiles(init); + return mergeConfigFiles(parsedFiles.configFile, parsedFiles.credentialsFile); +}; diff --git a/amplify/functions/fetchDocuments/node_modules/@smithy/shared-ini-file-loader/dist-es/slurpFile.js b/amplify/functions/fetchDocuments/node_modules/@smithy/shared-ini-file-loader/dist-es/slurpFile.js new file mode 100644 index 0000000..7b360cc --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@smithy/shared-ini-file-loader/dist-es/slurpFile.js @@ -0,0 +1,9 @@ +import { promises as fsPromises } from "fs"; +const { readFile } = fsPromises; +const filePromisesHash = {}; +export const slurpFile = (path, options) => { + if (!filePromisesHash[path] || options?.ignoreCache) { + filePromisesHash[path] = readFile(path, "utf8"); + } + return filePromisesHash[path]; +}; diff --git a/amplify/functions/fetchDocuments/node_modules/@smithy/shared-ini-file-loader/dist-es/types.js b/amplify/functions/fetchDocuments/node_modules/@smithy/shared-ini-file-loader/dist-es/types.js new file mode 100644 index 0000000..cb0ff5c --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@smithy/shared-ini-file-loader/dist-es/types.js @@ -0,0 +1 @@ +export {}; diff --git 
a/amplify/functions/fetchDocuments/node_modules/@smithy/shared-ini-file-loader/dist-types/getConfigData.d.ts b/amplify/functions/fetchDocuments/node_modules/@smithy/shared-ini-file-loader/dist-types/getConfigData.d.ts new file mode 100644 index 0000000..4259831 --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@smithy/shared-ini-file-loader/dist-types/getConfigData.d.ts @@ -0,0 +1,8 @@ +import { ParsedIniData } from "@smithy/types"; +/** + * Returns the config data from parsed ini data. + * * Returns data for `default` + * * Returns profile name without prefix. + * * Returns non-profiles as is. + */ +export declare const getConfigData: (data: ParsedIniData) => ParsedIniData; diff --git a/amplify/functions/fetchDocuments/node_modules/@smithy/shared-ini-file-loader/dist-types/getConfigFilepath.d.ts b/amplify/functions/fetchDocuments/node_modules/@smithy/shared-ini-file-loader/dist-types/getConfigFilepath.d.ts new file mode 100644 index 0000000..1d123be --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@smithy/shared-ini-file-loader/dist-types/getConfigFilepath.d.ts @@ -0,0 +1,2 @@ +export declare const ENV_CONFIG_PATH = "AWS_CONFIG_FILE"; +export declare const getConfigFilepath: () => string; diff --git a/amplify/functions/fetchDocuments/node_modules/@smithy/shared-ini-file-loader/dist-types/getCredentialsFilepath.d.ts b/amplify/functions/fetchDocuments/node_modules/@smithy/shared-ini-file-loader/dist-types/getCredentialsFilepath.d.ts new file mode 100644 index 0000000..26fda4a --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@smithy/shared-ini-file-loader/dist-types/getCredentialsFilepath.d.ts @@ -0,0 +1,2 @@ +export declare const ENV_CREDENTIALS_PATH = "AWS_SHARED_CREDENTIALS_FILE"; +export declare const getCredentialsFilepath: () => string; diff --git a/amplify/functions/fetchDocuments/node_modules/@smithy/shared-ini-file-loader/dist-types/getHomeDir.d.ts 
b/amplify/functions/fetchDocuments/node_modules/@smithy/shared-ini-file-loader/dist-types/getHomeDir.d.ts new file mode 100644 index 0000000..5d15bf1 --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@smithy/shared-ini-file-loader/dist-types/getHomeDir.d.ts @@ -0,0 +1,6 @@ +/** + * Get the HOME directory for the current runtime. + * + * @internal + */ +export declare const getHomeDir: () => string; diff --git a/amplify/functions/fetchDocuments/node_modules/@smithy/shared-ini-file-loader/dist-types/getProfileName.d.ts b/amplify/functions/fetchDocuments/node_modules/@smithy/shared-ini-file-loader/dist-types/getProfileName.d.ts new file mode 100644 index 0000000..5a608b2 --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@smithy/shared-ini-file-loader/dist-types/getProfileName.d.ts @@ -0,0 +1,15 @@ +/** + * @internal + */ +export declare const ENV_PROFILE = "AWS_PROFILE"; +/** + * @internal + */ +export declare const DEFAULT_PROFILE = "default"; +/** + * Returns profile with priority order code - ENV - default. + * @internal + */ +export declare const getProfileName: (init: { + profile?: string; +}) => string; diff --git a/amplify/functions/fetchDocuments/node_modules/@smithy/shared-ini-file-loader/dist-types/getSSOTokenFilepath.d.ts b/amplify/functions/fetchDocuments/node_modules/@smithy/shared-ini-file-loader/dist-types/getSSOTokenFilepath.d.ts new file mode 100644 index 0000000..44a4030 --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@smithy/shared-ini-file-loader/dist-types/getSSOTokenFilepath.d.ts @@ -0,0 +1,5 @@ +/** + * Returns the filepath of the file where SSO token is stored. 
+ * @internal + */ +export declare const getSSOTokenFilepath: (id: string) => string; diff --git a/amplify/functions/fetchDocuments/node_modules/@smithy/shared-ini-file-loader/dist-types/getSSOTokenFromFile.d.ts b/amplify/functions/fetchDocuments/node_modules/@smithy/shared-ini-file-loader/dist-types/getSSOTokenFromFile.d.ts new file mode 100644 index 0000000..18199ac --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@smithy/shared-ini-file-loader/dist-types/getSSOTokenFromFile.d.ts @@ -0,0 +1,46 @@ +/** + * Cached SSO token retrieved from SSO login flow. + * @public + */ +export interface SSOToken { + /** + * A base64 encoded string returned by the sso-oidc service. + */ + accessToken: string; + /** + * The expiration time of the accessToken as an RFC 3339 formatted timestamp. + */ + expiresAt: string; + /** + * The token used to obtain an access token in the event that the accessToken is invalid or expired. + */ + refreshToken?: string; + /** + * The unique identifier string for each client. The client ID generated when performing the registration + * portion of the OIDC authorization flow. This is used to refresh the accessToken. + */ + clientId?: string; + /** + * A secret string generated when performing the registration portion of the OIDC authorization flow. + * This is used to refresh the accessToken. + */ + clientSecret?: string; + /** + * The expiration time of the client registration (clientId and clientSecret) as an RFC 3339 formatted timestamp. + */ + registrationExpiresAt?: string; + /** + * The configured sso_region for the profile that credentials are being resolved for. + */ + region?: string; + /** + * The configured sso_start_url for the profile that credentials are being resolved for. + */ + startUrl?: string; +} +/** + * @internal + * @param id - can be either a start URL or the SSO session name. + * Returns the SSO token from the file system. 
+ */ +export declare const getSSOTokenFromFile: (id: string) => Promise; diff --git a/amplify/functions/fetchDocuments/node_modules/@smithy/shared-ini-file-loader/dist-types/getSsoSessionData.d.ts b/amplify/functions/fetchDocuments/node_modules/@smithy/shared-ini-file-loader/dist-types/getSsoSessionData.d.ts new file mode 100644 index 0000000..9be020f --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@smithy/shared-ini-file-loader/dist-types/getSsoSessionData.d.ts @@ -0,0 +1,6 @@ +import { ParsedIniData } from "@smithy/types"; +/** + * Returns the sso-session data from parsed ini data by reading + * ssoSessionName after sso-session prefix including/excluding quotes + */ +export declare const getSsoSessionData: (data: ParsedIniData) => ParsedIniData; diff --git a/amplify/functions/fetchDocuments/node_modules/@smithy/shared-ini-file-loader/dist-types/index.d.ts b/amplify/functions/fetchDocuments/node_modules/@smithy/shared-ini-file-loader/dist-types/index.d.ts new file mode 100644 index 0000000..3e8b2c7 --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@smithy/shared-ini-file-loader/dist-types/index.d.ts @@ -0,0 +1,8 @@ +export * from "./getHomeDir"; +export * from "./getProfileName"; +export * from "./getSSOTokenFilepath"; +export * from "./getSSOTokenFromFile"; +export * from "./loadSharedConfigFiles"; +export * from "./loadSsoSessionData"; +export * from "./parseKnownFiles"; +export * from "./types"; diff --git a/amplify/functions/fetchDocuments/node_modules/@smithy/shared-ini-file-loader/dist-types/loadSharedConfigFiles.d.ts b/amplify/functions/fetchDocuments/node_modules/@smithy/shared-ini-file-loader/dist-types/loadSharedConfigFiles.d.ts new file mode 100644 index 0000000..3897ac3 --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@smithy/shared-ini-file-loader/dist-types/loadSharedConfigFiles.d.ts @@ -0,0 +1,36 @@ +import { Logger, SharedConfigFiles } from "@smithy/types"; +/** + * @public + */ +export interface 
SharedConfigInit { + /** + * The path at which to locate the ini credentials file. Defaults to the + * value of the `AWS_SHARED_CREDENTIALS_FILE` environment variable (if + * defined) or `~/.aws/credentials` otherwise. + */ + filepath?: string; + /** + * The path at which to locate the ini config file. Defaults to the value of + * the `AWS_CONFIG_FILE` environment variable (if defined) or + * `~/.aws/config` otherwise. + */ + configFilepath?: string; + /** + * Configuration files are normally cached after the first time they are loaded. When this + * property is set, the provider will always reload any configuration files loaded before. + */ + ignoreCache?: boolean; + /** + * For credential resolution trace logging. + */ + logger?: Logger; +} +/** + * @internal + */ +export declare const CONFIG_PREFIX_SEPARATOR = "."; +/** + * Loads the config and credentials files. + * @internal + */ +export declare const loadSharedConfigFiles: (init?: SharedConfigInit) => Promise; diff --git a/amplify/functions/fetchDocuments/node_modules/@smithy/shared-ini-file-loader/dist-types/loadSsoSessionData.d.ts b/amplify/functions/fetchDocuments/node_modules/@smithy/shared-ini-file-loader/dist-types/loadSsoSessionData.d.ts new file mode 100644 index 0000000..ed6c367 --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@smithy/shared-ini-file-loader/dist-types/loadSsoSessionData.d.ts @@ -0,0 +1,17 @@ +import { ParsedIniData } from "@smithy/types"; +/** + * Subset of {@link SharedConfigInit}. + * @internal + */ +export interface SsoSessionInit { + /** + * The path at which to locate the ini config file. Defaults to the value of + * the `AWS_CONFIG_FILE` environment variable (if defined) or + * `~/.aws/config` otherwise. 
+ */ + configFilepath?: string; +} +/** + * @internal + */ +export declare const loadSsoSessionData: (init?: SsoSessionInit) => Promise; diff --git a/amplify/functions/fetchDocuments/node_modules/@smithy/shared-ini-file-loader/dist-types/mergeConfigFiles.d.ts b/amplify/functions/fetchDocuments/node_modules/@smithy/shared-ini-file-loader/dist-types/mergeConfigFiles.d.ts new file mode 100644 index 0000000..46b8965 --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@smithy/shared-ini-file-loader/dist-types/mergeConfigFiles.d.ts @@ -0,0 +1,7 @@ +import { ParsedIniData } from "@smithy/types"; +/** + * Merge multiple profile config files such that settings each file are kept together + * + * @internal + */ +export declare const mergeConfigFiles: (...files: ParsedIniData[]) => ParsedIniData; diff --git a/amplify/functions/fetchDocuments/node_modules/@smithy/shared-ini-file-loader/dist-types/parseIni.d.ts b/amplify/functions/fetchDocuments/node_modules/@smithy/shared-ini-file-loader/dist-types/parseIni.d.ts new file mode 100644 index 0000000..0ae5851 --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@smithy/shared-ini-file-loader/dist-types/parseIni.d.ts @@ -0,0 +1,2 @@ +import { ParsedIniData } from "@smithy/types"; +export declare const parseIni: (iniData: string) => ParsedIniData; diff --git a/amplify/functions/fetchDocuments/node_modules/@smithy/shared-ini-file-loader/dist-types/parseKnownFiles.d.ts b/amplify/functions/fetchDocuments/node_modules/@smithy/shared-ini-file-loader/dist-types/parseKnownFiles.d.ts new file mode 100644 index 0000000..d5fcafa --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@smithy/shared-ini-file-loader/dist-types/parseKnownFiles.d.ts @@ -0,0 +1,18 @@ +import { ParsedIniData } from "@smithy/types"; +import { SharedConfigInit } from "./loadSharedConfigFiles"; +/** + * @public + */ +export interface SourceProfileInit extends SharedConfigInit { + /** + * The configuration profile to use. 
+ */ + profile?: string; +} +/** + * Load profiles from credentials and config INI files and normalize them into a + * single profile list. + * + * @internal + */ +export declare const parseKnownFiles: (init: SourceProfileInit) => Promise; diff --git a/amplify/functions/fetchDocuments/node_modules/@smithy/shared-ini-file-loader/dist-types/slurpFile.d.ts b/amplify/functions/fetchDocuments/node_modules/@smithy/shared-ini-file-loader/dist-types/slurpFile.d.ts new file mode 100644 index 0000000..a3bc84c --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@smithy/shared-ini-file-loader/dist-types/slurpFile.d.ts @@ -0,0 +1,5 @@ +interface SlurpFileOptions { + ignoreCache?: boolean; +} +export declare const slurpFile: (path: string, options?: SlurpFileOptions) => Promise; +export {}; diff --git a/amplify/functions/fetchDocuments/node_modules/@smithy/shared-ini-file-loader/dist-types/ts3.4/getConfigData.d.ts b/amplify/functions/fetchDocuments/node_modules/@smithy/shared-ini-file-loader/dist-types/ts3.4/getConfigData.d.ts new file mode 100644 index 0000000..c6b7588 --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@smithy/shared-ini-file-loader/dist-types/ts3.4/getConfigData.d.ts @@ -0,0 +1,8 @@ +import { ParsedIniData } from "@smithy/types"; +/** + * Returns the config data from parsed ini data. + * * Returns data for `default` + * * Returns profile name without prefix. + * * Returns non-profiles as is. 
+ */ +export declare const getConfigData: (data: ParsedIniData) => ParsedIniData; diff --git a/amplify/functions/fetchDocuments/node_modules/@smithy/shared-ini-file-loader/dist-types/ts3.4/getConfigFilepath.d.ts b/amplify/functions/fetchDocuments/node_modules/@smithy/shared-ini-file-loader/dist-types/ts3.4/getConfigFilepath.d.ts new file mode 100644 index 0000000..dc3699b --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@smithy/shared-ini-file-loader/dist-types/ts3.4/getConfigFilepath.d.ts @@ -0,0 +1,2 @@ +export declare const ENV_CONFIG_PATH = "AWS_CONFIG_FILE"; +export declare const getConfigFilepath: () => string; diff --git a/amplify/functions/fetchDocuments/node_modules/@smithy/shared-ini-file-loader/dist-types/ts3.4/getCredentialsFilepath.d.ts b/amplify/functions/fetchDocuments/node_modules/@smithy/shared-ini-file-loader/dist-types/ts3.4/getCredentialsFilepath.d.ts new file mode 100644 index 0000000..f2c95b4 --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@smithy/shared-ini-file-loader/dist-types/ts3.4/getCredentialsFilepath.d.ts @@ -0,0 +1,2 @@ +export declare const ENV_CREDENTIALS_PATH = "AWS_SHARED_CREDENTIALS_FILE"; +export declare const getCredentialsFilepath: () => string; diff --git a/amplify/functions/fetchDocuments/node_modules/@smithy/shared-ini-file-loader/dist-types/ts3.4/getHomeDir.d.ts b/amplify/functions/fetchDocuments/node_modules/@smithy/shared-ini-file-loader/dist-types/ts3.4/getHomeDir.d.ts new file mode 100644 index 0000000..4c1bd7a --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@smithy/shared-ini-file-loader/dist-types/ts3.4/getHomeDir.d.ts @@ -0,0 +1,6 @@ +/** + * Get the HOME directory for the current runtime. 
+ * + * @internal + */ +export declare const getHomeDir: () => string; diff --git a/amplify/functions/fetchDocuments/node_modules/@smithy/shared-ini-file-loader/dist-types/ts3.4/getProfileName.d.ts b/amplify/functions/fetchDocuments/node_modules/@smithy/shared-ini-file-loader/dist-types/ts3.4/getProfileName.d.ts new file mode 100644 index 0000000..91cb16b --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@smithy/shared-ini-file-loader/dist-types/ts3.4/getProfileName.d.ts @@ -0,0 +1,15 @@ +/** + * @internal + */ +export declare const ENV_PROFILE = "AWS_PROFILE"; +/** + * @internal + */ +export declare const DEFAULT_PROFILE = "default"; +/** + * Returns profile with priority order code - ENV - default. + * @internal + */ +export declare const getProfileName: (init: { + profile?: string; +}) => string; diff --git a/amplify/functions/fetchDocuments/node_modules/@smithy/shared-ini-file-loader/dist-types/ts3.4/getSSOTokenFilepath.d.ts b/amplify/functions/fetchDocuments/node_modules/@smithy/shared-ini-file-loader/dist-types/ts3.4/getSSOTokenFilepath.d.ts new file mode 100644 index 0000000..e549daa --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@smithy/shared-ini-file-loader/dist-types/ts3.4/getSSOTokenFilepath.d.ts @@ -0,0 +1,5 @@ +/** + * Returns the filepath of the file where SSO token is stored. + * @internal + */ +export declare const getSSOTokenFilepath: (id: string) => string; diff --git a/amplify/functions/fetchDocuments/node_modules/@smithy/shared-ini-file-loader/dist-types/ts3.4/getSSOTokenFromFile.d.ts b/amplify/functions/fetchDocuments/node_modules/@smithy/shared-ini-file-loader/dist-types/ts3.4/getSSOTokenFromFile.d.ts new file mode 100644 index 0000000..140979c --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@smithy/shared-ini-file-loader/dist-types/ts3.4/getSSOTokenFromFile.d.ts @@ -0,0 +1,46 @@ +/** + * Cached SSO token retrieved from SSO login flow. 
+ * @public + */ +export interface SSOToken { + /** + * A base64 encoded string returned by the sso-oidc service. + */ + accessToken: string; + /** + * The expiration time of the accessToken as an RFC 3339 formatted timestamp. + */ + expiresAt: string; + /** + * The token used to obtain an access token in the event that the accessToken is invalid or expired. + */ + refreshToken?: string; + /** + * The unique identifier string for each client. The client ID generated when performing the registration + * portion of the OIDC authorization flow. This is used to refresh the accessToken. + */ + clientId?: string; + /** + * A secret string generated when performing the registration portion of the OIDC authorization flow. + * This is used to refresh the accessToken. + */ + clientSecret?: string; + /** + * The expiration time of the client registration (clientId and clientSecret) as an RFC 3339 formatted timestamp. + */ + registrationExpiresAt?: string; + /** + * The configured sso_region for the profile that credentials are being resolved for. + */ + region?: string; + /** + * The configured sso_start_url for the profile that credentials are being resolved for. + */ + startUrl?: string; +} +/** + * @internal + * @param id - can be either a start URL or the SSO session name. + * Returns the SSO token from the file system. 
+ */ +export declare const getSSOTokenFromFile: (id: string) => Promise; diff --git a/amplify/functions/fetchDocuments/node_modules/@smithy/shared-ini-file-loader/dist-types/ts3.4/getSsoSessionData.d.ts b/amplify/functions/fetchDocuments/node_modules/@smithy/shared-ini-file-loader/dist-types/ts3.4/getSsoSessionData.d.ts new file mode 100644 index 0000000..04a1a99 --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@smithy/shared-ini-file-loader/dist-types/ts3.4/getSsoSessionData.d.ts @@ -0,0 +1,6 @@ +import { ParsedIniData } from "@smithy/types"; +/** + * Returns the sso-session data from parsed ini data by reading + * ssoSessionName after sso-session prefix including/excluding quotes + */ +export declare const getSsoSessionData: (data: ParsedIniData) => ParsedIniData; diff --git a/amplify/functions/fetchDocuments/node_modules/@smithy/shared-ini-file-loader/dist-types/ts3.4/index.d.ts b/amplify/functions/fetchDocuments/node_modules/@smithy/shared-ini-file-loader/dist-types/ts3.4/index.d.ts new file mode 100644 index 0000000..12ed3bb --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@smithy/shared-ini-file-loader/dist-types/ts3.4/index.d.ts @@ -0,0 +1,8 @@ +export * from "./getHomeDir"; +export * from "./getProfileName"; +export * from "./getSSOTokenFilepath"; +export * from "./getSSOTokenFromFile"; +export * from "./loadSharedConfigFiles"; +export * from "./loadSsoSessionData"; +export * from "./parseKnownFiles"; +export * from "./types"; diff --git a/amplify/functions/fetchDocuments/node_modules/@smithy/shared-ini-file-loader/dist-types/ts3.4/loadSharedConfigFiles.d.ts b/amplify/functions/fetchDocuments/node_modules/@smithy/shared-ini-file-loader/dist-types/ts3.4/loadSharedConfigFiles.d.ts new file mode 100644 index 0000000..969254e --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@smithy/shared-ini-file-loader/dist-types/ts3.4/loadSharedConfigFiles.d.ts @@ -0,0 +1,36 @@ +import { Logger, SharedConfigFiles } from 
"@smithy/types"; +/** + * @public + */ +export interface SharedConfigInit { + /** + * The path at which to locate the ini credentials file. Defaults to the + * value of the `AWS_SHARED_CREDENTIALS_FILE` environment variable (if + * defined) or `~/.aws/credentials` otherwise. + */ + filepath?: string; + /** + * The path at which to locate the ini config file. Defaults to the value of + * the `AWS_CONFIG_FILE` environment variable (if defined) or + * `~/.aws/config` otherwise. + */ + configFilepath?: string; + /** + * Configuration files are normally cached after the first time they are loaded. When this + * property is set, the provider will always reload any configuration files loaded before. + */ + ignoreCache?: boolean; + /** + * For credential resolution trace logging. + */ + logger?: Logger; +} +/** + * @internal + */ +export declare const CONFIG_PREFIX_SEPARATOR = "."; +/** + * Loads the config and credentials files. + * @internal + */ +export declare const loadSharedConfigFiles: (init?: SharedConfigInit) => Promise; diff --git a/amplify/functions/fetchDocuments/node_modules/@smithy/shared-ini-file-loader/dist-types/ts3.4/loadSsoSessionData.d.ts b/amplify/functions/fetchDocuments/node_modules/@smithy/shared-ini-file-loader/dist-types/ts3.4/loadSsoSessionData.d.ts new file mode 100644 index 0000000..08e265e --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@smithy/shared-ini-file-loader/dist-types/ts3.4/loadSsoSessionData.d.ts @@ -0,0 +1,17 @@ +import { ParsedIniData } from "@smithy/types"; +/** + * Subset of {@link SharedConfigInit}. + * @internal + */ +export interface SsoSessionInit { + /** + * The path at which to locate the ini config file. Defaults to the value of + * the `AWS_CONFIG_FILE` environment variable (if defined) or + * `~/.aws/config` otherwise. 
+ */ + configFilepath?: string; +} +/** + * @internal + */ +export declare const loadSsoSessionData: (init?: SsoSessionInit) => Promise; diff --git a/amplify/functions/fetchDocuments/node_modules/@smithy/shared-ini-file-loader/dist-types/ts3.4/mergeConfigFiles.d.ts b/amplify/functions/fetchDocuments/node_modules/@smithy/shared-ini-file-loader/dist-types/ts3.4/mergeConfigFiles.d.ts new file mode 100644 index 0000000..f94e725 --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@smithy/shared-ini-file-loader/dist-types/ts3.4/mergeConfigFiles.d.ts @@ -0,0 +1,7 @@ +import { ParsedIniData } from "@smithy/types"; +/** + * Merge multiple profile config files such that settings each file are kept together + * + * @internal + */ +export declare const mergeConfigFiles: (...files: ParsedIniData[]) => ParsedIniData; diff --git a/amplify/functions/fetchDocuments/node_modules/@smithy/shared-ini-file-loader/dist-types/ts3.4/parseIni.d.ts b/amplify/functions/fetchDocuments/node_modules/@smithy/shared-ini-file-loader/dist-types/ts3.4/parseIni.d.ts new file mode 100644 index 0000000..4e58d0e --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@smithy/shared-ini-file-loader/dist-types/ts3.4/parseIni.d.ts @@ -0,0 +1,2 @@ +import { ParsedIniData } from "@smithy/types"; +export declare const parseIni: (iniData: string) => ParsedIniData; diff --git a/amplify/functions/fetchDocuments/node_modules/@smithy/shared-ini-file-loader/dist-types/ts3.4/parseKnownFiles.d.ts b/amplify/functions/fetchDocuments/node_modules/@smithy/shared-ini-file-loader/dist-types/ts3.4/parseKnownFiles.d.ts new file mode 100644 index 0000000..46ba24b --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@smithy/shared-ini-file-loader/dist-types/ts3.4/parseKnownFiles.d.ts @@ -0,0 +1,18 @@ +import { ParsedIniData } from "@smithy/types"; +import { SharedConfigInit } from "./loadSharedConfigFiles"; +/** + * @public + */ +export interface SourceProfileInit extends SharedConfigInit { 
+ /** + * The configuration profile to use. + */ + profile?: string; +} +/** + * Load profiles from credentials and config INI files and normalize them into a + * single profile list. + * + * @internal + */ +export declare const parseKnownFiles: (init: SourceProfileInit) => Promise; diff --git a/amplify/functions/fetchDocuments/node_modules/@smithy/shared-ini-file-loader/dist-types/ts3.4/slurpFile.d.ts b/amplify/functions/fetchDocuments/node_modules/@smithy/shared-ini-file-loader/dist-types/ts3.4/slurpFile.d.ts new file mode 100644 index 0000000..33e7a0c --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@smithy/shared-ini-file-loader/dist-types/ts3.4/slurpFile.d.ts @@ -0,0 +1,5 @@ +interface SlurpFileOptions { + ignoreCache?: boolean; +} +export declare const slurpFile: (path: string, options?: SlurpFileOptions) => Promise; +export {}; diff --git a/amplify/functions/fetchDocuments/node_modules/@smithy/shared-ini-file-loader/dist-types/ts3.4/types.d.ts b/amplify/functions/fetchDocuments/node_modules/@smithy/shared-ini-file-loader/dist-types/ts3.4/types.d.ts new file mode 100644 index 0000000..6d6acbd --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@smithy/shared-ini-file-loader/dist-types/ts3.4/types.d.ts @@ -0,0 +1,16 @@ +import { ParsedIniData as __ParsedIniData, Profile as __Profile, SharedConfigFiles as __SharedConfigFiles } from "@smithy/types"; +/** + * @internal + * @deprecated Use Profile from "\@smithy/types" instead + */ +export type Profile = __Profile; +/** + * @internal + * @deprecated Use ParsedIniData from "\@smithy/types" instead + */ +export type ParsedIniData = __ParsedIniData; +/** + * @internal + * @deprecated Use SharedConfigFiles from "\@smithy/types" instead + */ +export type SharedConfigFiles = __SharedConfigFiles; diff --git a/amplify/functions/fetchDocuments/node_modules/@smithy/shared-ini-file-loader/dist-types/types.d.ts 
b/amplify/functions/fetchDocuments/node_modules/@smithy/shared-ini-file-loader/dist-types/types.d.ts new file mode 100644 index 0000000..c7c8d92 --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@smithy/shared-ini-file-loader/dist-types/types.d.ts @@ -0,0 +1,16 @@ +import { ParsedIniData as __ParsedIniData, Profile as __Profile, SharedConfigFiles as __SharedConfigFiles } from "@smithy/types"; +/** + * @internal + * @deprecated Use Profile from "\@smithy/types" instead + */ +export type Profile = __Profile; +/** + * @internal + * @deprecated Use ParsedIniData from "\@smithy/types" instead + */ +export type ParsedIniData = __ParsedIniData; +/** + * @internal + * @deprecated Use SharedConfigFiles from "\@smithy/types" instead + */ +export type SharedConfigFiles = __SharedConfigFiles; diff --git a/amplify/functions/fetchDocuments/node_modules/@smithy/shared-ini-file-loader/package.json b/amplify/functions/fetchDocuments/node_modules/@smithy/shared-ini-file-loader/package.json new file mode 100644 index 0000000..e6dfbfe --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@smithy/shared-ini-file-loader/package.json @@ -0,0 +1,72 @@ +{ + "name": "@smithy/shared-ini-file-loader", + "version": "4.0.2", + "dependencies": { + "@smithy/types": "^4.2.0", + "tslib": "^2.6.2" + }, + "devDependencies": { + "@types/node": "^18.11.9", + "concurrently": "7.0.0", + "downlevel-dts": "0.10.1", + "rimraf": "3.0.2", + "typedoc": "0.23.23" + }, + "scripts": { + "build": "concurrently 'yarn:build:cjs' 'yarn:build:es' 'yarn:build:types && yarn build:types:downlevel'", + "build:cjs": "node ../../scripts/inline shared-ini-file-loader", + "build:es": "yarn g:tsc -p tsconfig.es.json", + "build:types": "yarn g:tsc -p tsconfig.types.json", + "build:types:downlevel": "rimraf dist-types/ts3.4 && downlevel-dts dist-types dist-types/ts3.4", + "stage-release": "rimraf ./.release && yarn pack && mkdir ./.release && tar zxvf ./package.tgz --directory ./.release && rm 
./package.tgz", + "clean": "rimraf ./dist-* && rimraf *.tsbuildinfo || exit 0", + "lint": "eslint -c ../../.eslintrc.js \"src/**/*.ts\"", + "format": "prettier --config ../../prettier.config.js --ignore-path ../../.prettierignore --write \"**/*.{ts,md,json}\"", + "extract:docs": "api-extractor run --local", + "test": "yarn g:vitest run", + "test:watch": "yarn g:vitest watch" + }, + "author": { + "name": "AWS SDK for JavaScript Team", + "url": "https://aws.amazon.com/javascript/" + }, + "license": "Apache-2.0", + "main": "./dist-cjs/index.js", + "module": "./dist-es/index.js", + "types": "./dist-types/index.d.ts", + "browser": { + "./dist-es/getSSOTokenFromFile": false, + "./dist-es/slurpFile": false + }, + "react-native": { + "./dist-cjs/getSSOTokenFromFile": false, + "./dist-cjs/slurpFile": false, + "./dist-es/getSSOTokenFromFile": false, + "./dist-es/slurpFile": false + }, + "engines": { + "node": ">=18.0.0" + }, + "typesVersions": { + "<4.0": { + "dist-types/*": [ + "dist-types/ts3.4/*" + ] + } + }, + "files": [ + "dist-*/**" + ], + "homepage": "https://github.com/smithy-lang/smithy-typescript/tree/main/packages/shared-ini-file-loader", + "repository": { + "type": "git", + "url": "https://github.com/smithy-lang/smithy-typescript.git", + "directory": "packages/shared-ini-file-loader" + }, + "typedoc": { + "entryPoint": "src/index.ts" + }, + "publishConfig": { + "directory": ".release/package" + } +} \ No newline at end of file diff --git a/amplify/functions/fetchDocuments/node_modules/@smithy/signature-v4/LICENSE b/amplify/functions/fetchDocuments/node_modules/@smithy/signature-v4/LICENSE new file mode 100644 index 0000000..7b6491b --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@smithy/signature-v4/LICENSE @@ -0,0 +1,201 @@ +Apache License + Version 2.0, January 2004 + http://www.apache.org/licenses/ + + TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION + + 1. Definitions. 
+ + "License" shall mean the terms and conditions for use, reproduction, + and distribution as defined by Sections 1 through 9 of this document. + + "Licensor" shall mean the copyright owner or entity authorized by + the copyright owner that is granting the License. + + "Legal Entity" shall mean the union of the acting entity and all + other entities that control, are controlled by, or are under common + control with that entity. For the purposes of this definition, + "control" means (i) the power, direct or indirect, to cause the + direction or management of such entity, whether by contract or + otherwise, or (ii) ownership of fifty percent (50%) or more of the + outstanding shares, or (iii) beneficial ownership of such entity. + + "You" (or "Your") shall mean an individual or Legal Entity + exercising permissions granted by this License. + + "Source" form shall mean the preferred form for making modifications, + including but not limited to software source code, documentation + source, and configuration files. + + "Object" form shall mean any form resulting from mechanical + transformation or translation of a Source form, including but + not limited to compiled object code, generated documentation, + and conversions to other media types. + + "Work" shall mean the work of authorship, whether in Source or + Object form, made available under the License, as indicated by a + copyright notice that is included in or attached to the work + (an example is provided in the Appendix below). + + "Derivative Works" shall mean any work, whether in Source or Object + form, that is based on (or derived from) the Work and for which the + editorial revisions, annotations, elaborations, or other modifications + represent, as a whole, an original work of authorship. For the purposes + of this License, Derivative Works shall not include works that remain + separable from, or merely link (or bind by name) to the interfaces of, + the Work and Derivative Works thereof. 
+ + "Contribution" shall mean any work of authorship, including + the original version of the Work and any modifications or additions + to that Work or Derivative Works thereof, that is intentionally + submitted to Licensor for inclusion in the Work by the copyright owner + or by an individual or Legal Entity authorized to submit on behalf of + the copyright owner. For the purposes of this definition, "submitted" + means any form of electronic, verbal, or written communication sent + to the Licensor or its representatives, including but not limited to + communication on electronic mailing lists, source code control systems, + and issue tracking systems that are managed by, or on behalf of, the + Licensor for the purpose of discussing and improving the Work, but + excluding communication that is conspicuously marked or otherwise + designated in writing by the copyright owner as "Not a Contribution." + + "Contributor" shall mean Licensor and any individual or Legal Entity + on behalf of whom a Contribution has been received by Licensor and + subsequently incorporated within the Work. + + 2. Grant of Copyright License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + copyright license to reproduce, prepare Derivative Works of, + publicly display, publicly perform, sublicense, and distribute the + Work and such Derivative Works in Source or Object form. + + 3. Grant of Patent License. 
Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + (except as stated in this section) patent license to make, have made, + use, offer to sell, sell, import, and otherwise transfer the Work, + where such license applies only to those patent claims licensable + by such Contributor that are necessarily infringed by their + Contribution(s) alone or by combination of their Contribution(s) + with the Work to which such Contribution(s) was submitted. If You + institute patent litigation against any entity (including a + cross-claim or counterclaim in a lawsuit) alleging that the Work + or a Contribution incorporated within the Work constitutes direct + or contributory patent infringement, then any patent licenses + granted to You under this License for that Work shall terminate + as of the date such litigation is filed. + + 4. Redistribution. You may reproduce and distribute copies of the + Work or Derivative Works thereof in any medium, with or without + modifications, and in Source or Object form, provided that You + meet the following conditions: + + (a) You must give any other recipients of the Work or + Derivative Works a copy of this License; and + + (b) You must cause any modified files to carry prominent notices + stating that You changed the files; and + + (c) You must retain, in the Source form of any Derivative Works + that You distribute, all copyright, patent, trademark, and + attribution notices from the Source form of the Work, + excluding those notices that do not pertain to any part of + the Derivative Works; and + + (d) If the Work includes a "NOTICE" text file as part of its + distribution, then any Derivative Works that You distribute must + include a readable copy of the attribution notices contained + within such NOTICE file, excluding those notices that do not + pertain to any part of the Derivative Works, in at least one + of 
the following places: within a NOTICE text file distributed + as part of the Derivative Works; within the Source form or + documentation, if provided along with the Derivative Works; or, + within a display generated by the Derivative Works, if and + wherever such third-party notices normally appear. The contents + of the NOTICE file are for informational purposes only and + do not modify the License. You may add Your own attribution + notices within Derivative Works that You distribute, alongside + or as an addendum to the NOTICE text from the Work, provided + that such additional attribution notices cannot be construed + as modifying the License. + + You may add Your own copyright statement to Your modifications and + may provide additional or different license terms and conditions + for use, reproduction, or distribution of Your modifications, or + for any such Derivative Works as a whole, provided Your use, + reproduction, and distribution of the Work otherwise complies with + the conditions stated in this License. + + 5. Submission of Contributions. Unless You explicitly state otherwise, + any Contribution intentionally submitted for inclusion in the Work + by You to the Licensor shall be under the terms and conditions of + this License, without any additional terms or conditions. + Notwithstanding the above, nothing herein shall supersede or modify + the terms of any separate license agreement you may have executed + with Licensor regarding such Contributions. + + 6. Trademarks. This License does not grant permission to use the trade + names, trademarks, service marks, or product names of the Licensor, + except as required for reasonable and customary use in describing the + origin of the Work and reproducing the content of the NOTICE file. + + 7. Disclaimer of Warranty. 
Unless required by applicable law or + agreed to in writing, Licensor provides the Work (and each + Contributor provides its Contributions) on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or + implied, including, without limitation, any warranties or conditions + of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A + PARTICULAR PURPOSE. You are solely responsible for determining the + appropriateness of using or redistributing the Work and assume any + risks associated with Your exercise of permissions under this License. + + 8. Limitation of Liability. In no event and under no legal theory, + whether in tort (including negligence), contract, or otherwise, + unless required by applicable law (such as deliberate and grossly + negligent acts) or agreed to in writing, shall any Contributor be + liable to You for damages, including any direct, indirect, special, + incidental, or consequential damages of any character arising as a + result of this License or out of the use or inability to use the + Work (including but not limited to damages for loss of goodwill, + work stoppage, computer failure or malfunction, or any and all + other commercial damages or losses), even if such Contributor + has been advised of the possibility of such damages. + + 9. Accepting Warranty or Additional Liability. While redistributing + the Work or Derivative Works thereof, You may choose to offer, + and charge a fee for, acceptance of support, warranty, indemnity, + or other liability obligations and/or rights consistent with this + License. However, in accepting such obligations, You may act only + on Your own behalf and on Your sole responsibility, not on behalf + of any other Contributor, and only if You agree to indemnify, + defend, and hold each Contributor harmless for any liability + incurred by, or claims asserted against, such Contributor by reason + of your accepting any such warranty or additional liability. 
+ + END OF TERMS AND CONDITIONS + + APPENDIX: How to apply the Apache License to your work. + + To apply the Apache License to your work, attach the following + boilerplate notice, with the fields enclosed by brackets "{}" + replaced with your own identifying information. (Don't include + the brackets!) The text should be enclosed in the appropriate + comment syntax for the file format. We also recommend that a + file or class name and description of purpose be included on the + same "printed page" as the copyright notice for easier + identification within third-party archives. + + Copyright 2018-2020 Amazon.com, Inc. or its affiliates. All Rights Reserved. + + Licensed under the Apache License, Version 2.0 (the "License"); + you may not use this file except in compliance with the License. + You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + + Unless required by applicable law or agreed to in writing, software + distributed under the License is distributed on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + See the License for the specific language governing permissions and + limitations under the License. 
\ No newline at end of file diff --git a/amplify/functions/fetchDocuments/node_modules/@smithy/signature-v4/README.md b/amplify/functions/fetchDocuments/node_modules/@smithy/signature-v4/README.md new file mode 100644 index 0000000..3bc9a17 --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@smithy/signature-v4/README.md @@ -0,0 +1,11 @@ +# @smithy/signature-v4 + +[![NPM version](https://img.shields.io/npm/v/@smithy/signature-v4/latest.svg)](https://www.npmjs.com/package/@smithy/signature-v4) +[![NPM downloads](https://img.shields.io/npm/dm/@smithy/signature-v4.svg)](https://www.npmjs.com/package/@smithy/signature-v4) + +This package contains an implementation of the [AWS Signature Version 4](https://docs.aws.amazon.com/AmazonS3/latest/API/sig-v4-authenticating-requests.html) +authentication scheme. + +It is internal to Smithy-TypeScript generated clients, and not generally intended for standalone usage outside this context. + +For custom usage, inspect the interface of the SignatureV4 class. 
diff --git a/amplify/functions/fetchDocuments/node_modules/@smithy/signature-v4/dist-cjs/HeaderFormatter.js b/amplify/functions/fetchDocuments/node_modules/@smithy/signature-v4/dist-cjs/HeaderFormatter.js new file mode 100644 index 0000000..532e610 --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@smithy/signature-v4/dist-cjs/HeaderFormatter.js @@ -0,0 +1 @@ +module.exports = require("./index.js"); \ No newline at end of file diff --git a/amplify/functions/fetchDocuments/node_modules/@smithy/signature-v4/dist-cjs/SignatureV4.js b/amplify/functions/fetchDocuments/node_modules/@smithy/signature-v4/dist-cjs/SignatureV4.js new file mode 100644 index 0000000..532e610 --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@smithy/signature-v4/dist-cjs/SignatureV4.js @@ -0,0 +1 @@ +module.exports = require("./index.js"); \ No newline at end of file diff --git a/amplify/functions/fetchDocuments/node_modules/@smithy/signature-v4/dist-cjs/SignatureV4Base.js b/amplify/functions/fetchDocuments/node_modules/@smithy/signature-v4/dist-cjs/SignatureV4Base.js new file mode 100644 index 0000000..532e610 --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@smithy/signature-v4/dist-cjs/SignatureV4Base.js @@ -0,0 +1 @@ +module.exports = require("./index.js"); \ No newline at end of file diff --git a/amplify/functions/fetchDocuments/node_modules/@smithy/signature-v4/dist-cjs/constants.js b/amplify/functions/fetchDocuments/node_modules/@smithy/signature-v4/dist-cjs/constants.js new file mode 100644 index 0000000..532e610 --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@smithy/signature-v4/dist-cjs/constants.js @@ -0,0 +1 @@ +module.exports = require("./index.js"); \ No newline at end of file diff --git a/amplify/functions/fetchDocuments/node_modules/@smithy/signature-v4/dist-cjs/credentialDerivation.js b/amplify/functions/fetchDocuments/node_modules/@smithy/signature-v4/dist-cjs/credentialDerivation.js new file mode 100644 index 
0000000..532e610 --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@smithy/signature-v4/dist-cjs/credentialDerivation.js @@ -0,0 +1 @@ +module.exports = require("./index.js"); \ No newline at end of file diff --git a/amplify/functions/fetchDocuments/node_modules/@smithy/signature-v4/dist-cjs/getCanonicalHeaders.js b/amplify/functions/fetchDocuments/node_modules/@smithy/signature-v4/dist-cjs/getCanonicalHeaders.js new file mode 100644 index 0000000..532e610 --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@smithy/signature-v4/dist-cjs/getCanonicalHeaders.js @@ -0,0 +1 @@ +module.exports = require("./index.js"); \ No newline at end of file diff --git a/amplify/functions/fetchDocuments/node_modules/@smithy/signature-v4/dist-cjs/getCanonicalQuery.js b/amplify/functions/fetchDocuments/node_modules/@smithy/signature-v4/dist-cjs/getCanonicalQuery.js new file mode 100644 index 0000000..532e610 --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@smithy/signature-v4/dist-cjs/getCanonicalQuery.js @@ -0,0 +1 @@ +module.exports = require("./index.js"); \ No newline at end of file diff --git a/amplify/functions/fetchDocuments/node_modules/@smithy/signature-v4/dist-cjs/getPayloadHash.js b/amplify/functions/fetchDocuments/node_modules/@smithy/signature-v4/dist-cjs/getPayloadHash.js new file mode 100644 index 0000000..532e610 --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@smithy/signature-v4/dist-cjs/getPayloadHash.js @@ -0,0 +1 @@ +module.exports = require("./index.js"); \ No newline at end of file diff --git a/amplify/functions/fetchDocuments/node_modules/@smithy/signature-v4/dist-cjs/headerUtil.js b/amplify/functions/fetchDocuments/node_modules/@smithy/signature-v4/dist-cjs/headerUtil.js new file mode 100644 index 0000000..532e610 --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@smithy/signature-v4/dist-cjs/headerUtil.js @@ -0,0 +1 @@ +module.exports = require("./index.js"); \ No newline at end 
of file diff --git a/amplify/functions/fetchDocuments/node_modules/@smithy/signature-v4/dist-cjs/index.js b/amplify/functions/fetchDocuments/node_modules/@smithy/signature-v4/dist-cjs/index.js new file mode 100644 index 0000000..41f93bb --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@smithy/signature-v4/dist-cjs/index.js @@ -0,0 +1,682 @@ +var __defProp = Object.defineProperty; +var __getOwnPropDesc = Object.getOwnPropertyDescriptor; +var __getOwnPropNames = Object.getOwnPropertyNames; +var __hasOwnProp = Object.prototype.hasOwnProperty; +var __name = (target, value) => __defProp(target, "name", { value, configurable: true }); +var __export = (target, all) => { + for (var name in all) + __defProp(target, name, { get: all[name], enumerable: true }); +}; +var __copyProps = (to, from, except, desc) => { + if (from && typeof from === "object" || typeof from === "function") { + for (let key of __getOwnPropNames(from)) + if (!__hasOwnProp.call(to, key) && key !== except) + __defProp(to, key, { get: () => from[key], enumerable: !(desc = __getOwnPropDesc(from, key)) || desc.enumerable }); + } + return to; +}; +var __toCommonJS = (mod) => __copyProps(__defProp({}, "__esModule", { value: true }), mod); + +// src/index.ts +var src_exports = {}; +__export(src_exports, { + ALGORITHM_IDENTIFIER: () => ALGORITHM_IDENTIFIER, + ALGORITHM_IDENTIFIER_V4A: () => ALGORITHM_IDENTIFIER_V4A, + ALGORITHM_QUERY_PARAM: () => ALGORITHM_QUERY_PARAM, + ALWAYS_UNSIGNABLE_HEADERS: () => ALWAYS_UNSIGNABLE_HEADERS, + AMZ_DATE_HEADER: () => AMZ_DATE_HEADER, + AMZ_DATE_QUERY_PARAM: () => AMZ_DATE_QUERY_PARAM, + AUTH_HEADER: () => AUTH_HEADER, + CREDENTIAL_QUERY_PARAM: () => CREDENTIAL_QUERY_PARAM, + DATE_HEADER: () => DATE_HEADER, + EVENT_ALGORITHM_IDENTIFIER: () => EVENT_ALGORITHM_IDENTIFIER, + EXPIRES_QUERY_PARAM: () => EXPIRES_QUERY_PARAM, + GENERATED_HEADERS: () => GENERATED_HEADERS, + HOST_HEADER: () => HOST_HEADER, + KEY_TYPE_IDENTIFIER: () => KEY_TYPE_IDENTIFIER, + 
MAX_CACHE_SIZE: () => MAX_CACHE_SIZE, + MAX_PRESIGNED_TTL: () => MAX_PRESIGNED_TTL, + PROXY_HEADER_PATTERN: () => PROXY_HEADER_PATTERN, + REGION_SET_PARAM: () => REGION_SET_PARAM, + SEC_HEADER_PATTERN: () => SEC_HEADER_PATTERN, + SHA256_HEADER: () => SHA256_HEADER, + SIGNATURE_HEADER: () => SIGNATURE_HEADER, + SIGNATURE_QUERY_PARAM: () => SIGNATURE_QUERY_PARAM, + SIGNED_HEADERS_QUERY_PARAM: () => SIGNED_HEADERS_QUERY_PARAM, + SignatureV4: () => SignatureV4, + SignatureV4Base: () => SignatureV4Base, + TOKEN_HEADER: () => TOKEN_HEADER, + TOKEN_QUERY_PARAM: () => TOKEN_QUERY_PARAM, + UNSIGNABLE_PATTERNS: () => UNSIGNABLE_PATTERNS, + UNSIGNED_PAYLOAD: () => UNSIGNED_PAYLOAD, + clearCredentialCache: () => clearCredentialCache, + createScope: () => createScope, + getCanonicalHeaders: () => getCanonicalHeaders, + getCanonicalQuery: () => getCanonicalQuery, + getPayloadHash: () => getPayloadHash, + getSigningKey: () => getSigningKey, + hasHeader: () => hasHeader, + moveHeadersToQuery: () => moveHeadersToQuery, + prepareRequest: () => prepareRequest, + signatureV4aContainer: () => signatureV4aContainer +}); +module.exports = __toCommonJS(src_exports); + +// src/SignatureV4.ts + +var import_util_utf85 = require("@smithy/util-utf8"); + +// src/constants.ts +var ALGORITHM_QUERY_PARAM = "X-Amz-Algorithm"; +var CREDENTIAL_QUERY_PARAM = "X-Amz-Credential"; +var AMZ_DATE_QUERY_PARAM = "X-Amz-Date"; +var SIGNED_HEADERS_QUERY_PARAM = "X-Amz-SignedHeaders"; +var EXPIRES_QUERY_PARAM = "X-Amz-Expires"; +var SIGNATURE_QUERY_PARAM = "X-Amz-Signature"; +var TOKEN_QUERY_PARAM = "X-Amz-Security-Token"; +var REGION_SET_PARAM = "X-Amz-Region-Set"; +var AUTH_HEADER = "authorization"; +var AMZ_DATE_HEADER = AMZ_DATE_QUERY_PARAM.toLowerCase(); +var DATE_HEADER = "date"; +var GENERATED_HEADERS = [AUTH_HEADER, AMZ_DATE_HEADER, DATE_HEADER]; +var SIGNATURE_HEADER = SIGNATURE_QUERY_PARAM.toLowerCase(); +var SHA256_HEADER = "x-amz-content-sha256"; +var TOKEN_HEADER = TOKEN_QUERY_PARAM.toLowerCase(); 
+var HOST_HEADER = "host"; +var ALWAYS_UNSIGNABLE_HEADERS = { + authorization: true, + "cache-control": true, + connection: true, + expect: true, + from: true, + "keep-alive": true, + "max-forwards": true, + pragma: true, + referer: true, + te: true, + trailer: true, + "transfer-encoding": true, + upgrade: true, + "user-agent": true, + "x-amzn-trace-id": true +}; +var PROXY_HEADER_PATTERN = /^proxy-/; +var SEC_HEADER_PATTERN = /^sec-/; +var UNSIGNABLE_PATTERNS = [/^proxy-/i, /^sec-/i]; +var ALGORITHM_IDENTIFIER = "AWS4-HMAC-SHA256"; +var ALGORITHM_IDENTIFIER_V4A = "AWS4-ECDSA-P256-SHA256"; +var EVENT_ALGORITHM_IDENTIFIER = "AWS4-HMAC-SHA256-PAYLOAD"; +var UNSIGNED_PAYLOAD = "UNSIGNED-PAYLOAD"; +var MAX_CACHE_SIZE = 50; +var KEY_TYPE_IDENTIFIER = "aws4_request"; +var MAX_PRESIGNED_TTL = 60 * 60 * 24 * 7; + +// src/credentialDerivation.ts +var import_util_hex_encoding = require("@smithy/util-hex-encoding"); +var import_util_utf8 = require("@smithy/util-utf8"); +var signingKeyCache = {}; +var cacheQueue = []; +var createScope = /* @__PURE__ */ __name((shortDate, region, service) => `${shortDate}/${region}/${service}/${KEY_TYPE_IDENTIFIER}`, "createScope"); +var getSigningKey = /* @__PURE__ */ __name(async (sha256Constructor, credentials, shortDate, region, service) => { + const credsHash = await hmac(sha256Constructor, credentials.secretAccessKey, credentials.accessKeyId); + const cacheKey = `${shortDate}:${region}:${service}:${(0, import_util_hex_encoding.toHex)(credsHash)}:${credentials.sessionToken}`; + if (cacheKey in signingKeyCache) { + return signingKeyCache[cacheKey]; + } + cacheQueue.push(cacheKey); + while (cacheQueue.length > MAX_CACHE_SIZE) { + delete signingKeyCache[cacheQueue.shift()]; + } + let key = `AWS4${credentials.secretAccessKey}`; + for (const signable of [shortDate, region, service, KEY_TYPE_IDENTIFIER]) { + key = await hmac(sha256Constructor, key, signable); + } + return signingKeyCache[cacheKey] = key; +}, "getSigningKey"); +var 
clearCredentialCache = /* @__PURE__ */ __name(() => { + cacheQueue.length = 0; + Object.keys(signingKeyCache).forEach((cacheKey) => { + delete signingKeyCache[cacheKey]; + }); +}, "clearCredentialCache"); +var hmac = /* @__PURE__ */ __name((ctor, secret, data) => { + const hash = new ctor(secret); + hash.update((0, import_util_utf8.toUint8Array)(data)); + return hash.digest(); +}, "hmac"); + +// src/getCanonicalHeaders.ts +var getCanonicalHeaders = /* @__PURE__ */ __name(({ headers }, unsignableHeaders, signableHeaders) => { + const canonical = {}; + for (const headerName of Object.keys(headers).sort()) { + if (headers[headerName] == void 0) { + continue; + } + const canonicalHeaderName = headerName.toLowerCase(); + if (canonicalHeaderName in ALWAYS_UNSIGNABLE_HEADERS || unsignableHeaders?.has(canonicalHeaderName) || PROXY_HEADER_PATTERN.test(canonicalHeaderName) || SEC_HEADER_PATTERN.test(canonicalHeaderName)) { + if (!signableHeaders || signableHeaders && !signableHeaders.has(canonicalHeaderName)) { + continue; + } + } + canonical[canonicalHeaderName] = headers[headerName].trim().replace(/\s+/g, " "); + } + return canonical; +}, "getCanonicalHeaders"); + +// src/getPayloadHash.ts +var import_is_array_buffer = require("@smithy/is-array-buffer"); + +var import_util_utf82 = require("@smithy/util-utf8"); +var getPayloadHash = /* @__PURE__ */ __name(async ({ headers, body }, hashConstructor) => { + for (const headerName of Object.keys(headers)) { + if (headerName.toLowerCase() === SHA256_HEADER) { + return headers[headerName]; + } + } + if (body == void 0) { + return "e3b0c44298fc1c149afbf4c8996fb92427ae41e4649b934ca495991b7852b855"; + } else if (typeof body === "string" || ArrayBuffer.isView(body) || (0, import_is_array_buffer.isArrayBuffer)(body)) { + const hashCtor = new hashConstructor(); + hashCtor.update((0, import_util_utf82.toUint8Array)(body)); + return (0, import_util_hex_encoding.toHex)(await hashCtor.digest()); + } + return UNSIGNED_PAYLOAD; +}, 
"getPayloadHash"); + +// src/HeaderFormatter.ts + +var import_util_utf83 = require("@smithy/util-utf8"); +var HeaderFormatter = class { + static { + __name(this, "HeaderFormatter"); + } + format(headers) { + const chunks = []; + for (const headerName of Object.keys(headers)) { + const bytes = (0, import_util_utf83.fromUtf8)(headerName); + chunks.push(Uint8Array.from([bytes.byteLength]), bytes, this.formatHeaderValue(headers[headerName])); + } + const out = new Uint8Array(chunks.reduce((carry, bytes) => carry + bytes.byteLength, 0)); + let position = 0; + for (const chunk of chunks) { + out.set(chunk, position); + position += chunk.byteLength; + } + return out; + } + formatHeaderValue(header) { + switch (header.type) { + case "boolean": + return Uint8Array.from([header.value ? 0 /* boolTrue */ : 1 /* boolFalse */]); + case "byte": + return Uint8Array.from([2 /* byte */, header.value]); + case "short": + const shortView = new DataView(new ArrayBuffer(3)); + shortView.setUint8(0, 3 /* short */); + shortView.setInt16(1, header.value, false); + return new Uint8Array(shortView.buffer); + case "integer": + const intView = new DataView(new ArrayBuffer(5)); + intView.setUint8(0, 4 /* integer */); + intView.setInt32(1, header.value, false); + return new Uint8Array(intView.buffer); + case "long": + const longBytes = new Uint8Array(9); + longBytes[0] = 5 /* long */; + longBytes.set(header.value.bytes, 1); + return longBytes; + case "binary": + const binView = new DataView(new ArrayBuffer(3 + header.value.byteLength)); + binView.setUint8(0, 6 /* byteArray */); + binView.setUint16(1, header.value.byteLength, false); + const binBytes = new Uint8Array(binView.buffer); + binBytes.set(header.value, 3); + return binBytes; + case "string": + const utf8Bytes = (0, import_util_utf83.fromUtf8)(header.value); + const strView = new DataView(new ArrayBuffer(3 + utf8Bytes.byteLength)); + strView.setUint8(0, 7 /* string */); + strView.setUint16(1, utf8Bytes.byteLength, false); + const 
strBytes = new Uint8Array(strView.buffer); + strBytes.set(utf8Bytes, 3); + return strBytes; + case "timestamp": + const tsBytes = new Uint8Array(9); + tsBytes[0] = 8 /* timestamp */; + tsBytes.set(Int64.fromNumber(header.value.valueOf()).bytes, 1); + return tsBytes; + case "uuid": + if (!UUID_PATTERN.test(header.value)) { + throw new Error(`Invalid UUID received: ${header.value}`); + } + const uuidBytes = new Uint8Array(17); + uuidBytes[0] = 9 /* uuid */; + uuidBytes.set((0, import_util_hex_encoding.fromHex)(header.value.replace(/\-/g, "")), 1); + return uuidBytes; + } + } +}; +var UUID_PATTERN = /^[a-f0-9]{8}-[a-f0-9]{4}-[a-f0-9]{4}-[a-f0-9]{4}-[a-f0-9]{12}$/; +var Int64 = class _Int64 { + constructor(bytes) { + this.bytes = bytes; + if (bytes.byteLength !== 8) { + throw new Error("Int64 buffers must be exactly 8 bytes"); + } + } + static { + __name(this, "Int64"); + } + static fromNumber(number) { + if (number > 9223372036854776e3 || number < -9223372036854776e3) { + throw new Error(`${number} is too large (or, if negative, too small) to represent as an Int64`); + } + const bytes = new Uint8Array(8); + for (let i = 7, remaining = Math.abs(Math.round(number)); i > -1 && remaining > 0; i--, remaining /= 256) { + bytes[i] = remaining; + } + if (number < 0) { + negate(bytes); + } + return new _Int64(bytes); + } + /** + * Called implicitly by infix arithmetic operators. + */ + valueOf() { + const bytes = this.bytes.slice(0); + const negative = bytes[0] & 128; + if (negative) { + negate(bytes); + } + return parseInt((0, import_util_hex_encoding.toHex)(bytes), 16) * (negative ? 
-1 : 1); + } + toString() { + return String(this.valueOf()); + } +}; +function negate(bytes) { + for (let i = 0; i < 8; i++) { + bytes[i] ^= 255; + } + for (let i = 7; i > -1; i--) { + bytes[i]++; + if (bytes[i] !== 0) + break; + } +} +__name(negate, "negate"); + +// src/headerUtil.ts +var hasHeader = /* @__PURE__ */ __name((soughtHeader, headers) => { + soughtHeader = soughtHeader.toLowerCase(); + for (const headerName of Object.keys(headers)) { + if (soughtHeader === headerName.toLowerCase()) { + return true; + } + } + return false; +}, "hasHeader"); + +// src/moveHeadersToQuery.ts +var import_protocol_http = require("@smithy/protocol-http"); +var moveHeadersToQuery = /* @__PURE__ */ __name((request, options = {}) => { + const { headers, query = {} } = import_protocol_http.HttpRequest.clone(request); + for (const name of Object.keys(headers)) { + const lname = name.toLowerCase(); + if (lname.slice(0, 6) === "x-amz-" && !options.unhoistableHeaders?.has(lname) || options.hoistableHeaders?.has(lname)) { + query[name] = headers[name]; + delete headers[name]; + } + } + return { + ...request, + headers, + query + }; +}, "moveHeadersToQuery"); + +// src/prepareRequest.ts + +var prepareRequest = /* @__PURE__ */ __name((request) => { + request = import_protocol_http.HttpRequest.clone(request); + for (const headerName of Object.keys(request.headers)) { + if (GENERATED_HEADERS.indexOf(headerName.toLowerCase()) > -1) { + delete request.headers[headerName]; + } + } + return request; +}, "prepareRequest"); + +// src/SignatureV4Base.ts + +var import_util_middleware = require("@smithy/util-middleware"); + +var import_util_utf84 = require("@smithy/util-utf8"); + +// src/getCanonicalQuery.ts +var import_util_uri_escape = require("@smithy/util-uri-escape"); +var getCanonicalQuery = /* @__PURE__ */ __name(({ query = {} }) => { + const keys = []; + const serialized = {}; + for (const key of Object.keys(query)) { + if (key.toLowerCase() === SIGNATURE_HEADER) { + continue; + } + const 
encodedKey = (0, import_util_uri_escape.escapeUri)(key); + keys.push(encodedKey); + const value = query[key]; + if (typeof value === "string") { + serialized[encodedKey] = `${encodedKey}=${(0, import_util_uri_escape.escapeUri)(value)}`; + } else if (Array.isArray(value)) { + serialized[encodedKey] = value.slice(0).reduce((encoded, value2) => encoded.concat([`${encodedKey}=${(0, import_util_uri_escape.escapeUri)(value2)}`]), []).sort().join("&"); + } + } + return keys.sort().map((key) => serialized[key]).filter((serialized2) => serialized2).join("&"); +}, "getCanonicalQuery"); + +// src/utilDate.ts +var iso8601 = /* @__PURE__ */ __name((time) => toDate(time).toISOString().replace(/\.\d{3}Z$/, "Z"), "iso8601"); +var toDate = /* @__PURE__ */ __name((time) => { + if (typeof time === "number") { + return new Date(time * 1e3); + } + if (typeof time === "string") { + if (Number(time)) { + return new Date(Number(time) * 1e3); + } + return new Date(time); + } + return time; +}, "toDate"); + +// src/SignatureV4Base.ts +var SignatureV4Base = class { + static { + __name(this, "SignatureV4Base"); + } + constructor({ + applyChecksum, + credentials, + region, + service, + sha256, + uriEscapePath = true + }) { + this.service = service; + this.sha256 = sha256; + this.uriEscapePath = uriEscapePath; + this.applyChecksum = typeof applyChecksum === "boolean" ? 
applyChecksum : true; + this.regionProvider = (0, import_util_middleware.normalizeProvider)(region); + this.credentialProvider = (0, import_util_middleware.normalizeProvider)(credentials); + } + createCanonicalRequest(request, canonicalHeaders, payloadHash) { + const sortedHeaders = Object.keys(canonicalHeaders).sort(); + return `${request.method} +${this.getCanonicalPath(request)} +${getCanonicalQuery(request)} +${sortedHeaders.map((name) => `${name}:${canonicalHeaders[name]}`).join("\n")} + +${sortedHeaders.join(";")} +${payloadHash}`; + } + async createStringToSign(longDate, credentialScope, canonicalRequest, algorithmIdentifier) { + const hash = new this.sha256(); + hash.update((0, import_util_utf84.toUint8Array)(canonicalRequest)); + const hashedRequest = await hash.digest(); + return `${algorithmIdentifier} +${longDate} +${credentialScope} +${(0, import_util_hex_encoding.toHex)(hashedRequest)}`; + } + getCanonicalPath({ path }) { + if (this.uriEscapePath) { + const normalizedPathSegments = []; + for (const pathSegment of path.split("/")) { + if (pathSegment?.length === 0) + continue; + if (pathSegment === ".") + continue; + if (pathSegment === "..") { + normalizedPathSegments.pop(); + } else { + normalizedPathSegments.push(pathSegment); + } + } + const normalizedPath = `${path?.startsWith("/") ? "/" : ""}${normalizedPathSegments.join("/")}${normalizedPathSegments.length > 0 && path?.endsWith("/") ? 
"/" : ""}`; + const doubleEncoded = (0, import_util_uri_escape.escapeUri)(normalizedPath); + return doubleEncoded.replace(/%2F/g, "/"); + } + return path; + } + validateResolvedCredentials(credentials) { + if (typeof credentials !== "object" || // @ts-expect-error: Property 'accessKeyId' does not exist on type 'object'.ts(2339) + typeof credentials.accessKeyId !== "string" || // @ts-expect-error: Property 'secretAccessKey' does not exist on type 'object'.ts(2339) + typeof credentials.secretAccessKey !== "string") { + throw new Error("Resolved credential object is not valid"); + } + } + formatDate(now) { + const longDate = iso8601(now).replace(/[\-:]/g, ""); + return { + longDate, + shortDate: longDate.slice(0, 8) + }; + } + getCanonicalHeaderList(headers) { + return Object.keys(headers).sort().join(";"); + } +}; + +// src/SignatureV4.ts +var SignatureV4 = class extends SignatureV4Base { + constructor({ + applyChecksum, + credentials, + region, + service, + sha256, + uriEscapePath = true + }) { + super({ + applyChecksum, + credentials, + region, + service, + sha256, + uriEscapePath + }); + this.headerFormatter = new HeaderFormatter(); + } + static { + __name(this, "SignatureV4"); + } + async presign(originalRequest, options = {}) { + const { + signingDate = /* @__PURE__ */ new Date(), + expiresIn = 3600, + unsignableHeaders, + unhoistableHeaders, + signableHeaders, + hoistableHeaders, + signingRegion, + signingService + } = options; + const credentials = await this.credentialProvider(); + this.validateResolvedCredentials(credentials); + const region = signingRegion ?? await this.regionProvider(); + const { longDate, shortDate } = this.formatDate(signingDate); + if (expiresIn > MAX_PRESIGNED_TTL) { + return Promise.reject( + "Signature version 4 presigned URLs must have an expiration date less than one week in the future" + ); + } + const scope = createScope(shortDate, region, signingService ?? 
this.service); + const request = moveHeadersToQuery(prepareRequest(originalRequest), { unhoistableHeaders, hoistableHeaders }); + if (credentials.sessionToken) { + request.query[TOKEN_QUERY_PARAM] = credentials.sessionToken; + } + request.query[ALGORITHM_QUERY_PARAM] = ALGORITHM_IDENTIFIER; + request.query[CREDENTIAL_QUERY_PARAM] = `${credentials.accessKeyId}/${scope}`; + request.query[AMZ_DATE_QUERY_PARAM] = longDate; + request.query[EXPIRES_QUERY_PARAM] = expiresIn.toString(10); + const canonicalHeaders = getCanonicalHeaders(request, unsignableHeaders, signableHeaders); + request.query[SIGNED_HEADERS_QUERY_PARAM] = this.getCanonicalHeaderList(canonicalHeaders); + request.query[SIGNATURE_QUERY_PARAM] = await this.getSignature( + longDate, + scope, + this.getSigningKey(credentials, region, shortDate, signingService), + this.createCanonicalRequest(request, canonicalHeaders, await getPayloadHash(originalRequest, this.sha256)) + ); + return request; + } + async sign(toSign, options) { + if (typeof toSign === "string") { + return this.signString(toSign, options); + } else if (toSign.headers && toSign.payload) { + return this.signEvent(toSign, options); + } else if (toSign.message) { + return this.signMessage(toSign, options); + } else { + return this.signRequest(toSign, options); + } + } + async signEvent({ headers, payload }, { signingDate = /* @__PURE__ */ new Date(), priorSignature, signingRegion, signingService }) { + const region = signingRegion ?? await this.regionProvider(); + const { shortDate, longDate } = this.formatDate(signingDate); + const scope = createScope(shortDate, region, signingService ?? 
this.service); + const hashedPayload = await getPayloadHash({ headers: {}, body: payload }, this.sha256); + const hash = new this.sha256(); + hash.update(headers); + const hashedHeaders = (0, import_util_hex_encoding.toHex)(await hash.digest()); + const stringToSign = [ + EVENT_ALGORITHM_IDENTIFIER, + longDate, + scope, + priorSignature, + hashedHeaders, + hashedPayload + ].join("\n"); + return this.signString(stringToSign, { signingDate, signingRegion: region, signingService }); + } + async signMessage(signableMessage, { signingDate = /* @__PURE__ */ new Date(), signingRegion, signingService }) { + const promise = this.signEvent( + { + headers: this.headerFormatter.format(signableMessage.message.headers), + payload: signableMessage.message.body + }, + { + signingDate, + signingRegion, + signingService, + priorSignature: signableMessage.priorSignature + } + ); + return promise.then((signature) => { + return { message: signableMessage.message, signature }; + }); + } + async signString(stringToSign, { signingDate = /* @__PURE__ */ new Date(), signingRegion, signingService } = {}) { + const credentials = await this.credentialProvider(); + this.validateResolvedCredentials(credentials); + const region = signingRegion ?? await this.regionProvider(); + const { shortDate } = this.formatDate(signingDate); + const hash = new this.sha256(await this.getSigningKey(credentials, region, shortDate, signingService)); + hash.update((0, import_util_utf85.toUint8Array)(stringToSign)); + return (0, import_util_hex_encoding.toHex)(await hash.digest()); + } + async signRequest(requestToSign, { + signingDate = /* @__PURE__ */ new Date(), + signableHeaders, + unsignableHeaders, + signingRegion, + signingService + } = {}) { + const credentials = await this.credentialProvider(); + this.validateResolvedCredentials(credentials); + const region = signingRegion ?? 
await this.regionProvider(); + const request = prepareRequest(requestToSign); + const { longDate, shortDate } = this.formatDate(signingDate); + const scope = createScope(shortDate, region, signingService ?? this.service); + request.headers[AMZ_DATE_HEADER] = longDate; + if (credentials.sessionToken) { + request.headers[TOKEN_HEADER] = credentials.sessionToken; + } + const payloadHash = await getPayloadHash(request, this.sha256); + if (!hasHeader(SHA256_HEADER, request.headers) && this.applyChecksum) { + request.headers[SHA256_HEADER] = payloadHash; + } + const canonicalHeaders = getCanonicalHeaders(request, unsignableHeaders, signableHeaders); + const signature = await this.getSignature( + longDate, + scope, + this.getSigningKey(credentials, region, shortDate, signingService), + this.createCanonicalRequest(request, canonicalHeaders, payloadHash) + ); + request.headers[AUTH_HEADER] = `${ALGORITHM_IDENTIFIER} Credential=${credentials.accessKeyId}/${scope}, SignedHeaders=${this.getCanonicalHeaderList(canonicalHeaders)}, Signature=${signature}`; + return request; + } + async getSignature(longDate, credentialScope, keyPromise, canonicalRequest) { + const stringToSign = await this.createStringToSign( + longDate, + credentialScope, + canonicalRequest, + ALGORITHM_IDENTIFIER + ); + const hash = new this.sha256(await keyPromise); + hash.update((0, import_util_utf85.toUint8Array)(stringToSign)); + return (0, import_util_hex_encoding.toHex)(await hash.digest()); + } + getSigningKey(credentials, region, shortDate, service) { + return getSigningKey(this.sha256, credentials, shortDate, region, service || this.service); + } +}; + +// src/signature-v4a-container.ts +var signatureV4aContainer = { + SignatureV4a: null +}; +// Annotate the CommonJS export names for ESM import in node: + +0 && (module.exports = { + getCanonicalHeaders, + getCanonicalQuery, + getPayloadHash, + moveHeadersToQuery, + prepareRequest, + SignatureV4Base, + hasHeader, + SignatureV4, + ALGORITHM_QUERY_PARAM, 
+ CREDENTIAL_QUERY_PARAM, + AMZ_DATE_QUERY_PARAM, + SIGNED_HEADERS_QUERY_PARAM, + EXPIRES_QUERY_PARAM, + SIGNATURE_QUERY_PARAM, + TOKEN_QUERY_PARAM, + REGION_SET_PARAM, + AUTH_HEADER, + AMZ_DATE_HEADER, + DATE_HEADER, + GENERATED_HEADERS, + SIGNATURE_HEADER, + SHA256_HEADER, + TOKEN_HEADER, + HOST_HEADER, + ALWAYS_UNSIGNABLE_HEADERS, + PROXY_HEADER_PATTERN, + SEC_HEADER_PATTERN, + UNSIGNABLE_PATTERNS, + ALGORITHM_IDENTIFIER, + ALGORITHM_IDENTIFIER_V4A, + EVENT_ALGORITHM_IDENTIFIER, + UNSIGNED_PAYLOAD, + MAX_CACHE_SIZE, + KEY_TYPE_IDENTIFIER, + MAX_PRESIGNED_TTL, + createScope, + getSigningKey, + clearCredentialCache, + signatureV4aContainer +}); + diff --git a/amplify/functions/fetchDocuments/node_modules/@smithy/signature-v4/dist-cjs/moveHeadersToQuery.js b/amplify/functions/fetchDocuments/node_modules/@smithy/signature-v4/dist-cjs/moveHeadersToQuery.js new file mode 100644 index 0000000..532e610 --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@smithy/signature-v4/dist-cjs/moveHeadersToQuery.js @@ -0,0 +1 @@ +module.exports = require("./index.js"); \ No newline at end of file diff --git a/amplify/functions/fetchDocuments/node_modules/@smithy/signature-v4/dist-cjs/prepareRequest.js b/amplify/functions/fetchDocuments/node_modules/@smithy/signature-v4/dist-cjs/prepareRequest.js new file mode 100644 index 0000000..532e610 --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@smithy/signature-v4/dist-cjs/prepareRequest.js @@ -0,0 +1 @@ +module.exports = require("./index.js"); \ No newline at end of file diff --git a/amplify/functions/fetchDocuments/node_modules/@smithy/signature-v4/dist-cjs/signature-v4a-container.js b/amplify/functions/fetchDocuments/node_modules/@smithy/signature-v4/dist-cjs/signature-v4a-container.js new file mode 100644 index 0000000..532e610 --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@smithy/signature-v4/dist-cjs/signature-v4a-container.js @@ -0,0 +1 @@ +module.exports = require("./index.js"); 
\ No newline at end of file diff --git a/amplify/functions/fetchDocuments/node_modules/@smithy/signature-v4/dist-cjs/suite.fixture.js b/amplify/functions/fetchDocuments/node_modules/@smithy/signature-v4/dist-cjs/suite.fixture.js new file mode 100644 index 0000000..532e610 --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@smithy/signature-v4/dist-cjs/suite.fixture.js @@ -0,0 +1 @@ +module.exports = require("./index.js"); \ No newline at end of file diff --git a/amplify/functions/fetchDocuments/node_modules/@smithy/signature-v4/dist-cjs/utilDate.js b/amplify/functions/fetchDocuments/node_modules/@smithy/signature-v4/dist-cjs/utilDate.js new file mode 100644 index 0000000..532e610 --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@smithy/signature-v4/dist-cjs/utilDate.js @@ -0,0 +1 @@ +module.exports = require("./index.js"); \ No newline at end of file diff --git a/amplify/functions/fetchDocuments/node_modules/@smithy/signature-v4/dist-es/HeaderFormatter.js b/amplify/functions/fetchDocuments/node_modules/@smithy/signature-v4/dist-es/HeaderFormatter.js new file mode 100644 index 0000000..4edc4b4 --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@smithy/signature-v4/dist-es/HeaderFormatter.js @@ -0,0 +1,125 @@ +import { fromHex, toHex } from "@smithy/util-hex-encoding"; +import { fromUtf8 } from "@smithy/util-utf8"; +export class HeaderFormatter { + format(headers) { + const chunks = []; + for (const headerName of Object.keys(headers)) { + const bytes = fromUtf8(headerName); + chunks.push(Uint8Array.from([bytes.byteLength]), bytes, this.formatHeaderValue(headers[headerName])); + } + const out = new Uint8Array(chunks.reduce((carry, bytes) => carry + bytes.byteLength, 0)); + let position = 0; + for (const chunk of chunks) { + out.set(chunk, position); + position += chunk.byteLength; + } + return out; + } + formatHeaderValue(header) { + switch (header.type) { + case "boolean": + return Uint8Array.from([header.value ? 
0 : 1]); + case "byte": + return Uint8Array.from([2, header.value]); + case "short": + const shortView = new DataView(new ArrayBuffer(3)); + shortView.setUint8(0, 3); + shortView.setInt16(1, header.value, false); + return new Uint8Array(shortView.buffer); + case "integer": + const intView = new DataView(new ArrayBuffer(5)); + intView.setUint8(0, 4); + intView.setInt32(1, header.value, false); + return new Uint8Array(intView.buffer); + case "long": + const longBytes = new Uint8Array(9); + longBytes[0] = 5; + longBytes.set(header.value.bytes, 1); + return longBytes; + case "binary": + const binView = new DataView(new ArrayBuffer(3 + header.value.byteLength)); + binView.setUint8(0, 6); + binView.setUint16(1, header.value.byteLength, false); + const binBytes = new Uint8Array(binView.buffer); + binBytes.set(header.value, 3); + return binBytes; + case "string": + const utf8Bytes = fromUtf8(header.value); + const strView = new DataView(new ArrayBuffer(3 + utf8Bytes.byteLength)); + strView.setUint8(0, 7); + strView.setUint16(1, utf8Bytes.byteLength, false); + const strBytes = new Uint8Array(strView.buffer); + strBytes.set(utf8Bytes, 3); + return strBytes; + case "timestamp": + const tsBytes = new Uint8Array(9); + tsBytes[0] = 8; + tsBytes.set(Int64.fromNumber(header.value.valueOf()).bytes, 1); + return tsBytes; + case "uuid": + if (!UUID_PATTERN.test(header.value)) { + throw new Error(`Invalid UUID received: ${header.value}`); + } + const uuidBytes = new Uint8Array(17); + uuidBytes[0] = 9; + uuidBytes.set(fromHex(header.value.replace(/\-/g, "")), 1); + return uuidBytes; + } + } +} +var HEADER_VALUE_TYPE; +(function (HEADER_VALUE_TYPE) { + HEADER_VALUE_TYPE[HEADER_VALUE_TYPE["boolTrue"] = 0] = "boolTrue"; + HEADER_VALUE_TYPE[HEADER_VALUE_TYPE["boolFalse"] = 1] = "boolFalse"; + HEADER_VALUE_TYPE[HEADER_VALUE_TYPE["byte"] = 2] = "byte"; + HEADER_VALUE_TYPE[HEADER_VALUE_TYPE["short"] = 3] = "short"; + HEADER_VALUE_TYPE[HEADER_VALUE_TYPE["integer"] = 4] = "integer"; + 
HEADER_VALUE_TYPE[HEADER_VALUE_TYPE["long"] = 5] = "long"; + HEADER_VALUE_TYPE[HEADER_VALUE_TYPE["byteArray"] = 6] = "byteArray"; + HEADER_VALUE_TYPE[HEADER_VALUE_TYPE["string"] = 7] = "string"; + HEADER_VALUE_TYPE[HEADER_VALUE_TYPE["timestamp"] = 8] = "timestamp"; + HEADER_VALUE_TYPE[HEADER_VALUE_TYPE["uuid"] = 9] = "uuid"; +})(HEADER_VALUE_TYPE || (HEADER_VALUE_TYPE = {})); +const UUID_PATTERN = /^[a-f0-9]{8}-[a-f0-9]{4}-[a-f0-9]{4}-[a-f0-9]{4}-[a-f0-9]{12}$/; +export class Int64 { + constructor(bytes) { + this.bytes = bytes; + if (bytes.byteLength !== 8) { + throw new Error("Int64 buffers must be exactly 8 bytes"); + } + } + static fromNumber(number) { + if (number > 9223372036854776000 || number < -9223372036854776000) { + throw new Error(`${number} is too large (or, if negative, too small) to represent as an Int64`); + } + const bytes = new Uint8Array(8); + for (let i = 7, remaining = Math.abs(Math.round(number)); i > -1 && remaining > 0; i--, remaining /= 256) { + bytes[i] = remaining; + } + if (number < 0) { + negate(bytes); + } + return new Int64(bytes); + } + valueOf() { + const bytes = this.bytes.slice(0); + const negative = bytes[0] & 0b10000000; + if (negative) { + negate(bytes); + } + return parseInt(toHex(bytes), 16) * (negative ? 
-1 : 1); + } + toString() { + return String(this.valueOf()); + } +} +function negate(bytes) { + for (let i = 0; i < 8; i++) { + bytes[i] ^= 0xff; + } + for (let i = 7; i > -1; i--) { + bytes[i]++; + if (bytes[i] !== 0) + break; + } +} diff --git a/amplify/functions/fetchDocuments/node_modules/@smithy/signature-v4/dist-es/SignatureV4.js b/amplify/functions/fetchDocuments/node_modules/@smithy/signature-v4/dist-es/SignatureV4.js new file mode 100644 index 0000000..d149132 --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@smithy/signature-v4/dist-es/SignatureV4.js @@ -0,0 +1,135 @@ +import { toHex } from "@smithy/util-hex-encoding"; +import { toUint8Array } from "@smithy/util-utf8"; +import { ALGORITHM_IDENTIFIER, ALGORITHM_QUERY_PARAM, AMZ_DATE_HEADER, AMZ_DATE_QUERY_PARAM, AUTH_HEADER, CREDENTIAL_QUERY_PARAM, EVENT_ALGORITHM_IDENTIFIER, EXPIRES_QUERY_PARAM, MAX_PRESIGNED_TTL, SHA256_HEADER, SIGNATURE_QUERY_PARAM, SIGNED_HEADERS_QUERY_PARAM, TOKEN_HEADER, TOKEN_QUERY_PARAM, } from "./constants"; +import { createScope, getSigningKey } from "./credentialDerivation"; +import { getCanonicalHeaders } from "./getCanonicalHeaders"; +import { getPayloadHash } from "./getPayloadHash"; +import { HeaderFormatter } from "./HeaderFormatter"; +import { hasHeader } from "./headerUtil"; +import { moveHeadersToQuery } from "./moveHeadersToQuery"; +import { prepareRequest } from "./prepareRequest"; +import { SignatureV4Base } from "./SignatureV4Base"; +export class SignatureV4 extends SignatureV4Base { + constructor({ applyChecksum, credentials, region, service, sha256, uriEscapePath = true, }) { + super({ + applyChecksum, + credentials, + region, + service, + sha256, + uriEscapePath, + }); + this.headerFormatter = new HeaderFormatter(); + } + async presign(originalRequest, options = {}) { + const { signingDate = new Date(), expiresIn = 3600, unsignableHeaders, unhoistableHeaders, signableHeaders, hoistableHeaders, signingRegion, signingService, } = options; + const 
credentials = await this.credentialProvider(); + this.validateResolvedCredentials(credentials); + const region = signingRegion ?? (await this.regionProvider()); + const { longDate, shortDate } = this.formatDate(signingDate); + if (expiresIn > MAX_PRESIGNED_TTL) { + return Promise.reject("Signature version 4 presigned URLs" + " must have an expiration date less than one week in" + " the future"); + } + const scope = createScope(shortDate, region, signingService ?? this.service); + const request = moveHeadersToQuery(prepareRequest(originalRequest), { unhoistableHeaders, hoistableHeaders }); + if (credentials.sessionToken) { + request.query[TOKEN_QUERY_PARAM] = credentials.sessionToken; + } + request.query[ALGORITHM_QUERY_PARAM] = ALGORITHM_IDENTIFIER; + request.query[CREDENTIAL_QUERY_PARAM] = `${credentials.accessKeyId}/${scope}`; + request.query[AMZ_DATE_QUERY_PARAM] = longDate; + request.query[EXPIRES_QUERY_PARAM] = expiresIn.toString(10); + const canonicalHeaders = getCanonicalHeaders(request, unsignableHeaders, signableHeaders); + request.query[SIGNED_HEADERS_QUERY_PARAM] = this.getCanonicalHeaderList(canonicalHeaders); + request.query[SIGNATURE_QUERY_PARAM] = await this.getSignature(longDate, scope, this.getSigningKey(credentials, region, shortDate, signingService), this.createCanonicalRequest(request, canonicalHeaders, await getPayloadHash(originalRequest, this.sha256))); + return request; + } + async sign(toSign, options) { + if (typeof toSign === "string") { + return this.signString(toSign, options); + } + else if (toSign.headers && toSign.payload) { + return this.signEvent(toSign, options); + } + else if (toSign.message) { + return this.signMessage(toSign, options); + } + else { + return this.signRequest(toSign, options); + } + } + async signEvent({ headers, payload }, { signingDate = new Date(), priorSignature, signingRegion, signingService }) { + const region = signingRegion ?? 
(await this.regionProvider()); + const { shortDate, longDate } = this.formatDate(signingDate); + const scope = createScope(shortDate, region, signingService ?? this.service); + const hashedPayload = await getPayloadHash({ headers: {}, body: payload }, this.sha256); + const hash = new this.sha256(); + hash.update(headers); + const hashedHeaders = toHex(await hash.digest()); + const stringToSign = [ + EVENT_ALGORITHM_IDENTIFIER, + longDate, + scope, + priorSignature, + hashedHeaders, + hashedPayload, + ].join("\n"); + return this.signString(stringToSign, { signingDate, signingRegion: region, signingService }); + } + async signMessage(signableMessage, { signingDate = new Date(), signingRegion, signingService }) { + const promise = this.signEvent({ + headers: this.headerFormatter.format(signableMessage.message.headers), + payload: signableMessage.message.body, + }, { + signingDate, + signingRegion, + signingService, + priorSignature: signableMessage.priorSignature, + }); + return promise.then((signature) => { + return { message: signableMessage.message, signature }; + }); + } + async signString(stringToSign, { signingDate = new Date(), signingRegion, signingService } = {}) { + const credentials = await this.credentialProvider(); + this.validateResolvedCredentials(credentials); + const region = signingRegion ?? (await this.regionProvider()); + const { shortDate } = this.formatDate(signingDate); + const hash = new this.sha256(await this.getSigningKey(credentials, region, shortDate, signingService)); + hash.update(toUint8Array(stringToSign)); + return toHex(await hash.digest()); + } + async signRequest(requestToSign, { signingDate = new Date(), signableHeaders, unsignableHeaders, signingRegion, signingService, } = {}) { + const credentials = await this.credentialProvider(); + this.validateResolvedCredentials(credentials); + const region = signingRegion ?? 
(await this.regionProvider()); + const request = prepareRequest(requestToSign); + const { longDate, shortDate } = this.formatDate(signingDate); + const scope = createScope(shortDate, region, signingService ?? this.service); + request.headers[AMZ_DATE_HEADER] = longDate; + if (credentials.sessionToken) { + request.headers[TOKEN_HEADER] = credentials.sessionToken; + } + const payloadHash = await getPayloadHash(request, this.sha256); + if (!hasHeader(SHA256_HEADER, request.headers) && this.applyChecksum) { + request.headers[SHA256_HEADER] = payloadHash; + } + const canonicalHeaders = getCanonicalHeaders(request, unsignableHeaders, signableHeaders); + const signature = await this.getSignature(longDate, scope, this.getSigningKey(credentials, region, shortDate, signingService), this.createCanonicalRequest(request, canonicalHeaders, payloadHash)); + request.headers[AUTH_HEADER] = + `${ALGORITHM_IDENTIFIER} ` + + `Credential=${credentials.accessKeyId}/${scope}, ` + + `SignedHeaders=${this.getCanonicalHeaderList(canonicalHeaders)}, ` + + `Signature=${signature}`; + return request; + } + async getSignature(longDate, credentialScope, keyPromise, canonicalRequest) { + const stringToSign = await this.createStringToSign(longDate, credentialScope, canonicalRequest, ALGORITHM_IDENTIFIER); + const hash = new this.sha256(await keyPromise); + hash.update(toUint8Array(stringToSign)); + return toHex(await hash.digest()); + } + getSigningKey(credentials, region, shortDate, service) { + return getSigningKey(this.sha256, credentials, shortDate, region, service || this.service); + } +} diff --git a/amplify/functions/fetchDocuments/node_modules/@smithy/signature-v4/dist-es/SignatureV4Base.js b/amplify/functions/fetchDocuments/node_modules/@smithy/signature-v4/dist-es/SignatureV4Base.js new file mode 100644 index 0000000..857ff0c --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@smithy/signature-v4/dist-es/SignatureV4Base.js @@ -0,0 +1,73 @@ +import { toHex } from 
"@smithy/util-hex-encoding"; +import { normalizeProvider } from "@smithy/util-middleware"; +import { escapeUri } from "@smithy/util-uri-escape"; +import { toUint8Array } from "@smithy/util-utf8"; +import { getCanonicalQuery } from "./getCanonicalQuery"; +import { iso8601 } from "./utilDate"; +export class SignatureV4Base { + constructor({ applyChecksum, credentials, region, service, sha256, uriEscapePath = true, }) { + this.service = service; + this.sha256 = sha256; + this.uriEscapePath = uriEscapePath; + this.applyChecksum = typeof applyChecksum === "boolean" ? applyChecksum : true; + this.regionProvider = normalizeProvider(region); + this.credentialProvider = normalizeProvider(credentials); + } + createCanonicalRequest(request, canonicalHeaders, payloadHash) { + const sortedHeaders = Object.keys(canonicalHeaders).sort(); + return `${request.method} +${this.getCanonicalPath(request)} +${getCanonicalQuery(request)} +${sortedHeaders.map((name) => `${name}:${canonicalHeaders[name]}`).join("\n")} + +${sortedHeaders.join(";")} +${payloadHash}`; + } + async createStringToSign(longDate, credentialScope, canonicalRequest, algorithmIdentifier) { + const hash = new this.sha256(); + hash.update(toUint8Array(canonicalRequest)); + const hashedRequest = await hash.digest(); + return `${algorithmIdentifier} +${longDate} +${credentialScope} +${toHex(hashedRequest)}`; + } + getCanonicalPath({ path }) { + if (this.uriEscapePath) { + const normalizedPathSegments = []; + for (const pathSegment of path.split("/")) { + if (pathSegment?.length === 0) + continue; + if (pathSegment === ".") + continue; + if (pathSegment === "..") { + normalizedPathSegments.pop(); + } + else { + normalizedPathSegments.push(pathSegment); + } + } + const normalizedPath = `${path?.startsWith("/") ? "/" : ""}${normalizedPathSegments.join("/")}${normalizedPathSegments.length > 0 && path?.endsWith("/") ? 
"/" : ""}`; + const doubleEncoded = escapeUri(normalizedPath); + return doubleEncoded.replace(/%2F/g, "/"); + } + return path; + } + validateResolvedCredentials(credentials) { + if (typeof credentials !== "object" || + typeof credentials.accessKeyId !== "string" || + typeof credentials.secretAccessKey !== "string") { + throw new Error("Resolved credential object is not valid"); + } + } + formatDate(now) { + const longDate = iso8601(now).replace(/[\-:]/g, ""); + return { + longDate, + shortDate: longDate.slice(0, 8), + }; + } + getCanonicalHeaderList(headers) { + return Object.keys(headers).sort().join(";"); + } +} diff --git a/amplify/functions/fetchDocuments/node_modules/@smithy/signature-v4/dist-es/constants.js b/amplify/functions/fetchDocuments/node_modules/@smithy/signature-v4/dist-es/constants.js new file mode 100644 index 0000000..602728a --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@smithy/signature-v4/dist-es/constants.js @@ -0,0 +1,43 @@ +export const ALGORITHM_QUERY_PARAM = "X-Amz-Algorithm"; +export const CREDENTIAL_QUERY_PARAM = "X-Amz-Credential"; +export const AMZ_DATE_QUERY_PARAM = "X-Amz-Date"; +export const SIGNED_HEADERS_QUERY_PARAM = "X-Amz-SignedHeaders"; +export const EXPIRES_QUERY_PARAM = "X-Amz-Expires"; +export const SIGNATURE_QUERY_PARAM = "X-Amz-Signature"; +export const TOKEN_QUERY_PARAM = "X-Amz-Security-Token"; +export const REGION_SET_PARAM = "X-Amz-Region-Set"; +export const AUTH_HEADER = "authorization"; +export const AMZ_DATE_HEADER = AMZ_DATE_QUERY_PARAM.toLowerCase(); +export const DATE_HEADER = "date"; +export const GENERATED_HEADERS = [AUTH_HEADER, AMZ_DATE_HEADER, DATE_HEADER]; +export const SIGNATURE_HEADER = SIGNATURE_QUERY_PARAM.toLowerCase(); +export const SHA256_HEADER = "x-amz-content-sha256"; +export const TOKEN_HEADER = TOKEN_QUERY_PARAM.toLowerCase(); +export const HOST_HEADER = "host"; +export const ALWAYS_UNSIGNABLE_HEADERS = { + authorization: true, + "cache-control": true, + connection: true, + 
expect: true, + from: true, + "keep-alive": true, + "max-forwards": true, + pragma: true, + referer: true, + te: true, + trailer: true, + "transfer-encoding": true, + upgrade: true, + "user-agent": true, + "x-amzn-trace-id": true, +}; +export const PROXY_HEADER_PATTERN = /^proxy-/; +export const SEC_HEADER_PATTERN = /^sec-/; +export const UNSIGNABLE_PATTERNS = [/^proxy-/i, /^sec-/i]; +export const ALGORITHM_IDENTIFIER = "AWS4-HMAC-SHA256"; +export const ALGORITHM_IDENTIFIER_V4A = "AWS4-ECDSA-P256-SHA256"; +export const EVENT_ALGORITHM_IDENTIFIER = "AWS4-HMAC-SHA256-PAYLOAD"; +export const UNSIGNED_PAYLOAD = "UNSIGNED-PAYLOAD"; +export const MAX_CACHE_SIZE = 50; +export const KEY_TYPE_IDENTIFIER = "aws4_request"; +export const MAX_PRESIGNED_TTL = 60 * 60 * 24 * 7; diff --git a/amplify/functions/fetchDocuments/node_modules/@smithy/signature-v4/dist-es/credentialDerivation.js b/amplify/functions/fetchDocuments/node_modules/@smithy/signature-v4/dist-es/credentialDerivation.js new file mode 100644 index 0000000..b16ab8c --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@smithy/signature-v4/dist-es/credentialDerivation.js @@ -0,0 +1,33 @@ +import { toHex } from "@smithy/util-hex-encoding"; +import { toUint8Array } from "@smithy/util-utf8"; +import { KEY_TYPE_IDENTIFIER, MAX_CACHE_SIZE } from "./constants"; +const signingKeyCache = {}; +const cacheQueue = []; +export const createScope = (shortDate, region, service) => `${shortDate}/${region}/${service}/${KEY_TYPE_IDENTIFIER}`; +export const getSigningKey = async (sha256Constructor, credentials, shortDate, region, service) => { + const credsHash = await hmac(sha256Constructor, credentials.secretAccessKey, credentials.accessKeyId); + const cacheKey = `${shortDate}:${region}:${service}:${toHex(credsHash)}:${credentials.sessionToken}`; + if (cacheKey in signingKeyCache) { + return signingKeyCache[cacheKey]; + } + cacheQueue.push(cacheKey); + while (cacheQueue.length > MAX_CACHE_SIZE) { + delete 
signingKeyCache[cacheQueue.shift()]; + } + let key = `AWS4${credentials.secretAccessKey}`; + for (const signable of [shortDate, region, service, KEY_TYPE_IDENTIFIER]) { + key = await hmac(sha256Constructor, key, signable); + } + return (signingKeyCache[cacheKey] = key); +}; +export const clearCredentialCache = () => { + cacheQueue.length = 0; + Object.keys(signingKeyCache).forEach((cacheKey) => { + delete signingKeyCache[cacheKey]; + }); +}; +const hmac = (ctor, secret, data) => { + const hash = new ctor(secret); + hash.update(toUint8Array(data)); + return hash.digest(); +}; diff --git a/amplify/functions/fetchDocuments/node_modules/@smithy/signature-v4/dist-es/getCanonicalHeaders.js b/amplify/functions/fetchDocuments/node_modules/@smithy/signature-v4/dist-es/getCanonicalHeaders.js new file mode 100644 index 0000000..3321125 --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@smithy/signature-v4/dist-es/getCanonicalHeaders.js @@ -0,0 +1,20 @@ +import { ALWAYS_UNSIGNABLE_HEADERS, PROXY_HEADER_PATTERN, SEC_HEADER_PATTERN } from "./constants"; +export const getCanonicalHeaders = ({ headers }, unsignableHeaders, signableHeaders) => { + const canonical = {}; + for (const headerName of Object.keys(headers).sort()) { + if (headers[headerName] == undefined) { + continue; + } + const canonicalHeaderName = headerName.toLowerCase(); + if (canonicalHeaderName in ALWAYS_UNSIGNABLE_HEADERS || + unsignableHeaders?.has(canonicalHeaderName) || + PROXY_HEADER_PATTERN.test(canonicalHeaderName) || + SEC_HEADER_PATTERN.test(canonicalHeaderName)) { + if (!signableHeaders || (signableHeaders && !signableHeaders.has(canonicalHeaderName))) { + continue; + } + } + canonical[canonicalHeaderName] = headers[headerName].trim().replace(/\s+/g, " "); + } + return canonical; +}; diff --git a/amplify/functions/fetchDocuments/node_modules/@smithy/signature-v4/dist-es/getCanonicalQuery.js b/amplify/functions/fetchDocuments/node_modules/@smithy/signature-v4/dist-es/getCanonicalQuery.js 
new file mode 100644 index 0000000..0623f1a --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@smithy/signature-v4/dist-es/getCanonicalQuery.js @@ -0,0 +1,29 @@ +import { escapeUri } from "@smithy/util-uri-escape"; +import { SIGNATURE_HEADER } from "./constants"; +export const getCanonicalQuery = ({ query = {} }) => { + const keys = []; + const serialized = {}; + for (const key of Object.keys(query)) { + if (key.toLowerCase() === SIGNATURE_HEADER) { + continue; + } + const encodedKey = escapeUri(key); + keys.push(encodedKey); + const value = query[key]; + if (typeof value === "string") { + serialized[encodedKey] = `${encodedKey}=${escapeUri(value)}`; + } + else if (Array.isArray(value)) { + serialized[encodedKey] = value + .slice(0) + .reduce((encoded, value) => encoded.concat([`${encodedKey}=${escapeUri(value)}`]), []) + .sort() + .join("&"); + } + } + return keys + .sort() + .map((key) => serialized[key]) + .filter((serialized) => serialized) + .join("&"); +}; diff --git a/amplify/functions/fetchDocuments/node_modules/@smithy/signature-v4/dist-es/getPayloadHash.js b/amplify/functions/fetchDocuments/node_modules/@smithy/signature-v4/dist-es/getPayloadHash.js new file mode 100644 index 0000000..cba165c --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@smithy/signature-v4/dist-es/getPayloadHash.js @@ -0,0 +1,20 @@ +import { isArrayBuffer } from "@smithy/is-array-buffer"; +import { toHex } from "@smithy/util-hex-encoding"; +import { toUint8Array } from "@smithy/util-utf8"; +import { SHA256_HEADER, UNSIGNED_PAYLOAD } from "./constants"; +export const getPayloadHash = async ({ headers, body }, hashConstructor) => { + for (const headerName of Object.keys(headers)) { + if (headerName.toLowerCase() === SHA256_HEADER) { + return headers[headerName]; + } + } + if (body == undefined) { + return "e3b0c44298fc1c149afbf4c8996fb92427ae41e4649b934ca495991b7852b855"; + } + else if (typeof body === "string" || ArrayBuffer.isView(body) || 
isArrayBuffer(body)) { + const hashCtor = new hashConstructor(); + hashCtor.update(toUint8Array(body)); + return toHex(await hashCtor.digest()); + } + return UNSIGNED_PAYLOAD; +}; diff --git a/amplify/functions/fetchDocuments/node_modules/@smithy/signature-v4/dist-es/headerUtil.js b/amplify/functions/fetchDocuments/node_modules/@smithy/signature-v4/dist-es/headerUtil.js new file mode 100644 index 0000000..e502cbb --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@smithy/signature-v4/dist-es/headerUtil.js @@ -0,0 +1,26 @@ +export const hasHeader = (soughtHeader, headers) => { + soughtHeader = soughtHeader.toLowerCase(); + for (const headerName of Object.keys(headers)) { + if (soughtHeader === headerName.toLowerCase()) { + return true; + } + } + return false; +}; +export const getHeaderValue = (soughtHeader, headers) => { + soughtHeader = soughtHeader.toLowerCase(); + for (const headerName of Object.keys(headers)) { + if (soughtHeader === headerName.toLowerCase()) { + return headers[headerName]; + } + } + return undefined; +}; +export const deleteHeader = (soughtHeader, headers) => { + soughtHeader = soughtHeader.toLowerCase(); + for (const headerName of Object.keys(headers)) { + if (soughtHeader === headerName.toLowerCase()) { + delete headers[headerName]; + } + } +}; diff --git a/amplify/functions/fetchDocuments/node_modules/@smithy/signature-v4/dist-es/index.js b/amplify/functions/fetchDocuments/node_modules/@smithy/signature-v4/dist-es/index.js new file mode 100644 index 0000000..062752d --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@smithy/signature-v4/dist-es/index.js @@ -0,0 +1,11 @@ +export * from "./SignatureV4"; +export * from "./constants"; +export { getCanonicalHeaders } from "./getCanonicalHeaders"; +export { getCanonicalQuery } from "./getCanonicalQuery"; +export { getPayloadHash } from "./getPayloadHash"; +export { moveHeadersToQuery } from "./moveHeadersToQuery"; +export { prepareRequest } from "./prepareRequest"; 
+export * from "./credentialDerivation"; +export { SignatureV4Base } from "./SignatureV4Base"; +export { hasHeader } from "./headerUtil"; +export * from "./signature-v4a-container"; diff --git a/amplify/functions/fetchDocuments/node_modules/@smithy/signature-v4/dist-es/moveHeadersToQuery.js b/amplify/functions/fetchDocuments/node_modules/@smithy/signature-v4/dist-es/moveHeadersToQuery.js new file mode 100644 index 0000000..806703a --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@smithy/signature-v4/dist-es/moveHeadersToQuery.js @@ -0,0 +1,17 @@ +import { HttpRequest } from "@smithy/protocol-http"; +export const moveHeadersToQuery = (request, options = {}) => { + const { headers, query = {} } = HttpRequest.clone(request); + for (const name of Object.keys(headers)) { + const lname = name.toLowerCase(); + if ((lname.slice(0, 6) === "x-amz-" && !options.unhoistableHeaders?.has(lname)) || + options.hoistableHeaders?.has(lname)) { + query[name] = headers[name]; + delete headers[name]; + } + } + return { + ...request, + headers, + query, + }; +}; diff --git a/amplify/functions/fetchDocuments/node_modules/@smithy/signature-v4/dist-es/prepareRequest.js b/amplify/functions/fetchDocuments/node_modules/@smithy/signature-v4/dist-es/prepareRequest.js new file mode 100644 index 0000000..7fe5136 --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@smithy/signature-v4/dist-es/prepareRequest.js @@ -0,0 +1,11 @@ +import { HttpRequest } from "@smithy/protocol-http"; +import { GENERATED_HEADERS } from "./constants"; +export const prepareRequest = (request) => { + request = HttpRequest.clone(request); + for (const headerName of Object.keys(request.headers)) { + if (GENERATED_HEADERS.indexOf(headerName.toLowerCase()) > -1) { + delete request.headers[headerName]; + } + } + return request; +}; diff --git a/amplify/functions/fetchDocuments/node_modules/@smithy/signature-v4/dist-es/signature-v4a-container.js 
b/amplify/functions/fetchDocuments/node_modules/@smithy/signature-v4/dist-es/signature-v4a-container.js new file mode 100644 index 0000000..a309b0a --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@smithy/signature-v4/dist-es/signature-v4a-container.js @@ -0,0 +1,3 @@ +export const signatureV4aContainer = { + SignatureV4a: null, +}; diff --git a/amplify/functions/fetchDocuments/node_modules/@smithy/signature-v4/dist-es/suite.fixture.js b/amplify/functions/fetchDocuments/node_modules/@smithy/signature-v4/dist-es/suite.fixture.js new file mode 100644 index 0000000..bb704a9 --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@smithy/signature-v4/dist-es/suite.fixture.js @@ -0,0 +1,399 @@ +export const region = "us-east-1"; +export const service = "service"; +export const credentials = { + accessKeyId: "AKIDEXAMPLE", + secretAccessKey: "wJalrXUtnFEMI/K7MDENG+bPxRfiCYEXAMPLEKEY", +}; +export const signingDate = new Date("2015-08-30T12:36:00Z"); +export const requests = [ + { + name: "get-header-key-duplicate", + request: { + protocol: "https:", + method: "GET", + hostname: "example.amazonaws.com", + query: {}, + headers: { + host: "example.amazonaws.com", + "my-header1": "value2,value2,value1", + "x-amz-date": "20150830T123600Z", + }, + path: "/", + }, + authorization: "AWS4-HMAC-SHA256 Credential=AKIDEXAMPLE/20150830/us-east-1/service/aws4_request, SignedHeaders=host;my-header1;x-amz-date, Signature=c9d5ea9f3f72853aea855b47ea873832890dbdd183b4468f858259531a5138ea", + }, + { + name: "get-header-value-multiline", + request: { + protocol: "https:", + method: "GET", + hostname: "example.amazonaws.com", + query: {}, + headers: { + host: "example.amazonaws.com", + "my-header1": "value1,value2,value3", + "x-amz-date": "20150830T123600Z", + }, + path: "/", + }, + authorization: "AWS4-HMAC-SHA256 Credential=AKIDEXAMPLE/20150830/us-east-1/service/aws4_request, SignedHeaders=host;my-header1;x-amz-date, 
Signature=ba17b383a53190154eb5fa66a1b836cc297cc0a3d70a5d00705980573d8ff790", + }, + { + name: "get-header-value-order", + request: { + protocol: "https:", + method: "GET", + hostname: "example.amazonaws.com", + query: {}, + headers: { + host: "example.amazonaws.com", + "my-header1": "value4,value1,value3,value2", + "x-amz-date": "20150830T123600Z", + }, + path: "/", + }, + authorization: "AWS4-HMAC-SHA256 Credential=AKIDEXAMPLE/20150830/us-east-1/service/aws4_request, SignedHeaders=host;my-header1;x-amz-date, Signature=08c7e5a9acfcfeb3ab6b2185e75ce8b1deb5e634ec47601a50643f830c755c01", + }, + { + name: "get-header-value-trim", + request: { + protocol: "https:", + method: "GET", + hostname: "example.amazonaws.com", + query: {}, + headers: { + host: "example.amazonaws.com", + "my-header1": "value1", + "my-header2": '"a b c"', + "x-amz-date": "20150830T123600Z", + }, + path: "/", + }, + authorization: "AWS4-HMAC-SHA256 Credential=AKIDEXAMPLE/20150830/us-east-1/service/aws4_request, SignedHeaders=host;my-header1;my-header2;x-amz-date, Signature=acc3ed3afb60bb290fc8d2dd0098b9911fcaa05412b367055dee359757a9c736", + }, + { + name: "get-unreserved", + request: { + protocol: "https:", + method: "GET", + hostname: "example.amazonaws.com", + query: {}, + headers: { + host: "example.amazonaws.com", + "x-amz-date": "20150830T123600Z", + }, + path: "/-._~0123456789ABCDEFGHIJKLMNOPQRSTUVWXYZabcdefghijklmnopqrstuvwxyz", + }, + authorization: "AWS4-HMAC-SHA256 Credential=AKIDEXAMPLE/20150830/us-east-1/service/aws4_request, SignedHeaders=host;x-amz-date, Signature=07ef7494c76fa4850883e2b006601f940f8a34d404d0cfa977f52a65bbf5f24f", + }, + { + name: "get-utf8", + request: { + protocol: "https:", + method: "GET", + hostname: "example.amazonaws.com", + query: {}, + headers: { + host: "example.amazonaws.com", + "x-amz-date": "20150830T123600Z", + }, + path: "/ሴ", + }, + authorization: "AWS4-HMAC-SHA256 Credential=AKIDEXAMPLE/20150830/us-east-1/service/aws4_request, 
SignedHeaders=host;x-amz-date, Signature=8318018e0b0f223aa2bbf98705b62bb787dc9c0e678f255a891fd03141be5d85", + }, + { + name: "get-vanilla", + request: { + protocol: "https:", + method: "GET", + hostname: "example.amazonaws.com", + query: {}, + headers: { + host: "example.amazonaws.com", + "x-amz-date": "20150830T123600Z", + }, + path: "/", + }, + authorization: "AWS4-HMAC-SHA256 Credential=AKIDEXAMPLE/20150830/us-east-1/service/aws4_request, SignedHeaders=host;x-amz-date, Signature=5fa00fa31553b73ebf1942676e86291e8372ff2a2260956d9b8aae1d763fbf31", + }, + { + name: "get-vanilla-empty-query-key", + request: { + protocol: "https:", + method: "GET", + hostname: "example.amazonaws.com", + query: { + Param1: "value1", + }, + headers: { + host: "example.amazonaws.com", + "x-amz-date": "20150830T123600Z", + }, + path: "/", + }, + authorization: "AWS4-HMAC-SHA256 Credential=AKIDEXAMPLE/20150830/us-east-1/service/aws4_request, SignedHeaders=host;x-amz-date, Signature=a67d582fa61cc504c4bae71f336f98b97f1ea3c7a6bfe1b6e45aec72011b9aeb", + }, + { + name: "get-vanilla-query", + request: { + protocol: "https:", + method: "GET", + hostname: "example.amazonaws.com", + query: {}, + headers: { + host: "example.amazonaws.com", + "x-amz-date": "20150830T123600Z", + }, + path: "/", + }, + authorization: "AWS4-HMAC-SHA256 Credential=AKIDEXAMPLE/20150830/us-east-1/service/aws4_request, SignedHeaders=host;x-amz-date, Signature=5fa00fa31553b73ebf1942676e86291e8372ff2a2260956d9b8aae1d763fbf31", + }, + { + name: "get-vanilla-query-order-key-case", + request: { + protocol: "https:", + method: "GET", + hostname: "example.amazonaws.com", + query: { + Param2: "value2", + Param1: "value1", + }, + headers: { + host: "example.amazonaws.com", + "x-amz-date": "20150830T123600Z", + }, + path: "/", + }, + authorization: "AWS4-HMAC-SHA256 Credential=AKIDEXAMPLE/20150830/us-east-1/service/aws4_request, SignedHeaders=host;x-amz-date, 
Signature=b97d918cfa904a5beff61c982a1b6f458b799221646efd99d3219ec94cdf2500", + }, + { + name: "get-vanilla-query-unreserved", + request: { + protocol: "https:", + method: "GET", + hostname: "example.amazonaws.com", + query: { + "-._~0123456789ABCDEFGHIJKLMNOPQRSTUVWXYZabcdefghijklmnopqrstuvwxyz": "-._~0123456789ABCDEFGHIJKLMNOPQRSTUVWXYZabcdefghijklmnopqrstuvwxyz", + }, + headers: { + host: "example.amazonaws.com", + "x-amz-date": "20150830T123600Z", + }, + path: "/", + }, + authorization: "AWS4-HMAC-SHA256 Credential=AKIDEXAMPLE/20150830/us-east-1/service/aws4_request, SignedHeaders=host;x-amz-date, Signature=9c3e54bfcdf0b19771a7f523ee5669cdf59bc7cc0884027167c21bb143a40197", + }, + { + name: "get-vanilla-utf8-query", + request: { + protocol: "https:", + method: "GET", + hostname: "example.amazonaws.com", + query: { + ሴ: "bar", + }, + headers: { + host: "example.amazonaws.com", + "x-amz-date": "20150830T123600Z", + }, + path: "/", + }, + authorization: "AWS4-HMAC-SHA256 Credential=AKIDEXAMPLE/20150830/us-east-1/service/aws4_request, SignedHeaders=host;x-amz-date, Signature=2cdec8eed098649ff3a119c94853b13c643bcf08f8b0a1d91e12c9027818dd04", + }, + { + name: "post-header-key-case", + request: { + protocol: "https:", + method: "POST", + hostname: "example.amazonaws.com", + query: {}, + headers: { + host: "example.amazonaws.com", + "x-amz-date": "20150830T123600Z", + }, + path: "/", + }, + authorization: "AWS4-HMAC-SHA256 Credential=AKIDEXAMPLE/20150830/us-east-1/service/aws4_request, SignedHeaders=host;x-amz-date, Signature=5da7c1a2acd57cee7505fc6676e4e544621c30862966e37dddb68e92efbe5d6b", + }, + { + name: "post-header-key-sort", + request: { + protocol: "https:", + method: "POST", + hostname: "example.amazonaws.com", + query: {}, + headers: { + host: "example.amazonaws.com", + "my-header1": "value1", + "x-amz-date": "20150830T123600Z", + }, + path: "/", + }, + authorization: "AWS4-HMAC-SHA256 Credential=AKIDEXAMPLE/20150830/us-east-1/service/aws4_request, 
SignedHeaders=host;my-header1;x-amz-date, Signature=c5410059b04c1ee005303aed430f6e6645f61f4dc9e1461ec8f8916fdf18852c", + }, + { + name: "post-header-value-case", + request: { + protocol: "https:", + method: "POST", + hostname: "example.amazonaws.com", + query: {}, + headers: { + host: "example.amazonaws.com", + "my-header1": "VALUE1", + "x-amz-date": "20150830T123600Z", + }, + path: "/", + }, + authorization: "AWS4-HMAC-SHA256 Credential=AKIDEXAMPLE/20150830/us-east-1/service/aws4_request, SignedHeaders=host;my-header1;x-amz-date, Signature=cdbc9802e29d2942e5e10b5bccfdd67c5f22c7c4e8ae67b53629efa58b974b7d", + }, + { + name: "post-sts-header-after", + request: { + protocol: "https:", + method: "POST", + hostname: "example.amazonaws.com", + query: {}, + headers: { + host: "example.amazonaws.com", + "x-amz-date": "20150830T123600Z", + }, + path: "/", + }, + authorization: "AWS4-HMAC-SHA256 Credential=AKIDEXAMPLE/20150830/us-east-1/service/aws4_request, SignedHeaders=host;x-amz-date, Signature=5da7c1a2acd57cee7505fc6676e4e544621c30862966e37dddb68e92efbe5d6b", + }, + { + name: "post-sts-header-before", + request: { + protocol: "https:", + method: "POST", + hostname: "example.amazonaws.com", + query: {}, + headers: { + host: "example.amazonaws.com", + "x-amz-date": "20150830T123600Z", + "x-amz-security-token": "AQoDYXdzEPT//////////wEXAMPLEtc764bNrC9SAPBSM22wDOk4x4HIZ8j4FZTwdQWLWsKWHGBuFqwAeMicRXmxfpSPfIeoIYRqTflfKD8YUuwthAx7mSEI/qkPpKPi/kMcGdQrmGdeehM4IC1NtBmUpp2wUE8phUZampKsburEDy0KPkyQDYwT7WZ0wq5VSXDvp75YU9HFvlRd8Tx6q6fE8YQcHNVXAkiY9q6d+xo0rKwT38xVqr7ZD0u0iPPkUL64lIZbqBAz+scqKmlzm8FDrypNC9Yjc8fPOLn9FX9KSYvKTr4rvx3iSIlTJabIQwj2ICCR/oLxBA==", + }, + path: "/", + }, + authorization: "AWS4-HMAC-SHA256 Credential=AKIDEXAMPLE/20150830/us-east-1/service/aws4_request, SignedHeaders=host;x-amz-date;x-amz-security-token, Signature=85d96828115b5dc0cfc3bd16ad9e210dd772bbebba041836c64533a82be05ead", + }, + { + name: "post-vanilla", + request: { + protocol: "https:", + method: 
"POST", + hostname: "example.amazonaws.com", + query: {}, + headers: { + host: "example.amazonaws.com", + "x-amz-date": "20150830T123600Z", + }, + path: "/", + }, + authorization: "AWS4-HMAC-SHA256 Credential=AKIDEXAMPLE/20150830/us-east-1/service/aws4_request, SignedHeaders=host;x-amz-date, Signature=5da7c1a2acd57cee7505fc6676e4e544621c30862966e37dddb68e92efbe5d6b", + }, + { + name: "post-vanilla-empty-query-value", + request: { + protocol: "https:", + method: "POST", + hostname: "example.amazonaws.com", + query: { + Param1: "value1", + }, + headers: { + host: "example.amazonaws.com", + "x-amz-date": "20150830T123600Z", + }, + path: "/", + }, + authorization: "AWS4-HMAC-SHA256 Credential=AKIDEXAMPLE/20150830/us-east-1/service/aws4_request, SignedHeaders=host;x-amz-date, Signature=28038455d6de14eafc1f9222cf5aa6f1a96197d7deb8263271d420d138af7f11", + }, + { + name: "post-vanilla-query", + request: { + protocol: "https:", + method: "POST", + hostname: "example.amazonaws.com", + query: { + Param1: "value1", + }, + headers: { + host: "example.amazonaws.com", + "x-amz-date": "20150830T123600Z", + }, + path: "/", + }, + authorization: "AWS4-HMAC-SHA256 Credential=AKIDEXAMPLE/20150830/us-east-1/service/aws4_request, SignedHeaders=host;x-amz-date, Signature=28038455d6de14eafc1f9222cf5aa6f1a96197d7deb8263271d420d138af7f11", + }, + { + name: "post-vanilla-query-nonunreserved", + request: { + protocol: "https:", + method: "POST", + hostname: "example.amazonaws.com", + query: { + "@#$%^": "", + "+": '/,?><`";:\\|][{}', + }, + headers: { + host: "example.amazonaws.com", + "x-amz-date": "20150830T123600Z", + }, + path: "/", + }, + authorization: "AWS4-HMAC-SHA256 Credential=AKIDEXAMPLE/20150830/us-east-1/service/aws4_request, SignedHeaders=host;x-amz-date, Signature=66c82657c86e26fb25238d0e69f011edc4c6df5ae71119d7cb98ed9b87393c1e", + }, + { + name: "post-vanilla-query-space", + request: { + protocol: "https:", + method: "POST", + hostname: "example.amazonaws.com", + query: { + p: 
"", + }, + headers: { + host: "example.amazonaws.com", + "x-amz-date": "20150830T123600Z", + }, + path: "/", + }, + authorization: "AWS4-HMAC-SHA256 Credential=AKIDEXAMPLE/20150830/us-east-1/service/aws4_request, SignedHeaders=host;x-amz-date, Signature=e71688addb58a26418614085fb730ba3faa623b461c17f48f2fbdb9361b94a9b", + }, + { + name: "post-x-www-form-urlencoded", + request: { + protocol: "https:", + method: "POST", + hostname: "example.amazonaws.com", + query: {}, + headers: { + "content-type": "application/x-www-form-urlencoded", + host: "example.amazonaws.com", + "x-amz-date": "20150830T123600Z", + }, + body: "Param1=value1", + path: "/", + }, + authorization: "AWS4-HMAC-SHA256 Credential=AKIDEXAMPLE/20150830/us-east-1/service/aws4_request, SignedHeaders=content-type;host;x-amz-date, Signature=ff11897932ad3f4e8b18135d722051e5ac45fc38421b1da7b9d196a0fe09473a", + }, + { + name: "post-x-www-form-urlencoded-parameters", + request: { + protocol: "https:", + method: "POST", + hostname: "example.amazonaws.com", + query: {}, + headers: { + "content-type": "application/x-www-form-urlencoded; charset=utf8", + host: "example.amazonaws.com", + "x-amz-date": "20150830T123600Z", + }, + body: "Param1=value1", + path: "/", + }, + authorization: "AWS4-HMAC-SHA256 Credential=AKIDEXAMPLE/20150830/us-east-1/service/aws4_request, SignedHeaders=content-type;host;x-amz-date, Signature=1a72ec8f64bd914b0e42e42607c7fbce7fb2c7465f63e3092b3b0d39fa77a6fe", + }, +]; diff --git a/amplify/functions/fetchDocuments/node_modules/@smithy/signature-v4/dist-es/utilDate.js b/amplify/functions/fetchDocuments/node_modules/@smithy/signature-v4/dist-es/utilDate.js new file mode 100644 index 0000000..4aad623 --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@smithy/signature-v4/dist-es/utilDate.js @@ -0,0 +1,15 @@ +export const iso8601 = (time) => toDate(time) + .toISOString() + .replace(/\.\d{3}Z$/, "Z"); +export const toDate = (time) => { + if (typeof time === "number") { + return new 
Date(time * 1000); + } + if (typeof time === "string") { + if (Number(time)) { + return new Date(Number(time) * 1000); + } + return new Date(time); + } + return time; +}; diff --git a/amplify/functions/fetchDocuments/node_modules/@smithy/signature-v4/dist-types/HeaderFormatter.d.ts b/amplify/functions/fetchDocuments/node_modules/@smithy/signature-v4/dist-types/HeaderFormatter.d.ts new file mode 100644 index 0000000..92056a6 --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@smithy/signature-v4/dist-types/HeaderFormatter.d.ts @@ -0,0 +1,24 @@ +import type { Int64 as IInt64, MessageHeaders } from "@smithy/types"; +/** + * @internal + * TODO: duplicated from @smithy/eventstream-codec to break large dependency. + * TODO: This should be moved to its own deduped submodule in @smithy/core when submodules are implemented. + */ +export declare class HeaderFormatter { + format(headers: MessageHeaders): Uint8Array; + private formatHeaderValue; +} +/** + * TODO: duplicated from @smithy/eventstream-codec to break large dependency. + * TODO: This should be moved to its own deduped submodule in @smithy/core when submodules are implemented. + */ +export declare class Int64 implements IInt64 { + readonly bytes: Uint8Array; + constructor(bytes: Uint8Array); + static fromNumber(number: number): Int64; + /** + * Called implicitly by infix arithmetic operators. 
+ */ + valueOf(): number; + toString(): string; +} diff --git a/amplify/functions/fetchDocuments/node_modules/@smithy/signature-v4/dist-types/SignatureV4.d.ts b/amplify/functions/fetchDocuments/node_modules/@smithy/signature-v4/dist-types/SignatureV4.d.ts new file mode 100644 index 0000000..99499d4 --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@smithy/signature-v4/dist-types/SignatureV4.d.ts @@ -0,0 +1,20 @@ +import { EventSigner, EventSigningArguments, FormattedEvent, HttpRequest, MessageSigner, RequestPresigner, RequestPresigningArguments, RequestSigner, RequestSigningArguments, SignableMessage, SignedMessage, SigningArguments, StringSigner } from "@smithy/types"; +import { SignatureV4Base, SignatureV4CryptoInit, SignatureV4Init } from "./SignatureV4Base"; +/** + * @public + */ +export declare class SignatureV4 extends SignatureV4Base implements RequestPresigner, RequestSigner, StringSigner, EventSigner, MessageSigner { + private readonly headerFormatter; + constructor({ applyChecksum, credentials, region, service, sha256, uriEscapePath, }: SignatureV4Init & SignatureV4CryptoInit); + presign(originalRequest: HttpRequest, options?: RequestPresigningArguments): Promise; + sign(stringToSign: string, options?: SigningArguments): Promise; + sign(event: FormattedEvent, options: EventSigningArguments): Promise; + sign(event: SignableMessage, options: SigningArguments): Promise; + sign(requestToSign: HttpRequest, options?: RequestSigningArguments): Promise; + private signEvent; + signMessage(signableMessage: SignableMessage, { signingDate, signingRegion, signingService }: SigningArguments): Promise; + private signString; + private signRequest; + private getSignature; + private getSigningKey; +} diff --git a/amplify/functions/fetchDocuments/node_modules/@smithy/signature-v4/dist-types/SignatureV4Base.d.ts b/amplify/functions/fetchDocuments/node_modules/@smithy/signature-v4/dist-types/SignatureV4Base.d.ts new file mode 100644 index 0000000..9a0e6ad --- 
/dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@smithy/signature-v4/dist-types/SignatureV4Base.d.ts @@ -0,0 +1,69 @@ +import { AwsCredentialIdentity, ChecksumConstructor, DateInput, HashConstructor, HeaderBag, HttpRequest, Provider } from "@smithy/types"; +/** + * @public + */ +export interface SignatureV4Init { + /** + * The service signing name. + */ + service: string; + /** + * The region name or a function that returns a promise that will be + * resolved with the region name. + */ + region: string | Provider; + /** + * The credentials with which the request should be signed or a function + * that returns a promise that will be resolved with credentials. + */ + credentials: AwsCredentialIdentity | Provider; + /** + * A constructor function for a hash object that will calculate SHA-256 HMAC + * checksums. + */ + sha256?: ChecksumConstructor | HashConstructor; + /** + * Whether to uri-escape the request URI path as part of computing the + * canonical request string. This is required for every AWS service, except + * Amazon S3, as of late 2017. + * + * @default [true] + */ + uriEscapePath?: boolean; + /** + * Whether to calculate a checksum of the request body and include it as + * either a request header (when signing) or as a query string parameter + * (when presigning). This is required for AWS Glacier and Amazon S3 and optional for + * every other AWS service as of late 2017. 
+ * + * @default [true] + */ + applyChecksum?: boolean; +} +/** + * @public + */ +export interface SignatureV4CryptoInit { + sha256: ChecksumConstructor | HashConstructor; +} +/** + * @internal + */ +export declare abstract class SignatureV4Base { + protected readonly service: string; + protected readonly regionProvider: Provider; + protected readonly credentialProvider: Provider; + protected readonly sha256: ChecksumConstructor | HashConstructor; + private readonly uriEscapePath; + protected readonly applyChecksum: boolean; + protected constructor({ applyChecksum, credentials, region, service, sha256, uriEscapePath, }: SignatureV4Init & SignatureV4CryptoInit); + protected createCanonicalRequest(request: HttpRequest, canonicalHeaders: HeaderBag, payloadHash: string): string; + protected createStringToSign(longDate: string, credentialScope: string, canonicalRequest: string, algorithmIdentifier: string): Promise; + private getCanonicalPath; + protected validateResolvedCredentials(credentials: unknown): void; + protected formatDate(now: DateInput): { + longDate: string; + shortDate: string; + }; + protected getCanonicalHeaderList(headers: object): string; +} diff --git a/amplify/functions/fetchDocuments/node_modules/@smithy/signature-v4/dist-types/constants.d.ts b/amplify/functions/fetchDocuments/node_modules/@smithy/signature-v4/dist-types/constants.d.ts new file mode 100644 index 0000000..ea1cfb5 --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@smithy/signature-v4/dist-types/constants.d.ts @@ -0,0 +1,43 @@ +export declare const ALGORITHM_QUERY_PARAM = "X-Amz-Algorithm"; +export declare const CREDENTIAL_QUERY_PARAM = "X-Amz-Credential"; +export declare const AMZ_DATE_QUERY_PARAM = "X-Amz-Date"; +export declare const SIGNED_HEADERS_QUERY_PARAM = "X-Amz-SignedHeaders"; +export declare const EXPIRES_QUERY_PARAM = "X-Amz-Expires"; +export declare const SIGNATURE_QUERY_PARAM = "X-Amz-Signature"; +export declare const TOKEN_QUERY_PARAM = 
"X-Amz-Security-Token"; +export declare const REGION_SET_PARAM = "X-Amz-Region-Set"; +export declare const AUTH_HEADER = "authorization"; +export declare const AMZ_DATE_HEADER: string; +export declare const DATE_HEADER = "date"; +export declare const GENERATED_HEADERS: string[]; +export declare const SIGNATURE_HEADER: string; +export declare const SHA256_HEADER = "x-amz-content-sha256"; +export declare const TOKEN_HEADER: string; +export declare const HOST_HEADER = "host"; +export declare const ALWAYS_UNSIGNABLE_HEADERS: { + authorization: boolean; + "cache-control": boolean; + connection: boolean; + expect: boolean; + from: boolean; + "keep-alive": boolean; + "max-forwards": boolean; + pragma: boolean; + referer: boolean; + te: boolean; + trailer: boolean; + "transfer-encoding": boolean; + upgrade: boolean; + "user-agent": boolean; + "x-amzn-trace-id": boolean; +}; +export declare const PROXY_HEADER_PATTERN: RegExp; +export declare const SEC_HEADER_PATTERN: RegExp; +export declare const UNSIGNABLE_PATTERNS: RegExp[]; +export declare const ALGORITHM_IDENTIFIER = "AWS4-HMAC-SHA256"; +export declare const ALGORITHM_IDENTIFIER_V4A = "AWS4-ECDSA-P256-SHA256"; +export declare const EVENT_ALGORITHM_IDENTIFIER = "AWS4-HMAC-SHA256-PAYLOAD"; +export declare const UNSIGNED_PAYLOAD = "UNSIGNED-PAYLOAD"; +export declare const MAX_CACHE_SIZE = 50; +export declare const KEY_TYPE_IDENTIFIER = "aws4_request"; +export declare const MAX_PRESIGNED_TTL: number; diff --git a/amplify/functions/fetchDocuments/node_modules/@smithy/signature-v4/dist-types/credentialDerivation.d.ts b/amplify/functions/fetchDocuments/node_modules/@smithy/signature-v4/dist-types/credentialDerivation.d.ts new file mode 100644 index 0000000..a560c2c --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@smithy/signature-v4/dist-types/credentialDerivation.d.ts @@ -0,0 +1,30 @@ +import { AwsCredentialIdentity, ChecksumConstructor, HashConstructor } from "@smithy/types"; +/** + * Create a string 
describing the scope of credentials used to sign a request. + * + * @internal + * + * @param shortDate - the current calendar date in the form YYYYMMDD. + * @param region - the AWS region in which the service resides. + * @param service - the service to which the signed request is being sent. + */ +export declare const createScope: (shortDate: string, region: string, service: string) => string; +/** + * Derive a signing key from its composite parts. + * + * @internal + * + * @param sha256Constructor - a constructor function that can instantiate SHA-256 + * hash objects. + * @param credentials - the credentials with which the request will be + * signed. + * @param shortDate - the current calendar date in the form YYYYMMDD. + * @param region - the AWS region in which the service resides. + * @param service - the service to which the signed request is being + * sent. + */ +export declare const getSigningKey: (sha256Constructor: ChecksumConstructor | HashConstructor, credentials: AwsCredentialIdentity, shortDate: string, region: string, service: string) => Promise; +/** + * @internal + */ +export declare const clearCredentialCache: () => void; diff --git a/amplify/functions/fetchDocuments/node_modules/@smithy/signature-v4/dist-types/getCanonicalHeaders.d.ts b/amplify/functions/fetchDocuments/node_modules/@smithy/signature-v4/dist-types/getCanonicalHeaders.d.ts new file mode 100644 index 0000000..efc417c --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@smithy/signature-v4/dist-types/getCanonicalHeaders.d.ts @@ -0,0 +1,5 @@ +import { HeaderBag, HttpRequest } from "@smithy/types"; +/** + * @internal + */ +export declare const getCanonicalHeaders: ({ headers }: HttpRequest, unsignableHeaders?: Set, signableHeaders?: Set) => HeaderBag; diff --git a/amplify/functions/fetchDocuments/node_modules/@smithy/signature-v4/dist-types/getCanonicalQuery.d.ts b/amplify/functions/fetchDocuments/node_modules/@smithy/signature-v4/dist-types/getCanonicalQuery.d.ts new 
file mode 100644 index 0000000..a8e1800 --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@smithy/signature-v4/dist-types/getCanonicalQuery.d.ts @@ -0,0 +1,5 @@ +import { HttpRequest } from "@smithy/types"; +/** + * @internal + */ +export declare const getCanonicalQuery: ({ query }: HttpRequest) => string; diff --git a/amplify/functions/fetchDocuments/node_modules/@smithy/signature-v4/dist-types/getPayloadHash.d.ts b/amplify/functions/fetchDocuments/node_modules/@smithy/signature-v4/dist-types/getPayloadHash.d.ts new file mode 100644 index 0000000..2de0858 --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@smithy/signature-v4/dist-types/getPayloadHash.d.ts @@ -0,0 +1,5 @@ +import { ChecksumConstructor, HashConstructor, HttpRequest } from "@smithy/types"; +/** + * @internal + */ +export declare const getPayloadHash: ({ headers, body }: HttpRequest, hashConstructor: ChecksumConstructor | HashConstructor) => Promise; diff --git a/amplify/functions/fetchDocuments/node_modules/@smithy/signature-v4/dist-types/headerUtil.d.ts b/amplify/functions/fetchDocuments/node_modules/@smithy/signature-v4/dist-types/headerUtil.d.ts new file mode 100644 index 0000000..c0b66eb --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@smithy/signature-v4/dist-types/headerUtil.d.ts @@ -0,0 +1,4 @@ +import { HeaderBag } from "@smithy/types"; +export declare const hasHeader: (soughtHeader: string, headers: HeaderBag) => boolean; +export declare const getHeaderValue: (soughtHeader: string, headers: HeaderBag) => string | undefined; +export declare const deleteHeader: (soughtHeader: string, headers: HeaderBag) => void; diff --git a/amplify/functions/fetchDocuments/node_modules/@smithy/signature-v4/dist-types/index.d.ts b/amplify/functions/fetchDocuments/node_modules/@smithy/signature-v4/dist-types/index.d.ts new file mode 100644 index 0000000..9305cf3 --- /dev/null +++ 
b/amplify/functions/fetchDocuments/node_modules/@smithy/signature-v4/dist-types/index.d.ts @@ -0,0 +1,11 @@ +export * from "./SignatureV4"; +export * from "./constants"; +export { getCanonicalHeaders } from "./getCanonicalHeaders"; +export { getCanonicalQuery } from "./getCanonicalQuery"; +export { getPayloadHash } from "./getPayloadHash"; +export { moveHeadersToQuery } from "./moveHeadersToQuery"; +export { prepareRequest } from "./prepareRequest"; +export * from "./credentialDerivation"; +export { SignatureV4Init, SignatureV4CryptoInit, SignatureV4Base } from "./SignatureV4Base"; +export { hasHeader } from "./headerUtil"; +export * from "./signature-v4a-container"; diff --git a/amplify/functions/fetchDocuments/node_modules/@smithy/signature-v4/dist-types/moveHeadersToQuery.d.ts b/amplify/functions/fetchDocuments/node_modules/@smithy/signature-v4/dist-types/moveHeadersToQuery.d.ts new file mode 100644 index 0000000..e2c31e0 --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@smithy/signature-v4/dist-types/moveHeadersToQuery.d.ts @@ -0,0 +1,10 @@ +import type { HttpRequest as IHttpRequest, QueryParameterBag } from "@smithy/types"; +/** + * @internal + */ +export declare const moveHeadersToQuery: (request: IHttpRequest, options?: { + unhoistableHeaders?: Set; + hoistableHeaders?: Set; +}) => IHttpRequest & { + query: QueryParameterBag; +}; diff --git a/amplify/functions/fetchDocuments/node_modules/@smithy/signature-v4/dist-types/prepareRequest.d.ts b/amplify/functions/fetchDocuments/node_modules/@smithy/signature-v4/dist-types/prepareRequest.d.ts new file mode 100644 index 0000000..b20e0e3 --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@smithy/signature-v4/dist-types/prepareRequest.d.ts @@ -0,0 +1,5 @@ +import type { HttpRequest as IHttpRequest } from "@smithy/types"; +/** + * @internal + */ +export declare const prepareRequest: (request: IHttpRequest) => IHttpRequest; diff --git 
a/amplify/functions/fetchDocuments/node_modules/@smithy/signature-v4/dist-types/signature-v4a-container.d.ts b/amplify/functions/fetchDocuments/node_modules/@smithy/signature-v4/dist-types/signature-v4a-container.d.ts new file mode 100644 index 0000000..8901036 --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@smithy/signature-v4/dist-types/signature-v4a-container.d.ts @@ -0,0 +1,24 @@ +import type { RequestSigner } from "@smithy/types"; +/** + * @public + */ +export type OptionalSigV4aSigner = { + /** + * This constructor is not typed so as not to require a type import + * from the signature-v4a package. + * + * The true type is SignatureV4a from @smithy/signature-v4a. + */ + new (options: any): RequestSigner; +}; +/** + * @public + * + * \@smithy/signature-v4a will install the constructor in this + * container if it's installed. + * + * This avoids a runtime-require being interpreted statically by bundlers. + */ +export declare const signatureV4aContainer: { + SignatureV4a: null | OptionalSigV4aSigner; +}; diff --git a/amplify/functions/fetchDocuments/node_modules/@smithy/signature-v4/dist-types/suite.fixture.d.ts b/amplify/functions/fetchDocuments/node_modules/@smithy/signature-v4/dist-types/suite.fixture.d.ts new file mode 100644 index 0000000..383bc35 --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@smithy/signature-v4/dist-types/suite.fixture.d.ts @@ -0,0 +1,14 @@ +import { HttpRequest } from "@smithy/types"; +export interface TestCase { + name: string; + request: HttpRequest; + authorization: string; +} +export declare const region = "us-east-1"; +export declare const service = "service"; +export declare const credentials: { + accessKeyId: string; + secretAccessKey: string; +}; +export declare const signingDate: Date; +export declare const requests: Array; diff --git a/amplify/functions/fetchDocuments/node_modules/@smithy/signature-v4/dist-types/ts3.4/HeaderFormatter.d.ts 
b/amplify/functions/fetchDocuments/node_modules/@smithy/signature-v4/dist-types/ts3.4/HeaderFormatter.d.ts new file mode 100644 index 0000000..6c294c3 --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@smithy/signature-v4/dist-types/ts3.4/HeaderFormatter.d.ts @@ -0,0 +1,24 @@ +import { Int64 as IInt64, MessageHeaders } from "@smithy/types"; +/** + * @internal + * TODO: duplicated from @smithy/eventstream-codec to break large dependency. + * TODO: This should be moved to its own deduped submodule in @smithy/core when submodules are implemented. + */ +export declare class HeaderFormatter { + format(headers: MessageHeaders): Uint8Array; + private formatHeaderValue; +} +/** + * TODO: duplicated from @smithy/eventstream-codec to break large dependency. + * TODO: This should be moved to its own deduped submodule in @smithy/core when submodules are implemented. + */ +export declare class Int64 implements IInt64 { + readonly bytes: Uint8Array; + constructor(bytes: Uint8Array); + static fromNumber(number: number): Int64; + /** + * Called implicitly by infix arithmetic operators. 
+ */ + valueOf(): number; + toString(): string; +} diff --git a/amplify/functions/fetchDocuments/node_modules/@smithy/signature-v4/dist-types/ts3.4/SignatureV4.d.ts b/amplify/functions/fetchDocuments/node_modules/@smithy/signature-v4/dist-types/ts3.4/SignatureV4.d.ts new file mode 100644 index 0000000..c613753 --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@smithy/signature-v4/dist-types/ts3.4/SignatureV4.d.ts @@ -0,0 +1,20 @@ +import { EventSigner, EventSigningArguments, FormattedEvent, HttpRequest, MessageSigner, RequestPresigner, RequestPresigningArguments, RequestSigner, RequestSigningArguments, SignableMessage, SignedMessage, SigningArguments, StringSigner } from "@smithy/types"; +import { SignatureV4Base, SignatureV4CryptoInit, SignatureV4Init } from "./SignatureV4Base"; +/** + * @public + */ +export declare class SignatureV4 extends SignatureV4Base implements RequestPresigner, RequestSigner, StringSigner, EventSigner, MessageSigner { + private readonly headerFormatter; + constructor({ applyChecksum, credentials, region, service, sha256, uriEscapePath, }: SignatureV4Init & SignatureV4CryptoInit); + presign(originalRequest: HttpRequest, options?: RequestPresigningArguments): Promise; + sign(stringToSign: string, options?: SigningArguments): Promise; + sign(event: FormattedEvent, options: EventSigningArguments): Promise; + sign(event: SignableMessage, options: SigningArguments): Promise; + sign(requestToSign: HttpRequest, options?: RequestSigningArguments): Promise; + private signEvent; + signMessage(signableMessage: SignableMessage, { signingDate, signingRegion, signingService }: SigningArguments): Promise; + private signString; + private signRequest; + private getSignature; + private getSigningKey; +} diff --git a/amplify/functions/fetchDocuments/node_modules/@smithy/signature-v4/dist-types/ts3.4/SignatureV4Base.d.ts b/amplify/functions/fetchDocuments/node_modules/@smithy/signature-v4/dist-types/ts3.4/SignatureV4Base.d.ts new file mode 
100644 index 0000000..be1da1f --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@smithy/signature-v4/dist-types/ts3.4/SignatureV4Base.d.ts @@ -0,0 +1,69 @@ +import { AwsCredentialIdentity, ChecksumConstructor, DateInput, HashConstructor, HeaderBag, HttpRequest, Provider } from "@smithy/types"; +/** + * @public + */ +export interface SignatureV4Init { + /** + * The service signing name. + */ + service: string; + /** + * The region name or a function that returns a promise that will be + * resolved with the region name. + */ + region: string | Provider; + /** + * The credentials with which the request should be signed or a function + * that returns a promise that will be resolved with credentials. + */ + credentials: AwsCredentialIdentity | Provider; + /** + * A constructor function for a hash object that will calculate SHA-256 HMAC + * checksums. + */ + sha256?: ChecksumConstructor | HashConstructor; + /** + * Whether to uri-escape the request URI path as part of computing the + * canonical request string. This is required for every AWS service, except + * Amazon S3, as of late 2017. + * + * @default [true] + */ + uriEscapePath?: boolean; + /** + * Whether to calculate a checksum of the request body and include it as + * either a request header (when signing) or as a query string parameter + * (when presigning). This is required for AWS Glacier and Amazon S3 and optional for + * every other AWS service as of late 2017. 
+ * + * @default [true] + */ + applyChecksum?: boolean; +} +/** + * @public + */ +export interface SignatureV4CryptoInit { + sha256: ChecksumConstructor | HashConstructor; +} +/** + * @internal + */ +export declare abstract class SignatureV4Base { + protected readonly service: string; + protected readonly regionProvider: Provider; + protected readonly credentialProvider: Provider; + protected readonly sha256: ChecksumConstructor | HashConstructor; + private readonly uriEscapePath; + protected readonly applyChecksum: boolean; + protected constructor({ applyChecksum, credentials, region, service, sha256, uriEscapePath, }: SignatureV4Init & SignatureV4CryptoInit); + protected createCanonicalRequest(request: HttpRequest, canonicalHeaders: HeaderBag, payloadHash: string): string; + protected createStringToSign(longDate: string, credentialScope: string, canonicalRequest: string, algorithmIdentifier: string): Promise; + private getCanonicalPath; + protected validateResolvedCredentials(credentials: unknown): void; + protected formatDate(now: DateInput): { + longDate: string; + shortDate: string; + }; + protected getCanonicalHeaderList(headers: object): string; +} diff --git a/amplify/functions/fetchDocuments/node_modules/@smithy/signature-v4/dist-types/ts3.4/constants.d.ts b/amplify/functions/fetchDocuments/node_modules/@smithy/signature-v4/dist-types/ts3.4/constants.d.ts new file mode 100644 index 0000000..ff54b67 --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@smithy/signature-v4/dist-types/ts3.4/constants.d.ts @@ -0,0 +1,43 @@ +export declare const ALGORITHM_QUERY_PARAM = "X-Amz-Algorithm"; +export declare const CREDENTIAL_QUERY_PARAM = "X-Amz-Credential"; +export declare const AMZ_DATE_QUERY_PARAM = "X-Amz-Date"; +export declare const SIGNED_HEADERS_QUERY_PARAM = "X-Amz-SignedHeaders"; +export declare const EXPIRES_QUERY_PARAM = "X-Amz-Expires"; +export declare const SIGNATURE_QUERY_PARAM = "X-Amz-Signature"; +export declare const TOKEN_QUERY_PARAM = 
"X-Amz-Security-Token"; +export declare const REGION_SET_PARAM = "X-Amz-Region-Set"; +export declare const AUTH_HEADER = "authorization"; +export declare const AMZ_DATE_HEADER: string; +export declare const DATE_HEADER = "date"; +export declare const GENERATED_HEADERS: string[]; +export declare const SIGNATURE_HEADER: string; +export declare const SHA256_HEADER = "x-amz-content-sha256"; +export declare const TOKEN_HEADER: string; +export declare const HOST_HEADER = "host"; +export declare const ALWAYS_UNSIGNABLE_HEADERS: { + authorization: boolean; + "cache-control": boolean; + connection: boolean; + expect: boolean; + from: boolean; + "keep-alive": boolean; + "max-forwards": boolean; + pragma: boolean; + referer: boolean; + te: boolean; + trailer: boolean; + "transfer-encoding": boolean; + upgrade: boolean; + "user-agent": boolean; + "x-amzn-trace-id": boolean; +}; +export declare const PROXY_HEADER_PATTERN: RegExp; +export declare const SEC_HEADER_PATTERN: RegExp; +export declare const UNSIGNABLE_PATTERNS: RegExp[]; +export declare const ALGORITHM_IDENTIFIER = "AWS4-HMAC-SHA256"; +export declare const ALGORITHM_IDENTIFIER_V4A = "AWS4-ECDSA-P256-SHA256"; +export declare const EVENT_ALGORITHM_IDENTIFIER = "AWS4-HMAC-SHA256-PAYLOAD"; +export declare const UNSIGNED_PAYLOAD = "UNSIGNED-PAYLOAD"; +export declare const MAX_CACHE_SIZE = 50; +export declare const KEY_TYPE_IDENTIFIER = "aws4_request"; +export declare const MAX_PRESIGNED_TTL: number; diff --git a/amplify/functions/fetchDocuments/node_modules/@smithy/signature-v4/dist-types/ts3.4/credentialDerivation.d.ts b/amplify/functions/fetchDocuments/node_modules/@smithy/signature-v4/dist-types/ts3.4/credentialDerivation.d.ts new file mode 100644 index 0000000..6cba9b6 --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@smithy/signature-v4/dist-types/ts3.4/credentialDerivation.d.ts @@ -0,0 +1,30 @@ +import { AwsCredentialIdentity, ChecksumConstructor, HashConstructor } from "@smithy/types"; +/** + * 
Create a string describing the scope of credentials used to sign a request. + * + * @internal + * + * @param shortDate - the current calendar date in the form YYYYMMDD. + * @param region - the AWS region in which the service resides. + * @param service - the service to which the signed request is being sent. + */ +export declare const createScope: (shortDate: string, region: string, service: string) => string; +/** + * Derive a signing key from its composite parts. + * + * @internal + * + * @param sha256Constructor - a constructor function that can instantiate SHA-256 + * hash objects. + * @param credentials - the credentials with which the request will be + * signed. + * @param shortDate - the current calendar date in the form YYYYMMDD. + * @param region - the AWS region in which the service resides. + * @param service - the service to which the signed request is being + * sent. + */ +export declare const getSigningKey: (sha256Constructor: ChecksumConstructor | HashConstructor, credentials: AwsCredentialIdentity, shortDate: string, region: string, service: string) => Promise; +/** + * @internal + */ +export declare const clearCredentialCache: () => void; diff --git a/amplify/functions/fetchDocuments/node_modules/@smithy/signature-v4/dist-types/ts3.4/getCanonicalHeaders.d.ts b/amplify/functions/fetchDocuments/node_modules/@smithy/signature-v4/dist-types/ts3.4/getCanonicalHeaders.d.ts new file mode 100644 index 0000000..e8f2e98 --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@smithy/signature-v4/dist-types/ts3.4/getCanonicalHeaders.d.ts @@ -0,0 +1,5 @@ +import { HeaderBag, HttpRequest } from "@smithy/types"; +/** + * @internal + */ +export declare const getCanonicalHeaders: ({ headers }: HttpRequest, unsignableHeaders?: Set, signableHeaders?: Set) => HeaderBag; diff --git a/amplify/functions/fetchDocuments/node_modules/@smithy/signature-v4/dist-types/ts3.4/getCanonicalQuery.d.ts 
b/amplify/functions/fetchDocuments/node_modules/@smithy/signature-v4/dist-types/ts3.4/getCanonicalQuery.d.ts new file mode 100644 index 0000000..6a2d4fa --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@smithy/signature-v4/dist-types/ts3.4/getCanonicalQuery.d.ts @@ -0,0 +1,5 @@ +import { HttpRequest } from "@smithy/types"; +/** + * @internal + */ +export declare const getCanonicalQuery: ({ query }: HttpRequest) => string; diff --git a/amplify/functions/fetchDocuments/node_modules/@smithy/signature-v4/dist-types/ts3.4/getPayloadHash.d.ts b/amplify/functions/fetchDocuments/node_modules/@smithy/signature-v4/dist-types/ts3.4/getPayloadHash.d.ts new file mode 100644 index 0000000..c14a46d --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@smithy/signature-v4/dist-types/ts3.4/getPayloadHash.d.ts @@ -0,0 +1,5 @@ +import { ChecksumConstructor, HashConstructor, HttpRequest } from "@smithy/types"; +/** + * @internal + */ +export declare const getPayloadHash: ({ headers, body }: HttpRequest, hashConstructor: ChecksumConstructor | HashConstructor) => Promise; diff --git a/amplify/functions/fetchDocuments/node_modules/@smithy/signature-v4/dist-types/ts3.4/headerUtil.d.ts b/amplify/functions/fetchDocuments/node_modules/@smithy/signature-v4/dist-types/ts3.4/headerUtil.d.ts new file mode 100644 index 0000000..41ca217 --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@smithy/signature-v4/dist-types/ts3.4/headerUtil.d.ts @@ -0,0 +1,4 @@ +import { HeaderBag } from "@smithy/types"; +export declare const hasHeader: (soughtHeader: string, headers: HeaderBag) => boolean; +export declare const getHeaderValue: (soughtHeader: string, headers: HeaderBag) => string | undefined; +export declare const deleteHeader: (soughtHeader: string, headers: HeaderBag) => void; diff --git a/amplify/functions/fetchDocuments/node_modules/@smithy/signature-v4/dist-types/ts3.4/index.d.ts 
b/amplify/functions/fetchDocuments/node_modules/@smithy/signature-v4/dist-types/ts3.4/index.d.ts new file mode 100644 index 0000000..c9fa5f6 --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@smithy/signature-v4/dist-types/ts3.4/index.d.ts @@ -0,0 +1,11 @@ +export * from "./SignatureV4"; +export * from "./constants"; +export { getCanonicalHeaders } from "./getCanonicalHeaders"; +export { getCanonicalQuery } from "./getCanonicalQuery"; +export { getPayloadHash } from "./getPayloadHash"; +export { moveHeadersToQuery } from "./moveHeadersToQuery"; +export { prepareRequest } from "./prepareRequest"; +export * from "./credentialDerivation"; +export { SignatureV4Init, SignatureV4CryptoInit, SignatureV4Base } from "./SignatureV4Base"; +export { hasHeader } from "./headerUtil"; +export * from "./signature-v4a-container"; diff --git a/amplify/functions/fetchDocuments/node_modules/@smithy/signature-v4/dist-types/ts3.4/moveHeadersToQuery.d.ts b/amplify/functions/fetchDocuments/node_modules/@smithy/signature-v4/dist-types/ts3.4/moveHeadersToQuery.d.ts new file mode 100644 index 0000000..2017f3b --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@smithy/signature-v4/dist-types/ts3.4/moveHeadersToQuery.d.ts @@ -0,0 +1,10 @@ +import { HttpRequest as IHttpRequest, QueryParameterBag } from "@smithy/types"; +/** + * @internal + */ +export declare const moveHeadersToQuery: (request: IHttpRequest, options?: { + unhoistableHeaders?: Set; + hoistableHeaders?: Set; +}) => IHttpRequest & { + query: QueryParameterBag; +}; diff --git a/amplify/functions/fetchDocuments/node_modules/@smithy/signature-v4/dist-types/ts3.4/prepareRequest.d.ts b/amplify/functions/fetchDocuments/node_modules/@smithy/signature-v4/dist-types/ts3.4/prepareRequest.d.ts new file mode 100644 index 0000000..57cf782 --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@smithy/signature-v4/dist-types/ts3.4/prepareRequest.d.ts @@ -0,0 +1,5 @@ +import { HttpRequest as IHttpRequest 
} from "@smithy/types"; +/** + * @internal + */ +export declare const prepareRequest: (request: IHttpRequest) => IHttpRequest; diff --git a/amplify/functions/fetchDocuments/node_modules/@smithy/signature-v4/dist-types/ts3.4/signature-v4a-container.d.ts b/amplify/functions/fetchDocuments/node_modules/@smithy/signature-v4/dist-types/ts3.4/signature-v4a-container.d.ts new file mode 100644 index 0000000..f1adc97 --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@smithy/signature-v4/dist-types/ts3.4/signature-v4a-container.d.ts @@ -0,0 +1,24 @@ +import { RequestSigner } from "@smithy/types"; +/** + * @public + */ +export type OptionalSigV4aSigner = { + /** + * This constructor is not typed so as not to require a type import + * from the signature-v4a package. + * + * The true type is SignatureV4a from @smithy/signature-v4a. + */ + new (options: any): RequestSigner; +}; +/** + * @public + * + * \@smithy/signature-v4a will install the constructor in this + * container if it's installed. + * + * This avoids a runtime-require being interpreted statically by bundlers. 
+ */ +export declare const signatureV4aContainer: { + SignatureV4a: null | OptionalSigV4aSigner; +}; diff --git a/amplify/functions/fetchDocuments/node_modules/@smithy/signature-v4/dist-types/ts3.4/suite.fixture.d.ts b/amplify/functions/fetchDocuments/node_modules/@smithy/signature-v4/dist-types/ts3.4/suite.fixture.d.ts new file mode 100644 index 0000000..9363eeb --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@smithy/signature-v4/dist-types/ts3.4/suite.fixture.d.ts @@ -0,0 +1,14 @@ +import { HttpRequest } from "@smithy/types"; +export interface TestCase { + name: string; + request: HttpRequest; + authorization: string; +} +export declare const region = "us-east-1"; +export declare const service = "service"; +export declare const credentials: { + accessKeyId: string; + secretAccessKey: string; +}; +export declare const signingDate: Date; +export declare const requests: Array; diff --git a/amplify/functions/fetchDocuments/node_modules/@smithy/signature-v4/dist-types/ts3.4/utilDate.d.ts b/amplify/functions/fetchDocuments/node_modules/@smithy/signature-v4/dist-types/ts3.4/utilDate.d.ts new file mode 100644 index 0000000..9a6f383 --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@smithy/signature-v4/dist-types/ts3.4/utilDate.d.ts @@ -0,0 +1,2 @@ +export declare const iso8601: (time: number | string | Date) => string; +export declare const toDate: (time: number | string | Date) => Date; diff --git a/amplify/functions/fetchDocuments/node_modules/@smithy/signature-v4/dist-types/utilDate.d.ts b/amplify/functions/fetchDocuments/node_modules/@smithy/signature-v4/dist-types/utilDate.d.ts new file mode 100644 index 0000000..e8c6a68 --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@smithy/signature-v4/dist-types/utilDate.d.ts @@ -0,0 +1,2 @@ +export declare const iso8601: (time: number | string | Date) => string; +export declare const toDate: (time: number | string | Date) => Date; diff --git 
a/amplify/functions/fetchDocuments/node_modules/@smithy/signature-v4/package.json b/amplify/functions/fetchDocuments/node_modules/@smithy/signature-v4/package.json new file mode 100644 index 0000000..56e028c --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@smithy/signature-v4/package.json @@ -0,0 +1,69 @@ +{ + "name": "@smithy/signature-v4", + "version": "5.1.0", + "description": "A standalone implementation of the AWS Signature V4 request signing algorithm", + "main": "./dist-cjs/index.js", + "module": "./dist-es/index.js", + "types": "./dist-types/index.d.ts", + "scripts": { + "build": "concurrently 'yarn:build:cjs' 'yarn:build:es' 'yarn:build:types && yarn build:types:downlevel'", + "build:cjs": "node ../../scripts/inline signature-v4", + "build:es": "yarn g:tsc -p tsconfig.es.json", + "build:types": "yarn g:tsc -p tsconfig.types.json", + "build:types:downlevel": "rimraf dist-types/ts3.4 && downlevel-dts dist-types dist-types/ts3.4", + "stage-release": "rimraf ./.release && yarn pack && mkdir ./.release && tar zxvf ./package.tgz --directory ./.release && rm ./package.tgz", + "clean": "rimraf ./dist-* && rimraf *.tsbuildinfo || exit 0", + "lint": "eslint -c ../../.eslintrc.js \"src/**/*.ts\"", + "format": "prettier --config ../../prettier.config.js --ignore-path ../../.prettierignore --write \"**/*.{ts,md,json}\"", + "extract:docs": "api-extractor run --local", + "test": "yarn g:vitest run", + "test:watch": "yarn g:vitest watch" + }, + "author": { + "name": "AWS SDK for JavaScript Team", + "url": "https://aws.amazon.com/javascript/" + }, + "license": "Apache-2.0", + "dependencies": { + "@smithy/is-array-buffer": "^4.0.0", + "@smithy/protocol-http": "^5.1.0", + "@smithy/types": "^4.2.0", + "@smithy/util-hex-encoding": "^4.0.0", + "@smithy/util-middleware": "^4.0.2", + "@smithy/util-uri-escape": "^4.0.0", + "@smithy/util-utf8": "^4.0.0", + "tslib": "^2.6.2" + }, + "devDependencies": { + "@aws-crypto/sha256-js": "5.2.0", + "concurrently": "7.0.0", 
+ "downlevel-dts": "0.10.1", + "rimraf": "3.0.2", + "typedoc": "0.23.23" + }, + "engines": { + "node": ">=18.0.0" + }, + "typesVersions": { + "<4.0": { + "dist-types/*": [ + "dist-types/ts3.4/*" + ] + } + }, + "files": [ + "dist-*/**" + ], + "homepage": "https://github.com/smithy-lang/smithy-typescript/tree/main/packages/signature-v4", + "repository": { + "type": "git", + "url": "https://github.com/smithy-lang/smithy-typescript.git", + "directory": "packages/signature-v4" + }, + "typedoc": { + "entryPoint": "src/index.ts" + }, + "publishConfig": { + "directory": ".release/package" + } +} \ No newline at end of file diff --git a/amplify/functions/fetchDocuments/node_modules/@smithy/smithy-client/LICENSE b/amplify/functions/fetchDocuments/node_modules/@smithy/smithy-client/LICENSE new file mode 100644 index 0000000..e907b58 --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@smithy/smithy-client/LICENSE @@ -0,0 +1,201 @@ + Apache License + Version 2.0, January 2004 + http://www.apache.org/licenses/ + + TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION + + 1. Definitions. + + "License" shall mean the terms and conditions for use, reproduction, + and distribution as defined by Sections 1 through 9 of this document. + + "Licensor" shall mean the copyright owner or entity authorized by + the copyright owner that is granting the License. + + "Legal Entity" shall mean the union of the acting entity and all + other entities that control, are controlled by, or are under common + control with that entity. For the purposes of this definition, + "control" means (i) the power, direct or indirect, to cause the + direction or management of such entity, whether by contract or + otherwise, or (ii) ownership of fifty percent (50%) or more of the + outstanding shares, or (iii) beneficial ownership of such entity. + + "You" (or "Your") shall mean an individual or Legal Entity + exercising permissions granted by this License. 
+ + "Source" form shall mean the preferred form for making modifications, + including but not limited to software source code, documentation + source, and configuration files. + + "Object" form shall mean any form resulting from mechanical + transformation or translation of a Source form, including but + not limited to compiled object code, generated documentation, + and conversions to other media types. + + "Work" shall mean the work of authorship, whether in Source or + Object form, made available under the License, as indicated by a + copyright notice that is included in or attached to the work + (an example is provided in the Appendix below). + + "Derivative Works" shall mean any work, whether in Source or Object + form, that is based on (or derived from) the Work and for which the + editorial revisions, annotations, elaborations, or other modifications + represent, as a whole, an original work of authorship. For the purposes + of this License, Derivative Works shall not include works that remain + separable from, or merely link (or bind by name) to the interfaces of, + the Work and Derivative Works thereof. + + "Contribution" shall mean any work of authorship, including + the original version of the Work and any modifications or additions + to that Work or Derivative Works thereof, that is intentionally + submitted to Licensor for inclusion in the Work by the copyright owner + or by an individual or Legal Entity authorized to submit on behalf of + the copyright owner. 
For the purposes of this definition, "submitted" + means any form of electronic, verbal, or written communication sent + to the Licensor or its representatives, including but not limited to + communication on electronic mailing lists, source code control systems, + and issue tracking systems that are managed by, or on behalf of, the + Licensor for the purpose of discussing and improving the Work, but + excluding communication that is conspicuously marked or otherwise + designated in writing by the copyright owner as "Not a Contribution." + + "Contributor" shall mean Licensor and any individual or Legal Entity + on behalf of whom a Contribution has been received by Licensor and + subsequently incorporated within the Work. + + 2. Grant of Copyright License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + copyright license to reproduce, prepare Derivative Works of, + publicly display, publicly perform, sublicense, and distribute the + Work and such Derivative Works in Source or Object form. + + 3. Grant of Patent License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + (except as stated in this section) patent license to make, have made, + use, offer to sell, sell, import, and otherwise transfer the Work, + where such license applies only to those patent claims licensable + by such Contributor that are necessarily infringed by their + Contribution(s) alone or by combination of their Contribution(s) + with the Work to which such Contribution(s) was submitted. 
If You + institute patent litigation against any entity (including a + cross-claim or counterclaim in a lawsuit) alleging that the Work + or a Contribution incorporated within the Work constitutes direct + or contributory patent infringement, then any patent licenses + granted to You under this License for that Work shall terminate + as of the date such litigation is filed. + + 4. Redistribution. You may reproduce and distribute copies of the + Work or Derivative Works thereof in any medium, with or without + modifications, and in Source or Object form, provided that You + meet the following conditions: + + (a) You must give any other recipients of the Work or + Derivative Works a copy of this License; and + + (b) You must cause any modified files to carry prominent notices + stating that You changed the files; and + + (c) You must retain, in the Source form of any Derivative Works + that You distribute, all copyright, patent, trademark, and + attribution notices from the Source form of the Work, + excluding those notices that do not pertain to any part of + the Derivative Works; and + + (d) If the Work includes a "NOTICE" text file as part of its + distribution, then any Derivative Works that You distribute must + include a readable copy of the attribution notices contained + within such NOTICE file, excluding those notices that do not + pertain to any part of the Derivative Works, in at least one + of the following places: within a NOTICE text file distributed + as part of the Derivative Works; within the Source form or + documentation, if provided along with the Derivative Works; or, + within a display generated by the Derivative Works, if and + wherever such third-party notices normally appear. The contents + of the NOTICE file are for informational purposes only and + do not modify the License. 
You may add Your own attribution + notices within Derivative Works that You distribute, alongside + or as an addendum to the NOTICE text from the Work, provided + that such additional attribution notices cannot be construed + as modifying the License. + + You may add Your own copyright statement to Your modifications and + may provide additional or different license terms and conditions + for use, reproduction, or distribution of Your modifications, or + for any such Derivative Works as a whole, provided Your use, + reproduction, and distribution of the Work otherwise complies with + the conditions stated in this License. + + 5. Submission of Contributions. Unless You explicitly state otherwise, + any Contribution intentionally submitted for inclusion in the Work + by You to the Licensor shall be under the terms and conditions of + this License, without any additional terms or conditions. + Notwithstanding the above, nothing herein shall supersede or modify + the terms of any separate license agreement you may have executed + with Licensor regarding such Contributions. + + 6. Trademarks. This License does not grant permission to use the trade + names, trademarks, service marks, or product names of the Licensor, + except as required for reasonable and customary use in describing the + origin of the Work and reproducing the content of the NOTICE file. + + 7. Disclaimer of Warranty. Unless required by applicable law or + agreed to in writing, Licensor provides the Work (and each + Contributor provides its Contributions) on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or + implied, including, without limitation, any warranties or conditions + of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A + PARTICULAR PURPOSE. You are solely responsible for determining the + appropriateness of using or redistributing the Work and assume any + risks associated with Your exercise of permissions under this License. + + 8. 
Limitation of Liability. In no event and under no legal theory, + whether in tort (including negligence), contract, or otherwise, + unless required by applicable law (such as deliberate and grossly + negligent acts) or agreed to in writing, shall any Contributor be + liable to You for damages, including any direct, indirect, special, + incidental, or consequential damages of any character arising as a + result of this License or out of the use or inability to use the + Work (including but not limited to damages for loss of goodwill, + work stoppage, computer failure or malfunction, or any and all + other commercial damages or losses), even if such Contributor + has been advised of the possibility of such damages. + + 9. Accepting Warranty or Additional Liability. While redistributing + the Work or Derivative Works thereof, You may choose to offer, + and charge a fee for, acceptance of support, warranty, indemnity, + or other liability obligations and/or rights consistent with this + License. However, in accepting such obligations, You may act only + on Your own behalf and on Your sole responsibility, not on behalf + of any other Contributor, and only if You agree to indemnify, + defend, and hold each Contributor harmless for any liability + incurred by, or claims asserted against, such Contributor by reason + of your accepting any such warranty or additional liability. + + END OF TERMS AND CONDITIONS + + APPENDIX: How to apply the Apache License to your work. + + To apply the Apache License to your work, attach the following + boilerplate notice, with the fields enclosed by brackets "{}" + replaced with your own identifying information. (Don't include + the brackets!) The text should be enclosed in the appropriate + comment syntax for the file format. We also recommend that a + file or class name and description of purpose be included on the + same "printed page" as the copyright notice for easier + identification within third-party archives. 
+ + Copyright 2019 Amazon.com, Inc. or its affiliates. All Rights Reserved. + + Licensed under the Apache License, Version 2.0 (the "License"); + you may not use this file except in compliance with the License. + You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + + Unless required by applicable law or agreed to in writing, software + distributed under the License is distributed on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + See the License for the specific language governing permissions and + limitations under the License. diff --git a/amplify/functions/fetchDocuments/node_modules/@smithy/smithy-client/README.md b/amplify/functions/fetchDocuments/node_modules/@smithy/smithy-client/README.md new file mode 100644 index 0000000..365cd62 --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@smithy/smithy-client/README.md @@ -0,0 +1,10 @@ +# @smithy/smithy-client + +[![NPM version](https://img.shields.io/npm/v/@smithy/smithy-client/latest.svg)](https://www.npmjs.com/package/@smithy/smithy-client) +[![NPM downloads](https://img.shields.io/npm/dm/@smithy/smithy-client.svg)](https://www.npmjs.com/package/@smithy/smithy-client) + +> An internal package + +## Usage + +You probably shouldn't, at least directly. 
diff --git a/amplify/functions/fetchDocuments/node_modules/@smithy/smithy-client/dist-cjs/NoOpLogger.js b/amplify/functions/fetchDocuments/node_modules/@smithy/smithy-client/dist-cjs/NoOpLogger.js new file mode 100644 index 0000000..532e610 --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@smithy/smithy-client/dist-cjs/NoOpLogger.js @@ -0,0 +1 @@ +module.exports = require("./index.js"); \ No newline at end of file diff --git a/amplify/functions/fetchDocuments/node_modules/@smithy/smithy-client/dist-cjs/client.js b/amplify/functions/fetchDocuments/node_modules/@smithy/smithy-client/dist-cjs/client.js new file mode 100644 index 0000000..532e610 --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@smithy/smithy-client/dist-cjs/client.js @@ -0,0 +1 @@ +module.exports = require("./index.js"); \ No newline at end of file diff --git a/amplify/functions/fetchDocuments/node_modules/@smithy/smithy-client/dist-cjs/collect-stream-body.js b/amplify/functions/fetchDocuments/node_modules/@smithy/smithy-client/dist-cjs/collect-stream-body.js new file mode 100644 index 0000000..532e610 --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@smithy/smithy-client/dist-cjs/collect-stream-body.js @@ -0,0 +1 @@ +module.exports = require("./index.js"); \ No newline at end of file diff --git a/amplify/functions/fetchDocuments/node_modules/@smithy/smithy-client/dist-cjs/command.js b/amplify/functions/fetchDocuments/node_modules/@smithy/smithy-client/dist-cjs/command.js new file mode 100644 index 0000000..532e610 --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@smithy/smithy-client/dist-cjs/command.js @@ -0,0 +1 @@ +module.exports = require("./index.js"); \ No newline at end of file diff --git a/amplify/functions/fetchDocuments/node_modules/@smithy/smithy-client/dist-cjs/constants.js b/amplify/functions/fetchDocuments/node_modules/@smithy/smithy-client/dist-cjs/constants.js new file mode 100644 index 0000000..532e610 --- /dev/null 
+++ b/amplify/functions/fetchDocuments/node_modules/@smithy/smithy-client/dist-cjs/constants.js @@ -0,0 +1 @@ +module.exports = require("./index.js"); \ No newline at end of file diff --git a/amplify/functions/fetchDocuments/node_modules/@smithy/smithy-client/dist-cjs/create-aggregated-client.js b/amplify/functions/fetchDocuments/node_modules/@smithy/smithy-client/dist-cjs/create-aggregated-client.js new file mode 100644 index 0000000..532e610 --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@smithy/smithy-client/dist-cjs/create-aggregated-client.js @@ -0,0 +1 @@ +module.exports = require("./index.js"); \ No newline at end of file diff --git a/amplify/functions/fetchDocuments/node_modules/@smithy/smithy-client/dist-cjs/date-utils.js b/amplify/functions/fetchDocuments/node_modules/@smithy/smithy-client/dist-cjs/date-utils.js new file mode 100644 index 0000000..532e610 --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@smithy/smithy-client/dist-cjs/date-utils.js @@ -0,0 +1 @@ +module.exports = require("./index.js"); \ No newline at end of file diff --git a/amplify/functions/fetchDocuments/node_modules/@smithy/smithy-client/dist-cjs/default-error-handler.js b/amplify/functions/fetchDocuments/node_modules/@smithy/smithy-client/dist-cjs/default-error-handler.js new file mode 100644 index 0000000..532e610 --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@smithy/smithy-client/dist-cjs/default-error-handler.js @@ -0,0 +1 @@ +module.exports = require("./index.js"); \ No newline at end of file diff --git a/amplify/functions/fetchDocuments/node_modules/@smithy/smithy-client/dist-cjs/defaults-mode.js b/amplify/functions/fetchDocuments/node_modules/@smithy/smithy-client/dist-cjs/defaults-mode.js new file mode 100644 index 0000000..532e610 --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@smithy/smithy-client/dist-cjs/defaults-mode.js @@ -0,0 +1 @@ +module.exports = require("./index.js"); \ No newline at end of 
file diff --git a/amplify/functions/fetchDocuments/node_modules/@smithy/smithy-client/dist-cjs/emitWarningIfUnsupportedVersion.js b/amplify/functions/fetchDocuments/node_modules/@smithy/smithy-client/dist-cjs/emitWarningIfUnsupportedVersion.js new file mode 100644 index 0000000..532e610 --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@smithy/smithy-client/dist-cjs/emitWarningIfUnsupportedVersion.js @@ -0,0 +1 @@ +module.exports = require("./index.js"); \ No newline at end of file diff --git a/amplify/functions/fetchDocuments/node_modules/@smithy/smithy-client/dist-cjs/exceptions.js b/amplify/functions/fetchDocuments/node_modules/@smithy/smithy-client/dist-cjs/exceptions.js new file mode 100644 index 0000000..532e610 --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@smithy/smithy-client/dist-cjs/exceptions.js @@ -0,0 +1 @@ +module.exports = require("./index.js"); \ No newline at end of file diff --git a/amplify/functions/fetchDocuments/node_modules/@smithy/smithy-client/dist-cjs/extended-encode-uri-component.js b/amplify/functions/fetchDocuments/node_modules/@smithy/smithy-client/dist-cjs/extended-encode-uri-component.js new file mode 100644 index 0000000..532e610 --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@smithy/smithy-client/dist-cjs/extended-encode-uri-component.js @@ -0,0 +1 @@ +module.exports = require("./index.js"); \ No newline at end of file diff --git a/amplify/functions/fetchDocuments/node_modules/@smithy/smithy-client/dist-cjs/extensions/checksum.js b/amplify/functions/fetchDocuments/node_modules/@smithy/smithy-client/dist-cjs/extensions/checksum.js new file mode 100644 index 0000000..0440577 --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@smithy/smithy-client/dist-cjs/extensions/checksum.js @@ -0,0 +1 @@ +module.exports = require("../index.js"); \ No newline at end of file diff --git 
a/amplify/functions/fetchDocuments/node_modules/@smithy/smithy-client/dist-cjs/extensions/defaultExtensionConfiguration.js b/amplify/functions/fetchDocuments/node_modules/@smithy/smithy-client/dist-cjs/extensions/defaultExtensionConfiguration.js new file mode 100644 index 0000000..0440577 --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@smithy/smithy-client/dist-cjs/extensions/defaultExtensionConfiguration.js @@ -0,0 +1 @@ +module.exports = require("../index.js"); \ No newline at end of file diff --git a/amplify/functions/fetchDocuments/node_modules/@smithy/smithy-client/dist-cjs/extensions/index.js b/amplify/functions/fetchDocuments/node_modules/@smithy/smithy-client/dist-cjs/extensions/index.js new file mode 100644 index 0000000..0440577 --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@smithy/smithy-client/dist-cjs/extensions/index.js @@ -0,0 +1 @@ +module.exports = require("../index.js"); \ No newline at end of file diff --git a/amplify/functions/fetchDocuments/node_modules/@smithy/smithy-client/dist-cjs/extensions/retry.js b/amplify/functions/fetchDocuments/node_modules/@smithy/smithy-client/dist-cjs/extensions/retry.js new file mode 100644 index 0000000..0440577 --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@smithy/smithy-client/dist-cjs/extensions/retry.js @@ -0,0 +1 @@ +module.exports = require("../index.js"); \ No newline at end of file diff --git a/amplify/functions/fetchDocuments/node_modules/@smithy/smithy-client/dist-cjs/get-array-if-single-item.js b/amplify/functions/fetchDocuments/node_modules/@smithy/smithy-client/dist-cjs/get-array-if-single-item.js new file mode 100644 index 0000000..532e610 --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@smithy/smithy-client/dist-cjs/get-array-if-single-item.js @@ -0,0 +1 @@ +module.exports = require("./index.js"); \ No newline at end of file diff --git 
a/amplify/functions/fetchDocuments/node_modules/@smithy/smithy-client/dist-cjs/get-value-from-text-node.js b/amplify/functions/fetchDocuments/node_modules/@smithy/smithy-client/dist-cjs/get-value-from-text-node.js new file mode 100644 index 0000000..532e610 --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@smithy/smithy-client/dist-cjs/get-value-from-text-node.js @@ -0,0 +1 @@ +module.exports = require("./index.js"); \ No newline at end of file diff --git a/amplify/functions/fetchDocuments/node_modules/@smithy/smithy-client/dist-cjs/index.js b/amplify/functions/fetchDocuments/node_modules/@smithy/smithy-client/dist-cjs/index.js new file mode 100644 index 0000000..c410d8d --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@smithy/smithy-client/dist-cjs/index.js @@ -0,0 +1,1321 @@ +var __defProp = Object.defineProperty; +var __getOwnPropDesc = Object.getOwnPropertyDescriptor; +var __getOwnPropNames = Object.getOwnPropertyNames; +var __hasOwnProp = Object.prototype.hasOwnProperty; +var __name = (target, value) => __defProp(target, "name", { value, configurable: true }); +var __export = (target, all) => { + for (var name in all) + __defProp(target, name, { get: all[name], enumerable: true }); +}; +var __copyProps = (to, from, except, desc) => { + if (from && typeof from === "object" || typeof from === "function") { + for (let key of __getOwnPropNames(from)) + if (!__hasOwnProp.call(to, key) && key !== except) + __defProp(to, key, { get: () => from[key], enumerable: !(desc = __getOwnPropDesc(from, key)) || desc.enumerable }); + } + return to; +}; +var __toCommonJS = (mod) => __copyProps(__defProp({}, "__esModule", { value: true }), mod); + +// src/index.ts +var src_exports = {}; +__export(src_exports, { + Client: () => Client, + Command: () => Command, + LazyJsonString: () => LazyJsonString, + NoOpLogger: () => NoOpLogger, + SENSITIVE_STRING: () => SENSITIVE_STRING, + ServiceException: () => ServiceException, + _json: () => _json, + 
collectBody: () => import_protocols.collectBody, + convertMap: () => convertMap, + createAggregatedClient: () => createAggregatedClient, + dateToUtcString: () => dateToUtcString, + decorateServiceException: () => decorateServiceException, + emitWarningIfUnsupportedVersion: () => emitWarningIfUnsupportedVersion, + expectBoolean: () => expectBoolean, + expectByte: () => expectByte, + expectFloat32: () => expectFloat32, + expectInt: () => expectInt, + expectInt32: () => expectInt32, + expectLong: () => expectLong, + expectNonNull: () => expectNonNull, + expectNumber: () => expectNumber, + expectObject: () => expectObject, + expectShort: () => expectShort, + expectString: () => expectString, + expectUnion: () => expectUnion, + extendedEncodeURIComponent: () => import_protocols.extendedEncodeURIComponent, + getArrayIfSingleItem: () => getArrayIfSingleItem, + getDefaultClientConfiguration: () => getDefaultClientConfiguration, + getDefaultExtensionConfiguration: () => getDefaultExtensionConfiguration, + getValueFromTextNode: () => getValueFromTextNode, + handleFloat: () => handleFloat, + isSerializableHeaderValue: () => isSerializableHeaderValue, + limitedParseDouble: () => limitedParseDouble, + limitedParseFloat: () => limitedParseFloat, + limitedParseFloat32: () => limitedParseFloat32, + loadConfigsForDefaultMode: () => loadConfigsForDefaultMode, + logger: () => logger, + map: () => map, + parseBoolean: () => parseBoolean, + parseEpochTimestamp: () => parseEpochTimestamp, + parseRfc3339DateTime: () => parseRfc3339DateTime, + parseRfc3339DateTimeWithOffset: () => parseRfc3339DateTimeWithOffset, + parseRfc7231DateTime: () => parseRfc7231DateTime, + quoteHeader: () => quoteHeader, + resolveDefaultRuntimeConfig: () => resolveDefaultRuntimeConfig, + resolvedPath: () => import_protocols.resolvedPath, + serializeDateTime: () => serializeDateTime, + serializeFloat: () => serializeFloat, + splitEvery: () => splitEvery, + splitHeader: () => splitHeader, + strictParseByte: () => 
strictParseByte, + strictParseDouble: () => strictParseDouble, + strictParseFloat: () => strictParseFloat, + strictParseFloat32: () => strictParseFloat32, + strictParseInt: () => strictParseInt, + strictParseInt32: () => strictParseInt32, + strictParseLong: () => strictParseLong, + strictParseShort: () => strictParseShort, + take: () => take, + throwDefaultError: () => throwDefaultError, + withBaseException: () => withBaseException +}); +module.exports = __toCommonJS(src_exports); + +// src/client.ts +var import_middleware_stack = require("@smithy/middleware-stack"); +var Client = class { + constructor(config) { + this.config = config; + this.middlewareStack = (0, import_middleware_stack.constructStack)(); + } + static { + __name(this, "Client"); + } + send(command, optionsOrCb, cb) { + const options = typeof optionsOrCb !== "function" ? optionsOrCb : void 0; + const callback = typeof optionsOrCb === "function" ? optionsOrCb : cb; + const useHandlerCache = options === void 0 && this.config.cacheMiddleware === true; + let handler; + if (useHandlerCache) { + if (!this.handlers) { + this.handlers = /* @__PURE__ */ new WeakMap(); + } + const handlers = this.handlers; + if (handlers.has(command.constructor)) { + handler = handlers.get(command.constructor); + } else { + handler = command.resolveMiddleware(this.middlewareStack, this.config, options); + handlers.set(command.constructor, handler); + } + } else { + delete this.handlers; + handler = command.resolveMiddleware(this.middlewareStack, this.config, options); + } + if (callback) { + handler(command).then( + (result) => callback(null, result.output), + (err) => callback(err) + ).catch( + // prevent any errors thrown in the callback from triggering an + // unhandled promise rejection + () => { + } + ); + } else { + return handler(command).then((result) => result.output); + } + } + destroy() { + this.config?.requestHandler?.destroy?.(); + delete this.handlers; + } +}; + +// src/collect-stream-body.ts +var 
import_protocols = require("@smithy/core/protocols"); + +// src/command.ts + +var import_types = require("@smithy/types"); +var Command = class { + constructor() { + this.middlewareStack = (0, import_middleware_stack.constructStack)(); + } + static { + __name(this, "Command"); + } + /** + * Factory for Command ClassBuilder. + * @internal + */ + static classBuilder() { + return new ClassBuilder(); + } + /** + * @internal + */ + resolveMiddlewareWithContext(clientStack, configuration, options, { + middlewareFn, + clientName, + commandName, + inputFilterSensitiveLog, + outputFilterSensitiveLog, + smithyContext, + additionalContext, + CommandCtor + }) { + for (const mw of middlewareFn.bind(this)(CommandCtor, clientStack, configuration, options)) { + this.middlewareStack.use(mw); + } + const stack = clientStack.concat(this.middlewareStack); + const { logger: logger2 } = configuration; + const handlerExecutionContext = { + logger: logger2, + clientName, + commandName, + inputFilterSensitiveLog, + outputFilterSensitiveLog, + [import_types.SMITHY_CONTEXT_KEY]: { + commandInstance: this, + ...smithyContext + }, + ...additionalContext + }; + const { requestHandler } = configuration; + return stack.resolve( + (request) => requestHandler.handle(request.request, options || {}), + handlerExecutionContext + ); + } +}; +var ClassBuilder = class { + constructor() { + this._init = () => { + }; + this._ep = {}; + this._middlewareFn = () => []; + this._commandName = ""; + this._clientName = ""; + this._additionalContext = {}; + this._smithyContext = {}; + this._inputFilterSensitiveLog = (_) => _; + this._outputFilterSensitiveLog = (_) => _; + this._serializer = null; + this._deserializer = null; + } + static { + __name(this, "ClassBuilder"); + } + /** + * Optional init callback. + */ + init(cb) { + this._init = cb; + } + /** + * Set the endpoint parameter instructions. 
+ */ + ep(endpointParameterInstructions) { + this._ep = endpointParameterInstructions; + return this; + } + /** + * Add any number of middleware. + */ + m(middlewareSupplier) { + this._middlewareFn = middlewareSupplier; + return this; + } + /** + * Set the initial handler execution context Smithy field. + */ + s(service, operation, smithyContext = {}) { + this._smithyContext = { + service, + operation, + ...smithyContext + }; + return this; + } + /** + * Set the initial handler execution context. + */ + c(additionalContext = {}) { + this._additionalContext = additionalContext; + return this; + } + /** + * Set constant string identifiers for the operation. + */ + n(clientName, commandName) { + this._clientName = clientName; + this._commandName = commandName; + return this; + } + /** + * Set the input and output sensistive log filters. + */ + f(inputFilter = (_) => _, outputFilter = (_) => _) { + this._inputFilterSensitiveLog = inputFilter; + this._outputFilterSensitiveLog = outputFilter; + return this; + } + /** + * Sets the serializer. + */ + ser(serializer) { + this._serializer = serializer; + return this; + } + /** + * Sets the deserializer. + */ + de(deserializer) { + this._deserializer = deserializer; + return this; + } + /** + * @returns a Command class with the classBuilder properties. + */ + build() { + const closure = this; + let CommandRef; + return CommandRef = class extends Command { + /** + * @public + */ + constructor(...[input]) { + super(); + /** + * @internal + */ + // @ts-ignore used in middlewareFn closure. + this.serialize = closure._serializer; + /** + * @internal + */ + // @ts-ignore used in middlewareFn closure. + this.deserialize = closure._deserializer; + this.input = input ?? 
{}; + closure._init(this); + } + static { + __name(this, "CommandRef"); + } + /** + * @public + */ + static getEndpointParameterInstructions() { + return closure._ep; + } + /** + * @internal + */ + resolveMiddleware(stack, configuration, options) { + return this.resolveMiddlewareWithContext(stack, configuration, options, { + CommandCtor: CommandRef, + middlewareFn: closure._middlewareFn, + clientName: closure._clientName, + commandName: closure._commandName, + inputFilterSensitiveLog: closure._inputFilterSensitiveLog, + outputFilterSensitiveLog: closure._outputFilterSensitiveLog, + smithyContext: closure._smithyContext, + additionalContext: closure._additionalContext + }); + } + }; + } +}; + +// src/constants.ts +var SENSITIVE_STRING = "***SensitiveInformation***"; + +// src/create-aggregated-client.ts +var createAggregatedClient = /* @__PURE__ */ __name((commands, Client2) => { + for (const command of Object.keys(commands)) { + const CommandCtor = commands[command]; + const methodImpl = /* @__PURE__ */ __name(async function(args, optionsOrCb, cb) { + const command2 = new CommandCtor(args); + if (typeof optionsOrCb === "function") { + this.send(command2, optionsOrCb); + } else if (typeof cb === "function") { + if (typeof optionsOrCb !== "object") + throw new Error(`Expected http options but got ${typeof optionsOrCb}`); + this.send(command2, optionsOrCb || {}, cb); + } else { + return this.send(command2, optionsOrCb); + } + }, "methodImpl"); + const methodName = (command[0].toLowerCase() + command.slice(1)).replace(/Command$/, ""); + Client2.prototype[methodName] = methodImpl; + } +}, "createAggregatedClient"); + +// src/parse-utils.ts +var parseBoolean = /* @__PURE__ */ __name((value) => { + switch (value) { + case "true": + return true; + case "false": + return false; + default: + throw new Error(`Unable to parse boolean value "${value}"`); + } +}, "parseBoolean"); +var expectBoolean = /* @__PURE__ */ __name((value) => { + if (value === null || value === void 0) { 
+ return void 0; + } + if (typeof value === "number") { + if (value === 0 || value === 1) { + logger.warn(stackTraceWarning(`Expected boolean, got ${typeof value}: ${value}`)); + } + if (value === 0) { + return false; + } + if (value === 1) { + return true; + } + } + if (typeof value === "string") { + const lower = value.toLowerCase(); + if (lower === "false" || lower === "true") { + logger.warn(stackTraceWarning(`Expected boolean, got ${typeof value}: ${value}`)); + } + if (lower === "false") { + return false; + } + if (lower === "true") { + return true; + } + } + if (typeof value === "boolean") { + return value; + } + throw new TypeError(`Expected boolean, got ${typeof value}: ${value}`); +}, "expectBoolean"); +var expectNumber = /* @__PURE__ */ __name((value) => { + if (value === null || value === void 0) { + return void 0; + } + if (typeof value === "string") { + const parsed = parseFloat(value); + if (!Number.isNaN(parsed)) { + if (String(parsed) !== String(value)) { + logger.warn(stackTraceWarning(`Expected number but observed string: ${value}`)); + } + return parsed; + } + } + if (typeof value === "number") { + return value; + } + throw new TypeError(`Expected number, got ${typeof value}: ${value}`); +}, "expectNumber"); +var MAX_FLOAT = Math.ceil(2 ** 127 * (2 - 2 ** -23)); +var expectFloat32 = /* @__PURE__ */ __name((value) => { + const expected = expectNumber(value); + if (expected !== void 0 && !Number.isNaN(expected) && expected !== Infinity && expected !== -Infinity) { + if (Math.abs(expected) > MAX_FLOAT) { + throw new TypeError(`Expected 32-bit float, got ${value}`); + } + } + return expected; +}, "expectFloat32"); +var expectLong = /* @__PURE__ */ __name((value) => { + if (value === null || value === void 0) { + return void 0; + } + if (Number.isInteger(value) && !Number.isNaN(value)) { + return value; + } + throw new TypeError(`Expected integer, got ${typeof value}: ${value}`); +}, "expectLong"); +var expectInt = expectLong; +var expectInt32 = /* 
@__PURE__ */ __name((value) => expectSizedInt(value, 32), "expectInt32"); +var expectShort = /* @__PURE__ */ __name((value) => expectSizedInt(value, 16), "expectShort"); +var expectByte = /* @__PURE__ */ __name((value) => expectSizedInt(value, 8), "expectByte"); +var expectSizedInt = /* @__PURE__ */ __name((value, size) => { + const expected = expectLong(value); + if (expected !== void 0 && castInt(expected, size) !== expected) { + throw new TypeError(`Expected ${size}-bit integer, got ${value}`); + } + return expected; +}, "expectSizedInt"); +var castInt = /* @__PURE__ */ __name((value, size) => { + switch (size) { + case 32: + return Int32Array.of(value)[0]; + case 16: + return Int16Array.of(value)[0]; + case 8: + return Int8Array.of(value)[0]; + } +}, "castInt"); +var expectNonNull = /* @__PURE__ */ __name((value, location) => { + if (value === null || value === void 0) { + if (location) { + throw new TypeError(`Expected a non-null value for ${location}`); + } + throw new TypeError("Expected a non-null value"); + } + return value; +}, "expectNonNull"); +var expectObject = /* @__PURE__ */ __name((value) => { + if (value === null || value === void 0) { + return void 0; + } + if (typeof value === "object" && !Array.isArray(value)) { + return value; + } + const receivedType = Array.isArray(value) ? 
"array" : typeof value; + throw new TypeError(`Expected object, got ${receivedType}: ${value}`); +}, "expectObject"); +var expectString = /* @__PURE__ */ __name((value) => { + if (value === null || value === void 0) { + return void 0; + } + if (typeof value === "string") { + return value; + } + if (["boolean", "number", "bigint"].includes(typeof value)) { + logger.warn(stackTraceWarning(`Expected string, got ${typeof value}: ${value}`)); + return String(value); + } + throw new TypeError(`Expected string, got ${typeof value}: ${value}`); +}, "expectString"); +var expectUnion = /* @__PURE__ */ __name((value) => { + if (value === null || value === void 0) { + return void 0; + } + const asObject = expectObject(value); + const setKeys = Object.entries(asObject).filter(([, v]) => v != null).map(([k]) => k); + if (setKeys.length === 0) { + throw new TypeError(`Unions must have exactly one non-null member. None were found.`); + } + if (setKeys.length > 1) { + throw new TypeError(`Unions must have exactly one non-null member. 
Keys ${setKeys} were not null.`); + } + return asObject; +}, "expectUnion"); +var strictParseDouble = /* @__PURE__ */ __name((value) => { + if (typeof value == "string") { + return expectNumber(parseNumber(value)); + } + return expectNumber(value); +}, "strictParseDouble"); +var strictParseFloat = strictParseDouble; +var strictParseFloat32 = /* @__PURE__ */ __name((value) => { + if (typeof value == "string") { + return expectFloat32(parseNumber(value)); + } + return expectFloat32(value); +}, "strictParseFloat32"); +var NUMBER_REGEX = /(-?(?:0|[1-9]\d*)(?:\.\d+)?(?:[eE][+-]?\d+)?)|(-?Infinity)|(NaN)/g; +var parseNumber = /* @__PURE__ */ __name((value) => { + const matches = value.match(NUMBER_REGEX); + if (matches === null || matches[0].length !== value.length) { + throw new TypeError(`Expected real number, got implicit NaN`); + } + return parseFloat(value); +}, "parseNumber"); +var limitedParseDouble = /* @__PURE__ */ __name((value) => { + if (typeof value == "string") { + return parseFloatString(value); + } + return expectNumber(value); +}, "limitedParseDouble"); +var handleFloat = limitedParseDouble; +var limitedParseFloat = limitedParseDouble; +var limitedParseFloat32 = /* @__PURE__ */ __name((value) => { + if (typeof value == "string") { + return parseFloatString(value); + } + return expectFloat32(value); +}, "limitedParseFloat32"); +var parseFloatString = /* @__PURE__ */ __name((value) => { + switch (value) { + case "NaN": + return NaN; + case "Infinity": + return Infinity; + case "-Infinity": + return -Infinity; + default: + throw new Error(`Unable to parse float value: ${value}`); + } +}, "parseFloatString"); +var strictParseLong = /* @__PURE__ */ __name((value) => { + if (typeof value === "string") { + return expectLong(parseNumber(value)); + } + return expectLong(value); +}, "strictParseLong"); +var strictParseInt = strictParseLong; +var strictParseInt32 = /* @__PURE__ */ __name((value) => { + if (typeof value === "string") { + return 
expectInt32(parseNumber(value)); + } + return expectInt32(value); +}, "strictParseInt32"); +var strictParseShort = /* @__PURE__ */ __name((value) => { + if (typeof value === "string") { + return expectShort(parseNumber(value)); + } + return expectShort(value); +}, "strictParseShort"); +var strictParseByte = /* @__PURE__ */ __name((value) => { + if (typeof value === "string") { + return expectByte(parseNumber(value)); + } + return expectByte(value); +}, "strictParseByte"); +var stackTraceWarning = /* @__PURE__ */ __name((message) => { + return String(new TypeError(message).stack || message).split("\n").slice(0, 5).filter((s) => !s.includes("stackTraceWarning")).join("\n"); +}, "stackTraceWarning"); +var logger = { + warn: console.warn +}; + +// src/date-utils.ts +var DAYS = ["Sun", "Mon", "Tue", "Wed", "Thu", "Fri", "Sat"]; +var MONTHS = ["Jan", "Feb", "Mar", "Apr", "May", "Jun", "Jul", "Aug", "Sep", "Oct", "Nov", "Dec"]; +function dateToUtcString(date) { + const year = date.getUTCFullYear(); + const month = date.getUTCMonth(); + const dayOfWeek = date.getUTCDay(); + const dayOfMonthInt = date.getUTCDate(); + const hoursInt = date.getUTCHours(); + const minutesInt = date.getUTCMinutes(); + const secondsInt = date.getUTCSeconds(); + const dayOfMonthString = dayOfMonthInt < 10 ? `0${dayOfMonthInt}` : `${dayOfMonthInt}`; + const hoursString = hoursInt < 10 ? `0${hoursInt}` : `${hoursInt}`; + const minutesString = minutesInt < 10 ? `0${minutesInt}` : `${minutesInt}`; + const secondsString = secondsInt < 10 ? 
`0${secondsInt}` : `${secondsInt}`; + return `${DAYS[dayOfWeek]}, ${dayOfMonthString} ${MONTHS[month]} ${year} ${hoursString}:${minutesString}:${secondsString} GMT`; +} +__name(dateToUtcString, "dateToUtcString"); +var RFC3339 = new RegExp(/^(\d{4})-(\d{2})-(\d{2})[tT](\d{2}):(\d{2}):(\d{2})(?:\.(\d+))?[zZ]$/); +var parseRfc3339DateTime = /* @__PURE__ */ __name((value) => { + if (value === null || value === void 0) { + return void 0; + } + if (typeof value !== "string") { + throw new TypeError("RFC-3339 date-times must be expressed as strings"); + } + const match = RFC3339.exec(value); + if (!match) { + throw new TypeError("Invalid RFC-3339 date-time value"); + } + const [_, yearStr, monthStr, dayStr, hours, minutes, seconds, fractionalMilliseconds] = match; + const year = strictParseShort(stripLeadingZeroes(yearStr)); + const month = parseDateValue(monthStr, "month", 1, 12); + const day = parseDateValue(dayStr, "day", 1, 31); + return buildDate(year, month, day, { hours, minutes, seconds, fractionalMilliseconds }); +}, "parseRfc3339DateTime"); +var RFC3339_WITH_OFFSET = new RegExp( + /^(\d{4})-(\d{2})-(\d{2})[tT](\d{2}):(\d{2}):(\d{2})(?:\.(\d+))?(([-+]\d{2}\:\d{2})|[zZ])$/ +); +var parseRfc3339DateTimeWithOffset = /* @__PURE__ */ __name((value) => { + if (value === null || value === void 0) { + return void 0; + } + if (typeof value !== "string") { + throw new TypeError("RFC-3339 date-times must be expressed as strings"); + } + const match = RFC3339_WITH_OFFSET.exec(value); + if (!match) { + throw new TypeError("Invalid RFC-3339 date-time value"); + } + const [_, yearStr, monthStr, dayStr, hours, minutes, seconds, fractionalMilliseconds, offsetStr] = match; + const year = strictParseShort(stripLeadingZeroes(yearStr)); + const month = parseDateValue(monthStr, "month", 1, 12); + const day = parseDateValue(dayStr, "day", 1, 31); + const date = buildDate(year, month, day, { hours, minutes, seconds, fractionalMilliseconds }); + if (offsetStr.toUpperCase() != "Z") { + 
date.setTime(date.getTime() - parseOffsetToMilliseconds(offsetStr)); + } + return date; +}, "parseRfc3339DateTimeWithOffset"); +var IMF_FIXDATE = new RegExp( + /^(?:Mon|Tue|Wed|Thu|Fri|Sat|Sun), (\d{2}) (Jan|Feb|Mar|Apr|May|Jun|Jul|Aug|Sep|Oct|Nov|Dec) (\d{4}) (\d{1,2}):(\d{2}):(\d{2})(?:\.(\d+))? GMT$/ +); +var RFC_850_DATE = new RegExp( + /^(?:Monday|Tuesday|Wednesday|Thursday|Friday|Saturday|Sunday), (\d{2})-(Jan|Feb|Mar|Apr|May|Jun|Jul|Aug|Sep|Oct|Nov|Dec)-(\d{2}) (\d{1,2}):(\d{2}):(\d{2})(?:\.(\d+))? GMT$/ +); +var ASC_TIME = new RegExp( + /^(?:Mon|Tue|Wed|Thu|Fri|Sat|Sun) (Jan|Feb|Mar|Apr|May|Jun|Jul|Aug|Sep|Oct|Nov|Dec) ( [1-9]|\d{2}) (\d{1,2}):(\d{2}):(\d{2})(?:\.(\d+))? (\d{4})$/ +); +var parseRfc7231DateTime = /* @__PURE__ */ __name((value) => { + if (value === null || value === void 0) { + return void 0; + } + if (typeof value !== "string") { + throw new TypeError("RFC-7231 date-times must be expressed as strings"); + } + let match = IMF_FIXDATE.exec(value); + if (match) { + const [_, dayStr, monthStr, yearStr, hours, minutes, seconds, fractionalMilliseconds] = match; + return buildDate( + strictParseShort(stripLeadingZeroes(yearStr)), + parseMonthByShortName(monthStr), + parseDateValue(dayStr, "day", 1, 31), + { hours, minutes, seconds, fractionalMilliseconds } + ); + } + match = RFC_850_DATE.exec(value); + if (match) { + const [_, dayStr, monthStr, yearStr, hours, minutes, seconds, fractionalMilliseconds] = match; + return adjustRfc850Year( + buildDate(parseTwoDigitYear(yearStr), parseMonthByShortName(monthStr), parseDateValue(dayStr, "day", 1, 31), { + hours, + minutes, + seconds, + fractionalMilliseconds + }) + ); + } + match = ASC_TIME.exec(value); + if (match) { + const [_, monthStr, dayStr, hours, minutes, seconds, fractionalMilliseconds, yearStr] = match; + return buildDate( + strictParseShort(stripLeadingZeroes(yearStr)), + parseMonthByShortName(monthStr), + parseDateValue(dayStr.trimLeft(), "day", 1, 31), + { hours, minutes, seconds, 
fractionalMilliseconds } + ); + } + throw new TypeError("Invalid RFC-7231 date-time value"); +}, "parseRfc7231DateTime"); +var parseEpochTimestamp = /* @__PURE__ */ __name((value) => { + if (value === null || value === void 0) { + return void 0; + } + let valueAsDouble; + if (typeof value === "number") { + valueAsDouble = value; + } else if (typeof value === "string") { + valueAsDouble = strictParseDouble(value); + } else if (typeof value === "object" && value.tag === 1) { + valueAsDouble = value.value; + } else { + throw new TypeError("Epoch timestamps must be expressed as floating point numbers or their string representation"); + } + if (Number.isNaN(valueAsDouble) || valueAsDouble === Infinity || valueAsDouble === -Infinity) { + throw new TypeError("Epoch timestamps must be valid, non-Infinite, non-NaN numerics"); + } + return new Date(Math.round(valueAsDouble * 1e3)); +}, "parseEpochTimestamp"); +var buildDate = /* @__PURE__ */ __name((year, month, day, time) => { + const adjustedMonth = month - 1; + validateDayOfMonth(year, adjustedMonth, day); + return new Date( + Date.UTC( + year, + adjustedMonth, + day, + parseDateValue(time.hours, "hour", 0, 23), + parseDateValue(time.minutes, "minute", 0, 59), + // seconds can go up to 60 for leap seconds + parseDateValue(time.seconds, "seconds", 0, 60), + parseMilliseconds(time.fractionalMilliseconds) + ) + ); +}, "buildDate"); +var parseTwoDigitYear = /* @__PURE__ */ __name((value) => { + const thisYear = (/* @__PURE__ */ new Date()).getUTCFullYear(); + const valueInThisCentury = Math.floor(thisYear / 100) * 100 + strictParseShort(stripLeadingZeroes(value)); + if (valueInThisCentury < thisYear) { + return valueInThisCentury + 100; + } + return valueInThisCentury; +}, "parseTwoDigitYear"); +var FIFTY_YEARS_IN_MILLIS = 50 * 365 * 24 * 60 * 60 * 1e3; +var adjustRfc850Year = /* @__PURE__ */ __name((input) => { + if (input.getTime() - (/* @__PURE__ */ new Date()).getTime() > FIFTY_YEARS_IN_MILLIS) { + return new Date( + 
Date.UTC( + input.getUTCFullYear() - 100, + input.getUTCMonth(), + input.getUTCDate(), + input.getUTCHours(), + input.getUTCMinutes(), + input.getUTCSeconds(), + input.getUTCMilliseconds() + ) + ); + } + return input; +}, "adjustRfc850Year"); +var parseMonthByShortName = /* @__PURE__ */ __name((value) => { + const monthIdx = MONTHS.indexOf(value); + if (monthIdx < 0) { + throw new TypeError(`Invalid month: ${value}`); + } + return monthIdx + 1; +}, "parseMonthByShortName"); +var DAYS_IN_MONTH = [31, 28, 31, 30, 31, 30, 31, 31, 30, 31, 30, 31]; +var validateDayOfMonth = /* @__PURE__ */ __name((year, month, day) => { + let maxDays = DAYS_IN_MONTH[month]; + if (month === 1 && isLeapYear(year)) { + maxDays = 29; + } + if (day > maxDays) { + throw new TypeError(`Invalid day for ${MONTHS[month]} in ${year}: ${day}`); + } +}, "validateDayOfMonth"); +var isLeapYear = /* @__PURE__ */ __name((year) => { + return year % 4 === 0 && (year % 100 !== 0 || year % 400 === 0); +}, "isLeapYear"); +var parseDateValue = /* @__PURE__ */ __name((value, type, lower, upper) => { + const dateVal = strictParseByte(stripLeadingZeroes(value)); + if (dateVal < lower || dateVal > upper) { + throw new TypeError(`${type} must be between ${lower} and ${upper}, inclusive`); + } + return dateVal; +}, "parseDateValue"); +var parseMilliseconds = /* @__PURE__ */ __name((value) => { + if (value === null || value === void 0) { + return 0; + } + return strictParseFloat32("0." 
+ value) * 1e3; +}, "parseMilliseconds"); +var parseOffsetToMilliseconds = /* @__PURE__ */ __name((value) => { + const directionStr = value[0]; + let direction = 1; + if (directionStr == "+") { + direction = 1; + } else if (directionStr == "-") { + direction = -1; + } else { + throw new TypeError(`Offset direction, ${directionStr}, must be "+" or "-"`); + } + const hour = Number(value.substring(1, 3)); + const minute = Number(value.substring(4, 6)); + return direction * (hour * 60 + minute) * 60 * 1e3; +}, "parseOffsetToMilliseconds"); +var stripLeadingZeroes = /* @__PURE__ */ __name((value) => { + let idx = 0; + while (idx < value.length - 1 && value.charAt(idx) === "0") { + idx++; + } + if (idx === 0) { + return value; + } + return value.slice(idx); +}, "stripLeadingZeroes"); + +// src/exceptions.ts +var ServiceException = class _ServiceException extends Error { + static { + __name(this, "ServiceException"); + } + constructor(options) { + super(options.message); + Object.setPrototypeOf(this, Object.getPrototypeOf(this).constructor.prototype); + this.name = options.name; + this.$fault = options.$fault; + this.$metadata = options.$metadata; + } + /** + * Checks if a value is an instance of ServiceException (duck typed) + */ + static isInstance(value) { + if (!value) + return false; + const candidate = value; + return _ServiceException.prototype.isPrototypeOf(candidate) || Boolean(candidate.$fault) && Boolean(candidate.$metadata) && (candidate.$fault === "client" || candidate.$fault === "server"); + } + /** + * Custom instanceof check to support the operator for ServiceException base class + */ + static [Symbol.hasInstance](instance) { + if (!instance) + return false; + const candidate = instance; + if (this === _ServiceException) { + return _ServiceException.isInstance(instance); + } + if (_ServiceException.isInstance(instance)) { + if (candidate.name && this.name) { + return this.prototype.isPrototypeOf(instance) || candidate.name === this.name; + } + return 
this.prototype.isPrototypeOf(instance); + } + return false; + } +}; +var decorateServiceException = /* @__PURE__ */ __name((exception, additions = {}) => { + Object.entries(additions).filter(([, v]) => v !== void 0).forEach(([k, v]) => { + if (exception[k] == void 0 || exception[k] === "") { + exception[k] = v; + } + }); + const message = exception.message || exception.Message || "UnknownError"; + exception.message = message; + delete exception.Message; + return exception; +}, "decorateServiceException"); + +// src/default-error-handler.ts +var throwDefaultError = /* @__PURE__ */ __name(({ output, parsedBody, exceptionCtor, errorCode }) => { + const $metadata = deserializeMetadata(output); + const statusCode = $metadata.httpStatusCode ? $metadata.httpStatusCode + "" : void 0; + const response = new exceptionCtor({ + name: parsedBody?.code || parsedBody?.Code || errorCode || statusCode || "UnknownError", + $fault: "client", + $metadata + }); + throw decorateServiceException(response, parsedBody); +}, "throwDefaultError"); +var withBaseException = /* @__PURE__ */ __name((ExceptionCtor) => { + return ({ output, parsedBody, errorCode }) => { + throwDefaultError({ output, parsedBody, exceptionCtor: ExceptionCtor, errorCode }); + }; +}, "withBaseException"); +var deserializeMetadata = /* @__PURE__ */ __name((output) => ({ + httpStatusCode: output.statusCode, + requestId: output.headers["x-amzn-requestid"] ?? output.headers["x-amzn-request-id"] ?? 
output.headers["x-amz-request-id"], + extendedRequestId: output.headers["x-amz-id-2"], + cfId: output.headers["x-amz-cf-id"] +}), "deserializeMetadata"); + +// src/defaults-mode.ts +var loadConfigsForDefaultMode = /* @__PURE__ */ __name((mode) => { + switch (mode) { + case "standard": + return { + retryMode: "standard", + connectionTimeout: 3100 + }; + case "in-region": + return { + retryMode: "standard", + connectionTimeout: 1100 + }; + case "cross-region": + return { + retryMode: "standard", + connectionTimeout: 3100 + }; + case "mobile": + return { + retryMode: "standard", + connectionTimeout: 3e4 + }; + default: + return {}; + } +}, "loadConfigsForDefaultMode"); + +// src/emitWarningIfUnsupportedVersion.ts +var warningEmitted = false; +var emitWarningIfUnsupportedVersion = /* @__PURE__ */ __name((version) => { + if (version && !warningEmitted && parseInt(version.substring(1, version.indexOf("."))) < 16) { + warningEmitted = true; + } +}, "emitWarningIfUnsupportedVersion"); + +// src/extended-encode-uri-component.ts + + +// src/extensions/checksum.ts + +var getChecksumConfiguration = /* @__PURE__ */ __name((runtimeConfig) => { + const checksumAlgorithms = []; + for (const id in import_types.AlgorithmId) { + const algorithmId = import_types.AlgorithmId[id]; + if (runtimeConfig[algorithmId] === void 0) { + continue; + } + checksumAlgorithms.push({ + algorithmId: () => algorithmId, + checksumConstructor: () => runtimeConfig[algorithmId] + }); + } + return { + addChecksumAlgorithm(algo) { + checksumAlgorithms.push(algo); + }, + checksumAlgorithms() { + return checksumAlgorithms; + } + }; +}, "getChecksumConfiguration"); +var resolveChecksumRuntimeConfig = /* @__PURE__ */ __name((clientConfig) => { + const runtimeConfig = {}; + clientConfig.checksumAlgorithms().forEach((checksumAlgorithm) => { + runtimeConfig[checksumAlgorithm.algorithmId()] = checksumAlgorithm.checksumConstructor(); + }); + return runtimeConfig; +}, "resolveChecksumRuntimeConfig"); + +// 
src/extensions/retry.ts +var getRetryConfiguration = /* @__PURE__ */ __name((runtimeConfig) => { + return { + setRetryStrategy(retryStrategy) { + runtimeConfig.retryStrategy = retryStrategy; + }, + retryStrategy() { + return runtimeConfig.retryStrategy; + } + }; +}, "getRetryConfiguration"); +var resolveRetryRuntimeConfig = /* @__PURE__ */ __name((retryStrategyConfiguration) => { + const runtimeConfig = {}; + runtimeConfig.retryStrategy = retryStrategyConfiguration.retryStrategy(); + return runtimeConfig; +}, "resolveRetryRuntimeConfig"); + +// src/extensions/defaultExtensionConfiguration.ts +var getDefaultExtensionConfiguration = /* @__PURE__ */ __name((runtimeConfig) => { + return Object.assign(getChecksumConfiguration(runtimeConfig), getRetryConfiguration(runtimeConfig)); +}, "getDefaultExtensionConfiguration"); +var getDefaultClientConfiguration = getDefaultExtensionConfiguration; +var resolveDefaultRuntimeConfig = /* @__PURE__ */ __name((config) => { + return Object.assign(resolveChecksumRuntimeConfig(config), resolveRetryRuntimeConfig(config)); +}, "resolveDefaultRuntimeConfig"); + +// src/get-array-if-single-item.ts +var getArrayIfSingleItem = /* @__PURE__ */ __name((mayBeArray) => Array.isArray(mayBeArray) ? 
mayBeArray : [mayBeArray], "getArrayIfSingleItem"); + +// src/get-value-from-text-node.ts +var getValueFromTextNode = /* @__PURE__ */ __name((obj) => { + const textNodeName = "#text"; + for (const key in obj) { + if (obj.hasOwnProperty(key) && obj[key][textNodeName] !== void 0) { + obj[key] = obj[key][textNodeName]; + } else if (typeof obj[key] === "object" && obj[key] !== null) { + obj[key] = getValueFromTextNode(obj[key]); + } + } + return obj; +}, "getValueFromTextNode"); + +// src/is-serializable-header-value.ts +var isSerializableHeaderValue = /* @__PURE__ */ __name((value) => { + return value != null; +}, "isSerializableHeaderValue"); + +// src/lazy-json.ts +var LazyJsonString = /* @__PURE__ */ __name(function LazyJsonString2(val) { + const str = Object.assign(new String(val), { + deserializeJSON() { + return JSON.parse(String(val)); + }, + toString() { + return String(val); + }, + toJSON() { + return String(val); + } + }); + return str; +}, "LazyJsonString"); +LazyJsonString.from = (object) => { + if (object && typeof object === "object" && (object instanceof LazyJsonString || "deserializeJSON" in object)) { + return object; + } else if (typeof object === "string" || Object.getPrototypeOf(object) === String.prototype) { + return LazyJsonString(String(object)); + } + return LazyJsonString(JSON.stringify(object)); +}; +LazyJsonString.fromObject = LazyJsonString.from; + +// src/NoOpLogger.ts +var NoOpLogger = class { + static { + __name(this, "NoOpLogger"); + } + trace() { + } + debug() { + } + info() { + } + warn() { + } + error() { + } +}; + +// src/object-mapping.ts +function map(arg0, arg1, arg2) { + let target; + let filter; + let instructions; + if (typeof arg1 === "undefined" && typeof arg2 === "undefined") { + target = {}; + instructions = arg0; + } else { + target = arg0; + if (typeof arg1 === "function") { + filter = arg1; + instructions = arg2; + return mapWithFilter(target, filter, instructions); + } else { + instructions = arg1; + } + } + for 
(const key of Object.keys(instructions)) { + if (!Array.isArray(instructions[key])) { + target[key] = instructions[key]; + continue; + } + applyInstruction(target, null, instructions, key); + } + return target; +} +__name(map, "map"); +var convertMap = /* @__PURE__ */ __name((target) => { + const output = {}; + for (const [k, v] of Object.entries(target || {})) { + output[k] = [, v]; + } + return output; +}, "convertMap"); +var take = /* @__PURE__ */ __name((source, instructions) => { + const out = {}; + for (const key in instructions) { + applyInstruction(out, source, instructions, key); + } + return out; +}, "take"); +var mapWithFilter = /* @__PURE__ */ __name((target, filter, instructions) => { + return map( + target, + Object.entries(instructions).reduce( + (_instructions, [key, value]) => { + if (Array.isArray(value)) { + _instructions[key] = value; + } else { + if (typeof value === "function") { + _instructions[key] = [filter, value()]; + } else { + _instructions[key] = [filter, value]; + } + } + return _instructions; + }, + {} + ) + ); +}, "mapWithFilter"); +var applyInstruction = /* @__PURE__ */ __name((target, source, instructions, targetKey) => { + if (source !== null) { + let instruction = instructions[targetKey]; + if (typeof instruction === "function") { + instruction = [, instruction]; + } + const [filter2 = nonNullish, valueFn = pass, sourceKey = targetKey] = instruction; + if (typeof filter2 === "function" && filter2(source[sourceKey]) || typeof filter2 !== "function" && !!filter2) { + target[targetKey] = valueFn(source[sourceKey]); + } + return; + } + let [filter, value] = instructions[targetKey]; + if (typeof value === "function") { + let _value; + const defaultFilterPassed = filter === void 0 && (_value = value()) != null; + const customFilterPassed = typeof filter === "function" && !!filter(void 0) || typeof filter !== "function" && !!filter; + if (defaultFilterPassed) { + target[targetKey] = _value; + } else if (customFilterPassed) { + 
target[targetKey] = value(); + } + } else { + const defaultFilterPassed = filter === void 0 && value != null; + const customFilterPassed = typeof filter === "function" && !!filter(value) || typeof filter !== "function" && !!filter; + if (defaultFilterPassed || customFilterPassed) { + target[targetKey] = value; + } + } +}, "applyInstruction"); +var nonNullish = /* @__PURE__ */ __name((_) => _ != null, "nonNullish"); +var pass = /* @__PURE__ */ __name((_) => _, "pass"); + +// src/quote-header.ts +function quoteHeader(part) { + if (part.includes(",") || part.includes('"')) { + part = `"${part.replace(/"/g, '\\"')}"`; + } + return part; +} +__name(quoteHeader, "quoteHeader"); + +// src/resolve-path.ts + + +// src/ser-utils.ts +var serializeFloat = /* @__PURE__ */ __name((value) => { + if (value !== value) { + return "NaN"; + } + switch (value) { + case Infinity: + return "Infinity"; + case -Infinity: + return "-Infinity"; + default: + return value; + } +}, "serializeFloat"); +var serializeDateTime = /* @__PURE__ */ __name((date) => date.toISOString().replace(".000Z", "Z"), "serializeDateTime"); + +// src/serde-json.ts +var _json = /* @__PURE__ */ __name((obj) => { + if (obj == null) { + return {}; + } + if (Array.isArray(obj)) { + return obj.filter((_) => _ != null).map(_json); + } + if (typeof obj === "object") { + const target = {}; + for (const key of Object.keys(obj)) { + if (obj[key] == null) { + continue; + } + target[key] = _json(obj[key]); + } + return target; + } + return obj; +}, "_json"); + +// src/split-every.ts +function splitEvery(value, delimiter, numDelimiters) { + if (numDelimiters <= 0 || !Number.isInteger(numDelimiters)) { + throw new Error("Invalid number of delimiters (" + numDelimiters + ") for splitEvery."); + } + const segments = value.split(delimiter); + if (numDelimiters === 1) { + return segments; + } + const compoundSegments = []; + let currentSegment = ""; + for (let i = 0; i < segments.length; i++) { + if (currentSegment === "") { + 
currentSegment = segments[i]; + } else { + currentSegment += delimiter + segments[i]; + } + if ((i + 1) % numDelimiters === 0) { + compoundSegments.push(currentSegment); + currentSegment = ""; + } + } + if (currentSegment !== "") { + compoundSegments.push(currentSegment); + } + return compoundSegments; +} +__name(splitEvery, "splitEvery"); + +// src/split-header.ts +var splitHeader = /* @__PURE__ */ __name((value) => { + const z = value.length; + const values = []; + let withinQuotes = false; + let prevChar = void 0; + let anchor = 0; + for (let i = 0; i < z; ++i) { + const char = value[i]; + switch (char) { + case `"`: + if (prevChar !== "\\") { + withinQuotes = !withinQuotes; + } + break; + case ",": + if (!withinQuotes) { + values.push(value.slice(anchor, i)); + anchor = i + 1; + } + break; + default: + } + prevChar = char; + } + values.push(value.slice(anchor)); + return values.map((v) => { + v = v.trim(); + const z2 = v.length; + if (z2 < 2) { + return v; + } + if (v[0] === `"` && v[z2 - 1] === `"`) { + v = v.slice(1, z2 - 1); + } + return v.replace(/\\"/g, '"'); + }); +}, "splitHeader"); +// Annotate the CommonJS export names for ESM import in node: + +0 && (module.exports = { + Client, + collectBody, + Command, + SENSITIVE_STRING, + createAggregatedClient, + dateToUtcString, + parseRfc3339DateTime, + parseRfc3339DateTimeWithOffset, + parseRfc7231DateTime, + parseEpochTimestamp, + throwDefaultError, + withBaseException, + loadConfigsForDefaultMode, + emitWarningIfUnsupportedVersion, + ServiceException, + decorateServiceException, + extendedEncodeURIComponent, + getDefaultExtensionConfiguration, + getDefaultClientConfiguration, + resolveDefaultRuntimeConfig, + getArrayIfSingleItem, + getValueFromTextNode, + isSerializableHeaderValue, + LazyJsonString, + NoOpLogger, + map, + convertMap, + take, + parseBoolean, + expectBoolean, + expectNumber, + expectFloat32, + expectLong, + expectInt, + expectInt32, + expectShort, + expectByte, + expectNonNull, + expectObject, 
+ expectString, + expectUnion, + strictParseDouble, + strictParseFloat, + strictParseFloat32, + limitedParseDouble, + handleFloat, + limitedParseFloat, + limitedParseFloat32, + strictParseLong, + strictParseInt, + strictParseInt32, + strictParseShort, + strictParseByte, + logger, + quoteHeader, + resolvedPath, + serializeFloat, + serializeDateTime, + _json, + splitEvery, + splitHeader +}); + diff --git a/amplify/functions/fetchDocuments/node_modules/@smithy/smithy-client/dist-cjs/is-serializable-header-value.js b/amplify/functions/fetchDocuments/node_modules/@smithy/smithy-client/dist-cjs/is-serializable-header-value.js new file mode 100644 index 0000000..532e610 --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@smithy/smithy-client/dist-cjs/is-serializable-header-value.js @@ -0,0 +1 @@ +module.exports = require("./index.js"); \ No newline at end of file diff --git a/amplify/functions/fetchDocuments/node_modules/@smithy/smithy-client/dist-cjs/lazy-json.js b/amplify/functions/fetchDocuments/node_modules/@smithy/smithy-client/dist-cjs/lazy-json.js new file mode 100644 index 0000000..532e610 --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@smithy/smithy-client/dist-cjs/lazy-json.js @@ -0,0 +1 @@ +module.exports = require("./index.js"); \ No newline at end of file diff --git a/amplify/functions/fetchDocuments/node_modules/@smithy/smithy-client/dist-cjs/object-mapping.js b/amplify/functions/fetchDocuments/node_modules/@smithy/smithy-client/dist-cjs/object-mapping.js new file mode 100644 index 0000000..532e610 --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@smithy/smithy-client/dist-cjs/object-mapping.js @@ -0,0 +1 @@ +module.exports = require("./index.js"); \ No newline at end of file diff --git a/amplify/functions/fetchDocuments/node_modules/@smithy/smithy-client/dist-cjs/parse-utils.js b/amplify/functions/fetchDocuments/node_modules/@smithy/smithy-client/dist-cjs/parse-utils.js new file mode 100644 index 
0000000..532e610 --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@smithy/smithy-client/dist-cjs/parse-utils.js @@ -0,0 +1 @@ +module.exports = require("./index.js"); \ No newline at end of file diff --git a/amplify/functions/fetchDocuments/node_modules/@smithy/smithy-client/dist-cjs/quote-header.js b/amplify/functions/fetchDocuments/node_modules/@smithy/smithy-client/dist-cjs/quote-header.js new file mode 100644 index 0000000..532e610 --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@smithy/smithy-client/dist-cjs/quote-header.js @@ -0,0 +1 @@ +module.exports = require("./index.js"); \ No newline at end of file diff --git a/amplify/functions/fetchDocuments/node_modules/@smithy/smithy-client/dist-cjs/resolve-path.js b/amplify/functions/fetchDocuments/node_modules/@smithy/smithy-client/dist-cjs/resolve-path.js new file mode 100644 index 0000000..532e610 --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@smithy/smithy-client/dist-cjs/resolve-path.js @@ -0,0 +1 @@ +module.exports = require("./index.js"); \ No newline at end of file diff --git a/amplify/functions/fetchDocuments/node_modules/@smithy/smithy-client/dist-cjs/ser-utils.js b/amplify/functions/fetchDocuments/node_modules/@smithy/smithy-client/dist-cjs/ser-utils.js new file mode 100644 index 0000000..532e610 --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@smithy/smithy-client/dist-cjs/ser-utils.js @@ -0,0 +1 @@ +module.exports = require("./index.js"); \ No newline at end of file diff --git a/amplify/functions/fetchDocuments/node_modules/@smithy/smithy-client/dist-cjs/serde-json.js b/amplify/functions/fetchDocuments/node_modules/@smithy/smithy-client/dist-cjs/serde-json.js new file mode 100644 index 0000000..532e610 --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@smithy/smithy-client/dist-cjs/serde-json.js @@ -0,0 +1 @@ +module.exports = require("./index.js"); \ No newline at end of file diff --git 
a/amplify/functions/fetchDocuments/node_modules/@smithy/smithy-client/dist-cjs/split-every.js b/amplify/functions/fetchDocuments/node_modules/@smithy/smithy-client/dist-cjs/split-every.js new file mode 100644 index 0000000..532e610 --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@smithy/smithy-client/dist-cjs/split-every.js @@ -0,0 +1 @@ +module.exports = require("./index.js"); \ No newline at end of file diff --git a/amplify/functions/fetchDocuments/node_modules/@smithy/smithy-client/dist-cjs/split-header.js b/amplify/functions/fetchDocuments/node_modules/@smithy/smithy-client/dist-cjs/split-header.js new file mode 100644 index 0000000..532e610 --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@smithy/smithy-client/dist-cjs/split-header.js @@ -0,0 +1 @@ +module.exports = require("./index.js"); \ No newline at end of file diff --git a/amplify/functions/fetchDocuments/node_modules/@smithy/smithy-client/dist-es/NoOpLogger.js b/amplify/functions/fetchDocuments/node_modules/@smithy/smithy-client/dist-es/NoOpLogger.js new file mode 100644 index 0000000..73cd076 --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@smithy/smithy-client/dist-es/NoOpLogger.js @@ -0,0 +1,7 @@ +export class NoOpLogger { + trace() { } + debug() { } + info() { } + warn() { } + error() { } +} diff --git a/amplify/functions/fetchDocuments/node_modules/@smithy/smithy-client/dist-es/client.js b/amplify/functions/fetchDocuments/node_modules/@smithy/smithy-client/dist-es/client.js new file mode 100644 index 0000000..56cbc2e --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@smithy/smithy-client/dist-es/client.js @@ -0,0 +1,42 @@ +import { constructStack } from "@smithy/middleware-stack"; +export class Client { + constructor(config) { + this.config = config; + this.middlewareStack = constructStack(); + } + send(command, optionsOrCb, cb) { + const options = typeof optionsOrCb !== "function" ? 
optionsOrCb : undefined; + const callback = typeof optionsOrCb === "function" ? optionsOrCb : cb; + const useHandlerCache = options === undefined && this.config.cacheMiddleware === true; + let handler; + if (useHandlerCache) { + if (!this.handlers) { + this.handlers = new WeakMap(); + } + const handlers = this.handlers; + if (handlers.has(command.constructor)) { + handler = handlers.get(command.constructor); + } + else { + handler = command.resolveMiddleware(this.middlewareStack, this.config, options); + handlers.set(command.constructor, handler); + } + } + else { + delete this.handlers; + handler = command.resolveMiddleware(this.middlewareStack, this.config, options); + } + if (callback) { + handler(command) + .then((result) => callback(null, result.output), (err) => callback(err)) + .catch(() => { }); + } + else { + return handler(command).then((result) => result.output); + } + } + destroy() { + this.config?.requestHandler?.destroy?.(); + delete this.handlers; + } +} diff --git a/amplify/functions/fetchDocuments/node_modules/@smithy/smithy-client/dist-es/collect-stream-body.js b/amplify/functions/fetchDocuments/node_modules/@smithy/smithy-client/dist-es/collect-stream-body.js new file mode 100644 index 0000000..2b76f0a --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@smithy/smithy-client/dist-es/collect-stream-body.js @@ -0,0 +1 @@ +export { collectBody } from "@smithy/core/protocols"; diff --git a/amplify/functions/fetchDocuments/node_modules/@smithy/smithy-client/dist-es/command.js b/amplify/functions/fetchDocuments/node_modules/@smithy/smithy-client/dist-es/command.js new file mode 100644 index 0000000..078eecb --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@smithy/smithy-client/dist-es/command.js @@ -0,0 +1,115 @@ +import { constructStack } from "@smithy/middleware-stack"; +import { SMITHY_CONTEXT_KEY } from "@smithy/types"; +export class Command { + constructor() { + this.middlewareStack = constructStack(); + } + static 
classBuilder() { + return new ClassBuilder(); + } + resolveMiddlewareWithContext(clientStack, configuration, options, { middlewareFn, clientName, commandName, inputFilterSensitiveLog, outputFilterSensitiveLog, smithyContext, additionalContext, CommandCtor, }) { + for (const mw of middlewareFn.bind(this)(CommandCtor, clientStack, configuration, options)) { + this.middlewareStack.use(mw); + } + const stack = clientStack.concat(this.middlewareStack); + const { logger } = configuration; + const handlerExecutionContext = { + logger, + clientName, + commandName, + inputFilterSensitiveLog, + outputFilterSensitiveLog, + [SMITHY_CONTEXT_KEY]: { + commandInstance: this, + ...smithyContext, + }, + ...additionalContext, + }; + const { requestHandler } = configuration; + return stack.resolve((request) => requestHandler.handle(request.request, options || {}), handlerExecutionContext); + } +} +class ClassBuilder { + constructor() { + this._init = () => { }; + this._ep = {}; + this._middlewareFn = () => []; + this._commandName = ""; + this._clientName = ""; + this._additionalContext = {}; + this._smithyContext = {}; + this._inputFilterSensitiveLog = (_) => _; + this._outputFilterSensitiveLog = (_) => _; + this._serializer = null; + this._deserializer = null; + } + init(cb) { + this._init = cb; + } + ep(endpointParameterInstructions) { + this._ep = endpointParameterInstructions; + return this; + } + m(middlewareSupplier) { + this._middlewareFn = middlewareSupplier; + return this; + } + s(service, operation, smithyContext = {}) { + this._smithyContext = { + service, + operation, + ...smithyContext, + }; + return this; + } + c(additionalContext = {}) { + this._additionalContext = additionalContext; + return this; + } + n(clientName, commandName) { + this._clientName = clientName; + this._commandName = commandName; + return this; + } + f(inputFilter = (_) => _, outputFilter = (_) => _) { + this._inputFilterSensitiveLog = inputFilter; + this._outputFilterSensitiveLog = outputFilter; + 
return this; + } + ser(serializer) { + this._serializer = serializer; + return this; + } + de(deserializer) { + this._deserializer = deserializer; + return this; + } + build() { + const closure = this; + let CommandRef; + return (CommandRef = class extends Command { + static getEndpointParameterInstructions() { + return closure._ep; + } + constructor(...[input]) { + super(); + this.serialize = closure._serializer; + this.deserialize = closure._deserializer; + this.input = input ?? {}; + closure._init(this); + } + resolveMiddleware(stack, configuration, options) { + return this.resolveMiddlewareWithContext(stack, configuration, options, { + CommandCtor: CommandRef, + middlewareFn: closure._middlewareFn, + clientName: closure._clientName, + commandName: closure._commandName, + inputFilterSensitiveLog: closure._inputFilterSensitiveLog, + outputFilterSensitiveLog: closure._outputFilterSensitiveLog, + smithyContext: closure._smithyContext, + additionalContext: closure._additionalContext, + }); + } + }); + } +} diff --git a/amplify/functions/fetchDocuments/node_modules/@smithy/smithy-client/dist-es/constants.js b/amplify/functions/fetchDocuments/node_modules/@smithy/smithy-client/dist-es/constants.js new file mode 100644 index 0000000..9b193d7 --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@smithy/smithy-client/dist-es/constants.js @@ -0,0 +1 @@ +export const SENSITIVE_STRING = "***SensitiveInformation***"; diff --git a/amplify/functions/fetchDocuments/node_modules/@smithy/smithy-client/dist-es/create-aggregated-client.js b/amplify/functions/fetchDocuments/node_modules/@smithy/smithy-client/dist-es/create-aggregated-client.js new file mode 100644 index 0000000..44cf4dc --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@smithy/smithy-client/dist-es/create-aggregated-client.js @@ -0,0 +1,21 @@ +export const createAggregatedClient = (commands, Client) => { + for (const command of Object.keys(commands)) { + const CommandCtor = 
commands[command]; + const methodImpl = async function (args, optionsOrCb, cb) { + const command = new CommandCtor(args); + if (typeof optionsOrCb === "function") { + this.send(command, optionsOrCb); + } + else if (typeof cb === "function") { + if (typeof optionsOrCb !== "object") + throw new Error(`Expected http options but got ${typeof optionsOrCb}`); + this.send(command, optionsOrCb || {}, cb); + } + else { + return this.send(command, optionsOrCb); + } + }; + const methodName = (command[0].toLowerCase() + command.slice(1)).replace(/Command$/, ""); + Client.prototype[methodName] = methodImpl; + } +}; diff --git a/amplify/functions/fetchDocuments/node_modules/@smithy/smithy-client/dist-es/date-utils.js b/amplify/functions/fetchDocuments/node_modules/@smithy/smithy-client/dist-es/date-utils.js new file mode 100644 index 0000000..0d0abf2 --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@smithy/smithy-client/dist-es/date-utils.js @@ -0,0 +1,190 @@ +import { strictParseByte, strictParseDouble, strictParseFloat32, strictParseShort } from "./parse-utils"; +const DAYS = ["Sun", "Mon", "Tue", "Wed", "Thu", "Fri", "Sat"]; +const MONTHS = ["Jan", "Feb", "Mar", "Apr", "May", "Jun", "Jul", "Aug", "Sep", "Oct", "Nov", "Dec"]; +export function dateToUtcString(date) { + const year = date.getUTCFullYear(); + const month = date.getUTCMonth(); + const dayOfWeek = date.getUTCDay(); + const dayOfMonthInt = date.getUTCDate(); + const hoursInt = date.getUTCHours(); + const minutesInt = date.getUTCMinutes(); + const secondsInt = date.getUTCSeconds(); + const dayOfMonthString = dayOfMonthInt < 10 ? `0${dayOfMonthInt}` : `${dayOfMonthInt}`; + const hoursString = hoursInt < 10 ? `0${hoursInt}` : `${hoursInt}`; + const minutesString = minutesInt < 10 ? `0${minutesInt}` : `${minutesInt}`; + const secondsString = secondsInt < 10 ? 
`0${secondsInt}` : `${secondsInt}`; + return `${DAYS[dayOfWeek]}, ${dayOfMonthString} ${MONTHS[month]} ${year} ${hoursString}:${minutesString}:${secondsString} GMT`; +} +const RFC3339 = new RegExp(/^(\d{4})-(\d{2})-(\d{2})[tT](\d{2}):(\d{2}):(\d{2})(?:\.(\d+))?[zZ]$/); +export const parseRfc3339DateTime = (value) => { + if (value === null || value === undefined) { + return undefined; + } + if (typeof value !== "string") { + throw new TypeError("RFC-3339 date-times must be expressed as strings"); + } + const match = RFC3339.exec(value); + if (!match) { + throw new TypeError("Invalid RFC-3339 date-time value"); + } + const [_, yearStr, monthStr, dayStr, hours, minutes, seconds, fractionalMilliseconds] = match; + const year = strictParseShort(stripLeadingZeroes(yearStr)); + const month = parseDateValue(monthStr, "month", 1, 12); + const day = parseDateValue(dayStr, "day", 1, 31); + return buildDate(year, month, day, { hours, minutes, seconds, fractionalMilliseconds }); +}; +const RFC3339_WITH_OFFSET = new RegExp(/^(\d{4})-(\d{2})-(\d{2})[tT](\d{2}):(\d{2}):(\d{2})(?:\.(\d+))?(([-+]\d{2}\:\d{2})|[zZ])$/); +export const parseRfc3339DateTimeWithOffset = (value) => { + if (value === null || value === undefined) { + return undefined; + } + if (typeof value !== "string") { + throw new TypeError("RFC-3339 date-times must be expressed as strings"); + } + const match = RFC3339_WITH_OFFSET.exec(value); + if (!match) { + throw new TypeError("Invalid RFC-3339 date-time value"); + } + const [_, yearStr, monthStr, dayStr, hours, minutes, seconds, fractionalMilliseconds, offsetStr] = match; + const year = strictParseShort(stripLeadingZeroes(yearStr)); + const month = parseDateValue(monthStr, "month", 1, 12); + const day = parseDateValue(dayStr, "day", 1, 31); + const date = buildDate(year, month, day, { hours, minutes, seconds, fractionalMilliseconds }); + if (offsetStr.toUpperCase() != "Z") { + date.setTime(date.getTime() - parseOffsetToMilliseconds(offsetStr)); + } + return date; 
+}; +const IMF_FIXDATE = new RegExp(/^(?:Mon|Tue|Wed|Thu|Fri|Sat|Sun), (\d{2}) (Jan|Feb|Mar|Apr|May|Jun|Jul|Aug|Sep|Oct|Nov|Dec) (\d{4}) (\d{1,2}):(\d{2}):(\d{2})(?:\.(\d+))? GMT$/); +const RFC_850_DATE = new RegExp(/^(?:Monday|Tuesday|Wednesday|Thursday|Friday|Saturday|Sunday), (\d{2})-(Jan|Feb|Mar|Apr|May|Jun|Jul|Aug|Sep|Oct|Nov|Dec)-(\d{2}) (\d{1,2}):(\d{2}):(\d{2})(?:\.(\d+))? GMT$/); +const ASC_TIME = new RegExp(/^(?:Mon|Tue|Wed|Thu|Fri|Sat|Sun) (Jan|Feb|Mar|Apr|May|Jun|Jul|Aug|Sep|Oct|Nov|Dec) ( [1-9]|\d{2}) (\d{1,2}):(\d{2}):(\d{2})(?:\.(\d+))? (\d{4})$/); +export const parseRfc7231DateTime = (value) => { + if (value === null || value === undefined) { + return undefined; + } + if (typeof value !== "string") { + throw new TypeError("RFC-7231 date-times must be expressed as strings"); + } + let match = IMF_FIXDATE.exec(value); + if (match) { + const [_, dayStr, monthStr, yearStr, hours, minutes, seconds, fractionalMilliseconds] = match; + return buildDate(strictParseShort(stripLeadingZeroes(yearStr)), parseMonthByShortName(monthStr), parseDateValue(dayStr, "day", 1, 31), { hours, minutes, seconds, fractionalMilliseconds }); + } + match = RFC_850_DATE.exec(value); + if (match) { + const [_, dayStr, monthStr, yearStr, hours, minutes, seconds, fractionalMilliseconds] = match; + return adjustRfc850Year(buildDate(parseTwoDigitYear(yearStr), parseMonthByShortName(monthStr), parseDateValue(dayStr, "day", 1, 31), { + hours, + minutes, + seconds, + fractionalMilliseconds, + })); + } + match = ASC_TIME.exec(value); + if (match) { + const [_, monthStr, dayStr, hours, minutes, seconds, fractionalMilliseconds, yearStr] = match; + return buildDate(strictParseShort(stripLeadingZeroes(yearStr)), parseMonthByShortName(monthStr), parseDateValue(dayStr.trimLeft(), "day", 1, 31), { hours, minutes, seconds, fractionalMilliseconds }); + } + throw new TypeError("Invalid RFC-7231 date-time value"); +}; +export const parseEpochTimestamp = (value) => { + if (value === null || value === 
undefined) { + return undefined; + } + let valueAsDouble; + if (typeof value === "number") { + valueAsDouble = value; + } + else if (typeof value === "string") { + valueAsDouble = strictParseDouble(value); + } + else if (typeof value === "object" && value.tag === 1) { + valueAsDouble = value.value; + } + else { + throw new TypeError("Epoch timestamps must be expressed as floating point numbers or their string representation"); + } + if (Number.isNaN(valueAsDouble) || valueAsDouble === Infinity || valueAsDouble === -Infinity) { + throw new TypeError("Epoch timestamps must be valid, non-Infinite, non-NaN numerics"); + } + return new Date(Math.round(valueAsDouble * 1000)); +}; +const buildDate = (year, month, day, time) => { + const adjustedMonth = month - 1; + validateDayOfMonth(year, adjustedMonth, day); + return new Date(Date.UTC(year, adjustedMonth, day, parseDateValue(time.hours, "hour", 0, 23), parseDateValue(time.minutes, "minute", 0, 59), parseDateValue(time.seconds, "seconds", 0, 60), parseMilliseconds(time.fractionalMilliseconds))); +}; +const parseTwoDigitYear = (value) => { + const thisYear = new Date().getUTCFullYear(); + const valueInThisCentury = Math.floor(thisYear / 100) * 100 + strictParseShort(stripLeadingZeroes(value)); + if (valueInThisCentury < thisYear) { + return valueInThisCentury + 100; + } + return valueInThisCentury; +}; +const FIFTY_YEARS_IN_MILLIS = 50 * 365 * 24 * 60 * 60 * 1000; +const adjustRfc850Year = (input) => { + if (input.getTime() - new Date().getTime() > FIFTY_YEARS_IN_MILLIS) { + return new Date(Date.UTC(input.getUTCFullYear() - 100, input.getUTCMonth(), input.getUTCDate(), input.getUTCHours(), input.getUTCMinutes(), input.getUTCSeconds(), input.getUTCMilliseconds())); + } + return input; +}; +const parseMonthByShortName = (value) => { + const monthIdx = MONTHS.indexOf(value); + if (monthIdx < 0) { + throw new TypeError(`Invalid month: ${value}`); + } + return monthIdx + 1; +}; +const DAYS_IN_MONTH = [31, 28, 31, 30, 31, 30, 
31, 31, 30, 31, 30, 31]; +const validateDayOfMonth = (year, month, day) => { + let maxDays = DAYS_IN_MONTH[month]; + if (month === 1 && isLeapYear(year)) { + maxDays = 29; + } + if (day > maxDays) { + throw new TypeError(`Invalid day for ${MONTHS[month]} in ${year}: ${day}`); + } +}; +const isLeapYear = (year) => { + return year % 4 === 0 && (year % 100 !== 0 || year % 400 === 0); +}; +const parseDateValue = (value, type, lower, upper) => { + const dateVal = strictParseByte(stripLeadingZeroes(value)); + if (dateVal < lower || dateVal > upper) { + throw new TypeError(`${type} must be between ${lower} and ${upper}, inclusive`); + } + return dateVal; +}; +const parseMilliseconds = (value) => { + if (value === null || value === undefined) { + return 0; + } + return strictParseFloat32("0." + value) * 1000; +}; +const parseOffsetToMilliseconds = (value) => { + const directionStr = value[0]; + let direction = 1; + if (directionStr == "+") { + direction = 1; + } + else if (directionStr == "-") { + direction = -1; + } + else { + throw new TypeError(`Offset direction, ${directionStr}, must be "+" or "-"`); + } + const hour = Number(value.substring(1, 3)); + const minute = Number(value.substring(4, 6)); + return direction * (hour * 60 + minute) * 60 * 1000; +}; +const stripLeadingZeroes = (value) => { + let idx = 0; + while (idx < value.length - 1 && value.charAt(idx) === "0") { + idx++; + } + if (idx === 0) { + return value; + } + return value.slice(idx); +}; diff --git a/amplify/functions/fetchDocuments/node_modules/@smithy/smithy-client/dist-es/default-error-handler.js b/amplify/functions/fetchDocuments/node_modules/@smithy/smithy-client/dist-es/default-error-handler.js new file mode 100644 index 0000000..7da1091 --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@smithy/smithy-client/dist-es/default-error-handler.js @@ -0,0 +1,22 @@ +import { decorateServiceException } from "./exceptions"; +export const throwDefaultError = ({ output, parsedBody, 
exceptionCtor, errorCode }) => { + const $metadata = deserializeMetadata(output); + const statusCode = $metadata.httpStatusCode ? $metadata.httpStatusCode + "" : undefined; + const response = new exceptionCtor({ + name: parsedBody?.code || parsedBody?.Code || errorCode || statusCode || "UnknownError", + $fault: "client", + $metadata, + }); + throw decorateServiceException(response, parsedBody); +}; +export const withBaseException = (ExceptionCtor) => { + return ({ output, parsedBody, errorCode }) => { + throwDefaultError({ output, parsedBody, exceptionCtor: ExceptionCtor, errorCode }); + }; +}; +const deserializeMetadata = (output) => ({ + httpStatusCode: output.statusCode, + requestId: output.headers["x-amzn-requestid"] ?? output.headers["x-amzn-request-id"] ?? output.headers["x-amz-request-id"], + extendedRequestId: output.headers["x-amz-id-2"], + cfId: output.headers["x-amz-cf-id"], +}); diff --git a/amplify/functions/fetchDocuments/node_modules/@smithy/smithy-client/dist-es/defaults-mode.js b/amplify/functions/fetchDocuments/node_modules/@smithy/smithy-client/dist-es/defaults-mode.js new file mode 100644 index 0000000..f19079c --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@smithy/smithy-client/dist-es/defaults-mode.js @@ -0,0 +1,26 @@ +export const loadConfigsForDefaultMode = (mode) => { + switch (mode) { + case "standard": + return { + retryMode: "standard", + connectionTimeout: 3100, + }; + case "in-region": + return { + retryMode: "standard", + connectionTimeout: 1100, + }; + case "cross-region": + return { + retryMode: "standard", + connectionTimeout: 3100, + }; + case "mobile": + return { + retryMode: "standard", + connectionTimeout: 30000, + }; + default: + return {}; + } +}; diff --git a/amplify/functions/fetchDocuments/node_modules/@smithy/smithy-client/dist-es/emitWarningIfUnsupportedVersion.js b/amplify/functions/fetchDocuments/node_modules/@smithy/smithy-client/dist-es/emitWarningIfUnsupportedVersion.js new file mode 100644 index 
0000000..7b30893 --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@smithy/smithy-client/dist-es/emitWarningIfUnsupportedVersion.js @@ -0,0 +1,6 @@ +let warningEmitted = false; +export const emitWarningIfUnsupportedVersion = (version) => { + if (version && !warningEmitted && parseInt(version.substring(1, version.indexOf("."))) < 16) { + warningEmitted = true; + } +}; diff --git a/amplify/functions/fetchDocuments/node_modules/@smithy/smithy-client/dist-es/exceptions.js b/amplify/functions/fetchDocuments/node_modules/@smithy/smithy-client/dist-es/exceptions.js new file mode 100644 index 0000000..db6a801 --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@smithy/smithy-client/dist-es/exceptions.js @@ -0,0 +1,46 @@ +export class ServiceException extends Error { + constructor(options) { + super(options.message); + Object.setPrototypeOf(this, Object.getPrototypeOf(this).constructor.prototype); + this.name = options.name; + this.$fault = options.$fault; + this.$metadata = options.$metadata; + } + static isInstance(value) { + if (!value) + return false; + const candidate = value; + return (ServiceException.prototype.isPrototypeOf(candidate) || + (Boolean(candidate.$fault) && + Boolean(candidate.$metadata) && + (candidate.$fault === "client" || candidate.$fault === "server"))); + } + static [Symbol.hasInstance](instance) { + if (!instance) + return false; + const candidate = instance; + if (this === ServiceException) { + return ServiceException.isInstance(instance); + } + if (ServiceException.isInstance(instance)) { + if (candidate.name && this.name) { + return this.prototype.isPrototypeOf(instance) || candidate.name === this.name; + } + return this.prototype.isPrototypeOf(instance); + } + return false; + } +} +export const decorateServiceException = (exception, additions = {}) => { + Object.entries(additions) + .filter(([, v]) => v !== undefined) + .forEach(([k, v]) => { + if (exception[k] == undefined || exception[k] === "") { + exception[k] 
= v; + } + }); + const message = exception.message || exception.Message || "UnknownError"; + exception.message = message; + delete exception.Message; + return exception; +}; diff --git a/amplify/functions/fetchDocuments/node_modules/@smithy/smithy-client/dist-es/extended-encode-uri-component.js b/amplify/functions/fetchDocuments/node_modules/@smithy/smithy-client/dist-es/extended-encode-uri-component.js new file mode 100644 index 0000000..cb4f991 --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@smithy/smithy-client/dist-es/extended-encode-uri-component.js @@ -0,0 +1 @@ +export { extendedEncodeURIComponent } from "@smithy/core/protocols"; diff --git a/amplify/functions/fetchDocuments/node_modules/@smithy/smithy-client/dist-es/extensions/checksum.js b/amplify/functions/fetchDocuments/node_modules/@smithy/smithy-client/dist-es/extensions/checksum.js new file mode 100644 index 0000000..f3831ee --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@smithy/smithy-client/dist-es/extensions/checksum.js @@ -0,0 +1,30 @@ +import { AlgorithmId } from "@smithy/types"; +export { AlgorithmId }; +export const getChecksumConfiguration = (runtimeConfig) => { + const checksumAlgorithms = []; + for (const id in AlgorithmId) { + const algorithmId = AlgorithmId[id]; + if (runtimeConfig[algorithmId] === undefined) { + continue; + } + checksumAlgorithms.push({ + algorithmId: () => algorithmId, + checksumConstructor: () => runtimeConfig[algorithmId], + }); + } + return { + addChecksumAlgorithm(algo) { + checksumAlgorithms.push(algo); + }, + checksumAlgorithms() { + return checksumAlgorithms; + }, + }; +}; +export const resolveChecksumRuntimeConfig = (clientConfig) => { + const runtimeConfig = {}; + clientConfig.checksumAlgorithms().forEach((checksumAlgorithm) => { + runtimeConfig[checksumAlgorithm.algorithmId()] = checksumAlgorithm.checksumConstructor(); + }); + return runtimeConfig; +}; diff --git 
a/amplify/functions/fetchDocuments/node_modules/@smithy/smithy-client/dist-es/extensions/defaultExtensionConfiguration.js b/amplify/functions/fetchDocuments/node_modules/@smithy/smithy-client/dist-es/extensions/defaultExtensionConfiguration.js new file mode 100644 index 0000000..272cd3a --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@smithy/smithy-client/dist-es/extensions/defaultExtensionConfiguration.js @@ -0,0 +1,9 @@ +import { getChecksumConfiguration, resolveChecksumRuntimeConfig } from "./checksum"; +import { getRetryConfiguration, resolveRetryRuntimeConfig } from "./retry"; +export const getDefaultExtensionConfiguration = (runtimeConfig) => { + return Object.assign(getChecksumConfiguration(runtimeConfig), getRetryConfiguration(runtimeConfig)); +}; +export const getDefaultClientConfiguration = getDefaultExtensionConfiguration; +export const resolveDefaultRuntimeConfig = (config) => { + return Object.assign(resolveChecksumRuntimeConfig(config), resolveRetryRuntimeConfig(config)); +}; diff --git a/amplify/functions/fetchDocuments/node_modules/@smithy/smithy-client/dist-es/extensions/index.js b/amplify/functions/fetchDocuments/node_modules/@smithy/smithy-client/dist-es/extensions/index.js new file mode 100644 index 0000000..f1b8074 --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@smithy/smithy-client/dist-es/extensions/index.js @@ -0,0 +1 @@ +export * from "./defaultExtensionConfiguration"; diff --git a/amplify/functions/fetchDocuments/node_modules/@smithy/smithy-client/dist-es/extensions/retry.js b/amplify/functions/fetchDocuments/node_modules/@smithy/smithy-client/dist-es/extensions/retry.js new file mode 100644 index 0000000..2c18b0a --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@smithy/smithy-client/dist-es/extensions/retry.js @@ -0,0 +1,15 @@ +export const getRetryConfiguration = (runtimeConfig) => { + return { + setRetryStrategy(retryStrategy) { + runtimeConfig.retryStrategy = retryStrategy; + }, + 
retryStrategy() { + return runtimeConfig.retryStrategy; + }, + }; +}; +export const resolveRetryRuntimeConfig = (retryStrategyConfiguration) => { + const runtimeConfig = {}; + runtimeConfig.retryStrategy = retryStrategyConfiguration.retryStrategy(); + return runtimeConfig; +}; diff --git a/amplify/functions/fetchDocuments/node_modules/@smithy/smithy-client/dist-es/get-array-if-single-item.js b/amplify/functions/fetchDocuments/node_modules/@smithy/smithy-client/dist-es/get-array-if-single-item.js new file mode 100644 index 0000000..25d9432 --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@smithy/smithy-client/dist-es/get-array-if-single-item.js @@ -0,0 +1 @@ +export const getArrayIfSingleItem = (mayBeArray) => Array.isArray(mayBeArray) ? mayBeArray : [mayBeArray]; diff --git a/amplify/functions/fetchDocuments/node_modules/@smithy/smithy-client/dist-es/get-value-from-text-node.js b/amplify/functions/fetchDocuments/node_modules/@smithy/smithy-client/dist-es/get-value-from-text-node.js new file mode 100644 index 0000000..aa0f827 --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@smithy/smithy-client/dist-es/get-value-from-text-node.js @@ -0,0 +1,12 @@ +export const getValueFromTextNode = (obj) => { + const textNodeName = "#text"; + for (const key in obj) { + if (obj.hasOwnProperty(key) && obj[key][textNodeName] !== undefined) { + obj[key] = obj[key][textNodeName]; + } + else if (typeof obj[key] === "object" && obj[key] !== null) { + obj[key] = getValueFromTextNode(obj[key]); + } + } + return obj; +}; diff --git a/amplify/functions/fetchDocuments/node_modules/@smithy/smithy-client/dist-es/index.js b/amplify/functions/fetchDocuments/node_modules/@smithy/smithy-client/dist-es/index.js new file mode 100644 index 0000000..b05ab01 --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@smithy/smithy-client/dist-es/index.js @@ -0,0 +1,25 @@ +export * from "./client"; +export * from "./collect-stream-body"; +export * from 
"./command"; +export * from "./constants"; +export * from "./create-aggregated-client"; +export * from "./date-utils"; +export * from "./default-error-handler"; +export * from "./defaults-mode"; +export * from "./emitWarningIfUnsupportedVersion"; +export * from "./exceptions"; +export * from "./extended-encode-uri-component"; +export * from "./extensions"; +export * from "./get-array-if-single-item"; +export * from "./get-value-from-text-node"; +export * from "./is-serializable-header-value"; +export * from "./lazy-json"; +export * from "./NoOpLogger"; +export * from "./object-mapping"; +export * from "./parse-utils"; +export * from "./quote-header"; +export * from "./resolve-path"; +export * from "./ser-utils"; +export * from "./serde-json"; +export * from "./split-every"; +export * from "./split-header"; diff --git a/amplify/functions/fetchDocuments/node_modules/@smithy/smithy-client/dist-es/is-serializable-header-value.js b/amplify/functions/fetchDocuments/node_modules/@smithy/smithy-client/dist-es/is-serializable-header-value.js new file mode 100644 index 0000000..cb117ca --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@smithy/smithy-client/dist-es/is-serializable-header-value.js @@ -0,0 +1,3 @@ +export const isSerializableHeaderValue = (value) => { + return value != null; +}; diff --git a/amplify/functions/fetchDocuments/node_modules/@smithy/smithy-client/dist-es/lazy-json.js b/amplify/functions/fetchDocuments/node_modules/@smithy/smithy-client/dist-es/lazy-json.js new file mode 100644 index 0000000..9bddfce --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@smithy/smithy-client/dist-es/lazy-json.js @@ -0,0 +1,24 @@ +export const LazyJsonString = function LazyJsonString(val) { + const str = Object.assign(new String(val), { + deserializeJSON() { + return JSON.parse(String(val)); + }, + toString() { + return String(val); + }, + toJSON() { + return String(val); + }, + }); + return str; +}; +LazyJsonString.from = (object) => { + 
if (object && typeof object === "object" && (object instanceof LazyJsonString || "deserializeJSON" in object)) { + return object; + } + else if (typeof object === "string" || Object.getPrototypeOf(object) === String.prototype) { + return LazyJsonString(String(object)); + } + return LazyJsonString(JSON.stringify(object)); +}; +LazyJsonString.fromObject = LazyJsonString.from; diff --git a/amplify/functions/fetchDocuments/node_modules/@smithy/smithy-client/dist-es/object-mapping.js b/amplify/functions/fetchDocuments/node_modules/@smithy/smithy-client/dist-es/object-mapping.js new file mode 100644 index 0000000..84a1f26 --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@smithy/smithy-client/dist-es/object-mapping.js @@ -0,0 +1,92 @@ +export function map(arg0, arg1, arg2) { + let target; + let filter; + let instructions; + if (typeof arg1 === "undefined" && typeof arg2 === "undefined") { + target = {}; + instructions = arg0; + } + else { + target = arg0; + if (typeof arg1 === "function") { + filter = arg1; + instructions = arg2; + return mapWithFilter(target, filter, instructions); + } + else { + instructions = arg1; + } + } + for (const key of Object.keys(instructions)) { + if (!Array.isArray(instructions[key])) { + target[key] = instructions[key]; + continue; + } + applyInstruction(target, null, instructions, key); + } + return target; +} +export const convertMap = (target) => { + const output = {}; + for (const [k, v] of Object.entries(target || {})) { + output[k] = [, v]; + } + return output; +}; +export const take = (source, instructions) => { + const out = {}; + for (const key in instructions) { + applyInstruction(out, source, instructions, key); + } + return out; +}; +const mapWithFilter = (target, filter, instructions) => { + return map(target, Object.entries(instructions).reduce((_instructions, [key, value]) => { + if (Array.isArray(value)) { + _instructions[key] = value; + } + else { + if (typeof value === "function") { + _instructions[key] = 
[filter, value()]; + } + else { + _instructions[key] = [filter, value]; + } + } + return _instructions; + }, {})); +}; +const applyInstruction = (target, source, instructions, targetKey) => { + if (source !== null) { + let instruction = instructions[targetKey]; + if (typeof instruction === "function") { + instruction = [, instruction]; + } + const [filter = nonNullish, valueFn = pass, sourceKey = targetKey] = instruction; + if ((typeof filter === "function" && filter(source[sourceKey])) || (typeof filter !== "function" && !!filter)) { + target[targetKey] = valueFn(source[sourceKey]); + } + return; + } + let [filter, value] = instructions[targetKey]; + if (typeof value === "function") { + let _value; + const defaultFilterPassed = filter === undefined && (_value = value()) != null; + const customFilterPassed = (typeof filter === "function" && !!filter(void 0)) || (typeof filter !== "function" && !!filter); + if (defaultFilterPassed) { + target[targetKey] = _value; + } + else if (customFilterPassed) { + target[targetKey] = value(); + } + } + else { + const defaultFilterPassed = filter === undefined && value != null; + const customFilterPassed = (typeof filter === "function" && !!filter(value)) || (typeof filter !== "function" && !!filter); + if (defaultFilterPassed || customFilterPassed) { + target[targetKey] = value; + } + } +}; +const nonNullish = (_) => _ != null; +const pass = (_) => _; diff --git a/amplify/functions/fetchDocuments/node_modules/@smithy/smithy-client/dist-es/parse-utils.js b/amplify/functions/fetchDocuments/node_modules/@smithy/smithy-client/dist-es/parse-utils.js new file mode 100644 index 0000000..209db79 --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@smithy/smithy-client/dist-es/parse-utils.js @@ -0,0 +1,230 @@ +export const parseBoolean = (value) => { + switch (value) { + case "true": + return true; + case "false": + return false; + default: + throw new Error(`Unable to parse boolean value "${value}"`); + } +}; +export const 
expectBoolean = (value) => { + if (value === null || value === undefined) { + return undefined; + } + if (typeof value === "number") { + if (value === 0 || value === 1) { + logger.warn(stackTraceWarning(`Expected boolean, got ${typeof value}: ${value}`)); + } + if (value === 0) { + return false; + } + if (value === 1) { + return true; + } + } + if (typeof value === "string") { + const lower = value.toLowerCase(); + if (lower === "false" || lower === "true") { + logger.warn(stackTraceWarning(`Expected boolean, got ${typeof value}: ${value}`)); + } + if (lower === "false") { + return false; + } + if (lower === "true") { + return true; + } + } + if (typeof value === "boolean") { + return value; + } + throw new TypeError(`Expected boolean, got ${typeof value}: ${value}`); +}; +export const expectNumber = (value) => { + if (value === null || value === undefined) { + return undefined; + } + if (typeof value === "string") { + const parsed = parseFloat(value); + if (!Number.isNaN(parsed)) { + if (String(parsed) !== String(value)) { + logger.warn(stackTraceWarning(`Expected number but observed string: ${value}`)); + } + return parsed; + } + } + if (typeof value === "number") { + return value; + } + throw new TypeError(`Expected number, got ${typeof value}: ${value}`); +}; +const MAX_FLOAT = Math.ceil(2 ** 127 * (2 - 2 ** -23)); +export const expectFloat32 = (value) => { + const expected = expectNumber(value); + if (expected !== undefined && !Number.isNaN(expected) && expected !== Infinity && expected !== -Infinity) { + if (Math.abs(expected) > MAX_FLOAT) { + throw new TypeError(`Expected 32-bit float, got ${value}`); + } + } + return expected; +}; +export const expectLong = (value) => { + if (value === null || value === undefined) { + return undefined; + } + if (Number.isInteger(value) && !Number.isNaN(value)) { + return value; + } + throw new TypeError(`Expected integer, got ${typeof value}: ${value}`); +}; +export const expectInt = expectLong; +export const expectInt32 = 
(value) => expectSizedInt(value, 32); +export const expectShort = (value) => expectSizedInt(value, 16); +export const expectByte = (value) => expectSizedInt(value, 8); +const expectSizedInt = (value, size) => { + const expected = expectLong(value); + if (expected !== undefined && castInt(expected, size) !== expected) { + throw new TypeError(`Expected ${size}-bit integer, got ${value}`); + } + return expected; +}; +const castInt = (value, size) => { + switch (size) { + case 32: + return Int32Array.of(value)[0]; + case 16: + return Int16Array.of(value)[0]; + case 8: + return Int8Array.of(value)[0]; + } +}; +export const expectNonNull = (value, location) => { + if (value === null || value === undefined) { + if (location) { + throw new TypeError(`Expected a non-null value for ${location}`); + } + throw new TypeError("Expected a non-null value"); + } + return value; +}; +export const expectObject = (value) => { + if (value === null || value === undefined) { + return undefined; + } + if (typeof value === "object" && !Array.isArray(value)) { + return value; + } + const receivedType = Array.isArray(value) ? "array" : typeof value; + throw new TypeError(`Expected object, got ${receivedType}: ${value}`); +}; +export const expectString = (value) => { + if (value === null || value === undefined) { + return undefined; + } + if (typeof value === "string") { + return value; + } + if (["boolean", "number", "bigint"].includes(typeof value)) { + logger.warn(stackTraceWarning(`Expected string, got ${typeof value}: ${value}`)); + return String(value); + } + throw new TypeError(`Expected string, got ${typeof value}: ${value}`); +}; +export const expectUnion = (value) => { + if (value === null || value === undefined) { + return undefined; + } + const asObject = expectObject(value); + const setKeys = Object.entries(asObject) + .filter(([, v]) => v != null) + .map(([k]) => k); + if (setKeys.length === 0) { + throw new TypeError(`Unions must have exactly one non-null member. 
None were found.`); + } + if (setKeys.length > 1) { + throw new TypeError(`Unions must have exactly one non-null member. Keys ${setKeys} were not null.`); + } + return asObject; +}; +export const strictParseDouble = (value) => { + if (typeof value == "string") { + return expectNumber(parseNumber(value)); + } + return expectNumber(value); +}; +export const strictParseFloat = strictParseDouble; +export const strictParseFloat32 = (value) => { + if (typeof value == "string") { + return expectFloat32(parseNumber(value)); + } + return expectFloat32(value); +}; +const NUMBER_REGEX = /(-?(?:0|[1-9]\d*)(?:\.\d+)?(?:[eE][+-]?\d+)?)|(-?Infinity)|(NaN)/g; +const parseNumber = (value) => { + const matches = value.match(NUMBER_REGEX); + if (matches === null || matches[0].length !== value.length) { + throw new TypeError(`Expected real number, got implicit NaN`); + } + return parseFloat(value); +}; +export const limitedParseDouble = (value) => { + if (typeof value == "string") { + return parseFloatString(value); + } + return expectNumber(value); +}; +export const handleFloat = limitedParseDouble; +export const limitedParseFloat = limitedParseDouble; +export const limitedParseFloat32 = (value) => { + if (typeof value == "string") { + return parseFloatString(value); + } + return expectFloat32(value); +}; +const parseFloatString = (value) => { + switch (value) { + case "NaN": + return NaN; + case "Infinity": + return Infinity; + case "-Infinity": + return -Infinity; + default: + throw new Error(`Unable to parse float value: ${value}`); + } +}; +export const strictParseLong = (value) => { + if (typeof value === "string") { + return expectLong(parseNumber(value)); + } + return expectLong(value); +}; +export const strictParseInt = strictParseLong; +export const strictParseInt32 = (value) => { + if (typeof value === "string") { + return expectInt32(parseNumber(value)); + } + return expectInt32(value); +}; +export const strictParseShort = (value) => { + if (typeof value === "string") { + 
return expectShort(parseNumber(value)); + } + return expectShort(value); +}; +export const strictParseByte = (value) => { + if (typeof value === "string") { + return expectByte(parseNumber(value)); + } + return expectByte(value); +}; +const stackTraceWarning = (message) => { + return String(new TypeError(message).stack || message) + .split("\n") + .slice(0, 5) + .filter((s) => !s.includes("stackTraceWarning")) + .join("\n"); +}; +export const logger = { + warn: console.warn, +}; diff --git a/amplify/functions/fetchDocuments/node_modules/@smithy/smithy-client/dist-es/quote-header.js b/amplify/functions/fetchDocuments/node_modules/@smithy/smithy-client/dist-es/quote-header.js new file mode 100644 index 0000000..d0ddf67 --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@smithy/smithy-client/dist-es/quote-header.js @@ -0,0 +1,6 @@ +export function quoteHeader(part) { + if (part.includes(",") || part.includes('"')) { + part = `"${part.replace(/"/g, '\\"')}"`; + } + return part; +} diff --git a/amplify/functions/fetchDocuments/node_modules/@smithy/smithy-client/dist-es/resolve-path.js b/amplify/functions/fetchDocuments/node_modules/@smithy/smithy-client/dist-es/resolve-path.js new file mode 100644 index 0000000..6c70cb3 --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@smithy/smithy-client/dist-es/resolve-path.js @@ -0,0 +1 @@ +export { resolvedPath } from "@smithy/core/protocols"; diff --git a/amplify/functions/fetchDocuments/node_modules/@smithy/smithy-client/dist-es/ser-utils.js b/amplify/functions/fetchDocuments/node_modules/@smithy/smithy-client/dist-es/ser-utils.js new file mode 100644 index 0000000..207437f --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@smithy/smithy-client/dist-es/ser-utils.js @@ -0,0 +1,14 @@ +export const serializeFloat = (value) => { + if (value !== value) { + return "NaN"; + } + switch (value) { + case Infinity: + return "Infinity"; + case -Infinity: + return "-Infinity"; + default: + 
return value; + } +}; +export const serializeDateTime = (date) => date.toISOString().replace(".000Z", "Z"); diff --git a/amplify/functions/fetchDocuments/node_modules/@smithy/smithy-client/dist-es/serde-json.js b/amplify/functions/fetchDocuments/node_modules/@smithy/smithy-client/dist-es/serde-json.js new file mode 100644 index 0000000..babb7c1 --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@smithy/smithy-client/dist-es/serde-json.js @@ -0,0 +1,19 @@ +export const _json = (obj) => { + if (obj == null) { + return {}; + } + if (Array.isArray(obj)) { + return obj.filter((_) => _ != null).map(_json); + } + if (typeof obj === "object") { + const target = {}; + for (const key of Object.keys(obj)) { + if (obj[key] == null) { + continue; + } + target[key] = _json(obj[key]); + } + return target; + } + return obj; +}; diff --git a/amplify/functions/fetchDocuments/node_modules/@smithy/smithy-client/dist-es/split-every.js b/amplify/functions/fetchDocuments/node_modules/@smithy/smithy-client/dist-es/split-every.js new file mode 100644 index 0000000..1d78dca --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@smithy/smithy-client/dist-es/split-every.js @@ -0,0 +1,27 @@ +export function splitEvery(value, delimiter, numDelimiters) { + if (numDelimiters <= 0 || !Number.isInteger(numDelimiters)) { + throw new Error("Invalid number of delimiters (" + numDelimiters + ") for splitEvery."); + } + const segments = value.split(delimiter); + if (numDelimiters === 1) { + return segments; + } + const compoundSegments = []; + let currentSegment = ""; + for (let i = 0; i < segments.length; i++) { + if (currentSegment === "") { + currentSegment = segments[i]; + } + else { + currentSegment += delimiter + segments[i]; + } + if ((i + 1) % numDelimiters === 0) { + compoundSegments.push(currentSegment); + currentSegment = ""; + } + } + if (currentSegment !== "") { + compoundSegments.push(currentSegment); + } + return compoundSegments; +} diff --git 
a/amplify/functions/fetchDocuments/node_modules/@smithy/smithy-client/dist-es/split-header.js b/amplify/functions/fetchDocuments/node_modules/@smithy/smithy-client/dist-es/split-header.js new file mode 100644 index 0000000..518e77f --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@smithy/smithy-client/dist-es/split-header.js @@ -0,0 +1,37 @@ +export const splitHeader = (value) => { + const z = value.length; + const values = []; + let withinQuotes = false; + let prevChar = undefined; + let anchor = 0; + for (let i = 0; i < z; ++i) { + const char = value[i]; + switch (char) { + case `"`: + if (prevChar !== "\\") { + withinQuotes = !withinQuotes; + } + break; + case ",": + if (!withinQuotes) { + values.push(value.slice(anchor, i)); + anchor = i + 1; + } + break; + default: + } + prevChar = char; + } + values.push(value.slice(anchor)); + return values.map((v) => { + v = v.trim(); + const z = v.length; + if (z < 2) { + return v; + } + if (v[0] === `"` && v[z - 1] === `"`) { + v = v.slice(1, z - 1); + } + return v.replace(/\\"/g, '"'); + }); +}; diff --git a/amplify/functions/fetchDocuments/node_modules/@smithy/smithy-client/dist-types/NoOpLogger.d.ts b/amplify/functions/fetchDocuments/node_modules/@smithy/smithy-client/dist-types/NoOpLogger.d.ts new file mode 100644 index 0000000..93ebff4 --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@smithy/smithy-client/dist-types/NoOpLogger.d.ts @@ -0,0 +1,11 @@ +import { Logger } from "@smithy/types"; +/** + * @internal + */ +export declare class NoOpLogger implements Logger { + trace(): void; + debug(): void; + info(): void; + warn(): void; + error(): void; +} diff --git a/amplify/functions/fetchDocuments/node_modules/@smithy/smithy-client/dist-types/client.d.ts b/amplify/functions/fetchDocuments/node_modules/@smithy/smithy-client/dist-types/client.d.ts new file mode 100644 index 0000000..6f155e5 --- /dev/null +++ 
b/amplify/functions/fetchDocuments/node_modules/@smithy/smithy-client/dist-types/client.d.ts @@ -0,0 +1,61 @@ +import { Client as IClient, Command, FetchHttpHandlerOptions, MetadataBearer, MiddlewareStack, NodeHttpHandlerOptions, RequestHandler } from "@smithy/types"; +/** + * @public + */ +export interface SmithyConfiguration { + requestHandler: RequestHandler | NodeHttpHandlerOptions | FetchHttpHandlerOptions | Record; + /** + * The API version set internally by the SDK, and is + * not planned to be used by customer code. + * @internal + */ + readonly apiVersion: string; + /** + * @public + * + * Default false. + * + * When true, the client will only resolve the middleware stack once per + * Command class. This means modifying the middlewareStack of the + * command or client after requests have been made will not be + * recognized. + * + * Calling client.destroy() also clears this cache. + * + * Enable this only if needing the additional time saved (0-1ms per request) + * and not needing middleware modifications between requests. + */ + cacheMiddleware?: boolean; +} +/** + * @internal + */ +export type SmithyResolvedConfiguration = { + requestHandler: RequestHandler; + readonly apiVersion: string; + cacheMiddleware?: boolean; +}; +/** + * @public + */ +export declare class Client> implements IClient { + readonly config: ResolvedClientConfiguration; + middlewareStack: MiddlewareStack; + /** + * Holds an object reference to the initial configuration object. + * Used to check that the config resolver stack does not create + * dangling instances of an intermediate form of the configuration object. + * + * @internal + */ + initConfig?: object; + /** + * May be used to cache the resolved handler function for a Command class. 
+ */ + private handlers?; + constructor(config: ResolvedClientConfiguration); + send(command: Command>, options?: HandlerOptions): Promise; + send(command: Command>, cb: (err: any, data?: OutputType) => void): void; + send(command: Command>, options: HandlerOptions, cb: (err: any, data?: OutputType) => void): void; + destroy(): void; +} diff --git a/amplify/functions/fetchDocuments/node_modules/@smithy/smithy-client/dist-types/collect-stream-body.d.ts b/amplify/functions/fetchDocuments/node_modules/@smithy/smithy-client/dist-types/collect-stream-body.d.ts new file mode 100644 index 0000000..33378b8 --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@smithy/smithy-client/dist-types/collect-stream-body.d.ts @@ -0,0 +1,5 @@ +/** + * @internal + * Backwards compatibility re-export. + */ +export { collectBody } from "@smithy/core/protocols"; diff --git a/amplify/functions/fetchDocuments/node_modules/@smithy/smithy-client/dist-types/command.d.ts b/amplify/functions/fetchDocuments/node_modules/@smithy/smithy-client/dist-types/command.d.ts new file mode 100644 index 0000000..3625b88 --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@smithy/smithy-client/dist-types/command.d.ts @@ -0,0 +1,113 @@ +import type { EndpointParameterInstructions } from "@smithy/middleware-endpoint"; +import type { Command as ICommand, Handler, HandlerExecutionContext, HttpRequest as IHttpRequest, HttpResponse as IHttpResponse, Logger, MetadataBearer, MiddlewareStack as IMiddlewareStack, OptionalParameter, Pluggable, RequestHandler, SerdeContext } from "@smithy/types"; +/** + * @public + */ +export declare abstract class Command implements ICommand { + abstract input: Input; + readonly middlewareStack: IMiddlewareStack; + /** + * Factory for Command ClassBuilder. 
+ * @internal + */ + static classBuilder; + }, SI extends object = any, SO extends MetadataBearer = any>(): ClassBuilder; + abstract resolveMiddleware(stack: IMiddlewareStack, configuration: ResolvedClientConfiguration, options: any): Handler; + /** + * @internal + */ + resolveMiddlewareWithContext(clientStack: IMiddlewareStack, configuration: { + logger: Logger; + requestHandler: RequestHandler; + }, options: any, { middlewareFn, clientName, commandName, inputFilterSensitiveLog, outputFilterSensitiveLog, smithyContext, additionalContext, CommandCtor, }: ResolveMiddlewareContextArgs): import("@smithy/types").InitializeHandler; +} +/** + * @internal + */ +type ResolveMiddlewareContextArgs = { + middlewareFn: (CommandCtor: any, clientStack: any, config: any, options: any) => Pluggable[]; + clientName: string; + commandName: string; + smithyContext: Record; + additionalContext: HandlerExecutionContext; + inputFilterSensitiveLog: (_: any) => any; + outputFilterSensitiveLog: (_: any) => any; + CommandCtor: any; +}; +/** + * @internal + */ +declare class ClassBuilder; +}, SI extends object = any, SO extends MetadataBearer = any> { + private _init; + private _ep; + private _middlewareFn; + private _commandName; + private _clientName; + private _additionalContext; + private _smithyContext; + private _inputFilterSensitiveLog; + private _outputFilterSensitiveLog; + private _serializer; + private _deserializer; + /** + * Optional init callback. + */ + init(cb: (_: Command) => void): void; + /** + * Set the endpoint parameter instructions. + */ + ep(endpointParameterInstructions: EndpointParameterInstructions): ClassBuilder; + /** + * Add any number of middleware. + */ + m(middlewareSupplier: (CommandCtor: any, clientStack: any, config: any, options: any) => Pluggable[]): ClassBuilder; + /** + * Set the initial handler execution context Smithy field. 
+ */ + s(service: string, operation: string, smithyContext?: Record): ClassBuilder; + /** + * Set the initial handler execution context. + */ + c(additionalContext?: HandlerExecutionContext): ClassBuilder; + /** + * Set constant string identifiers for the operation. + */ + n(clientName: string, commandName: string): ClassBuilder; + /** + * Set the input and output sensistive log filters. + */ + f(inputFilter?: (_: any) => any, outputFilter?: (_: any) => any): ClassBuilder; + /** + * Sets the serializer. + */ + ser(serializer: (input: I, context?: SerdeContext | any) => Promise): ClassBuilder; + /** + * Sets the deserializer. + */ + de(deserializer: (output: IHttpResponse, context?: SerdeContext | any) => Promise): ClassBuilder; + /** + * @returns a Command class with the classBuilder properties. + */ + build(): { + new (input: I): CommandImpl; + new (...[input]: OptionalParameter): CommandImpl; + getEndpointParameterInstructions(): EndpointParameterInstructions; + }; +} +/** + * A concrete implementation of ICommand with no abstract members. 
+ * @public + */ +export interface CommandImpl; +}, SI extends object = any, SO extends MetadataBearer = any> extends Command { + readonly input: I; + resolveMiddleware(stack: IMiddlewareStack, configuration: C, options: any): Handler; +} +export {}; diff --git a/amplify/functions/fetchDocuments/node_modules/@smithy/smithy-client/dist-types/constants.d.ts b/amplify/functions/fetchDocuments/node_modules/@smithy/smithy-client/dist-types/constants.d.ts new file mode 100644 index 0000000..c17e1c8 --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@smithy/smithy-client/dist-types/constants.d.ts @@ -0,0 +1,4 @@ +/** + * @internal + */ +export declare const SENSITIVE_STRING = "***SensitiveInformation***"; diff --git a/amplify/functions/fetchDocuments/node_modules/@smithy/smithy-client/dist-types/create-aggregated-client.d.ts b/amplify/functions/fetchDocuments/node_modules/@smithy/smithy-client/dist-types/create-aggregated-client.d.ts new file mode 100644 index 0000000..00e23d8 --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@smithy/smithy-client/dist-types/create-aggregated-client.d.ts @@ -0,0 +1,9 @@ +import { Client } from "./client"; +/** + * @internal + * + * @param commands - command lookup container. + * @param client - client instance on which to add aggregated methods. + * @returns an aggregated client with dynamically created methods. 
+ */ +export declare const createAggregatedClient: (commands: Record, Client: new (...args: any) => Client) => void; diff --git a/amplify/functions/fetchDocuments/node_modules/@smithy/smithy-client/dist-types/date-utils.d.ts b/amplify/functions/fetchDocuments/node_modules/@smithy/smithy-client/dist-types/date-utils.d.ts new file mode 100644 index 0000000..99c55f4 --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@smithy/smithy-client/dist-types/date-utils.d.ts @@ -0,0 +1,73 @@ +/** + * @internal + * + * Builds a proper UTC HttpDate timestamp from a Date object + * since not all environments will have this as the expected + * format. + * + * @see {@link https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Global_Objects/Date/toUTCString} + * - Prior to ECMAScript 2018, the format of the return value + * - varied according to the platform. The most common return + * - value was an RFC-1123 formatted date stamp, which is a + * - slightly updated version of RFC-822 date stamps. + */ +export declare function dateToUtcString(date: Date): string; +/** + * @internal + * + * Parses a value into a Date. Returns undefined if the input is null or + * undefined, throws an error if the input is not a string that can be parsed + * as an RFC 3339 date. + * + * Input strings must conform to RFC3339 section 5.6, and cannot have a UTC + * offset. Fractional precision is supported. + * + * @see {@link https://xml2rfc.tools.ietf.org/public/rfc/html/rfc3339.html#anchor14} + * + * @param value - the value to parse + * @returns a Date or undefined + */ +export declare const parseRfc3339DateTime: (value: unknown) => Date | undefined; +/** + * @internal + * + * Parses a value into a Date. Returns undefined if the input is null or + * undefined, throws an error if the input is not a string that can be parsed + * as an RFC 3339 date. + * + * Input strings must conform to RFC3339 section 5.6, and can have a UTC + * offset. Fractional precision is supported. 
+ * + * @see {@link https://xml2rfc.tools.ietf.org/public/rfc/html/rfc3339.html#anchor14} + * + * @param value - the value to parse + * @returns a Date or undefined + */ +export declare const parseRfc3339DateTimeWithOffset: (value: unknown) => Date | undefined; +/** + * @internal + * + * Parses a value into a Date. Returns undefined if the input is null or + * undefined, throws an error if the input is not a string that can be parsed + * as an RFC 7231 IMF-fixdate or obs-date. + * + * Input strings must conform to RFC7231 section 7.1.1.1. Fractional seconds are supported. + * + * @see {@link https://datatracker.ietf.org/doc/html/rfc7231.html#section-7.1.1.1} + * + * @param value - the value to parse + * @returns a Date or undefined + */ +export declare const parseRfc7231DateTime: (value: unknown) => Date | undefined; +/** + * @internal + * + * Parses a value into a Date. Returns undefined if the input is null or + * undefined, throws an error if the input is not a number or a parseable string. + * + * Input strings must be an integer or floating point number. Fractional seconds are supported. + * + * @param value - the value to parse + * @returns a Date or undefined + */ +export declare const parseEpochTimestamp: (value: unknown) => Date | undefined; diff --git a/amplify/functions/fetchDocuments/node_modules/@smithy/smithy-client/dist-types/default-error-handler.d.ts b/amplify/functions/fetchDocuments/node_modules/@smithy/smithy-client/dist-types/default-error-handler.d.ts new file mode 100644 index 0000000..fd4b52d --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@smithy/smithy-client/dist-types/default-error-handler.d.ts @@ -0,0 +1,13 @@ +/** + * Always throws an error with the given `exceptionCtor` and other arguments. + * This is only called from an error handling code path. 
+ * + * @internal + */ +export declare const throwDefaultError: ({ output, parsedBody, exceptionCtor, errorCode }: any) => never; +/** + * @internal + * + * Creates {@link throwDefaultError} with bound ExceptionCtor. + */ +export declare const withBaseException: (ExceptionCtor: new (...args: any) => any) => any; diff --git a/amplify/functions/fetchDocuments/node_modules/@smithy/smithy-client/dist-types/defaults-mode.d.ts b/amplify/functions/fetchDocuments/node_modules/@smithy/smithy-client/dist-types/defaults-mode.d.ts new file mode 100644 index 0000000..1ddb6f0 --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@smithy/smithy-client/dist-types/defaults-mode.d.ts @@ -0,0 +1,28 @@ +/** + * @internal + */ +export declare const loadConfigsForDefaultMode: (mode: ResolvedDefaultsMode) => DefaultsModeConfigs; +/** + * Option determining how certain default configuration options are resolved in the SDK. It can be one of the value listed below: + * * `"standard"`:

The STANDARD mode provides the latest recommended default values that should be safe to run in most scenarios

Note that the default values vended from this mode might change as best practices may evolve. As a result, it is encouraged to perform tests when upgrading the SDK

+ * * `"in-region"`:

The IN_REGION mode builds on the standard mode and includes optimization tailored for applications which call AWS services from within the same AWS region

Note that the default values vended from this mode might change as best practices may evolve. As a result, it is encouraged to perform tests when upgrading the SDK

+ * * `"cross-region"`:

The CROSS_REGION mode builds on the standard mode and includes optimization tailored for applications which call AWS services in a different region

Note that the default values vended from this mode might change as best practices may evolve. As a result, it is encouraged to perform tests when upgrading the SDK

+ * * `"mobile"`:

The MOBILE mode builds on the standard mode and includes optimization tailored for mobile applications

Note that the default values vended from this mode might change as best practices may evolve. As a result, it is encouraged to perform tests when upgrading the SDK

+ * * `"auto"`:

The AUTO mode is an experimental mode that builds on the standard mode. The SDK will attempt to discover the execution environment to determine the appropriate settings automatically.

Note that the auto detection is heuristics-based and does not guarantee 100% accuracy. STANDARD mode will be used if the execution environment cannot be determined. The auto detection might query EC2 Instance Metadata service, which might introduce latency. Therefore we recommend choosing an explicit defaults_mode instead if startup latency is critical to your application

+ * * `"legacy"`:

The LEGACY mode provides default settings that vary per SDK and were used prior to establishment of defaults_mode

+ * + * @defaultValue "legacy" + */ +export type DefaultsMode = "standard" | "in-region" | "cross-region" | "mobile" | "auto" | "legacy"; +/** + * @internal + */ +export type ResolvedDefaultsMode = Exclude; +/** + * @internal + */ +export interface DefaultsModeConfigs { + retryMode?: string; + connectionTimeout?: number; + requestTimeout?: number; +} diff --git a/amplify/functions/fetchDocuments/node_modules/@smithy/smithy-client/dist-types/emitWarningIfUnsupportedVersion.d.ts b/amplify/functions/fetchDocuments/node_modules/@smithy/smithy-client/dist-types/emitWarningIfUnsupportedVersion.d.ts new file mode 100644 index 0000000..8fc02ce --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@smithy/smithy-client/dist-types/emitWarningIfUnsupportedVersion.d.ts @@ -0,0 +1,8 @@ +/** + * @internal + * + * Emits warning if the provided Node.js version string is pending deprecation. + * + * @param version - The Node.js version string. + */ +export declare const emitWarningIfUnsupportedVersion: (version: string) => void; diff --git a/amplify/functions/fetchDocuments/node_modules/@smithy/smithy-client/dist-types/exceptions.d.ts b/amplify/functions/fetchDocuments/node_modules/@smithy/smithy-client/dist-types/exceptions.d.ts new file mode 100644 index 0000000..0a362c6 --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@smithy/smithy-client/dist-types/exceptions.d.ts @@ -0,0 +1,42 @@ +import { HttpResponse, MetadataBearer, ResponseMetadata, RetryableTrait, SmithyException } from "@smithy/types"; +/** + * The type of the exception class constructor parameter. The returned type contains the properties + * in the `ExceptionType` but not in the `BaseExceptionType`. If the `BaseExceptionType` contains + * `$metadata` and `message` properties, it's also included in the returned type. 
+ * @internal + */ +export type ExceptionOptionType = Omit>; +/** + * @public + */ +export interface ServiceExceptionOptions extends SmithyException, MetadataBearer { + message?: string; +} +/** + * @public + * + * Base exception class for the exceptions from the server-side. + */ +export declare class ServiceException extends Error implements SmithyException, MetadataBearer { + readonly $fault: "client" | "server"; + $response?: HttpResponse; + $retryable?: RetryableTrait; + $metadata: ResponseMetadata; + constructor(options: ServiceExceptionOptions); + /** + * Checks if a value is an instance of ServiceException (duck typed) + */ + static isInstance(value: unknown): value is ServiceException; + /** + * Custom instanceof check to support the operator for ServiceException base class + */ + static [Symbol.hasInstance](instance: unknown): boolean; +} +/** + * This method inject unmodeled member to a deserialized SDK exception, + * and load the error message from different possible keys('message', + * 'Message'). + * + * @internal + */ +export declare const decorateServiceException: (exception: E, additions?: Record) => E; diff --git a/amplify/functions/fetchDocuments/node_modules/@smithy/smithy-client/dist-types/extended-encode-uri-component.d.ts b/amplify/functions/fetchDocuments/node_modules/@smithy/smithy-client/dist-types/extended-encode-uri-component.d.ts new file mode 100644 index 0000000..ced666a --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@smithy/smithy-client/dist-types/extended-encode-uri-component.d.ts @@ -0,0 +1,5 @@ +/** + * @internal + * Backwards compatibility re-export. 
+ */ +export { extendedEncodeURIComponent } from "@smithy/core/protocols"; diff --git a/amplify/functions/fetchDocuments/node_modules/@smithy/smithy-client/dist-types/extensions/checksum.d.ts b/amplify/functions/fetchDocuments/node_modules/@smithy/smithy-client/dist-types/extensions/checksum.d.ts new file mode 100644 index 0000000..8b5dd7b --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@smithy/smithy-client/dist-types/extensions/checksum.d.ts @@ -0,0 +1,24 @@ +import type { ChecksumAlgorithm, ChecksumConfiguration, ChecksumConstructor, HashConstructor } from "@smithy/types"; +import { AlgorithmId } from "@smithy/types"; +export { AlgorithmId, ChecksumAlgorithm, ChecksumConfiguration }; +/** + * @internal + */ +export type PartialChecksumRuntimeConfigType = Partial<{ + sha256: ChecksumConstructor | HashConstructor; + md5: ChecksumConstructor | HashConstructor; + crc32: ChecksumConstructor | HashConstructor; + crc32c: ChecksumConstructor | HashConstructor; + sha1: ChecksumConstructor | HashConstructor; +}>; +/** + * @internal + */ +export declare const getChecksumConfiguration: (runtimeConfig: PartialChecksumRuntimeConfigType) => { + addChecksumAlgorithm(algo: ChecksumAlgorithm): void; + checksumAlgorithms(): ChecksumAlgorithm[]; +}; +/** + * @internal + */ +export declare const resolveChecksumRuntimeConfig: (clientConfig: ChecksumConfiguration) => PartialChecksumRuntimeConfigType; diff --git a/amplify/functions/fetchDocuments/node_modules/@smithy/smithy-client/dist-types/extensions/defaultExtensionConfiguration.d.ts b/amplify/functions/fetchDocuments/node_modules/@smithy/smithy-client/dist-types/extensions/defaultExtensionConfiguration.d.ts new file mode 100644 index 0000000..42de409 --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@smithy/smithy-client/dist-types/extensions/defaultExtensionConfiguration.d.ts @@ -0,0 +1,38 @@ +import type { DefaultExtensionConfiguration } from "@smithy/types"; +import { 
PartialChecksumRuntimeConfigType } from "./checksum"; +import { PartialRetryRuntimeConfigType } from "./retry"; +/** + * @internal + */ +export type DefaultExtensionRuntimeConfigType = PartialRetryRuntimeConfigType & PartialChecksumRuntimeConfigType; +/** + * @internal + * + * Helper function to resolve default extension configuration from runtime config + */ +export declare const getDefaultExtensionConfiguration: (runtimeConfig: DefaultExtensionRuntimeConfigType) => { + addChecksumAlgorithm(algo: import("@smithy/types").ChecksumAlgorithm): void; + checksumAlgorithms(): import("@smithy/types").ChecksumAlgorithm[]; +} & { + setRetryStrategy(retryStrategy: import("@smithy/types").Provider): void; + retryStrategy(): import("@smithy/types").Provider; +}; +/** + * @deprecated use getDefaultExtensionConfiguration + * @internal + * + * Helper function to resolve default extension configuration from runtime config + */ +export declare const getDefaultClientConfiguration: (runtimeConfig: DefaultExtensionRuntimeConfigType) => { + addChecksumAlgorithm(algo: import("@smithy/types").ChecksumAlgorithm): void; + checksumAlgorithms(): import("@smithy/types").ChecksumAlgorithm[]; +} & { + setRetryStrategy(retryStrategy: import("@smithy/types").Provider): void; + retryStrategy(): import("@smithy/types").Provider; +}; +/** + * @internal + * + * Helper function to resolve runtime config from default extension configuration + */ +export declare const resolveDefaultRuntimeConfig: (config: DefaultExtensionConfiguration) => DefaultExtensionRuntimeConfigType; diff --git a/amplify/functions/fetchDocuments/node_modules/@smithy/smithy-client/dist-types/extensions/index.d.ts b/amplify/functions/fetchDocuments/node_modules/@smithy/smithy-client/dist-types/extensions/index.d.ts new file mode 100644 index 0000000..f1b8074 --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@smithy/smithy-client/dist-types/extensions/index.d.ts @@ -0,0 +1 @@ +export * from 
"./defaultExtensionConfiguration"; diff --git a/amplify/functions/fetchDocuments/node_modules/@smithy/smithy-client/dist-types/extensions/retry.d.ts b/amplify/functions/fetchDocuments/node_modules/@smithy/smithy-client/dist-types/extensions/retry.d.ts new file mode 100644 index 0000000..6e28827 --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@smithy/smithy-client/dist-types/extensions/retry.d.ts @@ -0,0 +1,18 @@ +import { Provider, RetryStrategy, RetryStrategyConfiguration, RetryStrategyV2 } from "@smithy/types"; +/** + * @internal + */ +export type PartialRetryRuntimeConfigType = Partial<{ + retryStrategy: Provider; +}>; +/** + * @internal + */ +export declare const getRetryConfiguration: (runtimeConfig: PartialRetryRuntimeConfigType) => { + setRetryStrategy(retryStrategy: Provider): void; + retryStrategy(): Provider; +}; +/** + * @internal + */ +export declare const resolveRetryRuntimeConfig: (retryStrategyConfiguration: RetryStrategyConfiguration) => PartialRetryRuntimeConfigType; diff --git a/amplify/functions/fetchDocuments/node_modules/@smithy/smithy-client/dist-types/get-array-if-single-item.d.ts b/amplify/functions/fetchDocuments/node_modules/@smithy/smithy-client/dist-types/get-array-if-single-item.d.ts new file mode 100644 index 0000000..6468b91 --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@smithy/smithy-client/dist-types/get-array-if-single-item.d.ts @@ -0,0 +1,7 @@ +/** + * @internal + * + * The XML parser will set one K:V for a member that could + * return multiple entries but only has one. 
+ */ +export declare const getArrayIfSingleItem: (mayBeArray: T) => T | T[]; diff --git a/amplify/functions/fetchDocuments/node_modules/@smithy/smithy-client/dist-types/get-value-from-text-node.d.ts b/amplify/functions/fetchDocuments/node_modules/@smithy/smithy-client/dist-types/get-value-from-text-node.d.ts new file mode 100644 index 0000000..7163e5a --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@smithy/smithy-client/dist-types/get-value-from-text-node.d.ts @@ -0,0 +1,7 @@ +/** + * @internal + * + * Recursively parses object and populates value is node from + * "#text" key if it's available + */ +export declare const getValueFromTextNode: (obj: any) => any; diff --git a/amplify/functions/fetchDocuments/node_modules/@smithy/smithy-client/dist-types/index.d.ts b/amplify/functions/fetchDocuments/node_modules/@smithy/smithy-client/dist-types/index.d.ts new file mode 100644 index 0000000..4a4ac19 --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@smithy/smithy-client/dist-types/index.d.ts @@ -0,0 +1,26 @@ +export type { DocumentType, SdkError, SmithyException } from "@smithy/types"; +export * from "./client"; +export * from "./collect-stream-body"; +export * from "./command"; +export * from "./constants"; +export * from "./create-aggregated-client"; +export * from "./date-utils"; +export * from "./default-error-handler"; +export * from "./defaults-mode"; +export * from "./emitWarningIfUnsupportedVersion"; +export * from "./exceptions"; +export * from "./extended-encode-uri-component"; +export * from "./extensions"; +export * from "./get-array-if-single-item"; +export * from "./get-value-from-text-node"; +export * from "./is-serializable-header-value"; +export * from "./lazy-json"; +export * from "./NoOpLogger"; +export * from "./object-mapping"; +export * from "./parse-utils"; +export * from "./quote-header"; +export * from "./resolve-path"; +export * from "./ser-utils"; +export * from "./serde-json"; +export * from "./split-every"; 
+export * from "./split-header"; diff --git a/amplify/functions/fetchDocuments/node_modules/@smithy/smithy-client/dist-types/is-serializable-header-value.d.ts b/amplify/functions/fetchDocuments/node_modules/@smithy/smithy-client/dist-types/is-serializable-header-value.d.ts new file mode 100644 index 0000000..a35a23a --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@smithy/smithy-client/dist-types/is-serializable-header-value.d.ts @@ -0,0 +1,5 @@ +/** + * @internal + * @returns whether the header value is serializable. + */ +export declare const isSerializableHeaderValue: (value: any) => boolean; diff --git a/amplify/functions/fetchDocuments/node_modules/@smithy/smithy-client/dist-types/lazy-json.d.ts b/amplify/functions/fetchDocuments/node_modules/@smithy/smithy-client/dist-types/lazy-json.d.ts new file mode 100644 index 0000000..df7eb51 --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@smithy/smithy-client/dist-types/lazy-json.d.ts @@ -0,0 +1,46 @@ +/** + * @public + * + * A model field with this type means that you may provide a JavaScript + * object in lieu of a JSON string, and it will be serialized to JSON + * automatically before being sent in a request. + * + * For responses, you will receive a "LazyJsonString", which is a boxed String object + * with additional mixin methods. + * To get the string value, call `.toString()`, or to get the JSON object value, + * call `.deserializeJSON()` or parse it yourself. + */ +export type AutomaticJsonStringConversion = Parameters[0] | LazyJsonString; +/** + * @internal + * + */ +export interface LazyJsonString extends String { + /** + * @returns the JSON parsing of the string value. + */ + deserializeJSON(): any; + /** + * @returns the original string value rather than a JSON.stringified value. 
+ */ + toJSON(): string; +} +/** + * @internal + * + * Extension of the native String class in the previous implementation + * has negative global performance impact on method dispatch for strings, + * and is generally discouraged. + * + * This current implementation may look strange, but is necessary to preserve the interface and + * behavior of extending the String class. + */ +export declare const LazyJsonString: { + (s: string): LazyJsonString; + new (s: string): LazyJsonString; + from(s: any): LazyJsonString; + /** + * @deprecated use #from. + */ + fromObject(s: any): LazyJsonString; +}; diff --git a/amplify/functions/fetchDocuments/node_modules/@smithy/smithy-client/dist-types/object-mapping.d.ts b/amplify/functions/fetchDocuments/node_modules/@smithy/smithy-client/dist-types/object-mapping.d.ts new file mode 100644 index 0000000..97e28e5 --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@smithy/smithy-client/dist-types/object-mapping.d.ts @@ -0,0 +1,162 @@ +/** + * @internal + * + * A set of instructions for multiple keys. + * The aim is to provide a concise yet readable way to map and filter values + * onto a target object. 
+ * + * @example + * ```javascript + * const example: ObjectMappingInstructions = { + * lazyValue1: [, () => 1], + * lazyValue2: [, () => 2], + * lazyValue3: [, () => 3], + * lazyConditionalValue1: [() => true, () => 4], + * lazyConditionalValue2: [() => true, () => 5], + * lazyConditionalValue3: [true, () => 6], + * lazyConditionalValue4: [false, () => 44], + * lazyConditionalValue5: [() => false, () => 55], + * lazyConditionalValue6: ["", () => 66], + * simpleValue1: [, 7], + * simpleValue2: [, 8], + * simpleValue3: [, 9], + * conditionalValue1: [() => true, 10], + * conditionalValue2: [() => true, 11], + * conditionalValue3: [{}, 12], + * conditionalValue4: [false, 110], + * conditionalValue5: [() => false, 121], + * conditionalValue6: ["", 132], + * }; + * + * const exampleResult: Record = { + * lazyValue1: 1, + * lazyValue2: 2, + * lazyValue3: 3, + * lazyConditionalValue1: 4, + * lazyConditionalValue2: 5, + * lazyConditionalValue3: 6, + * simpleValue1: 7, + * simpleValue2: 8, + * simpleValue3: 9, + * conditionalValue1: 10, + * conditionalValue2: 11, + * conditionalValue3: 12, + * }; + * ``` + */ +export type ObjectMappingInstructions = Record; +/** + * @internal + * + * A variant of the object mapping instruction for the `take` function. + * In this case, the source value is provided to the value function, turning it + * from a supplier into a mapper. + */ +export type SourceMappingInstructions = Record; +/** + * @internal + * + * An instruction set for assigning a value to a target object. 
+ */ +export type ObjectMappingInstruction = LazyValueInstruction | ConditionalLazyValueInstruction | SimpleValueInstruction | ConditionalValueInstruction | UnfilteredValue; +/** + * @internal + * + * non-array + */ +export type UnfilteredValue = any; +/** + * @internal + */ +export type LazyValueInstruction = [FilterStatus, ValueSupplier]; +/** + * @internal + */ +export type ConditionalLazyValueInstruction = [FilterStatusSupplier, ValueSupplier]; +/** + * @internal + */ +export type SimpleValueInstruction = [FilterStatus, Value]; +/** + * @internal + */ +export type ConditionalValueInstruction = [ValueFilteringFunction, Value]; +/** + * @internal + */ +export type SourceMappingInstruction = [(ValueFilteringFunction | FilterStatus)?, ValueMapper?, string?]; +/** + * @internal + * + * Filter is considered passed if + * 1. It is a boolean true. + * 2. It is not undefined and is itself truthy. + * 3. It is undefined and the corresponding _value_ is neither null nor undefined. + */ +export type FilterStatus = boolean | unknown | void; +/** + * @internal + * + * Supplies the filter check but not against any value as input. + */ +export type FilterStatusSupplier = () => boolean; +/** + * @internal + * + * Filter check with the given value. + */ +export type ValueFilteringFunction = (value: any) => boolean; +/** + * @internal + * + * Supplies the value for lazy evaluation. + */ +export type ValueSupplier = () => any; +/** + * @internal + * + * A function that maps the source value to the target value. + * Defaults to pass-through with nullish check. + */ +export type ValueMapper = (value: any) => any; +/** + * @internal + * + * A non-function value. + */ +export type Value = any; +/** + * @internal + * Internal/Private, for codegen use only. + * + * Transfer a set of keys from [instructions] to [target]. + * + * For each instruction in the record, the target key will be the instruction key. + * The target assignment will be conditional on the instruction's filter. 
+ * The target assigned value will be supplied by the instructions as an evaluable function or non-function value. + * + * @see ObjectMappingInstructions for an example. + */ +export declare function map(target: any, filter: (value: any) => boolean, instructions: Record): typeof target; +/** + * @internal + */ +export declare function map(instructions: ObjectMappingInstructions): any; +/** + * @internal + */ +export declare function map(target: any, instructions: ObjectMappingInstructions): typeof target; +/** + * Convert a regular object `{ k: v }` to `{ k: [, v] }` mapping instruction set with default + * filter. + * + * @internal + */ +export declare const convertMap: (target: any) => Record; +/** + * @param source - original object with data. + * @param instructions - how to map the data. + * @returns new object mapped from the source object. + * @internal + */ +export declare const take: (source: any, instructions: SourceMappingInstructions) => any; diff --git a/amplify/functions/fetchDocuments/node_modules/@smithy/smithy-client/dist-types/parse-utils.d.ts b/amplify/functions/fetchDocuments/node_modules/@smithy/smithy-client/dist-types/parse-utils.d.ts new file mode 100644 index 0000000..b5ded6f --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@smithy/smithy-client/dist-types/parse-utils.d.ts @@ -0,0 +1,270 @@ +/** + * @internal + * + * Give an input string, strictly parses a boolean value. + * + * @param value - The boolean string to parse. + * @returns true for "true", false for "false", otherwise an error is thrown. + */ +export declare const parseBoolean: (value: string) => boolean; +/** + * @internal + * + * Asserts a value is a boolean and returns it. + * Casts strings and numbers with a warning if there is evidence that they were + * intended to be booleans. + * + * @param value - A value that is expected to be a boolean. + * @returns The value if it's a boolean, undefined if it's null/undefined, + * otherwise an error is thrown. 
+ */ +export declare const expectBoolean: (value: any) => boolean | undefined; +/** + * @internal + * + * Asserts a value is a number and returns it. + * Casts strings with a warning if the string is a parseable number. + * This is to unblock slight API definition/implementation inconsistencies. + * + * @param value - A value that is expected to be a number. + * @returns The value if it's a number, undefined if it's null/undefined, + * otherwise an error is thrown. + */ +export declare const expectNumber: (value: any) => number | undefined; +/** + * @internal + * + * Asserts a value is a 32-bit float and returns it. + * + * @param value - A value that is expected to be a 32-bit float. + * @returns The value if it's a float, undefined if it's null/undefined, + * otherwise an error is thrown. + */ +export declare const expectFloat32: (value: any) => number | undefined; +/** + * @internal + * + * Asserts a value is an integer and returns it. + * + * @param value - A value that is expected to be an integer. + * @returns The value if it's an integer, undefined if it's null/undefined, + * otherwise an error is thrown. + */ +export declare const expectLong: (value: any) => number | undefined; +/** + * @internal + * + * @deprecated Use expectLong + */ +export declare const expectInt: (value: any) => number | undefined; +/** + * @internal + * + * Asserts a value is a 32-bit integer and returns it. + * + * @param value - A value that is expected to be an integer. + * @returns The value if it's an integer, undefined if it's null/undefined, + * otherwise an error is thrown. + */ +export declare const expectInt32: (value: any) => number | undefined; +/** + * @internal + * + * Asserts a value is a 16-bit integer and returns it. + * + * @param value - A value that is expected to be an integer. + * @returns The value if it's an integer, undefined if it's null/undefined, + * otherwise an error is thrown. 
+ */ +export declare const expectShort: (value: any) => number | undefined; +/** + * @internal + * + * Asserts a value is an 8-bit integer and returns it. + * + * @param value - A value that is expected to be an integer. + * @returns The value if it's an integer, undefined if it's null/undefined, + * otherwise an error is thrown. + */ +export declare const expectByte: (value: any) => number | undefined; +/** + * @internal + * + * Asserts a value is not null or undefined and returns it, or throws an error. + * + * @param value - A value that is expected to be defined + * @param location - The location where we're expecting to find a defined object (optional) + * @returns The value if it's not undefined, otherwise throws an error + */ +export declare const expectNonNull: (value: T | null | undefined, location?: string) => T; +/** + * @internal + * + * Asserts a value is an JSON-like object and returns it. This is expected to be used + * with values parsed from JSON (arrays, objects, numbers, strings, booleans). + * + * @param value - A value that is expected to be an object + * @returns The value if it's an object, undefined if it's null/undefined, + * otherwise an error is thrown. + */ +export declare const expectObject: (value: any) => Record | undefined; +/** + * @internal + * + * Asserts a value is a string and returns it. + * Numbers and boolean will be cast to strings with a warning. + * + * @param value - A value that is expected to be a string. + * @returns The value if it's a string, undefined if it's null/undefined, + * otherwise an error is thrown. + */ +export declare const expectString: (value: any) => string | undefined; +/** + * @internal + * + * Asserts a value is a JSON-like object with only one non-null/non-undefined key and + * returns it. + * + * @param value - A value that is expected to be an object with exactly one non-null, + * non-undefined key. 
+ * @returns the value if it's a union, undefined if it's null/undefined, otherwise + * an error is thrown. + */ +export declare const expectUnion: (value: unknown) => Record | undefined; +/** + * @internal + * + * Parses a value into a double. If the value is null or undefined, undefined + * will be returned. If the value is a string, it will be parsed by the standard + * parseFloat with one exception: NaN may only be explicitly set as the string + * "NaN", any implicit Nan values will result in an error being thrown. If any + * other type is provided, an exception will be thrown. + * + * @param value - A number or string representation of a double. + * @returns The value as a number, or undefined if it's null/undefined. + */ +export declare const strictParseDouble: (value: string | number) => number | undefined; +/** + * @internal + * + * @deprecated Use strictParseDouble + */ +export declare const strictParseFloat: (value: string | number) => number | undefined; +/** + * @internal + * + * Parses a value into a float. If the value is null or undefined, undefined + * will be returned. If the value is a string, it will be parsed by the standard + * parseFloat with one exception: NaN may only be explicitly set as the string + * "NaN", any implicit Nan values will result in an error being thrown. If any + * other type is provided, an exception will be thrown. + * + * @param value - A number or string representation of a float. + * @returns The value as a number, or undefined if it's null/undefined. + */ +export declare const strictParseFloat32: (value: string | number) => number | undefined; +/** + * @internal + * + * Asserts a value is a number and returns it. If the value is a string + * representation of a non-numeric number type (NaN, Infinity, -Infinity), + * the value will be parsed. Any other string value will result in an exception + * being thrown. Null or undefined will be returned as undefined. Any other + * type will result in an exception being thrown. 
+ * + * @param value - A number or string representation of a non-numeric float. + * @returns The value as a number, or undefined if it's null/undefined. + */ +export declare const limitedParseDouble: (value: string | number) => number | undefined; +/** + * @internal + * + * @deprecated Use limitedParseDouble + */ +export declare const handleFloat: (value: string | number) => number | undefined; +/** + * @internal + * + * @deprecated Use limitedParseDouble + */ +export declare const limitedParseFloat: (value: string | number) => number | undefined; +/** + * @internal + * + * Asserts a value is a 32-bit float and returns it. If the value is a string + * representation of a non-numeric number type (NaN, Infinity, -Infinity), + * the value will be parsed. Any other string value will result in an exception + * being thrown. Null or undefined will be returned as undefined. Any other + * type will result in an exception being thrown. + * + * @param value - A number or string representation of a non-numeric float. + * @returns The value as a number, or undefined if it's null/undefined. + */ +export declare const limitedParseFloat32: (value: string | number) => number | undefined; +/** + * @internal + * + * Parses a value into an integer. If the value is null or undefined, undefined + * will be returned. If the value is a string, it will be parsed by parseFloat + * and the result will be asserted to be an integer. If the parsed value is not + * an integer, or the raw value is any type other than a string or number, an + * exception will be thrown. + * + * @param value - A number or string representation of an integer. + * @returns The value as a number, or undefined if it's null/undefined. 
+ */ +export declare const strictParseLong: (value: string | number) => number | undefined; +/** + * @internal + * + * @deprecated Use strictParseLong + */ +export declare const strictParseInt: (value: string | number) => number | undefined; +/** + * @internal + * + * Parses a value into a 32-bit integer. If the value is null or undefined, undefined + * will be returned. If the value is a string, it will be parsed by parseFloat + * and the result will be asserted to be an integer. If the parsed value is not + * an integer, or the raw value is any type other than a string or number, an + * exception will be thrown. + * + * @param value - A number or string representation of a 32-bit integer. + * @returns The value as a number, or undefined if it's null/undefined. + */ +export declare const strictParseInt32: (value: string | number) => number | undefined; +/** + * @internal + * + * Parses a value into a 16-bit integer. If the value is null or undefined, undefined + * will be returned. If the value is a string, it will be parsed by parseFloat + * and the result will be asserted to be an integer. If the parsed value is not + * an integer, or the raw value is any type other than a string or number, an + * exception will be thrown. + * + * @param value - A number or string representation of a 16-bit integer. + * @returns The value as a number, or undefined if it's null/undefined. + */ +export declare const strictParseShort: (value: string | number) => number | undefined; +/** + * @internal + * + * Parses a value into an 8-bit integer. If the value is null or undefined, undefined + * will be returned. If the value is a string, it will be parsed by parseFloat + * and the result will be asserted to be an integer. If the parsed value is not + * an integer, or the raw value is any type other than a string or number, an + * exception will be thrown. + * + * @param value - A number or string representation of an 8-bit integer. 
+ * @returns The value as a number, or undefined if it's null/undefined. + */ +export declare const strictParseByte: (value: string | number) => number | undefined; +/** + * @internal + */ +export declare const logger: { + warn: { + (...data: any[]): void; + (message?: any, ...optionalParams: any[]): void; + }; +}; diff --git a/amplify/functions/fetchDocuments/node_modules/@smithy/smithy-client/dist-types/quote-header.d.ts b/amplify/functions/fetchDocuments/node_modules/@smithy/smithy-client/dist-types/quote-header.d.ts new file mode 100644 index 0000000..73d6c16 --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@smithy/smithy-client/dist-types/quote-header.d.ts @@ -0,0 +1,6 @@ +/** + * @public + * @param part - header list element + * @returns quoted string if part contains delimiter. + */ +export declare function quoteHeader(part: string): string; diff --git a/amplify/functions/fetchDocuments/node_modules/@smithy/smithy-client/dist-types/resolve-path.d.ts b/amplify/functions/fetchDocuments/node_modules/@smithy/smithy-client/dist-types/resolve-path.d.ts new file mode 100644 index 0000000..2a3204f --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@smithy/smithy-client/dist-types/resolve-path.d.ts @@ -0,0 +1,5 @@ +/** + * @internal + * Backwards compatibility re-export. + */ +export { resolvedPath } from "@smithy/core/protocols"; diff --git a/amplify/functions/fetchDocuments/node_modules/@smithy/smithy-client/dist-types/ser-utils.d.ts b/amplify/functions/fetchDocuments/node_modules/@smithy/smithy-client/dist-types/ser-utils.d.ts new file mode 100644 index 0000000..ae03c61 --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@smithy/smithy-client/dist-types/ser-utils.d.ts @@ -0,0 +1,15 @@ +/** + * @internal + * + * Serializes a number, turning non-numeric values into strings. + * + * @param value - The number to serialize. + * @returns A number, or a string if the given number was non-numeric. 
+ */ +export declare const serializeFloat: (value: number) => string | number; +/** + * @internal + * @param date - to be serialized. + * @returns https://smithy.io/2.0/spec/protocol-traits.html#timestampformat-trait date-time format. + */ +export declare const serializeDateTime: (date: Date) => string; diff --git a/amplify/functions/fetchDocuments/node_modules/@smithy/smithy-client/dist-types/serde-json.d.ts b/amplify/functions/fetchDocuments/node_modules/@smithy/smithy-client/dist-types/serde-json.d.ts new file mode 100644 index 0000000..96ac476 --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@smithy/smithy-client/dist-types/serde-json.d.ts @@ -0,0 +1,12 @@ +/** + * @internal + * + * Maps an object through the default JSON serde behavior. + * This means removing nullish fields and un-sparsifying lists. + * + * This is also used by Smithy RPCv2 CBOR as the default serde behavior. + * + * @param obj - to be checked. + * @returns same object with default serde behavior applied. + */ +export declare const _json: (obj: any) => any; diff --git a/amplify/functions/fetchDocuments/node_modules/@smithy/smithy-client/dist-types/split-every.d.ts b/amplify/functions/fetchDocuments/node_modules/@smithy/smithy-client/dist-types/split-every.d.ts new file mode 100644 index 0000000..45a0229 --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@smithy/smithy-client/dist-types/split-every.d.ts @@ -0,0 +1,11 @@ +/** + * @internal + * + * Given an input string, splits based on the delimiter after a given + * number of delimiters has been encountered. + * + * @param value - The input string to split. + * @param delimiter - The delimiter to split on. + * @param numDelimiters - The number of delimiters to have encountered to split. 
+ */ +export declare function splitEvery(value: string, delimiter: string, numDelimiters: number): Array; diff --git a/amplify/functions/fetchDocuments/node_modules/@smithy/smithy-client/dist-types/split-header.d.ts b/amplify/functions/fetchDocuments/node_modules/@smithy/smithy-client/dist-types/split-header.d.ts new file mode 100644 index 0000000..0f51651 --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@smithy/smithy-client/dist-types/split-header.d.ts @@ -0,0 +1,5 @@ +/** + * @param value - header string value. + * @returns value split by commas that aren't in quotes. + */ +export declare const splitHeader: (value: string) => string[]; diff --git a/amplify/functions/fetchDocuments/node_modules/@smithy/smithy-client/dist-types/ts3.4/NoOpLogger.d.ts b/amplify/functions/fetchDocuments/node_modules/@smithy/smithy-client/dist-types/ts3.4/NoOpLogger.d.ts new file mode 100644 index 0000000..a9a1062 --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@smithy/smithy-client/dist-types/ts3.4/NoOpLogger.d.ts @@ -0,0 +1,11 @@ +import { Logger } from "@smithy/types"; +/** + * @internal + */ +export declare class NoOpLogger implements Logger { + trace(): void; + debug(): void; + info(): void; + warn(): void; + error(): void; +} diff --git a/amplify/functions/fetchDocuments/node_modules/@smithy/smithy-client/dist-types/ts3.4/client.d.ts b/amplify/functions/fetchDocuments/node_modules/@smithy/smithy-client/dist-types/ts3.4/client.d.ts new file mode 100644 index 0000000..578541e --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@smithy/smithy-client/dist-types/ts3.4/client.d.ts @@ -0,0 +1,61 @@ +import { Client as IClient, Command, FetchHttpHandlerOptions, MetadataBearer, MiddlewareStack, NodeHttpHandlerOptions, RequestHandler } from "@smithy/types"; +/** + * @public + */ +export interface SmithyConfiguration { + requestHandler: RequestHandler | NodeHttpHandlerOptions | FetchHttpHandlerOptions | Record; + /** + * The API version set 
internally by the SDK, and is + * not planned to be used by customer code. + * @internal + */ + readonly apiVersion: string; + /** + * @public + * + * Default false. + * + * When true, the client will only resolve the middleware stack once per + * Command class. This means modifying the middlewareStack of the + * command or client after requests have been made will not be + * recognized. + * + * Calling client.destroy() also clears this cache. + * + * Enable this only if needing the additional time saved (0-1ms per request) + * and not needing middleware modifications between requests. + */ + cacheMiddleware?: boolean; +} +/** + * @internal + */ +export type SmithyResolvedConfiguration = { + requestHandler: RequestHandler; + readonly apiVersion: string; + cacheMiddleware?: boolean; +}; +/** + * @public + */ +export declare class Client> implements IClient { + readonly config: ResolvedClientConfiguration; + middlewareStack: MiddlewareStack; + /** + * Holds an object reference to the initial configuration object. + * Used to check that the config resolver stack does not create + * dangling instances of an intermediate form of the configuration object. + * + * @internal + */ + initConfig?: object; + /** + * May be used to cache the resolved handler function for a Command class. 
+ */ + private handlers?; + constructor(config: ResolvedClientConfiguration); + send(command: Command>, options?: HandlerOptions): Promise; + send(command: Command>, cb: (err: any, data?: OutputType) => void): void; + send(command: Command>, options: HandlerOptions, cb: (err: any, data?: OutputType) => void): void; + destroy(): void; +} diff --git a/amplify/functions/fetchDocuments/node_modules/@smithy/smithy-client/dist-types/ts3.4/collect-stream-body.d.ts b/amplify/functions/fetchDocuments/node_modules/@smithy/smithy-client/dist-types/ts3.4/collect-stream-body.d.ts new file mode 100644 index 0000000..c53a1e3 --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@smithy/smithy-client/dist-types/ts3.4/collect-stream-body.d.ts @@ -0,0 +1,5 @@ +/** + * @internal + * Backwards compatibility re-export. + */ +export { collectBody } from "@smithy/core/protocols"; diff --git a/amplify/functions/fetchDocuments/node_modules/@smithy/smithy-client/dist-types/ts3.4/command.d.ts b/amplify/functions/fetchDocuments/node_modules/@smithy/smithy-client/dist-types/ts3.4/command.d.ts new file mode 100644 index 0000000..8b42ff6 --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@smithy/smithy-client/dist-types/ts3.4/command.d.ts @@ -0,0 +1,113 @@ +import { EndpointParameterInstructions } from "@smithy/middleware-endpoint"; +import { Command as ICommand, Handler, HandlerExecutionContext, HttpRequest as IHttpRequest, HttpResponse as IHttpResponse, Logger, MetadataBearer, MiddlewareStack as IMiddlewareStack, OptionalParameter, Pluggable, RequestHandler, SerdeContext } from "@smithy/types"; +/** + * @public + */ +export declare abstract class Command implements ICommand { + abstract input: Input; + readonly middlewareStack: IMiddlewareStack; + /** + * Factory for Command ClassBuilder. 
+ * @internal + */ + static classBuilder; + }, SI extends object = any, SO extends MetadataBearer = any>(): ClassBuilder; + abstract resolveMiddleware(stack: IMiddlewareStack, configuration: ResolvedClientConfiguration, options: any): Handler; + /** + * @internal + */ + resolveMiddlewareWithContext(clientStack: IMiddlewareStack, configuration: { + logger: Logger; + requestHandler: RequestHandler; + }, options: any, { middlewareFn, clientName, commandName, inputFilterSensitiveLog, outputFilterSensitiveLog, smithyContext, additionalContext, CommandCtor, }: ResolveMiddlewareContextArgs): import("@smithy/types").InitializeHandler; +} +/** + * @internal + */ +type ResolveMiddlewareContextArgs = { + middlewareFn: (CommandCtor: any, clientStack: any, config: any, options: any) => Pluggable[]; + clientName: string; + commandName: string; + smithyContext: Record; + additionalContext: HandlerExecutionContext; + inputFilterSensitiveLog: (_: any) => any; + outputFilterSensitiveLog: (_: any) => any; + CommandCtor: any; +}; +/** + * @internal + */ +declare class ClassBuilder; +}, SI extends object = any, SO extends MetadataBearer = any> { + private _init; + private _ep; + private _middlewareFn; + private _commandName; + private _clientName; + private _additionalContext; + private _smithyContext; + private _inputFilterSensitiveLog; + private _outputFilterSensitiveLog; + private _serializer; + private _deserializer; + /** + * Optional init callback. + */ + init(cb: (_: Command) => void): void; + /** + * Set the endpoint parameter instructions. + */ + ep(endpointParameterInstructions: EndpointParameterInstructions): ClassBuilder; + /** + * Add any number of middleware. + */ + m(middlewareSupplier: (CommandCtor: any, clientStack: any, config: any, options: any) => Pluggable[]): ClassBuilder; + /** + * Set the initial handler execution context Smithy field. 
+ */ + s(service: string, operation: string, smithyContext?: Record): ClassBuilder; + /** + * Set the initial handler execution context. + */ + c(additionalContext?: HandlerExecutionContext): ClassBuilder; + /** + * Set constant string identifiers for the operation. + */ + n(clientName: string, commandName: string): ClassBuilder; + /** + * Set the input and output sensistive log filters. + */ + f(inputFilter?: (_: any) => any, outputFilter?: (_: any) => any): ClassBuilder; + /** + * Sets the serializer. + */ + ser(serializer: (input: I, context?: SerdeContext | any) => Promise): ClassBuilder; + /** + * Sets the deserializer. + */ + de(deserializer: (output: IHttpResponse, context?: SerdeContext | any) => Promise): ClassBuilder; + /** + * @returns a Command class with the classBuilder properties. + */ + build(): { + new (input: I): CommandImpl; + new (...[input]: OptionalParameter): CommandImpl; + getEndpointParameterInstructions(): EndpointParameterInstructions; + }; +} +/** + * A concrete implementation of ICommand with no abstract members. 
+ * @public + */ +export interface CommandImpl; +}, SI extends object = any, SO extends MetadataBearer = any> extends Command { + readonly input: I; + resolveMiddleware(stack: IMiddlewareStack, configuration: C, options: any): Handler; +} +export {}; diff --git a/amplify/functions/fetchDocuments/node_modules/@smithy/smithy-client/dist-types/ts3.4/constants.d.ts b/amplify/functions/fetchDocuments/node_modules/@smithy/smithy-client/dist-types/ts3.4/constants.d.ts new file mode 100644 index 0000000..eab978f --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@smithy/smithy-client/dist-types/ts3.4/constants.d.ts @@ -0,0 +1,4 @@ +/** + * @internal + */ +export declare const SENSITIVE_STRING = "***SensitiveInformation***"; diff --git a/amplify/functions/fetchDocuments/node_modules/@smithy/smithy-client/dist-types/ts3.4/create-aggregated-client.d.ts b/amplify/functions/fetchDocuments/node_modules/@smithy/smithy-client/dist-types/ts3.4/create-aggregated-client.d.ts new file mode 100644 index 0000000..ded1999 --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@smithy/smithy-client/dist-types/ts3.4/create-aggregated-client.d.ts @@ -0,0 +1,9 @@ +import { Client } from "./client"; +/** + * @internal + * + * @param commands - command lookup container. + * @param client - client instance on which to add aggregated methods. + * @returns an aggregated client with dynamically created methods. 
+ */ +export declare const createAggregatedClient: (commands: Record, Client: new (...args: any) => Client) => void; diff --git a/amplify/functions/fetchDocuments/node_modules/@smithy/smithy-client/dist-types/ts3.4/date-utils.d.ts b/amplify/functions/fetchDocuments/node_modules/@smithy/smithy-client/dist-types/ts3.4/date-utils.d.ts new file mode 100644 index 0000000..41071c2 --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@smithy/smithy-client/dist-types/ts3.4/date-utils.d.ts @@ -0,0 +1,73 @@ +/** + * @internal + * + * Builds a proper UTC HttpDate timestamp from a Date object + * since not all environments will have this as the expected + * format. + * + * @see {@link https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Global_Objects/Date/toUTCString} + * - Prior to ECMAScript 2018, the format of the return value + * - varied according to the platform. The most common return + * - value was an RFC-1123 formatted date stamp, which is a + * - slightly updated version of RFC-822 date stamps. + */ +export declare function dateToUtcString(date: Date): string; +/** + * @internal + * + * Parses a value into a Date. Returns undefined if the input is null or + * undefined, throws an error if the input is not a string that can be parsed + * as an RFC 3339 date. + * + * Input strings must conform to RFC3339 section 5.6, and cannot have a UTC + * offset. Fractional precision is supported. + * + * @see {@link https://xml2rfc.tools.ietf.org/public/rfc/html/rfc3339.html#anchor14} + * + * @param value - the value to parse + * @returns a Date or undefined + */ +export declare const parseRfc3339DateTime: (value: unknown) => Date | undefined; +/** + * @internal + * + * Parses a value into a Date. Returns undefined if the input is null or + * undefined, throws an error if the input is not a string that can be parsed + * as an RFC 3339 date. + * + * Input strings must conform to RFC3339 section 5.6, and can have a UTC + * offset. 
Fractional precision is supported. + * + * @see {@link https://xml2rfc.tools.ietf.org/public/rfc/html/rfc3339.html#anchor14} + * + * @param value - the value to parse + * @returns a Date or undefined + */ +export declare const parseRfc3339DateTimeWithOffset: (value: unknown) => Date | undefined; +/** + * @internal + * + * Parses a value into a Date. Returns undefined if the input is null or + * undefined, throws an error if the input is not a string that can be parsed + * as an RFC 7231 IMF-fixdate or obs-date. + * + * Input strings must conform to RFC7231 section 7.1.1.1. Fractional seconds are supported. + * + * @see {@link https://datatracker.ietf.org/doc/html/rfc7231.html#section-7.1.1.1} + * + * @param value - the value to parse + * @returns a Date or undefined + */ +export declare const parseRfc7231DateTime: (value: unknown) => Date | undefined; +/** + * @internal + * + * Parses a value into a Date. Returns undefined if the input is null or + * undefined, throws an error if the input is not a number or a parseable string. + * + * Input strings must be an integer or floating point number. Fractional seconds are supported. + * + * @param value - the value to parse + * @returns a Date or undefined + */ +export declare const parseEpochTimestamp: (value: unknown) => Date | undefined; diff --git a/amplify/functions/fetchDocuments/node_modules/@smithy/smithy-client/dist-types/ts3.4/default-error-handler.d.ts b/amplify/functions/fetchDocuments/node_modules/@smithy/smithy-client/dist-types/ts3.4/default-error-handler.d.ts new file mode 100644 index 0000000..e9852ba --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@smithy/smithy-client/dist-types/ts3.4/default-error-handler.d.ts @@ -0,0 +1,13 @@ +/** + * Always throws an error with the given `exceptionCtor` and other arguments. + * This is only called from an error handling code path. 
+ * + * @internal + */ +export declare const throwDefaultError: ({ output, parsedBody, exceptionCtor, errorCode }: any) => never; +/** + * @internal + * + * Creates {@link throwDefaultError} with bound ExceptionCtor. + */ +export declare const withBaseException: (ExceptionCtor: new (...args: any) => any) => any; diff --git a/amplify/functions/fetchDocuments/node_modules/@smithy/smithy-client/dist-types/ts3.4/defaults-mode.d.ts b/amplify/functions/fetchDocuments/node_modules/@smithy/smithy-client/dist-types/ts3.4/defaults-mode.d.ts new file mode 100644 index 0000000..c8a89ed --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@smithy/smithy-client/dist-types/ts3.4/defaults-mode.d.ts @@ -0,0 +1,28 @@ +/** + * @internal + */ +export declare const loadConfigsForDefaultMode: (mode: ResolvedDefaultsMode) => DefaultsModeConfigs; +/** + * Option determining how certain default configuration options are resolved in the SDK. It can be one of the value listed below: + * * `"standard"`:

The STANDARD mode provides the latest recommended default values that should be safe to run in most scenarios

Note that the default values vended from this mode might change as best practices may evolve. As a result, it is encouraged to perform tests when upgrading the SDK

+ * * `"in-region"`:

The IN_REGION mode builds on the standard mode and includes optimization tailored for applications which call AWS services from within the same AWS region

Note that the default values vended from this mode might change as best practices may evolve. As a result, it is encouraged to perform tests when upgrading the SDK

+ * * `"cross-region"`:

The CROSS_REGION mode builds on the standard mode and includes optimization tailored for applications which call AWS services in a different region

Note that the default values vended from this mode might change as best practices may evolve. As a result, it is encouraged to perform tests when upgrading the SDK

+ * * `"mobile"`:

The MOBILE mode builds on the standard mode and includes optimization tailored for mobile applications

Note that the default values vended from this mode might change as best practices may evolve. As a result, it is encouraged to perform tests when upgrading the SDK

+ * * `"auto"`:

The AUTO mode is an experimental mode that builds on the standard mode. The SDK will attempt to discover the execution environment to determine the appropriate settings automatically.

Note that the auto detection is heuristics-based and does not guarantee 100% accuracy. STANDARD mode will be used if the execution environment cannot be determined. The auto detection might query EC2 Instance Metadata service, which might introduce latency. Therefore we recommend choosing an explicit defaults_mode instead if startup latency is critical to your application

+ * * `"legacy"`:

The LEGACY mode provides default settings that vary per SDK and were used prior to establishment of defaults_mode

+ * + * @defaultValue "legacy" + */ +export type DefaultsMode = "standard" | "in-region" | "cross-region" | "mobile" | "auto" | "legacy"; +/** + * @internal + */ +export type ResolvedDefaultsMode = Exclude; +/** + * @internal + */ +export interface DefaultsModeConfigs { + retryMode?: string; + connectionTimeout?: number; + requestTimeout?: number; +} diff --git a/amplify/functions/fetchDocuments/node_modules/@smithy/smithy-client/dist-types/ts3.4/emitWarningIfUnsupportedVersion.d.ts b/amplify/functions/fetchDocuments/node_modules/@smithy/smithy-client/dist-types/ts3.4/emitWarningIfUnsupportedVersion.d.ts new file mode 100644 index 0000000..f0284ef --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@smithy/smithy-client/dist-types/ts3.4/emitWarningIfUnsupportedVersion.d.ts @@ -0,0 +1,8 @@ +/** + * @internal + * + * Emits warning if the provided Node.js version string is pending deprecation. + * + * @param version - The Node.js version string. + */ +export declare const emitWarningIfUnsupportedVersion: (version: string) => void; diff --git a/amplify/functions/fetchDocuments/node_modules/@smithy/smithy-client/dist-types/ts3.4/exceptions.d.ts b/amplify/functions/fetchDocuments/node_modules/@smithy/smithy-client/dist-types/ts3.4/exceptions.d.ts new file mode 100644 index 0000000..675354a --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@smithy/smithy-client/dist-types/ts3.4/exceptions.d.ts @@ -0,0 +1,42 @@ +import { HttpResponse, MetadataBearer, ResponseMetadata, RetryableTrait, SmithyException } from "@smithy/types"; +/** + * The type of the exception class constructor parameter. The returned type contains the properties + * in the `ExceptionType` but not in the `BaseExceptionType`. If the `BaseExceptionType` contains + * `$metadata` and `message` properties, it's also included in the returned type. 
+ * @internal + */ +export type ExceptionOptionType = Pick>>; +/** + * @public + */ +export interface ServiceExceptionOptions extends SmithyException, MetadataBearer { + message?: string; +} +/** + * @public + * + * Base exception class for the exceptions from the server-side. + */ +export declare class ServiceException extends Error implements SmithyException, MetadataBearer { + readonly $fault: "client" | "server"; + $response?: HttpResponse; + $retryable?: RetryableTrait; + $metadata: ResponseMetadata; + constructor(options: ServiceExceptionOptions); + /** + * Checks if a value is an instance of ServiceException (duck typed) + */ + static isInstance(value: unknown): value is ServiceException; + /** + * Custom instanceof check to support the operator for ServiceException base class + */ + static [Symbol.hasInstance](instance: unknown): boolean; +} +/** + * This method inject unmodeled member to a deserialized SDK exception, + * and load the error message from different possible keys('message', + * 'Message'). + * + * @internal + */ +export declare const decorateServiceException: (exception: E, additions?: Record) => E; diff --git a/amplify/functions/fetchDocuments/node_modules/@smithy/smithy-client/dist-types/ts3.4/extended-encode-uri-component.d.ts b/amplify/functions/fetchDocuments/node_modules/@smithy/smithy-client/dist-types/ts3.4/extended-encode-uri-component.d.ts new file mode 100644 index 0000000..4e510cf --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@smithy/smithy-client/dist-types/ts3.4/extended-encode-uri-component.d.ts @@ -0,0 +1,5 @@ +/** + * @internal + * Backwards compatibility re-export. 
+ */ +export { extendedEncodeURIComponent } from "@smithy/core/protocols"; diff --git a/amplify/functions/fetchDocuments/node_modules/@smithy/smithy-client/dist-types/ts3.4/extensions/checksum.d.ts b/amplify/functions/fetchDocuments/node_modules/@smithy/smithy-client/dist-types/ts3.4/extensions/checksum.d.ts new file mode 100644 index 0000000..c5f06b8 --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@smithy/smithy-client/dist-types/ts3.4/extensions/checksum.d.ts @@ -0,0 +1,24 @@ +import { ChecksumAlgorithm, ChecksumConfiguration, ChecksumConstructor, HashConstructor } from "@smithy/types"; +import { AlgorithmId } from "@smithy/types"; +export { AlgorithmId, ChecksumAlgorithm, ChecksumConfiguration }; +/** + * @internal + */ +export type PartialChecksumRuntimeConfigType = Partial<{ + sha256: ChecksumConstructor | HashConstructor; + md5: ChecksumConstructor | HashConstructor; + crc32: ChecksumConstructor | HashConstructor; + crc32c: ChecksumConstructor | HashConstructor; + sha1: ChecksumConstructor | HashConstructor; +}>; +/** + * @internal + */ +export declare const getChecksumConfiguration: (runtimeConfig: PartialChecksumRuntimeConfigType) => { + addChecksumAlgorithm(algo: ChecksumAlgorithm): void; + checksumAlgorithms(): ChecksumAlgorithm[]; +}; +/** + * @internal + */ +export declare const resolveChecksumRuntimeConfig: (clientConfig: ChecksumConfiguration) => PartialChecksumRuntimeConfigType; diff --git a/amplify/functions/fetchDocuments/node_modules/@smithy/smithy-client/dist-types/ts3.4/extensions/defaultExtensionConfiguration.d.ts b/amplify/functions/fetchDocuments/node_modules/@smithy/smithy-client/dist-types/ts3.4/extensions/defaultExtensionConfiguration.d.ts new file mode 100644 index 0000000..d8c05bb --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@smithy/smithy-client/dist-types/ts3.4/extensions/defaultExtensionConfiguration.d.ts @@ -0,0 +1,38 @@ +import { DefaultExtensionConfiguration } from "@smithy/types"; +import { 
PartialChecksumRuntimeConfigType } from "./checksum"; +import { PartialRetryRuntimeConfigType } from "./retry"; +/** + * @internal + */ +export type DefaultExtensionRuntimeConfigType = PartialRetryRuntimeConfigType & PartialChecksumRuntimeConfigType; +/** + * @internal + * + * Helper function to resolve default extension configuration from runtime config + */ +export declare const getDefaultExtensionConfiguration: (runtimeConfig: DefaultExtensionRuntimeConfigType) => { + addChecksumAlgorithm(algo: import("@smithy/types").ChecksumAlgorithm): void; + checksumAlgorithms(): import("@smithy/types").ChecksumAlgorithm[]; +} & { + setRetryStrategy(retryStrategy: import("@smithy/types").Provider): void; + retryStrategy(): import("@smithy/types").Provider; +}; +/** + * @deprecated use getDefaultExtensionConfiguration + * @internal + * + * Helper function to resolve default extension configuration from runtime config + */ +export declare const getDefaultClientConfiguration: (runtimeConfig: DefaultExtensionRuntimeConfigType) => { + addChecksumAlgorithm(algo: import("@smithy/types").ChecksumAlgorithm): void; + checksumAlgorithms(): import("@smithy/types").ChecksumAlgorithm[]; +} & { + setRetryStrategy(retryStrategy: import("@smithy/types").Provider): void; + retryStrategy(): import("@smithy/types").Provider; +}; +/** + * @internal + * + * Helper function to resolve runtime config from default extension configuration + */ +export declare const resolveDefaultRuntimeConfig: (config: DefaultExtensionConfiguration) => DefaultExtensionRuntimeConfigType; diff --git a/amplify/functions/fetchDocuments/node_modules/@smithy/smithy-client/dist-types/ts3.4/extensions/index.d.ts b/amplify/functions/fetchDocuments/node_modules/@smithy/smithy-client/dist-types/ts3.4/extensions/index.d.ts new file mode 100644 index 0000000..04e3c83 --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@smithy/smithy-client/dist-types/ts3.4/extensions/index.d.ts @@ -0,0 +1 @@ +export * from 
"./defaultExtensionConfiguration"; diff --git a/amplify/functions/fetchDocuments/node_modules/@smithy/smithy-client/dist-types/ts3.4/extensions/retry.d.ts b/amplify/functions/fetchDocuments/node_modules/@smithy/smithy-client/dist-types/ts3.4/extensions/retry.d.ts new file mode 100644 index 0000000..b41fa3c --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@smithy/smithy-client/dist-types/ts3.4/extensions/retry.d.ts @@ -0,0 +1,18 @@ +import { Provider, RetryStrategy, RetryStrategyConfiguration, RetryStrategyV2 } from "@smithy/types"; +/** + * @internal + */ +export type PartialRetryRuntimeConfigType = Partial<{ + retryStrategy: Provider; +}>; +/** + * @internal + */ +export declare const getRetryConfiguration: (runtimeConfig: PartialRetryRuntimeConfigType) => { + setRetryStrategy(retryStrategy: Provider): void; + retryStrategy(): Provider; +}; +/** + * @internal + */ +export declare const resolveRetryRuntimeConfig: (retryStrategyConfiguration: RetryStrategyConfiguration) => PartialRetryRuntimeConfigType; diff --git a/amplify/functions/fetchDocuments/node_modules/@smithy/smithy-client/dist-types/ts3.4/get-array-if-single-item.d.ts b/amplify/functions/fetchDocuments/node_modules/@smithy/smithy-client/dist-types/ts3.4/get-array-if-single-item.d.ts new file mode 100644 index 0000000..dbbd280 --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@smithy/smithy-client/dist-types/ts3.4/get-array-if-single-item.d.ts @@ -0,0 +1,7 @@ +/** + * @internal + * + * The XML parser will set one K:V for a member that could + * return multiple entries but only has one. 
+ */ +export declare const getArrayIfSingleItem: (mayBeArray: T) => T | T[]; diff --git a/amplify/functions/fetchDocuments/node_modules/@smithy/smithy-client/dist-types/ts3.4/get-value-from-text-node.d.ts b/amplify/functions/fetchDocuments/node_modules/@smithy/smithy-client/dist-types/ts3.4/get-value-from-text-node.d.ts new file mode 100644 index 0000000..d56771e --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@smithy/smithy-client/dist-types/ts3.4/get-value-from-text-node.d.ts @@ -0,0 +1,7 @@ +/** + * @internal + * + * Recursively parses object and populates value is node from + * "#text" key if it's available + */ +export declare const getValueFromTextNode: (obj: any) => any; diff --git a/amplify/functions/fetchDocuments/node_modules/@smithy/smithy-client/dist-types/ts3.4/index.d.ts b/amplify/functions/fetchDocuments/node_modules/@smithy/smithy-client/dist-types/ts3.4/index.d.ts new file mode 100644 index 0000000..684c977 --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@smithy/smithy-client/dist-types/ts3.4/index.d.ts @@ -0,0 +1,26 @@ +export { DocumentType, SdkError, SmithyException } from "@smithy/types"; +export * from "./client"; +export * from "./collect-stream-body"; +export * from "./command"; +export * from "./constants"; +export * from "./create-aggregated-client"; +export * from "./date-utils"; +export * from "./default-error-handler"; +export * from "./defaults-mode"; +export * from "./emitWarningIfUnsupportedVersion"; +export * from "./exceptions"; +export * from "./extended-encode-uri-component"; +export * from "./extensions"; +export * from "./get-array-if-single-item"; +export * from "./get-value-from-text-node"; +export * from "./is-serializable-header-value"; +export * from "./lazy-json"; +export * from "./NoOpLogger"; +export * from "./object-mapping"; +export * from "./parse-utils"; +export * from "./quote-header"; +export * from "./resolve-path"; +export * from "./ser-utils"; +export * from "./serde-json"; 
+export * from "./split-every"; +export * from "./split-header"; diff --git a/amplify/functions/fetchDocuments/node_modules/@smithy/smithy-client/dist-types/ts3.4/is-serializable-header-value.d.ts b/amplify/functions/fetchDocuments/node_modules/@smithy/smithy-client/dist-types/ts3.4/is-serializable-header-value.d.ts new file mode 100644 index 0000000..4d53109 --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@smithy/smithy-client/dist-types/ts3.4/is-serializable-header-value.d.ts @@ -0,0 +1,5 @@ +/** + * @internal + * @returns whether the header value is serializable. + */ +export declare const isSerializableHeaderValue: (value: any) => boolean; diff --git a/amplify/functions/fetchDocuments/node_modules/@smithy/smithy-client/dist-types/ts3.4/lazy-json.d.ts b/amplify/functions/fetchDocuments/node_modules/@smithy/smithy-client/dist-types/ts3.4/lazy-json.d.ts new file mode 100644 index 0000000..3a41bf3 --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@smithy/smithy-client/dist-types/ts3.4/lazy-json.d.ts @@ -0,0 +1,46 @@ +/** + * @public + * + * A model field with this type means that you may provide a JavaScript + * object in lieu of a JSON string, and it will be serialized to JSON + * automatically before being sent in a request. + * + * For responses, you will receive a "LazyJsonString", which is a boxed String object + * with additional mixin methods. + * To get the string value, call `.toString()`, or to get the JSON object value, + * call `.deserializeJSON()` or parse it yourself. + */ +export type AutomaticJsonStringConversion = Parameters[0] | LazyJsonString; +/** + * @internal + * + */ +export interface LazyJsonString extends String { + /** + * @returns the JSON parsing of the string value. + */ + deserializeJSON(): any; + /** + * @returns the original string value rather than a JSON.stringified value. 
+ */ + toJSON(): string; +} +/** + * @internal + * + * Extension of the native String class in the previous implementation + * has negative global performance impact on method dispatch for strings, + * and is generally discouraged. + * + * This current implementation may look strange, but is necessary to preserve the interface and + * behavior of extending the String class. + */ +export declare const LazyJsonString: { + (s: string): LazyJsonString; + new (s: string): LazyJsonString; + from(s: any): LazyJsonString; + /** + * @deprecated use #from. + */ + fromObject(s: any): LazyJsonString; +}; diff --git a/amplify/functions/fetchDocuments/node_modules/@smithy/smithy-client/dist-types/ts3.4/object-mapping.d.ts b/amplify/functions/fetchDocuments/node_modules/@smithy/smithy-client/dist-types/ts3.4/object-mapping.d.ts new file mode 100644 index 0000000..d658c16 --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@smithy/smithy-client/dist-types/ts3.4/object-mapping.d.ts @@ -0,0 +1,178 @@ +/** + * @internal + * + * A set of instructions for multiple keys. + * The aim is to provide a concise yet readable way to map and filter values + * onto a target object. 
+ * + * @example + * ```javascript + * const example: ObjectMappingInstructions = { + * lazyValue1: [, () => 1], + * lazyValue2: [, () => 2], + * lazyValue3: [, () => 3], + * lazyConditionalValue1: [() => true, () => 4], + * lazyConditionalValue2: [() => true, () => 5], + * lazyConditionalValue3: [true, () => 6], + * lazyConditionalValue4: [false, () => 44], + * lazyConditionalValue5: [() => false, () => 55], + * lazyConditionalValue6: ["", () => 66], + * simpleValue1: [, 7], + * simpleValue2: [, 8], + * simpleValue3: [, 9], + * conditionalValue1: [() => true, 10], + * conditionalValue2: [() => true, 11], + * conditionalValue3: [{}, 12], + * conditionalValue4: [false, 110], + * conditionalValue5: [() => false, 121], + * conditionalValue6: ["", 132], + * }; + * + * const exampleResult: Record = { + * lazyValue1: 1, + * lazyValue2: 2, + * lazyValue3: 3, + * lazyConditionalValue1: 4, + * lazyConditionalValue2: 5, + * lazyConditionalValue3: 6, + * simpleValue1: 7, + * simpleValue2: 8, + * simpleValue3: 9, + * conditionalValue1: 10, + * conditionalValue2: 11, + * conditionalValue3: 12, + * }; + * ``` + */ +export type ObjectMappingInstructions = Record; +/** + * @internal + * + * A variant of the object mapping instruction for the `take` function. + * In this case, the source value is provided to the value function, turning it + * from a supplier into a mapper. + */ +export type SourceMappingInstructions = Record; +/** + * @internal + * + * An instruction set for assigning a value to a target object. 
+ */ +export type ObjectMappingInstruction = LazyValueInstruction | ConditionalLazyValueInstruction | SimpleValueInstruction | ConditionalValueInstruction | UnfilteredValue; +/** + * @internal + * + * non-array + */ +export type UnfilteredValue = any; +/** + * @internal + */ +export type LazyValueInstruction = [ + FilterStatus, + ValueSupplier +]; +/** + * @internal + */ +export type ConditionalLazyValueInstruction = [ + FilterStatusSupplier, + ValueSupplier +]; +/** + * @internal + */ +export type SimpleValueInstruction = [ + FilterStatus, + Value +]; +/** + * @internal + */ +export type ConditionalValueInstruction = [ + ValueFilteringFunction, + Value +]; +/** + * @internal + */ +export type SourceMappingInstruction = [ + (ValueFilteringFunction | FilterStatus)?, + ValueMapper?, + string? +]; +/** + * @internal + * + * Filter is considered passed if + * 1. It is a boolean true. + * 2. It is not undefined and is itself truthy. + * 3. It is undefined and the corresponding _value_ is neither null nor undefined. + */ +export type FilterStatus = boolean | unknown | void; +/** + * @internal + * + * Supplies the filter check but not against any value as input. + */ +export type FilterStatusSupplier = () => boolean; +/** + * @internal + * + * Filter check with the given value. + */ +export type ValueFilteringFunction = (value: any) => boolean; +/** + * @internal + * + * Supplies the value for lazy evaluation. + */ +export type ValueSupplier = () => any; +/** + * @internal + * + * A function that maps the source value to the target value. + * Defaults to pass-through with nullish check. + */ +export type ValueMapper = (value: any) => any; +/** + * @internal + * + * A non-function value. + */ +export type Value = any; +/** + * @internal + * Internal/Private, for codegen use only. + * + * Transfer a set of keys from [instructions] to [target]. + * + * For each instruction in the record, the target key will be the instruction key. 
+ * The target assignment will be conditional on the instruction's filter. + * The target assigned value will be supplied by the instructions as an evaluable function or non-function value. + * + * @see ObjectMappingInstructions for an example. + */ +export declare function map(target: any, filter: (value: any) => boolean, instructions: Record): typeof target; +/** + * @internal + */ +export declare function map(instructions: ObjectMappingInstructions): any; +/** + * @internal + */ +export declare function map(target: any, instructions: ObjectMappingInstructions): typeof target; +/** + * Convert a regular object `{ k: v }` to `{ k: [, v] }` mapping instruction set with default + * filter. + * + * @internal + */ +export declare const convertMap: (target: any) => Record; +/** + * @param source - original object with data. + * @param instructions - how to map the data. + * @returns new object mapped from the source object. + * @internal + */ +export declare const take: (source: any, instructions: SourceMappingInstructions) => any; diff --git a/amplify/functions/fetchDocuments/node_modules/@smithy/smithy-client/dist-types/ts3.4/parse-utils.d.ts b/amplify/functions/fetchDocuments/node_modules/@smithy/smithy-client/dist-types/ts3.4/parse-utils.d.ts new file mode 100644 index 0000000..e4c8aef --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@smithy/smithy-client/dist-types/ts3.4/parse-utils.d.ts @@ -0,0 +1,270 @@ +/** + * @internal + * + * Give an input string, strictly parses a boolean value. + * + * @param value - The boolean string to parse. + * @returns true for "true", false for "false", otherwise an error is thrown. + */ +export declare const parseBoolean: (value: string) => boolean; +/** + * @internal + * + * Asserts a value is a boolean and returns it. + * Casts strings and numbers with a warning if there is evidence that they were + * intended to be booleans. + * + * @param value - A value that is expected to be a boolean. 
+ * @returns The value if it's a boolean, undefined if it's null/undefined, + * otherwise an error is thrown. + */ +export declare const expectBoolean: (value: any) => boolean | undefined; +/** + * @internal + * + * Asserts a value is a number and returns it. + * Casts strings with a warning if the string is a parseable number. + * This is to unblock slight API definition/implementation inconsistencies. + * + * @param value - A value that is expected to be a number. + * @returns The value if it's a number, undefined if it's null/undefined, + * otherwise an error is thrown. + */ +export declare const expectNumber: (value: any) => number | undefined; +/** + * @internal + * + * Asserts a value is a 32-bit float and returns it. + * + * @param value - A value that is expected to be a 32-bit float. + * @returns The value if it's a float, undefined if it's null/undefined, + * otherwise an error is thrown. + */ +export declare const expectFloat32: (value: any) => number | undefined; +/** + * @internal + * + * Asserts a value is an integer and returns it. + * + * @param value - A value that is expected to be an integer. + * @returns The value if it's an integer, undefined if it's null/undefined, + * otherwise an error is thrown. + */ +export declare const expectLong: (value: any) => number | undefined; +/** + * @internal + * + * @deprecated Use expectLong + */ +export declare const expectInt: (value: any) => number | undefined; +/** + * @internal + * + * Asserts a value is a 32-bit integer and returns it. + * + * @param value - A value that is expected to be an integer. + * @returns The value if it's an integer, undefined if it's null/undefined, + * otherwise an error is thrown. + */ +export declare const expectInt32: (value: any) => number | undefined; +/** + * @internal + * + * Asserts a value is a 16-bit integer and returns it. + * + * @param value - A value that is expected to be an integer. 
+ * @returns The value if it's an integer, undefined if it's null/undefined, + * otherwise an error is thrown. + */ +export declare const expectShort: (value: any) => number | undefined; +/** + * @internal + * + * Asserts a value is an 8-bit integer and returns it. + * + * @param value - A value that is expected to be an integer. + * @returns The value if it's an integer, undefined if it's null/undefined, + * otherwise an error is thrown. + */ +export declare const expectByte: (value: any) => number | undefined; +/** + * @internal + * + * Asserts a value is not null or undefined and returns it, or throws an error. + * + * @param value - A value that is expected to be defined + * @param location - The location where we're expecting to find a defined object (optional) + * @returns The value if it's not undefined, otherwise throws an error + */ +export declare const expectNonNull: (value: T | null | undefined, location?: string) => T; +/** + * @internal + * + * Asserts a value is an JSON-like object and returns it. This is expected to be used + * with values parsed from JSON (arrays, objects, numbers, strings, booleans). + * + * @param value - A value that is expected to be an object + * @returns The value if it's an object, undefined if it's null/undefined, + * otherwise an error is thrown. + */ +export declare const expectObject: (value: any) => Record | undefined; +/** + * @internal + * + * Asserts a value is a string and returns it. + * Numbers and boolean will be cast to strings with a warning. + * + * @param value - A value that is expected to be a string. + * @returns The value if it's a string, undefined if it's null/undefined, + * otherwise an error is thrown. + */ +export declare const expectString: (value: any) => string | undefined; +/** + * @internal + * + * Asserts a value is a JSON-like object with only one non-null/non-undefined key and + * returns it. 
+ * + * @param value - A value that is expected to be an object with exactly one non-null, + * non-undefined key. + * @returns the value if it's a union, undefined if it's null/undefined, otherwise + * an error is thrown. + */ +export declare const expectUnion: (value: unknown) => Record | undefined; +/** + * @internal + * + * Parses a value into a double. If the value is null or undefined, undefined + * will be returned. If the value is a string, it will be parsed by the standard + * parseFloat with one exception: NaN may only be explicitly set as the string + * "NaN", any implicit Nan values will result in an error being thrown. If any + * other type is provided, an exception will be thrown. + * + * @param value - A number or string representation of a double. + * @returns The value as a number, or undefined if it's null/undefined. + */ +export declare const strictParseDouble: (value: string | number) => number | undefined; +/** + * @internal + * + * @deprecated Use strictParseDouble + */ +export declare const strictParseFloat: (value: string | number) => number | undefined; +/** + * @internal + * + * Parses a value into a float. If the value is null or undefined, undefined + * will be returned. If the value is a string, it will be parsed by the standard + * parseFloat with one exception: NaN may only be explicitly set as the string + * "NaN", any implicit Nan values will result in an error being thrown. If any + * other type is provided, an exception will be thrown. + * + * @param value - A number or string representation of a float. + * @returns The value as a number, or undefined if it's null/undefined. + */ +export declare const strictParseFloat32: (value: string | number) => number | undefined; +/** + * @internal + * + * Asserts a value is a number and returns it. If the value is a string + * representation of a non-numeric number type (NaN, Infinity, -Infinity), + * the value will be parsed. 
Any other string value will result in an exception + * being thrown. Null or undefined will be returned as undefined. Any other + * type will result in an exception being thrown. + * + * @param value - A number or string representation of a non-numeric float. + * @returns The value as a number, or undefined if it's null/undefined. + */ +export declare const limitedParseDouble: (value: string | number) => number | undefined; +/** + * @internal + * + * @deprecated Use limitedParseDouble + */ +export declare const handleFloat: (value: string | number) => number | undefined; +/** + * @internal + * + * @deprecated Use limitedParseDouble + */ +export declare const limitedParseFloat: (value: string | number) => number | undefined; +/** + * @internal + * + * Asserts a value is a 32-bit float and returns it. If the value is a string + * representation of a non-numeric number type (NaN, Infinity, -Infinity), + * the value will be parsed. Any other string value will result in an exception + * being thrown. Null or undefined will be returned as undefined. Any other + * type will result in an exception being thrown. + * + * @param value - A number or string representation of a non-numeric float. + * @returns The value as a number, or undefined if it's null/undefined. + */ +export declare const limitedParseFloat32: (value: string | number) => number | undefined; +/** + * @internal + * + * Parses a value into an integer. If the value is null or undefined, undefined + * will be returned. If the value is a string, it will be parsed by parseFloat + * and the result will be asserted to be an integer. If the parsed value is not + * an integer, or the raw value is any type other than a string or number, an + * exception will be thrown. + * + * @param value - A number or string representation of an integer. + * @returns The value as a number, or undefined if it's null/undefined. 
+ */ +export declare const strictParseLong: (value: string | number) => number | undefined; +/** + * @internal + * + * @deprecated Use strictParseLong + */ +export declare const strictParseInt: (value: string | number) => number | undefined; +/** + * @internal + * + * Parses a value into a 32-bit integer. If the value is null or undefined, undefined + * will be returned. If the value is a string, it will be parsed by parseFloat + * and the result will be asserted to be an integer. If the parsed value is not + * an integer, or the raw value is any type other than a string or number, an + * exception will be thrown. + * + * @param value - A number or string representation of a 32-bit integer. + * @returns The value as a number, or undefined if it's null/undefined. + */ +export declare const strictParseInt32: (value: string | number) => number | undefined; +/** + * @internal + * + * Parses a value into a 16-bit integer. If the value is null or undefined, undefined + * will be returned. If the value is a string, it will be parsed by parseFloat + * and the result will be asserted to be an integer. If the parsed value is not + * an integer, or the raw value is any type other than a string or number, an + * exception will be thrown. + * + * @param value - A number or string representation of a 16-bit integer. + * @returns The value as a number, or undefined if it's null/undefined. + */ +export declare const strictParseShort: (value: string | number) => number | undefined; +/** + * @internal + * + * Parses a value into an 8-bit integer. If the value is null or undefined, undefined + * will be returned. If the value is a string, it will be parsed by parseFloat + * and the result will be asserted to be an integer. If the parsed value is not + * an integer, or the raw value is any type other than a string or number, an + * exception will be thrown. + * + * @param value - A number or string representation of an 8-bit integer. 
+ * @returns The value as a number, or undefined if it's null/undefined. + */ +export declare const strictParseByte: (value: string | number) => number | undefined; +/** + * @internal + */ +export declare const logger: { + warn: { + (...data: any[]): void; + (message?: any, ...optionalParams: any[]): void; + }; +}; diff --git a/amplify/functions/fetchDocuments/node_modules/@smithy/smithy-client/dist-types/ts3.4/quote-header.d.ts b/amplify/functions/fetchDocuments/node_modules/@smithy/smithy-client/dist-types/ts3.4/quote-header.d.ts new file mode 100644 index 0000000..c2f12e9 --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@smithy/smithy-client/dist-types/ts3.4/quote-header.d.ts @@ -0,0 +1,6 @@ +/** + * @public + * @param part - header list element + * @returns quoted string if part contains delimiter. + */ +export declare function quoteHeader(part: string): string; diff --git a/amplify/functions/fetchDocuments/node_modules/@smithy/smithy-client/dist-types/ts3.4/resolve-path.d.ts b/amplify/functions/fetchDocuments/node_modules/@smithy/smithy-client/dist-types/ts3.4/resolve-path.d.ts new file mode 100644 index 0000000..5432be7 --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@smithy/smithy-client/dist-types/ts3.4/resolve-path.d.ts @@ -0,0 +1,5 @@ +/** + * @internal + * Backwards compatibility re-export. + */ +export { resolvedPath } from "@smithy/core/protocols"; diff --git a/amplify/functions/fetchDocuments/node_modules/@smithy/smithy-client/dist-types/ts3.4/ser-utils.d.ts b/amplify/functions/fetchDocuments/node_modules/@smithy/smithy-client/dist-types/ts3.4/ser-utils.d.ts new file mode 100644 index 0000000..355f829 --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@smithy/smithy-client/dist-types/ts3.4/ser-utils.d.ts @@ -0,0 +1,15 @@ +/** + * @internal + * + * Serializes a number, turning non-numeric values into strings. + * + * @param value - The number to serialize. 
+ * @returns A number, or a string if the given number was non-numeric. + */ +export declare const serializeFloat: (value: number) => string | number; +/** + * @internal + * @param date - to be serialized. + * @returns https://smithy.io/2.0/spec/protocol-traits.html#timestampformat-trait date-time format. + */ +export declare const serializeDateTime: (date: Date) => string; diff --git a/amplify/functions/fetchDocuments/node_modules/@smithy/smithy-client/dist-types/ts3.4/serde-json.d.ts b/amplify/functions/fetchDocuments/node_modules/@smithy/smithy-client/dist-types/ts3.4/serde-json.d.ts new file mode 100644 index 0000000..499409f --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@smithy/smithy-client/dist-types/ts3.4/serde-json.d.ts @@ -0,0 +1,12 @@ +/** + * @internal + * + * Maps an object through the default JSON serde behavior. + * This means removing nullish fields and un-sparsifying lists. + * + * This is also used by Smithy RPCv2 CBOR as the default serde behavior. + * + * @param obj - to be checked. + * @returns same object with default serde behavior applied. + */ +export declare const _json: (obj: any) => any; diff --git a/amplify/functions/fetchDocuments/node_modules/@smithy/smithy-client/dist-types/ts3.4/split-every.d.ts b/amplify/functions/fetchDocuments/node_modules/@smithy/smithy-client/dist-types/ts3.4/split-every.d.ts new file mode 100644 index 0000000..2280f3e --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@smithy/smithy-client/dist-types/ts3.4/split-every.d.ts @@ -0,0 +1,11 @@ +/** + * @internal + * + * Given an input string, splits based on the delimiter after a given + * number of delimiters has been encountered. + * + * @param value - The input string to split. + * @param delimiter - The delimiter to split on. + * @param numDelimiters - The number of delimiters to have encountered to split. 
+ */ +export declare function splitEvery(value: string, delimiter: string, numDelimiters: number): Array; diff --git a/amplify/functions/fetchDocuments/node_modules/@smithy/smithy-client/dist-types/ts3.4/split-header.d.ts b/amplify/functions/fetchDocuments/node_modules/@smithy/smithy-client/dist-types/ts3.4/split-header.d.ts new file mode 100644 index 0000000..7cf54c6 --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@smithy/smithy-client/dist-types/ts3.4/split-header.d.ts @@ -0,0 +1,5 @@ +/** + * @param value - header string value. + * @returns value split by commas that aren't in quotes. + */ +export declare const splitHeader: (value: string) => string[]; diff --git a/amplify/functions/fetchDocuments/node_modules/@smithy/smithy-client/package.json b/amplify/functions/fetchDocuments/node_modules/@smithy/smithy-client/package.json new file mode 100644 index 0000000..cbab653 --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@smithy/smithy-client/package.json @@ -0,0 +1,67 @@ +{ + "name": "@smithy/smithy-client", + "version": "4.2.2", + "scripts": { + "build": "concurrently 'yarn:build:cjs' 'yarn:build:es' 'yarn:build:types && yarn build:types:downlevel'", + "build:cjs": "node ../../scripts/inline smithy-client", + "build:es": "yarn g:tsc -p tsconfig.es.json", + "build:types": "yarn g:tsc -p tsconfig.types.json", + "build:types:downlevel": "rimraf dist-types/ts3.4 && downlevel-dts dist-types dist-types/ts3.4", + "stage-release": "rimraf ./.release && yarn pack && mkdir ./.release && tar zxvf ./package.tgz --directory ./.release && rm ./package.tgz", + "clean": "rimraf ./dist-* && rimraf *.tsbuildinfo || exit 0", + "lint": "eslint -c ../../.eslintrc.js \"src/**/*.ts\"", + "format": "prettier --config ../../prettier.config.js --ignore-path ../../.prettierignore --write \"**/*.{ts,md,json}\"", + "extract:docs": "api-extractor run --local", + "test": "yarn g:vitest run", + "test:watch": "yarn g:vitest watch" + }, + "main": 
"./dist-cjs/index.js", + "module": "./dist-es/index.js", + "types": "./dist-types/index.d.ts", + "author": { + "name": "AWS SDK for JavaScript Team", + "url": "https://aws.amazon.com/javascript/" + }, + "license": "Apache-2.0", + "dependencies": { + "@smithy/core": "^3.3.1", + "@smithy/middleware-endpoint": "^4.1.2", + "@smithy/middleware-stack": "^4.0.2", + "@smithy/protocol-http": "^5.1.0", + "@smithy/types": "^4.2.0", + "@smithy/util-stream": "^4.2.0", + "tslib": "^2.6.2" + }, + "engines": { + "node": ">=18.0.0" + }, + "typesVersions": { + "<4.0": { + "dist-types/*": [ + "dist-types/ts3.4/*" + ] + } + }, + "files": [ + "dist-*/**" + ], + "homepage": "https://github.com/smithy-lang/smithy-typescript/tree/main/packages/smithy-client", + "repository": { + "type": "git", + "url": "https://github.com/smithy-lang/smithy-typescript.git", + "directory": "packages/smithy-client" + }, + "devDependencies": { + "@types/node": "^18.11.9", + "concurrently": "7.0.0", + "downlevel-dts": "0.10.1", + "rimraf": "3.0.2", + "typedoc": "0.23.23" + }, + "typedoc": { + "entryPoint": "src/index.ts" + }, + "publishConfig": { + "directory": ".release/package" + } +} \ No newline at end of file diff --git a/amplify/functions/fetchDocuments/node_modules/@smithy/types/LICENSE b/amplify/functions/fetchDocuments/node_modules/@smithy/types/LICENSE new file mode 100644 index 0000000..e907b58 --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@smithy/types/LICENSE @@ -0,0 +1,201 @@ + Apache License + Version 2.0, January 2004 + http://www.apache.org/licenses/ + + TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION + + 1. Definitions. + + "License" shall mean the terms and conditions for use, reproduction, + and distribution as defined by Sections 1 through 9 of this document. + + "Licensor" shall mean the copyright owner or entity authorized by + the copyright owner that is granting the License. 
+ + "Legal Entity" shall mean the union of the acting entity and all + other entities that control, are controlled by, or are under common + control with that entity. For the purposes of this definition, + "control" means (i) the power, direct or indirect, to cause the + direction or management of such entity, whether by contract or + otherwise, or (ii) ownership of fifty percent (50%) or more of the + outstanding shares, or (iii) beneficial ownership of such entity. + + "You" (or "Your") shall mean an individual or Legal Entity + exercising permissions granted by this License. + + "Source" form shall mean the preferred form for making modifications, + including but not limited to software source code, documentation + source, and configuration files. + + "Object" form shall mean any form resulting from mechanical + transformation or translation of a Source form, including but + not limited to compiled object code, generated documentation, + and conversions to other media types. + + "Work" shall mean the work of authorship, whether in Source or + Object form, made available under the License, as indicated by a + copyright notice that is included in or attached to the work + (an example is provided in the Appendix below). + + "Derivative Works" shall mean any work, whether in Source or Object + form, that is based on (or derived from) the Work and for which the + editorial revisions, annotations, elaborations, or other modifications + represent, as a whole, an original work of authorship. For the purposes + of this License, Derivative Works shall not include works that remain + separable from, or merely link (or bind by name) to the interfaces of, + the Work and Derivative Works thereof. 
+ + "Contribution" shall mean any work of authorship, including + the original version of the Work and any modifications or additions + to that Work or Derivative Works thereof, that is intentionally + submitted to Licensor for inclusion in the Work by the copyright owner + or by an individual or Legal Entity authorized to submit on behalf of + the copyright owner. For the purposes of this definition, "submitted" + means any form of electronic, verbal, or written communication sent + to the Licensor or its representatives, including but not limited to + communication on electronic mailing lists, source code control systems, + and issue tracking systems that are managed by, or on behalf of, the + Licensor for the purpose of discussing and improving the Work, but + excluding communication that is conspicuously marked or otherwise + designated in writing by the copyright owner as "Not a Contribution." + + "Contributor" shall mean Licensor and any individual or Legal Entity + on behalf of whom a Contribution has been received by Licensor and + subsequently incorporated within the Work. + + 2. Grant of Copyright License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + copyright license to reproduce, prepare Derivative Works of, + publicly display, publicly perform, sublicense, and distribute the + Work and such Derivative Works in Source or Object form. + + 3. Grant of Patent License. 
Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + (except as stated in this section) patent license to make, have made, + use, offer to sell, sell, import, and otherwise transfer the Work, + where such license applies only to those patent claims licensable + by such Contributor that are necessarily infringed by their + Contribution(s) alone or by combination of their Contribution(s) + with the Work to which such Contribution(s) was submitted. If You + institute patent litigation against any entity (including a + cross-claim or counterclaim in a lawsuit) alleging that the Work + or a Contribution incorporated within the Work constitutes direct + or contributory patent infringement, then any patent licenses + granted to You under this License for that Work shall terminate + as of the date such litigation is filed. + + 4. Redistribution. You may reproduce and distribute copies of the + Work or Derivative Works thereof in any medium, with or without + modifications, and in Source or Object form, provided that You + meet the following conditions: + + (a) You must give any other recipients of the Work or + Derivative Works a copy of this License; and + + (b) You must cause any modified files to carry prominent notices + stating that You changed the files; and + + (c) You must retain, in the Source form of any Derivative Works + that You distribute, all copyright, patent, trademark, and + attribution notices from the Source form of the Work, + excluding those notices that do not pertain to any part of + the Derivative Works; and + + (d) If the Work includes a "NOTICE" text file as part of its + distribution, then any Derivative Works that You distribute must + include a readable copy of the attribution notices contained + within such NOTICE file, excluding those notices that do not + pertain to any part of the Derivative Works, in at least one + of 
the following places: within a NOTICE text file distributed + as part of the Derivative Works; within the Source form or + documentation, if provided along with the Derivative Works; or, + within a display generated by the Derivative Works, if and + wherever such third-party notices normally appear. The contents + of the NOTICE file are for informational purposes only and + do not modify the License. You may add Your own attribution + notices within Derivative Works that You distribute, alongside + or as an addendum to the NOTICE text from the Work, provided + that such additional attribution notices cannot be construed + as modifying the License. + + You may add Your own copyright statement to Your modifications and + may provide additional or different license terms and conditions + for use, reproduction, or distribution of Your modifications, or + for any such Derivative Works as a whole, provided Your use, + reproduction, and distribution of the Work otherwise complies with + the conditions stated in this License. + + 5. Submission of Contributions. Unless You explicitly state otherwise, + any Contribution intentionally submitted for inclusion in the Work + by You to the Licensor shall be under the terms and conditions of + this License, without any additional terms or conditions. + Notwithstanding the above, nothing herein shall supersede or modify + the terms of any separate license agreement you may have executed + with Licensor regarding such Contributions. + + 6. Trademarks. This License does not grant permission to use the trade + names, trademarks, service marks, or product names of the Licensor, + except as required for reasonable and customary use in describing the + origin of the Work and reproducing the content of the NOTICE file. + + 7. Disclaimer of Warranty. 
Unless required by applicable law or + agreed to in writing, Licensor provides the Work (and each + Contributor provides its Contributions) on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or + implied, including, without limitation, any warranties or conditions + of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A + PARTICULAR PURPOSE. You are solely responsible for determining the + appropriateness of using or redistributing the Work and assume any + risks associated with Your exercise of permissions under this License. + + 8. Limitation of Liability. In no event and under no legal theory, + whether in tort (including negligence), contract, or otherwise, + unless required by applicable law (such as deliberate and grossly + negligent acts) or agreed to in writing, shall any Contributor be + liable to You for damages, including any direct, indirect, special, + incidental, or consequential damages of any character arising as a + result of this License or out of the use or inability to use the + Work (including but not limited to damages for loss of goodwill, + work stoppage, computer failure or malfunction, or any and all + other commercial damages or losses), even if such Contributor + has been advised of the possibility of such damages. + + 9. Accepting Warranty or Additional Liability. While redistributing + the Work or Derivative Works thereof, You may choose to offer, + and charge a fee for, acceptance of support, warranty, indemnity, + or other liability obligations and/or rights consistent with this + License. However, in accepting such obligations, You may act only + on Your own behalf and on Your sole responsibility, not on behalf + of any other Contributor, and only if You agree to indemnify, + defend, and hold each Contributor harmless for any liability + incurred by, or claims asserted against, such Contributor by reason + of your accepting any such warranty or additional liability. 
+ + END OF TERMS AND CONDITIONS + + APPENDIX: How to apply the Apache License to your work. + + To apply the Apache License to your work, attach the following + boilerplate notice, with the fields enclosed by brackets "{}" + replaced with your own identifying information. (Don't include + the brackets!) The text should be enclosed in the appropriate + comment syntax for the file format. We also recommend that a + file or class name and description of purpose be included on the + same "printed page" as the copyright notice for easier + identification within third-party archives. + + Copyright 2019 Amazon.com, Inc. or its affiliates. All Rights Reserved. + + Licensed under the Apache License, Version 2.0 (the "License"); + you may not use this file except in compliance with the License. + You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + + Unless required by applicable law or agreed to in writing, software + distributed under the License is distributed on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + See the License for the specific language governing permissions and + limitations under the License. diff --git a/amplify/functions/fetchDocuments/node_modules/@smithy/types/README.md b/amplify/functions/fetchDocuments/node_modules/@smithy/types/README.md new file mode 100644 index 0000000..7ab3ccd --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@smithy/types/README.md @@ -0,0 +1,115 @@ +# @smithy/types + +[![NPM version](https://img.shields.io/npm/v/@smithy/types/latest.svg)](https://www.npmjs.com/package/@smithy/types) +[![NPM downloads](https://img.shields.io/npm/dm/@smithy/types.svg)](https://www.npmjs.com/package/@smithy/types) + +## Usage + +This package is mostly used internally by generated clients. +Some public components have independent applications. 
+ +--- + +### Scenario: Removing `| undefined` from input and output structures + +Generated shapes' members are unioned with `undefined` for +input shapes, and are `?` (optional) for output shapes. + +- for inputs, this defers the validation to the service. +- for outputs, this strongly suggests that you should runtime-check the output data. + +If you would like to skip these steps, use the `AssertiveClient` or +`UncheckedClient` type helpers. + +Using AWS S3 as an example: + +```ts +import { S3 } from "@aws-sdk/client-s3"; +import type { AssertiveClient, UncheckedClient } from "@smithy/types"; + +const s3a = new S3({}) as AssertiveClient; +const s3b = new S3({}) as UncheckedClient; + +// AssertiveClient enforces required inputs are not undefined +// and required outputs are not undefined. +const get = await s3a.getObject({ + Bucket: "", + // @ts-expect-error (undefined not assignable to string) + Key: undefined, +}); + +// UncheckedClient makes output fields non-nullable. +// You should still perform type checks as you deem +// necessary, but the SDK will no longer prompt you +// with nullability errors. +const body = await ( + await s3b.getObject({ + Bucket: "", + Key: "", + }) +).Body.transformToString(); +``` + +When using the transform on non-aggregated client with the `Command` syntax, +the input cannot be validated because it goes through another class. + +```ts +import { S3Client, ListBucketsCommand, GetObjectCommand, GetObjectCommandInput } from "@aws-sdk/client-s3"; +import type { AssertiveClient, UncheckedClient, NoUndefined } from "@smithy/types"; + +const s3 = new S3Client({}) as UncheckedClient; + +const list = await s3.send( + new ListBucketsCommand({ + // command inputs are not validated by the type transform. + // because this is a separate class. + }) +); + +/** + * Although less ergonomic, you can use the NoUndefined + * transform on the input type. 
+ */ +const getObjectInput: NoUndefined = { + Bucket: "undefined", + // @ts-expect-error (undefined not assignable to string) + Key: undefined, + // optional params can still be undefined. + SSECustomerAlgorithm: undefined, +}; + +const get = s3.send(new GetObjectCommand(getObjectInput)); + +// outputs are still transformed. +await get.Body.TransformToString(); +``` + +### Scenario: Narrowing a smithy-typescript generated client's output payload blob types + +This is mostly relevant to operations with streaming bodies such as within +the S3Client in the AWS SDK for JavaScript v3. + +Because blob payload types are platform dependent, you may wish to indicate in your application that a client is running in a specific +environment. This narrows the blob payload types. + +```typescript +import { GetObjectCommand, S3Client } from "@aws-sdk/client-s3"; +import type { NodeJsClient, SdkStream, StreamingBlobPayloadOutputTypes } from "@smithy/types"; +import type { IncomingMessage } from "node:http"; + +// default client init. +const s3Default = new S3Client({}); + +// client init with type narrowing. +const s3NarrowType = new S3Client({}) as NodeJsClient; + +// The default type of blob payloads is a wide union type including multiple possible +// request handlers. +const body1: StreamingBlobPayloadOutputTypes = (await s3Default.send(new GetObjectCommand({ Key: "", Bucket: "" }))) + .Body!; + +// This is of the narrower type SdkStream representing +// blob payload responses using specifically the node:http request handler. 
+const body2: SdkStream = (await s3NarrowType.send(new GetObjectCommand({ Key: "", Bucket: "" }))) + .Body!; +``` diff --git a/amplify/functions/fetchDocuments/node_modules/@smithy/types/dist-cjs/abort-handler.js b/amplify/functions/fetchDocuments/node_modules/@smithy/types/dist-cjs/abort-handler.js new file mode 100644 index 0000000..532e610 --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@smithy/types/dist-cjs/abort-handler.js @@ -0,0 +1 @@ +module.exports = require("./index.js"); \ No newline at end of file diff --git a/amplify/functions/fetchDocuments/node_modules/@smithy/types/dist-cjs/abort.js b/amplify/functions/fetchDocuments/node_modules/@smithy/types/dist-cjs/abort.js new file mode 100644 index 0000000..532e610 --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@smithy/types/dist-cjs/abort.js @@ -0,0 +1 @@ +module.exports = require("./index.js"); \ No newline at end of file diff --git a/amplify/functions/fetchDocuments/node_modules/@smithy/types/dist-cjs/auth/HttpApiKeyAuth.js b/amplify/functions/fetchDocuments/node_modules/@smithy/types/dist-cjs/auth/HttpApiKeyAuth.js new file mode 100644 index 0000000..0440577 --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@smithy/types/dist-cjs/auth/HttpApiKeyAuth.js @@ -0,0 +1 @@ +module.exports = require("../index.js"); \ No newline at end of file diff --git a/amplify/functions/fetchDocuments/node_modules/@smithy/types/dist-cjs/auth/HttpAuthScheme.js b/amplify/functions/fetchDocuments/node_modules/@smithy/types/dist-cjs/auth/HttpAuthScheme.js new file mode 100644 index 0000000..0440577 --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@smithy/types/dist-cjs/auth/HttpAuthScheme.js @@ -0,0 +1 @@ +module.exports = require("../index.js"); \ No newline at end of file diff --git a/amplify/functions/fetchDocuments/node_modules/@smithy/types/dist-cjs/auth/HttpAuthSchemeProvider.js 
b/amplify/functions/fetchDocuments/node_modules/@smithy/types/dist-cjs/auth/HttpAuthSchemeProvider.js new file mode 100644 index 0000000..0440577 --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@smithy/types/dist-cjs/auth/HttpAuthSchemeProvider.js @@ -0,0 +1 @@ +module.exports = require("../index.js"); \ No newline at end of file diff --git a/amplify/functions/fetchDocuments/node_modules/@smithy/types/dist-cjs/auth/HttpSigner.js b/amplify/functions/fetchDocuments/node_modules/@smithy/types/dist-cjs/auth/HttpSigner.js new file mode 100644 index 0000000..0440577 --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@smithy/types/dist-cjs/auth/HttpSigner.js @@ -0,0 +1 @@ +module.exports = require("../index.js"); \ No newline at end of file diff --git a/amplify/functions/fetchDocuments/node_modules/@smithy/types/dist-cjs/auth/IdentityProviderConfig.js b/amplify/functions/fetchDocuments/node_modules/@smithy/types/dist-cjs/auth/IdentityProviderConfig.js new file mode 100644 index 0000000..0440577 --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@smithy/types/dist-cjs/auth/IdentityProviderConfig.js @@ -0,0 +1 @@ +module.exports = require("../index.js"); \ No newline at end of file diff --git a/amplify/functions/fetchDocuments/node_modules/@smithy/types/dist-cjs/auth/auth.js b/amplify/functions/fetchDocuments/node_modules/@smithy/types/dist-cjs/auth/auth.js new file mode 100644 index 0000000..0440577 --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@smithy/types/dist-cjs/auth/auth.js @@ -0,0 +1 @@ +module.exports = require("../index.js"); \ No newline at end of file diff --git a/amplify/functions/fetchDocuments/node_modules/@smithy/types/dist-cjs/auth/index.js b/amplify/functions/fetchDocuments/node_modules/@smithy/types/dist-cjs/auth/index.js new file mode 100644 index 0000000..0440577 --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@smithy/types/dist-cjs/auth/index.js @@ -0,0 +1 @@ 
+module.exports = require("../index.js"); \ No newline at end of file diff --git a/amplify/functions/fetchDocuments/node_modules/@smithy/types/dist-cjs/blob/blob-payload-input-types.js b/amplify/functions/fetchDocuments/node_modules/@smithy/types/dist-cjs/blob/blob-payload-input-types.js new file mode 100644 index 0000000..0440577 --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@smithy/types/dist-cjs/blob/blob-payload-input-types.js @@ -0,0 +1 @@ +module.exports = require("../index.js"); \ No newline at end of file diff --git a/amplify/functions/fetchDocuments/node_modules/@smithy/types/dist-cjs/checksum.js b/amplify/functions/fetchDocuments/node_modules/@smithy/types/dist-cjs/checksum.js new file mode 100644 index 0000000..532e610 --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@smithy/types/dist-cjs/checksum.js @@ -0,0 +1 @@ +module.exports = require("./index.js"); \ No newline at end of file diff --git a/amplify/functions/fetchDocuments/node_modules/@smithy/types/dist-cjs/client.js b/amplify/functions/fetchDocuments/node_modules/@smithy/types/dist-cjs/client.js new file mode 100644 index 0000000..532e610 --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@smithy/types/dist-cjs/client.js @@ -0,0 +1 @@ +module.exports = require("./index.js"); \ No newline at end of file diff --git a/amplify/functions/fetchDocuments/node_modules/@smithy/types/dist-cjs/command.js b/amplify/functions/fetchDocuments/node_modules/@smithy/types/dist-cjs/command.js new file mode 100644 index 0000000..532e610 --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@smithy/types/dist-cjs/command.js @@ -0,0 +1 @@ +module.exports = require("./index.js"); \ No newline at end of file diff --git a/amplify/functions/fetchDocuments/node_modules/@smithy/types/dist-cjs/connection/config.js b/amplify/functions/fetchDocuments/node_modules/@smithy/types/dist-cjs/connection/config.js new file mode 100644 index 0000000..0440577 --- /dev/null 
+++ b/amplify/functions/fetchDocuments/node_modules/@smithy/types/dist-cjs/connection/config.js @@ -0,0 +1 @@ +module.exports = require("../index.js"); \ No newline at end of file diff --git a/amplify/functions/fetchDocuments/node_modules/@smithy/types/dist-cjs/connection/index.js b/amplify/functions/fetchDocuments/node_modules/@smithy/types/dist-cjs/connection/index.js new file mode 100644 index 0000000..0440577 --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@smithy/types/dist-cjs/connection/index.js @@ -0,0 +1 @@ +module.exports = require("../index.js"); \ No newline at end of file diff --git a/amplify/functions/fetchDocuments/node_modules/@smithy/types/dist-cjs/connection/manager.js b/amplify/functions/fetchDocuments/node_modules/@smithy/types/dist-cjs/connection/manager.js new file mode 100644 index 0000000..0440577 --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@smithy/types/dist-cjs/connection/manager.js @@ -0,0 +1 @@ +module.exports = require("../index.js"); \ No newline at end of file diff --git a/amplify/functions/fetchDocuments/node_modules/@smithy/types/dist-cjs/connection/pool.js b/amplify/functions/fetchDocuments/node_modules/@smithy/types/dist-cjs/connection/pool.js new file mode 100644 index 0000000..0440577 --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@smithy/types/dist-cjs/connection/pool.js @@ -0,0 +1 @@ +module.exports = require("../index.js"); \ No newline at end of file diff --git a/amplify/functions/fetchDocuments/node_modules/@smithy/types/dist-cjs/crypto.js b/amplify/functions/fetchDocuments/node_modules/@smithy/types/dist-cjs/crypto.js new file mode 100644 index 0000000..532e610 --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@smithy/types/dist-cjs/crypto.js @@ -0,0 +1 @@ +module.exports = require("./index.js"); \ No newline at end of file diff --git a/amplify/functions/fetchDocuments/node_modules/@smithy/types/dist-cjs/downlevel-ts3.4/transform/type-transform.js 
b/amplify/functions/fetchDocuments/node_modules/@smithy/types/dist-cjs/downlevel-ts3.4/transform/type-transform.js new file mode 100644 index 0000000..8817412 --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@smithy/types/dist-cjs/downlevel-ts3.4/transform/type-transform.js @@ -0,0 +1 @@ +module.exports = require("../../index.js"); \ No newline at end of file diff --git a/amplify/functions/fetchDocuments/node_modules/@smithy/types/dist-cjs/encode.js b/amplify/functions/fetchDocuments/node_modules/@smithy/types/dist-cjs/encode.js new file mode 100644 index 0000000..532e610 --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@smithy/types/dist-cjs/encode.js @@ -0,0 +1 @@ +module.exports = require("./index.js"); \ No newline at end of file diff --git a/amplify/functions/fetchDocuments/node_modules/@smithy/types/dist-cjs/endpoint.js b/amplify/functions/fetchDocuments/node_modules/@smithy/types/dist-cjs/endpoint.js new file mode 100644 index 0000000..532e610 --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@smithy/types/dist-cjs/endpoint.js @@ -0,0 +1 @@ +module.exports = require("./index.js"); \ No newline at end of file diff --git a/amplify/functions/fetchDocuments/node_modules/@smithy/types/dist-cjs/endpoints/EndpointRuleObject.js b/amplify/functions/fetchDocuments/node_modules/@smithy/types/dist-cjs/endpoints/EndpointRuleObject.js new file mode 100644 index 0000000..0440577 --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@smithy/types/dist-cjs/endpoints/EndpointRuleObject.js @@ -0,0 +1 @@ +module.exports = require("../index.js"); \ No newline at end of file diff --git a/amplify/functions/fetchDocuments/node_modules/@smithy/types/dist-cjs/endpoints/ErrorRuleObject.js b/amplify/functions/fetchDocuments/node_modules/@smithy/types/dist-cjs/endpoints/ErrorRuleObject.js new file mode 100644 index 0000000..0440577 --- /dev/null +++ 
b/amplify/functions/fetchDocuments/node_modules/@smithy/types/dist-cjs/endpoints/ErrorRuleObject.js @@ -0,0 +1 @@ +module.exports = require("../index.js"); \ No newline at end of file diff --git a/amplify/functions/fetchDocuments/node_modules/@smithy/types/dist-cjs/endpoints/RuleSetObject.js b/amplify/functions/fetchDocuments/node_modules/@smithy/types/dist-cjs/endpoints/RuleSetObject.js new file mode 100644 index 0000000..0440577 --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@smithy/types/dist-cjs/endpoints/RuleSetObject.js @@ -0,0 +1 @@ +module.exports = require("../index.js"); \ No newline at end of file diff --git a/amplify/functions/fetchDocuments/node_modules/@smithy/types/dist-cjs/endpoints/TreeRuleObject.js b/amplify/functions/fetchDocuments/node_modules/@smithy/types/dist-cjs/endpoints/TreeRuleObject.js new file mode 100644 index 0000000..0440577 --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@smithy/types/dist-cjs/endpoints/TreeRuleObject.js @@ -0,0 +1 @@ +module.exports = require("../index.js"); \ No newline at end of file diff --git a/amplify/functions/fetchDocuments/node_modules/@smithy/types/dist-cjs/endpoints/index.js b/amplify/functions/fetchDocuments/node_modules/@smithy/types/dist-cjs/endpoints/index.js new file mode 100644 index 0000000..0440577 --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@smithy/types/dist-cjs/endpoints/index.js @@ -0,0 +1 @@ +module.exports = require("../index.js"); \ No newline at end of file diff --git a/amplify/functions/fetchDocuments/node_modules/@smithy/types/dist-cjs/endpoints/shared.js b/amplify/functions/fetchDocuments/node_modules/@smithy/types/dist-cjs/endpoints/shared.js new file mode 100644 index 0000000..0440577 --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@smithy/types/dist-cjs/endpoints/shared.js @@ -0,0 +1 @@ +module.exports = require("../index.js"); \ No newline at end of file diff --git 
a/amplify/functions/fetchDocuments/node_modules/@smithy/types/dist-cjs/eventStream.js b/amplify/functions/fetchDocuments/node_modules/@smithy/types/dist-cjs/eventStream.js new file mode 100644 index 0000000..532e610 --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@smithy/types/dist-cjs/eventStream.js @@ -0,0 +1 @@ +module.exports = require("./index.js"); \ No newline at end of file diff --git a/amplify/functions/fetchDocuments/node_modules/@smithy/types/dist-cjs/extensions/checksum.js b/amplify/functions/fetchDocuments/node_modules/@smithy/types/dist-cjs/extensions/checksum.js new file mode 100644 index 0000000..0440577 --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@smithy/types/dist-cjs/extensions/checksum.js @@ -0,0 +1 @@ +module.exports = require("../index.js"); \ No newline at end of file diff --git a/amplify/functions/fetchDocuments/node_modules/@smithy/types/dist-cjs/extensions/defaultClientConfiguration.js b/amplify/functions/fetchDocuments/node_modules/@smithy/types/dist-cjs/extensions/defaultClientConfiguration.js new file mode 100644 index 0000000..0440577 --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@smithy/types/dist-cjs/extensions/defaultClientConfiguration.js @@ -0,0 +1 @@ +module.exports = require("../index.js"); \ No newline at end of file diff --git a/amplify/functions/fetchDocuments/node_modules/@smithy/types/dist-cjs/extensions/defaultExtensionConfiguration.js b/amplify/functions/fetchDocuments/node_modules/@smithy/types/dist-cjs/extensions/defaultExtensionConfiguration.js new file mode 100644 index 0000000..0440577 --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@smithy/types/dist-cjs/extensions/defaultExtensionConfiguration.js @@ -0,0 +1 @@ +module.exports = require("../index.js"); \ No newline at end of file diff --git a/amplify/functions/fetchDocuments/node_modules/@smithy/types/dist-cjs/extensions/index.js 
b/amplify/functions/fetchDocuments/node_modules/@smithy/types/dist-cjs/extensions/index.js new file mode 100644 index 0000000..0440577 --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@smithy/types/dist-cjs/extensions/index.js @@ -0,0 +1 @@ +module.exports = require("../index.js"); \ No newline at end of file diff --git a/amplify/functions/fetchDocuments/node_modules/@smithy/types/dist-cjs/extensions/retry.js b/amplify/functions/fetchDocuments/node_modules/@smithy/types/dist-cjs/extensions/retry.js new file mode 100644 index 0000000..0440577 --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@smithy/types/dist-cjs/extensions/retry.js @@ -0,0 +1 @@ +module.exports = require("../index.js"); \ No newline at end of file diff --git a/amplify/functions/fetchDocuments/node_modules/@smithy/types/dist-cjs/externals-check/browser-externals-check.js b/amplify/functions/fetchDocuments/node_modules/@smithy/types/dist-cjs/externals-check/browser-externals-check.js new file mode 100644 index 0000000..0440577 --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@smithy/types/dist-cjs/externals-check/browser-externals-check.js @@ -0,0 +1 @@ +module.exports = require("../index.js"); \ No newline at end of file diff --git a/amplify/functions/fetchDocuments/node_modules/@smithy/types/dist-cjs/feature-ids.js b/amplify/functions/fetchDocuments/node_modules/@smithy/types/dist-cjs/feature-ids.js new file mode 100644 index 0000000..532e610 --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@smithy/types/dist-cjs/feature-ids.js @@ -0,0 +1 @@ +module.exports = require("./index.js"); \ No newline at end of file diff --git a/amplify/functions/fetchDocuments/node_modules/@smithy/types/dist-cjs/http.js b/amplify/functions/fetchDocuments/node_modules/@smithy/types/dist-cjs/http.js new file mode 100644 index 0000000..532e610 --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@smithy/types/dist-cjs/http.js @@ -0,0 +1 @@ 
+module.exports = require("./index.js"); \ No newline at end of file diff --git a/amplify/functions/fetchDocuments/node_modules/@smithy/types/dist-cjs/http/httpHandlerInitialization.js b/amplify/functions/fetchDocuments/node_modules/@smithy/types/dist-cjs/http/httpHandlerInitialization.js new file mode 100644 index 0000000..0440577 --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@smithy/types/dist-cjs/http/httpHandlerInitialization.js @@ -0,0 +1 @@ +module.exports = require("../index.js"); \ No newline at end of file diff --git a/amplify/functions/fetchDocuments/node_modules/@smithy/types/dist-cjs/identity/apiKeyIdentity.js b/amplify/functions/fetchDocuments/node_modules/@smithy/types/dist-cjs/identity/apiKeyIdentity.js new file mode 100644 index 0000000..0440577 --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@smithy/types/dist-cjs/identity/apiKeyIdentity.js @@ -0,0 +1 @@ +module.exports = require("../index.js"); \ No newline at end of file diff --git a/amplify/functions/fetchDocuments/node_modules/@smithy/types/dist-cjs/identity/awsCredentialIdentity.js b/amplify/functions/fetchDocuments/node_modules/@smithy/types/dist-cjs/identity/awsCredentialIdentity.js new file mode 100644 index 0000000..0440577 --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@smithy/types/dist-cjs/identity/awsCredentialIdentity.js @@ -0,0 +1 @@ +module.exports = require("../index.js"); \ No newline at end of file diff --git a/amplify/functions/fetchDocuments/node_modules/@smithy/types/dist-cjs/identity/identity.js b/amplify/functions/fetchDocuments/node_modules/@smithy/types/dist-cjs/identity/identity.js new file mode 100644 index 0000000..0440577 --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@smithy/types/dist-cjs/identity/identity.js @@ -0,0 +1 @@ +module.exports = require("../index.js"); \ No newline at end of file diff --git a/amplify/functions/fetchDocuments/node_modules/@smithy/types/dist-cjs/identity/index.js 
b/amplify/functions/fetchDocuments/node_modules/@smithy/types/dist-cjs/identity/index.js new file mode 100644 index 0000000..0440577 --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@smithy/types/dist-cjs/identity/index.js @@ -0,0 +1 @@ +module.exports = require("../index.js"); \ No newline at end of file diff --git a/amplify/functions/fetchDocuments/node_modules/@smithy/types/dist-cjs/identity/tokenIdentity.js b/amplify/functions/fetchDocuments/node_modules/@smithy/types/dist-cjs/identity/tokenIdentity.js new file mode 100644 index 0000000..0440577 --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@smithy/types/dist-cjs/identity/tokenIdentity.js @@ -0,0 +1 @@ +module.exports = require("../index.js"); \ No newline at end of file diff --git a/amplify/functions/fetchDocuments/node_modules/@smithy/types/dist-cjs/index.js b/amplify/functions/fetchDocuments/node_modules/@smithy/types/dist-cjs/index.js new file mode 100644 index 0000000..0849f2b --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@smithy/types/dist-cjs/index.js @@ -0,0 +1,144 @@ +var __defProp = Object.defineProperty; +var __getOwnPropDesc = Object.getOwnPropertyDescriptor; +var __getOwnPropNames = Object.getOwnPropertyNames; +var __hasOwnProp = Object.prototype.hasOwnProperty; +var __name = (target, value) => __defProp(target, "name", { value, configurable: true }); +var __export = (target, all) => { + for (var name in all) + __defProp(target, name, { get: all[name], enumerable: true }); +}; +var __copyProps = (to, from, except, desc) => { + if (from && typeof from === "object" || typeof from === "function") { + for (let key of __getOwnPropNames(from)) + if (!__hasOwnProp.call(to, key) && key !== except) + __defProp(to, key, { get: () => from[key], enumerable: !(desc = __getOwnPropDesc(from, key)) || desc.enumerable }); + } + return to; +}; +var __toCommonJS = (mod) => __copyProps(__defProp({}, "__esModule", { value: true }), mod); + +// src/index.ts +var 
src_exports = {}; +__export(src_exports, { + AlgorithmId: () => AlgorithmId, + EndpointURLScheme: () => EndpointURLScheme, + FieldPosition: () => FieldPosition, + HttpApiKeyAuthLocation: () => HttpApiKeyAuthLocation, + HttpAuthLocation: () => HttpAuthLocation, + IniSectionType: () => IniSectionType, + RequestHandlerProtocol: () => RequestHandlerProtocol, + SMITHY_CONTEXT_KEY: () => SMITHY_CONTEXT_KEY, + getDefaultClientConfiguration: () => getDefaultClientConfiguration, + resolveDefaultRuntimeConfig: () => resolveDefaultRuntimeConfig +}); +module.exports = __toCommonJS(src_exports); + +// src/auth/auth.ts +var HttpAuthLocation = /* @__PURE__ */ ((HttpAuthLocation2) => { + HttpAuthLocation2["HEADER"] = "header"; + HttpAuthLocation2["QUERY"] = "query"; + return HttpAuthLocation2; +})(HttpAuthLocation || {}); + +// src/auth/HttpApiKeyAuth.ts +var HttpApiKeyAuthLocation = /* @__PURE__ */ ((HttpApiKeyAuthLocation2) => { + HttpApiKeyAuthLocation2["HEADER"] = "header"; + HttpApiKeyAuthLocation2["QUERY"] = "query"; + return HttpApiKeyAuthLocation2; +})(HttpApiKeyAuthLocation || {}); + +// src/endpoint.ts +var EndpointURLScheme = /* @__PURE__ */ ((EndpointURLScheme2) => { + EndpointURLScheme2["HTTP"] = "http"; + EndpointURLScheme2["HTTPS"] = "https"; + return EndpointURLScheme2; +})(EndpointURLScheme || {}); + +// src/extensions/checksum.ts +var AlgorithmId = /* @__PURE__ */ ((AlgorithmId2) => { + AlgorithmId2["MD5"] = "md5"; + AlgorithmId2["CRC32"] = "crc32"; + AlgorithmId2["CRC32C"] = "crc32c"; + AlgorithmId2["SHA1"] = "sha1"; + AlgorithmId2["SHA256"] = "sha256"; + return AlgorithmId2; +})(AlgorithmId || {}); +var getChecksumConfiguration = /* @__PURE__ */ __name((runtimeConfig) => { + const checksumAlgorithms = []; + if (runtimeConfig.sha256 !== void 0) { + checksumAlgorithms.push({ + algorithmId: () => "sha256" /* SHA256 */, + checksumConstructor: () => runtimeConfig.sha256 + }); + } + if (runtimeConfig.md5 != void 0) { + checksumAlgorithms.push({ + algorithmId: () => 
"md5" /* MD5 */, + checksumConstructor: () => runtimeConfig.md5 + }); + } + return { + addChecksumAlgorithm(algo) { + checksumAlgorithms.push(algo); + }, + checksumAlgorithms() { + return checksumAlgorithms; + } + }; +}, "getChecksumConfiguration"); +var resolveChecksumRuntimeConfig = /* @__PURE__ */ __name((clientConfig) => { + const runtimeConfig = {}; + clientConfig.checksumAlgorithms().forEach((checksumAlgorithm) => { + runtimeConfig[checksumAlgorithm.algorithmId()] = checksumAlgorithm.checksumConstructor(); + }); + return runtimeConfig; +}, "resolveChecksumRuntimeConfig"); + +// src/extensions/defaultClientConfiguration.ts +var getDefaultClientConfiguration = /* @__PURE__ */ __name((runtimeConfig) => { + return getChecksumConfiguration(runtimeConfig); +}, "getDefaultClientConfiguration"); +var resolveDefaultRuntimeConfig = /* @__PURE__ */ __name((config) => { + return resolveChecksumRuntimeConfig(config); +}, "resolveDefaultRuntimeConfig"); + +// src/http.ts +var FieldPosition = /* @__PURE__ */ ((FieldPosition2) => { + FieldPosition2[FieldPosition2["HEADER"] = 0] = "HEADER"; + FieldPosition2[FieldPosition2["TRAILER"] = 1] = "TRAILER"; + return FieldPosition2; +})(FieldPosition || {}); + +// src/middleware.ts +var SMITHY_CONTEXT_KEY = "__smithy_context"; + +// src/profile.ts +var IniSectionType = /* @__PURE__ */ ((IniSectionType2) => { + IniSectionType2["PROFILE"] = "profile"; + IniSectionType2["SSO_SESSION"] = "sso-session"; + IniSectionType2["SERVICES"] = "services"; + return IniSectionType2; +})(IniSectionType || {}); + +// src/transfer.ts +var RequestHandlerProtocol = /* @__PURE__ */ ((RequestHandlerProtocol2) => { + RequestHandlerProtocol2["HTTP_0_9"] = "http/0.9"; + RequestHandlerProtocol2["HTTP_1_0"] = "http/1.0"; + RequestHandlerProtocol2["TDS_8_0"] = "tds/8.0"; + return RequestHandlerProtocol2; +})(RequestHandlerProtocol || {}); +// Annotate the CommonJS export names for ESM import in node: + +0 && (module.exports = { + HttpAuthLocation, + 
HttpApiKeyAuthLocation, + EndpointURLScheme, + AlgorithmId, + getDefaultClientConfiguration, + resolveDefaultRuntimeConfig, + FieldPosition, + SMITHY_CONTEXT_KEY, + IniSectionType, + RequestHandlerProtocol +}); + diff --git a/amplify/functions/fetchDocuments/node_modules/@smithy/types/dist-cjs/logger.js b/amplify/functions/fetchDocuments/node_modules/@smithy/types/dist-cjs/logger.js new file mode 100644 index 0000000..532e610 --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@smithy/types/dist-cjs/logger.js @@ -0,0 +1 @@ +module.exports = require("./index.js"); \ No newline at end of file diff --git a/amplify/functions/fetchDocuments/node_modules/@smithy/types/dist-cjs/middleware.js b/amplify/functions/fetchDocuments/node_modules/@smithy/types/dist-cjs/middleware.js new file mode 100644 index 0000000..532e610 --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@smithy/types/dist-cjs/middleware.js @@ -0,0 +1 @@ +module.exports = require("./index.js"); \ No newline at end of file diff --git a/amplify/functions/fetchDocuments/node_modules/@smithy/types/dist-cjs/pagination.js b/amplify/functions/fetchDocuments/node_modules/@smithy/types/dist-cjs/pagination.js new file mode 100644 index 0000000..532e610 --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@smithy/types/dist-cjs/pagination.js @@ -0,0 +1 @@ +module.exports = require("./index.js"); \ No newline at end of file diff --git a/amplify/functions/fetchDocuments/node_modules/@smithy/types/dist-cjs/profile.js b/amplify/functions/fetchDocuments/node_modules/@smithy/types/dist-cjs/profile.js new file mode 100644 index 0000000..532e610 --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@smithy/types/dist-cjs/profile.js @@ -0,0 +1 @@ +module.exports = require("./index.js"); \ No newline at end of file diff --git a/amplify/functions/fetchDocuments/node_modules/@smithy/types/dist-cjs/response.js 
b/amplify/functions/fetchDocuments/node_modules/@smithy/types/dist-cjs/response.js new file mode 100644 index 0000000..532e610 --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@smithy/types/dist-cjs/response.js @@ -0,0 +1 @@ +module.exports = require("./index.js"); \ No newline at end of file diff --git a/amplify/functions/fetchDocuments/node_modules/@smithy/types/dist-cjs/retry.js b/amplify/functions/fetchDocuments/node_modules/@smithy/types/dist-cjs/retry.js new file mode 100644 index 0000000..532e610 --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@smithy/types/dist-cjs/retry.js @@ -0,0 +1 @@ +module.exports = require("./index.js"); \ No newline at end of file diff --git a/amplify/functions/fetchDocuments/node_modules/@smithy/types/dist-cjs/serde.js b/amplify/functions/fetchDocuments/node_modules/@smithy/types/dist-cjs/serde.js new file mode 100644 index 0000000..532e610 --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@smithy/types/dist-cjs/serde.js @@ -0,0 +1 @@ +module.exports = require("./index.js"); \ No newline at end of file diff --git a/amplify/functions/fetchDocuments/node_modules/@smithy/types/dist-cjs/shapes.js b/amplify/functions/fetchDocuments/node_modules/@smithy/types/dist-cjs/shapes.js new file mode 100644 index 0000000..532e610 --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@smithy/types/dist-cjs/shapes.js @@ -0,0 +1 @@ +module.exports = require("./index.js"); \ No newline at end of file diff --git a/amplify/functions/fetchDocuments/node_modules/@smithy/types/dist-cjs/signature.js b/amplify/functions/fetchDocuments/node_modules/@smithy/types/dist-cjs/signature.js new file mode 100644 index 0000000..532e610 --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@smithy/types/dist-cjs/signature.js @@ -0,0 +1 @@ +module.exports = require("./index.js"); \ No newline at end of file diff --git 
a/amplify/functions/fetchDocuments/node_modules/@smithy/types/dist-cjs/stream.js b/amplify/functions/fetchDocuments/node_modules/@smithy/types/dist-cjs/stream.js new file mode 100644 index 0000000..532e610 --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@smithy/types/dist-cjs/stream.js @@ -0,0 +1 @@ +module.exports = require("./index.js"); \ No newline at end of file diff --git a/amplify/functions/fetchDocuments/node_modules/@smithy/types/dist-cjs/streaming-payload/streaming-blob-common-types.js b/amplify/functions/fetchDocuments/node_modules/@smithy/types/dist-cjs/streaming-payload/streaming-blob-common-types.js new file mode 100644 index 0000000..0440577 --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@smithy/types/dist-cjs/streaming-payload/streaming-blob-common-types.js @@ -0,0 +1 @@ +module.exports = require("../index.js"); \ No newline at end of file diff --git a/amplify/functions/fetchDocuments/node_modules/@smithy/types/dist-cjs/streaming-payload/streaming-blob-payload-input-types.js b/amplify/functions/fetchDocuments/node_modules/@smithy/types/dist-cjs/streaming-payload/streaming-blob-payload-input-types.js new file mode 100644 index 0000000..0440577 --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@smithy/types/dist-cjs/streaming-payload/streaming-blob-payload-input-types.js @@ -0,0 +1 @@ +module.exports = require("../index.js"); \ No newline at end of file diff --git a/amplify/functions/fetchDocuments/node_modules/@smithy/types/dist-cjs/streaming-payload/streaming-blob-payload-output-types.js b/amplify/functions/fetchDocuments/node_modules/@smithy/types/dist-cjs/streaming-payload/streaming-blob-payload-output-types.js new file mode 100644 index 0000000..0440577 --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@smithy/types/dist-cjs/streaming-payload/streaming-blob-payload-output-types.js @@ -0,0 +1 @@ +module.exports = require("../index.js"); \ No newline at end of file diff --git 
a/amplify/functions/fetchDocuments/node_modules/@smithy/types/dist-cjs/transfer.js b/amplify/functions/fetchDocuments/node_modules/@smithy/types/dist-cjs/transfer.js new file mode 100644 index 0000000..532e610 --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@smithy/types/dist-cjs/transfer.js @@ -0,0 +1 @@ +module.exports = require("./index.js"); \ No newline at end of file diff --git a/amplify/functions/fetchDocuments/node_modules/@smithy/types/dist-cjs/transform/client-method-transforms.js b/amplify/functions/fetchDocuments/node_modules/@smithy/types/dist-cjs/transform/client-method-transforms.js new file mode 100644 index 0000000..0440577 --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@smithy/types/dist-cjs/transform/client-method-transforms.js @@ -0,0 +1 @@ +module.exports = require("../index.js"); \ No newline at end of file diff --git a/amplify/functions/fetchDocuments/node_modules/@smithy/types/dist-cjs/transform/client-payload-blob-type-narrow.js b/amplify/functions/fetchDocuments/node_modules/@smithy/types/dist-cjs/transform/client-payload-blob-type-narrow.js new file mode 100644 index 0000000..0440577 --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@smithy/types/dist-cjs/transform/client-payload-blob-type-narrow.js @@ -0,0 +1 @@ +module.exports = require("../index.js"); \ No newline at end of file diff --git a/amplify/functions/fetchDocuments/node_modules/@smithy/types/dist-cjs/transform/exact.js b/amplify/functions/fetchDocuments/node_modules/@smithy/types/dist-cjs/transform/exact.js new file mode 100644 index 0000000..0440577 --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@smithy/types/dist-cjs/transform/exact.js @@ -0,0 +1 @@ +module.exports = require("../index.js"); \ No newline at end of file diff --git a/amplify/functions/fetchDocuments/node_modules/@smithy/types/dist-cjs/transform/no-undefined.js 
b/amplify/functions/fetchDocuments/node_modules/@smithy/types/dist-cjs/transform/no-undefined.js new file mode 100644 index 0000000..0440577 --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@smithy/types/dist-cjs/transform/no-undefined.js @@ -0,0 +1 @@ +module.exports = require("../index.js"); \ No newline at end of file diff --git a/amplify/functions/fetchDocuments/node_modules/@smithy/types/dist-cjs/transform/type-transform.js b/amplify/functions/fetchDocuments/node_modules/@smithy/types/dist-cjs/transform/type-transform.js new file mode 100644 index 0000000..0440577 --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@smithy/types/dist-cjs/transform/type-transform.js @@ -0,0 +1 @@ +module.exports = require("../index.js"); \ No newline at end of file diff --git a/amplify/functions/fetchDocuments/node_modules/@smithy/types/dist-cjs/uri.js b/amplify/functions/fetchDocuments/node_modules/@smithy/types/dist-cjs/uri.js new file mode 100644 index 0000000..532e610 --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@smithy/types/dist-cjs/uri.js @@ -0,0 +1 @@ +module.exports = require("./index.js"); \ No newline at end of file diff --git a/amplify/functions/fetchDocuments/node_modules/@smithy/types/dist-cjs/util.js b/amplify/functions/fetchDocuments/node_modules/@smithy/types/dist-cjs/util.js new file mode 100644 index 0000000..532e610 --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@smithy/types/dist-cjs/util.js @@ -0,0 +1 @@ +module.exports = require("./index.js"); \ No newline at end of file diff --git a/amplify/functions/fetchDocuments/node_modules/@smithy/types/dist-cjs/waiter.js b/amplify/functions/fetchDocuments/node_modules/@smithy/types/dist-cjs/waiter.js new file mode 100644 index 0000000..532e610 --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@smithy/types/dist-cjs/waiter.js @@ -0,0 +1 @@ +module.exports = require("./index.js"); \ No newline at end of file diff --git 
a/amplify/functions/fetchDocuments/node_modules/@smithy/types/dist-es/abort-handler.js b/amplify/functions/fetchDocuments/node_modules/@smithy/types/dist-es/abort-handler.js new file mode 100644 index 0000000..cb0ff5c --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@smithy/types/dist-es/abort-handler.js @@ -0,0 +1 @@ +export {}; diff --git a/amplify/functions/fetchDocuments/node_modules/@smithy/types/dist-es/abort.js b/amplify/functions/fetchDocuments/node_modules/@smithy/types/dist-es/abort.js new file mode 100644 index 0000000..cb0ff5c --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@smithy/types/dist-es/abort.js @@ -0,0 +1 @@ +export {}; diff --git a/amplify/functions/fetchDocuments/node_modules/@smithy/types/dist-es/auth/HttpApiKeyAuth.js b/amplify/functions/fetchDocuments/node_modules/@smithy/types/dist-es/auth/HttpApiKeyAuth.js new file mode 100644 index 0000000..4c02f24 --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@smithy/types/dist-es/auth/HttpApiKeyAuth.js @@ -0,0 +1,5 @@ +export var HttpApiKeyAuthLocation; +(function (HttpApiKeyAuthLocation) { + HttpApiKeyAuthLocation["HEADER"] = "header"; + HttpApiKeyAuthLocation["QUERY"] = "query"; +})(HttpApiKeyAuthLocation || (HttpApiKeyAuthLocation = {})); diff --git a/amplify/functions/fetchDocuments/node_modules/@smithy/types/dist-es/auth/HttpAuthScheme.js b/amplify/functions/fetchDocuments/node_modules/@smithy/types/dist-es/auth/HttpAuthScheme.js new file mode 100644 index 0000000..cb0ff5c --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@smithy/types/dist-es/auth/HttpAuthScheme.js @@ -0,0 +1 @@ +export {}; diff --git a/amplify/functions/fetchDocuments/node_modules/@smithy/types/dist-es/auth/HttpAuthSchemeProvider.js b/amplify/functions/fetchDocuments/node_modules/@smithy/types/dist-es/auth/HttpAuthSchemeProvider.js new file mode 100644 index 0000000..cb0ff5c --- /dev/null +++ 
b/amplify/functions/fetchDocuments/node_modules/@smithy/types/dist-es/auth/HttpAuthSchemeProvider.js @@ -0,0 +1 @@ +export {}; diff --git a/amplify/functions/fetchDocuments/node_modules/@smithy/types/dist-es/auth/HttpSigner.js b/amplify/functions/fetchDocuments/node_modules/@smithy/types/dist-es/auth/HttpSigner.js new file mode 100644 index 0000000..cb0ff5c --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@smithy/types/dist-es/auth/HttpSigner.js @@ -0,0 +1 @@ +export {}; diff --git a/amplify/functions/fetchDocuments/node_modules/@smithy/types/dist-es/auth/IdentityProviderConfig.js b/amplify/functions/fetchDocuments/node_modules/@smithy/types/dist-es/auth/IdentityProviderConfig.js new file mode 100644 index 0000000..cb0ff5c --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@smithy/types/dist-es/auth/IdentityProviderConfig.js @@ -0,0 +1 @@ +export {}; diff --git a/amplify/functions/fetchDocuments/node_modules/@smithy/types/dist-es/auth/auth.js b/amplify/functions/fetchDocuments/node_modules/@smithy/types/dist-es/auth/auth.js new file mode 100644 index 0000000..bd3b2df --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@smithy/types/dist-es/auth/auth.js @@ -0,0 +1,5 @@ +export var HttpAuthLocation; +(function (HttpAuthLocation) { + HttpAuthLocation["HEADER"] = "header"; + HttpAuthLocation["QUERY"] = "query"; +})(HttpAuthLocation || (HttpAuthLocation = {})); diff --git a/amplify/functions/fetchDocuments/node_modules/@smithy/types/dist-es/auth/index.js b/amplify/functions/fetchDocuments/node_modules/@smithy/types/dist-es/auth/index.js new file mode 100644 index 0000000..7436030 --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@smithy/types/dist-es/auth/index.js @@ -0,0 +1,6 @@ +export * from "./auth"; +export * from "./HttpApiKeyAuth"; +export * from "./HttpAuthScheme"; +export * from "./HttpAuthSchemeProvider"; +export * from "./HttpSigner"; +export * from "./IdentityProviderConfig"; diff --git 
a/amplify/functions/fetchDocuments/node_modules/@smithy/types/dist-es/blob/blob-payload-input-types.js b/amplify/functions/fetchDocuments/node_modules/@smithy/types/dist-es/blob/blob-payload-input-types.js new file mode 100644 index 0000000..cb0ff5c --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@smithy/types/dist-es/blob/blob-payload-input-types.js @@ -0,0 +1 @@ +export {}; diff --git a/amplify/functions/fetchDocuments/node_modules/@smithy/types/dist-es/checksum.js b/amplify/functions/fetchDocuments/node_modules/@smithy/types/dist-es/checksum.js new file mode 100644 index 0000000..cb0ff5c --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@smithy/types/dist-es/checksum.js @@ -0,0 +1 @@ +export {}; diff --git a/amplify/functions/fetchDocuments/node_modules/@smithy/types/dist-es/client.js b/amplify/functions/fetchDocuments/node_modules/@smithy/types/dist-es/client.js new file mode 100644 index 0000000..cb0ff5c --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@smithy/types/dist-es/client.js @@ -0,0 +1 @@ +export {}; diff --git a/amplify/functions/fetchDocuments/node_modules/@smithy/types/dist-es/command.js b/amplify/functions/fetchDocuments/node_modules/@smithy/types/dist-es/command.js new file mode 100644 index 0000000..cb0ff5c --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@smithy/types/dist-es/command.js @@ -0,0 +1 @@ +export {}; diff --git a/amplify/functions/fetchDocuments/node_modules/@smithy/types/dist-es/connection/config.js b/amplify/functions/fetchDocuments/node_modules/@smithy/types/dist-es/connection/config.js new file mode 100644 index 0000000..cb0ff5c --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@smithy/types/dist-es/connection/config.js @@ -0,0 +1 @@ +export {}; diff --git a/amplify/functions/fetchDocuments/node_modules/@smithy/types/dist-es/connection/index.js b/amplify/functions/fetchDocuments/node_modules/@smithy/types/dist-es/connection/index.js new file mode 
100644 index 0000000..c6c3ea8 --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@smithy/types/dist-es/connection/index.js @@ -0,0 +1,3 @@ +export * from "./config"; +export * from "./manager"; +export * from "./pool"; diff --git a/amplify/functions/fetchDocuments/node_modules/@smithy/types/dist-es/connection/manager.js b/amplify/functions/fetchDocuments/node_modules/@smithy/types/dist-es/connection/manager.js new file mode 100644 index 0000000..cb0ff5c --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@smithy/types/dist-es/connection/manager.js @@ -0,0 +1 @@ +export {}; diff --git a/amplify/functions/fetchDocuments/node_modules/@smithy/types/dist-es/connection/pool.js b/amplify/functions/fetchDocuments/node_modules/@smithy/types/dist-es/connection/pool.js new file mode 100644 index 0000000..cb0ff5c --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@smithy/types/dist-es/connection/pool.js @@ -0,0 +1 @@ +export {}; diff --git a/amplify/functions/fetchDocuments/node_modules/@smithy/types/dist-es/crypto.js b/amplify/functions/fetchDocuments/node_modules/@smithy/types/dist-es/crypto.js new file mode 100644 index 0000000..cb0ff5c --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@smithy/types/dist-es/crypto.js @@ -0,0 +1 @@ +export {}; diff --git a/amplify/functions/fetchDocuments/node_modules/@smithy/types/dist-es/downlevel-ts3.4/transform/type-transform.js b/amplify/functions/fetchDocuments/node_modules/@smithy/types/dist-es/downlevel-ts3.4/transform/type-transform.js new file mode 100644 index 0000000..cb0ff5c --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@smithy/types/dist-es/downlevel-ts3.4/transform/type-transform.js @@ -0,0 +1 @@ +export {}; diff --git a/amplify/functions/fetchDocuments/node_modules/@smithy/types/dist-es/encode.js b/amplify/functions/fetchDocuments/node_modules/@smithy/types/dist-es/encode.js new file mode 100644 index 0000000..cb0ff5c --- /dev/null +++ 
b/amplify/functions/fetchDocuments/node_modules/@smithy/types/dist-es/encode.js @@ -0,0 +1 @@ +export {}; diff --git a/amplify/functions/fetchDocuments/node_modules/@smithy/types/dist-es/endpoint.js b/amplify/functions/fetchDocuments/node_modules/@smithy/types/dist-es/endpoint.js new file mode 100644 index 0000000..4ae601f --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@smithy/types/dist-es/endpoint.js @@ -0,0 +1,5 @@ +export var EndpointURLScheme; +(function (EndpointURLScheme) { + EndpointURLScheme["HTTP"] = "http"; + EndpointURLScheme["HTTPS"] = "https"; +})(EndpointURLScheme || (EndpointURLScheme = {})); diff --git a/amplify/functions/fetchDocuments/node_modules/@smithy/types/dist-es/endpoints/EndpointRuleObject.js b/amplify/functions/fetchDocuments/node_modules/@smithy/types/dist-es/endpoints/EndpointRuleObject.js new file mode 100644 index 0000000..cb0ff5c --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@smithy/types/dist-es/endpoints/EndpointRuleObject.js @@ -0,0 +1 @@ +export {}; diff --git a/amplify/functions/fetchDocuments/node_modules/@smithy/types/dist-es/endpoints/ErrorRuleObject.js b/amplify/functions/fetchDocuments/node_modules/@smithy/types/dist-es/endpoints/ErrorRuleObject.js new file mode 100644 index 0000000..cb0ff5c --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@smithy/types/dist-es/endpoints/ErrorRuleObject.js @@ -0,0 +1 @@ +export {}; diff --git a/amplify/functions/fetchDocuments/node_modules/@smithy/types/dist-es/endpoints/RuleSetObject.js b/amplify/functions/fetchDocuments/node_modules/@smithy/types/dist-es/endpoints/RuleSetObject.js new file mode 100644 index 0000000..cb0ff5c --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@smithy/types/dist-es/endpoints/RuleSetObject.js @@ -0,0 +1 @@ +export {}; diff --git a/amplify/functions/fetchDocuments/node_modules/@smithy/types/dist-es/endpoints/TreeRuleObject.js 
b/amplify/functions/fetchDocuments/node_modules/@smithy/types/dist-es/endpoints/TreeRuleObject.js new file mode 100644 index 0000000..cb0ff5c --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@smithy/types/dist-es/endpoints/TreeRuleObject.js @@ -0,0 +1 @@ +export {}; diff --git a/amplify/functions/fetchDocuments/node_modules/@smithy/types/dist-es/endpoints/index.js b/amplify/functions/fetchDocuments/node_modules/@smithy/types/dist-es/endpoints/index.js new file mode 100644 index 0000000..64d85cf --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@smithy/types/dist-es/endpoints/index.js @@ -0,0 +1,5 @@ +export * from "./EndpointRuleObject"; +export * from "./ErrorRuleObject"; +export * from "./RuleSetObject"; +export * from "./shared"; +export * from "./TreeRuleObject"; diff --git a/amplify/functions/fetchDocuments/node_modules/@smithy/types/dist-es/endpoints/shared.js b/amplify/functions/fetchDocuments/node_modules/@smithy/types/dist-es/endpoints/shared.js new file mode 100644 index 0000000..cb0ff5c --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@smithy/types/dist-es/endpoints/shared.js @@ -0,0 +1 @@ +export {}; diff --git a/amplify/functions/fetchDocuments/node_modules/@smithy/types/dist-es/eventStream.js b/amplify/functions/fetchDocuments/node_modules/@smithy/types/dist-es/eventStream.js new file mode 100644 index 0000000..cb0ff5c --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@smithy/types/dist-es/eventStream.js @@ -0,0 +1 @@ +export {}; diff --git a/amplify/functions/fetchDocuments/node_modules/@smithy/types/dist-es/extensions/checksum.js b/amplify/functions/fetchDocuments/node_modules/@smithy/types/dist-es/extensions/checksum.js new file mode 100644 index 0000000..5a7939e --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@smithy/types/dist-es/extensions/checksum.js @@ -0,0 +1,38 @@ +export var AlgorithmId; +(function (AlgorithmId) { + AlgorithmId["MD5"] = "md5"; + 
AlgorithmId["CRC32"] = "crc32"; + AlgorithmId["CRC32C"] = "crc32c"; + AlgorithmId["SHA1"] = "sha1"; + AlgorithmId["SHA256"] = "sha256"; +})(AlgorithmId || (AlgorithmId = {})); +export const getChecksumConfiguration = (runtimeConfig) => { + const checksumAlgorithms = []; + if (runtimeConfig.sha256 !== undefined) { + checksumAlgorithms.push({ + algorithmId: () => AlgorithmId.SHA256, + checksumConstructor: () => runtimeConfig.sha256, + }); + } + if (runtimeConfig.md5 != undefined) { + checksumAlgorithms.push({ + algorithmId: () => AlgorithmId.MD5, + checksumConstructor: () => runtimeConfig.md5, + }); + } + return { + addChecksumAlgorithm(algo) { + checksumAlgorithms.push(algo); + }, + checksumAlgorithms() { + return checksumAlgorithms; + }, + }; +}; +export const resolveChecksumRuntimeConfig = (clientConfig) => { + const runtimeConfig = {}; + clientConfig.checksumAlgorithms().forEach((checksumAlgorithm) => { + runtimeConfig[checksumAlgorithm.algorithmId()] = checksumAlgorithm.checksumConstructor(); + }); + return runtimeConfig; +}; diff --git a/amplify/functions/fetchDocuments/node_modules/@smithy/types/dist-es/extensions/defaultClientConfiguration.js b/amplify/functions/fetchDocuments/node_modules/@smithy/types/dist-es/extensions/defaultClientConfiguration.js new file mode 100644 index 0000000..4e3eb91 --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@smithy/types/dist-es/extensions/defaultClientConfiguration.js @@ -0,0 +1,7 @@ +import { getChecksumConfiguration, resolveChecksumRuntimeConfig } from "./checksum"; +export const getDefaultClientConfiguration = (runtimeConfig) => { + return getChecksumConfiguration(runtimeConfig); +}; +export const resolveDefaultRuntimeConfig = (config) => { + return resolveChecksumRuntimeConfig(config); +}; diff --git a/amplify/functions/fetchDocuments/node_modules/@smithy/types/dist-es/extensions/defaultExtensionConfiguration.js 
b/amplify/functions/fetchDocuments/node_modules/@smithy/types/dist-es/extensions/defaultExtensionConfiguration.js new file mode 100644 index 0000000..cb0ff5c --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@smithy/types/dist-es/extensions/defaultExtensionConfiguration.js @@ -0,0 +1 @@ +export {}; diff --git a/amplify/functions/fetchDocuments/node_modules/@smithy/types/dist-es/extensions/index.js b/amplify/functions/fetchDocuments/node_modules/@smithy/types/dist-es/extensions/index.js new file mode 100644 index 0000000..0fa92d9 --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@smithy/types/dist-es/extensions/index.js @@ -0,0 +1,3 @@ +export * from "./defaultClientConfiguration"; +export * from "./defaultExtensionConfiguration"; +export { AlgorithmId } from "./checksum"; diff --git a/amplify/functions/fetchDocuments/node_modules/@smithy/types/dist-es/extensions/retry.js b/amplify/functions/fetchDocuments/node_modules/@smithy/types/dist-es/extensions/retry.js new file mode 100644 index 0000000..cb0ff5c --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@smithy/types/dist-es/extensions/retry.js @@ -0,0 +1 @@ +export {}; diff --git a/amplify/functions/fetchDocuments/node_modules/@smithy/types/dist-es/externals-check/browser-externals-check.js b/amplify/functions/fetchDocuments/node_modules/@smithy/types/dist-es/externals-check/browser-externals-check.js new file mode 100644 index 0000000..cb0ff5c --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@smithy/types/dist-es/externals-check/browser-externals-check.js @@ -0,0 +1 @@ +export {}; diff --git a/amplify/functions/fetchDocuments/node_modules/@smithy/types/dist-es/feature-ids.js b/amplify/functions/fetchDocuments/node_modules/@smithy/types/dist-es/feature-ids.js new file mode 100644 index 0000000..cb0ff5c --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@smithy/types/dist-es/feature-ids.js @@ -0,0 +1 @@ +export {}; diff --git 
a/amplify/functions/fetchDocuments/node_modules/@smithy/types/dist-es/http.js b/amplify/functions/fetchDocuments/node_modules/@smithy/types/dist-es/http.js new file mode 100644 index 0000000..27b22f0 --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@smithy/types/dist-es/http.js @@ -0,0 +1,5 @@ +export var FieldPosition; +(function (FieldPosition) { + FieldPosition[FieldPosition["HEADER"] = 0] = "HEADER"; + FieldPosition[FieldPosition["TRAILER"] = 1] = "TRAILER"; +})(FieldPosition || (FieldPosition = {})); diff --git a/amplify/functions/fetchDocuments/node_modules/@smithy/types/dist-es/http/httpHandlerInitialization.js b/amplify/functions/fetchDocuments/node_modules/@smithy/types/dist-es/http/httpHandlerInitialization.js new file mode 100644 index 0000000..cb0ff5c --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@smithy/types/dist-es/http/httpHandlerInitialization.js @@ -0,0 +1 @@ +export {}; diff --git a/amplify/functions/fetchDocuments/node_modules/@smithy/types/dist-es/identity/apiKeyIdentity.js b/amplify/functions/fetchDocuments/node_modules/@smithy/types/dist-es/identity/apiKeyIdentity.js new file mode 100644 index 0000000..cb0ff5c --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@smithy/types/dist-es/identity/apiKeyIdentity.js @@ -0,0 +1 @@ +export {}; diff --git a/amplify/functions/fetchDocuments/node_modules/@smithy/types/dist-es/identity/awsCredentialIdentity.js b/amplify/functions/fetchDocuments/node_modules/@smithy/types/dist-es/identity/awsCredentialIdentity.js new file mode 100644 index 0000000..cb0ff5c --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@smithy/types/dist-es/identity/awsCredentialIdentity.js @@ -0,0 +1 @@ +export {}; diff --git a/amplify/functions/fetchDocuments/node_modules/@smithy/types/dist-es/identity/identity.js b/amplify/functions/fetchDocuments/node_modules/@smithy/types/dist-es/identity/identity.js new file mode 100644 index 0000000..cb0ff5c --- /dev/null +++ 
b/amplify/functions/fetchDocuments/node_modules/@smithy/types/dist-es/identity/identity.js @@ -0,0 +1 @@ +export {}; diff --git a/amplify/functions/fetchDocuments/node_modules/@smithy/types/dist-es/identity/index.js b/amplify/functions/fetchDocuments/node_modules/@smithy/types/dist-es/identity/index.js new file mode 100644 index 0000000..3360320 --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@smithy/types/dist-es/identity/index.js @@ -0,0 +1,4 @@ +export * from "./apiKeyIdentity"; +export * from "./awsCredentialIdentity"; +export * from "./identity"; +export * from "./tokenIdentity"; diff --git a/amplify/functions/fetchDocuments/node_modules/@smithy/types/dist-es/identity/tokenIdentity.js b/amplify/functions/fetchDocuments/node_modules/@smithy/types/dist-es/identity/tokenIdentity.js new file mode 100644 index 0000000..cb0ff5c --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@smithy/types/dist-es/identity/tokenIdentity.js @@ -0,0 +1 @@ +export {}; diff --git a/amplify/functions/fetchDocuments/node_modules/@smithy/types/dist-es/index.js b/amplify/functions/fetchDocuments/node_modules/@smithy/types/dist-es/index.js new file mode 100644 index 0000000..c370335 --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@smithy/types/dist-es/index.js @@ -0,0 +1,37 @@ +export * from "./abort"; +export * from "./auth"; +export * from "./blob/blob-payload-input-types"; +export * from "./checksum"; +export * from "./client"; +export * from "./command"; +export * from "./connection"; +export * from "./crypto"; +export * from "./encode"; +export * from "./endpoint"; +export * from "./endpoints"; +export * from "./eventStream"; +export * from "./extensions"; +export * from "./feature-ids"; +export * from "./http"; +export * from "./http/httpHandlerInitialization"; +export * from "./identity"; +export * from "./logger"; +export * from "./middleware"; +export * from "./pagination"; +export * from "./profile"; +export * from "./response"; 
+export * from "./retry"; +export * from "./serde"; +export * from "./shapes"; +export * from "./signature"; +export * from "./stream"; +export * from "./streaming-payload/streaming-blob-common-types"; +export * from "./streaming-payload/streaming-blob-payload-input-types"; +export * from "./streaming-payload/streaming-blob-payload-output-types"; +export * from "./transfer"; +export * from "./transform/client-payload-blob-type-narrow"; +export * from "./transform/no-undefined"; +export * from "./transform/type-transform"; +export * from "./uri"; +export * from "./util"; +export * from "./waiter"; diff --git a/amplify/functions/fetchDocuments/node_modules/@smithy/types/dist-es/logger.js b/amplify/functions/fetchDocuments/node_modules/@smithy/types/dist-es/logger.js new file mode 100644 index 0000000..cb0ff5c --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@smithy/types/dist-es/logger.js @@ -0,0 +1 @@ +export {}; diff --git a/amplify/functions/fetchDocuments/node_modules/@smithy/types/dist-es/middleware.js b/amplify/functions/fetchDocuments/node_modules/@smithy/types/dist-es/middleware.js new file mode 100644 index 0000000..7d0d050 --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@smithy/types/dist-es/middleware.js @@ -0,0 +1 @@ +export const SMITHY_CONTEXT_KEY = "__smithy_context"; diff --git a/amplify/functions/fetchDocuments/node_modules/@smithy/types/dist-es/pagination.js b/amplify/functions/fetchDocuments/node_modules/@smithy/types/dist-es/pagination.js new file mode 100644 index 0000000..cb0ff5c --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@smithy/types/dist-es/pagination.js @@ -0,0 +1 @@ +export {}; diff --git a/amplify/functions/fetchDocuments/node_modules/@smithy/types/dist-es/profile.js b/amplify/functions/fetchDocuments/node_modules/@smithy/types/dist-es/profile.js new file mode 100644 index 0000000..9d56c8d --- /dev/null +++ 
b/amplify/functions/fetchDocuments/node_modules/@smithy/types/dist-es/profile.js @@ -0,0 +1,6 @@ +export var IniSectionType; +(function (IniSectionType) { + IniSectionType["PROFILE"] = "profile"; + IniSectionType["SSO_SESSION"] = "sso-session"; + IniSectionType["SERVICES"] = "services"; +})(IniSectionType || (IniSectionType = {})); diff --git a/amplify/functions/fetchDocuments/node_modules/@smithy/types/dist-es/response.js b/amplify/functions/fetchDocuments/node_modules/@smithy/types/dist-es/response.js new file mode 100644 index 0000000..cb0ff5c --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@smithy/types/dist-es/response.js @@ -0,0 +1 @@ +export {}; diff --git a/amplify/functions/fetchDocuments/node_modules/@smithy/types/dist-es/retry.js b/amplify/functions/fetchDocuments/node_modules/@smithy/types/dist-es/retry.js new file mode 100644 index 0000000..cb0ff5c --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@smithy/types/dist-es/retry.js @@ -0,0 +1 @@ +export {}; diff --git a/amplify/functions/fetchDocuments/node_modules/@smithy/types/dist-es/serde.js b/amplify/functions/fetchDocuments/node_modules/@smithy/types/dist-es/serde.js new file mode 100644 index 0000000..cb0ff5c --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@smithy/types/dist-es/serde.js @@ -0,0 +1 @@ +export {}; diff --git a/amplify/functions/fetchDocuments/node_modules/@smithy/types/dist-es/shapes.js b/amplify/functions/fetchDocuments/node_modules/@smithy/types/dist-es/shapes.js new file mode 100644 index 0000000..cb0ff5c --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@smithy/types/dist-es/shapes.js @@ -0,0 +1 @@ +export {}; diff --git a/amplify/functions/fetchDocuments/node_modules/@smithy/types/dist-es/signature.js b/amplify/functions/fetchDocuments/node_modules/@smithy/types/dist-es/signature.js new file mode 100644 index 0000000..cb0ff5c --- /dev/null +++ 
b/amplify/functions/fetchDocuments/node_modules/@smithy/types/dist-es/signature.js @@ -0,0 +1 @@ +export {}; diff --git a/amplify/functions/fetchDocuments/node_modules/@smithy/types/dist-es/stream.js b/amplify/functions/fetchDocuments/node_modules/@smithy/types/dist-es/stream.js new file mode 100644 index 0000000..cb0ff5c --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@smithy/types/dist-es/stream.js @@ -0,0 +1 @@ +export {}; diff --git a/amplify/functions/fetchDocuments/node_modules/@smithy/types/dist-es/streaming-payload/streaming-blob-common-types.js b/amplify/functions/fetchDocuments/node_modules/@smithy/types/dist-es/streaming-payload/streaming-blob-common-types.js new file mode 100644 index 0000000..cb0ff5c --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@smithy/types/dist-es/streaming-payload/streaming-blob-common-types.js @@ -0,0 +1 @@ +export {}; diff --git a/amplify/functions/fetchDocuments/node_modules/@smithy/types/dist-es/streaming-payload/streaming-blob-payload-input-types.js b/amplify/functions/fetchDocuments/node_modules/@smithy/types/dist-es/streaming-payload/streaming-blob-payload-input-types.js new file mode 100644 index 0000000..cb0ff5c --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@smithy/types/dist-es/streaming-payload/streaming-blob-payload-input-types.js @@ -0,0 +1 @@ +export {}; diff --git a/amplify/functions/fetchDocuments/node_modules/@smithy/types/dist-es/streaming-payload/streaming-blob-payload-output-types.js b/amplify/functions/fetchDocuments/node_modules/@smithy/types/dist-es/streaming-payload/streaming-blob-payload-output-types.js new file mode 100644 index 0000000..cb0ff5c --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@smithy/types/dist-es/streaming-payload/streaming-blob-payload-output-types.js @@ -0,0 +1 @@ +export {}; diff --git a/amplify/functions/fetchDocuments/node_modules/@smithy/types/dist-es/transfer.js 
b/amplify/functions/fetchDocuments/node_modules/@smithy/types/dist-es/transfer.js new file mode 100644 index 0000000..f776151 --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@smithy/types/dist-es/transfer.js @@ -0,0 +1,6 @@ +export var RequestHandlerProtocol; +(function (RequestHandlerProtocol) { + RequestHandlerProtocol["HTTP_0_9"] = "http/0.9"; + RequestHandlerProtocol["HTTP_1_0"] = "http/1.0"; + RequestHandlerProtocol["TDS_8_0"] = "tds/8.0"; +})(RequestHandlerProtocol || (RequestHandlerProtocol = {})); diff --git a/amplify/functions/fetchDocuments/node_modules/@smithy/types/dist-es/transform/client-method-transforms.js b/amplify/functions/fetchDocuments/node_modules/@smithy/types/dist-es/transform/client-method-transforms.js new file mode 100644 index 0000000..cb0ff5c --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@smithy/types/dist-es/transform/client-method-transforms.js @@ -0,0 +1 @@ +export {}; diff --git a/amplify/functions/fetchDocuments/node_modules/@smithy/types/dist-es/transform/client-payload-blob-type-narrow.js b/amplify/functions/fetchDocuments/node_modules/@smithy/types/dist-es/transform/client-payload-blob-type-narrow.js new file mode 100644 index 0000000..cb0ff5c --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@smithy/types/dist-es/transform/client-payload-blob-type-narrow.js @@ -0,0 +1 @@ +export {}; diff --git a/amplify/functions/fetchDocuments/node_modules/@smithy/types/dist-es/transform/exact.js b/amplify/functions/fetchDocuments/node_modules/@smithy/types/dist-es/transform/exact.js new file mode 100644 index 0000000..cb0ff5c --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@smithy/types/dist-es/transform/exact.js @@ -0,0 +1 @@ +export {}; diff --git a/amplify/functions/fetchDocuments/node_modules/@smithy/types/dist-es/transform/no-undefined.js b/amplify/functions/fetchDocuments/node_modules/@smithy/types/dist-es/transform/no-undefined.js new file mode 100644 index 
0000000..cb0ff5c --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@smithy/types/dist-es/transform/no-undefined.js @@ -0,0 +1 @@ +export {}; diff --git a/amplify/functions/fetchDocuments/node_modules/@smithy/types/dist-es/transform/type-transform.js b/amplify/functions/fetchDocuments/node_modules/@smithy/types/dist-es/transform/type-transform.js new file mode 100644 index 0000000..cb0ff5c --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@smithy/types/dist-es/transform/type-transform.js @@ -0,0 +1 @@ +export {}; diff --git a/amplify/functions/fetchDocuments/node_modules/@smithy/types/dist-es/uri.js b/amplify/functions/fetchDocuments/node_modules/@smithy/types/dist-es/uri.js new file mode 100644 index 0000000..cb0ff5c --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@smithy/types/dist-es/uri.js @@ -0,0 +1 @@ +export {}; diff --git a/amplify/functions/fetchDocuments/node_modules/@smithy/types/dist-es/util.js b/amplify/functions/fetchDocuments/node_modules/@smithy/types/dist-es/util.js new file mode 100644 index 0000000..cb0ff5c --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@smithy/types/dist-es/util.js @@ -0,0 +1 @@ +export {}; diff --git a/amplify/functions/fetchDocuments/node_modules/@smithy/types/dist-es/waiter.js b/amplify/functions/fetchDocuments/node_modules/@smithy/types/dist-es/waiter.js new file mode 100644 index 0000000..cb0ff5c --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@smithy/types/dist-es/waiter.js @@ -0,0 +1 @@ +export {}; diff --git a/amplify/functions/fetchDocuments/node_modules/@smithy/types/dist-types/abort-handler.d.ts b/amplify/functions/fetchDocuments/node_modules/@smithy/types/dist-types/abort-handler.d.ts new file mode 100644 index 0000000..09a0544 --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@smithy/types/dist-types/abort-handler.d.ts @@ -0,0 +1,7 @@ +import type { AbortSignal as DeprecatedAbortSignal } from "./abort"; +/** + * 
@public + */ +export interface AbortHandler { + (this: AbortSignal | DeprecatedAbortSignal, ev: any): any; +} diff --git a/amplify/functions/fetchDocuments/node_modules/@smithy/types/dist-types/abort.d.ts b/amplify/functions/fetchDocuments/node_modules/@smithy/types/dist-types/abort.d.ts new file mode 100644 index 0000000..80fc87f --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@smithy/types/dist-types/abort.d.ts @@ -0,0 +1,50 @@ +import type { AbortHandler } from "./abort-handler"; +/** + * @public + */ +export { AbortHandler }; +/** + * @public + * @deprecated use platform (global) type for AbortSignal. + * + * Holders of an AbortSignal object may query if the associated operation has + * been aborted and register an onabort handler. + * + * @see https://developer.mozilla.org/en-US/docs/Web/API/AbortSignal + */ +export interface AbortSignal { + /** + * Whether the action represented by this signal has been cancelled. + */ + readonly aborted: boolean; + /** + * A function to be invoked when the action represented by this signal has + * been cancelled. + */ + onabort: AbortHandler | Function | null; +} +/** + * @public + * @deprecated use platform (global) type for AbortController. + * + * The AWS SDK uses a Controller/Signal model to allow for cooperative + * cancellation of asynchronous operations. When initiating such an operation, + * the caller can create an AbortController and then provide linked signal to + * subtasks. This allows a single source to communicate to multiple consumers + * that an action has been aborted without dictating how that cancellation + * should be handled. + * + * @see https://developer.mozilla.org/en-US/docs/Web/API/AbortController + */ +export interface AbortController { + /** + * An object that reports whether the action associated with this + * `AbortController` has been cancelled. 
+ */ + readonly signal: AbortSignal; + /** + * Declares the operation associated with this AbortController to have been + * cancelled. + */ + abort(): void; +} diff --git a/amplify/functions/fetchDocuments/node_modules/@smithy/types/dist-types/auth/HttpApiKeyAuth.d.ts b/amplify/functions/fetchDocuments/node_modules/@smithy/types/dist-types/auth/HttpApiKeyAuth.d.ts new file mode 100644 index 0000000..5d74340 --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@smithy/types/dist-types/auth/HttpApiKeyAuth.d.ts @@ -0,0 +1,7 @@ +/** + * @internal + */ +export declare enum HttpApiKeyAuthLocation { + HEADER = "header", + QUERY = "query" +} diff --git a/amplify/functions/fetchDocuments/node_modules/@smithy/types/dist-types/auth/HttpAuthScheme.d.ts b/amplify/functions/fetchDocuments/node_modules/@smithy/types/dist-types/auth/HttpAuthScheme.d.ts new file mode 100644 index 0000000..c5be532 --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@smithy/types/dist-types/auth/HttpAuthScheme.d.ts @@ -0,0 +1,49 @@ +import { Identity, IdentityProvider } from "../identity/identity"; +import { HandlerExecutionContext } from "../middleware"; +import { HttpSigner } from "./HttpSigner"; +import { IdentityProviderConfig } from "./IdentityProviderConfig"; +/** + * ID for {@link HttpAuthScheme} + * @internal + */ +export type HttpAuthSchemeId = string; +/** + * Interface that defines an HttpAuthScheme + * @internal + */ +export interface HttpAuthScheme { + /** + * ID for an HttpAuthScheme, typically the absolute shape ID of a Smithy auth trait. + */ + schemeId: HttpAuthSchemeId; + /** + * Gets the IdentityProvider corresponding to an HttpAuthScheme. + */ + identityProvider(config: IdentityProviderConfig): IdentityProvider | undefined; + /** + * HttpSigner corresponding to an HttpAuthScheme. + */ + signer: HttpSigner; +} +/** + * Interface that defines the identity and signing properties when selecting + * an HttpAuthScheme. 
+ * @internal + */ +export interface HttpAuthOption { + schemeId: HttpAuthSchemeId; + identityProperties?: Record; + signingProperties?: Record; + propertiesExtractor?: (config: TConfig, context: TContext) => { + identityProperties?: Record; + signingProperties?: Record; + }; +} +/** + * @internal + */ +export interface SelectedHttpAuthScheme { + httpAuthOption: HttpAuthOption; + identity: Identity; + signer: HttpSigner; +} diff --git a/amplify/functions/fetchDocuments/node_modules/@smithy/types/dist-types/auth/HttpAuthSchemeProvider.d.ts b/amplify/functions/fetchDocuments/node_modules/@smithy/types/dist-types/auth/HttpAuthSchemeProvider.d.ts new file mode 100644 index 0000000..710dc8f --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@smithy/types/dist-types/auth/HttpAuthSchemeProvider.d.ts @@ -0,0 +1,20 @@ +import { HandlerExecutionContext } from "../middleware"; +import { HttpAuthOption } from "./HttpAuthScheme"; +/** + * @internal + */ +export interface HttpAuthSchemeParameters { + operation?: string; +} +/** + * @internal + */ +export interface HttpAuthSchemeProvider { + (authParameters: TParameters): HttpAuthOption[]; +} +/** + * @internal + */ +export interface HttpAuthSchemeParametersProvider { + (config: TConfig, context: TContext, input: TInput): Promise; +} diff --git a/amplify/functions/fetchDocuments/node_modules/@smithy/types/dist-types/auth/HttpSigner.d.ts b/amplify/functions/fetchDocuments/node_modules/@smithy/types/dist-types/auth/HttpSigner.d.ts new file mode 100644 index 0000000..ea2969c --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@smithy/types/dist-types/auth/HttpSigner.d.ts @@ -0,0 +1,41 @@ +import { HttpRequest, HttpResponse } from "../http"; +import { Identity } from "../identity/identity"; +/** + * @internal + */ +export interface ErrorHandler { + (signingProperties: Record): (error: E) => never; +} +/** + * @internal + */ +export interface SuccessHandler { + (httpResponse: HttpResponse | unknown, 
signingProperties: Record): void; +} +/** + * Interface to sign identity and signing properties. + * @internal + */ +export interface HttpSigner { + /** + * Signs an HttpRequest with an identity and signing properties. + * @param httpRequest request to sign + * @param identity identity to sing the request with + * @param signingProperties property bag for signing + * @returns signed request in a promise + */ + sign(httpRequest: HttpRequest, identity: Identity, signingProperties: Record): Promise; + /** + * Handler that executes after the {@link HttpSigner.sign} invocation and corresponding + * middleware throws an error. + * The error handler is expected to throw the error it receives, so the return type of the error handler is `never`. + * @internal + */ + errorHandler?: ErrorHandler; + /** + * Handler that executes after the {@link HttpSigner.sign} invocation and corresponding + * middleware succeeds. + * @internal + */ + successHandler?: SuccessHandler; +} diff --git a/amplify/functions/fetchDocuments/node_modules/@smithy/types/dist-types/auth/IdentityProviderConfig.d.ts b/amplify/functions/fetchDocuments/node_modules/@smithy/types/dist-types/auth/IdentityProviderConfig.d.ts new file mode 100644 index 0000000..663d2ec --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@smithy/types/dist-types/auth/IdentityProviderConfig.d.ts @@ -0,0 +1,14 @@ +import { Identity, IdentityProvider } from "../identity/identity"; +import { HttpAuthSchemeId } from "./HttpAuthScheme"; +/** + * Interface to get an IdentityProvider for a specified HttpAuthScheme + * @internal + */ +export interface IdentityProviderConfig { + /** + * Get the IdentityProvider for a specified HttpAuthScheme. 
+ * @param schemeId schemeId of the HttpAuthScheme + * @returns IdentityProvider or undefined if HttpAuthScheme is not found + */ + getIdentityProvider(schemeId: HttpAuthSchemeId): IdentityProvider | undefined; +} diff --git a/amplify/functions/fetchDocuments/node_modules/@smithy/types/dist-types/auth/auth.d.ts b/amplify/functions/fetchDocuments/node_modules/@smithy/types/dist-types/auth/auth.d.ts new file mode 100644 index 0000000..2aaabbc --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@smithy/types/dist-types/auth/auth.d.ts @@ -0,0 +1,57 @@ +/** + * @internal + * + * Authentication schemes represent a way that the service will authenticate the customer’s identity. + */ +export interface AuthScheme { + /** + * @example "sigv4a" or "sigv4" + */ + name: "sigv4" | "sigv4a" | string; + /** + * @example "s3" + */ + signingName: string; + /** + * @example "us-east-1" + */ + signingRegion: string; + /** + * @example ["*"] + * @example ["us-west-2", "us-east-1"] + */ + signingRegionSet?: string[]; + /** + * @deprecated this field was renamed to signingRegion. + */ + signingScope?: never; + properties: Record; +} +/** + * @internal + * @deprecated + */ +export interface HttpAuthDefinition { + /** + * Defines the location of where the Auth is serialized. + */ + in: HttpAuthLocation; + /** + * Defines the name of the HTTP header or query string parameter + * that contains the Auth. + */ + name: string; + /** + * Defines the security scheme to use on the `Authorization` header value. + * This can only be set if the "in" property is set to {@link HttpAuthLocation.HEADER}. 
+ */ + scheme?: string; +} +/** + * @internal + * @deprecated + */ +export declare enum HttpAuthLocation { + HEADER = "header", + QUERY = "query" +} diff --git a/amplify/functions/fetchDocuments/node_modules/@smithy/types/dist-types/auth/index.d.ts b/amplify/functions/fetchDocuments/node_modules/@smithy/types/dist-types/auth/index.d.ts new file mode 100644 index 0000000..7436030 --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@smithy/types/dist-types/auth/index.d.ts @@ -0,0 +1,6 @@ +export * from "./auth"; +export * from "./HttpApiKeyAuth"; +export * from "./HttpAuthScheme"; +export * from "./HttpAuthSchemeProvider"; +export * from "./HttpSigner"; +export * from "./IdentityProviderConfig"; diff --git a/amplify/functions/fetchDocuments/node_modules/@smithy/types/dist-types/blob/blob-payload-input-types.d.ts b/amplify/functions/fetchDocuments/node_modules/@smithy/types/dist-types/blob/blob-payload-input-types.d.ts new file mode 100644 index 0000000..e468bae --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@smithy/types/dist-types/blob/blob-payload-input-types.d.ts @@ -0,0 +1,43 @@ +/// +/// +/// +import { Readable } from "stream"; +import type { BlobOptionalType, ReadableStreamOptionalType } from "../externals-check/browser-externals-check"; +/** + * @public + * + * A union of types that can be used as inputs for the service model + * "blob" type when it represents the request's entire payload or body. + * + * For example, in Lambda::invoke, the payload is modeled as a blob type + * and this union applies to it. + * In contrast, in Lambda::createFunction the Zip file option is a blob type, + * but is not the (entire) payload and this union does not apply. + * + * Note: not all types are signable by the standard SignatureV4 signer when + * used as the request body. For example, in Node.js a Readable stream + * is not signable by the default signer. 
+ * They are included in the union because it may work in some cases, + * but the expected types are primarily string and Uint8Array. + * + * Additional details may be found in the internal + * function "getPayloadHash" in the SignatureV4 module. + */ +export type BlobPayloadInputTypes = string | ArrayBuffer | ArrayBufferView | Uint8Array | NodeJsRuntimeBlobTypes | BrowserRuntimeBlobTypes; +/** + * @public + * + * Additional blob types for the Node.js environment. + */ +export type NodeJsRuntimeBlobTypes = Readable | Buffer; +/** + * @public + * + * Additional blob types for the browser environment. + */ +export type BrowserRuntimeBlobTypes = BlobOptionalType | ReadableStreamOptionalType; +/** + * @internal + * @deprecated renamed to BlobPayloadInputTypes. + */ +export type BlobTypes = BlobPayloadInputTypes; diff --git a/amplify/functions/fetchDocuments/node_modules/@smithy/types/dist-types/checksum.d.ts b/amplify/functions/fetchDocuments/node_modules/@smithy/types/dist-types/checksum.d.ts new file mode 100644 index 0000000..1906009 --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@smithy/types/dist-types/checksum.d.ts @@ -0,0 +1,63 @@ +import { SourceData } from "./crypto"; +/** + * @public + * + * An object that provides a checksum of data provided in chunks to `update`. + * The checksum may be performed incrementally as chunks are received or all + * at once when the checksum is finalized, depending on the underlying + * implementation. + * + * It's recommended to compute checksum incrementally to avoid reading the + * entire payload in memory. + * + * A class that implements this interface may accept an optional secret key in its + * constructor while computing checksum value, when using HMAC. If provided, + * this secret key would be used when computing checksum. + */ +export interface Checksum { + /** + * Constant length of the digest created by the algorithm in bytes. 
+ */ + digestLength?: number; + /** + * Creates a new checksum object that contains a deep copy of the internal + * state of the current `Checksum` object. + */ + copy?(): Checksum; + /** + * Returns the digest of all of the data passed. + */ + digest(): Promise; + /** + * Allows marking a checksum for checksums that support the ability + * to mark and reset. + * + * @param readLimit - The maximum limit of bytes that can be read + * before the mark position becomes invalid. + */ + mark?(readLimit: number): void; + /** + * Resets the checksum to its initial value. + */ + reset(): void; + /** + * Adds a chunk of data for which checksum needs to be computed. + * This can be called many times with new data as it is streamed. + * + * Implementations may override this method which passes second param + * which makes Checksum object stateless. + * + * @param chunk - The buffer to update checksum with. + */ + update(chunk: Uint8Array): void; +} +/** + * @public + * + * A constructor for a Checksum that may be used to calculate an HMAC. Implementing + * classes should not directly hold the provided key in memory beyond the + * lexical scope of the constructor. + */ +export interface ChecksumConstructor { + new (secret?: SourceData): Checksum; +} diff --git a/amplify/functions/fetchDocuments/node_modules/@smithy/types/dist-types/client.d.ts b/amplify/functions/fetchDocuments/node_modules/@smithy/types/dist-types/client.d.ts new file mode 100644 index 0000000..8bd8f7e --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@smithy/types/dist-types/client.d.ts @@ -0,0 +1,57 @@ +import { Command } from "./command"; +import { MiddlewareStack } from "./middleware"; +import { MetadataBearer } from "./response"; +import { OptionalParameter } from "./util"; +/** + * @public + * + * A type which checks if the client configuration is optional. + * If all entries of the client configuration are optional, it allows client creation without passing any config. 
+ */ +export type CheckOptionalClientConfig = OptionalParameter; +/** + * @public + * + * function definition for different overrides of client's 'send' function. + */ +export interface InvokeFunction { + (command: Command, options?: any): Promise; + (command: Command, cb: (err: any, data?: OutputType) => void): void; + (command: Command, options: any, cb: (err: any, data?: OutputType) => void): void; + (command: Command, options?: any, cb?: (err: any, data?: OutputType) => void): Promise | void; +} +/** + * @public + * + * Signature that appears on aggregated clients' methods. + */ +export interface InvokeMethod { + (input: InputType, options?: any): Promise; + (input: InputType, cb: (err: any, data?: OutputType) => void): void; + (input: InputType, options: any, cb: (err: any, data?: OutputType) => void): void; + (input: InputType, options?: any, cb?: (err: any, data?: OutputType) => void): Promise | void; +} +/** + * @public + * + * Signature that appears on aggregated clients' methods when argument is optional. + */ +export interface InvokeMethodOptionalArgs { + (): Promise; + (input: InputType, options?: any): Promise; + (input: InputType, cb: (err: any, data?: OutputType) => void): void; + (input: InputType, options: any, cb: (err: any, data?: OutputType) => void): void; + (input: InputType, options?: any, cb?: (err: any, data?: OutputType) => void): Promise | void; +} +/** + * A general interface for service clients, idempotent to browser or node clients + * This type corresponds to SmithyClient(https://github.com/aws/aws-sdk-js-v3/blob/main/packages/smithy-client/src/client.ts). + * It's provided for using without importing the SmithyClient class. 
+ * @internal + */ +export interface Client { + readonly config: ResolvedClientConfiguration; + middlewareStack: MiddlewareStack; + send: InvokeFunction; + destroy: () => void; +} diff --git a/amplify/functions/fetchDocuments/node_modules/@smithy/types/dist-types/command.d.ts b/amplify/functions/fetchDocuments/node_modules/@smithy/types/dist-types/command.d.ts new file mode 100644 index 0000000..3a71ee7 --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@smithy/types/dist-types/command.d.ts @@ -0,0 +1,23 @@ +import { Handler, MiddlewareStack } from "./middleware"; +import { MetadataBearer } from "./response"; +/** + * @public + */ +export interface Command extends CommandIO { + readonly input: InputType; + readonly middlewareStack: MiddlewareStack; + resolveMiddleware(stack: MiddlewareStack, configuration: ResolvedConfiguration, options: any): Handler; +} +/** + * @internal + * + * This is a subset of the Command type used only to detect the i/o types. + */ +export interface CommandIO { + readonly input: InputType; + resolveMiddleware(stack: any, configuration: any, options: any): Handler; +} +/** + * @internal + */ +export type GetOutputType = Command extends CommandIO ? O : never; diff --git a/amplify/functions/fetchDocuments/node_modules/@smithy/types/dist-types/connection/config.d.ts b/amplify/functions/fetchDocuments/node_modules/@smithy/types/dist-types/connection/config.d.ts new file mode 100644 index 0000000..f9d4632 --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@smithy/types/dist-types/connection/config.d.ts @@ -0,0 +1,10 @@ +/** + * @public + */ +export interface ConnectConfiguration { + /** + * The maximum time in milliseconds that the connection phase of a request + * may take before the connection attempt is abandoned. 
+ */ + requestTimeout?: number; +} diff --git a/amplify/functions/fetchDocuments/node_modules/@smithy/types/dist-types/connection/index.d.ts b/amplify/functions/fetchDocuments/node_modules/@smithy/types/dist-types/connection/index.d.ts new file mode 100644 index 0000000..c6c3ea8 --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@smithy/types/dist-types/connection/index.d.ts @@ -0,0 +1,3 @@ +export * from "./config"; +export * from "./manager"; +export * from "./pool"; diff --git a/amplify/functions/fetchDocuments/node_modules/@smithy/types/dist-types/connection/manager.d.ts b/amplify/functions/fetchDocuments/node_modules/@smithy/types/dist-types/connection/manager.d.ts new file mode 100644 index 0000000..5b1a837 --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@smithy/types/dist-types/connection/manager.d.ts @@ -0,0 +1,34 @@ +import { RequestContext } from "../transfer"; +import { ConnectConfiguration } from "./config"; +/** + * @public + */ +export interface ConnectionManagerConfiguration { + /** + * Maximum number of allowed concurrent requests per connection. + */ + maxConcurrency?: number; + /** + * Disables concurrent requests per connection. + */ + disableConcurrency?: boolean; +} +/** + * @public + */ +export interface ConnectionManager { + /** + * Retrieves a connection from the connection pool if available, + * otherwise establish a new connection + */ + lease(requestContext: RequestContext, connectionConfiguration: ConnectConfiguration): T; + /** + * Releases the connection back to the pool making it potentially + * re-usable by other requests. + */ + release(requestContext: RequestContext, connection: T): void; + /** + * Destroys the connection manager. All connections will be closed. 
+ */ + destroy(): void; +} diff --git a/amplify/functions/fetchDocuments/node_modules/@smithy/types/dist-types/connection/pool.d.ts b/amplify/functions/fetchDocuments/node_modules/@smithy/types/dist-types/connection/pool.d.ts new file mode 100644 index 0000000..d43530a --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@smithy/types/dist-types/connection/pool.d.ts @@ -0,0 +1,32 @@ +/** + * @public + */ +export interface ConnectionPool { + /** + * Retrieve the first connection in the pool + */ + poll(): T | void; + /** + * Release the connection back to the pool making it potentially + * re-usable by other requests. + */ + offerLast(connection: T): void; + /** + * Removes the connection from the pool, and destroys it. + */ + destroy(connection: T): void; + /** + * Implements the iterable protocol and allows arrays to be consumed + * by most syntaxes expecting iterables, such as the spread syntax + * and for...of loops + */ + [Symbol.iterator](): Iterator; +} +/** + * Unused. + * @internal + * @deprecated + */ +export interface CacheKey { + destination: string; +} diff --git a/amplify/functions/fetchDocuments/node_modules/@smithy/types/dist-types/crypto.d.ts b/amplify/functions/fetchDocuments/node_modules/@smithy/types/dist-types/crypto.d.ts new file mode 100644 index 0000000..874320e --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@smithy/types/dist-types/crypto.d.ts @@ -0,0 +1,60 @@ +/** + * @public + */ +export type SourceData = string | ArrayBuffer | ArrayBufferView; +/** + * @public + * + * An object that provides a hash of data provided in chunks to `update`. The + * hash may be performed incrementally as chunks are received or all at once + * when the hash is finalized, depending on the underlying implementation. + * + * @deprecated use {@link Checksum} + */ +export interface Hash { + /** + * Adds a chunk of data to the hash. If a buffer is provided, the `encoding` + * argument will be ignored. 
If a string is provided without a specified + * encoding, implementations must assume UTF-8 encoding. + * + * Not all encodings are supported on all platforms, though all must support + * UTF-8. + */ + update(toHash: SourceData, encoding?: "utf8" | "ascii" | "latin1"): void; + /** + * Finalizes the hash and provides a promise that will be fulfilled with the + * raw bytes of the calculated hash. + */ + digest(): Promise; +} +/** + * @public + * + * A constructor for a hash that may be used to calculate an HMAC. Implementing + * classes should not directly hold the provided key in memory beyond the + * lexical scope of the constructor. + * + * @deprecated use {@link ChecksumConstructor} + */ +export interface HashConstructor { + new (secret?: SourceData): Hash; +} +/** + * @public + * + * A function that calculates the hash of a data stream. Determining the hash + * will consume the stream, so only replayable streams should be provided to an + * implementation of this interface. + */ +export interface StreamHasher { + (hashCtor: HashConstructor, stream: StreamType): Promise; +} +/** + * @public + * + * A function that returns a promise fulfilled with bytes from a + * cryptographically secure pseudorandom number generator. + */ +export interface randomValues { + (byteLength: number): Promise; +} diff --git a/amplify/functions/fetchDocuments/node_modules/@smithy/types/dist-types/downlevel-ts3.4/transform/type-transform.d.ts b/amplify/functions/fetchDocuments/node_modules/@smithy/types/dist-types/downlevel-ts3.4/transform/type-transform.d.ts new file mode 100644 index 0000000..312ae6e --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@smithy/types/dist-types/downlevel-ts3.4/transform/type-transform.d.ts @@ -0,0 +1,25 @@ +/** + * @public + * + * Transforms any members of the object T having type FromType + * to ToType. This applies only to exact type matches. 
+ * + * This is for the case where FromType is a union and only those fields + * matching the same union should be transformed. + */ +export type Transform = RecursiveTransformExact; +/** + * @internal + * + * Returns ToType if T matches exactly with FromType. + */ +type TransformExact = [T] extends [FromType] ? ([FromType] extends [T] ? ToType : T) : T; +/** + * @internal + * + * Applies TransformExact to members of an object recursively. + */ +type RecursiveTransformExact = T extends Function ? T : T extends object ? { + [key in keyof T]: [T[key]] extends [FromType] ? [FromType] extends [T[key]] ? ToType : RecursiveTransformExact : RecursiveTransformExact; +} : TransformExact; +export {}; diff --git a/amplify/functions/fetchDocuments/node_modules/@smithy/types/dist-types/encode.d.ts b/amplify/functions/fetchDocuments/node_modules/@smithy/types/dist-types/encode.d.ts new file mode 100644 index 0000000..27d3a18 --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@smithy/types/dist-types/encode.d.ts @@ -0,0 +1,31 @@ +import { Message } from "./eventStream"; +/** + * @public + */ +export interface MessageEncoder { + encode(message: Message): Uint8Array; +} +/** + * @public + */ +export interface MessageDecoder { + decode(message: ArrayBufferView): Message; + feed(message: ArrayBufferView): void; + endOfStream(): void; + getMessage(): AvailableMessage; + getAvailableMessages(): AvailableMessages; +} +/** + * @public + */ +export interface AvailableMessage { + getMessage(): Message | undefined; + isEndOfStream(): boolean; +} +/** + * @public + */ +export interface AvailableMessages { + getMessages(): Message[]; + isEndOfStream(): boolean; +} diff --git a/amplify/functions/fetchDocuments/node_modules/@smithy/types/dist-types/endpoint.d.ts b/amplify/functions/fetchDocuments/node_modules/@smithy/types/dist-types/endpoint.d.ts new file mode 100644 index 0000000..4e93733 --- /dev/null +++ 
b/amplify/functions/fetchDocuments/node_modules/@smithy/types/dist-types/endpoint.d.ts @@ -0,0 +1,77 @@ +import { AuthScheme } from "./auth/auth"; +/** + * @public + */ +export interface EndpointPartition { + name: string; + dnsSuffix: string; + dualStackDnsSuffix: string; + supportsFIPS: boolean; + supportsDualStack: boolean; +} +/** + * @public + */ +export interface EndpointARN { + partition: string; + service: string; + region: string; + accountId: string; + resourceId: Array; +} +/** + * @public + */ +export declare enum EndpointURLScheme { + HTTP = "http", + HTTPS = "https" +} +/** + * @public + */ +export interface EndpointURL { + /** + * The URL scheme such as http or https. + */ + scheme: EndpointURLScheme; + /** + * The authority is the host and optional port component of the URL. + */ + authority: string; + /** + * The parsed path segment of the URL. + * This value is as-is as provided by the user. + */ + path: string; + /** + * The parsed path segment of the URL. + * This value is guranteed to start and end with a "/". + */ + normalizedPath: string; + /** + * A boolean indicating whether the authority is an IP address. 
+ */ + isIp: boolean; +} +/** + * @public + */ +export type EndpointObjectProperty = string | boolean | { + [key: string]: EndpointObjectProperty; +} | EndpointObjectProperty[]; +/** + * @public + */ +export interface EndpointV2 { + url: URL; + properties?: { + authSchemes?: AuthScheme[]; + } & Record; + headers?: Record; +} +/** + * @public + */ +export type EndpointParameters = { + [name: string]: undefined | boolean | string | string[]; +}; diff --git a/amplify/functions/fetchDocuments/node_modules/@smithy/types/dist-types/endpoints/EndpointRuleObject.d.ts b/amplify/functions/fetchDocuments/node_modules/@smithy/types/dist-types/endpoints/EndpointRuleObject.d.ts new file mode 100644 index 0000000..349558e --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@smithy/types/dist-types/endpoints/EndpointRuleObject.d.ts @@ -0,0 +1,27 @@ +import { EndpointObjectProperty } from "../endpoint"; +import { ConditionObject, Expression } from "./shared"; +/** + * @public + */ +export type EndpointObjectProperties = Record; +/** + * @public + */ +export type EndpointObjectHeaders = Record; +/** + * @public + */ +export type EndpointObject = { + url: Expression; + properties?: EndpointObjectProperties; + headers?: EndpointObjectHeaders; +}; +/** + * @public + */ +export type EndpointRuleObject = { + type: "endpoint"; + conditions?: ConditionObject[]; + endpoint: EndpointObject; + documentation?: string; +}; diff --git a/amplify/functions/fetchDocuments/node_modules/@smithy/types/dist-types/endpoints/ErrorRuleObject.d.ts b/amplify/functions/fetchDocuments/node_modules/@smithy/types/dist-types/endpoints/ErrorRuleObject.d.ts new file mode 100644 index 0000000..9ce0733 --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@smithy/types/dist-types/endpoints/ErrorRuleObject.d.ts @@ -0,0 +1,10 @@ +import { ConditionObject, Expression } from "./shared"; +/** + * @public + */ +export type ErrorRuleObject = { + type: "error"; + conditions?: ConditionObject[]; + 
error: Expression; + documentation?: string; +}; diff --git a/amplify/functions/fetchDocuments/node_modules/@smithy/types/dist-types/endpoints/RuleSetObject.d.ts b/amplify/functions/fetchDocuments/node_modules/@smithy/types/dist-types/endpoints/RuleSetObject.d.ts new file mode 100644 index 0000000..669b591 --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@smithy/types/dist-types/endpoints/RuleSetObject.d.ts @@ -0,0 +1,28 @@ +import { RuleSetRules } from "./TreeRuleObject"; +/** + * @public + */ +export type DeprecatedObject = { + message?: string; + since?: string; +}; +/** + * @public + */ +export type ParameterObject = { + type: "String" | "string" | "Boolean" | "boolean"; + default?: string | boolean; + required?: boolean; + documentation?: string; + builtIn?: string; + deprecated?: DeprecatedObject; +}; +/** + * @public + */ +export type RuleSetObject = { + version: string; + serviceId?: string; + parameters: Record; + rules: RuleSetRules; +}; diff --git a/amplify/functions/fetchDocuments/node_modules/@smithy/types/dist-types/endpoints/TreeRuleObject.d.ts b/amplify/functions/fetchDocuments/node_modules/@smithy/types/dist-types/endpoints/TreeRuleObject.d.ts new file mode 100644 index 0000000..180d306 --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@smithy/types/dist-types/endpoints/TreeRuleObject.d.ts @@ -0,0 +1,16 @@ +import { EndpointRuleObject } from "./EndpointRuleObject"; +import { ErrorRuleObject } from "./ErrorRuleObject"; +import { ConditionObject } from "./shared"; +/** + * @public + */ +export type RuleSetRules = Array; +/** + * @public + */ +export type TreeRuleObject = { + type: "tree"; + conditions?: ConditionObject[]; + rules: RuleSetRules; + documentation?: string; +}; diff --git a/amplify/functions/fetchDocuments/node_modules/@smithy/types/dist-types/endpoints/index.d.ts b/amplify/functions/fetchDocuments/node_modules/@smithy/types/dist-types/endpoints/index.d.ts new file mode 100644 index 0000000..64d85cf --- 
/dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@smithy/types/dist-types/endpoints/index.d.ts @@ -0,0 +1,5 @@ +export * from "./EndpointRuleObject"; +export * from "./ErrorRuleObject"; +export * from "./RuleSetObject"; +export * from "./shared"; +export * from "./TreeRuleObject"; diff --git a/amplify/functions/fetchDocuments/node_modules/@smithy/types/dist-types/endpoints/shared.d.ts b/amplify/functions/fetchDocuments/node_modules/@smithy/types/dist-types/endpoints/shared.d.ts new file mode 100644 index 0000000..bd11393 --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@smithy/types/dist-types/endpoints/shared.d.ts @@ -0,0 +1,55 @@ +import { Logger } from "../logger"; +/** + * @public + */ +export type ReferenceObject = { + ref: string; +}; +/** + * @public + */ +export type FunctionObject = { + fn: string; + argv: FunctionArgv; +}; +/** + * @public + */ +export type FunctionArgv = Array; +/** + * @public + */ +export type FunctionReturn = string | boolean | number | { + [key: string]: FunctionReturn; +}; +/** + * @public + */ +export type ConditionObject = FunctionObject & { + assign?: string; +}; +/** + * @public + */ +export type Expression = string | ReferenceObject | FunctionObject; +/** + * @public + */ +export type EndpointParams = Record; +/** + * @public + */ +export type EndpointResolverOptions = { + endpointParams: EndpointParams; + logger?: Logger; +}; +/** + * @public + */ +export type ReferenceRecord = Record; +/** + * @public + */ +export type EvaluateOptions = EndpointResolverOptions & { + referenceRecord: ReferenceRecord; +}; diff --git a/amplify/functions/fetchDocuments/node_modules/@smithy/types/dist-types/eventStream.d.ts b/amplify/functions/fetchDocuments/node_modules/@smithy/types/dist-types/eventStream.d.ts new file mode 100644 index 0000000..7b9af6c --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@smithy/types/dist-types/eventStream.d.ts @@ -0,0 +1,137 @@ +import { HttpRequest } from "./http"; 
+import { FinalizeHandler, FinalizeHandlerArguments, FinalizeHandlerOutput, HandlerExecutionContext } from "./middleware"; +import { MetadataBearer } from "./response"; +/** + * @public + * + * An event stream message. The headers and body properties will always be + * defined, with empty headers represented as an object with no keys and an + * empty body represented as a zero-length Uint8Array. + */ +export interface Message { + headers: MessageHeaders; + body: Uint8Array; +} +/** + * @public + */ +export type MessageHeaders = Record; +/** + * @public + */ +export type HeaderValue = { + type: K; + value: V; +}; +/** + * @public + */ +export type BooleanHeaderValue = HeaderValue<"boolean", boolean>; +/** + * @public + */ +export type ByteHeaderValue = HeaderValue<"byte", number>; +/** + * @public + */ +export type ShortHeaderValue = HeaderValue<"short", number>; +/** + * @public + */ +export type IntegerHeaderValue = HeaderValue<"integer", number>; +/** + * @public + */ +export type LongHeaderValue = HeaderValue<"long", Int64>; +/** + * @public + */ +export type BinaryHeaderValue = HeaderValue<"binary", Uint8Array>; +/** + * @public + */ +export type StringHeaderValue = HeaderValue<"string", string>; +/** + * @public + */ +export type TimestampHeaderValue = HeaderValue<"timestamp", Date>; +/** + * @public + */ +export type UuidHeaderValue = HeaderValue<"uuid", string>; +/** + * @public + */ +export type MessageHeaderValue = BooleanHeaderValue | ByteHeaderValue | ShortHeaderValue | IntegerHeaderValue | LongHeaderValue | BinaryHeaderValue | StringHeaderValue | TimestampHeaderValue | UuidHeaderValue; +/** + * @public + */ +export interface Int64 { + readonly bytes: Uint8Array; + valueOf: () => number; + toString: () => string; +} +/** + * @public + * + * Util functions for serializing or deserializing event stream + */ +export interface EventStreamSerdeContext { + eventStreamMarshaller: EventStreamMarshaller; +} +/** + * @public + * + * A function which deserializes 
binary event stream message into modeled shape. + */ +export interface EventStreamMarshallerDeserFn { + (body: StreamType, deserializer: (input: Record) => Promise): AsyncIterable; +} +/** + * @public + * + * A function that serializes modeled shape into binary stream message. + */ +export interface EventStreamMarshallerSerFn { + (input: AsyncIterable, serializer: (event: T) => Message): StreamType; +} +/** + * @public + * + * An interface which provides functions for serializing and deserializing binary event stream + * to/from corresponsing modeled shape. + */ +export interface EventStreamMarshaller { + deserialize: EventStreamMarshallerDeserFn; + serialize: EventStreamMarshallerSerFn; +} +/** + * @public + */ +export interface EventStreamRequestSigner { + sign(request: HttpRequest): Promise; +} +/** + * @public + */ +export interface EventStreamPayloadHandler { + handle: (next: FinalizeHandler, args: FinalizeHandlerArguments, context?: HandlerExecutionContext) => Promise>; +} +/** + * @public + */ +export interface EventStreamPayloadHandlerProvider { + (options: any): EventStreamPayloadHandler; +} +/** + * @public + */ +export interface EventStreamSerdeProvider { + (options: any): EventStreamMarshaller; +} +/** + * @public + */ +export interface EventStreamSignerProvider { + (options: any): EventStreamRequestSigner; +} diff --git a/amplify/functions/fetchDocuments/node_modules/@smithy/types/dist-types/extensions/checksum.d.ts b/amplify/functions/fetchDocuments/node_modules/@smithy/types/dist-types/extensions/checksum.d.ts new file mode 100644 index 0000000..88995b9 --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@smithy/types/dist-types/extensions/checksum.d.ts @@ -0,0 +1,58 @@ +import { ChecksumConstructor } from "../checksum"; +import { HashConstructor } from "../crypto"; +/** + * @internal + */ +export declare enum AlgorithmId { + MD5 = "md5", + CRC32 = "crc32", + CRC32C = "crc32c", + SHA1 = "sha1", + SHA256 = "sha256" +} +/** + * @internal 
+ */ +export interface ChecksumAlgorithm { + algorithmId(): AlgorithmId; + checksumConstructor(): ChecksumConstructor | HashConstructor; +} +/** + * @deprecated unused. + * @internal + */ +type ChecksumConfigurationLegacy = { + [other in string | number]: any; +}; +/** + * @internal + */ +export interface ChecksumConfiguration extends ChecksumConfigurationLegacy { + addChecksumAlgorithm(algo: ChecksumAlgorithm): void; + checksumAlgorithms(): ChecksumAlgorithm[]; +} +/** + * @deprecated will be removed for implicit type. + * @internal + */ +type GetChecksumConfigurationType = (runtimeConfig: Partial<{ + sha256: ChecksumConstructor | HashConstructor; + md5: ChecksumConstructor | HashConstructor; +}>) => ChecksumConfiguration; +/** + * @internal + * @deprecated will be moved to smithy-client. + */ +export declare const getChecksumConfiguration: GetChecksumConfigurationType; +/** + * @internal + * @deprecated will be removed for implicit type. + */ +type ResolveChecksumRuntimeConfigType = (clientConfig: ChecksumConfiguration) => any; +/** + * @internal + * + * @deprecated will be moved to smithy-client. + */ +export declare const resolveChecksumRuntimeConfig: ResolveChecksumRuntimeConfigType; +export {}; diff --git a/amplify/functions/fetchDocuments/node_modules/@smithy/types/dist-types/extensions/defaultClientConfiguration.d.ts b/amplify/functions/fetchDocuments/node_modules/@smithy/types/dist-types/extensions/defaultClientConfiguration.d.ts new file mode 100644 index 0000000..12eb924 --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@smithy/types/dist-types/extensions/defaultClientConfiguration.d.ts @@ -0,0 +1,33 @@ +import { ChecksumConfiguration } from "./checksum"; +/** + * @deprecated will be replaced by DefaultExtensionConfiguration. 
+ * @internal + * + * Default client configuration consisting various configurations for modifying a service client + */ +export interface DefaultClientConfiguration extends ChecksumConfiguration { +} +/** + * @deprecated will be removed for implicit type. + */ +type GetDefaultConfigurationType = (runtimeConfig: any) => DefaultClientConfiguration; +/** + * @deprecated moving to @smithy/smithy-client. + * @internal + * + * Helper function to resolve default client configuration from runtime config + * + */ +export declare const getDefaultClientConfiguration: GetDefaultConfigurationType; +/** + * @deprecated will be removed for implicit type. + */ +type ResolveDefaultRuntimeConfigType = (clientConfig: DefaultClientConfiguration) => any; +/** + * @deprecated moving to @smithy/smithy-client. + * @internal + * + * Helper function to resolve runtime config from default client configuration + */ +export declare const resolveDefaultRuntimeConfig: ResolveDefaultRuntimeConfigType; +export {}; diff --git a/amplify/functions/fetchDocuments/node_modules/@smithy/types/dist-types/extensions/defaultExtensionConfiguration.d.ts b/amplify/functions/fetchDocuments/node_modules/@smithy/types/dist-types/extensions/defaultExtensionConfiguration.d.ts new file mode 100644 index 0000000..0e6fa0d --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@smithy/types/dist-types/extensions/defaultExtensionConfiguration.d.ts @@ -0,0 +1,9 @@ +import { ChecksumConfiguration } from "./checksum"; +import { RetryStrategyConfiguration } from "./retry"; +/** + * @internal + * + * Default extension configuration consisting various configurations for modifying a service client + */ +export interface DefaultExtensionConfiguration extends ChecksumConfiguration, RetryStrategyConfiguration { +} diff --git a/amplify/functions/fetchDocuments/node_modules/@smithy/types/dist-types/extensions/index.d.ts b/amplify/functions/fetchDocuments/node_modules/@smithy/types/dist-types/extensions/index.d.ts new 
file mode 100644 index 0000000..cce65a1 --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@smithy/types/dist-types/extensions/index.d.ts @@ -0,0 +1,4 @@ +export * from "./defaultClientConfiguration"; +export * from "./defaultExtensionConfiguration"; +export { AlgorithmId, ChecksumAlgorithm, ChecksumConfiguration } from "./checksum"; +export { RetryStrategyConfiguration } from "./retry"; diff --git a/amplify/functions/fetchDocuments/node_modules/@smithy/types/dist-types/extensions/retry.d.ts b/amplify/functions/fetchDocuments/node_modules/@smithy/types/dist-types/extensions/retry.d.ts new file mode 100644 index 0000000..8b91f1c --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@smithy/types/dist-types/extensions/retry.d.ts @@ -0,0 +1,18 @@ +import { RetryStrategyV2 } from "../retry"; +import { Provider, RetryStrategy } from "../util"; +/** + * A configuration interface with methods called by runtime extension + * @internal + */ +export interface RetryStrategyConfiguration { + /** + * Set retry strategy used for all http requests + * @param retryStrategy + */ + setRetryStrategy(retryStrategy: Provider): void; + /** + * Get retry strategy used for all http requests + * @param retryStrategy + */ + retryStrategy(): Provider; +} diff --git a/amplify/functions/fetchDocuments/node_modules/@smithy/types/dist-types/externals-check/browser-externals-check.d.ts b/amplify/functions/fetchDocuments/node_modules/@smithy/types/dist-types/externals-check/browser-externals-check.d.ts new file mode 100644 index 0000000..0de7f8f --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@smithy/types/dist-types/externals-check/browser-externals-check.d.ts @@ -0,0 +1,35 @@ +import type { Exact } from "../transform/exact"; +/** + * @public + * + * A checked type that resolves to Blob if it is defined as more than a stub, otherwise + * resolves to 'never' so as not to widen the type of unions containing Blob + * excessively. 
+ */ +export type BlobOptionalType = BlobDefined extends true ? Blob : Unavailable; +/** + * @public + * + * A checked type that resolves to ReadableStream if it is defined as more than a stub, otherwise + * resolves to 'never' so as not to widen the type of unions containing ReadableStream + * excessively. + */ +export type ReadableStreamOptionalType = ReadableStreamDefined extends true ? ReadableStream : Unavailable; +/** + * @public + * + * Indicates a type is unavailable if it resolves to this. + */ +export type Unavailable = never; +/** + * @internal + * + * Whether the global types define more than a stub for ReadableStream. + */ +export type ReadableStreamDefined = Exact extends true ? false : true; +/** + * @internal + * + * Whether the global types define more than a stub for Blob. + */ +export type BlobDefined = Exact extends true ? false : true; diff --git a/amplify/functions/fetchDocuments/node_modules/@smithy/types/dist-types/feature-ids.d.ts b/amplify/functions/fetchDocuments/node_modules/@smithy/types/dist-types/feature-ids.d.ts new file mode 100644 index 0000000..19e4bd2 --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@smithy/types/dist-types/feature-ids.d.ts @@ -0,0 +1,16 @@ +/** + * @internal + */ +export type SmithyFeatures = Partial<{ + RESOURCE_MODEL: "A"; + WAITER: "B"; + PAGINATOR: "C"; + RETRY_MODE_LEGACY: "D"; + RETRY_MODE_STANDARD: "E"; + RETRY_MODE_ADAPTIVE: "F"; + GZIP_REQUEST_COMPRESSION: "L"; + PROTOCOL_RPC_V2_CBOR: "M"; + ENDPOINT_OVERRIDE: "N"; + SIGV4A_SIGNING: "S"; + CREDENTIALS_CODE: "e"; +}>; diff --git a/amplify/functions/fetchDocuments/node_modules/@smithy/types/dist-types/http.d.ts b/amplify/functions/fetchDocuments/node_modules/@smithy/types/dist-types/http.d.ts new file mode 100644 index 0000000..76c6cb2 --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@smithy/types/dist-types/http.d.ts @@ -0,0 +1,112 @@ +import { AbortSignal as DeprecatedAbortSignal } from "./abort"; +import { URI } from 
"./uri"; +/** + * @public + * + * @deprecated use {@link EndpointV2} from `@smithy/types`. + */ +export interface Endpoint { + protocol: string; + hostname: string; + port?: number; + path: string; + query?: QueryParameterBag; +} +/** + * @public + * + * Interface an HTTP request class. Contains + * addressing information in addition to standard message properties. + */ +export interface HttpRequest extends HttpMessage, URI { + method: string; +} +/** + * @public + * + * Represents an HTTP message as received in reply to a request. Contains a + * numeric status code in addition to standard message properties. + */ +export interface HttpResponse extends HttpMessage { + statusCode: number; + reason?: string; +} +/** + * @public + * + * Represents an HTTP message with headers and an optional static or streaming + * body. body: ArrayBuffer | ArrayBufferView | string | Uint8Array | Readable | ReadableStream; + */ +export interface HttpMessage { + headers: HeaderBag; + body?: any; +} +/** + * @public + * + * A mapping of query parameter names to strings or arrays of strings, with the + * second being used when a parameter contains a list of values. Value can be set + * to null when query is not in key-value pairs shape + */ +export type QueryParameterBag = Record | null>; +/** + * @public + */ +export type FieldOptions = { + name: string; + kind?: FieldPosition; + values?: string[]; +}; +/** + * @public + */ +export declare enum FieldPosition { + HEADER = 0, + TRAILER = 1 +} +/** + * @public + * + * A mapping of header names to string values. Multiple values for the same + * header should be represented as a single string with values separated by + * `, `. + * + * Keys should be considered case insensitive, even if this is not enforced by a + * particular implementation. 
For example, given the following HeaderBag, where + * keys differ only in case: + * + * ```json + * { + * 'x-request-date': '2000-01-01T00:00:00Z', + * 'X-Request-Date': '2001-01-01T00:00:00Z' + * } + * ``` + * + * The SDK may at any point during processing remove one of the object + * properties in favor of the other. The headers may or may not be combined, and + * the SDK will not deterministically select which header candidate to use. + */ +export type HeaderBag = Record; +/** + * @public + * + * Represents an HTTP message with headers and an optional static or streaming + * body. bode: ArrayBuffer | ArrayBufferView | string | Uint8Array | Readable | ReadableStream; + */ +export interface HttpMessage { + headers: HeaderBag; + body?: any; +} +/** + * @public + * + * Represents the options that may be passed to an Http Handler. + */ +export interface HttpHandlerOptions { + abortSignal?: AbortSignal | DeprecatedAbortSignal; + /** + * The maximum time in milliseconds that the connection phase of a request + * may take before the connection attempt is abandoned. + */ + requestTimeout?: number; +} diff --git a/amplify/functions/fetchDocuments/node_modules/@smithy/types/dist-types/http/httpHandlerInitialization.d.ts b/amplify/functions/fetchDocuments/node_modules/@smithy/types/dist-types/http/httpHandlerInitialization.d.ts new file mode 100644 index 0000000..bca0851 --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@smithy/types/dist-types/http/httpHandlerInitialization.d.ts @@ -0,0 +1,123 @@ +/// +/// +import type { Agent as hAgent, AgentOptions as hAgentOptions } from "http"; +import type { Agent as hsAgent, AgentOptions as hsAgentOptions } from "https"; +import { HttpRequest as IHttpRequest } from "../http"; +import { Logger } from "../logger"; +/** + * + * This type represents an alternate client constructor option for the entry + * "requestHandler". 
Instead of providing an instance of a requestHandler, the user + * may provide the requestHandler's constructor options for either the + * NodeHttpHandler or FetchHttpHandler. + * + * For other RequestHandlers like HTTP2 or WebSocket, + * constructor parameter passthrough is not currently available. + * + * @public + */ +export type RequestHandlerParams = NodeHttpHandlerOptions | FetchHttpHandlerOptions; +/** + * Represents the http options that can be passed to a node http client. + * @public + */ +export interface NodeHttpHandlerOptions { + /** + * The maximum time in milliseconds that the connection phase of a request + * may take before the connection attempt is abandoned. + * + * Defaults to 0, which disables the timeout. + */ + connectionTimeout?: number; + /** + * The number of milliseconds a request can take before automatically being terminated. + * Defaults to 0, which disables the timeout. + */ + requestTimeout?: number; + /** + * Delay before the NodeHttpHandler checks for socket exhaustion, + * and emits a warning if the active sockets and enqueued request count is greater than + * 2x the maxSockets count. + * + * Defaults to connectionTimeout + requestTimeout or 3000ms if those are not set. + */ + socketAcquisitionWarningTimeout?: number; + /** + * This field is deprecated, and requestTimeout should be used instead. + * The maximum time in milliseconds that a socket may remain idle before it + * is closed. + * + * @deprecated Use {@link requestTimeout} + */ + socketTimeout?: number; + /** + * You can pass http.Agent or its constructor options. + */ + httpAgent?: hAgent | hAgentOptions; + /** + * You can pass https.Agent or its constructor options. + */ + httpsAgent?: hsAgent | hsAgentOptions; + /** + * Optional logger. + */ + logger?: Logger; +} +/** + * Represents the http options that can be passed to a browser http client. 
+ * @public + */ +export interface FetchHttpHandlerOptions { + /** + * The number of milliseconds a request can take before being automatically + * terminated. + */ + requestTimeout?: number; + /** + * Whether to allow the request to outlive the page. Default value is false. + * + * There may be limitations to the payload size, number of concurrent requests, + * request duration etc. when using keepalive in browsers. + * + * These may change over time, so look for up to date information about + * these limitations before enabling keepalive. + */ + keepAlive?: boolean; + /** + * A string indicating whether credentials will be sent with the request always, never, or + * only when sent to a same-origin URL. + * @see https://developer.mozilla.org/en-US/docs/Web/API/Request/credentials + */ + credentials?: "include" | "omit" | "same-origin" | undefined | string; + /** + * Cache settings for fetch. + * @see https://developer.mozilla.org/en-US/docs/Web/API/Request/cache + */ + cache?: "default" | "force-cache" | "no-cache" | "no-store" | "only-if-cached" | "reload"; + /** + * An optional function that produces additional RequestInit + * parameters for each httpRequest. + * + * This is applied last via merging with Object.assign() and overwrites other values + * set from other sources. + * + * @example + * ```js + * new Client({ + * requestHandler: { + * requestInit(httpRequest) { + * return { cache: "no-store" }; + * } + * } + * }); + * ``` + */ + requestInit?: (httpRequest: IHttpRequest) => RequestInit; +} +declare global { + /** + * interface merging stub. 
+ */ + interface RequestInit { + } +} diff --git a/amplify/functions/fetchDocuments/node_modules/@smithy/types/dist-types/identity/apiKeyIdentity.d.ts b/amplify/functions/fetchDocuments/node_modules/@smithy/types/dist-types/identity/apiKeyIdentity.d.ts new file mode 100644 index 0000000..27750d4 --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@smithy/types/dist-types/identity/apiKeyIdentity.d.ts @@ -0,0 +1,14 @@ +import { Identity, IdentityProvider } from "../identity/identity"; +/** + * @public + */ +export interface ApiKeyIdentity extends Identity { + /** + * The literal API Key + */ + readonly apiKey: string; +} +/** + * @public + */ +export type ApiKeyIdentityProvider = IdentityProvider; diff --git a/amplify/functions/fetchDocuments/node_modules/@smithy/types/dist-types/identity/awsCredentialIdentity.d.ts b/amplify/functions/fetchDocuments/node_modules/@smithy/types/dist-types/identity/awsCredentialIdentity.d.ts new file mode 100644 index 0000000..7aa5a4b --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@smithy/types/dist-types/identity/awsCredentialIdentity.d.ts @@ -0,0 +1,31 @@ +import { Identity, IdentityProvider } from "./identity"; +/** + * @public + */ +export interface AwsCredentialIdentity extends Identity { + /** + * AWS access key ID + */ + readonly accessKeyId: string; + /** + * AWS secret access key + */ + readonly secretAccessKey: string; + /** + * A security or session token to use with these credentials. Usually + * present for temporary credentials. + */ + readonly sessionToken?: string; + /** + * AWS credential scope for this set of credentials. + */ + readonly credentialScope?: string; + /** + * AWS accountId. 
+ */ + readonly accountId?: string; +} +/** + * @public + */ +export type AwsCredentialIdentityProvider = IdentityProvider; diff --git a/amplify/functions/fetchDocuments/node_modules/@smithy/types/dist-types/identity/identity.d.ts b/amplify/functions/fetchDocuments/node_modules/@smithy/types/dist-types/identity/identity.d.ts new file mode 100644 index 0000000..c6fd0d1 --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@smithy/types/dist-types/identity/identity.d.ts @@ -0,0 +1,15 @@ +/** + * @public + */ +export interface Identity { + /** + * A `Date` when the identity or credential will no longer be accepted. + */ + readonly expiration?: Date; +} +/** + * @public + */ +export interface IdentityProvider { + (identityProperties?: Record): Promise; +} diff --git a/amplify/functions/fetchDocuments/node_modules/@smithy/types/dist-types/identity/index.d.ts b/amplify/functions/fetchDocuments/node_modules/@smithy/types/dist-types/identity/index.d.ts new file mode 100644 index 0000000..3360320 --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@smithy/types/dist-types/identity/index.d.ts @@ -0,0 +1,4 @@ +export * from "./apiKeyIdentity"; +export * from "./awsCredentialIdentity"; +export * from "./identity"; +export * from "./tokenIdentity"; diff --git a/amplify/functions/fetchDocuments/node_modules/@smithy/types/dist-types/identity/tokenIdentity.d.ts b/amplify/functions/fetchDocuments/node_modules/@smithy/types/dist-types/identity/tokenIdentity.d.ts new file mode 100644 index 0000000..84a74ff --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@smithy/types/dist-types/identity/tokenIdentity.d.ts @@ -0,0 +1,14 @@ +import { Identity, IdentityProvider } from "../identity/identity"; +/** + * @internal + */ +export interface TokenIdentity extends Identity { + /** + * The literal token string + */ + readonly token: string; +} +/** + * @internal + */ +export type TokenIdentityProvider = IdentityProvider; diff --git 
a/amplify/functions/fetchDocuments/node_modules/@smithy/types/dist-types/index.d.ts b/amplify/functions/fetchDocuments/node_modules/@smithy/types/dist-types/index.d.ts new file mode 100644 index 0000000..c370335 --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@smithy/types/dist-types/index.d.ts @@ -0,0 +1,37 @@ +export * from "./abort"; +export * from "./auth"; +export * from "./blob/blob-payload-input-types"; +export * from "./checksum"; +export * from "./client"; +export * from "./command"; +export * from "./connection"; +export * from "./crypto"; +export * from "./encode"; +export * from "./endpoint"; +export * from "./endpoints"; +export * from "./eventStream"; +export * from "./extensions"; +export * from "./feature-ids"; +export * from "./http"; +export * from "./http/httpHandlerInitialization"; +export * from "./identity"; +export * from "./logger"; +export * from "./middleware"; +export * from "./pagination"; +export * from "./profile"; +export * from "./response"; +export * from "./retry"; +export * from "./serde"; +export * from "./shapes"; +export * from "./signature"; +export * from "./stream"; +export * from "./streaming-payload/streaming-blob-common-types"; +export * from "./streaming-payload/streaming-blob-payload-input-types"; +export * from "./streaming-payload/streaming-blob-payload-output-types"; +export * from "./transfer"; +export * from "./transform/client-payload-blob-type-narrow"; +export * from "./transform/no-undefined"; +export * from "./transform/type-transform"; +export * from "./uri"; +export * from "./util"; +export * from "./waiter"; diff --git a/amplify/functions/fetchDocuments/node_modules/@smithy/types/dist-types/logger.d.ts b/amplify/functions/fetchDocuments/node_modules/@smithy/types/dist-types/logger.d.ts new file mode 100644 index 0000000..f66a664 --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@smithy/types/dist-types/logger.d.ts @@ -0,0 +1,13 @@ +/** + * @public + * + * Represents a logger 
object that is available in HandlerExecutionContext + * throughout the middleware stack. + */ +export interface Logger { + trace?: (...content: any[]) => void; + debug: (...content: any[]) => void; + info: (...content: any[]) => void; + warn: (...content: any[]) => void; + error: (...content: any[]) => void; +} diff --git a/amplify/functions/fetchDocuments/node_modules/@smithy/types/dist-types/middleware.d.ts b/amplify/functions/fetchDocuments/node_modules/@smithy/types/dist-types/middleware.d.ts new file mode 100644 index 0000000..cc20098 --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@smithy/types/dist-types/middleware.d.ts @@ -0,0 +1,534 @@ +import type { AuthScheme, HttpAuthDefinition } from "./auth/auth"; +import type { SelectedHttpAuthScheme } from "./auth/HttpAuthScheme"; +import type { Command } from "./command"; +import type { EndpointV2 } from "./endpoint"; +import type { SmithyFeatures } from "./feature-ids"; +import type { Logger } from "./logger"; +import type { UserAgent } from "./util"; +/** + * @public + */ +export interface InitializeHandlerArguments { + /** + * User input to a command. Reflects the userland representation of the + * union of data types the command can effectively handle. + */ + input: Input; +} +/** + * @public + */ +export interface InitializeHandlerOutput extends DeserializeHandlerOutput { + output: Output; +} +/** + * @public + */ +export interface SerializeHandlerArguments extends InitializeHandlerArguments { + /** + * The user input serialized as a request object. The request object is unknown, + * so you cannot modify it directly. When work with request, you need to guard its + * type to e.g. HttpRequest with 'instanceof' operand + * + * During the build phase of the execution of a middleware stack, a built + * request may or may not be available. 
+ */ + request?: unknown; +} +/** + * @public + */ +export interface SerializeHandlerOutput extends InitializeHandlerOutput { +} +/** + * @public + */ +export interface BuildHandlerArguments extends FinalizeHandlerArguments { +} +/** + * @public + */ +export interface BuildHandlerOutput extends InitializeHandlerOutput { +} +/** + * @public + */ +export interface FinalizeHandlerArguments extends SerializeHandlerArguments { + /** + * The user input serialized as a request. + */ + request: unknown; +} +/** + * @public + */ +export interface FinalizeHandlerOutput extends InitializeHandlerOutput { +} +/** + * @public + */ +export interface DeserializeHandlerArguments extends FinalizeHandlerArguments { +} +/** + * @public + */ +export interface DeserializeHandlerOutput { + /** + * The raw response object from runtime is deserialized to structured output object. + * The response object is unknown so you cannot modify it directly. When work with + * response, you need to guard its type to e.g. HttpResponse with 'instanceof' operand. + * + * During the deserialize phase of the execution of a middleware stack, a deserialized + * response may or may not be available + */ + response: unknown; + output?: Output; +} +/** + * @public + */ +export interface InitializeHandler { + /** + * Asynchronously converts an input object into an output object. + * + * @param args - An object containing a input to the command as well as any + * associated or previously generated execution artifacts. + */ + (args: InitializeHandlerArguments): Promise>; +} +/** + * @public + */ +export type Handler = InitializeHandler; +/** + * @public + */ +export interface SerializeHandler { + /** + * Asynchronously converts an input object into an output object. + * + * @param args - An object containing a input to the command as well as any + * associated or previously generated execution artifacts. 
+ */ + (args: SerializeHandlerArguments): Promise>; +} +/** + * @public + */ +export interface FinalizeHandler { + /** + * Asynchronously converts an input object into an output object. + * + * @param args - An object containing a input to the command as well as any + * associated or previously generated execution artifacts. + */ + (args: FinalizeHandlerArguments): Promise>; +} +/** + * @public + */ +export interface BuildHandler { + (args: BuildHandlerArguments): Promise>; +} +/** + * @public + */ +export interface DeserializeHandler { + (args: DeserializeHandlerArguments): Promise>; +} +/** + * @public + * + * A factory function that creates functions implementing the `Handler` + * interface. + */ +export interface InitializeMiddleware { + /** + * @param next - The handler to invoke after this middleware has operated on + * the user input and before this middleware operates on the output. + * + * @param context - Invariant data and functions for use by the handler. + */ + (next: InitializeHandler, context: HandlerExecutionContext): InitializeHandler; +} +/** + * @public + * + * A factory function that creates functions implementing the `BuildHandler` + * interface. + */ +export interface SerializeMiddleware { + /** + * @param next - The handler to invoke after this middleware has operated on + * the user input and before this middleware operates on the output. + * + * @param context - Invariant data and functions for use by the handler. + */ + (next: SerializeHandler, context: HandlerExecutionContext): SerializeHandler; +} +/** + * @public + * + * A factory function that creates functions implementing the `FinalizeHandler` + * interface. + */ +export interface FinalizeRequestMiddleware { + /** + * @param next - The handler to invoke after this middleware has operated on + * the user input and before this middleware operates on the output. + * + * @param context - Invariant data and functions for use by the handler. 
+ */ + (next: FinalizeHandler, context: HandlerExecutionContext): FinalizeHandler; +} +/** + * @public + */ +export interface BuildMiddleware { + (next: BuildHandler, context: HandlerExecutionContext): BuildHandler; +} +/** + * @public + */ +export interface DeserializeMiddleware { + (next: DeserializeHandler, context: HandlerExecutionContext): DeserializeHandler; +} +/** + * @public + */ +export type MiddlewareType = InitializeMiddleware | SerializeMiddleware | BuildMiddleware | FinalizeRequestMiddleware | DeserializeMiddleware; +/** + * @public + * + * A factory function that creates the terminal handler atop which a middleware + * stack sits. + */ +export interface Terminalware { + (context: HandlerExecutionContext): DeserializeHandler; +} +/** + * @public + */ +export type Step = "initialize" | "serialize" | "build" | "finalizeRequest" | "deserialize"; +/** + * @public + */ +export type Priority = "high" | "normal" | "low"; +/** + * @public + */ +export interface HandlerOptions { + /** + * Handlers are ordered using a "step" that describes the stage of command + * execution at which the handler will be executed. The available steps are: + * + * - initialize: The input is being prepared. Examples of typical + * initialization tasks include injecting default options computing + * derived parameters. + * - serialize: The input is complete and ready to be serialized. Examples + * of typical serialization tasks include input validation and building + * an HTTP request from user input. + * - build: The input has been serialized into an HTTP request, but that + * request may require further modification. Any request alterations + * will be applied to all retries. Examples of typical build tasks + * include injecting HTTP headers that describe a stable aspect of the + * request, such as `Content-Length` or a body checksum. + * - finalizeRequest: The request is being prepared to be sent over the wire. 
The + * request in this stage should already be semantically complete and + * should therefore only be altered as match the recipient's + * expectations. Examples of typical finalization tasks include request + * signing and injecting hop-by-hop headers. + * - deserialize: The response has arrived, the middleware here will deserialize + * the raw response object to structured response + * + * Unlike initialization and build handlers, which are executed once + * per operation execution, finalization and deserialize handlers will be + * executed foreach HTTP request sent. + * + * @defaultValue 'initialize' + */ + step?: Step; + /** + * A list of strings to any that identify the general purpose or important + * characteristics of a given handler. + */ + tags?: Array; + /** + * A unique name to refer to a middleware + */ + name?: string; + /** + * @internal + * Aliases allows for middleware to be found by multiple names besides {@link HandlerOptions.name}. + * This allows for references to replaced middleware to continue working, e.g. replacing + * multiple auth-specific middleware with a single generic auth middleware. + */ + aliases?: Array; + /** + * A flag to override the existing middleware with the same name. Without + * setting it, adding middleware with duplicated name will throw an exception. + * @internal + */ + override?: boolean; +} +/** + * @public + */ +export interface AbsoluteLocation { + /** + * By default middleware will be added to individual step in un-guaranteed order. + * In the case that + * + * @defaultValue 'normal' + */ + priority?: Priority; +} +/** + * @public + */ +export type Relation = "before" | "after"; +/** + * @public + */ +export interface RelativeLocation { + /** + * Specify the relation to be before or after a know middleware. + */ + relation: Relation; + /** + * A known middleware name to indicate inserting middleware's location. 
+ */ + toMiddleware: string; +} +/** + * @public + */ +export type RelativeMiddlewareOptions = RelativeLocation & Omit; +/** + * @public + */ +export interface InitializeHandlerOptions extends HandlerOptions { + step?: "initialize"; +} +/** + * @public + */ +export interface SerializeHandlerOptions extends HandlerOptions { + step: "serialize"; +} +/** + * @public + */ +export interface BuildHandlerOptions extends HandlerOptions { + step: "build"; +} +/** + * @public + */ +export interface FinalizeRequestHandlerOptions extends HandlerOptions { + step: "finalizeRequest"; +} +/** + * @public + */ +export interface DeserializeHandlerOptions extends HandlerOptions { + step: "deserialize"; +} +/** + * @public + * + * A stack storing middleware. It can be resolved into a handler. It supports 2 + * approaches for adding middleware: + * 1. Adding middleware to specific step with `add()`. The order of middleware + * added into same step is determined by order of adding them. If one middleware + * needs to be executed at the front of the step or at the end of step, set + * `priority` options to `high` or `low`. + * 2. Adding middleware to location relative to known middleware with `addRelativeTo()`. + * This is useful when given middleware must be executed before or after specific + * middleware(`toMiddleware`). You can add a middleware relatively to another + * middleware which also added relatively. But eventually, this relative middleware + * chain **must** be 'anchored' by a middleware that added using `add()` API + * with absolute `step` and `priority`. This mothod will throw if specified + * `toMiddleware` is not found. 
+ */ +export interface MiddlewareStack extends Pluggable { + /** + * Add middleware to the stack to be executed during the "initialize" step, + * optionally specifying a priority, tags and name + */ + add(middleware: InitializeMiddleware, options?: InitializeHandlerOptions & AbsoluteLocation): void; + /** + * Add middleware to the stack to be executed during the "serialize" step, + * optionally specifying a priority, tags and name + */ + add(middleware: SerializeMiddleware, options: SerializeHandlerOptions & AbsoluteLocation): void; + /** + * Add middleware to the stack to be executed during the "build" step, + * optionally specifying a priority, tags and name + */ + add(middleware: BuildMiddleware, options: BuildHandlerOptions & AbsoluteLocation): void; + /** + * Add middleware to the stack to be executed during the "finalizeRequest" step, + * optionally specifying a priority, tags and name + */ + add(middleware: FinalizeRequestMiddleware, options: FinalizeRequestHandlerOptions & AbsoluteLocation): void; + /** + * Add middleware to the stack to be executed during the "deserialize" step, + * optionally specifying a priority, tags and name + */ + add(middleware: DeserializeMiddleware, options: DeserializeHandlerOptions & AbsoluteLocation): void; + /** + * Add middleware to a stack position before or after a known middleware,optionally + * specifying name and tags. + */ + addRelativeTo(middleware: MiddlewareType, options: RelativeMiddlewareOptions): void; + /** + * Apply a customization function to mutate the middleware stack, often + * used for customizations that requires mutating multiple middleware. + */ + use(pluggable: Pluggable): void; + /** + * Create a shallow clone of this stack. Step bindings and handler priorities + * and tags are preserved in the copy. + */ + clone(): MiddlewareStack; + /** + * Removes middleware from the stack. + * + * If a string is provided, it will be treated as middleware name. 
If a middleware + * is inserted with the given name, it will be removed. + * + * If a middleware class is provided, all usages thereof will be removed. + */ + remove(toRemove: MiddlewareType | string): boolean; + /** + * Removes middleware that contains given tag + * + * Multiple middleware will potentially be removed + */ + removeByTag(toRemove: string): boolean; + /** + * Create a stack containing the middlewares in this stack as well as the + * middlewares in the `from` stack. Neither source is modified, and step + * bindings and handler priorities and tags are preserved in the copy. + */ + concat(from: MiddlewareStack): MiddlewareStack; + /** + * Returns a list of the current order of middleware in the stack. + * This does not execute the middleware functions, nor does it + * provide a reference to the stack itself. + */ + identify(): string[]; + /** + * @internal + * + * When an operation is called using this stack, + * it will log its list of middleware to the console using + * the identify function. + * + * @param toggle - set whether to log on resolve. + * If no argument given, returns the current value. + */ + identifyOnResolve(toggle?: boolean): boolean; + /** + * Builds a single handler function from zero or more middleware classes and + * a core handler. The core handler is meant to send command objects to AWS + * services and return promises that will resolve with the operation result + * or be rejected with an error. + * + * When a composed handler is invoked, the arguments will pass through all + * middleware in a defined order, and the return from the innermost handler + * will pass through all middleware in the reverse of that order. 
+ */ + resolve(handler: DeserializeHandler, context: HandlerExecutionContext): InitializeHandler; +} +/** + * @internal + */ +export declare const SMITHY_CONTEXT_KEY = "__smithy_context"; +/** + * @public + * + * Data and helper objects that are not expected to change from one execution of + * a composed handler to another. + */ +export interface HandlerExecutionContext { + /** + * A logger that may be invoked by any handler during execution of an + * operation. + */ + logger?: Logger; + /** + * Name of the service the operation is being sent to. + */ + clientName?: string; + /** + * Name of the operation being executed. + */ + commandName?: string; + /** + * Additional user agent that inferred by middleware. It can be used to save + * the internal user agent sections without overriding the `customUserAgent` + * config in clients. + */ + userAgent?: UserAgent; + /** + * Resolved by the endpointMiddleware function of `@smithy/middleware-endpoint` + * in the serialization stage. + */ + endpointV2?: EndpointV2; + /** + * Set at the same time as endpointV2. + */ + authSchemes?: AuthScheme[]; + /** + * The current auth configuration that has been set by any auth middleware and + * that will prevent from being set more than once. + */ + currentAuthConfig?: HttpAuthDefinition; + /** + * @deprecated do not extend this field, it is a carryover from AWS SDKs. + * Used by DynamoDbDocumentClient. + */ + dynamoDbDocumentClientOptions?: Partial<{ + overrideInputFilterSensitiveLog(...args: any[]): string | void; + overrideOutputFilterSensitiveLog(...args: any[]): string | void; + }>; + /** + * @internal + * Context for Smithy properties. + */ + [SMITHY_CONTEXT_KEY]?: { + service?: string; + operation?: string; + commandInstance?: Command; + selectedHttpAuthScheme?: SelectedHttpAuthScheme; + features?: SmithyFeatures; + /** + * @deprecated + * Do not assign arbitrary members to the Smithy Context, + * fields should be explicitly declared here to avoid collisions. 
+ */ + [key: string]: unknown; + }; + /** + * @deprecated + * Do not assign arbitrary members to the context, since + * they can interfere with existing functionality. + * + * Additional members should instead be declared on the SMITHY_CONTEXT_KEY + * or other reserved keys. + */ + [key: string]: any; +} +/** + * @public + */ +export interface Pluggable { + /** + * A function that mutate the passed in middleware stack. Functions implementing + * this interface can add, remove, modify existing middleware stack from clients + * or commands + */ + applyToStack: (stack: MiddlewareStack) => void; +} diff --git a/amplify/functions/fetchDocuments/node_modules/@smithy/types/dist-types/pagination.d.ts b/amplify/functions/fetchDocuments/node_modules/@smithy/types/dist-types/pagination.d.ts new file mode 100644 index 0000000..e10fdda --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@smithy/types/dist-types/pagination.d.ts @@ -0,0 +1,33 @@ +import type { Client } from "./client"; +import type { Command } from "./command"; +/** + * @public + * + * Expected type definition of a paginator. + */ +export type Paginator = AsyncGenerator; +/** + * @public + * + * Expected paginator configuration passed to an operation. Services will extend + * this interface definition and may type client further. + */ +export interface PaginationConfiguration { + client: Client; + pageSize?: number; + startingToken?: any; + /** + * For some APIs, such as CloudWatchLogs events, the next page token will always + * be present. + * + * When true, this config field will have the paginator stop when the token doesn't change + * instead of when it is not present. + */ + stopOnSameToken?: boolean; + /** + * @param command - reference to the instantiated command. This callback is executed + * prior to sending the command with the paginator's client. + * @returns the original command or a replacement, defaulting to the original command object. 
+ */ + withCommand?: (command: Command) => typeof command | undefined; +} diff --git a/amplify/functions/fetchDocuments/node_modules/@smithy/types/dist-types/profile.d.ts b/amplify/functions/fetchDocuments/node_modules/@smithy/types/dist-types/profile.d.ts new file mode 100644 index 0000000..b7885d9 --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@smithy/types/dist-types/profile.d.ts @@ -0,0 +1,30 @@ +/** + * @public + */ +export declare enum IniSectionType { + PROFILE = "profile", + SSO_SESSION = "sso-session", + SERVICES = "services" +} +/** + * @public + */ +export type IniSection = Record; +/** + * @public + * + * @deprecated Please use {@link IniSection} + */ +export interface Profile extends IniSection { +} +/** + * @public + */ +export type ParsedIniData = Record; +/** + * @public + */ +export interface SharedConfigFiles { + credentialsFile: ParsedIniData; + configFile: ParsedIniData; +} diff --git a/amplify/functions/fetchDocuments/node_modules/@smithy/types/dist-types/response.d.ts b/amplify/functions/fetchDocuments/node_modules/@smithy/types/dist-types/response.d.ts new file mode 100644 index 0000000..afcfe8f --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@smithy/types/dist-types/response.d.ts @@ -0,0 +1,40 @@ +/** + * @public + */ +export interface ResponseMetadata { + /** + * The status code of the last HTTP response received for this operation. + */ + httpStatusCode?: number; + /** + * A unique identifier for the last request sent for this operation. Often + * requested by AWS service teams to aid in debugging. + */ + requestId?: string; + /** + * A secondary identifier for the last request sent. Used for debugging. + */ + extendedRequestId?: string; + /** + * A tertiary identifier for the last request sent. Used for debugging. + */ + cfId?: string; + /** + * The number of times this operation was attempted. 
+ */ + attempts?: number; + /** + * The total amount of time (in milliseconds) that was spent waiting between + * retry attempts. + */ + totalRetryDelay?: number; +} +/** + * @public + */ +export interface MetadataBearer { + /** + * Metadata pertaining to this request. + */ + $metadata: ResponseMetadata; +} diff --git a/amplify/functions/fetchDocuments/node_modules/@smithy/types/dist-types/retry.d.ts b/amplify/functions/fetchDocuments/node_modules/@smithy/types/dist-types/retry.d.ts new file mode 100644 index 0000000..7bb5881 --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@smithy/types/dist-types/retry.d.ts @@ -0,0 +1,133 @@ +import { SdkError } from "./shapes"; +/** + * @public + */ +export type RetryErrorType = +/** + * This is a connection level error such as a socket timeout, socket connect + * error, tls negotiation timeout etc... + * Typically these should never be applied for non-idempotent request types + * since in this scenario, it's impossible to know whether the operation had + * a side effect on the server. + */ +"TRANSIENT" +/** + * This is an error where the server explicitly told the client to back off, + * such as a 429 or 503 Http error. + */ + | "THROTTLING" +/** + * This is a server error that isn't explicitly throttling but is considered + * by the client to be something that should be retried. + */ + | "SERVER_ERROR" +/** + * Doesn't count against any budgets. This could be something like a 401 + * challenge in Http. + */ + | "CLIENT_ERROR"; +/** + * @public + */ +export interface RetryErrorInfo { + /** + * The error thrown during the initial request, if available. + */ + error?: SdkError; + errorType: RetryErrorType; + /** + * Protocol hint. This could come from Http's 'retry-after' header or + * something from MQTT or any other protocol that has the ability to convey + * retry info from a peer. + * + * The Date after which a retry should be attempted. 
+ */ + retryAfterHint?: Date; +} +/** + * @public + */ +export interface RetryBackoffStrategy { + /** + * @returns the number of milliseconds to wait before retrying an action. + */ + computeNextBackoffDelay(retryAttempt: number): number; +} +/** + * @public + */ +export interface StandardRetryBackoffStrategy extends RetryBackoffStrategy { + /** + * Sets the delayBase used to compute backoff delays. + * @param delayBase - + */ + setDelayBase(delayBase: number): void; +} +/** + * @public + */ +export interface RetryStrategyOptions { + backoffStrategy: RetryBackoffStrategy; + maxRetriesBase: number; +} +/** + * @public + */ +export interface RetryToken { + /** + * @returns the current count of retry. + */ + getRetryCount(): number; + /** + * @returns the number of milliseconds to wait before retrying an action. + */ + getRetryDelay(): number; +} +/** + * @public + */ +export interface StandardRetryToken extends RetryToken { + /** + * @returns the cost of the last retry attempt. + */ + getRetryCost(): number | undefined; +} +/** + * @public + */ +export interface RetryStrategyV2 { + /** + * Called before any retries (for the first call to the operation). It either + * returns a retry token or an error upon the failure to acquire a token prior. + * + * tokenScope is arbitrary and out of scope for this component. However, + * adding it here offers us a lot of future flexibility for outage detection. + * For example, it could be "us-east-1" on a shared retry strategy, or + * "us-west-2-c:dynamodb". + */ + acquireInitialRetryToken(retryTokenScope: string): Promise; + /** + * After a failed operation call, this function is invoked to refresh the + * retryToken returned by acquireInitialRetryToken(). This function can + * either choose to allow another retry and send a new or updated token, + * or reject the retry attempt and report the error either in an exception + * or returning an error. 
+ */ + refreshRetryTokenForRetry(tokenToRenew: RetryToken, errorInfo: RetryErrorInfo): Promise; + /** + * Upon successful completion of the operation, this function is called + * to record that the operation was successful. + */ + recordSuccess(token: RetryToken): void; +} +/** + * @public + */ +export type ExponentialBackoffJitterType = "DEFAULT" | "NONE" | "FULL" | "DECORRELATED"; +/** + * @public + */ +export interface ExponentialBackoffStrategyOptions { + jitterType: ExponentialBackoffJitterType; + backoffScaleValue?: number; +} diff --git a/amplify/functions/fetchDocuments/node_modules/@smithy/types/dist-types/serde.d.ts b/amplify/functions/fetchDocuments/node_modules/@smithy/types/dist-types/serde.d.ts new file mode 100644 index 0000000..a81314f --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@smithy/types/dist-types/serde.d.ts @@ -0,0 +1,112 @@ +import { Endpoint } from "./http"; +import { RequestHandler } from "./transfer"; +import { Decoder, Encoder, Provider } from "./util"; +/** + * @public + * + * Interface for object requires an Endpoint set. + */ +export interface EndpointBearer { + endpoint: Provider; +} +/** + * @public + */ +export interface StreamCollector { + /** + * A function that converts a stream into an array of bytes. + * + * @param stream - The low-level native stream from browser or Nodejs runtime + */ + (stream: any): Promise; +} +/** + * @public + * + * Request and Response serde util functions and settings for AWS services + */ +export interface SerdeContext extends SerdeFunctions, EndpointBearer { + requestHandler: RequestHandler; + disableHostPrefix: boolean; +} +/** + * @public + * + * Serde functions from the client config. 
+ */ +export interface SerdeFunctions { + base64Encoder: Encoder; + base64Decoder: Decoder; + utf8Encoder: Encoder; + utf8Decoder: Decoder; + streamCollector: StreamCollector; +} +/** + * @public + */ +export interface RequestSerializer { + /** + * Converts the provided `input` into a request object + * + * @param input - The user input to serialize. + * + * @param context - Context containing runtime-specific util functions. + */ + (input: any, context: Context): Promise; +} +/** + * @public + */ +export interface ResponseDeserializer { + /** + * Converts the output of an operation into JavaScript types. + * + * @param output - The HTTP response received from the service + * + * @param context - context containing runtime-specific util functions. + */ + (output: ResponseType, context: Context): Promise; +} +/** + * The interface contains mix-in utility functions to transfer the runtime-specific + * stream implementation to specified format. Each stream can ONLY be transformed + * once. + * @public + */ +export interface SdkStreamMixin { + transformToByteArray: () => Promise; + transformToString: (encoding?: string) => Promise; + transformToWebStream: () => ReadableStream; +} +/** + * @public + * + * The type describing a runtime-specific stream implementation with mix-in + * utility functions. + */ +export type SdkStream = BaseStream & SdkStreamMixin; +/** + * @public + * + * Indicates that the member of type T with + * key StreamKey have been extended + * with the SdkStreamMixin helper methods. + */ +export type WithSdkStreamMixin = { + [key in keyof T]: key extends StreamKey ? 
SdkStream : T[key]; +}; +/** + * Interface for internal function to inject stream utility functions + * implementation + * + * @internal + */ +export interface SdkStreamMixinInjector { + (stream: unknown): SdkStreamMixin; +} +/** + * @internal + */ +export interface SdkStreamSerdeContext { + sdkStreamMixin: SdkStreamMixinInjector; +} diff --git a/amplify/functions/fetchDocuments/node_modules/@smithy/types/dist-types/shapes.d.ts b/amplify/functions/fetchDocuments/node_modules/@smithy/types/dist-types/shapes.d.ts new file mode 100644 index 0000000..a4812fb --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@smithy/types/dist-types/shapes.d.ts @@ -0,0 +1,82 @@ +import { HttpResponse } from "./http"; +import { MetadataBearer } from "./response"; +/** + * @public + * + * A document type represents an untyped JSON-like value. + * + * Not all protocols support document types, and the serialization format of a + * document type is protocol specific. All JSON protocols SHOULD support + * document types and they SHOULD serialize document types inline as normal + * JSON values. + */ +export type DocumentType = null | boolean | number | string | DocumentType[] | { + [prop: string]: DocumentType; +}; +/** + * @public + * + * A structure shape with the error trait. + * https://smithy.io/2.0/spec/behavior-traits.html#smithy-api-retryable-trait + */ +export interface RetryableTrait { + /** + * Indicates that the error is a retryable throttling error. + */ + readonly throttling?: boolean; +} +/** + * @public + * + * Type that is implemented by all Smithy shapes marked with the + * error trait. + * @deprecated + */ +export interface SmithyException { + /** + * The shape ID name of the exception. + */ + readonly name: string; + /** + * Whether the client or server are at fault. + */ + readonly $fault: "client" | "server"; + /** + * The service that encountered the exception. 
+ */ + readonly $service?: string; + /** + * Indicates that an error MAY be retried by the client. + */ + readonly $retryable?: RetryableTrait; + /** + * Reference to low-level HTTP response object. + */ + readonly $response?: HttpResponse; +} +/** + * @public + * + * @deprecated See {@link https://aws.amazon.com/blogs/developer/service-error-handling-modular-aws-sdk-js/} + * + * This type should not be used in your application. + * Users of the AWS SDK for JavaScript v3 service clients should prefer to + * use the specific Exception classes corresponding to each operation. + * These can be found as code in the deserializer for the operation's Command class, + * or as declarations in the service model file in codegen/sdk-codegen/aws-models. + * + * If no exceptions are enumerated by a particular Command operation, + * the base exception for the service should be used. Each client exports + * a base ServiceException prefixed with the service name. + */ +export type SdkError = Error & Partial & Partial & { + $metadata?: Partial["$metadata"] & { + /** + * If present, will have value of true and indicates that the error resulted in a + * correction of the clock skew, a.k.a. config.systemClockOffset. + * This is specific to AWS SDK and sigv4. + */ + readonly clockSkewCorrected?: true; + }; + cause?: Error; +}; diff --git a/amplify/functions/fetchDocuments/node_modules/@smithy/types/dist-types/signature.d.ts b/amplify/functions/fetchDocuments/node_modules/@smithy/types/dist-types/signature.d.ts new file mode 100644 index 0000000..db0039d --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@smithy/types/dist-types/signature.d.ts @@ -0,0 +1,155 @@ +import { Message } from "./eventStream"; +import { HttpRequest } from "./http"; +/** + * @public + * + * A `Date` object, a unix (epoch) timestamp in seconds, or a string that can be + * understood by the JavaScript `Date` constructor. 
+ */ +export type DateInput = number | string | Date; +/** + * @public + */ +export interface SigningArguments { + /** + * The date and time to be used as signature metadata. This value should be + * a Date object, a unix (epoch) timestamp, or a string that can be + * understood by the JavaScript `Date` constructor.If not supplied, the + * value returned by `new Date()` will be used. + */ + signingDate?: DateInput; + /** + * The service signing name. It will override the service name of the signer + * in current invocation + */ + signingService?: string; + /** + * The region name to sign the request. It will override the signing region of the + * signer in current invocation + */ + signingRegion?: string; +} +/** + * @public + */ +export interface RequestSigningArguments extends SigningArguments { + /** + * A set of strings whose members represents headers that cannot be signed. + * All headers in the provided request will have their names converted to + * lower case and then checked for existence in the unsignableHeaders set. + */ + unsignableHeaders?: Set; + /** + * A set of strings whose members represents headers that should be signed. + * Any values passed here will override those provided via unsignableHeaders, + * allowing them to be signed. + * + * All headers in the provided request will have their names converted to + * lower case before signing. + */ + signableHeaders?: Set; +} +/** + * @public + */ +export interface RequestPresigningArguments extends RequestSigningArguments { + /** + * The number of seconds before the presigned URL expires + */ + expiresIn?: number; + /** + * A set of strings whose representing headers that should not be hoisted + * to presigned request's query string. If not supplied, the presigner + * moves all the AWS-specific headers (starting with `x-amz-`) to the request + * query string. If supplied, these headers remain in the presigned request's + * header. 
+ * All headers in the provided request will have their names converted to + * lower case and then checked for existence in the unhoistableHeaders set. + */ + unhoistableHeaders?: Set; + /** + * This overrides any headers with the same name(s) set by unhoistableHeaders. + * These headers will be hoisted into the query string and signed. + */ + hoistableHeaders?: Set; +} +/** + * @public + */ +export interface EventSigningArguments extends SigningArguments { + priorSignature: string; +} +/** + * @public + */ +export interface RequestPresigner { + /** + * Signs a request for future use. + * + * The request will be valid until either the provided `expiration` time has + * passed or the underlying credentials have expired. + * + * @param requestToSign - The request that should be signed. + * @param options - Additional signing options. + */ + presign(requestToSign: HttpRequest, options?: RequestPresigningArguments): Promise; +} +/** + * @public + * + * An object that signs request objects with AWS credentials using one of the + * AWS authentication protocols. + */ +export interface RequestSigner { + /** + * Sign the provided request for immediate dispatch. + */ + sign(requestToSign: HttpRequest, options?: RequestSigningArguments): Promise; +} +/** + * @public + */ +export interface StringSigner { + /** + * Sign the provided `stringToSign` for use outside of the context of + * request signing. Typical uses include signed policy generation. + */ + sign(stringToSign: string, options?: SigningArguments): Promise; +} +/** + * @public + */ +export interface FormattedEvent { + headers: Uint8Array; + payload: Uint8Array; +} +/** + * @public + */ +export interface EventSigner { + /** + * Sign the individual event of the event stream. 
+ */ + sign(event: FormattedEvent, options: EventSigningArguments): Promise; +} +/** + * @public + */ +export interface SignableMessage { + message: Message; + priorSignature: string; +} +/** + * @public + */ +export interface SignedMessage { + message: Message; + signature: string; +} +/** + * @public + */ +export interface MessageSigner { + signMessage(message: SignableMessage, args: SigningArguments): Promise; + sign(event: SignableMessage, options: SigningArguments): Promise; +} diff --git a/amplify/functions/fetchDocuments/node_modules/@smithy/types/dist-types/stream.d.ts b/amplify/functions/fetchDocuments/node_modules/@smithy/types/dist-types/stream.d.ts new file mode 100644 index 0000000..f305dd9 --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@smithy/types/dist-types/stream.d.ts @@ -0,0 +1,22 @@ +import { ChecksumConstructor } from "./checksum"; +import { HashConstructor, StreamHasher } from "./crypto"; +import { BodyLengthCalculator, Encoder } from "./util"; +/** + * @public + */ +export interface GetAwsChunkedEncodingStreamOptions { + base64Encoder?: Encoder; + bodyLengthChecker: BodyLengthCalculator; + checksumAlgorithmFn?: ChecksumConstructor | HashConstructor; + checksumLocationName?: string; + streamHasher?: StreamHasher; +} +/** + * @public + * + * A function that returns Readable Stream which follows aws-chunked encoding stream. + * It optionally adds checksum if options are provided. 
+ */ +export interface GetAwsChunkedEncodingStream { + (readableStream: StreamType, options: GetAwsChunkedEncodingStreamOptions): StreamType; +} diff --git a/amplify/functions/fetchDocuments/node_modules/@smithy/types/dist-types/streaming-payload/streaming-blob-common-types.d.ts b/amplify/functions/fetchDocuments/node_modules/@smithy/types/dist-types/streaming-payload/streaming-blob-common-types.d.ts new file mode 100644 index 0000000..92c52da --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@smithy/types/dist-types/streaming-payload/streaming-blob-common-types.d.ts @@ -0,0 +1,33 @@ +/// +import type { Readable } from "stream"; +import type { BlobOptionalType, ReadableStreamOptionalType } from "../externals-check/browser-externals-check"; +/** + * @public + * + * This is the union representing the modeled blob type with streaming trait + * in a generic format that does not relate to HTTP input or output payloads. + * + * Note: the non-streaming blob type is represented by Uint8Array, but because + * the streaming blob type is always in the request/response paylod, it has + * historically been handled with different types. + * + * @see https://smithy.io/2.0/spec/simple-types.html#blob + * + * For compatibility with its historical representation, it must contain at least + * Readble (Node.js), Blob (browser), and ReadableStream (browser). + * + * @see StreamingPayloadInputTypes for FAQ about mixing types from multiple environments. + */ +export type StreamingBlobTypes = NodeJsRuntimeStreamingBlobTypes | BrowserRuntimeStreamingBlobTypes; +/** + * @public + * + * Node.js streaming blob type. + */ +export type NodeJsRuntimeStreamingBlobTypes = Readable; +/** + * @public + * + * Browser streaming blob types. 
+ */ +export type BrowserRuntimeStreamingBlobTypes = ReadableStreamOptionalType | BlobOptionalType; diff --git a/amplify/functions/fetchDocuments/node_modules/@smithy/types/dist-types/streaming-payload/streaming-blob-payload-input-types.d.ts b/amplify/functions/fetchDocuments/node_modules/@smithy/types/dist-types/streaming-payload/streaming-blob-payload-input-types.d.ts new file mode 100644 index 0000000..9bcc164 --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@smithy/types/dist-types/streaming-payload/streaming-blob-payload-input-types.d.ts @@ -0,0 +1,63 @@ +/// +/// +/// +import type { Readable } from "stream"; +import type { BlobOptionalType, ReadableStreamOptionalType } from "../externals-check/browser-externals-check"; +/** + * @public + * + * This union represents a superset of the compatible types you + * can use for streaming payload inputs. + * + * FAQ: + * Why does the type union mix mutually exclusive runtime types, namely + * Node.js and browser types? + * + * There are several reasons: + * 1. For backwards compatibility. + * 2. As a convenient compromise solution so that users in either environment may use the types + * without customization. + * 3. The SDK does not have static type information about the exact implementation + * of the HTTP RequestHandler being used in your client(s) (e.g. fetch, XHR, node:http, or node:http2), + * given that it is chosen at runtime. There are multiple possible request handlers + * in both the Node.js and browser runtime environments. + * + * Rather than restricting the type to a known common format (Uint8Array, for example) + * which doesn't include a universal streaming format in the currently supported Node.js versions, + * the type declaration is widened to multiple possible formats. + * It is up to the user to ultimately select a compatible format with the + * runtime and HTTP handler implementation they are using. 
+ * + * Usage: + * The typical solution we expect users to have is to manually narrow the + * type when needed, picking the appropriate one out of the union according to the + * runtime environment and specific request handler. + * There is also the type utility "NodeJsClient", "BrowserClient" and more + * exported from this package. These can be applied at the client level + * to pre-narrow these streaming payload blobs. For usage see the readme.md + * in the root of the \@smithy/types NPM package. + */ +export type StreamingBlobPayloadInputTypes = NodeJsRuntimeStreamingBlobPayloadInputTypes | BrowserRuntimeStreamingBlobPayloadInputTypes; +/** + * @public + * + * Streaming payload input types in the Node.js environment. + * These are derived from the types compatible with the request body used by node:http. + * + * Note: not all types are signable by the standard SignatureV4 signer when + * used as the request body. For example, in Node.js a Readable stream + * is not signable by the default signer. + * They are included in the union because it may be intended in some cases, + * but the expected types are primarily string, Uint8Array, and Buffer. + * + * Additional details may be found in the internal + * function "getPayloadHash" in the SignatureV4 module. + */ +export type NodeJsRuntimeStreamingBlobPayloadInputTypes = string | Uint8Array | Buffer | Readable; +/** + * @public + * + * Streaming payload input types in the browser environment. + * These are derived from the types compatible with fetch's Request.body. 
+ */ +export type BrowserRuntimeStreamingBlobPayloadInputTypes = string | Uint8Array | ReadableStreamOptionalType | BlobOptionalType; diff --git a/amplify/functions/fetchDocuments/node_modules/@smithy/types/dist-types/streaming-payload/streaming-blob-payload-output-types.d.ts b/amplify/functions/fetchDocuments/node_modules/@smithy/types/dist-types/streaming-payload/streaming-blob-payload-output-types.d.ts new file mode 100644 index 0000000..b64a878 --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@smithy/types/dist-types/streaming-payload/streaming-blob-payload-output-types.d.ts @@ -0,0 +1,53 @@ +/// +/// +import type { IncomingMessage } from "http"; +import type { Readable } from "stream"; +import type { BlobOptionalType, ReadableStreamOptionalType } from "../externals-check/browser-externals-check"; +import type { SdkStream } from "../serde"; +/** + * @public + * + * This union represents a superset of the types you may receive + * in streaming payload outputs. + * + * @see StreamingPayloadInputTypes for FAQ about mixing types from multiple environments. + * + * To highlight the upstream docs about the SdkStream mixin: + * + * The interface contains mix-in (via Object.assign) methods to transform the runtime-specific + * stream implementation to specified format. Each stream can ONLY be transformed + * once. + * + * The available methods are described on the SdkStream type via SdkStreamMixin. + */ +export type StreamingBlobPayloadOutputTypes = NodeJsRuntimeStreamingBlobPayloadOutputTypes | BrowserRuntimeStreamingBlobPayloadOutputTypes; +/** + * @public + * + * Streaming payload output types in the Node.js environment. + * + * This is by default the IncomingMessage type from node:http responses when + * using the default node-http-handler in Node.js environments. + * + * It can be other Readable types like node:http2's ClientHttp2Stream + * such as when using the node-http2-handler. 
+ * + * The SdkStreamMixin adds methods on this type to help transform (collect) it to + * other formats. + */ +export type NodeJsRuntimeStreamingBlobPayloadOutputTypes = SdkStream; +/** + * @public + * + * Streaming payload output types in the browser environment. + * + * This is by default fetch's Response.body type (ReadableStream) when using + * the default fetch-http-handler in browser-like environments. + * + * It may be a Blob, such as when using the XMLHttpRequest handler + * and receiving an arraybuffer response body. + * + * The SdkStreamMixin adds methods on this type to help transform (collect) it to + * other formats. + */ +export type BrowserRuntimeStreamingBlobPayloadOutputTypes = SdkStream; diff --git a/amplify/functions/fetchDocuments/node_modules/@smithy/types/dist-types/transfer.d.ts b/amplify/functions/fetchDocuments/node_modules/@smithy/types/dist-types/transfer.d.ts new file mode 100644 index 0000000..462ee23 --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@smithy/types/dist-types/transfer.d.ts @@ -0,0 +1,41 @@ +/** + * @public + */ +export type RequestHandlerOutput = { + response: ResponseType; +}; +/** + * @public + */ +export interface RequestHandler { + /** + * metadata contains information of a handler. For example + * 'h2' refers this handler is for handling HTTP/2 requests, + * whereas 'h1' refers handling HTTP1 requests + */ + metadata?: RequestHandlerMetadata; + destroy?: () => void; + handle: (request: RequestType, handlerOptions?: HandlerOptions) => Promise>; +} +/** + * @public + */ +export interface RequestHandlerMetadata { + handlerProtocol: RequestHandlerProtocol | string; +} +/** + * @public + * Values from ALPN Protocol IDs. 
+ * @see https://www.iana.org/assignments/tls-extensiontype-values/tls-extensiontype-values.xhtml#alpn-protocol-ids + */ +export declare enum RequestHandlerProtocol { + HTTP_0_9 = "http/0.9", + HTTP_1_0 = "http/1.0", + TDS_8_0 = "tds/8.0" +} +/** + * @public + */ +export interface RequestContext { + destination: URL; +} diff --git a/amplify/functions/fetchDocuments/node_modules/@smithy/types/dist-types/transform/client-method-transforms.d.ts b/amplify/functions/fetchDocuments/node_modules/@smithy/types/dist-types/transform/client-method-transforms.d.ts new file mode 100644 index 0000000..f9424c4 --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@smithy/types/dist-types/transform/client-method-transforms.d.ts @@ -0,0 +1,26 @@ +import type { CommandIO } from "../command"; +import type { MetadataBearer } from "../response"; +import type { StreamingBlobPayloadOutputTypes } from "../streaming-payload/streaming-blob-payload-output-types"; +import type { Transform } from "./type-transform"; +/** + * @internal + * + * Narrowed version of InvokeFunction used in Client::send. + */ +export interface NarrowedInvokeFunction { + (command: CommandIO, options?: HttpHandlerOptions): Promise>; + (command: CommandIO, cb: (err: unknown, data?: Transform) => void): void; + (command: CommandIO, options: HttpHandlerOptions, cb: (err: unknown, data?: Transform) => void): void; + (command: CommandIO, options?: HttpHandlerOptions, cb?: (err: unknown, data?: Transform) => void): Promise> | void; +} +/** + * @internal + * + * Narrowed version of InvokeMethod used in aggregated Client methods. 
+ */ +export interface NarrowedInvokeMethod { + (input: InputType, options?: HttpHandlerOptions): Promise>; + (input: InputType, cb: (err: unknown, data?: Transform) => void): void; + (input: InputType, options: HttpHandlerOptions, cb: (err: unknown, data?: Transform) => void): void; + (input: InputType, options?: HttpHandlerOptions, cb?: (err: unknown, data?: OutputType) => void): Promise> | void; +} diff --git a/amplify/functions/fetchDocuments/node_modules/@smithy/types/dist-types/transform/client-payload-blob-type-narrow.d.ts b/amplify/functions/fetchDocuments/node_modules/@smithy/types/dist-types/transform/client-payload-blob-type-narrow.d.ts new file mode 100644 index 0000000..243a40f --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@smithy/types/dist-types/transform/client-payload-blob-type-narrow.d.ts @@ -0,0 +1,79 @@ +/// +/// +import type { IncomingMessage } from "http"; +import type { ClientHttp2Stream } from "http2"; +import type { InvokeMethod } from "../client"; +import type { GetOutputType } from "../command"; +import type { HttpHandlerOptions } from "../http"; +import type { SdkStream } from "../serde"; +import type { BrowserRuntimeStreamingBlobPayloadInputTypes, NodeJsRuntimeStreamingBlobPayloadInputTypes, StreamingBlobPayloadInputTypes } from "../streaming-payload/streaming-blob-payload-input-types"; +import type { StreamingBlobPayloadOutputTypes } from "../streaming-payload/streaming-blob-payload-output-types"; +import type { NarrowedInvokeMethod } from "./client-method-transforms"; +import type { Transform } from "./type-transform"; +/** + * @public + * + * Creates a type with a given client type that narrows payload blob output + * types to SdkStream. + * + * This can be used for clients with the NodeHttpHandler requestHandler, + * the default in Node.js when not using HTTP2. 
+ * + * Usage example: + * ```typescript + * const client = new YourClient({}) as NodeJsClient; + * ``` + */ +export type NodeJsClient = NarrowPayloadBlobTypes, ClientType>; +/** + * @public + * Variant of NodeJsClient for node:http2. + */ +export type NodeJsHttp2Client = NarrowPayloadBlobTypes, ClientType>; +/** + * @public + * + * Creates a type with a given client type that narrows payload blob output + * types to SdkStream. + * + * This can be used for clients with the FetchHttpHandler requestHandler, + * which is the default in browser environments. + * + * Usage example: + * ```typescript + * const client = new YourClient({}) as BrowserClient; + * ``` + */ +export type BrowserClient = NarrowPayloadBlobTypes, ClientType>; +/** + * @public + * + * Variant of BrowserClient for XMLHttpRequest. + */ +export type BrowserXhrClient = NarrowPayloadBlobTypes, ClientType>; +/** + * @public + * + * @deprecated use NarrowPayloadBlobTypes. + * + * Narrow a given Client's blob payload outputs to the given type T. + */ +export type NarrowPayloadBlobOutputType = { + [key in keyof ClientType]: [ClientType[key]] extends [ + InvokeMethod + ] ? NarrowedInvokeMethod : ClientType[key]; +} & { + send(command: Command, options?: any): Promise, StreamingBlobPayloadOutputTypes | undefined, T>>; +}; +/** + * @public + * + * Narrow a Client's blob payload input and output types to I and O. + */ +export type NarrowPayloadBlobTypes = { + [key in keyof ClientType]: [ClientType[key]] extends [ + InvokeMethod + ] ? 
NarrowedInvokeMethod, FunctionOutputTypes> : ClientType[key]; +} & { + send(command: Command, options?: any): Promise, StreamingBlobPayloadOutputTypes | undefined, O>>; +}; diff --git a/amplify/functions/fetchDocuments/node_modules/@smithy/types/dist-types/transform/exact.d.ts b/amplify/functions/fetchDocuments/node_modules/@smithy/types/dist-types/transform/exact.d.ts new file mode 100644 index 0000000..c8a15d8 --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@smithy/types/dist-types/transform/exact.d.ts @@ -0,0 +1,6 @@ +/** + * @internal + * + * Checks that A and B extend each other. + */ +export type Exact = [A] extends [B] ? ([B] extends [A] ? true : false) : false; diff --git a/amplify/functions/fetchDocuments/node_modules/@smithy/types/dist-types/transform/no-undefined.d.ts b/amplify/functions/fetchDocuments/node_modules/@smithy/types/dist-types/transform/no-undefined.d.ts new file mode 100644 index 0000000..a0ec72e --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@smithy/types/dist-types/transform/no-undefined.d.ts @@ -0,0 +1,68 @@ +import type { InvokeMethod, InvokeMethodOptionalArgs } from "../client"; +import type { GetOutputType } from "../command"; +import type { DocumentType } from "../shapes"; +/** + * @public + * + * This type is intended as a type helper for generated clients. + * When initializing client, cast it to this type by passing + * the client constructor type as the type parameter. + * + * It will then recursively remove "undefined" as a union type from all + * input and output shapes' members. Note, this does not affect + * any member that is optional (?) such as outputs with no required members. 
+ * + * @example + * ```ts + * const client = new Client({}) as AssertiveClient; + * ``` + */ +export type AssertiveClient = NarrowClientIOTypes; +/** + * @public + * + * This is similar to AssertiveClient but additionally changes all + * output types to (recursive) Required so as to bypass all output nullability guards. + */ +export type UncheckedClient = UncheckedClientOutputTypes; +/** + * @internal + * + * Excludes undefined recursively. + */ +export type NoUndefined = T extends Function ? T : T extends DocumentType ? T : [T] extends [object] ? { + [key in keyof T]: NoUndefined; +} : Exclude; +/** + * @internal + * + * Excludes undefined and optional recursively. + */ +export type RecursiveRequired = T extends Function ? T : T extends DocumentType ? T : [T] extends [object] ? { + [key in keyof T]-?: RecursiveRequired; +} : Exclude; +/** + * @internal + * + * Removes undefined from unions. + */ +type NarrowClientIOTypes = { + [key in keyof ClientType]: [ClientType[key]] extends [ + InvokeMethodOptionalArgs + ] ? InvokeMethodOptionalArgs, NoUndefined> : [ClientType[key]] extends [InvokeMethod] ? InvokeMethod, NoUndefined> : ClientType[key]; +} & { + send(command: Command, options?: any): Promise>>; +}; +/** + * @internal + * + * Removes undefined from unions and adds yolo output types. + */ +type UncheckedClientOutputTypes = { + [key in keyof ClientType]: [ClientType[key]] extends [ + InvokeMethodOptionalArgs + ] ? InvokeMethodOptionalArgs, RecursiveRequired> : [ClientType[key]] extends [InvokeMethod] ? 
InvokeMethod, RecursiveRequired> : ClientType[key]; +} & { + send(command: Command, options?: any): Promise>>>; +}; +export {}; diff --git a/amplify/functions/fetchDocuments/node_modules/@smithy/types/dist-types/transform/type-transform.d.ts b/amplify/functions/fetchDocuments/node_modules/@smithy/types/dist-types/transform/type-transform.d.ts new file mode 100644 index 0000000..90373fb --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@smithy/types/dist-types/transform/type-transform.d.ts @@ -0,0 +1,34 @@ +/** + * @public + * + * Transforms any members of the object T having type FromType + * to ToType. This applies only to exact type matches. + * + * This is for the case where FromType is a union and only those fields + * matching the same union should be transformed. + */ +export type Transform = ConditionalRecursiveTransformExact; +/** + * @internal + * + * Returns ToType if T matches exactly with FromType. + */ +type TransformExact = [T] extends [FromType] ? ([FromType] extends [T] ? ToType : T) : T; +/** + * @internal + * + * Applies TransformExact to members of an object recursively. + */ +type RecursiveTransformExact = T extends Function ? T : T extends object ? { + [key in keyof T]: [T[key]] extends [FromType] ? [FromType] extends [T[key]] ? ToType : ConditionalRecursiveTransformExact : ConditionalRecursiveTransformExact; +} : TransformExact; +/** + * @internal + * + * Same as RecursiveTransformExact but does not assign to an object + * unless there is a matching transformed member. + */ +type ConditionalRecursiveTransformExact = [T] extends [ + RecursiveTransformExact +] ? [RecursiveTransformExact] extends [T] ? 
T : RecursiveTransformExact : RecursiveTransformExact; +export {}; diff --git a/amplify/functions/fetchDocuments/node_modules/@smithy/types/dist-types/ts3.4/abort-handler.d.ts b/amplify/functions/fetchDocuments/node_modules/@smithy/types/dist-types/ts3.4/abort-handler.d.ts new file mode 100644 index 0000000..26c068c --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@smithy/types/dist-types/ts3.4/abort-handler.d.ts @@ -0,0 +1,7 @@ +import { AbortSignal as DeprecatedAbortSignal } from "./abort"; +/** + * @public + */ +export interface AbortHandler { + (this: AbortSignal | DeprecatedAbortSignal, ev: any): any; +} diff --git a/amplify/functions/fetchDocuments/node_modules/@smithy/types/dist-types/ts3.4/abort.d.ts b/amplify/functions/fetchDocuments/node_modules/@smithy/types/dist-types/ts3.4/abort.d.ts new file mode 100644 index 0000000..00741af --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@smithy/types/dist-types/ts3.4/abort.d.ts @@ -0,0 +1,50 @@ +import { AbortHandler } from "./abort-handler"; +/** + * @public + */ +export { AbortHandler }; +/** + * @public + * @deprecated use platform (global) type for AbortSignal. + * + * Holders of an AbortSignal object may query if the associated operation has + * been aborted and register an onabort handler. + * + * @see https://developer.mozilla.org/en-US/docs/Web/API/AbortSignal + */ +export interface AbortSignal { + /** + * Whether the action represented by this signal has been cancelled. + */ + readonly aborted: boolean; + /** + * A function to be invoked when the action represented by this signal has + * been cancelled. + */ + onabort: AbortHandler | Function | null; +} +/** + * @public + * @deprecated use platform (global) type for AbortController. + * + * The AWS SDK uses a Controller/Signal model to allow for cooperative + * cancellation of asynchronous operations. 
When initiating such an operation, + * the caller can create an AbortController and then provide linked signal to + * subtasks. This allows a single source to communicate to multiple consumers + * that an action has been aborted without dictating how that cancellation + * should be handled. + * + * @see https://developer.mozilla.org/en-US/docs/Web/API/AbortController + */ +export interface AbortController { + /** + * An object that reports whether the action associated with this + * `AbortController` has been cancelled. + */ + readonly signal: AbortSignal; + /** + * Declares the operation associated with this AbortController to have been + * cancelled. + */ + abort(): void; +} diff --git a/amplify/functions/fetchDocuments/node_modules/@smithy/types/dist-types/ts3.4/auth/HttpApiKeyAuth.d.ts b/amplify/functions/fetchDocuments/node_modules/@smithy/types/dist-types/ts3.4/auth/HttpApiKeyAuth.d.ts new file mode 100644 index 0000000..380c8fc --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@smithy/types/dist-types/ts3.4/auth/HttpApiKeyAuth.d.ts @@ -0,0 +1,7 @@ +/** + * @internal + */ +export declare enum HttpApiKeyAuthLocation { + HEADER = "header", + QUERY = "query" +} diff --git a/amplify/functions/fetchDocuments/node_modules/@smithy/types/dist-types/ts3.4/auth/HttpAuthScheme.d.ts b/amplify/functions/fetchDocuments/node_modules/@smithy/types/dist-types/ts3.4/auth/HttpAuthScheme.d.ts new file mode 100644 index 0000000..e0d939e --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@smithy/types/dist-types/ts3.4/auth/HttpAuthScheme.d.ts @@ -0,0 +1,49 @@ +import { Identity, IdentityProvider } from "../identity/identity"; +import { HandlerExecutionContext } from "../middleware"; +import { HttpSigner } from "./HttpSigner"; +import { IdentityProviderConfig } from "./IdentityProviderConfig"; +/** + * ID for {@link HttpAuthScheme} + * @internal + */ +export type HttpAuthSchemeId = string; +/** + * Interface that defines an HttpAuthScheme + * 
@internal + */ +export interface HttpAuthScheme { + /** + * ID for an HttpAuthScheme, typically the absolute shape ID of a Smithy auth trait. + */ + schemeId: HttpAuthSchemeId; + /** + * Gets the IdentityProvider corresponding to an HttpAuthScheme. + */ + identityProvider(config: IdentityProviderConfig): IdentityProvider | undefined; + /** + * HttpSigner corresponding to an HttpAuthScheme. + */ + signer: HttpSigner; +} +/** + * Interface that defines the identity and signing properties when selecting + * an HttpAuthScheme. + * @internal + */ +export interface HttpAuthOption { + schemeId: HttpAuthSchemeId; + identityProperties?: Record; + signingProperties?: Record; + propertiesExtractor?: (config: TConfig, context: TContext) => { + identityProperties?: Record; + signingProperties?: Record; + }; +} +/** + * @internal + */ +export interface SelectedHttpAuthScheme { + httpAuthOption: HttpAuthOption; + identity: Identity; + signer: HttpSigner; +} diff --git a/amplify/functions/fetchDocuments/node_modules/@smithy/types/dist-types/ts3.4/auth/HttpAuthSchemeProvider.d.ts b/amplify/functions/fetchDocuments/node_modules/@smithy/types/dist-types/ts3.4/auth/HttpAuthSchemeProvider.d.ts new file mode 100644 index 0000000..d417aaf --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@smithy/types/dist-types/ts3.4/auth/HttpAuthSchemeProvider.d.ts @@ -0,0 +1,20 @@ +import { HandlerExecutionContext } from "../middleware"; +import { HttpAuthOption } from "./HttpAuthScheme"; +/** + * @internal + */ +export interface HttpAuthSchemeParameters { + operation?: string; +} +/** + * @internal + */ +export interface HttpAuthSchemeProvider { + (authParameters: TParameters): HttpAuthOption[]; +} +/** + * @internal + */ +export interface HttpAuthSchemeParametersProvider { + (config: TConfig, context: TContext, input: TInput): Promise; +} diff --git a/amplify/functions/fetchDocuments/node_modules/@smithy/types/dist-types/ts3.4/auth/HttpSigner.d.ts 
b/amplify/functions/fetchDocuments/node_modules/@smithy/types/dist-types/ts3.4/auth/HttpSigner.d.ts new file mode 100644 index 0000000..7abcf84 --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@smithy/types/dist-types/ts3.4/auth/HttpSigner.d.ts @@ -0,0 +1,41 @@ +import { HttpRequest, HttpResponse } from "../http"; +import { Identity } from "../identity/identity"; +/** + * @internal + */ +export interface ErrorHandler { + (signingProperties: Record): (error: E) => never; +} +/** + * @internal + */ +export interface SuccessHandler { + (httpResponse: HttpResponse | unknown, signingProperties: Record): void; +} +/** + * Interface to sign identity and signing properties. + * @internal + */ +export interface HttpSigner { + /** + * Signs an HttpRequest with an identity and signing properties. + * @param httpRequest request to sign + * @param identity identity to sing the request with + * @param signingProperties property bag for signing + * @returns signed request in a promise + */ + sign(httpRequest: HttpRequest, identity: Identity, signingProperties: Record): Promise; + /** + * Handler that executes after the {@link HttpSigner.sign} invocation and corresponding + * middleware throws an error. + * The error handler is expected to throw the error it receives, so the return type of the error handler is `never`. + * @internal + */ + errorHandler?: ErrorHandler; + /** + * Handler that executes after the {@link HttpSigner.sign} invocation and corresponding + * middleware succeeds. 
+ * @internal + */ + successHandler?: SuccessHandler; +} diff --git a/amplify/functions/fetchDocuments/node_modules/@smithy/types/dist-types/ts3.4/auth/IdentityProviderConfig.d.ts b/amplify/functions/fetchDocuments/node_modules/@smithy/types/dist-types/ts3.4/auth/IdentityProviderConfig.d.ts new file mode 100644 index 0000000..6a50f65 --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@smithy/types/dist-types/ts3.4/auth/IdentityProviderConfig.d.ts @@ -0,0 +1,14 @@ +import { Identity, IdentityProvider } from "../identity/identity"; +import { HttpAuthSchemeId } from "./HttpAuthScheme"; +/** + * Interface to get an IdentityProvider for a specified HttpAuthScheme + * @internal + */ +export interface IdentityProviderConfig { + /** + * Get the IdentityProvider for a specified HttpAuthScheme. + * @param schemeId schemeId of the HttpAuthScheme + * @returns IdentityProvider or undefined if HttpAuthScheme is not found + */ + getIdentityProvider(schemeId: HttpAuthSchemeId): IdentityProvider | undefined; +} diff --git a/amplify/functions/fetchDocuments/node_modules/@smithy/types/dist-types/ts3.4/auth/auth.d.ts b/amplify/functions/fetchDocuments/node_modules/@smithy/types/dist-types/ts3.4/auth/auth.d.ts new file mode 100644 index 0000000..8241fe3 --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@smithy/types/dist-types/ts3.4/auth/auth.d.ts @@ -0,0 +1,57 @@ +/** + * @internal + * + * Authentication schemes represent a way that the service will authenticate the customer’s identity. + */ +export interface AuthScheme { + /** + * @example "sigv4a" or "sigv4" + */ + name: "sigv4" | "sigv4a" | string; + /** + * @example "s3" + */ + signingName: string; + /** + * @example "us-east-1" + */ + signingRegion: string; + /** + * @example ["*"] + * @example ["us-west-2", "us-east-1"] + */ + signingRegionSet?: string[]; + /** + * @deprecated this field was renamed to signingRegion. 
+ */ + signingScope?: never; + properties: Record; +} +/** + * @internal + * @deprecated + */ +export interface HttpAuthDefinition { + /** + * Defines the location of where the Auth is serialized. + */ + in: HttpAuthLocation; + /** + * Defines the name of the HTTP header or query string parameter + * that contains the Auth. + */ + name: string; + /** + * Defines the security scheme to use on the `Authorization` header value. + * This can only be set if the "in" property is set to {@link HttpAuthLocation.HEADER}. + */ + scheme?: string; +} +/** + * @internal + * @deprecated + */ +export declare enum HttpAuthLocation { + HEADER = "header", + QUERY = "query" +} diff --git a/amplify/functions/fetchDocuments/node_modules/@smithy/types/dist-types/ts3.4/auth/index.d.ts b/amplify/functions/fetchDocuments/node_modules/@smithy/types/dist-types/ts3.4/auth/index.d.ts new file mode 100644 index 0000000..fbb845d --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@smithy/types/dist-types/ts3.4/auth/index.d.ts @@ -0,0 +1,6 @@ +export * from "./auth"; +export * from "./HttpApiKeyAuth"; +export * from "./HttpAuthScheme"; +export * from "./HttpAuthSchemeProvider"; +export * from "./HttpSigner"; +export * from "./IdentityProviderConfig"; diff --git a/amplify/functions/fetchDocuments/node_modules/@smithy/types/dist-types/ts3.4/blob/blob-payload-input-types.d.ts b/amplify/functions/fetchDocuments/node_modules/@smithy/types/dist-types/ts3.4/blob/blob-payload-input-types.d.ts new file mode 100644 index 0000000..465c9a3 --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@smithy/types/dist-types/ts3.4/blob/blob-payload-input-types.d.ts @@ -0,0 +1,41 @@ +/// +import { Readable } from "stream"; +import { BlobOptionalType, ReadableStreamOptionalType } from "../externals-check/browser-externals-check"; +/** + * @public + * + * A union of types that can be used as inputs for the service model + * "blob" type when it represents the request's entire payload or body. 
+ * + * For example, in Lambda::invoke, the payload is modeled as a blob type + * and this union applies to it. + * In contrast, in Lambda::createFunction the Zip file option is a blob type, + * but is not the (entire) payload and this union does not apply. + * + * Note: not all types are signable by the standard SignatureV4 signer when + * used as the request body. For example, in Node.js a Readable stream + * is not signable by the default signer. + * They are included in the union because it may work in some cases, + * but the expected types are primarily string and Uint8Array. + * + * Additional details may be found in the internal + * function "getPayloadHash" in the SignatureV4 module. + */ +export type BlobPayloadInputTypes = string | ArrayBuffer | ArrayBufferView | Uint8Array | NodeJsRuntimeBlobTypes | BrowserRuntimeBlobTypes; +/** + * @public + * + * Additional blob types for the Node.js environment. + */ +export type NodeJsRuntimeBlobTypes = Readable | Buffer; +/** + * @public + * + * Additional blob types for the browser environment. + */ +export type BrowserRuntimeBlobTypes = BlobOptionalType | ReadableStreamOptionalType; +/** + * @internal + * @deprecated renamed to BlobPayloadInputTypes. + */ +export type BlobTypes = BlobPayloadInputTypes; diff --git a/amplify/functions/fetchDocuments/node_modules/@smithy/types/dist-types/ts3.4/checksum.d.ts b/amplify/functions/fetchDocuments/node_modules/@smithy/types/dist-types/ts3.4/checksum.d.ts new file mode 100644 index 0000000..dbfff0c --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@smithy/types/dist-types/ts3.4/checksum.d.ts @@ -0,0 +1,63 @@ +import { SourceData } from "./crypto"; +/** + * @public + * + * An object that provides a checksum of data provided in chunks to `update`. + * The checksum may be performed incrementally as chunks are received or all + * at once when the checksum is finalized, depending on the underlying + * implementation. 
+ * + * It's recommended to compute checksum incrementally to avoid reading the + * entire payload in memory. + * + * A class that implements this interface may accept an optional secret key in its + * constructor while computing checksum value, when using HMAC. If provided, + * this secret key would be used when computing checksum. + */ +export interface Checksum { + /** + * Constant length of the digest created by the algorithm in bytes. + */ + digestLength?: number; + /** + * Creates a new checksum object that contains a deep copy of the internal + * state of the current `Checksum` object. + */ + copy?(): Checksum; + /** + * Returns the digest of all of the data passed. + */ + digest(): Promise; + /** + * Allows marking a checksum for checksums that support the ability + * to mark and reset. + * + * @param readLimit - The maximum limit of bytes that can be read + * before the mark position becomes invalid. + */ + mark?(readLimit: number): void; + /** + * Resets the checksum to its initial value. + */ + reset(): void; + /** + * Adds a chunk of data for which checksum needs to be computed. + * This can be called many times with new data as it is streamed. + * + * Implementations may override this method which passes second param + * which makes Checksum object stateless. + * + * @param chunk - The buffer to update checksum with. + */ + update(chunk: Uint8Array): void; +} +/** + * @public + * + * A constructor for a Checksum that may be used to calculate an HMAC. Implementing + * classes should not directly hold the provided key in memory beyond the + * lexical scope of the constructor. 
+ */ +export interface ChecksumConstructor { + new (secret?: SourceData): Checksum; +} diff --git a/amplify/functions/fetchDocuments/node_modules/@smithy/types/dist-types/ts3.4/client.d.ts b/amplify/functions/fetchDocuments/node_modules/@smithy/types/dist-types/ts3.4/client.d.ts new file mode 100644 index 0000000..1d05c04 --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@smithy/types/dist-types/ts3.4/client.d.ts @@ -0,0 +1,57 @@ +import { Command } from "./command"; +import { MiddlewareStack } from "./middleware"; +import { MetadataBearer } from "./response"; +import { OptionalParameter } from "./util"; +/** + * @public + * + * A type which checks if the client configuration is optional. + * If all entries of the client configuration are optional, it allows client creation without passing any config. + */ +export type CheckOptionalClientConfig = OptionalParameter; +/** + * @public + * + * function definition for different overrides of client's 'send' function. + */ +export interface InvokeFunction { + (command: Command, options?: any): Promise; + (command: Command, cb: (err: any, data?: OutputType) => void): void; + (command: Command, options: any, cb: (err: any, data?: OutputType) => void): void; + (command: Command, options?: any, cb?: (err: any, data?: OutputType) => void): Promise | void; +} +/** + * @public + * + * Signature that appears on aggregated clients' methods. + */ +export interface InvokeMethod { + (input: InputType, options?: any): Promise; + (input: InputType, cb: (err: any, data?: OutputType) => void): void; + (input: InputType, options: any, cb: (err: any, data?: OutputType) => void): void; + (input: InputType, options?: any, cb?: (err: any, data?: OutputType) => void): Promise | void; +} +/** + * @public + * + * Signature that appears on aggregated clients' methods when argument is optional. 
+ */ +export interface InvokeMethodOptionalArgs { + (): Promise; + (input: InputType, options?: any): Promise; + (input: InputType, cb: (err: any, data?: OutputType) => void): void; + (input: InputType, options: any, cb: (err: any, data?: OutputType) => void): void; + (input: InputType, options?: any, cb?: (err: any, data?: OutputType) => void): Promise | void; +} +/** + * A general interface for service clients, idempotent to browser or node clients + * This type corresponds to SmithyClient(https://github.com/aws/aws-sdk-js-v3/blob/main/packages/smithy-client/src/client.ts). + * It's provided for using without importing the SmithyClient class. + * @internal + */ +export interface Client { + readonly config: ResolvedClientConfiguration; + middlewareStack: MiddlewareStack; + send: InvokeFunction; + destroy: () => void; +} diff --git a/amplify/functions/fetchDocuments/node_modules/@smithy/types/dist-types/ts3.4/command.d.ts b/amplify/functions/fetchDocuments/node_modules/@smithy/types/dist-types/ts3.4/command.d.ts new file mode 100644 index 0000000..fb7c5b6 --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@smithy/types/dist-types/ts3.4/command.d.ts @@ -0,0 +1,23 @@ +import { Handler, MiddlewareStack } from "./middleware"; +import { MetadataBearer } from "./response"; +/** + * @public + */ +export interface Command extends CommandIO { + readonly input: InputType; + readonly middlewareStack: MiddlewareStack; + resolveMiddleware(stack: MiddlewareStack, configuration: ResolvedConfiguration, options: any): Handler; +} +/** + * @internal + * + * This is a subset of the Command type used only to detect the i/o types. + */ +export interface CommandIO { + readonly input: InputType; + resolveMiddleware(stack: any, configuration: any, options: any): Handler; +} +/** + * @internal + */ +export type GetOutputType = Command extends CommandIO ? 
O : never; diff --git a/amplify/functions/fetchDocuments/node_modules/@smithy/types/dist-types/ts3.4/connection/config.d.ts b/amplify/functions/fetchDocuments/node_modules/@smithy/types/dist-types/ts3.4/connection/config.d.ts new file mode 100644 index 0000000..09ed18b --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@smithy/types/dist-types/ts3.4/connection/config.d.ts @@ -0,0 +1,10 @@ +/** + * @public + */ +export interface ConnectConfiguration { + /** + * The maximum time in milliseconds that the connection phase of a request + * may take before the connection attempt is abandoned. + */ + requestTimeout?: number; +} diff --git a/amplify/functions/fetchDocuments/node_modules/@smithy/types/dist-types/ts3.4/connection/index.d.ts b/amplify/functions/fetchDocuments/node_modules/@smithy/types/dist-types/ts3.4/connection/index.d.ts new file mode 100644 index 0000000..eaacf8b --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@smithy/types/dist-types/ts3.4/connection/index.d.ts @@ -0,0 +1,3 @@ +export * from "./config"; +export * from "./manager"; +export * from "./pool"; diff --git a/amplify/functions/fetchDocuments/node_modules/@smithy/types/dist-types/ts3.4/connection/manager.d.ts b/amplify/functions/fetchDocuments/node_modules/@smithy/types/dist-types/ts3.4/connection/manager.d.ts new file mode 100644 index 0000000..7245028 --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@smithy/types/dist-types/ts3.4/connection/manager.d.ts @@ -0,0 +1,34 @@ +import { RequestContext } from "../transfer"; +import { ConnectConfiguration } from "./config"; +/** + * @public + */ +export interface ConnectionManagerConfiguration { + /** + * Maximum number of allowed concurrent requests per connection. + */ + maxConcurrency?: number; + /** + * Disables concurrent requests per connection. 
+ */ + disableConcurrency?: boolean; +} +/** + * @public + */ +export interface ConnectionManager { + /** + * Retrieves a connection from the connection pool if available, + * otherwise establish a new connection + */ + lease(requestContext: RequestContext, connectionConfiguration: ConnectConfiguration): T; + /** + * Releases the connection back to the pool making it potentially + * re-usable by other requests. + */ + release(requestContext: RequestContext, connection: T): void; + /** + * Destroys the connection manager. All connections will be closed. + */ + destroy(): void; +} diff --git a/amplify/functions/fetchDocuments/node_modules/@smithy/types/dist-types/ts3.4/connection/pool.d.ts b/amplify/functions/fetchDocuments/node_modules/@smithy/types/dist-types/ts3.4/connection/pool.d.ts new file mode 100644 index 0000000..161094f --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@smithy/types/dist-types/ts3.4/connection/pool.d.ts @@ -0,0 +1,32 @@ +/** + * @public + */ +export interface ConnectionPool { + /** + * Retrieve the first connection in the pool + */ + poll(): T | void; + /** + * Release the connection back to the pool making it potentially + * re-usable by other requests. + */ + offerLast(connection: T): void; + /** + * Removes the connection from the pool, and destroys it. + */ + destroy(connection: T): void; + /** + * Implements the iterable protocol and allows arrays to be consumed + * by most syntaxes expecting iterables, such as the spread syntax + * and for...of loops + */ + [Symbol.iterator](): Iterator; +} +/** + * Unused. 
+ * @internal + * @deprecated + */ +export interface CacheKey { + destination: string; +} diff --git a/amplify/functions/fetchDocuments/node_modules/@smithy/types/dist-types/ts3.4/crypto.d.ts b/amplify/functions/fetchDocuments/node_modules/@smithy/types/dist-types/ts3.4/crypto.d.ts new file mode 100644 index 0000000..467ec86 --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@smithy/types/dist-types/ts3.4/crypto.d.ts @@ -0,0 +1,60 @@ +/** + * @public + */ +export type SourceData = string | ArrayBuffer | ArrayBufferView; +/** + * @public + * + * An object that provides a hash of data provided in chunks to `update`. The + * hash may be performed incrementally as chunks are received or all at once + * when the hash is finalized, depending on the underlying implementation. + * + * @deprecated use {@link Checksum} + */ +export interface Hash { + /** + * Adds a chunk of data to the hash. If a buffer is provided, the `encoding` + * argument will be ignored. If a string is provided without a specified + * encoding, implementations must assume UTF-8 encoding. + * + * Not all encodings are supported on all platforms, though all must support + * UTF-8. + */ + update(toHash: SourceData, encoding?: "utf8" | "ascii" | "latin1"): void; + /** + * Finalizes the hash and provides a promise that will be fulfilled with the + * raw bytes of the calculated hash. + */ + digest(): Promise; +} +/** + * @public + * + * A constructor for a hash that may be used to calculate an HMAC. Implementing + * classes should not directly hold the provided key in memory beyond the + * lexical scope of the constructor. + * + * @deprecated use {@link ChecksumConstructor} + */ +export interface HashConstructor { + new (secret?: SourceData): Hash; +} +/** + * @public + * + * A function that calculates the hash of a data stream. Determining the hash + * will consume the stream, so only replayable streams should be provided to an + * implementation of this interface. 
+ */ +export interface StreamHasher { + (hashCtor: HashConstructor, stream: StreamType): Promise; +} +/** + * @public + * + * A function that returns a promise fulfilled with bytes from a + * cryptographically secure pseudorandom number generator. + */ +export interface randomValues { + (byteLength: number): Promise; +} diff --git a/amplify/functions/fetchDocuments/node_modules/@smithy/types/dist-types/ts3.4/downlevel-ts3.4/transform/type-transform.d.ts b/amplify/functions/fetchDocuments/node_modules/@smithy/types/dist-types/ts3.4/downlevel-ts3.4/transform/type-transform.d.ts new file mode 100644 index 0000000..547303f --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@smithy/types/dist-types/ts3.4/downlevel-ts3.4/transform/type-transform.d.ts @@ -0,0 +1,41 @@ +/** + * @public + * + * Transforms any members of the object T having type FromType + * to ToType. This applies only to exact type matches. + * + * This is for the case where FromType is a union and only those fields + * matching the same union should be transformed. + */ +export type Transform = RecursiveTransformExact; +/** + * @internal + * + * Returns ToType if T matches exactly with FromType. + */ +type TransformExact = [ + T +] extends [ + FromType +] ? ([ + FromType +] extends [ + T +] ? ToType : T) : T; +/** + * @internal + * + * Applies TransformExact to members of an object recursively. + */ +type RecursiveTransformExact = T extends Function ? T : T extends object ? { + [key in keyof T]: [ + T[key] + ] extends [ + FromType + ] ? [ + FromType + ] extends [ + T[key] + ] ? 
ToType : RecursiveTransformExact : RecursiveTransformExact; +} : TransformExact; +export {}; diff --git a/amplify/functions/fetchDocuments/node_modules/@smithy/types/dist-types/ts3.4/encode.d.ts b/amplify/functions/fetchDocuments/node_modules/@smithy/types/dist-types/ts3.4/encode.d.ts new file mode 100644 index 0000000..4714bf9 --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@smithy/types/dist-types/ts3.4/encode.d.ts @@ -0,0 +1,31 @@ +import { Message } from "./eventStream"; +/** + * @public + */ +export interface MessageEncoder { + encode(message: Message): Uint8Array; +} +/** + * @public + */ +export interface MessageDecoder { + decode(message: ArrayBufferView): Message; + feed(message: ArrayBufferView): void; + endOfStream(): void; + getMessage(): AvailableMessage; + getAvailableMessages(): AvailableMessages; +} +/** + * @public + */ +export interface AvailableMessage { + getMessage(): Message | undefined; + isEndOfStream(): boolean; +} +/** + * @public + */ +export interface AvailableMessages { + getMessages(): Message[]; + isEndOfStream(): boolean; +} diff --git a/amplify/functions/fetchDocuments/node_modules/@smithy/types/dist-types/ts3.4/endpoint.d.ts b/amplify/functions/fetchDocuments/node_modules/@smithy/types/dist-types/ts3.4/endpoint.d.ts new file mode 100644 index 0000000..a1221ee --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@smithy/types/dist-types/ts3.4/endpoint.d.ts @@ -0,0 +1,77 @@ +import { AuthScheme } from "./auth/auth"; +/** + * @public + */ +export interface EndpointPartition { + name: string; + dnsSuffix: string; + dualStackDnsSuffix: string; + supportsFIPS: boolean; + supportsDualStack: boolean; +} +/** + * @public + */ +export interface EndpointARN { + partition: string; + service: string; + region: string; + accountId: string; + resourceId: Array; +} +/** + * @public + */ +export declare enum EndpointURLScheme { + HTTP = "http", + HTTPS = "https" +} +/** + * @public + */ +export interface EndpointURL { 
+ /** + * The URL scheme such as http or https. + */ + scheme: EndpointURLScheme; + /** + * The authority is the host and optional port component of the URL. + */ + authority: string; + /** + * The parsed path segment of the URL. + * This value is as-is as provided by the user. + */ + path: string; + /** + * The parsed path segment of the URL. + * This value is guranteed to start and end with a "/". + */ + normalizedPath: string; + /** + * A boolean indicating whether the authority is an IP address. + */ + isIp: boolean; +} +/** + * @public + */ +export type EndpointObjectProperty = string | boolean | { + [key: string]: EndpointObjectProperty; +} | EndpointObjectProperty[]; +/** + * @public + */ +export interface EndpointV2 { + url: URL; + properties?: { + authSchemes?: AuthScheme[]; + } & Record; + headers?: Record; +} +/** + * @public + */ +export type EndpointParameters = { + [name: string]: undefined | boolean | string | string[]; +}; diff --git a/amplify/functions/fetchDocuments/node_modules/@smithy/types/dist-types/ts3.4/endpoints/EndpointRuleObject.d.ts b/amplify/functions/fetchDocuments/node_modules/@smithy/types/dist-types/ts3.4/endpoints/EndpointRuleObject.d.ts new file mode 100644 index 0000000..2c8026b --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@smithy/types/dist-types/ts3.4/endpoints/EndpointRuleObject.d.ts @@ -0,0 +1,27 @@ +import { EndpointObjectProperty } from "../endpoint"; +import { ConditionObject, Expression } from "./shared"; +/** + * @public + */ +export type EndpointObjectProperties = Record; +/** + * @public + */ +export type EndpointObjectHeaders = Record; +/** + * @public + */ +export type EndpointObject = { + url: Expression; + properties?: EndpointObjectProperties; + headers?: EndpointObjectHeaders; +}; +/** + * @public + */ +export type EndpointRuleObject = { + type: "endpoint"; + conditions?: ConditionObject[]; + endpoint: EndpointObject; + documentation?: string; +}; diff --git 
a/amplify/functions/fetchDocuments/node_modules/@smithy/types/dist-types/ts3.4/endpoints/ErrorRuleObject.d.ts b/amplify/functions/fetchDocuments/node_modules/@smithy/types/dist-types/ts3.4/endpoints/ErrorRuleObject.d.ts new file mode 100644 index 0000000..98fc7a8 --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@smithy/types/dist-types/ts3.4/endpoints/ErrorRuleObject.d.ts @@ -0,0 +1,10 @@ +import { ConditionObject, Expression } from "./shared"; +/** + * @public + */ +export type ErrorRuleObject = { + type: "error"; + conditions?: ConditionObject[]; + error: Expression; + documentation?: string; +}; diff --git a/amplify/functions/fetchDocuments/node_modules/@smithy/types/dist-types/ts3.4/endpoints/RuleSetObject.d.ts b/amplify/functions/fetchDocuments/node_modules/@smithy/types/dist-types/ts3.4/endpoints/RuleSetObject.d.ts new file mode 100644 index 0000000..e749fba --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@smithy/types/dist-types/ts3.4/endpoints/RuleSetObject.d.ts @@ -0,0 +1,28 @@ +import { RuleSetRules } from "./TreeRuleObject"; +/** + * @public + */ +export type DeprecatedObject = { + message?: string; + since?: string; +}; +/** + * @public + */ +export type ParameterObject = { + type: "String" | "string" | "Boolean" | "boolean"; + default?: string | boolean; + required?: boolean; + documentation?: string; + builtIn?: string; + deprecated?: DeprecatedObject; +}; +/** + * @public + */ +export type RuleSetObject = { + version: string; + serviceId?: string; + parameters: Record; + rules: RuleSetRules; +}; diff --git a/amplify/functions/fetchDocuments/node_modules/@smithy/types/dist-types/ts3.4/endpoints/TreeRuleObject.d.ts b/amplify/functions/fetchDocuments/node_modules/@smithy/types/dist-types/ts3.4/endpoints/TreeRuleObject.d.ts new file mode 100644 index 0000000..c203eed --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@smithy/types/dist-types/ts3.4/endpoints/TreeRuleObject.d.ts @@ -0,0 +1,16 @@ +import { 
EndpointRuleObject } from "./EndpointRuleObject"; +import { ErrorRuleObject } from "./ErrorRuleObject"; +import { ConditionObject } from "./shared"; +/** + * @public + */ +export type RuleSetRules = Array; +/** + * @public + */ +export type TreeRuleObject = { + type: "tree"; + conditions?: ConditionObject[]; + rules: RuleSetRules; + documentation?: string; +}; diff --git a/amplify/functions/fetchDocuments/node_modules/@smithy/types/dist-types/ts3.4/endpoints/index.d.ts b/amplify/functions/fetchDocuments/node_modules/@smithy/types/dist-types/ts3.4/endpoints/index.d.ts new file mode 100644 index 0000000..8a29789 --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@smithy/types/dist-types/ts3.4/endpoints/index.d.ts @@ -0,0 +1,5 @@ +export * from "./EndpointRuleObject"; +export * from "./ErrorRuleObject"; +export * from "./RuleSetObject"; +export * from "./shared"; +export * from "./TreeRuleObject"; diff --git a/amplify/functions/fetchDocuments/node_modules/@smithy/types/dist-types/ts3.4/endpoints/shared.d.ts b/amplify/functions/fetchDocuments/node_modules/@smithy/types/dist-types/ts3.4/endpoints/shared.d.ts new file mode 100644 index 0000000..1c5d4b6 --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@smithy/types/dist-types/ts3.4/endpoints/shared.d.ts @@ -0,0 +1,55 @@ +import { Logger } from "../logger"; +/** + * @public + */ +export type ReferenceObject = { + ref: string; +}; +/** + * @public + */ +export type FunctionObject = { + fn: string; + argv: FunctionArgv; +}; +/** + * @public + */ +export type FunctionArgv = Array; +/** + * @public + */ +export type FunctionReturn = string | boolean | number | { + [key: string]: FunctionReturn; +}; +/** + * @public + */ +export type ConditionObject = FunctionObject & { + assign?: string; +}; +/** + * @public + */ +export type Expression = string | ReferenceObject | FunctionObject; +/** + * @public + */ +export type EndpointParams = Record; +/** + * @public + */ +export type 
EndpointResolverOptions = { + endpointParams: EndpointParams; + logger?: Logger; +}; +/** + * @public + */ +export type ReferenceRecord = Record; +/** + * @public + */ +export type EvaluateOptions = EndpointResolverOptions & { + referenceRecord: ReferenceRecord; +}; diff --git a/amplify/functions/fetchDocuments/node_modules/@smithy/types/dist-types/ts3.4/eventStream.d.ts b/amplify/functions/fetchDocuments/node_modules/@smithy/types/dist-types/ts3.4/eventStream.d.ts new file mode 100644 index 0000000..49c37c7 --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@smithy/types/dist-types/ts3.4/eventStream.d.ts @@ -0,0 +1,137 @@ +import { HttpRequest } from "./http"; +import { FinalizeHandler, FinalizeHandlerArguments, FinalizeHandlerOutput, HandlerExecutionContext } from "./middleware"; +import { MetadataBearer } from "./response"; +/** + * @public + * + * An event stream message. The headers and body properties will always be + * defined, with empty headers represented as an object with no keys and an + * empty body represented as a zero-length Uint8Array. 
+ */ +export interface Message { + headers: MessageHeaders; + body: Uint8Array; +} +/** + * @public + */ +export type MessageHeaders = Record; +/** + * @public + */ +export type HeaderValue = { + type: K; + value: V; +}; +/** + * @public + */ +export type BooleanHeaderValue = HeaderValue<"boolean", boolean>; +/** + * @public + */ +export type ByteHeaderValue = HeaderValue<"byte", number>; +/** + * @public + */ +export type ShortHeaderValue = HeaderValue<"short", number>; +/** + * @public + */ +export type IntegerHeaderValue = HeaderValue<"integer", number>; +/** + * @public + */ +export type LongHeaderValue = HeaderValue<"long", Int64>; +/** + * @public + */ +export type BinaryHeaderValue = HeaderValue<"binary", Uint8Array>; +/** + * @public + */ +export type StringHeaderValue = HeaderValue<"string", string>; +/** + * @public + */ +export type TimestampHeaderValue = HeaderValue<"timestamp", Date>; +/** + * @public + */ +export type UuidHeaderValue = HeaderValue<"uuid", string>; +/** + * @public + */ +export type MessageHeaderValue = BooleanHeaderValue | ByteHeaderValue | ShortHeaderValue | IntegerHeaderValue | LongHeaderValue | BinaryHeaderValue | StringHeaderValue | TimestampHeaderValue | UuidHeaderValue; +/** + * @public + */ +export interface Int64 { + readonly bytes: Uint8Array; + valueOf: () => number; + toString: () => string; +} +/** + * @public + * + * Util functions for serializing or deserializing event stream + */ +export interface EventStreamSerdeContext { + eventStreamMarshaller: EventStreamMarshaller; +} +/** + * @public + * + * A function which deserializes binary event stream message into modeled shape. + */ +export interface EventStreamMarshallerDeserFn { + (body: StreamType, deserializer: (input: Record) => Promise): AsyncIterable; +} +/** + * @public + * + * A function that serializes modeled shape into binary stream message. 
+ */ +export interface EventStreamMarshallerSerFn { + (input: AsyncIterable, serializer: (event: T) => Message): StreamType; +} +/** + * @public + * + * An interface which provides functions for serializing and deserializing binary event stream + * to/from corresponsing modeled shape. + */ +export interface EventStreamMarshaller { + deserialize: EventStreamMarshallerDeserFn; + serialize: EventStreamMarshallerSerFn; +} +/** + * @public + */ +export interface EventStreamRequestSigner { + sign(request: HttpRequest): Promise; +} +/** + * @public + */ +export interface EventStreamPayloadHandler { + handle: (next: FinalizeHandler, args: FinalizeHandlerArguments, context?: HandlerExecutionContext) => Promise>; +} +/** + * @public + */ +export interface EventStreamPayloadHandlerProvider { + (options: any): EventStreamPayloadHandler; +} +/** + * @public + */ +export interface EventStreamSerdeProvider { + (options: any): EventStreamMarshaller; +} +/** + * @public + */ +export interface EventStreamSignerProvider { + (options: any): EventStreamRequestSigner; +} diff --git a/amplify/functions/fetchDocuments/node_modules/@smithy/types/dist-types/ts3.4/extensions/checksum.d.ts b/amplify/functions/fetchDocuments/node_modules/@smithy/types/dist-types/ts3.4/extensions/checksum.d.ts new file mode 100644 index 0000000..8ebbf00 --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@smithy/types/dist-types/ts3.4/extensions/checksum.d.ts @@ -0,0 +1,58 @@ +import { ChecksumConstructor } from "../checksum"; +import { HashConstructor } from "../crypto"; +/** + * @internal + */ +export declare enum AlgorithmId { + MD5 = "md5", + CRC32 = "crc32", + CRC32C = "crc32c", + SHA1 = "sha1", + SHA256 = "sha256" +} +/** + * @internal + */ +export interface ChecksumAlgorithm { + algorithmId(): AlgorithmId; + checksumConstructor(): ChecksumConstructor | HashConstructor; +} +/** + * @deprecated unused. 
+ * @internal + */ +type ChecksumConfigurationLegacy = { + [other in string | number]: any; +}; +/** + * @internal + */ +export interface ChecksumConfiguration extends ChecksumConfigurationLegacy { + addChecksumAlgorithm(algo: ChecksumAlgorithm): void; + checksumAlgorithms(): ChecksumAlgorithm[]; +} +/** + * @deprecated will be removed for implicit type. + * @internal + */ +type GetChecksumConfigurationType = (runtimeConfig: Partial<{ + sha256: ChecksumConstructor | HashConstructor; + md5: ChecksumConstructor | HashConstructor; +}>) => ChecksumConfiguration; +/** + * @internal + * @deprecated will be moved to smithy-client. + */ +export declare const getChecksumConfiguration: GetChecksumConfigurationType; +/** + * @internal + * @deprecated will be removed for implicit type. + */ +type ResolveChecksumRuntimeConfigType = (clientConfig: ChecksumConfiguration) => any; +/** + * @internal + * + * @deprecated will be moved to smithy-client. + */ +export declare const resolveChecksumRuntimeConfig: ResolveChecksumRuntimeConfigType; +export {}; diff --git a/amplify/functions/fetchDocuments/node_modules/@smithy/types/dist-types/ts3.4/extensions/defaultClientConfiguration.d.ts b/amplify/functions/fetchDocuments/node_modules/@smithy/types/dist-types/ts3.4/extensions/defaultClientConfiguration.d.ts new file mode 100644 index 0000000..40458b4 --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@smithy/types/dist-types/ts3.4/extensions/defaultClientConfiguration.d.ts @@ -0,0 +1,33 @@ +import { ChecksumConfiguration } from "./checksum"; +/** + * @deprecated will be replaced by DefaultExtensionConfiguration. + * @internal + * + * Default client configuration consisting various configurations for modifying a service client + */ +export interface DefaultClientConfiguration extends ChecksumConfiguration { +} +/** + * @deprecated will be removed for implicit type. 
+ */ +type GetDefaultConfigurationType = (runtimeConfig: any) => DefaultClientConfiguration; +/** + * @deprecated moving to @smithy/smithy-client. + * @internal + * + * Helper function to resolve default client configuration from runtime config + * + */ +export declare const getDefaultClientConfiguration: GetDefaultConfigurationType; +/** + * @deprecated will be removed for implicit type. + */ +type ResolveDefaultRuntimeConfigType = (clientConfig: DefaultClientConfiguration) => any; +/** + * @deprecated moving to @smithy/smithy-client. + * @internal + * + * Helper function to resolve runtime config from default client configuration + */ +export declare const resolveDefaultRuntimeConfig: ResolveDefaultRuntimeConfigType; +export {}; diff --git a/amplify/functions/fetchDocuments/node_modules/@smithy/types/dist-types/ts3.4/extensions/defaultExtensionConfiguration.d.ts b/amplify/functions/fetchDocuments/node_modules/@smithy/types/dist-types/ts3.4/extensions/defaultExtensionConfiguration.d.ts new file mode 100644 index 0000000..55f5137 --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@smithy/types/dist-types/ts3.4/extensions/defaultExtensionConfiguration.d.ts @@ -0,0 +1,9 @@ +import { ChecksumConfiguration } from "./checksum"; +import { RetryStrategyConfiguration } from "./retry"; +/** + * @internal + * + * Default extension configuration consisting various configurations for modifying a service client + */ +export interface DefaultExtensionConfiguration extends ChecksumConfiguration, RetryStrategyConfiguration { +} diff --git a/amplify/functions/fetchDocuments/node_modules/@smithy/types/dist-types/ts3.4/extensions/index.d.ts b/amplify/functions/fetchDocuments/node_modules/@smithy/types/dist-types/ts3.4/extensions/index.d.ts new file mode 100644 index 0000000..55edb16 --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@smithy/types/dist-types/ts3.4/extensions/index.d.ts @@ -0,0 +1,4 @@ +export * from "./defaultClientConfiguration"; +export 
* from "./defaultExtensionConfiguration"; +export { AlgorithmId, ChecksumAlgorithm, ChecksumConfiguration } from "./checksum"; +export { RetryStrategyConfiguration } from "./retry"; diff --git a/amplify/functions/fetchDocuments/node_modules/@smithy/types/dist-types/ts3.4/extensions/retry.d.ts b/amplify/functions/fetchDocuments/node_modules/@smithy/types/dist-types/ts3.4/extensions/retry.d.ts new file mode 100644 index 0000000..3471d08 --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@smithy/types/dist-types/ts3.4/extensions/retry.d.ts @@ -0,0 +1,18 @@ +import { RetryStrategyV2 } from "../retry"; +import { Provider, RetryStrategy } from "../util"; +/** + * A configuration interface with methods called by runtime extension + * @internal + */ +export interface RetryStrategyConfiguration { + /** + * Set retry strategy used for all http requests + * @param retryStrategy + */ + setRetryStrategy(retryStrategy: Provider): void; + /** + * Get retry strategy used for all http requests + * @param retryStrategy + */ + retryStrategy(): Provider; +} diff --git a/amplify/functions/fetchDocuments/node_modules/@smithy/types/dist-types/ts3.4/externals-check/browser-externals-check.d.ts b/amplify/functions/fetchDocuments/node_modules/@smithy/types/dist-types/ts3.4/externals-check/browser-externals-check.d.ts new file mode 100644 index 0000000..b709d7f --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@smithy/types/dist-types/ts3.4/externals-check/browser-externals-check.d.ts @@ -0,0 +1,35 @@ +import { Exact } from "../transform/exact"; +/** + * @public + * + * A checked type that resolves to Blob if it is defined as more than a stub, otherwise + * resolves to 'never' so as not to widen the type of unions containing Blob + * excessively. + */ +export type BlobOptionalType = BlobDefined extends true ? 
Blob : Unavailable; +/** + * @public + * + * A checked type that resolves to ReadableStream if it is defined as more than a stub, otherwise + * resolves to 'never' so as not to widen the type of unions containing ReadableStream + * excessively. + */ +export type ReadableStreamOptionalType = ReadableStreamDefined extends true ? ReadableStream : Unavailable; +/** + * @public + * + * Indicates a type is unavailable if it resolves to this. + */ +export type Unavailable = never; +/** + * @internal + * + * Whether the global types define more than a stub for ReadableStream. + */ +export type ReadableStreamDefined = Exact extends true ? false : true; +/** + * @internal + * + * Whether the global types define more than a stub for Blob. + */ +export type BlobDefined = Exact extends true ? false : true; diff --git a/amplify/functions/fetchDocuments/node_modules/@smithy/types/dist-types/ts3.4/feature-ids.d.ts b/amplify/functions/fetchDocuments/node_modules/@smithy/types/dist-types/ts3.4/feature-ids.d.ts new file mode 100644 index 0000000..1a2c157 --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@smithy/types/dist-types/ts3.4/feature-ids.d.ts @@ -0,0 +1,16 @@ +/** + * @internal + */ +export type SmithyFeatures = Partial<{ + RESOURCE_MODEL: "A"; + WAITER: "B"; + PAGINATOR: "C"; + RETRY_MODE_LEGACY: "D"; + RETRY_MODE_STANDARD: "E"; + RETRY_MODE_ADAPTIVE: "F"; + GZIP_REQUEST_COMPRESSION: "L"; + PROTOCOL_RPC_V2_CBOR: "M"; + ENDPOINT_OVERRIDE: "N"; + SIGV4A_SIGNING: "S"; + CREDENTIALS_CODE: "e"; +}>; diff --git a/amplify/functions/fetchDocuments/node_modules/@smithy/types/dist-types/ts3.4/http.d.ts b/amplify/functions/fetchDocuments/node_modules/@smithy/types/dist-types/ts3.4/http.d.ts new file mode 100644 index 0000000..1e47e4e --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@smithy/types/dist-types/ts3.4/http.d.ts @@ -0,0 +1,112 @@ +import { AbortSignal as DeprecatedAbortSignal } from "./abort"; +import { URI } from "./uri"; +/** + * @public + * 
+ * @deprecated use {@link EndpointV2} from `@smithy/types`. + */ +export interface Endpoint { + protocol: string; + hostname: string; + port?: number; + path: string; + query?: QueryParameterBag; +} +/** + * @public + * + * Interface an HTTP request class. Contains + * addressing information in addition to standard message properties. + */ +export interface HttpRequest extends HttpMessage, URI { + method: string; +} +/** + * @public + * + * Represents an HTTP message as received in reply to a request. Contains a + * numeric status code in addition to standard message properties. + */ +export interface HttpResponse extends HttpMessage { + statusCode: number; + reason?: string; +} +/** + * @public + * + * Represents an HTTP message with headers and an optional static or streaming + * body. body: ArrayBuffer | ArrayBufferView | string | Uint8Array | Readable | ReadableStream; + */ +export interface HttpMessage { + headers: HeaderBag; + body?: any; +} +/** + * @public + * + * A mapping of query parameter names to strings or arrays of strings, with the + * second being used when a parameter contains a list of values. Value can be set + * to null when query is not in key-value pairs shape + */ +export type QueryParameterBag = Record | null>; +/** + * @public + */ +export type FieldOptions = { + name: string; + kind?: FieldPosition; + values?: string[]; +}; +/** + * @public + */ +export declare enum FieldPosition { + HEADER = 0, + TRAILER = 1 +} +/** + * @public + * + * A mapping of header names to string values. Multiple values for the same + * header should be represented as a single string with values separated by + * `, `. + * + * Keys should be considered case insensitive, even if this is not enforced by a + * particular implementation. 
For example, given the following HeaderBag, where + * keys differ only in case: + * + * ```json + * { + * 'x-request-date': '2000-01-01T00:00:00Z', + * 'X-Request-Date': '2001-01-01T00:00:00Z' + * } + * ``` + * + * The SDK may at any point during processing remove one of the object + * properties in favor of the other. The headers may or may not be combined, and + * the SDK will not deterministically select which header candidate to use. + */ +export type HeaderBag = Record; +/** + * @public + * + * Represents an HTTP message with headers and an optional static or streaming + * body. bode: ArrayBuffer | ArrayBufferView | string | Uint8Array | Readable | ReadableStream; + */ +export interface HttpMessage { + headers: HeaderBag; + body?: any; +} +/** + * @public + * + * Represents the options that may be passed to an Http Handler. + */ +export interface HttpHandlerOptions { + abortSignal?: AbortSignal | DeprecatedAbortSignal; + /** + * The maximum time in milliseconds that the connection phase of a request + * may take before the connection attempt is abandoned. + */ + requestTimeout?: number; +} diff --git a/amplify/functions/fetchDocuments/node_modules/@smithy/types/dist-types/ts3.4/http/httpHandlerInitialization.d.ts b/amplify/functions/fetchDocuments/node_modules/@smithy/types/dist-types/ts3.4/http/httpHandlerInitialization.d.ts new file mode 100644 index 0000000..0ee18e4 --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@smithy/types/dist-types/ts3.4/http/httpHandlerInitialization.d.ts @@ -0,0 +1,122 @@ +/// +import { Agent as hAgent, AgentOptions as hAgentOptions } from "http"; +import { Agent as hsAgent, AgentOptions as hsAgentOptions } from "https"; +import { HttpRequest as IHttpRequest } from "../http"; +import { Logger } from "../logger"; +/** + * + * This type represents an alternate client constructor option for the entry + * "requestHandler". 
Instead of providing an instance of a requestHandler, the user + * may provide the requestHandler's constructor options for either the + * NodeHttpHandler or FetchHttpHandler. + * + * For other RequestHandlers like HTTP2 or WebSocket, + * constructor parameter passthrough is not currently available. + * + * @public + */ +export type RequestHandlerParams = NodeHttpHandlerOptions | FetchHttpHandlerOptions; +/** + * Represents the http options that can be passed to a node http client. + * @public + */ +export interface NodeHttpHandlerOptions { + /** + * The maximum time in milliseconds that the connection phase of a request + * may take before the connection attempt is abandoned. + * + * Defaults to 0, which disables the timeout. + */ + connectionTimeout?: number; + /** + * The number of milliseconds a request can take before automatically being terminated. + * Defaults to 0, which disables the timeout. + */ + requestTimeout?: number; + /** + * Delay before the NodeHttpHandler checks for socket exhaustion, + * and emits a warning if the active sockets and enqueued request count is greater than + * 2x the maxSockets count. + * + * Defaults to connectionTimeout + requestTimeout or 3000ms if those are not set. + */ + socketAcquisitionWarningTimeout?: number; + /** + * This field is deprecated, and requestTimeout should be used instead. + * The maximum time in milliseconds that a socket may remain idle before it + * is closed. + * + * @deprecated Use {@link requestTimeout} + */ + socketTimeout?: number; + /** + * You can pass http.Agent or its constructor options. + */ + httpAgent?: hAgent | hAgentOptions; + /** + * You can pass https.Agent or its constructor options. + */ + httpsAgent?: hsAgent | hsAgentOptions; + /** + * Optional logger. + */ + logger?: Logger; +} +/** + * Represents the http options that can be passed to a browser http client. 
+ * @public + */ +export interface FetchHttpHandlerOptions { + /** + * The number of milliseconds a request can take before being automatically + * terminated. + */ + requestTimeout?: number; + /** + * Whether to allow the request to outlive the page. Default value is false. + * + * There may be limitations to the payload size, number of concurrent requests, + * request duration etc. when using keepalive in browsers. + * + * These may change over time, so look for up to date information about + * these limitations before enabling keepalive. + */ + keepAlive?: boolean; + /** + * A string indicating whether credentials will be sent with the request always, never, or + * only when sent to a same-origin URL. + * @see https://developer.mozilla.org/en-US/docs/Web/API/Request/credentials + */ + credentials?: "include" | "omit" | "same-origin" | undefined | string; + /** + * Cache settings for fetch. + * @see https://developer.mozilla.org/en-US/docs/Web/API/Request/cache + */ + cache?: "default" | "force-cache" | "no-cache" | "no-store" | "only-if-cached" | "reload"; + /** + * An optional function that produces additional RequestInit + * parameters for each httpRequest. + * + * This is applied last via merging with Object.assign() and overwrites other values + * set from other sources. + * + * @example + * ```js + * new Client({ + * requestHandler: { + * requestInit(httpRequest) { + * return { cache: "no-store" }; + * } + * } + * }); + * ``` + */ + requestInit?: (httpRequest: IHttpRequest) => RequestInit; +} +declare global { + /** + * interface merging stub. 
+ */ + interface RequestInit { + } +} diff --git a/amplify/functions/fetchDocuments/node_modules/@smithy/types/dist-types/ts3.4/identity/apiKeyIdentity.d.ts b/amplify/functions/fetchDocuments/node_modules/@smithy/types/dist-types/ts3.4/identity/apiKeyIdentity.d.ts new file mode 100644 index 0000000..4aee7a2 --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@smithy/types/dist-types/ts3.4/identity/apiKeyIdentity.d.ts @@ -0,0 +1,14 @@ +import { Identity, IdentityProvider } from "../identity/identity"; +/** + * @public + */ +export interface ApiKeyIdentity extends Identity { + /** + * The literal API Key + */ + readonly apiKey: string; +} +/** + * @public + */ +export type ApiKeyIdentityProvider = IdentityProvider; diff --git a/amplify/functions/fetchDocuments/node_modules/@smithy/types/dist-types/ts3.4/identity/awsCredentialIdentity.d.ts b/amplify/functions/fetchDocuments/node_modules/@smithy/types/dist-types/ts3.4/identity/awsCredentialIdentity.d.ts new file mode 100644 index 0000000..9605e4d --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@smithy/types/dist-types/ts3.4/identity/awsCredentialIdentity.d.ts @@ -0,0 +1,31 @@ +import { Identity, IdentityProvider } from "./identity"; +/** + * @public + */ +export interface AwsCredentialIdentity extends Identity { + /** + * AWS access key ID + */ + readonly accessKeyId: string; + /** + * AWS secret access key + */ + readonly secretAccessKey: string; + /** + * A security or session token to use with these credentials. Usually + * present for temporary credentials. + */ + readonly sessionToken?: string; + /** + * AWS credential scope for this set of credentials. + */ + readonly credentialScope?: string; + /** + * AWS accountId. 
+ */ + readonly accountId?: string; +} +/** + * @public + */ +export type AwsCredentialIdentityProvider = IdentityProvider; diff --git a/amplify/functions/fetchDocuments/node_modules/@smithy/types/dist-types/ts3.4/identity/identity.d.ts b/amplify/functions/fetchDocuments/node_modules/@smithy/types/dist-types/ts3.4/identity/identity.d.ts new file mode 100644 index 0000000..eaa7e5d --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@smithy/types/dist-types/ts3.4/identity/identity.d.ts @@ -0,0 +1,15 @@ +/** + * @public + */ +export interface Identity { + /** + * A `Date` when the identity or credential will no longer be accepted. + */ + readonly expiration?: Date; +} +/** + * @public + */ +export interface IdentityProvider { + (identityProperties?: Record): Promise; +} diff --git a/amplify/functions/fetchDocuments/node_modules/@smithy/types/dist-types/ts3.4/identity/index.d.ts b/amplify/functions/fetchDocuments/node_modules/@smithy/types/dist-types/ts3.4/identity/index.d.ts new file mode 100644 index 0000000..031a0fe --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@smithy/types/dist-types/ts3.4/identity/index.d.ts @@ -0,0 +1,4 @@ +export * from "./apiKeyIdentity"; +export * from "./awsCredentialIdentity"; +export * from "./identity"; +export * from "./tokenIdentity"; diff --git a/amplify/functions/fetchDocuments/node_modules/@smithy/types/dist-types/ts3.4/identity/tokenIdentity.d.ts b/amplify/functions/fetchDocuments/node_modules/@smithy/types/dist-types/ts3.4/identity/tokenIdentity.d.ts new file mode 100644 index 0000000..33783eb --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@smithy/types/dist-types/ts3.4/identity/tokenIdentity.d.ts @@ -0,0 +1,14 @@ +import { Identity, IdentityProvider } from "../identity/identity"; +/** + * @internal + */ +export interface TokenIdentity extends Identity { + /** + * The literal token string + */ + readonly token: string; +} +/** + * @internal + */ +export type TokenIdentityProvider 
= IdentityProvider; diff --git a/amplify/functions/fetchDocuments/node_modules/@smithy/types/dist-types/ts3.4/index.d.ts b/amplify/functions/fetchDocuments/node_modules/@smithy/types/dist-types/ts3.4/index.d.ts new file mode 100644 index 0000000..85b4e44 --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@smithy/types/dist-types/ts3.4/index.d.ts @@ -0,0 +1,37 @@ +export * from "./abort"; +export * from "./auth"; +export * from "./blob/blob-payload-input-types"; +export * from "./checksum"; +export * from "./client"; +export * from "./command"; +export * from "./connection"; +export * from "./crypto"; +export * from "./encode"; +export * from "./endpoint"; +export * from "./endpoints"; +export * from "./eventStream"; +export * from "./extensions"; +export * from "./feature-ids"; +export * from "./http"; +export * from "./http/httpHandlerInitialization"; +export * from "./identity"; +export * from "./logger"; +export * from "./middleware"; +export * from "./pagination"; +export * from "./profile"; +export * from "./response"; +export * from "./retry"; +export * from "./serde"; +export * from "./shapes"; +export * from "./signature"; +export * from "./stream"; +export * from "./streaming-payload/streaming-blob-common-types"; +export * from "./streaming-payload/streaming-blob-payload-input-types"; +export * from "./streaming-payload/streaming-blob-payload-output-types"; +export * from "./transfer"; +export * from "./transform/client-payload-blob-type-narrow"; +export * from "./transform/no-undefined"; +export * from "./transform/type-transform"; +export * from "./uri"; +export * from "./util"; +export * from "./waiter"; diff --git a/amplify/functions/fetchDocuments/node_modules/@smithy/types/dist-types/ts3.4/logger.d.ts b/amplify/functions/fetchDocuments/node_modules/@smithy/types/dist-types/ts3.4/logger.d.ts new file mode 100644 index 0000000..cc69a11 --- /dev/null +++ 
b/amplify/functions/fetchDocuments/node_modules/@smithy/types/dist-types/ts3.4/logger.d.ts @@ -0,0 +1,13 @@ +/** + * @public + * + * Represents a logger object that is available in HandlerExecutionContext + * throughout the middleware stack. + */ +export interface Logger { + trace?: (...content: any[]) => void; + debug: (...content: any[]) => void; + info: (...content: any[]) => void; + warn: (...content: any[]) => void; + error: (...content: any[]) => void; +} diff --git a/amplify/functions/fetchDocuments/node_modules/@smithy/types/dist-types/ts3.4/middleware.d.ts b/amplify/functions/fetchDocuments/node_modules/@smithy/types/dist-types/ts3.4/middleware.d.ts new file mode 100644 index 0000000..8b35bbe --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@smithy/types/dist-types/ts3.4/middleware.d.ts @@ -0,0 +1,534 @@ +import { AuthScheme, HttpAuthDefinition } from "./auth/auth"; +import { SelectedHttpAuthScheme } from "./auth/HttpAuthScheme"; +import { Command } from "./command"; +import { EndpointV2 } from "./endpoint"; +import { SmithyFeatures } from "./feature-ids"; +import { Logger } from "./logger"; +import { UserAgent } from "./util"; +/** + * @public + */ +export interface InitializeHandlerArguments { + /** + * User input to a command. Reflects the userland representation of the + * union of data types the command can effectively handle. + */ + input: Input; +} +/** + * @public + */ +export interface InitializeHandlerOutput extends DeserializeHandlerOutput { + output: Output; +} +/** + * @public + */ +export interface SerializeHandlerArguments extends InitializeHandlerArguments { + /** + * The user input serialized as a request object. The request object is unknown, + * so you cannot modify it directly. When work with request, you need to guard its + * type to e.g. HttpRequest with 'instanceof' operand + * + * During the build phase of the execution of a middleware stack, a built + * request may or may not be available. 
+ */ + request?: unknown; +} +/** + * @public + */ +export interface SerializeHandlerOutput extends InitializeHandlerOutput { +} +/** + * @public + */ +export interface BuildHandlerArguments extends FinalizeHandlerArguments { +} +/** + * @public + */ +export interface BuildHandlerOutput extends InitializeHandlerOutput { +} +/** + * @public + */ +export interface FinalizeHandlerArguments extends SerializeHandlerArguments { + /** + * The user input serialized as a request. + */ + request: unknown; +} +/** + * @public + */ +export interface FinalizeHandlerOutput extends InitializeHandlerOutput { +} +/** + * @public + */ +export interface DeserializeHandlerArguments extends FinalizeHandlerArguments { +} +/** + * @public + */ +export interface DeserializeHandlerOutput { + /** + * The raw response object from runtime is deserialized to structured output object. + * The response object is unknown so you cannot modify it directly. When work with + * response, you need to guard its type to e.g. HttpResponse with 'instanceof' operand. + * + * During the deserialize phase of the execution of a middleware stack, a deserialized + * response may or may not be available + */ + response: unknown; + output?: Output; +} +/** + * @public + */ +export interface InitializeHandler { + /** + * Asynchronously converts an input object into an output object. + * + * @param args - An object containing a input to the command as well as any + * associated or previously generated execution artifacts. + */ + (args: InitializeHandlerArguments): Promise>; +} +/** + * @public + */ +export type Handler = InitializeHandler; +/** + * @public + */ +export interface SerializeHandler { + /** + * Asynchronously converts an input object into an output object. + * + * @param args - An object containing a input to the command as well as any + * associated or previously generated execution artifacts. 
+ */ + (args: SerializeHandlerArguments): Promise>; +} +/** + * @public + */ +export interface FinalizeHandler { + /** + * Asynchronously converts an input object into an output object. + * + * @param args - An object containing a input to the command as well as any + * associated or previously generated execution artifacts. + */ + (args: FinalizeHandlerArguments): Promise>; +} +/** + * @public + */ +export interface BuildHandler { + (args: BuildHandlerArguments): Promise>; +} +/** + * @public + */ +export interface DeserializeHandler { + (args: DeserializeHandlerArguments): Promise>; +} +/** + * @public + * + * A factory function that creates functions implementing the `Handler` + * interface. + */ +export interface InitializeMiddleware { + /** + * @param next - The handler to invoke after this middleware has operated on + * the user input and before this middleware operates on the output. + * + * @param context - Invariant data and functions for use by the handler. + */ + (next: InitializeHandler, context: HandlerExecutionContext): InitializeHandler; +} +/** + * @public + * + * A factory function that creates functions implementing the `BuildHandler` + * interface. + */ +export interface SerializeMiddleware { + /** + * @param next - The handler to invoke after this middleware has operated on + * the user input and before this middleware operates on the output. + * + * @param context - Invariant data and functions for use by the handler. + */ + (next: SerializeHandler, context: HandlerExecutionContext): SerializeHandler; +} +/** + * @public + * + * A factory function that creates functions implementing the `FinalizeHandler` + * interface. + */ +export interface FinalizeRequestMiddleware { + /** + * @param next - The handler to invoke after this middleware has operated on + * the user input and before this middleware operates on the output. + * + * @param context - Invariant data and functions for use by the handler. 
+ */ + (next: FinalizeHandler, context: HandlerExecutionContext): FinalizeHandler; +} +/** + * @public + */ +export interface BuildMiddleware { + (next: BuildHandler, context: HandlerExecutionContext): BuildHandler; +} +/** + * @public + */ +export interface DeserializeMiddleware { + (next: DeserializeHandler, context: HandlerExecutionContext): DeserializeHandler; +} +/** + * @public + */ +export type MiddlewareType = InitializeMiddleware | SerializeMiddleware | BuildMiddleware | FinalizeRequestMiddleware | DeserializeMiddleware; +/** + * @public + * + * A factory function that creates the terminal handler atop which a middleware + * stack sits. + */ +export interface Terminalware { + (context: HandlerExecutionContext): DeserializeHandler; +} +/** + * @public + */ +export type Step = "initialize" | "serialize" | "build" | "finalizeRequest" | "deserialize"; +/** + * @public + */ +export type Priority = "high" | "normal" | "low"; +/** + * @public + */ +export interface HandlerOptions { + /** + * Handlers are ordered using a "step" that describes the stage of command + * execution at which the handler will be executed. The available steps are: + * + * - initialize: The input is being prepared. Examples of typical + * initialization tasks include injecting default options computing + * derived parameters. + * - serialize: The input is complete and ready to be serialized. Examples + * of typical serialization tasks include input validation and building + * an HTTP request from user input. + * - build: The input has been serialized into an HTTP request, but that + * request may require further modification. Any request alterations + * will be applied to all retries. Examples of typical build tasks + * include injecting HTTP headers that describe a stable aspect of the + * request, such as `Content-Length` or a body checksum. + * - finalizeRequest: The request is being prepared to be sent over the wire. 
The + * request in this stage should already be semantically complete and + * should therefore only be altered as match the recipient's + * expectations. Examples of typical finalization tasks include request + * signing and injecting hop-by-hop headers. + * - deserialize: The response has arrived, the middleware here will deserialize + * the raw response object to structured response + * + * Unlike initialization and build handlers, which are executed once + * per operation execution, finalization and deserialize handlers will be + * executed foreach HTTP request sent. + * + * @defaultValue 'initialize' + */ + step?: Step; + /** + * A list of strings to any that identify the general purpose or important + * characteristics of a given handler. + */ + tags?: Array; + /** + * A unique name to refer to a middleware + */ + name?: string; + /** + * @internal + * Aliases allows for middleware to be found by multiple names besides {@link HandlerOptions.name}. + * This allows for references to replaced middleware to continue working, e.g. replacing + * multiple auth-specific middleware with a single generic auth middleware. + */ + aliases?: Array; + /** + * A flag to override the existing middleware with the same name. Without + * setting it, adding middleware with duplicated name will throw an exception. + * @internal + */ + override?: boolean; +} +/** + * @public + */ +export interface AbsoluteLocation { + /** + * By default middleware will be added to individual step in un-guaranteed order. + * In the case that + * + * @defaultValue 'normal' + */ + priority?: Priority; +} +/** + * @public + */ +export type Relation = "before" | "after"; +/** + * @public + */ +export interface RelativeLocation { + /** + * Specify the relation to be before or after a know middleware. + */ + relation: Relation; + /** + * A known middleware name to indicate inserting middleware's location. 
+ */ + toMiddleware: string; +} +/** + * @public + */ +export type RelativeMiddlewareOptions = RelativeLocation & Pick>; +/** + * @public + */ +export interface InitializeHandlerOptions extends HandlerOptions { + step?: "initialize"; +} +/** + * @public + */ +export interface SerializeHandlerOptions extends HandlerOptions { + step: "serialize"; +} +/** + * @public + */ +export interface BuildHandlerOptions extends HandlerOptions { + step: "build"; +} +/** + * @public + */ +export interface FinalizeRequestHandlerOptions extends HandlerOptions { + step: "finalizeRequest"; +} +/** + * @public + */ +export interface DeserializeHandlerOptions extends HandlerOptions { + step: "deserialize"; +} +/** + * @public + * + * A stack storing middleware. It can be resolved into a handler. It supports 2 + * approaches for adding middleware: + * 1. Adding middleware to specific step with `add()`. The order of middleware + * added into same step is determined by order of adding them. If one middleware + * needs to be executed at the front of the step or at the end of step, set + * `priority` options to `high` or `low`. + * 2. Adding middleware to location relative to known middleware with `addRelativeTo()`. + * This is useful when given middleware must be executed before or after specific + * middleware(`toMiddleware`). You can add a middleware relatively to another + * middleware which also added relatively. But eventually, this relative middleware + * chain **must** be 'anchored' by a middleware that added using `add()` API + * with absolute `step` and `priority`. This mothod will throw if specified + * `toMiddleware` is not found. 
+ */ +export interface MiddlewareStack extends Pluggable { + /** + * Add middleware to the stack to be executed during the "initialize" step, + * optionally specifying a priority, tags and name + */ + add(middleware: InitializeMiddleware, options?: InitializeHandlerOptions & AbsoluteLocation): void; + /** + * Add middleware to the stack to be executed during the "serialize" step, + * optionally specifying a priority, tags and name + */ + add(middleware: SerializeMiddleware, options: SerializeHandlerOptions & AbsoluteLocation): void; + /** + * Add middleware to the stack to be executed during the "build" step, + * optionally specifying a priority, tags and name + */ + add(middleware: BuildMiddleware, options: BuildHandlerOptions & AbsoluteLocation): void; + /** + * Add middleware to the stack to be executed during the "finalizeRequest" step, + * optionally specifying a priority, tags and name + */ + add(middleware: FinalizeRequestMiddleware, options: FinalizeRequestHandlerOptions & AbsoluteLocation): void; + /** + * Add middleware to the stack to be executed during the "deserialize" step, + * optionally specifying a priority, tags and name + */ + add(middleware: DeserializeMiddleware, options: DeserializeHandlerOptions & AbsoluteLocation): void; + /** + * Add middleware to a stack position before or after a known middleware,optionally + * specifying name and tags. + */ + addRelativeTo(middleware: MiddlewareType, options: RelativeMiddlewareOptions): void; + /** + * Apply a customization function to mutate the middleware stack, often + * used for customizations that requires mutating multiple middleware. + */ + use(pluggable: Pluggable): void; + /** + * Create a shallow clone of this stack. Step bindings and handler priorities + * and tags are preserved in the copy. + */ + clone(): MiddlewareStack; + /** + * Removes middleware from the stack. + * + * If a string is provided, it will be treated as middleware name. 
If a middleware + * is inserted with the given name, it will be removed. + * + * If a middleware class is provided, all usages thereof will be removed. + */ + remove(toRemove: MiddlewareType | string): boolean; + /** + * Removes middleware that contains given tag + * + * Multiple middleware will potentially be removed + */ + removeByTag(toRemove: string): boolean; + /** + * Create a stack containing the middlewares in this stack as well as the + * middlewares in the `from` stack. Neither source is modified, and step + * bindings and handler priorities and tags are preserved in the copy. + */ + concat(from: MiddlewareStack): MiddlewareStack; + /** + * Returns a list of the current order of middleware in the stack. + * This does not execute the middleware functions, nor does it + * provide a reference to the stack itself. + */ + identify(): string[]; + /** + * @internal + * + * When an operation is called using this stack, + * it will log its list of middleware to the console using + * the identify function. + * + * @param toggle - set whether to log on resolve. + * If no argument given, returns the current value. + */ + identifyOnResolve(toggle?: boolean): boolean; + /** + * Builds a single handler function from zero or more middleware classes and + * a core handler. The core handler is meant to send command objects to AWS + * services and return promises that will resolve with the operation result + * or be rejected with an error. + * + * When a composed handler is invoked, the arguments will pass through all + * middleware in a defined order, and the return from the innermost handler + * will pass through all middleware in the reverse of that order. 
+ */ + resolve(handler: DeserializeHandler, context: HandlerExecutionContext): InitializeHandler; +} +/** + * @internal + */ +export declare const SMITHY_CONTEXT_KEY = "__smithy_context"; +/** + * @public + * + * Data and helper objects that are not expected to change from one execution of + * a composed handler to another. + */ +export interface HandlerExecutionContext { + /** + * A logger that may be invoked by any handler during execution of an + * operation. + */ + logger?: Logger; + /** + * Name of the service the operation is being sent to. + */ + clientName?: string; + /** + * Name of the operation being executed. + */ + commandName?: string; + /** + * Additional user agent that inferred by middleware. It can be used to save + * the internal user agent sections without overriding the `customUserAgent` + * config in clients. + */ + userAgent?: UserAgent; + /** + * Resolved by the endpointMiddleware function of `@smithy/middleware-endpoint` + * in the serialization stage. + */ + endpointV2?: EndpointV2; + /** + * Set at the same time as endpointV2. + */ + authSchemes?: AuthScheme[]; + /** + * The current auth configuration that has been set by any auth middleware and + * that will prevent from being set more than once. + */ + currentAuthConfig?: HttpAuthDefinition; + /** + * @deprecated do not extend this field, it is a carryover from AWS SDKs. + * Used by DynamoDbDocumentClient. + */ + dynamoDbDocumentClientOptions?: Partial<{ + overrideInputFilterSensitiveLog(...args: any[]): string | void; + overrideOutputFilterSensitiveLog(...args: any[]): string | void; + }>; + /** + * @internal + * Context for Smithy properties. + */ + [SMITHY_CONTEXT_KEY]?: { + service?: string; + operation?: string; + commandInstance?: Command; + selectedHttpAuthScheme?: SelectedHttpAuthScheme; + features?: SmithyFeatures; + /** + * @deprecated + * Do not assign arbitrary members to the Smithy Context, + * fields should be explicitly declared here to avoid collisions. 
+ */ + [key: string]: unknown; + }; + /** + * @deprecated + * Do not assign arbitrary members to the context, since + * they can interfere with existing functionality. + * + * Additional members should instead be declared on the SMITHY_CONTEXT_KEY + * or other reserved keys. + */ + [key: string]: any; +} +/** + * @public + */ +export interface Pluggable { + /** + * A function that mutate the passed in middleware stack. Functions implementing + * this interface can add, remove, modify existing middleware stack from clients + * or commands + */ + applyToStack: (stack: MiddlewareStack) => void; +} diff --git a/amplify/functions/fetchDocuments/node_modules/@smithy/types/dist-types/ts3.4/pagination.d.ts b/amplify/functions/fetchDocuments/node_modules/@smithy/types/dist-types/ts3.4/pagination.d.ts new file mode 100644 index 0000000..c9d1c92 --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@smithy/types/dist-types/ts3.4/pagination.d.ts @@ -0,0 +1,33 @@ +import { Client } from "./client"; +import { Command } from "./command"; +/** + * @public + * + * Expected type definition of a paginator. + */ +export type Paginator = AsyncGenerator; +/** + * @public + * + * Expected paginator configuration passed to an operation. Services will extend + * this interface definition and may type client further. + */ +export interface PaginationConfiguration { + client: Client; + pageSize?: number; + startingToken?: any; + /** + * For some APIs, such as CloudWatchLogs events, the next page token will always + * be present. + * + * When true, this config field will have the paginator stop when the token doesn't change + * instead of when it is not present. + */ + stopOnSameToken?: boolean; + /** + * @param command - reference to the instantiated command. This callback is executed + * prior to sending the command with the paginator's client. + * @returns the original command or a replacement, defaulting to the original command object. 
+ */ + withCommand?: (command: Command) => typeof command | undefined; +} diff --git a/amplify/functions/fetchDocuments/node_modules/@smithy/types/dist-types/ts3.4/profile.d.ts b/amplify/functions/fetchDocuments/node_modules/@smithy/types/dist-types/ts3.4/profile.d.ts new file mode 100644 index 0000000..1b3dba7 --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@smithy/types/dist-types/ts3.4/profile.d.ts @@ -0,0 +1,30 @@ +/** + * @public + */ +export declare enum IniSectionType { + PROFILE = "profile", + SSO_SESSION = "sso-session", + SERVICES = "services" +} +/** + * @public + */ +export type IniSection = Record; +/** + * @public + * + * @deprecated Please use {@link IniSection} + */ +export interface Profile extends IniSection { +} +/** + * @public + */ +export type ParsedIniData = Record; +/** + * @public + */ +export interface SharedConfigFiles { + credentialsFile: ParsedIniData; + configFile: ParsedIniData; +} diff --git a/amplify/functions/fetchDocuments/node_modules/@smithy/types/dist-types/ts3.4/response.d.ts b/amplify/functions/fetchDocuments/node_modules/@smithy/types/dist-types/ts3.4/response.d.ts new file mode 100644 index 0000000..3d8a45a --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@smithy/types/dist-types/ts3.4/response.d.ts @@ -0,0 +1,40 @@ +/** + * @public + */ +export interface ResponseMetadata { + /** + * The status code of the last HTTP response received for this operation. + */ + httpStatusCode?: number; + /** + * A unique identifier for the last request sent for this operation. Often + * requested by AWS service teams to aid in debugging. + */ + requestId?: string; + /** + * A secondary identifier for the last request sent. Used for debugging. + */ + extendedRequestId?: string; + /** + * A tertiary identifier for the last request sent. Used for debugging. + */ + cfId?: string; + /** + * The number of times this operation was attempted. 
+ */ + attempts?: number; + /** + * The total amount of time (in milliseconds) that was spent waiting between + * retry attempts. + */ + totalRetryDelay?: number; +} +/** + * @public + */ +export interface MetadataBearer { + /** + * Metadata pertaining to this request. + */ + $metadata: ResponseMetadata; +} diff --git a/amplify/functions/fetchDocuments/node_modules/@smithy/types/dist-types/ts3.4/retry.d.ts b/amplify/functions/fetchDocuments/node_modules/@smithy/types/dist-types/ts3.4/retry.d.ts new file mode 100644 index 0000000..8436c9a --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@smithy/types/dist-types/ts3.4/retry.d.ts @@ -0,0 +1,133 @@ +import { SdkError } from "./shapes"; +/** + * @public + */ +export type RetryErrorType = +/** + * This is a connection level error such as a socket timeout, socket connect + * error, tls negotiation timeout etc... + * Typically these should never be applied for non-idempotent request types + * since in this scenario, it's impossible to know whether the operation had + * a side effect on the server. + */ +"TRANSIENT" +/** + * This is an error where the server explicitly told the client to back off, + * such as a 429 or 503 Http error. + */ + | "THROTTLING" +/** + * This is a server error that isn't explicitly throttling but is considered + * by the client to be something that should be retried. + */ + | "SERVER_ERROR" +/** + * Doesn't count against any budgets. This could be something like a 401 + * challenge in Http. + */ + | "CLIENT_ERROR"; +/** + * @public + */ +export interface RetryErrorInfo { + /** + * The error thrown during the initial request, if available. + */ + error?: SdkError; + errorType: RetryErrorType; + /** + * Protocol hint. This could come from Http's 'retry-after' header or + * something from MQTT or any other protocol that has the ability to convey + * retry info from a peer. + * + * The Date after which a retry should be attempted. 
+ */ + retryAfterHint?: Date; +} +/** + * @public + */ +export interface RetryBackoffStrategy { + /** + * @returns the number of milliseconds to wait before retrying an action. + */ + computeNextBackoffDelay(retryAttempt: number): number; +} +/** + * @public + */ +export interface StandardRetryBackoffStrategy extends RetryBackoffStrategy { + /** + * Sets the delayBase used to compute backoff delays. + * @param delayBase - + */ + setDelayBase(delayBase: number): void; +} +/** + * @public + */ +export interface RetryStrategyOptions { + backoffStrategy: RetryBackoffStrategy; + maxRetriesBase: number; +} +/** + * @public + */ +export interface RetryToken { + /** + * @returns the current count of retry. + */ + getRetryCount(): number; + /** + * @returns the number of milliseconds to wait before retrying an action. + */ + getRetryDelay(): number; +} +/** + * @public + */ +export interface StandardRetryToken extends RetryToken { + /** + * @returns the cost of the last retry attempt. + */ + getRetryCost(): number | undefined; +} +/** + * @public + */ +export interface RetryStrategyV2 { + /** + * Called before any retries (for the first call to the operation). It either + * returns a retry token or an error upon the failure to acquire a token prior. + * + * tokenScope is arbitrary and out of scope for this component. However, + * adding it here offers us a lot of future flexibility for outage detection. + * For example, it could be "us-east-1" on a shared retry strategy, or + * "us-west-2-c:dynamodb". + */ + acquireInitialRetryToken(retryTokenScope: string): Promise; + /** + * After a failed operation call, this function is invoked to refresh the + * retryToken returned by acquireInitialRetryToken(). This function can + * either choose to allow another retry and send a new or updated token, + * or reject the retry attempt and report the error either in an exception + * or returning an error. 
+ */ + refreshRetryTokenForRetry(tokenToRenew: RetryToken, errorInfo: RetryErrorInfo): Promise; + /** + * Upon successful completion of the operation, this function is called + * to record that the operation was successful. + */ + recordSuccess(token: RetryToken): void; +} +/** + * @public + */ +export type ExponentialBackoffJitterType = "DEFAULT" | "NONE" | "FULL" | "DECORRELATED"; +/** + * @public + */ +export interface ExponentialBackoffStrategyOptions { + jitterType: ExponentialBackoffJitterType; + backoffScaleValue?: number; +} diff --git a/amplify/functions/fetchDocuments/node_modules/@smithy/types/dist-types/ts3.4/serde.d.ts b/amplify/functions/fetchDocuments/node_modules/@smithy/types/dist-types/ts3.4/serde.d.ts new file mode 100644 index 0000000..d2d7ea9 --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@smithy/types/dist-types/ts3.4/serde.d.ts @@ -0,0 +1,112 @@ +import { Endpoint } from "./http"; +import { RequestHandler } from "./transfer"; +import { Decoder, Encoder, Provider } from "./util"; +/** + * @public + * + * Interface for object requires an Endpoint set. + */ +export interface EndpointBearer { + endpoint: Provider; +} +/** + * @public + */ +export interface StreamCollector { + /** + * A function that converts a stream into an array of bytes. + * + * @param stream - The low-level native stream from browser or Nodejs runtime + */ + (stream: any): Promise; +} +/** + * @public + * + * Request and Response serde util functions and settings for AWS services + */ +export interface SerdeContext extends SerdeFunctions, EndpointBearer { + requestHandler: RequestHandler; + disableHostPrefix: boolean; +} +/** + * @public + * + * Serde functions from the client config. 
+ */ +export interface SerdeFunctions { + base64Encoder: Encoder; + base64Decoder: Decoder; + utf8Encoder: Encoder; + utf8Decoder: Decoder; + streamCollector: StreamCollector; +} +/** + * @public + */ +export interface RequestSerializer { + /** + * Converts the provided `input` into a request object + * + * @param input - The user input to serialize. + * + * @param context - Context containing runtime-specific util functions. + */ + (input: any, context: Context): Promise; +} +/** + * @public + */ +export interface ResponseDeserializer { + /** + * Converts the output of an operation into JavaScript types. + * + * @param output - The HTTP response received from the service + * + * @param context - context containing runtime-specific util functions. + */ + (output: ResponseType, context: Context): Promise; +} +/** + * The interface contains mix-in utility functions to transfer the runtime-specific + * stream implementation to specified format. Each stream can ONLY be transformed + * once. + * @public + */ +export interface SdkStreamMixin { + transformToByteArray: () => Promise; + transformToString: (encoding?: string) => Promise; + transformToWebStream: () => ReadableStream; +} +/** + * @public + * + * The type describing a runtime-specific stream implementation with mix-in + * utility functions. + */ +export type SdkStream = BaseStream & SdkStreamMixin; +/** + * @public + * + * Indicates that the member of type T with + * key StreamKey have been extended + * with the SdkStreamMixin helper methods. + */ +export type WithSdkStreamMixin = { + [key in keyof T]: key extends StreamKey ? 
SdkStream : T[key]; +}; +/** + * Interface for internal function to inject stream utility functions + * implementation + * + * @internal + */ +export interface SdkStreamMixinInjector { + (stream: unknown): SdkStreamMixin; +} +/** + * @internal + */ +export interface SdkStreamSerdeContext { + sdkStreamMixin: SdkStreamMixinInjector; +} diff --git a/amplify/functions/fetchDocuments/node_modules/@smithy/types/dist-types/ts3.4/shapes.d.ts b/amplify/functions/fetchDocuments/node_modules/@smithy/types/dist-types/ts3.4/shapes.d.ts new file mode 100644 index 0000000..a81cbf1 --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@smithy/types/dist-types/ts3.4/shapes.d.ts @@ -0,0 +1,82 @@ +import { HttpResponse } from "./http"; +import { MetadataBearer } from "./response"; +/** + * @public + * + * A document type represents an untyped JSON-like value. + * + * Not all protocols support document types, and the serialization format of a + * document type is protocol specific. All JSON protocols SHOULD support + * document types and they SHOULD serialize document types inline as normal + * JSON values. + */ +export type DocumentType = null | boolean | number | string | DocumentType[] | { + [prop: string]: DocumentType; +}; +/** + * @public + * + * A structure shape with the error trait. + * https://smithy.io/2.0/spec/behavior-traits.html#smithy-api-retryable-trait + */ +export interface RetryableTrait { + /** + * Indicates that the error is a retryable throttling error. + */ + readonly throttling?: boolean; +} +/** + * @public + * + * Type that is implemented by all Smithy shapes marked with the + * error trait. + * @deprecated + */ +export interface SmithyException { + /** + * The shape ID name of the exception. + */ + readonly name: string; + /** + * Whether the client or server are at fault. + */ + readonly $fault: "client" | "server"; + /** + * The service that encountered the exception. 
+ */ + readonly $service?: string; + /** + * Indicates that an error MAY be retried by the client. + */ + readonly $retryable?: RetryableTrait; + /** + * Reference to low-level HTTP response object. + */ + readonly $response?: HttpResponse; +} +/** + * @public + * + * @deprecated See {@link https://aws.amazon.com/blogs/developer/service-error-handling-modular-aws-sdk-js/} + * + * This type should not be used in your application. + * Users of the AWS SDK for JavaScript v3 service clients should prefer to + * use the specific Exception classes corresponding to each operation. + * These can be found as code in the deserializer for the operation's Command class, + * or as declarations in the service model file in codegen/sdk-codegen/aws-models. + * + * If no exceptions are enumerated by a particular Command operation, + * the base exception for the service should be used. Each client exports + * a base ServiceException prefixed with the service name. + */ +export type SdkError = Error & Partial & Partial & { + $metadata?: Partial["$metadata"] & { + /** + * If present, will have value of true and indicates that the error resulted in a + * correction of the clock skew, a.k.a. config.systemClockOffset. + * This is specific to AWS SDK and sigv4. + */ + readonly clockSkewCorrected?: true; + }; + cause?: Error; +}; diff --git a/amplify/functions/fetchDocuments/node_modules/@smithy/types/dist-types/ts3.4/signature.d.ts b/amplify/functions/fetchDocuments/node_modules/@smithy/types/dist-types/ts3.4/signature.d.ts new file mode 100644 index 0000000..bbaecde --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@smithy/types/dist-types/ts3.4/signature.d.ts @@ -0,0 +1,155 @@ +import { Message } from "./eventStream"; +import { HttpRequest } from "./http"; +/** + * @public + * + * A `Date` object, a unix (epoch) timestamp in seconds, or a string that can be + * understood by the JavaScript `Date` constructor. 
+ */ +export type DateInput = number | string | Date; +/** + * @public + */ +export interface SigningArguments { + /** + * The date and time to be used as signature metadata. This value should be + * a Date object, a unix (epoch) timestamp, or a string that can be + * understood by the JavaScript `Date` constructor.If not supplied, the + * value returned by `new Date()` will be used. + */ + signingDate?: DateInput; + /** + * The service signing name. It will override the service name of the signer + * in current invocation + */ + signingService?: string; + /** + * The region name to sign the request. It will override the signing region of the + * signer in current invocation + */ + signingRegion?: string; +} +/** + * @public + */ +export interface RequestSigningArguments extends SigningArguments { + /** + * A set of strings whose members represents headers that cannot be signed. + * All headers in the provided request will have their names converted to + * lower case and then checked for existence in the unsignableHeaders set. + */ + unsignableHeaders?: Set; + /** + * A set of strings whose members represents headers that should be signed. + * Any values passed here will override those provided via unsignableHeaders, + * allowing them to be signed. + * + * All headers in the provided request will have their names converted to + * lower case before signing. + */ + signableHeaders?: Set; +} +/** + * @public + */ +export interface RequestPresigningArguments extends RequestSigningArguments { + /** + * The number of seconds before the presigned URL expires + */ + expiresIn?: number; + /** + * A set of strings whose representing headers that should not be hoisted + * to presigned request's query string. If not supplied, the presigner + * moves all the AWS-specific headers (starting with `x-amz-`) to the request + * query string. If supplied, these headers remain in the presigned request's + * header. 
+ * All headers in the provided request will have their names converted to + * lower case and then checked for existence in the unhoistableHeaders set. + */ + unhoistableHeaders?: Set; + /** + * This overrides any headers with the same name(s) set by unhoistableHeaders. + * These headers will be hoisted into the query string and signed. + */ + hoistableHeaders?: Set; +} +/** + * @public + */ +export interface EventSigningArguments extends SigningArguments { + priorSignature: string; +} +/** + * @public + */ +export interface RequestPresigner { + /** + * Signs a request for future use. + * + * The request will be valid until either the provided `expiration` time has + * passed or the underlying credentials have expired. + * + * @param requestToSign - The request that should be signed. + * @param options - Additional signing options. + */ + presign(requestToSign: HttpRequest, options?: RequestPresigningArguments): Promise; +} +/** + * @public + * + * An object that signs request objects with AWS credentials using one of the + * AWS authentication protocols. + */ +export interface RequestSigner { + /** + * Sign the provided request for immediate dispatch. + */ + sign(requestToSign: HttpRequest, options?: RequestSigningArguments): Promise; +} +/** + * @public + */ +export interface StringSigner { + /** + * Sign the provided `stringToSign` for use outside of the context of + * request signing. Typical uses include signed policy generation. + */ + sign(stringToSign: string, options?: SigningArguments): Promise; +} +/** + * @public + */ +export interface FormattedEvent { + headers: Uint8Array; + payload: Uint8Array; +} +/** + * @public + */ +export interface EventSigner { + /** + * Sign the individual event of the event stream. 
+ */ + sign(event: FormattedEvent, options: EventSigningArguments): Promise; +} +/** + * @public + */ +export interface SignableMessage { + message: Message; + priorSignature: string; +} +/** + * @public + */ +export interface SignedMessage { + message: Message; + signature: string; +} +/** + * @public + */ +export interface MessageSigner { + signMessage(message: SignableMessage, args: SigningArguments): Promise; + sign(event: SignableMessage, options: SigningArguments): Promise; +} diff --git a/amplify/functions/fetchDocuments/node_modules/@smithy/types/dist-types/ts3.4/stream.d.ts b/amplify/functions/fetchDocuments/node_modules/@smithy/types/dist-types/ts3.4/stream.d.ts new file mode 100644 index 0000000..1e2b85d --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@smithy/types/dist-types/ts3.4/stream.d.ts @@ -0,0 +1,22 @@ +import { ChecksumConstructor } from "./checksum"; +import { HashConstructor, StreamHasher } from "./crypto"; +import { BodyLengthCalculator, Encoder } from "./util"; +/** + * @public + */ +export interface GetAwsChunkedEncodingStreamOptions { + base64Encoder?: Encoder; + bodyLengthChecker: BodyLengthCalculator; + checksumAlgorithmFn?: ChecksumConstructor | HashConstructor; + checksumLocationName?: string; + streamHasher?: StreamHasher; +} +/** + * @public + * + * A function that returns Readable Stream which follows aws-chunked encoding stream. + * It optionally adds checksum if options are provided. 
+ */ +export interface GetAwsChunkedEncodingStream { + (readableStream: StreamType, options: GetAwsChunkedEncodingStreamOptions): StreamType; +} diff --git a/amplify/functions/fetchDocuments/node_modules/@smithy/types/dist-types/ts3.4/streaming-payload/streaming-blob-common-types.d.ts b/amplify/functions/fetchDocuments/node_modules/@smithy/types/dist-types/ts3.4/streaming-payload/streaming-blob-common-types.d.ts new file mode 100644 index 0000000..27088db --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@smithy/types/dist-types/ts3.4/streaming-payload/streaming-blob-common-types.d.ts @@ -0,0 +1,33 @@ +/// +import { Readable } from "stream"; +import { BlobOptionalType, ReadableStreamOptionalType } from "../externals-check/browser-externals-check"; +/** + * @public + * + * This is the union representing the modeled blob type with streaming trait + * in a generic format that does not relate to HTTP input or output payloads. + * + * Note: the non-streaming blob type is represented by Uint8Array, but because + * the streaming blob type is always in the request/response paylod, it has + * historically been handled with different types. + * + * @see https://smithy.io/2.0/spec/simple-types.html#blob + * + * For compatibility with its historical representation, it must contain at least + * Readble (Node.js), Blob (browser), and ReadableStream (browser). + * + * @see StreamingPayloadInputTypes for FAQ about mixing types from multiple environments. + */ +export type StreamingBlobTypes = NodeJsRuntimeStreamingBlobTypes | BrowserRuntimeStreamingBlobTypes; +/** + * @public + * + * Node.js streaming blob type. + */ +export type NodeJsRuntimeStreamingBlobTypes = Readable; +/** + * @public + * + * Browser streaming blob types. 
+ */ +export type BrowserRuntimeStreamingBlobTypes = ReadableStreamOptionalType | BlobOptionalType; diff --git a/amplify/functions/fetchDocuments/node_modules/@smithy/types/dist-types/ts3.4/streaming-payload/streaming-blob-payload-input-types.d.ts b/amplify/functions/fetchDocuments/node_modules/@smithy/types/dist-types/ts3.4/streaming-payload/streaming-blob-payload-input-types.d.ts new file mode 100644 index 0000000..1a86dea --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@smithy/types/dist-types/ts3.4/streaming-payload/streaming-blob-payload-input-types.d.ts @@ -0,0 +1,61 @@ +/// +import { Readable } from "stream"; +import { BlobOptionalType, ReadableStreamOptionalType } from "../externals-check/browser-externals-check"; +/** + * @public + * + * This union represents a superset of the compatible types you + * can use for streaming payload inputs. + * + * FAQ: + * Why does the type union mix mutually exclusive runtime types, namely + * Node.js and browser types? + * + * There are several reasons: + * 1. For backwards compatibility. + * 2. As a convenient compromise solution so that users in either environment may use the types + * without customization. + * 3. The SDK does not have static type information about the exact implementation + * of the HTTP RequestHandler being used in your client(s) (e.g. fetch, XHR, node:http, or node:http2), + * given that it is chosen at runtime. There are multiple possible request handlers + * in both the Node.js and browser runtime environments. + * + * Rather than restricting the type to a known common format (Uint8Array, for example) + * which doesn't include a universal streaming format in the currently supported Node.js versions, + * the type declaration is widened to multiple possible formats. + * It is up to the user to ultimately select a compatible format with the + * runtime and HTTP handler implementation they are using. 
+ * + * Usage: + * The typical solution we expect users to have is to manually narrow the + * type when needed, picking the appropriate one out of the union according to the + * runtime environment and specific request handler. + * There is also the type utility "NodeJsClient", "BrowserClient" and more + * exported from this package. These can be applied at the client level + * to pre-narrow these streaming payload blobs. For usage see the readme.md + * in the root of the \@smithy/types NPM package. + */ +export type StreamingBlobPayloadInputTypes = NodeJsRuntimeStreamingBlobPayloadInputTypes | BrowserRuntimeStreamingBlobPayloadInputTypes; +/** + * @public + * + * Streaming payload input types in the Node.js environment. + * These are derived from the types compatible with the request body used by node:http. + * + * Note: not all types are signable by the standard SignatureV4 signer when + * used as the request body. For example, in Node.js a Readable stream + * is not signable by the default signer. + * They are included in the union because it may be intended in some cases, + * but the expected types are primarily string, Uint8Array, and Buffer. + * + * Additional details may be found in the internal + * function "getPayloadHash" in the SignatureV4 module. + */ +export type NodeJsRuntimeStreamingBlobPayloadInputTypes = string | Uint8Array | Buffer | Readable; +/** + * @public + * + * Streaming payload input types in the browser environment. + * These are derived from the types compatible with fetch's Request.body. 
+ */ +export type BrowserRuntimeStreamingBlobPayloadInputTypes = string | Uint8Array | ReadableStreamOptionalType | BlobOptionalType; diff --git a/amplify/functions/fetchDocuments/node_modules/@smithy/types/dist-types/ts3.4/streaming-payload/streaming-blob-payload-output-types.d.ts b/amplify/functions/fetchDocuments/node_modules/@smithy/types/dist-types/ts3.4/streaming-payload/streaming-blob-payload-output-types.d.ts new file mode 100644 index 0000000..e344a46 --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@smithy/types/dist-types/ts3.4/streaming-payload/streaming-blob-payload-output-types.d.ts @@ -0,0 +1,52 @@ +/// +import { IncomingMessage } from "http"; +import { Readable } from "stream"; +import { BlobOptionalType, ReadableStreamOptionalType } from "../externals-check/browser-externals-check"; +import { SdkStream } from "../serde"; +/** + * @public + * + * This union represents a superset of the types you may receive + * in streaming payload outputs. + * + * @see StreamingPayloadInputTypes for FAQ about mixing types from multiple environments. + * + * To highlight the upstream docs about the SdkStream mixin: + * + * The interface contains mix-in (via Object.assign) methods to transform the runtime-specific + * stream implementation to specified format. Each stream can ONLY be transformed + * once. + * + * The available methods are described on the SdkStream type via SdkStreamMixin. + */ +export type StreamingBlobPayloadOutputTypes = NodeJsRuntimeStreamingBlobPayloadOutputTypes | BrowserRuntimeStreamingBlobPayloadOutputTypes; +/** + * @public + * + * Streaming payload output types in the Node.js environment. + * + * This is by default the IncomingMessage type from node:http responses when + * using the default node-http-handler in Node.js environments. + * + * It can be other Readable types like node:http2's ClientHttp2Stream + * such as when using the node-http2-handler. 
+ * + * The SdkStreamMixin adds methods on this type to help transform (collect) it to + * other formats. + */ +export type NodeJsRuntimeStreamingBlobPayloadOutputTypes = SdkStream; +/** + * @public + * + * Streaming payload output types in the browser environment. + * + * This is by default fetch's Response.body type (ReadableStream) when using + * the default fetch-http-handler in browser-like environments. + * + * It may be a Blob, such as when using the XMLHttpRequest handler + * and receiving an arraybuffer response body. + * + * The SdkStreamMixin adds methods on this type to help transform (collect) it to + * other formats. + */ +export type BrowserRuntimeStreamingBlobPayloadOutputTypes = SdkStream; diff --git a/amplify/functions/fetchDocuments/node_modules/@smithy/types/dist-types/ts3.4/transfer.d.ts b/amplify/functions/fetchDocuments/node_modules/@smithy/types/dist-types/ts3.4/transfer.d.ts new file mode 100644 index 0000000..f37ddb7 --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@smithy/types/dist-types/ts3.4/transfer.d.ts @@ -0,0 +1,41 @@ +/** + * @public + */ +export type RequestHandlerOutput = { + response: ResponseType; +}; +/** + * @public + */ +export interface RequestHandler { + /** + * metadata contains information of a handler. For example + * 'h2' refers this handler is for handling HTTP/2 requests, + * whereas 'h1' refers handling HTTP1 requests + */ + metadata?: RequestHandlerMetadata; + destroy?: () => void; + handle: (request: RequestType, handlerOptions?: HandlerOptions) => Promise>; +} +/** + * @public + */ +export interface RequestHandlerMetadata { + handlerProtocol: RequestHandlerProtocol | string; +} +/** + * @public + * Values from ALPN Protocol IDs. 
+ * @see https://www.iana.org/assignments/tls-extensiontype-values/tls-extensiontype-values.xhtml#alpn-protocol-ids + */ +export declare enum RequestHandlerProtocol { + HTTP_0_9 = "http/0.9", + HTTP_1_0 = "http/1.0", + TDS_8_0 = "tds/8.0" +} +/** + * @public + */ +export interface RequestContext { + destination: URL; +} diff --git a/amplify/functions/fetchDocuments/node_modules/@smithy/types/dist-types/ts3.4/transform/client-method-transforms.d.ts b/amplify/functions/fetchDocuments/node_modules/@smithy/types/dist-types/ts3.4/transform/client-method-transforms.d.ts new file mode 100644 index 0000000..f1aecf3 --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@smithy/types/dist-types/ts3.4/transform/client-method-transforms.d.ts @@ -0,0 +1,26 @@ +import { CommandIO } from "../command"; +import { MetadataBearer } from "../response"; +import { StreamingBlobPayloadOutputTypes } from "../streaming-payload/streaming-blob-payload-output-types"; +import { Transform } from "./type-transform"; +/** + * @internal + * + * Narrowed version of InvokeFunction used in Client::send. + */ +export interface NarrowedInvokeFunction { + (command: CommandIO, options?: HttpHandlerOptions): Promise>; + (command: CommandIO, cb: (err: unknown, data?: Transform) => void): void; + (command: CommandIO, options: HttpHandlerOptions, cb: (err: unknown, data?: Transform) => void): void; + (command: CommandIO, options?: HttpHandlerOptions, cb?: (err: unknown, data?: Transform) => void): Promise> | void; +} +/** + * @internal + * + * Narrowed version of InvokeMethod used in aggregated Client methods. 
+ */ +export interface NarrowedInvokeMethod { + (input: InputType, options?: HttpHandlerOptions): Promise>; + (input: InputType, cb: (err: unknown, data?: Transform) => void): void; + (input: InputType, options: HttpHandlerOptions, cb: (err: unknown, data?: Transform) => void): void; + (input: InputType, options?: HttpHandlerOptions, cb?: (err: unknown, data?: OutputType) => void): Promise> | void; +} diff --git a/amplify/functions/fetchDocuments/node_modules/@smithy/types/dist-types/ts3.4/transform/client-payload-blob-type-narrow.d.ts b/amplify/functions/fetchDocuments/node_modules/@smithy/types/dist-types/ts3.4/transform/client-payload-blob-type-narrow.d.ts new file mode 100644 index 0000000..e9516e2 --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@smithy/types/dist-types/ts3.4/transform/client-payload-blob-type-narrow.d.ts @@ -0,0 +1,82 @@ +/// +import { IncomingMessage } from "http"; +import { ClientHttp2Stream } from "http2"; +import { InvokeMethod } from "../client"; +import { GetOutputType } from "../command"; +import { HttpHandlerOptions } from "../http"; +import { SdkStream } from "../serde"; +import { BrowserRuntimeStreamingBlobPayloadInputTypes, NodeJsRuntimeStreamingBlobPayloadInputTypes, StreamingBlobPayloadInputTypes } from "../streaming-payload/streaming-blob-payload-input-types"; +import { StreamingBlobPayloadOutputTypes } from "../streaming-payload/streaming-blob-payload-output-types"; +import { NarrowedInvokeMethod } from "./client-method-transforms"; +import { Transform } from "./type-transform"; +/** + * @public + * + * Creates a type with a given client type that narrows payload blob output + * types to SdkStream. + * + * This can be used for clients with the NodeHttpHandler requestHandler, + * the default in Node.js when not using HTTP2. 
+ * + * Usage example: + * ```typescript + * const client = new YourClient({}) as NodeJsClient; + * ``` + */ +export type NodeJsClient = NarrowPayloadBlobTypes, ClientType>; +/** + * @public + * Variant of NodeJsClient for node:http2. + */ +export type NodeJsHttp2Client = NarrowPayloadBlobTypes, ClientType>; +/** + * @public + * + * Creates a type with a given client type that narrows payload blob output + * types to SdkStream. + * + * This can be used for clients with the FetchHttpHandler requestHandler, + * which is the default in browser environments. + * + * Usage example: + * ```typescript + * const client = new YourClient({}) as BrowserClient; + * ``` + */ +export type BrowserClient = NarrowPayloadBlobTypes, ClientType>; +/** + * @public + * + * Variant of BrowserClient for XMLHttpRequest. + */ +export type BrowserXhrClient = NarrowPayloadBlobTypes, ClientType>; +/** + * @public + * + * @deprecated use NarrowPayloadBlobTypes. + * + * Narrow a given Client's blob payload outputs to the given type T. + */ +export type NarrowPayloadBlobOutputType = { + [key in keyof ClientType]: [ + ClientType[key] + ] extends [ + InvokeMethod + ] ? NarrowedInvokeMethod : ClientType[key]; +} & { + send(command: Command, options?: any): Promise, StreamingBlobPayloadOutputTypes | undefined, T>>; +}; +/** + * @public + * + * Narrow a Client's blob payload input and output types to I and O. + */ +export type NarrowPayloadBlobTypes = { + [key in keyof ClientType]: [ + ClientType[key] + ] extends [ + InvokeMethod + ] ? 
NarrowedInvokeMethod, FunctionOutputTypes> : ClientType[key]; +} & { + send(command: Command, options?: any): Promise, StreamingBlobPayloadOutputTypes | undefined, O>>; +}; diff --git a/amplify/functions/fetchDocuments/node_modules/@smithy/types/dist-types/ts3.4/transform/exact.d.ts b/amplify/functions/fetchDocuments/node_modules/@smithy/types/dist-types/ts3.4/transform/exact.d.ts new file mode 100644 index 0000000..3a812df --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@smithy/types/dist-types/ts3.4/transform/exact.d.ts @@ -0,0 +1,14 @@ +/** + * @internal + * + * Checks that A and B extend each other. + */ +export type Exact = [ + A +] extends [ + B +] ? ([ + B +] extends [ + A +] ? true : false) : false; diff --git a/amplify/functions/fetchDocuments/node_modules/@smithy/types/dist-types/ts3.4/transform/no-undefined.d.ts b/amplify/functions/fetchDocuments/node_modules/@smithy/types/dist-types/ts3.4/transform/no-undefined.d.ts new file mode 100644 index 0000000..6a7f6d8 --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@smithy/types/dist-types/ts3.4/transform/no-undefined.d.ts @@ -0,0 +1,88 @@ +import { InvokeMethod, InvokeMethodOptionalArgs } from "../client"; +import { GetOutputType } from "../command"; +import { DocumentType } from "../shapes"; +/** + * @public + * + * This type is intended as a type helper for generated clients. + * When initializing client, cast it to this type by passing + * the client constructor type as the type parameter. + * + * It will then recursively remove "undefined" as a union type from all + * input and output shapes' members. Note, this does not affect + * any member that is optional (?) such as outputs with no required members. 
+ * + * @example + * ```ts + * const client = new Client({}) as AssertiveClient; + * ``` + */ +export type AssertiveClient = NarrowClientIOTypes; +/** + * @public + * + * This is similar to AssertiveClient but additionally changes all + * output types to (recursive) Required so as to bypass all output nullability guards. + */ +export type UncheckedClient = UncheckedClientOutputTypes; +/** + * @internal + * + * Excludes undefined recursively. + */ +export type NoUndefined = T extends Function ? T : T extends DocumentType ? T : [ + T +] extends [ + object +] ? { + [key in keyof T]: NoUndefined; +} : Exclude; +/** + * @internal + * + * Excludes undefined and optional recursively. + */ +export type RecursiveRequired = T extends Function ? T : T extends DocumentType ? T : [ + T +] extends [ + object +] ? { + [key in keyof T]-?: RecursiveRequired; +} : Exclude; +/** + * @internal + * + * Removes undefined from unions. + */ +type NarrowClientIOTypes = { + [key in keyof ClientType]: [ + ClientType[key] + ] extends [ + InvokeMethodOptionalArgs + ] ? InvokeMethodOptionalArgs, NoUndefined> : [ + ClientType[key] + ] extends [ + InvokeMethod + ] ? InvokeMethod, NoUndefined> : ClientType[key]; +} & { + send(command: Command, options?: any): Promise>>; +}; +/** + * @internal + * + * Removes undefined from unions and adds yolo output types. + */ +type UncheckedClientOutputTypes = { + [key in keyof ClientType]: [ + ClientType[key] + ] extends [ + InvokeMethodOptionalArgs + ] ? InvokeMethodOptionalArgs, RecursiveRequired> : [ + ClientType[key] + ] extends [ + InvokeMethod + ] ? 
InvokeMethod, RecursiveRequired> : ClientType[key]; +} & { + send(command: Command, options?: any): Promise>>>; +}; +export {}; diff --git a/amplify/functions/fetchDocuments/node_modules/@smithy/types/dist-types/ts3.4/transform/type-transform.d.ts b/amplify/functions/fetchDocuments/node_modules/@smithy/types/dist-types/ts3.4/transform/type-transform.d.ts new file mode 100644 index 0000000..547303f --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@smithy/types/dist-types/ts3.4/transform/type-transform.d.ts @@ -0,0 +1,41 @@ +/** + * @public + * + * Transforms any members of the object T having type FromType + * to ToType. This applies only to exact type matches. + * + * This is for the case where FromType is a union and only those fields + * matching the same union should be transformed. + */ +export type Transform = RecursiveTransformExact; +/** + * @internal + * + * Returns ToType if T matches exactly with FromType. + */ +type TransformExact = [ + T +] extends [ + FromType +] ? ([ + FromType +] extends [ + T +] ? ToType : T) : T; +/** + * @internal + * + * Applies TransformExact to members of an object recursively. + */ +type RecursiveTransformExact = T extends Function ? T : T extends object ? { + [key in keyof T]: [ + T[key] + ] extends [ + FromType + ] ? [ + FromType + ] extends [ + T[key] + ] ? ToType : RecursiveTransformExact : RecursiveTransformExact; +} : TransformExact; +export {}; diff --git a/amplify/functions/fetchDocuments/node_modules/@smithy/types/dist-types/ts3.4/uri.d.ts b/amplify/functions/fetchDocuments/node_modules/@smithy/types/dist-types/ts3.4/uri.d.ts new file mode 100644 index 0000000..4e7adb4 --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@smithy/types/dist-types/ts3.4/uri.d.ts @@ -0,0 +1,17 @@ +import { QueryParameterBag } from "./http"; +/** + * @internal + * + * Represents the components parts of a Uniform Resource Identifier used to + * construct the target location of a Request. 
+ */ +export type URI = { + protocol: string; + hostname: string; + port?: number; + path: string; + query?: QueryParameterBag; + username?: string; + password?: string; + fragment?: string; +}; diff --git a/amplify/functions/fetchDocuments/node_modules/@smithy/types/dist-types/ts3.4/util.d.ts b/amplify/functions/fetchDocuments/node_modules/@smithy/types/dist-types/ts3.4/util.d.ts new file mode 100644 index 0000000..7c700af --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@smithy/types/dist-types/ts3.4/util.d.ts @@ -0,0 +1,192 @@ +import { Endpoint } from "./http"; +import { FinalizeHandler, FinalizeHandlerArguments, FinalizeHandlerOutput } from "./middleware"; +import { MetadataBearer } from "./response"; +/** + * @public + * + * A generic which checks if Type1 is exactly same as Type2. + */ +export type Exact = [ + Type1 +] extends [ + Type2 +] ? ([ + Type2 +] extends [ + Type1 +] ? true : false) : false; +/** + * @public + * + * A function that, given a Uint8Array of bytes, can produce a string + * representation thereof. The function may optionally attempt to + * convert other input types to Uint8Array before encoding. + * + * @example An encoder function that converts bytes to hexadecimal + * representation would return `'hello'` when given + * `new Uint8Array([104, 101, 108, 108, 111])`. + */ +export interface Encoder { + /** + * Caution: the `any` type on the input is for backwards compatibility. + * Runtime support is limited to Uint8Array and string by default. + * + * You may choose to support more encoder input types if overriding the default + * implementations. + */ + (input: Uint8Array | string | any): string; +} +/** + * @public + * + * A function that, given a string, can derive the bytes represented by that + * string. + * + * @example A decoder function that converts bytes to hexadecimal + * representation would return `new Uint8Array([104, 101, 108, 108, 111])` when + * given the string `'hello'`. 
+ */ +export interface Decoder { + (input: string): Uint8Array; +} +/** + * @public + * + * A function that, when invoked, returns a promise that will be fulfilled with + * a value of type T. + * + * @example A function that reads credentials from shared SDK configuration + * files, assuming roles and collecting MFA tokens as necessary. + */ +export interface Provider { + (): Promise; +} +/** + * @public + * + * A tuple that represents an API name and optional version + * of a library built using the AWS SDK. + */ +export type UserAgentPair = [ + /*name*/ string, + /*version*/ string +]; +/** + * @public + * + * User agent data that to be put into the request's user + * agent. + */ +export type UserAgent = UserAgentPair[]; +/** + * @public + * + * Parses a URL in string form into an Endpoint object. + */ +export interface UrlParser { + (url: string | URL): Endpoint; +} +/** + * @public + * + * A function that, when invoked, returns a promise that will be fulfilled with + * a value of type T. It memoizes the result from the previous invocation + * instead of calling the underlying resources every time. + * + * You can force the provider to refresh the memoized value by invoke the + * function with optional parameter hash with `forceRefresh` boolean key and + * value `true`. + * + * @example A function that reads credentials from IMDS service that could + * return expired credentials. The SDK will keep using the expired credentials + * until an unretryable service error requiring a force refresh of the + * credentials. + */ +export interface MemoizedProvider { + (options?: { + forceRefresh?: boolean; + }): Promise; +} +/** + * @public + * + * A function that, given a request body, determines the + * length of the body. This is used to determine the Content-Length + * that should be sent with a request. + * + * @example A function that reads a file stream and calculates + * the size of the file. 
+ */ +export interface BodyLengthCalculator { + (body: any): number | undefined; +} +/** + * @public + * + * Object containing regionalization information of + * AWS services. + */ +export interface RegionInfo { + hostname: string; + partition: string; + path?: string; + signingService?: string; + signingRegion?: string; +} +/** + * @public + * + * Options to pass when calling {@link RegionInfoProvider} + */ +export interface RegionInfoProviderOptions { + /** + * Enables IPv6/IPv4 dualstack endpoint. + * @defaultValue false + */ + useDualstackEndpoint: boolean; + /** + * Enables FIPS compatible endpoints. + * @defaultValue false + */ + useFipsEndpoint: boolean; +} +/** + * @public + * + * Function returns designated service's regionalization + * information from given region. Each service client + * comes with its regionalization provider. it serves + * to provide the default values of related configurations + */ +export interface RegionInfoProvider { + (region: string, options?: RegionInfoProviderOptions): Promise; +} +/** + * @public + * + * Interface that specifies the retry behavior + */ +export interface RetryStrategy { + /** + * The retry mode describing how the retry strategy control the traffic flow. + */ + mode?: string; + /** + * the retry behavior the will invoke the next handler and handle the retry accordingly. + * This function should also update the $metadata from the response accordingly. + * @see {@link ResponseMetadata} + */ + retry: (next: FinalizeHandler, args: FinalizeHandlerArguments) => Promise>; +} +/** + * @public + * + * Indicates the parameter may be omitted if the parameter object T + * is equivalent to a Partial, i.e. all properties optional. + */ +export type OptionalParameter = Exact, T> extends true ? 
[ +] | [ + T +] : [ + T +]; diff --git a/amplify/functions/fetchDocuments/node_modules/@smithy/types/dist-types/ts3.4/waiter.d.ts b/amplify/functions/fetchDocuments/node_modules/@smithy/types/dist-types/ts3.4/waiter.d.ts new file mode 100644 index 0000000..2cc2fff --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@smithy/types/dist-types/ts3.4/waiter.d.ts @@ -0,0 +1,35 @@ +import { AbortController as DeprecatedAbortController } from "./abort"; +/** + * @public + */ +export interface WaiterConfiguration { + /** + * Required service client + */ + client: Client; + /** + * The amount of time in seconds a user is willing to wait for a waiter to complete. + */ + maxWaitTime: number; + /** + * @deprecated Use abortSignal + * Abort controller. Used for ending the waiter early. + */ + abortController?: AbortController | DeprecatedAbortController; + /** + * Abort Signal. Used for ending the waiter early. + */ + abortSignal?: AbortController["signal"] | DeprecatedAbortController["signal"]; + /** + * The minimum amount of time to delay between retries in seconds. This is the + * floor of the exponential backoff. This value defaults to service default + * if not specified. This value MUST be less than or equal to maxDelay and greater than 0. + */ + minDelay?: number; + /** + * The maximum amount of time to delay between retries in seconds. This is the + * ceiling of the exponential backoff. This value defaults to service default + * if not specified. If specified, this value MUST be greater than or equal to 1. 
+ */ + maxDelay?: number; +} diff --git a/amplify/functions/fetchDocuments/node_modules/@smithy/types/dist-types/uri.d.ts b/amplify/functions/fetchDocuments/node_modules/@smithy/types/dist-types/uri.d.ts new file mode 100644 index 0000000..d7b874c --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@smithy/types/dist-types/uri.d.ts @@ -0,0 +1,17 @@ +import { QueryParameterBag } from "./http"; +/** + * @internal + * + * Represents the components parts of a Uniform Resource Identifier used to + * construct the target location of a Request. + */ +export type URI = { + protocol: string; + hostname: string; + port?: number; + path: string; + query?: QueryParameterBag; + username?: string; + password?: string; + fragment?: string; +}; diff --git a/amplify/functions/fetchDocuments/node_modules/@smithy/types/dist-types/util.d.ts b/amplify/functions/fetchDocuments/node_modules/@smithy/types/dist-types/util.d.ts new file mode 100644 index 0000000..b15045c --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@smithy/types/dist-types/util.d.ts @@ -0,0 +1,176 @@ +import { Endpoint } from "./http"; +import { FinalizeHandler, FinalizeHandlerArguments, FinalizeHandlerOutput } from "./middleware"; +import { MetadataBearer } from "./response"; +/** + * @public + * + * A generic which checks if Type1 is exactly same as Type2. + */ +export type Exact = [Type1] extends [Type2] ? ([Type2] extends [Type1] ? true : false) : false; +/** + * @public + * + * A function that, given a Uint8Array of bytes, can produce a string + * representation thereof. The function may optionally attempt to + * convert other input types to Uint8Array before encoding. + * + * @example An encoder function that converts bytes to hexadecimal + * representation would return `'hello'` when given + * `new Uint8Array([104, 101, 108, 108, 111])`. + */ +export interface Encoder { + /** + * Caution: the `any` type on the input is for backwards compatibility. 
+ * Runtime support is limited to Uint8Array and string by default. + * + * You may choose to support more encoder input types if overriding the default + * implementations. + */ + (input: Uint8Array | string | any): string; +} +/** + * @public + * + * A function that, given a string, can derive the bytes represented by that + * string. + * + * @example A decoder function that converts bytes to hexadecimal + * representation would return `new Uint8Array([104, 101, 108, 108, 111])` when + * given the string `'hello'`. + */ +export interface Decoder { + (input: string): Uint8Array; +} +/** + * @public + * + * A function that, when invoked, returns a promise that will be fulfilled with + * a value of type T. + * + * @example A function that reads credentials from shared SDK configuration + * files, assuming roles and collecting MFA tokens as necessary. + */ +export interface Provider { + (): Promise; +} +/** + * @public + * + * A tuple that represents an API name and optional version + * of a library built using the AWS SDK. + */ +export type UserAgentPair = [name: string, version?: string]; +/** + * @public + * + * User agent data that to be put into the request's user + * agent. + */ +export type UserAgent = UserAgentPair[]; +/** + * @public + * + * Parses a URL in string form into an Endpoint object. + */ +export interface UrlParser { + (url: string | URL): Endpoint; +} +/** + * @public + * + * A function that, when invoked, returns a promise that will be fulfilled with + * a value of type T. It memoizes the result from the previous invocation + * instead of calling the underlying resources every time. + * + * You can force the provider to refresh the memoized value by invoke the + * function with optional parameter hash with `forceRefresh` boolean key and + * value `true`. + * + * @example A function that reads credentials from IMDS service that could + * return expired credentials. 
The SDK will keep using the expired credentials + * until an unretryable service error requiring a force refresh of the + * credentials. + */ +export interface MemoizedProvider { + (options?: { + forceRefresh?: boolean; + }): Promise; +} +/** + * @public + * + * A function that, given a request body, determines the + * length of the body. This is used to determine the Content-Length + * that should be sent with a request. + * + * @example A function that reads a file stream and calculates + * the size of the file. + */ +export interface BodyLengthCalculator { + (body: any): number | undefined; +} +/** + * @public + * + * Object containing regionalization information of + * AWS services. + */ +export interface RegionInfo { + hostname: string; + partition: string; + path?: string; + signingService?: string; + signingRegion?: string; +} +/** + * @public + * + * Options to pass when calling {@link RegionInfoProvider} + */ +export interface RegionInfoProviderOptions { + /** + * Enables IPv6/IPv4 dualstack endpoint. + * @defaultValue false + */ + useDualstackEndpoint: boolean; + /** + * Enables FIPS compatible endpoints. + * @defaultValue false + */ + useFipsEndpoint: boolean; +} +/** + * @public + * + * Function returns designated service's regionalization + * information from given region. Each service client + * comes with its regionalization provider. it serves + * to provide the default values of related configurations + */ +export interface RegionInfoProvider { + (region: string, options?: RegionInfoProviderOptions): Promise; +} +/** + * @public + * + * Interface that specifies the retry behavior + */ +export interface RetryStrategy { + /** + * The retry mode describing how the retry strategy control the traffic flow. + */ + mode?: string; + /** + * the retry behavior the will invoke the next handler and handle the retry accordingly. + * This function should also update the $metadata from the response accordingly. 
+ * @see {@link ResponseMetadata} + */ + retry: (next: FinalizeHandler, args: FinalizeHandlerArguments) => Promise>; +} +/** + * @public + * + * Indicates the parameter may be omitted if the parameter object T + * is equivalent to a Partial, i.e. all properties optional. + */ +export type OptionalParameter = Exact, T> extends true ? [] | [T] : [T]; diff --git a/amplify/functions/fetchDocuments/node_modules/@smithy/types/dist-types/waiter.d.ts b/amplify/functions/fetchDocuments/node_modules/@smithy/types/dist-types/waiter.d.ts new file mode 100644 index 0000000..5941832 --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@smithy/types/dist-types/waiter.d.ts @@ -0,0 +1,35 @@ +import { AbortController as DeprecatedAbortController } from "./abort"; +/** + * @public + */ +export interface WaiterConfiguration { + /** + * Required service client + */ + client: Client; + /** + * The amount of time in seconds a user is willing to wait for a waiter to complete. + */ + maxWaitTime: number; + /** + * @deprecated Use abortSignal + * Abort controller. Used for ending the waiter early. + */ + abortController?: AbortController | DeprecatedAbortController; + /** + * Abort Signal. Used for ending the waiter early. + */ + abortSignal?: AbortController["signal"] | DeprecatedAbortController["signal"]; + /** + * The minimum amount of time to delay between retries in seconds. This is the + * floor of the exponential backoff. This value defaults to service default + * if not specified. This value MUST be less than or equal to maxDelay and greater than 0. + */ + minDelay?: number; + /** + * The maximum amount of time to delay between retries in seconds. This is the + * ceiling of the exponential backoff. This value defaults to service default + * if not specified. If specified, this value MUST be greater than or equal to 1. 
+ */ + maxDelay?: number; +} diff --git a/amplify/functions/fetchDocuments/node_modules/@smithy/types/package.json b/amplify/functions/fetchDocuments/node_modules/@smithy/types/package.json new file mode 100644 index 0000000..87c5ad0 --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@smithy/types/package.json @@ -0,0 +1,60 @@ +{ + "name": "@smithy/types", + "version": "4.2.0", + "scripts": { + "build": "concurrently 'yarn:build:cjs' 'yarn:build:es' 'yarn:build:types && yarn build:types:downlevel'", + "build:cjs": "node ../../scripts/inline types", + "build:es": "yarn g:tsc -p tsconfig.es.json", + "build:types": "yarn g:tsc -p tsconfig.types.json", + "build:types:downlevel": "rimraf dist-types/ts3.4 && downlevel-dts dist-types dist-types/ts3.4 && node scripts/downlevel", + "stage-release": "rimraf ./.release && yarn pack && mkdir ./.release && tar zxvf ./package.tgz --directory ./.release && rm ./package.tgz", + "clean": "rimraf ./dist-* && rimraf *.tsbuildinfo || exit 0", + "lint": "eslint -c ../../.eslintrc.js \"src/**/*.ts\"", + "format": "prettier --config ../../prettier.config.js --ignore-path ../../.prettierignore --write \"**/*.{ts,md,json}\"", + "test": "yarn g:tsc -p tsconfig.test.json", + "extract:docs": "api-extractor run --local" + }, + "main": "./dist-cjs/index.js", + "module": "./dist-es/index.js", + "types": "./dist-types/index.d.ts", + "author": { + "name": "AWS Smithy Team", + "email": "", + "url": "https://smithy.io" + }, + "license": "Apache-2.0", + "dependencies": { + "tslib": "^2.6.2" + }, + "engines": { + "node": ">=18.0.0" + }, + "typesVersions": { + "<=4.0": { + "dist-types/*": [ + "dist-types/ts3.4/*" + ] + } + }, + "files": [ + "dist-*/**" + ], + "homepage": "https://github.com/smithy-lang/smithy-typescript/tree/main/packages/types", + "repository": { + "type": "git", + "url": "https://github.com/smithy-lang/smithy-typescript.git", + "directory": "packages/types" + }, + "devDependencies": { + "concurrently": "7.0.0", + 
"downlevel-dts": "0.10.1", + "rimraf": "3.0.2", + "typedoc": "0.23.23" + }, + "typedoc": { + "entryPoint": "src/index.ts" + }, + "publishConfig": { + "directory": ".release/package" + } +} \ No newline at end of file diff --git a/amplify/functions/fetchDocuments/node_modules/@smithy/url-parser/LICENSE b/amplify/functions/fetchDocuments/node_modules/@smithy/url-parser/LICENSE new file mode 100644 index 0000000..dd65ae0 --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@smithy/url-parser/LICENSE @@ -0,0 +1,201 @@ + Apache License + Version 2.0, January 2004 + http://www.apache.org/licenses/ + + TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION + + 1. Definitions. + + "License" shall mean the terms and conditions for use, reproduction, + and distribution as defined by Sections 1 through 9 of this document. + + "Licensor" shall mean the copyright owner or entity authorized by + the copyright owner that is granting the License. + + "Legal Entity" shall mean the union of the acting entity and all + other entities that control, are controlled by, or are under common + control with that entity. For the purposes of this definition, + "control" means (i) the power, direct or indirect, to cause the + direction or management of such entity, whether by contract or + otherwise, or (ii) ownership of fifty percent (50%) or more of the + outstanding shares, or (iii) beneficial ownership of such entity. + + "You" (or "Your") shall mean an individual or Legal Entity + exercising permissions granted by this License. + + "Source" form shall mean the preferred form for making modifications, + including but not limited to software source code, documentation + source, and configuration files. + + "Object" form shall mean any form resulting from mechanical + transformation or translation of a Source form, including but + not limited to compiled object code, generated documentation, + and conversions to other media types. 
+ + "Work" shall mean the work of authorship, whether in Source or + Object form, made available under the License, as indicated by a + copyright notice that is included in or attached to the work + (an example is provided in the Appendix below). + + "Derivative Works" shall mean any work, whether in Source or Object + form, that is based on (or derived from) the Work and for which the + editorial revisions, annotations, elaborations, or other modifications + represent, as a whole, an original work of authorship. For the purposes + of this License, Derivative Works shall not include works that remain + separable from, or merely link (or bind by name) to the interfaces of, + the Work and Derivative Works thereof. + + "Contribution" shall mean any work of authorship, including + the original version of the Work and any modifications or additions + to that Work or Derivative Works thereof, that is intentionally + submitted to Licensor for inclusion in the Work by the copyright owner + or by an individual or Legal Entity authorized to submit on behalf of + the copyright owner. For the purposes of this definition, "submitted" + means any form of electronic, verbal, or written communication sent + to the Licensor or its representatives, including but not limited to + communication on electronic mailing lists, source code control systems, + and issue tracking systems that are managed by, or on behalf of, the + Licensor for the purpose of discussing and improving the Work, but + excluding communication that is conspicuously marked or otherwise + designated in writing by the copyright owner as "Not a Contribution." + + "Contributor" shall mean Licensor and any individual or Legal Entity + on behalf of whom a Contribution has been received by Licensor and + subsequently incorporated within the Work. + + 2. Grant of Copyright License. 
Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + copyright license to reproduce, prepare Derivative Works of, + publicly display, publicly perform, sublicense, and distribute the + Work and such Derivative Works in Source or Object form. + + 3. Grant of Patent License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + (except as stated in this section) patent license to make, have made, + use, offer to sell, sell, import, and otherwise transfer the Work, + where such license applies only to those patent claims licensable + by such Contributor that are necessarily infringed by their + Contribution(s) alone or by combination of their Contribution(s) + with the Work to which such Contribution(s) was submitted. If You + institute patent litigation against any entity (including a + cross-claim or counterclaim in a lawsuit) alleging that the Work + or a Contribution incorporated within the Work constitutes direct + or contributory patent infringement, then any patent licenses + granted to You under this License for that Work shall terminate + as of the date such litigation is filed. + + 4. Redistribution. 
You may reproduce and distribute copies of the + Work or Derivative Works thereof in any medium, with or without + modifications, and in Source or Object form, provided that You + meet the following conditions: + + (a) You must give any other recipients of the Work or + Derivative Works a copy of this License; and + + (b) You must cause any modified files to carry prominent notices + stating that You changed the files; and + + (c) You must retain, in the Source form of any Derivative Works + that You distribute, all copyright, patent, trademark, and + attribution notices from the Source form of the Work, + excluding those notices that do not pertain to any part of + the Derivative Works; and + + (d) If the Work includes a "NOTICE" text file as part of its + distribution, then any Derivative Works that You distribute must + include a readable copy of the attribution notices contained + within such NOTICE file, excluding those notices that do not + pertain to any part of the Derivative Works, in at least one + of the following places: within a NOTICE text file distributed + as part of the Derivative Works; within the Source form or + documentation, if provided along with the Derivative Works; or, + within a display generated by the Derivative Works, if and + wherever such third-party notices normally appear. The contents + of the NOTICE file are for informational purposes only and + do not modify the License. You may add Your own attribution + notices within Derivative Works that You distribute, alongside + or as an addendum to the NOTICE text from the Work, provided + that such additional attribution notices cannot be construed + as modifying the License. 
+ + You may add Your own copyright statement to Your modifications and + may provide additional or different license terms and conditions + for use, reproduction, or distribution of Your modifications, or + for any such Derivative Works as a whole, provided Your use, + reproduction, and distribution of the Work otherwise complies with + the conditions stated in this License. + + 5. Submission of Contributions. Unless You explicitly state otherwise, + any Contribution intentionally submitted for inclusion in the Work + by You to the Licensor shall be under the terms and conditions of + this License, without any additional terms or conditions. + Notwithstanding the above, nothing herein shall supersede or modify + the terms of any separate license agreement you may have executed + with Licensor regarding such Contributions. + + 6. Trademarks. This License does not grant permission to use the trade + names, trademarks, service marks, or product names of the Licensor, + except as required for reasonable and customary use in describing the + origin of the Work and reproducing the content of the NOTICE file. + + 7. Disclaimer of Warranty. Unless required by applicable law or + agreed to in writing, Licensor provides the Work (and each + Contributor provides its Contributions) on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or + implied, including, without limitation, any warranties or conditions + of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A + PARTICULAR PURPOSE. You are solely responsible for determining the + appropriateness of using or redistributing the Work and assume any + risks associated with Your exercise of permissions under this License. + + 8. Limitation of Liability. 
In no event and under no legal theory, + whether in tort (including negligence), contract, or otherwise, + unless required by applicable law (such as deliberate and grossly + negligent acts) or agreed to in writing, shall any Contributor be + liable to You for damages, including any direct, indirect, special, + incidental, or consequential damages of any character arising as a + result of this License or out of the use or inability to use the + Work (including but not limited to damages for loss of goodwill, + work stoppage, computer failure or malfunction, or any and all + other commercial damages or losses), even if such Contributor + has been advised of the possibility of such damages. + + 9. Accepting Warranty or Additional Liability. While redistributing + the Work or Derivative Works thereof, You may choose to offer, + and charge a fee for, acceptance of support, warranty, indemnity, + or other liability obligations and/or rights consistent with this + License. However, in accepting such obligations, You may act only + on Your own behalf and on Your sole responsibility, not on behalf + of any other Contributor, and only if You agree to indemnify, + defend, and hold each Contributor harmless for any liability + incurred by, or claims asserted against, such Contributor by reason + of your accepting any such warranty or additional liability. + + END OF TERMS AND CONDITIONS + + APPENDIX: How to apply the Apache License to your work. + + To apply the Apache License to your work, attach the following + boilerplate notice, with the fields enclosed by brackets "{}" + replaced with your own identifying information. (Don't include + the brackets!) The text should be enclosed in the appropriate + comment syntax for the file format. We also recommend that a + file or class name and description of purpose be included on the + same "printed page" as the copyright notice for easier + identification within third-party archives. + + Copyright 2018-2020 Amazon.com, Inc. 
or its affiliates. All Rights Reserved. + + Licensed under the Apache License, Version 2.0 (the "License"); + you may not use this file except in compliance with the License. + You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + + Unless required by applicable law or agreed to in writing, software + distributed under the License is distributed on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + See the License for the specific language governing permissions and + limitations under the License. diff --git a/amplify/functions/fetchDocuments/node_modules/@smithy/url-parser/README.md b/amplify/functions/fetchDocuments/node_modules/@smithy/url-parser/README.md new file mode 100644 index 0000000..0d8d61e --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@smithy/url-parser/README.md @@ -0,0 +1,10 @@ +# @smithy/url-parser + +[![NPM version](https://img.shields.io/npm/v/@smithy/url-parser/latest.svg)](https://www.npmjs.com/package/@smithy/url-parser) +[![NPM downloads](https://img.shields.io/npm/dm/@smithy/url-parser.svg)](https://www.npmjs.com/package/@smithy/url-parser) + +> An internal package + +## Usage + +You probably shouldn't, at least directly. 
diff --git a/amplify/functions/fetchDocuments/node_modules/@smithy/url-parser/dist-cjs/index.js b/amplify/functions/fetchDocuments/node_modules/@smithy/url-parser/dist-cjs/index.js new file mode 100644 index 0000000..ab81787 --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@smithy/url-parser/dist-cjs/index.js @@ -0,0 +1,49 @@ +var __defProp = Object.defineProperty; +var __getOwnPropDesc = Object.getOwnPropertyDescriptor; +var __getOwnPropNames = Object.getOwnPropertyNames; +var __hasOwnProp = Object.prototype.hasOwnProperty; +var __name = (target, value) => __defProp(target, "name", { value, configurable: true }); +var __export = (target, all) => { + for (var name in all) + __defProp(target, name, { get: all[name], enumerable: true }); +}; +var __copyProps = (to, from, except, desc) => { + if (from && typeof from === "object" || typeof from === "function") { + for (let key of __getOwnPropNames(from)) + if (!__hasOwnProp.call(to, key) && key !== except) + __defProp(to, key, { get: () => from[key], enumerable: !(desc = __getOwnPropDesc(from, key)) || desc.enumerable }); + } + return to; +}; +var __toCommonJS = (mod) => __copyProps(__defProp({}, "__esModule", { value: true }), mod); + +// src/index.ts +var src_exports = {}; +__export(src_exports, { + parseUrl: () => parseUrl +}); +module.exports = __toCommonJS(src_exports); +var import_querystring_parser = require("@smithy/querystring-parser"); +var parseUrl = /* @__PURE__ */ __name((url) => { + if (typeof url === "string") { + return parseUrl(new URL(url)); + } + const { hostname, pathname, port, protocol, search } = url; + let query; + if (search) { + query = (0, import_querystring_parser.parseQueryString)(search); + } + return { + hostname, + port: port ? 
parseInt(port) : void 0, + protocol, + path: pathname, + query + }; +}, "parseUrl"); +// Annotate the CommonJS export names for ESM import in node: + +0 && (module.exports = { + parseUrl +}); + diff --git a/amplify/functions/fetchDocuments/node_modules/@smithy/url-parser/dist-es/index.js b/amplify/functions/fetchDocuments/node_modules/@smithy/url-parser/dist-es/index.js new file mode 100644 index 0000000..811f8bf --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@smithy/url-parser/dist-es/index.js @@ -0,0 +1,18 @@ +import { parseQueryString } from "@smithy/querystring-parser"; +export const parseUrl = (url) => { + if (typeof url === "string") { + return parseUrl(new URL(url)); + } + const { hostname, pathname, port, protocol, search } = url; + let query; + if (search) { + query = parseQueryString(search); + } + return { + hostname, + port: port ? parseInt(port) : undefined, + protocol, + path: pathname, + query, + }; +}; diff --git a/amplify/functions/fetchDocuments/node_modules/@smithy/url-parser/dist-types/index.d.ts b/amplify/functions/fetchDocuments/node_modules/@smithy/url-parser/dist-types/index.d.ts new file mode 100644 index 0000000..b0d91c9 --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@smithy/url-parser/dist-types/index.d.ts @@ -0,0 +1,5 @@ +import { UrlParser } from "@smithy/types"; +/** + * @internal + */ +export declare const parseUrl: UrlParser; diff --git a/amplify/functions/fetchDocuments/node_modules/@smithy/url-parser/dist-types/ts3.4/index.d.ts b/amplify/functions/fetchDocuments/node_modules/@smithy/url-parser/dist-types/ts3.4/index.d.ts new file mode 100644 index 0000000..d6f0ec5 --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@smithy/url-parser/dist-types/ts3.4/index.d.ts @@ -0,0 +1,5 @@ +import { UrlParser } from "@smithy/types"; +/** + * @internal + */ +export declare const parseUrl: UrlParser; diff --git a/amplify/functions/fetchDocuments/node_modules/@smithy/url-parser/package.json 
b/amplify/functions/fetchDocuments/node_modules/@smithy/url-parser/package.json new file mode 100644 index 0000000..10aebb8 --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@smithy/url-parser/package.json @@ -0,0 +1,61 @@ +{ + "name": "@smithy/url-parser", + "version": "4.0.2", + "scripts": { + "build": "concurrently 'yarn:build:cjs' 'yarn:build:es' 'yarn:build:types && yarn build:types:downlevel'", + "build:cjs": "node ../../scripts/inline url-parser", + "build:es": "yarn g:tsc -p tsconfig.es.json", + "build:types": "yarn g:tsc -p tsconfig.types.json", + "build:types:downlevel": "rimraf dist-types/ts3.4 && downlevel-dts dist-types dist-types/ts3.4", + "stage-release": "rimraf ./.release && yarn pack && mkdir ./.release && tar zxvf ./package.tgz --directory ./.release && rm ./package.tgz", + "clean": "rimraf ./dist-* && rimraf *.tsbuildinfo || exit 0", + "lint": "eslint -c ../../.eslintrc.js \"src/**/*.ts\"", + "format": "prettier --config ../../prettier.config.js --ignore-path ../../.prettierignore --write \"**/*.{ts,md,json}\"", + "test": "yarn g:vitest run", + "test:watch": "yarn g:vitest watch" + }, + "main": "./dist-cjs/index.js", + "module": "./dist-es/index.js", + "types": "./dist-types/index.d.ts", + "author": { + "name": "AWS SDK for JavaScript Team", + "url": "https://aws.amazon.com/javascript/" + }, + "license": "Apache-2.0", + "dependencies": { + "@smithy/querystring-parser": "^4.0.2", + "@smithy/types": "^4.2.0", + "tslib": "^2.6.2" + }, + "typesVersions": { + "<4.0": { + "dist-types/*": [ + "dist-types/ts3.4/*" + ] + } + }, + "files": [ + "dist-*/**" + ], + "homepage": "https://github.com/smithy-lang/smithy-typescript/tree/main/packages/url-parser", + "repository": { + "type": "git", + "url": "https://github.com/smithy-lang/smithy-typescript.git", + "directory": "packages/url-parser" + }, + "devDependencies": { + "concurrently": "7.0.0", + "downlevel-dts": "0.10.1", + "rimraf": "3.0.2", + "typedoc": "0.23.23" + }, + "typedoc": { + 
"entryPoint": "src/index.ts" + }, + "publishConfig": { + "directory": ".release/package" + }, + "engines": { + "node": ">=18.0.0" + } +} \ No newline at end of file diff --git a/amplify/functions/fetchDocuments/node_modules/@smithy/util-base64/LICENSE b/amplify/functions/fetchDocuments/node_modules/@smithy/util-base64/LICENSE new file mode 100644 index 0000000..7b6491b --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@smithy/util-base64/LICENSE @@ -0,0 +1,201 @@ +Apache License + Version 2.0, January 2004 + http://www.apache.org/licenses/ + + TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION + + 1. Definitions. + + "License" shall mean the terms and conditions for use, reproduction, + and distribution as defined by Sections 1 through 9 of this document. + + "Licensor" shall mean the copyright owner or entity authorized by + the copyright owner that is granting the License. + + "Legal Entity" shall mean the union of the acting entity and all + other entities that control, are controlled by, or are under common + control with that entity. For the purposes of this definition, + "control" means (i) the power, direct or indirect, to cause the + direction or management of such entity, whether by contract or + otherwise, or (ii) ownership of fifty percent (50%) or more of the + outstanding shares, or (iii) beneficial ownership of such entity. + + "You" (or "Your") shall mean an individual or Legal Entity + exercising permissions granted by this License. + + "Source" form shall mean the preferred form for making modifications, + including but not limited to software source code, documentation + source, and configuration files. + + "Object" form shall mean any form resulting from mechanical + transformation or translation of a Source form, including but + not limited to compiled object code, generated documentation, + and conversions to other media types. 
+ + "Work" shall mean the work of authorship, whether in Source or + Object form, made available under the License, as indicated by a + copyright notice that is included in or attached to the work + (an example is provided in the Appendix below). + + "Derivative Works" shall mean any work, whether in Source or Object + form, that is based on (or derived from) the Work and for which the + editorial revisions, annotations, elaborations, or other modifications + represent, as a whole, an original work of authorship. For the purposes + of this License, Derivative Works shall not include works that remain + separable from, or merely link (or bind by name) to the interfaces of, + the Work and Derivative Works thereof. + + "Contribution" shall mean any work of authorship, including + the original version of the Work and any modifications or additions + to that Work or Derivative Works thereof, that is intentionally + submitted to Licensor for inclusion in the Work by the copyright owner + or by an individual or Legal Entity authorized to submit on behalf of + the copyright owner. For the purposes of this definition, "submitted" + means any form of electronic, verbal, or written communication sent + to the Licensor or its representatives, including but not limited to + communication on electronic mailing lists, source code control systems, + and issue tracking systems that are managed by, or on behalf of, the + Licensor for the purpose of discussing and improving the Work, but + excluding communication that is conspicuously marked or otherwise + designated in writing by the copyright owner as "Not a Contribution." + + "Contributor" shall mean Licensor and any individual or Legal Entity + on behalf of whom a Contribution has been received by Licensor and + subsequently incorporated within the Work. + + 2. Grant of Copyright License. 
Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + copyright license to reproduce, prepare Derivative Works of, + publicly display, publicly perform, sublicense, and distribute the + Work and such Derivative Works in Source or Object form. + + 3. Grant of Patent License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + (except as stated in this section) patent license to make, have made, + use, offer to sell, sell, import, and otherwise transfer the Work, + where such license applies only to those patent claims licensable + by such Contributor that are necessarily infringed by their + Contribution(s) alone or by combination of their Contribution(s) + with the Work to which such Contribution(s) was submitted. If You + institute patent litigation against any entity (including a + cross-claim or counterclaim in a lawsuit) alleging that the Work + or a Contribution incorporated within the Work constitutes direct + or contributory patent infringement, then any patent licenses + granted to You under this License for that Work shall terminate + as of the date such litigation is filed. + + 4. Redistribution. 
You may reproduce and distribute copies of the + Work or Derivative Works thereof in any medium, with or without + modifications, and in Source or Object form, provided that You + meet the following conditions: + + (a) You must give any other recipients of the Work or + Derivative Works a copy of this License; and + + (b) You must cause any modified files to carry prominent notices + stating that You changed the files; and + + (c) You must retain, in the Source form of any Derivative Works + that You distribute, all copyright, patent, trademark, and + attribution notices from the Source form of the Work, + excluding those notices that do not pertain to any part of + the Derivative Works; and + + (d) If the Work includes a "NOTICE" text file as part of its + distribution, then any Derivative Works that You distribute must + include a readable copy of the attribution notices contained + within such NOTICE file, excluding those notices that do not + pertain to any part of the Derivative Works, in at least one + of the following places: within a NOTICE text file distributed + as part of the Derivative Works; within the Source form or + documentation, if provided along with the Derivative Works; or, + within a display generated by the Derivative Works, if and + wherever such third-party notices normally appear. The contents + of the NOTICE file are for informational purposes only and + do not modify the License. You may add Your own attribution + notices within Derivative Works that You distribute, alongside + or as an addendum to the NOTICE text from the Work, provided + that such additional attribution notices cannot be construed + as modifying the License. 
+ + You may add Your own copyright statement to Your modifications and + may provide additional or different license terms and conditions + for use, reproduction, or distribution of Your modifications, or + for any such Derivative Works as a whole, provided Your use, + reproduction, and distribution of the Work otherwise complies with + the conditions stated in this License. + + 5. Submission of Contributions. Unless You explicitly state otherwise, + any Contribution intentionally submitted for inclusion in the Work + by You to the Licensor shall be under the terms and conditions of + this License, without any additional terms or conditions. + Notwithstanding the above, nothing herein shall supersede or modify + the terms of any separate license agreement you may have executed + with Licensor regarding such Contributions. + + 6. Trademarks. This License does not grant permission to use the trade + names, trademarks, service marks, or product names of the Licensor, + except as required for reasonable and customary use in describing the + origin of the Work and reproducing the content of the NOTICE file. + + 7. Disclaimer of Warranty. Unless required by applicable law or + agreed to in writing, Licensor provides the Work (and each + Contributor provides its Contributions) on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or + implied, including, without limitation, any warranties or conditions + of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A + PARTICULAR PURPOSE. You are solely responsible for determining the + appropriateness of using or redistributing the Work and assume any + risks associated with Your exercise of permissions under this License. + + 8. Limitation of Liability. 
In no event and under no legal theory, + whether in tort (including negligence), contract, or otherwise, + unless required by applicable law (such as deliberate and grossly + negligent acts) or agreed to in writing, shall any Contributor be + liable to You for damages, including any direct, indirect, special, + incidental, or consequential damages of any character arising as a + result of this License or out of the use or inability to use the + Work (including but not limited to damages for loss of goodwill, + work stoppage, computer failure or malfunction, or any and all + other commercial damages or losses), even if such Contributor + has been advised of the possibility of such damages. + + 9. Accepting Warranty or Additional Liability. While redistributing + the Work or Derivative Works thereof, You may choose to offer, + and charge a fee for, acceptance of support, warranty, indemnity, + or other liability obligations and/or rights consistent with this + License. However, in accepting such obligations, You may act only + on Your own behalf and on Your sole responsibility, not on behalf + of any other Contributor, and only if You agree to indemnify, + defend, and hold each Contributor harmless for any liability + incurred by, or claims asserted against, such Contributor by reason + of your accepting any such warranty or additional liability. + + END OF TERMS AND CONDITIONS + + APPENDIX: How to apply the Apache License to your work. + + To apply the Apache License to your work, attach the following + boilerplate notice, with the fields enclosed by brackets "{}" + replaced with your own identifying information. (Don't include + the brackets!) The text should be enclosed in the appropriate + comment syntax for the file format. We also recommend that a + file or class name and description of purpose be included on the + same "printed page" as the copyright notice for easier + identification within third-party archives. + + Copyright 2018-2020 Amazon.com, Inc. 
or its affiliates. All Rights Reserved. + + Licensed under the Apache License, Version 2.0 (the "License"); + you may not use this file except in compliance with the License. + You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + + Unless required by applicable law or agreed to in writing, software + distributed under the License is distributed on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + See the License for the specific language governing permissions and + limitations under the License. \ No newline at end of file diff --git a/amplify/functions/fetchDocuments/node_modules/@smithy/util-base64/README.md b/amplify/functions/fetchDocuments/node_modules/@smithy/util-base64/README.md new file mode 100644 index 0000000..c9b6c87 --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@smithy/util-base64/README.md @@ -0,0 +1,4 @@ +# @smithy/util-base64 + +[![NPM version](https://img.shields.io/npm/v/@smithy/util-base64/latest.svg)](https://www.npmjs.com/package/@smithy/util-base64) +[![NPM downloads](https://img.shields.io/npm/dm/@smithy/util-base64.svg)](https://www.npmjs.com/package/@smithy/util-base64) diff --git a/amplify/functions/fetchDocuments/node_modules/@smithy/util-base64/dist-cjs/constants.browser.js b/amplify/functions/fetchDocuments/node_modules/@smithy/util-base64/dist-cjs/constants.browser.js new file mode 100644 index 0000000..d35d09f --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@smithy/util-base64/dist-cjs/constants.browser.js @@ -0,0 +1,35 @@ +"use strict"; +Object.defineProperty(exports, "__esModule", { value: true }); +exports.maxLetterValue = exports.bitsPerByte = exports.bitsPerLetter = exports.alphabetByValue = exports.alphabetByEncoding = void 0; +const alphabetByEncoding = {}; +exports.alphabetByEncoding = alphabetByEncoding; +const alphabetByValue = new Array(64); +exports.alphabetByValue = alphabetByValue; +for (let i = 0, 
start = "A".charCodeAt(0), limit = "Z".charCodeAt(0); i + start <= limit; i++) { + const char = String.fromCharCode(i + start); + alphabetByEncoding[char] = i; + alphabetByValue[i] = char; +} +for (let i = 0, start = "a".charCodeAt(0), limit = "z".charCodeAt(0); i + start <= limit; i++) { + const char = String.fromCharCode(i + start); + const index = i + 26; + alphabetByEncoding[char] = index; + alphabetByValue[index] = char; +} +for (let i = 0; i < 10; i++) { + alphabetByEncoding[i.toString(10)] = i + 52; + const char = i.toString(10); + const index = i + 52; + alphabetByEncoding[char] = index; + alphabetByValue[index] = char; +} +alphabetByEncoding["+"] = 62; +alphabetByValue[62] = "+"; +alphabetByEncoding["/"] = 63; +alphabetByValue[63] = "/"; +const bitsPerLetter = 6; +exports.bitsPerLetter = bitsPerLetter; +const bitsPerByte = 8; +exports.bitsPerByte = bitsPerByte; +const maxLetterValue = 0b111111; +exports.maxLetterValue = maxLetterValue; diff --git a/amplify/functions/fetchDocuments/node_modules/@smithy/util-base64/dist-cjs/fromBase64.browser.js b/amplify/functions/fetchDocuments/node_modules/@smithy/util-base64/dist-cjs/fromBase64.browser.js new file mode 100644 index 0000000..a5baffd --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@smithy/util-base64/dist-cjs/fromBase64.browser.js @@ -0,0 +1,40 @@ +"use strict"; +Object.defineProperty(exports, "__esModule", { value: true }); +exports.fromBase64 = void 0; +const constants_browser_1 = require("./constants.browser"); +const fromBase64 = (input) => { + let totalByteLength = (input.length / 4) * 3; + if (input.slice(-2) === "==") { + totalByteLength -= 2; + } + else if (input.slice(-1) === "=") { + totalByteLength--; + } + const out = new ArrayBuffer(totalByteLength); + const dataView = new DataView(out); + for (let i = 0; i < input.length; i += 4) { + let bits = 0; + let bitLength = 0; + for (let j = i, limit = i + 3; j <= limit; j++) { + if (input[j] !== "=") { + if (!(input[j] in 
constants_browser_1.alphabetByEncoding)) { + throw new TypeError(`Invalid character ${input[j]} in base64 string.`); + } + bits |= constants_browser_1.alphabetByEncoding[input[j]] << ((limit - j) * constants_browser_1.bitsPerLetter); + bitLength += constants_browser_1.bitsPerLetter; + } + else { + bits >>= constants_browser_1.bitsPerLetter; + } + } + const chunkOffset = (i / 4) * 3; + bits >>= bitLength % constants_browser_1.bitsPerByte; + const byteLength = Math.floor(bitLength / constants_browser_1.bitsPerByte); + for (let k = 0; k < byteLength; k++) { + const offset = (byteLength - k - 1) * constants_browser_1.bitsPerByte; + dataView.setUint8(chunkOffset + k, (bits & (255 << offset)) >> offset); + } + } + return new Uint8Array(out); +}; +exports.fromBase64 = fromBase64; diff --git a/amplify/functions/fetchDocuments/node_modules/@smithy/util-base64/dist-cjs/fromBase64.js b/amplify/functions/fetchDocuments/node_modules/@smithy/util-base64/dist-cjs/fromBase64.js new file mode 100644 index 0000000..b06a7b8 --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@smithy/util-base64/dist-cjs/fromBase64.js @@ -0,0 +1,16 @@ +"use strict"; +Object.defineProperty(exports, "__esModule", { value: true }); +exports.fromBase64 = void 0; +const util_buffer_from_1 = require("@smithy/util-buffer-from"); +const BASE64_REGEX = /^[A-Za-z0-9+/]*={0,2}$/; +const fromBase64 = (input) => { + if ((input.length * 3) % 4 !== 0) { + throw new TypeError(`Incorrect padding on base64 string.`); + } + if (!BASE64_REGEX.exec(input)) { + throw new TypeError(`Invalid base64 string.`); + } + const buffer = (0, util_buffer_from_1.fromString)(input, "base64"); + return new Uint8Array(buffer.buffer, buffer.byteOffset, buffer.byteLength); +}; +exports.fromBase64 = fromBase64; diff --git a/amplify/functions/fetchDocuments/node_modules/@smithy/util-base64/dist-cjs/index.js b/amplify/functions/fetchDocuments/node_modules/@smithy/util-base64/dist-cjs/index.js new file mode 100644 index 
0000000..02848d0 --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@smithy/util-base64/dist-cjs/index.js @@ -0,0 +1,27 @@ +var __defProp = Object.defineProperty; +var __getOwnPropDesc = Object.getOwnPropertyDescriptor; +var __getOwnPropNames = Object.getOwnPropertyNames; +var __hasOwnProp = Object.prototype.hasOwnProperty; +var __copyProps = (to, from, except, desc) => { + if (from && typeof from === "object" || typeof from === "function") { + for (let key of __getOwnPropNames(from)) + if (!__hasOwnProp.call(to, key) && key !== except) + __defProp(to, key, { get: () => from[key], enumerable: !(desc = __getOwnPropDesc(from, key)) || desc.enumerable }); + } + return to; +}; +var __reExport = (target, mod, secondTarget) => (__copyProps(target, mod, "default"), secondTarget && __copyProps(secondTarget, mod, "default")); +var __toCommonJS = (mod) => __copyProps(__defProp({}, "__esModule", { value: true }), mod); + +// src/index.ts +var src_exports = {}; +module.exports = __toCommonJS(src_exports); +__reExport(src_exports, require("././fromBase64"), module.exports); +__reExport(src_exports, require("././toBase64"), module.exports); +// Annotate the CommonJS export names for ESM import in node: + +0 && (module.exports = { + fromBase64, + toBase64 +}); + diff --git a/amplify/functions/fetchDocuments/node_modules/@smithy/util-base64/dist-cjs/toBase64.browser.js b/amplify/functions/fetchDocuments/node_modules/@smithy/util-base64/dist-cjs/toBase64.browser.js new file mode 100644 index 0000000..e294f3f --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@smithy/util-base64/dist-cjs/toBase64.browser.js @@ -0,0 +1,39 @@ +"use strict"; +Object.defineProperty(exports, "__esModule", { value: true }); +exports.toBase64 = void 0; +const util_utf8_1 = require("@smithy/util-utf8"); +const constants_browser_1 = require("./constants.browser"); +function toBase64(_input) { + let input; + if (typeof _input === "string") { + input = (0, 
util_utf8_1.fromUtf8)(_input); + } + else { + input = _input; + } + const isArrayLike = typeof input === "object" && typeof input.length === "number"; + const isUint8Array = typeof input === "object" && + typeof input.byteOffset === "number" && + typeof input.byteLength === "number"; + if (!isArrayLike && !isUint8Array) { + throw new Error("@smithy/util-base64: toBase64 encoder function only accepts string | Uint8Array."); + } + let str = ""; + for (let i = 0; i < input.length; i += 3) { + let bits = 0; + let bitLength = 0; + for (let j = i, limit = Math.min(i + 3, input.length); j < limit; j++) { + bits |= input[j] << ((limit - j - 1) * constants_browser_1.bitsPerByte); + bitLength += constants_browser_1.bitsPerByte; + } + const bitClusterCount = Math.ceil(bitLength / constants_browser_1.bitsPerLetter); + bits <<= bitClusterCount * constants_browser_1.bitsPerLetter - bitLength; + for (let k = 1; k <= bitClusterCount; k++) { + const offset = (bitClusterCount - k) * constants_browser_1.bitsPerLetter; + str += constants_browser_1.alphabetByValue[(bits & (constants_browser_1.maxLetterValue << offset)) >> offset]; + } + str += "==".slice(0, 4 - bitClusterCount); + } + return str; +} +exports.toBase64 = toBase64; diff --git a/amplify/functions/fetchDocuments/node_modules/@smithy/util-base64/dist-cjs/toBase64.js b/amplify/functions/fetchDocuments/node_modules/@smithy/util-base64/dist-cjs/toBase64.js new file mode 100644 index 0000000..0590ce3 --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@smithy/util-base64/dist-cjs/toBase64.js @@ -0,0 +1,19 @@ +"use strict"; +Object.defineProperty(exports, "__esModule", { value: true }); +exports.toBase64 = void 0; +const util_buffer_from_1 = require("@smithy/util-buffer-from"); +const util_utf8_1 = require("@smithy/util-utf8"); +const toBase64 = (_input) => { + let input; + if (typeof _input === "string") { + input = (0, util_utf8_1.fromUtf8)(_input); + } + else { + input = _input; + } + if (typeof input !== 
"object" || typeof input.byteOffset !== "number" || typeof input.byteLength !== "number") { + throw new Error("@smithy/util-base64: toBase64 encoder function only accepts string | Uint8Array."); + } + return (0, util_buffer_from_1.fromArrayBuffer)(input.buffer, input.byteOffset, input.byteLength).toString("base64"); +}; +exports.toBase64 = toBase64; diff --git a/amplify/functions/fetchDocuments/node_modules/@smithy/util-base64/dist-es/constants.browser.js b/amplify/functions/fetchDocuments/node_modules/@smithy/util-base64/dist-es/constants.browser.js new file mode 100644 index 0000000..fd4df4d --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@smithy/util-base64/dist-es/constants.browser.js @@ -0,0 +1,28 @@ +const alphabetByEncoding = {}; +const alphabetByValue = new Array(64); +for (let i = 0, start = "A".charCodeAt(0), limit = "Z".charCodeAt(0); i + start <= limit; i++) { + const char = String.fromCharCode(i + start); + alphabetByEncoding[char] = i; + alphabetByValue[i] = char; +} +for (let i = 0, start = "a".charCodeAt(0), limit = "z".charCodeAt(0); i + start <= limit; i++) { + const char = String.fromCharCode(i + start); + const index = i + 26; + alphabetByEncoding[char] = index; + alphabetByValue[index] = char; +} +for (let i = 0; i < 10; i++) { + alphabetByEncoding[i.toString(10)] = i + 52; + const char = i.toString(10); + const index = i + 52; + alphabetByEncoding[char] = index; + alphabetByValue[index] = char; +} +alphabetByEncoding["+"] = 62; +alphabetByValue[62] = "+"; +alphabetByEncoding["/"] = 63; +alphabetByValue[63] = "/"; +const bitsPerLetter = 6; +const bitsPerByte = 8; +const maxLetterValue = 0b111111; +export { alphabetByEncoding, alphabetByValue, bitsPerLetter, bitsPerByte, maxLetterValue }; diff --git a/amplify/functions/fetchDocuments/node_modules/@smithy/util-base64/dist-es/fromBase64.browser.js b/amplify/functions/fetchDocuments/node_modules/@smithy/util-base64/dist-es/fromBase64.browser.js new file mode 100644 index 
0000000..c2c6a66 --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@smithy/util-base64/dist-es/fromBase64.browser.js @@ -0,0 +1,36 @@ +import { alphabetByEncoding, bitsPerByte, bitsPerLetter } from "./constants.browser"; +export const fromBase64 = (input) => { + let totalByteLength = (input.length / 4) * 3; + if (input.slice(-2) === "==") { + totalByteLength -= 2; + } + else if (input.slice(-1) === "=") { + totalByteLength--; + } + const out = new ArrayBuffer(totalByteLength); + const dataView = new DataView(out); + for (let i = 0; i < input.length; i += 4) { + let bits = 0; + let bitLength = 0; + for (let j = i, limit = i + 3; j <= limit; j++) { + if (input[j] !== "=") { + if (!(input[j] in alphabetByEncoding)) { + throw new TypeError(`Invalid character ${input[j]} in base64 string.`); + } + bits |= alphabetByEncoding[input[j]] << ((limit - j) * bitsPerLetter); + bitLength += bitsPerLetter; + } + else { + bits >>= bitsPerLetter; + } + } + const chunkOffset = (i / 4) * 3; + bits >>= bitLength % bitsPerByte; + const byteLength = Math.floor(bitLength / bitsPerByte); + for (let k = 0; k < byteLength; k++) { + const offset = (byteLength - k - 1) * bitsPerByte; + dataView.setUint8(chunkOffset + k, (bits & (255 << offset)) >> offset); + } + } + return new Uint8Array(out); +}; diff --git a/amplify/functions/fetchDocuments/node_modules/@smithy/util-base64/dist-es/fromBase64.js b/amplify/functions/fetchDocuments/node_modules/@smithy/util-base64/dist-es/fromBase64.js new file mode 100644 index 0000000..5197e93 --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@smithy/util-base64/dist-es/fromBase64.js @@ -0,0 +1,12 @@ +import { fromString } from "@smithy/util-buffer-from"; +const BASE64_REGEX = /^[A-Za-z0-9+/]*={0,2}$/; +export const fromBase64 = (input) => { + if ((input.length * 3) % 4 !== 0) { + throw new TypeError(`Incorrect padding on base64 string.`); + } + if (!BASE64_REGEX.exec(input)) { + throw new TypeError(`Invalid base64 string.`); 
+ } + const buffer = fromString(input, "base64"); + return new Uint8Array(buffer.buffer, buffer.byteOffset, buffer.byteLength); +}; diff --git a/amplify/functions/fetchDocuments/node_modules/@smithy/util-base64/dist-es/index.js b/amplify/functions/fetchDocuments/node_modules/@smithy/util-base64/dist-es/index.js new file mode 100644 index 0000000..594bd43 --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@smithy/util-base64/dist-es/index.js @@ -0,0 +1,2 @@ +export * from "./fromBase64"; +export * from "./toBase64"; diff --git a/amplify/functions/fetchDocuments/node_modules/@smithy/util-base64/dist-es/toBase64.browser.js b/amplify/functions/fetchDocuments/node_modules/@smithy/util-base64/dist-es/toBase64.browser.js new file mode 100644 index 0000000..2a03a9d --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@smithy/util-base64/dist-es/toBase64.browser.js @@ -0,0 +1,35 @@ +import { fromUtf8 } from "@smithy/util-utf8"; +import { alphabetByValue, bitsPerByte, bitsPerLetter, maxLetterValue } from "./constants.browser"; +export function toBase64(_input) { + let input; + if (typeof _input === "string") { + input = fromUtf8(_input); + } + else { + input = _input; + } + const isArrayLike = typeof input === "object" && typeof input.length === "number"; + const isUint8Array = typeof input === "object" && + typeof input.byteOffset === "number" && + typeof input.byteLength === "number"; + if (!isArrayLike && !isUint8Array) { + throw new Error("@smithy/util-base64: toBase64 encoder function only accepts string | Uint8Array."); + } + let str = ""; + for (let i = 0; i < input.length; i += 3) { + let bits = 0; + let bitLength = 0; + for (let j = i, limit = Math.min(i + 3, input.length); j < limit; j++) { + bits |= input[j] << ((limit - j - 1) * bitsPerByte); + bitLength += bitsPerByte; + } + const bitClusterCount = Math.ceil(bitLength / bitsPerLetter); + bits <<= bitClusterCount * bitsPerLetter - bitLength; + for (let k = 1; k <= bitClusterCount; k++) 
{ + const offset = (bitClusterCount - k) * bitsPerLetter; + str += alphabetByValue[(bits & (maxLetterValue << offset)) >> offset]; + } + str += "==".slice(0, 4 - bitClusterCount); + } + return str; +} diff --git a/amplify/functions/fetchDocuments/node_modules/@smithy/util-base64/dist-es/toBase64.js b/amplify/functions/fetchDocuments/node_modules/@smithy/util-base64/dist-es/toBase64.js new file mode 100644 index 0000000..61f03ce --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@smithy/util-base64/dist-es/toBase64.js @@ -0,0 +1,15 @@ +import { fromArrayBuffer } from "@smithy/util-buffer-from"; +import { fromUtf8 } from "@smithy/util-utf8"; +export const toBase64 = (_input) => { + let input; + if (typeof _input === "string") { + input = fromUtf8(_input); + } + else { + input = _input; + } + if (typeof input !== "object" || typeof input.byteOffset !== "number" || typeof input.byteLength !== "number") { + throw new Error("@smithy/util-base64: toBase64 encoder function only accepts string | Uint8Array."); + } + return fromArrayBuffer(input.buffer, input.byteOffset, input.byteLength).toString("base64"); +}; diff --git a/amplify/functions/fetchDocuments/node_modules/@smithy/util-base64/dist-types/constants.browser.d.ts b/amplify/functions/fetchDocuments/node_modules/@smithy/util-base64/dist-types/constants.browser.d.ts new file mode 100644 index 0000000..eb750ea --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@smithy/util-base64/dist-types/constants.browser.d.ts @@ -0,0 +1,6 @@ +declare const alphabetByEncoding: Record; +declare const alphabetByValue: Array; +declare const bitsPerLetter = 6; +declare const bitsPerByte = 8; +declare const maxLetterValue = 63; +export { alphabetByEncoding, alphabetByValue, bitsPerLetter, bitsPerByte, maxLetterValue }; diff --git a/amplify/functions/fetchDocuments/node_modules/@smithy/util-base64/dist-types/fromBase64.browser.d.ts 
b/amplify/functions/fetchDocuments/node_modules/@smithy/util-base64/dist-types/fromBase64.browser.d.ts new file mode 100644 index 0000000..6a640f1 --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@smithy/util-base64/dist-types/fromBase64.browser.d.ts @@ -0,0 +1,8 @@ +/** + * Converts a base-64 encoded string to a Uint8Array of bytes. + * + * @param input The base-64 encoded string + * + * @see https://tools.ietf.org/html/rfc4648#section-4 + */ +export declare const fromBase64: (input: string) => Uint8Array; diff --git a/amplify/functions/fetchDocuments/node_modules/@smithy/util-base64/dist-types/fromBase64.d.ts b/amplify/functions/fetchDocuments/node_modules/@smithy/util-base64/dist-types/fromBase64.d.ts new file mode 100644 index 0000000..1878a89 --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@smithy/util-base64/dist-types/fromBase64.d.ts @@ -0,0 +1,7 @@ +/** + * Converts a base-64 encoded string to a Uint8Array of bytes using Node.JS's + * `buffer` module. 
+ * + * @param input The base-64 encoded string + */ +export declare const fromBase64: (input: string) => Uint8Array; diff --git a/amplify/functions/fetchDocuments/node_modules/@smithy/util-base64/dist-types/index.d.ts b/amplify/functions/fetchDocuments/node_modules/@smithy/util-base64/dist-types/index.d.ts new file mode 100644 index 0000000..594bd43 --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@smithy/util-base64/dist-types/index.d.ts @@ -0,0 +1,2 @@ +export * from "./fromBase64"; +export * from "./toBase64"; diff --git a/amplify/functions/fetchDocuments/node_modules/@smithy/util-base64/dist-types/toBase64.browser.d.ts b/amplify/functions/fetchDocuments/node_modules/@smithy/util-base64/dist-types/toBase64.browser.d.ts new file mode 100644 index 0000000..5f5615e --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@smithy/util-base64/dist-types/toBase64.browser.d.ts @@ -0,0 +1,9 @@ +/** + * Converts a Uint8Array of binary data or a utf-8 string to a base-64 encoded string. + * + * @param _input - the binary data or string to encode. + * @returns base64 string. + * + * @see https://tools.ietf.org/html/rfc4648#section-4 + */ +export declare function toBase64(_input: Uint8Array | string): string; diff --git a/amplify/functions/fetchDocuments/node_modules/@smithy/util-base64/dist-types/toBase64.d.ts b/amplify/functions/fetchDocuments/node_modules/@smithy/util-base64/dist-types/toBase64.d.ts new file mode 100644 index 0000000..96bd0ed --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@smithy/util-base64/dist-types/toBase64.d.ts @@ -0,0 +1,8 @@ +/** + * Converts a Uint8Array of binary data or a utf-8 string to a base-64 encoded string using + * Node.JS's `buffer` module. + * + * @param _input - the binary data or string to encode. + * @returns base64 string. 
+ */ +export declare const toBase64: (_input: Uint8Array | string) => string; diff --git a/amplify/functions/fetchDocuments/node_modules/@smithy/util-base64/dist-types/ts3.4/constants.browser.d.ts b/amplify/functions/fetchDocuments/node_modules/@smithy/util-base64/dist-types/ts3.4/constants.browser.d.ts new file mode 100644 index 0000000..61c36c8 --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@smithy/util-base64/dist-types/ts3.4/constants.browser.d.ts @@ -0,0 +1,6 @@ +declare const alphabetByEncoding: Record; +declare const alphabetByValue: Array; +declare const bitsPerLetter = 6; +declare const bitsPerByte = 8; +declare const maxLetterValue = 63; +export { alphabetByEncoding, alphabetByValue, bitsPerLetter, bitsPerByte, maxLetterValue }; diff --git a/amplify/functions/fetchDocuments/node_modules/@smithy/util-base64/dist-types/ts3.4/fromBase64.browser.d.ts b/amplify/functions/fetchDocuments/node_modules/@smithy/util-base64/dist-types/ts3.4/fromBase64.browser.d.ts new file mode 100644 index 0000000..3a50006 --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@smithy/util-base64/dist-types/ts3.4/fromBase64.browser.d.ts @@ -0,0 +1,8 @@ +/** + * Converts a base-64 encoded string to a Uint8Array of bytes. + * + * @param input The base-64 encoded string + * + * @see https://tools.ietf.org/html/rfc4648#section-4 + */ +export declare const fromBase64: (input: string) => Uint8Array; diff --git a/amplify/functions/fetchDocuments/node_modules/@smithy/util-base64/dist-types/ts3.4/fromBase64.d.ts b/amplify/functions/fetchDocuments/node_modules/@smithy/util-base64/dist-types/ts3.4/fromBase64.d.ts new file mode 100644 index 0000000..f84c7c6 --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@smithy/util-base64/dist-types/ts3.4/fromBase64.d.ts @@ -0,0 +1,7 @@ +/** + * Converts a base-64 encoded string to a Uint8Array of bytes using Node.JS's + * `buffer` module. 
+ * + * @param input The base-64 encoded string + */ +export declare const fromBase64: (input: string) => Uint8Array; diff --git a/amplify/functions/fetchDocuments/node_modules/@smithy/util-base64/dist-types/ts3.4/index.d.ts b/amplify/functions/fetchDocuments/node_modules/@smithy/util-base64/dist-types/ts3.4/index.d.ts new file mode 100644 index 0000000..c4e1d03 --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@smithy/util-base64/dist-types/ts3.4/index.d.ts @@ -0,0 +1,2 @@ +export * from "./fromBase64"; +export * from "./toBase64"; diff --git a/amplify/functions/fetchDocuments/node_modules/@smithy/util-base64/dist-types/ts3.4/toBase64.browser.d.ts b/amplify/functions/fetchDocuments/node_modules/@smithy/util-base64/dist-types/ts3.4/toBase64.browser.d.ts new file mode 100644 index 0000000..260f696 --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@smithy/util-base64/dist-types/ts3.4/toBase64.browser.d.ts @@ -0,0 +1,9 @@ +/** + * Converts a Uint8Array of binary data or a utf-8 string to a base-64 encoded string. + * + * @param _input - the binary data or string to encode. + * @returns base64 string. + * + * @see https://tools.ietf.org/html/rfc4648#section-4 + */ +export declare function toBase64(_input: Uint8Array | string): string; diff --git a/amplify/functions/fetchDocuments/node_modules/@smithy/util-base64/dist-types/ts3.4/toBase64.d.ts b/amplify/functions/fetchDocuments/node_modules/@smithy/util-base64/dist-types/ts3.4/toBase64.d.ts new file mode 100644 index 0000000..7e8bb70 --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@smithy/util-base64/dist-types/ts3.4/toBase64.d.ts @@ -0,0 +1,8 @@ +/** + * Converts a Uint8Array of binary data or a utf-8 string to a base-64 encoded string using + * Node.JS's `buffer` module. + * + * @param _input - the binary data or string to encode. + * @returns base64 string. 
+ */ +export declare const toBase64: (_input: Uint8Array | string) => string; diff --git a/amplify/functions/fetchDocuments/node_modules/@smithy/util-base64/package.json b/amplify/functions/fetchDocuments/node_modules/@smithy/util-base64/package.json new file mode 100644 index 0000000..e122233 --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@smithy/util-base64/package.json @@ -0,0 +1,73 @@ +{ + "name": "@smithy/util-base64", + "version": "4.0.0", + "description": "A Base64 <-> UInt8Array converter", + "main": "./dist-cjs/index.js", + "module": "./dist-es/index.js", + "scripts": { + "build": "concurrently 'yarn:build:cjs' 'yarn:build:es' 'yarn:build:types && yarn build:types:downlevel'", + "build:cjs": "node ../../scripts/inline util-base64", + "build:es": "yarn g:tsc -p tsconfig.es.json", + "build:types": "yarn g:tsc -p tsconfig.types.json", + "build:types:downlevel": "rimraf dist-types/ts3.4 && downlevel-dts dist-types dist-types/ts3.4", + "stage-release": "rimraf ./.release && yarn pack && mkdir ./.release && tar zxvf ./package.tgz --directory ./.release && rm ./package.tgz", + "clean": "rimraf ./dist-* && rimraf *.tsbuildinfo || exit 0", + "lint": "eslint -c ../../.eslintrc.js \"src/**/*.ts\"", + "format": "prettier --config ../../prettier.config.js --ignore-path ../.prettierignore --write \"**/*.{ts,md,json}\"", + "test": "yarn g:vitest run", + "test:watch": "yarn g:vitest watch" + }, + "author": { + "name": "AWS SDK for JavaScript Team", + "url": "https://aws.amazon.com/javascript/" + }, + "license": "Apache-2.0", + "dependencies": { + "@smithy/util-buffer-from": "^4.0.0", + "@smithy/util-utf8": "^4.0.0", + "tslib": "^2.6.2" + }, + "devDependencies": { + "@types/node": "^18.11.9", + "concurrently": "7.0.0", + "downlevel-dts": "0.10.1", + "rimraf": "3.0.2", + "typedoc": "0.23.23" + }, + "types": "./dist-types/index.d.ts", + "engines": { + "node": ">=18.0.0" + }, + "typesVersions": { + "<4.0": { + "dist-types/*": [ + "dist-types/ts3.4/*" + ] + 
} + }, + "files": [ + "dist-*/**" + ], + "browser": { + "./dist-es/fromBase64": "./dist-es/fromBase64.browser", + "./dist-es/toBase64": "./dist-es/toBase64.browser" + }, + "react-native": { + "./dist-es/fromBase64": "./dist-es/fromBase64.browser", + "./dist-es/toBase64": "./dist-es/toBase64.browser", + "./dist-cjs/fromBase64": "./dist-cjs/fromBase64.browser", + "./dist-cjs/toBase64": "./dist-cjs/toBase64.browser" + }, + "homepage": "https://github.com/awslabs/smithy-typescript/tree/main/packages/util-base64", + "repository": { + "type": "git", + "url": "https://github.com/awslabs/smithy-typescript.git", + "directory": "packages/util-base64" + }, + "typedoc": { + "entryPoint": "src/index.ts" + }, + "publishConfig": { + "directory": ".release/package" + } +} \ No newline at end of file diff --git a/amplify/functions/fetchDocuments/node_modules/@smithy/util-body-length-browser/LICENSE b/amplify/functions/fetchDocuments/node_modules/@smithy/util-body-length-browser/LICENSE new file mode 100644 index 0000000..7b6491b --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@smithy/util-body-length-browser/LICENSE @@ -0,0 +1,201 @@ +Apache License + Version 2.0, January 2004 + http://www.apache.org/licenses/ + + TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION + + 1. Definitions. + + "License" shall mean the terms and conditions for use, reproduction, + and distribution as defined by Sections 1 through 9 of this document. + + "Licensor" shall mean the copyright owner or entity authorized by + the copyright owner that is granting the License. + + "Legal Entity" shall mean the union of the acting entity and all + other entities that control, are controlled by, or are under common + control with that entity. 
For the purposes of this definition, + "control" means (i) the power, direct or indirect, to cause the + direction or management of such entity, whether by contract or + otherwise, or (ii) ownership of fifty percent (50%) or more of the + outstanding shares, or (iii) beneficial ownership of such entity. + + "You" (or "Your") shall mean an individual or Legal Entity + exercising permissions granted by this License. + + "Source" form shall mean the preferred form for making modifications, + including but not limited to software source code, documentation + source, and configuration files. + + "Object" form shall mean any form resulting from mechanical + transformation or translation of a Source form, including but + not limited to compiled object code, generated documentation, + and conversions to other media types. + + "Work" shall mean the work of authorship, whether in Source or + Object form, made available under the License, as indicated by a + copyright notice that is included in or attached to the work + (an example is provided in the Appendix below). + + "Derivative Works" shall mean any work, whether in Source or Object + form, that is based on (or derived from) the Work and for which the + editorial revisions, annotations, elaborations, or other modifications + represent, as a whole, an original work of authorship. For the purposes + of this License, Derivative Works shall not include works that remain + separable from, or merely link (or bind by name) to the interfaces of, + the Work and Derivative Works thereof. + + "Contribution" shall mean any work of authorship, including + the original version of the Work and any modifications or additions + to that Work or Derivative Works thereof, that is intentionally + submitted to Licensor for inclusion in the Work by the copyright owner + or by an individual or Legal Entity authorized to submit on behalf of + the copyright owner. 
For the purposes of this definition, "submitted" + means any form of electronic, verbal, or written communication sent + to the Licensor or its representatives, including but not limited to + communication on electronic mailing lists, source code control systems, + and issue tracking systems that are managed by, or on behalf of, the + Licensor for the purpose of discussing and improving the Work, but + excluding communication that is conspicuously marked or otherwise + designated in writing by the copyright owner as "Not a Contribution." + + "Contributor" shall mean Licensor and any individual or Legal Entity + on behalf of whom a Contribution has been received by Licensor and + subsequently incorporated within the Work. + + 2. Grant of Copyright License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + copyright license to reproduce, prepare Derivative Works of, + publicly display, publicly perform, sublicense, and distribute the + Work and such Derivative Works in Source or Object form. + + 3. Grant of Patent License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + (except as stated in this section) patent license to make, have made, + use, offer to sell, sell, import, and otherwise transfer the Work, + where such license applies only to those patent claims licensable + by such Contributor that are necessarily infringed by their + Contribution(s) alone or by combination of their Contribution(s) + with the Work to which such Contribution(s) was submitted. 
If You + institute patent litigation against any entity (including a + cross-claim or counterclaim in a lawsuit) alleging that the Work + or a Contribution incorporated within the Work constitutes direct + or contributory patent infringement, then any patent licenses + granted to You under this License for that Work shall terminate + as of the date such litigation is filed. + + 4. Redistribution. You may reproduce and distribute copies of the + Work or Derivative Works thereof in any medium, with or without + modifications, and in Source or Object form, provided that You + meet the following conditions: + + (a) You must give any other recipients of the Work or + Derivative Works a copy of this License; and + + (b) You must cause any modified files to carry prominent notices + stating that You changed the files; and + + (c) You must retain, in the Source form of any Derivative Works + that You distribute, all copyright, patent, trademark, and + attribution notices from the Source form of the Work, + excluding those notices that do not pertain to any part of + the Derivative Works; and + + (d) If the Work includes a "NOTICE" text file as part of its + distribution, then any Derivative Works that You distribute must + include a readable copy of the attribution notices contained + within such NOTICE file, excluding those notices that do not + pertain to any part of the Derivative Works, in at least one + of the following places: within a NOTICE text file distributed + as part of the Derivative Works; within the Source form or + documentation, if provided along with the Derivative Works; or, + within a display generated by the Derivative Works, if and + wherever such third-party notices normally appear. The contents + of the NOTICE file are for informational purposes only and + do not modify the License. 
You may add Your own attribution + notices within Derivative Works that You distribute, alongside + or as an addendum to the NOTICE text from the Work, provided + that such additional attribution notices cannot be construed + as modifying the License. + + You may add Your own copyright statement to Your modifications and + may provide additional or different license terms and conditions + for use, reproduction, or distribution of Your modifications, or + for any such Derivative Works as a whole, provided Your use, + reproduction, and distribution of the Work otherwise complies with + the conditions stated in this License. + + 5. Submission of Contributions. Unless You explicitly state otherwise, + any Contribution intentionally submitted for inclusion in the Work + by You to the Licensor shall be under the terms and conditions of + this License, without any additional terms or conditions. + Notwithstanding the above, nothing herein shall supersede or modify + the terms of any separate license agreement you may have executed + with Licensor regarding such Contributions. + + 6. Trademarks. This License does not grant permission to use the trade + names, trademarks, service marks, or product names of the Licensor, + except as required for reasonable and customary use in describing the + origin of the Work and reproducing the content of the NOTICE file. + + 7. Disclaimer of Warranty. Unless required by applicable law or + agreed to in writing, Licensor provides the Work (and each + Contributor provides its Contributions) on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or + implied, including, without limitation, any warranties or conditions + of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A + PARTICULAR PURPOSE. You are solely responsible for determining the + appropriateness of using or redistributing the Work and assume any + risks associated with Your exercise of permissions under this License. + + 8. 
Limitation of Liability. In no event and under no legal theory, + whether in tort (including negligence), contract, or otherwise, + unless required by applicable law (such as deliberate and grossly + negligent acts) or agreed to in writing, shall any Contributor be + liable to You for damages, including any direct, indirect, special, + incidental, or consequential damages of any character arising as a + result of this License or out of the use or inability to use the + Work (including but not limited to damages for loss of goodwill, + work stoppage, computer failure or malfunction, or any and all + other commercial damages or losses), even if such Contributor + has been advised of the possibility of such damages. + + 9. Accepting Warranty or Additional Liability. While redistributing + the Work or Derivative Works thereof, You may choose to offer, + and charge a fee for, acceptance of support, warranty, indemnity, + or other liability obligations and/or rights consistent with this + License. However, in accepting such obligations, You may act only + on Your own behalf and on Your sole responsibility, not on behalf + of any other Contributor, and only if You agree to indemnify, + defend, and hold each Contributor harmless for any liability + incurred by, or claims asserted against, such Contributor by reason + of your accepting any such warranty or additional liability. + + END OF TERMS AND CONDITIONS + + APPENDIX: How to apply the Apache License to your work. + + To apply the Apache License to your work, attach the following + boilerplate notice, with the fields enclosed by brackets "{}" + replaced with your own identifying information. (Don't include + the brackets!) The text should be enclosed in the appropriate + comment syntax for the file format. We also recommend that a + file or class name and description of purpose be included on the + same "printed page" as the copyright notice for easier + identification within third-party archives. 
+ + Copyright 2018-2020 Amazon.com, Inc. or its affiliates. All Rights Reserved. + + Licensed under the Apache License, Version 2.0 (the "License"); + you may not use this file except in compliance with the License. + You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + + Unless required by applicable law or agreed to in writing, software + distributed under the License is distributed on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + See the License for the specific language governing permissions and + limitations under the License. \ No newline at end of file diff --git a/amplify/functions/fetchDocuments/node_modules/@smithy/util-body-length-browser/README.md b/amplify/functions/fetchDocuments/node_modules/@smithy/util-body-length-browser/README.md new file mode 100644 index 0000000..460d092 --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@smithy/util-body-length-browser/README.md @@ -0,0 +1,12 @@ +# @smithy/util-body-length-browser + +[![NPM version](https://img.shields.io/npm/v/@smithy/util-body-length-browser/latest.svg)](https://www.npmjs.com/package/@smithy/util-body-length-browser) +[![NPM downloads](https://img.shields.io/npm/dm/@smithy/util-body-length-browser.svg)](https://www.npmjs.com/package/@smithy/util-body-length-browser) + +Determines the length of a request body in browsers + +> An internal package + +## Usage + +You probably shouldn't, at least directly. 
diff --git a/amplify/functions/fetchDocuments/node_modules/@smithy/util-body-length-browser/dist-cjs/calculateBodyLength.js b/amplify/functions/fetchDocuments/node_modules/@smithy/util-body-length-browser/dist-cjs/calculateBodyLength.js new file mode 100644 index 0000000..532e610 --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@smithy/util-body-length-browser/dist-cjs/calculateBodyLength.js @@ -0,0 +1 @@ +module.exports = require("./index.js"); \ No newline at end of file diff --git a/amplify/functions/fetchDocuments/node_modules/@smithy/util-body-length-browser/dist-cjs/index.js b/amplify/functions/fetchDocuments/node_modules/@smithy/util-body-length-browser/dist-cjs/index.js new file mode 100644 index 0000000..9e872bc --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@smithy/util-body-length-browser/dist-cjs/index.js @@ -0,0 +1,57 @@ +var __defProp = Object.defineProperty; +var __getOwnPropDesc = Object.getOwnPropertyDescriptor; +var __getOwnPropNames = Object.getOwnPropertyNames; +var __hasOwnProp = Object.prototype.hasOwnProperty; +var __name = (target, value) => __defProp(target, "name", { value, configurable: true }); +var __export = (target, all) => { + for (var name in all) + __defProp(target, name, { get: all[name], enumerable: true }); +}; +var __copyProps = (to, from, except, desc) => { + if (from && typeof from === "object" || typeof from === "function") { + for (let key of __getOwnPropNames(from)) + if (!__hasOwnProp.call(to, key) && key !== except) + __defProp(to, key, { get: () => from[key], enumerable: !(desc = __getOwnPropDesc(from, key)) || desc.enumerable }); + } + return to; +}; +var __toCommonJS = (mod) => __copyProps(__defProp({}, "__esModule", { value: true }), mod); + +// src/index.ts +var src_exports = {}; +__export(src_exports, { + calculateBodyLength: () => calculateBodyLength +}); +module.exports = __toCommonJS(src_exports); + +// src/calculateBodyLength.ts +var TEXT_ENCODER = typeof TextEncoder == 
"function" ? new TextEncoder() : null; +var calculateBodyLength = /* @__PURE__ */ __name((body) => { + if (typeof body === "string") { + if (TEXT_ENCODER) { + return TEXT_ENCODER.encode(body).byteLength; + } + let len = body.length; + for (let i = len - 1; i >= 0; i--) { + const code = body.charCodeAt(i); + if (code > 127 && code <= 2047) + len++; + else if (code > 2047 && code <= 65535) + len += 2; + if (code >= 56320 && code <= 57343) + i--; + } + return len; + } else if (typeof body.byteLength === "number") { + return body.byteLength; + } else if (typeof body.size === "number") { + return body.size; + } + throw new Error(`Body Length computation failed for ${body}`); +}, "calculateBodyLength"); +// Annotate the CommonJS export names for ESM import in node: + +0 && (module.exports = { + calculateBodyLength +}); + diff --git a/amplify/functions/fetchDocuments/node_modules/@smithy/util-body-length-browser/dist-es/calculateBodyLength.js b/amplify/functions/fetchDocuments/node_modules/@smithy/util-body-length-browser/dist-es/calculateBodyLength.js new file mode 100644 index 0000000..6b994ca --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@smithy/util-body-length-browser/dist-es/calculateBodyLength.js @@ -0,0 +1,26 @@ +const TEXT_ENCODER = typeof TextEncoder == "function" ? 
new TextEncoder() : null; +export const calculateBodyLength = (body) => { + if (typeof body === "string") { + if (TEXT_ENCODER) { + return TEXT_ENCODER.encode(body).byteLength; + } + let len = body.length; + for (let i = len - 1; i >= 0; i--) { + const code = body.charCodeAt(i); + if (code > 0x7f && code <= 0x7ff) + len++; + else if (code > 0x7ff && code <= 0xffff) + len += 2; + if (code >= 0xdc00 && code <= 0xdfff) + i--; + } + return len; + } + else if (typeof body.byteLength === "number") { + return body.byteLength; + } + else if (typeof body.size === "number") { + return body.size; + } + throw new Error(`Body Length computation failed for ${body}`); +}; diff --git a/amplify/functions/fetchDocuments/node_modules/@smithy/util-body-length-browser/dist-es/index.js b/amplify/functions/fetchDocuments/node_modules/@smithy/util-body-length-browser/dist-es/index.js new file mode 100644 index 0000000..16ba478 --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@smithy/util-body-length-browser/dist-es/index.js @@ -0,0 +1 @@ +export * from "./calculateBodyLength"; diff --git a/amplify/functions/fetchDocuments/node_modules/@smithy/util-body-length-browser/dist-types/calculateBodyLength.d.ts b/amplify/functions/fetchDocuments/node_modules/@smithy/util-body-length-browser/dist-types/calculateBodyLength.d.ts new file mode 100644 index 0000000..8e1bdb0 --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@smithy/util-body-length-browser/dist-types/calculateBodyLength.d.ts @@ -0,0 +1,4 @@ +/** + * @internal + */ +export declare const calculateBodyLength: (body: any) => number | undefined; diff --git a/amplify/functions/fetchDocuments/node_modules/@smithy/util-body-length-browser/dist-types/index.d.ts b/amplify/functions/fetchDocuments/node_modules/@smithy/util-body-length-browser/dist-types/index.d.ts new file mode 100644 index 0000000..7b4a0d7 --- /dev/null +++ 
b/amplify/functions/fetchDocuments/node_modules/@smithy/util-body-length-browser/dist-types/index.d.ts @@ -0,0 +1,4 @@ +/** + * @internal + */ +export * from "./calculateBodyLength"; diff --git a/amplify/functions/fetchDocuments/node_modules/@smithy/util-body-length-browser/dist-types/ts3.4/calculateBodyLength.d.ts b/amplify/functions/fetchDocuments/node_modules/@smithy/util-body-length-browser/dist-types/ts3.4/calculateBodyLength.d.ts new file mode 100644 index 0000000..3260536 --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@smithy/util-body-length-browser/dist-types/ts3.4/calculateBodyLength.d.ts @@ -0,0 +1,4 @@ +/** + * @internal + */ +export declare const calculateBodyLength: (body: any) => number | undefined; diff --git a/amplify/functions/fetchDocuments/node_modules/@smithy/util-body-length-browser/dist-types/ts3.4/index.d.ts b/amplify/functions/fetchDocuments/node_modules/@smithy/util-body-length-browser/dist-types/ts3.4/index.d.ts new file mode 100644 index 0000000..ab6cb83 --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@smithy/util-body-length-browser/dist-types/ts3.4/index.d.ts @@ -0,0 +1,4 @@ +/** + * @internal + */ +export * from "./calculateBodyLength"; diff --git a/amplify/functions/fetchDocuments/node_modules/@smithy/util-body-length-browser/package.json b/amplify/functions/fetchDocuments/node_modules/@smithy/util-body-length-browser/package.json new file mode 100644 index 0000000..b571489 --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@smithy/util-body-length-browser/package.json @@ -0,0 +1,60 @@ +{ + "name": "@smithy/util-body-length-browser", + "description": "Determines the length of a request body in browsers", + "version": "4.0.0", + "scripts": { + "build": "concurrently 'yarn:build:cjs' 'yarn:build:es' 'yarn:build:types && yarn build:types:downlevel'", + "build:cjs": "node ../../scripts/inline util-body-length-browser", + "build:es": "yarn g:tsc -p tsconfig.es.json", + "build:types": 
"yarn g:tsc -p tsconfig.types.json", + "build:types:downlevel": "rimraf dist-types/ts3.4 && downlevel-dts dist-types dist-types/ts3.4", + "stage-release": "rimraf ./.release && yarn pack && mkdir ./.release && tar zxvf ./package.tgz --directory ./.release && rm ./package.tgz", + "clean": "rimraf ./dist-* && rimraf *.tsbuildinfo || exit 0", + "lint": "eslint -c ../../.eslintrc.js \"src/**/*.ts\"", + "format": "prettier --config ../../prettier.config.js --ignore-path ../.prettierignore --write \"**/*.{ts,md,json}\"", + "test": "yarn g:vitest run", + "test:watch": "yarn g:vitest watch" + }, + "main": "./dist-cjs/index.js", + "module": "./dist-es/index.js", + "types": "./dist-types/index.d.ts", + "author": { + "name": "AWS SDK for JavaScript Team", + "url": "https://aws.amazon.com/javascript/" + }, + "license": "Apache-2.0", + "dependencies": { + "tslib": "^2.6.2" + }, + "typesVersions": { + "<4.0": { + "dist-types/*": [ + "dist-types/ts3.4/*" + ] + } + }, + "files": [ + "dist-*/**" + ], + "homepage": "https://github.com/awslabs/smithy-typescript/tree/main/packages/util-body-length-browser", + "repository": { + "type": "git", + "url": "https://github.com/awslabs/smithy-typescript.git", + "directory": "packages/util-body-length-browser" + }, + "devDependencies": { + "concurrently": "7.0.0", + "downlevel-dts": "0.10.1", + "rimraf": "3.0.2", + "typedoc": "0.23.23" + }, + "typedoc": { + "entryPoint": "src/index.ts" + }, + "publishConfig": { + "directory": ".release/package" + }, + "engines": { + "node": ">=18.0.0" + } +} \ No newline at end of file diff --git a/amplify/functions/fetchDocuments/node_modules/@smithy/util-body-length-node/LICENSE b/amplify/functions/fetchDocuments/node_modules/@smithy/util-body-length-node/LICENSE new file mode 100644 index 0000000..7b6491b --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@smithy/util-body-length-node/LICENSE @@ -0,0 +1,201 @@ +Apache License + Version 2.0, January 2004 + http://www.apache.org/licenses/ + + 
TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION + + 1. Definitions. + + "License" shall mean the terms and conditions for use, reproduction, + and distribution as defined by Sections 1 through 9 of this document. + + "Licensor" shall mean the copyright owner or entity authorized by + the copyright owner that is granting the License. + + "Legal Entity" shall mean the union of the acting entity and all + other entities that control, are controlled by, or are under common + control with that entity. For the purposes of this definition, + "control" means (i) the power, direct or indirect, to cause the + direction or management of such entity, whether by contract or + otherwise, or (ii) ownership of fifty percent (50%) or more of the + outstanding shares, or (iii) beneficial ownership of such entity. + + "You" (or "Your") shall mean an individual or Legal Entity + exercising permissions granted by this License. + + "Source" form shall mean the preferred form for making modifications, + including but not limited to software source code, documentation + source, and configuration files. + + "Object" form shall mean any form resulting from mechanical + transformation or translation of a Source form, including but + not limited to compiled object code, generated documentation, + and conversions to other media types. + + "Work" shall mean the work of authorship, whether in Source or + Object form, made available under the License, as indicated by a + copyright notice that is included in or attached to the work + (an example is provided in the Appendix below). + + "Derivative Works" shall mean any work, whether in Source or Object + form, that is based on (or derived from) the Work and for which the + editorial revisions, annotations, elaborations, or other modifications + represent, as a whole, an original work of authorship. 
For the purposes + of this License, Derivative Works shall not include works that remain + separable from, or merely link (or bind by name) to the interfaces of, + the Work and Derivative Works thereof. + + "Contribution" shall mean any work of authorship, including + the original version of the Work and any modifications or additions + to that Work or Derivative Works thereof, that is intentionally + submitted to Licensor for inclusion in the Work by the copyright owner + or by an individual or Legal Entity authorized to submit on behalf of + the copyright owner. For the purposes of this definition, "submitted" + means any form of electronic, verbal, or written communication sent + to the Licensor or its representatives, including but not limited to + communication on electronic mailing lists, source code control systems, + and issue tracking systems that are managed by, or on behalf of, the + Licensor for the purpose of discussing and improving the Work, but + excluding communication that is conspicuously marked or otherwise + designated in writing by the copyright owner as "Not a Contribution." + + "Contributor" shall mean Licensor and any individual or Legal Entity + on behalf of whom a Contribution has been received by Licensor and + subsequently incorporated within the Work. + + 2. Grant of Copyright License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + copyright license to reproduce, prepare Derivative Works of, + publicly display, publicly perform, sublicense, and distribute the + Work and such Derivative Works in Source or Object form. + + 3. Grant of Patent License. 
Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + (except as stated in this section) patent license to make, have made, + use, offer to sell, sell, import, and otherwise transfer the Work, + where such license applies only to those patent claims licensable + by such Contributor that are necessarily infringed by their + Contribution(s) alone or by combination of their Contribution(s) + with the Work to which such Contribution(s) was submitted. If You + institute patent litigation against any entity (including a + cross-claim or counterclaim in a lawsuit) alleging that the Work + or a Contribution incorporated within the Work constitutes direct + or contributory patent infringement, then any patent licenses + granted to You under this License for that Work shall terminate + as of the date such litigation is filed. + + 4. Redistribution. You may reproduce and distribute copies of the + Work or Derivative Works thereof in any medium, with or without + modifications, and in Source or Object form, provided that You + meet the following conditions: + + (a) You must give any other recipients of the Work or + Derivative Works a copy of this License; and + + (b) You must cause any modified files to carry prominent notices + stating that You changed the files; and + + (c) You must retain, in the Source form of any Derivative Works + that You distribute, all copyright, patent, trademark, and + attribution notices from the Source form of the Work, + excluding those notices that do not pertain to any part of + the Derivative Works; and + + (d) If the Work includes a "NOTICE" text file as part of its + distribution, then any Derivative Works that You distribute must + include a readable copy of the attribution notices contained + within such NOTICE file, excluding those notices that do not + pertain to any part of the Derivative Works, in at least one + of 
the following places: within a NOTICE text file distributed + as part of the Derivative Works; within the Source form or + documentation, if provided along with the Derivative Works; or, + within a display generated by the Derivative Works, if and + wherever such third-party notices normally appear. The contents + of the NOTICE file are for informational purposes only and + do not modify the License. You may add Your own attribution + notices within Derivative Works that You distribute, alongside + or as an addendum to the NOTICE text from the Work, provided + that such additional attribution notices cannot be construed + as modifying the License. + + You may add Your own copyright statement to Your modifications and + may provide additional or different license terms and conditions + for use, reproduction, or distribution of Your modifications, or + for any such Derivative Works as a whole, provided Your use, + reproduction, and distribution of the Work otherwise complies with + the conditions stated in this License. + + 5. Submission of Contributions. Unless You explicitly state otherwise, + any Contribution intentionally submitted for inclusion in the Work + by You to the Licensor shall be under the terms and conditions of + this License, without any additional terms or conditions. + Notwithstanding the above, nothing herein shall supersede or modify + the terms of any separate license agreement you may have executed + with Licensor regarding such Contributions. + + 6. Trademarks. This License does not grant permission to use the trade + names, trademarks, service marks, or product names of the Licensor, + except as required for reasonable and customary use in describing the + origin of the Work and reproducing the content of the NOTICE file. + + 7. Disclaimer of Warranty. 
Unless required by applicable law or + agreed to in writing, Licensor provides the Work (and each + Contributor provides its Contributions) on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or + implied, including, without limitation, any warranties or conditions + of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A + PARTICULAR PURPOSE. You are solely responsible for determining the + appropriateness of using or redistributing the Work and assume any + risks associated with Your exercise of permissions under this License. + + 8. Limitation of Liability. In no event and under no legal theory, + whether in tort (including negligence), contract, or otherwise, + unless required by applicable law (such as deliberate and grossly + negligent acts) or agreed to in writing, shall any Contributor be + liable to You for damages, including any direct, indirect, special, + incidental, or consequential damages of any character arising as a + result of this License or out of the use or inability to use the + Work (including but not limited to damages for loss of goodwill, + work stoppage, computer failure or malfunction, or any and all + other commercial damages or losses), even if such Contributor + has been advised of the possibility of such damages. + + 9. Accepting Warranty or Additional Liability. While redistributing + the Work or Derivative Works thereof, You may choose to offer, + and charge a fee for, acceptance of support, warranty, indemnity, + or other liability obligations and/or rights consistent with this + License. However, in accepting such obligations, You may act only + on Your own behalf and on Your sole responsibility, not on behalf + of any other Contributor, and only if You agree to indemnify, + defend, and hold each Contributor harmless for any liability + incurred by, or claims asserted against, such Contributor by reason + of your accepting any such warranty or additional liability. 
+ + END OF TERMS AND CONDITIONS + + APPENDIX: How to apply the Apache License to your work. + + To apply the Apache License to your work, attach the following + boilerplate notice, with the fields enclosed by brackets "{}" + replaced with your own identifying information. (Don't include + the brackets!) The text should be enclosed in the appropriate + comment syntax for the file format. We also recommend that a + file or class name and description of purpose be included on the + same "printed page" as the copyright notice for easier + identification within third-party archives. + + Copyright 2018-2020 Amazon.com, Inc. or its affiliates. All Rights Reserved. + + Licensed under the Apache License, Version 2.0 (the "License"); + you may not use this file except in compliance with the License. + You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + + Unless required by applicable law or agreed to in writing, software + distributed under the License is distributed on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + See the License for the specific language governing permissions and + limitations under the License. 
\ No newline at end of file diff --git a/amplify/functions/fetchDocuments/node_modules/@smithy/util-body-length-node/README.md b/amplify/functions/fetchDocuments/node_modules/@smithy/util-body-length-node/README.md new file mode 100644 index 0000000..9a80efe --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@smithy/util-body-length-node/README.md @@ -0,0 +1,12 @@ +# @smithy/util-body-length-node + +[![NPM version](https://img.shields.io/npm/v/@smithy/util-body-length-node/latest.svg)](https://www.npmjs.com/package/@smithy/util-body-length-node) +[![NPM downloads](https://img.shields.io/npm/dm/@smithy/util-body-length-node.svg)](https://www.npmjs.com/package/@smithy/util-body-length-node) + +Determines the length of a request body in node.js + +> An internal package + +## Usage + +You probably shouldn't, at least directly. diff --git a/amplify/functions/fetchDocuments/node_modules/@smithy/util-body-length-node/dist-cjs/calculateBodyLength.js b/amplify/functions/fetchDocuments/node_modules/@smithy/util-body-length-node/dist-cjs/calculateBodyLength.js new file mode 100644 index 0000000..532e610 --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@smithy/util-body-length-node/dist-cjs/calculateBodyLength.js @@ -0,0 +1 @@ +module.exports = require("./index.js"); \ No newline at end of file diff --git a/amplify/functions/fetchDocuments/node_modules/@smithy/util-body-length-node/dist-cjs/index.js b/amplify/functions/fetchDocuments/node_modules/@smithy/util-body-length-node/dist-cjs/index.js new file mode 100644 index 0000000..1ecdc79 --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@smithy/util-body-length-node/dist-cjs/index.js @@ -0,0 +1,53 @@ +var __defProp = Object.defineProperty; +var __getOwnPropDesc = Object.getOwnPropertyDescriptor; +var __getOwnPropNames = Object.getOwnPropertyNames; +var __hasOwnProp = Object.prototype.hasOwnProperty; +var __name = (target, value) => __defProp(target, "name", { value, configurable: 
true }); +var __export = (target, all) => { + for (var name in all) + __defProp(target, name, { get: all[name], enumerable: true }); +}; +var __copyProps = (to, from, except, desc) => { + if (from && typeof from === "object" || typeof from === "function") { + for (let key of __getOwnPropNames(from)) + if (!__hasOwnProp.call(to, key) && key !== except) + __defProp(to, key, { get: () => from[key], enumerable: !(desc = __getOwnPropDesc(from, key)) || desc.enumerable }); + } + return to; +}; +var __toCommonJS = (mod) => __copyProps(__defProp({}, "__esModule", { value: true }), mod); + +// src/index.ts +var src_exports = {}; +__export(src_exports, { + calculateBodyLength: () => calculateBodyLength +}); +module.exports = __toCommonJS(src_exports); + +// src/calculateBodyLength.ts +var import_fs = require("fs"); +var calculateBodyLength = /* @__PURE__ */ __name((body) => { + if (!body) { + return 0; + } + if (typeof body === "string") { + return Buffer.byteLength(body); + } else if (typeof body.byteLength === "number") { + return body.byteLength; + } else if (typeof body.size === "number") { + return body.size; + } else if (typeof body.start === "number" && typeof body.end === "number") { + return body.end + 1 - body.start; + } else if (typeof body.path === "string" || Buffer.isBuffer(body.path)) { + return (0, import_fs.lstatSync)(body.path).size; + } else if (typeof body.fd === "number") { + return (0, import_fs.fstatSync)(body.fd).size; + } + throw new Error(`Body Length computation failed for ${body}`); +}, "calculateBodyLength"); +// Annotate the CommonJS export names for ESM import in node: + +0 && (module.exports = { + calculateBodyLength +}); + diff --git a/amplify/functions/fetchDocuments/node_modules/@smithy/util-body-length-node/dist-es/calculateBodyLength.js b/amplify/functions/fetchDocuments/node_modules/@smithy/util-body-length-node/dist-es/calculateBodyLength.js new file mode 100644 index 0000000..857cff5 --- /dev/null +++ 
b/amplify/functions/fetchDocuments/node_modules/@smithy/util-body-length-node/dist-es/calculateBodyLength.js @@ -0,0 +1,25 @@ +import { fstatSync, lstatSync } from "fs"; +export const calculateBodyLength = (body) => { + if (!body) { + return 0; + } + if (typeof body === "string") { + return Buffer.byteLength(body); + } + else if (typeof body.byteLength === "number") { + return body.byteLength; + } + else if (typeof body.size === "number") { + return body.size; + } + else if (typeof body.start === "number" && typeof body.end === "number") { + return body.end + 1 - body.start; + } + else if (typeof body.path === "string" || Buffer.isBuffer(body.path)) { + return lstatSync(body.path).size; + } + else if (typeof body.fd === "number") { + return fstatSync(body.fd).size; + } + throw new Error(`Body Length computation failed for ${body}`); +}; diff --git a/amplify/functions/fetchDocuments/node_modules/@smithy/util-body-length-node/dist-es/index.js b/amplify/functions/fetchDocuments/node_modules/@smithy/util-body-length-node/dist-es/index.js new file mode 100644 index 0000000..16ba478 --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@smithy/util-body-length-node/dist-es/index.js @@ -0,0 +1 @@ +export * from "./calculateBodyLength"; diff --git a/amplify/functions/fetchDocuments/node_modules/@smithy/util-body-length-node/dist-types/calculateBodyLength.d.ts b/amplify/functions/fetchDocuments/node_modules/@smithy/util-body-length-node/dist-types/calculateBodyLength.d.ts new file mode 100644 index 0000000..8e1bdb0 --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@smithy/util-body-length-node/dist-types/calculateBodyLength.d.ts @@ -0,0 +1,4 @@ +/** + * @internal + */ +export declare const calculateBodyLength: (body: any) => number | undefined; diff --git a/amplify/functions/fetchDocuments/node_modules/@smithy/util-body-length-node/dist-types/index.d.ts 
b/amplify/functions/fetchDocuments/node_modules/@smithy/util-body-length-node/dist-types/index.d.ts new file mode 100644 index 0000000..7b4a0d7 --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@smithy/util-body-length-node/dist-types/index.d.ts @@ -0,0 +1,4 @@ +/** + * @internal + */ +export * from "./calculateBodyLength"; diff --git a/amplify/functions/fetchDocuments/node_modules/@smithy/util-body-length-node/dist-types/ts3.4/calculateBodyLength.d.ts b/amplify/functions/fetchDocuments/node_modules/@smithy/util-body-length-node/dist-types/ts3.4/calculateBodyLength.d.ts new file mode 100644 index 0000000..3260536 --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@smithy/util-body-length-node/dist-types/ts3.4/calculateBodyLength.d.ts @@ -0,0 +1,4 @@ +/** + * @internal + */ +export declare const calculateBodyLength: (body: any) => number | undefined; diff --git a/amplify/functions/fetchDocuments/node_modules/@smithy/util-body-length-node/dist-types/ts3.4/index.d.ts b/amplify/functions/fetchDocuments/node_modules/@smithy/util-body-length-node/dist-types/ts3.4/index.d.ts new file mode 100644 index 0000000..ab6cb83 --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@smithy/util-body-length-node/dist-types/ts3.4/index.d.ts @@ -0,0 +1,4 @@ +/** + * @internal + */ +export * from "./calculateBodyLength"; diff --git a/amplify/functions/fetchDocuments/node_modules/@smithy/util-body-length-node/package.json b/amplify/functions/fetchDocuments/node_modules/@smithy/util-body-length-node/package.json new file mode 100644 index 0000000..25b0f7a --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@smithy/util-body-length-node/package.json @@ -0,0 +1,61 @@ +{ + "name": "@smithy/util-body-length-node", + "description": "Determines the length of a request body in node.js", + "version": "4.0.0", + "scripts": { + "build": "concurrently 'yarn:build:cjs' 'yarn:build:es' 'yarn:build:types && yarn build:types:downlevel'", + 
"build:cjs": "node ../../scripts/inline util-body-length-node", + "build:es": "yarn g:tsc -p tsconfig.es.json", + "build:types": "yarn g:tsc -p tsconfig.types.json", + "build:types:downlevel": "rimraf dist-types/ts3.4 && downlevel-dts dist-types dist-types/ts3.4", + "stage-release": "rimraf ./.release && yarn pack && mkdir ./.release && tar zxvf ./package.tgz --directory ./.release && rm ./package.tgz", + "clean": "rimraf ./dist-* && rimraf *.tsbuildinfo || exit 0", + "lint": "eslint -c ../../.eslintrc.js \"src/**/*.ts\"", + "format": "prettier --config ../../prettier.config.js --ignore-path ../.prettierignore --write \"**/*.{ts,md,json}\"", + "test": "yarn g:vitest run", + "test:watch": "yarn g:vitest watch" + }, + "devDependencies": { + "@types/node": "^18.11.9", + "concurrently": "7.0.0", + "downlevel-dts": "0.10.1", + "rimraf": "3.0.2", + "typedoc": "0.23.23" + }, + "main": "./dist-cjs/index.js", + "module": "./dist-es/index.js", + "types": "./dist-types/index.d.ts", + "author": { + "name": "AWS SDK for JavaScript Team", + "url": "https://aws.amazon.com/javascript/" + }, + "license": "Apache-2.0", + "dependencies": { + "tslib": "^2.6.2" + }, + "engines": { + "node": ">=18.0.0" + }, + "typesVersions": { + "<4.0": { + "dist-types/*": [ + "dist-types/ts3.4/*" + ] + } + }, + "files": [ + "dist-*/**" + ], + "homepage": "https://github.com/awslabs/smithy-typescript/tree/main/packages/util-body-length-node", + "repository": { + "type": "git", + "url": "https://github.com/awslabs/smithy-typescript.git", + "directory": "packages/util-body-length-node" + }, + "typedoc": { + "entryPoint": "src/index.ts" + }, + "publishConfig": { + "directory": ".release/package" + } +} \ No newline at end of file diff --git a/amplify/functions/fetchDocuments/node_modules/@smithy/util-buffer-from/LICENSE b/amplify/functions/fetchDocuments/node_modules/@smithy/util-buffer-from/LICENSE new file mode 100644 index 0000000..7b6491b --- /dev/null +++ 
b/amplify/functions/fetchDocuments/node_modules/@smithy/util-buffer-from/LICENSE @@ -0,0 +1,201 @@ +Apache License + Version 2.0, January 2004 + http://www.apache.org/licenses/ + + TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION + + 1. Definitions. + + "License" shall mean the terms and conditions for use, reproduction, + and distribution as defined by Sections 1 through 9 of this document. + + "Licensor" shall mean the copyright owner or entity authorized by + the copyright owner that is granting the License. + + "Legal Entity" shall mean the union of the acting entity and all + other entities that control, are controlled by, or are under common + control with that entity. For the purposes of this definition, + "control" means (i) the power, direct or indirect, to cause the + direction or management of such entity, whether by contract or + otherwise, or (ii) ownership of fifty percent (50%) or more of the + outstanding shares, or (iii) beneficial ownership of such entity. + + "You" (or "Your") shall mean an individual or Legal Entity + exercising permissions granted by this License. + + "Source" form shall mean the preferred form for making modifications, + including but not limited to software source code, documentation + source, and configuration files. + + "Object" form shall mean any form resulting from mechanical + transformation or translation of a Source form, including but + not limited to compiled object code, generated documentation, + and conversions to other media types. + + "Work" shall mean the work of authorship, whether in Source or + Object form, made available under the License, as indicated by a + copyright notice that is included in or attached to the work + (an example is provided in the Appendix below). 
+ + "Derivative Works" shall mean any work, whether in Source or Object + form, that is based on (or derived from) the Work and for which the + editorial revisions, annotations, elaborations, or other modifications + represent, as a whole, an original work of authorship. For the purposes + of this License, Derivative Works shall not include works that remain + separable from, or merely link (or bind by name) to the interfaces of, + the Work and Derivative Works thereof. + + "Contribution" shall mean any work of authorship, including + the original version of the Work and any modifications or additions + to that Work or Derivative Works thereof, that is intentionally + submitted to Licensor for inclusion in the Work by the copyright owner + or by an individual or Legal Entity authorized to submit on behalf of + the copyright owner. For the purposes of this definition, "submitted" + means any form of electronic, verbal, or written communication sent + to the Licensor or its representatives, including but not limited to + communication on electronic mailing lists, source code control systems, + and issue tracking systems that are managed by, or on behalf of, the + Licensor for the purpose of discussing and improving the Work, but + excluding communication that is conspicuously marked or otherwise + designated in writing by the copyright owner as "Not a Contribution." + + "Contributor" shall mean Licensor and any individual or Legal Entity + on behalf of whom a Contribution has been received by Licensor and + subsequently incorporated within the Work. + + 2. Grant of Copyright License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + copyright license to reproduce, prepare Derivative Works of, + publicly display, publicly perform, sublicense, and distribute the + Work and such Derivative Works in Source or Object form. + + 3. 
Grant of Patent License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + (except as stated in this section) patent license to make, have made, + use, offer to sell, sell, import, and otherwise transfer the Work, + where such license applies only to those patent claims licensable + by such Contributor that are necessarily infringed by their + Contribution(s) alone or by combination of their Contribution(s) + with the Work to which such Contribution(s) was submitted. If You + institute patent litigation against any entity (including a + cross-claim or counterclaim in a lawsuit) alleging that the Work + or a Contribution incorporated within the Work constitutes direct + or contributory patent infringement, then any patent licenses + granted to You under this License for that Work shall terminate + as of the date such litigation is filed. + + 4. Redistribution. You may reproduce and distribute copies of the + Work or Derivative Works thereof in any medium, with or without + modifications, and in Source or Object form, provided that You + meet the following conditions: + + (a) You must give any other recipients of the Work or + Derivative Works a copy of this License; and + + (b) You must cause any modified files to carry prominent notices + stating that You changed the files; and + + (c) You must retain, in the Source form of any Derivative Works + that You distribute, all copyright, patent, trademark, and + attribution notices from the Source form of the Work, + excluding those notices that do not pertain to any part of + the Derivative Works; and + + (d) If the Work includes a "NOTICE" text file as part of its + distribution, then any Derivative Works that You distribute must + include a readable copy of the attribution notices contained + within such NOTICE file, excluding those notices that do not + pertain to any part of the Derivative 
Works, in at least one + of the following places: within a NOTICE text file distributed + as part of the Derivative Works; within the Source form or + documentation, if provided along with the Derivative Works; or, + within a display generated by the Derivative Works, if and + wherever such third-party notices normally appear. The contents + of the NOTICE file are for informational purposes only and + do not modify the License. You may add Your own attribution + notices within Derivative Works that You distribute, alongside + or as an addendum to the NOTICE text from the Work, provided + that such additional attribution notices cannot be construed + as modifying the License. + + You may add Your own copyright statement to Your modifications and + may provide additional or different license terms and conditions + for use, reproduction, or distribution of Your modifications, or + for any such Derivative Works as a whole, provided Your use, + reproduction, and distribution of the Work otherwise complies with + the conditions stated in this License. + + 5. Submission of Contributions. Unless You explicitly state otherwise, + any Contribution intentionally submitted for inclusion in the Work + by You to the Licensor shall be under the terms and conditions of + this License, without any additional terms or conditions. + Notwithstanding the above, nothing herein shall supersede or modify + the terms of any separate license agreement you may have executed + with Licensor regarding such Contributions. + + 6. Trademarks. This License does not grant permission to use the trade + names, trademarks, service marks, or product names of the Licensor, + except as required for reasonable and customary use in describing the + origin of the Work and reproducing the content of the NOTICE file. + + 7. Disclaimer of Warranty. 
Unless required by applicable law or + agreed to in writing, Licensor provides the Work (and each + Contributor provides its Contributions) on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or + implied, including, without limitation, any warranties or conditions + of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A + PARTICULAR PURPOSE. You are solely responsible for determining the + appropriateness of using or redistributing the Work and assume any + risks associated with Your exercise of permissions under this License. + + 8. Limitation of Liability. In no event and under no legal theory, + whether in tort (including negligence), contract, or otherwise, + unless required by applicable law (such as deliberate and grossly + negligent acts) or agreed to in writing, shall any Contributor be + liable to You for damages, including any direct, indirect, special, + incidental, or consequential damages of any character arising as a + result of this License or out of the use or inability to use the + Work (including but not limited to damages for loss of goodwill, + work stoppage, computer failure or malfunction, or any and all + other commercial damages or losses), even if such Contributor + has been advised of the possibility of such damages. + + 9. Accepting Warranty or Additional Liability. While redistributing + the Work or Derivative Works thereof, You may choose to offer, + and charge a fee for, acceptance of support, warranty, indemnity, + or other liability obligations and/or rights consistent with this + License. However, in accepting such obligations, You may act only + on Your own behalf and on Your sole responsibility, not on behalf + of any other Contributor, and only if You agree to indemnify, + defend, and hold each Contributor harmless for any liability + incurred by, or claims asserted against, such Contributor by reason + of your accepting any such warranty or additional liability. 
+ + END OF TERMS AND CONDITIONS + + APPENDIX: How to apply the Apache License to your work. + + To apply the Apache License to your work, attach the following + boilerplate notice, with the fields enclosed by brackets "{}" + replaced with your own identifying information. (Don't include + the brackets!) The text should be enclosed in the appropriate + comment syntax for the file format. We also recommend that a + file or class name and description of purpose be included on the + same "printed page" as the copyright notice for easier + identification within third-party archives. + + Copyright 2018-2020 Amazon.com, Inc. or its affiliates. All Rights Reserved. + + Licensed under the Apache License, Version 2.0 (the "License"); + you may not use this file except in compliance with the License. + You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + + Unless required by applicable law or agreed to in writing, software + distributed under the License is distributed on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + See the License for the specific language governing permissions and + limitations under the License. \ No newline at end of file diff --git a/amplify/functions/fetchDocuments/node_modules/@smithy/util-buffer-from/README.md b/amplify/functions/fetchDocuments/node_modules/@smithy/util-buffer-from/README.md new file mode 100644 index 0000000..c896b04 --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@smithy/util-buffer-from/README.md @@ -0,0 +1,10 @@ +# @smithy/util-buffer-from + +[![NPM version](https://img.shields.io/npm/v/@smithy/util-buffer-from/latest.svg)](https://www.npmjs.com/package/@smithy/util-buffer-from) +[![NPM downloads](https://img.shields.io/npm/dm/@smithy/util-buffer-from.svg)](https://www.npmjs.com/package/@smithy/util-buffer-from) + +> An internal package + +## Usage + +You probably shouldn't, at least directly. 
diff --git a/amplify/functions/fetchDocuments/node_modules/@smithy/util-buffer-from/dist-cjs/index.js b/amplify/functions/fetchDocuments/node_modules/@smithy/util-buffer-from/dist-cjs/index.js new file mode 100644 index 0000000..c6738d9 --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@smithy/util-buffer-from/dist-cjs/index.js @@ -0,0 +1,47 @@ +var __defProp = Object.defineProperty; +var __getOwnPropDesc = Object.getOwnPropertyDescriptor; +var __getOwnPropNames = Object.getOwnPropertyNames; +var __hasOwnProp = Object.prototype.hasOwnProperty; +var __name = (target, value) => __defProp(target, "name", { value, configurable: true }); +var __export = (target, all) => { + for (var name in all) + __defProp(target, name, { get: all[name], enumerable: true }); +}; +var __copyProps = (to, from, except, desc) => { + if (from && typeof from === "object" || typeof from === "function") { + for (let key of __getOwnPropNames(from)) + if (!__hasOwnProp.call(to, key) && key !== except) + __defProp(to, key, { get: () => from[key], enumerable: !(desc = __getOwnPropDesc(from, key)) || desc.enumerable }); + } + return to; +}; +var __toCommonJS = (mod) => __copyProps(__defProp({}, "__esModule", { value: true }), mod); + +// src/index.ts +var src_exports = {}; +__export(src_exports, { + fromArrayBuffer: () => fromArrayBuffer, + fromString: () => fromString +}); +module.exports = __toCommonJS(src_exports); +var import_is_array_buffer = require("@smithy/is-array-buffer"); +var import_buffer = require("buffer"); +var fromArrayBuffer = /* @__PURE__ */ __name((input, offset = 0, length = input.byteLength - offset) => { + if (!(0, import_is_array_buffer.isArrayBuffer)(input)) { + throw new TypeError(`The "input" argument must be ArrayBuffer. 
Received type ${typeof input} (${input})`); + } + return import_buffer.Buffer.from(input, offset, length); +}, "fromArrayBuffer"); +var fromString = /* @__PURE__ */ __name((input, encoding) => { + if (typeof input !== "string") { + throw new TypeError(`The "input" argument must be of type string. Received type ${typeof input} (${input})`); + } + return encoding ? import_buffer.Buffer.from(input, encoding) : import_buffer.Buffer.from(input); +}, "fromString"); +// Annotate the CommonJS export names for ESM import in node: + +0 && (module.exports = { + fromArrayBuffer, + fromString +}); + diff --git a/amplify/functions/fetchDocuments/node_modules/@smithy/util-buffer-from/dist-es/index.js b/amplify/functions/fetchDocuments/node_modules/@smithy/util-buffer-from/dist-es/index.js new file mode 100644 index 0000000..718f831 --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@smithy/util-buffer-from/dist-es/index.js @@ -0,0 +1,14 @@ +import { isArrayBuffer } from "@smithy/is-array-buffer"; +import { Buffer } from "buffer"; +export const fromArrayBuffer = (input, offset = 0, length = input.byteLength - offset) => { + if (!isArrayBuffer(input)) { + throw new TypeError(`The "input" argument must be ArrayBuffer. Received type ${typeof input} (${input})`); + } + return Buffer.from(input, offset, length); +}; +export const fromString = (input, encoding) => { + if (typeof input !== "string") { + throw new TypeError(`The "input" argument must be of type string. Received type ${typeof input} (${input})`); + } + return encoding ? 
Buffer.from(input, encoding) : Buffer.from(input); +}; diff --git a/amplify/functions/fetchDocuments/node_modules/@smithy/util-buffer-from/dist-types/index.d.ts b/amplify/functions/fetchDocuments/node_modules/@smithy/util-buffer-from/dist-types/index.d.ts new file mode 100644 index 0000000..a523134 --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@smithy/util-buffer-from/dist-types/index.d.ts @@ -0,0 +1,13 @@ +import { Buffer } from "buffer"; +/** + * @internal + */ +export declare const fromArrayBuffer: (input: ArrayBuffer, offset?: number, length?: number) => Buffer; +/** + * @internal + */ +export type StringEncoding = "ascii" | "utf8" | "utf16le" | "ucs2" | "base64" | "latin1" | "binary" | "hex"; +/** + * @internal + */ +export declare const fromString: (input: string, encoding?: StringEncoding) => Buffer; diff --git a/amplify/functions/fetchDocuments/node_modules/@smithy/util-buffer-from/dist-types/ts3.4/index.d.ts b/amplify/functions/fetchDocuments/node_modules/@smithy/util-buffer-from/dist-types/ts3.4/index.d.ts new file mode 100644 index 0000000..f9173f7 --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@smithy/util-buffer-from/dist-types/ts3.4/index.d.ts @@ -0,0 +1,13 @@ +import { Buffer } from "buffer"; +/** + * @internal + */ +export declare const fromArrayBuffer: (input: ArrayBuffer, offset?: number, length?: number) => Buffer; +/** + * @internal + */ +export type StringEncoding = "ascii" | "utf8" | "utf16le" | "ucs2" | "base64" | "latin1" | "binary" | "hex"; +/** + * @internal + */ +export declare const fromString: (input: string, encoding?: StringEncoding) => Buffer; diff --git a/amplify/functions/fetchDocuments/node_modules/@smithy/util-buffer-from/package.json b/amplify/functions/fetchDocuments/node_modules/@smithy/util-buffer-from/package.json new file mode 100644 index 0000000..0869899 --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@smithy/util-buffer-from/package.json @@ -0,0 +1,61 @@ +{ + 
"name": "@smithy/util-buffer-from", + "version": "4.0.0", + "scripts": { + "build": "concurrently 'yarn:build:cjs' 'yarn:build:es' 'yarn:build:types && yarn build:types:downlevel'", + "build:cjs": "node ../../scripts/inline util-buffer-from", + "build:es": "yarn g:tsc -p tsconfig.es.json", + "build:types": "yarn g:tsc -p tsconfig.types.json", + "build:types:downlevel": "rimraf dist-types/ts3.4 && downlevel-dts dist-types dist-types/ts3.4", + "stage-release": "rimraf ./.release && yarn pack && mkdir ./.release && tar zxvf ./package.tgz --directory ./.release && rm ./package.tgz", + "clean": "rimraf ./dist-* && rimraf *.tsbuildinfo || exit 0", + "lint": "eslint -c ../../.eslintrc.js \"src/**/*.ts\"", + "format": "prettier --config ../../prettier.config.js --ignore-path ../.prettierignore --write \"**/*.{ts,md,json}\"", + "test": "yarn g:vitest run", + "test:watch": "yarn g:vitest watch" + }, + "author": { + "name": "AWS SDK for JavaScript Team", + "url": "https://aws.amazon.com/javascript/" + }, + "license": "Apache-2.0", + "dependencies": { + "@smithy/is-array-buffer": "^4.0.0", + "tslib": "^2.6.2" + }, + "devDependencies": { + "@types/node": "^18.11.9", + "concurrently": "7.0.0", + "downlevel-dts": "0.10.1", + "rimraf": "3.0.2", + "typedoc": "0.23.23" + }, + "main": "./dist-cjs/index.js", + "module": "./dist-es/index.js", + "types": "./dist-types/index.d.ts", + "engines": { + "node": ">=18.0.0" + }, + "typesVersions": { + "<4.0": { + "dist-types/*": [ + "dist-types/ts3.4/*" + ] + } + }, + "files": [ + "dist-*/**" + ], + "homepage": "https://github.com/awslabs/smithy-typescript/tree/main/packages/util-buffer-from", + "repository": { + "type": "git", + "url": "https://github.com/awslabs/smithy-typescript.git", + "directory": "packages/util-buffer-from" + }, + "typedoc": { + "entryPoint": "src/index.ts" + }, + "publishConfig": { + "directory": ".release/package" + } +} \ No newline at end of file diff --git 
a/amplify/functions/fetchDocuments/node_modules/@smithy/util-config-provider/LICENSE b/amplify/functions/fetchDocuments/node_modules/@smithy/util-config-provider/LICENSE new file mode 100644 index 0000000..74d4e5c --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@smithy/util-config-provider/LICENSE @@ -0,0 +1,201 @@ +Apache License + Version 2.0, January 2004 + http://www.apache.org/licenses/ + + TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION + + 1. Definitions. + + "License" shall mean the terms and conditions for use, reproduction, + and distribution as defined by Sections 1 through 9 of this document. + + "Licensor" shall mean the copyright owner or entity authorized by + the copyright owner that is granting the License. + + "Legal Entity" shall mean the union of the acting entity and all + other entities that control, are controlled by, or are under common + control with that entity. For the purposes of this definition, + "control" means (i) the power, direct or indirect, to cause the + direction or management of such entity, whether by contract or + otherwise, or (ii) ownership of fifty percent (50%) or more of the + outstanding shares, or (iii) beneficial ownership of such entity. + + "You" (or "Your") shall mean an individual or Legal Entity + exercising permissions granted by this License. + + "Source" form shall mean the preferred form for making modifications, + including but not limited to software source code, documentation + source, and configuration files. + + "Object" form shall mean any form resulting from mechanical + transformation or translation of a Source form, including but + not limited to compiled object code, generated documentation, + and conversions to other media types. 
+ + "Work" shall mean the work of authorship, whether in Source or + Object form, made available under the License, as indicated by a + copyright notice that is included in or attached to the work + (an example is provided in the Appendix below). + + "Derivative Works" shall mean any work, whether in Source or Object + form, that is based on (or derived from) the Work and for which the + editorial revisions, annotations, elaborations, or other modifications + represent, as a whole, an original work of authorship. For the purposes + of this License, Derivative Works shall not include works that remain + separable from, or merely link (or bind by name) to the interfaces of, + the Work and Derivative Works thereof. + + "Contribution" shall mean any work of authorship, including + the original version of the Work and any modifications or additions + to that Work or Derivative Works thereof, that is intentionally + submitted to Licensor for inclusion in the Work by the copyright owner + or by an individual or Legal Entity authorized to submit on behalf of + the copyright owner. For the purposes of this definition, "submitted" + means any form of electronic, verbal, or written communication sent + to the Licensor or its representatives, including but not limited to + communication on electronic mailing lists, source code control systems, + and issue tracking systems that are managed by, or on behalf of, the + Licensor for the purpose of discussing and improving the Work, but + excluding communication that is conspicuously marked or otherwise + designated in writing by the copyright owner as "Not a Contribution." + + "Contributor" shall mean Licensor and any individual or Legal Entity + on behalf of whom a Contribution has been received by Licensor and + subsequently incorporated within the Work. + + 2. Grant of Copyright License. 
Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + copyright license to reproduce, prepare Derivative Works of, + publicly display, publicly perform, sublicense, and distribute the + Work and such Derivative Works in Source or Object form. + + 3. Grant of Patent License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + (except as stated in this section) patent license to make, have made, + use, offer to sell, sell, import, and otherwise transfer the Work, + where such license applies only to those patent claims licensable + by such Contributor that are necessarily infringed by their + Contribution(s) alone or by combination of their Contribution(s) + with the Work to which such Contribution(s) was submitted. If You + institute patent litigation against any entity (including a + cross-claim or counterclaim in a lawsuit) alleging that the Work + or a Contribution incorporated within the Work constitutes direct + or contributory patent infringement, then any patent licenses + granted to You under this License for that Work shall terminate + as of the date such litigation is filed. + + 4. Redistribution. 
You may reproduce and distribute copies of the + Work or Derivative Works thereof in any medium, with or without + modifications, and in Source or Object form, provided that You + meet the following conditions: + + (a) You must give any other recipients of the Work or + Derivative Works a copy of this License; and + + (b) You must cause any modified files to carry prominent notices + stating that You changed the files; and + + (c) You must retain, in the Source form of any Derivative Works + that You distribute, all copyright, patent, trademark, and + attribution notices from the Source form of the Work, + excluding those notices that do not pertain to any part of + the Derivative Works; and + + (d) If the Work includes a "NOTICE" text file as part of its + distribution, then any Derivative Works that You distribute must + include a readable copy of the attribution notices contained + within such NOTICE file, excluding those notices that do not + pertain to any part of the Derivative Works, in at least one + of the following places: within a NOTICE text file distributed + as part of the Derivative Works; within the Source form or + documentation, if provided along with the Derivative Works; or, + within a display generated by the Derivative Works, if and + wherever such third-party notices normally appear. The contents + of the NOTICE file are for informational purposes only and + do not modify the License. You may add Your own attribution + notices within Derivative Works that You distribute, alongside + or as an addendum to the NOTICE text from the Work, provided + that such additional attribution notices cannot be construed + as modifying the License. 
+ + You may add Your own copyright statement to Your modifications and + may provide additional or different license terms and conditions + for use, reproduction, or distribution of Your modifications, or + for any such Derivative Works as a whole, provided Your use, + reproduction, and distribution of the Work otherwise complies with + the conditions stated in this License. + + 5. Submission of Contributions. Unless You explicitly state otherwise, + any Contribution intentionally submitted for inclusion in the Work + by You to the Licensor shall be under the terms and conditions of + this License, without any additional terms or conditions. + Notwithstanding the above, nothing herein shall supersede or modify + the terms of any separate license agreement you may have executed + with Licensor regarding such Contributions. + + 6. Trademarks. This License does not grant permission to use the trade + names, trademarks, service marks, or product names of the Licensor, + except as required for reasonable and customary use in describing the + origin of the Work and reproducing the content of the NOTICE file. + + 7. Disclaimer of Warranty. Unless required by applicable law or + agreed to in writing, Licensor provides the Work (and each + Contributor provides its Contributions) on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or + implied, including, without limitation, any warranties or conditions + of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A + PARTICULAR PURPOSE. You are solely responsible for determining the + appropriateness of using or redistributing the Work and assume any + risks associated with Your exercise of permissions under this License. + + 8. Limitation of Liability. 
In no event and under no legal theory, + whether in tort (including negligence), contract, or otherwise, + unless required by applicable law (such as deliberate and grossly + negligent acts) or agreed to in writing, shall any Contributor be + liable to You for damages, including any direct, indirect, special, + incidental, or consequential damages of any character arising as a + result of this License or out of the use or inability to use the + Work (including but not limited to damages for loss of goodwill, + work stoppage, computer failure or malfunction, or any and all + other commercial damages or losses), even if such Contributor + has been advised of the possibility of such damages. + + 9. Accepting Warranty or Additional Liability. While redistributing + the Work or Derivative Works thereof, You may choose to offer, + and charge a fee for, acceptance of support, warranty, indemnity, + or other liability obligations and/or rights consistent with this + License. However, in accepting such obligations, You may act only + on Your own behalf and on Your sole responsibility, not on behalf + of any other Contributor, and only if You agree to indemnify, + defend, and hold each Contributor harmless for any liability + incurred by, or claims asserted against, such Contributor by reason + of your accepting any such warranty or additional liability. + + END OF TERMS AND CONDITIONS + + APPENDIX: How to apply the Apache License to your work. + + To apply the Apache License to your work, attach the following + boilerplate notice, with the fields enclosed by brackets "{}" + replaced with your own identifying information. (Don't include + the brackets!) The text should be enclosed in the appropriate + comment syntax for the file format. We also recommend that a + file or class name and description of purpose be included on the + same "printed page" as the copyright notice for easier + identification within third-party archives. + + Copyright 2020 Amazon.com, Inc. 
or its affiliates. All Rights Reserved. + + Licensed under the Apache License, Version 2.0 (the "License"); + you may not use this file except in compliance with the License. + You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + + Unless required by applicable law or agreed to in writing, software + distributed under the License is distributed on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + See the License for the specific language governing permissions and + limitations under the License. \ No newline at end of file diff --git a/amplify/functions/fetchDocuments/node_modules/@smithy/util-config-provider/README.md b/amplify/functions/fetchDocuments/node_modules/@smithy/util-config-provider/README.md new file mode 100644 index 0000000..5b0341d --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@smithy/util-config-provider/README.md @@ -0,0 +1,4 @@ +# @smithy/util-config-provider + +[![NPM version](https://img.shields.io/npm/v/@smithy/util-config-provider/latest.svg)](https://www.npmjs.com/package/@smithy/util-config-provider) +[![NPM downloads](https://img.shields.io/npm/dm/@smithy/util-config-provider.svg)](https://www.npmjs.com/package/@smithy/util-config-provider) diff --git a/amplify/functions/fetchDocuments/node_modules/@smithy/util-config-provider/dist-cjs/booleanSelector.js b/amplify/functions/fetchDocuments/node_modules/@smithy/util-config-provider/dist-cjs/booleanSelector.js new file mode 100644 index 0000000..532e610 --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@smithy/util-config-provider/dist-cjs/booleanSelector.js @@ -0,0 +1 @@ +module.exports = require("./index.js"); \ No newline at end of file diff --git a/amplify/functions/fetchDocuments/node_modules/@smithy/util-config-provider/dist-cjs/index.js b/amplify/functions/fetchDocuments/node_modules/@smithy/util-config-provider/dist-cjs/index.js new file mode 100644 index 
0000000..210d40d --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@smithy/util-config-provider/dist-cjs/index.js @@ -0,0 +1,64 @@ +var __defProp = Object.defineProperty; +var __getOwnPropDesc = Object.getOwnPropertyDescriptor; +var __getOwnPropNames = Object.getOwnPropertyNames; +var __hasOwnProp = Object.prototype.hasOwnProperty; +var __name = (target, value) => __defProp(target, "name", { value, configurable: true }); +var __export = (target, all) => { + for (var name in all) + __defProp(target, name, { get: all[name], enumerable: true }); +}; +var __copyProps = (to, from, except, desc) => { + if (from && typeof from === "object" || typeof from === "function") { + for (let key of __getOwnPropNames(from)) + if (!__hasOwnProp.call(to, key) && key !== except) + __defProp(to, key, { get: () => from[key], enumerable: !(desc = __getOwnPropDesc(from, key)) || desc.enumerable }); + } + return to; +}; +var __toCommonJS = (mod) => __copyProps(__defProp({}, "__esModule", { value: true }), mod); + +// src/index.ts +var src_exports = {}; +__export(src_exports, { + SelectorType: () => SelectorType, + booleanSelector: () => booleanSelector, + numberSelector: () => numberSelector +}); +module.exports = __toCommonJS(src_exports); + +// src/booleanSelector.ts +var booleanSelector = /* @__PURE__ */ __name((obj, key, type) => { + if (!(key in obj)) + return void 0; + if (obj[key] === "true") + return true; + if (obj[key] === "false") + return false; + throw new Error(`Cannot load ${type} "${key}". Expected "true" or "false", got ${obj[key]}.`); +}, "booleanSelector"); + +// src/numberSelector.ts +var numberSelector = /* @__PURE__ */ __name((obj, key, type) => { + if (!(key in obj)) + return void 0; + const numberValue = parseInt(obj[key], 10); + if (Number.isNaN(numberValue)) { + throw new TypeError(`Cannot load ${type} '${key}'. 
Expected number, got '${obj[key]}'.`); + } + return numberValue; +}, "numberSelector"); + +// src/types.ts +var SelectorType = /* @__PURE__ */ ((SelectorType2) => { + SelectorType2["ENV"] = "env"; + SelectorType2["CONFIG"] = "shared config entry"; + return SelectorType2; +})(SelectorType || {}); +// Annotate the CommonJS export names for ESM import in node: + +0 && (module.exports = { + booleanSelector, + numberSelector, + SelectorType +}); + diff --git a/amplify/functions/fetchDocuments/node_modules/@smithy/util-config-provider/dist-cjs/numberSelector.js b/amplify/functions/fetchDocuments/node_modules/@smithy/util-config-provider/dist-cjs/numberSelector.js new file mode 100644 index 0000000..532e610 --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@smithy/util-config-provider/dist-cjs/numberSelector.js @@ -0,0 +1 @@ +module.exports = require("./index.js"); \ No newline at end of file diff --git a/amplify/functions/fetchDocuments/node_modules/@smithy/util-config-provider/dist-cjs/types.js b/amplify/functions/fetchDocuments/node_modules/@smithy/util-config-provider/dist-cjs/types.js new file mode 100644 index 0000000..532e610 --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@smithy/util-config-provider/dist-cjs/types.js @@ -0,0 +1 @@ +module.exports = require("./index.js"); \ No newline at end of file diff --git a/amplify/functions/fetchDocuments/node_modules/@smithy/util-config-provider/dist-es/booleanSelector.js b/amplify/functions/fetchDocuments/node_modules/@smithy/util-config-provider/dist-es/booleanSelector.js new file mode 100644 index 0000000..6ba2261 --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@smithy/util-config-provider/dist-es/booleanSelector.js @@ -0,0 +1,9 @@ +export const booleanSelector = (obj, key, type) => { + if (!(key in obj)) + return undefined; + if (obj[key] === "true") + return true; + if (obj[key] === "false") + return false; + throw new Error(`Cannot load ${type} "${key}". 
Expected "true" or "false", got ${obj[key]}.`); +}; diff --git a/amplify/functions/fetchDocuments/node_modules/@smithy/util-config-provider/dist-es/index.js b/amplify/functions/fetchDocuments/node_modules/@smithy/util-config-provider/dist-es/index.js new file mode 100644 index 0000000..a926de8 --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@smithy/util-config-provider/dist-es/index.js @@ -0,0 +1,3 @@ +export * from "./booleanSelector"; +export * from "./numberSelector"; +export * from "./types"; diff --git a/amplify/functions/fetchDocuments/node_modules/@smithy/util-config-provider/dist-es/numberSelector.js b/amplify/functions/fetchDocuments/node_modules/@smithy/util-config-provider/dist-es/numberSelector.js new file mode 100644 index 0000000..81cfe40 --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@smithy/util-config-provider/dist-es/numberSelector.js @@ -0,0 +1,9 @@ +export const numberSelector = (obj, key, type) => { + if (!(key in obj)) + return undefined; + const numberValue = parseInt(obj[key], 10); + if (Number.isNaN(numberValue)) { + throw new TypeError(`Cannot load ${type} '${key}'. 
Expected number, got '${obj[key]}'.`); + } + return numberValue; +}; diff --git a/amplify/functions/fetchDocuments/node_modules/@smithy/util-config-provider/dist-es/types.js b/amplify/functions/fetchDocuments/node_modules/@smithy/util-config-provider/dist-es/types.js new file mode 100644 index 0000000..5b10fb5 --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@smithy/util-config-provider/dist-es/types.js @@ -0,0 +1,5 @@ +export var SelectorType; +(function (SelectorType) { + SelectorType["ENV"] = "env"; + SelectorType["CONFIG"] = "shared config entry"; +})(SelectorType || (SelectorType = {})); diff --git a/amplify/functions/fetchDocuments/node_modules/@smithy/util-config-provider/dist-types/booleanSelector.d.ts b/amplify/functions/fetchDocuments/node_modules/@smithy/util-config-provider/dist-types/booleanSelector.d.ts new file mode 100644 index 0000000..d4977cb --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@smithy/util-config-provider/dist-types/booleanSelector.d.ts @@ -0,0 +1,10 @@ +import { SelectorType } from "./types"; +/** + * Returns boolean value true/false for string value "true"/"false", + * if the string is defined in obj[key] + * Returns undefined, if obj[key] is not defined. + * Throws error for all other cases. 
+ * + * @internal + */ +export declare const booleanSelector: (obj: Record, key: string, type: SelectorType) => boolean | undefined; diff --git a/amplify/functions/fetchDocuments/node_modules/@smithy/util-config-provider/dist-types/index.d.ts b/amplify/functions/fetchDocuments/node_modules/@smithy/util-config-provider/dist-types/index.d.ts new file mode 100644 index 0000000..a926de8 --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@smithy/util-config-provider/dist-types/index.d.ts @@ -0,0 +1,3 @@ +export * from "./booleanSelector"; +export * from "./numberSelector"; +export * from "./types"; diff --git a/amplify/functions/fetchDocuments/node_modules/@smithy/util-config-provider/dist-types/numberSelector.d.ts b/amplify/functions/fetchDocuments/node_modules/@smithy/util-config-provider/dist-types/numberSelector.d.ts new file mode 100644 index 0000000..9e0cbf9 --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@smithy/util-config-provider/dist-types/numberSelector.d.ts @@ -0,0 +1,9 @@ +import { SelectorType } from "./types"; +/** + * Returns number value for string value, if the string is defined in obj[key]. + * Returns undefined, if obj[key] is not defined. + * Throws error for all other cases. 
+ * + * @internal + */ +export declare const numberSelector: (obj: Record, key: string, type: SelectorType) => number | undefined; diff --git a/amplify/functions/fetchDocuments/node_modules/@smithy/util-config-provider/dist-types/ts3.4/booleanSelector.d.ts b/amplify/functions/fetchDocuments/node_modules/@smithy/util-config-provider/dist-types/ts3.4/booleanSelector.d.ts new file mode 100644 index 0000000..0b85452 --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@smithy/util-config-provider/dist-types/ts3.4/booleanSelector.d.ts @@ -0,0 +1,10 @@ +import { SelectorType } from "./types"; +/** + * Returns boolean value true/false for string value "true"/"false", + * if the string is defined in obj[key] + * Returns undefined, if obj[key] is not defined. + * Throws error for all other cases. + * + * @internal + */ +export declare const booleanSelector: (obj: Record, key: string, type: SelectorType) => boolean | undefined; diff --git a/amplify/functions/fetchDocuments/node_modules/@smithy/util-config-provider/dist-types/ts3.4/index.d.ts b/amplify/functions/fetchDocuments/node_modules/@smithy/util-config-provider/dist-types/ts3.4/index.d.ts new file mode 100644 index 0000000..02fd81d --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@smithy/util-config-provider/dist-types/ts3.4/index.d.ts @@ -0,0 +1,3 @@ +export * from "./booleanSelector"; +export * from "./numberSelector"; +export * from "./types"; diff --git a/amplify/functions/fetchDocuments/node_modules/@smithy/util-config-provider/dist-types/ts3.4/numberSelector.d.ts b/amplify/functions/fetchDocuments/node_modules/@smithy/util-config-provider/dist-types/ts3.4/numberSelector.d.ts new file mode 100644 index 0000000..3a34671 --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@smithy/util-config-provider/dist-types/ts3.4/numberSelector.d.ts @@ -0,0 +1,9 @@ +import { SelectorType } from "./types"; +/** + * Returns number value for string value, if the string is defined in 
obj[key]. + * Returns undefined, if obj[key] is not defined. + * Throws error for all other cases. + * + * @internal + */ +export declare const numberSelector: (obj: Record, key: string, type: SelectorType) => number | undefined; diff --git a/amplify/functions/fetchDocuments/node_modules/@smithy/util-config-provider/dist-types/ts3.4/types.d.ts b/amplify/functions/fetchDocuments/node_modules/@smithy/util-config-provider/dist-types/ts3.4/types.d.ts new file mode 100644 index 0000000..e01c128 --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@smithy/util-config-provider/dist-types/ts3.4/types.d.ts @@ -0,0 +1,4 @@ +export declare enum SelectorType { + ENV = "env", + CONFIG = "shared config entry" +} diff --git a/amplify/functions/fetchDocuments/node_modules/@smithy/util-config-provider/dist-types/types.d.ts b/amplify/functions/fetchDocuments/node_modules/@smithy/util-config-provider/dist-types/types.d.ts new file mode 100644 index 0000000..caa65d7 --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@smithy/util-config-provider/dist-types/types.d.ts @@ -0,0 +1,4 @@ +export declare enum SelectorType { + ENV = "env", + CONFIG = "shared config entry" +} diff --git a/amplify/functions/fetchDocuments/node_modules/@smithy/util-config-provider/package.json b/amplify/functions/fetchDocuments/node_modules/@smithy/util-config-provider/package.json new file mode 100644 index 0000000..50796be --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@smithy/util-config-provider/package.json @@ -0,0 +1,62 @@ +{ + "name": "@smithy/util-config-provider", + "version": "4.0.0", + "description": "Utilities package for configuration providers", + "scripts": { + "build": "concurrently 'yarn:build:cjs' 'yarn:build:es' 'yarn:build:types && yarn build:types:downlevel'", + "build:cjs": "node ../../scripts/inline util-config-provider", + "build:es": "yarn g:tsc -p tsconfig.es.json", + "build:types": "yarn g:tsc -p tsconfig.types.json", + 
"build:types:downlevel": "rimraf dist-types/ts3.4 && downlevel-dts dist-types dist-types/ts3.4", + "stage-release": "rimraf ./.release && yarn pack && mkdir ./.release && tar zxvf ./package.tgz --directory ./.release && rm ./package.tgz", + "clean": "rimraf ./dist-* && rimraf *.tsbuildinfo || exit 0", + "lint": "eslint -c ../../.eslintrc.js \"src/**/*.ts\"", + "format": "prettier --config ../../prettier.config.js --ignore-path ../.prettierignore --write \"**/*.{ts,md,json}\"", + "test": "yarn g:vitest run", + "test:watch": "yarn g:vitest watch" + }, + "author": { + "name": "AWS SDK for JavaScript Team", + "email": "", + "url": "https://aws.amazon.com/javascript/" + }, + "license": "Apache-2.0", + "main": "./dist-cjs/index.js", + "module": "./dist-es/index.js", + "types": "./dist-types/index.d.ts", + "dependencies": { + "tslib": "^2.6.2" + }, + "devDependencies": { + "@types/node": "^18.11.9", + "concurrently": "7.0.0", + "downlevel-dts": "0.10.1", + "rimraf": "3.0.2", + "typedoc": "0.23.23" + }, + "engines": { + "node": ">=18.0.0" + }, + "typesVersions": { + "<4.0": { + "dist-types/*": [ + "dist-types/ts3.4/*" + ] + } + }, + "files": [ + "dist-*/**" + ], + "homepage": "https://github.com/awslabs/smithy-typescript/tree/main/packages/util-config-provider", + "repository": { + "type": "git", + "url": "https://github.com/awslabs/smithy-typescript.git", + "directory": "packages/util-config-provider" + }, + "typedoc": { + "entryPoint": "src/index.ts" + }, + "publishConfig": { + "directory": ".release/package" + } +} \ No newline at end of file diff --git a/amplify/functions/fetchDocuments/node_modules/@smithy/util-defaults-mode-browser/LICENSE b/amplify/functions/fetchDocuments/node_modules/@smithy/util-defaults-mode-browser/LICENSE new file mode 100644 index 0000000..dd65ae0 --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@smithy/util-defaults-mode-browser/LICENSE @@ -0,0 +1,201 @@ + Apache License + Version 2.0, January 2004 + 
http://www.apache.org/licenses/ + + TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION + + 1. Definitions. + + "License" shall mean the terms and conditions for use, reproduction, + and distribution as defined by Sections 1 through 9 of this document. + + "Licensor" shall mean the copyright owner or entity authorized by + the copyright owner that is granting the License. + + "Legal Entity" shall mean the union of the acting entity and all + other entities that control, are controlled by, or are under common + control with that entity. For the purposes of this definition, + "control" means (i) the power, direct or indirect, to cause the + direction or management of such entity, whether by contract or + otherwise, or (ii) ownership of fifty percent (50%) or more of the + outstanding shares, or (iii) beneficial ownership of such entity. + + "You" (or "Your") shall mean an individual or Legal Entity + exercising permissions granted by this License. + + "Source" form shall mean the preferred form for making modifications, + including but not limited to software source code, documentation + source, and configuration files. + + "Object" form shall mean any form resulting from mechanical + transformation or translation of a Source form, including but + not limited to compiled object code, generated documentation, + and conversions to other media types. + + "Work" shall mean the work of authorship, whether in Source or + Object form, made available under the License, as indicated by a + copyright notice that is included in or attached to the work + (an example is provided in the Appendix below). + + "Derivative Works" shall mean any work, whether in Source or Object + form, that is based on (or derived from) the Work and for which the + editorial revisions, annotations, elaborations, or other modifications + represent, as a whole, an original work of authorship. 
For the purposes + of this License, Derivative Works shall not include works that remain + separable from, or merely link (or bind by name) to the interfaces of, + the Work and Derivative Works thereof. + + "Contribution" shall mean any work of authorship, including + the original version of the Work and any modifications or additions + to that Work or Derivative Works thereof, that is intentionally + submitted to Licensor for inclusion in the Work by the copyright owner + or by an individual or Legal Entity authorized to submit on behalf of + the copyright owner. For the purposes of this definition, "submitted" + means any form of electronic, verbal, or written communication sent + to the Licensor or its representatives, including but not limited to + communication on electronic mailing lists, source code control systems, + and issue tracking systems that are managed by, or on behalf of, the + Licensor for the purpose of discussing and improving the Work, but + excluding communication that is conspicuously marked or otherwise + designated in writing by the copyright owner as "Not a Contribution." + + "Contributor" shall mean Licensor and any individual or Legal Entity + on behalf of whom a Contribution has been received by Licensor and + subsequently incorporated within the Work. + + 2. Grant of Copyright License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + copyright license to reproduce, prepare Derivative Works of, + publicly display, publicly perform, sublicense, and distribute the + Work and such Derivative Works in Source or Object form. + + 3. Grant of Patent License. 
Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + (except as stated in this section) patent license to make, have made, + use, offer to sell, sell, import, and otherwise transfer the Work, + where such license applies only to those patent claims licensable + by such Contributor that are necessarily infringed by their + Contribution(s) alone or by combination of their Contribution(s) + with the Work to which such Contribution(s) was submitted. If You + institute patent litigation against any entity (including a + cross-claim or counterclaim in a lawsuit) alleging that the Work + or a Contribution incorporated within the Work constitutes direct + or contributory patent infringement, then any patent licenses + granted to You under this License for that Work shall terminate + as of the date such litigation is filed. + + 4. Redistribution. You may reproduce and distribute copies of the + Work or Derivative Works thereof in any medium, with or without + modifications, and in Source or Object form, provided that You + meet the following conditions: + + (a) You must give any other recipients of the Work or + Derivative Works a copy of this License; and + + (b) You must cause any modified files to carry prominent notices + stating that You changed the files; and + + (c) You must retain, in the Source form of any Derivative Works + that You distribute, all copyright, patent, trademark, and + attribution notices from the Source form of the Work, + excluding those notices that do not pertain to any part of + the Derivative Works; and + + (d) If the Work includes a "NOTICE" text file as part of its + distribution, then any Derivative Works that You distribute must + include a readable copy of the attribution notices contained + within such NOTICE file, excluding those notices that do not + pertain to any part of the Derivative Works, in at least one + of 
the following places: within a NOTICE text file distributed + as part of the Derivative Works; within the Source form or + documentation, if provided along with the Derivative Works; or, + within a display generated by the Derivative Works, if and + wherever such third-party notices normally appear. The contents + of the NOTICE file are for informational purposes only and + do not modify the License. You may add Your own attribution + notices within Derivative Works that You distribute, alongside + or as an addendum to the NOTICE text from the Work, provided + that such additional attribution notices cannot be construed + as modifying the License. + + You may add Your own copyright statement to Your modifications and + may provide additional or different license terms and conditions + for use, reproduction, or distribution of Your modifications, or + for any such Derivative Works as a whole, provided Your use, + reproduction, and distribution of the Work otherwise complies with + the conditions stated in this License. + + 5. Submission of Contributions. Unless You explicitly state otherwise, + any Contribution intentionally submitted for inclusion in the Work + by You to the Licensor shall be under the terms and conditions of + this License, without any additional terms or conditions. + Notwithstanding the above, nothing herein shall supersede or modify + the terms of any separate license agreement you may have executed + with Licensor regarding such Contributions. + + 6. Trademarks. This License does not grant permission to use the trade + names, trademarks, service marks, or product names of the Licensor, + except as required for reasonable and customary use in describing the + origin of the Work and reproducing the content of the NOTICE file. + + 7. Disclaimer of Warranty. 
Unless required by applicable law or + agreed to in writing, Licensor provides the Work (and each + Contributor provides its Contributions) on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or + implied, including, without limitation, any warranties or conditions + of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A + PARTICULAR PURPOSE. You are solely responsible for determining the + appropriateness of using or redistributing the Work and assume any + risks associated with Your exercise of permissions under this License. + + 8. Limitation of Liability. In no event and under no legal theory, + whether in tort (including negligence), contract, or otherwise, + unless required by applicable law (such as deliberate and grossly + negligent acts) or agreed to in writing, shall any Contributor be + liable to You for damages, including any direct, indirect, special, + incidental, or consequential damages of any character arising as a + result of this License or out of the use or inability to use the + Work (including but not limited to damages for loss of goodwill, + work stoppage, computer failure or malfunction, or any and all + other commercial damages or losses), even if such Contributor + has been advised of the possibility of such damages. + + 9. Accepting Warranty or Additional Liability. While redistributing + the Work or Derivative Works thereof, You may choose to offer, + and charge a fee for, acceptance of support, warranty, indemnity, + or other liability obligations and/or rights consistent with this + License. However, in accepting such obligations, You may act only + on Your own behalf and on Your sole responsibility, not on behalf + of any other Contributor, and only if You agree to indemnify, + defend, and hold each Contributor harmless for any liability + incurred by, or claims asserted against, such Contributor by reason + of your accepting any such warranty or additional liability. 
+ + END OF TERMS AND CONDITIONS + + APPENDIX: How to apply the Apache License to your work. + + To apply the Apache License to your work, attach the following + boilerplate notice, with the fields enclosed by brackets "{}" + replaced with your own identifying information. (Don't include + the brackets!) The text should be enclosed in the appropriate + comment syntax for the file format. We also recommend that a + file or class name and description of purpose be included on the + same "printed page" as the copyright notice for easier + identification within third-party archives. + + Copyright 2018-2020 Amazon.com, Inc. or its affiliates. All Rights Reserved. + + Licensed under the Apache License, Version 2.0 (the "License"); + you may not use this file except in compliance with the License. + You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + + Unless required by applicable law or agreed to in writing, software + distributed under the License is distributed on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + See the License for the specific language governing permissions and + limitations under the License. 
diff --git a/amplify/functions/fetchDocuments/node_modules/@smithy/util-defaults-mode-browser/README.md b/amplify/functions/fetchDocuments/node_modules/@smithy/util-defaults-mode-browser/README.md new file mode 100644 index 0000000..f2f1cc0 --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@smithy/util-defaults-mode-browser/README.md @@ -0,0 +1,10 @@ +# @smithy/util-defaults-mode-browser + +[![NPM version](https://img.shields.io/npm/v/@smithy/util-defaults-mode-browser/latest.svg)](https://www.npmjs.com/package/@smithy/util-defaults-mode-browser) +[![NPM downloads](https://img.shields.io/npm/dm/@smithy/util-defaults-mode-browser.svg)](https://www.npmjs.com/package/@smithy/util-defaults-mode-browser) + +> An internal package + +## Usage + +You probably shouldn't, at least directly. diff --git a/amplify/functions/fetchDocuments/node_modules/@smithy/util-defaults-mode-browser/dist-cjs/constants.js b/amplify/functions/fetchDocuments/node_modules/@smithy/util-defaults-mode-browser/dist-cjs/constants.js new file mode 100644 index 0000000..3733506 --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@smithy/util-defaults-mode-browser/dist-cjs/constants.js @@ -0,0 +1,4 @@ +"use strict"; +Object.defineProperty(exports, "__esModule", { value: true }); +exports.DEFAULTS_MODE_OPTIONS = void 0; +exports.DEFAULTS_MODE_OPTIONS = ["in-region", "cross-region", "mobile", "standard", "legacy"]; diff --git a/amplify/functions/fetchDocuments/node_modules/@smithy/util-defaults-mode-browser/dist-cjs/index.js b/amplify/functions/fetchDocuments/node_modules/@smithy/util-defaults-mode-browser/dist-cjs/index.js new file mode 100644 index 0000000..4624ef1 --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@smithy/util-defaults-mode-browser/dist-cjs/index.js @@ -0,0 +1,25 @@ +var __defProp = Object.defineProperty; +var __getOwnPropDesc = Object.getOwnPropertyDescriptor; +var __getOwnPropNames = Object.getOwnPropertyNames; +var __hasOwnProp = 
Object.prototype.hasOwnProperty; +var __copyProps = (to, from, except, desc) => { + if (from && typeof from === "object" || typeof from === "function") { + for (let key of __getOwnPropNames(from)) + if (!__hasOwnProp.call(to, key) && key !== except) + __defProp(to, key, { get: () => from[key], enumerable: !(desc = __getOwnPropDesc(from, key)) || desc.enumerable }); + } + return to; +}; +var __reExport = (target, mod, secondTarget) => (__copyProps(target, mod, "default"), secondTarget && __copyProps(secondTarget, mod, "default")); +var __toCommonJS = (mod) => __copyProps(__defProp({}, "__esModule", { value: true }), mod); + +// src/index.ts +var src_exports = {}; +module.exports = __toCommonJS(src_exports); +__reExport(src_exports, require("././resolveDefaultsModeConfig"), module.exports); +// Annotate the CommonJS export names for ESM import in node: + +0 && (module.exports = { + resolveDefaultsModeConfig +}); + diff --git a/amplify/functions/fetchDocuments/node_modules/@smithy/util-defaults-mode-browser/dist-cjs/resolveDefaultsModeConfig.js b/amplify/functions/fetchDocuments/node_modules/@smithy/util-defaults-mode-browser/dist-cjs/resolveDefaultsModeConfig.js new file mode 100644 index 0000000..f23368c --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@smithy/util-defaults-mode-browser/dist-cjs/resolveDefaultsModeConfig.js @@ -0,0 +1,33 @@ +"use strict"; +Object.defineProperty(exports, "__esModule", { value: true }); +exports.resolveDefaultsModeConfig = void 0; +const tslib_1 = require("tslib"); +const property_provider_1 = require("@smithy/property-provider"); +const bowser_1 = tslib_1.__importDefault(require("bowser")); +const constants_1 = require("./constants"); +const resolveDefaultsModeConfig = ({ defaultsMode, } = {}) => (0, property_provider_1.memoize)(async () => { + const mode = typeof defaultsMode === "function" ? await defaultsMode() : defaultsMode; + switch (mode === null || mode === void 0 ? 
void 0 : mode.toLowerCase()) { + case "auto": + return Promise.resolve(isMobileBrowser() ? "mobile" : "standard"); + case "mobile": + case "in-region": + case "cross-region": + case "standard": + case "legacy": + return Promise.resolve(mode === null || mode === void 0 ? void 0 : mode.toLocaleLowerCase()); + case undefined: + return Promise.resolve("legacy"); + default: + throw new Error(`Invalid parameter for "defaultsMode", expect ${constants_1.DEFAULTS_MODE_OPTIONS.join(", ")}, got ${mode}`); + } +}); +exports.resolveDefaultsModeConfig = resolveDefaultsModeConfig; +const isMobileBrowser = () => { + var _a, _b; + const parsedUA = typeof window !== "undefined" && ((_a = window === null || window === void 0 ? void 0 : window.navigator) === null || _a === void 0 ? void 0 : _a.userAgent) + ? bowser_1.default.parse(window.navigator.userAgent) + : undefined; + const platform = (_b = parsedUA === null || parsedUA === void 0 ? void 0 : parsedUA.platform) === null || _b === void 0 ? void 0 : _b.type; + return platform === "tablet" || platform === "mobile"; +}; diff --git a/amplify/functions/fetchDocuments/node_modules/@smithy/util-defaults-mode-browser/dist-cjs/resolveDefaultsModeConfig.native.js b/amplify/functions/fetchDocuments/node_modules/@smithy/util-defaults-mode-browser/dist-cjs/resolveDefaultsModeConfig.native.js new file mode 100644 index 0000000..fc6be33 --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@smithy/util-defaults-mode-browser/dist-cjs/resolveDefaultsModeConfig.native.js @@ -0,0 +1,23 @@ +"use strict"; +Object.defineProperty(exports, "__esModule", { value: true }); +exports.resolveDefaultsModeConfig = void 0; +const property_provider_1 = require("@smithy/property-provider"); +const constants_1 = require("./constants"); +const resolveDefaultsModeConfig = ({ defaultsMode, } = {}) => (0, property_provider_1.memoize)(async () => { + const mode = typeof defaultsMode === "function" ? 
await defaultsMode() : defaultsMode; + switch (mode === null || mode === void 0 ? void 0 : mode.toLowerCase()) { + case "auto": + return Promise.resolve("mobile"); + case "mobile": + case "in-region": + case "cross-region": + case "standard": + case "legacy": + return Promise.resolve(mode === null || mode === void 0 ? void 0 : mode.toLocaleLowerCase()); + case undefined: + return Promise.resolve("legacy"); + default: + throw new Error(`Invalid parameter for "defaultsMode", expect ${constants_1.DEFAULTS_MODE_OPTIONS.join(", ")}, got ${mode}`); + } +}); +exports.resolveDefaultsModeConfig = resolveDefaultsModeConfig; diff --git a/amplify/functions/fetchDocuments/node_modules/@smithy/util-defaults-mode-browser/dist-es/constants.js b/amplify/functions/fetchDocuments/node_modules/@smithy/util-defaults-mode-browser/dist-es/constants.js new file mode 100644 index 0000000..d58e11f --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@smithy/util-defaults-mode-browser/dist-es/constants.js @@ -0,0 +1 @@ +export const DEFAULTS_MODE_OPTIONS = ["in-region", "cross-region", "mobile", "standard", "legacy"]; diff --git a/amplify/functions/fetchDocuments/node_modules/@smithy/util-defaults-mode-browser/dist-es/index.js b/amplify/functions/fetchDocuments/node_modules/@smithy/util-defaults-mode-browser/dist-es/index.js new file mode 100644 index 0000000..05aa818 --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@smithy/util-defaults-mode-browser/dist-es/index.js @@ -0,0 +1 @@ +export * from "./resolveDefaultsModeConfig"; diff --git a/amplify/functions/fetchDocuments/node_modules/@smithy/util-defaults-mode-browser/dist-es/resolveDefaultsModeConfig.js b/amplify/functions/fetchDocuments/node_modules/@smithy/util-defaults-mode-browser/dist-es/resolveDefaultsModeConfig.js new file mode 100644 index 0000000..940ab63 --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@smithy/util-defaults-mode-browser/dist-es/resolveDefaultsModeConfig.js @@ -0,0 
+1,27 @@ +import { memoize } from "@smithy/property-provider"; +import bowser from "bowser"; +import { DEFAULTS_MODE_OPTIONS } from "./constants"; +export const resolveDefaultsModeConfig = ({ defaultsMode, } = {}) => memoize(async () => { + const mode = typeof defaultsMode === "function" ? await defaultsMode() : defaultsMode; + switch (mode?.toLowerCase()) { + case "auto": + return Promise.resolve(isMobileBrowser() ? "mobile" : "standard"); + case "mobile": + case "in-region": + case "cross-region": + case "standard": + case "legacy": + return Promise.resolve(mode?.toLocaleLowerCase()); + case undefined: + return Promise.resolve("legacy"); + default: + throw new Error(`Invalid parameter for "defaultsMode", expect ${DEFAULTS_MODE_OPTIONS.join(", ")}, got ${mode}`); + } +}); +const isMobileBrowser = () => { + const parsedUA = typeof window !== "undefined" && window?.navigator?.userAgent + ? bowser.parse(window.navigator.userAgent) + : undefined; + const platform = parsedUA?.platform?.type; + return platform === "tablet" || platform === "mobile"; +}; diff --git a/amplify/functions/fetchDocuments/node_modules/@smithy/util-defaults-mode-browser/dist-es/resolveDefaultsModeConfig.native.js b/amplify/functions/fetchDocuments/node_modules/@smithy/util-defaults-mode-browser/dist-es/resolveDefaultsModeConfig.native.js new file mode 100644 index 0000000..3164191 --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@smithy/util-defaults-mode-browser/dist-es/resolveDefaultsModeConfig.native.js @@ -0,0 +1,19 @@ +import { memoize } from "@smithy/property-provider"; +import { DEFAULTS_MODE_OPTIONS } from "./constants"; +export const resolveDefaultsModeConfig = ({ defaultsMode, } = {}) => memoize(async () => { + const mode = typeof defaultsMode === "function" ? 
await defaultsMode() : defaultsMode; + switch (mode?.toLowerCase()) { + case "auto": + return Promise.resolve("mobile"); + case "mobile": + case "in-region": + case "cross-region": + case "standard": + case "legacy": + return Promise.resolve(mode?.toLocaleLowerCase()); + case undefined: + return Promise.resolve("legacy"); + default: + throw new Error(`Invalid parameter for "defaultsMode", expect ${DEFAULTS_MODE_OPTIONS.join(", ")}, got ${mode}`); + } +}); diff --git a/amplify/functions/fetchDocuments/node_modules/@smithy/util-defaults-mode-browser/dist-types/constants.d.ts b/amplify/functions/fetchDocuments/node_modules/@smithy/util-defaults-mode-browser/dist-types/constants.d.ts new file mode 100644 index 0000000..18dbe6c --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@smithy/util-defaults-mode-browser/dist-types/constants.d.ts @@ -0,0 +1,12 @@ +import type { DefaultsMode } from "@smithy/smithy-client"; +import type { Provider } from "@smithy/types"; +/** + * @internal + */ +export declare const DEFAULTS_MODE_OPTIONS: string[]; +/** + * @internal + */ +export interface ResolveDefaultsModeConfigOptions { + defaultsMode?: DefaultsMode | Provider; +} diff --git a/amplify/functions/fetchDocuments/node_modules/@smithy/util-defaults-mode-browser/dist-types/index.d.ts b/amplify/functions/fetchDocuments/node_modules/@smithy/util-defaults-mode-browser/dist-types/index.d.ts new file mode 100644 index 0000000..003de26 --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@smithy/util-defaults-mode-browser/dist-types/index.d.ts @@ -0,0 +1,4 @@ +/** + * @internal + */ +export * from "./resolveDefaultsModeConfig"; diff --git a/amplify/functions/fetchDocuments/node_modules/@smithy/util-defaults-mode-browser/dist-types/resolveDefaultsModeConfig.d.ts b/amplify/functions/fetchDocuments/node_modules/@smithy/util-defaults-mode-browser/dist-types/resolveDefaultsModeConfig.d.ts new file mode 100644 index 0000000..e4cc1b7 --- /dev/null +++ 
b/amplify/functions/fetchDocuments/node_modules/@smithy/util-defaults-mode-browser/dist-types/resolveDefaultsModeConfig.d.ts @@ -0,0 +1,17 @@ +import type { DefaultsMode, ResolvedDefaultsMode } from "@smithy/smithy-client"; +import type { Provider } from "@smithy/types"; +/** + * @internal + */ +export interface ResolveDefaultsModeConfigOptions { + defaultsMode?: DefaultsMode | Provider; +} +/** + * Validate the defaultsMode configuration. If the value is set to "auto", it + * resolves the value to "mobile" if the app is running in a mobile browser, + * otherwise it resolves to "standard". + * + * @default "legacy" + * @internal + */ +export declare const resolveDefaultsModeConfig: ({ defaultsMode, }?: ResolveDefaultsModeConfigOptions) => Provider; diff --git a/amplify/functions/fetchDocuments/node_modules/@smithy/util-defaults-mode-browser/dist-types/resolveDefaultsModeConfig.native.d.ts b/amplify/functions/fetchDocuments/node_modules/@smithy/util-defaults-mode-browser/dist-types/resolveDefaultsModeConfig.native.d.ts new file mode 100644 index 0000000..6c48ad8 --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@smithy/util-defaults-mode-browser/dist-types/resolveDefaultsModeConfig.native.d.ts @@ -0,0 +1,16 @@ +import type { DefaultsMode, ResolvedDefaultsMode } from "@smithy/smithy-client"; +import type { Provider } from "@smithy/types"; +/** + * @internal + */ +export interface ResolveDefaultsModeConfigOptions { + defaultsMode?: DefaultsMode | Provider; +} +/** + * Validate the defaultsMode configuration. If the value is set to "auto", it + * resolves the value to "mobile". 
+ * + * @default "legacy" + * @internal + */ +export declare const resolveDefaultsModeConfig: ({ defaultsMode, }?: ResolveDefaultsModeConfigOptions) => Provider; diff --git a/amplify/functions/fetchDocuments/node_modules/@smithy/util-defaults-mode-browser/dist-types/ts3.4/constants.d.ts b/amplify/functions/fetchDocuments/node_modules/@smithy/util-defaults-mode-browser/dist-types/ts3.4/constants.d.ts new file mode 100644 index 0000000..fc88602 --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@smithy/util-defaults-mode-browser/dist-types/ts3.4/constants.d.ts @@ -0,0 +1,12 @@ +import { DefaultsMode } from "@smithy/smithy-client"; +import { Provider } from "@smithy/types"; +/** + * @internal + */ +export declare const DEFAULTS_MODE_OPTIONS: string[]; +/** + * @internal + */ +export interface ResolveDefaultsModeConfigOptions { + defaultsMode?: DefaultsMode | Provider; +} diff --git a/amplify/functions/fetchDocuments/node_modules/@smithy/util-defaults-mode-browser/dist-types/ts3.4/index.d.ts b/amplify/functions/fetchDocuments/node_modules/@smithy/util-defaults-mode-browser/dist-types/ts3.4/index.d.ts new file mode 100644 index 0000000..4ab48b4 --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@smithy/util-defaults-mode-browser/dist-types/ts3.4/index.d.ts @@ -0,0 +1,4 @@ +/** + * @internal + */ +export * from "./resolveDefaultsModeConfig"; diff --git a/amplify/functions/fetchDocuments/node_modules/@smithy/util-defaults-mode-browser/dist-types/ts3.4/resolveDefaultsModeConfig.d.ts b/amplify/functions/fetchDocuments/node_modules/@smithy/util-defaults-mode-browser/dist-types/ts3.4/resolveDefaultsModeConfig.d.ts new file mode 100644 index 0000000..d468478 --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@smithy/util-defaults-mode-browser/dist-types/ts3.4/resolveDefaultsModeConfig.d.ts @@ -0,0 +1,17 @@ +import { DefaultsMode, ResolvedDefaultsMode } from "@smithy/smithy-client"; +import { Provider } from "@smithy/types"; +/** + * 
@internal + */ +export interface ResolveDefaultsModeConfigOptions { + defaultsMode?: DefaultsMode | Provider; +} +/** + * Validate the defaultsMode configuration. If the value is set to "auto", it + * resolves the value to "mobile" if the app is running in a mobile browser, + * otherwise it resolves to "standard". + * + * @default "legacy" + * @internal + */ +export declare const resolveDefaultsModeConfig: ({ defaultsMode, }?: ResolveDefaultsModeConfigOptions) => Provider; diff --git a/amplify/functions/fetchDocuments/node_modules/@smithy/util-defaults-mode-browser/dist-types/ts3.4/resolveDefaultsModeConfig.native.d.ts b/amplify/functions/fetchDocuments/node_modules/@smithy/util-defaults-mode-browser/dist-types/ts3.4/resolveDefaultsModeConfig.native.d.ts new file mode 100644 index 0000000..86fe4b7 --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@smithy/util-defaults-mode-browser/dist-types/ts3.4/resolveDefaultsModeConfig.native.d.ts @@ -0,0 +1,16 @@ +import { DefaultsMode, ResolvedDefaultsMode } from "@smithy/smithy-client"; +import { Provider } from "@smithy/types"; +/** + * @internal + */ +export interface ResolveDefaultsModeConfigOptions { + defaultsMode?: DefaultsMode | Provider; +} +/** + * Validate the defaultsMode configuration. If the value is set to "auto", it + * resolves the value to "mobile". 
+ * + * @default "legacy" + * @internal + */ +export declare const resolveDefaultsModeConfig: ({ defaultsMode, }?: ResolveDefaultsModeConfigOptions) => Provider; diff --git a/amplify/functions/fetchDocuments/node_modules/@smithy/util-defaults-mode-browser/package.json b/amplify/functions/fetchDocuments/node_modules/@smithy/util-defaults-mode-browser/package.json new file mode 100644 index 0000000..f06f246 --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@smithy/util-defaults-mode-browser/package.json @@ -0,0 +1,66 @@ +{ + "name": "@smithy/util-defaults-mode-browser", + "version": "4.0.10", + "scripts": { + "build": "concurrently 'yarn:build:cjs' 'yarn:build:es' 'yarn:build:types && yarn build:types:downlevel'", + "build:cjs": "node ../../scripts/inline util-defaults-mode-browser", + "build:es": "yarn g:tsc -p tsconfig.es.json", + "build:types": "yarn g:tsc -p tsconfig.types.json", + "build:types:downlevel": "rimraf dist-types/ts3.4 && downlevel-dts dist-types dist-types/ts3.4", + "stage-release": "rimraf ./.release && yarn pack && mkdir ./.release && tar zxvf ./package.tgz --directory ./.release && rm ./package.tgz", + "clean": "rimraf ./dist-* && rimraf *.tsbuildinfo || exit 0", + "lint": "eslint -c ../../.eslintrc.js \"src/**/*.ts\"", + "format": "prettier --config ../../prettier.config.js --ignore-path ../../.prettierignore --write \"**/*.{ts,md,json}\"", + "test": "yarn g:vitest run", + "test:watch": "yarn g:vitest watch" + }, + "main": "./dist-cjs/index.js", + "module": "./dist-es/index.js", + "types": "./dist-types/index.d.ts", + "author": { + "name": "AWS SDK for JavaScript Team", + "url": "https://aws.amazon.com/javascript/" + }, + "license": "Apache-2.0", + "dependencies": { + "@smithy/property-provider": "^4.0.2", + "@smithy/smithy-client": "^4.2.2", + "@smithy/types": "^4.2.0", + "bowser": "^2.11.0", + "tslib": "^2.6.2" + }, + "devDependencies": { + "@types/node": "^18.11.9", + "concurrently": "7.0.0", + "downlevel-dts": "0.10.1", + 
"rimraf": "3.0.2", + "typedoc": "0.23.23" + }, + "engines": { + "node": ">=18.0.0" + }, + "typesVersions": { + "<4.0": { + "dist-types/*": [ + "dist-types/ts3.4/*" + ] + } + }, + "files": [ + "dist-*/**" + ], + "react-native": {}, + "browser": {}, + "homepage": "https://github.com/smithy-lang/smithy-typescript/tree/main/packages/util-defaults-mode-node", + "repository": { + "type": "git", + "url": "https://github.com/smithy-lang/smithy-typescript.git", + "directory": "packages/util-defaults-mode-node" + }, + "typedoc": { + "entryPoint": "src/index.ts" + }, + "publishConfig": { + "directory": ".release/package" + } +} \ No newline at end of file diff --git a/amplify/functions/fetchDocuments/node_modules/@smithy/util-defaults-mode-node/LICENSE b/amplify/functions/fetchDocuments/node_modules/@smithy/util-defaults-mode-node/LICENSE new file mode 100644 index 0000000..dd65ae0 --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@smithy/util-defaults-mode-node/LICENSE @@ -0,0 +1,201 @@ + Apache License + Version 2.0, January 2004 + http://www.apache.org/licenses/ + + TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION + + 1. Definitions. + + "License" shall mean the terms and conditions for use, reproduction, + and distribution as defined by Sections 1 through 9 of this document. + + "Licensor" shall mean the copyright owner or entity authorized by + the copyright owner that is granting the License. + + "Legal Entity" shall mean the union of the acting entity and all + other entities that control, are controlled by, or are under common + control with that entity. For the purposes of this definition, + "control" means (i) the power, direct or indirect, to cause the + direction or management of such entity, whether by contract or + otherwise, or (ii) ownership of fifty percent (50%) or more of the + outstanding shares, or (iii) beneficial ownership of such entity. 
+ + "You" (or "Your") shall mean an individual or Legal Entity + exercising permissions granted by this License. + + "Source" form shall mean the preferred form for making modifications, + including but not limited to software source code, documentation + source, and configuration files. + + "Object" form shall mean any form resulting from mechanical + transformation or translation of a Source form, including but + not limited to compiled object code, generated documentation, + and conversions to other media types. + + "Work" shall mean the work of authorship, whether in Source or + Object form, made available under the License, as indicated by a + copyright notice that is included in or attached to the work + (an example is provided in the Appendix below). + + "Derivative Works" shall mean any work, whether in Source or Object + form, that is based on (or derived from) the Work and for which the + editorial revisions, annotations, elaborations, or other modifications + represent, as a whole, an original work of authorship. For the purposes + of this License, Derivative Works shall not include works that remain + separable from, or merely link (or bind by name) to the interfaces of, + the Work and Derivative Works thereof. + + "Contribution" shall mean any work of authorship, including + the original version of the Work and any modifications or additions + to that Work or Derivative Works thereof, that is intentionally + submitted to Licensor for inclusion in the Work by the copyright owner + or by an individual or Legal Entity authorized to submit on behalf of + the copyright owner. 
For the purposes of this definition, "submitted" + means any form of electronic, verbal, or written communication sent + to the Licensor or its representatives, including but not limited to + communication on electronic mailing lists, source code control systems, + and issue tracking systems that are managed by, or on behalf of, the + Licensor for the purpose of discussing and improving the Work, but + excluding communication that is conspicuously marked or otherwise + designated in writing by the copyright owner as "Not a Contribution." + + "Contributor" shall mean Licensor and any individual or Legal Entity + on behalf of whom a Contribution has been received by Licensor and + subsequently incorporated within the Work. + + 2. Grant of Copyright License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + copyright license to reproduce, prepare Derivative Works of, + publicly display, publicly perform, sublicense, and distribute the + Work and such Derivative Works in Source or Object form. + + 3. Grant of Patent License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + (except as stated in this section) patent license to make, have made, + use, offer to sell, sell, import, and otherwise transfer the Work, + where such license applies only to those patent claims licensable + by such Contributor that are necessarily infringed by their + Contribution(s) alone or by combination of their Contribution(s) + with the Work to which such Contribution(s) was submitted. 
If You + institute patent litigation against any entity (including a + cross-claim or counterclaim in a lawsuit) alleging that the Work + or a Contribution incorporated within the Work constitutes direct + or contributory patent infringement, then any patent licenses + granted to You under this License for that Work shall terminate + as of the date such litigation is filed. + + 4. Redistribution. You may reproduce and distribute copies of the + Work or Derivative Works thereof in any medium, with or without + modifications, and in Source or Object form, provided that You + meet the following conditions: + + (a) You must give any other recipients of the Work or + Derivative Works a copy of this License; and + + (b) You must cause any modified files to carry prominent notices + stating that You changed the files; and + + (c) You must retain, in the Source form of any Derivative Works + that You distribute, all copyright, patent, trademark, and + attribution notices from the Source form of the Work, + excluding those notices that do not pertain to any part of + the Derivative Works; and + + (d) If the Work includes a "NOTICE" text file as part of its + distribution, then any Derivative Works that You distribute must + include a readable copy of the attribution notices contained + within such NOTICE file, excluding those notices that do not + pertain to any part of the Derivative Works, in at least one + of the following places: within a NOTICE text file distributed + as part of the Derivative Works; within the Source form or + documentation, if provided along with the Derivative Works; or, + within a display generated by the Derivative Works, if and + wherever such third-party notices normally appear. The contents + of the NOTICE file are for informational purposes only and + do not modify the License. 
You may add Your own attribution + notices within Derivative Works that You distribute, alongside + or as an addendum to the NOTICE text from the Work, provided + that such additional attribution notices cannot be construed + as modifying the License. + + You may add Your own copyright statement to Your modifications and + may provide additional or different license terms and conditions + for use, reproduction, or distribution of Your modifications, or + for any such Derivative Works as a whole, provided Your use, + reproduction, and distribution of the Work otherwise complies with + the conditions stated in this License. + + 5. Submission of Contributions. Unless You explicitly state otherwise, + any Contribution intentionally submitted for inclusion in the Work + by You to the Licensor shall be under the terms and conditions of + this License, without any additional terms or conditions. + Notwithstanding the above, nothing herein shall supersede or modify + the terms of any separate license agreement you may have executed + with Licensor regarding such Contributions. + + 6. Trademarks. This License does not grant permission to use the trade + names, trademarks, service marks, or product names of the Licensor, + except as required for reasonable and customary use in describing the + origin of the Work and reproducing the content of the NOTICE file. + + 7. Disclaimer of Warranty. Unless required by applicable law or + agreed to in writing, Licensor provides the Work (and each + Contributor provides its Contributions) on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or + implied, including, without limitation, any warranties or conditions + of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A + PARTICULAR PURPOSE. You are solely responsible for determining the + appropriateness of using or redistributing the Work and assume any + risks associated with Your exercise of permissions under this License. + + 8. 
Limitation of Liability. In no event and under no legal theory, + whether in tort (including negligence), contract, or otherwise, + unless required by applicable law (such as deliberate and grossly + negligent acts) or agreed to in writing, shall any Contributor be + liable to You for damages, including any direct, indirect, special, + incidental, or consequential damages of any character arising as a + result of this License or out of the use or inability to use the + Work (including but not limited to damages for loss of goodwill, + work stoppage, computer failure or malfunction, or any and all + other commercial damages or losses), even if such Contributor + has been advised of the possibility of such damages. + + 9. Accepting Warranty or Additional Liability. While redistributing + the Work or Derivative Works thereof, You may choose to offer, + and charge a fee for, acceptance of support, warranty, indemnity, + or other liability obligations and/or rights consistent with this + License. However, in accepting such obligations, You may act only + on Your own behalf and on Your sole responsibility, not on behalf + of any other Contributor, and only if You agree to indemnify, + defend, and hold each Contributor harmless for any liability + incurred by, or claims asserted against, such Contributor by reason + of your accepting any such warranty or additional liability. + + END OF TERMS AND CONDITIONS + + APPENDIX: How to apply the Apache License to your work. + + To apply the Apache License to your work, attach the following + boilerplate notice, with the fields enclosed by brackets "{}" + replaced with your own identifying information. (Don't include + the brackets!) The text should be enclosed in the appropriate + comment syntax for the file format. We also recommend that a + file or class name and description of purpose be included on the + same "printed page" as the copyright notice for easier + identification within third-party archives. 
+ + Copyright 2018-2020 Amazon.com, Inc. or its affiliates. All Rights Reserved. + + Licensed under the Apache License, Version 2.0 (the "License"); + you may not use this file except in compliance with the License. + You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + + Unless required by applicable law or agreed to in writing, software + distributed under the License is distributed on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + See the License for the specific language governing permissions and + limitations under the License. diff --git a/amplify/functions/fetchDocuments/node_modules/@smithy/util-defaults-mode-node/README.md b/amplify/functions/fetchDocuments/node_modules/@smithy/util-defaults-mode-node/README.md new file mode 100644 index 0000000..bfae0bd --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@smithy/util-defaults-mode-node/README.md @@ -0,0 +1,10 @@ +# @smithy/util-defaults-mode-node + +[![NPM version](https://img.shields.io/npm/v/@smithy/util-defaults-mode-node/latest.svg)](https://www.npmjs.com/package/@smithy/util-defaults-mode-node) +[![NPM downloads](https://img.shields.io/npm/dm/@smithy/util-defaults-mode-node.svg)](https://www.npmjs.com/package/@smithy/util-defaults-mode-node) + +> An internal package + +## Usage + +You probably shouldn't, at least directly. 
diff --git a/amplify/functions/fetchDocuments/node_modules/@smithy/util-defaults-mode-node/dist-cjs/constants.js b/amplify/functions/fetchDocuments/node_modules/@smithy/util-defaults-mode-node/dist-cjs/constants.js new file mode 100644 index 0000000..532e610 --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@smithy/util-defaults-mode-node/dist-cjs/constants.js @@ -0,0 +1 @@ +module.exports = require("./index.js"); \ No newline at end of file diff --git a/amplify/functions/fetchDocuments/node_modules/@smithy/util-defaults-mode-node/dist-cjs/defaultsModeConfig.js b/amplify/functions/fetchDocuments/node_modules/@smithy/util-defaults-mode-node/dist-cjs/defaultsModeConfig.js new file mode 100644 index 0000000..532e610 --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@smithy/util-defaults-mode-node/dist-cjs/defaultsModeConfig.js @@ -0,0 +1 @@ +module.exports = require("./index.js"); \ No newline at end of file diff --git a/amplify/functions/fetchDocuments/node_modules/@smithy/util-defaults-mode-node/dist-cjs/index.js b/amplify/functions/fetchDocuments/node_modules/@smithy/util-defaults-mode-node/dist-cjs/index.js new file mode 100644 index 0000000..ddd0684 --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@smithy/util-defaults-mode-node/dist-cjs/index.js @@ -0,0 +1,119 @@ +var __create = Object.create; +var __defProp = Object.defineProperty; +var __getOwnPropDesc = Object.getOwnPropertyDescriptor; +var __getOwnPropNames = Object.getOwnPropertyNames; +var __getProtoOf = Object.getPrototypeOf; +var __hasOwnProp = Object.prototype.hasOwnProperty; +var __name = (target, value) => __defProp(target, "name", { value, configurable: true }); +var __export = (target, all) => { + for (var name in all) + __defProp(target, name, { get: all[name], enumerable: true }); +}; +var __copyProps = (to, from, except, desc) => { + if (from && typeof from === "object" || typeof from === "function") { + for (let key of __getOwnPropNames(from)) + 
if (!__hasOwnProp.call(to, key) && key !== except) + __defProp(to, key, { get: () => from[key], enumerable: !(desc = __getOwnPropDesc(from, key)) || desc.enumerable }); + } + return to; +}; +var __toESM = (mod, isNodeMode, target) => (target = mod != null ? __create(__getProtoOf(mod)) : {}, __copyProps( + // If the importer is in node compatibility mode or this is not an ESM + // file that has been converted to a CommonJS file using a Babel- + // compatible transform (i.e. "__esModule" has not been set), then set + // "default" to the CommonJS "module.exports" for node compatibility. + isNodeMode || !mod || !mod.__esModule ? __defProp(target, "default", { value: mod, enumerable: true }) : target, + mod +)); +var __toCommonJS = (mod) => __copyProps(__defProp({}, "__esModule", { value: true }), mod); + +// src/index.ts +var src_exports = {}; +__export(src_exports, { + resolveDefaultsModeConfig: () => resolveDefaultsModeConfig +}); +module.exports = __toCommonJS(src_exports); + +// src/resolveDefaultsModeConfig.ts +var import_config_resolver = require("@smithy/config-resolver"); +var import_node_config_provider = require("@smithy/node-config-provider"); +var import_property_provider = require("@smithy/property-provider"); + +// src/constants.ts +var AWS_EXECUTION_ENV = "AWS_EXECUTION_ENV"; +var AWS_REGION_ENV = "AWS_REGION"; +var AWS_DEFAULT_REGION_ENV = "AWS_DEFAULT_REGION"; +var ENV_IMDS_DISABLED = "AWS_EC2_METADATA_DISABLED"; +var DEFAULTS_MODE_OPTIONS = ["in-region", "cross-region", "mobile", "standard", "legacy"]; +var IMDS_REGION_PATH = "/latest/meta-data/placement/region"; + +// src/defaultsModeConfig.ts +var AWS_DEFAULTS_MODE_ENV = "AWS_DEFAULTS_MODE"; +var AWS_DEFAULTS_MODE_CONFIG = "defaults_mode"; +var NODE_DEFAULTS_MODE_CONFIG_OPTIONS = { + environmentVariableSelector: (env) => { + return env[AWS_DEFAULTS_MODE_ENV]; + }, + configFileSelector: (profile) => { + return profile[AWS_DEFAULTS_MODE_CONFIG]; + }, + default: "legacy" +}; + +// 
src/resolveDefaultsModeConfig.ts +var resolveDefaultsModeConfig = /* @__PURE__ */ __name(({ + region = (0, import_node_config_provider.loadConfig)(import_config_resolver.NODE_REGION_CONFIG_OPTIONS), + defaultsMode = (0, import_node_config_provider.loadConfig)(NODE_DEFAULTS_MODE_CONFIG_OPTIONS) +} = {}) => (0, import_property_provider.memoize)(async () => { + const mode = typeof defaultsMode === "function" ? await defaultsMode() : defaultsMode; + switch (mode?.toLowerCase()) { + case "auto": + return resolveNodeDefaultsModeAuto(region); + case "in-region": + case "cross-region": + case "mobile": + case "standard": + case "legacy": + return Promise.resolve(mode?.toLocaleLowerCase()); + case void 0: + return Promise.resolve("legacy"); + default: + throw new Error( + `Invalid parameter for "defaultsMode", expect ${DEFAULTS_MODE_OPTIONS.join(", ")}, got ${mode}` + ); + } +}), "resolveDefaultsModeConfig"); +var resolveNodeDefaultsModeAuto = /* @__PURE__ */ __name(async (clientRegion) => { + if (clientRegion) { + const resolvedRegion = typeof clientRegion === "function" ? await clientRegion() : clientRegion; + const inferredRegion = await inferPhysicalRegion(); + if (!inferredRegion) { + return "standard"; + } + if (resolvedRegion === inferredRegion) { + return "in-region"; + } else { + return "cross-region"; + } + } + return "standard"; +}, "resolveNodeDefaultsModeAuto"); +var inferPhysicalRegion = /* @__PURE__ */ __name(async () => { + if (process.env[AWS_EXECUTION_ENV] && (process.env[AWS_REGION_ENV] || process.env[AWS_DEFAULT_REGION_ENV])) { + return process.env[AWS_REGION_ENV] ?? 
process.env[AWS_DEFAULT_REGION_ENV]; + } + if (!process.env[ENV_IMDS_DISABLED]) { + try { + const { getInstanceMetadataEndpoint, httpRequest } = await Promise.resolve().then(() => __toESM(require("@smithy/credential-provider-imds"))); + const endpoint = await getInstanceMetadataEndpoint(); + return (await httpRequest({ ...endpoint, path: IMDS_REGION_PATH })).toString(); + } catch (e) { + } + } +}, "inferPhysicalRegion"); +// Annotate the CommonJS export names for ESM import in node: + +0 && (module.exports = { + resolveDefaultsModeConfig +}); + diff --git a/amplify/functions/fetchDocuments/node_modules/@smithy/util-defaults-mode-node/dist-cjs/resolveDefaultsModeConfig.js b/amplify/functions/fetchDocuments/node_modules/@smithy/util-defaults-mode-node/dist-cjs/resolveDefaultsModeConfig.js new file mode 100644 index 0000000..532e610 --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@smithy/util-defaults-mode-node/dist-cjs/resolveDefaultsModeConfig.js @@ -0,0 +1 @@ +module.exports = require("./index.js"); \ No newline at end of file diff --git a/amplify/functions/fetchDocuments/node_modules/@smithy/util-defaults-mode-node/dist-es/constants.js b/amplify/functions/fetchDocuments/node_modules/@smithy/util-defaults-mode-node/dist-es/constants.js new file mode 100644 index 0000000..69361a3 --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@smithy/util-defaults-mode-node/dist-es/constants.js @@ -0,0 +1,6 @@ +export const AWS_EXECUTION_ENV = "AWS_EXECUTION_ENV"; +export const AWS_REGION_ENV = "AWS_REGION"; +export const AWS_DEFAULT_REGION_ENV = "AWS_DEFAULT_REGION"; +export const ENV_IMDS_DISABLED = "AWS_EC2_METADATA_DISABLED"; +export const DEFAULTS_MODE_OPTIONS = ["in-region", "cross-region", "mobile", "standard", "legacy"]; +export const IMDS_REGION_PATH = "/latest/meta-data/placement/region"; diff --git a/amplify/functions/fetchDocuments/node_modules/@smithy/util-defaults-mode-node/dist-es/defaultsModeConfig.js 
b/amplify/functions/fetchDocuments/node_modules/@smithy/util-defaults-mode-node/dist-es/defaultsModeConfig.js new file mode 100644 index 0000000..f43b570 --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@smithy/util-defaults-mode-node/dist-es/defaultsModeConfig.js @@ -0,0 +1,11 @@ +const AWS_DEFAULTS_MODE_ENV = "AWS_DEFAULTS_MODE"; +const AWS_DEFAULTS_MODE_CONFIG = "defaults_mode"; +export const NODE_DEFAULTS_MODE_CONFIG_OPTIONS = { + environmentVariableSelector: (env) => { + return env[AWS_DEFAULTS_MODE_ENV]; + }, + configFileSelector: (profile) => { + return profile[AWS_DEFAULTS_MODE_CONFIG]; + }, + default: "legacy", +}; diff --git a/amplify/functions/fetchDocuments/node_modules/@smithy/util-defaults-mode-node/dist-es/index.js b/amplify/functions/fetchDocuments/node_modules/@smithy/util-defaults-mode-node/dist-es/index.js new file mode 100644 index 0000000..05aa818 --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@smithy/util-defaults-mode-node/dist-es/index.js @@ -0,0 +1 @@ +export * from "./resolveDefaultsModeConfig"; diff --git a/amplify/functions/fetchDocuments/node_modules/@smithy/util-defaults-mode-node/dist-es/resolveDefaultsModeConfig.js b/amplify/functions/fetchDocuments/node_modules/@smithy/util-defaults-mode-node/dist-es/resolveDefaultsModeConfig.js new file mode 100644 index 0000000..8c9d050 --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@smithy/util-defaults-mode-node/dist-es/resolveDefaultsModeConfig.js @@ -0,0 +1,52 @@ +import { NODE_REGION_CONFIG_OPTIONS } from "@smithy/config-resolver"; +import { loadConfig } from "@smithy/node-config-provider"; +import { memoize } from "@smithy/property-provider"; +import { AWS_DEFAULT_REGION_ENV, AWS_EXECUTION_ENV, AWS_REGION_ENV, DEFAULTS_MODE_OPTIONS, ENV_IMDS_DISABLED, IMDS_REGION_PATH, } from "./constants"; +import { NODE_DEFAULTS_MODE_CONFIG_OPTIONS } from "./defaultsModeConfig"; +export const resolveDefaultsModeConfig = ({ region = 
loadConfig(NODE_REGION_CONFIG_OPTIONS), defaultsMode = loadConfig(NODE_DEFAULTS_MODE_CONFIG_OPTIONS), } = {}) => memoize(async () => { + const mode = typeof defaultsMode === "function" ? await defaultsMode() : defaultsMode; + switch (mode?.toLowerCase()) { + case "auto": + return resolveNodeDefaultsModeAuto(region); + case "in-region": + case "cross-region": + case "mobile": + case "standard": + case "legacy": + return Promise.resolve(mode?.toLocaleLowerCase()); + case undefined: + return Promise.resolve("legacy"); + default: + throw new Error(`Invalid parameter for "defaultsMode", expect ${DEFAULTS_MODE_OPTIONS.join(", ")}, got ${mode}`); + } +}); +const resolveNodeDefaultsModeAuto = async (clientRegion) => { + if (clientRegion) { + const resolvedRegion = typeof clientRegion === "function" ? await clientRegion() : clientRegion; + const inferredRegion = await inferPhysicalRegion(); + if (!inferredRegion) { + return "standard"; + } + if (resolvedRegion === inferredRegion) { + return "in-region"; + } + else { + return "cross-region"; + } + } + return "standard"; +}; +const inferPhysicalRegion = async () => { + if (process.env[AWS_EXECUTION_ENV] && (process.env[AWS_REGION_ENV] || process.env[AWS_DEFAULT_REGION_ENV])) { + return process.env[AWS_REGION_ENV] ?? 
process.env[AWS_DEFAULT_REGION_ENV]; + } + if (!process.env[ENV_IMDS_DISABLED]) { + try { + const { getInstanceMetadataEndpoint, httpRequest } = await import("@smithy/credential-provider-imds"); + const endpoint = await getInstanceMetadataEndpoint(); + return (await httpRequest({ ...endpoint, path: IMDS_REGION_PATH })).toString(); + } + catch (e) { + } + } +}; diff --git a/amplify/functions/fetchDocuments/node_modules/@smithy/util-defaults-mode-node/dist-types/constants.d.ts b/amplify/functions/fetchDocuments/node_modules/@smithy/util-defaults-mode-node/dist-types/constants.d.ts new file mode 100644 index 0000000..a2db283 --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@smithy/util-defaults-mode-node/dist-types/constants.d.ts @@ -0,0 +1,24 @@ +/** + * @internal + */ +export declare const AWS_EXECUTION_ENV = "AWS_EXECUTION_ENV"; +/** + * @internal + */ +export declare const AWS_REGION_ENV = "AWS_REGION"; +/** + * @internal + */ +export declare const AWS_DEFAULT_REGION_ENV = "AWS_DEFAULT_REGION"; +/** + * @internal + */ +export declare const ENV_IMDS_DISABLED = "AWS_EC2_METADATA_DISABLED"; +/** + * @internal + */ +export declare const DEFAULTS_MODE_OPTIONS: string[]; +/** + * @internal + */ +export declare const IMDS_REGION_PATH = "/latest/meta-data/placement/region"; diff --git a/amplify/functions/fetchDocuments/node_modules/@smithy/util-defaults-mode-node/dist-types/defaultsModeConfig.d.ts b/amplify/functions/fetchDocuments/node_modules/@smithy/util-defaults-mode-node/dist-types/defaultsModeConfig.d.ts new file mode 100644 index 0000000..12f4dae --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@smithy/util-defaults-mode-node/dist-types/defaultsModeConfig.d.ts @@ -0,0 +1,6 @@ +import { LoadedConfigSelectors } from "@smithy/node-config-provider"; +import type { DefaultsMode } from "@smithy/smithy-client"; +/** + * @internal + */ +export declare const NODE_DEFAULTS_MODE_CONFIG_OPTIONS: LoadedConfigSelectors; diff --git 
a/amplify/functions/fetchDocuments/node_modules/@smithy/util-defaults-mode-node/dist-types/index.d.ts b/amplify/functions/fetchDocuments/node_modules/@smithy/util-defaults-mode-node/dist-types/index.d.ts new file mode 100644 index 0000000..003de26 --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@smithy/util-defaults-mode-node/dist-types/index.d.ts @@ -0,0 +1,4 @@ +/** + * @internal + */ +export * from "./resolveDefaultsModeConfig"; diff --git a/amplify/functions/fetchDocuments/node_modules/@smithy/util-defaults-mode-node/dist-types/resolveDefaultsModeConfig.d.ts b/amplify/functions/fetchDocuments/node_modules/@smithy/util-defaults-mode-node/dist-types/resolveDefaultsModeConfig.d.ts new file mode 100644 index 0000000..8f34371 --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@smithy/util-defaults-mode-node/dist-types/resolveDefaultsModeConfig.d.ts @@ -0,0 +1,17 @@ +import type { DefaultsMode, ResolvedDefaultsMode } from "@smithy/smithy-client"; +import type { Provider } from "@smithy/types"; +/** + * @internal + */ +export interface ResolveDefaultsModeConfigOptions { + defaultsMode?: DefaultsMode | Provider; + region?: string | Provider; +} +/** + * Validate the defaultsMode configuration. If the value is set to "auto", it + * resolves the value to "in-region", "cross-region", or "standard". 
+ * + * @default "legacy" + * @internal + */ +export declare const resolveDefaultsModeConfig: ({ region, defaultsMode, }?: ResolveDefaultsModeConfigOptions) => Provider; diff --git a/amplify/functions/fetchDocuments/node_modules/@smithy/util-defaults-mode-node/dist-types/ts3.4/constants.d.ts b/amplify/functions/fetchDocuments/node_modules/@smithy/util-defaults-mode-node/dist-types/ts3.4/constants.d.ts new file mode 100644 index 0000000..b847dc2 --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@smithy/util-defaults-mode-node/dist-types/ts3.4/constants.d.ts @@ -0,0 +1,24 @@ +/** + * @internal + */ +export declare const AWS_EXECUTION_ENV = "AWS_EXECUTION_ENV"; +/** + * @internal + */ +export declare const AWS_REGION_ENV = "AWS_REGION"; +/** + * @internal + */ +export declare const AWS_DEFAULT_REGION_ENV = "AWS_DEFAULT_REGION"; +/** + * @internal + */ +export declare const ENV_IMDS_DISABLED = "AWS_EC2_METADATA_DISABLED"; +/** + * @internal + */ +export declare const DEFAULTS_MODE_OPTIONS: string[]; +/** + * @internal + */ +export declare const IMDS_REGION_PATH = "/latest/meta-data/placement/region"; diff --git a/amplify/functions/fetchDocuments/node_modules/@smithy/util-defaults-mode-node/dist-types/ts3.4/defaultsModeConfig.d.ts b/amplify/functions/fetchDocuments/node_modules/@smithy/util-defaults-mode-node/dist-types/ts3.4/defaultsModeConfig.d.ts new file mode 100644 index 0000000..76c3d0d --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@smithy/util-defaults-mode-node/dist-types/ts3.4/defaultsModeConfig.d.ts @@ -0,0 +1,6 @@ +import { LoadedConfigSelectors } from "@smithy/node-config-provider"; +import { DefaultsMode } from "@smithy/smithy-client"; +/** + * @internal + */ +export declare const NODE_DEFAULTS_MODE_CONFIG_OPTIONS: LoadedConfigSelectors; diff --git a/amplify/functions/fetchDocuments/node_modules/@smithy/util-defaults-mode-node/dist-types/ts3.4/index.d.ts 
b/amplify/functions/fetchDocuments/node_modules/@smithy/util-defaults-mode-node/dist-types/ts3.4/index.d.ts new file mode 100644 index 0000000..4ab48b4 --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@smithy/util-defaults-mode-node/dist-types/ts3.4/index.d.ts @@ -0,0 +1,4 @@ +/** + * @internal + */ +export * from "./resolveDefaultsModeConfig"; diff --git a/amplify/functions/fetchDocuments/node_modules/@smithy/util-defaults-mode-node/dist-types/ts3.4/resolveDefaultsModeConfig.d.ts b/amplify/functions/fetchDocuments/node_modules/@smithy/util-defaults-mode-node/dist-types/ts3.4/resolveDefaultsModeConfig.d.ts new file mode 100644 index 0000000..4daa927 --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@smithy/util-defaults-mode-node/dist-types/ts3.4/resolveDefaultsModeConfig.d.ts @@ -0,0 +1,17 @@ +import { DefaultsMode, ResolvedDefaultsMode } from "@smithy/smithy-client"; +import { Provider } from "@smithy/types"; +/** + * @internal + */ +export interface ResolveDefaultsModeConfigOptions { + defaultsMode?: DefaultsMode | Provider; + region?: string | Provider; +} +/** + * Validate the defaultsMode configuration. If the value is set to "auto", it + * resolves the value to "in-region", "cross-region", or "standard". 
+ * + * @default "legacy" + * @internal + */ +export declare const resolveDefaultsModeConfig: ({ region, defaultsMode, }?: ResolveDefaultsModeConfigOptions) => Provider; diff --git a/amplify/functions/fetchDocuments/node_modules/@smithy/util-defaults-mode-node/package.json b/amplify/functions/fetchDocuments/node_modules/@smithy/util-defaults-mode-node/package.json new file mode 100644 index 0000000..0a69079 --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@smithy/util-defaults-mode-node/package.json @@ -0,0 +1,66 @@ +{ + "name": "@smithy/util-defaults-mode-node", + "version": "4.0.10", + "scripts": { + "build": "concurrently 'yarn:build:cjs' 'yarn:build:es' 'yarn:build:types && yarn build:types:downlevel'", + "build:cjs": "node ../../scripts/inline util-defaults-mode-node", + "build:es": "yarn g:tsc -p tsconfig.es.json", + "build:types": "yarn g:tsc -p tsconfig.types.json", + "build:types:downlevel": "rimraf dist-types/ts3.4 && downlevel-dts dist-types dist-types/ts3.4", + "stage-release": "rimraf ./.release && yarn pack && mkdir ./.release && tar zxvf ./package.tgz --directory ./.release && rm ./package.tgz", + "clean": "rimraf ./dist-* && rimraf *.tsbuildinfo || exit 0", + "lint": "eslint -c ../../.eslintrc.js \"src/**/*.ts\"", + "format": "prettier --config ../../prettier.config.js --ignore-path ../../.prettierignore --write \"**/*.{ts,md,json}\"", + "test": "yarn g:vitest run", + "test:watch": "yarn g:vitest watch" + }, + "main": "./dist-cjs/index.js", + "module": "./dist-es/index.js", + "types": "./dist-types/index.d.ts", + "author": { + "name": "AWS SDK for JavaScript Team", + "url": "https://aws.amazon.com/javascript/" + }, + "license": "Apache-2.0", + "dependencies": { + "@smithy/config-resolver": "^4.1.0", + "@smithy/credential-provider-imds": "^4.0.2", + "@smithy/node-config-provider": "^4.0.2", + "@smithy/property-provider": "^4.0.2", + "@smithy/smithy-client": "^4.2.2", + "@smithy/types": "^4.2.0", + "tslib": "^2.6.2" + }, + 
"devDependencies": { + "@types/node": "^18.11.9", + "concurrently": "7.0.0", + "downlevel-dts": "0.10.1", + "rimraf": "3.0.2", + "typedoc": "0.23.23" + }, + "engines": { + "node": ">=18.0.0" + }, + "typesVersions": { + "<4.0": { + "dist-types/*": [ + "dist-types/ts3.4/*" + ] + } + }, + "files": [ + "dist-*/**" + ], + "homepage": "https://github.com/smithy-lang/smithy-typescript/tree/main/packages/util-defaults-mode-node", + "repository": { + "type": "git", + "url": "https://github.com/smithy-lang/smithy-typescript.git", + "directory": "packages/util-defaults-mode-node" + }, + "typedoc": { + "entryPoint": "src/index.ts" + }, + "publishConfig": { + "directory": ".release/package" + } +} \ No newline at end of file diff --git a/amplify/functions/fetchDocuments/node_modules/@smithy/util-endpoints/LICENSE b/amplify/functions/fetchDocuments/node_modules/@smithy/util-endpoints/LICENSE new file mode 100644 index 0000000..a1895fa --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@smithy/util-endpoints/LICENSE @@ -0,0 +1,201 @@ +Apache License + Version 2.0, January 2004 + http://www.apache.org/licenses/ + + TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION + + 1. Definitions. + + "License" shall mean the terms and conditions for use, reproduction, + and distribution as defined by Sections 1 through 9 of this document. + + "Licensor" shall mean the copyright owner or entity authorized by + the copyright owner that is granting the License. + + "Legal Entity" shall mean the union of the acting entity and all + other entities that control, are controlled by, or are under common + control with that entity. For the purposes of this definition, + "control" means (i) the power, direct or indirect, to cause the + direction or management of such entity, whether by contract or + otherwise, or (ii) ownership of fifty percent (50%) or more of the + outstanding shares, or (iii) beneficial ownership of such entity. 
+ + "You" (or "Your") shall mean an individual or Legal Entity + exercising permissions granted by this License. + + "Source" form shall mean the preferred form for making modifications, + including but not limited to software source code, documentation + source, and configuration files. + + "Object" form shall mean any form resulting from mechanical + transformation or translation of a Source form, including but + not limited to compiled object code, generated documentation, + and conversions to other media types. + + "Work" shall mean the work of authorship, whether in Source or + Object form, made available under the License, as indicated by a + copyright notice that is included in or attached to the work + (an example is provided in the Appendix below). + + "Derivative Works" shall mean any work, whether in Source or Object + form, that is based on (or derived from) the Work and for which the + editorial revisions, annotations, elaborations, or other modifications + represent, as a whole, an original work of authorship. For the purposes + of this License, Derivative Works shall not include works that remain + separable from, or merely link (or bind by name) to the interfaces of, + the Work and Derivative Works thereof. + + "Contribution" shall mean any work of authorship, including + the original version of the Work and any modifications or additions + to that Work or Derivative Works thereof, that is intentionally + submitted to Licensor for inclusion in the Work by the copyright owner + or by an individual or Legal Entity authorized to submit on behalf of + the copyright owner. 
For the purposes of this definition, "submitted" + means any form of electronic, verbal, or written communication sent + to the Licensor or its representatives, including but not limited to + communication on electronic mailing lists, source code control systems, + and issue tracking systems that are managed by, or on behalf of, the + Licensor for the purpose of discussing and improving the Work, but + excluding communication that is conspicuously marked or otherwise + designated in writing by the copyright owner as "Not a Contribution." + + "Contributor" shall mean Licensor and any individual or Legal Entity + on behalf of whom a Contribution has been received by Licensor and + subsequently incorporated within the Work. + + 2. Grant of Copyright License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + copyright license to reproduce, prepare Derivative Works of, + publicly display, publicly perform, sublicense, and distribute the + Work and such Derivative Works in Source or Object form. + + 3. Grant of Patent License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + (except as stated in this section) patent license to make, have made, + use, offer to sell, sell, import, and otherwise transfer the Work, + where such license applies only to those patent claims licensable + by such Contributor that are necessarily infringed by their + Contribution(s) alone or by combination of their Contribution(s) + with the Work to which such Contribution(s) was submitted. 
If You + institute patent litigation against any entity (including a + cross-claim or counterclaim in a lawsuit) alleging that the Work + or a Contribution incorporated within the Work constitutes direct + or contributory patent infringement, then any patent licenses + granted to You under this License for that Work shall terminate + as of the date such litigation is filed. + + 4. Redistribution. You may reproduce and distribute copies of the + Work or Derivative Works thereof in any medium, with or without + modifications, and in Source or Object form, provided that You + meet the following conditions: + + (a) You must give any other recipients of the Work or + Derivative Works a copy of this License; and + + (b) You must cause any modified files to carry prominent notices + stating that You changed the files; and + + (c) You must retain, in the Source form of any Derivative Works + that You distribute, all copyright, patent, trademark, and + attribution notices from the Source form of the Work, + excluding those notices that do not pertain to any part of + the Derivative Works; and + + (d) If the Work includes a "NOTICE" text file as part of its + distribution, then any Derivative Works that You distribute must + include a readable copy of the attribution notices contained + within such NOTICE file, excluding those notices that do not + pertain to any part of the Derivative Works, in at least one + of the following places: within a NOTICE text file distributed + as part of the Derivative Works; within the Source form or + documentation, if provided along with the Derivative Works; or, + within a display generated by the Derivative Works, if and + wherever such third-party notices normally appear. The contents + of the NOTICE file are for informational purposes only and + do not modify the License. 
You may add Your own attribution + notices within Derivative Works that You distribute, alongside + or as an addendum to the NOTICE text from the Work, provided + that such additional attribution notices cannot be construed + as modifying the License. + + You may add Your own copyright statement to Your modifications and + may provide additional or different license terms and conditions + for use, reproduction, or distribution of Your modifications, or + for any such Derivative Works as a whole, provided Your use, + reproduction, and distribution of the Work otherwise complies with + the conditions stated in this License. + + 5. Submission of Contributions. Unless You explicitly state otherwise, + any Contribution intentionally submitted for inclusion in the Work + by You to the Licensor shall be under the terms and conditions of + this License, without any additional terms or conditions. + Notwithstanding the above, nothing herein shall supersede or modify + the terms of any separate license agreement you may have executed + with Licensor regarding such Contributions. + + 6. Trademarks. This License does not grant permission to use the trade + names, trademarks, service marks, or product names of the Licensor, + except as required for reasonable and customary use in describing the + origin of the Work and reproducing the content of the NOTICE file. + + 7. Disclaimer of Warranty. Unless required by applicable law or + agreed to in writing, Licensor provides the Work (and each + Contributor provides its Contributions) on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or + implied, including, without limitation, any warranties or conditions + of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A + PARTICULAR PURPOSE. You are solely responsible for determining the + appropriateness of using or redistributing the Work and assume any + risks associated with Your exercise of permissions under this License. + + 8. 
Limitation of Liability. In no event and under no legal theory, + whether in tort (including negligence), contract, or otherwise, + unless required by applicable law (such as deliberate and grossly + negligent acts) or agreed to in writing, shall any Contributor be + liable to You for damages, including any direct, indirect, special, + incidental, or consequential damages of any character arising as a + result of this License or out of the use or inability to use the + Work (including but not limited to damages for loss of goodwill, + work stoppage, computer failure or malfunction, or any and all + other commercial damages or losses), even if such Contributor + has been advised of the possibility of such damages. + + 9. Accepting Warranty or Additional Liability. While redistributing + the Work or Derivative Works thereof, You may choose to offer, + and charge a fee for, acceptance of support, warranty, indemnity, + or other liability obligations and/or rights consistent with this + License. However, in accepting such obligations, You may act only + on Your own behalf and on Your sole responsibility, not on behalf + of any other Contributor, and only if You agree to indemnify, + defend, and hold each Contributor harmless for any liability + incurred by, or claims asserted against, such Contributor by reason + of your accepting any such warranty or additional liability. + + END OF TERMS AND CONDITIONS + + APPENDIX: How to apply the Apache License to your work. + + To apply the Apache License to your work, attach the following + boilerplate notice, with the fields enclosed by brackets "{}" + replaced with your own identifying information. (Don't include + the brackets!) The text should be enclosed in the appropriate + comment syntax for the file format. We also recommend that a + file or class name and description of purpose be included on the + same "printed page" as the copyright notice for easier + identification within third-party archives. 
+ + Copyright 2021 Amazon.com, Inc. or its affiliates. All Rights Reserved. + + Licensed under the Apache License, Version 2.0 (the "License"); + you may not use this file except in compliance with the License. + You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + + Unless required by applicable law or agreed to in writing, software + distributed under the License is distributed on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + See the License for the specific language governing permissions and + limitations under the License. \ No newline at end of file diff --git a/amplify/functions/fetchDocuments/node_modules/@smithy/util-endpoints/README.md b/amplify/functions/fetchDocuments/node_modules/@smithy/util-endpoints/README.md new file mode 100644 index 0000000..85d60b3 --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@smithy/util-endpoints/README.md @@ -0,0 +1,10 @@ +# @smithy/util-endpoints + +[![NPM version](https://img.shields.io/npm/v/@smithy/util-endpoints/latest.svg)](https://www.npmjs.com/package/@smithy/util-endpoints) +[![NPM downloads](https://img.shields.io/npm/dm/@smithy/util-endpoints.svg)](https://www.npmjs.com/package/@smithy/util-endpoints) + +> An internal package + +## Usage + +You probably shouldn't, at least directly. 
diff --git a/amplify/functions/fetchDocuments/node_modules/@smithy/util-endpoints/dist-cjs/cache/EndpointCache.js b/amplify/functions/fetchDocuments/node_modules/@smithy/util-endpoints/dist-cjs/cache/EndpointCache.js new file mode 100644 index 0000000..0440577 --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@smithy/util-endpoints/dist-cjs/cache/EndpointCache.js @@ -0,0 +1 @@ +module.exports = require("../index.js"); \ No newline at end of file diff --git a/amplify/functions/fetchDocuments/node_modules/@smithy/util-endpoints/dist-cjs/debug/debugId.js b/amplify/functions/fetchDocuments/node_modules/@smithy/util-endpoints/dist-cjs/debug/debugId.js new file mode 100644 index 0000000..0440577 --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@smithy/util-endpoints/dist-cjs/debug/debugId.js @@ -0,0 +1 @@ +module.exports = require("../index.js"); \ No newline at end of file diff --git a/amplify/functions/fetchDocuments/node_modules/@smithy/util-endpoints/dist-cjs/debug/index.js b/amplify/functions/fetchDocuments/node_modules/@smithy/util-endpoints/dist-cjs/debug/index.js new file mode 100644 index 0000000..0440577 --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@smithy/util-endpoints/dist-cjs/debug/index.js @@ -0,0 +1 @@ +module.exports = require("../index.js"); \ No newline at end of file diff --git a/amplify/functions/fetchDocuments/node_modules/@smithy/util-endpoints/dist-cjs/debug/toDebugString.js b/amplify/functions/fetchDocuments/node_modules/@smithy/util-endpoints/dist-cjs/debug/toDebugString.js new file mode 100644 index 0000000..0440577 --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@smithy/util-endpoints/dist-cjs/debug/toDebugString.js @@ -0,0 +1 @@ +module.exports = require("../index.js"); \ No newline at end of file diff --git a/amplify/functions/fetchDocuments/node_modules/@smithy/util-endpoints/dist-cjs/getEndpointUrlConfig.js 
b/amplify/functions/fetchDocuments/node_modules/@smithy/util-endpoints/dist-cjs/getEndpointUrlConfig.js new file mode 100644 index 0000000..532e610 --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@smithy/util-endpoints/dist-cjs/getEndpointUrlConfig.js @@ -0,0 +1 @@ +module.exports = require("./index.js"); \ No newline at end of file diff --git a/amplify/functions/fetchDocuments/node_modules/@smithy/util-endpoints/dist-cjs/index.js b/amplify/functions/fetchDocuments/node_modules/@smithy/util-endpoints/dist-cjs/index.js new file mode 100644 index 0000000..3bc5a7d --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@smithy/util-endpoints/dist-cjs/index.js @@ -0,0 +1,544 @@ +var __defProp = Object.defineProperty; +var __getOwnPropDesc = Object.getOwnPropertyDescriptor; +var __getOwnPropNames = Object.getOwnPropertyNames; +var __hasOwnProp = Object.prototype.hasOwnProperty; +var __name = (target, value) => __defProp(target, "name", { value, configurable: true }); +var __export = (target, all) => { + for (var name in all) + __defProp(target, name, { get: all[name], enumerable: true }); +}; +var __copyProps = (to, from, except, desc) => { + if (from && typeof from === "object" || typeof from === "function") { + for (let key of __getOwnPropNames(from)) + if (!__hasOwnProp.call(to, key) && key !== except) + __defProp(to, key, { get: () => from[key], enumerable: !(desc = __getOwnPropDesc(from, key)) || desc.enumerable }); + } + return to; +}; +var __toCommonJS = (mod) => __copyProps(__defProp({}, "__esModule", { value: true }), mod); + +// src/index.ts +var src_exports = {}; +__export(src_exports, { + EndpointCache: () => EndpointCache, + EndpointError: () => EndpointError, + customEndpointFunctions: () => customEndpointFunctions, + isIpAddress: () => isIpAddress, + isValidHostLabel: () => isValidHostLabel, + resolveEndpoint: () => resolveEndpoint +}); +module.exports = __toCommonJS(src_exports); + +// src/cache/EndpointCache.ts +var 
EndpointCache = class { + /** + * @param [size] - desired average maximum capacity. A buffer of 10 additional keys will be allowed + * before keys are dropped. + * @param [params] - list of params to consider as part of the cache key. + * + * If the params list is not populated, no caching will happen. + * This may be out of order depending on how the object is created and arrives to this class. + */ + constructor({ size, params }) { + this.data = /* @__PURE__ */ new Map(); + this.parameters = []; + this.capacity = size ?? 50; + if (params) { + this.parameters = params; + } + } + static { + __name(this, "EndpointCache"); + } + /** + * @param endpointParams - query for endpoint. + * @param resolver - provider of the value if not present. + * @returns endpoint corresponding to the query. + */ + get(endpointParams, resolver) { + const key = this.hash(endpointParams); + if (key === false) { + return resolver(); + } + if (!this.data.has(key)) { + if (this.data.size > this.capacity + 10) { + const keys = this.data.keys(); + let i = 0; + while (true) { + const { value, done } = keys.next(); + this.data.delete(value); + if (done || ++i > 10) { + break; + } + } + } + this.data.set(key, resolver()); + } + return this.data.get(key); + } + size() { + return this.data.size; + } + /** + * @returns cache key or false if not cachable. + */ + hash(endpointParams) { + let buffer = ""; + const { parameters } = this; + if (parameters.length === 0) { + return false; + } + for (const param of parameters) { + const val = String(endpointParams[param] ?? 
""); + if (val.includes("|;")) { + return false; + } + buffer += val + "|;"; + } + return buffer; + } +}; + +// src/lib/isIpAddress.ts +var IP_V4_REGEX = new RegExp( + `^(?:25[0-5]|2[0-4]\\d|1\\d\\d|[1-9]\\d|\\d)(?:\\.(?:25[0-5]|2[0-4]\\d|1\\d\\d|[1-9]\\d|\\d)){3}$` +); +var isIpAddress = /* @__PURE__ */ __name((value) => IP_V4_REGEX.test(value) || value.startsWith("[") && value.endsWith("]"), "isIpAddress"); + +// src/lib/isValidHostLabel.ts +var VALID_HOST_LABEL_REGEX = new RegExp(`^(?!.*-$)(?!-)[a-zA-Z0-9-]{1,63}$`); +var isValidHostLabel = /* @__PURE__ */ __name((value, allowSubDomains = false) => { + if (!allowSubDomains) { + return VALID_HOST_LABEL_REGEX.test(value); + } + const labels = value.split("."); + for (const label of labels) { + if (!isValidHostLabel(label)) { + return false; + } + } + return true; +}, "isValidHostLabel"); + +// src/utils/customEndpointFunctions.ts +var customEndpointFunctions = {}; + +// src/debug/debugId.ts +var debugId = "endpoints"; + +// src/debug/toDebugString.ts +function toDebugString(input) { + if (typeof input !== "object" || input == null) { + return input; + } + if ("ref" in input) { + return `$${toDebugString(input.ref)}`; + } + if ("fn" in input) { + return `${input.fn}(${(input.argv || []).map(toDebugString).join(", ")})`; + } + return JSON.stringify(input, null, 2); +} +__name(toDebugString, "toDebugString"); + +// src/types/EndpointError.ts +var EndpointError = class extends Error { + static { + __name(this, "EndpointError"); + } + constructor(message) { + super(message); + this.name = "EndpointError"; + } +}; + +// src/lib/booleanEquals.ts +var booleanEquals = /* @__PURE__ */ __name((value1, value2) => value1 === value2, "booleanEquals"); + +// src/lib/getAttrPathList.ts +var getAttrPathList = /* @__PURE__ */ __name((path) => { + const parts = path.split("."); + const pathList = []; + for (const part of parts) { + const squareBracketIndex = part.indexOf("["); + if (squareBracketIndex !== -1) { + if 
(part.indexOf("]") !== part.length - 1) { + throw new EndpointError(`Path: '${path}' does not end with ']'`); + } + const arrayIndex = part.slice(squareBracketIndex + 1, -1); + if (Number.isNaN(parseInt(arrayIndex))) { + throw new EndpointError(`Invalid array index: '${arrayIndex}' in path: '${path}'`); + } + if (squareBracketIndex !== 0) { + pathList.push(part.slice(0, squareBracketIndex)); + } + pathList.push(arrayIndex); + } else { + pathList.push(part); + } + } + return pathList; +}, "getAttrPathList"); + +// src/lib/getAttr.ts +var getAttr = /* @__PURE__ */ __name((value, path) => getAttrPathList(path).reduce((acc, index) => { + if (typeof acc !== "object") { + throw new EndpointError(`Index '${index}' in '${path}' not found in '${JSON.stringify(value)}'`); + } else if (Array.isArray(acc)) { + return acc[parseInt(index)]; + } + return acc[index]; +}, value), "getAttr"); + +// src/lib/isSet.ts +var isSet = /* @__PURE__ */ __name((value) => value != null, "isSet"); + +// src/lib/not.ts +var not = /* @__PURE__ */ __name((value) => !value, "not"); + +// src/lib/parseURL.ts +var import_types3 = require("@smithy/types"); +var DEFAULT_PORTS = { + [import_types3.EndpointURLScheme.HTTP]: 80, + [import_types3.EndpointURLScheme.HTTPS]: 443 +}; +var parseURL = /* @__PURE__ */ __name((value) => { + const whatwgURL = (() => { + try { + if (value instanceof URL) { + return value; + } + if (typeof value === "object" && "hostname" in value) { + const { hostname: hostname2, port, protocol: protocol2 = "", path = "", query = {} } = value; + const url = new URL(`${protocol2}//${hostname2}${port ? 
`:${port}` : ""}${path}`); + url.search = Object.entries(query).map(([k, v]) => `${k}=${v}`).join("&"); + return url; + } + return new URL(value); + } catch (error) { + return null; + } + })(); + if (!whatwgURL) { + console.error(`Unable to parse ${JSON.stringify(value)} as a whatwg URL.`); + return null; + } + const urlString = whatwgURL.href; + const { host, hostname, pathname, protocol, search } = whatwgURL; + if (search) { + return null; + } + const scheme = protocol.slice(0, -1); + if (!Object.values(import_types3.EndpointURLScheme).includes(scheme)) { + return null; + } + const isIp = isIpAddress(hostname); + const inputContainsDefaultPort = urlString.includes(`${host}:${DEFAULT_PORTS[scheme]}`) || typeof value === "string" && value.includes(`${host}:${DEFAULT_PORTS[scheme]}`); + const authority = `${host}${inputContainsDefaultPort ? `:${DEFAULT_PORTS[scheme]}` : ``}`; + return { + scheme, + authority, + path: pathname, + normalizedPath: pathname.endsWith("/") ? pathname : `${pathname}/`, + isIp + }; +}, "parseURL"); + +// src/lib/stringEquals.ts +var stringEquals = /* @__PURE__ */ __name((value1, value2) => value1 === value2, "stringEquals"); + +// src/lib/substring.ts +var substring = /* @__PURE__ */ __name((input, start, stop, reverse) => { + if (start >= stop || input.length < stop) { + return null; + } + if (!reverse) { + return input.substring(start, stop); + } + return input.substring(input.length - stop, input.length - start); +}, "substring"); + +// src/lib/uriEncode.ts +var uriEncode = /* @__PURE__ */ __name((value) => encodeURIComponent(value).replace(/[!*'()]/g, (c) => `%${c.charCodeAt(0).toString(16).toUpperCase()}`), "uriEncode"); + +// src/utils/endpointFunctions.ts +var endpointFunctions = { + booleanEquals, + getAttr, + isSet, + isValidHostLabel, + not, + parseURL, + stringEquals, + substring, + uriEncode +}; + +// src/utils/evaluateTemplate.ts +var evaluateTemplate = /* @__PURE__ */ __name((template, options) => { + const 
evaluatedTemplateArr = []; + const templateContext = { + ...options.endpointParams, + ...options.referenceRecord + }; + let currentIndex = 0; + while (currentIndex < template.length) { + const openingBraceIndex = template.indexOf("{", currentIndex); + if (openingBraceIndex === -1) { + evaluatedTemplateArr.push(template.slice(currentIndex)); + break; + } + evaluatedTemplateArr.push(template.slice(currentIndex, openingBraceIndex)); + const closingBraceIndex = template.indexOf("}", openingBraceIndex); + if (closingBraceIndex === -1) { + evaluatedTemplateArr.push(template.slice(openingBraceIndex)); + break; + } + if (template[openingBraceIndex + 1] === "{" && template[closingBraceIndex + 1] === "}") { + evaluatedTemplateArr.push(template.slice(openingBraceIndex + 1, closingBraceIndex)); + currentIndex = closingBraceIndex + 2; + } + const parameterName = template.substring(openingBraceIndex + 1, closingBraceIndex); + if (parameterName.includes("#")) { + const [refName, attrName] = parameterName.split("#"); + evaluatedTemplateArr.push(getAttr(templateContext[refName], attrName)); + } else { + evaluatedTemplateArr.push(templateContext[parameterName]); + } + currentIndex = closingBraceIndex + 1; + } + return evaluatedTemplateArr.join(""); +}, "evaluateTemplate"); + +// src/utils/getReferenceValue.ts +var getReferenceValue = /* @__PURE__ */ __name(({ ref }, options) => { + const referenceRecord = { + ...options.endpointParams, + ...options.referenceRecord + }; + return referenceRecord[ref]; +}, "getReferenceValue"); + +// src/utils/evaluateExpression.ts +var evaluateExpression = /* @__PURE__ */ __name((obj, keyName, options) => { + if (typeof obj === "string") { + return evaluateTemplate(obj, options); + } else if (obj["fn"]) { + return callFunction(obj, options); + } else if (obj["ref"]) { + return getReferenceValue(obj, options); + } + throw new EndpointError(`'${keyName}': ${String(obj)} is not a string, function or reference.`); +}, "evaluateExpression"); + +// 
src/utils/callFunction.ts +var callFunction = /* @__PURE__ */ __name(({ fn, argv }, options) => { + const evaluatedArgs = argv.map( + (arg) => ["boolean", "number"].includes(typeof arg) ? arg : evaluateExpression(arg, "arg", options) + ); + const fnSegments = fn.split("."); + if (fnSegments[0] in customEndpointFunctions && fnSegments[1] != null) { + return customEndpointFunctions[fnSegments[0]][fnSegments[1]](...evaluatedArgs); + } + return endpointFunctions[fn](...evaluatedArgs); +}, "callFunction"); + +// src/utils/evaluateCondition.ts +var evaluateCondition = /* @__PURE__ */ __name(({ assign, ...fnArgs }, options) => { + if (assign && assign in options.referenceRecord) { + throw new EndpointError(`'${assign}' is already defined in Reference Record.`); + } + const value = callFunction(fnArgs, options); + options.logger?.debug?.(`${debugId} evaluateCondition: ${toDebugString(fnArgs)} = ${toDebugString(value)}`); + return { + result: value === "" ? true : !!value, + ...assign != null && { toAssign: { name: assign, value } } + }; +}, "evaluateCondition"); + +// src/utils/evaluateConditions.ts +var evaluateConditions = /* @__PURE__ */ __name((conditions = [], options) => { + const conditionsReferenceRecord = {}; + for (const condition of conditions) { + const { result, toAssign } = evaluateCondition(condition, { + ...options, + referenceRecord: { + ...options.referenceRecord, + ...conditionsReferenceRecord + } + }); + if (!result) { + return { result }; + } + if (toAssign) { + conditionsReferenceRecord[toAssign.name] = toAssign.value; + options.logger?.debug?.(`${debugId} assign: ${toAssign.name} := ${toDebugString(toAssign.value)}`); + } + } + return { result: true, referenceRecord: conditionsReferenceRecord }; +}, "evaluateConditions"); + +// src/utils/getEndpointHeaders.ts +var getEndpointHeaders = /* @__PURE__ */ __name((headers, options) => Object.entries(headers).reduce( + (acc, [headerKey, headerVal]) => ({ + ...acc, + [headerKey]: 
headerVal.map((headerValEntry) => { + const processedExpr = evaluateExpression(headerValEntry, "Header value entry", options); + if (typeof processedExpr !== "string") { + throw new EndpointError(`Header '${headerKey}' value '${processedExpr}' is not a string`); + } + return processedExpr; + }) + }), + {} +), "getEndpointHeaders"); + +// src/utils/getEndpointProperty.ts +var getEndpointProperty = /* @__PURE__ */ __name((property, options) => { + if (Array.isArray(property)) { + return property.map((propertyEntry) => getEndpointProperty(propertyEntry, options)); + } + switch (typeof property) { + case "string": + return evaluateTemplate(property, options); + case "object": + if (property === null) { + throw new EndpointError(`Unexpected endpoint property: ${property}`); + } + return getEndpointProperties(property, options); + case "boolean": + return property; + default: + throw new EndpointError(`Unexpected endpoint property type: ${typeof property}`); + } +}, "getEndpointProperty"); + +// src/utils/getEndpointProperties.ts +var getEndpointProperties = /* @__PURE__ */ __name((properties, options) => Object.entries(properties).reduce( + (acc, [propertyKey, propertyVal]) => ({ + ...acc, + [propertyKey]: getEndpointProperty(propertyVal, options) + }), + {} +), "getEndpointProperties"); + +// src/utils/getEndpointUrl.ts +var getEndpointUrl = /* @__PURE__ */ __name((endpointUrl, options) => { + const expression = evaluateExpression(endpointUrl, "Endpoint URL", options); + if (typeof expression === "string") { + try { + return new URL(expression); + } catch (error) { + console.error(`Failed to construct URL with ${expression}`, error); + throw error; + } + } + throw new EndpointError(`Endpoint URL must be a string, got ${typeof expression}`); +}, "getEndpointUrl"); + +// src/utils/evaluateEndpointRule.ts +var evaluateEndpointRule = /* @__PURE__ */ __name((endpointRule, options) => { + const { conditions, endpoint } = endpointRule; + const { result, referenceRecord } = 
evaluateConditions(conditions, options); + if (!result) { + return; + } + const endpointRuleOptions = { + ...options, + referenceRecord: { ...options.referenceRecord, ...referenceRecord } + }; + const { url, properties, headers } = endpoint; + options.logger?.debug?.(`${debugId} Resolving endpoint from template: ${toDebugString(endpoint)}`); + return { + ...headers != void 0 && { + headers: getEndpointHeaders(headers, endpointRuleOptions) + }, + ...properties != void 0 && { + properties: getEndpointProperties(properties, endpointRuleOptions) + }, + url: getEndpointUrl(url, endpointRuleOptions) + }; +}, "evaluateEndpointRule"); + +// src/utils/evaluateErrorRule.ts +var evaluateErrorRule = /* @__PURE__ */ __name((errorRule, options) => { + const { conditions, error } = errorRule; + const { result, referenceRecord } = evaluateConditions(conditions, options); + if (!result) { + return; + } + throw new EndpointError( + evaluateExpression(error, "Error", { + ...options, + referenceRecord: { ...options.referenceRecord, ...referenceRecord } + }) + ); +}, "evaluateErrorRule"); + +// src/utils/evaluateTreeRule.ts +var evaluateTreeRule = /* @__PURE__ */ __name((treeRule, options) => { + const { conditions, rules } = treeRule; + const { result, referenceRecord } = evaluateConditions(conditions, options); + if (!result) { + return; + } + return evaluateRules(rules, { + ...options, + referenceRecord: { ...options.referenceRecord, ...referenceRecord } + }); +}, "evaluateTreeRule"); + +// src/utils/evaluateRules.ts +var evaluateRules = /* @__PURE__ */ __name((rules, options) => { + for (const rule of rules) { + if (rule.type === "endpoint") { + const endpointOrUndefined = evaluateEndpointRule(rule, options); + if (endpointOrUndefined) { + return endpointOrUndefined; + } + } else if (rule.type === "error") { + evaluateErrorRule(rule, options); + } else if (rule.type === "tree") { + const endpointOrUndefined = evaluateTreeRule(rule, options); + if (endpointOrUndefined) { + return 
endpointOrUndefined; + } + } else { + throw new EndpointError(`Unknown endpoint rule: ${rule}`); + } + } + throw new EndpointError(`Rules evaluation failed`); +}, "evaluateRules"); + +// src/resolveEndpoint.ts +var resolveEndpoint = /* @__PURE__ */ __name((ruleSetObject, options) => { + const { endpointParams, logger } = options; + const { parameters, rules } = ruleSetObject; + options.logger?.debug?.(`${debugId} Initial EndpointParams: ${toDebugString(endpointParams)}`); + const paramsWithDefault = Object.entries(parameters).filter(([, v]) => v.default != null).map(([k, v]) => [k, v.default]); + if (paramsWithDefault.length > 0) { + for (const [paramKey, paramDefaultValue] of paramsWithDefault) { + endpointParams[paramKey] = endpointParams[paramKey] ?? paramDefaultValue; + } + } + const requiredParams = Object.entries(parameters).filter(([, v]) => v.required).map(([k]) => k); + for (const requiredParam of requiredParams) { + if (endpointParams[requiredParam] == null) { + throw new EndpointError(`Missing required parameter: '${requiredParam}'`); + } + } + const endpoint = evaluateRules(rules, { endpointParams, logger, referenceRecord: {} }); + options.logger?.debug?.(`${debugId} Resolved endpoint: ${toDebugString(endpoint)}`); + return endpoint; +}, "resolveEndpoint"); +// Annotate the CommonJS export names for ESM import in node: + +0 && (module.exports = { + EndpointCache, + isIpAddress, + isValidHostLabel, + customEndpointFunctions, + resolveEndpoint, + EndpointError +}); + diff --git a/amplify/functions/fetchDocuments/node_modules/@smithy/util-endpoints/dist-cjs/lib/booleanEquals.js b/amplify/functions/fetchDocuments/node_modules/@smithy/util-endpoints/dist-cjs/lib/booleanEquals.js new file mode 100644 index 0000000..0440577 --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@smithy/util-endpoints/dist-cjs/lib/booleanEquals.js @@ -0,0 +1 @@ +module.exports = require("../index.js"); \ No newline at end of file diff --git 
a/amplify/functions/fetchDocuments/node_modules/@smithy/util-endpoints/dist-cjs/lib/getAttr.js b/amplify/functions/fetchDocuments/node_modules/@smithy/util-endpoints/dist-cjs/lib/getAttr.js new file mode 100644 index 0000000..0440577 --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@smithy/util-endpoints/dist-cjs/lib/getAttr.js @@ -0,0 +1 @@ +module.exports = require("../index.js"); \ No newline at end of file diff --git a/amplify/functions/fetchDocuments/node_modules/@smithy/util-endpoints/dist-cjs/lib/getAttrPathList.js b/amplify/functions/fetchDocuments/node_modules/@smithy/util-endpoints/dist-cjs/lib/getAttrPathList.js new file mode 100644 index 0000000..0440577 --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@smithy/util-endpoints/dist-cjs/lib/getAttrPathList.js @@ -0,0 +1 @@ +module.exports = require("../index.js"); \ No newline at end of file diff --git a/amplify/functions/fetchDocuments/node_modules/@smithy/util-endpoints/dist-cjs/lib/index.js b/amplify/functions/fetchDocuments/node_modules/@smithy/util-endpoints/dist-cjs/lib/index.js new file mode 100644 index 0000000..0440577 --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@smithy/util-endpoints/dist-cjs/lib/index.js @@ -0,0 +1 @@ +module.exports = require("../index.js"); \ No newline at end of file diff --git a/amplify/functions/fetchDocuments/node_modules/@smithy/util-endpoints/dist-cjs/lib/isIpAddress.js b/amplify/functions/fetchDocuments/node_modules/@smithy/util-endpoints/dist-cjs/lib/isIpAddress.js new file mode 100644 index 0000000..0440577 --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@smithy/util-endpoints/dist-cjs/lib/isIpAddress.js @@ -0,0 +1 @@ +module.exports = require("../index.js"); \ No newline at end of file diff --git a/amplify/functions/fetchDocuments/node_modules/@smithy/util-endpoints/dist-cjs/lib/isSet.js b/amplify/functions/fetchDocuments/node_modules/@smithy/util-endpoints/dist-cjs/lib/isSet.js new file mode 
100644 index 0000000..0440577 --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@smithy/util-endpoints/dist-cjs/lib/isSet.js @@ -0,0 +1 @@ +module.exports = require("../index.js"); \ No newline at end of file diff --git a/amplify/functions/fetchDocuments/node_modules/@smithy/util-endpoints/dist-cjs/lib/isValidHostLabel.js b/amplify/functions/fetchDocuments/node_modules/@smithy/util-endpoints/dist-cjs/lib/isValidHostLabel.js new file mode 100644 index 0000000..0440577 --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@smithy/util-endpoints/dist-cjs/lib/isValidHostLabel.js @@ -0,0 +1 @@ +module.exports = require("../index.js"); \ No newline at end of file diff --git a/amplify/functions/fetchDocuments/node_modules/@smithy/util-endpoints/dist-cjs/lib/not.js b/amplify/functions/fetchDocuments/node_modules/@smithy/util-endpoints/dist-cjs/lib/not.js new file mode 100644 index 0000000..0440577 --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@smithy/util-endpoints/dist-cjs/lib/not.js @@ -0,0 +1 @@ +module.exports = require("../index.js"); \ No newline at end of file diff --git a/amplify/functions/fetchDocuments/node_modules/@smithy/util-endpoints/dist-cjs/lib/parseURL.js b/amplify/functions/fetchDocuments/node_modules/@smithy/util-endpoints/dist-cjs/lib/parseURL.js new file mode 100644 index 0000000..0440577 --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@smithy/util-endpoints/dist-cjs/lib/parseURL.js @@ -0,0 +1 @@ +module.exports = require("../index.js"); \ No newline at end of file diff --git a/amplify/functions/fetchDocuments/node_modules/@smithy/util-endpoints/dist-cjs/lib/stringEquals.js b/amplify/functions/fetchDocuments/node_modules/@smithy/util-endpoints/dist-cjs/lib/stringEquals.js new file mode 100644 index 0000000..0440577 --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@smithy/util-endpoints/dist-cjs/lib/stringEquals.js @@ -0,0 +1 @@ +module.exports = require("../index.js"); \ 
No newline at end of file diff --git a/amplify/functions/fetchDocuments/node_modules/@smithy/util-endpoints/dist-cjs/lib/substring.js b/amplify/functions/fetchDocuments/node_modules/@smithy/util-endpoints/dist-cjs/lib/substring.js new file mode 100644 index 0000000..0440577 --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@smithy/util-endpoints/dist-cjs/lib/substring.js @@ -0,0 +1 @@ +module.exports = require("../index.js"); \ No newline at end of file diff --git a/amplify/functions/fetchDocuments/node_modules/@smithy/util-endpoints/dist-cjs/lib/uriEncode.js b/amplify/functions/fetchDocuments/node_modules/@smithy/util-endpoints/dist-cjs/lib/uriEncode.js new file mode 100644 index 0000000..0440577 --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@smithy/util-endpoints/dist-cjs/lib/uriEncode.js @@ -0,0 +1 @@ +module.exports = require("../index.js"); \ No newline at end of file diff --git a/amplify/functions/fetchDocuments/node_modules/@smithy/util-endpoints/dist-cjs/resolveEndpoint.js b/amplify/functions/fetchDocuments/node_modules/@smithy/util-endpoints/dist-cjs/resolveEndpoint.js new file mode 100644 index 0000000..532e610 --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@smithy/util-endpoints/dist-cjs/resolveEndpoint.js @@ -0,0 +1 @@ +module.exports = require("./index.js"); \ No newline at end of file diff --git a/amplify/functions/fetchDocuments/node_modules/@smithy/util-endpoints/dist-cjs/types/EndpointError.js b/amplify/functions/fetchDocuments/node_modules/@smithy/util-endpoints/dist-cjs/types/EndpointError.js new file mode 100644 index 0000000..0440577 --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@smithy/util-endpoints/dist-cjs/types/EndpointError.js @@ -0,0 +1 @@ +module.exports = require("../index.js"); \ No newline at end of file diff --git a/amplify/functions/fetchDocuments/node_modules/@smithy/util-endpoints/dist-cjs/types/EndpointFunctions.js 
b/amplify/functions/fetchDocuments/node_modules/@smithy/util-endpoints/dist-cjs/types/EndpointFunctions.js new file mode 100644 index 0000000..0440577 --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@smithy/util-endpoints/dist-cjs/types/EndpointFunctions.js @@ -0,0 +1 @@ +module.exports = require("../index.js"); \ No newline at end of file diff --git a/amplify/functions/fetchDocuments/node_modules/@smithy/util-endpoints/dist-cjs/types/EndpointRuleObject.js b/amplify/functions/fetchDocuments/node_modules/@smithy/util-endpoints/dist-cjs/types/EndpointRuleObject.js new file mode 100644 index 0000000..0440577 --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@smithy/util-endpoints/dist-cjs/types/EndpointRuleObject.js @@ -0,0 +1 @@ +module.exports = require("../index.js"); \ No newline at end of file diff --git a/amplify/functions/fetchDocuments/node_modules/@smithy/util-endpoints/dist-cjs/types/ErrorRuleObject.js b/amplify/functions/fetchDocuments/node_modules/@smithy/util-endpoints/dist-cjs/types/ErrorRuleObject.js new file mode 100644 index 0000000..0440577 --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@smithy/util-endpoints/dist-cjs/types/ErrorRuleObject.js @@ -0,0 +1 @@ +module.exports = require("../index.js"); \ No newline at end of file diff --git a/amplify/functions/fetchDocuments/node_modules/@smithy/util-endpoints/dist-cjs/types/RuleSetObject.js b/amplify/functions/fetchDocuments/node_modules/@smithy/util-endpoints/dist-cjs/types/RuleSetObject.js new file mode 100644 index 0000000..0440577 --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@smithy/util-endpoints/dist-cjs/types/RuleSetObject.js @@ -0,0 +1 @@ +module.exports = require("../index.js"); \ No newline at end of file diff --git a/amplify/functions/fetchDocuments/node_modules/@smithy/util-endpoints/dist-cjs/types/TreeRuleObject.js b/amplify/functions/fetchDocuments/node_modules/@smithy/util-endpoints/dist-cjs/types/TreeRuleObject.js 
new file mode 100644 index 0000000..0440577 --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@smithy/util-endpoints/dist-cjs/types/TreeRuleObject.js @@ -0,0 +1 @@ +module.exports = require("../index.js"); \ No newline at end of file diff --git a/amplify/functions/fetchDocuments/node_modules/@smithy/util-endpoints/dist-cjs/types/index.js b/amplify/functions/fetchDocuments/node_modules/@smithy/util-endpoints/dist-cjs/types/index.js new file mode 100644 index 0000000..0440577 --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@smithy/util-endpoints/dist-cjs/types/index.js @@ -0,0 +1 @@ +module.exports = require("../index.js"); \ No newline at end of file diff --git a/amplify/functions/fetchDocuments/node_modules/@smithy/util-endpoints/dist-cjs/types/shared.js b/amplify/functions/fetchDocuments/node_modules/@smithy/util-endpoints/dist-cjs/types/shared.js new file mode 100644 index 0000000..0440577 --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@smithy/util-endpoints/dist-cjs/types/shared.js @@ -0,0 +1 @@ +module.exports = require("../index.js"); \ No newline at end of file diff --git a/amplify/functions/fetchDocuments/node_modules/@smithy/util-endpoints/dist-cjs/utils/callFunction.js b/amplify/functions/fetchDocuments/node_modules/@smithy/util-endpoints/dist-cjs/utils/callFunction.js new file mode 100644 index 0000000..0440577 --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@smithy/util-endpoints/dist-cjs/utils/callFunction.js @@ -0,0 +1 @@ +module.exports = require("../index.js"); \ No newline at end of file diff --git a/amplify/functions/fetchDocuments/node_modules/@smithy/util-endpoints/dist-cjs/utils/customEndpointFunctions.js b/amplify/functions/fetchDocuments/node_modules/@smithy/util-endpoints/dist-cjs/utils/customEndpointFunctions.js new file mode 100644 index 0000000..0440577 --- /dev/null +++ 
b/amplify/functions/fetchDocuments/node_modules/@smithy/util-endpoints/dist-cjs/utils/customEndpointFunctions.js @@ -0,0 +1 @@ +module.exports = require("../index.js"); \ No newline at end of file diff --git a/amplify/functions/fetchDocuments/node_modules/@smithy/util-endpoints/dist-cjs/utils/endpointFunctions.js b/amplify/functions/fetchDocuments/node_modules/@smithy/util-endpoints/dist-cjs/utils/endpointFunctions.js new file mode 100644 index 0000000..0440577 --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@smithy/util-endpoints/dist-cjs/utils/endpointFunctions.js @@ -0,0 +1 @@ +module.exports = require("../index.js"); \ No newline at end of file diff --git a/amplify/functions/fetchDocuments/node_modules/@smithy/util-endpoints/dist-cjs/utils/evaluateCondition.js b/amplify/functions/fetchDocuments/node_modules/@smithy/util-endpoints/dist-cjs/utils/evaluateCondition.js new file mode 100644 index 0000000..0440577 --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@smithy/util-endpoints/dist-cjs/utils/evaluateCondition.js @@ -0,0 +1 @@ +module.exports = require("../index.js"); \ No newline at end of file diff --git a/amplify/functions/fetchDocuments/node_modules/@smithy/util-endpoints/dist-cjs/utils/evaluateConditions.js b/amplify/functions/fetchDocuments/node_modules/@smithy/util-endpoints/dist-cjs/utils/evaluateConditions.js new file mode 100644 index 0000000..0440577 --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@smithy/util-endpoints/dist-cjs/utils/evaluateConditions.js @@ -0,0 +1 @@ +module.exports = require("../index.js"); \ No newline at end of file diff --git a/amplify/functions/fetchDocuments/node_modules/@smithy/util-endpoints/dist-cjs/utils/evaluateEndpointRule.js b/amplify/functions/fetchDocuments/node_modules/@smithy/util-endpoints/dist-cjs/utils/evaluateEndpointRule.js new file mode 100644 index 0000000..0440577 --- /dev/null +++ 
b/amplify/functions/fetchDocuments/node_modules/@smithy/util-endpoints/dist-cjs/utils/evaluateEndpointRule.js @@ -0,0 +1 @@ +module.exports = require("../index.js"); \ No newline at end of file diff --git a/amplify/functions/fetchDocuments/node_modules/@smithy/util-endpoints/dist-cjs/utils/evaluateErrorRule.js b/amplify/functions/fetchDocuments/node_modules/@smithy/util-endpoints/dist-cjs/utils/evaluateErrorRule.js new file mode 100644 index 0000000..0440577 --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@smithy/util-endpoints/dist-cjs/utils/evaluateErrorRule.js @@ -0,0 +1 @@ +module.exports = require("../index.js"); \ No newline at end of file diff --git a/amplify/functions/fetchDocuments/node_modules/@smithy/util-endpoints/dist-cjs/utils/evaluateExpression.js b/amplify/functions/fetchDocuments/node_modules/@smithy/util-endpoints/dist-cjs/utils/evaluateExpression.js new file mode 100644 index 0000000..0440577 --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@smithy/util-endpoints/dist-cjs/utils/evaluateExpression.js @@ -0,0 +1 @@ +module.exports = require("../index.js"); \ No newline at end of file diff --git a/amplify/functions/fetchDocuments/node_modules/@smithy/util-endpoints/dist-cjs/utils/evaluateRules.js b/amplify/functions/fetchDocuments/node_modules/@smithy/util-endpoints/dist-cjs/utils/evaluateRules.js new file mode 100644 index 0000000..0440577 --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@smithy/util-endpoints/dist-cjs/utils/evaluateRules.js @@ -0,0 +1 @@ +module.exports = require("../index.js"); \ No newline at end of file diff --git a/amplify/functions/fetchDocuments/node_modules/@smithy/util-endpoints/dist-cjs/utils/evaluateTemplate.js b/amplify/functions/fetchDocuments/node_modules/@smithy/util-endpoints/dist-cjs/utils/evaluateTemplate.js new file mode 100644 index 0000000..0440577 --- /dev/null +++ 
b/amplify/functions/fetchDocuments/node_modules/@smithy/util-endpoints/dist-cjs/utils/evaluateTemplate.js @@ -0,0 +1 @@ +module.exports = require("../index.js"); \ No newline at end of file diff --git a/amplify/functions/fetchDocuments/node_modules/@smithy/util-endpoints/dist-cjs/utils/evaluateTreeRule.js b/amplify/functions/fetchDocuments/node_modules/@smithy/util-endpoints/dist-cjs/utils/evaluateTreeRule.js new file mode 100644 index 0000000..0440577 --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@smithy/util-endpoints/dist-cjs/utils/evaluateTreeRule.js @@ -0,0 +1 @@ +module.exports = require("../index.js"); \ No newline at end of file diff --git a/amplify/functions/fetchDocuments/node_modules/@smithy/util-endpoints/dist-cjs/utils/getEndpointHeaders.js b/amplify/functions/fetchDocuments/node_modules/@smithy/util-endpoints/dist-cjs/utils/getEndpointHeaders.js new file mode 100644 index 0000000..0440577 --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@smithy/util-endpoints/dist-cjs/utils/getEndpointHeaders.js @@ -0,0 +1 @@ +module.exports = require("../index.js"); \ No newline at end of file diff --git a/amplify/functions/fetchDocuments/node_modules/@smithy/util-endpoints/dist-cjs/utils/getEndpointProperties.js b/amplify/functions/fetchDocuments/node_modules/@smithy/util-endpoints/dist-cjs/utils/getEndpointProperties.js new file mode 100644 index 0000000..0440577 --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@smithy/util-endpoints/dist-cjs/utils/getEndpointProperties.js @@ -0,0 +1 @@ +module.exports = require("../index.js"); \ No newline at end of file diff --git a/amplify/functions/fetchDocuments/node_modules/@smithy/util-endpoints/dist-cjs/utils/getEndpointProperty.js b/amplify/functions/fetchDocuments/node_modules/@smithy/util-endpoints/dist-cjs/utils/getEndpointProperty.js new file mode 100644 index 0000000..0440577 --- /dev/null +++ 
b/amplify/functions/fetchDocuments/node_modules/@smithy/util-endpoints/dist-cjs/utils/getEndpointProperty.js @@ -0,0 +1 @@ +module.exports = require("../index.js"); \ No newline at end of file diff --git a/amplify/functions/fetchDocuments/node_modules/@smithy/util-endpoints/dist-cjs/utils/getEndpointUrl.js b/amplify/functions/fetchDocuments/node_modules/@smithy/util-endpoints/dist-cjs/utils/getEndpointUrl.js new file mode 100644 index 0000000..0440577 --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@smithy/util-endpoints/dist-cjs/utils/getEndpointUrl.js @@ -0,0 +1 @@ +module.exports = require("../index.js"); \ No newline at end of file diff --git a/amplify/functions/fetchDocuments/node_modules/@smithy/util-endpoints/dist-cjs/utils/getReferenceValue.js b/amplify/functions/fetchDocuments/node_modules/@smithy/util-endpoints/dist-cjs/utils/getReferenceValue.js new file mode 100644 index 0000000..0440577 --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@smithy/util-endpoints/dist-cjs/utils/getReferenceValue.js @@ -0,0 +1 @@ +module.exports = require("../index.js"); \ No newline at end of file diff --git a/amplify/functions/fetchDocuments/node_modules/@smithy/util-endpoints/dist-cjs/utils/index.js b/amplify/functions/fetchDocuments/node_modules/@smithy/util-endpoints/dist-cjs/utils/index.js new file mode 100644 index 0000000..0440577 --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@smithy/util-endpoints/dist-cjs/utils/index.js @@ -0,0 +1 @@ +module.exports = require("../index.js"); \ No newline at end of file diff --git a/amplify/functions/fetchDocuments/node_modules/@smithy/util-endpoints/dist-es/cache/EndpointCache.js b/amplify/functions/fetchDocuments/node_modules/@smithy/util-endpoints/dist-es/cache/EndpointCache.js new file mode 100644 index 0000000..ddc7b0d --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@smithy/util-endpoints/dist-es/cache/EndpointCache.js @@ -0,0 +1,49 @@ +export class 
EndpointCache { + constructor({ size, params }) { + this.data = new Map(); + this.parameters = []; + this.capacity = size ?? 50; + if (params) { + this.parameters = params; + } + } + get(endpointParams, resolver) { + const key = this.hash(endpointParams); + if (key === false) { + return resolver(); + } + if (!this.data.has(key)) { + if (this.data.size > this.capacity + 10) { + const keys = this.data.keys(); + let i = 0; + while (true) { + const { value, done } = keys.next(); + this.data.delete(value); + if (done || ++i > 10) { + break; + } + } + } + this.data.set(key, resolver()); + } + return this.data.get(key); + } + size() { + return this.data.size; + } + hash(endpointParams) { + let buffer = ""; + const { parameters } = this; + if (parameters.length === 0) { + return false; + } + for (const param of parameters) { + const val = String(endpointParams[param] ?? ""); + if (val.includes("|;")) { + return false; + } + buffer += val + "|;"; + } + return buffer; + } +} diff --git a/amplify/functions/fetchDocuments/node_modules/@smithy/util-endpoints/dist-es/debug/debugId.js b/amplify/functions/fetchDocuments/node_modules/@smithy/util-endpoints/dist-es/debug/debugId.js new file mode 100644 index 0000000..0d4e27e --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@smithy/util-endpoints/dist-es/debug/debugId.js @@ -0,0 +1 @@ +export const debugId = "endpoints"; diff --git a/amplify/functions/fetchDocuments/node_modules/@smithy/util-endpoints/dist-es/debug/index.js b/amplify/functions/fetchDocuments/node_modules/@smithy/util-endpoints/dist-es/debug/index.js new file mode 100644 index 0000000..70d3b15 --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@smithy/util-endpoints/dist-es/debug/index.js @@ -0,0 +1,2 @@ +export * from "./debugId"; +export * from "./toDebugString"; diff --git a/amplify/functions/fetchDocuments/node_modules/@smithy/util-endpoints/dist-es/debug/toDebugString.js 
b/amplify/functions/fetchDocuments/node_modules/@smithy/util-endpoints/dist-es/debug/toDebugString.js new file mode 100644 index 0000000..33c8fcb --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@smithy/util-endpoints/dist-es/debug/toDebugString.js @@ -0,0 +1,12 @@ +export function toDebugString(input) { + if (typeof input !== "object" || input == null) { + return input; + } + if ("ref" in input) { + return `$${toDebugString(input.ref)}`; + } + if ("fn" in input) { + return `${input.fn}(${(input.argv || []).map(toDebugString).join(", ")})`; + } + return JSON.stringify(input, null, 2); +} diff --git a/amplify/functions/fetchDocuments/node_modules/@smithy/util-endpoints/dist-es/getEndpointUrlConfig.js b/amplify/functions/fetchDocuments/node_modules/@smithy/util-endpoints/dist-es/getEndpointUrlConfig.js new file mode 100644 index 0000000..5069030 --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@smithy/util-endpoints/dist-es/getEndpointUrlConfig.js @@ -0,0 +1,21 @@ +const ENV_ENDPOINT_URL = "AWS_ENDPOINT_URL"; +const CONFIG_ENDPOINT_URL = "endpoint_url"; +export const getEndpointUrlConfig = (serviceId) => ({ + environmentVariableSelector: (env) => { + const serviceEndpointUrlSections = [ENV_ENDPOINT_URL, ...serviceId.split(" ").map((w) => w.toUpperCase())]; + const serviceEndpointUrl = env[serviceEndpointUrlSections.join("_")]; + if (serviceEndpointUrl) + return serviceEndpointUrl; + const endpointUrl = env[ENV_ENDPOINT_URL]; + if (endpointUrl) + return endpointUrl; + return undefined; + }, + configFileSelector: (profile) => { + const endpointUrl = profile[CONFIG_ENDPOINT_URL]; + if (endpointUrl) + return endpointUrl; + return undefined; + }, + default: undefined, +}); diff --git a/amplify/functions/fetchDocuments/node_modules/@smithy/util-endpoints/dist-es/index.js b/amplify/functions/fetchDocuments/node_modules/@smithy/util-endpoints/dist-es/index.js new file mode 100644 index 0000000..c39ed2b --- /dev/null +++ 
b/amplify/functions/fetchDocuments/node_modules/@smithy/util-endpoints/dist-es/index.js @@ -0,0 +1,6 @@ +export * from "./cache/EndpointCache"; +export * from "./lib/isIpAddress"; +export * from "./lib/isValidHostLabel"; +export * from "./utils/customEndpointFunctions"; +export * from "./resolveEndpoint"; +export * from "./types"; diff --git a/amplify/functions/fetchDocuments/node_modules/@smithy/util-endpoints/dist-es/lib/booleanEquals.js b/amplify/functions/fetchDocuments/node_modules/@smithy/util-endpoints/dist-es/lib/booleanEquals.js new file mode 100644 index 0000000..730cbd3 --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@smithy/util-endpoints/dist-es/lib/booleanEquals.js @@ -0,0 +1 @@ +export const booleanEquals = (value1, value2) => value1 === value2; diff --git a/amplify/functions/fetchDocuments/node_modules/@smithy/util-endpoints/dist-es/lib/getAttr.js b/amplify/functions/fetchDocuments/node_modules/@smithy/util-endpoints/dist-es/lib/getAttr.js new file mode 100644 index 0000000..d77f165 --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@smithy/util-endpoints/dist-es/lib/getAttr.js @@ -0,0 +1,11 @@ +import { EndpointError } from "../types"; +import { getAttrPathList } from "./getAttrPathList"; +export const getAttr = (value, path) => getAttrPathList(path).reduce((acc, index) => { + if (typeof acc !== "object") { + throw new EndpointError(`Index '${index}' in '${path}' not found in '${JSON.stringify(value)}'`); + } + else if (Array.isArray(acc)) { + return acc[parseInt(index)]; + } + return acc[index]; +}, value); diff --git a/amplify/functions/fetchDocuments/node_modules/@smithy/util-endpoints/dist-es/lib/getAttrPathList.js b/amplify/functions/fetchDocuments/node_modules/@smithy/util-endpoints/dist-es/lib/getAttrPathList.js new file mode 100644 index 0000000..5817a2d --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@smithy/util-endpoints/dist-es/lib/getAttrPathList.js @@ -0,0 +1,25 @@ +import { 
EndpointError } from "../types"; +export const getAttrPathList = (path) => { + const parts = path.split("."); + const pathList = []; + for (const part of parts) { + const squareBracketIndex = part.indexOf("["); + if (squareBracketIndex !== -1) { + if (part.indexOf("]") !== part.length - 1) { + throw new EndpointError(`Path: '${path}' does not end with ']'`); + } + const arrayIndex = part.slice(squareBracketIndex + 1, -1); + if (Number.isNaN(parseInt(arrayIndex))) { + throw new EndpointError(`Invalid array index: '${arrayIndex}' in path: '${path}'`); + } + if (squareBracketIndex !== 0) { + pathList.push(part.slice(0, squareBracketIndex)); + } + pathList.push(arrayIndex); + } + else { + pathList.push(part); + } + } + return pathList; +}; diff --git a/amplify/functions/fetchDocuments/node_modules/@smithy/util-endpoints/dist-es/lib/index.js b/amplify/functions/fetchDocuments/node_modules/@smithy/util-endpoints/dist-es/lib/index.js new file mode 100644 index 0000000..99a0844 --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@smithy/util-endpoints/dist-es/lib/index.js @@ -0,0 +1,9 @@ +export * from "./booleanEquals"; +export * from "./getAttr"; +export * from "./isSet"; +export * from "./isValidHostLabel"; +export * from "./not"; +export * from "./parseURL"; +export * from "./stringEquals"; +export * from "./substring"; +export * from "./uriEncode"; diff --git a/amplify/functions/fetchDocuments/node_modules/@smithy/util-endpoints/dist-es/lib/isIpAddress.js b/amplify/functions/fetchDocuments/node_modules/@smithy/util-endpoints/dist-es/lib/isIpAddress.js new file mode 100644 index 0000000..20be5a3 --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@smithy/util-endpoints/dist-es/lib/isIpAddress.js @@ -0,0 +1,2 @@ +const IP_V4_REGEX = new RegExp(`^(?:25[0-5]|2[0-4]\\d|1\\d\\d|[1-9]\\d|\\d)(?:\\.(?:25[0-5]|2[0-4]\\d|1\\d\\d|[1-9]\\d|\\d)){3}$`); +export const isIpAddress = (value) => IP_V4_REGEX.test(value) || (value.startsWith("[") && 
value.endsWith("]")); diff --git a/amplify/functions/fetchDocuments/node_modules/@smithy/util-endpoints/dist-es/lib/isSet.js b/amplify/functions/fetchDocuments/node_modules/@smithy/util-endpoints/dist-es/lib/isSet.js new file mode 100644 index 0000000..83ccc7a --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@smithy/util-endpoints/dist-es/lib/isSet.js @@ -0,0 +1 @@ +export const isSet = (value) => value != null; diff --git a/amplify/functions/fetchDocuments/node_modules/@smithy/util-endpoints/dist-es/lib/isValidHostLabel.js b/amplify/functions/fetchDocuments/node_modules/@smithy/util-endpoints/dist-es/lib/isValidHostLabel.js new file mode 100644 index 0000000..7858598 --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@smithy/util-endpoints/dist-es/lib/isValidHostLabel.js @@ -0,0 +1,13 @@ +const VALID_HOST_LABEL_REGEX = new RegExp(`^(?!.*-$)(?!-)[a-zA-Z0-9-]{1,63}$`); +export const isValidHostLabel = (value, allowSubDomains = false) => { + if (!allowSubDomains) { + return VALID_HOST_LABEL_REGEX.test(value); + } + const labels = value.split("."); + for (const label of labels) { + if (!isValidHostLabel(label)) { + return false; + } + } + return true; +}; diff --git a/amplify/functions/fetchDocuments/node_modules/@smithy/util-endpoints/dist-es/lib/not.js b/amplify/functions/fetchDocuments/node_modules/@smithy/util-endpoints/dist-es/lib/not.js new file mode 100644 index 0000000..180e5dd --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@smithy/util-endpoints/dist-es/lib/not.js @@ -0,0 +1 @@ +export const not = (value) => !value; diff --git a/amplify/functions/fetchDocuments/node_modules/@smithy/util-endpoints/dist-es/lib/parseURL.js b/amplify/functions/fetchDocuments/node_modules/@smithy/util-endpoints/dist-es/lib/parseURL.js new file mode 100644 index 0000000..79f9b24 --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@smithy/util-endpoints/dist-es/lib/parseURL.js @@ -0,0 +1,51 @@ +import { 
EndpointURLScheme } from "@smithy/types"; +import { isIpAddress } from "./isIpAddress"; +const DEFAULT_PORTS = { + [EndpointURLScheme.HTTP]: 80, + [EndpointURLScheme.HTTPS]: 443, +}; +export const parseURL = (value) => { + const whatwgURL = (() => { + try { + if (value instanceof URL) { + return value; + } + if (typeof value === "object" && "hostname" in value) { + const { hostname, port, protocol = "", path = "", query = {} } = value; + const url = new URL(`${protocol}//${hostname}${port ? `:${port}` : ""}${path}`); + url.search = Object.entries(query) + .map(([k, v]) => `${k}=${v}`) + .join("&"); + return url; + } + return new URL(value); + } + catch (error) { + return null; + } + })(); + if (!whatwgURL) { + console.error(`Unable to parse ${JSON.stringify(value)} as a whatwg URL.`); + return null; + } + const urlString = whatwgURL.href; + const { host, hostname, pathname, protocol, search } = whatwgURL; + if (search) { + return null; + } + const scheme = protocol.slice(0, -1); + if (!Object.values(EndpointURLScheme).includes(scheme)) { + return null; + } + const isIp = isIpAddress(hostname); + const inputContainsDefaultPort = urlString.includes(`${host}:${DEFAULT_PORTS[scheme]}`) || + (typeof value === "string" && value.includes(`${host}:${DEFAULT_PORTS[scheme]}`)); + const authority = `${host}${inputContainsDefaultPort ? `:${DEFAULT_PORTS[scheme]}` : ``}`; + return { + scheme, + authority, + path: pathname, + normalizedPath: pathname.endsWith("/") ? 
pathname : `${pathname}/`, + isIp, + }; +}; diff --git a/amplify/functions/fetchDocuments/node_modules/@smithy/util-endpoints/dist-es/lib/stringEquals.js b/amplify/functions/fetchDocuments/node_modules/@smithy/util-endpoints/dist-es/lib/stringEquals.js new file mode 100644 index 0000000..ee41426 --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@smithy/util-endpoints/dist-es/lib/stringEquals.js @@ -0,0 +1 @@ +export const stringEquals = (value1, value2) => value1 === value2; diff --git a/amplify/functions/fetchDocuments/node_modules/@smithy/util-endpoints/dist-es/lib/substring.js b/amplify/functions/fetchDocuments/node_modules/@smithy/util-endpoints/dist-es/lib/substring.js new file mode 100644 index 0000000..942dde4 --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@smithy/util-endpoints/dist-es/lib/substring.js @@ -0,0 +1,9 @@ +export const substring = (input, start, stop, reverse) => { + if (start >= stop || input.length < stop) { + return null; + } + if (!reverse) { + return input.substring(start, stop); + } + return input.substring(input.length - stop, input.length - start); +}; diff --git a/amplify/functions/fetchDocuments/node_modules/@smithy/util-endpoints/dist-es/lib/uriEncode.js b/amplify/functions/fetchDocuments/node_modules/@smithy/util-endpoints/dist-es/lib/uriEncode.js new file mode 100644 index 0000000..ae226dc --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@smithy/util-endpoints/dist-es/lib/uriEncode.js @@ -0,0 +1 @@ +export const uriEncode = (value) => encodeURIComponent(value).replace(/[!*'()]/g, (c) => `%${c.charCodeAt(0).toString(16).toUpperCase()}`); diff --git a/amplify/functions/fetchDocuments/node_modules/@smithy/util-endpoints/dist-es/resolveEndpoint.js b/amplify/functions/fetchDocuments/node_modules/@smithy/util-endpoints/dist-es/resolveEndpoint.js new file mode 100644 index 0000000..ac12096 --- /dev/null +++ 
b/amplify/functions/fetchDocuments/node_modules/@smithy/util-endpoints/dist-es/resolveEndpoint.js @@ -0,0 +1,27 @@ +import { debugId, toDebugString } from "./debug"; +import { EndpointError } from "./types"; +import { evaluateRules } from "./utils"; +export const resolveEndpoint = (ruleSetObject, options) => { + const { endpointParams, logger } = options; + const { parameters, rules } = ruleSetObject; + options.logger?.debug?.(`${debugId} Initial EndpointParams: ${toDebugString(endpointParams)}`); + const paramsWithDefault = Object.entries(parameters) + .filter(([, v]) => v.default != null) + .map(([k, v]) => [k, v.default]); + if (paramsWithDefault.length > 0) { + for (const [paramKey, paramDefaultValue] of paramsWithDefault) { + endpointParams[paramKey] = endpointParams[paramKey] ?? paramDefaultValue; + } + } + const requiredParams = Object.entries(parameters) + .filter(([, v]) => v.required) + .map(([k]) => k); + for (const requiredParam of requiredParams) { + if (endpointParams[requiredParam] == null) { + throw new EndpointError(`Missing required parameter: '${requiredParam}'`); + } + } + const endpoint = evaluateRules(rules, { endpointParams, logger, referenceRecord: {} }); + options.logger?.debug?.(`${debugId} Resolved endpoint: ${toDebugString(endpoint)}`); + return endpoint; +}; diff --git a/amplify/functions/fetchDocuments/node_modules/@smithy/util-endpoints/dist-es/types/EndpointError.js b/amplify/functions/fetchDocuments/node_modules/@smithy/util-endpoints/dist-es/types/EndpointError.js new file mode 100644 index 0000000..1ce597d --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@smithy/util-endpoints/dist-es/types/EndpointError.js @@ -0,0 +1,6 @@ +export class EndpointError extends Error { + constructor(message) { + super(message); + this.name = "EndpointError"; + } +} diff --git a/amplify/functions/fetchDocuments/node_modules/@smithy/util-endpoints/dist-es/types/EndpointFunctions.js 
b/amplify/functions/fetchDocuments/node_modules/@smithy/util-endpoints/dist-es/types/EndpointFunctions.js new file mode 100644 index 0000000..cb0ff5c --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@smithy/util-endpoints/dist-es/types/EndpointFunctions.js @@ -0,0 +1 @@ +export {}; diff --git a/amplify/functions/fetchDocuments/node_modules/@smithy/util-endpoints/dist-es/types/EndpointRuleObject.js b/amplify/functions/fetchDocuments/node_modules/@smithy/util-endpoints/dist-es/types/EndpointRuleObject.js new file mode 100644 index 0000000..cb0ff5c --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@smithy/util-endpoints/dist-es/types/EndpointRuleObject.js @@ -0,0 +1 @@ +export {}; diff --git a/amplify/functions/fetchDocuments/node_modules/@smithy/util-endpoints/dist-es/types/ErrorRuleObject.js b/amplify/functions/fetchDocuments/node_modules/@smithy/util-endpoints/dist-es/types/ErrorRuleObject.js new file mode 100644 index 0000000..cb0ff5c --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@smithy/util-endpoints/dist-es/types/ErrorRuleObject.js @@ -0,0 +1 @@ +export {}; diff --git a/amplify/functions/fetchDocuments/node_modules/@smithy/util-endpoints/dist-es/types/RuleSetObject.js b/amplify/functions/fetchDocuments/node_modules/@smithy/util-endpoints/dist-es/types/RuleSetObject.js new file mode 100644 index 0000000..cb0ff5c --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@smithy/util-endpoints/dist-es/types/RuleSetObject.js @@ -0,0 +1 @@ +export {}; diff --git a/amplify/functions/fetchDocuments/node_modules/@smithy/util-endpoints/dist-es/types/TreeRuleObject.js b/amplify/functions/fetchDocuments/node_modules/@smithy/util-endpoints/dist-es/types/TreeRuleObject.js new file mode 100644 index 0000000..cb0ff5c --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@smithy/util-endpoints/dist-es/types/TreeRuleObject.js @@ -0,0 +1 @@ +export {}; diff --git 
a/amplify/functions/fetchDocuments/node_modules/@smithy/util-endpoints/dist-es/types/index.js b/amplify/functions/fetchDocuments/node_modules/@smithy/util-endpoints/dist-es/types/index.js new file mode 100644 index 0000000..a49f984 --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@smithy/util-endpoints/dist-es/types/index.js @@ -0,0 +1,7 @@ +export * from "./EndpointError"; +export * from "./EndpointFunctions"; +export * from "./EndpointRuleObject"; +export * from "./ErrorRuleObject"; +export * from "./RuleSetObject"; +export * from "./TreeRuleObject"; +export * from "./shared"; diff --git a/amplify/functions/fetchDocuments/node_modules/@smithy/util-endpoints/dist-es/types/shared.js b/amplify/functions/fetchDocuments/node_modules/@smithy/util-endpoints/dist-es/types/shared.js new file mode 100644 index 0000000..cb0ff5c --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@smithy/util-endpoints/dist-es/types/shared.js @@ -0,0 +1 @@ +export {}; diff --git a/amplify/functions/fetchDocuments/node_modules/@smithy/util-endpoints/dist-es/utils/callFunction.js b/amplify/functions/fetchDocuments/node_modules/@smithy/util-endpoints/dist-es/utils/callFunction.js new file mode 100644 index 0000000..bf0747a --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@smithy/util-endpoints/dist-es/utils/callFunction.js @@ -0,0 +1,11 @@ +import { customEndpointFunctions } from "./customEndpointFunctions"; +import { endpointFunctions } from "./endpointFunctions"; +import { evaluateExpression } from "./evaluateExpression"; +export const callFunction = ({ fn, argv }, options) => { + const evaluatedArgs = argv.map((arg) => ["boolean", "number"].includes(typeof arg) ? 
arg : evaluateExpression(arg, "arg", options)); + const fnSegments = fn.split("."); + if (fnSegments[0] in customEndpointFunctions && fnSegments[1] != null) { + return customEndpointFunctions[fnSegments[0]][fnSegments[1]](...evaluatedArgs); + } + return endpointFunctions[fn](...evaluatedArgs); +}; diff --git a/amplify/functions/fetchDocuments/node_modules/@smithy/util-endpoints/dist-es/utils/customEndpointFunctions.js b/amplify/functions/fetchDocuments/node_modules/@smithy/util-endpoints/dist-es/utils/customEndpointFunctions.js new file mode 100644 index 0000000..0c26493 --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@smithy/util-endpoints/dist-es/utils/customEndpointFunctions.js @@ -0,0 +1 @@ +export const customEndpointFunctions = {}; diff --git a/amplify/functions/fetchDocuments/node_modules/@smithy/util-endpoints/dist-es/utils/endpointFunctions.js b/amplify/functions/fetchDocuments/node_modules/@smithy/util-endpoints/dist-es/utils/endpointFunctions.js new file mode 100644 index 0000000..e2215ff --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@smithy/util-endpoints/dist-es/utils/endpointFunctions.js @@ -0,0 +1,12 @@ +import { booleanEquals, getAttr, isSet, isValidHostLabel, not, parseURL, stringEquals, substring, uriEncode, } from "../lib"; +export const endpointFunctions = { + booleanEquals, + getAttr, + isSet, + isValidHostLabel, + not, + parseURL, + stringEquals, + substring, + uriEncode, +}; diff --git a/amplify/functions/fetchDocuments/node_modules/@smithy/util-endpoints/dist-es/utils/evaluateCondition.js b/amplify/functions/fetchDocuments/node_modules/@smithy/util-endpoints/dist-es/utils/evaluateCondition.js new file mode 100644 index 0000000..8e84f08 --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@smithy/util-endpoints/dist-es/utils/evaluateCondition.js @@ -0,0 +1,14 @@ +import { debugId, toDebugString } from "../debug"; +import { EndpointError } from "../types"; +import { callFunction } from 
"./callFunction"; +export const evaluateCondition = ({ assign, ...fnArgs }, options) => { + if (assign && assign in options.referenceRecord) { + throw new EndpointError(`'${assign}' is already defined in Reference Record.`); + } + const value = callFunction(fnArgs, options); + options.logger?.debug?.(`${debugId} evaluateCondition: ${toDebugString(fnArgs)} = ${toDebugString(value)}`); + return { + result: value === "" ? true : !!value, + ...(assign != null && { toAssign: { name: assign, value } }), + }; +}; diff --git a/amplify/functions/fetchDocuments/node_modules/@smithy/util-endpoints/dist-es/utils/evaluateConditions.js b/amplify/functions/fetchDocuments/node_modules/@smithy/util-endpoints/dist-es/utils/evaluateConditions.js new file mode 100644 index 0000000..5542076 --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@smithy/util-endpoints/dist-es/utils/evaluateConditions.js @@ -0,0 +1,22 @@ +import { debugId, toDebugString } from "../debug"; +import { evaluateCondition } from "./evaluateCondition"; +export const evaluateConditions = (conditions = [], options) => { + const conditionsReferenceRecord = {}; + for (const condition of conditions) { + const { result, toAssign } = evaluateCondition(condition, { + ...options, + referenceRecord: { + ...options.referenceRecord, + ...conditionsReferenceRecord, + }, + }); + if (!result) { + return { result }; + } + if (toAssign) { + conditionsReferenceRecord[toAssign.name] = toAssign.value; + options.logger?.debug?.(`${debugId} assign: ${toAssign.name} := ${toDebugString(toAssign.value)}`); + } + } + return { result: true, referenceRecord: conditionsReferenceRecord }; +}; diff --git a/amplify/functions/fetchDocuments/node_modules/@smithy/util-endpoints/dist-es/utils/evaluateEndpointRule.js b/amplify/functions/fetchDocuments/node_modules/@smithy/util-endpoints/dist-es/utils/evaluateEndpointRule.js new file mode 100644 index 0000000..ba6307b --- /dev/null +++ 
b/amplify/functions/fetchDocuments/node_modules/@smithy/util-endpoints/dist-es/utils/evaluateEndpointRule.js @@ -0,0 +1,27 @@ +import { debugId, toDebugString } from "../debug"; +import { evaluateConditions } from "./evaluateConditions"; +import { getEndpointHeaders } from "./getEndpointHeaders"; +import { getEndpointProperties } from "./getEndpointProperties"; +import { getEndpointUrl } from "./getEndpointUrl"; +export const evaluateEndpointRule = (endpointRule, options) => { + const { conditions, endpoint } = endpointRule; + const { result, referenceRecord } = evaluateConditions(conditions, options); + if (!result) { + return; + } + const endpointRuleOptions = { + ...options, + referenceRecord: { ...options.referenceRecord, ...referenceRecord }, + }; + const { url, properties, headers } = endpoint; + options.logger?.debug?.(`${debugId} Resolving endpoint from template: ${toDebugString(endpoint)}`); + return { + ...(headers != undefined && { + headers: getEndpointHeaders(headers, endpointRuleOptions), + }), + ...(properties != undefined && { + properties: getEndpointProperties(properties, endpointRuleOptions), + }), + url: getEndpointUrl(url, endpointRuleOptions), + }; +}; diff --git a/amplify/functions/fetchDocuments/node_modules/@smithy/util-endpoints/dist-es/utils/evaluateErrorRule.js b/amplify/functions/fetchDocuments/node_modules/@smithy/util-endpoints/dist-es/utils/evaluateErrorRule.js new file mode 100644 index 0000000..1a57860 --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@smithy/util-endpoints/dist-es/utils/evaluateErrorRule.js @@ -0,0 +1,14 @@ +import { EndpointError } from "../types"; +import { evaluateConditions } from "./evaluateConditions"; +import { evaluateExpression } from "./evaluateExpression"; +export const evaluateErrorRule = (errorRule, options) => { + const { conditions, error } = errorRule; + const { result, referenceRecord } = evaluateConditions(conditions, options); + if (!result) { + return; + } + throw new 
EndpointError(evaluateExpression(error, "Error", { + ...options, + referenceRecord: { ...options.referenceRecord, ...referenceRecord }, + })); +}; diff --git a/amplify/functions/fetchDocuments/node_modules/@smithy/util-endpoints/dist-es/utils/evaluateExpression.js b/amplify/functions/fetchDocuments/node_modules/@smithy/util-endpoints/dist-es/utils/evaluateExpression.js new file mode 100644 index 0000000..7f69658 --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@smithy/util-endpoints/dist-es/utils/evaluateExpression.js @@ -0,0 +1,16 @@ +import { EndpointError } from "../types"; +import { callFunction } from "./callFunction"; +import { evaluateTemplate } from "./evaluateTemplate"; +import { getReferenceValue } from "./getReferenceValue"; +export const evaluateExpression = (obj, keyName, options) => { + if (typeof obj === "string") { + return evaluateTemplate(obj, options); + } + else if (obj["fn"]) { + return callFunction(obj, options); + } + else if (obj["ref"]) { + return getReferenceValue(obj, options); + } + throw new EndpointError(`'${keyName}': ${String(obj)} is not a string, function or reference.`); +}; diff --git a/amplify/functions/fetchDocuments/node_modules/@smithy/util-endpoints/dist-es/utils/evaluateRules.js b/amplify/functions/fetchDocuments/node_modules/@smithy/util-endpoints/dist-es/utils/evaluateRules.js new file mode 100644 index 0000000..58a40a0 --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@smithy/util-endpoints/dist-es/utils/evaluateRules.js @@ -0,0 +1,27 @@ +import { EndpointError } from "../types"; +import { evaluateEndpointRule } from "./evaluateEndpointRule"; +import { evaluateErrorRule } from "./evaluateErrorRule"; +import { evaluateTreeRule } from "./evaluateTreeRule"; +export const evaluateRules = (rules, options) => { + for (const rule of rules) { + if (rule.type === "endpoint") { + const endpointOrUndefined = evaluateEndpointRule(rule, options); + if (endpointOrUndefined) { + return 
endpointOrUndefined; + } + } + else if (rule.type === "error") { + evaluateErrorRule(rule, options); + } + else if (rule.type === "tree") { + const endpointOrUndefined = evaluateTreeRule(rule, options); + if (endpointOrUndefined) { + return endpointOrUndefined; + } + } + else { + throw new EndpointError(`Unknown endpoint rule: ${rule}`); + } + } + throw new EndpointError(`Rules evaluation failed`); +}; diff --git a/amplify/functions/fetchDocuments/node_modules/@smithy/util-endpoints/dist-es/utils/evaluateTemplate.js b/amplify/functions/fetchDocuments/node_modules/@smithy/util-endpoints/dist-es/utils/evaluateTemplate.js new file mode 100644 index 0000000..7005809 --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@smithy/util-endpoints/dist-es/utils/evaluateTemplate.js @@ -0,0 +1,36 @@ +import { getAttr } from "../lib"; +export const evaluateTemplate = (template, options) => { + const evaluatedTemplateArr = []; + const templateContext = { + ...options.endpointParams, + ...options.referenceRecord, + }; + let currentIndex = 0; + while (currentIndex < template.length) { + const openingBraceIndex = template.indexOf("{", currentIndex); + if (openingBraceIndex === -1) { + evaluatedTemplateArr.push(template.slice(currentIndex)); + break; + } + evaluatedTemplateArr.push(template.slice(currentIndex, openingBraceIndex)); + const closingBraceIndex = template.indexOf("}", openingBraceIndex); + if (closingBraceIndex === -1) { + evaluatedTemplateArr.push(template.slice(openingBraceIndex)); + break; + } + if (template[openingBraceIndex + 1] === "{" && template[closingBraceIndex + 1] === "}") { + evaluatedTemplateArr.push(template.slice(openingBraceIndex + 1, closingBraceIndex)); + currentIndex = closingBraceIndex + 2; + } + const parameterName = template.substring(openingBraceIndex + 1, closingBraceIndex); + if (parameterName.includes("#")) { + const [refName, attrName] = parameterName.split("#"); + evaluatedTemplateArr.push(getAttr(templateContext[refName], 
attrName)); + } + else { + evaluatedTemplateArr.push(templateContext[parameterName]); + } + currentIndex = closingBraceIndex + 1; + } + return evaluatedTemplateArr.join(""); +}; diff --git a/amplify/functions/fetchDocuments/node_modules/@smithy/util-endpoints/dist-es/utils/evaluateTreeRule.js b/amplify/functions/fetchDocuments/node_modules/@smithy/util-endpoints/dist-es/utils/evaluateTreeRule.js new file mode 100644 index 0000000..427c1fa --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@smithy/util-endpoints/dist-es/utils/evaluateTreeRule.js @@ -0,0 +1,13 @@ +import { evaluateConditions } from "./evaluateConditions"; +import { evaluateRules } from "./evaluateRules"; +export const evaluateTreeRule = (treeRule, options) => { + const { conditions, rules } = treeRule; + const { result, referenceRecord } = evaluateConditions(conditions, options); + if (!result) { + return; + } + return evaluateRules(rules, { + ...options, + referenceRecord: { ...options.referenceRecord, ...referenceRecord }, + }); +}; diff --git a/amplify/functions/fetchDocuments/node_modules/@smithy/util-endpoints/dist-es/utils/getEndpointHeaders.js b/amplify/functions/fetchDocuments/node_modules/@smithy/util-endpoints/dist-es/utils/getEndpointHeaders.js new file mode 100644 index 0000000..f94cf55 --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@smithy/util-endpoints/dist-es/utils/getEndpointHeaders.js @@ -0,0 +1,12 @@ +import { EndpointError } from "../types"; +import { evaluateExpression } from "./evaluateExpression"; +export const getEndpointHeaders = (headers, options) => Object.entries(headers).reduce((acc, [headerKey, headerVal]) => ({ + ...acc, + [headerKey]: headerVal.map((headerValEntry) => { + const processedExpr = evaluateExpression(headerValEntry, "Header value entry", options); + if (typeof processedExpr !== "string") { + throw new EndpointError(`Header '${headerKey}' value '${processedExpr}' is not a string`); + } + return processedExpr; + }), +}), {}); 
diff --git a/amplify/functions/fetchDocuments/node_modules/@smithy/util-endpoints/dist-es/utils/getEndpointProperties.js b/amplify/functions/fetchDocuments/node_modules/@smithy/util-endpoints/dist-es/utils/getEndpointProperties.js new file mode 100644 index 0000000..e7afe88 --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@smithy/util-endpoints/dist-es/utils/getEndpointProperties.js @@ -0,0 +1,5 @@ +import { getEndpointProperty } from "./getEndpointProperty"; +export const getEndpointProperties = (properties, options) => Object.entries(properties).reduce((acc, [propertyKey, propertyVal]) => ({ + ...acc, + [propertyKey]: getEndpointProperty(propertyVal, options), +}), {}); diff --git a/amplify/functions/fetchDocuments/node_modules/@smithy/util-endpoints/dist-es/utils/getEndpointProperty.js b/amplify/functions/fetchDocuments/node_modules/@smithy/util-endpoints/dist-es/utils/getEndpointProperty.js new file mode 100644 index 0000000..0600969 --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@smithy/util-endpoints/dist-es/utils/getEndpointProperty.js @@ -0,0 +1,21 @@ +import { EndpointError } from "../types"; +import { evaluateTemplate } from "./evaluateTemplate"; +import { getEndpointProperties } from "./getEndpointProperties"; +export const getEndpointProperty = (property, options) => { + if (Array.isArray(property)) { + return property.map((propertyEntry) => getEndpointProperty(propertyEntry, options)); + } + switch (typeof property) { + case "string": + return evaluateTemplate(property, options); + case "object": + if (property === null) { + throw new EndpointError(`Unexpected endpoint property: ${property}`); + } + return getEndpointProperties(property, options); + case "boolean": + return property; + default: + throw new EndpointError(`Unexpected endpoint property type: ${typeof property}`); + } +}; diff --git a/amplify/functions/fetchDocuments/node_modules/@smithy/util-endpoints/dist-es/utils/getEndpointUrl.js 
b/amplify/functions/fetchDocuments/node_modules/@smithy/util-endpoints/dist-es/utils/getEndpointUrl.js new file mode 100644 index 0000000..8f1301e --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@smithy/util-endpoints/dist-es/utils/getEndpointUrl.js @@ -0,0 +1,15 @@ +import { EndpointError } from "../types"; +import { evaluateExpression } from "./evaluateExpression"; +export const getEndpointUrl = (endpointUrl, options) => { + const expression = evaluateExpression(endpointUrl, "Endpoint URL", options); + if (typeof expression === "string") { + try { + return new URL(expression); + } + catch (error) { + console.error(`Failed to construct URL with ${expression}`, error); + throw error; + } + } + throw new EndpointError(`Endpoint URL must be a string, got ${typeof expression}`); +}; diff --git a/amplify/functions/fetchDocuments/node_modules/@smithy/util-endpoints/dist-es/utils/getReferenceValue.js b/amplify/functions/fetchDocuments/node_modules/@smithy/util-endpoints/dist-es/utils/getReferenceValue.js new file mode 100644 index 0000000..759f4d4 --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@smithy/util-endpoints/dist-es/utils/getReferenceValue.js @@ -0,0 +1,7 @@ +export const getReferenceValue = ({ ref }, options) => { + const referenceRecord = { + ...options.endpointParams, + ...options.referenceRecord, + }; + return referenceRecord[ref]; +}; diff --git a/amplify/functions/fetchDocuments/node_modules/@smithy/util-endpoints/dist-es/utils/index.js b/amplify/functions/fetchDocuments/node_modules/@smithy/util-endpoints/dist-es/utils/index.js new file mode 100644 index 0000000..b571d02 --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@smithy/util-endpoints/dist-es/utils/index.js @@ -0,0 +1,2 @@ +export * from "./customEndpointFunctions"; +export * from "./evaluateRules"; diff --git a/amplify/functions/fetchDocuments/node_modules/@smithy/util-endpoints/dist-types/cache/EndpointCache.d.ts 
b/amplify/functions/fetchDocuments/node_modules/@smithy/util-endpoints/dist-types/cache/EndpointCache.d.ts new file mode 100644 index 0000000..19a338f --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@smithy/util-endpoints/dist-types/cache/EndpointCache.d.ts @@ -0,0 +1,34 @@ +import type { EndpointParams, EndpointV2 } from "@smithy/types"; +/** + * @internal + * + * Cache for endpoint ruleSet resolution. + */ +export declare class EndpointCache { + private capacity; + private data; + private parameters; + /** + * @param [size] - desired average maximum capacity. A buffer of 10 additional keys will be allowed + * before keys are dropped. + * @param [params] - list of params to consider as part of the cache key. + * + * If the params list is not populated, no caching will happen. + * This may be out of order depending on how the object is created and arrives to this class. + */ + constructor({ size, params }: { + size?: number; + params?: string[]; + }); + /** + * @param endpointParams - query for endpoint. + * @param resolver - provider of the value if not present. + * @returns endpoint corresponding to the query. + */ + get(endpointParams: EndpointParams, resolver: () => EndpointV2): EndpointV2; + size(): number; + /** + * @returns cache key or false if not cachable. 
+ */ + private hash; +} diff --git a/amplify/functions/fetchDocuments/node_modules/@smithy/util-endpoints/dist-types/debug/debugId.d.ts b/amplify/functions/fetchDocuments/node_modules/@smithy/util-endpoints/dist-types/debug/debugId.d.ts new file mode 100644 index 0000000..d39f408 --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@smithy/util-endpoints/dist-types/debug/debugId.d.ts @@ -0,0 +1 @@ +export declare const debugId = "endpoints"; diff --git a/amplify/functions/fetchDocuments/node_modules/@smithy/util-endpoints/dist-types/debug/index.d.ts b/amplify/functions/fetchDocuments/node_modules/@smithy/util-endpoints/dist-types/debug/index.d.ts new file mode 100644 index 0000000..70d3b15 --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@smithy/util-endpoints/dist-types/debug/index.d.ts @@ -0,0 +1,2 @@ +export * from "./debugId"; +export * from "./toDebugString"; diff --git a/amplify/functions/fetchDocuments/node_modules/@smithy/util-endpoints/dist-types/debug/toDebugString.d.ts b/amplify/functions/fetchDocuments/node_modules/@smithy/util-endpoints/dist-types/debug/toDebugString.d.ts new file mode 100644 index 0000000..6bf1d3a --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@smithy/util-endpoints/dist-types/debug/toDebugString.d.ts @@ -0,0 +1,9 @@ +import { EndpointParameters, EndpointV2 } from "@smithy/types"; +import { GetAttrValue } from "../lib"; +import { EndpointObject, FunctionObject, FunctionReturn } from "../types"; +export declare function toDebugString(input: EndpointParameters): string; +export declare function toDebugString(input: EndpointV2): string; +export declare function toDebugString(input: GetAttrValue): string; +export declare function toDebugString(input: FunctionObject): string; +export declare function toDebugString(input: FunctionReturn): string; +export declare function toDebugString(input: EndpointObject): string; diff --git 
a/amplify/functions/fetchDocuments/node_modules/@smithy/util-endpoints/dist-types/getEndpointUrlConfig.d.ts b/amplify/functions/fetchDocuments/node_modules/@smithy/util-endpoints/dist-types/getEndpointUrlConfig.d.ts new file mode 100644 index 0000000..0971010 --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@smithy/util-endpoints/dist-types/getEndpointUrlConfig.d.ts @@ -0,0 +1,2 @@ +import { LoadedConfigSelectors } from "@smithy/node-config-provider"; +export declare const getEndpointUrlConfig: (serviceId: string) => LoadedConfigSelectors; diff --git a/amplify/functions/fetchDocuments/node_modules/@smithy/util-endpoints/dist-types/index.d.ts b/amplify/functions/fetchDocuments/node_modules/@smithy/util-endpoints/dist-types/index.d.ts new file mode 100644 index 0000000..c39ed2b --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@smithy/util-endpoints/dist-types/index.d.ts @@ -0,0 +1,6 @@ +export * from "./cache/EndpointCache"; +export * from "./lib/isIpAddress"; +export * from "./lib/isValidHostLabel"; +export * from "./utils/customEndpointFunctions"; +export * from "./resolveEndpoint"; +export * from "./types"; diff --git a/amplify/functions/fetchDocuments/node_modules/@smithy/util-endpoints/dist-types/lib/booleanEquals.d.ts b/amplify/functions/fetchDocuments/node_modules/@smithy/util-endpoints/dist-types/lib/booleanEquals.d.ts new file mode 100644 index 0000000..7eac561 --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@smithy/util-endpoints/dist-types/lib/booleanEquals.d.ts @@ -0,0 +1,5 @@ +/** + * Evaluates two boolean values value1 and value2 for equality and returns + * true if both values match. 
+ */ +export declare const booleanEquals: (value1: boolean, value2: boolean) => boolean; diff --git a/amplify/functions/fetchDocuments/node_modules/@smithy/util-endpoints/dist-types/lib/getAttr.d.ts b/amplify/functions/fetchDocuments/node_modules/@smithy/util-endpoints/dist-types/lib/getAttr.d.ts new file mode 100644 index 0000000..a8088c5 --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@smithy/util-endpoints/dist-types/lib/getAttr.d.ts @@ -0,0 +1,7 @@ +export type GetAttrValue = string | boolean | { + [key: string]: GetAttrValue; +} | Array; +/** + * Returns value corresponding to pathing string for an array or object. + */ +export declare const getAttr: (value: GetAttrValue, path: string) => GetAttrValue; diff --git a/amplify/functions/fetchDocuments/node_modules/@smithy/util-endpoints/dist-types/lib/getAttrPathList.d.ts b/amplify/functions/fetchDocuments/node_modules/@smithy/util-endpoints/dist-types/lib/getAttrPathList.d.ts new file mode 100644 index 0000000..e6c4979 --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@smithy/util-endpoints/dist-types/lib/getAttrPathList.d.ts @@ -0,0 +1,4 @@ +/** + * Parses path as a getAttr expression, returning a list of strings. 
+ */ +export declare const getAttrPathList: (path: string) => Array; diff --git a/amplify/functions/fetchDocuments/node_modules/@smithy/util-endpoints/dist-types/lib/index.d.ts b/amplify/functions/fetchDocuments/node_modules/@smithy/util-endpoints/dist-types/lib/index.d.ts new file mode 100644 index 0000000..99a0844 --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@smithy/util-endpoints/dist-types/lib/index.d.ts @@ -0,0 +1,9 @@ +export * from "./booleanEquals"; +export * from "./getAttr"; +export * from "./isSet"; +export * from "./isValidHostLabel"; +export * from "./not"; +export * from "./parseURL"; +export * from "./stringEquals"; +export * from "./substring"; +export * from "./uriEncode"; diff --git a/amplify/functions/fetchDocuments/node_modules/@smithy/util-endpoints/dist-types/lib/isIpAddress.d.ts b/amplify/functions/fetchDocuments/node_modules/@smithy/util-endpoints/dist-types/lib/isIpAddress.d.ts new file mode 100644 index 0000000..28aba97 --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@smithy/util-endpoints/dist-types/lib/isIpAddress.d.ts @@ -0,0 +1,4 @@ +/** + * Validates if the provided value is an IP address. + */ +export declare const isIpAddress: (value: string) => boolean; diff --git a/amplify/functions/fetchDocuments/node_modules/@smithy/util-endpoints/dist-types/lib/isSet.d.ts b/amplify/functions/fetchDocuments/node_modules/@smithy/util-endpoints/dist-types/lib/isSet.d.ts new file mode 100644 index 0000000..7c74ec5 --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@smithy/util-endpoints/dist-types/lib/isSet.d.ts @@ -0,0 +1,5 @@ +/** + * Evaluates whether a value is set (aka not null or undefined). + * Returns true if the value is set, otherwise returns false. 
+ */ +export declare const isSet: (value: unknown) => boolean; diff --git a/amplify/functions/fetchDocuments/node_modules/@smithy/util-endpoints/dist-types/lib/isValidHostLabel.d.ts b/amplify/functions/fetchDocuments/node_modules/@smithy/util-endpoints/dist-types/lib/isValidHostLabel.d.ts new file mode 100644 index 0000000..c05f9e9 --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@smithy/util-endpoints/dist-types/lib/isValidHostLabel.d.ts @@ -0,0 +1,7 @@ +/** + * Evaluates whether one or more string values are valid host labels per RFC 1123. + * + * If allowSubDomains is true, then the provided value may be zero or more dotted + * subdomains which are each validated per RFC 1123. + */ +export declare const isValidHostLabel: (value: string, allowSubDomains?: boolean) => boolean; diff --git a/amplify/functions/fetchDocuments/node_modules/@smithy/util-endpoints/dist-types/lib/not.d.ts b/amplify/functions/fetchDocuments/node_modules/@smithy/util-endpoints/dist-types/lib/not.d.ts new file mode 100644 index 0000000..1e8e728 --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@smithy/util-endpoints/dist-types/lib/not.d.ts @@ -0,0 +1,5 @@ +/** + * Performs logical negation on the provided boolean value, + * returning the negated value. + */ +export declare const not: (value: boolean) => boolean; diff --git a/amplify/functions/fetchDocuments/node_modules/@smithy/util-endpoints/dist-types/lib/parseURL.d.ts b/amplify/functions/fetchDocuments/node_modules/@smithy/util-endpoints/dist-types/lib/parseURL.d.ts new file mode 100644 index 0000000..3e0dce3 --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@smithy/util-endpoints/dist-types/lib/parseURL.d.ts @@ -0,0 +1,5 @@ +import { Endpoint, EndpointURL } from "@smithy/types"; +/** + * Parses a string, URL, or Endpoint into it’s Endpoint URL components. 
+ */ +export declare const parseURL: (value: string | URL | Endpoint) => EndpointURL | null; diff --git a/amplify/functions/fetchDocuments/node_modules/@smithy/util-endpoints/dist-types/lib/stringEquals.d.ts b/amplify/functions/fetchDocuments/node_modules/@smithy/util-endpoints/dist-types/lib/stringEquals.d.ts new file mode 100644 index 0000000..bdfc98d --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@smithy/util-endpoints/dist-types/lib/stringEquals.d.ts @@ -0,0 +1,5 @@ +/** + * Evaluates two string values value1 and value2 for equality and returns + * true if both values match. + */ +export declare const stringEquals: (value1: string, value2: string) => boolean; diff --git a/amplify/functions/fetchDocuments/node_modules/@smithy/util-endpoints/dist-types/lib/substring.d.ts b/amplify/functions/fetchDocuments/node_modules/@smithy/util-endpoints/dist-types/lib/substring.d.ts new file mode 100644 index 0000000..5d70035 --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@smithy/util-endpoints/dist-types/lib/substring.d.ts @@ -0,0 +1,7 @@ +/** + * Computes the substring of a given string, conditionally indexing from the end of the string. + * When the string is long enough to fully include the substring, return the substring. + * Otherwise, return None. The start index is inclusive and the stop index is exclusive. + * The length of the returned string will always be stop-start. 
+ */ +export declare const substring: (input: string, start: number, stop: number, reverse: boolean) => string | null; diff --git a/amplify/functions/fetchDocuments/node_modules/@smithy/util-endpoints/dist-types/lib/uriEncode.d.ts b/amplify/functions/fetchDocuments/node_modules/@smithy/util-endpoints/dist-types/lib/uriEncode.d.ts new file mode 100644 index 0000000..c2a720c --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@smithy/util-endpoints/dist-types/lib/uriEncode.d.ts @@ -0,0 +1,4 @@ +/** + * Performs percent-encoding per RFC3986 section 2.1 + */ +export declare const uriEncode: (value: string) => string; diff --git a/amplify/functions/fetchDocuments/node_modules/@smithy/util-endpoints/dist-types/resolveEndpoint.d.ts b/amplify/functions/fetchDocuments/node_modules/@smithy/util-endpoints/dist-types/resolveEndpoint.d.ts new file mode 100644 index 0000000..b02188b --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@smithy/util-endpoints/dist-types/resolveEndpoint.d.ts @@ -0,0 +1,6 @@ +import { EndpointV2 } from "@smithy/types"; +import { EndpointResolverOptions, RuleSetObject } from "./types"; +/** + * Resolves an endpoint URL by processing the endpoints ruleset and options. + */ +export declare const resolveEndpoint: (ruleSetObject: RuleSetObject, options: EndpointResolverOptions) => EndpointV2; diff --git a/amplify/functions/fetchDocuments/node_modules/@smithy/util-endpoints/dist-types/ts3.4/cache/EndpointCache.d.ts b/amplify/functions/fetchDocuments/node_modules/@smithy/util-endpoints/dist-types/ts3.4/cache/EndpointCache.d.ts new file mode 100644 index 0000000..9d622ae --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@smithy/util-endpoints/dist-types/ts3.4/cache/EndpointCache.d.ts @@ -0,0 +1,34 @@ +import { EndpointParams, EndpointV2 } from "@smithy/types"; +/** + * @internal + * + * Cache for endpoint ruleSet resolution. 
+ */ +export declare class EndpointCache { + private capacity; + private data; + private parameters; + /** + * @param [size] - desired average maximum capacity. A buffer of 10 additional keys will be allowed + * before keys are dropped. + * @param [params] - list of params to consider as part of the cache key. + * + * If the params list is not populated, no caching will happen. + * This may be out of order depending on how the object is created and arrives to this class. + */ + constructor({ size, params }: { + size?: number; + params?: string[]; + }); + /** + * @param endpointParams - query for endpoint. + * @param resolver - provider of the value if not present. + * @returns endpoint corresponding to the query. + */ + get(endpointParams: EndpointParams, resolver: () => EndpointV2): EndpointV2; + size(): number; + /** + * @returns cache key or false if not cachable. + */ + private hash; +} diff --git a/amplify/functions/fetchDocuments/node_modules/@smithy/util-endpoints/dist-types/ts3.4/debug/debugId.d.ts b/amplify/functions/fetchDocuments/node_modules/@smithy/util-endpoints/dist-types/ts3.4/debug/debugId.d.ts new file mode 100644 index 0000000..f674b8a --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@smithy/util-endpoints/dist-types/ts3.4/debug/debugId.d.ts @@ -0,0 +1 @@ +export declare const debugId = "endpoints"; diff --git a/amplify/functions/fetchDocuments/node_modules/@smithy/util-endpoints/dist-types/ts3.4/debug/index.d.ts b/amplify/functions/fetchDocuments/node_modules/@smithy/util-endpoints/dist-types/ts3.4/debug/index.d.ts new file mode 100644 index 0000000..1eb0bf4 --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@smithy/util-endpoints/dist-types/ts3.4/debug/index.d.ts @@ -0,0 +1,2 @@ +export * from "./debugId"; +export * from "./toDebugString"; diff --git a/amplify/functions/fetchDocuments/node_modules/@smithy/util-endpoints/dist-types/ts3.4/debug/toDebugString.d.ts 
b/amplify/functions/fetchDocuments/node_modules/@smithy/util-endpoints/dist-types/ts3.4/debug/toDebugString.d.ts new file mode 100644 index 0000000..e295ca0 --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@smithy/util-endpoints/dist-types/ts3.4/debug/toDebugString.d.ts @@ -0,0 +1,9 @@ +import { EndpointParameters, EndpointV2 } from "@smithy/types"; +import { GetAttrValue } from "../lib"; +import { EndpointObject, FunctionObject, FunctionReturn } from "../types"; +export declare function toDebugString(input: EndpointParameters): string; +export declare function toDebugString(input: EndpointV2): string; +export declare function toDebugString(input: GetAttrValue): string; +export declare function toDebugString(input: FunctionObject): string; +export declare function toDebugString(input: FunctionReturn): string; +export declare function toDebugString(input: EndpointObject): string; diff --git a/amplify/functions/fetchDocuments/node_modules/@smithy/util-endpoints/dist-types/ts3.4/getEndpointUrlConfig.d.ts b/amplify/functions/fetchDocuments/node_modules/@smithy/util-endpoints/dist-types/ts3.4/getEndpointUrlConfig.d.ts new file mode 100644 index 0000000..7b9d068 --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@smithy/util-endpoints/dist-types/ts3.4/getEndpointUrlConfig.d.ts @@ -0,0 +1,2 @@ +import { LoadedConfigSelectors } from "@smithy/node-config-provider"; +export declare const getEndpointUrlConfig: (serviceId: string) => LoadedConfigSelectors; diff --git a/amplify/functions/fetchDocuments/node_modules/@smithy/util-endpoints/dist-types/ts3.4/index.d.ts b/amplify/functions/fetchDocuments/node_modules/@smithy/util-endpoints/dist-types/ts3.4/index.d.ts new file mode 100644 index 0000000..7b367cf --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@smithy/util-endpoints/dist-types/ts3.4/index.d.ts @@ -0,0 +1,6 @@ +export * from "./cache/EndpointCache"; +export * from "./lib/isIpAddress"; +export * from 
"./lib/isValidHostLabel"; +export * from "./utils/customEndpointFunctions"; +export * from "./resolveEndpoint"; +export * from "./types"; diff --git a/amplify/functions/fetchDocuments/node_modules/@smithy/util-endpoints/dist-types/ts3.4/lib/booleanEquals.d.ts b/amplify/functions/fetchDocuments/node_modules/@smithy/util-endpoints/dist-types/ts3.4/lib/booleanEquals.d.ts new file mode 100644 index 0000000..7aec001 --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@smithy/util-endpoints/dist-types/ts3.4/lib/booleanEquals.d.ts @@ -0,0 +1,5 @@ +/** + * Evaluates two boolean values value1 and value2 for equality and returns + * true if both values match. + */ +export declare const booleanEquals: (value1: boolean, value2: boolean) => boolean; diff --git a/amplify/functions/fetchDocuments/node_modules/@smithy/util-endpoints/dist-types/ts3.4/lib/getAttr.d.ts b/amplify/functions/fetchDocuments/node_modules/@smithy/util-endpoints/dist-types/ts3.4/lib/getAttr.d.ts new file mode 100644 index 0000000..e2f5b43 --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@smithy/util-endpoints/dist-types/ts3.4/lib/getAttr.d.ts @@ -0,0 +1,7 @@ +export type GetAttrValue = string | boolean | { + [key: string]: GetAttrValue; +} | Array; +/** + * Returns value corresponding to pathing string for an array or object. + */ +export declare const getAttr: (value: GetAttrValue, path: string) => GetAttrValue; diff --git a/amplify/functions/fetchDocuments/node_modules/@smithy/util-endpoints/dist-types/ts3.4/lib/getAttrPathList.d.ts b/amplify/functions/fetchDocuments/node_modules/@smithy/util-endpoints/dist-types/ts3.4/lib/getAttrPathList.d.ts new file mode 100644 index 0000000..93bbf31 --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@smithy/util-endpoints/dist-types/ts3.4/lib/getAttrPathList.d.ts @@ -0,0 +1,4 @@ +/** + * Parses path as a getAttr expression, returning a list of strings. 
+ */ +export declare const getAttrPathList: (path: string) => Array; diff --git a/amplify/functions/fetchDocuments/node_modules/@smithy/util-endpoints/dist-types/ts3.4/lib/index.d.ts b/amplify/functions/fetchDocuments/node_modules/@smithy/util-endpoints/dist-types/ts3.4/lib/index.d.ts new file mode 100644 index 0000000..a28ecaa --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@smithy/util-endpoints/dist-types/ts3.4/lib/index.d.ts @@ -0,0 +1,9 @@ +export * from "./booleanEquals"; +export * from "./getAttr"; +export * from "./isSet"; +export * from "./isValidHostLabel"; +export * from "./not"; +export * from "./parseURL"; +export * from "./stringEquals"; +export * from "./substring"; +export * from "./uriEncode"; diff --git a/amplify/functions/fetchDocuments/node_modules/@smithy/util-endpoints/dist-types/ts3.4/lib/isIpAddress.d.ts b/amplify/functions/fetchDocuments/node_modules/@smithy/util-endpoints/dist-types/ts3.4/lib/isIpAddress.d.ts new file mode 100644 index 0000000..9f37893 --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@smithy/util-endpoints/dist-types/ts3.4/lib/isIpAddress.d.ts @@ -0,0 +1,4 @@ +/** + * Validates if the provided value is an IP address. + */ +export declare const isIpAddress: (value: string) => boolean; diff --git a/amplify/functions/fetchDocuments/node_modules/@smithy/util-endpoints/dist-types/ts3.4/lib/isSet.d.ts b/amplify/functions/fetchDocuments/node_modules/@smithy/util-endpoints/dist-types/ts3.4/lib/isSet.d.ts new file mode 100644 index 0000000..6b102dd --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@smithy/util-endpoints/dist-types/ts3.4/lib/isSet.d.ts @@ -0,0 +1,5 @@ +/** + * Evaluates whether a value is set (aka not null or undefined). + * Returns true if the value is set, otherwise returns false. 
+ */ +export declare const isSet: (value: unknown) => boolean; diff --git a/amplify/functions/fetchDocuments/node_modules/@smithy/util-endpoints/dist-types/ts3.4/lib/isValidHostLabel.d.ts b/amplify/functions/fetchDocuments/node_modules/@smithy/util-endpoints/dist-types/ts3.4/lib/isValidHostLabel.d.ts new file mode 100644 index 0000000..01f7eb9 --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@smithy/util-endpoints/dist-types/ts3.4/lib/isValidHostLabel.d.ts @@ -0,0 +1,7 @@ +/** + * Evaluates whether one or more string values are valid host labels per RFC 1123. + * + * If allowSubDomains is true, then the provided value may be zero or more dotted + * subdomains which are each validated per RFC 1123. + */ +export declare const isValidHostLabel: (value: string, allowSubDomains?: boolean) => boolean; diff --git a/amplify/functions/fetchDocuments/node_modules/@smithy/util-endpoints/dist-types/ts3.4/lib/not.d.ts b/amplify/functions/fetchDocuments/node_modules/@smithy/util-endpoints/dist-types/ts3.4/lib/not.d.ts new file mode 100644 index 0000000..b4e84ac --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@smithy/util-endpoints/dist-types/ts3.4/lib/not.d.ts @@ -0,0 +1,5 @@ +/** + * Performs logical negation on the provided boolean value, + * returning the negated value. + */ +export declare const not: (value: boolean) => boolean; diff --git a/amplify/functions/fetchDocuments/node_modules/@smithy/util-endpoints/dist-types/ts3.4/lib/parseURL.d.ts b/amplify/functions/fetchDocuments/node_modules/@smithy/util-endpoints/dist-types/ts3.4/lib/parseURL.d.ts new file mode 100644 index 0000000..0f54066 --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@smithy/util-endpoints/dist-types/ts3.4/lib/parseURL.d.ts @@ -0,0 +1,5 @@ +import { Endpoint, EndpointURL } from "@smithy/types"; +/** + * Parses a string, URL, or Endpoint into it’s Endpoint URL components. 
+ */ +export declare const parseURL: (value: string | URL | Endpoint) => EndpointURL | null; diff --git a/amplify/functions/fetchDocuments/node_modules/@smithy/util-endpoints/dist-types/ts3.4/lib/stringEquals.d.ts b/amplify/functions/fetchDocuments/node_modules/@smithy/util-endpoints/dist-types/ts3.4/lib/stringEquals.d.ts new file mode 100644 index 0000000..9acb10c --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@smithy/util-endpoints/dist-types/ts3.4/lib/stringEquals.d.ts @@ -0,0 +1,5 @@ +/** + * Evaluates two string values value1 and value2 for equality and returns + * true if both values match. + */ +export declare const stringEquals: (value1: string, value2: string) => boolean; diff --git a/amplify/functions/fetchDocuments/node_modules/@smithy/util-endpoints/dist-types/ts3.4/lib/substring.d.ts b/amplify/functions/fetchDocuments/node_modules/@smithy/util-endpoints/dist-types/ts3.4/lib/substring.d.ts new file mode 100644 index 0000000..a99025c --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@smithy/util-endpoints/dist-types/ts3.4/lib/substring.d.ts @@ -0,0 +1,7 @@ +/** + * Computes the substring of a given string, conditionally indexing from the end of the string. + * When the string is long enough to fully include the substring, return the substring. + * Otherwise, return None. The start index is inclusive and the stop index is exclusive. + * The length of the returned string will always be stop-start. 
+ */ +export declare const substring: (input: string, start: number, stop: number, reverse: boolean) => string | null; diff --git a/amplify/functions/fetchDocuments/node_modules/@smithy/util-endpoints/dist-types/ts3.4/lib/uriEncode.d.ts b/amplify/functions/fetchDocuments/node_modules/@smithy/util-endpoints/dist-types/ts3.4/lib/uriEncode.d.ts new file mode 100644 index 0000000..acb75bb --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@smithy/util-endpoints/dist-types/ts3.4/lib/uriEncode.d.ts @@ -0,0 +1,4 @@ +/** + * Performs percent-encoding per RFC3986 section 2.1 + */ +export declare const uriEncode: (value: string) => string; diff --git a/amplify/functions/fetchDocuments/node_modules/@smithy/util-endpoints/dist-types/ts3.4/resolveEndpoint.d.ts b/amplify/functions/fetchDocuments/node_modules/@smithy/util-endpoints/dist-types/ts3.4/resolveEndpoint.d.ts new file mode 100644 index 0000000..5469fa2 --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@smithy/util-endpoints/dist-types/ts3.4/resolveEndpoint.d.ts @@ -0,0 +1,6 @@ +import { EndpointV2 } from "@smithy/types"; +import { EndpointResolverOptions, RuleSetObject } from "./types"; +/** + * Resolves an endpoint URL by processing the endpoints ruleset and options. 
+ */ +export declare const resolveEndpoint: (ruleSetObject: RuleSetObject, options: EndpointResolverOptions) => EndpointV2; diff --git a/amplify/functions/fetchDocuments/node_modules/@smithy/util-endpoints/dist-types/ts3.4/types/EndpointError.d.ts b/amplify/functions/fetchDocuments/node_modules/@smithy/util-endpoints/dist-types/ts3.4/types/EndpointError.d.ts new file mode 100644 index 0000000..4f3c538 --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@smithy/util-endpoints/dist-types/ts3.4/types/EndpointError.d.ts @@ -0,0 +1,3 @@ +export declare class EndpointError extends Error { + constructor(message: string); +} diff --git a/amplify/functions/fetchDocuments/node_modules/@smithy/util-endpoints/dist-types/ts3.4/types/EndpointFunctions.d.ts b/amplify/functions/fetchDocuments/node_modules/@smithy/util-endpoints/dist-types/ts3.4/types/EndpointFunctions.d.ts new file mode 100644 index 0000000..7b3cf42 --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@smithy/util-endpoints/dist-types/ts3.4/types/EndpointFunctions.d.ts @@ -0,0 +1,2 @@ +import { FunctionReturn } from "./shared"; +export type EndpointFunctions = Record FunctionReturn>; diff --git a/amplify/functions/fetchDocuments/node_modules/@smithy/util-endpoints/dist-types/ts3.4/types/EndpointRuleObject.d.ts b/amplify/functions/fetchDocuments/node_modules/@smithy/util-endpoints/dist-types/ts3.4/types/EndpointRuleObject.d.ts new file mode 100644 index 0000000..436001e --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@smithy/util-endpoints/dist-types/ts3.4/types/EndpointRuleObject.d.ts @@ -0,0 +1,5 @@ +import { EndpointObject as __EndpointObject, EndpointObjectHeaders as __EndpointObjectHeaders, EndpointObjectProperties as __EndpointObjectProperties, EndpointRuleObject as __EndpointRuleObject } from "@smithy/types"; +export type EndpointObjectProperties = __EndpointObjectProperties; +export type EndpointObjectHeaders = __EndpointObjectHeaders; +export type EndpointObject 
= __EndpointObject; +export type EndpointRuleObject = __EndpointRuleObject; diff --git a/amplify/functions/fetchDocuments/node_modules/@smithy/util-endpoints/dist-types/ts3.4/types/ErrorRuleObject.d.ts b/amplify/functions/fetchDocuments/node_modules/@smithy/util-endpoints/dist-types/ts3.4/types/ErrorRuleObject.d.ts new file mode 100644 index 0000000..1540835 --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@smithy/util-endpoints/dist-types/ts3.4/types/ErrorRuleObject.d.ts @@ -0,0 +1,2 @@ +import { ErrorRuleObject as __ErrorRuleObject } from "@smithy/types"; +export type ErrorRuleObject = __ErrorRuleObject; diff --git a/amplify/functions/fetchDocuments/node_modules/@smithy/util-endpoints/dist-types/ts3.4/types/RuleSetObject.d.ts b/amplify/functions/fetchDocuments/node_modules/@smithy/util-endpoints/dist-types/ts3.4/types/RuleSetObject.d.ts new file mode 100644 index 0000000..227b269 --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@smithy/util-endpoints/dist-types/ts3.4/types/RuleSetObject.d.ts @@ -0,0 +1,4 @@ +import { DeprecatedObject as __DeprecatedObject, ParameterObject as __ParameterObject, RuleSetObject as __RuleSetObject } from "@smithy/types"; +export type DeprecatedObject = __DeprecatedObject; +export type ParameterObject = __ParameterObject; +export type RuleSetObject = __RuleSetObject; diff --git a/amplify/functions/fetchDocuments/node_modules/@smithy/util-endpoints/dist-types/ts3.4/types/TreeRuleObject.d.ts b/amplify/functions/fetchDocuments/node_modules/@smithy/util-endpoints/dist-types/ts3.4/types/TreeRuleObject.d.ts new file mode 100644 index 0000000..ecdb6b4 --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@smithy/util-endpoints/dist-types/ts3.4/types/TreeRuleObject.d.ts @@ -0,0 +1,3 @@ +import { RuleSetRules as __RuleSetRules, TreeRuleObject as __TreeRuleObject } from "@smithy/types"; +export type RuleSetRules = __RuleSetRules; +export type TreeRuleObject = __TreeRuleObject; diff --git 
a/amplify/functions/fetchDocuments/node_modules/@smithy/util-endpoints/dist-types/ts3.4/types/index.d.ts b/amplify/functions/fetchDocuments/node_modules/@smithy/util-endpoints/dist-types/ts3.4/types/index.d.ts new file mode 100644 index 0000000..f89fb63 --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@smithy/util-endpoints/dist-types/ts3.4/types/index.d.ts @@ -0,0 +1,7 @@ +export * from "./EndpointError"; +export * from "./EndpointFunctions"; +export * from "./EndpointRuleObject"; +export * from "./ErrorRuleObject"; +export * from "./RuleSetObject"; +export * from "./TreeRuleObject"; +export * from "./shared"; diff --git a/amplify/functions/fetchDocuments/node_modules/@smithy/util-endpoints/dist-types/ts3.4/types/shared.d.ts b/amplify/functions/fetchDocuments/node_modules/@smithy/util-endpoints/dist-types/ts3.4/types/shared.d.ts new file mode 100644 index 0000000..052dcf3 --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@smithy/util-endpoints/dist-types/ts3.4/types/shared.d.ts @@ -0,0 +1,25 @@ +import { EndpointARN, EndpointPartition, Logger } from "@smithy/types"; +export type ReferenceObject = { + ref: string; +}; +export type FunctionObject = { + fn: string; + argv: FunctionArgv; +}; +export type FunctionArgv = Array; +export type FunctionReturn = string | boolean | number | EndpointARN | EndpointPartition | { + [key: string]: FunctionReturn; +} | null; +export type ConditionObject = FunctionObject & { + assign?: string; +}; +export type Expression = string | ReferenceObject | FunctionObject; +export type EndpointParams = Record; +export type EndpointResolverOptions = { + endpointParams: EndpointParams; + logger?: Logger; +}; +export type ReferenceRecord = Record; +export type EvaluateOptions = EndpointResolverOptions & { + referenceRecord: ReferenceRecord; +}; diff --git a/amplify/functions/fetchDocuments/node_modules/@smithy/util-endpoints/dist-types/ts3.4/utils/callFunction.d.ts 
b/amplify/functions/fetchDocuments/node_modules/@smithy/util-endpoints/dist-types/ts3.4/utils/callFunction.d.ts new file mode 100644 index 0000000..bfdf543 --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@smithy/util-endpoints/dist-types/ts3.4/utils/callFunction.d.ts @@ -0,0 +1,2 @@ +import { EvaluateOptions, FunctionObject, FunctionReturn } from "../types"; +export declare const callFunction: ({ fn, argv }: FunctionObject, options: EvaluateOptions) => FunctionReturn; diff --git a/amplify/functions/fetchDocuments/node_modules/@smithy/util-endpoints/dist-types/ts3.4/utils/customEndpointFunctions.d.ts b/amplify/functions/fetchDocuments/node_modules/@smithy/util-endpoints/dist-types/ts3.4/utils/customEndpointFunctions.d.ts new file mode 100644 index 0000000..1cd2240 --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@smithy/util-endpoints/dist-types/ts3.4/utils/customEndpointFunctions.d.ts @@ -0,0 +1,4 @@ +import { EndpointFunctions } from "../types/EndpointFunctions"; +export declare const customEndpointFunctions: { + [key: string]: EndpointFunctions; +}; diff --git a/amplify/functions/fetchDocuments/node_modules/@smithy/util-endpoints/dist-types/ts3.4/utils/endpointFunctions.d.ts b/amplify/functions/fetchDocuments/node_modules/@smithy/util-endpoints/dist-types/ts3.4/utils/endpointFunctions.d.ts new file mode 100644 index 0000000..cde57d1 --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@smithy/util-endpoints/dist-types/ts3.4/utils/endpointFunctions.d.ts @@ -0,0 +1,11 @@ +export declare const endpointFunctions: { + booleanEquals: (value1: boolean, value2: boolean) => boolean; + getAttr: (value: import("../lib").GetAttrValue, path: string) => import("../lib").GetAttrValue; + isSet: (value: unknown) => boolean; + isValidHostLabel: (value: string, allowSubDomains?: boolean) => boolean; + not: (value: boolean) => boolean; + parseURL: (value: string | URL | import("@smithy/types").Endpoint) => 
import("@smithy/types").EndpointURL | null; + stringEquals: (value1: string, value2: string) => boolean; + substring: (input: string, start: number, stop: number, reverse: boolean) => string | null; + uriEncode: (value: string) => string; +}; diff --git a/amplify/functions/fetchDocuments/node_modules/@smithy/util-endpoints/dist-types/ts3.4/utils/evaluateCondition.d.ts b/amplify/functions/fetchDocuments/node_modules/@smithy/util-endpoints/dist-types/ts3.4/utils/evaluateCondition.d.ts new file mode 100644 index 0000000..ba2c0be --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@smithy/util-endpoints/dist-types/ts3.4/utils/evaluateCondition.d.ts @@ -0,0 +1,8 @@ +import { ConditionObject, EvaluateOptions } from "../types"; +export declare const evaluateCondition: ({ assign, ...fnArgs }: ConditionObject, options: EvaluateOptions) => { + toAssign?: { + name: string; + value: import("../types").FunctionReturn; + } | undefined; + result: boolean; +}; diff --git a/amplify/functions/fetchDocuments/node_modules/@smithy/util-endpoints/dist-types/ts3.4/utils/evaluateConditions.d.ts b/amplify/functions/fetchDocuments/node_modules/@smithy/util-endpoints/dist-types/ts3.4/utils/evaluateConditions.d.ts new file mode 100644 index 0000000..a7fbc5f --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@smithy/util-endpoints/dist-types/ts3.4/utils/evaluateConditions.d.ts @@ -0,0 +1,8 @@ +import { ConditionObject, EvaluateOptions, FunctionReturn } from "../types"; +export declare const evaluateConditions: (conditions: ConditionObject[] | undefined, options: EvaluateOptions) => { + result: false; + referenceRecord?: undefined; +} | { + result: boolean; + referenceRecord: Record; +}; diff --git a/amplify/functions/fetchDocuments/node_modules/@smithy/util-endpoints/dist-types/ts3.4/utils/evaluateEndpointRule.d.ts b/amplify/functions/fetchDocuments/node_modules/@smithy/util-endpoints/dist-types/ts3.4/utils/evaluateEndpointRule.d.ts new file mode 100644 index 
0000000..32f23ff --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@smithy/util-endpoints/dist-types/ts3.4/utils/evaluateEndpointRule.d.ts @@ -0,0 +1,3 @@ +import { EndpointV2 } from "@smithy/types"; +import { EndpointRuleObject, EvaluateOptions } from "../types"; +export declare const evaluateEndpointRule: (endpointRule: EndpointRuleObject, options: EvaluateOptions) => EndpointV2 | undefined; diff --git a/amplify/functions/fetchDocuments/node_modules/@smithy/util-endpoints/dist-types/ts3.4/utils/evaluateErrorRule.d.ts b/amplify/functions/fetchDocuments/node_modules/@smithy/util-endpoints/dist-types/ts3.4/utils/evaluateErrorRule.d.ts new file mode 100644 index 0000000..eef15e3 --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@smithy/util-endpoints/dist-types/ts3.4/utils/evaluateErrorRule.d.ts @@ -0,0 +1,2 @@ +import { ErrorRuleObject, EvaluateOptions } from "../types"; +export declare const evaluateErrorRule: (errorRule: ErrorRuleObject, options: EvaluateOptions) => void; diff --git a/amplify/functions/fetchDocuments/node_modules/@smithy/util-endpoints/dist-types/ts3.4/utils/evaluateExpression.d.ts b/amplify/functions/fetchDocuments/node_modules/@smithy/util-endpoints/dist-types/ts3.4/utils/evaluateExpression.d.ts new file mode 100644 index 0000000..8bbd358 --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@smithy/util-endpoints/dist-types/ts3.4/utils/evaluateExpression.d.ts @@ -0,0 +1,2 @@ +import { EvaluateOptions, Expression } from "../types"; +export declare const evaluateExpression: (obj: Expression, keyName: string, options: EvaluateOptions) => import("../types").FunctionReturn; diff --git a/amplify/functions/fetchDocuments/node_modules/@smithy/util-endpoints/dist-types/ts3.4/utils/evaluateRules.d.ts b/amplify/functions/fetchDocuments/node_modules/@smithy/util-endpoints/dist-types/ts3.4/utils/evaluateRules.d.ts new file mode 100644 index 0000000..a37fe07 --- /dev/null +++ 
b/amplify/functions/fetchDocuments/node_modules/@smithy/util-endpoints/dist-types/ts3.4/utils/evaluateRules.d.ts @@ -0,0 +1,3 @@ +import { EndpointV2 } from "@smithy/types"; +import { EvaluateOptions } from "../types"; +export declare const evaluateRules: (rules: import("@smithy/types").RuleSetRules, options: EvaluateOptions) => EndpointV2; diff --git a/amplify/functions/fetchDocuments/node_modules/@smithy/util-endpoints/dist-types/ts3.4/utils/evaluateTemplate.d.ts b/amplify/functions/fetchDocuments/node_modules/@smithy/util-endpoints/dist-types/ts3.4/utils/evaluateTemplate.d.ts new file mode 100644 index 0000000..e6ae9c3 --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@smithy/util-endpoints/dist-types/ts3.4/utils/evaluateTemplate.d.ts @@ -0,0 +1,2 @@ +import { EvaluateOptions } from "../types"; +export declare const evaluateTemplate: (template: string, options: EvaluateOptions) => string; diff --git a/amplify/functions/fetchDocuments/node_modules/@smithy/util-endpoints/dist-types/ts3.4/utils/evaluateTreeRule.d.ts b/amplify/functions/fetchDocuments/node_modules/@smithy/util-endpoints/dist-types/ts3.4/utils/evaluateTreeRule.d.ts new file mode 100644 index 0000000..8518f7b --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@smithy/util-endpoints/dist-types/ts3.4/utils/evaluateTreeRule.d.ts @@ -0,0 +1,3 @@ +import { EndpointV2 } from "@smithy/types"; +import { EvaluateOptions, TreeRuleObject } from "../types"; +export declare const evaluateTreeRule: (treeRule: TreeRuleObject, options: EvaluateOptions) => EndpointV2 | undefined; diff --git a/amplify/functions/fetchDocuments/node_modules/@smithy/util-endpoints/dist-types/ts3.4/utils/getEndpointHeaders.d.ts b/amplify/functions/fetchDocuments/node_modules/@smithy/util-endpoints/dist-types/ts3.4/utils/getEndpointHeaders.d.ts new file mode 100644 index 0000000..2775159 --- /dev/null +++ 
b/amplify/functions/fetchDocuments/node_modules/@smithy/util-endpoints/dist-types/ts3.4/utils/getEndpointHeaders.d.ts @@ -0,0 +1,2 @@ +import { EndpointObjectHeaders, EvaluateOptions } from "../types"; +export declare const getEndpointHeaders: (headers: EndpointObjectHeaders, options: EvaluateOptions) => {}; diff --git a/amplify/functions/fetchDocuments/node_modules/@smithy/util-endpoints/dist-types/ts3.4/utils/getEndpointProperties.d.ts b/amplify/functions/fetchDocuments/node_modules/@smithy/util-endpoints/dist-types/ts3.4/utils/getEndpointProperties.d.ts new file mode 100644 index 0000000..944b39d --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@smithy/util-endpoints/dist-types/ts3.4/utils/getEndpointProperties.d.ts @@ -0,0 +1,2 @@ +import { EndpointObjectProperties, EvaluateOptions } from "../types"; +export declare const getEndpointProperties: (properties: EndpointObjectProperties, options: EvaluateOptions) => {}; diff --git a/amplify/functions/fetchDocuments/node_modules/@smithy/util-endpoints/dist-types/ts3.4/utils/getEndpointProperty.d.ts b/amplify/functions/fetchDocuments/node_modules/@smithy/util-endpoints/dist-types/ts3.4/utils/getEndpointProperty.d.ts new file mode 100644 index 0000000..5002377 --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@smithy/util-endpoints/dist-types/ts3.4/utils/getEndpointProperty.d.ts @@ -0,0 +1,3 @@ +import { EndpointObjectProperty } from "@smithy/types"; +import { EvaluateOptions } from "../types"; +export declare const getEndpointProperty: (property: EndpointObjectProperty, options: EvaluateOptions) => EndpointObjectProperty; diff --git a/amplify/functions/fetchDocuments/node_modules/@smithy/util-endpoints/dist-types/ts3.4/utils/getEndpointUrl.d.ts b/amplify/functions/fetchDocuments/node_modules/@smithy/util-endpoints/dist-types/ts3.4/utils/getEndpointUrl.d.ts new file mode 100644 index 0000000..9c93422 --- /dev/null +++ 
b/amplify/functions/fetchDocuments/node_modules/@smithy/util-endpoints/dist-types/ts3.4/utils/getEndpointUrl.d.ts @@ -0,0 +1,2 @@ +import { EvaluateOptions, Expression } from "../types"; +export declare const getEndpointUrl: (endpointUrl: Expression, options: EvaluateOptions) => URL; diff --git a/amplify/functions/fetchDocuments/node_modules/@smithy/util-endpoints/dist-types/ts3.4/utils/getReferenceValue.d.ts b/amplify/functions/fetchDocuments/node_modules/@smithy/util-endpoints/dist-types/ts3.4/utils/getReferenceValue.d.ts new file mode 100644 index 0000000..2ebfda3 --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@smithy/util-endpoints/dist-types/ts3.4/utils/getReferenceValue.d.ts @@ -0,0 +1,2 @@ +import { EvaluateOptions, ReferenceObject } from "../types"; +export declare const getReferenceValue: ({ ref }: ReferenceObject, options: EvaluateOptions) => import("../types").FunctionReturn; diff --git a/amplify/functions/fetchDocuments/node_modules/@smithy/util-endpoints/dist-types/ts3.4/utils/index.d.ts b/amplify/functions/fetchDocuments/node_modules/@smithy/util-endpoints/dist-types/ts3.4/utils/index.d.ts new file mode 100644 index 0000000..bd481df --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@smithy/util-endpoints/dist-types/ts3.4/utils/index.d.ts @@ -0,0 +1,2 @@ +export * from "./customEndpointFunctions"; +export * from "./evaluateRules"; diff --git a/amplify/functions/fetchDocuments/node_modules/@smithy/util-endpoints/dist-types/types/EndpointError.d.ts b/amplify/functions/fetchDocuments/node_modules/@smithy/util-endpoints/dist-types/types/EndpointError.d.ts new file mode 100644 index 0000000..89132f2 --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@smithy/util-endpoints/dist-types/types/EndpointError.d.ts @@ -0,0 +1,3 @@ +export declare class EndpointError extends Error { + constructor(message: string); +} diff --git 
a/amplify/functions/fetchDocuments/node_modules/@smithy/util-endpoints/dist-types/types/EndpointFunctions.d.ts b/amplify/functions/fetchDocuments/node_modules/@smithy/util-endpoints/dist-types/types/EndpointFunctions.d.ts new file mode 100644 index 0000000..33b1a0b --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@smithy/util-endpoints/dist-types/types/EndpointFunctions.d.ts @@ -0,0 +1,2 @@ +import { FunctionReturn } from "./shared"; +export type EndpointFunctions = Record FunctionReturn>; diff --git a/amplify/functions/fetchDocuments/node_modules/@smithy/util-endpoints/dist-types/types/EndpointRuleObject.d.ts b/amplify/functions/fetchDocuments/node_modules/@smithy/util-endpoints/dist-types/types/EndpointRuleObject.d.ts new file mode 100644 index 0000000..d24545f --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@smithy/util-endpoints/dist-types/types/EndpointRuleObject.d.ts @@ -0,0 +1,5 @@ +import { EndpointObject as __EndpointObject, EndpointObjectHeaders as __EndpointObjectHeaders, EndpointObjectProperties as __EndpointObjectProperties, EndpointRuleObject as __EndpointRuleObject } from "@smithy/types"; +export type EndpointObjectProperties = __EndpointObjectProperties; +export type EndpointObjectHeaders = __EndpointObjectHeaders; +export type EndpointObject = __EndpointObject; +export type EndpointRuleObject = __EndpointRuleObject; diff --git a/amplify/functions/fetchDocuments/node_modules/@smithy/util-endpoints/dist-types/types/ErrorRuleObject.d.ts b/amplify/functions/fetchDocuments/node_modules/@smithy/util-endpoints/dist-types/types/ErrorRuleObject.d.ts new file mode 100644 index 0000000..51fe138 --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@smithy/util-endpoints/dist-types/types/ErrorRuleObject.d.ts @@ -0,0 +1,2 @@ +import { ErrorRuleObject as __ErrorRuleObject } from "@smithy/types"; +export type ErrorRuleObject = __ErrorRuleObject; diff --git 
a/amplify/functions/fetchDocuments/node_modules/@smithy/util-endpoints/dist-types/types/RuleSetObject.d.ts b/amplify/functions/fetchDocuments/node_modules/@smithy/util-endpoints/dist-types/types/RuleSetObject.d.ts new file mode 100644 index 0000000..3335b80 --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@smithy/util-endpoints/dist-types/types/RuleSetObject.d.ts @@ -0,0 +1,4 @@ +import { DeprecatedObject as __DeprecatedObject, ParameterObject as __ParameterObject, RuleSetObject as __RuleSetObject } from "@smithy/types"; +export type DeprecatedObject = __DeprecatedObject; +export type ParameterObject = __ParameterObject; +export type RuleSetObject = __RuleSetObject; diff --git a/amplify/functions/fetchDocuments/node_modules/@smithy/util-endpoints/dist-types/types/TreeRuleObject.d.ts b/amplify/functions/fetchDocuments/node_modules/@smithy/util-endpoints/dist-types/types/TreeRuleObject.d.ts new file mode 100644 index 0000000..3d902d0 --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@smithy/util-endpoints/dist-types/types/TreeRuleObject.d.ts @@ -0,0 +1,3 @@ +import { RuleSetRules as __RuleSetRules, TreeRuleObject as __TreeRuleObject } from "@smithy/types"; +export type RuleSetRules = __RuleSetRules; +export type TreeRuleObject = __TreeRuleObject; diff --git a/amplify/functions/fetchDocuments/node_modules/@smithy/util-endpoints/dist-types/types/index.d.ts b/amplify/functions/fetchDocuments/node_modules/@smithy/util-endpoints/dist-types/types/index.d.ts new file mode 100644 index 0000000..a49f984 --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@smithy/util-endpoints/dist-types/types/index.d.ts @@ -0,0 +1,7 @@ +export * from "./EndpointError"; +export * from "./EndpointFunctions"; +export * from "./EndpointRuleObject"; +export * from "./ErrorRuleObject"; +export * from "./RuleSetObject"; +export * from "./TreeRuleObject"; +export * from "./shared"; diff --git 
a/amplify/functions/fetchDocuments/node_modules/@smithy/util-endpoints/dist-types/types/shared.d.ts b/amplify/functions/fetchDocuments/node_modules/@smithy/util-endpoints/dist-types/types/shared.d.ts new file mode 100644 index 0000000..8351a92 --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@smithy/util-endpoints/dist-types/types/shared.d.ts @@ -0,0 +1,25 @@ +import { EndpointARN, EndpointPartition, Logger } from "@smithy/types"; +export type ReferenceObject = { + ref: string; +}; +export type FunctionObject = { + fn: string; + argv: FunctionArgv; +}; +export type FunctionArgv = Array; +export type FunctionReturn = string | boolean | number | EndpointARN | EndpointPartition | { + [key: string]: FunctionReturn; +} | null; +export type ConditionObject = FunctionObject & { + assign?: string; +}; +export type Expression = string | ReferenceObject | FunctionObject; +export type EndpointParams = Record; +export type EndpointResolverOptions = { + endpointParams: EndpointParams; + logger?: Logger; +}; +export type ReferenceRecord = Record; +export type EvaluateOptions = EndpointResolverOptions & { + referenceRecord: ReferenceRecord; +}; diff --git a/amplify/functions/fetchDocuments/node_modules/@smithy/util-endpoints/dist-types/utils/callFunction.d.ts b/amplify/functions/fetchDocuments/node_modules/@smithy/util-endpoints/dist-types/utils/callFunction.d.ts new file mode 100644 index 0000000..729a206 --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@smithy/util-endpoints/dist-types/utils/callFunction.d.ts @@ -0,0 +1,2 @@ +import { EvaluateOptions, FunctionObject, FunctionReturn } from "../types"; +export declare const callFunction: ({ fn, argv }: FunctionObject, options: EvaluateOptions) => FunctionReturn; diff --git a/amplify/functions/fetchDocuments/node_modules/@smithy/util-endpoints/dist-types/utils/customEndpointFunctions.d.ts 
b/amplify/functions/fetchDocuments/node_modules/@smithy/util-endpoints/dist-types/utils/customEndpointFunctions.d.ts new file mode 100644 index 0000000..d8971d0 --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@smithy/util-endpoints/dist-types/utils/customEndpointFunctions.d.ts @@ -0,0 +1,4 @@ +import { EndpointFunctions } from "../types/EndpointFunctions"; +export declare const customEndpointFunctions: { + [key: string]: EndpointFunctions; +}; diff --git a/amplify/functions/fetchDocuments/node_modules/@smithy/util-endpoints/dist-types/utils/endpointFunctions.d.ts b/amplify/functions/fetchDocuments/node_modules/@smithy/util-endpoints/dist-types/utils/endpointFunctions.d.ts new file mode 100644 index 0000000..12d75b9 --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@smithy/util-endpoints/dist-types/utils/endpointFunctions.d.ts @@ -0,0 +1,11 @@ +export declare const endpointFunctions: { + booleanEquals: (value1: boolean, value2: boolean) => boolean; + getAttr: (value: import("../lib").GetAttrValue, path: string) => import("../lib").GetAttrValue; + isSet: (value: unknown) => boolean; + isValidHostLabel: (value: string, allowSubDomains?: boolean) => boolean; + not: (value: boolean) => boolean; + parseURL: (value: string | URL | import("@smithy/types").Endpoint) => import("@smithy/types").EndpointURL | null; + stringEquals: (value1: string, value2: string) => boolean; + substring: (input: string, start: number, stop: number, reverse: boolean) => string | null; + uriEncode: (value: string) => string; +}; diff --git a/amplify/functions/fetchDocuments/node_modules/@smithy/util-endpoints/dist-types/utils/evaluateCondition.d.ts b/amplify/functions/fetchDocuments/node_modules/@smithy/util-endpoints/dist-types/utils/evaluateCondition.d.ts new file mode 100644 index 0000000..5fbe59f --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@smithy/util-endpoints/dist-types/utils/evaluateCondition.d.ts @@ -0,0 +1,8 @@ +import { 
ConditionObject, EvaluateOptions } from "../types"; +export declare const evaluateCondition: ({ assign, ...fnArgs }: ConditionObject, options: EvaluateOptions) => { + toAssign?: { + name: string; + value: import("../types").FunctionReturn; + } | undefined; + result: boolean; +}; diff --git a/amplify/functions/fetchDocuments/node_modules/@smithy/util-endpoints/dist-types/utils/evaluateConditions.d.ts b/amplify/functions/fetchDocuments/node_modules/@smithy/util-endpoints/dist-types/utils/evaluateConditions.d.ts new file mode 100644 index 0000000..4131beb --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@smithy/util-endpoints/dist-types/utils/evaluateConditions.d.ts @@ -0,0 +1,8 @@ +import { ConditionObject, EvaluateOptions, FunctionReturn } from "../types"; +export declare const evaluateConditions: (conditions: ConditionObject[] | undefined, options: EvaluateOptions) => { + result: false; + referenceRecord?: undefined; +} | { + result: boolean; + referenceRecord: Record; +}; diff --git a/amplify/functions/fetchDocuments/node_modules/@smithy/util-endpoints/dist-types/utils/evaluateEndpointRule.d.ts b/amplify/functions/fetchDocuments/node_modules/@smithy/util-endpoints/dist-types/utils/evaluateEndpointRule.d.ts new file mode 100644 index 0000000..da9496e --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@smithy/util-endpoints/dist-types/utils/evaluateEndpointRule.d.ts @@ -0,0 +1,3 @@ +import { EndpointV2 } from "@smithy/types"; +import { EndpointRuleObject, EvaluateOptions } from "../types"; +export declare const evaluateEndpointRule: (endpointRule: EndpointRuleObject, options: EvaluateOptions) => EndpointV2 | undefined; diff --git a/amplify/functions/fetchDocuments/node_modules/@smithy/util-endpoints/dist-types/utils/evaluateErrorRule.d.ts b/amplify/functions/fetchDocuments/node_modules/@smithy/util-endpoints/dist-types/utils/evaluateErrorRule.d.ts new file mode 100644 index 0000000..df4973d --- /dev/null +++ 
b/amplify/functions/fetchDocuments/node_modules/@smithy/util-endpoints/dist-types/utils/evaluateErrorRule.d.ts @@ -0,0 +1,2 @@ +import { ErrorRuleObject, EvaluateOptions } from "../types"; +export declare const evaluateErrorRule: (errorRule: ErrorRuleObject, options: EvaluateOptions) => void; diff --git a/amplify/functions/fetchDocuments/node_modules/@smithy/util-endpoints/dist-types/utils/evaluateExpression.d.ts b/amplify/functions/fetchDocuments/node_modules/@smithy/util-endpoints/dist-types/utils/evaluateExpression.d.ts new file mode 100644 index 0000000..2541960 --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@smithy/util-endpoints/dist-types/utils/evaluateExpression.d.ts @@ -0,0 +1,2 @@ +import { EvaluateOptions, Expression } from "../types"; +export declare const evaluateExpression: (obj: Expression, keyName: string, options: EvaluateOptions) => import("../types").FunctionReturn; diff --git a/amplify/functions/fetchDocuments/node_modules/@smithy/util-endpoints/dist-types/utils/evaluateRules.d.ts b/amplify/functions/fetchDocuments/node_modules/@smithy/util-endpoints/dist-types/utils/evaluateRules.d.ts new file mode 100644 index 0000000..d38c8be --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@smithy/util-endpoints/dist-types/utils/evaluateRules.d.ts @@ -0,0 +1,3 @@ +import { EndpointV2 } from "@smithy/types"; +import { EvaluateOptions } from "../types"; +export declare const evaluateRules: (rules: import("@smithy/types").RuleSetRules, options: EvaluateOptions) => EndpointV2; diff --git a/amplify/functions/fetchDocuments/node_modules/@smithy/util-endpoints/dist-types/utils/evaluateTemplate.d.ts b/amplify/functions/fetchDocuments/node_modules/@smithy/util-endpoints/dist-types/utils/evaluateTemplate.d.ts new file mode 100644 index 0000000..9b0b9ad --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@smithy/util-endpoints/dist-types/utils/evaluateTemplate.d.ts @@ -0,0 +1,2 @@ +import { EvaluateOptions } from 
"../types"; +export declare const evaluateTemplate: (template: string, options: EvaluateOptions) => string; diff --git a/amplify/functions/fetchDocuments/node_modules/@smithy/util-endpoints/dist-types/utils/evaluateTreeRule.d.ts b/amplify/functions/fetchDocuments/node_modules/@smithy/util-endpoints/dist-types/utils/evaluateTreeRule.d.ts new file mode 100644 index 0000000..2564388 --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@smithy/util-endpoints/dist-types/utils/evaluateTreeRule.d.ts @@ -0,0 +1,3 @@ +import { EndpointV2 } from "@smithy/types"; +import { EvaluateOptions, TreeRuleObject } from "../types"; +export declare const evaluateTreeRule: (treeRule: TreeRuleObject, options: EvaluateOptions) => EndpointV2 | undefined; diff --git a/amplify/functions/fetchDocuments/node_modules/@smithy/util-endpoints/dist-types/utils/getEndpointHeaders.d.ts b/amplify/functions/fetchDocuments/node_modules/@smithy/util-endpoints/dist-types/utils/getEndpointHeaders.d.ts new file mode 100644 index 0000000..a802565 --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@smithy/util-endpoints/dist-types/utils/getEndpointHeaders.d.ts @@ -0,0 +1,2 @@ +import { EndpointObjectHeaders, EvaluateOptions } from "../types"; +export declare const getEndpointHeaders: (headers: EndpointObjectHeaders, options: EvaluateOptions) => {}; diff --git a/amplify/functions/fetchDocuments/node_modules/@smithy/util-endpoints/dist-types/utils/getEndpointProperties.d.ts b/amplify/functions/fetchDocuments/node_modules/@smithy/util-endpoints/dist-types/utils/getEndpointProperties.d.ts new file mode 100644 index 0000000..9c83bb0 --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@smithy/util-endpoints/dist-types/utils/getEndpointProperties.d.ts @@ -0,0 +1,2 @@ +import { EndpointObjectProperties, EvaluateOptions } from "../types"; +export declare const getEndpointProperties: (properties: EndpointObjectProperties, options: EvaluateOptions) => {}; diff --git 
a/amplify/functions/fetchDocuments/node_modules/@smithy/util-endpoints/dist-types/utils/getEndpointProperty.d.ts b/amplify/functions/fetchDocuments/node_modules/@smithy/util-endpoints/dist-types/utils/getEndpointProperty.d.ts new file mode 100644 index 0000000..7bc5b82 --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@smithy/util-endpoints/dist-types/utils/getEndpointProperty.d.ts @@ -0,0 +1,3 @@ +import { EndpointObjectProperty } from "@smithy/types"; +import { EvaluateOptions } from "../types"; +export declare const getEndpointProperty: (property: EndpointObjectProperty, options: EvaluateOptions) => EndpointObjectProperty; diff --git a/amplify/functions/fetchDocuments/node_modules/@smithy/util-endpoints/dist-types/utils/getEndpointUrl.d.ts b/amplify/functions/fetchDocuments/node_modules/@smithy/util-endpoints/dist-types/utils/getEndpointUrl.d.ts new file mode 100644 index 0000000..4ab2289 --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@smithy/util-endpoints/dist-types/utils/getEndpointUrl.d.ts @@ -0,0 +1,2 @@ +import { EvaluateOptions, Expression } from "../types"; +export declare const getEndpointUrl: (endpointUrl: Expression, options: EvaluateOptions) => URL; diff --git a/amplify/functions/fetchDocuments/node_modules/@smithy/util-endpoints/dist-types/utils/getReferenceValue.d.ts b/amplify/functions/fetchDocuments/node_modules/@smithy/util-endpoints/dist-types/utils/getReferenceValue.d.ts new file mode 100644 index 0000000..3699ec1 --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@smithy/util-endpoints/dist-types/utils/getReferenceValue.d.ts @@ -0,0 +1,2 @@ +import { EvaluateOptions, ReferenceObject } from "../types"; +export declare const getReferenceValue: ({ ref }: ReferenceObject, options: EvaluateOptions) => import("../types").FunctionReturn; diff --git a/amplify/functions/fetchDocuments/node_modules/@smithy/util-endpoints/dist-types/utils/index.d.ts 
b/amplify/functions/fetchDocuments/node_modules/@smithy/util-endpoints/dist-types/utils/index.d.ts new file mode 100644 index 0000000..b571d02 --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@smithy/util-endpoints/dist-types/utils/index.d.ts @@ -0,0 +1,2 @@ +export * from "./customEndpointFunctions"; +export * from "./evaluateRules"; diff --git a/amplify/functions/fetchDocuments/node_modules/@smithy/util-endpoints/package.json b/amplify/functions/fetchDocuments/node_modules/@smithy/util-endpoints/package.json new file mode 100644 index 0000000..7c6f9b1 --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@smithy/util-endpoints/package.json @@ -0,0 +1,68 @@ +{ + "name": "@smithy/util-endpoints", + "version": "3.0.2", + "description": "Utilities to help with endpoint resolution.", + "main": "./dist-cjs/index.js", + "module": "./dist-es/index.js", + "scripts": { + "build": "concurrently 'yarn:build:cjs' 'yarn:build:es' 'yarn:build:types && yarn build:types:downlevel'", + "build:cjs": "node ../../scripts/inline util-endpoints", + "build:es": "yarn g:tsc -p tsconfig.es.json", + "build:types": "yarn g:tsc -p tsconfig.types.json", + "build:types:downlevel": "rimraf dist-types/ts3.4 && downlevel-dts dist-types dist-types/ts3.4", + "stage-release": "rimraf ./.release && yarn pack && mkdir ./.release && tar zxvf ./package.tgz --directory ./.release && rm ./package.tgz", + "clean": "rimraf ./dist-* && rimraf *.tsbuildinfo || exit 0", + "lint": "eslint -c ../../.eslintrc.js \"src/**/*.ts\"", + "format": "prettier --config ../../prettier.config.js --ignore-path ../../.prettierignore --write \"**/*.{ts,md,json}\"", + "test": "yarn g:vitest run", + "test:integration": "yarn g:vitest run -c vitest.config.integ.ts", + "test:watch": "yarn g:vitest watch", + "test:integration:watch": "yarn g:vitest watch -c vitest.config.integ.ts" + }, + "keywords": [ + "endpoint" + ], + "author": { + "name": "AWS SDK for JavaScript Team", + "url": 
"https://aws.amazon.com/javascript/" + }, + "license": "Apache-2.0", + "dependencies": { + "@smithy/node-config-provider": "^4.0.2", + "@smithy/types": "^4.2.0", + "tslib": "^2.6.2" + }, + "devDependencies": { + "@types/node": "^18.11.9", + "concurrently": "7.0.0", + "downlevel-dts": "0.10.1", + "rimraf": "3.0.2", + "typedoc": "0.23.23" + }, + "types": "./dist-types/index.d.ts", + "engines": { + "node": ">=18.0.0" + }, + "typesVersions": { + "<4.0": { + "types/*": [ + "types/ts3.4/*" + ] + } + }, + "files": [ + "dist-*/**" + ], + "homepage": "https://github.com/smithy-lang/smithy-typescript/tree/master/packages/util-endpoints", + "repository": { + "type": "git", + "url": "https://github.com/smithy-lang/smithy-typescript.git", + "directory": "packages/util-endpoints" + }, + "typedoc": { + "entryPoint": "src/index.ts" + }, + "publishConfig": { + "directory": ".release/package" + } +} \ No newline at end of file diff --git a/amplify/functions/fetchDocuments/node_modules/@smithy/util-hex-encoding/LICENSE b/amplify/functions/fetchDocuments/node_modules/@smithy/util-hex-encoding/LICENSE new file mode 100644 index 0000000..7b6491b --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@smithy/util-hex-encoding/LICENSE @@ -0,0 +1,201 @@ +Apache License + Version 2.0, January 2004 + http://www.apache.org/licenses/ + + TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION + + 1. Definitions. + + "License" shall mean the terms and conditions for use, reproduction, + and distribution as defined by Sections 1 through 9 of this document. + + "Licensor" shall mean the copyright owner or entity authorized by + the copyright owner that is granting the License. + + "Legal Entity" shall mean the union of the acting entity and all + other entities that control, are controlled by, or are under common + control with that entity. 
For the purposes of this definition, + "control" means (i) the power, direct or indirect, to cause the + direction or management of such entity, whether by contract or + otherwise, or (ii) ownership of fifty percent (50%) or more of the + outstanding shares, or (iii) beneficial ownership of such entity. + + "You" (or "Your") shall mean an individual or Legal Entity + exercising permissions granted by this License. + + "Source" form shall mean the preferred form for making modifications, + including but not limited to software source code, documentation + source, and configuration files. + + "Object" form shall mean any form resulting from mechanical + transformation or translation of a Source form, including but + not limited to compiled object code, generated documentation, + and conversions to other media types. + + "Work" shall mean the work of authorship, whether in Source or + Object form, made available under the License, as indicated by a + copyright notice that is included in or attached to the work + (an example is provided in the Appendix below). + + "Derivative Works" shall mean any work, whether in Source or Object + form, that is based on (or derived from) the Work and for which the + editorial revisions, annotations, elaborations, or other modifications + represent, as a whole, an original work of authorship. For the purposes + of this License, Derivative Works shall not include works that remain + separable from, or merely link (or bind by name) to the interfaces of, + the Work and Derivative Works thereof. + + "Contribution" shall mean any work of authorship, including + the original version of the Work and any modifications or additions + to that Work or Derivative Works thereof, that is intentionally + submitted to Licensor for inclusion in the Work by the copyright owner + or by an individual or Legal Entity authorized to submit on behalf of + the copyright owner. 
For the purposes of this definition, "submitted" + means any form of electronic, verbal, or written communication sent + to the Licensor or its representatives, including but not limited to + communication on electronic mailing lists, source code control systems, + and issue tracking systems that are managed by, or on behalf of, the + Licensor for the purpose of discussing and improving the Work, but + excluding communication that is conspicuously marked or otherwise + designated in writing by the copyright owner as "Not a Contribution." + + "Contributor" shall mean Licensor and any individual or Legal Entity + on behalf of whom a Contribution has been received by Licensor and + subsequently incorporated within the Work. + + 2. Grant of Copyright License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + copyright license to reproduce, prepare Derivative Works of, + publicly display, publicly perform, sublicense, and distribute the + Work and such Derivative Works in Source or Object form. + + 3. Grant of Patent License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + (except as stated in this section) patent license to make, have made, + use, offer to sell, sell, import, and otherwise transfer the Work, + where such license applies only to those patent claims licensable + by such Contributor that are necessarily infringed by their + Contribution(s) alone or by combination of their Contribution(s) + with the Work to which such Contribution(s) was submitted. 
If You + institute patent litigation against any entity (including a + cross-claim or counterclaim in a lawsuit) alleging that the Work + or a Contribution incorporated within the Work constitutes direct + or contributory patent infringement, then any patent licenses + granted to You under this License for that Work shall terminate + as of the date such litigation is filed. + + 4. Redistribution. You may reproduce and distribute copies of the + Work or Derivative Works thereof in any medium, with or without + modifications, and in Source or Object form, provided that You + meet the following conditions: + + (a) You must give any other recipients of the Work or + Derivative Works a copy of this License; and + + (b) You must cause any modified files to carry prominent notices + stating that You changed the files; and + + (c) You must retain, in the Source form of any Derivative Works + that You distribute, all copyright, patent, trademark, and + attribution notices from the Source form of the Work, + excluding those notices that do not pertain to any part of + the Derivative Works; and + + (d) If the Work includes a "NOTICE" text file as part of its + distribution, then any Derivative Works that You distribute must + include a readable copy of the attribution notices contained + within such NOTICE file, excluding those notices that do not + pertain to any part of the Derivative Works, in at least one + of the following places: within a NOTICE text file distributed + as part of the Derivative Works; within the Source form or + documentation, if provided along with the Derivative Works; or, + within a display generated by the Derivative Works, if and + wherever such third-party notices normally appear. The contents + of the NOTICE file are for informational purposes only and + do not modify the License. 
You may add Your own attribution + notices within Derivative Works that You distribute, alongside + or as an addendum to the NOTICE text from the Work, provided + that such additional attribution notices cannot be construed + as modifying the License. + + You may add Your own copyright statement to Your modifications and + may provide additional or different license terms and conditions + for use, reproduction, or distribution of Your modifications, or + for any such Derivative Works as a whole, provided Your use, + reproduction, and distribution of the Work otherwise complies with + the conditions stated in this License. + + 5. Submission of Contributions. Unless You explicitly state otherwise, + any Contribution intentionally submitted for inclusion in the Work + by You to the Licensor shall be under the terms and conditions of + this License, without any additional terms or conditions. + Notwithstanding the above, nothing herein shall supersede or modify + the terms of any separate license agreement you may have executed + with Licensor regarding such Contributions. + + 6. Trademarks. This License does not grant permission to use the trade + names, trademarks, service marks, or product names of the Licensor, + except as required for reasonable and customary use in describing the + origin of the Work and reproducing the content of the NOTICE file. + + 7. Disclaimer of Warranty. Unless required by applicable law or + agreed to in writing, Licensor provides the Work (and each + Contributor provides its Contributions) on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or + implied, including, without limitation, any warranties or conditions + of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A + PARTICULAR PURPOSE. You are solely responsible for determining the + appropriateness of using or redistributing the Work and assume any + risks associated with Your exercise of permissions under this License. + + 8. 
Limitation of Liability. In no event and under no legal theory, + whether in tort (including negligence), contract, or otherwise, + unless required by applicable law (such as deliberate and grossly + negligent acts) or agreed to in writing, shall any Contributor be + liable to You for damages, including any direct, indirect, special, + incidental, or consequential damages of any character arising as a + result of this License or out of the use or inability to use the + Work (including but not limited to damages for loss of goodwill, + work stoppage, computer failure or malfunction, or any and all + other commercial damages or losses), even if such Contributor + has been advised of the possibility of such damages. + + 9. Accepting Warranty or Additional Liability. While redistributing + the Work or Derivative Works thereof, You may choose to offer, + and charge a fee for, acceptance of support, warranty, indemnity, + or other liability obligations and/or rights consistent with this + License. However, in accepting such obligations, You may act only + on Your own behalf and on Your sole responsibility, not on behalf + of any other Contributor, and only if You agree to indemnify, + defend, and hold each Contributor harmless for any liability + incurred by, or claims asserted against, such Contributor by reason + of your accepting any such warranty or additional liability. + + END OF TERMS AND CONDITIONS + + APPENDIX: How to apply the Apache License to your work. + + To apply the Apache License to your work, attach the following + boilerplate notice, with the fields enclosed by brackets "{}" + replaced with your own identifying information. (Don't include + the brackets!) The text should be enclosed in the appropriate + comment syntax for the file format. We also recommend that a + file or class name and description of purpose be included on the + same "printed page" as the copyright notice for easier + identification within third-party archives. 
+ + Copyright 2018-2020 Amazon.com, Inc. or its affiliates. All Rights Reserved. + + Licensed under the Apache License, Version 2.0 (the "License"); + you may not use this file except in compliance with the License. + You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + + Unless required by applicable law or agreed to in writing, software + distributed under the License is distributed on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + See the License for the specific language governing permissions and + limitations under the License. \ No newline at end of file diff --git a/amplify/functions/fetchDocuments/node_modules/@smithy/util-hex-encoding/README.md b/amplify/functions/fetchDocuments/node_modules/@smithy/util-hex-encoding/README.md new file mode 100644 index 0000000..67e4499 --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@smithy/util-hex-encoding/README.md @@ -0,0 +1,4 @@ +# @smithy/util-hex-encoding + +[![NPM version](https://img.shields.io/npm/v/@smithy/util-hex-encoding/latest.svg)](https://www.npmjs.com/package/@smithy/util-hex-encoding) +[![NPM downloads](https://img.shields.io/npm/dm/@smithy/util-hex-encoding.svg)](https://www.npmjs.com/package/@smithy/util-hex-encoding) diff --git a/amplify/functions/fetchDocuments/node_modules/@smithy/util-hex-encoding/dist-cjs/index.js b/amplify/functions/fetchDocuments/node_modules/@smithy/util-hex-encoding/dist-cjs/index.js new file mode 100644 index 0000000..78a59ea --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@smithy/util-hex-encoding/dist-cjs/index.js @@ -0,0 +1,67 @@ +var __defProp = Object.defineProperty; +var __getOwnPropDesc = Object.getOwnPropertyDescriptor; +var __getOwnPropNames = Object.getOwnPropertyNames; +var __hasOwnProp = Object.prototype.hasOwnProperty; +var __name = (target, value) => __defProp(target, "name", { value, configurable: true }); +var __export = (target, all) => { + 
for (var name in all) + __defProp(target, name, { get: all[name], enumerable: true }); +}; +var __copyProps = (to, from, except, desc) => { + if (from && typeof from === "object" || typeof from === "function") { + for (let key of __getOwnPropNames(from)) + if (!__hasOwnProp.call(to, key) && key !== except) + __defProp(to, key, { get: () => from[key], enumerable: !(desc = __getOwnPropDesc(from, key)) || desc.enumerable }); + } + return to; +}; +var __toCommonJS = (mod) => __copyProps(__defProp({}, "__esModule", { value: true }), mod); + +// src/index.ts +var src_exports = {}; +__export(src_exports, { + fromHex: () => fromHex, + toHex: () => toHex +}); +module.exports = __toCommonJS(src_exports); +var SHORT_TO_HEX = {}; +var HEX_TO_SHORT = {}; +for (let i = 0; i < 256; i++) { + let encodedByte = i.toString(16).toLowerCase(); + if (encodedByte.length === 1) { + encodedByte = `0${encodedByte}`; + } + SHORT_TO_HEX[i] = encodedByte; + HEX_TO_SHORT[encodedByte] = i; +} +function fromHex(encoded) { + if (encoded.length % 2 !== 0) { + throw new Error("Hex encoded strings must have an even number length"); + } + const out = new Uint8Array(encoded.length / 2); + for (let i = 0; i < encoded.length; i += 2) { + const encodedByte = encoded.slice(i, i + 2).toLowerCase(); + if (encodedByte in HEX_TO_SHORT) { + out[i / 2] = HEX_TO_SHORT[encodedByte]; + } else { + throw new Error(`Cannot decode unrecognized sequence ${encodedByte} as hexadecimal`); + } + } + return out; +} +__name(fromHex, "fromHex"); +function toHex(bytes) { + let out = ""; + for (let i = 0; i < bytes.byteLength; i++) { + out += SHORT_TO_HEX[bytes[i]]; + } + return out; +} +__name(toHex, "toHex"); +// Annotate the CommonJS export names for ESM import in node: + +0 && (module.exports = { + fromHex, + toHex +}); + diff --git a/amplify/functions/fetchDocuments/node_modules/@smithy/util-hex-encoding/dist-es/index.js b/amplify/functions/fetchDocuments/node_modules/@smithy/util-hex-encoding/dist-es/index.js new file mode 
100644 index 0000000..e47b3aa --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@smithy/util-hex-encoding/dist-es/index.js @@ -0,0 +1,33 @@ +const SHORT_TO_HEX = {}; +const HEX_TO_SHORT = {}; +for (let i = 0; i < 256; i++) { + let encodedByte = i.toString(16).toLowerCase(); + if (encodedByte.length === 1) { + encodedByte = `0${encodedByte}`; + } + SHORT_TO_HEX[i] = encodedByte; + HEX_TO_SHORT[encodedByte] = i; +} +export function fromHex(encoded) { + if (encoded.length % 2 !== 0) { + throw new Error("Hex encoded strings must have an even number length"); + } + const out = new Uint8Array(encoded.length / 2); + for (let i = 0; i < encoded.length; i += 2) { + const encodedByte = encoded.slice(i, i + 2).toLowerCase(); + if (encodedByte in HEX_TO_SHORT) { + out[i / 2] = HEX_TO_SHORT[encodedByte]; + } + else { + throw new Error(`Cannot decode unrecognized sequence ${encodedByte} as hexadecimal`); + } + } + return out; +} +export function toHex(bytes) { + let out = ""; + for (let i = 0; i < bytes.byteLength; i++) { + out += SHORT_TO_HEX[bytes[i]]; + } + return out; +} diff --git a/amplify/functions/fetchDocuments/node_modules/@smithy/util-hex-encoding/dist-types/index.d.ts b/amplify/functions/fetchDocuments/node_modules/@smithy/util-hex-encoding/dist-types/index.d.ts new file mode 100644 index 0000000..9d4307a --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@smithy/util-hex-encoding/dist-types/index.d.ts @@ -0,0 +1,12 @@ +/** + * Converts a hexadecimal encoded string to a Uint8Array of bytes. + * + * @param encoded The hexadecimal encoded string + */ +export declare function fromHex(encoded: string): Uint8Array; +/** + * Converts a Uint8Array of binary data to a hexadecimal encoded string. 
+ * + * @param bytes The binary data to encode + */ +export declare function toHex(bytes: Uint8Array): string; diff --git a/amplify/functions/fetchDocuments/node_modules/@smithy/util-hex-encoding/dist-types/ts3.4/index.d.ts b/amplify/functions/fetchDocuments/node_modules/@smithy/util-hex-encoding/dist-types/ts3.4/index.d.ts new file mode 100644 index 0000000..02a8848 --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@smithy/util-hex-encoding/dist-types/ts3.4/index.d.ts @@ -0,0 +1,12 @@ +/** + * Converts a hexadecimal encoded string to a Uint8Array of bytes. + * + * @param encoded The hexadecimal encoded string + */ +export declare function fromHex(encoded: string): Uint8Array; +/** + * Converts a Uint8Array of binary data to a hexadecimal encoded string. + * + * @param bytes The binary data to encode + */ +export declare function toHex(bytes: Uint8Array): string; diff --git a/amplify/functions/fetchDocuments/node_modules/@smithy/util-hex-encoding/package.json b/amplify/functions/fetchDocuments/node_modules/@smithy/util-hex-encoding/package.json new file mode 100644 index 0000000..2c1ba3d --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@smithy/util-hex-encoding/package.json @@ -0,0 +1,60 @@ +{ + "name": "@smithy/util-hex-encoding", + "version": "4.0.0", + "description": "Converts binary buffers to and from lowercase hexadecimal encoding", + "scripts": { + "build": "concurrently 'yarn:build:cjs' 'yarn:build:es' 'yarn:build:types && yarn build:types:downlevel'", + "build:cjs": "node ../../scripts/inline util-hex-encoding", + "build:es": "yarn g:tsc -p tsconfig.es.json", + "build:types": "yarn g:tsc -p tsconfig.types.json", + "build:types:downlevel": "rimraf dist-types/ts3.4 && downlevel-dts dist-types dist-types/ts3.4", + "stage-release": "rimraf ./.release && yarn pack && mkdir ./.release && tar zxvf ./package.tgz --directory ./.release && rm ./package.tgz", + "clean": "rimraf ./dist-* && rimraf *.tsbuildinfo || exit 0", + "lint": 
"eslint -c ../../.eslintrc.js \"src/**/*.ts\"", + "format": "prettier --config ../../prettier.config.js --ignore-path ../.prettierignore --write \"**/*.{ts,md,json}\"", + "test": "yarn g:vitest run", + "test:watch": "yarn g:vitest watch" + }, + "author": { + "name": "AWS SDK for JavaScript Team", + "url": "https://aws.amazon.com/javascript/" + }, + "license": "Apache-2.0", + "main": "./dist-cjs/index.js", + "module": "./dist-es/index.js", + "dependencies": { + "tslib": "^2.6.2" + }, + "types": "./dist-types/index.d.ts", + "engines": { + "node": ">=18.0.0" + }, + "typesVersions": { + "<4.0": { + "dist-types/*": [ + "dist-types/ts3.4/*" + ] + } + }, + "files": [ + "dist-*/**" + ], + "homepage": "https://github.com/awslabs/smithy-typescript/tree/main/packages/util-hex-encoding", + "repository": { + "type": "git", + "url": "https://github.com/awslabs/smithy-typescript.git", + "directory": "packages/util-hex-encoding" + }, + "devDependencies": { + "concurrently": "7.0.0", + "downlevel-dts": "0.10.1", + "rimraf": "3.0.2", + "typedoc": "0.23.23" + }, + "typedoc": { + "entryPoint": "src/index.ts" + }, + "publishConfig": { + "directory": ".release/package" + } +} \ No newline at end of file diff --git a/amplify/functions/fetchDocuments/node_modules/@smithy/util-middleware/LICENSE b/amplify/functions/fetchDocuments/node_modules/@smithy/util-middleware/LICENSE new file mode 100644 index 0000000..a1895fa --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@smithy/util-middleware/LICENSE @@ -0,0 +1,201 @@ +Apache License + Version 2.0, January 2004 + http://www.apache.org/licenses/ + + TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION + + 1. Definitions. + + "License" shall mean the terms and conditions for use, reproduction, + and distribution as defined by Sections 1 through 9 of this document. + + "Licensor" shall mean the copyright owner or entity authorized by + the copyright owner that is granting the License. 
+ + "Legal Entity" shall mean the union of the acting entity and all + other entities that control, are controlled by, or are under common + control with that entity. For the purposes of this definition, + "control" means (i) the power, direct or indirect, to cause the + direction or management of such entity, whether by contract or + otherwise, or (ii) ownership of fifty percent (50%) or more of the + outstanding shares, or (iii) beneficial ownership of such entity. + + "You" (or "Your") shall mean an individual or Legal Entity + exercising permissions granted by this License. + + "Source" form shall mean the preferred form for making modifications, + including but not limited to software source code, documentation + source, and configuration files. + + "Object" form shall mean any form resulting from mechanical + transformation or translation of a Source form, including but + not limited to compiled object code, generated documentation, + and conversions to other media types. + + "Work" shall mean the work of authorship, whether in Source or + Object form, made available under the License, as indicated by a + copyright notice that is included in or attached to the work + (an example is provided in the Appendix below). + + "Derivative Works" shall mean any work, whether in Source or Object + form, that is based on (or derived from) the Work and for which the + editorial revisions, annotations, elaborations, or other modifications + represent, as a whole, an original work of authorship. For the purposes + of this License, Derivative Works shall not include works that remain + separable from, or merely link (or bind by name) to the interfaces of, + the Work and Derivative Works thereof. 
+ + "Contribution" shall mean any work of authorship, including + the original version of the Work and any modifications or additions + to that Work or Derivative Works thereof, that is intentionally + submitted to Licensor for inclusion in the Work by the copyright owner + or by an individual or Legal Entity authorized to submit on behalf of + the copyright owner. For the purposes of this definition, "submitted" + means any form of electronic, verbal, or written communication sent + to the Licensor or its representatives, including but not limited to + communication on electronic mailing lists, source code control systems, + and issue tracking systems that are managed by, or on behalf of, the + Licensor for the purpose of discussing and improving the Work, but + excluding communication that is conspicuously marked or otherwise + designated in writing by the copyright owner as "Not a Contribution." + + "Contributor" shall mean Licensor and any individual or Legal Entity + on behalf of whom a Contribution has been received by Licensor and + subsequently incorporated within the Work. + + 2. Grant of Copyright License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + copyright license to reproduce, prepare Derivative Works of, + publicly display, publicly perform, sublicense, and distribute the + Work and such Derivative Works in Source or Object form. + + 3. Grant of Patent License. 
Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + (except as stated in this section) patent license to make, have made, + use, offer to sell, sell, import, and otherwise transfer the Work, + where such license applies only to those patent claims licensable + by such Contributor that are necessarily infringed by their + Contribution(s) alone or by combination of their Contribution(s) + with the Work to which such Contribution(s) was submitted. If You + institute patent litigation against any entity (including a + cross-claim or counterclaim in a lawsuit) alleging that the Work + or a Contribution incorporated within the Work constitutes direct + or contributory patent infringement, then any patent licenses + granted to You under this License for that Work shall terminate + as of the date such litigation is filed. + + 4. Redistribution. You may reproduce and distribute copies of the + Work or Derivative Works thereof in any medium, with or without + modifications, and in Source or Object form, provided that You + meet the following conditions: + + (a) You must give any other recipients of the Work or + Derivative Works a copy of this License; and + + (b) You must cause any modified files to carry prominent notices + stating that You changed the files; and + + (c) You must retain, in the Source form of any Derivative Works + that You distribute, all copyright, patent, trademark, and + attribution notices from the Source form of the Work, + excluding those notices that do not pertain to any part of + the Derivative Works; and + + (d) If the Work includes a "NOTICE" text file as part of its + distribution, then any Derivative Works that You distribute must + include a readable copy of the attribution notices contained + within such NOTICE file, excluding those notices that do not + pertain to any part of the Derivative Works, in at least one + of 
the following places: within a NOTICE text file distributed + as part of the Derivative Works; within the Source form or + documentation, if provided along with the Derivative Works; or, + within a display generated by the Derivative Works, if and + wherever such third-party notices normally appear. The contents + of the NOTICE file are for informational purposes only and + do not modify the License. You may add Your own attribution + notices within Derivative Works that You distribute, alongside + or as an addendum to the NOTICE text from the Work, provided + that such additional attribution notices cannot be construed + as modifying the License. + + You may add Your own copyright statement to Your modifications and + may provide additional or different license terms and conditions + for use, reproduction, or distribution of Your modifications, or + for any such Derivative Works as a whole, provided Your use, + reproduction, and distribution of the Work otherwise complies with + the conditions stated in this License. + + 5. Submission of Contributions. Unless You explicitly state otherwise, + any Contribution intentionally submitted for inclusion in the Work + by You to the Licensor shall be under the terms and conditions of + this License, without any additional terms or conditions. + Notwithstanding the above, nothing herein shall supersede or modify + the terms of any separate license agreement you may have executed + with Licensor regarding such Contributions. + + 6. Trademarks. This License does not grant permission to use the trade + names, trademarks, service marks, or product names of the Licensor, + except as required for reasonable and customary use in describing the + origin of the Work and reproducing the content of the NOTICE file. + + 7. Disclaimer of Warranty. 
Unless required by applicable law or + agreed to in writing, Licensor provides the Work (and each + Contributor provides its Contributions) on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or + implied, including, without limitation, any warranties or conditions + of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A + PARTICULAR PURPOSE. You are solely responsible for determining the + appropriateness of using or redistributing the Work and assume any + risks associated with Your exercise of permissions under this License. + + 8. Limitation of Liability. In no event and under no legal theory, + whether in tort (including negligence), contract, or otherwise, + unless required by applicable law (such as deliberate and grossly + negligent acts) or agreed to in writing, shall any Contributor be + liable to You for damages, including any direct, indirect, special, + incidental, or consequential damages of any character arising as a + result of this License or out of the use or inability to use the + Work (including but not limited to damages for loss of goodwill, + work stoppage, computer failure or malfunction, or any and all + other commercial damages or losses), even if such Contributor + has been advised of the possibility of such damages. + + 9. Accepting Warranty or Additional Liability. While redistributing + the Work or Derivative Works thereof, You may choose to offer, + and charge a fee for, acceptance of support, warranty, indemnity, + or other liability obligations and/or rights consistent with this + License. However, in accepting such obligations, You may act only + on Your own behalf and on Your sole responsibility, not on behalf + of any other Contributor, and only if You agree to indemnify, + defend, and hold each Contributor harmless for any liability + incurred by, or claims asserted against, such Contributor by reason + of your accepting any such warranty or additional liability. 
+ + END OF TERMS AND CONDITIONS + + APPENDIX: How to apply the Apache License to your work. + + To apply the Apache License to your work, attach the following + boilerplate notice, with the fields enclosed by brackets "{}" + replaced with your own identifying information. (Don't include + the brackets!) The text should be enclosed in the appropriate + comment syntax for the file format. We also recommend that a + file or class name and description of purpose be included on the + same "printed page" as the copyright notice for easier + identification within third-party archives. + + Copyright 2021 Amazon.com, Inc. or its affiliates. All Rights Reserved. + + Licensed under the Apache License, Version 2.0 (the "License"); + you may not use this file except in compliance with the License. + You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + + Unless required by applicable law or agreed to in writing, software + distributed under the License is distributed on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + See the License for the specific language governing permissions and + limitations under the License. \ No newline at end of file diff --git a/amplify/functions/fetchDocuments/node_modules/@smithy/util-middleware/README.md b/amplify/functions/fetchDocuments/node_modules/@smithy/util-middleware/README.md new file mode 100644 index 0000000..f043cfa --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@smithy/util-middleware/README.md @@ -0,0 +1,12 @@ +# @smithy/util-middleware + +[![NPM version](https://img.shields.io/npm/v/@smithy/util-middleware/latest.svg)](https://www.npmjs.com/package/@smithy/util-middleware) +[![NPM downloads](https://img.shields.io/npm/dm/@smithy/util-middleware.svg)](https://www.npmjs.com/package/@smithy/util-middleware) + +> An internal package + +This package provides shared utilities for middleware. 
+ +## Usage + +You probably shouldn't, at least directly. diff --git a/amplify/functions/fetchDocuments/node_modules/@smithy/util-middleware/dist-cjs/getSmithyContext.js b/amplify/functions/fetchDocuments/node_modules/@smithy/util-middleware/dist-cjs/getSmithyContext.js new file mode 100644 index 0000000..532e610 --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@smithy/util-middleware/dist-cjs/getSmithyContext.js @@ -0,0 +1 @@ +module.exports = require("./index.js"); \ No newline at end of file diff --git a/amplify/functions/fetchDocuments/node_modules/@smithy/util-middleware/dist-cjs/index.js b/amplify/functions/fetchDocuments/node_modules/@smithy/util-middleware/dist-cjs/index.js new file mode 100644 index 0000000..dfccf17 --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@smithy/util-middleware/dist-cjs/index.js @@ -0,0 +1,45 @@ +var __defProp = Object.defineProperty; +var __getOwnPropDesc = Object.getOwnPropertyDescriptor; +var __getOwnPropNames = Object.getOwnPropertyNames; +var __hasOwnProp = Object.prototype.hasOwnProperty; +var __name = (target, value) => __defProp(target, "name", { value, configurable: true }); +var __export = (target, all) => { + for (var name in all) + __defProp(target, name, { get: all[name], enumerable: true }); +}; +var __copyProps = (to, from, except, desc) => { + if (from && typeof from === "object" || typeof from === "function") { + for (let key of __getOwnPropNames(from)) + if (!__hasOwnProp.call(to, key) && key !== except) + __defProp(to, key, { get: () => from[key], enumerable: !(desc = __getOwnPropDesc(from, key)) || desc.enumerable }); + } + return to; +}; +var __toCommonJS = (mod) => __copyProps(__defProp({}, "__esModule", { value: true }), mod); + +// src/index.ts +var src_exports = {}; +__export(src_exports, { + getSmithyContext: () => getSmithyContext, + normalizeProvider: () => normalizeProvider +}); +module.exports = __toCommonJS(src_exports); + +// src/getSmithyContext.ts +var 
import_types = require("@smithy/types"); +var getSmithyContext = /* @__PURE__ */ __name((context) => context[import_types.SMITHY_CONTEXT_KEY] || (context[import_types.SMITHY_CONTEXT_KEY] = {}), "getSmithyContext"); + +// src/normalizeProvider.ts +var normalizeProvider = /* @__PURE__ */ __name((input) => { + if (typeof input === "function") + return input; + const promisified = Promise.resolve(input); + return () => promisified; +}, "normalizeProvider"); +// Annotate the CommonJS export names for ESM import in node: + +0 && (module.exports = { + getSmithyContext, + normalizeProvider +}); + diff --git a/amplify/functions/fetchDocuments/node_modules/@smithy/util-middleware/dist-cjs/normalizeProvider.js b/amplify/functions/fetchDocuments/node_modules/@smithy/util-middleware/dist-cjs/normalizeProvider.js new file mode 100644 index 0000000..532e610 --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@smithy/util-middleware/dist-cjs/normalizeProvider.js @@ -0,0 +1 @@ +module.exports = require("./index.js"); \ No newline at end of file diff --git a/amplify/functions/fetchDocuments/node_modules/@smithy/util-middleware/dist-es/getSmithyContext.js b/amplify/functions/fetchDocuments/node_modules/@smithy/util-middleware/dist-es/getSmithyContext.js new file mode 100644 index 0000000..3848a0c --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@smithy/util-middleware/dist-es/getSmithyContext.js @@ -0,0 +1,2 @@ +import { SMITHY_CONTEXT_KEY } from "@smithy/types"; +export const getSmithyContext = (context) => context[SMITHY_CONTEXT_KEY] || (context[SMITHY_CONTEXT_KEY] = {}); diff --git a/amplify/functions/fetchDocuments/node_modules/@smithy/util-middleware/dist-es/index.js b/amplify/functions/fetchDocuments/node_modules/@smithy/util-middleware/dist-es/index.js new file mode 100644 index 0000000..484290d --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@smithy/util-middleware/dist-es/index.js @@ -0,0 +1,2 @@ +export * from 
"./getSmithyContext"; +export * from "./normalizeProvider"; diff --git a/amplify/functions/fetchDocuments/node_modules/@smithy/util-middleware/dist-es/normalizeProvider.js b/amplify/functions/fetchDocuments/node_modules/@smithy/util-middleware/dist-es/normalizeProvider.js new file mode 100644 index 0000000..a83ea99 --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@smithy/util-middleware/dist-es/normalizeProvider.js @@ -0,0 +1,6 @@ +export const normalizeProvider = (input) => { + if (typeof input === "function") + return input; + const promisified = Promise.resolve(input); + return () => promisified; +}; diff --git a/amplify/functions/fetchDocuments/node_modules/@smithy/util-middleware/dist-types/getSmithyContext.d.ts b/amplify/functions/fetchDocuments/node_modules/@smithy/util-middleware/dist-types/getSmithyContext.d.ts new file mode 100644 index 0000000..523ee47 --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@smithy/util-middleware/dist-types/getSmithyContext.d.ts @@ -0,0 +1,5 @@ +import { HandlerExecutionContext } from "@smithy/types"; +/** + * @internal + */ +export declare const getSmithyContext: (context: HandlerExecutionContext) => Record; diff --git a/amplify/functions/fetchDocuments/node_modules/@smithy/util-middleware/dist-types/index.d.ts b/amplify/functions/fetchDocuments/node_modules/@smithy/util-middleware/dist-types/index.d.ts new file mode 100644 index 0000000..3869284 --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@smithy/util-middleware/dist-types/index.d.ts @@ -0,0 +1,8 @@ +/** + * @internal + */ +export * from "./getSmithyContext"; +/** + * @internal + */ +export * from "./normalizeProvider"; diff --git a/amplify/functions/fetchDocuments/node_modules/@smithy/util-middleware/dist-types/normalizeProvider.d.ts b/amplify/functions/fetchDocuments/node_modules/@smithy/util-middleware/dist-types/normalizeProvider.d.ts new file mode 100644 index 0000000..4fe2d9a --- /dev/null +++ 
b/amplify/functions/fetchDocuments/node_modules/@smithy/util-middleware/dist-types/normalizeProvider.d.ts @@ -0,0 +1,7 @@ +import { Provider } from "@smithy/types"; +/** + * @internal + * + * @returns a provider function for the input value if it isn't already one. + */ +export declare const normalizeProvider: (input: T | Provider) => Provider; diff --git a/amplify/functions/fetchDocuments/node_modules/@smithy/util-middleware/dist-types/ts3.4/getSmithyContext.d.ts b/amplify/functions/fetchDocuments/node_modules/@smithy/util-middleware/dist-types/ts3.4/getSmithyContext.d.ts new file mode 100644 index 0000000..14cd7c4 --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@smithy/util-middleware/dist-types/ts3.4/getSmithyContext.d.ts @@ -0,0 +1,5 @@ +import { HandlerExecutionContext } from "@smithy/types"; +/** + * @internal + */ +export declare const getSmithyContext: (context: HandlerExecutionContext) => Record; diff --git a/amplify/functions/fetchDocuments/node_modules/@smithy/util-middleware/dist-types/ts3.4/index.d.ts b/amplify/functions/fetchDocuments/node_modules/@smithy/util-middleware/dist-types/ts3.4/index.d.ts new file mode 100644 index 0000000..ab07159 --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@smithy/util-middleware/dist-types/ts3.4/index.d.ts @@ -0,0 +1,8 @@ +/** + * @internal + */ +export * from "./getSmithyContext"; +/** + * @internal + */ +export * from "./normalizeProvider"; diff --git a/amplify/functions/fetchDocuments/node_modules/@smithy/util-middleware/dist-types/ts3.4/normalizeProvider.d.ts b/amplify/functions/fetchDocuments/node_modules/@smithy/util-middleware/dist-types/ts3.4/normalizeProvider.d.ts new file mode 100644 index 0000000..594e8fa --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@smithy/util-middleware/dist-types/ts3.4/normalizeProvider.d.ts @@ -0,0 +1,7 @@ +import { Provider } from "@smithy/types"; +/** + * @internal + * + * @returns a provider function for the input value if it 
isn't already one. + */ +export declare const normalizeProvider: (input: T | Provider) => Provider; diff --git a/amplify/functions/fetchDocuments/node_modules/@smithy/util-middleware/package.json b/amplify/functions/fetchDocuments/node_modules/@smithy/util-middleware/package.json new file mode 100644 index 0000000..b9aa172 --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@smithy/util-middleware/package.json @@ -0,0 +1,66 @@ +{ + "name": "@smithy/util-middleware", + "version": "4.0.2", + "description": "Shared utilities for to be used in middleware packages.", + "main": "./dist-cjs/index.js", + "module": "./dist-es/index.js", + "scripts": { + "build": "concurrently 'yarn:build:cjs' 'yarn:build:es' 'yarn:build:types && yarn build:types:downlevel'", + "build:cjs": "node ../../scripts/inline util-middleware", + "build:es": "yarn g:tsc -p tsconfig.es.json", + "build:types": "yarn g:tsc -p tsconfig.types.json", + "build:types:downlevel": "rimraf dist-types/ts3.4 && downlevel-dts dist-types dist-types/ts3.4", + "stage-release": "rimraf ./.release && yarn pack && mkdir ./.release && tar zxvf ./package.tgz --directory ./.release && rm ./package.tgz", + "clean": "rimraf ./dist-* && rimraf *.tsbuildinfo || exit 0", + "lint": "eslint -c ../../.eslintrc.js \"src/**/*.ts\"", + "format": "prettier --config ../../prettier.config.js --ignore-path ../../.prettierignore --write \"**/*.{ts,md,json}\"", + "test": "yarn g:vitest run", + "test:watch": "yarn g:vitest watch" + }, + "keywords": [ + "aws", + "middleware" + ], + "author": { + "name": "AWS SDK for JavaScript Team", + "url": "https://aws.amazon.com/javascript/" + }, + "license": "Apache-2.0", + "dependencies": { + "@smithy/types": "^4.2.0", + "tslib": "^2.6.2" + }, + "devDependencies": { + "@types/node": "^18.11.9", + "concurrently": "7.0.0", + "downlevel-dts": "0.10.1", + "rimraf": "3.0.2", + "typedoc": "0.23.23" + }, + "types": "./dist-types/index.d.ts", + "engines": { + "node": ">=18.0.0" + }, + 
"typesVersions": { + "<4.0": { + "types/*": [ + "types/ts3.4/*" + ] + } + }, + "files": [ + "dist-*/**" + ], + "homepage": "https://github.com/smithy-lang/smithy-typescript/tree/master/packages/util-middleware", + "repository": { + "type": "git", + "url": "https://github.com/smithy-lang/smithy-typescript.git", + "directory": "packages/util-middleware" + }, + "typedoc": { + "entryPoint": "src/index.ts" + }, + "publishConfig": { + "directory": ".release/package" + } +} \ No newline at end of file diff --git a/amplify/functions/fetchDocuments/node_modules/@smithy/util-retry/LICENSE b/amplify/functions/fetchDocuments/node_modules/@smithy/util-retry/LICENSE new file mode 100644 index 0000000..a1895fa --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@smithy/util-retry/LICENSE @@ -0,0 +1,201 @@ +Apache License + Version 2.0, January 2004 + http://www.apache.org/licenses/ + + TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION + + 1. Definitions. + + "License" shall mean the terms and conditions for use, reproduction, + and distribution as defined by Sections 1 through 9 of this document. + + "Licensor" shall mean the copyright owner or entity authorized by + the copyright owner that is granting the License. + + "Legal Entity" shall mean the union of the acting entity and all + other entities that control, are controlled by, or are under common + control with that entity. For the purposes of this definition, + "control" means (i) the power, direct or indirect, to cause the + direction or management of such entity, whether by contract or + otherwise, or (ii) ownership of fifty percent (50%) or more of the + outstanding shares, or (iii) beneficial ownership of such entity. + + "You" (or "Your") shall mean an individual or Legal Entity + exercising permissions granted by this License. 
+ + "Source" form shall mean the preferred form for making modifications, + including but not limited to software source code, documentation + source, and configuration files. + + "Object" form shall mean any form resulting from mechanical + transformation or translation of a Source form, including but + not limited to compiled object code, generated documentation, + and conversions to other media types. + + "Work" shall mean the work of authorship, whether in Source or + Object form, made available under the License, as indicated by a + copyright notice that is included in or attached to the work + (an example is provided in the Appendix below). + + "Derivative Works" shall mean any work, whether in Source or Object + form, that is based on (or derived from) the Work and for which the + editorial revisions, annotations, elaborations, or other modifications + represent, as a whole, an original work of authorship. For the purposes + of this License, Derivative Works shall not include works that remain + separable from, or merely link (or bind by name) to the interfaces of, + the Work and Derivative Works thereof. + + "Contribution" shall mean any work of authorship, including + the original version of the Work and any modifications or additions + to that Work or Derivative Works thereof, that is intentionally + submitted to Licensor for inclusion in the Work by the copyright owner + or by an individual or Legal Entity authorized to submit on behalf of + the copyright owner. 
For the purposes of this definition, "submitted" + means any form of electronic, verbal, or written communication sent + to the Licensor or its representatives, including but not limited to + communication on electronic mailing lists, source code control systems, + and issue tracking systems that are managed by, or on behalf of, the + Licensor for the purpose of discussing and improving the Work, but + excluding communication that is conspicuously marked or otherwise + designated in writing by the copyright owner as "Not a Contribution." + + "Contributor" shall mean Licensor and any individual or Legal Entity + on behalf of whom a Contribution has been received by Licensor and + subsequently incorporated within the Work. + + 2. Grant of Copyright License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + copyright license to reproduce, prepare Derivative Works of, + publicly display, publicly perform, sublicense, and distribute the + Work and such Derivative Works in Source or Object form. + + 3. Grant of Patent License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + (except as stated in this section) patent license to make, have made, + use, offer to sell, sell, import, and otherwise transfer the Work, + where such license applies only to those patent claims licensable + by such Contributor that are necessarily infringed by their + Contribution(s) alone or by combination of their Contribution(s) + with the Work to which such Contribution(s) was submitted. 
If You + institute patent litigation against any entity (including a + cross-claim or counterclaim in a lawsuit) alleging that the Work + or a Contribution incorporated within the Work constitutes direct + or contributory patent infringement, then any patent licenses + granted to You under this License for that Work shall terminate + as of the date such litigation is filed. + + 4. Redistribution. You may reproduce and distribute copies of the + Work or Derivative Works thereof in any medium, with or without + modifications, and in Source or Object form, provided that You + meet the following conditions: + + (a) You must give any other recipients of the Work or + Derivative Works a copy of this License; and + + (b) You must cause any modified files to carry prominent notices + stating that You changed the files; and + + (c) You must retain, in the Source form of any Derivative Works + that You distribute, all copyright, patent, trademark, and + attribution notices from the Source form of the Work, + excluding those notices that do not pertain to any part of + the Derivative Works; and + + (d) If the Work includes a "NOTICE" text file as part of its + distribution, then any Derivative Works that You distribute must + include a readable copy of the attribution notices contained + within such NOTICE file, excluding those notices that do not + pertain to any part of the Derivative Works, in at least one + of the following places: within a NOTICE text file distributed + as part of the Derivative Works; within the Source form or + documentation, if provided along with the Derivative Works; or, + within a display generated by the Derivative Works, if and + wherever such third-party notices normally appear. The contents + of the NOTICE file are for informational purposes only and + do not modify the License. 
You may add Your own attribution + notices within Derivative Works that You distribute, alongside + or as an addendum to the NOTICE text from the Work, provided + that such additional attribution notices cannot be construed + as modifying the License. + + You may add Your own copyright statement to Your modifications and + may provide additional or different license terms and conditions + for use, reproduction, or distribution of Your modifications, or + for any such Derivative Works as a whole, provided Your use, + reproduction, and distribution of the Work otherwise complies with + the conditions stated in this License. + + 5. Submission of Contributions. Unless You explicitly state otherwise, + any Contribution intentionally submitted for inclusion in the Work + by You to the Licensor shall be under the terms and conditions of + this License, without any additional terms or conditions. + Notwithstanding the above, nothing herein shall supersede or modify + the terms of any separate license agreement you may have executed + with Licensor regarding such Contributions. + + 6. Trademarks. This License does not grant permission to use the trade + names, trademarks, service marks, or product names of the Licensor, + except as required for reasonable and customary use in describing the + origin of the Work and reproducing the content of the NOTICE file. + + 7. Disclaimer of Warranty. Unless required by applicable law or + agreed to in writing, Licensor provides the Work (and each + Contributor provides its Contributions) on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or + implied, including, without limitation, any warranties or conditions + of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A + PARTICULAR PURPOSE. You are solely responsible for determining the + appropriateness of using or redistributing the Work and assume any + risks associated with Your exercise of permissions under this License. + + 8. 
Limitation of Liability. In no event and under no legal theory, + whether in tort (including negligence), contract, or otherwise, + unless required by applicable law (such as deliberate and grossly + negligent acts) or agreed to in writing, shall any Contributor be + liable to You for damages, including any direct, indirect, special, + incidental, or consequential damages of any character arising as a + result of this License or out of the use or inability to use the + Work (including but not limited to damages for loss of goodwill, + work stoppage, computer failure or malfunction, or any and all + other commercial damages or losses), even if such Contributor + has been advised of the possibility of such damages. + + 9. Accepting Warranty or Additional Liability. While redistributing + the Work or Derivative Works thereof, You may choose to offer, + and charge a fee for, acceptance of support, warranty, indemnity, + or other liability obligations and/or rights consistent with this + License. However, in accepting such obligations, You may act only + on Your own behalf and on Your sole responsibility, not on behalf + of any other Contributor, and only if You agree to indemnify, + defend, and hold each Contributor harmless for any liability + incurred by, or claims asserted against, such Contributor by reason + of your accepting any such warranty or additional liability. + + END OF TERMS AND CONDITIONS + + APPENDIX: How to apply the Apache License to your work. + + To apply the Apache License to your work, attach the following + boilerplate notice, with the fields enclosed by brackets "{}" + replaced with your own identifying information. (Don't include + the brackets!) The text should be enclosed in the appropriate + comment syntax for the file format. We also recommend that a + file or class name and description of purpose be included on the + same "printed page" as the copyright notice for easier + identification within third-party archives. 
+ + Copyright 2021 Amazon.com, Inc. or its affiliates. All Rights Reserved. + + Licensed under the Apache License, Version 2.0 (the "License"); + you may not use this file except in compliance with the License. + You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + + Unless required by applicable law or agreed to in writing, software + distributed under the License is distributed on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + See the License for the specific language governing permissions and + limitations under the License. \ No newline at end of file diff --git a/amplify/functions/fetchDocuments/node_modules/@smithy/util-retry/README.md b/amplify/functions/fetchDocuments/node_modules/@smithy/util-retry/README.md new file mode 100644 index 0000000..bcf11a9 --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@smithy/util-retry/README.md @@ -0,0 +1,78 @@ +# @smithy/util-retry + +[![NPM version](https://img.shields.io/npm/v/@smithy/util-retry/latest.svg)](https://www.npmjs.com/package/@smithy/util-retry) +[![NPM downloads](https://img.shields.io/npm/dm/@smithy/util-retry.svg)](https://www.npmjs.com/package/@smithy/util-retry) + +This package provides shared utilities for retries. + +## Usage + +### Default + +By default, each client already has a default retry strategy. The default retry count is 3, and +only retryable errors will be retried. + +[AWS Documentation: Retry behavior](https://docs.aws.amazon.com/sdkref/latest/guide/feature-retry-behavior.html). + +```js +import { S3Client } from "@aws-sdk/client-s3"; + +const client = new S3Client({}); // default retry strategy included. +``` + +### MaxAttempts + +If you want to change the number of attempts, you can provide `maxAttempts` configuration during client creation. 
+ +```js +import { S3Client } from "@aws-sdk/client-s3"; + +const client = new S3Client({ maxAttempts: 4 }); +``` + +This is recommended because the `StandardRetryStrategy` includes backoff calculation, +deciding whether an error should be retried, and a retry token counter. + +### MaxAttempts and BackoffComputation + +If you want to change the number of attempts and use a custom delay +computation, you can use the `ConfiguredRetryStrategy` from `@smithy/util-retry`. + +```js +import { S3Client } from "@aws-sdk/client-s3"; +import { ConfiguredRetryStrategy } from "@smithy/util-retry"; + +const client = new S3Client({ + retryStrategy: new ConfiguredRetryStrategy( + 4, // max attempts. + (attempt: number) => 100 + attempt * 1000 // backoff function. + ), +}); +``` + +This example sets the backoff at 100ms plus 1s per attempt. + +### MaxAttempts and RetryStrategy + +If you provide both `maxAttempts` and `retryStrategy`, the `retryStrategy` will +get precedence as it's more specific. + +```js +import { S3Client } from "@aws-sdk/client-s3"; +import { ConfiguredRetryStrategy } from "@smithy/util-retry"; + +const client = new S3Client({ + maxAttempts: 2, // ignored. + retryStrategy: new ConfiguredRetryStrategy( + 4, // used. + (attempt: number) => 100 + attempt * 1000 // backoff function. + ), +}); +``` + +### Further customization + +You can implement the `RetryStrategyV2` interface. 
+ +Source: https://github.com/smithy-lang/smithy-typescript/blob/main/packages/types/src/retry.ts +API Docs: https://docs.aws.amazon.com/AWSJavaScriptSDK/v3/latest/Package/-smithy-types/Interface/RetryStrategyV2/ diff --git a/amplify/functions/fetchDocuments/node_modules/@smithy/util-retry/dist-cjs/AdaptiveRetryStrategy.js b/amplify/functions/fetchDocuments/node_modules/@smithy/util-retry/dist-cjs/AdaptiveRetryStrategy.js new file mode 100644 index 0000000..532e610 --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@smithy/util-retry/dist-cjs/AdaptiveRetryStrategy.js @@ -0,0 +1 @@ +module.exports = require("./index.js"); \ No newline at end of file diff --git a/amplify/functions/fetchDocuments/node_modules/@smithy/util-retry/dist-cjs/ConfiguredRetryStrategy.js b/amplify/functions/fetchDocuments/node_modules/@smithy/util-retry/dist-cjs/ConfiguredRetryStrategy.js new file mode 100644 index 0000000..532e610 --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@smithy/util-retry/dist-cjs/ConfiguredRetryStrategy.js @@ -0,0 +1 @@ +module.exports = require("./index.js"); \ No newline at end of file diff --git a/amplify/functions/fetchDocuments/node_modules/@smithy/util-retry/dist-cjs/DefaultRateLimiter.js b/amplify/functions/fetchDocuments/node_modules/@smithy/util-retry/dist-cjs/DefaultRateLimiter.js new file mode 100644 index 0000000..532e610 --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@smithy/util-retry/dist-cjs/DefaultRateLimiter.js @@ -0,0 +1 @@ +module.exports = require("./index.js"); \ No newline at end of file diff --git a/amplify/functions/fetchDocuments/node_modules/@smithy/util-retry/dist-cjs/StandardRetryStrategy.js b/amplify/functions/fetchDocuments/node_modules/@smithy/util-retry/dist-cjs/StandardRetryStrategy.js new file mode 100644 index 0000000..532e610 --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@smithy/util-retry/dist-cjs/StandardRetryStrategy.js @@ -0,0 +1 @@ +module.exports = 
require("./index.js"); \ No newline at end of file diff --git a/amplify/functions/fetchDocuments/node_modules/@smithy/util-retry/dist-cjs/config.js b/amplify/functions/fetchDocuments/node_modules/@smithy/util-retry/dist-cjs/config.js new file mode 100644 index 0000000..532e610 --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@smithy/util-retry/dist-cjs/config.js @@ -0,0 +1 @@ +module.exports = require("./index.js"); \ No newline at end of file diff --git a/amplify/functions/fetchDocuments/node_modules/@smithy/util-retry/dist-cjs/constants.js b/amplify/functions/fetchDocuments/node_modules/@smithy/util-retry/dist-cjs/constants.js new file mode 100644 index 0000000..532e610 --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@smithy/util-retry/dist-cjs/constants.js @@ -0,0 +1 @@ +module.exports = require("./index.js"); \ No newline at end of file diff --git a/amplify/functions/fetchDocuments/node_modules/@smithy/util-retry/dist-cjs/defaultRetryBackoffStrategy.js b/amplify/functions/fetchDocuments/node_modules/@smithy/util-retry/dist-cjs/defaultRetryBackoffStrategy.js new file mode 100644 index 0000000..532e610 --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@smithy/util-retry/dist-cjs/defaultRetryBackoffStrategy.js @@ -0,0 +1 @@ +module.exports = require("./index.js"); \ No newline at end of file diff --git a/amplify/functions/fetchDocuments/node_modules/@smithy/util-retry/dist-cjs/defaultRetryToken.js b/amplify/functions/fetchDocuments/node_modules/@smithy/util-retry/dist-cjs/defaultRetryToken.js new file mode 100644 index 0000000..532e610 --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@smithy/util-retry/dist-cjs/defaultRetryToken.js @@ -0,0 +1 @@ +module.exports = require("./index.js"); \ No newline at end of file diff --git a/amplify/functions/fetchDocuments/node_modules/@smithy/util-retry/dist-cjs/index.js b/amplify/functions/fetchDocuments/node_modules/@smithy/util-retry/dist-cjs/index.js new 
file mode 100644 index 0000000..699447a --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@smithy/util-retry/dist-cjs/index.js @@ -0,0 +1,358 @@ +var __defProp = Object.defineProperty; +var __getOwnPropDesc = Object.getOwnPropertyDescriptor; +var __getOwnPropNames = Object.getOwnPropertyNames; +var __hasOwnProp = Object.prototype.hasOwnProperty; +var __name = (target, value) => __defProp(target, "name", { value, configurable: true }); +var __export = (target, all) => { + for (var name in all) + __defProp(target, name, { get: all[name], enumerable: true }); +}; +var __copyProps = (to, from, except, desc) => { + if (from && typeof from === "object" || typeof from === "function") { + for (let key of __getOwnPropNames(from)) + if (!__hasOwnProp.call(to, key) && key !== except) + __defProp(to, key, { get: () => from[key], enumerable: !(desc = __getOwnPropDesc(from, key)) || desc.enumerable }); + } + return to; +}; +var __toCommonJS = (mod) => __copyProps(__defProp({}, "__esModule", { value: true }), mod); + +// src/index.ts +var src_exports = {}; +__export(src_exports, { + AdaptiveRetryStrategy: () => AdaptiveRetryStrategy, + ConfiguredRetryStrategy: () => ConfiguredRetryStrategy, + DEFAULT_MAX_ATTEMPTS: () => DEFAULT_MAX_ATTEMPTS, + DEFAULT_RETRY_DELAY_BASE: () => DEFAULT_RETRY_DELAY_BASE, + DEFAULT_RETRY_MODE: () => DEFAULT_RETRY_MODE, + DefaultRateLimiter: () => DefaultRateLimiter, + INITIAL_RETRY_TOKENS: () => INITIAL_RETRY_TOKENS, + INVOCATION_ID_HEADER: () => INVOCATION_ID_HEADER, + MAXIMUM_RETRY_DELAY: () => MAXIMUM_RETRY_DELAY, + NO_RETRY_INCREMENT: () => NO_RETRY_INCREMENT, + REQUEST_HEADER: () => REQUEST_HEADER, + RETRY_COST: () => RETRY_COST, + RETRY_MODES: () => RETRY_MODES, + StandardRetryStrategy: () => StandardRetryStrategy, + THROTTLING_RETRY_DELAY_BASE: () => THROTTLING_RETRY_DELAY_BASE, + TIMEOUT_RETRY_COST: () => TIMEOUT_RETRY_COST +}); +module.exports = __toCommonJS(src_exports); + +// src/config.ts +var RETRY_MODES = /* @__PURE__ */ 
((RETRY_MODES2) => { + RETRY_MODES2["STANDARD"] = "standard"; + RETRY_MODES2["ADAPTIVE"] = "adaptive"; + return RETRY_MODES2; +})(RETRY_MODES || {}); +var DEFAULT_MAX_ATTEMPTS = 3; +var DEFAULT_RETRY_MODE = "standard" /* STANDARD */; + +// src/DefaultRateLimiter.ts +var import_service_error_classification = require("@smithy/service-error-classification"); +var DefaultRateLimiter = class _DefaultRateLimiter { + constructor(options) { + // Pre-set state variables + this.currentCapacity = 0; + this.enabled = false; + this.lastMaxRate = 0; + this.measuredTxRate = 0; + this.requestCount = 0; + this.lastTimestamp = 0; + this.timeWindow = 0; + this.beta = options?.beta ?? 0.7; + this.minCapacity = options?.minCapacity ?? 1; + this.minFillRate = options?.minFillRate ?? 0.5; + this.scaleConstant = options?.scaleConstant ?? 0.4; + this.smooth = options?.smooth ?? 0.8; + const currentTimeInSeconds = this.getCurrentTimeInSeconds(); + this.lastThrottleTime = currentTimeInSeconds; + this.lastTxRateBucket = Math.floor(this.getCurrentTimeInSeconds()); + this.fillRate = this.minFillRate; + this.maxCapacity = this.minCapacity; + } + static { + __name(this, "DefaultRateLimiter"); + } + static { + /** + * Only used in testing. 
+ */ + this.setTimeoutFn = setTimeout; + } + getCurrentTimeInSeconds() { + return Date.now() / 1e3; + } + async getSendToken() { + return this.acquireTokenBucket(1); + } + async acquireTokenBucket(amount) { + if (!this.enabled) { + return; + } + this.refillTokenBucket(); + if (amount > this.currentCapacity) { + const delay = (amount - this.currentCapacity) / this.fillRate * 1e3; + await new Promise((resolve) => _DefaultRateLimiter.setTimeoutFn(resolve, delay)); + } + this.currentCapacity = this.currentCapacity - amount; + } + refillTokenBucket() { + const timestamp = this.getCurrentTimeInSeconds(); + if (!this.lastTimestamp) { + this.lastTimestamp = timestamp; + return; + } + const fillAmount = (timestamp - this.lastTimestamp) * this.fillRate; + this.currentCapacity = Math.min(this.maxCapacity, this.currentCapacity + fillAmount); + this.lastTimestamp = timestamp; + } + updateClientSendingRate(response) { + let calculatedRate; + this.updateMeasuredRate(); + if ((0, import_service_error_classification.isThrottlingError)(response)) { + const rateToUse = !this.enabled ? 
this.measuredTxRate : Math.min(this.measuredTxRate, this.fillRate); + this.lastMaxRate = rateToUse; + this.calculateTimeWindow(); + this.lastThrottleTime = this.getCurrentTimeInSeconds(); + calculatedRate = this.cubicThrottle(rateToUse); + this.enableTokenBucket(); + } else { + this.calculateTimeWindow(); + calculatedRate = this.cubicSuccess(this.getCurrentTimeInSeconds()); + } + const newRate = Math.min(calculatedRate, 2 * this.measuredTxRate); + this.updateTokenBucketRate(newRate); + } + calculateTimeWindow() { + this.timeWindow = this.getPrecise(Math.pow(this.lastMaxRate * (1 - this.beta) / this.scaleConstant, 1 / 3)); + } + cubicThrottle(rateToUse) { + return this.getPrecise(rateToUse * this.beta); + } + cubicSuccess(timestamp) { + return this.getPrecise( + this.scaleConstant * Math.pow(timestamp - this.lastThrottleTime - this.timeWindow, 3) + this.lastMaxRate + ); + } + enableTokenBucket() { + this.enabled = true; + } + updateTokenBucketRate(newRate) { + this.refillTokenBucket(); + this.fillRate = Math.max(newRate, this.minFillRate); + this.maxCapacity = Math.max(newRate, this.minCapacity); + this.currentCapacity = Math.min(this.currentCapacity, this.maxCapacity); + } + updateMeasuredRate() { + const t = this.getCurrentTimeInSeconds(); + const timeBucket = Math.floor(t * 2) / 2; + this.requestCount++; + if (timeBucket > this.lastTxRateBucket) { + const currentRate = this.requestCount / (timeBucket - this.lastTxRateBucket); + this.measuredTxRate = this.getPrecise(currentRate * this.smooth + this.measuredTxRate * (1 - this.smooth)); + this.requestCount = 0; + this.lastTxRateBucket = timeBucket; + } + } + getPrecise(num) { + return parseFloat(num.toFixed(8)); + } +}; + +// src/constants.ts +var DEFAULT_RETRY_DELAY_BASE = 100; +var MAXIMUM_RETRY_DELAY = 20 * 1e3; +var THROTTLING_RETRY_DELAY_BASE = 500; +var INITIAL_RETRY_TOKENS = 500; +var RETRY_COST = 5; +var TIMEOUT_RETRY_COST = 10; +var NO_RETRY_INCREMENT = 1; +var INVOCATION_ID_HEADER = 
"amz-sdk-invocation-id"; +var REQUEST_HEADER = "amz-sdk-request"; + +// src/defaultRetryBackoffStrategy.ts +var getDefaultRetryBackoffStrategy = /* @__PURE__ */ __name(() => { + let delayBase = DEFAULT_RETRY_DELAY_BASE; + const computeNextBackoffDelay = /* @__PURE__ */ __name((attempts) => { + return Math.floor(Math.min(MAXIMUM_RETRY_DELAY, Math.random() * 2 ** attempts * delayBase)); + }, "computeNextBackoffDelay"); + const setDelayBase = /* @__PURE__ */ __name((delay) => { + delayBase = delay; + }, "setDelayBase"); + return { + computeNextBackoffDelay, + setDelayBase + }; +}, "getDefaultRetryBackoffStrategy"); + +// src/defaultRetryToken.ts +var createDefaultRetryToken = /* @__PURE__ */ __name(({ + retryDelay, + retryCount, + retryCost +}) => { + const getRetryCount = /* @__PURE__ */ __name(() => retryCount, "getRetryCount"); + const getRetryDelay = /* @__PURE__ */ __name(() => Math.min(MAXIMUM_RETRY_DELAY, retryDelay), "getRetryDelay"); + const getRetryCost = /* @__PURE__ */ __name(() => retryCost, "getRetryCost"); + return { + getRetryCount, + getRetryDelay, + getRetryCost + }; +}, "createDefaultRetryToken"); + +// src/StandardRetryStrategy.ts +var StandardRetryStrategy = class { + constructor(maxAttempts) { + this.maxAttempts = maxAttempts; + this.mode = "standard" /* STANDARD */; + this.capacity = INITIAL_RETRY_TOKENS; + this.retryBackoffStrategy = getDefaultRetryBackoffStrategy(); + this.maxAttemptsProvider = typeof maxAttempts === "function" ? 
maxAttempts : async () => maxAttempts; + } + static { + __name(this, "StandardRetryStrategy"); + } + // eslint-disable-next-line @typescript-eslint/no-unused-vars + async acquireInitialRetryToken(retryTokenScope) { + return createDefaultRetryToken({ + retryDelay: DEFAULT_RETRY_DELAY_BASE, + retryCount: 0 + }); + } + async refreshRetryTokenForRetry(token, errorInfo) { + const maxAttempts = await this.getMaxAttempts(); + if (this.shouldRetry(token, errorInfo, maxAttempts)) { + const errorType = errorInfo.errorType; + this.retryBackoffStrategy.setDelayBase( + errorType === "THROTTLING" ? THROTTLING_RETRY_DELAY_BASE : DEFAULT_RETRY_DELAY_BASE + ); + const delayFromErrorType = this.retryBackoffStrategy.computeNextBackoffDelay(token.getRetryCount()); + const retryDelay = errorInfo.retryAfterHint ? Math.max(errorInfo.retryAfterHint.getTime() - Date.now() || 0, delayFromErrorType) : delayFromErrorType; + const capacityCost = this.getCapacityCost(errorType); + this.capacity -= capacityCost; + return createDefaultRetryToken({ + retryDelay, + retryCount: token.getRetryCount() + 1, + retryCost: capacityCost + }); + } + throw new Error("No retry token available"); + } + recordSuccess(token) { + this.capacity = Math.max(INITIAL_RETRY_TOKENS, this.capacity + (token.getRetryCost() ?? NO_RETRY_INCREMENT)); + } + /** + * @returns the current available retry capacity. + * + * This number decreases when retries are executed and refills when requests or retries succeed. + */ + getCapacity() { + return this.capacity; + } + async getMaxAttempts() { + try { + return await this.maxAttemptsProvider(); + } catch (error) { + console.warn(`Max attempts provider could not resolve. 
Using default of ${DEFAULT_MAX_ATTEMPTS}`); + return DEFAULT_MAX_ATTEMPTS; + } + } + shouldRetry(tokenToRenew, errorInfo, maxAttempts) { + const attempts = tokenToRenew.getRetryCount() + 1; + return attempts < maxAttempts && this.capacity >= this.getCapacityCost(errorInfo.errorType) && this.isRetryableError(errorInfo.errorType); + } + getCapacityCost(errorType) { + return errorType === "TRANSIENT" ? TIMEOUT_RETRY_COST : RETRY_COST; + } + isRetryableError(errorType) { + return errorType === "THROTTLING" || errorType === "TRANSIENT"; + } +}; + +// src/AdaptiveRetryStrategy.ts +var AdaptiveRetryStrategy = class { + constructor(maxAttemptsProvider, options) { + this.maxAttemptsProvider = maxAttemptsProvider; + this.mode = "adaptive" /* ADAPTIVE */; + const { rateLimiter } = options ?? {}; + this.rateLimiter = rateLimiter ?? new DefaultRateLimiter(); + this.standardRetryStrategy = new StandardRetryStrategy(maxAttemptsProvider); + } + static { + __name(this, "AdaptiveRetryStrategy"); + } + async acquireInitialRetryToken(retryTokenScope) { + await this.rateLimiter.getSendToken(); + return this.standardRetryStrategy.acquireInitialRetryToken(retryTokenScope); + } + async refreshRetryTokenForRetry(tokenToRenew, errorInfo) { + this.rateLimiter.updateClientSendingRate(errorInfo); + return this.standardRetryStrategy.refreshRetryTokenForRetry(tokenToRenew, errorInfo); + } + recordSuccess(token) { + this.rateLimiter.updateClientSendingRate({}); + this.standardRetryStrategy.recordSuccess(token); + } +}; + +// src/ConfiguredRetryStrategy.ts +var ConfiguredRetryStrategy = class extends StandardRetryStrategy { + static { + __name(this, "ConfiguredRetryStrategy"); + } + /** + * @param maxAttempts - the maximum number of retry attempts allowed. + * e.g., if set to 3, then 4 total requests are possible. + * @param computeNextBackoffDelay - a millisecond delay for each retry or a function that takes the retry attempt + * and returns the delay. + * + * @example exponential backoff. 
+ * ```js + * new Client({ + * retryStrategy: new ConfiguredRetryStrategy(3, (attempt) => attempt ** 2) + * }); + * ``` + * @example constant delay. + * ```js + * new Client({ + * retryStrategy: new ConfiguredRetryStrategy(3, 2000) + * }); + * ``` + */ + constructor(maxAttempts, computeNextBackoffDelay = DEFAULT_RETRY_DELAY_BASE) { + super(typeof maxAttempts === "function" ? maxAttempts : async () => maxAttempts); + if (typeof computeNextBackoffDelay === "number") { + this.computeNextBackoffDelay = () => computeNextBackoffDelay; + } else { + this.computeNextBackoffDelay = computeNextBackoffDelay; + } + } + async refreshRetryTokenForRetry(tokenToRenew, errorInfo) { + const token = await super.refreshRetryTokenForRetry(tokenToRenew, errorInfo); + token.getRetryDelay = () => this.computeNextBackoffDelay(token.getRetryCount()); + return token; + } +}; +// Annotate the CommonJS export names for ESM import in node: + +0 && (module.exports = { + AdaptiveRetryStrategy, + ConfiguredRetryStrategy, + DefaultRateLimiter, + StandardRetryStrategy, + RETRY_MODES, + DEFAULT_MAX_ATTEMPTS, + DEFAULT_RETRY_MODE, + DEFAULT_RETRY_DELAY_BASE, + MAXIMUM_RETRY_DELAY, + THROTTLING_RETRY_DELAY_BASE, + INITIAL_RETRY_TOKENS, + RETRY_COST, + TIMEOUT_RETRY_COST, + NO_RETRY_INCREMENT, + INVOCATION_ID_HEADER, + REQUEST_HEADER +}); + diff --git a/amplify/functions/fetchDocuments/node_modules/@smithy/util-retry/dist-cjs/types.js b/amplify/functions/fetchDocuments/node_modules/@smithy/util-retry/dist-cjs/types.js new file mode 100644 index 0000000..532e610 --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@smithy/util-retry/dist-cjs/types.js @@ -0,0 +1 @@ +module.exports = require("./index.js"); \ No newline at end of file diff --git a/amplify/functions/fetchDocuments/node_modules/@smithy/util-retry/dist-es/AdaptiveRetryStrategy.js b/amplify/functions/fetchDocuments/node_modules/@smithy/util-retry/dist-es/AdaptiveRetryStrategy.js new file mode 100644 index 0000000..e20cf0f --- 
/dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@smithy/util-retry/dist-es/AdaptiveRetryStrategy.js @@ -0,0 +1,24 @@ +import { RETRY_MODES } from "./config"; +import { DefaultRateLimiter } from "./DefaultRateLimiter"; +import { StandardRetryStrategy } from "./StandardRetryStrategy"; +export class AdaptiveRetryStrategy { + constructor(maxAttemptsProvider, options) { + this.maxAttemptsProvider = maxAttemptsProvider; + this.mode = RETRY_MODES.ADAPTIVE; + const { rateLimiter } = options ?? {}; + this.rateLimiter = rateLimiter ?? new DefaultRateLimiter(); + this.standardRetryStrategy = new StandardRetryStrategy(maxAttemptsProvider); + } + async acquireInitialRetryToken(retryTokenScope) { + await this.rateLimiter.getSendToken(); + return this.standardRetryStrategy.acquireInitialRetryToken(retryTokenScope); + } + async refreshRetryTokenForRetry(tokenToRenew, errorInfo) { + this.rateLimiter.updateClientSendingRate(errorInfo); + return this.standardRetryStrategy.refreshRetryTokenForRetry(tokenToRenew, errorInfo); + } + recordSuccess(token) { + this.rateLimiter.updateClientSendingRate({}); + this.standardRetryStrategy.recordSuccess(token); + } +} diff --git a/amplify/functions/fetchDocuments/node_modules/@smithy/util-retry/dist-es/ConfiguredRetryStrategy.js b/amplify/functions/fetchDocuments/node_modules/@smithy/util-retry/dist-es/ConfiguredRetryStrategy.js new file mode 100644 index 0000000..541bdb2 --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@smithy/util-retry/dist-es/ConfiguredRetryStrategy.js @@ -0,0 +1,18 @@ +import { DEFAULT_RETRY_DELAY_BASE } from "./constants"; +import { StandardRetryStrategy } from "./StandardRetryStrategy"; +export class ConfiguredRetryStrategy extends StandardRetryStrategy { + constructor(maxAttempts, computeNextBackoffDelay = DEFAULT_RETRY_DELAY_BASE) { + super(typeof maxAttempts === "function" ? 
maxAttempts : async () => maxAttempts); + if (typeof computeNextBackoffDelay === "number") { + this.computeNextBackoffDelay = () => computeNextBackoffDelay; + } + else { + this.computeNextBackoffDelay = computeNextBackoffDelay; + } + } + async refreshRetryTokenForRetry(tokenToRenew, errorInfo) { + const token = await super.refreshRetryTokenForRetry(tokenToRenew, errorInfo); + token.getRetryDelay = () => this.computeNextBackoffDelay(token.getRetryCount()); + return token; + } +} diff --git a/amplify/functions/fetchDocuments/node_modules/@smithy/util-retry/dist-es/DefaultRateLimiter.js b/amplify/functions/fetchDocuments/node_modules/@smithy/util-retry/dist-es/DefaultRateLimiter.js new file mode 100644 index 0000000..15240c8 --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@smithy/util-retry/dist-es/DefaultRateLimiter.js @@ -0,0 +1,100 @@ +import { isThrottlingError } from "@smithy/service-error-classification"; +export class DefaultRateLimiter { + constructor(options) { + this.currentCapacity = 0; + this.enabled = false; + this.lastMaxRate = 0; + this.measuredTxRate = 0; + this.requestCount = 0; + this.lastTimestamp = 0; + this.timeWindow = 0; + this.beta = options?.beta ?? 0.7; + this.minCapacity = options?.minCapacity ?? 1; + this.minFillRate = options?.minFillRate ?? 0.5; + this.scaleConstant = options?.scaleConstant ?? 0.4; + this.smooth = options?.smooth ?? 
0.8; + const currentTimeInSeconds = this.getCurrentTimeInSeconds(); + this.lastThrottleTime = currentTimeInSeconds; + this.lastTxRateBucket = Math.floor(this.getCurrentTimeInSeconds()); + this.fillRate = this.minFillRate; + this.maxCapacity = this.minCapacity; + } + getCurrentTimeInSeconds() { + return Date.now() / 1000; + } + async getSendToken() { + return this.acquireTokenBucket(1); + } + async acquireTokenBucket(amount) { + if (!this.enabled) { + return; + } + this.refillTokenBucket(); + if (amount > this.currentCapacity) { + const delay = ((amount - this.currentCapacity) / this.fillRate) * 1000; + await new Promise((resolve) => DefaultRateLimiter.setTimeoutFn(resolve, delay)); + } + this.currentCapacity = this.currentCapacity - amount; + } + refillTokenBucket() { + const timestamp = this.getCurrentTimeInSeconds(); + if (!this.lastTimestamp) { + this.lastTimestamp = timestamp; + return; + } + const fillAmount = (timestamp - this.lastTimestamp) * this.fillRate; + this.currentCapacity = Math.min(this.maxCapacity, this.currentCapacity + fillAmount); + this.lastTimestamp = timestamp; + } + updateClientSendingRate(response) { + let calculatedRate; + this.updateMeasuredRate(); + if (isThrottlingError(response)) { + const rateToUse = !this.enabled ? 
this.measuredTxRate : Math.min(this.measuredTxRate, this.fillRate); + this.lastMaxRate = rateToUse; + this.calculateTimeWindow(); + this.lastThrottleTime = this.getCurrentTimeInSeconds(); + calculatedRate = this.cubicThrottle(rateToUse); + this.enableTokenBucket(); + } + else { + this.calculateTimeWindow(); + calculatedRate = this.cubicSuccess(this.getCurrentTimeInSeconds()); + } + const newRate = Math.min(calculatedRate, 2 * this.measuredTxRate); + this.updateTokenBucketRate(newRate); + } + calculateTimeWindow() { + this.timeWindow = this.getPrecise(Math.pow((this.lastMaxRate * (1 - this.beta)) / this.scaleConstant, 1 / 3)); + } + cubicThrottle(rateToUse) { + return this.getPrecise(rateToUse * this.beta); + } + cubicSuccess(timestamp) { + return this.getPrecise(this.scaleConstant * Math.pow(timestamp - this.lastThrottleTime - this.timeWindow, 3) + this.lastMaxRate); + } + enableTokenBucket() { + this.enabled = true; + } + updateTokenBucketRate(newRate) { + this.refillTokenBucket(); + this.fillRate = Math.max(newRate, this.minFillRate); + this.maxCapacity = Math.max(newRate, this.minCapacity); + this.currentCapacity = Math.min(this.currentCapacity, this.maxCapacity); + } + updateMeasuredRate() { + const t = this.getCurrentTimeInSeconds(); + const timeBucket = Math.floor(t * 2) / 2; + this.requestCount++; + if (timeBucket > this.lastTxRateBucket) { + const currentRate = this.requestCount / (timeBucket - this.lastTxRateBucket); + this.measuredTxRate = this.getPrecise(currentRate * this.smooth + this.measuredTxRate * (1 - this.smooth)); + this.requestCount = 0; + this.lastTxRateBucket = timeBucket; + } + } + getPrecise(num) { + return parseFloat(num.toFixed(8)); + } +} +DefaultRateLimiter.setTimeoutFn = setTimeout; diff --git a/amplify/functions/fetchDocuments/node_modules/@smithy/util-retry/dist-es/StandardRetryStrategy.js b/amplify/functions/fetchDocuments/node_modules/@smithy/util-retry/dist-es/StandardRetryStrategy.js new file mode 100644 index 0000000..07adde0 
--- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@smithy/util-retry/dist-es/StandardRetryStrategy.js @@ -0,0 +1,65 @@ +import { DEFAULT_MAX_ATTEMPTS, RETRY_MODES } from "./config"; +import { DEFAULT_RETRY_DELAY_BASE, INITIAL_RETRY_TOKENS, NO_RETRY_INCREMENT, RETRY_COST, THROTTLING_RETRY_DELAY_BASE, TIMEOUT_RETRY_COST, } from "./constants"; +import { getDefaultRetryBackoffStrategy } from "./defaultRetryBackoffStrategy"; +import { createDefaultRetryToken } from "./defaultRetryToken"; +export class StandardRetryStrategy { + constructor(maxAttempts) { + this.maxAttempts = maxAttempts; + this.mode = RETRY_MODES.STANDARD; + this.capacity = INITIAL_RETRY_TOKENS; + this.retryBackoffStrategy = getDefaultRetryBackoffStrategy(); + this.maxAttemptsProvider = typeof maxAttempts === "function" ? maxAttempts : async () => maxAttempts; + } + async acquireInitialRetryToken(retryTokenScope) { + return createDefaultRetryToken({ + retryDelay: DEFAULT_RETRY_DELAY_BASE, + retryCount: 0, + }); + } + async refreshRetryTokenForRetry(token, errorInfo) { + const maxAttempts = await this.getMaxAttempts(); + if (this.shouldRetry(token, errorInfo, maxAttempts)) { + const errorType = errorInfo.errorType; + this.retryBackoffStrategy.setDelayBase(errorType === "THROTTLING" ? THROTTLING_RETRY_DELAY_BASE : DEFAULT_RETRY_DELAY_BASE); + const delayFromErrorType = this.retryBackoffStrategy.computeNextBackoffDelay(token.getRetryCount()); + const retryDelay = errorInfo.retryAfterHint + ? Math.max(errorInfo.retryAfterHint.getTime() - Date.now() || 0, delayFromErrorType) + : delayFromErrorType; + const capacityCost = this.getCapacityCost(errorType); + this.capacity -= capacityCost; + return createDefaultRetryToken({ + retryDelay, + retryCount: token.getRetryCount() + 1, + retryCost: capacityCost, + }); + } + throw new Error("No retry token available"); + } + recordSuccess(token) { + this.capacity = Math.max(INITIAL_RETRY_TOKENS, this.capacity + (token.getRetryCost() ?? 
NO_RETRY_INCREMENT)); + } + getCapacity() { + return this.capacity; + } + async getMaxAttempts() { + try { + return await this.maxAttemptsProvider(); + } + catch (error) { + console.warn(`Max attempts provider could not resolve. Using default of ${DEFAULT_MAX_ATTEMPTS}`); + return DEFAULT_MAX_ATTEMPTS; + } + } + shouldRetry(tokenToRenew, errorInfo, maxAttempts) { + const attempts = tokenToRenew.getRetryCount() + 1; + return (attempts < maxAttempts && + this.capacity >= this.getCapacityCost(errorInfo.errorType) && + this.isRetryableError(errorInfo.errorType)); + } + getCapacityCost(errorType) { + return errorType === "TRANSIENT" ? TIMEOUT_RETRY_COST : RETRY_COST; + } + isRetryableError(errorType) { + return errorType === "THROTTLING" || errorType === "TRANSIENT"; + } +} diff --git a/amplify/functions/fetchDocuments/node_modules/@smithy/util-retry/dist-es/config.js b/amplify/functions/fetchDocuments/node_modules/@smithy/util-retry/dist-es/config.js new file mode 100644 index 0000000..438d42d --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@smithy/util-retry/dist-es/config.js @@ -0,0 +1,7 @@ +export var RETRY_MODES; +(function (RETRY_MODES) { + RETRY_MODES["STANDARD"] = "standard"; + RETRY_MODES["ADAPTIVE"] = "adaptive"; +})(RETRY_MODES || (RETRY_MODES = {})); +export const DEFAULT_MAX_ATTEMPTS = 3; +export const DEFAULT_RETRY_MODE = RETRY_MODES.STANDARD; diff --git a/amplify/functions/fetchDocuments/node_modules/@smithy/util-retry/dist-es/constants.js b/amplify/functions/fetchDocuments/node_modules/@smithy/util-retry/dist-es/constants.js new file mode 100644 index 0000000..0876f8e --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@smithy/util-retry/dist-es/constants.js @@ -0,0 +1,9 @@ +export const DEFAULT_RETRY_DELAY_BASE = 100; +export const MAXIMUM_RETRY_DELAY = 20 * 1000; +export const THROTTLING_RETRY_DELAY_BASE = 500; +export const INITIAL_RETRY_TOKENS = 500; +export const RETRY_COST = 5; +export const TIMEOUT_RETRY_COST = 10; 
+export const NO_RETRY_INCREMENT = 1; +export const INVOCATION_ID_HEADER = "amz-sdk-invocation-id"; +export const REQUEST_HEADER = "amz-sdk-request"; diff --git a/amplify/functions/fetchDocuments/node_modules/@smithy/util-retry/dist-es/defaultRetryBackoffStrategy.js b/amplify/functions/fetchDocuments/node_modules/@smithy/util-retry/dist-es/defaultRetryBackoffStrategy.js new file mode 100644 index 0000000..ce04bc5 --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@smithy/util-retry/dist-es/defaultRetryBackoffStrategy.js @@ -0,0 +1,14 @@ +import { DEFAULT_RETRY_DELAY_BASE, MAXIMUM_RETRY_DELAY } from "./constants"; +export const getDefaultRetryBackoffStrategy = () => { + let delayBase = DEFAULT_RETRY_DELAY_BASE; + const computeNextBackoffDelay = (attempts) => { + return Math.floor(Math.min(MAXIMUM_RETRY_DELAY, Math.random() * 2 ** attempts * delayBase)); + }; + const setDelayBase = (delay) => { + delayBase = delay; + }; + return { + computeNextBackoffDelay, + setDelayBase, + }; +}; diff --git a/amplify/functions/fetchDocuments/node_modules/@smithy/util-retry/dist-es/defaultRetryToken.js b/amplify/functions/fetchDocuments/node_modules/@smithy/util-retry/dist-es/defaultRetryToken.js new file mode 100644 index 0000000..203bb66 --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@smithy/util-retry/dist-es/defaultRetryToken.js @@ -0,0 +1,11 @@ +import { MAXIMUM_RETRY_DELAY } from "./constants"; +export const createDefaultRetryToken = ({ retryDelay, retryCount, retryCost, }) => { + const getRetryCount = () => retryCount; + const getRetryDelay = () => Math.min(MAXIMUM_RETRY_DELAY, retryDelay); + const getRetryCost = () => retryCost; + return { + getRetryCount, + getRetryDelay, + getRetryCost, + }; +}; diff --git a/amplify/functions/fetchDocuments/node_modules/@smithy/util-retry/dist-es/index.js b/amplify/functions/fetchDocuments/node_modules/@smithy/util-retry/dist-es/index.js new file mode 100644 index 0000000..8637ced --- /dev/null +++ 
b/amplify/functions/fetchDocuments/node_modules/@smithy/util-retry/dist-es/index.js @@ -0,0 +1,7 @@ +export * from "./AdaptiveRetryStrategy"; +export * from "./ConfiguredRetryStrategy"; +export * from "./DefaultRateLimiter"; +export * from "./StandardRetryStrategy"; +export * from "./config"; +export * from "./constants"; +export * from "./types"; diff --git a/amplify/functions/fetchDocuments/node_modules/@smithy/util-retry/dist-es/types.js b/amplify/functions/fetchDocuments/node_modules/@smithy/util-retry/dist-es/types.js new file mode 100644 index 0000000..cb0ff5c --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@smithy/util-retry/dist-es/types.js @@ -0,0 +1 @@ +export {}; diff --git a/amplify/functions/fetchDocuments/node_modules/@smithy/util-retry/dist-types/AdaptiveRetryStrategy.d.ts b/amplify/functions/fetchDocuments/node_modules/@smithy/util-retry/dist-types/AdaptiveRetryStrategy.d.ts new file mode 100644 index 0000000..8092519 --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@smithy/util-retry/dist-types/AdaptiveRetryStrategy.d.ts @@ -0,0 +1,33 @@ +import { Provider, RetryErrorInfo, RetryStrategyV2, RetryToken, StandardRetryToken } from "@smithy/types"; +import { RateLimiter } from "./types"; +/** + * @public + * + * Strategy options to be passed to AdaptiveRetryStrategy + */ +export interface AdaptiveRetryStrategyOptions { + rateLimiter?: RateLimiter; +} +/** + * @public + * + * The AdaptiveRetryStrategy is a retry strategy for executing against a very + * resource constrained set of resources. Care should be taken when using this + * retry strategy. By default, it uses a dynamic backoff delay based on load + * currently perceived against the downstream resource and performs circuit + * breaking to disable retries in the event of high downstream failures using + * the DefaultRateLimiter. 
+ * + * @see {@link StandardRetryStrategy} + * @see {@link DefaultRateLimiter } + */ +export declare class AdaptiveRetryStrategy implements RetryStrategyV2 { + private readonly maxAttemptsProvider; + private rateLimiter; + private standardRetryStrategy; + readonly mode: string; + constructor(maxAttemptsProvider: Provider, options?: AdaptiveRetryStrategyOptions); + acquireInitialRetryToken(retryTokenScope: string): Promise; + refreshRetryTokenForRetry(tokenToRenew: StandardRetryToken, errorInfo: RetryErrorInfo): Promise; + recordSuccess(token: StandardRetryToken): void; +} diff --git a/amplify/functions/fetchDocuments/node_modules/@smithy/util-retry/dist-types/ConfiguredRetryStrategy.d.ts b/amplify/functions/fetchDocuments/node_modules/@smithy/util-retry/dist-types/ConfiguredRetryStrategy.d.ts new file mode 100644 index 0000000..3250c6d --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@smithy/util-retry/dist-types/ConfiguredRetryStrategy.d.ts @@ -0,0 +1,32 @@ +import type { Provider, RetryBackoffStrategy, RetryErrorInfo, RetryStrategyV2, StandardRetryToken } from "@smithy/types"; +import { StandardRetryStrategy } from "./StandardRetryStrategy"; +/** + * @public + * + * This extension of the StandardRetryStrategy allows customizing the + * backoff computation. + */ +export declare class ConfiguredRetryStrategy extends StandardRetryStrategy implements RetryStrategyV2 { + private readonly computeNextBackoffDelay; + /** + * @param maxAttempts - the maximum number of retry attempts allowed. + * e.g., if set to 3, then 4 total requests are possible. + * @param computeNextBackoffDelay - a millisecond delay for each retry or a function that takes the retry attempt + * and returns the delay. + * + * @example exponential backoff. + * ```js + * new Client({ + * retryStrategy: new ConfiguredRetryStrategy(3, (attempt) => attempt ** 2) + * }); + * ``` + * @example constant delay. 
+ * ```js + * new Client({ + * retryStrategy: new ConfiguredRetryStrategy(3, 2000) + * }); + * ``` + */ + constructor(maxAttempts: number | Provider, computeNextBackoffDelay?: number | RetryBackoffStrategy["computeNextBackoffDelay"]); + refreshRetryTokenForRetry(tokenToRenew: StandardRetryToken, errorInfo: RetryErrorInfo): Promise; +} diff --git a/amplify/functions/fetchDocuments/node_modules/@smithy/util-retry/dist-types/DefaultRateLimiter.d.ts b/amplify/functions/fetchDocuments/node_modules/@smithy/util-retry/dist-types/DefaultRateLimiter.d.ts new file mode 100644 index 0000000..9d689fc --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@smithy/util-retry/dist-types/DefaultRateLimiter.d.ts @@ -0,0 +1,49 @@ +import { RateLimiter } from "./types"; +/** + * @public + */ +export interface DefaultRateLimiterOptions { + beta?: number; + minCapacity?: number; + minFillRate?: number; + scaleConstant?: number; + smooth?: number; +} +/** + * @public + */ +export declare class DefaultRateLimiter implements RateLimiter { + /** + * Only used in testing. 
+ */ + private static setTimeoutFn; + private beta; + private minCapacity; + private minFillRate; + private scaleConstant; + private smooth; + private currentCapacity; + private enabled; + private lastMaxRate; + private measuredTxRate; + private requestCount; + private fillRate; + private lastThrottleTime; + private lastTimestamp; + private lastTxRateBucket; + private maxCapacity; + private timeWindow; + constructor(options?: DefaultRateLimiterOptions); + private getCurrentTimeInSeconds; + getSendToken(): Promise; + private acquireTokenBucket; + private refillTokenBucket; + updateClientSendingRate(response: any): void; + private calculateTimeWindow; + private cubicThrottle; + private cubicSuccess; + private enableTokenBucket; + private updateTokenBucketRate; + private updateMeasuredRate; + private getPrecise; +} diff --git a/amplify/functions/fetchDocuments/node_modules/@smithy/util-retry/dist-types/StandardRetryStrategy.d.ts b/amplify/functions/fetchDocuments/node_modules/@smithy/util-retry/dist-types/StandardRetryStrategy.d.ts new file mode 100644 index 0000000..c100ebc --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@smithy/util-retry/dist-types/StandardRetryStrategy.d.ts @@ -0,0 +1,26 @@ +import { Provider, RetryErrorInfo, RetryStrategyV2, StandardRetryToken } from "@smithy/types"; +/** + * @public + */ +export declare class StandardRetryStrategy implements RetryStrategyV2 { + private readonly maxAttempts; + readonly mode: string; + private capacity; + private readonly retryBackoffStrategy; + private readonly maxAttemptsProvider; + constructor(maxAttempts: number); + constructor(maxAttemptsProvider: Provider); + acquireInitialRetryToken(retryTokenScope: string): Promise; + refreshRetryTokenForRetry(token: StandardRetryToken, errorInfo: RetryErrorInfo): Promise; + recordSuccess(token: StandardRetryToken): void; + /** + * @returns the current available retry capacity. 
+ * + * This number decreases when retries are executed and refills when requests or retries succeed. + */ + getCapacity(): number; + private getMaxAttempts; + private shouldRetry; + private getCapacityCost; + private isRetryableError; +} diff --git a/amplify/functions/fetchDocuments/node_modules/@smithy/util-retry/dist-types/config.d.ts b/amplify/functions/fetchDocuments/node_modules/@smithy/util-retry/dist-types/config.d.ts new file mode 100644 index 0000000..e4e74b3 --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@smithy/util-retry/dist-types/config.d.ts @@ -0,0 +1,20 @@ +/** + * @public + */ +export declare enum RETRY_MODES { + STANDARD = "standard", + ADAPTIVE = "adaptive" +} +/** + * @public + * + * The default value for how many HTTP requests an SDK should make for a + * single SDK operation invocation before giving up + */ +export declare const DEFAULT_MAX_ATTEMPTS = 3; +/** + * @public + * + * The default retry algorithm to use. + */ +export declare const DEFAULT_RETRY_MODE = RETRY_MODES.STANDARD; diff --git a/amplify/functions/fetchDocuments/node_modules/@smithy/util-retry/dist-types/constants.d.ts b/amplify/functions/fetchDocuments/node_modules/@smithy/util-retry/dist-types/constants.d.ts new file mode 100644 index 0000000..bc7fec8 --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@smithy/util-retry/dist-types/constants.d.ts @@ -0,0 +1,59 @@ +/** + * @public + * + * The base number of milliseconds to use in calculating a suitable cool-down + * time when a retryable error is encountered. + */ +export declare const DEFAULT_RETRY_DELAY_BASE = 100; +/** + * @public + * + * The maximum amount of time (in milliseconds) that will be used as a delay + * between retry attempts. + */ +export declare const MAXIMUM_RETRY_DELAY: number; +/** + * @public + * + * The retry delay base (in milliseconds) to use when a throttling error is + * encountered. 
+ */ +export declare const THROTTLING_RETRY_DELAY_BASE = 500; +/** + * @public + * + * Initial number of retry tokens in Retry Quota + */ +export declare const INITIAL_RETRY_TOKENS = 500; +/** + * @public + * + * The total amount of retry tokens to be decremented from retry token balance. + */ +export declare const RETRY_COST = 5; +/** + * @public + * + * The total amount of retry tokens to be decremented from retry token balance + * when a throttling error is encountered. + */ +export declare const TIMEOUT_RETRY_COST = 10; +/** + * @public + * + * The total amount of retry token to be incremented from retry token balance + * if an SDK operation invocation succeeds without requiring a retry request. + */ +export declare const NO_RETRY_INCREMENT = 1; +/** + * @public + * + * Header name for SDK invocation ID + */ +export declare const INVOCATION_ID_HEADER = "amz-sdk-invocation-id"; +/** + * @public + * + * Header name for request retry information. + */ +export declare const REQUEST_HEADER = "amz-sdk-request"; diff --git a/amplify/functions/fetchDocuments/node_modules/@smithy/util-retry/dist-types/defaultRetryBackoffStrategy.d.ts b/amplify/functions/fetchDocuments/node_modules/@smithy/util-retry/dist-types/defaultRetryBackoffStrategy.d.ts new file mode 100644 index 0000000..b70eb2d --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@smithy/util-retry/dist-types/defaultRetryBackoffStrategy.d.ts @@ -0,0 +1,5 @@ +import { StandardRetryBackoffStrategy } from "@smithy/types"; +/** + * @internal + */ +export declare const getDefaultRetryBackoffStrategy: () => StandardRetryBackoffStrategy; diff --git a/amplify/functions/fetchDocuments/node_modules/@smithy/util-retry/dist-types/defaultRetryToken.d.ts b/amplify/functions/fetchDocuments/node_modules/@smithy/util-retry/dist-types/defaultRetryToken.d.ts new file mode 100644 index 0000000..947b68f --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@smithy/util-retry/dist-types/defaultRetryToken.d.ts 
@@ -0,0 +1,9 @@ +import { StandardRetryToken } from "@smithy/types"; +/** + * @internal + */ +export declare const createDefaultRetryToken: ({ retryDelay, retryCount, retryCost, }: { + retryDelay: number; + retryCount: number; + retryCost?: number | undefined; +}) => StandardRetryToken; diff --git a/amplify/functions/fetchDocuments/node_modules/@smithy/util-retry/dist-types/index.d.ts b/amplify/functions/fetchDocuments/node_modules/@smithy/util-retry/dist-types/index.d.ts new file mode 100644 index 0000000..8637ced --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@smithy/util-retry/dist-types/index.d.ts @@ -0,0 +1,7 @@ +export * from "./AdaptiveRetryStrategy"; +export * from "./ConfiguredRetryStrategy"; +export * from "./DefaultRateLimiter"; +export * from "./StandardRetryStrategy"; +export * from "./config"; +export * from "./constants"; +export * from "./types"; diff --git a/amplify/functions/fetchDocuments/node_modules/@smithy/util-retry/dist-types/ts3.4/AdaptiveRetryStrategy.d.ts b/amplify/functions/fetchDocuments/node_modules/@smithy/util-retry/dist-types/ts3.4/AdaptiveRetryStrategy.d.ts new file mode 100644 index 0000000..f6b0ef4 --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@smithy/util-retry/dist-types/ts3.4/AdaptiveRetryStrategy.d.ts @@ -0,0 +1,33 @@ +import { Provider, RetryErrorInfo, RetryStrategyV2, RetryToken, StandardRetryToken } from "@smithy/types"; +import { RateLimiter } from "./types"; +/** + * @public + * + * Strategy options to be passed to AdaptiveRetryStrategy + */ +export interface AdaptiveRetryStrategyOptions { + rateLimiter?: RateLimiter; +} +/** + * @public + * + * The AdaptiveRetryStrategy is a retry strategy for executing against a very + * resource constrained set of resources. Care should be taken when using this + * retry strategy. 
By default, it uses a dynamic backoff delay based on load + * currently perceived against the downstream resource and performs circuit + * breaking to disable retries in the event of high downstream failures using + * the DefaultRateLimiter. + * + * @see {@link StandardRetryStrategy} + * @see {@link DefaultRateLimiter } + */ +export declare class AdaptiveRetryStrategy implements RetryStrategyV2 { + private readonly maxAttemptsProvider; + private rateLimiter; + private standardRetryStrategy; + readonly mode: string; + constructor(maxAttemptsProvider: Provider, options?: AdaptiveRetryStrategyOptions); + acquireInitialRetryToken(retryTokenScope: string): Promise; + refreshRetryTokenForRetry(tokenToRenew: StandardRetryToken, errorInfo: RetryErrorInfo): Promise; + recordSuccess(token: StandardRetryToken): void; +} diff --git a/amplify/functions/fetchDocuments/node_modules/@smithy/util-retry/dist-types/ts3.4/ConfiguredRetryStrategy.d.ts b/amplify/functions/fetchDocuments/node_modules/@smithy/util-retry/dist-types/ts3.4/ConfiguredRetryStrategy.d.ts new file mode 100644 index 0000000..7df2983 --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@smithy/util-retry/dist-types/ts3.4/ConfiguredRetryStrategy.d.ts @@ -0,0 +1,32 @@ +import { Provider, RetryBackoffStrategy, RetryErrorInfo, RetryStrategyV2, StandardRetryToken } from "@smithy/types"; +import { StandardRetryStrategy } from "./StandardRetryStrategy"; +/** + * @public + * + * This extension of the StandardRetryStrategy allows customizing the + * backoff computation. + */ +export declare class ConfiguredRetryStrategy extends StandardRetryStrategy implements RetryStrategyV2 { + private readonly computeNextBackoffDelay; + /** + * @param maxAttempts - the maximum number of retry attempts allowed. + * e.g., if set to 3, then 4 total requests are possible. + * @param computeNextBackoffDelay - a millisecond delay for each retry or a function that takes the retry attempt + * and returns the delay. 
+ * + * @example exponential backoff. + * ```js + * new Client({ + * retryStrategy: new ConfiguredRetryStrategy(3, (attempt) => attempt ** 2) + * }); + * ``` + * @example constant delay. + * ```js + * new Client({ + * retryStrategy: new ConfiguredRetryStrategy(3, 2000) + * }); + * ``` + */ + constructor(maxAttempts: number | Provider, computeNextBackoffDelay?: number | RetryBackoffStrategy["computeNextBackoffDelay"]); + refreshRetryTokenForRetry(tokenToRenew: StandardRetryToken, errorInfo: RetryErrorInfo): Promise; +} diff --git a/amplify/functions/fetchDocuments/node_modules/@smithy/util-retry/dist-types/ts3.4/DefaultRateLimiter.d.ts b/amplify/functions/fetchDocuments/node_modules/@smithy/util-retry/dist-types/ts3.4/DefaultRateLimiter.d.ts new file mode 100644 index 0000000..9c239d6 --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@smithy/util-retry/dist-types/ts3.4/DefaultRateLimiter.d.ts @@ -0,0 +1,49 @@ +import { RateLimiter } from "./types"; +/** + * @public + */ +export interface DefaultRateLimiterOptions { + beta?: number; + minCapacity?: number; + minFillRate?: number; + scaleConstant?: number; + smooth?: number; +} +/** + * @public + */ +export declare class DefaultRateLimiter implements RateLimiter { + /** + * Only used in testing. 
+ */ + private static setTimeoutFn; + private beta; + private minCapacity; + private minFillRate; + private scaleConstant; + private smooth; + private currentCapacity; + private enabled; + private lastMaxRate; + private measuredTxRate; + private requestCount; + private fillRate; + private lastThrottleTime; + private lastTimestamp; + private lastTxRateBucket; + private maxCapacity; + private timeWindow; + constructor(options?: DefaultRateLimiterOptions); + private getCurrentTimeInSeconds; + getSendToken(): Promise; + private acquireTokenBucket; + private refillTokenBucket; + updateClientSendingRate(response: any): void; + private calculateTimeWindow; + private cubicThrottle; + private cubicSuccess; + private enableTokenBucket; + private updateTokenBucketRate; + private updateMeasuredRate; + private getPrecise; +} diff --git a/amplify/functions/fetchDocuments/node_modules/@smithy/util-retry/dist-types/ts3.4/StandardRetryStrategy.d.ts b/amplify/functions/fetchDocuments/node_modules/@smithy/util-retry/dist-types/ts3.4/StandardRetryStrategy.d.ts new file mode 100644 index 0000000..c22f8b8 --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@smithy/util-retry/dist-types/ts3.4/StandardRetryStrategy.d.ts @@ -0,0 +1,26 @@ +import { Provider, RetryErrorInfo, RetryStrategyV2, StandardRetryToken } from "@smithy/types"; +/** + * @public + */ +export declare class StandardRetryStrategy implements RetryStrategyV2 { + private readonly maxAttempts; + readonly mode: string; + private capacity; + private readonly retryBackoffStrategy; + private readonly maxAttemptsProvider; + constructor(maxAttempts: number); + constructor(maxAttemptsProvider: Provider); + acquireInitialRetryToken(retryTokenScope: string): Promise; + refreshRetryTokenForRetry(token: StandardRetryToken, errorInfo: RetryErrorInfo): Promise; + recordSuccess(token: StandardRetryToken): void; + /** + * @returns the current available retry capacity. 
+ * + * This number decreases when retries are executed and refills when requests or retries succeed. + */ + getCapacity(): number; + private getMaxAttempts; + private shouldRetry; + private getCapacityCost; + private isRetryableError; +} diff --git a/amplify/functions/fetchDocuments/node_modules/@smithy/util-retry/dist-types/ts3.4/config.d.ts b/amplify/functions/fetchDocuments/node_modules/@smithy/util-retry/dist-types/ts3.4/config.d.ts new file mode 100644 index 0000000..6727a38 --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@smithy/util-retry/dist-types/ts3.4/config.d.ts @@ -0,0 +1,20 @@ +/** + * @public + */ +export declare enum RETRY_MODES { + STANDARD = "standard", + ADAPTIVE = "adaptive" +} +/** + * @public + * + * The default value for how many HTTP requests an SDK should make for a + * single SDK operation invocation before giving up + */ +export declare const DEFAULT_MAX_ATTEMPTS = 3; +/** + * @public + * + * The default retry algorithm to use. + */ +export declare const DEFAULT_RETRY_MODE = RETRY_MODES.STANDARD; diff --git a/amplify/functions/fetchDocuments/node_modules/@smithy/util-retry/dist-types/ts3.4/constants.d.ts b/amplify/functions/fetchDocuments/node_modules/@smithy/util-retry/dist-types/ts3.4/constants.d.ts new file mode 100644 index 0000000..5c1a5ce --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@smithy/util-retry/dist-types/ts3.4/constants.d.ts @@ -0,0 +1,59 @@ +/** + * @public + * + * The base number of milliseconds to use in calculating a suitable cool-down + * time when a retryable error is encountered. + */ +export declare const DEFAULT_RETRY_DELAY_BASE = 100; +/** + * @public + * + * The maximum amount of time (in milliseconds) that will be used as a delay + * between retry attempts. + */ +export declare const MAXIMUM_RETRY_DELAY: number; +/** + * @public + * + * The retry delay base (in milliseconds) to use when a throttling error is + * encountered. 
+ */ +export declare const THROTTLING_RETRY_DELAY_BASE = 500; +/** + * @public + * + * Initial number of retry tokens in Retry Quota + */ +export declare const INITIAL_RETRY_TOKENS = 500; +/** + * @public + * + * The total amount of retry tokens to be decremented from retry token balance. + */ +export declare const RETRY_COST = 5; +/** + * @public + * + * The total amount of retry tokens to be decremented from retry token balance + * when a throttling error is encountered. + */ +export declare const TIMEOUT_RETRY_COST = 10; +/** + * @public + * + * The total amount of retry token to be incremented from retry token balance + * if an SDK operation invocation succeeds without requiring a retry request. + */ +export declare const NO_RETRY_INCREMENT = 1; +/** + * @public + * + * Header name for SDK invocation ID + */ +export declare const INVOCATION_ID_HEADER = "amz-sdk-invocation-id"; +/** + * @public + * + * Header name for request retry information. + */ +export declare const REQUEST_HEADER = "amz-sdk-request"; diff --git a/amplify/functions/fetchDocuments/node_modules/@smithy/util-retry/dist-types/ts3.4/defaultRetryBackoffStrategy.d.ts b/amplify/functions/fetchDocuments/node_modules/@smithy/util-retry/dist-types/ts3.4/defaultRetryBackoffStrategy.d.ts new file mode 100644 index 0000000..1d632ca --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@smithy/util-retry/dist-types/ts3.4/defaultRetryBackoffStrategy.d.ts @@ -0,0 +1,5 @@ +import { StandardRetryBackoffStrategy } from "@smithy/types"; +/** + * @internal + */ +export declare const getDefaultRetryBackoffStrategy: () => StandardRetryBackoffStrategy; diff --git a/amplify/functions/fetchDocuments/node_modules/@smithy/util-retry/dist-types/ts3.4/defaultRetryToken.d.ts b/amplify/functions/fetchDocuments/node_modules/@smithy/util-retry/dist-types/ts3.4/defaultRetryToken.d.ts new file mode 100644 index 0000000..fd4b75e --- /dev/null +++ 
b/amplify/functions/fetchDocuments/node_modules/@smithy/util-retry/dist-types/ts3.4/defaultRetryToken.d.ts @@ -0,0 +1,9 @@ +import { StandardRetryToken } from "@smithy/types"; +/** + * @internal + */ +export declare const createDefaultRetryToken: ({ retryDelay, retryCount, retryCost, }: { + retryDelay: number; + retryCount: number; + retryCost?: number | undefined; +}) => StandardRetryToken; diff --git a/amplify/functions/fetchDocuments/node_modules/@smithy/util-retry/dist-types/ts3.4/index.d.ts b/amplify/functions/fetchDocuments/node_modules/@smithy/util-retry/dist-types/ts3.4/index.d.ts new file mode 100644 index 0000000..de9af3d --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@smithy/util-retry/dist-types/ts3.4/index.d.ts @@ -0,0 +1,7 @@ +export * from "./AdaptiveRetryStrategy"; +export * from "./ConfiguredRetryStrategy"; +export * from "./DefaultRateLimiter"; +export * from "./StandardRetryStrategy"; +export * from "./config"; +export * from "./constants"; +export * from "./types"; diff --git a/amplify/functions/fetchDocuments/node_modules/@smithy/util-retry/dist-types/ts3.4/types.d.ts b/amplify/functions/fetchDocuments/node_modules/@smithy/util-retry/dist-types/ts3.4/types.d.ts new file mode 100644 index 0000000..5a20c01 --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@smithy/util-retry/dist-types/ts3.4/types.d.ts @@ -0,0 +1,19 @@ +/** + * @internal + */ +export interface RateLimiter { + /** + * If there is sufficient capacity (tokens) available, it immediately returns. + * If there is not sufficient capacity, it will either sleep a certain amount + * of time until the rate limiter can retrieve a token from its token bucket + * or raise an exception indicating there is insufficient capacity. + */ + getSendToken: () => Promise; + /** + * Updates the client sending rate based on response. + * If the response was successful, the capacity and fill rate are increased. 
+ * If the response was a throttling response, the capacity and fill rate are + * decreased. Transient errors do not affect the rate limiter. + */ + updateClientSendingRate: (response: any) => void; +} diff --git a/amplify/functions/fetchDocuments/node_modules/@smithy/util-retry/dist-types/types.d.ts b/amplify/functions/fetchDocuments/node_modules/@smithy/util-retry/dist-types/types.d.ts new file mode 100644 index 0000000..b3f2bd1 --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@smithy/util-retry/dist-types/types.d.ts @@ -0,0 +1,19 @@ +/** + * @internal + */ +export interface RateLimiter { + /** + * If there is sufficient capacity (tokens) available, it immediately returns. + * If there is not sufficient capacity, it will either sleep a certain amount + * of time until the rate limiter can retrieve a token from its token bucket + * or raise an exception indicating there is insufficient capacity. + */ + getSendToken: () => Promise; + /** + * Updates the client sending rate based on response. + * If the response was successful, the capacity and fill rate are increased. + * If the response was a throttling response, the capacity and fill rate are + * decreased. Transient errors do not affect the rate limiter. 
+ */ + updateClientSendingRate: (response: any) => void; +} diff --git a/amplify/functions/fetchDocuments/node_modules/@smithy/util-retry/package.json b/amplify/functions/fetchDocuments/node_modules/@smithy/util-retry/package.json new file mode 100644 index 0000000..6379727 --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@smithy/util-retry/package.json @@ -0,0 +1,68 @@ +{ + "name": "@smithy/util-retry", + "version": "4.0.3", + "description": "Shared retry utilities to be used in middleware packages.", + "main": "./dist-cjs/index.js", + "module": "./dist-es/index.js", + "scripts": { + "build": "concurrently 'yarn:build:cjs' 'yarn:build:es' 'yarn:build:types && yarn build:types:downlevel'", + "build:cjs": "node ../../scripts/inline util-retry", + "build:es": "yarn g:tsc -p tsconfig.es.json", + "build:types": "yarn g:tsc -p tsconfig.types.json", + "build:types:downlevel": "rimraf dist-types/ts3.4 && downlevel-dts dist-types dist-types/ts3.4", + "stage-release": "rimraf ./.release && yarn pack && mkdir ./.release && tar zxvf ./package.tgz --directory ./.release && rm ./package.tgz", + "clean": "rimraf ./dist-* && rimraf *.tsbuildinfo || exit 0", + "lint": "eslint -c ../../.eslintrc.js \"src/**/*.ts\"", + "format": "prettier --config ../../prettier.config.js --ignore-path ../../.prettierignore --write \"**/*.{ts,md,json}\"", + "extract:docs": "api-extractor run --local", + "test": "yarn g:vitest run", + "test:watch": "yarn g:vitest watch" + }, + "keywords": [ + "aws", + "retry" + ], + "author": { + "name": "AWS SDK for JavaScript Team", + "url": "https://aws.amazon.com/javascript/" + }, + "license": "Apache-2.0", + "dependencies": { + "@smithy/service-error-classification": "^4.0.3", + "@smithy/types": "^4.2.0", + "tslib": "^2.6.2" + }, + "devDependencies": { + "@types/node": "^18.11.9", + "concurrently": "7.0.0", + "downlevel-dts": "0.10.1", + "rimraf": "3.0.2", + "typedoc": "0.23.23" + }, + "types": "./dist-types/index.d.ts", + "engines": { + "node": 
">=18.0.0" + }, + "typesVersions": { + "<4.0": { + "types/*": [ + "types/ts3.4/*" + ] + } + }, + "files": [ + "dist-*/**" + ], + "homepage": "https://github.com/smithy-lang/smithy-typescript/tree/master/packages/util-retry", + "repository": { + "type": "git", + "url": "https://github.com/smithy-lang/smithy-typescript.git", + "directory": "packages/util-retry" + }, + "typedoc": { + "entryPoint": "src/index.ts" + }, + "publishConfig": { + "directory": ".release/package" + } +} \ No newline at end of file diff --git a/amplify/functions/fetchDocuments/node_modules/@smithy/util-stream/LICENSE b/amplify/functions/fetchDocuments/node_modules/@smithy/util-stream/LICENSE new file mode 100644 index 0000000..7b6491b --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@smithy/util-stream/LICENSE @@ -0,0 +1,201 @@ +Apache License + Version 2.0, January 2004 + http://www.apache.org/licenses/ + + TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION + + 1. Definitions. + + "License" shall mean the terms and conditions for use, reproduction, + and distribution as defined by Sections 1 through 9 of this document. + + "Licensor" shall mean the copyright owner or entity authorized by + the copyright owner that is granting the License. + + "Legal Entity" shall mean the union of the acting entity and all + other entities that control, are controlled by, or are under common + control with that entity. For the purposes of this definition, + "control" means (i) the power, direct or indirect, to cause the + direction or management of such entity, whether by contract or + otherwise, or (ii) ownership of fifty percent (50%) or more of the + outstanding shares, or (iii) beneficial ownership of such entity. + + "You" (or "Your") shall mean an individual or Legal Entity + exercising permissions granted by this License. 
+ + "Source" form shall mean the preferred form for making modifications, + including but not limited to software source code, documentation + source, and configuration files. + + "Object" form shall mean any form resulting from mechanical + transformation or translation of a Source form, including but + not limited to compiled object code, generated documentation, + and conversions to other media types. + + "Work" shall mean the work of authorship, whether in Source or + Object form, made available under the License, as indicated by a + copyright notice that is included in or attached to the work + (an example is provided in the Appendix below). + + "Derivative Works" shall mean any work, whether in Source or Object + form, that is based on (or derived from) the Work and for which the + editorial revisions, annotations, elaborations, or other modifications + represent, as a whole, an original work of authorship. For the purposes + of this License, Derivative Works shall not include works that remain + separable from, or merely link (or bind by name) to the interfaces of, + the Work and Derivative Works thereof. + + "Contribution" shall mean any work of authorship, including + the original version of the Work and any modifications or additions + to that Work or Derivative Works thereof, that is intentionally + submitted to Licensor for inclusion in the Work by the copyright owner + or by an individual or Legal Entity authorized to submit on behalf of + the copyright owner. 
For the purposes of this definition, "submitted" + means any form of electronic, verbal, or written communication sent + to the Licensor or its representatives, including but not limited to + communication on electronic mailing lists, source code control systems, + and issue tracking systems that are managed by, or on behalf of, the + Licensor for the purpose of discussing and improving the Work, but + excluding communication that is conspicuously marked or otherwise + designated in writing by the copyright owner as "Not a Contribution." + + "Contributor" shall mean Licensor and any individual or Legal Entity + on behalf of whom a Contribution has been received by Licensor and + subsequently incorporated within the Work. + + 2. Grant of Copyright License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + copyright license to reproduce, prepare Derivative Works of, + publicly display, publicly perform, sublicense, and distribute the + Work and such Derivative Works in Source or Object form. + + 3. Grant of Patent License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + (except as stated in this section) patent license to make, have made, + use, offer to sell, sell, import, and otherwise transfer the Work, + where such license applies only to those patent claims licensable + by such Contributor that are necessarily infringed by their + Contribution(s) alone or by combination of their Contribution(s) + with the Work to which such Contribution(s) was submitted. 
If You + institute patent litigation against any entity (including a + cross-claim or counterclaim in a lawsuit) alleging that the Work + or a Contribution incorporated within the Work constitutes direct + or contributory patent infringement, then any patent licenses + granted to You under this License for that Work shall terminate + as of the date such litigation is filed. + + 4. Redistribution. You may reproduce and distribute copies of the + Work or Derivative Works thereof in any medium, with or without + modifications, and in Source or Object form, provided that You + meet the following conditions: + + (a) You must give any other recipients of the Work or + Derivative Works a copy of this License; and + + (b) You must cause any modified files to carry prominent notices + stating that You changed the files; and + + (c) You must retain, in the Source form of any Derivative Works + that You distribute, all copyright, patent, trademark, and + attribution notices from the Source form of the Work, + excluding those notices that do not pertain to any part of + the Derivative Works; and + + (d) If the Work includes a "NOTICE" text file as part of its + distribution, then any Derivative Works that You distribute must + include a readable copy of the attribution notices contained + within such NOTICE file, excluding those notices that do not + pertain to any part of the Derivative Works, in at least one + of the following places: within a NOTICE text file distributed + as part of the Derivative Works; within the Source form or + documentation, if provided along with the Derivative Works; or, + within a display generated by the Derivative Works, if and + wherever such third-party notices normally appear. The contents + of the NOTICE file are for informational purposes only and + do not modify the License. 
You may add Your own attribution + notices within Derivative Works that You distribute, alongside + or as an addendum to the NOTICE text from the Work, provided + that such additional attribution notices cannot be construed + as modifying the License. + + You may add Your own copyright statement to Your modifications and + may provide additional or different license terms and conditions + for use, reproduction, or distribution of Your modifications, or + for any such Derivative Works as a whole, provided Your use, + reproduction, and distribution of the Work otherwise complies with + the conditions stated in this License. + + 5. Submission of Contributions. Unless You explicitly state otherwise, + any Contribution intentionally submitted for inclusion in the Work + by You to the Licensor shall be under the terms and conditions of + this License, without any additional terms or conditions. + Notwithstanding the above, nothing herein shall supersede or modify + the terms of any separate license agreement you may have executed + with Licensor regarding such Contributions. + + 6. Trademarks. This License does not grant permission to use the trade + names, trademarks, service marks, or product names of the Licensor, + except as required for reasonable and customary use in describing the + origin of the Work and reproducing the content of the NOTICE file. + + 7. Disclaimer of Warranty. Unless required by applicable law or + agreed to in writing, Licensor provides the Work (and each + Contributor provides its Contributions) on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or + implied, including, without limitation, any warranties or conditions + of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A + PARTICULAR PURPOSE. You are solely responsible for determining the + appropriateness of using or redistributing the Work and assume any + risks associated with Your exercise of permissions under this License. + + 8. 
Limitation of Liability. In no event and under no legal theory, + whether in tort (including negligence), contract, or otherwise, + unless required by applicable law (such as deliberate and grossly + negligent acts) or agreed to in writing, shall any Contributor be + liable to You for damages, including any direct, indirect, special, + incidental, or consequential damages of any character arising as a + result of this License or out of the use or inability to use the + Work (including but not limited to damages for loss of goodwill, + work stoppage, computer failure or malfunction, or any and all + other commercial damages or losses), even if such Contributor + has been advised of the possibility of such damages. + + 9. Accepting Warranty or Additional Liability. While redistributing + the Work or Derivative Works thereof, You may choose to offer, + and charge a fee for, acceptance of support, warranty, indemnity, + or other liability obligations and/or rights consistent with this + License. However, in accepting such obligations, You may act only + on Your own behalf and on Your sole responsibility, not on behalf + of any other Contributor, and only if You agree to indemnify, + defend, and hold each Contributor harmless for any liability + incurred by, or claims asserted against, such Contributor by reason + of your accepting any such warranty or additional liability. + + END OF TERMS AND CONDITIONS + + APPENDIX: How to apply the Apache License to your work. + + To apply the Apache License to your work, attach the following + boilerplate notice, with the fields enclosed by brackets "{}" + replaced with your own identifying information. (Don't include + the brackets!) The text should be enclosed in the appropriate + comment syntax for the file format. We also recommend that a + file or class name and description of purpose be included on the + same "printed page" as the copyright notice for easier + identification within third-party archives. 
+ + Copyright 2018-2020 Amazon.com, Inc. or its affiliates. All Rights Reserved. + + Licensed under the Apache License, Version 2.0 (the "License"); + you may not use this file except in compliance with the License. + You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + + Unless required by applicable law or agreed to in writing, software + distributed under the License is distributed on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + See the License for the specific language governing permissions and + limitations under the License. \ No newline at end of file diff --git a/amplify/functions/fetchDocuments/node_modules/@smithy/util-stream/README.md b/amplify/functions/fetchDocuments/node_modules/@smithy/util-stream/README.md new file mode 100644 index 0000000..6fcd9f6 --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@smithy/util-stream/README.md @@ -0,0 +1,6 @@ +# @smithy/util-stream + +[![NPM version](https://img.shields.io/npm/v/@smithy/util-stream/latest.svg)](https://www.npmjs.com/package/@smithy/util-stream) +[![NPM downloads](https://img.shields.io/npm/dm/@smithy/util-stream.svg)](https://www.npmjs.com/package/@smithy/util-stream) + +Package with utilities to operate on streams. 
diff --git a/amplify/functions/fetchDocuments/node_modules/@smithy/util-stream/dist-cjs/ByteArrayCollector.js b/amplify/functions/fetchDocuments/node_modules/@smithy/util-stream/dist-cjs/ByteArrayCollector.js new file mode 100644 index 0000000..ea8baac --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@smithy/util-stream/dist-cjs/ByteArrayCollector.js @@ -0,0 +1,35 @@ +"use strict"; +Object.defineProperty(exports, "__esModule", { value: true }); +exports.ByteArrayCollector = void 0; +class ByteArrayCollector { + constructor(allocByteArray) { + this.allocByteArray = allocByteArray; + this.byteLength = 0; + this.byteArrays = []; + } + push(byteArray) { + this.byteArrays.push(byteArray); + this.byteLength += byteArray.byteLength; + } + flush() { + if (this.byteArrays.length === 1) { + const bytes = this.byteArrays[0]; + this.reset(); + return bytes; + } + const aggregation = this.allocByteArray(this.byteLength); + let cursor = 0; + for (let i = 0; i < this.byteArrays.length; ++i) { + const bytes = this.byteArrays[i]; + aggregation.set(bytes, cursor); + cursor += bytes.byteLength; + } + this.reset(); + return aggregation; + } + reset() { + this.byteArrays = []; + this.byteLength = 0; + } +} +exports.ByteArrayCollector = ByteArrayCollector; diff --git a/amplify/functions/fetchDocuments/node_modules/@smithy/util-stream/dist-cjs/blob/Uint8ArrayBlobAdapter.js b/amplify/functions/fetchDocuments/node_modules/@smithy/util-stream/dist-cjs/blob/Uint8ArrayBlobAdapter.js new file mode 100644 index 0000000..0440577 --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@smithy/util-stream/dist-cjs/blob/Uint8ArrayBlobAdapter.js @@ -0,0 +1 @@ +module.exports = require("../index.js"); \ No newline at end of file diff --git a/amplify/functions/fetchDocuments/node_modules/@smithy/util-stream/dist-cjs/blob/transforms.js b/amplify/functions/fetchDocuments/node_modules/@smithy/util-stream/dist-cjs/blob/transforms.js new file mode 100644 index 0000000..0440577 
--- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@smithy/util-stream/dist-cjs/blob/transforms.js @@ -0,0 +1 @@ +module.exports = require("../index.js"); \ No newline at end of file diff --git a/amplify/functions/fetchDocuments/node_modules/@smithy/util-stream/dist-cjs/checksum/ChecksumStream.browser.js b/amplify/functions/fetchDocuments/node_modules/@smithy/util-stream/dist-cjs/checksum/ChecksumStream.browser.js new file mode 100644 index 0000000..b73363a --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@smithy/util-stream/dist-cjs/checksum/ChecksumStream.browser.js @@ -0,0 +1,7 @@ +"use strict"; +Object.defineProperty(exports, "__esModule", { value: true }); +exports.ChecksumStream = void 0; +const ReadableStreamRef = typeof ReadableStream === "function" ? ReadableStream : function () { }; +class ChecksumStream extends ReadableStreamRef { +} +exports.ChecksumStream = ChecksumStream; diff --git a/amplify/functions/fetchDocuments/node_modules/@smithy/util-stream/dist-cjs/checksum/ChecksumStream.js b/amplify/functions/fetchDocuments/node_modules/@smithy/util-stream/dist-cjs/checksum/ChecksumStream.js new file mode 100644 index 0000000..92d0bc0 --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@smithy/util-stream/dist-cjs/checksum/ChecksumStream.js @@ -0,0 +1,49 @@ +"use strict"; +Object.defineProperty(exports, "__esModule", { value: true }); +exports.ChecksumStream = void 0; +const util_base64_1 = require("@smithy/util-base64"); +const stream_1 = require("stream"); +class ChecksumStream extends stream_1.Duplex { + constructor({ expectedChecksum, checksum, source, checksumSourceLocation, base64Encoder, }) { + var _a, _b; + super(); + if (typeof source.pipe === "function") { + this.source = source; + } + else { + throw new Error(`@smithy/util-stream: unsupported source type ${(_b = (_a = source === null || source === void 0 ? void 0 : source.constructor) === null || _a === void 0 ? 
void 0 : _a.name) !== null && _b !== void 0 ? _b : source} in ChecksumStream.`); + } + this.base64Encoder = base64Encoder !== null && base64Encoder !== void 0 ? base64Encoder : util_base64_1.toBase64; + this.expectedChecksum = expectedChecksum; + this.checksum = checksum; + this.checksumSourceLocation = checksumSourceLocation; + this.source.pipe(this); + } + _read(size) { } + _write(chunk, encoding, callback) { + try { + this.checksum.update(chunk); + this.push(chunk); + } + catch (e) { + return callback(e); + } + return callback(); + } + async _final(callback) { + try { + const digest = await this.checksum.digest(); + const received = this.base64Encoder(digest); + if (this.expectedChecksum !== received) { + return callback(new Error(`Checksum mismatch: expected "${this.expectedChecksum}" but received "${received}"` + + ` in response header "${this.checksumSourceLocation}".`)); + } + } + catch (e) { + return callback(e); + } + this.push(null); + return callback(); + } +} +exports.ChecksumStream = ChecksumStream; diff --git a/amplify/functions/fetchDocuments/node_modules/@smithy/util-stream/dist-cjs/checksum/createChecksumStream.browser.js b/amplify/functions/fetchDocuments/node_modules/@smithy/util-stream/dist-cjs/checksum/createChecksumStream.browser.js new file mode 100644 index 0000000..2f6cf12 --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@smithy/util-stream/dist-cjs/checksum/createChecksumStream.browser.js @@ -0,0 +1,40 @@ +"use strict"; +Object.defineProperty(exports, "__esModule", { value: true }); +exports.createChecksumStream = void 0; +const util_base64_1 = require("@smithy/util-base64"); +const stream_type_check_1 = require("../stream-type-check"); +const ChecksumStream_browser_1 = require("./ChecksumStream.browser"); +const createChecksumStream = ({ expectedChecksum, checksum, source, checksumSourceLocation, base64Encoder, }) => { + var _a, _b; + if (!(0, stream_type_check_1.isReadableStream)(source)) { + throw new 
Error(`@smithy/util-stream: unsupported source type ${(_b = (_a = source === null || source === void 0 ? void 0 : source.constructor) === null || _a === void 0 ? void 0 : _a.name) !== null && _b !== void 0 ? _b : source} in ChecksumStream.`); + } + const encoder = base64Encoder !== null && base64Encoder !== void 0 ? base64Encoder : util_base64_1.toBase64; + if (typeof TransformStream !== "function") { + throw new Error("@smithy/util-stream: unable to instantiate ChecksumStream because API unavailable: ReadableStream/TransformStream."); + } + const transform = new TransformStream({ + start() { }, + async transform(chunk, controller) { + checksum.update(chunk); + controller.enqueue(chunk); + }, + async flush(controller) { + const digest = await checksum.digest(); + const received = encoder(digest); + if (expectedChecksum !== received) { + const error = new Error(`Checksum mismatch: expected "${expectedChecksum}" but received "${received}"` + + ` in response header "${checksumSourceLocation}".`); + controller.error(error); + } + else { + controller.terminate(); + } + }, + }); + source.pipeThrough(transform); + const readable = transform.readable; + Object.setPrototypeOf(readable, ChecksumStream_browser_1.ChecksumStream.prototype); + return readable; +}; +exports.createChecksumStream = createChecksumStream; diff --git a/amplify/functions/fetchDocuments/node_modules/@smithy/util-stream/dist-cjs/checksum/createChecksumStream.js b/amplify/functions/fetchDocuments/node_modules/@smithy/util-stream/dist-cjs/checksum/createChecksumStream.js new file mode 100644 index 0000000..57e2a2f --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@smithy/util-stream/dist-cjs/checksum/createChecksumStream.js @@ -0,0 +1,13 @@ +"use strict"; +Object.defineProperty(exports, "__esModule", { value: true }); +exports.createChecksumStream = void 0; +const stream_type_check_1 = require("../stream-type-check"); +const ChecksumStream_1 = require("./ChecksumStream"); +const 
createChecksumStream_browser_1 = require("./createChecksumStream.browser"); +function createChecksumStream(init) { + if (typeof ReadableStream === "function" && (0, stream_type_check_1.isReadableStream)(init.source)) { + return (0, createChecksumStream_browser_1.createChecksumStream)(init); + } + return new ChecksumStream_1.ChecksumStream(init); +} +exports.createChecksumStream = createChecksumStream; diff --git a/amplify/functions/fetchDocuments/node_modules/@smithy/util-stream/dist-cjs/createBufferedReadable.js b/amplify/functions/fetchDocuments/node_modules/@smithy/util-stream/dist-cjs/createBufferedReadable.js new file mode 100644 index 0000000..4c10847 --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@smithy/util-stream/dist-cjs/createBufferedReadable.js @@ -0,0 +1,61 @@ +"use strict"; +Object.defineProperty(exports, "__esModule", { value: true }); +exports.createBufferedReadable = void 0; +const node_stream_1 = require("node:stream"); +const ByteArrayCollector_1 = require("./ByteArrayCollector"); +const createBufferedReadableStream_1 = require("./createBufferedReadableStream"); +const stream_type_check_1 = require("./stream-type-check"); +function createBufferedReadable(upstream, size, logger) { + if ((0, stream_type_check_1.isReadableStream)(upstream)) { + return (0, createBufferedReadableStream_1.createBufferedReadableStream)(upstream, size, logger); + } + const downstream = new node_stream_1.Readable({ read() { } }); + let streamBufferingLoggedWarning = false; + let bytesSeen = 0; + const buffers = [ + "", + new ByteArrayCollector_1.ByteArrayCollector((size) => new Uint8Array(size)), + new ByteArrayCollector_1.ByteArrayCollector((size) => Buffer.from(new Uint8Array(size))), + ]; + let mode = -1; + upstream.on("data", (chunk) => { + const chunkMode = (0, createBufferedReadableStream_1.modeOf)(chunk, true); + if (mode !== chunkMode) { + if (mode >= 0) { + downstream.push((0, createBufferedReadableStream_1.flush)(buffers, mode)); + } + mode = 
chunkMode; + } + if (mode === -1) { + downstream.push(chunk); + return; + } + const chunkSize = (0, createBufferedReadableStream_1.sizeOf)(chunk); + bytesSeen += chunkSize; + const bufferSize = (0, createBufferedReadableStream_1.sizeOf)(buffers[mode]); + if (chunkSize >= size && bufferSize === 0) { + downstream.push(chunk); + } + else { + const newSize = (0, createBufferedReadableStream_1.merge)(buffers, mode, chunk); + if (!streamBufferingLoggedWarning && bytesSeen > size * 2) { + streamBufferingLoggedWarning = true; + logger === null || logger === void 0 ? void 0 : logger.warn(`@smithy/util-stream - stream chunk size ${chunkSize} is below threshold of ${size}, automatically buffering.`); + } + if (newSize >= size) { + downstream.push((0, createBufferedReadableStream_1.flush)(buffers, mode)); + } + } + }); + upstream.on("end", () => { + if (mode !== -1) { + const remainder = (0, createBufferedReadableStream_1.flush)(buffers, mode); + if ((0, createBufferedReadableStream_1.sizeOf)(remainder) > 0) { + downstream.push(remainder); + } + } + downstream.push(null); + }); + return downstream; +} +exports.createBufferedReadable = createBufferedReadable; diff --git a/amplify/functions/fetchDocuments/node_modules/@smithy/util-stream/dist-cjs/createBufferedReadableStream.js b/amplify/functions/fetchDocuments/node_modules/@smithy/util-stream/dist-cjs/createBufferedReadableStream.js new file mode 100644 index 0000000..2cd72aa --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@smithy/util-stream/dist-cjs/createBufferedReadableStream.js @@ -0,0 +1,104 @@ +"use strict"; +Object.defineProperty(exports, "__esModule", { value: true }); +exports.modeOf = exports.sizeOf = exports.flush = exports.merge = exports.createBufferedReadable = exports.createBufferedReadableStream = void 0; +const ByteArrayCollector_1 = require("./ByteArrayCollector"); +function createBufferedReadableStream(upstream, size, logger) { + const reader = upstream.getReader(); + let 
streamBufferingLoggedWarning = false; + let bytesSeen = 0; + const buffers = ["", new ByteArrayCollector_1.ByteArrayCollector((size) => new Uint8Array(size))]; + let mode = -1; + const pull = async (controller) => { + const { value, done } = await reader.read(); + const chunk = value; + if (done) { + if (mode !== -1) { + const remainder = flush(buffers, mode); + if (sizeOf(remainder) > 0) { + controller.enqueue(remainder); + } + } + controller.close(); + } + else { + const chunkMode = modeOf(chunk, false); + if (mode !== chunkMode) { + if (mode >= 0) { + controller.enqueue(flush(buffers, mode)); + } + mode = chunkMode; + } + if (mode === -1) { + controller.enqueue(chunk); + return; + } + const chunkSize = sizeOf(chunk); + bytesSeen += chunkSize; + const bufferSize = sizeOf(buffers[mode]); + if (chunkSize >= size && bufferSize === 0) { + controller.enqueue(chunk); + } + else { + const newSize = merge(buffers, mode, chunk); + if (!streamBufferingLoggedWarning && bytesSeen > size * 2) { + streamBufferingLoggedWarning = true; + logger === null || logger === void 0 ? 
void 0 : logger.warn(`@smithy/util-stream - stream chunk size ${chunkSize} is below threshold of ${size}, automatically buffering.`); + } + if (newSize >= size) { + controller.enqueue(flush(buffers, mode)); + } + else { + await pull(controller); + } + } + } + }; + return new ReadableStream({ + pull, + }); +} +exports.createBufferedReadableStream = createBufferedReadableStream; +exports.createBufferedReadable = createBufferedReadableStream; +function merge(buffers, mode, chunk) { + switch (mode) { + case 0: + buffers[0] += chunk; + return sizeOf(buffers[0]); + case 1: + case 2: + buffers[mode].push(chunk); + return sizeOf(buffers[mode]); + } +} +exports.merge = merge; +function flush(buffers, mode) { + switch (mode) { + case 0: + const s = buffers[0]; + buffers[0] = ""; + return s; + case 1: + case 2: + return buffers[mode].flush(); + } + throw new Error(`@smithy/util-stream - invalid index ${mode} given to flush()`); +} +exports.flush = flush; +function sizeOf(chunk) { + var _a, _b; + return (_b = (_a = chunk === null || chunk === void 0 ? void 0 : chunk.byteLength) !== null && _a !== void 0 ? _a : chunk === null || chunk === void 0 ? void 0 : chunk.length) !== null && _b !== void 0 ? 
_b : 0; +} +exports.sizeOf = sizeOf; +function modeOf(chunk, allowBuffer = true) { + if (allowBuffer && typeof Buffer !== "undefined" && chunk instanceof Buffer) { + return 2; + } + if (chunk instanceof Uint8Array) { + return 1; + } + if (typeof chunk === "string") { + return 0; + } + return -1; +} +exports.modeOf = modeOf; diff --git a/amplify/functions/fetchDocuments/node_modules/@smithy/util-stream/dist-cjs/getAwsChunkedEncodingStream.browser.js b/amplify/functions/fetchDocuments/node_modules/@smithy/util-stream/dist-cjs/getAwsChunkedEncodingStream.browser.js new file mode 100644 index 0000000..d8e540c --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@smithy/util-stream/dist-cjs/getAwsChunkedEncodingStream.browser.js @@ -0,0 +1,31 @@ +"use strict"; +Object.defineProperty(exports, "__esModule", { value: true }); +exports.getAwsChunkedEncodingStream = void 0; +const getAwsChunkedEncodingStream = (readableStream, options) => { + const { base64Encoder, bodyLengthChecker, checksumAlgorithmFn, checksumLocationName, streamHasher } = options; + const checksumRequired = base64Encoder !== undefined && + bodyLengthChecker !== undefined && + checksumAlgorithmFn !== undefined && + checksumLocationName !== undefined && + streamHasher !== undefined; + const digest = checksumRequired ? 
streamHasher(checksumAlgorithmFn, readableStream) : undefined; + const reader = readableStream.getReader(); + return new ReadableStream({ + async pull(controller) { + const { value, done } = await reader.read(); + if (done) { + controller.enqueue(`0\r\n`); + if (checksumRequired) { + const checksum = base64Encoder(await digest); + controller.enqueue(`${checksumLocationName}:${checksum}\r\n`); + controller.enqueue(`\r\n`); + } + controller.close(); + } + else { + controller.enqueue(`${(bodyLengthChecker(value) || 0).toString(16)}\r\n${value}\r\n`); + } + }, + }); +}; +exports.getAwsChunkedEncodingStream = getAwsChunkedEncodingStream; diff --git a/amplify/functions/fetchDocuments/node_modules/@smithy/util-stream/dist-cjs/getAwsChunkedEncodingStream.js b/amplify/functions/fetchDocuments/node_modules/@smithy/util-stream/dist-cjs/getAwsChunkedEncodingStream.js new file mode 100644 index 0000000..4f3f9e7 --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@smithy/util-stream/dist-cjs/getAwsChunkedEncodingStream.js @@ -0,0 +1,30 @@ +"use strict"; +Object.defineProperty(exports, "__esModule", { value: true }); +exports.getAwsChunkedEncodingStream = void 0; +const stream_1 = require("stream"); +const getAwsChunkedEncodingStream = (readableStream, options) => { + const { base64Encoder, bodyLengthChecker, checksumAlgorithmFn, checksumLocationName, streamHasher } = options; + const checksumRequired = base64Encoder !== undefined && + checksumAlgorithmFn !== undefined && + checksumLocationName !== undefined && + streamHasher !== undefined; + const digest = checksumRequired ? 
streamHasher(checksumAlgorithmFn, readableStream) : undefined; + const awsChunkedEncodingStream = new stream_1.Readable({ read: () => { } }); + readableStream.on("data", (data) => { + const length = bodyLengthChecker(data) || 0; + awsChunkedEncodingStream.push(`${length.toString(16)}\r\n`); + awsChunkedEncodingStream.push(data); + awsChunkedEncodingStream.push("\r\n"); + }); + readableStream.on("end", async () => { + awsChunkedEncodingStream.push(`0\r\n`); + if (checksumRequired) { + const checksum = base64Encoder(await digest); + awsChunkedEncodingStream.push(`${checksumLocationName}:${checksum}\r\n`); + awsChunkedEncodingStream.push(`\r\n`); + } + awsChunkedEncodingStream.push(null); + }); + return awsChunkedEncodingStream; +}; +exports.getAwsChunkedEncodingStream = getAwsChunkedEncodingStream; diff --git a/amplify/functions/fetchDocuments/node_modules/@smithy/util-stream/dist-cjs/headStream.browser.js b/amplify/functions/fetchDocuments/node_modules/@smithy/util-stream/dist-cjs/headStream.browser.js new file mode 100644 index 0000000..38512c1 --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@smithy/util-stream/dist-cjs/headStream.browser.js @@ -0,0 +1,36 @@ +"use strict"; +Object.defineProperty(exports, "__esModule", { value: true }); +exports.headStream = void 0; +async function headStream(stream, bytes) { + var _a; + let byteLengthCounter = 0; + const chunks = []; + const reader = stream.getReader(); + let isDone = false; + while (!isDone) { + const { done, value } = await reader.read(); + if (value) { + chunks.push(value); + byteLengthCounter += (_a = value === null || value === void 0 ? void 0 : value.byteLength) !== null && _a !== void 0 ? 
_a : 0; + } + if (byteLengthCounter >= bytes) { + break; + } + isDone = done; + } + reader.releaseLock(); + const collected = new Uint8Array(Math.min(bytes, byteLengthCounter)); + let offset = 0; + for (const chunk of chunks) { + if (chunk.byteLength > collected.byteLength - offset) { + collected.set(chunk.subarray(0, collected.byteLength - offset), offset); + break; + } + else { + collected.set(chunk, offset); + } + offset += chunk.length; + } + return collected; +} +exports.headStream = headStream; diff --git a/amplify/functions/fetchDocuments/node_modules/@smithy/util-stream/dist-cjs/headStream.js b/amplify/functions/fetchDocuments/node_modules/@smithy/util-stream/dist-cjs/headStream.js new file mode 100644 index 0000000..86103b3 --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@smithy/util-stream/dist-cjs/headStream.js @@ -0,0 +1,46 @@ +"use strict"; +Object.defineProperty(exports, "__esModule", { value: true }); +exports.headStream = void 0; +const stream_1 = require("stream"); +const headStream_browser_1 = require("./headStream.browser"); +const stream_type_check_1 = require("./stream-type-check"); +const headStream = (stream, bytes) => { + if ((0, stream_type_check_1.isReadableStream)(stream)) { + return (0, headStream_browser_1.headStream)(stream, bytes); + } + return new Promise((resolve, reject) => { + const collector = new Collector(); + collector.limit = bytes; + stream.pipe(collector); + stream.on("error", (err) => { + collector.end(); + reject(err); + }); + collector.on("error", reject); + collector.on("finish", function () { + const bytes = new Uint8Array(Buffer.concat(this.buffers)); + resolve(bytes); + }); + }); +}; +exports.headStream = headStream; +class Collector extends stream_1.Writable { + constructor() { + super(...arguments); + this.buffers = []; + this.limit = Infinity; + this.bytesBuffered = 0; + } + _write(chunk, encoding, callback) { + var _a; + this.buffers.push(chunk); + this.bytesBuffered += (_a = chunk.byteLength) 
!== null && _a !== void 0 ? _a : 0; + if (this.bytesBuffered >= this.limit) { + const excess = this.bytesBuffered - this.limit; + const tailBuffer = this.buffers[this.buffers.length - 1]; + this.buffers[this.buffers.length - 1] = tailBuffer.subarray(0, tailBuffer.byteLength - excess); + this.emit("finish"); + } + callback(); + } +} diff --git a/amplify/functions/fetchDocuments/node_modules/@smithy/util-stream/dist-cjs/index.js b/amplify/functions/fetchDocuments/node_modules/@smithy/util-stream/dist-cjs/index.js new file mode 100644 index 0000000..d42fe10 --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@smithy/util-stream/dist-cjs/index.js @@ -0,0 +1,103 @@ +var __defProp = Object.defineProperty; +var __getOwnPropDesc = Object.getOwnPropertyDescriptor; +var __getOwnPropNames = Object.getOwnPropertyNames; +var __hasOwnProp = Object.prototype.hasOwnProperty; +var __name = (target, value) => __defProp(target, "name", { value, configurable: true }); +var __export = (target, all) => { + for (var name in all) + __defProp(target, name, { get: all[name], enumerable: true }); +}; +var __copyProps = (to, from, except, desc) => { + if (from && typeof from === "object" || typeof from === "function") { + for (let key of __getOwnPropNames(from)) + if (!__hasOwnProp.call(to, key) && key !== except) + __defProp(to, key, { get: () => from[key], enumerable: !(desc = __getOwnPropDesc(from, key)) || desc.enumerable }); + } + return to; +}; +var __reExport = (target, mod, secondTarget) => (__copyProps(target, mod, "default"), secondTarget && __copyProps(secondTarget, mod, "default")); +var __toCommonJS = (mod) => __copyProps(__defProp({}, "__esModule", { value: true }), mod); + +// src/index.ts +var src_exports = {}; +__export(src_exports, { + Uint8ArrayBlobAdapter: () => Uint8ArrayBlobAdapter +}); +module.exports = __toCommonJS(src_exports); + +// src/blob/transforms.ts +var import_util_base64 = require("@smithy/util-base64"); +var import_util_utf8 = 
require("@smithy/util-utf8"); +function transformToString(payload, encoding = "utf-8") { + if (encoding === "base64") { + return (0, import_util_base64.toBase64)(payload); + } + return (0, import_util_utf8.toUtf8)(payload); +} +__name(transformToString, "transformToString"); +function transformFromString(str, encoding) { + if (encoding === "base64") { + return Uint8ArrayBlobAdapter.mutate((0, import_util_base64.fromBase64)(str)); + } + return Uint8ArrayBlobAdapter.mutate((0, import_util_utf8.fromUtf8)(str)); +} +__name(transformFromString, "transformFromString"); + +// src/blob/Uint8ArrayBlobAdapter.ts +var Uint8ArrayBlobAdapter = class _Uint8ArrayBlobAdapter extends Uint8Array { + static { + __name(this, "Uint8ArrayBlobAdapter"); + } + /** + * @param source - such as a string or Stream. + * @returns a new Uint8ArrayBlobAdapter extending Uint8Array. + */ + static fromString(source, encoding = "utf-8") { + switch (typeof source) { + case "string": + return transformFromString(source, encoding); + default: + throw new Error(`Unsupported conversion from ${typeof source} to Uint8ArrayBlobAdapter.`); + } + } + /** + * @param source - Uint8Array to be mutated. + * @returns the same Uint8Array but with prototype switched to Uint8ArrayBlobAdapter. + */ + static mutate(source) { + Object.setPrototypeOf(source, _Uint8ArrayBlobAdapter.prototype); + return source; + } + /** + * @param encoding - default 'utf-8'. + * @returns the blob as string. 
+ */ + transformToString(encoding = "utf-8") { + return transformToString(this, encoding); + } +}; + +// src/index.ts +__reExport(src_exports, require("./checksum/ChecksumStream"), module.exports); +__reExport(src_exports, require("./checksum/createChecksumStream"), module.exports); +__reExport(src_exports, require("././createBufferedReadable"), module.exports); +__reExport(src_exports, require("././getAwsChunkedEncodingStream"), module.exports); +__reExport(src_exports, require("././headStream"), module.exports); +__reExport(src_exports, require("././sdk-stream-mixin"), module.exports); +__reExport(src_exports, require("././splitStream"), module.exports); +__reExport(src_exports, require("././stream-type-check"), module.exports); +// Annotate the CommonJS export names for ESM import in node: + +0 && (module.exports = { + Uint8ArrayBlobAdapter, + ChecksumStream, + createChecksumStream, + createBufferedReadable, + getAwsChunkedEncodingStream, + headStream, + sdkStreamMixin, + splitStream, + isReadableStream, + isBlob +}); + diff --git a/amplify/functions/fetchDocuments/node_modules/@smithy/util-stream/dist-cjs/sdk-stream-mixin.browser.js b/amplify/functions/fetchDocuments/node_modules/@smithy/util-stream/dist-cjs/sdk-stream-mixin.browser.js new file mode 100644 index 0000000..9309af1 --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@smithy/util-stream/dist-cjs/sdk-stream-mixin.browser.js @@ -0,0 +1,69 @@ +"use strict"; +Object.defineProperty(exports, "__esModule", { value: true }); +exports.sdkStreamMixin = void 0; +const fetch_http_handler_1 = require("@smithy/fetch-http-handler"); +const util_base64_1 = require("@smithy/util-base64"); +const util_hex_encoding_1 = require("@smithy/util-hex-encoding"); +const util_utf8_1 = require("@smithy/util-utf8"); +const stream_type_check_1 = require("./stream-type-check"); +const ERR_MSG_STREAM_HAS_BEEN_TRANSFORMED = "The stream has already been transformed."; +const sdkStreamMixin = (stream) => { + var _a, 
_b; + if (!isBlobInstance(stream) && !(0, stream_type_check_1.isReadableStream)(stream)) { + const name = ((_b = (_a = stream === null || stream === void 0 ? void 0 : stream.__proto__) === null || _a === void 0 ? void 0 : _a.constructor) === null || _b === void 0 ? void 0 : _b.name) || stream; + throw new Error(`Unexpected stream implementation, expect Blob or ReadableStream, got ${name}`); + } + let transformed = false; + const transformToByteArray = async () => { + if (transformed) { + throw new Error(ERR_MSG_STREAM_HAS_BEEN_TRANSFORMED); + } + transformed = true; + return await (0, fetch_http_handler_1.streamCollector)(stream); + }; + const blobToWebStream = (blob) => { + if (typeof blob.stream !== "function") { + throw new Error("Cannot transform payload Blob to web stream. Please make sure the Blob.stream() is polyfilled.\n" + + "If you are using React Native, this API is not yet supported, see: https://react-native.canny.io/feature-requests/p/fetch-streaming-body"); + } + return blob.stream(); + }; + return Object.assign(stream, { + transformToByteArray: transformToByteArray, + transformToString: async (encoding) => { + const buf = await transformToByteArray(); + if (encoding === "base64") { + return (0, util_base64_1.toBase64)(buf); + } + else if (encoding === "hex") { + return (0, util_hex_encoding_1.toHex)(buf); + } + else if (encoding === undefined || encoding === "utf8" || encoding === "utf-8") { + return (0, util_utf8_1.toUtf8)(buf); + } + else if (typeof TextDecoder === "function") { + return new TextDecoder(encoding).decode(buf); + } + else { + throw new Error("TextDecoder is not available, please make sure polyfill is provided."); + } + }, + transformToWebStream: () => { + if (transformed) { + throw new Error(ERR_MSG_STREAM_HAS_BEEN_TRANSFORMED); + } + transformed = true; + if (isBlobInstance(stream)) { + return blobToWebStream(stream); + } + else if ((0, stream_type_check_1.isReadableStream)(stream)) { + return stream; + } + else { + throw new 
Error(`Cannot transform payload to web stream, got ${stream}`); + } + }, + }); +}; +exports.sdkStreamMixin = sdkStreamMixin; +const isBlobInstance = (stream) => typeof Blob === "function" && stream instanceof Blob; diff --git a/amplify/functions/fetchDocuments/node_modules/@smithy/util-stream/dist-cjs/sdk-stream-mixin.js b/amplify/functions/fetchDocuments/node_modules/@smithy/util-stream/dist-cjs/sdk-stream-mixin.js new file mode 100644 index 0000000..0817eac --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@smithy/util-stream/dist-cjs/sdk-stream-mixin.js @@ -0,0 +1,55 @@ +"use strict"; +Object.defineProperty(exports, "__esModule", { value: true }); +exports.sdkStreamMixin = void 0; +const node_http_handler_1 = require("@smithy/node-http-handler"); +const util_buffer_from_1 = require("@smithy/util-buffer-from"); +const stream_1 = require("stream"); +const sdk_stream_mixin_browser_1 = require("./sdk-stream-mixin.browser"); +const ERR_MSG_STREAM_HAS_BEEN_TRANSFORMED = "The stream has already been transformed."; +const sdkStreamMixin = (stream) => { + var _a, _b; + if (!(stream instanceof stream_1.Readable)) { + try { + return (0, sdk_stream_mixin_browser_1.sdkStreamMixin)(stream); + } + catch (e) { + const name = ((_b = (_a = stream === null || stream === void 0 ? void 0 : stream.__proto__) === null || _a === void 0 ? void 0 : _a.constructor) === null || _b === void 0 ? 
void 0 : _b.name) || stream; + throw new Error(`Unexpected stream implementation, expect Stream.Readable instance, got ${name}`); + } + } + let transformed = false; + const transformToByteArray = async () => { + if (transformed) { + throw new Error(ERR_MSG_STREAM_HAS_BEEN_TRANSFORMED); + } + transformed = true; + return await (0, node_http_handler_1.streamCollector)(stream); + }; + return Object.assign(stream, { + transformToByteArray, + transformToString: async (encoding) => { + const buf = await transformToByteArray(); + if (encoding === undefined || Buffer.isEncoding(encoding)) { + return (0, util_buffer_from_1.fromArrayBuffer)(buf.buffer, buf.byteOffset, buf.byteLength).toString(encoding); + } + else { + const decoder = new TextDecoder(encoding); + return decoder.decode(buf); + } + }, + transformToWebStream: () => { + if (transformed) { + throw new Error(ERR_MSG_STREAM_HAS_BEEN_TRANSFORMED); + } + if (stream.readableFlowing !== null) { + throw new Error("The stream has been consumed by other callbacks."); + } + if (typeof stream_1.Readable.toWeb !== "function") { + throw new Error("Readable.toWeb() is not supported. 
Please ensure a polyfill is available."); + } + transformed = true; + return stream_1.Readable.toWeb(stream); + }, + }); +}; +exports.sdkStreamMixin = sdkStreamMixin; diff --git a/amplify/functions/fetchDocuments/node_modules/@smithy/util-stream/dist-cjs/splitStream.browser.js b/amplify/functions/fetchDocuments/node_modules/@smithy/util-stream/dist-cjs/splitStream.browser.js new file mode 100644 index 0000000..eb890cc --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@smithy/util-stream/dist-cjs/splitStream.browser.js @@ -0,0 +1,11 @@ +"use strict"; +Object.defineProperty(exports, "__esModule", { value: true }); +exports.splitStream = void 0; +async function splitStream(stream) { + if (typeof stream.stream === "function") { + stream = stream.stream(); + } + const readableStream = stream; + return readableStream.tee(); +} +exports.splitStream = splitStream; diff --git a/amplify/functions/fetchDocuments/node_modules/@smithy/util-stream/dist-cjs/splitStream.js b/amplify/functions/fetchDocuments/node_modules/@smithy/util-stream/dist-cjs/splitStream.js new file mode 100644 index 0000000..c55b628 --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@smithy/util-stream/dist-cjs/splitStream.js @@ -0,0 +1,17 @@ +"use strict"; +Object.defineProperty(exports, "__esModule", { value: true }); +exports.splitStream = void 0; +const stream_1 = require("stream"); +const splitStream_browser_1 = require("./splitStream.browser"); +const stream_type_check_1 = require("./stream-type-check"); +async function splitStream(stream) { + if ((0, stream_type_check_1.isReadableStream)(stream) || (0, stream_type_check_1.isBlob)(stream)) { + return (0, splitStream_browser_1.splitStream)(stream); + } + const stream1 = new stream_1.PassThrough(); + const stream2 = new stream_1.PassThrough(); + stream.pipe(stream1); + stream.pipe(stream2); + return [stream1, stream2]; +} +exports.splitStream = splitStream; diff --git 
a/amplify/functions/fetchDocuments/node_modules/@smithy/util-stream/dist-cjs/stream-type-check.js b/amplify/functions/fetchDocuments/node_modules/@smithy/util-stream/dist-cjs/stream-type-check.js new file mode 100644 index 0000000..a4a6138 --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@smithy/util-stream/dist-cjs/stream-type-check.js @@ -0,0 +1,14 @@ +"use strict"; +Object.defineProperty(exports, "__esModule", { value: true }); +exports.isBlob = exports.isReadableStream = void 0; +const isReadableStream = (stream) => { + var _a; + return typeof ReadableStream === "function" && + (((_a = stream === null || stream === void 0 ? void 0 : stream.constructor) === null || _a === void 0 ? void 0 : _a.name) === ReadableStream.name || stream instanceof ReadableStream); +}; +exports.isReadableStream = isReadableStream; +const isBlob = (blob) => { + var _a; + return typeof Blob === "function" && (((_a = blob === null || blob === void 0 ? void 0 : blob.constructor) === null || _a === void 0 ? 
void 0 : _a.name) === Blob.name || blob instanceof Blob); +}; +exports.isBlob = isBlob; diff --git a/amplify/functions/fetchDocuments/node_modules/@smithy/util-stream/dist-es/ByteArrayCollector.js b/amplify/functions/fetchDocuments/node_modules/@smithy/util-stream/dist-es/ByteArrayCollector.js new file mode 100644 index 0000000..39af48f --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@smithy/util-stream/dist-es/ByteArrayCollector.js @@ -0,0 +1,31 @@ +export class ByteArrayCollector { + constructor(allocByteArray) { + this.allocByteArray = allocByteArray; + this.byteLength = 0; + this.byteArrays = []; + } + push(byteArray) { + this.byteArrays.push(byteArray); + this.byteLength += byteArray.byteLength; + } + flush() { + if (this.byteArrays.length === 1) { + const bytes = this.byteArrays[0]; + this.reset(); + return bytes; + } + const aggregation = this.allocByteArray(this.byteLength); + let cursor = 0; + for (let i = 0; i < this.byteArrays.length; ++i) { + const bytes = this.byteArrays[i]; + aggregation.set(bytes, cursor); + cursor += bytes.byteLength; + } + this.reset(); + return aggregation; + } + reset() { + this.byteArrays = []; + this.byteLength = 0; + } +} diff --git a/amplify/functions/fetchDocuments/node_modules/@smithy/util-stream/dist-es/blob/Uint8ArrayBlobAdapter.js b/amplify/functions/fetchDocuments/node_modules/@smithy/util-stream/dist-es/blob/Uint8ArrayBlobAdapter.js new file mode 100644 index 0000000..41746b1 --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@smithy/util-stream/dist-es/blob/Uint8ArrayBlobAdapter.js @@ -0,0 +1,18 @@ +import { transformFromString, transformToString } from "./transforms"; +export class Uint8ArrayBlobAdapter extends Uint8Array { + static fromString(source, encoding = "utf-8") { + switch (typeof source) { + case "string": + return transformFromString(source, encoding); + default: + throw new Error(`Unsupported conversion from ${typeof source} to Uint8ArrayBlobAdapter.`); + } + } + static 
mutate(source) { + Object.setPrototypeOf(source, Uint8ArrayBlobAdapter.prototype); + return source; + } + transformToString(encoding = "utf-8") { + return transformToString(this, encoding); + } +} diff --git a/amplify/functions/fetchDocuments/node_modules/@smithy/util-stream/dist-es/blob/transforms.js b/amplify/functions/fetchDocuments/node_modules/@smithy/util-stream/dist-es/blob/transforms.js new file mode 100644 index 0000000..0d1f74a --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@smithy/util-stream/dist-es/blob/transforms.js @@ -0,0 +1,15 @@ +import { fromBase64, toBase64 } from "@smithy/util-base64"; +import { fromUtf8, toUtf8 } from "@smithy/util-utf8"; +import { Uint8ArrayBlobAdapter } from "./Uint8ArrayBlobAdapter"; +export function transformToString(payload, encoding = "utf-8") { + if (encoding === "base64") { + return toBase64(payload); + } + return toUtf8(payload); +} +export function transformFromString(str, encoding) { + if (encoding === "base64") { + return Uint8ArrayBlobAdapter.mutate(fromBase64(str)); + } + return Uint8ArrayBlobAdapter.mutate(fromUtf8(str)); +} diff --git a/amplify/functions/fetchDocuments/node_modules/@smithy/util-stream/dist-es/checksum/ChecksumStream.browser.js b/amplify/functions/fetchDocuments/node_modules/@smithy/util-stream/dist-es/checksum/ChecksumStream.browser.js new file mode 100644 index 0000000..afcf529 --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@smithy/util-stream/dist-es/checksum/ChecksumStream.browser.js @@ -0,0 +1,3 @@ +const ReadableStreamRef = typeof ReadableStream === "function" ? 
ReadableStream : function () { }; +export class ChecksumStream extends ReadableStreamRef { +} diff --git a/amplify/functions/fetchDocuments/node_modules/@smithy/util-stream/dist-es/checksum/ChecksumStream.js b/amplify/functions/fetchDocuments/node_modules/@smithy/util-stream/dist-es/checksum/ChecksumStream.js new file mode 100644 index 0000000..e623a09 --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@smithy/util-stream/dist-es/checksum/ChecksumStream.js @@ -0,0 +1,44 @@ +import { toBase64 } from "@smithy/util-base64"; +import { Duplex } from "stream"; +export class ChecksumStream extends Duplex { + constructor({ expectedChecksum, checksum, source, checksumSourceLocation, base64Encoder, }) { + super(); + if (typeof source.pipe === "function") { + this.source = source; + } + else { + throw new Error(`@smithy/util-stream: unsupported source type ${source?.constructor?.name ?? source} in ChecksumStream.`); + } + this.base64Encoder = base64Encoder ?? toBase64; + this.expectedChecksum = expectedChecksum; + this.checksum = checksum; + this.checksumSourceLocation = checksumSourceLocation; + this.source.pipe(this); + } + _read(size) { } + _write(chunk, encoding, callback) { + try { + this.checksum.update(chunk); + this.push(chunk); + } + catch (e) { + return callback(e); + } + return callback(); + } + async _final(callback) { + try { + const digest = await this.checksum.digest(); + const received = this.base64Encoder(digest); + if (this.expectedChecksum !== received) { + return callback(new Error(`Checksum mismatch: expected "${this.expectedChecksum}" but received "${received}"` + + ` in response header "${this.checksumSourceLocation}".`)); + } + } + catch (e) { + return callback(e); + } + this.push(null); + return callback(); + } +} diff --git a/amplify/functions/fetchDocuments/node_modules/@smithy/util-stream/dist-es/checksum/createChecksumStream.browser.js 
b/amplify/functions/fetchDocuments/node_modules/@smithy/util-stream/dist-es/checksum/createChecksumStream.browser.js new file mode 100644 index 0000000..6a41c12 --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@smithy/util-stream/dist-es/checksum/createChecksumStream.browser.js @@ -0,0 +1,35 @@ +import { toBase64 } from "@smithy/util-base64"; +import { isReadableStream } from "../stream-type-check"; +import { ChecksumStream } from "./ChecksumStream.browser"; +export const createChecksumStream = ({ expectedChecksum, checksum, source, checksumSourceLocation, base64Encoder, }) => { + if (!isReadableStream(source)) { + throw new Error(`@smithy/util-stream: unsupported source type ${source?.constructor?.name ?? source} in ChecksumStream.`); + } + const encoder = base64Encoder ?? toBase64; + if (typeof TransformStream !== "function") { + throw new Error("@smithy/util-stream: unable to instantiate ChecksumStream because API unavailable: ReadableStream/TransformStream."); + } + const transform = new TransformStream({ + start() { }, + async transform(chunk, controller) { + checksum.update(chunk); + controller.enqueue(chunk); + }, + async flush(controller) { + const digest = await checksum.digest(); + const received = encoder(digest); + if (expectedChecksum !== received) { + const error = new Error(`Checksum mismatch: expected "${expectedChecksum}" but received "${received}"` + + ` in response header "${checksumSourceLocation}".`); + controller.error(error); + } + else { + controller.terminate(); + } + }, + }); + source.pipeThrough(transform); + const readable = transform.readable; + Object.setPrototypeOf(readable, ChecksumStream.prototype); + return readable; +}; diff --git a/amplify/functions/fetchDocuments/node_modules/@smithy/util-stream/dist-es/checksum/createChecksumStream.js b/amplify/functions/fetchDocuments/node_modules/@smithy/util-stream/dist-es/checksum/createChecksumStream.js new file mode 100644 index 0000000..d205b82 --- /dev/null +++ 
b/amplify/functions/fetchDocuments/node_modules/@smithy/util-stream/dist-es/checksum/createChecksumStream.js @@ -0,0 +1,9 @@ +import { isReadableStream } from "../stream-type-check"; +import { ChecksumStream } from "./ChecksumStream"; +import { createChecksumStream as createChecksumStreamWeb } from "./createChecksumStream.browser"; +export function createChecksumStream(init) { + if (typeof ReadableStream === "function" && isReadableStream(init.source)) { + return createChecksumStreamWeb(init); + } + return new ChecksumStream(init); +} diff --git a/amplify/functions/fetchDocuments/node_modules/@smithy/util-stream/dist-es/createBufferedReadable.js b/amplify/functions/fetchDocuments/node_modules/@smithy/util-stream/dist-es/createBufferedReadable.js new file mode 100644 index 0000000..0e3bbce --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@smithy/util-stream/dist-es/createBufferedReadable.js @@ -0,0 +1,57 @@ +import { Readable } from "node:stream"; +import { ByteArrayCollector } from "./ByteArrayCollector"; +import { createBufferedReadableStream, flush, merge, modeOf, sizeOf } from "./createBufferedReadableStream"; +import { isReadableStream } from "./stream-type-check"; +export function createBufferedReadable(upstream, size, logger) { + if (isReadableStream(upstream)) { + return createBufferedReadableStream(upstream, size, logger); + } + const downstream = new Readable({ read() { } }); + let streamBufferingLoggedWarning = false; + let bytesSeen = 0; + const buffers = [ + "", + new ByteArrayCollector((size) => new Uint8Array(size)), + new ByteArrayCollector((size) => Buffer.from(new Uint8Array(size))), + ]; + let mode = -1; + upstream.on("data", (chunk) => { + const chunkMode = modeOf(chunk, true); + if (mode !== chunkMode) { + if (mode >= 0) { + downstream.push(flush(buffers, mode)); + } + mode = chunkMode; + } + if (mode === -1) { + downstream.push(chunk); + return; + } + const chunkSize = sizeOf(chunk); + bytesSeen += chunkSize; + const bufferSize 
= sizeOf(buffers[mode]); + if (chunkSize >= size && bufferSize === 0) { + downstream.push(chunk); + } + else { + const newSize = merge(buffers, mode, chunk); + if (!streamBufferingLoggedWarning && bytesSeen > size * 2) { + streamBufferingLoggedWarning = true; + logger?.warn(`@smithy/util-stream - stream chunk size ${chunkSize} is below threshold of ${size}, automatically buffering.`); + } + if (newSize >= size) { + downstream.push(flush(buffers, mode)); + } + } + }); + upstream.on("end", () => { + if (mode !== -1) { + const remainder = flush(buffers, mode); + if (sizeOf(remainder) > 0) { + downstream.push(remainder); + } + } + downstream.push(null); + }); + return downstream; +} diff --git a/amplify/functions/fetchDocuments/node_modules/@smithy/util-stream/dist-es/createBufferedReadableStream.js b/amplify/functions/fetchDocuments/node_modules/@smithy/util-stream/dist-es/createBufferedReadableStream.js new file mode 100644 index 0000000..698a757 --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@smithy/util-stream/dist-es/createBufferedReadableStream.js @@ -0,0 +1,95 @@ +import { ByteArrayCollector } from "./ByteArrayCollector"; +export function createBufferedReadableStream(upstream, size, logger) { + const reader = upstream.getReader(); + let streamBufferingLoggedWarning = false; + let bytesSeen = 0; + const buffers = ["", new ByteArrayCollector((size) => new Uint8Array(size))]; + let mode = -1; + const pull = async (controller) => { + const { value, done } = await reader.read(); + const chunk = value; + if (done) { + if (mode !== -1) { + const remainder = flush(buffers, mode); + if (sizeOf(remainder) > 0) { + controller.enqueue(remainder); + } + } + controller.close(); + } + else { + const chunkMode = modeOf(chunk, false); + if (mode !== chunkMode) { + if (mode >= 0) { + controller.enqueue(flush(buffers, mode)); + } + mode = chunkMode; + } + if (mode === -1) { + controller.enqueue(chunk); + return; + } + const chunkSize = sizeOf(chunk); + bytesSeen 
+= chunkSize; + const bufferSize = sizeOf(buffers[mode]); + if (chunkSize >= size && bufferSize === 0) { + controller.enqueue(chunk); + } + else { + const newSize = merge(buffers, mode, chunk); + if (!streamBufferingLoggedWarning && bytesSeen > size * 2) { + streamBufferingLoggedWarning = true; + logger?.warn(`@smithy/util-stream - stream chunk size ${chunkSize} is below threshold of ${size}, automatically buffering.`); + } + if (newSize >= size) { + controller.enqueue(flush(buffers, mode)); + } + else { + await pull(controller); + } + } + } + }; + return new ReadableStream({ + pull, + }); +} +export const createBufferedReadable = createBufferedReadableStream; +export function merge(buffers, mode, chunk) { + switch (mode) { + case 0: + buffers[0] += chunk; + return sizeOf(buffers[0]); + case 1: + case 2: + buffers[mode].push(chunk); + return sizeOf(buffers[mode]); + } +} +export function flush(buffers, mode) { + switch (mode) { + case 0: + const s = buffers[0]; + buffers[0] = ""; + return s; + case 1: + case 2: + return buffers[mode].flush(); + } + throw new Error(`@smithy/util-stream - invalid index ${mode} given to flush()`); +} +export function sizeOf(chunk) { + return chunk?.byteLength ?? chunk?.length ?? 
0; +} +export function modeOf(chunk, allowBuffer = true) { + if (allowBuffer && typeof Buffer !== "undefined" && chunk instanceof Buffer) { + return 2; + } + if (chunk instanceof Uint8Array) { + return 1; + } + if (typeof chunk === "string") { + return 0; + } + return -1; +} diff --git a/amplify/functions/fetchDocuments/node_modules/@smithy/util-stream/dist-es/getAwsChunkedEncodingStream.browser.js b/amplify/functions/fetchDocuments/node_modules/@smithy/util-stream/dist-es/getAwsChunkedEncodingStream.browser.js new file mode 100644 index 0000000..b5d5fa4 --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@smithy/util-stream/dist-es/getAwsChunkedEncodingStream.browser.js @@ -0,0 +1,27 @@ +export const getAwsChunkedEncodingStream = (readableStream, options) => { + const { base64Encoder, bodyLengthChecker, checksumAlgorithmFn, checksumLocationName, streamHasher } = options; + const checksumRequired = base64Encoder !== undefined && + bodyLengthChecker !== undefined && + checksumAlgorithmFn !== undefined && + checksumLocationName !== undefined && + streamHasher !== undefined; + const digest = checksumRequired ? 
streamHasher(checksumAlgorithmFn, readableStream) : undefined; + const reader = readableStream.getReader(); + return new ReadableStream({ + async pull(controller) { + const { value, done } = await reader.read(); + if (done) { + controller.enqueue(`0\r\n`); + if (checksumRequired) { + const checksum = base64Encoder(await digest); + controller.enqueue(`${checksumLocationName}:${checksum}\r\n`); + controller.enqueue(`\r\n`); + } + controller.close(); + } + else { + controller.enqueue(`${(bodyLengthChecker(value) || 0).toString(16)}\r\n${value}\r\n`); + } + }, + }); +}; diff --git a/amplify/functions/fetchDocuments/node_modules/@smithy/util-stream/dist-es/getAwsChunkedEncodingStream.js b/amplify/functions/fetchDocuments/node_modules/@smithy/util-stream/dist-es/getAwsChunkedEncodingStream.js new file mode 100644 index 0000000..7c55116 --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@smithy/util-stream/dist-es/getAwsChunkedEncodingStream.js @@ -0,0 +1,26 @@ +import { Readable } from "stream"; +export const getAwsChunkedEncodingStream = (readableStream, options) => { + const { base64Encoder, bodyLengthChecker, checksumAlgorithmFn, checksumLocationName, streamHasher } = options; + const checksumRequired = base64Encoder !== undefined && + checksumAlgorithmFn !== undefined && + checksumLocationName !== undefined && + streamHasher !== undefined; + const digest = checksumRequired ? 
streamHasher(checksumAlgorithmFn, readableStream) : undefined; + const awsChunkedEncodingStream = new Readable({ read: () => { } }); + readableStream.on("data", (data) => { + const length = bodyLengthChecker(data) || 0; + awsChunkedEncodingStream.push(`${length.toString(16)}\r\n`); + awsChunkedEncodingStream.push(data); + awsChunkedEncodingStream.push("\r\n"); + }); + readableStream.on("end", async () => { + awsChunkedEncodingStream.push(`0\r\n`); + if (checksumRequired) { + const checksum = base64Encoder(await digest); + awsChunkedEncodingStream.push(`${checksumLocationName}:${checksum}\r\n`); + awsChunkedEncodingStream.push(`\r\n`); + } + awsChunkedEncodingStream.push(null); + }); + return awsChunkedEncodingStream; +}; diff --git a/amplify/functions/fetchDocuments/node_modules/@smithy/util-stream/dist-es/headStream.browser.js b/amplify/functions/fetchDocuments/node_modules/@smithy/util-stream/dist-es/headStream.browser.js new file mode 100644 index 0000000..4e7f864 --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@smithy/util-stream/dist-es/headStream.browser.js @@ -0,0 +1,31 @@ +export async function headStream(stream, bytes) { + let byteLengthCounter = 0; + const chunks = []; + const reader = stream.getReader(); + let isDone = false; + while (!isDone) { + const { done, value } = await reader.read(); + if (value) { + chunks.push(value); + byteLengthCounter += value?.byteLength ?? 
0; + } + if (byteLengthCounter >= bytes) { + break; + } + isDone = done; + } + reader.releaseLock(); + const collected = new Uint8Array(Math.min(bytes, byteLengthCounter)); + let offset = 0; + for (const chunk of chunks) { + if (chunk.byteLength > collected.byteLength - offset) { + collected.set(chunk.subarray(0, collected.byteLength - offset), offset); + break; + } + else { + collected.set(chunk, offset); + } + offset += chunk.length; + } + return collected; +} diff --git a/amplify/functions/fetchDocuments/node_modules/@smithy/util-stream/dist-es/headStream.js b/amplify/functions/fetchDocuments/node_modules/@smithy/util-stream/dist-es/headStream.js new file mode 100644 index 0000000..27b28ea --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@smithy/util-stream/dist-es/headStream.js @@ -0,0 +1,41 @@ +import { Writable } from "stream"; +import { headStream as headWebStream } from "./headStream.browser"; +import { isReadableStream } from "./stream-type-check"; +export const headStream = (stream, bytes) => { + if (isReadableStream(stream)) { + return headWebStream(stream, bytes); + } + return new Promise((resolve, reject) => { + const collector = new Collector(); + collector.limit = bytes; + stream.pipe(collector); + stream.on("error", (err) => { + collector.end(); + reject(err); + }); + collector.on("error", reject); + collector.on("finish", function () { + const bytes = new Uint8Array(Buffer.concat(this.buffers)); + resolve(bytes); + }); + }); +}; +class Collector extends Writable { + constructor() { + super(...arguments); + this.buffers = []; + this.limit = Infinity; + this.bytesBuffered = 0; + } + _write(chunk, encoding, callback) { + this.buffers.push(chunk); + this.bytesBuffered += chunk.byteLength ?? 
0; + if (this.bytesBuffered >= this.limit) { + const excess = this.bytesBuffered - this.limit; + const tailBuffer = this.buffers[this.buffers.length - 1]; + this.buffers[this.buffers.length - 1] = tailBuffer.subarray(0, tailBuffer.byteLength - excess); + this.emit("finish"); + } + callback(); + } +} diff --git a/amplify/functions/fetchDocuments/node_modules/@smithy/util-stream/dist-es/index.js b/amplify/functions/fetchDocuments/node_modules/@smithy/util-stream/dist-es/index.js new file mode 100644 index 0000000..1b5b599 --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@smithy/util-stream/dist-es/index.js @@ -0,0 +1,9 @@ +export * from "./blob/Uint8ArrayBlobAdapter"; +export * from "./checksum/ChecksumStream"; +export * from "./checksum/createChecksumStream"; +export * from "./createBufferedReadable"; +export * from "./getAwsChunkedEncodingStream"; +export * from "./headStream"; +export * from "./sdk-stream-mixin"; +export * from "./splitStream"; +export * from "./stream-type-check"; diff --git a/amplify/functions/fetchDocuments/node_modules/@smithy/util-stream/dist-es/sdk-stream-mixin.browser.js b/amplify/functions/fetchDocuments/node_modules/@smithy/util-stream/dist-es/sdk-stream-mixin.browser.js new file mode 100644 index 0000000..f21ff66 --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@smithy/util-stream/dist-es/sdk-stream-mixin.browser.js @@ -0,0 +1,64 @@ +import { streamCollector } from "@smithy/fetch-http-handler"; +import { toBase64 } from "@smithy/util-base64"; +import { toHex } from "@smithy/util-hex-encoding"; +import { toUtf8 } from "@smithy/util-utf8"; +import { isReadableStream } from "./stream-type-check"; +const ERR_MSG_STREAM_HAS_BEEN_TRANSFORMED = "The stream has already been transformed."; +export const sdkStreamMixin = (stream) => { + if (!isBlobInstance(stream) && !isReadableStream(stream)) { + const name = stream?.__proto__?.constructor?.name || stream; + throw new Error(`Unexpected stream implementation, 
expect Blob or ReadableStream, got ${name}`); + } + let transformed = false; + const transformToByteArray = async () => { + if (transformed) { + throw new Error(ERR_MSG_STREAM_HAS_BEEN_TRANSFORMED); + } + transformed = true; + return await streamCollector(stream); + }; + const blobToWebStream = (blob) => { + if (typeof blob.stream !== "function") { + throw new Error("Cannot transform payload Blob to web stream. Please make sure the Blob.stream() is polyfilled.\n" + + "If you are using React Native, this API is not yet supported, see: https://react-native.canny.io/feature-requests/p/fetch-streaming-body"); + } + return blob.stream(); + }; + return Object.assign(stream, { + transformToByteArray: transformToByteArray, + transformToString: async (encoding) => { + const buf = await transformToByteArray(); + if (encoding === "base64") { + return toBase64(buf); + } + else if (encoding === "hex") { + return toHex(buf); + } + else if (encoding === undefined || encoding === "utf8" || encoding === "utf-8") { + return toUtf8(buf); + } + else if (typeof TextDecoder === "function") { + return new TextDecoder(encoding).decode(buf); + } + else { + throw new Error("TextDecoder is not available, please make sure polyfill is provided."); + } + }, + transformToWebStream: () => { + if (transformed) { + throw new Error(ERR_MSG_STREAM_HAS_BEEN_TRANSFORMED); + } + transformed = true; + if (isBlobInstance(stream)) { + return blobToWebStream(stream); + } + else if (isReadableStream(stream)) { + return stream; + } + else { + throw new Error(`Cannot transform payload to web stream, got ${stream}`); + } + }, + }); +}; +const isBlobInstance = (stream) => typeof Blob === "function" && stream instanceof Blob; diff --git a/amplify/functions/fetchDocuments/node_modules/@smithy/util-stream/dist-es/sdk-stream-mixin.js b/amplify/functions/fetchDocuments/node_modules/@smithy/util-stream/dist-es/sdk-stream-mixin.js new file mode 100644 index 0000000..4731333 --- /dev/null +++ 
b/amplify/functions/fetchDocuments/node_modules/@smithy/util-stream/dist-es/sdk-stream-mixin.js @@ -0,0 +1,50 @@ +import { streamCollector } from "@smithy/node-http-handler"; +import { fromArrayBuffer } from "@smithy/util-buffer-from"; +import { Readable } from "stream"; +import { sdkStreamMixin as sdkStreamMixinReadableStream } from "./sdk-stream-mixin.browser"; +const ERR_MSG_STREAM_HAS_BEEN_TRANSFORMED = "The stream has already been transformed."; +export const sdkStreamMixin = (stream) => { + if (!(stream instanceof Readable)) { + try { + return sdkStreamMixinReadableStream(stream); + } + catch (e) { + const name = stream?.__proto__?.constructor?.name || stream; + throw new Error(`Unexpected stream implementation, expect Stream.Readable instance, got ${name}`); + } + } + let transformed = false; + const transformToByteArray = async () => { + if (transformed) { + throw new Error(ERR_MSG_STREAM_HAS_BEEN_TRANSFORMED); + } + transformed = true; + return await streamCollector(stream); + }; + return Object.assign(stream, { + transformToByteArray, + transformToString: async (encoding) => { + const buf = await transformToByteArray(); + if (encoding === undefined || Buffer.isEncoding(encoding)) { + return fromArrayBuffer(buf.buffer, buf.byteOffset, buf.byteLength).toString(encoding); + } + else { + const decoder = new TextDecoder(encoding); + return decoder.decode(buf); + } + }, + transformToWebStream: () => { + if (transformed) { + throw new Error(ERR_MSG_STREAM_HAS_BEEN_TRANSFORMED); + } + if (stream.readableFlowing !== null) { + throw new Error("The stream has been consumed by other callbacks."); + } + if (typeof Readable.toWeb !== "function") { + throw new Error("Readable.toWeb() is not supported. 
Please ensure a polyfill is available."); + } + transformed = true; + return Readable.toWeb(stream); + }, + }); +}; diff --git a/amplify/functions/fetchDocuments/node_modules/@smithy/util-stream/dist-es/splitStream.browser.js b/amplify/functions/fetchDocuments/node_modules/@smithy/util-stream/dist-es/splitStream.browser.js new file mode 100644 index 0000000..6f06b0e --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@smithy/util-stream/dist-es/splitStream.browser.js @@ -0,0 +1,7 @@ +export async function splitStream(stream) { + if (typeof stream.stream === "function") { + stream = stream.stream(); + } + const readableStream = stream; + return readableStream.tee(); +} diff --git a/amplify/functions/fetchDocuments/node_modules/@smithy/util-stream/dist-es/splitStream.js b/amplify/functions/fetchDocuments/node_modules/@smithy/util-stream/dist-es/splitStream.js new file mode 100644 index 0000000..1a8c032 --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@smithy/util-stream/dist-es/splitStream.js @@ -0,0 +1,13 @@ +import { PassThrough } from "stream"; +import { splitStream as splitWebStream } from "./splitStream.browser"; +import { isBlob, isReadableStream } from "./stream-type-check"; +export async function splitStream(stream) { + if (isReadableStream(stream) || isBlob(stream)) { + return splitWebStream(stream); + } + const stream1 = new PassThrough(); + const stream2 = new PassThrough(); + stream.pipe(stream1); + stream.pipe(stream2); + return [stream1, stream2]; +} diff --git a/amplify/functions/fetchDocuments/node_modules/@smithy/util-stream/dist-es/stream-type-check.js b/amplify/functions/fetchDocuments/node_modules/@smithy/util-stream/dist-es/stream-type-check.js new file mode 100644 index 0000000..6ee93a3 --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@smithy/util-stream/dist-es/stream-type-check.js @@ -0,0 +1,5 @@ +export const isReadableStream = (stream) => typeof ReadableStream === "function" && + 
(stream?.constructor?.name === ReadableStream.name || stream instanceof ReadableStream); +export const isBlob = (blob) => { + return typeof Blob === "function" && (blob?.constructor?.name === Blob.name || blob instanceof Blob); +}; diff --git a/amplify/functions/fetchDocuments/node_modules/@smithy/util-stream/dist-types/ByteArrayCollector.d.ts b/amplify/functions/fetchDocuments/node_modules/@smithy/util-stream/dist-types/ByteArrayCollector.d.ts new file mode 100644 index 0000000..a1bbd53 --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@smithy/util-stream/dist-types/ByteArrayCollector.d.ts @@ -0,0 +1,13 @@ +/** + * Aggregates byteArrays on demand. + * @internal + */ +export declare class ByteArrayCollector { + readonly allocByteArray: (size: number) => Uint8Array; + byteLength: number; + private byteArrays; + constructor(allocByteArray: (size: number) => Uint8Array); + push(byteArray: Uint8Array): void; + flush(): Uint8Array; + private reset; +} diff --git a/amplify/functions/fetchDocuments/node_modules/@smithy/util-stream/dist-types/blob/Uint8ArrayBlobAdapter.d.ts b/amplify/functions/fetchDocuments/node_modules/@smithy/util-stream/dist-types/blob/Uint8ArrayBlobAdapter.d.ts new file mode 100644 index 0000000..c3d994d --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@smithy/util-stream/dist-types/blob/Uint8ArrayBlobAdapter.d.ts @@ -0,0 +1,21 @@ +/** + * Adapter for conversions of the native Uint8Array type. + * @public + */ +export declare class Uint8ArrayBlobAdapter extends Uint8Array { + /** + * @param source - such as a string or Stream. + * @returns a new Uint8ArrayBlobAdapter extending Uint8Array. + */ + static fromString(source: string, encoding?: string): Uint8ArrayBlobAdapter; + /** + * @param source - Uint8Array to be mutated. + * @returns the same Uint8Array but with prototype switched to Uint8ArrayBlobAdapter. + */ + static mutate(source: Uint8Array): Uint8ArrayBlobAdapter; + /** + * @param encoding - default 'utf-8'. 
+ * @returns the blob as string. + */ + transformToString(encoding?: string): string; +} diff --git a/amplify/functions/fetchDocuments/node_modules/@smithy/util-stream/dist-types/blob/transforms.d.ts b/amplify/functions/fetchDocuments/node_modules/@smithy/util-stream/dist-types/blob/transforms.d.ts new file mode 100644 index 0000000..c54a18b --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@smithy/util-stream/dist-types/blob/transforms.d.ts @@ -0,0 +1,9 @@ +import { Uint8ArrayBlobAdapter } from "./Uint8ArrayBlobAdapter"; +/** + * @internal + */ +export declare function transformToString(payload: Uint8Array, encoding?: string): string; +/** + * @internal + */ +export declare function transformFromString(str: string, encoding?: string): Uint8ArrayBlobAdapter; diff --git a/amplify/functions/fetchDocuments/node_modules/@smithy/util-stream/dist-types/checksum/ChecksumStream.browser.d.ts b/amplify/functions/fetchDocuments/node_modules/@smithy/util-stream/dist-types/checksum/ChecksumStream.browser.d.ts new file mode 100644 index 0000000..0c5fbd4 --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@smithy/util-stream/dist-types/checksum/ChecksumStream.browser.d.ts @@ -0,0 +1,37 @@ +import { Checksum, Encoder } from "@smithy/types"; +/** + * @internal + */ +export interface ChecksumStreamInit { + /** + * Base64 value of the expected checksum. + */ + expectedChecksum: string; + /** + * For error messaging, the location from which the checksum value was read. + */ + checksumSourceLocation: string; + /** + * The checksum calculator. + */ + checksum: Checksum; + /** + * The stream to be checked. + */ + source: ReadableStream; + /** + * Optional base 64 encoder if calling from a request context. + */ + base64Encoder?: Encoder; +} +declare const ChecksumStream_base: any; +/** + * This stub exists so that the readable returned by createChecksumStream + * identifies as "ChecksumStream" in alignment with the Node.js + * implementation. 
+ * + * @extends ReadableStream + */ +export declare class ChecksumStream extends ChecksumStream_base { +} +export {}; diff --git a/amplify/functions/fetchDocuments/node_modules/@smithy/util-stream/dist-types/checksum/ChecksumStream.d.ts b/amplify/functions/fetchDocuments/node_modules/@smithy/util-stream/dist-types/checksum/ChecksumStream.d.ts new file mode 100644 index 0000000..6893e55 --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@smithy/util-stream/dist-types/checksum/ChecksumStream.d.ts @@ -0,0 +1,62 @@ +/// +/// +/// +import { Checksum, Encoder } from "@smithy/types"; +import { Duplex, Readable } from "stream"; +/** + * @internal + */ +export interface ChecksumStreamInit { + /** + * Base64 value of the expected checksum. + */ + expectedChecksum: string; + /** + * For error messaging, the location from which the checksum value was read. + */ + checksumSourceLocation: string; + /** + * The checksum calculator. + */ + checksum: Checksum; + /** + * The stream to be checked. + */ + source: T; + /** + * Optional base 64 encoder if calling from a request context. + */ + base64Encoder?: Encoder; +} +/** + * @internal + * + * Wrapper for throwing checksum errors for streams without + * buffering the stream. + * + */ +export declare class ChecksumStream extends Duplex { + private expectedChecksum; + private checksumSourceLocation; + private checksum; + private source?; + private base64Encoder; + constructor({ expectedChecksum, checksum, source, checksumSourceLocation, base64Encoder, }: ChecksumStreamInit); + /** + * @internal do not call this directly. + */ + _read(size: number): void; + /** + * @internal do not call this directly. + * + * When the upstream source flows data to this stream, + * calculate a step update of the checksum. + */ + _write(chunk: Buffer, encoding: string, callback: (err?: Error) => void): void; + /** + * @internal do not call this directly. + * + * When the upstream source finishes, perform the checksum comparison. 
+ */ + _final(callback: (err?: Error) => void): Promise; +} diff --git a/amplify/functions/fetchDocuments/node_modules/@smithy/util-stream/dist-types/checksum/createChecksumStream.browser.d.ts b/amplify/functions/fetchDocuments/node_modules/@smithy/util-stream/dist-types/checksum/createChecksumStream.browser.d.ts new file mode 100644 index 0000000..1874987 --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@smithy/util-stream/dist-types/checksum/createChecksumStream.browser.d.ts @@ -0,0 +1,15 @@ +import { ChecksumStreamInit } from "./ChecksumStream.browser"; +/** + * @internal + * Alias prevents compiler from turning + * ReadableStream into ReadableStream, which is incompatible + * with the NodeJS.ReadableStream global type. + */ +export type ReadableStreamType = ReadableStream; +/** + * @internal + * + * Creates a stream adapter for throwing checksum errors for streams without + * buffering the stream. + */ +export declare const createChecksumStream: ({ expectedChecksum, checksum, source, checksumSourceLocation, base64Encoder, }: ChecksumStreamInit) => ReadableStreamType; diff --git a/amplify/functions/fetchDocuments/node_modules/@smithy/util-stream/dist-types/checksum/createChecksumStream.d.ts b/amplify/functions/fetchDocuments/node_modules/@smithy/util-stream/dist-types/checksum/createChecksumStream.d.ts new file mode 100644 index 0000000..db09f80 --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@smithy/util-stream/dist-types/checksum/createChecksumStream.d.ts @@ -0,0 +1,12 @@ +/// +import { Readable } from "stream"; +import { ChecksumStreamInit } from "./ChecksumStream"; +import { ReadableStreamType } from "./createChecksumStream.browser"; +/** + * @internal + * + * Creates a stream mirroring the input stream's interface, but + * performs checksumming when reading to the end of the stream. 
+ */ +export declare function createChecksumStream(init: ChecksumStreamInit): ReadableStreamType; +export declare function createChecksumStream(init: ChecksumStreamInit): Readable; diff --git a/amplify/functions/fetchDocuments/node_modules/@smithy/util-stream/dist-types/createBufferedReadable.d.ts b/amplify/functions/fetchDocuments/node_modules/@smithy/util-stream/dist-types/createBufferedReadable.d.ts new file mode 100644 index 0000000..b173636 --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@smithy/util-stream/dist-types/createBufferedReadable.d.ts @@ -0,0 +1,13 @@ +/// +import type { Logger } from "@smithy/types"; +import { Readable } from "node:stream"; +/** + * @internal + * @param upstream - any Readable or ReadableStream. + * @param size - byte or character length minimum. Buffering occurs when a chunk fails to meet this value. + * @param onBuffer - for emitting warnings when buffering occurs. + * @returns another stream of the same data and stream class, but buffers chunks until + * the minimum size is met, except for the last chunk. 
+ */ +export declare function createBufferedReadable(upstream: Readable, size: number, logger?: Logger): Readable; +export declare function createBufferedReadable(upstream: ReadableStream, size: number, logger?: Logger): ReadableStream; diff --git a/amplify/functions/fetchDocuments/node_modules/@smithy/util-stream/dist-types/createBufferedReadableStream.d.ts b/amplify/functions/fetchDocuments/node_modules/@smithy/util-stream/dist-types/createBufferedReadableStream.d.ts new file mode 100644 index 0000000..9f6cdbd --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@smithy/util-stream/dist-types/createBufferedReadableStream.d.ts @@ -0,0 +1,50 @@ +import type { Logger } from "@smithy/types"; +import { ByteArrayCollector } from "./ByteArrayCollector"; +export type BufferStore = [string, ByteArrayCollector, ByteArrayCollector?]; +export type BufferUnion = string | Uint8Array; +export type Modes = 0 | 1 | 2; +/** + * @internal + * @param upstream - any ReadableStream. + * @param size - byte or character length minimum. Buffering occurs when a chunk fails to meet this value. + * @param logger - for emitting warnings when buffering occurs. + * @returns another stream of the same data, but buffers chunks until + * the minimum size is met, except for the last chunk. + */ +export declare function createBufferedReadableStream(upstream: ReadableStream, size: number, logger?: Logger): ReadableStream; +/** + * Replaces R/RS polymorphic implementation in environments with only ReadableStream. + * @internal + */ +export declare const createBufferedReadable: typeof createBufferedReadableStream; +/** + * @internal + * @param buffers + * @param mode + * @param chunk + * @returns the new buffer size after merging the chunk with its appropriate buffer. + */ +export declare function merge(buffers: BufferStore, mode: Modes, chunk: string | Uint8Array): number; +/** + * @internal + * @param buffers + * @param mode + * @returns the buffer matching the mode. 
+ */ +export declare function flush(buffers: BufferStore, mode: Modes | -1): BufferUnion; +/** + * @internal + * @param chunk + * @returns size of the chunk in bytes or characters. + */ +export declare function sizeOf(chunk?: { + byteLength?: number; + length?: number; +}): number; +/** + * @internal + * @param chunk - from upstream Readable. + * @param allowBuffer - allow mode 2 (Buffer), otherwise Buffer will return mode 1. + * @returns type index of the chunk. + */ +export declare function modeOf(chunk: BufferUnion, allowBuffer?: boolean): Modes | -1; diff --git a/amplify/functions/fetchDocuments/node_modules/@smithy/util-stream/dist-types/getAwsChunkedEncodingStream.browser.d.ts b/amplify/functions/fetchDocuments/node_modules/@smithy/util-stream/dist-types/getAwsChunkedEncodingStream.browser.d.ts new file mode 100644 index 0000000..f767f77 --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@smithy/util-stream/dist-types/getAwsChunkedEncodingStream.browser.d.ts @@ -0,0 +1,5 @@ +import { GetAwsChunkedEncodingStream } from "@smithy/types"; +/** + * @internal + */ +export declare const getAwsChunkedEncodingStream: GetAwsChunkedEncodingStream; diff --git a/amplify/functions/fetchDocuments/node_modules/@smithy/util-stream/dist-types/getAwsChunkedEncodingStream.d.ts b/amplify/functions/fetchDocuments/node_modules/@smithy/util-stream/dist-types/getAwsChunkedEncodingStream.d.ts new file mode 100644 index 0000000..d3997d0 --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@smithy/util-stream/dist-types/getAwsChunkedEncodingStream.d.ts @@ -0,0 +1,7 @@ +/// +import { GetAwsChunkedEncodingStream } from "@smithy/types"; +import { Readable } from "stream"; +/** + * @internal + */ +export declare const getAwsChunkedEncodingStream: GetAwsChunkedEncodingStream; diff --git a/amplify/functions/fetchDocuments/node_modules/@smithy/util-stream/dist-types/headStream.browser.d.ts 
b/amplify/functions/fetchDocuments/node_modules/@smithy/util-stream/dist-types/headStream.browser.d.ts new file mode 100644 index 0000000..80ad267 --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@smithy/util-stream/dist-types/headStream.browser.d.ts @@ -0,0 +1,8 @@ +/** + * @internal + * @param stream + * @param bytes - read head bytes from the stream and discard the rest of it. + * + * Caution: the input stream must be destroyed separately, this function does not do so. + */ +export declare function headStream(stream: ReadableStream, bytes: number): Promise; diff --git a/amplify/functions/fetchDocuments/node_modules/@smithy/util-stream/dist-types/headStream.d.ts b/amplify/functions/fetchDocuments/node_modules/@smithy/util-stream/dist-types/headStream.d.ts new file mode 100644 index 0000000..7ab9714 --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@smithy/util-stream/dist-types/headStream.d.ts @@ -0,0 +1,9 @@ +import { Readable } from "stream"; +/** + * @internal + * @param stream - to be read. + * @param bytes - read head bytes from the stream and discard the rest of it. + * + * Caution: the input stream must be destroyed separately, this function does not do so. 
+ */ +export declare const headStream: (stream: Readable | ReadableStream, bytes: number) => Promise; diff --git a/amplify/functions/fetchDocuments/node_modules/@smithy/util-stream/dist-types/index.d.ts b/amplify/functions/fetchDocuments/node_modules/@smithy/util-stream/dist-types/index.d.ts new file mode 100644 index 0000000..1b5b599 --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@smithy/util-stream/dist-types/index.d.ts @@ -0,0 +1,9 @@ +export * from "./blob/Uint8ArrayBlobAdapter"; +export * from "./checksum/ChecksumStream"; +export * from "./checksum/createChecksumStream"; +export * from "./createBufferedReadable"; +export * from "./getAwsChunkedEncodingStream"; +export * from "./headStream"; +export * from "./sdk-stream-mixin"; +export * from "./splitStream"; +export * from "./stream-type-check"; diff --git a/amplify/functions/fetchDocuments/node_modules/@smithy/util-stream/dist-types/sdk-stream-mixin.browser.d.ts b/amplify/functions/fetchDocuments/node_modules/@smithy/util-stream/dist-types/sdk-stream-mixin.browser.d.ts new file mode 100644 index 0000000..400c0b2 --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@smithy/util-stream/dist-types/sdk-stream-mixin.browser.d.ts @@ -0,0 +1,7 @@ +import { SdkStream } from "@smithy/types"; +/** + * The stream handling utility functions for browsers and React Native + * + * @internal + */ +export declare const sdkStreamMixin: (stream: unknown) => SdkStream; diff --git a/amplify/functions/fetchDocuments/node_modules/@smithy/util-stream/dist-types/sdk-stream-mixin.d.ts b/amplify/functions/fetchDocuments/node_modules/@smithy/util-stream/dist-types/sdk-stream-mixin.d.ts new file mode 100644 index 0000000..34fcb6f --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@smithy/util-stream/dist-types/sdk-stream-mixin.d.ts @@ -0,0 +1,8 @@ +import { SdkStream } from "@smithy/types"; +import { Readable } from "stream"; +/** + * The function that mixes in the utility functions to help 
consuming runtime-specific payload stream. + * + * @internal + */ +export declare const sdkStreamMixin: (stream: unknown) => SdkStream | SdkStream; diff --git a/amplify/functions/fetchDocuments/node_modules/@smithy/util-stream/dist-types/splitStream.browser.d.ts b/amplify/functions/fetchDocuments/node_modules/@smithy/util-stream/dist-types/splitStream.browser.d.ts new file mode 100644 index 0000000..506c23a --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@smithy/util-stream/dist-types/splitStream.browser.d.ts @@ -0,0 +1,5 @@ +/** + * @param stream + * @returns stream split into two identical streams. + */ +export declare function splitStream(stream: ReadableStream | Blob): Promise<[ReadableStream, ReadableStream]>; diff --git a/amplify/functions/fetchDocuments/node_modules/@smithy/util-stream/dist-types/splitStream.d.ts b/amplify/functions/fetchDocuments/node_modules/@smithy/util-stream/dist-types/splitStream.d.ts new file mode 100644 index 0000000..8a8a48c --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@smithy/util-stream/dist-types/splitStream.d.ts @@ -0,0 +1,9 @@ +/// +import type { Readable } from "stream"; +/** + * @internal + * @param stream - to be split. + * @returns stream split into two identical streams. 
+ */ +export declare function splitStream(stream: Readable): Promise<[Readable, Readable]>; +export declare function splitStream(stream: ReadableStream): Promise<[ReadableStream, ReadableStream]>; diff --git a/amplify/functions/fetchDocuments/node_modules/@smithy/util-stream/dist-types/stream-type-check.d.ts b/amplify/functions/fetchDocuments/node_modules/@smithy/util-stream/dist-types/stream-type-check.d.ts new file mode 100644 index 0000000..5607088 --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@smithy/util-stream/dist-types/stream-type-check.d.ts @@ -0,0 +1,16 @@ +/** + * @internal + * Alias prevents compiler from turning + * ReadableStream into ReadableStream, which is incompatible + * with the NodeJS.ReadableStream global type. + */ +type ReadableStreamType = ReadableStream; +/** + * @internal + */ +export declare const isReadableStream: (stream: unknown) => stream is ReadableStreamType; +/** + * @internal + */ +export declare const isBlob: (blob: unknown) => blob is Blob; +export {}; diff --git a/amplify/functions/fetchDocuments/node_modules/@smithy/util-stream/dist-types/ts3.4/ByteArrayCollector.d.ts b/amplify/functions/fetchDocuments/node_modules/@smithy/util-stream/dist-types/ts3.4/ByteArrayCollector.d.ts new file mode 100644 index 0000000..c309a6c --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@smithy/util-stream/dist-types/ts3.4/ByteArrayCollector.d.ts @@ -0,0 +1,13 @@ +/** + * Aggregates byteArrays on demand. 
+ * @internal + */ +export declare class ByteArrayCollector { + readonly allocByteArray: (size: number) => Uint8Array; + byteLength: number; + private byteArrays; + constructor(allocByteArray: (size: number) => Uint8Array); + push(byteArray: Uint8Array): void; + flush(): Uint8Array; + private reset; +} diff --git a/amplify/functions/fetchDocuments/node_modules/@smithy/util-stream/dist-types/ts3.4/blob/Uint8ArrayBlobAdapter.d.ts b/amplify/functions/fetchDocuments/node_modules/@smithy/util-stream/dist-types/ts3.4/blob/Uint8ArrayBlobAdapter.d.ts new file mode 100644 index 0000000..e0338a2 --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@smithy/util-stream/dist-types/ts3.4/blob/Uint8ArrayBlobAdapter.d.ts @@ -0,0 +1,21 @@ +/** + * Adapter for conversions of the native Uint8Array type. + * @public + */ +export declare class Uint8ArrayBlobAdapter extends Uint8Array { + /** + * @param source - such as a string or Stream. + * @returns a new Uint8ArrayBlobAdapter extending Uint8Array. + */ + static fromString(source: string, encoding?: string): Uint8ArrayBlobAdapter; + /** + * @param source - Uint8Array to be mutated. + * @returns the same Uint8Array but with prototype switched to Uint8ArrayBlobAdapter. + */ + static mutate(source: Uint8Array): Uint8ArrayBlobAdapter; + /** + * @param encoding - default 'utf-8'. + * @returns the blob as string. 
+ */ + transformToString(encoding?: string): string; +} diff --git a/amplify/functions/fetchDocuments/node_modules/@smithy/util-stream/dist-types/ts3.4/blob/transforms.d.ts b/amplify/functions/fetchDocuments/node_modules/@smithy/util-stream/dist-types/ts3.4/blob/transforms.d.ts new file mode 100644 index 0000000..6e3ee0a --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@smithy/util-stream/dist-types/ts3.4/blob/transforms.d.ts @@ -0,0 +1,9 @@ +import { Uint8ArrayBlobAdapter } from "./Uint8ArrayBlobAdapter"; +/** + * @internal + */ +export declare function transformToString(payload: Uint8Array, encoding?: string): string; +/** + * @internal + */ +export declare function transformFromString(str: string, encoding?: string): Uint8ArrayBlobAdapter; diff --git a/amplify/functions/fetchDocuments/node_modules/@smithy/util-stream/dist-types/ts3.4/checksum/ChecksumStream.browser.d.ts b/amplify/functions/fetchDocuments/node_modules/@smithy/util-stream/dist-types/ts3.4/checksum/ChecksumStream.browser.d.ts new file mode 100644 index 0000000..902a9b2 --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@smithy/util-stream/dist-types/ts3.4/checksum/ChecksumStream.browser.d.ts @@ -0,0 +1,37 @@ +import { Checksum, Encoder } from "@smithy/types"; +/** + * @internal + */ +export interface ChecksumStreamInit { + /** + * Base64 value of the expected checksum. + */ + expectedChecksum: string; + /** + * For error messaging, the location from which the checksum value was read. + */ + checksumSourceLocation: string; + /** + * The checksum calculator. + */ + checksum: Checksum; + /** + * The stream to be checked. + */ + source: ReadableStream; + /** + * Optional base 64 encoder if calling from a request context. + */ + base64Encoder?: Encoder; +} +declare const ChecksumStream_base: any; +/** + * This stub exists so that the readable returned by createChecksumStream + * identifies as "ChecksumStream" in alignment with the Node.js + * implementation. 
+ * + * @extends ReadableStream + */ +export declare class ChecksumStream extends ChecksumStream_base { +} +export {}; diff --git a/amplify/functions/fetchDocuments/node_modules/@smithy/util-stream/dist-types/ts3.4/checksum/ChecksumStream.d.ts b/amplify/functions/fetchDocuments/node_modules/@smithy/util-stream/dist-types/ts3.4/checksum/ChecksumStream.d.ts new file mode 100644 index 0000000..7151034 --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@smithy/util-stream/dist-types/ts3.4/checksum/ChecksumStream.d.ts @@ -0,0 +1,60 @@ +/// +import { Checksum, Encoder } from "@smithy/types"; +import { Duplex, Readable } from "stream"; +/** + * @internal + */ +export interface ChecksumStreamInit { + /** + * Base64 value of the expected checksum. + */ + expectedChecksum: string; + /** + * For error messaging, the location from which the checksum value was read. + */ + checksumSourceLocation: string; + /** + * The checksum calculator. + */ + checksum: Checksum; + /** + * The stream to be checked. + */ + source: T; + /** + * Optional base 64 encoder if calling from a request context. + */ + base64Encoder?: Encoder; +} +/** + * @internal + * + * Wrapper for throwing checksum errors for streams without + * buffering the stream. + * + */ +export declare class ChecksumStream extends Duplex { + private expectedChecksum; + private checksumSourceLocation; + private checksum; + private source?; + private base64Encoder; + constructor({ expectedChecksum, checksum, source, checksumSourceLocation, base64Encoder, }: ChecksumStreamInit); + /** + * @internal do not call this directly. + */ + _read(size: number): void; + /** + * @internal do not call this directly. + * + * When the upstream source flows data to this stream, + * calculate a step update of the checksum. + */ + _write(chunk: Buffer, encoding: string, callback: (err?: Error) => void): void; + /** + * @internal do not call this directly. + * + * When the upstream source finishes, perform the checksum comparison. 
+ */ + _final(callback: (err?: Error) => void): Promise; +} diff --git a/amplify/functions/fetchDocuments/node_modules/@smithy/util-stream/dist-types/ts3.4/checksum/createChecksumStream.browser.d.ts b/amplify/functions/fetchDocuments/node_modules/@smithy/util-stream/dist-types/ts3.4/checksum/createChecksumStream.browser.d.ts new file mode 100644 index 0000000..bd3c004 --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@smithy/util-stream/dist-types/ts3.4/checksum/createChecksumStream.browser.d.ts @@ -0,0 +1,15 @@ +import { ChecksumStreamInit } from "./ChecksumStream.browser"; +/** + * @internal + * Alias prevents compiler from turning + * ReadableStream into ReadableStream, which is incompatible + * with the NodeJS.ReadableStream global type. + */ +export type ReadableStreamType = ReadableStream; +/** + * @internal + * + * Creates a stream adapter for throwing checksum errors for streams without + * buffering the stream. + */ +export declare const createChecksumStream: ({ expectedChecksum, checksum, source, checksumSourceLocation, base64Encoder, }: ChecksumStreamInit) => ReadableStreamType; diff --git a/amplify/functions/fetchDocuments/node_modules/@smithy/util-stream/dist-types/ts3.4/checksum/createChecksumStream.d.ts b/amplify/functions/fetchDocuments/node_modules/@smithy/util-stream/dist-types/ts3.4/checksum/createChecksumStream.d.ts new file mode 100644 index 0000000..dc36418 --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@smithy/util-stream/dist-types/ts3.4/checksum/createChecksumStream.d.ts @@ -0,0 +1,12 @@ +/// +import { Readable } from "stream"; +import { ChecksumStreamInit } from "./ChecksumStream"; +import { ReadableStreamType } from "./createChecksumStream.browser"; +/** + * @internal + * + * Creates a stream mirroring the input stream's interface, but + * performs checksumming when reading to the end of the stream. 
+ */ +export declare function createChecksumStream(init: ChecksumStreamInit): ReadableStreamType; +export declare function createChecksumStream(init: ChecksumStreamInit): Readable; diff --git a/amplify/functions/fetchDocuments/node_modules/@smithy/util-stream/dist-types/ts3.4/createBufferedReadable.d.ts b/amplify/functions/fetchDocuments/node_modules/@smithy/util-stream/dist-types/ts3.4/createBufferedReadable.d.ts new file mode 100644 index 0000000..f62c741 --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@smithy/util-stream/dist-types/ts3.4/createBufferedReadable.d.ts @@ -0,0 +1,13 @@ +/// +import { Logger } from "@smithy/types"; +import { Readable } from "node:stream"; +/** + * @internal + * @param upstream - any Readable or ReadableStream. + * @param size - byte or character length minimum. Buffering occurs when a chunk fails to meet this value. + * @param onBuffer - for emitting warnings when buffering occurs. + * @returns another stream of the same data and stream class, but buffers chunks until + * the minimum size is met, except for the last chunk. + */ +export declare function createBufferedReadable(upstream: Readable, size: number, logger?: Logger): Readable; +export declare function createBufferedReadable(upstream: ReadableStream, size: number, logger?: Logger): ReadableStream; diff --git a/amplify/functions/fetchDocuments/node_modules/@smithy/util-stream/dist-types/ts3.4/createBufferedReadableStream.d.ts b/amplify/functions/fetchDocuments/node_modules/@smithy/util-stream/dist-types/ts3.4/createBufferedReadableStream.d.ts new file mode 100644 index 0000000..7b4effd --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@smithy/util-stream/dist-types/ts3.4/createBufferedReadableStream.d.ts @@ -0,0 +1,54 @@ +import { Logger } from "@smithy/types"; +import { ByteArrayCollector } from "./ByteArrayCollector"; +export type BufferStore = [ + string, + ByteArrayCollector, + ByteArrayCollector? 
+]; +export type BufferUnion = string | Uint8Array; +export type Modes = 0 | 1 | 2; +/** + * @internal + * @param upstream - any ReadableStream. + * @param size - byte or character length minimum. Buffering occurs when a chunk fails to meet this value. + * @param logger - for emitting warnings when buffering occurs. + * @returns another stream of the same data, but buffers chunks until + * the minimum size is met, except for the last chunk. + */ +export declare function createBufferedReadableStream(upstream: ReadableStream, size: number, logger?: Logger): ReadableStream; +/** + * Replaces R/RS polymorphic implementation in environments with only ReadableStream. + * @internal + */ +export declare const createBufferedReadable: typeof createBufferedReadableStream; +/** + * @internal + * @param buffers + * @param mode + * @param chunk + * @returns the new buffer size after merging the chunk with its appropriate buffer. + */ +export declare function merge(buffers: BufferStore, mode: Modes, chunk: string | Uint8Array): number; +/** + * @internal + * @param buffers + * @param mode + * @returns the buffer matching the mode. + */ +export declare function flush(buffers: BufferStore, mode: Modes | -1): BufferUnion; +/** + * @internal + * @param chunk + * @returns size of the chunk in bytes or characters. + */ +export declare function sizeOf(chunk?: { + byteLength?: number; + length?: number; +}): number; +/** + * @internal + * @param chunk - from upstream Readable. + * @param allowBuffer - allow mode 2 (Buffer), otherwise Buffer will return mode 1. + * @returns type index of the chunk. 
+ */ +export declare function modeOf(chunk: BufferUnion, allowBuffer?: boolean): Modes | -1; diff --git a/amplify/functions/fetchDocuments/node_modules/@smithy/util-stream/dist-types/ts3.4/getAwsChunkedEncodingStream.browser.d.ts b/amplify/functions/fetchDocuments/node_modules/@smithy/util-stream/dist-types/ts3.4/getAwsChunkedEncodingStream.browser.d.ts new file mode 100644 index 0000000..5979078 --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@smithy/util-stream/dist-types/ts3.4/getAwsChunkedEncodingStream.browser.d.ts @@ -0,0 +1,5 @@ +import { GetAwsChunkedEncodingStream } from "@smithy/types"; +/** + * @internal + */ +export declare const getAwsChunkedEncodingStream: GetAwsChunkedEncodingStream; diff --git a/amplify/functions/fetchDocuments/node_modules/@smithy/util-stream/dist-types/ts3.4/getAwsChunkedEncodingStream.d.ts b/amplify/functions/fetchDocuments/node_modules/@smithy/util-stream/dist-types/ts3.4/getAwsChunkedEncodingStream.d.ts new file mode 100644 index 0000000..a100381 --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@smithy/util-stream/dist-types/ts3.4/getAwsChunkedEncodingStream.d.ts @@ -0,0 +1,7 @@ +/// +import { GetAwsChunkedEncodingStream } from "@smithy/types"; +import { Readable } from "stream"; +/** + * @internal + */ +export declare const getAwsChunkedEncodingStream: GetAwsChunkedEncodingStream; diff --git a/amplify/functions/fetchDocuments/node_modules/@smithy/util-stream/dist-types/ts3.4/headStream.browser.d.ts b/amplify/functions/fetchDocuments/node_modules/@smithy/util-stream/dist-types/ts3.4/headStream.browser.d.ts new file mode 100644 index 0000000..d8654c3 --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@smithy/util-stream/dist-types/ts3.4/headStream.browser.d.ts @@ -0,0 +1,8 @@ +/** + * @internal + * @param stream + * @param bytes - read head bytes from the stream and discard the rest of it. 
+ * + * Caution: the input stream must be destroyed separately, this function does not do so. + */ +export declare function headStream(stream: ReadableStream, bytes: number): Promise; diff --git a/amplify/functions/fetchDocuments/node_modules/@smithy/util-stream/dist-types/ts3.4/headStream.d.ts b/amplify/functions/fetchDocuments/node_modules/@smithy/util-stream/dist-types/ts3.4/headStream.d.ts new file mode 100644 index 0000000..7037715 --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@smithy/util-stream/dist-types/ts3.4/headStream.d.ts @@ -0,0 +1,9 @@ +import { Readable } from "stream"; +/** + * @internal + * @param stream - to be read. + * @param bytes - read head bytes from the stream and discard the rest of it. + * + * Caution: the input stream must be destroyed separately, this function does not do so. + */ +export declare const headStream: (stream: Readable | ReadableStream, bytes: number) => Promise; diff --git a/amplify/functions/fetchDocuments/node_modules/@smithy/util-stream/dist-types/ts3.4/index.d.ts b/amplify/functions/fetchDocuments/node_modules/@smithy/util-stream/dist-types/ts3.4/index.d.ts new file mode 100644 index 0000000..c7c4c3f --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@smithy/util-stream/dist-types/ts3.4/index.d.ts @@ -0,0 +1,9 @@ +export * from "./blob/Uint8ArrayBlobAdapter"; +export * from "./checksum/ChecksumStream"; +export * from "./checksum/createChecksumStream"; +export * from "./createBufferedReadable"; +export * from "./getAwsChunkedEncodingStream"; +export * from "./headStream"; +export * from "./sdk-stream-mixin"; +export * from "./splitStream"; +export * from "./stream-type-check"; diff --git a/amplify/functions/fetchDocuments/node_modules/@smithy/util-stream/dist-types/ts3.4/sdk-stream-mixin.browser.d.ts b/amplify/functions/fetchDocuments/node_modules/@smithy/util-stream/dist-types/ts3.4/sdk-stream-mixin.browser.d.ts new file mode 100644 index 0000000..99dea40 --- /dev/null +++ 
b/amplify/functions/fetchDocuments/node_modules/@smithy/util-stream/dist-types/ts3.4/sdk-stream-mixin.browser.d.ts @@ -0,0 +1,7 @@ +import { SdkStream } from "@smithy/types"; +/** + * The stream handling utility functions for browsers and React Native + * + * @internal + */ +export declare const sdkStreamMixin: (stream: unknown) => SdkStream; diff --git a/amplify/functions/fetchDocuments/node_modules/@smithy/util-stream/dist-types/ts3.4/sdk-stream-mixin.d.ts b/amplify/functions/fetchDocuments/node_modules/@smithy/util-stream/dist-types/ts3.4/sdk-stream-mixin.d.ts new file mode 100644 index 0000000..c05518a --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@smithy/util-stream/dist-types/ts3.4/sdk-stream-mixin.d.ts @@ -0,0 +1,8 @@ +import { SdkStream } from "@smithy/types"; +import { Readable } from "stream"; +/** + * The function that mixes in the utility functions to help consuming runtime-specific payload stream. + * + * @internal + */ +export declare const sdkStreamMixin: (stream: unknown) => SdkStream | SdkStream; diff --git a/amplify/functions/fetchDocuments/node_modules/@smithy/util-stream/dist-types/ts3.4/splitStream.browser.d.ts b/amplify/functions/fetchDocuments/node_modules/@smithy/util-stream/dist-types/ts3.4/splitStream.browser.d.ts new file mode 100644 index 0000000..25c8549 --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@smithy/util-stream/dist-types/ts3.4/splitStream.browser.d.ts @@ -0,0 +1,8 @@ +/** + * @param stream + * @returns stream split into two identical streams. 
+ */ +export declare function splitStream(stream: ReadableStream | Blob): Promise<[ + ReadableStream, + ReadableStream +]>; diff --git a/amplify/functions/fetchDocuments/node_modules/@smithy/util-stream/dist-types/ts3.4/splitStream.d.ts b/amplify/functions/fetchDocuments/node_modules/@smithy/util-stream/dist-types/ts3.4/splitStream.d.ts new file mode 100644 index 0000000..61a7620 --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@smithy/util-stream/dist-types/ts3.4/splitStream.d.ts @@ -0,0 +1,15 @@ +/// +import { Readable } from "stream"; +/** + * @internal + * @param stream - to be split. + * @returns stream split into two identical streams. + */ +export declare function splitStream(stream: Readable): Promise<[ + Readable, + Readable +]>; +export declare function splitStream(stream: ReadableStream): Promise<[ + ReadableStream, + ReadableStream +]>; diff --git a/amplify/functions/fetchDocuments/node_modules/@smithy/util-stream/dist-types/ts3.4/stream-type-check.d.ts b/amplify/functions/fetchDocuments/node_modules/@smithy/util-stream/dist-types/ts3.4/stream-type-check.d.ts new file mode 100644 index 0000000..11be8f1 --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@smithy/util-stream/dist-types/ts3.4/stream-type-check.d.ts @@ -0,0 +1,16 @@ +/** + * @internal + * Alias prevents compiler from turning + * ReadableStream into ReadableStream, which is incompatible + * with the NodeJS.ReadableStream global type. 
+ */ +type ReadableStreamType = ReadableStream; +/** + * @internal + */ +export declare const isReadableStream: (stream: unknown) => stream is ReadableStreamType; +/** + * @internal + */ +export declare const isBlob: (blob: unknown) => blob is Blob; +export {}; diff --git a/amplify/functions/fetchDocuments/node_modules/@smithy/util-stream/package.json b/amplify/functions/fetchDocuments/node_modules/@smithy/util-stream/package.json new file mode 100644 index 0000000..769bfc1 --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@smithy/util-stream/package.json @@ -0,0 +1,98 @@ +{ + "name": "@smithy/util-stream", + "version": "4.2.0", + "scripts": { + "build": "concurrently 'yarn:build:cjs' 'yarn:build:es' 'yarn:build:types && yarn build:types:downlevel'", + "build:cjs": "node ../../scripts/inline util-stream", + "build:es": "yarn g:tsc -p tsconfig.es.json", + "build:types": "yarn g:tsc -p tsconfig.types.json", + "build:types:downlevel": "rimraf dist-types/ts3.4 && downlevel-dts dist-types dist-types/ts3.4", + "stage-release": "rimraf ./.release && yarn pack && mkdir ./.release && tar zxvf ./package.tgz --directory ./.release && rm ./package.tgz", + "clean": "rimraf ./dist-* && rimraf *.tsbuildinfo || exit 0", + "lint": "eslint -c ../../.eslintrc.js \"src/**/*.ts\"", + "format": "prettier --config ../../prettier.config.js --ignore-path ../../.prettierignore --write \"**/*.{ts,md,json}\"", + "extract:docs": "api-extractor run --local", + "test": "yarn g:vitest run && yarn test:browser", + "test:integration": "yarn g:vitest run -c vitest.config.integ.ts", + "test:watch": "yarn g:vitest watch", + "test:integration:watch": "yarn g:vitest watch -c vitest.config.integ.ts", + "test:browser": "yarn g:vitest run -c vitest.config.browser.ts", + "test:browser:watch": "yarn g:vitest watch -c vitest.config.browser.ts" + }, + "main": "./dist-cjs/index.js", + "module": "./dist-es/index.js", + "types": "./dist-types/index.d.ts", + "author": { + "name": "AWS SDK for 
JavaScript Team", + "url": "https://aws.amazon.com/javascript/" + }, + "license": "Apache-2.0", + "dependencies": { + "@smithy/fetch-http-handler": "^5.0.2", + "@smithy/node-http-handler": "^4.0.4", + "@smithy/types": "^4.2.0", + "@smithy/util-base64": "^4.0.0", + "@smithy/util-buffer-from": "^4.0.0", + "@smithy/util-hex-encoding": "^4.0.0", + "@smithy/util-utf8": "^4.0.0", + "tslib": "^2.6.2" + }, + "devDependencies": { + "@smithy/util-test": "^0.2.8", + "@types/node": "^18.11.9", + "concurrently": "7.0.0", + "downlevel-dts": "0.10.1", + "rimraf": "3.0.2", + "typedoc": "0.23.23" + }, + "engines": { + "node": ">=18.0.0" + }, + "typesVersions": { + "<4.0": { + "dist-types/*": [ + "dist-types/ts3.4/*" + ] + } + }, + "files": [ + "dist-*/**" + ], + "browser": { + "./dist-es/checksum/ChecksumStream": "./dist-es/checksum/ChecksumStream.browser", + "./dist-es/checksum/createChecksumStream": "./dist-es/checksum/createChecksumStream.browser", + "./dist-es/createBufferedReadable": "./dist-es/createBufferedReadableStream", + "./dist-es/getAwsChunkedEncodingStream": "./dist-es/getAwsChunkedEncodingStream.browser", + "./dist-es/headStream": "./dist-es/headStream.browser", + "./dist-es/sdk-stream-mixin": "./dist-es/sdk-stream-mixin.browser", + "./dist-es/splitStream": "./dist-es/splitStream.browser" + }, + "react-native": { + "./dist-es/checksum/createChecksumStream": "./dist-es/checksum/createChecksumStream.browser", + "./dist-es/checksum/ChecksumStream": "./dist-es/checksum/ChecksumStream.browser", + "./dist-es/getAwsChunkedEncodingStream": "./dist-es/getAwsChunkedEncodingStream.browser", + "./dist-es/sdk-stream-mixin": "./dist-es/sdk-stream-mixin.browser", + "./dist-es/headStream": "./dist-es/headStream.browser", + "./dist-es/splitStream": "./dist-es/splitStream.browser", + "./dist-es/createBufferedReadable": "./dist-es/createBufferedReadableStream", + "./dist-cjs/checksum/createChecksumStream": "./dist-cjs/checksum/createChecksumStream.browser", + 
"./dist-cjs/checksum/ChecksumStream": "./dist-cjs/checksum/ChecksumStream.browser", + "./dist-cjs/getAwsChunkedEncodingStream": "./dist-cjs/getAwsChunkedEncodingStream.browser", + "./dist-cjs/sdk-stream-mixin": "./dist-cjs/sdk-stream-mixin.browser", + "./dist-cjs/headStream": "./dist-cjs/headStream.browser", + "./dist-cjs/splitStream": "./dist-cjs/splitStream.browser", + "./dist-cjs/createBufferedReadable": "./dist-cjs/createBufferedReadableStream" + }, + "homepage": "https://github.com/smithy-lang/smithy-typescript/tree/main/packages/util-stream", + "repository": { + "type": "git", + "url": "https://github.com/smithy-lang/smithy-typescript.git", + "directory": "packages/util-stream" + }, + "typedoc": { + "entryPoint": "src/index.ts" + }, + "publishConfig": { + "directory": ".release/package" + } +} \ No newline at end of file diff --git a/amplify/functions/fetchDocuments/node_modules/@smithy/util-uri-escape/LICENSE b/amplify/functions/fetchDocuments/node_modules/@smithy/util-uri-escape/LICENSE new file mode 100644 index 0000000..7b6491b --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@smithy/util-uri-escape/LICENSE @@ -0,0 +1,201 @@ +Apache License + Version 2.0, January 2004 + http://www.apache.org/licenses/ + + TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION + + 1. Definitions. + + "License" shall mean the terms and conditions for use, reproduction, + and distribution as defined by Sections 1 through 9 of this document. + + "Licensor" shall mean the copyright owner or entity authorized by + the copyright owner that is granting the License. + + "Legal Entity" shall mean the union of the acting entity and all + other entities that control, are controlled by, or are under common + control with that entity. 
For the purposes of this definition, + "control" means (i) the power, direct or indirect, to cause the + direction or management of such entity, whether by contract or + otherwise, or (ii) ownership of fifty percent (50%) or more of the + outstanding shares, or (iii) beneficial ownership of such entity. + + "You" (or "Your") shall mean an individual or Legal Entity + exercising permissions granted by this License. + + "Source" form shall mean the preferred form for making modifications, + including but not limited to software source code, documentation + source, and configuration files. + + "Object" form shall mean any form resulting from mechanical + transformation or translation of a Source form, including but + not limited to compiled object code, generated documentation, + and conversions to other media types. + + "Work" shall mean the work of authorship, whether in Source or + Object form, made available under the License, as indicated by a + copyright notice that is included in or attached to the work + (an example is provided in the Appendix below). + + "Derivative Works" shall mean any work, whether in Source or Object + form, that is based on (or derived from) the Work and for which the + editorial revisions, annotations, elaborations, or other modifications + represent, as a whole, an original work of authorship. For the purposes + of this License, Derivative Works shall not include works that remain + separable from, or merely link (or bind by name) to the interfaces of, + the Work and Derivative Works thereof. + + "Contribution" shall mean any work of authorship, including + the original version of the Work and any modifications or additions + to that Work or Derivative Works thereof, that is intentionally + submitted to Licensor for inclusion in the Work by the copyright owner + or by an individual or Legal Entity authorized to submit on behalf of + the copyright owner. 
For the purposes of this definition, "submitted" + means any form of electronic, verbal, or written communication sent + to the Licensor or its representatives, including but not limited to + communication on electronic mailing lists, source code control systems, + and issue tracking systems that are managed by, or on behalf of, the + Licensor for the purpose of discussing and improving the Work, but + excluding communication that is conspicuously marked or otherwise + designated in writing by the copyright owner as "Not a Contribution." + + "Contributor" shall mean Licensor and any individual or Legal Entity + on behalf of whom a Contribution has been received by Licensor and + subsequently incorporated within the Work. + + 2. Grant of Copyright License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + copyright license to reproduce, prepare Derivative Works of, + publicly display, publicly perform, sublicense, and distribute the + Work and such Derivative Works in Source or Object form. + + 3. Grant of Patent License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + (except as stated in this section) patent license to make, have made, + use, offer to sell, sell, import, and otherwise transfer the Work, + where such license applies only to those patent claims licensable + by such Contributor that are necessarily infringed by their + Contribution(s) alone or by combination of their Contribution(s) + with the Work to which such Contribution(s) was submitted. 
If You + institute patent litigation against any entity (including a + cross-claim or counterclaim in a lawsuit) alleging that the Work + or a Contribution incorporated within the Work constitutes direct + or contributory patent infringement, then any patent licenses + granted to You under this License for that Work shall terminate + as of the date such litigation is filed. + + 4. Redistribution. You may reproduce and distribute copies of the + Work or Derivative Works thereof in any medium, with or without + modifications, and in Source or Object form, provided that You + meet the following conditions: + + (a) You must give any other recipients of the Work or + Derivative Works a copy of this License; and + + (b) You must cause any modified files to carry prominent notices + stating that You changed the files; and + + (c) You must retain, in the Source form of any Derivative Works + that You distribute, all copyright, patent, trademark, and + attribution notices from the Source form of the Work, + excluding those notices that do not pertain to any part of + the Derivative Works; and + + (d) If the Work includes a "NOTICE" text file as part of its + distribution, then any Derivative Works that You distribute must + include a readable copy of the attribution notices contained + within such NOTICE file, excluding those notices that do not + pertain to any part of the Derivative Works, in at least one + of the following places: within a NOTICE text file distributed + as part of the Derivative Works; within the Source form or + documentation, if provided along with the Derivative Works; or, + within a display generated by the Derivative Works, if and + wherever such third-party notices normally appear. The contents + of the NOTICE file are for informational purposes only and + do not modify the License. 
You may add Your own attribution + notices within Derivative Works that You distribute, alongside + or as an addendum to the NOTICE text from the Work, provided + that such additional attribution notices cannot be construed + as modifying the License. + + You may add Your own copyright statement to Your modifications and + may provide additional or different license terms and conditions + for use, reproduction, or distribution of Your modifications, or + for any such Derivative Works as a whole, provided Your use, + reproduction, and distribution of the Work otherwise complies with + the conditions stated in this License. + + 5. Submission of Contributions. Unless You explicitly state otherwise, + any Contribution intentionally submitted for inclusion in the Work + by You to the Licensor shall be under the terms and conditions of + this License, without any additional terms or conditions. + Notwithstanding the above, nothing herein shall supersede or modify + the terms of any separate license agreement you may have executed + with Licensor regarding such Contributions. + + 6. Trademarks. This License does not grant permission to use the trade + names, trademarks, service marks, or product names of the Licensor, + except as required for reasonable and customary use in describing the + origin of the Work and reproducing the content of the NOTICE file. + + 7. Disclaimer of Warranty. Unless required by applicable law or + agreed to in writing, Licensor provides the Work (and each + Contributor provides its Contributions) on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or + implied, including, without limitation, any warranties or conditions + of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A + PARTICULAR PURPOSE. You are solely responsible for determining the + appropriateness of using or redistributing the Work and assume any + risks associated with Your exercise of permissions under this License. + + 8. 
Limitation of Liability. In no event and under no legal theory, + whether in tort (including negligence), contract, or otherwise, + unless required by applicable law (such as deliberate and grossly + negligent acts) or agreed to in writing, shall any Contributor be + liable to You for damages, including any direct, indirect, special, + incidental, or consequential damages of any character arising as a + result of this License or out of the use or inability to use the + Work (including but not limited to damages for loss of goodwill, + work stoppage, computer failure or malfunction, or any and all + other commercial damages or losses), even if such Contributor + has been advised of the possibility of such damages. + + 9. Accepting Warranty or Additional Liability. While redistributing + the Work or Derivative Works thereof, You may choose to offer, + and charge a fee for, acceptance of support, warranty, indemnity, + or other liability obligations and/or rights consistent with this + License. However, in accepting such obligations, You may act only + on Your own behalf and on Your sole responsibility, not on behalf + of any other Contributor, and only if You agree to indemnify, + defend, and hold each Contributor harmless for any liability + incurred by, or claims asserted against, such Contributor by reason + of your accepting any such warranty or additional liability. + + END OF TERMS AND CONDITIONS + + APPENDIX: How to apply the Apache License to your work. + + To apply the Apache License to your work, attach the following + boilerplate notice, with the fields enclosed by brackets "{}" + replaced with your own identifying information. (Don't include + the brackets!) The text should be enclosed in the appropriate + comment syntax for the file format. We also recommend that a + file or class name and description of purpose be included on the + same "printed page" as the copyright notice for easier + identification within third-party archives. 
+ + Copyright 2018-2020 Amazon.com, Inc. or its affiliates. All Rights Reserved. + + Licensed under the Apache License, Version 2.0 (the "License"); + you may not use this file except in compliance with the License. + You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + + Unless required by applicable law or agreed to in writing, software + distributed under the License is distributed on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + See the License for the specific language governing permissions and + limitations under the License. \ No newline at end of file diff --git a/amplify/functions/fetchDocuments/node_modules/@smithy/util-uri-escape/README.md b/amplify/functions/fetchDocuments/node_modules/@smithy/util-uri-escape/README.md new file mode 100644 index 0000000..22e939a --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@smithy/util-uri-escape/README.md @@ -0,0 +1,10 @@ +# @smithy/util-uri-escape + +[![NPM version](https://img.shields.io/npm/v/@smithy/util-uri-escape/latest.svg)](https://www.npmjs.com/package/@smithy/util-uri-escape) +[![NPM downloads](https://img.shields.io/npm/dm/@smithy/util-uri-escape.svg)](https://www.npmjs.com/package/@smithy/util-uri-escape) + +> An internal package + +## Usage + +You probably shouldn't, at least directly. 
diff --git a/amplify/functions/fetchDocuments/node_modules/@smithy/util-uri-escape/dist-cjs/escape-uri-path.js b/amplify/functions/fetchDocuments/node_modules/@smithy/util-uri-escape/dist-cjs/escape-uri-path.js new file mode 100644 index 0000000..532e610 --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@smithy/util-uri-escape/dist-cjs/escape-uri-path.js @@ -0,0 +1 @@ +module.exports = require("./index.js"); \ No newline at end of file diff --git a/amplify/functions/fetchDocuments/node_modules/@smithy/util-uri-escape/dist-cjs/escape-uri.js b/amplify/functions/fetchDocuments/node_modules/@smithy/util-uri-escape/dist-cjs/escape-uri.js new file mode 100644 index 0000000..532e610 --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@smithy/util-uri-escape/dist-cjs/escape-uri.js @@ -0,0 +1 @@ +module.exports = require("./index.js"); \ No newline at end of file diff --git a/amplify/functions/fetchDocuments/node_modules/@smithy/util-uri-escape/dist-cjs/index.js b/amplify/functions/fetchDocuments/node_modules/@smithy/util-uri-escape/dist-cjs/index.js new file mode 100644 index 0000000..51001ef --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@smithy/util-uri-escape/dist-cjs/index.js @@ -0,0 +1,43 @@ +var __defProp = Object.defineProperty; +var __getOwnPropDesc = Object.getOwnPropertyDescriptor; +var __getOwnPropNames = Object.getOwnPropertyNames; +var __hasOwnProp = Object.prototype.hasOwnProperty; +var __name = (target, value) => __defProp(target, "name", { value, configurable: true }); +var __export = (target, all) => { + for (var name in all) + __defProp(target, name, { get: all[name], enumerable: true }); +}; +var __copyProps = (to, from, except, desc) => { + if (from && typeof from === "object" || typeof from === "function") { + for (let key of __getOwnPropNames(from)) + if (!__hasOwnProp.call(to, key) && key !== except) + __defProp(to, key, { get: () => from[key], enumerable: !(desc = __getOwnPropDesc(from, key)) || 
desc.enumerable }); + } + return to; +}; +var __toCommonJS = (mod) => __copyProps(__defProp({}, "__esModule", { value: true }), mod); + +// src/index.ts +var src_exports = {}; +__export(src_exports, { + escapeUri: () => escapeUri, + escapeUriPath: () => escapeUriPath +}); +module.exports = __toCommonJS(src_exports); + +// src/escape-uri.ts +var escapeUri = /* @__PURE__ */ __name((uri) => ( + // AWS percent-encodes some extra non-standard characters in a URI + encodeURIComponent(uri).replace(/[!'()*]/g, hexEncode) +), "escapeUri"); +var hexEncode = /* @__PURE__ */ __name((c) => `%${c.charCodeAt(0).toString(16).toUpperCase()}`, "hexEncode"); + +// src/escape-uri-path.ts +var escapeUriPath = /* @__PURE__ */ __name((uri) => uri.split("/").map(escapeUri).join("/"), "escapeUriPath"); +// Annotate the CommonJS export names for ESM import in node: + +0 && (module.exports = { + escapeUri, + escapeUriPath +}); + diff --git a/amplify/functions/fetchDocuments/node_modules/@smithy/util-uri-escape/dist-es/escape-uri-path.js b/amplify/functions/fetchDocuments/node_modules/@smithy/util-uri-escape/dist-es/escape-uri-path.js new file mode 100644 index 0000000..81b3fe3 --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@smithy/util-uri-escape/dist-es/escape-uri-path.js @@ -0,0 +1,2 @@ +import { escapeUri } from "./escape-uri"; +export const escapeUriPath = (uri) => uri.split("/").map(escapeUri).join("/"); diff --git a/amplify/functions/fetchDocuments/node_modules/@smithy/util-uri-escape/dist-es/escape-uri.js b/amplify/functions/fetchDocuments/node_modules/@smithy/util-uri-escape/dist-es/escape-uri.js new file mode 100644 index 0000000..8990be1 --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@smithy/util-uri-escape/dist-es/escape-uri.js @@ -0,0 +1,2 @@ +export const escapeUri = (uri) => encodeURIComponent(uri).replace(/[!'()*]/g, hexEncode); +const hexEncode = (c) => `%${c.charCodeAt(0).toString(16).toUpperCase()}`; diff --git 
a/amplify/functions/fetchDocuments/node_modules/@smithy/util-uri-escape/dist-es/index.js b/amplify/functions/fetchDocuments/node_modules/@smithy/util-uri-escape/dist-es/index.js new file mode 100644 index 0000000..ed402e1 --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@smithy/util-uri-escape/dist-es/index.js @@ -0,0 +1,2 @@ +export * from "./escape-uri"; +export * from "./escape-uri-path"; diff --git a/amplify/functions/fetchDocuments/node_modules/@smithy/util-uri-escape/dist-types/escape-uri-path.d.ts b/amplify/functions/fetchDocuments/node_modules/@smithy/util-uri-escape/dist-types/escape-uri-path.d.ts new file mode 100644 index 0000000..b547ff9 --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@smithy/util-uri-escape/dist-types/escape-uri-path.d.ts @@ -0,0 +1,4 @@ +/** + * @internal + */ +export declare const escapeUriPath: (uri: string) => string; diff --git a/amplify/functions/fetchDocuments/node_modules/@smithy/util-uri-escape/dist-types/escape-uri.d.ts b/amplify/functions/fetchDocuments/node_modules/@smithy/util-uri-escape/dist-types/escape-uri.d.ts new file mode 100644 index 0000000..3f14d2c --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@smithy/util-uri-escape/dist-types/escape-uri.d.ts @@ -0,0 +1,4 @@ +/** + * @internal + */ +export declare const escapeUri: (uri: string) => string; diff --git a/amplify/functions/fetchDocuments/node_modules/@smithy/util-uri-escape/dist-types/index.d.ts b/amplify/functions/fetchDocuments/node_modules/@smithy/util-uri-escape/dist-types/index.d.ts new file mode 100644 index 0000000..1913825 --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@smithy/util-uri-escape/dist-types/index.d.ts @@ -0,0 +1,8 @@ +/** + * @internal + */ +export * from "./escape-uri"; +/** + * @internal + */ +export * from "./escape-uri-path"; diff --git a/amplify/functions/fetchDocuments/node_modules/@smithy/util-uri-escape/dist-types/ts3.4/escape-uri-path.d.ts 
b/amplify/functions/fetchDocuments/node_modules/@smithy/util-uri-escape/dist-types/ts3.4/escape-uri-path.d.ts new file mode 100644 index 0000000..a7e19ca --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@smithy/util-uri-escape/dist-types/ts3.4/escape-uri-path.d.ts @@ -0,0 +1,4 @@ +/** + * @internal + */ +export declare const escapeUriPath: (uri: string) => string; diff --git a/amplify/functions/fetchDocuments/node_modules/@smithy/util-uri-escape/dist-types/ts3.4/escape-uri.d.ts b/amplify/functions/fetchDocuments/node_modules/@smithy/util-uri-escape/dist-types/ts3.4/escape-uri.d.ts new file mode 100644 index 0000000..13cc372 --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@smithy/util-uri-escape/dist-types/ts3.4/escape-uri.d.ts @@ -0,0 +1,4 @@ +/** + * @internal + */ +export declare const escapeUri: (uri: string) => string; diff --git a/amplify/functions/fetchDocuments/node_modules/@smithy/util-uri-escape/dist-types/ts3.4/index.d.ts b/amplify/functions/fetchDocuments/node_modules/@smithy/util-uri-escape/dist-types/ts3.4/index.d.ts new file mode 100644 index 0000000..ad719fe --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@smithy/util-uri-escape/dist-types/ts3.4/index.d.ts @@ -0,0 +1,8 @@ +/** + * @internal + */ +export * from "./escape-uri"; +/** + * @internal + */ +export * from "./escape-uri-path"; diff --git a/amplify/functions/fetchDocuments/node_modules/@smithy/util-uri-escape/package.json b/amplify/functions/fetchDocuments/node_modules/@smithy/util-uri-escape/package.json new file mode 100644 index 0000000..4ca6fd9 --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@smithy/util-uri-escape/package.json @@ -0,0 +1,59 @@ +{ + "name": "@smithy/util-uri-escape", + "version": "4.0.0", + "scripts": { + "build": "concurrently 'yarn:build:cjs' 'yarn:build:es' 'yarn:build:types && yarn build:types:downlevel'", + "build:cjs": "node ../../scripts/inline util-uri-escape", + "build:es": "yarn g:tsc -p 
tsconfig.es.json", + "build:types": "yarn g:tsc -p tsconfig.types.json", + "build:types:downlevel": "rimraf dist-types/ts3.4 && downlevel-dts dist-types dist-types/ts3.4", + "stage-release": "rimraf ./.release && yarn pack && mkdir ./.release && tar zxvf ./package.tgz --directory ./.release && rm ./package.tgz", + "clean": "rimraf ./dist-* && rimraf *.tsbuildinfo || exit 0", + "lint": "eslint -c ../../.eslintrc.js \"src/**/*.ts\"", + "format": "prettier --config ../../prettier.config.js --ignore-path ../.prettierignore --write \"**/*.{ts,md,json}\"", + "test": "yarn g:vitest run", + "test:watch": "yarn g:vitest watch" + }, + "main": "./dist-cjs/index.js", + "module": "./dist-es/index.js", + "types": "./dist-types/index.d.ts", + "author": { + "name": "AWS SDK for JavaScript Team", + "url": "https://aws.amazon.com/javascript/" + }, + "license": "Apache-2.0", + "dependencies": { + "tslib": "^2.6.2" + }, + "engines": { + "node": ">=18.0.0" + }, + "typesVersions": { + "<4.0": { + "dist-types/*": [ + "dist-types/ts3.4/*" + ] + } + }, + "files": [ + "dist-*/**" + ], + "homepage": "https://github.com/awslabs/smithy-typescript/tree/main/packages/util-uri-escape", + "repository": { + "type": "git", + "url": "https://github.com/awslabs/smithy-typescript.git", + "directory": "packages/util-uri-escape" + }, + "devDependencies": { + "concurrently": "7.0.0", + "downlevel-dts": "0.10.1", + "rimraf": "3.0.2", + "typedoc": "0.23.23" + }, + "typedoc": { + "entryPoint": "src/index.ts" + }, + "publishConfig": { + "directory": ".release/package" + } +} \ No newline at end of file diff --git a/amplify/functions/fetchDocuments/node_modules/@smithy/util-utf8/LICENSE b/amplify/functions/fetchDocuments/node_modules/@smithy/util-utf8/LICENSE new file mode 100644 index 0000000..7b6491b --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@smithy/util-utf8/LICENSE @@ -0,0 +1,201 @@ +Apache License + Version 2.0, January 2004 + http://www.apache.org/licenses/ + + TERMS AND 
CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION + + 1. Definitions. + + "License" shall mean the terms and conditions for use, reproduction, + and distribution as defined by Sections 1 through 9 of this document. + + "Licensor" shall mean the copyright owner or entity authorized by + the copyright owner that is granting the License. + + "Legal Entity" shall mean the union of the acting entity and all + other entities that control, are controlled by, or are under common + control with that entity. For the purposes of this definition, + "control" means (i) the power, direct or indirect, to cause the + direction or management of such entity, whether by contract or + otherwise, or (ii) ownership of fifty percent (50%) or more of the + outstanding shares, or (iii) beneficial ownership of such entity. + + "You" (or "Your") shall mean an individual or Legal Entity + exercising permissions granted by this License. + + "Source" form shall mean the preferred form for making modifications, + including but not limited to software source code, documentation + source, and configuration files. + + "Object" form shall mean any form resulting from mechanical + transformation or translation of a Source form, including but + not limited to compiled object code, generated documentation, + and conversions to other media types. + + "Work" shall mean the work of authorship, whether in Source or + Object form, made available under the License, as indicated by a + copyright notice that is included in or attached to the work + (an example is provided in the Appendix below). + + "Derivative Works" shall mean any work, whether in Source or Object + form, that is based on (or derived from) the Work and for which the + editorial revisions, annotations, elaborations, or other modifications + represent, as a whole, an original work of authorship. 
For the purposes + of this License, Derivative Works shall not include works that remain + separable from, or merely link (or bind by name) to the interfaces of, + the Work and Derivative Works thereof. + + "Contribution" shall mean any work of authorship, including + the original version of the Work and any modifications or additions + to that Work or Derivative Works thereof, that is intentionally + submitted to Licensor for inclusion in the Work by the copyright owner + or by an individual or Legal Entity authorized to submit on behalf of + the copyright owner. For the purposes of this definition, "submitted" + means any form of electronic, verbal, or written communication sent + to the Licensor or its representatives, including but not limited to + communication on electronic mailing lists, source code control systems, + and issue tracking systems that are managed by, or on behalf of, the + Licensor for the purpose of discussing and improving the Work, but + excluding communication that is conspicuously marked or otherwise + designated in writing by the copyright owner as "Not a Contribution." + + "Contributor" shall mean Licensor and any individual or Legal Entity + on behalf of whom a Contribution has been received by Licensor and + subsequently incorporated within the Work. + + 2. Grant of Copyright License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + copyright license to reproduce, prepare Derivative Works of, + publicly display, publicly perform, sublicense, and distribute the + Work and such Derivative Works in Source or Object form. + + 3. Grant of Patent License. 
Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + (except as stated in this section) patent license to make, have made, + use, offer to sell, sell, import, and otherwise transfer the Work, + where such license applies only to those patent claims licensable + by such Contributor that are necessarily infringed by their + Contribution(s) alone or by combination of their Contribution(s) + with the Work to which such Contribution(s) was submitted. If You + institute patent litigation against any entity (including a + cross-claim or counterclaim in a lawsuit) alleging that the Work + or a Contribution incorporated within the Work constitutes direct + or contributory patent infringement, then any patent licenses + granted to You under this License for that Work shall terminate + as of the date such litigation is filed. + + 4. Redistribution. You may reproduce and distribute copies of the + Work or Derivative Works thereof in any medium, with or without + modifications, and in Source or Object form, provided that You + meet the following conditions: + + (a) You must give any other recipients of the Work or + Derivative Works a copy of this License; and + + (b) You must cause any modified files to carry prominent notices + stating that You changed the files; and + + (c) You must retain, in the Source form of any Derivative Works + that You distribute, all copyright, patent, trademark, and + attribution notices from the Source form of the Work, + excluding those notices that do not pertain to any part of + the Derivative Works; and + + (d) If the Work includes a "NOTICE" text file as part of its + distribution, then any Derivative Works that You distribute must + include a readable copy of the attribution notices contained + within such NOTICE file, excluding those notices that do not + pertain to any part of the Derivative Works, in at least one + of 
the following places: within a NOTICE text file distributed + as part of the Derivative Works; within the Source form or + documentation, if provided along with the Derivative Works; or, + within a display generated by the Derivative Works, if and + wherever such third-party notices normally appear. The contents + of the NOTICE file are for informational purposes only and + do not modify the License. You may add Your own attribution + notices within Derivative Works that You distribute, alongside + or as an addendum to the NOTICE text from the Work, provided + that such additional attribution notices cannot be construed + as modifying the License. + + You may add Your own copyright statement to Your modifications and + may provide additional or different license terms and conditions + for use, reproduction, or distribution of Your modifications, or + for any such Derivative Works as a whole, provided Your use, + reproduction, and distribution of the Work otherwise complies with + the conditions stated in this License. + + 5. Submission of Contributions. Unless You explicitly state otherwise, + any Contribution intentionally submitted for inclusion in the Work + by You to the Licensor shall be under the terms and conditions of + this License, without any additional terms or conditions. + Notwithstanding the above, nothing herein shall supersede or modify + the terms of any separate license agreement you may have executed + with Licensor regarding such Contributions. + + 6. Trademarks. This License does not grant permission to use the trade + names, trademarks, service marks, or product names of the Licensor, + except as required for reasonable and customary use in describing the + origin of the Work and reproducing the content of the NOTICE file. + + 7. Disclaimer of Warranty. 
Unless required by applicable law or + agreed to in writing, Licensor provides the Work (and each + Contributor provides its Contributions) on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or + implied, including, without limitation, any warranties or conditions + of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A + PARTICULAR PURPOSE. You are solely responsible for determining the + appropriateness of using or redistributing the Work and assume any + risks associated with Your exercise of permissions under this License. + + 8. Limitation of Liability. In no event and under no legal theory, + whether in tort (including negligence), contract, or otherwise, + unless required by applicable law (such as deliberate and grossly + negligent acts) or agreed to in writing, shall any Contributor be + liable to You for damages, including any direct, indirect, special, + incidental, or consequential damages of any character arising as a + result of this License or out of the use or inability to use the + Work (including but not limited to damages for loss of goodwill, + work stoppage, computer failure or malfunction, or any and all + other commercial damages or losses), even if such Contributor + has been advised of the possibility of such damages. + + 9. Accepting Warranty or Additional Liability. While redistributing + the Work or Derivative Works thereof, You may choose to offer, + and charge a fee for, acceptance of support, warranty, indemnity, + or other liability obligations and/or rights consistent with this + License. However, in accepting such obligations, You may act only + on Your own behalf and on Your sole responsibility, not on behalf + of any other Contributor, and only if You agree to indemnify, + defend, and hold each Contributor harmless for any liability + incurred by, or claims asserted against, such Contributor by reason + of your accepting any such warranty or additional liability. 
+ + END OF TERMS AND CONDITIONS + + APPENDIX: How to apply the Apache License to your work. + + To apply the Apache License to your work, attach the following + boilerplate notice, with the fields enclosed by brackets "{}" + replaced with your own identifying information. (Don't include + the brackets!) The text should be enclosed in the appropriate + comment syntax for the file format. We also recommend that a + file or class name and description of purpose be included on the + same "printed page" as the copyright notice for easier + identification within third-party archives. + + Copyright 2018-2020 Amazon.com, Inc. or its affiliates. All Rights Reserved. + + Licensed under the Apache License, Version 2.0 (the "License"); + you may not use this file except in compliance with the License. + You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + + Unless required by applicable law or agreed to in writing, software + distributed under the License is distributed on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + See the License for the specific language governing permissions and + limitations under the License. 
\ No newline at end of file diff --git a/amplify/functions/fetchDocuments/node_modules/@smithy/util-utf8/README.md b/amplify/functions/fetchDocuments/node_modules/@smithy/util-utf8/README.md new file mode 100644 index 0000000..fc5db6d --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@smithy/util-utf8/README.md @@ -0,0 +1,4 @@ +# @smithy/util-utf8 + +[![NPM version](https://img.shields.io/npm/v/@smithy/util-utf8/latest.svg)](https://www.npmjs.com/package/@smithy/util-utf8) +[![NPM downloads](https://img.shields.io/npm/dm/@smithy/util-utf8.svg)](https://www.npmjs.com/package/@smithy/util-utf8) diff --git a/amplify/functions/fetchDocuments/node_modules/@smithy/util-utf8/dist-cjs/fromUtf8.browser.js b/amplify/functions/fetchDocuments/node_modules/@smithy/util-utf8/dist-cjs/fromUtf8.browser.js new file mode 100644 index 0000000..532e610 --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@smithy/util-utf8/dist-cjs/fromUtf8.browser.js @@ -0,0 +1 @@ +module.exports = require("./index.js"); \ No newline at end of file diff --git a/amplify/functions/fetchDocuments/node_modules/@smithy/util-utf8/dist-cjs/fromUtf8.js b/amplify/functions/fetchDocuments/node_modules/@smithy/util-utf8/dist-cjs/fromUtf8.js new file mode 100644 index 0000000..532e610 --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@smithy/util-utf8/dist-cjs/fromUtf8.js @@ -0,0 +1 @@ +module.exports = require("./index.js"); \ No newline at end of file diff --git a/amplify/functions/fetchDocuments/node_modules/@smithy/util-utf8/dist-cjs/index.js b/amplify/functions/fetchDocuments/node_modules/@smithy/util-utf8/dist-cjs/index.js new file mode 100644 index 0000000..0b22680 --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@smithy/util-utf8/dist-cjs/index.js @@ -0,0 +1,65 @@ +var __defProp = Object.defineProperty; +var __getOwnPropDesc = Object.getOwnPropertyDescriptor; +var __getOwnPropNames = Object.getOwnPropertyNames; +var __hasOwnProp = 
Object.prototype.hasOwnProperty; +var __name = (target, value) => __defProp(target, "name", { value, configurable: true }); +var __export = (target, all) => { + for (var name in all) + __defProp(target, name, { get: all[name], enumerable: true }); +}; +var __copyProps = (to, from, except, desc) => { + if (from && typeof from === "object" || typeof from === "function") { + for (let key of __getOwnPropNames(from)) + if (!__hasOwnProp.call(to, key) && key !== except) + __defProp(to, key, { get: () => from[key], enumerable: !(desc = __getOwnPropDesc(from, key)) || desc.enumerable }); + } + return to; +}; +var __toCommonJS = (mod) => __copyProps(__defProp({}, "__esModule", { value: true }), mod); + +// src/index.ts +var src_exports = {}; +__export(src_exports, { + fromUtf8: () => fromUtf8, + toUint8Array: () => toUint8Array, + toUtf8: () => toUtf8 +}); +module.exports = __toCommonJS(src_exports); + +// src/fromUtf8.ts +var import_util_buffer_from = require("@smithy/util-buffer-from"); +var fromUtf8 = /* @__PURE__ */ __name((input) => { + const buf = (0, import_util_buffer_from.fromString)(input, "utf8"); + return new Uint8Array(buf.buffer, buf.byteOffset, buf.byteLength / Uint8Array.BYTES_PER_ELEMENT); +}, "fromUtf8"); + +// src/toUint8Array.ts +var toUint8Array = /* @__PURE__ */ __name((data) => { + if (typeof data === "string") { + return fromUtf8(data); + } + if (ArrayBuffer.isView(data)) { + return new Uint8Array(data.buffer, data.byteOffset, data.byteLength / Uint8Array.BYTES_PER_ELEMENT); + } + return new Uint8Array(data); +}, "toUint8Array"); + +// src/toUtf8.ts + +var toUtf8 = /* @__PURE__ */ __name((input) => { + if (typeof input === "string") { + return input; + } + if (typeof input !== "object" || typeof input.byteOffset !== "number" || typeof input.byteLength !== "number") { + throw new Error("@smithy/util-utf8: toUtf8 encoder function only accepts string | Uint8Array."); + } + return (0, import_util_buffer_from.fromArrayBuffer)(input.buffer, 
input.byteOffset, input.byteLength).toString("utf8"); +}, "toUtf8"); +// Annotate the CommonJS export names for ESM import in node: + +0 && (module.exports = { + fromUtf8, + toUint8Array, + toUtf8 +}); + diff --git a/amplify/functions/fetchDocuments/node_modules/@smithy/util-utf8/dist-cjs/toUint8Array.js b/amplify/functions/fetchDocuments/node_modules/@smithy/util-utf8/dist-cjs/toUint8Array.js new file mode 100644 index 0000000..532e610 --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@smithy/util-utf8/dist-cjs/toUint8Array.js @@ -0,0 +1 @@ +module.exports = require("./index.js"); \ No newline at end of file diff --git a/amplify/functions/fetchDocuments/node_modules/@smithy/util-utf8/dist-cjs/toUtf8.browser.js b/amplify/functions/fetchDocuments/node_modules/@smithy/util-utf8/dist-cjs/toUtf8.browser.js new file mode 100644 index 0000000..532e610 --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@smithy/util-utf8/dist-cjs/toUtf8.browser.js @@ -0,0 +1 @@ +module.exports = require("./index.js"); \ No newline at end of file diff --git a/amplify/functions/fetchDocuments/node_modules/@smithy/util-utf8/dist-cjs/toUtf8.js b/amplify/functions/fetchDocuments/node_modules/@smithy/util-utf8/dist-cjs/toUtf8.js new file mode 100644 index 0000000..532e610 --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@smithy/util-utf8/dist-cjs/toUtf8.js @@ -0,0 +1 @@ +module.exports = require("./index.js"); \ No newline at end of file diff --git a/amplify/functions/fetchDocuments/node_modules/@smithy/util-utf8/dist-es/fromUtf8.browser.js b/amplify/functions/fetchDocuments/node_modules/@smithy/util-utf8/dist-es/fromUtf8.browser.js new file mode 100644 index 0000000..7344190 --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@smithy/util-utf8/dist-es/fromUtf8.browser.js @@ -0,0 +1 @@ +export const fromUtf8 = (input) => new TextEncoder().encode(input); diff --git 
a/amplify/functions/fetchDocuments/node_modules/@smithy/util-utf8/dist-es/fromUtf8.js b/amplify/functions/fetchDocuments/node_modules/@smithy/util-utf8/dist-es/fromUtf8.js new file mode 100644 index 0000000..6dc438b --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@smithy/util-utf8/dist-es/fromUtf8.js @@ -0,0 +1,5 @@ +import { fromString } from "@smithy/util-buffer-from"; +export const fromUtf8 = (input) => { + const buf = fromString(input, "utf8"); + return new Uint8Array(buf.buffer, buf.byteOffset, buf.byteLength / Uint8Array.BYTES_PER_ELEMENT); +}; diff --git a/amplify/functions/fetchDocuments/node_modules/@smithy/util-utf8/dist-es/index.js b/amplify/functions/fetchDocuments/node_modules/@smithy/util-utf8/dist-es/index.js new file mode 100644 index 0000000..00ba465 --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@smithy/util-utf8/dist-es/index.js @@ -0,0 +1,3 @@ +export * from "./fromUtf8"; +export * from "./toUint8Array"; +export * from "./toUtf8"; diff --git a/amplify/functions/fetchDocuments/node_modules/@smithy/util-utf8/dist-es/toUint8Array.js b/amplify/functions/fetchDocuments/node_modules/@smithy/util-utf8/dist-es/toUint8Array.js new file mode 100644 index 0000000..2cd36f7 --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@smithy/util-utf8/dist-es/toUint8Array.js @@ -0,0 +1,10 @@ +import { fromUtf8 } from "./fromUtf8"; +export const toUint8Array = (data) => { + if (typeof data === "string") { + return fromUtf8(data); + } + if (ArrayBuffer.isView(data)) { + return new Uint8Array(data.buffer, data.byteOffset, data.byteLength / Uint8Array.BYTES_PER_ELEMENT); + } + return new Uint8Array(data); +}; diff --git a/amplify/functions/fetchDocuments/node_modules/@smithy/util-utf8/dist-es/toUtf8.browser.js b/amplify/functions/fetchDocuments/node_modules/@smithy/util-utf8/dist-es/toUtf8.browser.js new file mode 100644 index 0000000..c292127 --- /dev/null +++ 
b/amplify/functions/fetchDocuments/node_modules/@smithy/util-utf8/dist-es/toUtf8.browser.js @@ -0,0 +1,9 @@ +export const toUtf8 = (input) => { + if (typeof input === "string") { + return input; + } + if (typeof input !== "object" || typeof input.byteOffset !== "number" || typeof input.byteLength !== "number") { + throw new Error("@smithy/util-utf8: toUtf8 encoder function only accepts string | Uint8Array."); + } + return new TextDecoder("utf-8").decode(input); +}; diff --git a/amplify/functions/fetchDocuments/node_modules/@smithy/util-utf8/dist-es/toUtf8.js b/amplify/functions/fetchDocuments/node_modules/@smithy/util-utf8/dist-es/toUtf8.js new file mode 100644 index 0000000..7be8745 --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@smithy/util-utf8/dist-es/toUtf8.js @@ -0,0 +1,10 @@ +import { fromArrayBuffer } from "@smithy/util-buffer-from"; +export const toUtf8 = (input) => { + if (typeof input === "string") { + return input; + } + if (typeof input !== "object" || typeof input.byteOffset !== "number" || typeof input.byteLength !== "number") { + throw new Error("@smithy/util-utf8: toUtf8 encoder function only accepts string | Uint8Array."); + } + return fromArrayBuffer(input.buffer, input.byteOffset, input.byteLength).toString("utf8"); +}; diff --git a/amplify/functions/fetchDocuments/node_modules/@smithy/util-utf8/dist-types/fromUtf8.browser.d.ts b/amplify/functions/fetchDocuments/node_modules/@smithy/util-utf8/dist-types/fromUtf8.browser.d.ts new file mode 100644 index 0000000..dd91981 --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@smithy/util-utf8/dist-types/fromUtf8.browser.d.ts @@ -0,0 +1 @@ +export declare const fromUtf8: (input: string) => Uint8Array; diff --git a/amplify/functions/fetchDocuments/node_modules/@smithy/util-utf8/dist-types/fromUtf8.d.ts b/amplify/functions/fetchDocuments/node_modules/@smithy/util-utf8/dist-types/fromUtf8.d.ts new file mode 100644 index 0000000..dd91981 --- /dev/null +++ 
b/amplify/functions/fetchDocuments/node_modules/@smithy/util-utf8/dist-types/fromUtf8.d.ts @@ -0,0 +1 @@ +export declare const fromUtf8: (input: string) => Uint8Array; diff --git a/amplify/functions/fetchDocuments/node_modules/@smithy/util-utf8/dist-types/index.d.ts b/amplify/functions/fetchDocuments/node_modules/@smithy/util-utf8/dist-types/index.d.ts new file mode 100644 index 0000000..00ba465 --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@smithy/util-utf8/dist-types/index.d.ts @@ -0,0 +1,3 @@ +export * from "./fromUtf8"; +export * from "./toUint8Array"; +export * from "./toUtf8"; diff --git a/amplify/functions/fetchDocuments/node_modules/@smithy/util-utf8/dist-types/toUint8Array.d.ts b/amplify/functions/fetchDocuments/node_modules/@smithy/util-utf8/dist-types/toUint8Array.d.ts new file mode 100644 index 0000000..11b6342 --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@smithy/util-utf8/dist-types/toUint8Array.d.ts @@ -0,0 +1 @@ +export declare const toUint8Array: (data: string | ArrayBuffer | ArrayBufferView) => Uint8Array; diff --git a/amplify/functions/fetchDocuments/node_modules/@smithy/util-utf8/dist-types/toUtf8.browser.d.ts b/amplify/functions/fetchDocuments/node_modules/@smithy/util-utf8/dist-types/toUtf8.browser.d.ts new file mode 100644 index 0000000..8494acd --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@smithy/util-utf8/dist-types/toUtf8.browser.d.ts @@ -0,0 +1,7 @@ +/** + * + * This does not convert non-utf8 strings to utf8, it only passes through strings if + * a string is received instead of a Uint8Array. 
+ * + */ +export declare const toUtf8: (input: Uint8Array | string) => string; diff --git a/amplify/functions/fetchDocuments/node_modules/@smithy/util-utf8/dist-types/toUtf8.d.ts b/amplify/functions/fetchDocuments/node_modules/@smithy/util-utf8/dist-types/toUtf8.d.ts new file mode 100644 index 0000000..8494acd --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@smithy/util-utf8/dist-types/toUtf8.d.ts @@ -0,0 +1,7 @@ +/** + * + * This does not convert non-utf8 strings to utf8, it only passes through strings if + * a string is received instead of a Uint8Array. + * + */ +export declare const toUtf8: (input: Uint8Array | string) => string; diff --git a/amplify/functions/fetchDocuments/node_modules/@smithy/util-utf8/dist-types/ts3.4/fromUtf8.browser.d.ts b/amplify/functions/fetchDocuments/node_modules/@smithy/util-utf8/dist-types/ts3.4/fromUtf8.browser.d.ts new file mode 100644 index 0000000..39f3d6d --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@smithy/util-utf8/dist-types/ts3.4/fromUtf8.browser.d.ts @@ -0,0 +1 @@ +export declare const fromUtf8: (input: string) => Uint8Array; diff --git a/amplify/functions/fetchDocuments/node_modules/@smithy/util-utf8/dist-types/ts3.4/fromUtf8.d.ts b/amplify/functions/fetchDocuments/node_modules/@smithy/util-utf8/dist-types/ts3.4/fromUtf8.d.ts new file mode 100644 index 0000000..39f3d6d --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@smithy/util-utf8/dist-types/ts3.4/fromUtf8.d.ts @@ -0,0 +1 @@ +export declare const fromUtf8: (input: string) => Uint8Array; diff --git a/amplify/functions/fetchDocuments/node_modules/@smithy/util-utf8/dist-types/ts3.4/index.d.ts b/amplify/functions/fetchDocuments/node_modules/@smithy/util-utf8/dist-types/ts3.4/index.d.ts new file mode 100644 index 0000000..ef9761d --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@smithy/util-utf8/dist-types/ts3.4/index.d.ts @@ -0,0 +1,3 @@ +export * from "./fromUtf8"; +export * from "./toUint8Array"; 
+export * from "./toUtf8"; diff --git a/amplify/functions/fetchDocuments/node_modules/@smithy/util-utf8/dist-types/ts3.4/toUint8Array.d.ts b/amplify/functions/fetchDocuments/node_modules/@smithy/util-utf8/dist-types/ts3.4/toUint8Array.d.ts new file mode 100644 index 0000000..562fe10 --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@smithy/util-utf8/dist-types/ts3.4/toUint8Array.d.ts @@ -0,0 +1 @@ +export declare const toUint8Array: (data: string | ArrayBuffer | ArrayBufferView) => Uint8Array; diff --git a/amplify/functions/fetchDocuments/node_modules/@smithy/util-utf8/dist-types/ts3.4/toUtf8.browser.d.ts b/amplify/functions/fetchDocuments/node_modules/@smithy/util-utf8/dist-types/ts3.4/toUtf8.browser.d.ts new file mode 100644 index 0000000..33511ad --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@smithy/util-utf8/dist-types/ts3.4/toUtf8.browser.d.ts @@ -0,0 +1,7 @@ +/** + * + * This does not convert non-utf8 strings to utf8, it only passes through strings if + * a string is received instead of a Uint8Array. + * + */ +export declare const toUtf8: (input: Uint8Array | string) => string; diff --git a/amplify/functions/fetchDocuments/node_modules/@smithy/util-utf8/dist-types/ts3.4/toUtf8.d.ts b/amplify/functions/fetchDocuments/node_modules/@smithy/util-utf8/dist-types/ts3.4/toUtf8.d.ts new file mode 100644 index 0000000..33511ad --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@smithy/util-utf8/dist-types/ts3.4/toUtf8.d.ts @@ -0,0 +1,7 @@ +/** + * + * This does not convert non-utf8 strings to utf8, it only passes through strings if + * a string is received instead of a Uint8Array. 
+ * + */ +export declare const toUtf8: (input: Uint8Array | string) => string; diff --git a/amplify/functions/fetchDocuments/node_modules/@smithy/util-utf8/package.json b/amplify/functions/fetchDocuments/node_modules/@smithy/util-utf8/package.json new file mode 100644 index 0000000..e33060d --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@smithy/util-utf8/package.json @@ -0,0 +1,66 @@ +{ + "name": "@smithy/util-utf8", + "version": "4.0.0", + "description": "A UTF-8 string <-> UInt8Array converter", + "main": "./dist-cjs/index.js", + "module": "./dist-es/index.js", + "scripts": { + "build": "concurrently 'yarn:build:cjs' 'yarn:build:es' 'yarn:build:types && yarn build:types:downlevel'", + "build:cjs": "node ../../scripts/inline util-utf8", + "build:es": "yarn g:tsc -p tsconfig.es.json", + "build:types": "yarn g:tsc -p tsconfig.types.json", + "build:types:downlevel": "rimraf dist-types/ts3.4 && downlevel-dts dist-types dist-types/ts3.4", + "stage-release": "rimraf ./.release && yarn pack && mkdir ./.release && tar zxvf ./package.tgz --directory ./.release && rm ./package.tgz", + "clean": "rimraf ./dist-* && rimraf *.tsbuildinfo || exit 0", + "lint": "eslint -c ../../.eslintrc.js \"src/**/*.ts\"", + "format": "prettier --config ../../prettier.config.js --ignore-path ../.prettierignore --write \"**/*.{ts,md,json}\"", + "test": "yarn g:vitest run", + "test:watch": "yarn g:vitest watch" + }, + "author": { + "name": "AWS SDK for JavaScript Team", + "url": "https://aws.amazon.com/javascript/" + }, + "license": "Apache-2.0", + "dependencies": { + "@smithy/util-buffer-from": "^4.0.0", + "tslib": "^2.6.2" + }, + "devDependencies": { + "concurrently": "7.0.0", + "downlevel-dts": "0.10.1", + "rimraf": "3.0.2", + "typedoc": "0.23.23" + }, + "types": "./dist-types/index.d.ts", + "engines": { + "node": ">=18.0.0" + }, + "typesVersions": { + "<4.0": { + "dist-types/*": [ + "dist-types/ts3.4/*" + ] + } + }, + "files": [ + "dist-*/**" + ], + "browser": { + 
"./dist-es/fromUtf8": "./dist-es/fromUtf8.browser", + "./dist-es/toUtf8": "./dist-es/toUtf8.browser" + }, + "react-native": {}, + "homepage": "https://github.com/awslabs/smithy-typescript/tree/main/packages/util-utf8", + "repository": { + "type": "git", + "url": "https://github.com/awslabs/smithy-typescript.git", + "directory": "packages/util-utf8" + }, + "typedoc": { + "entryPoint": "src/index.ts" + }, + "publishConfig": { + "directory": ".release/package" + } +} \ No newline at end of file diff --git a/amplify/functions/fetchDocuments/node_modules/@smithy/util-waiter/LICENSE b/amplify/functions/fetchDocuments/node_modules/@smithy/util-waiter/LICENSE new file mode 100644 index 0000000..7b6491b --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@smithy/util-waiter/LICENSE @@ -0,0 +1,201 @@ +Apache License + Version 2.0, January 2004 + http://www.apache.org/licenses/ + + TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION + + 1. Definitions. + + "License" shall mean the terms and conditions for use, reproduction, + and distribution as defined by Sections 1 through 9 of this document. + + "Licensor" shall mean the copyright owner or entity authorized by + the copyright owner that is granting the License. + + "Legal Entity" shall mean the union of the acting entity and all + other entities that control, are controlled by, or are under common + control with that entity. For the purposes of this definition, + "control" means (i) the power, direct or indirect, to cause the + direction or management of such entity, whether by contract or + otherwise, or (ii) ownership of fifty percent (50%) or more of the + outstanding shares, or (iii) beneficial ownership of such entity. + + "You" (or "Your") shall mean an individual or Legal Entity + exercising permissions granted by this License. 
+ + "Source" form shall mean the preferred form for making modifications, + including but not limited to software source code, documentation + source, and configuration files. + + "Object" form shall mean any form resulting from mechanical + transformation or translation of a Source form, including but + not limited to compiled object code, generated documentation, + and conversions to other media types. + + "Work" shall mean the work of authorship, whether in Source or + Object form, made available under the License, as indicated by a + copyright notice that is included in or attached to the work + (an example is provided in the Appendix below). + + "Derivative Works" shall mean any work, whether in Source or Object + form, that is based on (or derived from) the Work and for which the + editorial revisions, annotations, elaborations, or other modifications + represent, as a whole, an original work of authorship. For the purposes + of this License, Derivative Works shall not include works that remain + separable from, or merely link (or bind by name) to the interfaces of, + the Work and Derivative Works thereof. + + "Contribution" shall mean any work of authorship, including + the original version of the Work and any modifications or additions + to that Work or Derivative Works thereof, that is intentionally + submitted to Licensor for inclusion in the Work by the copyright owner + or by an individual or Legal Entity authorized to submit on behalf of + the copyright owner. 
For the purposes of this definition, "submitted" + means any form of electronic, verbal, or written communication sent + to the Licensor or its representatives, including but not limited to + communication on electronic mailing lists, source code control systems, + and issue tracking systems that are managed by, or on behalf of, the + Licensor for the purpose of discussing and improving the Work, but + excluding communication that is conspicuously marked or otherwise + designated in writing by the copyright owner as "Not a Contribution." + + "Contributor" shall mean Licensor and any individual or Legal Entity + on behalf of whom a Contribution has been received by Licensor and + subsequently incorporated within the Work. + + 2. Grant of Copyright License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + copyright license to reproduce, prepare Derivative Works of, + publicly display, publicly perform, sublicense, and distribute the + Work and such Derivative Works in Source or Object form. + + 3. Grant of Patent License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + (except as stated in this section) patent license to make, have made, + use, offer to sell, sell, import, and otherwise transfer the Work, + where such license applies only to those patent claims licensable + by such Contributor that are necessarily infringed by their + Contribution(s) alone or by combination of their Contribution(s) + with the Work to which such Contribution(s) was submitted. 
If You + institute patent litigation against any entity (including a + cross-claim or counterclaim in a lawsuit) alleging that the Work + or a Contribution incorporated within the Work constitutes direct + or contributory patent infringement, then any patent licenses + granted to You under this License for that Work shall terminate + as of the date such litigation is filed. + + 4. Redistribution. You may reproduce and distribute copies of the + Work or Derivative Works thereof in any medium, with or without + modifications, and in Source or Object form, provided that You + meet the following conditions: + + (a) You must give any other recipients of the Work or + Derivative Works a copy of this License; and + + (b) You must cause any modified files to carry prominent notices + stating that You changed the files; and + + (c) You must retain, in the Source form of any Derivative Works + that You distribute, all copyright, patent, trademark, and + attribution notices from the Source form of the Work, + excluding those notices that do not pertain to any part of + the Derivative Works; and + + (d) If the Work includes a "NOTICE" text file as part of its + distribution, then any Derivative Works that You distribute must + include a readable copy of the attribution notices contained + within such NOTICE file, excluding those notices that do not + pertain to any part of the Derivative Works, in at least one + of the following places: within a NOTICE text file distributed + as part of the Derivative Works; within the Source form or + documentation, if provided along with the Derivative Works; or, + within a display generated by the Derivative Works, if and + wherever such third-party notices normally appear. The contents + of the NOTICE file are for informational purposes only and + do not modify the License. 
You may add Your own attribution + notices within Derivative Works that You distribute, alongside + or as an addendum to the NOTICE text from the Work, provided + that such additional attribution notices cannot be construed + as modifying the License. + + You may add Your own copyright statement to Your modifications and + may provide additional or different license terms and conditions + for use, reproduction, or distribution of Your modifications, or + for any such Derivative Works as a whole, provided Your use, + reproduction, and distribution of the Work otherwise complies with + the conditions stated in this License. + + 5. Submission of Contributions. Unless You explicitly state otherwise, + any Contribution intentionally submitted for inclusion in the Work + by You to the Licensor shall be under the terms and conditions of + this License, without any additional terms or conditions. + Notwithstanding the above, nothing herein shall supersede or modify + the terms of any separate license agreement you may have executed + with Licensor regarding such Contributions. + + 6. Trademarks. This License does not grant permission to use the trade + names, trademarks, service marks, or product names of the Licensor, + except as required for reasonable and customary use in describing the + origin of the Work and reproducing the content of the NOTICE file. + + 7. Disclaimer of Warranty. Unless required by applicable law or + agreed to in writing, Licensor provides the Work (and each + Contributor provides its Contributions) on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or + implied, including, without limitation, any warranties or conditions + of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A + PARTICULAR PURPOSE. You are solely responsible for determining the + appropriateness of using or redistributing the Work and assume any + risks associated with Your exercise of permissions under this License. + + 8. 
Limitation of Liability. In no event and under no legal theory, + whether in tort (including negligence), contract, or otherwise, + unless required by applicable law (such as deliberate and grossly + negligent acts) or agreed to in writing, shall any Contributor be + liable to You for damages, including any direct, indirect, special, + incidental, or consequential damages of any character arising as a + result of this License or out of the use or inability to use the + Work (including but not limited to damages for loss of goodwill, + work stoppage, computer failure or malfunction, or any and all + other commercial damages or losses), even if such Contributor + has been advised of the possibility of such damages. + + 9. Accepting Warranty or Additional Liability. While redistributing + the Work or Derivative Works thereof, You may choose to offer, + and charge a fee for, acceptance of support, warranty, indemnity, + or other liability obligations and/or rights consistent with this + License. However, in accepting such obligations, You may act only + on Your own behalf and on Your sole responsibility, not on behalf + of any other Contributor, and only if You agree to indemnify, + defend, and hold each Contributor harmless for any liability + incurred by, or claims asserted against, such Contributor by reason + of your accepting any such warranty or additional liability. + + END OF TERMS AND CONDITIONS + + APPENDIX: How to apply the Apache License to your work. + + To apply the Apache License to your work, attach the following + boilerplate notice, with the fields enclosed by brackets "{}" + replaced with your own identifying information. (Don't include + the brackets!) The text should be enclosed in the appropriate + comment syntax for the file format. We also recommend that a + file or class name and description of purpose be included on the + same "printed page" as the copyright notice for easier + identification within third-party archives. 
+ + Copyright 2018-2020 Amazon.com, Inc. or its affiliates. All Rights Reserved. + + Licensed under the Apache License, Version 2.0 (the "License"); + you may not use this file except in compliance with the License. + You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + + Unless required by applicable law or agreed to in writing, software + distributed under the License is distributed on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + See the License for the specific language governing permissions and + limitations under the License. \ No newline at end of file diff --git a/amplify/functions/fetchDocuments/node_modules/@smithy/util-waiter/README.md b/amplify/functions/fetchDocuments/node_modules/@smithy/util-waiter/README.md new file mode 100644 index 0000000..17169a8 --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@smithy/util-waiter/README.md @@ -0,0 +1,10 @@ +# @smithy/util-waiter + +[![NPM version](https://img.shields.io/npm/v/@smithy/util-waiter/latest.svg)](https://www.npmjs.com/package/@smithy/util-waiter) +[![NPM downloads](https://img.shields.io/npm/dm/@smithy/util-waiter.svg)](https://www.npmjs.com/package/@smithy/util-waiter) + +> An internal package + +## Usage + +You probably shouldn't, at least directly. 
diff --git a/amplify/functions/fetchDocuments/node_modules/@smithy/util-waiter/dist-cjs/createWaiter.js b/amplify/functions/fetchDocuments/node_modules/@smithy/util-waiter/dist-cjs/createWaiter.js new file mode 100644 index 0000000..532e610 --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@smithy/util-waiter/dist-cjs/createWaiter.js @@ -0,0 +1 @@ +module.exports = require("./index.js"); \ No newline at end of file diff --git a/amplify/functions/fetchDocuments/node_modules/@smithy/util-waiter/dist-cjs/index.js b/amplify/functions/fetchDocuments/node_modules/@smithy/util-waiter/dist-cjs/index.js new file mode 100644 index 0000000..c038e3b --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@smithy/util-waiter/dist-cjs/index.js @@ -0,0 +1,185 @@ +var __defProp = Object.defineProperty; +var __getOwnPropDesc = Object.getOwnPropertyDescriptor; +var __getOwnPropNames = Object.getOwnPropertyNames; +var __hasOwnProp = Object.prototype.hasOwnProperty; +var __name = (target, value) => __defProp(target, "name", { value, configurable: true }); +var __export = (target, all) => { + for (var name in all) + __defProp(target, name, { get: all[name], enumerable: true }); +}; +var __copyProps = (to, from, except, desc) => { + if (from && typeof from === "object" || typeof from === "function") { + for (let key of __getOwnPropNames(from)) + if (!__hasOwnProp.call(to, key) && key !== except) + __defProp(to, key, { get: () => from[key], enumerable: !(desc = __getOwnPropDesc(from, key)) || desc.enumerable }); + } + return to; +}; +var __toCommonJS = (mod) => __copyProps(__defProp({}, "__esModule", { value: true }), mod); + +// src/index.ts +var src_exports = {}; +__export(src_exports, { + WaiterState: () => WaiterState, + checkExceptions: () => checkExceptions, + createWaiter: () => createWaiter, + waiterServiceDefaults: () => waiterServiceDefaults +}); +module.exports = __toCommonJS(src_exports); + +// src/utils/sleep.ts +var sleep = /* @__PURE__ */ 
__name((seconds) => { + return new Promise((resolve) => setTimeout(resolve, seconds * 1e3)); +}, "sleep"); + +// src/waiter.ts +var waiterServiceDefaults = { + minDelay: 2, + maxDelay: 120 +}; +var WaiterState = /* @__PURE__ */ ((WaiterState2) => { + WaiterState2["ABORTED"] = "ABORTED"; + WaiterState2["FAILURE"] = "FAILURE"; + WaiterState2["SUCCESS"] = "SUCCESS"; + WaiterState2["RETRY"] = "RETRY"; + WaiterState2["TIMEOUT"] = "TIMEOUT"; + return WaiterState2; +})(WaiterState || {}); +var checkExceptions = /* @__PURE__ */ __name((result) => { + if (result.state === "ABORTED" /* ABORTED */) { + const abortError = new Error( + `${JSON.stringify({ + ...result, + reason: "Request was aborted" + })}` + ); + abortError.name = "AbortError"; + throw abortError; + } else if (result.state === "TIMEOUT" /* TIMEOUT */) { + const timeoutError = new Error( + `${JSON.stringify({ + ...result, + reason: "Waiter has timed out" + })}` + ); + timeoutError.name = "TimeoutError"; + throw timeoutError; + } else if (result.state !== "SUCCESS" /* SUCCESS */) { + throw new Error(`${JSON.stringify(result)}`); + } + return result; +}, "checkExceptions"); + +// src/poller.ts +var exponentialBackoffWithJitter = /* @__PURE__ */ __name((minDelay, maxDelay, attemptCeiling, attempt) => { + if (attempt > attemptCeiling) + return maxDelay; + const delay = minDelay * 2 ** (attempt - 1); + return randomInRange(minDelay, delay); +}, "exponentialBackoffWithJitter"); +var randomInRange = /* @__PURE__ */ __name((min, max) => min + Math.random() * (max - min), "randomInRange"); +var runPolling = /* @__PURE__ */ __name(async ({ minDelay, maxDelay, maxWaitTime, abortController, client, abortSignal }, input, acceptorChecks) => { + const observedResponses = {}; + const { state, reason } = await acceptorChecks(client, input); + if (reason) { + const message = createMessageFromResponse(reason); + observedResponses[message] |= 0; + observedResponses[message] += 1; + } + if (state !== "RETRY" /* RETRY */) { + return 
{ state, reason, observedResponses }; + } + let currentAttempt = 1; + const waitUntil = Date.now() + maxWaitTime * 1e3; + const attemptCeiling = Math.log(maxDelay / minDelay) / Math.log(2) + 1; + while (true) { + if (abortController?.signal?.aborted || abortSignal?.aborted) { + const message = "AbortController signal aborted."; + observedResponses[message] |= 0; + observedResponses[message] += 1; + return { state: "ABORTED" /* ABORTED */, observedResponses }; + } + const delay = exponentialBackoffWithJitter(minDelay, maxDelay, attemptCeiling, currentAttempt); + if (Date.now() + delay * 1e3 > waitUntil) { + return { state: "TIMEOUT" /* TIMEOUT */, observedResponses }; + } + await sleep(delay); + const { state: state2, reason: reason2 } = await acceptorChecks(client, input); + if (reason2) { + const message = createMessageFromResponse(reason2); + observedResponses[message] |= 0; + observedResponses[message] += 1; + } + if (state2 !== "RETRY" /* RETRY */) { + return { state: state2, reason: reason2, observedResponses }; + } + currentAttempt += 1; + } +}, "runPolling"); +var createMessageFromResponse = /* @__PURE__ */ __name((reason) => { + if (reason?.$responseBodyText) { + return `Deserialization error for body: ${reason.$responseBodyText}`; + } + if (reason?.$metadata?.httpStatusCode) { + if (reason.$response || reason.message) { + return `${reason.$response.statusCode ?? reason.$metadata.httpStatusCode ?? "Unknown"}: ${reason.message}`; + } + return `${reason.$metadata.httpStatusCode}: OK`; + } + return String(reason?.message ?? JSON.stringify(reason) ?? 
"Unknown"); +}, "createMessageFromResponse"); + +// src/utils/validate.ts +var validateWaiterOptions = /* @__PURE__ */ __name((options) => { + if (options.maxWaitTime <= 0) { + throw new Error(`WaiterConfiguration.maxWaitTime must be greater than 0`); + } else if (options.minDelay <= 0) { + throw new Error(`WaiterConfiguration.minDelay must be greater than 0`); + } else if (options.maxDelay <= 0) { + throw new Error(`WaiterConfiguration.maxDelay must be greater than 0`); + } else if (options.maxWaitTime <= options.minDelay) { + throw new Error( + `WaiterConfiguration.maxWaitTime [${options.maxWaitTime}] must be greater than WaiterConfiguration.minDelay [${options.minDelay}] for this waiter` + ); + } else if (options.maxDelay < options.minDelay) { + throw new Error( + `WaiterConfiguration.maxDelay [${options.maxDelay}] must be greater than WaiterConfiguration.minDelay [${options.minDelay}] for this waiter` + ); + } +}, "validateWaiterOptions"); + +// src/createWaiter.ts +var abortTimeout = /* @__PURE__ */ __name(async (abortSignal) => { + return new Promise((resolve) => { + const onAbort = /* @__PURE__ */ __name(() => resolve({ state: "ABORTED" /* ABORTED */ }), "onAbort"); + if (typeof abortSignal.addEventListener === "function") { + abortSignal.addEventListener("abort", onAbort); + } else { + abortSignal.onabort = onAbort; + } + }); +}, "abortTimeout"); +var createWaiter = /* @__PURE__ */ __name(async (options, input, acceptorChecks) => { + const params = { + ...waiterServiceDefaults, + ...options + }; + validateWaiterOptions(params); + const exitConditions = [runPolling(params, input, acceptorChecks)]; + if (options.abortController) { + exitConditions.push(abortTimeout(options.abortController.signal)); + } + if (options.abortSignal) { + exitConditions.push(abortTimeout(options.abortSignal)); + } + return Promise.race(exitConditions); +}, "createWaiter"); +// Annotate the CommonJS export names for ESM import in node: + +0 && (module.exports = { + createWaiter, + 
waiterServiceDefaults, + WaiterState, + checkExceptions +}); + diff --git a/amplify/functions/fetchDocuments/node_modules/@smithy/util-waiter/dist-cjs/poller.js b/amplify/functions/fetchDocuments/node_modules/@smithy/util-waiter/dist-cjs/poller.js new file mode 100644 index 0000000..532e610 --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@smithy/util-waiter/dist-cjs/poller.js @@ -0,0 +1 @@ +module.exports = require("./index.js"); \ No newline at end of file diff --git a/amplify/functions/fetchDocuments/node_modules/@smithy/util-waiter/dist-cjs/utils/index.js b/amplify/functions/fetchDocuments/node_modules/@smithy/util-waiter/dist-cjs/utils/index.js new file mode 100644 index 0000000..0440577 --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@smithy/util-waiter/dist-cjs/utils/index.js @@ -0,0 +1 @@ +module.exports = require("../index.js"); \ No newline at end of file diff --git a/amplify/functions/fetchDocuments/node_modules/@smithy/util-waiter/dist-cjs/utils/sleep.js b/amplify/functions/fetchDocuments/node_modules/@smithy/util-waiter/dist-cjs/utils/sleep.js new file mode 100644 index 0000000..0440577 --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@smithy/util-waiter/dist-cjs/utils/sleep.js @@ -0,0 +1 @@ +module.exports = require("../index.js"); \ No newline at end of file diff --git a/amplify/functions/fetchDocuments/node_modules/@smithy/util-waiter/dist-cjs/utils/validate.js b/amplify/functions/fetchDocuments/node_modules/@smithy/util-waiter/dist-cjs/utils/validate.js new file mode 100644 index 0000000..0440577 --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@smithy/util-waiter/dist-cjs/utils/validate.js @@ -0,0 +1 @@ +module.exports = require("../index.js"); \ No newline at end of file diff --git a/amplify/functions/fetchDocuments/node_modules/@smithy/util-waiter/dist-cjs/waiter.js b/amplify/functions/fetchDocuments/node_modules/@smithy/util-waiter/dist-cjs/waiter.js new file mode 100644 index 
0000000..532e610 --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@smithy/util-waiter/dist-cjs/waiter.js @@ -0,0 +1 @@ +module.exports = require("./index.js"); \ No newline at end of file diff --git a/amplify/functions/fetchDocuments/node_modules/@smithy/util-waiter/dist-es/createWaiter.js b/amplify/functions/fetchDocuments/node_modules/@smithy/util-waiter/dist-es/createWaiter.js new file mode 100644 index 0000000..59bfdb9 --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@smithy/util-waiter/dist-es/createWaiter.js @@ -0,0 +1,29 @@ +import { runPolling } from "./poller"; +import { validateWaiterOptions } from "./utils"; +import { waiterServiceDefaults, WaiterState } from "./waiter"; +const abortTimeout = async (abortSignal) => { + return new Promise((resolve) => { + const onAbort = () => resolve({ state: WaiterState.ABORTED }); + if (typeof abortSignal.addEventListener === "function") { + abortSignal.addEventListener("abort", onAbort); + } + else { + abortSignal.onabort = onAbort; + } + }); +}; +export const createWaiter = async (options, input, acceptorChecks) => { + const params = { + ...waiterServiceDefaults, + ...options, + }; + validateWaiterOptions(params); + const exitConditions = [runPolling(params, input, acceptorChecks)]; + if (options.abortController) { + exitConditions.push(abortTimeout(options.abortController.signal)); + } + if (options.abortSignal) { + exitConditions.push(abortTimeout(options.abortSignal)); + } + return Promise.race(exitConditions); +}; diff --git a/amplify/functions/fetchDocuments/node_modules/@smithy/util-waiter/dist-es/index.js b/amplify/functions/fetchDocuments/node_modules/@smithy/util-waiter/dist-es/index.js new file mode 100644 index 0000000..d77f139 --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@smithy/util-waiter/dist-es/index.js @@ -0,0 +1,2 @@ +export * from "./createWaiter"; +export * from "./waiter"; diff --git 
a/amplify/functions/fetchDocuments/node_modules/@smithy/util-waiter/dist-es/poller.js b/amplify/functions/fetchDocuments/node_modules/@smithy/util-waiter/dist-es/poller.js new file mode 100644 index 0000000..d1a0ec0 --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@smithy/util-waiter/dist-es/poller.js @@ -0,0 +1,59 @@ +import { sleep } from "./utils/sleep"; +import { WaiterState } from "./waiter"; +const exponentialBackoffWithJitter = (minDelay, maxDelay, attemptCeiling, attempt) => { + if (attempt > attemptCeiling) + return maxDelay; + const delay = minDelay * 2 ** (attempt - 1); + return randomInRange(minDelay, delay); +}; +const randomInRange = (min, max) => min + Math.random() * (max - min); +export const runPolling = async ({ minDelay, maxDelay, maxWaitTime, abortController, client, abortSignal }, input, acceptorChecks) => { + const observedResponses = {}; + const { state, reason } = await acceptorChecks(client, input); + if (reason) { + const message = createMessageFromResponse(reason); + observedResponses[message] |= 0; + observedResponses[message] += 1; + } + if (state !== WaiterState.RETRY) { + return { state, reason, observedResponses }; + } + let currentAttempt = 1; + const waitUntil = Date.now() + maxWaitTime * 1000; + const attemptCeiling = Math.log(maxDelay / minDelay) / Math.log(2) + 1; + while (true) { + if (abortController?.signal?.aborted || abortSignal?.aborted) { + const message = "AbortController signal aborted."; + observedResponses[message] |= 0; + observedResponses[message] += 1; + return { state: WaiterState.ABORTED, observedResponses }; + } + const delay = exponentialBackoffWithJitter(minDelay, maxDelay, attemptCeiling, currentAttempt); + if (Date.now() + delay * 1000 > waitUntil) { + return { state: WaiterState.TIMEOUT, observedResponses }; + } + await sleep(delay); + const { state, reason } = await acceptorChecks(client, input); + if (reason) { + const message = createMessageFromResponse(reason); + 
observedResponses[message] |= 0; + observedResponses[message] += 1; + } + if (state !== WaiterState.RETRY) { + return { state, reason, observedResponses }; + } + currentAttempt += 1; + } +}; +const createMessageFromResponse = (reason) => { + if (reason?.$responseBodyText) { + return `Deserialization error for body: ${reason.$responseBodyText}`; + } + if (reason?.$metadata?.httpStatusCode) { + if (reason.$response || reason.message) { + return `${reason.$response.statusCode ?? reason.$metadata.httpStatusCode ?? "Unknown"}: ${reason.message}`; + } + return `${reason.$metadata.httpStatusCode}: OK`; + } + return String(reason?.message ?? JSON.stringify(reason) ?? "Unknown"); +}; diff --git a/amplify/functions/fetchDocuments/node_modules/@smithy/util-waiter/dist-es/utils/index.js b/amplify/functions/fetchDocuments/node_modules/@smithy/util-waiter/dist-es/utils/index.js new file mode 100644 index 0000000..e15a156 --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@smithy/util-waiter/dist-es/utils/index.js @@ -0,0 +1,2 @@ +export * from "./sleep"; +export * from "./validate"; diff --git a/amplify/functions/fetchDocuments/node_modules/@smithy/util-waiter/dist-es/utils/sleep.js b/amplify/functions/fetchDocuments/node_modules/@smithy/util-waiter/dist-es/utils/sleep.js new file mode 100644 index 0000000..789205d --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@smithy/util-waiter/dist-es/utils/sleep.js @@ -0,0 +1,3 @@ +export const sleep = (seconds) => { + return new Promise((resolve) => setTimeout(resolve, seconds * 1000)); +}; diff --git a/amplify/functions/fetchDocuments/node_modules/@smithy/util-waiter/dist-es/utils/validate.js b/amplify/functions/fetchDocuments/node_modules/@smithy/util-waiter/dist-es/utils/validate.js new file mode 100644 index 0000000..e094ea7 --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@smithy/util-waiter/dist-es/utils/validate.js @@ -0,0 +1,17 @@ +export const validateWaiterOptions = (options) => 
{ + if (options.maxWaitTime <= 0) { + throw new Error(`WaiterConfiguration.maxWaitTime must be greater than 0`); + } + else if (options.minDelay <= 0) { + throw new Error(`WaiterConfiguration.minDelay must be greater than 0`); + } + else if (options.maxDelay <= 0) { + throw new Error(`WaiterConfiguration.maxDelay must be greater than 0`); + } + else if (options.maxWaitTime <= options.minDelay) { + throw new Error(`WaiterConfiguration.maxWaitTime [${options.maxWaitTime}] must be greater than WaiterConfiguration.minDelay [${options.minDelay}] for this waiter`); + } + else if (options.maxDelay < options.minDelay) { + throw new Error(`WaiterConfiguration.maxDelay [${options.maxDelay}] must be greater than WaiterConfiguration.minDelay [${options.minDelay}] for this waiter`); + } +}; diff --git a/amplify/functions/fetchDocuments/node_modules/@smithy/util-waiter/dist-es/waiter.js b/amplify/functions/fetchDocuments/node_modules/@smithy/util-waiter/dist-es/waiter.js new file mode 100644 index 0000000..158c46a --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@smithy/util-waiter/dist-es/waiter.js @@ -0,0 +1,34 @@ +export const waiterServiceDefaults = { + minDelay: 2, + maxDelay: 120, +}; +export var WaiterState; +(function (WaiterState) { + WaiterState["ABORTED"] = "ABORTED"; + WaiterState["FAILURE"] = "FAILURE"; + WaiterState["SUCCESS"] = "SUCCESS"; + WaiterState["RETRY"] = "RETRY"; + WaiterState["TIMEOUT"] = "TIMEOUT"; +})(WaiterState || (WaiterState = {})); +export const checkExceptions = (result) => { + if (result.state === WaiterState.ABORTED) { + const abortError = new Error(`${JSON.stringify({ + ...result, + reason: "Request was aborted", + })}`); + abortError.name = "AbortError"; + throw abortError; + } + else if (result.state === WaiterState.TIMEOUT) { + const timeoutError = new Error(`${JSON.stringify({ + ...result, + reason: "Waiter has timed out", + })}`); + timeoutError.name = "TimeoutError"; + throw timeoutError; + } + else if (result.state !== 
WaiterState.SUCCESS) { + throw new Error(`${JSON.stringify(result)}`); + } + return result; +}; diff --git a/amplify/functions/fetchDocuments/node_modules/@smithy/util-waiter/dist-types/createWaiter.d.ts b/amplify/functions/fetchDocuments/node_modules/@smithy/util-waiter/dist-types/createWaiter.d.ts new file mode 100644 index 0000000..1695802 --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@smithy/util-waiter/dist-types/createWaiter.d.ts @@ -0,0 +1,11 @@ +import { WaiterOptions, WaiterResult } from "./waiter"; +/** + * Create a waiter promise that only resolves when: + * 1. Abort controller is signaled + * 2. Max wait time is reached + * 3. `acceptorChecks` succeeds, or fails + * Otherwise, it invokes `acceptorChecks` with exponential-backoff delay. + * + * @internal + */ +export declare const createWaiter: (options: WaiterOptions, input: Input, acceptorChecks: (client: Client, input: Input) => Promise) => Promise; diff --git a/amplify/functions/fetchDocuments/node_modules/@smithy/util-waiter/dist-types/index.d.ts b/amplify/functions/fetchDocuments/node_modules/@smithy/util-waiter/dist-types/index.d.ts new file mode 100644 index 0000000..d77f139 --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@smithy/util-waiter/dist-types/index.d.ts @@ -0,0 +1,2 @@ +export * from "./createWaiter"; +export * from "./waiter"; diff --git a/amplify/functions/fetchDocuments/node_modules/@smithy/util-waiter/dist-types/poller.d.ts b/amplify/functions/fetchDocuments/node_modules/@smithy/util-waiter/dist-types/poller.d.ts new file mode 100644 index 0000000..4008957 --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@smithy/util-waiter/dist-types/poller.d.ts @@ -0,0 +1,10 @@ +import { WaiterOptions, WaiterResult } from "./waiter"; +/** + * Function that runs polling as part of waiters. This will make one inital attempt and then + * subsequent attempts with an increasing delay. + * @param params - options passed to the waiter. 
+ * @param client - AWS SDK Client + * @param input - client input + * @param stateChecker - function that checks the acceptor states on each poll. + */ +export declare const runPolling: ({ minDelay, maxDelay, maxWaitTime, abortController, client, abortSignal }: WaiterOptions, input: Input, acceptorChecks: (client: Client, input: Input) => Promise) => Promise; diff --git a/amplify/functions/fetchDocuments/node_modules/@smithy/util-waiter/dist-types/ts3.4/createWaiter.d.ts b/amplify/functions/fetchDocuments/node_modules/@smithy/util-waiter/dist-types/ts3.4/createWaiter.d.ts new file mode 100644 index 0000000..f9b3242 --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@smithy/util-waiter/dist-types/ts3.4/createWaiter.d.ts @@ -0,0 +1,11 @@ +import { WaiterOptions, WaiterResult } from "./waiter"; +/** + * Create a waiter promise that only resolves when: + * 1. Abort controller is signaled + * 2. Max wait time is reached + * 3. `acceptorChecks` succeeds, or fails + * Otherwise, it invokes `acceptorChecks` with exponential-backoff delay. 
+ * + * @internal + */ +export declare const createWaiter: (options: WaiterOptions, input: Input, acceptorChecks: (client: Client, input: Input) => Promise) => Promise; diff --git a/amplify/functions/fetchDocuments/node_modules/@smithy/util-waiter/dist-types/ts3.4/index.d.ts b/amplify/functions/fetchDocuments/node_modules/@smithy/util-waiter/dist-types/ts3.4/index.d.ts new file mode 100644 index 0000000..be143d5 --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@smithy/util-waiter/dist-types/ts3.4/index.d.ts @@ -0,0 +1,2 @@ +export * from "./createWaiter"; +export * from "./waiter"; diff --git a/amplify/functions/fetchDocuments/node_modules/@smithy/util-waiter/dist-types/ts3.4/poller.d.ts b/amplify/functions/fetchDocuments/node_modules/@smithy/util-waiter/dist-types/ts3.4/poller.d.ts new file mode 100644 index 0000000..8b33c94 --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@smithy/util-waiter/dist-types/ts3.4/poller.d.ts @@ -0,0 +1,10 @@ +import { WaiterOptions, WaiterResult } from "./waiter"; +/** + * Function that runs polling as part of waiters. This will make one inital attempt and then + * subsequent attempts with an increasing delay. + * @param params - options passed to the waiter. + * @param client - AWS SDK Client + * @param input - client input + * @param stateChecker - function that checks the acceptor states on each poll. 
+ */ +export declare const runPolling: ({ minDelay, maxDelay, maxWaitTime, abortController, client, abortSignal }: WaiterOptions, input: Input, acceptorChecks: (client: Client, input: Input) => Promise) => Promise; diff --git a/amplify/functions/fetchDocuments/node_modules/@smithy/util-waiter/dist-types/ts3.4/utils/index.d.ts b/amplify/functions/fetchDocuments/node_modules/@smithy/util-waiter/dist-types/ts3.4/utils/index.d.ts new file mode 100644 index 0000000..974384c --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@smithy/util-waiter/dist-types/ts3.4/utils/index.d.ts @@ -0,0 +1,8 @@ +/** + * @internal + */ +export * from "./sleep"; +/** + * @internal + */ +export * from "./validate"; diff --git a/amplify/functions/fetchDocuments/node_modules/@smithy/util-waiter/dist-types/ts3.4/utils/sleep.d.ts b/amplify/functions/fetchDocuments/node_modules/@smithy/util-waiter/dist-types/ts3.4/utils/sleep.d.ts new file mode 100644 index 0000000..f53553b --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@smithy/util-waiter/dist-types/ts3.4/utils/sleep.d.ts @@ -0,0 +1,4 @@ +/** + * @internal + */ +export declare const sleep: (seconds: number) => Promise; diff --git a/amplify/functions/fetchDocuments/node_modules/@smithy/util-waiter/dist-types/ts3.4/utils/validate.d.ts b/amplify/functions/fetchDocuments/node_modules/@smithy/util-waiter/dist-types/ts3.4/utils/validate.d.ts new file mode 100644 index 0000000..73d79b0 --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@smithy/util-waiter/dist-types/ts3.4/utils/validate.d.ts @@ -0,0 +1,8 @@ +import { WaiterOptions } from "../waiter"; +/** + * @internal + * + * Validates that waiter options are passed correctly + * @param options - a waiter configuration object + */ +export declare const validateWaiterOptions: (options: WaiterOptions) => void; diff --git a/amplify/functions/fetchDocuments/node_modules/@smithy/util-waiter/dist-types/ts3.4/waiter.d.ts 
b/amplify/functions/fetchDocuments/node_modules/@smithy/util-waiter/dist-types/ts3.4/waiter.d.ts new file mode 100644 index 0000000..f685ce4 --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@smithy/util-waiter/dist-types/ts3.4/waiter.d.ts @@ -0,0 +1,49 @@ +import { WaiterConfiguration as WaiterConfiguration__ } from "@smithy/types"; +/** + * @internal + */ +export interface WaiterConfiguration extends WaiterConfiguration__ { +} +/** + * @internal + */ +export declare const waiterServiceDefaults: { + minDelay: number; + maxDelay: number; +}; +/** + * @internal + */ +export type WaiterOptions = WaiterConfiguration & Required, "minDelay" | "maxDelay">>; +/** + * @internal + */ +export declare enum WaiterState { + ABORTED = "ABORTED", + FAILURE = "FAILURE", + SUCCESS = "SUCCESS", + RETRY = "RETRY", + TIMEOUT = "TIMEOUT" +} +/** + * @internal + */ +export type WaiterResult = { + state: WaiterState; + /** + * (optional) Indicates a reason for why a waiter has reached its state. + */ + reason?: any; + /** + * Responses observed by the waiter during its polling, where the value + * is the count. 
+ */ + observedResponses?: Record; +}; +/** + * @internal + * + * Handles and throws exceptions resulting from the waiterResult + * @param result - WaiterResult + */ +export declare const checkExceptions: (result: WaiterResult) => WaiterResult; diff --git a/amplify/functions/fetchDocuments/node_modules/@smithy/util-waiter/dist-types/utils/index.d.ts b/amplify/functions/fetchDocuments/node_modules/@smithy/util-waiter/dist-types/utils/index.d.ts new file mode 100644 index 0000000..b9a3205 --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@smithy/util-waiter/dist-types/utils/index.d.ts @@ -0,0 +1,8 @@ +/** + * @internal + */ +export * from "./sleep"; +/** + * @internal + */ +export * from "./validate"; diff --git a/amplify/functions/fetchDocuments/node_modules/@smithy/util-waiter/dist-types/utils/sleep.d.ts b/amplify/functions/fetchDocuments/node_modules/@smithy/util-waiter/dist-types/utils/sleep.d.ts new file mode 100644 index 0000000..e5d9f73 --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@smithy/util-waiter/dist-types/utils/sleep.d.ts @@ -0,0 +1,4 @@ +/** + * @internal + */ +export declare const sleep: (seconds: number) => Promise; diff --git a/amplify/functions/fetchDocuments/node_modules/@smithy/util-waiter/dist-types/utils/validate.d.ts b/amplify/functions/fetchDocuments/node_modules/@smithy/util-waiter/dist-types/utils/validate.d.ts new file mode 100644 index 0000000..a847eee --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@smithy/util-waiter/dist-types/utils/validate.d.ts @@ -0,0 +1,8 @@ +import { WaiterOptions } from "../waiter"; +/** + * @internal + * + * Validates that waiter options are passed correctly + * @param options - a waiter configuration object + */ +export declare const validateWaiterOptions: (options: WaiterOptions) => void; diff --git a/amplify/functions/fetchDocuments/node_modules/@smithy/util-waiter/dist-types/waiter.d.ts 
b/amplify/functions/fetchDocuments/node_modules/@smithy/util-waiter/dist-types/waiter.d.ts new file mode 100644 index 0000000..e0c690f --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@smithy/util-waiter/dist-types/waiter.d.ts @@ -0,0 +1,49 @@ +import { WaiterConfiguration as WaiterConfiguration__ } from "@smithy/types"; +/** + * @internal + */ +export interface WaiterConfiguration extends WaiterConfiguration__ { +} +/** + * @internal + */ +export declare const waiterServiceDefaults: { + minDelay: number; + maxDelay: number; +}; +/** + * @internal + */ +export type WaiterOptions = WaiterConfiguration & Required, "minDelay" | "maxDelay">>; +/** + * @internal + */ +export declare enum WaiterState { + ABORTED = "ABORTED", + FAILURE = "FAILURE", + SUCCESS = "SUCCESS", + RETRY = "RETRY", + TIMEOUT = "TIMEOUT" +} +/** + * @internal + */ +export type WaiterResult = { + state: WaiterState; + /** + * (optional) Indicates a reason for why a waiter has reached its state. + */ + reason?: any; + /** + * Responses observed by the waiter during its polling, where the value + * is the count. 
+ */ + observedResponses?: Record; +}; +/** + * @internal + * + * Handles and throws exceptions resulting from the waiterResult + * @param result - WaiterResult + */ +export declare const checkExceptions: (result: WaiterResult) => WaiterResult; diff --git a/amplify/functions/fetchDocuments/node_modules/@smithy/util-waiter/package.json b/amplify/functions/fetchDocuments/node_modules/@smithy/util-waiter/package.json new file mode 100644 index 0000000..2706fd7 --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@smithy/util-waiter/package.json @@ -0,0 +1,62 @@ +{ + "name": "@smithy/util-waiter", + "version": "4.0.3", + "description": "Shared utilities for client waiters for the AWS SDK", + "dependencies": { + "@smithy/abort-controller": "^4.0.2", + "@smithy/types": "^4.2.0", + "tslib": "^2.6.2" + }, + "scripts": { + "build": "concurrently 'yarn:build:cjs' 'yarn:build:es' 'yarn:build:types && yarn build:types:downlevel'", + "build:cjs": "node ../../scripts/inline util-waiter", + "build:es": "yarn g:tsc -p tsconfig.es.json", + "build:types": "yarn g:tsc -p tsconfig.types.json", + "build:types:downlevel": "rimraf dist-types/ts3.4 && downlevel-dts dist-types dist-types/ts3.4", + "stage-release": "rimraf ./.release && yarn pack && mkdir ./.release && tar zxvf ./package.tgz --directory ./.release && rm ./package.tgz", + "clean": "rimraf ./dist-* && rimraf *.tsbuildinfo || exit 0", + "lint": "eslint -c ../../.eslintrc.js \"src/**/*.ts\"", + "format": "prettier --config ../../prettier.config.js --ignore-path ../../.prettierignore --write \"**/*.{ts,md,json}\"", + "test": "yarn g:vitest run", + "test:watch": "yarn g:vitest watch" + }, + "author": { + "name": "AWS SDK for JavaScript Team", + "url": "https://aws.amazon.com/javascript/" + }, + "license": "Apache-2.0", + "main": "./dist-cjs/index.js", + "module": "./dist-es/index.js", + "types": "./dist-types/index.d.ts", + "engines": { + "node": ">=18.0.0" + }, + "typesVersions": { + "<4.0": { + "dist-types/*": [ + 
"dist-types/ts3.4/*" + ] + } + }, + "files": [ + "dist-*/**" + ], + "homepage": "https://github.com/smithy-lang/smithy-typescript/tree/main/packages/util-waiter", + "repository": { + "type": "git", + "url": "https://github.com/smithy-lang/smithy-typescript.git", + "directory": "packages/util-waiter" + }, + "devDependencies": { + "concurrently": "7.0.0", + "downlevel-dts": "0.10.1", + "rimraf": "3.0.2", + "typedoc": "0.23.23" + }, + "typedoc": { + "entryPoint": "src/index.ts" + }, + "publishConfig": { + "directory": ".release/package" + } +} \ No newline at end of file diff --git a/amplify/functions/fetchDocuments/node_modules/@types/uuid/LICENSE b/amplify/functions/fetchDocuments/node_modules/@types/uuid/LICENSE new file mode 100644 index 0000000..9e841e7 --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@types/uuid/LICENSE @@ -0,0 +1,21 @@ + MIT License + + Copyright (c) Microsoft Corporation. + + Permission is hereby granted, free of charge, to any person obtaining a copy + of this software and associated documentation files (the "Software"), to deal + in the Software without restriction, including without limitation the rights + to use, copy, modify, merge, publish, distribute, sublicense, and/or sell + copies of the Software, and to permit persons to whom the Software is + furnished to do so, subject to the following conditions: + + The above copyright notice and this permission notice shall be included in all + copies or substantial portions of the Software. + + THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR + IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, + FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. 
IN NO EVENT SHALL THE + AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER + LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, + OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE + SOFTWARE diff --git a/amplify/functions/fetchDocuments/node_modules/@types/uuid/README.md b/amplify/functions/fetchDocuments/node_modules/@types/uuid/README.md new file mode 100644 index 0000000..4cd2a58 --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@types/uuid/README.md @@ -0,0 +1,15 @@ +# Installation +> `npm install --save @types/uuid` + +# Summary +This package contains type definitions for uuid (https://github.com/uuidjs/uuid). + +# Details +Files were exported from https://github.com/DefinitelyTyped/DefinitelyTyped/tree/master/types/uuid. + +### Additional Details + * Last updated: Thu, 25 Jan 2024 23:07:19 GMT + * Dependencies: none + +# Credits +These definitions were written by [Oliver Hoffmann](https://github.com/iamolivinius), [Felipe Ochoa](https://github.com/felipeochoa), [Chris Barth](https://github.com/cjbarth), [Linus Unnebäck](https://github.com/LinusU), and [Christoph Tavan](https://github.com/ctavan). 
diff --git a/amplify/functions/fetchDocuments/node_modules/@types/uuid/index.d.mts b/amplify/functions/fetchDocuments/node_modules/@types/uuid/index.d.mts new file mode 100644 index 0000000..47a6599 --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@types/uuid/index.d.mts @@ -0,0 +1,12 @@ +import uuid from "./index.js"; +export import v1 = uuid.v1; +export import v3 = uuid.v3; +export import v4 = uuid.v4; +export import v5 = uuid.v5; +export import NIL = uuid.NIL; +export import version = uuid.version; +export import validate = uuid.validate; +export import stringify = uuid.stringify; +export import parse = uuid.parse; +export import V1Options = uuid.V1Options; +export import V4Options = uuid.V4Options; diff --git a/amplify/functions/fetchDocuments/node_modules/@types/uuid/index.d.ts b/amplify/functions/fetchDocuments/node_modules/@types/uuid/index.d.ts new file mode 100644 index 0000000..2f7d813 --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@types/uuid/index.d.ts @@ -0,0 +1,86 @@ +// disable automatic export +export {}; + +// Uses ArrayLike to admit Uint8 and co. 
+type OutputBuffer = ArrayLike; +type InputBuffer = ArrayLike; + +interface RandomOptions { + /** `Array` of 16 random bytes (0-255) */ + random?: InputBuffer | undefined; +} +interface RngOptions { + /** Alternative to `options.random`, a `Function` that returns an `Array` of 16 random bytes (0-255) */ + rng?: (() => InputBuffer) | undefined; +} + +interface V1BaseOptions { + /** RFC "node" field as an `Array[6]` of byte values (per 4.1.6) */ + node?: InputBuffer | undefined; + /** RFC "clock sequence" as a `Number` between 0 - 0x3fff */ + clockseq?: number | undefined; + /** RFC "timestamp" field (`Number` of milliseconds, unix epoch) */ + msecs?: number | Date | undefined; + /** RFC "timestamp" field (`Number` of nanoseconds to add to msecs, should be 0-10,000) */ + nsecs?: number | undefined; +} +interface V1RandomOptions extends V1BaseOptions, RandomOptions {} +interface V1RngOptions extends V1BaseOptions, RngOptions {} + +export type V1Options = V1RandomOptions | V1RngOptions; +export type V4Options = RandomOptions | RngOptions; + +type v1String = (options?: V1Options) => string; +type v1Buffer = (options: V1Options | null | undefined, buffer: T, offset?: number) => T; +type v1 = v1Buffer & v1String; + +type v4String = (options?: V4Options) => string; +type v4Buffer = (options: V4Options | null | undefined, buffer: T, offset?: number) => T; +type v4 = v4Buffer & v4String; + +type v3String = (name: string | InputBuffer, namespace: string | InputBuffer) => string; +type v3Buffer = ( + name: string | InputBuffer, + namespace: string | InputBuffer, + buffer: T, + offset?: number, +) => T; +interface v3Static { + // https://github.com/uuidjs/uuid/blob/master/src/v35.js#L16 + DNS: string; + // https://github.com/uuidjs/uuid/blob/master/src/v35.js#L17 + URL: string; +} +type v3 = v3Buffer & v3String & v3Static; + +type v5String = (name: string | InputBuffer, namespace: string | InputBuffer) => string; +type v5Buffer = ( + name: string | InputBuffer, + namespace: 
string | InputBuffer, + buffer: T, + offset?: number, +) => T; +interface v5Static { + // https://github.com/uuidjs/uuid/blob/master/src/v35.js#L16 + DNS: string; + // https://github.com/uuidjs/uuid/blob/master/src/v35.js#L17 + URL: string; +} +type v5 = v5Buffer & v5String & v5Static; + +type NIL = string; + +type parse = (uuid: string) => Uint8Array; +type stringify = (buffer: InputBuffer, offset?: number) => string; +type validate = (uuid: string) => boolean; +type version = (uuid: string) => number; + +export const NIL: NIL; +export const parse: parse; +export const stringify: stringify; +export const v1: v1; +export const v3: v3; +export const v4: v4; +export const v5: v5; +export const validate: validate; +export const version: version; diff --git a/amplify/functions/fetchDocuments/node_modules/@types/uuid/package.json b/amplify/functions/fetchDocuments/node_modules/@types/uuid/package.json new file mode 100644 index 0000000..09959ce --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/@types/uuid/package.json @@ -0,0 +1,54 @@ +{ + "name": "@types/uuid", + "version": "9.0.8", + "description": "TypeScript definitions for uuid", + "homepage": "https://github.com/DefinitelyTyped/DefinitelyTyped/tree/master/types/uuid", + "license": "MIT", + "contributors": [ + { + "name": "Oliver Hoffmann", + "githubUsername": "iamolivinius", + "url": "https://github.com/iamolivinius" + }, + { + "name": "Felipe Ochoa", + "githubUsername": "felipeochoa", + "url": "https://github.com/felipeochoa" + }, + { + "name": "Chris Barth", + "githubUsername": "cjbarth", + "url": "https://github.com/cjbarth" + }, + { + "name": "Linus Unnebäck", + "githubUsername": "LinusU", + "url": "https://github.com/LinusU" + }, + { + "name": "Christoph Tavan", + "githubUsername": "ctavan", + "url": "https://github.com/ctavan" + } + ], + "main": "", + "types": "index.d.ts", + "exports": { + "./package.json": "./package.json", + ".": { + "types": { + "import": "./index.d.mts", + "default": 
"./index.d.ts" + } + } + }, + "repository": { + "type": "git", + "url": "https://github.com/DefinitelyTyped/DefinitelyTyped.git", + "directory": "types/uuid" + }, + "scripts": {}, + "dependencies": {}, + "typesPublisherContentHash": "ee6ba7ad17fbbead7a508faf213a9ad0f49c12929e8c6b0f05fb35129bc72d61", + "typeScriptVersion": "4.6" +} \ No newline at end of file diff --git a/amplify/functions/fetchDocuments/node_modules/bowser/CHANGELOG.md b/amplify/functions/fetchDocuments/node_modules/bowser/CHANGELOG.md new file mode 100644 index 0000000..260a03d --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/bowser/CHANGELOG.md @@ -0,0 +1,218 @@ +# Bowser Changelog + +### 2.11.0 (Sep 12, 2020) +- [ADD] Added support for aliases in `Parser#is` method (#437) +- [ADD] Added more typings (#438, #427) +- [ADD] Added support for MIUI Browserr (#436) + +### 2.10.0 (Jul 9, 2020) +- [FIX] Fix for Firefox detection on iOS 13 [#415] +- [FIX] Fixes for typings.d.ts [#409] +- [FIX] Updated development dependencies + +### 2.9.0 (Jan 28, 2020) +- [ADD] Export more methods and constants via .d.ts [#388], [#390] + +### 2.8.1 (Dec 26, 2019) +- [FIX] Reverted [#382] as it broke build + +### 2.8.0 (Dec 26, 2019) +- [ADD] Add polyfills for Array.find & Object.assign [#383] +- [ADD] Export constants with types.d.ts [#382] +- [FIX] Add support for WeChat on Windows [#381] +- [FIX] Fix detection of Firefox on iPad [#379] +- [FIX] Add detection of Electron [#375] +- [FIX] Updated dev-dependencies + +### 2.7.0 (Oct 2, 2019) +- [FIX] Add support for QQ Browser [#362] +- [FIX] Add support for GSA [#364] +- [FIX] Updated dependencies + +### 2.6.0 (Sep 6, 2019) +- [ADD] Define "module" export in package.json [#354] +- [FIX] Fix Tablet PC detection [#334] + +### 2.5.4 (Sep 2, 2019) +- [FIX] Exclude docs from the npm package [#349] + +### 2.5.3 (Aug 4, 2019) +- [FIX] Add MacOS names support [#338] +- [FIX] Point typings.d.ts from package.json [#341] +- [FIX] Upgrade dependencies + +### 2.5.2 
(July 17, 2019) +- [FIX] Fixes the bug undefined method because of failed build (#335) + +### 2.5.1 (July 17, 2019) +- [FIX] Fixes the bug with a custom Error class (#335) +- [FIX] Fixes the settings for Babel to reduce the bundle size (#259) + +### 2.5.0 (July 16, 2019) +- [ADD] Add constant output so that users can quickly get all types (#325) +- [FIX] Add support for Roku OS (#332) +- [FIX] Update devDependencies +- [FIX] Fix docs, README and added funding information + +### 2.4.0 (May 3, 2019) +- [FIX] Update regexp for generic browsers (#310) +- [FIX] Fix issues with module.exports (#318) +- [FIX] Update devDependencies (#316, #321, #322) +- [FIX] Fix docs (#320) + +### 2.3.0 (April 14, 2019) +- [ADD] Add support for Blink-based MS Edge (#311) +- [ADD] Add more types for TS (#289) +- [FIX] Update dev-dependencies +- [FIX] Update docs + +### 2.2.1 (April 12, 2019) +- [ADD] Add an alias for Samsung Internet +- [FIX] Fix browser name detection for browsers without an alias (#313) + +### 2.2.0 (April 7, 2019) +- [ADD] Add short aliases for browser names (#295) +- [FIX] Fix Yandex Browser version detection (#308) + +### 2.1.2 (March 6, 2019) +- [FIX] Fix buggy `getFirstMatch` reference + +### 2.1.1 (March 6, 2019) +- [ADD] Add detection of PlayStation 4 (#291) +- [ADD] Deploy docs on GH Pages (#293) +- [FIX] Fix files extensions for importing (#294) +- [FIX] Fix docs (#295) + +### 2.1.0 (January 24, 2019) +- [ADD] Add new `Parser.getEngineName()` method (#288) +- [ADD] Add detection of ChromeOS (#287) +- [FIX] Fix README + +### 2.0.0 (January 19, 2019) +- [ADD] Support a non strict equality in `Parser.satisfies()` (#275) +- [ADD] Add Android versions names (#276) +- [ADD] Add a typings file (#277) +- [ADD] Added support for Googlebot recognition (#278) +- [FIX] Update building tools, avoid security issues + +### 2.0.0-beta.3 (September 15, 2018) +- [FIX] Fix Chrome Mobile detection (#253) +- [FIX] Use built bowser for CI (#252) +- [FIX] Update 
babel-plugin-add-module-exports (#251) + +### 2.0.0-beta.2 (September 9, 2018) +- [FIX] Fix failing comparing version through `Parser.satisfies` (#243) +- [FIX] Fix travis testing, include eslint into CI testing +- [FIX] Add support for Maxthon desktop browser (#246) +- [FIX] Add support for Swing browser (#248) +- [DOCS] Regenerate docs + +### 2.0.0-beta.1 (August 18, 2018) +- [ADD] Add loose version comparison to `Parser.compareVersion()` and `Parser.satisfies()` +- [CHORE] Add CONTRIBUTING.md +- [DOCS] Regenerate docs + +### 2.0.0-alpha.4 (August 2, 2018) +- [DOCS] Fix usage docs (#238) +- [CHANGE] Make `./es5.js` the main file of the package (#239) + +### 2.0.0-alpha.3 (July 22, 2018) +- [CHANGE] Rename split and rename `compiled.js` to `es5.js` and `bundled.js` (#231, #236, #237) +- [ADD] Add `Parser.some` (#235) + +### 2.0.0-alpha.2 (July 17, 2018) +- [CHANGE] Make `src/bowser` main file instead of the bundled one +- [CHANGE] Move the bundled file to the root of the package to make it possible to `require('bowser/compiled')` (#231) +- [REMOVE] Remove `typings.d.ts` before stable release (#232) +- [FIX] Improve Nexus devices detection (#233) + +### 2.0.0-alpha.1 (July 9, 2018) +- [ADD] `Bowser.getParser()` +- [ADD] `Bowser.parse` +- [ADD] `Parser` class which describes parsing process +- [CHANGE] Change bowser's returning object +- [REMOVE] Remove bower support + +### 1.9.4 (June 28, 2018) +- [FIX] Fix NAVER Whale browser detection (#220) +- [FIX] Fix MZ Browser browser detection (#219) +- [FIX] Fix Firefox Focus browser detection (#191) +- [FIX] Fix webOS browser detection (#186) + +### 1.9.3 (March 12, 2018) +- [FIX] Fix `typings.d.ts` — add `ipad`, `iphone`, `ipod` flags to the interface + +### 1.9.2 (February 5, 2018) +- [FIX] Fix `typings.d.ts` — add `osname` flag to the interface + +### 1.9.1 (December 22, 2017) +- [FIX] Fix `typings.d.ts` — add `chromium` flag to the interface + +### 1.9.0 (December 20, 2017) +- [ADD] Add a public method `.detect()` 
(#205) +- [DOCS] Fix description of `chromium` flag in docs (#206) + +### 1.8.1 (October 7, 2017) +- [FIX] Fix detection of MS Edge on Android and iOS (#201) + +### 1.8.0 (October 7, 2017) +- [ADD] Add `osname` into result object (#200) + +### 1.7.3 (August 30, 2017) +- [FIX] Fix detection of Chrome on Android 8 OPR6 (#193) + +### 1.7.2 (August 17, 2017) +- [FIX] Fix typings.d.ts according to #185 + +### 1.7.1 (July 13, 2017) +- [ADD] Fix detecting of Tablet PC as tablet (#183) + +### 1.7.0 (May 18, 2017) +- [ADD] Add OS version support for Windows and macOS (#178) + +### 1.6.0 (December 5, 2016) +- [ADD] Add some tests for Windows devices (#89) +- [ADD] Add `root` to initialization process (#170) +- [FIX] Upgrade .travis.yml config + +### 1.5.0 (October 31, 2016) +- [ADD] Throw an error when `minVersion` map has not a string as a version and fix readme (#165) +- [FIX] Fix truly detection of Windows Phones (#167) + +### 1.4.6 (September 19, 2016) +- [FIX] Fix mobile Opera's version detection on Android +- [FIX] Fix typescript typings — add `mobile` and `tablet` flags +- [DOC] Fix description of `bowser.check` + +### 1.4.5 (August 30, 2016) + +- [FIX] Add support of Samsung Internet for Android +- [FIX] Fix case when `navigator.userAgent` is `undefined` +- [DOC] Add information about `strictMode` in `check` function +- [DOC] Consistent use of `bowser` variable in the README + +### 1.4.4 (August 10, 2016) + +- [FIX] Fix AMD `define` call — pass name to the function + +### 1.4.3 (July 27, 2016) + +- [FIX] Fix error `Object doesn't support this property or method` on IE8 + +### 1.4.2 (July 26, 2016) + +- [FIX] Fix missing `isUnsupportedBrowser` in typings description +- [DOC] Fix `check`'s declaration in README + +### 1.4.1 (July 7, 2016) + +- [FIX] Fix `strictMode` logic for `isUnsupportedBrowser` + +### 1.4.0 (June 28, 2016) + +- [FEATURE] Add `bowser.compareVersions` method +- [FEATURE] Add `bowser.isUnsupportedBrowser` method +- [FEATURE] Add `bowser.check` method 
+- [DOC] Changelog started +- [DOC] Add API section to README +- [FIX] Fix detection of browser type (A/C/X) for Chromium diff --git a/amplify/functions/fetchDocuments/node_modules/bowser/LICENSE b/amplify/functions/fetchDocuments/node_modules/bowser/LICENSE new file mode 100644 index 0000000..94085f0 --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/bowser/LICENSE @@ -0,0 +1,39 @@ +Copyright 2015, Dustin Diaz (the "Original Author") +All rights reserved. + +MIT License + +Permission is hereby granted, free of charge, to any person +obtaining a copy of this software and associated documentation +files (the "Software"), to deal in the Software without +restriction, including without limitation the rights to use, +copy, modify, merge, publish, distribute, sublicense, and/or sell +copies of the Software, and to permit persons to whom the +Software is furnished to do so, subject to the following +conditions: + +The above copyright notice and this permission notice shall be +included in all copies or substantial portions of the Software. + +Distributions of all or part of the Software intended to be used +by the recipients as they would use the unmodified Software, +containing modifications that substantially alter, remove, or +disable functionality of the Software, outside of the documented +configuration mechanisms provided by the Software, shall be +modified such that the Original Author's bug reporting email +addresses and urls are either replaced with the contact information +of the parties responsible for the changes, or removed entirely. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, +EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES +OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND +NONINFRINGEMENT. 
IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT +HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, +WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING +FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR +OTHER DEALINGS IN THE SOFTWARE. + + +Except where noted, this license applies to any and all software +programs and associated documentation files created by the +Original Author, when distributed with the Software. diff --git a/amplify/functions/fetchDocuments/node_modules/bowser/README.md b/amplify/functions/fetchDocuments/node_modules/bowser/README.md new file mode 100644 index 0000000..8f5f915 --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/bowser/README.md @@ -0,0 +1,179 @@ +## Bowser +A small, fast and rich-API browser/platform/engine detector for both browser and node. +- **Small.** Use plain ES5-version which is ~4.8kB gzipped. +- **Optimized.** Use only those parsers you need — it doesn't do useless work. +- **Multi-platform.** It's browser- and node-ready, so you can use it in any environment. + +Don't hesitate to support the project on Github or [OpenCollective](https://opencollective.com/bowser) if you like it ❤️ Also, contributors are always welcome! 
+ +[![Financial Contributors on Open Collective](https://opencollective.com/bowser/all/badge.svg?label=financial+contributors)](https://opencollective.com/bowser) [![Build Status](https://travis-ci.org/lancedikson/bowser.svg?branch=master)](https://travis-ci.org/lancedikson/bowser/) [![Greenkeeper badge](https://badges.greenkeeper.io/lancedikson/bowser.svg)](https://greenkeeper.io/) [![Coverage Status](https://coveralls.io/repos/github/lancedikson/bowser/badge.svg?branch=master)](https://coveralls.io/github/lancedikson/bowser?branch=master) ![Downloads](https://img.shields.io/npm/dm/bowser) + +# Contents +- [Overview](#overview) +- [Use cases](#use-cases) +- [Advanced usage](#advanced-usage) +- [How can I help?](#contributing) + +# Overview + +The library is made to help to detect what browser your user has and gives you a convenient API to filter the users somehow depending on their browsers. Check it out on this page: https://bowser-js.github.io/bowser-online/. + +### ⚠️ Version 2.0 breaking changes ⚠️ + +Version 2.0 has drastically changed the API. All available methods are on the [docs page](https://lancedikson.github.io/bowser/docs). + +_For legacy code, check out the [1.x](https://github.com/lancedikson/bowser/tree/v1.x) branch and install it through `npm install bowser@1.9.4`._ + +# Use cases + +First of all, require the library. This is a UMD Module, so it will work for AMD, TypeScript, ES6, and CommonJS module systems. + +```javascript +const Bowser = require("bowser"); // CommonJS + +import * as Bowser from "bowser"; // TypeScript + +import Bowser from "bowser"; // ES6 (and TypeScript with --esModuleInterop enabled) +``` + +By default, the exported version is the *ES5 transpiled version*, which **do not** include any polyfills. + +In case you don't use your own `babel-polyfill` you may need to have pre-built bundle with all needed polyfills. +So, for you it's suitable to require bowser like this: `require('bowser/bundled')`. 
+As the result, you get a ES5 version of bowser with `babel-polyfill` bundled together. + +You may need to use the source files, so they will be available in the package as well. + +## Browser props detection + +Often we need to pick users' browser properties such as the name, the version, the rendering engine and so on. Here is an example how to do it with Bowser: + +```javascript +const browser = Bowser.getParser(window.navigator.userAgent); + +console.log(`The current browser name is "${browser.getBrowserName()}"`); +// The current browser name is "Internet Explorer" +``` + +or + +```javascript +const browser = Bowser.getParser(window.navigator.userAgent); +console.log(browser.getBrowser()); + +// outputs +{ + name: "Internet Explorer" + version: "11.0" +} +``` + +or + +```javascript +console.log(Bowser.parse(window.navigator.userAgent)); + +// outputs +{ + browser: { + name: "Internet Explorer" + version: "11.0" + }, + os: { + name: "Windows" + version: "NT 6.3" + versionName: "8.1" + }, + platform: { + type: "desktop" + }, + engine: { + name: "Trident" + version: "7.0" + } +} +``` + + +## Filtering browsers + +You could want to filter some particular browsers to provide any special support for them or make any workarounds. 
+It could look like this: + +```javascript +const browser = Bowser.getParser(window.navigator.userAgent); +const isValidBrowser = browser.satisfies({ + // declare browsers per OS + windows: { + "internet explorer": ">10", + }, + macos: { + safari: ">10.1" + }, + + // per platform (mobile, desktop or tablet) + mobile: { + safari: '>=9', + 'android browser': '>3.10' + }, + + // or in general + chrome: "~20.1.1432", + firefox: ">31", + opera: ">=22", + + // also supports equality operator + chrome: "=20.1.1432", // will match particular build only + + // and loose-equality operator + chrome: "~20", // will match any 20.* sub-version + chrome: "~20.1" // will match any 20.1.* sub-version (20.1.19 as well as 20.1.12.42-alpha.1) +}); +``` + +Settings for any particular OS or platform has more priority and redefines settings of standalone browsers. +Thus, you can define OS or platform specific rules and they will have more priority in the end. + +More of API and possibilities you will find in the `docs` folder. + +### Browser names for `.satisfies()` + +By default you are supposed to use the full browser name for `.satisfies`. +But, there's a short way to define a browser using short aliases. The full +list of aliases can be found in [the file](src/constants.js). + +## Similar Projects +* [Kong](https://github.com/BigBadBleuCheese/Kong) - A C# port of Bowser. + +## Contributors + +### Code Contributors + +This project exists thanks to all the people who contribute. [[Contribute](CONTRIBUTING.md)]. + + +### Financial Contributors + +Become a financial contributor and help us sustain our community. [[Contribute](https://opencollective.com/bowser/contribute)] + +#### Individuals + + + +#### Organizations + +Support this project with your organization. Your logo will show up here with a link to your website. [[Contribute](https://opencollective.com/bowser/contribute)] + + + + + + + + + + + + +## License +Licensed as MIT. 
All rights not explicitly granted in the MIT license are reserved. See the included LICENSE file for more details. diff --git a/amplify/functions/fetchDocuments/node_modules/bowser/bundled.js b/amplify/functions/fetchDocuments/node_modules/bowser/bundled.js new file mode 100644 index 0000000..066ac40 --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/bowser/bundled.js @@ -0,0 +1 @@ +!function(t,n){"object"==typeof exports&&"object"==typeof module?module.exports=n():"function"==typeof define&&define.amd?define([],n):"object"==typeof exports?exports.bowser=n():t.bowser=n()}(this,(function(){return function(t){var n={};function e(r){if(n[r])return n[r].exports;var i=n[r]={i:r,l:!1,exports:{}};return t[r].call(i.exports,i,i.exports,e),i.l=!0,i.exports}return e.m=t,e.c=n,e.d=function(t,n,r){e.o(t,n)||Object.defineProperty(t,n,{enumerable:!0,get:r})},e.r=function(t){"undefined"!=typeof Symbol&&Symbol.toStringTag&&Object.defineProperty(t,Symbol.toStringTag,{value:"Module"}),Object.defineProperty(t,"__esModule",{value:!0})},e.t=function(t,n){if(1&n&&(t=e(t)),8&n)return t;if(4&n&&"object"==typeof t&&t&&t.__esModule)return t;var r=Object.create(null);if(e.r(r),Object.defineProperty(r,"default",{enumerable:!0,value:t}),2&n&&"string"!=typeof t)for(var i in t)e.d(r,i,function(n){return t[n]}.bind(null,i));return r},e.n=function(t){var n=t&&t.__esModule?function(){return t.default}:function(){return t};return e.d(n,"a",n),n},e.o=function(t,n){return Object.prototype.hasOwnProperty.call(t,n)},e.p="",e(e.s=129)}([function(t,n,e){var r=e(1),i=e(7),o=e(14),u=e(11),a=e(19),c=function(t,n,e){var s,f,l,h,d=t&c.F,p=t&c.G,v=t&c.S,g=t&c.P,y=t&c.B,m=p?r:v?r[n]||(r[n]={}):(r[n]||{}).prototype,b=p?i:i[n]||(i[n]={}),S=b.prototype||(b.prototype={});for(s in p&&(e=n),e)l=((f=!d&&m&&void 0!==m[s])?m:e)[s],h=y&&f?a(l,r):g&&"function"==typeof 
l?a(Function.call,l):l,m&&u(m,s,l,t&c.U),b[s]!=l&&o(b,s,h),g&&S[s]!=l&&(S[s]=l)};r.core=i,c.F=1,c.G=2,c.S=4,c.P=8,c.B=16,c.W=32,c.U=64,c.R=128,t.exports=c},function(t,n){var e=t.exports="undefined"!=typeof window&&window.Math==Math?window:"undefined"!=typeof self&&self.Math==Math?self:Function("return this")();"number"==typeof __g&&(__g=e)},function(t,n){t.exports=function(t){try{return!!t()}catch(t){return!0}}},function(t,n,e){var r=e(4);t.exports=function(t){if(!r(t))throw TypeError(t+" is not an object!");return t}},function(t,n){t.exports=function(t){return"object"==typeof t?null!==t:"function"==typeof t}},function(t,n,e){var r=e(50)("wks"),i=e(31),o=e(1).Symbol,u="function"==typeof o;(t.exports=function(t){return r[t]||(r[t]=u&&o[t]||(u?o:i)("Symbol."+t))}).store=r},function(t,n,e){var r=e(21),i=Math.min;t.exports=function(t){return t>0?i(r(t),9007199254740991):0}},function(t,n){var e=t.exports={version:"2.6.9"};"number"==typeof __e&&(__e=e)},function(t,n,e){t.exports=!e(2)((function(){return 7!=Object.defineProperty({},"a",{get:function(){return 7}}).a}))},function(t,n,e){var r=e(3),i=e(96),o=e(28),u=Object.defineProperty;n.f=e(8)?Object.defineProperty:function(t,n,e){if(r(t),n=o(n,!0),r(e),i)try{return u(t,n,e)}catch(t){}if("get"in e||"set"in e)throw TypeError("Accessors not supported!");return"value"in e&&(t[n]=e.value),t}},function(t,n,e){var r=e(26);t.exports=function(t){return Object(r(t))}},function(t,n,e){var r=e(1),i=e(14),o=e(13),u=e(31)("src"),a=e(134),c=(""+a).split("toString");e(7).inspectSource=function(t){return a.call(t)},(t.exports=function(t,n,e,a){var s="function"==typeof e;s&&(o(e,"name")||i(e,"name",n)),t[n]!==e&&(s&&(o(e,u)||i(e,u,t[n]?""+t[n]:c.join(String(n)))),t===r?t[n]=e:a?t[n]?t[n]=e:i(t,n,e):(delete t[n],i(t,n,e)))})(Function.prototype,"toString",(function(){return"function"==typeof this&&this[u]||a.call(this)}))},function(t,n,e){var r=e(0),i=e(2),o=e(26),u=/"/g,a=function(t,n,e,r){var i=String(o(t)),a="<"+n;return""!==e&&(a+=" 
"+e+'="'+String(r).replace(u,""")+'"'),a+">"+i+""};t.exports=function(t,n){var e={};e[t]=n(a),r(r.P+r.F*i((function(){var n=""[t]('"');return n!==n.toLowerCase()||n.split('"').length>3})),"String",e)}},function(t,n){var e={}.hasOwnProperty;t.exports=function(t,n){return e.call(t,n)}},function(t,n,e){var r=e(9),i=e(30);t.exports=e(8)?function(t,n,e){return r.f(t,n,i(1,e))}:function(t,n,e){return t[n]=e,t}},function(t,n,e){var r=e(46),i=e(26);t.exports=function(t){return r(i(t))}},function(t,n,e){"use strict";var r=e(2);t.exports=function(t,n){return!!t&&r((function(){n?t.call(null,(function(){}),1):t.call(null)}))}},function(t,n,e){"use strict";n.__esModule=!0,n.default=void 0;var r=e(18),i=function(){function t(){}return t.getFirstMatch=function(t,n){var e=n.match(t);return e&&e.length>0&&e[1]||""},t.getSecondMatch=function(t,n){var e=n.match(t);return e&&e.length>1&&e[2]||""},t.matchAndReturnConst=function(t,n,e){if(t.test(n))return e},t.getWindowsVersionName=function(t){switch(t){case"NT":return"NT";case"XP":return"XP";case"NT 5.0":return"2000";case"NT 5.1":return"XP";case"NT 5.2":return"2003";case"NT 6.0":return"Vista";case"NT 6.1":return"7";case"NT 6.2":return"8";case"NT 6.3":return"8.1";case"NT 10.0":return"10";default:return}},t.getMacOSVersionName=function(t){var n=t.split(".").splice(0,2).map((function(t){return parseInt(t,10)||0}));if(n.push(0),10===n[0])switch(n[1]){case 5:return"Leopard";case 6:return"Snow Leopard";case 7:return"Lion";case 8:return"Mountain Lion";case 9:return"Mavericks";case 10:return"Yosemite";case 11:return"El Capitan";case 12:return"Sierra";case 13:return"High Sierra";case 14:return"Mojave";case 15:return"Catalina";default:return}},t.getAndroidVersionName=function(t){var n=t.split(".").splice(0,2).map((function(t){return parseInt(t,10)||0}));if(n.push(0),!(1===n[0]&&n[1]<5))return 
1===n[0]&&n[1]<6?"Cupcake":1===n[0]&&n[1]>=6?"Donut":2===n[0]&&n[1]<2?"Eclair":2===n[0]&&2===n[1]?"Froyo":2===n[0]&&n[1]>2?"Gingerbread":3===n[0]?"Honeycomb":4===n[0]&&n[1]<1?"Ice Cream Sandwich":4===n[0]&&n[1]<4?"Jelly Bean":4===n[0]&&n[1]>=4?"KitKat":5===n[0]?"Lollipop":6===n[0]?"Marshmallow":7===n[0]?"Nougat":8===n[0]?"Oreo":9===n[0]?"Pie":void 0},t.getVersionPrecision=function(t){return t.split(".").length},t.compareVersions=function(n,e,r){void 0===r&&(r=!1);var i=t.getVersionPrecision(n),o=t.getVersionPrecision(e),u=Math.max(i,o),a=0,c=t.map([n,e],(function(n){var e=u-t.getVersionPrecision(n),r=n+new Array(e+1).join(".0");return t.map(r.split("."),(function(t){return new Array(20-t.length).join("0")+t})).reverse()}));for(r&&(a=u-Math.min(i,o)),u-=1;u>=a;){if(c[0][u]>c[1][u])return 1;if(c[0][u]===c[1][u]){if(u===a)return 0;u-=1}else if(c[0][u]1?i-1:0),u=1;u0?r:e)(t)}},function(t,n,e){var r=e(47),i=e(30),o=e(15),u=e(28),a=e(13),c=e(96),s=Object.getOwnPropertyDescriptor;n.f=e(8)?s:function(t,n){if(t=o(t),n=u(n,!0),c)try{return s(t,n)}catch(t){}if(a(t,n))return i(!r.f.call(t,n),t[n])}},function(t,n,e){var r=e(0),i=e(7),o=e(2);t.exports=function(t,n){var e=(i.Object||{})[t]||Object[t],u={};u[t]=n(e),r(r.S+r.F*o((function(){e(1)})),"Object",u)}},function(t,n,e){var r=e(19),i=e(46),o=e(10),u=e(6),a=e(112);t.exports=function(t,n){var e=1==t,c=2==t,s=3==t,f=4==t,l=6==t,h=5==t||l,d=n||a;return function(n,a,p){for(var v,g,y=o(n),m=i(y),b=r(a,p,3),S=u(m.length),w=0,_=e?d(n,S):c?d(n,0):void 0;S>w;w++)if((h||w in m)&&(g=b(v=m[w],w,y),t))if(e)_[w]=g;else if(g)switch(t){case 3:return!0;case 5:return v;case 6:return w;case 2:_.push(v)}else if(f)return!1;return l?-1:s||f?f:_}}},function(t,n){var e={}.toString;t.exports=function(t){return e.call(t).slice(8,-1)}},function(t,n){t.exports=function(t){if(null==t)throw TypeError("Can't call method on "+t);return t}},function(t,n,e){"use strict";if(e(8)){var 
r=e(32),i=e(1),o=e(2),u=e(0),a=e(61),c=e(86),s=e(19),f=e(44),l=e(30),h=e(14),d=e(45),p=e(21),v=e(6),g=e(123),y=e(34),m=e(28),b=e(13),S=e(48),w=e(4),_=e(10),M=e(78),x=e(35),P=e(37),O=e(36).f,F=e(80),A=e(31),E=e(5),N=e(24),R=e(51),k=e(49),T=e(82),I=e(42),j=e(54),L=e(43),B=e(81),C=e(114),W=e(9),V=e(22),G=W.f,D=V.f,U=i.RangeError,z=i.TypeError,q=i.Uint8Array,K=Array.prototype,Y=c.ArrayBuffer,Q=c.DataView,H=N(0),J=N(2),X=N(3),Z=N(4),$=N(5),tt=N(6),nt=R(!0),et=R(!1),rt=T.values,it=T.keys,ot=T.entries,ut=K.lastIndexOf,at=K.reduce,ct=K.reduceRight,st=K.join,ft=K.sort,lt=K.slice,ht=K.toString,dt=K.toLocaleString,pt=E("iterator"),vt=E("toStringTag"),gt=A("typed_constructor"),yt=A("def_constructor"),mt=a.CONSTR,bt=a.TYPED,St=a.VIEW,wt=N(1,(function(t,n){return Ot(k(t,t[yt]),n)})),_t=o((function(){return 1===new q(new Uint16Array([1]).buffer)[0]})),Mt=!!q&&!!q.prototype.set&&o((function(){new q(1).set({})})),xt=function(t,n){var e=p(t);if(e<0||e%n)throw U("Wrong offset!");return e},Pt=function(t){if(w(t)&&bt in t)return t;throw z(t+" is not a typed array!")},Ot=function(t,n){if(!(w(t)&> in t))throw z("It is not a typed array constructor!");return new t(n)},Ft=function(t,n){return At(k(t,t[yt]),n)},At=function(t,n){for(var e=0,r=n.length,i=Ot(t,r);r>e;)i[e]=n[e++];return i},Et=function(t,n,e){G(t,n,{get:function(){return this._d[e]}})},Nt=function(t){var n,e,r,i,o,u,a=_(t),c=arguments.length,f=c>1?arguments[1]:void 0,l=void 0!==f,h=F(a);if(null!=h&&!M(h)){for(u=h.call(a),r=[],n=0;!(o=u.next()).done;n++)r.push(o.value);a=r}for(l&&c>2&&(f=s(f,arguments[2],2)),n=0,e=v(a.length),i=Ot(this,e);e>n;n++)i[n]=l?f(a[n],n):a[n];return i},Rt=function(){for(var t=0,n=arguments.length,e=Ot(this,n);n>t;)e[t]=arguments[t++];return e},kt=!!q&&o((function(){dt.call(new q(1))})),Tt=function(){return dt.apply(kt?lt.call(Pt(this)):Pt(this),arguments)},It={copyWithin:function(t,n){return C.call(Pt(this),t,n,arguments.length>2?arguments[2]:void 0)},every:function(t){return 
Z(Pt(this),t,arguments.length>1?arguments[1]:void 0)},fill:function(t){return B.apply(Pt(this),arguments)},filter:function(t){return Ft(this,J(Pt(this),t,arguments.length>1?arguments[1]:void 0))},find:function(t){return $(Pt(this),t,arguments.length>1?arguments[1]:void 0)},findIndex:function(t){return tt(Pt(this),t,arguments.length>1?arguments[1]:void 0)},forEach:function(t){H(Pt(this),t,arguments.length>1?arguments[1]:void 0)},indexOf:function(t){return et(Pt(this),t,arguments.length>1?arguments[1]:void 0)},includes:function(t){return nt(Pt(this),t,arguments.length>1?arguments[1]:void 0)},join:function(t){return st.apply(Pt(this),arguments)},lastIndexOf:function(t){return ut.apply(Pt(this),arguments)},map:function(t){return wt(Pt(this),t,arguments.length>1?arguments[1]:void 0)},reduce:function(t){return at.apply(Pt(this),arguments)},reduceRight:function(t){return ct.apply(Pt(this),arguments)},reverse:function(){for(var t,n=Pt(this).length,e=Math.floor(n/2),r=0;r1?arguments[1]:void 0)},sort:function(t){return ft.call(Pt(this),t)},subarray:function(t,n){var e=Pt(this),r=e.length,i=y(t,r);return new(k(e,e[yt]))(e.buffer,e.byteOffset+i*e.BYTES_PER_ELEMENT,v((void 0===n?r:y(n,r))-i))}},jt=function(t,n){return Ft(this,lt.call(Pt(this),t,n))},Lt=function(t){Pt(this);var n=xt(arguments[1],1),e=this.length,r=_(t),i=v(r.length),o=0;if(i+n>e)throw U("Wrong length!");for(;o255?255:255&r),i.v[d](e*n+i.o,r,_t)}(this,e,t)},enumerable:!0})};b?(p=e((function(t,e,r,i){f(t,p,s,"_d");var o,u,a,c,l=0,d=0;if(w(e)){if(!(e instanceof Y||"ArrayBuffer"==(c=S(e))||"SharedArrayBuffer"==c))return bt in e?At(p,e):Nt.call(p,e);o=e,d=xt(r,n);var y=e.byteLength;if(void 0===i){if(y%n)throw U("Wrong length!");if((u=y-d)<0)throw U("Wrong length!")}else if((u=v(i)*n)+d>y)throw U("Wrong length!");a=u/n}else a=g(e),o=new Y(u=a*n);for(h(t,"_d",{b:o,o:d,l:u,e:a,v:new Q(o)});ldocument.F=Object<\/script>"),t.close(),c=t.F;r--;)delete c.prototype[o[r]];return c()};t.exports=Object.create||function(t,n){var 
e;return null!==t?(a.prototype=r(t),e=new a,a.prototype=null,e[u]=t):e=c(),void 0===n?e:i(e,n)}},function(t,n,e){var r=e(98),i=e(65).concat("length","prototype");n.f=Object.getOwnPropertyNames||function(t){return r(t,i)}},function(t,n,e){var r=e(13),i=e(10),o=e(64)("IE_PROTO"),u=Object.prototype;t.exports=Object.getPrototypeOf||function(t){return t=i(t),r(t,o)?t[o]:"function"==typeof t.constructor&&t instanceof t.constructor?t.constructor.prototype:t instanceof Object?u:null}},function(t,n,e){var r=e(5)("unscopables"),i=Array.prototype;null==i[r]&&e(14)(i,r,{}),t.exports=function(t){i[r][t]=!0}},function(t,n,e){var r=e(4);t.exports=function(t,n){if(!r(t)||t._t!==n)throw TypeError("Incompatible receiver, "+n+" required!");return t}},function(t,n,e){var r=e(9).f,i=e(13),o=e(5)("toStringTag");t.exports=function(t,n,e){t&&!i(t=e?t:t.prototype,o)&&r(t,o,{configurable:!0,value:n})}},function(t,n,e){var r=e(0),i=e(26),o=e(2),u=e(68),a="["+u+"]",c=RegExp("^"+a+a+"*"),s=RegExp(a+a+"*$"),f=function(t,n,e){var i={},a=o((function(){return!!u[t]()||"​…"!="​…"[t]()})),c=i[t]=a?n(l):u[t];e&&(i[e]=c),r(r.P+r.F*a,"String",i)},l=f.trim=function(t,n){return t=String(i(t)),1&n&&(t=t.replace(c,"")),2&n&&(t=t.replace(s,"")),t};t.exports=f},function(t,n){t.exports={}},function(t,n,e){"use strict";var r=e(1),i=e(9),o=e(8),u=e(5)("species");t.exports=function(t){var n=r[t];o&&n&&!n[u]&&i.f(n,u,{configurable:!0,get:function(){return this}})}},function(t,n){t.exports=function(t,n,e,r){if(!(t instanceof n)||void 0!==r&&r in t)throw TypeError(e+": incorrect invocation!");return t}},function(t,n,e){var r=e(11);t.exports=function(t,n,e){for(var i in n)r(t,i,n[i],e);return t}},function(t,n,e){var r=e(25);t.exports=Object("z").propertyIsEnumerable(0)?Object:function(t){return"String"==r(t)?t.split(""):Object(t)}},function(t,n){n.f={}.propertyIsEnumerable},function(t,n,e){var r=e(25),i=e(5)("toStringTag"),o="Arguments"==r(function(){return arguments}());t.exports=function(t){var n,e,u;return void 
0===t?"Undefined":null===t?"Null":"string"==typeof(e=function(t,n){try{return t[n]}catch(t){}}(n=Object(t),i))?e:o?r(n):"Object"==(u=r(n))&&"function"==typeof n.callee?"Arguments":u}},function(t,n,e){var r=e(3),i=e(20),o=e(5)("species");t.exports=function(t,n){var e,u=r(t).constructor;return void 0===u||null==(e=r(u)[o])?n:i(e)}},function(t,n,e){var r=e(7),i=e(1),o=i["__core-js_shared__"]||(i["__core-js_shared__"]={});(t.exports=function(t,n){return o[t]||(o[t]=void 0!==n?n:{})})("versions",[]).push({version:r.version,mode:e(32)?"pure":"global",copyright:"© 2019 Denis Pushkarev (zloirock.ru)"})},function(t,n,e){var r=e(15),i=e(6),o=e(34);t.exports=function(t){return function(n,e,u){var a,c=r(n),s=i(c.length),f=o(u,s);if(t&&e!=e){for(;s>f;)if((a=c[f++])!=a)return!0}else for(;s>f;f++)if((t||f in c)&&c[f]===e)return t||f||0;return!t&&-1}}},function(t,n){n.f=Object.getOwnPropertySymbols},function(t,n,e){var r=e(25);t.exports=Array.isArray||function(t){return"Array"==r(t)}},function(t,n,e){var r=e(5)("iterator"),i=!1;try{var o=[7][r]();o.return=function(){i=!0},Array.from(o,(function(){throw 2}))}catch(t){}t.exports=function(t,n){if(!n&&!i)return!1;var e=!1;try{var o=[7],u=o[r]();u.next=function(){return{done:e=!0}},o[r]=function(){return u},t(o)}catch(t){}return e}},function(t,n,e){"use strict";var r=e(3);t.exports=function(){var t=r(this),n="";return t.global&&(n+="g"),t.ignoreCase&&(n+="i"),t.multiline&&(n+="m"),t.unicode&&(n+="u"),t.sticky&&(n+="y"),n}},function(t,n,e){"use strict";var r=e(48),i=RegExp.prototype.exec;t.exports=function(t,n){var e=t.exec;if("function"==typeof e){var o=e.call(t,n);if("object"!=typeof o)throw new TypeError("RegExp exec method returned something other than an Object or null");return o}if("RegExp"!==r(t))throw new TypeError("RegExp#exec called on incompatible receiver");return i.call(t,n)}},function(t,n,e){"use strict";e(116);var r=e(11),i=e(14),o=e(2),u=e(26),a=e(5),c=e(83),s=a("species"),f=!o((function(){var t=/./;return 
t.exec=function(){var t=[];return t.groups={a:"7"},t},"7"!=="".replace(t,"$")})),l=function(){var t=/(?:)/,n=t.exec;t.exec=function(){return n.apply(this,arguments)};var e="ab".split(t);return 2===e.length&&"a"===e[0]&&"b"===e[1]}();t.exports=function(t,n,e){var h=a(t),d=!o((function(){var n={};return n[h]=function(){return 7},7!=""[t](n)})),p=d?!o((function(){var n=!1,e=/a/;return e.exec=function(){return n=!0,null},"split"===t&&(e.constructor={},e.constructor[s]=function(){return e}),e[h](""),!n})):void 0;if(!d||!p||"replace"===t&&!f||"split"===t&&!l){var v=/./[h],g=e(u,h,""[t],(function(t,n,e,r,i){return n.exec===c?d&&!i?{done:!0,value:v.call(n,e,r)}:{done:!0,value:t.call(e,n,r)}:{done:!1}})),y=g[0],m=g[1];r(String.prototype,t,y),i(RegExp.prototype,h,2==n?function(t,n){return m.call(t,this,n)}:function(t){return m.call(t,this)})}}},function(t,n,e){var r=e(19),i=e(111),o=e(78),u=e(3),a=e(6),c=e(80),s={},f={};(n=t.exports=function(t,n,e,l,h){var d,p,v,g,y=h?function(){return t}:c(t),m=r(e,l,n?2:1),b=0;if("function"!=typeof y)throw TypeError(t+" is not iterable!");if(o(y)){for(d=a(t.length);d>b;b++)if((g=n?m(u(p=t[b])[0],p[1]):m(t[b]))===s||g===f)return g}else for(v=y.call(t);!(p=v.next()).done;)if((g=i(v,m,p.value,n))===s||g===f)return g}).BREAK=s,n.RETURN=f},function(t,n,e){var r=e(1).navigator;t.exports=r&&r.userAgent||""},function(t,n,e){"use strict";var r=e(1),i=e(0),o=e(11),u=e(45),a=e(29),c=e(58),s=e(44),f=e(4),l=e(2),h=e(54),d=e(40),p=e(69);t.exports=function(t,n,e,v,g,y){var m=r[t],b=m,S=g?"set":"add",w=b&&b.prototype,_={},M=function(t){var n=w[t];o(w,t,"delete"==t?function(t){return!(y&&!f(t))&&n.call(this,0===t?0:t)}:"has"==t?function(t){return!(y&&!f(t))&&n.call(this,0===t?0:t)}:"get"==t?function(t){return y&&!f(t)?void 0:n.call(this,0===t?0:t)}:"add"==t?function(t){return n.call(this,0===t?0:t),this}:function(t,e){return n.call(this,0===t?0:t,e),this})};if("function"==typeof b&&(y||w.forEach&&!l((function(){(new b).entries().next()})))){var x=new 
b,P=x[S](y?{}:-0,1)!=x,O=l((function(){x.has(1)})),F=h((function(t){new b(t)})),A=!y&&l((function(){for(var t=new b,n=5;n--;)t[S](n,n);return!t.has(-0)}));F||((b=n((function(n,e){s(n,b,t);var r=p(new m,n,b);return null!=e&&c(e,g,r[S],r),r}))).prototype=w,w.constructor=b),(O||A)&&(M("delete"),M("has"),g&&M("get")),(A||P)&&M(S),y&&w.clear&&delete w.clear}else b=v.getConstructor(n,t,g,S),u(b.prototype,e),a.NEED=!0;return d(b,t),_[t]=b,i(i.G+i.W+i.F*(b!=m),_),y||v.setStrong(b,t,g),b}},function(t,n,e){for(var r,i=e(1),o=e(14),u=e(31),a=u("typed_array"),c=u("view"),s=!(!i.ArrayBuffer||!i.DataView),f=s,l=0,h="Int8Array,Uint8Array,Uint8ClampedArray,Int16Array,Uint16Array,Int32Array,Uint32Array,Float32Array,Float64Array".split(",");l<9;)(r=i[h[l++]])?(o(r.prototype,a,!0),o(r.prototype,c,!0)):f=!1;t.exports={ABV:s,CONSTR:f,TYPED:a,VIEW:c}},function(t,n,e){var r=e(4),i=e(1).document,o=r(i)&&r(i.createElement);t.exports=function(t){return o?i.createElement(t):{}}},function(t,n,e){n.f=e(5)},function(t,n,e){var r=e(50)("keys"),i=e(31);t.exports=function(t){return r[t]||(r[t]=i(t))}},function(t,n){t.exports="constructor,hasOwnProperty,isPrototypeOf,propertyIsEnumerable,toLocaleString,toString,valueOf".split(",")},function(t,n,e){var r=e(1).document;t.exports=r&&r.documentElement},function(t,n,e){var r=e(4),i=e(3),o=function(t,n){if(i(t),!r(n)&&null!==n)throw TypeError(n+": can't set as prototype!")};t.exports={set:Object.setPrototypeOf||("__proto__"in{}?function(t,n,r){try{(r=e(19)(Function.call,e(22).f(Object.prototype,"__proto__").set,2))(t,[]),n=!(t instanceof Array)}catch(t){n=!0}return function(t,e){return o(t,e),n?t.__proto__=e:r(t,e),t}}({},!1):void 0),check:o}},function(t,n){t.exports="\t\n\v\f\r   ᠎              \u2028\u2029\ufeff"},function(t,n,e){var r=e(4),i=e(67).set;t.exports=function(t,n,e){var o,u=n.constructor;return u!==e&&"function"==typeof u&&(o=u.prototype)!==e.prototype&&r(o)&&i&&i(t,o),t}},function(t,n,e){"use strict";var 
r=e(21),i=e(26);t.exports=function(t){var n=String(i(this)),e="",o=r(t);if(o<0||o==1/0)throw RangeError("Count can't be negative");for(;o>0;(o>>>=1)&&(n+=n))1&o&&(e+=n);return e}},function(t,n){t.exports=Math.sign||function(t){return 0==(t=+t)||t!=t?t:t<0?-1:1}},function(t,n){var e=Math.expm1;t.exports=!e||e(10)>22025.465794806718||e(10)<22025.465794806718||-2e-17!=e(-2e-17)?function(t){return 0==(t=+t)?t:t>-1e-6&&t<1e-6?t+t*t/2:Math.exp(t)-1}:e},function(t,n,e){var r=e(21),i=e(26);t.exports=function(t){return function(n,e){var o,u,a=String(i(n)),c=r(e),s=a.length;return c<0||c>=s?t?"":void 0:(o=a.charCodeAt(c))<55296||o>56319||c+1===s||(u=a.charCodeAt(c+1))<56320||u>57343?t?a.charAt(c):o:t?a.slice(c,c+2):u-56320+(o-55296<<10)+65536}}},function(t,n,e){"use strict";var r=e(32),i=e(0),o=e(11),u=e(14),a=e(42),c=e(110),s=e(40),f=e(37),l=e(5)("iterator"),h=!([].keys&&"next"in[].keys()),d=function(){return this};t.exports=function(t,n,e,p,v,g,y){c(e,n,p);var m,b,S,w=function(t){if(!h&&t in P)return P[t];switch(t){case"keys":case"values":return function(){return new e(this,t)}}return function(){return new e(this,t)}},_=n+" Iterator",M="values"==v,x=!1,P=t.prototype,O=P[l]||P["@@iterator"]||v&&P[v],F=O||w(v),A=v?M?w("entries"):F:void 0,E="Array"==n&&P.entries||O;if(E&&(S=f(E.call(new t)))!==Object.prototype&&S.next&&(s(S,_,!0),r||"function"==typeof S[l]||u(S,l,d)),M&&O&&"values"!==O.name&&(x=!0,F=function(){return O.call(this)}),r&&!y||!h&&!x&&P[l]||u(P,l,F),a[n]=F,a[_]=d,v)if(m={values:M?F:w("values"),keys:g?F:w("keys"),entries:A},y)for(b in m)b in P||o(P,b,m[b]);else i(i.P+i.F*(h||x),n,m);return m}},function(t,n,e){var r=e(76),i=e(26);t.exports=function(t,n,e){if(r(n))throw TypeError("String#"+e+" doesn't accept regex!");return String(i(t))}},function(t,n,e){var r=e(4),i=e(25),o=e(5)("match");t.exports=function(t){var n;return r(t)&&(void 0!==(n=t[o])?!!n:"RegExp"==i(t))}},function(t,n,e){var r=e(5)("match");t.exports=function(t){var 
n=/./;try{"/./"[t](n)}catch(e){try{return n[r]=!1,!"/./"[t](n)}catch(t){}}return!0}},function(t,n,e){var r=e(42),i=e(5)("iterator"),o=Array.prototype;t.exports=function(t){return void 0!==t&&(r.Array===t||o[i]===t)}},function(t,n,e){"use strict";var r=e(9),i=e(30);t.exports=function(t,n,e){n in t?r.f(t,n,i(0,e)):t[n]=e}},function(t,n,e){var r=e(48),i=e(5)("iterator"),o=e(42);t.exports=e(7).getIteratorMethod=function(t){if(null!=t)return t[i]||t["@@iterator"]||o[r(t)]}},function(t,n,e){"use strict";var r=e(10),i=e(34),o=e(6);t.exports=function(t){for(var n=r(this),e=o(n.length),u=arguments.length,a=i(u>1?arguments[1]:void 0,e),c=u>2?arguments[2]:void 0,s=void 0===c?e:i(c,e);s>a;)n[a++]=t;return n}},function(t,n,e){"use strict";var r=e(38),i=e(115),o=e(42),u=e(15);t.exports=e(74)(Array,"Array",(function(t,n){this._t=u(t),this._i=0,this._k=n}),(function(){var t=this._t,n=this._k,e=this._i++;return!t||e>=t.length?(this._t=void 0,i(1)):i(0,"keys"==n?e:"values"==n?t[e]:[e,t[e]])}),"values"),o.Arguments=o.Array,r("keys"),r("values"),r("entries")},function(t,n,e){"use strict";var r,i,o=e(55),u=RegExp.prototype.exec,a=String.prototype.replace,c=u,s=(r=/a/,i=/b*/g,u.call(r,"a"),u.call(i,"a"),0!==r.lastIndex||0!==i.lastIndex),f=void 0!==/()??/.exec("")[1];(s||f)&&(c=function(t){var n,e,r,i,c=this;return f&&(e=new RegExp("^"+c.source+"$(?!\\s)",o.call(c))),s&&(n=c.lastIndex),r=u.call(c,t),s&&r&&(c.lastIndex=c.global?r.index+r[0].length:n),f&&r&&r.length>1&&a.call(r[0],e,(function(){for(i=1;ie;)n.push(arguments[e++]);return y[++g]=function(){a("function"==typeof t?t:Function(t),n)},r(g),g},d=function(t){delete y[t]},"process"==e(25)(l)?r=function(t){l.nextTick(u(m,t,1))}:v&&v.now?r=function(t){v.now(u(m,t,1))}:p?(o=(i=new p).port2,i.port1.onmessage=b,r=u(o.postMessage,o,1)):f.addEventListener&&"function"==typeof postMessage&&!f.importScripts?(r=function(t){f.postMessage(t+"","*")},f.addEventListener("message",b,!1)):r="onreadystatechange"in 
s("script")?function(t){c.appendChild(s("script")).onreadystatechange=function(){c.removeChild(this),m.call(t)}}:function(t){setTimeout(u(m,t,1),0)}),t.exports={set:h,clear:d}},function(t,n,e){"use strict";var r=e(1),i=e(8),o=e(32),u=e(61),a=e(14),c=e(45),s=e(2),f=e(44),l=e(21),h=e(6),d=e(123),p=e(36).f,v=e(9).f,g=e(81),y=e(40),m="prototype",b="Wrong index!",S=r.ArrayBuffer,w=r.DataView,_=r.Math,M=r.RangeError,x=r.Infinity,P=S,O=_.abs,F=_.pow,A=_.floor,E=_.log,N=_.LN2,R=i?"_b":"buffer",k=i?"_l":"byteLength",T=i?"_o":"byteOffset";function I(t,n,e){var r,i,o,u=new Array(e),a=8*e-n-1,c=(1<>1,f=23===n?F(2,-24)-F(2,-77):0,l=0,h=t<0||0===t&&1/t<0?1:0;for((t=O(t))!=t||t===x?(i=t!=t?1:0,r=c):(r=A(E(t)/N),t*(o=F(2,-r))<1&&(r--,o*=2),(t+=r+s>=1?f/o:f*F(2,1-s))*o>=2&&(r++,o/=2),r+s>=c?(i=0,r=c):r+s>=1?(i=(t*o-1)*F(2,n),r+=s):(i=t*F(2,s-1)*F(2,n),r=0));n>=8;u[l++]=255&i,i/=256,n-=8);for(r=r<0;u[l++]=255&r,r/=256,a-=8);return u[--l]|=128*h,u}function j(t,n,e){var r,i=8*e-n-1,o=(1<>1,a=i-7,c=e-1,s=t[c--],f=127&s;for(s>>=7;a>0;f=256*f+t[c],c--,a-=8);for(r=f&(1<<-a)-1,f>>=-a,a+=n;a>0;r=256*r+t[c],c--,a-=8);if(0===f)f=1-u;else{if(f===o)return r?NaN:s?-x:x;r+=F(2,n),f-=u}return(s?-1:1)*r*F(2,f-n)}function L(t){return t[3]<<24|t[2]<<16|t[1]<<8|t[0]}function B(t){return[255&t]}function C(t){return[255&t,t>>8&255]}function W(t){return[255&t,t>>8&255,t>>16&255,t>>24&255]}function V(t){return I(t,52,8)}function G(t){return I(t,23,4)}function D(t,n,e){v(t[m],n,{get:function(){return this[e]}})}function U(t,n,e,r){var i=d(+e);if(i+n>t[k])throw M(b);var o=t[R]._b,u=i+t[T],a=o.slice(u,u+n);return r?a:a.reverse()}function z(t,n,e,r,i,o){var u=d(+e);if(u+n>t[k])throw M(b);for(var a=t[R]._b,c=u+t[T],s=r(+i),f=0;fQ;)(q=Y[Q++])in S||a(S,q,P[q]);o||(K.constructor=S)}var H=new w(new S(2)),J=w[m].setInt8;H.setInt8(0,2147483648),H.setInt8(1,2147483649),!H.getInt8(0)&&H.getInt8(1)||c(w[m],{setInt8:function(t,n){J.call(this,t,n<<24>>24)},setUint8:function(t,n){J.call(this,t,n<<24>>24)}},!0)}else 
S=function(t){f(this,S,"ArrayBuffer");var n=d(t);this._b=g.call(new Array(n),0),this[k]=n},w=function(t,n,e){f(this,w,"DataView"),f(t,S,"DataView");var r=t[k],i=l(n);if(i<0||i>r)throw M("Wrong offset!");if(i+(e=void 0===e?r-i:h(e))>r)throw M("Wrong length!");this[R]=t,this[T]=i,this[k]=e},i&&(D(S,"byteLength","_l"),D(w,"buffer","_b"),D(w,"byteLength","_l"),D(w,"byteOffset","_o")),c(w[m],{getInt8:function(t){return U(this,1,t)[0]<<24>>24},getUint8:function(t){return U(this,1,t)[0]},getInt16:function(t){var n=U(this,2,t,arguments[1]);return(n[1]<<8|n[0])<<16>>16},getUint16:function(t){var n=U(this,2,t,arguments[1]);return n[1]<<8|n[0]},getInt32:function(t){return L(U(this,4,t,arguments[1]))},getUint32:function(t){return L(U(this,4,t,arguments[1]))>>>0},getFloat32:function(t){return j(U(this,4,t,arguments[1]),23,4)},getFloat64:function(t){return j(U(this,8,t,arguments[1]),52,8)},setInt8:function(t,n){z(this,1,t,B,n)},setUint8:function(t,n){z(this,1,t,B,n)},setInt16:function(t,n){z(this,2,t,C,n,arguments[2])},setUint16:function(t,n){z(this,2,t,C,n,arguments[2])},setInt32:function(t,n){z(this,4,t,W,n,arguments[2])},setUint32:function(t,n){z(this,4,t,W,n,arguments[2])},setFloat32:function(t,n){z(this,4,t,G,n,arguments[2])},setFloat64:function(t,n){z(this,8,t,V,n,arguments[2])}});y(S,"ArrayBuffer"),y(w,"DataView"),a(w[m],u.VIEW,!0),n.ArrayBuffer=S,n.DataView=w},function(t,n){var e=t.exports="undefined"!=typeof window&&window.Math==Math?window:"undefined"!=typeof self&&self.Math==Math?self:Function("return this")();"number"==typeof __g&&(__g=e)},function(t,n){t.exports=function(t){return"object"==typeof t?null!==t:"function"==typeof t}},function(t,n,e){t.exports=!e(128)((function(){return 7!=Object.defineProperty({},"a",{get:function(){return 7}}).a}))},function(t,n,e){"use strict";n.__esModule=!0,n.default=void 0;var r,i=(r=e(91))&&r.__esModule?r:{default:r},o=e(18);function u(t,n){for(var e=0;e0){var u=Object.keys(e),c=a.default.find(u,(function(t){return 
n.isOS(t)}));if(c){var s=this.satisfies(e[c]);if(void 0!==s)return s}var f=a.default.find(u,(function(t){return n.isPlatform(t)}));if(f){var l=this.satisfies(e[f]);if(void 0!==l)return l}}if(o>0){var h=Object.keys(i),d=a.default.find(h,(function(t){return n.isBrowser(t,!0)}));if(void 0!==d)return this.compareVersion(i[d])}},n.isBrowser=function(t,n){void 0===n&&(n=!1);var e=this.getBrowserName().toLowerCase(),r=t.toLowerCase(),i=a.default.getBrowserTypeByAlias(r);return n&&i&&(r=i.toLowerCase()),r===e},n.compareVersion=function(t){var n=[0],e=t,r=!1,i=this.getBrowserVersion();if("string"==typeof i)return">"===t[0]||"<"===t[0]?(e=t.substr(1),"="===t[1]?(r=!0,e=t.substr(2)):n=[],">"===t[0]?n.push(1):n.push(-1)):"="===t[0]?e=t.substr(1):"~"===t[0]&&(r=!0,e=t.substr(1)),n.indexOf(a.default.compareVersions(i,e,r))>-1},n.isOS=function(t){return this.getOSName(!0)===String(t).toLowerCase()},n.isPlatform=function(t){return this.getPlatformType(!0)===String(t).toLowerCase()},n.isEngine=function(t){return this.getEngineName(!0)===String(t).toLowerCase()},n.is=function(t,n){return void 0===n&&(n=!1),this.isBrowser(t,n)||this.isOS(t)||this.isPlatform(t)},n.some=function(t){var n=this;return void 0===t&&(t=[]),t.some((function(t){return n.is(t)}))},t}();n.default=s,t.exports=n.default},function(t,n,e){"use strict";n.__esModule=!0,n.default=void 0;var r,i=(r=e(17))&&r.__esModule?r:{default:r};var o=/version\/(\d+(\.?_?\d+)+)/i,u=[{test:[/googlebot/i],describe:function(t){var n={name:"Googlebot"},e=i.default.getFirstMatch(/googlebot\/(\d+(\.\d+))/i,t)||i.default.getFirstMatch(o,t);return e&&(n.version=e),n}},{test:[/opera/i],describe:function(t){var n={name:"Opera"},e=i.default.getFirstMatch(o,t)||i.default.getFirstMatch(/(?:opera)[\s/](\d+(\.?_?\d+)+)/i,t);return e&&(n.version=e),n}},{test:[/opr\/|opios/i],describe:function(t){var n={name:"Opera"},e=i.default.getFirstMatch(/(?:opr|opios)[\s/](\S+)/i,t)||i.default.getFirstMatch(o,t);return 
e&&(n.version=e),n}},{test:[/SamsungBrowser/i],describe:function(t){var n={name:"Samsung Internet for Android"},e=i.default.getFirstMatch(o,t)||i.default.getFirstMatch(/(?:SamsungBrowser)[\s/](\d+(\.?_?\d+)+)/i,t);return e&&(n.version=e),n}},{test:[/Whale/i],describe:function(t){var n={name:"NAVER Whale Browser"},e=i.default.getFirstMatch(o,t)||i.default.getFirstMatch(/(?:whale)[\s/](\d+(?:\.\d+)+)/i,t);return e&&(n.version=e),n}},{test:[/MZBrowser/i],describe:function(t){var n={name:"MZ Browser"},e=i.default.getFirstMatch(/(?:MZBrowser)[\s/](\d+(?:\.\d+)+)/i,t)||i.default.getFirstMatch(o,t);return e&&(n.version=e),n}},{test:[/focus/i],describe:function(t){var n={name:"Focus"},e=i.default.getFirstMatch(/(?:focus)[\s/](\d+(?:\.\d+)+)/i,t)||i.default.getFirstMatch(o,t);return e&&(n.version=e),n}},{test:[/swing/i],describe:function(t){var n={name:"Swing"},e=i.default.getFirstMatch(/(?:swing)[\s/](\d+(?:\.\d+)+)/i,t)||i.default.getFirstMatch(o,t);return e&&(n.version=e),n}},{test:[/coast/i],describe:function(t){var n={name:"Opera Coast"},e=i.default.getFirstMatch(o,t)||i.default.getFirstMatch(/(?:coast)[\s/](\d+(\.?_?\d+)+)/i,t);return e&&(n.version=e),n}},{test:[/opt\/\d+(?:.?_?\d+)+/i],describe:function(t){var n={name:"Opera Touch"},e=i.default.getFirstMatch(/(?:opt)[\s/](\d+(\.?_?\d+)+)/i,t)||i.default.getFirstMatch(o,t);return e&&(n.version=e),n}},{test:[/yabrowser/i],describe:function(t){var n={name:"Yandex Browser"},e=i.default.getFirstMatch(/(?:yabrowser)[\s/](\d+(\.?_?\d+)+)/i,t)||i.default.getFirstMatch(o,t);return e&&(n.version=e),n}},{test:[/ucbrowser/i],describe:function(t){var n={name:"UC Browser"},e=i.default.getFirstMatch(o,t)||i.default.getFirstMatch(/(?:ucbrowser)[\s/](\d+(\.?_?\d+)+)/i,t);return e&&(n.version=e),n}},{test:[/Maxthon|mxios/i],describe:function(t){var n={name:"Maxthon"},e=i.default.getFirstMatch(o,t)||i.default.getFirstMatch(/(?:Maxthon|mxios)[\s/](\d+(\.?_?\d+)+)/i,t);return 
e&&(n.version=e),n}},{test:[/epiphany/i],describe:function(t){var n={name:"Epiphany"},e=i.default.getFirstMatch(o,t)||i.default.getFirstMatch(/(?:epiphany)[\s/](\d+(\.?_?\d+)+)/i,t);return e&&(n.version=e),n}},{test:[/puffin/i],describe:function(t){var n={name:"Puffin"},e=i.default.getFirstMatch(o,t)||i.default.getFirstMatch(/(?:puffin)[\s/](\d+(\.?_?\d+)+)/i,t);return e&&(n.version=e),n}},{test:[/sleipnir/i],describe:function(t){var n={name:"Sleipnir"},e=i.default.getFirstMatch(o,t)||i.default.getFirstMatch(/(?:sleipnir)[\s/](\d+(\.?_?\d+)+)/i,t);return e&&(n.version=e),n}},{test:[/k-meleon/i],describe:function(t){var n={name:"K-Meleon"},e=i.default.getFirstMatch(o,t)||i.default.getFirstMatch(/(?:k-meleon)[\s/](\d+(\.?_?\d+)+)/i,t);return e&&(n.version=e),n}},{test:[/micromessenger/i],describe:function(t){var n={name:"WeChat"},e=i.default.getFirstMatch(/(?:micromessenger)[\s/](\d+(\.?_?\d+)+)/i,t)||i.default.getFirstMatch(o,t);return e&&(n.version=e),n}},{test:[/qqbrowser/i],describe:function(t){var n={name:/qqbrowserlite/i.test(t)?"QQ Browser Lite":"QQ Browser"},e=i.default.getFirstMatch(/(?:qqbrowserlite|qqbrowser)[/](\d+(\.?_?\d+)+)/i,t)||i.default.getFirstMatch(o,t);return e&&(n.version=e),n}},{test:[/msie|trident/i],describe:function(t){var n={name:"Internet Explorer"},e=i.default.getFirstMatch(/(?:msie |rv:)(\d+(\.?_?\d+)+)/i,t);return e&&(n.version=e),n}},{test:[/\sedg\//i],describe:function(t){var n={name:"Microsoft Edge"},e=i.default.getFirstMatch(/\sedg\/(\d+(\.?_?\d+)+)/i,t);return e&&(n.version=e),n}},{test:[/edg([ea]|ios)/i],describe:function(t){var n={name:"Microsoft Edge"},e=i.default.getSecondMatch(/edg([ea]|ios)\/(\d+(\.?_?\d+)+)/i,t);return e&&(n.version=e),n}},{test:[/vivaldi/i],describe:function(t){var n={name:"Vivaldi"},e=i.default.getFirstMatch(/vivaldi\/(\d+(\.?_?\d+)+)/i,t);return e&&(n.version=e),n}},{test:[/seamonkey/i],describe:function(t){var n={name:"SeaMonkey"},e=i.default.getFirstMatch(/seamonkey\/(\d+(\.?_?\d+)+)/i,t);return 
e&&(n.version=e),n}},{test:[/sailfish/i],describe:function(t){var n={name:"Sailfish"},e=i.default.getFirstMatch(/sailfish\s?browser\/(\d+(\.\d+)?)/i,t);return e&&(n.version=e),n}},{test:[/silk/i],describe:function(t){var n={name:"Amazon Silk"},e=i.default.getFirstMatch(/silk\/(\d+(\.?_?\d+)+)/i,t);return e&&(n.version=e),n}},{test:[/phantom/i],describe:function(t){var n={name:"PhantomJS"},e=i.default.getFirstMatch(/phantomjs\/(\d+(\.?_?\d+)+)/i,t);return e&&(n.version=e),n}},{test:[/slimerjs/i],describe:function(t){var n={name:"SlimerJS"},e=i.default.getFirstMatch(/slimerjs\/(\d+(\.?_?\d+)+)/i,t);return e&&(n.version=e),n}},{test:[/blackberry|\bbb\d+/i,/rim\stablet/i],describe:function(t){var n={name:"BlackBerry"},e=i.default.getFirstMatch(o,t)||i.default.getFirstMatch(/blackberry[\d]+\/(\d+(\.?_?\d+)+)/i,t);return e&&(n.version=e),n}},{test:[/(web|hpw)[o0]s/i],describe:function(t){var n={name:"WebOS Browser"},e=i.default.getFirstMatch(o,t)||i.default.getFirstMatch(/w(?:eb)?[o0]sbrowser\/(\d+(\.?_?\d+)+)/i,t);return e&&(n.version=e),n}},{test:[/bada/i],describe:function(t){var n={name:"Bada"},e=i.default.getFirstMatch(/dolfin\/(\d+(\.?_?\d+)+)/i,t);return e&&(n.version=e),n}},{test:[/tizen/i],describe:function(t){var n={name:"Tizen"},e=i.default.getFirstMatch(/(?:tizen\s?)?browser\/(\d+(\.?_?\d+)+)/i,t)||i.default.getFirstMatch(o,t);return e&&(n.version=e),n}},{test:[/qupzilla/i],describe:function(t){var n={name:"QupZilla"},e=i.default.getFirstMatch(/(?:qupzilla)[\s/](\d+(\.?_?\d+)+)/i,t)||i.default.getFirstMatch(o,t);return e&&(n.version=e),n}},{test:[/firefox|iceweasel|fxios/i],describe:function(t){var n={name:"Firefox"},e=i.default.getFirstMatch(/(?:firefox|iceweasel|fxios)[\s/](\d+(\.?_?\d+)+)/i,t);return e&&(n.version=e),n}},{test:[/electron/i],describe:function(t){var n={name:"Electron"},e=i.default.getFirstMatch(/(?:electron)\/(\d+(\.?_?\d+)+)/i,t);return e&&(n.version=e),n}},{test:[/MiuiBrowser/i],describe:function(t){var 
n={name:"Miui"},e=i.default.getFirstMatch(/(?:MiuiBrowser)[\s/](\d+(\.?_?\d+)+)/i,t);return e&&(n.version=e),n}},{test:[/chromium/i],describe:function(t){var n={name:"Chromium"},e=i.default.getFirstMatch(/(?:chromium)[\s/](\d+(\.?_?\d+)+)/i,t)||i.default.getFirstMatch(o,t);return e&&(n.version=e),n}},{test:[/chrome|crios|crmo/i],describe:function(t){var n={name:"Chrome"},e=i.default.getFirstMatch(/(?:chrome|crios|crmo)\/(\d+(\.?_?\d+)+)/i,t);return e&&(n.version=e),n}},{test:[/GSA/i],describe:function(t){var n={name:"Google Search"},e=i.default.getFirstMatch(/(?:GSA)\/(\d+(\.?_?\d+)+)/i,t);return e&&(n.version=e),n}},{test:function(t){var n=!t.test(/like android/i),e=t.test(/android/i);return n&&e},describe:function(t){var n={name:"Android Browser"},e=i.default.getFirstMatch(o,t);return e&&(n.version=e),n}},{test:[/playstation 4/i],describe:function(t){var n={name:"PlayStation 4"},e=i.default.getFirstMatch(o,t);return e&&(n.version=e),n}},{test:[/safari|applewebkit/i],describe:function(t){var n={name:"Safari"},e=i.default.getFirstMatch(o,t);return e&&(n.version=e),n}},{test:[/.*/i],describe:function(t){var n=-1!==t.search("\\(")?/^(.*)\/(.*)[ \t]\((.*)/:/^(.*)\/(.*) /;return{name:i.default.getFirstMatch(n,t),version:i.default.getSecondMatch(n,t)}}}];n.default=u,t.exports=n.default},function(t,n,e){"use strict";n.__esModule=!0,n.default=void 0;var r,i=(r=e(17))&&r.__esModule?r:{default:r},o=e(18);var u=[{test:[/Roku\/DVP/],describe:function(t){var n=i.default.getFirstMatch(/Roku\/DVP-(\d+\.\d+)/i,t);return{name:o.OS_MAP.Roku,version:n}}},{test:[/windows phone/i],describe:function(t){var n=i.default.getFirstMatch(/windows phone (?:os)?\s?(\d+(\.\d+)*)/i,t);return{name:o.OS_MAP.WindowsPhone,version:n}}},{test:[/windows /i],describe:function(t){var n=i.default.getFirstMatch(/Windows ((NT|XP)( \d\d?.\d)?)/i,t),e=i.default.getWindowsVersionName(n);return{name:o.OS_MAP.Windows,version:n,versionName:e}}},{test:[/Macintosh(.*?) 
FxiOS(.*?)\//],describe:function(t){var n={name:o.OS_MAP.iOS},e=i.default.getSecondMatch(/(Version\/)(\d[\d.]+)/,t);return e&&(n.version=e),n}},{test:[/macintosh/i],describe:function(t){var n=i.default.getFirstMatch(/mac os x (\d+(\.?_?\d+)+)/i,t).replace(/[_\s]/g,"."),e=i.default.getMacOSVersionName(n),r={name:o.OS_MAP.MacOS,version:n};return e&&(r.versionName=e),r}},{test:[/(ipod|iphone|ipad)/i],describe:function(t){var n=i.default.getFirstMatch(/os (\d+([_\s]\d+)*) like mac os x/i,t).replace(/[_\s]/g,".");return{name:o.OS_MAP.iOS,version:n}}},{test:function(t){var n=!t.test(/like android/i),e=t.test(/android/i);return n&&e},describe:function(t){var n=i.default.getFirstMatch(/android[\s/-](\d+(\.\d+)*)/i,t),e=i.default.getAndroidVersionName(n),r={name:o.OS_MAP.Android,version:n};return e&&(r.versionName=e),r}},{test:[/(web|hpw)[o0]s/i],describe:function(t){var n=i.default.getFirstMatch(/(?:web|hpw)[o0]s\/(\d+(\.\d+)*)/i,t),e={name:o.OS_MAP.WebOS};return n&&n.length&&(e.version=n),e}},{test:[/blackberry|\bbb\d+/i,/rim\stablet/i],describe:function(t){var n=i.default.getFirstMatch(/rim\stablet\sos\s(\d+(\.\d+)*)/i,t)||i.default.getFirstMatch(/blackberry\d+\/(\d+([_\s]\d+)*)/i,t)||i.default.getFirstMatch(/\bbb(\d+)/i,t);return{name:o.OS_MAP.BlackBerry,version:n}}},{test:[/bada/i],describe:function(t){var n=i.default.getFirstMatch(/bada\/(\d+(\.\d+)*)/i,t);return{name:o.OS_MAP.Bada,version:n}}},{test:[/tizen/i],describe:function(t){var n=i.default.getFirstMatch(/tizen[/\s](\d+(\.\d+)*)/i,t);return{name:o.OS_MAP.Tizen,version:n}}},{test:[/linux/i],describe:function(){return{name:o.OS_MAP.Linux}}},{test:[/CrOS/],describe:function(){return{name:o.OS_MAP.ChromeOS}}},{test:[/PlayStation 4/],describe:function(t){var n=i.default.getFirstMatch(/PlayStation 4[/\s](\d+(\.\d+)*)/i,t);return{name:o.OS_MAP.PlayStation4,version:n}}}];n.default=u,t.exports=n.default},function(t,n,e){"use strict";n.__esModule=!0,n.default=void 0;var 
r,i=(r=e(17))&&r.__esModule?r:{default:r},o=e(18);var u=[{test:[/googlebot/i],describe:function(){return{type:"bot",vendor:"Google"}}},{test:[/huawei/i],describe:function(t){var n=i.default.getFirstMatch(/(can-l01)/i,t)&&"Nova",e={type:o.PLATFORMS_MAP.mobile,vendor:"Huawei"};return n&&(e.model=n),e}},{test:[/nexus\s*(?:7|8|9|10).*/i],describe:function(){return{type:o.PLATFORMS_MAP.tablet,vendor:"Nexus"}}},{test:[/ipad/i],describe:function(){return{type:o.PLATFORMS_MAP.tablet,vendor:"Apple",model:"iPad"}}},{test:[/Macintosh(.*?) FxiOS(.*?)\//],describe:function(){return{type:o.PLATFORMS_MAP.tablet,vendor:"Apple",model:"iPad"}}},{test:[/kftt build/i],describe:function(){return{type:o.PLATFORMS_MAP.tablet,vendor:"Amazon",model:"Kindle Fire HD 7"}}},{test:[/silk/i],describe:function(){return{type:o.PLATFORMS_MAP.tablet,vendor:"Amazon"}}},{test:[/tablet(?! pc)/i],describe:function(){return{type:o.PLATFORMS_MAP.tablet}}},{test:function(t){var n=t.test(/ipod|iphone/i),e=t.test(/like (ipod|iphone)/i);return n&&!e},describe:function(t){var n=i.default.getFirstMatch(/(ipod|iphone)/i,t);return{type:o.PLATFORMS_MAP.mobile,vendor:"Apple",model:n}}},{test:[/nexus\s*[0-6].*/i,/galaxy nexus/i],describe:function(){return{type:o.PLATFORMS_MAP.mobile,vendor:"Nexus"}}},{test:[/[^-]mobi/i],describe:function(){return{type:o.PLATFORMS_MAP.mobile}}},{test:function(t){return"blackberry"===t.getBrowserName(!0)},describe:function(){return{type:o.PLATFORMS_MAP.mobile,vendor:"BlackBerry"}}},{test:function(t){return"bada"===t.getBrowserName(!0)},describe:function(){return{type:o.PLATFORMS_MAP.mobile}}},{test:function(t){return"windows phone"===t.getBrowserName()},describe:function(){return{type:o.PLATFORMS_MAP.mobile,vendor:"Microsoft"}}},{test:function(t){var 
n=Number(String(t.getOSVersion()).split(".")[0]);return"android"===t.getOSName(!0)&&n>=3},describe:function(){return{type:o.PLATFORMS_MAP.tablet}}},{test:function(t){return"android"===t.getOSName(!0)},describe:function(){return{type:o.PLATFORMS_MAP.mobile}}},{test:function(t){return"macos"===t.getOSName(!0)},describe:function(){return{type:o.PLATFORMS_MAP.desktop,vendor:"Apple"}}},{test:function(t){return"windows"===t.getOSName(!0)},describe:function(){return{type:o.PLATFORMS_MAP.desktop}}},{test:function(t){return"linux"===t.getOSName(!0)},describe:function(){return{type:o.PLATFORMS_MAP.desktop}}},{test:function(t){return"playstation 4"===t.getOSName(!0)},describe:function(){return{type:o.PLATFORMS_MAP.tv}}},{test:function(t){return"roku"===t.getOSName(!0)},describe:function(){return{type:o.PLATFORMS_MAP.tv}}}];n.default=u,t.exports=n.default},function(t,n,e){"use strict";n.__esModule=!0,n.default=void 0;var r,i=(r=e(17))&&r.__esModule?r:{default:r},o=e(18);var u=[{test:function(t){return"microsoft edge"===t.getBrowserName(!0)},describe:function(t){if(/\sedg\//i.test(t))return{name:o.ENGINE_MAP.Blink};var n=i.default.getFirstMatch(/edge\/(\d+(\.?_?\d+)+)/i,t);return{name:o.ENGINE_MAP.EdgeHTML,version:n}}},{test:[/trident/i],describe:function(t){var n={name:o.ENGINE_MAP.Trident},e=i.default.getFirstMatch(/trident\/(\d+(\.?_?\d+)+)/i,t);return e&&(n.version=e),n}},{test:function(t){return t.test(/presto/i)},describe:function(t){var n={name:o.ENGINE_MAP.Presto},e=i.default.getFirstMatch(/presto\/(\d+(\.?_?\d+)+)/i,t);return e&&(n.version=e),n}},{test:function(t){var n=t.test(/gecko/i),e=t.test(/like gecko/i);return n&&!e},describe:function(t){var n={name:o.ENGINE_MAP.Gecko},e=i.default.getFirstMatch(/gecko\/(\d+(\.?_?\d+)+)/i,t);return e&&(n.version=e),n}},{test:[/(apple)?webkit\/537\.36/i],describe:function(){return{name:o.ENGINE_MAP.Blink}}},{test:[/(apple)?webkit/i],describe:function(t){var 
n={name:o.ENGINE_MAP.WebKit},e=i.default.getFirstMatch(/webkit\/(\d+(\.?_?\d+)+)/i,t);return e&&(n.version=e),n}}];n.default=u,t.exports=n.default},function(t,n,e){t.exports=!e(8)&&!e(2)((function(){return 7!=Object.defineProperty(e(62)("div"),"a",{get:function(){return 7}}).a}))},function(t,n,e){var r=e(1),i=e(7),o=e(32),u=e(63),a=e(9).f;t.exports=function(t){var n=i.Symbol||(i.Symbol=o?{}:r.Symbol||{});"_"==t.charAt(0)||t in n||a(n,t,{value:u.f(t)})}},function(t,n,e){var r=e(13),i=e(15),o=e(51)(!1),u=e(64)("IE_PROTO");t.exports=function(t,n){var e,a=i(t),c=0,s=[];for(e in a)e!=u&&r(a,e)&&s.push(e);for(;n.length>c;)r(a,e=n[c++])&&(~o(s,e)||s.push(e));return s}},function(t,n,e){var r=e(9),i=e(3),o=e(33);t.exports=e(8)?Object.defineProperties:function(t,n){i(t);for(var e,u=o(n),a=u.length,c=0;a>c;)r.f(t,e=u[c++],n[e]);return t}},function(t,n,e){var r=e(15),i=e(36).f,o={}.toString,u="object"==typeof window&&window&&Object.getOwnPropertyNames?Object.getOwnPropertyNames(window):[];t.exports.f=function(t){return u&&"[object Window]"==o.call(t)?function(t){try{return i(t)}catch(t){return u.slice()}}(t):i(r(t))}},function(t,n,e){"use strict";var r=e(8),i=e(33),o=e(52),u=e(47),a=e(10),c=e(46),s=Object.assign;t.exports=!s||e(2)((function(){var t={},n={},e=Symbol(),r="abcdefghijklmnopqrst";return t[e]=7,r.split("").forEach((function(t){n[t]=t})),7!=s({},t)[e]||Object.keys(s({},n)).join("")!=r}))?function(t,n){for(var e=a(t),s=arguments.length,f=1,l=o.f,h=u.f;s>f;)for(var d,p=c(arguments[f++]),v=l?i(p).concat(l(p)):i(p),g=v.length,y=0;g>y;)d=v[y++],r&&!h.call(p,d)||(e[d]=p[d]);return e}:s},function(t,n){t.exports=Object.is||function(t,n){return t===n?0!==t||1/t==1/n:t!=t&&n!=n}},function(t,n,e){"use strict";var r=e(20),i=e(4),o=e(104),u=[].slice,a={},c=function(t,n,e){if(!(n in a)){for(var r=[],i=0;i>>0||(u.test(e)?16:10))}:r},function(t,n,e){var r=e(1).parseFloat,i=e(41).trim;t.exports=1/r(e(68)+"-0")!=-1/0?function(t){var n=i(String(t),3),e=r(n);return 
0===e&&"-"==n.charAt(0)?-0:e}:r},function(t,n,e){var r=e(25);t.exports=function(t,n){if("number"!=typeof t&&"Number"!=r(t))throw TypeError(n);return+t}},function(t,n,e){var r=e(4),i=Math.floor;t.exports=function(t){return!r(t)&&isFinite(t)&&i(t)===t}},function(t,n){t.exports=Math.log1p||function(t){return(t=+t)>-1e-8&&t<1e-8?t-t*t/2:Math.log(1+t)}},function(t,n,e){"use strict";var r=e(35),i=e(30),o=e(40),u={};e(14)(u,e(5)("iterator"),(function(){return this})),t.exports=function(t,n,e){t.prototype=r(u,{next:i(1,e)}),o(t,n+" Iterator")}},function(t,n,e){var r=e(3);t.exports=function(t,n,e,i){try{return i?n(r(e)[0],e[1]):n(e)}catch(n){var o=t.return;throw void 0!==o&&r(o.call(t)),n}}},function(t,n,e){var r=e(224);t.exports=function(t,n){return new(r(t))(n)}},function(t,n,e){var r=e(20),i=e(10),o=e(46),u=e(6);t.exports=function(t,n,e,a,c){r(n);var s=i(t),f=o(s),l=u(s.length),h=c?l-1:0,d=c?-1:1;if(e<2)for(;;){if(h in f){a=f[h],h+=d;break}if(h+=d,c?h<0:l<=h)throw TypeError("Reduce of empty array with no initial value")}for(;c?h>=0:l>h;h+=d)h in f&&(a=n(a,f[h],h,s));return a}},function(t,n,e){"use strict";var r=e(10),i=e(34),o=e(6);t.exports=[].copyWithin||function(t,n){var e=r(this),u=o(e.length),a=i(t,u),c=i(n,u),s=arguments.length>2?arguments[2]:void 0,f=Math.min((void 0===s?u:i(s,u))-c,u-a),l=1;for(c0;)c in e?e[a]=e[c]:delete e[a],a+=l,c+=l;return e}},function(t,n){t.exports=function(t,n){return{value:n,done:!!t}}},function(t,n,e){"use strict";var r=e(83);e(0)({target:"RegExp",proto:!0,forced:r!==/./.exec},{exec:r})},function(t,n,e){e(8)&&"g"!=/./g.flags&&e(9).f(RegExp.prototype,"flags",{configurable:!0,get:e(55)})},function(t,n,e){"use strict";var r,i,o,u,a=e(32),c=e(1),s=e(19),f=e(48),l=e(0),h=e(4),d=e(20),p=e(44),v=e(58),g=e(49),y=e(85).set,m=e(244)(),b=e(119),S=e(245),w=e(59),_=e(120),M=c.TypeError,x=c.process,P=x&&x.versions,O=P&&P.v8||"",F=c.Promise,A="process"==f(x),E=function(){},N=i=b.f,R=!!function(){try{var 
t=F.resolve(1),n=(t.constructor={})[e(5)("species")]=function(t){t(E,E)};return(A||"function"==typeof PromiseRejectionEvent)&&t.then(E)instanceof n&&0!==O.indexOf("6.6")&&-1===w.indexOf("Chrome/66")}catch(t){}}(),k=function(t){var n;return!(!h(t)||"function"!=typeof(n=t.then))&&n},T=function(t,n){if(!t._n){t._n=!0;var e=t._c;m((function(){for(var r=t._v,i=1==t._s,o=0,u=function(n){var e,o,u,a=i?n.ok:n.fail,c=n.resolve,s=n.reject,f=n.domain;try{a?(i||(2==t._h&&L(t),t._h=1),!0===a?e=r:(f&&f.enter(),e=a(r),f&&(f.exit(),u=!0)),e===n.promise?s(M("Promise-chain cycle")):(o=k(e))?o.call(e,c,s):c(e)):s(r)}catch(t){f&&!u&&f.exit(),s(t)}};e.length>o;)u(e[o++]);t._c=[],t._n=!1,n&&!t._h&&I(t)}))}},I=function(t){y.call(c,(function(){var n,e,r,i=t._v,o=j(t);if(o&&(n=S((function(){A?x.emit("unhandledRejection",i,t):(e=c.onunhandledrejection)?e({promise:t,reason:i}):(r=c.console)&&r.error&&r.error("Unhandled promise rejection",i)})),t._h=A||j(t)?2:1),t._a=void 0,o&&n.e)throw n.v}))},j=function(t){return 1!==t._h&&0===(t._a||t._c).length},L=function(t){y.call(c,(function(){var n;A?x.emit("rejectionHandled",t):(n=c.onrejectionhandled)&&n({promise:t,reason:t._v})}))},B=function(t){var n=this;n._d||(n._d=!0,(n=n._w||n)._v=t,n._s=2,n._a||(n._a=n._c.slice()),T(n,!0))},C=function(t){var n,e=this;if(!e._d){e._d=!0,e=e._w||e;try{if(e===t)throw M("Promise can't be resolved itself");(n=k(t))?m((function(){var r={_w:e,_d:!1};try{n.call(t,s(C,r,1),s(B,r,1))}catch(t){B.call(r,t)}})):(e._v=t,e._s=1,T(e,!1))}catch(t){B.call({_w:e,_d:!1},t)}}};R||(F=function(t){p(this,F,"Promise","_h"),d(t),r.call(this);try{t(s(C,this,1),s(B,this,1))}catch(t){B.call(this,t)}},(r=function(t){this._c=[],this._a=void 0,this._s=0,this._d=!1,this._v=void 0,this._h=0,this._n=!1}).prototype=e(45)(F.prototype,{then:function(t,n){var e=N(g(this,F));return e.ok="function"!=typeof t||t,e.fail="function"==typeof n&&n,e.domain=A?x.domain:void 
0,this._c.push(e),this._a&&this._a.push(e),this._s&&T(this,!1),e.promise},catch:function(t){return this.then(void 0,t)}}),o=function(){var t=new r;this.promise=t,this.resolve=s(C,t,1),this.reject=s(B,t,1)},b.f=N=function(t){return t===F||t===u?new o(t):i(t)}),l(l.G+l.W+l.F*!R,{Promise:F}),e(40)(F,"Promise"),e(43)("Promise"),u=e(7).Promise,l(l.S+l.F*!R,"Promise",{reject:function(t){var n=N(this);return(0,n.reject)(t),n.promise}}),l(l.S+l.F*(a||!R),"Promise",{resolve:function(t){return _(a&&this===u?F:this,t)}}),l(l.S+l.F*!(R&&e(54)((function(t){F.all(t).catch(E)}))),"Promise",{all:function(t){var n=this,e=N(n),r=e.resolve,i=e.reject,o=S((function(){var e=[],o=0,u=1;v(t,!1,(function(t){var a=o++,c=!1;e.push(void 0),u++,n.resolve(t).then((function(t){c||(c=!0,e[a]=t,--u||r(e))}),i)})),--u||r(e)}));return o.e&&i(o.v),e.promise},race:function(t){var n=this,e=N(n),r=e.reject,i=S((function(){v(t,!1,(function(t){n.resolve(t).then(e.resolve,r)}))}));return i.e&&r(i.v),e.promise}})},function(t,n,e){"use strict";var r=e(20);function i(t){var n,e;this.promise=new t((function(t,r){if(void 0!==n||void 0!==e)throw TypeError("Bad Promise constructor");n=t,e=r})),this.resolve=r(n),this.reject=r(e)}t.exports.f=function(t){return new i(t)}},function(t,n,e){var r=e(3),i=e(4),o=e(119);t.exports=function(t,n){if(r(t),i(n)&&n.constructor===t)return n;var e=o.f(t);return(0,e.resolve)(n),e.promise}},function(t,n,e){"use strict";var r=e(9).f,i=e(35),o=e(45),u=e(19),a=e(44),c=e(58),s=e(74),f=e(115),l=e(43),h=e(8),d=e(29).fastKey,p=e(39),v=h?"_s":"size",g=function(t,n){var e,r=d(n);if("F"!==r)return t._i[r];for(e=t._f;e;e=e.n)if(e.k==n)return e};t.exports={getConstructor:function(t,n,e,s){var f=t((function(t,r){a(t,f,n,"_i"),t._t=n,t._i=i(null),t._f=void 0,t._l=void 0,t[v]=0,null!=r&&c(r,e,t[s],t)}));return o(f.prototype,{clear:function(){for(var t=p(this,n),e=t._i,r=t._f;r;r=r.n)r.r=!0,r.p&&(r.p=r.p.n=void 0),delete e[r.i];t._f=t._l=void 0,t[v]=0},delete:function(t){var 
e=p(this,n),r=g(e,t);if(r){var i=r.n,o=r.p;delete e._i[r.i],r.r=!0,o&&(o.n=i),i&&(i.p=o),e._f==r&&(e._f=i),e._l==r&&(e._l=o),e[v]--}return!!r},forEach:function(t){p(this,n);for(var e,r=u(t,arguments.length>1?arguments[1]:void 0,3);e=e?e.n:this._f;)for(r(e.v,e.k,this);e&&e.r;)e=e.p},has:function(t){return!!g(p(this,n),t)}}),h&&r(f.prototype,"size",{get:function(){return p(this,n)[v]}}),f},def:function(t,n,e){var r,i,o=g(t,n);return o?o.v=e:(t._l=o={i:i=d(n,!0),k:n,v:e,p:r=t._l,n:void 0,r:!1},t._f||(t._f=o),r&&(r.n=o),t[v]++,"F"!==i&&(t._i[i]=o)),t},getEntry:g,setStrong:function(t,n,e){s(t,n,(function(t,e){this._t=p(t,n),this._k=e,this._l=void 0}),(function(){for(var t=this._k,n=this._l;n&&n.r;)n=n.p;return this._t&&(this._l=n=n?n.n:this._t._f)?f(0,"keys"==t?n.k:"values"==t?n.v:[n.k,n.v]):(this._t=void 0,f(1))}),e?"entries":"values",!e,!0),l(n)}}},function(t,n,e){"use strict";var r=e(45),i=e(29).getWeak,o=e(3),u=e(4),a=e(44),c=e(58),s=e(24),f=e(13),l=e(39),h=s(5),d=s(6),p=0,v=function(t){return t._l||(t._l=new g)},g=function(){this.a=[]},y=function(t,n){return h(t.a,(function(t){return t[0]===n}))};g.prototype={get:function(t){var n=y(this,t);if(n)return n[1]},has:function(t){return!!y(this,t)},set:function(t,n){var e=y(this,t);e?e[1]=n:this.a.push([t,n])},delete:function(t){var n=d(this.a,(function(n){return n[0]===t}));return~n&&this.a.splice(n,1),!!~n}},t.exports={getConstructor:function(t,n,e,o){var s=t((function(t,r){a(t,s,n,"_i"),t._t=n,t._i=p++,t._l=void 0,null!=r&&c(r,e,t[o],t)}));return r(s.prototype,{delete:function(t){if(!u(t))return!1;var e=i(t);return!0===e?v(l(this,n)).delete(t):e&&f(e,this._i)&&delete e[this._i]},has:function(t){if(!u(t))return!1;var e=i(t);return!0===e?v(l(this,n)).has(t):e&&f(e,this._i)}}),s},def:function(t,n,e){var r=i(o(n),!0);return!0===r?v(t).set(n,e):r[t._i]=e,t},ufstore:v}},function(t,n,e){var r=e(21),i=e(6);t.exports=function(t){if(void 0===t)return 0;var n=r(t),e=i(n);if(n!==e)throw RangeError("Wrong length!");return 
e}},function(t,n,e){var r=e(36),i=e(52),o=e(3),u=e(1).Reflect;t.exports=u&&u.ownKeys||function(t){var n=r.f(o(t)),e=i.f;return e?n.concat(e(t)):n}},function(t,n,e){var r=e(6),i=e(70),o=e(26);t.exports=function(t,n,e,u){var a=String(o(t)),c=a.length,s=void 0===e?" ":String(e),f=r(n);if(f<=c||""==s)return a;var l=f-c,h=i.call(s,Math.ceil(l/s.length));return h.length>l&&(h=h.slice(0,l)),u?h+a:a+h}},function(t,n,e){var r=e(8),i=e(33),o=e(15),u=e(47).f;t.exports=function(t){return function(n){for(var e,a=o(n),c=i(a),s=c.length,f=0,l=[];s>f;)e=c[f++],r&&!u.call(a,e)||l.push(t?[e,a[e]]:a[e]);return l}}},function(t,n){var e=t.exports={version:"2.6.9"};"number"==typeof __e&&(__e=e)},function(t,n){t.exports=function(t){try{return!!t()}catch(t){return!0}}},function(t,n,e){e(130),t.exports=e(90)},function(t,n,e){"use strict";e(131);var r,i=(r=e(303))&&r.__esModule?r:{default:r};i.default._babelPolyfill&&"undefined"!=typeof console&&console.warn&&console.warn("@babel/polyfill is loaded more than once on this page. This is probably not desirable/intended and may have consequences if different versions of the polyfills are applied sequentially. 
If you do need to load the polyfill more than once, use @babel/polyfill/noConflict instead to bypass the warning."),i.default._babelPolyfill=!0},function(t,n,e){"use strict";e(132),e(275),e(277),e(280),e(282),e(284),e(286),e(288),e(290),e(292),e(294),e(296),e(298),e(302)},function(t,n,e){e(133),e(136),e(137),e(138),e(139),e(140),e(141),e(142),e(143),e(144),e(145),e(146),e(147),e(148),e(149),e(150),e(151),e(152),e(153),e(154),e(155),e(156),e(157),e(158),e(159),e(160),e(161),e(162),e(163),e(164),e(165),e(166),e(167),e(168),e(169),e(170),e(171),e(172),e(173),e(174),e(175),e(176),e(177),e(179),e(180),e(181),e(182),e(183),e(184),e(185),e(186),e(187),e(188),e(189),e(190),e(191),e(192),e(193),e(194),e(195),e(196),e(197),e(198),e(199),e(200),e(201),e(202),e(203),e(204),e(205),e(206),e(207),e(208),e(209),e(210),e(211),e(212),e(214),e(215),e(217),e(218),e(219),e(220),e(221),e(222),e(223),e(225),e(226),e(227),e(228),e(229),e(230),e(231),e(232),e(233),e(234),e(235),e(236),e(237),e(82),e(238),e(116),e(239),e(117),e(240),e(241),e(242),e(243),e(118),e(246),e(247),e(248),e(249),e(250),e(251),e(252),e(253),e(254),e(255),e(256),e(257),e(258),e(259),e(260),e(261),e(262),e(263),e(264),e(265),e(266),e(267),e(268),e(269),e(270),e(271),e(272),e(273),e(274),t.exports=e(7)},function(t,n,e){"use strict";var r=e(1),i=e(13),o=e(8),u=e(0),a=e(11),c=e(29).KEY,s=e(2),f=e(50),l=e(40),h=e(31),d=e(5),p=e(63),v=e(97),g=e(135),y=e(53),m=e(3),b=e(4),S=e(10),w=e(15),_=e(28),M=e(30),x=e(35),P=e(100),O=e(22),F=e(52),A=e(9),E=e(33),N=O.f,R=A.f,k=P.f,T=r.Symbol,I=r.JSON,j=I&&I.stringify,L=d("_hidden"),B=d("toPrimitive"),C={}.propertyIsEnumerable,W=f("symbol-registry"),V=f("symbols"),G=f("op-symbols"),D=Object.prototype,U="function"==typeof T&&!!F.f,z=r.QObject,q=!z||!z.prototype||!z.prototype.findChild,K=o&&s((function(){return 7!=x(R({},"a",{get:function(){return R(this,"a",{value:7}).a}})).a}))?function(t,n,e){var r=N(D,n);r&&delete D[n],R(t,n,e),r&&t!==D&&R(D,n,r)}:R,Y=function(t){var 
n=V[t]=x(T.prototype);return n._k=t,n},Q=U&&"symbol"==typeof T.iterator?function(t){return"symbol"==typeof t}:function(t){return t instanceof T},H=function(t,n,e){return t===D&&H(G,n,e),m(t),n=_(n,!0),m(e),i(V,n)?(e.enumerable?(i(t,L)&&t[L][n]&&(t[L][n]=!1),e=x(e,{enumerable:M(0,!1)})):(i(t,L)||R(t,L,M(1,{})),t[L][n]=!0),K(t,n,e)):R(t,n,e)},J=function(t,n){m(t);for(var e,r=g(n=w(n)),i=0,o=r.length;o>i;)H(t,e=r[i++],n[e]);return t},X=function(t){var n=C.call(this,t=_(t,!0));return!(this===D&&i(V,t)&&!i(G,t))&&(!(n||!i(this,t)||!i(V,t)||i(this,L)&&this[L][t])||n)},Z=function(t,n){if(t=w(t),n=_(n,!0),t!==D||!i(V,n)||i(G,n)){var e=N(t,n);return!e||!i(V,n)||i(t,L)&&t[L][n]||(e.enumerable=!0),e}},$=function(t){for(var n,e=k(w(t)),r=[],o=0;e.length>o;)i(V,n=e[o++])||n==L||n==c||r.push(n);return r},tt=function(t){for(var n,e=t===D,r=k(e?G:w(t)),o=[],u=0;r.length>u;)!i(V,n=r[u++])||e&&!i(D,n)||o.push(V[n]);return o};U||(a((T=function(){if(this instanceof T)throw TypeError("Symbol is not a constructor!");var t=h(arguments.length>0?arguments[0]:void 0),n=function(e){this===D&&n.call(G,e),i(this,L)&&i(this[L],t)&&(this[L][t]=!1),K(this,t,M(1,e))};return o&&q&&K(D,t,{configurable:!0,set:n}),Y(t)}).prototype,"toString",(function(){return this._k})),O.f=Z,A.f=H,e(36).f=P.f=$,e(47).f=X,F.f=tt,o&&!e(32)&&a(D,"propertyIsEnumerable",X,!0),p.f=function(t){return Y(d(t))}),u(u.G+u.W+u.F*!U,{Symbol:T});for(var nt="hasInstance,isConcatSpreadable,iterator,match,replace,search,species,split,toPrimitive,toStringTag,unscopables".split(","),et=0;nt.length>et;)d(nt[et++]);for(var rt=E(d.store),it=0;rt.length>it;)v(rt[it++]);u(u.S+u.F*!U,"Symbol",{for:function(t){return i(W,t+="")?W[t]:W[t]=T(t)},keyFor:function(t){if(!Q(t))throw TypeError(t+" is not a symbol!");for(var n in W)if(W[n]===t)return n},useSetter:function(){q=!0},useSimple:function(){q=!1}}),u(u.S+u.F*!U,"Object",{create:function(t,n){return void 
0===n?x(t):J(x(t),n)},defineProperty:H,defineProperties:J,getOwnPropertyDescriptor:Z,getOwnPropertyNames:$,getOwnPropertySymbols:tt});var ot=s((function(){F.f(1)}));u(u.S+u.F*ot,"Object",{getOwnPropertySymbols:function(t){return F.f(S(t))}}),I&&u(u.S+u.F*(!U||s((function(){var t=T();return"[null]"!=j([t])||"{}"!=j({a:t})||"{}"!=j(Object(t))}))),"JSON",{stringify:function(t){for(var n,e,r=[t],i=1;arguments.length>i;)r.push(arguments[i++]);if(e=n=r[1],(b(n)||void 0!==t)&&!Q(t))return y(n)||(n=function(t,n){if("function"==typeof e&&(n=e.call(this,t,n)),!Q(n))return n}),r[1]=n,j.apply(I,r)}}),T.prototype[B]||e(14)(T.prototype,B,T.prototype.valueOf),l(T,"Symbol"),l(Math,"Math",!0),l(r.JSON,"JSON",!0)},function(t,n,e){t.exports=e(50)("native-function-to-string",Function.toString)},function(t,n,e){var r=e(33),i=e(52),o=e(47);t.exports=function(t){var n=r(t),e=i.f;if(e)for(var u,a=e(t),c=o.f,s=0;a.length>s;)c.call(t,u=a[s++])&&n.push(u);return n}},function(t,n,e){var r=e(0);r(r.S,"Object",{create:e(35)})},function(t,n,e){var r=e(0);r(r.S+r.F*!e(8),"Object",{defineProperty:e(9).f})},function(t,n,e){var r=e(0);r(r.S+r.F*!e(8),"Object",{defineProperties:e(99)})},function(t,n,e){var r=e(15),i=e(22).f;e(23)("getOwnPropertyDescriptor",(function(){return function(t,n){return i(r(t),n)}}))},function(t,n,e){var r=e(10),i=e(37);e(23)("getPrototypeOf",(function(){return function(t){return i(r(t))}}))},function(t,n,e){var r=e(10),i=e(33);e(23)("keys",(function(){return function(t){return i(r(t))}}))},function(t,n,e){e(23)("getOwnPropertyNames",(function(){return e(100).f}))},function(t,n,e){var r=e(4),i=e(29).onFreeze;e(23)("freeze",(function(t){return function(n){return t&&r(n)?t(i(n)):n}}))},function(t,n,e){var r=e(4),i=e(29).onFreeze;e(23)("seal",(function(t){return function(n){return t&&r(n)?t(i(n)):n}}))},function(t,n,e){var r=e(4),i=e(29).onFreeze;e(23)("preventExtensions",(function(t){return function(n){return t&&r(n)?t(i(n)):n}}))},function(t,n,e){var 
r=e(4);e(23)("isFrozen",(function(t){return function(n){return!r(n)||!!t&&t(n)}}))},function(t,n,e){var r=e(4);e(23)("isSealed",(function(t){return function(n){return!r(n)||!!t&&t(n)}}))},function(t,n,e){var r=e(4);e(23)("isExtensible",(function(t){return function(n){return!!r(n)&&(!t||t(n))}}))},function(t,n,e){var r=e(0);r(r.S+r.F,"Object",{assign:e(101)})},function(t,n,e){var r=e(0);r(r.S,"Object",{is:e(102)})},function(t,n,e){var r=e(0);r(r.S,"Object",{setPrototypeOf:e(67).set})},function(t,n,e){"use strict";var r=e(48),i={};i[e(5)("toStringTag")]="z",i+""!="[object z]"&&e(11)(Object.prototype,"toString",(function(){return"[object "+r(this)+"]"}),!0)},function(t,n,e){var r=e(0);r(r.P,"Function",{bind:e(103)})},function(t,n,e){var r=e(9).f,i=Function.prototype,o=/^\s*function ([^ (]*)/;"name"in i||e(8)&&r(i,"name",{configurable:!0,get:function(){try{return(""+this).match(o)[1]}catch(t){return""}}})},function(t,n,e){"use strict";var r=e(4),i=e(37),o=e(5)("hasInstance"),u=Function.prototype;o in u||e(9).f(u,o,{value:function(t){if("function"!=typeof this||!r(t))return!1;if(!r(this.prototype))return t instanceof this;for(;t=i(t);)if(this.prototype===t)return!0;return!1}})},function(t,n,e){var r=e(0),i=e(105);r(r.G+r.F*(parseInt!=i),{parseInt:i})},function(t,n,e){var r=e(0),i=e(106);r(r.G+r.F*(parseFloat!=i),{parseFloat:i})},function(t,n,e){"use strict";var r=e(1),i=e(13),o=e(25),u=e(69),a=e(28),c=e(2),s=e(36).f,f=e(22).f,l=e(9).f,h=e(41).trim,d=r.Number,p=d,v=d.prototype,g="Number"==o(e(35)(v)),y="trim"in String.prototype,m=function(t){var n=a(t,!1);if("string"==typeof n&&n.length>2){var e,r,i,o=(n=y?n.trim():h(n,3)).charCodeAt(0);if(43===o||45===o){if(88===(e=n.charCodeAt(2))||120===e)return NaN}else if(48===o){switch(n.charCodeAt(1)){case 66:case 98:r=2,i=49;break;case 79:case 111:r=8,i=55;break;default:return+n}for(var u,c=n.slice(2),s=0,f=c.length;si)return NaN;return parseInt(c,r)}}return+n};if(!d(" 0o1")||!d("0b1")||d("+0x1")){d=function(t){var 
n=arguments.length<1?0:t,e=this;return e instanceof d&&(g?c((function(){v.valueOf.call(e)})):"Number"!=o(e))?u(new p(m(n)),e,d):m(n)};for(var b,S=e(8)?s(p):"MAX_VALUE,MIN_VALUE,NaN,NEGATIVE_INFINITY,POSITIVE_INFINITY,EPSILON,isFinite,isInteger,isNaN,isSafeInteger,MAX_SAFE_INTEGER,MIN_SAFE_INTEGER,parseFloat,parseInt,isInteger".split(","),w=0;S.length>w;w++)i(p,b=S[w])&&!i(d,b)&&l(d,b,f(p,b));d.prototype=v,v.constructor=d,e(11)(r,"Number",d)}},function(t,n,e){"use strict";var r=e(0),i=e(21),o=e(107),u=e(70),a=1..toFixed,c=Math.floor,s=[0,0,0,0,0,0],f="Number.toFixed: incorrect invocation!",l=function(t,n){for(var e=-1,r=n;++e<6;)r+=t*s[e],s[e]=r%1e7,r=c(r/1e7)},h=function(t){for(var n=6,e=0;--n>=0;)e+=s[n],s[n]=c(e/t),e=e%t*1e7},d=function(){for(var t=6,n="";--t>=0;)if(""!==n||0===t||0!==s[t]){var e=String(s[t]);n=""===n?e:n+u.call("0",7-e.length)+e}return n},p=function(t,n,e){return 0===n?e:n%2==1?p(t,n-1,e*t):p(t*t,n/2,e)};r(r.P+r.F*(!!a&&("0.000"!==8e-5.toFixed(3)||"1"!==.9.toFixed(0)||"1.25"!==1.255.toFixed(2)||"1000000000000000128"!==(0xde0b6b3a7640080).toFixed(0))||!e(2)((function(){a.call({})}))),"Number",{toFixed:function(t){var n,e,r,a,c=o(this,f),s=i(t),v="",g="0";if(s<0||s>20)throw RangeError(f);if(c!=c)return"NaN";if(c<=-1e21||c>=1e21)return String(c);if(c<0&&(v="-",c=-c),c>1e-21)if(e=(n=function(t){for(var n=0,e=t;e>=4096;)n+=12,e/=4096;for(;e>=2;)n+=1,e/=2;return n}(c*p(2,69,1))-69)<0?c*p(2,-n,1):c/p(2,n,1),e*=4503599627370496,(n=52-n)>0){for(l(0,e),r=s;r>=7;)l(1e7,0),r-=7;for(l(p(10,r,1),0),r=n-1;r>=23;)h(1<<23),r-=23;h(1<0?v+((a=g.length)<=s?"0."+u.call("0",s-a)+g:g.slice(0,a-s)+"."+g.slice(a-s)):v+g}})},function(t,n,e){"use strict";var r=e(0),i=e(2),o=e(107),u=1..toPrecision;r(r.P+r.F*(i((function(){return"1"!==u.call(1,void 0)}))||!i((function(){u.call({})}))),"Number",{toPrecision:function(t){var n=o(this,"Number#toPrecision: incorrect invocation!");return void 0===t?u.call(n):u.call(n,t)}})},function(t,n,e){var 
r=e(0);r(r.S,"Number",{EPSILON:Math.pow(2,-52)})},function(t,n,e){var r=e(0),i=e(1).isFinite;r(r.S,"Number",{isFinite:function(t){return"number"==typeof t&&i(t)}})},function(t,n,e){var r=e(0);r(r.S,"Number",{isInteger:e(108)})},function(t,n,e){var r=e(0);r(r.S,"Number",{isNaN:function(t){return t!=t}})},function(t,n,e){var r=e(0),i=e(108),o=Math.abs;r(r.S,"Number",{isSafeInteger:function(t){return i(t)&&o(t)<=9007199254740991}})},function(t,n,e){var r=e(0);r(r.S,"Number",{MAX_SAFE_INTEGER:9007199254740991})},function(t,n,e){var r=e(0);r(r.S,"Number",{MIN_SAFE_INTEGER:-9007199254740991})},function(t,n,e){var r=e(0),i=e(106);r(r.S+r.F*(Number.parseFloat!=i),"Number",{parseFloat:i})},function(t,n,e){var r=e(0),i=e(105);r(r.S+r.F*(Number.parseInt!=i),"Number",{parseInt:i})},function(t,n,e){var r=e(0),i=e(109),o=Math.sqrt,u=Math.acosh;r(r.S+r.F*!(u&&710==Math.floor(u(Number.MAX_VALUE))&&u(1/0)==1/0),"Math",{acosh:function(t){return(t=+t)<1?NaN:t>94906265.62425156?Math.log(t)+Math.LN2:i(t-1+o(t-1)*o(t+1))}})},function(t,n,e){var r=e(0),i=Math.asinh;r(r.S+r.F*!(i&&1/i(0)>0),"Math",{asinh:function t(n){return isFinite(n=+n)&&0!=n?n<0?-t(-n):Math.log(n+Math.sqrt(n*n+1)):n}})},function(t,n,e){var r=e(0),i=Math.atanh;r(r.S+r.F*!(i&&1/i(-0)<0),"Math",{atanh:function(t){return 0==(t=+t)?t:Math.log((1+t)/(1-t))/2}})},function(t,n,e){var r=e(0),i=e(71);r(r.S,"Math",{cbrt:function(t){return i(t=+t)*Math.pow(Math.abs(t),1/3)}})},function(t,n,e){var r=e(0);r(r.S,"Math",{clz32:function(t){return(t>>>=0)?31-Math.floor(Math.log(t+.5)*Math.LOG2E):32}})},function(t,n,e){var r=e(0),i=Math.exp;r(r.S,"Math",{cosh:function(t){return(i(t=+t)+i(-t))/2}})},function(t,n,e){var r=e(0),i=e(72);r(r.S+r.F*(i!=Math.expm1),"Math",{expm1:i})},function(t,n,e){var r=e(0);r(r.S,"Math",{fround:e(178)})},function(t,n,e){var r=e(71),i=Math.pow,o=i(2,-52),u=i(2,-23),a=i(2,127)*(2-u),c=i(2,-126);t.exports=Math.fround||function(t){var n,e,i=Math.abs(t),s=r(t);return ia||e!=e?s*(1/0):s*e}},function(t,n,e){var 
r=e(0),i=Math.abs;r(r.S,"Math",{hypot:function(t,n){for(var e,r,o=0,u=0,a=arguments.length,c=0;u0?(r=e/c)*r:e;return c===1/0?1/0:c*Math.sqrt(o)}})},function(t,n,e){var r=e(0),i=Math.imul;r(r.S+r.F*e(2)((function(){return-5!=i(4294967295,5)||2!=i.length})),"Math",{imul:function(t,n){var e=+t,r=+n,i=65535&e,o=65535&r;return 0|i*o+((65535&e>>>16)*o+i*(65535&r>>>16)<<16>>>0)}})},function(t,n,e){var r=e(0);r(r.S,"Math",{log10:function(t){return Math.log(t)*Math.LOG10E}})},function(t,n,e){var r=e(0);r(r.S,"Math",{log1p:e(109)})},function(t,n,e){var r=e(0);r(r.S,"Math",{log2:function(t){return Math.log(t)/Math.LN2}})},function(t,n,e){var r=e(0);r(r.S,"Math",{sign:e(71)})},function(t,n,e){var r=e(0),i=e(72),o=Math.exp;r(r.S+r.F*e(2)((function(){return-2e-17!=!Math.sinh(-2e-17)})),"Math",{sinh:function(t){return Math.abs(t=+t)<1?(i(t)-i(-t))/2:(o(t-1)-o(-t-1))*(Math.E/2)}})},function(t,n,e){var r=e(0),i=e(72),o=Math.exp;r(r.S,"Math",{tanh:function(t){var n=i(t=+t),e=i(-t);return n==1/0?1:e==1/0?-1:(n-e)/(o(t)+o(-t))}})},function(t,n,e){var r=e(0);r(r.S,"Math",{trunc:function(t){return(t>0?Math.floor:Math.ceil)(t)}})},function(t,n,e){var r=e(0),i=e(34),o=String.fromCharCode,u=String.fromCodePoint;r(r.S+r.F*(!!u&&1!=u.length),"String",{fromCodePoint:function(t){for(var n,e=[],r=arguments.length,u=0;r>u;){if(n=+arguments[u++],i(n,1114111)!==n)throw RangeError(n+" is not a valid code point");e.push(n<65536?o(n):o(55296+((n-=65536)>>10),n%1024+56320))}return e.join("")}})},function(t,n,e){var r=e(0),i=e(15),o=e(6);r(r.S,"String",{raw:function(t){for(var n=i(t.raw),e=o(n.length),r=arguments.length,u=[],a=0;e>a;)u.push(String(n[a++])),a=n.length?{value:void 0,done:!0}:(t=r(n,e),this._i+=t.length,{value:t,done:!1})}))},function(t,n,e){"use strict";var r=e(0),i=e(73)(!1);r(r.P,"String",{codePointAt:function(t){return i(this,t)}})},function(t,n,e){"use strict";var r=e(0),i=e(6),o=e(75),u="".endsWith;r(r.P+r.F*e(77)("endsWith"),"String",{endsWith:function(t){var 
n=o(this,t,"endsWith"),e=arguments.length>1?arguments[1]:void 0,r=i(n.length),a=void 0===e?r:Math.min(i(e),r),c=String(t);return u?u.call(n,c,a):n.slice(a-c.length,a)===c}})},function(t,n,e){"use strict";var r=e(0),i=e(75);r(r.P+r.F*e(77)("includes"),"String",{includes:function(t){return!!~i(this,t,"includes").indexOf(t,arguments.length>1?arguments[1]:void 0)}})},function(t,n,e){var r=e(0);r(r.P,"String",{repeat:e(70)})},function(t,n,e){"use strict";var r=e(0),i=e(6),o=e(75),u="".startsWith;r(r.P+r.F*e(77)("startsWith"),"String",{startsWith:function(t){var n=o(this,t,"startsWith"),e=i(Math.min(arguments.length>1?arguments[1]:void 0,n.length)),r=String(t);return u?u.call(n,r,e):n.slice(e,e+r.length)===r}})},function(t,n,e){"use strict";e(12)("anchor",(function(t){return function(n){return t(this,"a","name",n)}}))},function(t,n,e){"use strict";e(12)("big",(function(t){return function(){return t(this,"big","","")}}))},function(t,n,e){"use strict";e(12)("blink",(function(t){return function(){return t(this,"blink","","")}}))},function(t,n,e){"use strict";e(12)("bold",(function(t){return function(){return t(this,"b","","")}}))},function(t,n,e){"use strict";e(12)("fixed",(function(t){return function(){return t(this,"tt","","")}}))},function(t,n,e){"use strict";e(12)("fontcolor",(function(t){return function(n){return t(this,"font","color",n)}}))},function(t,n,e){"use strict";e(12)("fontsize",(function(t){return function(n){return t(this,"font","size",n)}}))},function(t,n,e){"use strict";e(12)("italics",(function(t){return function(){return t(this,"i","","")}}))},function(t,n,e){"use strict";e(12)("link",(function(t){return function(n){return t(this,"a","href",n)}}))},function(t,n,e){"use strict";e(12)("small",(function(t){return function(){return t(this,"small","","")}}))},function(t,n,e){"use strict";e(12)("strike",(function(t){return function(){return t(this,"strike","","")}}))},function(t,n,e){"use strict";e(12)("sub",(function(t){return function(){return 
t(this,"sub","","")}}))},function(t,n,e){"use strict";e(12)("sup",(function(t){return function(){return t(this,"sup","","")}}))},function(t,n,e){var r=e(0);r(r.S,"Date",{now:function(){return(new Date).getTime()}})},function(t,n,e){"use strict";var r=e(0),i=e(10),o=e(28);r(r.P+r.F*e(2)((function(){return null!==new Date(NaN).toJSON()||1!==Date.prototype.toJSON.call({toISOString:function(){return 1}})})),"Date",{toJSON:function(t){var n=i(this),e=o(n);return"number"!=typeof e||isFinite(e)?n.toISOString():null}})},function(t,n,e){var r=e(0),i=e(213);r(r.P+r.F*(Date.prototype.toISOString!==i),"Date",{toISOString:i})},function(t,n,e){"use strict";var r=e(2),i=Date.prototype.getTime,o=Date.prototype.toISOString,u=function(t){return t>9?t:"0"+t};t.exports=r((function(){return"0385-07-25T07:06:39.999Z"!=o.call(new Date(-5e13-1))}))||!r((function(){o.call(new Date(NaN))}))?function(){if(!isFinite(i.call(this)))throw RangeError("Invalid time value");var t=this,n=t.getUTCFullYear(),e=t.getUTCMilliseconds(),r=n<0?"-":n>9999?"+":"";return r+("00000"+Math.abs(n)).slice(r?-6:-4)+"-"+u(t.getUTCMonth()+1)+"-"+u(t.getUTCDate())+"T"+u(t.getUTCHours())+":"+u(t.getUTCMinutes())+":"+u(t.getUTCSeconds())+"."+(e>99?e:"0"+u(e))+"Z"}:o},function(t,n,e){var r=Date.prototype,i=r.toString,o=r.getTime;new Date(NaN)+""!="Invalid Date"&&e(11)(r,"toString",(function(){var t=o.call(this);return t==t?i.call(this):"Invalid Date"}))},function(t,n,e){var r=e(5)("toPrimitive"),i=Date.prototype;r in i||e(14)(i,r,e(216))},function(t,n,e){"use strict";var r=e(3),i=e(28);t.exports=function(t){if("string"!==t&&"number"!==t&&"default"!==t)throw TypeError("Incorrect hint");return i(r(this),"number"!=t)}},function(t,n,e){var r=e(0);r(r.S,"Array",{isArray:e(53)})},function(t,n,e){"use strict";var r=e(19),i=e(0),o=e(10),u=e(111),a=e(78),c=e(6),s=e(79),f=e(80);i(i.S+i.F*!e(54)((function(t){Array.from(t)})),"Array",{from:function(t){var n,e,i,l,h=o(t),d="function"==typeof 
this?this:Array,p=arguments.length,v=p>1?arguments[1]:void 0,g=void 0!==v,y=0,m=f(h);if(g&&(v=r(v,p>2?arguments[2]:void 0,2)),null==m||d==Array&&a(m))for(e=new d(n=c(h.length));n>y;y++)s(e,y,g?v(h[y],y):h[y]);else for(l=m.call(h),e=new d;!(i=l.next()).done;y++)s(e,y,g?u(l,v,[i.value,y],!0):i.value);return e.length=y,e}})},function(t,n,e){"use strict";var r=e(0),i=e(79);r(r.S+r.F*e(2)((function(){function t(){}return!(Array.of.call(t)instanceof t)})),"Array",{of:function(){for(var t=0,n=arguments.length,e=new("function"==typeof this?this:Array)(n);n>t;)i(e,t,arguments[t++]);return e.length=n,e}})},function(t,n,e){"use strict";var r=e(0),i=e(15),o=[].join;r(r.P+r.F*(e(46)!=Object||!e(16)(o)),"Array",{join:function(t){return o.call(i(this),void 0===t?",":t)}})},function(t,n,e){"use strict";var r=e(0),i=e(66),o=e(25),u=e(34),a=e(6),c=[].slice;r(r.P+r.F*e(2)((function(){i&&c.call(i)})),"Array",{slice:function(t,n){var e=a(this.length),r=o(this);if(n=void 0===n?e:n,"Array"==r)return c.call(this,t,n);for(var i=u(t,e),s=u(n,e),f=a(s-i),l=new Array(f),h=0;h1&&(r=Math.min(r,o(arguments[1]))),r<0&&(r=e+r);r>=0;r--)if(r in n&&n[r]===t)return r||0;return-1}})},function(t,n,e){var r=e(0);r(r.P,"Array",{copyWithin:e(114)}),e(38)("copyWithin")},function(t,n,e){var r=e(0);r(r.P,"Array",{fill:e(81)}),e(38)("fill")},function(t,n,e){"use strict";var r=e(0),i=e(24)(5),o=!0;"find"in[]&&Array(1).find((function(){o=!1})),r(r.P+r.F*o,"Array",{find:function(t){return i(this,t,arguments.length>1?arguments[1]:void 0)}}),e(38)("find")},function(t,n,e){"use strict";var r=e(0),i=e(24)(6),o="findIndex",u=!0;o in[]&&Array(1)[o]((function(){u=!1})),r(r.P+r.F*u,"Array",{findIndex:function(t){return i(this,t,arguments.length>1?arguments[1]:void 0)}}),e(38)(o)},function(t,n,e){e(43)("Array")},function(t,n,e){var r=e(1),i=e(69),o=e(9).f,u=e(36).f,a=e(76),c=e(55),s=r.RegExp,f=s,l=s.prototype,h=/a/g,d=/a/g,p=new s(h)!==h;if(e(8)&&(!p||e(2)((function(){return 
d[e(5)("match")]=!1,s(h)!=h||s(d)==d||"/a/i"!=s(h,"i")})))){s=function(t,n){var e=this instanceof s,r=a(t),o=void 0===n;return!e&&r&&t.constructor===s&&o?t:i(p?new f(r&&!o?t.source:t,n):f((r=t instanceof s)?t.source:t,r&&o?c.call(t):n),e?this:l,s)};for(var v=function(t){t in s||o(s,t,{configurable:!0,get:function(){return f[t]},set:function(n){f[t]=n}})},g=u(f),y=0;g.length>y;)v(g[y++]);l.constructor=s,s.prototype=l,e(11)(r,"RegExp",s)}e(43)("RegExp")},function(t,n,e){"use strict";e(117);var r=e(3),i=e(55),o=e(8),u=/./.toString,a=function(t){e(11)(RegExp.prototype,"toString",t,!0)};e(2)((function(){return"/a/b"!=u.call({source:"a",flags:"b"})}))?a((function(){var t=r(this);return"/".concat(t.source,"/","flags"in t?t.flags:!o&&t instanceof RegExp?i.call(t):void 0)})):"toString"!=u.name&&a((function(){return u.call(this)}))},function(t,n,e){"use strict";var r=e(3),i=e(6),o=e(84),u=e(56);e(57)("match",1,(function(t,n,e,a){return[function(e){var r=t(this),i=null==e?void 0:e[n];return void 0!==i?i.call(e,r):new RegExp(e)[n](String(r))},function(t){var n=a(e,t,this);if(n.done)return n.value;var c=r(t),s=String(this);if(!c.global)return u(c,s);var f=c.unicode;c.lastIndex=0;for(var l,h=[],d=0;null!==(l=u(c,s));){var p=String(l[0]);h[d]=p,""===p&&(c.lastIndex=o(s,i(c.lastIndex),f)),d++}return 0===d?null:h}]}))},function(t,n,e){"use strict";var r=e(3),i=e(10),o=e(6),u=e(21),a=e(84),c=e(56),s=Math.max,f=Math.min,l=Math.floor,h=/\$([$&`']|\d\d?|<[^>]*>)/g,d=/\$([$&`']|\d\d?)/g;e(57)("replace",2,(function(t,n,e,p){return[function(r,i){var o=t(this),u=null==r?void 0:r[n];return void 0!==u?u.call(r,o,i):e.call(String(o),r,i)},function(t,n){var i=p(e,t,this,n);if(i.done)return i.value;var l=r(t),h=String(this),d="function"==typeof n;d||(n=String(n));var g=l.global;if(g){var y=l.unicode;l.lastIndex=0}for(var m=[];;){var b=c(l,h);if(null===b)break;if(m.push(b),!g)break;""===String(b[0])&&(l.lastIndex=a(h,o(l.lastIndex),y))}for(var 
S,w="",_=0,M=0;M=_&&(w+=h.slice(_,P)+N,_=P+x.length)}return w+h.slice(_)}];function v(t,n,r,o,u,a){var c=r+t.length,s=o.length,f=d;return void 0!==u&&(u=i(u),f=h),e.call(a,f,(function(e,i){var a;switch(i.charAt(0)){case"$":return"$";case"&":return t;case"`":return n.slice(0,r);case"'":return n.slice(c);case"<":a=u[i.slice(1,-1)];break;default:var f=+i;if(0===f)return e;if(f>s){var h=l(f/10);return 0===h?e:h<=s?void 0===o[h-1]?i.charAt(1):o[h-1]+i.charAt(1):e}a=o[f-1]}return void 0===a?"":a}))}}))},function(t,n,e){"use strict";var r=e(3),i=e(102),o=e(56);e(57)("search",1,(function(t,n,e,u){return[function(e){var r=t(this),i=null==e?void 0:e[n];return void 0!==i?i.call(e,r):new RegExp(e)[n](String(r))},function(t){var n=u(e,t,this);if(n.done)return n.value;var a=r(t),c=String(this),s=a.lastIndex;i(s,0)||(a.lastIndex=0);var f=o(a,c);return i(a.lastIndex,s)||(a.lastIndex=s),null===f?-1:f.index}]}))},function(t,n,e){"use strict";var r=e(76),i=e(3),o=e(49),u=e(84),a=e(6),c=e(56),s=e(83),f=e(2),l=Math.min,h=[].push,d=!f((function(){RegExp(4294967295,"y")}));e(57)("split",2,(function(t,n,e,f){var p;return p="c"=="abbc".split(/(b)*/)[1]||4!="test".split(/(?:)/,-1).length||2!="ab".split(/(?:ab)*/).length||4!=".".split(/(.?)(.?)/).length||".".split(/()()/).length>1||"".split(/.?/).length?function(t,n){var i=String(this);if(void 0===t&&0===n)return[];if(!r(t))return e.call(i,t,n);for(var o,u,a,c=[],f=(t.ignoreCase?"i":"")+(t.multiline?"m":"")+(t.unicode?"u":"")+(t.sticky?"y":""),l=0,d=void 0===n?4294967295:n>>>0,p=new RegExp(t.source,f+"g");(o=s.call(p,i))&&!((u=p.lastIndex)>l&&(c.push(i.slice(l,o.index)),o.length>1&&o.index=d));)p.lastIndex===o.index&&p.lastIndex++;return l===i.length?!a&&p.test("")||c.push(""):c.push(i.slice(l)),c.length>d?c.slice(0,d):c}:"0".split(void 0,0).length?function(t,n){return void 0===t&&0===n?[]:e.call(this,t,n)}:e,[function(e,r){var i=t(this),o=null==e?void 0:e[n];return void 0!==o?o.call(e,i,r):p.call(String(i),e,r)},function(t,n){var 
r=f(p,t,this,n,p!==e);if(r.done)return r.value;var s=i(t),h=String(this),v=o(s,RegExp),g=s.unicode,y=(s.ignoreCase?"i":"")+(s.multiline?"m":"")+(s.unicode?"u":"")+(d?"y":"g"),m=new v(d?s:"^(?:"+s.source+")",y),b=void 0===n?4294967295:n>>>0;if(0===b)return[];if(0===h.length)return null===c(m,h)?[h]:[];for(var S=0,w=0,_=[];w0?arguments[0]:void 0)}}),{get:function(t){var n=r.getEntry(i(this,"Map"),t);return n&&n.v},set:function(t,n){return r.def(i(this,"Map"),0===t?0:t,n)}},r,!0)},function(t,n,e){"use strict";var r=e(121),i=e(39);t.exports=e(60)("Set",(function(t){return function(){return t(this,arguments.length>0?arguments[0]:void 0)}}),{add:function(t){return r.def(i(this,"Set"),t=0===t?0:t,t)}},r)},function(t,n,e){"use strict";var r,i=e(1),o=e(24)(0),u=e(11),a=e(29),c=e(101),s=e(122),f=e(4),l=e(39),h=e(39),d=!i.ActiveXObject&&"ActiveXObject"in i,p=a.getWeak,v=Object.isExtensible,g=s.ufstore,y=function(t){return function(){return t(this,arguments.length>0?arguments[0]:void 0)}},m={get:function(t){if(f(t)){var n=p(t);return!0===n?g(l(this,"WeakMap")).get(t):n?n[this._i]:void 0}},set:function(t,n){return s.def(l(this,"WeakMap"),t,n)}},b=t.exports=e(60)("WeakMap",y,m,s,!0,!0);h&&d&&(c((r=s.getConstructor(y,"WeakMap")).prototype,m),a.NEED=!0,o(["delete","has","get","set"],(function(t){var n=b.prototype,e=n[t];u(n,t,(function(n,i){if(f(n)&&!v(n)){this._f||(this._f=new r);var o=this._f[t](n,i);return"set"==t?this:o}return e.call(this,n,i)}))})))},function(t,n,e){"use strict";var r=e(122),i=e(39);e(60)("WeakSet",(function(t){return function(){return t(this,arguments.length>0?arguments[0]:void 0)}}),{add:function(t){return r.def(i(this,"WeakSet"),t,!0)}},r,!1,!0)},function(t,n,e){"use strict";var r=e(0),i=e(61),o=e(86),u=e(3),a=e(34),c=e(6),s=e(4),f=e(1).ArrayBuffer,l=e(49),h=o.ArrayBuffer,d=o.DataView,p=i.ABV&&f.isView,v=h.prototype.slice,g=i.VIEW;r(r.G+r.W+r.F*(f!==h),{ArrayBuffer:h}),r(r.S+r.F*!i.CONSTR,"ArrayBuffer",{isView:function(t){return p&&p(t)||s(t)&&g in 
t}}),r(r.P+r.U+r.F*e(2)((function(){return!new h(2).slice(1,void 0).byteLength})),"ArrayBuffer",{slice:function(t,n){if(void 0!==v&&void 0===n)return v.call(u(this),t);for(var e=u(this).byteLength,r=a(t,e),i=a(void 0===n?e:n,e),o=new(l(this,h))(c(i-r)),s=new d(this),f=new d(o),p=0;r=n.length)return{value:void 0,done:!0}}while(!((t=n[this._i++])in this._t));return{value:t,done:!1}})),r(r.S,"Reflect",{enumerate:function(t){return new o(t)}})},function(t,n,e){var r=e(22),i=e(37),o=e(13),u=e(0),a=e(4),c=e(3);u(u.S,"Reflect",{get:function t(n,e){var u,s,f=arguments.length<3?n:arguments[2];return c(n)===f?n[e]:(u=r.f(n,e))?o(u,"value")?u.value:void 0!==u.get?u.get.call(f):void 0:a(s=i(n))?t(s,e,f):void 0}})},function(t,n,e){var r=e(22),i=e(0),o=e(3);i(i.S,"Reflect",{getOwnPropertyDescriptor:function(t,n){return r.f(o(t),n)}})},function(t,n,e){var r=e(0),i=e(37),o=e(3);r(r.S,"Reflect",{getPrototypeOf:function(t){return i(o(t))}})},function(t,n,e){var r=e(0);r(r.S,"Reflect",{has:function(t,n){return n in t}})},function(t,n,e){var r=e(0),i=e(3),o=Object.isExtensible;r(r.S,"Reflect",{isExtensible:function(t){return i(t),!o||o(t)}})},function(t,n,e){var r=e(0);r(r.S,"Reflect",{ownKeys:e(124)})},function(t,n,e){var r=e(0),i=e(3),o=Object.preventExtensions;r(r.S,"Reflect",{preventExtensions:function(t){i(t);try{return o&&o(t),!0}catch(t){return!1}}})},function(t,n,e){var r=e(9),i=e(22),o=e(37),u=e(13),a=e(0),c=e(30),s=e(3),f=e(4);a(a.S,"Reflect",{set:function t(n,e,a){var l,h,d=arguments.length<4?n:arguments[3],p=i.f(s(n),e);if(!p){if(f(h=o(n)))return t(h,e,a,d);p=c(0)}if(u(p,"value")){if(!1===p.writable||!f(d))return!1;if(l=i.f(d,e)){if(l.get||l.set||!1===l.writable)return!1;l.value=a,r.f(d,e,l)}else r.f(d,e,c(0,a));return!0}return void 0!==p.set&&(p.set.call(d,a),!0)}})},function(t,n,e){var r=e(0),i=e(67);i&&r(r.S,"Reflect",{setPrototypeOf:function(t,n){i.check(t,n);try{return 
i.set(t,n),!0}catch(t){return!1}}})},function(t,n,e){e(276),t.exports=e(7).Array.includes},function(t,n,e){"use strict";var r=e(0),i=e(51)(!0);r(r.P,"Array",{includes:function(t){return i(this,t,arguments.length>1?arguments[1]:void 0)}}),e(38)("includes")},function(t,n,e){e(278),t.exports=e(7).Array.flatMap},function(t,n,e){"use strict";var r=e(0),i=e(279),o=e(10),u=e(6),a=e(20),c=e(112);r(r.P,"Array",{flatMap:function(t){var n,e,r=o(this);return a(t),n=u(r.length),e=c(r,0),i(e,r,r,n,0,1,t,arguments[1]),e}}),e(38)("flatMap")},function(t,n,e){"use strict";var r=e(53),i=e(4),o=e(6),u=e(19),a=e(5)("isConcatSpreadable");t.exports=function t(n,e,c,s,f,l,h,d){for(var p,v,g=f,y=0,m=!!h&&u(h,d,3);y0)g=t(n,e,p,o(p.length),g,l-1)-1;else{if(g>=9007199254740991)throw TypeError();n[g]=p}g++}y++}return g}},function(t,n,e){e(281),t.exports=e(7).String.padStart},function(t,n,e){"use strict";var r=e(0),i=e(125),o=e(59),u=/Version\/10\.\d+(\.\d+)?( Mobile\/\w+)? Safari\//.test(o);r(r.P+r.F*u,"String",{padStart:function(t){return i(this,t,arguments.length>1?arguments[1]:void 0,!0)}})},function(t,n,e){e(283),t.exports=e(7).String.padEnd},function(t,n,e){"use strict";var r=e(0),i=e(125),o=e(59),u=/Version\/10\.\d+(\.\d+)?( Mobile\/\w+)? 
Safari\//.test(o);r(r.P+r.F*u,"String",{padEnd:function(t){return i(this,t,arguments.length>1?arguments[1]:void 0,!1)}})},function(t,n,e){e(285),t.exports=e(7).String.trimLeft},function(t,n,e){"use strict";e(41)("trimLeft",(function(t){return function(){return t(this,1)}}),"trimStart")},function(t,n,e){e(287),t.exports=e(7).String.trimRight},function(t,n,e){"use strict";e(41)("trimRight",(function(t){return function(){return t(this,2)}}),"trimEnd")},function(t,n,e){e(289),t.exports=e(63).f("asyncIterator")},function(t,n,e){e(97)("asyncIterator")},function(t,n,e){e(291),t.exports=e(7).Object.getOwnPropertyDescriptors},function(t,n,e){var r=e(0),i=e(124),o=e(15),u=e(22),a=e(79);r(r.S,"Object",{getOwnPropertyDescriptors:function(t){for(var n,e,r=o(t),c=u.f,s=i(r),f={},l=0;s.length>l;)void 0!==(e=c(r,n=s[l++]))&&a(f,n,e);return f}})},function(t,n,e){e(293),t.exports=e(7).Object.values},function(t,n,e){var r=e(0),i=e(126)(!1);r(r.S,"Object",{values:function(t){return i(t)}})},function(t,n,e){e(295),t.exports=e(7).Object.entries},function(t,n,e){var r=e(0),i=e(126)(!0);r(r.S,"Object",{entries:function(t){return i(t)}})},function(t,n,e){"use strict";e(118),e(297),t.exports=e(7).Promise.finally},function(t,n,e){"use strict";var r=e(0),i=e(7),o=e(1),u=e(49),a=e(120);r(r.P+r.R,"Promise",{finally:function(t){var n=u(this,i.Promise||o.Promise),e="function"==typeof t;return this.then(e?function(e){return a(n,t()).then((function(){return e}))}:t,e?function(e){return a(n,t()).then((function(){throw e}))}:t)}})},function(t,n,e){e(299),e(300),e(301),t.exports=e(7)},function(t,n,e){var r=e(1),i=e(0),o=e(59),u=[].slice,a=/MSIE .\./.test(o),c=function(t){return function(n,e){var r=arguments.length>2,i=!!r&&u.call(arguments,2);return t(r?function(){("function"==typeof n?n:Function(n)).apply(this,i)}:n,e)}};i(i.G+i.B+i.F*a,{setTimeout:c(r.setTimeout),setInterval:c(r.setInterval)})},function(t,n,e){var 
r=e(0),i=e(85);r(r.G+r.B,{setImmediate:i.set,clearImmediate:i.clear})},function(t,n,e){for(var r=e(82),i=e(33),o=e(11),u=e(1),a=e(14),c=e(42),s=e(5),f=s("iterator"),l=s("toStringTag"),h=c.Array,d={CSSRuleList:!0,CSSStyleDeclaration:!1,CSSValueList:!1,ClientRectList:!1,DOMRectList:!1,DOMStringList:!1,DOMTokenList:!0,DataTransferItemList:!1,FileList:!1,HTMLAllCollection:!1,HTMLCollection:!1,HTMLFormElement:!1,HTMLSelectElement:!1,MediaList:!0,MimeTypeArray:!1,NamedNodeMap:!1,NodeList:!0,PaintRequestList:!1,Plugin:!1,PluginArray:!1,SVGLengthList:!1,SVGNumberList:!1,SVGPathSegList:!1,SVGPointList:!1,SVGStringList:!1,SVGTransformList:!1,SourceBufferList:!1,StyleSheetList:!0,TextTrackCueList:!1,TextTrackList:!1,TouchList:!1},p=i(d),v=0;v=0;--o){var u=this.tryEntries[o],a=u.completion;if("root"===u.tryLoc)return i("end");if(u.tryLoc<=this.prev){var c=r.call(u,"catchLoc"),s=r.call(u,"finallyLoc");if(c&&s){if(this.prev=0;--e){var i=this.tryEntries[e];if(i.tryLoc<=this.prev&&r.call(i,"finallyLoc")&&this.prev=0;--n){var e=this.tryEntries[n];if(e.finallyLoc===t)return this.complete(e.completion,e.afterLoc),O(e),p}},catch:function(t){for(var n=this.tryEntries.length-1;n>=0;--n){var e=this.tryEntries[n];if(e.tryLoc===t){var r=e.completion;if("throw"===r.type){var i=r.arg;O(e)}return i}}throw new Error("illegal catch attempt")},delegateYield:function(t,e,r){return this.delegate={iterator:A(t),resultName:e,nextLoc:r},"next"===this.method&&(this.arg=n),p}},t}(t.exports);try{regeneratorRuntime=r}catch(t){Function("r","regeneratorRuntime = r")(r)}},function(t,n,e){e(304),t.exports=e(127).global},function(t,n,e){var r=e(305);r(r.G,{global:e(87)})},function(t,n,e){var r=e(87),i=e(127),o=e(306),u=e(308),a=e(315),c=function(t,n,e){var s,f,l,h=t&c.F,d=t&c.G,p=t&c.S,v=t&c.P,g=t&c.B,y=t&c.W,m=d?i:i[n]||(i[n]={}),b=m.prototype,S=d?r:p?r[n]:(r[n]||{}).prototype;for(s in d&&(e=n),e)(f=!h&&S&&void 0!==S[s])&&a(m,s)||(l=f?S[s]:e[s],m[s]=d&&"function"!=typeof 
S[s]?e[s]:g&&f?o(l,r):y&&S[s]==l?function(t){var n=function(n,e,r){if(this instanceof t){switch(arguments.length){case 0:return new t;case 1:return new t(n);case 2:return new t(n,e)}return new t(n,e,r)}return t.apply(this,arguments)};return n.prototype=t.prototype,n}(l):v&&"function"==typeof l?o(Function.call,l):l,v&&((m.virtual||(m.virtual={}))[s]=l,t&c.R&&b&&!b[s]&&u(b,s,l)))};c.F=1,c.G=2,c.S=4,c.P=8,c.B=16,c.W=32,c.U=64,c.R=128,t.exports=c},function(t,n,e){var r=e(307);t.exports=function(t,n,e){if(r(t),void 0===n)return t;switch(e){case 1:return function(e){return t.call(n,e)};case 2:return function(e,r){return t.call(n,e,r)};case 3:return function(e,r,i){return t.call(n,e,r,i)}}return function(){return t.apply(n,arguments)}}},function(t,n){t.exports=function(t){if("function"!=typeof t)throw TypeError(t+" is not a function!");return t}},function(t,n,e){var r=e(309),i=e(314);t.exports=e(89)?function(t,n,e){return r.f(t,n,i(1,e))}:function(t,n,e){return t[n]=e,t}},function(t,n,e){var r=e(310),i=e(311),o=e(313),u=Object.defineProperty;n.f=e(89)?Object.defineProperty:function(t,n,e){if(r(t),n=o(n,!0),r(e),i)try{return u(t,n,e)}catch(t){}if("get"in e||"set"in e)throw TypeError("Accessors not supported!");return"value"in e&&(t[n]=e.value),t}},function(t,n,e){var r=e(88);t.exports=function(t){if(!r(t))throw TypeError(t+" is not an object!");return t}},function(t,n,e){t.exports=!e(89)&&!e(128)((function(){return 7!=Object.defineProperty(e(312)("div"),"a",{get:function(){return 7}}).a}))},function(t,n,e){var r=e(88),i=e(87).document,o=r(i)&&r(i.createElement);t.exports=function(t){return o?i.createElement(t):{}}},function(t,n,e){var r=e(88);t.exports=function(t,n){if(!r(t))return t;var e,i;if(n&&"function"==typeof(e=t.toString)&&!r(i=e.call(t)))return i;if("function"==typeof(e=t.valueOf)&&!r(i=e.call(t)))return i;if(!n&&"function"==typeof(e=t.toString)&&!r(i=e.call(t)))return i;throw TypeError("Can't convert object to primitive 
value")}},function(t,n){t.exports=function(t,n){return{enumerable:!(1&t),configurable:!(2&t),writable:!(4&t),value:n}}},function(t,n){var e={}.hasOwnProperty;t.exports=function(t,n){return e.call(t,n)}}])})); \ No newline at end of file diff --git a/amplify/functions/fetchDocuments/node_modules/bowser/es5.js b/amplify/functions/fetchDocuments/node_modules/bowser/es5.js new file mode 100644 index 0000000..bb8ec3d --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/bowser/es5.js @@ -0,0 +1 @@ +!function(e,t){"object"==typeof exports&&"object"==typeof module?module.exports=t():"function"==typeof define&&define.amd?define([],t):"object"==typeof exports?exports.bowser=t():e.bowser=t()}(this,(function(){return function(e){var t={};function r(n){if(t[n])return t[n].exports;var i=t[n]={i:n,l:!1,exports:{}};return e[n].call(i.exports,i,i.exports,r),i.l=!0,i.exports}return r.m=e,r.c=t,r.d=function(e,t,n){r.o(e,t)||Object.defineProperty(e,t,{enumerable:!0,get:n})},r.r=function(e){"undefined"!=typeof Symbol&&Symbol.toStringTag&&Object.defineProperty(e,Symbol.toStringTag,{value:"Module"}),Object.defineProperty(e,"__esModule",{value:!0})},r.t=function(e,t){if(1&t&&(e=r(e)),8&t)return e;if(4&t&&"object"==typeof e&&e&&e.__esModule)return e;var n=Object.create(null);if(r.r(n),Object.defineProperty(n,"default",{enumerable:!0,value:e}),2&t&&"string"!=typeof e)for(var i in e)r.d(n,i,function(t){return e[t]}.bind(null,i));return n},r.n=function(e){var t=e&&e.__esModule?function(){return e.default}:function(){return e};return r.d(t,"a",t),t},r.o=function(e,t){return Object.prototype.hasOwnProperty.call(e,t)},r.p="",r(r.s=90)}({17:function(e,t,r){"use strict";t.__esModule=!0,t.default=void 0;var n=r(18),i=function(){function e(){}return e.getFirstMatch=function(e,t){var r=t.match(e);return r&&r.length>0&&r[1]||""},e.getSecondMatch=function(e,t){var r=t.match(e);return r&&r.length>1&&r[2]||""},e.matchAndReturnConst=function(e,t,r){if(e.test(t))return 
r},e.getWindowsVersionName=function(e){switch(e){case"NT":return"NT";case"XP":return"XP";case"NT 5.0":return"2000";case"NT 5.1":return"XP";case"NT 5.2":return"2003";case"NT 6.0":return"Vista";case"NT 6.1":return"7";case"NT 6.2":return"8";case"NT 6.3":return"8.1";case"NT 10.0":return"10";default:return}},e.getMacOSVersionName=function(e){var t=e.split(".").splice(0,2).map((function(e){return parseInt(e,10)||0}));if(t.push(0),10===t[0])switch(t[1]){case 5:return"Leopard";case 6:return"Snow Leopard";case 7:return"Lion";case 8:return"Mountain Lion";case 9:return"Mavericks";case 10:return"Yosemite";case 11:return"El Capitan";case 12:return"Sierra";case 13:return"High Sierra";case 14:return"Mojave";case 15:return"Catalina";default:return}},e.getAndroidVersionName=function(e){var t=e.split(".").splice(0,2).map((function(e){return parseInt(e,10)||0}));if(t.push(0),!(1===t[0]&&t[1]<5))return 1===t[0]&&t[1]<6?"Cupcake":1===t[0]&&t[1]>=6?"Donut":2===t[0]&&t[1]<2?"Eclair":2===t[0]&&2===t[1]?"Froyo":2===t[0]&&t[1]>2?"Gingerbread":3===t[0]?"Honeycomb":4===t[0]&&t[1]<1?"Ice Cream Sandwich":4===t[0]&&t[1]<4?"Jelly Bean":4===t[0]&&t[1]>=4?"KitKat":5===t[0]?"Lollipop":6===t[0]?"Marshmallow":7===t[0]?"Nougat":8===t[0]?"Oreo":9===t[0]?"Pie":void 0},e.getVersionPrecision=function(e){return e.split(".").length},e.compareVersions=function(t,r,n){void 0===n&&(n=!1);var i=e.getVersionPrecision(t),s=e.getVersionPrecision(r),a=Math.max(i,s),o=0,u=e.map([t,r],(function(t){var r=a-e.getVersionPrecision(t),n=t+new Array(r+1).join(".0");return e.map(n.split("."),(function(e){return new Array(20-e.length).join("0")+e})).reverse()}));for(n&&(o=a-Math.min(i,s)),a-=1;a>=o;){if(u[0][a]>u[1][a])return 1;if(u[0][a]===u[1][a]){if(a===o)return 0;a-=1}else if(u[0][a]1?i-1:0),a=1;a0){var a=Object.keys(r),u=o.default.find(a,(function(e){return t.isOS(e)}));if(u){var d=this.satisfies(r[u]);if(void 0!==d)return d}var c=o.default.find(a,(function(e){return t.isPlatform(e)}));if(c){var 
f=this.satisfies(r[c]);if(void 0!==f)return f}}if(s>0){var l=Object.keys(i),h=o.default.find(l,(function(e){return t.isBrowser(e,!0)}));if(void 0!==h)return this.compareVersion(i[h])}},t.isBrowser=function(e,t){void 0===t&&(t=!1);var r=this.getBrowserName().toLowerCase(),n=e.toLowerCase(),i=o.default.getBrowserTypeByAlias(n);return t&&i&&(n=i.toLowerCase()),n===r},t.compareVersion=function(e){var t=[0],r=e,n=!1,i=this.getBrowserVersion();if("string"==typeof i)return">"===e[0]||"<"===e[0]?(r=e.substr(1),"="===e[1]?(n=!0,r=e.substr(2)):t=[],">"===e[0]?t.push(1):t.push(-1)):"="===e[0]?r=e.substr(1):"~"===e[0]&&(n=!0,r=e.substr(1)),t.indexOf(o.default.compareVersions(i,r,n))>-1},t.isOS=function(e){return this.getOSName(!0)===String(e).toLowerCase()},t.isPlatform=function(e){return this.getPlatformType(!0)===String(e).toLowerCase()},t.isEngine=function(e){return this.getEngineName(!0)===String(e).toLowerCase()},t.is=function(e,t){return void 0===t&&(t=!1),this.isBrowser(e,t)||this.isOS(e)||this.isPlatform(e)},t.some=function(e){var t=this;return void 0===e&&(e=[]),e.some((function(e){return t.is(e)}))},e}();t.default=d,e.exports=t.default},92:function(e,t,r){"use strict";t.__esModule=!0,t.default=void 0;var n,i=(n=r(17))&&n.__esModule?n:{default:n};var s=/version\/(\d+(\.?_?\d+)+)/i,a=[{test:[/googlebot/i],describe:function(e){var t={name:"Googlebot"},r=i.default.getFirstMatch(/googlebot\/(\d+(\.\d+))/i,e)||i.default.getFirstMatch(s,e);return r&&(t.version=r),t}},{test:[/opera/i],describe:function(e){var t={name:"Opera"},r=i.default.getFirstMatch(s,e)||i.default.getFirstMatch(/(?:opera)[\s/](\d+(\.?_?\d+)+)/i,e);return r&&(t.version=r),t}},{test:[/opr\/|opios/i],describe:function(e){var t={name:"Opera"},r=i.default.getFirstMatch(/(?:opr|opios)[\s/](\S+)/i,e)||i.default.getFirstMatch(s,e);return r&&(t.version=r),t}},{test:[/SamsungBrowser/i],describe:function(e){var t={name:"Samsung Internet for 
Android"},r=i.default.getFirstMatch(s,e)||i.default.getFirstMatch(/(?:SamsungBrowser)[\s/](\d+(\.?_?\d+)+)/i,e);return r&&(t.version=r),t}},{test:[/Whale/i],describe:function(e){var t={name:"NAVER Whale Browser"},r=i.default.getFirstMatch(s,e)||i.default.getFirstMatch(/(?:whale)[\s/](\d+(?:\.\d+)+)/i,e);return r&&(t.version=r),t}},{test:[/MZBrowser/i],describe:function(e){var t={name:"MZ Browser"},r=i.default.getFirstMatch(/(?:MZBrowser)[\s/](\d+(?:\.\d+)+)/i,e)||i.default.getFirstMatch(s,e);return r&&(t.version=r),t}},{test:[/focus/i],describe:function(e){var t={name:"Focus"},r=i.default.getFirstMatch(/(?:focus)[\s/](\d+(?:\.\d+)+)/i,e)||i.default.getFirstMatch(s,e);return r&&(t.version=r),t}},{test:[/swing/i],describe:function(e){var t={name:"Swing"},r=i.default.getFirstMatch(/(?:swing)[\s/](\d+(?:\.\d+)+)/i,e)||i.default.getFirstMatch(s,e);return r&&(t.version=r),t}},{test:[/coast/i],describe:function(e){var t={name:"Opera Coast"},r=i.default.getFirstMatch(s,e)||i.default.getFirstMatch(/(?:coast)[\s/](\d+(\.?_?\d+)+)/i,e);return r&&(t.version=r),t}},{test:[/opt\/\d+(?:.?_?\d+)+/i],describe:function(e){var t={name:"Opera Touch"},r=i.default.getFirstMatch(/(?:opt)[\s/](\d+(\.?_?\d+)+)/i,e)||i.default.getFirstMatch(s,e);return r&&(t.version=r),t}},{test:[/yabrowser/i],describe:function(e){var t={name:"Yandex Browser"},r=i.default.getFirstMatch(/(?:yabrowser)[\s/](\d+(\.?_?\d+)+)/i,e)||i.default.getFirstMatch(s,e);return r&&(t.version=r),t}},{test:[/ucbrowser/i],describe:function(e){var t={name:"UC Browser"},r=i.default.getFirstMatch(s,e)||i.default.getFirstMatch(/(?:ucbrowser)[\s/](\d+(\.?_?\d+)+)/i,e);return r&&(t.version=r),t}},{test:[/Maxthon|mxios/i],describe:function(e){var t={name:"Maxthon"},r=i.default.getFirstMatch(s,e)||i.default.getFirstMatch(/(?:Maxthon|mxios)[\s/](\d+(\.?_?\d+)+)/i,e);return r&&(t.version=r),t}},{test:[/epiphany/i],describe:function(e){var 
t={name:"Epiphany"},r=i.default.getFirstMatch(s,e)||i.default.getFirstMatch(/(?:epiphany)[\s/](\d+(\.?_?\d+)+)/i,e);return r&&(t.version=r),t}},{test:[/puffin/i],describe:function(e){var t={name:"Puffin"},r=i.default.getFirstMatch(s,e)||i.default.getFirstMatch(/(?:puffin)[\s/](\d+(\.?_?\d+)+)/i,e);return r&&(t.version=r),t}},{test:[/sleipnir/i],describe:function(e){var t={name:"Sleipnir"},r=i.default.getFirstMatch(s,e)||i.default.getFirstMatch(/(?:sleipnir)[\s/](\d+(\.?_?\d+)+)/i,e);return r&&(t.version=r),t}},{test:[/k-meleon/i],describe:function(e){var t={name:"K-Meleon"},r=i.default.getFirstMatch(s,e)||i.default.getFirstMatch(/(?:k-meleon)[\s/](\d+(\.?_?\d+)+)/i,e);return r&&(t.version=r),t}},{test:[/micromessenger/i],describe:function(e){var t={name:"WeChat"},r=i.default.getFirstMatch(/(?:micromessenger)[\s/](\d+(\.?_?\d+)+)/i,e)||i.default.getFirstMatch(s,e);return r&&(t.version=r),t}},{test:[/qqbrowser/i],describe:function(e){var t={name:/qqbrowserlite/i.test(e)?"QQ Browser Lite":"QQ Browser"},r=i.default.getFirstMatch(/(?:qqbrowserlite|qqbrowser)[/](\d+(\.?_?\d+)+)/i,e)||i.default.getFirstMatch(s,e);return r&&(t.version=r),t}},{test:[/msie|trident/i],describe:function(e){var t={name:"Internet Explorer"},r=i.default.getFirstMatch(/(?:msie |rv:)(\d+(\.?_?\d+)+)/i,e);return r&&(t.version=r),t}},{test:[/\sedg\//i],describe:function(e){var t={name:"Microsoft Edge"},r=i.default.getFirstMatch(/\sedg\/(\d+(\.?_?\d+)+)/i,e);return r&&(t.version=r),t}},{test:[/edg([ea]|ios)/i],describe:function(e){var t={name:"Microsoft Edge"},r=i.default.getSecondMatch(/edg([ea]|ios)\/(\d+(\.?_?\d+)+)/i,e);return r&&(t.version=r),t}},{test:[/vivaldi/i],describe:function(e){var t={name:"Vivaldi"},r=i.default.getFirstMatch(/vivaldi\/(\d+(\.?_?\d+)+)/i,e);return r&&(t.version=r),t}},{test:[/seamonkey/i],describe:function(e){var t={name:"SeaMonkey"},r=i.default.getFirstMatch(/seamonkey\/(\d+(\.?_?\d+)+)/i,e);return r&&(t.version=r),t}},{test:[/sailfish/i],describe:function(e){var 
t={name:"Sailfish"},r=i.default.getFirstMatch(/sailfish\s?browser\/(\d+(\.\d+)?)/i,e);return r&&(t.version=r),t}},{test:[/silk/i],describe:function(e){var t={name:"Amazon Silk"},r=i.default.getFirstMatch(/silk\/(\d+(\.?_?\d+)+)/i,e);return r&&(t.version=r),t}},{test:[/phantom/i],describe:function(e){var t={name:"PhantomJS"},r=i.default.getFirstMatch(/phantomjs\/(\d+(\.?_?\d+)+)/i,e);return r&&(t.version=r),t}},{test:[/slimerjs/i],describe:function(e){var t={name:"SlimerJS"},r=i.default.getFirstMatch(/slimerjs\/(\d+(\.?_?\d+)+)/i,e);return r&&(t.version=r),t}},{test:[/blackberry|\bbb\d+/i,/rim\stablet/i],describe:function(e){var t={name:"BlackBerry"},r=i.default.getFirstMatch(s,e)||i.default.getFirstMatch(/blackberry[\d]+\/(\d+(\.?_?\d+)+)/i,e);return r&&(t.version=r),t}},{test:[/(web|hpw)[o0]s/i],describe:function(e){var t={name:"WebOS Browser"},r=i.default.getFirstMatch(s,e)||i.default.getFirstMatch(/w(?:eb)?[o0]sbrowser\/(\d+(\.?_?\d+)+)/i,e);return r&&(t.version=r),t}},{test:[/bada/i],describe:function(e){var t={name:"Bada"},r=i.default.getFirstMatch(/dolfin\/(\d+(\.?_?\d+)+)/i,e);return r&&(t.version=r),t}},{test:[/tizen/i],describe:function(e){var t={name:"Tizen"},r=i.default.getFirstMatch(/(?:tizen\s?)?browser\/(\d+(\.?_?\d+)+)/i,e)||i.default.getFirstMatch(s,e);return r&&(t.version=r),t}},{test:[/qupzilla/i],describe:function(e){var t={name:"QupZilla"},r=i.default.getFirstMatch(/(?:qupzilla)[\s/](\d+(\.?_?\d+)+)/i,e)||i.default.getFirstMatch(s,e);return r&&(t.version=r),t}},{test:[/firefox|iceweasel|fxios/i],describe:function(e){var t={name:"Firefox"},r=i.default.getFirstMatch(/(?:firefox|iceweasel|fxios)[\s/](\d+(\.?_?\d+)+)/i,e);return r&&(t.version=r),t}},{test:[/electron/i],describe:function(e){var t={name:"Electron"},r=i.default.getFirstMatch(/(?:electron)\/(\d+(\.?_?\d+)+)/i,e);return r&&(t.version=r),t}},{test:[/MiuiBrowser/i],describe:function(e){var t={name:"Miui"},r=i.default.getFirstMatch(/(?:MiuiBrowser)[\s/](\d+(\.?_?\d+)+)/i,e);return 
r&&(t.version=r),t}},{test:[/chromium/i],describe:function(e){var t={name:"Chromium"},r=i.default.getFirstMatch(/(?:chromium)[\s/](\d+(\.?_?\d+)+)/i,e)||i.default.getFirstMatch(s,e);return r&&(t.version=r),t}},{test:[/chrome|crios|crmo/i],describe:function(e){var t={name:"Chrome"},r=i.default.getFirstMatch(/(?:chrome|crios|crmo)\/(\d+(\.?_?\d+)+)/i,e);return r&&(t.version=r),t}},{test:[/GSA/i],describe:function(e){var t={name:"Google Search"},r=i.default.getFirstMatch(/(?:GSA)\/(\d+(\.?_?\d+)+)/i,e);return r&&(t.version=r),t}},{test:function(e){var t=!e.test(/like android/i),r=e.test(/android/i);return t&&r},describe:function(e){var t={name:"Android Browser"},r=i.default.getFirstMatch(s,e);return r&&(t.version=r),t}},{test:[/playstation 4/i],describe:function(e){var t={name:"PlayStation 4"},r=i.default.getFirstMatch(s,e);return r&&(t.version=r),t}},{test:[/safari|applewebkit/i],describe:function(e){var t={name:"Safari"},r=i.default.getFirstMatch(s,e);return r&&(t.version=r),t}},{test:[/.*/i],describe:function(e){var t=-1!==e.search("\\(")?/^(.*)\/(.*)[ \t]\((.*)/:/^(.*)\/(.*) /;return{name:i.default.getFirstMatch(t,e),version:i.default.getSecondMatch(t,e)}}}];t.default=a,e.exports=t.default},93:function(e,t,r){"use strict";t.__esModule=!0,t.default=void 0;var n,i=(n=r(17))&&n.__esModule?n:{default:n},s=r(18);var a=[{test:[/Roku\/DVP/],describe:function(e){var t=i.default.getFirstMatch(/Roku\/DVP-(\d+\.\d+)/i,e);return{name:s.OS_MAP.Roku,version:t}}},{test:[/windows phone/i],describe:function(e){var t=i.default.getFirstMatch(/windows phone (?:os)?\s?(\d+(\.\d+)*)/i,e);return{name:s.OS_MAP.WindowsPhone,version:t}}},{test:[/windows /i],describe:function(e){var t=i.default.getFirstMatch(/Windows ((NT|XP)( \d\d?.\d)?)/i,e),r=i.default.getWindowsVersionName(t);return{name:s.OS_MAP.Windows,version:t,versionName:r}}},{test:[/Macintosh(.*?) 
FxiOS(.*?)\//],describe:function(e){var t={name:s.OS_MAP.iOS},r=i.default.getSecondMatch(/(Version\/)(\d[\d.]+)/,e);return r&&(t.version=r),t}},{test:[/macintosh/i],describe:function(e){var t=i.default.getFirstMatch(/mac os x (\d+(\.?_?\d+)+)/i,e).replace(/[_\s]/g,"."),r=i.default.getMacOSVersionName(t),n={name:s.OS_MAP.MacOS,version:t};return r&&(n.versionName=r),n}},{test:[/(ipod|iphone|ipad)/i],describe:function(e){var t=i.default.getFirstMatch(/os (\d+([_\s]\d+)*) like mac os x/i,e).replace(/[_\s]/g,".");return{name:s.OS_MAP.iOS,version:t}}},{test:function(e){var t=!e.test(/like android/i),r=e.test(/android/i);return t&&r},describe:function(e){var t=i.default.getFirstMatch(/android[\s/-](\d+(\.\d+)*)/i,e),r=i.default.getAndroidVersionName(t),n={name:s.OS_MAP.Android,version:t};return r&&(n.versionName=r),n}},{test:[/(web|hpw)[o0]s/i],describe:function(e){var t=i.default.getFirstMatch(/(?:web|hpw)[o0]s\/(\d+(\.\d+)*)/i,e),r={name:s.OS_MAP.WebOS};return t&&t.length&&(r.version=t),r}},{test:[/blackberry|\bbb\d+/i,/rim\stablet/i],describe:function(e){var t=i.default.getFirstMatch(/rim\stablet\sos\s(\d+(\.\d+)*)/i,e)||i.default.getFirstMatch(/blackberry\d+\/(\d+([_\s]\d+)*)/i,e)||i.default.getFirstMatch(/\bbb(\d+)/i,e);return{name:s.OS_MAP.BlackBerry,version:t}}},{test:[/bada/i],describe:function(e){var t=i.default.getFirstMatch(/bada\/(\d+(\.\d+)*)/i,e);return{name:s.OS_MAP.Bada,version:t}}},{test:[/tizen/i],describe:function(e){var t=i.default.getFirstMatch(/tizen[/\s](\d+(\.\d+)*)/i,e);return{name:s.OS_MAP.Tizen,version:t}}},{test:[/linux/i],describe:function(){return{name:s.OS_MAP.Linux}}},{test:[/CrOS/],describe:function(){return{name:s.OS_MAP.ChromeOS}}},{test:[/PlayStation 4/],describe:function(e){var t=i.default.getFirstMatch(/PlayStation 4[/\s](\d+(\.\d+)*)/i,e);return{name:s.OS_MAP.PlayStation4,version:t}}}];t.default=a,e.exports=t.default},94:function(e,t,r){"use strict";t.__esModule=!0,t.default=void 0;var 
n,i=(n=r(17))&&n.__esModule?n:{default:n},s=r(18);var a=[{test:[/googlebot/i],describe:function(){return{type:"bot",vendor:"Google"}}},{test:[/huawei/i],describe:function(e){var t=i.default.getFirstMatch(/(can-l01)/i,e)&&"Nova",r={type:s.PLATFORMS_MAP.mobile,vendor:"Huawei"};return t&&(r.model=t),r}},{test:[/nexus\s*(?:7|8|9|10).*/i],describe:function(){return{type:s.PLATFORMS_MAP.tablet,vendor:"Nexus"}}},{test:[/ipad/i],describe:function(){return{type:s.PLATFORMS_MAP.tablet,vendor:"Apple",model:"iPad"}}},{test:[/Macintosh(.*?) FxiOS(.*?)\//],describe:function(){return{type:s.PLATFORMS_MAP.tablet,vendor:"Apple",model:"iPad"}}},{test:[/kftt build/i],describe:function(){return{type:s.PLATFORMS_MAP.tablet,vendor:"Amazon",model:"Kindle Fire HD 7"}}},{test:[/silk/i],describe:function(){return{type:s.PLATFORMS_MAP.tablet,vendor:"Amazon"}}},{test:[/tablet(?! pc)/i],describe:function(){return{type:s.PLATFORMS_MAP.tablet}}},{test:function(e){var t=e.test(/ipod|iphone/i),r=e.test(/like (ipod|iphone)/i);return t&&!r},describe:function(e){var t=i.default.getFirstMatch(/(ipod|iphone)/i,e);return{type:s.PLATFORMS_MAP.mobile,vendor:"Apple",model:t}}},{test:[/nexus\s*[0-6].*/i,/galaxy nexus/i],describe:function(){return{type:s.PLATFORMS_MAP.mobile,vendor:"Nexus"}}},{test:[/[^-]mobi/i],describe:function(){return{type:s.PLATFORMS_MAP.mobile}}},{test:function(e){return"blackberry"===e.getBrowserName(!0)},describe:function(){return{type:s.PLATFORMS_MAP.mobile,vendor:"BlackBerry"}}},{test:function(e){return"bada"===e.getBrowserName(!0)},describe:function(){return{type:s.PLATFORMS_MAP.mobile}}},{test:function(e){return"windows phone"===e.getBrowserName()},describe:function(){return{type:s.PLATFORMS_MAP.mobile,vendor:"Microsoft"}}},{test:function(e){var 
t=Number(String(e.getOSVersion()).split(".")[0]);return"android"===e.getOSName(!0)&&t>=3},describe:function(){return{type:s.PLATFORMS_MAP.tablet}}},{test:function(e){return"android"===e.getOSName(!0)},describe:function(){return{type:s.PLATFORMS_MAP.mobile}}},{test:function(e){return"macos"===e.getOSName(!0)},describe:function(){return{type:s.PLATFORMS_MAP.desktop,vendor:"Apple"}}},{test:function(e){return"windows"===e.getOSName(!0)},describe:function(){return{type:s.PLATFORMS_MAP.desktop}}},{test:function(e){return"linux"===e.getOSName(!0)},describe:function(){return{type:s.PLATFORMS_MAP.desktop}}},{test:function(e){return"playstation 4"===e.getOSName(!0)},describe:function(){return{type:s.PLATFORMS_MAP.tv}}},{test:function(e){return"roku"===e.getOSName(!0)},describe:function(){return{type:s.PLATFORMS_MAP.tv}}}];t.default=a,e.exports=t.default},95:function(e,t,r){"use strict";t.__esModule=!0,t.default=void 0;var n,i=(n=r(17))&&n.__esModule?n:{default:n},s=r(18);var a=[{test:function(e){return"microsoft edge"===e.getBrowserName(!0)},describe:function(e){if(/\sedg\//i.test(e))return{name:s.ENGINE_MAP.Blink};var t=i.default.getFirstMatch(/edge\/(\d+(\.?_?\d+)+)/i,e);return{name:s.ENGINE_MAP.EdgeHTML,version:t}}},{test:[/trident/i],describe:function(e){var t={name:s.ENGINE_MAP.Trident},r=i.default.getFirstMatch(/trident\/(\d+(\.?_?\d+)+)/i,e);return r&&(t.version=r),t}},{test:function(e){return e.test(/presto/i)},describe:function(e){var t={name:s.ENGINE_MAP.Presto},r=i.default.getFirstMatch(/presto\/(\d+(\.?_?\d+)+)/i,e);return r&&(t.version=r),t}},{test:function(e){var t=e.test(/gecko/i),r=e.test(/like gecko/i);return t&&!r},describe:function(e){var t={name:s.ENGINE_MAP.Gecko},r=i.default.getFirstMatch(/gecko\/(\d+(\.?_?\d+)+)/i,e);return r&&(t.version=r),t}},{test:[/(apple)?webkit\/537\.36/i],describe:function(){return{name:s.ENGINE_MAP.Blink}}},{test:[/(apple)?webkit/i],describe:function(e){var 
t={name:s.ENGINE_MAP.WebKit},r=i.default.getFirstMatch(/webkit\/(\d+(\.?_?\d+)+)/i,e);return r&&(t.version=r),t}}];t.default=a,e.exports=t.default}})})); \ No newline at end of file diff --git a/amplify/functions/fetchDocuments/node_modules/bowser/index.d.ts b/amplify/functions/fetchDocuments/node_modules/bowser/index.d.ts new file mode 100644 index 0000000..d95656a --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/bowser/index.d.ts @@ -0,0 +1,250 @@ +// Type definitions for Bowser v2 +// Project: https://github.com/lancedikson/bowser +// Definitions by: Alexander P. Cerutti , + +export = Bowser; +export as namespace Bowser; + +declare namespace Bowser { + /** + * Creates a Parser instance + * @param {string} UA - User agent string + * @param {boolean} skipParsing + */ + + function getParser(UA: string, skipParsing?: boolean): Parser.Parser; + + /** + * Creates a Parser instance and runs Parser.getResult immediately + * @param UA - User agent string + * @returns {Parser.ParsedResult} + */ + + function parse(UA: string): Parser.ParsedResult; + + /** + * Constants exposed via bowser getters + */ + const BROWSER_MAP: Record; + const ENGINE_MAP: Record; + const OS_MAP: Record; + const PLATFORMS_MAP: Record; + + namespace Parser { + interface Parser { + constructor(UA: string, skipParsing?: boolean): Parser.Parser; + + /** + * Get parsed browser object + * @return {BrowserDetails} Browser's details + */ + + getBrowser(): BrowserDetails; + + /** + * Get browser's name + * @param {Boolean} [toLowerCase] return lower-cased value + * @return {String} Browser's name or an empty string + */ + + getBrowserName(toLowerCase?: boolean): string; + + /** + * Get browser's version + * @return {String} version of browser + */ + + getBrowserVersion(): string; + + /** + * Get OS + * @return {OSDetails} - OS Details + * + * @example + * this.getOS(); // { + * // name: 'macOS', + * // version: '10.11.12', + * // } + */ + + getOS(): OSDetails; + + /** + * Get OS name + * 
@param {Boolean} [toLowerCase] return lower-cased value + * @return {String} name of the OS — macOS, Windows, Linux, etc. + */ + + getOSName(toLowerCase?: boolean): string; + + /** + * Get OS version + * @return {String} full version with dots ('10.11.12', '5.6', etc) + */ + + getOSVersion(): string; + + /** + * Get parsed platform + * @returns {PlatformDetails} + */ + + getPlatform(): PlatformDetails; + + /** + * Get platform name + * @param {boolean} toLowerCase + */ + + getPlatformType(toLowerCase?: boolean): string; + + /** + * Get parsed engine + * @returns {EngineDetails} + */ + + getEngine(): EngineDetails; + + /** + * Get parsed engine's name + * @returns {String} Engine's name or an empty string + */ + + getEngineName(): string; + + /** + * Get parsed result + * @return {ParsedResult} + */ + + getResult(): ParsedResult; + + /** + * Get UserAgent string of current Parser instance + * @return {String} User-Agent String of the current object + */ + + getUA(): string; + + /** + * Is anything? 
Check if the browser is called "anything", + * the OS called "anything" or the platform called "anything" + * @param {String} anything + * @returns {Boolean} + */ + + is(anything: any): boolean; + + /** + * Parse full information about the browser + * @returns {Parser.Parser} + */ + + parse(): Parser.Parser; + + /** + * Get parsed browser object + * @returns {BrowserDetails} + */ + + parseBrowser(): BrowserDetails; + + /** + * Get parsed engine + * @returns {EngineDetails} + */ + + parseEngine(): EngineDetails; + + /** + * Parse OS and save it to this.parsedResult.os + * @returns {OSDetails} + */ + + parseOS(): OSDetails; + + /** + * Get parsed platform + * @returns {PlatformDetails} + */ + + parsePlatform(): PlatformDetails; + + /** + * Check if parsed browser matches certain conditions + * + * @param {checkTree} checkTree It's one or two layered object, + * which can include a platform or an OS on the first layer + * and should have browsers specs on the bottom-laying layer + * + * @returns {Boolean|undefined} Whether the browser satisfies the set conditions or not. + * Returns `undefined` when the browser is no described in the checkTree object. 
+ * + * @example + * const browser = new Bowser(UA); + * if (browser.check({chrome: '>118.01.1322' })) + * // or with os + * if (browser.check({windows: { chrome: '>118.01.1322' } })) + * // or with platforms + * if (browser.check({desktop: { chrome: '>118.01.1322' } })) + */ + + satisfies(checkTree: checkTree): boolean | undefined; + + /** + * Check if the browser name equals the passed string + * @param browserName The string to compare with the browser name + * @param [includingAlias=false] The flag showing whether alias will be included into comparison + * @returns {boolean} + */ + + + isBrowser(browserName: string, includingAlias?: boolean): boolean; + + /** + * Check if any of the given values satifies `.is(anything)` + * @param {string[]} anythings + * @returns {boolean} true if at least one condition is satisfied, false otherwise. + */ + + some(anythings: string[]): boolean | undefined; + + /** + * Test a UA string for a regexp + * @param regex + * @returns {boolean} true if the regex matches the UA, false otherwise. 
+ */ + + test(regex: RegExp): boolean; + } + + interface ParsedResult { + browser: BrowserDetails; + os: OSDetails; + platform: PlatformDetails; + engine: EngineDetails; + } + + interface Details { + name?: string; + version?: string; + } + + interface OSDetails extends Details { + versionName?: string; + } + + interface PlatformDetails { + type?: string; + vendor?: string; + model?: string; + } + + type BrowserDetails = Details; + type EngineDetails = Details; + + interface checkTree { + [key: string]: any; + } + } +} diff --git a/amplify/functions/fetchDocuments/node_modules/bowser/package.json b/amplify/functions/fetchDocuments/node_modules/bowser/package.json new file mode 100644 index 0000000..3fb7c83 --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/bowser/package.json @@ -0,0 +1,83 @@ +{ + "name": "bowser", + "version": "2.11.0", + "description": "Lightweight browser detector", + "keywords": [ + "browser", + "useragent", + "user-agent", + "parser", + "ua", + "detection", + "ender", + "sniff" + ], + "homepage": "https://github.com/lancedikson/bowser", + "author": "Dustin Diaz (http://dustindiaz.com)", + "contributors": [ + { + "name": "Denis Demchenko", + "url": "http://twitter.com/lancedikson" + } + ], + "main": "es5.js", + "browser": "es5.js", + "module": "src/bowser.js", + "types": "index.d.ts", + "repository": { + "type": "git", + "url": "git+https://github.com/lancedikson/bowser.git" + }, + "devDependencies": { + "@babel/cli": "^7.11.6", + "@babel/core": "^7.8.0", + "@babel/polyfill": "^7.8.3", + "@babel/preset-env": "^7.8.2", + "@babel/register": "^7.8.3", + "ava": "^3.0.0", + "babel-eslint": "^10.0.3", + "babel-loader": "^8.0.6", + "babel-plugin-add-module-exports": "^1.0.2", + "babel-plugin-istanbul": "^6.0.0", + "compression-webpack-plugin": "^4.0.0", + "coveralls": "^3.0.6", + "docdash": "^1.1.1", + "eslint": "^6.5.1", + "eslint-config-airbnb-base": "^13.2.0", + "eslint-plugin-ava": "^10.0.0", + "eslint-plugin-import": "^2.18.2", + 
"gh-pages": "^3.0.0", + "jsdoc": "^3.6.3", + "nyc": "^15.0.0", + "sinon": "^9.0.0", + "testem": "^3.0.0", + "webpack": "^4.41.0", + "webpack-bundle-analyzer": "^3.5.2", + "webpack-cli": "^3.3.9", + "yamljs": "^0.3.0" + }, + "ava": { + "require": [ + "@babel/register" + ] + }, + "bugs": { + "url": "https://github.com/lancedikson/bowser/issues" + }, + "directories": { + "test": "test" + }, + "scripts": { + "build": "webpack --config webpack.config.js", + "generate-and-deploy-docs": "npm run generate-docs && gh-pages --dist docs --dest docs", + "watch": "webpack --watch --config webpack.config.js", + "prepublishOnly": "npm run build", + "lint": "eslint ./src", + "testem": "testem", + "test": "nyc --reporter=html --reporter=text ava", + "test:watch": "ava --watch", + "coverage": "nyc report --reporter=text-lcov | coveralls", + "generate-docs": "jsdoc -c jsdoc.json" + }, + "license": "MIT" +} diff --git a/amplify/functions/fetchDocuments/node_modules/bowser/src/bowser.js b/amplify/functions/fetchDocuments/node_modules/bowser/src/bowser.js new file mode 100644 index 0000000..f79e6e0 --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/bowser/src/bowser.js @@ -0,0 +1,77 @@ +/*! + * Bowser - a browser detector + * https://github.com/lancedikson/bowser + * MIT License | (c) Dustin Diaz 2012-2015 + * MIT License | (c) Denis Demchenko 2015-2019 + */ +import Parser from './parser.js'; +import { + BROWSER_MAP, + ENGINE_MAP, + OS_MAP, + PLATFORMS_MAP, +} from './constants.js'; + +/** + * Bowser class. + * Keep it simple as much as it can be. + * It's supposed to work with collections of {@link Parser} instances + * rather then solve one-instance problems. + * All the one-instance stuff is located in Parser class. 
+ * + * @class + * @classdesc Bowser is a static object, that provides an API to the Parsers + * @hideconstructor + */ +class Bowser { + /** + * Creates a {@link Parser} instance + * + * @param {String} UA UserAgent string + * @param {Boolean} [skipParsing=false] Will make the Parser postpone parsing until you ask it + * explicitly. Same as `skipParsing` for {@link Parser}. + * @returns {Parser} + * @throws {Error} when UA is not a String + * + * @example + * const parser = Bowser.getParser(window.navigator.userAgent); + * const result = parser.getResult(); + */ + static getParser(UA, skipParsing = false) { + if (typeof UA !== 'string') { + throw new Error('UserAgent should be a string'); + } + return new Parser(UA, skipParsing); + } + + /** + * Creates a {@link Parser} instance and runs {@link Parser.getResult} immediately + * + * @param UA + * @return {ParsedResult} + * + * @example + * const result = Bowser.parse(window.navigator.userAgent); + */ + static parse(UA) { + return (new Parser(UA)).getResult(); + } + + static get BROWSER_MAP() { + return BROWSER_MAP; + } + + static get ENGINE_MAP() { + return ENGINE_MAP; + } + + static get OS_MAP() { + return OS_MAP; + } + + static get PLATFORMS_MAP() { + return PLATFORMS_MAP; + } +} + +export default Bowser; diff --git a/amplify/functions/fetchDocuments/node_modules/bowser/src/constants.js b/amplify/functions/fetchDocuments/node_modules/bowser/src/constants.js new file mode 100644 index 0000000..f335032 --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/bowser/src/constants.js @@ -0,0 +1,116 @@ +// NOTE: this list must be up-to-date with browsers listed in +// test/acceptance/useragentstrings.yml +export const BROWSER_ALIASES_MAP = { + 'Amazon Silk': 'amazon_silk', + 'Android Browser': 'android', + Bada: 'bada', + BlackBerry: 'blackberry', + Chrome: 'chrome', + Chromium: 'chromium', + Electron: 'electron', + Epiphany: 'epiphany', + Firefox: 'firefox', + Focus: 'focus', + Generic: 'generic', + 'Google 
Search': 'google_search', + Googlebot: 'googlebot', + 'Internet Explorer': 'ie', + 'K-Meleon': 'k_meleon', + Maxthon: 'maxthon', + 'Microsoft Edge': 'edge', + 'MZ Browser': 'mz', + 'NAVER Whale Browser': 'naver', + Opera: 'opera', + 'Opera Coast': 'opera_coast', + PhantomJS: 'phantomjs', + Puffin: 'puffin', + QupZilla: 'qupzilla', + QQ: 'qq', + QQLite: 'qqlite', + Safari: 'safari', + Sailfish: 'sailfish', + 'Samsung Internet for Android': 'samsung_internet', + SeaMonkey: 'seamonkey', + Sleipnir: 'sleipnir', + Swing: 'swing', + Tizen: 'tizen', + 'UC Browser': 'uc', + Vivaldi: 'vivaldi', + 'WebOS Browser': 'webos', + WeChat: 'wechat', + 'Yandex Browser': 'yandex', + Roku: 'roku', +}; + +export const BROWSER_MAP = { + amazon_silk: 'Amazon Silk', + android: 'Android Browser', + bada: 'Bada', + blackberry: 'BlackBerry', + chrome: 'Chrome', + chromium: 'Chromium', + electron: 'Electron', + epiphany: 'Epiphany', + firefox: 'Firefox', + focus: 'Focus', + generic: 'Generic', + googlebot: 'Googlebot', + google_search: 'Google Search', + ie: 'Internet Explorer', + k_meleon: 'K-Meleon', + maxthon: 'Maxthon', + edge: 'Microsoft Edge', + mz: 'MZ Browser', + naver: 'NAVER Whale Browser', + opera: 'Opera', + opera_coast: 'Opera Coast', + phantomjs: 'PhantomJS', + puffin: 'Puffin', + qupzilla: 'QupZilla', + qq: 'QQ Browser', + qqlite: 'QQ Browser Lite', + safari: 'Safari', + sailfish: 'Sailfish', + samsung_internet: 'Samsung Internet for Android', + seamonkey: 'SeaMonkey', + sleipnir: 'Sleipnir', + swing: 'Swing', + tizen: 'Tizen', + uc: 'UC Browser', + vivaldi: 'Vivaldi', + webos: 'WebOS Browser', + wechat: 'WeChat', + yandex: 'Yandex Browser', +}; + +export const PLATFORMS_MAP = { + tablet: 'tablet', + mobile: 'mobile', + desktop: 'desktop', + tv: 'tv', +}; + +export const OS_MAP = { + WindowsPhone: 'Windows Phone', + Windows: 'Windows', + MacOS: 'macOS', + iOS: 'iOS', + Android: 'Android', + WebOS: 'WebOS', + BlackBerry: 'BlackBerry', + Bada: 'Bada', + Tizen: 'Tizen', + Linux: 
'Linux', + ChromeOS: 'Chrome OS', + PlayStation4: 'PlayStation 4', + Roku: 'Roku', +}; + +export const ENGINE_MAP = { + EdgeHTML: 'EdgeHTML', + Blink: 'Blink', + Trident: 'Trident', + Presto: 'Presto', + Gecko: 'Gecko', + WebKit: 'WebKit', +}; diff --git a/amplify/functions/fetchDocuments/node_modules/bowser/src/parser-browsers.js b/amplify/functions/fetchDocuments/node_modules/bowser/src/parser-browsers.js new file mode 100644 index 0000000..ee7840c --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/bowser/src/parser-browsers.js @@ -0,0 +1,700 @@ +/** + * Browsers' descriptors + * + * The idea of descriptors is simple. You should know about them two simple things: + * 1. Every descriptor has a method or property called `test` and a `describe` method. + * 2. Order of descriptors is important. + * + * More details: + * 1. Method or property `test` serves as a way to detect whether the UA string + * matches some certain browser or not. The `describe` method helps to make a result + * object with params that show some browser-specific things: name, version, etc. + * 2. Order of descriptors is important because a Parser goes through them one by one + * in course. For example, if you insert Chrome's descriptor as the first one, + * more then a half of browsers will be described as Chrome, because they will pass + * the Chrome descriptor's test. + * + * Descriptor's `test` could be a property with an array of RegExps, where every RegExp + * will be applied to a UA string to test it whether it matches or not. + * If a descriptor has two or more regexps in the `test` array it tests them one by one + * with a logical sum operation. Parser stops if it has found any RegExp that matches the UA. + * + * Or `test` could be a method. In that case it gets a Parser instance and should + * return true/false to get the Parser know if this browser descriptor matches the UA or not. 
+ */ + +import Utils from './utils.js'; + +const commonVersionIdentifier = /version\/(\d+(\.?_?\d+)+)/i; + +const browsersList = [ + /* Googlebot */ + { + test: [/googlebot/i], + describe(ua) { + const browser = { + name: 'Googlebot', + }; + const version = Utils.getFirstMatch(/googlebot\/(\d+(\.\d+))/i, ua) || Utils.getFirstMatch(commonVersionIdentifier, ua); + + if (version) { + browser.version = version; + } + + return browser; + }, + }, + + /* Opera < 13.0 */ + { + test: [/opera/i], + describe(ua) { + const browser = { + name: 'Opera', + }; + const version = Utils.getFirstMatch(commonVersionIdentifier, ua) || Utils.getFirstMatch(/(?:opera)[\s/](\d+(\.?_?\d+)+)/i, ua); + + if (version) { + browser.version = version; + } + + return browser; + }, + }, + + /* Opera > 13.0 */ + { + test: [/opr\/|opios/i], + describe(ua) { + const browser = { + name: 'Opera', + }; + const version = Utils.getFirstMatch(/(?:opr|opios)[\s/](\S+)/i, ua) || Utils.getFirstMatch(commonVersionIdentifier, ua); + + if (version) { + browser.version = version; + } + + return browser; + }, + }, + { + test: [/SamsungBrowser/i], + describe(ua) { + const browser = { + name: 'Samsung Internet for Android', + }; + const version = Utils.getFirstMatch(commonVersionIdentifier, ua) || Utils.getFirstMatch(/(?:SamsungBrowser)[\s/](\d+(\.?_?\d+)+)/i, ua); + + if (version) { + browser.version = version; + } + + return browser; + }, + }, + { + test: [/Whale/i], + describe(ua) { + const browser = { + name: 'NAVER Whale Browser', + }; + const version = Utils.getFirstMatch(commonVersionIdentifier, ua) || Utils.getFirstMatch(/(?:whale)[\s/](\d+(?:\.\d+)+)/i, ua); + + if (version) { + browser.version = version; + } + + return browser; + }, + }, + { + test: [/MZBrowser/i], + describe(ua) { + const browser = { + name: 'MZ Browser', + }; + const version = Utils.getFirstMatch(/(?:MZBrowser)[\s/](\d+(?:\.\d+)+)/i, ua) || Utils.getFirstMatch(commonVersionIdentifier, ua); + + if (version) { + browser.version = version; + 
} + + return browser; + }, + }, + { + test: [/focus/i], + describe(ua) { + const browser = { + name: 'Focus', + }; + const version = Utils.getFirstMatch(/(?:focus)[\s/](\d+(?:\.\d+)+)/i, ua) || Utils.getFirstMatch(commonVersionIdentifier, ua); + + if (version) { + browser.version = version; + } + + return browser; + }, + }, + { + test: [/swing/i], + describe(ua) { + const browser = { + name: 'Swing', + }; + const version = Utils.getFirstMatch(/(?:swing)[\s/](\d+(?:\.\d+)+)/i, ua) || Utils.getFirstMatch(commonVersionIdentifier, ua); + + if (version) { + browser.version = version; + } + + return browser; + }, + }, + { + test: [/coast/i], + describe(ua) { + const browser = { + name: 'Opera Coast', + }; + const version = Utils.getFirstMatch(commonVersionIdentifier, ua) || Utils.getFirstMatch(/(?:coast)[\s/](\d+(\.?_?\d+)+)/i, ua); + + if (version) { + browser.version = version; + } + + return browser; + }, + }, + { + test: [/opt\/\d+(?:.?_?\d+)+/i], + describe(ua) { + const browser = { + name: 'Opera Touch', + }; + const version = Utils.getFirstMatch(/(?:opt)[\s/](\d+(\.?_?\d+)+)/i, ua) || Utils.getFirstMatch(commonVersionIdentifier, ua); + + if (version) { + browser.version = version; + } + + return browser; + }, + }, + { + test: [/yabrowser/i], + describe(ua) { + const browser = { + name: 'Yandex Browser', + }; + const version = Utils.getFirstMatch(/(?:yabrowser)[\s/](\d+(\.?_?\d+)+)/i, ua) || Utils.getFirstMatch(commonVersionIdentifier, ua); + + if (version) { + browser.version = version; + } + + return browser; + }, + }, + { + test: [/ucbrowser/i], + describe(ua) { + const browser = { + name: 'UC Browser', + }; + const version = Utils.getFirstMatch(commonVersionIdentifier, ua) || Utils.getFirstMatch(/(?:ucbrowser)[\s/](\d+(\.?_?\d+)+)/i, ua); + + if (version) { + browser.version = version; + } + + return browser; + }, + }, + { + test: [/Maxthon|mxios/i], + describe(ua) { + const browser = { + name: 'Maxthon', + }; + const version = 
Utils.getFirstMatch(commonVersionIdentifier, ua) || Utils.getFirstMatch(/(?:Maxthon|mxios)[\s/](\d+(\.?_?\d+)+)/i, ua); + + if (version) { + browser.version = version; + } + + return browser; + }, + }, + { + test: [/epiphany/i], + describe(ua) { + const browser = { + name: 'Epiphany', + }; + const version = Utils.getFirstMatch(commonVersionIdentifier, ua) || Utils.getFirstMatch(/(?:epiphany)[\s/](\d+(\.?_?\d+)+)/i, ua); + + if (version) { + browser.version = version; + } + + return browser; + }, + }, + { + test: [/puffin/i], + describe(ua) { + const browser = { + name: 'Puffin', + }; + const version = Utils.getFirstMatch(commonVersionIdentifier, ua) || Utils.getFirstMatch(/(?:puffin)[\s/](\d+(\.?_?\d+)+)/i, ua); + + if (version) { + browser.version = version; + } + + return browser; + }, + }, + { + test: [/sleipnir/i], + describe(ua) { + const browser = { + name: 'Sleipnir', + }; + const version = Utils.getFirstMatch(commonVersionIdentifier, ua) || Utils.getFirstMatch(/(?:sleipnir)[\s/](\d+(\.?_?\d+)+)/i, ua); + + if (version) { + browser.version = version; + } + + return browser; + }, + }, + { + test: [/k-meleon/i], + describe(ua) { + const browser = { + name: 'K-Meleon', + }; + const version = Utils.getFirstMatch(commonVersionIdentifier, ua) || Utils.getFirstMatch(/(?:k-meleon)[\s/](\d+(\.?_?\d+)+)/i, ua); + + if (version) { + browser.version = version; + } + + return browser; + }, + }, + { + test: [/micromessenger/i], + describe(ua) { + const browser = { + name: 'WeChat', + }; + const version = Utils.getFirstMatch(/(?:micromessenger)[\s/](\d+(\.?_?\d+)+)/i, ua) || Utils.getFirstMatch(commonVersionIdentifier, ua); + + if (version) { + browser.version = version; + } + + return browser; + }, + }, + { + test: [/qqbrowser/i], + describe(ua) { + const browser = { + name: (/qqbrowserlite/i).test(ua) ? 
'QQ Browser Lite' : 'QQ Browser', + }; + const version = Utils.getFirstMatch(/(?:qqbrowserlite|qqbrowser)[/](\d+(\.?_?\d+)+)/i, ua) || Utils.getFirstMatch(commonVersionIdentifier, ua); + + if (version) { + browser.version = version; + } + + return browser; + }, + }, + { + test: [/msie|trident/i], + describe(ua) { + const browser = { + name: 'Internet Explorer', + }; + const version = Utils.getFirstMatch(/(?:msie |rv:)(\d+(\.?_?\d+)+)/i, ua); + + if (version) { + browser.version = version; + } + + return browser; + }, + }, + { + test: [/\sedg\//i], + describe(ua) { + const browser = { + name: 'Microsoft Edge', + }; + + const version = Utils.getFirstMatch(/\sedg\/(\d+(\.?_?\d+)+)/i, ua); + + if (version) { + browser.version = version; + } + + return browser; + }, + }, + { + test: [/edg([ea]|ios)/i], + describe(ua) { + const browser = { + name: 'Microsoft Edge', + }; + + const version = Utils.getSecondMatch(/edg([ea]|ios)\/(\d+(\.?_?\d+)+)/i, ua); + + if (version) { + browser.version = version; + } + + return browser; + }, + }, + { + test: [/vivaldi/i], + describe(ua) { + const browser = { + name: 'Vivaldi', + }; + const version = Utils.getFirstMatch(/vivaldi\/(\d+(\.?_?\d+)+)/i, ua); + + if (version) { + browser.version = version; + } + + return browser; + }, + }, + { + test: [/seamonkey/i], + describe(ua) { + const browser = { + name: 'SeaMonkey', + }; + const version = Utils.getFirstMatch(/seamonkey\/(\d+(\.?_?\d+)+)/i, ua); + + if (version) { + browser.version = version; + } + + return browser; + }, + }, + { + test: [/sailfish/i], + describe(ua) { + const browser = { + name: 'Sailfish', + }; + + const version = Utils.getFirstMatch(/sailfish\s?browser\/(\d+(\.\d+)?)/i, ua); + + if (version) { + browser.version = version; + } + + return browser; + }, + }, + { + test: [/silk/i], + describe(ua) { + const browser = { + name: 'Amazon Silk', + }; + const version = Utils.getFirstMatch(/silk\/(\d+(\.?_?\d+)+)/i, ua); + + if (version) { + browser.version = version; + } + + 
return browser; + }, + }, + { + test: [/phantom/i], + describe(ua) { + const browser = { + name: 'PhantomJS', + }; + const version = Utils.getFirstMatch(/phantomjs\/(\d+(\.?_?\d+)+)/i, ua); + + if (version) { + browser.version = version; + } + + return browser; + }, + }, + { + test: [/slimerjs/i], + describe(ua) { + const browser = { + name: 'SlimerJS', + }; + const version = Utils.getFirstMatch(/slimerjs\/(\d+(\.?_?\d+)+)/i, ua); + + if (version) { + browser.version = version; + } + + return browser; + }, + }, + { + test: [/blackberry|\bbb\d+/i, /rim\stablet/i], + describe(ua) { + const browser = { + name: 'BlackBerry', + }; + const version = Utils.getFirstMatch(commonVersionIdentifier, ua) || Utils.getFirstMatch(/blackberry[\d]+\/(\d+(\.?_?\d+)+)/i, ua); + + if (version) { + browser.version = version; + } + + return browser; + }, + }, + { + test: [/(web|hpw)[o0]s/i], + describe(ua) { + const browser = { + name: 'WebOS Browser', + }; + const version = Utils.getFirstMatch(commonVersionIdentifier, ua) || Utils.getFirstMatch(/w(?:eb)?[o0]sbrowser\/(\d+(\.?_?\d+)+)/i, ua); + + if (version) { + browser.version = version; + } + + return browser; + }, + }, + { + test: [/bada/i], + describe(ua) { + const browser = { + name: 'Bada', + }; + const version = Utils.getFirstMatch(/dolfin\/(\d+(\.?_?\d+)+)/i, ua); + + if (version) { + browser.version = version; + } + + return browser; + }, + }, + { + test: [/tizen/i], + describe(ua) { + const browser = { + name: 'Tizen', + }; + const version = Utils.getFirstMatch(/(?:tizen\s?)?browser\/(\d+(\.?_?\d+)+)/i, ua) || Utils.getFirstMatch(commonVersionIdentifier, ua); + + if (version) { + browser.version = version; + } + + return browser; + }, + }, + { + test: [/qupzilla/i], + describe(ua) { + const browser = { + name: 'QupZilla', + }; + const version = Utils.getFirstMatch(/(?:qupzilla)[\s/](\d+(\.?_?\d+)+)/i, ua) || Utils.getFirstMatch(commonVersionIdentifier, ua); + + if (version) { + browser.version = version; + } + + return 
browser; + }, + }, + { + test: [/firefox|iceweasel|fxios/i], + describe(ua) { + const browser = { + name: 'Firefox', + }; + const version = Utils.getFirstMatch(/(?:firefox|iceweasel|fxios)[\s/](\d+(\.?_?\d+)+)/i, ua); + + if (version) { + browser.version = version; + } + + return browser; + }, + }, + { + test: [/electron/i], + describe(ua) { + const browser = { + name: 'Electron', + }; + const version = Utils.getFirstMatch(/(?:electron)\/(\d+(\.?_?\d+)+)/i, ua); + + if (version) { + browser.version = version; + } + + return browser; + }, + }, + { + test: [/MiuiBrowser/i], + describe(ua) { + const browser = { + name: 'Miui', + }; + const version = Utils.getFirstMatch(/(?:MiuiBrowser)[\s/](\d+(\.?_?\d+)+)/i, ua); + + if (version) { + browser.version = version; + } + + return browser; + }, + }, + { + test: [/chromium/i], + describe(ua) { + const browser = { + name: 'Chromium', + }; + const version = Utils.getFirstMatch(/(?:chromium)[\s/](\d+(\.?_?\d+)+)/i, ua) || Utils.getFirstMatch(commonVersionIdentifier, ua); + + if (version) { + browser.version = version; + } + + return browser; + }, + }, + { + test: [/chrome|crios|crmo/i], + describe(ua) { + const browser = { + name: 'Chrome', + }; + const version = Utils.getFirstMatch(/(?:chrome|crios|crmo)\/(\d+(\.?_?\d+)+)/i, ua); + + if (version) { + browser.version = version; + } + + return browser; + }, + }, + { + test: [/GSA/i], + describe(ua) { + const browser = { + name: 'Google Search', + }; + const version = Utils.getFirstMatch(/(?:GSA)\/(\d+(\.?_?\d+)+)/i, ua); + + if (version) { + browser.version = version; + } + + return browser; + }, + }, + + /* Android Browser */ + { + test(parser) { + const notLikeAndroid = !parser.test(/like android/i); + const butAndroid = parser.test(/android/i); + return notLikeAndroid && butAndroid; + }, + describe(ua) { + const browser = { + name: 'Android Browser', + }; + const version = Utils.getFirstMatch(commonVersionIdentifier, ua); + + if (version) { + browser.version = version; + } + 
+ return browser; + }, + }, + + /* PlayStation 4 */ + { + test: [/playstation 4/i], + describe(ua) { + const browser = { + name: 'PlayStation 4', + }; + const version = Utils.getFirstMatch(commonVersionIdentifier, ua); + + if (version) { + browser.version = version; + } + + return browser; + }, + }, + + /* Safari */ + { + test: [/safari|applewebkit/i], + describe(ua) { + const browser = { + name: 'Safari', + }; + const version = Utils.getFirstMatch(commonVersionIdentifier, ua); + + if (version) { + browser.version = version; + } + + return browser; + }, + }, + + /* Something else */ + { + test: [/.*/i], + describe(ua) { + /* Here we try to make sure that there are explicit details about the device + * in order to decide what regexp exactly we want to apply + * (as there is a specific decision based on that conclusion) + */ + const regexpWithoutDeviceSpec = /^(.*)\/(.*) /; + const regexpWithDeviceSpec = /^(.*)\/(.*)[ \t]\((.*)/; + const hasDeviceSpec = ua.search('\\(') !== -1; + const regexp = hasDeviceSpec ? 
regexpWithDeviceSpec : regexpWithoutDeviceSpec; + return { + name: Utils.getFirstMatch(regexp, ua), + version: Utils.getSecondMatch(regexp, ua), + }; + }, + }, +]; + +export default browsersList; diff --git a/amplify/functions/fetchDocuments/node_modules/bowser/src/parser-engines.js b/amplify/functions/fetchDocuments/node_modules/bowser/src/parser-engines.js new file mode 100644 index 0000000..d46d0e5 --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/bowser/src/parser-engines.js @@ -0,0 +1,120 @@ +import Utils from './utils.js'; +import { ENGINE_MAP } from './constants.js'; + +/* + * More specific goes first + */ +export default [ + /* EdgeHTML */ + { + test(parser) { + return parser.getBrowserName(true) === 'microsoft edge'; + }, + describe(ua) { + const isBlinkBased = /\sedg\//i.test(ua); + + // return blink if it's blink-based one + if (isBlinkBased) { + return { + name: ENGINE_MAP.Blink, + }; + } + + // otherwise match the version and return EdgeHTML + const version = Utils.getFirstMatch(/edge\/(\d+(\.?_?\d+)+)/i, ua); + + return { + name: ENGINE_MAP.EdgeHTML, + version, + }; + }, + }, + + /* Trident */ + { + test: [/trident/i], + describe(ua) { + const engine = { + name: ENGINE_MAP.Trident, + }; + + const version = Utils.getFirstMatch(/trident\/(\d+(\.?_?\d+)+)/i, ua); + + if (version) { + engine.version = version; + } + + return engine; + }, + }, + + /* Presto */ + { + test(parser) { + return parser.test(/presto/i); + }, + describe(ua) { + const engine = { + name: ENGINE_MAP.Presto, + }; + + const version = Utils.getFirstMatch(/presto\/(\d+(\.?_?\d+)+)/i, ua); + + if (version) { + engine.version = version; + } + + return engine; + }, + }, + + /* Gecko */ + { + test(parser) { + const isGecko = parser.test(/gecko/i); + const likeGecko = parser.test(/like gecko/i); + return isGecko && !likeGecko; + }, + describe(ua) { + const engine = { + name: ENGINE_MAP.Gecko, + }; + + const version = Utils.getFirstMatch(/gecko\/(\d+(\.?_?\d+)+)/i, ua); + + if 
(version) { + engine.version = version; + } + + return engine; + }, + }, + + /* Blink */ + { + test: [/(apple)?webkit\/537\.36/i], + describe() { + return { + name: ENGINE_MAP.Blink, + }; + }, + }, + + /* WebKit */ + { + test: [/(apple)?webkit/i], + describe(ua) { + const engine = { + name: ENGINE_MAP.WebKit, + }; + + const version = Utils.getFirstMatch(/webkit\/(\d+(\.?_?\d+)+)/i, ua); + + if (version) { + engine.version = version; + } + + return engine; + }, + }, +]; diff --git a/amplify/functions/fetchDocuments/node_modules/bowser/src/parser-os.js b/amplify/functions/fetchDocuments/node_modules/bowser/src/parser-os.js new file mode 100644 index 0000000..4c516dd --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/bowser/src/parser-os.js @@ -0,0 +1,199 @@ +import Utils from './utils.js'; +import { OS_MAP } from './constants.js'; + +export default [ + /* Roku */ + { + test: [/Roku\/DVP/], + describe(ua) { + const version = Utils.getFirstMatch(/Roku\/DVP-(\d+\.\d+)/i, ua); + return { + name: OS_MAP.Roku, + version, + }; + }, + }, + + /* Windows Phone */ + { + test: [/windows phone/i], + describe(ua) { + const version = Utils.getFirstMatch(/windows phone (?:os)?\s?(\d+(\.\d+)*)/i, ua); + return { + name: OS_MAP.WindowsPhone, + version, + }; + }, + }, + + /* Windows */ + { + test: [/windows /i], + describe(ua) { + const version = Utils.getFirstMatch(/Windows ((NT|XP)( \d\d?.\d)?)/i, ua); + const versionName = Utils.getWindowsVersionName(version); + + return { + name: OS_MAP.Windows, + version, + versionName, + }; + }, + }, + + /* Firefox on iPad */ + { + test: [/Macintosh(.*?) 
FxiOS(.*?)\//], + describe(ua) { + const result = { + name: OS_MAP.iOS, + }; + const version = Utils.getSecondMatch(/(Version\/)(\d[\d.]+)/, ua); + if (version) { + result.version = version; + } + return result; + }, + }, + + /* macOS */ + { + test: [/macintosh/i], + describe(ua) { + const version = Utils.getFirstMatch(/mac os x (\d+(\.?_?\d+)+)/i, ua).replace(/[_\s]/g, '.'); + const versionName = Utils.getMacOSVersionName(version); + + const os = { + name: OS_MAP.MacOS, + version, + }; + if (versionName) { + os.versionName = versionName; + } + return os; + }, + }, + + /* iOS */ + { + test: [/(ipod|iphone|ipad)/i], + describe(ua) { + const version = Utils.getFirstMatch(/os (\d+([_\s]\d+)*) like mac os x/i, ua).replace(/[_\s]/g, '.'); + + return { + name: OS_MAP.iOS, + version, + }; + }, + }, + + /* Android */ + { + test(parser) { + const notLikeAndroid = !parser.test(/like android/i); + const butAndroid = parser.test(/android/i); + return notLikeAndroid && butAndroid; + }, + describe(ua) { + const version = Utils.getFirstMatch(/android[\s/-](\d+(\.\d+)*)/i, ua); + const versionName = Utils.getAndroidVersionName(version); + const os = { + name: OS_MAP.Android, + version, + }; + if (versionName) { + os.versionName = versionName; + } + return os; + }, + }, + + /* WebOS */ + { + test: [/(web|hpw)[o0]s/i], + describe(ua) { + const version = Utils.getFirstMatch(/(?:web|hpw)[o0]s\/(\d+(\.\d+)*)/i, ua); + const os = { + name: OS_MAP.WebOS, + }; + + if (version && version.length) { + os.version = version; + } + return os; + }, + }, + + /* BlackBerry */ + { + test: [/blackberry|\bbb\d+/i, /rim\stablet/i], + describe(ua) { + const version = Utils.getFirstMatch(/rim\stablet\sos\s(\d+(\.\d+)*)/i, ua) + || Utils.getFirstMatch(/blackberry\d+\/(\d+([_\s]\d+)*)/i, ua) + || Utils.getFirstMatch(/\bbb(\d+)/i, ua); + + return { + name: OS_MAP.BlackBerry, + version, + }; + }, + }, + + /* Bada */ + { + test: [/bada/i], + describe(ua) { + const version = 
Utils.getFirstMatch(/bada\/(\d+(\.\d+)*)/i, ua); + + return { + name: OS_MAP.Bada, + version, + }; + }, + }, + + /* Tizen */ + { + test: [/tizen/i], + describe(ua) { + const version = Utils.getFirstMatch(/tizen[/\s](\d+(\.\d+)*)/i, ua); + + return { + name: OS_MAP.Tizen, + version, + }; + }, + }, + + /* Linux */ + { + test: [/linux/i], + describe() { + return { + name: OS_MAP.Linux, + }; + }, + }, + + /* Chrome OS */ + { + test: [/CrOS/], + describe() { + return { + name: OS_MAP.ChromeOS, + }; + }, + }, + + /* Playstation 4 */ + { + test: [/PlayStation 4/], + describe(ua) { + const version = Utils.getFirstMatch(/PlayStation 4[/\s](\d+(\.\d+)*)/i, ua); + return { + name: OS_MAP.PlayStation4, + version, + }; + }, + }, +]; diff --git a/amplify/functions/fetchDocuments/node_modules/bowser/src/parser-platforms.js b/amplify/functions/fetchDocuments/node_modules/bowser/src/parser-platforms.js new file mode 100644 index 0000000..48b1eb1 --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/bowser/src/parser-platforms.js @@ -0,0 +1,266 @@ +import Utils from './utils.js'; +import { PLATFORMS_MAP } from './constants.js'; + +/* + * Tablets go first since usually they have more specific + * signs to detect. 
+ */ + +export default [ + /* Googlebot */ + { + test: [/googlebot/i], + describe() { + return { + type: 'bot', + vendor: 'Google', + }; + }, + }, + + /* Huawei */ + { + test: [/huawei/i], + describe(ua) { + const model = Utils.getFirstMatch(/(can-l01)/i, ua) && 'Nova'; + const platform = { + type: PLATFORMS_MAP.mobile, + vendor: 'Huawei', + }; + if (model) { + platform.model = model; + } + return platform; + }, + }, + + /* Nexus Tablet */ + { + test: [/nexus\s*(?:7|8|9|10).*/i], + describe() { + return { + type: PLATFORMS_MAP.tablet, + vendor: 'Nexus', + }; + }, + }, + + /* iPad */ + { + test: [/ipad/i], + describe() { + return { + type: PLATFORMS_MAP.tablet, + vendor: 'Apple', + model: 'iPad', + }; + }, + }, + + /* Firefox on iPad */ + { + test: [/Macintosh(.*?) FxiOS(.*?)\//], + describe() { + return { + type: PLATFORMS_MAP.tablet, + vendor: 'Apple', + model: 'iPad', + }; + }, + }, + + /* Amazon Kindle Fire */ + { + test: [/kftt build/i], + describe() { + return { + type: PLATFORMS_MAP.tablet, + vendor: 'Amazon', + model: 'Kindle Fire HD 7', + }; + }, + }, + + /* Another Amazon Tablet with Silk */ + { + test: [/silk/i], + describe() { + return { + type: PLATFORMS_MAP.tablet, + vendor: 'Amazon', + }; + }, + }, + + /* Tablet */ + { + test: [/tablet(?! 
pc)/i], + describe() { + return { + type: PLATFORMS_MAP.tablet, + }; + }, + }, + + /* iPod/iPhone */ + { + test(parser) { + const iDevice = parser.test(/ipod|iphone/i); + const likeIDevice = parser.test(/like (ipod|iphone)/i); + return iDevice && !likeIDevice; + }, + describe(ua) { + const model = Utils.getFirstMatch(/(ipod|iphone)/i, ua); + return { + type: PLATFORMS_MAP.mobile, + vendor: 'Apple', + model, + }; + }, + }, + + /* Nexus Mobile */ + { + test: [/nexus\s*[0-6].*/i, /galaxy nexus/i], + describe() { + return { + type: PLATFORMS_MAP.mobile, + vendor: 'Nexus', + }; + }, + }, + + /* Mobile */ + { + test: [/[^-]mobi/i], + describe() { + return { + type: PLATFORMS_MAP.mobile, + }; + }, + }, + + /* BlackBerry */ + { + test(parser) { + return parser.getBrowserName(true) === 'blackberry'; + }, + describe() { + return { + type: PLATFORMS_MAP.mobile, + vendor: 'BlackBerry', + }; + }, + }, + + /* Bada */ + { + test(parser) { + return parser.getBrowserName(true) === 'bada'; + }, + describe() { + return { + type: PLATFORMS_MAP.mobile, + }; + }, + }, + + /* Windows Phone */ + { + test(parser) { + return parser.getBrowserName() === 'windows phone'; + }, + describe() { + return { + type: PLATFORMS_MAP.mobile, + vendor: 'Microsoft', + }; + }, + }, + + /* Android Tablet */ + { + test(parser) { + const osMajorVersion = Number(String(parser.getOSVersion()).split('.')[0]); + return parser.getOSName(true) === 'android' && (osMajorVersion >= 3); + }, + describe() { + return { + type: PLATFORMS_MAP.tablet, + }; + }, + }, + + /* Android Mobile */ + { + test(parser) { + return parser.getOSName(true) === 'android'; + }, + describe() { + return { + type: PLATFORMS_MAP.mobile, + }; + }, + }, + + /* desktop */ + { + test(parser) { + return parser.getOSName(true) === 'macos'; + }, + describe() { + return { + type: PLATFORMS_MAP.desktop, + vendor: 'Apple', + }; + }, + }, + + /* Windows */ + { + test(parser) { + return parser.getOSName(true) === 'windows'; + }, + describe() { + return { 
+ type: PLATFORMS_MAP.desktop, + }; + }, + }, + + /* Linux */ + { + test(parser) { + return parser.getOSName(true) === 'linux'; + }, + describe() { + return { + type: PLATFORMS_MAP.desktop, + }; + }, + }, + + /* PlayStation 4 */ + { + test(parser) { + return parser.getOSName(true) === 'playstation 4'; + }, + describe() { + return { + type: PLATFORMS_MAP.tv, + }; + }, + }, + + /* Roku */ + { + test(parser) { + return parser.getOSName(true) === 'roku'; + }, + describe() { + return { + type: PLATFORMS_MAP.tv, + }; + }, + }, +]; diff --git a/amplify/functions/fetchDocuments/node_modules/bowser/src/parser.js b/amplify/functions/fetchDocuments/node_modules/bowser/src/parser.js new file mode 100644 index 0000000..2f9f39f --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/bowser/src/parser.js @@ -0,0 +1,496 @@ +import browserParsersList from './parser-browsers.js'; +import osParsersList from './parser-os.js'; +import platformParsersList from './parser-platforms.js'; +import enginesParsersList from './parser-engines.js'; +import Utils from './utils.js'; + +/** + * The main class that arranges the whole parsing process. 
+ */ +class Parser { + /** + * Create instance of Parser + * + * @param {String} UA User-Agent string + * @param {Boolean} [skipParsing=false] parser can skip parsing in purpose of performance + * improvements if you need to make a more particular parsing + * like {@link Parser#parseBrowser} or {@link Parser#parsePlatform} + * + * @throw {Error} in case of empty UA String + * + * @constructor + */ + constructor(UA, skipParsing = false) { + if (UA === void (0) || UA === null || UA === '') { + throw new Error("UserAgent parameter can't be empty"); + } + + this._ua = UA; + + /** + * @typedef ParsedResult + * @property {Object} browser + * @property {String|undefined} [browser.name] + * Browser name, like `"Chrome"` or `"Internet Explorer"` + * @property {String|undefined} [browser.version] Browser version as a String `"12.01.45334.10"` + * @property {Object} os + * @property {String|undefined} [os.name] OS name, like `"Windows"` or `"macOS"` + * @property {String|undefined} [os.version] OS version, like `"NT 5.1"` or `"10.11.1"` + * @property {String|undefined} [os.versionName] OS name, like `"XP"` or `"High Sierra"` + * @property {Object} platform + * @property {String|undefined} [platform.type] + * platform type, can be either `"desktop"`, `"tablet"` or `"mobile"` + * @property {String|undefined} [platform.vendor] Vendor of the device, + * like `"Apple"` or `"Samsung"` + * @property {String|undefined} [platform.model] Device model, + * like `"iPhone"` or `"Kindle Fire HD 7"` + * @property {Object} engine + * @property {String|undefined} [engine.name] + * Can be any of this: `WebKit`, `Blink`, `Gecko`, `Trident`, `Presto`, `EdgeHTML` + * @property {String|undefined} [engine.version] String version of the engine + */ + this.parsedResult = {}; + + if (skipParsing !== true) { + this.parse(); + } + } + + /** + * Get UserAgent string of current Parser instance + * @return {String} User-Agent String of the current object + * + * @public + */ + getUA() { + return this._ua; 
+ } + + /** + * Test a UA string for a regexp + * @param {RegExp} regex + * @return {Boolean} + */ + test(regex) { + return regex.test(this._ua); + } + + /** + * Get parsed browser object + * @return {Object} + */ + parseBrowser() { + this.parsedResult.browser = {}; + + const browserDescriptor = Utils.find(browserParsersList, (_browser) => { + if (typeof _browser.test === 'function') { + return _browser.test(this); + } + + if (_browser.test instanceof Array) { + return _browser.test.some(condition => this.test(condition)); + } + + throw new Error("Browser's test function is not valid"); + }); + + if (browserDescriptor) { + this.parsedResult.browser = browserDescriptor.describe(this.getUA()); + } + + return this.parsedResult.browser; + } + + /** + * Get parsed browser object + * @return {Object} + * + * @public + */ + getBrowser() { + if (this.parsedResult.browser) { + return this.parsedResult.browser; + } + + return this.parseBrowser(); + } + + /** + * Get browser's name + * @return {String} Browser's name or an empty string + * + * @public + */ + getBrowserName(toLowerCase) { + if (toLowerCase) { + return String(this.getBrowser().name).toLowerCase() || ''; + } + return this.getBrowser().name || ''; + } + + + /** + * Get browser's version + * @return {String} version of browser + * + * @public + */ + getBrowserVersion() { + return this.getBrowser().version; + } + + /** + * Get OS + * @return {Object} + * + * @example + * this.getOS(); + * { + * name: 'macOS', + * version: '10.11.12' + * } + */ + getOS() { + if (this.parsedResult.os) { + return this.parsedResult.os; + } + + return this.parseOS(); + } + + /** + * Parse OS and save it to this.parsedResult.os + * @return {*|{}} + */ + parseOS() { + this.parsedResult.os = {}; + + const os = Utils.find(osParsersList, (_os) => { + if (typeof _os.test === 'function') { + return _os.test(this); + } + + if (_os.test instanceof Array) { + return _os.test.some(condition => this.test(condition)); + } + + throw new 
Error("Browser's test function is not valid"); + }); + + if (os) { + this.parsedResult.os = os.describe(this.getUA()); + } + + return this.parsedResult.os; + } + + /** + * Get OS name + * @param {Boolean} [toLowerCase] return lower-cased value + * @return {String} name of the OS — macOS, Windows, Linux, etc. + */ + getOSName(toLowerCase) { + const { name } = this.getOS(); + + if (toLowerCase) { + return String(name).toLowerCase() || ''; + } + + return name || ''; + } + + /** + * Get OS version + * @return {String} full version with dots ('10.11.12', '5.6', etc) + */ + getOSVersion() { + return this.getOS().version; + } + + /** + * Get parsed platform + * @return {{}} + */ + getPlatform() { + if (this.parsedResult.platform) { + return this.parsedResult.platform; + } + + return this.parsePlatform(); + } + + /** + * Get platform name + * @param {Boolean} [toLowerCase=false] + * @return {*} + */ + getPlatformType(toLowerCase = false) { + const { type } = this.getPlatform(); + + if (toLowerCase) { + return String(type).toLowerCase() || ''; + } + + return type || ''; + } + + /** + * Get parsed platform + * @return {{}} + */ + parsePlatform() { + this.parsedResult.platform = {}; + + const platform = Utils.find(platformParsersList, (_platform) => { + if (typeof _platform.test === 'function') { + return _platform.test(this); + } + + if (_platform.test instanceof Array) { + return _platform.test.some(condition => this.test(condition)); + } + + throw new Error("Browser's test function is not valid"); + }); + + if (platform) { + this.parsedResult.platform = platform.describe(this.getUA()); + } + + return this.parsedResult.platform; + } + + /** + * Get parsed engine + * @return {{}} + */ + getEngine() { + if (this.parsedResult.engine) { + return this.parsedResult.engine; + } + + return this.parseEngine(); + } + + /** + * Get engines's name + * @return {String} Engines's name or an empty string + * + * @public + */ + getEngineName(toLowerCase) { + if (toLowerCase) { + return 
String(this.getEngine().name).toLowerCase() || ''; + } + return this.getEngine().name || ''; + } + + /** + * Get parsed platform + * @return {{}} + */ + parseEngine() { + this.parsedResult.engine = {}; + + const engine = Utils.find(enginesParsersList, (_engine) => { + if (typeof _engine.test === 'function') { + return _engine.test(this); + } + + if (_engine.test instanceof Array) { + return _engine.test.some(condition => this.test(condition)); + } + + throw new Error("Browser's test function is not valid"); + }); + + if (engine) { + this.parsedResult.engine = engine.describe(this.getUA()); + } + + return this.parsedResult.engine; + } + + /** + * Parse full information about the browser + * @returns {Parser} + */ + parse() { + this.parseBrowser(); + this.parseOS(); + this.parsePlatform(); + this.parseEngine(); + + return this; + } + + /** + * Get parsed result + * @return {ParsedResult} + */ + getResult() { + return Utils.assign({}, this.parsedResult); + } + + /** + * Check if parsed browser matches certain conditions + * + * @param {Object} checkTree It's one or two layered object, + * which can include a platform or an OS on the first layer + * and should have browsers specs on the bottom-laying layer + * + * @returns {Boolean|undefined} Whether the browser satisfies the set conditions or not. + * Returns `undefined` when the browser is no described in the checkTree object. 
+ * + * @example + * const browser = Bowser.getParser(window.navigator.userAgent); + * if (browser.satisfies({chrome: '>118.01.1322' })) + * // or with os + * if (browser.satisfies({windows: { chrome: '>118.01.1322' } })) + * // or with platforms + * if (browser.satisfies({desktop: { chrome: '>118.01.1322' } })) + */ + satisfies(checkTree) { + const platformsAndOSes = {}; + let platformsAndOSCounter = 0; + const browsers = {}; + let browsersCounter = 0; + + const allDefinitions = Object.keys(checkTree); + + allDefinitions.forEach((key) => { + const currentDefinition = checkTree[key]; + if (typeof currentDefinition === 'string') { + browsers[key] = currentDefinition; + browsersCounter += 1; + } else if (typeof currentDefinition === 'object') { + platformsAndOSes[key] = currentDefinition; + platformsAndOSCounter += 1; + } + }); + + if (platformsAndOSCounter > 0) { + const platformsAndOSNames = Object.keys(platformsAndOSes); + const OSMatchingDefinition = Utils.find(platformsAndOSNames, name => (this.isOS(name))); + + if (OSMatchingDefinition) { + const osResult = this.satisfies(platformsAndOSes[OSMatchingDefinition]); + + if (osResult !== void 0) { + return osResult; + } + } + + const platformMatchingDefinition = Utils.find( + platformsAndOSNames, + name => (this.isPlatform(name)), + ); + if (platformMatchingDefinition) { + const platformResult = this.satisfies(platformsAndOSes[platformMatchingDefinition]); + + if (platformResult !== void 0) { + return platformResult; + } + } + } + + if (browsersCounter > 0) { + const browserNames = Object.keys(browsers); + const matchingDefinition = Utils.find(browserNames, name => (this.isBrowser(name, true))); + + if (matchingDefinition !== void 0) { + return this.compareVersion(browsers[matchingDefinition]); + } + } + + return undefined; + } + + /** + * Check if the browser name equals the passed string + * @param browserName The string to compare with the browser name + * @param [includingAlias=false] The flag showing whether 
alias will be included into comparison + * @returns {boolean} + */ + isBrowser(browserName, includingAlias = false) { + const defaultBrowserName = this.getBrowserName().toLowerCase(); + let browserNameLower = browserName.toLowerCase(); + const alias = Utils.getBrowserTypeByAlias(browserNameLower); + + if (includingAlias && alias) { + browserNameLower = alias.toLowerCase(); + } + return browserNameLower === defaultBrowserName; + } + + compareVersion(version) { + let expectedResults = [0]; + let comparableVersion = version; + let isLoose = false; + + const currentBrowserVersion = this.getBrowserVersion(); + + if (typeof currentBrowserVersion !== 'string') { + return void 0; + } + + if (version[0] === '>' || version[0] === '<') { + comparableVersion = version.substr(1); + if (version[1] === '=') { + isLoose = true; + comparableVersion = version.substr(2); + } else { + expectedResults = []; + } + if (version[0] === '>') { + expectedResults.push(1); + } else { + expectedResults.push(-1); + } + } else if (version[0] === '=') { + comparableVersion = version.substr(1); + } else if (version[0] === '~') { + isLoose = true; + comparableVersion = version.substr(1); + } + + return expectedResults.indexOf( + Utils.compareVersions(currentBrowserVersion, comparableVersion, isLoose), + ) > -1; + } + + isOS(osName) { + return this.getOSName(true) === String(osName).toLowerCase(); + } + + isPlatform(platformType) { + return this.getPlatformType(true) === String(platformType).toLowerCase(); + } + + isEngine(engineName) { + return this.getEngineName(true) === String(engineName).toLowerCase(); + } + + /** + * Is anything? 
Check if the browser is called "anything", + * the OS called "anything" or the platform called "anything" + * @param {String} anything + * @param [includingAlias=false] The flag showing whether alias will be included into comparison + * @returns {Boolean} + */ + is(anything, includingAlias = false) { + return this.isBrowser(anything, includingAlias) || this.isOS(anything) + || this.isPlatform(anything); + } + + /** + * Check if any of the given values satisfies this.is(anything) + * @param {String[]} anythings + * @returns {Boolean} + */ + some(anythings = []) { + return anythings.some(anything => this.is(anything)); + } +} + +export default Parser; diff --git a/amplify/functions/fetchDocuments/node_modules/bowser/src/utils.js b/amplify/functions/fetchDocuments/node_modules/bowser/src/utils.js new file mode 100644 index 0000000..d1174bf --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/bowser/src/utils.js @@ -0,0 +1,309 @@ +import { BROWSER_MAP, BROWSER_ALIASES_MAP } from './constants.js'; + +export default class Utils { + /** + * Get first matched item for a string + * @param {RegExp} regexp + * @param {String} ua + * @return {Array|{index: number, input: string}|*|boolean|string} + */ + static getFirstMatch(regexp, ua) { + const match = ua.match(regexp); + return (match && match.length > 0 && match[1]) || ''; + } + + /** + * Get second matched item for a string + * @param regexp + * @param {String} ua + * @return {Array|{index: number, input: string}|*|boolean|string} + */ + static getSecondMatch(regexp, ua) { + const match = ua.match(regexp); + return (match && match.length > 1 && match[2]) || ''; + } + + /** + * Match a regexp and return a constant or undefined + * @param {RegExp} regexp + * @param {String} ua + * @param {*} _const Any const that will be returned if regexp matches the string + * @return {*} + */ + static matchAndReturnConst(regexp, ua, _const) { + if (regexp.test(ua)) { + return _const; + } + return void (0); + } + + static 
getWindowsVersionName(version) { + switch (version) { + case 'NT': return 'NT'; + case 'XP': return 'XP'; + case 'NT 5.0': return '2000'; + case 'NT 5.1': return 'XP'; + case 'NT 5.2': return '2003'; + case 'NT 6.0': return 'Vista'; + case 'NT 6.1': return '7'; + case 'NT 6.2': return '8'; + case 'NT 6.3': return '8.1'; + case 'NT 10.0': return '10'; + default: return undefined; + } + } + + /** + * Get macOS version name + * 10.5 - Leopard + * 10.6 - Snow Leopard + * 10.7 - Lion + * 10.8 - Mountain Lion + * 10.9 - Mavericks + * 10.10 - Yosemite + * 10.11 - El Capitan + * 10.12 - Sierra + * 10.13 - High Sierra + * 10.14 - Mojave + * 10.15 - Catalina + * + * @example + * getMacOSVersionName("10.14") // 'Mojave' + * + * @param {string} version + * @return {string} versionName + */ + static getMacOSVersionName(version) { + const v = version.split('.').splice(0, 2).map(s => parseInt(s, 10) || 0); + v.push(0); + if (v[0] !== 10) return undefined; + switch (v[1]) { + case 5: return 'Leopard'; + case 6: return 'Snow Leopard'; + case 7: return 'Lion'; + case 8: return 'Mountain Lion'; + case 9: return 'Mavericks'; + case 10: return 'Yosemite'; + case 11: return 'El Capitan'; + case 12: return 'Sierra'; + case 13: return 'High Sierra'; + case 14: return 'Mojave'; + case 15: return 'Catalina'; + default: return undefined; + } + } + + /** + * Get Android version name + * 1.5 - Cupcake + * 1.6 - Donut + * 2.0 - Eclair + * 2.1 - Eclair + * 2.2 - Froyo + * 2.x - Gingerbread + * 3.x - Honeycomb + * 4.0 - Ice Cream Sandwich + * 4.1 - Jelly Bean + * 4.4 - KitKat + * 5.x - Lollipop + * 6.x - Marshmallow + * 7.x - Nougat + * 8.x - Oreo + * 9.x - Pie + * + * @example + * getAndroidVersionName("7.0") // 'Nougat' + * + * @param {string} version + * @return {string} versionName + */ + static getAndroidVersionName(version) { + const v = version.split('.').splice(0, 2).map(s => parseInt(s, 10) || 0); + v.push(0); + if (v[0] === 1 && v[1] < 5) return undefined; + if (v[0] === 1 && v[1] < 6) 
return 'Cupcake'; + if (v[0] === 1 && v[1] >= 6) return 'Donut'; + if (v[0] === 2 && v[1] < 2) return 'Eclair'; + if (v[0] === 2 && v[1] === 2) return 'Froyo'; + if (v[0] === 2 && v[1] > 2) return 'Gingerbread'; + if (v[0] === 3) return 'Honeycomb'; + if (v[0] === 4 && v[1] < 1) return 'Ice Cream Sandwich'; + if (v[0] === 4 && v[1] < 4) return 'Jelly Bean'; + if (v[0] === 4 && v[1] >= 4) return 'KitKat'; + if (v[0] === 5) return 'Lollipop'; + if (v[0] === 6) return 'Marshmallow'; + if (v[0] === 7) return 'Nougat'; + if (v[0] === 8) return 'Oreo'; + if (v[0] === 9) return 'Pie'; + return undefined; + } + + /** + * Get version precisions count + * + * @example + * getVersionPrecision("1.10.3") // 3 + * + * @param {string} version + * @return {number} + */ + static getVersionPrecision(version) { + return version.split('.').length; + } + + /** + * Calculate browser version weight + * + * @example + * compareVersions('1.10.2.1', '1.8.2.1.90') // 1 + * compareVersions('1.010.2.1', '1.09.2.1.90'); // 1 + * compareVersions('1.10.2.1', '1.10.2.1'); // 0 + * compareVersions('1.10.2.1', '1.0800.2'); // -1 + * compareVersions('1.10.2.1', '1.10', true); // 0 + * + * @param {String} versionA versions versions to compare + * @param {String} versionB versions versions to compare + * @param {boolean} [isLoose] enable loose comparison + * @return {Number} comparison result: -1 when versionA is lower, + * 1 when versionA is bigger, 0 when both equal + */ + /* eslint consistent-return: 1 */ + static compareVersions(versionA, versionB, isLoose = false) { + // 1) get common precision for both versions, for example for "10.0" and "9" it should be 2 + const versionAPrecision = Utils.getVersionPrecision(versionA); + const versionBPrecision = Utils.getVersionPrecision(versionB); + + let precision = Math.max(versionAPrecision, versionBPrecision); + let lastPrecision = 0; + + const chunks = Utils.map([versionA, versionB], (version) => { + const delta = precision - 
Utils.getVersionPrecision(version); + + // 2) "9" -> "9.0" (for precision = 2) + const _version = version + new Array(delta + 1).join('.0'); + + // 3) "9.0" -> ["000000000"", "000000009"] + return Utils.map(_version.split('.'), chunk => new Array(20 - chunk.length).join('0') + chunk).reverse(); + }); + + // adjust precision for loose comparison + if (isLoose) { + lastPrecision = precision - Math.min(versionAPrecision, versionBPrecision); + } + + // iterate in reverse order by reversed chunks array + precision -= 1; + while (precision >= lastPrecision) { + // 4) compare: "000000009" > "000000010" = false (but "9" > "10" = true) + if (chunks[0][precision] > chunks[1][precision]) { + return 1; + } + + if (chunks[0][precision] === chunks[1][precision]) { + if (precision === lastPrecision) { + // all version chunks are same + return 0; + } + + precision -= 1; + } else if (chunks[0][precision] < chunks[1][precision]) { + return -1; + } + } + + return undefined; + } + + /** + * Array::map polyfill + * + * @param {Array} arr + * @param {Function} iterator + * @return {Array} + */ + static map(arr, iterator) { + const result = []; + let i; + if (Array.prototype.map) { + return Array.prototype.map.call(arr, iterator); + } + for (i = 0; i < arr.length; i += 1) { + result.push(iterator(arr[i])); + } + return result; + } + + /** + * Array::find polyfill + * + * @param {Array} arr + * @param {Function} predicate + * @return {Array} + */ + static find(arr, predicate) { + let i; + let l; + if (Array.prototype.find) { + return Array.prototype.find.call(arr, predicate); + } + for (i = 0, l = arr.length; i < l; i += 1) { + const value = arr[i]; + if (predicate(value, i)) { + return value; + } + } + return undefined; + } + + /** + * Object::assign polyfill + * + * @param {Object} obj + * @param {Object} ...objs + * @return {Object} + */ + static assign(obj, ...assigners) { + const result = obj; + let i; + let l; + if (Object.assign) { + return Object.assign(obj, ...assigners); + } + 
for (i = 0, l = assigners.length; i < l; i += 1) { + const assigner = assigners[i]; + if (typeof assigner === 'object' && assigner !== null) { + const keys = Object.keys(assigner); + keys.forEach((key) => { + result[key] = assigner[key]; + }); + } + } + return obj; + } + + /** + * Get short version/alias for a browser name + * + * @example + * getBrowserAlias('Microsoft Edge') // edge + * + * @param {string} browserName + * @return {string} + */ + static getBrowserAlias(browserName) { + return BROWSER_ALIASES_MAP[browserName]; + } + + /** + * Get short version/alias for a browser name + * + * @example + * getBrowserAlias('edge') // Microsoft Edge + * + * @param {string} browserAlias + * @return {string} + */ + static getBrowserTypeByAlias(browserAlias) { + return BROWSER_MAP[browserAlias] || ''; + } +} diff --git a/amplify/functions/fetchDocuments/node_modules/fast-xml-parser/CHANGELOG.md b/amplify/functions/fetchDocuments/node_modules/fast-xml-parser/CHANGELOG.md new file mode 100644 index 0000000..021eab6 --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/fast-xml-parser/CHANGELOG.md @@ -0,0 +1,594 @@ +Note: If you find missing information about particular minor version, that version must have been changed without any functional change in this library. + +**4.4.1 / 2024-07-28** +- v5 fix: maximum length limit to currency value +- fix #634: build attributes with oneListGroup and attributesGroupName (#653)(By [Andreas Naziris](https://github.com/a-rasin)) +- fix: get oneListGroup to work as expected for array of strings (#662)(By [Andreas Naziris](https://github.com/a-rasin)) + +**4.4.0 / 2024-05-18** +- fix #654: parse attribute list correctly for self closing stop node. +- fix: validator bug when closing tag is not opened. 
(#647) (By [Ryosuke Fukatani](https://github.com/RyosukeFukatani)) +- fix #581: typings; return type of `tagValueProcessor` & `attributeValueProcessor` (#582) (By [monholm]()) + +**4.3.6 / 2024-03-16** +- Add support for parsing HTML numeric entities (#645) (By [Jonas Schade ](https://github.com/DerZade)) + +**4.3.5 / 2024-02-24** +- code for v5 is added for experimental use + +**4.3.4 / 2024-01-10** +- fix: Don't escape entities in CDATA sections (#633) (By [wackbyte](https://github.com/wackbyte)) + +**4.3.3 / 2024-01-10** +- Remove unnecessary regex + +**4.3.2 / 2023-10-02** +- fix `jObj.hasOwnProperty` when give input is null (By [Arda TANRIKULU](https://github.com/ardatan)) + +**4.3.1 / 2023-09-24** +- revert back "Fix typings for builder and parser to make return type generic" to avoid failure of existing projects. Need to decide a common approach. + +**4.3.0 / 2023-09-20** +- Fix stopNodes to work with removeNSPrefix (#607) (#608) (By [Craig Andrews]https://github.com/candrews)) +- Fix #610 ignore properties set to Object.prototype +- Fix typings for builder and parser to make return type generic (By [Sarah Dayan](https://github.com/sarahdayan)) + +**4.2.7 / 2023-07-30** +- Fix: builder should set text node correctly when only textnode is present (#589) (By [qianqing](https://github.com/joneqian)) +- Fix: Fix for null and undefined attributes when building xml (#585) (#598). A null or undefined value should be ignored. (By [Eugenio Ceschia](https://github.com/cecia234)) + +**4.2.6 / 2023-07-17** +- Fix: Remove trailing slash from jPath for self-closing tags (#595) (By [Maciej Radzikowski](https://github.com/m-radzikowski)) + +**4.2.5 / 2023-06-22** +- change code implementation + +**4.2.4 / 2023-06-06** +- fix security bug + +**4.2.3 / 2023-06-05** +- fix security bug + +**4.2.2 / 2023-04-18** +- fix #562: fix unpaired tag when it comes in last of a nested tag. 
Also throw error when unpaired tag is used as closing tag + +**4.2.1 / 2023-04-18** +- fix: jpath after unpaired tags + +**4.2.0 / 2023-04-09** +- support `updateTag` parser property + +**4.1.4 / 2023-04-08** +- update typings to let user create XMLBuilder instance without options (#556) (By [Patrick](https://github.com/omggga)) +- fix: IsArray option isn't parsing tags with 0 as value correctly #490 (#557) (By [Aleksandr Murashkin](https://github.com/p-kuen)) +- feature: support `oneListGroup` to group repeated children tags udder single group + +**4.1.3 / 2023-02-26** +- fix #546: Support complex entity value + +**4.1.2 / 2023-02-12** +- Security Fix + +**4.1.1 / 2023-02-03** +- Fix #540: ignoreAttributes breaks unpairedTags +- Refactor XML builder code + +**4.1.0 / 2023-02-02** +- Fix '<' or '>' in DTD comment throwing an error. (#533) (By [Adam Baker](https://github.com/Cwazywierdo)) +- Set "eNotation" to 'true' as default + +**4.0.15 / 2023-01-25** +- make "eNotation" optional + +**4.0.14 / 2023-01-22** +- fixed: add missed typing "eNotation" to parse values + +**4.0.13 / 2023-01-07** +- preserveorder formatting (By [mdeknowis](https://github.com/mdeknowis)) +- support `transformAttributeName` (By [Erik Rothoff Andersson](https://github.com/erkie)) + +**4.0.12 / 2022-11-19** +- fix typescript + +**4.0.11 / 2022-10-05** +- fix #501: parse for entities only once + +**4.0.10 / 2022-09-14** +- fix broken links in demo site (By [Yannick Lang](https://github.com/layaxx)) +- fix #491: tagValueProcessor type definition (By [Andrea Francesco Speziale](https://github.com/andreafspeziale)) +- Add jsdocs for tagValueProcessor + + +**4.0.9 / 2022-07-10** +- fix #470: stop-tag can have self-closing tag with same name +- fix #472: stopNode can have any special tag inside +- Allow !ATTLIST and !NOTATION with DOCTYPE +- Add transformTagName option to transform tag names when parsing (#469) (By [Erik Rothoff Andersson](https://github.com/erkie)) + +**4.0.8 / 2022-05-28** +- Fix 
CDATA parsing returning empty string when value = 0 (#451) (By [ndelanou](https://github.com/ndelanou)) +- Fix stopNodes when same tag appears inside node (#456) (By [patrickshipe](https://github.com/patrickshipe)) +- fix #468: prettify own properties only + +**4.0.7 / 2022-03-18** +- support CDATA even if tag order is not preserved +- support Comments even if tag order is not preserved +- fix #446: XMLbuilder should not indent XML declaration + +**4.0.6 / 2022-03-08** +- fix: call tagValueProcessor only once for array items +- fix: missing changed for #437 + +**4.0.5 / 2022-03-06** +- fix #437: call tagValueProcessor from XML builder + +**4.0.4 / 2022-03-03** +- fix #435: should skip unpaired and self-closing nodes when set as stopnodes + +**4.0.3 / 2022-02-15** +- fix: ReferenceError when Bundled with Strict (#431) (By [Andreas Heissenberger](https://github.com/aheissenberger)) + + +**4.0.2 / 2022-02-04** +- builder supports `suppressUnpairedNode` +- parser supports `ignoreDeclaration` and `ignorePiTags` +- fix: when comment is parsed as text value if given as ` ...` #423 +- builder supports decoding `&` + +**4.0.1 / 2022-01-08** +- fix builder for pi tag +- fix: support suppressBooleanAttrs by builder + +**4.0.0 / 2022-01-06** +- Generating different combined, parser only, builder only, validator only browser bundles +- Keeping cjs modules as they can be imported in cjs and esm modules both. Otherwise refer `esm` branch. + +**4.0.0-beta.8 / 2021-12-13** +- call tagValueProcessor for stop nodes + +**4.0.0-beta.7 / 2021-12-09** +- fix Validator bug when an attribute has no value but '=' only +- XML Builder should suppress unpaired tags by default. +- documents update for missing features +- refactoring to use Object.assign +- refactoring to remove repeated code + +**4.0.0-beta.6 / 2021-12-05** +- Support PI Tags processing +- Support `suppressBooleanAttributes` by XML Builder for attributes with value `true`. 
+ +**4.0.0-beta.5 / 2021-12-04** +- fix: when a tag with name "attributes" + +**4.0.0-beta.4 / 2021-12-02** +- Support HTML document parsing +- skip stop nodes parsing when building the XML from JS object +- Support external entites without DOCTYPE +- update dev dependency: strnum v1.0.5 to fix long number issue + +**4.0.0-beta.3 / 2021-11-30** +- support global stopNodes expression like "*.stop" +- support self-closing and paired unpaired tags +- fix: CDATA should not be parsed. +- Fix typings for XMLBuilder (#396)(By [Anders Emil Salvesen](https://github.com/andersem)) +- supports XML entities, HTML entities, DOCTYPE entities + +**⚠️ 4.0.0-beta.2 / 2021-11-19** +- rename `attrMap` to `attibutes` in parser output when `preserveOrder:true` +- supports unpairedTags + +**⚠️ 4.0.0-beta.1 / 2021-11-18** +- Parser returns an array now + - to make the structure common + - and to return root level detail +- renamed `cdataTagName` to `cdataPropName` +- Added `commentPropName` +- fix typings + +**⚠️ 4.0.0-beta.0 / 2021-11-16** +- Name change of many configuration properties. + - `attrNodeName` to `attributesGroupName` + - `attrValueProcessor` to `attributeValueProcessor` + - `parseNodeValue` to `parseTagValue` + - `ignoreNameSpace` to `removeNSPrefix` + - `numParseOptions` to `numberParseOptions` + - spelling correction for `suppressEmptyNode` +- Name change of cli and browser bundle to **fxparser** +- `isArray` option is added to parse a tag into array +- `preserveOrder` option is added to render XML in such a way that the result js Object maintains the order of properties same as in XML. +- Processing behaviour of `tagValueProcessor` and `attributeValueProcessor` are changes with extra input parameters +- j2xparser is renamed to XMLBuilder. +- You need to build XML parser instance for given options first before parsing XML. 
+- fix #327, #336: throw error when extra text after XML content +- fix #330: attribute value can have '\n', +- fix #350: attrbiutes can be separated by '\n' from tagname + +3.21.1 / 2021-10-31 +- Correctly format JSON elements with a text prop but no attribute props ( By [haddadnj](https://github.com/haddadnj) ) + +3.21.0 / 2021-10-25 + - feat: added option `rootNodeName` to set tag name for array input when converting js object to XML. + - feat: added option `alwaysCreateTextNode` to force text node creation (by: *@massimo-ua*) + - ⚠️ feat: Better error location for unclosed tags. (by *@Gei0r*) + - Some error messages would be changed when validating XML. Eg + - `{ InvalidXml: "Invalid '[ \"rootNode\"]' found." }` → `{InvalidTag: "Unclosed tag 'rootNode'."}` + - `{ InvalidTag: "Closing tag 'rootNode' is expected inplace of 'rootnode'." }` → `{ InvalidTag: "Expected closing tag 'rootNode' (opened in line 1) instead of closing tag 'rootnode'."}` + - ⚠️ feat: Column in error response when validating XML +```js +{ + "code": "InvalidAttr", + "msg": "Attribute 'abc' is repeated.", + "line": 1, + "col": 22 +} +``` + +3.20.1 / 2021-09-25 + - update strnum package + +3.20.0 / 2021-09-10 + - Use strnum npm package to parse string to number + - breaking change: long number will be parsed to scientific notation. 
+ +3.19.0 / 2021-03-14 + - License changed to MIT original + - Fix #321 : namespace tag parsing + +3.18.0 / 2021-02-05 + - Support RegEx and function in arrayMode option + - Fix #317 : validate nested PI tags + +3.17.4 / 2020-06-07 + - Refactor some code to support IE11 + - Fix: `` space as attribute string + +3.17.3 / 2020-05-23 + - Fix: tag name separated by \n \t + - Fix: throw error for unclosed tags + +3.17.2 / 2020-05-23 + - Fixed an issue in processing doctype tag + - Fixed tagName where it should not have whitespace chars + +3.17.1 / 2020-05-19 + - Fixed an issue in checking opening tag + +3.17.0 / 2020-05-18 + - parser: fix '<' issue when it comes in aatr value + - parser: refactoring to remove dependency from regex + - validator: fix IE 11 issue for error messages + - updated dev dependencies + - separated benchmark module to sub-module + - breaking change: comments will not be removed from CDATA data + +3.16.0 / 2020-01-12 + - validaor: fix for ampersand characters (#215) + - refactoring to support unicode chars in tag name + - update typing for validator error + +3.15.1 / 2019-12-09 + - validaor: fix multiple roots are not allowed + +3.15.0 / 2019-11-23 + - validaor: improve error messaging + - validator: add line number in case of error + - validator: add more error scenarios to make it more descriptive + +3.14.0 / 2019-10-25 + - arrayMode for XML to JS obj parsing + +3.13.0 / 2019-10-02 + - pass tag/attr name to tag/attr value processor + - inbuilt optional validation with XML parser + +3.12.21 / 2019-10-02 + - Fix validator for unclosed XMLs + - move nimnjs dependency to dev dependency + - update dependencies + +3.12.20 / 2019-08-16 + - Revert: Fix #167: '>' in attribute value as it is causing high performance degrade. + +3.12.19 / 2019-07-28 + - Fix js to xml parser should work for date values. 
(broken: `tagValueProcessor` will receive the original value instead of string always) (breaking change) + +3.12.18 / 2019-07-27 + - remove configstore dependency + +3.12.17 / 2019-07-14 + - Fix #167: '>' in attribute value + +3.12.16 / 2019-03-23 + - Support a new option "stopNodes". (#150) +Accept the list of tags which are not required to be parsed. Instead, all the nested tag and data will be assigned as string. + - Don't show post-install message + +3.12.12 / 2019-01-11 + - fix : IE parseInt, parseFloat error + +3.12.11 / 2018-12-24 + - fix #132: "/" should not be parsed as boolean attr in case of self closing tags + +3.12.9 / 2018-11-23 + - fix #129 : validator should not fail when an atrribute name is 'length' + +3.12.8 / 2018-11-22 + - fix #128 : use 'attrValueProcessor' to process attribute value in json2xml parser + +3.12.6 / 2018-11-10 + - Fix #126: check for type + +3.12.4 / 2018-09-12 + - Fix: include tasks in npm package + +3.12.3 / 2018-09-12 + - Fix CLI issue raised in last PR + +3.12.2 / 2018-09-11 + - Fix formatting for JSON to XML output + - Migrate to webpack (PR merged) + - fix cli (PR merged) + +3.12.0 / 2018-08-06 + - Support hexadecimal values + - Support true number parsing + +3.11.2 / 2018-07-23 + - Update Demo for more options + - Update license information + - Update readme for formatting, users, and spelling mistakes + - Add missing typescript definition for j2xParser + - refactoring: change filenames + +3.11.1 / 2018-06-05 + - fix #93: read the text after self closing tag + +3.11.0 / 2018-05-20 + - return defaultOptions if there are not options in buildOptions function + - added localeRange declaration in parser.d.ts + - Added support of cyrillic characters in validator XML + - fixed bug in validator work when XML data with byte order marker + +3.10.0 / 2018-05-13 + - Added support of cyrillic characters in parsing XML to JSON + +3.9.11 / 2018-05-09 + - fix https://github.com/NaturalIntelligence/fast-xml-parser/issues/80 fix nimn chars 
+ - update package information + - fix https://github.com/NaturalIntelligence/fast-xml-parser/issues/86: json 2 xml parser : property with null value should be parsed to self closing tag. + - update online demo + - revert zombiejs to old version to support old version of node + - update dependencies + +3.3.10 / 2018-04-23 + - fix #77 : parse even if closing tag has space before '>' + - include all css & js lib in demo app + - remove babel dependencies until needed + +3.3.9 / 2018-04-18 + - fix #74 : TS2314 TypeScript compiler error + +3.3.8 / 2018-04-17 + - fix #73 : IE doesn't support Object.assign + +3.3.7 / 2018-04-14 + - fix: use let insted of const in for loop of validator + - Merge pull request + https://github.com/NaturalIntelligence/fast-xml-parser/issues/71 from bb/master + first draft of typings for typescript + https://github.com/NaturalIntelligence/fast-xml-parser/issues/69 + - Merge pull request + https://github.com/NaturalIntelligence/fast-xml-parser/issues/70 from bb/patch-1 + fix some typos in readme + +3.3.6 / 2018-03-21 + - change arrow functions to full notation for IE compatibility + +3.3.5 / 2018-03-15 + - fix https://github.com/NaturalIntelligence/fast-xml-parser/issues/67 : attrNodeName invalid behavior + - fix: remove decodeHTML char condition + +3.3.4 / 2018-03-14 + - remove dependency on "he" package + - refactor code to separate methods in separate files. + - draft code for transforming XML to json string. It is not officially documented due to performance issue. + +3.3.0 / 2018-03-05 + - use common default options for XML parsing for consistency. And add `parseToNimn` method. 
+ - update nexttodo + - update README about XML to Nimn transformation and remove special notes about 3.x release + - update CONTRIBUTING.ms mentioning nexttodo + - add negative case for XML PIs + - validate xml processing instruction tags https://github.com/NaturalIntelligence/fast-xml-parser/issues/62 + - nimndata: handle array with object + - nimndata: node with nested node and text node + - nimndata: handle attributes and text node + - nimndata: add options, handle array + - add xml to nimn data converter + - x2j: direct access property with tagname + - update changelog + - fix validator when single quote presents in value enclosed with double quotes or vice versa + - Revert "remove unneded nimnjs dependency, move opencollective to devDependencies and replace it + with more light opencollective-postinstall" + This reverts commit d47aa7181075d82db4fee97fd8ea32b056fe3f46. + - Merge pull request: https://github.com/NaturalIntelligence/fast-xml-parser/issues/63 from HaroldPutman/suppress-undefined + Keep undefined nodes out of the XML output : This is useful when you are deleting nodes from the JSON and rewriting XML. 
+ +3.2.4 / 2018-03-01 + - fix #59 fix in validator when open quote presents in attribute value + - Create nexttodo.md + - exclude static from bitHound tests + - add package lock + +3.2.3 / 2018-02-28 + - Merge pull request from Delagen/master: fix namespaces can contain the same characters as xml names + +3.2.2 / 2018-02-22 + - fix: attribute xmlns should not be removed if ignoreNameSpace is false + - create CONTRIBUTING.md + +3.2.1 / 2018-02-17 + - fix: empty attribute should be parsed + +3.2.0 / 2018-02-16 + - Merge pull request : Dev to Master + - Update README and version + - j2x:add performance test + - j2x: Remove extra empty line before closing tag + - j2x: suppress empty nodes to self closing node if configured + - j2x: provide option to give indentation depth + - j2x: make optional formatting + - j2x: encodeHTMLchat + - j2x: handle cdata tag + - j2x: handle grouped attributes + - convert json to xml + - nested object + - array + - attributes + - text value + - small refactoring + - Merge pull request: Update cli.js to let user validate XML file or data + - Add option for rendering CDATA as separate property + +3.0.1 / 2018-02-09 + - fix CRLF: replace it with single space in attributes value only. 
+ +3.0.0 / 2018-02-08 + - change online tool with new changes + - update info about new options + - separate tag value processing to separate function + - make HTML decoding optional + - give an option to allow boolean attributes + - change cli options as per v3 + - Correct comparison table format on README + - update v3 information + - some performance improvement changes + - Make regex object local to the method and move some common methods to util + - Change parser to + - handle multiple instances of CDATA + - make triming of value optionals + - HTML decode attribute and text value + - refactor code to separate files + - Ignore newline chars without RE (in validator) + - validate for XML prolog + - Validate DOCTYPE without RE + - Update validator to return error response + - Update README to add detail about V3 + - Separate xmlNode model class + - include vscode debug config + - fix for repeated object + - fix attribute regex for boolean attributes + - Fix validator for invalid attributes +2.9.4 / 2018-02-02 + - Merge pull request: Decode HTML characters + - refactor source folder name + - ignore bundle / browser js to be published to npm +2.9.3 / 2018-01-26 + - Merge pull request: Correctly remove CRLF line breaks + - Enable to parse attribute in online editor + - Fix testing demo app test + - Describe parsing options + - Add options for online demo +2.9.2 / 2018-01-18 + - Remove check if tag starting with "XML" + - Fix: when there are spaces before / after CDATA + +2.9.1 / 2018-01-16 + - Fix: newline should be replaced with single space + - Fix: for single and multiline comments + - validate xml with CDATA + - Fix: the issue when there is no space between 2 attributes + - Fix: https://github.com/NaturalIntelligence/fast-xml-parser/issues/33: when there is newline char in attr val, it doesn't parse + - Merge pull request: fix ignoreNamespace + - fix: don't wrap attributes if only namespace attrs + - fix: use portfinder for run tests, update deps + - fix: don't 
treat namespaces as attributes when ignoreNamespace enabled + +2.9.0 / 2018-01-10 + - Rewrite the validator to handle large files. + Ignore DOCTYPE validation. + - Fix: When attribute value has equal sign + +2.8.3 / 2017-12-15 + - Fix: when a tag has value along with subtags + +2.8.2 / 2017-12-04 + - Fix value parsing for IE + +2.8.1 / 2017-12-01 + - fix: validator should return false instead of err when invalid XML + +2.8.0 / 2017-11-29 + - Add CLI option to ignore value conversion + - Fix variable name when filename is given on CLI + - Update CLI help text + - Merge pull request: xml2js: Accept standard input + - Test Node 8 + - Update dependencies + - Bundle readToEnd + - Add ability to read from standard input + +2.7.4 / 2017-09-22 + - Merge pull request: Allow wrap attributes with subobject to compatible with other parsers output + +2.7.3 / 2017-08-02 + - fix: handle CDATA with regx + +2.7.2 / 2017-07-30 + - Change travis config for yarn caching + - fix validator: when tag property is same as array property + - Merge pull request: Failing test case in validator for valid SVG + +2.7.1 / 2017-07-26 + - Fix: Handle val 0 + +2.7.0 / 2017-07-25 + - Fix test for arrayMode + - Merge pull request: Add arrayMode option to parse any nodes as arrays + +2.6.0 / 2017-07-14 + - code improvement + - Add unit tests for value conversion for attr + - Merge pull request: option of an attribute value conversion to a number (textAttrConversion) the same way as the textNodeConversion option does. Default value is false. 
+ +2.5.1 / 2017-07-01 + - Fix XML element name pattern + - Fix XML element name pattern while parsing + - Fix validation for xml tag element + +2.5.0 / 2017-06-25 + - Improve Validator performance + - update attr matching regex + - Add perf tests + - Improve atrr regex to handle all cases + +2.4.4 / 2017-06-08 + - Bug fix: when an attribute has single or double quote in value + +2.4.3 / 2017-06-05 + - Bug fix: when multiple CDATA tags are given + - Merge pull request: add option "textNodeConversion" + - add option "textNodeConversion" + +2.4.1 / 2017-04-14 + - fix tests + - Bug fix: preserve initial space of node value + - Handle CDATA + +2.3.1 / 2017-03-15 + - Bug fix: when single self closing tag + - Merge pull request: fix .codeclimate.yml + - Update .codeclimate.yml - Fixed config so it does not error anymore. + - Update .codeclimate.yml + +2.3.0 / 2017-02-26 + - Code improvement + - add bithound config + - Update usage + - Update travis to generate bundle js before running tests + - 1.Browserify, 2. 
add more tests for validator + - Add validator + - Fix CLI default parameter bug + +2.2.1 / 2017-02-05 + - Bug fix: CLI default option diff --git a/amplify/functions/fetchDocuments/node_modules/fast-xml-parser/LICENSE b/amplify/functions/fetchDocuments/node_modules/fast-xml-parser/LICENSE new file mode 100644 index 0000000..d7da622 --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/fast-xml-parser/LICENSE @@ -0,0 +1,21 @@ +MIT License + +Copyright (c) 2017 Amit Kumar Gupta + +Permission is hereby granted, free of charge, to any person obtaining a copy +of this software and associated documentation files (the "Software"), to deal +in the Software without restriction, including without limitation the rights +to use, copy, modify, merge, publish, distribute, sublicense, and/or sell +copies of the Software, and to permit persons to whom the Software is +furnished to do so, subject to the following conditions: + +The above copyright notice and this permission notice shall be included in all +copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, +OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE +SOFTWARE. 
diff --git a/amplify/functions/fetchDocuments/node_modules/fast-xml-parser/README.md b/amplify/functions/fetchDocuments/node_modules/fast-xml-parser/README.md new file mode 100644 index 0000000..1891838 --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/fast-xml-parser/README.md @@ -0,0 +1,236 @@ +# [fast-xml-parser](https://www.npmjs.com/package/fast-xml-parser) +[![NPM quality][quality-image]][quality-url] +[![Coverage Status](https://coveralls.io/repos/github/NaturalIntelligence/fast-xml-parser/badge.svg?branch=master)](https://coveralls.io/github/NaturalIntelligence/fast-xml-parser?branch=master) +[Try me](https://naturalintelligence.github.io/fast-xml-parser/) +[![NPM total downloads](https://img.shields.io/npm/dt/fast-xml-parser.svg)](https://npm.im/fast-xml-parser) + + +Validate XML, Parse XML to JS Object, or Build XML from JS Object without C/C++ based libraries and no callback. + +--- + +ads-thePowerGlassesBook +I had recently published a book, The Power Glasses. Please have a look. Your feedback would be helpful. You can [mail](githubissues@proton.me) me for a free copy. +
+ +Sponsor this project + + + + + + + + Stubmatic donate button +
+
+
+ + + +![fxp_sponsors](https://github.com/NaturalIntelligence/fast-xml-parser/assets/7692328/c9367497-d67e-410a-90a6-66e3808be929) + +## Users + + + + + + + + + + + + + + + + + + + +[more](./USERs.md) + +The list of users are mostly published by Github or communicated directly. Feel free to contact if you find any information wrong. + +--- + +## Main Features + +FXP logo + +* Validate XML data syntactically +* Parse XML to JS Object +* Build XML from JS Object +* Compatible to node js packages, in browser, and in CLI (click try me button above for demo) +* Faster than any other pure JS implementation. +* It can handle big files (tested up to 100mb). +* Controlled parsing using various options +* XML Entities, HTML entities, and DOCTYPE entites are supported. +* unpaired tags (Eg `
` in HTML), stop nodes (Eg ` +: + +``` + +Bundle size + +| Bundle Name | Size | +| ------------------ | ---- | +| fxbuilder.min.js | 6.5K | +| fxparser.min.js | 20K | +| fxp.min.js | 26K | +| fxvalidator.min.js | 5.7K | + +### Documents + + + + + + + +
v3v4v5
+ documents +
    +
  1. Getting Started
  2. +
  3. XML Parser
  4. +
  5. XML Builder
  6. +
  7. XML Validator
  8. +
  9. Entities
  10. +
  11. HTML Document Parsing
  12. +
  13. PI Tag processing
  14. +
    +
  1. Getting Started +
  2. Features
  3. +
  4. Options
  5. +
  6. Output Builders
  7. +
  8. Value Parsers
  9. +
+ +**note**: version 5 is released with version 4 tfor experimental use. Based on it's demand, it'll be developed and the features can be different in final release. + +## Performance +negative means error + +### XML Parser + + + + +* Y-axis: requests per second +* X-axis: File size + +### XML Builder + + +* Y-axis: requests per second + + + + + + +## Usage Trend + +[Usage Trend of fast-xml-parser](https://npm-compare.com/fast-xml-parser#timeRange=THREE_YEARS) + + + NPM Usage Trend of fast-xml-parser + + +## Supporters +### Contributors + +This project exists thanks to [all](graphs/contributors) the people who contribute. [[Contribute](docs/CONTRIBUTING.md)]. + + + + +### Backers + +Thank you to all our backers! 🙏 [[Become a backer](https://opencollective.com/fast-xml-parser#backer)] + + + + + +# License +* MIT License + +![Donate $5](static/img/donation_quote.png) diff --git a/amplify/functions/fetchDocuments/node_modules/fast-xml-parser/package.json b/amplify/functions/fetchDocuments/node_modules/fast-xml-parser/package.json new file mode 100644 index 0000000..1fd52c1 --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/fast-xml-parser/package.json @@ -0,0 +1,74 @@ +{ + "name": "fast-xml-parser", + "version": "4.4.1", + "description": "Validate XML, Parse XML, Build XML without C/C++ based libraries", + "main": "./src/fxp.js", + "scripts": { + "test": "nyc --reporter=lcov --reporter=text jasmine spec/*spec.js", + "test-types": "tsc --noEmit spec/typings/typings-test.ts", + "unit": "jasmine", + "coverage": "nyc report --reporter html --reporter text -t .nyc_output --report-dir .nyc_output/summary", + "perf": "node ./benchmark/perfTest3.js", + "lint": "eslint src/*.js spec/*.js", + "bundle": "webpack --config webpack-prod.config.js", + "prettier": "prettier --write src/**/*.js", + "publish-please": "publish-please", + "checkReadiness": "publish-please --dry-run" + }, + "bin": { + "fxparser": "./src/cli/cli.js" + }, + "repository": { + "type": "git", + 
"url": "https://github.com/NaturalIntelligence/fast-xml-parser" + }, + "keywords": [ + "fast", + "xml", + "json", + "parser", + "xml2js", + "x2js", + "xml2json", + "js", + "cli", + "validator", + "validate", + "transformer", + "assert", + "js2xml", + "json2xml", + "html" + ], + "author": "Amit Gupta (https://solothought.com)", + "license": "MIT", + "devDependencies": { + "@babel/core": "^7.13.10", + "@babel/plugin-transform-runtime": "^7.13.10", + "@babel/preset-env": "^7.13.10", + "@babel/register": "^7.13.8", + "@types/node": "20", + "babel-loader": "^8.2.2", + "cytorus": "^0.2.9", + "eslint": "^8.3.0", + "he": "^1.2.0", + "jasmine": "^3.6.4", + "nyc": "^15.1.0", + "prettier": "^1.19.1", + "publish-please": "^5.5.2", + "typescript": "5", + "webpack": "^5.64.4", + "webpack-cli": "^4.9.1" + }, + "typings": "src/fxp.d.ts", + "funding": [{ + "type": "github", + "url": "https://github.com/sponsors/NaturalIntelligence" + },{ + "type": "paypal", + "url": "https://paypal.me/naturalintelligence" + }], + "dependencies": { + "strnum": "^1.0.5" + } +} diff --git a/amplify/functions/fetchDocuments/node_modules/fast-xml-parser/src/cli/cli.js b/amplify/functions/fetchDocuments/node_modules/fast-xml-parser/src/cli/cli.js new file mode 100755 index 0000000..984534c --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/fast-xml-parser/src/cli/cli.js @@ -0,0 +1,93 @@ +#!/usr/bin/env node +'use strict'; +/*eslint-disable no-console*/ +const fs = require('fs'); +const path = require('path'); +const {XMLParser, XMLValidator} = require("../fxp"); +const readToEnd = require('./read').readToEnd; + +const version = require('./../../package.json').version; +if (process.argv[2] === '--help' || process.argv[2] === '-h') { + console.log(require("./man")); +} else if (process.argv[2] === '--version') { + console.log(version); +} else { + const options = { + removeNSPrefix: true, + ignoreAttributes: false, + parseTagValue: true, + parseAttributeValue: true, + }; + let fileName = ''; 
+ let outputFileName; + let validate = false; + let validateOnly = false; + for (let i = 2; i < process.argv.length; i++) { + if (process.argv[i] === '-ns') { + options.removeNSPrefix = false; + } else if (process.argv[i] === '-a') { + options.ignoreAttributes = true; + } else if (process.argv[i] === '-c') { + options.parseTagValue = false; + options.parseAttributeValue = false; + } else if (process.argv[i] === '-o') { + outputFileName = process.argv[++i]; + } else if (process.argv[i] === '-v') { + validate = true; + } else if (process.argv[i] === '-V') { + validateOnly = true; + } else { + //filename + fileName = process.argv[i]; + } + } + + const callback = function(xmlData) { + let output = ''; + if (validate) { + const parser = new XMLParser(options); + output = parser.parse(xmlData,validate); + } else if (validateOnly) { + output = XMLValidator.validate(xmlData); + process.exitCode = output === true ? 0 : 1; + } else { + const parser = new XMLParser(options); + output = JSON.stringify(parser.parse(xmlData,validate), null, 4); + } + if (outputFileName) { + writeToFile(outputFileName, output); + } else { + console.log(output); + } + }; + + try { + + if (!fileName) { + readToEnd(process.stdin, function(err, data) { + if (err) { + throw err; + } + callback(data.toString()); + }); + } else { + fs.readFile(fileName, function(err, data) { + if (err) { + throw err; + } + callback(data.toString()); + }); + } + } catch (e) { + console.log('Seems an invalid file or stream.' 
+ e); + } +} + +function writeToFile(fileName, data) { + fs.writeFile(fileName, data, function(err) { + if (err) { + throw err; + } + console.log('JSON output has been written to ' + fileName); + }); +} diff --git a/amplify/functions/fetchDocuments/node_modules/fast-xml-parser/src/cli/man.js b/amplify/functions/fetchDocuments/node_modules/fast-xml-parser/src/cli/man.js new file mode 100644 index 0000000..89947cc --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/fast-xml-parser/src/cli/man.js @@ -0,0 +1,12 @@ +module.exports = `Fast XML Parser 4.0.0 +---------------- +$ fxparser [-ns|-a|-c|-v|-V] [-o outputfile.json] +$ cat xmlfile.xml | fxparser [-ns|-a|-c|-v|-V] [-o outputfile.json] + +Options +---------------- +-ns: remove namespace from tag and atrribute name. +-a: don't parse attributes. +-c: parse values to premitive type. +-v: validate before parsing. +-V: validate only.` \ No newline at end of file diff --git a/amplify/functions/fetchDocuments/node_modules/fast-xml-parser/src/cli/read.js b/amplify/functions/fetchDocuments/node_modules/fast-xml-parser/src/cli/read.js new file mode 100644 index 0000000..642da52 --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/fast-xml-parser/src/cli/read.js @@ -0,0 +1,92 @@ +'use strict'; + +// Copyright 2013 Timothy J Fontaine +// +// Permission is hereby granted, free of charge, to any person obtaining a copy +// of this software and associated documentation files (the 'Software'), to deal +// in the Software without restriction, including without limitation the rights +// to use, copy, modify, merge, publish, distribute, sublicense, and/or sell +// copies of the Software, and to permit persons to whom the Software is +// furnished to do so, subject to the following conditions: +// +// The above copyright notice and this permission notice shall be included in +// all copies or substantial portions of the Software. 
+// +// THE SOFTWARE IS PROVIDED 'AS IS', WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +// IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +// FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +// AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +// LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, +// OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN +// THE SOFTWARE + +/* + +Read any stream all the way to the end and trigger a single cb + +const http = require('http'); + +const rte = require('readtoend'); + +http.get('http://nodejs.org', function(response) { + rte.readToEnd(response, function(err, body) { + console.log(body); + }); +}); + +*/ + +let stream = require('stream'); +const util = require('util'); + +if (!stream.Transform) { + stream = require('readable-stream'); +} + +function ReadToEnd(opts) { + if (!(this instanceof ReadToEnd)) { + return new ReadToEnd(opts); + } + + stream.Transform.call(this, opts); + + this._rte_encoding = opts.encoding || 'utf8'; + + this._buff = ''; +} + +module.exports = ReadToEnd; +util.inherits(ReadToEnd, stream.Transform); + +ReadToEnd.prototype._transform = function(chunk, encoding, done) { + this._buff += chunk.toString(this._rte_encoding); + this.push(chunk); + done(); +}; + +ReadToEnd.prototype._flush = function(done) { + this.emit('complete', undefined, this._buff); + done(); +}; + +ReadToEnd.readToEnd = function(stream, options, cb) { + if (!cb) { + cb = options; + options = {}; + } + + const dest = new ReadToEnd(options); + + stream.pipe(dest); + + stream.on('error', function(err) { + stream.unpipe(dest); + cb(err); + }); + + dest.on('complete', cb); + + dest.resume(); + + return dest; +}; diff --git a/amplify/functions/fetchDocuments/node_modules/fast-xml-parser/src/fxp.d.ts b/amplify/functions/fetchDocuments/node_modules/fast-xml-parser/src/fxp.d.ts new file mode 100644 index 0000000..bddcfef --- 
/dev/null +++ b/amplify/functions/fetchDocuments/node_modules/fast-xml-parser/src/fxp.d.ts @@ -0,0 +1,402 @@ +type X2jOptions = { + /** + * Preserve the order of tags in resulting JS object + * + * Defaults to `false` + */ + preserveOrder?: boolean; + + /** + * Give a prefix to the attribute name in the resulting JS object + * + * Defaults to '@_' + */ + attributeNamePrefix?: string; + + /** + * A name to group all attributes of a tag under, or `false` to disable + * + * Defaults to `false` + */ + attributesGroupName?: false | string; + + /** + * The name of the next node in the resulting JS + * + * Defaults to `#text` + */ + textNodeName?: string; + + /** + * Whether to ignore attributes when parsing + * + * Defaults to `true` + */ + ignoreAttributes?: boolean; + + /** + * Whether to remove namespace string from tag and attribute names + * + * Defaults to `false` + */ + removeNSPrefix?: boolean; + + /** + * Whether to allow attributes without value + * + * Defaults to `false` + */ + allowBooleanAttributes?: boolean; + + /** + * Whether to parse tag value with `strnum` package + * + * Defaults to `true` + */ + parseTagValue?: boolean; + + /** + * Whether to parse tag value with `strnum` package + * + * Defaults to `false` + */ + parseAttributeValue?: boolean; + + /** + * Whether to remove surrounding whitespace from tag or attribute value + * + * Defaults to `true` + */ + trimValues?: boolean; + + /** + * Give a property name to set CDATA values to instead of merging to tag's text value + * + * Defaults to `false` + */ + cdataPropName?: false | string; + + /** + * If set, parse comments and set as this property + * + * Defaults to `false` + */ + commentPropName?: false | string; + + /** + * Control how tag value should be parsed. Called only if tag value is not empty + * + * @returns {undefined|null} `undefined` or `null` to set original value. + * @returns {unknown} + * + * 1. Different value or value with different data type to set new value. + * 2. 
Same value to set parsed value if `parseTagValue: true`. + * + * Defaults to `(tagName, val, jPath, hasAttributes, isLeafNode) => val` + */ + tagValueProcessor?: (tagName: string, tagValue: string, jPath: string, hasAttributes: boolean, isLeafNode: boolean) => unknown; + + /** + * Control how attribute value should be parsed + * + * @param attrName + * @param attrValue + * @param jPath + * @returns {undefined|null} `undefined` or `null` to set original value + * @returns {unknown} + * + * Defaults to `(attrName, val, jPath) => val` + */ + attributeValueProcessor?: (attrName: string, attrValue: string, jPath: string) => unknown; + + /** + * Options to pass to `strnum` for parsing numbers + * + * Defaults to `{ hex: true, leadingZeros: true, eNotation: true }` + */ + numberParseOptions?: strnumOptions; + + /** + * Nodes to stop parsing at + * + * Defaults to `[]` + */ + stopNodes?: string[]; + + /** + * List of tags without closing tags + * + * Defaults to `[]` + */ + unpairedTags?: string[]; + + /** + * Whether to always create a text node + * + * Defaults to `false` + */ + alwaysCreateTextNode?: boolean; + + /** + * Determine whether a tag should be parsed as an array + * + * @param tagName + * @param jPath + * @param isLeafNode + * @param isAttribute + * @returns {boolean} + * + * Defaults to `() => false` + */ + isArray?: (tagName: string, jPath: string, isLeafNode: boolean, isAttribute: boolean) => boolean; + + /** + * Whether to process default and DOCTYPE entities + * + * Defaults to `true` + */ + processEntities?: boolean; + + /** + * Whether to process HTML entities + * + * Defaults to `false` + */ + htmlEntities?: boolean; + + /** + * Whether to ignore the declaration tag from output + * + * Defaults to `false` + */ + ignoreDeclaration?: boolean; + + /** + * Whether to ignore Pi tags + * + * Defaults to `false` + */ + ignorePiTags?: boolean; + + /** + * Transform tag names + * + * Defaults to `false` + */ + transformTagName?: ((tagName: string) => string) | 
false; + + /** + * Transform attribute names + * + * Defaults to `false` + */ + transformAttributeName?: ((attributeName: string) => string) | false; + + /** + * Change the tag name when a different name is returned. Skip the tag from parsed result when false is returned. + * Modify `attrs` object to control attributes for the given tag. + * + * @returns {string} new tag name. + * @returns false to skip the tag + * + * Defaults to `(tagName, jPath, attrs) => tagName` + */ + updateTag?: (tagName: string, jPath: string, attrs: {[k: string]: string}) => string | boolean; +}; + +type strnumOptions = { + hex: boolean; + leadingZeros: boolean, + skipLike?: RegExp, + eNotation?: boolean +} + +type validationOptions = { + /** + * Whether to allow attributes without value + * + * Defaults to `false` + */ + allowBooleanAttributes?: boolean; + + /** + * List of tags without closing tags + * + * Defaults to `[]` + */ + unpairedTags?: string[]; +}; + +type XmlBuilderOptions = { + /** + * Give a prefix to the attribute name in the resulting JS object + * + * Defaults to '@_' + */ + attributeNamePrefix?: string; + + /** + * A name to group all attributes of a tag under, or `false` to disable + * + * Defaults to `false` + */ + attributesGroupName?: false | string; + + /** + * The name of the next node in the resulting JS + * + * Defaults to `#text` + */ + textNodeName?: string; + + /** + * Whether to ignore attributes when parsing + * + * Defaults to `true` + */ + ignoreAttributes?: boolean; + + /** + * Give a property name to set CDATA values to instead of merging to tag's text value + * + * Defaults to `false` + */ + cdataPropName?: false | string; + + /** + * If set, parse comments and set as this property + * + * Defaults to `false` + */ + commentPropName?: false | string; + + /** + * Whether to make output pretty instead of single line + * + * Defaults to `false` + */ + format?: boolean; + + + /** + * If `format` is set to `true`, sets the indent string + * + * Defaults to ` 
` + */ + indentBy?: string; + + /** + * Give a name to a top-level array + * + * Defaults to `undefined` + */ + arrayNodeName?: string; + + /** + * Create empty tags for tags with no text value + * + * Defaults to `false` + */ + suppressEmptyNode?: boolean; + + /** + * Suppress an unpaired tag + * + * Defaults to `true` + */ + suppressUnpairedNode?: boolean; + + /** + * Don't put a value for boolean attributes + * + * Defaults to `true` + */ + suppressBooleanAttributes?: boolean; + + /** + * Preserve the order of tags in resulting JS object + * + * Defaults to `false` + */ + preserveOrder?: boolean; + + /** + * List of tags without closing tags + * + * Defaults to `[]` + */ + unpairedTags?: string[]; + + /** + * Nodes to stop parsing at + * + * Defaults to `[]` + */ + stopNodes?: string[]; + + /** + * Control how tag value should be parsed. Called only if tag value is not empty + * + * @returns {undefined|null} `undefined` or `null` to set original value. + * @returns {unknown} + * + * 1. Different value or value with different data type to set new value. + * 2. Same value to set parsed value if `parseTagValue: true`. 
+ * + * Defaults to `(tagName, val, jPath, hasAttributes, isLeafNode) => val` + */ + tagValueProcessor?: (name: string, value: unknown) => unknown; + + /** + * Control how attribute value should be parsed + * + * @param attrName + * @param attrValue + * @param jPath + * @returns {undefined|null} `undefined` or `null` to set original value + * @returns {unknown} + * + * Defaults to `(attrName, val, jPath) => val` + */ + attributeValueProcessor?: (name: string, value: unknown) => unknown; + + /** + * Whether to process default and DOCTYPE entities + * + * Defaults to `true` + */ + processEntities?: boolean; + + + oneListGroup?: boolean; +}; + +type ESchema = string | object | Array; + +type ValidationError = { + err: { + code: string; + msg: string, + line: number, + col: number + }; +}; + +export class XMLParser { + constructor(options?: X2jOptions); + parse(xmlData: string | Buffer ,validationOptions?: validationOptions | boolean): any; + /** + * Add Entity which is not by default supported by this library + * @param entityIdentifier {string} Eg: 'ent' for &ent; + * @param entityValue {string} Eg: '\r' + */ + addEntity(entityIdentifier: string, entityValue: string): void; +} + +export class XMLValidator{ + static validate( xmlData: string, options?: validationOptions): true | ValidationError; +} +export class XMLBuilder { + constructor(options?: XmlBuilderOptions); + build(jObj: any): any; +} diff --git a/amplify/functions/fetchDocuments/node_modules/fast-xml-parser/src/fxp.js b/amplify/functions/fetchDocuments/node_modules/fast-xml-parser/src/fxp.js new file mode 100644 index 0000000..9cfa0ac --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/fast-xml-parser/src/fxp.js @@ -0,0 +1,11 @@ +'use strict'; + +const validator = require('./validator'); +const XMLParser = require('./xmlparser/XMLParser'); +const XMLBuilder = require('./xmlbuilder/json2xml'); + +module.exports = { + XMLParser: XMLParser, + XMLValidator: validator, + XMLBuilder: XMLBuilder +} 
\ No newline at end of file diff --git a/amplify/functions/fetchDocuments/node_modules/fast-xml-parser/src/util.js b/amplify/functions/fetchDocuments/node_modules/fast-xml-parser/src/util.js new file mode 100644 index 0000000..df0a60d --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/fast-xml-parser/src/util.js @@ -0,0 +1,72 @@ +'use strict'; + +const nameStartChar = ':A-Za-z_\\u00C0-\\u00D6\\u00D8-\\u00F6\\u00F8-\\u02FF\\u0370-\\u037D\\u037F-\\u1FFF\\u200C-\\u200D\\u2070-\\u218F\\u2C00-\\u2FEF\\u3001-\\uD7FF\\uF900-\\uFDCF\\uFDF0-\\uFFFD'; +const nameChar = nameStartChar + '\\-.\\d\\u00B7\\u0300-\\u036F\\u203F-\\u2040'; +const nameRegexp = '[' + nameStartChar + '][' + nameChar + ']*' +const regexName = new RegExp('^' + nameRegexp + '$'); + +const getAllMatches = function(string, regex) { + const matches = []; + let match = regex.exec(string); + while (match) { + const allmatches = []; + allmatches.startIndex = regex.lastIndex - match[0].length; + const len = match.length; + for (let index = 0; index < len; index++) { + allmatches.push(match[index]); + } + matches.push(allmatches); + match = regex.exec(string); + } + return matches; +}; + +const isName = function(string) { + const match = regexName.exec(string); + return !(match === null || typeof match === 'undefined'); +}; + +exports.isExist = function(v) { + return typeof v !== 'undefined'; +}; + +exports.isEmptyObject = function(obj) { + return Object.keys(obj).length === 0; +}; + +/** + * Copy all the properties of a into b. 
+ * @param {*} target + * @param {*} a + */ +exports.merge = function(target, a, arrayMode) { + if (a) { + const keys = Object.keys(a); // will return an array of own properties + const len = keys.length; //don't make it inline + for (let i = 0; i < len; i++) { + if (arrayMode === 'strict') { + target[keys[i]] = [ a[keys[i]] ]; + } else { + target[keys[i]] = a[keys[i]]; + } + } + } +}; +/* exports.merge =function (b,a){ + return Object.assign(b,a); +} */ + +exports.getValue = function(v) { + if (exports.isExist(v)) { + return v; + } else { + return ''; + } +}; + +// const fakeCall = function(a) {return a;}; +// const fakeCallNoReturn = function() {}; + +exports.isName = isName; +exports.getAllMatches = getAllMatches; +exports.nameRegexp = nameRegexp; diff --git a/amplify/functions/fetchDocuments/node_modules/fast-xml-parser/src/v5/CharsSymbol.js b/amplify/functions/fetchDocuments/node_modules/fast-xml-parser/src/v5/CharsSymbol.js new file mode 100644 index 0000000..fa5ce9e --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/fast-xml-parser/src/v5/CharsSymbol.js @@ -0,0 +1,16 @@ +modules.export = { + "<" : "<", //tag start + ">" : ">", //tag end + "/" : "/", //close tag + "!" : "!", //comment or docttype + "!--" : "!--", //comment + "-->" : "-->", //comment end + "?" 
: "?", //pi + "?>" : "?>", //pi end + "?xml" : "?xml", //pi end + "![" : "![", //cdata + "]]>" : "]]>", //cdata end + "[" : "[", + "-" : "-", + "D" : "D", +} \ No newline at end of file diff --git a/amplify/functions/fetchDocuments/node_modules/fast-xml-parser/src/v5/EntitiesParser.js b/amplify/functions/fetchDocuments/node_modules/fast-xml-parser/src/v5/EntitiesParser.js new file mode 100644 index 0000000..62cc02f --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/fast-xml-parser/src/v5/EntitiesParser.js @@ -0,0 +1,107 @@ +const ampEntity = { regex: /&(amp|#38|#x26);/g, val : "&"}; +const htmlEntities = { + "space": { regex: /&(nbsp|#160);/g, val: " " }, + // "lt" : { regex: /&(lt|#60);/g, val: "<" }, + // "gt" : { regex: /&(gt|#62);/g, val: ">" }, + // "amp" : { regex: /&(amp|#38);/g, val: "&" }, + // "quot" : { regex: /&(quot|#34);/g, val: "\"" }, + // "apos" : { regex: /&(apos|#39);/g, val: "'" }, + "cent" : { regex: /&(cent|#162);/g, val: "¢" }, + "pound" : { regex: /&(pound|#163);/g, val: "£" }, + "yen" : { regex: /&(yen|#165);/g, val: "¥" }, + "euro" : { regex: /&(euro|#8364);/g, val: "€" }, + "copyright" : { regex: /&(copy|#169);/g, val: "©" }, + "reg" : { regex: /&(reg|#174);/g, val: "®" }, + "inr" : { regex: /&(inr|#8377);/g, val: "₹" }, + "num_dec": { regex: /&#([0-9]{1,7});/g, val : (_, str) => String.fromCharCode(Number.parseInt(str, 10)) }, + "num_hex": { regex: /&#x([0-9a-fA-F]{1,6});/g, val : (_, str) => String.fromCharCode(Number.parseInt(str, 16)) }, +}; + +class EntitiesParser{ + constructor(replaceHtmlEntities) { + this.replaceHtmlEntities = replaceHtmlEntities; + this.docTypeEntities = {}; + this.lastEntities = { + "apos" : { regex: /&(apos|#39|#x27);/g, val : "'"}, + "gt" : { regex: /&(gt|#62|#x3E);/g, val : ">"}, + "lt" : { regex: /&(lt|#60|#x3C);/g, val : "<"}, + "quot" : { regex: /&(quot|#34|#x22);/g, val : "\""}, + }; + } + + addExternalEntities(externalEntities){ + const entKeys = Object.keys(externalEntities); + for (let i 
= 0; i < entKeys.length; i++) { + const ent = entKeys[i]; + this.addExternalEntity(ent,externalEntities[ent]) + } + } + addExternalEntity(key,val){ + validateEntityName(key); + if(val.indexOf("&") !== -1) { + reportWarning(`Entity ${key} is not added as '&' is found in value;`) + return; + }else{ + this.lastEntities[ent] = { + regex: new RegExp("&"+key+";","g"), + val : val + } + } + } + + addDocTypeEntities(entities){ + const entKeys = Object.keys(entities); + for (let i = 0; i < entKeys.length; i++) { + const ent = entKeys[i]; + this.docTypeEntities[ent] = { + regex: new RegExp("&"+ent+";","g"), + val : entities[ent] + } + } + } + + parse(val){ + return this.replaceEntitiesValue(val) + } + + /** + * 1. Replace DOCTYPE entities + * 2. Replace external entities + * 3. Replace HTML entities if asked + * @param {string} val + */ + replaceEntitiesValue(val){ + if(typeof val === "string" && val.length > 0){ + for(let entityName in this.docTypeEntities){ + const entity = this.docTypeEntities[entityName]; + val = val.replace( entity.regx, entity.val); + } + for(let entityName in this.lastEntities){ + const entity = this.lastEntities[entityName]; + val = val.replace( entity.regex, entity.val); + } + if(this.replaceHtmlEntities){ + for(let entityName in htmlEntities){ + const entity = htmlEntities[entityName]; + val = val.replace( entity.regex, entity.val); + } + } + val = val.replace( ampEntity.regex, ampEntity.val); + } + return val; + } +}; + +//an entity name should not contains special characters that may be used in regex +//Eg !?\\\/[]$%{}^&*()<> +const specialChar = "!?\\\/[]$%{}^&*()<>|+"; + +function validateEntityName(name){ + for (let i = 0; i < specialChar.length; i++) { + const ch = specialChar[i]; + if(name.indexOf(ch) !== -1) throw new Error(`Invalid character ${ch} in entity name`); + } + return name; +} + +module.exports = EntitiesParser; \ No newline at end of file diff --git 
a/amplify/functions/fetchDocuments/node_modules/fast-xml-parser/src/v5/OptionsBuilder.js b/amplify/functions/fetchDocuments/node_modules/fast-xml-parser/src/v5/OptionsBuilder.js new file mode 100755 index 0000000..be1f1d4 --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/fast-xml-parser/src/v5/OptionsBuilder.js @@ -0,0 +1,64 @@ + +const JsObjOutputBuilder = require("./OutputBuilders/JsObjBuilder"); + +const defaultOptions = { + preserveOrder: false, + removeNSPrefix: false, // remove NS from tag name or attribute name if true + //ignoreRootElement : false, + stopNodes: [], //nested tags will not be parsed even for errors + // isArray: () => false, //User will set it + htmlEntities: false, + // skipEmptyListItem: false + tags:{ + unpaired: [], + nameFor:{ + cdata: false, + comment: false, + text: '#text' + }, + separateTextProperty: false, + }, + attributes:{ + ignore: false, + booleanType: true, + entities: true, + }, + + // select: ["img[src]"], + // stop: ["anim", "[ads]"] + only: [], // rest tags will be skipped. It will result in flat array + hierarchy: false, //will be used when a particular tag is set to be parsed. + skip: [], // will be skipped from parse result. on('skip') will be triggered + + select: [], // on('select', tag => tag ) will be called if match + stop: [], //given tagPath will not be parsed. innerXML will be set as string value + OutputBuilder: new JsObjOutputBuilder(), +}; + +const buildOptions = function(options) { + const finalOptions = { ... 
defaultOptions}; + copyProperties(finalOptions,options) + return finalOptions; +}; + +function copyProperties(target, source) { + for (let key in source) { + if (source.hasOwnProperty(key)) { + if (key === 'OutputBuilder') { + target[key] = source[key]; + }else if (typeof source[key] === 'object' && !Array.isArray(source[key])) { + // Recursively copy nested properties + if (typeof target[key] === 'undefined') { + target[key] = {}; + } + copyProperties(target[key], source[key]); + } else { + // Copy non-nested properties + target[key] = source[key]; + } + } + } +} + +exports.buildOptions = buildOptions; +exports.defaultOptions = defaultOptions; \ No newline at end of file diff --git a/amplify/functions/fetchDocuments/node_modules/fast-xml-parser/src/v5/OutputBuilders/BaseOutputBuilder.js b/amplify/functions/fetchDocuments/node_modules/fast-xml-parser/src/v5/OutputBuilders/BaseOutputBuilder.js new file mode 100644 index 0000000..be2d478 --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/fast-xml-parser/src/v5/OutputBuilders/BaseOutputBuilder.js @@ -0,0 +1,71 @@ +class BaseOutputBuilder{ + constructor(){ + // this.attributes = {}; + } + + addAttribute(name, value){ + if(this.options.onAttribute){ + //TODO: better to pass tag path + const v = this.options.onAttribute(name, value, this.tagName); + if(v) this.attributes[v.name] = v.value; + }else{ + name = this.options.attributes.prefix + name + this.options.attributes.suffix; + this.attributes[name] = this.parseValue(value, this.options.attributes.valueParsers); + } + } + + /** + * parse value by chain of parsers + * @param {string} val + * @returns {any} parsed value if matching parser found + */ + parseValue = function(val, valParsers){ + for (let i = 0; i < valParsers.length; i++) { + let valParser = valParsers[i]; + if(typeof valParser === "string"){ + valParser = this.registeredParsers[valParser]; + } + if(valParser){ + val = valParser.parse(val); + } + } + return val; + } + + /** + * To add a 
nested empty tag. + * @param {string} key + * @param {any} val + */ + _addChild(key, val){} + + /** + * skip the comment if property is not set + */ + addComment(text){ + if(this.options.nameFor.comment) + this._addChild(this.options.nameFor.comment, text); + } + + //store CDATA separately if property is set + //otherwise add to tag's value + addCdata(text){ + if (this.options.nameFor.cdata) { + this._addChild(this.options.nameFor.cdata, text); + } else { + this.addRawValue(text || ""); + } + } + + addRawValue = text => this.addValue(text); + + addDeclaration(){ + if(!this.options.declaration){ + }else{ + this.addPi("?xml"); + } + this.attributes = {} + } +} + +module.exports = BaseOutputBuilder; \ No newline at end of file diff --git a/amplify/functions/fetchDocuments/node_modules/fast-xml-parser/src/v5/OutputBuilders/JsArrBuilder.js b/amplify/functions/fetchDocuments/node_modules/fast-xml-parser/src/v5/OutputBuilders/JsArrBuilder.js new file mode 100644 index 0000000..c63f627 --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/fast-xml-parser/src/v5/OutputBuilders/JsArrBuilder.js @@ -0,0 +1,103 @@ +const {buildOptions,registerCommonValueParsers} = require("./ParserOptionsBuilder"); + +class OutputBuilder{ + constructor(options){ + this.options = buildOptions(options); + this.registeredParsers = registerCommonValueParsers(this.options); + } + + registerValueParser(name,parserInstance){//existing name will override the parser without warning + this.registeredParsers[name] = parserInstance; + } + + getInstance(parserOptions){ + return new JsArrBuilder(parserOptions, this.options, this.registeredParsers); + } +} + +const rootName = '!js_arr'; +const BaseOutputBuilder = require("./BaseOutputBuilder"); + +class JsArrBuilder extends BaseOutputBuilder{ + + constructor(parserOptions, options,registeredParsers) { + super(); + this.tagsStack = []; + this.parserOptions = parserOptions; + this.options = options; + this.registeredParsers = registeredParsers; + + 
this.root = new Node(rootName); + this.currentNode = this.root; + this.attributes = {}; + } + + addTag(tag){ + //when a new tag is added, it should be added as child of current node + //TODO: shift this check to the parser + if(tag.name === "__proto__") tag.name = "#__proto__"; + + this.tagsStack.push(this.currentNode); + this.currentNode = new Node(tag.name, this.attributes); + this.attributes = {}; + } + + /** + * Check if the node should be added by checking user's preference + * @param {Node} node + * @returns boolean: true if the node should not be added + */ + closeTag(){ + const node = this.currentNode; + this.currentNode = this.tagsStack.pop(); //set parent node in scope + if(this.options.onClose !== undefined){ + //TODO TagPathMatcher + const resultTag = this.options.onClose(node, + new TagPathMatcher(this.tagsStack,node)); + + if(resultTag) return; + } + this.currentNode.child.push(node); //to parent node + } + + //Called by parent class methods + _addChild(key, val){ + // if(key === "__proto__") tagName = "#__proto__"; + this.currentNode.child.push( {[key]: val }); + // this.currentNode.leafType = false; + } + + /** + * Add text value child node + * @param {string} text + */ + addValue(text){ + this.currentNode.child.push( {[this.options.nameFor.text]: this.parseValue(text, this.options.tags.valueParsers) }); + } + + addPi(name){ + //TODO: set pi flag + if(!this.options.ignorePiTags){ + const node = new Node(name, this.attributes); + this.currentNode[":@"] = this.attributes; + this.currentNode.child.push(node); + } + this.attributes = {}; + } + getOutput(){ + return this.root.child[0]; + } +} + + + +class Node{ + constructor(tagname, attributes){ + this.tagname = tagname; + this.child = []; //nested tags, text, cdata, comments + if(attributes && Object.keys(attributes).length > 0) + this[":@"] = attributes; + } +} + +module.exports = OutputBuilder; \ No newline at end of file diff --git 
a/amplify/functions/fetchDocuments/node_modules/fast-xml-parser/src/v5/OutputBuilders/JsMinArrBuilder.js b/amplify/functions/fetchDocuments/node_modules/fast-xml-parser/src/v5/OutputBuilders/JsMinArrBuilder.js new file mode 100644 index 0000000..e0dc1e9 --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/fast-xml-parser/src/v5/OutputBuilders/JsMinArrBuilder.js @@ -0,0 +1,102 @@ +const {buildOptions,registerCommonValueParsers} = require("./ParserOptionsBuilder"); + +class OutputBuilder{ + constructor(options){ + this.options = buildOptions(options); + this.registeredParsers = registerCommonValueParsers(this.options); + } + + registerValueParser(name,parserInstance){//existing name will override the parser without warning + this.registeredParsers[name] = parserInstance; + } + + getInstance(parserOptions){ + return new JsMinArrBuilder(parserOptions, this.options, this.registeredParsers); + } +} + +const BaseOutputBuilder = require("./BaseOutputBuilder"); +const rootName = '^'; + +class JsMinArrBuilder extends BaseOutputBuilder{ + + constructor(parserOptions, options,registeredParsers) { + super(); + this.tagsStack = []; + this.parserOptions = parserOptions; + this.options = options; + this.registeredParsers = registeredParsers; + + this.root = {[rootName]: []}; + this.currentNode = this.root; + this.currentNodeTagName = rootName; + this.attributes = {}; + } + + addTag(tag){ + //when a new tag is added, it should be added as child of current node + //TODO: shift this check to the parser + if(tag.name === "__proto__") tag.name = "#__proto__"; + + this.tagsStack.push([this.currentNodeTagName,this.currentNode]); //this.currentNode is parent node here + this.currentNodeTagName = tag.name; + this.currentNode = { [tag.name]:[]} + if(Object.keys(this.attributes).length > 0){ + this.currentNode[":@"] = this.attributes; + this.attributes = {}; + } + } + + /** + * Check if the node should be added by checking user's preference + * @param {Node} node + * @returns 
boolean: true if the node should not be added + */ + closeTag(){ + const node = this.currentNode; + const nodeName = this.currentNodeTagName; + const arr = this.tagsStack.pop(); //set parent node in scope + this.currentNodeTagName = arr[0]; + this.currentNode = arr[1]; + + if(this.options.onClose !== undefined){ + //TODO TagPathMatcher + const resultTag = this.options.onClose(node, + new TagPathMatcher(this.tagsStack,node)); + + if(resultTag) return; + } + this.currentNode[this.currentNodeTagName].push(node); //to parent node + } + + //Called by parent class methods + _addChild(key, val){ + // if(key === "__proto__") tagName = "#__proto__"; + this.currentNode.push( {[key]: val }); + // this.currentNode.leafType = false; + } + + /** + * Add text value child node + * @param {string} text + */ + addValue(text){ + this.currentNode[this.currentNodeTagName].push( {[this.options.nameFor.text]: this.parseValue(text, this.options.tags.valueParsers) }); + } + + addPi(name){ + if(!this.options.ignorePiTags){ + const node = { [name]:[]} + if(this.attributes){ + node[":@"] = this.attributes; + } + this.currentNode.push(node); + } + this.attributes = {}; + } + getOutput(){ + return this.root[rootName]; + } +} + +module.exports = OutputBuilder; \ No newline at end of file diff --git a/amplify/functions/fetchDocuments/node_modules/fast-xml-parser/src/v5/OutputBuilders/JsObjBuilder.js b/amplify/functions/fetchDocuments/node_modules/fast-xml-parser/src/v5/OutputBuilders/JsObjBuilder.js new file mode 100644 index 0000000..37036c5 --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/fast-xml-parser/src/v5/OutputBuilders/JsObjBuilder.js @@ -0,0 +1,156 @@ + + +const {buildOptions,registerCommonValueParsers} = require("./ParserOptionsBuilder"); + +class OutputBuilder{ + constructor(builderOptions){ + this.options = buildOptions(builderOptions); + this.registeredParsers = registerCommonValueParsers(this.options); + } + + registerValueParser(name,parserInstance){//existing 
name will override the parser without warning + this.registeredParsers[name] = parserInstance; + } + + getInstance(parserOptions){ + return new JsObjBuilder(parserOptions, this.options, this.registeredParsers); + } +} + +const BaseOutputBuilder = require("./BaseOutputBuilder"); +const rootName = '^'; + +class JsObjBuilder extends BaseOutputBuilder{ + + constructor(parserOptions, builderOptions,registeredParsers) { + super(); + //hold the raw detail of a tag and sequence with reference to the output + this.tagsStack = []; + this.parserOptions = parserOptions; + this.options = builderOptions; + this.registeredParsers = registeredParsers; + + this.root = {}; + this.parent = this.root; + this.tagName = rootName; + this.value = {}; + this.textValue = ""; + this.attributes = {}; + } + + addTag(tag){ + + let value = ""; + if( !isEmpty(this.attributes)){ + value = {}; + if(this.options.attributes.groupBy){ + value[this.options.attributes.groupBy] = this.attributes; + }else{ + value = this.attributes; + } + } + + this.tagsStack.push([this.tagName, this.textValue, this.value]); //parent tag, parent text value, parent tag value (jsobj) + this.tagName = tag.name; + this.value = value; + this.textValue = ""; + this.attributes = {}; + } + + /** + * Check if the node should be added by checking user's preference + * @param {Node} node + * @returns boolean: true if the node should not be added + */ + closeTag(){ + const tagName = this.tagName; + let value = this.value; + let textValue = this.textValue; + + //update tag text value + if(typeof value !== "object" && !Array.isArray(value)){ + value = this.parseValue(textValue.trim(), this.options.tags.valueParsers); + }else if(textValue.length > 0){ + value[this.options.nameFor.text] = this.parseValue(textValue.trim(), this.options.tags.valueParsers); + } + + + let resultTag= { + tagName: tagName, + value: value + }; + + if(this.options.onTagClose !== undefined){ + //TODO TagPathMatcher + resultTag = this.options.onClose(tagName, 
value, this.textValue, new TagPathMatcher(this.tagsStack,node)); + + if(!resultTag) return; + } + + //set parent node in scope + let arr = this.tagsStack.pop(); + let parentTag = arr[2]; + parentTag=this._addChildTo(resultTag.tagName, resultTag.value, parentTag); + + this.tagName = arr[0]; + this.textValue = arr[1]; + this.value = parentTag; + } + + _addChild(key, val){ + if(typeof this.value === "string"){ + this.value = { [this.options.nameFor.text] : this.value }; + } + + this._addChildTo(key, val, this.value); + // this.currentNode.leafType = false; + this.attributes = {}; + } + + _addChildTo(key, val, node){ + if(typeof node === 'string') node = {}; + if(!node[key]){ + node[key] = val; + }else{ //Repeated + if(!Array.isArray(node[key])){ //but not stored as array + node[key] = [node[key]]; + } + node[key].push(val); + } + return node; + } + + + /** + * Add text value child node + * @param {string} text + */ + addValue(text){ + //TODO: use bytes join + if(this.textValue.length > 0) this.textValue += " " + text; + else this.textValue = text; + } + + addPi(name){ + let value = ""; + if( !isEmpty(this.attributes)){ + value = {}; + if(this.options.attributes.groupBy){ + value[this.options.attributes.groupBy] = this.attributes; + }else{ + value = this.attributes; + } + } + this._addChild(name, value); + + } + getOutput(){ + return this.value; + } +} + +function isEmpty(obj) { + return Object.keys(obj).length === 0; +} + +module.exports = OutputBuilder; \ No newline at end of file diff --git a/amplify/functions/fetchDocuments/node_modules/fast-xml-parser/src/v5/OutputBuilders/ParserOptionsBuilder.js b/amplify/functions/fetchDocuments/node_modules/fast-xml-parser/src/v5/OutputBuilders/ParserOptionsBuilder.js new file mode 100644 index 0000000..c71ea94 --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/fast-xml-parser/src/v5/OutputBuilders/ParserOptionsBuilder.js @@ -0,0 +1,99 @@ +const trimParser = require("../valueParsers/trim") +const booleanParser = 
require("../valueParsers/booleanParser") +const currencyParser = require("../valueParsers/currency") +const numberParser = require("../valueParsers/number") + +const defaultOptions={ + nameFor:{ + text: "#text", + comment: "", + cdata: "", + }, + // onTagClose: () => {}, + // onAttribute: () => {}, + piTag: false, + declaration: false, //"?xml" + tags: { + valueParsers: [ + // "trim", + // "boolean", + // "number", + // "currency", + // "date", + ] + }, + attributes:{ + prefix: "@_", + suffix: "", + groupBy: "", + + valueParsers: [ + // "trim", + // "boolean", + // "number", + // "currency", + // "date", + ] + }, + dataType:{ + + } +} + +//TODO +const withJoin = ["trim","join", /*"entities",*/"number","boolean","currency"/*, "date"*/] +const withoutJoin = ["trim", /*"entities",*/"number","boolean","currency"/*, "date"*/] + +function buildOptions(options){ + //clone + const finalOptions = { ... defaultOptions}; + + //add config missed in cloning + finalOptions.tags.valueParsers.push(...withJoin) + if(!this.preserveOrder) + finalOptions.tags.valueParsers.push(...withoutJoin); + + //add config missed in cloning + finalOptions.attributes.valueParsers.push(...withJoin) + + //override configuration + copyProperties(finalOptions,options); + return finalOptions; +} + +function copyProperties(target, source) { + for (let key in source) { + if (source.hasOwnProperty(key)) { + if (typeof source[key] === 'object' && !Array.isArray(source[key])) { + // Recursively copy nested properties + if (typeof target[key] === 'undefined') { + target[key] = {}; + } + copyProperties(target[key], source[key]); + } else { + // Copy non-nested properties + target[key] = source[key]; + } + } + } +} + +function registerCommonValueParsers(options){ + return { + "trim": new trimParser(), + // "join": this.entityParser.parse, + "boolean": new booleanParser(), + "number": new numberParser({ + hex: true, + leadingZeros: true, + eNotation: true + }), + "currency": new currencyParser(), + // "date": 
this.entityParser.parse, + } +} + +module.exports = { + buildOptions : buildOptions, + registerCommonValueParsers: registerCommonValueParsers +} \ No newline at end of file diff --git a/amplify/functions/fetchDocuments/node_modules/fast-xml-parser/src/v5/Report.js b/amplify/functions/fetchDocuments/node_modules/fast-xml-parser/src/v5/Report.js new file mode 100644 index 0000000..e69de29 diff --git a/amplify/functions/fetchDocuments/node_modules/fast-xml-parser/src/v5/TagPath.js b/amplify/functions/fetchDocuments/node_modules/fast-xml-parser/src/v5/TagPath.js new file mode 100644 index 0000000..d901cc3 --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/fast-xml-parser/src/v5/TagPath.js @@ -0,0 +1,81 @@ +class TagPath{ + constructor(pathStr){ + let text = ""; + let tName = ""; + let pos; + let aName = ""; + let aVal = ""; + this.stack = [] + + for (let i = 0; i < pathStr.length; i++) { + let ch = pathStr[i]; + if(ch === " ") { + if(text.length === 0) continue; + tName = text; text = ""; + }else if(ch === "["){ + if(tName.length === 0){ + tName = text; text = ""; + } + i++; + for (; i < pathStr.length; i++) { + ch = pathStr[i]; + if(ch=== "=") continue; + else if(ch=== "]") {aName = text.trim(); text=""; break; i--;} + else if(ch === "'" || ch === '"'){ + let attrEnd = pathStr.indexOf(ch,i+1); + aVal = pathStr.substring(i+1, attrEnd); + i = attrEnd; + }else{ + text +=ch; + } + } + }else if(ch !== " " && text.length === 0 && tName.length > 0){//reading tagName + //save previous tag + this.stack.push(new TagPathNode(tName,pos,aName,aVal)); + text = ch; tName = ""; aName = ""; aVal = ""; + }else{ + text+=ch; + } + } + + //last tag in the path + if(tName.length >0 || text.length>0){ + this.stack.push(new TagPathNode(text||tName,pos,aName,aVal)); + } + } + + match(tagStack,node){ + if(this.stack[0].name !== "*"){ + if(this.stack.length !== tagStack.length +1) return false; + + //loop through tagPath and tagStack and match + for (let i = 0; i < 
this.tagStack.length; i++) { + if(!this.stack[i].match(tagStack[i])) return false; + } + } + if(!this.stack[this.stack.length - 1].match(node)) return false; + return true; + } +} + +class TagPathNode{ + constructor(name,position,attrName,attrVal){ + this.name = name; + this.position = position; + this.attrName = attrName, + this.attrVal = attrVal; + } + + match(node){ + let matching = true; + matching = node.name === this.name; + if(this.position) matching = node.position === this.position; + if(this.attrName) matching = node.attrs[this.attrName !== undefined]; + if(this.attrVal) matching = node.attrs[this.attrName !== this.attrVal]; + return matching; + } +} + +// console.log((new TagPath("* b[b]")).stack); +// console.log((new TagPath("a[a] b[b] c")).stack); +// console.log((new TagPath(" b [ b= 'cf sdadwa' ] a ")).stack); \ No newline at end of file diff --git a/amplify/functions/fetchDocuments/node_modules/fast-xml-parser/src/v5/TagPathMatcher.js b/amplify/functions/fetchDocuments/node_modules/fast-xml-parser/src/v5/TagPathMatcher.js new file mode 100644 index 0000000..af23607 --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/fast-xml-parser/src/v5/TagPathMatcher.js @@ -0,0 +1,15 @@ +const TagPath = require("./TagPath"); + +class TagPathMatcher{ + constructor(stack,node){ + this.stack = stack; + this.node= node; + } + + match(path){ + const tagPath = new TagPath(path); + return tagPath.match(this.stack, this.node); + } +} + +module.exports = TagPathMatcher; \ No newline at end of file diff --git a/amplify/functions/fetchDocuments/node_modules/fast-xml-parser/src/v5/XMLParser.js b/amplify/functions/fetchDocuments/node_modules/fast-xml-parser/src/v5/XMLParser.js new file mode 100755 index 0000000..6de58ed --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/fast-xml-parser/src/v5/XMLParser.js @@ -0,0 +1,85 @@ +const { buildOptions} = require("./OptionsBuilder"); +const Xml2JsParser = require("./Xml2JsParser"); + +class XMLParser{ + + 
constructor(options){ + this.externalEntities = {}; + this.options = buildOptions(options); + // console.log(this.options) + } + /** + * Parse XML data string to JS object + * @param {string|Buffer} xmlData + * @param {boolean|Object} validationOption + */ + parse(xmlData){ + if(Array.isArray(xmlData) && xmlData.byteLength !== undefined){ + return this.parse(xmlData); + }else if( xmlData.toString){ + xmlData = xmlData.toString(); + }else{ + throw new Error("XML data is accepted in String or Bytes[] form.") + } + // if( validationOption){ + // if(validationOption === true) validationOption = {}; //validate with default options + + // const result = validator.validate(xmlData, validationOption); + // if (result !== true) { + // throw Error( `${result.err.msg}:${result.err.line}:${result.err.col}` ) + // } + // } + const parser = new Xml2JsParser(this.options); + parser.entityParser.addExternalEntities(this.externalEntities); + return parser.parse(xmlData); + } + /** + * Parse XML data buffer to JS object + * @param {string|Buffer} xmlData + * @param {boolean|Object} validationOption + */ + parseBytesArr(xmlData){ + if(Array.isArray(xmlData) && xmlData.byteLength !== undefined){ + }else{ + throw new Error("XML data is accepted in Bytes[] form.") + } + const parser = new Xml2JsParser(this.options); + parser.entityParser.addExternalEntities(this.externalEntities); + return parser.parseBytesArr(xmlData); + } + /** + * Parse XML data stream to JS object + * @param {fs.ReadableStream} xmlDataStream + */ + parseStream(xmlDataStream){ + if(!isStream(xmlDataStream)) throw new Error("FXP: Invalid stream input"); + + const orderedObjParser = new Xml2JsParser(this.options); + orderedObjParser.entityParser.addExternalEntities(this.externalEntities); + return orderedObjParser.parseStream(xmlDataStream); + } + + /** + * Add Entity which is not by default supported by this library + * @param {string} key + * @param {string} value + */ + addEntity(key, value){ + if(value.indexOf("&") 
!== -1){ + throw new Error("Entity value can't have '&'") + }else if(key.indexOf("&") !== -1 || key.indexOf(";") !== -1){ + throw new Error("An entity must be set without '&' and ';'. Eg. use '#xD' for ' '") + }else if(value === "&"){ + throw new Error("An entity with value '&' is not permitted"); + }else{ + this.externalEntities[key] = value; + } + } +} + +function isStream(stream){ + if(stream && typeof stream.read === "function" && typeof stream.on === "function" && typeof stream.readableEnded === "boolean") return true; + return false; +} + +module.exports = XMLParser; \ No newline at end of file diff --git a/amplify/functions/fetchDocuments/node_modules/fast-xml-parser/src/v5/Xml2JsParser.js b/amplify/functions/fetchDocuments/node_modules/fast-xml-parser/src/v5/Xml2JsParser.js new file mode 100644 index 0000000..c4baab4 --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/fast-xml-parser/src/v5/Xml2JsParser.js @@ -0,0 +1,237 @@ +const StringSource = require("./inputSource/StringSource"); +const BufferSource = require("./inputSource/BufferSource"); +const {readTagExp,readClosingTagName} = require("./XmlPartReader"); +const {readComment, readCdata,readDocType,readPiTag} = require("./XmlSpecialTagsReader"); +const TagPath = require("./TagPath"); +const TagPathMatcher = require("./TagPathMatcher"); +const EntitiesParser = require('./EntitiesParser'); + +//To hold the data of current tag +//This is usually used to compare jpath expression against current tag +class TagDetail{ + constructor(name){ + this.name = name; + this.position = 0; + // this.attributes = {}; + } +} + +class Xml2JsParser { + constructor(options) { + this.options = options; + + this.currentTagDetail = null; + this.tagTextData = ""; + this.tagsStack = []; + this.entityParser = new EntitiesParser(options.htmlEntities); + this.stopNodes = []; + for (let i = 0; i < this.options.stopNodes.length; i++) { + this.stopNodes.push(new TagPath(this.options.stopNodes[i])); + } + } + + 
parse(strData) { + this.source = new StringSource(strData); + this.parseXml(); + return this.outputBuilder.getOutput(); + } + parseBytesArr(data) { + this.source = new BufferSource(data ); + this.parseXml(); + return this.outputBuilder.getOutput(); + } + + parseXml() { + //TODO: Separate TagValueParser as separate class. So no scope issue in node builder class + + //OutputBuilder should be set in XML Parser + this.outputBuilder = this.options.OutputBuilder.getInstance(this.options); + this.root = { root: true}; + this.currentTagDetail = this.root; + + while(this.source.canRead()){ + let ch = this.source.readCh(); + if (ch === "") break; + + if(ch === "<"){//tagStart + let nextChar = this.source.readChAt(0); + if (nextChar === "" ) throw new Error("Unexpected end of source"); + + + if(nextChar === "!" || nextChar === "?"){ + this.source.updateBufferBoundary(); + //previously collected text should be added to current node + this.addTextNode(); + + this.readSpecialTag(nextChar);// Read DOCTYPE, comment, CDATA, PI tag + }else if(nextChar === "/"){ + this.source.updateBufferBoundary(); + this.readClosingTag(); + // console.log(this.source.buffer.length, this.source.readable); + // console.log(this.tagsStack.length); + }else{//opening tag + this.readOpeningTag(); + } + }else{ + this.tagTextData += ch; + } + }//End While loop + if(this.tagsStack.length > 0 || ( this.tagTextData !== "undefined" && this.tagTextData.trimEnd().length > 0) ) throw new Error("Unexpected data in the end of document"); + } + + /** + * read closing paired tag. Set parent tag in scope. + * skip a node on user's choice + */ + readClosingTag(){ + const tagName = this.processTagName(readClosingTagName(this.source)); + // console.log(tagName, this.tagsStack.length); + this.validateClosingTag(tagName); + // All the text data collected, belongs to current tag. 
+ if(!this.currentTagDetail.root) this.addTextNode(); + this.outputBuilder.closeTag(); + // Since the tag is closed now, parent tag comes in scope + this.currentTagDetail = this.tagsStack.pop(); + } + + validateClosingTag(tagName){ + // This can't be unpaired tag, or a stop tag. + if(this.isUnpaired(tagName) || this.isStopNode(tagName)) throw new Error(`Unexpected closing tag '${tagName}'`); + // This must match with last opening tag + else if(tagName !== this.currentTagDetail.name) + throw new Error(`Unexpected closing tag '${tagName}' expecting '${this.currentTagDetail.name}'`) + } + + /** + * Read paired, unpaired, self-closing, stop and special tags. + * Create a new node + * Push paired tag in stack. + */ + readOpeningTag(){ + //save previously collected text data to current node + this.addTextNode(); + + //create new tag + let tagExp = readTagExp(this, ">" ); + + // process and skip from tagsStack For unpaired tag, self closing tag, and stop node + const tagDetail = new TagDetail(tagExp.tagName); + if(this.isUnpaired(tagExp.tagName)) { + //TODO: this will lead 2 extra stack operation + this.outputBuilder.addTag(tagDetail); + this.outputBuilder.closeTag(); + } else if(tagExp.selfClosing){ + this.outputBuilder.addTag(tagDetail); + this.outputBuilder.closeTag(); + } else if(this.isStopNode(this.currentTagDetail)){ + // TODO: let's user set a stop node boundary detector for complex contents like script tag + //TODO: pass tag name only to avoid string operations + const content = source.readUptoCloseTag(` 0){ + //TODO: shift parsing to output builder + + this.outputBuilder.addValue(this.replaceEntities(this.tagTextData)); + } + this.tagTextData = ""; + } + // } + } + + processAttrName(name){ + if(name === "__proto__") name = "#__proto__"; + name = resolveNameSpace(name, this.removeNSPrefix); + return name; + } + + processTagName(name){ + if(name === "__proto__") name = "#__proto__"; + name = resolveNameSpace(name, this.removeNSPrefix); + return name; + } + + /** + 
* Generate tags path from tagsStack + */ + tagsPath(tagName){ + //TODO: return TagPath Object. User can call match method with path + return ""; + } + + isUnpaired(tagName){ + return this.options.tags.unpaired.indexOf(tagName) !== -1; + } + + /** + * valid expressions are + * tag nested + * * nested + * tag nested[attribute] + * tag nested[attribute=""] + * tag nested[attribute!=""] + * tag nested:0 //for future + * @param {string} tagName + * @returns + */ + isStopNode(node){ + for (let i = 0; i < this.stopNodes.length; i++) { + const givenPath = this.stopNodes[i]; + if(givenPath.match(this.tagsStack, node)) return true; + } + return false + } + + replaceEntities(text){ + //TODO: if option is set then replace entities + return this.entityParser.parse(text) + } +} + +function resolveNameSpace(name, removeNSPrefix) { + if (removeNSPrefix) { + const parts = name.split(':'); + if(parts.length === 2){ + if (parts[0] === 'xmlns') return ''; + else return parts[1]; + }else reportError(`Multiple namespaces ${name}`) + } + return name; +} + +module.exports = Xml2JsParser; \ No newline at end of file diff --git a/amplify/functions/fetchDocuments/node_modules/fast-xml-parser/src/v5/XmlPartReader.js b/amplify/functions/fetchDocuments/node_modules/fast-xml-parser/src/v5/XmlPartReader.js new file mode 100644 index 0000000..56b180e --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/fast-xml-parser/src/v5/XmlPartReader.js @@ -0,0 +1,212 @@ +'use strict'; + +/** + * find paired tag for a stop node + * @param {string} xmlDoc + * @param {string} tagName + * @param {number} i : start index + */ +function readStopNode(xmlDoc, tagName, i){ + const startIndex = i; + // Starting at 1 since we already have an open tag + let openTagCount = 1; + + for (; i < xmlDoc.length; i++) { + if( xmlDoc[i] === "<"){ + if (xmlDoc[i+1] === "/") {//close tag + const closeIndex = findSubStrIndex(xmlDoc, ">", i, `${tagName} is not closed`); + let closeTagName = 
xmlDoc.substring(i+2,closeIndex).trim(); + if(closeTagName === tagName){ + openTagCount--; + if (openTagCount === 0) { + return { + tagContent: xmlDoc.substring(startIndex, i), + i : closeIndex + } + } + } + i=closeIndex; + } else if(xmlDoc[i+1] === '?') { + const closeIndex = findSubStrIndex(xmlDoc, "?>", i+1, "StopNode is not closed.") + i=closeIndex; + } else if(xmlDoc.substr(i + 1, 3) === '!--') { + const closeIndex = findSubStrIndex(xmlDoc, "-->", i+3, "StopNode is not closed.") + i=closeIndex; + } else if(xmlDoc.substr(i + 1, 2) === '![') { + const closeIndex = findSubStrIndex(xmlDoc, "]]>", i, "StopNode is not closed.") - 2; + i=closeIndex; + } else { + const tagData = readTagExp(xmlDoc, i, '>') + + if (tagData) { + const openTagName = tagData && tagData.tagName; + if (openTagName === tagName && tagData.tagExp[tagData.tagExp.length-1] !== "/") { + openTagCount++; + } + i=tagData.closeIndex; + } + } + } + }//end for loop +} + +/** + * Read closing tag name + * @param {Source} source + * @returns tag name + */ +function readClosingTagName(source){ + let text = ""; //temporary data + while(source.canRead()){ + let ch = source.readCh(); + // if (ch === null || ch === undefined) break; + // source.updateBuffer(); + + if (ch === ">") return text.trimEnd(); + else text += ch; + } + throw new Error(`Unexpected end of source. Reading '${substr}'`); +} + +/** + * Read XML tag and build attributes map + * This function can be used to read normal tag, pi tag. + * This function can't be used to read comment, CDATA, DOCTYPE. 
+ * Eg + * @param {string} xmlDoc + * @param {number} startIndex starting index + * @returns tag expression includes tag name & attribute string + */ +function readTagExp(parser) { + let inSingleQuotes = false; + let inDoubleQuotes = false; + let i; + let EOE = false; + + for (i = 0; parser.source.canRead(i); i++) { + const char = parser.source.readChAt(i); + + if (char === "'" && !inDoubleQuotes) { + inSingleQuotes = !inSingleQuotes; + } else if (char === '"' && !inSingleQuotes) { + inDoubleQuotes = !inDoubleQuotes; + } else if (char === '>' && !inSingleQuotes && !inDoubleQuotes) { + // If not inside quotes, stop reading at '>' + EOE = true; + break; + } + + } + if(inSingleQuotes || inDoubleQuotes){ + throw new Error("Invalid attribute expression. Quote is not properly closed"); + }else if(!EOE) throw new Error("Unexpected closing of source. Waiting for '>'"); + + + const exp = parser.source.readStr(i); + parser.source.updateBufferBoundary(i + 1); + return buildTagExpObj(exp, parser) +} + +function readPiExp(parser) { + let inSingleQuotes = false; + let inDoubleQuotes = false; + let i; + let EOE = false; + + for (i = 0; parser.source.canRead(i) ; i++) { + const currentChar = parser.source.readChAt(i); + const nextChar = parser.source.readChAt(i+1); + + if (currentChar === "'" && !inDoubleQuotes) { + inSingleQuotes = !inSingleQuotes; + } else if (currentChar === '"' && !inSingleQuotes) { + inDoubleQuotes = !inDoubleQuotes; + } + + if (!inSingleQuotes && !inDoubleQuotes) { + if (currentChar === '?' && nextChar === '>') { + EOE = true; + break; // Exit the loop when '?>' is found + } + } + } + if(inSingleQuotes || inDoubleQuotes){ + throw new Error("Invalid attribute expression. Quote is not properly closed in PI tag expression"); + }else if(!EOE) throw new Error("Unexpected closing of source. 
Waiting for '?>'"); + + if(!parser.options.attributes.ignore){ + //TODO: use regex to verify attributes if not set to ignore + } + + const exp = parser.source.readStr(i); + parser.source.updateBufferBoundary(i + 1); + return buildTagExpObj(exp, parser) +} + +function buildTagExpObj(exp, parser){ + const tagExp = { + tagName: "", + selfClosing: false + }; + let attrsExp = ""; + + if(exp[exp.length -1] === "/") tagExp.selfClosing = true; + + //separate tag name + let i = 0; + for (; i < exp.length; i++) { + const char = exp[i]; + if(char === " "){ + tagExp.tagName = exp.substring(0, i); + attrsExp = exp.substring(i + 1); + break; + } + } + //only tag + if(tagExp.tagName.length === 0 && i === exp.length)tagExp.tagName = exp; + + tagExp.tagName = tagExp.tagName.trimEnd(); + + if(!parser.options.attributes.ignore && attrsExp.length > 0){ + parseAttributesExp(attrsExp,parser) + } + + return tagExp; +} + +const attrsRegx = new RegExp('([^\\s=]+)\\s*(=\\s*([\'"])([\\s\\S]*?)\\3)?', 'gm'); + +function parseAttributesExp(attrStr, parser) { + const matches = getAllMatches(attrStr, attrsRegx); + const len = matches.length; //don't make it inline + for (let i = 0; i < len; i++) { + let attrName = parser.processAttrName(matches[i][1]); + let attrVal = parser.replaceEntities(matches[i][4] || true); + + parser.outputBuilder.addAttribute(attrName, attrVal); + } +} + + +const getAllMatches = function(string, regex) { + const matches = []; + let match = regex.exec(string); + while (match) { + const allmatches = []; + allmatches.startIndex = regex.lastIndex - match[0].length; + const len = match.length; + for (let index = 0; index < len; index++) { + allmatches.push(match[index]); + } + matches.push(allmatches); + match = regex.exec(string); + } + return matches; +}; + +module.exports = { + readStopNode: readStopNode, + readClosingTagName: readClosingTagName, + readTagExp: readTagExp, + readPiExp: readPiExp, +} \ No newline at end of file diff --git 
a/amplify/functions/fetchDocuments/node_modules/fast-xml-parser/src/v5/XmlSpecialTagsReader.js b/amplify/functions/fetchDocuments/node_modules/fast-xml-parser/src/v5/XmlSpecialTagsReader.js new file mode 100644 index 0000000..0fba196 --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/fast-xml-parser/src/v5/XmlSpecialTagsReader.js @@ -0,0 +1,118 @@ +const {readPiExp} = require("./XmlPartReader"); + +function readCdata(parser){ + //"); + parser.outputBuilder.addCdata(text); +} +function readPiTag(parser){ + //"); + if(!tagExp) throw new Error("Invalid Pi Tag expression."); + + if (tagExp.tagName === "?xml") {//TODO: test if tagName is just xml + parser.outputBuilder.addDeclaration(); + } else { + parser.outputBuilder.addPi("?"+tagExp.tagName); + } +} + +function readComment(parser){ + //"); + parser.outputBuilder.addComment(text); +} + +const DOCTYPE_tags = { + "EL":/^EMENT\s+([^\s>]+)\s+(ANY|EMPTY|\(.+\)\s*$)/m, + "AT":/^TLIST\s+[^\s]+\s+[^\s]+\s+[^\s]+\s+[^\s]+\s+$/m, + "NO":/^TATION.+$/m +} +function readDocType(parser){ + //"); + const regx = DOCTYPE_tags[str]; + if(regx){ + const match = dTagExp.match(regx); + if(!match) throw new Error("Invalid DOCTYPE"); + }else throw new Error("Invalid DOCTYPE"); + } + }else if( ch === '>' && lastch === "]"){//end of doctype + return; + } + }else if( ch === '>'){//end of doctype + return; + }else if( ch === '['){ + hasBody = true; + }else{ + lastch = ch; + } + }//End While loop + +} + +function registerEntity(parser){ + //read Entity + let attrBoundary=""; + let name ="", val =""; + while(source.canRead()){ + let ch = source.readCh(); + + if(attrBoundary){ + if (ch === attrBoundary){ + val = text; + text = "" + } + }else if(ch === " " || ch === "\t"){ + if(!name){ + name = text.trimStart(); + text = ""; + } + }else if (ch === '"' || ch === "'") {//start of attrBoundary + attrBoundary = ch; + }else if(ch === ">"){ + parser.entityParser.addExternalEntity(name,val); + return; + }else{ + text+=ch; + } + } +} + 
+module.exports = { + readCdata: readCdata, + readComment:readComment, + readDocType:readDocType, + readPiTag:readPiTag +} \ No newline at end of file diff --git a/amplify/functions/fetchDocuments/node_modules/fast-xml-parser/src/v5/inputSource/BufferSource.js b/amplify/functions/fetchDocuments/node_modules/fast-xml-parser/src/v5/inputSource/BufferSource.js new file mode 100644 index 0000000..b83ce46 --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/fast-xml-parser/src/v5/inputSource/BufferSource.js @@ -0,0 +1,118 @@ +const Constants = { + space: 32, + tab: 9 +} +class BufferSource{ + constructor(bytesArr){ + this.line = 1; + this.cols = 0; + this.buffer = bytesArr; + this.startIndex = 0; + } + + + + readCh() { + return String.fromCharCode(this.buffer[this.startIndex++]); + } + + readChAt(index) { + return String.fromCharCode(this.buffer[this.startIndex+index]); + } + + readStr(n,from){ + if(typeof from === "undefined") from = this.startIndex; + return this.buffer.slice(from, from + n).toString(); + } + + readUpto(stopStr) { + const inputLength = this.buffer.length; + const stopLength = stopStr.length; + const stopBuffer = Buffer.from(stopStr); + + for (let i = this.startIndex; i < inputLength; i++) { + let match = true; + for (let j = 0; j < stopLength; j++) { + if (this.buffer[i + j] !== stopBuffer[j]) { + match = false; + break; + } + } + + if (match) { + const result = this.buffer.slice(this.startIndex, i).toString(); + this.startIndex = i + stopLength; + return result; + } + } + + throw new Error(`Unexpected end of source. 
Reading '${stopStr}'`); +} + +readUptoCloseTag(stopStr) { //stopStr: "'){ //TODO: if it should be equivalent ASCII + match = 2; + //tag boundary found + // this.startIndex + } + }else{ + match = 1; + for (let j = 0; j < stopLength; j++) { + if (this.buffer[i + j] !== stopBuffer[j]) { + match = 0; + break; + } + } + } + if (match === 2) {//matched closing part + const result = this.buffer.slice(this.startIndex, stopIndex - 1 ).toString(); + this.startIndex = i + 1; + return result; + } + } + + throw new Error(`Unexpected end of source. Reading '${stopStr}'`); +} + + readFromBuffer(n, shouldUpdate) { + let ch; + if (n === 1) { + ch = this.buffer[this.startIndex]; + if (ch === 10) { + this.line++; + this.cols = 1; + } else { + this.cols++; + } + ch = String.fromCharCode(ch); + } else { + this.cols += n; + ch = this.buffer.slice(this.startIndex, this.startIndex + n).toString(); + } + if (shouldUpdate) this.updateBuffer(n); + return ch; + } + + updateBufferBoundary(n = 1) { //n: number of characters read + this.startIndex += n; + } + + canRead(n){ + n = n || this.startIndex; + return this.buffer.length - n + 1 > 0; + } + +} + +module.exports = BufferSource; \ No newline at end of file diff --git a/amplify/functions/fetchDocuments/node_modules/fast-xml-parser/src/v5/inputSource/StringSource.js b/amplify/functions/fetchDocuments/node_modules/fast-xml-parser/src/v5/inputSource/StringSource.js new file mode 100644 index 0000000..a996528 --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/fast-xml-parser/src/v5/inputSource/StringSource.js @@ -0,0 +1,123 @@ +const whiteSpaces = [" ", "\n", "\t"]; + + +class StringSource{ + constructor(str){ + this.line = 1; + this.cols = 0; + this.buffer = str; + //a boundary pointer to indicate where from the buffer dat should be read + // data before this pointer can be deleted to free the memory + this.startIndex = 0; + } + + readCh() { + return this.buffer[this.startIndex++]; + } + + readChAt(index) { + return 
this.buffer[this.startIndex+index]; + } + + readStr(n,from){ + if(typeof from === "undefined") from = this.startIndex; + return this.buffer.substring(from, from + n); + } + + readUpto(stopStr) { + const inputLength = this.buffer.length; + const stopLength = stopStr.length; + + for (let i = this.startIndex; i < inputLength; i++) { + let match = true; + for (let j = 0; j < stopLength; j++) { + if (this.buffer[i + j] !== stopStr[j]) { + match = false; + break; + } + } + + if (match) { + const result = this.buffer.substring(this.startIndex, i); + this.startIndex = i + stopLength; + return result; + } + } + + throw new Error(`Unexpected end of source. Reading '${stopStr}'`); + } + + readUptoCloseTag(stopStr) { //stopStr: "'){ + match = 2; + //tag boundary found + // this.startIndex + } + }else{ + match = 1; + for (let j = 0; j < stopLength; j++) { + if (this.buffer[i + j] !== stopStr[j]) { + match = 0; + break; + } + } + } + if (match === 2) {//matched closing part + const result = this.buffer.substring(this.startIndex, stopIndex - 1 ); + this.startIndex = i + 1; + return result; + } + } + + throw new Error(`Unexpected end of source. 
Reading '${stopStr}'`); + } + + readFromBuffer(n, updateIndex){ + let ch; + if(n===1){ + ch = this.buffer[this.startIndex]; + // if(ch === "\n") { + // this.line++; + // this.cols = 1; + // }else{ + // this.cols++; + // } + }else{ + ch = this.buffer.substring(this.startIndex, this.startIndex + n); + // if("".indexOf("\n") !== -1){ + // //TODO: handle the scenario when there are multiple lines + // //TODO: col should be set to number of chars after last '\n' + // // this.cols = 1; + // }else{ + // this.cols += n; + + // } + } + if(updateIndex) this.updateBufferBoundary(n); + return ch; + } + + //TODO: rename to updateBufferReadIndex + + updateBufferBoundary(n = 1) { //n: number of characters read + this.startIndex += n; + } + + canRead(n){ + n = n || this.startIndex; + return this.buffer.length - n + 1 > 0; + } + +} + +module.exports = StringSource; \ No newline at end of file diff --git a/amplify/functions/fetchDocuments/node_modules/fast-xml-parser/src/v5/valueParsers/EntitiesParser.js b/amplify/functions/fetchDocuments/node_modules/fast-xml-parser/src/v5/valueParsers/EntitiesParser.js new file mode 100644 index 0000000..62cc02f --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/fast-xml-parser/src/v5/valueParsers/EntitiesParser.js @@ -0,0 +1,107 @@ +const ampEntity = { regex: /&(amp|#38|#x26);/g, val : "&"}; +const htmlEntities = { + "space": { regex: /&(nbsp|#160);/g, val: " " }, + // "lt" : { regex: /&(lt|#60);/g, val: "<" }, + // "gt" : { regex: /&(gt|#62);/g, val: ">" }, + // "amp" : { regex: /&(amp|#38);/g, val: "&" }, + // "quot" : { regex: /&(quot|#34);/g, val: "\"" }, + // "apos" : { regex: /&(apos|#39);/g, val: "'" }, + "cent" : { regex: /&(cent|#162);/g, val: "¢" }, + "pound" : { regex: /&(pound|#163);/g, val: "£" }, + "yen" : { regex: /&(yen|#165);/g, val: "¥" }, + "euro" : { regex: /&(euro|#8364);/g, val: "€" }, + "copyright" : { regex: /&(copy|#169);/g, val: "©" }, + "reg" : { regex: /&(reg|#174);/g, val: "®" }, + "inr" : { regex: 
/&(inr|#8377);/g, val: "₹" }, + "num_dec": { regex: /&#([0-9]{1,7});/g, val : (_, str) => String.fromCharCode(Number.parseInt(str, 10)) }, + "num_hex": { regex: /&#x([0-9a-fA-F]{1,6});/g, val : (_, str) => String.fromCharCode(Number.parseInt(str, 16)) }, +}; + +class EntitiesParser{ + constructor(replaceHtmlEntities) { + this.replaceHtmlEntities = replaceHtmlEntities; + this.docTypeEntities = {}; + this.lastEntities = { + "apos" : { regex: /&(apos|#39|#x27);/g, val : "'"}, + "gt" : { regex: /&(gt|#62|#x3E);/g, val : ">"}, + "lt" : { regex: /&(lt|#60|#x3C);/g, val : "<"}, + "quot" : { regex: /&(quot|#34|#x22);/g, val : "\""}, + }; + } + + addExternalEntities(externalEntities){ + const entKeys = Object.keys(externalEntities); + for (let i = 0; i < entKeys.length; i++) { + const ent = entKeys[i]; + this.addExternalEntity(ent,externalEntities[ent]) + } + } + addExternalEntity(key,val){ + validateEntityName(key); + if(val.indexOf("&") !== -1) { + reportWarning(`Entity ${key} is not added as '&' is found in value;`) + return; + }else{ + this.lastEntities[ent] = { + regex: new RegExp("&"+key+";","g"), + val : val + } + } + } + + addDocTypeEntities(entities){ + const entKeys = Object.keys(entities); + for (let i = 0; i < entKeys.length; i++) { + const ent = entKeys[i]; + this.docTypeEntities[ent] = { + regex: new RegExp("&"+ent+";","g"), + val : entities[ent] + } + } + } + + parse(val){ + return this.replaceEntitiesValue(val) + } + + /** + * 1. Replace DOCTYPE entities + * 2. Replace external entities + * 3. 
Replace HTML entities if asked + * @param {string} val + */ + replaceEntitiesValue(val){ + if(typeof val === "string" && val.length > 0){ + for(let entityName in this.docTypeEntities){ + const entity = this.docTypeEntities[entityName]; + val = val.replace( entity.regx, entity.val); + } + for(let entityName in this.lastEntities){ + const entity = this.lastEntities[entityName]; + val = val.replace( entity.regex, entity.val); + } + if(this.replaceHtmlEntities){ + for(let entityName in htmlEntities){ + const entity = htmlEntities[entityName]; + val = val.replace( entity.regex, entity.val); + } + } + val = val.replace( ampEntity.regex, ampEntity.val); + } + return val; + } +}; + +//an entity name should not contains special characters that may be used in regex +//Eg !?\\\/[]$%{}^&*()<> +const specialChar = "!?\\\/[]$%{}^&*()<>|+"; + +function validateEntityName(name){ + for (let i = 0; i < specialChar.length; i++) { + const ch = specialChar[i]; + if(name.indexOf(ch) !== -1) throw new Error(`Invalid character ${ch} in entity name`); + } + return name; +} + +module.exports = EntitiesParser; \ No newline at end of file diff --git a/amplify/functions/fetchDocuments/node_modules/fast-xml-parser/src/v5/valueParsers/booleanParser.js b/amplify/functions/fetchDocuments/node_modules/fast-xml-parser/src/v5/valueParsers/booleanParser.js new file mode 100644 index 0000000..f8f5d12 --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/fast-xml-parser/src/v5/valueParsers/booleanParser.js @@ -0,0 +1,23 @@ +class boolParser{ + constructor(trueList, falseList){ + if(trueList) + this.trueList = trueList; + else + this.trueList = ["true"]; + + if(falseList) + this.falseList = falseList; + else + this.falseList = ["false"]; + } + parse(val){ + if (typeof val === 'string') { + //TODO: performance: don't convert + const temp = val.toLowerCase(); + if(this.trueList.indexOf(temp) !== -1) return true; + else if(this.falseList.indexOf(temp) !== -1 ) return false; + } + return val; + } 
+} +module.exports = boolParser; \ No newline at end of file diff --git a/amplify/functions/fetchDocuments/node_modules/fast-xml-parser/src/v5/valueParsers/booleanParserExt.js b/amplify/functions/fetchDocuments/node_modules/fast-xml-parser/src/v5/valueParsers/booleanParserExt.js new file mode 100644 index 0000000..21b8050 --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/fast-xml-parser/src/v5/valueParsers/booleanParserExt.js @@ -0,0 +1,20 @@ +function boolParserExt(val){ + if(isArray(val)){ + for (let i = 0; i < val.length; i++) { + val[i] = parse(val[i]) + } + }else{ + val = parse(val) + } + return val; +} + +function parse(val){ + if (typeof val === 'string') { + const temp = val.toLowerCase(); + if(temp === 'true' || temp ==="yes" || temp==="1") return true; + else if(temp === 'false' || temp ==="no" || temp==="0") return false; + } + return val; +} +module.exports = boolParserExt; \ No newline at end of file diff --git a/amplify/functions/fetchDocuments/node_modules/fast-xml-parser/src/v5/valueParsers/currency.js b/amplify/functions/fetchDocuments/node_modules/fast-xml-parser/src/v5/valueParsers/currency.js new file mode 100644 index 0000000..82e21e7 --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/fast-xml-parser/src/v5/valueParsers/currency.js @@ -0,0 +1,40 @@ +const defaultOptions = { + maxLength: 200, + // locale: "en-IN" +} +const localeMap = { + "$":"en-US", + "€":"de-DE", + "£":"en-GB", + "¥":"ja-JP", + "₹":"en-IN", +} +const sign = "(?:-|\+)?"; +const digitsAndSeparator = "(?:\d+|\d{1,3}(?:,\d{3})+)"; +const decimalPart = "(?:\.\d{1,2})?"; +const symbol = "(?:\$|€|¥|₹)?"; + +const currencyCheckRegex = /^\s*(?:-|\+)?(?:\d+|\d{1,3}(?:,\d{3})+)?(?:\.\d{1,2})?\s*(?:\$|€|¥|₹)?\s*$/u; + +class CurrencyParser{ + constructor(options){ + this.options = options || defaultOptions; + } + parse(val){ + if (typeof val === 'string' && val.length <= this.options.maxLength) { + if(val.indexOf(",,") !== -1 && val.indexOf(".." 
!== -1)){ + const match = val.match(currencyCheckRegex); + if(match){ + const locale = this.options.locale || localeMap[match[2]||match[5]||"₹"]; + const formatter = new Intl.NumberFormat(locale) + val = val.replace(/[^0-9,.]/g, '').trim(); + val = Number(val.replace(formatter.format(1000)[1], '')); + } + } + } + return val; + } +} +CurrencyParser.defaultOptions = defaultOptions; + +module.exports = CurrencyParser; \ No newline at end of file diff --git a/amplify/functions/fetchDocuments/node_modules/fast-xml-parser/src/v5/valueParsers/join.js b/amplify/functions/fetchDocuments/node_modules/fast-xml-parser/src/v5/valueParsers/join.js new file mode 100644 index 0000000..d7f2027 --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/fast-xml-parser/src/v5/valueParsers/join.js @@ -0,0 +1,14 @@ +/** + * + * @param {array} val + * @param {string} by + * @returns + */ +function join(val, by=" "){ + if(isArray(val)){ + val.join(by) + } + return val; +} + +module.exports = join; \ No newline at end of file diff --git a/amplify/functions/fetchDocuments/node_modules/fast-xml-parser/src/v5/valueParsers/number.js b/amplify/functions/fetchDocuments/node_modules/fast-xml-parser/src/v5/valueParsers/number.js new file mode 100644 index 0000000..bef3803 --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/fast-xml-parser/src/v5/valueParsers/number.js @@ -0,0 +1,16 @@ +const toNumber = require("strnum"); + + +class numParser{ + constructor(options){ + this.options = options; + } + parse(val){ + if (typeof val === 'string') { + val = toNumber(val,this.options); + } + return val; + } +} + +module.exports = numParser; \ No newline at end of file diff --git a/amplify/functions/fetchDocuments/node_modules/fast-xml-parser/src/v5/valueParsers/trim.js b/amplify/functions/fetchDocuments/node_modules/fast-xml-parser/src/v5/valueParsers/trim.js new file mode 100644 index 0000000..ecce49a --- /dev/null +++ 
b/amplify/functions/fetchDocuments/node_modules/fast-xml-parser/src/v5/valueParsers/trim.js @@ -0,0 +1,8 @@ +class trimmer{ + parse(val){ + if(typeof val === "string") return val.trim(); + else return val; + } +} + +module.exports = trimmer; \ No newline at end of file diff --git a/amplify/functions/fetchDocuments/node_modules/fast-xml-parser/src/validator.js b/amplify/functions/fetchDocuments/node_modules/fast-xml-parser/src/validator.js new file mode 100644 index 0000000..3b1b2ef --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/fast-xml-parser/src/validator.js @@ -0,0 +1,425 @@ +'use strict'; + +const util = require('./util'); + +const defaultOptions = { + allowBooleanAttributes: false, //A tag can have attributes without any value + unpairedTags: [] +}; + +//const tagsPattern = new RegExp("<\\/?([\\w:\\-_\.]+)\\s*\/?>","g"); +exports.validate = function (xmlData, options) { + options = Object.assign({}, defaultOptions, options); + + //xmlData = xmlData.replace(/(\r\n|\n|\r)/gm,"");//make it single line + //xmlData = xmlData.replace(/(^\s*<\?xml.*?\?>)/g,"");//Remove XML starting tag + //xmlData = xmlData.replace(/()/g,"");//Remove DOCTYPE + const tags = []; + let tagFound = false; + + //indicates that the root tag has been closed (aka. 
depth 0 has been reached) + let reachedRoot = false; + + if (xmlData[0] === '\ufeff') { + // check for byte order mark (BOM) + xmlData = xmlData.substr(1); + } + + for (let i = 0; i < xmlData.length; i++) { + + if (xmlData[i] === '<' && xmlData[i+1] === '?') { + i+=2; + i = readPI(xmlData,i); + if (i.err) return i; + }else if (xmlData[i] === '<') { + //starting of tag + //read until you reach to '>' avoiding any '>' in attribute value + let tagStartPos = i; + i++; + + if (xmlData[i] === '!') { + i = readCommentAndCDATA(xmlData, i); + continue; + } else { + let closingTag = false; + if (xmlData[i] === '/') { + //closing tag + closingTag = true; + i++; + } + //read tagname + let tagName = ''; + for (; i < xmlData.length && + xmlData[i] !== '>' && + xmlData[i] !== ' ' && + xmlData[i] !== '\t' && + xmlData[i] !== '\n' && + xmlData[i] !== '\r'; i++ + ) { + tagName += xmlData[i]; + } + tagName = tagName.trim(); + //console.log(tagName); + + if (tagName[tagName.length - 1] === '/') { + //self closing tag without attributes + tagName = tagName.substring(0, tagName.length - 1); + //continue; + i--; + } + if (!validateTagName(tagName)) { + let msg; + if (tagName.trim().length === 0) { + msg = "Invalid space after '<'."; + } else { + msg = "Tag '"+tagName+"' is an invalid name."; + } + return getErrorObject('InvalidTag', msg, getLineNumberForPosition(xmlData, i)); + } + + const result = readAttributeStr(xmlData, i); + if (result === false) { + return getErrorObject('InvalidAttr', "Attributes for '"+tagName+"' have open quote.", getLineNumberForPosition(xmlData, i)); + } + let attrStr = result.value; + i = result.index; + + if (attrStr[attrStr.length - 1] === '/') { + //self closing tag + const attrStrStart = i - attrStr.length; + attrStr = attrStr.substring(0, attrStr.length - 1); + const isValid = validateAttributeString(attrStr, options); + if (isValid === true) { + tagFound = true; + //continue; //text may presents after self closing tag + } else { + //the result from the 
nested function returns the position of the error within the attribute + //in order to get the 'true' error line, we need to calculate the position where the attribute begins (i - attrStr.length) and then add the position within the attribute + //this gives us the absolute index in the entire xml, which we can use to find the line at last + return getErrorObject(isValid.err.code, isValid.err.msg, getLineNumberForPosition(xmlData, attrStrStart + isValid.err.line)); + } + } else if (closingTag) { + if (!result.tagClosed) { + return getErrorObject('InvalidTag', "Closing tag '"+tagName+"' doesn't have proper closing.", getLineNumberForPosition(xmlData, i)); + } else if (attrStr.trim().length > 0) { + return getErrorObject('InvalidTag', "Closing tag '"+tagName+"' can't have attributes or invalid starting.", getLineNumberForPosition(xmlData, tagStartPos)); + } else if (tags.length === 0) { + return getErrorObject('InvalidTag', "Closing tag '"+tagName+"' has not been opened.", getLineNumberForPosition(xmlData, tagStartPos)); + } else { + const otg = tags.pop(); + if (tagName !== otg.tagName) { + let openPos = getLineNumberForPosition(xmlData, otg.tagStartPos); + return getErrorObject('InvalidTag', + "Expected closing tag '"+otg.tagName+"' (opened in line "+openPos.line+", col "+openPos.col+") instead of closing tag '"+tagName+"'.", + getLineNumberForPosition(xmlData, tagStartPos)); + } + + //when there are no more tags, we reached the root level. 
+ if (tags.length == 0) { + reachedRoot = true; + } + } + } else { + const isValid = validateAttributeString(attrStr, options); + if (isValid !== true) { + //the result from the nested function returns the position of the error within the attribute + //in order to get the 'true' error line, we need to calculate the position where the attribute begins (i - attrStr.length) and then add the position within the attribute + //this gives us the absolute index in the entire xml, which we can use to find the line at last + return getErrorObject(isValid.err.code, isValid.err.msg, getLineNumberForPosition(xmlData, i - attrStr.length + isValid.err.line)); + } + + //if the root level has been reached before ... + if (reachedRoot === true) { + return getErrorObject('InvalidXml', 'Multiple possible root nodes found.', getLineNumberForPosition(xmlData, i)); + } else if(options.unpairedTags.indexOf(tagName) !== -1){ + //don't push into stack + } else { + tags.push({tagName, tagStartPos}); + } + tagFound = true; + } + + //skip tag text value + //It may include comments and CDATA value + for (i++; i < xmlData.length; i++) { + if (xmlData[i] === '<') { + if (xmlData[i + 1] === '!') { + //comment or CADATA + i++; + i = readCommentAndCDATA(xmlData, i); + continue; + } else if (xmlData[i+1] === '?') { + i = readPI(xmlData, ++i); + if (i.err) return i; + } else{ + break; + } + } else if (xmlData[i] === '&') { + const afterAmp = validateAmpersand(xmlData, i); + if (afterAmp == -1) + return getErrorObject('InvalidChar', "char '&' is not expected.", getLineNumberForPosition(xmlData, i)); + i = afterAmp; + }else{ + if (reachedRoot === true && !isWhiteSpace(xmlData[i])) { + return getErrorObject('InvalidXml', "Extra text at the end", getLineNumberForPosition(xmlData, i)); + } + } + } //end of reading tag text value + if (xmlData[i] === '<') { + i--; + } + } + } else { + if ( isWhiteSpace(xmlData[i])) { + continue; + } + return getErrorObject('InvalidChar', "char '"+xmlData[i]+"' is not 
expected.", getLineNumberForPosition(xmlData, i)); + } + } + + if (!tagFound) { + return getErrorObject('InvalidXml', 'Start tag expected.', 1); + }else if (tags.length == 1) { + return getErrorObject('InvalidTag', "Unclosed tag '"+tags[0].tagName+"'.", getLineNumberForPosition(xmlData, tags[0].tagStartPos)); + }else if (tags.length > 0) { + return getErrorObject('InvalidXml', "Invalid '"+ + JSON.stringify(tags.map(t => t.tagName), null, 4).replace(/\r?\n/g, '')+ + "' found.", {line: 1, col: 1}); + } + + return true; +}; + +function isWhiteSpace(char){ + return char === ' ' || char === '\t' || char === '\n' || char === '\r'; +} +/** + * Read Processing insstructions and skip + * @param {*} xmlData + * @param {*} i + */ +function readPI(xmlData, i) { + const start = i; + for (; i < xmlData.length; i++) { + if (xmlData[i] == '?' || xmlData[i] == ' ') { + //tagname + const tagname = xmlData.substr(start, i - start); + if (i > 5 && tagname === 'xml') { + return getErrorObject('InvalidXml', 'XML declaration allowed only at the start of the document.', getLineNumberForPosition(xmlData, i)); + } else if (xmlData[i] == '?' 
&& xmlData[i + 1] == '>') { + //check if valid attribut string + i++; + break; + } else { + continue; + } + } + } + return i; +} + +function readCommentAndCDATA(xmlData, i) { + if (xmlData.length > i + 5 && xmlData[i + 1] === '-' && xmlData[i + 2] === '-') { + //comment + for (i += 3; i < xmlData.length; i++) { + if (xmlData[i] === '-' && xmlData[i + 1] === '-' && xmlData[i + 2] === '>') { + i += 2; + break; + } + } + } else if ( + xmlData.length > i + 8 && + xmlData[i + 1] === 'D' && + xmlData[i + 2] === 'O' && + xmlData[i + 3] === 'C' && + xmlData[i + 4] === 'T' && + xmlData[i + 5] === 'Y' && + xmlData[i + 6] === 'P' && + xmlData[i + 7] === 'E' + ) { + let angleBracketsCount = 1; + for (i += 8; i < xmlData.length; i++) { + if (xmlData[i] === '<') { + angleBracketsCount++; + } else if (xmlData[i] === '>') { + angleBracketsCount--; + if (angleBracketsCount === 0) { + break; + } + } + } + } else if ( + xmlData.length > i + 9 && + xmlData[i + 1] === '[' && + xmlData[i + 2] === 'C' && + xmlData[i + 3] === 'D' && + xmlData[i + 4] === 'A' && + xmlData[i + 5] === 'T' && + xmlData[i + 6] === 'A' && + xmlData[i + 7] === '[' + ) { + for (i += 8; i < xmlData.length; i++) { + if (xmlData[i] === ']' && xmlData[i + 1] === ']' && xmlData[i + 2] === '>') { + i += 2; + break; + } + } + } + + return i; +} + +const doubleQuote = '"'; +const singleQuote = "'"; + +/** + * Keep reading xmlData until '<' is found outside the attribute value. 
+ * @param {string} xmlData + * @param {number} i + */ +function readAttributeStr(xmlData, i) { + let attrStr = ''; + let startChar = ''; + let tagClosed = false; + for (; i < xmlData.length; i++) { + if (xmlData[i] === doubleQuote || xmlData[i] === singleQuote) { + if (startChar === '') { + startChar = xmlData[i]; + } else if (startChar !== xmlData[i]) { + //if vaue is enclosed with double quote then single quotes are allowed inside the value and vice versa + } else { + startChar = ''; + } + } else if (xmlData[i] === '>') { + if (startChar === '') { + tagClosed = true; + break; + } + } + attrStr += xmlData[i]; + } + if (startChar !== '') { + return false; + } + + return { + value: attrStr, + index: i, + tagClosed: tagClosed + }; +} + +/** + * Select all the attributes whether valid or invalid. + */ +const validAttrStrRegxp = new RegExp('(\\s*)([^\\s=]+)(\\s*=)?(\\s*([\'"])(([\\s\\S])*?)\\5)?', 'g'); + +//attr, ="sd", a="amit's", a="sd"b="saf", ab cd="" + +function validateAttributeString(attrStr, options) { + //console.log("start:"+attrStr+":end"); + + //if(attrStr.trim().length === 0) return true; //empty string + + const matches = util.getAllMatches(attrStr, validAttrStrRegxp); + const attrNames = {}; + + for (let i = 0; i < matches.length; i++) { + if (matches[i][1].length === 0) { + //nospace before attribute name: a="sd"b="saf" + return getErrorObject('InvalidAttr', "Attribute '"+matches[i][2]+"' has no space in starting.", getPositionFromMatch(matches[i])) + } else if (matches[i][3] !== undefined && matches[i][4] === undefined) { + return getErrorObject('InvalidAttr', "Attribute '"+matches[i][2]+"' is without value.", getPositionFromMatch(matches[i])); + } else if (matches[i][3] === undefined && !options.allowBooleanAttributes) { + //independent attribute: ab + return getErrorObject('InvalidAttr', "boolean attribute '"+matches[i][2]+"' is not allowed.", getPositionFromMatch(matches[i])); + } + /* else if(matches[i][6] === undefined){//attribute without 
value: ab= + return { err: { code:"InvalidAttr",msg:"attribute " + matches[i][2] + " has no value assigned."}}; + } */ + const attrName = matches[i][2]; + if (!validateAttrName(attrName)) { + return getErrorObject('InvalidAttr', "Attribute '"+attrName+"' is an invalid name.", getPositionFromMatch(matches[i])); + } + if (!attrNames.hasOwnProperty(attrName)) { + //check for duplicate attribute. + attrNames[attrName] = 1; + } else { + return getErrorObject('InvalidAttr', "Attribute '"+attrName+"' is repeated.", getPositionFromMatch(matches[i])); + } + } + + return true; +} + +function validateNumberAmpersand(xmlData, i) { + let re = /\d/; + if (xmlData[i] === 'x') { + i++; + re = /[\da-fA-F]/; + } + for (; i < xmlData.length; i++) { + if (xmlData[i] === ';') + return i; + if (!xmlData[i].match(re)) + break; + } + return -1; +} + +function validateAmpersand(xmlData, i) { + // https://www.w3.org/TR/xml/#dt-charref + i++; + if (xmlData[i] === ';') + return -1; + if (xmlData[i] === '#') { + i++; + return validateNumberAmpersand(xmlData, i); + } + let count = 0; + for (; i < xmlData.length; i++, count++) { + if (xmlData[i].match(/\w/) && count < 20) + continue; + if (xmlData[i] === ';') + break; + return -1; + } + return i; +} + +function getErrorObject(code, message, lineNumber) { + return { + err: { + code: code, + msg: message, + line: lineNumber.line || lineNumber, + col: lineNumber.col, + }, + }; +} + +function validateAttrName(attrName) { + return util.isName(attrName); +} + +// const startsWithXML = /^xml/i; + +function validateTagName(tagname) { + return util.isName(tagname) /* && !tagname.match(startsWithXML) */; +} + +//this function returns the line number for the character at the given index +function getLineNumberForPosition(xmlData, index) { + const lines = xmlData.substring(0, index).split(/\r?\n/); + return { + line: lines.length, + + // column number is last line's length + 1, because column numbering starts at 1: + col: lines[lines.length - 1].length + 1 
+ }; +} + +//this function returns the position of the first character of match within attrStr +function getPositionFromMatch(match) { + return match.startIndex + match[1].length; +} diff --git a/amplify/functions/fetchDocuments/node_modules/fast-xml-parser/src/xmlbuilder/json2xml.js b/amplify/functions/fetchDocuments/node_modules/fast-xml-parser/src/xmlbuilder/json2xml.js new file mode 100644 index 0000000..f30604a --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/fast-xml-parser/src/xmlbuilder/json2xml.js @@ -0,0 +1,281 @@ +'use strict'; +//parse Empty Node as self closing node +const buildFromOrderedJs = require('./orderedJs2Xml'); + +const defaultOptions = { + attributeNamePrefix: '@_', + attributesGroupName: false, + textNodeName: '#text', + ignoreAttributes: true, + cdataPropName: false, + format: false, + indentBy: ' ', + suppressEmptyNode: false, + suppressUnpairedNode: true, + suppressBooleanAttributes: true, + tagValueProcessor: function(key, a) { + return a; + }, + attributeValueProcessor: function(attrName, a) { + return a; + }, + preserveOrder: false, + commentPropName: false, + unpairedTags: [], + entities: [ + { regex: new RegExp("&", "g"), val: "&" },//it must be on top + { regex: new RegExp(">", "g"), val: ">" }, + { regex: new RegExp("<", "g"), val: "<" }, + { regex: new RegExp("\'", "g"), val: "'" }, + { regex: new RegExp("\"", "g"), val: """ } + ], + processEntities: true, + stopNodes: [], + // transformTagName: false, + // transformAttributeName: false, + oneListGroup: false +}; + +function Builder(options) { + this.options = Object.assign({}, defaultOptions, options); + if (this.options.ignoreAttributes || this.options.attributesGroupName) { + this.isAttribute = function(/*a*/) { + return false; + }; + } else { + this.attrPrefixLen = this.options.attributeNamePrefix.length; + this.isAttribute = isAttribute; + } + + this.processTextOrObjNode = processTextOrObjNode + + if (this.options.format) { + this.indentate = indentate; + 
this.tagEndChar = '>\n'; + this.newLine = '\n'; + } else { + this.indentate = function() { + return ''; + }; + this.tagEndChar = '>'; + this.newLine = ''; + } +} + +Builder.prototype.build = function(jObj) { + if(this.options.preserveOrder){ + return buildFromOrderedJs(jObj, this.options); + }else { + if(Array.isArray(jObj) && this.options.arrayNodeName && this.options.arrayNodeName.length > 1){ + jObj = { + [this.options.arrayNodeName] : jObj + } + } + return this.j2x(jObj, 0).val; + } +}; + +Builder.prototype.j2x = function(jObj, level) { + let attrStr = ''; + let val = ''; + for (let key in jObj) { + if(!Object.prototype.hasOwnProperty.call(jObj, key)) continue; + if (typeof jObj[key] === 'undefined') { + // supress undefined node only if it is not an attribute + if (this.isAttribute(key)) { + val += ''; + } + } else if (jObj[key] === null) { + // null attribute should be ignored by the attribute list, but should not cause the tag closing + if (this.isAttribute(key)) { + val += ''; + } else if (key[0] === '?') { + val += this.indentate(level) + '<' + key + '?' 
+ this.tagEndChar; + } else { + val += this.indentate(level) + '<' + key + '/' + this.tagEndChar; + } + // val += this.indentate(level) + '<' + key + '/' + this.tagEndChar; + } else if (jObj[key] instanceof Date) { + val += this.buildTextValNode(jObj[key], key, '', level); + } else if (typeof jObj[key] !== 'object') { + //premitive type + const attr = this.isAttribute(key); + if (attr) { + attrStr += this.buildAttrPairStr(attr, '' + jObj[key]); + }else { + //tag value + if (key === this.options.textNodeName) { + let newval = this.options.tagValueProcessor(key, '' + jObj[key]); + val += this.replaceEntitiesValue(newval); + } else { + val += this.buildTextValNode(jObj[key], key, '', level); + } + } + } else if (Array.isArray(jObj[key])) { + //repeated nodes + const arrLen = jObj[key].length; + let listTagVal = ""; + let listTagAttr = ""; + for (let j = 0; j < arrLen; j++) { + const item = jObj[key][j]; + if (typeof item === 'undefined') { + // supress undefined node + } else if (item === null) { + if(key[0] === "?") val += this.indentate(level) + '<' + key + '?' 
+ this.tagEndChar; + else val += this.indentate(level) + '<' + key + '/' + this.tagEndChar; + // val += this.indentate(level) + '<' + key + '/' + this.tagEndChar; + } else if (typeof item === 'object') { + if(this.options.oneListGroup){ + const result = this.j2x(item, level + 1); + listTagVal += result.val; + if (this.options.attributesGroupName && item.hasOwnProperty(this.options.attributesGroupName)) { + listTagAttr += result.attrStr + } + }else{ + listTagVal += this.processTextOrObjNode(item, key, level) + } + } else { + if (this.options.oneListGroup) { + let textValue = this.options.tagValueProcessor(key, item); + textValue = this.replaceEntitiesValue(textValue); + listTagVal += textValue; + } else { + listTagVal += this.buildTextValNode(item, key, '', level); + } + } + } + if(this.options.oneListGroup){ + listTagVal = this.buildObjectNode(listTagVal, key, listTagAttr, level); + } + val += listTagVal; + } else { + //nested node + if (this.options.attributesGroupName && key === this.options.attributesGroupName) { + const Ks = Object.keys(jObj[key]); + const L = Ks.length; + for (let j = 0; j < L; j++) { + attrStr += this.buildAttrPairStr(Ks[j], '' + jObj[key][Ks[j]]); + } + } else { + val += this.processTextOrObjNode(jObj[key], key, level) + } + } + } + return {attrStr: attrStr, val: val}; +}; + +Builder.prototype.buildAttrPairStr = function(attrName, val){ + val = this.options.attributeValueProcessor(attrName, '' + val); + val = this.replaceEntitiesValue(val); + if (this.options.suppressBooleanAttributes && val === "true") { + return ' ' + attrName; + } else return ' ' + attrName + '="' + val + '"'; +} + +function processTextOrObjNode (object, key, level) { + const result = this.j2x(object, level + 1); + if (object[this.options.textNodeName] !== undefined && Object.keys(object).length === 1) { + return this.buildTextValNode(object[this.options.textNodeName], key, result.attrStr, level); + } else { + return this.buildObjectNode(result.val, key, result.attrStr, 
level); + } +} + +Builder.prototype.buildObjectNode = function(val, key, attrStr, level) { + if(val === ""){ + if(key[0] === "?") return this.indentate(level) + '<' + key + attrStr+ '?' + this.tagEndChar; + else { + return this.indentate(level) + '<' + key + attrStr + this.closeTag(key) + this.tagEndChar; + } + }else{ + + let tagEndExp = '' + val + tagEndExp ); + } else if (this.options.commentPropName !== false && key === this.options.commentPropName && piClosingChar.length === 0) { + return this.indentate(level) + `` + this.newLine; + }else { + return ( + this.indentate(level) + '<' + key + attrStr + piClosingChar + this.tagEndChar + + val + + this.indentate(level) + tagEndExp ); + } + } +} + +Builder.prototype.closeTag = function(key){ + let closeTag = ""; + if(this.options.unpairedTags.indexOf(key) !== -1){ //unpaired + if(!this.options.suppressUnpairedNode) closeTag = "/" + }else if(this.options.suppressEmptyNode){ //empty + closeTag = "/"; + }else{ + closeTag = `>` + this.newLine; + }else if (this.options.commentPropName !== false && key === this.options.commentPropName) { + return this.indentate(level) + `` + this.newLine; + }else if(key[0] === "?") {//PI tag + return this.indentate(level) + '<' + key + attrStr+ '?' 
+ this.tagEndChar; + }else{ + let textValue = this.options.tagValueProcessor(key, val); + textValue = this.replaceEntitiesValue(textValue); + + if( textValue === ''){ + return this.indentate(level) + '<' + key + attrStr + this.closeTag(key) + this.tagEndChar; + }else{ + return this.indentate(level) + '<' + key + attrStr + '>' + + textValue + + ' 0 && this.options.processEntities){ + for (let i=0; i 0) { + indentation = EOL; + } + return arrToStr(jArray, options, "", indentation); +} + +function arrToStr(arr, options, jPath, indentation) { + let xmlStr = ""; + let isPreviousElementTag = false; + + for (let i = 0; i < arr.length; i++) { + const tagObj = arr[i]; + const tagName = propName(tagObj); + if(tagName === undefined) continue; + + let newJPath = ""; + if (jPath.length === 0) newJPath = tagName + else newJPath = `${jPath}.${tagName}`; + + if (tagName === options.textNodeName) { + let tagText = tagObj[tagName]; + if (!isStopNode(newJPath, options)) { + tagText = options.tagValueProcessor(tagName, tagText); + tagText = replaceEntitiesValue(tagText, options); + } + if (isPreviousElementTag) { + xmlStr += indentation; + } + xmlStr += tagText; + isPreviousElementTag = false; + continue; + } else if (tagName === options.cdataPropName) { + if (isPreviousElementTag) { + xmlStr += indentation; + } + xmlStr += ``; + isPreviousElementTag = false; + continue; + } else if (tagName === options.commentPropName) { + xmlStr += indentation + ``; + isPreviousElementTag = true; + continue; + } else if (tagName[0] === "?") { + const attStr = attr_to_str(tagObj[":@"], options); + const tempInd = tagName === "?xml" ? "" : indentation; + let piTextNodeName = tagObj[tagName][0][options.textNodeName]; + piTextNodeName = piTextNodeName.length !== 0 ? 
" " + piTextNodeName : ""; //remove extra spacing + xmlStr += tempInd + `<${tagName}${piTextNodeName}${attStr}?>`; + isPreviousElementTag = true; + continue; + } + let newIdentation = indentation; + if (newIdentation !== "") { + newIdentation += options.indentBy; + } + const attStr = attr_to_str(tagObj[":@"], options); + const tagStart = indentation + `<${tagName}${attStr}`; + const tagValue = arrToStr(tagObj[tagName], options, newJPath, newIdentation); + if (options.unpairedTags.indexOf(tagName) !== -1) { + if (options.suppressUnpairedNode) xmlStr += tagStart + ">"; + else xmlStr += tagStart + "/>"; + } else if ((!tagValue || tagValue.length === 0) && options.suppressEmptyNode) { + xmlStr += tagStart + "/>"; + } else if (tagValue && tagValue.endsWith(">")) { + xmlStr += tagStart + `>${tagValue}${indentation}`; + } else { + xmlStr += tagStart + ">"; + if (tagValue && indentation !== "" && (tagValue.includes("/>") || tagValue.includes("`; + } + isPreviousElementTag = true; + } + + return xmlStr; +} + +function propName(obj) { + const keys = Object.keys(obj); + for (let i = 0; i < keys.length; i++) { + const key = keys[i]; + if(!obj.hasOwnProperty(key)) continue; + if (key !== ":@") return key; + } +} + +function attr_to_str(attrMap, options) { + let attrStr = ""; + if (attrMap && !options.ignoreAttributes) { + for (let attr in attrMap) { + if(!attrMap.hasOwnProperty(attr)) continue; + let attrVal = options.attributeValueProcessor(attr, attrMap[attr]); + attrVal = replaceEntitiesValue(attrVal, options); + if (attrVal === true && options.suppressBooleanAttributes) { + attrStr += ` ${attr.substr(options.attributeNamePrefix.length)}`; + } else { + attrStr += ` ${attr.substr(options.attributeNamePrefix.length)}="${attrVal}"`; + } + } + } + return attrStr; +} + +function isStopNode(jPath, options) { + jPath = jPath.substr(0, jPath.length - options.textNodeName.length - 1); + let tagName = jPath.substr(jPath.lastIndexOf(".") + 1); + for (let index in options.stopNodes) { + 
if (options.stopNodes[index] === jPath || options.stopNodes[index] === "*." + tagName) return true; + } + return false; +} + +function replaceEntitiesValue(textValue, options) { + if (textValue && textValue.length > 0 && options.processEntities) { + for (let i = 0; i < options.entities.length; i++) { + const entity = options.entities[i]; + textValue = textValue.replace(entity.regex, entity.val); + } + } + return textValue; +} +module.exports = toXml; diff --git a/amplify/functions/fetchDocuments/node_modules/fast-xml-parser/src/xmlbuilder/prettifyJs2Xml.js b/amplify/functions/fetchDocuments/node_modules/fast-xml-parser/src/xmlbuilder/prettifyJs2Xml.js new file mode 100644 index 0000000..e69de29 diff --git a/amplify/functions/fetchDocuments/node_modules/fast-xml-parser/src/xmlparser/DocTypeReader.js b/amplify/functions/fetchDocuments/node_modules/fast-xml-parser/src/xmlparser/DocTypeReader.js new file mode 100644 index 0000000..bcf9dee --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/fast-xml-parser/src/xmlparser/DocTypeReader.js @@ -0,0 +1,152 @@ +const util = require('../util'); + +//TODO: handle comments +function readDocType(xmlData, i){ + + const entities = {}; + if( xmlData[i + 3] === 'O' && + xmlData[i + 4] === 'C' && + xmlData[i + 5] === 'T' && + xmlData[i + 6] === 'Y' && + xmlData[i + 7] === 'P' && + xmlData[i + 8] === 'E') + { + i = i+9; + let angleBracketsCount = 1; + let hasBody = false, comment = false; + let exp = ""; + for(;i') { //Read tag content + if(comment){ + if( xmlData[i - 1] === "-" && xmlData[i - 2] === "-"){ + comment = false; + angleBracketsCount--; + } + }else{ + angleBracketsCount--; + } + if (angleBracketsCount === 0) { + break; + } + }else if( xmlData[i] === '['){ + hasBody = true; + }else{ + exp += xmlData[i]; + } + } + if(angleBracketsCount !== 0){ + throw new Error(`Unclosed DOCTYPE`); + } + }else{ + throw new Error(`Invalid Tag instead of DOCTYPE`); + } + return {entities, i}; +} + +function 
readEntityExp(xmlData,i){ + //External entities are not supported + // + + //Parameter entities are not supported + // + + //Internal entities are supported + // + + //read EntityName + let entityName = ""; + for (; i < xmlData.length && (xmlData[i] !== "'" && xmlData[i] !== '"' ); i++) { + // if(xmlData[i] === " ") continue; + // else + entityName += xmlData[i]; + } + entityName = entityName.trim(); + if(entityName.indexOf(" ") !== -1) throw new Error("External entites are not supported"); + + //read Entity Value + const startChar = xmlData[i++]; + let val = "" + for (; i < xmlData.length && xmlData[i] !== startChar ; i++) { + val += xmlData[i]; + } + return [entityName, val, i]; +} + +function isComment(xmlData, i){ + if(xmlData[i+1] === '!' && + xmlData[i+2] === '-' && + xmlData[i+3] === '-') return true + return false +} +function isEntity(xmlData, i){ + if(xmlData[i+1] === '!' && + xmlData[i+2] === 'E' && + xmlData[i+3] === 'N' && + xmlData[i+4] === 'T' && + xmlData[i+5] === 'I' && + xmlData[i+6] === 'T' && + xmlData[i+7] === 'Y') return true + return false +} +function isElement(xmlData, i){ + if(xmlData[i+1] === '!' && + xmlData[i+2] === 'E' && + xmlData[i+3] === 'L' && + xmlData[i+4] === 'E' && + xmlData[i+5] === 'M' && + xmlData[i+6] === 'E' && + xmlData[i+7] === 'N' && + xmlData[i+8] === 'T') return true + return false +} + +function isAttlist(xmlData, i){ + if(xmlData[i+1] === '!' && + xmlData[i+2] === 'A' && + xmlData[i+3] === 'T' && + xmlData[i+4] === 'T' && + xmlData[i+5] === 'L' && + xmlData[i+6] === 'I' && + xmlData[i+7] === 'S' && + xmlData[i+8] === 'T') return true + return false +} +function isNotation(xmlData, i){ + if(xmlData[i+1] === '!' 
&& + xmlData[i+2] === 'N' && + xmlData[i+3] === 'O' && + xmlData[i+4] === 'T' && + xmlData[i+5] === 'A' && + xmlData[i+6] === 'T' && + xmlData[i+7] === 'I' && + xmlData[i+8] === 'O' && + xmlData[i+9] === 'N') return true + return false +} + +function validateEntityName(name){ + if (util.isName(name)) + return name; + else + throw new Error(`Invalid entity name ${name}`); +} + +module.exports = readDocType; diff --git a/amplify/functions/fetchDocuments/node_modules/fast-xml-parser/src/xmlparser/OptionsBuilder.js b/amplify/functions/fetchDocuments/node_modules/fast-xml-parser/src/xmlparser/OptionsBuilder.js new file mode 100644 index 0000000..bca3776 --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/fast-xml-parser/src/xmlparser/OptionsBuilder.js @@ -0,0 +1,48 @@ + +const defaultOptions = { + preserveOrder: false, + attributeNamePrefix: '@_', + attributesGroupName: false, + textNodeName: '#text', + ignoreAttributes: true, + removeNSPrefix: false, // remove NS from tag name or attribute name if true + allowBooleanAttributes: false, //a tag can have attributes without any value + //ignoreRootElement : false, + parseTagValue: true, + parseAttributeValue: false, + trimValues: true, //Trim string values of tag and attributes + cdataPropName: false, + numberParseOptions: { + hex: true, + leadingZeros: true, + eNotation: true + }, + tagValueProcessor: function(tagName, val) { + return val; + }, + attributeValueProcessor: function(attrName, val) { + return val; + }, + stopNodes: [], //nested tags will not be parsed even for errors + alwaysCreateTextNode: false, + isArray: () => false, + commentPropName: false, + unpairedTags: [], + processEntities: true, + htmlEntities: false, + ignoreDeclaration: false, + ignorePiTags: false, + transformTagName: false, + transformAttributeName: false, + updateTag: function(tagName, jPath, attrs){ + return tagName + }, + // skipEmptyListItem: false +}; + +const buildOptions = function(options) { + return Object.assign({}, 
defaultOptions, options); +}; + +exports.buildOptions = buildOptions; +exports.defaultOptions = defaultOptions; \ No newline at end of file diff --git a/amplify/functions/fetchDocuments/node_modules/fast-xml-parser/src/xmlparser/OrderedObjParser.js b/amplify/functions/fetchDocuments/node_modules/fast-xml-parser/src/xmlparser/OrderedObjParser.js new file mode 100644 index 0000000..ffd3f24 --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/fast-xml-parser/src/xmlparser/OrderedObjParser.js @@ -0,0 +1,601 @@ +'use strict'; +///@ts-check + +const util = require('../util'); +const xmlNode = require('./xmlNode'); +const readDocType = require("./DocTypeReader"); +const toNumber = require("strnum"); + +// const regx = +// '<((!\\[CDATA\\[([\\s\\S]*?)(]]>))|((NAME:)?(NAME))([^>]*)>|((\\/)(NAME)\\s*>))([^<]*)' +// .replace(/NAME/g, util.nameRegexp); + +//const tagsRegx = new RegExp("<(\\/?[\\w:\\-\._]+)([^>]*)>(\\s*"+cdataRegx+")*([^<]+)?","g"); +//const tagsRegx = new RegExp("<(\\/?)((\\w*:)?([\\w:\\-\._]+))([^>]*)>([^<]*)("+cdataRegx+"([^<]*))*([^<]+)?","g"); + +class OrderedObjParser{ + constructor(options){ + this.options = options; + this.currentNode = null; + this.tagsNodeStack = []; + this.docTypeEntities = {}; + this.lastEntities = { + "apos" : { regex: /&(apos|#39|#x27);/g, val : "'"}, + "gt" : { regex: /&(gt|#62|#x3E);/g, val : ">"}, + "lt" : { regex: /&(lt|#60|#x3C);/g, val : "<"}, + "quot" : { regex: /&(quot|#34|#x22);/g, val : "\""}, + }; + this.ampEntity = { regex: /&(amp|#38|#x26);/g, val : "&"}; + this.htmlEntities = { + "space": { regex: /&(nbsp|#160);/g, val: " " }, + // "lt" : { regex: /&(lt|#60);/g, val: "<" }, + // "gt" : { regex: /&(gt|#62);/g, val: ">" }, + // "amp" : { regex: /&(amp|#38);/g, val: "&" }, + // "quot" : { regex: /&(quot|#34);/g, val: "\"" }, + // "apos" : { regex: /&(apos|#39);/g, val: "'" }, + "cent" : { regex: /&(cent|#162);/g, val: "¢" }, + "pound" : { regex: /&(pound|#163);/g, val: "£" }, + "yen" : { regex: 
/&(yen|#165);/g, val: "¥" }, + "euro" : { regex: /&(euro|#8364);/g, val: "€" }, + "copyright" : { regex: /&(copy|#169);/g, val: "©" }, + "reg" : { regex: /&(reg|#174);/g, val: "®" }, + "inr" : { regex: /&(inr|#8377);/g, val: "₹" }, + "num_dec": { regex: /&#([0-9]{1,7});/g, val : (_, str) => String.fromCharCode(Number.parseInt(str, 10)) }, + "num_hex": { regex: /&#x([0-9a-fA-F]{1,6});/g, val : (_, str) => String.fromCharCode(Number.parseInt(str, 16)) }, + }; + this.addExternalEntities = addExternalEntities; + this.parseXml = parseXml; + this.parseTextData = parseTextData; + this.resolveNameSpace = resolveNameSpace; + this.buildAttributesMap = buildAttributesMap; + this.isItStopNode = isItStopNode; + this.replaceEntitiesValue = replaceEntitiesValue; + this.readStopNodeData = readStopNodeData; + this.saveTextToParentTag = saveTextToParentTag; + this.addChild = addChild; + } + +} + +function addExternalEntities(externalEntities){ + const entKeys = Object.keys(externalEntities); + for (let i = 0; i < entKeys.length; i++) { + const ent = entKeys[i]; + this.lastEntities[ent] = { + regex: new RegExp("&"+ent+";","g"), + val : externalEntities[ent] + } + } +} + +/** + * @param {string} val + * @param {string} tagName + * @param {string} jPath + * @param {boolean} dontTrim + * @param {boolean} hasAttributes + * @param {boolean} isLeafNode + * @param {boolean} escapeEntities + */ +function parseTextData(val, tagName, jPath, dontTrim, hasAttributes, isLeafNode, escapeEntities) { + if (val !== undefined) { + if (this.options.trimValues && !dontTrim) { + val = val.trim(); + } + if(val.length > 0){ + if(!escapeEntities) val = this.replaceEntitiesValue(val); + + const newval = this.options.tagValueProcessor(tagName, val, jPath, hasAttributes, isLeafNode); + if(newval === null || newval === undefined){ + //don't parse + return val; + }else if(typeof newval !== typeof val || newval !== val){ + //overwrite + return newval; + }else if(this.options.trimValues){ + return parseValue(val, 
this.options.parseTagValue, this.options.numberParseOptions); + }else{ + const trimmedVal = val.trim(); + if(trimmedVal === val){ + return parseValue(val, this.options.parseTagValue, this.options.numberParseOptions); + }else{ + return val; + } + } + } + } +} + +function resolveNameSpace(tagname) { + if (this.options.removeNSPrefix) { + const tags = tagname.split(':'); + const prefix = tagname.charAt(0) === '/' ? '/' : ''; + if (tags[0] === 'xmlns') { + return ''; + } + if (tags.length === 2) { + tagname = prefix + tags[1]; + } + } + return tagname; +} + +//TODO: change regex to capture NS +//const attrsRegx = new RegExp("([\\w\\-\\.\\:]+)\\s*=\\s*(['\"])((.|\n)*?)\\2","gm"); +const attrsRegx = new RegExp('([^\\s=]+)\\s*(=\\s*([\'"])([\\s\\S]*?)\\3)?', 'gm'); + +function buildAttributesMap(attrStr, jPath, tagName) { + if (!this.options.ignoreAttributes && typeof attrStr === 'string') { + // attrStr = attrStr.replace(/\r?\n/g, ' '); + //attrStr = attrStr || attrStr.trim(); + + const matches = util.getAllMatches(attrStr, attrsRegx); + const len = matches.length; //don't make it inline + const attrs = {}; + for (let i = 0; i < len; i++) { + const attrName = this.resolveNameSpace(matches[i][1]); + let oldVal = matches[i][4]; + let aName = this.options.attributeNamePrefix + attrName; + if (attrName.length) { + if (this.options.transformAttributeName) { + aName = this.options.transformAttributeName(aName); + } + if(aName === "__proto__") aName = "#__proto__"; + if (oldVal !== undefined) { + if (this.options.trimValues) { + oldVal = oldVal.trim(); + } + oldVal = this.replaceEntitiesValue(oldVal); + const newVal = this.options.attributeValueProcessor(attrName, oldVal, jPath); + if(newVal === null || newVal === undefined){ + //don't parse + attrs[aName] = oldVal; + }else if(typeof newVal !== typeof oldVal || newVal !== oldVal){ + //overwrite + attrs[aName] = newVal; + }else{ + //parse + attrs[aName] = parseValue( + oldVal, + this.options.parseAttributeValue, + 
this.options.numberParseOptions + ); + } + } else if (this.options.allowBooleanAttributes) { + attrs[aName] = true; + } + } + } + if (!Object.keys(attrs).length) { + return; + } + if (this.options.attributesGroupName) { + const attrCollection = {}; + attrCollection[this.options.attributesGroupName] = attrs; + return attrCollection; + } + return attrs + } +} + +const parseXml = function(xmlData) { + xmlData = xmlData.replace(/\r\n?/g, "\n"); //TODO: remove this line + const xmlObj = new xmlNode('!xml'); + let currentNode = xmlObj; + let textData = ""; + let jPath = ""; + for(let i=0; i< xmlData.length; i++){//for each char in XML data + const ch = xmlData[i]; + if(ch === '<'){ + // const nextIndex = i+1; + // const _2ndChar = xmlData[nextIndex]; + if( xmlData[i+1] === '/') {//Closing Tag + const closeIndex = findClosingIndex(xmlData, ">", i, "Closing Tag is not closed.") + let tagName = xmlData.substring(i+2,closeIndex).trim(); + + if(this.options.removeNSPrefix){ + const colonIndex = tagName.indexOf(":"); + if(colonIndex !== -1){ + tagName = tagName.substr(colonIndex+1); + } + } + + if(this.options.transformTagName) { + tagName = this.options.transformTagName(tagName); + } + + if(currentNode){ + textData = this.saveTextToParentTag(textData, currentNode, jPath); + } + + //check if last tag of nested tag was unpaired tag + const lastTagName = jPath.substring(jPath.lastIndexOf(".")+1); + if(tagName && this.options.unpairedTags.indexOf(tagName) !== -1 ){ + throw new Error(`Unpaired tag can not be used as closing tag: `); + } + let propIndex = 0 + if(lastTagName && this.options.unpairedTags.indexOf(lastTagName) !== -1 ){ + propIndex = jPath.lastIndexOf('.', jPath.lastIndexOf('.')-1) + this.tagsNodeStack.pop(); + }else{ + propIndex = jPath.lastIndexOf("."); + } + jPath = jPath.substring(0, propIndex); + + currentNode = this.tagsNodeStack.pop();//avoid recursion, set the parent tag scope + textData = ""; + i = closeIndex; + } else if( xmlData[i+1] === '?') { + + let 
tagData = readTagExp(xmlData,i, false, "?>"); + if(!tagData) throw new Error("Pi Tag is not closed."); + + textData = this.saveTextToParentTag(textData, currentNode, jPath); + if( (this.options.ignoreDeclaration && tagData.tagName === "?xml") || this.options.ignorePiTags){ + + }else{ + + const childNode = new xmlNode(tagData.tagName); + childNode.add(this.options.textNodeName, ""); + + if(tagData.tagName !== tagData.tagExp && tagData.attrExpPresent){ + childNode[":@"] = this.buildAttributesMap(tagData.tagExp, jPath, tagData.tagName); + } + this.addChild(currentNode, childNode, jPath) + + } + + + i = tagData.closeIndex + 1; + } else if(xmlData.substr(i + 1, 3) === '!--') { + const endIndex = findClosingIndex(xmlData, "-->", i+4, "Comment is not closed.") + if(this.options.commentPropName){ + const comment = xmlData.substring(i + 4, endIndex - 2); + + textData = this.saveTextToParentTag(textData, currentNode, jPath); + + currentNode.add(this.options.commentPropName, [ { [this.options.textNodeName] : comment } ]); + } + i = endIndex; + } else if( xmlData.substr(i + 1, 2) === '!D') { + const result = readDocType(xmlData, i); + this.docTypeEntities = result.entities; + i = result.i; + }else if(xmlData.substr(i + 1, 2) === '![') { + const closeIndex = findClosingIndex(xmlData, "]]>", i, "CDATA is not closed.") - 2; + const tagExp = xmlData.substring(i + 9,closeIndex); + + textData = this.saveTextToParentTag(textData, currentNode, jPath); + + let val = this.parseTextData(tagExp, currentNode.tagname, jPath, true, false, true, true); + if(val == undefined) val = ""; + + //cdata should be set even if it is 0 length string + if(this.options.cdataPropName){ + currentNode.add(this.options.cdataPropName, [ { [this.options.textNodeName] : tagExp } ]); + }else{ + currentNode.add(this.options.textNodeName, val); + } + + i = closeIndex + 2; + }else {//Opening tag + let result = readTagExp(xmlData,i, this.options.removeNSPrefix); + let tagName= result.tagName; + const rawTagName = 
result.rawTagName; + let tagExp = result.tagExp; + let attrExpPresent = result.attrExpPresent; + let closeIndex = result.closeIndex; + + if (this.options.transformTagName) { + tagName = this.options.transformTagName(tagName); + } + + //save text as child node + if (currentNode && textData) { + if(currentNode.tagname !== '!xml'){ + //when nested tag is found + textData = this.saveTextToParentTag(textData, currentNode, jPath, false); + } + } + + //check if last tag was unpaired tag + const lastTag = currentNode; + if(lastTag && this.options.unpairedTags.indexOf(lastTag.tagname) !== -1 ){ + currentNode = this.tagsNodeStack.pop(); + jPath = jPath.substring(0, jPath.lastIndexOf(".")); + } + if(tagName !== xmlObj.tagname){ + jPath += jPath ? "." + tagName : tagName; + } + if (this.isItStopNode(this.options.stopNodes, jPath, tagName)) { + let tagContent = ""; + //self-closing tag + if(tagExp.length > 0 && tagExp.lastIndexOf("/") === tagExp.length - 1){ + if(tagName[tagName.length - 1] === "/"){ //remove trailing '/' + tagName = tagName.substr(0, tagName.length - 1); + jPath = jPath.substr(0, jPath.length - 1); + tagExp = tagName; + }else{ + tagExp = tagExp.substr(0, tagExp.length - 1); + } + i = result.closeIndex; + } + //unpaired tag + else if(this.options.unpairedTags.indexOf(tagName) !== -1){ + + i = result.closeIndex; + } + //normal tag + else{ + //read until closing tag is found + const result = this.readStopNodeData(xmlData, rawTagName, closeIndex + 1); + if(!result) throw new Error(`Unexpected end of ${rawTagName}`); + i = result.i; + tagContent = result.tagContent; + } + + const childNode = new xmlNode(tagName); + if(tagName !== tagExp && attrExpPresent){ + childNode[":@"] = this.buildAttributesMap(tagExp, jPath, tagName); + } + if(tagContent) { + tagContent = this.parseTextData(tagContent, tagName, jPath, true, attrExpPresent, true, true); + } + + jPath = jPath.substr(0, jPath.lastIndexOf(".")); + childNode.add(this.options.textNodeName, tagContent); + + 
this.addChild(currentNode, childNode, jPath) + }else{ + //selfClosing tag + if(tagExp.length > 0 && tagExp.lastIndexOf("/") === tagExp.length - 1){ + if(tagName[tagName.length - 1] === "/"){ //remove trailing '/' + tagName = tagName.substr(0, tagName.length - 1); + jPath = jPath.substr(0, jPath.length - 1); + tagExp = tagName; + }else{ + tagExp = tagExp.substr(0, tagExp.length - 1); + } + + if(this.options.transformTagName) { + tagName = this.options.transformTagName(tagName); + } + + const childNode = new xmlNode(tagName); + if(tagName !== tagExp && attrExpPresent){ + childNode[":@"] = this.buildAttributesMap(tagExp, jPath, tagName); + } + this.addChild(currentNode, childNode, jPath) + jPath = jPath.substr(0, jPath.lastIndexOf(".")); + } + //opening tag + else{ + const childNode = new xmlNode( tagName); + this.tagsNodeStack.push(currentNode); + + if(tagName !== tagExp && attrExpPresent){ + childNode[":@"] = this.buildAttributesMap(tagExp, jPath, tagName); + } + this.addChild(currentNode, childNode, jPath) + currentNode = childNode; + } + textData = ""; + i = closeIndex; + } + } + }else{ + textData += xmlData[i]; + } + } + return xmlObj.child; +} + +function addChild(currentNode, childNode, jPath){ + const result = this.options.updateTag(childNode.tagname, jPath, childNode[":@"]) + if(result === false){ + }else if(typeof result === "string"){ + childNode.tagname = result + currentNode.addChild(childNode); + }else{ + currentNode.addChild(childNode); + } +} + +const replaceEntitiesValue = function(val){ + + if(this.options.processEntities){ + for(let entityName in this.docTypeEntities){ + const entity = this.docTypeEntities[entityName]; + val = val.replace( entity.regx, entity.val); + } + for(let entityName in this.lastEntities){ + const entity = this.lastEntities[entityName]; + val = val.replace( entity.regex, entity.val); + } + if(this.options.htmlEntities){ + for(let entityName in this.htmlEntities){ + const entity = this.htmlEntities[entityName]; + val = 
val.replace( entity.regex, entity.val); + } + } + val = val.replace( this.ampEntity.regex, this.ampEntity.val); + } + return val; +} +function saveTextToParentTag(textData, currentNode, jPath, isLeafNode) { + if (textData) { //store previously collected data as textNode + if(isLeafNode === undefined) isLeafNode = Object.keys(currentNode.child).length === 0 + + textData = this.parseTextData(textData, + currentNode.tagname, + jPath, + false, + currentNode[":@"] ? Object.keys(currentNode[":@"]).length !== 0 : false, + isLeafNode); + + if (textData !== undefined && textData !== "") + currentNode.add(this.options.textNodeName, textData); + textData = ""; + } + return textData; +} + +//TODO: use jPath to simplify the logic +/** + * + * @param {string[]} stopNodes + * @param {string} jPath + * @param {string} currentTagName + */ +function isItStopNode(stopNodes, jPath, currentTagName){ + const allNodesExp = "*." + currentTagName; + for (const stopNodePath in stopNodes) { + const stopNodeExp = stopNodes[stopNodePath]; + if( allNodesExp === stopNodeExp || jPath === stopNodeExp ) return true; + } + return false; +} + +/** + * Returns the tag Expression and where it is ending handling single-double quotes situation + * @param {string} xmlData + * @param {number} i starting index + * @returns + */ +function tagExpWithClosingIndex(xmlData, i, closingChar = ">"){ + let attrBoundary; + let tagExp = ""; + for (let index = i; index < xmlData.length; index++) { + let ch = xmlData[index]; + if (attrBoundary) { + if (ch === attrBoundary) attrBoundary = "";//reset + } else if (ch === '"' || ch === "'") { + attrBoundary = ch; + } else if (ch === closingChar[0]) { + if(closingChar[1]){ + if(xmlData[index + 1] === closingChar[1]){ + return { + data: tagExp, + index: index + } + } + }else{ + return { + data: tagExp, + index: index + } + } + } else if (ch === '\t') { + ch = " " + } + tagExp += ch; + } +} + +function findClosingIndex(xmlData, str, i, errMsg){ + const closingIndex = 
xmlData.indexOf(str, i); + if(closingIndex === -1){ + throw new Error(errMsg) + }else{ + return closingIndex + str.length - 1; + } +} + +function readTagExp(xmlData,i, removeNSPrefix, closingChar = ">"){ + const result = tagExpWithClosingIndex(xmlData, i+1, closingChar); + if(!result) return; + let tagExp = result.data; + const closeIndex = result.index; + const separatorIndex = tagExp.search(/\s/); + let tagName = tagExp; + let attrExpPresent = true; + if(separatorIndex !== -1){//separate tag name and attributes expression + tagName = tagExp.substring(0, separatorIndex); + tagExp = tagExp.substring(separatorIndex + 1).trimStart(); + } + + const rawTagName = tagName; + if(removeNSPrefix){ + const colonIndex = tagName.indexOf(":"); + if(colonIndex !== -1){ + tagName = tagName.substr(colonIndex+1); + attrExpPresent = tagName !== result.data.substr(colonIndex + 1); + } + } + + return { + tagName: tagName, + tagExp: tagExp, + closeIndex: closeIndex, + attrExpPresent: attrExpPresent, + rawTagName: rawTagName, + } +} +/** + * find paired tag for a stop node + * @param {string} xmlData + * @param {string} tagName + * @param {number} i + */ +function readStopNodeData(xmlData, tagName, i){ + const startIndex = i; + // Starting at 1 since we already have an open tag + let openTagCount = 1; + + for (; i < xmlData.length; i++) { + if( xmlData[i] === "<"){ + if (xmlData[i+1] === "/") {//close tag + const closeIndex = findClosingIndex(xmlData, ">", i, `${tagName} is not closed`); + let closeTagName = xmlData.substring(i+2,closeIndex).trim(); + if(closeTagName === tagName){ + openTagCount--; + if (openTagCount === 0) { + return { + tagContent: xmlData.substring(startIndex, i), + i : closeIndex + } + } + } + i=closeIndex; + } else if(xmlData[i+1] === '?') { + const closeIndex = findClosingIndex(xmlData, "?>", i+1, "StopNode is not closed.") + i=closeIndex; + } else if(xmlData.substr(i + 1, 3) === '!--') { + const closeIndex = findClosingIndex(xmlData, "-->", i+3, "StopNode is not 
closed.") + i=closeIndex; + } else if(xmlData.substr(i + 1, 2) === '![') { + const closeIndex = findClosingIndex(xmlData, "]]>", i, "StopNode is not closed.") - 2; + i=closeIndex; + } else { + const tagData = readTagExp(xmlData, i, '>') + + if (tagData) { + const openTagName = tagData && tagData.tagName; + if (openTagName === tagName && tagData.tagExp[tagData.tagExp.length-1] !== "/") { + openTagCount++; + } + i=tagData.closeIndex; + } + } + } + }//end for loop +} + +function parseValue(val, shouldParse, options) { + if (shouldParse && typeof val === 'string') { + //console.log(options) + const newval = val.trim(); + if(newval === 'true' ) return true; + else if(newval === 'false' ) return false; + else return toNumber(val, options); + } else { + if (util.isExist(val)) { + return val; + } else { + return ''; + } + } +} + + +module.exports = OrderedObjParser; diff --git a/amplify/functions/fetchDocuments/node_modules/fast-xml-parser/src/xmlparser/XMLParser.js b/amplify/functions/fetchDocuments/node_modules/fast-xml-parser/src/xmlparser/XMLParser.js new file mode 100644 index 0000000..ffaf59b --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/fast-xml-parser/src/xmlparser/XMLParser.js @@ -0,0 +1,58 @@ +const { buildOptions} = require("./OptionsBuilder"); +const OrderedObjParser = require("./OrderedObjParser"); +const { prettify} = require("./node2json"); +const validator = require('../validator'); + +class XMLParser{ + + constructor(options){ + this.externalEntities = {}; + this.options = buildOptions(options); + + } + /** + * Parse XML dats to JS object + * @param {string|Buffer} xmlData + * @param {boolean|Object} validationOption + */ + parse(xmlData,validationOption){ + if(typeof xmlData === "string"){ + }else if( xmlData.toString){ + xmlData = xmlData.toString(); + }else{ + throw new Error("XML data is accepted in String or Bytes[] form.") + } + if( validationOption){ + if(validationOption === true) validationOption = {}; //validate with default 
options + + const result = validator.validate(xmlData, validationOption); + if (result !== true) { + throw Error( `${result.err.msg}:${result.err.line}:${result.err.col}` ) + } + } + const orderedObjParser = new OrderedObjParser(this.options); + orderedObjParser.addExternalEntities(this.externalEntities); + const orderedResult = orderedObjParser.parseXml(xmlData); + if(this.options.preserveOrder || orderedResult === undefined) return orderedResult; + else return prettify(orderedResult, this.options); + } + + /** + * Add Entity which is not by default supported by this library + * @param {string} key + * @param {string} value + */ + addEntity(key, value){ + if(value.indexOf("&") !== -1){ + throw new Error("Entity value can't have '&'") + }else if(key.indexOf("&") !== -1 || key.indexOf(";") !== -1){ + throw new Error("An entity must be set without '&' and ';'. Eg. use '#xD' for ' '") + }else if(value === "&"){ + throw new Error("An entity with value '&' is not permitted"); + }else{ + this.externalEntities[key] = value; + } + } +} + +module.exports = XMLParser; \ No newline at end of file diff --git a/amplify/functions/fetchDocuments/node_modules/fast-xml-parser/src/xmlparser/node2json.js b/amplify/functions/fetchDocuments/node_modules/fast-xml-parser/src/xmlparser/node2json.js new file mode 100644 index 0000000..3045573 --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/fast-xml-parser/src/xmlparser/node2json.js @@ -0,0 +1,113 @@ +'use strict'; + +/** + * + * @param {array} node + * @param {any} options + * @returns + */ +function prettify(node, options){ + return compress( node, options); +} + +/** + * + * @param {array} arr + * @param {object} options + * @param {string} jPath + * @returns object + */ +function compress(arr, options, jPath){ + let text; + const compressedObj = {}; + for (let i = 0; i < arr.length; i++) { + const tagObj = arr[i]; + const property = propName(tagObj); + let newJpath = ""; + if(jPath === undefined) newJpath = property; + 
else newJpath = jPath + "." + property; + + if(property === options.textNodeName){ + if(text === undefined) text = tagObj[property]; + else text += "" + tagObj[property]; + }else if(property === undefined){ + continue; + }else if(tagObj[property]){ + + let val = compress(tagObj[property], options, newJpath); + const isLeaf = isLeafTag(val, options); + + if(tagObj[":@"]){ + assignAttributes( val, tagObj[":@"], newJpath, options); + }else if(Object.keys(val).length === 1 && val[options.textNodeName] !== undefined && !options.alwaysCreateTextNode){ + val = val[options.textNodeName]; + }else if(Object.keys(val).length === 0){ + if(options.alwaysCreateTextNode) val[options.textNodeName] = ""; + else val = ""; + } + + if(compressedObj[property] !== undefined && compressedObj.hasOwnProperty(property)) { + if(!Array.isArray(compressedObj[property])) { + compressedObj[property] = [ compressedObj[property] ]; + } + compressedObj[property].push(val); + }else{ + //TODO: if a node is not an array, then check if it should be an array + //also determine if it is a leaf node + if (options.isArray(property, newJpath, isLeaf )) { + compressedObj[property] = [val]; + }else{ + compressedObj[property] = val; + } + } + } + + } + // if(text && text.length > 0) compressedObj[options.textNodeName] = text; + if(typeof text === "string"){ + if(text.length > 0) compressedObj[options.textNodeName] = text; + }else if(text !== undefined) compressedObj[options.textNodeName] = text; + return compressedObj; +} + +function propName(obj){ + const keys = Object.keys(obj); + for (let i = 0; i < keys.length; i++) { + const key = keys[i]; + if(key !== ":@") return key; + } +} + +function assignAttributes(obj, attrMap, jpath, options){ + if (attrMap) { + const keys = Object.keys(attrMap); + const len = keys.length; //don't make it inline + for (let i = 0; i < len; i++) { + const atrrName = keys[i]; + if (options.isArray(atrrName, jpath + "." 
+ atrrName, true, true)) { + obj[atrrName] = [ attrMap[atrrName] ]; + } else { + obj[atrrName] = attrMap[atrrName]; + } + } + } +} + +function isLeafTag(obj, options){ + const { textNodeName } = options; + const propCount = Object.keys(obj).length; + + if (propCount === 0) { + return true; + } + + if ( + propCount === 1 && + (obj[textNodeName] || typeof obj[textNodeName] === "boolean" || obj[textNodeName] === 0) + ) { + return true; + } + + return false; +} +exports.prettify = prettify; diff --git a/amplify/functions/fetchDocuments/node_modules/fast-xml-parser/src/xmlparser/xmlNode.js b/amplify/functions/fetchDocuments/node_modules/fast-xml-parser/src/xmlparser/xmlNode.js new file mode 100644 index 0000000..9319524 --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/fast-xml-parser/src/xmlparser/xmlNode.js @@ -0,0 +1,25 @@ +'use strict'; + +class XmlNode{ + constructor(tagname) { + this.tagname = tagname; + this.child = []; //nested tags, text, cdata, comments in order + this[":@"] = {}; //attributes map + } + add(key,val){ + // this.child.push( {name : key, val: val, isCdata: isCdata }); + if(key === "__proto__") key = "#__proto__"; + this.child.push( {[key]: val }); + } + addChild(node) { + if(node.tagname === "__proto__") node.tagname = "#__proto__"; + if(node[":@"] && Object.keys(node[":@"]).length > 0){ + this.child.push( { [node.tagname]: node.child, [":@"]: node[":@"] }); + }else{ + this.child.push( { [node.tagname]: node.child }); + } + }; +}; + + +module.exports = XmlNode; \ No newline at end of file diff --git a/amplify/functions/fetchDocuments/node_modules/mnemonist/CHANGELOG.md b/amplify/functions/fetchDocuments/node_modules/mnemonist/CHANGELOG.md new file mode 100644 index 0000000..286c2fe --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/mnemonist/CHANGELOG.md @@ -0,0 +1,305 @@ +# Changelog + +## 0.38.3 + +* Refactoring `VPTree` memory layout. +* Fixing `VPTree.nearestNeighbors` edge case. 
+* Various `VPTree` optimizations. + +## 0.38.2 + +* Fixing `Heap.replace` & `Heap.pusphpop` types (@wholenews). + +## 0.38.1 + +* Fixing `SparseQueueSet` deopt. + +## 0.38.0 + +* Adding `TrieMap.update` (@wholenews). + +## 0.37.0 + +* Adding `DefaultWeakMap` (@yoursunny). + +## 0.36.1 + +* Improved typings for iteration methods (@yoursunny). + +## 0.36.0 + +* Adding `SparseQueueSet`. + +## 0.35.0 + +* Adding `SparseMap`. +* Enhancing `SparseSet` performance. + +## 0.34.0 + +* Adding `set.overlap`. + +## 0.33.1 + +* Fixing build by including missing `sort` folder. + +## 0.33.0 + +* Adding `KDTree`. +* Adding `set.intersectionSize`. +* Adding `set.unionSize`. +* Adding `set.jaccard`. +* Adding `FixedReverseHeap.peek`. + +## 0.32.0 + +* Adding `PassjoinIndex`. + +## 0.31.3 + +* Fixing `Heap.nsmallest` & `Heap.nlargest` docs & typings. +* Fixing `Heap.nsmallest` & `Heap.nlargest` not using custom comparator function when `n = 1`. + +## 0.31.2 + +* Fixing `BitSet` & `BitVector` iteration methods edge case. +* Fixing `BitSet` & `BitVector` `#.select` method. + +## 0.31.1 + +* Fixing `BitSet` & `BitVector` `#.size` caching edge case. + +## 0.31.0 + +* Adding `DefaultMap.peek`. +* Fixing some error messages. +* Fixing `BitSet` & `BitVector` `#.size` caching. + +## 0.30.0 + +* Stricter TS definitions (`--noImplicitAny`, `--noImplicitReturns`) (@pbadenski). + +## 0.29.0 + +* Adding `LRUCache.setpop` and `LRUMap.setpop` (@veggiesaurus). + +## 0.28.0 + +* Adding `LRUCache.peek` and `LRUMap.peek` (@veggiesaurus). + +## 0.27.2 + +* Fixing usage with TypeScript. + +## 0.27.1 + +* Fixing `CircularBuffer` and `FixedDeque` types. + +## 0.27.0 + +* Adding `FixedDeque`. +* Adding `CircularBuffer.unshift`. +* Changing `CircularBuffer` semantics to now overwrite values when wrapping around. + +## 0.26.0 + +* Adding the `DefaultMap.autoIncrement` factory. +* Removing the `IncrementalMap`. +* Fixing `Vector` typings. +* Fixing `BitVector` typings. 
+ +## 0.25.1 + +* Fixing custom inspect methods for node >= 10. + +## 0.25.0 + +* Adding `LRUCache`. +* Adding `LRUMap`. + +## 0.24.0 + +* Adding `#.forEachMultiplicity` to `MultiSet`. +* Adding `#.forEachAssociation` to `MultiMap`. +* Adding `DefaultMap`. + +## 0.23.0 + +* Adding `FixedReverseHeap`. +* Adding `Heap.nsmallest` & `Heap.nlargest`. +* Adding `MultiSet.isSubset` & `MultiSet.isSuperset`. +* Adding `#.top` to `MultiSet`. +* Adding missing `Heap` types. +* Renaming `FiniteStack` to `FixedStack`. + +## 0.22.0 + +* Adding `FuzzyMultiMap.dimension`. +* Adding `#.consume` to `Heap`. +* Adding `#.replace` to `Heap`. +* Adding `#.pushpop` to `Heap`. +* Improving `BitSet` and `BitVector` `#.toJSON`. +* Improving `FiniteStack.from` & `CircularBuffer.from` performance when handling arrays. +* `Heap.from` is now linear time. +* Refactoring `Heap` inner logic. +* Fixing `CircularBuffer`'s `#.unshift` to `#.shift`. +* Fixing `SparseSet.delete` return consistency. + +## 0.21.0 + +* Library is now fully typed. +* Adding `CircularBuffer`. +* Adding `#.toArray` to `Heap`. + +## 0.20.0 + +* Adding `TrieMap`. +* Reworking the `Trie` considerably. + +## 0.19.0 + +* Adding `StaticIntervalTree`. +* Adding `PointerVector`. +* Adding `Queue.of`. +* Adding `Stack.of`. +* Improving `Vector` & `BitVector` reallocation performance. +* Improving `InvertedIndex` performance. + +## 0.18.O + +* Adding `FiniteStack`. +* Adding `#.keys` to `MultiSet`. +* Adding `#.count` alias to `MultiSet`. +* Adding `#.count` alias to `MultiMap`. +* Adding `#.remove` to `MultiMap`. +* Adding `Vector.from`. +* Adding `#.values` to `Vector`. +* Adding `#.entries` to `Vector`. +* Fixing bug when feeding invalid values to a `MultiSet`. +* Fixing `.from` static methods not taking byte arrays into account. +* Fixing bugs related to `Stack.pop` edge cases. +* Optimizing `Stack` performance. + +## 0.17.0 + +* Adding `HashedArrayTree`. +* Adding `BitVector`. +* Adding `#.frequency` to `MultiSet`. 
+* Adding `#.grow` to `DynamicArray`. +* Adding `#.reallocate` to `DynamicArray`. +* Adding `#.resize` to `DynamicArray`. +* Fixing several `MultiSet` issues. +* Renaming `DynamicArray` to `Vector`. +* Renaming the `DynamicArray.initialLength` option to `initialCapacity`. +* Renaming `DynamicArray.allocated` to `capacity`. +* Optimizing `MultiSet` performance. +* Optimizing `SparseSet` memory consumption. + +## 0.16.0 + +* Adding `#.has` to `FuzzyMap`. +* Adding `#.has` to `FuzzyMultiMap`. +* Adding `#.multiplicity` to `MultiMap`. +* Renaming `RangeMap` to `IncrementalMap`. +* Renaming `Index` to `FuzzyMap`. +* Renaming `MultiIndex` to `FuzzyMultiMap`. +* Renaming `DynamicArray` `initialSize` option to `initialLength`. +* Improving `MultiMap.set` performance. +* Improving `BitSet.reset` performance. +* Improving `Set.isSubset` & `Set.isSuperset` performance. + +## 0.15.0 + +* Adding `RangeMap`. +* Improving `MultiSet`. +* Out-of-bound `DynamicArray.set` will now correctly grow the array. +* Fixing `StaticDisjointSet.find` complexity. + +## O.14.0 + +* Adding `DynamicArray`. +* Adding `SparseSet`. +* Adding `StaticDisjointSet`. +* Adding iterator methods to `BitSet`. +* Adding `#.rank` & `#.select` to `BitSet`. +* `BitSet` now relies on `Uint32Array` rather than `Uint8Array`. +* Improving `BitSet` performances. +* Using `obliterator` to handle iterators. + +## 0.13.0 + +* Adding `BiMap`. +* Adding `BitSet`. +* Fixing universal iterator. + +## 0.12.0 + +* Adding `InvertedIndex`. + +## 0.11.0 + +* Adding bunch of set functions. + +## 0.10.2 + +* Fixing error in `Trie.get`. +* Fixing error related to `Trie.size`. + +## 0.10.1 + +* Fixing an error in `VPTree.neighbors`. + +## 0.10.0 + +* Adding `Index`. +* Adding `MultiIndex`. +* Adding `MultiMap`. +* Adding `MultiSet`. +* Adding `SymSpell`. + +## 0.9.0 + +* Adding `VPTree`. + +## 0.8.0 + +* Adding `BKTree`. + +## 0.7.0 + +* Adding `BloomFilter`. +* Adding static `#.from` method to all relevant structures. 
+* Adding iterators to all relevant structures. +* Removing the `MultiSet` until proper API is found. + +## 0.6.0 + +* Adding `MultiSet`. + +## 0.5.0 + +* Adding `SuffixArray` & `GeneralizedSuffixArray`. +* Better `Trie` sentinel. + +## 0.4.0 + +* Adding `Queue`. +* Adding possibility to pass custom comparator to `Heap` & `FibonacciHeap`. + +## 0.3.0 + +* Adding `FibonacciHeap`. +* Fixing bug related to `Heap`. + +## 0.2.0 + +* Adding `Trie`. + +## 0.1.0 + +* Adding `Heap`. + +## 0.0.1 + +* Adding `LinkedList`. +* Adding `Stack`. diff --git a/amplify/functions/fetchDocuments/node_modules/mnemonist/LICENSE.txt b/amplify/functions/fetchDocuments/node_modules/mnemonist/LICENSE.txt new file mode 100644 index 0000000..2d8d205 --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/mnemonist/LICENSE.txt @@ -0,0 +1,21 @@ +The MIT License (MIT) + +Copyright (c) 2016 Guillaume Plique (Yomguithereal) + +Permission is hereby granted, free of charge, to any person obtaining a copy +of this software and associated documentation files (the "Software"), to deal +in the Software without restriction, including without limitation the rights +to use, copy, modify, merge, publish, distribute, sublicense, and/or sell +copies of the Software, and to permit persons to whom the Software is +furnished to do so, subject to the following conditions: + +The above copyright notice and this permission notice shall be included in +all copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, +OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN +THE SOFTWARE. 
diff --git a/amplify/functions/fetchDocuments/node_modules/mnemonist/README.md b/amplify/functions/fetchDocuments/node_modules/mnemonist/README.md new file mode 100644 index 0000000..dffc9ef --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/mnemonist/README.md @@ -0,0 +1,112 @@ +[![Build Status](https://travis-ci.org/Yomguithereal/mnemonist.svg)](https://travis-ci.org/Yomguithereal/mnemonist) + +# Mnemonist + +Mnemonist is a curated collection of data structures for the JavaScript language. + +It gathers classic data structures (think heap, trie etc.) as well as more exotic ones such as Buckhard-Keller trees etc. + +It strives at being: + +* As performant as possible for a high-level language. +* Completely modular (don't need to import the whole library just to use a simple heap). +* Simple & straightforward to use and consistent with JavaScript standard objects' API. +* Completely typed and comfortably usable with Typescript. + +## Installation + +``` +npm install --save mnemonist +``` + +## Documentation + +Full documentation for the library can be found [here](https://yomguithereal.github.io/mnemonist). 
+ +**Classics** + +* [Heap](https://yomguithereal.github.io/mnemonist/heap) +* [Linked List](https://yomguithereal.github.io/mnemonist/linked-list) +* [LRUCache](https://yomguithereal.github.io/mnemonist/lru-cache), [LRUMap](https://yomguithereal.github.io/mnemonist/lru-map) +* [MultiMap](https://yomguithereal.github.io/mnemonist/multi-map) +* [MultiSet](https://yomguithereal.github.io/mnemonist/multi-set) +* [Queue](https://yomguithereal.github.io/mnemonist/queue) +* [Set (helpers)](https://yomguithereal.github.io/mnemonist/set) +* [Stack](https://yomguithereal.github.io/mnemonist/stack) +* [Trie](https://yomguithereal.github.io/mnemonist/trie) +* [TrieMap](https://yomguithereal.github.io/mnemonist/trie-map) + +**Low-level & structures for very specific use cases** + +* [Circular Buffer](https://yomguithereal.github.io/mnemonist/circular-buffer) +* [Fixed Deque](https://yomguithereal.github.io/mnemonist/fixed-deque) +* [Fibonacci Heap](https://yomguithereal.github.io/mnemonist/fibonacci-heap) +* [Fixed Reverse Heap](https://yomguithereal.github.io/mnemonist/fixed-reverse-heap) +* [Fixed Stack](https://yomguithereal.github.io/mnemonist/fixed-stack) +* [Hashed Array Tree](https://yomguithereal.github.io/mnemonist/hashed-array-tree) +* [Static DisjointSet](https://yomguithereal.github.io/mnemonist/static-disjoint-set) +* [SparseQueueSet](https://yomguithereal.github.io/mnemonist/sparse-queue-set) +* [SparseMap](https://yomguithereal.github.io/mnemonist/sparse-map) +* [SparseSet](https://yomguithereal.github.io/mnemonist/sparse-set) +* [Suffix Array](https://yomguithereal.github.io/mnemonist/suffix-array) +* [Generalized Suffix Array](https://yomguithereal.github.io/mnemonist/generalized-suffix-array) +* [Vector](https://yomguithereal.github.io/mnemonist/vector) + +**Information retrieval & Natural language processing** + +* [Fuzzy Map](https://yomguithereal.github.io/mnemonist/fuzzy-map) +* [Fuzzy MultiMap](https://yomguithereal.github.io/mnemonist/fuzzy-multi-map) 
+* [Inverted Index](https://yomguithereal.github.io/mnemonist/inverted-index) +* [Passjoin Index](https://yomguithereal.github.io/mnemonist/passjoin-index) +* [SymSpell](https://yomguithereal.github.io/mnemonist/symspell) + +**Space & time indexation** + +* [Static IntervalTree](https://yomguithereal.github.io/mnemonist/static-interval-tree) +* [KD-Tree](https://yomguithereal.github.io/mnemonist/kd-tree) + +**Metric space indexation** + +* [Burkhard-Keller Tree](https://yomguithereal.github.io/mnemonist/bk-tree) +* [Vantage Point Tree](https://yomguithereal.github.io/mnemonist/vp-tree) + +**Probabilistic & succinct data structures** + +* [BitSet](https://yomguithereal.github.io/mnemonist/bit-set) +* [BitVector](https://yomguithereal.github.io/mnemonist/bit-vector) +* [Bloom Filter](https://yomguithereal.github.io/mnemonist/bloom-filter) + +**Utility classes** + +* [BiMap](https://yomguithereal.github.io/mnemonist/bi-map) +* [DefaultMap](https://yomguithereal.github.io/mnemonist/default-map) +* [DefaultWeakMap](https://yomguithereal.github.io/mnemonist/default-weak-map) + +--- + +Note that this list does not include a `Graph` data structure, whose implementation is usually far too complex for the scope of this library. + +However, we advise the reader to take a look at the [`graphology`](https://graphology.github.io/) library instead. + +Don't find the data structure you need? Maybe we can work it out [together](https://github.com/Yomguithereal/mnemonist/issues). + +## Contribution + +Contributions are obviously welcome. Be sure to lint the code & add relevant unit tests. 
+ +``` +# Installing +git clone git@github.com:Yomguithereal/mnemonist.git +cd mnemonist +npm install + +# Linting +npm run lint + +# Running the unit tests +npm test +``` + +## License + +[MIT](LICENSE.txt) diff --git a/amplify/functions/fetchDocuments/node_modules/mnemonist/bi-map.d.ts b/amplify/functions/fetchDocuments/node_modules/mnemonist/bi-map.d.ts new file mode 100644 index 0000000..d0c2f76 --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/mnemonist/bi-map.d.ts @@ -0,0 +1,46 @@ +/** + * Mnemonist BiMap Typings + * ======================== + */ +export class InverseMap implements Iterable<[K, V]> { + + // Members + size: number; + inverse: BiMap; + + // Methods + clear(): void; + set(key: K, value: V): this; + delete(key: K): boolean; + has(key: K): boolean; + get(key: K): V; + forEach(callback: (value: V, key: K, map: this) => void, scope?: any): void; + keys(): IterableIterator; + values(): IterableIterator; + entries(): IterableIterator<[K, V]>; + [Symbol.iterator](): IterableIterator<[K, V]>; + inspect(): any; +} + +export default class BiMap implements Iterable<[K, V]> { + + // Members + size: number; + inverse: InverseMap; + + // Methods + clear(): void; + set(key: K, value: V): this; + delete(key: K): boolean; + has(key: K): boolean; + get(key: K): V; + forEach(callback: (value: V, key: K, map: this) => void, scope?: any): void; + keys(): IterableIterator; + values(): IterableIterator; + entries(): IterableIterator<[K, V]>; + [Symbol.iterator](): IterableIterator<[K, V]>; + inspect(): any; + + // Statics + static from(iterable: Iterable<[I, J]> | {[key: string]: J}): BiMap; +} diff --git a/amplify/functions/fetchDocuments/node_modules/mnemonist/bi-map.js b/amplify/functions/fetchDocuments/node_modules/mnemonist/bi-map.js new file mode 100644 index 0000000..3d5d03f --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/mnemonist/bi-map.js @@ -0,0 +1,195 @@ +/** + * Mnemonist BiMap + * ================ + * + * JavaScript 
implementation of a BiMap. + */ +var forEach = require('obliterator/foreach'); + +/** + * Inverse Map. + * + * @constructor + */ +function InverseMap(original) { + + this.size = 0; + this.items = new Map(); + this.inverse = original; +} + +/** + * BiMap. + * + * @constructor + */ +function BiMap() { + + this.size = 0; + this.items = new Map(); + this.inverse = new InverseMap(this); +} + +/** + * Method used to clear the map. + * + * @return {undefined} + */ +function clear() { + this.size = 0; + this.items.clear(); + this.inverse.items.clear(); +} + +BiMap.prototype.clear = clear; +InverseMap.prototype.clear = clear; + +/** + * Method used to set a relation. + * + * @param {any} key - Key. + * @param {any} value - Value. + * @return {BiMap|InverseMap} + */ +function set(key, value) { + + // First we need to attempt to see if the relation is not flawed + if (this.items.has(key)) { + var currentValue = this.items.get(key); + + // The relation already exists, we do nothing + if (currentValue === value) + return this; + else + this.inverse.items.delete(currentValue); + } + + if (this.inverse.items.has(value)) { + var currentKey = this.inverse.items.get(value); + + if (currentKey === key) + return this; + else + this.items.delete(currentKey); + } + + // Here we actually add the relation + this.items.set(key, value); + this.inverse.items.set(value, key); + + // Size + this.size = this.items.size; + this.inverse.size = this.inverse.items.size; + + return this; +} + +BiMap.prototype.set = set; +InverseMap.prototype.set = set; + +/** + * Method used to delete a relation. + * + * @param {any} key - Key. 
+ * @return {boolean} + */ +function del(key) { + if (this.items.has(key)) { + var currentValue = this.items.get(key); + + this.items.delete(key); + this.inverse.items.delete(currentValue); + + // Size + this.size = this.items.size; + this.inverse.size = this.inverse.items.size; + + return true; + } + + return false; +} + +BiMap.prototype.delete = del; +InverseMap.prototype.delete = del; + +/** + * Mapping some Map prototype function unto our two classes. + */ +var METHODS = ['has', 'get', 'forEach', 'keys', 'values', 'entries']; + +METHODS.forEach(function(name) { + BiMap.prototype[name] = InverseMap.prototype[name] = function() { + return Map.prototype[name].apply(this.items, arguments); + }; +}); + +/** + * Attaching the #.values method to Symbol.iterator if possible. + */ +if (typeof Symbol !== 'undefined') { + BiMap.prototype[Symbol.iterator] = BiMap.prototype.entries; + InverseMap.prototype[Symbol.iterator] = InverseMap.prototype.entries; +} + +/** + * Convenience known methods. + */ +BiMap.prototype.inspect = function() { + var dummy = { + left: this.items, + right: this.inverse.items + }; + + // Trick so that node displays the name of the constructor + Object.defineProperty(dummy, 'constructor', { + value: BiMap, + enumerable: false + }); + + return dummy; +}; + +if (typeof Symbol !== 'undefined') + BiMap.prototype[Symbol.for('nodejs.util.inspect.custom')] = BiMap.prototype.inspect; + +InverseMap.prototype.inspect = function() { + var dummy = { + left: this.inverse.items, + right: this.items + }; + + // Trick so that node displays the name of the constructor + Object.defineProperty(dummy, 'constructor', { + value: InverseMap, + enumerable: false + }); + + return dummy; +}; + +if (typeof Symbol !== 'undefined') + InverseMap.prototype[Symbol.for('nodejs.util.inspect.custom')] = InverseMap.prototype.inspect; + + +/** + * Static @.from function taking an arbitrary iterable & converting it into + * a bimap. + * + * @param {Iterable} iterable - Target iterable. 
+ * @return {BiMap} + */ +BiMap.from = function(iterable) { + var bimap = new BiMap(); + + forEach(iterable, function(value, key) { + bimap.set(key, value); + }); + + return bimap; +}; + +/** + * Exporting. + */ +module.exports = BiMap; diff --git a/amplify/functions/fetchDocuments/node_modules/mnemonist/bit-set.d.ts b/amplify/functions/fetchDocuments/node_modules/mnemonist/bit-set.d.ts new file mode 100644 index 0000000..cfeb0d1 --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/mnemonist/bit-set.d.ts @@ -0,0 +1,29 @@ +/** + * Mnemonist BitSet Typings + * ========================= + */ +export default class BitSet implements Iterable { + + // Members + length: number; + size: number; + + // Constructor + constructor(length: number); + + // Methods + clear(): void; + set(index: number, value?: boolean | number): void; + reset(index: number, value: boolean | number): void; + flip(index: number, value: boolean | number): void; + get(index: number): number; + test(index: number): boolean; + rank(r: number): number; + select(r: number): number; + forEach(callback: (index: number, value: number, set: this) => void, scope?: any): void; + values(): IterableIterator; + entries(): IterableIterator<[number, number]>; + [Symbol.iterator](): IterableIterator; + inspect(): any; + toJSON(): Array; +} diff --git a/amplify/functions/fetchDocuments/node_modules/mnemonist/bit-set.js b/amplify/functions/fetchDocuments/node_modules/mnemonist/bit-set.js new file mode 100644 index 0000000..f2445a0 --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/mnemonist/bit-set.js @@ -0,0 +1,379 @@ +/** + * Mnemonist BitSet + * ================= + * + * JavaScript implementation of a fixed-size BitSet based upon a Uint32Array. 
+ * + * Notes: + * - (i >> 5) is the same as ((i / 32) | 0) + * - (i & 0x0000001f) is the same as (i % 32) + * - I could use a Float64Array to store more in less blocks but I would lose + * the benefits of byte comparison to keep track of size without popcounts. + */ +var Iterator = require('obliterator/iterator'), + bitwise = require('./utils/bitwise.js'); + +/** + * BitSet. + * + * @constructor + */ +function BitSet(length) { + + // Properties + this.length = length; + this.clear(); + + // Methods + + // Statics +} + +/** + * Method used to clear the bit set. + * + * @return {undefined} + */ +BitSet.prototype.clear = function() { + + // Properties + this.size = 0; + this.array = new Uint32Array(Math.ceil(this.length / 32)); +}; + +/** + * Method used to set the given bit's value. + * + * @param {number} index - Target bit index. + * @param {number} value - Value to set. + * @return {BitSet} + */ +BitSet.prototype.set = function(index, value) { + var byteIndex = index >> 5, + pos = index & 0x0000001f, + oldBytes = this.array[byteIndex], + newBytes; + + if (value === 0 || value === false) + newBytes = this.array[byteIndex] &= ~(1 << pos); + else + newBytes = this.array[byteIndex] |= (1 << pos); + + // The operands of all bitwise operators are converted to *signed* 32-bit integers. + // Source: https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Operators/Bitwise_Operators#Signed_32-bit_integers + // Shifting by 31 changes the sign (i.e. 1 << 31 = -2147483648). + // Therefore, get unsigned representation by applying '>>> 0'. + newBytes = newBytes >>> 0; + + // Updating size + if (newBytes > oldBytes) + this.size++; + else if (newBytes < oldBytes) + this.size--; + + return this; +}; + +/** +* Method used to reset the given bit's value. +* +* @param {number} index - Target bit index. 
+* @return {BitSet} +*/ +BitSet.prototype.reset = function(index) { + var byteIndex = index >> 5, + pos = index & 0x0000001f, + oldBytes = this.array[byteIndex], + newBytes; + + newBytes = this.array[byteIndex] &= ~(1 << pos); + + // Updating size + if (newBytes < oldBytes) + this.size--; + + return this; +}; + +/** + * Method used to flip the value of the given bit. + * + * @param {number} index - Target bit index. + * @return {BitSet} + */ +BitSet.prototype.flip = function(index) { + var byteIndex = index >> 5, + pos = index & 0x0000001f, + oldBytes = this.array[byteIndex]; + + var newBytes = this.array[byteIndex] ^= (1 << pos); + + // Get unsigned representation. + newBytes = newBytes >>> 0; + + // Updating size + if (newBytes > oldBytes) + this.size++; + else if (newBytes < oldBytes) + this.size--; + + return this; +}; + +/** + * Method used to get the given bit's value. + * + * @param {number} index - Target bit index. + * @return {number} + */ +BitSet.prototype.get = function(index) { + var byteIndex = index >> 5, + pos = index & 0x0000001f; + + return (this.array[byteIndex] >> pos) & 1; +}; + +/** + * Method used to test the given bit's value. + * + * @param {number} index - Target bit index. + * @return {BitSet} + */ +BitSet.prototype.test = function(index) { + return Boolean(this.get(index)); +}; + +/** + * Method used to return the number of 1 from the beginning of the set up to + * the ith index. + * + * @param {number} i - Ith index (cannot be > length). 
+ * @return {number} + */ +BitSet.prototype.rank = function(i) { + if (this.size === 0) + return 0; + + var byteIndex = i >> 5, + pos = i & 0x0000001f, + r = 0; + + // Accessing the bytes before the last one + for (var j = 0; j < byteIndex; j++) + r += bitwise.table8Popcount(this.array[j]); + + // Handling masked last byte + var maskedByte = this.array[byteIndex] & ((1 << pos) - 1); + + r += bitwise.table8Popcount(maskedByte); + + return r; +}; + +/** + * Method used to return the position of the rth 1 in the set or -1 if the + * set is empty. + * + * Note: usually select is implemented using binary search over rank but I + * tend to think the following linear implementation is faster since here + * rank is O(n) anyway. + * + * @param {number} r - Rth 1 to select (should be < length). + * @return {number} + */ +BitSet.prototype.select = function(r) { + if (this.size === 0) + return -1; + + // TODO: throw? + if (r >= this.length) + return -1; + + var byte, + b = 32, + p = 0, + c = 0; + + for (var i = 0, l = this.array.length; i < l; i++) { + byte = this.array[i]; + + // The byte is empty, let's continue + if (byte === 0) + continue; + + // TODO: This branching might not be useful here + if (i === l - 1) + b = this.length % 32 || 32; + + // TODO: popcount should speed things up here + + for (var j = 0; j < b; j++, p++) { + c += (byte >> j) & 1; + + if (c === r) + return p; + } + } +}; + +/** + * Method used to iterate over the bit set's values. + * + * @param {function} callback - Function to call for each item. + * @param {object} scope - Optional scope. + * @return {undefined} + */ +BitSet.prototype.forEach = function(callback, scope) { + scope = arguments.length > 1 ? 
scope : this; + + var length = this.length, + byte, + bit, + b = 32; + + for (var i = 0, l = this.array.length; i < l; i++) { + byte = this.array[i]; + + if (i === l - 1) + b = length % 32 || 32; + + for (var j = 0; j < b; j++) { + bit = (byte >> j) & 1; + + callback.call(scope, bit, i * 32 + j); + } + } +}; + +/** + * Method used to create an iterator over a set's values. + * + * @return {Iterator} + */ +BitSet.prototype.values = function() { + var length = this.length, + inner = false, + byte, + bit, + array = this.array, + l = array.length, + i = 0, + j = -1, + b = 32; + + return new Iterator(function next() { + if (!inner) { + + if (i >= l) + return { + done: true + }; + + if (i === l - 1) + b = length % 32 || 32; + + byte = array[i++]; + inner = true; + j = -1; + } + + j++; + + if (j >= b) { + inner = false; + return next(); + } + + bit = (byte >> j) & 1; + + return { + value: bit + }; + }); +}; + +/** + * Method used to create an iterator over a set's entries. + * + * @return {Iterator} + */ +BitSet.prototype.entries = function() { + var length = this.length, + inner = false, + byte, + bit, + array = this.array, + index, + l = array.length, + i = 0, + j = -1, + b = 32; + + return new Iterator(function next() { + if (!inner) { + + if (i >= l) + return { + done: true + }; + + if (i === l - 1) + b = length % 32 || 32; + + byte = array[i++]; + inner = true; + j = -1; + } + + j++; + index = (~-i) * 32 + j; + + if (j >= b) { + inner = false; + return next(); + } + + bit = (byte >> j) & 1; + + return { + value: [index, bit] + }; + }); +}; + +/** + * Attaching the #.values method to Symbol.iterator if possible. + */ +if (typeof Symbol !== 'undefined') + BitSet.prototype[Symbol.iterator] = BitSet.prototype.values; + +/** + * Convenience known methods. 
+ */ +BitSet.prototype.inspect = function() { + var proxy = new Uint8Array(this.length); + + this.forEach(function(bit, i) { + proxy[i] = bit; + }); + + // Trick so that node displays the name of the constructor + Object.defineProperty(proxy, 'constructor', { + value: BitSet, + enumerable: false + }); + + return proxy; +}; + +if (typeof Symbol !== 'undefined') + BitSet.prototype[Symbol.for('nodejs.util.inspect.custom')] = BitSet.prototype.inspect; + +BitSet.prototype.toJSON = function() { + return Array.from(this.array); +}; + +/** + * Exporting. + */ +module.exports = BitSet; diff --git a/amplify/functions/fetchDocuments/node_modules/mnemonist/bit-vector.d.ts b/amplify/functions/fetchDocuments/node_modules/mnemonist/bit-vector.d.ts new file mode 100644 index 0000000..4005d3c --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/mnemonist/bit-vector.d.ts @@ -0,0 +1,42 @@ +/** + * Mnemonist BitVector Typings + * ============================ + */ +type BitVectorOptions = { + initialLength?: number; + initialCapacity?: number; + policy?: (capacity: number) => number; +} + +export default class BitVector implements Iterable { + + // Members + capacity: number; + length: number; + size: number; + + // Constructor + constructor(length: number); + constructor(options: BitVectorOptions); + + // Methods + clear(): void; + set(index: number, value?: boolean | number): this; + reset(index: number, value: boolean | number): void; + flip(index: number, value: boolean | number): void; + reallocate(capacity: number): this; + grow(capacity?: number): this; + resize(length: number): this; + push(value: boolean | number): number; + pop(): number | undefined; + get(index: number): number; + test(index: number): boolean; + rank(r: number): number; + select(r: number): number; + forEach(callback: (index: number, value: number, set: this) => void, scope?: any): void; + values(): IterableIterator; + entries(): IterableIterator<[number, number]>; + [Symbol.iterator](): 
IterableIterator; + inspect(): any; + toJSON(): Array; +} diff --git a/amplify/functions/fetchDocuments/node_modules/mnemonist/bit-vector.js b/amplify/functions/fetchDocuments/node_modules/mnemonist/bit-vector.js new file mode 100644 index 0000000..5ee01e6 --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/mnemonist/bit-vector.js @@ -0,0 +1,550 @@ +/** + * Mnemonist BitVector + * ==================== + * + * JavaScript implementation of a dynamic BitSet based upon a Uint32Array. + * + * Notes: + * - (i >> 5) is the same as ((i / 32) | 0) + * - (i & 0x0000001f) is the same as (i % 32) + * - I could use a Float64Array to store more in less blocks but I would lose + * the benefits of byte comparison to keep track of size without popcounts. + */ +var Iterator = require('obliterator/iterator'), + bitwise = require('./utils/bitwise.js'); + +/** + * Constants. + */ +var DEFAULT_GROWING_POLICY = function(capacity) { + return Math.max(1, Math.ceil(capacity * 1.5)); +}; + +/** + * Helpers. + */ +function createByteArray(capacity) { + return new Uint32Array(Math.ceil(capacity / 32)); +} + +/** + * BitVector. + * + * @constructor + */ +function BitVector(initialLengthOrOptions) { + var initialLength = initialLengthOrOptions || 0, + policy = DEFAULT_GROWING_POLICY; + + if (typeof initialLengthOrOptions === 'object') { + initialLength = ( + initialLengthOrOptions.initialLength || + initialLengthOrOptions.initialCapacity || + 0 + ); + policy = initialLengthOrOptions.policy || policy; + } + + this.size = 0; + this.length = initialLength; + this.capacity = Math.ceil(this.length / 32) * 32; + this.policy = policy; + this.array = createByteArray(this.capacity); +} + +/** + * Method used to set the given bit's value. + * + * @param {number} index - Target bit index. + * @param {number|boolean} value - Value to set. + * @return {BitVector} + */ +BitVector.prototype.set = function(index, value) { + + // Out of bounds? 
+ if (this.length < index) + throw new Error('BitVector.set: index out of bounds.'); + + var byteIndex = index >> 5, + pos = index & 0x0000001f, + oldBytes = this.array[byteIndex], + newBytes; + + if (value === 0 || value === false) + newBytes = this.array[byteIndex] &= ~(1 << pos); + else + newBytes = this.array[byteIndex] |= (1 << pos); + + // Get unsigned representation. + newBytes = newBytes >>> 0; + + // Updating size + if (newBytes > oldBytes) + this.size++; + else if (newBytes < oldBytes) + this.size--; + + return this; +}; + +/** +* Method used to reset the given bit's value. +* +* @param {number} index - Target bit index. +* @return {BitVector} +*/ +BitVector.prototype.reset = function(index) { + var byteIndex = index >> 5, + pos = index & 0x0000001f, + oldBytes = this.array[byteIndex], + newBytes; + + newBytes = this.array[byteIndex] &= ~(1 << pos); + + // Updating size + if (newBytes < oldBytes) + this.size--; + + return this; +}; + +/** + * Method used to flip the value of the given bit. + * + * @param {number} index - Target bit index. + * @return {BitVector} + */ +BitVector.prototype.flip = function(index) { + var byteIndex = index >> 5, + pos = index & 0x0000001f, + oldBytes = this.array[byteIndex]; + + var newBytes = this.array[byteIndex] ^= (1 << pos); + + // Get unsigned representation. + newBytes = newBytes >>> 0; + + // Updating size + if (newBytes > oldBytes) + this.size++; + else if (newBytes < oldBytes) + this.size--; + + return this; +}; + +/** + * Method used to apply the growing policy. + * + * @param {number} [override] - Override capacity. 
+ * @return {number} + */ +BitVector.prototype.applyPolicy = function(override) { + var newCapacity = this.policy(override || this.capacity); + + if (typeof newCapacity !== 'number' || newCapacity < 0) + throw new Error('mnemonist/bit-vector.applyPolicy: policy returned an invalid value (expecting a positive integer).'); + + if (newCapacity <= this.capacity) + throw new Error('mnemonist/bit-vector.applyPolicy: policy returned a less or equal capacity to allocate.'); + + // TODO: we should probably check that the returned number is an integer + + // Ceil to nearest 32 + return Math.ceil(newCapacity / 32) * 32; +}; + +/** + * Method used to reallocate the underlying array. + * + * @param {number} capacity - Target capacity. + * @return {BitVector} + */ +BitVector.prototype.reallocate = function(capacity) { + var virtualCapacity = capacity; + + capacity = Math.ceil(capacity / 32) * 32; + + if (virtualCapacity < this.length) + this.length = virtualCapacity; + + if (capacity === this.capacity) + return this; + + var oldArray = this.array; + + var storageLength = capacity / 32; + + if (storageLength === this.array.length) + return this; + + if (storageLength > this.array.length) { + this.array = new Uint32Array(storageLength); + this.array.set(oldArray, 0); + } + else { + this.array = oldArray.slice(0, storageLength); + } + + this.capacity = capacity; + + return this; +}; + +/** + * Method used to grow the array. + * + * @param {number} [capacity] - Optional capacity to match. 
+ * @return {BitVector} + */ +BitVector.prototype.grow = function(capacity) { + var newCapacity; + + if (typeof capacity === 'number') { + + if (this.capacity >= capacity) + return this; + + // We need to match the given capacity + newCapacity = this.capacity; + + while (newCapacity < capacity) + newCapacity = this.applyPolicy(newCapacity); + + this.reallocate(newCapacity); + + return this; + } + + // We need to run the policy once + newCapacity = this.applyPolicy(); + this.reallocate(newCapacity); + + return this; +}; + +/** + * Method used to resize the array. Won't deallocate. + * + * @param {number} length - Target length. + * @return {BitVector} + */ +BitVector.prototype.resize = function(length) { + if (length === this.length) + return this; + + if (length < this.length) { + this.length = length; + return this; + } + + this.length = length; + this.reallocate(length); + + return this; +}; + +/** + * Method used to push a value in the set. + * + * @param {number|boolean} value + * @return {BitVector} + */ +BitVector.prototype.push = function(value) { + if (this.capacity === this.length) + this.grow(); + + if (value === 0 || value === false) + return ++this.length; + + this.size++; + + var index = this.length++, + byteIndex = index >> 5, + pos = index & 0x0000001f; + + this.array[byteIndex] |= (1 << pos); + + return this.length; +}; + +/** + * Method used to pop the last value of the set. + * + * @return {number} - The popped value. + */ +BitVector.prototype.pop = function() { + if (this.length === 0) + return; + + var index = --this.length; + + var byteIndex = index >> 5, + pos = index & 0x0000001f; + + return (this.array[byteIndex] >> pos) & 1; +}; + +/** + * Method used to get the given bit's value. + * + * @param {number} index - Target bit index. 
+ * @return {number} + */ +BitVector.prototype.get = function(index) { + if (this.length < index) + return undefined; + + var byteIndex = index >> 5, + pos = index & 0x0000001f; + + return (this.array[byteIndex] >> pos) & 1; +}; + +/** + * Method used to test the given bit's value. + * + * @param {number} index - Target bit index. + * @return {BitVector} + */ +BitVector.prototype.test = function(index) { + if (this.length < index) + return false; + + return Boolean(this.get(index)); +}; + +/** + * Method used to return the number of 1 from the beginning of the set up to + * the ith index. + * + * @param {number} i - Ith index (cannot be > length). + * @return {number} + */ +BitVector.prototype.rank = function(i) { + if (this.size === 0) + return 0; + + var byteIndex = i >> 5, + pos = i & 0x0000001f, + r = 0; + + // Accessing the bytes before the last one + for (var j = 0; j < byteIndex; j++) + r += bitwise.table8Popcount(this.array[j]); + + // Handling masked last byte + var maskedByte = this.array[byteIndex] & ((1 << pos) - 1); + + r += bitwise.table8Popcount(maskedByte); + + return r; +}; + +/** + * Method used to return the position of the rth 1 in the set or -1 if the + * set is empty. + * + * Note: usually select is implemented using binary search over rank but I + * tend to think the following linear implementation is faster since here + * rank is O(n) anyway. + * + * @param {number} r - Rth 1 to select (should be < length). + * @return {number} + */ +BitVector.prototype.select = function(r) { + if (this.size === 0) + return -1; + + // TODO: throw? 
+ if (r >= this.length) + return -1; + + var byte, + b = 32, + p = 0, + c = 0; + + for (var i = 0, l = this.array.length; i < l; i++) { + byte = this.array[i]; + + // The byte is empty, let's continue + if (byte === 0) + continue; + + // TODO: This branching might not be useful here + if (i === l - 1) + b = this.length % 32 || 32; + + // TODO: popcount should speed things up here + + for (var j = 0; j < b; j++, p++) { + c += (byte >> j) & 1; + + if (c === r) + return p; + } + } +}; + +/** + * Method used to iterate over the bit set's values. + * + * @param {function} callback - Function to call for each item. + * @param {object} scope - Optional scope. + * @return {undefined} + */ +BitVector.prototype.forEach = function(callback, scope) { + scope = arguments.length > 1 ? scope : this; + + var length = this.length, + byte, + bit, + b = 32; + + for (var i = 0, l = this.array.length; i < l; i++) { + byte = this.array[i]; + + if (i === l - 1) + b = length % 32 || 32; + + for (var j = 0; j < b; j++) { + bit = (byte >> j) & 1; + + callback.call(scope, bit, i * 32 + j); + } + } +}; + +/** + * Method used to create an iterator over a set's values. + * + * @return {Iterator} + */ +BitVector.prototype.values = function() { + var length = this.length, + inner = false, + byte, + bit, + array = this.array, + l = array.length, + i = 0, + j = -1, + b = 32; + + return new Iterator(function next() { + if (!inner) { + + if (i >= l) + return { + done: true + }; + + if (i === l - 1) + b = length % 32 || 32; + + byte = array[i++]; + inner = true; + j = -1; + } + + j++; + + if (j >= b) { + inner = false; + return next(); + } + + bit = (byte >> j) & 1; + + return { + value: bit + }; + }); +}; + +/** + * Method used to create an iterator over a set's entries. 
+ * + * @return {Iterator} + */ +BitVector.prototype.entries = function() { + var length = this.length, + inner = false, + byte, + bit, + array = this.array, + index, + l = array.length, + i = 0, + j = -1, + b = 32; + + return new Iterator(function next() { + if (!inner) { + + if (i >= l) + return { + done: true + }; + + if (i === l - 1) + b = length % 32 || 32; + + byte = array[i++]; + inner = true; + j = -1; + } + + j++; + index = (~-i) * 32 + j; + + if (j >= b) { + inner = false; + return next(); + } + + bit = (byte >> j) & 1; + + return { + value: [index, bit] + }; + }); +}; + +/** + * Attaching the #.values method to Symbol.iterator if possible. + */ +if (typeof Symbol !== 'undefined') + BitVector.prototype[Symbol.iterator] = BitVector.prototype.values; + +/** + * Convenience known methods. + */ +BitVector.prototype.inspect = function() { + var proxy = new Uint8Array(this.length); + + this.forEach(function(bit, i) { + proxy[i] = bit; + }); + + // Trick so that node displays the name of the constructor + Object.defineProperty(proxy, 'constructor', { + value: BitVector, + enumerable: false + }); + + return proxy; +}; + +if (typeof Symbol !== 'undefined') + BitVector.prototype[Symbol.for('nodejs.util.inspect.custom')] = BitVector.prototype.inspect; + +BitVector.prototype.toJSON = function() { + return Array.from(this.array.slice(0, (this.length >> 5) + 1)); +}; + +/** + * Exporting. 
+ */ +module.exports = BitVector; diff --git a/amplify/functions/fetchDocuments/node_modules/mnemonist/bk-tree.d.ts b/amplify/functions/fetchDocuments/node_modules/mnemonist/bk-tree.d.ts new file mode 100644 index 0000000..f158dfd --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/mnemonist/bk-tree.d.ts @@ -0,0 +1,24 @@ +/** + * Mnemonist BKTree Typings + * ========================= + */ +type DistanceFunction = (a: T, b: T) => number; + +export default class BKTree { + + // Members + distance: DistanceFunction; + size: number; + + // Constructor + constructor(distance: DistanceFunction); + + // Methods + add(item: T): this; + search(n: number, query: T): Array<{item: T, distance: number}>; + toJSON(): object; + inspect(): any; + + // Statics + static from(iterable: Iterable | {[key: string] : I}, distance: DistanceFunction): BKTree; +} \ No newline at end of file diff --git a/amplify/functions/fetchDocuments/node_modules/mnemonist/bk-tree.js b/amplify/functions/fetchDocuments/node_modules/mnemonist/bk-tree.js new file mode 100644 index 0000000..9c9792d --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/mnemonist/bk-tree.js @@ -0,0 +1,180 @@ +/* eslint no-constant-condition: 0 */ +/** + * Mnemonist BK Tree + * ================== + * + * Implementation of a Burkhard-Keller tree, allowing fast lookups of words + * that lie within a specified distance of the query word. + * + * [Reference]: + * https://en.wikipedia.org/wiki/BK-tree + * + * [Article]: + * W. Burkhard and R. Keller. Some approaches to best-match file searching, + * CACM, 1973 + */ +var forEach = require('obliterator/foreach'); + +/** + * BK Tree. + * + * @constructor + * @param {function} distance - Distance function to use. 
+ */ +function BKTree(distance) { + + if (typeof distance !== 'function') + throw new Error('mnemonist/BKTree.constructor: given `distance` should be a function.'); + + this.distance = distance; + this.clear(); +} + +/** + * Method used to add an item to the tree. + * + * @param {any} item - Item to add. + * @return {BKTree} + */ +BKTree.prototype.add = function(item) { + + // Initializing the tree with the first given word + if (!this.root) { + this.root = { + item: item, + children: {} + }; + + this.size++; + return this; + } + + var node = this.root, + d; + + while (true) { + d = this.distance(item, node.item); + + if (!node.children[d]) + break; + + node = node.children[d]; + } + + node.children[d] = { + item: item, + children: {} + }; + + this.size++; + return this; +}; + +/** + * Method used to query the tree. + * + * @param {number} n - Maximum distance between query & item. + * @param {any} query - Query + * @return {BKTree} + */ +BKTree.prototype.search = function(n, query) { + if (!this.root) + return []; + + var found = [], + stack = [this.root], + node, + child, + d, + i, + l; + + while (stack.length) { + node = stack.pop(); + d = this.distance(query, node.item); + + if (d <= n) + found.push({item: node.item, distance: d}); + + for (i = d - n, l = d + n + 1; i < l; i++) { + child = node.children[i]; + + if (child) + stack.push(child); + } + } + + return found; +}; + +/** + * Method used to clear the tree. + * + * @return {undefined} + */ +BKTree.prototype.clear = function() { + + // Properties + this.size = 0; + this.root = null; +}; + +/** + * Convenience known methods. 
+ */ +BKTree.prototype.toJSON = function() { + return this.root; +}; + +BKTree.prototype.inspect = function() { + var array = [], + stack = [this.root], + node, + d; + + while (stack.length) { + node = stack.pop(); + + if (!node) + continue; + + array.push(node.item); + + for (d in node.children) + stack.push(node.children[d]); + } + + // Trick so that node displays the name of the constructor + Object.defineProperty(array, 'constructor', { + value: BKTree, + enumerable: false + }); + + return array; +}; + +if (typeof Symbol !== 'undefined') + BKTree.prototype[Symbol.for('nodejs.util.inspect.custom')] = BKTree.prototype.inspect; + +/** + * Static @.from function taking an arbitrary iterable & converting it into + * a tree. + * + * @param {Iterable} iterable - Target iterable. + * @param {function} distance - Distance function. + * @return {Heap} + */ +BKTree.from = function(iterable, distance) { + var tree = new BKTree(distance); + + forEach(iterable, function(value) { + tree.add(value); + }); + + return tree; +}; + +/** + * Exporting. 
+ */ +module.exports = BKTree; diff --git a/amplify/functions/fetchDocuments/node_modules/mnemonist/bloom-filter.d.ts b/amplify/functions/fetchDocuments/node_modules/mnemonist/bloom-filter.d.ts new file mode 100644 index 0000000..dc9b2fa --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/mnemonist/bloom-filter.d.ts @@ -0,0 +1,29 @@ +/** + * Mnemonist BloomFilter Typings + * ============================== + */ +type BloomFilterOptions = { + capacity: number; + errorRate?: number; +} + +export default class BloomFilter { + + // Members + capacity: number; + errorRate: number; + hashFunctions: number; + + // Constructor + constructor(capacity: number); + constructor(options: BloomFilterOptions); + + // Methods + clear(): void; + add(string: string): this; + test(string: string): boolean; + toJSON(): Uint8Array; + + // Statics + from(iterable: Iterable, options?: number | BloomFilterOptions): BloomFilter; +} diff --git a/amplify/functions/fetchDocuments/node_modules/mnemonist/bloom-filter.js b/amplify/functions/fetchDocuments/node_modules/mnemonist/bloom-filter.js new file mode 100644 index 0000000..ba3ee76 --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/mnemonist/bloom-filter.js @@ -0,0 +1,186 @@ +/** + * Mnemonist Bloom Filter + * ======================= + * + * Bloom Filter implementation relying on MurmurHash3. + */ +var murmurhash3 = require('./utils/murmurhash3.js'), + forEach = require('obliterator/foreach'); + +/** + * Constants. + */ +var LN2_SQUARED = Math.LN2 * Math.LN2; + +/** + * Defaults. + */ +var DEFAULTS = { + errorRate: 0.005 +}; + +/** + * Function used to convert a string into a Uint16 byte array. + * + * @param {string} string - Target string. 
+ * @return {Uint16Array} + */ +function stringToByteArray(string) { + var array = new Uint16Array(string.length), + i, + l; + + for (i = 0, l = string.length; i < l; i++) + array[i] = string.charCodeAt(i); + + return array; +} + +/** + * Function used to hash the given byte array. + * + * @param {number} length - Length of the filter's byte array. + * @param {number} seed - Seed to use for the hash function. + * @param {Uint16Array} - Byte array representing the string. + * @return {number} - The hash. + * + * @note length * 8 should probably already be computed as well as seeds. + */ +function hashArray(length, seed, array) { + var hash = murmurhash3((seed * 0xFBA4C795) & 0xFFFFFFFF, array); + + return hash % (length * 8); +} + +/** + * Bloom Filter. + * + * @constructor + * @param {number|object} capacityOrOptions - Capacity or options. + */ +function BloomFilter(capacityOrOptions) { + var options = {}; + + if (!capacityOrOptions) + throw new Error('mnemonist/BloomFilter.constructor: a BloomFilter must be created with a capacity.'); + + if (typeof capacityOrOptions === 'object') + options = capacityOrOptions; + else + options.capacity = capacityOrOptions; + + // Handling capacity + if (typeof options.capacity !== 'number' || options.capacity <= 0) + throw new Error('mnemonist/BloomFilter.constructor: `capacity` option should be a positive integer.'); + + this.capacity = options.capacity; + + // Handling error rate + this.errorRate = options.errorRate || DEFAULTS.errorRate; + + if (typeof this.errorRate !== 'number' || options.errorRate <= 0) + throw new Error('mnemonist/BloomFilter.constructor: `errorRate` option should be a positive float.'); + + this.clear(); +} + +/** + * Method used to clear the filter. 
+ * + * @return {undefined} + */ +BloomFilter.prototype.clear = function() { + + // Optimizing number of bits & number of hash functions + var bits = -1 / LN2_SQUARED * this.capacity * Math.log(this.errorRate), + length = (bits / 8) | 0; + + this.hashFunctions = (length * 8 / this.capacity * Math.LN2) | 0; + + // Creating the data array + this.data = new Uint8Array(length); + + return; +}; + +/** + * Method used to add an string to the filter. + * + * @param {string} string - Item to add. + * @return {BloomFilter} + * + * @note Should probably create a hash function working directly on a string. + */ +BloomFilter.prototype.add = function(string) { + + // Converting the string to a byte array + var array = stringToByteArray(string); + + // Applying the n hash functions + for (var i = 0, l = this.hashFunctions; i < l; i++) { + var index = hashArray(this.data.length, i, array), + position = (1 << (7 & index)); + + this.data[index >> 3] |= position; + } + + return this; +}; + +/** + * Method used to test the given string. + * + * @param {string} string - Item to test. + * @return {boolean} + */ +BloomFilter.prototype.test = function(string) { + + // Converting the string to a byte array + var array = stringToByteArray(string); + + // Applying the n hash functions + for (var i = 0, l = this.hashFunctions; i < l; i++) { + var index = hashArray(this.data.length, i, array); + + if (!(this.data[index >> 3] & (1 << (7 & index)))) + return false; + } + + return true; +}; + +/** + * Convenience known methods. + */ +BloomFilter.prototype.toJSON = function() { + return this.data; +}; + +/** + * Static @.from function taking an arbitrary iterable & converting it into + * a filter. + * + * @param {Iterable} iterable - Target iterable. 
+ * @return {BloomFilter} + */ +BloomFilter.from = function(iterable, options) { + if (!options) { + options = iterable.length || iterable.size; + + if (typeof options !== 'number') + throw new Error('BloomFilter.from: could not infer the filter\'s capacity. Try passing it as second argument.'); + } + + var filter = new BloomFilter(options); + + forEach(iterable, function(value) { + filter.add(value); + }); + + return filter; +}; + +/** + * Exporting. + */ +module.exports = BloomFilter; diff --git a/amplify/functions/fetchDocuments/node_modules/mnemonist/circular-buffer.d.ts b/amplify/functions/fetchDocuments/node_modules/mnemonist/circular-buffer.d.ts new file mode 100644 index 0000000..ec1fa4c --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/mnemonist/circular-buffer.d.ts @@ -0,0 +1,34 @@ +/** + * Mnemonist CircularBuffer Typings + * ================================= + */ +import {IArrayLikeConstructor} from './utils/types'; + +export default class CircularBuffer implements Iterable { + + // Members + capacity: number; + size: number; + + // Constructor + constructor(ArrayClass: IArrayLikeConstructor, capacity: number); + + // Methods + clear(): void; + push(item: T): number; + unshift(item: T): number; + pop(): T | undefined; + shift(): T | undefined; + peekFirst(): T | undefined; + peekLast(): T | undefined; + get(index: number): T | undefined; + forEach(callback: (item: T, index: number, buffer: this) => void, scope?: any): void; + toArray(): Iterable; + values(): IterableIterator; + entries(): IterableIterator<[number, T]>; + [Symbol.iterator](): IterableIterator; + inspect(): any; + + // Statics + static from(iterable: Iterable | {[key: string] : I}, ArrayClass: IArrayLikeConstructor, capacity?: number): CircularBuffer; +} diff --git a/amplify/functions/fetchDocuments/node_modules/mnemonist/circular-buffer.js b/amplify/functions/fetchDocuments/node_modules/mnemonist/circular-buffer.js new file mode 100644 index 0000000..d3ef950 --- /dev/null 
+++ b/amplify/functions/fetchDocuments/node_modules/mnemonist/circular-buffer.js @@ -0,0 +1,131 @@ +/** + * Mnemonist CircularBuffer + * ========================= + * + * Circular buffer implementation fit to use as a finite deque. + */ +var iterables = require('./utils/iterables.js'), + FixedDeque = require('./fixed-deque'); + +/** + * CircularBuffer. + * + * @constructor + */ +function CircularBuffer(ArrayClass, capacity) { + + if (arguments.length < 2) + throw new Error('mnemonist/circular-buffer: expecting an Array class and a capacity.'); + + if (typeof capacity !== 'number' || capacity <= 0) + throw new Error('mnemonist/circular-buffer: `capacity` should be a positive number.'); + + this.ArrayClass = ArrayClass; + this.capacity = capacity; + this.items = new ArrayClass(this.capacity); + this.clear(); +} + +/** + * Pasting most of the prototype from FixedDeque. + */ +function paste(name) { + CircularBuffer.prototype[name] = FixedDeque.prototype[name]; +} + +Object.keys(FixedDeque.prototype).forEach(paste); + +if (typeof Symbol !== 'undefined') + Object.getOwnPropertySymbols(FixedDeque.prototype).forEach(paste); + +/** + * Method used to append a value to the buffer. + * + * @param {any} item - Item to append. + * @return {number} - Returns the new size of the buffer. + */ +CircularBuffer.prototype.push = function(item) { + var index = (this.start + this.size) % this.capacity; + + this.items[index] = item; + + // Overwriting? + if (this.size === this.capacity) { + + // If start is at the end, we wrap around the buffer + this.start = (index + 1) % this.capacity; + + return this.size; + } + + return ++this.size; +}; + +/** + * Method used to prepend a value to the buffer. + * + * @param {any} item - Item to prepend. + * @return {number} - Returns the new size of the buffer. 
+ */ +CircularBuffer.prototype.unshift = function(item) { + var index = this.start - 1; + + if (this.start === 0) + index = this.capacity - 1; + + this.items[index] = item; + + // Overwriting + if (this.size === this.capacity) { + + this.start = index; + + return this.size; + } + + this.start = index; + + return ++this.size; +}; + +/** + * Static @.from function taking an arbitrary iterable & converting it into + * a circular buffer. + * + * @param {Iterable} iterable - Target iterable. + * @param {function} ArrayClass - Array class to use. + * @param {number} capacity - Desired capacity. + * @return {FiniteStack} + */ +CircularBuffer.from = function(iterable, ArrayClass, capacity) { + if (arguments.length < 3) { + capacity = iterables.guessLength(iterable); + + if (typeof capacity !== 'number') + throw new Error('mnemonist/circular-buffer.from: could not guess iterable length. Please provide desired capacity as last argument.'); + } + + var buffer = new CircularBuffer(ArrayClass, capacity); + + if (iterables.isArrayLike(iterable)) { + var i, l; + + for (i = 0, l = iterable.length; i < l; i++) + buffer.items[i] = iterable[i]; + + buffer.size = l; + + return buffer; + } + + iterables.forEach(iterable, function(value) { + buffer.push(value); + }); + + return buffer; +}; + +/** + * Exporting. + */ +module.exports = CircularBuffer; diff --git a/amplify/functions/fetchDocuments/node_modules/mnemonist/critbit-tree-map.js b/amplify/functions/fetchDocuments/node_modules/mnemonist/critbit-tree-map.js new file mode 100644 index 0000000..1c41a9a --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/mnemonist/critbit-tree-map.js @@ -0,0 +1,515 @@ +/* eslint no-constant-condition: 0 */ +/** + * Mnemonist CritBitTreeMap + * ========================= + * + * JavaScript implementation of a crit-bit tree, also called PATRICIA tree. + * This tree is a basically a bitwise radix tree and is supposedly much more + * efficient than a standard Trie. 
+ * + * [References]: + * https://cr.yp.to/critbit.html + * https://www.imperialviolet.org/binary/critbit.pdf + */ +var bitwise = require('./utils/bitwise.js'); + +/** + * Helpers. + */ + +/** + * Helper returning the direction we need to take given a key and an + * encoded critbit. + * + * @param {string} key - Target key. + * @param {number} critbit - Packed address of byte + mask. + * @return {number} - 0, left or 1, right. + */ +function getDirection(key, critbit) { + var byteIndex = critbit >> 8; + + if (byteIndex > key.length - 1) + return 0; + + var byte = key.charCodeAt(byteIndex), + mask = critbit & 0xff; + + return (1 + (byte | mask)) >> 8; +} + +/** + * Helper returning the packed address of byte + mask or -1 if strings + * are identical. + * + * @param {string} a - First key. + * @param {string} b - Second key. + * @return {number} - Packed address of byte + mask. + */ +function findCriticalBit(a, b) { + var i = 0, + tmp; + + // Swapping so a is the shortest + if (a.length > b.length) { + tmp = b; + b = a; + a = tmp; + } + + var l = a.length, + mask; + + while (i < l) { + if (a[i] !== b[i]) { + mask = bitwise.criticalBit8Mask( + a.charCodeAt(i), + b.charCodeAt(i) + ); + + return (i << 8) | mask; + } + + i++; + } + + // Strings are identical + if (a.length === b.length) + return -1; + + // NOTE: x ^ 0 is the same as x + mask = bitwise.criticalBit8Mask(b.charCodeAt(i)); + + return (i << 8) | mask; +} + +/** + * Class representing a crit-bit tree's internal node. + * + * @constructor + * @param {number} critbit - Packed address of byte + mask. + */ +function InternalNode(critbit) { + this.critbit = critbit; + this.left = null; + this.right = null; +} + +/** + * Class representing a crit-bit tree's external node. + * Note that it is possible to replace those nodes by flat arrays. + * + * @constructor + * @param {string} key - Node's key. + * @param {any} value - Arbitrary value. 
+ */ +function ExternalNode(key, value) { + this.key = key; + this.value = value; +} + +/** + * CritBitTreeMap. + * + * @constructor + */ +function CritBitTreeMap() { + + // Properties + this.root = null; + this.size = 0; + + this.clear(); +} + +/** + * Method used to clear the CritBitTreeMap. + * + * @return {undefined} + */ +CritBitTreeMap.prototype.clear = function() { + + // Properties + this.root = null; + this.size = 0; +}; + +/** + * Method used to set the value of the given key in the trie. + * + * @param {string} key - Key to set. + * @param {any} value - Arbitrary value. + * @return {CritBitTreeMap} + */ +CritBitTreeMap.prototype.set = function(key, value) { + + // Tree is empty + if (this.size === 0) { + this.root = new ExternalNode(key, value); + this.size++; + + return this; + } + + // Walk state + var node = this.root, + ancestors = [], + path = [], + ancestor, + parent, + child, + critbit, + internal, + left, + leftPath, + best, + dir, + i, + l; + + // Walking the tree + while (true) { + + // Traversing an internal node + if (node instanceof InternalNode) { + dir = getDirection(key, node.critbit); + + // Going left & creating key if not yet there + if (dir === 0) { + if (!node.left) { + node.left = new ExternalNode(key, value); + return this; + } + + ancestors.push(node); + path.push(true); + + node = node.left; + } + + // Going right & creating key if not yet there + else { + if (!node.right) { + node.right = new ExternalNode(key, value); + return this; + } + + ancestors.push(node); + path.push(false); + + node = node.right; + } + } + + // Reaching an external node + else { + + // 1. 
Creating a new external node + critbit = findCriticalBit(key, node.key); + + // Key is identical, we just replace the value + if (critbit === -1) { + node.value = value; + return this; + } + + this.size++; + + internal = new InternalNode(critbit); + + left = getDirection(key, critbit) === 0; + + // TODO: maybe setting opposite pointer is not necessary + if (left) { + internal.left = new ExternalNode(key, value); + internal.right = node; + } + else { + internal.left = node; + internal.right = new ExternalNode(key, value); + } + + // 2. Bubbling up + best = -1; + l = ancestors.length; + + for (i = l - 1; i >= 0; i--) { + ancestor = ancestors[i]; + + if (ancestor.critbit > critbit) + continue; + + best = i; + break; + } + + // Do we need to attach to the root? + if (best < 0) { + this.root = internal; + + // Need to rewire parent as child? + if (l > 0) { + parent = ancestors[0]; + + if (left) + internal.right = parent; + else + internal.left = parent; + } + } + + // Simple case without rotation + else if (best === l - 1) { + parent = ancestors[best]; + leftPath = path[best]; + + if (leftPath) + parent.left = internal; + else + parent.right = internal; + } + + // Full rotation + else { + parent = ancestors[best]; + leftPath = path[best]; + child = ancestors[best + 1]; + + if (leftPath) + parent.left = internal; + else + parent.right = internal; + + if (left) + internal.right = child; + else + internal.left = child; + } + + return this; + } + } +}; + +/** + * Method used to get the value attached to the given key in the tree or + * undefined if not found. + * + * @param {string} key - Key to get. + * @return {any} + */ +CritBitTreeMap.prototype.get = function(key) { + + // Walk state + var node = this.root, + dir; + + // Walking the tree + while (true) { + + // Dead end + if (node === null) + return; + + // Traversing an internal node + if (node instanceof InternalNode) { + dir = getDirection(key, node.critbit); + + node = dir ? 
node.right : node.left; + } + + // Reaching an external node + else { + if (node.key !== key) + return; + + return node.value; + } + } +}; + +/** + * Method used to return whether the given key exists in the tree. + * + * @param {string} key - Key to test. + * @return {boolean} + */ +CritBitTreeMap.prototype.has = function(key) { + + // Walk state + var node = this.root, + dir; + + // Walking the tree + while (true) { + + // Dead end + if (node === null) + return false; + + // Traversing an internal node + if (node instanceof InternalNode) { + dir = getDirection(key, node.critbit); + + node = dir ? node.right : node.left; + } + + // Reaching an external node + else { + return node.key === key; + } + } +}; + +/** + * Method used to delete the given key from the tree and return whether the + * key did exist or not. + * + * @param {string} key - Key to delete. + * @return {boolean} + */ +CritBitTreeMap.prototype.delete = function(key) { + + // Walk state + var node = this.root, + dir; + + var parent = null, + grandParent = null, + wentLeftForParent = false, + wentLeftForGrandparent = false; + + // Walking the tree + while (true) { + + // Dead end + if (node === null) + return false; + + // Traversing an internal node + if (node instanceof InternalNode) { + dir = getDirection(key, node.critbit); + + if (dir === 0) { + grandParent = parent; + wentLeftForGrandparent = wentLeftForParent; + parent = node; + wentLeftForParent = true; + + node = node.left; + } + else { + grandParent = parent; + wentLeftForGrandparent = wentLeftForParent; + parent = node; + wentLeftForParent = false; + + node = node.right; + } + } + + // Reaching an external node + else { + if (key !== node.key) + return false; + + this.size--; + + // Rewiring + if (parent === null) { + this.root = null; + } + + else if (grandParent === null) { + if (wentLeftForParent) + this.root = parent.right; + else + this.root = parent.left; + } + + else { + if (wentLeftForGrandparent) { + if (wentLeftForParent) { + 
grandParent.left = parent.right; + } + else { + grandParent.left = parent.left; + } + } + else { + if (wentLeftForParent) { + grandParent.right = parent.right; + } + else { + grandParent.right = parent.left; + } + } + } + + return true; + } + } +}; + +/** + * Method used to iterate over the tree in key order. + * + * @param {function} callback - Function to call for each item. + * @param {object} scope - Optional scope. + * @return {undefined} + */ +CritBitTreeMap.prototype.forEach = function(callback, scope) { + scope = arguments.length > 1 ? scope : this; + + // Inorder traversal of the tree + var current = this.root, + stack = []; + + while (true) { + + if (current !== null) { + stack.push(current); + + current = current instanceof InternalNode ? current.left : null; + } + + else { + if (stack.length > 0) { + current = stack.pop(); + + if (current instanceof ExternalNode) + callback.call(scope, current.value, current.key); + + current = current instanceof InternalNode ? current.right : null; + } + else { + break; + } + } + } +}; + +/** + * Convenience known methods. + */ +CritBitTreeMap.prototype.inspect = function() { + return this; +}; + +if (typeof Symbol !== 'undefined') + CritBitTreeMap.prototype[Symbol.for('nodejs.util.inspect.custom')] = CritBitTreeMap.prototype.inspect; + +/** + * Static @.from function taking an arbitrary iterable & converting it into + * a CritBitTreeMap. + * + * @param {Iterable} iterable - Target iterable. + * @return {CritBitTreeMap} + */ +// CritBitTreeMap.from = function(iterable) { + +// }; + +/** + * Exporting. 
+ */ +module.exports = CritBitTreeMap; diff --git a/amplify/functions/fetchDocuments/node_modules/mnemonist/default-map.d.ts b/amplify/functions/fetchDocuments/node_modules/mnemonist/default-map.d.ts new file mode 100644 index 0000000..186878c --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/mnemonist/default-map.d.ts @@ -0,0 +1,29 @@ +/** + * Mnemonist DefaultMap Typings + * ============================= + */ +export default class DefaultMap implements Iterable<[K, V]> { + + // Members + size: number; + + // Constructor + constructor(factory: (key: K, index: number) => V); + + // Methods + clear(): void; + set(key: K, value: V): this; + delete(key: K): boolean; + has(key: K): boolean; + get(key: K): V; + peek(key: K): V | undefined; + forEach(callback: (value: V, key: K, map: this) => void, scope?: any): void; + keys(): IterableIterator; + values(): IterableIterator; + entries(): IterableIterator<[K, V]>; + [Symbol.iterator](): IterableIterator<[K, V]>; + inspect(): any; + + // Statics + static autoIncrement(): number; +} diff --git a/amplify/functions/fetchDocuments/node_modules/mnemonist/default-map.js b/amplify/functions/fetchDocuments/node_modules/mnemonist/default-map.js new file mode 100644 index 0000000..dbe41d7 --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/mnemonist/default-map.js @@ -0,0 +1,162 @@ +/** + * Mnemonist DefaultMap + * ===================== + * + * JavaScript implementation of a default map that will return a constructed + * value any time one tries to access an inexisting key. It's quite similar + * to python's defaultdict. + */ + +/** + * DefaultMap. + * + * @constructor + */ +function DefaultMap(factory) { + if (typeof factory !== 'function') + throw new Error('mnemonist/DefaultMap.constructor: expecting a function.'); + + this.items = new Map(); + this.factory = factory; + this.size = 0; +} + +/** + * Method used to clear the structure. 
+ * + * @return {undefined} + */ +DefaultMap.prototype.clear = function() { + + // Properties + this.items.clear(); + this.size = 0; +}; + +/** + * Method used to get the value set for given key. If the key does not exist, + * the value will be created using the provided factory. + * + * @param {any} key - Target key. + * @return {any} + */ +DefaultMap.prototype.get = function(key) { + var value = this.items.get(key); + + if (typeof value === 'undefined') { + value = this.factory(key, this.size); + this.items.set(key, value); + this.size++; + } + + return value; +}; + +/** + * Method used to get the value set for given key. If the key does not exist, + * a value won't be created. + * + * @param {any} key - Target key. + * @return {any} + */ +DefaultMap.prototype.peek = function(key) { + return this.items.get(key); +}; + +/** + * Method used to set a value for given key. + * + * @param {any} key - Target key. + * @param {any} value - Value. + * @return {DefaultMap} + */ +DefaultMap.prototype.set = function(key, value) { + this.items.set(key, value); + this.size = this.items.size; + + return this; +}; + +/** + * Method used to test the existence of a key in the map. + * + * @param {any} key - Target key. + * @return {boolean} + */ +DefaultMap.prototype.has = function(key) { + return this.items.has(key); +}; + +/** + * Method used to delete target key. + * + * @param {any} key - Target key. + * @return {boolean} + */ +DefaultMap.prototype.delete = function(key) { + var deleted = this.items.delete(key); + + this.size = this.items.size; + + return deleted; +}; + +/** + * Method used to iterate over each of the key/value pairs. + * + * @param {function} callback - Function to call for each item. + * @param {object} scope - Optional scope. + * @return {undefined} + */ +DefaultMap.prototype.forEach = function(callback, scope) { + scope = arguments.length > 1 ? scope : this; + + this.items.forEach(callback, scope); +}; + +/** + * Iterators. 
+ */ +DefaultMap.prototype.entries = function() { + return this.items.entries(); +}; + +DefaultMap.prototype.keys = function() { + return this.items.keys(); +}; + +DefaultMap.prototype.values = function() { + return this.items.values(); +}; + +/** + * Attaching the #.entries method to Symbol.iterator if possible. + */ +if (typeof Symbol !== 'undefined') + DefaultMap.prototype[Symbol.iterator] = DefaultMap.prototype.entries; + +/** + * Convenience known methods. + */ +DefaultMap.prototype.inspect = function() { + return this.items; +}; + +if (typeof Symbol !== 'undefined') + DefaultMap.prototype[Symbol.for('nodejs.util.inspect.custom')] = DefaultMap.prototype.inspect; + +/** + * Typical factories. + */ +DefaultMap.autoIncrement = function() { + var i = 0; + + return function() { + return i++; + }; +}; + +/** + * Exporting. + */ +module.exports = DefaultMap; diff --git a/amplify/functions/fetchDocuments/node_modules/mnemonist/default-weak-map.d.ts b/amplify/functions/fetchDocuments/node_modules/mnemonist/default-weak-map.d.ts new file mode 100644 index 0000000..579a883 --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/mnemonist/default-weak-map.d.ts @@ -0,0 +1,18 @@ +/** + * Mnemonist DefaultWeakMap Typings + * ================================ + */ +export default class DefaultWeakMap { + + // Constructor + constructor(factory: (key: K) => V); + + // Methods + clear(): void; + set(key: K, value: V): this; + delete(key: K): boolean; + has(key: K): boolean; + get(key: K): V; + peek(key: K): V | undefined; + inspect(): any; +} diff --git a/amplify/functions/fetchDocuments/node_modules/mnemonist/default-weak-map.js b/amplify/functions/fetchDocuments/node_modules/mnemonist/default-weak-map.js new file mode 100644 index 0000000..aa8931c --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/mnemonist/default-weak-map.js @@ -0,0 +1,108 @@ +/** + * Mnemonist DefaultWeakMap + * ========================= + * + * JavaScript implementation of a default 
weak map that will return a constructed + * value any time one tries to access an non-existing key. It is similar to + * DefaultMap but uses ES6 WeakMap that only holds weak reference to keys. + */ + +/** + * DefaultWeakMap. + * + * @constructor + */ +function DefaultWeakMap(factory) { + if (typeof factory !== 'function') + throw new Error('mnemonist/DefaultWeakMap.constructor: expecting a function.'); + + this.items = new WeakMap(); + this.factory = factory; +} + +/** + * Method used to clear the structure. + * + * @return {undefined} + */ +DefaultWeakMap.prototype.clear = function() { + + // Properties + this.items = new WeakMap(); +}; + +/** + * Method used to get the value set for given key. If the key does not exist, + * the value will be created using the provided factory. + * + * @param {any} key - Target key. + * @return {any} + */ +DefaultWeakMap.prototype.get = function(key) { + var value = this.items.get(key); + + if (typeof value === 'undefined') { + value = this.factory(key); + this.items.set(key, value); + } + + return value; +}; + +/** + * Method used to get the value set for given key. If the key does not exist, + * a value won't be created. + * + * @param {any} key - Target key. + * @return {any} + */ +DefaultWeakMap.prototype.peek = function(key) { + return this.items.get(key); +}; + +/** + * Method used to set a value for given key. + * + * @param {any} key - Target key. + * @param {any} value - Value. + * @return {DefaultMap} + */ +DefaultWeakMap.prototype.set = function(key, value) { + this.items.set(key, value); + return this; +}; + +/** + * Method used to test the existence of a key in the map. + * + * @param {any} key - Target key. + * @return {boolean} + */ +DefaultWeakMap.prototype.has = function(key) { + return this.items.has(key); +}; + +/** + * Method used to delete target key. + * + * @param {any} key - Target key. 
+ * @return {boolean} + */ +DefaultWeakMap.prototype.delete = function(key) { + return this.items.delete(key); +}; + +/** + * Convenience known methods. + */ +DefaultWeakMap.prototype.inspect = function() { + return this.items; +}; + +if (typeof Symbol !== 'undefined') + DefaultWeakMap.prototype[Symbol.for('nodejs.util.inspect.custom')] = DefaultWeakMap.prototype.inspect; + +/** + * Exporting. + */ +module.exports = DefaultWeakMap; diff --git a/amplify/functions/fetchDocuments/node_modules/mnemonist/fibonacci-heap.d.ts b/amplify/functions/fetchDocuments/node_modules/mnemonist/fibonacci-heap.d.ts new file mode 100644 index 0000000..cb15ab0 --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/mnemonist/fibonacci-heap.d.ts @@ -0,0 +1,65 @@ +/** + * Mnemonist FibonacciHeap Typings + * ================================ + */ +type FibonacciHeapComparator = (a: T, b: T) => number; + +export default class FibonacciHeap { + + // Members + size: number; + + // Constructor + constructor(comparator?: FibonacciHeapComparator); + + // Methods + clear(): void; + push(item: T): number; + peek(): T | undefined; + pop(): T | undefined; + inspect(): any; + + // Statics + static from( + iterable: Iterable | {[key: string] : I}, + comparator?: FibonacciHeapComparator + ): FibonacciHeap; +} + +export class MinFibonacciHeap { + + // Members + size: number; + + // Constructor + constructor(comparator?: FibonacciHeapComparator); + + // Methods + clear(): void; + push(item: T): number; + peek(): T | undefined; + pop(): T | undefined; + inspect(): any; + + // Statics + static from(iterable: Iterable | {[key: string] : I}): FibonacciHeap; +} + +export class MaxFibonacciHeap { + + // Members + size: number; + + // Constructor + constructor(comparator?: FibonacciHeapComparator); + + // Methods + clear(): void; + push(item: T): number; + peek(): T | undefined; + pop(): T | undefined; + inspect(): any; + + // Statics + static from(iterable: Iterable | {[key: string] : I}): 
FibonacciHeap; +} diff --git a/amplify/functions/fetchDocuments/node_modules/mnemonist/fibonacci-heap.js b/amplify/functions/fetchDocuments/node_modules/mnemonist/fibonacci-heap.js new file mode 100644 index 0000000..f41334f --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/mnemonist/fibonacci-heap.js @@ -0,0 +1,320 @@ +/* eslint no-constant-condition: 0 */ +/** + * Mnemonist Fibonacci Heap + * ========================= + * + * Fibonacci heap implementation. + */ +var comparators = require('./utils/comparators.js'), + forEach = require('obliterator/foreach'); + +var DEFAULT_COMPARATOR = comparators.DEFAULT_COMPARATOR, + reverseComparator = comparators.reverseComparator; + +/** + * Fibonacci Heap. + * + * @constructor + */ +function FibonacciHeap(comparator) { + this.clear(); + this.comparator = comparator || DEFAULT_COMPARATOR; + + if (typeof this.comparator !== 'function') + throw new Error('mnemonist/FibonacciHeap.constructor: given comparator should be a function.'); +} + +/** + * Method used to clear the heap. + * + * @return {undefined} + */ +FibonacciHeap.prototype.clear = function() { + + // Properties + this.root = null; + this.min = null; + this.size = 0; +}; + +/** + * Function used to create a node. + * + * @param {any} item - Target item. + * @return {object} + */ +function createNode(item) { + return { + item: item, + degree: 0 + }; +} + +/** + * Function used to merge the given node with the root list. + * + * @param {FibonacciHeap} heap - Target heap. + * @param {Node} node - Target node. + */ +function mergeWithRoot(heap, node) { + if (!heap.root) { + heap.root = node; + } + else { + node.right = heap.root.right; + node.left = heap.root; + heap.root.right.left = node; + heap.root.right = node; + } +} + +/** + * Method used to push an item into the heap. + * + * @param {any} item - Item to push. 
+ * @return {number} + */ +FibonacciHeap.prototype.push = function(item) { + var node = createNode(item); + node.left = node; + node.right = node; + mergeWithRoot(this, node); + + if (!this.min || this.comparator(node.item, this.min.item) <= 0) + this.min = node; + + return ++this.size; +}; + +/** + * Method used to get the "first" item of the heap. + * + * @return {any} + */ +FibonacciHeap.prototype.peek = function() { + return this.min ? this.min.item : undefined; +}; + +/** + * Function used to consume the given linked list. + * + * @param {Node} head - Head node. + * @param {array} + */ +function consumeLinkedList(head) { + var nodes = [], + node = head, + flag = false; + + while (true) { + if (node === head && flag) + break; + else if (node === head) + flag = true; + + nodes.push(node); + node = node.right; + } + + return nodes; +} + +/** + * Function used to remove the target node from the root list. + * + * @param {FibonacciHeap} heap - Target heap. + * @param {Node} node - Target node. + */ +function removeFromRoot(heap, node) { + if (heap.root === node) + heap.root = node.right; + node.left.right = node.right; + node.right.left = node.left; +} + +/** + * Function used to merge the given node with the child list of a root node. + * + * @param {Node} parent - Parent node. + * @param {Node} node - Target node. + */ +function mergeWithChild(parent, node) { + if (!parent.child) { + parent.child = node; + } + else { + node.right = parent.child.right; + node.left = parent.child; + parent.child.right.left = node; + parent.child.right = node; + } +} + +/** + * Function used to link one node to another in the root list. + * + * @param {FibonacciHeap} heap - Target heap. + * @param {Node} y - Y node. + * @param {Node} x - X node. + */ +function link(heap, y, x) { + removeFromRoot(heap, y); + y.left = y; + y.right = y; + mergeWithChild(x, y); + x.degree++; + y.parent = x; +} + +/** + * Function used to consolidate the heap. 
+ * + * @param {FibonacciHeap} heap - Target heap. + */ +function consolidate(heap) { + var A = new Array(heap.size), + nodes = consumeLinkedList(heap.root), + i, l, x, y, d, t; + + for (i = 0, l = nodes.length; i < l; i++) { + x = nodes[i]; + d = x.degree; + + while (A[d]) { + y = A[d]; + + if (heap.comparator(x.item, y.item) > 0) { + t = x; + x = y; + y = t; + } + + link(heap, y, x); + A[d] = null; + d++; + } + + A[d] = x; + } + + for (i = 0; i < heap.size; i++) { + if (A[i] && heap.comparator(A[i].item, heap.min.item) <= 0) + heap.min = A[i]; + } +} + +/** + * Method used to retrieve & remove the "first" item of the heap. + * + * @return {any} + */ +FibonacciHeap.prototype.pop = function() { + if (!this.size) + return undefined; + + var z = this.min; + + if (z.child) { + var nodes = consumeLinkedList(z.child), + node, + i, + l; + + for (i = 0, l = nodes.length; i < l; i++) { + node = nodes[i]; + + mergeWithRoot(this, node); + delete node.parent; + } + } + + removeFromRoot(this, z); + + if (z === z.right) { + this.min = null; + this.root = null; + } + else { + this.min = z.right; + consolidate(this); + } + + this.size--; + + return z.item; +}; + +/** + * Convenience known methods. + */ +FibonacciHeap.prototype.inspect = function() { + var proxy = { + size: this.size + }; + + if (this.min && 'item' in this.min) + proxy.top = this.min.item; + + // Trick so that node displays the name of the constructor + Object.defineProperty(proxy, 'constructor', { + value: FibonacciHeap, + enumerable: false + }); + + return proxy; +}; + +if (typeof Symbol !== 'undefined') + FibonacciHeap.prototype[Symbol.for('nodejs.util.inspect.custom')] = FibonacciHeap.prototype.inspect; + +/** + * Fibonacci Maximum Heap. 
+ * + * @constructor + */ +function MaxFibonacciHeap(comparator) { + this.clear(); + this.comparator = comparator || DEFAULT_COMPARATOR; + + if (typeof this.comparator !== 'function') + throw new Error('mnemonist/FibonacciHeap.constructor: given comparator should be a function.'); + + this.comparator = reverseComparator(this.comparator); +} + +MaxFibonacciHeap.prototype = FibonacciHeap.prototype; + +/** + * Static @.from function taking an arbitrary iterable & converting it into + * a heap. + * + * @param {Iterable} iterable - Target iterable. + * @param {function} comparator - Custom comparator function. + * @return {FibonacciHeap} + */ +FibonacciHeap.from = function(iterable, comparator) { + var heap = new FibonacciHeap(comparator); + + forEach(iterable, function(value) { + heap.push(value); + }); + + return heap; +}; + +MaxFibonacciHeap.from = function(iterable, comparator) { + var heap = new MaxFibonacciHeap(comparator); + + forEach(iterable, function(value) { + heap.push(value); + }); + + return heap; +}; + +/** + * Exporting. + */ +FibonacciHeap.MinFibonacciHeap = FibonacciHeap; +FibonacciHeap.MaxFibonacciHeap = MaxFibonacciHeap; +module.exports = FibonacciHeap; diff --git a/amplify/functions/fetchDocuments/node_modules/mnemonist/fixed-critbit-tree-map.js b/amplify/functions/fetchDocuments/node_modules/mnemonist/fixed-critbit-tree-map.js new file mode 100644 index 0000000..9658fee --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/mnemonist/fixed-critbit-tree-map.js @@ -0,0 +1,427 @@ +/* eslint no-constant-condition: 0 */ + +/* eslint-disable */ + +/** + * Mnemonist FixedFixedCritBitTreeMap + * =================================== + * + * TODO... + * + * [References]: + * https://cr.yp.to/critbit.html + * https://www.imperialviolet.org/binary/critbit.pdf + */ +var bitwise = require('./utils/bitwise.js'), + typed = require('./utils/typed-arrays.js'); + +/** + * Helpers. 
+ */ + +/** + * Helper returning the direction we need to take given a key and an + * encoded critbit. + * + * @param {string} key - Target key. + * @param {number} critbit - Packed address of byte + mask. + * @return {number} - 0, left or 1, right. + */ +function getDirection(key, critbit) { + var byteIndex = critbit >> 8; + + if (byteIndex > key.length - 1) + return 0; + + var byte = key.charCodeAt(byteIndex), + mask = critbit & 0xff; + + return byte & mask; +} + +/** + * Helper returning the packed address of byte + mask or -1 if strings + * are identical. + * + * @param {string} a - First key. + * @param {string} b - Second key. + * @return {number} - Packed address of byte + mask. + */ +function findCriticalBit(a, b) { + var i = 0, + tmp; + + // Swapping so a is the shortest + if (a.length > b.length) { + tmp = b; + b = a; + a = tmp; + } + + var l = a.length, + mask; + + while (i < l) { + if (a[i] !== b[i]) { + mask = bitwise.msb8( + a.charCodeAt(i) ^ b.charCodeAt(i) + ); + + return (i << 8) | mask; + } + + i++; + } + + // Strings are identical + if (a.length === b.length) + return -1; + + // NOTE: x ^ 0 is the same as x + mask = bitwise.msb8(b.charCodeAt(i)); + + return (i << 8) | mask; +} + +/** + * FixedCritBitTreeMap. + * + * @constructor + */ +function FixedCritBitTreeMap(capacity) { + + if (typeof capacity !== 'number' || capacity <= 0) + throw new Error('mnemonist/fixed-critbit-tree-map: `capacity` should be a positive number.'); + + // Properties + this.capacity = capacity; + this.offset = 0; + this.root = 0; + this.size = 0; + + var PointerArray = typed.getSignedPointerArray(capacity + 1); + + this.keys = new Array(capacity); + this.values = new Array(capacity); + this.lefts = new PointerArray(capacity - 1); + this.rights = new PointerArray(capacity - 1); + this.critbits = new Uint32Array(capacity); +} + +/** + * Method used to clear the FixedCritBitTreeMap. 
+ * + * @return {undefined} + */ +FixedCritBitTreeMap.prototype.clear = function() { + + // Properties + // TODO... + this.root = null; + this.size = 0; +}; + +/** + * Method used to set the value of the given key in the trie. + * + * @param {string} key - Key to set. + * @param {any} value - Arbitrary value. + * @return {FixedCritBitTreeMap} + */ +FixedCritBitTreeMap.prototype.set = function(key, value) { + var pointer; + + // TODO: yell if capacity is already full! + + // Tree is empty + if (this.size === 0) { + this.keys[0] = key; + this.values[0] = value; + + this.size++; + + this.root = -1; + + return this; + } + + // Walk state + var pointer = this.root, + newPointer, + leftOrRight, + opposite, + ancestors = [], + path = [], + ancestor, + parent, + child, + critbit, + internal, + best, + dir, + i, + l; + + // Walking the tree + while (true) { + + // Traversing an internal node + if (pointer > 0) { + pointer -= 1; + + // Choosing the correct direction + dir = getDirection(key, this.critbits[pointer]); + + leftOrRight = dir === 0 ? this.lefts : this.rights; + newPointer = leftOrRight[pointer]; + + if (newPointer === 0) { + + // Creating a fitting external node + pointer = this.size++; + leftOrRight[newPointer] = -(pointer + 1); + this.keys[pointer] = key; + this.values[pointer] = value; + return this; + } + + ancestors.push(pointer); + path.push(dir); + pointer = newPointer; + } + + // Reaching an external node + else { + pointer = -pointer; + pointer -= 1; + + // 1. Creating a new external node + critbit = findCriticalBit(key, this.keys[pointer]); + + // Key is identical, we just replace the value + if (critbit === -1) { + this.values[pointer] = value; + return this; + } + + internal = this.offset++; + newPointer = this.size++; + + this.keys[newPointer] = key; + this.values[newPointer] = value; + + this.critbits[internal] = critbit; + + dir = getDirection(key, critbit); + leftOrRight = dir === 0 ? this.lefts : this.rights; + opposite = dir === 0 ? 
this.rights : this.lefts; + + leftOrRight[internal] = -(newPointer + 1); + opposite[internal] = -(pointer + 1); + + // 2. Bubbling up + best = -1; + l = ancestors.length; + + for (i = l - 1; i >= 0; i--) { + ancestor = ancestors[i]; + + // TODO: this can be made faster + if ((this.critbits[ancestor] >> 8) > (critbit >> 8)) { + continue; + } + else if ((this.critbits[ancestor] >> 8) === (critbit >> 8)) { + if ((this.critbits[ancestor] & 0xff) < (critbit & 0xff)) + continue; + } + + best = i; + break; + } + + // Do we need to attach to the root? + if (best < 0) { + this.root = internal + 1; + + // Need to rewire parent as child? + if (l > 0) { + parent = ancestors[0]; + + opposite[internal] = parent + 1; + } + } + + // Simple case without rotation + else if (best === l - 1) { + parent = ancestors[best]; + dir = path[best]; + + leftOrRight = dir === 0 ? this.lefts : this.rights; + + leftOrRight[parent] = internal + 1; + } + + // Full rotation + else { + parent = ancestors[best]; + dir = path[best]; + child = ancestors[best + 1]; + + opposite[internal] = child + 1; + + leftOrRight = dir === 0 ? this.lefts : this.rights; + + leftOrRight[parent] = internal + 1; + } + + return this; + } + } +}; + +/** + * Method used to get the value attached to the given key in the tree or + * undefined if not found. + * + * @param {string} key - Key to get. + * @return {any} + */ +FixedCritBitTreeMap.prototype.get = function(key) { + + // Walk state + var pointer = this.root, + dir; + + // Walking the tree + while (true) { + + // Dead end + if (pointer === 0) + return; + + // Traversing an internal node + if (pointer > 0) { + pointer -= 1; + dir = getDirection(key, this.critbits[pointer]); + + pointer = dir === 0 ? 
this.lefts[pointer] : this.rights[pointer]; + } + + // Reaching an external node + else { + pointer = -pointer; + pointer -= 1; + + if (this.keys[pointer] !== key) + return; + + return this.values[pointer]; + } + } +}; + +/** + * Method used to return whether the given key exists in the tree. + * + * @param {string} key - Key to test. + * @return {boolean} + */ +FixedCritBitTreeMap.prototype.has = function(key) { + + // Walk state + var pointer = this.root, + dir; + + // Walking the tree + while (true) { + + // Dead end + if (pointer === 0) + return false; + + // Traversing an internal node + if (pointer > 0) { + pointer -= 1; + dir = getDirection(key, this.critbits[pointer]); + + pointer = dir === 0 ? this.lefts[pointer] : this.rights[pointer]; + } + + // Reaching an external node + else { + pointer = -pointer; + pointer -= 1; + + return this.keys[pointer] === key; + } + } +}; + +/** + * Method used to iterate over the tree in key order. + * + * @param {function} callback - Function to call for each item. + * @param {object} scope - Optional scope. + * @return {undefined} + */ +FixedCritBitTreeMap.prototype.forEach = function(callback, scope) { + scope = arguments.length > 1 ? scope : this; + + // Inorder traversal of the tree + var current = this.root, + stack = [], + p; + + while (true) { + + if (current !== 0) { + stack.push(current); + + current = current > 0 ? this.lefts[current - 1] : 0; + } + + else { + if (stack.length > 0) { + current = stack.pop(); + + if (current < 0) { + p = -current; + p -= 1; + + callback.call(scope, this.values[p], this.keys[p]); + } + + current = current > 0 ? this.rights[current - 1] : 0; + } + else { + break; + } + } + } +}; + +/** + * Convenience known methods. 
+ */ +FixedCritBitTreeMap.prototype.inspect = function() { + return this; +}; + +if (typeof Symbol !== 'undefined') + FixedCritBitTreeMap.prototype[Symbol.for('nodejs.util.inspect.custom')] = FixedCritBitTreeMap.prototype.inspect; + +/** + * Static @.from function taking an arbitrary iterable & converting it into + * a FixedCritBitTreeMap. + * + * @param {Iterable} iterable - Target iterable. + * @return {FixedCritBitTreeMap} + */ +// FixedCritBitTreeMap.from = function(iterable) { + +// }; + +/** + * Exporting. + */ +module.exports = FixedCritBitTreeMap; diff --git a/amplify/functions/fetchDocuments/node_modules/mnemonist/fixed-deque.d.ts b/amplify/functions/fetchDocuments/node_modules/mnemonist/fixed-deque.d.ts new file mode 100644 index 0000000..6e6b908 --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/mnemonist/fixed-deque.d.ts @@ -0,0 +1,34 @@ +/** + * Mnemonist FixedDeque Typings + * ============================= + */ +import {IArrayLikeConstructor} from './utils/types'; + +export default class FixedDeque implements Iterable { + + // Members + capacity: number; + size: number; + + // Constructor + constructor(ArrayClass: IArrayLikeConstructor, capacity: number); + + // Methods + clear(): void; + push(item: T): number; + unshift(item: T): number; + pop(): T | undefined; + shift(): T | undefined; + peekFirst(): T | undefined; + peekLast(): T | undefined; + get(index: number): T | undefined; + forEach(callback: (item: T, index: number, buffer: this) => void, scope?: any): void; + toArray(): Iterable; + values(): IterableIterator; + entries(): IterableIterator<[number, T]>; + [Symbol.iterator](): IterableIterator; + inspect(): any; + + // Statics + static from(iterable: Iterable | {[key: string] : I}, ArrayClass: IArrayLikeConstructor, capacity?: number): FixedDeque; +} diff --git a/amplify/functions/fetchDocuments/node_modules/mnemonist/fixed-deque.js b/amplify/functions/fetchDocuments/node_modules/mnemonist/fixed-deque.js new file mode 100644 
index 0000000..7b29858 --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/mnemonist/fixed-deque.js @@ -0,0 +1,351 @@ +/** + * Mnemonist FixedDeque + * ===================== + * + * Fixed capacity double-ended queue implemented as ring deque. + */ +var iterables = require('./utils/iterables.js'), + Iterator = require('obliterator/iterator'); + +/** + * FixedDeque. + * + * @constructor + */ +function FixedDeque(ArrayClass, capacity) { + + if (arguments.length < 2) + throw new Error('mnemonist/fixed-deque: expecting an Array class and a capacity.'); + + if (typeof capacity !== 'number' || capacity <= 0) + throw new Error('mnemonist/fixed-deque: `capacity` should be a positive number.'); + + this.ArrayClass = ArrayClass; + this.capacity = capacity; + this.items = new ArrayClass(this.capacity); + this.clear(); +} + +/** + * Method used to clear the structure. + * + * @return {undefined} + */ +FixedDeque.prototype.clear = function() { + + // Properties + this.start = 0; + this.size = 0; +}; + +/** + * Method used to append a value to the deque. + * + * @param {any} item - Item to append. + * @return {number} - Returns the new size of the deque. + */ +FixedDeque.prototype.push = function(item) { + if (this.size === this.capacity) + throw new Error('mnemonist/fixed-deque.push: deque capacity (' + this.capacity + ') exceeded!'); + + var index = (this.start + this.size) % this.capacity; + + this.items[index] = item; + + return ++this.size; +}; + +/** + * Method used to prepend a value to the deque. + * + * @param {any} item - Item to prepend. + * @return {number} - Returns the new size of the deque. 
+ */ +FixedDeque.prototype.unshift = function(item) { + if (this.size === this.capacity) + throw new Error('mnemonist/fixed-deque.unshift: deque capacity (' + this.capacity + ') exceeded!'); + + var index = this.start - 1; + + if (this.start === 0) + index = this.capacity - 1; + + this.items[index] = item; + this.start = index; + + return ++this.size; +}; + +/** + * Method used to pop the deque. + * + * @return {any} - Returns the popped item. + */ +FixedDeque.prototype.pop = function() { + if (this.size === 0) + return; + + const index = (this.start + this.size - 1) % this.capacity; + + this.size--; + + return this.items[index]; +}; + +/** + * Method used to shift the deque. + * + * @return {any} - Returns the shifted item. + */ +FixedDeque.prototype.shift = function() { + if (this.size === 0) + return; + + var index = this.start; + + this.size--; + this.start++; + + if (this.start === this.capacity) + this.start = 0; + + return this.items[index]; +}; + +/** + * Method used to peek the first value of the deque. + * + * @return {any} + */ +FixedDeque.prototype.peekFirst = function() { + if (this.size === 0) + return; + + return this.items[this.start]; +}; + +/** + * Method used to peek the last value of the deque. + * + * @return {any} + */ +FixedDeque.prototype.peekLast = function() { + if (this.size === 0) + return; + + var index = this.start + this.size - 1; + + if (index > this.capacity) + index -= this.capacity; + + return this.items[index]; +}; + +/** + * Method used to get the desired value of the deque. + * + * @param {number} index + * @return {any} + */ +FixedDeque.prototype.get = function(index) { + if (this.size === 0) + return; + + index = this.start + index; + + if (index > this.capacity) + index -= this.capacity; + + return this.items[index]; +}; + +/** + * Method used to iterate over the deque. + * + * @param {function} callback - Function to call for each item. + * @param {object} scope - Optional scope. 
+ * @return {undefined} + */ +FixedDeque.prototype.forEach = function(callback, scope) { + scope = arguments.length > 1 ? scope : this; + + var c = this.capacity, + l = this.size, + i = this.start, + j = 0; + + while (j < l) { + callback.call(scope, this.items[i], j, this); + i++; + j++; + + if (i === c) + i = 0; + } +}; + +/** + * Method used to convert the deque to a JavaScript array. + * + * @return {array} + */ +// TODO: optional array class as argument? +FixedDeque.prototype.toArray = function() { + + // Optimization + var offset = this.start + this.size; + + if (offset < this.capacity) + return this.items.slice(this.start, offset); + + var array = new this.ArrayClass(this.size), + c = this.capacity, + l = this.size, + i = this.start, + j = 0; + + while (j < l) { + array[j] = this.items[i]; + i++; + j++; + + if (i === c) + i = 0; + } + + return array; +}; + +/** + * Method used to create an iterator over the deque's values. + * + * @return {Iterator} + */ +FixedDeque.prototype.values = function() { + var items = this.items, + c = this.capacity, + l = this.size, + i = this.start, + j = 0; + + return new Iterator(function() { + if (j >= l) + return { + done: true + }; + + var value = items[i]; + + i++; + j++; + + if (i === c) + i = 0; + + return { + value: value, + done: false + }; + }); +}; + +/** + * Method used to create an iterator over the deque's entries. + * + * @return {Iterator} + */ +FixedDeque.prototype.entries = function() { + var items = this.items, + c = this.capacity, + l = this.size, + i = this.start, + j = 0; + + return new Iterator(function() { + if (j >= l) + return { + done: true + }; + + var value = items[i]; + + i++; + + if (i === c) + i = 0; + + return { + value: [j++, value], + done: false + }; + }); +}; + +/** + * Attaching the #.values method to Symbol.iterator if possible. + */ +if (typeof Symbol !== 'undefined') + FixedDeque.prototype[Symbol.iterator] = FixedDeque.prototype.values; + +/** + * Convenience known methods. 
+ */ +FixedDeque.prototype.inspect = function() { + var array = this.toArray(); + + array.type = this.ArrayClass.name; + array.capacity = this.capacity; + + // Trick so that node displays the name of the constructor + Object.defineProperty(array, 'constructor', { + value: FixedDeque, + enumerable: false + }); + + return array; +}; + +if (typeof Symbol !== 'undefined') + FixedDeque.prototype[Symbol.for('nodejs.util.inspect.custom')] = FixedDeque.prototype.inspect; + +/** + * Static @.from function taking an arbitrary iterable & converting it into + * a deque. + * + * @param {Iterable} iterable - Target iterable. + * @param {function} ArrayClass - Array class to use. + * @param {number} capacity - Desired capacity. + * @return {FiniteStack} + */ +FixedDeque.from = function(iterable, ArrayClass, capacity) { + if (arguments.length < 3) { + capacity = iterables.guessLength(iterable); + + if (typeof capacity !== 'number') + throw new Error('mnemonist/fixed-deque.from: could not guess iterable length. Please provide desired capacity as last argument.'); + } + + var deque = new FixedDeque(ArrayClass, capacity); + + if (iterables.isArrayLike(iterable)) { + var i, l; + + for (i = 0, l = iterable.length; i < l; i++) + deque.items[i] = iterable[i]; + + deque.size = l; + + return deque; + } + + iterables.forEach(iterable, function(value) { + deque.push(value); + }); + + return deque; +}; + +/** + * Exporting. 
+ */ +module.exports = FixedDeque; diff --git a/amplify/functions/fetchDocuments/node_modules/mnemonist/fixed-reverse-heap.d.ts b/amplify/functions/fetchDocuments/node_modules/mnemonist/fixed-reverse-heap.d.ts new file mode 100644 index 0000000..668c556 --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/mnemonist/fixed-reverse-heap.d.ts @@ -0,0 +1,25 @@ +/** + * Mnemonist FixedReverseHeap Typings + * =================================== + */ +import {IArrayLikeConstructor} from './utils/types'; + +type HeapComparator = (a: T, b: T) => number; + +export default class FixedReverseHeap { + + // Members + capacity: number; + size: number; + + // Constructor + constructor(ArrayClass: IArrayLikeConstructor, comparator: HeapComparator, capacity: number); + constructor(ArrayClass: IArrayLikeConstructor, capacity: number); + + // Methods + clear(): void; + push(item: T): number; + consume(): Iterable; + toArray(): Iterable; + inspect(): any; +} diff --git a/amplify/functions/fetchDocuments/node_modules/mnemonist/fixed-reverse-heap.js b/amplify/functions/fetchDocuments/node_modules/mnemonist/fixed-reverse-heap.js new file mode 100644 index 0000000..197aac4 --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/mnemonist/fixed-reverse-heap.js @@ -0,0 +1,209 @@ +/** + * Mnemonist Fixed Reverse Heap + * ============================= + * + * Static heap implementation with fixed capacity. It's a "reverse" heap + * because it stores the elements in reverse so we can replace the worst + * item in logarithmic time. As such, one cannot pop this heap but can only + * consume it at the end. This structure is very efficient when trying to + * find the n smallest/largest items from a larger query (k nearest neigbors + * for instance). + */ +var comparators = require('./utils/comparators.js'), + Heap = require('./heap.js'); + +var DEFAULT_COMPARATOR = comparators.DEFAULT_COMPARATOR, + reverseComparator = comparators.reverseComparator; + +/** + * Helper functions. 
+ */ + +/** + * Function used to sift up. + * + * @param {function} compare - Comparison function. + * @param {array} heap - Array storing the heap's data. + * @param {number} size - Heap's true size. + * @param {number} i - Index. + */ +function siftUp(compare, heap, size, i) { + var endIndex = size, + startIndex = i, + item = heap[i], + childIndex = 2 * i + 1, + rightIndex; + + while (childIndex < endIndex) { + rightIndex = childIndex + 1; + + if ( + rightIndex < endIndex && + compare(heap[childIndex], heap[rightIndex]) >= 0 + ) { + childIndex = rightIndex; + } + + heap[i] = heap[childIndex]; + i = childIndex; + childIndex = 2 * i + 1; + } + + heap[i] = item; + Heap.siftDown(compare, heap, startIndex, i); +} + +/** + * Fully consumes the given heap. + * + * @param {function} ArrayClass - Array class to use. + * @param {function} compare - Comparison function. + * @param {array} heap - Array storing the heap's data. + * @param {number} size - True size of the heap. + * @return {array} + */ +function consume(ArrayClass, compare, heap, size) { + var l = size, + i = l; + + var array = new ArrayClass(size), + lastItem, + item; + + while (i > 0) { + lastItem = heap[--i]; + + if (i !== 0) { + item = heap[0]; + heap[0] = lastItem; + siftUp(compare, heap, --size, 0); + lastItem = item; + } + + array[i] = lastItem; + } + + return array; +} + +/** + * Binary Minimum FixedReverseHeap. + * + * @constructor + * @param {function} ArrayClass - The class of array to use. + * @param {function} comparator - Comparator function. + * @param {number} capacity - Maximum number of items to keep. 
+ */ +function FixedReverseHeap(ArrayClass, comparator, capacity) { + + // Comparator can be omitted + if (arguments.length === 2) { + capacity = comparator; + comparator = null; + } + + this.ArrayClass = ArrayClass; + this.capacity = capacity; + + this.items = new ArrayClass(capacity); + this.clear(); + this.comparator = comparator || DEFAULT_COMPARATOR; + + if (typeof capacity !== 'number' && capacity <= 0) + throw new Error('mnemonist/FixedReverseHeap.constructor: capacity should be a number > 0.'); + + if (typeof this.comparator !== 'function') + throw new Error('mnemonist/FixedReverseHeap.constructor: given comparator should be a function.'); + + this.comparator = reverseComparator(this.comparator); +} + +/** + * Method used to clear the heap. + * + * @return {undefined} + */ +FixedReverseHeap.prototype.clear = function() { + + // Properties + this.size = 0; +}; + +/** + * Method used to push an item into the heap. + * + * @param {any} item - Item to push. + * @return {number} + */ +FixedReverseHeap.prototype.push = function(item) { + + // Still some place + if (this.size < this.capacity) { + this.items[this.size] = item; + Heap.siftDown(this.comparator, this.items, 0, this.size); + this.size++; + } + + // Heap is full, we need to replace worst item + else { + + if (this.comparator(item, this.items[0]) > 0) + Heap.replace(this.comparator, this.items, item); + } + + return this.size; +}; + +/** + * Method used to peek the worst item in the heap. + * + * @return {any} + */ +FixedReverseHeap.prototype.peek = function() { + return this.items[0]; +}; + +/** + * Method used to consume the heap fully and return its items as a sorted array. + * + * @return {array} + */ +FixedReverseHeap.prototype.consume = function() { + var items = consume(this.ArrayClass, this.comparator, this.items, this.size); + this.size = 0; + + return items; +}; + +/** + * Method used to convert the heap to an array. Note that it basically clone + * the heap and consumes it completely. 
This is hardly performant. + * + * @return {array} + */ +FixedReverseHeap.prototype.toArray = function() { + return consume(this.ArrayClass, this.comparator, this.items.slice(0, this.size), this.size); +}; + +/** + * Convenience known methods. + */ +FixedReverseHeap.prototype.inspect = function() { + var proxy = this.toArray(); + + // Trick so that node displays the name of the constructor + Object.defineProperty(proxy, 'constructor', { + value: FixedReverseHeap, + enumerable: false + }); + + return proxy; +}; + +if (typeof Symbol !== 'undefined') + FixedReverseHeap.prototype[Symbol.for('nodejs.util.inspect.custom')] = FixedReverseHeap.prototype.inspect; + +/** + * Exporting. + */ +module.exports = FixedReverseHeap; diff --git a/amplify/functions/fetchDocuments/node_modules/mnemonist/fixed-stack.d.ts b/amplify/functions/fetchDocuments/node_modules/mnemonist/fixed-stack.d.ts new file mode 100644 index 0000000..9965853 --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/mnemonist/fixed-stack.d.ts @@ -0,0 +1,36 @@ +/** + * Mnemonist FixedStack Typings + * ============================= + */ +import {IArrayLikeConstructor} from './utils/types'; + +export default class FixedStack implements Iterable { + + // Members + capacity: number; + size: number; + + // Constructor + constructor(ArrayClass: IArrayLikeConstructor, capacity: number); + + // Methods + clear(): void; + push(item: T): number; + pop(): T | undefined; + peek(): T | undefined; + forEach(callback: (item: T, index: number, stack: this) => void, scope?: any): void; + toArray(): Iterable; + values(): IterableIterator; + entries(): IterableIterator<[number, T]>; + [Symbol.iterator](): IterableIterator; + toString(): string; + toJSON(): Iterable; + inspect(): any; + + // Statics + static from( + iterable: Iterable | {[key: string] : I}, + ArrayClass: IArrayLikeConstructor, + capacity?: number + ): FixedStack; +} diff --git a/amplify/functions/fetchDocuments/node_modules/mnemonist/fixed-stack.js 
b/amplify/functions/fetchDocuments/node_modules/mnemonist/fixed-stack.js new file mode 100644 index 0000000..c5b5f48 --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/mnemonist/fixed-stack.js @@ -0,0 +1,242 @@ +/** + * Mnemonist FixedStack + * ===================== + * + * The fixed stack is a stack whose capacity is defined beforehand and that + * cannot be exceeded. This class is really useful when combined with + * byte arrays to save up some memory and avoid memory re-allocation, hence + * speeding up computations. + * + * This has however a downside: you need to know the maximum size you stack + * can have during your iteration (which is not too difficult to compute when + * performing, say, a DFS on a balanced binary tree). + */ +var Iterator = require('obliterator/iterator'), + iterables = require('./utils/iterables.js'); + +/** + * FixedStack + * + * @constructor + * @param {function} ArrayClass - Array class to use. + * @param {number} capacity - Desired capacity. + */ +function FixedStack(ArrayClass, capacity) { + + if (arguments.length < 2) + throw new Error('mnemonist/fixed-stack: expecting an Array class and a capacity.'); + + if (typeof capacity !== 'number' || capacity <= 0) + throw new Error('mnemonist/fixed-stack: `capacity` should be a positive number.'); + + this.capacity = capacity; + this.ArrayClass = ArrayClass; + this.items = new this.ArrayClass(this.capacity); + this.clear(); +} + +/** + * Method used to clear the stack. + * + * @return {undefined} + */ +FixedStack.prototype.clear = function() { + + // Properties + this.size = 0; +}; + +/** + * Method used to add an item to the stack. + * + * @param {any} item - Item to add. 
+ * @return {number} + */ +FixedStack.prototype.push = function(item) { + if (this.size === this.capacity) + throw new Error('mnemonist/fixed-stack.push: stack capacity (' + this.capacity + ') exceeded!'); + + this.items[this.size++] = item; + return this.size; +}; + +/** + * Method used to retrieve & remove the last item of the stack. + * + * @return {any} + */ +FixedStack.prototype.pop = function() { + if (this.size === 0) + return; + + return this.items[--this.size]; +}; + +/** + * Method used to get the last item of the stack. + * + * @return {any} + */ +FixedStack.prototype.peek = function() { + return this.items[this.size - 1]; +}; + +/** + * Method used to iterate over the stack. + * + * @param {function} callback - Function to call for each item. + * @param {object} scope - Optional scope. + * @return {undefined} + */ +FixedStack.prototype.forEach = function(callback, scope) { + scope = arguments.length > 1 ? scope : this; + + for (var i = 0, l = this.items.length; i < l; i++) + callback.call(scope, this.items[l - i - 1], i, this); +}; + +/** + * Method used to convert the stack to a JavaScript array. + * + * @return {array} + */ +FixedStack.prototype.toArray = function() { + var array = new this.ArrayClass(this.size), + l = this.size - 1, + i = this.size; + + while (i--) + array[i] = this.items[l - i]; + + return array; +}; + +/** + * Method used to create an iterator over a stack's values. + * + * @return {Iterator} + */ +FixedStack.prototype.values = function() { + var items = this.items, + l = this.size, + i = 0; + + return new Iterator(function() { + if (i >= l) + return { + done: true + }; + + var value = items[l - i - 1]; + i++; + + return { + value: value, + done: false + }; + }); +}; + +/** + * Method used to create an iterator over a stack's entries. 
+ * + * @return {Iterator} + */ +FixedStack.prototype.entries = function() { + var items = this.items, + l = this.size, + i = 0; + + return new Iterator(function() { + if (i >= l) + return { + done: true + }; + + var value = items[l - i - 1]; + + return { + value: [i++, value], + done: false + }; + }); +}; + +/** + * Attaching the #.values method to Symbol.iterator if possible. + */ +if (typeof Symbol !== 'undefined') + FixedStack.prototype[Symbol.iterator] = FixedStack.prototype.values; + + +/** + * Convenience known methods. + */ +FixedStack.prototype.toString = function() { + return this.toArray().join(','); +}; + +FixedStack.prototype.toJSON = function() { + return this.toArray(); +}; + +FixedStack.prototype.inspect = function() { + var array = this.toArray(); + + array.type = this.ArrayClass.name; + array.capacity = this.capacity; + + // Trick so that node displays the name of the constructor + Object.defineProperty(array, 'constructor', { + value: FixedStack, + enumerable: false + }); + + return array; +}; + +if (typeof Symbol !== 'undefined') + FixedStack.prototype[Symbol.for('nodejs.util.inspect.custom')] = FixedStack.prototype.inspect; + +/** + * Static @.from function taking an arbitrary iterable & converting it into + * a stack. + * + * @param {Iterable} iterable - Target iterable. + * @param {function} ArrayClass - Array class to use. + * @param {number} capacity - Desired capacity. + * @return {FixedStack} + */ +FixedStack.from = function(iterable, ArrayClass, capacity) { + + if (arguments.length < 3) { + capacity = iterables.guessLength(iterable); + + if (typeof capacity !== 'number') + throw new Error('mnemonist/fixed-stack.from: could not guess iterable length. 
Please provide desired capacity as last argument.'); + } + + var stack = new FixedStack(ArrayClass, capacity); + + if (iterables.isArrayLike(iterable)) { + var i, l; + + for (i = 0, l = iterable.length; i < l; i++) + stack.items[i] = iterable[i]; + + stack.size = l; + + return stack; + } + + iterables.forEach(iterable, function(value) { + stack.push(value); + }); + + return stack; +}; + +/** + * Exporting. + */ +module.exports = FixedStack; diff --git a/amplify/functions/fetchDocuments/node_modules/mnemonist/fuzzy-map.d.ts b/amplify/functions/fetchDocuments/node_modules/mnemonist/fuzzy-map.d.ts new file mode 100644 index 0000000..7a1644d --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/mnemonist/fuzzy-map.d.ts @@ -0,0 +1,33 @@ +/** + * Mnemonist FuzzyMap Typings + * ========================== + */ +type HashFunction = (key: any) => K; +type HashFunctionsTuple = [HashFunction, HashFunction]; + +export default class FuzzyMap implements Iterable { + + // Members + size: number; + + // Constructor + constructor(hashFunction: HashFunction); + constructor(hashFunctionsTuple: HashFunctionsTuple); + + // Methods + clear(): void; + add(key: V): this; + set(key: K, value: V): this; + get(key: any): V | undefined; + has(key: any): boolean; + forEach(callback: (value: V, key: V) => void, scope?: this): void; + values(): IterableIterator; + [Symbol.iterator](): IterableIterator; + inspect(): any; + + // Statics + static from( + iterable: Iterable<[I, J]> | {[key: string]: J}, + hashFunction: HashFunction | HashFunctionsTuple, + ): FuzzyMap; +} \ No newline at end of file diff --git a/amplify/functions/fetchDocuments/node_modules/mnemonist/fuzzy-map.js b/amplify/functions/fetchDocuments/node_modules/mnemonist/fuzzy-map.js new file mode 100644 index 0000000..b0d52e1 --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/mnemonist/fuzzy-map.js @@ -0,0 +1,185 @@ +/** + * Mnemonist Fuzzy Map + * ==================== + * + * The fuzzy map is a map whose 
keys are processed by a function before + * read/write operations. This can often result in multiple keys accessing + * the same resource (example: a map with lowercased keys). + */ +var forEach = require('obliterator/foreach'); + +var identity = function(x) { + return x; +}; + +/** + * FuzzyMap. + * + * @constructor + * @param {array|function} descriptor - Hash functions descriptor. + */ +function FuzzyMap(descriptor) { + this.items = new Map(); + this.clear(); + + if (Array.isArray(descriptor)) { + this.writeHashFunction = descriptor[0]; + this.readHashFunction = descriptor[1]; + } + else { + this.writeHashFunction = descriptor; + this.readHashFunction = descriptor; + } + + if (!this.writeHashFunction) + this.writeHashFunction = identity; + if (!this.readHashFunction) + this.readHashFunction = identity; + + if (typeof this.writeHashFunction !== 'function') + throw new Error('mnemonist/FuzzyMap.constructor: invalid hash function given.'); + + if (typeof this.readHashFunction !== 'function') + throw new Error('mnemonist/FuzzyMap.constructor: invalid hash function given.'); +} + +/** + * Method used to clear the structure. + * + * @return {undefined} + */ +FuzzyMap.prototype.clear = function() { + this.items.clear(); + + // Properties + this.size = 0; +}; + +/** + * Method used to add an item to the FuzzyMap. + * + * @param {any} item - Item to add. + * @return {FuzzyMap} + */ +FuzzyMap.prototype.add = function(item) { + var key = this.writeHashFunction(item); + + this.items.set(key, item); + this.size = this.items.size; + + return this; +}; + +/** + * Method used to set an item in the FuzzyMap using the given key. + * + * @param {any} key - Key to use. + * @param {any} item - Item to add. + * @return {FuzzyMap} + */ +FuzzyMap.prototype.set = function(key, item) { + key = this.writeHashFunction(key); + + this.items.set(key, item); + this.size = this.items.size; + + return this; +}; + +/** + * Method used to retrieve an item from the FuzzyMap. 
+ * + * @param {any} key - Key to use. + * @return {any} + */ +FuzzyMap.prototype.get = function(key) { + key = this.readHashFunction(key); + + return this.items.get(key); +}; + +/** + * Method used to test the existence of an item in the map. + * + * @param {any} key - Key to check. + * @return {boolean} + */ +FuzzyMap.prototype.has = function(key) { + key = this.readHashFunction(key); + + return this.items.has(key); +}; + +/** + * Method used to iterate over each of the FuzzyMap's values. + * + * @param {function} callback - Function to call for each item. + * @param {object} scope - Optional scope. + * @return {undefined} + */ +FuzzyMap.prototype.forEach = function(callback, scope) { + scope = arguments.length > 1 ? scope : this; + + this.items.forEach(function(value) { + callback.call(scope, value, value); + }); +}; + +/** + * Method returning an iterator over the FuzzyMap's values. + * + * @return {FuzzyMapIterator} + */ +FuzzyMap.prototype.values = function() { + return this.items.values(); +}; + +/** + * Attaching the #.values method to Symbol.iterator if possible. + */ +if (typeof Symbol !== 'undefined') + FuzzyMap.prototype[Symbol.iterator] = FuzzyMap.prototype.values; + +/** + * Convenience known method. + */ +FuzzyMap.prototype.inspect = function() { + var array = Array.from(this.items.values()); + + Object.defineProperty(array, 'constructor', { + value: FuzzyMap, + enumerable: false + }); + + return array; +}; + +if (typeof Symbol !== 'undefined') + FuzzyMap.prototype[Symbol.for('nodejs.util.inspect.custom')] = FuzzyMap.prototype.inspect; + +/** + * Static @.from function taking an arbitrary iterable & converting it into + * a structure. + * + * @param {Iterable} iterable - Target iterable. + * @param {array|function} descriptor - Hash functions descriptor. 
+ * @param {boolean} useSet - Whether to use #.set or #.add + * @return {FuzzyMap} + */ +FuzzyMap.from = function(iterable, descriptor, useSet) { + var map = new FuzzyMap(descriptor); + + forEach(iterable, function(value, key) { + if (useSet) + map.set(key, value); + else + map.add(value); + }); + + return map; +}; + +/** + * Exporting. + */ +module.exports = FuzzyMap; diff --git a/amplify/functions/fetchDocuments/node_modules/mnemonist/fuzzy-multi-map.d.ts b/amplify/functions/fetchDocuments/node_modules/mnemonist/fuzzy-multi-map.d.ts new file mode 100644 index 0000000..62b8250 --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/mnemonist/fuzzy-multi-map.d.ts @@ -0,0 +1,36 @@ +/** + * Mnemonist FuzzyMultiMap Typings + * ================================ + */ +type HashFunction = (key: any) => K; +type HashFunctionsTuple = [HashFunction, HashFunction]; +type FuzzyMultiMapContainer = ArrayConstructor | SetConstructor; + +export default class FuzzyMultiMap implements Iterable { + + // Members + dimension: number; + size: number; + + // Constructor + constructor(hashFunction: HashFunction, Container?: FuzzyMultiMapContainer); + constructor(hashFunctions: HashFunctionsTuple, Container?: FuzzyMultiMapContainer); + + // Methods + clear(): void; + add(value: V): this; + set(key: K, value: V): this; + get(key: any): Array | Set | undefined; + has(key: any): boolean; + forEach(callback: (value: V, key: V) => void, scope?: any): void; + values(): IterableIterator; + [Symbol.iterator](): IterableIterator; + inspect(): any; + + // Statics + static from( + iterable: Iterable<[I, J]> | {[key: string]: J}, + hashFunction: HashFunction | HashFunctionsTuple, + Container?: FuzzyMultiMapContainer + ): FuzzyMultiMap; +} diff --git a/amplify/functions/fetchDocuments/node_modules/mnemonist/fuzzy-multi-map.js b/amplify/functions/fetchDocuments/node_modules/mnemonist/fuzzy-multi-map.js new file mode 100644 index 0000000..78b2b08 --- /dev/null +++ 
b/amplify/functions/fetchDocuments/node_modules/mnemonist/fuzzy-multi-map.js @@ -0,0 +1,196 @@ +/** + * Mnemonist FuzzyMultiMap + * ======================== + * + * Same as the fuzzy map but relying on a MultiMap rather than a Map. + */ +var MultiMap = require('./multi-map.js'), + forEach = require('obliterator/foreach'); + +var identity = function(x) { + return x; +}; + +/** + * FuzzyMultiMap. + * + * @constructor + * @param {array|function} descriptor - Hash functions descriptor. + * @param {function} Container - Container to use. + */ +function FuzzyMultiMap(descriptor, Container) { + this.items = new MultiMap(Container); + this.clear(); + + if (Array.isArray(descriptor)) { + this.writeHashFunction = descriptor[0]; + this.readHashFunction = descriptor[1]; + } + else { + this.writeHashFunction = descriptor; + this.readHashFunction = descriptor; + } + + if (!this.writeHashFunction) + this.writeHashFunction = identity; + if (!this.readHashFunction) + this.readHashFunction = identity; + + if (typeof this.writeHashFunction !== 'function') + throw new Error('mnemonist/FuzzyMultiMap.constructor: invalid hash function given.'); + + if (typeof this.readHashFunction !== 'function') + throw new Error('mnemonist/FuzzyMultiMap.constructor: invalid hash function given.'); +} + +/** + * Method used to clear the structure. + * + * @return {undefined} + */ +FuzzyMultiMap.prototype.clear = function() { + this.items.clear(); + + // Properties + this.size = 0; + this.dimension = 0; +}; + +/** + * Method used to add an item to the index. + * + * @param {any} item - Item to add. + * @return {FuzzyMultiMap} + */ +FuzzyMultiMap.prototype.add = function(item) { + var key = this.writeHashFunction(item); + + this.items.set(key, item); + this.size = this.items.size; + this.dimension = this.items.dimension; + + return this; +}; + +/** + * Method used to set an item in the index using the given key. + * + * @param {any} key - Key to use. + * @param {any} item - Item to add. 
+ * @return {FuzzyMultiMap} + */ +FuzzyMultiMap.prototype.set = function(key, item) { + key = this.writeHashFunction(key); + + this.items.set(key, item); + this.size = this.items.size; + this.dimension = this.items.dimension; + + return this; +}; + +/** + * Method used to retrieve an item from the index. + * + * @param {any} key - Key to use. + * @return {any} + */ +FuzzyMultiMap.prototype.get = function(key) { + key = this.readHashFunction(key); + + return this.items.get(key); +}; + +/** + * Method used to test the existence of an item in the map. + * + * @param {any} key - Key to check. + * @return {boolean} + */ +FuzzyMultiMap.prototype.has = function(key) { + key = this.readHashFunction(key); + + return this.items.has(key); +}; + +/** + * Method used to iterate over each of the index's values. + * + * @param {function} callback - Function to call for each item. + * @param {object} scope - Optional scope. + * @return {undefined} + */ +FuzzyMultiMap.prototype.forEach = function(callback, scope) { + scope = arguments.length > 1 ? scope : this; + + this.items.forEach(function(value) { + callback.call(scope, value, value); + }); +}; + +/** + * Method returning an iterator over the index's values. + * + * @return {FuzzyMultiMapIterator} + */ +FuzzyMultiMap.prototype.values = function() { + return this.items.values(); +}; + +/** + * Attaching the #.values method to Symbol.iterator if possible. + */ +if (typeof Symbol !== 'undefined') + FuzzyMultiMap.prototype[Symbol.iterator] = FuzzyMultiMap.prototype.values; + +/** + * Convenience known method. 
+ */ +FuzzyMultiMap.prototype.inspect = function() { + var array = Array.from(this); + + Object.defineProperty(array, 'constructor', { + value: FuzzyMultiMap, + enumerable: false + }); + + return array; +}; + +if (typeof Symbol !== 'undefined') + FuzzyMultiMap.prototype[Symbol.for('nodejs.util.inspect.custom')] = FuzzyMultiMap.prototype.inspect; + +/** + * Static @.from function taking an arbitrary iterable & converting it into + * a structure. + * + * @param {Iterable} iterable - Target iterable. + * @param {array|function} descriptor - Hash functions descriptor. + * @param {function} Container - Container to use. + * @param {boolean} useSet - Whether to use #.set or #.add + * @return {FuzzyMultiMap} + */ +FuzzyMultiMap.from = function(iterable, descriptor, Container, useSet) { + if (arguments.length === 3) { + if (typeof Container === 'boolean') { + useSet = Container; + Container = Array; + } + } + + var map = new FuzzyMultiMap(descriptor, Container); + + forEach(iterable, function(value, key) { + if (useSet) + map.set(key, value); + else + map.add(value); + }); + + return map; +}; + +/** + * Exporting. 
+ */ +module.exports = FuzzyMultiMap; diff --git a/amplify/functions/fetchDocuments/node_modules/mnemonist/hashed-array-tree.d.ts b/amplify/functions/fetchDocuments/node_modules/mnemonist/hashed-array-tree.d.ts new file mode 100644 index 0000000..eb56f7c --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/mnemonist/hashed-array-tree.d.ts @@ -0,0 +1,32 @@ +/** + * Mnemonist HashedArrayTree Typings + * ================================== + */ +import {IArrayLikeConstructor} from './utils/types'; + +type HashedArrayTreeOptions = { + initialCapacity?: number; + initialLength?: number; + blockSize?: number; +} + +export default class HashedArrayTree { + + // Members + blockSize: number; + capacity: number; + length: number; + + // Constructor + constructor(ArrayClass: IArrayLikeConstructor, capacity: number); + constructor(ArrayClass: IArrayLikeConstructor, options: HashedArrayTreeOptions); + + // Methods + set(index: number, value: T): this; + get(index: number): T | undefined; + grow(capacity: number): this; + resize(length: number): this; + push(value: T): number; + pop(): T | undefined; + inspect(): any; +} diff --git a/amplify/functions/fetchDocuments/node_modules/mnemonist/hashed-array-tree.js b/amplify/functions/fetchDocuments/node_modules/mnemonist/hashed-array-tree.js new file mode 100644 index 0000000..a51667c --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/mnemonist/hashed-array-tree.js @@ -0,0 +1,209 @@ +/** + * Mnemonist HashedArrayTree + * ========================== + * + * Abstract implementation of a hashed array tree representing arrays growing + * dynamically. + */ + +/** + * Defaults. + */ +var DEFAULT_BLOCK_SIZE = 1024; + +/** + * Helpers. + */ +function powerOfTwo(x) { + return (x & (x - 1)) === 0; +} + +/** + * HashedArrayTree. + * + * @constructor + * @param {function} ArrayClass - An array constructor. + * @param {number|object} initialCapacityOrOptions - Self-explanatory. 
+ */ +function HashedArrayTree(ArrayClass, initialCapacityOrOptions) { + if (arguments.length < 1) + throw new Error('mnemonist/hashed-array-tree: expecting at least a byte array constructor.'); + + var initialCapacity = initialCapacityOrOptions || 0, + blockSize = DEFAULT_BLOCK_SIZE, + initialLength = 0; + + if (typeof initialCapacityOrOptions === 'object') { + initialCapacity = initialCapacityOrOptions.initialCapacity || 0; + initialLength = initialCapacityOrOptions.initialLength || 0; + blockSize = initialCapacityOrOptions.blockSize || DEFAULT_BLOCK_SIZE; + } + + if (!blockSize || !powerOfTwo(blockSize)) + throw new Error('mnemonist/hashed-array-tree: block size should be a power of two.'); + + var capacity = Math.max(initialLength, initialCapacity), + initialBlocks = Math.ceil(capacity / blockSize); + + this.ArrayClass = ArrayClass; + this.length = initialLength; + this.capacity = initialBlocks * blockSize; + this.blockSize = blockSize; + this.offsetMask = blockSize - 1; + this.blockMask = Math.log2(blockSize); + + // Allocating initial blocks + this.blocks = new Array(initialBlocks); + + for (var i = 0; i < initialBlocks; i++) + this.blocks[i] = new this.ArrayClass(this.blockSize); +} + +/** + * Method used to set a value. + * + * @param {number} index - Index to edit. + * @param {any} value - Value. + * @return {HashedArrayTree} + */ +HashedArrayTree.prototype.set = function(index, value) { + + // Out of bounds? + if (this.length < index) + throw new Error('HashedArrayTree(' + this.ArrayClass.name + ').set: index out of bounds.'); + + var block = index >> this.blockMask, + i = index & this.offsetMask; + + this.blocks[block][i] = value; + + return this; +}; + +/** + * Method used to get a value. + * + * @param {number} index - Index to retrieve. 
+ * @return {any} + */ +HashedArrayTree.prototype.get = function(index) { + if (this.length < index) + return; + + var block = index >> this.blockMask, + i = index & this.offsetMask; + + return this.blocks[block][i]; +}; + +/** + * Method used to grow the array. + * + * @param {number} capacity - Optional capacity to accomodate. + * @return {HashedArrayTree} + */ +HashedArrayTree.prototype.grow = function(capacity) { + if (typeof capacity !== 'number') + capacity = this.capacity + this.blockSize; + + if (this.capacity >= capacity) + return this; + + while (this.capacity < capacity) { + this.blocks.push(new this.ArrayClass(this.blockSize)); + this.capacity += this.blockSize; + } + + return this; +}; + +/** + * Method used to resize the array. Won't deallocate. + * + * @param {number} length - Target length. + * @return {HashedArrayTree} + */ +HashedArrayTree.prototype.resize = function(length) { + if (length === this.length) + return this; + + if (length < this.length) { + this.length = length; + return this; + } + + this.length = length; + this.grow(length); + + return this; +}; + +/** + * Method used to push a value into the array. + * + * @param {any} value - Value to push. + * @return {number} - Length of the array. + */ +HashedArrayTree.prototype.push = function(value) { + if (this.capacity === this.length) + this.grow(); + + var index = this.length; + + var block = index >> this.blockMask, + i = index & this.offsetMask; + + this.blocks[block][i] = value; + + return ++this.length; +}; + +/** + * Method used to pop the last value of the array. + * + * @return {number} - The popped value. + */ +HashedArrayTree.prototype.pop = function() { + if (this.length === 0) + return; + + var lastBlock = this.blocks[this.blocks.length - 1]; + + var i = (--this.length) & this.offsetMask; + + return lastBlock[i]; +}; + +/** + * Convenience known methods. 
+ */ +HashedArrayTree.prototype.inspect = function() { + var proxy = new this.ArrayClass(this.length), + block; + + for (var i = 0, l = this.length; i < l; i++) { + block = i >> this.blockMask; + proxy[i] = this.blocks[block][i & this.offsetMask]; + } + + proxy.type = this.ArrayClass.name; + proxy.items = this.length; + proxy.capacity = this.capacity; + proxy.blockSize = this.blockSize; + + // Trick so that node displays the name of the constructor + Object.defineProperty(proxy, 'constructor', { + value: HashedArrayTree, + enumerable: false + }); + + return proxy; +}; + +if (typeof Symbol !== 'undefined') + HashedArrayTree.prototype[Symbol.for('nodejs.util.inspect.custom')] = HashedArrayTree.prototype.inspect; + +/** + * Exporting. + */ +module.exports = HashedArrayTree; diff --git a/amplify/functions/fetchDocuments/node_modules/mnemonist/heap.d.ts b/amplify/functions/fetchDocuments/node_modules/mnemonist/heap.d.ts new file mode 100644 index 0000000..c6aa219 --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/mnemonist/heap.d.ts @@ -0,0 +1,84 @@ +/** + * Mnemonist Heap Typings + * ======================= + */ +type HeapComparator = (a: T, b: T) => number; + +export default class Heap { + + // Members + size: number; + + // Constructor + constructor(comparator?: HeapComparator); + + // Methods + clear(): void; + push(item: T): number; + peek(): T | undefined; + pop(): T | undefined; + replace(item: T): T | undefined; + pushpop(item: T): T | undefined; + toArray(): Array; + consume(): Array; + inspect(): any; + + // Statics + static from( + iterable: Iterable | {[key: string] : I}, + comparator?: HeapComparator + ): Heap; +} + +export class MinHeap { + + // Members + size: number; + + // Constructor + constructor(comparator?: HeapComparator); + + // Methods + clear(): void; + push(item: T): number; + peek(): T | undefined; + pop(): T | undefined; + replace(item: T): T | undefined; + pushpop(item: T): T | undefined; + toArray(): Array; + consume(): 
Array; + inspect(): any; +} + +export class MaxHeap { + + // Members + size: number; + + // Constructor + constructor(comparator?: HeapComparator); + + // Methods + clear(): void; + push(item: T): number; + peek(): T | undefined; + pop(): T | undefined; + replace(item: T): T | undefined; + pushpop(item: T): T | undefined; + toArray(): Array; + consume(): Array; + inspect(): any; +} + +// Static helpers +export function push(comparator: HeapComparator, heap: Array, item: T): void; +export function pop(comparator: HeapComparator, heap: Array): T; +export function replace(comparator: HeapComparator, heap: Array, item: T): T; +export function pushpop(comparator: HeapComparator, heap: Array, item: T): T; +export function heapify(comparator: HeapComparator, array: Array): void; +export function consume(comparator: HeapComparator, heap: Array): Array; + +export function nsmallest(comparator: HeapComparator, n: number, values: Iterable): Array; +export function nsmallest(n: number, values: Iterable): Array; +export function nlargest(comparator: HeapComparator, n: number, values: Iterable): Array; +export function nlargest(n: number, values: Iterable): Array; diff --git a/amplify/functions/fetchDocuments/node_modules/mnemonist/heap.js b/amplify/functions/fetchDocuments/node_modules/mnemonist/heap.js new file mode 100644 index 0000000..90eb971 --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/mnemonist/heap.js @@ -0,0 +1,576 @@ +/** + * Mnemonist Binary Heap + * ====================== + * + * Binary heap implementation. + */ +var forEach = require('obliterator/foreach'), + comparators = require('./utils/comparators.js'), + iterables = require('./utils/iterables.js'); + +var DEFAULT_COMPARATOR = comparators.DEFAULT_COMPARATOR, + reverseComparator = comparators.reverseComparator; + +/** + * Heap helper functions. + */ + +/** + * Function used to sift down. + * + * @param {function} compare - Comparison function. 
+ * @param {array} heap - Array storing the heap's data. + * @param {number} startIndex - Starting index. + * @param {number} i - Index. + */ +function siftDown(compare, heap, startIndex, i) { + var item = heap[i], + parentIndex, + parent; + + while (i > startIndex) { + parentIndex = (i - 1) >> 1; + parent = heap[parentIndex]; + + if (compare(item, parent) < 0) { + heap[i] = parent; + i = parentIndex; + continue; + } + + break; + } + + heap[i] = item; +} + +/** + * Function used to sift up. + * + * @param {function} compare - Comparison function. + * @param {array} heap - Array storing the heap's data. + * @param {number} i - Index. + */ +function siftUp(compare, heap, i) { + var endIndex = heap.length, + startIndex = i, + item = heap[i], + childIndex = 2 * i + 1, + rightIndex; + + while (childIndex < endIndex) { + rightIndex = childIndex + 1; + + if ( + rightIndex < endIndex && + compare(heap[childIndex], heap[rightIndex]) >= 0 + ) { + childIndex = rightIndex; + } + + heap[i] = heap[childIndex]; + i = childIndex; + childIndex = 2 * i + 1; + } + + heap[i] = item; + siftDown(compare, heap, startIndex, i); +} + +/** + * Function used to push an item into a heap represented by a raw array. + * + * @param {function} compare - Comparison function. + * @param {array} heap - Array storing the heap's data. + * @param {any} item - Item to push. + */ +function push(compare, heap, item) { + heap.push(item); + siftDown(compare, heap, 0, heap.length - 1); +} + +/** + * Function used to pop an item from a heap represented by a raw array. + * + * @param {function} compare - Comparison function. + * @param {array} heap - Array storing the heap's data. + * @return {any} + */ +function pop(compare, heap) { + var lastItem = heap.pop(); + + if (heap.length !== 0) { + var item = heap[0]; + heap[0] = lastItem; + siftUp(compare, heap, 0); + + return item; + } + + return lastItem; +} + +/** + * Function used to pop the heap then push a new value into it, thus "replacing" + * it. 
+ * + * @param {function} compare - Comparison function. + * @param {array} heap - Array storing the heap's data. + * @param {any} item - The item to push. + * @return {any} + */ +function replace(compare, heap, item) { + if (heap.length === 0) + throw new Error('mnemonist/heap.replace: cannot pop an empty heap.'); + + var popped = heap[0]; + heap[0] = item; + siftUp(compare, heap, 0); + + return popped; +} + +/** + * Function used to push an item in the heap then pop the heap and return the + * popped value. + * + * @param {function} compare - Comparison function. + * @param {array} heap - Array storing the heap's data. + * @param {any} item - The item to push. + * @return {any} + */ +function pushpop(compare, heap, item) { + var tmp; + + if (heap.length !== 0 && compare(heap[0], item) < 0) { + tmp = heap[0]; + heap[0] = item; + item = tmp; + siftUp(compare, heap, 0); + } + + return item; +} + +/** + * Converts and array into an abstract heap in linear time. + * + * @param {function} compare - Comparison function. + * @param {array} array - Target array. + */ +function heapify(compare, array) { + var n = array.length, + l = n >> 1, + i = l; + + while (--i >= 0) + siftUp(compare, array, i); +} + +/** + * Fully consumes the given heap. + * + * @param {function} compare - Comparison function. + * @param {array} heap - Array storing the heap's data. + * @return {array} + */ +function consume(compare, heap) { + var l = heap.length, + i = 0; + + var array = new Array(l); + + while (i < l) + array[i++] = pop(compare, heap); + + return array; +} + +/** + * Function used to retrieve the n smallest items from the given iterable. + * + * @param {function} compare - Comparison function. + * @param {number} n - Number of top items to retrieve. + * @param {any} iterable - Arbitrary iterable. 
+ * @param {array} + */ +function nsmallest(compare, n, iterable) { + if (arguments.length === 2) { + iterable = n; + n = compare; + compare = DEFAULT_COMPARATOR; + } + + var reverseCompare = reverseComparator(compare); + + var i, l, v; + + var min = Infinity; + + var result; + + // If n is equal to 1, it's just a matter of finding the minimum + if (n === 1) { + if (iterables.isArrayLike(iterable)) { + for (i = 0, l = iterable.length; i < l; i++) { + v = iterable[i]; + + if (min === Infinity || compare(v, min) < 0) + min = v; + } + + result = new iterable.constructor(1); + result[0] = min; + + return result; + } + + forEach(iterable, function(value) { + if (min === Infinity || compare(value, min) < 0) + min = value; + }); + + return [min]; + } + + if (iterables.isArrayLike(iterable)) { + + // If n > iterable length, we just clone and sort + if (n >= iterable.length) + return iterable.slice().sort(compare); + + result = iterable.slice(0, n); + heapify(reverseCompare, result); + + for (i = n, l = iterable.length; i < l; i++) + if (reverseCompare(iterable[i], result[0]) > 0) + replace(reverseCompare, result, iterable[i]); + + // NOTE: if n is over some number, it becomes faster to consume the heap + return result.sort(compare); + } + + // Correct for size + var size = iterables.guessLength(iterable); + + if (size !== null && size < n) + n = size; + + result = new Array(n); + i = 0; + + forEach(iterable, function(value) { + if (i < n) { + result[i] = value; + } + else { + if (i === n) + heapify(reverseCompare, result); + + if (reverseCompare(value, result[0]) > 0) + replace(reverseCompare, result, value); + } + + i++; + }); + + if (result.length > i) + result.length = i; + + // NOTE: if n is over some number, it becomes faster to consume the heap + return result.sort(compare); +} + +/** + * Function used to retrieve the n largest items from the given iterable. + * + * @param {function} compare - Comparison function. 
+ * @param {number} n - Number of top items to retrieve. + * @param {any} iterable - Arbitrary iterable. + * @param {array} + */ +function nlargest(compare, n, iterable) { + if (arguments.length === 2) { + iterable = n; + n = compare; + compare = DEFAULT_COMPARATOR; + } + + var reverseCompare = reverseComparator(compare); + + var i, l, v; + + var max = -Infinity; + + var result; + + // If n is equal to 1, it's just a matter of finding the maximum + if (n === 1) { + if (iterables.isArrayLike(iterable)) { + for (i = 0, l = iterable.length; i < l; i++) { + v = iterable[i]; + + if (max === -Infinity || compare(v, max) > 0) + max = v; + } + + result = new iterable.constructor(1); + result[0] = max; + + return result; + } + + forEach(iterable, function(value) { + if (max === -Infinity || compare(value, max) > 0) + max = value; + }); + + return [max]; + } + + if (iterables.isArrayLike(iterable)) { + + // If n > iterable length, we just clone and sort + if (n >= iterable.length) + return iterable.slice().sort(reverseCompare); + + result = iterable.slice(0, n); + heapify(compare, result); + + for (i = n, l = iterable.length; i < l; i++) + if (compare(iterable[i], result[0]) > 0) + replace(compare, result, iterable[i]); + + // NOTE: if n is over some number, it becomes faster to consume the heap + return result.sort(reverseCompare); + } + + // Correct for size + var size = iterables.guessLength(iterable); + + if (size !== null && size < n) + n = size; + + result = new Array(n); + i = 0; + + forEach(iterable, function(value) { + if (i < n) { + result[i] = value; + } + else { + if (i === n) + heapify(compare, result); + + if (compare(value, result[0]) > 0) + replace(compare, result, value); + } + + i++; + }); + + if (result.length > i) + result.length = i; + + // NOTE: if n is over some number, it becomes faster to consume the heap + return result.sort(reverseCompare); +} + +/** + * Binary Minimum Heap. 
+ * + * @constructor + * @param {function} comparator - Comparator function to use. + */ +function Heap(comparator) { + this.clear(); + this.comparator = comparator || DEFAULT_COMPARATOR; + + if (typeof this.comparator !== 'function') + throw new Error('mnemonist/Heap.constructor: given comparator should be a function.'); +} + +/** + * Method used to clear the heap. + * + * @return {undefined} + */ +Heap.prototype.clear = function() { + + // Properties + this.items = []; + this.size = 0; +}; + +/** + * Method used to push an item into the heap. + * + * @param {any} item - Item to push. + * @return {number} + */ +Heap.prototype.push = function(item) { + push(this.comparator, this.items, item); + return ++this.size; +}; + +/** + * Method used to retrieve the "first" item of the heap. + * + * @return {any} + */ +Heap.prototype.peek = function() { + return this.items[0]; +}; + +/** + * Method used to retrieve & remove the "first" item of the heap. + * + * @return {any} + */ +Heap.prototype.pop = function() { + if (this.size !== 0) + this.size--; + + return pop(this.comparator, this.items); +}; + +/** + * Method used to pop the heap, then push an item and return the popped + * item. + * + * @param {any} item - Item to push into the heap. + * @return {any} + */ +Heap.prototype.replace = function(item) { + return replace(this.comparator, this.items, item); +}; + +/** + * Method used to push the heap, the pop it and return the pooped item. + * + * @param {any} item - Item to push into the heap. + * @return {any} + */ +Heap.prototype.pushpop = function(item) { + return pushpop(this.comparator, this.items, item); +}; + +/** + * Method used to consume the heap fully and return its items as a sorted array. + * + * @return {array} + */ +Heap.prototype.consume = function() { + this.size = 0; + return consume(this.comparator, this.items); +}; + +/** + * Method used to convert the heap to an array. Note that it basically clone + * the heap and consumes it completely. 
This is hardly performant. + * + * @return {array} + */ +Heap.prototype.toArray = function() { + return consume(this.comparator, this.items.slice()); +}; + +/** + * Convenience known methods. + */ +Heap.prototype.inspect = function() { + var proxy = this.toArray(); + + // Trick so that node displays the name of the constructor + Object.defineProperty(proxy, 'constructor', { + value: Heap, + enumerable: false + }); + + return proxy; +}; + +if (typeof Symbol !== 'undefined') + Heap.prototype[Symbol.for('nodejs.util.inspect.custom')] = Heap.prototype.inspect; + +/** + * Binary Maximum Heap. + * + * @constructor + * @param {function} comparator - Comparator function to use. + */ +function MaxHeap(comparator) { + this.clear(); + this.comparator = comparator || DEFAULT_COMPARATOR; + + if (typeof this.comparator !== 'function') + throw new Error('mnemonist/MaxHeap.constructor: given comparator should be a function.'); + + this.comparator = reverseComparator(this.comparator); +} + +MaxHeap.prototype = Heap.prototype; + +/** + * Static @.from function taking an arbitrary iterable & converting it into + * a heap. + * + * @param {Iterable} iterable - Target iterable. + * @param {function} comparator - Custom comparator function. 
+ * @return {Heap} + */ +Heap.from = function(iterable, comparator) { + var heap = new Heap(comparator); + + var items; + + // If iterable is an array, we can be clever about it + if (iterables.isArrayLike(iterable)) + items = iterable.slice(); + else + items = iterables.toArray(iterable); + + heapify(heap.comparator, items); + heap.items = items; + heap.size = items.length; + + return heap; +}; + +MaxHeap.from = function(iterable, comparator) { + var heap = new MaxHeap(comparator); + + var items; + + // If iterable is an array, we can be clever about it + if (iterables.isArrayLike(iterable)) + items = iterable.slice(); + else + items = iterables.toArray(iterable); + + heapify(heap.comparator, items); + heap.items = items; + heap.size = items.length; + + return heap; +}; + +/** + * Exporting. + */ +Heap.siftUp = siftUp; +Heap.siftDown = siftDown; +Heap.push = push; +Heap.pop = pop; +Heap.replace = replace; +Heap.pushpop = pushpop; +Heap.heapify = heapify; +Heap.consume = consume; + +Heap.nsmallest = nsmallest; +Heap.nlargest = nlargest; + +Heap.MinHeap = Heap; +Heap.MaxHeap = MaxHeap; + +module.exports = Heap; diff --git a/amplify/functions/fetchDocuments/node_modules/mnemonist/index.d.ts b/amplify/functions/fetchDocuments/node_modules/mnemonist/index.d.ts new file mode 100644 index 0000000..cbdc86c --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/mnemonist/index.d.ts @@ -0,0 +1,46 @@ +/** + * Mnemonist Typings + * ================== + * + * Gathering the library's typings. 
+ */ +import * as set from './set'; + +export {default as BiMap, InverseMap} from './bi-map'; +export {default as BitSet} from './bit-set'; +export {default as BitVector} from './bit-vector'; +export {default as BKTree} from './bk-tree'; +export {default as BloomFilter} from './bloom-filter'; +export {default as CircularBuffer} from './circular-buffer'; +export {default as DefaultMap} from './default-map'; +export {default as DefaultWeakMap} from './default-weak-map'; +export {default as FixedDeque} from './fixed-deque'; +export {default as FibonacciHeap, MinFibonacciHeap, MaxFibonacciHeap} from './fibonacci-heap'; +export {default as FixedReverseHeap} from './fixed-reverse-heap'; +export {default as FixedStack} from './fixed-stack'; +export {default as FuzzyMap} from './fuzzy-map'; +export {default as FuzzyMultiMap} from './fuzzy-multi-map'; +export {default as HashedArrayTree} from './hashed-array-tree'; +export {default as Heap, MinHeap, MaxHeap} from './heap'; +export {default as InvertedIndex} from './inverted-index'; +export {default as KDTree} from './kd-tree'; +export {default as LinkedList} from './linked-list'; +export {default as LRUCache} from './lru-cache'; +export {default as LRUMap} from './lru-map'; +export {default as MultiMap} from './multi-map'; +export {default as MultiSet} from './multi-set'; +export {default as PassjoinIndex} from './passjoin-index'; +export {default as Queue} from './queue'; +export {set}; +export {default as SparseQueueSet} from './sparse-queue-set'; +export {default as SparseMap} from './sparse-map'; +export {default as SparseSet} from './sparse-set'; +export {default as Stack} from './stack'; +export {default as StaticDisjointSet} from './static-disjoint-set'; +export {default as StaticIntervalTree} from './static-interval-tree'; +export {default as SuffixArray, GeneralizedSuffixArray} from './suffix-array'; +export {default as SymSpell} from './symspell'; +export {default as Trie} from './trie'; +export {default as 
TrieMap} from './trie-map'; +export {default as Vector, Uint8Vector, Uint8ClampedVector, Int8Vector, Uint16Vector, Int16Vector, Uint32Vector, Int32Vector, Float32Vector, Float64Array} from './vector'; +export {default as VPTree} from './vp-tree'; diff --git a/amplify/functions/fetchDocuments/node_modules/mnemonist/index.js b/amplify/functions/fetchDocuments/node_modules/mnemonist/index.js new file mode 100644 index 0000000..80c18d4 --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/mnemonist/index.js @@ -0,0 +1,56 @@ +/** + * Mnemonist Library Endpoint + * =========================== + * + * Exporting every data structure through a unified endpoint. Consumers + * of this library should prefer the modular access though. + */ +var Heap = require('./heap.js'), + FibonacciHeap = require('./fibonacci-heap.js'), + SuffixArray = require('./suffix-array.js'); + +module.exports = { + BiMap: require('./bi-map.js'), + BitSet: require('./bit-set.js'), + BitVector: require('./bit-vector.js'), + BloomFilter: require('./bloom-filter.js'), + BKTree: require('./bk-tree.js'), + CircularBuffer: require('./circular-buffer.js'), + DefaultMap: require('./default-map.js'), + DefaultWeakMap: require('./default-weak-map.js'), + FixedDeque: require('./fixed-deque.js'), + StaticDisjointSet: require('./static-disjoint-set.js'), + FibonacciHeap: FibonacciHeap, + MinFibonacciHeap: FibonacciHeap.MinFibonacciHeap, + MaxFibonacciHeap: FibonacciHeap.MaxFibonacciHeap, + FixedReverseHeap: require('./fixed-reverse-heap.js'), + FuzzyMap: require('./fuzzy-map.js'), + FuzzyMultiMap: require('./fuzzy-multi-map.js'), + HashedArrayTree: require('./hashed-array-tree.js'), + Heap: Heap, + MinHeap: Heap.MinHeap, + MaxHeap: Heap.MaxHeap, + StaticIntervalTree: require('./static-interval-tree.js'), + InvertedIndex: require('./inverted-index.js'), + KDTree: require('./kd-tree.js'), + LinkedList: require('./linked-list.js'), + LRUCache: require('./lru-cache.js'), + LRUMap: require('./lru-map.js'), + 
MultiMap: require('./multi-map.js'), + MultiSet: require('./multi-set.js'), + PassjoinIndex: require('./passjoin-index.js'), + Queue: require('./queue.js'), + FixedStack: require('./fixed-stack.js'), + Stack: require('./stack.js'), + SuffixArray: SuffixArray, + GeneralizedSuffixArray: SuffixArray.GeneralizedSuffixArray, + Set: require('./set.js'), + SparseQueueSet: require('./sparse-queue-set.js'), + SparseMap: require('./sparse-map.js'), + SparseSet: require('./sparse-set.js'), + SymSpell: require('./symspell.js'), + Trie: require('./trie.js'), + TrieMap: require('./trie-map.js'), + Vector: require('./vector.js'), + VPTree: require('./vp-tree.js') +}; diff --git a/amplify/functions/fetchDocuments/node_modules/mnemonist/inverted-index.d.ts b/amplify/functions/fetchDocuments/node_modules/mnemonist/inverted-index.d.ts new file mode 100644 index 0000000..4596ff8 --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/mnemonist/inverted-index.d.ts @@ -0,0 +1,33 @@ +/** + * Mnemonist InvertedIndex Typings + * ================================ + */ +type Tokenizer = (key: any) => Array; +type TokenizersTuple = [Tokenizer, Tokenizer]; + +export default class InvertedIndex implements Iterable { + + // Members + dimension: number; + size: number; + + // Constructor + constructor(tokenizer?: Tokenizer); + constructor(tokenizers?: TokenizersTuple); + + // Methods + clear(): void; + add(document: D): this; + get(query: any): Array; + forEach(callback: (document: D, index: number, invertedIndex: this) => void, scope?: any): void; + documents(): IterableIterator; + tokens(): IterableIterator; + [Symbol.iterator](): IterableIterator; + inspect(): any; + + // Statics + static from( + iterable: Iterable | {[key: string] : I}, + tokenizer?: Tokenizer | TokenizersTuple + ): InvertedIndex; +} \ No newline at end of file diff --git a/amplify/functions/fetchDocuments/node_modules/mnemonist/inverted-index.js 
b/amplify/functions/fetchDocuments/node_modules/mnemonist/inverted-index.js new file mode 100644 index 0000000..a352d19 --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/mnemonist/inverted-index.js @@ -0,0 +1,249 @@ +/** + * Mnemonist Inverted Index + * ========================= + * + * JavaScript implementation of an inverted index. + */ +var Iterator = require('obliterator/iterator'), + forEach = require('obliterator/foreach'), + helpers = require('./utils/merge.js'); + +function identity(x) { + return x; +} + +/** + * InvertedIndex. + * + * @constructor + * @param {function} tokenizer - Tokenizer function. + */ +function InvertedIndex(descriptor) { + this.clear(); + + if (Array.isArray(descriptor)) { + this.documentTokenizer = descriptor[0]; + this.queryTokenizer = descriptor[1]; + } + else { + this.documentTokenizer = descriptor; + this.queryTokenizer = descriptor; + } + + if (!this.documentTokenizer) + this.documentTokenizer = identity; + if (!this.queryTokenizer) + this.queryTokenizer = identity; + + if (typeof this.documentTokenizer !== 'function') + throw new Error('mnemonist/InvertedIndex.constructor: document tokenizer is not a function.'); + + if (typeof this.queryTokenizer !== 'function') + throw new Error('mnemonist/InvertedIndex.constructor: query tokenizer is not a function.'); +} + +/** + * Method used to clear the InvertedIndex. + * + * @return {undefined} + */ +InvertedIndex.prototype.clear = function() { + + // Properties + this.items = []; + this.mapping = new Map(); + this.size = 0; + this.dimension = 0; +}; + +/** + * Method used to add a document to the index. + * + * @param {any} doc - Item to add. 
+ * @return {InvertedIndex} + */ +InvertedIndex.prototype.add = function(doc) { + + // Increasing size + this.size++; + + // Storing document + var key = this.items.length; + this.items.push(doc); + + // Tokenizing the document + var tokens = this.documentTokenizer(doc); + + if (!Array.isArray(tokens)) + throw new Error('mnemonist/InvertedIndex.add: tokenizer function should return an array of tokens.'); + + // Indexing + var done = new Set(), + token, + container; + + for (var i = 0, l = tokens.length; i < l; i++) { + token = tokens[i]; + + if (done.has(token)) + continue; + + done.add(token); + + container = this.mapping.get(token); + + if (!container) { + container = []; + this.mapping.set(token, container); + } + + container.push(key); + } + + this.dimension = this.mapping.size; + + return this; +}; + +/** + * Method used to query the index in a AND fashion. + * + * @param {any} query - Query + * @return {Set} - Intersection of documents matching the query. + */ +InvertedIndex.prototype.get = function(query) { + + // Early termination + if (!this.size) + return []; + + // First we need to tokenize the query + var tokens = this.queryTokenizer(query); + + if (!Array.isArray(tokens)) + throw new Error('mnemonist/InvertedIndex.query: tokenizer function should return an array of tokens.'); + + if (!tokens.length) + return []; + + var results = this.mapping.get(tokens[0]), + c, + i, + l; + + if (typeof results === 'undefined' || results.length === 0) + return []; + + if (tokens.length > 1) { + for (i = 1, l = tokens.length; i < l; i++) { + c = this.mapping.get(tokens[i]); + + if (typeof c === 'undefined' || c.length === 0) + return []; + + results = helpers.intersectionUniqueArrays(results, c); + } + } + + var docs = new Array(results.length); + + for (i = 0, l = docs.length; i < l; i++) + docs[i] = this.items[results[i]]; + + return docs; +}; + +/** + * Method used to iterate over each of the documents. 
+ * + * @param {function} callback - Function to call for each item. + * @param {object} scope - Optional scope. + * @return {undefined} + */ +InvertedIndex.prototype.forEach = function(callback, scope) { + scope = arguments.length > 1 ? scope : this; + + for (var i = 0, l = this.documents.length; i < l; i++) + callback.call(scope, this.documents[i], i, this); +}; + +/** + * Method returning an iterator over the index's documents. + * + * @return {Iterator} + */ +InvertedIndex.prototype.documents = function() { + var documents = this.items, + l = documents.length, + i = 0; + + return new Iterator(function() { + if (i >= l) + return { + done: true + }; + + var value = documents[i++]; + + return { + value: value, + done: false + }; + }); +}; + +/** + * Method returning an iterator over the index's tokens. + * + * @return {Iterator} + */ +InvertedIndex.prototype.tokens = function() { + return this.mapping.keys(); +}; + +/** + * Attaching the #.values method to Symbol.iterator if possible. + */ +if (typeof Symbol !== 'undefined') + InvertedIndex.prototype[Symbol.iterator] = InvertedIndex.prototype.documents; + +/** + * Convenience known methods. + */ +InvertedIndex.prototype.inspect = function() { + var array = this.items.slice(); + + // Trick so that node displays the name of the constructor + Object.defineProperty(array, 'constructor', { + value: InvertedIndex, + enumerable: false + }); + + return array; +}; + +if (typeof Symbol !== 'undefined') + InvertedIndex.prototype[Symbol.for('nodejs.util.inspect.custom')] = InvertedIndex.prototype.inspect; + +/** + * Static @.from function taking an arbitrary iterable & converting it into + * a InvertedIndex. + * + * @param {Iterable} iterable - Target iterable. + * @param {function} tokenizer - Tokenizer function. 
+ * @return {InvertedIndex} + */ +InvertedIndex.from = function(iterable, descriptor) { + var index = new InvertedIndex(descriptor); + + forEach(iterable, function(doc) { + index.add(doc); + }); + + return index; +}; + +/** + * Exporting. + */ +module.exports = InvertedIndex; diff --git a/amplify/functions/fetchDocuments/node_modules/mnemonist/kd-tree.d.ts b/amplify/functions/fetchDocuments/node_modules/mnemonist/kd-tree.d.ts new file mode 100644 index 0000000..10294f3 --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/mnemonist/kd-tree.d.ts @@ -0,0 +1,25 @@ +/** + * Mnemonist KDTree Typings + * ========================= + */ +import {IArrayLike} from './utils/types'; + +export default class KDTree { + + // Members + dimensions: number; + size: number; + visited: number; + + // Methods + nearestNeighbor(point: Array): V; + kNearestNeighbors(k: number, point: Array): Array; + linearKNearestNeighbors(k: number, point: Array): Array; + inspect(): any; + + // Statics + static from(iterable: Iterable<[I, Array]>, dimensions: number): KDTree; + static from(axes: IArrayLike): KDTree; + static from(axes: IArrayLike, labels: Array): KDTree; +} + diff --git a/amplify/functions/fetchDocuments/node_modules/mnemonist/kd-tree.js b/amplify/functions/fetchDocuments/node_modules/mnemonist/kd-tree.js new file mode 100644 index 0000000..fe5d1ca --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/mnemonist/kd-tree.js @@ -0,0 +1,447 @@ +/** + * Mnemonist KDTree + * ================= + * + * Low-level JavaScript implementation of a k-dimensional tree. 
+ */ +var iterables = require('./utils/iterables.js'); +var typed = require('./utils/typed-arrays.js'); +var createTupleComparator = require('./utils/comparators.js').createTupleComparator; +var FixedReverseHeap = require('./fixed-reverse-heap.js'); +var inplaceQuickSortIndices = require('./sort/quick.js').inplaceQuickSortIndices; + +/** + * Helper function used to compute the squared distance between a query point + * and an indexed points whose values are stored in a tree's axes. + * + * Note that squared distance is used instead of euclidean to avoid + * costly sqrt computations. + * + * @param {number} dimensions - Number of dimensions. + * @param {array} axes - Axes data. + * @param {number} pivot - Pivot. + * @param {array} point - Query point. + * @return {number} + */ +function squaredDistanceAxes(dimensions, axes, pivot, b) { + var d; + + var dist = 0, + step; + + for (d = 0; d < dimensions; d++) { + step = axes[d][pivot] - b[d]; + dist += step * step; + } + + return dist; +} + +/** + * Helper function used to reshape input data into low-level axes data. + * + * @param {number} dimensions - Number of dimensions. + * @param {array} data - Data in the shape [label, [x, y, z...]] + * @return {object} + */ +function reshapeIntoAxes(dimensions, data) { + var l = data.length; + + var axes = new Array(dimensions), + labels = new Array(l), + axis; + + var PointerArray = typed.getPointerArray(l); + + var ids = new PointerArray(l); + + var d, i, row; + + var f = true; + + for (d = 0; d < dimensions; d++) { + axis = new Float64Array(l); + + for (i = 0; i < l; i++) { + row = data[i]; + axis[i] = row[1][d]; + + if (f) { + labels[i] = row[0]; + ids[i] = i; + } + } + + f = false; + axes[d] = axis; + } + + return {axes: axes, ids: ids, labels: labels}; +} + +/** + * Helper function used to build a kd-tree from axes data. + * + * @param {number} dimensions - Number of dimensions. + * @param {array} axes - Axes. + * @param {array} ids - Indices to sort. 
+ * @param {array} labels - Point labels. + * @return {object} + */ +function buildTree(dimensions, axes, ids, labels) { + var l = labels.length; + + // NOTE: +1 because we need to keep 0 as null pointer + var PointerArray = typed.getPointerArray(l + 1); + + // Building the tree + var pivots = new PointerArray(l), + lefts = new PointerArray(l), + rights = new PointerArray(l); + + var stack = [[0, 0, ids.length, -1, 0]], + step, + parent, + direction, + median, + pivot, + lo, + hi; + + var d, i = 0; + + while (stack.length !== 0) { + step = stack.pop(); + + d = step[0]; + lo = step[1]; + hi = step[2]; + parent = step[3]; + direction = step[4]; + + inplaceQuickSortIndices(axes[d], ids, lo, hi); + + l = hi - lo; + median = lo + (l >>> 1); // Fancy floor(l / 2) + pivot = ids[median]; + pivots[i] = pivot; + + if (parent > -1) { + if (direction === 0) + lefts[parent] = i + 1; + else + rights[parent] = i + 1; + } + + d = (d + 1) % dimensions; + + // Right + if (median !== lo && median !== hi - 1) { + stack.push([d, median + 1, hi, i, 1]); + } + + // Left + if (median !== lo) { + stack.push([d, lo, median, i, 0]); + } + + i++; + } + + return { + axes: axes, + labels: labels, + pivots: pivots, + lefts: lefts, + rights: rights + }; +} + +/** + * KDTree. + * + * @constructor + */ +function KDTree(dimensions, build) { + this.dimensions = dimensions; + this.visited = 0; + + this.axes = build.axes; + this.labels = build.labels; + + this.pivots = build.pivots; + this.lefts = build.lefts; + this.rights = build.rights; + + this.size = this.labels.length; +} + +/** + * Method returning the query's nearest neighbor. + * + * @param {array} query - Query point. 
+ * @return {any} + */ +KDTree.prototype.nearestNeighbor = function(query) { + var bestDistance = Infinity, + best = null; + + var dimensions = this.dimensions, + axes = this.axes, + pivots = this.pivots, + lefts = this.lefts, + rights = this.rights; + + var visited = 0; + + function recurse(d, node) { + visited++; + + var left = lefts[node], + right = rights[node], + pivot = pivots[node]; + + var dist = squaredDistanceAxes( + dimensions, + axes, + pivot, + query + ); + + if (dist < bestDistance) { + best = pivot; + bestDistance = dist; + + if (dist === 0) + return; + } + + var dx = axes[d][pivot] - query[d]; + + d = (d + 1) % dimensions; + + // Going the correct way? + if (dx > 0) { + if (left !== 0) + recurse(d, left - 1); + } + else { + if (right !== 0) + recurse(d, right - 1); + } + + // Going the other way? + if (dx * dx < bestDistance) { + if (dx > 0) { + if (right !== 0) + recurse(d, right - 1); + } + else { + if (left !== 0) + recurse(d, left - 1); + } + } + } + + recurse(0, 0); + + this.visited = visited; + return this.labels[best]; +}; + +var KNN_HEAP_COMPARATOR_3 = createTupleComparator(3); +var KNN_HEAP_COMPARATOR_2 = createTupleComparator(2); + +/** + * Method returning the query's k nearest neighbors. + * + * @param {number} k - Number of nearest neighbor to retrieve. + * @param {array} query - Query point. 
+ * @return {array} + */ + +// TODO: can do better by improving upon static-kdtree here +KDTree.prototype.kNearestNeighbors = function(k, query) { + if (k <= 0) + throw new Error('mnemonist/kd-tree.kNearestNeighbors: k should be a positive number.'); + + k = Math.min(k, this.size); + + if (k === 1) + return [this.nearestNeighbor(query)]; + + var heap = new FixedReverseHeap(Array, KNN_HEAP_COMPARATOR_3, k); + + var dimensions = this.dimensions, + axes = this.axes, + pivots = this.pivots, + lefts = this.lefts, + rights = this.rights; + + var visited = 0; + + function recurse(d, node) { + var left = lefts[node], + right = rights[node], + pivot = pivots[node]; + + var dist = squaredDistanceAxes( + dimensions, + axes, + pivot, + query + ); + + heap.push([dist, visited++, pivot]); + + var point = query[d], + split = axes[d][pivot], + dx = point - split; + + d = (d + 1) % dimensions; + + // Going the correct way? + if (point < split) { + if (left !== 0) { + recurse(d, left - 1); + } + } + else { + if (right !== 0) { + recurse(d, right - 1); + } + } + + // Going the other way? + if (dx * dx < heap.peek()[0] || heap.size < k) { + if (point < split) { + if (right !== 0) { + recurse(d, right - 1); + } + } + else { + if (left !== 0) { + recurse(d, left - 1); + } + } + } + } + + recurse(0, 0); + + this.visited = visited; + + var best = heap.consume(); + + for (var i = 0; i < best.length; i++) + best[i] = this.labels[best[i][2]]; + + return best; +}; + +/** + * Method returning the query's k nearest neighbors by linear search. + * + * @param {number} k - Number of nearest neighbor to retrieve. + * @param {array} query - Query point. 
+ * @return {array} + */ +KDTree.prototype.linearKNearestNeighbors = function(k, query) { + if (k <= 0) + throw new Error('mnemonist/kd-tree.kNearestNeighbors: k should be a positive number.'); + + k = Math.min(k, this.size); + + var heap = new FixedReverseHeap(Array, KNN_HEAP_COMPARATOR_2, k); + + var i, l, dist; + + for (i = 0, l = this.size; i < l; i++) { + dist = squaredDistanceAxes( + this.dimensions, + this.axes, + this.pivots[i], + query + ); + + heap.push([dist, i]); + } + + var best = heap.consume(); + + for (i = 0; i < best.length; i++) + best[i] = this.labels[this.pivots[best[i][1]]]; + + return best; +}; + +/** + * Convenience known methods. + */ +KDTree.prototype.inspect = function() { + var dummy = new Map(); + + dummy.dimensions = this.dimensions; + + Object.defineProperty(dummy, 'constructor', { + value: KDTree, + enumerable: false + }); + + var i, j, point; + + for (i = 0; i < this.size; i++) { + point = new Array(this.dimensions); + + for (j = 0; j < this.dimensions; j++) + point[j] = this.axes[j][i]; + + dummy.set(this.labels[i], point); + } + + return dummy; +}; + +if (typeof Symbol !== 'undefined') + KDTree.prototype[Symbol.for('nodejs.util.inspect.custom')] = KDTree.prototype.inspect; + +/** + * Static @.from function taking an arbitrary iterable & converting it into + * a structure. + * + * @param {Iterable} iterable - Target iterable. + * @param {number} dimensions - Space dimensions. + * @return {KDTree} + */ +KDTree.from = function(iterable, dimensions) { + var data = iterables.toArray(iterable); + + var reshaped = reshapeIntoAxes(dimensions, data); + + var result = buildTree(dimensions, reshaped.axes, reshaped.ids, reshaped.labels); + + return new KDTree(dimensions, result); +}; + +/** + * Static @.from function building a KDTree from given axes. + * + * @param {Iterable} iterable - Target iterable. + * @param {number} dimensions - Space dimensions. 
+ * @return {KDTree} + */ +KDTree.fromAxes = function(axes, labels) { + if (!labels) + labels = typed.indices(axes[0].length); + + var dimensions = axes.length; + + var result = buildTree(axes.length, axes, typed.indices(labels.length), labels); + + return new KDTree(dimensions, result); +}; + +/** + * Exporting. + */ +module.exports = KDTree; diff --git a/amplify/functions/fetchDocuments/node_modules/mnemonist/linked-list.d.ts b/amplify/functions/fetchDocuments/node_modules/mnemonist/linked-list.d.ts new file mode 100644 index 0000000..4eec48c --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/mnemonist/linked-list.d.ts @@ -0,0 +1,29 @@ +/** + * Mnemonist LinkedList Typings + * ============================= + */ +export default class LinkedList implements Iterable { + + // Members + size: number; + + // Methods + clear(): void; + first(): T | undefined; + last(): T | undefined; + peek(): T | undefined; + push(value: T): number; + shift(): T | undefined; + unshift(value: T): number; + forEach(callback: (value: T, index: number, list: this) => void, scope?: any): void; + toArray(): Array; + values(): IterableIterator; + entries(): IterableIterator<[number, T]>; + [Symbol.iterator](): IterableIterator; + toString(): string; + toJSON(): Array; + inspect(): any; + + // Statics + static from(iterable: Iterable | {[key: string] : I}): LinkedList; +} \ No newline at end of file diff --git a/amplify/functions/fetchDocuments/node_modules/mnemonist/linked-list.js b/amplify/functions/fetchDocuments/node_modules/mnemonist/linked-list.js new file mode 100644 index 0000000..17dca06 --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/mnemonist/linked-list.js @@ -0,0 +1,261 @@ +/** + * Mnemonist Linked List + * ====================== + * + * Singly linked list implementation. Uses raw JavaScript objects as nodes + * as benchmarks proved it was the fastest thing to do. 
+ */ +var Iterator = require('obliterator/iterator'), + forEach = require('obliterator/foreach'); + +/** + * Linked List. + * + * @constructor + */ +function LinkedList() { + this.clear(); +} + +/** + * Method used to clear the list. + * + * @return {undefined} + */ +LinkedList.prototype.clear = function() { + + // Properties + this.head = null; + this.tail = null; + this.size = 0; +}; + +/** + * Method used to get the first item of the list. + * + * @return {any} + */ +LinkedList.prototype.first = function() { + return this.head ? this.head.item : undefined; +}; +LinkedList.prototype.peek = LinkedList.prototype.first; + +/** + * Method used to get the last item of the list. + * + * @return {any} + */ +LinkedList.prototype.last = function() { + return this.tail ? this.tail.item : undefined; +}; + +/** + * Method used to add an item at the end of the list. + * + * @param {any} item - The item to add. + * @return {number} + */ +LinkedList.prototype.push = function(item) { + var node = {item: item, next: null}; + + if (!this.head) { + this.head = node; + this.tail = node; + } + else { + this.tail.next = node; + this.tail = node; + } + + this.size++; + + return this.size; +}; + +/** + * Method used to add an item at the beginning of the list. + * + * @param {any} item - The item to add. + * @return {number} + */ +LinkedList.prototype.unshift = function(item) { + var node = {item: item, next: null}; + + if (!this.head) { + this.head = node; + this.tail = node; + } + else { + if (!this.head.next) + this.tail = this.head; + node.next = this.head; + this.head = node; + } + + this.size++; + + return this.size; +}; + +/** + * Method used to retrieve & remove the first item of the list. + * + * @return {any} + */ +LinkedList.prototype.shift = function() { + if (!this.size) + return undefined; + + var node = this.head; + + this.head = node.next; + this.size--; + + return node.item; +}; + +/** + * Method used to iterate over the list. 
+ * + * @param {function} callback - Function to call for each item. + * @param {object} scope - Optional scope. + * @return {undefined} + */ +LinkedList.prototype.forEach = function(callback, scope) { + if (!this.size) + return; + + scope = arguments.length > 1 ? scope : this; + + var n = this.head, + i = 0; + + while (n) { + callback.call(scope, n.item, i, this); + n = n.next; + i++; + } +}; + +/** + * Method used to convert the list into an array. + * + * @return {array} + */ +LinkedList.prototype.toArray = function() { + if (!this.size) + return []; + + var array = new Array(this.size); + + for (var i = 0, l = this.size, n = this.head; i < l; i++) { + array[i] = n.item; + n = n.next; + } + + return array; +}; + +/** + * Method used to create an iterator over a list's values. + * + * @return {Iterator} + */ +LinkedList.prototype.values = function() { + var n = this.head; + + return new Iterator(function() { + if (!n) + return { + done: true + }; + + var value = n.item; + n = n.next; + + return { + value: value, + done: false + }; + }); +}; + +/** + * Method used to create an iterator over a list's entries. + * + * @return {Iterator} + */ +LinkedList.prototype.entries = function() { + var n = this.head, + i = 0; + + return new Iterator(function() { + if (!n) + return { + done: true + }; + + var value = n.item; + n = n.next; + i++; + + return { + value: [i - 1, value], + done: false + }; + }); +}; + +/** + * Attaching the #.values method to Symbol.iterator if possible. + */ +if (typeof Symbol !== 'undefined') + LinkedList.prototype[Symbol.iterator] = LinkedList.prototype.values; + +/** + * Convenience known methods. 
+ */ +LinkedList.prototype.toString = function() { + return this.toArray().join(','); +}; + +LinkedList.prototype.toJSON = function() { + return this.toArray(); +}; + +LinkedList.prototype.inspect = function() { + var array = this.toArray(); + + // Trick so that node displays the name of the constructor + Object.defineProperty(array, 'constructor', { + value: LinkedList, + enumerable: false + }); + + return array; +}; + +if (typeof Symbol !== 'undefined') + LinkedList.prototype[Symbol.for('nodejs.util.inspect.custom')] = LinkedList.prototype.inspect; + +/** + * Static @.from function taking an arbitrary iterable & converting it into + * a list. + * + * @param {Iterable} iterable - Target iterable. + * @return {LinkedList} + */ +LinkedList.from = function(iterable) { + var list = new LinkedList(); + + forEach(iterable, function(value) { + list.push(value); + }); + + return list; +}; + +/** + * Exporting. + */ +module.exports = LinkedList; diff --git a/amplify/functions/fetchDocuments/node_modules/mnemonist/lru-cache.d.ts b/amplify/functions/fetchDocuments/node_modules/mnemonist/lru-cache.d.ts new file mode 100644 index 0000000..45b61e0 --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/mnemonist/lru-cache.d.ts @@ -0,0 +1,43 @@ +/** + * Mnemonist LRUCache Typings + * =========================== + */ +import {IArrayLikeConstructor} from './utils/types'; + +export default class LRUCache implements Iterable<[K, V]> { + + // Members + capacity: number; + size: number; + + // Constructor + constructor(capacity: number); + constructor(KeyArrayClass: IArrayLikeConstructor, ValueArrayClass: IArrayLikeConstructor, capacity: number); + + // Methods + clear(): void; + set(key: K, value: V): this; + setpop(key: K, value: V): {evicted: boolean, key: K, value: V}; + get(key: K): V | undefined; + peek(key: K): V | undefined; + has(key: K): boolean; + forEach(callback: (value: V, key: K, cache: this) => void, scope?: any): void; + keys(): IterableIterator; + values(): 
IterableIterator; + entries(): IterableIterator<[K, V]>; + [Symbol.iterator](): IterableIterator<[K, V]>; + inspect(): any; + + // Statics + static from( + iterable: Iterable<[I, J]> | {[key: string]: J}, + KeyArrayClass: IArrayLikeConstructor, + ValueArrayClass: IArrayLikeConstructor, + capacity?: number + ): LRUCache; + + static from( + iterable: Iterable<[I, J]> | {[key: string]: J}, + capacity?: number + ): LRUCache; +} diff --git a/amplify/functions/fetchDocuments/node_modules/mnemonist/lru-cache.js b/amplify/functions/fetchDocuments/node_modules/mnemonist/lru-cache.js new file mode 100644 index 0000000..9cab8bc --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/mnemonist/lru-cache.js @@ -0,0 +1,433 @@ +/** + * Mnemonist LRUCache + * =================== + * + * JavaScript implementation of the LRU Cache data structure. To save up + * memory and allocations this implementation represents its underlying + * doubly-linked list as static arrays and pointers. Thus, memory is allocated + * only once at instantiation and JS objects are never created to serve as + * pointers. This also means this implementation does not trigger too many + * garbage collections. + * + * Note that to save up memory, a LRU Cache can be implemented using a singly + * linked list by storing predecessors' pointers as hashmap values. + * However, this means more hashmap lookups and would probably slow the whole + * thing down. What's more, pointers are not the things taking most space in + * memory. + */ +var Iterator = require('obliterator/iterator'), + forEach = require('obliterator/foreach'), + typed = require('./utils/typed-arrays.js'), + iterables = require('./utils/iterables.js'); + +/** + * LRUCache. + * + * @constructor + * @param {function} Keys - Array class for storing keys. + * @param {function} Values - Array class for storing values. + * @param {number} capacity - Desired capacity. 
+ */ +function LRUCache(Keys, Values, capacity) { + if (arguments.length < 2) { + capacity = Keys; + Keys = null; + Values = null; + } + + this.capacity = capacity; + + if (typeof this.capacity !== 'number' || this.capacity <= 0) + throw new Error('mnemonist/lru-cache: capacity should be positive number.'); + + var PointerArray = typed.getPointerArray(capacity); + + this.forward = new PointerArray(capacity); + this.backward = new PointerArray(capacity); + this.K = typeof Keys === 'function' ? new Keys(capacity) : new Array(capacity); + this.V = typeof Values === 'function' ? new Values(capacity) : new Array(capacity); + + // Properties + this.size = 0; + this.head = 0; + this.tail = 0; + this.items = {}; +} + +/** + * Method used to clear the structure. + * + * @return {undefined} + */ +LRUCache.prototype.clear = function() { + this.size = 0; + this.head = 0; + this.tail = 0; + this.items = {}; +}; + +/** + * Method used to splay a value on top. + * + * @param {number} pointer - Pointer of the value to splay on top. + * @return {LRUCache} + */ +LRUCache.prototype.splayOnTop = function(pointer) { + var oldHead = this.head; + + if (this.head === pointer) + return this; + + var previous = this.backward[pointer], + next = this.forward[pointer]; + + if (this.tail === pointer) { + this.tail = previous; + } + else { + this.backward[next] = previous; + } + + this.forward[previous] = next; + + this.backward[oldHead] = pointer; + this.head = pointer; + this.forward[pointer] = oldHead; + + return this; +}; + +/** + * Method used to set the value for the given key in the cache. + * + * @param {any} key - Key. + * @param {any} value - Value. 
+ * @return {undefined} + */ +LRUCache.prototype.set = function(key, value) { + + // The key already exists, we just need to update the value and splay on top + var pointer = this.items[key]; + + if (typeof pointer !== 'undefined') { + this.splayOnTop(pointer); + this.V[pointer] = value; + + return; + } + + // The cache is not yet full + if (this.size < this.capacity) { + pointer = this.size++; + } + + // Cache is full, we need to drop the last value + else { + pointer = this.tail; + this.tail = this.backward[pointer]; + delete this.items[this.K[pointer]]; + } + + // Storing key & value + this.items[key] = pointer; + this.K[pointer] = key; + this.V[pointer] = value; + + // Moving the item at the front of the list + this.forward[pointer] = this.head; + this.backward[this.head] = pointer; + this.head = pointer; +}; + +/** + * Method used to set the value for the given key in the cache + * + * @param {any} key - Key. + * @param {any} value - Value. + * @return {{evicted: boolean, key: any, value: any}} An object containing the + * key and value of an item that was overwritten or evicted in the set + * operation, as well as a boolean indicating whether it was evicted due to + * limited capacity. Return value is null if nothing was evicted or overwritten + * during the set operation. 
+ */ +LRUCache.prototype.setpop = function(key, value) { + var oldValue = null; + var oldKey = null; + // The key already exists, we just need to update the value and splay on top + var pointer = this.items[key]; + + if (typeof pointer !== 'undefined') { + this.splayOnTop(pointer); + oldValue = this.V[pointer]; + this.V[pointer] = value; + return {evicted: false, key: key, value: oldValue}; + } + + // The cache is not yet full + if (this.size < this.capacity) { + pointer = this.size++; + } + + // Cache is full, we need to drop the last value + else { + pointer = this.tail; + this.tail = this.backward[pointer]; + oldValue = this.V[pointer]; + oldKey = this.K[pointer]; + delete this.items[this.K[pointer]]; + } + + // Storing key & value + this.items[key] = pointer; + this.K[pointer] = key; + this.V[pointer] = value; + + // Moving the item at the front of the list + this.forward[pointer] = this.head; + this.backward[this.head] = pointer; + this.head = pointer; + + // Return object if eviction took place, otherwise return null + if (oldKey) { + return {evicted: true, key: oldKey, value: oldValue}; + } + else { + return null; + } +}; + +/** + * Method used to check whether the key exists in the cache. + * + * @param {any} key - Key. + * @return {boolean} + */ +LRUCache.prototype.has = function(key) { + return key in this.items; +}; + +/** + * Method used to get the value attached to the given key. Will move the + * related key to the front of the underlying linked list. + * + * @param {any} key - Key. + * @return {any} + */ +LRUCache.prototype.get = function(key) { + var pointer = this.items[key]; + + if (typeof pointer === 'undefined') + return; + + this.splayOnTop(pointer); + + return this.V[pointer]; +}; + +/** + * Method used to get the value attached to the given key. Does not modify + * the ordering of the underlying linked list. + * + * @param {any} key - Key. 
+ * @return {any} + */ +LRUCache.prototype.peek = function(key) { + var pointer = this.items[key]; + + if (typeof pointer === 'undefined') + return; + + return this.V[pointer]; +}; + +/** + * Method used to iterate over the cache's entries using a callback. + * + * @param {function} callback - Function to call for each item. + * @param {object} scope - Optional scope. + * @return {undefined} + */ +LRUCache.prototype.forEach = function(callback, scope) { + scope = arguments.length > 1 ? scope : this; + + var i = 0, + l = this.size; + + var pointer = this.head, + keys = this.K, + values = this.V, + forward = this.forward; + + while (i < l) { + + callback.call(scope, values[pointer], keys[pointer], this); + pointer = forward[pointer]; + + i++; + } +}; + +/** + * Method used to create an iterator over the cache's keys from most + * recently used to least recently used. + * + * @return {Iterator} + */ +LRUCache.prototype.keys = function() { + var i = 0, + l = this.size; + + var pointer = this.head, + keys = this.K, + forward = this.forward; + + return new Iterator(function() { + if (i >= l) + return {done: true}; + + var key = keys[pointer]; + + i++; + + if (i < l) + pointer = forward[pointer]; + + return { + done: false, + value: key + }; + }); +}; + +/** + * Method used to create an iterator over the cache's values from most + * recently used to least recently used. + * + * @return {Iterator} + */ +LRUCache.prototype.values = function() { + var i = 0, + l = this.size; + + var pointer = this.head, + values = this.V, + forward = this.forward; + + return new Iterator(function() { + if (i >= l) + return {done: true}; + + var value = values[pointer]; + + i++; + + if (i < l) + pointer = forward[pointer]; + + return { + done: false, + value: value + }; + }); +}; + +/** + * Method used to create an iterator over the cache's entries from most + * recently used to least recently used. 
+ * + * @return {Iterator} + */ +LRUCache.prototype.entries = function() { + var i = 0, + l = this.size; + + var pointer = this.head, + keys = this.K, + values = this.V, + forward = this.forward; + + return new Iterator(function() { + if (i >= l) + return {done: true}; + + var key = keys[pointer], + value = values[pointer]; + + i++; + + if (i < l) + pointer = forward[pointer]; + + return { + done: false, + value: [key, value] + }; + }); +}; + +/** + * Attaching the #.entries method to Symbol.iterator if possible. + */ +if (typeof Symbol !== 'undefined') + LRUCache.prototype[Symbol.iterator] = LRUCache.prototype.entries; + +/** + * Convenience known methods. + */ +LRUCache.prototype.inspect = function() { + var proxy = new Map(); + + var iterator = this.entries(), + step; + + while ((step = iterator.next(), !step.done)) + proxy.set(step.value[0], step.value[1]); + + // Trick so that node displays the name of the constructor + Object.defineProperty(proxy, 'constructor', { + value: LRUCache, + enumerable: false + }); + + return proxy; +}; + +if (typeof Symbol !== 'undefined') + LRUCache.prototype[Symbol.for('nodejs.util.inspect.custom')] = LRUCache.prototype.inspect; + +/** + * Static @.from function taking an arbitrary iterable & converting it into + * a structure. + * + * @param {Iterable} iterable - Target iterable. + * @param {function} Keys - Array class for storing keys. + * @param {function} Values - Array class for storing values. + * @param {number} capacity - Cache's capacity. + * @return {LRUCache} + */ +LRUCache.from = function(iterable, Keys, Values, capacity) { + if (arguments.length < 2) { + capacity = iterables.guessLength(iterable); + + if (typeof capacity !== 'number') + throw new Error('mnemonist/lru-cache.from: could not guess iterable length. 
Please provide desired capacity as last argument.'); + } + else if (arguments.length === 2) { + capacity = Keys; + Keys = null; + Values = null; + } + + var cache = new LRUCache(Keys, Values, capacity); + + forEach(iterable, function(value, key) { + cache.set(key, value); + }); + + return cache; +}; + +/** + * Exporting. + */ +module.exports = LRUCache; diff --git a/amplify/functions/fetchDocuments/node_modules/mnemonist/lru-map.d.ts b/amplify/functions/fetchDocuments/node_modules/mnemonist/lru-map.d.ts new file mode 100644 index 0000000..0943543 --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/mnemonist/lru-map.d.ts @@ -0,0 +1,43 @@ +/** + * Mnemonist LRUMap Typings + * ========================= + */ +import {IArrayLikeConstructor} from './utils/types'; + +export default class LRUMap implements Iterable<[K, V]> { + + // Members + capacity: number; + size: number; + + // Constructor + constructor(capacity: number); + constructor(KeyArrayClass: IArrayLikeConstructor, ValueArrayClass: IArrayLikeConstructor, capacity: number); + + // Methods + clear(): void; + set(key: K, value: V): this; + setpop(key: K, value: V): {evicted: boolean, key: K, value: V}; + get(key: K): V | undefined; + peek(key: K): V | undefined; + has(key: K): boolean; + forEach(callback: (value: V, key: K, cache: this) => void, scope?: any): void; + keys(): IterableIterator; + values(): IterableIterator; + entries(): IterableIterator<[K, V]>; + [Symbol.iterator](): IterableIterator<[K, V]>; + inspect(): any; + + // Statics + static from( + iterable: Iterable<[I, J]> | {[key: string]: J}, + KeyArrayClass: IArrayLikeConstructor, + ValueArrayClass: IArrayLikeConstructor, + capacity?: number + ): LRUMap; + + static from( + iterable: Iterable<[I, J]> | {[key: string]: J}, + capacity?: number + ): LRUMap; +} diff --git a/amplify/functions/fetchDocuments/node_modules/mnemonist/lru-map.js b/amplify/functions/fetchDocuments/node_modules/mnemonist/lru-map.js new file mode 100644 index 
0000000..26afe27 --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/mnemonist/lru-map.js @@ -0,0 +1,258 @@ +/** + * Mnemonist LRUMap + * ================= + * + * Variant of the LRUCache class that leverages an ES6 Map instead of an object. + * It might be faster for some use case but it is still hard to understand + * when a Map can outperform an object in v8. + */ +var LRUCache = require('./lru-cache.js'), + forEach = require('obliterator/foreach'), + typed = require('./utils/typed-arrays.js'), + iterables = require('./utils/iterables.js'); + +/** + * LRUMap. + * + * @constructor + * @param {function} Keys - Array class for storing keys. + * @param {function} Values - Array class for storing values. + * @param {number} capacity - Desired capacity. + */ +function LRUMap(Keys, Values, capacity) { + if (arguments.length < 2) { + capacity = Keys; + Keys = null; + Values = null; + } + + this.capacity = capacity; + + if (typeof this.capacity !== 'number' || this.capacity <= 0) + throw new Error('mnemonist/lru-map: capacity should be positive number.'); + + var PointerArray = typed.getPointerArray(capacity); + + this.forward = new PointerArray(capacity); + this.backward = new PointerArray(capacity); + this.K = typeof Keys === 'function' ? new Keys(capacity) : new Array(capacity); + this.V = typeof Values === 'function' ? new Values(capacity) : new Array(capacity); + + // Properties + this.size = 0; + this.head = 0; + this.tail = 0; + this.items = new Map(); +} + +/** + * Method used to clear the structure. + * + * @return {undefined} + */ +LRUMap.prototype.clear = function() { + this.size = 0; + this.head = 0; + this.tail = 0; + this.items.clear(); +}; + +/** + * Method used to set the value for the given key in the cache. + * + * @param {any} key - Key. + * @param {any} value - Value. 
+ * @return {undefined} + */ +LRUMap.prototype.set = function(key, value) { + + // The key already exists, we just need to update the value and splay on top + var pointer = this.items.get(key); + + if (typeof pointer !== 'undefined') { + this.splayOnTop(pointer); + this.V[pointer] = value; + + return; + } + + // The cache is not yet full + if (this.size < this.capacity) { + pointer = this.size++; + } + + // Cache is full, we need to drop the last value + else { + pointer = this.tail; + this.tail = this.backward[pointer]; + this.items.delete(this.K[pointer]); + } + + // Storing key & value + this.items.set(key, pointer); + this.K[pointer] = key; + this.V[pointer] = value; + + // Moving the item at the front of the list + this.forward[pointer] = this.head; + this.backward[this.head] = pointer; + this.head = pointer; +}; + +/** + * Method used to set the value for the given key in the cache. + * + * @param {any} key - Key. + * @param {any} value - Value. + * @return {{evicted: boolean, key: any, value: any}} An object containing the + * key and value of an item that was overwritten or evicted in the set + * operation, as well as a boolean indicating whether it was evicted due to + * limited capacity. Return value is null if nothing was evicted or overwritten + * during the set operation. 
+ */ +LRUMap.prototype.setpop = function(key, value) { + var oldValue = null; + var oldKey = null; + // The key already exists, we just need to update the value and splay on top + var pointer = this.items.get(key); + + if (typeof pointer !== 'undefined') { + this.splayOnTop(pointer); + oldValue = this.V[pointer]; + this.V[pointer] = value; + return {evicted: false, key: key, value: oldValue}; + } + + // The cache is not yet full + if (this.size < this.capacity) { + pointer = this.size++; + } + + // Cache is full, we need to drop the last value + else { + pointer = this.tail; + this.tail = this.backward[pointer]; + oldValue = this.V[pointer]; + oldKey = this.K[pointer]; + this.items.delete(this.K[pointer]); + } + + // Storing key & value + this.items.set(key, pointer); + this.K[pointer] = key; + this.V[pointer] = value; + + // Moving the item at the front of the list + this.forward[pointer] = this.head; + this.backward[this.head] = pointer; + this.head = pointer; + + // Return object if eviction took place, otherwise return null + if (oldKey) { + return {evicted: true, key: oldKey, value: oldValue}; + } + else { + return null; + } +}; + +/** + * Method used to check whether the key exists in the cache. + * + * @param {any} key - Key. + * @return {boolean} + */ +LRUMap.prototype.has = function(key) { + return this.items.has(key); +}; + +/** + * Method used to get the value attached to the given key. Will move the + * related key to the front of the underlying linked list. + * + * @param {any} key - Key. + * @return {any} + */ +LRUMap.prototype.get = function(key) { + var pointer = this.items.get(key); + + if (typeof pointer === 'undefined') + return; + + this.splayOnTop(pointer); + + return this.V[pointer]; +}; + +/** + * Method used to get the value attached to the given key. Does not modify + * the ordering of the underlying linked list. + * + * @param {any} key - Key. 
+ * @return {any} + */ +LRUMap.prototype.peek = function(key) { + var pointer = this.items.get(key); + + if (typeof pointer === 'undefined') + return; + + return this.V[pointer]; +}; + +/** + * Methods that can be reused as-is from LRUCache. + */ +LRUMap.prototype.splayOnTop = LRUCache.prototype.splayOnTop; +LRUMap.prototype.forEach = LRUCache.prototype.forEach; +LRUMap.prototype.keys = LRUCache.prototype.keys; +LRUMap.prototype.values = LRUCache.prototype.values; +LRUMap.prototype.entries = LRUCache.prototype.entries; + +/** + * Attaching the #.entries method to Symbol.iterator if possible. + */ +if (typeof Symbol !== 'undefined') + LRUMap.prototype[Symbol.iterator] = LRUMap.prototype.entries; + +/** + * Convenience known methods. + */ +LRUMap.prototype.inspect = LRUCache.prototype.inspect; + +/** + * Static @.from function taking an arbitrary iterable & converting it into + * a structure. + * + * @param {Iterable} iterable - Target iterable. + * @param {function} Keys - Array class for storing keys. + * @param {function} Values - Array class for storing values. + * @param {number} capacity - Cache's capacity. + * @return {LRUMap} + */ +LRUMap.from = function(iterable, Keys, Values, capacity) { + if (arguments.length < 2) { + capacity = iterables.guessLength(iterable); + + if (typeof capacity !== 'number') + throw new Error('mnemonist/lru-cache.from: could not guess iterable length. Please provide desired capacity as last argument.'); + } + else if (arguments.length === 2) { + capacity = Keys; + Keys = null; + Values = null; + } + + var cache = new LRUMap(Keys, Values, capacity); + + forEach(iterable, function(value, key) { + cache.set(key, value); + }); + + return cache; +}; + +/** + * Exporting. 
+ */ +module.exports = LRUMap; diff --git a/amplify/functions/fetchDocuments/node_modules/mnemonist/multi-array.js b/amplify/functions/fetchDocuments/node_modules/mnemonist/multi-array.js new file mode 100644 index 0000000..c165b55 --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/mnemonist/multi-array.js @@ -0,0 +1,447 @@ +/** + * Mnemonist MultiArray + * ===================== + * + * Memory-efficient representation of an array of arrays. In JavaScript and + * most high-level languages, creating objects has a cost. This implementation + * is therefore able to represent nested containers without needing to create + * objects. This works by storing singly linked lists in a single flat array. + * However, this means that this structure comes with some read/write + * overhead but consume very few memory. + * + * This structure should be particularly suited to indices that will need to + * merge arrays anyway when queried and that are quite heavily hit (such as + * an inverted index or a quad tree). + * + * Note: the implementation does not require to keep track of head pointers + * but this comes with some advantages such as not needing to offset pointers + * by 1 and being able to perform in-order iteration. This remains quite lean + * in memory and does not hinder performance whatsoever. + */ +var typed = require('./utils/typed-arrays.js'), + Vector = require('./vector.js'), + Iterator = require('obliterator/iterator'); + +var PointerVector = Vector.PointerVector; + +/** + * MultiArray. + * + * @constructor + */ +function MultiArray(Container, capacity) { + this.capacity = capacity || null; + this.Container = Container || Array; + this.hasFixedCapacity = this.capacity !== null; + + if (typeof this.Container !== 'function') + throw new Error('mnemonist/multi-array.constructor: container should be a function.'); + + this.clear(); +} + +/** + * Method used to clear the structure. 
+ * + * @return {undefined} + */ +MultiArray.prototype.clear = function() { + + // Properties + this.size = 0; + this.dimension = 0; + + // NOTE: #.heads, #.tails & #.lengths have a length equal to the dimension of + // the array, while #.pointers has a length equal to its size. + + // Storage + if (this.hasFixedCapacity) { + var capacity = this.capacity; + + var PointerArray = typed.getPointerArray(capacity); + + var policy = function(currentCapacity) { + var newCapacity = Math.max(1, Math.ceil(currentCapacity * 1.5)); + + // Clamping max allocation + return Math.min(newCapacity, capacity); + }; + + var initialCapacity = Math.max(8, capacity); + + this.tails = new Vector(PointerArray, {policy: policy, initialCapacity: initialCapacity}); + this.lengths = new Vector(PointerArray, {policy: policy, initialCapacity: initialCapacity}); + this.pointers = new PointerArray(capacity); + + this.items = new this.Container(capacity); + } + else { + + this.tails = new PointerVector(); + this.lengths = new PointerVector(); + this.pointers = new PointerVector(); + + this.items = new this.Container(); + } +}; + +/** + * Method used to add an item to the container at the given index. + * + * @param {number} index - Index of the container. + * @param {any} item - Item to add. + * @return {MultiArray} + */ +MultiArray.prototype.set = function(index, item) { + var pointer = this.size; + + // TODO: this can be factorized! + + if (this.hasFixedCapacity) { + + if (index >= this.capacity || this.size === this.capacity) + throw new Error('mnemonist/multi-array: attempting to allocate further than capacity.'); + + // This linked list does not exist yet. 
Let's create it + if (index >= this.dimension) { + + // We may be required to grow the vectors + this.dimension = index + 1; + this.tails.grow(this.dimension); + this.lengths.grow(this.dimension); + + this.tails.resize(this.dimension); + this.lengths.resize(this.dimension); + + this.lengths.array[index] = 1; + } + + // Appending to the list + else { + this.pointers[pointer] = this.tails.array[index]; + this.lengths.array[index]++; + } + + this.tails.array[index] = pointer; + this.items[pointer] = item; + } + else { + + // This linked list does not exist yet. Let's create it + if (index >= this.dimension) { + + // We may be required to grow the vectors + this.dimension = index + 1; + this.tails.grow(this.dimension); + this.lengths.grow(this.dimension); + + this.tails.resize(this.dimension); + this.lengths.resize(this.dimension); + + this.pointers.push(0); + this.lengths.array[index] = 1; + } + + // Appending to the list + else { + this.pointers.push(this.tails.array[index]); + this.lengths.array[index]++; + } + + this.tails.array[index] = pointer; + this.items.push(item); + } + + this.size++; + + return this; +}; + +/** + * Method used to push a new container holding the given value. + * Note: it might be useful to make this function able to take an iterable + * or variadic someday. For the time being it's just a convenience for + * implementing compact multi maps and such. + * + * @param {any} item - Item to add. 
+ * @return {MultiArray} + */ +MultiArray.prototype.push = function(item) { + var pointer = this.size, + index = this.dimension; + + if (this.hasFixedCapacity) { + + if (index >= this.capacity || this.size === this.capacity) + throw new Error('mnemonist/multi-array: attempting to allocate further than capacity.'); + + this.items[pointer] = item; + } + else { + this.items.push(item); + this.pointers.push(0); + } + + this.lengths.push(1); + this.tails.push(pointer); + + this.dimension++; + this.size++; + + return this; +}; + +/** + * Method used to get the desired container. + * + * @param {number} index - Index of the container. + * @return {array} + */ +MultiArray.prototype.get = function(index) { + if (index >= this.dimension) + return; + + var pointers = this.hasFixedCapacity ? this.pointers : this.pointers.array; + + var pointer = this.tails.array[index], + length = this.lengths.array[index], + i = length; + + var array = new this.Container(length); + + while (i !== 0) { + array[--i] = this.items[pointer]; + pointer = pointers[pointer]; + } + + return array; +}; + +/** + * Method used to check if a container exists at the given index. + * + * @param {number} index - Index of the container. + * @return {boolean} + */ +MultiArray.prototype.has = function(index) { + return index < this.dimension; +}; + +/** + * Method used to get the size of the container stored at given index. + * + * @param {number} index - Index of the container. + * @return {number} + */ +MultiArray.prototype.multiplicity = function(index) { + if (index >= this.dimension) + return 0; + + return this.lengths.array[index]; +}; +MultiArray.prototype.count = MultiArray.prototype.multiplicity; + +/** + * Method used to iterate over the structure's containers. 
+ * + * @return {Iterator} + */ +MultiArray.prototype.containers = function() { + var self = this, + l = this.dimension, + i = 0; + + return new Iterator(function() { + if (i >= l) + return {done: true}; + + return {value: self.get(i++)}; + }); +}; + +/** + * Method used to iterate over the structure's associations. + * + * @return {Iterator} + */ +MultiArray.prototype.associations = function() { + var self = this, + l = this.dimension, + i = 0; + + return new Iterator(function() { + if (i >= l) + return {done: true}; + + var data = {value: [i, self.get(i)]}; + + i++; + + return data; + }); +}; + +/** + * Method used to iterate over the structure's values in the global insertion + * order. + * + * @param {number} [index] - Optionally, iterate over the values of a single + * container at index. + * @return {Iterator} + */ +MultiArray.prototype.values = function(index) { + var items = this.items, + length, + i = 0; + + if (typeof index === 'number') { + if (index >= this.dimension) + return Iterator.empty(); + + length = this.lengths.array[index]; + items = this.items; + + var pointers = this.hasFixedCapacity ? this.pointers : this.pointers.array; + + if (length === 0) + return Iterator.empty(); + + var pointer = this.tails.array[index], + v; + + return new Iterator(function() { + if (i === length) + return {done: true}; + + i++; + v = items[pointer]; + pointer = pointers[pointer]; + + return {done: false, value: v}; + }); + } + + length = this.size; + + return new Iterator(function() { + if (i >= length) + return {done: true}; + + return {done: false, value: items[i++]}; + }); +}; + +/** + * Method used to iterate over the structure's entries. + * + * @return {Iterator} + */ +MultiArray.prototype.entries = function() { + if (this.size === 0) + return Iterator.empty(); + + var inContainer = false, + pointer, + length, + i = 0, + j = 0, + l = this.dimension, + v; + + var pointers = this.hasFixedCapacity ? 
this.pointers : this.pointers.array, + items = this.items, + tails = this.tails.array, + lengths = this.lengths.array; + + var iterator = new Iterator(function next() { + if (!inContainer) { + + if (i >= l) + return {done: true}; + + length = lengths[i]; + pointer = tails[i]; + i++; + + if (length === 0) + return next(); + + j = 0; + inContainer = true; + } + + if (j === length) { + inContainer = false; + return next(); + } + + v = items[pointer]; + + // TODO: guard for out-of-bounds + pointer = pointers[pointer]; + + j++; + + return { + done: false, + value: [i - 1, v] + }; + }); + + return iterator; +}; + +/** + * Method used to iterate over the structure's keys. + * + * @return {Iterator} + */ +MultiArray.prototype.keys = function() { + var i = 0, + l = this.dimension; + + return new Iterator(function() { + if (i >= l) + return {done: true}; + + return {done: false, value: i++}; + }); +}; + +/** + * Convenience known methods. + */ +MultiArray.prototype.inspect = function() { + var proxy = new Array(this.dimension), + i, + l; + + for (i = 0, l = this.dimension; i < l; i++) + proxy[i] = Array.from(this.get(i)); + + if (this.hasFixedCapacity) { + proxy.type = this.Container.name; + proxy.capacity = this.capacity; + } + + proxy.size = this.size; + proxy.dimension = this.dimension; + + // Trick so that node displays the name of the constructor + Object.defineProperty(proxy, 'constructor', { + value: MultiArray, + enumerable: false + }); + + return proxy; +}; + +if (typeof Symbol !== 'undefined') + MultiArray.prototype[Symbol.for('nodejs.util.inspect.custom')] = MultiArray.prototype.inspect; + +// TODO: .from + +/** + * Exporting. 
+ */ +module.exports = MultiArray; diff --git a/amplify/functions/fetchDocuments/node_modules/mnemonist/multi-map.d.ts b/amplify/functions/fetchDocuments/node_modules/mnemonist/multi-map.d.ts new file mode 100644 index 0000000..e4c8543 --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/mnemonist/multi-map.d.ts @@ -0,0 +1,47 @@ +/** + * Mnemonist MultiMap Typings + * =========================== + */ + +interface MultiMap = V[]> extends Iterable<[K, V]> { + + // Members + dimension: number; + size: number; + + // Methods + clear(): void; + set(key: K, value: V): this; + delete(key: K): boolean; + remove(key: K, value: V): boolean; + has(key: K): boolean; + get(key: K): C | undefined; + multiplicity(key: K): number; + forEach(callback: (value: V, key: K, map: this) => void, scope?: any): void; + forEachAssociation(callback: (value: C, key: K, map: this) => void, scope?: any): void; + keys(): IterableIterator; + values(): IterableIterator; + entries(): IterableIterator<[K, V]>; + containers(): IterableIterator; + associations(): IterableIterator<[K, C]>; + [Symbol.iterator](): IterableIterator<[K, V]>; + inspect(): any; + toJSON(): any; +} + +interface MultiMapConstructor { + new (container: SetConstructor): MultiMap>; + new (container?: ArrayConstructor): MultiMap; + + from( + iterable: Iterable<[K, V]> | {[key: string]: V}, + Container: SetConstructor + ): MultiMap>; + from( + iterable: Iterable<[K, V]> | {[key: string]: V}, + Container?: ArrayConstructor + ): MultiMap; +} + +declare const MultiMap: MultiMapConstructor; +export default MultiMap; \ No newline at end of file diff --git a/amplify/functions/fetchDocuments/node_modules/mnemonist/multi-map.js b/amplify/functions/fetchDocuments/node_modules/mnemonist/multi-map.js new file mode 100644 index 0000000..0b36e15 --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/mnemonist/multi-map.js @@ -0,0 +1,408 @@ +/** + * Mnemonist MultiMap + * =================== + * + * Implementation of a 
MultiMap with custom container. + */ +var Iterator = require('obliterator/iterator'), + forEach = require('obliterator/foreach'); + +/** + * MultiMap. + * + * @constructor + */ +function MultiMap(Container) { + + this.Container = Container || Array; + this.items = new Map(); + this.clear(); + + Object.defineProperty(this.items, 'constructor', { + value: MultiMap, + enumerable: false + }); +} + +/** + * Method used to clear the structure. + * + * @return {undefined} + */ +MultiMap.prototype.clear = function() { + + // Properties + this.size = 0; + this.dimension = 0; + this.items.clear(); +}; + +/** + * Method used to set a value. + * + * @param {any} key - Key. + * @param {any} value - Value to add. + * @return {MultiMap} + */ +MultiMap.prototype.set = function(key, value) { + var container = this.items.get(key), + sizeBefore; + + if (!container) { + this.dimension++; + container = new this.Container(); + this.items.set(key, container); + } + + if (this.Container === Set) { + sizeBefore = container.size; + container.add(value); + + if (sizeBefore < container.size) + this.size++; + } + else { + container.push(value); + this.size++; + } + + return this; +}; + +/** + * Method used to delete the given key. + * + * @param {any} key - Key to delete. + * @return {boolean} + */ +MultiMap.prototype.delete = function(key) { + var container = this.items.get(key); + + if (!container) + return false; + + this.size -= (this.Container === Set ? container.size : container.length); + this.dimension--; + this.items.delete(key); + + return true; +}; + +/** + * Method used to delete the remove an item in the container stored at the + * given key. + * + * @param {any} key - Key to delete. 
+ * @return {boolean} + */ +MultiMap.prototype.remove = function(key, value) { + var container = this.items.get(key), + wasDeleted, + index; + + if (!container) + return false; + + if (this.Container === Set) { + wasDeleted = container.delete(value); + + if (wasDeleted) + this.size--; + + if (container.size === 0) { + this.items.delete(key); + this.dimension--; + } + + return wasDeleted; + } + else { + index = container.indexOf(value); + + if (index === -1) + return false; + + this.size--; + + if (container.length === 1) { + this.items.delete(key); + this.dimension--; + + return true; + } + + container.splice(index, 1); + + return true; + } +}; + +/** + * Method used to return whether the given keys exists in the map. + * + * @param {any} key - Key to check. + * @return {boolean} + */ +MultiMap.prototype.has = function(key) { + return this.items.has(key); +}; + +/** + * Method used to return the container stored at the given key or `undefined`. + * + * @param {any} key - Key to get. + * @return {boolean} + */ +MultiMap.prototype.get = function(key) { + return this.items.get(key); +}; + +/** + * Method used to return the multiplicity of the given key, meaning the number + * of times it is set, or, more trivially, the size of the attached container. + * + * @param {any} key - Key to check. + * @return {number} + */ +MultiMap.prototype.multiplicity = function(key) { + var container = this.items.get(key); + + if (typeof container === 'undefined') + return 0; + + return this.Container === Set ? container.size : container.length; +}; +MultiMap.prototype.count = MultiMap.prototype.multiplicity; + +/** + * Method used to iterate over each of the key/value pairs. + * + * @param {function} callback - Function to call for each item. + * @param {object} scope - Optional scope. + * @return {undefined} + */ +MultiMap.prototype.forEach = function(callback, scope) { + scope = arguments.length > 1 ? 
scope : this; + + // Inner iteration function is created here to avoid creating it in the loop + var key; + function inner(value) { + callback.call(scope, value, key); + } + + this.items.forEach(function(container, k) { + key = k; + container.forEach(inner); + }); +}; + +/** + * Method used to iterate over each of the associations. + * + * @param {function} callback - Function to call for each item. + * @param {object} scope - Optional scope. + * @return {undefined} + */ +MultiMap.prototype.forEachAssociation = function(callback, scope) { + scope = arguments.length > 1 ? scope : this; + + this.items.forEach(callback, scope); +}; + +/** + * Method returning an iterator over the map's keys. + * + * @return {Iterator} + */ +MultiMap.prototype.keys = function() { + return this.items.keys(); +}; + +/** + * Method returning an iterator over the map's keys. + * + * @return {Iterator} + */ +MultiMap.prototype.values = function() { + var iterator = this.items.values(), + inContainer = false, + countainer, + step, + i, + l; + + if (this.Container === Set) + return new Iterator(function next() { + if (!inContainer) { + step = iterator.next(); + + if (step.done) + return {done: true}; + + inContainer = true; + countainer = step.value.values(); + } + + step = countainer.next(); + + if (step.done) { + inContainer = false; + return next(); + } + + return { + done: false, + value: step.value + }; + }); + + return new Iterator(function next() { + if (!inContainer) { + step = iterator.next(); + + if (step.done) + return {done: true}; + + inContainer = true; + countainer = step.value; + i = 0; + l = countainer.length; + } + + if (i >= l) { + inContainer = false; + return next(); + } + + return { + done: false, + value: countainer[i++] + }; + }); +}; + +/** + * Method returning an iterator over the map's entries. 
+ * + * @return {Iterator} + */ +MultiMap.prototype.entries = function() { + var iterator = this.items.entries(), + inContainer = false, + countainer, + step, + key, + i, + l; + + if (this.Container === Set) + return new Iterator(function next() { + if (!inContainer) { + step = iterator.next(); + + if (step.done) + return {done: true}; + + inContainer = true; + key = step.value[0]; + countainer = step.value[1].values(); + } + + step = countainer.next(); + + if (step.done) { + inContainer = false; + return next(); + } + + return { + done: false, + value: [key, step.value] + }; + }); + + return new Iterator(function next() { + if (!inContainer) { + step = iterator.next(); + + if (step.done) + return {done: true}; + + inContainer = true; + key = step.value[0]; + countainer = step.value[1]; + i = 0; + l = countainer.length; + } + + if (i >= l) { + inContainer = false; + return next(); + } + + return { + done: false, + value: [key, countainer[i++]] + }; + }); +}; + +/** + * Method returning an iterator over the map's containers. + * + * @return {Iterator} + */ +MultiMap.prototype.containers = function() { + return this.items.values(); +}; + +/** + * Method returning an iterator over the map's associations. + * + * @return {Iterator} + */ +MultiMap.prototype.associations = function() { + return this.items.entries(); +}; + +/** + * Attaching the #.entries method to Symbol.iterator if possible. + */ +if (typeof Symbol !== 'undefined') + MultiMap.prototype[Symbol.iterator] = MultiMap.prototype.entries; + +/** + * Convenience known methods. + */ +MultiMap.prototype.inspect = function() { + return this.items; +}; + +if (typeof Symbol !== 'undefined') + MultiMap.prototype[Symbol.for('nodejs.util.inspect.custom')] = MultiMap.prototype.inspect; +MultiMap.prototype.toJSON = function() { + return this.items; +}; + +/** + * Static @.from function taking an arbitrary iterable & converting it into + * a structure. + * + * @param {Iterable} iterable - Target iterable. 
+ * @param {Class} Container - Container. + * @return {MultiMap} + */ +MultiMap.from = function(iterable, Container) { + var map = new MultiMap(Container); + + forEach(iterable, function(value, key) { + map.set(key, value); + }); + + return map; +}; + +/** + * Exporting. + */ +module.exports = MultiMap; diff --git a/amplify/functions/fetchDocuments/node_modules/mnemonist/multi-set.d.ts b/amplify/functions/fetchDocuments/node_modules/mnemonist/multi-set.d.ts new file mode 100644 index 0000000..0e40bc4 --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/mnemonist/multi-set.d.ts @@ -0,0 +1,37 @@ +/** + * Mnemonist MultiSet Typings + * =========================== + */ +export default class MultiSet implements Iterable { + + // Members + dimension: number; + size: number; + + // Methods + clear(): void; + add(key: K, count?: number): this; + set(key: K, count: number): this; + has(key: K): boolean; + delete(key: K): boolean; + remove(key: K, count?: number): void; + edit(a: K, b: K): this; + multiplicity(key: K): number; + count(key: K): number; + get(key: K): number; + frequency(key: K): number; + top(n: number): Array<[K, number]>; + forEach(callback: (value: K, key: K, set: this) => void, scope?: any): void; + forEachMultiplicity(callback: (value: number, key: K, set: this) => void, scope?: any): void; + keys(): IterableIterator; + values(): IterableIterator; + multiplicities(): IterableIterator<[K, number]>; + [Symbol.iterator](): IterableIterator; + inspect(): any; + toJSON(): any; + + // Statics + static from(iterable: Iterable | {[key: string]: I}): MultiSet; + static isSubset(a: MultiSet, b: MultiSet): boolean; + static isSuperset(a: MultiSet, b: MultiSet): boolean; +} diff --git a/amplify/functions/fetchDocuments/node_modules/mnemonist/multi-set.js b/amplify/functions/fetchDocuments/node_modules/mnemonist/multi-set.js new file mode 100644 index 0000000..3206af7 --- /dev/null +++ 
b/amplify/functions/fetchDocuments/node_modules/mnemonist/multi-set.js @@ -0,0 +1,440 @@ +/** + * Mnemonist MultiSet + * ==================== + * + * JavaScript implementation of a MultiSet. + */ +var Iterator = require('obliterator/iterator'), + forEach = require('obliterator/foreach'), + FixedReverseHeap = require('./fixed-reverse-heap.js'); + +/** + * Helpers. + */ +var MULTISET_ITEM_COMPARATOR = function(a, b) { + if (a[1] > b[1]) + return -1; + if (a[1] < b[1]) + return 1; + + return 0; +}; + +// TODO: helper functions: union, intersection, sum, difference, subtract + +/** + * MultiSet. + * + * @constructor + */ +function MultiSet() { + this.items = new Map(); + + Object.defineProperty(this.items, 'constructor', { + value: MultiSet, + enumerable: false + }); + + this.clear(); +} + +/** + * Method used to clear the structure. + * + * @return {undefined} + */ +MultiSet.prototype.clear = function() { + + // Properties + this.size = 0; + this.dimension = 0; + this.items.clear(); +}; + +/** + * Method used to add an item to the set. + * + * @param {any} item - Item to add. + * @param {number} count - Optional count. + * @return {MultiSet} + */ +MultiSet.prototype.add = function(item, count) { + if (count === 0) + return this; + + if (count < 0) + return this.remove(item, -count); + + count = count || 1; + + if (typeof count !== 'number') + throw new Error('mnemonist/multi-set.add: given count should be a number.'); + + this.size += count; + + const currentCount = this.items.get(item); + + if (currentCount === undefined) + this.dimension++; + else + count += currentCount; + + this.items.set(item, count); + + return this; +}; + +/** + * Method used to set the multiplicity of an item in the set. + * + * @param {any} item - Target item. + * @param {number} count - Desired multiplicity. 
+ * @return {MultiSet} + */ +MultiSet.prototype.set = function(item, count) { + var currentCount; + + if (typeof count !== 'number') + throw new Error('mnemonist/multi-set.set: given count should be a number.'); + + // Setting an item to 0 or to a negative number means deleting it from the set + if (count <= 0) { + currentCount = this.items.get(item); + + if (typeof currentCount !== 'undefined') { + this.size -= currentCount; + this.dimension--; + } + + this.items.delete(item); + return this; + } + + count = count || 1; + + currentCount = this.items.get(item); + + if (typeof currentCount === 'number') { + this.items.set(item, currentCount + count); + } + else { + this.dimension++; + this.items.set(item, count); + } + + this.size += count; + + return this; +}; + +/** + * Method used to return whether the item exists in the set. + * + * @param {any} item - Item to check. + * @return {boolan} + */ +MultiSet.prototype.has = function(item) { + return this.items.has(item); +}; + +/** + * Method used to delete an item from the set. + * + * @param {any} item - Item to delete. + * @return {boolan} + */ +MultiSet.prototype.delete = function(item) { + var count = this.items.get(item); + + if (count === 0) + return false; + + this.size -= count; + this.dimension--; + this.items.delete(item); + + return true; +}; + +/** + * Method used to remove an item from the set. + * + * @param {any} item - Item to delete. + * @param {number} count - Optional count. 
+ * @return {undefined} + */ +MultiSet.prototype.remove = function(item, count) { + if (count === 0) + return; + + if (count < 0) + return this.add(item, -count); + + count = count || 1; + + if (typeof count !== 'number') + throw new Error('mnemonist/multi-set.remove: given count should be a number.'); + + var currentCount = this.multiplicity(item), + newCount = Math.max(0, currentCount - count); + + if (newCount === 0) { + this.delete(item); + } + else { + this.items.set(item, newCount); + this.size -= (currentCount - newCount); + } + + return; +}; + +/** + * Method used to change a key into another one, merging counts if the target + * key already exists. + * + * @param {any} a - From key. + * @param {any} b - To key. + * @return {MultiSet} + */ +MultiSet.prototype.edit = function(a, b) { + var am = this.multiplicity(a); + + // If a does not exist in the set, we can stop right there + if (am === 0) + return; + + var bm = this.multiplicity(b); + + this.items.set(b, am + bm); + this.items.delete(a); + + return this; +}; + +/** + * Method used to return the multiplicity of the given item. + * + * @param {any} item - Item to get. + * @return {number} + */ +MultiSet.prototype.multiplicity = function(item) { + var count = this.items.get(item); + + if (typeof count === 'undefined') + return 0; + + return count; +}; +MultiSet.prototype.get = MultiSet.prototype.multiplicity; +MultiSet.prototype.count = MultiSet.prototype.multiplicity; + +/** + * Method used to return the frequency of the given item in the set. + * + * @param {any} item - Item to get. + * @return {number} + */ +MultiSet.prototype.frequency = function(item) { + if (this.size === 0) + return 0; + + var count = this.multiplicity(item); + + return count / this.size; +}; + +/** + * Method used to return the n most common items from the set. + * + * @param {number} n - Number of items to retrieve. 
+ * @return {array} + */ +MultiSet.prototype.top = function(n) { + if (typeof n !== 'number' || n <= 0) + throw new Error('mnemonist/multi-set.top: n must be a number > 0.'); + + var heap = new FixedReverseHeap(Array, MULTISET_ITEM_COMPARATOR, n); + + var iterator = this.items.entries(), + step; + + while ((step = iterator.next(), !step.done)) + heap.push(step.value); + + return heap.consume(); +}; + +/** + * Method used to iterate over the set's values. + * + * @param {function} callback - Function to call for each item. + * @param {object} scope - Optional scope. + * @return {undefined} + */ +MultiSet.prototype.forEach = function(callback, scope) { + scope = arguments.length > 1 ? scope : this; + + var i; + + this.items.forEach(function(multiplicity, value) { + + for (i = 0; i < multiplicity; i++) + callback.call(scope, value, value); + }); +}; + +/** + * Method used to iterate over the set's multiplicities. + * + * @param {function} callback - Function to call for each multiplicity. + * @param {object} scope - Optional scope. + * @return {undefined} + */ +MultiSet.prototype.forEachMultiplicity = function(callback, scope) { + scope = arguments.length > 1 ? scope : this; + + this.items.forEach(callback, scope); +}; + +/** + * Method returning an iterator over the set's keys. I.e. its unique values, + * in a sense. + * + * @return {Iterator} + */ +MultiSet.prototype.keys = function() { + return this.items.keys(); +}; + +/** + * Method returning an iterator over the set's values. 
+ * + * @return {Iterator} + */ +MultiSet.prototype.values = function() { + var iterator = this.items.entries(), + inContainer = false, + step, + value, + multiplicity, + i; + + return new Iterator(function next() { + if (!inContainer) { + step = iterator.next(); + + if (step.done) + return {done: true}; + + inContainer = true; + value = step.value[0]; + multiplicity = step.value[1]; + i = 0; + } + + if (i >= multiplicity) { + inContainer = false; + return next(); + } + + i++; + + return { + done: false, + value: value + }; + }); +}; + +/** + * Method returning an iterator over the set's multiplicities. + * + * @return {Iterator} + */ +MultiSet.prototype.multiplicities = function() { + return this.items.entries(); +}; + +/** + * Attaching the #.entries method to Symbol.iterator if possible. + */ +if (typeof Symbol !== 'undefined') + MultiSet.prototype[Symbol.iterator] = MultiSet.prototype.values; + +/** + * Convenience known methods. + */ +MultiSet.prototype.inspect = function() { + return this.items; +}; + +if (typeof Symbol !== 'undefined') + MultiSet.prototype[Symbol.for('nodejs.util.inspect.custom')] = MultiSet.prototype.inspect; +MultiSet.prototype.toJSON = function() { + return this.items; +}; + +/** + * Static @.from function taking an arbitrary iterable & converting it into + * a structure. + * + * @param {Iterable} iterable - Target iterable. + * @return {MultiSet} + */ +MultiSet.from = function(iterable) { + var set = new MultiSet(); + + forEach(iterable, function(value) { + set.add(value); + }); + + return set; +}; + +/** + * Function returning whether the multiset A is a subset of the multiset B. + * + * @param {MultiSet} A - First set. + * @param {MultiSet} B - Second set. 
+ * @return {boolean} + */ +MultiSet.isSubset = function(A, B) { + var iterator = A.multiplicities(), + step, + key, + mA; + + // Shortcuts + if (A === B) + return true; + + if (A.dimension > B.dimension) + return false; + + while ((step = iterator.next(), !step.done)) { + key = step.value[0]; + mA = step.value[1]; + + if (B.multiplicity(key) < mA) + return false; + } + + return true; +}; + +/** + * Function returning whether the multiset A is a superset of the multiset B. + * + * @param {MultiSet} A - First set. + * @param {MultiSet} B - Second set. + * @return {boolean} + */ +MultiSet.isSuperset = function(A, B) { + return MultiSet.isSubset(B, A); +}; + +/** + * Exporting. + */ +module.exports = MultiSet; diff --git a/amplify/functions/fetchDocuments/node_modules/mnemonist/package.json b/amplify/functions/fetchDocuments/node_modules/mnemonist/package.json new file mode 100644 index 0000000..79e8f19 --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/mnemonist/package.json @@ -0,0 +1,119 @@ +{ + "name": "mnemonist", + "version": "0.38.3", + "description": "Curated collection of data structures for the JavaScript language.", + "scripts": { + "lint": "eslint ./*.js ./utils ./test", + "prepublish": "npm run lint && npm test && npm run test:types", + "test": "mocha", + "test:types": "tsc --target es2015 --noEmit --noImplicitAny --noImplicitReturns ./test/types.ts" + }, + "main": "./index.js", + "types": "./index.d.ts", + "files": [ + "sort", + "utils", + "*.d.ts", + "*.js" + ], + "repository": { + "type": "git", + "url": "git+https://github.com/yomguithereal/mnemonist.git" + }, + "keywords": [ + "bag", + "bimap", + "bit array", + "bit set", + "bit vector", + "bitset", + "bk tree", + "burkhard-keller tree", + "cache", + "circular buffer", + "counter", + "data structures", + "default map", + "deque", + "disjoint set", + "fibonacci heap", + "fuzzy map", + "hashed array tree", + "heap", + "interval tree", + "inverted index", + "kd tree", + "linked list", + 
"lru", + "lru cache", + "multimap", + "multiset", + "passjoin", + "queue", + "sparse map", + "sparse set", + "stack", + "structures", + "suffix tree", + "symspell", + "trie", + "union find", + "vantage point tree", + "vector", + "vp tree" + ], + "author": { + "name": "Guillaume Plique", + "url": "http://github.com/Yomguithereal" + }, + "license": "MIT", + "bugs": { + "url": "https://github.com/yomguithereal/mnemonist/issues" + }, + "homepage": "https://github.com/yomguithereal/mnemonist#readme", + "dependencies": { + "obliterator": "^1.6.1" + }, + "devDependencies": { + "@yomguithereal/eslint-config": "^4.0.0", + "asciitree": "^1.0.2", + "damerau-levenshtein": "^1.0.6", + "eslint": "^7.21.0", + "leven": "^3.1.0", + "lodash": "^4.17.21", + "matcha": "^0.7.0", + "mocha": "^8.3.0", + "pandemonium": "^2.0.0", + "seedrandom": "^3.0.5", + "static-kdtree": "^1.0.2", + "typescript": "^4.2.2" + }, + "eslintConfig": { + "extends": "@yomguithereal/eslint-config", + "globals": { + "Set": true, + "Map": true, + "WeakMap": true, + "Symbol": true, + "ArrayBuffer": true, + "Uint8Array": true, + "Uint8ClampedArray": true, + "Uint16Array": true, + "Uint32Array": true, + "Int8Array": true, + "Int16Array": true, + "Int32Array": true, + "Float32Array": true, + "Float64Array": true + }, + "parserOptions": { + "ecmaVersion": 6, + "ecmaFeatures": { + "forOf": true + } + }, + "rules": { + "no-new": 0 + } + } +} diff --git a/amplify/functions/fetchDocuments/node_modules/mnemonist/passjoin-index.d.ts b/amplify/functions/fetchDocuments/node_modules/mnemonist/passjoin-index.d.ts new file mode 100644 index 0000000..4d91746 --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/mnemonist/passjoin-index.d.ts @@ -0,0 +1,54 @@ +/** + * Mnemonist PassjoinIndex Typings + * ================================ + */ +type LevenshteinDistanceFunction = (a: T, b: T) => number; + +export default class PassjoinIndex implements Iterable { + + // Members + size: number; + + // Constructor + 
constructor(levenshtein: LevenshteinDistanceFunction, k: number); + + // Methods + add(value: T): this; + search(query: T): Set; + clear(): void; + forEach(callback: (value: T, index: number, self: this) => void, scope?: any): void; + values(): IterableIterator; + [Symbol.iterator](): IterableIterator; + inspect(): any; + + // Statics + static from( + iterable: Iterable | {[key: string] : I}, + levenshtein: LevenshteinDistanceFunction, + k: number + ): PassjoinIndex; +} + +export function countKeys(k: number, s: number): number; +export function comparator(a: T, b: T): number; +export function partition(k: number, l: number): Array<[number, number]>; +export function segments(k: number, string: T): Array; +export function segmentPos(k: number, i: number, string: T): number; + +export function multiMatchAwareInterval( + k: number, + delta: number, + i: number, + s: number, + pi: number, + li: number +): [number, number]; + +export function multiMatchAwareSubstrings( + k: number, + string: T, + l: number, + i: number, + pi: number, + li: number +): Array; diff --git a/amplify/functions/fetchDocuments/node_modules/mnemonist/passjoin-index.js b/amplify/functions/fetchDocuments/node_modules/mnemonist/passjoin-index.js new file mode 100644 index 0000000..652d614 --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/mnemonist/passjoin-index.js @@ -0,0 +1,518 @@ +/** + * Mnemonist PassjoinIndex + * ======================== + * + * The PassjoinIndex is an index leveraging the "passjoin" algorithm as a mean + * to index strings for Levenshtein distance queries. It features a complexity + * related to the Levenshtein query threshold k rather than the number of + * strings to test (roughly O(k^3)). + * + * [References]: + * Jiang, Yu, Dong Deng, Jiannan Wang, Guoliang Li, et Jianhua Feng. + * « Efficient Parallel Partition-Based Algorithms for Similarity Search and Join + * with Edit Distance Constraints ». 
In Proceedings of the Joint EDBT/ICDT 2013 + * Workshops on - EDBT ’13, 341. Genoa, Italy: ACM Press, 2013. + * https://doi.org/10.1145/2457317.2457382. + * + * Li, Guoliang, Dong Deng, et Jianhua Feng. « A Partition-Based Method for + * String Similarity Joins with Edit-Distance Constraints ». ACM Transactions on + * Database Systems 38, no 2 (1 juin 2013): 1‑33. + * https://doi.org/10.1145/2487259.2487261. + * + * [Urls]: + * http://people.csail.mit.edu/dongdeng/projects/passjoin/index.html + */ +var Iterator = require('obliterator/iterator'), + forEach = require('obliterator/foreach'); + +// TODO: leveraging BagDistance as an upper bound of Levenshtein +// TODO: leverage n-grams recursive indexing +// TODO: try the MultiArray as a memory backend +// TODO: what about damerau levenshtein + +/** + * Helpers. + */ + +/** + * Function returning the number of substrings that will be selected by the + * multi-match-aware selection scheme for theshold `k`, for a string of length + * `s` to match strings of length `l`. + * + * @param {number} k - Levenshtein distance threshold. + * @param {number} s - Length of target strings. + * @param {number} l - Length of strings to match. + * @returns {number} - The number of selected substrings. + */ +function countSubstringsL(k, s, l) { + return (((Math.pow(k, 2) - Math.pow(Math.abs(s - l), 2)) / 2) | 0) + k + 1; +} + +/** + * Function returning the minimum number of substrings that will be selected by + * the multi-match-aware selection scheme for theshold `k`, for a string of + * length `s` to match any string of relevant length. + * + * @param {number} k - Levenshtein distance threshold. + * @param {number} s - Length of target strings. + * @returns {number} - The number of selected substrings. 
+ */ +function countKeys(k, s) { + var c = 0; + + for (var l = 0, m = s + 1; l < m; l++) + c += countSubstringsL(k, s, l); + + return c; +} + +/** + * Function used to compare two keys in order to sort them first by decreasing + * length and then alphabetically as per the "4.2 Effective Indexing Strategy" + * point of the paper. + * + * @param {number} k - Levenshtein distance threshold. + * @param {number} s - Length of target strings. + * @returns {number} - The number of selected substrings. + */ +function comparator(a, b) { + if (a.length > b.length) + return -1; + if (a.length < b.length) + return 1; + + if (a < b) + return -1; + if (a > b) + return 1; + + return 0; +} + +/** + * Function partitioning a string into k + 1 uneven segments, the shorter + * ones, then the longer ones. + * + * @param {number} k - Levenshtein distance threshold. + * @param {number} l - Length of the string. + * @returns {Array} - The partition tuples (start, length). + */ +function partition(k, l) { + var m = k + 1, + a = (l / m) | 0, + b = a + 1, + i, + j; + + var largeSegments = l - a * m, + smallSegments = m - largeSegments; + + var tuples = new Array(k + 1); + + for (i = 0; i < smallSegments; i++) + tuples[i] = [i * a, a]; + + var offset = (i - 1) * a + a; + + for (j = 0; j < largeSegments; j++) + tuples[i + j] = [offset + j * b, b]; + + return tuples; +} + +/** + * Function yielding a string's k + 1 passjoin segments to index. + * + * @param {number} k - Levenshtein distance threshold. + * @param {string} string - Target string. + * @returns {Array} - The string's segments. 
+ */ +function segments(k, string) { + var l = string.length, + m = k + 1, + a = (l / m) | 0, + b = a + 1, + o, + i, + j; + + var largeSegments = l - a * m, + smallSegments = m - largeSegments; + + var S = new Array(k + 1); + + for (i = 0; i < smallSegments; i++) { + o = i * a; + S[i] = string.slice(o, o + a); + } + + var offset = (i - 1) * a + a; + + for (j = 0; j < largeSegments; j++) { + o = offset + j * b; + S[i + j] = string.slice(o, o + b); + } + + return S; +} + +// TODO: jsdocs +function segmentPos(k, i, string) { + if (i === 0) + return 0; + + var l = string.length; + + var m = k + 1, + a = (l / m) | 0, + b = a + 1; + + var largeSegments = l - a * m, + smallSegments = m - largeSegments; + + if (i <= smallSegments - 1) + return i * a; + + var offset = i - smallSegments; + + return smallSegments * a + offset * b; +} + +/** + * Function returning the interval of relevant substrings to lookup using the + * multi-match-aware substring selection scheme described in the paper. + * + * @param {number} k - Levenshtein distance threshold. + * @param {number} delta - Signed length difference between both considered strings. + * @param {number} i - k + 1 segment index. + * @param {number} s - String's length. + * @param {number} pi - k + 1 segment position in target string. + * @param {number} li - k + 1 segment length. + * @returns {Array} - The interval (start, stop). + */ +function multiMatchAwareInterval(k, delta, i, s, pi, li) { + var start1 = pi - i, + end1 = pi + i; + + var o = k - i; + + var start2 = pi + delta - o, + end2 = pi + delta + o; + + var end3 = s - li; + + return [Math.max(0, start1, start2), Math.min(end1, end2, end3)]; +} + +/** + * Function yielding relevant substrings to lookup using the multi-match-aware + * substring selection scheme described in the paper. + * + * @param {number} k - Levenshtein distance threshold. + * @param {string} string - Target string. + * @param {number} l - Length of strings to match. 
+ * @param {number} i - k + 1 segment index. + * @param {number} pi - k + 1 segment position in target string. + * @param {number} li - k + 1 segment length. + * @returns {Array} - The contiguous substrings. + */ +function multiMatchAwareSubstrings(k, string, l, i, pi, li) { + var s = string.length; + + // Note that we need to keep the non-absolute delta for this function + // to work in both directions, up & down + var delta = s - l; + + var interval = multiMatchAwareInterval(k, delta, i, s, pi, li); + + var start = interval[0], + stop = interval[1]; + + var currentSubstring = ''; + + var substrings = []; + + var substring, j, m; + + for (j = start, m = stop + 1; j < m; j++) { + substring = string.slice(j, j + li); + + // We skip identical consecutive substrings (to avoid repetition in case + // of contiguous letter duplication) + if (substring === currentSubstring) + continue; + + substrings.push(substring); + + currentSubstring = substring; + } + + return substrings; +} + +/** + * PassjoinIndex. + * + * @note I tried to apply the paper's optimizations regarding Levenshtein + * distance computations but it did not provide a performance boost, quite + * the contrary. This is because since we are mostly using the index for small k + * here, most of the strings we work on are quite small and the bookkeeping + * induced by Ukkonen's method and the paper's one are slowing us down more than + * they actually help us go faster. + * + * @note This implementation does not try to ensure that you add the same string + * more than once. + * + * @constructor + * @param {function} levenshtein - Levenshtein distance function. + * @param {number} k - Levenshtein distance threshold. 
+ */ +function PassjoinIndex(levenshtein, k) { + if (typeof levenshtein !== 'function') + throw new Error('mnemonist/passjoin-index: `levenshtein` should be a function returning edit distance between two strings.'); + + if (typeof k !== 'number' || k < 1) + throw new Error('mnemonist/passjoin-index: `k` should be a number > 0'); + + this.levenshtein = levenshtein; + this.k = k; + this.clear(); +} + +/** + * Method used to clear the structure. + * + * @return {undefined} + */ +PassjoinIndex.prototype.clear = function() { + + // Properties + this.size = 0; + this.strings = []; + this.invertedIndices = {}; +}; + +/** + * Method used to add a new value to the index. + * + * @param {string|Array} value - Value to add. + * @return {PassjoinIndex} + */ +PassjoinIndex.prototype.add = function(value) { + var l = value.length; + + var stringIndex = this.size; + + this.strings.push(value); + this.size++; + + var S = segments(this.k, value); + + var Ll = this.invertedIndices[l]; + + if (typeof Ll === 'undefined') { + Ll = {}; + this.invertedIndices[l] = Ll; + } + + var segment, + matches, + key, + i, + m; + + for (i = 0, m = S.length; i < m; i++) { + segment = S[i]; + key = segment + i; + matches = Ll[key]; + + if (typeof matches === 'undefined') { + matches = [stringIndex]; + Ll[key] = matches; + } + else { + matches.push(stringIndex); + } + } + + return this; +}; + +/** + * Method used to search for string matching the given query. + * + * @param {string|Array} query - Query string. 
+ * @return {Array} + */ +PassjoinIndex.prototype.search = function(query) { + var s = query.length, + k = this.k; + + var M = new Set(); + + var candidates, + candidate, + queryPos, + querySegmentLength, + key, + S, + P, + l, + m, + i, + n1, + j, + n2, + y, + n3; + + for (l = Math.max(0, s - k), m = s + k + 1; l < m; l++) { + var Ll = this.invertedIndices[l]; + + if (typeof Ll === 'undefined') + continue; + + P = partition(k, l); + + for (i = 0, n1 = P.length; i < n1; i++) { + queryPos = P[i][0]; + querySegmentLength = P[i][1]; + + S = multiMatchAwareSubstrings( + k, + query, + l, + i, + queryPos, + querySegmentLength + ); + + // Empty string edge case + if (!S.length) + S = ['']; + + for (j = 0, n2 = S.length; j < n2; j++) { + key = S[j] + i; + candidates = Ll[key]; + + if (typeof candidates === 'undefined') + continue; + + for (y = 0, n3 = candidates.length; y < n3; y++) { + candidate = this.strings[candidates[y]]; + + // NOTE: first condition is here not to compute Levenshtein + // distance for tiny strings + + // NOTE: maintaining a Set of rejected candidate is not really useful + // because it consumes more memory and because non-matches are + // less likely to be candidates agains + if ( + s <= k && l <= k || + ( + !M.has(candidate) && + this.levenshtein(query, candidate) <= k + ) + ) + M.add(candidate); + } + } + } + } + + return M; +}; + +/** + * Method used to iterate over the index. + * + * @param {function} callback - Function to call for each item. + * @param {object} scope - Optional scope. + * @return {undefined} + */ +PassjoinIndex.prototype.forEach = function(callback, scope) { + scope = arguments.length > 1 ? scope : this; + + for (var i = 0, l = this.strings.length; i < l; i++) + callback.call(scope, this.strings[i], i, this); +}; + +/** + * Method used to create an iterator over a index's values. 
+ * + * @return {Iterator} + */ +PassjoinIndex.prototype.values = function() { + var strings = this.strings, + l = strings.length, + i = 0; + + return new Iterator(function() { + if (i >= l) + return { + done: true + }; + + var value = strings[i]; + i++; + + return { + value: value, + done: false + }; + }); +}; + +/** + * Attaching the #.values method to Symbol.iterator if possible. + */ +if (typeof Symbol !== 'undefined') + PassjoinIndex.prototype[Symbol.iterator] = PassjoinIndex.prototype.values; + +/** + * Convenience known methods. + */ +PassjoinIndex.prototype.inspect = function() { + var array = this.strings.slice(); + + // Trick so that node displays the name of the constructor + Object.defineProperty(array, 'constructor', { + value: PassjoinIndex, + enumerable: false + }); + + return array; +}; + +if (typeof Symbol !== 'undefined') + PassjoinIndex.prototype[Symbol.for('nodejs.util.inspect.custom')] = PassjoinIndex.prototype.inspect; + +/** + * Static @.from function taking an arbitrary iterable & converting it into + * a structure. + * + * @param {Iterable} iterable - Target iterable. + * @return {PassjoinIndex} + */ +PassjoinIndex.from = function(iterable, levenshtein, k) { + var index = new PassjoinIndex(levenshtein, k); + + forEach(iterable, function(string) { + index.add(string); + }); + + return index; +}; + +/** + * Exporting. 
+ */ +PassjoinIndex.countKeys = countKeys; +PassjoinIndex.comparator = comparator; +PassjoinIndex.partition = partition; +PassjoinIndex.segments = segments; +PassjoinIndex.segmentPos = segmentPos; +PassjoinIndex.multiMatchAwareInterval = multiMatchAwareInterval; +PassjoinIndex.multiMatchAwareSubstrings = multiMatchAwareSubstrings; + +module.exports = PassjoinIndex; diff --git a/amplify/functions/fetchDocuments/node_modules/mnemonist/queue.d.ts b/amplify/functions/fetchDocuments/node_modules/mnemonist/queue.d.ts new file mode 100644 index 0000000..2d3e434 --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/mnemonist/queue.d.ts @@ -0,0 +1,27 @@ +/** + * Mnemonist Queue Typings + * ======================== + */ +export default class Queue implements Iterable { + + // Members + size: number; + + // Methods + clear(): void; + enqueue(item: T): number; + dequeue(): T | undefined; + peek(): T | undefined; + forEach(callback: (item: T, index: number, queue: this) => void, scope?: any): void; + toArray(): Array; + values(): IterableIterator; + entries(): IterableIterator<[number, T]>; + [Symbol.iterator](): IterableIterator; + toString(): string; + toJSON(): Array; + inspect(): any; + + // Statics + static from(iterable: Iterable | {[key: string] : I}): Queue; + static of(...items: Array): Queue; +} diff --git a/amplify/functions/fetchDocuments/node_modules/mnemonist/queue.js b/amplify/functions/fetchDocuments/node_modules/mnemonist/queue.js new file mode 100644 index 0000000..aa554b6 --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/mnemonist/queue.js @@ -0,0 +1,215 @@ +/** + * Mnemonist Queue + * ================ + * + * Queue implementation based on the ideas of Queue.js that seems to beat + * a LinkedList one in performance. 
+ */ +var Iterator = require('obliterator/iterator'), + forEach = require('obliterator/foreach'); + +/** + * Queue + * + * @constructor + */ +function Queue() { + this.clear(); +} + +/** + * Method used to clear the queue. + * + * @return {undefined} + */ +Queue.prototype.clear = function() { + + // Properties + this.items = []; + this.offset = 0; + this.size = 0; +}; + +/** + * Method used to add an item to the queue. + * + * @param {any} item - Item to enqueue. + * @return {number} + */ +Queue.prototype.enqueue = function(item) { + + this.items.push(item); + return ++this.size; +}; + +/** + * Method used to retrieve & remove the first item of the queue. + * + * @return {any} + */ +Queue.prototype.dequeue = function() { + if (!this.size) + return; + + var item = this.items[this.offset]; + + if (++this.offset * 2 >= this.items.length) { + this.items = this.items.slice(this.offset); + this.offset = 0; + } + + this.size--; + + return item; +}; + +/** + * Method used to retrieve the first item of the queue. + * + * @return {any} + */ +Queue.prototype.peek = function() { + if (!this.size) + return; + + return this.items[this.offset]; +}; + +/** + * Method used to iterate over the queue. + * + * @param {function} callback - Function to call for each item. + * @param {object} scope - Optional scope. + * @return {undefined} + */ +Queue.prototype.forEach = function(callback, scope) { + scope = arguments.length > 1 ? scope : this; + + for (var i = this.offset, j = 0, l = this.items.length; i < l; i++, j++) + callback.call(scope, this.items[i], j, this); +}; + +/* + * Method used to convert the queue to a JavaScript array. + * + * @return {array} + */ +Queue.prototype.toArray = function() { + return this.items.slice(this.offset); +}; + +/** + * Method used to create an iterator over a queue's values. 
+ * + * @return {Iterator} + */ +Queue.prototype.values = function() { + var items = this.items, + i = this.offset; + + return new Iterator(function() { + if (i >= items.length) + return { + done: true + }; + + var value = items[i]; + i++; + + return { + value: value, + done: false + }; + }); +}; + +/** + * Method used to create an iterator over a queue's entries. + * + * @return {Iterator} + */ +Queue.prototype.entries = function() { + var items = this.items, + i = this.offset, + j = 0; + + return new Iterator(function() { + if (i >= items.length) + return { + done: true + }; + + var value = items[i]; + i++; + + return { + value: [j++, value], + done: false + }; + }); +}; + +/** + * Attaching the #.values method to Symbol.iterator if possible. + */ +if (typeof Symbol !== 'undefined') + Queue.prototype[Symbol.iterator] = Queue.prototype.values; + +/** + * Convenience known methods. + */ +Queue.prototype.toString = function() { + return this.toArray().join(','); +}; + +Queue.prototype.toJSON = function() { + return this.toArray(); +}; + +Queue.prototype.inspect = function() { + var array = this.toArray(); + + // Trick so that node displays the name of the constructor + Object.defineProperty(array, 'constructor', { + value: Queue, + enumerable: false + }); + + return array; +}; + +if (typeof Symbol !== 'undefined') + Queue.prototype[Symbol.for('nodejs.util.inspect.custom')] = Queue.prototype.inspect; + +/** + * Static @.from function taking an arbitrary iterable & converting it into + * a queue. + * + * @param {Iterable} iterable - Target iterable. + * @return {Queue} + */ +Queue.from = function(iterable) { + var queue = new Queue(); + + forEach(iterable, function(value) { + queue.enqueue(value); + }); + + return queue; +}; + +/** + * Static @.of function taking an arbitrary number of arguments & converting it + * into a queue. + * + * @param {...any} args + * @return {Queue} + */ +Queue.of = function() { + return Queue.from(arguments); +}; + +/** + * Exporting. 
+ */ +module.exports = Queue; diff --git a/amplify/functions/fetchDocuments/node_modules/mnemonist/semi-dynamic-trie.js b/amplify/functions/fetchDocuments/node_modules/mnemonist/semi-dynamic-trie.js new file mode 100644 index 0000000..6627d34 --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/mnemonist/semi-dynamic-trie.js @@ -0,0 +1,251 @@ +/* eslint no-constant-condition: 0 */ +/** + * Mnemonist SemiDynamicTrie + * ========================== + * + * Lowlevel Trie working at character level, storing information in typed + * array and organizing its children in linked lists. + * + * This implementation also uses a "fat node" strategy to boost access to some + * bloated node's children when the number of children rises above a certain + * threshold. + */ +var Vector = require('./vector.js'); + +// TODO: rename => ternary search tree + +/** + * Constants. + */ +const MAX_LINKED = 7; + +/** + * SemiDynamicTrie. + * + * @constructor + */ +function SemiDynamicTrie() { + + // Properties + + // TODO: make it 16 bits + this.characters = new Vector.Uint8Vector(256); + this.nextPointers = new Vector.Int32Vector(256); + this.childPointers = new Vector.Uint32Vector(256); + this.maps = new Vector.Uint32Vector(256); +} + +/** + * Method used to clear the structure. + * + * @return {undefined} + */ +SemiDynamicTrie.prototype.clear = function() { + + // Properties +}; + +SemiDynamicTrie.prototype.ensureSibling = function(block, character) { + var nextCharacter, + nextBlock, + newBlock; + + // Do we have a root? + if (this.characters.length === 0) { + + this.nextPointers.push(0); + this.childPointers.push(0); + this.characters.push(character); + + return block; + } + + // Are we traversing a fat node? 
+ var fatNode = this.nextPointers.array[block]; + + if (fatNode < 0) { + var mapIndex = -fatNode + character; + + nextBlock = this.maps.array[mapIndex]; + + if (nextBlock !== 0) + return nextBlock; + + newBlock = this.characters.length; + + this.nextPointers.push(0); + this.childPointers.push(0); + this.characters.push(character); + + this.maps.set(mapIndex, newBlock); + + return newBlock; + } + + var listLength = 1, + startingBlock = block; + + while (true) { + nextCharacter = this.characters.array[block]; + + if (nextCharacter === character) + return block; + + nextBlock = this.nextPointers.array[block]; + + if (nextBlock === 0) + break; + + listLength++; + block = nextBlock; + } + + // If the list is too long, we create a fat node + if (listLength > MAX_LINKED) { + block = startingBlock; + + var offset = this.maps.length; + + this.maps.resize(offset + 255); + this.maps.set(offset + 255, 0); + + while (true) { + nextBlock = this.nextPointers.array[block]; + + if (nextBlock === 0) + break; + + nextCharacter = this.characters.array[nextBlock]; + this.maps.set(offset + nextCharacter, nextBlock); + + block = nextBlock; + } + + this.nextPointers.set(startingBlock, -offset); + + newBlock = this.characters.length; + + this.nextPointers.push(0); + this.childPointers.push(0); + this.characters.push(character); + + this.maps.set(offset + character, newBlock); + + return newBlock; + } + + // Else, we append the character to the list + newBlock = this.characters.length; + + this.nextPointers.push(0); + this.childPointers.push(0); + this.nextPointers.set(block, newBlock); + this.characters.push(character); + + return newBlock; +}; + +SemiDynamicTrie.prototype.findSibling = function(block, character) { + var nextCharacter; + + // Do we have a fat node? 
+ var fatNode = this.nextPointers.array[block]; + + if (fatNode < 0) { + var mapIndex = -fatNode + character; + + var nextBlock = this.maps.array[mapIndex]; + + if (nextBlock === 0) + return -1; + + return nextBlock; + } + + while (true) { + nextCharacter = this.characters.array[block]; + + if (nextCharacter === character) + return block; + + block = this.nextPointers.array[block]; + + if (block === 0) + return -1; + } +}; + +SemiDynamicTrie.prototype.add = function(key) { + var keyCharacter, + childBlock, + block = 0; + + var i = 0, l = key.length; + + // Going as far as possible + while (i < l) { + keyCharacter = key.charCodeAt(i); + + // Ensuring a correct sibling exists + block = this.ensureSibling(block, keyCharacter); + + i++; + + if (i < l) { + + // Descending + childBlock = this.childPointers.array[block]; + + if (childBlock === 0) + break; + + block = childBlock; + } + } + + // Adding as many blocks as necessary + while (i < l) { + + childBlock = this.characters.length; + this.characters.push(key.charCodeAt(i)); + + this.childPointers.push(0); + this.nextPointers.push(0); + this.childPointers.set(block, childBlock); + + block = childBlock; + + i++; + } +}; + +SemiDynamicTrie.prototype.has = function(key) { + var i, l; + + var block = 0, + siblingBlock; + + for (i = 0, l = key.length; i < l; i++) { + siblingBlock = this.findSibling(block, key.charCodeAt(i)); + + if (siblingBlock === -1) + return false; + + // TODO: be sure + if (i === l - 1) + return true; + + block = this.childPointers.array[siblingBlock]; + + if (block === 0) + return false; + } + + // TODO: fix, should have a leaf pointer somehow + return true; +}; + +/** + * Exporting. 
+ */ +module.exports = SemiDynamicTrie; diff --git a/amplify/functions/fetchDocuments/node_modules/mnemonist/set.d.ts b/amplify/functions/fetchDocuments/node_modules/mnemonist/set.d.ts new file mode 100644 index 0000000..fc8dae8 --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/mnemonist/set.d.ts @@ -0,0 +1,18 @@ +/** + * Mnemonist Set Typings + * ====================== + */ +export function intersection(...set: Array>): Set; +export function union(...set: Array>): Set; +export function difference(a: Set, b: Set): Set; +export function symmetricDifference(a: Set, b: Set): Set; +export function isSubset(a: Set, b: Set): boolean; +export function isSuperset(a: Set, b: Set): boolean; +export function add(a: Set, b: Set): void; +export function subtract(a: Set, b: Set): void; +export function intersect(a: Set, b: Set): void; +export function disjunct(a: Set, b: Set): void; +export function intersectionSize(a: Set, b:Set): number; +export function unionSize(a: Set, b:Set): number; +export function jaccard(a: Set, b:Set): number; +export function overlap(a: Set, b: Set): number; diff --git a/amplify/functions/fetchDocuments/node_modules/mnemonist/set.js b/amplify/functions/fetchDocuments/node_modules/mnemonist/set.js new file mode 100644 index 0000000..e0d020b --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/mnemonist/set.js @@ -0,0 +1,356 @@ +/** + * Mnemonist Set + * ============== + * + * Useful function related to sets such as union, intersection and so on... + */ + +// TODO: optimize versions for less variadicities + +/** + * Variadic function computing the intersection of multiple sets. + * + * @param {...Set} sets - Sets to intersect. + * @return {Set} - The intesection. 
+ */ +exports.intersection = function() { + if (arguments.length < 2) + throw new Error('mnemonist/Set.intersection: needs at least two arguments.'); + + var I = new Set(); + + // First we need to find the smallest set + var smallestSize = Infinity, + smallestSet = null; + + var s, i, l = arguments.length; + + for (i = 0; i < l; i++) { + s = arguments[i]; + + // If one of the set has no items, we can stop right there + if (s.size === 0) + return I; + + if (s.size < smallestSize) { + smallestSize = s.size; + smallestSet = s; + } + } + + // Now we need to intersect this set with the others + var iterator = smallestSet.values(), + step, + item, + add, + set; + + // TODO: we can optimize by iterating each next time over the current intersection + // but this probably means more RAM to consume since we'll create n-1 sets rather than + // only the one. + while ((step = iterator.next(), !step.done)) { + item = step.value; + add = true; + + for (i = 0; i < l; i++) { + set = arguments[i]; + + if (set === smallestSet) + continue; + + if (!set.has(item)) { + add = false; + break; + } + } + + if (add) + I.add(item); + } + + return I; +}; + +/** + * Variadic function computing the union of multiple sets. + * + * @param {...Set} sets - Sets to unite. + * @return {Set} - The union. + */ +exports.union = function() { + if (arguments.length < 2) + throw new Error('mnemonist/Set.union: needs at least two arguments.'); + + var U = new Set(); + + var i, l = arguments.length; + + var iterator, + step; + + for (i = 0; i < l; i++) { + iterator = arguments[i].values(); + + while ((step = iterator.next(), !step.done)) + U.add(step.value); + } + + return U; +}; + +/** + * Function computing the difference between two sets. + * + * @param {Set} A - First set. + * @param {Set} B - Second set. + * @return {Set} - The difference. 
+ */ +exports.difference = function(A, B) { + + // If first set is empty + if (!A.size) + return new Set(); + + if (!B.size) + return new Set(A); + + var D = new Set(); + + var iterator = A.values(), + step; + + while ((step = iterator.next(), !step.done)) { + if (!B.has(step.value)) + D.add(step.value); + } + + return D; +}; + +/** + * Function computing the symmetric difference between two sets. + * + * @param {Set} A - First set. + * @param {Set} B - Second set. + * @return {Set} - The symmetric difference. + */ +exports.symmetricDifference = function(A, B) { + var S = new Set(); + + var iterator = A.values(), + step; + + while ((step = iterator.next(), !step.done)) { + if (!B.has(step.value)) + S.add(step.value); + } + + iterator = B.values(); + + while ((step = iterator.next(), !step.done)) { + if (!A.has(step.value)) + S.add(step.value); + } + + return S; +}; + +/** + * Function returning whether A is a subset of B. + * + * @param {Set} A - First set. + * @param {Set} B - Second set. + * @return {boolean} + */ +exports.isSubset = function(A, B) { + var iterator = A.values(), + step; + + // Shortcuts + if (A === B) + return true; + + if (A.size > B.size) + return false; + + while ((step = iterator.next(), !step.done)) { + if (!B.has(step.value)) + return false; + } + + return true; +}; + +/** + * Function returning whether A is a superset of B. + * + * @param {Set} A - First set. + * @param {Set} B - Second set. + * @return {boolean} + */ +exports.isSuperset = function(A, B) { + return exports.isSubset(B, A); +}; + +/** + * Function adding the items of set B to the set A. + * + * @param {Set} A - First set. + * @param {Set} B - Second set. + */ +exports.add = function(A, B) { + var iterator = B.values(), + step; + + while ((step = iterator.next(), !step.done)) + A.add(step.value); + + return; +}; + +/** + * Function subtracting the items of set B from the set A. + * + * @param {Set} A - First set. + * @param {Set} B - Second set. 
+ */ +exports.subtract = function(A, B) { + var iterator = B.values(), + step; + + while ((step = iterator.next(), !step.done)) + A.delete(step.value); + + return; +}; + +/** + * Function intersecting the items of A & B. + * + * @param {Set} A - First set. + * @param {Set} B - Second set. + */ +exports.intersect = function(A, B) { + var iterator = A.values(), + step; + + while ((step = iterator.next(), !step.done)) { + if (!B.has(step.value)) + A.delete(step.value); + } + + return; +}; + +/** + * Function disjuncting the items of A & B. + * + * @param {Set} A - First set. + * @param {Set} B - Second set. + */ +exports.disjunct = function(A, B) { + var iterator = A.values(), + step; + + var toRemove = []; + + while ((step = iterator.next(), !step.done)) { + if (B.has(step.value)) + toRemove.push(step.value); + } + + iterator = B.values(); + + while ((step = iterator.next(), !step.done)) { + if (!A.has(step.value)) + A.add(step.value); + } + + for (var i = 0, l = toRemove.length; i < l; i++) + A.delete(toRemove[i]); + + return; +}; + +/** + * Function returning the size of the intersection of A & B. + * + * @param {Set} A - First set. + * @param {Set} B - Second set. + * @return {number} + */ +exports.intersectionSize = function(A, B) { + var tmp; + + // We need to know the smallest set + if (A.size > B.size) { + tmp = A; + A = B; + B = tmp; + } + + if (A.size === 0) + return 0; + + if (A === B) + return A.size; + + var iterator = A.values(), + step; + + var I = 0; + + while ((step = iterator.next(), !step.done)) { + if (B.has(step.value)) + I++; + } + + return I; +}; + +/** + * Function returning the size of the union of A & B. + * + * @param {Set} A - First set. + * @param {Set} B - Second set. + * @return {number} + */ +exports.unionSize = function(A, B) { + var I = exports.intersectionSize(A, B); + + return A.size + B.size - I; +}; + +/** + * Function returning the Jaccard similarity between A & B. + * + * @param {Set} A - First set. 
+ * @param {Set} B - Second set. + * @return {number} + */ +exports.jaccard = function(A, B) { + var I = exports.intersectionSize(A, B); + + if (I === 0) + return 0; + + var U = A.size + B.size - I; + + return I / U; +}; + +/** + * Function returning the overlap coefficient between A & B. + * + * @param {Set} A - First set. + * @param {Set} B - Second set. + * @return {number} + */ +exports.overlap = function(A, B) { + var I = exports.intersectionSize(A, B); + + if (I === 0) + return 0; + + return I / Math.min(A.size, B.size); +}; diff --git a/amplify/functions/fetchDocuments/node_modules/mnemonist/sort/insertion.d.ts b/amplify/functions/fetchDocuments/node_modules/mnemonist/sort/insertion.d.ts new file mode 100644 index 0000000..db22f9b --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/mnemonist/sort/insertion.d.ts @@ -0,0 +1,4 @@ +import {IArrayLike} from '../utils/types'; + +export function inplaceInsertionSort(array: IArrayLike, lo: number, hi: number): IArrayLike; +export function inplaceInsertionSortIndices(array: IArrayLike, indices: IArrayLike, lo: number, hi: number): IArrayLike; diff --git a/amplify/functions/fetchDocuments/node_modules/mnemonist/sort/insertion.js b/amplify/functions/fetchDocuments/node_modules/mnemonist/sort/insertion.js new file mode 100644 index 0000000..aebd1ad --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/mnemonist/sort/insertion.js @@ -0,0 +1,50 @@ +/** + * Mnemonist Insertion Sort + * ========================= + * + * Insertion sort related functions. 
+ */ +function inplaceInsertionSort(array, lo, hi) { + i = lo + 1; + + var j, k; + + for (; i < hi; i++) { + k = array[i]; + j = i - 1; + + while (j >= lo && array[j] > k) { + array[j + 1] = array[j]; + j--; + } + + array[j + 1] = k; + } + + return array; +} + +exports.inplaceInsertionSort = inplaceInsertionSort; + +function inplaceInsertionSortIndices(array, indices, lo, hi) { + i = lo + 1; + + var j, k, t; + + for (; i < hi; i++) { + t = indices[i]; + k = array[t]; + j = i - 1; + + while (j >= lo && array[indices[j]] > k) { + indices[j + 1] = indices[j]; + j--; + } + + indices[j + 1] = t; + } + + return indices; +} + +exports.inplaceInsertionSortIndices = inplaceInsertionSortIndices; diff --git a/amplify/functions/fetchDocuments/node_modules/mnemonist/sort/quick.d.ts b/amplify/functions/fetchDocuments/node_modules/mnemonist/sort/quick.d.ts new file mode 100644 index 0000000..5e6c90d --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/mnemonist/sort/quick.d.ts @@ -0,0 +1,4 @@ +import {IArrayLike} from '../utils/types'; + +export function inplaceQuickSort(array: IArrayLike, lo: number, hi: number): IArrayLike; +export function inplaceQuickSortIndices(array: IArrayLike, indices: IArrayLike, lo: number, hi: number): IArrayLike; diff --git a/amplify/functions/fetchDocuments/node_modules/mnemonist/sort/quick.js b/amplify/functions/fetchDocuments/node_modules/mnemonist/sort/quick.js new file mode 100644 index 0000000..008d0fd --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/mnemonist/sort/quick.js @@ -0,0 +1,116 @@ +/** + * Mnemonist Quick Sort + * ===================== + * + * Quick sort related functions. 
+ * Adapted from: https://alienryderflex.com/quicksort/ + */ +var LOS = new Float64Array(64), + HIS = new Float64Array(64); + +function inplaceQuickSort(array, lo, hi) { + var p, i, l, r, swap; + + LOS[0] = lo; + HIS[0] = hi; + i = 0; + + while (i >= 0) { + l = LOS[i]; + r = HIS[i] - 1; + + if (l < r) { + p = array[l]; + + while (l < r) { + while (array[r] >= p && l < r) + r--; + + if (l < r) + array[l++] = array[r]; + + while (array[l] <= p && l < r) + l++; + + if (l < r) + array[r--] = array[l]; + } + + array[l] = p; + LOS[i + 1] = l + 1; + HIS[i + 1] = HIS[i]; + HIS[i++] = l; + + if (HIS[i] - LOS[i] > HIS[i - 1] - LOS[i - 1]) { + swap = LOS[i]; + LOS[i] = LOS[i - 1]; + LOS[i - 1] = swap; + + swap = HIS[i]; + HIS[i] = HIS[i - 1]; + HIS[i - 1] = swap; + } + } + else { + i--; + } + } + + return array; +} + +exports.inplaceQuickSort = inplaceQuickSort; + +function inplaceQuickSortIndices(array, indices, lo, hi) { + var p, i, l, r, t, swap; + + LOS[0] = lo; + HIS[0] = hi; + i = 0; + + while (i >= 0) { + l = LOS[i]; + r = HIS[i] - 1; + + if (l < r) { + t = indices[l]; + p = array[t]; + + while (l < r) { + while (array[indices[r]] >= p && l < r) + r--; + + if (l < r) + indices[l++] = indices[r]; + + while (array[indices[l]] <= p && l < r) + l++; + + if (l < r) + indices[r--] = indices[l]; + } + + indices[l] = t; + LOS[i + 1] = l + 1; + HIS[i + 1] = HIS[i]; + HIS[i++] = l; + + if (HIS[i] - LOS[i] > HIS[i - 1] - LOS[i - 1]) { + swap = LOS[i]; + LOS[i] = LOS[i - 1]; + LOS[i - 1] = swap; + + swap = HIS[i]; + HIS[i] = HIS[i - 1]; + HIS[i - 1] = swap; + } + } + else { + i--; + } + } + + return indices; +} + +exports.inplaceQuickSortIndices = inplaceQuickSortIndices; diff --git a/amplify/functions/fetchDocuments/node_modules/mnemonist/sparse-map.d.ts b/amplify/functions/fetchDocuments/node_modules/mnemonist/sparse-map.d.ts new file mode 100644 index 0000000..0b22f90 --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/mnemonist/sparse-map.d.ts @@ -0,0 +1,26 @@ 
+/** + * Mnemonist SparseMap Typings + * ============================ + */ +export default class SparseMap implements Iterable<[number, V]> { + + // Members + length: number; + size: number; + + // Constructor + constructor(length: number); + + // Methods + clear(): void; + has(key: number): boolean; + get(key: number): V | undefined; + set(key: number, value: V): this; + delete(key: number): boolean; + forEach(callback: (value: V, key: number, set: this) => void, scope?: any): void; + keys(): IterableIterator; + values(): IterableIterator; + entries(): IterableIterator<[number, V]>; + [Symbol.iterator](): IterableIterator<[number, V]>; + inspect(): any; +} diff --git a/amplify/functions/fetchDocuments/node_modules/mnemonist/sparse-map.js b/amplify/functions/fetchDocuments/node_modules/mnemonist/sparse-map.js new file mode 100644 index 0000000..d5cf20d --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/mnemonist/sparse-map.js @@ -0,0 +1,243 @@ +/** + * Mnemonist SparseMap + * ==================== + * + * JavaScript sparse map implemented on top of byte arrays. + * + * [Reference]: https://research.swtch.com/sparse + */ +var Iterator = require('obliterator/iterator'), + getPointerArray = require('./utils/typed-arrays.js').getPointerArray; + +/** + * SparseMap. + * + * @constructor + */ +function SparseMap(Values, length) { + if (arguments.length < 2) { + length = Values; + Values = Array; + } + + var ByteArray = getPointerArray(length); + + // Properties + this.size = 0; + this.length = length; + this.dense = new ByteArray(length); + this.sparse = new ByteArray(length); + this.vals = new Values(length); +} + +/** + * Method used to clear the structure. + * + * @return {undefined} + */ +SparseMap.prototype.clear = function() { + this.size = 0; +}; + +/** + * Method used to check the existence of a member in the set. + * + * @param {number} member - Member to test. 
+ * @return {SparseMap} + */ +SparseMap.prototype.has = function(member) { + var index = this.sparse[member]; + + return ( + index < this.size && + this.dense[index] === member + ); +}; + +/** + * Method used to get the value associated to a member in the set. + * + * @param {number} member - Member to test. + * @return {any} + */ +SparseMap.prototype.get = function(member) { + var index = this.sparse[member]; + + if (index < this.size && this.dense[index] === member) + return this.vals[index]; + + return; +}; + +/** + * Method used to set a value into the map. + * + * @param {number} member - Member to set. + * @param {any} value - Associated value. + * @return {SparseMap} + */ +SparseMap.prototype.set = function(member, value) { + var index = this.sparse[member]; + + if (index < this.size && this.dense[index] === member) { + this.vals[index] = value; + return this; + } + + this.dense[this.size] = member; + this.sparse[member] = this.size; + this.vals[this.size] = value; + this.size++; + + return this; +}; + +/** + * Method used to remove a member from the set. + * + * @param {number} member - Member to delete. + * @return {boolean} + */ +SparseMap.prototype.delete = function(member) { + var index = this.sparse[member]; + + if (index >= this.size || this.dense[index] !== member) + return false; + + index = this.dense[this.size - 1]; + this.dense[this.sparse[member]] = index; + this.sparse[index] = this.sparse[member]; + this.size--; + + return true; +}; + +/** + * Method used to iterate over the set's values. + * + * @param {function} callback - Function to call for each item. + * @param {object} scope - Optional scope. + * @return {undefined} + */ +SparseMap.prototype.forEach = function(callback, scope) { + scope = arguments.length > 1 ? scope : this; + + for (var i = 0; i < this.size; i++) + callback.call(scope, this.vals[i], this.dense[i]); +}; + +/** + * Method used to create an iterator over a set's members. 
+ * + * @return {Iterator} + */ +SparseMap.prototype.keys = function() { + var size = this.size, + dense = this.dense, + i = 0; + + return new Iterator(function() { + if (i < size) { + var item = dense[i]; + i++; + + return { + value: item + }; + } + + return { + done: true + }; + }); +}; + +/** + * Method used to create an iterator over a set's values. + * + * @return {Iterator} + */ +SparseMap.prototype.values = function() { + var size = this.size, + values = this.vals, + i = 0; + + return new Iterator(function() { + if (i < size) { + var item = values[i]; + i++; + + return { + value: item + }; + } + + return { + done: true + }; + }); +}; + +/** + * Method used to create an iterator over a set's entries. + * + * @return {Iterator} + */ +SparseMap.prototype.entries = function() { + var size = this.size, + dense = this.dense, + values = this.vals, + i = 0; + + return new Iterator(function() { + if (i < size) { + var item = [dense[i], values[i]]; + i++; + + return { + value: item + }; + } + + return { + done: true + }; + }); +}; + +/** + * Attaching the #.entries method to Symbol.iterator if possible. + */ +if (typeof Symbol !== 'undefined') + SparseMap.prototype[Symbol.iterator] = SparseMap.prototype.entries; + +/** + * Convenience known methods. + */ +SparseMap.prototype.inspect = function() { + var proxy = new Map(); + + for (var i = 0; i < this.size; i++) + proxy.set(this.dense[i], this.vals[i]); + + // Trick so that node displays the name of the constructor + Object.defineProperty(proxy, 'constructor', { + value: SparseMap, + enumerable: false + }); + + proxy.length = this.length; + + if (this.vals.constructor !== Array) + proxy.type = this.vals.constructor.name; + + return proxy; +}; + +if (typeof Symbol !== 'undefined') + SparseMap.prototype[Symbol.for('nodejs.util.inspect.custom')] = SparseMap.prototype.inspect; + +/** + * Exporting. 
+ */ +module.exports = SparseMap; diff --git a/amplify/functions/fetchDocuments/node_modules/mnemonist/sparse-queue-set.d.ts b/amplify/functions/fetchDocuments/node_modules/mnemonist/sparse-queue-set.d.ts new file mode 100644 index 0000000..e7463bf --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/mnemonist/sparse-queue-set.d.ts @@ -0,0 +1,24 @@ +/** + * Mnemonist SparseQueueSet Typings + * ================================= + */ +export default class SparseQueueSet implements Iterable { + + // Members + capacity: number; + start: number; + size: number; + + // Constructor + constructor(length: number); + + // Methods + clear(): void; + has(value: number): boolean; + enqueue(value: number): this; + dequeue(): number | undefined; + forEach(callback: (value: number, key: number, set: this) => void, scope?: any): void; + values(): IterableIterator; + [Symbol.iterator](): IterableIterator; + inspect(): any; +} diff --git a/amplify/functions/fetchDocuments/node_modules/mnemonist/sparse-queue-set.js b/amplify/functions/fetchDocuments/node_modules/mnemonist/sparse-queue-set.js new file mode 100644 index 0000000..b5f42b3 --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/mnemonist/sparse-queue-set.js @@ -0,0 +1,218 @@ +/** + * Mnemonist SparseQueueSet + * ========================= + * + * JavaScript sparse queue set implemented on top of byte arrays. + * + * [Reference]: https://research.swtch.com/sparse + */ +var Iterator = require('obliterator/iterator'), + getPointerArray = require('./utils/typed-arrays.js').getPointerArray; + +/** + * SparseQueueSet. + * + * @constructor + */ +function SparseQueueSet(capacity) { + + var ByteArray = getPointerArray(capacity); + + // Properties + this.start = 0; + this.size = 0; + this.capacity = capacity; + this.dense = new ByteArray(capacity); + this.sparse = new ByteArray(capacity); +} + +/** + * Method used to clear the structure. 
+ * + * @return {undefined} + */ +SparseQueueSet.prototype.clear = function() { + this.start = 0; + this.size = 0; +}; + +/** + * Method used to check the existence of a member in the queue. + * + * @param {number} member - Member to test. + * @return {SparseQueueSet} + */ +SparseQueueSet.prototype.has = function(member) { + if (this.size === 0) + return false; + + var index = this.sparse[member]; + + var inBounds = ( + index < this.capacity && + ( + index >= this.start && + index < this.start + this.size + ) || + ( + index < ((this.start + this.size) % this.capacity) + ) + ); + + return ( + inBounds && + this.dense[index] === member + ); +}; + +/** + * Method used to add a member to the queue. + * + * @param {number} member - Member to add. + * @return {SparseQueueSet} + */ +SparseQueueSet.prototype.enqueue = function(member) { + var index = this.sparse[member]; + + if (this.size !== 0) { + var inBounds = ( + index < this.capacity && + ( + index >= this.start && + index < this.start + this.size + ) || + ( + index < ((this.start + this.size) % this.capacity) + ) + ); + + if (inBounds && this.dense[index] === member) + return this; + } + + index = (this.start + this.size) % this.capacity; + + this.dense[index] = member; + this.sparse[member] = index; + this.size++; + + return this; +}; + +/** + * Method used to remove the next member from the queue. + * + * @param {number} member - Member to delete. + * @return {boolean} + */ +SparseQueueSet.prototype.dequeue = function() { + if (this.size === 0) + return; + + var index = this.start; + + this.size--; + this.start++; + + if (this.start === this.capacity) + this.start = 0; + + var member = this.dense[index]; + + this.sparse[member] = this.capacity; + + return member; +}; + +/** + * Method used to iterate over the queue's values. + * + * @param {function} callback - Function to call for each item. + * @param {object} scope - Optional scope. 
+ * @return {undefined} + */ +SparseQueueSet.prototype.forEach = function(callback, scope) { + scope = arguments.length > 1 ? scope : this; + + var c = this.capacity, + l = this.size, + i = this.start, + j = 0; + + while (j < l) { + callback.call(scope, this.dense[i], j, this); + i++; + j++; + + if (i === c) + i = 0; + } +}; + +/** + * Method used to create an iterator over a set's values. + * + * @return {Iterator} + */ +SparseQueueSet.prototype.values = function() { + var dense = this.dense, + c = this.capacity, + l = this.size, + i = this.start, + j = 0; + + return new Iterator(function() { + if (j >= l) + return { + done: true + }; + + var value = dense[i]; + + i++; + j++; + + if (i === c) + i = 0; + + return { + value: value, + done: false + }; + }); +}; + +/** + * Attaching the #.values method to Symbol.iterator if possible. + */ +if (typeof Symbol !== 'undefined') + SparseQueueSet.prototype[Symbol.iterator] = SparseQueueSet.prototype.values; + +/** + * Convenience known methods. + */ +SparseQueueSet.prototype.inspect = function() { + var proxy = []; + + this.forEach(function(member) { + proxy.push(member); + }); + + // Trick so that node displays the name of the constructor + Object.defineProperty(proxy, 'constructor', { + value: SparseQueueSet, + enumerable: false + }); + + proxy.capacity = this.capacity; + + return proxy; +}; + +if (typeof Symbol !== 'undefined') + SparseQueueSet.prototype[Symbol.for('nodejs.util.inspect.custom')] = SparseQueueSet.prototype.inspect; + +/** + * Exporting. 
+ */ +module.exports = SparseQueueSet; diff --git a/amplify/functions/fetchDocuments/node_modules/mnemonist/sparse-set.d.ts b/amplify/functions/fetchDocuments/node_modules/mnemonist/sparse-set.d.ts new file mode 100644 index 0000000..99fe655 --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/mnemonist/sparse-set.d.ts @@ -0,0 +1,23 @@ +/** + * Mnemonist SparseSet Typings + * ============================ + */ +export default class SparseSet implements Iterable { + + // Members + length: number; + size: number; + + // Constructor + constructor(length: number); + + // Methods + clear(): void; + has(value: number): boolean; + add(value: number): this; + delete(value: number): boolean; + forEach(callback: (value: number, key: number, set: this) => void, scope?: any): void; + values(): IterableIterator; + [Symbol.iterator](): IterableIterator; + inspect(): any; +} diff --git a/amplify/functions/fetchDocuments/node_modules/mnemonist/sparse-set.js b/amplify/functions/fetchDocuments/node_modules/mnemonist/sparse-set.js new file mode 100644 index 0000000..7498f33 --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/mnemonist/sparse-set.js @@ -0,0 +1,168 @@ +/** + * Mnemonist SparseSet + * ==================== + * + * JavaScript sparse set implemented on top of byte arrays. + * + * [Reference]: https://research.swtch.com/sparse + */ +var Iterator = require('obliterator/iterator'), + getPointerArray = require('./utils/typed-arrays.js').getPointerArray; + +/** + * SparseSet. + * + * @constructor + */ +function SparseSet(length) { + + var ByteArray = getPointerArray(length); + + // Properties + this.size = 0; + this.length = length; + this.dense = new ByteArray(length); + this.sparse = new ByteArray(length); +} + +/** + * Method used to clear the structure. + * + * @return {undefined} + */ +SparseSet.prototype.clear = function() { + this.size = 0; +}; + +/** + * Method used to check the existence of a member in the set. 
+ * + * @param {number} member - Member to test. + * @return {SparseSet} + */ +SparseSet.prototype.has = function(member) { + var index = this.sparse[member]; + + return ( + index < this.size && + this.dense[index] === member + ); +}; + +/** + * Method used to add a member to the set. + * + * @param {number} member - Member to add. + * @return {SparseSet} + */ +SparseSet.prototype.add = function(member) { + var index = this.sparse[member]; + + if (index < this.size && this.dense[index] === member) + return this; + + this.dense[this.size] = member; + this.sparse[member] = this.size; + this.size++; + + return this; +}; + +/** + * Method used to remove a member from the set. + * + * @param {number} member - Member to delete. + * @return {boolean} + */ +SparseSet.prototype.delete = function(member) { + var index = this.sparse[member]; + + if (index >= this.size || this.dense[index] !== member) + return false; + + index = this.dense[this.size - 1]; + this.dense[this.sparse[member]] = index; + this.sparse[index] = this.sparse[member]; + this.size--; + + return true; +}; + +/** + * Method used to iterate over the set's values. + * + * @param {function} callback - Function to call for each item. + * @param {object} scope - Optional scope. + * @return {undefined} + */ +SparseSet.prototype.forEach = function(callback, scope) { + scope = arguments.length > 1 ? scope : this; + + var item; + + for (var i = 0; i < this.size; i++) { + item = this.dense[i]; + + callback.call(scope, item, item); + } +}; + +/** + * Method used to create an iterator over a set's values. + * + * @return {Iterator} + */ +SparseSet.prototype.values = function() { + var size = this.size, + dense = this.dense, + i = 0; + + return new Iterator(function() { + if (i < size) { + var item = dense[i]; + i++; + + return { + value: item + }; + } + + return { + done: true + }; + }); +}; + +/** + * Attaching the #.values method to Symbol.iterator if possible. 
+ */ +if (typeof Symbol !== 'undefined') + SparseSet.prototype[Symbol.iterator] = SparseSet.prototype.values; + +/** + * Convenience known methods. + */ +SparseSet.prototype.inspect = function() { + var proxy = new Set(); + + for (var i = 0; i < this.size; i++) + proxy.add(this.dense[i]); + + // Trick so that node displays the name of the constructor + Object.defineProperty(proxy, 'constructor', { + value: SparseSet, + enumerable: false + }); + + proxy.length = this.length; + + return proxy; +}; + +if (typeof Symbol !== 'undefined') + SparseSet.prototype[Symbol.for('nodejs.util.inspect.custom')] = SparseSet.prototype.inspect; + +/** + * Exporting. + */ +module.exports = SparseSet; diff --git a/amplify/functions/fetchDocuments/node_modules/mnemonist/stack.d.ts b/amplify/functions/fetchDocuments/node_modules/mnemonist/stack.d.ts new file mode 100644 index 0000000..fa6998b --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/mnemonist/stack.d.ts @@ -0,0 +1,27 @@ +/** + * Mnemonist Stack Typings + * ======================== + */ +export default class Stack implements Iterable { + + // Members + size: number; + + // Methods + clear(): void; + push(item: T): number; + pop(): T | undefined; + peek(): T | undefined; + forEach(callback: (item: T, index: number, stack: this) => void, scope?: any): void; + toArray(): Array; + values(): IterableIterator; + entries(): IterableIterator<[number, T]>; + [Symbol.iterator](): IterableIterator; + toString(): string; + toJSON(): Array; + inspect(): any; + + // Statics + static from(iterable: Iterable | {[key: string] : I}): Stack; + static of(...items: Array): Stack; +} diff --git a/amplify/functions/fetchDocuments/node_modules/mnemonist/stack.js b/amplify/functions/fetchDocuments/node_modules/mnemonist/stack.js new file mode 100644 index 0000000..9e83519 --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/mnemonist/stack.js @@ -0,0 +1,210 @@ +/** + * Mnemonist Stack + * ================ + * + * Stack 
implementation relying on JavaScript arrays, which are fast enough & + * correctly optimized for this kind of work. + */ +var Iterator = require('obliterator/iterator'), + forEach = require('obliterator/foreach'); + +/** + * Stack + * + * @constructor + */ +function Stack() { + this.clear(); +} + +/** + * Method used to clear the stack. + * + * @return {undefined} + */ +Stack.prototype.clear = function() { + + // Properties + this.items = []; + this.size = 0; +}; + +/** + * Method used to add an item to the stack. + * + * @param {any} item - Item to add. + * @return {number} + */ +Stack.prototype.push = function(item) { + this.items.push(item); + return ++this.size; +}; + +/** + * Method used to retrieve & remove the last item of the stack. + * + * @return {any} + */ +Stack.prototype.pop = function() { + if (this.size === 0) + return; + + this.size--; + return this.items.pop(); +}; + +/** + * Method used to get the last item of the stack. + * + * @return {any} + */ +Stack.prototype.peek = function() { + return this.items[this.size - 1]; +}; + +/** + * Method used to iterate over the stack. + * + * @param {function} callback - Function to call for each item. + * @param {object} scope - Optional scope. + * @return {undefined} + */ +Stack.prototype.forEach = function(callback, scope) { + scope = arguments.length > 1 ? scope : this; + + for (var i = 0, l = this.items.length; i < l; i++) + callback.call(scope, this.items[l - i - 1], i, this); +}; + +/** + * Method used to convert the stack to a JavaScript array. + * + * @return {array} + */ +Stack.prototype.toArray = function() { + var array = new Array(this.size), + l = this.size - 1, + i = this.size; + + while (i--) + array[i] = this.items[l - i]; + + return array; +}; + +/** + * Method used to create an iterator over a stack's values. 
+ * + * @return {Iterator} + */ +Stack.prototype.values = function() { + var items = this.items, + l = items.length, + i = 0; + + return new Iterator(function() { + if (i >= l) + return { + done: true + }; + + var value = items[l - i - 1]; + i++; + + return { + value: value, + done: false + }; + }); +}; + +/** + * Method used to create an iterator over a stack's entries. + * + * @return {Iterator} + */ +Stack.prototype.entries = function() { + var items = this.items, + l = items.length, + i = 0; + + return new Iterator(function() { + if (i >= l) + return { + done: true + }; + + var value = items[l - i - 1]; + + return { + value: [i++, value], + done: false + }; + }); +}; + +/** + * Attaching the #.values method to Symbol.iterator if possible. + */ +if (typeof Symbol !== 'undefined') + Stack.prototype[Symbol.iterator] = Stack.prototype.values; + + +/** + * Convenience known methods. + */ +Stack.prototype.toString = function() { + return this.toArray().join(','); +}; + +Stack.prototype.toJSON = function() { + return this.toArray(); +}; + +Stack.prototype.inspect = function() { + var array = this.toArray(); + + // Trick so that node displays the name of the constructor + Object.defineProperty(array, 'constructor', { + value: Stack, + enumerable: false + }); + + return array; +}; + +if (typeof Symbol !== 'undefined') + Stack.prototype[Symbol.for('nodejs.util.inspect.custom')] = Stack.prototype.inspect; + +/** + * Static @.from function taking an arbitrary iterable & converting it into + * a stack. + * + * @param {Iterable} iterable - Target iterable. + * @return {Stack} + */ +Stack.from = function(iterable) { + var stack = new Stack(); + + forEach(iterable, function(value) { + stack.push(value); + }); + + return stack; +}; + +/** + * Static @.of function taking an arbitrary number of arguments & converting it + * into a stack. + * + * @param {...any} args + * @return {Stack} + */ +Stack.of = function() { + return Stack.from(arguments); +}; + +/** + * Exporting. 
+ */ +module.exports = Stack; diff --git a/amplify/functions/fetchDocuments/node_modules/mnemonist/static-disjoint-set.d.ts b/amplify/functions/fetchDocuments/node_modules/mnemonist/static-disjoint-set.d.ts new file mode 100644 index 0000000..3e808da --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/mnemonist/static-disjoint-set.d.ts @@ -0,0 +1,23 @@ +/** + * Mnemonist StaticDisjointSet Typings + * ==================================== + */ +import {ArrayLike} from './utils/types'; + +export default class StaticDisjointSet { + + // Members + dimension: number; + size: number; + + // Constructor + constructor(size: number); + + // Methods + find(x: number): number; + union(x: number, y: number): this; + connected(x: number, y: number): boolean; + mapping(): ArrayLike; + compile(): Array>; + inspect(): any; +} \ No newline at end of file diff --git a/amplify/functions/fetchDocuments/node_modules/mnemonist/static-disjoint-set.js b/amplify/functions/fetchDocuments/node_modules/mnemonist/static-disjoint-set.js new file mode 100644 index 0000000..7a84b93 --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/mnemonist/static-disjoint-set.js @@ -0,0 +1,195 @@ +/* eslint no-constant-condition: 0 */ +/** + * Mnemonist StaticDisjointSet + * ============================ + * + * JavaScript implementation of a static disjoint set (union-find). + * + * Note that to remain performant, this implementation needs to know a size + * beforehand. + */ +var helpers = require('./utils/typed-arrays.js'); + +/** + * StaticDisjointSet. 
+ * + * @constructor + */ +function StaticDisjointSet(size) { + + // Optimizing the typed array types + var ParentsTypedArray = helpers.getPointerArray(size), + RanksTypedArray = helpers.getPointerArray(Math.log2(size)); + + // Properties + this.size = size; + this.dimension = size; + this.parents = new ParentsTypedArray(size); + this.ranks = new RanksTypedArray(size); + + // Initializing parents + for (var i = 0; i < size; i++) + this.parents[i] = i; +} + +/** + * Method used to find the root of the given item. + * + * @param {number} x - Target item. + * @return {number} + */ +StaticDisjointSet.prototype.find = function(x) { + var y = x; + + var c, p; + + while (true) { + c = this.parents[y]; + + if (y === c) + break; + + y = c; + } + + // Path compression + while (true) { + p = this.parents[x]; + + if (p === y) + break; + + this.parents[x] = y; + x = p; + } + + return y; +}; + +/** + * Method used to perform the union of two items. + * + * @param {number} x - First item. + * @param {number} y - Second item. + * @return {StaticDisjointSet} + */ +StaticDisjointSet.prototype.union = function(x, y) { + var xRoot = this.find(x), + yRoot = this.find(y); + + // x and y are already in the same set + if (xRoot === yRoot) + return this; + + this.dimension--; + + // x and y are not in the same set, we merge them + var xRank = this.ranks[x], + yRank = this.ranks[y]; + + if (xRank < yRank) { + this.parents[xRoot] = yRoot; + } + else if (xRank > yRank) { + this.parents[yRoot] = xRoot; + } + else { + this.parents[yRoot] = xRoot; + this.ranks[xRoot]++; + } + + return this; +}; + +/** + * Method returning whether two items are connected. + * + * @param {number} x - First item. + * @param {number} y - Second item. + * @return {boolean} + */ +StaticDisjointSet.prototype.connected = function(x, y) { + var xRoot = this.find(x); + + return xRoot === this.find(y); +}; + +/** + * Method returning the set mapping. 
+ * + * @return {TypedArray} + */ +StaticDisjointSet.prototype.mapping = function() { + var MappingClass = helpers.getPointerArray(this.dimension); + + var ids = {}, + mapping = new MappingClass(this.size), + c = 0; + + var r; + + for (var i = 0, l = this.parents.length; i < l; i++) { + r = this.find(i); + + if (typeof ids[r] === 'undefined') { + mapping[i] = c; + ids[r] = c++; + } + else { + mapping[i] = ids[r]; + } + } + + return mapping; +}; + +/** + * Method used to compile the disjoint set into an array of arrays. + * + * @return {array} + */ +StaticDisjointSet.prototype.compile = function() { + var ids = {}, + result = new Array(this.dimension), + c = 0; + + var r; + + for (var i = 0, l = this.parents.length; i < l; i++) { + r = this.find(i); + + if (typeof ids[r] === 'undefined') { + result[c] = [i]; + ids[r] = c++; + } + else { + result[ids[r]].push(i); + } + } + + return result; +}; + +/** + * Convenience known methods. + */ +StaticDisjointSet.prototype.inspect = function() { + var array = this.compile(); + + // Trick so that node displays the name of the constructor + Object.defineProperty(array, 'constructor', { + value: StaticDisjointSet, + enumerable: false + }); + + return array; +}; + +if (typeof Symbol !== 'undefined') + StaticDisjointSet.prototype[Symbol.for('nodejs.util.inspect.custom')] = StaticDisjointSet.prototype.inspect; + + +/** + * Exporting. 
+ */ +module.exports = StaticDisjointSet; diff --git a/amplify/functions/fetchDocuments/node_modules/mnemonist/static-interval-tree.d.ts b/amplify/functions/fetchDocuments/node_modules/mnemonist/static-interval-tree.d.ts new file mode 100644 index 0000000..5302f1e --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/mnemonist/static-interval-tree.d.ts @@ -0,0 +1,24 @@ +/** + * Mnemonist StaticIntervalTree Typings + * ===================================== + */ +type StaticIntervalTreeGetter = (item: T) => number; +type StaticIntervalTreeGettersTuple = [StaticIntervalTreeGetter, StaticIntervalTreeGetter]; + +export default class StaticIntervalTree { + + // Members + height: number; + size: number; + + // Constructor + constructor(intervals: Array, getters?: StaticIntervalTreeGettersTuple); + + // Methods + intervalsContainingPoint(point: number): Array; + intervalsOverlappingInterval(interval: T): Array; + inspect(): any; + + // Statics + static from(iterable: Iterable | {[key: string] : I}): StaticIntervalTree; +} \ No newline at end of file diff --git a/amplify/functions/fetchDocuments/node_modules/mnemonist/static-interval-tree.js b/amplify/functions/fetchDocuments/node_modules/mnemonist/static-interval-tree.js new file mode 100644 index 0000000..41452f8 --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/mnemonist/static-interval-tree.js @@ -0,0 +1,387 @@ +/* + * Mnemonist StaticIntervalTree + * ============================= + * + * JavaScript implementation of a static interval tree. This tree is static in + * that you are required to know all its items beforehand and to built it + * from an iterable. + * + * This implementation represents the interval tree as an augmented balanced + * binary search tree. It works by sorting the intervals by startpoint first + * then proceeds building the augmented balanced BST bottom-up from the + * sorted list. 
+ * + * Note that this implementation considers every given intervals as closed for + * simplicity's sake. + * + * For more information: https://en.wikipedia.org/wiki/Interval_tree + */ +var iterables = require('./utils/iterables.js'), + typed = require('./utils/typed-arrays.js'); + +var FixedStack = require('./fixed-stack.js'); + + +// TODO: pass index to getters +// TODO: custom comparison +// TODO: possibility to pass offset buffer + +// TODO: intervals() => Symbol.iterator +// TODO: dfs() + +/** + * Helpers. + */ + +/** + * Recursive function building the BST from the sorted list of interval + * indices. + * + * @param {array} intervals - Array of intervals to index. + * @param {function} endGetter - Getter function for end of intervals. + * @param {array} sortedIndices - Sorted indices of the intervals. + * @param {array} tree - BST memory. + * @param {array} augmentations - Array of node augmentations. + * @param {number} i - BST index of current node. + * @param {number} low - Dichotomy low index. + * @param {number} high - Dichotomy high index. + * @return {number} - Created node augmentation value. + */ +function buildBST( + intervals, + endGetter, + sortedIndices, + tree, + augmentations, + i, + low, + high +) { + var mid = (low + (high - low) / 2) | 0, + midMinusOne = ~-mid, + midPlusOne = -~mid; + + var current = sortedIndices[mid]; + tree[i] = current + 1; + + var end = endGetter ? 
endGetter(intervals[current]) : intervals[current][1]; + + var left = i * 2 + 1, + right = i * 2 + 2; + + var leftEnd = -Infinity, + rightEnd = -Infinity; + + if (low <= midMinusOne) { + leftEnd = buildBST( + intervals, + endGetter, + sortedIndices, + tree, + augmentations, + left, + low, + midMinusOne + ); + } + + if (midPlusOne <= high) { + rightEnd = buildBST( + intervals, + endGetter, + sortedIndices, + tree, + augmentations, + right, + midPlusOne, + high + ); + } + + var augmentation = Math.max(end, leftEnd, rightEnd); + + var augmentationPointer = current; + + if (augmentation === leftEnd) + augmentationPointer = augmentations[tree[left] - 1]; + else if (augmentation === rightEnd) + augmentationPointer = augmentations[tree[right] - 1]; + + augmentations[current] = augmentationPointer; + + return augmentation; +} + +/** + * StaticIntervalTree. + * + * @constructor + * @param {array} intervals - Array of intervals to index. + * @param {array} getters - Optional getters. + */ +function StaticIntervalTree(intervals, getters) { + + // Properties + this.size = intervals.length; + this.intervals = intervals; + + var startGetter = null, + endGetter = null; + + if (Array.isArray(getters)) { + startGetter = getters[0]; + endGetter = getters[1]; + } + + // Building the indices array + var length = intervals.length; + + var IndicesArray = typed.getPointerArray(length + 1); + + var indices = new IndicesArray(length); + + var i; + + for (i = 1; i < length; i++) + indices[i] = i; + + // Sorting indices array + // TODO: check if some version of radix sort can outperform this part + indices.sort(function(a, b) { + a = intervals[a]; + b = intervals[b]; + + if (startGetter) { + a = startGetter(a); + b = startGetter(b); + } + else { + a = a[0]; + b = b[0]; + } + + if (a < b) + return -1; + + if (a > b) + return 1; + + // TODO: use getters + // TODO: this ordering has the following invariant: if query interval + // contains [nodeStart, max], then whole right subtree can be 
collected + // a = a[1]; + // b = b[1]; + + // if (a < b) + // return 1; + + // if (a > b) + // return -1; + + return 0; + }); + + // Building the binary tree + var height = Math.ceil(Math.log2(length + 1)), + treeSize = Math.pow(2, height) - 1; + + var tree = new IndicesArray(treeSize); + + var augmentations = new IndicesArray(length); + + buildBST( + intervals, + endGetter, + indices, + tree, + augmentations, + 0, + 0, + length - 1 + ); + + // Dropping indices + indices = null; + + // Storing necessary information + this.height = height; + this.tree = tree; + this.augmentations = augmentations; + this.startGetter = startGetter; + this.endGetter = endGetter; + + // Initializing DFS stack + this.stack = new FixedStack(IndicesArray, this.height); +} + +/** + * Method returning a list of intervals containing the given point. + * + * @param {any} point - Target point. + * @return {array} + */ +StaticIntervalTree.prototype.intervalsContainingPoint = function(point) { + var matches = []; + + var stack = this.stack; + + stack.clear(); + stack.push(0); + + var l = this.tree.length; + + var bstIndex, + intervalIndex, + interval, + maxInterval, + start, + end, + max, + left, + right; + + while (stack.size) { + bstIndex = stack.pop(); + intervalIndex = this.tree[bstIndex] - 1; + interval = this.intervals[intervalIndex]; + maxInterval = this.intervals[this.augmentations[intervalIndex]]; + + max = this.endGetter ? this.endGetter(maxInterval) : maxInterval[1]; + + // No possible match, point is farther right than the max end value + if (point > max) + continue; + + // Searching left + left = bstIndex * 2 + 1; + + if (left < l && this.tree[left] !== 0) + stack.push(left); + + start = this.startGetter ? this.startGetter(interval) : interval[0]; + end = this.endGetter ? 
this.endGetter(interval) : interval[1]; + + // Checking current node + if (point >= start && point <= end) + matches.push(interval); + + // If the point is to the left of the start of the current interval, + // then it cannot be in the right child + if (point < start) + continue; + + // Searching right + right = bstIndex * 2 + 2; + + if (right < l && this.tree[right] !== 0) + stack.push(right); + } + + return matches; +}; + +/** + * Method returning a list of intervals overlapping the given interval. + * + * @param {any} interval - Target interval. + * @return {array} + */ +StaticIntervalTree.prototype.intervalsOverlappingInterval = function(interval) { + var intervalStart = this.startGetter ? this.startGetter(interval) : interval[0], + intervalEnd = this.endGetter ? this.endGetter(interval) : interval[1]; + + var matches = []; + + var stack = this.stack; + + stack.clear(); + stack.push(0); + + var l = this.tree.length; + + var bstIndex, + intervalIndex, + currentInterval, + maxInterval, + start, + end, + max, + left, + right; + + while (stack.size) { + bstIndex = stack.pop(); + intervalIndex = this.tree[bstIndex] - 1; + currentInterval = this.intervals[intervalIndex]; + maxInterval = this.intervals[this.augmentations[intervalIndex]]; + + max = this.endGetter ? this.endGetter(maxInterval) : maxInterval[1]; + + // No possible match, start is farther right than the max end value + if (intervalStart > max) + continue; + + // Searching left + left = bstIndex * 2 + 1; + + if (left < l && this.tree[left] !== 0) + stack.push(left); + + start = this.startGetter ? this.startGetter(currentInterval) : currentInterval[0]; + end = this.endGetter ? 
this.endGetter(currentInterval) : currentInterval[1]; + + // Checking current node + if (intervalEnd >= start && intervalStart <= end) + matches.push(currentInterval); + + // If the end is to the left of the start of the current interval, + // then it cannot be in the right child + if (intervalEnd < start) + continue; + + // Searching right + right = bstIndex * 2 + 2; + + if (right < l && this.tree[right] !== 0) + stack.push(right); + } + + return matches; +}; + +/** + * Convenience known methods. + */ +StaticIntervalTree.prototype.inspect = function() { + var proxy = this.intervals.slice(); + + // Trick so that node displays the name of the constructor + Object.defineProperty(proxy, 'constructor', { + value: StaticIntervalTree, + enumerable: false + }); + + return proxy; +}; + +if (typeof Symbol !== 'undefined') + StaticIntervalTree.prototype[Symbol.for('nodejs.util.inspect.custom')] = StaticIntervalTree.prototype.inspect; + +/** + * Static @.from function taking an arbitrary iterable & converting it into + * a structure. + * + * @param {Iterable} iterable - Target iterable. + * @return {StaticIntervalTree} + */ +StaticIntervalTree.from = function(iterable, getters) { + if (iterables.isArrayLike(iterable)) + return new StaticIntervalTree(iterable, getters); + + return new StaticIntervalTree(Array.from(iterable), getters); +}; + +/** + * Exporting. 
+ */ +module.exports = StaticIntervalTree; diff --git a/amplify/functions/fetchDocuments/node_modules/mnemonist/suffix-array.d.ts b/amplify/functions/fetchDocuments/node_modules/mnemonist/suffix-array.d.ts new file mode 100644 index 0000000..b959403 --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/mnemonist/suffix-array.d.ts @@ -0,0 +1,37 @@ +/** + * Mnemonist SuffixArray Typings + * ============================== + */ +export default class SuffixArray { + + // Members + array: Array; + length: number; + string: string | Array; + + // Constructor + constructor(string: string | Array); + + // Methods + toString(): string; + toJSON(): Array; + inspect(): any; +} + +export class GeneralizedSuffixArray { + + // Members + array: Array; + length: number; + size: number; + text: string | Array; + + // Constructor + constructor(strings: Array | Array>); + + // Methods + longestCommonSubsequence(): string | Array; + toString(): string; + toJSON(): Array; + inspect(): any; +} \ No newline at end of file diff --git a/amplify/functions/fetchDocuments/node_modules/mnemonist/suffix-array.js b/amplify/functions/fetchDocuments/node_modules/mnemonist/suffix-array.js new file mode 100644 index 0000000..14990f4 --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/mnemonist/suffix-array.js @@ -0,0 +1,352 @@ +/** + * Mnemonist Suffix Array + * ======================= + * + * Linear time implementation of a suffix array using the recursive + * method by Karkkainen and Sanders. + * + * [References]: + * https://www.cs.helsinki.fi/u/tpkarkka/publications/jacm05-revised.pdf + * http://people.mpi-inf.mpg.de/~sanders/programs/suffix/ + * http://citeseerx.ist.psu.edu/viewdoc/download?doi=10.1.1.184.442&rep=rep1&type=pdf + * + * [Article]: + * "Simple Linear Work Suffix Array Construction", Karkkainen and Sanders. + * + * [Note]: + * A paper by Simon J. Puglisi, William F. 
Smyth & Andrew Turpin named + * "The Performance of Linear Time Suffix Sorting Algorithms" seems to + * prove that supralinear algorithm are in fact better faring for + * "real" world use cases. It would be nice to check this out in JavaScript + * because the high level of the language could change a lot to the fact. + * + * The current code is largely inspired by the following: + * https://github.com/tixxit/suffixarray/blob/master/suffixarray.js + */ + +/** + * Constants. + */ +var SEPARATOR = '\u0001'; + +/** + * Function used to sort the triples. + * + * @param {string|array} string - Padded sequence. + * @param {array} array - Array to sort (will be mutated). + * @param {number} offset - Index offset. + */ +function sort(string, array, offset) { + var l = array.length, + buckets = [], + i = l, + j = -1, + b, + d = 0, + bits; + + while (i--) + j = Math.max(string[array[i] + offset], j); + + bits = j >> 24 && 32 || j >> 16 && 24 || j >> 8 && 16 || 8; + + for (; d < bits; d += 4) { + for (i = 16; i--;) + buckets[i] = []; + for (i = l; i--;) + buckets[((string[array[i] + offset]) >> d) & 15].push(array[i]); + for (b = 0; b < 16; b++) { + for (j = buckets[b].length; j--;) + array[++i] = buckets[b][j]; + } + } +} + +/** + * Comparison helper. + */ +function compare(string, lookup, m, n) { + return ( + (string[m] - string[n]) || + (m % 3 === 2 ? + (string[m + 1] - string[n + 1]) || (lookup[m + 2] - lookup[n + 2]) : + (lookup[m + 1] - lookup[n + 1])) + ); +} + +/** + * Recursive function used to build the suffix tree in linear time. + * + * @param {string|array} string - Padded sequence. + * @param {number} l - True length of sequence (unpadded). 
+ * @return {array} + */ +function build(string, l) { + var a = [], + b = [], + al = (2 * l / 3) | 0, + bl = l - al, + r = (al + 1) >> 1, + i = al, + j = 0, + k, + lookup = [], + result = []; + + if (l === 1) + return [0]; + + while (i--) + a[i] = ((i * 3) >> 1) + 1; + + for (i = 3; i--;) + sort(string, a, i); + + j = b[((a[0] / 3) | 0) + (a[0] % 3 === 1 ? 0 : r)] = 1; + + for (i = 1; i < al; i++) { + if (string[a[i]] !== string[a[i - 1]] || + string[a[i] + 1] !== string[a[i - 1] + 1] || + string[a[i] + 2] !== string[a[i - 1] + 2]) + j++; + + b[((a[i] / 3) | 0) + (a[i] % 3 === 1 ? 0 : r)] = j; + } + + if (j < al) { + b = build(b, al); + + for (i = al; i--;) + a[i] = b[i] < r ? b[i] * 3 + 1 : ((b[i] - r) * 3 + 2); + } + + for (i = al; i--;) + lookup[a[i]] = i; + lookup[l] = -1; + lookup[l + 1] = -2; + + b = l % 3 === 1 ? [l - 1] : []; + + for (i = 0; i < al; i++) { + if (a[i] % 3 === 1) + b.push(a[i] - 1); + } + + sort(string, b, 0); + + for (i = 0, j = 0, k = 0; i < al && j < bl;) + result[k++] = ( + compare(string, lookup, a[i], b[j]) < 0 ? + a[i++] : + b[j++] + ); + + while (i < al) + result[k++] = a[i++]; + + while (j < bl) + result[k++] = b[j++]; + + return result; +} + +/** + * Function used to create the array we are going to work on. + * + * @param {string|array} target - Target sequence. 
+ * @return {array} + */ +function convert(target) { + + // Creating the alphabet array + var length = target.length, + paddingOffset = length % 3, + array = new Array(length + paddingOffset), + l, + i; + + // If we have an arbitrary sequence, we need to transform it + if (typeof target !== 'string') { + var uniqueTokens = Object.create(null); + + for (i = 0; i < length; i++) { + if (!uniqueTokens[target[i]]) + uniqueTokens[target[i]] = true; + } + + var alphabet = Object.create(null), + sortedUniqueTokens = Object.keys(uniqueTokens).sort(); + + for (i = 0, l = sortedUniqueTokens.length; i < l; i++) + alphabet[sortedUniqueTokens[i]] = i + 1; + + for (i = 0; i < length; i++) { + array[i] = alphabet[target[i]]; + } + } + else { + for (i = 0; i < length; i++) + array[i] = target.charCodeAt(i); + } + + // Padding the array + for (; i < paddingOffset; i++) + array[i] = 0; + + return array; +} + +/** + * Suffix Array. + * + * @constructor + * @param {string|array} string - Sequence for which to build the suffix array. + */ +function SuffixArray(string) { + + // Properties + this.hasArbitrarySequence = typeof string !== 'string'; + this.string = string; + this.length = string.length; + + // Building the array + this.array = build(convert(string), this.length); +} + +/** + * Convenience known methods. 
+ */ +SuffixArray.prototype.toString = function() { + return this.array.join(','); +}; + +SuffixArray.prototype.toJSON = function() { + return this.array; +}; + +SuffixArray.prototype.inspect = function() { + var array = new Array(this.length); + + for (var i = 0; i < this.length; i++) + array[i] = this.string.slice(this.array[i]); + + // Trick so that node displays the name of the constructor + Object.defineProperty(array, 'constructor', { + value: SuffixArray, + enumerable: false + }); + + return array; +}; + +if (typeof Symbol !== 'undefined') + SuffixArray.prototype[Symbol.for('nodejs.util.inspect.custom')] = SuffixArray.prototype.inspect; + +/** + * Generalized Suffix Array. + * + * @constructor + */ +function GeneralizedSuffixArray(strings) { + + // Properties + this.hasArbitrarySequence = typeof strings[0] !== 'string'; + this.size = strings.length; + + if (this.hasArbitrarySequence) { + this.text = []; + + for (var i = 0, l = this.size; i < l; i++) { + this.text.push.apply(this.text, strings[i]); + + if (i < l - 1) + this.text.push(SEPARATOR); + } + } + else { + this.text = strings.join(SEPARATOR); + } + + this.firstLength = strings[0].length; + this.length = this.text.length; + + // Building the array + this.array = build(convert(this.text), this.length); +} + +/** + * Method used to retrieve the longest common subsequence of the generalized + * suffix array. + * + * @return {string|array} + */ +GeneralizedSuffixArray.prototype.longestCommonSubsequence = function() { + var lcs = this.hasArbitrarySequence ? 
[] : '', + lcp, + i, + j, + s, + t; + + for (i = 1; i < this.length; i++) { + s = this.array[i]; + t = this.array[i - 1]; + + if (s < this.firstLength && + t < this.firstLength) + continue; + + if (s > this.firstLength && + t > this.firstLength) + continue; + + lcp = Math.min(this.length - s, this.length - t); + + for (j = 0; j < lcp; j++) { + if (this.text[s + j] !== this.text[t + j]) { + lcp = j; + break; + } + } + + if (lcp > lcs.length) + lcs = this.text.slice(s, s + lcp); + } + + return lcs; +}; + +/** + * Convenience known methods. + */ +GeneralizedSuffixArray.prototype.toString = function() { + return this.array.join(','); +}; + +GeneralizedSuffixArray.prototype.toJSON = function() { + return this.array; +}; + +GeneralizedSuffixArray.prototype.inspect = function() { + var array = new Array(this.length); + + for (var i = 0; i < this.length; i++) + array[i] = this.text.slice(this.array[i]); + + // Trick so that node displays the name of the constructor + Object.defineProperty(array, 'constructor', { + value: GeneralizedSuffixArray, + enumerable: false + }); + + return array; +}; + +if (typeof Symbol !== 'undefined') + GeneralizedSuffixArray.prototype[Symbol.for('nodejs.util.inspect.custom')] = GeneralizedSuffixArray.prototype.inspect; + +/** + * Exporting. 
+ */ +SuffixArray.GeneralizedSuffixArray = GeneralizedSuffixArray; +module.exports = SuffixArray; diff --git a/amplify/functions/fetchDocuments/node_modules/mnemonist/symspell.d.ts b/amplify/functions/fetchDocuments/node_modules/mnemonist/symspell.d.ts new file mode 100644 index 0000000..0e926d0 --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/mnemonist/symspell.d.ts @@ -0,0 +1,33 @@ +/** + * Mnemonist SymSpell Typings + * =========================== + */ +type SymSpellVerbosity = 0 | 1 | 2; + +type SymSpellOptions = { + maxDistance?: number; + verbosity?: SymSpellVerbosity +}; + +type SymSpellMatch = { + term: string; + distance: number; + count: number; +} + +export default class SymSpell { + + // Members + size: number; + + // Constructor + constructor(options?: SymSpellOptions); + + // Methods + clear(): void; + add(string: string): this; + search(query: string): Array; + + // Statics + static from(strings: Iterable | {[key: string]: string}, options?: SymSpellOptions): SymSpell; +} diff --git a/amplify/functions/fetchDocuments/node_modules/mnemonist/symspell.js b/amplify/functions/fetchDocuments/node_modules/mnemonist/symspell.js new file mode 100644 index 0000000..365ee43 --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/mnemonist/symspell.js @@ -0,0 +1,547 @@ +/* eslint no-loop-func: 0 */ +/** + * Mnemonist SymSpell + * =================== + * + * JavaScript implementation of the Symmetric Delete Spelling dictionary to + * efficiently index & query expression based on edit distance. + * Note that the current implementation target the v3.0 of the algorithm. + * + * [Reference]: + * http://blog.faroo.com/2012/06/07/improved-edit-distance-based-spelling-correction/ + * https://github.com/wolfgarbe/symspell + * + * [Author]: + * Wolf Garbe + */ +var forEach = require('obliterator/foreach'); + +/** + * Constants. 
+ */ +var DEFAULT_MAX_DISTANCE = 2, + DEFAULT_VERBOSITY = 2; + +var VERBOSITY = new Set([ + // Returns only the top suggestion + 0, + // Returns suggestions with the smallest edit distance + 1, + // Returns every suggestion (no early termination) + 2 +]); + +var VERBOSITY_EXPLANATIONS = { + 0: 'Returns only the top suggestion', + 1: 'Returns suggestions with the smallest edit distance', + 2: 'Returns every suggestion (no early termination)' +}; + +/** + * Functions. + */ + +/** + * Function creating a dictionary item. + * + * @param {number} [value] - An optional suggestion. + * @return {object} - The created item. + */ +function createDictionaryItem(value) { + var suggestions = new Set(); + + if (typeof value === 'number') + suggestions.add(value); + + return { + suggestions, + count: 0 + }; +} + +/** + * Function creating a suggestion item. + * + * @return {object} - The created item. + */ +function createSuggestionItem(term, distance, count) { + return { + term: term || '', + distance: distance || 0, + count: count || 0 + }; +} + +/** + * Simplified edit function. + * + * @param {string} word - Target word. + * @param {number} distance - Distance. + * @param {number} max - Max distance. + * @param {Set} [deletes] - Set mutated to store deletes. + */ +function edits(word, distance, max, deletes) { + deletes = deletes || new Set(); + distance++; + + var deletedItem, + l = word.length, + i; + + if (l > 1) { + for (i = 0; i < l; i++) { + deletedItem = word.substring(0, i) + word.substring(i + 1); + + if (!deletes.has(deletedItem)) { + deletes.add(deletedItem); + + if (distance < max) + edits(deletedItem, distance, max, deletes); + } + } + } + + return deletes; +} + +/** + * Function used to conditionally add suggestions. + * + * @param {array} words - Words list. + * @param {number} verbosity - Verbosity level. + * @param {object} item - The target item. + * @param {string} suggestion - The target suggestion. + * @param {number} int - Integer key of the word. 
+ * @param {object} deletedItem - Considered deleted item. + * @param {SymSpell} + */ +function addLowestDistance(words, verbosity, item, suggestion, int, deletedItem) { + var first = item.suggestions.values().next().value; + + if (verbosity < 2 && + item.suggestions.size > 0 && + words[first].length - deletedItem.length > suggestion.length - deletedItem.length) { + item.suggestions = new Set(); + item.count = 0; + } + + if (verbosity === 2 || + !item.suggestions.size || + words[first].length - deletedItem.length >= suggestion.length - deletedItem.length) { + item.suggestions.add(int); + } +} + +/** + * Custom Damerau-Levenshtein used by the algorithm. + * + * @param {string} source - First string. + * @param {string} target - Second string. + * @return {number} - The distance. + */ +function damerauLevenshtein(source, target) { + var m = source.length, + n = target.length, + H = [[]], + INF = m + n, + sd = new Map(), + i, + l, + j; + + H[0][0] = INF; + + for (i = 0; i <= m; i++) { + if (!H[i + 1]) + H[i + 1] = []; + H[i + 1][1] = i; + H[i + 1][0] = INF; + } + + for (j = 0; j <= n; j++) { + H[1][j + 1] = j; + H[0][j + 1] = INF; + } + + var st = source + target, + letter; + + for (i = 0, l = st.length; i < l; i++) { + letter = st[i]; + + if (!sd.has(letter)) + sd.set(letter, 0); + } + + // Iterating + for (i = 1; i <= m; i++) { + var DB = 0; + + for (j = 1; j <= n; j++) { + var i1 = sd.get(target[j - 1]), + j1 = DB; + + if (source[i - 1] === target[j - 1]) { + H[i + 1][j + 1] = H[i][j]; + DB = j; + } + else { + H[i + 1][j + 1] = Math.min( + H[i][j], + H[i + 1][j], + H[i][j + 1] + ) + 1; + } + + H[i + 1][j + 1] = Math.min( + H[i + 1][j + 1], + H[i1][j1] + (i - i1 - 1) + 1 + (j - j1 - 1) + ); + } + + sd.set(source[i - 1], i); + } + + return H[m + 1][n + 1]; +} + +/** + * Lookup function. + * + * @param {object} dictionary - A SymSpell dictionary. + * @param {array} words - Unique words list. + * @param {number} verbosity - Verbosity level. 
+ * @param {number} maxDistance - Maximum distance. + * @param {number} maxLength - Maximum word length in the dictionary. + * @param {string} input - Input string. + * @return {array} - The list of suggestions. + */ +function lookup(dictionary, words, verbosity, maxDistance, maxLength, input) { + var length = input.length; + + if (length - maxDistance > maxLength) + return []; + + var candidates = [input], + candidateSet = new Set(), + suggestionSet = new Set(); + + var suggestions = [], + candidate, + item; + + // Exhausting every candidates + while (candidates.length > 0) { + candidate = candidates.shift(); + + // Early termination + if ( + verbosity < 2 && + suggestions.length > 0 && + length - candidate.length > suggestions[0].distance + ) + break; + + item = dictionary[candidate]; + + if (item !== undefined) { + if (typeof item === 'number') + item = createDictionaryItem(item); + + if (item.count > 0 && !suggestionSet.has(candidate)) { + suggestionSet.add(candidate); + + var suggestItem = createSuggestionItem( + candidate, + length - candidate.length, + item.count + ); + + suggestions.push(suggestItem); + + // Another early termination + if (verbosity < 2 && length - candidate.length === 0) + break; + } + + // Iterating over the item's suggestions + item.suggestions.forEach(index => { + var suggestion = words[index]; + + // Do we already have this suggestion? 
+ if (suggestionSet.has(suggestion)) + return; + + suggestionSet.add(suggestion); + + // Computing distance between candidate & suggestion + var distance = 0; + + if (input !== suggestion) { + if (suggestion.length === candidate.length) { + distance = length - candidate.length; + } + else if (length === candidate.length) { + distance = suggestion.length - candidate.length; + } + else { + var ii = 0, + jj = 0; + + var l = suggestion.length; + + while ( + ii < l && + ii < length && + suggestion[ii] === input[ii] + ) { + ii++; + } + + while ( + jj < l - ii && + jj < length && + suggestion[l - jj - 1] === input[length - jj - 1] + ) { + jj++; + } + + if (ii > 0 || jj > 0) { + distance = damerauLevenshtein( + suggestion.substr(ii, l - ii - jj), + input.substr(ii, length - ii - jj) + ); + } + else { + distance = damerauLevenshtein(suggestion, input); + } + } + } + + // Removing suggestions of higher distance + if (verbosity < 2 && + suggestions.length > 0 && + suggestions[0].distance > distance) { + suggestions = []; + } + + if (verbosity < 2 && + suggestions.length > 0 && + distance > suggestions[0].distance) { + return; + } + + if (distance <= maxDistance) { + var target = dictionary[suggestion]; + + if (target !== undefined) { + suggestions.push(createSuggestionItem( + suggestion, + distance, + target.count + )); + } + } + }); + } + + // Adding edits + if (length - candidate.length < maxDistance) { + + if (verbosity < 2 && + suggestions.length > 0 && + length - candidate.length >= suggestions[0].distance) + continue; + + for (var i = 0, l = candidate.length; i < l; i++) { + var deletedItem = ( + candidate.substring(0, i) + + candidate.substring(i + 1) + ); + + if (!candidateSet.has(deletedItem)) { + candidateSet.add(deletedItem); + candidates.push(deletedItem); + } + } + } + } + + if (verbosity === 0) + return suggestions.slice(0, 1); + + return suggestions; +} + +/** + * SymSpell. 
+ * + * @constructor + */ +function SymSpell(options) { + options = options || {}; + + this.clear(); + + // Properties + this.maxDistance = typeof options.maxDistance === 'number' ? + options.maxDistance : + DEFAULT_MAX_DISTANCE; + this.verbosity = typeof options.verbosity === 'number' ? + options.verbosity : + DEFAULT_VERBOSITY; + + // Sanity checks + if (typeof this.maxDistance !== 'number' || this.maxDistance <= 0) + throw Error('mnemonist/SymSpell.constructor: invalid `maxDistance` option. Should be a integer greater than 0.'); + + if (!VERBOSITY.has(this.verbosity)) + throw Error('mnemonist/SymSpell.constructor: invalid `verbosity` option. Should be either 0, 1 or 2.'); +} + +/** + * Method used to clear the structure. + * + * @return {undefined} + */ +SymSpell.prototype.clear = function() { + + // Properties + this.size = 0; + this.dictionary = Object.create(null); + this.maxLength = 0; + this.words = []; +}; + +/** + * Method used to add a word to the index. + * + * @param {string} word - Word to add. 
+ * @param {SymSpell} + */ +SymSpell.prototype.add = function(word) { + var item = this.dictionary[word]; + + if (item !== undefined) { + if (typeof item === 'number') { + item = createDictionaryItem(item); + this.dictionary[word] = item; + } + + item.count++; + } + + else { + item = createDictionaryItem(); + item.count++; + + this.dictionary[word] = item; + + if (word.length > this.maxLength) + this.maxLength = word.length; + } + + if (item.count === 1) { + var number = this.words.length; + this.words.push(word); + + var deletes = edits(word, 0, this.maxDistance); + + deletes.forEach(deletedItem => { + var target = this.dictionary[deletedItem]; + + if (target !== undefined) { + if (typeof target === 'number') { + target = createDictionaryItem(target); + + this.dictionary[deletedItem] = target; + } + + if (!target.suggestions.has(number)) { + addLowestDistance( + this.words, + this.verbosity, + target, + word, + number, + deletedItem + ); + } + } + else { + this.dictionary[deletedItem] = number; + } + }); + } + + this.size++; + + return this; +}; + +/** + * Method used to search the index. + * + * @param {string} input - Input query. + * @return {array} - The found suggestions. + */ +SymSpell.prototype.search = function(input) { + return lookup( + this.dictionary, + this.words, + this.verbosity, + this.maxDistance, + this.maxLength, + input + ); +}; + +/** + * Convenience known methods. 
+ */ +SymSpell.prototype.inspect = function() { + var array = []; + + array.size = this.size; + array.maxDistance = this.maxDistance; + array.verbosity = this.verbosity; + array.behavior = VERBOSITY_EXPLANATIONS[this.verbosity]; + + for (var k in this.dictionary) { + if (typeof this.dictionary[k] === 'object' && this.dictionary[k].count) + array.push([k, this.dictionary[k].count]); + } + + // Trick so that node displays the name of the constructor + Object.defineProperty(array, 'constructor', { + value: SymSpell, + enumerable: false + }); + + return array; +}; + +if (typeof Symbol !== 'undefined') + SymSpell.prototype[Symbol.for('nodejs.util.inspect.custom')] = SymSpell.prototype.inspect; + +/** + * Static @.from function taking an arbitrary iterable & converting it into + * a structure. + * + * @param {Iterable} iterable - Target iterable. + * @return {SymSpell} + */ +SymSpell.from = function(iterable, options) { + var index = new SymSpell(options); + + forEach(iterable, function(value) { + index.add(value); + }); + + return index; +}; + +/** + * Exporting. 
+ */ +module.exports = SymSpell; diff --git a/amplify/functions/fetchDocuments/node_modules/mnemonist/trie-map.d.ts b/amplify/functions/fetchDocuments/node_modules/mnemonist/trie-map.d.ts new file mode 100644 index 0000000..b083304 --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/mnemonist/trie-map.d.ts @@ -0,0 +1,30 @@ +/** + * Mnemonist TrieMap Typings + * ========================== + */ +export default class TrieMap implements Iterable<[K, V]> { + + // Members + size: number; + + // Constructor + constructor(Token?: new () => K); + + // Methods + clear(): void; + set(prefix: K, value: V): this; + update(prefix: K, updateFunction: (oldValue: V | undefined) => V): this + get(prefix: K): V; + delete(prefix: K): boolean; + has(prefix: K): boolean; + find(prefix: K): Array<[K, V]>; + values(): IterableIterator; + prefixes(): IterableIterator; + keys(): IterableIterator; + entries(): IterableIterator<[K, V]>; + [Symbol.iterator](): IterableIterator<[K, V]>; + inspect(): any; + + // Statics + static from(iterable: Iterable<[I, J]> | {[key: string]: J}): TrieMap; +} diff --git a/amplify/functions/fetchDocuments/node_modules/mnemonist/trie-map.js b/amplify/functions/fetchDocuments/node_modules/mnemonist/trie-map.js new file mode 100644 index 0000000..d601448 --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/mnemonist/trie-map.js @@ -0,0 +1,477 @@ +/** + * Mnemonist TrieMap + * ================== + * + * JavaScript TrieMap implementation based upon plain objects. As such this + * structure is more a convenience building upon the trie's advantages than + * a real performant alternative to already existing structures. + * + * Note that the Trie is based upon the TrieMap since the underlying machine + * is the very same. The Trie just does not let you set values and only + * considers the existence of the given prefixes. + */ +var forEach = require('obliterator/foreach'), + Iterator = require('obliterator/iterator'); + +/** + * Constants. 
+ */ +var SENTINEL = String.fromCharCode(0); + +/** + * TrieMap. + * + * @constructor + */ +function TrieMap(Token) { + this.mode = Token === Array ? 'array' : 'string'; + this.clear(); +} + +/** + * Method used to clear the trie. + * + * @return {undefined} + */ +TrieMap.prototype.clear = function() { + + // Properties + this.root = {}; + this.size = 0; +}; + +/** + * Method used to set the value of the given prefix in the trie. + * + * @param {string|array} prefix - Prefix to follow. + * @param {any} value - Value for the prefix. + * @return {TrieMap} + */ +TrieMap.prototype.set = function(prefix, value) { + var node = this.root, + token; + + for (var i = 0, l = prefix.length; i < l; i++) { + token = prefix[i]; + + node = node[token] || (node[token] = {}); + } + + // Do we need to increase size? + if (!(SENTINEL in node)) + this.size++; + + node[SENTINEL] = value; + + return this; +}; + +/** + * Method used to update the value of the given prefix in the trie. + * + * @param {string|array} prefix - Prefix to follow. + * @param {(oldValue: any | undefined) => any} updateFunction - Update value visitor callback. + * @return {TrieMap} + */ +TrieMap.prototype.update = function(prefix, updateFunction) { + var node = this.root, + token; + + for (var i = 0, l = prefix.length; i < l; i++) { + token = prefix[i]; + + node = node[token] || (node[token] = {}); + } + + // Do we need to increase size? + if (!(SENTINEL in node)) + this.size++; + + node[SENTINEL] = updateFunction(node[SENTINEL]); + + return this; +}; + +/** + * Method used to return the value sitting at the end of the given prefix or + * undefined if none exist. + * + * @param {string|array} prefix - Prefix to follow. 
+ * @return {any|undefined} + */ +TrieMap.prototype.get = function(prefix) { + var node = this.root, + token, + i, + l; + + for (i = 0, l = prefix.length; i < l; i++) { + token = prefix[i]; + node = node[token]; + + // Prefix does not exist + if (typeof node === 'undefined') + return; + } + + if (!(SENTINEL in node)) + return; + + return node[SENTINEL]; +}; + +/** + * Method used to delete a prefix from the trie. + * + * @param {string|array} prefix - Prefix to delete. + * @return {boolean} + */ +TrieMap.prototype.delete = function(prefix) { + var node = this.root, + toPrune = null, + tokenToPrune = null, + parent, + token, + i, + l; + + for (i = 0, l = prefix.length; i < l; i++) { + token = prefix[i]; + parent = node; + node = node[token]; + + // Prefix does not exist + if (typeof node === 'undefined') + return false; + + // Keeping track of a potential branch to prune + if (toPrune !== null) { + if (Object.keys(node).length > 1) { + toPrune = null; + tokenToPrune = null; + } + } + else { + if (Object.keys(node).length < 2) { + toPrune = parent; + tokenToPrune = token; + } + } + } + + if (!(SENTINEL in node)) + return false; + + this.size--; + + if (toPrune) + delete toPrune[tokenToPrune]; + else + delete node[SENTINEL]; + + return true; +}; + +// TODO: add #.prune? + +/** + * Method used to assert whether the given prefix exists in the TrieMap. + * + * @param {string|array} prefix - Prefix to check. + * @return {boolean} + */ +TrieMap.prototype.has = function(prefix) { + var node = this.root, + token; + + for (var i = 0, l = prefix.length; i < l; i++) { + token = prefix[i]; + node = node[token]; + + if (typeof node === 'undefined') + return false; + } + + return SENTINEL in node; +}; + +/** + * Method used to retrieve every item in the trie with the given prefix. + * + * @param {string|array} prefix - Prefix to query. 
+ * @return {array} + */ +TrieMap.prototype.find = function(prefix) { + var isString = typeof prefix === 'string'; + + var node = this.root, + matches = [], + token, + i, + l; + + for (i = 0, l = prefix.length; i < l; i++) { + token = prefix[i]; + node = node[token]; + + if (typeof node === 'undefined') + return matches; + } + + // Performing DFS from prefix + var nodeStack = [node], + prefixStack = [prefix], + k; + + while (nodeStack.length) { + prefix = prefixStack.pop(); + node = nodeStack.pop(); + + for (k in node) { + if (k === SENTINEL) { + matches.push([prefix, node[SENTINEL]]); + continue; + } + + nodeStack.push(node[k]); + prefixStack.push(isString ? prefix + k : prefix.concat(k)); + } + } + + return matches; +}; + +/** + * Method returning an iterator over the trie's values. + * + * @param {string|array} [prefix] - Optional starting prefix. + * @return {Iterator} + */ +TrieMap.prototype.values = function(prefix) { + var node = this.root, + nodeStack = [], + token, + i, + l; + + // Resolving initial prefix + if (prefix) { + for (i = 0, l = prefix.length; i < l; i++) { + token = prefix[i]; + node = node[token]; + + // If the prefix does not exist, we return an empty iterator + if (typeof node === 'undefined') + return Iterator.empty(); + } + } + + nodeStack.push(node); + + return new Iterator(function() { + var currentNode, + hasValue = false, + k; + + while (nodeStack.length) { + currentNode = nodeStack.pop(); + + for (k in currentNode) { + if (k === SENTINEL) { + hasValue = true; + continue; + } + + nodeStack.push(currentNode[k]); + } + + if (hasValue) + return {done: false, value: currentNode[SENTINEL]}; + } + + return {done: true}; + }); +}; + +/** + * Method returning an iterator over the trie's prefixes. + * + * @param {string|array} [prefix] - Optional starting prefix. 
+ * @return {Iterator} + */ +TrieMap.prototype.prefixes = function(prefix) { + var node = this.root, + nodeStack = [], + prefixStack = [], + token, + i, + l; + + var isString = this.mode === 'string'; + + // Resolving initial prefix + if (prefix) { + for (i = 0, l = prefix.length; i < l; i++) { + token = prefix[i]; + node = node[token]; + + // If the prefix does not exist, we return an empty iterator + if (typeof node === 'undefined') + return Iterator.empty(); + } + } + else { + prefix = isString ? '' : []; + } + + nodeStack.push(node); + prefixStack.push(prefix); + + return new Iterator(function() { + var currentNode, + currentPrefix, + hasValue = false, + k; + + while (nodeStack.length) { + currentNode = nodeStack.pop(); + currentPrefix = prefixStack.pop(); + + for (k in currentNode) { + if (k === SENTINEL) { + hasValue = true; + continue; + } + + nodeStack.push(currentNode[k]); + prefixStack.push(isString ? currentPrefix + k : currentPrefix.concat(k)); + } + + if (hasValue) + return {done: false, value: currentPrefix}; + } + + return {done: true}; + }); +}; +TrieMap.prototype.keys = TrieMap.prototype.prefixes; + +/** + * Method returning an iterator over the trie's entries. + * + * @param {string|array} [prefix] - Optional starting prefix. + * @return {Iterator} + */ +TrieMap.prototype.entries = function(prefix) { + var node = this.root, + nodeStack = [], + prefixStack = [], + token, + i, + l; + + var isString = this.mode === 'string'; + + // Resolving initial prefix + if (prefix) { + for (i = 0, l = prefix.length; i < l; i++) { + token = prefix[i]; + node = node[token]; + + // If the prefix does not exist, we return an empty iterator + if (typeof node === 'undefined') + return Iterator.empty(); + } + } + else { + prefix = isString ? 
'' : []; + } + + nodeStack.push(node); + prefixStack.push(prefix); + + return new Iterator(function() { + var currentNode, + currentPrefix, + hasValue = false, + k; + + while (nodeStack.length) { + currentNode = nodeStack.pop(); + currentPrefix = prefixStack.pop(); + + for (k in currentNode) { + if (k === SENTINEL) { + hasValue = true; + continue; + } + + nodeStack.push(currentNode[k]); + prefixStack.push(isString ? currentPrefix + k : currentPrefix.concat(k)); + } + + if (hasValue) + return {done: false, value: [currentPrefix, currentNode[SENTINEL]]}; + } + + return {done: true}; + }); +}; + +/** + * Attaching the #.entries method to Symbol.iterator if possible. + */ +if (typeof Symbol !== 'undefined') + TrieMap.prototype[Symbol.iterator] = TrieMap.prototype.entries; + +/** + * Convenience known methods. + */ +TrieMap.prototype.inspect = function() { + var proxy = new Array(this.size); + + var iterator = this.entries(), + step, + i = 0; + + while ((step = iterator.next(), !step.done)) + proxy[i++] = step.value; + + // Trick so that node displays the name of the constructor + Object.defineProperty(proxy, 'constructor', { + value: TrieMap, + enumerable: false + }); + + return proxy; +}; + +if (typeof Symbol !== 'undefined') + TrieMap.prototype[Symbol.for('nodejs.util.inspect.custom')] = TrieMap.prototype.inspect; + +TrieMap.prototype.toJSON = function() { + return this.root; +}; + +/** + * Static @.from function taking an arbitrary iterable & converting it into + * a trie. + * + * @param {Iterable} iterable - Target iterable. + * @return {TrieMap} + */ +TrieMap.from = function(iterable) { + var trie = new TrieMap(); + + forEach(iterable, function(value, key) { + trie.set(key, value); + }); + + return trie; +}; + +/** + * Exporting. 
+ */ +TrieMap.SENTINEL = SENTINEL; +module.exports = TrieMap; diff --git a/amplify/functions/fetchDocuments/node_modules/mnemonist/trie.d.ts b/amplify/functions/fetchDocuments/node_modules/mnemonist/trie.d.ts new file mode 100644 index 0000000..4b2a202 --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/mnemonist/trie.d.ts @@ -0,0 +1,26 @@ +/** + * Mnemonist Trie Typings + * ======================= + */ +export default class Trie implements Iterable { + + // Members + size: number; + + // Constructor + constructor(Token?: new () => T); + + // Methods + clear(): void; + add(prefix: T): this; + delete(prefix: T): boolean; + has(prefix: T): boolean; + find(prefix: T): Array; + prefixes(): IterableIterator; + keys(): IterableIterator; + [Symbol.iterator](): IterableIterator; + inspect(): any; + + // Statics + static from(iterable: Iterable | {[key: string]: I}): Trie; +} diff --git a/amplify/functions/fetchDocuments/node_modules/mnemonist/trie.js b/amplify/functions/fetchDocuments/node_modules/mnemonist/trie.js new file mode 100644 index 0000000..9562aef --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/mnemonist/trie.js @@ -0,0 +1,167 @@ +/** + * Mnemonist Trie + * =============== + * + * JavaScript Trie implementation based upon plain objects. As such this + * structure is more a convenience building upon the trie's advantages than + * a real performant alternative to already existing structures. + * + * Note that the Trie is based upon the TrieMap since the underlying machine + * is the very same. The Trie just does not let you set values and only + * considers the existence of the given prefixes. + */ +var forEach = require('obliterator/foreach'), + TrieMap = require('./trie-map.js'); + +/** + * Constants. + */ +var SENTINEL = String.fromCharCode(0); + +/** + * Trie. + * + * @constructor + */ +function Trie(Token) { + this.mode = Token === Array ? 
'array' : 'string'; + this.clear(); +} + +// Re-using TrieMap's prototype +for (var methodName in TrieMap.prototype) + Trie.prototype[methodName] = TrieMap.prototype[methodName]; + +// Dropping irrelevant methods +delete Trie.prototype.set; +delete Trie.prototype.get; +delete Trie.prototype.values; +delete Trie.prototype.entries; + +/** + * Method used to add the given prefix to the trie. + * + * @param {string|array} prefix - Prefix to follow. + * @return {TrieMap} + */ +Trie.prototype.add = function(prefix) { + var node = this.root, + token; + + for (var i = 0, l = prefix.length; i < l; i++) { + token = prefix[i]; + + node = node[token] || (node[token] = {}); + } + + // Do we need to increase size? + if (!(SENTINEL in node)) + this.size++; + + node[SENTINEL] = true; + + return this; +}; + +/** + * Method used to retrieve every item in the trie with the given prefix. + * + * @param {string|array} prefix - Prefix to query. + * @return {array} + */ +Trie.prototype.find = function(prefix) { + var isString = typeof prefix === 'string'; + + var node = this.root, + matches = [], + token, + i, + l; + + for (i = 0, l = prefix.length; i < l; i++) { + token = prefix[i]; + node = node[token]; + + if (typeof node === 'undefined') + return matches; + } + + // Performing DFS from prefix + var nodeStack = [node], + prefixStack = [prefix], + k; + + while (nodeStack.length) { + prefix = prefixStack.pop(); + node = nodeStack.pop(); + + for (k in node) { + if (k === SENTINEL) { + matches.push(prefix); + continue; + } + + nodeStack.push(node[k]); + prefixStack.push(isString ? prefix + k : prefix.concat(k)); + } + } + + return matches; +}; + +/** + * Attaching the #.keys method to Symbol.iterator if possible. + */ +if (typeof Symbol !== 'undefined') + Trie.prototype[Symbol.iterator] = Trie.prototype.keys; + +/** + * Convenience known methods. 
+ */ +Trie.prototype.inspect = function() { + var proxy = new Set(); + + var iterator = this.keys(), + step; + + while ((step = iterator.next(), !step.done)) + proxy.add(step.value); + + // Trick so that node displays the name of the constructor + Object.defineProperty(proxy, 'constructor', { + value: Trie, + enumerable: false + }); + + return proxy; +}; + +if (typeof Symbol !== 'undefined') + Trie.prototype[Symbol.for('nodejs.util.inspect.custom')] = Trie.prototype.inspect; + +Trie.prototype.toJSON = function() { + return this.root; +}; + +/** + * Static @.from function taking an arbitrary iterable & converting it into + * a trie. + * + * @param {Iterable} iterable - Target iterable. + * @return {Trie} + */ +Trie.from = function(iterable) { + var trie = new Trie(); + + forEach(iterable, function(value) { + trie.add(value); + }); + + return trie; +}; + +/** + * Exporting. + */ +Trie.SENTINEL = SENTINEL; +module.exports = Trie; diff --git a/amplify/functions/fetchDocuments/node_modules/mnemonist/utils/binary-search.js b/amplify/functions/fetchDocuments/node_modules/mnemonist/utils/binary-search.js new file mode 100644 index 0000000..0666c82 --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/mnemonist/utils/binary-search.js @@ -0,0 +1,216 @@ +/** + * Mnemonist Binary Search Helpers + * ================================ + * + * Typical binary search functions. + */ + +/** + * Function returning the index of the search value in the array or `-1` if + * not found. + * + * @param {array} array - Haystack. + * @param {any} value - Needle. + * @return {number} + */ +exports.search = function(array, value, lo, hi) { + var mid = 0; + + lo = typeof lo !== 'undefined' ? lo : 0; + hi = typeof hi !== 'undefined' ? 
hi : array.length; + + hi--; + + var current; + + while (lo <= hi) { + mid = (lo + hi) >>> 1; + + current = array[mid]; + + if (current > value) { + hi = ~-mid; + } + else if (current < value) { + lo = -~mid; + } + else { + return mid; + } + } + + return -1; +}; + +/** + * Same as above, but can use a custom comparator function. + * + * @param {function} comparator - Custom comparator function. + * @param {array} array - Haystack. + * @param {any} value - Needle. + * @return {number} + */ +exports.searchWithComparator = function(comparator, array, value) { + var mid = 0, + lo = 0, + hi = ~-array.length, + comparison; + + while (lo <= hi) { + mid = (lo + hi) >>> 1; + + comparison = comparator(array[mid], value); + + if (comparison > 0) { + hi = ~-mid; + } + else if (comparison < 0) { + lo = -~mid; + } + else { + return mid; + } + } + + return -1; +}; + +/** + * Function returning the lower bound of the given value in the array. + * + * @param {array} array - Haystack. + * @param {any} value - Needle. + * @param {number} [lo] - Start index. + * @param {numner} [hi] - End index. + * @return {number} + */ +exports.lowerBound = function(array, value, lo, hi) { + var mid = 0; + + lo = typeof lo !== 'undefined' ? lo : 0; + hi = typeof hi !== 'undefined' ? hi : array.length; + + while (lo < hi) { + mid = (lo + hi) >>> 1; + + if (value <= array[mid]) { + hi = mid; + } + else { + lo = -~mid; + } + } + + return lo; +}; + +/** + * Same as above, but can use a custom comparator function. + * + * @param {function} comparator - Custom comparator function. + * @param {array} array - Haystack. + * @param {any} value - Needle. + * @return {number} + */ +exports.lowerBoundWithComparator = function(comparator, array, value) { + var mid = 0, + lo = 0, + hi = array.length; + + while (lo < hi) { + mid = (lo + hi) >>> 1; + + if (comparator(value, array[mid]) <= 0) { + hi = mid; + } + else { + lo = -~mid; + } + } + + return lo; +}; + +/** + * Same as above, but can work on sorted indices. 
+ * + * @param {array} array - Haystack. + * @param {array} array - Indices. + * @param {any} value - Needle. + * @return {number} + */ +exports.lowerBoundIndices = function(array, indices, value, lo, hi) { + var mid = 0; + + lo = typeof lo !== 'undefined' ? lo : 0; + hi = typeof hi !== 'undefined' ? hi : array.length; + + while (lo < hi) { + mid = (lo + hi) >>> 1; + + if (value <= array[indices[mid]]) { + hi = mid; + } + else { + lo = -~mid; + } + } + + return lo; +}; + +/** + * Function returning the upper bound of the given value in the array. + * + * @param {array} array - Haystack. + * @param {any} value - Needle. + * @param {number} [lo] - Start index. + * @param {numner} [hi] - End index. + * @return {number} + */ +exports.upperBound = function(array, value, lo, hi) { + var mid = 0; + + lo = typeof lo !== 'undefined' ? lo : 0; + hi = typeof hi !== 'undefined' ? hi : array.length; + + while (lo < hi) { + mid = (lo + hi) >>> 1; + + if (value >= array[mid]) { + lo = -~mid; + } + else { + hi = mid; + } + } + + return lo; +}; + +/** + * Same as above, but can use a custom comparator function. + * + * @param {function} comparator - Custom comparator function. + * @param {array} array - Haystack. + * @param {any} value - Needle. + * @return {number} + */ +exports.upperBoundWithComparator = function(comparator, array, value) { + var mid = 0, + lo = 0, + hi = array.length; + + while (lo < hi) { + mid = (lo + hi) >>> 1; + + if (comparator(value, array[mid]) >= 0) { + lo = -~mid; + } + else { + hi = mid; + } + } + + return lo; +}; diff --git a/amplify/functions/fetchDocuments/node_modules/mnemonist/utils/bitwise.js b/amplify/functions/fetchDocuments/node_modules/mnemonist/utils/bitwise.js new file mode 100644 index 0000000..191dfc2 --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/mnemonist/utils/bitwise.js @@ -0,0 +1,109 @@ +/** + * Mnemonist Bitwise Helpers + * ========================== + * + * Miscellaneous helpers helping with bitwise operations. 
+ */ + +/** + * Takes a 32 bits integer and returns its MSB using SWAR strategy. + * + * @param {number} x - Target number. + * @return {number} + */ +function msb32(x) { + x |= (x >> 1); + x |= (x >> 2); + x |= (x >> 4); + x |= (x >> 8); + x |= (x >> 16); + + return (x & ~(x >> 1)); +} +exports.msb32 = msb32; + +/** + * Takes a byte and returns its MSB using SWAR strategy. + * + * @param {number} x - Target number. + * @return {number} + */ +function msb8(x) { + x |= (x >> 1); + x |= (x >> 2); + x |= (x >> 4); + + return (x & ~(x >> 1)); +} +exports.msb8 = msb8; + +/** + * Takes a number and return bit at position. + * + * @param {number} x - Target number. + * @param {number} pos - Position. + * @return {number} + */ +exports.test = function(x, pos) { + return (x >> pos) & 1; +}; + +/** + * Compare two bytes and return their critical bit. + * + * @param {number} a - First byte. + * @param {number} b - Second byte. + * @return {number} + */ +exports.criticalBit8 = function(a, b) { + return msb8(a ^ b); +}; + +exports.criticalBit8Mask = function(a, b) { + return (~msb8(a ^ b) >>> 0) & 0xff; +}; + +exports.testCriticalBit8 = function(x, mask) { + return (1 + (x | mask)) >> 8; +}; + +exports.criticalBit32Mask = function(a, b) { + return (~msb32(a ^ b) >>> 0) & 0xffffffff; +}; + +/** + * Takes a 32 bits integer and returns its population count (number of 1 of + * the binary representation). + * + * @param {number} x - Target number. + * @return {number} + */ +exports.popcount = function(x) { + x -= x >> 1 & 0x55555555; + x = (x & 0x33333333) + (x >> 2 & 0x33333333); + x = x + (x >> 4) & 0x0f0f0f0f; + x += x >> 8; + x += x >> 16; + return x & 0x7f; +}; + +/** + * Slightly faster popcount function based on a precomputed table of 8bits + * words. + * + * @param {number} x - Target number. 
+ * @return {number} + */ +var TABLE8 = new Uint8Array(Math.pow(2, 8)); + +for (var i = 0, l = TABLE8.length; i < l; i++) + TABLE8[i] = exports.popcount(i); + +exports.table8Popcount = function(x) { + return ( + TABLE8[x & 0xff] + + TABLE8[(x >> 8) & 0xff] + + TABLE8[(x >> 16) & 0xff] + + TABLE8[(x >> 24) & 0xff] + ); +}; diff --git a/amplify/functions/fetchDocuments/node_modules/mnemonist/utils/comparators.js b/amplify/functions/fetchDocuments/node_modules/mnemonist/utils/comparators.js new file mode 100644 index 0000000..498b4a6 --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/mnemonist/utils/comparators.js @@ -0,0 +1,79 @@ +/** + * Mnemonist Heap Comparators + * =========================== + * + * Default comparators & functions dealing with comparators reversing etc. + */ +var DEFAULT_COMPARATOR = function(a, b) { + if (a < b) + return -1; + if (a > b) + return 1; + + return 0; +}; + +var DEFAULT_REVERSE_COMPARATOR = function(a, b) { + if (a < b) + return 1; + if (a > b) + return -1; + + return 0; +}; + +/** + * Function used to reverse a comparator. + */ +function reverseComparator(comparator) { + return function(a, b) { + return comparator(b, a); + }; +} + +/** + * Function returning a tuple comparator. + */ +function createTupleComparator(size) { + if (size === 2) { + return function(a, b) { + if (a[0] < b[0]) + return -1; + + if (a[0] > b[0]) + return 1; + + if (a[1] < b[1]) + return -1; + + if (a[1] > b[1]) + return 1; + + return 0; + }; + } + + return function(a, b) { + var i = 0; + + while (i < size) { + if (a[i] < b[i]) + return -1; + + if (a[i] > b[i]) + return 1; + + i++; + } + + return 0; + }; +} + +/** + * Exporting. 
+ */ +exports.DEFAULT_COMPARATOR = DEFAULT_COMPARATOR; +exports.DEFAULT_REVERSE_COMPARATOR = DEFAULT_REVERSE_COMPARATOR; +exports.reverseComparator = reverseComparator; +exports.createTupleComparator = createTupleComparator; diff --git a/amplify/functions/fetchDocuments/node_modules/mnemonist/utils/hash-tables.js b/amplify/functions/fetchDocuments/node_modules/mnemonist/utils/hash-tables.js new file mode 100644 index 0000000..dfed95e --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/mnemonist/utils/hash-tables.js @@ -0,0 +1,107 @@ +/* eslint no-constant-condition: 0 */ +/** + * Mnemonist Hashtable Helpers + * ============================ + * + * Miscellaneous helpers helper function dealing with hashtables. + */ +function jenkinsInt32(a) { + + a = (a + 0x7ed55d16) + (a << 12); + a = (a ^ 0xc761c23c) ^ (a >> 19); + a = (a + 0x165667b1) + (a << 5); + a = (a + 0xd3a2646c) ^ (a << 9); + a = (a + 0xfd7046c5) + (a << 3); + a = (a ^ 0xb55a4f09) ^ (a >> 16); + + return a; +} + +function linearProbingGet(hash, keys, values, key) { + var n = keys.length, + j = hash(key) & (n - 1), + i = j; + + var c; + + while (true) { + c = keys[i]; + + if (c === key) + return values[i]; + + else if (c === 0) + return; + + // Handling wrapping around + i += 1; + i %= n; + + // Full turn + if (i === j) + return; + } +} + +function linearProbingHas(hash, keys, key) { + var n = keys.length, + j = hash(key) & (n - 1), + i = j; + + var c; + + while (true) { + c = keys[i]; + + if (c === key) + return true; + + else if (c === 0) + return false; + + // Handling wrapping around + i += 1; + i %= n; + + // Full turn + if (i === j) + return false; + } +} + +function linearProbingSet(hash, keys, values, key, value) { + var n = keys.length, + j = hash(key) & (n - 1), + i = j; + + var c; + + while (true) { + c = keys[i]; + + if (c === 0 || c === key) + break; + + // Handling wrapping around + i += 1; + i %= n; + + // Full turn + if (i === j) + throw new 
Error('mnemonist/utils/hash-tables.linearProbingSet: table is full.'); + } + + keys[i] = key; + values[i] = value; +} + +module.exports = { + hashes: { + jenkinsInt32: jenkinsInt32 + }, + linearProbing: { + get: linearProbingGet, + has: linearProbingHas, + set: linearProbingSet + } +}; diff --git a/amplify/functions/fetchDocuments/node_modules/mnemonist/utils/iterables.js b/amplify/functions/fetchDocuments/node_modules/mnemonist/utils/iterables.js new file mode 100644 index 0000000..d95f701 --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/mnemonist/utils/iterables.js @@ -0,0 +1,93 @@ +/** + * Mnemonist Iterable Function + * ============================ + * + * Harmonized iteration helpers over mixed iterable targets. + */ +var forEach = require('obliterator/foreach'); + +var typed = require('./typed-arrays.js'); + +/** + * Function used to determine whether the given object supports array-like + * random access. + * + * @param {any} target - Target object. + * @return {boolean} + */ +function isArrayLike(target) { + return Array.isArray(target) || typed.isTypedArray(target); +} + +/** + * Function used to guess the length of the structure over which we are going + * to iterate. + * + * @param {any} target - Target object. + * @return {number|undefined} + */ +function guessLength(target) { + if (typeof target.length === 'number') + return target.length; + + if (typeof target.size === 'number') + return target.size; + + return; +} + +/** + * Function used to convert an iterable to an array. + * + * @param {any} target - Iteration target. + * @return {array} + */ +function toArray(target) { + var l = guessLength(target); + + var array = typeof l === 'number' ? new Array(l) : []; + + var i = 0; + + // TODO: we could optimize when given target is array like + forEach(target, function(value) { + array[i++] = value; + }); + + return array; +} + +/** + * Same as above but returns a supplementary indices array. 
+ * + * @param {any} target - Iteration target. + * @return {array} + */ +function toArrayWithIndices(target) { + var l = guessLength(target); + + var IndexArray = typeof l === 'number' ? + typed.getPointerArray(l) : + Array; + + var array = typeof l === 'number' ? new Array(l) : []; + var indices = typeof l === 'number' ? new IndexArray(l) : []; + + var i = 0; + + // TODO: we could optimize when given target is array like + forEach(target, function(value) { + array[i] = value; + indices[i] = i++; + }); + + return [array, indices]; +} + +/** + * Exporting. + */ +exports.isArrayLike = isArrayLike; +exports.guessLength = guessLength; +exports.toArray = toArray; +exports.toArrayWithIndices = toArrayWithIndices; diff --git a/amplify/functions/fetchDocuments/node_modules/mnemonist/utils/merge.js b/amplify/functions/fetchDocuments/node_modules/mnemonist/utils/merge.js new file mode 100644 index 0000000..bf40d45 --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/mnemonist/utils/merge.js @@ -0,0 +1,563 @@ +/* eslint no-constant-condition: 0 */ +/** + * Mnemonist Merge Helpers + * ======================== + * + * Various merge algorithms used to handle sorted lists. Note that the given + * functions are optimized and won't accept mixed arguments. + * + * Note: maybe this piece of code belong to sortilege, along with binary-search. + */ +var typed = require('./typed-arrays.js'), + isArrayLike = require('./iterables.js').isArrayLike, + binarySearch = require('./binary-search.js'), + FibonacciHeap = require('../fibonacci-heap.js'); + +// TODO: update to use exponential search +// TODO: when not knowing final length => should use plain arrays rather than +// same type as input + +/** + * Merge two sorted array-like structures into one. + * + * @param {array} a - First array. + * @param {array} b - Second array. 
+ * @return {array} + */ +function mergeArrays(a, b) { + + // One of the arrays is empty + if (a.length === 0) + return b.slice(); + if (b.length === 0) + return a.slice(); + + // Finding min array + var tmp; + + if (a[0] > b[0]) { + tmp = a; + a = b; + b = tmp; + } + + // If array have non overlapping ranges, we can just concatenate them + var aEnd = a[a.length - 1], + bStart = b[0]; + + if (aEnd <= bStart) { + if (typed.isTypedArray(a)) + return typed.concat(a, b); + return a.concat(b); + } + + // Initializing target + var array = new a.constructor(a.length + b.length); + + // Iterating until we overlap + var i, l, v; + + for (i = 0, l = a.length; i < l; i++) { + v = a[i]; + + if (v <= bStart) + array[i] = v; + else + break; + } + + // Handling overlap + var aPointer = i, + aLength = a.length, + bPointer = 0, + bLength = b.length, + aHead, + bHead; + + while (aPointer < aLength && bPointer < bLength) { + aHead = a[aPointer]; + bHead = b[bPointer]; + + if (aHead <= bHead) { + array[i++] = aHead; + aPointer++; + } + else { + array[i++] = bHead; + bPointer++; + } + } + + // Filling + while (aPointer < aLength) + array[i++] = a[aPointer++]; + while (bPointer < bLength) + array[i++] = b[bPointer++]; + + return array; +} + +/** + * Perform the union of two already unique sorted array-like structures into one. + * + * @param {array} a - First array. + * @param {array} b - Second array. 
+ * @return {array} + */ +function unionUniqueArrays(a, b) { + + // One of the arrays is empty + if (a.length === 0) + return b.slice(); + if (b.length === 0) + return a.slice(); + + // Finding min array + var tmp; + + if (a[0] > b[0]) { + tmp = a; + a = b; + b = tmp; + } + + // If array have non overlapping ranges, we can just concatenate them + var aEnd = a[a.length - 1], + bStart = b[0]; + + if (aEnd < bStart) { + if (typed.isTypedArray(a)) + return typed.concat(a, b); + return a.concat(b); + } + + // Initializing target + var array = new a.constructor(); + + // Iterating until we overlap + var i, l, v; + + for (i = 0, l = a.length; i < l; i++) { + v = a[i]; + + if (v < bStart) + array.push(v); + else + break; + } + + // Handling overlap + var aPointer = i, + aLength = a.length, + bPointer = 0, + bLength = b.length, + aHead, + bHead; + + while (aPointer < aLength && bPointer < bLength) { + aHead = a[aPointer]; + bHead = b[bPointer]; + + if (aHead <= bHead) { + + if (array.length === 0 || array[array.length - 1] !== aHead) + array.push(aHead); + + aPointer++; + } + else { + if (array.length === 0 || array[array.length - 1] !== bHead) + array.push(bHead); + + bPointer++; + } + } + + // Filling + // TODO: it's possible to optimize a bit here, since the condition is only + // relevant the first time + while (aPointer < aLength) { + aHead = a[aPointer++]; + + if (array.length === 0 || array[array.length - 1] !== aHead) + array.push(aHead); + } + while (bPointer < bLength) { + bHead = b[bPointer++]; + + if (array.length === 0 || array[array.length - 1] !== bHead) + array.push(bHead); + } + + return array; +} + +/** + * Perform the intersection of two already unique sorted array-like structures into one. + * + * @param {array} a - First array. + * @param {array} b - Second array. 
+ * @return {array} + */ +exports.intersectionUniqueArrays = function(a, b) { + + // One of the arrays is empty + if (a.length === 0 || b.length === 0) + return new a.constructor(0); + + // Finding min array + var tmp; + + if (a[0] > b[0]) { + tmp = a; + a = b; + b = tmp; + } + + // If array have non overlapping ranges, there is no intersection + var aEnd = a[a.length - 1], + bStart = b[0]; + + if (aEnd < bStart) + return new a.constructor(0); + + // Initializing target + var array = new a.constructor(); + + // Handling overlap + var aPointer = binarySearch.lowerBound(a, bStart), + aLength = a.length, + bPointer = 0, + bLength = binarySearch.upperBound(b, aEnd), + aHead, + bHead; + + while (aPointer < aLength && bPointer < bLength) { + aHead = a[aPointer]; + bHead = b[bPointer]; + + if (aHead < bHead) { + aPointer = binarySearch.lowerBound(a, bHead, aPointer + 1); + } + else if (aHead > bHead) { + bPointer = binarySearch.lowerBound(b, aHead, bPointer + 1); + } + else { + array.push(aHead); + aPointer++; + bPointer++; + } + } + + return array; +}; + +/** + * Merge k sorted array-like structures into one. + * + * @param {array} arrays - Arrays to merge. + * @return {array} + */ +function kWayMergeArrays(arrays) { + var length = 0, + max = -Infinity, + al, + i, + l; + + var filtered = []; + + for (i = 0, l = arrays.length; i < l; i++) { + al = arrays[i].length; + + if (al === 0) + continue; + + filtered.push(arrays[i]); + + length += al; + + if (al > max) + max = al; + } + + if (filtered.length === 0) + return new arrays[0].constructor(0); + + if (filtered.length === 1) + return filtered[0].slice(); + + if (filtered.length === 2) + return mergeArrays(filtered[0], filtered[1]); + + arrays = filtered; + + var array = new arrays[0].constructor(length); + + var PointerArray = typed.getPointerArray(max); + + var pointers = new PointerArray(arrays.length); + + // TODO: benchmark vs. 
a binomial heap + var heap = new FibonacciHeap(function(a, b) { + a = arrays[a][pointers[a]]; + b = arrays[b][pointers[b]]; + + if (a < b) + return -1; + + if (a > b) + return 1; + + return 0; + }); + + for (i = 0; i < l; i++) + heap.push(i); + + i = 0; + + var p, + v; + + while (heap.size) { + p = heap.pop(); + v = arrays[p][pointers[p]++]; + array[i++] = v; + + if (pointers[p] < arrays[p].length) + heap.push(p); + } + + return array; +} + +/** + * Perform the union of k sorted unique array-like structures into one. + * + * @param {array} arrays - Arrays to merge. + * @return {array} + */ +function kWayUnionUniqueArrays(arrays) { + var max = -Infinity, + al, + i, + l; + + var filtered = []; + + for (i = 0, l = arrays.length; i < l; i++) { + al = arrays[i].length; + + if (al === 0) + continue; + + filtered.push(arrays[i]); + + if (al > max) + max = al; + } + + if (filtered.length === 0) + return new arrays[0].constructor(0); + + if (filtered.length === 1) + return filtered[0].slice(); + + if (filtered.length === 2) + return unionUniqueArrays(filtered[0], filtered[1]); + + arrays = filtered; + + var array = new arrays[0].constructor(); + + var PointerArray = typed.getPointerArray(max); + + var pointers = new PointerArray(arrays.length); + + // TODO: benchmark vs. a binomial heap + var heap = new FibonacciHeap(function(a, b) { + a = arrays[a][pointers[a]]; + b = arrays[b][pointers[b]]; + + if (a < b) + return -1; + + if (a > b) + return 1; + + return 0; + }); + + for (i = 0; i < l; i++) + heap.push(i); + + var p, + v; + + while (heap.size) { + p = heap.pop(); + v = arrays[p][pointers[p]++]; + + if (array.length === 0 || array[array.length - 1] !== v) + array.push(v); + + if (pointers[p] < arrays[p].length) + heap.push(p); + } + + return array; +} + +/** + * Perform the intersection of k sorted array-like structures into one. + * + * @param {array} arrays - Arrays to merge. 
+ * @return {array} + */ +exports.kWayIntersectionUniqueArrays = function(arrays) { + var max = -Infinity, + maxStart = -Infinity, + minEnd = Infinity, + first, + last, + al, + i, + l; + + for (i = 0, l = arrays.length; i < l; i++) { + al = arrays[i].length; + + // If one of the arrays is empty, so is the intersection + if (al === 0) + return []; + + if (al > max) + max = al; + + first = arrays[i][0]; + last = arrays[i][al - 1]; + + if (first > maxStart) + maxStart = first; + + if (last < minEnd) + minEnd = last; + } + + // Full overlap is impossible + if (maxStart > minEnd) + return []; + + // Only one value + if (maxStart === minEnd) + return [maxStart]; + + // NOTE: trying to outsmart I(D,I(C,I(A,B))) is pointless unfortunately... + // NOTE: I tried to be very clever about bounds but it does not seem + // to improve the performance of the algorithm. + var a, b, + array = arrays[0], + aPointer, + bPointer, + aLimit, + bLimit, + aHead, + bHead, + start = maxStart; + + for (i = 1; i < l; i++) { + a = array; + b = arrays[i]; + + // Change that to `[]` and observe some perf drops on V8... + array = new Array(); + + aPointer = 0; + bPointer = binarySearch.lowerBound(b, start); + + aLimit = a.length; + bLimit = b.length; + + while (aPointer < aLimit && bPointer < bLimit) { + aHead = a[aPointer]; + bHead = b[bPointer]; + + if (aHead < bHead) { + aPointer = binarySearch.lowerBound(a, bHead, aPointer + 1); + } + else if (aHead > bHead) { + bPointer = binarySearch.lowerBound(b, aHead, bPointer + 1); + } + else { + array.push(aHead); + aPointer++; + bPointer++; + } + } + + if (array.length === 0) + return array; + + start = array[0]; + } + + return array; +}; + +/** + * Variadic merging all of the given arrays. 
+ * + * @param {...array} + * @return {array} + */ +exports.merge = function() { + if (arguments.length === 2) { + if (isArrayLike(arguments[0])) + return mergeArrays(arguments[0], arguments[1]); + } + else { + if (isArrayLike(arguments[0])) + return kWayMergeArrays(arguments); + } + + return null; +}; + +/** + * Variadic function performing the union of all the given unique arrays. + * + * @param {...array} + * @return {array} + */ +exports.unionUnique = function() { + if (arguments.length === 2) { + if (isArrayLike(arguments[0])) + return unionUniqueArrays(arguments[0], arguments[1]); + } + else { + if (isArrayLike(arguments[0])) + return kWayUnionUniqueArrays(arguments); + } + + return null; +}; + +/** + * Variadic function performing the intersection of all the given unique arrays. + * + * @param {...array} + * @return {array} + */ +exports.intersectionUnique = function() { + if (arguments.length === 2) { + if (isArrayLike(arguments[0])) + return exports.intersectionUniqueArrays(arguments[0], arguments[1]); + } + else { + if (isArrayLike(arguments[0])) + return exports.kWayIntersectionUniqueArrays(arguments); + } + + return null; +}; diff --git a/amplify/functions/fetchDocuments/node_modules/mnemonist/utils/murmurhash3.js b/amplify/functions/fetchDocuments/node_modules/mnemonist/utils/murmurhash3.js new file mode 100644 index 0000000..c09ec8a --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/mnemonist/utils/murmurhash3.js @@ -0,0 +1,87 @@ +/* eslint no-fallthrough: 0 */ +/** + * Mnemonist MurmurHash 3 + * ======================= + * + * Straightforward implementation of the third version of MurmurHash. + * + * Note: this piece of code belong to haschisch. + */ + +/** + * Various helpers. 
+ */ +function mul32(a, b) { + return (a & 0xffff) * b + (((a >>> 16) * b & 0xffff) << 16) & 0xffffffff; +} + +function sum32(a, b) { + return (a & 0xffff) + (b >>> 16) + (((a >>> 16) + b & 0xffff) << 16) & 0xffffffff; +} + +function rotl32(a, b) { + return (a << b) | (a >>> (32 - b)); +} + +/** + * MumurHash3 function. + * + * @param {number} seed - Seed. + * @param {ByteArray} data - Data. + */ +module.exports = function murmurhash3(seed, data) { + var c1 = 0xcc9e2d51, + c2 = 0x1b873593, + r1 = 15, + r2 = 13, + m = 5, + n = 0x6b64e654; + + var hash = seed, + k1, + i, + l; + + for (i = 0, l = data.length - 4; i <= l; i += 4) { + k1 = ( + data[i] | + (data[i + 1] << 8) | + (data[i + 2] << 16) | + (data[i + 3] << 24) + ); + + k1 = mul32(k1, c1); + k1 = rotl32(k1, r1); + k1 = mul32(k1, c2); + + hash ^= k1; + hash = rotl32(hash, r2); + hash = mul32(hash, m); + hash = sum32(hash, n); + } + + k1 = 0; + + switch (data.length & 3) { + case 3: + k1 ^= data[i + 2] << 16; + case 2: + k1 ^= data[i + 1] << 8; + case 1: + k1 ^= data[i]; + k1 = mul32(k1, c1); + k1 = rotl32(k1, r1); + k1 = mul32(k1, c2); + hash ^= k1; + default: + } + + hash ^= data.length; + hash ^= hash >>> 16; + hash = mul32(hash, 0x85ebca6b); + hash ^= hash >>> 13; + hash = mul32(hash, 0xc2b2ae35); + hash ^= hash >>> 16; + + return hash >>> 0; +}; diff --git a/amplify/functions/fetchDocuments/node_modules/mnemonist/utils/typed-arrays.js b/amplify/functions/fetchDocuments/node_modules/mnemonist/utils/typed-arrays.js new file mode 100644 index 0000000..474a2cb --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/mnemonist/utils/typed-arrays.js @@ -0,0 +1,187 @@ +/** + * Mnemonist Typed Array Helpers + * ============================== + * + * Miscellaneous helpers related to typed arrays. + */ + +/** + * When using an unsigned integer array to store pointers, one might want to + * choose the optimal word size in regards to the actual numbers of pointers + * to store. 
+ * + * This helpers does just that. + * + * @param {number} size - Expected size of the array to map. + * @return {TypedArray} + */ +var MAX_8BIT_INTEGER = Math.pow(2, 8) - 1, + MAX_16BIT_INTEGER = Math.pow(2, 16) - 1, + MAX_32BIT_INTEGER = Math.pow(2, 32) - 1; + +var MAX_SIGNED_8BIT_INTEGER = Math.pow(2, 7) - 1, + MAX_SIGNED_16BIT_INTEGER = Math.pow(2, 15) - 1, + MAX_SIGNED_32BIT_INTEGER = Math.pow(2, 31) - 1; + +exports.getPointerArray = function(size) { + var maxIndex = size - 1; + + if (maxIndex <= MAX_8BIT_INTEGER) + return Uint8Array; + + if (maxIndex <= MAX_16BIT_INTEGER) + return Uint16Array; + + if (maxIndex <= MAX_32BIT_INTEGER) + return Uint32Array; + + return Float64Array; +}; + +exports.getSignedPointerArray = function(size) { + var maxIndex = size - 1; + + if (maxIndex <= MAX_SIGNED_8BIT_INTEGER) + return Int8Array; + + if (maxIndex <= MAX_SIGNED_16BIT_INTEGER) + return Int16Array; + + if (maxIndex <= MAX_SIGNED_32BIT_INTEGER) + return Int32Array; + + return Float64Array; +}; + +/** + * Function returning the minimal type able to represent the given number. + * + * @param {number} value - Value to test. + * @return {TypedArrayClass} + */ +exports.getNumberType = function(value) { + + // <= 32 bits itnteger? + if (value === (value | 0)) { + + // Negative + if (Math.sign(value) === -1) { + if (value <= 127 && value >= -128) + return Int8Array; + + if (value <= 32767 && value >= -32768) + return Int16Array; + + return Int32Array; + } + else { + + if (value <= 255) + return Uint8Array; + + if (value <= 65535) + return Uint16Array; + + return Uint32Array; + } + } + + // 53 bits integer & floats + // NOTE: it's kinda hard to tell whether we could use 32bits or not... + return Float64Array; +}; + +/** + * Function returning the minimal type able to represent the given array + * of JavaScript numbers. + * + * @param {array} array - Array to represent. + * @param {function} getter - Optional getter. 
+ * @return {TypedArrayClass} + */ +var TYPE_PRIORITY = { + Uint8Array: 1, + Int8Array: 2, + Uint16Array: 3, + Int16Array: 4, + Uint32Array: 5, + Int32Array: 6, + Float32Array: 7, + Float64Array: 8 +}; + +// TODO: make this a one-shot for one value +exports.getMinimalRepresentation = function(array, getter) { + var maxType = null, + maxPriority = 0, + p, + t, + v, + i, + l; + + for (i = 0, l = array.length; i < l; i++) { + v = getter ? getter(array[i]) : array[i]; + t = exports.getNumberType(v); + p = TYPE_PRIORITY[t.name]; + + if (p > maxPriority) { + maxPriority = p; + maxType = t; + } + } + + return maxType; +}; + +/** + * Function returning whether the given value is a typed array. + * + * @param {any} value - Value to test. + * @return {boolean} + */ +exports.isTypedArray = function(value) { + return typeof ArrayBuffer !== 'undefined' && ArrayBuffer.isView(value); +}; + +/** + * Function used to concat byte arrays. + * + * @param {...ByteArray} + * @return {ByteArray} + */ +exports.concat = function() { + var length = 0, + i, + o, + l; + + for (i = 0, l = arguments.length; i < l; i++) + length += arguments[i].length; + + var array = new (arguments[0].constructor)(length); + + for (i = 0, o = 0; i < l; i++) { + array.set(arguments[i], o); + o += arguments[i].length; + } + + return array; +}; + +/** + * Function used to initialize a byte array of indices. + * + * @param {number} length - Length of target. 
+ * @return {ByteArray} + */ +exports.indices = function(length) { + var PointerArray = exports.getPointerArray(length); + + var array = new PointerArray(length); + + for (var i = 0; i < length; i++) + array[i] = i; + + return array; +}; diff --git a/amplify/functions/fetchDocuments/node_modules/mnemonist/utils/types.d.ts b/amplify/functions/fetchDocuments/node_modules/mnemonist/utils/types.d.ts new file mode 100644 index 0000000..1a199d6 --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/mnemonist/utils/types.d.ts @@ -0,0 +1,16 @@ +/** + * Mnemonist Generic Types + * ======================== + * + * Collection of types used throughout the library. + */ +export interface IArrayLike { + length: number; + slice(from: number, to?: number): IArrayLike; +} + +export type ArrayLike = IArrayLike | ArrayBuffer; + +export interface IArrayLikeConstructor { + new(...args: any[]): ArrayLike; +} diff --git a/amplify/functions/fetchDocuments/node_modules/mnemonist/vector.d.ts b/amplify/functions/fetchDocuments/node_modules/mnemonist/vector.d.ts new file mode 100644 index 0000000..414f969 --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/mnemonist/vector.d.ts @@ -0,0 +1,81 @@ +/** + * Mnemonist Vector Typings + * ========================= + */ +import {IArrayLikeConstructor} from './utils/types'; + +type VectorOptions = { + initialLength?: number; + initialCapacity?: number; + policy?: (capacity: number) => number; +} + +export default class Vector implements Iterable { + + // Members + capacity: number; + length: number; + size: number; + + // Constructor + constructor(ArrayClass: IArrayLikeConstructor, length: number | VectorOptions); + + // Methods + clear(): void; + set(index: number, value: number): this; + reallocate(capacity: number): this; + grow(capacity?: number): this; + resize(length: number): this; + push(value: number): number; + pop(): number | undefined; + get(index: number): number; + forEach(callback: (index: number, value: number, 
set: this) => void, scope?: any): void; + values(): IterableIterator; + entries(): IterableIterator<[number, number]>; + [Symbol.iterator](): IterableIterator; + inspect(): any; + toJSON(): any; + + // Statics + static from(iterable: Iterable | {[key: string] : I}, ArrayClass: IArrayLikeConstructor, capacity?: number): Vector; +} + +declare class TypedVector implements Iterable { + + // Members + capacity: number; + length: number; + size: number; + + // Constructor + constructor(length: number | VectorOptions); + + // Methods + clear(): void; + set(index: number, value: number): this; + reallocate(capacity: number): this; + grow(capacity?: number): this; + resize(length: number): this; + push(value: number): number; + pop(): number | undefined; + get(index: number): number; + forEach(callback: (index: number, value: number, set: this) => void, scope?: any): void; + values(): IterableIterator; + entries(): IterableIterator<[number, number]>; + [Symbol.iterator](): IterableIterator; + inspect(): any; + toJSON(): any; + + // Statics + static from(iterable: Iterable | {[key: string] : I}, capacity?: number): TypedVector; +} + +export class Int8Vector extends TypedVector {} +export class Uint8Vector extends TypedVector {} +export class Uint8ClampedVector extends TypedVector {} +export class Int16Vector extends TypedVector {} +export class Uint16Vector extends TypedVector {} +export class Int32Vector extends TypedVector {} +export class Uint32Vector extends TypedVector {} +export class Float32Vector extends TypedVector {} +export class Float64Array extends TypedVector {} diff --git a/amplify/functions/fetchDocuments/node_modules/mnemonist/vector.js b/amplify/functions/fetchDocuments/node_modules/mnemonist/vector.js new file mode 100644 index 0000000..467bf20 --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/mnemonist/vector.js @@ -0,0 +1,373 @@ +/** + * Mnemonist Vector + * ================= + * + * Abstract implementation of a growing array that can be 
used with JavaScript + * typed arrays and other array-like structures. + * + * Note: should try and use ArrayBuffer.transfer when it will be available. + */ +var Iterator = require('obliterator/iterator'), + forEach = require('obliterator/foreach'), + iterables = require('./utils/iterables.js'), + typed = require('./utils/typed-arrays.js'); + +/** + * Defaults. + */ +var DEFAULT_GROWING_POLICY = function(currentCapacity) { + return Math.max(1, Math.ceil(currentCapacity * 1.5)); +}; + +var pointerArrayFactory = function(capacity) { + var PointerArray = typed.getPointerArray(capacity); + + return new PointerArray(capacity); +}; + +/** + * Vector. + * + * @constructor + * @param {function} ArrayClass - An array constructor. + * @param {number|object} initialCapacityOrOptions - Self-explanatory: + * @param {number} initialCapacity - Initial capacity. + * @param {number} initialLength - Initial length. + * @param {function} policy - Allocation policy. + */ +function Vector(ArrayClass, initialCapacityOrOptions) { + if (arguments.length < 1) + throw new Error('mnemonist/vector: expecting at least a byte array constructor.'); + + var initialCapacity = initialCapacityOrOptions || 0, + policy = DEFAULT_GROWING_POLICY, + initialLength = 0, + factory = false; + + if (typeof initialCapacityOrOptions === 'object') { + initialCapacity = initialCapacityOrOptions.initialCapacity || 0; + initialLength = initialCapacityOrOptions.initialLength || 0; + policy = initialCapacityOrOptions.policy || policy; + factory = initialCapacityOrOptions.factory === true; + } + + this.factory = factory ? ArrayClass : null; + this.ArrayClass = ArrayClass; + this.length = initialLength; + this.capacity = Math.max(initialLength, initialCapacity); + this.policy = policy; + this.array = new ArrayClass(this.capacity); +} + +/** + * Method used to set a value. + * + * @param {number} index - Index to edit. + * @param {any} value - Value. 
+ * @return {Vector} + */ +Vector.prototype.set = function(index, value) { + + // Out of bounds? + if (this.length < index) + throw new Error('Vector(' + this.ArrayClass.name + ').set: index out of bounds.'); + + // Updating value + this.array[index] = value; + + return this; +}; + +/** + * Method used to get a value. + * + * @param {number} index - Index to retrieve. + * @return {any} + */ +Vector.prototype.get = function(index) { + if (this.length < index) + return undefined; + + return this.array[index]; +}; + +/** + * Method used to apply the growing policy. + * + * @param {number} [override] - Override capacity. + * @return {number} + */ +Vector.prototype.applyPolicy = function(override) { + var newCapacity = this.policy(override || this.capacity); + + if (typeof newCapacity !== 'number' || newCapacity < 0) + throw new Error('mnemonist/vector.applyPolicy: policy returned an invalid value (expecting a positive integer).'); + + if (newCapacity <= this.capacity) + throw new Error('mnemonist/vector.applyPolicy: policy returned a less or equal capacity to allocate.'); + + // TODO: we should probably check that the returned number is an integer + return newCapacity; +}; + +/** + * Method used to reallocate the underlying array. + * + * @param {number} capacity - Target capacity. + * @return {Vector} + */ +Vector.prototype.reallocate = function(capacity) { + if (capacity === this.capacity) + return this; + + var oldArray = this.array; + + if (capacity < this.length) + this.length = capacity; + + if (capacity > this.capacity) { + if (this.factory === null) + this.array = new this.ArrayClass(capacity); + else + this.array = this.factory(capacity); + + if (typed.isTypedArray(this.array)) { + this.array.set(oldArray, 0); + } + else { + for (var i = 0, l = this.length; i < l; i++) + this.array[i] = oldArray[i]; + } + } + else { + this.array = oldArray.slice(0, capacity); + } + + this.capacity = capacity; + + return this; +}; + +/** + * Method used to grow the array. 
+ * + * @param {number} [capacity] - Optional capacity to match. + * @return {Vector} + */ +Vector.prototype.grow = function(capacity) { + var newCapacity; + + if (typeof capacity === 'number') { + + if (this.capacity >= capacity) + return this; + + // We need to match the given capacity + newCapacity = this.capacity; + + while (newCapacity < capacity) + newCapacity = this.applyPolicy(newCapacity); + + this.reallocate(newCapacity); + + return this; + } + + // We need to run the policy once + newCapacity = this.applyPolicy(); + this.reallocate(newCapacity); + + return this; +}; + +/** + * Method used to resize the array. Won't deallocate. + * + * @param {number} length - Target length. + * @return {Vector} + */ +Vector.prototype.resize = function(length) { + if (length === this.length) + return this; + + if (length < this.length) { + this.length = length; + return this; + } + + this.length = length; + this.reallocate(length); + + return this; +}; + +/** + * Method used to push a value into the array. + * + * @param {any} value - Value to push. + * @return {number} - Length of the array. + */ +Vector.prototype.push = function(value) { + if (this.capacity === this.length) + this.grow(); + + this.array[this.length++] = value; + + return this.length; +}; + +/** + * Method used to pop the last value of the array. + * + * @return {number} - The popped value. + */ +Vector.prototype.pop = function() { + if (this.length === 0) + return; + + return this.array[--this.length]; +}; + +/** + * Method used to create an iterator over a vector's values. + * + * @return {Iterator} + */ +Vector.prototype.values = function() { + var items = this.array, + l = this.length, + i = 0; + + return new Iterator(function() { + if (i >= l) + return { + done: true + }; + + var value = items[i]; + i++; + + return { + value: value, + done: false + }; + }); +}; + +/** + * Method used to create an iterator over a vector's entries. 
+ * + * @return {Iterator} + */ +Vector.prototype.entries = function() { + var items = this.array, + l = this.length, + i = 0; + + return new Iterator(function() { + if (i >= l) + return { + done: true + }; + + var value = items[i]; + + return { + value: [i++, value], + done: false + }; + }); +}; + +/** + * Attaching the #.values method to Symbol.iterator if possible. + */ +if (typeof Symbol !== 'undefined') + Vector.prototype[Symbol.iterator] = Vector.prototype.values; + +/** + * Convenience known methods. + */ +Vector.prototype.inspect = function() { + var proxy = this.array.slice(0, this.length); + + proxy.type = this.array.constructor.name; + proxy.items = this.length; + proxy.capacity = this.capacity; + + // Trick so that node displays the name of the constructor + Object.defineProperty(proxy, 'constructor', { + value: Vector, + enumerable: false + }); + + return proxy; +}; + +if (typeof Symbol !== 'undefined') + Vector.prototype[Symbol.for('nodejs.util.inspect.custom')] = Vector.prototype.inspect; + +/** + * Static @.from function taking an arbitrary iterable & converting it into + * a vector. + * + * @param {Iterable} iterable - Target iterable. + * @param {function} ArrayClass - Byte array class. + * @param {number} capacity - Desired capacity. + * @return {Vector} + */ +Vector.from = function(iterable, ArrayClass, capacity) { + + if (arguments.length < 3) { + + // Attempting to guess the needed capacity + capacity = iterables.guessLength(iterable); + + if (typeof capacity !== 'number') + throw new Error('mnemonist/vector.from: could not guess iterable length. Please provide desired capacity as last argument.'); + } + + var vector = new Vector(ArrayClass, capacity); + + forEach(iterable, function(value) { + vector.push(value); + }); + + return vector; +}; + +/** + * Exporting. 
+ */ +function subClass(ArrayClass) { + var SubClass = function(initialCapacityOrOptions) { + Vector.call(this, ArrayClass, initialCapacityOrOptions); + }; + + for (var k in Vector.prototype) { + if (Vector.prototype.hasOwnProperty(k)) + SubClass.prototype[k] = Vector.prototype[k]; + } + + SubClass.from = function(iterable, capacity) { + return Vector.from(iterable, ArrayClass, capacity); + }; + + if (typeof Symbol !== 'undefined') + SubClass.prototype[Symbol.iterator] = SubClass.prototype.values; + + return SubClass; +} + +Vector.Int8Vector = subClass(Int8Array); +Vector.Uint8Vector = subClass(Uint8Array); +Vector.Uint8ClampedVector = subClass(Uint8ClampedArray); +Vector.Int16Vector = subClass(Int16Array); +Vector.Uint16Vector = subClass(Uint16Array); +Vector.Int32Vector = subClass(Int32Array); +Vector.Uint32Vector = subClass(Uint32Array); +Vector.Float32Vector = subClass(Float32Array); +Vector.Float64Vector = subClass(Float64Array); +Vector.PointerVector = subClass(pointerArrayFactory); + +module.exports = Vector; diff --git a/amplify/functions/fetchDocuments/node_modules/mnemonist/vp-tree.d.ts b/amplify/functions/fetchDocuments/node_modules/mnemonist/vp-tree.d.ts new file mode 100644 index 0000000..2c03354 --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/mnemonist/vp-tree.d.ts @@ -0,0 +1,27 @@ +/** + * Mnemonist VPTree Typings + * ========================= + */ +type DistanceFunction = (a: T, b: T) => number; +type QueryMatch = {distance: number, item: T}; + +export default class VPTree { + + // Members + distance: DistanceFunction; + size: number; + D: number; + + // Constructor + constructor(distance: DistanceFunction, items: Iterable); + + // Methods + nearestNeighbors(k: number, query: T): Array>; + neighbors(radius: number, query: T): Array>; + + // Statics + static from( + iterable: Iterable | {[key: string] : I}, + distance: DistanceFunction + ): VPTree; +} diff --git a/amplify/functions/fetchDocuments/node_modules/mnemonist/vp-tree.js 
b/amplify/functions/fetchDocuments/node_modules/mnemonist/vp-tree.js new file mode 100644 index 0000000..2acd01e --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/mnemonist/vp-tree.js @@ -0,0 +1,367 @@ +/** + * Mnemonist Vantage Point Tree + * ============================= + * + * JavaScript implementation of the Vantage Point Tree storing the binary + * tree as a flat byte array. + * + * Note that a VPTree has worst cases and is likely not to be perfectly + * balanced because of median ambiguity. It is therefore not suitable + * for hairballs and tiny datasets. + * + * [Reference]: + * https://en.wikipedia.org/wiki/Vantage-point_tree + */ +var iterables = require('./utils/iterables.js'), + typed = require('./utils/typed-arrays.js'), + inplaceQuickSortIndices = require('./sort/quick.js').inplaceQuickSortIndices, + lowerBoundIndices = require('./utils/binary-search.js').lowerBoundIndices, + Heap = require('./heap.js'); + +var getPointerArray = typed.getPointerArray; + +// TODO: implement vantage point selection techniques (by swapping with last) +// TODO: is this required to implement early termination for k <= size? + +/** + * Heap comparator used by the #.nearestNeighbors method. + */ +function comparator(a, b) { + if (a.distance < b.distance) + return 1; + + if (a.distance > b.distance) + return -1; + + return 0; +} + +/** + * Function used to create the binary tree. + * + * @param {function} distance - Distance function to use. + * @param {array} items - Items to index (will be mutated). + * @param {array} indices - Indexes of the items. + * @return {Float64Array} - The flat binary tree. 
+ */ +function createBinaryTree(distance, items, indices) { + var N = indices.length; + + var PointerArray = getPointerArray(N); + + var C = 0, + nodes = new PointerArray(N), + lefts = new PointerArray(N), + rights = new PointerArray(N), + mus = new Float64Array(N), + stack = [0, 0, N], + distances = new Float64Array(N), + nodeIndex, + vantagePoint, + medianIndex, + lo, + hi, + mid, + mu, + i, + l; + + while (stack.length) { + hi = stack.pop(); + lo = stack.pop(); + nodeIndex = stack.pop(); + + // Getting our vantage point + vantagePoint = indices[hi - 1]; + hi--; + + l = hi - lo; + + // Storing vantage point + nodes[nodeIndex] = vantagePoint; + + // We are in a leaf + if (l === 0) + continue; + + // We only have two elements, the second one has to go right + if (l === 1) { + + // We put remaining item to the right + mu = distance(items[vantagePoint], items[indices[lo]]); + + mus[nodeIndex] = mu; + + // Right + C++; + rights[nodeIndex] = C; + nodes[C] = indices[lo]; + + continue; + } + + // Computing distance from vantage point to other points + for (i = lo; i < hi; i++) + distances[indices[i]] = distance(items[vantagePoint], items[indices[i]]); + + inplaceQuickSortIndices(distances, indices, lo, hi); + + // Finding median of distances + medianIndex = lo + (l / 2) - 1; + + // Need to interpolate? 
+ if (medianIndex === (medianIndex | 0)) { + mu = ( + distances[indices[medianIndex]] + + distances[indices[medianIndex + 1]] + ) / 2; + } + else { + mu = distances[indices[Math.ceil(medianIndex)]]; + } + + // Storing mu + mus[nodeIndex] = mu; + + mid = lowerBoundIndices(distances, indices, mu, lo, hi); + + // console.log('Vantage point', items[vantagePoint], vantagePoint); + // console.log('mu =', mu); + // console.log('lo =', lo); + // console.log('hi =', hi); + // console.log('mid =', mid); + + // console.log('need to split', Array.from(indices).slice(lo, hi).map(i => { + // return [distances[i], distance(items[vantagePoint], items[i]), items[i]]; + // })); + + // Right + if (hi - mid > 0) { + C++; + rights[nodeIndex] = C; + stack.push(C, mid, hi); + // console.log('Went right with ', Array.from(indices).slice(mid, hi).map(i => { + // return [distances[i], distance(items[vantagePoint], items[i]), items[i]]; + // })); + } + + // Left + if (mid - lo > 0) { + C++; + lefts[nodeIndex] = C; + stack.push(C, lo, mid); + // console.log('Went left with', Array.from(indices).slice(lo, mid).map(i => { + // return [distances[i], distance(items[vantagePoint], items[i]), items[i]]; + // })); + } + + // console.log(); + } + + return { + nodes: nodes, + lefts: lefts, + rights: rights, + mus: mus + }; +} + +/** + * VPTree. + * + * @constructor + * @param {function} distance - Distance function to use. + * @param {Iterable} items - Items to store. + */ +function VPTree(distance, items) { + if (typeof distance !== 'function') + throw new Error('mnemonist/VPTree.constructor: given `distance` must be a function.'); + + if (!items) + throw new Error('mnemonist/VPTree.constructor: you must provide items to the tree. 
A VPTree cannot be updated after its creation.'); + + // Properties + this.distance = distance; + this.heap = new Heap(comparator); + this.D = 0; + + var arrays = iterables.toArrayWithIndices(items); + this.items = arrays[0]; + var indices = arrays[1]; + + // Creating the binary tree + this.size = indices.length; + + var result = createBinaryTree(distance, this.items, indices); + + this.nodes = result.nodes; + this.lefts = result.lefts; + this.rights = result.rights; + this.mus = result.mus; +} + +/** + * Function used to retrieve the k nearest neighbors of the query. + * + * @param {number} k - Number of neighbors to retrieve. + * @param {any} query - The query. + * @return {array} + */ +VPTree.prototype.nearestNeighbors = function(k, query) { + var neighbors = this.heap, + stack = [0], + tau = Infinity, + nodeIndex, + itemIndex, + vantagePoint, + leftIndex, + rightIndex, + mu, + d; + + this.D = 0; + + while (stack.length) { + nodeIndex = stack.pop(); + itemIndex = this.nodes[nodeIndex]; + vantagePoint = this.items[itemIndex]; + + // Distance between query & the current vantage point + d = this.distance(vantagePoint, query); + this.D++; + + if (d < tau) { + neighbors.push({distance: d, item: vantagePoint}); + + // Trimming + if (neighbors.size > k) + neighbors.pop(); + + // Adjusting tau (only if we already have k items, else it stays Infinity) + if (neighbors.size >= k) + tau = neighbors.peek().distance; + } + + leftIndex = this.lefts[nodeIndex]; + rightIndex = this.rights[nodeIndex]; + + // We are a leaf + if (!leftIndex && !rightIndex) + continue; + + mu = this.mus[nodeIndex]; + + if (d < mu) { + if (leftIndex && d < mu + tau) + stack.push(leftIndex); + if (rightIndex && d >= mu - tau) // Might not be necessary to test d + stack.push(rightIndex); + } + else { + if (rightIndex && d >= mu - tau) + stack.push(rightIndex); + if (leftIndex && d < mu + tau) // Might not be necessary to test d + stack.push(leftIndex); + } + } + + var array = new Array(neighbors.size); 
+ + for (var i = neighbors.size - 1; i >= 0; i--) + array[i] = neighbors.pop(); + + return array; +}; + +/** + * Function used to retrieve every neighbors of query in the given radius. + * + * @param {number} radius - Radius. + * @param {any} query - The query. + * @return {array} + */ +VPTree.prototype.neighbors = function(radius, query) { + var neighbors = [], + stack = [0], + nodeIndex, + itemIndex, + vantagePoint, + leftIndex, + rightIndex, + mu, + d; + + this.D = 0; + + while (stack.length) { + nodeIndex = stack.pop(); + itemIndex = this.nodes[nodeIndex]; + vantagePoint = this.items[itemIndex]; + + // Distance between query & the current vantage point + d = this.distance(vantagePoint, query); + this.D++; + + if (d <= radius) + neighbors.push({distance: d, item: vantagePoint}); + + leftIndex = this.lefts[nodeIndex]; + rightIndex = this.rights[nodeIndex]; + + // We are a leaf + if (!leftIndex && !rightIndex) + continue; + + mu = this.mus[nodeIndex]; + + if (d < mu) { + if (leftIndex && d < mu + radius) + stack.push(leftIndex); + if (rightIndex && d >= mu - radius) // Might not be necessary to test d + stack.push(rightIndex); + } + else { + if (rightIndex && d >= mu - radius) + stack.push(rightIndex); + if (leftIndex && d < mu + radius) // Might not be necessary to test d + stack.push(leftIndex); + } + } + + return neighbors; +}; + +/** + * Convenience known methods. + */ +VPTree.prototype.inspect = function() { + var array = this.items.slice(); + + // Trick so that node displays the name of the constructor + Object.defineProperty(array, 'constructor', { + value: VPTree, + enumerable: false + }); + + return array; +}; + +if (typeof Symbol !== 'undefined') + VPTree.prototype[Symbol.for('nodejs.util.inspect.custom')] = VPTree.prototype.inspect; + +/** + * Static @.from function taking an arbitrary iterable & converting it into + * a tree. + * + * @param {Iterable} iterable - Target iterable. + * @param {function} distance - Distance function to use. 
+ * @return {VPTree} + */ +VPTree.from = function(iterable, distance) { + return new VPTree(distance, iterable); +}; + +/** + * Exporting. + */ +module.exports = VPTree; diff --git a/amplify/functions/fetchDocuments/node_modules/obliterator/LICENSE.txt b/amplify/functions/fetchDocuments/node_modules/obliterator/LICENSE.txt new file mode 100644 index 0000000..ca37c96 --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/obliterator/LICENSE.txt @@ -0,0 +1,21 @@ +The MIT License (MIT) + +Copyright (c) 2017 Guillaume Plique (Yomguithereal) + +Permission is hereby granted, free of charge, to any person obtaining a copy +of this software and associated documentation files (the "Software"), to deal +in the Software without restriction, including without limitation the rights +to use, copy, modify, merge, publish, distribute, sublicense, and/or sell +copies of the Software, and to permit persons to whom the Software is +furnished to do so, subject to the following conditions: + +The above copyright notice and this permission notice shall be included in +all copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, +OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN +THE SOFTWARE. 
diff --git a/amplify/functions/fetchDocuments/node_modules/obliterator/README.md b/amplify/functions/fetchDocuments/node_modules/obliterator/README.md new file mode 100644 index 0000000..f611e39 --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/obliterator/README.md @@ -0,0 +1,321 @@ +[![Build Status](https://travis-ci.org/Yomguithereal/obliterator.svg)](https://travis-ci.org/Yomguithereal/obliterator) + +# Obliterator + +Obliterator is a dead simple JavaScript/TypeScript library providing miscellaneous higher-order iterator functions such as combining two or more iterators into a single one. + +# Installation + +``` +npm install --save obliterator +``` + +Note `obliterator` comes along with its TypeScript declarations. + +# Usage + +## Summary + +*Classes* + +* [Iterator](#iterator) + +*Functions* + +* [chain](#chain) +* [combinations](#combinations) +* [consume](#consume) +* [filter](#filter) +* [forEach](#foreach) +* [map](#map) +* [match](#match) +* [permutations](#permutations) +* [powerSet](#powerSet) +* [split](#split) +* [take](#take) + +## Iterator + +A handy Iterator class with safeguards and usable with ES2015's `for ... of` loop constructs & spread operator. + +```js +import Iterator from 'obliterator/iterator'; +// Or +import {Iterator} from 'obliterator'; + +const iterator = new Iterator(function() { + // Define what the `next` function does +}); + +// Checking that the given value is an iterator (native or else) +Iterator.is(value); + +// Creating an empty iterator +const emptyIterator = Iterator.empty(); + +// Creating a simple iterator from a single value +const simpleIterator = Iterator.of(34); + +// Creating a simple iterator from multiple values +const multipleIterator = Iterator.of(1, 2, 3); +``` + +## chain + +Variadic function chaining all the given iterators. 
+ +```js +import chain from 'obliterator/chain'; +// Or +import {chain} from 'obliterator'; + +const set1 = new Set('a'); +const set2 = new Set('bc'); + +const chained = chain(set1.values(), set2.values()); + +chained.next(); +>>> {done: false, value: 'a'} +chained.next(); +>>> {done: false, value: 'b'} +``` + +## combinations + +Returns an iterator of combinations of the given array and of the given size. + +Note that for performance reasons, the yielded combination is always the same object. + +```js +import combinations from 'obliterator/combinations'; +// Or +import {combinations} from 'obliterator'; + +const iterator = combinations(['A', 'B', 'C', 'D'], 2); + +iterator.next().value; +>>> ['A', 'B'] +iterator.next().value; +>>> ['A', 'C'] +``` + +## consume + +Function consuming the given iterator fully or for n steps. + +```js +import consume from 'obliterator/consume'; +// Or +import {consume} from 'obliterator'; + +const set = new Set([1, 2, 3]); + +// Consuming the whole iterator +let iterator = set.values(); +consume(iterator); +iterator.next().done +>>> true + +// Consuming n steps +let iterator = set.values(); +consume(iterator, 2); +iterator.next().value +>>> 3 +``` + +## filter + +Function returning an iterator filtering another one's values using the given predicate. + +```js +import filter from 'obliterator/filter'; +// Or +import {filter} from 'obliterator'; + +const set = new Set([1, 2, 3, 4, 5]); + +const even = x => x % 2 === 0; + +const iterator = filter(even, set.values()); + +iterator.next().value +>>> 2 +iterator.next().value +>>> 4 +``` + +## forEach + +Function able to iterate over almost any JavaScript iterable value using a callback. + +Supported values range from arrays, typed arrays, sets, maps, objects, strings, arguments, iterators, arbitrary iterables etc. 
+ +```js +import forEach from 'obliterator/foreach'; +// Or +import {forEach} from 'obliterator'; + +const set = new Set(['apple', 'banana']); + +forEach(set.values(), (value, i) => { + console.log(i, value); +}); + +// Iterating over a string +forEach('abc', (char, i) => ...); + +// Iterating over a map +forEach(map, (value, key) => ...); +``` + +Optionally, one can use the `forEachWithNullKeys` function to iterate over mixed values but with the twist that iterables without proper keys (lists, sets etc.), will yield `null` instead of an index key. + +```js +import {forEachWithNullKeys} from 'obliterator/foreach'; + +const set = new Set(['apple', 'banana']); + +forEach(set, (value, key) => { + console.log(key, value); +}); +>>> null, 'apple' +>>> null, 'banana' +``` + +## map + +Function returning an iterator mapping another one's values using the given function. + +```js +import map from 'obliterator/map'; +// Or +import {map} from 'obliterator'; + +const set = new Set([1, 2, 3, 4, 5]); + +const triple = x => x * 3; + +const iterator = map(triple, set.values()); + +iterator.next().value +>>> 3 +iterator.next().value +>>> 6 +``` + +## match + +Function returning an iterator over the matches of a given regex applied to the target string. + +```js +import match from 'obliterator/match'; +// Or +import {match} from 'obliterator'; + +const iterator = match(/t/, 'test'); + +iterator.next().value.index +>>> 0 +iterator.next().value.index +>>> 3 +``` + +## permutations + +Returns an iterator of permutations of the given array and of the given size. + +Note that for performance reasons, the yielded permutation is always the same object. 
+ +```js +import permutations from 'obliterator/permutations'; +// Or +import {permutations} from 'obliterator'; + +let iterator = permutations([1, 2, 3]); + +iterator.next().value +>>> [1, 2, 3] +iterator.next().value +>>> [1, 3, 2] + +iterator = permutations(['A', 'B', 'C', 'D'], 2); + +iterator.next().value; +>>> ['A', 'B'] +iterator.next().value; +>>> ['A', 'C'] +``` + +## powerSet + +Returns an iterator of sets composing the power set of the given array. + +```js +import powerSet from 'obliterator/power-set'; +// Or +import {powerSet} from 'obliterator'; + +const iterator = powerSet(['A', 'B', 'C']); + +iterator.next().value; +>>> [] +iterator.next().value; +>>> ['A'] +``` + +## split + +Returns an iterator over the splits of the target string, according to the given RegExp pattern. + +```js +import split from 'obliterator/split'; +// Or +import {split} from 'obliterator'; + +const iterator = split(/;/g, 'hello;world;super'); + +iterator.next().value; +>>> 'hello' +iterator.next().value; +>>> 'world' +``` + +## take + +Function taking values from given iterator and returning them in an array. + +```js +import take from 'obliterator/take'; +// Or +import {take} from 'obliterator'; + +const set = new Set([1, 2, 3]); + +// To take n values from the iterator +take(set.values(), 2); +>>> [1, 2] + +// To convert the full iterator into an array +take(set.values()); +>>> [1, 2, 3] +``` + +# Contribution + +Contributions are obviously welcome. Please be sure to lint the code & add the relevant unit tests before submitting any PR. 
+ +``` +git clone git@github.com:Yomguithereal/obliterator.git +cd obliterator +npm install + +# To lint the code +npm run lint + +# To run the unit tests +npm test +``` + +# License + +[MIT](LICENSE.txt) diff --git a/amplify/functions/fetchDocuments/node_modules/obliterator/chain.d.ts b/amplify/functions/fetchDocuments/node_modules/obliterator/chain.d.ts new file mode 100644 index 0000000..298e592 --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/obliterator/chain.d.ts @@ -0,0 +1,3 @@ +import {default as ObliteratorIterator} from './iterator.js'; + +export default function chain(...iterators: Iterator[]): ObliteratorIterator; diff --git a/amplify/functions/fetchDocuments/node_modules/obliterator/chain.js b/amplify/functions/fetchDocuments/node_modules/obliterator/chain.js new file mode 100644 index 0000000..1e4e25d --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/obliterator/chain.js @@ -0,0 +1,39 @@ +/** + * Obliterator Chain Function + * =========================== + * + * Variadic function combining the given iterators. + */ +var Iterator = require('./iterator.js'); + +/** + * Chain. + * + * @param {...Iterator} iterators - Target iterators. 
+ * @return {Iterator} + */ +module.exports = function chain() { + var iterators = arguments, + current, + i = -1; + + return new Iterator(function iterate() { + if (!current) { + i++; + + if (i >= iterators.length) + return {done: true}; + + current = iterators[i]; + } + + var step = current.next(); + + if (step.done) { + current = null; + return iterate(); + } + + return step; + }); +}; diff --git a/amplify/functions/fetchDocuments/node_modules/obliterator/combinations.d.ts b/amplify/functions/fetchDocuments/node_modules/obliterator/combinations.d.ts new file mode 100644 index 0000000..206dea9 --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/obliterator/combinations.d.ts @@ -0,0 +1,3 @@ +import {default as ObliteratorIterator} from './iterator.js'; + +export default function combinations(array: Array, r: number): ObliteratorIterator>; diff --git a/amplify/functions/fetchDocuments/node_modules/obliterator/combinations.js b/amplify/functions/fetchDocuments/node_modules/obliterator/combinations.js new file mode 100644 index 0000000..d1cf456 --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/obliterator/combinations.js @@ -0,0 +1,76 @@ +/** + * Obliterator Combinations Function + * ================================== + * + * Iterator returning combinations of the given array. + */ +var Iterator = require('./iterator.js'); + +/** + * Helper mapping indices to items. + */ +function indicesToItems(target, items, indices, r) { + for (var i = 0; i < r; i++) + target[i] = items[indices[i]]; +} + +/** + * Combinations. + * + * @param {array} array - Target array. + * @param {number} r - Size of the subsequences. 
+ * @return {Iterator} + */ +module.exports = function combinations(array, r) { + if (!Array.isArray(array)) + throw new Error('obliterator/combinations: first argument should be an array.'); + + var n = array.length; + + if (typeof r !== 'number') + throw new Error('obliterator/combinations: second argument should be omitted or a number.'); + + if (r > n) + throw new Error('obliterator/combinations: the size of the subsequences should not exceed the length of the array.'); + + if (r === n) + return Iterator.of(array.slice()); + + var indices = new Array(r), + subsequence = new Array(r), + first = true, + i; + + for (i = 0; i < r; i++) + indices[i] = i; + + return new Iterator(function next() { + if (first) { + first = false; + + indicesToItems(subsequence, array, indices, r); + return {value: subsequence}; + } + + if (indices[r - 1]++ < n - 1) { + indicesToItems(subsequence, array, indices, r); + return {value: subsequence}; + } + + i = r - 2; + + while (i >= 0 && indices[i] >= (n - (r - i))) + --i; + + if (i < 0) + return {done: true}; + + indices[i]++; + + while (++i < r) + indices[i] = indices[i - 1] + 1; + + indicesToItems(subsequence, array, indices, r); + return {value: subsequence}; + }); +}; diff --git a/amplify/functions/fetchDocuments/node_modules/obliterator/consume.d.ts b/amplify/functions/fetchDocuments/node_modules/obliterator/consume.d.ts new file mode 100644 index 0000000..16812ee --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/obliterator/consume.d.ts @@ -0,0 +1 @@ +export default function consume(iterator: Iterator, steps?: number): void; diff --git a/amplify/functions/fetchDocuments/node_modules/obliterator/consume.js b/amplify/functions/fetchDocuments/node_modules/obliterator/consume.js new file mode 100644 index 0000000..455fea5 --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/obliterator/consume.js @@ -0,0 +1,32 @@ +/* eslint no-constant-condition: 0 */ +/** + * Obliterator Consume Function + * 
============================= + * + * Function consuming the given iterator for n or every steps. + */ + +/** + * Consume. + * + * @param {Iterator} iterator - Target iterator. + * @param {number} [steps] - Optional steps. + */ +module.exports = function consume(iterator, steps) { + var step, + l = arguments.length > 1 ? steps : Infinity, + i = 0; + + while (true) { + + if (i === l) + return; + + step = iterator.next(); + + if (step.done) + return; + + i++; + } +}; diff --git a/amplify/functions/fetchDocuments/node_modules/obliterator/filter.d.ts b/amplify/functions/fetchDocuments/node_modules/obliterator/filter.d.ts new file mode 100644 index 0000000..18b17f9 --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/obliterator/filter.d.ts @@ -0,0 +1,5 @@ +import {default as ObliteratorIterator} from './iterator.js'; + +type PredicateFunction = (item: T) => boolean; + +export default function filter(predicate: PredicateFunction, iterator: Iterator): ObliteratorIterator; diff --git a/amplify/functions/fetchDocuments/node_modules/obliterator/filter.js b/amplify/functions/fetchDocuments/node_modules/obliterator/filter.js new file mode 100644 index 0000000..25a519b --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/obliterator/filter.js @@ -0,0 +1,28 @@ +/** + * Obliterator Filter Function + * =========================== + * + * Function returning a iterator filtering the given iterator. + */ +var Iterator = require('./iterator.js'); + +/** + * Filter. + * + * @param {function} predicate - Predicate function. + * @param {Iterator} target - Target iterator. 
+ * @return {Iterator} + */ +module.exports = function filter(predicate, target) { + return new Iterator(function next() { + var step = target.next(); + + if (step.done) + return step; + + if (!predicate(step.value)) + return next(); + + return step; + }); +}; diff --git a/amplify/functions/fetchDocuments/node_modules/obliterator/foreach.d.ts b/amplify/functions/fetchDocuments/node_modules/obliterator/foreach.d.ts new file mode 100644 index 0000000..61fb9ea --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/obliterator/foreach.d.ts @@ -0,0 +1 @@ +export default function forEach(iterable: any, callback: (item: any, key: any) => void): void; diff --git a/amplify/functions/fetchDocuments/node_modules/obliterator/foreach.js b/amplify/functions/fetchDocuments/node_modules/obliterator/foreach.js new file mode 100644 index 0000000..ef90051 --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/obliterator/foreach.js @@ -0,0 +1,156 @@ +/** + * Obliterator ForEach Function + * ============================= + * + * Helper function used to easily iterate over mixed values. + */ + +/** + * Constants. + */ +var ARRAY_BUFFER_SUPPORT = typeof ArrayBuffer !== 'undefined', + SYMBOL_SUPPORT = typeof Symbol !== 'undefined'; + +/** + * Function able to iterate over almost any iterable JS value. + * + * @param {any} iterable - Iterable value. + * @param {function} callback - Callback function. 
+ */ +function forEach(iterable, callback) { + var iterator, k, i, l, s; + + if (!iterable) + throw new Error('obliterator/forEach: invalid iterable.'); + + if (typeof callback !== 'function') + throw new Error('obliterator/forEach: expecting a callback.'); + + // The target is an array or a string or function arguments + if ( + Array.isArray(iterable) || + (ARRAY_BUFFER_SUPPORT && ArrayBuffer.isView(iterable)) || + typeof iterable === 'string' || + iterable.toString() === '[object Arguments]' + ) { + for (i = 0, l = iterable.length; i < l; i++) + callback(iterable[i], i); + return; + } + + // The target has a #.forEach method + if (typeof iterable.forEach === 'function') { + iterable.forEach(callback); + return; + } + + // The target is iterable + if ( + SYMBOL_SUPPORT && + Symbol.iterator in iterable && + typeof iterable.next !== 'function' + ) { + iterable = iterable[Symbol.iterator](); + } + + // The target is an iterator + if (typeof iterable.next === 'function') { + iterator = iterable; + i = 0; + + while ((s = iterator.next(), s.done !== true)) { + callback(s.value, i); + i++; + } + + return; + } + + // The target is a plain object + for (k in iterable) { + if (iterable.hasOwnProperty(k)) { + callback(iterable[k], k); + } + } + + return; +} + +/** + * Same function as the above `forEach` but will yield `null` when the target + * does not have keys. + * + * @param {any} iterable - Iterable value. + * @param {function} callback - Callback function. 
+ */ +forEach.forEachWithNullKeys = function(iterable, callback) { + var iterator, k, i, l, s; + + if (!iterable) + throw new Error('obliterator/forEachWithNullKeys: invalid iterable.'); + + if (typeof callback !== 'function') + throw new Error('obliterator/forEachWithNullKeys: expecting a callback.'); + + // The target is an array or a string or function arguments + if ( + Array.isArray(iterable) || + (ARRAY_BUFFER_SUPPORT && ArrayBuffer.isView(iterable)) || + typeof iterable === 'string' || + iterable.toString() === '[object Arguments]' + ) { + for (i = 0, l = iterable.length; i < l; i++) + callback(iterable[i], null); + return; + } + + // The target is a Set + if (iterable instanceof Set) { + iterable.forEach(function(value) { + callback(value, null); + }); + return; + } + + // The target has a #.forEach method + if (typeof iterable.forEach === 'function') { + iterable.forEach(callback); + return; + } + + // The target is iterable + if ( + SYMBOL_SUPPORT && + Symbol.iterator in iterable && + typeof iterable.next !== 'function' + ) { + iterable = iterable[Symbol.iterator](); + } + + // The target is an iterator + if (typeof iterable.next === 'function') { + iterator = iterable; + i = 0; + + while ((s = iterator.next(), s.done !== true)) { + callback(s.value, null); + i++; + } + + return; + } + + // The target is a plain object + for (k in iterable) { + if (iterable.hasOwnProperty(k)) { + callback(iterable[k], k); + } + } + + return; +}; + +/** + * Exporting. 
+ */ +module.exports = forEach; diff --git a/amplify/functions/fetchDocuments/node_modules/obliterator/index.d.ts b/amplify/functions/fetchDocuments/node_modules/obliterator/index.d.ts new file mode 100644 index 0000000..9aa15bb --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/obliterator/index.d.ts @@ -0,0 +1,14 @@ +export {default as Iterator} from './iterator'; +export {default as chain} from './chain'; +export {default as combinations} from './combinations'; +export {default as consume} from './consume'; +export {default as filter} from './filter'; +export {default as forEach} from './foreach'; +export {default as map} from './map'; +export {default as match} from './match'; +export {default as permutations} from './permutations'; +export {default as powerSet} from './power-set'; +export {default as range} from './range'; +export {default as split} from './split'; +export {default as take} from './take'; +export {default as takeInto} from './take-into'; diff --git a/amplify/functions/fetchDocuments/node_modules/obliterator/index.js b/amplify/functions/fetchDocuments/node_modules/obliterator/index.js new file mode 100644 index 0000000..d84da62 --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/obliterator/index.js @@ -0,0 +1,22 @@ +/** + * Obliterator Library Endpoint + * ============================= + * + * Exporting the library's functions. 
+ */ +module.exports = { + Iterator: require('./iterator.js'), + chain: require('./chain.js'), + combinations: require('./combinations.js'), + consume: require('./consume.js'), + filter: require('./filter.js'), + forEach: require('./foreach.js'), + map: require('./map.js'), + match: require('./match.js'), + permutations: require('./permutations.js'), + powerSet: require('./power-set.js'), + range: require('./range.js'), + split: require('./split.js'), + take: require('./take.js'), + takeInto: require('./take-into.js') +}; diff --git a/amplify/functions/fetchDocuments/node_modules/obliterator/iterator.d.ts b/amplify/functions/fetchDocuments/node_modules/obliterator/iterator.d.ts new file mode 100644 index 0000000..a8ea6a8 --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/obliterator/iterator.d.ts @@ -0,0 +1,19 @@ +type NextFunction = () => IteratorResult; + +export default class Iterator implements IterableIterator { + + // Constructor + constructor(next: NextFunction); + + // Members + done: boolean; + + // Well-known methods + next(): IteratorResult; + [Symbol.iterator](): IterableIterator; + + // Static methods + static of(...args: T[]): Iterator; + static empty(): Iterator; + static is(value: any): boolean; +} diff --git a/amplify/functions/fetchDocuments/node_modules/obliterator/iterator.js b/amplify/functions/fetchDocuments/node_modules/obliterator/iterator.js new file mode 100644 index 0000000..67652ab --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/obliterator/iterator.js @@ -0,0 +1,104 @@ +/** + * Obliterator Iterator Class + * =========================== + * + * Simple class representing the library's iterators. + */ + +/** + * Iterator class. + * + * @constructor + * @param {function} next - Next function. + */ +function Iterator(next) { + + // Hiding the given function + Object.defineProperty(this, '_next', { + writable: false, + enumerable: false, + value: next + }); + + // Is the iterator complete? 
+ this.done = false; +} + +/** + * Next function. + * + * @return {object} + */ +// NOTE: maybe this should dropped for performance? +Iterator.prototype.next = function() { + if (this.done) + return {done: true}; + + var step = this._next(); + + if (step.done) + this.done = true; + + return step; +}; + +/** + * If symbols are supported, we add `next` to `Symbol.iterator`. + */ +if (typeof Symbol !== 'undefined') + Iterator.prototype[Symbol.iterator] = function() { + return this; + }; + +/** + * Returning an iterator of the given values. + * + * @param {any...} values - Values. + * @return {Iterator} + */ +Iterator.of = function() { + var args = arguments, + l = args.length, + i = 0; + + return new Iterator(function() { + if (i >= l) + return {done: true}; + + return {done: false, value: args[i++]}; + }); +}; + +/** + * Returning an empty iterator. + * + * @return {Iterator} + */ +Iterator.empty = function() { + var iterator = new Iterator(null); + iterator.done = true; + + return iterator; +}; + +/** + * Returning whether the given value is an iterator. + * + * @param {any} value - Value. + * @return {boolean} + */ +Iterator.is = function(value) { + if (value instanceof Iterator) + return true; + + return ( + typeof value === 'object' && + value !== null && + typeof value.next === 'function' + ); +}; + +/** + * Exporting. 
+ */ +module.exports = Iterator; diff --git a/amplify/functions/fetchDocuments/node_modules/obliterator/map.d.ts b/amplify/functions/fetchDocuments/node_modules/obliterator/map.d.ts new file mode 100644 index 0000000..389591b --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/obliterator/map.d.ts @@ -0,0 +1,5 @@ +import {default as ObliteratorIterator} from './iterator.js'; + +type MapFunction = (item: S) => T; + +export default function map(predicate: MapFunction, iterator: Iterator): ObliteratorIterator; diff --git a/amplify/functions/fetchDocuments/node_modules/obliterator/map.js b/amplify/functions/fetchDocuments/node_modules/obliterator/map.js new file mode 100644 index 0000000..fd6dd17 --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/obliterator/map.js @@ -0,0 +1,27 @@ +/** + * Obliterator Map Function + * =========================== + * + * Function returning a iterator mapping the given iterator's values. + */ +var Iterator = require('./iterator.js'); + +/** + * Map. + * + * @param {function} mapper - Map function. + * @param {Iterator} target - Target iterator. 
+ * @return {Iterator} + */ +module.exports = function map(mapper, target) { + return new Iterator(function next() { + var step = target.next(); + + if (step.done) + return step; + + return { + value: mapper(step.value) + }; + }); +}; diff --git a/amplify/functions/fetchDocuments/node_modules/obliterator/match.d.ts b/amplify/functions/fetchDocuments/node_modules/obliterator/match.d.ts new file mode 100644 index 0000000..9a42616 --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/obliterator/match.d.ts @@ -0,0 +1,3 @@ +import {default as ObliteratorIterator} from './iterator.js'; + +export default function match(pattern: RegExp, string: string): ObliteratorIterator; diff --git a/amplify/functions/fetchDocuments/node_modules/obliterator/match.js b/amplify/functions/fetchDocuments/node_modules/obliterator/match.js new file mode 100644 index 0000000..82edf41 --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/obliterator/match.js @@ -0,0 +1,42 @@ +/** + * Obliterator Match Function + * =========================== + * + * Function returning an iterator over the matches of the given regex on the + * target string. + */ +var Iterator = require('./iterator.js'); + +/** + * Match. + * + * @param {RegExp} pattern - Regular expression to use. + * @param {string} string - Target string. + * @return {Iterator} + */ +module.exports = function match(pattern, string) { + var executed = false; + + if (!(pattern instanceof RegExp)) + throw new Error('obliterator/match: invalid pattern. Expecting a regular expression.'); + + if (typeof string !== 'string') + throw new Error('obliterator/match: invalid target. 
Expecting a string.'); + + return new Iterator(function() { + if (executed && !pattern.global) { + pattern.lastIndex = 0; + return {done: true}; + } + + executed = true; + + var m = pattern.exec(string); + + if (m) + return {value: m}; + + pattern.lastIndex = 0; + return {done: true}; + }); +}; diff --git a/amplify/functions/fetchDocuments/node_modules/obliterator/package.json b/amplify/functions/fetchDocuments/node_modules/obliterator/package.json new file mode 100644 index 0000000..ddfaead --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/obliterator/package.json @@ -0,0 +1,45 @@ +{ + "name": "obliterator", + "version": "1.6.1", + "description": "Higher order iterator library for JavaScript.", + "main": "index.js", + "scripts": { + "lint": "eslint *.js", + "prepublish": "npm run lint && npm test", + "test": "mocha test.js && npm run test:types", + "test:types": "tsc --lib es2015,dom --noEmit --noImplicitAny --noImplicitReturns ./test-types.ts" + }, + "repository": { + "type": "git", + "url": "git+https://github.com/yomguithereal/obliterator.git" + }, + "keywords": [ + "iterator" + ], + "author": { + "name": "Guillaume Plique", + "url": "http://github.com/Yomguithereal" + }, + "license": "MIT", + "bugs": { + "url": "https://github.com/yomguithereal/obliterator/issues" + }, + "homepage": "https://github.com/yomguithereal/obliterator#readme", + "devDependencies": { + "@yomguithereal/eslint-config": "^4.0.0", + "eslint": "^6.8.0", + "mocha": "^7.0.0", + "typescript": "^3.7.5" + }, + "eslintConfig": { + "extends": "@yomguithereal/eslint-config", + "globals": { + "ArrayBuffer": true, + "Map": true, + "Set": true, + "Symbol": true, + "Uint8Array": true, + "Uint32Array": true + } + } +} diff --git a/amplify/functions/fetchDocuments/node_modules/obliterator/permutations.d.ts b/amplify/functions/fetchDocuments/node_modules/obliterator/permutations.d.ts new file mode 100644 index 0000000..d48dffd --- /dev/null +++ 
b/amplify/functions/fetchDocuments/node_modules/obliterator/permutations.d.ts @@ -0,0 +1,3 @@ +import {default as ObliteratorIterator} from './iterator.js'; + +export default function permutations(array: Array, r: number): ObliteratorIterator>; diff --git a/amplify/functions/fetchDocuments/node_modules/obliterator/permutations.js b/amplify/functions/fetchDocuments/node_modules/obliterator/permutations.js new file mode 100644 index 0000000..4f4752f --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/obliterator/permutations.js @@ -0,0 +1,96 @@ +/** + * Obliterator Permutations Function + * ================================== + * + * Iterator returning permutations of the given array. + */ +var Iterator = require('./iterator.js'); + +/** + * Helper mapping indices to items. + */ +function indicesToItems(target, items, indices, r) { + for (var i = 0; i < r; i++) + target[i] = items[indices[i]]; +} + +/** + * Permutations. + * + * @param {array} array - Target array. + * @param {number} r - Size of the subsequences. 
+ * @return {Iterator} + */ +module.exports = function permutations(array, r) { + if (!Array.isArray(array)) + throw new Error('obliterator/permutations: first argument should be an array.'); + + var n = array.length; + + if (arguments.length < 2) + r = n; + + if (typeof r !== 'number') + throw new Error('obliterator/permutations: second argument should be omitted or a number.'); + + if (r > n) + throw new Error('obliterator/permutations: the size of the subsequences should not exceed the length of the array.'); + + var indices = new Uint32Array(n), + subsequence = new Array(r), + cycles = new Uint32Array(r), + first = true, + i; + + for (i = 0; i < n; i++) { + indices[i] = i; + + if (i < r) + cycles[i] = n - i; + } + + i = r; + + return new Iterator(function next() { + if (first) { + first = false; + indicesToItems(subsequence, array, indices, r); + return {value: subsequence}; + } + + var tmp, + j; + + i--; + + if (i < 0) + return {done: true}; + + cycles[i]--; + + if (cycles[i] === 0) { + + tmp = indices[i]; + + for (j = i; j < n - 1; j++) + indices[j] = indices[j + 1]; + + indices[n - 1] = tmp; + + cycles[i] = n - i; + return next(); + } + else { + j = cycles[i]; + tmp = indices[i]; + + indices[i] = indices[n - j]; + indices[n - j] = tmp; + + i = r; + + indicesToItems(subsequence, array, indices, r); + return {value: subsequence}; + } + }); +}; diff --git a/amplify/functions/fetchDocuments/node_modules/obliterator/power-set.d.ts b/amplify/functions/fetchDocuments/node_modules/obliterator/power-set.d.ts new file mode 100644 index 0000000..1f238d0 --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/obliterator/power-set.d.ts @@ -0,0 +1,3 @@ +import {default as ObliteratorIterator} from './iterator.js'; + +export default function powerSet(array: Array): ObliteratorIterator>; diff --git a/amplify/functions/fetchDocuments/node_modules/obliterator/power-set.js b/amplify/functions/fetchDocuments/node_modules/obliterator/power-set.js new file mode 100644 
index 0000000..a9d1850 --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/obliterator/power-set.js @@ -0,0 +1,28 @@ +/** + * Obliterator Power Set Function + * =============================== + * + * Iterator returning the power set of the given array. + */ +var Iterator = require('./iterator.js'), + combinations = require('./combinations.js'), + chain = require('./chain.js'); + +/** + * Power set. + * + * @param {array} array - Target array. + * @return {Iterator} + */ +module.exports = function powerSet(array) { + var n = array.length; + + var iterators = new Array(n + 1); + + iterators[0] = Iterator.of([]); + + for (var i = 1; i < n + 1; i++) + iterators[i] = combinations(array, i); + + return chain.apply(null, iterators); +}; diff --git a/amplify/functions/fetchDocuments/node_modules/obliterator/range.d.ts b/amplify/functions/fetchDocuments/node_modules/obliterator/range.d.ts new file mode 100644 index 0000000..498229e --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/obliterator/range.d.ts @@ -0,0 +1,5 @@ +import {default as ObliteratorIterator} from './iterator.js'; + +export default function range(end: number): ObliteratorIterator; +export default function range(start: number, end: number): ObliteratorIterator; +export default function range(start: number, end: number, step: number): ObliteratorIterator; diff --git a/amplify/functions/fetchDocuments/node_modules/obliterator/range.js b/amplify/functions/fetchDocuments/node_modules/obliterator/range.js new file mode 100644 index 0000000..eea7e4d --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/obliterator/range.js @@ -0,0 +1,45 @@ +/** + * Obliterator Range Function + * =========================== + * + * Function returning a range iterator. + */ +var Iterator = require('./iterator.js'); + +/** + * Range. + * + * @param {number} start - Start. + * @param {number} end - End. + * @param {number} step - Step. 
+ * @return {Iterator} + */ +module.exports = function range(start, end, step) { + if (arguments.length === 1) { + end = start; + start = 0; + } + + if (arguments.length < 3) + step = 1; + + var i = start; + + var iterator = new Iterator(function() { + if (i < end) { + var value = i; + + i += step; + + return {value: value}; + } + + return {done: true}; + }); + + iterator.start = start; + iterator.end = end; + iterator.step = step; + + return iterator; +}; diff --git a/amplify/functions/fetchDocuments/node_modules/obliterator/split.d.ts b/amplify/functions/fetchDocuments/node_modules/obliterator/split.d.ts new file mode 100644 index 0000000..e9124ab --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/obliterator/split.d.ts @@ -0,0 +1,3 @@ +import {default as ObliteratorIterator} from './iterator.js'; + +export default function split(pattern: RegExp, string: string): ObliteratorIterator; diff --git a/amplify/functions/fetchDocuments/node_modules/obliterator/split.js b/amplify/functions/fetchDocuments/node_modules/obliterator/split.js new file mode 100644 index 0000000..09abf83 --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/obliterator/split.js @@ -0,0 +1,68 @@ +/** + * Obliterator Split Function + * =========================== + * + * Function returning an iterator over the pieces of a regex split. + */ +var Iterator = require('./iterator.js'); + +/** + * Function used to make the given pattern global. + * + * @param {RegExp} pattern - Regular expression to make global. + * @return {RegExp} + */ +function makeGlobal(pattern) { + var flags = 'g'; + + if (pattern.multiline) flags += 'm'; + if (pattern.ignoreCase) flags += 'i'; + if (pattern.sticky) flags += 'y'; + if (pattern.unicode) flags += 'u'; + + return new RegExp(pattern.source, flags); +} + +/** + * Split. + * + * @param {RegExp} pattern - Regular expression to use. + * @param {string} string - Target string. 
+ * @return {Iterator} + */ +module.exports = function split(pattern, string) { + if (!(pattern instanceof RegExp)) + throw new Error('obliterator/split: invalid pattern. Expecting a regular expression.'); + + if (typeof string !== 'string') + throw new Error('obliterator/split: invalid target. Expecting a string.'); + + // NOTE: cloning the pattern has a performance cost but side effects for not + // doing so might be worse. + pattern = makeGlobal(pattern); + + var consumed = false, + current = 0; + + return new Iterator(function() { + if (consumed) + return {done: true}; + + var match = pattern.exec(string), + value, + length; + + if (match) { + length = match.index + match[0].length; + + value = string.slice(current, match.index); + current = length; + } + else { + consumed = true; + value = string.slice(current); + } + + return {value: value}; + }); +}; diff --git a/amplify/functions/fetchDocuments/node_modules/obliterator/take-into.d.ts b/amplify/functions/fetchDocuments/node_modules/obliterator/take-into.d.ts new file mode 100644 index 0000000..d2bf004 --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/obliterator/take-into.d.ts @@ -0,0 +1,5 @@ +import {default as ObliteratorIterator} from './iterator.js'; + +// Requires a resolution of https://github.com/microsoft/TypeScript/issues/1213 +// export default function takeInto, T>(ArrayClass: new (n: number) => C, iterator: Iterator, n: number): C; +export default function takeInto(ArrayClass: new (arrayLength: number) => T[], iterator: Iterator, n: number): T[]; diff --git a/amplify/functions/fetchDocuments/node_modules/obliterator/take-into.js b/amplify/functions/fetchDocuments/node_modules/obliterator/take-into.js new file mode 100644 index 0000000..6b814f8 --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/obliterator/take-into.js @@ -0,0 +1,40 @@ +/* eslint no-constant-condition: 0 */ +/** + * Obliterator Take Into Function + * =============================== + * + * Same as 
the take function but enables the user to select an array class + * in which to insert the retrieved values. + */ + +/** + * Take Into. + * + * @param {function} ArrayClass - Array class to use. + * @param {Iterator} iterator - Target iterator. + * @param {number} n - Number of items to take. + * @return {array} + */ +module.exports = function takeInto(ArrayClass, iterator, n) { + var array = new ArrayClass(n), + step, + i = 0; + + while (true) { + + if (i === n) + return array; + + step = iterator.next(); + + if (step.done) { + + if (i !== n) + return array.slice(0, i); + + return array; + } + + array[i++] = step.value; + } +}; diff --git a/amplify/functions/fetchDocuments/node_modules/obliterator/take.d.ts b/amplify/functions/fetchDocuments/node_modules/obliterator/take.d.ts new file mode 100644 index 0000000..c9b5026 --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/obliterator/take.d.ts @@ -0,0 +1,3 @@ +import {default as ObliteratorIterator} from './iterator.js'; + +export default function take(iterator: Iterator, n: number): Array; diff --git a/amplify/functions/fetchDocuments/node_modules/obliterator/take.js b/amplify/functions/fetchDocuments/node_modules/obliterator/take.js new file mode 100644 index 0000000..d7c5e96 --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/obliterator/take.js @@ -0,0 +1,40 @@ +/* eslint no-constant-condition: 0 */ +/** + * Obliterator Take Function + * ========================== + * + * Function taking n or every value of the given iterator and returns them + * into an array. + */ + +/** + * Take. + * + * @param {Iterator} iterator - Target iterator. + * @param {number} [n] - Optional number of items to take. + * @return {array} + */ +module.exports = function take(iterator, n) { + var l = arguments.length > 1 ? n : Infinity, + array = l !== Infinity ? 
new Array(l) : [], + step, + i = 0; + + while (true) { + + if (i === l) + return array; + + step = iterator.next(); + + if (step.done) { + + if (i !== n) + return array.slice(0, i); + + return array; + } + + array[i++] = step.value; + } +}; diff --git a/amplify/functions/fetchDocuments/node_modules/strnum/.vscode/launch.json b/amplify/functions/fetchDocuments/node_modules/strnum/.vscode/launch.json new file mode 100644 index 0000000..b87b349 --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/strnum/.vscode/launch.json @@ -0,0 +1,25 @@ +{ + "version": "0.2.0", + "configurations": [ + { + "type": "node", + "request": "launch", + "name": "Jasmine Tests", + "program": "${workspaceFolder}/node_modules/jasmine/bin/jasmine.js", + "args": [ + "${workspaceFolder}/spec/attr_spec.js" + ], + "internalConsoleOptions": "openOnSessionStart" + },{ + "type": "node", + "request": "launch", + "name": "Jasmine Tests current test file", + "program": "${workspaceFolder}/node_modules/jasmine/bin/jasmine.js", + "args": [ + "${file}" + ], + "internalConsoleOptions": "openOnSessionStart" + } + ] + +} \ No newline at end of file diff --git a/amplify/functions/fetchDocuments/node_modules/strnum/CHANGELOG.md b/amplify/functions/fetchDocuments/node_modules/strnum/CHANGELOG.md new file mode 100644 index 0000000..582e460 --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/strnum/CHANGELOG.md @@ -0,0 +1,22 @@ + +**1.1.2 / 2025-02-27** +- fix skiplike for 0 + +**1.1.1 / 2025-02-21** +- All recent fixes of version 2 + +**2.0.4 / 2025-02-20** +- remove console log + +**2.0.3 / 2025-02-20** +- fix for string which are falsly identified as e-notation + +**2.0.1 / 2025-02-20** +- fix: handle only zeros +- fix: return original string when NaN + +**2.0.0 / 2025-02-20** +- Migrating to ESM modules. 
No functional change + +**1.1.0 / 2025-02-20** +- fix (#9): support missing floating point and e notations \ No newline at end of file diff --git a/amplify/functions/fetchDocuments/node_modules/strnum/LICENSE b/amplify/functions/fetchDocuments/node_modules/strnum/LICENSE new file mode 100644 index 0000000..6450554 --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/strnum/LICENSE @@ -0,0 +1,21 @@ +MIT License + +Copyright (c) 2021 Natural Intelligence + +Permission is hereby granted, free of charge, to any person obtaining a copy +of this software and associated documentation files (the "Software"), to deal +in the Software without restriction, including without limitation the rights +to use, copy, modify, merge, publish, distribute, sublicense, and/or sell +copies of the Software, and to permit persons to whom the Software is +furnished to do so, subject to the following conditions: + +The above copyright notice and this permission notice shall be included in all +copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, +OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE +SOFTWARE. 
diff --git a/amplify/functions/fetchDocuments/node_modules/strnum/README.md b/amplify/functions/fetchDocuments/node_modules/strnum/README.md new file mode 100644 index 0000000..419e8ef --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/strnum/README.md @@ -0,0 +1,97 @@ +# strnum +Parse string into Number based on configuration + +## Users + + + + + +Many React Native projects and plugins + +## Usage + +```bash +npm install strnum +``` +```js +const toNumber = require("strnum"); + +toNumber(undefined) // undefined +toNumber(null)) //null +toNumber("")) // "" +toNumber("string"); //"string") +toNumber("12,12"); //"12,12") +toNumber("12 12"); //"12 12") +toNumber("12-12"); //"12-12") +toNumber("12.12.12"); //"12.12.12") +toNumber("0x2f"); //47) +toNumber("-0x2f"); //-47) +toNumber("0x2f", { hex : true}); //47) +toNumber("-0x2f", { hex : true}); //-47) +toNumber("0x2f", { hex : false}); //"0x2f") +toNumber("-0x2f", { hex : false}); //"-0x2f") +toNumber("06"); //6) +toNumber("06", { leadingZeros : true}); //6) +toNumber("06", { leadingZeros : false}); //"06") + +toNumber("006"); //6) +toNumber("006", { leadingZeros : true}); //6) +toNumber("006", { leadingZeros : false}); //"006") +toNumber("0.0"); //0) +toNumber("00.00"); //0) +toNumber("0.06"); //0.06) +toNumber("00.6"); //0.6) +toNumber(".006"); //0.006) +toNumber("6.0"); //6) +toNumber("06.0"); //6) + +toNumber("0.0", { leadingZeros : false}); //0) +toNumber("00.00", { leadingZeros : false}); //"00.00") +toNumber("0.06", { leadingZeros : false}); //0.06) +toNumber("00.6", { leadingZeros : false}); //"00.6") +toNumber(".006", { leadingZeros : false}); //0.006) +toNumber("6.0" , { leadingZeros : false}); //6) +toNumber("06.0" , { leadingZeros : false}); //"06.0") +toNumber("-06"); //-6) +toNumber("-06", { leadingZeros : true}); //-6) +toNumber("-06", { leadingZeros : false}); //"-06") + +toNumber("-0.0"); //-0) +toNumber("-00.00"); //-0) +toNumber("-0.06"); //-0.06) +toNumber("-00.6"); //-0.6) 
+toNumber("-.006"); //-0.006) +toNumber("-6.0"); //-6) +toNumber("-06.0"); //-6) + +toNumber("-0.0" , { leadingZeros : false}); //-0) +toNumber("-00.00", { leadingZeros : false}); //"-00.00") +toNumber("-0.06", { leadingZeros : false}); //-0.06) +toNumber("-00.6", { leadingZeros : false}); //"-00.6") +toNumber("-.006", {leadingZeros : false}); //-0.006) +toNumber("-6.0" , { leadingZeros : false}); //-6) +toNumber("-06.0" , { leadingZeros : false}); //"-06.0") +toNumber("420926189200190257681175017717") ; //4.209261892001902e+29) +toNumber("000000000000000000000000017717" , { leadingZeros : false}); //"000000000000000000000000017717") +toNumber("000000000000000000000000017717" , { leadingZeros : true}); //17717) +toNumber("01.0e2" , { leadingZeros : false}); //"01.0e2") +toNumber("-01.0e2" , { leadingZeros : false}); //"-01.0e2") +toNumber("01.0e2") ; //100) +toNumber("-01.0e2") ; //-100) +toNumber("1.0e2") ; //100) + +toNumber("-1.0e2") ; //-100) +toNumber("1.0e-2"); //0.01) + +toNumber("+1212121212"); // 1212121212 +toNumber("+1212121212", { skipLike: /\+[0-9]{10}/} )); //"+1212121212" +``` + +Supported Options +```js +hex: true, //when hexadecimal string should be parsed +leadingZeros: true, //when number with leading zeros like 08 should be parsed. 
0.0 is not impacted +eNotation: true, //when number with eNotation or number parsed in eNotation should be considered +skipLike: /regex/ //when string should not be parsed when it matches the specified regular expression +``` diff --git a/amplify/functions/fetchDocuments/node_modules/strnum/package.json b/amplify/functions/fetchDocuments/node_modules/strnum/package.json new file mode 100644 index 0000000..90a1b96 --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/strnum/package.json @@ -0,0 +1,30 @@ +{ + "name": "strnum", + "version": "1.1.2", + "description": "Parse String to Number based on configuration", + "main": "strnum.js", + "scripts": { + "test": "jasmine strnum.test.js" + }, + "keywords": [ + "string", + "number", + "parse", + "convert" + ], + "repository": { + "type": "git", + "url": "https://github.com/NaturalIntelligence/strnum" + }, + "author": "Amit Gupta (https://amitkumargupta.work/)", + "license": "MIT", + "funding": [ + { + "type": "github", + "url": "https://github.com/sponsors/NaturalIntelligence" + } + ], + "devDependencies": { + "jasmine": "^5.6.0" + } +} diff --git a/amplify/functions/fetchDocuments/node_modules/strnum/strnum.js b/amplify/functions/fetchDocuments/node_modules/strnum/strnum.js new file mode 100644 index 0000000..c3bd08e --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/strnum/strnum.js @@ -0,0 +1,111 @@ +const hexRegex = /^[-+]?0x[a-fA-F0-9]+$/; +const numRegex = /^([\-\+])?(0*)([0-9]*(\.[0-9]*)?)$/; +// const octRegex = /^0x[a-z0-9]+/; +// const binRegex = /0x[a-z0-9]+/; + + +const consider = { + hex : true, + // oct: false, + leadingZeros: true, + decimalPoint: "\.", + eNotation: true, + //skipLike: /regex/ +}; + +function toNumber(str, options = {}){ + options = Object.assign({}, consider, options ); + if(!str || typeof str !== "string" ) return str; + + let trimmedStr = str.trim(); + + if(options.skipLike !== undefined && options.skipLike.test(trimmedStr)) return str; + else if(str==="0") 
return 0; + else if (options.hex && hexRegex.test(trimmedStr)) { + return parse_int(trimmedStr, 16); + // }else if (options.oct && octRegex.test(str)) { + // return Number.parseInt(val, 8); + }else if (trimmedStr.search(/[eE]/)!== -1) { //eNotation + const notation = trimmedStr.match(/^([-\+])?(0*)([0-9]*(\.[0-9]*)?[eE][-\+]?[0-9]+)$/); + // +00.123 => [ , '+', '00', '.123', .. + if(notation){ + // console.log(notation) + if(options.leadingZeros){ //accept with leading zeros + trimmedStr = (notation[1] || "") + notation[3]; + }else{ + if(notation[2] === "0" && notation[3][0]=== "."){ //valid number + }else{ + return str; + } + } + return options.eNotation ? Number(trimmedStr) : str; + }else{ + return str; + } + // }else if (options.parseBin && binRegex.test(str)) { + // return Number.parseInt(val, 2); + }else{ + //separate negative sign, leading zeros, and rest number + const match = numRegex.exec(trimmedStr); + // +00.123 => [ , '+', '00', '.123', .. + if(match){ + const sign = match[1]; + const leadingZeros = match[2]; + let numTrimmedByZeros = trimZeros(match[3]); //complete num without leading zeros + //trim ending zeros for floating number + + if(!options.leadingZeros && leadingZeros.length > 0 && sign && trimmedStr[2] !== ".") return str; //-0123 + else if(!options.leadingZeros && leadingZeros.length > 0 && !sign && trimmedStr[1] !== ".") return str; //0123 + else if(options.leadingZeros && leadingZeros===str) return 0; //00 + + else{//no leading zeros or leading zeros are allowed + const num = Number(trimmedStr); + const numStr = "" + num; + + if(numStr.search(/[eE]/) !== -1){ //given number is long and parsed to eNotation + if(options.eNotation) return num; + else return str; + }else if(trimmedStr.indexOf(".") !== -1){ //floating number + if(numStr === "0" && (numTrimmedByZeros === "") ) return num; //0.0 + else if(numStr === numTrimmedByZeros) return num; //0.456. 
0.79000 + else if( sign && numStr === "-"+numTrimmedByZeros) return num; + else return str; + } + + if(leadingZeros){ + return (numTrimmedByZeros === numStr) || (sign+numTrimmedByZeros === numStr) ? num : str + }else { + return (trimmedStr === numStr) || (trimmedStr === sign+numStr) ? num : str + } + } + }else{ //non-numeric string + return str; + } + } +} + +/** + * + * @param {string} numStr without leading zeros + * @returns + */ +function trimZeros(numStr){ + if(numStr && numStr.indexOf(".") !== -1){//float + numStr = numStr.replace(/0+$/, ""); //remove ending zeros + if(numStr === ".") numStr = "0"; + else if(numStr[0] === ".") numStr = "0"+numStr; + else if(numStr[numStr.length-1] === ".") numStr = numStr.substr(0,numStr.length-1); + return numStr; + } + return numStr; +} + +function parse_int(numStr, base){ + //polyfill + if(parseInt) return parseInt(numStr, base); + else if(Number.parseInt) return Number.parseInt(numStr, base); + else if(window && window.parseInt) return window.parseInt(numStr, base); + else throw new Error("parseInt, Number.parseInt, window.parseInt are not supported") +} + +module.exports = toNumber; \ No newline at end of file diff --git a/amplify/functions/fetchDocuments/node_modules/strnum/strnum.test.js b/amplify/functions/fetchDocuments/node_modules/strnum/strnum.test.js new file mode 100644 index 0000000..c476614 --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/strnum/strnum.test.js @@ -0,0 +1,165 @@ +const toNumber = require("./strnum.js"); + +describe("Should convert all the valid numeric strings to number", () => { + it("should return undefined, null, empty string, or non-numeric as it is", () => { + expect(toNumber(undefined)).not.toBeDefined(); + expect(toNumber(null)).toEqual(null); + expect(toNumber("")).toEqual(""); + expect(toNumber("string")).toEqual("string"); + expect(toNumber("e89794659669cb7bb967db73a7ea6889c3891727")).toEqual("e89794659669cb7bb967db73a7ea6889c3891727"); + + }); + it("should not parse 
number with spaces or comma", () => { + expect(toNumber("12,12")).toEqual("12,12"); + expect(toNumber("12 12")).toEqual("12 12"); + expect(toNumber("12-12")).toEqual("12-12"); + expect(toNumber("12.12.12")).toEqual("12.12.12"); + }) + it("should consider + sign", () => { + expect(toNumber("+12")).toEqual(12); + expect(toNumber("+ 12")).toEqual("+ 12"); + expect(toNumber("12+12")).toEqual("12+12"); + expect(toNumber("1212+")).toEqual("1212+"); + }) + it("should parse hexadecimal values", () => { + expect(toNumber("0x2f")).toEqual(47); + expect(toNumber("-0x2f")).toEqual(-47); + expect(toNumber("0x2f", { hex : true})).toEqual(47); + expect(toNumber("-0x2f", { hex : true})).toEqual(-47); + expect(toNumber("0x2f", { hex : false})).toEqual("0x2f"); + expect(toNumber("-0x2f", { hex : false})).toEqual("-0x2f"); + }) + it("should not parse strings with 0x embedded", () => { + expect(toNumber("0xzz")).toEqual("0xzz"); + expect(toNumber("iweraf0x123qwerqwer")).toEqual("iweraf0x123qwerqwer"); + expect(toNumber("1230x55")).toEqual("1230x55"); + expect(toNumber("JVBERi0xLjMNCiXi48")).toEqual("JVBERi0xLjMNCiXi48"); + }) + it("leading zeros", () => { + expect(toNumber("0")).toEqual(0); + expect(toNumber("00")).toEqual(0); + expect(toNumber("00.0")).toEqual(0); + + expect(toNumber("0",{ leadingZeros : false})).toEqual(0); + expect(toNumber("00",{ leadingZeros : false})).toEqual("00"); + expect(toNumber("00.0",{ leadingZeros : false})).toEqual("00.0"); + + expect(toNumber("06")).toEqual(6); + expect(toNumber("06", { leadingZeros : true})).toEqual(6); + expect(toNumber("06", { leadingZeros : false})).toEqual("06"); + + expect(toNumber("006")).toEqual(6); + expect(toNumber("006", { leadingZeros : true})).toEqual(6); + expect(toNumber("006", { leadingZeros : false})).toEqual("006"); + + expect(toNumber("000000000000000000000000017717" , { leadingZeros : false})).toEqual("000000000000000000000000017717"); + expect(toNumber("000000000000000000000000017717" , { leadingZeros : 
true})).toEqual(17717); + expect(toNumber("020211201030005811824") ).toEqual("020211201030005811824"); + expect(toNumber("0420926189200190257681175017717") ).toEqual(4.209261892001902e+29); + }) + it("invalid floating number", () => { + expect(toNumber("20.21.030") ).toEqual("20.21.030"); + expect(toNumber("0.21.030") ).toEqual("0.21.030"); + expect(toNumber("0.21.") ).toEqual("0.21."); + }); + it("floating point and leading zeros", () => { + expect(toNumber("0.")).toEqual(0); + expect(toNumber("+0.")).toEqual(0); + expect(toNumber("-0.")).toEqual(-0); + expect(toNumber("1.") ).toEqual(1); + expect(toNumber("00.00")).toEqual(0); + expect(toNumber("0.06")).toEqual(0.06); + expect(toNumber("00.6")).toEqual(0.6); + expect(toNumber(".006")).toEqual(0.006); + expect(toNumber("6.0")).toEqual(6); + expect(toNumber("06.0")).toEqual(6); + + expect(toNumber("0.0", { leadingZeros : false})).toEqual(0); + expect(toNumber("00.00", { leadingZeros : false})).toEqual("00.00"); + expect(toNumber("0.06", { leadingZeros : false})).toEqual(0.06); + expect(toNumber("00.6", { leadingZeros : false})).toEqual("00.6"); + expect(toNumber(".006", { leadingZeros : false})).toEqual(0.006); + expect(toNumber("6.0" , { leadingZeros : false})).toEqual(6); + expect(toNumber("06.0" , { leadingZeros : false})).toEqual("06.0"); + }) + it("negative number leading zeros", () => { + expect(toNumber("+06")).toEqual(6); + expect(toNumber("-06")).toEqual(-6); + expect(toNumber("-06", { leadingZeros : true})).toEqual(-6); + expect(toNumber("-06", { leadingZeros : false})).toEqual("-06"); + + expect(toNumber("-0.0")).toEqual(-0); + expect(toNumber("-00.00")).toEqual(-0); + expect(toNumber("-0.06")).toEqual(-0.06); + expect(toNumber("-00.6")).toEqual(-0.6); + expect(toNumber("-.006")).toEqual(-0.006); + expect(toNumber("-6.0")).toEqual(-6); + expect(toNumber("-06.0")).toEqual(-6); + + expect(toNumber("-0.0" , { leadingZeros : false})).toEqual(-0); + expect(toNumber("-00.00", { leadingZeros : 
false})).toEqual("-00.00"); + expect(toNumber("-0.06", { leadingZeros : false})).toEqual(-0.06); + expect(toNumber("-00.6", { leadingZeros : false})).toEqual("-00.6"); + expect(toNumber("-.006", {leadingZeros : false})).toEqual(-0.006); + expect(toNumber("-6.0" , { leadingZeros : false})).toEqual(-6); + expect(toNumber("-06.0" , { leadingZeros : false})).toEqual("-06.0"); + }) + it("long number", () => { + expect(toNumber("020211201030005811824") ).toEqual("020211201030005811824"); + expect(toNumber("20211201030005811824") ).toEqual("20211201030005811824"); + expect(toNumber("20.211201030005811824") ).toEqual("20.211201030005811824"); + expect(toNumber("0.211201030005811824") ).toEqual("0.211201030005811824"); + }); + it("scientific notation", () => { + expect(toNumber("01.0e2" , { leadingZeros : false})).toEqual("01.0e2"); + expect(toNumber("-01.0e2" , { leadingZeros : false})).toEqual("-01.0e2"); + expect(toNumber("01.0e2") ).toEqual(100); + expect(toNumber("-01.0e2") ).toEqual(-100); + expect(toNumber("1.0e2") ).toEqual(100); + + expect(toNumber("-1.0e2") ).toEqual(-100); + expect(toNumber("1.0e-2")).toEqual(0.01); + + expect(toNumber("420926189200190257681175017717") ).toEqual(4.209261892001902e+29); + expect(toNumber("420926189200190257681175017717" , { eNotation: false} )).toEqual("420926189200190257681175017717"); + + expect(toNumber("1e-2")).toEqual(0.01); + expect(toNumber("1e+2")).toEqual(100); + expect(toNumber("1.e+2")).toEqual(100); + }); + + it("scientific notation with upper E", () => { + expect(toNumber("01.0E2" , { leadingZeros : false})).toEqual("01.0E2"); + expect(toNumber("-01.0E2" , { leadingZeros : false})).toEqual("-01.0E2"); + expect(toNumber("01.0E2") ).toEqual(100); + expect(toNumber("-01.0E2") ).toEqual(-100); + expect(toNumber("1.0E2") ).toEqual(100); + + expect(toNumber("-1.0E2") ).toEqual(-100); + expect(toNumber("1.0E-2")).toEqual(0.01); + }); + + it("should skip matching pattern", () => { + expect(toNumber("0", { skipLike: /.*/ 
})).toEqual("0"); + expect(toNumber("+12", { skipLike: /\+[0-9]{10}/} )).toEqual(12); + expect(toNumber("12+12", { skipLike: /\+[0-9]{10}/} )).toEqual("12+12"); + expect(toNumber("12+1212121212", { skipLike: /\+[0-9]{10}/} )).toEqual("12+1212121212"); + expect(toNumber("+1212121212") ).toEqual(1212121212); + expect(toNumber("+1212121212", { skipLike: /\+[0-9]{10}/} )).toEqual("+1212121212"); + }) + it("should not change string if not number", () => { + expect(toNumber("+12 12")).toEqual("+12 12"); + expect(toNumber(" +12 12 ")).toEqual(" +12 12 "); + }) + it("should ignore sorrounded spaces ", () => { + expect(toNumber(" +1212 ")).toEqual(1212); + }) + + it("negative numbers", () => { + expect(toNumber("+1212")).toEqual(1212); + expect(toNumber("+12.12")).toEqual(12.12); + expect(toNumber("-12.12")).toEqual(-12.12); + expect(toNumber("-012.12")).toEqual(-12.12); + expect(toNumber("-012.12")).toEqual(-12.12); + }) +}); diff --git a/amplify/functions/fetchDocuments/node_modules/tslib/CopyrightNotice.txt b/amplify/functions/fetchDocuments/node_modules/tslib/CopyrightNotice.txt new file mode 100644 index 0000000..0e42542 --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/tslib/CopyrightNotice.txt @@ -0,0 +1,15 @@ +/****************************************************************************** +Copyright (c) Microsoft Corporation. + +Permission to use, copy, modify, and/or distribute this software for any +purpose with or without fee is hereby granted. + +THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES WITH +REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF MERCHANTABILITY +AND FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR ANY SPECIAL, DIRECT, +INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES WHATSOEVER RESULTING FROM +LOSS OF USE, DATA OR PROFITS, WHETHER IN AN ACTION OF CONTRACT, NEGLIGENCE OR +OTHER TORTIOUS ACTION, ARISING OUT OF OR IN CONNECTION WITH THE USE OR +PERFORMANCE OF THIS SOFTWARE. 
+***************************************************************************** */ + diff --git a/amplify/functions/fetchDocuments/node_modules/tslib/LICENSE.txt b/amplify/functions/fetchDocuments/node_modules/tslib/LICENSE.txt new file mode 100644 index 0000000..bfe6430 --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/tslib/LICENSE.txt @@ -0,0 +1,12 @@ +Copyright (c) Microsoft Corporation. + +Permission to use, copy, modify, and/or distribute this software for any +purpose with or without fee is hereby granted. + +THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES WITH +REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF MERCHANTABILITY +AND FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR ANY SPECIAL, DIRECT, +INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES WHATSOEVER RESULTING FROM +LOSS OF USE, DATA OR PROFITS, WHETHER IN AN ACTION OF CONTRACT, NEGLIGENCE OR +OTHER TORTIOUS ACTION, ARISING OUT OF OR IN CONNECTION WITH THE USE OR +PERFORMANCE OF THIS SOFTWARE. \ No newline at end of file diff --git a/amplify/functions/fetchDocuments/node_modules/tslib/README.md b/amplify/functions/fetchDocuments/node_modules/tslib/README.md new file mode 100644 index 0000000..290cc61 --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/tslib/README.md @@ -0,0 +1,164 @@ +# tslib + +This is a runtime library for [TypeScript](https://www.typescriptlang.org/) that contains all of the TypeScript helper functions. + +This library is primarily used by the `--importHelpers` flag in TypeScript. 
+When using `--importHelpers`, a module that uses helper functions like `__extends` and `__assign` in the following emitted file: + +```ts +var __assign = (this && this.__assign) || Object.assign || function(t) { + for (var s, i = 1, n = arguments.length; i < n; i++) { + s = arguments[i]; + for (var p in s) if (Object.prototype.hasOwnProperty.call(s, p)) + t[p] = s[p]; + } + return t; +}; +exports.x = {}; +exports.y = __assign({}, exports.x); + +``` + +will instead be emitted as something like the following: + +```ts +var tslib_1 = require("tslib"); +exports.x = {}; +exports.y = tslib_1.__assign({}, exports.x); +``` + +Because this can avoid duplicate declarations of things like `__extends`, `__assign`, etc., this means delivering users smaller files on average, as well as less runtime overhead. +For optimized bundles with TypeScript, you should absolutely consider using `tslib` and `--importHelpers`. + +# Installing + +For the latest stable version, run: + +## npm + +```sh +# TypeScript 3.9.2 or later +npm install tslib + +# TypeScript 3.8.4 or earlier +npm install tslib@^1 + +# TypeScript 2.3.2 or earlier +npm install tslib@1.6.1 +``` + +## yarn + +```sh +# TypeScript 3.9.2 or later +yarn add tslib + +# TypeScript 3.8.4 or earlier +yarn add tslib@^1 + +# TypeScript 2.3.2 or earlier +yarn add tslib@1.6.1 +``` + +## bower + +```sh +# TypeScript 3.9.2 or later +bower install tslib + +# TypeScript 3.8.4 or earlier +bower install tslib@^1 + +# TypeScript 2.3.2 or earlier +bower install tslib@1.6.1 +``` + +## JSPM + +```sh +# TypeScript 3.9.2 or later +jspm install tslib + +# TypeScript 3.8.4 or earlier +jspm install tslib@^1 + +# TypeScript 2.3.2 or earlier +jspm install tslib@1.6.1 +``` + +# Usage + +Set the `importHelpers` compiler option on the command line: + +``` +tsc --importHelpers file.ts +``` + +or in your tsconfig.json: + +```json +{ + "compilerOptions": { + "importHelpers": true + } +} +``` + +#### For bower and JSPM users + +You will need to add a `paths` 
mapping for `tslib`, e.g. For Bower users: + +```json +{ + "compilerOptions": { + "module": "amd", + "importHelpers": true, + "baseUrl": "./", + "paths": { + "tslib" : ["bower_components/tslib/tslib.d.ts"] + } + } +} +``` + +For JSPM users: + +```json +{ + "compilerOptions": { + "module": "system", + "importHelpers": true, + "baseUrl": "./", + "paths": { + "tslib" : ["jspm_packages/npm/tslib@2.x.y/tslib.d.ts"] + } + } +} +``` + +## Deployment + +- Choose your new version number +- Set it in `package.json` and `bower.json` +- Create a tag: `git tag [version]` +- Push the tag: `git push --tags` +- Create a [release in GitHub](https://github.com/microsoft/tslib/releases) +- Run the [publish to npm](https://github.com/microsoft/tslib/actions?query=workflow%3A%22Publish+to+NPM%22) workflow + +Done. + +# Contribute + +There are many ways to [contribute](https://github.com/Microsoft/TypeScript/blob/master/CONTRIBUTING.md) to TypeScript. + +* [Submit bugs](https://github.com/Microsoft/TypeScript/issues) and help us verify fixes as they are checked in. +* Review the [source code changes](https://github.com/Microsoft/TypeScript/pulls). +* Engage with other TypeScript users and developers on [StackOverflow](http://stackoverflow.com/questions/tagged/typescript). +* Join the [#typescript](http://twitter.com/#!/search/realtime/%23typescript) discussion on Twitter. +* [Contribute bug fixes](https://github.com/Microsoft/TypeScript/blob/master/CONTRIBUTING.md). 
+ +# Documentation + +* [Quick tutorial](http://www.typescriptlang.org/Tutorial) +* [Programming handbook](http://www.typescriptlang.org/Handbook) +* [Homepage](http://www.typescriptlang.org/) diff --git a/amplify/functions/fetchDocuments/node_modules/tslib/SECURITY.md b/amplify/functions/fetchDocuments/node_modules/tslib/SECURITY.md new file mode 100644 index 0000000..869fdfe --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/tslib/SECURITY.md @@ -0,0 +1,41 @@ + + +## Security + +Microsoft takes the security of our software products and services seriously, which includes all source code repositories managed through our GitHub organizations, which include [Microsoft](https://github.com/Microsoft), [Azure](https://github.com/Azure), [DotNet](https://github.com/dotnet), [AspNet](https://github.com/aspnet), [Xamarin](https://github.com/xamarin), and [our GitHub organizations](https://opensource.microsoft.com/). + +If you believe you have found a security vulnerability in any Microsoft-owned repository that meets [Microsoft's definition of a security vulnerability](https://aka.ms/opensource/security/definition), please report it to us as described below. + +## Reporting Security Issues + +**Please do not report security vulnerabilities through public GitHub issues.** + +Instead, please report them to the Microsoft Security Response Center (MSRC) at [https://msrc.microsoft.com/create-report](https://aka.ms/opensource/security/create-report). + +If you prefer to submit without logging in, send email to [secure@microsoft.com](mailto:secure@microsoft.com). If possible, encrypt your message with our PGP key; please download it from the [Microsoft Security Response Center PGP Key page](https://aka.ms/opensource/security/pgpkey). + +You should receive a response within 24 hours. If for some reason you do not, please follow up via email to ensure we received your original message. 
Additional information can be found at [microsoft.com/msrc](https://aka.ms/opensource/security/msrc). + +Please include the requested information listed below (as much as you can provide) to help us better understand the nature and scope of the possible issue: + + * Type of issue (e.g. buffer overflow, SQL injection, cross-site scripting, etc.) + * Full paths of source file(s) related to the manifestation of the issue + * The location of the affected source code (tag/branch/commit or direct URL) + * Any special configuration required to reproduce the issue + * Step-by-step instructions to reproduce the issue + * Proof-of-concept or exploit code (if possible) + * Impact of the issue, including how an attacker might exploit the issue + +This information will help us triage your report more quickly. + +If you are reporting for a bug bounty, more complete reports can contribute to a higher bounty award. Please visit our [Microsoft Bug Bounty Program](https://aka.ms/opensource/security/bounty) page for more details about our active programs. + +## Preferred Languages + +We prefer all communications to be in English. + +## Policy + +Microsoft follows the principle of [Coordinated Vulnerability Disclosure](https://aka.ms/opensource/security/cvd). + + diff --git a/amplify/functions/fetchDocuments/node_modules/tslib/modules/index.d.ts b/amplify/functions/fetchDocuments/node_modules/tslib/modules/index.d.ts new file mode 100644 index 0000000..3244fab --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/tslib/modules/index.d.ts @@ -0,0 +1,38 @@ +// Note: named reexports are used instead of `export *` because +// TypeScript itself doesn't resolve the `export *` when checking +// if a particular helper exists. 
+export { + __extends, + __assign, + __rest, + __decorate, + __param, + __esDecorate, + __runInitializers, + __propKey, + __setFunctionName, + __metadata, + __awaiter, + __generator, + __exportStar, + __values, + __read, + __spread, + __spreadArrays, + __spreadArray, + __await, + __asyncGenerator, + __asyncDelegator, + __asyncValues, + __makeTemplateObject, + __importStar, + __importDefault, + __classPrivateFieldGet, + __classPrivateFieldSet, + __classPrivateFieldIn, + __createBinding, + __addDisposableResource, + __disposeResources, + __rewriteRelativeImportExtension, +} from '../tslib.js'; +export * as default from '../tslib.js'; diff --git a/amplify/functions/fetchDocuments/node_modules/tslib/modules/index.js b/amplify/functions/fetchDocuments/node_modules/tslib/modules/index.js new file mode 100644 index 0000000..c91f618 --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/tslib/modules/index.js @@ -0,0 +1,70 @@ +import tslib from '../tslib.js'; +const { + __extends, + __assign, + __rest, + __decorate, + __param, + __esDecorate, + __runInitializers, + __propKey, + __setFunctionName, + __metadata, + __awaiter, + __generator, + __exportStar, + __createBinding, + __values, + __read, + __spread, + __spreadArrays, + __spreadArray, + __await, + __asyncGenerator, + __asyncDelegator, + __asyncValues, + __makeTemplateObject, + __importStar, + __importDefault, + __classPrivateFieldGet, + __classPrivateFieldSet, + __classPrivateFieldIn, + __addDisposableResource, + __disposeResources, + __rewriteRelativeImportExtension, +} = tslib; +export { + __extends, + __assign, + __rest, + __decorate, + __param, + __esDecorate, + __runInitializers, + __propKey, + __setFunctionName, + __metadata, + __awaiter, + __generator, + __exportStar, + __createBinding, + __values, + __read, + __spread, + __spreadArrays, + __spreadArray, + __await, + __asyncGenerator, + __asyncDelegator, + __asyncValues, + __makeTemplateObject, + __importStar, + __importDefault, + 
__classPrivateFieldGet, + __classPrivateFieldSet, + __classPrivateFieldIn, + __addDisposableResource, + __disposeResources, + __rewriteRelativeImportExtension, +}; +export default tslib; diff --git a/amplify/functions/fetchDocuments/node_modules/tslib/modules/package.json b/amplify/functions/fetchDocuments/node_modules/tslib/modules/package.json new file mode 100644 index 0000000..aafa0e4 --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/tslib/modules/package.json @@ -0,0 +1,3 @@ +{ + "type": "module" +} \ No newline at end of file diff --git a/amplify/functions/fetchDocuments/node_modules/tslib/package.json b/amplify/functions/fetchDocuments/node_modules/tslib/package.json new file mode 100644 index 0000000..57d0578 --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/tslib/package.json @@ -0,0 +1,47 @@ +{ + "name": "tslib", + "author": "Microsoft Corp.", + "homepage": "https://www.typescriptlang.org/", + "version": "2.8.1", + "license": "0BSD", + "description": "Runtime library for TypeScript helper functions", + "keywords": [ + "TypeScript", + "Microsoft", + "compiler", + "language", + "javascript", + "tslib", + "runtime" + ], + "bugs": { + "url": "https://github.com/Microsoft/TypeScript/issues" + }, + "repository": { + "type": "git", + "url": "https://github.com/Microsoft/tslib.git" + }, + "main": "tslib.js", + "module": "tslib.es6.js", + "jsnext:main": "tslib.es6.js", + "typings": "tslib.d.ts", + "sideEffects": false, + "exports": { + ".": { + "module": { + "types": "./modules/index.d.ts", + "default": "./tslib.es6.mjs" + }, + "import": { + "node": "./modules/index.js", + "default": { + "types": "./modules/index.d.ts", + "default": "./tslib.es6.mjs" + } + }, + "default": "./tslib.js" + }, + "./*": "./*", + "./": "./" + } +} diff --git a/amplify/functions/fetchDocuments/node_modules/tslib/tslib.d.ts b/amplify/functions/fetchDocuments/node_modules/tslib/tslib.d.ts new file mode 100644 index 0000000..f23df55 --- /dev/null +++ 
b/amplify/functions/fetchDocuments/node_modules/tslib/tslib.d.ts @@ -0,0 +1,460 @@ +/****************************************************************************** +Copyright (c) Microsoft Corporation. + +Permission to use, copy, modify, and/or distribute this software for any +purpose with or without fee is hereby granted. + +THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES WITH +REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF MERCHANTABILITY +AND FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR ANY SPECIAL, DIRECT, +INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES WHATSOEVER RESULTING FROM +LOSS OF USE, DATA OR PROFITS, WHETHER IN AN ACTION OF CONTRACT, NEGLIGENCE OR +OTHER TORTIOUS ACTION, ARISING OUT OF OR IN CONNECTION WITH THE USE OR +PERFORMANCE OF THIS SOFTWARE. +***************************************************************************** */ + +/** + * Used to shim class extends. + * + * @param d The derived class. + * @param b The base class. + */ +export declare function __extends(d: Function, b: Function): void; + +/** + * Copy the values of all of the enumerable own properties from one or more source objects to a + * target object. Returns the target object. + * + * @param t The target object to copy to. + * @param sources One or more source objects from which to copy properties + */ +export declare function __assign(t: any, ...sources: any[]): any; + +/** + * Performs a rest spread on an object. + * + * @param t The source value. + * @param propertyNames The property names excluded from the rest spread. + */ +export declare function __rest(t: any, propertyNames: (string | symbol)[]): any; + +/** + * Applies decorators to a target object + * + * @param decorators The set of decorators to apply. + * @param target The target object. + * @param key If specified, the own property to apply the decorators to. + * @param desc The property descriptor, defaults to fetching the descriptor from the target object. 
+ * @experimental + */ +export declare function __decorate(decorators: Function[], target: any, key?: string | symbol, desc?: any): any; + +/** + * Creates an observing function decorator from a parameter decorator. + * + * @param paramIndex The parameter index to apply the decorator to. + * @param decorator The parameter decorator to apply. Note that the return value is ignored. + * @experimental + */ +export declare function __param(paramIndex: number, decorator: Function): Function; + +/** + * Applies decorators to a class or class member, following the native ECMAScript decorator specification. + * @param ctor For non-field class members, the class constructor. Otherwise, `null`. + * @param descriptorIn The `PropertyDescriptor` to use when unable to look up the property from `ctor`. + * @param decorators The decorators to apply + * @param contextIn The `DecoratorContext` to clone for each decorator application. + * @param initializers An array of field initializer mutation functions into which new initializers are written. + * @param extraInitializers An array of extra initializer functions into which new initializers are written. + */ +export declare function __esDecorate(ctor: Function | null, descriptorIn: object | null, decorators: Function[], contextIn: object, initializers: Function[] | null, extraInitializers: Function[]): void; + +/** + * Runs field initializers or extra initializers generated by `__esDecorate`. + * @param thisArg The `this` argument to use. + * @param initializers The array of initializers to evaluate. + * @param value The initial value to pass to the initializers. + */ +export declare function __runInitializers(thisArg: unknown, initializers: Function[], value?: any): any; + +/** + * Converts a computed property name into a `string` or `symbol` value. + */ +export declare function __propKey(x: any): string | symbol; + +/** + * Assigns the name of a function derived from the left-hand side of an assignment. 
+ * @param f The function to rename. + * @param name The new name for the function. + * @param prefix A prefix (such as `"get"` or `"set"`) to insert before the name. + */ +export declare function __setFunctionName(f: Function, name: string | symbol, prefix?: string): Function; + +/** + * Creates a decorator that sets metadata. + * + * @param metadataKey The metadata key + * @param metadataValue The metadata value + * @experimental + */ +export declare function __metadata(metadataKey: any, metadataValue: any): Function; + +/** + * Converts a generator function into a pseudo-async function, by treating each `yield` as an `await`. + * + * @param thisArg The reference to use as the `this` value in the generator function + * @param _arguments The optional arguments array + * @param P The optional promise constructor argument, defaults to the `Promise` property of the global object. + * @param generator The generator function + */ +export declare function __awaiter(thisArg: any, _arguments: any, P: Function, generator: Function): any; + +/** + * Creates an Iterator object using the body as the implementation. + * + * @param thisArg The reference to use as the `this` value in the function + * @param body The generator state-machine based implementation. + * + * @see [./docs/generator.md] + */ +export declare function __generator(thisArg: any, body: Function): any; + +/** + * Creates bindings for all enumerable properties of `m` on `exports` + * + * @param m The source object + * @param o The `exports` object. + */ +export declare function __exportStar(m: any, o: any): void; + +/** + * Creates a value iterator from an `Iterable` or `ArrayLike` object. + * + * @param o The object. + * @throws {TypeError} If `o` is neither `Iterable`, nor an `ArrayLike`. + */ +export declare function __values(o: any): any; + +/** + * Reads values from an `Iterable` or `ArrayLike` object and returns the resulting array. + * + * @param o The object to read from. 
+ * @param n The maximum number of arguments to read, defaults to `Infinity`. + */ +export declare function __read(o: any, n?: number): any[]; + +/** + * Creates an array from iterable spread. + * + * @param args The Iterable objects to spread. + * @deprecated since TypeScript 4.2 - Use `__spreadArray` + */ +export declare function __spread(...args: any[][]): any[]; + +/** + * Creates an array from array spread. + * + * @param args The ArrayLikes to spread into the resulting array. + * @deprecated since TypeScript 4.2 - Use `__spreadArray` + */ +export declare function __spreadArrays(...args: any[][]): any[]; + +/** + * Spreads the `from` array into the `to` array. + * + * @param pack Replace empty elements with `undefined`. + */ +export declare function __spreadArray(to: any[], from: any[], pack?: boolean): any[]; + +/** + * Creates an object that signals to `__asyncGenerator` that it shouldn't be yielded, + * and instead should be awaited and the resulting value passed back to the generator. + * + * @param v The value to await. + */ +export declare function __await(v: any): any; + +/** + * Converts a generator function into an async generator function, by using `yield __await` + * in place of normal `await`. + * + * @param thisArg The reference to use as the `this` value in the generator function + * @param _arguments The optional arguments array + * @param generator The generator function + */ +export declare function __asyncGenerator(thisArg: any, _arguments: any, generator: Function): any; + +/** + * Used to wrap a potentially async iterator in such a way so that it wraps the result + * of calling iterator methods of `o` in `__await` instances, and then yields the awaited values. + * + * @param o The potentially async iterator. + * @returns A synchronous iterator yielding `__await` instances on every odd invocation + * and returning the awaited `IteratorResult` passed to `next` every even invocation. 
+ */ +export declare function __asyncDelegator(o: any): any; + +/** + * Creates a value async iterator from an `AsyncIterable`, `Iterable` or `ArrayLike` object. + * + * @param o The object. + * @throws {TypeError} If `o` is neither `AsyncIterable`, `Iterable`, nor an `ArrayLike`. + */ +export declare function __asyncValues(o: any): any; + +/** + * Creates a `TemplateStringsArray` frozen object from the `cooked` and `raw` arrays. + * + * @param cooked The cooked possibly-sparse array. + * @param raw The raw string content. + */ +export declare function __makeTemplateObject(cooked: string[], raw: string[]): TemplateStringsArray; + +/** + * Used to shim default and named imports in ECMAScript Modules transpiled to CommonJS. + * + * ```js + * import Default, { Named, Other } from "mod"; + * // or + * import { default as Default, Named, Other } from "mod"; + * ``` + * + * @param mod The CommonJS module exports object. + */ +export declare function __importStar(mod: T): T; + +/** + * Used to shim default imports in ECMAScript Modules transpiled to CommonJS. + * + * ```js + * import Default from "mod"; + * ``` + * + * @param mod The CommonJS module exports object. + */ +export declare function __importDefault(mod: T): T | { default: T }; + +/** + * Emulates reading a private instance field. + * + * @param receiver The instance from which to read the private field. + * @param state A WeakMap containing the private field value for an instance. + * @param kind Either `"f"` for a field, `"a"` for an accessor, or `"m"` for a method. + * + * @throws {TypeError} If `state` doesn't have an entry for `receiver`. + */ +export declare function __classPrivateFieldGet( + receiver: T, + state: { has(o: T): boolean, get(o: T): V | undefined }, + kind?: "f" +): V; + +/** + * Emulates reading a private static field. + * + * @param receiver The object from which to read the private static field. + * @param state The class constructor containing the definition of the static field. 
+ * @param kind Either `"f"` for a field, `"a"` for an accessor, or `"m"` for a method. + * @param f The descriptor that holds the static field value. + * + * @throws {TypeError} If `receiver` is not `state`. + */ +export declare function __classPrivateFieldGet unknown, V>( + receiver: T, + state: T, + kind: "f", + f: { value: V } +): V; + +/** + * Emulates evaluating a private instance "get" accessor. + * + * @param receiver The instance on which to evaluate the private "get" accessor. + * @param state A WeakSet used to verify an instance supports the private "get" accessor. + * @param kind Either `"f"` for a field, `"a"` for an accessor, or `"m"` for a method. + * @param f The "get" accessor function to evaluate. + * + * @throws {TypeError} If `state` doesn't have an entry for `receiver`. + */ +export declare function __classPrivateFieldGet( + receiver: T, + state: { has(o: T): boolean }, + kind: "a", + f: () => V +): V; + +/** + * Emulates evaluating a private static "get" accessor. + * + * @param receiver The object on which to evaluate the private static "get" accessor. + * @param state The class constructor containing the definition of the static "get" accessor. + * @param kind Either `"f"` for a field, `"a"` for an accessor, or `"m"` for a method. + * @param f The "get" accessor function to evaluate. + * + * @throws {TypeError} If `receiver` is not `state`. + */ +export declare function __classPrivateFieldGet unknown, V>( + receiver: T, + state: T, + kind: "a", + f: () => V +): V; + +/** + * Emulates reading a private instance method. + * + * @param receiver The instance from which to read a private method. + * @param state A WeakSet used to verify an instance supports the private method. + * @param kind Either `"f"` for a field, `"a"` for an accessor, or `"m"` for a method. + * @param f The function to return as the private instance method. + * + * @throws {TypeError} If `state` doesn't have an entry for `receiver`. 
+ */ +export declare function __classPrivateFieldGet unknown>( + receiver: T, + state: { has(o: T): boolean }, + kind: "m", + f: V +): V; + +/** + * Emulates reading a private static method. + * + * @param receiver The object from which to read the private static method. + * @param state The class constructor containing the definition of the static method. + * @param kind Either `"f"` for a field, `"a"` for an accessor, or `"m"` for a method. + * @param f The function to return as the private static method. + * + * @throws {TypeError} If `receiver` is not `state`. + */ +export declare function __classPrivateFieldGet unknown, V extends (...args: any[]) => unknown>( + receiver: T, + state: T, + kind: "m", + f: V +): V; + +/** + * Emulates writing to a private instance field. + * + * @param receiver The instance on which to set a private field value. + * @param state A WeakMap used to store the private field value for an instance. + * @param value The value to store in the private field. + * @param kind Either `"f"` for a field, `"a"` for an accessor, or `"m"` for a method. + * + * @throws {TypeError} If `state` doesn't have an entry for `receiver`. + */ +export declare function __classPrivateFieldSet( + receiver: T, + state: { has(o: T): boolean, set(o: T, value: V): unknown }, + value: V, + kind?: "f" +): V; + +/** + * Emulates writing to a private static field. + * + * @param receiver The object on which to set the private static field. + * @param state The class constructor containing the definition of the private static field. + * @param value The value to store in the private field. + * @param kind Either `"f"` for a field, `"a"` for an accessor, or `"m"` for a method. + * @param f The descriptor that holds the static field value. + * + * @throws {TypeError} If `receiver` is not `state`. 
+ */ +export declare function __classPrivateFieldSet unknown, V>( + receiver: T, + state: T, + value: V, + kind: "f", + f: { value: V } +): V; + +/** + * Emulates writing to a private instance "set" accessor. + * + * @param receiver The instance on which to evaluate the private instance "set" accessor. + * @param state A WeakSet used to verify an instance supports the private "set" accessor. + * @param value The value to store in the private accessor. + * @param kind Either `"f"` for a field, `"a"` for an accessor, or `"m"` for a method. + * @param f The "set" accessor function to evaluate. + * + * @throws {TypeError} If `state` doesn't have an entry for `receiver`. + */ +export declare function __classPrivateFieldSet( + receiver: T, + state: { has(o: T): boolean }, + value: V, + kind: "a", + f: (v: V) => void +): V; + +/** + * Emulates writing to a private static "set" accessor. + * + * @param receiver The object on which to evaluate the private static "set" accessor. + * @param state The class constructor containing the definition of the static "set" accessor. + * @param value The value to store in the private field. + * @param kind Either `"f"` for a field, `"a"` for an accessor, or `"m"` for a method. + * @param f The "set" accessor function to evaluate. + * + * @throws {TypeError} If `receiver` is not `state`. + */ +export declare function __classPrivateFieldSet unknown, V>( + receiver: T, + state: T, + value: V, + kind: "a", + f: (v: V) => void +): V; + +/** + * Checks for the existence of a private field/method/accessor. + * + * @param state The class constructor containing the static member, or the WeakMap or WeakSet associated with a private instance member. + * @param receiver The object for which to test the presence of the private member. 
+ */ +export declare function __classPrivateFieldIn( + state: (new (...args: any[]) => unknown) | { has(o: any): boolean }, + receiver: unknown, +): boolean; + +/** + * Creates a re-export binding on `object` with key `objectKey` that references `target[key]`. + * + * @param object The local `exports` object. + * @param target The object to re-export from. + * @param key The property key of `target` to re-export. + * @param objectKey The property key to re-export as. Defaults to `key`. + */ +export declare function __createBinding(object: object, target: object, key: PropertyKey, objectKey?: PropertyKey): void; + +/** + * Adds a disposable resource to a resource-tracking environment object. + * @param env A resource-tracking environment object. + * @param value Either a Disposable or AsyncDisposable object, `null`, or `undefined`. + * @param async When `true`, `AsyncDisposable` resources can be added. When `false`, `AsyncDisposable` resources cannot be added. + * @returns The {@link value} argument. + * + * @throws {TypeError} If {@link value} is not an object, or if either `Symbol.dispose` or `Symbol.asyncDispose` are not + * defined, or if {@link value} does not have an appropriate `Symbol.dispose` or `Symbol.asyncDispose` method. + */ +export declare function __addDisposableResource(env: { stack: { value?: unknown, dispose?: Function, async: boolean }[]; error: unknown; hasError: boolean; }, value: T, async: boolean): T; + +/** + * Disposes all resources in a resource-tracking environment object. + * @param env A resource-tracking environment object. + * @returns A {@link Promise} if any resources in the environment were marked as `async` when added; otherwise, `void`. + * + * @throws {SuppressedError} if an error thrown during disposal would have suppressed a prior error from disposal or the + * error recorded in the resource-tracking environment object. 
+ * @seealso {@link __addDisposableResource} + */ +export declare function __disposeResources(env: { stack: { value?: unknown, dispose?: Function, async: boolean }[]; error: unknown; hasError: boolean; }): any; + +/** + * Transforms a relative import specifier ending in a non-declaration TypeScript file extension to its JavaScript file extension counterpart. + * @param path The import specifier. + * @param preserveJsx Causes '*.tsx' to transform to '*.jsx' instead of '*.js'. Should be true when `--jsx` is set to `preserve`. + */ +export declare function __rewriteRelativeImportExtension(path: string, preserveJsx?: boolean): string; \ No newline at end of file diff --git a/amplify/functions/fetchDocuments/node_modules/tslib/tslib.es6.html b/amplify/functions/fetchDocuments/node_modules/tslib/tslib.es6.html new file mode 100644 index 0000000..b122e41 --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/tslib/tslib.es6.html @@ -0,0 +1 @@ + \ No newline at end of file diff --git a/amplify/functions/fetchDocuments/node_modules/tslib/tslib.es6.js b/amplify/functions/fetchDocuments/node_modules/tslib/tslib.es6.js new file mode 100644 index 0000000..6c1739b --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/tslib/tslib.es6.js @@ -0,0 +1,402 @@ +/****************************************************************************** +Copyright (c) Microsoft Corporation. + +Permission to use, copy, modify, and/or distribute this software for any +purpose with or without fee is hereby granted. + +THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES WITH +REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF MERCHANTABILITY +AND FITNESS. 
IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR ANY SPECIAL, DIRECT, +INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES WHATSOEVER RESULTING FROM +LOSS OF USE, DATA OR PROFITS, WHETHER IN AN ACTION OF CONTRACT, NEGLIGENCE OR +OTHER TORTIOUS ACTION, ARISING OUT OF OR IN CONNECTION WITH THE USE OR +PERFORMANCE OF THIS SOFTWARE. +***************************************************************************** */ +/* global Reflect, Promise, SuppressedError, Symbol, Iterator */ + +var extendStatics = function(d, b) { + extendStatics = Object.setPrototypeOf || + ({ __proto__: [] } instanceof Array && function (d, b) { d.__proto__ = b; }) || + function (d, b) { for (var p in b) if (Object.prototype.hasOwnProperty.call(b, p)) d[p] = b[p]; }; + return extendStatics(d, b); +}; + +export function __extends(d, b) { + if (typeof b !== "function" && b !== null) + throw new TypeError("Class extends value " + String(b) + " is not a constructor or null"); + extendStatics(d, b); + function __() { this.constructor = d; } + d.prototype = b === null ? Object.create(b) : (__.prototype = b.prototype, new __()); +} + +export var __assign = function() { + __assign = Object.assign || function __assign(t) { + for (var s, i = 1, n = arguments.length; i < n; i++) { + s = arguments[i]; + for (var p in s) if (Object.prototype.hasOwnProperty.call(s, p)) t[p] = s[p]; + } + return t; + } + return __assign.apply(this, arguments); +} + +export function __rest(s, e) { + var t = {}; + for (var p in s) if (Object.prototype.hasOwnProperty.call(s, p) && e.indexOf(p) < 0) + t[p] = s[p]; + if (s != null && typeof Object.getOwnPropertySymbols === "function") + for (var i = 0, p = Object.getOwnPropertySymbols(s); i < p.length; i++) { + if (e.indexOf(p[i]) < 0 && Object.prototype.propertyIsEnumerable.call(s, p[i])) + t[p[i]] = s[p[i]]; + } + return t; +} + +export function __decorate(decorators, target, key, desc) { + var c = arguments.length, r = c < 3 ? target : desc === null ? 
desc = Object.getOwnPropertyDescriptor(target, key) : desc, d; + if (typeof Reflect === "object" && typeof Reflect.decorate === "function") r = Reflect.decorate(decorators, target, key, desc); + else for (var i = decorators.length - 1; i >= 0; i--) if (d = decorators[i]) r = (c < 3 ? d(r) : c > 3 ? d(target, key, r) : d(target, key)) || r; + return c > 3 && r && Object.defineProperty(target, key, r), r; +} + +export function __param(paramIndex, decorator) { + return function (target, key) { decorator(target, key, paramIndex); } +} + +export function __esDecorate(ctor, descriptorIn, decorators, contextIn, initializers, extraInitializers) { + function accept(f) { if (f !== void 0 && typeof f !== "function") throw new TypeError("Function expected"); return f; } + var kind = contextIn.kind, key = kind === "getter" ? "get" : kind === "setter" ? "set" : "value"; + var target = !descriptorIn && ctor ? contextIn["static"] ? ctor : ctor.prototype : null; + var descriptor = descriptorIn || (target ? Object.getOwnPropertyDescriptor(target, contextIn.name) : {}); + var _, done = false; + for (var i = decorators.length - 1; i >= 0; i--) { + var context = {}; + for (var p in contextIn) context[p] = p === "access" ? {} : contextIn[p]; + for (var p in contextIn.access) context.access[p] = contextIn.access[p]; + context.addInitializer = function (f) { if (done) throw new TypeError("Cannot add initializers after decoration has completed"); extraInitializers.push(accept(f || null)); }; + var result = (0, decorators[i])(kind === "accessor" ? 
{ get: descriptor.get, set: descriptor.set } : descriptor[key], context); + if (kind === "accessor") { + if (result === void 0) continue; + if (result === null || typeof result !== "object") throw new TypeError("Object expected"); + if (_ = accept(result.get)) descriptor.get = _; + if (_ = accept(result.set)) descriptor.set = _; + if (_ = accept(result.init)) initializers.unshift(_); + } + else if (_ = accept(result)) { + if (kind === "field") initializers.unshift(_); + else descriptor[key] = _; + } + } + if (target) Object.defineProperty(target, contextIn.name, descriptor); + done = true; +}; + +export function __runInitializers(thisArg, initializers, value) { + var useValue = arguments.length > 2; + for (var i = 0; i < initializers.length; i++) { + value = useValue ? initializers[i].call(thisArg, value) : initializers[i].call(thisArg); + } + return useValue ? value : void 0; +}; + +export function __propKey(x) { + return typeof x === "symbol" ? x : "".concat(x); +}; + +export function __setFunctionName(f, name, prefix) { + if (typeof name === "symbol") name = name.description ? "[".concat(name.description, "]") : ""; + return Object.defineProperty(f, "name", { configurable: true, value: prefix ? "".concat(prefix, " ", name) : name }); +}; + +export function __metadata(metadataKey, metadataValue) { + if (typeof Reflect === "object" && typeof Reflect.metadata === "function") return Reflect.metadata(metadataKey, metadataValue); +} + +export function __awaiter(thisArg, _arguments, P, generator) { + function adopt(value) { return value instanceof P ? value : new P(function (resolve) { resolve(value); }); } + return new (P || (P = Promise))(function (resolve, reject) { + function fulfilled(value) { try { step(generator.next(value)); } catch (e) { reject(e); } } + function rejected(value) { try { step(generator["throw"](value)); } catch (e) { reject(e); } } + function step(result) { result.done ? 
resolve(result.value) : adopt(result.value).then(fulfilled, rejected); } + step((generator = generator.apply(thisArg, _arguments || [])).next()); + }); +} + +export function __generator(thisArg, body) { + var _ = { label: 0, sent: function() { if (t[0] & 1) throw t[1]; return t[1]; }, trys: [], ops: [] }, f, y, t, g = Object.create((typeof Iterator === "function" ? Iterator : Object).prototype); + return g.next = verb(0), g["throw"] = verb(1), g["return"] = verb(2), typeof Symbol === "function" && (g[Symbol.iterator] = function() { return this; }), g; + function verb(n) { return function (v) { return step([n, v]); }; } + function step(op) { + if (f) throw new TypeError("Generator is already executing."); + while (g && (g = 0, op[0] && (_ = 0)), _) try { + if (f = 1, y && (t = op[0] & 2 ? y["return"] : op[0] ? y["throw"] || ((t = y["return"]) && t.call(y), 0) : y.next) && !(t = t.call(y, op[1])).done) return t; + if (y = 0, t) op = [op[0] & 2, t.value]; + switch (op[0]) { + case 0: case 1: t = op; break; + case 4: _.label++; return { value: op[1], done: false }; + case 5: _.label++; y = op[1]; op = [0]; continue; + case 7: op = _.ops.pop(); _.trys.pop(); continue; + default: + if (!(t = _.trys, t = t.length > 0 && t[t.length - 1]) && (op[0] === 6 || op[0] === 2)) { _ = 0; continue; } + if (op[0] === 3 && (!t || (op[1] > t[0] && op[1] < t[3]))) { _.label = op[1]; break; } + if (op[0] === 6 && _.label < t[1]) { _.label = t[1]; t = op; break; } + if (t && _.label < t[2]) { _.label = t[2]; _.ops.push(op); break; } + if (t[2]) _.ops.pop(); + _.trys.pop(); continue; + } + op = body.call(thisArg, _); + } catch (e) { op = [6, e]; y = 0; } finally { f = t = 0; } + if (op[0] & 5) throw op[1]; return { value: op[0] ? op[1] : void 0, done: true }; + } +} + +export var __createBinding = Object.create ? (function(o, m, k, k2) { + if (k2 === undefined) k2 = k; + var desc = Object.getOwnPropertyDescriptor(m, k); + if (!desc || ("get" in desc ? 
!m.__esModule : desc.writable || desc.configurable)) { + desc = { enumerable: true, get: function() { return m[k]; } }; + } + Object.defineProperty(o, k2, desc); +}) : (function(o, m, k, k2) { + if (k2 === undefined) k2 = k; + o[k2] = m[k]; +}); + +export function __exportStar(m, o) { + for (var p in m) if (p !== "default" && !Object.prototype.hasOwnProperty.call(o, p)) __createBinding(o, m, p); +} + +export function __values(o) { + var s = typeof Symbol === "function" && Symbol.iterator, m = s && o[s], i = 0; + if (m) return m.call(o); + if (o && typeof o.length === "number") return { + next: function () { + if (o && i >= o.length) o = void 0; + return { value: o && o[i++], done: !o }; + } + }; + throw new TypeError(s ? "Object is not iterable." : "Symbol.iterator is not defined."); +} + +export function __read(o, n) { + var m = typeof Symbol === "function" && o[Symbol.iterator]; + if (!m) return o; + var i = m.call(o), r, ar = [], e; + try { + while ((n === void 0 || n-- > 0) && !(r = i.next()).done) ar.push(r.value); + } + catch (error) { e = { error: error }; } + finally { + try { + if (r && !r.done && (m = i["return"])) m.call(i); + } + finally { if (e) throw e.error; } + } + return ar; +} + +/** @deprecated */ +export function __spread() { + for (var ar = [], i = 0; i < arguments.length; i++) + ar = ar.concat(__read(arguments[i])); + return ar; +} + +/** @deprecated */ +export function __spreadArrays() { + for (var s = 0, i = 0, il = arguments.length; i < il; i++) s += arguments[i].length; + for (var r = Array(s), k = 0, i = 0; i < il; i++) + for (var a = arguments[i], j = 0, jl = a.length; j < jl; j++, k++) + r[k] = a[j]; + return r; +} + +export function __spreadArray(to, from, pack) { + if (pack || arguments.length === 2) for (var i = 0, l = from.length, ar; i < l; i++) { + if (ar || !(i in from)) { + if (!ar) ar = Array.prototype.slice.call(from, 0, i); + ar[i] = from[i]; + } + } + return to.concat(ar || Array.prototype.slice.call(from)); +} + +export 
function __await(v) { + return this instanceof __await ? (this.v = v, this) : new __await(v); +} + +export function __asyncGenerator(thisArg, _arguments, generator) { + if (!Symbol.asyncIterator) throw new TypeError("Symbol.asyncIterator is not defined."); + var g = generator.apply(thisArg, _arguments || []), i, q = []; + return i = Object.create((typeof AsyncIterator === "function" ? AsyncIterator : Object).prototype), verb("next"), verb("throw"), verb("return", awaitReturn), i[Symbol.asyncIterator] = function () { return this; }, i; + function awaitReturn(f) { return function (v) { return Promise.resolve(v).then(f, reject); }; } + function verb(n, f) { if (g[n]) { i[n] = function (v) { return new Promise(function (a, b) { q.push([n, v, a, b]) > 1 || resume(n, v); }); }; if (f) i[n] = f(i[n]); } } + function resume(n, v) { try { step(g[n](v)); } catch (e) { settle(q[0][3], e); } } + function step(r) { r.value instanceof __await ? Promise.resolve(r.value.v).then(fulfill, reject) : settle(q[0][2], r); } + function fulfill(value) { resume("next", value); } + function reject(value) { resume("throw", value); } + function settle(f, v) { if (f(v), q.shift(), q.length) resume(q[0][0], q[0][1]); } +} + +export function __asyncDelegator(o) { + var i, p; + return i = {}, verb("next"), verb("throw", function (e) { throw e; }), verb("return"), i[Symbol.iterator] = function () { return this; }, i; + function verb(n, f) { i[n] = o[n] ? function (v) { return (p = !p) ? { value: __await(o[n](v)), done: false } : f ? f(v) : v; } : f; } +} + +export function __asyncValues(o) { + if (!Symbol.asyncIterator) throw new TypeError("Symbol.asyncIterator is not defined."); + var m = o[Symbol.asyncIterator], i; + return m ? m.call(o) : (o = typeof __values === "function" ? 
__values(o) : o[Symbol.iterator](), i = {}, verb("next"), verb("throw"), verb("return"), i[Symbol.asyncIterator] = function () { return this; }, i); + function verb(n) { i[n] = o[n] && function (v) { return new Promise(function (resolve, reject) { v = o[n](v), settle(resolve, reject, v.done, v.value); }); }; } + function settle(resolve, reject, d, v) { Promise.resolve(v).then(function(v) { resolve({ value: v, done: d }); }, reject); } +} + +export function __makeTemplateObject(cooked, raw) { + if (Object.defineProperty) { Object.defineProperty(cooked, "raw", { value: raw }); } else { cooked.raw = raw; } + return cooked; +}; + +var __setModuleDefault = Object.create ? (function(o, v) { + Object.defineProperty(o, "default", { enumerable: true, value: v }); +}) : function(o, v) { + o["default"] = v; +}; + +var ownKeys = function(o) { + ownKeys = Object.getOwnPropertyNames || function (o) { + var ar = []; + for (var k in o) if (Object.prototype.hasOwnProperty.call(o, k)) ar[ar.length] = k; + return ar; + }; + return ownKeys(o); +}; + +export function __importStar(mod) { + if (mod && mod.__esModule) return mod; + var result = {}; + if (mod != null) for (var k = ownKeys(mod), i = 0; i < k.length; i++) if (k[i] !== "default") __createBinding(result, mod, k[i]); + __setModuleDefault(result, mod); + return result; +} + +export function __importDefault(mod) { + return (mod && mod.__esModule) ? mod : { default: mod }; +} + +export function __classPrivateFieldGet(receiver, state, kind, f) { + if (kind === "a" && !f) throw new TypeError("Private accessor was defined without a getter"); + if (typeof state === "function" ? receiver !== state || !f : !state.has(receiver)) throw new TypeError("Cannot read private member from an object whose class did not declare it"); + return kind === "m" ? f : kind === "a" ? f.call(receiver) : f ? 
f.value : state.get(receiver); +} + +export function __classPrivateFieldSet(receiver, state, value, kind, f) { + if (kind === "m") throw new TypeError("Private method is not writable"); + if (kind === "a" && !f) throw new TypeError("Private accessor was defined without a setter"); + if (typeof state === "function" ? receiver !== state || !f : !state.has(receiver)) throw new TypeError("Cannot write private member to an object whose class did not declare it"); + return (kind === "a" ? f.call(receiver, value) : f ? f.value = value : state.set(receiver, value)), value; +} + +export function __classPrivateFieldIn(state, receiver) { + if (receiver === null || (typeof receiver !== "object" && typeof receiver !== "function")) throw new TypeError("Cannot use 'in' operator on non-object"); + return typeof state === "function" ? receiver === state : state.has(receiver); +} + +export function __addDisposableResource(env, value, async) { + if (value !== null && value !== void 0) { + if (typeof value !== "object" && typeof value !== "function") throw new TypeError("Object expected."); + var dispose, inner; + if (async) { + if (!Symbol.asyncDispose) throw new TypeError("Symbol.asyncDispose is not defined."); + dispose = value[Symbol.asyncDispose]; + } + if (dispose === void 0) { + if (!Symbol.dispose) throw new TypeError("Symbol.dispose is not defined."); + dispose = value[Symbol.dispose]; + if (async) inner = dispose; + } + if (typeof dispose !== "function") throw new TypeError("Object not disposable."); + if (inner) dispose = function() { try { inner.call(this); } catch (e) { return Promise.reject(e); } }; + env.stack.push({ value: value, dispose: dispose, async: async }); + } + else if (async) { + env.stack.push({ async: true }); + } + return value; + +} + +var _SuppressedError = typeof SuppressedError === "function" ? 
SuppressedError : function (error, suppressed, message) { + var e = new Error(message); + return e.name = "SuppressedError", e.error = error, e.suppressed = suppressed, e; +}; + +export function __disposeResources(env) { + function fail(e) { + env.error = env.hasError ? new _SuppressedError(e, env.error, "An error was suppressed during disposal.") : e; + env.hasError = true; + } + var r, s = 0; + function next() { + while (r = env.stack.pop()) { + try { + if (!r.async && s === 1) return s = 0, env.stack.push(r), Promise.resolve().then(next); + if (r.dispose) { + var result = r.dispose.call(r.value); + if (r.async) return s |= 2, Promise.resolve(result).then(next, function(e) { fail(e); return next(); }); + } + else s |= 1; + } + catch (e) { + fail(e); + } + } + if (s === 1) return env.hasError ? Promise.reject(env.error) : Promise.resolve(); + if (env.hasError) throw env.error; + } + return next(); +} + +export function __rewriteRelativeImportExtension(path, preserveJsx) { + if (typeof path === "string" && /^\.\.?\//.test(path)) { + return path.replace(/\.(tsx)$|((?:\.d)?)((?:\.[^./]+?)?)\.([cm]?)ts$/i, function (m, tsx, d, ext, cm) { + return tsx ? preserveJsx ? ".jsx" : ".js" : d && (!ext || !cm) ? m : (d + ext + "." 
+ cm.toLowerCase() + "js"); + }); + } + return path; +} + +export default { + __extends: __extends, + __assign: __assign, + __rest: __rest, + __decorate: __decorate, + __param: __param, + __esDecorate: __esDecorate, + __runInitializers: __runInitializers, + __propKey: __propKey, + __setFunctionName: __setFunctionName, + __metadata: __metadata, + __awaiter: __awaiter, + __generator: __generator, + __createBinding: __createBinding, + __exportStar: __exportStar, + __values: __values, + __read: __read, + __spread: __spread, + __spreadArrays: __spreadArrays, + __spreadArray: __spreadArray, + __await: __await, + __asyncGenerator: __asyncGenerator, + __asyncDelegator: __asyncDelegator, + __asyncValues: __asyncValues, + __makeTemplateObject: __makeTemplateObject, + __importStar: __importStar, + __importDefault: __importDefault, + __classPrivateFieldGet: __classPrivateFieldGet, + __classPrivateFieldSet: __classPrivateFieldSet, + __classPrivateFieldIn: __classPrivateFieldIn, + __addDisposableResource: __addDisposableResource, + __disposeResources: __disposeResources, + __rewriteRelativeImportExtension: __rewriteRelativeImportExtension, +}; diff --git a/amplify/functions/fetchDocuments/node_modules/tslib/tslib.es6.mjs b/amplify/functions/fetchDocuments/node_modules/tslib/tslib.es6.mjs new file mode 100644 index 0000000..c17990a --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/tslib/tslib.es6.mjs @@ -0,0 +1,401 @@ +/****************************************************************************** +Copyright (c) Microsoft Corporation. + +Permission to use, copy, modify, and/or distribute this software for any +purpose with or without fee is hereby granted. + +THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES WITH +REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF MERCHANTABILITY +AND FITNESS. 
IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR ANY SPECIAL, DIRECT, +INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES WHATSOEVER RESULTING FROM +LOSS OF USE, DATA OR PROFITS, WHETHER IN AN ACTION OF CONTRACT, NEGLIGENCE OR +OTHER TORTIOUS ACTION, ARISING OUT OF OR IN CONNECTION WITH THE USE OR +PERFORMANCE OF THIS SOFTWARE. +***************************************************************************** */ +/* global Reflect, Promise, SuppressedError, Symbol, Iterator */ + +var extendStatics = function(d, b) { + extendStatics = Object.setPrototypeOf || + ({ __proto__: [] } instanceof Array && function (d, b) { d.__proto__ = b; }) || + function (d, b) { for (var p in b) if (Object.prototype.hasOwnProperty.call(b, p)) d[p] = b[p]; }; + return extendStatics(d, b); +}; + +export function __extends(d, b) { + if (typeof b !== "function" && b !== null) + throw new TypeError("Class extends value " + String(b) + " is not a constructor or null"); + extendStatics(d, b); + function __() { this.constructor = d; } + d.prototype = b === null ? Object.create(b) : (__.prototype = b.prototype, new __()); +} + +export var __assign = function() { + __assign = Object.assign || function __assign(t) { + for (var s, i = 1, n = arguments.length; i < n; i++) { + s = arguments[i]; + for (var p in s) if (Object.prototype.hasOwnProperty.call(s, p)) t[p] = s[p]; + } + return t; + } + return __assign.apply(this, arguments); +} + +export function __rest(s, e) { + var t = {}; + for (var p in s) if (Object.prototype.hasOwnProperty.call(s, p) && e.indexOf(p) < 0) + t[p] = s[p]; + if (s != null && typeof Object.getOwnPropertySymbols === "function") + for (var i = 0, p = Object.getOwnPropertySymbols(s); i < p.length; i++) { + if (e.indexOf(p[i]) < 0 && Object.prototype.propertyIsEnumerable.call(s, p[i])) + t[p[i]] = s[p[i]]; + } + return t; +} + +export function __decorate(decorators, target, key, desc) { + var c = arguments.length, r = c < 3 ? target : desc === null ? 
desc = Object.getOwnPropertyDescriptor(target, key) : desc, d; + if (typeof Reflect === "object" && typeof Reflect.decorate === "function") r = Reflect.decorate(decorators, target, key, desc); + else for (var i = decorators.length - 1; i >= 0; i--) if (d = decorators[i]) r = (c < 3 ? d(r) : c > 3 ? d(target, key, r) : d(target, key)) || r; + return c > 3 && r && Object.defineProperty(target, key, r), r; +} + +export function __param(paramIndex, decorator) { + return function (target, key) { decorator(target, key, paramIndex); } +} + +export function __esDecorate(ctor, descriptorIn, decorators, contextIn, initializers, extraInitializers) { + function accept(f) { if (f !== void 0 && typeof f !== "function") throw new TypeError("Function expected"); return f; } + var kind = contextIn.kind, key = kind === "getter" ? "get" : kind === "setter" ? "set" : "value"; + var target = !descriptorIn && ctor ? contextIn["static"] ? ctor : ctor.prototype : null; + var descriptor = descriptorIn || (target ? Object.getOwnPropertyDescriptor(target, contextIn.name) : {}); + var _, done = false; + for (var i = decorators.length - 1; i >= 0; i--) { + var context = {}; + for (var p in contextIn) context[p] = p === "access" ? {} : contextIn[p]; + for (var p in contextIn.access) context.access[p] = contextIn.access[p]; + context.addInitializer = function (f) { if (done) throw new TypeError("Cannot add initializers after decoration has completed"); extraInitializers.push(accept(f || null)); }; + var result = (0, decorators[i])(kind === "accessor" ? 
{ get: descriptor.get, set: descriptor.set } : descriptor[key], context); + if (kind === "accessor") { + if (result === void 0) continue; + if (result === null || typeof result !== "object") throw new TypeError("Object expected"); + if (_ = accept(result.get)) descriptor.get = _; + if (_ = accept(result.set)) descriptor.set = _; + if (_ = accept(result.init)) initializers.unshift(_); + } + else if (_ = accept(result)) { + if (kind === "field") initializers.unshift(_); + else descriptor[key] = _; + } + } + if (target) Object.defineProperty(target, contextIn.name, descriptor); + done = true; +}; + +export function __runInitializers(thisArg, initializers, value) { + var useValue = arguments.length > 2; + for (var i = 0; i < initializers.length; i++) { + value = useValue ? initializers[i].call(thisArg, value) : initializers[i].call(thisArg); + } + return useValue ? value : void 0; +}; + +export function __propKey(x) { + return typeof x === "symbol" ? x : "".concat(x); +}; + +export function __setFunctionName(f, name, prefix) { + if (typeof name === "symbol") name = name.description ? "[".concat(name.description, "]") : ""; + return Object.defineProperty(f, "name", { configurable: true, value: prefix ? "".concat(prefix, " ", name) : name }); +}; + +export function __metadata(metadataKey, metadataValue) { + if (typeof Reflect === "object" && typeof Reflect.metadata === "function") return Reflect.metadata(metadataKey, metadataValue); +} + +export function __awaiter(thisArg, _arguments, P, generator) { + function adopt(value) { return value instanceof P ? value : new P(function (resolve) { resolve(value); }); } + return new (P || (P = Promise))(function (resolve, reject) { + function fulfilled(value) { try { step(generator.next(value)); } catch (e) { reject(e); } } + function rejected(value) { try { step(generator["throw"](value)); } catch (e) { reject(e); } } + function step(result) { result.done ? 
resolve(result.value) : adopt(result.value).then(fulfilled, rejected); } + step((generator = generator.apply(thisArg, _arguments || [])).next()); + }); +} + +export function __generator(thisArg, body) { + var _ = { label: 0, sent: function() { if (t[0] & 1) throw t[1]; return t[1]; }, trys: [], ops: [] }, f, y, t, g = Object.create((typeof Iterator === "function" ? Iterator : Object).prototype); + return g.next = verb(0), g["throw"] = verb(1), g["return"] = verb(2), typeof Symbol === "function" && (g[Symbol.iterator] = function() { return this; }), g; + function verb(n) { return function (v) { return step([n, v]); }; } + function step(op) { + if (f) throw new TypeError("Generator is already executing."); + while (g && (g = 0, op[0] && (_ = 0)), _) try { + if (f = 1, y && (t = op[0] & 2 ? y["return"] : op[0] ? y["throw"] || ((t = y["return"]) && t.call(y), 0) : y.next) && !(t = t.call(y, op[1])).done) return t; + if (y = 0, t) op = [op[0] & 2, t.value]; + switch (op[0]) { + case 0: case 1: t = op; break; + case 4: _.label++; return { value: op[1], done: false }; + case 5: _.label++; y = op[1]; op = [0]; continue; + case 7: op = _.ops.pop(); _.trys.pop(); continue; + default: + if (!(t = _.trys, t = t.length > 0 && t[t.length - 1]) && (op[0] === 6 || op[0] === 2)) { _ = 0; continue; } + if (op[0] === 3 && (!t || (op[1] > t[0] && op[1] < t[3]))) { _.label = op[1]; break; } + if (op[0] === 6 && _.label < t[1]) { _.label = t[1]; t = op; break; } + if (t && _.label < t[2]) { _.label = t[2]; _.ops.push(op); break; } + if (t[2]) _.ops.pop(); + _.trys.pop(); continue; + } + op = body.call(thisArg, _); + } catch (e) { op = [6, e]; y = 0; } finally { f = t = 0; } + if (op[0] & 5) throw op[1]; return { value: op[0] ? op[1] : void 0, done: true }; + } +} + +export var __createBinding = Object.create ? (function(o, m, k, k2) { + if (k2 === undefined) k2 = k; + var desc = Object.getOwnPropertyDescriptor(m, k); + if (!desc || ("get" in desc ? 
!m.__esModule : desc.writable || desc.configurable)) { + desc = { enumerable: true, get: function() { return m[k]; } }; + } + Object.defineProperty(o, k2, desc); +}) : (function(o, m, k, k2) { + if (k2 === undefined) k2 = k; + o[k2] = m[k]; +}); + +export function __exportStar(m, o) { + for (var p in m) if (p !== "default" && !Object.prototype.hasOwnProperty.call(o, p)) __createBinding(o, m, p); +} + +export function __values(o) { + var s = typeof Symbol === "function" && Symbol.iterator, m = s && o[s], i = 0; + if (m) return m.call(o); + if (o && typeof o.length === "number") return { + next: function () { + if (o && i >= o.length) o = void 0; + return { value: o && o[i++], done: !o }; + } + }; + throw new TypeError(s ? "Object is not iterable." : "Symbol.iterator is not defined."); +} + +export function __read(o, n) { + var m = typeof Symbol === "function" && o[Symbol.iterator]; + if (!m) return o; + var i = m.call(o), r, ar = [], e; + try { + while ((n === void 0 || n-- > 0) && !(r = i.next()).done) ar.push(r.value); + } + catch (error) { e = { error: error }; } + finally { + try { + if (r && !r.done && (m = i["return"])) m.call(i); + } + finally { if (e) throw e.error; } + } + return ar; +} + +/** @deprecated */ +export function __spread() { + for (var ar = [], i = 0; i < arguments.length; i++) + ar = ar.concat(__read(arguments[i])); + return ar; +} + +/** @deprecated */ +export function __spreadArrays() { + for (var s = 0, i = 0, il = arguments.length; i < il; i++) s += arguments[i].length; + for (var r = Array(s), k = 0, i = 0; i < il; i++) + for (var a = arguments[i], j = 0, jl = a.length; j < jl; j++, k++) + r[k] = a[j]; + return r; +} + +export function __spreadArray(to, from, pack) { + if (pack || arguments.length === 2) for (var i = 0, l = from.length, ar; i < l; i++) { + if (ar || !(i in from)) { + if (!ar) ar = Array.prototype.slice.call(from, 0, i); + ar[i] = from[i]; + } + } + return to.concat(ar || Array.prototype.slice.call(from)); +} + +export 
function __await(v) { + return this instanceof __await ? (this.v = v, this) : new __await(v); +} + +export function __asyncGenerator(thisArg, _arguments, generator) { + if (!Symbol.asyncIterator) throw new TypeError("Symbol.asyncIterator is not defined."); + var g = generator.apply(thisArg, _arguments || []), i, q = []; + return i = Object.create((typeof AsyncIterator === "function" ? AsyncIterator : Object).prototype), verb("next"), verb("throw"), verb("return", awaitReturn), i[Symbol.asyncIterator] = function () { return this; }, i; + function awaitReturn(f) { return function (v) { return Promise.resolve(v).then(f, reject); }; } + function verb(n, f) { if (g[n]) { i[n] = function (v) { return new Promise(function (a, b) { q.push([n, v, a, b]) > 1 || resume(n, v); }); }; if (f) i[n] = f(i[n]); } } + function resume(n, v) { try { step(g[n](v)); } catch (e) { settle(q[0][3], e); } } + function step(r) { r.value instanceof __await ? Promise.resolve(r.value.v).then(fulfill, reject) : settle(q[0][2], r); } + function fulfill(value) { resume("next", value); } + function reject(value) { resume("throw", value); } + function settle(f, v) { if (f(v), q.shift(), q.length) resume(q[0][0], q[0][1]); } +} + +export function __asyncDelegator(o) { + var i, p; + return i = {}, verb("next"), verb("throw", function (e) { throw e; }), verb("return"), i[Symbol.iterator] = function () { return this; }, i; + function verb(n, f) { i[n] = o[n] ? function (v) { return (p = !p) ? { value: __await(o[n](v)), done: false } : f ? f(v) : v; } : f; } +} + +export function __asyncValues(o) { + if (!Symbol.asyncIterator) throw new TypeError("Symbol.asyncIterator is not defined."); + var m = o[Symbol.asyncIterator], i; + return m ? m.call(o) : (o = typeof __values === "function" ? 
__values(o) : o[Symbol.iterator](), i = {}, verb("next"), verb("throw"), verb("return"), i[Symbol.asyncIterator] = function () { return this; }, i); + function verb(n) { i[n] = o[n] && function (v) { return new Promise(function (resolve, reject) { v = o[n](v), settle(resolve, reject, v.done, v.value); }); }; } + function settle(resolve, reject, d, v) { Promise.resolve(v).then(function(v) { resolve({ value: v, done: d }); }, reject); } +} + +export function __makeTemplateObject(cooked, raw) { + if (Object.defineProperty) { Object.defineProperty(cooked, "raw", { value: raw }); } else { cooked.raw = raw; } + return cooked; +}; + +var __setModuleDefault = Object.create ? (function(o, v) { + Object.defineProperty(o, "default", { enumerable: true, value: v }); +}) : function(o, v) { + o["default"] = v; +}; + +var ownKeys = function(o) { + ownKeys = Object.getOwnPropertyNames || function (o) { + var ar = []; + for (var k in o) if (Object.prototype.hasOwnProperty.call(o, k)) ar[ar.length] = k; + return ar; + }; + return ownKeys(o); +}; + +export function __importStar(mod) { + if (mod && mod.__esModule) return mod; + var result = {}; + if (mod != null) for (var k = ownKeys(mod), i = 0; i < k.length; i++) if (k[i] !== "default") __createBinding(result, mod, k[i]); + __setModuleDefault(result, mod); + return result; +} + +export function __importDefault(mod) { + return (mod && mod.__esModule) ? mod : { default: mod }; +} + +export function __classPrivateFieldGet(receiver, state, kind, f) { + if (kind === "a" && !f) throw new TypeError("Private accessor was defined without a getter"); + if (typeof state === "function" ? receiver !== state || !f : !state.has(receiver)) throw new TypeError("Cannot read private member from an object whose class did not declare it"); + return kind === "m" ? f : kind === "a" ? f.call(receiver) : f ? 
f.value : state.get(receiver); +} + +export function __classPrivateFieldSet(receiver, state, value, kind, f) { + if (kind === "m") throw new TypeError("Private method is not writable"); + if (kind === "a" && !f) throw new TypeError("Private accessor was defined without a setter"); + if (typeof state === "function" ? receiver !== state || !f : !state.has(receiver)) throw new TypeError("Cannot write private member to an object whose class did not declare it"); + return (kind === "a" ? f.call(receiver, value) : f ? f.value = value : state.set(receiver, value)), value; +} + +export function __classPrivateFieldIn(state, receiver) { + if (receiver === null || (typeof receiver !== "object" && typeof receiver !== "function")) throw new TypeError("Cannot use 'in' operator on non-object"); + return typeof state === "function" ? receiver === state : state.has(receiver); +} + +export function __addDisposableResource(env, value, async) { + if (value !== null && value !== void 0) { + if (typeof value !== "object" && typeof value !== "function") throw new TypeError("Object expected."); + var dispose, inner; + if (async) { + if (!Symbol.asyncDispose) throw new TypeError("Symbol.asyncDispose is not defined."); + dispose = value[Symbol.asyncDispose]; + } + if (dispose === void 0) { + if (!Symbol.dispose) throw new TypeError("Symbol.dispose is not defined."); + dispose = value[Symbol.dispose]; + if (async) inner = dispose; + } + if (typeof dispose !== "function") throw new TypeError("Object not disposable."); + if (inner) dispose = function() { try { inner.call(this); } catch (e) { return Promise.reject(e); } }; + env.stack.push({ value: value, dispose: dispose, async: async }); + } + else if (async) { + env.stack.push({ async: true }); + } + return value; +} + +var _SuppressedError = typeof SuppressedError === "function" ? 
SuppressedError : function (error, suppressed, message) { + var e = new Error(message); + return e.name = "SuppressedError", e.error = error, e.suppressed = suppressed, e; +}; + +export function __disposeResources(env) { + function fail(e) { + env.error = env.hasError ? new _SuppressedError(e, env.error, "An error was suppressed during disposal.") : e; + env.hasError = true; + } + var r, s = 0; + function next() { + while (r = env.stack.pop()) { + try { + if (!r.async && s === 1) return s = 0, env.stack.push(r), Promise.resolve().then(next); + if (r.dispose) { + var result = r.dispose.call(r.value); + if (r.async) return s |= 2, Promise.resolve(result).then(next, function(e) { fail(e); return next(); }); + } + else s |= 1; + } + catch (e) { + fail(e); + } + } + if (s === 1) return env.hasError ? Promise.reject(env.error) : Promise.resolve(); + if (env.hasError) throw env.error; + } + return next(); +} + +export function __rewriteRelativeImportExtension(path, preserveJsx) { + if (typeof path === "string" && /^\.\.?\//.test(path)) { + return path.replace(/\.(tsx)$|((?:\.d)?)((?:\.[^./]+?)?)\.([cm]?)ts$/i, function (m, tsx, d, ext, cm) { + return tsx ? preserveJsx ? ".jsx" : ".js" : d && (!ext || !cm) ? m : (d + ext + "." 
+ cm.toLowerCase() + "js"); + }); + } + return path; +} + +export default { + __extends, + __assign, + __rest, + __decorate, + __param, + __esDecorate, + __runInitializers, + __propKey, + __setFunctionName, + __metadata, + __awaiter, + __generator, + __createBinding, + __exportStar, + __values, + __read, + __spread, + __spreadArrays, + __spreadArray, + __await, + __asyncGenerator, + __asyncDelegator, + __asyncValues, + __makeTemplateObject, + __importStar, + __importDefault, + __classPrivateFieldGet, + __classPrivateFieldSet, + __classPrivateFieldIn, + __addDisposableResource, + __disposeResources, + __rewriteRelativeImportExtension, +}; diff --git a/amplify/functions/fetchDocuments/node_modules/tslib/tslib.html b/amplify/functions/fetchDocuments/node_modules/tslib/tslib.html new file mode 100644 index 0000000..44c9ba5 --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/tslib/tslib.html @@ -0,0 +1 @@ + \ No newline at end of file diff --git a/amplify/functions/fetchDocuments/node_modules/tslib/tslib.js b/amplify/functions/fetchDocuments/node_modules/tslib/tslib.js new file mode 100644 index 0000000..5e12ace --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/tslib/tslib.js @@ -0,0 +1,484 @@ +/****************************************************************************** +Copyright (c) Microsoft Corporation. + +Permission to use, copy, modify, and/or distribute this software for any +purpose with or without fee is hereby granted. + +THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES WITH +REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF MERCHANTABILITY +AND FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR ANY SPECIAL, DIRECT, +INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES WHATSOEVER RESULTING FROM +LOSS OF USE, DATA OR PROFITS, WHETHER IN AN ACTION OF CONTRACT, NEGLIGENCE OR +OTHER TORTIOUS ACTION, ARISING OUT OF OR IN CONNECTION WITH THE USE OR +PERFORMANCE OF THIS SOFTWARE. 
+***************************************************************************** */ +/* global global, define, Symbol, Reflect, Promise, SuppressedError, Iterator */ +var __extends; +var __assign; +var __rest; +var __decorate; +var __param; +var __esDecorate; +var __runInitializers; +var __propKey; +var __setFunctionName; +var __metadata; +var __awaiter; +var __generator; +var __exportStar; +var __values; +var __read; +var __spread; +var __spreadArrays; +var __spreadArray; +var __await; +var __asyncGenerator; +var __asyncDelegator; +var __asyncValues; +var __makeTemplateObject; +var __importStar; +var __importDefault; +var __classPrivateFieldGet; +var __classPrivateFieldSet; +var __classPrivateFieldIn; +var __createBinding; +var __addDisposableResource; +var __disposeResources; +var __rewriteRelativeImportExtension; +(function (factory) { + var root = typeof global === "object" ? global : typeof self === "object" ? self : typeof this === "object" ? this : {}; + if (typeof define === "function" && define.amd) { + define("tslib", ["exports"], function (exports) { factory(createExporter(root, createExporter(exports))); }); + } + else if (typeof module === "object" && typeof module.exports === "object") { + factory(createExporter(root, createExporter(module.exports))); + } + else { + factory(createExporter(root)); + } + function createExporter(exports, previous) { + if (exports !== root) { + if (typeof Object.create === "function") { + Object.defineProperty(exports, "__esModule", { value: true }); + } + else { + exports.__esModule = true; + } + } + return function (id, v) { return exports[id] = previous ? 
previous(id, v) : v; }; + } +}) +(function (exporter) { + var extendStatics = Object.setPrototypeOf || + ({ __proto__: [] } instanceof Array && function (d, b) { d.__proto__ = b; }) || + function (d, b) { for (var p in b) if (Object.prototype.hasOwnProperty.call(b, p)) d[p] = b[p]; }; + + __extends = function (d, b) { + if (typeof b !== "function" && b !== null) + throw new TypeError("Class extends value " + String(b) + " is not a constructor or null"); + extendStatics(d, b); + function __() { this.constructor = d; } + d.prototype = b === null ? Object.create(b) : (__.prototype = b.prototype, new __()); + }; + + __assign = Object.assign || function (t) { + for (var s, i = 1, n = arguments.length; i < n; i++) { + s = arguments[i]; + for (var p in s) if (Object.prototype.hasOwnProperty.call(s, p)) t[p] = s[p]; + } + return t; + }; + + __rest = function (s, e) { + var t = {}; + for (var p in s) if (Object.prototype.hasOwnProperty.call(s, p) && e.indexOf(p) < 0) + t[p] = s[p]; + if (s != null && typeof Object.getOwnPropertySymbols === "function") + for (var i = 0, p = Object.getOwnPropertySymbols(s); i < p.length; i++) { + if (e.indexOf(p[i]) < 0 && Object.prototype.propertyIsEnumerable.call(s, p[i])) + t[p[i]] = s[p[i]]; + } + return t; + }; + + __decorate = function (decorators, target, key, desc) { + var c = arguments.length, r = c < 3 ? target : desc === null ? desc = Object.getOwnPropertyDescriptor(target, key) : desc, d; + if (typeof Reflect === "object" && typeof Reflect.decorate === "function") r = Reflect.decorate(decorators, target, key, desc); + else for (var i = decorators.length - 1; i >= 0; i--) if (d = decorators[i]) r = (c < 3 ? d(r) : c > 3 ? 
d(target, key, r) : d(target, key)) || r; + return c > 3 && r && Object.defineProperty(target, key, r), r; + }; + + __param = function (paramIndex, decorator) { + return function (target, key) { decorator(target, key, paramIndex); } + }; + + __esDecorate = function (ctor, descriptorIn, decorators, contextIn, initializers, extraInitializers) { + function accept(f) { if (f !== void 0 && typeof f !== "function") throw new TypeError("Function expected"); return f; } + var kind = contextIn.kind, key = kind === "getter" ? "get" : kind === "setter" ? "set" : "value"; + var target = !descriptorIn && ctor ? contextIn["static"] ? ctor : ctor.prototype : null; + var descriptor = descriptorIn || (target ? Object.getOwnPropertyDescriptor(target, contextIn.name) : {}); + var _, done = false; + for (var i = decorators.length - 1; i >= 0; i--) { + var context = {}; + for (var p in contextIn) context[p] = p === "access" ? {} : contextIn[p]; + for (var p in contextIn.access) context.access[p] = contextIn.access[p]; + context.addInitializer = function (f) { if (done) throw new TypeError("Cannot add initializers after decoration has completed"); extraInitializers.push(accept(f || null)); }; + var result = (0, decorators[i])(kind === "accessor" ? 
{ get: descriptor.get, set: descriptor.set } : descriptor[key], context); + if (kind === "accessor") { + if (result === void 0) continue; + if (result === null || typeof result !== "object") throw new TypeError("Object expected"); + if (_ = accept(result.get)) descriptor.get = _; + if (_ = accept(result.set)) descriptor.set = _; + if (_ = accept(result.init)) initializers.unshift(_); + } + else if (_ = accept(result)) { + if (kind === "field") initializers.unshift(_); + else descriptor[key] = _; + } + } + if (target) Object.defineProperty(target, contextIn.name, descriptor); + done = true; + }; + + __runInitializers = function (thisArg, initializers, value) { + var useValue = arguments.length > 2; + for (var i = 0; i < initializers.length; i++) { + value = useValue ? initializers[i].call(thisArg, value) : initializers[i].call(thisArg); + } + return useValue ? value : void 0; + }; + + __propKey = function (x) { + return typeof x === "symbol" ? x : "".concat(x); + }; + + __setFunctionName = function (f, name, prefix) { + if (typeof name === "symbol") name = name.description ? "[".concat(name.description, "]") : ""; + return Object.defineProperty(f, "name", { configurable: true, value: prefix ? "".concat(prefix, " ", name) : name }); + }; + + __metadata = function (metadataKey, metadataValue) { + if (typeof Reflect === "object" && typeof Reflect.metadata === "function") return Reflect.metadata(metadataKey, metadataValue); + }; + + __awaiter = function (thisArg, _arguments, P, generator) { + function adopt(value) { return value instanceof P ? value : new P(function (resolve) { resolve(value); }); } + return new (P || (P = Promise))(function (resolve, reject) { + function fulfilled(value) { try { step(generator.next(value)); } catch (e) { reject(e); } } + function rejected(value) { try { step(generator["throw"](value)); } catch (e) { reject(e); } } + function step(result) { result.done ? 
resolve(result.value) : adopt(result.value).then(fulfilled, rejected); } + step((generator = generator.apply(thisArg, _arguments || [])).next()); + }); + }; + + __generator = function (thisArg, body) { + var _ = { label: 0, sent: function() { if (t[0] & 1) throw t[1]; return t[1]; }, trys: [], ops: [] }, f, y, t, g = Object.create((typeof Iterator === "function" ? Iterator : Object).prototype); + return g.next = verb(0), g["throw"] = verb(1), g["return"] = verb(2), typeof Symbol === "function" && (g[Symbol.iterator] = function() { return this; }), g; + function verb(n) { return function (v) { return step([n, v]); }; } + function step(op) { + if (f) throw new TypeError("Generator is already executing."); + while (g && (g = 0, op[0] && (_ = 0)), _) try { + if (f = 1, y && (t = op[0] & 2 ? y["return"] : op[0] ? y["throw"] || ((t = y["return"]) && t.call(y), 0) : y.next) && !(t = t.call(y, op[1])).done) return t; + if (y = 0, t) op = [op[0] & 2, t.value]; + switch (op[0]) { + case 0: case 1: t = op; break; + case 4: _.label++; return { value: op[1], done: false }; + case 5: _.label++; y = op[1]; op = [0]; continue; + case 7: op = _.ops.pop(); _.trys.pop(); continue; + default: + if (!(t = _.trys, t = t.length > 0 && t[t.length - 1]) && (op[0] === 6 || op[0] === 2)) { _ = 0; continue; } + if (op[0] === 3 && (!t || (op[1] > t[0] && op[1] < t[3]))) { _.label = op[1]; break; } + if (op[0] === 6 && _.label < t[1]) { _.label = t[1]; t = op; break; } + if (t && _.label < t[2]) { _.label = t[2]; _.ops.push(op); break; } + if (t[2]) _.ops.pop(); + _.trys.pop(); continue; + } + op = body.call(thisArg, _); + } catch (e) { op = [6, e]; y = 0; } finally { f = t = 0; } + if (op[0] & 5) throw op[1]; return { value: op[0] ? op[1] : void 0, done: true }; + } + }; + + __exportStar = function(m, o) { + for (var p in m) if (p !== "default" && !Object.prototype.hasOwnProperty.call(o, p)) __createBinding(o, m, p); + }; + + __createBinding = Object.create ? 
(function(o, m, k, k2) { + if (k2 === undefined) k2 = k; + var desc = Object.getOwnPropertyDescriptor(m, k); + if (!desc || ("get" in desc ? !m.__esModule : desc.writable || desc.configurable)) { + desc = { enumerable: true, get: function() { return m[k]; } }; + } + Object.defineProperty(o, k2, desc); + }) : (function(o, m, k, k2) { + if (k2 === undefined) k2 = k; + o[k2] = m[k]; + }); + + __values = function (o) { + var s = typeof Symbol === "function" && Symbol.iterator, m = s && o[s], i = 0; + if (m) return m.call(o); + if (o && typeof o.length === "number") return { + next: function () { + if (o && i >= o.length) o = void 0; + return { value: o && o[i++], done: !o }; + } + }; + throw new TypeError(s ? "Object is not iterable." : "Symbol.iterator is not defined."); + }; + + __read = function (o, n) { + var m = typeof Symbol === "function" && o[Symbol.iterator]; + if (!m) return o; + var i = m.call(o), r, ar = [], e; + try { + while ((n === void 0 || n-- > 0) && !(r = i.next()).done) ar.push(r.value); + } + catch (error) { e = { error: error }; } + finally { + try { + if (r && !r.done && (m = i["return"])) m.call(i); + } + finally { if (e) throw e.error; } + } + return ar; + }; + + /** @deprecated */ + __spread = function () { + for (var ar = [], i = 0; i < arguments.length; i++) + ar = ar.concat(__read(arguments[i])); + return ar; + }; + + /** @deprecated */ + __spreadArrays = function () { + for (var s = 0, i = 0, il = arguments.length; i < il; i++) s += arguments[i].length; + for (var r = Array(s), k = 0, i = 0; i < il; i++) + for (var a = arguments[i], j = 0, jl = a.length; j < jl; j++, k++) + r[k] = a[j]; + return r; + }; + + __spreadArray = function (to, from, pack) { + if (pack || arguments.length === 2) for (var i = 0, l = from.length, ar; i < l; i++) { + if (ar || !(i in from)) { + if (!ar) ar = Array.prototype.slice.call(from, 0, i); + ar[i] = from[i]; + } + } + return to.concat(ar || Array.prototype.slice.call(from)); + }; + + __await = function (v) { 
+ return this instanceof __await ? (this.v = v, this) : new __await(v); + }; + + __asyncGenerator = function (thisArg, _arguments, generator) { + if (!Symbol.asyncIterator) throw new TypeError("Symbol.asyncIterator is not defined."); + var g = generator.apply(thisArg, _arguments || []), i, q = []; + return i = Object.create((typeof AsyncIterator === "function" ? AsyncIterator : Object).prototype), verb("next"), verb("throw"), verb("return", awaitReturn), i[Symbol.asyncIterator] = function () { return this; }, i; + function awaitReturn(f) { return function (v) { return Promise.resolve(v).then(f, reject); }; } + function verb(n, f) { if (g[n]) { i[n] = function (v) { return new Promise(function (a, b) { q.push([n, v, a, b]) > 1 || resume(n, v); }); }; if (f) i[n] = f(i[n]); } } + function resume(n, v) { try { step(g[n](v)); } catch (e) { settle(q[0][3], e); } } + function step(r) { r.value instanceof __await ? Promise.resolve(r.value.v).then(fulfill, reject) : settle(q[0][2], r); } + function fulfill(value) { resume("next", value); } + function reject(value) { resume("throw", value); } + function settle(f, v) { if (f(v), q.shift(), q.length) resume(q[0][0], q[0][1]); } + }; + + __asyncDelegator = function (o) { + var i, p; + return i = {}, verb("next"), verb("throw", function (e) { throw e; }), verb("return"), i[Symbol.iterator] = function () { return this; }, i; + function verb(n, f) { i[n] = o[n] ? function (v) { return (p = !p) ? { value: __await(o[n](v)), done: false } : f ? f(v) : v; } : f; } + }; + + __asyncValues = function (o) { + if (!Symbol.asyncIterator) throw new TypeError("Symbol.asyncIterator is not defined."); + var m = o[Symbol.asyncIterator], i; + return m ? m.call(o) : (o = typeof __values === "function" ? 
__values(o) : o[Symbol.iterator](), i = {}, verb("next"), verb("throw"), verb("return"), i[Symbol.asyncIterator] = function () { return this; }, i); + function verb(n) { i[n] = o[n] && function (v) { return new Promise(function (resolve, reject) { v = o[n](v), settle(resolve, reject, v.done, v.value); }); }; } + function settle(resolve, reject, d, v) { Promise.resolve(v).then(function(v) { resolve({ value: v, done: d }); }, reject); } + }; + + __makeTemplateObject = function (cooked, raw) { + if (Object.defineProperty) { Object.defineProperty(cooked, "raw", { value: raw }); } else { cooked.raw = raw; } + return cooked; + }; + + var __setModuleDefault = Object.create ? (function(o, v) { + Object.defineProperty(o, "default", { enumerable: true, value: v }); + }) : function(o, v) { + o["default"] = v; + }; + + var ownKeys = function(o) { + ownKeys = Object.getOwnPropertyNames || function (o) { + var ar = []; + for (var k in o) if (Object.prototype.hasOwnProperty.call(o, k)) ar[ar.length] = k; + return ar; + }; + return ownKeys(o); + }; + + __importStar = function (mod) { + if (mod && mod.__esModule) return mod; + var result = {}; + if (mod != null) for (var k = ownKeys(mod), i = 0; i < k.length; i++) if (k[i] !== "default") __createBinding(result, mod, k[i]); + __setModuleDefault(result, mod); + return result; + }; + + __importDefault = function (mod) { + return (mod && mod.__esModule) ? mod : { "default": mod }; + }; + + __classPrivateFieldGet = function (receiver, state, kind, f) { + if (kind === "a" && !f) throw new TypeError("Private accessor was defined without a getter"); + if (typeof state === "function" ? receiver !== state || !f : !state.has(receiver)) throw new TypeError("Cannot read private member from an object whose class did not declare it"); + return kind === "m" ? f : kind === "a" ? f.call(receiver) : f ? 
f.value : state.get(receiver); + }; + + __classPrivateFieldSet = function (receiver, state, value, kind, f) { + if (kind === "m") throw new TypeError("Private method is not writable"); + if (kind === "a" && !f) throw new TypeError("Private accessor was defined without a setter"); + if (typeof state === "function" ? receiver !== state || !f : !state.has(receiver)) throw new TypeError("Cannot write private member to an object whose class did not declare it"); + return (kind === "a" ? f.call(receiver, value) : f ? f.value = value : state.set(receiver, value)), value; + }; + + __classPrivateFieldIn = function (state, receiver) { + if (receiver === null || (typeof receiver !== "object" && typeof receiver !== "function")) throw new TypeError("Cannot use 'in' operator on non-object"); + return typeof state === "function" ? receiver === state : state.has(receiver); + }; + + __addDisposableResource = function (env, value, async) { + if (value !== null && value !== void 0) { + if (typeof value !== "object" && typeof value !== "function") throw new TypeError("Object expected."); + var dispose, inner; + if (async) { + if (!Symbol.asyncDispose) throw new TypeError("Symbol.asyncDispose is not defined."); + dispose = value[Symbol.asyncDispose]; + } + if (dispose === void 0) { + if (!Symbol.dispose) throw new TypeError("Symbol.dispose is not defined."); + dispose = value[Symbol.dispose]; + if (async) inner = dispose; + } + if (typeof dispose !== "function") throw new TypeError("Object not disposable."); + if (inner) dispose = function() { try { inner.call(this); } catch (e) { return Promise.reject(e); } }; + env.stack.push({ value: value, dispose: dispose, async: async }); + } + else if (async) { + env.stack.push({ async: true }); + } + return value; + }; + + var _SuppressedError = typeof SuppressedError === "function" ? 
SuppressedError : function (error, suppressed, message) { + var e = new Error(message); + return e.name = "SuppressedError", e.error = error, e.suppressed = suppressed, e; + }; + + __disposeResources = function (env) { + function fail(e) { + env.error = env.hasError ? new _SuppressedError(e, env.error, "An error was suppressed during disposal.") : e; + env.hasError = true; + } + var r, s = 0; + function next() { + while (r = env.stack.pop()) { + try { + if (!r.async && s === 1) return s = 0, env.stack.push(r), Promise.resolve().then(next); + if (r.dispose) { + var result = r.dispose.call(r.value); + if (r.async) return s |= 2, Promise.resolve(result).then(next, function(e) { fail(e); return next(); }); + } + else s |= 1; + } + catch (e) { + fail(e); + } + } + if (s === 1) return env.hasError ? Promise.reject(env.error) : Promise.resolve(); + if (env.hasError) throw env.error; + } + return next(); + }; + + __rewriteRelativeImportExtension = function (path, preserveJsx) { + if (typeof path === "string" && /^\.\.?\//.test(path)) { + return path.replace(/\.(tsx)$|((?:\.d)?)((?:\.[^./]+?)?)\.([cm]?)ts$/i, function (m, tsx, d, ext, cm) { + return tsx ? preserveJsx ? ".jsx" : ".js" : d && (!ext || !cm) ? m : (d + ext + "." 
+ cm.toLowerCase() + "js"); + }); + } + return path; + }; + + exporter("__extends", __extends); + exporter("__assign", __assign); + exporter("__rest", __rest); + exporter("__decorate", __decorate); + exporter("__param", __param); + exporter("__esDecorate", __esDecorate); + exporter("__runInitializers", __runInitializers); + exporter("__propKey", __propKey); + exporter("__setFunctionName", __setFunctionName); + exporter("__metadata", __metadata); + exporter("__awaiter", __awaiter); + exporter("__generator", __generator); + exporter("__exportStar", __exportStar); + exporter("__createBinding", __createBinding); + exporter("__values", __values); + exporter("__read", __read); + exporter("__spread", __spread); + exporter("__spreadArrays", __spreadArrays); + exporter("__spreadArray", __spreadArray); + exporter("__await", __await); + exporter("__asyncGenerator", __asyncGenerator); + exporter("__asyncDelegator", __asyncDelegator); + exporter("__asyncValues", __asyncValues); + exporter("__makeTemplateObject", __makeTemplateObject); + exporter("__importStar", __importStar); + exporter("__importDefault", __importDefault); + exporter("__classPrivateFieldGet", __classPrivateFieldGet); + exporter("__classPrivateFieldSet", __classPrivateFieldSet); + exporter("__classPrivateFieldIn", __classPrivateFieldIn); + exporter("__addDisposableResource", __addDisposableResource); + exporter("__disposeResources", __disposeResources); + exporter("__rewriteRelativeImportExtension", __rewriteRelativeImportExtension); +}); + +0 && (module.exports = { + __extends: __extends, + __assign: __assign, + __rest: __rest, + __decorate: __decorate, + __param: __param, + __esDecorate: __esDecorate, + __runInitializers: __runInitializers, + __propKey: __propKey, + __setFunctionName: __setFunctionName, + __metadata: __metadata, + __awaiter: __awaiter, + __generator: __generator, + __exportStar: __exportStar, + __createBinding: __createBinding, + __values: __values, + __read: __read, + __spread: __spread, + 
__spreadArrays: __spreadArrays, + __spreadArray: __spreadArray, + __await: __await, + __asyncGenerator: __asyncGenerator, + __asyncDelegator: __asyncDelegator, + __asyncValues: __asyncValues, + __makeTemplateObject: __makeTemplateObject, + __importStar: __importStar, + __importDefault: __importDefault, + __classPrivateFieldGet: __classPrivateFieldGet, + __classPrivateFieldSet: __classPrivateFieldSet, + __classPrivateFieldIn: __classPrivateFieldIn, + __addDisposableResource: __addDisposableResource, + __disposeResources: __disposeResources, + __rewriteRelativeImportExtension: __rewriteRelativeImportExtension, +}); diff --git a/amplify/functions/fetchDocuments/node_modules/uuid/CHANGELOG.md b/amplify/functions/fetchDocuments/node_modules/uuid/CHANGELOG.md new file mode 100644 index 0000000..0412ad8 --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/uuid/CHANGELOG.md @@ -0,0 +1,274 @@ +# Changelog + +All notable changes to this project will be documented in this file. See [standard-version](https://github.com/conventional-changelog/standard-version) for commit guidelines. + +## [9.0.1](https://github.com/uuidjs/uuid/compare/v9.0.0...v9.0.1) (2023-09-12) + +### build + +- Fix CI to work with Node.js 20.x + +## [9.0.0](https://github.com/uuidjs/uuid/compare/v8.3.2...v9.0.0) (2022-09-05) + +### ⚠ BREAKING CHANGES + +- Drop Node.js 10.x support. This library always aims at supporting one EOLed LTS release which by this time now is 12.x which has reached EOL 30 Apr 2022. + +- Remove the minified UMD build from the package. + + Minified code is hard to audit and since this is a widely used library it seems more appropriate nowadays to optimize for auditability than to ship a legacy module format that, at best, serves educational purposes nowadays. + + For production browser use cases, users should be using a bundler. 
For educational purposes, today's online sandboxes like replit.com offer convenient ways to load npm modules, so the use case for UMD through repos like UNPKG or jsDelivr has largely vanished. + +- Drop IE 11 and Safari 10 support. Drop support for browsers that don't correctly implement const/let and default arguments, and no longer transpile the browser build to ES2015. + + This also removes the fallback on msCrypto instead of the crypto API. + + Browser tests are run in the first supported version of each supported browser and in the latest (as of this commit) version available on Browserstack. + +### Features + +- optimize uuid.v1 by 1.3x uuid.v4 by 4.3x (430%) ([#597](https://github.com/uuidjs/uuid/issues/597)) ([3a033f6](https://github.com/uuidjs/uuid/commit/3a033f6bab6bb3780ece6d645b902548043280bc)) +- remove UMD build ([#645](https://github.com/uuidjs/uuid/issues/645)) ([e948a0f](https://github.com/uuidjs/uuid/commit/e948a0f22bf22f4619b27bd913885e478e20fe6f)), closes [#620](https://github.com/uuidjs/uuid/issues/620) +- use native crypto.randomUUID when available ([#600](https://github.com/uuidjs/uuid/issues/600)) ([c9e076c](https://github.com/uuidjs/uuid/commit/c9e076c852edad7e9a06baaa1d148cf4eda6c6c4)) + +### Bug Fixes + +- add Jest/jsdom compatibility ([#642](https://github.com/uuidjs/uuid/issues/642)) ([16f9c46](https://github.com/uuidjs/uuid/commit/16f9c469edf46f0786164cdf4dc980743984a6fd)) +- change default export to named function ([#545](https://github.com/uuidjs/uuid/issues/545)) ([c57bc5a](https://github.com/uuidjs/uuid/commit/c57bc5a9a0653273aa639cda9177ce52efabe42a)) +- handle error when parameter is not set in v3 and v5 ([#622](https://github.com/uuidjs/uuid/issues/622)) ([fcd7388](https://github.com/uuidjs/uuid/commit/fcd73881692d9fabb63872576ba28e30ff852091)) +- run npm audit fix ([#644](https://github.com/uuidjs/uuid/issues/644)) ([04686f5](https://github.com/uuidjs/uuid/commit/04686f54c5fed2cfffc1b619f4970c4bb8532353)) +- upgrading from 
uuid3 broken link ([#568](https://github.com/uuidjs/uuid/issues/568)) ([1c849da](https://github.com/uuidjs/uuid/commit/1c849da6e164259e72e18636726345b13a7eddd6)) + +### build + +- drop Node.js 8.x from babel transpile target ([#603](https://github.com/uuidjs/uuid/issues/603)) ([aa11485](https://github.com/uuidjs/uuid/commit/aa114858260402107ec8a1e1a825dea0a259bcb5)) +- drop support for legacy browsers (IE11, Safari 10) ([#604](https://github.com/uuidjs/uuid/issues/604)) ([0f433e5](https://github.com/uuidjs/uuid/commit/0f433e5ec444edacd53016de67db021102f36148)) + +- drop node 10.x to upgrade dev dependencies ([#653](https://github.com/uuidjs/uuid/issues/653)) ([28a5712](https://github.com/uuidjs/uuid/commit/28a571283f8abda6b9d85e689f95b7d3ee9e282e)), closes [#643](https://github.com/uuidjs/uuid/issues/643) + +### [8.3.2](https://github.com/uuidjs/uuid/compare/v8.3.1...v8.3.2) (2020-12-08) + +### Bug Fixes + +- lazy load getRandomValues ([#537](https://github.com/uuidjs/uuid/issues/537)) ([16c8f6d](https://github.com/uuidjs/uuid/commit/16c8f6df2f6b09b4d6235602d6a591188320a82e)), closes [#536](https://github.com/uuidjs/uuid/issues/536) + +### [8.3.1](https://github.com/uuidjs/uuid/compare/v8.3.0...v8.3.1) (2020-10-04) + +### Bug Fixes + +- support expo>=39.0.0 ([#515](https://github.com/uuidjs/uuid/issues/515)) ([c65a0f3](https://github.com/uuidjs/uuid/commit/c65a0f3fa73b901959d638d1e3591dfacdbed867)), closes [#375](https://github.com/uuidjs/uuid/issues/375) + +## [8.3.0](https://github.com/uuidjs/uuid/compare/v8.2.0...v8.3.0) (2020-07-27) + +### Features + +- add parse/stringify/validate/version/NIL APIs ([#479](https://github.com/uuidjs/uuid/issues/479)) ([0e6c10b](https://github.com/uuidjs/uuid/commit/0e6c10ba1bf9517796ff23c052fc0468eedfd5f4)), closes [#475](https://github.com/uuidjs/uuid/issues/475) [#478](https://github.com/uuidjs/uuid/issues/478) [#480](https://github.com/uuidjs/uuid/issues/480) [#481](https://github.com/uuidjs/uuid/issues/481) 
[#180](https://github.com/uuidjs/uuid/issues/180) + +## [8.2.0](https://github.com/uuidjs/uuid/compare/v8.1.0...v8.2.0) (2020-06-23) + +### Features + +- improve performance of v1 string representation ([#453](https://github.com/uuidjs/uuid/issues/453)) ([0ee0b67](https://github.com/uuidjs/uuid/commit/0ee0b67c37846529c66089880414d29f3ae132d5)) +- remove deprecated v4 string parameter ([#454](https://github.com/uuidjs/uuid/issues/454)) ([88ce3ca](https://github.com/uuidjs/uuid/commit/88ce3ca0ba046f60856de62c7ce03f7ba98ba46c)), closes [#437](https://github.com/uuidjs/uuid/issues/437) +- support jspm ([#473](https://github.com/uuidjs/uuid/issues/473)) ([e9f2587](https://github.com/uuidjs/uuid/commit/e9f2587a92575cac31bc1d4ae944e17c09756659)) + +### Bug Fixes + +- prepare package exports for webpack 5 ([#468](https://github.com/uuidjs/uuid/issues/468)) ([8d6e6a5](https://github.com/uuidjs/uuid/commit/8d6e6a5f8965ca9575eb4d92e99a43435f4a58a8)) + +## [8.1.0](https://github.com/uuidjs/uuid/compare/v8.0.0...v8.1.0) (2020-05-20) + +### Features + +- improve v4 performance by reusing random number array ([#435](https://github.com/uuidjs/uuid/issues/435)) ([bf4af0d](https://github.com/uuidjs/uuid/commit/bf4af0d711b4d2ed03d1f74fd12ad0baa87dc79d)) +- optimize V8 performance of bytesToUuid ([#434](https://github.com/uuidjs/uuid/issues/434)) ([e156415](https://github.com/uuidjs/uuid/commit/e156415448ec1af2351fa0b6660cfb22581971f2)) + +### Bug Fixes + +- export package.json required by react-native and bundlers ([#449](https://github.com/uuidjs/uuid/issues/449)) ([be1c8fe](https://github.com/uuidjs/uuid/commit/be1c8fe9a3206c358e0059b52fafd7213aa48a52)), closes [ai/nanoevents#44](https://github.com/ai/nanoevents/issues/44#issuecomment-602010343) [#444](https://github.com/uuidjs/uuid/issues/444) + +## [8.0.0](https://github.com/uuidjs/uuid/compare/v7.0.3...v8.0.0) (2020-04-29) + +### ⚠ BREAKING CHANGES + +- For native ECMAScript Module (ESM) usage in Node.js only named exports are 
exposed, there is no more default export. + + ```diff + -import uuid from 'uuid'; + -console.log(uuid.v4()); // -> 'cd6c3b08-0adc-4f4b-a6ef-36087a1c9869' + +import { v4 as uuidv4 } from 'uuid'; + +uuidv4(); // ⇨ '9b1deb4d-3b7d-4bad-9bdd-2b0d7b3dcb6d' + ``` + +- Deep requiring specific algorithms of this library like `require('uuid/v4')`, which has been deprecated in `uuid@7`, is no longer supported. + + Instead use the named exports that this module exports. + + For ECMAScript Modules (ESM): + + ```diff + -import uuidv4 from 'uuid/v4'; + +import { v4 as uuidv4 } from 'uuid'; + uuidv4(); + ``` + + For CommonJS: + + ```diff + -const uuidv4 = require('uuid/v4'); + +const { v4: uuidv4 } = require('uuid'); + uuidv4(); + ``` + +### Features + +- native Node.js ES Modules (wrapper approach) ([#423](https://github.com/uuidjs/uuid/issues/423)) ([2d9f590](https://github.com/uuidjs/uuid/commit/2d9f590ad9701d692625c07ed62f0a0f91227991)), closes [#245](https://github.com/uuidjs/uuid/issues/245) [#419](https://github.com/uuidjs/uuid/issues/419) [#342](https://github.com/uuidjs/uuid/issues/342) +- remove deep requires ([#426](https://github.com/uuidjs/uuid/issues/426)) ([daf72b8](https://github.com/uuidjs/uuid/commit/daf72b84ceb20272a81bb5fbddb05dd95922cbba)) + +### Bug Fixes + +- add CommonJS syntax example to README quickstart section ([#417](https://github.com/uuidjs/uuid/issues/417)) ([e0ec840](https://github.com/uuidjs/uuid/commit/e0ec8402c7ad44b7ef0453036c612f5db513fda0)) + +### [7.0.3](https://github.com/uuidjs/uuid/compare/v7.0.2...v7.0.3) (2020-03-31) + +### Bug Fixes + +- make deep require deprecation warning work in browsers ([#409](https://github.com/uuidjs/uuid/issues/409)) ([4b71107](https://github.com/uuidjs/uuid/commit/4b71107d8c0d2ef56861ede6403fc9dc35a1e6bf)), closes [#408](https://github.com/uuidjs/uuid/issues/408) + +### [7.0.2](https://github.com/uuidjs/uuid/compare/v7.0.1...v7.0.2) (2020-03-04) + +### Bug Fixes + +- make access to msCrypto consistent 
([#393](https://github.com/uuidjs/uuid/issues/393)) ([8bf2a20](https://github.com/uuidjs/uuid/commit/8bf2a20f3565df743da7215eebdbada9d2df118c)) +- simplify link in deprecation warning ([#391](https://github.com/uuidjs/uuid/issues/391)) ([bb2c8e4](https://github.com/uuidjs/uuid/commit/bb2c8e4e9f4c5f9c1eaaf3ea59710c633cd90cb7)) +- update links to match content in readme ([#386](https://github.com/uuidjs/uuid/issues/386)) ([44f2f86](https://github.com/uuidjs/uuid/commit/44f2f86e9d2bbf14ee5f0f00f72a3db1292666d4)) + +### [7.0.1](https://github.com/uuidjs/uuid/compare/v7.0.0...v7.0.1) (2020-02-25) + +### Bug Fixes + +- clean up esm builds for node and browser ([#383](https://github.com/uuidjs/uuid/issues/383)) ([59e6a49](https://github.com/uuidjs/uuid/commit/59e6a49e7ce7b3e8fb0f3ee52b9daae72af467dc)) +- provide browser versions independent from module system ([#380](https://github.com/uuidjs/uuid/issues/380)) ([4344a22](https://github.com/uuidjs/uuid/commit/4344a22e7aed33be8627eeaaf05360f256a21753)), closes [#378](https://github.com/uuidjs/uuid/issues/378) + +## [7.0.0](https://github.com/uuidjs/uuid/compare/v3.4.0...v7.0.0) (2020-02-24) + +### ⚠ BREAKING CHANGES + +- The default export, which used to be the v4() method but which was already discouraged in v3.x of this library, has been removed. +- Explicitly note that deep imports of the different uuid version functions are deprecated and no longer encouraged and that ECMAScript module named imports should be used instead. Emit a deprecation warning for people who deep-require the different algorithm variants. +- Remove builtin support for insecure random number generators in the browser. Users who want that will have to supply their own random number generator function. +- Remove support for generating v3 and v5 UUIDs in Node.js<4.x +- Convert code base to ECMAScript Modules (ESM) and release CommonJS build for node and ESM build for browser bundlers. 
+ +### Features + +- add UMD build to npm package ([#357](https://github.com/uuidjs/uuid/issues/357)) ([4e75adf](https://github.com/uuidjs/uuid/commit/4e75adf435196f28e3fbbe0185d654b5ded7ca2c)), closes [#345](https://github.com/uuidjs/uuid/issues/345) +- add various es module and CommonJS examples ([b238510](https://github.com/uuidjs/uuid/commit/b238510bf352463521f74bab175a3af9b7a42555)) +- ensure that docs are up-to-date in CI ([ee5e77d](https://github.com/uuidjs/uuid/commit/ee5e77db547474f5a8f23d6c857a6d399209986b)) +- hybrid CommonJS & ECMAScript modules build ([a3f078f](https://github.com/uuidjs/uuid/commit/a3f078faa0baff69ab41aed08e041f8f9c8993d0)) +- remove insecure fallback random number generator ([3a5842b](https://github.com/uuidjs/uuid/commit/3a5842b141a6e5de0ae338f391661e6b84b167c9)), closes [#173](https://github.com/uuidjs/uuid/issues/173) +- remove support for pre Node.js v4 Buffer API ([#356](https://github.com/uuidjs/uuid/issues/356)) ([b59b5c5](https://github.com/uuidjs/uuid/commit/b59b5c5ecad271c5453f1a156f011671f6d35627)) +- rename repository to github:uuidjs/uuid ([#351](https://github.com/uuidjs/uuid/issues/351)) ([c37a518](https://github.com/uuidjs/uuid/commit/c37a518e367ac4b6d0aa62dba1bc6ce9e85020f7)), closes [#338](https://github.com/uuidjs/uuid/issues/338) + +### Bug Fixes + +- add deep-require proxies for local testing and adjust tests ([#365](https://github.com/uuidjs/uuid/issues/365)) ([7fedc79](https://github.com/uuidjs/uuid/commit/7fedc79ac8fda4bfd1c566c7f05ef4ac13b2db48)) +- add note about removal of default export ([#372](https://github.com/uuidjs/uuid/issues/372)) ([12749b7](https://github.com/uuidjs/uuid/commit/12749b700eb49db8a9759fd306d8be05dbfbd58c)), closes [#370](https://github.com/uuidjs/uuid/issues/370) +- deprecated deep requiring of the different algorithm versions ([#361](https://github.com/uuidjs/uuid/issues/361)) ([c0bdf15](https://github.com/uuidjs/uuid/commit/c0bdf15e417639b1aeb0b247b2fb11f7a0a26b23)) + +## 
[3.4.0](https://github.com/uuidjs/uuid/compare/v3.3.3...v3.4.0) (2020-01-16) + +### Features + +- rename repository to github:uuidjs/uuid ([#351](https://github.com/uuidjs/uuid/issues/351)) ([e2d7314](https://github.com/uuidjs/uuid/commit/e2d7314)), closes [#338](https://github.com/uuidjs/uuid/issues/338) + +## [3.3.3](https://github.com/uuidjs/uuid/compare/v3.3.2...v3.3.3) (2019-08-19) + +### Bug Fixes + +- no longer run ci tests on node v4 +- upgrade dependencies + +## [3.3.2](https://github.com/uuidjs/uuid/compare/v3.3.1...v3.3.2) (2018-06-28) + +### Bug Fixes + +- typo ([305d877](https://github.com/uuidjs/uuid/commit/305d877)) + +## [3.3.1](https://github.com/uuidjs/uuid/compare/v3.3.0...v3.3.1) (2018-06-28) + +### Bug Fixes + +- fix [#284](https://github.com/uuidjs/uuid/issues/284) by setting function name in try-catch ([f2a60f2](https://github.com/uuidjs/uuid/commit/f2a60f2)) + +# [3.3.0](https://github.com/uuidjs/uuid/compare/v3.2.1...v3.3.0) (2018-06-22) + +### Bug Fixes + +- assignment to readonly property to allow running in strict mode ([#270](https://github.com/uuidjs/uuid/issues/270)) ([d062fdc](https://github.com/uuidjs/uuid/commit/d062fdc)) +- fix [#229](https://github.com/uuidjs/uuid/issues/229) ([c9684d4](https://github.com/uuidjs/uuid/commit/c9684d4)) +- Get correct version of IE11 crypto ([#274](https://github.com/uuidjs/uuid/issues/274)) ([153d331](https://github.com/uuidjs/uuid/commit/153d331)) +- mem issue when generating uuid ([#267](https://github.com/uuidjs/uuid/issues/267)) ([c47702c](https://github.com/uuidjs/uuid/commit/c47702c)) + +### Features + +- enforce Conventional Commit style commit messages ([#282](https://github.com/uuidjs/uuid/issues/282)) ([cc9a182](https://github.com/uuidjs/uuid/commit/cc9a182)) + +## [3.2.1](https://github.com/uuidjs/uuid/compare/v3.2.0...v3.2.1) (2018-01-16) + +### Bug Fixes + +- use msCrypto if available. 
Fixes [#241](https://github.com/uuidjs/uuid/issues/241) ([#247](https://github.com/uuidjs/uuid/issues/247)) ([1fef18b](https://github.com/uuidjs/uuid/commit/1fef18b)) + +# [3.2.0](https://github.com/uuidjs/uuid/compare/v3.1.0...v3.2.0) (2018-01-16) + +### Bug Fixes + +- remove mistakenly added typescript dependency, rollback version (standard-version will auto-increment) ([09fa824](https://github.com/uuidjs/uuid/commit/09fa824)) +- use msCrypto if available. Fixes [#241](https://github.com/uuidjs/uuid/issues/241) ([#247](https://github.com/uuidjs/uuid/issues/247)) ([1fef18b](https://github.com/uuidjs/uuid/commit/1fef18b)) + +### Features + +- Add v3 Support ([#217](https://github.com/uuidjs/uuid/issues/217)) ([d94f726](https://github.com/uuidjs/uuid/commit/d94f726)) + +# [3.1.0](https://github.com/uuidjs/uuid/compare/v3.1.0...v3.0.1) (2017-06-17) + +### Bug Fixes + +- (fix) Add .npmignore file to exclude test/ and other non-essential files from packing. (#183) +- Fix typo (#178) +- Simple typo fix (#165) + +### Features + +- v5 support in CLI (#197) +- V5 support (#188) + +# 3.0.1 (2016-11-28) + +- split uuid versions into separate files + +# 3.0.0 (2016-11-17) + +- remove .parse and .unparse + +# 2.0.0 + +- Removed uuid.BufferClass + +# 1.4.0 + +- Improved module context detection +- Removed public RNG functions + +# 1.3.2 + +- Improve tests and handling of v1() options (Issue #24) +- Expose RNG option to allow for perf testing with different generators + +# 1.3.0 + +- Support for version 1 ids, thanks to [@ctavan](https://github.com/ctavan)! 
+- Support for node.js crypto API +- De-emphasizing performance in favor of a) cryptographic quality PRNGs where available and b) more manageable code diff --git a/amplify/functions/fetchDocuments/node_modules/uuid/CONTRIBUTING.md b/amplify/functions/fetchDocuments/node_modules/uuid/CONTRIBUTING.md new file mode 100644 index 0000000..4a4503d --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/uuid/CONTRIBUTING.md @@ -0,0 +1,18 @@ +# Contributing + +Please feel free to file GitHub Issues or propose Pull Requests. We're always happy to discuss improvements to this library! + +## Testing + +```shell +npm test +``` + +## Releasing + +Releases are supposed to be done from master, version bumping is automated through [`standard-version`](https://github.com/conventional-changelog/standard-version): + +```shell +npm run release -- --dry-run # verify output manually +npm run release # follow the instructions from the output of this command +``` diff --git a/amplify/functions/fetchDocuments/node_modules/uuid/LICENSE.md b/amplify/functions/fetchDocuments/node_modules/uuid/LICENSE.md new file mode 100644 index 0000000..3934168 --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/uuid/LICENSE.md @@ -0,0 +1,9 @@ +The MIT License (MIT) + +Copyright (c) 2010-2020 Robert Kieffer and other contributors + +Permission is hereby granted, free of charge, to any person obtaining a copy of this software and associated documentation files (the "Software"), to deal in the Software without restriction, including without limitation the rights to use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of the Software, and to permit persons to whom the Software is furnished to do so, subject to the following conditions: + +The above copyright notice and this permission notice shall be included in all copies or substantial portions of the Software. 
+ +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. diff --git a/amplify/functions/fetchDocuments/node_modules/uuid/README.md b/amplify/functions/fetchDocuments/node_modules/uuid/README.md new file mode 100644 index 0000000..4f51e09 --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/uuid/README.md @@ -0,0 +1,466 @@ + + + +# uuid [![CI](https://github.com/uuidjs/uuid/workflows/CI/badge.svg)](https://github.com/uuidjs/uuid/actions?query=workflow%3ACI) [![Browser](https://github.com/uuidjs/uuid/workflows/Browser/badge.svg)](https://github.com/uuidjs/uuid/actions?query=workflow%3ABrowser) + +For the creation of [RFC4122](https://www.ietf.org/rfc/rfc4122.txt) UUIDs + +- **Complete** - Support for RFC4122 version 1, 3, 4, and 5 UUIDs +- **Cross-platform** - Support for ... + - CommonJS, [ECMAScript Modules](#ecmascript-modules) and [CDN builds](#cdn-builds) + - NodeJS 12+ ([LTS releases](https://github.com/nodejs/Release)) + - Chrome, Safari, Firefox, Edge browsers + - Webpack and rollup.js module bundlers + - [React Native / Expo](#react-native--expo) +- **Secure** - Cryptographically-strong random values +- **Small** - Zero-dependency, small footprint, plays nice with "tree shaking" packagers +- **CLI** - Includes the [`uuid` command line](#command-line) utility + +> **Note** Upgrading from `uuid@3`? Your code is probably okay, but check out [Upgrading From `uuid@3`](#upgrading-from-uuid3) for details. + +> **Note** Only interested in creating a version 4 UUID? 
You might be able to use [`crypto.randomUUID()`](https://developer.mozilla.org/en-US/docs/Web/API/Crypto/randomUUID), eliminating the need to install this library. + +## Quickstart + +To create a random UUID... + +**1. Install** + +```shell +npm install uuid +``` + +**2. Create a UUID** (ES6 module syntax) + +```javascript +import { v4 as uuidv4 } from 'uuid'; +uuidv4(); // ⇨ '9b1deb4d-3b7d-4bad-9bdd-2b0d7b3dcb6d' +``` + +... or using CommonJS syntax: + +```javascript +const { v4: uuidv4 } = require('uuid'); +uuidv4(); // ⇨ '1b9d6bcd-bbfd-4b2d-9b5d-ab8dfbbd4bed' +``` + +For timestamp UUIDs, namespace UUIDs, and other options read on ... + +## API Summary + +| | | | +| --- | --- | --- | +| [`uuid.NIL`](#uuidnil) | The nil UUID string (all zeros) | New in `uuid@8.3` | +| [`uuid.parse()`](#uuidparsestr) | Convert UUID string to array of bytes | New in `uuid@8.3` | +| [`uuid.stringify()`](#uuidstringifyarr-offset) | Convert array of bytes to UUID string | New in `uuid@8.3` | +| [`uuid.v1()`](#uuidv1options-buffer-offset) | Create a version 1 (timestamp) UUID | | +| [`uuid.v3()`](#uuidv3name-namespace-buffer-offset) | Create a version 3 (namespace w/ MD5) UUID | | +| [`uuid.v4()`](#uuidv4options-buffer-offset) | Create a version 4 (random) UUID | | +| [`uuid.v5()`](#uuidv5name-namespace-buffer-offset) | Create a version 5 (namespace w/ SHA-1) UUID | | +| [`uuid.validate()`](#uuidvalidatestr) | Test a string to see if it is a valid UUID | New in `uuid@8.3` | +| [`uuid.version()`](#uuidversionstr) | Detect RFC version of a UUID | New in `uuid@8.3` | + +## API + +### uuid.NIL + +The nil UUID string (all zeros). 
+ +Example: + +```javascript +import { NIL as NIL_UUID } from 'uuid'; + +NIL_UUID; // ⇨ '00000000-0000-0000-0000-000000000000' +``` + +### uuid.parse(str) + +Convert UUID string to array of bytes + +| | | +| --------- | ---------------------------------------- | +| `str` | A valid UUID `String` | +| _returns_ | `Uint8Array[16]` | +| _throws_ | `TypeError` if `str` is not a valid UUID | + +Note: Ordering of values in the byte arrays used by `parse()` and `stringify()` follows the left ↠ right order of hex-pairs in UUID strings. As shown in the example below. + +Example: + +```javascript +import { parse as uuidParse } from 'uuid'; + +// Parse a UUID +const bytes = uuidParse('6ec0bd7f-11c0-43da-975e-2a8ad9ebae0b'); + +// Convert to hex strings to show byte order (for documentation purposes) +[...bytes].map((v) => v.toString(16).padStart(2, '0')); // ⇨ + // [ + // '6e', 'c0', 'bd', '7f', + // '11', 'c0', '43', 'da', + // '97', '5e', '2a', '8a', + // 'd9', 'eb', 'ae', '0b' + // ] +``` + +### uuid.stringify(arr[, offset]) + +Convert array of bytes to UUID string + +| | | +| -------------- | ---------------------------------------------------------------------------- | +| `arr` | `Array`-like collection of 16 values (starting from `offset`) between 0-255. | +| [`offset` = 0] | `Number` Starting index in the Array | +| _returns_ | `String` | +| _throws_ | `TypeError` if a valid UUID string cannot be generated | + +Note: Ordering of values in the byte arrays used by `parse()` and `stringify()` follows the left ↠ right order of hex-pairs in UUID strings. As shown in the example below. 
+ +Example: + +```javascript +import { stringify as uuidStringify } from 'uuid'; + +const uuidBytes = [ + 0x6e, 0xc0, 0xbd, 0x7f, 0x11, 0xc0, 0x43, 0xda, 0x97, 0x5e, 0x2a, 0x8a, 0xd9, 0xeb, 0xae, 0x0b, +]; + +uuidStringify(uuidBytes); // ⇨ '6ec0bd7f-11c0-43da-975e-2a8ad9ebae0b' +``` + +### uuid.v1([options[, buffer[, offset]]]) + +Create an RFC version 1 (timestamp) UUID + +| | | +| --- | --- | +| [`options`] | `Object` with one or more of the following properties: | +| [`options.node` ] | RFC "node" field as an `Array[6]` of byte values (per 4.1.6) | +| [`options.clockseq`] | RFC "clock sequence" as a `Number` between 0 - 0x3fff | +| [`options.msecs`] | RFC "timestamp" field (`Number` of milliseconds, unix epoch) | +| [`options.nsecs`] | RFC "timestamp" field (`Number` of nanoseconds to add to `msecs`, should be 0-10,000) | +| [`options.random`] | `Array` of 16 random bytes (0-255) | +| [`options.rng`] | Alternative to `options.random`, a `Function` that returns an `Array` of 16 random bytes (0-255) | +| [`buffer`] | `Array \| Buffer` If specified, uuid will be written here in byte-form, starting at `offset` | +| [`offset` = 0] | `Number` Index to start writing UUID bytes in `buffer` | +| _returns_ | UUID `String` if no `buffer` is specified, otherwise returns `buffer` | +| _throws_ | `Error` if more than 10M UUIDs/sec are requested | + +Note: The default [node id](https://tools.ietf.org/html/rfc4122#section-4.1.6) (the last 12 digits in the UUID) is generated once, randomly, on process startup, and then remains unchanged for the duration of the process. + +Note: `options.random` and `options.rng` are only meaningful on the very first call to `v1()`, where they may be passed to initialize the internal `node` and `clockseq` fields. 
+ +Example: + +```javascript +import { v1 as uuidv1 } from 'uuid'; + +uuidv1(); // ⇨ '2c5ea4c0-4067-11e9-8bad-9b1deb4d3b7d' +``` + +Example using `options`: + +```javascript +import { v1 as uuidv1 } from 'uuid'; + +const v1options = { + node: [0x01, 0x23, 0x45, 0x67, 0x89, 0xab], + clockseq: 0x1234, + msecs: new Date('2011-11-01').getTime(), + nsecs: 5678, +}; +uuidv1(v1options); // ⇨ '710b962e-041c-11e1-9234-0123456789ab' +``` + +### uuid.v3(name, namespace[, buffer[, offset]]) + +Create an RFC version 3 (namespace w/ MD5) UUID + +API is identical to `v5()`, but uses "v3" instead. + +⚠️ Note: Per the RFC, "_If backward compatibility is not an issue, SHA-1 [Version 5] is preferred_." + +### uuid.v4([options[, buffer[, offset]]]) + +Create an RFC version 4 (random) UUID + +| | | +| --- | --- | +| [`options`] | `Object` with one or more of the following properties: | +| [`options.random`] | `Array` of 16 random bytes (0-255) | +| [`options.rng`] | Alternative to `options.random`, a `Function` that returns an `Array` of 16 random bytes (0-255) | +| [`buffer`] | `Array \| Buffer` If specified, uuid will be written here in byte-form, starting at `offset` | +| [`offset` = 0] | `Number` Index to start writing UUID bytes in `buffer` | +| _returns_ | UUID `String` if no `buffer` is specified, otherwise returns `buffer` | + +Example: + +```javascript +import { v4 as uuidv4 } from 'uuid'; + +uuidv4(); // ⇨ '1b9d6bcd-bbfd-4b2d-9b5d-ab8dfbbd4bed' +``` + +Example using predefined `random` values: + +```javascript +import { v4 as uuidv4 } from 'uuid'; + +const v4options = { + random: [ + 0x10, 0x91, 0x56, 0xbe, 0xc4, 0xfb, 0xc1, 0xea, 0x71, 0xb4, 0xef, 0xe1, 0x67, 0x1c, 0x58, 0x36, + ], +}; +uuidv4(v4options); // ⇨ '109156be-c4fb-41ea-b1b4-efe1671c5836' +``` + +### uuid.v5(name, namespace[, buffer[, offset]]) + +Create an RFC version 5 (namespace w/ SHA-1) UUID + +| | | +| --- | --- | +| `name` | `String \| Array` | +| `namespace` | `String \| Array[16]` Namespace UUID | +| 
[`buffer`] | `Array \| Buffer` If specified, uuid will be written here in byte-form, starting at `offset` | +| [`offset` = 0] | `Number` Index to start writing UUID bytes in `buffer` | +| _returns_ | UUID `String` if no `buffer` is specified, otherwise returns `buffer` | + +Note: The RFC `DNS` and `URL` namespaces are available as `v5.DNS` and `v5.URL`. + +Example with custom namespace: + +```javascript +import { v5 as uuidv5 } from 'uuid'; + +// Define a custom namespace. Readers, create your own using something like +// https://www.uuidgenerator.net/ +const MY_NAMESPACE = '1b671a64-40d5-491e-99b0-da01ff1f3341'; + +uuidv5('Hello, World!', MY_NAMESPACE); // ⇨ '630eb68f-e0fa-5ecc-887a-7c7a62614681' +``` + +Example with RFC `URL` namespace: + +```javascript +import { v5 as uuidv5 } from 'uuid'; + +uuidv5('https://www.w3.org/', uuidv5.URL); // ⇨ 'c106a26a-21bb-5538-8bf2-57095d1976c1' +``` + +### uuid.validate(str) + +Test a string to see if it is a valid UUID + +| | | +| --------- | --------------------------------------------------- | +| `str` | `String` to validate | +| _returns_ | `true` if string is a valid UUID, `false` otherwise | + +Example: + +```javascript +import { validate as uuidValidate } from 'uuid'; + +uuidValidate('not a UUID'); // ⇨ false +uuidValidate('6ec0bd7f-11c0-43da-975e-2a8ad9ebae0b'); // ⇨ true +``` + +Using `validate` and `version` together it is possible to do per-version validation, e.g. validate for only v4 UUIds. 
+ +```javascript +import { version as uuidVersion } from 'uuid'; +import { validate as uuidValidate } from 'uuid'; + +function uuidValidateV4(uuid) { + return uuidValidate(uuid) && uuidVersion(uuid) === 4; +} + +const v1Uuid = 'd9428888-122b-11e1-b85c-61cd3cbb3210'; +const v4Uuid = '109156be-c4fb-41ea-b1b4-efe1671c5836'; + +uuidValidateV4(v4Uuid); // ⇨ true +uuidValidateV4(v1Uuid); // ⇨ false +``` + +### uuid.version(str) + +Detect RFC version of a UUID + +| | | +| --------- | ---------------------------------------- | +| `str` | A valid UUID `String` | +| _returns_ | `Number` The RFC version of the UUID | +| _throws_ | `TypeError` if `str` is not a valid UUID | + +Example: + +```javascript +import { version as uuidVersion } from 'uuid'; + +uuidVersion('45637ec4-c85f-11ea-87d0-0242ac130003'); // ⇨ 1 +uuidVersion('6ec0bd7f-11c0-43da-975e-2a8ad9ebae0b'); // ⇨ 4 +``` + +## Command Line + +UUIDs can be generated from the command line using `uuid`. + +```shell +$ npx uuid +ddeb27fb-d9a0-4624-be4d-4615062daed4 +``` + +The default is to generate version 4 UUIDS, however the other versions are supported. Type `uuid --help` for details: + +```shell +$ npx uuid --help + +Usage: + uuid + uuid v1 + uuid v3 + uuid v4 + uuid v5 + uuid --help + +Note: may be "URL" or "DNS" to use the corresponding UUIDs +defined by RFC4122 +``` + +## ECMAScript Modules + +This library comes with [ECMAScript Modules](https://www.ecma-international.org/ecma-262/6.0/#sec-modules) (ESM) support for Node.js versions that support it ([example](./examples/node-esmodules/)) as well as bundlers like [rollup.js](https://rollupjs.org/guide/en/#tree-shaking) ([example](./examples/browser-rollup/)) and [webpack](https://webpack.js.org/guides/tree-shaking/) ([example](./examples/browser-webpack/)) (targeting both, Node.js and browser environments). 
+ +```javascript +import { v4 as uuidv4 } from 'uuid'; +uuidv4(); // ⇨ '1b9d6bcd-bbfd-4b2d-9b5d-ab8dfbbd4bed' +``` + +To run the examples you must first create a dist build of this library in the module root: + +```shell +npm run build +``` + +## CDN Builds + +### ECMAScript Modules + +To load this module directly into modern browsers that [support loading ECMAScript Modules](https://caniuse.com/#feat=es6-module) you can make use of [jspm](https://jspm.org/): + +```html + +``` + +### UMD + +As of `uuid@9` [UMD (Universal Module Definition)](https://github.com/umdjs/umd) builds are no longer shipped with this library. + +If you need a UMD build of this library, use a bundler like Webpack or Rollup. Alternatively, refer to the documentation of [`uuid@8.3.2`](https://github.com/uuidjs/uuid/blob/v8.3.2/README.md#umd) which was the last version that shipped UMD builds. + +## Known issues + +### Duplicate UUIDs (Googlebot) + +This module may generate duplicate UUIDs when run in clients with _deterministic_ random number generators, such as [Googlebot crawlers](https://developers.google.com/search/docs/advanced/crawling/overview-google-crawlers). This can cause problems for apps that expect client-generated UUIDs to always be unique. Developers should be prepared for this and have a strategy for dealing with possible collisions, such as: + +- Check for duplicate UUIDs, fail gracefully +- Disable write operations for Googlebot clients + +### "getRandomValues() not supported" + +This error occurs in environments where the standard [`crypto.getRandomValues()`](https://developer.mozilla.org/en-US/docs/Web/API/Crypto/getRandomValues) API is not supported. This issue can be resolved by adding an appropriate polyfill: + +### React Native / Expo + +1. Install [`react-native-get-random-values`](https://github.com/LinusU/react-native-get-random-values#readme) +1. Import it _before_ `uuid`. 
Since `uuid` might also appear as a transitive dependency of some other imports it's safest to just import `react-native-get-random-values` as the very first thing in your entry point: + +```javascript +import 'react-native-get-random-values'; +import { v4 as uuidv4 } from 'uuid'; +``` + +Note: If you are using Expo, you must be using at least `react-native-get-random-values@1.5.0` and `expo@39.0.0`. + +### Web Workers / Service Workers (Edge <= 18) + +[In Edge <= 18, Web Crypto is not supported in Web Workers or Service Workers](https://caniuse.com/#feat=cryptography) and we are not aware of a polyfill (let us know if you find one, please). + +### IE 11 (Internet Explorer) + +Support for IE11 and other legacy browsers has been dropped as of `uuid@9`. If you need to support legacy browsers, you can always transpile the uuid module source yourself (e.g. using [Babel](https://babeljs.io/)). + +## Upgrading From `uuid@7` + +### Only Named Exports Supported When Using with Node.js ESM + +`uuid@7` did not come with native ECMAScript Module (ESM) support for Node.js. Importing it in Node.js ESM consequently imported the CommonJS source with a default export. This library now comes with true Node.js ESM support and only provides named exports. + +Instead of doing: + +```javascript +import uuid from 'uuid'; +uuid.v4(); +``` + +you will now have to use the named exports: + +```javascript +import { v4 as uuidv4 } from 'uuid'; +uuidv4(); +``` + +### Deep Requires No Longer Supported + +Deep requires like `require('uuid/v4')` [which have been deprecated in `uuid@7`](#deep-requires-now-deprecated) are no longer supported. + +## Upgrading From `uuid@3` + +"_Wait... what happened to `uuid@4` thru `uuid@6`?!?_" + +In order to avoid confusion with RFC [version 4](#uuidv4options-buffer-offset) and [version 5](#uuidv5name-namespace-buffer-offset) UUIDs, and a possible [version 6](http://gh.peabody.io/uuidv6/), releases 4 thru 6 of this module have been skipped. 
+ +### Deep Requires Now Deprecated + +`uuid@3` encouraged the use of deep requires to minimize the bundle size of browser builds: + +```javascript +const uuidv4 = require('uuid/v4'); // <== NOW DEPRECATED! +uuidv4(); +``` + +As of `uuid@7` this library now provides ECMAScript modules builds, which allow packagers like Webpack and Rollup to do "tree-shaking" to remove dead code. Instead, use the `import` syntax: + +```javascript +import { v4 as uuidv4 } from 'uuid'; +uuidv4(); +``` + +... or for CommonJS: + +```javascript +const { v4: uuidv4 } = require('uuid'); +uuidv4(); +``` + +### Default Export Removed + +`uuid@3` was exporting the Version 4 UUID method as a default export: + +```javascript +const uuid = require('uuid'); // <== REMOVED! +``` + +This usage pattern was already discouraged in `uuid@3` and has been removed in `uuid@7`. + +--- + +Markdown generated from [README_js.md](README_js.md) by diff --git a/amplify/functions/fetchDocuments/node_modules/uuid/dist/bin/uuid b/amplify/functions/fetchDocuments/node_modules/uuid/dist/bin/uuid new file mode 100755 index 0000000..f38d2ee --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/uuid/dist/bin/uuid @@ -0,0 +1,2 @@ +#!/usr/bin/env node +require('../uuid-bin'); diff --git a/amplify/functions/fetchDocuments/node_modules/uuid/dist/commonjs-browser/index.js b/amplify/functions/fetchDocuments/node_modules/uuid/dist/commonjs-browser/index.js new file mode 100644 index 0000000..5586dd3 --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/uuid/dist/commonjs-browser/index.js @@ -0,0 +1,79 @@ +"use strict"; + +Object.defineProperty(exports, "__esModule", { + value: true +}); +Object.defineProperty(exports, "NIL", { + enumerable: true, + get: function get() { + return _nil.default; + } +}); +Object.defineProperty(exports, "parse", { + enumerable: true, + get: function get() { + return _parse.default; + } +}); +Object.defineProperty(exports, "stringify", { + enumerable: true, + get: function 
get() { + return _stringify.default; + } +}); +Object.defineProperty(exports, "v1", { + enumerable: true, + get: function get() { + return _v.default; + } +}); +Object.defineProperty(exports, "v3", { + enumerable: true, + get: function get() { + return _v2.default; + } +}); +Object.defineProperty(exports, "v4", { + enumerable: true, + get: function get() { + return _v3.default; + } +}); +Object.defineProperty(exports, "v5", { + enumerable: true, + get: function get() { + return _v4.default; + } +}); +Object.defineProperty(exports, "validate", { + enumerable: true, + get: function get() { + return _validate.default; + } +}); +Object.defineProperty(exports, "version", { + enumerable: true, + get: function get() { + return _version.default; + } +}); + +var _v = _interopRequireDefault(require("./v1.js")); + +var _v2 = _interopRequireDefault(require("./v3.js")); + +var _v3 = _interopRequireDefault(require("./v4.js")); + +var _v4 = _interopRequireDefault(require("./v5.js")); + +var _nil = _interopRequireDefault(require("./nil.js")); + +var _version = _interopRequireDefault(require("./version.js")); + +var _validate = _interopRequireDefault(require("./validate.js")); + +var _stringify = _interopRequireDefault(require("./stringify.js")); + +var _parse = _interopRequireDefault(require("./parse.js")); + +function _interopRequireDefault(obj) { return obj && obj.__esModule ? 
obj : { default: obj }; } \ No newline at end of file diff --git a/amplify/functions/fetchDocuments/node_modules/uuid/dist/commonjs-browser/md5.js b/amplify/functions/fetchDocuments/node_modules/uuid/dist/commonjs-browser/md5.js new file mode 100644 index 0000000..7a4582a --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/uuid/dist/commonjs-browser/md5.js @@ -0,0 +1,223 @@ +"use strict"; + +Object.defineProperty(exports, "__esModule", { + value: true +}); +exports.default = void 0; + +/* + * Browser-compatible JavaScript MD5 + * + * Modification of JavaScript MD5 + * https://github.com/blueimp/JavaScript-MD5 + * + * Copyright 2011, Sebastian Tschan + * https://blueimp.net + * + * Licensed under the MIT license: + * https://opensource.org/licenses/MIT + * + * Based on + * A JavaScript implementation of the RSA Data Security, Inc. MD5 Message + * Digest Algorithm, as defined in RFC 1321. + * Version 2.2 Copyright (C) Paul Johnston 1999 - 2009 + * Other contributors: Greg Holt, Andrew Kepert, Ydnar, Lostinet + * Distributed under the BSD License + * See http://pajhome.org.uk/crypt/md5 for more info. 
+ */ +function md5(bytes) { + if (typeof bytes === 'string') { + const msg = unescape(encodeURIComponent(bytes)); // UTF8 escape + + bytes = new Uint8Array(msg.length); + + for (let i = 0; i < msg.length; ++i) { + bytes[i] = msg.charCodeAt(i); + } + } + + return md5ToHexEncodedArray(wordsToMd5(bytesToWords(bytes), bytes.length * 8)); +} +/* + * Convert an array of little-endian words to an array of bytes + */ + + +function md5ToHexEncodedArray(input) { + const output = []; + const length32 = input.length * 32; + const hexTab = '0123456789abcdef'; + + for (let i = 0; i < length32; i += 8) { + const x = input[i >> 5] >>> i % 32 & 0xff; + const hex = parseInt(hexTab.charAt(x >>> 4 & 0x0f) + hexTab.charAt(x & 0x0f), 16); + output.push(hex); + } + + return output; +} +/** + * Calculate output length with padding and bit length + */ + + +function getOutputLength(inputLength8) { + return (inputLength8 + 64 >>> 9 << 4) + 14 + 1; +} +/* + * Calculate the MD5 of an array of little-endian words, and a bit length. 
+ */ + + +function wordsToMd5(x, len) { + /* append padding */ + x[len >> 5] |= 0x80 << len % 32; + x[getOutputLength(len) - 1] = len; + let a = 1732584193; + let b = -271733879; + let c = -1732584194; + let d = 271733878; + + for (let i = 0; i < x.length; i += 16) { + const olda = a; + const oldb = b; + const oldc = c; + const oldd = d; + a = md5ff(a, b, c, d, x[i], 7, -680876936); + d = md5ff(d, a, b, c, x[i + 1], 12, -389564586); + c = md5ff(c, d, a, b, x[i + 2], 17, 606105819); + b = md5ff(b, c, d, a, x[i + 3], 22, -1044525330); + a = md5ff(a, b, c, d, x[i + 4], 7, -176418897); + d = md5ff(d, a, b, c, x[i + 5], 12, 1200080426); + c = md5ff(c, d, a, b, x[i + 6], 17, -1473231341); + b = md5ff(b, c, d, a, x[i + 7], 22, -45705983); + a = md5ff(a, b, c, d, x[i + 8], 7, 1770035416); + d = md5ff(d, a, b, c, x[i + 9], 12, -1958414417); + c = md5ff(c, d, a, b, x[i + 10], 17, -42063); + b = md5ff(b, c, d, a, x[i + 11], 22, -1990404162); + a = md5ff(a, b, c, d, x[i + 12], 7, 1804603682); + d = md5ff(d, a, b, c, x[i + 13], 12, -40341101); + c = md5ff(c, d, a, b, x[i + 14], 17, -1502002290); + b = md5ff(b, c, d, a, x[i + 15], 22, 1236535329); + a = md5gg(a, b, c, d, x[i + 1], 5, -165796510); + d = md5gg(d, a, b, c, x[i + 6], 9, -1069501632); + c = md5gg(c, d, a, b, x[i + 11], 14, 643717713); + b = md5gg(b, c, d, a, x[i], 20, -373897302); + a = md5gg(a, b, c, d, x[i + 5], 5, -701558691); + d = md5gg(d, a, b, c, x[i + 10], 9, 38016083); + c = md5gg(c, d, a, b, x[i + 15], 14, -660478335); + b = md5gg(b, c, d, a, x[i + 4], 20, -405537848); + a = md5gg(a, b, c, d, x[i + 9], 5, 568446438); + d = md5gg(d, a, b, c, x[i + 14], 9, -1019803690); + c = md5gg(c, d, a, b, x[i + 3], 14, -187363961); + b = md5gg(b, c, d, a, x[i + 8], 20, 1163531501); + a = md5gg(a, b, c, d, x[i + 13], 5, -1444681467); + d = md5gg(d, a, b, c, x[i + 2], 9, -51403784); + c = md5gg(c, d, a, b, x[i + 7], 14, 1735328473); + b = md5gg(b, c, d, a, x[i + 12], 20, -1926607734); + a = md5hh(a, b, c, d, x[i + 5], 4, 
-378558); + d = md5hh(d, a, b, c, x[i + 8], 11, -2022574463); + c = md5hh(c, d, a, b, x[i + 11], 16, 1839030562); + b = md5hh(b, c, d, a, x[i + 14], 23, -35309556); + a = md5hh(a, b, c, d, x[i + 1], 4, -1530992060); + d = md5hh(d, a, b, c, x[i + 4], 11, 1272893353); + c = md5hh(c, d, a, b, x[i + 7], 16, -155497632); + b = md5hh(b, c, d, a, x[i + 10], 23, -1094730640); + a = md5hh(a, b, c, d, x[i + 13], 4, 681279174); + d = md5hh(d, a, b, c, x[i], 11, -358537222); + c = md5hh(c, d, a, b, x[i + 3], 16, -722521979); + b = md5hh(b, c, d, a, x[i + 6], 23, 76029189); + a = md5hh(a, b, c, d, x[i + 9], 4, -640364487); + d = md5hh(d, a, b, c, x[i + 12], 11, -421815835); + c = md5hh(c, d, a, b, x[i + 15], 16, 530742520); + b = md5hh(b, c, d, a, x[i + 2], 23, -995338651); + a = md5ii(a, b, c, d, x[i], 6, -198630844); + d = md5ii(d, a, b, c, x[i + 7], 10, 1126891415); + c = md5ii(c, d, a, b, x[i + 14], 15, -1416354905); + b = md5ii(b, c, d, a, x[i + 5], 21, -57434055); + a = md5ii(a, b, c, d, x[i + 12], 6, 1700485571); + d = md5ii(d, a, b, c, x[i + 3], 10, -1894986606); + c = md5ii(c, d, a, b, x[i + 10], 15, -1051523); + b = md5ii(b, c, d, a, x[i + 1], 21, -2054922799); + a = md5ii(a, b, c, d, x[i + 8], 6, 1873313359); + d = md5ii(d, a, b, c, x[i + 15], 10, -30611744); + c = md5ii(c, d, a, b, x[i + 6], 15, -1560198380); + b = md5ii(b, c, d, a, x[i + 13], 21, 1309151649); + a = md5ii(a, b, c, d, x[i + 4], 6, -145523070); + d = md5ii(d, a, b, c, x[i + 11], 10, -1120210379); + c = md5ii(c, d, a, b, x[i + 2], 15, 718787259); + b = md5ii(b, c, d, a, x[i + 9], 21, -343485551); + a = safeAdd(a, olda); + b = safeAdd(b, oldb); + c = safeAdd(c, oldc); + d = safeAdd(d, oldd); + } + + return [a, b, c, d]; +} +/* + * Convert an array bytes to an array of little-endian words + * Characters >255 have their high-byte silently ignored. 
+ */ + + +function bytesToWords(input) { + if (input.length === 0) { + return []; + } + + const length8 = input.length * 8; + const output = new Uint32Array(getOutputLength(length8)); + + for (let i = 0; i < length8; i += 8) { + output[i >> 5] |= (input[i / 8] & 0xff) << i % 32; + } + + return output; +} +/* + * Add integers, wrapping at 2^32. This uses 16-bit operations internally + * to work around bugs in some JS interpreters. + */ + + +function safeAdd(x, y) { + const lsw = (x & 0xffff) + (y & 0xffff); + const msw = (x >> 16) + (y >> 16) + (lsw >> 16); + return msw << 16 | lsw & 0xffff; +} +/* + * Bitwise rotate a 32-bit number to the left. + */ + + +function bitRotateLeft(num, cnt) { + return num << cnt | num >>> 32 - cnt; +} +/* + * These functions implement the four basic operations the algorithm uses. + */ + + +function md5cmn(q, a, b, x, s, t) { + return safeAdd(bitRotateLeft(safeAdd(safeAdd(a, q), safeAdd(x, t)), s), b); +} + +function md5ff(a, b, c, d, x, s, t) { + return md5cmn(b & c | ~b & d, a, b, x, s, t); +} + +function md5gg(a, b, c, d, x, s, t) { + return md5cmn(b & d | c & ~d, a, b, x, s, t); +} + +function md5hh(a, b, c, d, x, s, t) { + return md5cmn(b ^ c ^ d, a, b, x, s, t); +} + +function md5ii(a, b, c, d, x, s, t) { + return md5cmn(c ^ (b | ~d), a, b, x, s, t); +} + +var _default = md5; +exports.default = _default; \ No newline at end of file diff --git a/amplify/functions/fetchDocuments/node_modules/uuid/dist/commonjs-browser/native.js b/amplify/functions/fetchDocuments/node_modules/uuid/dist/commonjs-browser/native.js new file mode 100644 index 0000000..c2eea59 --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/uuid/dist/commonjs-browser/native.js @@ -0,0 +1,11 @@ +"use strict"; + +Object.defineProperty(exports, "__esModule", { + value: true +}); +exports.default = void 0; +const randomUUID = typeof crypto !== 'undefined' && crypto.randomUUID && crypto.randomUUID.bind(crypto); +var _default = { + randomUUID +}; 
+exports.default = _default; \ No newline at end of file diff --git a/amplify/functions/fetchDocuments/node_modules/uuid/dist/commonjs-browser/nil.js b/amplify/functions/fetchDocuments/node_modules/uuid/dist/commonjs-browser/nil.js new file mode 100644 index 0000000..7ade577 --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/uuid/dist/commonjs-browser/nil.js @@ -0,0 +1,8 @@ +"use strict"; + +Object.defineProperty(exports, "__esModule", { + value: true +}); +exports.default = void 0; +var _default = '00000000-0000-0000-0000-000000000000'; +exports.default = _default; \ No newline at end of file diff --git a/amplify/functions/fetchDocuments/node_modules/uuid/dist/commonjs-browser/parse.js b/amplify/functions/fetchDocuments/node_modules/uuid/dist/commonjs-browser/parse.js new file mode 100644 index 0000000..4c69fc3 --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/uuid/dist/commonjs-browser/parse.js @@ -0,0 +1,45 @@ +"use strict"; + +Object.defineProperty(exports, "__esModule", { + value: true +}); +exports.default = void 0; + +var _validate = _interopRequireDefault(require("./validate.js")); + +function _interopRequireDefault(obj) { return obj && obj.__esModule ? obj : { default: obj }; } + +function parse(uuid) { + if (!(0, _validate.default)(uuid)) { + throw TypeError('Invalid UUID'); + } + + let v; + const arr = new Uint8Array(16); // Parse ########-....-....-....-............ + + arr[0] = (v = parseInt(uuid.slice(0, 8), 16)) >>> 24; + arr[1] = v >>> 16 & 0xff; + arr[2] = v >>> 8 & 0xff; + arr[3] = v & 0xff; // Parse ........-####-....-....-............ + + arr[4] = (v = parseInt(uuid.slice(9, 13), 16)) >>> 8; + arr[5] = v & 0xff; // Parse ........-....-####-....-............ + + arr[6] = (v = parseInt(uuid.slice(14, 18), 16)) >>> 8; + arr[7] = v & 0xff; // Parse ........-....-....-####-............ 
+ + arr[8] = (v = parseInt(uuid.slice(19, 23), 16)) >>> 8; + arr[9] = v & 0xff; // Parse ........-....-....-....-############ + // (Use "/" to avoid 32-bit truncation when bit-shifting high-order bytes) + + arr[10] = (v = parseInt(uuid.slice(24, 36), 16)) / 0x10000000000 & 0xff; + arr[11] = v / 0x100000000 & 0xff; + arr[12] = v >>> 24 & 0xff; + arr[13] = v >>> 16 & 0xff; + arr[14] = v >>> 8 & 0xff; + arr[15] = v & 0xff; + return arr; +} + +var _default = parse; +exports.default = _default; \ No newline at end of file diff --git a/amplify/functions/fetchDocuments/node_modules/uuid/dist/commonjs-browser/regex.js b/amplify/functions/fetchDocuments/node_modules/uuid/dist/commonjs-browser/regex.js new file mode 100644 index 0000000..1ef91d6 --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/uuid/dist/commonjs-browser/regex.js @@ -0,0 +1,8 @@ +"use strict"; + +Object.defineProperty(exports, "__esModule", { + value: true +}); +exports.default = void 0; +var _default = /^(?:[0-9a-f]{8}-[0-9a-f]{4}-[1-5][0-9a-f]{3}-[89ab][0-9a-f]{3}-[0-9a-f]{12}|00000000-0000-0000-0000-000000000000)$/i; +exports.default = _default; \ No newline at end of file diff --git a/amplify/functions/fetchDocuments/node_modules/uuid/dist/commonjs-browser/rng.js b/amplify/functions/fetchDocuments/node_modules/uuid/dist/commonjs-browser/rng.js new file mode 100644 index 0000000..d067cdb --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/uuid/dist/commonjs-browser/rng.js @@ -0,0 +1,25 @@ +"use strict"; + +Object.defineProperty(exports, "__esModule", { + value: true +}); +exports.default = rng; +// Unique ID creation requires a high quality random # generator. In the browser we therefore +// require the crypto API and do not support built-in fallback to lower quality random number +// generators (like Math.random()). 
+let getRandomValues; +const rnds8 = new Uint8Array(16); + +function rng() { + // lazy load so that environments that need to polyfill have a chance to do so + if (!getRandomValues) { + // getRandomValues needs to be invoked in a context where "this" is a Crypto implementation. + getRandomValues = typeof crypto !== 'undefined' && crypto.getRandomValues && crypto.getRandomValues.bind(crypto); + + if (!getRandomValues) { + throw new Error('crypto.getRandomValues() not supported. See https://github.com/uuidjs/uuid#getrandomvalues-not-supported'); + } + } + + return getRandomValues(rnds8); +} \ No newline at end of file diff --git a/amplify/functions/fetchDocuments/node_modules/uuid/dist/commonjs-browser/sha1.js b/amplify/functions/fetchDocuments/node_modules/uuid/dist/commonjs-browser/sha1.js new file mode 100644 index 0000000..24cbced --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/uuid/dist/commonjs-browser/sha1.js @@ -0,0 +1,104 @@ +"use strict"; + +Object.defineProperty(exports, "__esModule", { + value: true +}); +exports.default = void 0; + +// Adapted from Chris Veness' SHA1 code at +// http://www.movable-type.co.uk/scripts/sha1.html +function f(s, x, y, z) { + switch (s) { + case 0: + return x & y ^ ~x & z; + + case 1: + return x ^ y ^ z; + + case 2: + return x & y ^ x & z ^ y & z; + + case 3: + return x ^ y ^ z; + } +} + +function ROTL(x, n) { + return x << n | x >>> 32 - n; +} + +function sha1(bytes) { + const K = [0x5a827999, 0x6ed9eba1, 0x8f1bbcdc, 0xca62c1d6]; + const H = [0x67452301, 0xefcdab89, 0x98badcfe, 0x10325476, 0xc3d2e1f0]; + + if (typeof bytes === 'string') { + const msg = unescape(encodeURIComponent(bytes)); // UTF8 escape + + bytes = []; + + for (let i = 0; i < msg.length; ++i) { + bytes.push(msg.charCodeAt(i)); + } + } else if (!Array.isArray(bytes)) { + // Convert Array-like to Array + bytes = Array.prototype.slice.call(bytes); + } + + bytes.push(0x80); + const l = bytes.length / 4 + 2; + const N = Math.ceil(l / 16); + const 
M = new Array(N); + + for (let i = 0; i < N; ++i) { + const arr = new Uint32Array(16); + + for (let j = 0; j < 16; ++j) { + arr[j] = bytes[i * 64 + j * 4] << 24 | bytes[i * 64 + j * 4 + 1] << 16 | bytes[i * 64 + j * 4 + 2] << 8 | bytes[i * 64 + j * 4 + 3]; + } + + M[i] = arr; + } + + M[N - 1][14] = (bytes.length - 1) * 8 / Math.pow(2, 32); + M[N - 1][14] = Math.floor(M[N - 1][14]); + M[N - 1][15] = (bytes.length - 1) * 8 & 0xffffffff; + + for (let i = 0; i < N; ++i) { + const W = new Uint32Array(80); + + for (let t = 0; t < 16; ++t) { + W[t] = M[i][t]; + } + + for (let t = 16; t < 80; ++t) { + W[t] = ROTL(W[t - 3] ^ W[t - 8] ^ W[t - 14] ^ W[t - 16], 1); + } + + let a = H[0]; + let b = H[1]; + let c = H[2]; + let d = H[3]; + let e = H[4]; + + for (let t = 0; t < 80; ++t) { + const s = Math.floor(t / 20); + const T = ROTL(a, 5) + f(s, b, c, d) + e + K[s] + W[t] >>> 0; + e = d; + d = c; + c = ROTL(b, 30) >>> 0; + b = a; + a = T; + } + + H[0] = H[0] + a >>> 0; + H[1] = H[1] + b >>> 0; + H[2] = H[2] + c >>> 0; + H[3] = H[3] + d >>> 0; + H[4] = H[4] + e >>> 0; + } + + return [H[0] >> 24 & 0xff, H[0] >> 16 & 0xff, H[0] >> 8 & 0xff, H[0] & 0xff, H[1] >> 24 & 0xff, H[1] >> 16 & 0xff, H[1] >> 8 & 0xff, H[1] & 0xff, H[2] >> 24 & 0xff, H[2] >> 16 & 0xff, H[2] >> 8 & 0xff, H[2] & 0xff, H[3] >> 24 & 0xff, H[3] >> 16 & 0xff, H[3] >> 8 & 0xff, H[3] & 0xff, H[4] >> 24 & 0xff, H[4] >> 16 & 0xff, H[4] >> 8 & 0xff, H[4] & 0xff]; +} + +var _default = sha1; +exports.default = _default; \ No newline at end of file diff --git a/amplify/functions/fetchDocuments/node_modules/uuid/dist/commonjs-browser/stringify.js b/amplify/functions/fetchDocuments/node_modules/uuid/dist/commonjs-browser/stringify.js new file mode 100644 index 0000000..390bf89 --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/uuid/dist/commonjs-browser/stringify.js @@ -0,0 +1,44 @@ +"use strict"; + +Object.defineProperty(exports, "__esModule", { + value: true +}); +exports.default = void 0; 
+exports.unsafeStringify = unsafeStringify; + +var _validate = _interopRequireDefault(require("./validate.js")); + +function _interopRequireDefault(obj) { return obj && obj.__esModule ? obj : { default: obj }; } + +/** + * Convert array of 16 byte values to UUID string format of the form: + * XXXXXXXX-XXXX-XXXX-XXXX-XXXXXXXXXXXX + */ +const byteToHex = []; + +for (let i = 0; i < 256; ++i) { + byteToHex.push((i + 0x100).toString(16).slice(1)); +} + +function unsafeStringify(arr, offset = 0) { + // Note: Be careful editing this code! It's been tuned for performance + // and works in ways you may not expect. See https://github.com/uuidjs/uuid/pull/434 + return byteToHex[arr[offset + 0]] + byteToHex[arr[offset + 1]] + byteToHex[arr[offset + 2]] + byteToHex[arr[offset + 3]] + '-' + byteToHex[arr[offset + 4]] + byteToHex[arr[offset + 5]] + '-' + byteToHex[arr[offset + 6]] + byteToHex[arr[offset + 7]] + '-' + byteToHex[arr[offset + 8]] + byteToHex[arr[offset + 9]] + '-' + byteToHex[arr[offset + 10]] + byteToHex[arr[offset + 11]] + byteToHex[arr[offset + 12]] + byteToHex[arr[offset + 13]] + byteToHex[arr[offset + 14]] + byteToHex[arr[offset + 15]]; +} + +function stringify(arr, offset = 0) { + const uuid = unsafeStringify(arr, offset); // Consistency check for valid UUID. 
If this throws, it's likely due to one + // of the following: + // - One or more input array values don't map to a hex octet (leading to + // "undefined" in the uuid) + // - Invalid input values for the RFC `version` or `variant` fields + + if (!(0, _validate.default)(uuid)) { + throw TypeError('Stringified UUID is invalid'); + } + + return uuid; +} + +var _default = stringify; +exports.default = _default; \ No newline at end of file diff --git a/amplify/functions/fetchDocuments/node_modules/uuid/dist/commonjs-browser/v1.js b/amplify/functions/fetchDocuments/node_modules/uuid/dist/commonjs-browser/v1.js new file mode 100644 index 0000000..125bc58 --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/uuid/dist/commonjs-browser/v1.js @@ -0,0 +1,107 @@ +"use strict"; + +Object.defineProperty(exports, "__esModule", { + value: true +}); +exports.default = void 0; + +var _rng = _interopRequireDefault(require("./rng.js")); + +var _stringify = require("./stringify.js"); + +function _interopRequireDefault(obj) { return obj && obj.__esModule ? obj : { default: obj }; } + +// **`v1()` - Generate time-based UUID** +// +// Inspired by https://github.com/LiosK/UUID.js +// and http://docs.python.org/library/uuid.html +let _nodeId; + +let _clockseq; // Previous uuid creation time + + +let _lastMSecs = 0; +let _lastNSecs = 0; // See https://github.com/uuidjs/uuid for API details + +function v1(options, buf, offset) { + let i = buf && offset || 0; + const b = buf || new Array(16); + options = options || {}; + let node = options.node || _nodeId; + let clockseq = options.clockseq !== undefined ? options.clockseq : _clockseq; // node and clockseq need to be initialized to random values if they're not + // specified. We do this lazily to minimize issues related to insufficient + // system entropy. 
See #189 + + if (node == null || clockseq == null) { + const seedBytes = options.random || (options.rng || _rng.default)(); + + if (node == null) { + // Per 4.5, create and 48-bit node id, (47 random bits + multicast bit = 1) + node = _nodeId = [seedBytes[0] | 0x01, seedBytes[1], seedBytes[2], seedBytes[3], seedBytes[4], seedBytes[5]]; + } + + if (clockseq == null) { + // Per 4.2.2, randomize (14 bit) clockseq + clockseq = _clockseq = (seedBytes[6] << 8 | seedBytes[7]) & 0x3fff; + } + } // UUID timestamps are 100 nano-second units since the Gregorian epoch, + // (1582-10-15 00:00). JSNumbers aren't precise enough for this, so + // time is handled internally as 'msecs' (integer milliseconds) and 'nsecs' + // (100-nanoseconds offset from msecs) since unix epoch, 1970-01-01 00:00. + + + let msecs = options.msecs !== undefined ? options.msecs : Date.now(); // Per 4.2.1.2, use count of uuid's generated during the current clock + // cycle to simulate higher resolution clock + + let nsecs = options.nsecs !== undefined ? 
options.nsecs : _lastNSecs + 1; // Time since last uuid creation (in msecs) + + const dt = msecs - _lastMSecs + (nsecs - _lastNSecs) / 10000; // Per 4.2.1.2, Bump clockseq on clock regression + + if (dt < 0 && options.clockseq === undefined) { + clockseq = clockseq + 1 & 0x3fff; + } // Reset nsecs if clock regresses (new clockseq) or we've moved onto a new + // time interval + + + if ((dt < 0 || msecs > _lastMSecs) && options.nsecs === undefined) { + nsecs = 0; + } // Per 4.2.1.2 Throw error if too many uuids are requested + + + if (nsecs >= 10000) { + throw new Error("uuid.v1(): Can't create more than 10M uuids/sec"); + } + + _lastMSecs = msecs; + _lastNSecs = nsecs; + _clockseq = clockseq; // Per 4.1.4 - Convert from unix epoch to Gregorian epoch + + msecs += 12219292800000; // `time_low` + + const tl = ((msecs & 0xfffffff) * 10000 + nsecs) % 0x100000000; + b[i++] = tl >>> 24 & 0xff; + b[i++] = tl >>> 16 & 0xff; + b[i++] = tl >>> 8 & 0xff; + b[i++] = tl & 0xff; // `time_mid` + + const tmh = msecs / 0x100000000 * 10000 & 0xfffffff; + b[i++] = tmh >>> 8 & 0xff; + b[i++] = tmh & 0xff; // `time_high_and_version` + + b[i++] = tmh >>> 24 & 0xf | 0x10; // include version + + b[i++] = tmh >>> 16 & 0xff; // `clock_seq_hi_and_reserved` (Per 4.2.2 - include variant) + + b[i++] = clockseq >>> 8 | 0x80; // `clock_seq_low` + + b[i++] = clockseq & 0xff; // `node` + + for (let n = 0; n < 6; ++n) { + b[i + n] = node[n]; + } + + return buf || (0, _stringify.unsafeStringify)(b); +} + +var _default = v1; +exports.default = _default; \ No newline at end of file diff --git a/amplify/functions/fetchDocuments/node_modules/uuid/dist/commonjs-browser/v3.js b/amplify/functions/fetchDocuments/node_modules/uuid/dist/commonjs-browser/v3.js new file mode 100644 index 0000000..6b47ff5 --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/uuid/dist/commonjs-browser/v3.js @@ -0,0 +1,16 @@ +"use strict"; + +Object.defineProperty(exports, "__esModule", { + value: true +}); 
+exports.default = void 0; + +var _v = _interopRequireDefault(require("./v35.js")); + +var _md = _interopRequireDefault(require("./md5.js")); + +function _interopRequireDefault(obj) { return obj && obj.__esModule ? obj : { default: obj }; } + +const v3 = (0, _v.default)('v3', 0x30, _md.default); +var _default = v3; +exports.default = _default; \ No newline at end of file diff --git a/amplify/functions/fetchDocuments/node_modules/uuid/dist/commonjs-browser/v35.js b/amplify/functions/fetchDocuments/node_modules/uuid/dist/commonjs-browser/v35.js new file mode 100644 index 0000000..7c522d9 --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/uuid/dist/commonjs-browser/v35.js @@ -0,0 +1,80 @@ +"use strict"; + +Object.defineProperty(exports, "__esModule", { + value: true +}); +exports.URL = exports.DNS = void 0; +exports.default = v35; + +var _stringify = require("./stringify.js"); + +var _parse = _interopRequireDefault(require("./parse.js")); + +function _interopRequireDefault(obj) { return obj && obj.__esModule ? obj : { default: obj }; } + +function stringToBytes(str) { + str = unescape(encodeURIComponent(str)); // UTF8 escape + + const bytes = []; + + for (let i = 0; i < str.length; ++i) { + bytes.push(str.charCodeAt(i)); + } + + return bytes; +} + +const DNS = '6ba7b810-9dad-11d1-80b4-00c04fd430c8'; +exports.DNS = DNS; +const URL = '6ba7b811-9dad-11d1-80b4-00c04fd430c8'; +exports.URL = URL; + +function v35(name, version, hashfunc) { + function generateUUID(value, namespace, buf, offset) { + var _namespace; + + if (typeof value === 'string') { + value = stringToBytes(value); + } + + if (typeof namespace === 'string') { + namespace = (0, _parse.default)(namespace); + } + + if (((_namespace = namespace) === null || _namespace === void 0 ? 
void 0 : _namespace.length) !== 16) { + throw TypeError('Namespace must be array-like (16 iterable integer values, 0-255)'); + } // Compute hash of namespace and value, Per 4.3 + // Future: Use spread syntax when supported on all platforms, e.g. `bytes = + // hashfunc([...namespace, ... value])` + + + let bytes = new Uint8Array(16 + value.length); + bytes.set(namespace); + bytes.set(value, namespace.length); + bytes = hashfunc(bytes); + bytes[6] = bytes[6] & 0x0f | version; + bytes[8] = bytes[8] & 0x3f | 0x80; + + if (buf) { + offset = offset || 0; + + for (let i = 0; i < 16; ++i) { + buf[offset + i] = bytes[i]; + } + + return buf; + } + + return (0, _stringify.unsafeStringify)(bytes); + } // Function#name is not settable on some platforms (#270) + + + try { + generateUUID.name = name; // eslint-disable-next-line no-empty + } catch (err) {} // For CommonJS default export support + + + generateUUID.DNS = DNS; + generateUUID.URL = URL; + return generateUUID; +} \ No newline at end of file diff --git a/amplify/functions/fetchDocuments/node_modules/uuid/dist/commonjs-browser/v4.js b/amplify/functions/fetchDocuments/node_modules/uuid/dist/commonjs-browser/v4.js new file mode 100644 index 0000000..959d698 --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/uuid/dist/commonjs-browser/v4.js @@ -0,0 +1,43 @@ +"use strict"; + +Object.defineProperty(exports, "__esModule", { + value: true +}); +exports.default = void 0; + +var _native = _interopRequireDefault(require("./native.js")); + +var _rng = _interopRequireDefault(require("./rng.js")); + +var _stringify = require("./stringify.js"); + +function _interopRequireDefault(obj) { return obj && obj.__esModule ? 
obj : { default: obj }; } + +function v4(options, buf, offset) { + if (_native.default.randomUUID && !buf && !options) { + return _native.default.randomUUID(); + } + + options = options || {}; + + const rnds = options.random || (options.rng || _rng.default)(); // Per 4.4, set bits for version and `clock_seq_hi_and_reserved` + + + rnds[6] = rnds[6] & 0x0f | 0x40; + rnds[8] = rnds[8] & 0x3f | 0x80; // Copy bytes to buffer, if provided + + if (buf) { + offset = offset || 0; + + for (let i = 0; i < 16; ++i) { + buf[offset + i] = rnds[i]; + } + + return buf; + } + + return (0, _stringify.unsafeStringify)(rnds); +} + +var _default = v4; +exports.default = _default; \ No newline at end of file diff --git a/amplify/functions/fetchDocuments/node_modules/uuid/dist/commonjs-browser/v5.js b/amplify/functions/fetchDocuments/node_modules/uuid/dist/commonjs-browser/v5.js new file mode 100644 index 0000000..99d615e --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/uuid/dist/commonjs-browser/v5.js @@ -0,0 +1,16 @@ +"use strict"; + +Object.defineProperty(exports, "__esModule", { + value: true +}); +exports.default = void 0; + +var _v = _interopRequireDefault(require("./v35.js")); + +var _sha = _interopRequireDefault(require("./sha1.js")); + +function _interopRequireDefault(obj) { return obj && obj.__esModule ? 
obj : { default: obj }; } + +const v5 = (0, _v.default)('v5', 0x50, _sha.default); +var _default = v5; +exports.default = _default; \ No newline at end of file diff --git a/amplify/functions/fetchDocuments/node_modules/uuid/dist/commonjs-browser/validate.js b/amplify/functions/fetchDocuments/node_modules/uuid/dist/commonjs-browser/validate.js new file mode 100644 index 0000000..fd05215 --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/uuid/dist/commonjs-browser/validate.js @@ -0,0 +1,17 @@ +"use strict"; + +Object.defineProperty(exports, "__esModule", { + value: true +}); +exports.default = void 0; + +var _regex = _interopRequireDefault(require("./regex.js")); + +function _interopRequireDefault(obj) { return obj && obj.__esModule ? obj : { default: obj }; } + +function validate(uuid) { + return typeof uuid === 'string' && _regex.default.test(uuid); +} + +var _default = validate; +exports.default = _default; \ No newline at end of file diff --git a/amplify/functions/fetchDocuments/node_modules/uuid/dist/commonjs-browser/version.js b/amplify/functions/fetchDocuments/node_modules/uuid/dist/commonjs-browser/version.js new file mode 100644 index 0000000..f63af01 --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/uuid/dist/commonjs-browser/version.js @@ -0,0 +1,21 @@ +"use strict"; + +Object.defineProperty(exports, "__esModule", { + value: true +}); +exports.default = void 0; + +var _validate = _interopRequireDefault(require("./validate.js")); + +function _interopRequireDefault(obj) { return obj && obj.__esModule ? 
obj : { default: obj }; } + +function version(uuid) { + if (!(0, _validate.default)(uuid)) { + throw TypeError('Invalid UUID'); + } + + return parseInt(uuid.slice(14, 15), 16); +} + +var _default = version; +exports.default = _default; \ No newline at end of file diff --git a/amplify/functions/fetchDocuments/node_modules/uuid/dist/esm-browser/index.js b/amplify/functions/fetchDocuments/node_modules/uuid/dist/esm-browser/index.js new file mode 100644 index 0000000..1db6f6d --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/uuid/dist/esm-browser/index.js @@ -0,0 +1,9 @@ +export { default as v1 } from './v1.js'; +export { default as v3 } from './v3.js'; +export { default as v4 } from './v4.js'; +export { default as v5 } from './v5.js'; +export { default as NIL } from './nil.js'; +export { default as version } from './version.js'; +export { default as validate } from './validate.js'; +export { default as stringify } from './stringify.js'; +export { default as parse } from './parse.js'; \ No newline at end of file diff --git a/amplify/functions/fetchDocuments/node_modules/uuid/dist/esm-browser/md5.js b/amplify/functions/fetchDocuments/node_modules/uuid/dist/esm-browser/md5.js new file mode 100644 index 0000000..f12212e --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/uuid/dist/esm-browser/md5.js @@ -0,0 +1,215 @@ +/* + * Browser-compatible JavaScript MD5 + * + * Modification of JavaScript MD5 + * https://github.com/blueimp/JavaScript-MD5 + * + * Copyright 2011, Sebastian Tschan + * https://blueimp.net + * + * Licensed under the MIT license: + * https://opensource.org/licenses/MIT + * + * Based on + * A JavaScript implementation of the RSA Data Security, Inc. MD5 Message + * Digest Algorithm, as defined in RFC 1321. + * Version 2.2 Copyright (C) Paul Johnston 1999 - 2009 + * Other contributors: Greg Holt, Andrew Kepert, Ydnar, Lostinet + * Distributed under the BSD License + * See http://pajhome.org.uk/crypt/md5 for more info. 
+ */ +function md5(bytes) { + if (typeof bytes === 'string') { + const msg = unescape(encodeURIComponent(bytes)); // UTF8 escape + + bytes = new Uint8Array(msg.length); + + for (let i = 0; i < msg.length; ++i) { + bytes[i] = msg.charCodeAt(i); + } + } + + return md5ToHexEncodedArray(wordsToMd5(bytesToWords(bytes), bytes.length * 8)); +} +/* + * Convert an array of little-endian words to an array of bytes + */ + + +function md5ToHexEncodedArray(input) { + const output = []; + const length32 = input.length * 32; + const hexTab = '0123456789abcdef'; + + for (let i = 0; i < length32; i += 8) { + const x = input[i >> 5] >>> i % 32 & 0xff; + const hex = parseInt(hexTab.charAt(x >>> 4 & 0x0f) + hexTab.charAt(x & 0x0f), 16); + output.push(hex); + } + + return output; +} +/** + * Calculate output length with padding and bit length + */ + + +function getOutputLength(inputLength8) { + return (inputLength8 + 64 >>> 9 << 4) + 14 + 1; +} +/* + * Calculate the MD5 of an array of little-endian words, and a bit length. 
+ */ + + +function wordsToMd5(x, len) { + /* append padding */ + x[len >> 5] |= 0x80 << len % 32; + x[getOutputLength(len) - 1] = len; + let a = 1732584193; + let b = -271733879; + let c = -1732584194; + let d = 271733878; + + for (let i = 0; i < x.length; i += 16) { + const olda = a; + const oldb = b; + const oldc = c; + const oldd = d; + a = md5ff(a, b, c, d, x[i], 7, -680876936); + d = md5ff(d, a, b, c, x[i + 1], 12, -389564586); + c = md5ff(c, d, a, b, x[i + 2], 17, 606105819); + b = md5ff(b, c, d, a, x[i + 3], 22, -1044525330); + a = md5ff(a, b, c, d, x[i + 4], 7, -176418897); + d = md5ff(d, a, b, c, x[i + 5], 12, 1200080426); + c = md5ff(c, d, a, b, x[i + 6], 17, -1473231341); + b = md5ff(b, c, d, a, x[i + 7], 22, -45705983); + a = md5ff(a, b, c, d, x[i + 8], 7, 1770035416); + d = md5ff(d, a, b, c, x[i + 9], 12, -1958414417); + c = md5ff(c, d, a, b, x[i + 10], 17, -42063); + b = md5ff(b, c, d, a, x[i + 11], 22, -1990404162); + a = md5ff(a, b, c, d, x[i + 12], 7, 1804603682); + d = md5ff(d, a, b, c, x[i + 13], 12, -40341101); + c = md5ff(c, d, a, b, x[i + 14], 17, -1502002290); + b = md5ff(b, c, d, a, x[i + 15], 22, 1236535329); + a = md5gg(a, b, c, d, x[i + 1], 5, -165796510); + d = md5gg(d, a, b, c, x[i + 6], 9, -1069501632); + c = md5gg(c, d, a, b, x[i + 11], 14, 643717713); + b = md5gg(b, c, d, a, x[i], 20, -373897302); + a = md5gg(a, b, c, d, x[i + 5], 5, -701558691); + d = md5gg(d, a, b, c, x[i + 10], 9, 38016083); + c = md5gg(c, d, a, b, x[i + 15], 14, -660478335); + b = md5gg(b, c, d, a, x[i + 4], 20, -405537848); + a = md5gg(a, b, c, d, x[i + 9], 5, 568446438); + d = md5gg(d, a, b, c, x[i + 14], 9, -1019803690); + c = md5gg(c, d, a, b, x[i + 3], 14, -187363961); + b = md5gg(b, c, d, a, x[i + 8], 20, 1163531501); + a = md5gg(a, b, c, d, x[i + 13], 5, -1444681467); + d = md5gg(d, a, b, c, x[i + 2], 9, -51403784); + c = md5gg(c, d, a, b, x[i + 7], 14, 1735328473); + b = md5gg(b, c, d, a, x[i + 12], 20, -1926607734); + a = md5hh(a, b, c, d, x[i + 5], 4, 
-378558); + d = md5hh(d, a, b, c, x[i + 8], 11, -2022574463); + c = md5hh(c, d, a, b, x[i + 11], 16, 1839030562); + b = md5hh(b, c, d, a, x[i + 14], 23, -35309556); + a = md5hh(a, b, c, d, x[i + 1], 4, -1530992060); + d = md5hh(d, a, b, c, x[i + 4], 11, 1272893353); + c = md5hh(c, d, a, b, x[i + 7], 16, -155497632); + b = md5hh(b, c, d, a, x[i + 10], 23, -1094730640); + a = md5hh(a, b, c, d, x[i + 13], 4, 681279174); + d = md5hh(d, a, b, c, x[i], 11, -358537222); + c = md5hh(c, d, a, b, x[i + 3], 16, -722521979); + b = md5hh(b, c, d, a, x[i + 6], 23, 76029189); + a = md5hh(a, b, c, d, x[i + 9], 4, -640364487); + d = md5hh(d, a, b, c, x[i + 12], 11, -421815835); + c = md5hh(c, d, a, b, x[i + 15], 16, 530742520); + b = md5hh(b, c, d, a, x[i + 2], 23, -995338651); + a = md5ii(a, b, c, d, x[i], 6, -198630844); + d = md5ii(d, a, b, c, x[i + 7], 10, 1126891415); + c = md5ii(c, d, a, b, x[i + 14], 15, -1416354905); + b = md5ii(b, c, d, a, x[i + 5], 21, -57434055); + a = md5ii(a, b, c, d, x[i + 12], 6, 1700485571); + d = md5ii(d, a, b, c, x[i + 3], 10, -1894986606); + c = md5ii(c, d, a, b, x[i + 10], 15, -1051523); + b = md5ii(b, c, d, a, x[i + 1], 21, -2054922799); + a = md5ii(a, b, c, d, x[i + 8], 6, 1873313359); + d = md5ii(d, a, b, c, x[i + 15], 10, -30611744); + c = md5ii(c, d, a, b, x[i + 6], 15, -1560198380); + b = md5ii(b, c, d, a, x[i + 13], 21, 1309151649); + a = md5ii(a, b, c, d, x[i + 4], 6, -145523070); + d = md5ii(d, a, b, c, x[i + 11], 10, -1120210379); + c = md5ii(c, d, a, b, x[i + 2], 15, 718787259); + b = md5ii(b, c, d, a, x[i + 9], 21, -343485551); + a = safeAdd(a, olda); + b = safeAdd(b, oldb); + c = safeAdd(c, oldc); + d = safeAdd(d, oldd); + } + + return [a, b, c, d]; +} +/* + * Convert an array bytes to an array of little-endian words + * Characters >255 have their high-byte silently ignored. 
+ */ + + +function bytesToWords(input) { + if (input.length === 0) { + return []; + } + + const length8 = input.length * 8; + const output = new Uint32Array(getOutputLength(length8)); + + for (let i = 0; i < length8; i += 8) { + output[i >> 5] |= (input[i / 8] & 0xff) << i % 32; + } + + return output; +} +/* + * Add integers, wrapping at 2^32. This uses 16-bit operations internally + * to work around bugs in some JS interpreters. + */ + + +function safeAdd(x, y) { + const lsw = (x & 0xffff) + (y & 0xffff); + const msw = (x >> 16) + (y >> 16) + (lsw >> 16); + return msw << 16 | lsw & 0xffff; +} +/* + * Bitwise rotate a 32-bit number to the left. + */ + + +function bitRotateLeft(num, cnt) { + return num << cnt | num >>> 32 - cnt; +} +/* + * These functions implement the four basic operations the algorithm uses. + */ + + +function md5cmn(q, a, b, x, s, t) { + return safeAdd(bitRotateLeft(safeAdd(safeAdd(a, q), safeAdd(x, t)), s), b); +} + +function md5ff(a, b, c, d, x, s, t) { + return md5cmn(b & c | ~b & d, a, b, x, s, t); +} + +function md5gg(a, b, c, d, x, s, t) { + return md5cmn(b & d | c & ~d, a, b, x, s, t); +} + +function md5hh(a, b, c, d, x, s, t) { + return md5cmn(b ^ c ^ d, a, b, x, s, t); +} + +function md5ii(a, b, c, d, x, s, t) { + return md5cmn(c ^ (b | ~d), a, b, x, s, t); +} + +export default md5; \ No newline at end of file diff --git a/amplify/functions/fetchDocuments/node_modules/uuid/dist/esm-browser/native.js b/amplify/functions/fetchDocuments/node_modules/uuid/dist/esm-browser/native.js new file mode 100644 index 0000000..b22292c --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/uuid/dist/esm-browser/native.js @@ -0,0 +1,4 @@ +const randomUUID = typeof crypto !== 'undefined' && crypto.randomUUID && crypto.randomUUID.bind(crypto); +export default { + randomUUID +}; \ No newline at end of file diff --git a/amplify/functions/fetchDocuments/node_modules/uuid/dist/esm-browser/nil.js 
b/amplify/functions/fetchDocuments/node_modules/uuid/dist/esm-browser/nil.js new file mode 100644 index 0000000..b36324c --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/uuid/dist/esm-browser/nil.js @@ -0,0 +1 @@ +export default '00000000-0000-0000-0000-000000000000'; \ No newline at end of file diff --git a/amplify/functions/fetchDocuments/node_modules/uuid/dist/esm-browser/parse.js b/amplify/functions/fetchDocuments/node_modules/uuid/dist/esm-browser/parse.js new file mode 100644 index 0000000..6421c5d --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/uuid/dist/esm-browser/parse.js @@ -0,0 +1,35 @@ +import validate from './validate.js'; + +function parse(uuid) { + if (!validate(uuid)) { + throw TypeError('Invalid UUID'); + } + + let v; + const arr = new Uint8Array(16); // Parse ########-....-....-....-............ + + arr[0] = (v = parseInt(uuid.slice(0, 8), 16)) >>> 24; + arr[1] = v >>> 16 & 0xff; + arr[2] = v >>> 8 & 0xff; + arr[3] = v & 0xff; // Parse ........-####-....-....-............ + + arr[4] = (v = parseInt(uuid.slice(9, 13), 16)) >>> 8; + arr[5] = v & 0xff; // Parse ........-....-####-....-............ + + arr[6] = (v = parseInt(uuid.slice(14, 18), 16)) >>> 8; + arr[7] = v & 0xff; // Parse ........-....-....-####-............ 
+ + arr[8] = (v = parseInt(uuid.slice(19, 23), 16)) >>> 8; + arr[9] = v & 0xff; // Parse ........-....-....-....-############ + // (Use "/" to avoid 32-bit truncation when bit-shifting high-order bytes) + + arr[10] = (v = parseInt(uuid.slice(24, 36), 16)) / 0x10000000000 & 0xff; + arr[11] = v / 0x100000000 & 0xff; + arr[12] = v >>> 24 & 0xff; + arr[13] = v >>> 16 & 0xff; + arr[14] = v >>> 8 & 0xff; + arr[15] = v & 0xff; + return arr; +} + +export default parse; \ No newline at end of file diff --git a/amplify/functions/fetchDocuments/node_modules/uuid/dist/esm-browser/regex.js b/amplify/functions/fetchDocuments/node_modules/uuid/dist/esm-browser/regex.js new file mode 100644 index 0000000..3da8673 --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/uuid/dist/esm-browser/regex.js @@ -0,0 +1 @@ +export default /^(?:[0-9a-f]{8}-[0-9a-f]{4}-[1-5][0-9a-f]{3}-[89ab][0-9a-f]{3}-[0-9a-f]{12}|00000000-0000-0000-0000-000000000000)$/i; \ No newline at end of file diff --git a/amplify/functions/fetchDocuments/node_modules/uuid/dist/esm-browser/rng.js b/amplify/functions/fetchDocuments/node_modules/uuid/dist/esm-browser/rng.js new file mode 100644 index 0000000..6e65234 --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/uuid/dist/esm-browser/rng.js @@ -0,0 +1,18 @@ +// Unique ID creation requires a high quality random # generator. In the browser we therefore +// require the crypto API and do not support built-in fallback to lower quality random number +// generators (like Math.random()). +let getRandomValues; +const rnds8 = new Uint8Array(16); +export default function rng() { + // lazy load so that environments that need to polyfill have a chance to do so + if (!getRandomValues) { + // getRandomValues needs to be invoked in a context where "this" is a Crypto implementation. 
+ getRandomValues = typeof crypto !== 'undefined' && crypto.getRandomValues && crypto.getRandomValues.bind(crypto); + + if (!getRandomValues) { + throw new Error('crypto.getRandomValues() not supported. See https://github.com/uuidjs/uuid#getrandomvalues-not-supported'); + } + } + + return getRandomValues(rnds8); +} \ No newline at end of file diff --git a/amplify/functions/fetchDocuments/node_modules/uuid/dist/esm-browser/sha1.js b/amplify/functions/fetchDocuments/node_modules/uuid/dist/esm-browser/sha1.js new file mode 100644 index 0000000..d3c2565 --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/uuid/dist/esm-browser/sha1.js @@ -0,0 +1,96 @@ +// Adapted from Chris Veness' SHA1 code at +// http://www.movable-type.co.uk/scripts/sha1.html +function f(s, x, y, z) { + switch (s) { + case 0: + return x & y ^ ~x & z; + + case 1: + return x ^ y ^ z; + + case 2: + return x & y ^ x & z ^ y & z; + + case 3: + return x ^ y ^ z; + } +} + +function ROTL(x, n) { + return x << n | x >>> 32 - n; +} + +function sha1(bytes) { + const K = [0x5a827999, 0x6ed9eba1, 0x8f1bbcdc, 0xca62c1d6]; + const H = [0x67452301, 0xefcdab89, 0x98badcfe, 0x10325476, 0xc3d2e1f0]; + + if (typeof bytes === 'string') { + const msg = unescape(encodeURIComponent(bytes)); // UTF8 escape + + bytes = []; + + for (let i = 0; i < msg.length; ++i) { + bytes.push(msg.charCodeAt(i)); + } + } else if (!Array.isArray(bytes)) { + // Convert Array-like to Array + bytes = Array.prototype.slice.call(bytes); + } + + bytes.push(0x80); + const l = bytes.length / 4 + 2; + const N = Math.ceil(l / 16); + const M = new Array(N); + + for (let i = 0; i < N; ++i) { + const arr = new Uint32Array(16); + + for (let j = 0; j < 16; ++j) { + arr[j] = bytes[i * 64 + j * 4] << 24 | bytes[i * 64 + j * 4 + 1] << 16 | bytes[i * 64 + j * 4 + 2] << 8 | bytes[i * 64 + j * 4 + 3]; + } + + M[i] = arr; + } + + M[N - 1][14] = (bytes.length - 1) * 8 / Math.pow(2, 32); + M[N - 1][14] = Math.floor(M[N - 1][14]); + M[N - 1][15] = 
(bytes.length - 1) * 8 & 0xffffffff; + + for (let i = 0; i < N; ++i) { + const W = new Uint32Array(80); + + for (let t = 0; t < 16; ++t) { + W[t] = M[i][t]; + } + + for (let t = 16; t < 80; ++t) { + W[t] = ROTL(W[t - 3] ^ W[t - 8] ^ W[t - 14] ^ W[t - 16], 1); + } + + let a = H[0]; + let b = H[1]; + let c = H[2]; + let d = H[3]; + let e = H[4]; + + for (let t = 0; t < 80; ++t) { + const s = Math.floor(t / 20); + const T = ROTL(a, 5) + f(s, b, c, d) + e + K[s] + W[t] >>> 0; + e = d; + d = c; + c = ROTL(b, 30) >>> 0; + b = a; + a = T; + } + + H[0] = H[0] + a >>> 0; + H[1] = H[1] + b >>> 0; + H[2] = H[2] + c >>> 0; + H[3] = H[3] + d >>> 0; + H[4] = H[4] + e >>> 0; + } + + return [H[0] >> 24 & 0xff, H[0] >> 16 & 0xff, H[0] >> 8 & 0xff, H[0] & 0xff, H[1] >> 24 & 0xff, H[1] >> 16 & 0xff, H[1] >> 8 & 0xff, H[1] & 0xff, H[2] >> 24 & 0xff, H[2] >> 16 & 0xff, H[2] >> 8 & 0xff, H[2] & 0xff, H[3] >> 24 & 0xff, H[3] >> 16 & 0xff, H[3] >> 8 & 0xff, H[3] & 0xff, H[4] >> 24 & 0xff, H[4] >> 16 & 0xff, H[4] >> 8 & 0xff, H[4] & 0xff]; +} + +export default sha1; \ No newline at end of file diff --git a/amplify/functions/fetchDocuments/node_modules/uuid/dist/esm-browser/stringify.js b/amplify/functions/fetchDocuments/node_modules/uuid/dist/esm-browser/stringify.js new file mode 100644 index 0000000..a6e4c88 --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/uuid/dist/esm-browser/stringify.js @@ -0,0 +1,33 @@ +import validate from './validate.js'; +/** + * Convert array of 16 byte values to UUID string format of the form: + * XXXXXXXX-XXXX-XXXX-XXXX-XXXXXXXXXXXX + */ + +const byteToHex = []; + +for (let i = 0; i < 256; ++i) { + byteToHex.push((i + 0x100).toString(16).slice(1)); +} + +export function unsafeStringify(arr, offset = 0) { + // Note: Be careful editing this code! It's been tuned for performance + // and works in ways you may not expect. 
See https://github.com/uuidjs/uuid/pull/434 + return byteToHex[arr[offset + 0]] + byteToHex[arr[offset + 1]] + byteToHex[arr[offset + 2]] + byteToHex[arr[offset + 3]] + '-' + byteToHex[arr[offset + 4]] + byteToHex[arr[offset + 5]] + '-' + byteToHex[arr[offset + 6]] + byteToHex[arr[offset + 7]] + '-' + byteToHex[arr[offset + 8]] + byteToHex[arr[offset + 9]] + '-' + byteToHex[arr[offset + 10]] + byteToHex[arr[offset + 11]] + byteToHex[arr[offset + 12]] + byteToHex[arr[offset + 13]] + byteToHex[arr[offset + 14]] + byteToHex[arr[offset + 15]]; +} + +function stringify(arr, offset = 0) { + const uuid = unsafeStringify(arr, offset); // Consistency check for valid UUID. If this throws, it's likely due to one + // of the following: + // - One or more input array values don't map to a hex octet (leading to + // "undefined" in the uuid) + // - Invalid input values for the RFC `version` or `variant` fields + + if (!validate(uuid)) { + throw TypeError('Stringified UUID is invalid'); + } + + return uuid; +} + +export default stringify; \ No newline at end of file diff --git a/amplify/functions/fetchDocuments/node_modules/uuid/dist/esm-browser/v1.js b/amplify/functions/fetchDocuments/node_modules/uuid/dist/esm-browser/v1.js new file mode 100644 index 0000000..382e5d7 --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/uuid/dist/esm-browser/v1.js @@ -0,0 +1,95 @@ +import rng from './rng.js'; +import { unsafeStringify } from './stringify.js'; // **`v1()` - Generate time-based UUID** +// +// Inspired by https://github.com/LiosK/UUID.js +// and http://docs.python.org/library/uuid.html + +let _nodeId; + +let _clockseq; // Previous uuid creation time + + +let _lastMSecs = 0; +let _lastNSecs = 0; // See https://github.com/uuidjs/uuid for API details + +function v1(options, buf, offset) { + let i = buf && offset || 0; + const b = buf || new Array(16); + options = options || {}; + let node = options.node || _nodeId; + let clockseq = options.clockseq !== undefined ? 
options.clockseq : _clockseq; // node and clockseq need to be initialized to random values if they're not + // specified. We do this lazily to minimize issues related to insufficient + // system entropy. See #189 + + if (node == null || clockseq == null) { + const seedBytes = options.random || (options.rng || rng)(); + + if (node == null) { + // Per 4.5, create and 48-bit node id, (47 random bits + multicast bit = 1) + node = _nodeId = [seedBytes[0] | 0x01, seedBytes[1], seedBytes[2], seedBytes[3], seedBytes[4], seedBytes[5]]; + } + + if (clockseq == null) { + // Per 4.2.2, randomize (14 bit) clockseq + clockseq = _clockseq = (seedBytes[6] << 8 | seedBytes[7]) & 0x3fff; + } + } // UUID timestamps are 100 nano-second units since the Gregorian epoch, + // (1582-10-15 00:00). JSNumbers aren't precise enough for this, so + // time is handled internally as 'msecs' (integer milliseconds) and 'nsecs' + // (100-nanoseconds offset from msecs) since unix epoch, 1970-01-01 00:00. + + + let msecs = options.msecs !== undefined ? options.msecs : Date.now(); // Per 4.2.1.2, use count of uuid's generated during the current clock + // cycle to simulate higher resolution clock + + let nsecs = options.nsecs !== undefined ? 
options.nsecs : _lastNSecs + 1; // Time since last uuid creation (in msecs) + + const dt = msecs - _lastMSecs + (nsecs - _lastNSecs) / 10000; // Per 4.2.1.2, Bump clockseq on clock regression + + if (dt < 0 && options.clockseq === undefined) { + clockseq = clockseq + 1 & 0x3fff; + } // Reset nsecs if clock regresses (new clockseq) or we've moved onto a new + // time interval + + + if ((dt < 0 || msecs > _lastMSecs) && options.nsecs === undefined) { + nsecs = 0; + } // Per 4.2.1.2 Throw error if too many uuids are requested + + + if (nsecs >= 10000) { + throw new Error("uuid.v1(): Can't create more than 10M uuids/sec"); + } + + _lastMSecs = msecs; + _lastNSecs = nsecs; + _clockseq = clockseq; // Per 4.1.4 - Convert from unix epoch to Gregorian epoch + + msecs += 12219292800000; // `time_low` + + const tl = ((msecs & 0xfffffff) * 10000 + nsecs) % 0x100000000; + b[i++] = tl >>> 24 & 0xff; + b[i++] = tl >>> 16 & 0xff; + b[i++] = tl >>> 8 & 0xff; + b[i++] = tl & 0xff; // `time_mid` + + const tmh = msecs / 0x100000000 * 10000 & 0xfffffff; + b[i++] = tmh >>> 8 & 0xff; + b[i++] = tmh & 0xff; // `time_high_and_version` + + b[i++] = tmh >>> 24 & 0xf | 0x10; // include version + + b[i++] = tmh >>> 16 & 0xff; // `clock_seq_hi_and_reserved` (Per 4.2.2 - include variant) + + b[i++] = clockseq >>> 8 | 0x80; // `clock_seq_low` + + b[i++] = clockseq & 0xff; // `node` + + for (let n = 0; n < 6; ++n) { + b[i + n] = node[n]; + } + + return buf || unsafeStringify(b); +} + +export default v1; \ No newline at end of file diff --git a/amplify/functions/fetchDocuments/node_modules/uuid/dist/esm-browser/v3.js b/amplify/functions/fetchDocuments/node_modules/uuid/dist/esm-browser/v3.js new file mode 100644 index 0000000..09063b8 --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/uuid/dist/esm-browser/v3.js @@ -0,0 +1,4 @@ +import v35 from './v35.js'; +import md5 from './md5.js'; +const v3 = v35('v3', 0x30, md5); +export default v3; \ No newline at end of file diff --git 
a/amplify/functions/fetchDocuments/node_modules/uuid/dist/esm-browser/v35.js b/amplify/functions/fetchDocuments/node_modules/uuid/dist/esm-browser/v35.js new file mode 100644 index 0000000..3355e1f --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/uuid/dist/esm-browser/v35.js @@ -0,0 +1,66 @@ +import { unsafeStringify } from './stringify.js'; +import parse from './parse.js'; + +function stringToBytes(str) { + str = unescape(encodeURIComponent(str)); // UTF8 escape + + const bytes = []; + + for (let i = 0; i < str.length; ++i) { + bytes.push(str.charCodeAt(i)); + } + + return bytes; +} + +export const DNS = '6ba7b810-9dad-11d1-80b4-00c04fd430c8'; +export const URL = '6ba7b811-9dad-11d1-80b4-00c04fd430c8'; +export default function v35(name, version, hashfunc) { + function generateUUID(value, namespace, buf, offset) { + var _namespace; + + if (typeof value === 'string') { + value = stringToBytes(value); + } + + if (typeof namespace === 'string') { + namespace = parse(namespace); + } + + if (((_namespace = namespace) === null || _namespace === void 0 ? void 0 : _namespace.length) !== 16) { + throw TypeError('Namespace must be array-like (16 iterable integer values, 0-255)'); + } // Compute hash of namespace and value, Per 4.3 + // Future: Use spread syntax when supported on all platforms, e.g. `bytes = + // hashfunc([...namespace, ... 
value])` + + + let bytes = new Uint8Array(16 + value.length); + bytes.set(namespace); + bytes.set(value, namespace.length); + bytes = hashfunc(bytes); + bytes[6] = bytes[6] & 0x0f | version; + bytes[8] = bytes[8] & 0x3f | 0x80; + + if (buf) { + offset = offset || 0; + + for (let i = 0; i < 16; ++i) { + buf[offset + i] = bytes[i]; + } + + return buf; + } + + return unsafeStringify(bytes); + } // Function#name is not settable on some platforms (#270) + + + try { + generateUUID.name = name; // eslint-disable-next-line no-empty + } catch (err) {} // For CommonJS default export support + + + generateUUID.DNS = DNS; + generateUUID.URL = URL; + return generateUUID; +} \ No newline at end of file diff --git a/amplify/functions/fetchDocuments/node_modules/uuid/dist/esm-browser/v4.js b/amplify/functions/fetchDocuments/node_modules/uuid/dist/esm-browser/v4.js new file mode 100644 index 0000000..95ea879 --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/uuid/dist/esm-browser/v4.js @@ -0,0 +1,29 @@ +import native from './native.js'; +import rng from './rng.js'; +import { unsafeStringify } from './stringify.js'; + +function v4(options, buf, offset) { + if (native.randomUUID && !buf && !options) { + return native.randomUUID(); + } + + options = options || {}; + const rnds = options.random || (options.rng || rng)(); // Per 4.4, set bits for version and `clock_seq_hi_and_reserved` + + rnds[6] = rnds[6] & 0x0f | 0x40; + rnds[8] = rnds[8] & 0x3f | 0x80; // Copy bytes to buffer, if provided + + if (buf) { + offset = offset || 0; + + for (let i = 0; i < 16; ++i) { + buf[offset + i] = rnds[i]; + } + + return buf; + } + + return unsafeStringify(rnds); +} + +export default v4; \ No newline at end of file diff --git a/amplify/functions/fetchDocuments/node_modules/uuid/dist/esm-browser/v5.js b/amplify/functions/fetchDocuments/node_modules/uuid/dist/esm-browser/v5.js new file mode 100644 index 0000000..e87fe31 --- /dev/null +++ 
b/amplify/functions/fetchDocuments/node_modules/uuid/dist/esm-browser/v5.js @@ -0,0 +1,4 @@ +import v35 from './v35.js'; +import sha1 from './sha1.js'; +const v5 = v35('v5', 0x50, sha1); +export default v5; \ No newline at end of file diff --git a/amplify/functions/fetchDocuments/node_modules/uuid/dist/esm-browser/validate.js b/amplify/functions/fetchDocuments/node_modules/uuid/dist/esm-browser/validate.js new file mode 100644 index 0000000..f1cdc7a --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/uuid/dist/esm-browser/validate.js @@ -0,0 +1,7 @@ +import REGEX from './regex.js'; + +function validate(uuid) { + return typeof uuid === 'string' && REGEX.test(uuid); +} + +export default validate; \ No newline at end of file diff --git a/amplify/functions/fetchDocuments/node_modules/uuid/dist/esm-browser/version.js b/amplify/functions/fetchDocuments/node_modules/uuid/dist/esm-browser/version.js new file mode 100644 index 0000000..9363076 --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/uuid/dist/esm-browser/version.js @@ -0,0 +1,11 @@ +import validate from './validate.js'; + +function version(uuid) { + if (!validate(uuid)) { + throw TypeError('Invalid UUID'); + } + + return parseInt(uuid.slice(14, 15), 16); +} + +export default version; \ No newline at end of file diff --git a/amplify/functions/fetchDocuments/node_modules/uuid/dist/esm-node/index.js b/amplify/functions/fetchDocuments/node_modules/uuid/dist/esm-node/index.js new file mode 100644 index 0000000..1db6f6d --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/uuid/dist/esm-node/index.js @@ -0,0 +1,9 @@ +export { default as v1 } from './v1.js'; +export { default as v3 } from './v3.js'; +export { default as v4 } from './v4.js'; +export { default as v5 } from './v5.js'; +export { default as NIL } from './nil.js'; +export { default as version } from './version.js'; +export { default as validate } from './validate.js'; +export { default as stringify } from 
'./stringify.js'; +export { default as parse } from './parse.js'; \ No newline at end of file diff --git a/amplify/functions/fetchDocuments/node_modules/uuid/dist/esm-node/md5.js b/amplify/functions/fetchDocuments/node_modules/uuid/dist/esm-node/md5.js new file mode 100644 index 0000000..4d68b04 --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/uuid/dist/esm-node/md5.js @@ -0,0 +1,13 @@ +import crypto from 'crypto'; + +function md5(bytes) { + if (Array.isArray(bytes)) { + bytes = Buffer.from(bytes); + } else if (typeof bytes === 'string') { + bytes = Buffer.from(bytes, 'utf8'); + } + + return crypto.createHash('md5').update(bytes).digest(); +} + +export default md5; \ No newline at end of file diff --git a/amplify/functions/fetchDocuments/node_modules/uuid/dist/esm-node/native.js b/amplify/functions/fetchDocuments/node_modules/uuid/dist/esm-node/native.js new file mode 100644 index 0000000..f0d1992 --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/uuid/dist/esm-node/native.js @@ -0,0 +1,4 @@ +import crypto from 'crypto'; +export default { + randomUUID: crypto.randomUUID +}; \ No newline at end of file diff --git a/amplify/functions/fetchDocuments/node_modules/uuid/dist/esm-node/nil.js b/amplify/functions/fetchDocuments/node_modules/uuid/dist/esm-node/nil.js new file mode 100644 index 0000000..b36324c --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/uuid/dist/esm-node/nil.js @@ -0,0 +1 @@ +export default '00000000-0000-0000-0000-000000000000'; \ No newline at end of file diff --git a/amplify/functions/fetchDocuments/node_modules/uuid/dist/esm-node/parse.js b/amplify/functions/fetchDocuments/node_modules/uuid/dist/esm-node/parse.js new file mode 100644 index 0000000..6421c5d --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/uuid/dist/esm-node/parse.js @@ -0,0 +1,35 @@ +import validate from './validate.js'; + +function parse(uuid) { + if (!validate(uuid)) { + throw TypeError('Invalid UUID'); + } + + let 
v; + const arr = new Uint8Array(16); // Parse ########-....-....-....-............ + + arr[0] = (v = parseInt(uuid.slice(0, 8), 16)) >>> 24; + arr[1] = v >>> 16 & 0xff; + arr[2] = v >>> 8 & 0xff; + arr[3] = v & 0xff; // Parse ........-####-....-....-............ + + arr[4] = (v = parseInt(uuid.slice(9, 13), 16)) >>> 8; + arr[5] = v & 0xff; // Parse ........-....-####-....-............ + + arr[6] = (v = parseInt(uuid.slice(14, 18), 16)) >>> 8; + arr[7] = v & 0xff; // Parse ........-....-....-####-............ + + arr[8] = (v = parseInt(uuid.slice(19, 23), 16)) >>> 8; + arr[9] = v & 0xff; // Parse ........-....-....-....-############ + // (Use "/" to avoid 32-bit truncation when bit-shifting high-order bytes) + + arr[10] = (v = parseInt(uuid.slice(24, 36), 16)) / 0x10000000000 & 0xff; + arr[11] = v / 0x100000000 & 0xff; + arr[12] = v >>> 24 & 0xff; + arr[13] = v >>> 16 & 0xff; + arr[14] = v >>> 8 & 0xff; + arr[15] = v & 0xff; + return arr; +} + +export default parse; \ No newline at end of file diff --git a/amplify/functions/fetchDocuments/node_modules/uuid/dist/esm-node/regex.js b/amplify/functions/fetchDocuments/node_modules/uuid/dist/esm-node/regex.js new file mode 100644 index 0000000..3da8673 --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/uuid/dist/esm-node/regex.js @@ -0,0 +1 @@ +export default /^(?:[0-9a-f]{8}-[0-9a-f]{4}-[1-5][0-9a-f]{3}-[89ab][0-9a-f]{3}-[0-9a-f]{12}|00000000-0000-0000-0000-000000000000)$/i; \ No newline at end of file diff --git a/amplify/functions/fetchDocuments/node_modules/uuid/dist/esm-node/rng.js b/amplify/functions/fetchDocuments/node_modules/uuid/dist/esm-node/rng.js new file mode 100644 index 0000000..8006244 --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/uuid/dist/esm-node/rng.js @@ -0,0 +1,12 @@ +import crypto from 'crypto'; +const rnds8Pool = new Uint8Array(256); // # of random values to pre-allocate + +let poolPtr = rnds8Pool.length; +export default function rng() { + if (poolPtr > 
rnds8Pool.length - 16) { + crypto.randomFillSync(rnds8Pool); + poolPtr = 0; + } + + return rnds8Pool.slice(poolPtr, poolPtr += 16); +} \ No newline at end of file diff --git a/amplify/functions/fetchDocuments/node_modules/uuid/dist/esm-node/sha1.js b/amplify/functions/fetchDocuments/node_modules/uuid/dist/esm-node/sha1.js new file mode 100644 index 0000000..e23850b --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/uuid/dist/esm-node/sha1.js @@ -0,0 +1,13 @@ +import crypto from 'crypto'; + +function sha1(bytes) { + if (Array.isArray(bytes)) { + bytes = Buffer.from(bytes); + } else if (typeof bytes === 'string') { + bytes = Buffer.from(bytes, 'utf8'); + } + + return crypto.createHash('sha1').update(bytes).digest(); +} + +export default sha1; \ No newline at end of file diff --git a/amplify/functions/fetchDocuments/node_modules/uuid/dist/esm-node/stringify.js b/amplify/functions/fetchDocuments/node_modules/uuid/dist/esm-node/stringify.js new file mode 100644 index 0000000..a6e4c88 --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/uuid/dist/esm-node/stringify.js @@ -0,0 +1,33 @@ +import validate from './validate.js'; +/** + * Convert array of 16 byte values to UUID string format of the form: + * XXXXXXXX-XXXX-XXXX-XXXX-XXXXXXXXXXXX + */ + +const byteToHex = []; + +for (let i = 0; i < 256; ++i) { + byteToHex.push((i + 0x100).toString(16).slice(1)); +} + +export function unsafeStringify(arr, offset = 0) { + // Note: Be careful editing this code! It's been tuned for performance + // and works in ways you may not expect. 
See https://github.com/uuidjs/uuid/pull/434 + return byteToHex[arr[offset + 0]] + byteToHex[arr[offset + 1]] + byteToHex[arr[offset + 2]] + byteToHex[arr[offset + 3]] + '-' + byteToHex[arr[offset + 4]] + byteToHex[arr[offset + 5]] + '-' + byteToHex[arr[offset + 6]] + byteToHex[arr[offset + 7]] + '-' + byteToHex[arr[offset + 8]] + byteToHex[arr[offset + 9]] + '-' + byteToHex[arr[offset + 10]] + byteToHex[arr[offset + 11]] + byteToHex[arr[offset + 12]] + byteToHex[arr[offset + 13]] + byteToHex[arr[offset + 14]] + byteToHex[arr[offset + 15]]; +} + +function stringify(arr, offset = 0) { + const uuid = unsafeStringify(arr, offset); // Consistency check for valid UUID. If this throws, it's likely due to one + // of the following: + // - One or more input array values don't map to a hex octet (leading to + // "undefined" in the uuid) + // - Invalid input values for the RFC `version` or `variant` fields + + if (!validate(uuid)) { + throw TypeError('Stringified UUID is invalid'); + } + + return uuid; +} + +export default stringify; \ No newline at end of file diff --git a/amplify/functions/fetchDocuments/node_modules/uuid/dist/esm-node/v1.js b/amplify/functions/fetchDocuments/node_modules/uuid/dist/esm-node/v1.js new file mode 100644 index 0000000..382e5d7 --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/uuid/dist/esm-node/v1.js @@ -0,0 +1,95 @@ +import rng from './rng.js'; +import { unsafeStringify } from './stringify.js'; // **`v1()` - Generate time-based UUID** +// +// Inspired by https://github.com/LiosK/UUID.js +// and http://docs.python.org/library/uuid.html + +let _nodeId; + +let _clockseq; // Previous uuid creation time + + +let _lastMSecs = 0; +let _lastNSecs = 0; // See https://github.com/uuidjs/uuid for API details + +function v1(options, buf, offset) { + let i = buf && offset || 0; + const b = buf || new Array(16); + options = options || {}; + let node = options.node || _nodeId; + let clockseq = options.clockseq !== undefined ? 
options.clockseq : _clockseq; // node and clockseq need to be initialized to random values if they're not + // specified. We do this lazily to minimize issues related to insufficient + // system entropy. See #189 + + if (node == null || clockseq == null) { + const seedBytes = options.random || (options.rng || rng)(); + + if (node == null) { + // Per 4.5, create and 48-bit node id, (47 random bits + multicast bit = 1) + node = _nodeId = [seedBytes[0] | 0x01, seedBytes[1], seedBytes[2], seedBytes[3], seedBytes[4], seedBytes[5]]; + } + + if (clockseq == null) { + // Per 4.2.2, randomize (14 bit) clockseq + clockseq = _clockseq = (seedBytes[6] << 8 | seedBytes[7]) & 0x3fff; + } + } // UUID timestamps are 100 nano-second units since the Gregorian epoch, + // (1582-10-15 00:00). JSNumbers aren't precise enough for this, so + // time is handled internally as 'msecs' (integer milliseconds) and 'nsecs' + // (100-nanoseconds offset from msecs) since unix epoch, 1970-01-01 00:00. + + + let msecs = options.msecs !== undefined ? options.msecs : Date.now(); // Per 4.2.1.2, use count of uuid's generated during the current clock + // cycle to simulate higher resolution clock + + let nsecs = options.nsecs !== undefined ? 
options.nsecs : _lastNSecs + 1; // Time since last uuid creation (in msecs) + + const dt = msecs - _lastMSecs + (nsecs - _lastNSecs) / 10000; // Per 4.2.1.2, Bump clockseq on clock regression + + if (dt < 0 && options.clockseq === undefined) { + clockseq = clockseq + 1 & 0x3fff; + } // Reset nsecs if clock regresses (new clockseq) or we've moved onto a new + // time interval + + + if ((dt < 0 || msecs > _lastMSecs) && options.nsecs === undefined) { + nsecs = 0; + } // Per 4.2.1.2 Throw error if too many uuids are requested + + + if (nsecs >= 10000) { + throw new Error("uuid.v1(): Can't create more than 10M uuids/sec"); + } + + _lastMSecs = msecs; + _lastNSecs = nsecs; + _clockseq = clockseq; // Per 4.1.4 - Convert from unix epoch to Gregorian epoch + + msecs += 12219292800000; // `time_low` + + const tl = ((msecs & 0xfffffff) * 10000 + nsecs) % 0x100000000; + b[i++] = tl >>> 24 & 0xff; + b[i++] = tl >>> 16 & 0xff; + b[i++] = tl >>> 8 & 0xff; + b[i++] = tl & 0xff; // `time_mid` + + const tmh = msecs / 0x100000000 * 10000 & 0xfffffff; + b[i++] = tmh >>> 8 & 0xff; + b[i++] = tmh & 0xff; // `time_high_and_version` + + b[i++] = tmh >>> 24 & 0xf | 0x10; // include version + + b[i++] = tmh >>> 16 & 0xff; // `clock_seq_hi_and_reserved` (Per 4.2.2 - include variant) + + b[i++] = clockseq >>> 8 | 0x80; // `clock_seq_low` + + b[i++] = clockseq & 0xff; // `node` + + for (let n = 0; n < 6; ++n) { + b[i + n] = node[n]; + } + + return buf || unsafeStringify(b); +} + +export default v1; \ No newline at end of file diff --git a/amplify/functions/fetchDocuments/node_modules/uuid/dist/esm-node/v3.js b/amplify/functions/fetchDocuments/node_modules/uuid/dist/esm-node/v3.js new file mode 100644 index 0000000..09063b8 --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/uuid/dist/esm-node/v3.js @@ -0,0 +1,4 @@ +import v35 from './v35.js'; +import md5 from './md5.js'; +const v3 = v35('v3', 0x30, md5); +export default v3; \ No newline at end of file diff --git 
a/amplify/functions/fetchDocuments/node_modules/uuid/dist/esm-node/v35.js b/amplify/functions/fetchDocuments/node_modules/uuid/dist/esm-node/v35.js new file mode 100644 index 0000000..3355e1f --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/uuid/dist/esm-node/v35.js @@ -0,0 +1,66 @@ +import { unsafeStringify } from './stringify.js'; +import parse from './parse.js'; + +function stringToBytes(str) { + str = unescape(encodeURIComponent(str)); // UTF8 escape + + const bytes = []; + + for (let i = 0; i < str.length; ++i) { + bytes.push(str.charCodeAt(i)); + } + + return bytes; +} + +export const DNS = '6ba7b810-9dad-11d1-80b4-00c04fd430c8'; +export const URL = '6ba7b811-9dad-11d1-80b4-00c04fd430c8'; +export default function v35(name, version, hashfunc) { + function generateUUID(value, namespace, buf, offset) { + var _namespace; + + if (typeof value === 'string') { + value = stringToBytes(value); + } + + if (typeof namespace === 'string') { + namespace = parse(namespace); + } + + if (((_namespace = namespace) === null || _namespace === void 0 ? void 0 : _namespace.length) !== 16) { + throw TypeError('Namespace must be array-like (16 iterable integer values, 0-255)'); + } // Compute hash of namespace and value, Per 4.3 + // Future: Use spread syntax when supported on all platforms, e.g. `bytes = + // hashfunc([...namespace, ... 
value])` + + + let bytes = new Uint8Array(16 + value.length); + bytes.set(namespace); + bytes.set(value, namespace.length); + bytes = hashfunc(bytes); + bytes[6] = bytes[6] & 0x0f | version; + bytes[8] = bytes[8] & 0x3f | 0x80; + + if (buf) { + offset = offset || 0; + + for (let i = 0; i < 16; ++i) { + buf[offset + i] = bytes[i]; + } + + return buf; + } + + return unsafeStringify(bytes); + } // Function#name is not settable on some platforms (#270) + + + try { + generateUUID.name = name; // eslint-disable-next-line no-empty + } catch (err) {} // For CommonJS default export support + + + generateUUID.DNS = DNS; + generateUUID.URL = URL; + return generateUUID; +} \ No newline at end of file diff --git a/amplify/functions/fetchDocuments/node_modules/uuid/dist/esm-node/v4.js b/amplify/functions/fetchDocuments/node_modules/uuid/dist/esm-node/v4.js new file mode 100644 index 0000000..95ea879 --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/uuid/dist/esm-node/v4.js @@ -0,0 +1,29 @@ +import native from './native.js'; +import rng from './rng.js'; +import { unsafeStringify } from './stringify.js'; + +function v4(options, buf, offset) { + if (native.randomUUID && !buf && !options) { + return native.randomUUID(); + } + + options = options || {}; + const rnds = options.random || (options.rng || rng)(); // Per 4.4, set bits for version and `clock_seq_hi_and_reserved` + + rnds[6] = rnds[6] & 0x0f | 0x40; + rnds[8] = rnds[8] & 0x3f | 0x80; // Copy bytes to buffer, if provided + + if (buf) { + offset = offset || 0; + + for (let i = 0; i < 16; ++i) { + buf[offset + i] = rnds[i]; + } + + return buf; + } + + return unsafeStringify(rnds); +} + +export default v4; \ No newline at end of file diff --git a/amplify/functions/fetchDocuments/node_modules/uuid/dist/esm-node/v5.js b/amplify/functions/fetchDocuments/node_modules/uuid/dist/esm-node/v5.js new file mode 100644 index 0000000..e87fe31 --- /dev/null +++ 
b/amplify/functions/fetchDocuments/node_modules/uuid/dist/esm-node/v5.js @@ -0,0 +1,4 @@ +import v35 from './v35.js'; +import sha1 from './sha1.js'; +const v5 = v35('v5', 0x50, sha1); +export default v5; \ No newline at end of file diff --git a/amplify/functions/fetchDocuments/node_modules/uuid/dist/esm-node/validate.js b/amplify/functions/fetchDocuments/node_modules/uuid/dist/esm-node/validate.js new file mode 100644 index 0000000..f1cdc7a --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/uuid/dist/esm-node/validate.js @@ -0,0 +1,7 @@ +import REGEX from './regex.js'; + +function validate(uuid) { + return typeof uuid === 'string' && REGEX.test(uuid); +} + +export default validate; \ No newline at end of file diff --git a/amplify/functions/fetchDocuments/node_modules/uuid/dist/esm-node/version.js b/amplify/functions/fetchDocuments/node_modules/uuid/dist/esm-node/version.js new file mode 100644 index 0000000..9363076 --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/uuid/dist/esm-node/version.js @@ -0,0 +1,11 @@ +import validate from './validate.js'; + +function version(uuid) { + if (!validate(uuid)) { + throw TypeError('Invalid UUID'); + } + + return parseInt(uuid.slice(14, 15), 16); +} + +export default version; \ No newline at end of file diff --git a/amplify/functions/fetchDocuments/node_modules/uuid/dist/index.js b/amplify/functions/fetchDocuments/node_modules/uuid/dist/index.js new file mode 100644 index 0000000..88d676a --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/uuid/dist/index.js @@ -0,0 +1,79 @@ +"use strict"; + +Object.defineProperty(exports, "__esModule", { + value: true +}); +Object.defineProperty(exports, "NIL", { + enumerable: true, + get: function () { + return _nil.default; + } +}); +Object.defineProperty(exports, "parse", { + enumerable: true, + get: function () { + return _parse.default; + } +}); +Object.defineProperty(exports, "stringify", { + enumerable: true, + get: function () { + return 
_stringify.default; + } +}); +Object.defineProperty(exports, "v1", { + enumerable: true, + get: function () { + return _v.default; + } +}); +Object.defineProperty(exports, "v3", { + enumerable: true, + get: function () { + return _v2.default; + } +}); +Object.defineProperty(exports, "v4", { + enumerable: true, + get: function () { + return _v3.default; + } +}); +Object.defineProperty(exports, "v5", { + enumerable: true, + get: function () { + return _v4.default; + } +}); +Object.defineProperty(exports, "validate", { + enumerable: true, + get: function () { + return _validate.default; + } +}); +Object.defineProperty(exports, "version", { + enumerable: true, + get: function () { + return _version.default; + } +}); + +var _v = _interopRequireDefault(require("./v1.js")); + +var _v2 = _interopRequireDefault(require("./v3.js")); + +var _v3 = _interopRequireDefault(require("./v4.js")); + +var _v4 = _interopRequireDefault(require("./v5.js")); + +var _nil = _interopRequireDefault(require("./nil.js")); + +var _version = _interopRequireDefault(require("./version.js")); + +var _validate = _interopRequireDefault(require("./validate.js")); + +var _stringify = _interopRequireDefault(require("./stringify.js")); + +var _parse = _interopRequireDefault(require("./parse.js")); + +function _interopRequireDefault(obj) { return obj && obj.__esModule ? 
obj : { default: obj }; } \ No newline at end of file diff --git a/amplify/functions/fetchDocuments/node_modules/uuid/dist/md5-browser.js b/amplify/functions/fetchDocuments/node_modules/uuid/dist/md5-browser.js new file mode 100644 index 0000000..7a4582a --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/uuid/dist/md5-browser.js @@ -0,0 +1,223 @@ +"use strict"; + +Object.defineProperty(exports, "__esModule", { + value: true +}); +exports.default = void 0; + +/* + * Browser-compatible JavaScript MD5 + * + * Modification of JavaScript MD5 + * https://github.com/blueimp/JavaScript-MD5 + * + * Copyright 2011, Sebastian Tschan + * https://blueimp.net + * + * Licensed under the MIT license: + * https://opensource.org/licenses/MIT + * + * Based on + * A JavaScript implementation of the RSA Data Security, Inc. MD5 Message + * Digest Algorithm, as defined in RFC 1321. + * Version 2.2 Copyright (C) Paul Johnston 1999 - 2009 + * Other contributors: Greg Holt, Andrew Kepert, Ydnar, Lostinet + * Distributed under the BSD License + * See http://pajhome.org.uk/crypt/md5 for more info. 
+ */ +function md5(bytes) { + if (typeof bytes === 'string') { + const msg = unescape(encodeURIComponent(bytes)); // UTF8 escape + + bytes = new Uint8Array(msg.length); + + for (let i = 0; i < msg.length; ++i) { + bytes[i] = msg.charCodeAt(i); + } + } + + return md5ToHexEncodedArray(wordsToMd5(bytesToWords(bytes), bytes.length * 8)); +} +/* + * Convert an array of little-endian words to an array of bytes + */ + + +function md5ToHexEncodedArray(input) { + const output = []; + const length32 = input.length * 32; + const hexTab = '0123456789abcdef'; + + for (let i = 0; i < length32; i += 8) { + const x = input[i >> 5] >>> i % 32 & 0xff; + const hex = parseInt(hexTab.charAt(x >>> 4 & 0x0f) + hexTab.charAt(x & 0x0f), 16); + output.push(hex); + } + + return output; +} +/** + * Calculate output length with padding and bit length + */ + + +function getOutputLength(inputLength8) { + return (inputLength8 + 64 >>> 9 << 4) + 14 + 1; +} +/* + * Calculate the MD5 of an array of little-endian words, and a bit length. 
+ */ + + +function wordsToMd5(x, len) { + /* append padding */ + x[len >> 5] |= 0x80 << len % 32; + x[getOutputLength(len) - 1] = len; + let a = 1732584193; + let b = -271733879; + let c = -1732584194; + let d = 271733878; + + for (let i = 0; i < x.length; i += 16) { + const olda = a; + const oldb = b; + const oldc = c; + const oldd = d; + a = md5ff(a, b, c, d, x[i], 7, -680876936); + d = md5ff(d, a, b, c, x[i + 1], 12, -389564586); + c = md5ff(c, d, a, b, x[i + 2], 17, 606105819); + b = md5ff(b, c, d, a, x[i + 3], 22, -1044525330); + a = md5ff(a, b, c, d, x[i + 4], 7, -176418897); + d = md5ff(d, a, b, c, x[i + 5], 12, 1200080426); + c = md5ff(c, d, a, b, x[i + 6], 17, -1473231341); + b = md5ff(b, c, d, a, x[i + 7], 22, -45705983); + a = md5ff(a, b, c, d, x[i + 8], 7, 1770035416); + d = md5ff(d, a, b, c, x[i + 9], 12, -1958414417); + c = md5ff(c, d, a, b, x[i + 10], 17, -42063); + b = md5ff(b, c, d, a, x[i + 11], 22, -1990404162); + a = md5ff(a, b, c, d, x[i + 12], 7, 1804603682); + d = md5ff(d, a, b, c, x[i + 13], 12, -40341101); + c = md5ff(c, d, a, b, x[i + 14], 17, -1502002290); + b = md5ff(b, c, d, a, x[i + 15], 22, 1236535329); + a = md5gg(a, b, c, d, x[i + 1], 5, -165796510); + d = md5gg(d, a, b, c, x[i + 6], 9, -1069501632); + c = md5gg(c, d, a, b, x[i + 11], 14, 643717713); + b = md5gg(b, c, d, a, x[i], 20, -373897302); + a = md5gg(a, b, c, d, x[i + 5], 5, -701558691); + d = md5gg(d, a, b, c, x[i + 10], 9, 38016083); + c = md5gg(c, d, a, b, x[i + 15], 14, -660478335); + b = md5gg(b, c, d, a, x[i + 4], 20, -405537848); + a = md5gg(a, b, c, d, x[i + 9], 5, 568446438); + d = md5gg(d, a, b, c, x[i + 14], 9, -1019803690); + c = md5gg(c, d, a, b, x[i + 3], 14, -187363961); + b = md5gg(b, c, d, a, x[i + 8], 20, 1163531501); + a = md5gg(a, b, c, d, x[i + 13], 5, -1444681467); + d = md5gg(d, a, b, c, x[i + 2], 9, -51403784); + c = md5gg(c, d, a, b, x[i + 7], 14, 1735328473); + b = md5gg(b, c, d, a, x[i + 12], 20, -1926607734); + a = md5hh(a, b, c, d, x[i + 5], 4, 
-378558); + d = md5hh(d, a, b, c, x[i + 8], 11, -2022574463); + c = md5hh(c, d, a, b, x[i + 11], 16, 1839030562); + b = md5hh(b, c, d, a, x[i + 14], 23, -35309556); + a = md5hh(a, b, c, d, x[i + 1], 4, -1530992060); + d = md5hh(d, a, b, c, x[i + 4], 11, 1272893353); + c = md5hh(c, d, a, b, x[i + 7], 16, -155497632); + b = md5hh(b, c, d, a, x[i + 10], 23, -1094730640); + a = md5hh(a, b, c, d, x[i + 13], 4, 681279174); + d = md5hh(d, a, b, c, x[i], 11, -358537222); + c = md5hh(c, d, a, b, x[i + 3], 16, -722521979); + b = md5hh(b, c, d, a, x[i + 6], 23, 76029189); + a = md5hh(a, b, c, d, x[i + 9], 4, -640364487); + d = md5hh(d, a, b, c, x[i + 12], 11, -421815835); + c = md5hh(c, d, a, b, x[i + 15], 16, 530742520); + b = md5hh(b, c, d, a, x[i + 2], 23, -995338651); + a = md5ii(a, b, c, d, x[i], 6, -198630844); + d = md5ii(d, a, b, c, x[i + 7], 10, 1126891415); + c = md5ii(c, d, a, b, x[i + 14], 15, -1416354905); + b = md5ii(b, c, d, a, x[i + 5], 21, -57434055); + a = md5ii(a, b, c, d, x[i + 12], 6, 1700485571); + d = md5ii(d, a, b, c, x[i + 3], 10, -1894986606); + c = md5ii(c, d, a, b, x[i + 10], 15, -1051523); + b = md5ii(b, c, d, a, x[i + 1], 21, -2054922799); + a = md5ii(a, b, c, d, x[i + 8], 6, 1873313359); + d = md5ii(d, a, b, c, x[i + 15], 10, -30611744); + c = md5ii(c, d, a, b, x[i + 6], 15, -1560198380); + b = md5ii(b, c, d, a, x[i + 13], 21, 1309151649); + a = md5ii(a, b, c, d, x[i + 4], 6, -145523070); + d = md5ii(d, a, b, c, x[i + 11], 10, -1120210379); + c = md5ii(c, d, a, b, x[i + 2], 15, 718787259); + b = md5ii(b, c, d, a, x[i + 9], 21, -343485551); + a = safeAdd(a, olda); + b = safeAdd(b, oldb); + c = safeAdd(c, oldc); + d = safeAdd(d, oldd); + } + + return [a, b, c, d]; +} +/* + * Convert an array bytes to an array of little-endian words + * Characters >255 have their high-byte silently ignored. 
+ */ + + +function bytesToWords(input) { + if (input.length === 0) { + return []; + } + + const length8 = input.length * 8; + const output = new Uint32Array(getOutputLength(length8)); + + for (let i = 0; i < length8; i += 8) { + output[i >> 5] |= (input[i / 8] & 0xff) << i % 32; + } + + return output; +} +/* + * Add integers, wrapping at 2^32. This uses 16-bit operations internally + * to work around bugs in some JS interpreters. + */ + + +function safeAdd(x, y) { + const lsw = (x & 0xffff) + (y & 0xffff); + const msw = (x >> 16) + (y >> 16) + (lsw >> 16); + return msw << 16 | lsw & 0xffff; +} +/* + * Bitwise rotate a 32-bit number to the left. + */ + + +function bitRotateLeft(num, cnt) { + return num << cnt | num >>> 32 - cnt; +} +/* + * These functions implement the four basic operations the algorithm uses. + */ + + +function md5cmn(q, a, b, x, s, t) { + return safeAdd(bitRotateLeft(safeAdd(safeAdd(a, q), safeAdd(x, t)), s), b); +} + +function md5ff(a, b, c, d, x, s, t) { + return md5cmn(b & c | ~b & d, a, b, x, s, t); +} + +function md5gg(a, b, c, d, x, s, t) { + return md5cmn(b & d | c & ~d, a, b, x, s, t); +} + +function md5hh(a, b, c, d, x, s, t) { + return md5cmn(b ^ c ^ d, a, b, x, s, t); +} + +function md5ii(a, b, c, d, x, s, t) { + return md5cmn(c ^ (b | ~d), a, b, x, s, t); +} + +var _default = md5; +exports.default = _default; \ No newline at end of file diff --git a/amplify/functions/fetchDocuments/node_modules/uuid/dist/md5.js b/amplify/functions/fetchDocuments/node_modules/uuid/dist/md5.js new file mode 100644 index 0000000..824d481 --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/uuid/dist/md5.js @@ -0,0 +1,23 @@ +"use strict"; + +Object.defineProperty(exports, "__esModule", { + value: true +}); +exports.default = void 0; + +var _crypto = _interopRequireDefault(require("crypto")); + +function _interopRequireDefault(obj) { return obj && obj.__esModule ? 
obj : { default: obj }; } + +function md5(bytes) { + if (Array.isArray(bytes)) { + bytes = Buffer.from(bytes); + } else if (typeof bytes === 'string') { + bytes = Buffer.from(bytes, 'utf8'); + } + + return _crypto.default.createHash('md5').update(bytes).digest(); +} + +var _default = md5; +exports.default = _default; \ No newline at end of file diff --git a/amplify/functions/fetchDocuments/node_modules/uuid/dist/native-browser.js b/amplify/functions/fetchDocuments/node_modules/uuid/dist/native-browser.js new file mode 100644 index 0000000..c2eea59 --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/uuid/dist/native-browser.js @@ -0,0 +1,11 @@ +"use strict"; + +Object.defineProperty(exports, "__esModule", { + value: true +}); +exports.default = void 0; +const randomUUID = typeof crypto !== 'undefined' && crypto.randomUUID && crypto.randomUUID.bind(crypto); +var _default = { + randomUUID +}; +exports.default = _default; \ No newline at end of file diff --git a/amplify/functions/fetchDocuments/node_modules/uuid/dist/native.js b/amplify/functions/fetchDocuments/node_modules/uuid/dist/native.js new file mode 100644 index 0000000..de80469 --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/uuid/dist/native.js @@ -0,0 +1,15 @@ +"use strict"; + +Object.defineProperty(exports, "__esModule", { + value: true +}); +exports.default = void 0; + +var _crypto = _interopRequireDefault(require("crypto")); + +function _interopRequireDefault(obj) { return obj && obj.__esModule ? 
obj : { default: obj }; } + +var _default = { + randomUUID: _crypto.default.randomUUID +}; +exports.default = _default; \ No newline at end of file diff --git a/amplify/functions/fetchDocuments/node_modules/uuid/dist/nil.js b/amplify/functions/fetchDocuments/node_modules/uuid/dist/nil.js new file mode 100644 index 0000000..7ade577 --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/uuid/dist/nil.js @@ -0,0 +1,8 @@ +"use strict"; + +Object.defineProperty(exports, "__esModule", { + value: true +}); +exports.default = void 0; +var _default = '00000000-0000-0000-0000-000000000000'; +exports.default = _default; \ No newline at end of file diff --git a/amplify/functions/fetchDocuments/node_modules/uuid/dist/parse.js b/amplify/functions/fetchDocuments/node_modules/uuid/dist/parse.js new file mode 100644 index 0000000..4c69fc3 --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/uuid/dist/parse.js @@ -0,0 +1,45 @@ +"use strict"; + +Object.defineProperty(exports, "__esModule", { + value: true +}); +exports.default = void 0; + +var _validate = _interopRequireDefault(require("./validate.js")); + +function _interopRequireDefault(obj) { return obj && obj.__esModule ? obj : { default: obj }; } + +function parse(uuid) { + if (!(0, _validate.default)(uuid)) { + throw TypeError('Invalid UUID'); + } + + let v; + const arr = new Uint8Array(16); // Parse ########-....-....-....-............ + + arr[0] = (v = parseInt(uuid.slice(0, 8), 16)) >>> 24; + arr[1] = v >>> 16 & 0xff; + arr[2] = v >>> 8 & 0xff; + arr[3] = v & 0xff; // Parse ........-####-....-....-............ + + arr[4] = (v = parseInt(uuid.slice(9, 13), 16)) >>> 8; + arr[5] = v & 0xff; // Parse ........-....-####-....-............ + + arr[6] = (v = parseInt(uuid.slice(14, 18), 16)) >>> 8; + arr[7] = v & 0xff; // Parse ........-....-....-####-............ 
+ + arr[8] = (v = parseInt(uuid.slice(19, 23), 16)) >>> 8; + arr[9] = v & 0xff; // Parse ........-....-....-....-############ + // (Use "/" to avoid 32-bit truncation when bit-shifting high-order bytes) + + arr[10] = (v = parseInt(uuid.slice(24, 36), 16)) / 0x10000000000 & 0xff; + arr[11] = v / 0x100000000 & 0xff; + arr[12] = v >>> 24 & 0xff; + arr[13] = v >>> 16 & 0xff; + arr[14] = v >>> 8 & 0xff; + arr[15] = v & 0xff; + return arr; +} + +var _default = parse; +exports.default = _default; \ No newline at end of file diff --git a/amplify/functions/fetchDocuments/node_modules/uuid/dist/regex.js b/amplify/functions/fetchDocuments/node_modules/uuid/dist/regex.js new file mode 100644 index 0000000..1ef91d6 --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/uuid/dist/regex.js @@ -0,0 +1,8 @@ +"use strict"; + +Object.defineProperty(exports, "__esModule", { + value: true +}); +exports.default = void 0; +var _default = /^(?:[0-9a-f]{8}-[0-9a-f]{4}-[1-5][0-9a-f]{3}-[89ab][0-9a-f]{3}-[0-9a-f]{12}|00000000-0000-0000-0000-000000000000)$/i; +exports.default = _default; \ No newline at end of file diff --git a/amplify/functions/fetchDocuments/node_modules/uuid/dist/rng-browser.js b/amplify/functions/fetchDocuments/node_modules/uuid/dist/rng-browser.js new file mode 100644 index 0000000..d067cdb --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/uuid/dist/rng-browser.js @@ -0,0 +1,25 @@ +"use strict"; + +Object.defineProperty(exports, "__esModule", { + value: true +}); +exports.default = rng; +// Unique ID creation requires a high quality random # generator. In the browser we therefore +// require the crypto API and do not support built-in fallback to lower quality random number +// generators (like Math.random()). 
+let getRandomValues; +const rnds8 = new Uint8Array(16); + +function rng() { + // lazy load so that environments that need to polyfill have a chance to do so + if (!getRandomValues) { + // getRandomValues needs to be invoked in a context where "this" is a Crypto implementation. + getRandomValues = typeof crypto !== 'undefined' && crypto.getRandomValues && crypto.getRandomValues.bind(crypto); + + if (!getRandomValues) { + throw new Error('crypto.getRandomValues() not supported. See https://github.com/uuidjs/uuid#getrandomvalues-not-supported'); + } + } + + return getRandomValues(rnds8); +} \ No newline at end of file diff --git a/amplify/functions/fetchDocuments/node_modules/uuid/dist/rng.js b/amplify/functions/fetchDocuments/node_modules/uuid/dist/rng.js new file mode 100644 index 0000000..3507f93 --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/uuid/dist/rng.js @@ -0,0 +1,24 @@ +"use strict"; + +Object.defineProperty(exports, "__esModule", { + value: true +}); +exports.default = rng; + +var _crypto = _interopRequireDefault(require("crypto")); + +function _interopRequireDefault(obj) { return obj && obj.__esModule ? 
obj : { default: obj }; } + +const rnds8Pool = new Uint8Array(256); // # of random values to pre-allocate + +let poolPtr = rnds8Pool.length; + +function rng() { + if (poolPtr > rnds8Pool.length - 16) { + _crypto.default.randomFillSync(rnds8Pool); + + poolPtr = 0; + } + + return rnds8Pool.slice(poolPtr, poolPtr += 16); +} \ No newline at end of file diff --git a/amplify/functions/fetchDocuments/node_modules/uuid/dist/sha1-browser.js b/amplify/functions/fetchDocuments/node_modules/uuid/dist/sha1-browser.js new file mode 100644 index 0000000..24cbced --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/uuid/dist/sha1-browser.js @@ -0,0 +1,104 @@ +"use strict"; + +Object.defineProperty(exports, "__esModule", { + value: true +}); +exports.default = void 0; + +// Adapted from Chris Veness' SHA1 code at +// http://www.movable-type.co.uk/scripts/sha1.html +function f(s, x, y, z) { + switch (s) { + case 0: + return x & y ^ ~x & z; + + case 1: + return x ^ y ^ z; + + case 2: + return x & y ^ x & z ^ y & z; + + case 3: + return x ^ y ^ z; + } +} + +function ROTL(x, n) { + return x << n | x >>> 32 - n; +} + +function sha1(bytes) { + const K = [0x5a827999, 0x6ed9eba1, 0x8f1bbcdc, 0xca62c1d6]; + const H = [0x67452301, 0xefcdab89, 0x98badcfe, 0x10325476, 0xc3d2e1f0]; + + if (typeof bytes === 'string') { + const msg = unescape(encodeURIComponent(bytes)); // UTF8 escape + + bytes = []; + + for (let i = 0; i < msg.length; ++i) { + bytes.push(msg.charCodeAt(i)); + } + } else if (!Array.isArray(bytes)) { + // Convert Array-like to Array + bytes = Array.prototype.slice.call(bytes); + } + + bytes.push(0x80); + const l = bytes.length / 4 + 2; + const N = Math.ceil(l / 16); + const M = new Array(N); + + for (let i = 0; i < N; ++i) { + const arr = new Uint32Array(16); + + for (let j = 0; j < 16; ++j) { + arr[j] = bytes[i * 64 + j * 4] << 24 | bytes[i * 64 + j * 4 + 1] << 16 | bytes[i * 64 + j * 4 + 2] << 8 | bytes[i * 64 + j * 4 + 3]; + } + + M[i] = arr; + } + + M[N - 1][14] = 
(bytes.length - 1) * 8 / Math.pow(2, 32); + M[N - 1][14] = Math.floor(M[N - 1][14]); + M[N - 1][15] = (bytes.length - 1) * 8 & 0xffffffff; + + for (let i = 0; i < N; ++i) { + const W = new Uint32Array(80); + + for (let t = 0; t < 16; ++t) { + W[t] = M[i][t]; + } + + for (let t = 16; t < 80; ++t) { + W[t] = ROTL(W[t - 3] ^ W[t - 8] ^ W[t - 14] ^ W[t - 16], 1); + } + + let a = H[0]; + let b = H[1]; + let c = H[2]; + let d = H[3]; + let e = H[4]; + + for (let t = 0; t < 80; ++t) { + const s = Math.floor(t / 20); + const T = ROTL(a, 5) + f(s, b, c, d) + e + K[s] + W[t] >>> 0; + e = d; + d = c; + c = ROTL(b, 30) >>> 0; + b = a; + a = T; + } + + H[0] = H[0] + a >>> 0; + H[1] = H[1] + b >>> 0; + H[2] = H[2] + c >>> 0; + H[3] = H[3] + d >>> 0; + H[4] = H[4] + e >>> 0; + } + + return [H[0] >> 24 & 0xff, H[0] >> 16 & 0xff, H[0] >> 8 & 0xff, H[0] & 0xff, H[1] >> 24 & 0xff, H[1] >> 16 & 0xff, H[1] >> 8 & 0xff, H[1] & 0xff, H[2] >> 24 & 0xff, H[2] >> 16 & 0xff, H[2] >> 8 & 0xff, H[2] & 0xff, H[3] >> 24 & 0xff, H[3] >> 16 & 0xff, H[3] >> 8 & 0xff, H[3] & 0xff, H[4] >> 24 & 0xff, H[4] >> 16 & 0xff, H[4] >> 8 & 0xff, H[4] & 0xff]; +} + +var _default = sha1; +exports.default = _default; \ No newline at end of file diff --git a/amplify/functions/fetchDocuments/node_modules/uuid/dist/sha1.js b/amplify/functions/fetchDocuments/node_modules/uuid/dist/sha1.js new file mode 100644 index 0000000..03bdd63 --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/uuid/dist/sha1.js @@ -0,0 +1,23 @@ +"use strict"; + +Object.defineProperty(exports, "__esModule", { + value: true +}); +exports.default = void 0; + +var _crypto = _interopRequireDefault(require("crypto")); + +function _interopRequireDefault(obj) { return obj && obj.__esModule ? 
obj : { default: obj }; } + +function sha1(bytes) { + if (Array.isArray(bytes)) { + bytes = Buffer.from(bytes); + } else if (typeof bytes === 'string') { + bytes = Buffer.from(bytes, 'utf8'); + } + + return _crypto.default.createHash('sha1').update(bytes).digest(); +} + +var _default = sha1; +exports.default = _default; \ No newline at end of file diff --git a/amplify/functions/fetchDocuments/node_modules/uuid/dist/stringify.js b/amplify/functions/fetchDocuments/node_modules/uuid/dist/stringify.js new file mode 100644 index 0000000..390bf89 --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/uuid/dist/stringify.js @@ -0,0 +1,44 @@ +"use strict"; + +Object.defineProperty(exports, "__esModule", { + value: true +}); +exports.default = void 0; +exports.unsafeStringify = unsafeStringify; + +var _validate = _interopRequireDefault(require("./validate.js")); + +function _interopRequireDefault(obj) { return obj && obj.__esModule ? obj : { default: obj }; } + +/** + * Convert array of 16 byte values to UUID string format of the form: + * XXXXXXXX-XXXX-XXXX-XXXX-XXXXXXXXXXXX + */ +const byteToHex = []; + +for (let i = 0; i < 256; ++i) { + byteToHex.push((i + 0x100).toString(16).slice(1)); +} + +function unsafeStringify(arr, offset = 0) { + // Note: Be careful editing this code! It's been tuned for performance + // and works in ways you may not expect. 
See https://github.com/uuidjs/uuid/pull/434 + return byteToHex[arr[offset + 0]] + byteToHex[arr[offset + 1]] + byteToHex[arr[offset + 2]] + byteToHex[arr[offset + 3]] + '-' + byteToHex[arr[offset + 4]] + byteToHex[arr[offset + 5]] + '-' + byteToHex[arr[offset + 6]] + byteToHex[arr[offset + 7]] + '-' + byteToHex[arr[offset + 8]] + byteToHex[arr[offset + 9]] + '-' + byteToHex[arr[offset + 10]] + byteToHex[arr[offset + 11]] + byteToHex[arr[offset + 12]] + byteToHex[arr[offset + 13]] + byteToHex[arr[offset + 14]] + byteToHex[arr[offset + 15]]; +} + +function stringify(arr, offset = 0) { + const uuid = unsafeStringify(arr, offset); // Consistency check for valid UUID. If this throws, it's likely due to one + // of the following: + // - One or more input array values don't map to a hex octet (leading to + // "undefined" in the uuid) + // - Invalid input values for the RFC `version` or `variant` fields + + if (!(0, _validate.default)(uuid)) { + throw TypeError('Stringified UUID is invalid'); + } + + return uuid; +} + +var _default = stringify; +exports.default = _default; \ No newline at end of file diff --git a/amplify/functions/fetchDocuments/node_modules/uuid/dist/uuid-bin.js b/amplify/functions/fetchDocuments/node_modules/uuid/dist/uuid-bin.js new file mode 100644 index 0000000..50a7a9f --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/uuid/dist/uuid-bin.js @@ -0,0 +1,85 @@ +"use strict"; + +var _assert = _interopRequireDefault(require("assert")); + +var _v = _interopRequireDefault(require("./v1.js")); + +var _v2 = _interopRequireDefault(require("./v3.js")); + +var _v3 = _interopRequireDefault(require("./v4.js")); + +var _v4 = _interopRequireDefault(require("./v5.js")); + +function _interopRequireDefault(obj) { return obj && obj.__esModule ? 
obj : { default: obj }; } + +function usage() { + console.log('Usage:'); + console.log(' uuid'); + console.log(' uuid v1'); + console.log(' uuid v3 '); + console.log(' uuid v4'); + console.log(' uuid v5 '); + console.log(' uuid --help'); + console.log('\nNote: may be "URL" or "DNS" to use the corresponding UUIDs defined by RFC4122'); +} + +const args = process.argv.slice(2); + +if (args.indexOf('--help') >= 0) { + usage(); + process.exit(0); +} + +const version = args.shift() || 'v4'; + +switch (version) { + case 'v1': + console.log((0, _v.default)()); + break; + + case 'v3': + { + const name = args.shift(); + let namespace = args.shift(); + (0, _assert.default)(name != null, 'v3 name not specified'); + (0, _assert.default)(namespace != null, 'v3 namespace not specified'); + + if (namespace === 'URL') { + namespace = _v2.default.URL; + } + + if (namespace === 'DNS') { + namespace = _v2.default.DNS; + } + + console.log((0, _v2.default)(name, namespace)); + break; + } + + case 'v4': + console.log((0, _v3.default)()); + break; + + case 'v5': + { + const name = args.shift(); + let namespace = args.shift(); + (0, _assert.default)(name != null, 'v5 name not specified'); + (0, _assert.default)(namespace != null, 'v5 namespace not specified'); + + if (namespace === 'URL') { + namespace = _v4.default.URL; + } + + if (namespace === 'DNS') { + namespace = _v4.default.DNS; + } + + console.log((0, _v4.default)(name, namespace)); + break; + } + + default: + usage(); + process.exit(1); +} \ No newline at end of file diff --git a/amplify/functions/fetchDocuments/node_modules/uuid/dist/v1.js b/amplify/functions/fetchDocuments/node_modules/uuid/dist/v1.js new file mode 100644 index 0000000..125bc58 --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/uuid/dist/v1.js @@ -0,0 +1,107 @@ +"use strict"; + +Object.defineProperty(exports, "__esModule", { + value: true +}); +exports.default = void 0; + +var _rng = _interopRequireDefault(require("./rng.js")); + +var _stringify = 
require("./stringify.js"); + +function _interopRequireDefault(obj) { return obj && obj.__esModule ? obj : { default: obj }; } + +// **`v1()` - Generate time-based UUID** +// +// Inspired by https://github.com/LiosK/UUID.js +// and http://docs.python.org/library/uuid.html +let _nodeId; + +let _clockseq; // Previous uuid creation time + + +let _lastMSecs = 0; +let _lastNSecs = 0; // See https://github.com/uuidjs/uuid for API details + +function v1(options, buf, offset) { + let i = buf && offset || 0; + const b = buf || new Array(16); + options = options || {}; + let node = options.node || _nodeId; + let clockseq = options.clockseq !== undefined ? options.clockseq : _clockseq; // node and clockseq need to be initialized to random values if they're not + // specified. We do this lazily to minimize issues related to insufficient + // system entropy. See #189 + + if (node == null || clockseq == null) { + const seedBytes = options.random || (options.rng || _rng.default)(); + + if (node == null) { + // Per 4.5, create and 48-bit node id, (47 random bits + multicast bit = 1) + node = _nodeId = [seedBytes[0] | 0x01, seedBytes[1], seedBytes[2], seedBytes[3], seedBytes[4], seedBytes[5]]; + } + + if (clockseq == null) { + // Per 4.2.2, randomize (14 bit) clockseq + clockseq = _clockseq = (seedBytes[6] << 8 | seedBytes[7]) & 0x3fff; + } + } // UUID timestamps are 100 nano-second units since the Gregorian epoch, + // (1582-10-15 00:00). JSNumbers aren't precise enough for this, so + // time is handled internally as 'msecs' (integer milliseconds) and 'nsecs' + // (100-nanoseconds offset from msecs) since unix epoch, 1970-01-01 00:00. + + + let msecs = options.msecs !== undefined ? options.msecs : Date.now(); // Per 4.2.1.2, use count of uuid's generated during the current clock + // cycle to simulate higher resolution clock + + let nsecs = options.nsecs !== undefined ? 
options.nsecs : _lastNSecs + 1; // Time since last uuid creation (in msecs) + + const dt = msecs - _lastMSecs + (nsecs - _lastNSecs) / 10000; // Per 4.2.1.2, Bump clockseq on clock regression + + if (dt < 0 && options.clockseq === undefined) { + clockseq = clockseq + 1 & 0x3fff; + } // Reset nsecs if clock regresses (new clockseq) or we've moved onto a new + // time interval + + + if ((dt < 0 || msecs > _lastMSecs) && options.nsecs === undefined) { + nsecs = 0; + } // Per 4.2.1.2 Throw error if too many uuids are requested + + + if (nsecs >= 10000) { + throw new Error("uuid.v1(): Can't create more than 10M uuids/sec"); + } + + _lastMSecs = msecs; + _lastNSecs = nsecs; + _clockseq = clockseq; // Per 4.1.4 - Convert from unix epoch to Gregorian epoch + + msecs += 12219292800000; // `time_low` + + const tl = ((msecs & 0xfffffff) * 10000 + nsecs) % 0x100000000; + b[i++] = tl >>> 24 & 0xff; + b[i++] = tl >>> 16 & 0xff; + b[i++] = tl >>> 8 & 0xff; + b[i++] = tl & 0xff; // `time_mid` + + const tmh = msecs / 0x100000000 * 10000 & 0xfffffff; + b[i++] = tmh >>> 8 & 0xff; + b[i++] = tmh & 0xff; // `time_high_and_version` + + b[i++] = tmh >>> 24 & 0xf | 0x10; // include version + + b[i++] = tmh >>> 16 & 0xff; // `clock_seq_hi_and_reserved` (Per 4.2.2 - include variant) + + b[i++] = clockseq >>> 8 | 0x80; // `clock_seq_low` + + b[i++] = clockseq & 0xff; // `node` + + for (let n = 0; n < 6; ++n) { + b[i + n] = node[n]; + } + + return buf || (0, _stringify.unsafeStringify)(b); +} + +var _default = v1; +exports.default = _default; \ No newline at end of file diff --git a/amplify/functions/fetchDocuments/node_modules/uuid/dist/v3.js b/amplify/functions/fetchDocuments/node_modules/uuid/dist/v3.js new file mode 100644 index 0000000..6b47ff5 --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/uuid/dist/v3.js @@ -0,0 +1,16 @@ +"use strict"; + +Object.defineProperty(exports, "__esModule", { + value: true +}); +exports.default = void 0; + +var _v = 
_interopRequireDefault(require("./v35.js")); + +var _md = _interopRequireDefault(require("./md5.js")); + +function _interopRequireDefault(obj) { return obj && obj.__esModule ? obj : { default: obj }; } + +const v3 = (0, _v.default)('v3', 0x30, _md.default); +var _default = v3; +exports.default = _default; \ No newline at end of file diff --git a/amplify/functions/fetchDocuments/node_modules/uuid/dist/v35.js b/amplify/functions/fetchDocuments/node_modules/uuid/dist/v35.js new file mode 100644 index 0000000..7c522d9 --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/uuid/dist/v35.js @@ -0,0 +1,80 @@ +"use strict"; + +Object.defineProperty(exports, "__esModule", { + value: true +}); +exports.URL = exports.DNS = void 0; +exports.default = v35; + +var _stringify = require("./stringify.js"); + +var _parse = _interopRequireDefault(require("./parse.js")); + +function _interopRequireDefault(obj) { return obj && obj.__esModule ? obj : { default: obj }; } + +function stringToBytes(str) { + str = unescape(encodeURIComponent(str)); // UTF8 escape + + const bytes = []; + + for (let i = 0; i < str.length; ++i) { + bytes.push(str.charCodeAt(i)); + } + + return bytes; +} + +const DNS = '6ba7b810-9dad-11d1-80b4-00c04fd430c8'; +exports.DNS = DNS; +const URL = '6ba7b811-9dad-11d1-80b4-00c04fd430c8'; +exports.URL = URL; + +function v35(name, version, hashfunc) { + function generateUUID(value, namespace, buf, offset) { + var _namespace; + + if (typeof value === 'string') { + value = stringToBytes(value); + } + + if (typeof namespace === 'string') { + namespace = (0, _parse.default)(namespace); + } + + if (((_namespace = namespace) === null || _namespace === void 0 ? void 0 : _namespace.length) !== 16) { + throw TypeError('Namespace must be array-like (16 iterable integer values, 0-255)'); + } // Compute hash of namespace and value, Per 4.3 + // Future: Use spread syntax when supported on all platforms, e.g. `bytes = + // hashfunc([...namespace, ... 
value])` + + + let bytes = new Uint8Array(16 + value.length); + bytes.set(namespace); + bytes.set(value, namespace.length); + bytes = hashfunc(bytes); + bytes[6] = bytes[6] & 0x0f | version; + bytes[8] = bytes[8] & 0x3f | 0x80; + + if (buf) { + offset = offset || 0; + + for (let i = 0; i < 16; ++i) { + buf[offset + i] = bytes[i]; + } + + return buf; + } + + return (0, _stringify.unsafeStringify)(bytes); + } // Function#name is not settable on some platforms (#270) + + + try { + generateUUID.name = name; // eslint-disable-next-line no-empty + } catch (err) {} // For CommonJS default export support + + + generateUUID.DNS = DNS; + generateUUID.URL = URL; + return generateUUID; +} \ No newline at end of file diff --git a/amplify/functions/fetchDocuments/node_modules/uuid/dist/v4.js b/amplify/functions/fetchDocuments/node_modules/uuid/dist/v4.js new file mode 100644 index 0000000..959d698 --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/uuid/dist/v4.js @@ -0,0 +1,43 @@ +"use strict"; + +Object.defineProperty(exports, "__esModule", { + value: true +}); +exports.default = void 0; + +var _native = _interopRequireDefault(require("./native.js")); + +var _rng = _interopRequireDefault(require("./rng.js")); + +var _stringify = require("./stringify.js"); + +function _interopRequireDefault(obj) { return obj && obj.__esModule ? 
obj : { default: obj }; } + +function v4(options, buf, offset) { + if (_native.default.randomUUID && !buf && !options) { + return _native.default.randomUUID(); + } + + options = options || {}; + + const rnds = options.random || (options.rng || _rng.default)(); // Per 4.4, set bits for version and `clock_seq_hi_and_reserved` + + + rnds[6] = rnds[6] & 0x0f | 0x40; + rnds[8] = rnds[8] & 0x3f | 0x80; // Copy bytes to buffer, if provided + + if (buf) { + offset = offset || 0; + + for (let i = 0; i < 16; ++i) { + buf[offset + i] = rnds[i]; + } + + return buf; + } + + return (0, _stringify.unsafeStringify)(rnds); +} + +var _default = v4; +exports.default = _default; \ No newline at end of file diff --git a/amplify/functions/fetchDocuments/node_modules/uuid/dist/v5.js b/amplify/functions/fetchDocuments/node_modules/uuid/dist/v5.js new file mode 100644 index 0000000..99d615e --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/uuid/dist/v5.js @@ -0,0 +1,16 @@ +"use strict"; + +Object.defineProperty(exports, "__esModule", { + value: true +}); +exports.default = void 0; + +var _v = _interopRequireDefault(require("./v35.js")); + +var _sha = _interopRequireDefault(require("./sha1.js")); + +function _interopRequireDefault(obj) { return obj && obj.__esModule ? obj : { default: obj }; } + +const v5 = (0, _v.default)('v5', 0x50, _sha.default); +var _default = v5; +exports.default = _default; \ No newline at end of file diff --git a/amplify/functions/fetchDocuments/node_modules/uuid/dist/validate.js b/amplify/functions/fetchDocuments/node_modules/uuid/dist/validate.js new file mode 100644 index 0000000..fd05215 --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/uuid/dist/validate.js @@ -0,0 +1,17 @@ +"use strict"; + +Object.defineProperty(exports, "__esModule", { + value: true +}); +exports.default = void 0; + +var _regex = _interopRequireDefault(require("./regex.js")); + +function _interopRequireDefault(obj) { return obj && obj.__esModule ? 
obj : { default: obj }; } + +function validate(uuid) { + return typeof uuid === 'string' && _regex.default.test(uuid); +} + +var _default = validate; +exports.default = _default; \ No newline at end of file diff --git a/amplify/functions/fetchDocuments/node_modules/uuid/dist/version.js b/amplify/functions/fetchDocuments/node_modules/uuid/dist/version.js new file mode 100644 index 0000000..f63af01 --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/uuid/dist/version.js @@ -0,0 +1,21 @@ +"use strict"; + +Object.defineProperty(exports, "__esModule", { + value: true +}); +exports.default = void 0; + +var _validate = _interopRequireDefault(require("./validate.js")); + +function _interopRequireDefault(obj) { return obj && obj.__esModule ? obj : { default: obj }; } + +function version(uuid) { + if (!(0, _validate.default)(uuid)) { + throw TypeError('Invalid UUID'); + } + + return parseInt(uuid.slice(14, 15), 16); +} + +var _default = version; +exports.default = _default; \ No newline at end of file diff --git a/amplify/functions/fetchDocuments/node_modules/uuid/package.json b/amplify/functions/fetchDocuments/node_modules/uuid/package.json new file mode 100644 index 0000000..6cc3361 --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/uuid/package.json @@ -0,0 +1,135 @@ +{ + "name": "uuid", + "version": "9.0.1", + "description": "RFC4122 (v1, v4, and v5) UUIDs", + "funding": [ + "https://github.com/sponsors/broofa", + "https://github.com/sponsors/ctavan" + ], + "commitlint": { + "extends": [ + "@commitlint/config-conventional" + ] + }, + "keywords": [ + "uuid", + "guid", + "rfc4122" + ], + "license": "MIT", + "bin": { + "uuid": "./dist/bin/uuid" + }, + "sideEffects": false, + "main": "./dist/index.js", + "exports": { + ".": { + "node": { + "module": "./dist/esm-node/index.js", + "require": "./dist/index.js", + "import": "./wrapper.mjs" + }, + "browser": { + "import": "./dist/esm-browser/index.js", + "require": "./dist/commonjs-browser/index.js" + 
}, + "default": "./dist/esm-browser/index.js" + }, + "./package.json": "./package.json" + }, + "module": "./dist/esm-node/index.js", + "browser": { + "./dist/md5.js": "./dist/md5-browser.js", + "./dist/native.js": "./dist/native-browser.js", + "./dist/rng.js": "./dist/rng-browser.js", + "./dist/sha1.js": "./dist/sha1-browser.js", + "./dist/esm-node/index.js": "./dist/esm-browser/index.js" + }, + "files": [ + "CHANGELOG.md", + "CONTRIBUTING.md", + "LICENSE.md", + "README.md", + "dist", + "wrapper.mjs" + ], + "devDependencies": { + "@babel/cli": "7.18.10", + "@babel/core": "7.18.10", + "@babel/eslint-parser": "7.18.9", + "@babel/preset-env": "7.18.10", + "@commitlint/cli": "17.0.3", + "@commitlint/config-conventional": "17.0.3", + "bundlewatch": "0.3.3", + "eslint": "8.21.0", + "eslint-config-prettier": "8.5.0", + "eslint-config-standard": "17.0.0", + "eslint-plugin-import": "2.26.0", + "eslint-plugin-node": "11.1.0", + "eslint-plugin-prettier": "4.2.1", + "eslint-plugin-promise": "6.0.0", + "husky": "8.0.1", + "jest": "28.1.3", + "lint-staged": "13.0.3", + "npm-run-all": "4.1.5", + "optional-dev-dependency": "2.0.1", + "prettier": "2.7.1", + "random-seed": "0.3.0", + "runmd": "1.3.9", + "standard-version": "9.5.0" + }, + "optionalDevDependencies": { + "@wdio/browserstack-service": "7.16.10", + "@wdio/cli": "7.16.10", + "@wdio/jasmine-framework": "7.16.6", + "@wdio/local-runner": "7.16.10", + "@wdio/spec-reporter": "7.16.9", + "@wdio/static-server-service": "7.16.6" + }, + "scripts": { + "examples:browser:webpack:build": "cd examples/browser-webpack && npm install && npm run build", + "examples:browser:rollup:build": "cd examples/browser-rollup && npm install && npm run build", + "examples:node:commonjs:test": "cd examples/node-commonjs && npm install && npm test", + "examples:node:esmodules:test": "cd examples/node-esmodules && npm install && npm test", + "examples:node:jest:test": "cd examples/node-jest && npm install && npm test", + "prepare": "cd $( git rev-parse 
--show-toplevel ) && husky install", + "lint": "npm run eslint:check && npm run prettier:check", + "eslint:check": "eslint src/ test/ examples/ *.js", + "eslint:fix": "eslint --fix src/ test/ examples/ *.js", + "pretest": "[ -n $CI ] || npm run build", + "test": "BABEL_ENV=commonjsNode node --throw-deprecation node_modules/.bin/jest test/unit/", + "pretest:browser": "optional-dev-dependency && npm run build && npm-run-all --parallel examples:browser:**", + "test:browser": "wdio run ./wdio.conf.js", + "pretest:node": "npm run build", + "test:node": "npm-run-all --parallel examples:node:**", + "test:pack": "./scripts/testpack.sh", + "pretest:benchmark": "npm run build", + "test:benchmark": "cd examples/benchmark && npm install && npm test", + "prettier:check": "prettier --check '**/*.{js,jsx,json,md}'", + "prettier:fix": "prettier --write '**/*.{js,jsx,json,md}'", + "bundlewatch": "npm run pretest:browser && bundlewatch --config bundlewatch.config.json", + "md": "runmd --watch --output=README.md README_js.md", + "docs": "( node --version | grep -q 'v18' ) && ( npm run build && npx runmd --output=README.md README_js.md )", + "docs:diff": "npm run docs && git diff --quiet README.md", + "build": "./scripts/build.sh", + "prepack": "npm run build", + "release": "standard-version --no-verify" + }, + "repository": { + "type": "git", + "url": "https://github.com/uuidjs/uuid.git" + }, + "lint-staged": { + "*.{js,jsx,json,md}": [ + "prettier --write" + ], + "*.{js,jsx}": [ + "eslint --fix" + ] + }, + "standard-version": { + "scripts": { + "postchangelog": "prettier --write CHANGELOG.md" + } + } +} diff --git a/amplify/functions/fetchDocuments/node_modules/uuid/wrapper.mjs b/amplify/functions/fetchDocuments/node_modules/uuid/wrapper.mjs new file mode 100644 index 0000000..c31e9ce --- /dev/null +++ b/amplify/functions/fetchDocuments/node_modules/uuid/wrapper.mjs @@ -0,0 +1,10 @@ +import uuid from './dist/index.js'; +export const v1 = uuid.v1; +export const v3 = uuid.v3; +export 
const v4 = uuid.v4; +export const v5 = uuid.v5; +export const NIL = uuid.NIL; +export const version = uuid.version; +export const validate = uuid.validate; +export const stringify = uuid.stringify; +export const parse = uuid.parse; diff --git a/amplify/functions/fetchDocuments/package-lock.json b/amplify/functions/fetchDocuments/package-lock.json new file mode 100644 index 0000000..c5cfc82 --- /dev/null +++ b/amplify/functions/fetchDocuments/package-lock.json @@ -0,0 +1,1349 @@ +{ + "name": "fetchdocuments", + "version": "1.0.0", + "lockfileVersion": 3, + "requires": true, + "packages": { + "": { + "name": "fetchdocuments", + "version": "1.0.0", + "license": "ISC", + "dependencies": { + "@aws-sdk/client-dynamodb": "^3.803.0", + "@aws-sdk/lib-dynamodb": "^3.803.0" + } + }, + "node_modules/@aws-crypto/sha256-browser": { + "version": "5.2.0", + "resolved": "https://registry.npmjs.org/@aws-crypto/sha256-browser/-/sha256-browser-5.2.0.tgz", + "integrity": "sha512-AXfN/lGotSQwu6HNcEsIASo7kWXZ5HYWvfOmSNKDsEqC4OashTp8alTmaz+F7TC2L083SFv5RdB+qU3Vs1kZqw==", + "license": "Apache-2.0", + "dependencies": { + "@aws-crypto/sha256-js": "^5.2.0", + "@aws-crypto/supports-web-crypto": "^5.2.0", + "@aws-crypto/util": "^5.2.0", + "@aws-sdk/types": "^3.222.0", + "@aws-sdk/util-locate-window": "^3.0.0", + "@smithy/util-utf8": "^2.0.0", + "tslib": "^2.6.2" + } + }, + "node_modules/@aws-crypto/sha256-browser/node_modules/@smithy/is-array-buffer": { + "version": "2.2.0", + "resolved": "https://registry.npmjs.org/@smithy/is-array-buffer/-/is-array-buffer-2.2.0.tgz", + "integrity": "sha512-GGP3O9QFD24uGeAXYUjwSTXARoqpZykHadOmA8G5vfJPK0/DC67qa//0qvqrJzL1xc8WQWX7/yc7fwudjPHPhA==", + "license": "Apache-2.0", + "dependencies": { + "tslib": "^2.6.2" + }, + "engines": { + "node": ">=14.0.0" + } + }, + "node_modules/@aws-crypto/sha256-browser/node_modules/@smithy/util-buffer-from": { + "version": "2.2.0", + "resolved": 
"https://registry.npmjs.org/@smithy/util-buffer-from/-/util-buffer-from-2.2.0.tgz", + "integrity": "sha512-IJdWBbTcMQ6DA0gdNhh/BwrLkDR+ADW5Kr1aZmd4k3DIF6ezMV4R2NIAmT08wQJ3yUK82thHWmC/TnK/wpMMIA==", + "license": "Apache-2.0", + "dependencies": { + "@smithy/is-array-buffer": "^2.2.0", + "tslib": "^2.6.2" + }, + "engines": { + "node": ">=14.0.0" + } + }, + "node_modules/@aws-crypto/sha256-browser/node_modules/@smithy/util-utf8": { + "version": "2.3.0", + "resolved": "https://registry.npmjs.org/@smithy/util-utf8/-/util-utf8-2.3.0.tgz", + "integrity": "sha512-R8Rdn8Hy72KKcebgLiv8jQcQkXoLMOGGv5uI1/k0l+snqkOzQ1R0ChUBCxWMlBsFMekWjq0wRudIweFs7sKT5A==", + "license": "Apache-2.0", + "dependencies": { + "@smithy/util-buffer-from": "^2.2.0", + "tslib": "^2.6.2" + }, + "engines": { + "node": ">=14.0.0" + } + }, + "node_modules/@aws-crypto/sha256-js": { + "version": "5.2.0", + "resolved": "https://registry.npmjs.org/@aws-crypto/sha256-js/-/sha256-js-5.2.0.tgz", + "integrity": "sha512-FFQQyu7edu4ufvIZ+OadFpHHOt+eSTBaYaki44c+akjg7qZg9oOQeLlk77F6tSYqjDAFClrHJk9tMf0HdVyOvA==", + "license": "Apache-2.0", + "dependencies": { + "@aws-crypto/util": "^5.2.0", + "@aws-sdk/types": "^3.222.0", + "tslib": "^2.6.2" + }, + "engines": { + "node": ">=16.0.0" + } + }, + "node_modules/@aws-crypto/supports-web-crypto": { + "version": "5.2.0", + "resolved": "https://registry.npmjs.org/@aws-crypto/supports-web-crypto/-/supports-web-crypto-5.2.0.tgz", + "integrity": "sha512-iAvUotm021kM33eCdNfwIN//F77/IADDSs58i+MDaOqFrVjZo9bAal0NK7HurRuWLLpF1iLX7gbWrjHjeo+YFg==", + "license": "Apache-2.0", + "dependencies": { + "tslib": "^2.6.2" + } + }, + "node_modules/@aws-crypto/util": { + "version": "5.2.0", + "resolved": "https://registry.npmjs.org/@aws-crypto/util/-/util-5.2.0.tgz", + "integrity": "sha512-4RkU9EsI6ZpBve5fseQlGNUWKMa1RLPQ1dnjnQoe07ldfIzcsGb5hC5W0Dm7u423KWzawlrpbjXBrXCEv9zazQ==", + "license": "Apache-2.0", + "dependencies": { + "@aws-sdk/types": "^3.222.0", + "@smithy/util-utf8": "^2.0.0", + 
"tslib": "^2.6.2" + } + }, + "node_modules/@aws-crypto/util/node_modules/@smithy/is-array-buffer": { + "version": "2.2.0", + "resolved": "https://registry.npmjs.org/@smithy/is-array-buffer/-/is-array-buffer-2.2.0.tgz", + "integrity": "sha512-GGP3O9QFD24uGeAXYUjwSTXARoqpZykHadOmA8G5vfJPK0/DC67qa//0qvqrJzL1xc8WQWX7/yc7fwudjPHPhA==", + "license": "Apache-2.0", + "dependencies": { + "tslib": "^2.6.2" + }, + "engines": { + "node": ">=14.0.0" + } + }, + "node_modules/@aws-crypto/util/node_modules/@smithy/util-buffer-from": { + "version": "2.2.0", + "resolved": "https://registry.npmjs.org/@smithy/util-buffer-from/-/util-buffer-from-2.2.0.tgz", + "integrity": "sha512-IJdWBbTcMQ6DA0gdNhh/BwrLkDR+ADW5Kr1aZmd4k3DIF6ezMV4R2NIAmT08wQJ3yUK82thHWmC/TnK/wpMMIA==", + "license": "Apache-2.0", + "dependencies": { + "@smithy/is-array-buffer": "^2.2.0", + "tslib": "^2.6.2" + }, + "engines": { + "node": ">=14.0.0" + } + }, + "node_modules/@aws-crypto/util/node_modules/@smithy/util-utf8": { + "version": "2.3.0", + "resolved": "https://registry.npmjs.org/@smithy/util-utf8/-/util-utf8-2.3.0.tgz", + "integrity": "sha512-R8Rdn8Hy72KKcebgLiv8jQcQkXoLMOGGv5uI1/k0l+snqkOzQ1R0ChUBCxWMlBsFMekWjq0wRudIweFs7sKT5A==", + "license": "Apache-2.0", + "dependencies": { + "@smithy/util-buffer-from": "^2.2.0", + "tslib": "^2.6.2" + }, + "engines": { + "node": ">=14.0.0" + } + }, + "node_modules/@aws-sdk/client-dynamodb": { + "version": "3.803.0", + "resolved": "https://registry.npmjs.org/@aws-sdk/client-dynamodb/-/client-dynamodb-3.803.0.tgz", + "integrity": "sha512-rJPidxfyTQHz/1Naq3FukSoIt40GwXfv3npVR15bCBFpqx9TXEt7GoIUbiqm+Ftx8sx9hqJ6XNhf80FIa243gw==", + "license": "Apache-2.0", + "dependencies": { + "@aws-crypto/sha256-browser": "5.2.0", + "@aws-crypto/sha256-js": "5.2.0", + "@aws-sdk/core": "3.799.0", + "@aws-sdk/credential-provider-node": "3.803.0", + "@aws-sdk/middleware-endpoint-discovery": "3.775.0", + "@aws-sdk/middleware-host-header": "3.775.0", + "@aws-sdk/middleware-logger": "3.775.0", + 
"@aws-sdk/middleware-recursion-detection": "3.775.0", + "@aws-sdk/middleware-user-agent": "3.799.0", + "@aws-sdk/region-config-resolver": "3.775.0", + "@aws-sdk/types": "3.775.0", + "@aws-sdk/util-endpoints": "3.787.0", + "@aws-sdk/util-user-agent-browser": "3.775.0", + "@aws-sdk/util-user-agent-node": "3.799.0", + "@smithy/config-resolver": "^4.1.0", + "@smithy/core": "^3.3.0", + "@smithy/fetch-http-handler": "^5.0.2", + "@smithy/hash-node": "^4.0.2", + "@smithy/invalid-dependency": "^4.0.2", + "@smithy/middleware-content-length": "^4.0.2", + "@smithy/middleware-endpoint": "^4.1.1", + "@smithy/middleware-retry": "^4.1.2", + "@smithy/middleware-serde": "^4.0.3", + "@smithy/middleware-stack": "^4.0.2", + "@smithy/node-config-provider": "^4.0.2", + "@smithy/node-http-handler": "^4.0.4", + "@smithy/protocol-http": "^5.1.0", + "@smithy/smithy-client": "^4.2.1", + "@smithy/types": "^4.2.0", + "@smithy/url-parser": "^4.0.2", + "@smithy/util-base64": "^4.0.0", + "@smithy/util-body-length-browser": "^4.0.0", + "@smithy/util-body-length-node": "^4.0.0", + "@smithy/util-defaults-mode-browser": "^4.0.9", + "@smithy/util-defaults-mode-node": "^4.0.9", + "@smithy/util-endpoints": "^3.0.2", + "@smithy/util-middleware": "^4.0.2", + "@smithy/util-retry": "^4.0.3", + "@smithy/util-utf8": "^4.0.0", + "@smithy/util-waiter": "^4.0.3", + "@types/uuid": "^9.0.1", + "tslib": "^2.6.2", + "uuid": "^9.0.1" + }, + "engines": { + "node": ">=18.0.0" + } + }, + "node_modules/@aws-sdk/client-sso": { + "version": "3.803.0", + "resolved": "https://registry.npmjs.org/@aws-sdk/client-sso/-/client-sso-3.803.0.tgz", + "integrity": "sha512-TT3BRD1yiL3IGXBKfq560vvEdyOJtJr8bp+R82dD6P0IoS8aFcNtF822BOJy7CqvxksOc3hQKLaPVzE82gE8Ow==", + "license": "Apache-2.0", + "dependencies": { + "@aws-crypto/sha256-browser": "5.2.0", + "@aws-crypto/sha256-js": "5.2.0", + "@aws-sdk/core": "3.799.0", + "@aws-sdk/middleware-host-header": "3.775.0", + "@aws-sdk/middleware-logger": "3.775.0", + 
"@aws-sdk/middleware-recursion-detection": "3.775.0", + "@aws-sdk/middleware-user-agent": "3.799.0", + "@aws-sdk/region-config-resolver": "3.775.0", + "@aws-sdk/types": "3.775.0", + "@aws-sdk/util-endpoints": "3.787.0", + "@aws-sdk/util-user-agent-browser": "3.775.0", + "@aws-sdk/util-user-agent-node": "3.799.0", + "@smithy/config-resolver": "^4.1.0", + "@smithy/core": "^3.3.0", + "@smithy/fetch-http-handler": "^5.0.2", + "@smithy/hash-node": "^4.0.2", + "@smithy/invalid-dependency": "^4.0.2", + "@smithy/middleware-content-length": "^4.0.2", + "@smithy/middleware-endpoint": "^4.1.1", + "@smithy/middleware-retry": "^4.1.2", + "@smithy/middleware-serde": "^4.0.3", + "@smithy/middleware-stack": "^4.0.2", + "@smithy/node-config-provider": "^4.0.2", + "@smithy/node-http-handler": "^4.0.4", + "@smithy/protocol-http": "^5.1.0", + "@smithy/smithy-client": "^4.2.1", + "@smithy/types": "^4.2.0", + "@smithy/url-parser": "^4.0.2", + "@smithy/util-base64": "^4.0.0", + "@smithy/util-body-length-browser": "^4.0.0", + "@smithy/util-body-length-node": "^4.0.0", + "@smithy/util-defaults-mode-browser": "^4.0.9", + "@smithy/util-defaults-mode-node": "^4.0.9", + "@smithy/util-endpoints": "^3.0.2", + "@smithy/util-middleware": "^4.0.2", + "@smithy/util-retry": "^4.0.3", + "@smithy/util-utf8": "^4.0.0", + "tslib": "^2.6.2" + }, + "engines": { + "node": ">=18.0.0" + } + }, + "node_modules/@aws-sdk/core": { + "version": "3.799.0", + "resolved": "https://registry.npmjs.org/@aws-sdk/core/-/core-3.799.0.tgz", + "integrity": "sha512-hkKF3Zpc6+H8GI1rlttYVRh9uEE77cqAzLmLpY3iu7sql8cZgPERRBfaFct8p1SaDyrksLNiboD1vKW58mbsYg==", + "license": "Apache-2.0", + "dependencies": { + "@aws-sdk/types": "3.775.0", + "@smithy/core": "^3.3.0", + "@smithy/node-config-provider": "^4.0.2", + "@smithy/property-provider": "^4.0.2", + "@smithy/protocol-http": "^5.1.0", + "@smithy/signature-v4": "^5.1.0", + "@smithy/smithy-client": "^4.2.1", + "@smithy/types": "^4.2.0", + "@smithy/util-middleware": "^4.0.2", + 
"fast-xml-parser": "4.4.1", + "tslib": "^2.6.2" + }, + "engines": { + "node": ">=18.0.0" + } + }, + "node_modules/@aws-sdk/credential-provider-env": { + "version": "3.799.0", + "resolved": "https://registry.npmjs.org/@aws-sdk/credential-provider-env/-/credential-provider-env-3.799.0.tgz", + "integrity": "sha512-vT/SSWtbUIOW/U21qgEySmmO44SFWIA7WeQPX1OrI8WJ5n7OEI23JWLHjLvHTkYmuZK6z1rPcv7HzRgmuGRibA==", + "license": "Apache-2.0", + "dependencies": { + "@aws-sdk/core": "3.799.0", + "@aws-sdk/types": "3.775.0", + "@smithy/property-provider": "^4.0.2", + "@smithy/types": "^4.2.0", + "tslib": "^2.6.2" + }, + "engines": { + "node": ">=18.0.0" + } + }, + "node_modules/@aws-sdk/credential-provider-http": { + "version": "3.799.0", + "resolved": "https://registry.npmjs.org/@aws-sdk/credential-provider-http/-/credential-provider-http-3.799.0.tgz", + "integrity": "sha512-2CjBpOWmhaPAExOgHnIB5nOkS5ef+mfRlJ1JC4nsnjAx0nrK4tk0XRE0LYz11P3+ue+a86cU8WTmBo+qjnGxPQ==", + "license": "Apache-2.0", + "dependencies": { + "@aws-sdk/core": "3.799.0", + "@aws-sdk/types": "3.775.0", + "@smithy/fetch-http-handler": "^5.0.2", + "@smithy/node-http-handler": "^4.0.4", + "@smithy/property-provider": "^4.0.2", + "@smithy/protocol-http": "^5.1.0", + "@smithy/smithy-client": "^4.2.1", + "@smithy/types": "^4.2.0", + "@smithy/util-stream": "^4.2.0", + "tslib": "^2.6.2" + }, + "engines": { + "node": ">=18.0.0" + } + }, + "node_modules/@aws-sdk/credential-provider-ini": { + "version": "3.803.0", + "resolved": "https://registry.npmjs.org/@aws-sdk/credential-provider-ini/-/credential-provider-ini-3.803.0.tgz", + "integrity": "sha512-XtbFftJex18GobpRWJxg5V7stVwvmV2gdBYW+zRM0YW6NZAR4NP/4vcc9ktM3++BWW5OF4Kvl7Nu7N4mAzRHmw==", + "license": "Apache-2.0", + "dependencies": { + "@aws-sdk/core": "3.799.0", + "@aws-sdk/credential-provider-env": "3.799.0", + "@aws-sdk/credential-provider-http": "3.799.0", + "@aws-sdk/credential-provider-process": "3.799.0", + "@aws-sdk/credential-provider-sso": "3.803.0", + 
"@aws-sdk/credential-provider-web-identity": "3.803.0", + "@aws-sdk/nested-clients": "3.803.0", + "@aws-sdk/types": "3.775.0", + "@smithy/credential-provider-imds": "^4.0.2", + "@smithy/property-provider": "^4.0.2", + "@smithy/shared-ini-file-loader": "^4.0.2", + "@smithy/types": "^4.2.0", + "tslib": "^2.6.2" + }, + "engines": { + "node": ">=18.0.0" + } + }, + "node_modules/@aws-sdk/credential-provider-node": { + "version": "3.803.0", + "resolved": "https://registry.npmjs.org/@aws-sdk/credential-provider-node/-/credential-provider-node-3.803.0.tgz", + "integrity": "sha512-lPdRYbjxwmv7gRqbaEe1Y1Yl5fD4c43AuK3P31eKjf1j41hZEQ0dg9a9KLk7i6ehEoVsxewnJrvbC2pVoYrCmQ==", + "license": "Apache-2.0", + "dependencies": { + "@aws-sdk/credential-provider-env": "3.799.0", + "@aws-sdk/credential-provider-http": "3.799.0", + "@aws-sdk/credential-provider-ini": "3.803.0", + "@aws-sdk/credential-provider-process": "3.799.0", + "@aws-sdk/credential-provider-sso": "3.803.0", + "@aws-sdk/credential-provider-web-identity": "3.803.0", + "@aws-sdk/types": "3.775.0", + "@smithy/credential-provider-imds": "^4.0.2", + "@smithy/property-provider": "^4.0.2", + "@smithy/shared-ini-file-loader": "^4.0.2", + "@smithy/types": "^4.2.0", + "tslib": "^2.6.2" + }, + "engines": { + "node": ">=18.0.0" + } + }, + "node_modules/@aws-sdk/credential-provider-process": { + "version": "3.799.0", + "resolved": "https://registry.npmjs.org/@aws-sdk/credential-provider-process/-/credential-provider-process-3.799.0.tgz", + "integrity": "sha512-g8jmNs2k98WNHMYcea1YKA+7ao2Ma4w0P42Dz4YpcI155pQHxHx25RwbOG+rsAKuo3bKwkW53HVE/ZTKhcWFgw==", + "license": "Apache-2.0", + "dependencies": { + "@aws-sdk/core": "3.799.0", + "@aws-sdk/types": "3.775.0", + "@smithy/property-provider": "^4.0.2", + "@smithy/shared-ini-file-loader": "^4.0.2", + "@smithy/types": "^4.2.0", + "tslib": "^2.6.2" + }, + "engines": { + "node": ">=18.0.0" + } + }, + "node_modules/@aws-sdk/credential-provider-sso": { + "version": "3.803.0", + "resolved": 
"https://registry.npmjs.org/@aws-sdk/credential-provider-sso/-/credential-provider-sso-3.803.0.tgz", + "integrity": "sha512-HEAcxSHrHxVekGnZqjFrkqdYAf4jFiZIMhuh0jqiqY6A4udEyXy1V623HVcTz/XXj6UBRnyD+zmOmlbzBvkfQg==", + "license": "Apache-2.0", + "dependencies": { + "@aws-sdk/client-sso": "3.803.0", + "@aws-sdk/core": "3.799.0", + "@aws-sdk/token-providers": "3.803.0", + "@aws-sdk/types": "3.775.0", + "@smithy/property-provider": "^4.0.2", + "@smithy/shared-ini-file-loader": "^4.0.2", + "@smithy/types": "^4.2.0", + "tslib": "^2.6.2" + }, + "engines": { + "node": ">=18.0.0" + } + }, + "node_modules/@aws-sdk/credential-provider-web-identity": { + "version": "3.803.0", + "resolved": "https://registry.npmjs.org/@aws-sdk/credential-provider-web-identity/-/credential-provider-web-identity-3.803.0.tgz", + "integrity": "sha512-oChnEpwI25OW4GPvhI1VnXM3IQEkDhESGFZd5JHzJDHyvSF2NU58V86jkJyaa4H4X25IbGaThuulNI5xCOngjw==", + "license": "Apache-2.0", + "dependencies": { + "@aws-sdk/core": "3.799.0", + "@aws-sdk/nested-clients": "3.803.0", + "@aws-sdk/types": "3.775.0", + "@smithy/property-provider": "^4.0.2", + "@smithy/types": "^4.2.0", + "tslib": "^2.6.2" + }, + "engines": { + "node": ">=18.0.0" + } + }, + "node_modules/@aws-sdk/endpoint-cache": { + "version": "3.723.0", + "resolved": "https://registry.npmjs.org/@aws-sdk/endpoint-cache/-/endpoint-cache-3.723.0.tgz", + "integrity": "sha512-2+a4WXRc+07uiPR+zJiPGKSOWaNJQNqitkks+6Hhm/haTLJqNVTgY2OWDh2PXvwMNpKB+AlGdhE65Oy6NzUgXg==", + "license": "Apache-2.0", + "dependencies": { + "mnemonist": "0.38.3", + "tslib": "^2.6.2" + }, + "engines": { + "node": ">=18.0.0" + } + }, + "node_modules/@aws-sdk/lib-dynamodb": { + "version": "3.803.0", + "resolved": "https://registry.npmjs.org/@aws-sdk/lib-dynamodb/-/lib-dynamodb-3.803.0.tgz", + "integrity": "sha512-J9oeaKnF0vfw1ixUc0Bu1GTcYwp/riiGCst/MSLPHeGqoFiYzyox/im1Pbuv2Ipx7/0QI7w5PxYdxOpwvqMCFg==", + "license": "Apache-2.0", + "dependencies": { + "@aws-sdk/core": "3.799.0", + 
"@aws-sdk/util-dynamodb": "3.803.0", + "@smithy/core": "^3.3.0", + "@smithy/smithy-client": "^4.2.1", + "@smithy/types": "^4.2.0", + "tslib": "^2.6.2" + }, + "engines": { + "node": ">=18.0.0" + }, + "peerDependencies": { + "@aws-sdk/client-dynamodb": "^3.803.0" + } + }, + "node_modules/@aws-sdk/middleware-endpoint-discovery": { + "version": "3.775.0", + "resolved": "https://registry.npmjs.org/@aws-sdk/middleware-endpoint-discovery/-/middleware-endpoint-discovery-3.775.0.tgz", + "integrity": "sha512-L0PmjSg7t+wovRo/Lin1kpei3e7wBhrENWb1Bbccu3PWUIfxolGeWplOmNhSlXjuQe9GXjf3z8kJRYOGBMFOvw==", + "license": "Apache-2.0", + "dependencies": { + "@aws-sdk/endpoint-cache": "3.723.0", + "@aws-sdk/types": "3.775.0", + "@smithy/node-config-provider": "^4.0.2", + "@smithy/protocol-http": "^5.1.0", + "@smithy/types": "^4.2.0", + "tslib": "^2.6.2" + }, + "engines": { + "node": ">=18.0.0" + } + }, + "node_modules/@aws-sdk/middleware-host-header": { + "version": "3.775.0", + "resolved": "https://registry.npmjs.org/@aws-sdk/middleware-host-header/-/middleware-host-header-3.775.0.tgz", + "integrity": "sha512-tkSegM0Z6WMXpLB8oPys/d+umYIocvO298mGvcMCncpRl77L9XkvSLJIFzaHes+o7djAgIduYw8wKIMStFss2w==", + "license": "Apache-2.0", + "dependencies": { + "@aws-sdk/types": "3.775.0", + "@smithy/protocol-http": "^5.1.0", + "@smithy/types": "^4.2.0", + "tslib": "^2.6.2" + }, + "engines": { + "node": ">=18.0.0" + } + }, + "node_modules/@aws-sdk/middleware-logger": { + "version": "3.775.0", + "resolved": "https://registry.npmjs.org/@aws-sdk/middleware-logger/-/middleware-logger-3.775.0.tgz", + "integrity": "sha512-FaxO1xom4MAoUJsldmR92nT1G6uZxTdNYOFYtdHfd6N2wcNaTuxgjIvqzg5y7QIH9kn58XX/dzf1iTjgqUStZw==", + "license": "Apache-2.0", + "dependencies": { + "@aws-sdk/types": "3.775.0", + "@smithy/types": "^4.2.0", + "tslib": "^2.6.2" + }, + "engines": { + "node": ">=18.0.0" + } + }, + "node_modules/@aws-sdk/middleware-recursion-detection": { + "version": "3.775.0", + "resolved": 
"https://registry.npmjs.org/@aws-sdk/middleware-recursion-detection/-/middleware-recursion-detection-3.775.0.tgz", + "integrity": "sha512-GLCzC8D0A0YDG5u3F5U03Vb9j5tcOEFhr8oc6PDk0k0vm5VwtZOE6LvK7hcCSoAB4HXyOUM0sQuXrbaAh9OwXA==", + "license": "Apache-2.0", + "dependencies": { + "@aws-sdk/types": "3.775.0", + "@smithy/protocol-http": "^5.1.0", + "@smithy/types": "^4.2.0", + "tslib": "^2.6.2" + }, + "engines": { + "node": ">=18.0.0" + } + }, + "node_modules/@aws-sdk/middleware-user-agent": { + "version": "3.799.0", + "resolved": "https://registry.npmjs.org/@aws-sdk/middleware-user-agent/-/middleware-user-agent-3.799.0.tgz", + "integrity": "sha512-TropQZanbOTxa+p+Nl4fWkzlRhgFwDfW+Wb6TR3jZN7IXHNlPpgGFpdrgvBExhW/RBhqr+94OsR8Ou58lp3hhA==", + "license": "Apache-2.0", + "dependencies": { + "@aws-sdk/core": "3.799.0", + "@aws-sdk/types": "3.775.0", + "@aws-sdk/util-endpoints": "3.787.0", + "@smithy/core": "^3.3.0", + "@smithy/protocol-http": "^5.1.0", + "@smithy/types": "^4.2.0", + "tslib": "^2.6.2" + }, + "engines": { + "node": ">=18.0.0" + } + }, + "node_modules/@aws-sdk/nested-clients": { + "version": "3.803.0", + "resolved": "https://registry.npmjs.org/@aws-sdk/nested-clients/-/nested-clients-3.803.0.tgz", + "integrity": "sha512-wiWiYaFQxK2u37G9IOXuWkHelEbU8ulLxdHpoPf0TSu/1boqLW7fcofuZATAvFcvigQx3oebwO8G4c/mmixTTw==", + "license": "Apache-2.0", + "dependencies": { + "@aws-crypto/sha256-browser": "5.2.0", + "@aws-crypto/sha256-js": "5.2.0", + "@aws-sdk/core": "3.799.0", + "@aws-sdk/middleware-host-header": "3.775.0", + "@aws-sdk/middleware-logger": "3.775.0", + "@aws-sdk/middleware-recursion-detection": "3.775.0", + "@aws-sdk/middleware-user-agent": "3.799.0", + "@aws-sdk/region-config-resolver": "3.775.0", + "@aws-sdk/types": "3.775.0", + "@aws-sdk/util-endpoints": "3.787.0", + "@aws-sdk/util-user-agent-browser": "3.775.0", + "@aws-sdk/util-user-agent-node": "3.799.0", + "@smithy/config-resolver": "^4.1.0", + "@smithy/core": "^3.3.0", + "@smithy/fetch-http-handler": 
"^5.0.2", + "@smithy/hash-node": "^4.0.2", + "@smithy/invalid-dependency": "^4.0.2", + "@smithy/middleware-content-length": "^4.0.2", + "@smithy/middleware-endpoint": "^4.1.1", + "@smithy/middleware-retry": "^4.1.2", + "@smithy/middleware-serde": "^4.0.3", + "@smithy/middleware-stack": "^4.0.2", + "@smithy/node-config-provider": "^4.0.2", + "@smithy/node-http-handler": "^4.0.4", + "@smithy/protocol-http": "^5.1.0", + "@smithy/smithy-client": "^4.2.1", + "@smithy/types": "^4.2.0", + "@smithy/url-parser": "^4.0.2", + "@smithy/util-base64": "^4.0.0", + "@smithy/util-body-length-browser": "^4.0.0", + "@smithy/util-body-length-node": "^4.0.0", + "@smithy/util-defaults-mode-browser": "^4.0.9", + "@smithy/util-defaults-mode-node": "^4.0.9", + "@smithy/util-endpoints": "^3.0.2", + "@smithy/util-middleware": "^4.0.2", + "@smithy/util-retry": "^4.0.3", + "@smithy/util-utf8": "^4.0.0", + "tslib": "^2.6.2" + }, + "engines": { + "node": ">=18.0.0" + } + }, + "node_modules/@aws-sdk/region-config-resolver": { + "version": "3.775.0", + "resolved": "https://registry.npmjs.org/@aws-sdk/region-config-resolver/-/region-config-resolver-3.775.0.tgz", + "integrity": "sha512-40iH3LJjrQS3LKUJAl7Wj0bln7RFPEvUYKFxtP8a+oKFDO0F65F52xZxIJbPn6sHkxWDAnZlGgdjZXM3p2g5wQ==", + "license": "Apache-2.0", + "dependencies": { + "@aws-sdk/types": "3.775.0", + "@smithy/node-config-provider": "^4.0.2", + "@smithy/types": "^4.2.0", + "@smithy/util-config-provider": "^4.0.0", + "@smithy/util-middleware": "^4.0.2", + "tslib": "^2.6.2" + }, + "engines": { + "node": ">=18.0.0" + } + }, + "node_modules/@aws-sdk/token-providers": { + "version": "3.803.0", + "resolved": "https://registry.npmjs.org/@aws-sdk/token-providers/-/token-providers-3.803.0.tgz", + "integrity": "sha512-lDbMgVjWWEPT7a6lLaAEPPljwOeLTjPX2sJ7MoDICpQotg4Yd8cQfX3nqScSyLAGSc7Rq/21UPnPoij/E0K3lg==", + "license": "Apache-2.0", + "dependencies": { + "@aws-sdk/nested-clients": "3.803.0", + "@aws-sdk/types": "3.775.0", + "@smithy/property-provider": 
"^4.0.2", + "@smithy/shared-ini-file-loader": "^4.0.2", + "@smithy/types": "^4.2.0", + "tslib": "^2.6.2" + }, + "engines": { + "node": ">=18.0.0" + } + }, + "node_modules/@aws-sdk/types": { + "version": "3.775.0", + "resolved": "https://registry.npmjs.org/@aws-sdk/types/-/types-3.775.0.tgz", + "integrity": "sha512-ZoGKwa4C9fC9Av6bdfqcW6Ix5ot05F/S4VxWR2nHuMv7hzfmAjTOcUiWT7UR4hM/U0whf84VhDtXN/DWAk52KA==", + "license": "Apache-2.0", + "dependencies": { + "@smithy/types": "^4.2.0", + "tslib": "^2.6.2" + }, + "engines": { + "node": ">=18.0.0" + } + }, + "node_modules/@aws-sdk/util-dynamodb": { + "version": "3.803.0", + "resolved": "https://registry.npmjs.org/@aws-sdk/util-dynamodb/-/util-dynamodb-3.803.0.tgz", + "integrity": "sha512-QiXvurnve8xIm41Zf/jNXwcYotDX3KZbHcsTaJ7ILhyFomqCjJ6bjLcCRdfndG600N5ua6AEK2XGw1luyBQxig==", + "license": "Apache-2.0", + "dependencies": { + "tslib": "^2.6.2" + }, + "engines": { + "node": ">=18.0.0" + }, + "peerDependencies": { + "@aws-sdk/client-dynamodb": "^3.803.0" + } + }, + "node_modules/@aws-sdk/util-endpoints": { + "version": "3.787.0", + "resolved": "https://registry.npmjs.org/@aws-sdk/util-endpoints/-/util-endpoints-3.787.0.tgz", + "integrity": "sha512-fd3zkiOkwnbdbN0Xp9TsP5SWrmv0SpT70YEdbb8wAj2DWQwiCmFszaSs+YCvhoCdmlR3Wl9Spu0pGpSAGKeYvQ==", + "license": "Apache-2.0", + "dependencies": { + "@aws-sdk/types": "3.775.0", + "@smithy/types": "^4.2.0", + "@smithy/util-endpoints": "^3.0.2", + "tslib": "^2.6.2" + }, + "engines": { + "node": ">=18.0.0" + } + }, + "node_modules/@aws-sdk/util-locate-window": { + "version": "3.723.0", + "resolved": "https://registry.npmjs.org/@aws-sdk/util-locate-window/-/util-locate-window-3.723.0.tgz", + "integrity": "sha512-Yf2CS10BqK688DRsrKI/EO6B8ff5J86NXe4C+VCysK7UOgN0l1zOTeTukZ3H8Q9tYYX3oaF1961o8vRkFm7Nmw==", + "license": "Apache-2.0", + "dependencies": { + "tslib": "^2.6.2" + }, + "engines": { + "node": ">=18.0.0" + } + }, + "node_modules/@aws-sdk/util-user-agent-browser": { + "version": "3.775.0", + 
"resolved": "https://registry.npmjs.org/@aws-sdk/util-user-agent-browser/-/util-user-agent-browser-3.775.0.tgz", + "integrity": "sha512-txw2wkiJmZKVdDbscK7VBK+u+TJnRtlUjRTLei+elZg2ADhpQxfVAQl436FUeIv6AhB/oRHW6/K/EAGXUSWi0A==", + "license": "Apache-2.0", + "dependencies": { + "@aws-sdk/types": "3.775.0", + "@smithy/types": "^4.2.0", + "bowser": "^2.11.0", + "tslib": "^2.6.2" + } + }, + "node_modules/@aws-sdk/util-user-agent-node": { + "version": "3.799.0", + "resolved": "https://registry.npmjs.org/@aws-sdk/util-user-agent-node/-/util-user-agent-node-3.799.0.tgz", + "integrity": "sha512-iXBk38RbIWPF5Nq9O4AnktORAzXovSVqWYClvS1qbE7ILsnTLJbagU9HlU25O2iV5COVh1qZkwuP5NHQ2yTEyw==", + "license": "Apache-2.0", + "dependencies": { + "@aws-sdk/middleware-user-agent": "3.799.0", + "@aws-sdk/types": "3.775.0", + "@smithy/node-config-provider": "^4.0.2", + "@smithy/types": "^4.2.0", + "tslib": "^2.6.2" + }, + "engines": { + "node": ">=18.0.0" + }, + "peerDependencies": { + "aws-crt": ">=1.0.0" + }, + "peerDependenciesMeta": { + "aws-crt": { + "optional": true + } + } + }, + "node_modules/@smithy/abort-controller": { + "version": "4.0.2", + "resolved": "https://registry.npmjs.org/@smithy/abort-controller/-/abort-controller-4.0.2.tgz", + "integrity": "sha512-Sl/78VDtgqKxN2+1qduaVE140XF+Xg+TafkncspwM4jFP/LHr76ZHmIY/y3V1M0mMLNk+Je6IGbzxy23RSToMw==", + "license": "Apache-2.0", + "dependencies": { + "@smithy/types": "^4.2.0", + "tslib": "^2.6.2" + }, + "engines": { + "node": ">=18.0.0" + } + }, + "node_modules/@smithy/config-resolver": { + "version": "4.1.0", + "resolved": "https://registry.npmjs.org/@smithy/config-resolver/-/config-resolver-4.1.0.tgz", + "integrity": "sha512-8smPlwhga22pwl23fM5ew4T9vfLUCeFXlcqNOCD5M5h8VmNPNUE9j6bQSuRXpDSV11L/E/SwEBQuW8hr6+nS1A==", + "license": "Apache-2.0", + "dependencies": { + "@smithy/node-config-provider": "^4.0.2", + "@smithy/types": "^4.2.0", + "@smithy/util-config-provider": "^4.0.0", + "@smithy/util-middleware": "^4.0.2", + "tslib": "^2.6.2" + 
}, + "engines": { + "node": ">=18.0.0" + } + }, + "node_modules/@smithy/core": { + "version": "3.3.1", + "resolved": "https://registry.npmjs.org/@smithy/core/-/core-3.3.1.tgz", + "integrity": "sha512-W7AppgQD3fP1aBmo8wWo0id5zeR2/aYRy067vZsDVaa6v/mdhkg6DxXwEVuSPjZl+ZnvWAQbUMCd5ckw38+tHQ==", + "license": "Apache-2.0", + "dependencies": { + "@smithy/middleware-serde": "^4.0.3", + "@smithy/protocol-http": "^5.1.0", + "@smithy/types": "^4.2.0", + "@smithy/util-body-length-browser": "^4.0.0", + "@smithy/util-middleware": "^4.0.2", + "@smithy/util-stream": "^4.2.0", + "@smithy/util-utf8": "^4.0.0", + "tslib": "^2.6.2" + }, + "engines": { + "node": ">=18.0.0" + } + }, + "node_modules/@smithy/credential-provider-imds": { + "version": "4.0.2", + "resolved": "https://registry.npmjs.org/@smithy/credential-provider-imds/-/credential-provider-imds-4.0.2.tgz", + "integrity": "sha512-32lVig6jCaWBHnY+OEQ6e6Vnt5vDHaLiydGrwYMW9tPqO688hPGTYRamYJ1EptxEC2rAwJrHWmPoKRBl4iTa8w==", + "license": "Apache-2.0", + "dependencies": { + "@smithy/node-config-provider": "^4.0.2", + "@smithy/property-provider": "^4.0.2", + "@smithy/types": "^4.2.0", + "@smithy/url-parser": "^4.0.2", + "tslib": "^2.6.2" + }, + "engines": { + "node": ">=18.0.0" + } + }, + "node_modules/@smithy/fetch-http-handler": { + "version": "5.0.2", + "resolved": "https://registry.npmjs.org/@smithy/fetch-http-handler/-/fetch-http-handler-5.0.2.tgz", + "integrity": "sha512-+9Dz8sakS9pe7f2cBocpJXdeVjMopUDLgZs1yWeu7h++WqSbjUYv/JAJwKwXw1HV6gq1jyWjxuyn24E2GhoEcQ==", + "license": "Apache-2.0", + "dependencies": { + "@smithy/protocol-http": "^5.1.0", + "@smithy/querystring-builder": "^4.0.2", + "@smithy/types": "^4.2.0", + "@smithy/util-base64": "^4.0.0", + "tslib": "^2.6.2" + }, + "engines": { + "node": ">=18.0.0" + } + }, + "node_modules/@smithy/hash-node": { + "version": "4.0.2", + "resolved": "https://registry.npmjs.org/@smithy/hash-node/-/hash-node-4.0.2.tgz", + "integrity": 
"sha512-VnTpYPnRUE7yVhWozFdlxcYknv9UN7CeOqSrMH+V877v4oqtVYuoqhIhtSjmGPvYrYnAkaM61sLMKHvxL138yg==", + "license": "Apache-2.0", + "dependencies": { + "@smithy/types": "^4.2.0", + "@smithy/util-buffer-from": "^4.0.0", + "@smithy/util-utf8": "^4.0.0", + "tslib": "^2.6.2" + }, + "engines": { + "node": ">=18.0.0" + } + }, + "node_modules/@smithy/invalid-dependency": { + "version": "4.0.2", + "resolved": "https://registry.npmjs.org/@smithy/invalid-dependency/-/invalid-dependency-4.0.2.tgz", + "integrity": "sha512-GatB4+2DTpgWPday+mnUkoumP54u/MDM/5u44KF9hIu8jF0uafZtQLcdfIKkIcUNuF/fBojpLEHZS/56JqPeXQ==", + "license": "Apache-2.0", + "dependencies": { + "@smithy/types": "^4.2.0", + "tslib": "^2.6.2" + }, + "engines": { + "node": ">=18.0.0" + } + }, + "node_modules/@smithy/is-array-buffer": { + "version": "4.0.0", + "resolved": "https://registry.npmjs.org/@smithy/is-array-buffer/-/is-array-buffer-4.0.0.tgz", + "integrity": "sha512-saYhF8ZZNoJDTvJBEWgeBccCg+yvp1CX+ed12yORU3NilJScfc6gfch2oVb4QgxZrGUx3/ZJlb+c/dJbyupxlw==", + "license": "Apache-2.0", + "dependencies": { + "tslib": "^2.6.2" + }, + "engines": { + "node": ">=18.0.0" + } + }, + "node_modules/@smithy/middleware-content-length": { + "version": "4.0.2", + "resolved": "https://registry.npmjs.org/@smithy/middleware-content-length/-/middleware-content-length-4.0.2.tgz", + "integrity": "sha512-hAfEXm1zU+ELvucxqQ7I8SszwQ4znWMbNv6PLMndN83JJN41EPuS93AIyh2N+gJ6x8QFhzSO6b7q2e6oClDI8A==", + "license": "Apache-2.0", + "dependencies": { + "@smithy/protocol-http": "^5.1.0", + "@smithy/types": "^4.2.0", + "tslib": "^2.6.2" + }, + "engines": { + "node": ">=18.0.0" + } + }, + "node_modules/@smithy/middleware-endpoint": { + "version": "4.1.2", + "resolved": "https://registry.npmjs.org/@smithy/middleware-endpoint/-/middleware-endpoint-4.1.2.tgz", + "integrity": "sha512-EqOy3xaEGQpsKxLlzYstDRJ8eY90CbyBP4cl+w7r45mE60S8YliyL9AgWsdWcyNiB95E2PMqHBEv67nNl1zLfg==", + "license": "Apache-2.0", + "dependencies": { + "@smithy/core": "^3.3.1", + 
"@smithy/middleware-serde": "^4.0.3", + "@smithy/node-config-provider": "^4.0.2", + "@smithy/shared-ini-file-loader": "^4.0.2", + "@smithy/types": "^4.2.0", + "@smithy/url-parser": "^4.0.2", + "@smithy/util-middleware": "^4.0.2", + "tslib": "^2.6.2" + }, + "engines": { + "node": ">=18.0.0" + } + }, + "node_modules/@smithy/middleware-retry": { + "version": "4.1.3", + "resolved": "https://registry.npmjs.org/@smithy/middleware-retry/-/middleware-retry-4.1.3.tgz", + "integrity": "sha512-AsJtI9KiFoEGAhcEKZyzzPfrszAQGcf4HSYKmenz0WGx/6YNvoPPv4OSGfZTCsDmgPHv4pXzxE+7QV7jcGWNKw==", + "license": "Apache-2.0", + "dependencies": { + "@smithy/node-config-provider": "^4.0.2", + "@smithy/protocol-http": "^5.1.0", + "@smithy/service-error-classification": "^4.0.3", + "@smithy/smithy-client": "^4.2.2", + "@smithy/types": "^4.2.0", + "@smithy/util-middleware": "^4.0.2", + "@smithy/util-retry": "^4.0.3", + "tslib": "^2.6.2", + "uuid": "^9.0.1" + }, + "engines": { + "node": ">=18.0.0" + } + }, + "node_modules/@smithy/middleware-serde": { + "version": "4.0.3", + "resolved": "https://registry.npmjs.org/@smithy/middleware-serde/-/middleware-serde-4.0.3.tgz", + "integrity": "sha512-rfgDVrgLEVMmMn0BI8O+8OVr6vXzjV7HZj57l0QxslhzbvVfikZbVfBVthjLHqib4BW44QhcIgJpvebHlRaC9A==", + "license": "Apache-2.0", + "dependencies": { + "@smithy/types": "^4.2.0", + "tslib": "^2.6.2" + }, + "engines": { + "node": ">=18.0.0" + } + }, + "node_modules/@smithy/middleware-stack": { + "version": "4.0.2", + "resolved": "https://registry.npmjs.org/@smithy/middleware-stack/-/middleware-stack-4.0.2.tgz", + "integrity": "sha512-eSPVcuJJGVYrFYu2hEq8g8WWdJav3sdrI4o2c6z/rjnYDd3xH9j9E7deZQCzFn4QvGPouLngH3dQ+QVTxv5bOQ==", + "license": "Apache-2.0", + "dependencies": { + "@smithy/types": "^4.2.0", + "tslib": "^2.6.2" + }, + "engines": { + "node": ">=18.0.0" + } + }, + "node_modules/@smithy/node-config-provider": { + "version": "4.0.2", + "resolved": 
"https://registry.npmjs.org/@smithy/node-config-provider/-/node-config-provider-4.0.2.tgz", + "integrity": "sha512-WgCkILRZfJwJ4Da92a6t3ozN/zcvYyJGUTmfGbgS/FkCcoCjl7G4FJaCDN1ySdvLvemnQeo25FdkyMSTSwulsw==", + "license": "Apache-2.0", + "dependencies": { + "@smithy/property-provider": "^4.0.2", + "@smithy/shared-ini-file-loader": "^4.0.2", + "@smithy/types": "^4.2.0", + "tslib": "^2.6.2" + }, + "engines": { + "node": ">=18.0.0" + } + }, + "node_modules/@smithy/node-http-handler": { + "version": "4.0.4", + "resolved": "https://registry.npmjs.org/@smithy/node-http-handler/-/node-http-handler-4.0.4.tgz", + "integrity": "sha512-/mdqabuAT3o/ihBGjL94PUbTSPSRJ0eeVTdgADzow0wRJ0rN4A27EOrtlK56MYiO1fDvlO3jVTCxQtQmK9dZ1g==", + "license": "Apache-2.0", + "dependencies": { + "@smithy/abort-controller": "^4.0.2", + "@smithy/protocol-http": "^5.1.0", + "@smithy/querystring-builder": "^4.0.2", + "@smithy/types": "^4.2.0", + "tslib": "^2.6.2" + }, + "engines": { + "node": ">=18.0.0" + } + }, + "node_modules/@smithy/property-provider": { + "version": "4.0.2", + "resolved": "https://registry.npmjs.org/@smithy/property-provider/-/property-provider-4.0.2.tgz", + "integrity": "sha512-wNRoQC1uISOuNc2s4hkOYwYllmiyrvVXWMtq+TysNRVQaHm4yoafYQyjN/goYZS+QbYlPIbb/QRjaUZMuzwQ7A==", + "license": "Apache-2.0", + "dependencies": { + "@smithy/types": "^4.2.0", + "tslib": "^2.6.2" + }, + "engines": { + "node": ">=18.0.0" + } + }, + "node_modules/@smithy/protocol-http": { + "version": "5.1.0", + "resolved": "https://registry.npmjs.org/@smithy/protocol-http/-/protocol-http-5.1.0.tgz", + "integrity": "sha512-KxAOL1nUNw2JTYrtviRRjEnykIDhxc84qMBzxvu1MUfQfHTuBlCG7PA6EdVwqpJjH7glw7FqQoFxUJSyBQgu7g==", + "license": "Apache-2.0", + "dependencies": { + "@smithy/types": "^4.2.0", + "tslib": "^2.6.2" + }, + "engines": { + "node": ">=18.0.0" + } + }, + "node_modules/@smithy/querystring-builder": { + "version": "4.0.2", + "resolved": 
"https://registry.npmjs.org/@smithy/querystring-builder/-/querystring-builder-4.0.2.tgz", + "integrity": "sha512-NTOs0FwHw1vimmQM4ebh+wFQvOwkEf/kQL6bSM1Lock+Bv4I89B3hGYoUEPkmvYPkDKyp5UdXJYu+PoTQ3T31Q==", + "license": "Apache-2.0", + "dependencies": { + "@smithy/types": "^4.2.0", + "@smithy/util-uri-escape": "^4.0.0", + "tslib": "^2.6.2" + }, + "engines": { + "node": ">=18.0.0" + } + }, + "node_modules/@smithy/querystring-parser": { + "version": "4.0.2", + "resolved": "https://registry.npmjs.org/@smithy/querystring-parser/-/querystring-parser-4.0.2.tgz", + "integrity": "sha512-v6w8wnmZcVXjfVLjxw8qF7OwESD9wnpjp0Dqry/Pod0/5vcEA3qxCr+BhbOHlxS8O+29eLpT3aagxXGwIoEk7Q==", + "license": "Apache-2.0", + "dependencies": { + "@smithy/types": "^4.2.0", + "tslib": "^2.6.2" + }, + "engines": { + "node": ">=18.0.0" + } + }, + "node_modules/@smithy/service-error-classification": { + "version": "4.0.3", + "resolved": "https://registry.npmjs.org/@smithy/service-error-classification/-/service-error-classification-4.0.3.tgz", + "integrity": "sha512-FTbcajmltovWMjj3tksDQdD23b2w6gH+A0DYA1Yz3iSpjDj8fmkwy62UnXcWMy4d5YoMoSyLFHMfkEVEzbiN8Q==", + "license": "Apache-2.0", + "dependencies": { + "@smithy/types": "^4.2.0" + }, + "engines": { + "node": ">=18.0.0" + } + }, + "node_modules/@smithy/shared-ini-file-loader": { + "version": "4.0.2", + "resolved": "https://registry.npmjs.org/@smithy/shared-ini-file-loader/-/shared-ini-file-loader-4.0.2.tgz", + "integrity": "sha512-J9/gTWBGVuFZ01oVA6vdb4DAjf1XbDhK6sLsu3OS9qmLrS6KB5ygpeHiM3miIbj1qgSJ96GYszXFWv6ErJ8QEw==", + "license": "Apache-2.0", + "dependencies": { + "@smithy/types": "^4.2.0", + "tslib": "^2.6.2" + }, + "engines": { + "node": ">=18.0.0" + } + }, + "node_modules/@smithy/signature-v4": { + "version": "5.1.0", + "resolved": "https://registry.npmjs.org/@smithy/signature-v4/-/signature-v4-5.1.0.tgz", + "integrity": "sha512-4t5WX60sL3zGJF/CtZsUQTs3UrZEDO2P7pEaElrekbLqkWPYkgqNW1oeiNYC6xXifBnT9dVBOnNQRvOE9riU9w==", + "license": "Apache-2.0", + 
"dependencies": { + "@smithy/is-array-buffer": "^4.0.0", + "@smithy/protocol-http": "^5.1.0", + "@smithy/types": "^4.2.0", + "@smithy/util-hex-encoding": "^4.0.0", + "@smithy/util-middleware": "^4.0.2", + "@smithy/util-uri-escape": "^4.0.0", + "@smithy/util-utf8": "^4.0.0", + "tslib": "^2.6.2" + }, + "engines": { + "node": ">=18.0.0" + } + }, + "node_modules/@smithy/smithy-client": { + "version": "4.2.2", + "resolved": "https://registry.npmjs.org/@smithy/smithy-client/-/smithy-client-4.2.2.tgz", + "integrity": "sha512-3AnHfsMdq9Wg7+3BeR1HuLWI9+DMA/SoHVpCWq6xSsa52ikNd6nlF/wFzdpHyGtVa+Aji6lMgvwOF4sGcVA7SA==", + "license": "Apache-2.0", + "dependencies": { + "@smithy/core": "^3.3.1", + "@smithy/middleware-endpoint": "^4.1.2", + "@smithy/middleware-stack": "^4.0.2", + "@smithy/protocol-http": "^5.1.0", + "@smithy/types": "^4.2.0", + "@smithy/util-stream": "^4.2.0", + "tslib": "^2.6.2" + }, + "engines": { + "node": ">=18.0.0" + } + }, + "node_modules/@smithy/types": { + "version": "4.2.0", + "resolved": "https://registry.npmjs.org/@smithy/types/-/types-4.2.0.tgz", + "integrity": "sha512-7eMk09zQKCO+E/ivsjQv+fDlOupcFUCSC/L2YUPgwhvowVGWbPQHjEFcmjt7QQ4ra5lyowS92SV53Zc6XD4+fg==", + "license": "Apache-2.0", + "dependencies": { + "tslib": "^2.6.2" + }, + "engines": { + "node": ">=18.0.0" + } + }, + "node_modules/@smithy/url-parser": { + "version": "4.0.2", + "resolved": "https://registry.npmjs.org/@smithy/url-parser/-/url-parser-4.0.2.tgz", + "integrity": "sha512-Bm8n3j2ScqnT+kJaClSVCMeiSenK6jVAzZCNewsYWuZtnBehEz4r2qP0riZySZVfzB+03XZHJeqfmJDkeeSLiQ==", + "license": "Apache-2.0", + "dependencies": { + "@smithy/querystring-parser": "^4.0.2", + "@smithy/types": "^4.2.0", + "tslib": "^2.6.2" + }, + "engines": { + "node": ">=18.0.0" + } + }, + "node_modules/@smithy/util-base64": { + "version": "4.0.0", + "resolved": "https://registry.npmjs.org/@smithy/util-base64/-/util-base64-4.0.0.tgz", + "integrity": 
"sha512-CvHfCmO2mchox9kjrtzoHkWHxjHZzaFojLc8quxXY7WAAMAg43nuxwv95tATVgQFNDwd4M9S1qFzj40Ul41Kmg==", + "license": "Apache-2.0", + "dependencies": { + "@smithy/util-buffer-from": "^4.0.0", + "@smithy/util-utf8": "^4.0.0", + "tslib": "^2.6.2" + }, + "engines": { + "node": ">=18.0.0" + } + }, + "node_modules/@smithy/util-body-length-browser": { + "version": "4.0.0", + "resolved": "https://registry.npmjs.org/@smithy/util-body-length-browser/-/util-body-length-browser-4.0.0.tgz", + "integrity": "sha512-sNi3DL0/k64/LO3A256M+m3CDdG6V7WKWHdAiBBMUN8S3hK3aMPhwnPik2A/a2ONN+9doY9UxaLfgqsIRg69QA==", + "license": "Apache-2.0", + "dependencies": { + "tslib": "^2.6.2" + }, + "engines": { + "node": ">=18.0.0" + } + }, + "node_modules/@smithy/util-body-length-node": { + "version": "4.0.0", + "resolved": "https://registry.npmjs.org/@smithy/util-body-length-node/-/util-body-length-node-4.0.0.tgz", + "integrity": "sha512-q0iDP3VsZzqJyje8xJWEJCNIu3lktUGVoSy1KB0UWym2CL1siV3artm+u1DFYTLejpsrdGyCSWBdGNjJzfDPjg==", + "license": "Apache-2.0", + "dependencies": { + "tslib": "^2.6.2" + }, + "engines": { + "node": ">=18.0.0" + } + }, + "node_modules/@smithy/util-buffer-from": { + "version": "4.0.0", + "resolved": "https://registry.npmjs.org/@smithy/util-buffer-from/-/util-buffer-from-4.0.0.tgz", + "integrity": "sha512-9TOQ7781sZvddgO8nxueKi3+yGvkY35kotA0Y6BWRajAv8jjmigQ1sBwz0UX47pQMYXJPahSKEKYFgt+rXdcug==", + "license": "Apache-2.0", + "dependencies": { + "@smithy/is-array-buffer": "^4.0.0", + "tslib": "^2.6.2" + }, + "engines": { + "node": ">=18.0.0" + } + }, + "node_modules/@smithy/util-config-provider": { + "version": "4.0.0", + "resolved": "https://registry.npmjs.org/@smithy/util-config-provider/-/util-config-provider-4.0.0.tgz", + "integrity": "sha512-L1RBVzLyfE8OXH+1hsJ8p+acNUSirQnWQ6/EgpchV88G6zGBTDPdXiiExei6Z1wR2RxYvxY/XLw6AMNCCt8H3w==", + "license": "Apache-2.0", + "dependencies": { + "tslib": "^2.6.2" + }, + "engines": { + "node": ">=18.0.0" + } + }, + 
"node_modules/@smithy/util-defaults-mode-browser": { + "version": "4.0.10", + "resolved": "https://registry.npmjs.org/@smithy/util-defaults-mode-browser/-/util-defaults-mode-browser-4.0.10.tgz", + "integrity": "sha512-2k6fgUNOZ1Rn0gEjvGPGrDEINLG8qSBHsN7xlkkbO+fnHJ36BQPDzhFfMmYSDS8AgzoygqQiDOQ+6Hp2vBTUdA==", + "license": "Apache-2.0", + "dependencies": { + "@smithy/property-provider": "^4.0.2", + "@smithy/smithy-client": "^4.2.2", + "@smithy/types": "^4.2.0", + "bowser": "^2.11.0", + "tslib": "^2.6.2" + }, + "engines": { + "node": ">=18.0.0" + } + }, + "node_modules/@smithy/util-defaults-mode-node": { + "version": "4.0.10", + "resolved": "https://registry.npmjs.org/@smithy/util-defaults-mode-node/-/util-defaults-mode-node-4.0.10.tgz", + "integrity": "sha512-2XR1WRglLVmoIFts7bODUTgBdVyvkfKNkydHrlsI5VxW9q3s1hnJCuY+f1OHzvj5ue23q4vydM2fjrMjf2HSdQ==", + "license": "Apache-2.0", + "dependencies": { + "@smithy/config-resolver": "^4.1.0", + "@smithy/credential-provider-imds": "^4.0.2", + "@smithy/node-config-provider": "^4.0.2", + "@smithy/property-provider": "^4.0.2", + "@smithy/smithy-client": "^4.2.2", + "@smithy/types": "^4.2.0", + "tslib": "^2.6.2" + }, + "engines": { + "node": ">=18.0.0" + } + }, + "node_modules/@smithy/util-endpoints": { + "version": "3.0.2", + "resolved": "https://registry.npmjs.org/@smithy/util-endpoints/-/util-endpoints-3.0.2.tgz", + "integrity": "sha512-6QSutU5ZyrpNbnd51zRTL7goojlcnuOB55+F9VBD+j8JpRY50IGamsjlycrmpn8PQkmJucFW8A0LSfXj7jjtLQ==", + "license": "Apache-2.0", + "dependencies": { + "@smithy/node-config-provider": "^4.0.2", + "@smithy/types": "^4.2.0", + "tslib": "^2.6.2" + }, + "engines": { + "node": ">=18.0.0" + } + }, + "node_modules/@smithy/util-hex-encoding": { + "version": "4.0.0", + "resolved": "https://registry.npmjs.org/@smithy/util-hex-encoding/-/util-hex-encoding-4.0.0.tgz", + "integrity": "sha512-Yk5mLhHtfIgW2W2WQZWSg5kuMZCVbvhFmC7rV4IO2QqnZdbEFPmQnCcGMAX2z/8Qj3B9hYYNjZOhWym+RwhePw==", + "license": "Apache-2.0", + 
"dependencies": { + "tslib": "^2.6.2" + }, + "engines": { + "node": ">=18.0.0" + } + }, + "node_modules/@smithy/util-middleware": { + "version": "4.0.2", + "resolved": "https://registry.npmjs.org/@smithy/util-middleware/-/util-middleware-4.0.2.tgz", + "integrity": "sha512-6GDamTGLuBQVAEuQ4yDQ+ti/YINf/MEmIegrEeg7DdB/sld8BX1lqt9RRuIcABOhAGTA50bRbPzErez7SlDtDQ==", + "license": "Apache-2.0", + "dependencies": { + "@smithy/types": "^4.2.0", + "tslib": "^2.6.2" + }, + "engines": { + "node": ">=18.0.0" + } + }, + "node_modules/@smithy/util-retry": { + "version": "4.0.3", + "resolved": "https://registry.npmjs.org/@smithy/util-retry/-/util-retry-4.0.3.tgz", + "integrity": "sha512-DPuYjZQDXmKr/sNvy9Spu8R/ESa2e22wXZzSAY6NkjOLj6spbIje/Aq8rT97iUMdDj0qHMRIe+bTxvlU74d9Ng==", + "license": "Apache-2.0", + "dependencies": { + "@smithy/service-error-classification": "^4.0.3", + "@smithy/types": "^4.2.0", + "tslib": "^2.6.2" + }, + "engines": { + "node": ">=18.0.0" + } + }, + "node_modules/@smithy/util-stream": { + "version": "4.2.0", + "resolved": "https://registry.npmjs.org/@smithy/util-stream/-/util-stream-4.2.0.tgz", + "integrity": "sha512-Vj1TtwWnuWqdgQI6YTUF5hQ/0jmFiOYsc51CSMgj7QfyO+RF4EnT2HNjoviNlOOmgzgvf3f5yno+EiC4vrnaWQ==", + "license": "Apache-2.0", + "dependencies": { + "@smithy/fetch-http-handler": "^5.0.2", + "@smithy/node-http-handler": "^4.0.4", + "@smithy/types": "^4.2.0", + "@smithy/util-base64": "^4.0.0", + "@smithy/util-buffer-from": "^4.0.0", + "@smithy/util-hex-encoding": "^4.0.0", + "@smithy/util-utf8": "^4.0.0", + "tslib": "^2.6.2" + }, + "engines": { + "node": ">=18.0.0" + } + }, + "node_modules/@smithy/util-uri-escape": { + "version": "4.0.0", + "resolved": "https://registry.npmjs.org/@smithy/util-uri-escape/-/util-uri-escape-4.0.0.tgz", + "integrity": "sha512-77yfbCbQMtgtTylO9itEAdpPXSog3ZxMe09AEhm0dU0NLTalV70ghDZFR+Nfi1C60jnJoh/Re4090/DuZh2Omg==", + "license": "Apache-2.0", + "dependencies": { + "tslib": "^2.6.2" + }, + "engines": { + "node": ">=18.0.0" + } 
+ }, + "node_modules/@smithy/util-utf8": { + "version": "4.0.0", + "resolved": "https://registry.npmjs.org/@smithy/util-utf8/-/util-utf8-4.0.0.tgz", + "integrity": "sha512-b+zebfKCfRdgNJDknHCob3O7FpeYQN6ZG6YLExMcasDHsCXlsXCEuiPZeLnJLpwa5dvPetGlnGCiMHuLwGvFow==", + "license": "Apache-2.0", + "dependencies": { + "@smithy/util-buffer-from": "^4.0.0", + "tslib": "^2.6.2" + }, + "engines": { + "node": ">=18.0.0" + } + }, + "node_modules/@smithy/util-waiter": { + "version": "4.0.3", + "resolved": "https://registry.npmjs.org/@smithy/util-waiter/-/util-waiter-4.0.3.tgz", + "integrity": "sha512-JtaY3FxmD+te+KSI2FJuEcfNC9T/DGGVf551babM7fAaXhjJUt7oSYurH1Devxd2+BOSUACCgt3buinx4UnmEA==", + "license": "Apache-2.0", + "dependencies": { + "@smithy/abort-controller": "^4.0.2", + "@smithy/types": "^4.2.0", + "tslib": "^2.6.2" + }, + "engines": { + "node": ">=18.0.0" + } + }, + "node_modules/@types/uuid": { + "version": "9.0.8", + "resolved": "https://registry.npmjs.org/@types/uuid/-/uuid-9.0.8.tgz", + "integrity": "sha512-jg+97EGIcY9AGHJJRaaPVgetKDsrTgbRjQ5Msgjh/DQKEFl0DtyRr/VCOyD1T2R1MNeWPK/u7JoGhlDZnKBAfA==", + "license": "MIT" + }, + "node_modules/bowser": { + "version": "2.11.0", + "resolved": "https://registry.npmjs.org/bowser/-/bowser-2.11.0.tgz", + "integrity": "sha512-AlcaJBi/pqqJBIQ8U9Mcpc9i8Aqxn88Skv5d+xBX006BY5u8N3mGLHa5Lgppa7L/HfwgwLgZ6NYs+Ag6uUmJRA==", + "license": "MIT" + }, + "node_modules/fast-xml-parser": { + "version": "4.4.1", + "resolved": "https://registry.npmjs.org/fast-xml-parser/-/fast-xml-parser-4.4.1.tgz", + "integrity": "sha512-xkjOecfnKGkSsOwtZ5Pz7Us/T6mrbPQrq0nh+aCO5V9nk5NLWmasAHumTKjiPJPWANe+kAZ84Jc8ooJkzZ88Sw==", + "funding": [ + { + "type": "github", + "url": "https://github.com/sponsors/NaturalIntelligence" + }, + { + "type": "paypal", + "url": "https://paypal.me/naturalintelligence" + } + ], + "license": "MIT", + "dependencies": { + "strnum": "^1.0.5" + }, + "bin": { + "fxparser": "src/cli/cli.js" + } + }, + "node_modules/mnemonist": { + "version": 
"0.38.3", + "resolved": "https://registry.npmjs.org/mnemonist/-/mnemonist-0.38.3.tgz", + "integrity": "sha512-2K9QYubXx/NAjv4VLq1d1Ly8pWNC5L3BrixtdkyTegXWJIqY+zLNDhhX/A+ZwWt70tB1S8H4BE8FLYEFyNoOBw==", + "license": "MIT", + "dependencies": { + "obliterator": "^1.6.1" + } + }, + "node_modules/obliterator": { + "version": "1.6.1", + "resolved": "https://registry.npmjs.org/obliterator/-/obliterator-1.6.1.tgz", + "integrity": "sha512-9WXswnqINnnhOG/5SLimUlzuU1hFJUc8zkwyD59Sd+dPOMf05PmnYG/d6Q7HZ+KmgkZJa1PxRso6QdM3sTNHig==", + "license": "MIT" + }, + "node_modules/strnum": { + "version": "1.1.2", + "resolved": "https://registry.npmjs.org/strnum/-/strnum-1.1.2.tgz", + "integrity": "sha512-vrN+B7DBIoTTZjnPNewwhx6cBA/H+IS7rfW68n7XxC1y7uoiGQBxaKzqucGUgavX15dJgiGztLJ8vxuEzwqBdA==", + "funding": [ + { + "type": "github", + "url": "https://github.com/sponsors/NaturalIntelligence" + } + ], + "license": "MIT" + }, + "node_modules/tslib": { + "version": "2.8.1", + "resolved": "https://registry.npmjs.org/tslib/-/tslib-2.8.1.tgz", + "integrity": "sha512-oJFu94HQb+KVduSUQL7wnpmqnfmLsOA/nAh6b6EH0wCEoK0/mPeXU6c3wKDV83MkOuHPRHtSXKKU99IBazS/2w==", + "license": "0BSD" + }, + "node_modules/uuid": { + "version": "9.0.1", + "resolved": "https://registry.npmjs.org/uuid/-/uuid-9.0.1.tgz", + "integrity": "sha512-b+1eJOlsR9K8HJpow9Ok3fiWOWSIcIzXodvv0rQjVoOVNpWMpxf1wZNpt4y9h10odCNrqnYp1OBzRktckBe3sA==", + "funding": [ + "https://github.com/sponsors/broofa", + "https://github.com/sponsors/ctavan" + ], + "license": "MIT", + "bin": { + "uuid": "dist/bin/uuid" + } + } + } +} diff --git a/amplify/functions/fetchDocuments/package.json b/amplify/functions/fetchDocuments/package.json index e69de29..ac6cd12 100644 --- a/amplify/functions/fetchDocuments/package.json +++ b/amplify/functions/fetchDocuments/package.json @@ -0,0 +1,16 @@ +{ + "name": "fetchdocuments", + "version": "1.0.0", + "main": "index.js", + "scripts": { + "test": "echo \"Error: no test specified\" && exit 1" + }, + "keywords": [], + 
"author": "", + "license": "ISC", + "description": "", + "dependencies": { + "@aws-sdk/client-dynamodb": "^3.803.0", + "@aws-sdk/lib-dynamodb": "^3.803.0" + } +} diff --git a/app/app.css b/app/app.css index 8bc28f1..6979aed 100644 --- a/app/app.css +++ b/app/app.css @@ -117,6 +117,10 @@ a:hover { display: flex; justify-content: center; align-items: center; + display: inline-grid; + grid-template-columns: auto auto auto; + overflow: auto; + gap: 1% } .text-control textarea { diff --git a/app/document-selector/DocumentSelector.tsx b/app/document-selector/DocumentSelector.tsx index d0296ba..d2fd86f 100644 --- a/app/document-selector/DocumentSelector.tsx +++ b/app/document-selector/DocumentSelector.tsx @@ -2,6 +2,10 @@ import { useState, useEffect } from "react"; import DocumentTile from "./DocumentTile"; +import Editor from "../editor/page"; +import { generateClient } from "aws-amplify/data"; +import type {Schema} from "../../amplify/data/resource"; +import { ModelField, Nullable } from "@aws-amplify/data-schema"; interface Document { id: string; @@ -14,37 +18,41 @@ interface DocumentSelectorProps { signOut?: () => void; } +const client = generateClient(); + export default function DocumentSelector({ signOut }: DocumentSelectorProps) { const [documents, setDocuments] = useState([]); const [loading, setLoading] = useState(false); - + const fetchDocuments = async () => { - setLoading(true); - try { - const response = await fetch("/api/fetchDocuments"); - const data = await response.json(); - setDocuments(data); - } catch (error) { - console.error("Failed to fetch documents:", error); - } finally { - setLoading(false); + // setLoading(true); + // try { + // const response = await handler(event); + // const data = await response.body; + // //setDocuments(data); + // console.log(data); + // } catch (error) { + // console.error("Failed to fetch documents:", error); + // } finally { + // setLoading(false); + // } + const {data} = await client.models.Document.list(); + let 
docs:Document[]=[]; + for (const d of data){ + var str:string; + str = d.title || ""; + var temp: Document = {title: str, id: d.id, createdAt: d.createdAt, updatedAt: d.updatedAt}; + docs.push(temp) } + console.log(docs); + setDocuments(docs); + }; const createDocument = async () => { - const title = prompt("Enter a title for the new document:"); - if (!title) return; - - try { - const response = await fetch("/api/createDocument", { - method: "POST", - body: JSON.stringify({ title }), - }); - - const newDoc = await response.json(); - window.location.href = `/editor?docId=${newDoc.id}`; - } catch (error) { - console.error("Failed to create document:", error); + const str = window.prompt("Create New Document"); + if(str != null){ + client.models.Document.create({title: str,}); } }; @@ -82,6 +90,7 @@ export default function DocumentSelector({ signOut }: DocumentSelectorProps) { /> )) )} + diff --git a/app/editor/page.tsx b/app/editor/page.tsx index d606c13..3092bbf 100644 --- a/app/editor/page.tsx +++ b/app/editor/page.tsx @@ -1,13 +1,34 @@ "use client"; import "@/lib/amplifyClient"; -import { useState, useEffect, useMemo, FC } from "react"; +import React, { + useState, + useEffect, + useMemo, + FC, + ChangeEvent, + Dispatch, + SetStateAction, +} from "react"; import { generateClient } from "aws-amplify/data"; import type { Schema } from "@/amplify/data/resource"; import { Authenticator } from "@aws-amplify/ui-react"; import "@aws-amplify/ui-react/styles.css"; - -const Editor: FC = () => { +//import {API} from "aws-amplify" + +const Editor: FC = () => { + let url_params:URLSearchParams; + let docId; + if(typeof window !== 'undefined'){ + url_params = new URLSearchParams(window.location.search); + docId = url_params.get('docId'); + } else{ + docId = null; + } + if(docId === null){ + docId = "848cca7a-3bf8-443f-aa9a-2f18a185189f"; + } + console.log(docId); const client = generateClient(); const [content, setContent] = useState(""); @@ -15,14 +36,14 @@ const Editor: FC = 
() => { const [italic, setItalic] = useState(false); const [underline, setUnderline] = useState(false); - const handleEdit = async (event: React.ChangeEvent) => { + const handleEdit = async (event: ChangeEvent) => { const updatedContent = event.target.value; setContent(updatedContent); // Save update to DynamoDB const { data } = await client.models.Document.list(); console.log(data); - const doc = data.find((d) => d.title === "shared-doc"); + const doc = data.find((d: any) => d.id === docId); if (doc) { console.log(doc.id, updatedContent); @@ -36,7 +57,7 @@ const Editor: FC = () => { const handleDownload = async () => { try { const { data } = await client.models.Document.list(); - const doc = data.find((d) => d.title === "shared-doc"); + const doc = data.find((d) => d.id === docId); if (!doc) { alert("No document found."); @@ -62,34 +83,39 @@ const Editor: FC = () => { const toggleStyle = ( style: boolean, - setter: React.Dispatch>, + setter: Dispatch>, eventName: string ): void => { - setter((prev) => { + setter((prev:Boolean) => { const newState: boolean = !prev; return newState; }); }; + // const callLambda = async () => { + // const response = await API.get("apiName", "/documents"); + // console.log(response); + // }; + useEffect(() => { - try { + const fetchDocument = async () => { const { data } = await client.models.Document.list(); - const doc = data.find((d) => d.title === "shared-doc"); + const doc = data.find((d) => d.id === docId); if (doc) { setContent(doc.content ?? 
""); } else { // Optional: create one if it doesn't exist - await client.models.Document.create({ - title: "shared-doc", - content: "", - }); + // await client.models.Document.create({ + // title: "shared-doc", + // content: "", + // }); } }; - fetchDocument(); - } catch (error) { - console.error("Could not load document", error); - } + setInterval(() => { + fetchDocument(); + }, 1) + }, []); return ( @@ -145,6 +171,7 @@ const Editor: FC = () => { + {/* */} diff --git a/cdk.context.json b/cdk.context.json new file mode 100644 index 0000000..f4bc90c --- /dev/null +++ b/cdk.context.json @@ -0,0 +1,5 @@ +{ + "acknowledged-issue-numbers": [ + 32775 + ] +} diff --git a/cdk.json b/cdk.json new file mode 100644 index 0000000..c1171db --- /dev/null +++ b/cdk.json @@ -0,0 +1,79 @@ +{ + "app": "npx ts-node --transpile-only infra/bin/infrastructure.ts", + "watch": { + "include": [ + "**" + ], + "exclude": [ + "README.md", + "cdk*.json", + "**/*.d.ts", + "**/*.js", + "tsconfig.json", + "package*.json", + "yarn.lock", + "node_modules", + "test" + ] + }, + "context": { + "@aws-cdk/aws-lambda:recognizeLayerVersion": true, + "@aws-cdk/core:checkSecretUsage": true, + "@aws-cdk/core:target-partitions": [ + "aws", + "aws-cn" + ], + "@aws-cdk-containers/ecs-service-extensions:enableDefaultLogDriver": true, + "@aws-cdk/aws-ec2:uniqueImdsv2TemplateName": true, + "@aws-cdk/aws-ecs:arnFormatIncludesClusterName": true, + "@aws-cdk/aws-iam:minimizePolicies": true, + "@aws-cdk/core:validateSnapshotRemovalPolicy": true, + "@aws-cdk/aws-codepipeline:crossAccountKeyAliasStackSafeResourceName": true, + "@aws-cdk/aws-s3:createDefaultLoggingPolicy": true, + "@aws-cdk/aws-sns-subscriptions:restrictSqsDescryption": true, + "@aws-cdk/aws-apigateway:disableCloudWatchRole": true, + "@aws-cdk/core:enablePartitionLiterals": true, + "@aws-cdk/aws-events:eventsTargetQueueSameAccount": true, + "@aws-cdk/aws-ecs:disableExplicitDeploymentControllerForCircuitBreaker": true, + 
"@aws-cdk/aws-iam:importedRoleStackSafeDefaultPolicyName": true, + "@aws-cdk/aws-s3:serverAccessLogsUseBucketPolicy": true, + "@aws-cdk/aws-route53-patters:useCertificate": true, + "@aws-cdk/customresources:installLatestAwsSdkDefault": false, + "@aws-cdk/aws-rds:databaseProxyUniqueResourceName": true, + "@aws-cdk/aws-codedeploy:removeAlarmsFromDeploymentGroup": true, + "@aws-cdk/aws-apigateway:authorizerChangeDeploymentLogicalId": true, + "@aws-cdk/aws-ec2:launchTemplateDefaultUserData": true, + "@aws-cdk/aws-secretsmanager:useAttachedSecretResourcePolicyForSecretTargetAttachments": true, + "@aws-cdk/aws-redshift:columnId": true, + "@aws-cdk/aws-stepfunctions-tasks:enableEmrServicePolicyV2": true, + "@aws-cdk/aws-ec2:restrictDefaultSecurityGroup": true, + "@aws-cdk/aws-apigateway:requestValidatorUniqueId": true, + "@aws-cdk/aws-kms:aliasNameRef": true, + "@aws-cdk/aws-autoscaling:generateLaunchTemplateInsteadOfLaunchConfig": true, + "@aws-cdk/core:includePrefixInUniqueNameGeneration": true, + "@aws-cdk/aws-efs:denyAnonymousAccess": true, + "@aws-cdk/aws-opensearchservice:enableOpensearchMultiAzWithStandby": true, + "@aws-cdk/aws-lambda-nodejs:useLatestRuntimeVersion": true, + "@aws-cdk/aws-efs:mountTargetOrderInsensitiveLogicalId": true, + "@aws-cdk/aws-rds:auroraClusterChangeScopeOfInstanceParameterGroupWithEachParameters": true, + "@aws-cdk/aws-appsync:useArnForSourceApiAssociationIdentifier": true, + "@aws-cdk/aws-rds:preventRenderingDeprecatedCredentials": true, + "@aws-cdk/aws-codepipeline-actions:useNewDefaultBranchForCodeCommitSource": true, + "@aws-cdk/aws-cloudwatch-actions:changeLambdaPermissionLogicalIdForLambdaAction": true, + "@aws-cdk/aws-codepipeline:crossAccountKeysDefaultValueToFalse": true, + "@aws-cdk/aws-codepipeline:defaultPipelineTypeToV2": true, + "@aws-cdk/aws-kms:reduceCrossAccountRegionPolicyScope": true, + "@aws-cdk/aws-eks:nodegroupNameAttribute": true, + "@aws-cdk/aws-ec2:ebsDefaultGp3Volume": true, + 
"@aws-cdk/aws-ecs:removeDefaultDeploymentAlarm": true, + "@aws-cdk/custom-resources:logApiResponseDataPropertyTrueDefault": false, + "@aws-cdk/aws-s3:keepNotificationInImportedBucket": false, + "@aws-cdk/aws-ecs:reduceEc2FargateCloudWatchPermissions": true, + "@aws-cdk/aws-ec2:ec2SumTImeoutEnabled": true, + "@aws-cdk/aws-appsync:appSyncGraphQLAPIScopeLambdaPermission": true, + "@aws-cdk/aws-rds:setCorrectValueForDatabaseInstanceReadReplicaInstanceResourceId": true, + "@aws-cdk/core:cfnIncludeRejectComplexResourceUpdateCreatePolicyIntrinsics": true, + "@aws-cdk/aws-lambda-nodejs:sdkV3ExcludeSmithyPackages": true, + "@aws-cdk/aws-stepfunctions-tasks:fixRunEcsTaskPolicy": true + } +} diff --git a/infra/.gitignore b/infra/.gitignore new file mode 100644 index 0000000..f60797b --- /dev/null +++ b/infra/.gitignore @@ -0,0 +1,8 @@ +*.js +!jest.config.js +*.d.ts +node_modules + +# CDK asset staging directory +.cdk.staging +cdk.out diff --git a/infra/.npmignore b/infra/.npmignore new file mode 100644 index 0000000..c1d6d45 --- /dev/null +++ b/infra/.npmignore @@ -0,0 +1,6 @@ +*.ts +!*.d.ts + +# CDK asset staging directory +.cdk.staging +cdk.out diff --git a/infra/README.md b/infra/README.md new file mode 100644 index 0000000..9315fe5 --- /dev/null +++ b/infra/README.md @@ -0,0 +1,14 @@ +# Welcome to your CDK TypeScript project + +This is a blank project for CDK development with TypeScript. + +The `cdk.json` file tells the CDK Toolkit how to execute your app. 
+ +## Useful commands + +* `npm run build` compile typescript to js +* `npm run watch` watch for changes and compile +* `npm run test` perform the jest unit tests +* `npx cdk deploy` deploy this stack to your default AWS account/region +* `npx cdk diff` compare deployed stack with current state +* `npx cdk synth` emits the synthesized CloudFormation template diff --git a/infra/bin/infrastructure.ts b/infra/bin/infrastructure.ts new file mode 100644 index 0000000..19d64dc --- /dev/null +++ b/infra/bin/infrastructure.ts @@ -0,0 +1,15 @@ +#!/usr/bin/env node +import 'source-map-support/register'; +import * as cdk from 'aws-cdk-lib'; +import { RealtimeDocStack } from '../lib/appsync-stack'; + +const app = new cdk.App(); +// Only two arguments—that matches your current constructor signature: +const account = process.env.CDK_DEFAULT_ACCOUNT!; +const region = process.env.CDK_DEFAULT_REGION!; +new RealtimeDocStack(app, 'RealtimeDocStack', { + env: { + account, + region, + }, + }); \ No newline at end of file diff --git a/infra/jest.config.js b/infra/jest.config.js new file mode 100644 index 0000000..08263b8 --- /dev/null +++ b/infra/jest.config.js @@ -0,0 +1,8 @@ +module.exports = { + testEnvironment: 'node', + roots: ['/test'], + testMatch: ['**/*.test.ts'], + transform: { + '^.+\\.tsx?$': 'ts-jest' + } +}; diff --git a/infra/lib/appsync-stack.ts b/infra/lib/appsync-stack.ts new file mode 100644 index 0000000..72aff68 --- /dev/null +++ b/infra/lib/appsync-stack.ts @@ -0,0 +1,113 @@ +import * as path from 'path'; +import * as cdk from 'aws-cdk-lib'; +import { Construct } from 'constructs'; +import { + GraphqlApi, + SchemaFile, + AuthorizationType, + FieldLogLevel, +} from 'aws-cdk-lib/aws-appsync'; +import { + Table, + AttributeType, + BillingMode, +} from 'aws-cdk-lib/aws-dynamodb'; +import { + Function, + Runtime, + Code, +} from 'aws-cdk-lib/aws-lambda'; +import { + Duration, + Expiration, + RemovalPolicy, + CfnOutput, +} from 'aws-cdk-lib'; + +export class 
RealtimeDocStack extends cdk.Stack { + constructor(scope: Construct, id: string, props?: cdk.StackProps) { + super(scope, id, props); + + // 1) DynamoDB table + const table = new Table(this, 'DocumentsTable', { + partitionKey: { name: 'id', type: AttributeType.STRING }, + billingMode: BillingMode.PAY_PER_REQUEST, + removalPolicy: RemovalPolicy.DESTROY, + }); + + // 2) AppSync API + const api = new GraphqlApi(this, 'RealtimeDocAPI', { + name: 'RealtimeDocumentAPI', + schema: SchemaFile.fromAsset(path.join(__dirname, '../schema.graphql')), + authorizationConfig: { + defaultAuthorization: { + authorizationType: AuthorizationType.API_KEY, + apiKeyConfig: { expires: Expiration.after(Duration.days(365)) }, + }, + }, + logConfig: { fieldLogLevel: FieldLogLevel.ALL }, + }); + + // Helper to wire a Lambda → DataSource → Resolver + const makeLambdaResolver = ( + id: string, + folder: string, + typeName: 'Query' | 'Mutation', + fieldName: string, + perms: { read?: boolean; write?: boolean } + ) => { + const fn = new Function(this, id, { + runtime: Runtime.NODEJS_20_X, + handler: 'index.handler', + code: Code.fromAsset( + path.join(__dirname, `../../amplify/functions/${folder}`) + ), + environment: { DOCUMENT_TABLE_NAME: table.tableName }, + }); + if (perms.read) table.grantReadData(fn); + if (perms.write) table.grantWriteData(fn); + + const ds = api.addLambdaDataSource(`${id}DS`, fn); + ds.createResolver(`${id}Resolver`, { typeName, fieldName }); + }; + + // 1) List all documents + makeLambdaResolver( + 'FetchDocumentsFn', + 'fetchDocuments', + 'Query', + 'fetchDocuments', + { read: true } + ); + + // 2) Delete a document + makeLambdaResolver( + 'DeleteDocumentFn', + 'deleteDocument', + 'Mutation', + 'deleteDocument', + { write: true } + ); + + // 3) Download a document + makeLambdaResolver( + 'DownloadDocumentFn', + 'downloadDocument', + 'Mutation', + 'downloadDocument', + { read: true } + ); + + // 4) Outputs for your frontend + new CfnOutput(this, 'GraphQLAPIURL', { value: 
api.graphqlUrl }); + new CfnOutput(this, 'GraphQLAPIKey', { value: api.apiKey! }); + } +} + +const app = new cdk.App(); +new RealtimeDocStack(app, 'RealtimeDocStack', { + env: { + account: process.env.CDK_DEFAULT_ACCOUNT, + region: process.env.CDK_DEFAULT_REGION, + }, +}); diff --git a/infra/lib/infrastructure-stack.ts b/infra/lib/infrastructure-stack.ts new file mode 100644 index 0000000..5fa1c48 --- /dev/null +++ b/infra/lib/infrastructure-stack.ts @@ -0,0 +1,16 @@ +import * as cdk from 'aws-cdk-lib'; +import { Construct } from 'constructs'; +// import * as sqs from 'aws-cdk-lib/aws-sqs'; + +export class InfrastructureStack extends cdk.Stack { + constructor(scope: Construct, id: string, props?: cdk.StackProps) { + super(scope, id, props); + + // The code that defines your stack goes here + + // example resource + // const queue = new sqs.Queue(this, 'InfrastructureQueue', { + // visibilityTimeout: cdk.Duration.seconds(300) + // }); + } +} diff --git a/infra/package-lock.json b/infra/package-lock.json new file mode 100644 index 0000000..b3b8e6c --- /dev/null +++ b/infra/package-lock.json @@ -0,0 +1,5009 @@ +{ + "name": "infrastructure", + "version": "0.1.0", + "lockfileVersion": 3, + "requires": true, + "packages": { + "": { + "name": "infrastructure", + "version": "0.1.0", + "dependencies": { + "aws-cdk-lib": "^2.163.1", + "aws-sdk": "^2.1692.0", + "constructs": "^10.4.2", + "source-map-support": "^0.5.21" + }, + "bin": { + "infrastructure": "bin/infrastructure.js" + }, + "devDependencies": { + "@types/aws-lambda": "^8.10.149", + "@types/aws-sdk": "^0.0.42", + "@types/jest": "^29.5.12", + "@types/node": "^22.5.4", + "aws-cdk": "2.163.1", + "jest": "^29.7.0", + "ts-jest": "^29.2.5", + "ts-node": "^10.9.2", + "typescript": "~5.6.2" + } + }, + "node_modules/@ampproject/remapping": { + "version": "2.3.0", + "resolved": "https://registry.npmjs.org/@ampproject/remapping/-/remapping-2.3.0.tgz", + "integrity": 
"sha512-30iZtAPgz+LTIYoeivqYo853f02jBYSd5uGnGpkFV0M3xOt9aN73erkgYAmZU43x4VfqcnLxW9Kpg3R5LC4YYw==", + "dev": true, + "license": "Apache-2.0", + "dependencies": { + "@jridgewell/gen-mapping": "^0.3.5", + "@jridgewell/trace-mapping": "^0.3.24" + }, + "engines": { + "node": ">=6.0.0" + } + }, + "node_modules/@aws-cdk/asset-awscli-v1": { + "version": "2.2.233", + "resolved": "https://registry.npmjs.org/@aws-cdk/asset-awscli-v1/-/asset-awscli-v1-2.2.233.tgz", + "integrity": "sha512-OH5ZN1F/0wwOUwzVUSvE0/syUOi44H9the6IG16anlSptfeQ1fvduJazZAKRuJLtautPbiqxllyOrtWh6LhX8A==", + "license": "Apache-2.0" + }, + "node_modules/@aws-cdk/asset-kubectl-v20": { + "version": "2.1.4", + "resolved": "https://registry.npmjs.org/@aws-cdk/asset-kubectl-v20/-/asset-kubectl-v20-2.1.4.tgz", + "integrity": "sha512-Ps2MkmjYgMyflagqQ4dgTElc7Vwpqj8spw8dQVFiSeaaMPsuDSNsPax3/HjuDuwqsmLdaCZc6umlxYLpL0kYDA==", + "license": "Apache-2.0" + }, + "node_modules/@aws-cdk/asset-node-proxy-agent-v6": { + "version": "2.1.0", + "resolved": "https://registry.npmjs.org/@aws-cdk/asset-node-proxy-agent-v6/-/asset-node-proxy-agent-v6-2.1.0.tgz", + "integrity": "sha512-7bY3J8GCVxLupn/kNmpPc5VJz8grx+4RKfnnJiO1LG+uxkZfANZG3RMHhE+qQxxwkyQ9/MfPtTpf748UhR425A==", + "license": "Apache-2.0" + }, + "node_modules/@aws-cdk/cloud-assembly-schema": { + "version": "38.0.1", + "resolved": "https://registry.npmjs.org/@aws-cdk/cloud-assembly-schema/-/cloud-assembly-schema-38.0.1.tgz", + "integrity": "sha512-KvPe+NMWAulfNVwY7jenFhzhuLhLqJ/OPy5jx7wUstbjnYnjRVLpUHPU3yCjXFE0J8cuJVdx95BJ4rOs66Pi9w==", + "bundleDependencies": [ + "jsonschema", + "semver" + ], + "license": "Apache-2.0", + "dependencies": { + "jsonschema": "^1.4.1", + "semver": "^7.6.3" + } + }, + "node_modules/@aws-cdk/cloud-assembly-schema/node_modules/jsonschema": { + "version": "1.4.1", + "inBundle": true, + "license": "MIT", + "engines": { + "node": "*" + } + }, + "node_modules/@aws-cdk/cloud-assembly-schema/node_modules/semver": { + "version": "7.6.3", + "inBundle": 
true, + "license": "ISC", + "bin": { + "semver": "bin/semver.js" + }, + "engines": { + "node": ">=10" + } + }, + "node_modules/@babel/code-frame": { + "version": "7.26.2", + "resolved": "https://registry.npmjs.org/@babel/code-frame/-/code-frame-7.26.2.tgz", + "integrity": "sha512-RJlIHRueQgwWitWgF8OdFYGZX328Ax5BCemNGlqHfplnRT9ESi8JkFlvaVYbS+UubVY6dpv87Fs2u5M29iNFVQ==", + "dev": true, + "license": "MIT", + "dependencies": { + "@babel/helper-validator-identifier": "^7.25.9", + "js-tokens": "^4.0.0", + "picocolors": "^1.0.0" + }, + "engines": { + "node": ">=6.9.0" + } + }, + "node_modules/@babel/compat-data": { + "version": "7.26.8", + "resolved": "https://registry.npmjs.org/@babel/compat-data/-/compat-data-7.26.8.tgz", + "integrity": "sha512-oH5UPLMWR3L2wEFLnFJ1TZXqHufiTKAiLfqw5zkhS4dKXLJ10yVztfil/twG8EDTA4F/tvVNw9nOl4ZMslB8rQ==", + "dev": true, + "license": "MIT", + "engines": { + "node": ">=6.9.0" + } + }, + "node_modules/@babel/core": { + "version": "7.26.10", + "resolved": "https://registry.npmjs.org/@babel/core/-/core-7.26.10.tgz", + "integrity": "sha512-vMqyb7XCDMPvJFFOaT9kxtiRh42GwlZEg1/uIgtZshS5a/8OaduUfCi7kynKgc3Tw/6Uo2D+db9qBttghhmxwQ==", + "dev": true, + "license": "MIT", + "dependencies": { + "@ampproject/remapping": "^2.2.0", + "@babel/code-frame": "^7.26.2", + "@babel/generator": "^7.26.10", + "@babel/helper-compilation-targets": "^7.26.5", + "@babel/helper-module-transforms": "^7.26.0", + "@babel/helpers": "^7.26.10", + "@babel/parser": "^7.26.10", + "@babel/template": "^7.26.9", + "@babel/traverse": "^7.26.10", + "@babel/types": "^7.26.10", + "convert-source-map": "^2.0.0", + "debug": "^4.1.0", + "gensync": "^1.0.0-beta.2", + "json5": "^2.2.3", + "semver": "^6.3.1" + }, + "engines": { + "node": ">=6.9.0" + }, + "funding": { + "type": "opencollective", + "url": "https://opencollective.com/babel" + } + }, + "node_modules/@babel/generator": { + "version": "7.27.0", + "resolved": "https://registry.npmjs.org/@babel/generator/-/generator-7.27.0.tgz", + 
"integrity": "sha512-VybsKvpiN1gU1sdMZIp7FcqphVVKEwcuj02x73uvcHE0PTihx1nlBcowYWhDwjpoAXRv43+gDzyggGnn1XZhVw==", + "dev": true, + "license": "MIT", + "dependencies": { + "@babel/parser": "^7.27.0", + "@babel/types": "^7.27.0", + "@jridgewell/gen-mapping": "^0.3.5", + "@jridgewell/trace-mapping": "^0.3.25", + "jsesc": "^3.0.2" + }, + "engines": { + "node": ">=6.9.0" + } + }, + "node_modules/@babel/helper-compilation-targets": { + "version": "7.27.0", + "resolved": "https://registry.npmjs.org/@babel/helper-compilation-targets/-/helper-compilation-targets-7.27.0.tgz", + "integrity": "sha512-LVk7fbXml0H2xH34dFzKQ7TDZ2G4/rVTOrq9V+icbbadjbVxxeFeDsNHv2SrZeWoA+6ZiTyWYWtScEIW07EAcA==", + "dev": true, + "license": "MIT", + "dependencies": { + "@babel/compat-data": "^7.26.8", + "@babel/helper-validator-option": "^7.25.9", + "browserslist": "^4.24.0", + "lru-cache": "^5.1.1", + "semver": "^6.3.1" + }, + "engines": { + "node": ">=6.9.0" + } + }, + "node_modules/@babel/helper-module-imports": { + "version": "7.25.9", + "resolved": "https://registry.npmjs.org/@babel/helper-module-imports/-/helper-module-imports-7.25.9.tgz", + "integrity": "sha512-tnUA4RsrmflIM6W6RFTLFSXITtl0wKjgpnLgXyowocVPrbYrLUXSBXDgTs8BlbmIzIdlBySRQjINYs2BAkiLtw==", + "dev": true, + "license": "MIT", + "dependencies": { + "@babel/traverse": "^7.25.9", + "@babel/types": "^7.25.9" + }, + "engines": { + "node": ">=6.9.0" + } + }, + "node_modules/@babel/helper-module-transforms": { + "version": "7.26.0", + "resolved": "https://registry.npmjs.org/@babel/helper-module-transforms/-/helper-module-transforms-7.26.0.tgz", + "integrity": "sha512-xO+xu6B5K2czEnQye6BHA7DolFFmS3LB7stHZFaOLb1pAwO1HWLS8fXA+eh0A2yIvltPVmx3eNNDBJA2SLHXFw==", + "dev": true, + "license": "MIT", + "dependencies": { + "@babel/helper-module-imports": "^7.25.9", + "@babel/helper-validator-identifier": "^7.25.9", + "@babel/traverse": "^7.25.9" + }, + "engines": { + "node": ">=6.9.0" + }, + "peerDependencies": { + "@babel/core": "^7.0.0" + } + }, + 
"node_modules/@babel/helper-plugin-utils": { + "version": "7.26.5", + "resolved": "https://registry.npmjs.org/@babel/helper-plugin-utils/-/helper-plugin-utils-7.26.5.tgz", + "integrity": "sha512-RS+jZcRdZdRFzMyr+wcsaqOmld1/EqTghfaBGQQd/WnRdzdlvSZ//kF7U8VQTxf1ynZ4cjUcYgjVGx13ewNPMg==", + "dev": true, + "license": "MIT", + "engines": { + "node": ">=6.9.0" + } + }, + "node_modules/@babel/helper-string-parser": { + "version": "7.25.9", + "resolved": "https://registry.npmjs.org/@babel/helper-string-parser/-/helper-string-parser-7.25.9.tgz", + "integrity": "sha512-4A/SCr/2KLd5jrtOMFzaKjVtAei3+2r/NChoBNoZ3EyP/+GlhoaEGoWOZUmFmoITP7zOJyHIMm+DYRd8o3PvHA==", + "dev": true, + "license": "MIT", + "engines": { + "node": ">=6.9.0" + } + }, + "node_modules/@babel/helper-validator-identifier": { + "version": "7.25.9", + "resolved": "https://registry.npmjs.org/@babel/helper-validator-identifier/-/helper-validator-identifier-7.25.9.tgz", + "integrity": "sha512-Ed61U6XJc3CVRfkERJWDz4dJwKe7iLmmJsbOGu9wSloNSFttHV0I8g6UAgb7qnK5ly5bGLPd4oXZlxCdANBOWQ==", + "dev": true, + "license": "MIT", + "engines": { + "node": ">=6.9.0" + } + }, + "node_modules/@babel/helper-validator-option": { + "version": "7.25.9", + "resolved": "https://registry.npmjs.org/@babel/helper-validator-option/-/helper-validator-option-7.25.9.tgz", + "integrity": "sha512-e/zv1co8pp55dNdEcCynfj9X7nyUKUXoUEwfXqaZt0omVOmDe9oOTdKStH4GmAw6zxMFs50ZayuMfHDKlO7Tfw==", + "dev": true, + "license": "MIT", + "engines": { + "node": ">=6.9.0" + } + }, + "node_modules/@babel/helpers": { + "version": "7.27.0", + "resolved": "https://registry.npmjs.org/@babel/helpers/-/helpers-7.27.0.tgz", + "integrity": "sha512-U5eyP/CTFPuNE3qk+WZMxFkp/4zUzdceQlfzf7DdGdhp+Fezd7HD+i8Y24ZuTMKX3wQBld449jijbGq6OdGNQg==", + "dev": true, + "license": "MIT", + "dependencies": { + "@babel/template": "^7.27.0", + "@babel/types": "^7.27.0" + }, + "engines": { + "node": ">=6.9.0" + } + }, + "node_modules/@babel/parser": { + "version": "7.27.0", + "resolved": 
"https://registry.npmjs.org/@babel/parser/-/parser-7.27.0.tgz", + "integrity": "sha512-iaepho73/2Pz7w2eMS0Q5f83+0RKI7i4xmiYeBmDzfRVbQtTOG7Ts0S4HzJVsTMGI9keU8rNfuZr8DKfSt7Yyg==", + "dev": true, + "license": "MIT", + "dependencies": { + "@babel/types": "^7.27.0" + }, + "bin": { + "parser": "bin/babel-parser.js" + }, + "engines": { + "node": ">=6.0.0" + } + }, + "node_modules/@babel/plugin-syntax-async-generators": { + "version": "7.8.4", + "resolved": "https://registry.npmjs.org/@babel/plugin-syntax-async-generators/-/plugin-syntax-async-generators-7.8.4.tgz", + "integrity": "sha512-tycmZxkGfZaxhMRbXlPXuVFpdWlXpir2W4AMhSJgRKzk/eDlIXOhb2LHWoLpDF7TEHylV5zNhykX6KAgHJmTNw==", + "dev": true, + "license": "MIT", + "dependencies": { + "@babel/helper-plugin-utils": "^7.8.0" + }, + "peerDependencies": { + "@babel/core": "^7.0.0-0" + } + }, + "node_modules/@babel/plugin-syntax-bigint": { + "version": "7.8.3", + "resolved": "https://registry.npmjs.org/@babel/plugin-syntax-bigint/-/plugin-syntax-bigint-7.8.3.tgz", + "integrity": "sha512-wnTnFlG+YxQm3vDxpGE57Pj0srRU4sHE/mDkt1qv2YJJSeUAec2ma4WLUnUPeKjyrfntVwe/N6dCXpU+zL3Npg==", + "dev": true, + "license": "MIT", + "dependencies": { + "@babel/helper-plugin-utils": "^7.8.0" + }, + "peerDependencies": { + "@babel/core": "^7.0.0-0" + } + }, + "node_modules/@babel/plugin-syntax-class-properties": { + "version": "7.12.13", + "resolved": "https://registry.npmjs.org/@babel/plugin-syntax-class-properties/-/plugin-syntax-class-properties-7.12.13.tgz", + "integrity": "sha512-fm4idjKla0YahUNgFNLCB0qySdsoPiZP3iQE3rky0mBUtMZ23yDJ9SJdg6dXTSDnulOVqiF3Hgr9nbXvXTQZYA==", + "dev": true, + "license": "MIT", + "dependencies": { + "@babel/helper-plugin-utils": "^7.12.13" + }, + "peerDependencies": { + "@babel/core": "^7.0.0-0" + } + }, + "node_modules/@babel/plugin-syntax-class-static-block": { + "version": "7.14.5", + "resolved": "https://registry.npmjs.org/@babel/plugin-syntax-class-static-block/-/plugin-syntax-class-static-block-7.14.5.tgz", + 
"integrity": "sha512-b+YyPmr6ldyNnM6sqYeMWE+bgJcJpO6yS4QD7ymxgH34GBPNDM/THBh8iunyvKIZztiwLH4CJZ0RxTk9emgpjw==", + "dev": true, + "license": "MIT", + "dependencies": { + "@babel/helper-plugin-utils": "^7.14.5" + }, + "engines": { + "node": ">=6.9.0" + }, + "peerDependencies": { + "@babel/core": "^7.0.0-0" + } + }, + "node_modules/@babel/plugin-syntax-import-attributes": { + "version": "7.26.0", + "resolved": "https://registry.npmjs.org/@babel/plugin-syntax-import-attributes/-/plugin-syntax-import-attributes-7.26.0.tgz", + "integrity": "sha512-e2dttdsJ1ZTpi3B9UYGLw41hifAubg19AtCu/2I/F1QNVclOBr1dYpTdmdyZ84Xiz43BS/tCUkMAZNLv12Pi+A==", + "dev": true, + "license": "MIT", + "dependencies": { + "@babel/helper-plugin-utils": "^7.25.9" + }, + "engines": { + "node": ">=6.9.0" + }, + "peerDependencies": { + "@babel/core": "^7.0.0-0" + } + }, + "node_modules/@babel/plugin-syntax-import-meta": { + "version": "7.10.4", + "resolved": "https://registry.npmjs.org/@babel/plugin-syntax-import-meta/-/plugin-syntax-import-meta-7.10.4.tgz", + "integrity": "sha512-Yqfm+XDx0+Prh3VSeEQCPU81yC+JWZ2pDPFSS4ZdpfZhp4MkFMaDC1UqseovEKwSUpnIL7+vK+Clp7bfh0iD7g==", + "dev": true, + "license": "MIT", + "dependencies": { + "@babel/helper-plugin-utils": "^7.10.4" + }, + "peerDependencies": { + "@babel/core": "^7.0.0-0" + } + }, + "node_modules/@babel/plugin-syntax-json-strings": { + "version": "7.8.3", + "resolved": "https://registry.npmjs.org/@babel/plugin-syntax-json-strings/-/plugin-syntax-json-strings-7.8.3.tgz", + "integrity": "sha512-lY6kdGpWHvjoe2vk4WrAapEuBR69EMxZl+RoGRhrFGNYVK8mOPAW8VfbT/ZgrFbXlDNiiaxQnAtgVCZ6jv30EA==", + "dev": true, + "license": "MIT", + "dependencies": { + "@babel/helper-plugin-utils": "^7.8.0" + }, + "peerDependencies": { + "@babel/core": "^7.0.0-0" + } + }, + "node_modules/@babel/plugin-syntax-jsx": { + "version": "7.25.9", + "resolved": "https://registry.npmjs.org/@babel/plugin-syntax-jsx/-/plugin-syntax-jsx-7.25.9.tgz", + "integrity": 
"sha512-ld6oezHQMZsZfp6pWtbjaNDF2tiiCYYDqQszHt5VV437lewP9aSi2Of99CK0D0XB21k7FLgnLcmQKyKzynfeAA==", + "dev": true, + "license": "MIT", + "dependencies": { + "@babel/helper-plugin-utils": "^7.25.9" + }, + "engines": { + "node": ">=6.9.0" + }, + "peerDependencies": { + "@babel/core": "^7.0.0-0" + } + }, + "node_modules/@babel/plugin-syntax-logical-assignment-operators": { + "version": "7.10.4", + "resolved": "https://registry.npmjs.org/@babel/plugin-syntax-logical-assignment-operators/-/plugin-syntax-logical-assignment-operators-7.10.4.tgz", + "integrity": "sha512-d8waShlpFDinQ5MtvGU9xDAOzKH47+FFoney2baFIoMr952hKOLp1HR7VszoZvOsV/4+RRszNY7D17ba0te0ig==", + "dev": true, + "license": "MIT", + "dependencies": { + "@babel/helper-plugin-utils": "^7.10.4" + }, + "peerDependencies": { + "@babel/core": "^7.0.0-0" + } + }, + "node_modules/@babel/plugin-syntax-nullish-coalescing-operator": { + "version": "7.8.3", + "resolved": "https://registry.npmjs.org/@babel/plugin-syntax-nullish-coalescing-operator/-/plugin-syntax-nullish-coalescing-operator-7.8.3.tgz", + "integrity": "sha512-aSff4zPII1u2QD7y+F8oDsz19ew4IGEJg9SVW+bqwpwtfFleiQDMdzA/R+UlWDzfnHFCxxleFT0PMIrR36XLNQ==", + "dev": true, + "license": "MIT", + "dependencies": { + "@babel/helper-plugin-utils": "^7.8.0" + }, + "peerDependencies": { + "@babel/core": "^7.0.0-0" + } + }, + "node_modules/@babel/plugin-syntax-numeric-separator": { + "version": "7.10.4", + "resolved": "https://registry.npmjs.org/@babel/plugin-syntax-numeric-separator/-/plugin-syntax-numeric-separator-7.10.4.tgz", + "integrity": "sha512-9H6YdfkcK/uOnY/K7/aA2xpzaAgkQn37yzWUMRK7OaPOqOpGS1+n0H5hxT9AUw9EsSjPW8SVyMJwYRtWs3X3ug==", + "dev": true, + "license": "MIT", + "dependencies": { + "@babel/helper-plugin-utils": "^7.10.4" + }, + "peerDependencies": { + "@babel/core": "^7.0.0-0" + } + }, + "node_modules/@babel/plugin-syntax-object-rest-spread": { + "version": "7.8.3", + "resolved": 
"https://registry.npmjs.org/@babel/plugin-syntax-object-rest-spread/-/plugin-syntax-object-rest-spread-7.8.3.tgz", + "integrity": "sha512-XoqMijGZb9y3y2XskN+P1wUGiVwWZ5JmoDRwx5+3GmEplNyVM2s2Dg8ILFQm8rWM48orGy5YpI5Bl8U1y7ydlA==", + "dev": true, + "license": "MIT", + "dependencies": { + "@babel/helper-plugin-utils": "^7.8.0" + }, + "peerDependencies": { + "@babel/core": "^7.0.0-0" + } + }, + "node_modules/@babel/plugin-syntax-optional-catch-binding": { + "version": "7.8.3", + "resolved": "https://registry.npmjs.org/@babel/plugin-syntax-optional-catch-binding/-/plugin-syntax-optional-catch-binding-7.8.3.tgz", + "integrity": "sha512-6VPD0Pc1lpTqw0aKoeRTMiB+kWhAoT24PA+ksWSBrFtl5SIRVpZlwN3NNPQjehA2E/91FV3RjLWoVTglWcSV3Q==", + "dev": true, + "license": "MIT", + "dependencies": { + "@babel/helper-plugin-utils": "^7.8.0" + }, + "peerDependencies": { + "@babel/core": "^7.0.0-0" + } + }, + "node_modules/@babel/plugin-syntax-optional-chaining": { + "version": "7.8.3", + "resolved": "https://registry.npmjs.org/@babel/plugin-syntax-optional-chaining/-/plugin-syntax-optional-chaining-7.8.3.tgz", + "integrity": "sha512-KoK9ErH1MBlCPxV0VANkXW2/dw4vlbGDrFgz8bmUsBGYkFRcbRwMh6cIJubdPrkxRwuGdtCk0v/wPTKbQgBjkg==", + "dev": true, + "license": "MIT", + "dependencies": { + "@babel/helper-plugin-utils": "^7.8.0" + }, + "peerDependencies": { + "@babel/core": "^7.0.0-0" + } + }, + "node_modules/@babel/plugin-syntax-private-property-in-object": { + "version": "7.14.5", + "resolved": "https://registry.npmjs.org/@babel/plugin-syntax-private-property-in-object/-/plugin-syntax-private-property-in-object-7.14.5.tgz", + "integrity": "sha512-0wVnp9dxJ72ZUJDV27ZfbSj6iHLoytYZmh3rFcxNnvsJF3ktkzLDZPy/mA17HGsaQT3/DQsWYX1f1QGWkCoVUg==", + "dev": true, + "license": "MIT", + "dependencies": { + "@babel/helper-plugin-utils": "^7.14.5" + }, + "engines": { + "node": ">=6.9.0" + }, + "peerDependencies": { + "@babel/core": "^7.0.0-0" + } + }, + "node_modules/@babel/plugin-syntax-top-level-await": { + "version": 
"7.14.5", + "resolved": "https://registry.npmjs.org/@babel/plugin-syntax-top-level-await/-/plugin-syntax-top-level-await-7.14.5.tgz", + "integrity": "sha512-hx++upLv5U1rgYfwe1xBQUhRmU41NEvpUvrp8jkrSCdvGSnM5/qdRMtylJ6PG5OFkBaHkbTAKTnd3/YyESRHFw==", + "dev": true, + "license": "MIT", + "dependencies": { + "@babel/helper-plugin-utils": "^7.14.5" + }, + "engines": { + "node": ">=6.9.0" + }, + "peerDependencies": { + "@babel/core": "^7.0.0-0" + } + }, + "node_modules/@babel/plugin-syntax-typescript": { + "version": "7.25.9", + "resolved": "https://registry.npmjs.org/@babel/plugin-syntax-typescript/-/plugin-syntax-typescript-7.25.9.tgz", + "integrity": "sha512-hjMgRy5hb8uJJjUcdWunWVcoi9bGpJp8p5Ol1229PoN6aytsLwNMgmdftO23wnCLMfVmTwZDWMPNq/D1SY60JQ==", + "dev": true, + "license": "MIT", + "dependencies": { + "@babel/helper-plugin-utils": "^7.25.9" + }, + "engines": { + "node": ">=6.9.0" + }, + "peerDependencies": { + "@babel/core": "^7.0.0-0" + } + }, + "node_modules/@babel/template": { + "version": "7.27.0", + "resolved": "https://registry.npmjs.org/@babel/template/-/template-7.27.0.tgz", + "integrity": "sha512-2ncevenBqXI6qRMukPlXwHKHchC7RyMuu4xv5JBXRfOGVcTy1mXCD12qrp7Jsoxll1EV3+9sE4GugBVRjT2jFA==", + "dev": true, + "license": "MIT", + "dependencies": { + "@babel/code-frame": "^7.26.2", + "@babel/parser": "^7.27.0", + "@babel/types": "^7.27.0" + }, + "engines": { + "node": ">=6.9.0" + } + }, + "node_modules/@babel/traverse": { + "version": "7.27.0", + "resolved": "https://registry.npmjs.org/@babel/traverse/-/traverse-7.27.0.tgz", + "integrity": "sha512-19lYZFzYVQkkHkl4Cy4WrAVcqBkgvV2YM2TU3xG6DIwO7O3ecbDPfW3yM3bjAGcqcQHi+CCtjMR3dIEHxsd6bA==", + "dev": true, + "license": "MIT", + "dependencies": { + "@babel/code-frame": "^7.26.2", + "@babel/generator": "^7.27.0", + "@babel/parser": "^7.27.0", + "@babel/template": "^7.27.0", + "@babel/types": "^7.27.0", + "debug": "^4.3.1", + "globals": "^11.1.0" + }, + "engines": { + "node": ">=6.9.0" + } + }, + "node_modules/@babel/types": 
{ + "version": "7.27.0", + "resolved": "https://registry.npmjs.org/@babel/types/-/types-7.27.0.tgz", + "integrity": "sha512-H45s8fVLYjbhFH62dIJ3WtmJ6RSPt/3DRO0ZcT2SUiYiQyz3BLVb9ADEnLl91m74aQPS3AzzeajZHYOalWe3bg==", + "dev": true, + "license": "MIT", + "dependencies": { + "@babel/helper-string-parser": "^7.25.9", + "@babel/helper-validator-identifier": "^7.25.9" + }, + "engines": { + "node": ">=6.9.0" + } + }, + "node_modules/@bcoe/v8-coverage": { + "version": "0.2.3", + "resolved": "https://registry.npmjs.org/@bcoe/v8-coverage/-/v8-coverage-0.2.3.tgz", + "integrity": "sha512-0hYQ8SB4Db5zvZB4axdMHGwEaQjkZzFjQiN9LVYvIFB2nSUHW9tYpxWriPrWDASIxiaXax83REcLxuSdnGPZtw==", + "dev": true, + "license": "MIT" + }, + "node_modules/@cspotcode/source-map-support": { + "version": "0.8.1", + "resolved": "https://registry.npmjs.org/@cspotcode/source-map-support/-/source-map-support-0.8.1.tgz", + "integrity": "sha512-IchNf6dN4tHoMFIn/7OE8LWZ19Y6q/67Bmf6vnGREv8RSbBVb9LPJxEcnwrcwX6ixSvaiGoomAUvu4YSxXrVgw==", + "dev": true, + "license": "MIT", + "dependencies": { + "@jridgewell/trace-mapping": "0.3.9" + }, + "engines": { + "node": ">=12" + } + }, + "node_modules/@cspotcode/source-map-support/node_modules/@jridgewell/trace-mapping": { + "version": "0.3.9", + "resolved": "https://registry.npmjs.org/@jridgewell/trace-mapping/-/trace-mapping-0.3.9.tgz", + "integrity": "sha512-3Belt6tdc8bPgAtbcmdtNJlirVoTmEb5e2gC94PnkwEW9jI6CAHUeoG85tjWP5WquqfavoMtMwiG4P926ZKKuQ==", + "dev": true, + "license": "MIT", + "dependencies": { + "@jridgewell/resolve-uri": "^3.0.3", + "@jridgewell/sourcemap-codec": "^1.4.10" + } + }, + "node_modules/@istanbuljs/load-nyc-config": { + "version": "1.1.0", + "resolved": "https://registry.npmjs.org/@istanbuljs/load-nyc-config/-/load-nyc-config-1.1.0.tgz", + "integrity": "sha512-VjeHSlIzpv/NyD3N0YuHfXOPDIixcA1q2ZV98wsMqcYlPmv2n3Yb2lYP9XMElnaFVXg5A7YLTeLu6V84uQDjmQ==", + "dev": true, + "license": "ISC", + "dependencies": { + "camelcase": "^5.3.1", + "find-up": "^4.1.0", + 
"get-package-type": "^0.1.0", + "js-yaml": "^3.13.1", + "resolve-from": "^5.0.0" + }, + "engines": { + "node": ">=8" + } + }, + "node_modules/@istanbuljs/schema": { + "version": "0.1.3", + "resolved": "https://registry.npmjs.org/@istanbuljs/schema/-/schema-0.1.3.tgz", + "integrity": "sha512-ZXRY4jNvVgSVQ8DL3LTcakaAtXwTVUxE81hslsyD2AtoXW/wVob10HkOJ1X/pAlcI7D+2YoZKg5do8G/w6RYgA==", + "dev": true, + "license": "MIT", + "engines": { + "node": ">=8" + } + }, + "node_modules/@jest/console": { + "version": "29.7.0", + "resolved": "https://registry.npmjs.org/@jest/console/-/console-29.7.0.tgz", + "integrity": "sha512-5Ni4CU7XHQi32IJ398EEP4RrB8eV09sXP2ROqD4bksHrnTree52PsxvX8tpL8LvTZ3pFzXyPbNQReSN41CAhOg==", + "dev": true, + "license": "MIT", + "dependencies": { + "@jest/types": "^29.6.3", + "@types/node": "*", + "chalk": "^4.0.0", + "jest-message-util": "^29.7.0", + "jest-util": "^29.7.0", + "slash": "^3.0.0" + }, + "engines": { + "node": "^14.15.0 || ^16.10.0 || >=18.0.0" + } + }, + "node_modules/@jest/core": { + "version": "29.7.0", + "resolved": "https://registry.npmjs.org/@jest/core/-/core-29.7.0.tgz", + "integrity": "sha512-n7aeXWKMnGtDA48y8TLWJPJmLmmZ642Ceo78cYWEpiD7FzDgmNDV/GCVRorPABdXLJZ/9wzzgZAlHjXjxDHGsg==", + "dev": true, + "license": "MIT", + "dependencies": { + "@jest/console": "^29.7.0", + "@jest/reporters": "^29.7.0", + "@jest/test-result": "^29.7.0", + "@jest/transform": "^29.7.0", + "@jest/types": "^29.6.3", + "@types/node": "*", + "ansi-escapes": "^4.2.1", + "chalk": "^4.0.0", + "ci-info": "^3.2.0", + "exit": "^0.1.2", + "graceful-fs": "^4.2.9", + "jest-changed-files": "^29.7.0", + "jest-config": "^29.7.0", + "jest-haste-map": "^29.7.0", + "jest-message-util": "^29.7.0", + "jest-regex-util": "^29.6.3", + "jest-resolve": "^29.7.0", + "jest-resolve-dependencies": "^29.7.0", + "jest-runner": "^29.7.0", + "jest-runtime": "^29.7.0", + "jest-snapshot": "^29.7.0", + "jest-util": "^29.7.0", + "jest-validate": "^29.7.0", + "jest-watcher": "^29.7.0", + "micromatch": 
"^4.0.4", + "pretty-format": "^29.7.0", + "slash": "^3.0.0", + "strip-ansi": "^6.0.0" + }, + "engines": { + "node": "^14.15.0 || ^16.10.0 || >=18.0.0" + }, + "peerDependencies": { + "node-notifier": "^8.0.1 || ^9.0.0 || ^10.0.0" + }, + "peerDependenciesMeta": { + "node-notifier": { + "optional": true + } + } + }, + "node_modules/@jest/environment": { + "version": "29.7.0", + "resolved": "https://registry.npmjs.org/@jest/environment/-/environment-29.7.0.tgz", + "integrity": "sha512-aQIfHDq33ExsN4jP1NWGXhxgQ/wixs60gDiKO+XVMd8Mn0NWPWgc34ZQDTb2jKaUWQ7MuwoitXAsN2XVXNMpAw==", + "dev": true, + "license": "MIT", + "dependencies": { + "@jest/fake-timers": "^29.7.0", + "@jest/types": "^29.6.3", + "@types/node": "*", + "jest-mock": "^29.7.0" + }, + "engines": { + "node": "^14.15.0 || ^16.10.0 || >=18.0.0" + } + }, + "node_modules/@jest/expect": { + "version": "29.7.0", + "resolved": "https://registry.npmjs.org/@jest/expect/-/expect-29.7.0.tgz", + "integrity": "sha512-8uMeAMycttpva3P1lBHB8VciS9V0XAr3GymPpipdyQXbBcuhkLQOSe8E/p92RyAdToS6ZD1tFkX+CkhoECE0dQ==", + "dev": true, + "license": "MIT", + "dependencies": { + "expect": "^29.7.0", + "jest-snapshot": "^29.7.0" + }, + "engines": { + "node": "^14.15.0 || ^16.10.0 || >=18.0.0" + } + }, + "node_modules/@jest/expect-utils": { + "version": "29.7.0", + "resolved": "https://registry.npmjs.org/@jest/expect-utils/-/expect-utils-29.7.0.tgz", + "integrity": "sha512-GlsNBWiFQFCVi9QVSx7f5AgMeLxe9YCCs5PuP2O2LdjDAA8Jh9eX7lA1Jq/xdXw3Wb3hyvlFNfZIfcRetSzYcA==", + "dev": true, + "license": "MIT", + "dependencies": { + "jest-get-type": "^29.6.3" + }, + "engines": { + "node": "^14.15.0 || ^16.10.0 || >=18.0.0" + } + }, + "node_modules/@jest/fake-timers": { + "version": "29.7.0", + "resolved": "https://registry.npmjs.org/@jest/fake-timers/-/fake-timers-29.7.0.tgz", + "integrity": "sha512-q4DH1Ha4TTFPdxLsqDXK1d3+ioSL7yL5oCMJZgDYm6i+6CygW5E5xVr/D1HdsGxjt1ZWSfUAs9OxSB/BNelWrQ==", + "dev": true, + "license": "MIT", + "dependencies": { + "@jest/types": 
"^29.6.3", + "@sinonjs/fake-timers": "^10.0.2", + "@types/node": "*", + "jest-message-util": "^29.7.0", + "jest-mock": "^29.7.0", + "jest-util": "^29.7.0" + }, + "engines": { + "node": "^14.15.0 || ^16.10.0 || >=18.0.0" + } + }, + "node_modules/@jest/globals": { + "version": "29.7.0", + "resolved": "https://registry.npmjs.org/@jest/globals/-/globals-29.7.0.tgz", + "integrity": "sha512-mpiz3dutLbkW2MNFubUGUEVLkTGiqW6yLVTA+JbP6fI6J5iL9Y0Nlg8k95pcF8ctKwCS7WVxteBs29hhfAotzQ==", + "dev": true, + "license": "MIT", + "dependencies": { + "@jest/environment": "^29.7.0", + "@jest/expect": "^29.7.0", + "@jest/types": "^29.6.3", + "jest-mock": "^29.7.0" + }, + "engines": { + "node": "^14.15.0 || ^16.10.0 || >=18.0.0" + } + }, + "node_modules/@jest/reporters": { + "version": "29.7.0", + "resolved": "https://registry.npmjs.org/@jest/reporters/-/reporters-29.7.0.tgz", + "integrity": "sha512-DApq0KJbJOEzAFYjHADNNxAE3KbhxQB1y5Kplb5Waqw6zVbuWatSnMjE5gs8FUgEPmNsnZA3NCWl9NG0ia04Pg==", + "dev": true, + "license": "MIT", + "dependencies": { + "@bcoe/v8-coverage": "^0.2.3", + "@jest/console": "^29.7.0", + "@jest/test-result": "^29.7.0", + "@jest/transform": "^29.7.0", + "@jest/types": "^29.6.3", + "@jridgewell/trace-mapping": "^0.3.18", + "@types/node": "*", + "chalk": "^4.0.0", + "collect-v8-coverage": "^1.0.0", + "exit": "^0.1.2", + "glob": "^7.1.3", + "graceful-fs": "^4.2.9", + "istanbul-lib-coverage": "^3.0.0", + "istanbul-lib-instrument": "^6.0.0", + "istanbul-lib-report": "^3.0.0", + "istanbul-lib-source-maps": "^4.0.0", + "istanbul-reports": "^3.1.3", + "jest-message-util": "^29.7.0", + "jest-util": "^29.7.0", + "jest-worker": "^29.7.0", + "slash": "^3.0.0", + "string-length": "^4.0.1", + "strip-ansi": "^6.0.0", + "v8-to-istanbul": "^9.0.1" + }, + "engines": { + "node": "^14.15.0 || ^16.10.0 || >=18.0.0" + }, + "peerDependencies": { + "node-notifier": "^8.0.1 || ^9.0.0 || ^10.0.0" + }, + "peerDependenciesMeta": { + "node-notifier": { + "optional": true + } + } + }, + 
"node_modules/@jest/schemas": { + "version": "29.6.3", + "resolved": "https://registry.npmjs.org/@jest/schemas/-/schemas-29.6.3.tgz", + "integrity": "sha512-mo5j5X+jIZmJQveBKeS/clAueipV7KgiX1vMgCxam1RNYiqE1w62n0/tJJnHtjW8ZHcQco5gY85jA3mi0L+nSA==", + "dev": true, + "license": "MIT", + "dependencies": { + "@sinclair/typebox": "^0.27.8" + }, + "engines": { + "node": "^14.15.0 || ^16.10.0 || >=18.0.0" + } + }, + "node_modules/@jest/source-map": { + "version": "29.6.3", + "resolved": "https://registry.npmjs.org/@jest/source-map/-/source-map-29.6.3.tgz", + "integrity": "sha512-MHjT95QuipcPrpLM+8JMSzFx6eHp5Bm+4XeFDJlwsvVBjmKNiIAvasGK2fxz2WbGRlnvqehFbh07MMa7n3YJnw==", + "dev": true, + "license": "MIT", + "dependencies": { + "@jridgewell/trace-mapping": "^0.3.18", + "callsites": "^3.0.0", + "graceful-fs": "^4.2.9" + }, + "engines": { + "node": "^14.15.0 || ^16.10.0 || >=18.0.0" + } + }, + "node_modules/@jest/test-result": { + "version": "29.7.0", + "resolved": "https://registry.npmjs.org/@jest/test-result/-/test-result-29.7.0.tgz", + "integrity": "sha512-Fdx+tv6x1zlkJPcWXmMDAG2HBnaR9XPSd5aDWQVsfrZmLVT3lU1cwyxLgRmXR9yrq4NBoEm9BMsfgFzTQAbJYA==", + "dev": true, + "license": "MIT", + "dependencies": { + "@jest/console": "^29.7.0", + "@jest/types": "^29.6.3", + "@types/istanbul-lib-coverage": "^2.0.0", + "collect-v8-coverage": "^1.0.0" + }, + "engines": { + "node": "^14.15.0 || ^16.10.0 || >=18.0.0" + } + }, + "node_modules/@jest/test-sequencer": { + "version": "29.7.0", + "resolved": "https://registry.npmjs.org/@jest/test-sequencer/-/test-sequencer-29.7.0.tgz", + "integrity": "sha512-GQwJ5WZVrKnOJuiYiAF52UNUJXgTZx1NHjFSEB0qEMmSZKAkdMoIzw/Cj6x6NF4AvV23AUqDpFzQkN/eYCYTxw==", + "dev": true, + "license": "MIT", + "dependencies": { + "@jest/test-result": "^29.7.0", + "graceful-fs": "^4.2.9", + "jest-haste-map": "^29.7.0", + "slash": "^3.0.0" + }, + "engines": { + "node": "^14.15.0 || ^16.10.0 || >=18.0.0" + } + }, + "node_modules/@jest/transform": { + "version": "29.7.0", + 
"resolved": "https://registry.npmjs.org/@jest/transform/-/transform-29.7.0.tgz", + "integrity": "sha512-ok/BTPFzFKVMwO5eOHRrvnBVHdRy9IrsrW1GpMaQ9MCnilNLXQKmAX8s1YXDFaai9xJpac2ySzV0YeRRECr2Vw==", + "dev": true, + "license": "MIT", + "dependencies": { + "@babel/core": "^7.11.6", + "@jest/types": "^29.6.3", + "@jridgewell/trace-mapping": "^0.3.18", + "babel-plugin-istanbul": "^6.1.1", + "chalk": "^4.0.0", + "convert-source-map": "^2.0.0", + "fast-json-stable-stringify": "^2.1.0", + "graceful-fs": "^4.2.9", + "jest-haste-map": "^29.7.0", + "jest-regex-util": "^29.6.3", + "jest-util": "^29.7.0", + "micromatch": "^4.0.4", + "pirates": "^4.0.4", + "slash": "^3.0.0", + "write-file-atomic": "^4.0.2" + }, + "engines": { + "node": "^14.15.0 || ^16.10.0 || >=18.0.0" + } + }, + "node_modules/@jest/types": { + "version": "29.6.3", + "resolved": "https://registry.npmjs.org/@jest/types/-/types-29.6.3.tgz", + "integrity": "sha512-u3UPsIilWKOM3F9CXtrG8LEJmNxwoCQC/XVj4IKYXvvpx7QIi/Kg1LI5uDmDpKlac62NUtX7eLjRh+jVZcLOzw==", + "dev": true, + "license": "MIT", + "dependencies": { + "@jest/schemas": "^29.6.3", + "@types/istanbul-lib-coverage": "^2.0.0", + "@types/istanbul-reports": "^3.0.0", + "@types/node": "*", + "@types/yargs": "^17.0.8", + "chalk": "^4.0.0" + }, + "engines": { + "node": "^14.15.0 || ^16.10.0 || >=18.0.0" + } + }, + "node_modules/@jridgewell/gen-mapping": { + "version": "0.3.8", + "resolved": "https://registry.npmjs.org/@jridgewell/gen-mapping/-/gen-mapping-0.3.8.tgz", + "integrity": "sha512-imAbBGkb+ebQyxKgzv5Hu2nmROxoDOXHh80evxdoXNOrvAnVx7zimzc1Oo5h9RlfV4vPXaE2iM5pOFbvOCClWA==", + "dev": true, + "license": "MIT", + "dependencies": { + "@jridgewell/set-array": "^1.2.1", + "@jridgewell/sourcemap-codec": "^1.4.10", + "@jridgewell/trace-mapping": "^0.3.24" + }, + "engines": { + "node": ">=6.0.0" + } + }, + "node_modules/@jridgewell/resolve-uri": { + "version": "3.1.2", + "resolved": "https://registry.npmjs.org/@jridgewell/resolve-uri/-/resolve-uri-3.1.2.tgz", + 
"integrity": "sha512-bRISgCIjP20/tbWSPWMEi54QVPRZExkuD9lJL+UIxUKtwVJA8wW1Trb1jMs1RFXo1CBTNZ/5hpC9QvmKWdopKw==", + "dev": true, + "license": "MIT", + "engines": { + "node": ">=6.0.0" + } + }, + "node_modules/@jridgewell/set-array": { + "version": "1.2.1", + "resolved": "https://registry.npmjs.org/@jridgewell/set-array/-/set-array-1.2.1.tgz", + "integrity": "sha512-R8gLRTZeyp03ymzP/6Lil/28tGeGEzhx1q2k703KGWRAI1VdvPIXdG70VJc2pAMw3NA6JKL5hhFu1sJX0Mnn/A==", + "dev": true, + "license": "MIT", + "engines": { + "node": ">=6.0.0" + } + }, + "node_modules/@jridgewell/sourcemap-codec": { + "version": "1.5.0", + "resolved": "https://registry.npmjs.org/@jridgewell/sourcemap-codec/-/sourcemap-codec-1.5.0.tgz", + "integrity": "sha512-gv3ZRaISU3fjPAgNsriBRqGWQL6quFx04YMPW/zD8XMLsU32mhCCbfbO6KZFLjvYpCZ8zyDEgqsgf+PwPaM7GQ==", + "dev": true, + "license": "MIT" + }, + "node_modules/@jridgewell/trace-mapping": { + "version": "0.3.25", + "resolved": "https://registry.npmjs.org/@jridgewell/trace-mapping/-/trace-mapping-0.3.25.tgz", + "integrity": "sha512-vNk6aEwybGtawWmy/PzwnGDOjCkLWSD2wqvjGGAgOAwCGWySYXfYoxt00IJkTF+8Lb57DwOb3Aa0o9CApepiYQ==", + "dev": true, + "license": "MIT", + "dependencies": { + "@jridgewell/resolve-uri": "^3.1.0", + "@jridgewell/sourcemap-codec": "^1.4.14" + } + }, + "node_modules/@sinclair/typebox": { + "version": "0.27.8", + "resolved": "https://registry.npmjs.org/@sinclair/typebox/-/typebox-0.27.8.tgz", + "integrity": "sha512-+Fj43pSMwJs4KRrH/938Uf+uAELIgVBmQzg/q1YG10djyfA3TnrU8N8XzqCh/okZdszqBQTZf96idMfE5lnwTA==", + "dev": true, + "license": "MIT" + }, + "node_modules/@sinonjs/commons": { + "version": "3.0.1", + "resolved": "https://registry.npmjs.org/@sinonjs/commons/-/commons-3.0.1.tgz", + "integrity": "sha512-K3mCHKQ9sVh8o1C9cxkwxaOmXoAMlDxC1mYyHrjqOWEcBjYr76t96zL2zlj5dUGZ3HSw240X1qgH3Mjf1yJWpQ==", + "dev": true, + "license": "BSD-3-Clause", + "dependencies": { + "type-detect": "4.0.8" + } + }, + "node_modules/@sinonjs/fake-timers": { + "version": "10.3.0", + 
"resolved": "https://registry.npmjs.org/@sinonjs/fake-timers/-/fake-timers-10.3.0.tgz", + "integrity": "sha512-V4BG07kuYSUkTCSBHG8G8TNhM+F19jXFWnQtzj+we8DrkpSBCee9Z3Ms8yiGer/dlmhe35/Xdgyo3/0rQKg7YA==", + "dev": true, + "license": "BSD-3-Clause", + "dependencies": { + "@sinonjs/commons": "^3.0.0" + } + }, + "node_modules/@tsconfig/node10": { + "version": "1.0.11", + "resolved": "https://registry.npmjs.org/@tsconfig/node10/-/node10-1.0.11.tgz", + "integrity": "sha512-DcRjDCujK/kCk/cUe8Xz8ZSpm8mS3mNNpta+jGCA6USEDfktlNvm1+IuZ9eTcDbNk41BHwpHHeW+N1lKCz4zOw==", + "dev": true, + "license": "MIT" + }, + "node_modules/@tsconfig/node12": { + "version": "1.0.11", + "resolved": "https://registry.npmjs.org/@tsconfig/node12/-/node12-1.0.11.tgz", + "integrity": "sha512-cqefuRsh12pWyGsIoBKJA9luFu3mRxCA+ORZvA4ktLSzIuCUtWVxGIuXigEwO5/ywWFMZ2QEGKWvkZG1zDMTag==", + "dev": true, + "license": "MIT" + }, + "node_modules/@tsconfig/node14": { + "version": "1.0.3", + "resolved": "https://registry.npmjs.org/@tsconfig/node14/-/node14-1.0.3.tgz", + "integrity": "sha512-ysT8mhdixWK6Hw3i1V2AeRqZ5WfXg1G43mqoYlM2nc6388Fq5jcXyr5mRsqViLx/GJYdoL0bfXD8nmF+Zn/Iow==", + "dev": true, + "license": "MIT" + }, + "node_modules/@tsconfig/node16": { + "version": "1.0.4", + "resolved": "https://registry.npmjs.org/@tsconfig/node16/-/node16-1.0.4.tgz", + "integrity": "sha512-vxhUy4J8lyeyinH7Azl1pdd43GJhZH/tP2weN8TntQblOY+A0XbT8DJk1/oCPuOOyg/Ja757rG0CgHcWC8OfMA==", + "dev": true, + "license": "MIT" + }, + "node_modules/@types/aws-lambda": { + "version": "8.10.149", + "resolved": "https://registry.npmjs.org/@types/aws-lambda/-/aws-lambda-8.10.149.tgz", + "integrity": "sha512-NXSZIhfJjnXqJgtS7IwutqIF/SOy1Wz5Px4gUY1RWITp3AYTyuJS4xaXr/bIJY1v15XMzrJ5soGnPM+7uigZjA==", + "dev": true, + "license": "MIT" + }, + "node_modules/@types/aws-sdk": { + "version": "0.0.42", + "resolved": "https://registry.npmjs.org/@types/aws-sdk/-/aws-sdk-0.0.42.tgz", + "integrity": 
"sha512-zIgLukZrf0/s+oAKxLMHgZFDDjDpuJ95hbE9DiNGrmNGNM7odIt99rHLWVwnOYdF0TNjF0reQeL/mcadAIqljg==", + "dev": true, + "license": "MIT", + "dependencies": { + "@types/node": "*" + } + }, + "node_modules/@types/babel__core": { + "version": "7.20.5", + "resolved": "https://registry.npmjs.org/@types/babel__core/-/babel__core-7.20.5.tgz", + "integrity": "sha512-qoQprZvz5wQFJwMDqeseRXWv3rqMvhgpbXFfVyWhbx9X47POIA6i/+dXefEmZKoAgOaTdaIgNSMqMIU61yRyzA==", + "dev": true, + "license": "MIT", + "dependencies": { + "@babel/parser": "^7.20.7", + "@babel/types": "^7.20.7", + "@types/babel__generator": "*", + "@types/babel__template": "*", + "@types/babel__traverse": "*" + } + }, + "node_modules/@types/babel__generator": { + "version": "7.27.0", + "resolved": "https://registry.npmjs.org/@types/babel__generator/-/babel__generator-7.27.0.tgz", + "integrity": "sha512-ufFd2Xi92OAVPYsy+P4n7/U7e68fex0+Ee8gSG9KX7eo084CWiQ4sdxktvdl0bOPupXtVJPY19zk6EwWqUQ8lg==", + "dev": true, + "license": "MIT", + "dependencies": { + "@babel/types": "^7.0.0" + } + }, + "node_modules/@types/babel__template": { + "version": "7.4.4", + "resolved": "https://registry.npmjs.org/@types/babel__template/-/babel__template-7.4.4.tgz", + "integrity": "sha512-h/NUaSyG5EyxBIp8YRxo4RMe2/qQgvyowRwVMzhYhBCONbW8PUsg4lkFMrhgZhUe5z3L3MiLDuvyJ/CaPa2A8A==", + "dev": true, + "license": "MIT", + "dependencies": { + "@babel/parser": "^7.1.0", + "@babel/types": "^7.0.0" + } + }, + "node_modules/@types/babel__traverse": { + "version": "7.20.7", + "resolved": "https://registry.npmjs.org/@types/babel__traverse/-/babel__traverse-7.20.7.tgz", + "integrity": "sha512-dkO5fhS7+/oos4ciWxyEyjWe48zmG6wbCheo/G2ZnHx4fs3EU6YC6UM8rk56gAjNJ9P3MTH2jo5jb92/K6wbng==", + "dev": true, + "license": "MIT", + "dependencies": { + "@babel/types": "^7.20.7" + } + }, + "node_modules/@types/graceful-fs": { + "version": "4.1.9", + "resolved": "https://registry.npmjs.org/@types/graceful-fs/-/graceful-fs-4.1.9.tgz", + "integrity": 
"sha512-olP3sd1qOEe5dXTSaFvQG+02VdRXcdytWLAZsAq1PecU8uqQAhkrnbli7DagjtXKW/Bl7YJbUsa8MPcuc8LHEQ==", + "dev": true, + "license": "MIT", + "dependencies": { + "@types/node": "*" + } + }, + "node_modules/@types/istanbul-lib-coverage": { + "version": "2.0.6", + "resolved": "https://registry.npmjs.org/@types/istanbul-lib-coverage/-/istanbul-lib-coverage-2.0.6.tgz", + "integrity": "sha512-2QF/t/auWm0lsy8XtKVPG19v3sSOQlJe/YHZgfjb/KBBHOGSV+J2q/S671rcq9uTBrLAXmZpqJiaQbMT+zNU1w==", + "dev": true, + "license": "MIT" + }, + "node_modules/@types/istanbul-lib-report": { + "version": "3.0.3", + "resolved": "https://registry.npmjs.org/@types/istanbul-lib-report/-/istanbul-lib-report-3.0.3.tgz", + "integrity": "sha512-NQn7AHQnk/RSLOxrBbGyJM/aVQ+pjj5HCgasFxc0K/KhoATfQ/47AyUl15I2yBUpihjmas+a+VJBOqecrFH+uA==", + "dev": true, + "license": "MIT", + "dependencies": { + "@types/istanbul-lib-coverage": "*" + } + }, + "node_modules/@types/istanbul-reports": { + "version": "3.0.4", + "resolved": "https://registry.npmjs.org/@types/istanbul-reports/-/istanbul-reports-3.0.4.tgz", + "integrity": "sha512-pk2B1NWalF9toCRu6gjBzR69syFjP4Od8WRAX+0mmf9lAjCRicLOWc+ZrxZHx/0XRjotgkF9t6iaMJ+aXcOdZQ==", + "dev": true, + "license": "MIT", + "dependencies": { + "@types/istanbul-lib-report": "*" + } + }, + "node_modules/@types/jest": { + "version": "29.5.14", + "resolved": "https://registry.npmjs.org/@types/jest/-/jest-29.5.14.tgz", + "integrity": "sha512-ZN+4sdnLUbo8EVvVc2ao0GFW6oVrQRPn4K2lglySj7APvSrgzxHiNNK99us4WDMi57xxA2yggblIAMNhXOotLQ==", + "dev": true, + "license": "MIT", + "dependencies": { + "expect": "^29.0.0", + "pretty-format": "^29.0.0" + } + }, + "node_modules/@types/node": { + "version": "22.5.4", + "resolved": "https://registry.npmjs.org/@types/node/-/node-22.5.4.tgz", + "integrity": "sha512-FDuKUJQm/ju9fT/SeX/6+gBzoPzlVCzfzmGkwKvRHQVxi4BntVbyIwf6a4Xn62mrvndLiml6z/UBXIdEVjQLXg==", + "dev": true, + "license": "MIT", + "dependencies": { + "undici-types": "~6.19.2" + } + }, + 
"node_modules/@types/stack-utils": { + "version": "2.0.3", + "resolved": "https://registry.npmjs.org/@types/stack-utils/-/stack-utils-2.0.3.tgz", + "integrity": "sha512-9aEbYZ3TbYMznPdcdr3SmIrLXwC/AKZXQeCf9Pgao5CKb8CyHuEX5jzWPTkvregvhRJHcpRO6BFoGW9ycaOkYw==", + "dev": true, + "license": "MIT" + }, + "node_modules/@types/yargs": { + "version": "17.0.33", + "resolved": "https://registry.npmjs.org/@types/yargs/-/yargs-17.0.33.tgz", + "integrity": "sha512-WpxBCKWPLr4xSsHgz511rFJAM+wS28w2zEO1QDNY5zM/S8ok70NNfztH0xwhqKyaK0OHCbN98LDAZuy1ctxDkA==", + "dev": true, + "license": "MIT", + "dependencies": { + "@types/yargs-parser": "*" + } + }, + "node_modules/@types/yargs-parser": { + "version": "21.0.3", + "resolved": "https://registry.npmjs.org/@types/yargs-parser/-/yargs-parser-21.0.3.tgz", + "integrity": "sha512-I4q9QU9MQv4oEOz4tAHJtNz1cwuLxn2F3xcc2iV5WdqLPpUnj30aUuxt1mAxYTG+oe8CZMV/+6rU4S4gRDzqtQ==", + "dev": true, + "license": "MIT" + }, + "node_modules/acorn": { + "version": "8.14.1", + "resolved": "https://registry.npmjs.org/acorn/-/acorn-8.14.1.tgz", + "integrity": "sha512-OvQ/2pUDKmgfCg++xsTX1wGxfTaszcHVcTctW4UJB4hibJx2HXxxO5UmVgyjMa+ZDsiaf5wWLXYpRWMmBI0QHg==", + "dev": true, + "license": "MIT", + "bin": { + "acorn": "bin/acorn" + }, + "engines": { + "node": ">=0.4.0" + } + }, + "node_modules/acorn-walk": { + "version": "8.3.4", + "resolved": "https://registry.npmjs.org/acorn-walk/-/acorn-walk-8.3.4.tgz", + "integrity": "sha512-ueEepnujpqee2o5aIYnvHU6C0A42MNdsIDeqy5BydrkuC5R1ZuUFnm27EeFJGoEHJQgn3uleRvmTXaJgfXbt4g==", + "dev": true, + "license": "MIT", + "dependencies": { + "acorn": "^8.11.0" + }, + "engines": { + "node": ">=0.4.0" + } + }, + "node_modules/ansi-escapes": { + "version": "4.3.2", + "resolved": "https://registry.npmjs.org/ansi-escapes/-/ansi-escapes-4.3.2.tgz", + "integrity": "sha512-gKXj5ALrKWQLsYG9jlTRmR/xKluxHV+Z9QEwNIgCfM1/uwPMCuzVVnh5mwTd+OuBZcwSIMbqssNWRm1lE51QaQ==", + "dev": true, + "license": "MIT", + "dependencies": { + "type-fest": "^0.21.3" + 
}, + "engines": { + "node": ">=8" + }, + "funding": { + "url": "https://github.com/sponsors/sindresorhus" + } + }, + "node_modules/ansi-regex": { + "version": "5.0.1", + "resolved": "https://registry.npmjs.org/ansi-regex/-/ansi-regex-5.0.1.tgz", + "integrity": "sha512-quJQXlTSUGL2LH9SUXo8VwsY4soanhgo6LNSm84E1LBcE8s3O0wpdiRzyR9z/ZZJMlMWv37qOOb9pdJlMUEKFQ==", + "dev": true, + "license": "MIT", + "engines": { + "node": ">=8" + } + }, + "node_modules/ansi-styles": { + "version": "4.3.0", + "resolved": "https://registry.npmjs.org/ansi-styles/-/ansi-styles-4.3.0.tgz", + "integrity": "sha512-zbB9rCJAT1rbjiVDb2hqKFHNYLxgtk8NURxZ3IZwD3F6NtxbXZQCnnSi1Lkx+IDohdPlFp222wVALIheZJQSEg==", + "dev": true, + "license": "MIT", + "dependencies": { + "color-convert": "^2.0.1" + }, + "engines": { + "node": ">=8" + }, + "funding": { + "url": "https://github.com/chalk/ansi-styles?sponsor=1" + } + }, + "node_modules/anymatch": { + "version": "3.1.3", + "resolved": "https://registry.npmjs.org/anymatch/-/anymatch-3.1.3.tgz", + "integrity": "sha512-KMReFUr0B4t+D+OBkjR3KYqvocp2XaSzO55UcB6mgQMd3KbcE+mWTyvVV7D/zsdEbNnV6acZUutkiHQXvTr1Rw==", + "dev": true, + "license": "ISC", + "dependencies": { + "normalize-path": "^3.0.0", + "picomatch": "^2.0.4" + }, + "engines": { + "node": ">= 8" + } + }, + "node_modules/arg": { + "version": "4.1.3", + "resolved": "https://registry.npmjs.org/arg/-/arg-4.1.3.tgz", + "integrity": "sha512-58S9QDqG0Xx27YwPSt9fJxivjYl432YCwfDMfZ+71RAqUrZef7LrKQZ3LHLOwCS4FLNBplP533Zx895SeOCHvA==", + "dev": true, + "license": "MIT" + }, + "node_modules/argparse": { + "version": "1.0.10", + "resolved": "https://registry.npmjs.org/argparse/-/argparse-1.0.10.tgz", + "integrity": "sha512-o5Roy6tNG4SL/FOkCAN6RzjiakZS25RLYFrcMttJqbdd8BWrnA+fGz57iN5Pb06pvBGvl5gQ0B48dJlslXvoTg==", + "dev": true, + "license": "MIT", + "dependencies": { + "sprintf-js": "~1.0.2" + } + }, + "node_modules/async": { + "version": "3.2.6", + "resolved": "https://registry.npmjs.org/async/-/async-3.2.6.tgz", + 
"integrity": "sha512-htCUDlxyyCLMgaM3xXg0C0LW2xqfuQ6p05pCEIsXuyQ+a1koYKTuBMzRNwmybfLgvJDMd0r1LTn4+E0Ti6C2AA==", + "dev": true, + "license": "MIT" + }, + "node_modules/available-typed-arrays": { + "version": "1.0.7", + "resolved": "https://registry.npmjs.org/available-typed-arrays/-/available-typed-arrays-1.0.7.tgz", + "integrity": "sha512-wvUjBtSGN7+7SjNpq/9M2Tg350UZD3q62IFZLbRAR1bSMlCo1ZaeW+BJ+D090e4hIIZLBcTDWe4Mh4jvUDajzQ==", + "license": "MIT", + "dependencies": { + "possible-typed-array-names": "^1.0.0" + }, + "engines": { + "node": ">= 0.4" + }, + "funding": { + "url": "https://github.com/sponsors/ljharb" + } + }, + "node_modules/aws-cdk": { + "version": "2.163.1", + "resolved": "https://registry.npmjs.org/aws-cdk/-/aws-cdk-2.163.1.tgz", + "integrity": "sha512-EBkiWBC3MTnkcYRLBaAPXQoZkzPyB97X21PN/YQUdCmNiz7SJT0F5kQdfKtKsY6RnYsj+pufYMb7n+R07i/t1w==", + "dev": true, + "license": "Apache-2.0", + "bin": { + "cdk": "bin/cdk" + }, + "engines": { + "node": ">= 14.15.0" + }, + "optionalDependencies": { + "fsevents": "2.3.2" + } + }, + "node_modules/aws-cdk-lib": { + "version": "2.163.1", + "resolved": "https://registry.npmjs.org/aws-cdk-lib/-/aws-cdk-lib-2.163.1.tgz", + "integrity": "sha512-Vw0fMOW6BsdQqIILFfl5qCcmZAJxuLmh1SMBIoQ34k3rUFUmKwUH7PtRXbcHK3N9Sy0WzmF70ys05YqCDdhTSQ==", + "bundleDependencies": [ + "@balena/dockerignore", + "case", + "fs-extra", + "ignore", + "jsonschema", + "minimatch", + "punycode", + "semver", + "table", + "yaml", + "mime-types" + ], + "license": "Apache-2.0", + "dependencies": { + "@aws-cdk/asset-awscli-v1": "^2.2.202", + "@aws-cdk/asset-kubectl-v20": "^2.1.2", + "@aws-cdk/asset-node-proxy-agent-v6": "^2.1.0", + "@aws-cdk/cloud-assembly-schema": "^38.0.0", + "@balena/dockerignore": "^1.0.2", + "case": "1.6.3", + "fs-extra": "^11.2.0", + "ignore": "^5.3.2", + "jsonschema": "^1.4.1", + "mime-types": "^2.1.35", + "minimatch": "^3.1.2", + "punycode": "^2.3.1", + "semver": "^7.6.3", + "table": "^6.8.2", + "yaml": "1.10.2" + }, + "engines": { + 
"node": ">= 14.15.0" + }, + "peerDependencies": { + "constructs": "^10.0.0" + } + }, + "node_modules/aws-cdk-lib/node_modules/@balena/dockerignore": { + "version": "1.0.2", + "inBundle": true, + "license": "Apache-2.0" + }, + "node_modules/aws-cdk-lib/node_modules/ajv": { + "version": "8.17.1", + "inBundle": true, + "license": "MIT", + "dependencies": { + "fast-deep-equal": "^3.1.3", + "fast-uri": "^3.0.1", + "json-schema-traverse": "^1.0.0", + "require-from-string": "^2.0.2" + }, + "funding": { + "type": "github", + "url": "https://github.com/sponsors/epoberezkin" + } + }, + "node_modules/aws-cdk-lib/node_modules/ansi-regex": { + "version": "5.0.1", + "inBundle": true, + "license": "MIT", + "engines": { + "node": ">=8" + } + }, + "node_modules/aws-cdk-lib/node_modules/ansi-styles": { + "version": "4.3.0", + "inBundle": true, + "license": "MIT", + "dependencies": { + "color-convert": "^2.0.1" + }, + "engines": { + "node": ">=8" + }, + "funding": { + "url": "https://github.com/chalk/ansi-styles?sponsor=1" + } + }, + "node_modules/aws-cdk-lib/node_modules/astral-regex": { + "version": "2.0.0", + "inBundle": true, + "license": "MIT", + "engines": { + "node": ">=8" + } + }, + "node_modules/aws-cdk-lib/node_modules/balanced-match": { + "version": "1.0.2", + "inBundle": true, + "license": "MIT" + }, + "node_modules/aws-cdk-lib/node_modules/brace-expansion": { + "version": "1.1.11", + "inBundle": true, + "license": "MIT", + "dependencies": { + "balanced-match": "^1.0.0", + "concat-map": "0.0.1" + } + }, + "node_modules/aws-cdk-lib/node_modules/case": { + "version": "1.6.3", + "inBundle": true, + "license": "(MIT OR GPL-3.0-or-later)", + "engines": { + "node": ">= 0.8.0" + } + }, + "node_modules/aws-cdk-lib/node_modules/color-convert": { + "version": "2.0.1", + "inBundle": true, + "license": "MIT", + "dependencies": { + "color-name": "~1.1.4" + }, + "engines": { + "node": ">=7.0.0" + } + }, + "node_modules/aws-cdk-lib/node_modules/color-name": { + "version": "1.1.4", + 
"inBundle": true, + "license": "MIT" + }, + "node_modules/aws-cdk-lib/node_modules/concat-map": { + "version": "0.0.1", + "inBundle": true, + "license": "MIT" + }, + "node_modules/aws-cdk-lib/node_modules/emoji-regex": { + "version": "8.0.0", + "inBundle": true, + "license": "MIT" + }, + "node_modules/aws-cdk-lib/node_modules/fast-deep-equal": { + "version": "3.1.3", + "inBundle": true, + "license": "MIT" + }, + "node_modules/aws-cdk-lib/node_modules/fast-uri": { + "version": "3.0.1", + "inBundle": true, + "license": "MIT" + }, + "node_modules/aws-cdk-lib/node_modules/fs-extra": { + "version": "11.2.0", + "inBundle": true, + "license": "MIT", + "dependencies": { + "graceful-fs": "^4.2.0", + "jsonfile": "^6.0.1", + "universalify": "^2.0.0" + }, + "engines": { + "node": ">=14.14" + } + }, + "node_modules/aws-cdk-lib/node_modules/graceful-fs": { + "version": "4.2.11", + "inBundle": true, + "license": "ISC" + }, + "node_modules/aws-cdk-lib/node_modules/ignore": { + "version": "5.3.2", + "inBundle": true, + "license": "MIT", + "engines": { + "node": ">= 4" + } + }, + "node_modules/aws-cdk-lib/node_modules/is-fullwidth-code-point": { + "version": "3.0.0", + "inBundle": true, + "license": "MIT", + "engines": { + "node": ">=8" + } + }, + "node_modules/aws-cdk-lib/node_modules/json-schema-traverse": { + "version": "1.0.0", + "inBundle": true, + "license": "MIT" + }, + "node_modules/aws-cdk-lib/node_modules/jsonfile": { + "version": "6.1.0", + "inBundle": true, + "license": "MIT", + "dependencies": { + "universalify": "^2.0.0" + }, + "optionalDependencies": { + "graceful-fs": "^4.1.6" + } + }, + "node_modules/aws-cdk-lib/node_modules/jsonschema": { + "version": "1.4.1", + "inBundle": true, + "license": "MIT", + "engines": { + "node": "*" + } + }, + "node_modules/aws-cdk-lib/node_modules/lodash.truncate": { + "version": "4.4.2", + "inBundle": true, + "license": "MIT" + }, + "node_modules/aws-cdk-lib/node_modules/mime-db": { + "version": "1.52.0", + "inBundle": true, + 
"license": "MIT", + "engines": { + "node": ">= 0.6" + } + }, + "node_modules/aws-cdk-lib/node_modules/mime-types": { + "version": "2.1.35", + "inBundle": true, + "license": "MIT", + "dependencies": { + "mime-db": "1.52.0" + }, + "engines": { + "node": ">= 0.6" + } + }, + "node_modules/aws-cdk-lib/node_modules/minimatch": { + "version": "3.1.2", + "inBundle": true, + "license": "ISC", + "dependencies": { + "brace-expansion": "^1.1.7" + }, + "engines": { + "node": "*" + } + }, + "node_modules/aws-cdk-lib/node_modules/punycode": { + "version": "2.3.1", + "inBundle": true, + "license": "MIT", + "engines": { + "node": ">=6" + } + }, + "node_modules/aws-cdk-lib/node_modules/require-from-string": { + "version": "2.0.2", + "inBundle": true, + "license": "MIT", + "engines": { + "node": ">=0.10.0" + } + }, + "node_modules/aws-cdk-lib/node_modules/semver": { + "version": "7.6.3", + "inBundle": true, + "license": "ISC", + "bin": { + "semver": "bin/semver.js" + }, + "engines": { + "node": ">=10" + } + }, + "node_modules/aws-cdk-lib/node_modules/slice-ansi": { + "version": "4.0.0", + "inBundle": true, + "license": "MIT", + "dependencies": { + "ansi-styles": "^4.0.0", + "astral-regex": "^2.0.0", + "is-fullwidth-code-point": "^3.0.0" + }, + "engines": { + "node": ">=10" + }, + "funding": { + "url": "https://github.com/chalk/slice-ansi?sponsor=1" + } + }, + "node_modules/aws-cdk-lib/node_modules/string-width": { + "version": "4.2.3", + "inBundle": true, + "license": "MIT", + "dependencies": { + "emoji-regex": "^8.0.0", + "is-fullwidth-code-point": "^3.0.0", + "strip-ansi": "^6.0.1" + }, + "engines": { + "node": ">=8" + } + }, + "node_modules/aws-cdk-lib/node_modules/strip-ansi": { + "version": "6.0.1", + "inBundle": true, + "license": "MIT", + "dependencies": { + "ansi-regex": "^5.0.1" + }, + "engines": { + "node": ">=8" + } + }, + "node_modules/aws-cdk-lib/node_modules/table": { + "version": "6.8.2", + "inBundle": true, + "license": "BSD-3-Clause", + "dependencies": { + "ajv": 
"^8.0.1", + "lodash.truncate": "^4.4.2", + "slice-ansi": "^4.0.0", + "string-width": "^4.2.3", + "strip-ansi": "^6.0.1" + }, + "engines": { + "node": ">=10.0.0" + } + }, + "node_modules/aws-cdk-lib/node_modules/universalify": { + "version": "2.0.1", + "inBundle": true, + "license": "MIT", + "engines": { + "node": ">= 10.0.0" + } + }, + "node_modules/aws-cdk-lib/node_modules/yaml": { + "version": "1.10.2", + "inBundle": true, + "license": "ISC", + "engines": { + "node": ">= 6" + } + }, + "node_modules/aws-sdk": { + "version": "2.1692.0", + "resolved": "https://registry.npmjs.org/aws-sdk/-/aws-sdk-2.1692.0.tgz", + "integrity": "sha512-x511uiJ/57FIsbgUe5csJ13k3uzu25uWQE+XqfBis/sB0SFoiElJWXRkgEAUh0U6n40eT3ay5Ue4oPkRMu1LYw==", + "hasInstallScript": true, + "license": "Apache-2.0", + "dependencies": { + "buffer": "4.9.2", + "events": "1.1.1", + "ieee754": "1.1.13", + "jmespath": "0.16.0", + "querystring": "0.2.0", + "sax": "1.2.1", + "url": "0.10.3", + "util": "^0.12.4", + "uuid": "8.0.0", + "xml2js": "0.6.2" + }, + "engines": { + "node": ">= 10.0.0" + } + }, + "node_modules/babel-jest": { + "version": "29.7.0", + "resolved": "https://registry.npmjs.org/babel-jest/-/babel-jest-29.7.0.tgz", + "integrity": "sha512-BrvGY3xZSwEcCzKvKsCi2GgHqDqsYkOP4/by5xCgIwGXQxIEh+8ew3gmrE1y7XRR6LHZIj6yLYnUi/mm2KXKBg==", + "dev": true, + "license": "MIT", + "dependencies": { + "@jest/transform": "^29.7.0", + "@types/babel__core": "^7.1.14", + "babel-plugin-istanbul": "^6.1.1", + "babel-preset-jest": "^29.6.3", + "chalk": "^4.0.0", + "graceful-fs": "^4.2.9", + "slash": "^3.0.0" + }, + "engines": { + "node": "^14.15.0 || ^16.10.0 || >=18.0.0" + }, + "peerDependencies": { + "@babel/core": "^7.8.0" + } + }, + "node_modules/babel-plugin-istanbul": { + "version": "6.1.1", + "resolved": "https://registry.npmjs.org/babel-plugin-istanbul/-/babel-plugin-istanbul-6.1.1.tgz", + "integrity": "sha512-Y1IQok9821cC9onCx5otgFfRm7Lm+I+wwxOx738M/WLPZ9Q42m4IG5W0FNX8WLL2gYMZo3JkuXIH2DOpWM+qwA==", + "dev": true, 
+ "license": "BSD-3-Clause", + "dependencies": { + "@babel/helper-plugin-utils": "^7.0.0", + "@istanbuljs/load-nyc-config": "^1.0.0", + "@istanbuljs/schema": "^0.1.2", + "istanbul-lib-instrument": "^5.0.4", + "test-exclude": "^6.0.0" + }, + "engines": { + "node": ">=8" + } + }, + "node_modules/babel-plugin-istanbul/node_modules/istanbul-lib-instrument": { + "version": "5.2.1", + "resolved": "https://registry.npmjs.org/istanbul-lib-instrument/-/istanbul-lib-instrument-5.2.1.tgz", + "integrity": "sha512-pzqtp31nLv/XFOzXGuvhCb8qhjmTVo5vjVk19XE4CRlSWz0KoeJ3bw9XsA7nOp9YBf4qHjwBxkDzKcME/J29Yg==", + "dev": true, + "license": "BSD-3-Clause", + "dependencies": { + "@babel/core": "^7.12.3", + "@babel/parser": "^7.14.7", + "@istanbuljs/schema": "^0.1.2", + "istanbul-lib-coverage": "^3.2.0", + "semver": "^6.3.0" + }, + "engines": { + "node": ">=8" + } + }, + "node_modules/babel-plugin-jest-hoist": { + "version": "29.6.3", + "resolved": "https://registry.npmjs.org/babel-plugin-jest-hoist/-/babel-plugin-jest-hoist-29.6.3.tgz", + "integrity": "sha512-ESAc/RJvGTFEzRwOTT4+lNDk/GNHMkKbNzsvT0qKRfDyyYTskxB5rnU2njIDYVxXCBHHEI1c0YwHob3WaYujOg==", + "dev": true, + "license": "MIT", + "dependencies": { + "@babel/template": "^7.3.3", + "@babel/types": "^7.3.3", + "@types/babel__core": "^7.1.14", + "@types/babel__traverse": "^7.0.6" + }, + "engines": { + "node": "^14.15.0 || ^16.10.0 || >=18.0.0" + } + }, + "node_modules/babel-preset-current-node-syntax": { + "version": "1.1.0", + "resolved": "https://registry.npmjs.org/babel-preset-current-node-syntax/-/babel-preset-current-node-syntax-1.1.0.tgz", + "integrity": "sha512-ldYss8SbBlWva1bs28q78Ju5Zq1F+8BrqBZZ0VFhLBvhh6lCpC2o3gDJi/5DRLs9FgYZCnmPYIVFU4lRXCkyUw==", + "dev": true, + "license": "MIT", + "dependencies": { + "@babel/plugin-syntax-async-generators": "^7.8.4", + "@babel/plugin-syntax-bigint": "^7.8.3", + "@babel/plugin-syntax-class-properties": "^7.12.13", + "@babel/plugin-syntax-class-static-block": "^7.14.5", + 
"@babel/plugin-syntax-import-attributes": "^7.24.7", + "@babel/plugin-syntax-import-meta": "^7.10.4", + "@babel/plugin-syntax-json-strings": "^7.8.3", + "@babel/plugin-syntax-logical-assignment-operators": "^7.10.4", + "@babel/plugin-syntax-nullish-coalescing-operator": "^7.8.3", + "@babel/plugin-syntax-numeric-separator": "^7.10.4", + "@babel/plugin-syntax-object-rest-spread": "^7.8.3", + "@babel/plugin-syntax-optional-catch-binding": "^7.8.3", + "@babel/plugin-syntax-optional-chaining": "^7.8.3", + "@babel/plugin-syntax-private-property-in-object": "^7.14.5", + "@babel/plugin-syntax-top-level-await": "^7.14.5" + }, + "peerDependencies": { + "@babel/core": "^7.0.0" + } + }, + "node_modules/babel-preset-jest": { + "version": "29.6.3", + "resolved": "https://registry.npmjs.org/babel-preset-jest/-/babel-preset-jest-29.6.3.tgz", + "integrity": "sha512-0B3bhxR6snWXJZtR/RliHTDPRgn1sNHOR0yVtq/IiQFyuOVjFS+wuio/R4gSNkyYmKmJB4wGZv2NZanmKmTnNA==", + "dev": true, + "license": "MIT", + "dependencies": { + "babel-plugin-jest-hoist": "^29.6.3", + "babel-preset-current-node-syntax": "^1.0.0" + }, + "engines": { + "node": "^14.15.0 || ^16.10.0 || >=18.0.0" + }, + "peerDependencies": { + "@babel/core": "^7.0.0" + } + }, + "node_modules/balanced-match": { + "version": "1.0.2", + "resolved": "https://registry.npmjs.org/balanced-match/-/balanced-match-1.0.2.tgz", + "integrity": "sha512-3oSeUO0TMV67hN1AmbXsK4yaqU7tjiHlbxRDZOpH0KW9+CeX4bRAaX0Anxt0tx2MrpRpWwQaPwIlISEJhYU5Pw==", + "dev": true, + "license": "MIT" + }, + "node_modules/base64-js": { + "version": "1.5.1", + "resolved": "https://registry.npmjs.org/base64-js/-/base64-js-1.5.1.tgz", + "integrity": "sha512-AKpaYlHn8t4SVbOHCy+b5+KKgvR4vrsD8vbvrbiQJps7fKDTkjkDry6ji0rUJjC0kzbNePLwzxq8iypo41qeWA==", + "funding": [ + { + "type": "github", + "url": "https://github.com/sponsors/feross" + }, + { + "type": "patreon", + "url": "https://www.patreon.com/feross" + }, + { + "type": "consulting", + "url": "https://feross.org/support" + } + ], + 
"license": "MIT" + }, + "node_modules/brace-expansion": { + "version": "1.1.11", + "resolved": "https://registry.npmjs.org/brace-expansion/-/brace-expansion-1.1.11.tgz", + "integrity": "sha512-iCuPHDFgrHX7H2vEI/5xpz07zSHB00TpugqhmYtVmMO6518mCuRMoOYFldEBl0g187ufozdaHgWKcYFb61qGiA==", + "dev": true, + "license": "MIT", + "dependencies": { + "balanced-match": "^1.0.0", + "concat-map": "0.0.1" + } + }, + "node_modules/braces": { + "version": "3.0.3", + "resolved": "https://registry.npmjs.org/braces/-/braces-3.0.3.tgz", + "integrity": "sha512-yQbXgO/OSZVD2IsiLlro+7Hf6Q18EJrKSEsdoMzKePKXct3gvD8oLcOQdIzGupr5Fj+EDe8gO/lxc1BzfMpxvA==", + "dev": true, + "license": "MIT", + "dependencies": { + "fill-range": "^7.1.1" + }, + "engines": { + "node": ">=8" + } + }, + "node_modules/browserslist": { + "version": "4.24.4", + "resolved": "https://registry.npmjs.org/browserslist/-/browserslist-4.24.4.tgz", + "integrity": "sha512-KDi1Ny1gSePi1vm0q4oxSF8b4DR44GF4BbmS2YdhPLOEqd8pDviZOGH/GsmRwoWJ2+5Lr085X7naowMwKHDG1A==", + "dev": true, + "funding": [ + { + "type": "opencollective", + "url": "https://opencollective.com/browserslist" + }, + { + "type": "tidelift", + "url": "https://tidelift.com/funding/github/npm/browserslist" + }, + { + "type": "github", + "url": "https://github.com/sponsors/ai" + } + ], + "license": "MIT", + "dependencies": { + "caniuse-lite": "^1.0.30001688", + "electron-to-chromium": "^1.5.73", + "node-releases": "^2.0.19", + "update-browserslist-db": "^1.1.1" + }, + "bin": { + "browserslist": "cli.js" + }, + "engines": { + "node": "^6 || ^7 || ^8 || ^9 || ^10 || ^11 || ^12 || >=13.7" + } + }, + "node_modules/bs-logger": { + "version": "0.2.6", + "resolved": "https://registry.npmjs.org/bs-logger/-/bs-logger-0.2.6.tgz", + "integrity": "sha512-pd8DCoxmbgc7hyPKOvxtqNcjYoOsABPQdcCUjGp3d42VR2CX1ORhk2A87oqqu5R1kk+76nsxZupkmyd+MVtCog==", + "dev": true, + "license": "MIT", + "dependencies": { + "fast-json-stable-stringify": "2.x" + }, + "engines": { + "node": ">= 6" + } + }, + 
"node_modules/bser": { + "version": "2.1.1", + "resolved": "https://registry.npmjs.org/bser/-/bser-2.1.1.tgz", + "integrity": "sha512-gQxTNE/GAfIIrmHLUE3oJyp5FO6HRBfhjnw4/wMmA63ZGDJnWBmgY/lyQBpnDUkGmAhbSe39tx2d/iTOAfglwQ==", + "dev": true, + "license": "Apache-2.0", + "dependencies": { + "node-int64": "^0.4.0" + } + }, + "node_modules/buffer": { + "version": "4.9.2", + "resolved": "https://registry.npmjs.org/buffer/-/buffer-4.9.2.tgz", + "integrity": "sha512-xq+q3SRMOxGivLhBNaUdC64hDTQwejJ+H0T/NB1XMtTVEwNTrfFF3gAxiyW0Bu/xWEGhjVKgUcMhCrUy2+uCWg==", + "license": "MIT", + "dependencies": { + "base64-js": "^1.0.2", + "ieee754": "^1.1.4", + "isarray": "^1.0.0" + } + }, + "node_modules/buffer-from": { + "version": "1.1.2", + "resolved": "https://registry.npmjs.org/buffer-from/-/buffer-from-1.1.2.tgz", + "integrity": "sha512-E+XQCRwSbaaiChtv6k6Dwgc+bx+Bs6vuKJHHl5kox/BaKbhiXzqQOwK4cO22yElGp2OCmjwVhT3HmxgyPGnJfQ==", + "license": "MIT" + }, + "node_modules/call-bind": { + "version": "1.0.8", + "resolved": "https://registry.npmjs.org/call-bind/-/call-bind-1.0.8.tgz", + "integrity": "sha512-oKlSFMcMwpUg2ednkhQ454wfWiU/ul3CkJe/PEHcTKuiX6RpbehUiFMXu13HalGZxfUwCQzZG747YXBn1im9ww==", + "license": "MIT", + "dependencies": { + "call-bind-apply-helpers": "^1.0.0", + "es-define-property": "^1.0.0", + "get-intrinsic": "^1.2.4", + "set-function-length": "^1.2.2" + }, + "engines": { + "node": ">= 0.4" + }, + "funding": { + "url": "https://github.com/sponsors/ljharb" + } + }, + "node_modules/call-bind-apply-helpers": { + "version": "1.0.2", + "resolved": "https://registry.npmjs.org/call-bind-apply-helpers/-/call-bind-apply-helpers-1.0.2.tgz", + "integrity": "sha512-Sp1ablJ0ivDkSzjcaJdxEunN5/XvksFJ2sMBFfq6x0ryhQV/2b/KwFe21cMpmHtPOSij8K99/wSfoEuTObmuMQ==", + "license": "MIT", + "dependencies": { + "es-errors": "^1.3.0", + "function-bind": "^1.1.2" + }, + "engines": { + "node": ">= 0.4" + } + }, + "node_modules/call-bound": { + "version": "1.0.4", + "resolved": 
"https://registry.npmjs.org/call-bound/-/call-bound-1.0.4.tgz", + "integrity": "sha512-+ys997U96po4Kx/ABpBCqhA9EuxJaQWDQg7295H4hBphv3IZg0boBKuwYpt4YXp6MZ5AmZQnU/tyMTlRpaSejg==", + "license": "MIT", + "dependencies": { + "call-bind-apply-helpers": "^1.0.2", + "get-intrinsic": "^1.3.0" + }, + "engines": { + "node": ">= 0.4" + }, + "funding": { + "url": "https://github.com/sponsors/ljharb" + } + }, + "node_modules/callsites": { + "version": "3.1.0", + "resolved": "https://registry.npmjs.org/callsites/-/callsites-3.1.0.tgz", + "integrity": "sha512-P8BjAsXvZS+VIDUI11hHCQEv74YT67YUi5JJFNWIqL235sBmjX4+qx9Muvls5ivyNENctx46xQLQ3aTuE7ssaQ==", + "dev": true, + "license": "MIT", + "engines": { + "node": ">=6" + } + }, + "node_modules/camelcase": { + "version": "5.3.1", + "resolved": "https://registry.npmjs.org/camelcase/-/camelcase-5.3.1.tgz", + "integrity": "sha512-L28STB170nwWS63UjtlEOE3dldQApaJXZkOI1uMFfzf3rRuPegHaHesyee+YxQ+W6SvRDQV6UrdOdRiR153wJg==", + "dev": true, + "license": "MIT", + "engines": { + "node": ">=6" + } + }, + "node_modules/caniuse-lite": { + "version": "1.0.30001715", + "resolved": "https://registry.npmjs.org/caniuse-lite/-/caniuse-lite-1.0.30001715.tgz", + "integrity": "sha512-7ptkFGMm2OAOgvZpwgA4yjQ5SQbrNVGdRjzH0pBdy1Fasvcr+KAeECmbCAECzTuDuoX0FCY8KzUxjf9+9kfZEw==", + "dev": true, + "funding": [ + { + "type": "opencollective", + "url": "https://opencollective.com/browserslist" + }, + { + "type": "tidelift", + "url": "https://tidelift.com/funding/github/npm/caniuse-lite" + }, + { + "type": "github", + "url": "https://github.com/sponsors/ai" + } + ], + "license": "CC-BY-4.0" + }, + "node_modules/chalk": { + "version": "4.1.2", + "resolved": "https://registry.npmjs.org/chalk/-/chalk-4.1.2.tgz", + "integrity": "sha512-oKnbhFyRIXpUuez8iBMmyEa4nbj4IOQyuhc/wy9kY7/WVPcwIO9VA668Pu8RkO7+0G76SLROeyw9CpQ061i4mA==", + "dev": true, + "license": "MIT", + "dependencies": { + "ansi-styles": "^4.1.0", + "supports-color": "^7.1.0" + }, + "engines": { + "node": ">=10" + }, 
+ "funding": { + "url": "https://github.com/chalk/chalk?sponsor=1" + } + }, + "node_modules/char-regex": { + "version": "1.0.2", + "resolved": "https://registry.npmjs.org/char-regex/-/char-regex-1.0.2.tgz", + "integrity": "sha512-kWWXztvZ5SBQV+eRgKFeh8q5sLuZY2+8WUIzlxWVTg+oGwY14qylx1KbKzHd8P6ZYkAg0xyIDU9JMHhyJMZ1jw==", + "dev": true, + "license": "MIT", + "engines": { + "node": ">=10" + } + }, + "node_modules/ci-info": { + "version": "3.9.0", + "resolved": "https://registry.npmjs.org/ci-info/-/ci-info-3.9.0.tgz", + "integrity": "sha512-NIxF55hv4nSqQswkAeiOi1r83xy8JldOFDTWiug55KBu9Jnblncd2U6ViHmYgHf01TPZS77NJBhBMKdWj9HQMQ==", + "dev": true, + "funding": [ + { + "type": "github", + "url": "https://github.com/sponsors/sibiraj-s" + } + ], + "license": "MIT", + "engines": { + "node": ">=8" + } + }, + "node_modules/cjs-module-lexer": { + "version": "1.4.3", + "resolved": "https://registry.npmjs.org/cjs-module-lexer/-/cjs-module-lexer-1.4.3.tgz", + "integrity": "sha512-9z8TZaGM1pfswYeXrUpzPrkx8UnWYdhJclsiYMm6x/w5+nN+8Tf/LnAgfLGQCm59qAOxU8WwHEq2vNwF6i4j+Q==", + "dev": true, + "license": "MIT" + }, + "node_modules/cliui": { + "version": "8.0.1", + "resolved": "https://registry.npmjs.org/cliui/-/cliui-8.0.1.tgz", + "integrity": "sha512-BSeNnyus75C4//NQ9gQt1/csTXyo/8Sb+afLAkzAptFuMsod9HFokGNudZpi/oQV73hnVK+sR+5PVRMd+Dr7YQ==", + "dev": true, + "license": "ISC", + "dependencies": { + "string-width": "^4.2.0", + "strip-ansi": "^6.0.1", + "wrap-ansi": "^7.0.0" + }, + "engines": { + "node": ">=12" + } + }, + "node_modules/co": { + "version": "4.6.0", + "resolved": "https://registry.npmjs.org/co/-/co-4.6.0.tgz", + "integrity": "sha512-QVb0dM5HvG+uaxitm8wONl7jltx8dqhfU33DcqtOZcLSVIKSDDLDi7+0LbAKiyI8hD9u42m2YxXSkMGWThaecQ==", + "dev": true, + "license": "MIT", + "engines": { + "iojs": ">= 1.0.0", + "node": ">= 0.12.0" + } + }, + "node_modules/collect-v8-coverage": { + "version": "1.0.2", + "resolved": "https://registry.npmjs.org/collect-v8-coverage/-/collect-v8-coverage-1.0.2.tgz", + 
"integrity": "sha512-lHl4d5/ONEbLlJvaJNtsF/Lz+WvB07u2ycqTYbdrq7UypDXailES4valYb2eWiJFxZlVmpGekfqoxQhzyFdT4Q==", + "dev": true, + "license": "MIT" + }, + "node_modules/color-convert": { + "version": "2.0.1", + "resolved": "https://registry.npmjs.org/color-convert/-/color-convert-2.0.1.tgz", + "integrity": "sha512-RRECPsj7iu/xb5oKYcsFHSppFNnsj/52OVTRKb4zP5onXwVF3zVmmToNcOfGC+CRDpfK/U584fMg38ZHCaElKQ==", + "dev": true, + "license": "MIT", + "dependencies": { + "color-name": "~1.1.4" + }, + "engines": { + "node": ">=7.0.0" + } + }, + "node_modules/color-name": { + "version": "1.1.4", + "resolved": "https://registry.npmjs.org/color-name/-/color-name-1.1.4.tgz", + "integrity": "sha512-dOy+3AuW3a2wNbZHIuMZpTcgjGuLU/uBL/ubcZF9OXbDo8ff4O8yVp5Bf0efS8uEoYo5q4Fx7dY9OgQGXgAsQA==", + "dev": true, + "license": "MIT" + }, + "node_modules/concat-map": { + "version": "0.0.1", + "resolved": "https://registry.npmjs.org/concat-map/-/concat-map-0.0.1.tgz", + "integrity": "sha512-/Srv4dswyQNBfohGpz9o6Yb3Gz3SrUDqBH5rTuhGR7ahtlbYKnVxw2bCFMRljaA7EXHaXZ8wsHdodFvbkhKmqg==", + "dev": true, + "license": "MIT" + }, + "node_modules/constructs": { + "version": "10.4.2", + "resolved": "https://registry.npmjs.org/constructs/-/constructs-10.4.2.tgz", + "integrity": "sha512-wsNxBlAott2qg8Zv87q3eYZYgheb9lchtBfjHzzLHtXbttwSrHPs1NNQbBrmbb1YZvYg2+Vh0Dor76w4mFxJkA==", + "license": "Apache-2.0" + }, + "node_modules/convert-source-map": { + "version": "2.0.0", + "resolved": "https://registry.npmjs.org/convert-source-map/-/convert-source-map-2.0.0.tgz", + "integrity": "sha512-Kvp459HrV2FEJ1CAsi1Ku+MY3kasH19TFykTz2xWmMeq6bk2NU3XXvfJ+Q61m0xktWwt+1HSYf3JZsTms3aRJg==", + "dev": true, + "license": "MIT" + }, + "node_modules/create-jest": { + "version": "29.7.0", + "resolved": "https://registry.npmjs.org/create-jest/-/create-jest-29.7.0.tgz", + "integrity": "sha512-Adz2bdH0Vq3F53KEMJOoftQFutWCukm6J24wbPWRO4k1kMY7gS7ds/uoJkNuV8wDCtWWnuwGcJwpWcih+zEW1Q==", + "dev": true, + "license": "MIT", + "dependencies": { + 
"@jest/types": "^29.6.3", + "chalk": "^4.0.0", + "exit": "^0.1.2", + "graceful-fs": "^4.2.9", + "jest-config": "^29.7.0", + "jest-util": "^29.7.0", + "prompts": "^2.0.1" + }, + "bin": { + "create-jest": "bin/create-jest.js" + }, + "engines": { + "node": "^14.15.0 || ^16.10.0 || >=18.0.0" + } + }, + "node_modules/create-require": { + "version": "1.1.1", + "resolved": "https://registry.npmjs.org/create-require/-/create-require-1.1.1.tgz", + "integrity": "sha512-dcKFX3jn0MpIaXjisoRvexIJVEKzaq7z2rZKxf+MSr9TkdmHmsU4m2lcLojrj/FHl8mk5VxMmYA+ftRkP/3oKQ==", + "dev": true, + "license": "MIT" + }, + "node_modules/cross-spawn": { + "version": "7.0.6", + "resolved": "https://registry.npmjs.org/cross-spawn/-/cross-spawn-7.0.6.tgz", + "integrity": "sha512-uV2QOWP2nWzsy2aMp8aRibhi9dlzF5Hgh5SHaB9OiTGEyDTiJJyx0uy51QXdyWbtAHNua4XJzUKca3OzKUd3vA==", + "dev": true, + "license": "MIT", + "dependencies": { + "path-key": "^3.1.0", + "shebang-command": "^2.0.0", + "which": "^2.0.1" + }, + "engines": { + "node": ">= 8" + } + }, + "node_modules/debug": { + "version": "4.4.0", + "resolved": "https://registry.npmjs.org/debug/-/debug-4.4.0.tgz", + "integrity": "sha512-6WTZ/IxCY/T6BALoZHaE4ctp9xm+Z5kY/pzYaCHRFeyVhojxlrm+46y68HA6hr0TcwEssoxNiDEUJQjfPZ/RYA==", + "dev": true, + "license": "MIT", + "dependencies": { + "ms": "^2.1.3" + }, + "engines": { + "node": ">=6.0" + }, + "peerDependenciesMeta": { + "supports-color": { + "optional": true + } + } + }, + "node_modules/dedent": { + "version": "1.5.3", + "resolved": "https://registry.npmjs.org/dedent/-/dedent-1.5.3.tgz", + "integrity": "sha512-NHQtfOOW68WD8lgypbLA5oT+Bt0xXJhiYvoR6SmmNXZfpzOGXwdKWmcwG8N7PwVVWV3eF/68nmD9BaJSsTBhyQ==", + "dev": true, + "license": "MIT", + "peerDependencies": { + "babel-plugin-macros": "^3.1.0" + }, + "peerDependenciesMeta": { + "babel-plugin-macros": { + "optional": true + } + } + }, + "node_modules/deepmerge": { + "version": "4.3.1", + "resolved": "https://registry.npmjs.org/deepmerge/-/deepmerge-4.3.1.tgz", + 
"integrity": "sha512-3sUqbMEc77XqpdNO7FRyRog+eW3ph+GYCbj+rK+uYyRMuwsVy0rMiVtPn+QJlKFvWP/1PYpapqYn0Me2knFn+A==", + "dev": true, + "license": "MIT", + "engines": { + "node": ">=0.10.0" + } + }, + "node_modules/define-data-property": { + "version": "1.1.4", + "resolved": "https://registry.npmjs.org/define-data-property/-/define-data-property-1.1.4.tgz", + "integrity": "sha512-rBMvIzlpA8v6E+SJZoo++HAYqsLrkg7MSfIinMPFhmkorw7X+dOXVJQs+QT69zGkzMyfDnIMN2Wid1+NbL3T+A==", + "license": "MIT", + "dependencies": { + "es-define-property": "^1.0.0", + "es-errors": "^1.3.0", + "gopd": "^1.0.1" + }, + "engines": { + "node": ">= 0.4" + }, + "funding": { + "url": "https://github.com/sponsors/ljharb" + } + }, + "node_modules/detect-newline": { + "version": "3.1.0", + "resolved": "https://registry.npmjs.org/detect-newline/-/detect-newline-3.1.0.tgz", + "integrity": "sha512-TLz+x/vEXm/Y7P7wn1EJFNLxYpUD4TgMosxY6fAVJUnJMbupHBOncxyWUG9OpTaH9EBD7uFI5LfEgmMOc54DsA==", + "dev": true, + "license": "MIT", + "engines": { + "node": ">=8" + } + }, + "node_modules/diff": { + "version": "4.0.2", + "resolved": "https://registry.npmjs.org/diff/-/diff-4.0.2.tgz", + "integrity": "sha512-58lmxKSA4BNyLz+HHMUzlOEpg09FV+ev6ZMe3vJihgdxzgcwZ8VoEEPmALCZG9LmqfVoNMMKpttIYTVG6uDY7A==", + "dev": true, + "license": "BSD-3-Clause", + "engines": { + "node": ">=0.3.1" + } + }, + "node_modules/diff-sequences": { + "version": "29.6.3", + "resolved": "https://registry.npmjs.org/diff-sequences/-/diff-sequences-29.6.3.tgz", + "integrity": "sha512-EjePK1srD3P08o2j4f0ExnylqRs5B9tJjcp9t1krH2qRi8CCdsYfwe9JgSLurFBWwq4uOlipzfk5fHNvwFKr8Q==", + "dev": true, + "license": "MIT", + "engines": { + "node": "^14.15.0 || ^16.10.0 || >=18.0.0" + } + }, + "node_modules/dunder-proto": { + "version": "1.0.1", + "resolved": "https://registry.npmjs.org/dunder-proto/-/dunder-proto-1.0.1.tgz", + "integrity": "sha512-KIN/nDJBQRcXw0MLVhZE9iQHmG68qAVIBg9CqmUYjmQIhgij9U5MFvrqkUL5FbtyyzZuOeOt0zdeRe4UY7ct+A==", + "license": "MIT", + "dependencies": { 
+ "call-bind-apply-helpers": "^1.0.1", + "es-errors": "^1.3.0", + "gopd": "^1.2.0" + }, + "engines": { + "node": ">= 0.4" + } + }, + "node_modules/ejs": { + "version": "3.1.10", + "resolved": "https://registry.npmjs.org/ejs/-/ejs-3.1.10.tgz", + "integrity": "sha512-UeJmFfOrAQS8OJWPZ4qtgHyWExa088/MtK5UEyoJGFH67cDEXkZSviOiKRCZ4Xij0zxI3JECgYs3oKx+AizQBA==", + "dev": true, + "license": "Apache-2.0", + "dependencies": { + "jake": "^10.8.5" + }, + "bin": { + "ejs": "bin/cli.js" + }, + "engines": { + "node": ">=0.10.0" + } + }, + "node_modules/electron-to-chromium": { + "version": "1.5.140", + "resolved": "https://registry.npmjs.org/electron-to-chromium/-/electron-to-chromium-1.5.140.tgz", + "integrity": "sha512-o82Rj+ONp4Ip7Cl1r7lrqx/pXhbp/lh9DpKcMNscFJdh8ebyRofnc7Sh01B4jx403RI0oqTBvlZ7OBIZLMr2+Q==", + "dev": true, + "license": "ISC" + }, + "node_modules/emittery": { + "version": "0.13.1", + "resolved": "https://registry.npmjs.org/emittery/-/emittery-0.13.1.tgz", + "integrity": "sha512-DeWwawk6r5yR9jFgnDKYt4sLS0LmHJJi3ZOnb5/JdbYwj3nW+FxQnHIjhBKz8YLC7oRNPVM9NQ47I3CVx34eqQ==", + "dev": true, + "license": "MIT", + "engines": { + "node": ">=12" + }, + "funding": { + "url": "https://github.com/sindresorhus/emittery?sponsor=1" + } + }, + "node_modules/emoji-regex": { + "version": "8.0.0", + "resolved": "https://registry.npmjs.org/emoji-regex/-/emoji-regex-8.0.0.tgz", + "integrity": "sha512-MSjYzcWNOA0ewAHpz0MxpYFvwg6yjy1NG3xteoqz644VCo/RPgnr1/GGt+ic3iJTzQ8Eu3TdM14SawnVUmGE6A==", + "dev": true, + "license": "MIT" + }, + "node_modules/error-ex": { + "version": "1.3.2", + "resolved": "https://registry.npmjs.org/error-ex/-/error-ex-1.3.2.tgz", + "integrity": "sha512-7dFHNmqeFSEt2ZBsCriorKnn3Z2pj+fd9kmI6QoWw4//DL+icEBfc0U7qJCisqrTsKTjw4fNFy2pW9OqStD84g==", + "dev": true, + "license": "MIT", + "dependencies": { + "is-arrayish": "^0.2.1" + } + }, + "node_modules/es-define-property": { + "version": "1.0.1", + "resolved": 
"https://registry.npmjs.org/es-define-property/-/es-define-property-1.0.1.tgz", + "integrity": "sha512-e3nRfgfUZ4rNGL232gUgX06QNyyez04KdjFrF+LTRoOXmrOgFKDg4BCdsjW8EnT69eqdYGmRpJwiPVYNrCaW3g==", + "license": "MIT", + "engines": { + "node": ">= 0.4" + } + }, + "node_modules/es-errors": { + "version": "1.3.0", + "resolved": "https://registry.npmjs.org/es-errors/-/es-errors-1.3.0.tgz", + "integrity": "sha512-Zf5H2Kxt2xjTvbJvP2ZWLEICxA6j+hAmMzIlypy4xcBg1vKVnx89Wy0GbS+kf5cwCVFFzdCFh2XSCFNULS6csw==", + "license": "MIT", + "engines": { + "node": ">= 0.4" + } + }, + "node_modules/es-object-atoms": { + "version": "1.1.1", + "resolved": "https://registry.npmjs.org/es-object-atoms/-/es-object-atoms-1.1.1.tgz", + "integrity": "sha512-FGgH2h8zKNim9ljj7dankFPcICIK9Cp5bm+c2gQSYePhpaG5+esrLODihIorn+Pe6FGJzWhXQotPv73jTaldXA==", + "license": "MIT", + "dependencies": { + "es-errors": "^1.3.0" + }, + "engines": { + "node": ">= 0.4" + } + }, + "node_modules/escalade": { + "version": "3.2.0", + "resolved": "https://registry.npmjs.org/escalade/-/escalade-3.2.0.tgz", + "integrity": "sha512-WUj2qlxaQtO4g6Pq5c29GTcWGDyd8itL8zTlipgECz3JesAiiOKotd8JU6otB3PACgG6xkJUyVhboMS+bje/jA==", + "dev": true, + "license": "MIT", + "engines": { + "node": ">=6" + } + }, + "node_modules/escape-string-regexp": { + "version": "2.0.0", + "resolved": "https://registry.npmjs.org/escape-string-regexp/-/escape-string-regexp-2.0.0.tgz", + "integrity": "sha512-UpzcLCXolUWcNu5HtVMHYdXJjArjsF9C0aNnquZYY4uW/Vu0miy5YoWvbV345HauVvcAUnpRuhMMcqTcGOY2+w==", + "dev": true, + "license": "MIT", + "engines": { + "node": ">=8" + } + }, + "node_modules/esprima": { + "version": "4.0.1", + "resolved": "https://registry.npmjs.org/esprima/-/esprima-4.0.1.tgz", + "integrity": "sha512-eGuFFw7Upda+g4p+QHvnW0RyTX/SVeJBDM/gCtMARO0cLuT2HcEKnTPvhjV6aGeqrCB/sbNop0Kszm0jsaWU4A==", + "dev": true, + "license": "BSD-2-Clause", + "bin": { + "esparse": "bin/esparse.js", + "esvalidate": "bin/esvalidate.js" + }, + "engines": { + "node": ">=4" + } + 
}, + "node_modules/events": { + "version": "1.1.1", + "resolved": "https://registry.npmjs.org/events/-/events-1.1.1.tgz", + "integrity": "sha512-kEcvvCBByWXGnZy6JUlgAp2gBIUjfCAV6P6TgT1/aaQKcmuAEC4OZTV1I4EWQLz2gxZw76atuVyvHhTxvi0Flw==", + "license": "MIT", + "engines": { + "node": ">=0.4.x" + } + }, + "node_modules/execa": { + "version": "5.1.1", + "resolved": "https://registry.npmjs.org/execa/-/execa-5.1.1.tgz", + "integrity": "sha512-8uSpZZocAZRBAPIEINJj3Lo9HyGitllczc27Eh5YYojjMFMn8yHMDMaUHE2Jqfq05D/wucwI4JGURyXt1vchyg==", + "dev": true, + "license": "MIT", + "dependencies": { + "cross-spawn": "^7.0.3", + "get-stream": "^6.0.0", + "human-signals": "^2.1.0", + "is-stream": "^2.0.0", + "merge-stream": "^2.0.0", + "npm-run-path": "^4.0.1", + "onetime": "^5.1.2", + "signal-exit": "^3.0.3", + "strip-final-newline": "^2.0.0" + }, + "engines": { + "node": ">=10" + }, + "funding": { + "url": "https://github.com/sindresorhus/execa?sponsor=1" + } + }, + "node_modules/exit": { + "version": "0.1.2", + "resolved": "https://registry.npmjs.org/exit/-/exit-0.1.2.tgz", + "integrity": "sha512-Zk/eNKV2zbjpKzrsQ+n1G6poVbErQxJ0LBOJXaKZ1EViLzH+hrLu9cdXI4zw9dBQJslwBEpbQ2P1oS7nDxs6jQ==", + "dev": true, + "engines": { + "node": ">= 0.8.0" + } + }, + "node_modules/expect": { + "version": "29.7.0", + "resolved": "https://registry.npmjs.org/expect/-/expect-29.7.0.tgz", + "integrity": "sha512-2Zks0hf1VLFYI1kbh0I5jP3KHHyCHpkfyHBzsSXRFgl/Bg9mWYfMW8oD+PdMPlEwy5HNsR9JutYy6pMeOh61nw==", + "dev": true, + "license": "MIT", + "dependencies": { + "@jest/expect-utils": "^29.7.0", + "jest-get-type": "^29.6.3", + "jest-matcher-utils": "^29.7.0", + "jest-message-util": "^29.7.0", + "jest-util": "^29.7.0" + }, + "engines": { + "node": "^14.15.0 || ^16.10.0 || >=18.0.0" + } + }, + "node_modules/fast-json-stable-stringify": { + "version": "2.1.0", + "resolved": "https://registry.npmjs.org/fast-json-stable-stringify/-/fast-json-stable-stringify-2.1.0.tgz", + "integrity": 
"sha512-lhd/wF+Lk98HZoTCtlVraHtfh5XYijIjalXck7saUtuanSDyLMxnHhSXEDJqHxD7msR8D0uCmqlkwjCV8xvwHw==", + "dev": true, + "license": "MIT" + }, + "node_modules/fb-watchman": { + "version": "2.0.2", + "resolved": "https://registry.npmjs.org/fb-watchman/-/fb-watchman-2.0.2.tgz", + "integrity": "sha512-p5161BqbuCaSnB8jIbzQHOlpgsPmK5rJVDfDKO91Axs5NC1uu3HRQm6wt9cd9/+GtQQIO53JdGXXoyDpTAsgYA==", + "dev": true, + "license": "Apache-2.0", + "dependencies": { + "bser": "2.1.1" + } + }, + "node_modules/filelist": { + "version": "1.0.4", + "resolved": "https://registry.npmjs.org/filelist/-/filelist-1.0.4.tgz", + "integrity": "sha512-w1cEuf3S+DrLCQL7ET6kz+gmlJdbq9J7yXCSjK/OZCPA+qEN1WyF4ZAf0YYJa4/shHJra2t/d/r8SV4Ji+x+8Q==", + "dev": true, + "license": "Apache-2.0", + "dependencies": { + "minimatch": "^5.0.1" + } + }, + "node_modules/filelist/node_modules/brace-expansion": { + "version": "2.0.1", + "resolved": "https://registry.npmjs.org/brace-expansion/-/brace-expansion-2.0.1.tgz", + "integrity": "sha512-XnAIvQ8eM+kC6aULx6wuQiwVsnzsi9d3WxzV3FpWTGA19F621kwdbsAcFKXgKUHZWsy+mY6iL1sHTxWEFCytDA==", + "dev": true, + "license": "MIT", + "dependencies": { + "balanced-match": "^1.0.0" + } + }, + "node_modules/filelist/node_modules/minimatch": { + "version": "5.1.6", + "resolved": "https://registry.npmjs.org/minimatch/-/minimatch-5.1.6.tgz", + "integrity": "sha512-lKwV/1brpG6mBUFHtb7NUmtABCb2WZZmm2wNiOA5hAb8VdCS4B3dtMWyvcoViccwAW/COERjXLt0zP1zXUN26g==", + "dev": true, + "license": "ISC", + "dependencies": { + "brace-expansion": "^2.0.1" + }, + "engines": { + "node": ">=10" + } + }, + "node_modules/fill-range": { + "version": "7.1.1", + "resolved": "https://registry.npmjs.org/fill-range/-/fill-range-7.1.1.tgz", + "integrity": "sha512-YsGpe3WHLK8ZYi4tWDg2Jy3ebRz2rXowDxnld4bkQB00cc/1Zw9AWnC0i9ztDJitivtQvaI9KaLyKrc+hBW0yg==", + "dev": true, + "license": "MIT", + "dependencies": { + "to-regex-range": "^5.0.1" + }, + "engines": { + "node": ">=8" + } + }, + "node_modules/find-up": { + "version": 
"4.1.0", + "resolved": "https://registry.npmjs.org/find-up/-/find-up-4.1.0.tgz", + "integrity": "sha512-PpOwAdQ/YlXQ2vj8a3h8IipDuYRi3wceVQQGYWxNINccq40Anw7BlsEXCMbt1Zt+OLA6Fq9suIpIWD0OsnISlw==", + "dev": true, + "license": "MIT", + "dependencies": { + "locate-path": "^5.0.0", + "path-exists": "^4.0.0" + }, + "engines": { + "node": ">=8" + } + }, + "node_modules/for-each": { + "version": "0.3.5", + "resolved": "https://registry.npmjs.org/for-each/-/for-each-0.3.5.tgz", + "integrity": "sha512-dKx12eRCVIzqCxFGplyFKJMPvLEWgmNtUrpTiJIR5u97zEhRG8ySrtboPHZXx7daLxQVrl643cTzbab2tkQjxg==", + "license": "MIT", + "dependencies": { + "is-callable": "^1.2.7" + }, + "engines": { + "node": ">= 0.4" + }, + "funding": { + "url": "https://github.com/sponsors/ljharb" + } + }, + "node_modules/fs.realpath": { + "version": "1.0.0", + "resolved": "https://registry.npmjs.org/fs.realpath/-/fs.realpath-1.0.0.tgz", + "integrity": "sha512-OO0pH2lK6a0hZnAdau5ItzHPI6pUlvI7jMVnxUQRtw4owF2wk8lOSabtGDCTP4Ggrg2MbGnWO9X8K1t4+fGMDw==", + "dev": true, + "license": "ISC" + }, + "node_modules/fsevents": { + "version": "2.3.2", + "resolved": "https://registry.npmjs.org/fsevents/-/fsevents-2.3.2.tgz", + "integrity": "sha512-xiqMQR4xAeHTuB9uWm+fFRcIOgKBMiOBP+eXiyT7jsgVCq1bkVygt00oASowB7EdtpOHaaPgKt812P9ab+DDKA==", + "dev": true, + "hasInstallScript": true, + "license": "MIT", + "optional": true, + "os": [ + "darwin" + ], + "engines": { + "node": "^8.16.0 || ^10.6.0 || >=11.0.0" + } + }, + "node_modules/function-bind": { + "version": "1.1.2", + "resolved": "https://registry.npmjs.org/function-bind/-/function-bind-1.1.2.tgz", + "integrity": "sha512-7XHNxH7qX9xG5mIwxkhumTox/MIRNcOgDrxWsMt2pAr23WHp6MrRlN7FBSFpCpr+oVO0F744iUgR82nJMfG2SA==", + "license": "MIT", + "funding": { + "url": "https://github.com/sponsors/ljharb" + } + }, + "node_modules/gensync": { + "version": "1.0.0-beta.2", + "resolved": "https://registry.npmjs.org/gensync/-/gensync-1.0.0-beta.2.tgz", + "integrity": 
"sha512-3hN7NaskYvMDLQY55gnW3NQ+mesEAepTqlg+VEbj7zzqEMBVNhzcGYYeqFo/TlYz6eQiFcp1HcsCZO+nGgS8zg==", + "dev": true, + "license": "MIT", + "engines": { + "node": ">=6.9.0" + } + }, + "node_modules/get-caller-file": { + "version": "2.0.5", + "resolved": "https://registry.npmjs.org/get-caller-file/-/get-caller-file-2.0.5.tgz", + "integrity": "sha512-DyFP3BM/3YHTQOCUL/w0OZHR0lpKeGrxotcHWcqNEdnltqFwXVfhEBQ94eIo34AfQpo0rGki4cyIiftY06h2Fg==", + "dev": true, + "license": "ISC", + "engines": { + "node": "6.* || 8.* || >= 10.*" + } + }, + "node_modules/get-intrinsic": { + "version": "1.3.0", + "resolved": "https://registry.npmjs.org/get-intrinsic/-/get-intrinsic-1.3.0.tgz", + "integrity": "sha512-9fSjSaos/fRIVIp+xSJlE6lfwhES7LNtKaCBIamHsjr2na1BiABJPo0mOjjz8GJDURarmCPGqaiVg5mfjb98CQ==", + "license": "MIT", + "dependencies": { + "call-bind-apply-helpers": "^1.0.2", + "es-define-property": "^1.0.1", + "es-errors": "^1.3.0", + "es-object-atoms": "^1.1.1", + "function-bind": "^1.1.2", + "get-proto": "^1.0.1", + "gopd": "^1.2.0", + "has-symbols": "^1.1.0", + "hasown": "^2.0.2", + "math-intrinsics": "^1.1.0" + }, + "engines": { + "node": ">= 0.4" + }, + "funding": { + "url": "https://github.com/sponsors/ljharb" + } + }, + "node_modules/get-package-type": { + "version": "0.1.0", + "resolved": "https://registry.npmjs.org/get-package-type/-/get-package-type-0.1.0.tgz", + "integrity": "sha512-pjzuKtY64GYfWizNAJ0fr9VqttZkNiK2iS430LtIHzjBEr6bX8Am2zm4sW4Ro5wjWW5cAlRL1qAMTcXbjNAO2Q==", + "dev": true, + "license": "MIT", + "engines": { + "node": ">=8.0.0" + } + }, + "node_modules/get-proto": { + "version": "1.0.1", + "resolved": "https://registry.npmjs.org/get-proto/-/get-proto-1.0.1.tgz", + "integrity": "sha512-sTSfBjoXBp89JvIKIefqw7U2CCebsc74kiY6awiGogKtoSGbgjYE/G/+l9sF3MWFPNc9IcoOC4ODfKHfxFmp0g==", + "license": "MIT", + "dependencies": { + "dunder-proto": "^1.0.1", + "es-object-atoms": "^1.0.0" + }, + "engines": { + "node": ">= 0.4" + } + }, + "node_modules/get-stream": { + "version": 
"6.0.1", + "resolved": "https://registry.npmjs.org/get-stream/-/get-stream-6.0.1.tgz", + "integrity": "sha512-ts6Wi+2j3jQjqi70w5AlN8DFnkSwC+MqmxEzdEALB2qXZYV3X/b1CTfgPLGJNMeAWxdPfU8FO1ms3NUfaHCPYg==", + "dev": true, + "license": "MIT", + "engines": { + "node": ">=10" + }, + "funding": { + "url": "https://github.com/sponsors/sindresorhus" + } + }, + "node_modules/glob": { + "version": "7.2.3", + "resolved": "https://registry.npmjs.org/glob/-/glob-7.2.3.tgz", + "integrity": "sha512-nFR0zLpU2YCaRxwoCJvL6UvCH2JFyFVIvwTLsIf21AuHlMskA1hhTdk+LlYJtOlYt9v6dvszD2BGRqBL+iQK9Q==", + "deprecated": "Glob versions prior to v9 are no longer supported", + "dev": true, + "license": "ISC", + "dependencies": { + "fs.realpath": "^1.0.0", + "inflight": "^1.0.4", + "inherits": "2", + "minimatch": "^3.1.1", + "once": "^1.3.0", + "path-is-absolute": "^1.0.0" + }, + "engines": { + "node": "*" + }, + "funding": { + "url": "https://github.com/sponsors/isaacs" + } + }, + "node_modules/globals": { + "version": "11.12.0", + "resolved": "https://registry.npmjs.org/globals/-/globals-11.12.0.tgz", + "integrity": "sha512-WOBp/EEGUiIsJSp7wcv/y6MO+lV9UoncWqxuFfm8eBwzWNgyfBd6Gz+IeKQ9jCmyhoH99g15M3T+QaVHFjizVA==", + "dev": true, + "license": "MIT", + "engines": { + "node": ">=4" + } + }, + "node_modules/gopd": { + "version": "1.2.0", + "resolved": "https://registry.npmjs.org/gopd/-/gopd-1.2.0.tgz", + "integrity": "sha512-ZUKRh6/kUFoAiTAtTYPZJ3hw9wNxx+BIBOijnlG9PnrJsCcSjs1wyyD6vJpaYtgnzDrKYRSqf3OO6Rfa93xsRg==", + "license": "MIT", + "engines": { + "node": ">= 0.4" + }, + "funding": { + "url": "https://github.com/sponsors/ljharb" + } + }, + "node_modules/graceful-fs": { + "version": "4.2.11", + "resolved": "https://registry.npmjs.org/graceful-fs/-/graceful-fs-4.2.11.tgz", + "integrity": "sha512-RbJ5/jmFcNNCcDV5o9eTnBLJ/HszWV0P73bc+Ff4nS/rJj+YaS6IGyiOL0VoBYX+l1Wrl3k63h/KrH+nhJ0XvQ==", + "dev": true, + "license": "ISC" + }, + "node_modules/has-flag": { + "version": "4.0.0", + "resolved": 
"https://registry.npmjs.org/has-flag/-/has-flag-4.0.0.tgz", + "integrity": "sha512-EykJT/Q1KjTWctppgIAgfSO0tKVuZUjhgMr17kqTumMl6Afv3EISleU7qZUzoXDFTAHTDC4NOoG/ZxU3EvlMPQ==", + "dev": true, + "license": "MIT", + "engines": { + "node": ">=8" + } + }, + "node_modules/has-property-descriptors": { + "version": "1.0.2", + "resolved": "https://registry.npmjs.org/has-property-descriptors/-/has-property-descriptors-1.0.2.tgz", + "integrity": "sha512-55JNKuIW+vq4Ke1BjOTjM2YctQIvCT7GFzHwmfZPGo5wnrgkid0YQtnAleFSqumZm4az3n2BS+erby5ipJdgrg==", + "license": "MIT", + "dependencies": { + "es-define-property": "^1.0.0" + }, + "funding": { + "url": "https://github.com/sponsors/ljharb" + } + }, + "node_modules/has-symbols": { + "version": "1.1.0", + "resolved": "https://registry.npmjs.org/has-symbols/-/has-symbols-1.1.0.tgz", + "integrity": "sha512-1cDNdwJ2Jaohmb3sg4OmKaMBwuC48sYni5HUw2DvsC8LjGTLK9h+eb1X6RyuOHe4hT0ULCW68iomhjUoKUqlPQ==", + "license": "MIT", + "engines": { + "node": ">= 0.4" + }, + "funding": { + "url": "https://github.com/sponsors/ljharb" + } + }, + "node_modules/has-tostringtag": { + "version": "1.0.2", + "resolved": "https://registry.npmjs.org/has-tostringtag/-/has-tostringtag-1.0.2.tgz", + "integrity": "sha512-NqADB8VjPFLM2V0VvHUewwwsw0ZWBaIdgo+ieHtK3hasLz4qeCRjYcqfB6AQrBggRKppKF8L52/VqdVsO47Dlw==", + "license": "MIT", + "dependencies": { + "has-symbols": "^1.0.3" + }, + "engines": { + "node": ">= 0.4" + }, + "funding": { + "url": "https://github.com/sponsors/ljharb" + } + }, + "node_modules/hasown": { + "version": "2.0.2", + "resolved": "https://registry.npmjs.org/hasown/-/hasown-2.0.2.tgz", + "integrity": "sha512-0hJU9SCPvmMzIBdZFqNPXWa6dqh7WdH0cII9y+CyS8rG3nL48Bclra9HmKhVVUHyPWNH5Y7xDwAB7bfgSjkUMQ==", + "license": "MIT", + "dependencies": { + "function-bind": "^1.1.2" + }, + "engines": { + "node": ">= 0.4" + } + }, + "node_modules/html-escaper": { + "version": "2.0.2", + "resolved": "https://registry.npmjs.org/html-escaper/-/html-escaper-2.0.2.tgz", + 
"integrity": "sha512-H2iMtd0I4Mt5eYiapRdIDjp+XzelXQ0tFE4JS7YFwFevXXMmOp9myNrUvCg0D6ws8iqkRPBfKHgbwig1SmlLfg==", + "dev": true, + "license": "MIT" + }, + "node_modules/human-signals": { + "version": "2.1.0", + "resolved": "https://registry.npmjs.org/human-signals/-/human-signals-2.1.0.tgz", + "integrity": "sha512-B4FFZ6q/T2jhhksgkbEW3HBvWIfDW85snkQgawt07S7J5QXTk6BkNV+0yAeZrM5QpMAdYlocGoljn0sJ/WQkFw==", + "dev": true, + "license": "Apache-2.0", + "engines": { + "node": ">=10.17.0" + } + }, + "node_modules/ieee754": { + "version": "1.1.13", + "resolved": "https://registry.npmjs.org/ieee754/-/ieee754-1.1.13.tgz", + "integrity": "sha512-4vf7I2LYV/HaWerSo3XmlMkp5eZ83i+/CDluXi/IGTs/O1sejBNhTtnxzmRZfvOUqj7lZjqHkeTvpgSFDlWZTg==", + "license": "BSD-3-Clause" + }, + "node_modules/import-local": { + "version": "3.2.0", + "resolved": "https://registry.npmjs.org/import-local/-/import-local-3.2.0.tgz", + "integrity": "sha512-2SPlun1JUPWoM6t3F0dw0FkCF/jWY8kttcY4f599GLTSjh2OCuuhdTkJQsEcZzBqbXZGKMK2OqW1oZsjtf/gQA==", + "dev": true, + "license": "MIT", + "dependencies": { + "pkg-dir": "^4.2.0", + "resolve-cwd": "^3.0.0" + }, + "bin": { + "import-local-fixture": "fixtures/cli.js" + }, + "engines": { + "node": ">=8" + }, + "funding": { + "url": "https://github.com/sponsors/sindresorhus" + } + }, + "node_modules/imurmurhash": { + "version": "0.1.4", + "resolved": "https://registry.npmjs.org/imurmurhash/-/imurmurhash-0.1.4.tgz", + "integrity": "sha512-JmXMZ6wuvDmLiHEml9ykzqO6lwFbof0GG4IkcGaENdCRDDmMVnny7s5HsIgHCbaq0w2MyPhDqkhTUgS2LU2PHA==", + "dev": true, + "license": "MIT", + "engines": { + "node": ">=0.8.19" + } + }, + "node_modules/inflight": { + "version": "1.0.6", + "resolved": "https://registry.npmjs.org/inflight/-/inflight-1.0.6.tgz", + "integrity": "sha512-k92I/b08q4wvFscXCLvqfsHCrjrF7yiXsQuIVvVE7N82W3+aqpzuUdBbfhWcy/FZR3/4IgflMgKLOsvPDrGCJA==", + "deprecated": "This module is not supported, and leaks memory. Do not use it. 
Check out lru-cache if you want a good and tested way to coalesce async requests by a key value, which is much more comprehensive and powerful.", + "dev": true, + "license": "ISC", + "dependencies": { + "once": "^1.3.0", + "wrappy": "1" + } + }, + "node_modules/inherits": { + "version": "2.0.4", + "resolved": "https://registry.npmjs.org/inherits/-/inherits-2.0.4.tgz", + "integrity": "sha512-k/vGaX4/Yla3WzyMCvTQOXYeIHvqOKtnqBduzTHpzpQZzAskKMhZ2K+EnBiSM9zGSoIFeMpXKxa4dYeZIQqewQ==", + "license": "ISC" + }, + "node_modules/is-arguments": { + "version": "1.2.0", + "resolved": "https://registry.npmjs.org/is-arguments/-/is-arguments-1.2.0.tgz", + "integrity": "sha512-7bVbi0huj/wrIAOzb8U1aszg9kdi3KN/CyU19CTI7tAoZYEZoL9yCDXpbXN+uPsuWnP02cyug1gleqq+TU+YCA==", + "license": "MIT", + "dependencies": { + "call-bound": "^1.0.2", + "has-tostringtag": "^1.0.2" + }, + "engines": { + "node": ">= 0.4" + }, + "funding": { + "url": "https://github.com/sponsors/ljharb" + } + }, + "node_modules/is-arrayish": { + "version": "0.2.1", + "resolved": "https://registry.npmjs.org/is-arrayish/-/is-arrayish-0.2.1.tgz", + "integrity": "sha512-zz06S8t0ozoDXMG+ube26zeCTNXcKIPJZJi8hBrF4idCLms4CG9QtK7qBl1boi5ODzFpjswb5JPmHCbMpjaYzg==", + "dev": true, + "license": "MIT" + }, + "node_modules/is-callable": { + "version": "1.2.7", + "resolved": "https://registry.npmjs.org/is-callable/-/is-callable-1.2.7.tgz", + "integrity": "sha512-1BC0BVFhS/p0qtw6enp8e+8OD0UrK0oFLztSjNzhcKA3WDuJxxAPXzPuPtKkjEY9UUoEWlX/8fgKeu2S8i9JTA==", + "license": "MIT", + "engines": { + "node": ">= 0.4" + }, + "funding": { + "url": "https://github.com/sponsors/ljharb" + } + }, + "node_modules/is-core-module": { + "version": "2.16.1", + "resolved": "https://registry.npmjs.org/is-core-module/-/is-core-module-2.16.1.tgz", + "integrity": "sha512-UfoeMA6fIJ8wTYFEUjelnaGI67v6+N7qXJEvQuIGa99l4xsCruSYOVSQ0uPANn4dAzm8lkYPaKLrrijLq7x23w==", + "dev": true, + "license": "MIT", + "dependencies": { + "hasown": "^2.0.2" + }, + "engines": { + "node": 
">= 0.4" + }, + "funding": { + "url": "https://github.com/sponsors/ljharb" + } + }, + "node_modules/is-fullwidth-code-point": { + "version": "3.0.0", + "resolved": "https://registry.npmjs.org/is-fullwidth-code-point/-/is-fullwidth-code-point-3.0.0.tgz", + "integrity": "sha512-zymm5+u+sCsSWyD9qNaejV3DFvhCKclKdizYaJUuHA83RLjb7nSuGnddCHGv0hk+KY7BMAlsWeK4Ueg6EV6XQg==", + "dev": true, + "license": "MIT", + "engines": { + "node": ">=8" + } + }, + "node_modules/is-generator-fn": { + "version": "2.1.0", + "resolved": "https://registry.npmjs.org/is-generator-fn/-/is-generator-fn-2.1.0.tgz", + "integrity": "sha512-cTIB4yPYL/Grw0EaSzASzg6bBy9gqCofvWN8okThAYIxKJZC+udlRAmGbM0XLeniEJSs8uEgHPGuHSe1XsOLSQ==", + "dev": true, + "license": "MIT", + "engines": { + "node": ">=6" + } + }, + "node_modules/is-generator-function": { + "version": "1.1.0", + "resolved": "https://registry.npmjs.org/is-generator-function/-/is-generator-function-1.1.0.tgz", + "integrity": "sha512-nPUB5km40q9e8UfN/Zc24eLlzdSf9OfKByBw9CIdw4H1giPMeA0OIJvbchsCu4npfI2QcMVBsGEBHKZ7wLTWmQ==", + "license": "MIT", + "dependencies": { + "call-bound": "^1.0.3", + "get-proto": "^1.0.0", + "has-tostringtag": "^1.0.2", + "safe-regex-test": "^1.1.0" + }, + "engines": { + "node": ">= 0.4" + }, + "funding": { + "url": "https://github.com/sponsors/ljharb" + } + }, + "node_modules/is-number": { + "version": "7.0.0", + "resolved": "https://registry.npmjs.org/is-number/-/is-number-7.0.0.tgz", + "integrity": "sha512-41Cifkg6e8TylSpdtTpeLVMqvSBEVzTttHvERD741+pnZ8ANv0004MRL43QKPDlK9cGvNp6NZWZUBlbGXYxxng==", + "dev": true, + "license": "MIT", + "engines": { + "node": ">=0.12.0" + } + }, + "node_modules/is-regex": { + "version": "1.2.1", + "resolved": "https://registry.npmjs.org/is-regex/-/is-regex-1.2.1.tgz", + "integrity": "sha512-MjYsKHO5O7mCsmRGxWcLWheFqN9DJ/2TmngvjKXihe6efViPqc274+Fx/4fYj/r03+ESvBdTXK0V6tA3rgez1g==", + "license": "MIT", + "dependencies": { + "call-bound": "^1.0.2", + "gopd": "^1.2.0", + "has-tostringtag": "^1.0.2", 
+ "hasown": "^2.0.2" + }, + "engines": { + "node": ">= 0.4" + }, + "funding": { + "url": "https://github.com/sponsors/ljharb" + } + }, + "node_modules/is-stream": { + "version": "2.0.1", + "resolved": "https://registry.npmjs.org/is-stream/-/is-stream-2.0.1.tgz", + "integrity": "sha512-hFoiJiTl63nn+kstHGBtewWSKnQLpyb155KHheA1l39uvtO9nWIop1p3udqPcUd/xbF1VLMO4n7OI6p7RbngDg==", + "dev": true, + "license": "MIT", + "engines": { + "node": ">=8" + }, + "funding": { + "url": "https://github.com/sponsors/sindresorhus" + } + }, + "node_modules/is-typed-array": { + "version": "1.1.15", + "resolved": "https://registry.npmjs.org/is-typed-array/-/is-typed-array-1.1.15.tgz", + "integrity": "sha512-p3EcsicXjit7SaskXHs1hA91QxgTw46Fv6EFKKGS5DRFLD8yKnohjF3hxoju94b/OcMZoQukzpPpBE9uLVKzgQ==", + "license": "MIT", + "dependencies": { + "which-typed-array": "^1.1.16" + }, + "engines": { + "node": ">= 0.4" + }, + "funding": { + "url": "https://github.com/sponsors/ljharb" + } + }, + "node_modules/isarray": { + "version": "1.0.0", + "resolved": "https://registry.npmjs.org/isarray/-/isarray-1.0.0.tgz", + "integrity": "sha512-VLghIWNM6ELQzo7zwmcg0NmTVyWKYjvIeM83yjp0wRDTmUnrM678fQbcKBo6n2CJEF0szoG//ytg+TKla89ALQ==", + "license": "MIT" + }, + "node_modules/isexe": { + "version": "2.0.0", + "resolved": "https://registry.npmjs.org/isexe/-/isexe-2.0.0.tgz", + "integrity": "sha512-RHxMLp9lnKHGHRng9QFhRCMbYAcVpn69smSGcq3f36xjgVVWThj4qqLbTLlq7Ssj8B+fIQ1EuCEGI2lKsyQeIw==", + "dev": true, + "license": "ISC" + }, + "node_modules/istanbul-lib-coverage": { + "version": "3.2.2", + "resolved": "https://registry.npmjs.org/istanbul-lib-coverage/-/istanbul-lib-coverage-3.2.2.tgz", + "integrity": "sha512-O8dpsF+r0WV/8MNRKfnmrtCWhuKjxrq2w+jpzBL5UZKTi2LeVWnWOmWRxFlesJONmc+wLAGvKQZEOanko0LFTg==", + "dev": true, + "license": "BSD-3-Clause", + "engines": { + "node": ">=8" + } + }, + "node_modules/istanbul-lib-instrument": { + "version": "6.0.3", + "resolved": 
"https://registry.npmjs.org/istanbul-lib-instrument/-/istanbul-lib-instrument-6.0.3.tgz", + "integrity": "sha512-Vtgk7L/R2JHyyGW07spoFlB8/lpjiOLTjMdms6AFMraYt3BaJauod/NGrfnVG/y4Ix1JEuMRPDPEj2ua+zz1/Q==", + "dev": true, + "license": "BSD-3-Clause", + "dependencies": { + "@babel/core": "^7.23.9", + "@babel/parser": "^7.23.9", + "@istanbuljs/schema": "^0.1.3", + "istanbul-lib-coverage": "^3.2.0", + "semver": "^7.5.4" + }, + "engines": { + "node": ">=10" + } + }, + "node_modules/istanbul-lib-instrument/node_modules/semver": { + "version": "7.7.1", + "resolved": "https://registry.npmjs.org/semver/-/semver-7.7.1.tgz", + "integrity": "sha512-hlq8tAfn0m/61p4BVRcPzIGr6LKiMwo4VM6dGi6pt4qcRkmNzTcWq6eCEjEh+qXjkMDvPlOFFSGwQjoEa6gyMA==", + "dev": true, + "license": "ISC", + "bin": { + "semver": "bin/semver.js" + }, + "engines": { + "node": ">=10" + } + }, + "node_modules/istanbul-lib-report": { + "version": "3.0.1", + "resolved": "https://registry.npmjs.org/istanbul-lib-report/-/istanbul-lib-report-3.0.1.tgz", + "integrity": "sha512-GCfE1mtsHGOELCU8e/Z7YWzpmybrx/+dSTfLrvY8qRmaY6zXTKWn6WQIjaAFw069icm6GVMNkgu0NzI4iPZUNw==", + "dev": true, + "license": "BSD-3-Clause", + "dependencies": { + "istanbul-lib-coverage": "^3.0.0", + "make-dir": "^4.0.0", + "supports-color": "^7.1.0" + }, + "engines": { + "node": ">=10" + } + }, + "node_modules/istanbul-lib-source-maps": { + "version": "4.0.1", + "resolved": "https://registry.npmjs.org/istanbul-lib-source-maps/-/istanbul-lib-source-maps-4.0.1.tgz", + "integrity": "sha512-n3s8EwkdFIJCG3BPKBYvskgXGoy88ARzvegkitk60NxRdwltLOTaH7CUiMRXvwYorl0Q712iEjcWB+fK/MrWVw==", + "dev": true, + "license": "BSD-3-Clause", + "dependencies": { + "debug": "^4.1.1", + "istanbul-lib-coverage": "^3.0.0", + "source-map": "^0.6.1" + }, + "engines": { + "node": ">=10" + } + }, + "node_modules/istanbul-reports": { + "version": "3.1.7", + "resolved": "https://registry.npmjs.org/istanbul-reports/-/istanbul-reports-3.1.7.tgz", + "integrity": 
"sha512-BewmUXImeuRk2YY0PVbxgKAysvhRPUQE0h5QRM++nVWyubKGV0l8qQ5op8+B2DOmwSe63Jivj0BjkPQVf8fP5g==", + "dev": true, + "license": "BSD-3-Clause", + "dependencies": { + "html-escaper": "^2.0.0", + "istanbul-lib-report": "^3.0.0" + }, + "engines": { + "node": ">=8" + } + }, + "node_modules/jake": { + "version": "10.9.2", + "resolved": "https://registry.npmjs.org/jake/-/jake-10.9.2.tgz", + "integrity": "sha512-2P4SQ0HrLQ+fw6llpLnOaGAvN2Zu6778SJMrCUwns4fOoG9ayrTiZk3VV8sCPkVZF8ab0zksVpS8FDY5pRCNBA==", + "dev": true, + "license": "Apache-2.0", + "dependencies": { + "async": "^3.2.3", + "chalk": "^4.0.2", + "filelist": "^1.0.4", + "minimatch": "^3.1.2" + }, + "bin": { + "jake": "bin/cli.js" + }, + "engines": { + "node": ">=10" + } + }, + "node_modules/jest": { + "version": "29.7.0", + "resolved": "https://registry.npmjs.org/jest/-/jest-29.7.0.tgz", + "integrity": "sha512-NIy3oAFp9shda19hy4HK0HRTWKtPJmGdnvywu01nOqNC2vZg+Z+fvJDxpMQA88eb2I9EcafcdjYgsDthnYTvGw==", + "dev": true, + "license": "MIT", + "dependencies": { + "@jest/core": "^29.7.0", + "@jest/types": "^29.6.3", + "import-local": "^3.0.2", + "jest-cli": "^29.7.0" + }, + "bin": { + "jest": "bin/jest.js" + }, + "engines": { + "node": "^14.15.0 || ^16.10.0 || >=18.0.0" + }, + "peerDependencies": { + "node-notifier": "^8.0.1 || ^9.0.0 || ^10.0.0" + }, + "peerDependenciesMeta": { + "node-notifier": { + "optional": true + } + } + }, + "node_modules/jest-changed-files": { + "version": "29.7.0", + "resolved": "https://registry.npmjs.org/jest-changed-files/-/jest-changed-files-29.7.0.tgz", + "integrity": "sha512-fEArFiwf1BpQ+4bXSprcDc3/x4HSzL4al2tozwVpDFpsxALjLYdyiIK4e5Vz66GQJIbXJ82+35PtysofptNX2w==", + "dev": true, + "license": "MIT", + "dependencies": { + "execa": "^5.0.0", + "jest-util": "^29.7.0", + "p-limit": "^3.1.0" + }, + "engines": { + "node": "^14.15.0 || ^16.10.0 || >=18.0.0" + } + }, + "node_modules/jest-circus": { + "version": "29.7.0", + "resolved": 
"https://registry.npmjs.org/jest-circus/-/jest-circus-29.7.0.tgz", + "integrity": "sha512-3E1nCMgipcTkCocFwM90XXQab9bS+GMsjdpmPrlelaxwD93Ad8iVEjX/vvHPdLPnFf+L40u+5+iutRdA1N9myw==", + "dev": true, + "license": "MIT", + "dependencies": { + "@jest/environment": "^29.7.0", + "@jest/expect": "^29.7.0", + "@jest/test-result": "^29.7.0", + "@jest/types": "^29.6.3", + "@types/node": "*", + "chalk": "^4.0.0", + "co": "^4.6.0", + "dedent": "^1.0.0", + "is-generator-fn": "^2.0.0", + "jest-each": "^29.7.0", + "jest-matcher-utils": "^29.7.0", + "jest-message-util": "^29.7.0", + "jest-runtime": "^29.7.0", + "jest-snapshot": "^29.7.0", + "jest-util": "^29.7.0", + "p-limit": "^3.1.0", + "pretty-format": "^29.7.0", + "pure-rand": "^6.0.0", + "slash": "^3.0.0", + "stack-utils": "^2.0.3" + }, + "engines": { + "node": "^14.15.0 || ^16.10.0 || >=18.0.0" + } + }, + "node_modules/jest-cli": { + "version": "29.7.0", + "resolved": "https://registry.npmjs.org/jest-cli/-/jest-cli-29.7.0.tgz", + "integrity": "sha512-OVVobw2IubN/GSYsxETi+gOe7Ka59EFMR/twOU3Jb2GnKKeMGJB5SGUUrEz3SFVmJASUdZUzy83sLNNQ2gZslg==", + "dev": true, + "license": "MIT", + "dependencies": { + "@jest/core": "^29.7.0", + "@jest/test-result": "^29.7.0", + "@jest/types": "^29.6.3", + "chalk": "^4.0.0", + "create-jest": "^29.7.0", + "exit": "^0.1.2", + "import-local": "^3.0.2", + "jest-config": "^29.7.0", + "jest-util": "^29.7.0", + "jest-validate": "^29.7.0", + "yargs": "^17.3.1" + }, + "bin": { + "jest": "bin/jest.js" + }, + "engines": { + "node": "^14.15.0 || ^16.10.0 || >=18.0.0" + }, + "peerDependencies": { + "node-notifier": "^8.0.1 || ^9.0.0 || ^10.0.0" + }, + "peerDependenciesMeta": { + "node-notifier": { + "optional": true + } + } + }, + "node_modules/jest-config": { + "version": "29.7.0", + "resolved": "https://registry.npmjs.org/jest-config/-/jest-config-29.7.0.tgz", + "integrity": "sha512-uXbpfeQ7R6TZBqI3/TxCU4q4ttk3u0PJeC+E0zbfSoSjq6bJ7buBPxzQPL0ifrkY4DNu4JUdk0ImlBUYi840eQ==", + "dev": true, + "license": "MIT", + 
"dependencies": { + "@babel/core": "^7.11.6", + "@jest/test-sequencer": "^29.7.0", + "@jest/types": "^29.6.3", + "babel-jest": "^29.7.0", + "chalk": "^4.0.0", + "ci-info": "^3.2.0", + "deepmerge": "^4.2.2", + "glob": "^7.1.3", + "graceful-fs": "^4.2.9", + "jest-circus": "^29.7.0", + "jest-environment-node": "^29.7.0", + "jest-get-type": "^29.6.3", + "jest-regex-util": "^29.6.3", + "jest-resolve": "^29.7.0", + "jest-runner": "^29.7.0", + "jest-util": "^29.7.0", + "jest-validate": "^29.7.0", + "micromatch": "^4.0.4", + "parse-json": "^5.2.0", + "pretty-format": "^29.7.0", + "slash": "^3.0.0", + "strip-json-comments": "^3.1.1" + }, + "engines": { + "node": "^14.15.0 || ^16.10.0 || >=18.0.0" + }, + "peerDependencies": { + "@types/node": "*", + "ts-node": ">=9.0.0" + }, + "peerDependenciesMeta": { + "@types/node": { + "optional": true + }, + "ts-node": { + "optional": true + } + } + }, + "node_modules/jest-diff": { + "version": "29.7.0", + "resolved": "https://registry.npmjs.org/jest-diff/-/jest-diff-29.7.0.tgz", + "integrity": "sha512-LMIgiIrhigmPrs03JHpxUh2yISK3vLFPkAodPeo0+BuF7wA2FoQbkEg1u8gBYBThncu7e1oEDUfIXVuTqLRUjw==", + "dev": true, + "license": "MIT", + "dependencies": { + "chalk": "^4.0.0", + "diff-sequences": "^29.6.3", + "jest-get-type": "^29.6.3", + "pretty-format": "^29.7.0" + }, + "engines": { + "node": "^14.15.0 || ^16.10.0 || >=18.0.0" + } + }, + "node_modules/jest-docblock": { + "version": "29.7.0", + "resolved": "https://registry.npmjs.org/jest-docblock/-/jest-docblock-29.7.0.tgz", + "integrity": "sha512-q617Auw3A612guyaFgsbFeYpNP5t2aoUNLwBUbc/0kD1R4t9ixDbyFTHd1nok4epoVFpr7PmeWHrhvuV3XaJ4g==", + "dev": true, + "license": "MIT", + "dependencies": { + "detect-newline": "^3.0.0" + }, + "engines": { + "node": "^14.15.0 || ^16.10.0 || >=18.0.0" + } + }, + "node_modules/jest-each": { + "version": "29.7.0", + "resolved": "https://registry.npmjs.org/jest-each/-/jest-each-29.7.0.tgz", + "integrity": 
"sha512-gns+Er14+ZrEoC5fhOfYCY1LOHHr0TI+rQUHZS8Ttw2l7gl+80eHc/gFf2Ktkw0+SIACDTeWvpFcv3B04VembQ==", + "dev": true, + "license": "MIT", + "dependencies": { + "@jest/types": "^29.6.3", + "chalk": "^4.0.0", + "jest-get-type": "^29.6.3", + "jest-util": "^29.7.0", + "pretty-format": "^29.7.0" + }, + "engines": { + "node": "^14.15.0 || ^16.10.0 || >=18.0.0" + } + }, + "node_modules/jest-environment-node": { + "version": "29.7.0", + "resolved": "https://registry.npmjs.org/jest-environment-node/-/jest-environment-node-29.7.0.tgz", + "integrity": "sha512-DOSwCRqXirTOyheM+4d5YZOrWcdu0LNZ87ewUoywbcb2XR4wKgqiG8vNeYwhjFMbEkfju7wx2GYH0P2gevGvFw==", + "dev": true, + "license": "MIT", + "dependencies": { + "@jest/environment": "^29.7.0", + "@jest/fake-timers": "^29.7.0", + "@jest/types": "^29.6.3", + "@types/node": "*", + "jest-mock": "^29.7.0", + "jest-util": "^29.7.0" + }, + "engines": { + "node": "^14.15.0 || ^16.10.0 || >=18.0.0" + } + }, + "node_modules/jest-get-type": { + "version": "29.6.3", + "resolved": "https://registry.npmjs.org/jest-get-type/-/jest-get-type-29.6.3.tgz", + "integrity": "sha512-zrteXnqYxfQh7l5FHyL38jL39di8H8rHoecLH3JNxH3BwOrBsNeabdap5e0I23lD4HHI8W5VFBZqG4Eaq5LNcw==", + "dev": true, + "license": "MIT", + "engines": { + "node": "^14.15.0 || ^16.10.0 || >=18.0.0" + } + }, + "node_modules/jest-haste-map": { + "version": "29.7.0", + "resolved": "https://registry.npmjs.org/jest-haste-map/-/jest-haste-map-29.7.0.tgz", + "integrity": "sha512-fP8u2pyfqx0K1rGn1R9pyE0/KTn+G7PxktWidOBTqFPLYX0b9ksaMFkhK5vrS3DVun09pckLdlx90QthlW7AmA==", + "dev": true, + "license": "MIT", + "dependencies": { + "@jest/types": "^29.6.3", + "@types/graceful-fs": "^4.1.3", + "@types/node": "*", + "anymatch": "^3.0.3", + "fb-watchman": "^2.0.0", + "graceful-fs": "^4.2.9", + "jest-regex-util": "^29.6.3", + "jest-util": "^29.7.0", + "jest-worker": "^29.7.0", + "micromatch": "^4.0.4", + "walker": "^1.0.8" + }, + "engines": { + "node": "^14.15.0 || ^16.10.0 || >=18.0.0" + }, + 
"optionalDependencies": { + "fsevents": "^2.3.2" + } + }, + "node_modules/jest-leak-detector": { + "version": "29.7.0", + "resolved": "https://registry.npmjs.org/jest-leak-detector/-/jest-leak-detector-29.7.0.tgz", + "integrity": "sha512-kYA8IJcSYtST2BY9I+SMC32nDpBT3J2NvWJx8+JCuCdl/CR1I4EKUJROiP8XtCcxqgTTBGJNdbB1A8XRKbTetw==", + "dev": true, + "license": "MIT", + "dependencies": { + "jest-get-type": "^29.6.3", + "pretty-format": "^29.7.0" + }, + "engines": { + "node": "^14.15.0 || ^16.10.0 || >=18.0.0" + } + }, + "node_modules/jest-matcher-utils": { + "version": "29.7.0", + "resolved": "https://registry.npmjs.org/jest-matcher-utils/-/jest-matcher-utils-29.7.0.tgz", + "integrity": "sha512-sBkD+Xi9DtcChsI3L3u0+N0opgPYnCRPtGcQYrgXmR+hmt/fYfWAL0xRXYU8eWOdfuLgBe0YCW3AFtnRLagq/g==", + "dev": true, + "license": "MIT", + "dependencies": { + "chalk": "^4.0.0", + "jest-diff": "^29.7.0", + "jest-get-type": "^29.6.3", + "pretty-format": "^29.7.0" + }, + "engines": { + "node": "^14.15.0 || ^16.10.0 || >=18.0.0" + } + }, + "node_modules/jest-message-util": { + "version": "29.7.0", + "resolved": "https://registry.npmjs.org/jest-message-util/-/jest-message-util-29.7.0.tgz", + "integrity": "sha512-GBEV4GRADeP+qtB2+6u61stea8mGcOT4mCtrYISZwfu9/ISHFJ/5zOMXYbpBE9RsS5+Gb63DW4FgmnKJ79Kf6w==", + "dev": true, + "license": "MIT", + "dependencies": { + "@babel/code-frame": "^7.12.13", + "@jest/types": "^29.6.3", + "@types/stack-utils": "^2.0.0", + "chalk": "^4.0.0", + "graceful-fs": "^4.2.9", + "micromatch": "^4.0.4", + "pretty-format": "^29.7.0", + "slash": "^3.0.0", + "stack-utils": "^2.0.3" + }, + "engines": { + "node": "^14.15.0 || ^16.10.0 || >=18.0.0" + } + }, + "node_modules/jest-mock": { + "version": "29.7.0", + "resolved": "https://registry.npmjs.org/jest-mock/-/jest-mock-29.7.0.tgz", + "integrity": "sha512-ITOMZn+UkYS4ZFh83xYAOzWStloNzJFO2s8DWrE4lhtGD+AorgnbkiKERe4wQVBydIGPx059g6riW5Btp6Llnw==", + "dev": true, + "license": "MIT", + "dependencies": { + "@jest/types": "^29.6.3", + 
"@types/node": "*", + "jest-util": "^29.7.0" + }, + "engines": { + "node": "^14.15.0 || ^16.10.0 || >=18.0.0" + } + }, + "node_modules/jest-pnp-resolver": { + "version": "1.2.3", + "resolved": "https://registry.npmjs.org/jest-pnp-resolver/-/jest-pnp-resolver-1.2.3.tgz", + "integrity": "sha512-+3NpwQEnRoIBtx4fyhblQDPgJI0H1IEIkX7ShLUjPGA7TtUTvI1oiKi3SR4oBR0hQhQR80l4WAe5RrXBwWMA8w==", + "dev": true, + "license": "MIT", + "engines": { + "node": ">=6" + }, + "peerDependencies": { + "jest-resolve": "*" + }, + "peerDependenciesMeta": { + "jest-resolve": { + "optional": true + } + } + }, + "node_modules/jest-regex-util": { + "version": "29.6.3", + "resolved": "https://registry.npmjs.org/jest-regex-util/-/jest-regex-util-29.6.3.tgz", + "integrity": "sha512-KJJBsRCyyLNWCNBOvZyRDnAIfUiRJ8v+hOBQYGn8gDyF3UegwiP4gwRR3/SDa42g1YbVycTidUF3rKjyLFDWbg==", + "dev": true, + "license": "MIT", + "engines": { + "node": "^14.15.0 || ^16.10.0 || >=18.0.0" + } + }, + "node_modules/jest-resolve": { + "version": "29.7.0", + "resolved": "https://registry.npmjs.org/jest-resolve/-/jest-resolve-29.7.0.tgz", + "integrity": "sha512-IOVhZSrg+UvVAshDSDtHyFCCBUl/Q3AAJv8iZ6ZjnZ74xzvwuzLXid9IIIPgTnY62SJjfuupMKZsZQRsCvxEgA==", + "dev": true, + "license": "MIT", + "dependencies": { + "chalk": "^4.0.0", + "graceful-fs": "^4.2.9", + "jest-haste-map": "^29.7.0", + "jest-pnp-resolver": "^1.2.2", + "jest-util": "^29.7.0", + "jest-validate": "^29.7.0", + "resolve": "^1.20.0", + "resolve.exports": "^2.0.0", + "slash": "^3.0.0" + }, + "engines": { + "node": "^14.15.0 || ^16.10.0 || >=18.0.0" + } + }, + "node_modules/jest-resolve-dependencies": { + "version": "29.7.0", + "resolved": "https://registry.npmjs.org/jest-resolve-dependencies/-/jest-resolve-dependencies-29.7.0.tgz", + "integrity": "sha512-un0zD/6qxJ+S0et7WxeI3H5XSe9lTBBR7bOHCHXkKR6luG5mwDDlIzVQ0V5cZCuoTgEdcdwzTghYkTWfubi+nA==", + "dev": true, + "license": "MIT", + "dependencies": { + "jest-regex-util": "^29.6.3", + "jest-snapshot": "^29.7.0" + }, + 
"engines": { + "node": "^14.15.0 || ^16.10.0 || >=18.0.0" + } + }, + "node_modules/jest-runner": { + "version": "29.7.0", + "resolved": "https://registry.npmjs.org/jest-runner/-/jest-runner-29.7.0.tgz", + "integrity": "sha512-fsc4N6cPCAahybGBfTRcq5wFR6fpLznMg47sY5aDpsoejOcVYFb07AHuSnR0liMcPTgBsA3ZJL6kFOjPdoNipQ==", + "dev": true, + "license": "MIT", + "dependencies": { + "@jest/console": "^29.7.0", + "@jest/environment": "^29.7.0", + "@jest/test-result": "^29.7.0", + "@jest/transform": "^29.7.0", + "@jest/types": "^29.6.3", + "@types/node": "*", + "chalk": "^4.0.0", + "emittery": "^0.13.1", + "graceful-fs": "^4.2.9", + "jest-docblock": "^29.7.0", + "jest-environment-node": "^29.7.0", + "jest-haste-map": "^29.7.0", + "jest-leak-detector": "^29.7.0", + "jest-message-util": "^29.7.0", + "jest-resolve": "^29.7.0", + "jest-runtime": "^29.7.0", + "jest-util": "^29.7.0", + "jest-watcher": "^29.7.0", + "jest-worker": "^29.7.0", + "p-limit": "^3.1.0", + "source-map-support": "0.5.13" + }, + "engines": { + "node": "^14.15.0 || ^16.10.0 || >=18.0.0" + } + }, + "node_modules/jest-runner/node_modules/source-map-support": { + "version": "0.5.13", + "resolved": "https://registry.npmjs.org/source-map-support/-/source-map-support-0.5.13.tgz", + "integrity": "sha512-SHSKFHadjVA5oR4PPqhtAVdcBWwRYVd6g6cAXnIbRiIwc2EhPrTuKUBdSLvlEKyIP3GCf89fltvcZiP9MMFA1w==", + "dev": true, + "license": "MIT", + "dependencies": { + "buffer-from": "^1.0.0", + "source-map": "^0.6.0" + } + }, + "node_modules/jest-runtime": { + "version": "29.7.0", + "resolved": "https://registry.npmjs.org/jest-runtime/-/jest-runtime-29.7.0.tgz", + "integrity": "sha512-gUnLjgwdGqW7B4LvOIkbKs9WGbn+QLqRQQ9juC6HndeDiezIwhDP+mhMwHWCEcfQ5RUXa6OPnFF8BJh5xegwwQ==", + "dev": true, + "license": "MIT", + "dependencies": { + "@jest/environment": "^29.7.0", + "@jest/fake-timers": "^29.7.0", + "@jest/globals": "^29.7.0", + "@jest/source-map": "^29.6.3", + "@jest/test-result": "^29.7.0", + "@jest/transform": "^29.7.0", + "@jest/types": 
"^29.6.3", + "@types/node": "*", + "chalk": "^4.0.0", + "cjs-module-lexer": "^1.0.0", + "collect-v8-coverage": "^1.0.0", + "glob": "^7.1.3", + "graceful-fs": "^4.2.9", + "jest-haste-map": "^29.7.0", + "jest-message-util": "^29.7.0", + "jest-mock": "^29.7.0", + "jest-regex-util": "^29.6.3", + "jest-resolve": "^29.7.0", + "jest-snapshot": "^29.7.0", + "jest-util": "^29.7.0", + "slash": "^3.0.0", + "strip-bom": "^4.0.0" + }, + "engines": { + "node": "^14.15.0 || ^16.10.0 || >=18.0.0" + } + }, + "node_modules/jest-snapshot": { + "version": "29.7.0", + "resolved": "https://registry.npmjs.org/jest-snapshot/-/jest-snapshot-29.7.0.tgz", + "integrity": "sha512-Rm0BMWtxBcioHr1/OX5YCP8Uov4riHvKPknOGs804Zg9JGZgmIBkbtlxJC/7Z4msKYVbIJtfU+tKb8xlYNfdkw==", + "dev": true, + "license": "MIT", + "dependencies": { + "@babel/core": "^7.11.6", + "@babel/generator": "^7.7.2", + "@babel/plugin-syntax-jsx": "^7.7.2", + "@babel/plugin-syntax-typescript": "^7.7.2", + "@babel/types": "^7.3.3", + "@jest/expect-utils": "^29.7.0", + "@jest/transform": "^29.7.0", + "@jest/types": "^29.6.3", + "babel-preset-current-node-syntax": "^1.0.0", + "chalk": "^4.0.0", + "expect": "^29.7.0", + "graceful-fs": "^4.2.9", + "jest-diff": "^29.7.0", + "jest-get-type": "^29.6.3", + "jest-matcher-utils": "^29.7.0", + "jest-message-util": "^29.7.0", + "jest-util": "^29.7.0", + "natural-compare": "^1.4.0", + "pretty-format": "^29.7.0", + "semver": "^7.5.3" + }, + "engines": { + "node": "^14.15.0 || ^16.10.0 || >=18.0.0" + } + }, + "node_modules/jest-snapshot/node_modules/semver": { + "version": "7.7.1", + "resolved": "https://registry.npmjs.org/semver/-/semver-7.7.1.tgz", + "integrity": "sha512-hlq8tAfn0m/61p4BVRcPzIGr6LKiMwo4VM6dGi6pt4qcRkmNzTcWq6eCEjEh+qXjkMDvPlOFFSGwQjoEa6gyMA==", + "dev": true, + "license": "ISC", + "bin": { + "semver": "bin/semver.js" + }, + "engines": { + "node": ">=10" + } + }, + "node_modules/jest-util": { + "version": "29.7.0", + "resolved": 
"https://registry.npmjs.org/jest-util/-/jest-util-29.7.0.tgz", + "integrity": "sha512-z6EbKajIpqGKU56y5KBUgy1dt1ihhQJgWzUlZHArA/+X2ad7Cb5iF+AK1EWVL/Bo7Rz9uurpqw6SiBCefUbCGA==", + "dev": true, + "license": "MIT", + "dependencies": { + "@jest/types": "^29.6.3", + "@types/node": "*", + "chalk": "^4.0.0", + "ci-info": "^3.2.0", + "graceful-fs": "^4.2.9", + "picomatch": "^2.2.3" + }, + "engines": { + "node": "^14.15.0 || ^16.10.0 || >=18.0.0" + } + }, + "node_modules/jest-validate": { + "version": "29.7.0", + "resolved": "https://registry.npmjs.org/jest-validate/-/jest-validate-29.7.0.tgz", + "integrity": "sha512-ZB7wHqaRGVw/9hST/OuFUReG7M8vKeq0/J2egIGLdvjHCmYqGARhzXmtgi+gVeZ5uXFF219aOc3Ls2yLg27tkw==", + "dev": true, + "license": "MIT", + "dependencies": { + "@jest/types": "^29.6.3", + "camelcase": "^6.2.0", + "chalk": "^4.0.0", + "jest-get-type": "^29.6.3", + "leven": "^3.1.0", + "pretty-format": "^29.7.0" + }, + "engines": { + "node": "^14.15.0 || ^16.10.0 || >=18.0.0" + } + }, + "node_modules/jest-validate/node_modules/camelcase": { + "version": "6.3.0", + "resolved": "https://registry.npmjs.org/camelcase/-/camelcase-6.3.0.tgz", + "integrity": "sha512-Gmy6FhYlCY7uOElZUSbxo2UCDH8owEk996gkbrpsgGtrJLM3J7jGxl9Ic7Qwwj4ivOE5AWZWRMecDdF7hqGjFA==", + "dev": true, + "license": "MIT", + "engines": { + "node": ">=10" + }, + "funding": { + "url": "https://github.com/sponsors/sindresorhus" + } + }, + "node_modules/jest-watcher": { + "version": "29.7.0", + "resolved": "https://registry.npmjs.org/jest-watcher/-/jest-watcher-29.7.0.tgz", + "integrity": "sha512-49Fg7WXkU3Vl2h6LbLtMQ/HyB6rXSIX7SqvBLQmssRBGN9I0PNvPmAmCWSOY6SOvrjhI/F7/bGAv9RtnsPA03g==", + "dev": true, + "license": "MIT", + "dependencies": { + "@jest/test-result": "^29.7.0", + "@jest/types": "^29.6.3", + "@types/node": "*", + "ansi-escapes": "^4.2.1", + "chalk": "^4.0.0", + "emittery": "^0.13.1", + "jest-util": "^29.7.0", + "string-length": "^4.0.1" + }, + "engines": { + "node": "^14.15.0 || ^16.10.0 || >=18.0.0" + } + 
}, + "node_modules/jest-worker": { + "version": "29.7.0", + "resolved": "https://registry.npmjs.org/jest-worker/-/jest-worker-29.7.0.tgz", + "integrity": "sha512-eIz2msL/EzL9UFTFFx7jBTkeZfku0yUAyZZZmJ93H2TYEiroIx2PQjEXcwYtYl8zXCxb+PAmA2hLIt/6ZEkPHw==", + "dev": true, + "license": "MIT", + "dependencies": { + "@types/node": "*", + "jest-util": "^29.7.0", + "merge-stream": "^2.0.0", + "supports-color": "^8.0.0" + }, + "engines": { + "node": "^14.15.0 || ^16.10.0 || >=18.0.0" + } + }, + "node_modules/jest-worker/node_modules/supports-color": { + "version": "8.1.1", + "resolved": "https://registry.npmjs.org/supports-color/-/supports-color-8.1.1.tgz", + "integrity": "sha512-MpUEN2OodtUzxvKQl72cUF7RQ5EiHsGvSsVG0ia9c5RbWGL2CI4C7EpPS8UTBIplnlzZiNuV56w+FuNxy3ty2Q==", + "dev": true, + "license": "MIT", + "dependencies": { + "has-flag": "^4.0.0" + }, + "engines": { + "node": ">=10" + }, + "funding": { + "url": "https://github.com/chalk/supports-color?sponsor=1" + } + }, + "node_modules/jmespath": { + "version": "0.16.0", + "resolved": "https://registry.npmjs.org/jmespath/-/jmespath-0.16.0.tgz", + "integrity": "sha512-9FzQjJ7MATs1tSpnco1K6ayiYE3figslrXA72G2HQ/n76RzvYlofyi5QM+iX4YRs/pu3yzxlVQSST23+dMDknw==", + "license": "Apache-2.0", + "engines": { + "node": ">= 0.6.0" + } + }, + "node_modules/js-tokens": { + "version": "4.0.0", + "resolved": "https://registry.npmjs.org/js-tokens/-/js-tokens-4.0.0.tgz", + "integrity": "sha512-RdJUflcE3cUzKiMqQgsCu06FPu9UdIJO0beYbPhHN4k6apgJtifcoCtT9bcxOpYBtpD2kCM6Sbzg4CausW/PKQ==", + "dev": true, + "license": "MIT" + }, + "node_modules/js-yaml": { + "version": "3.14.1", + "resolved": "https://registry.npmjs.org/js-yaml/-/js-yaml-3.14.1.tgz", + "integrity": "sha512-okMH7OXXJ7YrN9Ok3/SXrnu4iX9yOk+25nqX4imS2npuvTYDmo/QEZoqwZkYaIDk3jVvBOTOIEgEhaLOynBS9g==", + "dev": true, + "license": "MIT", + "dependencies": { + "argparse": "^1.0.7", + "esprima": "^4.0.0" + }, + "bin": { + "js-yaml": "bin/js-yaml.js" + } + }, + "node_modules/jsesc": { + 
"version": "3.1.0", + "resolved": "https://registry.npmjs.org/jsesc/-/jsesc-3.1.0.tgz", + "integrity": "sha512-/sM3dO2FOzXjKQhJuo0Q173wf2KOo8t4I8vHy6lF9poUp7bKT0/NHE8fPX23PwfhnykfqnC2xRxOnVw5XuGIaA==", + "dev": true, + "license": "MIT", + "bin": { + "jsesc": "bin/jsesc" + }, + "engines": { + "node": ">=6" + } + }, + "node_modules/json-parse-even-better-errors": { + "version": "2.3.1", + "resolved": "https://registry.npmjs.org/json-parse-even-better-errors/-/json-parse-even-better-errors-2.3.1.tgz", + "integrity": "sha512-xyFwyhro/JEof6Ghe2iz2NcXoj2sloNsWr/XsERDK/oiPCfaNhl5ONfp+jQdAZRQQ0IJWNzH9zIZF7li91kh2w==", + "dev": true, + "license": "MIT" + }, + "node_modules/json5": { + "version": "2.2.3", + "resolved": "https://registry.npmjs.org/json5/-/json5-2.2.3.tgz", + "integrity": "sha512-XmOWe7eyHYH14cLdVPoyg+GOH3rYX++KpzrylJwSW98t3Nk+U8XOl8FWKOgwtzdb8lXGf6zYwDUzeHMWfxasyg==", + "dev": true, + "license": "MIT", + "bin": { + "json5": "lib/cli.js" + }, + "engines": { + "node": ">=6" + } + }, + "node_modules/kleur": { + "version": "3.0.3", + "resolved": "https://registry.npmjs.org/kleur/-/kleur-3.0.3.tgz", + "integrity": "sha512-eTIzlVOSUR+JxdDFepEYcBMtZ9Qqdef+rnzWdRZuMbOywu5tO2w2N7rqjoANZ5k9vywhL6Br1VRjUIgTQx4E8w==", + "dev": true, + "license": "MIT", + "engines": { + "node": ">=6" + } + }, + "node_modules/leven": { + "version": "3.1.0", + "resolved": "https://registry.npmjs.org/leven/-/leven-3.1.0.tgz", + "integrity": "sha512-qsda+H8jTaUaN/x5vzW2rzc+8Rw4TAQ/4KjB46IwK5VH+IlVeeeje/EoZRpiXvIqjFgK84QffqPztGI3VBLG1A==", + "dev": true, + "license": "MIT", + "engines": { + "node": ">=6" + } + }, + "node_modules/lines-and-columns": { + "version": "1.2.4", + "resolved": "https://registry.npmjs.org/lines-and-columns/-/lines-and-columns-1.2.4.tgz", + "integrity": "sha512-7ylylesZQ/PV29jhEDl3Ufjo6ZX7gCqJr5F7PKrqc93v7fzSymt1BpwEU8nAUXs8qzzvqhbjhK5QZg6Mt/HkBg==", + "dev": true, + "license": "MIT" + }, + "node_modules/locate-path": { + "version": "5.0.0", + "resolved": 
"https://registry.npmjs.org/locate-path/-/locate-path-5.0.0.tgz", + "integrity": "sha512-t7hw9pI+WvuwNJXwk5zVHpyhIqzg2qTlklJOf0mVxGSbe3Fp2VieZcduNYjaLDoy6p9uGpQEGWG87WpMKlNq8g==", + "dev": true, + "license": "MIT", + "dependencies": { + "p-locate": "^4.1.0" + }, + "engines": { + "node": ">=8" + } + }, + "node_modules/lodash.memoize": { + "version": "4.1.2", + "resolved": "https://registry.npmjs.org/lodash.memoize/-/lodash.memoize-4.1.2.tgz", + "integrity": "sha512-t7j+NzmgnQzTAYXcsHYLgimltOV1MXHtlOWf6GjL9Kj8GK5FInw5JotxvbOs+IvV1/Dzo04/fCGfLVs7aXb4Ag==", + "dev": true, + "license": "MIT" + }, + "node_modules/lru-cache": { + "version": "5.1.1", + "resolved": "https://registry.npmjs.org/lru-cache/-/lru-cache-5.1.1.tgz", + "integrity": "sha512-KpNARQA3Iwv+jTA0utUVVbrh+Jlrr1Fv0e56GGzAFOXN7dk/FviaDW8LHmK52DlcH4WP2n6gI8vN1aesBFgo9w==", + "dev": true, + "license": "ISC", + "dependencies": { + "yallist": "^3.0.2" + } + }, + "node_modules/make-dir": { + "version": "4.0.0", + "resolved": "https://registry.npmjs.org/make-dir/-/make-dir-4.0.0.tgz", + "integrity": "sha512-hXdUTZYIVOt1Ex//jAQi+wTZZpUpwBj/0QsOzqegb3rGMMeJiSEu5xLHnYfBrRV4RH2+OCSOO95Is/7x1WJ4bw==", + "dev": true, + "license": "MIT", + "dependencies": { + "semver": "^7.5.3" + }, + "engines": { + "node": ">=10" + }, + "funding": { + "url": "https://github.com/sponsors/sindresorhus" + } + }, + "node_modules/make-dir/node_modules/semver": { + "version": "7.7.1", + "resolved": "https://registry.npmjs.org/semver/-/semver-7.7.1.tgz", + "integrity": "sha512-hlq8tAfn0m/61p4BVRcPzIGr6LKiMwo4VM6dGi6pt4qcRkmNzTcWq6eCEjEh+qXjkMDvPlOFFSGwQjoEa6gyMA==", + "dev": true, + "license": "ISC", + "bin": { + "semver": "bin/semver.js" + }, + "engines": { + "node": ">=10" + } + }, + "node_modules/make-error": { + "version": "1.3.6", + "resolved": "https://registry.npmjs.org/make-error/-/make-error-1.3.6.tgz", + "integrity": "sha512-s8UhlNe7vPKomQhC1qFelMokr/Sc3AgNbso3n74mVPA5LTZwkB9NlXf4XPamLxJE8h0gh73rM94xvwRT2CVInw==", + "dev": true, + 
"license": "ISC" + }, + "node_modules/makeerror": { + "version": "1.0.12", + "resolved": "https://registry.npmjs.org/makeerror/-/makeerror-1.0.12.tgz", + "integrity": "sha512-JmqCvUhmt43madlpFzG4BQzG2Z3m6tvQDNKdClZnO3VbIudJYmxsT0FNJMeiB2+JTSlTQTSbU8QdesVmwJcmLg==", + "dev": true, + "license": "BSD-3-Clause", + "dependencies": { + "tmpl": "1.0.5" + } + }, + "node_modules/math-intrinsics": { + "version": "1.1.0", + "resolved": "https://registry.npmjs.org/math-intrinsics/-/math-intrinsics-1.1.0.tgz", + "integrity": "sha512-/IXtbwEk5HTPyEwyKX6hGkYXxM9nbj64B+ilVJnC/R6B0pH5G4V3b0pVbL7DBj4tkhBAppbQUlf6F6Xl9LHu1g==", + "license": "MIT", + "engines": { + "node": ">= 0.4" + } + }, + "node_modules/merge-stream": { + "version": "2.0.0", + "resolved": "https://registry.npmjs.org/merge-stream/-/merge-stream-2.0.0.tgz", + "integrity": "sha512-abv/qOcuPfk3URPfDzmZU1LKmuw8kT+0nIHvKrKgFrwifol/doWcdA4ZqsWQ8ENrFKkd67Mfpo/LovbIUsbt3w==", + "dev": true, + "license": "MIT" + }, + "node_modules/micromatch": { + "version": "4.0.8", + "resolved": "https://registry.npmjs.org/micromatch/-/micromatch-4.0.8.tgz", + "integrity": "sha512-PXwfBhYu0hBCPw8Dn0E+WDYb7af3dSLVWKi3HGv84IdF4TyFoC0ysxFd0Goxw7nSv4T/PzEJQxsYsEiFCKo2BA==", + "dev": true, + "license": "MIT", + "dependencies": { + "braces": "^3.0.3", + "picomatch": "^2.3.1" + }, + "engines": { + "node": ">=8.6" + } + }, + "node_modules/mimic-fn": { + "version": "2.1.0", + "resolved": "https://registry.npmjs.org/mimic-fn/-/mimic-fn-2.1.0.tgz", + "integrity": "sha512-OqbOk5oEQeAZ8WXWydlu9HJjz9WVdEIvamMCcXmuqUYjTknH/sqsWvhQ3vgwKFRR1HpjvNBKQ37nbJgYzGqGcg==", + "dev": true, + "license": "MIT", + "engines": { + "node": ">=6" + } + }, + "node_modules/minimatch": { + "version": "3.1.2", + "resolved": "https://registry.npmjs.org/minimatch/-/minimatch-3.1.2.tgz", + "integrity": "sha512-J7p63hRiAjw1NDEww1W7i37+ByIrOWO5XQQAzZ3VOcL0PNybwpfmV/N05zFAzwQ9USyEcX6t3UO+K5aqBQOIHw==", + "dev": true, + "license": "ISC", + "dependencies": { + "brace-expansion": 
"^1.1.7" + }, + "engines": { + "node": "*" + } + }, + "node_modules/ms": { + "version": "2.1.3", + "resolved": "https://registry.npmjs.org/ms/-/ms-2.1.3.tgz", + "integrity": "sha512-6FlzubTLZG3J2a/NVCAleEhjzq5oxgHyaCU9yYXvcLsvoVaHJq/s5xXI6/XXP6tz7R9xAOtHnSO/tXtF3WRTlA==", + "dev": true, + "license": "MIT" + }, + "node_modules/natural-compare": { + "version": "1.4.0", + "resolved": "https://registry.npmjs.org/natural-compare/-/natural-compare-1.4.0.tgz", + "integrity": "sha512-OWND8ei3VtNC9h7V60qff3SVobHr996CTwgxubgyQYEpg290h9J0buyECNNJexkFm5sOajh5G116RYA1c8ZMSw==", + "dev": true, + "license": "MIT" + }, + "node_modules/node-int64": { + "version": "0.4.0", + "resolved": "https://registry.npmjs.org/node-int64/-/node-int64-0.4.0.tgz", + "integrity": "sha512-O5lz91xSOeoXP6DulyHfllpq+Eg00MWitZIbtPfoSEvqIHdl5gfcY6hYzDWnj0qD5tz52PI08u9qUvSVeUBeHw==", + "dev": true, + "license": "MIT" + }, + "node_modules/node-releases": { + "version": "2.0.19", + "resolved": "https://registry.npmjs.org/node-releases/-/node-releases-2.0.19.tgz", + "integrity": "sha512-xxOWJsBKtzAq7DY0J+DTzuz58K8e7sJbdgwkbMWQe8UYB6ekmsQ45q0M/tJDsGaZmbC+l7n57UV8Hl5tHxO9uw==", + "dev": true, + "license": "MIT" + }, + "node_modules/normalize-path": { + "version": "3.0.0", + "resolved": "https://registry.npmjs.org/normalize-path/-/normalize-path-3.0.0.tgz", + "integrity": "sha512-6eZs5Ls3WtCisHWp9S2GUy8dqkpGi4BVSz3GaqiE6ezub0512ESztXUwUB6C6IKbQkY2Pnb/mD4WYojCRwcwLA==", + "dev": true, + "license": "MIT", + "engines": { + "node": ">=0.10.0" + } + }, + "node_modules/npm-run-path": { + "version": "4.0.1", + "resolved": "https://registry.npmjs.org/npm-run-path/-/npm-run-path-4.0.1.tgz", + "integrity": "sha512-S48WzZW777zhNIrn7gxOlISNAqi9ZC/uQFnRdbeIHhZhCA6UqpkOT8T1G7BvfdgP4Er8gF4sUbaS0i7QvIfCWw==", + "dev": true, + "license": "MIT", + "dependencies": { + "path-key": "^3.0.0" + }, + "engines": { + "node": ">=8" + } + }, + "node_modules/once": { + "version": "1.4.0", + "resolved": 
"https://registry.npmjs.org/once/-/once-1.4.0.tgz", + "integrity": "sha512-lNaJgI+2Q5URQBkccEKHTQOPaXdUxnZZElQTZY0MFUAuaEqe1E+Nyvgdz/aIyNi6Z9MzO5dv1H8n58/GELp3+w==", + "dev": true, + "license": "ISC", + "dependencies": { + "wrappy": "1" + } + }, + "node_modules/onetime": { + "version": "5.1.2", + "resolved": "https://registry.npmjs.org/onetime/-/onetime-5.1.2.tgz", + "integrity": "sha512-kbpaSSGJTWdAY5KPVeMOKXSrPtr8C8C7wodJbcsd51jRnmD+GZu8Y0VoU6Dm5Z4vWr0Ig/1NKuWRKf7j5aaYSg==", + "dev": true, + "license": "MIT", + "dependencies": { + "mimic-fn": "^2.1.0" + }, + "engines": { + "node": ">=6" + }, + "funding": { + "url": "https://github.com/sponsors/sindresorhus" + } + }, + "node_modules/p-limit": { + "version": "3.1.0", + "resolved": "https://registry.npmjs.org/p-limit/-/p-limit-3.1.0.tgz", + "integrity": "sha512-TYOanM3wGwNGsZN2cVTYPArw454xnXj5qmWF1bEoAc4+cU/ol7GVh7odevjp1FNHduHc3KZMcFduxU5Xc6uJRQ==", + "dev": true, + "license": "MIT", + "dependencies": { + "yocto-queue": "^0.1.0" + }, + "engines": { + "node": ">=10" + }, + "funding": { + "url": "https://github.com/sponsors/sindresorhus" + } + }, + "node_modules/p-locate": { + "version": "4.1.0", + "resolved": "https://registry.npmjs.org/p-locate/-/p-locate-4.1.0.tgz", + "integrity": "sha512-R79ZZ/0wAxKGu3oYMlz8jy/kbhsNrS7SKZ7PxEHBgJ5+F2mtFW2fK2cOtBh1cHYkQsbzFV7I+EoRKe6Yt0oK7A==", + "dev": true, + "license": "MIT", + "dependencies": { + "p-limit": "^2.2.0" + }, + "engines": { + "node": ">=8" + } + }, + "node_modules/p-locate/node_modules/p-limit": { + "version": "2.3.0", + "resolved": "https://registry.npmjs.org/p-limit/-/p-limit-2.3.0.tgz", + "integrity": "sha512-//88mFWSJx8lxCzwdAABTJL2MyWB12+eIY7MDL2SqLmAkeKU9qxRvWuSyTjm3FUmpBEMuFfckAIqEaVGUDxb6w==", + "dev": true, + "license": "MIT", + "dependencies": { + "p-try": "^2.0.0" + }, + "engines": { + "node": ">=6" + }, + "funding": { + "url": "https://github.com/sponsors/sindresorhus" + } + }, + "node_modules/p-try": { + "version": "2.2.0", + "resolved": 
"https://registry.npmjs.org/p-try/-/p-try-2.2.0.tgz", + "integrity": "sha512-R4nPAVTAU0B9D35/Gk3uJf/7XYbQcyohSKdvAxIRSNghFl4e71hVoGnBNQz9cWaXxO2I10KTC+3jMdvvoKw6dQ==", + "dev": true, + "license": "MIT", + "engines": { + "node": ">=6" + } + }, + "node_modules/parse-json": { + "version": "5.2.0", + "resolved": "https://registry.npmjs.org/parse-json/-/parse-json-5.2.0.tgz", + "integrity": "sha512-ayCKvm/phCGxOkYRSCM82iDwct8/EonSEgCSxWxD7ve6jHggsFl4fZVQBPRNgQoKiuV/odhFrGzQXZwbifC8Rg==", + "dev": true, + "license": "MIT", + "dependencies": { + "@babel/code-frame": "^7.0.0", + "error-ex": "^1.3.1", + "json-parse-even-better-errors": "^2.3.0", + "lines-and-columns": "^1.1.6" + }, + "engines": { + "node": ">=8" + }, + "funding": { + "url": "https://github.com/sponsors/sindresorhus" + } + }, + "node_modules/path-exists": { + "version": "4.0.0", + "resolved": "https://registry.npmjs.org/path-exists/-/path-exists-4.0.0.tgz", + "integrity": "sha512-ak9Qy5Q7jYb2Wwcey5Fpvg2KoAc/ZIhLSLOSBmRmygPsGwkVVt0fZa0qrtMz+m6tJTAHfZQ8FnmB4MG4LWy7/w==", + "dev": true, + "license": "MIT", + "engines": { + "node": ">=8" + } + }, + "node_modules/path-is-absolute": { + "version": "1.0.1", + "resolved": "https://registry.npmjs.org/path-is-absolute/-/path-is-absolute-1.0.1.tgz", + "integrity": "sha512-AVbw3UJ2e9bq64vSaS9Am0fje1Pa8pbGqTTsmXfaIiMpnr5DlDhfJOuLj9Sf95ZPVDAUerDfEk88MPmPe7UCQg==", + "dev": true, + "license": "MIT", + "engines": { + "node": ">=0.10.0" + } + }, + "node_modules/path-key": { + "version": "3.1.1", + "resolved": "https://registry.npmjs.org/path-key/-/path-key-3.1.1.tgz", + "integrity": "sha512-ojmeN0qd+y0jszEtoY48r0Peq5dwMEkIlCOu6Q5f41lfkswXuKtYrhgoTpLnyIcHm24Uhqx+5Tqm2InSwLhE6Q==", + "dev": true, + "license": "MIT", + "engines": { + "node": ">=8" + } + }, + "node_modules/path-parse": { + "version": "1.0.7", + "resolved": "https://registry.npmjs.org/path-parse/-/path-parse-1.0.7.tgz", + "integrity": 
"sha512-LDJzPVEEEPR+y48z93A0Ed0yXb8pAByGWo/k5YYdYgpY2/2EsOsksJrq7lOHxryrVOn1ejG6oAp8ahvOIQD8sw==", + "dev": true, + "license": "MIT" + }, + "node_modules/picocolors": { + "version": "1.1.1", + "resolved": "https://registry.npmjs.org/picocolors/-/picocolors-1.1.1.tgz", + "integrity": "sha512-xceH2snhtb5M9liqDsmEw56le376mTZkEX/jEb/RxNFyegNul7eNslCXP9FDj/Lcu0X8KEyMceP2ntpaHrDEVA==", + "dev": true, + "license": "ISC" + }, + "node_modules/picomatch": { + "version": "2.3.1", + "resolved": "https://registry.npmjs.org/picomatch/-/picomatch-2.3.1.tgz", + "integrity": "sha512-JU3teHTNjmE2VCGFzuY8EXzCDVwEqB2a8fsIvwaStHhAWJEeVd1o1QD80CU6+ZdEXXSLbSsuLwJjkCBWqRQUVA==", + "dev": true, + "license": "MIT", + "engines": { + "node": ">=8.6" + }, + "funding": { + "url": "https://github.com/sponsors/jonschlinkert" + } + }, + "node_modules/pirates": { + "version": "4.0.7", + "resolved": "https://registry.npmjs.org/pirates/-/pirates-4.0.7.tgz", + "integrity": "sha512-TfySrs/5nm8fQJDcBDuUng3VOUKsd7S+zqvbOTiGXHfxX4wK31ard+hoNuvkicM/2YFzlpDgABOevKSsB4G/FA==", + "dev": true, + "license": "MIT", + "engines": { + "node": ">= 6" + } + }, + "node_modules/pkg-dir": { + "version": "4.2.0", + "resolved": "https://registry.npmjs.org/pkg-dir/-/pkg-dir-4.2.0.tgz", + "integrity": "sha512-HRDzbaKjC+AOWVXxAU/x54COGeIv9eb+6CkDSQoNTt4XyWoIJvuPsXizxu/Fr23EiekbtZwmh1IcIG/l/a10GQ==", + "dev": true, + "license": "MIT", + "dependencies": { + "find-up": "^4.0.0" + }, + "engines": { + "node": ">=8" + } + }, + "node_modules/possible-typed-array-names": { + "version": "1.1.0", + "resolved": "https://registry.npmjs.org/possible-typed-array-names/-/possible-typed-array-names-1.1.0.tgz", + "integrity": "sha512-/+5VFTchJDoVj3bhoqi6UeymcD00DAwb1nJwamzPvHEszJ4FpF6SNNbUbOS8yI56qHzdV8eK0qEfOSiodkTdxg==", + "license": "MIT", + "engines": { + "node": ">= 0.4" + } + }, + "node_modules/pretty-format": { + "version": "29.7.0", + "resolved": "https://registry.npmjs.org/pretty-format/-/pretty-format-29.7.0.tgz", + "integrity": 
"sha512-Pdlw/oPxN+aXdmM9R00JVC9WVFoCLTKJvDVLgmJ+qAffBMxsV85l/Lu7sNx4zSzPyoL2euImuEwHhOXdEgNFZQ==", + "dev": true, + "license": "MIT", + "dependencies": { + "@jest/schemas": "^29.6.3", + "ansi-styles": "^5.0.0", + "react-is": "^18.0.0" + }, + "engines": { + "node": "^14.15.0 || ^16.10.0 || >=18.0.0" + } + }, + "node_modules/pretty-format/node_modules/ansi-styles": { + "version": "5.2.0", + "resolved": "https://registry.npmjs.org/ansi-styles/-/ansi-styles-5.2.0.tgz", + "integrity": "sha512-Cxwpt2SfTzTtXcfOlzGEee8O+c+MmUgGrNiBcXnuWxuFJHe6a5Hz7qwhwe5OgaSYI0IJvkLqWX1ASG+cJOkEiA==", + "dev": true, + "license": "MIT", + "engines": { + "node": ">=10" + }, + "funding": { + "url": "https://github.com/chalk/ansi-styles?sponsor=1" + } + }, + "node_modules/prompts": { + "version": "2.4.2", + "resolved": "https://registry.npmjs.org/prompts/-/prompts-2.4.2.tgz", + "integrity": "sha512-NxNv/kLguCA7p3jE8oL2aEBsrJWgAakBpgmgK6lpPWV+WuOmY6r2/zbAVnP+T8bQlA0nzHXSJSJW0Hq7ylaD2Q==", + "dev": true, + "license": "MIT", + "dependencies": { + "kleur": "^3.0.3", + "sisteransi": "^1.0.5" + }, + "engines": { + "node": ">= 6" + } + }, + "node_modules/punycode": { + "version": "1.3.2", + "resolved": "https://registry.npmjs.org/punycode/-/punycode-1.3.2.tgz", + "integrity": "sha512-RofWgt/7fL5wP1Y7fxE7/EmTLzQVnB0ycyibJ0OOHIlJqTNzglYFxVwETOcIoJqJmpDXJ9xImDv+Fq34F/d4Dw==", + "license": "MIT" + }, + "node_modules/pure-rand": { + "version": "6.1.0", + "resolved": "https://registry.npmjs.org/pure-rand/-/pure-rand-6.1.0.tgz", + "integrity": "sha512-bVWawvoZoBYpp6yIoQtQXHZjmz35RSVHnUOTefl8Vcjr8snTPY1wnpSPMWekcFwbxI6gtmT7rSYPFvz71ldiOA==", + "dev": true, + "funding": [ + { + "type": "individual", + "url": "https://github.com/sponsors/dubzzz" + }, + { + "type": "opencollective", + "url": "https://opencollective.com/fast-check" + } + ], + "license": "MIT" + }, + "node_modules/querystring": { + "version": "0.2.0", + "resolved": "https://registry.npmjs.org/querystring/-/querystring-0.2.0.tgz", + "integrity": 
"sha512-X/xY82scca2tau62i9mDyU9K+I+djTMUsvwf7xnUX5GLvVzgJybOJf4Y6o9Zx3oJK/LSXg5tTZBjwzqVPaPO2g==", + "deprecated": "The querystring API is considered Legacy. new code should use the URLSearchParams API instead.", + "engines": { + "node": ">=0.4.x" + } + }, + "node_modules/react-is": { + "version": "18.3.1", + "resolved": "https://registry.npmjs.org/react-is/-/react-is-18.3.1.tgz", + "integrity": "sha512-/LLMVyas0ljjAtoYiPqYiL8VWXzUUdThrmU5+n20DZv+a+ClRoevUzw5JxU+Ieh5/c87ytoTBV9G1FiKfNJdmg==", + "dev": true, + "license": "MIT" + }, + "node_modules/require-directory": { + "version": "2.1.1", + "resolved": "https://registry.npmjs.org/require-directory/-/require-directory-2.1.1.tgz", + "integrity": "sha512-fGxEI7+wsG9xrvdjsrlmL22OMTTiHRwAMroiEeMgq8gzoLC/PQr7RsRDSTLUg/bZAZtF+TVIkHc6/4RIKrui+Q==", + "dev": true, + "license": "MIT", + "engines": { + "node": ">=0.10.0" + } + }, + "node_modules/resolve": { + "version": "1.22.10", + "resolved": "https://registry.npmjs.org/resolve/-/resolve-1.22.10.tgz", + "integrity": "sha512-NPRy+/ncIMeDlTAsuqwKIiferiawhefFJtkNSW0qZJEqMEb+qBt/77B/jGeeek+F0uOeN05CDa6HXbbIgtVX4w==", + "dev": true, + "license": "MIT", + "dependencies": { + "is-core-module": "^2.16.0", + "path-parse": "^1.0.7", + "supports-preserve-symlinks-flag": "^1.0.0" + }, + "bin": { + "resolve": "bin/resolve" + }, + "engines": { + "node": ">= 0.4" + }, + "funding": { + "url": "https://github.com/sponsors/ljharb" + } + }, + "node_modules/resolve-cwd": { + "version": "3.0.0", + "resolved": "https://registry.npmjs.org/resolve-cwd/-/resolve-cwd-3.0.0.tgz", + "integrity": "sha512-OrZaX2Mb+rJCpH/6CpSqt9xFVpN++x01XnN2ie9g6P5/3xelLAkXWVADpdz1IHD/KFfEXyE6V0U01OQ3UO2rEg==", + "dev": true, + "license": "MIT", + "dependencies": { + "resolve-from": "^5.0.0" + }, + "engines": { + "node": ">=8" + } + }, + "node_modules/resolve-from": { + "version": "5.0.0", + "resolved": "https://registry.npmjs.org/resolve-from/-/resolve-from-5.0.0.tgz", + "integrity": 
"sha512-qYg9KP24dD5qka9J47d0aVky0N+b4fTU89LN9iDnjB5waksiC49rvMB0PrUJQGoTmH50XPiqOvAjDfaijGxYZw==", + "dev": true, + "license": "MIT", + "engines": { + "node": ">=8" + } + }, + "node_modules/resolve.exports": { + "version": "2.0.3", + "resolved": "https://registry.npmjs.org/resolve.exports/-/resolve.exports-2.0.3.tgz", + "integrity": "sha512-OcXjMsGdhL4XnbShKpAcSqPMzQoYkYyhbEaeSko47MjRP9NfEQMhZkXL1DoFlt9LWQn4YttrdnV6X2OiyzBi+A==", + "dev": true, + "license": "MIT", + "engines": { + "node": ">=10" + } + }, + "node_modules/safe-regex-test": { + "version": "1.1.0", + "resolved": "https://registry.npmjs.org/safe-regex-test/-/safe-regex-test-1.1.0.tgz", + "integrity": "sha512-x/+Cz4YrimQxQccJf5mKEbIa1NzeCRNI5Ecl/ekmlYaampdNLPalVyIcCZNNH3MvmqBugV5TMYZXv0ljslUlaw==", + "license": "MIT", + "dependencies": { + "call-bound": "^1.0.2", + "es-errors": "^1.3.0", + "is-regex": "^1.2.1" + }, + "engines": { + "node": ">= 0.4" + }, + "funding": { + "url": "https://github.com/sponsors/ljharb" + } + }, + "node_modules/sax": { + "version": "1.2.1", + "resolved": "https://registry.npmjs.org/sax/-/sax-1.2.1.tgz", + "integrity": "sha512-8I2a3LovHTOpm7NV5yOyO8IHqgVsfK4+UuySrXU8YXkSRX7k6hCV9b3HrkKCr3nMpgj+0bmocaJJWpvp1oc7ZA==", + "license": "ISC" + }, + "node_modules/semver": { + "version": "6.3.1", + "resolved": "https://registry.npmjs.org/semver/-/semver-6.3.1.tgz", + "integrity": "sha512-BR7VvDCVHO+q2xBEWskxS6DJE1qRnb7DxzUrogb71CWoSficBxYsiAGd+Kl0mmq/MprG9yArRkyrQxTO6XjMzA==", + "dev": true, + "license": "ISC", + "bin": { + "semver": "bin/semver.js" + } + }, + "node_modules/set-function-length": { + "version": "1.2.2", + "resolved": "https://registry.npmjs.org/set-function-length/-/set-function-length-1.2.2.tgz", + "integrity": "sha512-pgRc4hJ4/sNjWCSS9AmnS40x3bNMDTknHgL5UaMBTMyJnU90EgWh1Rz+MC9eFu4BuN/UwZjKQuY/1v3rM7HMfg==", + "license": "MIT", + "dependencies": { + "define-data-property": "^1.1.4", + "es-errors": "^1.3.0", + "function-bind": "^1.1.2", + "get-intrinsic": "^1.2.4", + 
"gopd": "^1.0.1", + "has-property-descriptors": "^1.0.2" + }, + "engines": { + "node": ">= 0.4" + } + }, + "node_modules/shebang-command": { + "version": "2.0.0", + "resolved": "https://registry.npmjs.org/shebang-command/-/shebang-command-2.0.0.tgz", + "integrity": "sha512-kHxr2zZpYtdmrN1qDjrrX/Z1rR1kG8Dx+gkpK1G4eXmvXswmcE1hTWBWYUzlraYw1/yZp6YuDY77YtvbN0dmDA==", + "dev": true, + "license": "MIT", + "dependencies": { + "shebang-regex": "^3.0.0" + }, + "engines": { + "node": ">=8" + } + }, + "node_modules/shebang-regex": { + "version": "3.0.0", + "resolved": "https://registry.npmjs.org/shebang-regex/-/shebang-regex-3.0.0.tgz", + "integrity": "sha512-7++dFhtcx3353uBaq8DDR4NuxBetBzC7ZQOhmTQInHEd6bSrXdiEyzCvG07Z44UYdLShWUyXt5M/yhz8ekcb1A==", + "dev": true, + "license": "MIT", + "engines": { + "node": ">=8" + } + }, + "node_modules/signal-exit": { + "version": "3.0.7", + "resolved": "https://registry.npmjs.org/signal-exit/-/signal-exit-3.0.7.tgz", + "integrity": "sha512-wnD2ZE+l+SPC/uoS0vXeE9L1+0wuaMqKlfz9AMUo38JsyLSBWSFcHR1Rri62LZc12vLr1gb3jl7iwQhgwpAbGQ==", + "dev": true, + "license": "ISC" + }, + "node_modules/sisteransi": { + "version": "1.0.5", + "resolved": "https://registry.npmjs.org/sisteransi/-/sisteransi-1.0.5.tgz", + "integrity": "sha512-bLGGlR1QxBcynn2d5YmDX4MGjlZvy2MRBDRNHLJ8VI6l6+9FUiyTFNJ0IveOSP0bcXgVDPRcfGqA0pjaqUpfVg==", + "dev": true, + "license": "MIT" + }, + "node_modules/slash": { + "version": "3.0.0", + "resolved": "https://registry.npmjs.org/slash/-/slash-3.0.0.tgz", + "integrity": "sha512-g9Q1haeby36OSStwb4ntCGGGaKsaVSjQ68fBxoQcutl5fS1vuY18H3wSt3jFyFtrkx+Kz0V1G85A4MyAdDMi2Q==", + "dev": true, + "license": "MIT", + "engines": { + "node": ">=8" + } + }, + "node_modules/source-map": { + "version": "0.6.1", + "resolved": "https://registry.npmjs.org/source-map/-/source-map-0.6.1.tgz", + "integrity": "sha512-UjgapumWlbMhkBgzT7Ykc5YXUT46F0iKu8SGXq0bcwP5dz/h0Plj6enJqjz1Zbq2l5WaqYnrVbwWOWMyF3F47g==", + "license": "BSD-3-Clause", + "engines": { + "node": 
">=0.10.0" + } + }, + "node_modules/source-map-support": { + "version": "0.5.21", + "resolved": "https://registry.npmjs.org/source-map-support/-/source-map-support-0.5.21.tgz", + "integrity": "sha512-uBHU3L3czsIyYXKX88fdrGovxdSCoTGDRZ6SYXtSRxLZUzHg5P/66Ht6uoUlHu9EZod+inXhKo3qQgwXUT/y1w==", + "license": "MIT", + "dependencies": { + "buffer-from": "^1.0.0", + "source-map": "^0.6.0" + } + }, + "node_modules/sprintf-js": { + "version": "1.0.3", + "resolved": "https://registry.npmjs.org/sprintf-js/-/sprintf-js-1.0.3.tgz", + "integrity": "sha512-D9cPgkvLlV3t3IzL0D0YLvGA9Ahk4PcvVwUbN0dSGr1aP0Nrt4AEnTUbuGvquEC0mA64Gqt1fzirlRs5ibXx8g==", + "dev": true, + "license": "BSD-3-Clause" + }, + "node_modules/stack-utils": { + "version": "2.0.6", + "resolved": "https://registry.npmjs.org/stack-utils/-/stack-utils-2.0.6.tgz", + "integrity": "sha512-XlkWvfIm6RmsWtNJx+uqtKLS8eqFbxUg0ZzLXqY0caEy9l7hruX8IpiDnjsLavoBgqCCR71TqWO8MaXYheJ3RQ==", + "dev": true, + "license": "MIT", + "dependencies": { + "escape-string-regexp": "^2.0.0" + }, + "engines": { + "node": ">=10" + } + }, + "node_modules/string-length": { + "version": "4.0.2", + "resolved": "https://registry.npmjs.org/string-length/-/string-length-4.0.2.tgz", + "integrity": "sha512-+l6rNN5fYHNhZZy41RXsYptCjA2Igmq4EG7kZAYFQI1E1VTXarr6ZPXBg6eq7Y6eK4FEhY6AJlyuFIb/v/S0VQ==", + "dev": true, + "license": "MIT", + "dependencies": { + "char-regex": "^1.0.2", + "strip-ansi": "^6.0.0" + }, + "engines": { + "node": ">=10" + } + }, + "node_modules/string-width": { + "version": "4.2.3", + "resolved": "https://registry.npmjs.org/string-width/-/string-width-4.2.3.tgz", + "integrity": "sha512-wKyQRQpjJ0sIp62ErSZdGsjMJWsap5oRNihHhu6G7JVO/9jIB6UyevL+tXuOqrng8j/cxKTWyWUwvSTriiZz/g==", + "dev": true, + "license": "MIT", + "dependencies": { + "emoji-regex": "^8.0.0", + "is-fullwidth-code-point": "^3.0.0", + "strip-ansi": "^6.0.1" + }, + "engines": { + "node": ">=8" + } + }, + "node_modules/strip-ansi": { + "version": "6.0.1", + "resolved": 
"https://registry.npmjs.org/strip-ansi/-/strip-ansi-6.0.1.tgz", + "integrity": "sha512-Y38VPSHcqkFrCpFnQ9vuSXmquuv5oXOKpGeT6aGrr3o3Gc9AlVa6JBfUSOCnbxGGZF+/0ooI7KrPuUSztUdU5A==", + "dev": true, + "license": "MIT", + "dependencies": { + "ansi-regex": "^5.0.1" + }, + "engines": { + "node": ">=8" + } + }, + "node_modules/strip-bom": { + "version": "4.0.0", + "resolved": "https://registry.npmjs.org/strip-bom/-/strip-bom-4.0.0.tgz", + "integrity": "sha512-3xurFv5tEgii33Zi8Jtp55wEIILR9eh34FAW00PZf+JnSsTmV/ioewSgQl97JHvgjoRGwPShsWm+IdrxB35d0w==", + "dev": true, + "license": "MIT", + "engines": { + "node": ">=8" + } + }, + "node_modules/strip-final-newline": { + "version": "2.0.0", + "resolved": "https://registry.npmjs.org/strip-final-newline/-/strip-final-newline-2.0.0.tgz", + "integrity": "sha512-BrpvfNAE3dcvq7ll3xVumzjKjZQ5tI1sEUIKr3Uoks0XUl45St3FlatVqef9prk4jRDzhW6WZg+3bk93y6pLjA==", + "dev": true, + "license": "MIT", + "engines": { + "node": ">=6" + } + }, + "node_modules/strip-json-comments": { + "version": "3.1.1", + "resolved": "https://registry.npmjs.org/strip-json-comments/-/strip-json-comments-3.1.1.tgz", + "integrity": "sha512-6fPc+R4ihwqP6N/aIv2f1gMH8lOVtWQHoqC4yK6oSDVVocumAsfCqjkXnqiYMhmMwS/mEHLp7Vehlt3ql6lEig==", + "dev": true, + "license": "MIT", + "engines": { + "node": ">=8" + }, + "funding": { + "url": "https://github.com/sponsors/sindresorhus" + } + }, + "node_modules/supports-color": { + "version": "7.2.0", + "resolved": "https://registry.npmjs.org/supports-color/-/supports-color-7.2.0.tgz", + "integrity": "sha512-qpCAvRl9stuOHveKsn7HncJRvv501qIacKzQlO/+Lwxc9+0q2wLyv4Dfvt80/DPn2pqOBsJdDiogXGR9+OvwRw==", + "dev": true, + "license": "MIT", + "dependencies": { + "has-flag": "^4.0.0" + }, + "engines": { + "node": ">=8" + } + }, + "node_modules/supports-preserve-symlinks-flag": { + "version": "1.0.0", + "resolved": "https://registry.npmjs.org/supports-preserve-symlinks-flag/-/supports-preserve-symlinks-flag-1.0.0.tgz", + "integrity": 
"sha512-ot0WnXS9fgdkgIcePe6RHNk1WA8+muPa6cSjeR3V8K27q9BB1rTE3R1p7Hv0z1ZyAc8s6Vvv8DIyWf681MAt0w==", + "dev": true, + "license": "MIT", + "engines": { + "node": ">= 0.4" + }, + "funding": { + "url": "https://github.com/sponsors/ljharb" + } + }, + "node_modules/test-exclude": { + "version": "6.0.0", + "resolved": "https://registry.npmjs.org/test-exclude/-/test-exclude-6.0.0.tgz", + "integrity": "sha512-cAGWPIyOHU6zlmg88jwm7VRyXnMN7iV68OGAbYDk/Mh/xC/pzVPlQtY6ngoIH/5/tciuhGfvESU8GrHrcxD56w==", + "dev": true, + "license": "ISC", + "dependencies": { + "@istanbuljs/schema": "^0.1.2", + "glob": "^7.1.4", + "minimatch": "^3.0.4" + }, + "engines": { + "node": ">=8" + } + }, + "node_modules/tmpl": { + "version": "1.0.5", + "resolved": "https://registry.npmjs.org/tmpl/-/tmpl-1.0.5.tgz", + "integrity": "sha512-3f0uOEAQwIqGuWW2MVzYg8fV/QNnc/IpuJNG837rLuczAaLVHslWHZQj4IGiEl5Hs3kkbhwL9Ab7Hrsmuj+Smw==", + "dev": true, + "license": "BSD-3-Clause" + }, + "node_modules/to-regex-range": { + "version": "5.0.1", + "resolved": "https://registry.npmjs.org/to-regex-range/-/to-regex-range-5.0.1.tgz", + "integrity": "sha512-65P7iz6X5yEr1cwcgvQxbbIw7Uk3gOy5dIdtZ4rDveLqhrdJP+Li/Hx6tyK0NEb+2GCyneCMJiGqrADCSNk8sQ==", + "dev": true, + "license": "MIT", + "dependencies": { + "is-number": "^7.0.0" + }, + "engines": { + "node": ">=8.0" + } + }, + "node_modules/ts-jest": { + "version": "29.3.2", + "resolved": "https://registry.npmjs.org/ts-jest/-/ts-jest-29.3.2.tgz", + "integrity": "sha512-bJJkrWc6PjFVz5g2DGCNUo8z7oFEYaz1xP1NpeDU7KNLMWPpEyV8Chbpkn8xjzgRDpQhnGMyvyldoL7h8JXyug==", + "dev": true, + "license": "MIT", + "dependencies": { + "bs-logger": "^0.2.6", + "ejs": "^3.1.10", + "fast-json-stable-stringify": "^2.1.0", + "jest-util": "^29.0.0", + "json5": "^2.2.3", + "lodash.memoize": "^4.1.2", + "make-error": "^1.3.6", + "semver": "^7.7.1", + "type-fest": "^4.39.1", + "yargs-parser": "^21.1.1" + }, + "bin": { + "ts-jest": "cli.js" + }, + "engines": { + "node": "^14.15.0 || ^16.10.0 || ^18.0.0 || 
>=20.0.0" + }, + "peerDependencies": { + "@babel/core": ">=7.0.0-beta.0 <8", + "@jest/transform": "^29.0.0", + "@jest/types": "^29.0.0", + "babel-jest": "^29.0.0", + "jest": "^29.0.0", + "typescript": ">=4.3 <6" + }, + "peerDependenciesMeta": { + "@babel/core": { + "optional": true + }, + "@jest/transform": { + "optional": true + }, + "@jest/types": { + "optional": true + }, + "babel-jest": { + "optional": true + }, + "esbuild": { + "optional": true + } + } + }, + "node_modules/ts-jest/node_modules/semver": { + "version": "7.7.1", + "resolved": "https://registry.npmjs.org/semver/-/semver-7.7.1.tgz", + "integrity": "sha512-hlq8tAfn0m/61p4BVRcPzIGr6LKiMwo4VM6dGi6pt4qcRkmNzTcWq6eCEjEh+qXjkMDvPlOFFSGwQjoEa6gyMA==", + "dev": true, + "license": "ISC", + "bin": { + "semver": "bin/semver.js" + }, + "engines": { + "node": ">=10" + } + }, + "node_modules/ts-jest/node_modules/type-fest": { + "version": "4.40.0", + "resolved": "https://registry.npmjs.org/type-fest/-/type-fest-4.40.0.tgz", + "integrity": "sha512-ABHZ2/tS2JkvH1PEjxFDTUWC8dB5OsIGZP4IFLhR293GqT5Y5qB1WwL2kMPYhQW9DVgVD8Hd7I8gjwPIf5GFkw==", + "dev": true, + "license": "(MIT OR CC0-1.0)", + "engines": { + "node": ">=16" + }, + "funding": { + "url": "https://github.com/sponsors/sindresorhus" + } + }, + "node_modules/ts-node": { + "version": "10.9.2", + "resolved": "https://registry.npmjs.org/ts-node/-/ts-node-10.9.2.tgz", + "integrity": "sha512-f0FFpIdcHgn8zcPSbf1dRevwt047YMnaiJM3u2w2RewrB+fob/zePZcrOyQoLMMO7aBIddLcQIEK5dYjkLnGrQ==", + "dev": true, + "license": "MIT", + "dependencies": { + "@cspotcode/source-map-support": "^0.8.0", + "@tsconfig/node10": "^1.0.7", + "@tsconfig/node12": "^1.0.7", + "@tsconfig/node14": "^1.0.0", + "@tsconfig/node16": "^1.0.2", + "acorn": "^8.4.1", + "acorn-walk": "^8.1.1", + "arg": "^4.1.0", + "create-require": "^1.1.0", + "diff": "^4.0.1", + "make-error": "^1.1.1", + "v8-compile-cache-lib": "^3.0.1", + "yn": "3.1.1" + }, + "bin": { + "ts-node": "dist/bin.js", + "ts-node-cwd": 
"dist/bin-cwd.js", + "ts-node-esm": "dist/bin-esm.js", + "ts-node-script": "dist/bin-script.js", + "ts-node-transpile-only": "dist/bin-transpile.js", + "ts-script": "dist/bin-script-deprecated.js" + }, + "peerDependencies": { + "@swc/core": ">=1.2.50", + "@swc/wasm": ">=1.2.50", + "@types/node": "*", + "typescript": ">=2.7" + }, + "peerDependenciesMeta": { + "@swc/core": { + "optional": true + }, + "@swc/wasm": { + "optional": true + } + } + }, + "node_modules/type-detect": { + "version": "4.0.8", + "resolved": "https://registry.npmjs.org/type-detect/-/type-detect-4.0.8.tgz", + "integrity": "sha512-0fr/mIH1dlO+x7TlcMy+bIDqKPsw/70tVyeHW787goQjhmqaZe10uwLujubK9q9Lg6Fiho1KUKDYz0Z7k7g5/g==", + "dev": true, + "license": "MIT", + "engines": { + "node": ">=4" + } + }, + "node_modules/type-fest": { + "version": "0.21.3", + "resolved": "https://registry.npmjs.org/type-fest/-/type-fest-0.21.3.tgz", + "integrity": "sha512-t0rzBq87m3fVcduHDUFhKmyyX+9eo6WQjZvf51Ea/M0Q7+T374Jp1aUiyUl0GKxp8M/OETVHSDvmkyPgvX+X2w==", + "dev": true, + "license": "(MIT OR CC0-1.0)", + "engines": { + "node": ">=10" + }, + "funding": { + "url": "https://github.com/sponsors/sindresorhus" + } + }, + "node_modules/typescript": { + "version": "5.6.3", + "resolved": "https://registry.npmjs.org/typescript/-/typescript-5.6.3.tgz", + "integrity": "sha512-hjcS1mhfuyi4WW8IWtjP7brDrG2cuDZukyrYrSauoXGNgx0S7zceP07adYkJycEr56BOUTNPzbInooiN3fn1qw==", + "dev": true, + "license": "Apache-2.0", + "bin": { + "tsc": "bin/tsc", + "tsserver": "bin/tsserver" + }, + "engines": { + "node": ">=14.17" + } + }, + "node_modules/undici-types": { + "version": "6.19.8", + "resolved": "https://registry.npmjs.org/undici-types/-/undici-types-6.19.8.tgz", + "integrity": "sha512-ve2KP6f/JnbPBFyobGHuerC9g1FYGn/F8n1LWTwNxCEzd6IfqTwUQcNXgEtmmQ6DlRrC1hrSrBnCZPokRrDHjw==", + "dev": true, + "license": "MIT" + }, + "node_modules/update-browserslist-db": { + "version": "1.1.3", + "resolved": 
"https://registry.npmjs.org/update-browserslist-db/-/update-browserslist-db-1.1.3.tgz", + "integrity": "sha512-UxhIZQ+QInVdunkDAaiazvvT/+fXL5Osr0JZlJulepYu6Jd7qJtDZjlur0emRlT71EN3ScPoE7gvsuIKKNavKw==", + "dev": true, + "funding": [ + { + "type": "opencollective", + "url": "https://opencollective.com/browserslist" + }, + { + "type": "tidelift", + "url": "https://tidelift.com/funding/github/npm/browserslist" + }, + { + "type": "github", + "url": "https://github.com/sponsors/ai" + } + ], + "license": "MIT", + "dependencies": { + "escalade": "^3.2.0", + "picocolors": "^1.1.1" + }, + "bin": { + "update-browserslist-db": "cli.js" + }, + "peerDependencies": { + "browserslist": ">= 4.21.0" + } + }, + "node_modules/url": { + "version": "0.10.3", + "resolved": "https://registry.npmjs.org/url/-/url-0.10.3.tgz", + "integrity": "sha512-hzSUW2q06EqL1gKM/a+obYHLIO6ct2hwPuviqTTOcfFVc61UbfJ2Q32+uGL/HCPxKqrdGB5QUwIe7UqlDgwsOQ==", + "license": "MIT", + "dependencies": { + "punycode": "1.3.2", + "querystring": "0.2.0" + } + }, + "node_modules/util": { + "version": "0.12.5", + "resolved": "https://registry.npmjs.org/util/-/util-0.12.5.tgz", + "integrity": "sha512-kZf/K6hEIrWHI6XqOFUiiMa+79wE/D8Q+NCNAWclkyg3b4d2k7s0QGepNjiABc+aR3N1PAyHL7p6UcLY6LmrnA==", + "license": "MIT", + "dependencies": { + "inherits": "^2.0.3", + "is-arguments": "^1.0.4", + "is-generator-function": "^1.0.7", + "is-typed-array": "^1.1.3", + "which-typed-array": "^1.1.2" + } + }, + "node_modules/uuid": { + "version": "8.0.0", + "resolved": "https://registry.npmjs.org/uuid/-/uuid-8.0.0.tgz", + "integrity": "sha512-jOXGuXZAWdsTH7eZLtyXMqUb9EcWMGZNbL9YcGBJl4MH4nrxHmZJhEHvyLFrkxo+28uLb/NYRcStH48fnD0Vzw==", + "license": "MIT", + "bin": { + "uuid": "dist/bin/uuid" + } + }, + "node_modules/v8-compile-cache-lib": { + "version": "3.0.1", + "resolved": "https://registry.npmjs.org/v8-compile-cache-lib/-/v8-compile-cache-lib-3.0.1.tgz", + "integrity": 
"sha512-wa7YjyUGfNZngI/vtK0UHAN+lgDCxBPCylVXGp0zu59Fz5aiGtNXaq3DhIov063MorB+VfufLh3JlF2KdTK3xg==", + "dev": true, + "license": "MIT" + }, + "node_modules/v8-to-istanbul": { + "version": "9.3.0", + "resolved": "https://registry.npmjs.org/v8-to-istanbul/-/v8-to-istanbul-9.3.0.tgz", + "integrity": "sha512-kiGUalWN+rgBJ/1OHZsBtU4rXZOfj/7rKQxULKlIzwzQSvMJUUNgPwJEEh7gU6xEVxC0ahoOBvN2YI8GH6FNgA==", + "dev": true, + "license": "ISC", + "dependencies": { + "@jridgewell/trace-mapping": "^0.3.12", + "@types/istanbul-lib-coverage": "^2.0.1", + "convert-source-map": "^2.0.0" + }, + "engines": { + "node": ">=10.12.0" + } + }, + "node_modules/walker": { + "version": "1.0.8", + "resolved": "https://registry.npmjs.org/walker/-/walker-1.0.8.tgz", + "integrity": "sha512-ts/8E8l5b7kY0vlWLewOkDXMmPdLcVV4GmOQLyxuSswIJsweeFZtAsMF7k1Nszz+TYBQrlYRmzOnr398y1JemQ==", + "dev": true, + "license": "Apache-2.0", + "dependencies": { + "makeerror": "1.0.12" + } + }, + "node_modules/which": { + "version": "2.0.2", + "resolved": "https://registry.npmjs.org/which/-/which-2.0.2.tgz", + "integrity": "sha512-BLI3Tl1TW3Pvl70l3yq3Y64i+awpwXqsGBYWkkqMtnbXgrMD+yj7rhW0kuEDxzJaYXGjEW5ogapKNMEKNMjibA==", + "dev": true, + "license": "ISC", + "dependencies": { + "isexe": "^2.0.0" + }, + "bin": { + "node-which": "bin/node-which" + }, + "engines": { + "node": ">= 8" + } + }, + "node_modules/which-typed-array": { + "version": "1.1.19", + "resolved": "https://registry.npmjs.org/which-typed-array/-/which-typed-array-1.1.19.tgz", + "integrity": "sha512-rEvr90Bck4WZt9HHFC4DJMsjvu7x+r6bImz0/BrbWb7A2djJ8hnZMrWnHo9F8ssv0OMErasDhftrfROTyqSDrw==", + "license": "MIT", + "dependencies": { + "available-typed-arrays": "^1.0.7", + "call-bind": "^1.0.8", + "call-bound": "^1.0.4", + "for-each": "^0.3.5", + "get-proto": "^1.0.1", + "gopd": "^1.2.0", + "has-tostringtag": "^1.0.2" + }, + "engines": { + "node": ">= 0.4" + }, + "funding": { + "url": "https://github.com/sponsors/ljharb" + } + }, + "node_modules/wrap-ansi": { + 
"version": "7.0.0", + "resolved": "https://registry.npmjs.org/wrap-ansi/-/wrap-ansi-7.0.0.tgz", + "integrity": "sha512-YVGIj2kamLSTxw6NsZjoBxfSwsn0ycdesmc4p+Q21c5zPuZ1pl+NfxVdxPtdHvmNVOQ6XSYG4AUtyt/Fi7D16Q==", + "dev": true, + "license": "MIT", + "dependencies": { + "ansi-styles": "^4.0.0", + "string-width": "^4.1.0", + "strip-ansi": "^6.0.0" + }, + "engines": { + "node": ">=10" + }, + "funding": { + "url": "https://github.com/chalk/wrap-ansi?sponsor=1" + } + }, + "node_modules/wrappy": { + "version": "1.0.2", + "resolved": "https://registry.npmjs.org/wrappy/-/wrappy-1.0.2.tgz", + "integrity": "sha512-l4Sp/DRseor9wL6EvV2+TuQn63dMkPjZ/sp9XkghTEbV9KlPS1xUsZ3u7/IQO4wxtcFB4bgpQPRcR3QCvezPcQ==", + "dev": true, + "license": "ISC" + }, + "node_modules/write-file-atomic": { + "version": "4.0.2", + "resolved": "https://registry.npmjs.org/write-file-atomic/-/write-file-atomic-4.0.2.tgz", + "integrity": "sha512-7KxauUdBmSdWnmpaGFg+ppNjKF8uNLry8LyzjauQDOVONfFLNKrKvQOxZ/VuTIcS/gge/YNahf5RIIQWTSarlg==", + "dev": true, + "license": "ISC", + "dependencies": { + "imurmurhash": "^0.1.4", + "signal-exit": "^3.0.7" + }, + "engines": { + "node": "^12.13.0 || ^14.15.0 || >=16.0.0" + } + }, + "node_modules/xml2js": { + "version": "0.6.2", + "resolved": "https://registry.npmjs.org/xml2js/-/xml2js-0.6.2.tgz", + "integrity": "sha512-T4rieHaC1EXcES0Kxxj4JWgaUQHDk+qwHcYOCFHfiwKz7tOVPLq7Hjq9dM1WCMhylqMEfP7hMcOIChvotiZegA==", + "license": "MIT", + "dependencies": { + "sax": ">=0.6.0", + "xmlbuilder": "~11.0.0" + }, + "engines": { + "node": ">=4.0.0" + } + }, + "node_modules/xmlbuilder": { + "version": "11.0.1", + "resolved": "https://registry.npmjs.org/xmlbuilder/-/xmlbuilder-11.0.1.tgz", + "integrity": "sha512-fDlsI/kFEx7gLvbecc0/ohLG50fugQp8ryHzMTuW9vSa1GJ0XYWKnhsUx7oie3G98+r56aTQIUB4kht42R3JvA==", + "license": "MIT", + "engines": { + "node": ">=4.0" + } + }, + "node_modules/y18n": { + "version": "5.0.8", + "resolved": "https://registry.npmjs.org/y18n/-/y18n-5.0.8.tgz", + "integrity": 
"sha512-0pfFzegeDWJHJIAmTLRP2DwHjdF5s7jo9tuztdQxAhINCdvS+3nGINqPd00AphqJR/0LhANUS6/+7SCb98YOfA==", + "dev": true, + "license": "ISC", + "engines": { + "node": ">=10" + } + }, + "node_modules/yallist": { + "version": "3.1.1", + "resolved": "https://registry.npmjs.org/yallist/-/yallist-3.1.1.tgz", + "integrity": "sha512-a4UGQaWPH59mOXUYnAG2ewncQS4i4F43Tv3JoAM+s2VDAmS9NsK8GpDMLrCHPksFT7h3K6TOoUNn2pb7RoXx4g==", + "dev": true, + "license": "ISC" + }, + "node_modules/yargs": { + "version": "17.7.2", + "resolved": "https://registry.npmjs.org/yargs/-/yargs-17.7.2.tgz", + "integrity": "sha512-7dSzzRQ++CKnNI/krKnYRV7JKKPUXMEh61soaHKg9mrWEhzFWhFnxPxGl+69cD1Ou63C13NUPCnmIcrvqCuM6w==", + "dev": true, + "license": "MIT", + "dependencies": { + "cliui": "^8.0.1", + "escalade": "^3.1.1", + "get-caller-file": "^2.0.5", + "require-directory": "^2.1.1", + "string-width": "^4.2.3", + "y18n": "^5.0.5", + "yargs-parser": "^21.1.1" + }, + "engines": { + "node": ">=12" + } + }, + "node_modules/yargs-parser": { + "version": "21.1.1", + "resolved": "https://registry.npmjs.org/yargs-parser/-/yargs-parser-21.1.1.tgz", + "integrity": "sha512-tVpsJW7DdjecAiFpbIB1e3qxIQsE6NoPc5/eTdrbbIC4h0LVsWhnoa3g+m2HclBIujHzsxZ4VJVA+GUuc2/LBw==", + "dev": true, + "license": "ISC", + "engines": { + "node": ">=12" + } + }, + "node_modules/yn": { + "version": "3.1.1", + "resolved": "https://registry.npmjs.org/yn/-/yn-3.1.1.tgz", + "integrity": "sha512-Ux4ygGWsu2c7isFWe8Yu1YluJmqVhxqK2cLXNQA5AcC3QfbGNpM7fu0Y8b/z16pXLnFxZYvWhd3fhBY9DLmC6Q==", + "dev": true, + "license": "MIT", + "engines": { + "node": ">=6" + } + }, + "node_modules/yocto-queue": { + "version": "0.1.0", + "resolved": "https://registry.npmjs.org/yocto-queue/-/yocto-queue-0.1.0.tgz", + "integrity": "sha512-rVksvsnNCdJ/ohGc6xgPwyN8eheCxsiLM8mxuE/t/mOVqJewPuO1miLpTHQiRgTKCLexL4MeAFVagts7HmNZ2Q==", + "dev": true, + "license": "MIT", + "engines": { + "node": ">=10" + }, + "funding": { + "url": "https://github.com/sponsors/sindresorhus" + } + } + } +} diff 
--git a/infra/package.json b/infra/package.json new file mode 100644 index 0000000..5f3ec3a --- /dev/null +++ b/infra/package.json @@ -0,0 +1,34 @@ +{ + "name": "infrastructure", + "version": "0.1.0", + "bin": { + "infrastructure": "bin/infrastructure.js" + }, + "scripts": { + "build": "tsc", + "build:lambdas": "tsc -p tsconfig.json --outDir lambdas/dist", + "watch": "tsc -w", + "test": "jest", + "cdk": "cdk", + "synth": "cdk synth", + "deploy": "cdk deploy --require-approval never", + "bootstrap": "cdk bootstrap" + }, + "devDependencies": { + "@types/aws-lambda": "^8.10.149", + "@types/aws-sdk": "^0.0.42", + "@types/jest": "^29.5.12", + "@types/node": "^22.5.4", + "aws-cdk": "2.163.1", + "jest": "^29.7.0", + "ts-jest": "^29.2.5", + "ts-node": "^10.9.2", + "typescript": "~5.6.2" + }, + "dependencies": { + "aws-cdk-lib": "^2.163.1", + "aws-sdk": "^2.1692.0", + "constructs": "^10.4.2", + "source-map-support": "^0.5.21" + } +} diff --git a/infra/schema.graphql b/infra/schema.graphql new file mode 100644 index 0000000..03699db --- /dev/null +++ b/infra/schema.graphql @@ -0,0 +1,23 @@ +type Document { + id: ID! + title: String + content: String! + updatedAt: AWSDateTime! +} + +type DocumentDownloadPayload { + document: Document! + content: String! + title: String! +} + +type Query { + # list all documents + fetchDocuments: [Document!]! +} + +type Mutation { + deleteDocument(id: ID!): Document + downloadDocument(id: ID!): DocumentDownloadPayload + fetchDocuments: [Document] +} \ No newline at end of file diff --git a/infra/terraform_security/instructions.txt b/infra/terraform_security/instructions.txt index dcf2948..b8a36dc 100644 --- a/infra/terraform_security/instructions.txt +++ b/infra/terraform_security/instructions.txt @@ -1,4 +1,4 @@ -1. install aws cli and terraform +*1. install aws cli and terraform 2. 
run aws configure and set up keys to authenticate diff --git a/infra/test/infrastructure.test.ts b/infra/test/infrastructure.test.ts new file mode 100644 index 0000000..bed3988 --- /dev/null +++ b/infra/test/infrastructure.test.ts @@ -0,0 +1,17 @@ +// import * as cdk from 'aws-cdk-lib'; +// import { Template } from 'aws-cdk-lib/assertions'; +// import * as Infrastructure from '../lib/infrastructure-stack'; + +// example test. To run these tests, uncomment this file along with the +// example resource in lib/infrastructure-stack.ts +test('SQS Queue Created', () => { +// const app = new cdk.App(); +// // WHEN +// const stack = new Infrastructure.InfrastructureStack(app, 'MyTestStack'); +// // THEN +// const template = Template.fromStack(stack); + +// template.hasResourceProperties('AWS::SQS::Queue', { +// VisibilityTimeout: 300 +// }); +}); diff --git a/infra/tsconfig.json b/infra/tsconfig.json new file mode 100644 index 0000000..4604fa9 --- /dev/null +++ b/infra/tsconfig.json @@ -0,0 +1,21 @@ +{ + "compilerOptions": { + "target": "ES2020", + "module": "commonjs", + "esModuleInterop": true, + "moduleResolution": "node", + "allowJs": true, + "strict": true, + "inlineSourceMap": true, + "inlineSources": true + }, + "include": [ + "bin/**/*.ts", + "lib/**/*.ts", + "lambdas/**/*.ts" + ], + "exclude": [ + "node_modules", + "cdk.out" + ] +} diff --git a/lib/amplifyClient.ts b/lib/amplifyClient.ts index f5cc229..78dac3d 100644 --- a/lib/amplifyClient.ts +++ b/lib/amplifyClient.ts @@ -1,4 +1,4 @@ -import { Amplify } from 'aws-amplify'; -import config from '@/amplify_outputs.json'; +import { Amplify } from "aws-amplify"; +import config from "../amplify/amplify_outputs.json"; -Amplify.configure(config, { ssr: true }); +Amplify.configure(config, { ssr: true }); \ No newline at end of file diff --git a/package-lock.json b/package-lock.json index 8350cd9..5019bb8 100644 --- a/package-lock.json +++ b/package-lock.json @@ -9,6 +9,8 @@ "version": "0.1.0", "dependencies": { 
"@aws-amplify/ui-react": "^6.5.5", + "@aws-sdk/client-dynamodb": "^3.799.0", + "@aws-sdk/lib-dynamodb": "^3.799.0", "aws-amplify": "^6.6.6", "bootstrap": "^5.3.5", "next": "14.2.10", @@ -25,8 +27,9 @@ "aws-cdk-lib": "^2", "constructs": "^10.3.0", "esbuild": "^0.23.1", + "ts-node": "^10.9.2", "tsx": "^4.19.0", - "typescript": "^5.6.2" + "typescript": "^5.8.3" } }, "node_modules/@ampproject/remapping": { @@ -9111,628 +9114,983 @@ "node": ">=16.0.0" } }, - "node_modules/@aws-sdk/client-ec2": { - "version": "3.624.0", - "resolved": "https://registry.npmjs.org/@aws-sdk/client-ec2/-/client-ec2-3.624.0.tgz", - "integrity": "sha512-n3IHWiNSP5Cj0ZbENJGtDeJPsx6EVNMeePh8Nqe9Ja5l5/Brkdyu4TV6t/taPXHJQDH7E6cq4/uMiiEPRNuf6Q==", - "dev": true, + "node_modules/@aws-sdk/client-dynamodb": { + "version": "3.799.0", + "resolved": "https://registry.npmjs.org/@aws-sdk/client-dynamodb/-/client-dynamodb-3.799.0.tgz", + "integrity": "sha512-EPUxhG5Kk5bs5P0Lnv97i5mUb8e6b3jokbOnElrEQxnGd+1uZFM0X+3w7IjVNrLR4nAeFE4+k+h4p44YOAFNKg==", "dependencies": { "@aws-crypto/sha256-browser": "5.2.0", "@aws-crypto/sha256-js": "5.2.0", - "@aws-sdk/client-sso-oidc": "3.624.0", - "@aws-sdk/client-sts": "3.624.0", - "@aws-sdk/core": "3.624.0", - "@aws-sdk/credential-provider-node": "3.624.0", - "@aws-sdk/middleware-host-header": "3.620.0", - "@aws-sdk/middleware-logger": "3.609.0", - "@aws-sdk/middleware-recursion-detection": "3.620.0", - "@aws-sdk/middleware-sdk-ec2": "3.622.0", - "@aws-sdk/middleware-user-agent": "3.620.0", - "@aws-sdk/region-config-resolver": "3.614.0", - "@aws-sdk/types": "3.609.0", - "@aws-sdk/util-endpoints": "3.614.0", - "@aws-sdk/util-user-agent-browser": "3.609.0", - "@aws-sdk/util-user-agent-node": "3.614.0", - "@smithy/config-resolver": "^3.0.5", - "@smithy/core": "^2.3.2", - "@smithy/fetch-http-handler": "^3.2.4", - "@smithy/hash-node": "^3.0.3", - "@smithy/invalid-dependency": "^3.0.3", - "@smithy/middleware-content-length": "^3.0.5", - "@smithy/middleware-endpoint": "^3.1.0", - 
"@smithy/middleware-retry": "^3.0.14", - "@smithy/middleware-serde": "^3.0.3", - "@smithy/middleware-stack": "^3.0.3", - "@smithy/node-config-provider": "^3.1.4", - "@smithy/node-http-handler": "^3.1.4", - "@smithy/protocol-http": "^4.1.0", - "@smithy/smithy-client": "^3.1.12", - "@smithy/types": "^3.3.0", - "@smithy/url-parser": "^3.0.3", - "@smithy/util-base64": "^3.0.0", - "@smithy/util-body-length-browser": "^3.0.0", - "@smithy/util-body-length-node": "^3.0.0", - "@smithy/util-defaults-mode-browser": "^3.0.14", - "@smithy/util-defaults-mode-node": "^3.0.14", - "@smithy/util-endpoints": "^2.0.5", - "@smithy/util-middleware": "^3.0.3", - "@smithy/util-retry": "^3.0.3", - "@smithy/util-utf8": "^3.0.0", - "@smithy/util-waiter": "^3.1.2", + "@aws-sdk/core": "3.799.0", + "@aws-sdk/credential-provider-node": "3.799.0", + "@aws-sdk/middleware-endpoint-discovery": "3.775.0", + "@aws-sdk/middleware-host-header": "3.775.0", + "@aws-sdk/middleware-logger": "3.775.0", + "@aws-sdk/middleware-recursion-detection": "3.775.0", + "@aws-sdk/middleware-user-agent": "3.799.0", + "@aws-sdk/region-config-resolver": "3.775.0", + "@aws-sdk/types": "3.775.0", + "@aws-sdk/util-endpoints": "3.787.0", + "@aws-sdk/util-user-agent-browser": "3.775.0", + "@aws-sdk/util-user-agent-node": "3.799.0", + "@smithy/config-resolver": "^4.1.0", + "@smithy/core": "^3.3.0", + "@smithy/fetch-http-handler": "^5.0.2", + "@smithy/hash-node": "^4.0.2", + "@smithy/invalid-dependency": "^4.0.2", + "@smithy/middleware-content-length": "^4.0.2", + "@smithy/middleware-endpoint": "^4.1.1", + "@smithy/middleware-retry": "^4.1.1", + "@smithy/middleware-serde": "^4.0.3", + "@smithy/middleware-stack": "^4.0.2", + "@smithy/node-config-provider": "^4.0.2", + "@smithy/node-http-handler": "^4.0.4", + "@smithy/protocol-http": "^5.1.0", + "@smithy/smithy-client": "^4.2.1", + "@smithy/types": "^4.2.0", + "@smithy/url-parser": "^4.0.2", + "@smithy/util-base64": "^4.0.0", + "@smithy/util-body-length-browser": "^4.0.0", + 
"@smithy/util-body-length-node": "^4.0.0", + "@smithy/util-defaults-mode-browser": "^4.0.9", + "@smithy/util-defaults-mode-node": "^4.0.9", + "@smithy/util-endpoints": "^3.0.2", + "@smithy/util-middleware": "^4.0.2", + "@smithy/util-retry": "^4.0.2", + "@smithy/util-utf8": "^4.0.0", + "@smithy/util-waiter": "^4.0.3", + "@types/uuid": "^9.0.1", "tslib": "^2.6.2", "uuid": "^9.0.1" }, "engines": { - "node": ">=16.0.0" + "node": ">=18.0.0" } }, - "node_modules/@aws-sdk/client-ec2/node_modules/@aws-sdk/client-sso": { - "version": "3.624.0", - "resolved": "https://registry.npmjs.org/@aws-sdk/client-sso/-/client-sso-3.624.0.tgz", - "integrity": "sha512-EX6EF+rJzMPC5dcdsu40xSi2To7GSvdGQNIpe97pD9WvZwM9tRNQnNM4T6HA4gjV1L6Jwk8rBlG/CnveXtLEMw==", - "dev": true, + "node_modules/@aws-sdk/client-dynamodb/node_modules/@aws-sdk/client-sso": { + "version": "3.799.0", + "resolved": "https://registry.npmjs.org/@aws-sdk/client-sso/-/client-sso-3.799.0.tgz", + "integrity": "sha512-/i/LG7AiWPmPxKCA2jnR2zaf7B3HYSTbxaZI21ElIz9wASlNAsKr8CnLY7qb50kOyXiNfQ834S5Q3Gl8dX9o3Q==", "dependencies": { "@aws-crypto/sha256-browser": "5.2.0", "@aws-crypto/sha256-js": "5.2.0", - "@aws-sdk/core": "3.624.0", - "@aws-sdk/middleware-host-header": "3.620.0", - "@aws-sdk/middleware-logger": "3.609.0", - "@aws-sdk/middleware-recursion-detection": "3.620.0", - "@aws-sdk/middleware-user-agent": "3.620.0", - "@aws-sdk/region-config-resolver": "3.614.0", - "@aws-sdk/types": "3.609.0", - "@aws-sdk/util-endpoints": "3.614.0", - "@aws-sdk/util-user-agent-browser": "3.609.0", - "@aws-sdk/util-user-agent-node": "3.614.0", - "@smithy/config-resolver": "^3.0.5", - "@smithy/core": "^2.3.2", - "@smithy/fetch-http-handler": "^3.2.4", - "@smithy/hash-node": "^3.0.3", - "@smithy/invalid-dependency": "^3.0.3", - "@smithy/middleware-content-length": "^3.0.5", - "@smithy/middleware-endpoint": "^3.1.0", - "@smithy/middleware-retry": "^3.0.14", - "@smithy/middleware-serde": "^3.0.3", - "@smithy/middleware-stack": "^3.0.3", - 
"@smithy/node-config-provider": "^3.1.4", - "@smithy/node-http-handler": "^3.1.4", - "@smithy/protocol-http": "^4.1.0", - "@smithy/smithy-client": "^3.1.12", - "@smithy/types": "^3.3.0", - "@smithy/url-parser": "^3.0.3", - "@smithy/util-base64": "^3.0.0", - "@smithy/util-body-length-browser": "^3.0.0", - "@smithy/util-body-length-node": "^3.0.0", - "@smithy/util-defaults-mode-browser": "^3.0.14", - "@smithy/util-defaults-mode-node": "^3.0.14", - "@smithy/util-endpoints": "^2.0.5", - "@smithy/util-middleware": "^3.0.3", - "@smithy/util-retry": "^3.0.3", - "@smithy/util-utf8": "^3.0.0", + "@aws-sdk/core": "3.799.0", + "@aws-sdk/middleware-host-header": "3.775.0", + "@aws-sdk/middleware-logger": "3.775.0", + "@aws-sdk/middleware-recursion-detection": "3.775.0", + "@aws-sdk/middleware-user-agent": "3.799.0", + "@aws-sdk/region-config-resolver": "3.775.0", + "@aws-sdk/types": "3.775.0", + "@aws-sdk/util-endpoints": "3.787.0", + "@aws-sdk/util-user-agent-browser": "3.775.0", + "@aws-sdk/util-user-agent-node": "3.799.0", + "@smithy/config-resolver": "^4.1.0", + "@smithy/core": "^3.3.0", + "@smithy/fetch-http-handler": "^5.0.2", + "@smithy/hash-node": "^4.0.2", + "@smithy/invalid-dependency": "^4.0.2", + "@smithy/middleware-content-length": "^4.0.2", + "@smithy/middleware-endpoint": "^4.1.1", + "@smithy/middleware-retry": "^4.1.1", + "@smithy/middleware-serde": "^4.0.3", + "@smithy/middleware-stack": "^4.0.2", + "@smithy/node-config-provider": "^4.0.2", + "@smithy/node-http-handler": "^4.0.4", + "@smithy/protocol-http": "^5.1.0", + "@smithy/smithy-client": "^4.2.1", + "@smithy/types": "^4.2.0", + "@smithy/url-parser": "^4.0.2", + "@smithy/util-base64": "^4.0.0", + "@smithy/util-body-length-browser": "^4.0.0", + "@smithy/util-body-length-node": "^4.0.0", + "@smithy/util-defaults-mode-browser": "^4.0.9", + "@smithy/util-defaults-mode-node": "^4.0.9", + "@smithy/util-endpoints": "^3.0.2", + "@smithy/util-middleware": "^4.0.2", + "@smithy/util-retry": "^4.0.2", + 
"@smithy/util-utf8": "^4.0.0", "tslib": "^2.6.2" }, "engines": { - "node": ">=16.0.0" + "node": ">=18.0.0" } }, - "node_modules/@aws-sdk/client-ec2/node_modules/@aws-sdk/client-sso-oidc": { - "version": "3.624.0", - "resolved": "https://registry.npmjs.org/@aws-sdk/client-sso-oidc/-/client-sso-oidc-3.624.0.tgz", - "integrity": "sha512-Ki2uKYJKKtfHxxZsiMTOvJoVRP6b2pZ1u3rcUb2m/nVgBPUfLdl8ZkGpqE29I+t5/QaS/sEdbn6cgMUZwl+3Dg==", - "dev": true, - "dependencies": { - "@aws-crypto/sha256-browser": "5.2.0", - "@aws-crypto/sha256-js": "5.2.0", - "@aws-sdk/core": "3.624.0", - "@aws-sdk/credential-provider-node": "3.624.0", - "@aws-sdk/middleware-host-header": "3.620.0", - "@aws-sdk/middleware-logger": "3.609.0", - "@aws-sdk/middleware-recursion-detection": "3.620.0", - "@aws-sdk/middleware-user-agent": "3.620.0", - "@aws-sdk/region-config-resolver": "3.614.0", - "@aws-sdk/types": "3.609.0", - "@aws-sdk/util-endpoints": "3.614.0", - "@aws-sdk/util-user-agent-browser": "3.609.0", - "@aws-sdk/util-user-agent-node": "3.614.0", - "@smithy/config-resolver": "^3.0.5", - "@smithy/core": "^2.3.2", - "@smithy/fetch-http-handler": "^3.2.4", - "@smithy/hash-node": "^3.0.3", - "@smithy/invalid-dependency": "^3.0.3", - "@smithy/middleware-content-length": "^3.0.5", - "@smithy/middleware-endpoint": "^3.1.0", - "@smithy/middleware-retry": "^3.0.14", - "@smithy/middleware-serde": "^3.0.3", - "@smithy/middleware-stack": "^3.0.3", - "@smithy/node-config-provider": "^3.1.4", - "@smithy/node-http-handler": "^3.1.4", - "@smithy/protocol-http": "^4.1.0", - "@smithy/smithy-client": "^3.1.12", - "@smithy/types": "^3.3.0", - "@smithy/url-parser": "^3.0.3", - "@smithy/util-base64": "^3.0.0", - "@smithy/util-body-length-browser": "^3.0.0", - "@smithy/util-body-length-node": "^3.0.0", - "@smithy/util-defaults-mode-browser": "^3.0.14", - "@smithy/util-defaults-mode-node": "^3.0.14", - "@smithy/util-endpoints": "^2.0.5", - "@smithy/util-middleware": "^3.0.3", - "@smithy/util-retry": "^3.0.3", - 
"@smithy/util-utf8": "^3.0.0", + "node_modules/@aws-sdk/client-dynamodb/node_modules/@aws-sdk/core": { + "version": "3.799.0", + "resolved": "https://registry.npmjs.org/@aws-sdk/core/-/core-3.799.0.tgz", + "integrity": "sha512-hkKF3Zpc6+H8GI1rlttYVRh9uEE77cqAzLmLpY3iu7sql8cZgPERRBfaFct8p1SaDyrksLNiboD1vKW58mbsYg==", + "dependencies": { + "@aws-sdk/types": "3.775.0", + "@smithy/core": "^3.3.0", + "@smithy/node-config-provider": "^4.0.2", + "@smithy/property-provider": "^4.0.2", + "@smithy/protocol-http": "^5.1.0", + "@smithy/signature-v4": "^5.1.0", + "@smithy/smithy-client": "^4.2.1", + "@smithy/types": "^4.2.0", + "@smithy/util-middleware": "^4.0.2", + "fast-xml-parser": "4.4.1", "tslib": "^2.6.2" }, "engines": { - "node": ">=16.0.0" - }, - "peerDependencies": { - "@aws-sdk/client-sts": "^3.624.0" + "node": ">=18.0.0" } }, - "node_modules/@aws-sdk/client-ec2/node_modules/@aws-sdk/client-sts": { - "version": "3.624.0", - "resolved": "https://registry.npmjs.org/@aws-sdk/client-sts/-/client-sts-3.624.0.tgz", - "integrity": "sha512-k36fLZCb2nfoV/DKK3jbRgO/Yf7/R80pgYfMiotkGjnZwDmRvNN08z4l06L9C+CieazzkgRxNUzyppsYcYsQaw==", - "dev": true, + "node_modules/@aws-sdk/client-dynamodb/node_modules/@aws-sdk/credential-provider-env": { + "version": "3.799.0", + "resolved": "https://registry.npmjs.org/@aws-sdk/credential-provider-env/-/credential-provider-env-3.799.0.tgz", + "integrity": "sha512-vT/SSWtbUIOW/U21qgEySmmO44SFWIA7WeQPX1OrI8WJ5n7OEI23JWLHjLvHTkYmuZK6z1rPcv7HzRgmuGRibA==", "dependencies": { - "@aws-crypto/sha256-browser": "5.2.0", - "@aws-crypto/sha256-js": "5.2.0", - "@aws-sdk/client-sso-oidc": "3.624.0", - "@aws-sdk/core": "3.624.0", - "@aws-sdk/credential-provider-node": "3.624.0", - "@aws-sdk/middleware-host-header": "3.620.0", - "@aws-sdk/middleware-logger": "3.609.0", - "@aws-sdk/middleware-recursion-detection": "3.620.0", - "@aws-sdk/middleware-user-agent": "3.620.0", - "@aws-sdk/region-config-resolver": "3.614.0", - "@aws-sdk/types": "3.609.0", - 
"@aws-sdk/util-endpoints": "3.614.0", - "@aws-sdk/util-user-agent-browser": "3.609.0", - "@aws-sdk/util-user-agent-node": "3.614.0", - "@smithy/config-resolver": "^3.0.5", - "@smithy/core": "^2.3.2", - "@smithy/fetch-http-handler": "^3.2.4", - "@smithy/hash-node": "^3.0.3", - "@smithy/invalid-dependency": "^3.0.3", - "@smithy/middleware-content-length": "^3.0.5", - "@smithy/middleware-endpoint": "^3.1.0", - "@smithy/middleware-retry": "^3.0.14", - "@smithy/middleware-serde": "^3.0.3", - "@smithy/middleware-stack": "^3.0.3", - "@smithy/node-config-provider": "^3.1.4", - "@smithy/node-http-handler": "^3.1.4", - "@smithy/protocol-http": "^4.1.0", - "@smithy/smithy-client": "^3.1.12", - "@smithy/types": "^3.3.0", - "@smithy/url-parser": "^3.0.3", - "@smithy/util-base64": "^3.0.0", - "@smithy/util-body-length-browser": "^3.0.0", - "@smithy/util-body-length-node": "^3.0.0", - "@smithy/util-defaults-mode-browser": "^3.0.14", - "@smithy/util-defaults-mode-node": "^3.0.14", - "@smithy/util-endpoints": "^2.0.5", - "@smithy/util-middleware": "^3.0.3", - "@smithy/util-retry": "^3.0.3", - "@smithy/util-utf8": "^3.0.0", + "@aws-sdk/core": "3.799.0", + "@aws-sdk/types": "3.775.0", + "@smithy/property-provider": "^4.0.2", + "@smithy/types": "^4.2.0", "tslib": "^2.6.2" }, "engines": { - "node": ">=16.0.0" + "node": ">=18.0.0" } }, - "node_modules/@aws-sdk/client-ec2/node_modules/@aws-sdk/core": { - "version": "3.624.0", - "resolved": "https://registry.npmjs.org/@aws-sdk/core/-/core-3.624.0.tgz", - "integrity": "sha512-WyFmPbhRIvtWi7hBp8uSFy+iPpj8ccNV/eX86hwF4irMjfc/FtsGVIAeBXxXM/vGCjkdfEzOnl+tJ2XACD4OXg==", - "dev": true, + "node_modules/@aws-sdk/client-dynamodb/node_modules/@aws-sdk/credential-provider-http": { + "version": "3.799.0", + "resolved": "https://registry.npmjs.org/@aws-sdk/credential-provider-http/-/credential-provider-http-3.799.0.tgz", + "integrity": "sha512-2CjBpOWmhaPAExOgHnIB5nOkS5ef+mfRlJ1JC4nsnjAx0nrK4tk0XRE0LYz11P3+ue+a86cU8WTmBo+qjnGxPQ==", "dependencies": { - 
"@smithy/core": "^2.3.2", - "@smithy/node-config-provider": "^3.1.4", - "@smithy/protocol-http": "^4.1.0", - "@smithy/signature-v4": "^4.1.0", - "@smithy/smithy-client": "^3.1.12", - "@smithy/types": "^3.3.0", - "@smithy/util-middleware": "^3.0.3", - "fast-xml-parser": "4.4.1", + "@aws-sdk/core": "3.799.0", + "@aws-sdk/types": "3.775.0", + "@smithy/fetch-http-handler": "^5.0.2", + "@smithy/node-http-handler": "^4.0.4", + "@smithy/property-provider": "^4.0.2", + "@smithy/protocol-http": "^5.1.0", + "@smithy/smithy-client": "^4.2.1", + "@smithy/types": "^4.2.0", + "@smithy/util-stream": "^4.2.0", "tslib": "^2.6.2" }, "engines": { - "node": ">=16.0.0" + "node": ">=18.0.0" } }, - "node_modules/@aws-sdk/client-ec2/node_modules/@aws-sdk/credential-provider-env": { - "version": "3.620.1", - "resolved": "https://registry.npmjs.org/@aws-sdk/credential-provider-env/-/credential-provider-env-3.620.1.tgz", - "integrity": "sha512-ExuILJ2qLW5ZO+rgkNRj0xiAipKT16Rk77buvPP8csR7kkCflT/gXTyzRe/uzIiETTxM7tr8xuO9MP/DQXqkfg==", - "dev": true, + "node_modules/@aws-sdk/client-dynamodb/node_modules/@aws-sdk/credential-provider-ini": { + "version": "3.799.0", + "resolved": "https://registry.npmjs.org/@aws-sdk/credential-provider-ini/-/credential-provider-ini-3.799.0.tgz", + "integrity": "sha512-M9ubILFxerqw4QJwk83MnjtZyoA2eNCiea5V+PzZeHlwk2PON/EnawKqy65x9/hMHGoSvvNuby7iMAmPptu7yw==", "dependencies": { - "@aws-sdk/types": "3.609.0", - "@smithy/property-provider": "^3.1.3", - "@smithy/types": "^3.3.0", + "@aws-sdk/core": "3.799.0", + "@aws-sdk/credential-provider-env": "3.799.0", + "@aws-sdk/credential-provider-http": "3.799.0", + "@aws-sdk/credential-provider-process": "3.799.0", + "@aws-sdk/credential-provider-sso": "3.799.0", + "@aws-sdk/credential-provider-web-identity": "3.799.0", + "@aws-sdk/nested-clients": "3.799.0", + "@aws-sdk/types": "3.775.0", + "@smithy/credential-provider-imds": "^4.0.2", + "@smithy/property-provider": "^4.0.2", + "@smithy/shared-ini-file-loader": "^4.0.2", + 
"@smithy/types": "^4.2.0", "tslib": "^2.6.2" }, "engines": { - "node": ">=16.0.0" + "node": ">=18.0.0" } }, - "node_modules/@aws-sdk/client-ec2/node_modules/@aws-sdk/credential-provider-http": { - "version": "3.622.0", - "resolved": "https://registry.npmjs.org/@aws-sdk/credential-provider-http/-/credential-provider-http-3.622.0.tgz", - "integrity": "sha512-VUHbr24Oll1RK3WR8XLUugLpgK9ZuxEm/NVeVqyFts1Ck9gsKpRg1x4eH7L7tW3SJ4TDEQNMbD7/7J+eoL2svg==", - "dev": true, + "node_modules/@aws-sdk/client-dynamodb/node_modules/@aws-sdk/credential-provider-node": { + "version": "3.799.0", + "resolved": "https://registry.npmjs.org/@aws-sdk/credential-provider-node/-/credential-provider-node-3.799.0.tgz", + "integrity": "sha512-nd9fSJc0wUlgKUkIr2ldJhcIIrzJFS29AGZoyY22J3xih63nNDv61eTGVMsDZzHlV21XzMlPEljTR7axiimckg==", "dependencies": { - "@aws-sdk/types": "3.609.0", - "@smithy/fetch-http-handler": "^3.2.4", - "@smithy/node-http-handler": "^3.1.4", - "@smithy/property-provider": "^3.1.3", - "@smithy/protocol-http": "^4.1.0", - "@smithy/smithy-client": "^3.1.12", - "@smithy/types": "^3.3.0", - "@smithy/util-stream": "^3.1.3", + "@aws-sdk/credential-provider-env": "3.799.0", + "@aws-sdk/credential-provider-http": "3.799.0", + "@aws-sdk/credential-provider-ini": "3.799.0", + "@aws-sdk/credential-provider-process": "3.799.0", + "@aws-sdk/credential-provider-sso": "3.799.0", + "@aws-sdk/credential-provider-web-identity": "3.799.0", + "@aws-sdk/types": "3.775.0", + "@smithy/credential-provider-imds": "^4.0.2", + "@smithy/property-provider": "^4.0.2", + "@smithy/shared-ini-file-loader": "^4.0.2", + "@smithy/types": "^4.2.0", "tslib": "^2.6.2" }, "engines": { - "node": ">=16.0.0" + "node": ">=18.0.0" } }, - "node_modules/@aws-sdk/client-ec2/node_modules/@aws-sdk/credential-provider-ini": { - "version": "3.624.0", - "resolved": "https://registry.npmjs.org/@aws-sdk/credential-provider-ini/-/credential-provider-ini-3.624.0.tgz", - "integrity": 
"sha512-mMoNIy7MO2WTBbdqMyLpbt6SZpthE6e0GkRYpsd0yozPt0RZopcBhEh+HG1U9Y1PVODo+jcMk353vAi61CfnhQ==", - "dev": true, + "node_modules/@aws-sdk/client-dynamodb/node_modules/@aws-sdk/credential-provider-process": { + "version": "3.799.0", + "resolved": "https://registry.npmjs.org/@aws-sdk/credential-provider-process/-/credential-provider-process-3.799.0.tgz", + "integrity": "sha512-g8jmNs2k98WNHMYcea1YKA+7ao2Ma4w0P42Dz4YpcI155pQHxHx25RwbOG+rsAKuo3bKwkW53HVE/ZTKhcWFgw==", "dependencies": { - "@aws-sdk/credential-provider-env": "3.620.1", - "@aws-sdk/credential-provider-http": "3.622.0", - "@aws-sdk/credential-provider-process": "3.620.1", - "@aws-sdk/credential-provider-sso": "3.624.0", - "@aws-sdk/credential-provider-web-identity": "3.621.0", - "@aws-sdk/types": "3.609.0", - "@smithy/credential-provider-imds": "^3.2.0", - "@smithy/property-provider": "^3.1.3", - "@smithy/shared-ini-file-loader": "^3.1.4", - "@smithy/types": "^3.3.0", + "@aws-sdk/core": "3.799.0", + "@aws-sdk/types": "3.775.0", + "@smithy/property-provider": "^4.0.2", + "@smithy/shared-ini-file-loader": "^4.0.2", + "@smithy/types": "^4.2.0", "tslib": "^2.6.2" }, "engines": { - "node": ">=16.0.0" + "node": ">=18.0.0" + } + }, + "node_modules/@aws-sdk/client-dynamodb/node_modules/@aws-sdk/credential-provider-sso": { + "version": "3.799.0", + "resolved": "https://registry.npmjs.org/@aws-sdk/credential-provider-sso/-/credential-provider-sso-3.799.0.tgz", + "integrity": "sha512-lQv27QkNU9FJFZqEf5DIEN3uXEN409Iaym9WJzhOouGtxvTIAWiD23OYh1u8PvBdrordJGS2YddfQvhcmq9akw==", + "dependencies": { + "@aws-sdk/client-sso": "3.799.0", + "@aws-sdk/core": "3.799.0", + "@aws-sdk/token-providers": "3.799.0", + "@aws-sdk/types": "3.775.0", + "@smithy/property-provider": "^4.0.2", + "@smithy/shared-ini-file-loader": "^4.0.2", + "@smithy/types": "^4.2.0", + "tslib": "^2.6.2" }, - "peerDependencies": { - "@aws-sdk/client-sts": "^3.624.0" + "engines": { + "node": ">=18.0.0" } }, - 
"node_modules/@aws-sdk/client-ec2/node_modules/@aws-sdk/credential-provider-node": { - "version": "3.624.0", - "resolved": "https://registry.npmjs.org/@aws-sdk/credential-provider-node/-/credential-provider-node-3.624.0.tgz", - "integrity": "sha512-vYyGK7oNpd81BdbH5IlmQ6zfaQqU+rPwsKTDDBeLRjshtrGXOEpfoahVpG9PX0ibu32IOWp4ZyXBNyVrnvcMOw==", - "dev": true, + "node_modules/@aws-sdk/client-dynamodb/node_modules/@aws-sdk/credential-provider-web-identity": { + "version": "3.799.0", + "resolved": "https://registry.npmjs.org/@aws-sdk/credential-provider-web-identity/-/credential-provider-web-identity-3.799.0.tgz", + "integrity": "sha512-8k1i9ut+BEg0QZ+I6UQMxGNR1T8paLmAOAZXU+nLQR0lcxS6lr8v+dqofgzQPuHLBkWNCr1Av1IKeL3bJjgU7g==", "dependencies": { - "@aws-sdk/credential-provider-env": "3.620.1", - "@aws-sdk/credential-provider-http": "3.622.0", - "@aws-sdk/credential-provider-ini": "3.624.0", - "@aws-sdk/credential-provider-process": "3.620.1", - "@aws-sdk/credential-provider-sso": "3.624.0", - "@aws-sdk/credential-provider-web-identity": "3.621.0", - "@aws-sdk/types": "3.609.0", - "@smithy/credential-provider-imds": "^3.2.0", - "@smithy/property-provider": "^3.1.3", - "@smithy/shared-ini-file-loader": "^3.1.4", - "@smithy/types": "^3.3.0", + "@aws-sdk/core": "3.799.0", + "@aws-sdk/nested-clients": "3.799.0", + "@aws-sdk/types": "3.775.0", + "@smithy/property-provider": "^4.0.2", + "@smithy/types": "^4.2.0", "tslib": "^2.6.2" }, "engines": { - "node": ">=16.0.0" + "node": ">=18.0.0" } }, - "node_modules/@aws-sdk/client-ec2/node_modules/@aws-sdk/credential-provider-process": { - "version": "3.620.1", - "resolved": "https://registry.npmjs.org/@aws-sdk/credential-provider-process/-/credential-provider-process-3.620.1.tgz", - "integrity": "sha512-hWqFMidqLAkaV9G460+1at6qa9vySbjQKKc04p59OT7lZ5cO5VH5S4aI05e+m4j364MBROjjk2ugNvfNf/8ILg==", - "dev": true, + "node_modules/@aws-sdk/client-dynamodb/node_modules/@aws-sdk/middleware-host-header": { + "version": "3.775.0", + "resolved": 
"https://registry.npmjs.org/@aws-sdk/middleware-host-header/-/middleware-host-header-3.775.0.tgz", + "integrity": "sha512-tkSegM0Z6WMXpLB8oPys/d+umYIocvO298mGvcMCncpRl77L9XkvSLJIFzaHes+o7djAgIduYw8wKIMStFss2w==", "dependencies": { - "@aws-sdk/types": "3.609.0", - "@smithy/property-provider": "^3.1.3", - "@smithy/shared-ini-file-loader": "^3.1.4", - "@smithy/types": "^3.3.0", + "@aws-sdk/types": "3.775.0", + "@smithy/protocol-http": "^5.1.0", + "@smithy/types": "^4.2.0", "tslib": "^2.6.2" }, "engines": { - "node": ">=16.0.0" + "node": ">=18.0.0" } }, - "node_modules/@aws-sdk/client-ec2/node_modules/@aws-sdk/credential-provider-sso": { - "version": "3.624.0", - "resolved": "https://registry.npmjs.org/@aws-sdk/credential-provider-sso/-/credential-provider-sso-3.624.0.tgz", - "integrity": "sha512-A02bayIjU9APEPKr3HudrFHEx0WfghoSPsPopckDkW7VBqO4wizzcxr75Q9A3vNX+cwg0wCN6UitTNe6pVlRaQ==", - "dev": true, + "node_modules/@aws-sdk/client-dynamodb/node_modules/@aws-sdk/middleware-logger": { + "version": "3.775.0", + "resolved": "https://registry.npmjs.org/@aws-sdk/middleware-logger/-/middleware-logger-3.775.0.tgz", + "integrity": "sha512-FaxO1xom4MAoUJsldmR92nT1G6uZxTdNYOFYtdHfd6N2wcNaTuxgjIvqzg5y7QIH9kn58XX/dzf1iTjgqUStZw==", "dependencies": { - "@aws-sdk/client-sso": "3.624.0", - "@aws-sdk/token-providers": "3.614.0", - "@aws-sdk/types": "3.609.0", - "@smithy/property-provider": "^3.1.3", - "@smithy/shared-ini-file-loader": "^3.1.4", - "@smithy/types": "^3.3.0", + "@aws-sdk/types": "3.775.0", + "@smithy/types": "^4.2.0", "tslib": "^2.6.2" }, "engines": { - "node": ">=16.0.0" + "node": ">=18.0.0" } }, - "node_modules/@aws-sdk/client-ec2/node_modules/@aws-sdk/credential-provider-web-identity": { - "version": "3.621.0", - "resolved": "https://registry.npmjs.org/@aws-sdk/credential-provider-web-identity/-/credential-provider-web-identity-3.621.0.tgz", - "integrity": "sha512-w7ASSyfNvcx7+bYGep3VBgC3K6vEdLmlpjT7nSIHxxQf+WSdvy+HynwJosrpZax0sK5q0D1Jpn/5q+r5lwwW6w==", - "dev": true, 
+ "node_modules/@aws-sdk/client-dynamodb/node_modules/@aws-sdk/middleware-recursion-detection": { + "version": "3.775.0", + "resolved": "https://registry.npmjs.org/@aws-sdk/middleware-recursion-detection/-/middleware-recursion-detection-3.775.0.tgz", + "integrity": "sha512-GLCzC8D0A0YDG5u3F5U03Vb9j5tcOEFhr8oc6PDk0k0vm5VwtZOE6LvK7hcCSoAB4HXyOUM0sQuXrbaAh9OwXA==", "dependencies": { - "@aws-sdk/types": "3.609.0", - "@smithy/property-provider": "^3.1.3", - "@smithy/types": "^3.3.0", + "@aws-sdk/types": "3.775.0", + "@smithy/protocol-http": "^5.1.0", + "@smithy/types": "^4.2.0", "tslib": "^2.6.2" }, "engines": { - "node": ">=16.0.0" + "node": ">=18.0.0" + } + }, + "node_modules/@aws-sdk/client-dynamodb/node_modules/@aws-sdk/middleware-user-agent": { + "version": "3.799.0", + "resolved": "https://registry.npmjs.org/@aws-sdk/middleware-user-agent/-/middleware-user-agent-3.799.0.tgz", + "integrity": "sha512-TropQZanbOTxa+p+Nl4fWkzlRhgFwDfW+Wb6TR3jZN7IXHNlPpgGFpdrgvBExhW/RBhqr+94OsR8Ou58lp3hhA==", + "dependencies": { + "@aws-sdk/core": "3.799.0", + "@aws-sdk/types": "3.775.0", + "@aws-sdk/util-endpoints": "3.787.0", + "@smithy/core": "^3.3.0", + "@smithy/protocol-http": "^5.1.0", + "@smithy/types": "^4.2.0", + "tslib": "^2.6.2" }, - "peerDependencies": { - "@aws-sdk/client-sts": "^3.621.0" + "engines": { + "node": ">=18.0.0" } }, - "node_modules/@aws-sdk/client-ec2/node_modules/@aws-sdk/middleware-host-header": { - "version": "3.620.0", - "resolved": "https://registry.npmjs.org/@aws-sdk/middleware-host-header/-/middleware-host-header-3.620.0.tgz", - "integrity": "sha512-VMtPEZwqYrII/oUkffYsNWY9PZ9xpNJpMgmyU0rlDQ25O1c0Hk3fJmZRe6pEkAJ0omD7kLrqGl1DUjQVxpd/Rg==", - "dev": true, + "node_modules/@aws-sdk/client-dynamodb/node_modules/@aws-sdk/token-providers": { + "version": "3.799.0", + "resolved": "https://registry.npmjs.org/@aws-sdk/token-providers/-/token-providers-3.799.0.tgz", + "integrity": 
"sha512-/8iDjnsJs/D8AhGbDAmdF5oSHzE4jsDsM2RIIxmBAKTZXkaaclQBNX9CmAqLKQmO3IUMZsDH2KENHLVAk/N/mw==", "dependencies": { - "@aws-sdk/types": "3.609.0", - "@smithy/protocol-http": "^4.1.0", - "@smithy/types": "^3.3.0", + "@aws-sdk/nested-clients": "3.799.0", + "@aws-sdk/types": "3.775.0", + "@smithy/property-provider": "^4.0.2", + "@smithy/shared-ini-file-loader": "^4.0.2", + "@smithy/types": "^4.2.0", "tslib": "^2.6.2" }, "engines": { - "node": ">=16.0.0" + "node": ">=18.0.0" } }, - "node_modules/@aws-sdk/client-ec2/node_modules/@aws-sdk/middleware-logger": { - "version": "3.609.0", - "resolved": "https://registry.npmjs.org/@aws-sdk/middleware-logger/-/middleware-logger-3.609.0.tgz", - "integrity": "sha512-S62U2dy4jMDhDFDK5gZ4VxFdWzCtLzwbYyFZx2uvPYTECkepLUfzLic2BHg2Qvtu4QjX+oGE3P/7fwaGIsGNuQ==", - "dev": true, + "node_modules/@aws-sdk/client-dynamodb/node_modules/@aws-sdk/util-endpoints": { + "version": "3.787.0", + "resolved": "https://registry.npmjs.org/@aws-sdk/util-endpoints/-/util-endpoints-3.787.0.tgz", + "integrity": "sha512-fd3zkiOkwnbdbN0Xp9TsP5SWrmv0SpT70YEdbb8wAj2DWQwiCmFszaSs+YCvhoCdmlR3Wl9Spu0pGpSAGKeYvQ==", "dependencies": { - "@aws-sdk/types": "3.609.0", - "@smithy/types": "^3.3.0", + "@aws-sdk/types": "3.775.0", + "@smithy/types": "^4.2.0", + "@smithy/util-endpoints": "^3.0.2", "tslib": "^2.6.2" }, "engines": { - "node": ">=16.0.0" + "node": ">=18.0.0" } }, - "node_modules/@aws-sdk/client-ec2/node_modules/@aws-sdk/middleware-recursion-detection": { - "version": "3.620.0", - "resolved": "https://registry.npmjs.org/@aws-sdk/middleware-recursion-detection/-/middleware-recursion-detection-3.620.0.tgz", - "integrity": "sha512-nh91S7aGK3e/o1ck64sA/CyoFw+gAYj2BDOnoNa6ouyCrVJED96ZXWbhye/fz9SgmNUZR2g7GdVpiLpMKZoI5w==", - "dev": true, + "node_modules/@aws-sdk/client-dynamodb/node_modules/@aws-sdk/util-user-agent-browser": { + "version": "3.775.0", + "resolved": "https://registry.npmjs.org/@aws-sdk/util-user-agent-browser/-/util-user-agent-browser-3.775.0.tgz", + 
"integrity": "sha512-txw2wkiJmZKVdDbscK7VBK+u+TJnRtlUjRTLei+elZg2ADhpQxfVAQl436FUeIv6AhB/oRHW6/K/EAGXUSWi0A==", "dependencies": { - "@aws-sdk/types": "3.609.0", - "@smithy/protocol-http": "^4.1.0", - "@smithy/types": "^3.3.0", + "@aws-sdk/types": "3.775.0", + "@smithy/types": "^4.2.0", + "bowser": "^2.11.0", + "tslib": "^2.6.2" + } + }, + "node_modules/@aws-sdk/client-dynamodb/node_modules/@aws-sdk/util-user-agent-node": { + "version": "3.799.0", + "resolved": "https://registry.npmjs.org/@aws-sdk/util-user-agent-node/-/util-user-agent-node-3.799.0.tgz", + "integrity": "sha512-iXBk38RbIWPF5Nq9O4AnktORAzXovSVqWYClvS1qbE7ILsnTLJbagU9HlU25O2iV5COVh1qZkwuP5NHQ2yTEyw==", + "dependencies": { + "@aws-sdk/middleware-user-agent": "3.799.0", + "@aws-sdk/types": "3.775.0", + "@smithy/node-config-provider": "^4.0.2", + "@smithy/types": "^4.2.0", "tslib": "^2.6.2" }, "engines": { - "node": ">=16.0.0" + "node": ">=18.0.0" + }, + "peerDependencies": { + "aws-crt": ">=1.0.0" + }, + "peerDependenciesMeta": { + "aws-crt": { + "optional": true + } } }, - "node_modules/@aws-sdk/client-ec2/node_modules/@aws-sdk/middleware-user-agent": { - "version": "3.620.0", - "resolved": "https://registry.npmjs.org/@aws-sdk/middleware-user-agent/-/middleware-user-agent-3.620.0.tgz", - "integrity": "sha512-bvS6etn+KsuL32ubY5D3xNof1qkenpbJXf/ugGXbg0n98DvDFQ/F+SMLxHgbnER5dsKYchNnhmtI6/FC3HFu/A==", - "dev": true, + "node_modules/@aws-sdk/client-dynamodb/node_modules/@smithy/abort-controller": { + "version": "4.0.2", + "resolved": "https://registry.npmjs.org/@smithy/abort-controller/-/abort-controller-4.0.2.tgz", + "integrity": "sha512-Sl/78VDtgqKxN2+1qduaVE140XF+Xg+TafkncspwM4jFP/LHr76ZHmIY/y3V1M0mMLNk+Je6IGbzxy23RSToMw==", "dependencies": { - "@aws-sdk/types": "3.609.0", - "@aws-sdk/util-endpoints": "3.614.0", - "@smithy/protocol-http": "^4.1.0", - "@smithy/types": "^3.3.0", + "@smithy/types": "^4.2.0", "tslib": "^2.6.2" }, "engines": { - "node": ">=16.0.0" + "node": ">=18.0.0" } }, - 
"node_modules/@aws-sdk/client-ec2/node_modules/@aws-sdk/region-config-resolver": { - "version": "3.614.0", - "resolved": "https://registry.npmjs.org/@aws-sdk/region-config-resolver/-/region-config-resolver-3.614.0.tgz", - "integrity": "sha512-vDCeMXvic/LU0KFIUjpC3RiSTIkkvESsEfbVHiHH0YINfl8HnEqR5rj+L8+phsCeVg2+LmYwYxd5NRz4PHxt5g==", - "dev": true, + "node_modules/@aws-sdk/client-dynamodb/node_modules/@smithy/config-resolver": { + "version": "4.1.0", + "resolved": "https://registry.npmjs.org/@smithy/config-resolver/-/config-resolver-4.1.0.tgz", + "integrity": "sha512-8smPlwhga22pwl23fM5ew4T9vfLUCeFXlcqNOCD5M5h8VmNPNUE9j6bQSuRXpDSV11L/E/SwEBQuW8hr6+nS1A==", "dependencies": { - "@aws-sdk/types": "3.609.0", - "@smithy/node-config-provider": "^3.1.4", - "@smithy/types": "^3.3.0", - "@smithy/util-config-provider": "^3.0.0", - "@smithy/util-middleware": "^3.0.3", + "@smithy/node-config-provider": "^4.0.2", + "@smithy/types": "^4.2.0", + "@smithy/util-config-provider": "^4.0.0", + "@smithy/util-middleware": "^4.0.2", "tslib": "^2.6.2" }, "engines": { - "node": ">=16.0.0" + "node": ">=18.0.0" } }, - "node_modules/@aws-sdk/client-ec2/node_modules/@aws-sdk/token-providers": { - "version": "3.614.0", - "resolved": "https://registry.npmjs.org/@aws-sdk/token-providers/-/token-providers-3.614.0.tgz", - "integrity": "sha512-okItqyY6L9IHdxqs+Z116y5/nda7rHxLvROxtAJdLavWTYDydxrZstImNgGWTeVdmc0xX2gJCI77UYUTQWnhRw==", - "dev": true, + "node_modules/@aws-sdk/client-dynamodb/node_modules/@smithy/core": { + "version": "3.3.0", + "resolved": "https://registry.npmjs.org/@smithy/core/-/core-3.3.0.tgz", + "integrity": "sha512-r6gvs5OfRq/w+9unPm7B3po4rmWaGh0CIL/OwHntGGux7+RhOOZLGuurbeMgWV6W55ZuyMTypJLeH0vn/ZRaWQ==", "dependencies": { - "@aws-sdk/types": "3.609.0", - "@smithy/property-provider": "^3.1.3", - "@smithy/shared-ini-file-loader": "^3.1.4", - "@smithy/types": "^3.3.0", + "@smithy/middleware-serde": "^4.0.3", + "@smithy/protocol-http": "^5.1.0", + "@smithy/types": "^4.2.0", + 
"@smithy/util-body-length-browser": "^4.0.0", + "@smithy/util-middleware": "^4.0.2", + "@smithy/util-stream": "^4.2.0", + "@smithy/util-utf8": "^4.0.0", "tslib": "^2.6.2" }, "engines": { - "node": ">=16.0.0" + "node": ">=18.0.0" + } + }, + "node_modules/@aws-sdk/client-dynamodb/node_modules/@smithy/credential-provider-imds": { + "version": "4.0.2", + "resolved": "https://registry.npmjs.org/@smithy/credential-provider-imds/-/credential-provider-imds-4.0.2.tgz", + "integrity": "sha512-32lVig6jCaWBHnY+OEQ6e6Vnt5vDHaLiydGrwYMW9tPqO688hPGTYRamYJ1EptxEC2rAwJrHWmPoKRBl4iTa8w==", + "dependencies": { + "@smithy/node-config-provider": "^4.0.2", + "@smithy/property-provider": "^4.0.2", + "@smithy/types": "^4.2.0", + "@smithy/url-parser": "^4.0.2", + "tslib": "^2.6.2" }, - "peerDependencies": { - "@aws-sdk/client-sso-oidc": "^3.614.0" + "engines": { + "node": ">=18.0.0" } }, - "node_modules/@aws-sdk/client-ec2/node_modules/@aws-sdk/types": { - "version": "3.609.0", - "resolved": "https://registry.npmjs.org/@aws-sdk/types/-/types-3.609.0.tgz", - "integrity": "sha512-+Tqnh9w0h2LcrUsdXyT1F8mNhXz+tVYBtP19LpeEGntmvHwa2XzvLUCWpoIAIVsHp5+HdB2X9Sn0KAtmbFXc2Q==", - "dev": true, + "node_modules/@aws-sdk/client-dynamodb/node_modules/@smithy/fetch-http-handler": { + "version": "5.0.2", + "resolved": "https://registry.npmjs.org/@smithy/fetch-http-handler/-/fetch-http-handler-5.0.2.tgz", + "integrity": "sha512-+9Dz8sakS9pe7f2cBocpJXdeVjMopUDLgZs1yWeu7h++WqSbjUYv/JAJwKwXw1HV6gq1jyWjxuyn24E2GhoEcQ==", "dependencies": { - "@smithy/types": "^3.3.0", + "@smithy/protocol-http": "^5.1.0", + "@smithy/querystring-builder": "^4.0.2", + "@smithy/types": "^4.2.0", + "@smithy/util-base64": "^4.0.0", "tslib": "^2.6.2" }, "engines": { - "node": ">=16.0.0" + "node": ">=18.0.0" } }, - "node_modules/@aws-sdk/client-ec2/node_modules/@aws-sdk/util-endpoints": { - "version": "3.614.0", - "resolved": "https://registry.npmjs.org/@aws-sdk/util-endpoints/-/util-endpoints-3.614.0.tgz", - "integrity": 
"sha512-wK2cdrXHH4oz4IomV/yrGkftU9A+ITB6nFL+rxxyO78is2ifHJpFdV4aqk4LSkXYPi6CXWNru/Dqc7yiKXgJPw==", - "dev": true, + "node_modules/@aws-sdk/client-dynamodb/node_modules/@smithy/hash-node": { + "version": "4.0.2", + "resolved": "https://registry.npmjs.org/@smithy/hash-node/-/hash-node-4.0.2.tgz", + "integrity": "sha512-VnTpYPnRUE7yVhWozFdlxcYknv9UN7CeOqSrMH+V877v4oqtVYuoqhIhtSjmGPvYrYnAkaM61sLMKHvxL138yg==", "dependencies": { - "@aws-sdk/types": "3.609.0", - "@smithy/types": "^3.3.0", - "@smithy/util-endpoints": "^2.0.5", + "@smithy/types": "^4.2.0", + "@smithy/util-buffer-from": "^4.0.0", + "@smithy/util-utf8": "^4.0.0", "tslib": "^2.6.2" }, "engines": { - "node": ">=16.0.0" + "node": ">=18.0.0" } }, - "node_modules/@aws-sdk/client-ec2/node_modules/@aws-sdk/util-user-agent-browser": { - "version": "3.609.0", - "resolved": "https://registry.npmjs.org/@aws-sdk/util-user-agent-browser/-/util-user-agent-browser-3.609.0.tgz", - "integrity": "sha512-fojPU+mNahzQ0YHYBsx0ZIhmMA96H+ZIZ665ObU9tl+SGdbLneVZVikGve+NmHTQwHzwkFsZYYnVKAkreJLAtA==", - "dev": true, + "node_modules/@aws-sdk/client-dynamodb/node_modules/@smithy/invalid-dependency": { + "version": "4.0.2", + "resolved": "https://registry.npmjs.org/@smithy/invalid-dependency/-/invalid-dependency-4.0.2.tgz", + "integrity": "sha512-GatB4+2DTpgWPday+mnUkoumP54u/MDM/5u44KF9hIu8jF0uafZtQLcdfIKkIcUNuF/fBojpLEHZS/56JqPeXQ==", "dependencies": { - "@aws-sdk/types": "3.609.0", - "@smithy/types": "^3.3.0", - "bowser": "^2.11.0", + "@smithy/types": "^4.2.0", "tslib": "^2.6.2" + }, + "engines": { + "node": ">=18.0.0" } }, - "node_modules/@aws-sdk/client-ec2/node_modules/@aws-sdk/util-user-agent-node": { - "version": "3.614.0", - "resolved": "https://registry.npmjs.org/@aws-sdk/util-user-agent-node/-/util-user-agent-node-3.614.0.tgz", - "integrity": "sha512-15ElZT88peoHnq5TEoEtZwoXTXRxNrk60TZNdpl/TUBJ5oNJ9Dqb5Z4ryb8ofN6nm9aFf59GVAerFDz8iUoHBA==", - "dev": true, + 
"node_modules/@aws-sdk/client-dynamodb/node_modules/@smithy/is-array-buffer": { + "version": "4.0.0", + "resolved": "https://registry.npmjs.org/@smithy/is-array-buffer/-/is-array-buffer-4.0.0.tgz", + "integrity": "sha512-saYhF8ZZNoJDTvJBEWgeBccCg+yvp1CX+ed12yORU3NilJScfc6gfch2oVb4QgxZrGUx3/ZJlb+c/dJbyupxlw==", "dependencies": { - "@aws-sdk/types": "3.609.0", - "@smithy/node-config-provider": "^3.1.4", - "@smithy/types": "^3.3.0", "tslib": "^2.6.2" }, "engines": { - "node": ">=16.0.0" + "node": ">=18.0.0" + } + }, + "node_modules/@aws-sdk/client-dynamodb/node_modules/@smithy/middleware-content-length": { + "version": "4.0.2", + "resolved": "https://registry.npmjs.org/@smithy/middleware-content-length/-/middleware-content-length-4.0.2.tgz", + "integrity": "sha512-hAfEXm1zU+ELvucxqQ7I8SszwQ4znWMbNv6PLMndN83JJN41EPuS93AIyh2N+gJ6x8QFhzSO6b7q2e6oClDI8A==", + "dependencies": { + "@smithy/protocol-http": "^5.1.0", + "@smithy/types": "^4.2.0", + "tslib": "^2.6.2" }, - "peerDependencies": { - "aws-crt": ">=1.0.0" + "engines": { + "node": ">=18.0.0" + } + }, + "node_modules/@aws-sdk/client-dynamodb/node_modules/@smithy/middleware-endpoint": { + "version": "4.1.1", + "resolved": "https://registry.npmjs.org/@smithy/middleware-endpoint/-/middleware-endpoint-4.1.1.tgz", + "integrity": "sha512-z5RmcHxjvScL+LwEDU2mTNCOhgUs4lu5PGdF1K36IPRmUHhNFxNxgenSB7smyDiYD4vdKQ7CAZtG5cUErqib9w==", + "dependencies": { + "@smithy/core": "^3.3.0", + "@smithy/middleware-serde": "^4.0.3", + "@smithy/node-config-provider": "^4.0.2", + "@smithy/shared-ini-file-loader": "^4.0.2", + "@smithy/types": "^4.2.0", + "@smithy/url-parser": "^4.0.2", + "@smithy/util-middleware": "^4.0.2", + "tslib": "^2.6.2" }, - "peerDependenciesMeta": { - "aws-crt": { - "optional": true - } + "engines": { + "node": ">=18.0.0" } }, - "node_modules/@aws-sdk/client-ec2/node_modules/@smithy/node-config-provider": { - "version": "3.1.9", - "resolved": 
"https://registry.npmjs.org/@smithy/node-config-provider/-/node-config-provider-3.1.9.tgz", - "integrity": "sha512-qRHoah49QJ71eemjuS/WhUXB+mpNtwHRWQr77J/m40ewBVVwvo52kYAmb7iuaECgGTTcYxHS4Wmewfwy++ueew==", - "dev": true, + "node_modules/@aws-sdk/client-dynamodb/node_modules/@smithy/middleware-retry": { + "version": "4.1.1", + "resolved": "https://registry.npmjs.org/@smithy/middleware-retry/-/middleware-retry-4.1.1.tgz", + "integrity": "sha512-mBJOxn9aUYwcBUPQpKv9ifzrCn4EbhPUFguEZv3jB57YOMh0caS4P8HoLvUeNUI1nx4bIVH2SIbogbDfFI9DUA==", + "dependencies": { + "@smithy/node-config-provider": "^4.0.2", + "@smithy/protocol-http": "^5.1.0", + "@smithy/service-error-classification": "^4.0.2", + "@smithy/smithy-client": "^4.2.1", + "@smithy/types": "^4.2.0", + "@smithy/util-middleware": "^4.0.2", + "@smithy/util-retry": "^4.0.2", + "tslib": "^2.6.2", + "uuid": "^9.0.1" + }, + "engines": { + "node": ">=18.0.0" + } + }, + "node_modules/@aws-sdk/client-dynamodb/node_modules/@smithy/middleware-serde": { + "version": "4.0.3", + "resolved": "https://registry.npmjs.org/@smithy/middleware-serde/-/middleware-serde-4.0.3.tgz", + "integrity": "sha512-rfgDVrgLEVMmMn0BI8O+8OVr6vXzjV7HZj57l0QxslhzbvVfikZbVfBVthjLHqib4BW44QhcIgJpvebHlRaC9A==", "dependencies": { - "@smithy/property-provider": "^3.1.8", - "@smithy/shared-ini-file-loader": "^3.1.9", - "@smithy/types": "^3.6.0", + "@smithy/types": "^4.2.0", "tslib": "^2.6.2" }, "engines": { - "node": ">=16.0.0" + "node": ">=18.0.0" } }, - "node_modules/@aws-sdk/client-ec2/node_modules/@smithy/shared-ini-file-loader": { - "version": "3.1.9", - "resolved": "https://registry.npmjs.org/@smithy/shared-ini-file-loader/-/shared-ini-file-loader-3.1.9.tgz", - "integrity": "sha512-/+OsJRNtoRbtsX0UpSgWVxFZLsJHo/4sTr+kBg/J78sr7iC+tHeOvOJrS5hCpVQ6sWBbhWLp1UNiuMyZhE6pmA==", - "dev": true, + "node_modules/@aws-sdk/client-dynamodb/node_modules/@smithy/middleware-stack": { + "version": "4.0.2", + "resolved": 
"https://registry.npmjs.org/@smithy/middleware-stack/-/middleware-stack-4.0.2.tgz", + "integrity": "sha512-eSPVcuJJGVYrFYu2hEq8g8WWdJav3sdrI4o2c6z/rjnYDd3xH9j9E7deZQCzFn4QvGPouLngH3dQ+QVTxv5bOQ==", "dependencies": { - "@smithy/types": "^3.6.0", + "@smithy/types": "^4.2.0", "tslib": "^2.6.2" }, "engines": { - "node": ">=16.0.0" + "node": ">=18.0.0" } }, - "node_modules/@aws-sdk/client-ec2/node_modules/@smithy/util-utf8": { - "version": "3.0.0", - "resolved": "https://registry.npmjs.org/@smithy/util-utf8/-/util-utf8-3.0.0.tgz", - "integrity": "sha512-rUeT12bxFnplYDe815GXbq/oixEGHfRFFtcTF3YdDi/JaENIM6aSYYLJydG83UNzLXeRI5K8abYd/8Sp/QM0kA==", - "dev": true, + "node_modules/@aws-sdk/client-dynamodb/node_modules/@smithy/node-config-provider": { + "version": "4.0.2", + "resolved": "https://registry.npmjs.org/@smithy/node-config-provider/-/node-config-provider-4.0.2.tgz", + "integrity": "sha512-WgCkILRZfJwJ4Da92a6t3ozN/zcvYyJGUTmfGbgS/FkCcoCjl7G4FJaCDN1ySdvLvemnQeo25FdkyMSTSwulsw==", "dependencies": { - "@smithy/util-buffer-from": "^3.0.0", + "@smithy/property-provider": "^4.0.2", + "@smithy/shared-ini-file-loader": "^4.0.2", + "@smithy/types": "^4.2.0", "tslib": "^2.6.2" }, "engines": { - "node": ">=16.0.0" + "node": ">=18.0.0" } }, - "node_modules/@aws-sdk/client-firehose": { - "version": "3.621.0", - "resolved": "https://registry.npmjs.org/@aws-sdk/client-firehose/-/client-firehose-3.621.0.tgz", - "integrity": "sha512-XAjAkXdb35PDvBYph609Fxn4g00HYH/U6N4+KjF9gLQrdTU+wkjf3D9YD02DZNbApJVcu4eIxWh/8M25YkW02A==", + "node_modules/@aws-sdk/client-dynamodb/node_modules/@smithy/node-http-handler": { + "version": "4.0.4", + "resolved": "https://registry.npmjs.org/@smithy/node-http-handler/-/node-http-handler-4.0.4.tgz", + "integrity": "sha512-/mdqabuAT3o/ihBGjL94PUbTSPSRJ0eeVTdgADzow0wRJ0rN4A27EOrtlK56MYiO1fDvlO3jVTCxQtQmK9dZ1g==", "dependencies": { - "@aws-crypto/sha256-browser": "5.2.0", - "@aws-crypto/sha256-js": "5.2.0", - "@aws-sdk/client-sso-oidc": "3.621.0", - 
"@aws-sdk/client-sts": "3.621.0", - "@aws-sdk/core": "3.621.0", - "@aws-sdk/credential-provider-node": "3.621.0", - "@aws-sdk/middleware-host-header": "3.620.0", - "@aws-sdk/middleware-logger": "3.609.0", - "@aws-sdk/middleware-recursion-detection": "3.620.0", - "@aws-sdk/middleware-user-agent": "3.620.0", + "@smithy/abort-controller": "^4.0.2", + "@smithy/protocol-http": "^5.1.0", + "@smithy/querystring-builder": "^4.0.2", + "@smithy/types": "^4.2.0", + "tslib": "^2.6.2" + }, + "engines": { + "node": ">=18.0.0" + } + }, + "node_modules/@aws-sdk/client-dynamodb/node_modules/@smithy/property-provider": { + "version": "4.0.2", + "resolved": "https://registry.npmjs.org/@smithy/property-provider/-/property-provider-4.0.2.tgz", + "integrity": "sha512-wNRoQC1uISOuNc2s4hkOYwYllmiyrvVXWMtq+TysNRVQaHm4yoafYQyjN/goYZS+QbYlPIbb/QRjaUZMuzwQ7A==", + "dependencies": { + "@smithy/types": "^4.2.0", + "tslib": "^2.6.2" + }, + "engines": { + "node": ">=18.0.0" + } + }, + "node_modules/@aws-sdk/client-dynamodb/node_modules/@smithy/protocol-http": { + "version": "5.1.0", + "resolved": "https://registry.npmjs.org/@smithy/protocol-http/-/protocol-http-5.1.0.tgz", + "integrity": "sha512-KxAOL1nUNw2JTYrtviRRjEnykIDhxc84qMBzxvu1MUfQfHTuBlCG7PA6EdVwqpJjH7glw7FqQoFxUJSyBQgu7g==", + "dependencies": { + "@smithy/types": "^4.2.0", + "tslib": "^2.6.2" + }, + "engines": { + "node": ">=18.0.0" + } + }, + "node_modules/@aws-sdk/client-dynamodb/node_modules/@smithy/querystring-builder": { + "version": "4.0.2", + "resolved": "https://registry.npmjs.org/@smithy/querystring-builder/-/querystring-builder-4.0.2.tgz", + "integrity": "sha512-NTOs0FwHw1vimmQM4ebh+wFQvOwkEf/kQL6bSM1Lock+Bv4I89B3hGYoUEPkmvYPkDKyp5UdXJYu+PoTQ3T31Q==", + "dependencies": { + "@smithy/types": "^4.2.0", + "@smithy/util-uri-escape": "^4.0.0", + "tslib": "^2.6.2" + }, + "engines": { + "node": ">=18.0.0" + } + }, + "node_modules/@aws-sdk/client-dynamodb/node_modules/@smithy/querystring-parser": { + "version": "4.0.2", + "resolved": 
"https://registry.npmjs.org/@smithy/querystring-parser/-/querystring-parser-4.0.2.tgz", + "integrity": "sha512-v6w8wnmZcVXjfVLjxw8qF7OwESD9wnpjp0Dqry/Pod0/5vcEA3qxCr+BhbOHlxS8O+29eLpT3aagxXGwIoEk7Q==", + "dependencies": { + "@smithy/types": "^4.2.0", + "tslib": "^2.6.2" + }, + "engines": { + "node": ">=18.0.0" + } + }, + "node_modules/@aws-sdk/client-dynamodb/node_modules/@smithy/service-error-classification": { + "version": "4.0.2", + "resolved": "https://registry.npmjs.org/@smithy/service-error-classification/-/service-error-classification-4.0.2.tgz", + "integrity": "sha512-LA86xeFpTKn270Hbkixqs5n73S+LVM0/VZco8dqd+JT75Dyx3Lcw/MraL7ybjmz786+160K8rPOmhsq0SocoJQ==", + "dependencies": { + "@smithy/types": "^4.2.0" + }, + "engines": { + "node": ">=18.0.0" + } + }, + "node_modules/@aws-sdk/client-dynamodb/node_modules/@smithy/shared-ini-file-loader": { + "version": "4.0.2", + "resolved": "https://registry.npmjs.org/@smithy/shared-ini-file-loader/-/shared-ini-file-loader-4.0.2.tgz", + "integrity": "sha512-J9/gTWBGVuFZ01oVA6vdb4DAjf1XbDhK6sLsu3OS9qmLrS6KB5ygpeHiM3miIbj1qgSJ96GYszXFWv6ErJ8QEw==", + "dependencies": { + "@smithy/types": "^4.2.0", + "tslib": "^2.6.2" + }, + "engines": { + "node": ">=18.0.0" + } + }, + "node_modules/@aws-sdk/client-dynamodb/node_modules/@smithy/signature-v4": { + "version": "5.1.0", + "resolved": "https://registry.npmjs.org/@smithy/signature-v4/-/signature-v4-5.1.0.tgz", + "integrity": "sha512-4t5WX60sL3zGJF/CtZsUQTs3UrZEDO2P7pEaElrekbLqkWPYkgqNW1oeiNYC6xXifBnT9dVBOnNQRvOE9riU9w==", + "dependencies": { + "@smithy/is-array-buffer": "^4.0.0", + "@smithy/protocol-http": "^5.1.0", + "@smithy/types": "^4.2.0", + "@smithy/util-hex-encoding": "^4.0.0", + "@smithy/util-middleware": "^4.0.2", + "@smithy/util-uri-escape": "^4.0.0", + "@smithy/util-utf8": "^4.0.0", + "tslib": "^2.6.2" + }, + "engines": { + "node": ">=18.0.0" + } + }, + "node_modules/@aws-sdk/client-dynamodb/node_modules/@smithy/smithy-client": { + "version": "4.2.1", + "resolved": 
"https://registry.npmjs.org/@smithy/smithy-client/-/smithy-client-4.2.1.tgz", + "integrity": "sha512-fbniZef60QdsBc4ZY0iyI8xbFHIiC/QRtPi66iE4ufjiE/aaz7AfUXzcWMkpO8r+QhLeNRIfmPchIG+3/QDZ6g==", + "dependencies": { + "@smithy/core": "^3.3.0", + "@smithy/middleware-endpoint": "^4.1.1", + "@smithy/middleware-stack": "^4.0.2", + "@smithy/protocol-http": "^5.1.0", + "@smithy/types": "^4.2.0", + "@smithy/util-stream": "^4.2.0", + "tslib": "^2.6.2" + }, + "engines": { + "node": ">=18.0.0" + } + }, + "node_modules/@aws-sdk/client-dynamodb/node_modules/@smithy/types": { + "version": "4.2.0", + "resolved": "https://registry.npmjs.org/@smithy/types/-/types-4.2.0.tgz", + "integrity": "sha512-7eMk09zQKCO+E/ivsjQv+fDlOupcFUCSC/L2YUPgwhvowVGWbPQHjEFcmjt7QQ4ra5lyowS92SV53Zc6XD4+fg==", + "dependencies": { + "tslib": "^2.6.2" + }, + "engines": { + "node": ">=18.0.0" + } + }, + "node_modules/@aws-sdk/client-dynamodb/node_modules/@smithy/url-parser": { + "version": "4.0.2", + "resolved": "https://registry.npmjs.org/@smithy/url-parser/-/url-parser-4.0.2.tgz", + "integrity": "sha512-Bm8n3j2ScqnT+kJaClSVCMeiSenK6jVAzZCNewsYWuZtnBehEz4r2qP0riZySZVfzB+03XZHJeqfmJDkeeSLiQ==", + "dependencies": { + "@smithy/querystring-parser": "^4.0.2", + "@smithy/types": "^4.2.0", + "tslib": "^2.6.2" + }, + "engines": { + "node": ">=18.0.0" + } + }, + "node_modules/@aws-sdk/client-dynamodb/node_modules/@smithy/util-base64": { + "version": "4.0.0", + "resolved": "https://registry.npmjs.org/@smithy/util-base64/-/util-base64-4.0.0.tgz", + "integrity": "sha512-CvHfCmO2mchox9kjrtzoHkWHxjHZzaFojLc8quxXY7WAAMAg43nuxwv95tATVgQFNDwd4M9S1qFzj40Ul41Kmg==", + "dependencies": { + "@smithy/util-buffer-from": "^4.0.0", + "@smithy/util-utf8": "^4.0.0", + "tslib": "^2.6.2" + }, + "engines": { + "node": ">=18.0.0" + } + }, + "node_modules/@aws-sdk/client-dynamodb/node_modules/@smithy/util-body-length-browser": { + "version": "4.0.0", + "resolved": 
"https://registry.npmjs.org/@smithy/util-body-length-browser/-/util-body-length-browser-4.0.0.tgz", + "integrity": "sha512-sNi3DL0/k64/LO3A256M+m3CDdG6V7WKWHdAiBBMUN8S3hK3aMPhwnPik2A/a2ONN+9doY9UxaLfgqsIRg69QA==", + "dependencies": { + "tslib": "^2.6.2" + }, + "engines": { + "node": ">=18.0.0" + } + }, + "node_modules/@aws-sdk/client-dynamodb/node_modules/@smithy/util-body-length-node": { + "version": "4.0.0", + "resolved": "https://registry.npmjs.org/@smithy/util-body-length-node/-/util-body-length-node-4.0.0.tgz", + "integrity": "sha512-q0iDP3VsZzqJyje8xJWEJCNIu3lktUGVoSy1KB0UWym2CL1siV3artm+u1DFYTLejpsrdGyCSWBdGNjJzfDPjg==", + "dependencies": { + "tslib": "^2.6.2" + }, + "engines": { + "node": ">=18.0.0" + } + }, + "node_modules/@aws-sdk/client-dynamodb/node_modules/@smithy/util-buffer-from": { + "version": "4.0.0", + "resolved": "https://registry.npmjs.org/@smithy/util-buffer-from/-/util-buffer-from-4.0.0.tgz", + "integrity": "sha512-9TOQ7781sZvddgO8nxueKi3+yGvkY35kotA0Y6BWRajAv8jjmigQ1sBwz0UX47pQMYXJPahSKEKYFgt+rXdcug==", + "dependencies": { + "@smithy/is-array-buffer": "^4.0.0", + "tslib": "^2.6.2" + }, + "engines": { + "node": ">=18.0.0" + } + }, + "node_modules/@aws-sdk/client-dynamodb/node_modules/@smithy/util-config-provider": { + "version": "4.0.0", + "resolved": "https://registry.npmjs.org/@smithy/util-config-provider/-/util-config-provider-4.0.0.tgz", + "integrity": "sha512-L1RBVzLyfE8OXH+1hsJ8p+acNUSirQnWQ6/EgpchV88G6zGBTDPdXiiExei6Z1wR2RxYvxY/XLw6AMNCCt8H3w==", + "dependencies": { + "tslib": "^2.6.2" + }, + "engines": { + "node": ">=18.0.0" + } + }, + "node_modules/@aws-sdk/client-dynamodb/node_modules/@smithy/util-defaults-mode-browser": { + "version": "4.0.9", + "resolved": "https://registry.npmjs.org/@smithy/util-defaults-mode-browser/-/util-defaults-mode-browser-4.0.9.tgz", + "integrity": "sha512-B8j0XsElvyhv6+5hlFf6vFV/uCSyLKcInpeXOGnOImX2mGXshE01RvPoGipTlRpIk53e6UfYj7WdDdgbVfXDZw==", + "dependencies": { + "@smithy/property-provider": "^4.0.2", 
+ "@smithy/smithy-client": "^4.2.1", + "@smithy/types": "^4.2.0", + "bowser": "^2.11.0", + "tslib": "^2.6.2" + }, + "engines": { + "node": ">=18.0.0" + } + }, + "node_modules/@aws-sdk/client-dynamodb/node_modules/@smithy/util-defaults-mode-node": { + "version": "4.0.9", + "resolved": "https://registry.npmjs.org/@smithy/util-defaults-mode-node/-/util-defaults-mode-node-4.0.9.tgz", + "integrity": "sha512-wTDU8P/zdIf9DOpV5qm64HVgGRXvqjqB/fJZTEQbrz3s79JHM/E7XkMm/876Oq+ZLHJQgnXM9QHDo29dlM62eA==", + "dependencies": { + "@smithy/config-resolver": "^4.1.0", + "@smithy/credential-provider-imds": "^4.0.2", + "@smithy/node-config-provider": "^4.0.2", + "@smithy/property-provider": "^4.0.2", + "@smithy/smithy-client": "^4.2.1", + "@smithy/types": "^4.2.0", + "tslib": "^2.6.2" + }, + "engines": { + "node": ">=18.0.0" + } + }, + "node_modules/@aws-sdk/client-dynamodb/node_modules/@smithy/util-endpoints": { + "version": "3.0.2", + "resolved": "https://registry.npmjs.org/@smithy/util-endpoints/-/util-endpoints-3.0.2.tgz", + "integrity": "sha512-6QSutU5ZyrpNbnd51zRTL7goojlcnuOB55+F9VBD+j8JpRY50IGamsjlycrmpn8PQkmJucFW8A0LSfXj7jjtLQ==", + "dependencies": { + "@smithy/node-config-provider": "^4.0.2", + "@smithy/types": "^4.2.0", + "tslib": "^2.6.2" + }, + "engines": { + "node": ">=18.0.0" + } + }, + "node_modules/@aws-sdk/client-dynamodb/node_modules/@smithy/util-hex-encoding": { + "version": "4.0.0", + "resolved": "https://registry.npmjs.org/@smithy/util-hex-encoding/-/util-hex-encoding-4.0.0.tgz", + "integrity": "sha512-Yk5mLhHtfIgW2W2WQZWSg5kuMZCVbvhFmC7rV4IO2QqnZdbEFPmQnCcGMAX2z/8Qj3B9hYYNjZOhWym+RwhePw==", + "dependencies": { + "tslib": "^2.6.2" + }, + "engines": { + "node": ">=18.0.0" + } + }, + "node_modules/@aws-sdk/client-dynamodb/node_modules/@smithy/util-middleware": { + "version": "4.0.2", + "resolved": "https://registry.npmjs.org/@smithy/util-middleware/-/util-middleware-4.0.2.tgz", + "integrity": 
"sha512-6GDamTGLuBQVAEuQ4yDQ+ti/YINf/MEmIegrEeg7DdB/sld8BX1lqt9RRuIcABOhAGTA50bRbPzErez7SlDtDQ==", + "dependencies": { + "@smithy/types": "^4.2.0", + "tslib": "^2.6.2" + }, + "engines": { + "node": ">=18.0.0" + } + }, + "node_modules/@aws-sdk/client-dynamodb/node_modules/@smithy/util-retry": { + "version": "4.0.2", + "resolved": "https://registry.npmjs.org/@smithy/util-retry/-/util-retry-4.0.2.tgz", + "integrity": "sha512-Qryc+QG+7BCpvjloFLQrmlSd0RsVRHejRXd78jNO3+oREueCjwG1CCEH1vduw/ZkM1U9TztwIKVIi3+8MJScGg==", + "dependencies": { + "@smithy/service-error-classification": "^4.0.2", + "@smithy/types": "^4.2.0", + "tslib": "^2.6.2" + }, + "engines": { + "node": ">=18.0.0" + } + }, + "node_modules/@aws-sdk/client-dynamodb/node_modules/@smithy/util-stream": { + "version": "4.2.0", + "resolved": "https://registry.npmjs.org/@smithy/util-stream/-/util-stream-4.2.0.tgz", + "integrity": "sha512-Vj1TtwWnuWqdgQI6YTUF5hQ/0jmFiOYsc51CSMgj7QfyO+RF4EnT2HNjoviNlOOmgzgvf3f5yno+EiC4vrnaWQ==", + "dependencies": { + "@smithy/fetch-http-handler": "^5.0.2", + "@smithy/node-http-handler": "^4.0.4", + "@smithy/types": "^4.2.0", + "@smithy/util-base64": "^4.0.0", + "@smithy/util-buffer-from": "^4.0.0", + "@smithy/util-hex-encoding": "^4.0.0", + "@smithy/util-utf8": "^4.0.0", + "tslib": "^2.6.2" + }, + "engines": { + "node": ">=18.0.0" + } + }, + "node_modules/@aws-sdk/client-dynamodb/node_modules/@smithy/util-uri-escape": { + "version": "4.0.0", + "resolved": "https://registry.npmjs.org/@smithy/util-uri-escape/-/util-uri-escape-4.0.0.tgz", + "integrity": "sha512-77yfbCbQMtgtTylO9itEAdpPXSog3ZxMe09AEhm0dU0NLTalV70ghDZFR+Nfi1C60jnJoh/Re4090/DuZh2Omg==", + "dependencies": { + "tslib": "^2.6.2" + }, + "engines": { + "node": ">=18.0.0" + } + }, + "node_modules/@aws-sdk/client-dynamodb/node_modules/@smithy/util-utf8": { + "version": "4.0.0", + "resolved": "https://registry.npmjs.org/@smithy/util-utf8/-/util-utf8-4.0.0.tgz", + "integrity": 
"sha512-b+zebfKCfRdgNJDknHCob3O7FpeYQN6ZG6YLExMcasDHsCXlsXCEuiPZeLnJLpwa5dvPetGlnGCiMHuLwGvFow==", + "dependencies": { + "@smithy/util-buffer-from": "^4.0.0", + "tslib": "^2.6.2" + }, + "engines": { + "node": ">=18.0.0" + } + }, + "node_modules/@aws-sdk/client-dynamodb/node_modules/@smithy/util-waiter": { + "version": "4.0.3", + "resolved": "https://registry.npmjs.org/@smithy/util-waiter/-/util-waiter-4.0.3.tgz", + "integrity": "sha512-JtaY3FxmD+te+KSI2FJuEcfNC9T/DGGVf551babM7fAaXhjJUt7oSYurH1Devxd2+BOSUACCgt3buinx4UnmEA==", + "dependencies": { + "@smithy/abort-controller": "^4.0.2", + "@smithy/types": "^4.2.0", + "tslib": "^2.6.2" + }, + "engines": { + "node": ">=18.0.0" + } + }, + "node_modules/@aws-sdk/client-ec2": { + "version": "3.624.0", + "resolved": "https://registry.npmjs.org/@aws-sdk/client-ec2/-/client-ec2-3.624.0.tgz", + "integrity": "sha512-n3IHWiNSP5Cj0ZbENJGtDeJPsx6EVNMeePh8Nqe9Ja5l5/Brkdyu4TV6t/taPXHJQDH7E6cq4/uMiiEPRNuf6Q==", + "dev": true, + "dependencies": { + "@aws-crypto/sha256-browser": "5.2.0", + "@aws-crypto/sha256-js": "5.2.0", + "@aws-sdk/client-sso-oidc": "3.624.0", + "@aws-sdk/client-sts": "3.624.0", + "@aws-sdk/core": "3.624.0", + "@aws-sdk/credential-provider-node": "3.624.0", + "@aws-sdk/middleware-host-header": "3.620.0", + "@aws-sdk/middleware-logger": "3.609.0", + "@aws-sdk/middleware-recursion-detection": "3.620.0", + "@aws-sdk/middleware-sdk-ec2": "3.622.0", + "@aws-sdk/middleware-user-agent": "3.620.0", "@aws-sdk/region-config-resolver": "3.614.0", "@aws-sdk/types": "3.609.0", "@aws-sdk/util-endpoints": "3.614.0", "@aws-sdk/util-user-agent-browser": "3.609.0", "@aws-sdk/util-user-agent-node": "3.614.0", "@smithy/config-resolver": "^3.0.5", - "@smithy/core": "^2.3.1", + "@smithy/core": "^2.3.2", "@smithy/fetch-http-handler": "^3.2.4", "@smithy/hash-node": "^3.0.3", "@smithy/invalid-dependency": "^3.0.3", "@smithy/middleware-content-length": "^3.0.5", "@smithy/middleware-endpoint": "^3.1.0", - "@smithy/middleware-retry": 
"^3.0.13", + "@smithy/middleware-retry": "^3.0.14", "@smithy/middleware-serde": "^3.0.3", "@smithy/middleware-stack": "^3.0.3", "@smithy/node-config-provider": "^3.1.4", "@smithy/node-http-handler": "^3.1.4", "@smithy/protocol-http": "^4.1.0", - "@smithy/smithy-client": "^3.1.11", + "@smithy/smithy-client": "^3.1.12", "@smithy/types": "^3.3.0", "@smithy/url-parser": "^3.0.3", "@smithy/util-base64": "^3.0.0", "@smithy/util-body-length-browser": "^3.0.0", "@smithy/util-body-length-node": "^3.0.0", - "@smithy/util-defaults-mode-browser": "^3.0.13", - "@smithy/util-defaults-mode-node": "^3.0.13", + "@smithy/util-defaults-mode-browser": "^3.0.14", + "@smithy/util-defaults-mode-node": "^3.0.14", "@smithy/util-endpoints": "^2.0.5", "@smithy/util-middleware": "^3.0.3", "@smithy/util-retry": "^3.0.3", "@smithy/util-utf8": "^3.0.0", - "tslib": "^2.6.2" + "@smithy/util-waiter": "^3.1.2", + "tslib": "^2.6.2", + "uuid": "^9.0.1" }, "engines": { "node": ">=16.0.0" } }, - "node_modules/@aws-sdk/client-firehose/node_modules/@aws-sdk/client-sso": { - "version": "3.621.0", - "resolved": "https://registry.npmjs.org/@aws-sdk/client-sso/-/client-sso-3.621.0.tgz", - "integrity": "sha512-xpKfikN4u0BaUYZA9FGUMkkDmfoIP0Q03+A86WjqDWhcOoqNA1DkHsE4kZ+r064ifkPUfcNuUvlkVTEoBZoFjA==", + "node_modules/@aws-sdk/client-ec2/node_modules/@aws-sdk/client-sso": { + "version": "3.624.0", + "resolved": "https://registry.npmjs.org/@aws-sdk/client-sso/-/client-sso-3.624.0.tgz", + "integrity": "sha512-EX6EF+rJzMPC5dcdsu40xSi2To7GSvdGQNIpe97pD9WvZwM9tRNQnNM4T6HA4gjV1L6Jwk8rBlG/CnveXtLEMw==", + "dev": true, "dependencies": { "@aws-crypto/sha256-browser": "5.2.0", "@aws-crypto/sha256-js": "5.2.0", - "@aws-sdk/core": "3.621.0", + "@aws-sdk/core": "3.624.0", "@aws-sdk/middleware-host-header": "3.620.0", "@aws-sdk/middleware-logger": "3.609.0", "@aws-sdk/middleware-recursion-detection": "3.620.0", @@ -9743,26 +10101,26 @@ "@aws-sdk/util-user-agent-browser": "3.609.0", "@aws-sdk/util-user-agent-node": "3.614.0", 
"@smithy/config-resolver": "^3.0.5", - "@smithy/core": "^2.3.1", + "@smithy/core": "^2.3.2", "@smithy/fetch-http-handler": "^3.2.4", "@smithy/hash-node": "^3.0.3", "@smithy/invalid-dependency": "^3.0.3", "@smithy/middleware-content-length": "^3.0.5", "@smithy/middleware-endpoint": "^3.1.0", - "@smithy/middleware-retry": "^3.0.13", + "@smithy/middleware-retry": "^3.0.14", "@smithy/middleware-serde": "^3.0.3", "@smithy/middleware-stack": "^3.0.3", "@smithy/node-config-provider": "^3.1.4", "@smithy/node-http-handler": "^3.1.4", "@smithy/protocol-http": "^4.1.0", - "@smithy/smithy-client": "^3.1.11", + "@smithy/smithy-client": "^3.1.12", "@smithy/types": "^3.3.0", "@smithy/url-parser": "^3.0.3", "@smithy/util-base64": "^3.0.0", "@smithy/util-body-length-browser": "^3.0.0", "@smithy/util-body-length-node": "^3.0.0", - "@smithy/util-defaults-mode-browser": "^3.0.13", - "@smithy/util-defaults-mode-node": "^3.0.13", + "@smithy/util-defaults-mode-browser": "^3.0.14", + "@smithy/util-defaults-mode-node": "^3.0.14", "@smithy/util-endpoints": "^2.0.5", "@smithy/util-middleware": "^3.0.3", "@smithy/util-retry": "^3.0.3", @@ -9773,15 +10131,16 @@ "node": ">=16.0.0" } }, - "node_modules/@aws-sdk/client-firehose/node_modules/@aws-sdk/client-sso-oidc": { - "version": "3.621.0", - "resolved": "https://registry.npmjs.org/@aws-sdk/client-sso-oidc/-/client-sso-oidc-3.621.0.tgz", - "integrity": "sha512-mMjk3mFUwV2Y68POf1BQMTF+F6qxt5tPu6daEUCNGC9Cenk3h2YXQQoS4/eSyYzuBiYk3vx49VgleRvdvkg8rg==", + "node_modules/@aws-sdk/client-ec2/node_modules/@aws-sdk/client-sso-oidc": { + "version": "3.624.0", + "resolved": "https://registry.npmjs.org/@aws-sdk/client-sso-oidc/-/client-sso-oidc-3.624.0.tgz", + "integrity": "sha512-Ki2uKYJKKtfHxxZsiMTOvJoVRP6b2pZ1u3rcUb2m/nVgBPUfLdl8ZkGpqE29I+t5/QaS/sEdbn6cgMUZwl+3Dg==", + "dev": true, "dependencies": { "@aws-crypto/sha256-browser": "5.2.0", "@aws-crypto/sha256-js": "5.2.0", - "@aws-sdk/core": "3.621.0", - "@aws-sdk/credential-provider-node": "3.621.0", + 
"@aws-sdk/core": "3.624.0", + "@aws-sdk/credential-provider-node": "3.624.0", "@aws-sdk/middleware-host-header": "3.620.0", "@aws-sdk/middleware-logger": "3.609.0", "@aws-sdk/middleware-recursion-detection": "3.620.0", @@ -9792,26 +10151,26 @@ "@aws-sdk/util-user-agent-browser": "3.609.0", "@aws-sdk/util-user-agent-node": "3.614.0", "@smithy/config-resolver": "^3.0.5", - "@smithy/core": "^2.3.1", + "@smithy/core": "^2.3.2", "@smithy/fetch-http-handler": "^3.2.4", "@smithy/hash-node": "^3.0.3", "@smithy/invalid-dependency": "^3.0.3", "@smithy/middleware-content-length": "^3.0.5", "@smithy/middleware-endpoint": "^3.1.0", - "@smithy/middleware-retry": "^3.0.13", + "@smithy/middleware-retry": "^3.0.14", "@smithy/middleware-serde": "^3.0.3", "@smithy/middleware-stack": "^3.0.3", "@smithy/node-config-provider": "^3.1.4", "@smithy/node-http-handler": "^3.1.4", "@smithy/protocol-http": "^4.1.0", - "@smithy/smithy-client": "^3.1.11", + "@smithy/smithy-client": "^3.1.12", "@smithy/types": "^3.3.0", "@smithy/url-parser": "^3.0.3", "@smithy/util-base64": "^3.0.0", "@smithy/util-body-length-browser": "^3.0.0", "@smithy/util-body-length-node": "^3.0.0", - "@smithy/util-defaults-mode-browser": "^3.0.13", - "@smithy/util-defaults-mode-node": "^3.0.13", + "@smithy/util-defaults-mode-browser": "^3.0.14", + "@smithy/util-defaults-mode-node": "^3.0.14", "@smithy/util-endpoints": "^2.0.5", "@smithy/util-middleware": "^3.0.3", "@smithy/util-retry": "^3.0.3", @@ -9822,19 +10181,20 @@ "node": ">=16.0.0" }, "peerDependencies": { - "@aws-sdk/client-sts": "^3.621.0" + "@aws-sdk/client-sts": "^3.624.0" } }, - "node_modules/@aws-sdk/client-firehose/node_modules/@aws-sdk/client-sts": { - "version": "3.621.0", - "resolved": "https://registry.npmjs.org/@aws-sdk/client-sts/-/client-sts-3.621.0.tgz", - "integrity": "sha512-707uiuReSt+nAx6d0c21xLjLm2lxeKc7padxjv92CIrIocnQSlJPxSCM7r5zBhwiahJA6MNQwmTl2xznU67KgA==", + "node_modules/@aws-sdk/client-ec2/node_modules/@aws-sdk/client-sts": { + "version": 
"3.624.0", + "resolved": "https://registry.npmjs.org/@aws-sdk/client-sts/-/client-sts-3.624.0.tgz", + "integrity": "sha512-k36fLZCb2nfoV/DKK3jbRgO/Yf7/R80pgYfMiotkGjnZwDmRvNN08z4l06L9C+CieazzkgRxNUzyppsYcYsQaw==", + "dev": true, "dependencies": { "@aws-crypto/sha256-browser": "5.2.0", "@aws-crypto/sha256-js": "5.2.0", - "@aws-sdk/client-sso-oidc": "3.621.0", - "@aws-sdk/core": "3.621.0", - "@aws-sdk/credential-provider-node": "3.621.0", + "@aws-sdk/client-sso-oidc": "3.624.0", + "@aws-sdk/core": "3.624.0", + "@aws-sdk/credential-provider-node": "3.624.0", "@aws-sdk/middleware-host-header": "3.620.0", "@aws-sdk/middleware-logger": "3.609.0", "@aws-sdk/middleware-recursion-detection": "3.620.0", @@ -9845,26 +10205,26 @@ "@aws-sdk/util-user-agent-browser": "3.609.0", "@aws-sdk/util-user-agent-node": "3.614.0", "@smithy/config-resolver": "^3.0.5", - "@smithy/core": "^2.3.1", + "@smithy/core": "^2.3.2", "@smithy/fetch-http-handler": "^3.2.4", "@smithy/hash-node": "^3.0.3", "@smithy/invalid-dependency": "^3.0.3", "@smithy/middleware-content-length": "^3.0.5", "@smithy/middleware-endpoint": "^3.1.0", - "@smithy/middleware-retry": "^3.0.13", + "@smithy/middleware-retry": "^3.0.14", "@smithy/middleware-serde": "^3.0.3", "@smithy/middleware-stack": "^3.0.3", "@smithy/node-config-provider": "^3.1.4", "@smithy/node-http-handler": "^3.1.4", "@smithy/protocol-http": "^4.1.0", - "@smithy/smithy-client": "^3.1.11", + "@smithy/smithy-client": "^3.1.12", "@smithy/types": "^3.3.0", "@smithy/url-parser": "^3.0.3", "@smithy/util-base64": "^3.0.0", "@smithy/util-body-length-browser": "^3.0.0", "@smithy/util-body-length-node": "^3.0.0", - "@smithy/util-defaults-mode-browser": "^3.0.13", - "@smithy/util-defaults-mode-node": "^3.0.13", + "@smithy/util-defaults-mode-browser": "^3.0.14", + "@smithy/util-defaults-mode-node": "^3.0.14", "@smithy/util-endpoints": "^2.0.5", "@smithy/util-middleware": "^3.0.3", "@smithy/util-retry": "^3.0.3", @@ -9875,16 +10235,17 @@ "node": ">=16.0.0" } }, - 
"node_modules/@aws-sdk/client-firehose/node_modules/@aws-sdk/core": { - "version": "3.621.0", - "resolved": "https://registry.npmjs.org/@aws-sdk/core/-/core-3.621.0.tgz", - "integrity": "sha512-CtOwWmDdEiINkGXD93iGfXjN0WmCp9l45cDWHHGa8lRgEDyhuL7bwd/pH5aSzj0j8SiQBG2k0S7DHbd5RaqvbQ==", + "node_modules/@aws-sdk/client-ec2/node_modules/@aws-sdk/core": { + "version": "3.624.0", + "resolved": "https://registry.npmjs.org/@aws-sdk/core/-/core-3.624.0.tgz", + "integrity": "sha512-WyFmPbhRIvtWi7hBp8uSFy+iPpj8ccNV/eX86hwF4irMjfc/FtsGVIAeBXxXM/vGCjkdfEzOnl+tJ2XACD4OXg==", + "dev": true, "dependencies": { - "@smithy/core": "^2.3.1", + "@smithy/core": "^2.3.2", "@smithy/node-config-provider": "^3.1.4", "@smithy/protocol-http": "^4.1.0", "@smithy/signature-v4": "^4.1.0", - "@smithy/smithy-client": "^3.1.11", + "@smithy/smithy-client": "^3.1.12", "@smithy/types": "^3.3.0", "@smithy/util-middleware": "^3.0.3", "fast-xml-parser": "4.4.1", @@ -9894,10 +10255,11 @@ "node": ">=16.0.0" } }, - "node_modules/@aws-sdk/client-firehose/node_modules/@aws-sdk/credential-provider-env": { + "node_modules/@aws-sdk/client-ec2/node_modules/@aws-sdk/credential-provider-env": { "version": "3.620.1", "resolved": "https://registry.npmjs.org/@aws-sdk/credential-provider-env/-/credential-provider-env-3.620.1.tgz", "integrity": "sha512-ExuILJ2qLW5ZO+rgkNRj0xiAipKT16Rk77buvPP8csR7kkCflT/gXTyzRe/uzIiETTxM7tr8xuO9MP/DQXqkfg==", + "dev": true, "dependencies": { "@aws-sdk/types": "3.609.0", "@smithy/property-provider": "^3.1.3", @@ -9908,17 +10270,18 @@ "node": ">=16.0.0" } }, - "node_modules/@aws-sdk/client-firehose/node_modules/@aws-sdk/credential-provider-http": { - "version": "3.621.0", - "resolved": "https://registry.npmjs.org/@aws-sdk/credential-provider-http/-/credential-provider-http-3.621.0.tgz", - "integrity": "sha512-/jc2tEsdkT1QQAI5Dvoci50DbSxtJrevemwFsm0B73pwCcOQZ5ZwwSdVqGsPutzYzUVx3bcXg3LRL7jLACqRIg==", + "node_modules/@aws-sdk/client-ec2/node_modules/@aws-sdk/credential-provider-http": { + 
"version": "3.622.0", + "resolved": "https://registry.npmjs.org/@aws-sdk/credential-provider-http/-/credential-provider-http-3.622.0.tgz", + "integrity": "sha512-VUHbr24Oll1RK3WR8XLUugLpgK9ZuxEm/NVeVqyFts1Ck9gsKpRg1x4eH7L7tW3SJ4TDEQNMbD7/7J+eoL2svg==", + "dev": true, "dependencies": { "@aws-sdk/types": "3.609.0", "@smithy/fetch-http-handler": "^3.2.4", "@smithy/node-http-handler": "^3.1.4", "@smithy/property-provider": "^3.1.3", "@smithy/protocol-http": "^4.1.0", - "@smithy/smithy-client": "^3.1.11", + "@smithy/smithy-client": "^3.1.12", "@smithy/types": "^3.3.0", "@smithy/util-stream": "^3.1.3", "tslib": "^2.6.2" @@ -9927,15 +10290,16 @@ "node": ">=16.0.0" } }, - "node_modules/@aws-sdk/client-firehose/node_modules/@aws-sdk/credential-provider-ini": { - "version": "3.621.0", - "resolved": "https://registry.npmjs.org/@aws-sdk/credential-provider-ini/-/credential-provider-ini-3.621.0.tgz", - "integrity": "sha512-0EWVnSc+JQn5HLnF5Xv405M8n4zfdx9gyGdpnCmAmFqEDHA8LmBdxJdpUk1Ovp/I5oPANhjojxabIW5f1uU0RA==", + "node_modules/@aws-sdk/client-ec2/node_modules/@aws-sdk/credential-provider-ini": { + "version": "3.624.0", + "resolved": "https://registry.npmjs.org/@aws-sdk/credential-provider-ini/-/credential-provider-ini-3.624.0.tgz", + "integrity": "sha512-mMoNIy7MO2WTBbdqMyLpbt6SZpthE6e0GkRYpsd0yozPt0RZopcBhEh+HG1U9Y1PVODo+jcMk353vAi61CfnhQ==", + "dev": true, "dependencies": { "@aws-sdk/credential-provider-env": "3.620.1", - "@aws-sdk/credential-provider-http": "3.621.0", + "@aws-sdk/credential-provider-http": "3.622.0", "@aws-sdk/credential-provider-process": "3.620.1", - "@aws-sdk/credential-provider-sso": "3.621.0", + "@aws-sdk/credential-provider-sso": "3.624.0", "@aws-sdk/credential-provider-web-identity": "3.621.0", "@aws-sdk/types": "3.609.0", "@smithy/credential-provider-imds": "^3.2.0", @@ -9948,19 +10312,20 @@ "node": ">=16.0.0" }, "peerDependencies": { - "@aws-sdk/client-sts": "^3.621.0" + "@aws-sdk/client-sts": "^3.624.0" } }, - 
"node_modules/@aws-sdk/client-firehose/node_modules/@aws-sdk/credential-provider-node": { - "version": "3.621.0", - "resolved": "https://registry.npmjs.org/@aws-sdk/credential-provider-node/-/credential-provider-node-3.621.0.tgz", - "integrity": "sha512-4JqpccUgz5Snanpt2+53hbOBbJQrSFq7E1sAAbgY6BKVQUsW5qyXqnjvSF32kDeKa5JpBl3bBWLZl04IadcPHw==", + "node_modules/@aws-sdk/client-ec2/node_modules/@aws-sdk/credential-provider-node": { + "version": "3.624.0", + "resolved": "https://registry.npmjs.org/@aws-sdk/credential-provider-node/-/credential-provider-node-3.624.0.tgz", + "integrity": "sha512-vYyGK7oNpd81BdbH5IlmQ6zfaQqU+rPwsKTDDBeLRjshtrGXOEpfoahVpG9PX0ibu32IOWp4ZyXBNyVrnvcMOw==", + "dev": true, "dependencies": { "@aws-sdk/credential-provider-env": "3.620.1", - "@aws-sdk/credential-provider-http": "3.621.0", - "@aws-sdk/credential-provider-ini": "3.621.0", + "@aws-sdk/credential-provider-http": "3.622.0", + "@aws-sdk/credential-provider-ini": "3.624.0", "@aws-sdk/credential-provider-process": "3.620.1", - "@aws-sdk/credential-provider-sso": "3.621.0", + "@aws-sdk/credential-provider-sso": "3.624.0", "@aws-sdk/credential-provider-web-identity": "3.621.0", "@aws-sdk/types": "3.609.0", "@smithy/credential-provider-imds": "^3.2.0", @@ -9973,10 +10338,11 @@ "node": ">=16.0.0" } }, - "node_modules/@aws-sdk/client-firehose/node_modules/@aws-sdk/credential-provider-process": { + "node_modules/@aws-sdk/client-ec2/node_modules/@aws-sdk/credential-provider-process": { "version": "3.620.1", "resolved": "https://registry.npmjs.org/@aws-sdk/credential-provider-process/-/credential-provider-process-3.620.1.tgz", "integrity": "sha512-hWqFMidqLAkaV9G460+1at6qa9vySbjQKKc04p59OT7lZ5cO5VH5S4aI05e+m4j364MBROjjk2ugNvfNf/8ILg==", + "dev": true, "dependencies": { "@aws-sdk/types": "3.609.0", "@smithy/property-provider": "^3.1.3", @@ -9988,12 +10354,13 @@ "node": ">=16.0.0" } }, - "node_modules/@aws-sdk/client-firehose/node_modules/@aws-sdk/credential-provider-sso": { - "version": "3.621.0", 
- "resolved": "https://registry.npmjs.org/@aws-sdk/credential-provider-sso/-/credential-provider-sso-3.621.0.tgz", - "integrity": "sha512-Kza0jcFeA/GEL6xJlzR2KFf1PfZKMFnxfGzJzl5yN7EjoGdMijl34KaRyVnfRjnCWcsUpBWKNIDk9WZVMY9yiw==", + "node_modules/@aws-sdk/client-ec2/node_modules/@aws-sdk/credential-provider-sso": { + "version": "3.624.0", + "resolved": "https://registry.npmjs.org/@aws-sdk/credential-provider-sso/-/credential-provider-sso-3.624.0.tgz", + "integrity": "sha512-A02bayIjU9APEPKr3HudrFHEx0WfghoSPsPopckDkW7VBqO4wizzcxr75Q9A3vNX+cwg0wCN6UitTNe6pVlRaQ==", + "dev": true, "dependencies": { - "@aws-sdk/client-sso": "3.621.0", + "@aws-sdk/client-sso": "3.624.0", "@aws-sdk/token-providers": "3.614.0", "@aws-sdk/types": "3.609.0", "@smithy/property-provider": "^3.1.3", @@ -10005,10 +10372,11 @@ "node": ">=16.0.0" } }, - "node_modules/@aws-sdk/client-firehose/node_modules/@aws-sdk/credential-provider-web-identity": { + "node_modules/@aws-sdk/client-ec2/node_modules/@aws-sdk/credential-provider-web-identity": { "version": "3.621.0", "resolved": "https://registry.npmjs.org/@aws-sdk/credential-provider-web-identity/-/credential-provider-web-identity-3.621.0.tgz", "integrity": "sha512-w7ASSyfNvcx7+bYGep3VBgC3K6vEdLmlpjT7nSIHxxQf+WSdvy+HynwJosrpZax0sK5q0D1Jpn/5q+r5lwwW6w==", + "dev": true, "dependencies": { "@aws-sdk/types": "3.609.0", "@smithy/property-provider": "^3.1.3", @@ -10022,10 +10390,11 @@ "@aws-sdk/client-sts": "^3.621.0" } }, - "node_modules/@aws-sdk/client-firehose/node_modules/@aws-sdk/middleware-host-header": { + "node_modules/@aws-sdk/client-ec2/node_modules/@aws-sdk/middleware-host-header": { "version": "3.620.0", "resolved": "https://registry.npmjs.org/@aws-sdk/middleware-host-header/-/middleware-host-header-3.620.0.tgz", "integrity": "sha512-VMtPEZwqYrII/oUkffYsNWY9PZ9xpNJpMgmyU0rlDQ25O1c0Hk3fJmZRe6pEkAJ0omD7kLrqGl1DUjQVxpd/Rg==", + "dev": true, "dependencies": { "@aws-sdk/types": "3.609.0", "@smithy/protocol-http": "^4.1.0", @@ -10036,10 +10405,11 @@ 
"node": ">=16.0.0" } }, - "node_modules/@aws-sdk/client-firehose/node_modules/@aws-sdk/middleware-logger": { + "node_modules/@aws-sdk/client-ec2/node_modules/@aws-sdk/middleware-logger": { "version": "3.609.0", "resolved": "https://registry.npmjs.org/@aws-sdk/middleware-logger/-/middleware-logger-3.609.0.tgz", "integrity": "sha512-S62U2dy4jMDhDFDK5gZ4VxFdWzCtLzwbYyFZx2uvPYTECkepLUfzLic2BHg2Qvtu4QjX+oGE3P/7fwaGIsGNuQ==", + "dev": true, "dependencies": { "@aws-sdk/types": "3.609.0", "@smithy/types": "^3.3.0", @@ -10049,10 +10419,11 @@ "node": ">=16.0.0" } }, - "node_modules/@aws-sdk/client-firehose/node_modules/@aws-sdk/middleware-recursion-detection": { + "node_modules/@aws-sdk/client-ec2/node_modules/@aws-sdk/middleware-recursion-detection": { "version": "3.620.0", "resolved": "https://registry.npmjs.org/@aws-sdk/middleware-recursion-detection/-/middleware-recursion-detection-3.620.0.tgz", "integrity": "sha512-nh91S7aGK3e/o1ck64sA/CyoFw+gAYj2BDOnoNa6ouyCrVJED96ZXWbhye/fz9SgmNUZR2g7GdVpiLpMKZoI5w==", + "dev": true, "dependencies": { "@aws-sdk/types": "3.609.0", "@smithy/protocol-http": "^4.1.0", @@ -10063,10 +10434,11 @@ "node": ">=16.0.0" } }, - "node_modules/@aws-sdk/client-firehose/node_modules/@aws-sdk/middleware-user-agent": { + "node_modules/@aws-sdk/client-ec2/node_modules/@aws-sdk/middleware-user-agent": { "version": "3.620.0", "resolved": "https://registry.npmjs.org/@aws-sdk/middleware-user-agent/-/middleware-user-agent-3.620.0.tgz", "integrity": "sha512-bvS6etn+KsuL32ubY5D3xNof1qkenpbJXf/ugGXbg0n98DvDFQ/F+SMLxHgbnER5dsKYchNnhmtI6/FC3HFu/A==", + "dev": true, "dependencies": { "@aws-sdk/types": "3.609.0", "@aws-sdk/util-endpoints": "3.614.0", @@ -10078,10 +10450,11 @@ "node": ">=16.0.0" } }, - "node_modules/@aws-sdk/client-firehose/node_modules/@aws-sdk/region-config-resolver": { + "node_modules/@aws-sdk/client-ec2/node_modules/@aws-sdk/region-config-resolver": { "version": "3.614.0", "resolved": 
"https://registry.npmjs.org/@aws-sdk/region-config-resolver/-/region-config-resolver-3.614.0.tgz", "integrity": "sha512-vDCeMXvic/LU0KFIUjpC3RiSTIkkvESsEfbVHiHH0YINfl8HnEqR5rj+L8+phsCeVg2+LmYwYxd5NRz4PHxt5g==", + "dev": true, "dependencies": { "@aws-sdk/types": "3.609.0", "@smithy/node-config-provider": "^3.1.4", @@ -10094,10 +10467,11 @@ "node": ">=16.0.0" } }, - "node_modules/@aws-sdk/client-firehose/node_modules/@aws-sdk/token-providers": { + "node_modules/@aws-sdk/client-ec2/node_modules/@aws-sdk/token-providers": { "version": "3.614.0", "resolved": "https://registry.npmjs.org/@aws-sdk/token-providers/-/token-providers-3.614.0.tgz", "integrity": "sha512-okItqyY6L9IHdxqs+Z116y5/nda7rHxLvROxtAJdLavWTYDydxrZstImNgGWTeVdmc0xX2gJCI77UYUTQWnhRw==", + "dev": true, "dependencies": { "@aws-sdk/types": "3.609.0", "@smithy/property-provider": "^3.1.3", @@ -10112,10 +10486,11 @@ "@aws-sdk/client-sso-oidc": "^3.614.0" } }, - "node_modules/@aws-sdk/client-firehose/node_modules/@aws-sdk/types": { + "node_modules/@aws-sdk/client-ec2/node_modules/@aws-sdk/types": { "version": "3.609.0", "resolved": "https://registry.npmjs.org/@aws-sdk/types/-/types-3.609.0.tgz", "integrity": "sha512-+Tqnh9w0h2LcrUsdXyT1F8mNhXz+tVYBtP19LpeEGntmvHwa2XzvLUCWpoIAIVsHp5+HdB2X9Sn0KAtmbFXc2Q==", + "dev": true, "dependencies": { "@smithy/types": "^3.3.0", "tslib": "^2.6.2" @@ -10124,10 +10499,11 @@ "node": ">=16.0.0" } }, - "node_modules/@aws-sdk/client-firehose/node_modules/@aws-sdk/util-endpoints": { + "node_modules/@aws-sdk/client-ec2/node_modules/@aws-sdk/util-endpoints": { "version": "3.614.0", "resolved": "https://registry.npmjs.org/@aws-sdk/util-endpoints/-/util-endpoints-3.614.0.tgz", "integrity": "sha512-wK2cdrXHH4oz4IomV/yrGkftU9A+ITB6nFL+rxxyO78is2ifHJpFdV4aqk4LSkXYPi6CXWNru/Dqc7yiKXgJPw==", + "dev": true, "dependencies": { "@aws-sdk/types": "3.609.0", "@smithy/types": "^3.3.0", @@ -10138,10 +10514,11 @@ "node": ">=16.0.0" } }, - 
"node_modules/@aws-sdk/client-firehose/node_modules/@aws-sdk/util-user-agent-browser": { + "node_modules/@aws-sdk/client-ec2/node_modules/@aws-sdk/util-user-agent-browser": { "version": "3.609.0", "resolved": "https://registry.npmjs.org/@aws-sdk/util-user-agent-browser/-/util-user-agent-browser-3.609.0.tgz", "integrity": "sha512-fojPU+mNahzQ0YHYBsx0ZIhmMA96H+ZIZ665ObU9tl+SGdbLneVZVikGve+NmHTQwHzwkFsZYYnVKAkreJLAtA==", + "dev": true, "dependencies": { "@aws-sdk/types": "3.609.0", "@smithy/types": "^3.3.0", @@ -10149,10 +10526,11 @@ "tslib": "^2.6.2" } }, - "node_modules/@aws-sdk/client-firehose/node_modules/@aws-sdk/util-user-agent-node": { + "node_modules/@aws-sdk/client-ec2/node_modules/@aws-sdk/util-user-agent-node": { "version": "3.614.0", "resolved": "https://registry.npmjs.org/@aws-sdk/util-user-agent-node/-/util-user-agent-node-3.614.0.tgz", "integrity": "sha512-15ElZT88peoHnq5TEoEtZwoXTXRxNrk60TZNdpl/TUBJ5oNJ9Dqb5Z4ryb8ofN6nm9aFf59GVAerFDz8iUoHBA==", + "dev": true, "dependencies": { "@aws-sdk/types": "3.609.0", "@smithy/node-config-provider": "^3.1.4", @@ -10171,10 +10549,11 @@ } } }, - "node_modules/@aws-sdk/client-firehose/node_modules/@smithy/node-config-provider": { + "node_modules/@aws-sdk/client-ec2/node_modules/@smithy/node-config-provider": { "version": "3.1.9", "resolved": "https://registry.npmjs.org/@smithy/node-config-provider/-/node-config-provider-3.1.9.tgz", "integrity": "sha512-qRHoah49QJ71eemjuS/WhUXB+mpNtwHRWQr77J/m40ewBVVwvo52kYAmb7iuaECgGTTcYxHS4Wmewfwy++ueew==", + "dev": true, "dependencies": { "@smithy/property-provider": "^3.1.8", "@smithy/shared-ini-file-loader": "^3.1.9", @@ -10185,10 +10564,11 @@ "node": ">=16.0.0" } }, - "node_modules/@aws-sdk/client-firehose/node_modules/@smithy/shared-ini-file-loader": { + "node_modules/@aws-sdk/client-ec2/node_modules/@smithy/shared-ini-file-loader": { "version": "3.1.9", "resolved": "https://registry.npmjs.org/@smithy/shared-ini-file-loader/-/shared-ini-file-loader-3.1.9.tgz", "integrity": 
"sha512-/+OsJRNtoRbtsX0UpSgWVxFZLsJHo/4sTr+kBg/J78sr7iC+tHeOvOJrS5hCpVQ6sWBbhWLp1UNiuMyZhE6pmA==", + "dev": true, "dependencies": { "@smithy/types": "^3.6.0", "tslib": "^2.6.2" @@ -10197,10 +10577,11 @@ "node": ">=16.0.0" } }, - "node_modules/@aws-sdk/client-firehose/node_modules/@smithy/util-utf8": { + "node_modules/@aws-sdk/client-ec2/node_modules/@smithy/util-utf8": { "version": "3.0.0", "resolved": "https://registry.npmjs.org/@smithy/util-utf8/-/util-utf8-3.0.0.tgz", "integrity": "sha512-rUeT12bxFnplYDe815GXbq/oixEGHfRFFtcTF3YdDi/JaENIM6aSYYLJydG83UNzLXeRI5K8abYd/8Sp/QM0kA==", + "dev": true, "dependencies": { "@smithy/util-buffer-from": "^3.0.0", "tslib": "^2.6.2" @@ -10209,18 +10590,17 @@ "node": ">=16.0.0" } }, - "node_modules/@aws-sdk/client-iam": { - "version": "3.624.0", - "resolved": "https://registry.npmjs.org/@aws-sdk/client-iam/-/client-iam-3.624.0.tgz", - "integrity": "sha512-a3Qy7AIht2nHiZPJ/HiMdyiOLiDN+iKp1R916SEbgFi9MiOyRHFeLCCPQHMf1O8YXfb0hbHr5IFnfZLfUcJaWQ==", - "dev": true, + "node_modules/@aws-sdk/client-firehose": { + "version": "3.621.0", + "resolved": "https://registry.npmjs.org/@aws-sdk/client-firehose/-/client-firehose-3.621.0.tgz", + "integrity": "sha512-XAjAkXdb35PDvBYph609Fxn4g00HYH/U6N4+KjF9gLQrdTU+wkjf3D9YD02DZNbApJVcu4eIxWh/8M25YkW02A==", "dependencies": { "@aws-crypto/sha256-browser": "5.2.0", "@aws-crypto/sha256-js": "5.2.0", - "@aws-sdk/client-sso-oidc": "3.624.0", - "@aws-sdk/client-sts": "3.624.0", - "@aws-sdk/core": "3.624.0", - "@aws-sdk/credential-provider-node": "3.624.0", + "@aws-sdk/client-sso-oidc": "3.621.0", + "@aws-sdk/client-sts": "3.621.0", + "@aws-sdk/core": "3.621.0", + "@aws-sdk/credential-provider-node": "3.621.0", "@aws-sdk/middleware-host-header": "3.620.0", "@aws-sdk/middleware-logger": "3.609.0", "@aws-sdk/middleware-recursion-detection": "3.620.0", @@ -10231,46 +10611,44 @@ "@aws-sdk/util-user-agent-browser": "3.609.0", "@aws-sdk/util-user-agent-node": "3.614.0", "@smithy/config-resolver": "^3.0.5", - 
"@smithy/core": "^2.3.2", + "@smithy/core": "^2.3.1", "@smithy/fetch-http-handler": "^3.2.4", "@smithy/hash-node": "^3.0.3", "@smithy/invalid-dependency": "^3.0.3", "@smithy/middleware-content-length": "^3.0.5", "@smithy/middleware-endpoint": "^3.1.0", - "@smithy/middleware-retry": "^3.0.14", + "@smithy/middleware-retry": "^3.0.13", "@smithy/middleware-serde": "^3.0.3", "@smithy/middleware-stack": "^3.0.3", "@smithy/node-config-provider": "^3.1.4", "@smithy/node-http-handler": "^3.1.4", "@smithy/protocol-http": "^4.1.0", - "@smithy/smithy-client": "^3.1.12", + "@smithy/smithy-client": "^3.1.11", "@smithy/types": "^3.3.0", "@smithy/url-parser": "^3.0.3", "@smithy/util-base64": "^3.0.0", "@smithy/util-body-length-browser": "^3.0.0", "@smithy/util-body-length-node": "^3.0.0", - "@smithy/util-defaults-mode-browser": "^3.0.14", - "@smithy/util-defaults-mode-node": "^3.0.14", + "@smithy/util-defaults-mode-browser": "^3.0.13", + "@smithy/util-defaults-mode-node": "^3.0.13", "@smithy/util-endpoints": "^2.0.5", "@smithy/util-middleware": "^3.0.3", "@smithy/util-retry": "^3.0.3", "@smithy/util-utf8": "^3.0.0", - "@smithy/util-waiter": "^3.1.2", "tslib": "^2.6.2" }, "engines": { "node": ">=16.0.0" } }, - "node_modules/@aws-sdk/client-iam/node_modules/@aws-sdk/client-sso": { - "version": "3.624.0", - "resolved": "https://registry.npmjs.org/@aws-sdk/client-sso/-/client-sso-3.624.0.tgz", - "integrity": "sha512-EX6EF+rJzMPC5dcdsu40xSi2To7GSvdGQNIpe97pD9WvZwM9tRNQnNM4T6HA4gjV1L6Jwk8rBlG/CnveXtLEMw==", - "dev": true, + "node_modules/@aws-sdk/client-firehose/node_modules/@aws-sdk/client-sso": { + "version": "3.621.0", + "resolved": "https://registry.npmjs.org/@aws-sdk/client-sso/-/client-sso-3.621.0.tgz", + "integrity": "sha512-xpKfikN4u0BaUYZA9FGUMkkDmfoIP0Q03+A86WjqDWhcOoqNA1DkHsE4kZ+r064ifkPUfcNuUvlkVTEoBZoFjA==", "dependencies": { "@aws-crypto/sha256-browser": "5.2.0", "@aws-crypto/sha256-js": "5.2.0", - "@aws-sdk/core": "3.624.0", + "@aws-sdk/core": "3.621.0", 
"@aws-sdk/middleware-host-header": "3.620.0", "@aws-sdk/middleware-logger": "3.609.0", "@aws-sdk/middleware-recursion-detection": "3.620.0", @@ -10281,26 +10659,26 @@ "@aws-sdk/util-user-agent-browser": "3.609.0", "@aws-sdk/util-user-agent-node": "3.614.0", "@smithy/config-resolver": "^3.0.5", - "@smithy/core": "^2.3.2", + "@smithy/core": "^2.3.1", "@smithy/fetch-http-handler": "^3.2.4", "@smithy/hash-node": "^3.0.3", "@smithy/invalid-dependency": "^3.0.3", "@smithy/middleware-content-length": "^3.0.5", "@smithy/middleware-endpoint": "^3.1.0", - "@smithy/middleware-retry": "^3.0.14", + "@smithy/middleware-retry": "^3.0.13", "@smithy/middleware-serde": "^3.0.3", "@smithy/middleware-stack": "^3.0.3", "@smithy/node-config-provider": "^3.1.4", "@smithy/node-http-handler": "^3.1.4", "@smithy/protocol-http": "^4.1.0", - "@smithy/smithy-client": "^3.1.12", + "@smithy/smithy-client": "^3.1.11", "@smithy/types": "^3.3.0", "@smithy/url-parser": "^3.0.3", "@smithy/util-base64": "^3.0.0", "@smithy/util-body-length-browser": "^3.0.0", "@smithy/util-body-length-node": "^3.0.0", - "@smithy/util-defaults-mode-browser": "^3.0.14", - "@smithy/util-defaults-mode-node": "^3.0.14", + "@smithy/util-defaults-mode-browser": "^3.0.13", + "@smithy/util-defaults-mode-node": "^3.0.13", "@smithy/util-endpoints": "^2.0.5", "@smithy/util-middleware": "^3.0.3", "@smithy/util-retry": "^3.0.3", @@ -10311,16 +10689,15 @@ "node": ">=16.0.0" } }, - "node_modules/@aws-sdk/client-iam/node_modules/@aws-sdk/client-sso-oidc": { - "version": "3.624.0", - "resolved": "https://registry.npmjs.org/@aws-sdk/client-sso-oidc/-/client-sso-oidc-3.624.0.tgz", - "integrity": "sha512-Ki2uKYJKKtfHxxZsiMTOvJoVRP6b2pZ1u3rcUb2m/nVgBPUfLdl8ZkGpqE29I+t5/QaS/sEdbn6cgMUZwl+3Dg==", - "dev": true, + "node_modules/@aws-sdk/client-firehose/node_modules/@aws-sdk/client-sso-oidc": { + "version": "3.621.0", + "resolved": "https://registry.npmjs.org/@aws-sdk/client-sso-oidc/-/client-sso-oidc-3.621.0.tgz", + "integrity": 
"sha512-mMjk3mFUwV2Y68POf1BQMTF+F6qxt5tPu6daEUCNGC9Cenk3h2YXQQoS4/eSyYzuBiYk3vx49VgleRvdvkg8rg==", "dependencies": { "@aws-crypto/sha256-browser": "5.2.0", "@aws-crypto/sha256-js": "5.2.0", - "@aws-sdk/core": "3.624.0", - "@aws-sdk/credential-provider-node": "3.624.0", + "@aws-sdk/core": "3.621.0", + "@aws-sdk/credential-provider-node": "3.621.0", "@aws-sdk/middleware-host-header": "3.620.0", "@aws-sdk/middleware-logger": "3.609.0", "@aws-sdk/middleware-recursion-detection": "3.620.0", @@ -10331,26 +10708,26 @@ "@aws-sdk/util-user-agent-browser": "3.609.0", "@aws-sdk/util-user-agent-node": "3.614.0", "@smithy/config-resolver": "^3.0.5", - "@smithy/core": "^2.3.2", + "@smithy/core": "^2.3.1", "@smithy/fetch-http-handler": "^3.2.4", "@smithy/hash-node": "^3.0.3", "@smithy/invalid-dependency": "^3.0.3", "@smithy/middleware-content-length": "^3.0.5", "@smithy/middleware-endpoint": "^3.1.0", - "@smithy/middleware-retry": "^3.0.14", + "@smithy/middleware-retry": "^3.0.13", "@smithy/middleware-serde": "^3.0.3", "@smithy/middleware-stack": "^3.0.3", "@smithy/node-config-provider": "^3.1.4", "@smithy/node-http-handler": "^3.1.4", "@smithy/protocol-http": "^4.1.0", - "@smithy/smithy-client": "^3.1.12", + "@smithy/smithy-client": "^3.1.11", "@smithy/types": "^3.3.0", "@smithy/url-parser": "^3.0.3", "@smithy/util-base64": "^3.0.0", "@smithy/util-body-length-browser": "^3.0.0", "@smithy/util-body-length-node": "^3.0.0", - "@smithy/util-defaults-mode-browser": "^3.0.14", - "@smithy/util-defaults-mode-node": "^3.0.14", + "@smithy/util-defaults-mode-browser": "^3.0.13", + "@smithy/util-defaults-mode-node": "^3.0.13", "@smithy/util-endpoints": "^2.0.5", "@smithy/util-middleware": "^3.0.3", "@smithy/util-retry": "^3.0.3", @@ -10361,20 +10738,19 @@ "node": ">=16.0.0" }, "peerDependencies": { - "@aws-sdk/client-sts": "^3.624.0" + "@aws-sdk/client-sts": "^3.621.0" } }, - "node_modules/@aws-sdk/client-iam/node_modules/@aws-sdk/client-sts": { - "version": "3.624.0", - "resolved": 
"https://registry.npmjs.org/@aws-sdk/client-sts/-/client-sts-3.624.0.tgz", - "integrity": "sha512-k36fLZCb2nfoV/DKK3jbRgO/Yf7/R80pgYfMiotkGjnZwDmRvNN08z4l06L9C+CieazzkgRxNUzyppsYcYsQaw==", - "dev": true, + "node_modules/@aws-sdk/client-firehose/node_modules/@aws-sdk/client-sts": { + "version": "3.621.0", + "resolved": "https://registry.npmjs.org/@aws-sdk/client-sts/-/client-sts-3.621.0.tgz", + "integrity": "sha512-707uiuReSt+nAx6d0c21xLjLm2lxeKc7padxjv92CIrIocnQSlJPxSCM7r5zBhwiahJA6MNQwmTl2xznU67KgA==", "dependencies": { "@aws-crypto/sha256-browser": "5.2.0", "@aws-crypto/sha256-js": "5.2.0", - "@aws-sdk/client-sso-oidc": "3.624.0", - "@aws-sdk/core": "3.624.0", - "@aws-sdk/credential-provider-node": "3.624.0", + "@aws-sdk/client-sso-oidc": "3.621.0", + "@aws-sdk/core": "3.621.0", + "@aws-sdk/credential-provider-node": "3.621.0", "@aws-sdk/middleware-host-header": "3.620.0", "@aws-sdk/middleware-logger": "3.609.0", "@aws-sdk/middleware-recursion-detection": "3.620.0", @@ -10385,26 +10761,26 @@ "@aws-sdk/util-user-agent-browser": "3.609.0", "@aws-sdk/util-user-agent-node": "3.614.0", "@smithy/config-resolver": "^3.0.5", - "@smithy/core": "^2.3.2", + "@smithy/core": "^2.3.1", "@smithy/fetch-http-handler": "^3.2.4", "@smithy/hash-node": "^3.0.3", "@smithy/invalid-dependency": "^3.0.3", "@smithy/middleware-content-length": "^3.0.5", "@smithy/middleware-endpoint": "^3.1.0", - "@smithy/middleware-retry": "^3.0.14", + "@smithy/middleware-retry": "^3.0.13", "@smithy/middleware-serde": "^3.0.3", "@smithy/middleware-stack": "^3.0.3", "@smithy/node-config-provider": "^3.1.4", "@smithy/node-http-handler": "^3.1.4", "@smithy/protocol-http": "^4.1.0", - "@smithy/smithy-client": "^3.1.12", + "@smithy/smithy-client": "^3.1.11", "@smithy/types": "^3.3.0", "@smithy/url-parser": "^3.0.3", "@smithy/util-base64": "^3.0.0", "@smithy/util-body-length-browser": "^3.0.0", "@smithy/util-body-length-node": "^3.0.0", - "@smithy/util-defaults-mode-browser": "^3.0.14", - 
"@smithy/util-defaults-mode-node": "^3.0.14", + "@smithy/util-defaults-mode-browser": "^3.0.13", + "@smithy/util-defaults-mode-node": "^3.0.13", "@smithy/util-endpoints": "^2.0.5", "@smithy/util-middleware": "^3.0.3", "@smithy/util-retry": "^3.0.3", @@ -10415,17 +10791,16 @@ "node": ">=16.0.0" } }, - "node_modules/@aws-sdk/client-iam/node_modules/@aws-sdk/core": { - "version": "3.624.0", - "resolved": "https://registry.npmjs.org/@aws-sdk/core/-/core-3.624.0.tgz", - "integrity": "sha512-WyFmPbhRIvtWi7hBp8uSFy+iPpj8ccNV/eX86hwF4irMjfc/FtsGVIAeBXxXM/vGCjkdfEzOnl+tJ2XACD4OXg==", - "dev": true, + "node_modules/@aws-sdk/client-firehose/node_modules/@aws-sdk/core": { + "version": "3.621.0", + "resolved": "https://registry.npmjs.org/@aws-sdk/core/-/core-3.621.0.tgz", + "integrity": "sha512-CtOwWmDdEiINkGXD93iGfXjN0WmCp9l45cDWHHGa8lRgEDyhuL7bwd/pH5aSzj0j8SiQBG2k0S7DHbd5RaqvbQ==", "dependencies": { - "@smithy/core": "^2.3.2", + "@smithy/core": "^2.3.1", "@smithy/node-config-provider": "^3.1.4", "@smithy/protocol-http": "^4.1.0", "@smithy/signature-v4": "^4.1.0", - "@smithy/smithy-client": "^3.1.12", + "@smithy/smithy-client": "^3.1.11", "@smithy/types": "^3.3.0", "@smithy/util-middleware": "^3.0.3", "fast-xml-parser": "4.4.1", @@ -10435,11 +10810,10 @@ "node": ">=16.0.0" } }, - "node_modules/@aws-sdk/client-iam/node_modules/@aws-sdk/credential-provider-env": { + "node_modules/@aws-sdk/client-firehose/node_modules/@aws-sdk/credential-provider-env": { "version": "3.620.1", "resolved": "https://registry.npmjs.org/@aws-sdk/credential-provider-env/-/credential-provider-env-3.620.1.tgz", "integrity": "sha512-ExuILJ2qLW5ZO+rgkNRj0xiAipKT16Rk77buvPP8csR7kkCflT/gXTyzRe/uzIiETTxM7tr8xuO9MP/DQXqkfg==", - "dev": true, "dependencies": { "@aws-sdk/types": "3.609.0", "@smithy/property-provider": "^3.1.3", @@ -10450,18 +10824,17 @@ "node": ">=16.0.0" } }, - "node_modules/@aws-sdk/client-iam/node_modules/@aws-sdk/credential-provider-http": { - "version": "3.622.0", - "resolved": 
"https://registry.npmjs.org/@aws-sdk/credential-provider-http/-/credential-provider-http-3.622.0.tgz", - "integrity": "sha512-VUHbr24Oll1RK3WR8XLUugLpgK9ZuxEm/NVeVqyFts1Ck9gsKpRg1x4eH7L7tW3SJ4TDEQNMbD7/7J+eoL2svg==", - "dev": true, + "node_modules/@aws-sdk/client-firehose/node_modules/@aws-sdk/credential-provider-http": { + "version": "3.621.0", + "resolved": "https://registry.npmjs.org/@aws-sdk/credential-provider-http/-/credential-provider-http-3.621.0.tgz", + "integrity": "sha512-/jc2tEsdkT1QQAI5Dvoci50DbSxtJrevemwFsm0B73pwCcOQZ5ZwwSdVqGsPutzYzUVx3bcXg3LRL7jLACqRIg==", "dependencies": { "@aws-sdk/types": "3.609.0", "@smithy/fetch-http-handler": "^3.2.4", "@smithy/node-http-handler": "^3.1.4", "@smithy/property-provider": "^3.1.3", "@smithy/protocol-http": "^4.1.0", - "@smithy/smithy-client": "^3.1.12", + "@smithy/smithy-client": "^3.1.11", "@smithy/types": "^3.3.0", "@smithy/util-stream": "^3.1.3", "tslib": "^2.6.2" @@ -10470,16 +10843,15 @@ "node": ">=16.0.0" } }, - "node_modules/@aws-sdk/client-iam/node_modules/@aws-sdk/credential-provider-ini": { - "version": "3.624.0", - "resolved": "https://registry.npmjs.org/@aws-sdk/credential-provider-ini/-/credential-provider-ini-3.624.0.tgz", - "integrity": "sha512-mMoNIy7MO2WTBbdqMyLpbt6SZpthE6e0GkRYpsd0yozPt0RZopcBhEh+HG1U9Y1PVODo+jcMk353vAi61CfnhQ==", - "dev": true, + "node_modules/@aws-sdk/client-firehose/node_modules/@aws-sdk/credential-provider-ini": { + "version": "3.621.0", + "resolved": "https://registry.npmjs.org/@aws-sdk/credential-provider-ini/-/credential-provider-ini-3.621.0.tgz", + "integrity": "sha512-0EWVnSc+JQn5HLnF5Xv405M8n4zfdx9gyGdpnCmAmFqEDHA8LmBdxJdpUk1Ovp/I5oPANhjojxabIW5f1uU0RA==", "dependencies": { "@aws-sdk/credential-provider-env": "3.620.1", - "@aws-sdk/credential-provider-http": "3.622.0", + "@aws-sdk/credential-provider-http": "3.621.0", "@aws-sdk/credential-provider-process": "3.620.1", - "@aws-sdk/credential-provider-sso": "3.624.0", + "@aws-sdk/credential-provider-sso": "3.621.0", 
"@aws-sdk/credential-provider-web-identity": "3.621.0", "@aws-sdk/types": "3.609.0", "@smithy/credential-provider-imds": "^3.2.0", @@ -10492,20 +10864,19 @@ "node": ">=16.0.0" }, "peerDependencies": { - "@aws-sdk/client-sts": "^3.624.0" + "@aws-sdk/client-sts": "^3.621.0" } }, - "node_modules/@aws-sdk/client-iam/node_modules/@aws-sdk/credential-provider-node": { - "version": "3.624.0", - "resolved": "https://registry.npmjs.org/@aws-sdk/credential-provider-node/-/credential-provider-node-3.624.0.tgz", - "integrity": "sha512-vYyGK7oNpd81BdbH5IlmQ6zfaQqU+rPwsKTDDBeLRjshtrGXOEpfoahVpG9PX0ibu32IOWp4ZyXBNyVrnvcMOw==", - "dev": true, + "node_modules/@aws-sdk/client-firehose/node_modules/@aws-sdk/credential-provider-node": { + "version": "3.621.0", + "resolved": "https://registry.npmjs.org/@aws-sdk/credential-provider-node/-/credential-provider-node-3.621.0.tgz", + "integrity": "sha512-4JqpccUgz5Snanpt2+53hbOBbJQrSFq7E1sAAbgY6BKVQUsW5qyXqnjvSF32kDeKa5JpBl3bBWLZl04IadcPHw==", "dependencies": { "@aws-sdk/credential-provider-env": "3.620.1", - "@aws-sdk/credential-provider-http": "3.622.0", - "@aws-sdk/credential-provider-ini": "3.624.0", + "@aws-sdk/credential-provider-http": "3.621.0", + "@aws-sdk/credential-provider-ini": "3.621.0", "@aws-sdk/credential-provider-process": "3.620.1", - "@aws-sdk/credential-provider-sso": "3.624.0", + "@aws-sdk/credential-provider-sso": "3.621.0", "@aws-sdk/credential-provider-web-identity": "3.621.0", "@aws-sdk/types": "3.609.0", "@smithy/credential-provider-imds": "^3.2.0", @@ -10518,11 +10889,10 @@ "node": ">=16.0.0" } }, - "node_modules/@aws-sdk/client-iam/node_modules/@aws-sdk/credential-provider-process": { + "node_modules/@aws-sdk/client-firehose/node_modules/@aws-sdk/credential-provider-process": { "version": "3.620.1", "resolved": "https://registry.npmjs.org/@aws-sdk/credential-provider-process/-/credential-provider-process-3.620.1.tgz", "integrity": 
"sha512-hWqFMidqLAkaV9G460+1at6qa9vySbjQKKc04p59OT7lZ5cO5VH5S4aI05e+m4j364MBROjjk2ugNvfNf/8ILg==", - "dev": true, "dependencies": { "@aws-sdk/types": "3.609.0", "@smithy/property-provider": "^3.1.3", @@ -10534,13 +10904,12 @@ "node": ">=16.0.0" } }, - "node_modules/@aws-sdk/client-iam/node_modules/@aws-sdk/credential-provider-sso": { - "version": "3.624.0", - "resolved": "https://registry.npmjs.org/@aws-sdk/credential-provider-sso/-/credential-provider-sso-3.624.0.tgz", - "integrity": "sha512-A02bayIjU9APEPKr3HudrFHEx0WfghoSPsPopckDkW7VBqO4wizzcxr75Q9A3vNX+cwg0wCN6UitTNe6pVlRaQ==", - "dev": true, + "node_modules/@aws-sdk/client-firehose/node_modules/@aws-sdk/credential-provider-sso": { + "version": "3.621.0", + "resolved": "https://registry.npmjs.org/@aws-sdk/credential-provider-sso/-/credential-provider-sso-3.621.0.tgz", + "integrity": "sha512-Kza0jcFeA/GEL6xJlzR2KFf1PfZKMFnxfGzJzl5yN7EjoGdMijl34KaRyVnfRjnCWcsUpBWKNIDk9WZVMY9yiw==", "dependencies": { - "@aws-sdk/client-sso": "3.624.0", + "@aws-sdk/client-sso": "3.621.0", "@aws-sdk/token-providers": "3.614.0", "@aws-sdk/types": "3.609.0", "@smithy/property-provider": "^3.1.3", @@ -10552,11 +10921,10 @@ "node": ">=16.0.0" } }, - "node_modules/@aws-sdk/client-iam/node_modules/@aws-sdk/credential-provider-web-identity": { + "node_modules/@aws-sdk/client-firehose/node_modules/@aws-sdk/credential-provider-web-identity": { "version": "3.621.0", "resolved": "https://registry.npmjs.org/@aws-sdk/credential-provider-web-identity/-/credential-provider-web-identity-3.621.0.tgz", "integrity": "sha512-w7ASSyfNvcx7+bYGep3VBgC3K6vEdLmlpjT7nSIHxxQf+WSdvy+HynwJosrpZax0sK5q0D1Jpn/5q+r5lwwW6w==", - "dev": true, "dependencies": { "@aws-sdk/types": "3.609.0", "@smithy/property-provider": "^3.1.3", @@ -10570,11 +10938,10 @@ "@aws-sdk/client-sts": "^3.621.0" } }, - "node_modules/@aws-sdk/client-iam/node_modules/@aws-sdk/middleware-host-header": { + "node_modules/@aws-sdk/client-firehose/node_modules/@aws-sdk/middleware-host-header": { 
"version": "3.620.0", "resolved": "https://registry.npmjs.org/@aws-sdk/middleware-host-header/-/middleware-host-header-3.620.0.tgz", "integrity": "sha512-VMtPEZwqYrII/oUkffYsNWY9PZ9xpNJpMgmyU0rlDQ25O1c0Hk3fJmZRe6pEkAJ0omD7kLrqGl1DUjQVxpd/Rg==", - "dev": true, "dependencies": { "@aws-sdk/types": "3.609.0", "@smithy/protocol-http": "^4.1.0", @@ -10585,11 +10952,10 @@ "node": ">=16.0.0" } }, - "node_modules/@aws-sdk/client-iam/node_modules/@aws-sdk/middleware-logger": { + "node_modules/@aws-sdk/client-firehose/node_modules/@aws-sdk/middleware-logger": { "version": "3.609.0", "resolved": "https://registry.npmjs.org/@aws-sdk/middleware-logger/-/middleware-logger-3.609.0.tgz", "integrity": "sha512-S62U2dy4jMDhDFDK5gZ4VxFdWzCtLzwbYyFZx2uvPYTECkepLUfzLic2BHg2Qvtu4QjX+oGE3P/7fwaGIsGNuQ==", - "dev": true, "dependencies": { "@aws-sdk/types": "3.609.0", "@smithy/types": "^3.3.0", @@ -10599,11 +10965,10 @@ "node": ">=16.0.0" } }, - "node_modules/@aws-sdk/client-iam/node_modules/@aws-sdk/middleware-recursion-detection": { + "node_modules/@aws-sdk/client-firehose/node_modules/@aws-sdk/middleware-recursion-detection": { "version": "3.620.0", "resolved": "https://registry.npmjs.org/@aws-sdk/middleware-recursion-detection/-/middleware-recursion-detection-3.620.0.tgz", "integrity": "sha512-nh91S7aGK3e/o1ck64sA/CyoFw+gAYj2BDOnoNa6ouyCrVJED96ZXWbhye/fz9SgmNUZR2g7GdVpiLpMKZoI5w==", - "dev": true, "dependencies": { "@aws-sdk/types": "3.609.0", "@smithy/protocol-http": "^4.1.0", @@ -10614,11 +10979,10 @@ "node": ">=16.0.0" } }, - "node_modules/@aws-sdk/client-iam/node_modules/@aws-sdk/middleware-user-agent": { + "node_modules/@aws-sdk/client-firehose/node_modules/@aws-sdk/middleware-user-agent": { "version": "3.620.0", "resolved": "https://registry.npmjs.org/@aws-sdk/middleware-user-agent/-/middleware-user-agent-3.620.0.tgz", "integrity": "sha512-bvS6etn+KsuL32ubY5D3xNof1qkenpbJXf/ugGXbg0n98DvDFQ/F+SMLxHgbnER5dsKYchNnhmtI6/FC3HFu/A==", - "dev": true, "dependencies": { "@aws-sdk/types": 
"3.609.0", "@aws-sdk/util-endpoints": "3.614.0", @@ -10630,11 +10994,10 @@ "node": ">=16.0.0" } }, - "node_modules/@aws-sdk/client-iam/node_modules/@aws-sdk/region-config-resolver": { + "node_modules/@aws-sdk/client-firehose/node_modules/@aws-sdk/region-config-resolver": { "version": "3.614.0", "resolved": "https://registry.npmjs.org/@aws-sdk/region-config-resolver/-/region-config-resolver-3.614.0.tgz", "integrity": "sha512-vDCeMXvic/LU0KFIUjpC3RiSTIkkvESsEfbVHiHH0YINfl8HnEqR5rj+L8+phsCeVg2+LmYwYxd5NRz4PHxt5g==", - "dev": true, "dependencies": { "@aws-sdk/types": "3.609.0", "@smithy/node-config-provider": "^3.1.4", @@ -10647,11 +11010,10 @@ "node": ">=16.0.0" } }, - "node_modules/@aws-sdk/client-iam/node_modules/@aws-sdk/token-providers": { + "node_modules/@aws-sdk/client-firehose/node_modules/@aws-sdk/token-providers": { "version": "3.614.0", "resolved": "https://registry.npmjs.org/@aws-sdk/token-providers/-/token-providers-3.614.0.tgz", "integrity": "sha512-okItqyY6L9IHdxqs+Z116y5/nda7rHxLvROxtAJdLavWTYDydxrZstImNgGWTeVdmc0xX2gJCI77UYUTQWnhRw==", - "dev": true, "dependencies": { "@aws-sdk/types": "3.609.0", "@smithy/property-provider": "^3.1.3", @@ -10666,11 +11028,10 @@ "@aws-sdk/client-sso-oidc": "^3.614.0" } }, - "node_modules/@aws-sdk/client-iam/node_modules/@aws-sdk/types": { + "node_modules/@aws-sdk/client-firehose/node_modules/@aws-sdk/types": { "version": "3.609.0", "resolved": "https://registry.npmjs.org/@aws-sdk/types/-/types-3.609.0.tgz", "integrity": "sha512-+Tqnh9w0h2LcrUsdXyT1F8mNhXz+tVYBtP19LpeEGntmvHwa2XzvLUCWpoIAIVsHp5+HdB2X9Sn0KAtmbFXc2Q==", - "dev": true, "dependencies": { "@smithy/types": "^3.3.0", "tslib": "^2.6.2" @@ -10679,11 +11040,10 @@ "node": ">=16.0.0" } }, - "node_modules/@aws-sdk/client-iam/node_modules/@aws-sdk/util-endpoints": { + "node_modules/@aws-sdk/client-firehose/node_modules/@aws-sdk/util-endpoints": { "version": "3.614.0", "resolved": "https://registry.npmjs.org/@aws-sdk/util-endpoints/-/util-endpoints-3.614.0.tgz", 
"integrity": "sha512-wK2cdrXHH4oz4IomV/yrGkftU9A+ITB6nFL+rxxyO78is2ifHJpFdV4aqk4LSkXYPi6CXWNru/Dqc7yiKXgJPw==", - "dev": true, "dependencies": { "@aws-sdk/types": "3.609.0", "@smithy/types": "^3.3.0", @@ -10694,11 +11054,10 @@ "node": ">=16.0.0" } }, - "node_modules/@aws-sdk/client-iam/node_modules/@aws-sdk/util-user-agent-browser": { + "node_modules/@aws-sdk/client-firehose/node_modules/@aws-sdk/util-user-agent-browser": { "version": "3.609.0", "resolved": "https://registry.npmjs.org/@aws-sdk/util-user-agent-browser/-/util-user-agent-browser-3.609.0.tgz", "integrity": "sha512-fojPU+mNahzQ0YHYBsx0ZIhmMA96H+ZIZ665ObU9tl+SGdbLneVZVikGve+NmHTQwHzwkFsZYYnVKAkreJLAtA==", - "dev": true, "dependencies": { "@aws-sdk/types": "3.609.0", "@smithy/types": "^3.3.0", @@ -10706,11 +11065,10 @@ "tslib": "^2.6.2" } }, - "node_modules/@aws-sdk/client-iam/node_modules/@aws-sdk/util-user-agent-node": { + "node_modules/@aws-sdk/client-firehose/node_modules/@aws-sdk/util-user-agent-node": { "version": "3.614.0", "resolved": "https://registry.npmjs.org/@aws-sdk/util-user-agent-node/-/util-user-agent-node-3.614.0.tgz", "integrity": "sha512-15ElZT88peoHnq5TEoEtZwoXTXRxNrk60TZNdpl/TUBJ5oNJ9Dqb5Z4ryb8ofN6nm9aFf59GVAerFDz8iUoHBA==", - "dev": true, "dependencies": { "@aws-sdk/types": "3.609.0", "@smithy/node-config-provider": "^3.1.4", @@ -10729,11 +11087,10 @@ } } }, - "node_modules/@aws-sdk/client-iam/node_modules/@smithy/node-config-provider": { + "node_modules/@aws-sdk/client-firehose/node_modules/@smithy/node-config-provider": { "version": "3.1.9", "resolved": "https://registry.npmjs.org/@smithy/node-config-provider/-/node-config-provider-3.1.9.tgz", "integrity": "sha512-qRHoah49QJ71eemjuS/WhUXB+mpNtwHRWQr77J/m40ewBVVwvo52kYAmb7iuaECgGTTcYxHS4Wmewfwy++ueew==", - "dev": true, "dependencies": { "@smithy/property-provider": "^3.1.8", "@smithy/shared-ini-file-loader": "^3.1.9", @@ -10744,11 +11101,10 @@ "node": ">=16.0.0" } }, - 
"node_modules/@aws-sdk/client-iam/node_modules/@smithy/shared-ini-file-loader": { + "node_modules/@aws-sdk/client-firehose/node_modules/@smithy/shared-ini-file-loader": { "version": "3.1.9", "resolved": "https://registry.npmjs.org/@smithy/shared-ini-file-loader/-/shared-ini-file-loader-3.1.9.tgz", "integrity": "sha512-/+OsJRNtoRbtsX0UpSgWVxFZLsJHo/4sTr+kBg/J78sr7iC+tHeOvOJrS5hCpVQ6sWBbhWLp1UNiuMyZhE6pmA==", - "dev": true, "dependencies": { "@smithy/types": "^3.6.0", "tslib": "^2.6.2" @@ -10757,11 +11113,10 @@ "node": ">=16.0.0" } }, - "node_modules/@aws-sdk/client-iam/node_modules/@smithy/util-utf8": { + "node_modules/@aws-sdk/client-firehose/node_modules/@smithy/util-utf8": { "version": "3.0.0", "resolved": "https://registry.npmjs.org/@smithy/util-utf8/-/util-utf8-3.0.0.tgz", "integrity": "sha512-rUeT12bxFnplYDe815GXbq/oixEGHfRFFtcTF3YdDi/JaENIM6aSYYLJydG83UNzLXeRI5K8abYd/8Sp/QM0kA==", - "dev": true, "dependencies": { "@smithy/util-buffer-from": "^3.0.0", "tslib": "^2.6.2" @@ -10770,17 +11125,18 @@ "node": ">=16.0.0" } }, - "node_modules/@aws-sdk/client-kinesis": { - "version": "3.621.0", - "resolved": "https://registry.npmjs.org/@aws-sdk/client-kinesis/-/client-kinesis-3.621.0.tgz", - "integrity": "sha512-53Omt/beFmTQPjQNpMuPMk5nMzYVsXCRiO+MeqygZEKYG1fWw/UGluCWVbi7WjClOHacsW8lQcsqIRvkPDFNag==", + "node_modules/@aws-sdk/client-iam": { + "version": "3.624.0", + "resolved": "https://registry.npmjs.org/@aws-sdk/client-iam/-/client-iam-3.624.0.tgz", + "integrity": "sha512-a3Qy7AIht2nHiZPJ/HiMdyiOLiDN+iKp1R916SEbgFi9MiOyRHFeLCCPQHMf1O8YXfb0hbHr5IFnfZLfUcJaWQ==", + "dev": true, "dependencies": { "@aws-crypto/sha256-browser": "5.2.0", "@aws-crypto/sha256-js": "5.2.0", - "@aws-sdk/client-sso-oidc": "3.621.0", - "@aws-sdk/client-sts": "3.621.0", - "@aws-sdk/core": "3.621.0", - "@aws-sdk/credential-provider-node": "3.621.0", + "@aws-sdk/client-sso-oidc": "3.624.0", + "@aws-sdk/client-sts": "3.624.0", + "@aws-sdk/core": "3.624.0", + "@aws-sdk/credential-provider-node": 
"3.624.0", "@aws-sdk/middleware-host-header": "3.620.0", "@aws-sdk/middleware-logger": "3.609.0", "@aws-sdk/middleware-recursion-detection": "3.620.0", @@ -10791,29 +11147,26 @@ "@aws-sdk/util-user-agent-browser": "3.609.0", "@aws-sdk/util-user-agent-node": "3.614.0", "@smithy/config-resolver": "^3.0.5", - "@smithy/core": "^2.3.1", - "@smithy/eventstream-serde-browser": "^3.0.5", - "@smithy/eventstream-serde-config-resolver": "^3.0.3", - "@smithy/eventstream-serde-node": "^3.0.4", + "@smithy/core": "^2.3.2", "@smithy/fetch-http-handler": "^3.2.4", "@smithy/hash-node": "^3.0.3", "@smithy/invalid-dependency": "^3.0.3", "@smithy/middleware-content-length": "^3.0.5", "@smithy/middleware-endpoint": "^3.1.0", - "@smithy/middleware-retry": "^3.0.13", + "@smithy/middleware-retry": "^3.0.14", "@smithy/middleware-serde": "^3.0.3", "@smithy/middleware-stack": "^3.0.3", "@smithy/node-config-provider": "^3.1.4", "@smithy/node-http-handler": "^3.1.4", "@smithy/protocol-http": "^4.1.0", - "@smithy/smithy-client": "^3.1.11", + "@smithy/smithy-client": "^3.1.12", "@smithy/types": "^3.3.0", "@smithy/url-parser": "^3.0.3", "@smithy/util-base64": "^3.0.0", "@smithy/util-body-length-browser": "^3.0.0", "@smithy/util-body-length-node": "^3.0.0", - "@smithy/util-defaults-mode-browser": "^3.0.13", - "@smithy/util-defaults-mode-node": "^3.0.13", + "@smithy/util-defaults-mode-browser": "^3.0.14", + "@smithy/util-defaults-mode-node": "^3.0.14", "@smithy/util-endpoints": "^2.0.5", "@smithy/util-middleware": "^3.0.3", "@smithy/util-retry": "^3.0.3", @@ -10825,14 +11178,15 @@ "node": ">=16.0.0" } }, - "node_modules/@aws-sdk/client-kinesis/node_modules/@aws-sdk/client-sso": { - "version": "3.621.0", - "resolved": "https://registry.npmjs.org/@aws-sdk/client-sso/-/client-sso-3.621.0.tgz", - "integrity": "sha512-xpKfikN4u0BaUYZA9FGUMkkDmfoIP0Q03+A86WjqDWhcOoqNA1DkHsE4kZ+r064ifkPUfcNuUvlkVTEoBZoFjA==", + "node_modules/@aws-sdk/client-iam/node_modules/@aws-sdk/client-sso": { + "version": "3.624.0", + 
"resolved": "https://registry.npmjs.org/@aws-sdk/client-sso/-/client-sso-3.624.0.tgz", + "integrity": "sha512-EX6EF+rJzMPC5dcdsu40xSi2To7GSvdGQNIpe97pD9WvZwM9tRNQnNM4T6HA4gjV1L6Jwk8rBlG/CnveXtLEMw==", + "dev": true, "dependencies": { "@aws-crypto/sha256-browser": "5.2.0", "@aws-crypto/sha256-js": "5.2.0", - "@aws-sdk/core": "3.621.0", + "@aws-sdk/core": "3.624.0", "@aws-sdk/middleware-host-header": "3.620.0", "@aws-sdk/middleware-logger": "3.609.0", "@aws-sdk/middleware-recursion-detection": "3.620.0", @@ -10843,26 +11197,26 @@ "@aws-sdk/util-user-agent-browser": "3.609.0", "@aws-sdk/util-user-agent-node": "3.614.0", "@smithy/config-resolver": "^3.0.5", - "@smithy/core": "^2.3.1", + "@smithy/core": "^2.3.2", "@smithy/fetch-http-handler": "^3.2.4", "@smithy/hash-node": "^3.0.3", "@smithy/invalid-dependency": "^3.0.3", "@smithy/middleware-content-length": "^3.0.5", "@smithy/middleware-endpoint": "^3.1.0", - "@smithy/middleware-retry": "^3.0.13", + "@smithy/middleware-retry": "^3.0.14", "@smithy/middleware-serde": "^3.0.3", "@smithy/middleware-stack": "^3.0.3", "@smithy/node-config-provider": "^3.1.4", "@smithy/node-http-handler": "^3.1.4", "@smithy/protocol-http": "^4.1.0", - "@smithy/smithy-client": "^3.1.11", + "@smithy/smithy-client": "^3.1.12", "@smithy/types": "^3.3.0", "@smithy/url-parser": "^3.0.3", "@smithy/util-base64": "^3.0.0", "@smithy/util-body-length-browser": "^3.0.0", "@smithy/util-body-length-node": "^3.0.0", - "@smithy/util-defaults-mode-browser": "^3.0.13", - "@smithy/util-defaults-mode-node": "^3.0.13", + "@smithy/util-defaults-mode-browser": "^3.0.14", + "@smithy/util-defaults-mode-node": "^3.0.14", "@smithy/util-endpoints": "^2.0.5", "@smithy/util-middleware": "^3.0.3", "@smithy/util-retry": "^3.0.3", @@ -10873,15 +11227,16 @@ "node": ">=16.0.0" } }, - "node_modules/@aws-sdk/client-kinesis/node_modules/@aws-sdk/client-sso-oidc": { - "version": "3.621.0", - "resolved": 
"https://registry.npmjs.org/@aws-sdk/client-sso-oidc/-/client-sso-oidc-3.621.0.tgz", - "integrity": "sha512-mMjk3mFUwV2Y68POf1BQMTF+F6qxt5tPu6daEUCNGC9Cenk3h2YXQQoS4/eSyYzuBiYk3vx49VgleRvdvkg8rg==", + "node_modules/@aws-sdk/client-iam/node_modules/@aws-sdk/client-sso-oidc": { + "version": "3.624.0", + "resolved": "https://registry.npmjs.org/@aws-sdk/client-sso-oidc/-/client-sso-oidc-3.624.0.tgz", + "integrity": "sha512-Ki2uKYJKKtfHxxZsiMTOvJoVRP6b2pZ1u3rcUb2m/nVgBPUfLdl8ZkGpqE29I+t5/QaS/sEdbn6cgMUZwl+3Dg==", + "dev": true, "dependencies": { "@aws-crypto/sha256-browser": "5.2.0", "@aws-crypto/sha256-js": "5.2.0", - "@aws-sdk/core": "3.621.0", - "@aws-sdk/credential-provider-node": "3.621.0", + "@aws-sdk/core": "3.624.0", + "@aws-sdk/credential-provider-node": "3.624.0", "@aws-sdk/middleware-host-header": "3.620.0", "@aws-sdk/middleware-logger": "3.609.0", "@aws-sdk/middleware-recursion-detection": "3.620.0", @@ -10892,26 +11247,26 @@ "@aws-sdk/util-user-agent-browser": "3.609.0", "@aws-sdk/util-user-agent-node": "3.614.0", "@smithy/config-resolver": "^3.0.5", - "@smithy/core": "^2.3.1", + "@smithy/core": "^2.3.2", "@smithy/fetch-http-handler": "^3.2.4", "@smithy/hash-node": "^3.0.3", "@smithy/invalid-dependency": "^3.0.3", "@smithy/middleware-content-length": "^3.0.5", "@smithy/middleware-endpoint": "^3.1.0", - "@smithy/middleware-retry": "^3.0.13", + "@smithy/middleware-retry": "^3.0.14", "@smithy/middleware-serde": "^3.0.3", "@smithy/middleware-stack": "^3.0.3", "@smithy/node-config-provider": "^3.1.4", "@smithy/node-http-handler": "^3.1.4", "@smithy/protocol-http": "^4.1.0", - "@smithy/smithy-client": "^3.1.11", + "@smithy/smithy-client": "^3.1.12", "@smithy/types": "^3.3.0", "@smithy/url-parser": "^3.0.3", "@smithy/util-base64": "^3.0.0", "@smithy/util-body-length-browser": "^3.0.0", "@smithy/util-body-length-node": "^3.0.0", - "@smithy/util-defaults-mode-browser": "^3.0.13", - "@smithy/util-defaults-mode-node": "^3.0.13", + "@smithy/util-defaults-mode-browser": 
"^3.0.14", + "@smithy/util-defaults-mode-node": "^3.0.14", "@smithy/util-endpoints": "^2.0.5", "@smithy/util-middleware": "^3.0.3", "@smithy/util-retry": "^3.0.3", @@ -10922,19 +11277,20 @@ "node": ">=16.0.0" }, "peerDependencies": { - "@aws-sdk/client-sts": "^3.621.0" + "@aws-sdk/client-sts": "^3.624.0" } }, - "node_modules/@aws-sdk/client-kinesis/node_modules/@aws-sdk/client-sts": { - "version": "3.621.0", - "resolved": "https://registry.npmjs.org/@aws-sdk/client-sts/-/client-sts-3.621.0.tgz", - "integrity": "sha512-707uiuReSt+nAx6d0c21xLjLm2lxeKc7padxjv92CIrIocnQSlJPxSCM7r5zBhwiahJA6MNQwmTl2xznU67KgA==", + "node_modules/@aws-sdk/client-iam/node_modules/@aws-sdk/client-sts": { + "version": "3.624.0", + "resolved": "https://registry.npmjs.org/@aws-sdk/client-sts/-/client-sts-3.624.0.tgz", + "integrity": "sha512-k36fLZCb2nfoV/DKK3jbRgO/Yf7/R80pgYfMiotkGjnZwDmRvNN08z4l06L9C+CieazzkgRxNUzyppsYcYsQaw==", + "dev": true, "dependencies": { "@aws-crypto/sha256-browser": "5.2.0", "@aws-crypto/sha256-js": "5.2.0", - "@aws-sdk/client-sso-oidc": "3.621.0", - "@aws-sdk/core": "3.621.0", - "@aws-sdk/credential-provider-node": "3.621.0", + "@aws-sdk/client-sso-oidc": "3.624.0", + "@aws-sdk/core": "3.624.0", + "@aws-sdk/credential-provider-node": "3.624.0", "@aws-sdk/middleware-host-header": "3.620.0", "@aws-sdk/middleware-logger": "3.609.0", "@aws-sdk/middleware-recursion-detection": "3.620.0", @@ -10945,26 +11301,26 @@ "@aws-sdk/util-user-agent-browser": "3.609.0", "@aws-sdk/util-user-agent-node": "3.614.0", "@smithy/config-resolver": "^3.0.5", - "@smithy/core": "^2.3.1", + "@smithy/core": "^2.3.2", "@smithy/fetch-http-handler": "^3.2.4", "@smithy/hash-node": "^3.0.3", "@smithy/invalid-dependency": "^3.0.3", "@smithy/middleware-content-length": "^3.0.5", "@smithy/middleware-endpoint": "^3.1.0", - "@smithy/middleware-retry": "^3.0.13", + "@smithy/middleware-retry": "^3.0.14", "@smithy/middleware-serde": "^3.0.3", "@smithy/middleware-stack": "^3.0.3", 
"@smithy/node-config-provider": "^3.1.4", "@smithy/node-http-handler": "^3.1.4", "@smithy/protocol-http": "^4.1.0", - "@smithy/smithy-client": "^3.1.11", + "@smithy/smithy-client": "^3.1.12", "@smithy/types": "^3.3.0", "@smithy/url-parser": "^3.0.3", "@smithy/util-base64": "^3.0.0", "@smithy/util-body-length-browser": "^3.0.0", "@smithy/util-body-length-node": "^3.0.0", - "@smithy/util-defaults-mode-browser": "^3.0.13", - "@smithy/util-defaults-mode-node": "^3.0.13", + "@smithy/util-defaults-mode-browser": "^3.0.14", + "@smithy/util-defaults-mode-node": "^3.0.14", "@smithy/util-endpoints": "^2.0.5", "@smithy/util-middleware": "^3.0.3", "@smithy/util-retry": "^3.0.3", @@ -10975,16 +11331,17 @@ "node": ">=16.0.0" } }, - "node_modules/@aws-sdk/client-kinesis/node_modules/@aws-sdk/core": { - "version": "3.621.0", - "resolved": "https://registry.npmjs.org/@aws-sdk/core/-/core-3.621.0.tgz", - "integrity": "sha512-CtOwWmDdEiINkGXD93iGfXjN0WmCp9l45cDWHHGa8lRgEDyhuL7bwd/pH5aSzj0j8SiQBG2k0S7DHbd5RaqvbQ==", + "node_modules/@aws-sdk/client-iam/node_modules/@aws-sdk/core": { + "version": "3.624.0", + "resolved": "https://registry.npmjs.org/@aws-sdk/core/-/core-3.624.0.tgz", + "integrity": "sha512-WyFmPbhRIvtWi7hBp8uSFy+iPpj8ccNV/eX86hwF4irMjfc/FtsGVIAeBXxXM/vGCjkdfEzOnl+tJ2XACD4OXg==", + "dev": true, "dependencies": { - "@smithy/core": "^2.3.1", + "@smithy/core": "^2.3.2", "@smithy/node-config-provider": "^3.1.4", "@smithy/protocol-http": "^4.1.0", "@smithy/signature-v4": "^4.1.0", - "@smithy/smithy-client": "^3.1.11", + "@smithy/smithy-client": "^3.1.12", "@smithy/types": "^3.3.0", "@smithy/util-middleware": "^3.0.3", "fast-xml-parser": "4.4.1", @@ -10994,10 +11351,11 @@ "node": ">=16.0.0" } }, - "node_modules/@aws-sdk/client-kinesis/node_modules/@aws-sdk/credential-provider-env": { + "node_modules/@aws-sdk/client-iam/node_modules/@aws-sdk/credential-provider-env": { "version": "3.620.1", "resolved": 
"https://registry.npmjs.org/@aws-sdk/credential-provider-env/-/credential-provider-env-3.620.1.tgz", "integrity": "sha512-ExuILJ2qLW5ZO+rgkNRj0xiAipKT16Rk77buvPP8csR7kkCflT/gXTyzRe/uzIiETTxM7tr8xuO9MP/DQXqkfg==", + "dev": true, "dependencies": { "@aws-sdk/types": "3.609.0", "@smithy/property-provider": "^3.1.3", @@ -11008,17 +11366,18 @@ "node": ">=16.0.0" } }, - "node_modules/@aws-sdk/client-kinesis/node_modules/@aws-sdk/credential-provider-http": { - "version": "3.621.0", - "resolved": "https://registry.npmjs.org/@aws-sdk/credential-provider-http/-/credential-provider-http-3.621.0.tgz", - "integrity": "sha512-/jc2tEsdkT1QQAI5Dvoci50DbSxtJrevemwFsm0B73pwCcOQZ5ZwwSdVqGsPutzYzUVx3bcXg3LRL7jLACqRIg==", + "node_modules/@aws-sdk/client-iam/node_modules/@aws-sdk/credential-provider-http": { + "version": "3.622.0", + "resolved": "https://registry.npmjs.org/@aws-sdk/credential-provider-http/-/credential-provider-http-3.622.0.tgz", + "integrity": "sha512-VUHbr24Oll1RK3WR8XLUugLpgK9ZuxEm/NVeVqyFts1Ck9gsKpRg1x4eH7L7tW3SJ4TDEQNMbD7/7J+eoL2svg==", + "dev": true, "dependencies": { "@aws-sdk/types": "3.609.0", "@smithy/fetch-http-handler": "^3.2.4", "@smithy/node-http-handler": "^3.1.4", "@smithy/property-provider": "^3.1.3", "@smithy/protocol-http": "^4.1.0", - "@smithy/smithy-client": "^3.1.11", + "@smithy/smithy-client": "^3.1.12", "@smithy/types": "^3.3.0", "@smithy/util-stream": "^3.1.3", "tslib": "^2.6.2" @@ -11027,15 +11386,16 @@ "node": ">=16.0.0" } }, - "node_modules/@aws-sdk/client-kinesis/node_modules/@aws-sdk/credential-provider-ini": { - "version": "3.621.0", - "resolved": "https://registry.npmjs.org/@aws-sdk/credential-provider-ini/-/credential-provider-ini-3.621.0.tgz", - "integrity": "sha512-0EWVnSc+JQn5HLnF5Xv405M8n4zfdx9gyGdpnCmAmFqEDHA8LmBdxJdpUk1Ovp/I5oPANhjojxabIW5f1uU0RA==", + "node_modules/@aws-sdk/client-iam/node_modules/@aws-sdk/credential-provider-ini": { + "version": "3.624.0", + "resolved": 
"https://registry.npmjs.org/@aws-sdk/credential-provider-ini/-/credential-provider-ini-3.624.0.tgz", + "integrity": "sha512-mMoNIy7MO2WTBbdqMyLpbt6SZpthE6e0GkRYpsd0yozPt0RZopcBhEh+HG1U9Y1PVODo+jcMk353vAi61CfnhQ==", + "dev": true, "dependencies": { "@aws-sdk/credential-provider-env": "3.620.1", - "@aws-sdk/credential-provider-http": "3.621.0", + "@aws-sdk/credential-provider-http": "3.622.0", "@aws-sdk/credential-provider-process": "3.620.1", - "@aws-sdk/credential-provider-sso": "3.621.0", + "@aws-sdk/credential-provider-sso": "3.624.0", "@aws-sdk/credential-provider-web-identity": "3.621.0", "@aws-sdk/types": "3.609.0", "@smithy/credential-provider-imds": "^3.2.0", @@ -11048,19 +11408,20 @@ "node": ">=16.0.0" }, "peerDependencies": { - "@aws-sdk/client-sts": "^3.621.0" + "@aws-sdk/client-sts": "^3.624.0" } }, - "node_modules/@aws-sdk/client-kinesis/node_modules/@aws-sdk/credential-provider-node": { - "version": "3.621.0", - "resolved": "https://registry.npmjs.org/@aws-sdk/credential-provider-node/-/credential-provider-node-3.621.0.tgz", - "integrity": "sha512-4JqpccUgz5Snanpt2+53hbOBbJQrSFq7E1sAAbgY6BKVQUsW5qyXqnjvSF32kDeKa5JpBl3bBWLZl04IadcPHw==", + "node_modules/@aws-sdk/client-iam/node_modules/@aws-sdk/credential-provider-node": { + "version": "3.624.0", + "resolved": "https://registry.npmjs.org/@aws-sdk/credential-provider-node/-/credential-provider-node-3.624.0.tgz", + "integrity": "sha512-vYyGK7oNpd81BdbH5IlmQ6zfaQqU+rPwsKTDDBeLRjshtrGXOEpfoahVpG9PX0ibu32IOWp4ZyXBNyVrnvcMOw==", + "dev": true, "dependencies": { "@aws-sdk/credential-provider-env": "3.620.1", - "@aws-sdk/credential-provider-http": "3.621.0", - "@aws-sdk/credential-provider-ini": "3.621.0", + "@aws-sdk/credential-provider-http": "3.622.0", + "@aws-sdk/credential-provider-ini": "3.624.0", "@aws-sdk/credential-provider-process": "3.620.1", - "@aws-sdk/credential-provider-sso": "3.621.0", + "@aws-sdk/credential-provider-sso": "3.624.0", "@aws-sdk/credential-provider-web-identity": "3.621.0", 
"@aws-sdk/types": "3.609.0", "@smithy/credential-provider-imds": "^3.2.0", @@ -11073,10 +11434,11 @@ "node": ">=16.0.0" } }, - "node_modules/@aws-sdk/client-kinesis/node_modules/@aws-sdk/credential-provider-process": { + "node_modules/@aws-sdk/client-iam/node_modules/@aws-sdk/credential-provider-process": { "version": "3.620.1", "resolved": "https://registry.npmjs.org/@aws-sdk/credential-provider-process/-/credential-provider-process-3.620.1.tgz", "integrity": "sha512-hWqFMidqLAkaV9G460+1at6qa9vySbjQKKc04p59OT7lZ5cO5VH5S4aI05e+m4j364MBROjjk2ugNvfNf/8ILg==", + "dev": true, "dependencies": { "@aws-sdk/types": "3.609.0", "@smithy/property-provider": "^3.1.3", @@ -11088,12 +11450,13 @@ "node": ">=16.0.0" } }, - "node_modules/@aws-sdk/client-kinesis/node_modules/@aws-sdk/credential-provider-sso": { - "version": "3.621.0", - "resolved": "https://registry.npmjs.org/@aws-sdk/credential-provider-sso/-/credential-provider-sso-3.621.0.tgz", - "integrity": "sha512-Kza0jcFeA/GEL6xJlzR2KFf1PfZKMFnxfGzJzl5yN7EjoGdMijl34KaRyVnfRjnCWcsUpBWKNIDk9WZVMY9yiw==", + "node_modules/@aws-sdk/client-iam/node_modules/@aws-sdk/credential-provider-sso": { + "version": "3.624.0", + "resolved": "https://registry.npmjs.org/@aws-sdk/credential-provider-sso/-/credential-provider-sso-3.624.0.tgz", + "integrity": "sha512-A02bayIjU9APEPKr3HudrFHEx0WfghoSPsPopckDkW7VBqO4wizzcxr75Q9A3vNX+cwg0wCN6UitTNe6pVlRaQ==", + "dev": true, "dependencies": { - "@aws-sdk/client-sso": "3.621.0", + "@aws-sdk/client-sso": "3.624.0", "@aws-sdk/token-providers": "3.614.0", "@aws-sdk/types": "3.609.0", "@smithy/property-provider": "^3.1.3", @@ -11105,10 +11468,11 @@ "node": ">=16.0.0" } }, - "node_modules/@aws-sdk/client-kinesis/node_modules/@aws-sdk/credential-provider-web-identity": { + "node_modules/@aws-sdk/client-iam/node_modules/@aws-sdk/credential-provider-web-identity": { "version": "3.621.0", "resolved": 
"https://registry.npmjs.org/@aws-sdk/credential-provider-web-identity/-/credential-provider-web-identity-3.621.0.tgz", "integrity": "sha512-w7ASSyfNvcx7+bYGep3VBgC3K6vEdLmlpjT7nSIHxxQf+WSdvy+HynwJosrpZax0sK5q0D1Jpn/5q+r5lwwW6w==", + "dev": true, "dependencies": { "@aws-sdk/types": "3.609.0", "@smithy/property-provider": "^3.1.3", @@ -11122,10 +11486,11 @@ "@aws-sdk/client-sts": "^3.621.0" } }, - "node_modules/@aws-sdk/client-kinesis/node_modules/@aws-sdk/middleware-host-header": { + "node_modules/@aws-sdk/client-iam/node_modules/@aws-sdk/middleware-host-header": { "version": "3.620.0", "resolved": "https://registry.npmjs.org/@aws-sdk/middleware-host-header/-/middleware-host-header-3.620.0.tgz", "integrity": "sha512-VMtPEZwqYrII/oUkffYsNWY9PZ9xpNJpMgmyU0rlDQ25O1c0Hk3fJmZRe6pEkAJ0omD7kLrqGl1DUjQVxpd/Rg==", + "dev": true, "dependencies": { "@aws-sdk/types": "3.609.0", "@smithy/protocol-http": "^4.1.0", @@ -11136,10 +11501,11 @@ "node": ">=16.0.0" } }, - "node_modules/@aws-sdk/client-kinesis/node_modules/@aws-sdk/middleware-logger": { + "node_modules/@aws-sdk/client-iam/node_modules/@aws-sdk/middleware-logger": { "version": "3.609.0", "resolved": "https://registry.npmjs.org/@aws-sdk/middleware-logger/-/middleware-logger-3.609.0.tgz", "integrity": "sha512-S62U2dy4jMDhDFDK5gZ4VxFdWzCtLzwbYyFZx2uvPYTECkepLUfzLic2BHg2Qvtu4QjX+oGE3P/7fwaGIsGNuQ==", + "dev": true, "dependencies": { "@aws-sdk/types": "3.609.0", "@smithy/types": "^3.3.0", @@ -11149,10 +11515,11 @@ "node": ">=16.0.0" } }, - "node_modules/@aws-sdk/client-kinesis/node_modules/@aws-sdk/middleware-recursion-detection": { + "node_modules/@aws-sdk/client-iam/node_modules/@aws-sdk/middleware-recursion-detection": { "version": "3.620.0", "resolved": "https://registry.npmjs.org/@aws-sdk/middleware-recursion-detection/-/middleware-recursion-detection-3.620.0.tgz", "integrity": "sha512-nh91S7aGK3e/o1ck64sA/CyoFw+gAYj2BDOnoNa6ouyCrVJED96ZXWbhye/fz9SgmNUZR2g7GdVpiLpMKZoI5w==", + "dev": true, "dependencies": { 
"@aws-sdk/types": "3.609.0", "@smithy/protocol-http": "^4.1.0", @@ -11163,10 +11530,11 @@ "node": ">=16.0.0" } }, - "node_modules/@aws-sdk/client-kinesis/node_modules/@aws-sdk/middleware-user-agent": { + "node_modules/@aws-sdk/client-iam/node_modules/@aws-sdk/middleware-user-agent": { "version": "3.620.0", "resolved": "https://registry.npmjs.org/@aws-sdk/middleware-user-agent/-/middleware-user-agent-3.620.0.tgz", "integrity": "sha512-bvS6etn+KsuL32ubY5D3xNof1qkenpbJXf/ugGXbg0n98DvDFQ/F+SMLxHgbnER5dsKYchNnhmtI6/FC3HFu/A==", + "dev": true, "dependencies": { "@aws-sdk/types": "3.609.0", "@aws-sdk/util-endpoints": "3.614.0", @@ -11178,10 +11546,11 @@ "node": ">=16.0.0" } }, - "node_modules/@aws-sdk/client-kinesis/node_modules/@aws-sdk/region-config-resolver": { + "node_modules/@aws-sdk/client-iam/node_modules/@aws-sdk/region-config-resolver": { "version": "3.614.0", "resolved": "https://registry.npmjs.org/@aws-sdk/region-config-resolver/-/region-config-resolver-3.614.0.tgz", "integrity": "sha512-vDCeMXvic/LU0KFIUjpC3RiSTIkkvESsEfbVHiHH0YINfl8HnEqR5rj+L8+phsCeVg2+LmYwYxd5NRz4PHxt5g==", + "dev": true, "dependencies": { "@aws-sdk/types": "3.609.0", "@smithy/node-config-provider": "^3.1.4", @@ -11194,10 +11563,11 @@ "node": ">=16.0.0" } }, - "node_modules/@aws-sdk/client-kinesis/node_modules/@aws-sdk/token-providers": { + "node_modules/@aws-sdk/client-iam/node_modules/@aws-sdk/token-providers": { "version": "3.614.0", "resolved": "https://registry.npmjs.org/@aws-sdk/token-providers/-/token-providers-3.614.0.tgz", "integrity": "sha512-okItqyY6L9IHdxqs+Z116y5/nda7rHxLvROxtAJdLavWTYDydxrZstImNgGWTeVdmc0xX2gJCI77UYUTQWnhRw==", + "dev": true, "dependencies": { "@aws-sdk/types": "3.609.0", "@smithy/property-provider": "^3.1.3", @@ -11212,10 +11582,11 @@ "@aws-sdk/client-sso-oidc": "^3.614.0" } }, - "node_modules/@aws-sdk/client-kinesis/node_modules/@aws-sdk/types": { + "node_modules/@aws-sdk/client-iam/node_modules/@aws-sdk/types": { "version": "3.609.0", "resolved": 
"https://registry.npmjs.org/@aws-sdk/types/-/types-3.609.0.tgz", "integrity": "sha512-+Tqnh9w0h2LcrUsdXyT1F8mNhXz+tVYBtP19LpeEGntmvHwa2XzvLUCWpoIAIVsHp5+HdB2X9Sn0KAtmbFXc2Q==", + "dev": true, "dependencies": { "@smithy/types": "^3.3.0", "tslib": "^2.6.2" @@ -11224,10 +11595,11 @@ "node": ">=16.0.0" } }, - "node_modules/@aws-sdk/client-kinesis/node_modules/@aws-sdk/util-endpoints": { + "node_modules/@aws-sdk/client-iam/node_modules/@aws-sdk/util-endpoints": { "version": "3.614.0", "resolved": "https://registry.npmjs.org/@aws-sdk/util-endpoints/-/util-endpoints-3.614.0.tgz", "integrity": "sha512-wK2cdrXHH4oz4IomV/yrGkftU9A+ITB6nFL+rxxyO78is2ifHJpFdV4aqk4LSkXYPi6CXWNru/Dqc7yiKXgJPw==", + "dev": true, "dependencies": { "@aws-sdk/types": "3.609.0", "@smithy/types": "^3.3.0", @@ -11238,10 +11610,11 @@ "node": ">=16.0.0" } }, - "node_modules/@aws-sdk/client-kinesis/node_modules/@aws-sdk/util-user-agent-browser": { + "node_modules/@aws-sdk/client-iam/node_modules/@aws-sdk/util-user-agent-browser": { "version": "3.609.0", "resolved": "https://registry.npmjs.org/@aws-sdk/util-user-agent-browser/-/util-user-agent-browser-3.609.0.tgz", "integrity": "sha512-fojPU+mNahzQ0YHYBsx0ZIhmMA96H+ZIZ665ObU9tl+SGdbLneVZVikGve+NmHTQwHzwkFsZYYnVKAkreJLAtA==", + "dev": true, "dependencies": { "@aws-sdk/types": "3.609.0", "@smithy/types": "^3.3.0", @@ -11249,10 +11622,11 @@ "tslib": "^2.6.2" } }, - "node_modules/@aws-sdk/client-kinesis/node_modules/@aws-sdk/util-user-agent-node": { + "node_modules/@aws-sdk/client-iam/node_modules/@aws-sdk/util-user-agent-node": { "version": "3.614.0", "resolved": "https://registry.npmjs.org/@aws-sdk/util-user-agent-node/-/util-user-agent-node-3.614.0.tgz", "integrity": "sha512-15ElZT88peoHnq5TEoEtZwoXTXRxNrk60TZNdpl/TUBJ5oNJ9Dqb5Z4ryb8ofN6nm9aFf59GVAerFDz8iUoHBA==", + "dev": true, "dependencies": { "@aws-sdk/types": "3.609.0", "@smithy/node-config-provider": "^3.1.4", @@ -11271,132 +11645,7 @@ } } }, - 
"node_modules/@aws-sdk/client-kinesis/node_modules/@smithy/node-config-provider": { - "version": "3.1.9", - "resolved": "https://registry.npmjs.org/@smithy/node-config-provider/-/node-config-provider-3.1.9.tgz", - "integrity": "sha512-qRHoah49QJ71eemjuS/WhUXB+mpNtwHRWQr77J/m40ewBVVwvo52kYAmb7iuaECgGTTcYxHS4Wmewfwy++ueew==", - "dependencies": { - "@smithy/property-provider": "^3.1.8", - "@smithy/shared-ini-file-loader": "^3.1.9", - "@smithy/types": "^3.6.0", - "tslib": "^2.6.2" - }, - "engines": { - "node": ">=16.0.0" - } - }, - "node_modules/@aws-sdk/client-kinesis/node_modules/@smithy/shared-ini-file-loader": { - "version": "3.1.9", - "resolved": "https://registry.npmjs.org/@smithy/shared-ini-file-loader/-/shared-ini-file-loader-3.1.9.tgz", - "integrity": "sha512-/+OsJRNtoRbtsX0UpSgWVxFZLsJHo/4sTr+kBg/J78sr7iC+tHeOvOJrS5hCpVQ6sWBbhWLp1UNiuMyZhE6pmA==", - "dependencies": { - "@smithy/types": "^3.6.0", - "tslib": "^2.6.2" - }, - "engines": { - "node": ">=16.0.0" - } - }, - "node_modules/@aws-sdk/client-kinesis/node_modules/@smithy/util-utf8": { - "version": "3.0.0", - "resolved": "https://registry.npmjs.org/@smithy/util-utf8/-/util-utf8-3.0.0.tgz", - "integrity": "sha512-rUeT12bxFnplYDe815GXbq/oixEGHfRFFtcTF3YdDi/JaENIM6aSYYLJydG83UNzLXeRI5K8abYd/8Sp/QM0kA==", - "dependencies": { - "@smithy/util-buffer-from": "^3.0.0", - "tslib": "^2.6.2" - }, - "engines": { - "node": ">=16.0.0" - } - }, - "node_modules/@aws-sdk/client-lambda": { - "version": "3.678.0", - "resolved": "https://registry.npmjs.org/@aws-sdk/client-lambda/-/client-lambda-3.678.0.tgz", - "integrity": "sha512-3D2tTrJg8A8sXYvzc0SrPYBfaRgcq/7D5KGWnoonEEM8bZxORBS69aZU6ihZFEKNykvuoIoky6EoCu2HA6HOPA==", - "dev": true, - "dependencies": { - "@aws-crypto/sha256-browser": "5.2.0", - "@aws-crypto/sha256-js": "5.2.0", - "@aws-sdk/client-sso-oidc": "3.678.0", - "@aws-sdk/client-sts": "3.678.0", - "@aws-sdk/core": "3.678.0", - "@aws-sdk/credential-provider-node": "3.678.0", - "@aws-sdk/middleware-host-header": 
"3.667.0", - "@aws-sdk/middleware-logger": "3.667.0", - "@aws-sdk/middleware-recursion-detection": "3.667.0", - "@aws-sdk/middleware-user-agent": "3.678.0", - "@aws-sdk/region-config-resolver": "3.667.0", - "@aws-sdk/types": "3.667.0", - "@aws-sdk/util-endpoints": "3.667.0", - "@aws-sdk/util-user-agent-browser": "3.675.0", - "@aws-sdk/util-user-agent-node": "3.678.0", - "@smithy/config-resolver": "^3.0.9", - "@smithy/core": "^2.4.8", - "@smithy/eventstream-serde-browser": "^3.0.10", - "@smithy/eventstream-serde-config-resolver": "^3.0.7", - "@smithy/eventstream-serde-node": "^3.0.9", - "@smithy/fetch-http-handler": "^3.2.9", - "@smithy/hash-node": "^3.0.7", - "@smithy/invalid-dependency": "^3.0.7", - "@smithy/middleware-content-length": "^3.0.9", - "@smithy/middleware-endpoint": "^3.1.4", - "@smithy/middleware-retry": "^3.0.23", - "@smithy/middleware-serde": "^3.0.7", - "@smithy/middleware-stack": "^3.0.7", - "@smithy/node-config-provider": "^3.1.8", - "@smithy/node-http-handler": "^3.2.4", - "@smithy/protocol-http": "^4.1.4", - "@smithy/smithy-client": "^3.4.0", - "@smithy/types": "^3.5.0", - "@smithy/url-parser": "^3.0.7", - "@smithy/util-base64": "^3.0.0", - "@smithy/util-body-length-browser": "^3.0.0", - "@smithy/util-body-length-node": "^3.0.0", - "@smithy/util-defaults-mode-browser": "^3.0.23", - "@smithy/util-defaults-mode-node": "^3.0.23", - "@smithy/util-endpoints": "^2.1.3", - "@smithy/util-middleware": "^3.0.7", - "@smithy/util-retry": "^3.0.7", - "@smithy/util-stream": "^3.1.9", - "@smithy/util-utf8": "^3.0.0", - "@smithy/util-waiter": "^3.1.6", - "tslib": "^2.6.2" - }, - "engines": { - "node": ">=16.0.0" - } - }, - "node_modules/@aws-sdk/client-lambda/node_modules/@aws-sdk/region-config-resolver": { - "version": "3.667.0", - "resolved": "https://registry.npmjs.org/@aws-sdk/region-config-resolver/-/region-config-resolver-3.667.0.tgz", - "integrity": "sha512-iNr+JhhA902JMKHG9IwT9YdaEx6KGl6vjAL5BRNeOjfj4cZYMog6Lz/IlfOAltMtT0w88DAHDEFrBd2uO0l2eg==", - 
"dev": true, - "dependencies": { - "@aws-sdk/types": "3.667.0", - "@smithy/node-config-provider": "^3.1.8", - "@smithy/types": "^3.5.0", - "@smithy/util-config-provider": "^3.0.0", - "@smithy/util-middleware": "^3.0.7", - "tslib": "^2.6.2" - }, - "engines": { - "node": ">=16.0.0" - } - }, - "node_modules/@aws-sdk/client-lambda/node_modules/@aws-sdk/types": { - "version": "3.667.0", - "resolved": "https://registry.npmjs.org/@aws-sdk/types/-/types-3.667.0.tgz", - "integrity": "sha512-gYq0xCsqFfQaSL/yT1Gl1vIUjtsg7d7RhnUfsXaHt8xTxOKRTdH9GjbesBjXOzgOvB0W0vfssfreSNGFlOOMJg==", - "dev": true, - "dependencies": { - "@smithy/types": "^3.5.0", - "tslib": "^2.6.2" - }, - "engines": { - "node": ">=16.0.0" - } - }, - "node_modules/@aws-sdk/client-lambda/node_modules/@smithy/node-config-provider": { + "node_modules/@aws-sdk/client-iam/node_modules/@smithy/node-config-provider": { "version": "3.1.9", "resolved": "https://registry.npmjs.org/@smithy/node-config-provider/-/node-config-provider-3.1.9.tgz", "integrity": "sha512-qRHoah49QJ71eemjuS/WhUXB+mpNtwHRWQr77J/m40ewBVVwvo52kYAmb7iuaECgGTTcYxHS4Wmewfwy++ueew==", @@ -11411,7 +11660,7 @@ "node": ">=16.0.0" } }, - "node_modules/@aws-sdk/client-lambda/node_modules/@smithy/shared-ini-file-loader": { + "node_modules/@aws-sdk/client-iam/node_modules/@smithy/shared-ini-file-loader": { "version": "3.1.9", "resolved": "https://registry.npmjs.org/@smithy/shared-ini-file-loader/-/shared-ini-file-loader-3.1.9.tgz", "integrity": "sha512-/+OsJRNtoRbtsX0UpSgWVxFZLsJHo/4sTr+kBg/J78sr7iC+tHeOvOJrS5hCpVQ6sWBbhWLp1UNiuMyZhE6pmA==", @@ -11424,7 +11673,7 @@ "node": ">=16.0.0" } }, - "node_modules/@aws-sdk/client-lambda/node_modules/@smithy/util-utf8": { + "node_modules/@aws-sdk/client-iam/node_modules/@smithy/util-utf8": { "version": "3.0.0", "resolved": "https://registry.npmjs.org/@smithy/util-utf8/-/util-utf8-3.0.0.tgz", "integrity": "sha512-rUeT12bxFnplYDe815GXbq/oixEGHfRFFtcTF3YdDi/JaENIM6aSYYLJydG83UNzLXeRI5K8abYd/8Sp/QM0kA==", @@ -11437,10 
+11686,10 @@ "node": ">=16.0.0" } }, - "node_modules/@aws-sdk/client-personalize-events": { + "node_modules/@aws-sdk/client-kinesis": { "version": "3.621.0", - "resolved": "https://registry.npmjs.org/@aws-sdk/client-personalize-events/-/client-personalize-events-3.621.0.tgz", - "integrity": "sha512-qkVkqYvOe3WVuVNL/gRITGYFfHJCx2ijGFK7H3hNUJH3P4AwskmouAd1pWf+3cbGedRnj2is7iw7E602LeJIHA==", + "resolved": "https://registry.npmjs.org/@aws-sdk/client-kinesis/-/client-kinesis-3.621.0.tgz", + "integrity": "sha512-53Omt/beFmTQPjQNpMuPMk5nMzYVsXCRiO+MeqygZEKYG1fWw/UGluCWVbi7WjClOHacsW8lQcsqIRvkPDFNag==", "dependencies": { "@aws-crypto/sha256-browser": "5.2.0", "@aws-crypto/sha256-js": "5.2.0", @@ -11459,6 +11708,9 @@ "@aws-sdk/util-user-agent-node": "3.614.0", "@smithy/config-resolver": "^3.0.5", "@smithy/core": "^2.3.1", + "@smithy/eventstream-serde-browser": "^3.0.5", + "@smithy/eventstream-serde-config-resolver": "^3.0.3", + "@smithy/eventstream-serde-node": "^3.0.4", "@smithy/fetch-http-handler": "^3.2.4", "@smithy/hash-node": "^3.0.3", "@smithy/invalid-dependency": "^3.0.3", @@ -11482,13 +11734,14 @@ "@smithy/util-middleware": "^3.0.3", "@smithy/util-retry": "^3.0.3", "@smithy/util-utf8": "^3.0.0", + "@smithy/util-waiter": "^3.1.2", "tslib": "^2.6.2" }, "engines": { "node": ">=16.0.0" } }, - "node_modules/@aws-sdk/client-personalize-events/node_modules/@aws-sdk/client-sso": { + "node_modules/@aws-sdk/client-kinesis/node_modules/@aws-sdk/client-sso": { "version": "3.621.0", "resolved": "https://registry.npmjs.org/@aws-sdk/client-sso/-/client-sso-3.621.0.tgz", "integrity": "sha512-xpKfikN4u0BaUYZA9FGUMkkDmfoIP0Q03+A86WjqDWhcOoqNA1DkHsE4kZ+r064ifkPUfcNuUvlkVTEoBZoFjA==", @@ -11536,7 +11789,7 @@ "node": ">=16.0.0" } }, - "node_modules/@aws-sdk/client-personalize-events/node_modules/@aws-sdk/client-sso-oidc": { + "node_modules/@aws-sdk/client-kinesis/node_modules/@aws-sdk/client-sso-oidc": { "version": "3.621.0", "resolved": 
"https://registry.npmjs.org/@aws-sdk/client-sso-oidc/-/client-sso-oidc-3.621.0.tgz", "integrity": "sha512-mMjk3mFUwV2Y68POf1BQMTF+F6qxt5tPu6daEUCNGC9Cenk3h2YXQQoS4/eSyYzuBiYk3vx49VgleRvdvkg8rg==", @@ -11588,7 +11841,7 @@ "@aws-sdk/client-sts": "^3.621.0" } }, - "node_modules/@aws-sdk/client-personalize-events/node_modules/@aws-sdk/client-sts": { + "node_modules/@aws-sdk/client-kinesis/node_modules/@aws-sdk/client-sts": { "version": "3.621.0", "resolved": "https://registry.npmjs.org/@aws-sdk/client-sts/-/client-sts-3.621.0.tgz", "integrity": "sha512-707uiuReSt+nAx6d0c21xLjLm2lxeKc7padxjv92CIrIocnQSlJPxSCM7r5zBhwiahJA6MNQwmTl2xznU67KgA==", @@ -11638,7 +11891,7 @@ "node": ">=16.0.0" } }, - "node_modules/@aws-sdk/client-personalize-events/node_modules/@aws-sdk/core": { + "node_modules/@aws-sdk/client-kinesis/node_modules/@aws-sdk/core": { "version": "3.621.0", "resolved": "https://registry.npmjs.org/@aws-sdk/core/-/core-3.621.0.tgz", "integrity": "sha512-CtOwWmDdEiINkGXD93iGfXjN0WmCp9l45cDWHHGa8lRgEDyhuL7bwd/pH5aSzj0j8SiQBG2k0S7DHbd5RaqvbQ==", @@ -11657,7 +11910,7 @@ "node": ">=16.0.0" } }, - "node_modules/@aws-sdk/client-personalize-events/node_modules/@aws-sdk/credential-provider-env": { + "node_modules/@aws-sdk/client-kinesis/node_modules/@aws-sdk/credential-provider-env": { "version": "3.620.1", "resolved": "https://registry.npmjs.org/@aws-sdk/credential-provider-env/-/credential-provider-env-3.620.1.tgz", "integrity": "sha512-ExuILJ2qLW5ZO+rgkNRj0xiAipKT16Rk77buvPP8csR7kkCflT/gXTyzRe/uzIiETTxM7tr8xuO9MP/DQXqkfg==", @@ -11671,7 +11924,7 @@ "node": ">=16.0.0" } }, - "node_modules/@aws-sdk/client-personalize-events/node_modules/@aws-sdk/credential-provider-http": { + "node_modules/@aws-sdk/client-kinesis/node_modules/@aws-sdk/credential-provider-http": { "version": "3.621.0", "resolved": "https://registry.npmjs.org/@aws-sdk/credential-provider-http/-/credential-provider-http-3.621.0.tgz", "integrity": 
"sha512-/jc2tEsdkT1QQAI5Dvoci50DbSxtJrevemwFsm0B73pwCcOQZ5ZwwSdVqGsPutzYzUVx3bcXg3LRL7jLACqRIg==", @@ -11690,7 +11943,7 @@ "node": ">=16.0.0" } }, - "node_modules/@aws-sdk/client-personalize-events/node_modules/@aws-sdk/credential-provider-ini": { + "node_modules/@aws-sdk/client-kinesis/node_modules/@aws-sdk/credential-provider-ini": { "version": "3.621.0", "resolved": "https://registry.npmjs.org/@aws-sdk/credential-provider-ini/-/credential-provider-ini-3.621.0.tgz", "integrity": "sha512-0EWVnSc+JQn5HLnF5Xv405M8n4zfdx9gyGdpnCmAmFqEDHA8LmBdxJdpUk1Ovp/I5oPANhjojxabIW5f1uU0RA==", @@ -11714,7 +11967,7 @@ "@aws-sdk/client-sts": "^3.621.0" } }, - "node_modules/@aws-sdk/client-personalize-events/node_modules/@aws-sdk/credential-provider-node": { + "node_modules/@aws-sdk/client-kinesis/node_modules/@aws-sdk/credential-provider-node": { "version": "3.621.0", "resolved": "https://registry.npmjs.org/@aws-sdk/credential-provider-node/-/credential-provider-node-3.621.0.tgz", "integrity": "sha512-4JqpccUgz5Snanpt2+53hbOBbJQrSFq7E1sAAbgY6BKVQUsW5qyXqnjvSF32kDeKa5JpBl3bBWLZl04IadcPHw==", @@ -11736,7 +11989,7 @@ "node": ">=16.0.0" } }, - "node_modules/@aws-sdk/client-personalize-events/node_modules/@aws-sdk/credential-provider-process": { + "node_modules/@aws-sdk/client-kinesis/node_modules/@aws-sdk/credential-provider-process": { "version": "3.620.1", "resolved": "https://registry.npmjs.org/@aws-sdk/credential-provider-process/-/credential-provider-process-3.620.1.tgz", "integrity": "sha512-hWqFMidqLAkaV9G460+1at6qa9vySbjQKKc04p59OT7lZ5cO5VH5S4aI05e+m4j364MBROjjk2ugNvfNf/8ILg==", @@ -11751,7 +12004,7 @@ "node": ">=16.0.0" } }, - "node_modules/@aws-sdk/client-personalize-events/node_modules/@aws-sdk/credential-provider-sso": { + "node_modules/@aws-sdk/client-kinesis/node_modules/@aws-sdk/credential-provider-sso": { "version": "3.621.0", "resolved": "https://registry.npmjs.org/@aws-sdk/credential-provider-sso/-/credential-provider-sso-3.621.0.tgz", "integrity": 
"sha512-Kza0jcFeA/GEL6xJlzR2KFf1PfZKMFnxfGzJzl5yN7EjoGdMijl34KaRyVnfRjnCWcsUpBWKNIDk9WZVMY9yiw==", @@ -11768,7 +12021,7 @@ "node": ">=16.0.0" } }, - "node_modules/@aws-sdk/client-personalize-events/node_modules/@aws-sdk/credential-provider-web-identity": { + "node_modules/@aws-sdk/client-kinesis/node_modules/@aws-sdk/credential-provider-web-identity": { "version": "3.621.0", "resolved": "https://registry.npmjs.org/@aws-sdk/credential-provider-web-identity/-/credential-provider-web-identity-3.621.0.tgz", "integrity": "sha512-w7ASSyfNvcx7+bYGep3VBgC3K6vEdLmlpjT7nSIHxxQf+WSdvy+HynwJosrpZax0sK5q0D1Jpn/5q+r5lwwW6w==", @@ -11785,7 +12038,7 @@ "@aws-sdk/client-sts": "^3.621.0" } }, - "node_modules/@aws-sdk/client-personalize-events/node_modules/@aws-sdk/middleware-host-header": { + "node_modules/@aws-sdk/client-kinesis/node_modules/@aws-sdk/middleware-host-header": { "version": "3.620.0", "resolved": "https://registry.npmjs.org/@aws-sdk/middleware-host-header/-/middleware-host-header-3.620.0.tgz", "integrity": "sha512-VMtPEZwqYrII/oUkffYsNWY9PZ9xpNJpMgmyU0rlDQ25O1c0Hk3fJmZRe6pEkAJ0omD7kLrqGl1DUjQVxpd/Rg==", @@ -11799,7 +12052,7 @@ "node": ">=16.0.0" } }, - "node_modules/@aws-sdk/client-personalize-events/node_modules/@aws-sdk/middleware-logger": { + "node_modules/@aws-sdk/client-kinesis/node_modules/@aws-sdk/middleware-logger": { "version": "3.609.0", "resolved": "https://registry.npmjs.org/@aws-sdk/middleware-logger/-/middleware-logger-3.609.0.tgz", "integrity": "sha512-S62U2dy4jMDhDFDK5gZ4VxFdWzCtLzwbYyFZx2uvPYTECkepLUfzLic2BHg2Qvtu4QjX+oGE3P/7fwaGIsGNuQ==", @@ -11812,7 +12065,7 @@ "node": ">=16.0.0" } }, - "node_modules/@aws-sdk/client-personalize-events/node_modules/@aws-sdk/middleware-recursion-detection": { + "node_modules/@aws-sdk/client-kinesis/node_modules/@aws-sdk/middleware-recursion-detection": { "version": "3.620.0", "resolved": "https://registry.npmjs.org/@aws-sdk/middleware-recursion-detection/-/middleware-recursion-detection-3.620.0.tgz", "integrity": 
"sha512-nh91S7aGK3e/o1ck64sA/CyoFw+gAYj2BDOnoNa6ouyCrVJED96ZXWbhye/fz9SgmNUZR2g7GdVpiLpMKZoI5w==", @@ -11826,7 +12079,7 @@ "node": ">=16.0.0" } }, - "node_modules/@aws-sdk/client-personalize-events/node_modules/@aws-sdk/middleware-user-agent": { + "node_modules/@aws-sdk/client-kinesis/node_modules/@aws-sdk/middleware-user-agent": { "version": "3.620.0", "resolved": "https://registry.npmjs.org/@aws-sdk/middleware-user-agent/-/middleware-user-agent-3.620.0.tgz", "integrity": "sha512-bvS6etn+KsuL32ubY5D3xNof1qkenpbJXf/ugGXbg0n98DvDFQ/F+SMLxHgbnER5dsKYchNnhmtI6/FC3HFu/A==", @@ -11841,7 +12094,7 @@ "node": ">=16.0.0" } }, - "node_modules/@aws-sdk/client-personalize-events/node_modules/@aws-sdk/region-config-resolver": { + "node_modules/@aws-sdk/client-kinesis/node_modules/@aws-sdk/region-config-resolver": { "version": "3.614.0", "resolved": "https://registry.npmjs.org/@aws-sdk/region-config-resolver/-/region-config-resolver-3.614.0.tgz", "integrity": "sha512-vDCeMXvic/LU0KFIUjpC3RiSTIkkvESsEfbVHiHH0YINfl8HnEqR5rj+L8+phsCeVg2+LmYwYxd5NRz4PHxt5g==", @@ -11857,7 +12110,7 @@ "node": ">=16.0.0" } }, - "node_modules/@aws-sdk/client-personalize-events/node_modules/@aws-sdk/token-providers": { + "node_modules/@aws-sdk/client-kinesis/node_modules/@aws-sdk/token-providers": { "version": "3.614.0", "resolved": "https://registry.npmjs.org/@aws-sdk/token-providers/-/token-providers-3.614.0.tgz", "integrity": "sha512-okItqyY6L9IHdxqs+Z116y5/nda7rHxLvROxtAJdLavWTYDydxrZstImNgGWTeVdmc0xX2gJCI77UYUTQWnhRw==", @@ -11875,7 +12128,7 @@ "@aws-sdk/client-sso-oidc": "^3.614.0" } }, - "node_modules/@aws-sdk/client-personalize-events/node_modules/@aws-sdk/types": { + "node_modules/@aws-sdk/client-kinesis/node_modules/@aws-sdk/types": { "version": "3.609.0", "resolved": "https://registry.npmjs.org/@aws-sdk/types/-/types-3.609.0.tgz", "integrity": "sha512-+Tqnh9w0h2LcrUsdXyT1F8mNhXz+tVYBtP19LpeEGntmvHwa2XzvLUCWpoIAIVsHp5+HdB2X9Sn0KAtmbFXc2Q==", @@ -11887,7 +12140,7 @@ "node": ">=16.0.0" } }, - 
"node_modules/@aws-sdk/client-personalize-events/node_modules/@aws-sdk/util-endpoints": { + "node_modules/@aws-sdk/client-kinesis/node_modules/@aws-sdk/util-endpoints": { "version": "3.614.0", "resolved": "https://registry.npmjs.org/@aws-sdk/util-endpoints/-/util-endpoints-3.614.0.tgz", "integrity": "sha512-wK2cdrXHH4oz4IomV/yrGkftU9A+ITB6nFL+rxxyO78is2ifHJpFdV4aqk4LSkXYPi6CXWNru/Dqc7yiKXgJPw==", @@ -11901,7 +12154,7 @@ "node": ">=16.0.0" } }, - "node_modules/@aws-sdk/client-personalize-events/node_modules/@aws-sdk/util-user-agent-browser": { + "node_modules/@aws-sdk/client-kinesis/node_modules/@aws-sdk/util-user-agent-browser": { "version": "3.609.0", "resolved": "https://registry.npmjs.org/@aws-sdk/util-user-agent-browser/-/util-user-agent-browser-3.609.0.tgz", "integrity": "sha512-fojPU+mNahzQ0YHYBsx0ZIhmMA96H+ZIZ665ObU9tl+SGdbLneVZVikGve+NmHTQwHzwkFsZYYnVKAkreJLAtA==", @@ -11912,7 +12165,7 @@ "tslib": "^2.6.2" } }, - "node_modules/@aws-sdk/client-personalize-events/node_modules/@aws-sdk/util-user-agent-node": { + "node_modules/@aws-sdk/client-kinesis/node_modules/@aws-sdk/util-user-agent-node": { "version": "3.614.0", "resolved": "https://registry.npmjs.org/@aws-sdk/util-user-agent-node/-/util-user-agent-node-3.614.0.tgz", "integrity": "sha512-15ElZT88peoHnq5TEoEtZwoXTXRxNrk60TZNdpl/TUBJ5oNJ9Dqb5Z4ryb8ofN6nm9aFf59GVAerFDz8iUoHBA==", @@ -11934,7 +12187,7 @@ } } }, - "node_modules/@aws-sdk/client-personalize-events/node_modules/@smithy/node-config-provider": { + "node_modules/@aws-sdk/client-kinesis/node_modules/@smithy/node-config-provider": { "version": "3.1.9", "resolved": "https://registry.npmjs.org/@smithy/node-config-provider/-/node-config-provider-3.1.9.tgz", "integrity": "sha512-qRHoah49QJ71eemjuS/WhUXB+mpNtwHRWQr77J/m40ewBVVwvo52kYAmb7iuaECgGTTcYxHS4Wmewfwy++ueew==", @@ -11948,7 +12201,7 @@ "node": ">=16.0.0" } }, - "node_modules/@aws-sdk/client-personalize-events/node_modules/@smithy/shared-ini-file-loader": { + 
"node_modules/@aws-sdk/client-kinesis/node_modules/@smithy/shared-ini-file-loader": { "version": "3.1.9", "resolved": "https://registry.npmjs.org/@smithy/shared-ini-file-loader/-/shared-ini-file-loader-3.1.9.tgz", "integrity": "sha512-/+OsJRNtoRbtsX0UpSgWVxFZLsJHo/4sTr+kBg/J78sr7iC+tHeOvOJrS5hCpVQ6sWBbhWLp1UNiuMyZhE6pmA==", @@ -11960,7 +12213,7 @@ "node": ">=16.0.0" } }, - "node_modules/@aws-sdk/client-personalize-events/node_modules/@smithy/util-utf8": { + "node_modules/@aws-sdk/client-kinesis/node_modules/@smithy/util-utf8": { "version": "3.0.0", "resolved": "https://registry.npmjs.org/@smithy/util-utf8/-/util-utf8-3.0.0.tgz", "integrity": "sha512-rUeT12bxFnplYDe815GXbq/oixEGHfRFFtcTF3YdDi/JaENIM6aSYYLJydG83UNzLXeRI5K8abYd/8Sp/QM0kA==", @@ -11972,22 +12225,148 @@ "node": ">=16.0.0" } }, - "node_modules/@aws-sdk/client-rds": { - "version": "3.624.0", - "resolved": "https://registry.npmjs.org/@aws-sdk/client-rds/-/client-rds-3.624.0.tgz", - "integrity": "sha512-WZytF5YaDqEaJ/+2xm//ux+ER3pDwHU4ub4xXgMs46vG8WVLEDzILXp+Nn78w7W2sMwaQO12RYMvqgIB+/wF2A==", + "node_modules/@aws-sdk/client-lambda": { + "version": "3.678.0", + "resolved": "https://registry.npmjs.org/@aws-sdk/client-lambda/-/client-lambda-3.678.0.tgz", + "integrity": "sha512-3D2tTrJg8A8sXYvzc0SrPYBfaRgcq/7D5KGWnoonEEM8bZxORBS69aZU6ihZFEKNykvuoIoky6EoCu2HA6HOPA==", "dev": true, "dependencies": { "@aws-crypto/sha256-browser": "5.2.0", "@aws-crypto/sha256-js": "5.2.0", - "@aws-sdk/client-sso-oidc": "3.624.0", - "@aws-sdk/client-sts": "3.624.0", - "@aws-sdk/core": "3.624.0", - "@aws-sdk/credential-provider-node": "3.624.0", + "@aws-sdk/client-sso-oidc": "3.678.0", + "@aws-sdk/client-sts": "3.678.0", + "@aws-sdk/core": "3.678.0", + "@aws-sdk/credential-provider-node": "3.678.0", + "@aws-sdk/middleware-host-header": "3.667.0", + "@aws-sdk/middleware-logger": "3.667.0", + "@aws-sdk/middleware-recursion-detection": "3.667.0", + "@aws-sdk/middleware-user-agent": "3.678.0", + "@aws-sdk/region-config-resolver": 
"3.667.0", + "@aws-sdk/types": "3.667.0", + "@aws-sdk/util-endpoints": "3.667.0", + "@aws-sdk/util-user-agent-browser": "3.675.0", + "@aws-sdk/util-user-agent-node": "3.678.0", + "@smithy/config-resolver": "^3.0.9", + "@smithy/core": "^2.4.8", + "@smithy/eventstream-serde-browser": "^3.0.10", + "@smithy/eventstream-serde-config-resolver": "^3.0.7", + "@smithy/eventstream-serde-node": "^3.0.9", + "@smithy/fetch-http-handler": "^3.2.9", + "@smithy/hash-node": "^3.0.7", + "@smithy/invalid-dependency": "^3.0.7", + "@smithy/middleware-content-length": "^3.0.9", + "@smithy/middleware-endpoint": "^3.1.4", + "@smithy/middleware-retry": "^3.0.23", + "@smithy/middleware-serde": "^3.0.7", + "@smithy/middleware-stack": "^3.0.7", + "@smithy/node-config-provider": "^3.1.8", + "@smithy/node-http-handler": "^3.2.4", + "@smithy/protocol-http": "^4.1.4", + "@smithy/smithy-client": "^3.4.0", + "@smithy/types": "^3.5.0", + "@smithy/url-parser": "^3.0.7", + "@smithy/util-base64": "^3.0.0", + "@smithy/util-body-length-browser": "^3.0.0", + "@smithy/util-body-length-node": "^3.0.0", + "@smithy/util-defaults-mode-browser": "^3.0.23", + "@smithy/util-defaults-mode-node": "^3.0.23", + "@smithy/util-endpoints": "^2.1.3", + "@smithy/util-middleware": "^3.0.7", + "@smithy/util-retry": "^3.0.7", + "@smithy/util-stream": "^3.1.9", + "@smithy/util-utf8": "^3.0.0", + "@smithy/util-waiter": "^3.1.6", + "tslib": "^2.6.2" + }, + "engines": { + "node": ">=16.0.0" + } + }, + "node_modules/@aws-sdk/client-lambda/node_modules/@aws-sdk/region-config-resolver": { + "version": "3.667.0", + "resolved": "https://registry.npmjs.org/@aws-sdk/region-config-resolver/-/region-config-resolver-3.667.0.tgz", + "integrity": "sha512-iNr+JhhA902JMKHG9IwT9YdaEx6KGl6vjAL5BRNeOjfj4cZYMog6Lz/IlfOAltMtT0w88DAHDEFrBd2uO0l2eg==", + "dev": true, + "dependencies": { + "@aws-sdk/types": "3.667.0", + "@smithy/node-config-provider": "^3.1.8", + "@smithy/types": "^3.5.0", + "@smithy/util-config-provider": "^3.0.0", + 
"@smithy/util-middleware": "^3.0.7", + "tslib": "^2.6.2" + }, + "engines": { + "node": ">=16.0.0" + } + }, + "node_modules/@aws-sdk/client-lambda/node_modules/@aws-sdk/types": { + "version": "3.667.0", + "resolved": "https://registry.npmjs.org/@aws-sdk/types/-/types-3.667.0.tgz", + "integrity": "sha512-gYq0xCsqFfQaSL/yT1Gl1vIUjtsg7d7RhnUfsXaHt8xTxOKRTdH9GjbesBjXOzgOvB0W0vfssfreSNGFlOOMJg==", + "dev": true, + "dependencies": { + "@smithy/types": "^3.5.0", + "tslib": "^2.6.2" + }, + "engines": { + "node": ">=16.0.0" + } + }, + "node_modules/@aws-sdk/client-lambda/node_modules/@smithy/node-config-provider": { + "version": "3.1.9", + "resolved": "https://registry.npmjs.org/@smithy/node-config-provider/-/node-config-provider-3.1.9.tgz", + "integrity": "sha512-qRHoah49QJ71eemjuS/WhUXB+mpNtwHRWQr77J/m40ewBVVwvo52kYAmb7iuaECgGTTcYxHS4Wmewfwy++ueew==", + "dev": true, + "dependencies": { + "@smithy/property-provider": "^3.1.8", + "@smithy/shared-ini-file-loader": "^3.1.9", + "@smithy/types": "^3.6.0", + "tslib": "^2.6.2" + }, + "engines": { + "node": ">=16.0.0" + } + }, + "node_modules/@aws-sdk/client-lambda/node_modules/@smithy/shared-ini-file-loader": { + "version": "3.1.9", + "resolved": "https://registry.npmjs.org/@smithy/shared-ini-file-loader/-/shared-ini-file-loader-3.1.9.tgz", + "integrity": "sha512-/+OsJRNtoRbtsX0UpSgWVxFZLsJHo/4sTr+kBg/J78sr7iC+tHeOvOJrS5hCpVQ6sWBbhWLp1UNiuMyZhE6pmA==", + "dev": true, + "dependencies": { + "@smithy/types": "^3.6.0", + "tslib": "^2.6.2" + }, + "engines": { + "node": ">=16.0.0" + } + }, + "node_modules/@aws-sdk/client-lambda/node_modules/@smithy/util-utf8": { + "version": "3.0.0", + "resolved": "https://registry.npmjs.org/@smithy/util-utf8/-/util-utf8-3.0.0.tgz", + "integrity": "sha512-rUeT12bxFnplYDe815GXbq/oixEGHfRFFtcTF3YdDi/JaENIM6aSYYLJydG83UNzLXeRI5K8abYd/8Sp/QM0kA==", + "dev": true, + "dependencies": { + "@smithy/util-buffer-from": "^3.0.0", + "tslib": "^2.6.2" + }, + "engines": { + "node": ">=16.0.0" + } + }, + 
"node_modules/@aws-sdk/client-personalize-events": { + "version": "3.621.0", + "resolved": "https://registry.npmjs.org/@aws-sdk/client-personalize-events/-/client-personalize-events-3.621.0.tgz", + "integrity": "sha512-qkVkqYvOe3WVuVNL/gRITGYFfHJCx2ijGFK7H3hNUJH3P4AwskmouAd1pWf+3cbGedRnj2is7iw7E602LeJIHA==", + "dependencies": { + "@aws-crypto/sha256-browser": "5.2.0", + "@aws-crypto/sha256-js": "5.2.0", + "@aws-sdk/client-sso-oidc": "3.621.0", + "@aws-sdk/client-sts": "3.621.0", + "@aws-sdk/core": "3.621.0", + "@aws-sdk/credential-provider-node": "3.621.0", "@aws-sdk/middleware-host-header": "3.620.0", "@aws-sdk/middleware-logger": "3.609.0", "@aws-sdk/middleware-recursion-detection": "3.620.0", - "@aws-sdk/middleware-sdk-rds": "3.620.0", "@aws-sdk/middleware-user-agent": "3.620.0", "@aws-sdk/region-config-resolver": "3.614.0", "@aws-sdk/types": "3.609.0", @@ -11995,46 +12374,44 @@ "@aws-sdk/util-user-agent-browser": "3.609.0", "@aws-sdk/util-user-agent-node": "3.614.0", "@smithy/config-resolver": "^3.0.5", - "@smithy/core": "^2.3.2", + "@smithy/core": "^2.3.1", "@smithy/fetch-http-handler": "^3.2.4", "@smithy/hash-node": "^3.0.3", "@smithy/invalid-dependency": "^3.0.3", "@smithy/middleware-content-length": "^3.0.5", "@smithy/middleware-endpoint": "^3.1.0", - "@smithy/middleware-retry": "^3.0.14", + "@smithy/middleware-retry": "^3.0.13", "@smithy/middleware-serde": "^3.0.3", "@smithy/middleware-stack": "^3.0.3", "@smithy/node-config-provider": "^3.1.4", "@smithy/node-http-handler": "^3.1.4", "@smithy/protocol-http": "^4.1.0", - "@smithy/smithy-client": "^3.1.12", + "@smithy/smithy-client": "^3.1.11", "@smithy/types": "^3.3.0", "@smithy/url-parser": "^3.0.3", "@smithy/util-base64": "^3.0.0", "@smithy/util-body-length-browser": "^3.0.0", "@smithy/util-body-length-node": "^3.0.0", - "@smithy/util-defaults-mode-browser": "^3.0.14", - "@smithy/util-defaults-mode-node": "^3.0.14", + "@smithy/util-defaults-mode-browser": "^3.0.13", + "@smithy/util-defaults-mode-node": 
"^3.0.13", "@smithy/util-endpoints": "^2.0.5", "@smithy/util-middleware": "^3.0.3", "@smithy/util-retry": "^3.0.3", "@smithy/util-utf8": "^3.0.0", - "@smithy/util-waiter": "^3.1.2", "tslib": "^2.6.2" }, "engines": { "node": ">=16.0.0" } }, - "node_modules/@aws-sdk/client-rds/node_modules/@aws-sdk/client-sso": { - "version": "3.624.0", - "resolved": "https://registry.npmjs.org/@aws-sdk/client-sso/-/client-sso-3.624.0.tgz", - "integrity": "sha512-EX6EF+rJzMPC5dcdsu40xSi2To7GSvdGQNIpe97pD9WvZwM9tRNQnNM4T6HA4gjV1L6Jwk8rBlG/CnveXtLEMw==", - "dev": true, + "node_modules/@aws-sdk/client-personalize-events/node_modules/@aws-sdk/client-sso": { + "version": "3.621.0", + "resolved": "https://registry.npmjs.org/@aws-sdk/client-sso/-/client-sso-3.621.0.tgz", + "integrity": "sha512-xpKfikN4u0BaUYZA9FGUMkkDmfoIP0Q03+A86WjqDWhcOoqNA1DkHsE4kZ+r064ifkPUfcNuUvlkVTEoBZoFjA==", "dependencies": { "@aws-crypto/sha256-browser": "5.2.0", "@aws-crypto/sha256-js": "5.2.0", - "@aws-sdk/core": "3.624.0", + "@aws-sdk/core": "3.621.0", "@aws-sdk/middleware-host-header": "3.620.0", "@aws-sdk/middleware-logger": "3.609.0", "@aws-sdk/middleware-recursion-detection": "3.620.0", @@ -12045,26 +12422,26 @@ "@aws-sdk/util-user-agent-browser": "3.609.0", "@aws-sdk/util-user-agent-node": "3.614.0", "@smithy/config-resolver": "^3.0.5", - "@smithy/core": "^2.3.2", + "@smithy/core": "^2.3.1", "@smithy/fetch-http-handler": "^3.2.4", "@smithy/hash-node": "^3.0.3", "@smithy/invalid-dependency": "^3.0.3", "@smithy/middleware-content-length": "^3.0.5", "@smithy/middleware-endpoint": "^3.1.0", - "@smithy/middleware-retry": "^3.0.14", + "@smithy/middleware-retry": "^3.0.13", "@smithy/middleware-serde": "^3.0.3", "@smithy/middleware-stack": "^3.0.3", "@smithy/node-config-provider": "^3.1.4", "@smithy/node-http-handler": "^3.1.4", "@smithy/protocol-http": "^4.1.0", - "@smithy/smithy-client": "^3.1.12", + "@smithy/smithy-client": "^3.1.11", "@smithy/types": "^3.3.0", "@smithy/url-parser": "^3.0.3", 
"@smithy/util-base64": "^3.0.0", "@smithy/util-body-length-browser": "^3.0.0", "@smithy/util-body-length-node": "^3.0.0", - "@smithy/util-defaults-mode-browser": "^3.0.14", - "@smithy/util-defaults-mode-node": "^3.0.14", + "@smithy/util-defaults-mode-browser": "^3.0.13", + "@smithy/util-defaults-mode-node": "^3.0.13", "@smithy/util-endpoints": "^2.0.5", "@smithy/util-middleware": "^3.0.3", "@smithy/util-retry": "^3.0.3", @@ -12075,16 +12452,15 @@ "node": ">=16.0.0" } }, - "node_modules/@aws-sdk/client-rds/node_modules/@aws-sdk/client-sso-oidc": { - "version": "3.624.0", - "resolved": "https://registry.npmjs.org/@aws-sdk/client-sso-oidc/-/client-sso-oidc-3.624.0.tgz", - "integrity": "sha512-Ki2uKYJKKtfHxxZsiMTOvJoVRP6b2pZ1u3rcUb2m/nVgBPUfLdl8ZkGpqE29I+t5/QaS/sEdbn6cgMUZwl+3Dg==", - "dev": true, + "node_modules/@aws-sdk/client-personalize-events/node_modules/@aws-sdk/client-sso-oidc": { + "version": "3.621.0", + "resolved": "https://registry.npmjs.org/@aws-sdk/client-sso-oidc/-/client-sso-oidc-3.621.0.tgz", + "integrity": "sha512-mMjk3mFUwV2Y68POf1BQMTF+F6qxt5tPu6daEUCNGC9Cenk3h2YXQQoS4/eSyYzuBiYk3vx49VgleRvdvkg8rg==", "dependencies": { "@aws-crypto/sha256-browser": "5.2.0", "@aws-crypto/sha256-js": "5.2.0", - "@aws-sdk/core": "3.624.0", - "@aws-sdk/credential-provider-node": "3.624.0", + "@aws-sdk/core": "3.621.0", + "@aws-sdk/credential-provider-node": "3.621.0", "@aws-sdk/middleware-host-header": "3.620.0", "@aws-sdk/middleware-logger": "3.609.0", "@aws-sdk/middleware-recursion-detection": "3.620.0", @@ -12095,26 +12471,26 @@ "@aws-sdk/util-user-agent-browser": "3.609.0", "@aws-sdk/util-user-agent-node": "3.614.0", "@smithy/config-resolver": "^3.0.5", - "@smithy/core": "^2.3.2", + "@smithy/core": "^2.3.1", "@smithy/fetch-http-handler": "^3.2.4", "@smithy/hash-node": "^3.0.3", "@smithy/invalid-dependency": "^3.0.3", "@smithy/middleware-content-length": "^3.0.5", "@smithy/middleware-endpoint": "^3.1.0", - "@smithy/middleware-retry": "^3.0.14", + 
"@smithy/middleware-retry": "^3.0.13", "@smithy/middleware-serde": "^3.0.3", "@smithy/middleware-stack": "^3.0.3", "@smithy/node-config-provider": "^3.1.4", "@smithy/node-http-handler": "^3.1.4", "@smithy/protocol-http": "^4.1.0", - "@smithy/smithy-client": "^3.1.12", + "@smithy/smithy-client": "^3.1.11", "@smithy/types": "^3.3.0", "@smithy/url-parser": "^3.0.3", "@smithy/util-base64": "^3.0.0", "@smithy/util-body-length-browser": "^3.0.0", "@smithy/util-body-length-node": "^3.0.0", - "@smithy/util-defaults-mode-browser": "^3.0.14", - "@smithy/util-defaults-mode-node": "^3.0.14", + "@smithy/util-defaults-mode-browser": "^3.0.13", + "@smithy/util-defaults-mode-node": "^3.0.13", "@smithy/util-endpoints": "^2.0.5", "@smithy/util-middleware": "^3.0.3", "@smithy/util-retry": "^3.0.3", @@ -12125,20 +12501,19 @@ "node": ">=16.0.0" }, "peerDependencies": { - "@aws-sdk/client-sts": "^3.624.0" + "@aws-sdk/client-sts": "^3.621.0" } }, - "node_modules/@aws-sdk/client-rds/node_modules/@aws-sdk/client-sts": { - "version": "3.624.0", - "resolved": "https://registry.npmjs.org/@aws-sdk/client-sts/-/client-sts-3.624.0.tgz", - "integrity": "sha512-k36fLZCb2nfoV/DKK3jbRgO/Yf7/R80pgYfMiotkGjnZwDmRvNN08z4l06L9C+CieazzkgRxNUzyppsYcYsQaw==", - "dev": true, + "node_modules/@aws-sdk/client-personalize-events/node_modules/@aws-sdk/client-sts": { + "version": "3.621.0", + "resolved": "https://registry.npmjs.org/@aws-sdk/client-sts/-/client-sts-3.621.0.tgz", + "integrity": "sha512-707uiuReSt+nAx6d0c21xLjLm2lxeKc7padxjv92CIrIocnQSlJPxSCM7r5zBhwiahJA6MNQwmTl2xznU67KgA==", "dependencies": { "@aws-crypto/sha256-browser": "5.2.0", "@aws-crypto/sha256-js": "5.2.0", - "@aws-sdk/client-sso-oidc": "3.624.0", - "@aws-sdk/core": "3.624.0", - "@aws-sdk/credential-provider-node": "3.624.0", + "@aws-sdk/client-sso-oidc": "3.621.0", + "@aws-sdk/core": "3.621.0", + "@aws-sdk/credential-provider-node": "3.621.0", "@aws-sdk/middleware-host-header": "3.620.0", "@aws-sdk/middleware-logger": "3.609.0", 
"@aws-sdk/middleware-recursion-detection": "3.620.0", @@ -12149,26 +12524,26 @@ "@aws-sdk/util-user-agent-browser": "3.609.0", "@aws-sdk/util-user-agent-node": "3.614.0", "@smithy/config-resolver": "^3.0.5", - "@smithy/core": "^2.3.2", + "@smithy/core": "^2.3.1", "@smithy/fetch-http-handler": "^3.2.4", "@smithy/hash-node": "^3.0.3", "@smithy/invalid-dependency": "^3.0.3", "@smithy/middleware-content-length": "^3.0.5", "@smithy/middleware-endpoint": "^3.1.0", - "@smithy/middleware-retry": "^3.0.14", + "@smithy/middleware-retry": "^3.0.13", "@smithy/middleware-serde": "^3.0.3", "@smithy/middleware-stack": "^3.0.3", "@smithy/node-config-provider": "^3.1.4", "@smithy/node-http-handler": "^3.1.4", "@smithy/protocol-http": "^4.1.0", - "@smithy/smithy-client": "^3.1.12", + "@smithy/smithy-client": "^3.1.11", "@smithy/types": "^3.3.0", "@smithy/url-parser": "^3.0.3", "@smithy/util-base64": "^3.0.0", "@smithy/util-body-length-browser": "^3.0.0", "@smithy/util-body-length-node": "^3.0.0", - "@smithy/util-defaults-mode-browser": "^3.0.14", - "@smithy/util-defaults-mode-node": "^3.0.14", + "@smithy/util-defaults-mode-browser": "^3.0.13", + "@smithy/util-defaults-mode-node": "^3.0.13", "@smithy/util-endpoints": "^2.0.5", "@smithy/util-middleware": "^3.0.3", "@smithy/util-retry": "^3.0.3", @@ -12179,448 +12554,2369 @@ "node": ">=16.0.0" } }, - "node_modules/@aws-sdk/client-rds/node_modules/@aws-sdk/core": { - "version": "3.624.0", - "resolved": "https://registry.npmjs.org/@aws-sdk/core/-/core-3.624.0.tgz", - "integrity": "sha512-WyFmPbhRIvtWi7hBp8uSFy+iPpj8ccNV/eX86hwF4irMjfc/FtsGVIAeBXxXM/vGCjkdfEzOnl+tJ2XACD4OXg==", - "dev": true, + "node_modules/@aws-sdk/client-personalize-events/node_modules/@aws-sdk/core": { + "version": "3.621.0", + "resolved": "https://registry.npmjs.org/@aws-sdk/core/-/core-3.621.0.tgz", + "integrity": "sha512-CtOwWmDdEiINkGXD93iGfXjN0WmCp9l45cDWHHGa8lRgEDyhuL7bwd/pH5aSzj0j8SiQBG2k0S7DHbd5RaqvbQ==", "dependencies": { - "@smithy/core": "^2.3.2", + 
"@smithy/core": "^2.3.1", "@smithy/node-config-provider": "^3.1.4", "@smithy/protocol-http": "^4.1.0", "@smithy/signature-v4": "^4.1.0", - "@smithy/smithy-client": "^3.1.12", + "@smithy/smithy-client": "^3.1.11", "@smithy/types": "^3.3.0", "@smithy/util-middleware": "^3.0.3", "fast-xml-parser": "4.4.1", "tslib": "^2.6.2" }, "engines": { - "node": ">=16.0.0" + "node": ">=16.0.0" + } + }, + "node_modules/@aws-sdk/client-personalize-events/node_modules/@aws-sdk/credential-provider-env": { + "version": "3.620.1", + "resolved": "https://registry.npmjs.org/@aws-sdk/credential-provider-env/-/credential-provider-env-3.620.1.tgz", + "integrity": "sha512-ExuILJ2qLW5ZO+rgkNRj0xiAipKT16Rk77buvPP8csR7kkCflT/gXTyzRe/uzIiETTxM7tr8xuO9MP/DQXqkfg==", + "dependencies": { + "@aws-sdk/types": "3.609.0", + "@smithy/property-provider": "^3.1.3", + "@smithy/types": "^3.3.0", + "tslib": "^2.6.2" + }, + "engines": { + "node": ">=16.0.0" + } + }, + "node_modules/@aws-sdk/client-personalize-events/node_modules/@aws-sdk/credential-provider-http": { + "version": "3.621.0", + "resolved": "https://registry.npmjs.org/@aws-sdk/credential-provider-http/-/credential-provider-http-3.621.0.tgz", + "integrity": "sha512-/jc2tEsdkT1QQAI5Dvoci50DbSxtJrevemwFsm0B73pwCcOQZ5ZwwSdVqGsPutzYzUVx3bcXg3LRL7jLACqRIg==", + "dependencies": { + "@aws-sdk/types": "3.609.0", + "@smithy/fetch-http-handler": "^3.2.4", + "@smithy/node-http-handler": "^3.1.4", + "@smithy/property-provider": "^3.1.3", + "@smithy/protocol-http": "^4.1.0", + "@smithy/smithy-client": "^3.1.11", + "@smithy/types": "^3.3.0", + "@smithy/util-stream": "^3.1.3", + "tslib": "^2.6.2" + }, + "engines": { + "node": ">=16.0.0" + } + }, + "node_modules/@aws-sdk/client-personalize-events/node_modules/@aws-sdk/credential-provider-ini": { + "version": "3.621.0", + "resolved": "https://registry.npmjs.org/@aws-sdk/credential-provider-ini/-/credential-provider-ini-3.621.0.tgz", + "integrity": 
"sha512-0EWVnSc+JQn5HLnF5Xv405M8n4zfdx9gyGdpnCmAmFqEDHA8LmBdxJdpUk1Ovp/I5oPANhjojxabIW5f1uU0RA==", + "dependencies": { + "@aws-sdk/credential-provider-env": "3.620.1", + "@aws-sdk/credential-provider-http": "3.621.0", + "@aws-sdk/credential-provider-process": "3.620.1", + "@aws-sdk/credential-provider-sso": "3.621.0", + "@aws-sdk/credential-provider-web-identity": "3.621.0", + "@aws-sdk/types": "3.609.0", + "@smithy/credential-provider-imds": "^3.2.0", + "@smithy/property-provider": "^3.1.3", + "@smithy/shared-ini-file-loader": "^3.1.4", + "@smithy/types": "^3.3.0", + "tslib": "^2.6.2" + }, + "engines": { + "node": ">=16.0.0" + }, + "peerDependencies": { + "@aws-sdk/client-sts": "^3.621.0" + } + }, + "node_modules/@aws-sdk/client-personalize-events/node_modules/@aws-sdk/credential-provider-node": { + "version": "3.621.0", + "resolved": "https://registry.npmjs.org/@aws-sdk/credential-provider-node/-/credential-provider-node-3.621.0.tgz", + "integrity": "sha512-4JqpccUgz5Snanpt2+53hbOBbJQrSFq7E1sAAbgY6BKVQUsW5qyXqnjvSF32kDeKa5JpBl3bBWLZl04IadcPHw==", + "dependencies": { + "@aws-sdk/credential-provider-env": "3.620.1", + "@aws-sdk/credential-provider-http": "3.621.0", + "@aws-sdk/credential-provider-ini": "3.621.0", + "@aws-sdk/credential-provider-process": "3.620.1", + "@aws-sdk/credential-provider-sso": "3.621.0", + "@aws-sdk/credential-provider-web-identity": "3.621.0", + "@aws-sdk/types": "3.609.0", + "@smithy/credential-provider-imds": "^3.2.0", + "@smithy/property-provider": "^3.1.3", + "@smithy/shared-ini-file-loader": "^3.1.4", + "@smithy/types": "^3.3.0", + "tslib": "^2.6.2" + }, + "engines": { + "node": ">=16.0.0" + } + }, + "node_modules/@aws-sdk/client-personalize-events/node_modules/@aws-sdk/credential-provider-process": { + "version": "3.620.1", + "resolved": "https://registry.npmjs.org/@aws-sdk/credential-provider-process/-/credential-provider-process-3.620.1.tgz", + "integrity": 
"sha512-hWqFMidqLAkaV9G460+1at6qa9vySbjQKKc04p59OT7lZ5cO5VH5S4aI05e+m4j364MBROjjk2ugNvfNf/8ILg==", + "dependencies": { + "@aws-sdk/types": "3.609.0", + "@smithy/property-provider": "^3.1.3", + "@smithy/shared-ini-file-loader": "^3.1.4", + "@smithy/types": "^3.3.0", + "tslib": "^2.6.2" + }, + "engines": { + "node": ">=16.0.0" + } + }, + "node_modules/@aws-sdk/client-personalize-events/node_modules/@aws-sdk/credential-provider-sso": { + "version": "3.621.0", + "resolved": "https://registry.npmjs.org/@aws-sdk/credential-provider-sso/-/credential-provider-sso-3.621.0.tgz", + "integrity": "sha512-Kza0jcFeA/GEL6xJlzR2KFf1PfZKMFnxfGzJzl5yN7EjoGdMijl34KaRyVnfRjnCWcsUpBWKNIDk9WZVMY9yiw==", + "dependencies": { + "@aws-sdk/client-sso": "3.621.0", + "@aws-sdk/token-providers": "3.614.0", + "@aws-sdk/types": "3.609.0", + "@smithy/property-provider": "^3.1.3", + "@smithy/shared-ini-file-loader": "^3.1.4", + "@smithy/types": "^3.3.0", + "tslib": "^2.6.2" + }, + "engines": { + "node": ">=16.0.0" + } + }, + "node_modules/@aws-sdk/client-personalize-events/node_modules/@aws-sdk/credential-provider-web-identity": { + "version": "3.621.0", + "resolved": "https://registry.npmjs.org/@aws-sdk/credential-provider-web-identity/-/credential-provider-web-identity-3.621.0.tgz", + "integrity": "sha512-w7ASSyfNvcx7+bYGep3VBgC3K6vEdLmlpjT7nSIHxxQf+WSdvy+HynwJosrpZax0sK5q0D1Jpn/5q+r5lwwW6w==", + "dependencies": { + "@aws-sdk/types": "3.609.0", + "@smithy/property-provider": "^3.1.3", + "@smithy/types": "^3.3.0", + "tslib": "^2.6.2" + }, + "engines": { + "node": ">=16.0.0" + }, + "peerDependencies": { + "@aws-sdk/client-sts": "^3.621.0" + } + }, + "node_modules/@aws-sdk/client-personalize-events/node_modules/@aws-sdk/middleware-host-header": { + "version": "3.620.0", + "resolved": "https://registry.npmjs.org/@aws-sdk/middleware-host-header/-/middleware-host-header-3.620.0.tgz", + "integrity": "sha512-VMtPEZwqYrII/oUkffYsNWY9PZ9xpNJpMgmyU0rlDQ25O1c0Hk3fJmZRe6pEkAJ0omD7kLrqGl1DUjQVxpd/Rg==", + 
"dependencies": { + "@aws-sdk/types": "3.609.0", + "@smithy/protocol-http": "^4.1.0", + "@smithy/types": "^3.3.0", + "tslib": "^2.6.2" + }, + "engines": { + "node": ">=16.0.0" + } + }, + "node_modules/@aws-sdk/client-personalize-events/node_modules/@aws-sdk/middleware-logger": { + "version": "3.609.0", + "resolved": "https://registry.npmjs.org/@aws-sdk/middleware-logger/-/middleware-logger-3.609.0.tgz", + "integrity": "sha512-S62U2dy4jMDhDFDK5gZ4VxFdWzCtLzwbYyFZx2uvPYTECkepLUfzLic2BHg2Qvtu4QjX+oGE3P/7fwaGIsGNuQ==", + "dependencies": { + "@aws-sdk/types": "3.609.0", + "@smithy/types": "^3.3.0", + "tslib": "^2.6.2" + }, + "engines": { + "node": ">=16.0.0" + } + }, + "node_modules/@aws-sdk/client-personalize-events/node_modules/@aws-sdk/middleware-recursion-detection": { + "version": "3.620.0", + "resolved": "https://registry.npmjs.org/@aws-sdk/middleware-recursion-detection/-/middleware-recursion-detection-3.620.0.tgz", + "integrity": "sha512-nh91S7aGK3e/o1ck64sA/CyoFw+gAYj2BDOnoNa6ouyCrVJED96ZXWbhye/fz9SgmNUZR2g7GdVpiLpMKZoI5w==", + "dependencies": { + "@aws-sdk/types": "3.609.0", + "@smithy/protocol-http": "^4.1.0", + "@smithy/types": "^3.3.0", + "tslib": "^2.6.2" + }, + "engines": { + "node": ">=16.0.0" + } + }, + "node_modules/@aws-sdk/client-personalize-events/node_modules/@aws-sdk/middleware-user-agent": { + "version": "3.620.0", + "resolved": "https://registry.npmjs.org/@aws-sdk/middleware-user-agent/-/middleware-user-agent-3.620.0.tgz", + "integrity": "sha512-bvS6etn+KsuL32ubY5D3xNof1qkenpbJXf/ugGXbg0n98DvDFQ/F+SMLxHgbnER5dsKYchNnhmtI6/FC3HFu/A==", + "dependencies": { + "@aws-sdk/types": "3.609.0", + "@aws-sdk/util-endpoints": "3.614.0", + "@smithy/protocol-http": "^4.1.0", + "@smithy/types": "^3.3.0", + "tslib": "^2.6.2" + }, + "engines": { + "node": ">=16.0.0" + } + }, + "node_modules/@aws-sdk/client-personalize-events/node_modules/@aws-sdk/region-config-resolver": { + "version": "3.614.0", + "resolved": 
"https://registry.npmjs.org/@aws-sdk/region-config-resolver/-/region-config-resolver-3.614.0.tgz", + "integrity": "sha512-vDCeMXvic/LU0KFIUjpC3RiSTIkkvESsEfbVHiHH0YINfl8HnEqR5rj+L8+phsCeVg2+LmYwYxd5NRz4PHxt5g==", + "dependencies": { + "@aws-sdk/types": "3.609.0", + "@smithy/node-config-provider": "^3.1.4", + "@smithy/types": "^3.3.0", + "@smithy/util-config-provider": "^3.0.0", + "@smithy/util-middleware": "^3.0.3", + "tslib": "^2.6.2" + }, + "engines": { + "node": ">=16.0.0" + } + }, + "node_modules/@aws-sdk/client-personalize-events/node_modules/@aws-sdk/token-providers": { + "version": "3.614.0", + "resolved": "https://registry.npmjs.org/@aws-sdk/token-providers/-/token-providers-3.614.0.tgz", + "integrity": "sha512-okItqyY6L9IHdxqs+Z116y5/nda7rHxLvROxtAJdLavWTYDydxrZstImNgGWTeVdmc0xX2gJCI77UYUTQWnhRw==", + "dependencies": { + "@aws-sdk/types": "3.609.0", + "@smithy/property-provider": "^3.1.3", + "@smithy/shared-ini-file-loader": "^3.1.4", + "@smithy/types": "^3.3.0", + "tslib": "^2.6.2" + }, + "engines": { + "node": ">=16.0.0" + }, + "peerDependencies": { + "@aws-sdk/client-sso-oidc": "^3.614.0" + } + }, + "node_modules/@aws-sdk/client-personalize-events/node_modules/@aws-sdk/types": { + "version": "3.609.0", + "resolved": "https://registry.npmjs.org/@aws-sdk/types/-/types-3.609.0.tgz", + "integrity": "sha512-+Tqnh9w0h2LcrUsdXyT1F8mNhXz+tVYBtP19LpeEGntmvHwa2XzvLUCWpoIAIVsHp5+HdB2X9Sn0KAtmbFXc2Q==", + "dependencies": { + "@smithy/types": "^3.3.0", + "tslib": "^2.6.2" + }, + "engines": { + "node": ">=16.0.0" + } + }, + "node_modules/@aws-sdk/client-personalize-events/node_modules/@aws-sdk/util-endpoints": { + "version": "3.614.0", + "resolved": "https://registry.npmjs.org/@aws-sdk/util-endpoints/-/util-endpoints-3.614.0.tgz", + "integrity": "sha512-wK2cdrXHH4oz4IomV/yrGkftU9A+ITB6nFL+rxxyO78is2ifHJpFdV4aqk4LSkXYPi6CXWNru/Dqc7yiKXgJPw==", + "dependencies": { + "@aws-sdk/types": "3.609.0", + "@smithy/types": "^3.3.0", + "@smithy/util-endpoints": "^2.0.5", + 
"tslib": "^2.6.2" + }, + "engines": { + "node": ">=16.0.0" + } + }, + "node_modules/@aws-sdk/client-personalize-events/node_modules/@aws-sdk/util-user-agent-browser": { + "version": "3.609.0", + "resolved": "https://registry.npmjs.org/@aws-sdk/util-user-agent-browser/-/util-user-agent-browser-3.609.0.tgz", + "integrity": "sha512-fojPU+mNahzQ0YHYBsx0ZIhmMA96H+ZIZ665ObU9tl+SGdbLneVZVikGve+NmHTQwHzwkFsZYYnVKAkreJLAtA==", + "dependencies": { + "@aws-sdk/types": "3.609.0", + "@smithy/types": "^3.3.0", + "bowser": "^2.11.0", + "tslib": "^2.6.2" + } + }, + "node_modules/@aws-sdk/client-personalize-events/node_modules/@aws-sdk/util-user-agent-node": { + "version": "3.614.0", + "resolved": "https://registry.npmjs.org/@aws-sdk/util-user-agent-node/-/util-user-agent-node-3.614.0.tgz", + "integrity": "sha512-15ElZT88peoHnq5TEoEtZwoXTXRxNrk60TZNdpl/TUBJ5oNJ9Dqb5Z4ryb8ofN6nm9aFf59GVAerFDz8iUoHBA==", + "dependencies": { + "@aws-sdk/types": "3.609.0", + "@smithy/node-config-provider": "^3.1.4", + "@smithy/types": "^3.3.0", + "tslib": "^2.6.2" + }, + "engines": { + "node": ">=16.0.0" + }, + "peerDependencies": { + "aws-crt": ">=1.0.0" + }, + "peerDependenciesMeta": { + "aws-crt": { + "optional": true + } + } + }, + "node_modules/@aws-sdk/client-personalize-events/node_modules/@smithy/node-config-provider": { + "version": "3.1.9", + "resolved": "https://registry.npmjs.org/@smithy/node-config-provider/-/node-config-provider-3.1.9.tgz", + "integrity": "sha512-qRHoah49QJ71eemjuS/WhUXB+mpNtwHRWQr77J/m40ewBVVwvo52kYAmb7iuaECgGTTcYxHS4Wmewfwy++ueew==", + "dependencies": { + "@smithy/property-provider": "^3.1.8", + "@smithy/shared-ini-file-loader": "^3.1.9", + "@smithy/types": "^3.6.0", + "tslib": "^2.6.2" + }, + "engines": { + "node": ">=16.0.0" + } + }, + "node_modules/@aws-sdk/client-personalize-events/node_modules/@smithy/shared-ini-file-loader": { + "version": "3.1.9", + "resolved": "https://registry.npmjs.org/@smithy/shared-ini-file-loader/-/shared-ini-file-loader-3.1.9.tgz", + 
"integrity": "sha512-/+OsJRNtoRbtsX0UpSgWVxFZLsJHo/4sTr+kBg/J78sr7iC+tHeOvOJrS5hCpVQ6sWBbhWLp1UNiuMyZhE6pmA==", + "dependencies": { + "@smithy/types": "^3.6.0", + "tslib": "^2.6.2" + }, + "engines": { + "node": ">=16.0.0" + } + }, + "node_modules/@aws-sdk/client-personalize-events/node_modules/@smithy/util-utf8": { + "version": "3.0.0", + "resolved": "https://registry.npmjs.org/@smithy/util-utf8/-/util-utf8-3.0.0.tgz", + "integrity": "sha512-rUeT12bxFnplYDe815GXbq/oixEGHfRFFtcTF3YdDi/JaENIM6aSYYLJydG83UNzLXeRI5K8abYd/8Sp/QM0kA==", + "dependencies": { + "@smithy/util-buffer-from": "^3.0.0", + "tslib": "^2.6.2" + }, + "engines": { + "node": ">=16.0.0" + } + }, + "node_modules/@aws-sdk/client-rds": { + "version": "3.624.0", + "resolved": "https://registry.npmjs.org/@aws-sdk/client-rds/-/client-rds-3.624.0.tgz", + "integrity": "sha512-WZytF5YaDqEaJ/+2xm//ux+ER3pDwHU4ub4xXgMs46vG8WVLEDzILXp+Nn78w7W2sMwaQO12RYMvqgIB+/wF2A==", + "dev": true, + "dependencies": { + "@aws-crypto/sha256-browser": "5.2.0", + "@aws-crypto/sha256-js": "5.2.0", + "@aws-sdk/client-sso-oidc": "3.624.0", + "@aws-sdk/client-sts": "3.624.0", + "@aws-sdk/core": "3.624.0", + "@aws-sdk/credential-provider-node": "3.624.0", + "@aws-sdk/middleware-host-header": "3.620.0", + "@aws-sdk/middleware-logger": "3.609.0", + "@aws-sdk/middleware-recursion-detection": "3.620.0", + "@aws-sdk/middleware-sdk-rds": "3.620.0", + "@aws-sdk/middleware-user-agent": "3.620.0", + "@aws-sdk/region-config-resolver": "3.614.0", + "@aws-sdk/types": "3.609.0", + "@aws-sdk/util-endpoints": "3.614.0", + "@aws-sdk/util-user-agent-browser": "3.609.0", + "@aws-sdk/util-user-agent-node": "3.614.0", + "@smithy/config-resolver": "^3.0.5", + "@smithy/core": "^2.3.2", + "@smithy/fetch-http-handler": "^3.2.4", + "@smithy/hash-node": "^3.0.3", + "@smithy/invalid-dependency": "^3.0.3", + "@smithy/middleware-content-length": "^3.0.5", + "@smithy/middleware-endpoint": "^3.1.0", + "@smithy/middleware-retry": "^3.0.14", + 
"@smithy/middleware-serde": "^3.0.3", + "@smithy/middleware-stack": "^3.0.3", + "@smithy/node-config-provider": "^3.1.4", + "@smithy/node-http-handler": "^3.1.4", + "@smithy/protocol-http": "^4.1.0", + "@smithy/smithy-client": "^3.1.12", + "@smithy/types": "^3.3.0", + "@smithy/url-parser": "^3.0.3", + "@smithy/util-base64": "^3.0.0", + "@smithy/util-body-length-browser": "^3.0.0", + "@smithy/util-body-length-node": "^3.0.0", + "@smithy/util-defaults-mode-browser": "^3.0.14", + "@smithy/util-defaults-mode-node": "^3.0.14", + "@smithy/util-endpoints": "^2.0.5", + "@smithy/util-middleware": "^3.0.3", + "@smithy/util-retry": "^3.0.3", + "@smithy/util-utf8": "^3.0.0", + "@smithy/util-waiter": "^3.1.2", + "tslib": "^2.6.2" + }, + "engines": { + "node": ">=16.0.0" + } + }, + "node_modules/@aws-sdk/client-rds/node_modules/@aws-sdk/client-sso": { + "version": "3.624.0", + "resolved": "https://registry.npmjs.org/@aws-sdk/client-sso/-/client-sso-3.624.0.tgz", + "integrity": "sha512-EX6EF+rJzMPC5dcdsu40xSi2To7GSvdGQNIpe97pD9WvZwM9tRNQnNM4T6HA4gjV1L6Jwk8rBlG/CnveXtLEMw==", + "dev": true, + "dependencies": { + "@aws-crypto/sha256-browser": "5.2.0", + "@aws-crypto/sha256-js": "5.2.0", + "@aws-sdk/core": "3.624.0", + "@aws-sdk/middleware-host-header": "3.620.0", + "@aws-sdk/middleware-logger": "3.609.0", + "@aws-sdk/middleware-recursion-detection": "3.620.0", + "@aws-sdk/middleware-user-agent": "3.620.0", + "@aws-sdk/region-config-resolver": "3.614.0", + "@aws-sdk/types": "3.609.0", + "@aws-sdk/util-endpoints": "3.614.0", + "@aws-sdk/util-user-agent-browser": "3.609.0", + "@aws-sdk/util-user-agent-node": "3.614.0", + "@smithy/config-resolver": "^3.0.5", + "@smithy/core": "^2.3.2", + "@smithy/fetch-http-handler": "^3.2.4", + "@smithy/hash-node": "^3.0.3", + "@smithy/invalid-dependency": "^3.0.3", + "@smithy/middleware-content-length": "^3.0.5", + "@smithy/middleware-endpoint": "^3.1.0", + "@smithy/middleware-retry": "^3.0.14", + "@smithy/middleware-serde": "^3.0.3", + 
"@smithy/middleware-stack": "^3.0.3", + "@smithy/node-config-provider": "^3.1.4", + "@smithy/node-http-handler": "^3.1.4", + "@smithy/protocol-http": "^4.1.0", + "@smithy/smithy-client": "^3.1.12", + "@smithy/types": "^3.3.0", + "@smithy/url-parser": "^3.0.3", + "@smithy/util-base64": "^3.0.0", + "@smithy/util-body-length-browser": "^3.0.0", + "@smithy/util-body-length-node": "^3.0.0", + "@smithy/util-defaults-mode-browser": "^3.0.14", + "@smithy/util-defaults-mode-node": "^3.0.14", + "@smithy/util-endpoints": "^2.0.5", + "@smithy/util-middleware": "^3.0.3", + "@smithy/util-retry": "^3.0.3", + "@smithy/util-utf8": "^3.0.0", + "tslib": "^2.6.2" + }, + "engines": { + "node": ">=16.0.0" + } + }, + "node_modules/@aws-sdk/client-rds/node_modules/@aws-sdk/client-sso-oidc": { + "version": "3.624.0", + "resolved": "https://registry.npmjs.org/@aws-sdk/client-sso-oidc/-/client-sso-oidc-3.624.0.tgz", + "integrity": "sha512-Ki2uKYJKKtfHxxZsiMTOvJoVRP6b2pZ1u3rcUb2m/nVgBPUfLdl8ZkGpqE29I+t5/QaS/sEdbn6cgMUZwl+3Dg==", + "dev": true, + "dependencies": { + "@aws-crypto/sha256-browser": "5.2.0", + "@aws-crypto/sha256-js": "5.2.0", + "@aws-sdk/core": "3.624.0", + "@aws-sdk/credential-provider-node": "3.624.0", + "@aws-sdk/middleware-host-header": "3.620.0", + "@aws-sdk/middleware-logger": "3.609.0", + "@aws-sdk/middleware-recursion-detection": "3.620.0", + "@aws-sdk/middleware-user-agent": "3.620.0", + "@aws-sdk/region-config-resolver": "3.614.0", + "@aws-sdk/types": "3.609.0", + "@aws-sdk/util-endpoints": "3.614.0", + "@aws-sdk/util-user-agent-browser": "3.609.0", + "@aws-sdk/util-user-agent-node": "3.614.0", + "@smithy/config-resolver": "^3.0.5", + "@smithy/core": "^2.3.2", + "@smithy/fetch-http-handler": "^3.2.4", + "@smithy/hash-node": "^3.0.3", + "@smithy/invalid-dependency": "^3.0.3", + "@smithy/middleware-content-length": "^3.0.5", + "@smithy/middleware-endpoint": "^3.1.0", + "@smithy/middleware-retry": "^3.0.14", + "@smithy/middleware-serde": "^3.0.3", + 
"@smithy/middleware-stack": "^3.0.3", + "@smithy/node-config-provider": "^3.1.4", + "@smithy/node-http-handler": "^3.1.4", + "@smithy/protocol-http": "^4.1.0", + "@smithy/smithy-client": "^3.1.12", + "@smithy/types": "^3.3.0", + "@smithy/url-parser": "^3.0.3", + "@smithy/util-base64": "^3.0.0", + "@smithy/util-body-length-browser": "^3.0.0", + "@smithy/util-body-length-node": "^3.0.0", + "@smithy/util-defaults-mode-browser": "^3.0.14", + "@smithy/util-defaults-mode-node": "^3.0.14", + "@smithy/util-endpoints": "^2.0.5", + "@smithy/util-middleware": "^3.0.3", + "@smithy/util-retry": "^3.0.3", + "@smithy/util-utf8": "^3.0.0", + "tslib": "^2.6.2" + }, + "engines": { + "node": ">=16.0.0" + }, + "peerDependencies": { + "@aws-sdk/client-sts": "^3.624.0" + } + }, + "node_modules/@aws-sdk/client-rds/node_modules/@aws-sdk/client-sts": { + "version": "3.624.0", + "resolved": "https://registry.npmjs.org/@aws-sdk/client-sts/-/client-sts-3.624.0.tgz", + "integrity": "sha512-k36fLZCb2nfoV/DKK3jbRgO/Yf7/R80pgYfMiotkGjnZwDmRvNN08z4l06L9C+CieazzkgRxNUzyppsYcYsQaw==", + "dev": true, + "dependencies": { + "@aws-crypto/sha256-browser": "5.2.0", + "@aws-crypto/sha256-js": "5.2.0", + "@aws-sdk/client-sso-oidc": "3.624.0", + "@aws-sdk/core": "3.624.0", + "@aws-sdk/credential-provider-node": "3.624.0", + "@aws-sdk/middleware-host-header": "3.620.0", + "@aws-sdk/middleware-logger": "3.609.0", + "@aws-sdk/middleware-recursion-detection": "3.620.0", + "@aws-sdk/middleware-user-agent": "3.620.0", + "@aws-sdk/region-config-resolver": "3.614.0", + "@aws-sdk/types": "3.609.0", + "@aws-sdk/util-endpoints": "3.614.0", + "@aws-sdk/util-user-agent-browser": "3.609.0", + "@aws-sdk/util-user-agent-node": "3.614.0", + "@smithy/config-resolver": "^3.0.5", + "@smithy/core": "^2.3.2", + "@smithy/fetch-http-handler": "^3.2.4", + "@smithy/hash-node": "^3.0.3", + "@smithy/invalid-dependency": "^3.0.3", + "@smithy/middleware-content-length": "^3.0.5", + "@smithy/middleware-endpoint": "^3.1.0", + 
"@smithy/middleware-retry": "^3.0.14", + "@smithy/middleware-serde": "^3.0.3", + "@smithy/middleware-stack": "^3.0.3", + "@smithy/node-config-provider": "^3.1.4", + "@smithy/node-http-handler": "^3.1.4", + "@smithy/protocol-http": "^4.1.0", + "@smithy/smithy-client": "^3.1.12", + "@smithy/types": "^3.3.0", + "@smithy/url-parser": "^3.0.3", + "@smithy/util-base64": "^3.0.0", + "@smithy/util-body-length-browser": "^3.0.0", + "@smithy/util-body-length-node": "^3.0.0", + "@smithy/util-defaults-mode-browser": "^3.0.14", + "@smithy/util-defaults-mode-node": "^3.0.14", + "@smithy/util-endpoints": "^2.0.5", + "@smithy/util-middleware": "^3.0.3", + "@smithy/util-retry": "^3.0.3", + "@smithy/util-utf8": "^3.0.0", + "tslib": "^2.6.2" + }, + "engines": { + "node": ">=16.0.0" + } + }, + "node_modules/@aws-sdk/client-rds/node_modules/@aws-sdk/core": { + "version": "3.624.0", + "resolved": "https://registry.npmjs.org/@aws-sdk/core/-/core-3.624.0.tgz", + "integrity": "sha512-WyFmPbhRIvtWi7hBp8uSFy+iPpj8ccNV/eX86hwF4irMjfc/FtsGVIAeBXxXM/vGCjkdfEzOnl+tJ2XACD4OXg==", + "dev": true, + "dependencies": { + "@smithy/core": "^2.3.2", + "@smithy/node-config-provider": "^3.1.4", + "@smithy/protocol-http": "^4.1.0", + "@smithy/signature-v4": "^4.1.0", + "@smithy/smithy-client": "^3.1.12", + "@smithy/types": "^3.3.0", + "@smithy/util-middleware": "^3.0.3", + "fast-xml-parser": "4.4.1", + "tslib": "^2.6.2" + }, + "engines": { + "node": ">=16.0.0" + } + }, + "node_modules/@aws-sdk/client-rds/node_modules/@aws-sdk/credential-provider-env": { + "version": "3.620.1", + "resolved": "https://registry.npmjs.org/@aws-sdk/credential-provider-env/-/credential-provider-env-3.620.1.tgz", + "integrity": "sha512-ExuILJ2qLW5ZO+rgkNRj0xiAipKT16Rk77buvPP8csR7kkCflT/gXTyzRe/uzIiETTxM7tr8xuO9MP/DQXqkfg==", + "dev": true, + "dependencies": { + "@aws-sdk/types": "3.609.0", + "@smithy/property-provider": "^3.1.3", + "@smithy/types": "^3.3.0", + "tslib": "^2.6.2" + }, + "engines": { + "node": ">=16.0.0" + } + }, + 
"node_modules/@aws-sdk/client-rds/node_modules/@aws-sdk/credential-provider-http": { + "version": "3.622.0", + "resolved": "https://registry.npmjs.org/@aws-sdk/credential-provider-http/-/credential-provider-http-3.622.0.tgz", + "integrity": "sha512-VUHbr24Oll1RK3WR8XLUugLpgK9ZuxEm/NVeVqyFts1Ck9gsKpRg1x4eH7L7tW3SJ4TDEQNMbD7/7J+eoL2svg==", + "dev": true, + "dependencies": { + "@aws-sdk/types": "3.609.0", + "@smithy/fetch-http-handler": "^3.2.4", + "@smithy/node-http-handler": "^3.1.4", + "@smithy/property-provider": "^3.1.3", + "@smithy/protocol-http": "^4.1.0", + "@smithy/smithy-client": "^3.1.12", + "@smithy/types": "^3.3.0", + "@smithy/util-stream": "^3.1.3", + "tslib": "^2.6.2" + }, + "engines": { + "node": ">=16.0.0" + } + }, + "node_modules/@aws-sdk/client-rds/node_modules/@aws-sdk/credential-provider-ini": { + "version": "3.624.0", + "resolved": "https://registry.npmjs.org/@aws-sdk/credential-provider-ini/-/credential-provider-ini-3.624.0.tgz", + "integrity": "sha512-mMoNIy7MO2WTBbdqMyLpbt6SZpthE6e0GkRYpsd0yozPt0RZopcBhEh+HG1U9Y1PVODo+jcMk353vAi61CfnhQ==", + "dev": true, + "dependencies": { + "@aws-sdk/credential-provider-env": "3.620.1", + "@aws-sdk/credential-provider-http": "3.622.0", + "@aws-sdk/credential-provider-process": "3.620.1", + "@aws-sdk/credential-provider-sso": "3.624.0", + "@aws-sdk/credential-provider-web-identity": "3.621.0", + "@aws-sdk/types": "3.609.0", + "@smithy/credential-provider-imds": "^3.2.0", + "@smithy/property-provider": "^3.1.3", + "@smithy/shared-ini-file-loader": "^3.1.4", + "@smithy/types": "^3.3.0", + "tslib": "^2.6.2" + }, + "engines": { + "node": ">=16.0.0" + }, + "peerDependencies": { + "@aws-sdk/client-sts": "^3.624.0" + } + }, + "node_modules/@aws-sdk/client-rds/node_modules/@aws-sdk/credential-provider-node": { + "version": "3.624.0", + "resolved": "https://registry.npmjs.org/@aws-sdk/credential-provider-node/-/credential-provider-node-3.624.0.tgz", + "integrity": 
"sha512-vYyGK7oNpd81BdbH5IlmQ6zfaQqU+rPwsKTDDBeLRjshtrGXOEpfoahVpG9PX0ibu32IOWp4ZyXBNyVrnvcMOw==", + "dev": true, + "dependencies": { + "@aws-sdk/credential-provider-env": "3.620.1", + "@aws-sdk/credential-provider-http": "3.622.0", + "@aws-sdk/credential-provider-ini": "3.624.0", + "@aws-sdk/credential-provider-process": "3.620.1", + "@aws-sdk/credential-provider-sso": "3.624.0", + "@aws-sdk/credential-provider-web-identity": "3.621.0", + "@aws-sdk/types": "3.609.0", + "@smithy/credential-provider-imds": "^3.2.0", + "@smithy/property-provider": "^3.1.3", + "@smithy/shared-ini-file-loader": "^3.1.4", + "@smithy/types": "^3.3.0", + "tslib": "^2.6.2" + }, + "engines": { + "node": ">=16.0.0" + } + }, + "node_modules/@aws-sdk/client-rds/node_modules/@aws-sdk/credential-provider-process": { + "version": "3.620.1", + "resolved": "https://registry.npmjs.org/@aws-sdk/credential-provider-process/-/credential-provider-process-3.620.1.tgz", + "integrity": "sha512-hWqFMidqLAkaV9G460+1at6qa9vySbjQKKc04p59OT7lZ5cO5VH5S4aI05e+m4j364MBROjjk2ugNvfNf/8ILg==", + "dev": true, + "dependencies": { + "@aws-sdk/types": "3.609.0", + "@smithy/property-provider": "^3.1.3", + "@smithy/shared-ini-file-loader": "^3.1.4", + "@smithy/types": "^3.3.0", + "tslib": "^2.6.2" + }, + "engines": { + "node": ">=16.0.0" + } + }, + "node_modules/@aws-sdk/client-rds/node_modules/@aws-sdk/credential-provider-sso": { + "version": "3.624.0", + "resolved": "https://registry.npmjs.org/@aws-sdk/credential-provider-sso/-/credential-provider-sso-3.624.0.tgz", + "integrity": "sha512-A02bayIjU9APEPKr3HudrFHEx0WfghoSPsPopckDkW7VBqO4wizzcxr75Q9A3vNX+cwg0wCN6UitTNe6pVlRaQ==", + "dev": true, + "dependencies": { + "@aws-sdk/client-sso": "3.624.0", + "@aws-sdk/token-providers": "3.614.0", + "@aws-sdk/types": "3.609.0", + "@smithy/property-provider": "^3.1.3", + "@smithy/shared-ini-file-loader": "^3.1.4", + "@smithy/types": "^3.3.0", + "tslib": "^2.6.2" + }, + "engines": { + "node": ">=16.0.0" + } + }, + 
"node_modules/@aws-sdk/client-rds/node_modules/@aws-sdk/credential-provider-web-identity": { + "version": "3.621.0", + "resolved": "https://registry.npmjs.org/@aws-sdk/credential-provider-web-identity/-/credential-provider-web-identity-3.621.0.tgz", + "integrity": "sha512-w7ASSyfNvcx7+bYGep3VBgC3K6vEdLmlpjT7nSIHxxQf+WSdvy+HynwJosrpZax0sK5q0D1Jpn/5q+r5lwwW6w==", + "dev": true, + "dependencies": { + "@aws-sdk/types": "3.609.0", + "@smithy/property-provider": "^3.1.3", + "@smithy/types": "^3.3.0", + "tslib": "^2.6.2" + }, + "engines": { + "node": ">=16.0.0" + }, + "peerDependencies": { + "@aws-sdk/client-sts": "^3.621.0" + } + }, + "node_modules/@aws-sdk/client-rds/node_modules/@aws-sdk/middleware-host-header": { + "version": "3.620.0", + "resolved": "https://registry.npmjs.org/@aws-sdk/middleware-host-header/-/middleware-host-header-3.620.0.tgz", + "integrity": "sha512-VMtPEZwqYrII/oUkffYsNWY9PZ9xpNJpMgmyU0rlDQ25O1c0Hk3fJmZRe6pEkAJ0omD7kLrqGl1DUjQVxpd/Rg==", + "dev": true, + "dependencies": { + "@aws-sdk/types": "3.609.0", + "@smithy/protocol-http": "^4.1.0", + "@smithy/types": "^3.3.0", + "tslib": "^2.6.2" + }, + "engines": { + "node": ">=16.0.0" + } + }, + "node_modules/@aws-sdk/client-rds/node_modules/@aws-sdk/middleware-logger": { + "version": "3.609.0", + "resolved": "https://registry.npmjs.org/@aws-sdk/middleware-logger/-/middleware-logger-3.609.0.tgz", + "integrity": "sha512-S62U2dy4jMDhDFDK5gZ4VxFdWzCtLzwbYyFZx2uvPYTECkepLUfzLic2BHg2Qvtu4QjX+oGE3P/7fwaGIsGNuQ==", + "dev": true, + "dependencies": { + "@aws-sdk/types": "3.609.0", + "@smithy/types": "^3.3.0", + "tslib": "^2.6.2" + }, + "engines": { + "node": ">=16.0.0" + } + }, + "node_modules/@aws-sdk/client-rds/node_modules/@aws-sdk/middleware-recursion-detection": { + "version": "3.620.0", + "resolved": "https://registry.npmjs.org/@aws-sdk/middleware-recursion-detection/-/middleware-recursion-detection-3.620.0.tgz", + "integrity": 
"sha512-nh91S7aGK3e/o1ck64sA/CyoFw+gAYj2BDOnoNa6ouyCrVJED96ZXWbhye/fz9SgmNUZR2g7GdVpiLpMKZoI5w==", + "dev": true, + "dependencies": { + "@aws-sdk/types": "3.609.0", + "@smithy/protocol-http": "^4.1.0", + "@smithy/types": "^3.3.0", + "tslib": "^2.6.2" + }, + "engines": { + "node": ">=16.0.0" + } + }, + "node_modules/@aws-sdk/client-rds/node_modules/@aws-sdk/middleware-user-agent": { + "version": "3.620.0", + "resolved": "https://registry.npmjs.org/@aws-sdk/middleware-user-agent/-/middleware-user-agent-3.620.0.tgz", + "integrity": "sha512-bvS6etn+KsuL32ubY5D3xNof1qkenpbJXf/ugGXbg0n98DvDFQ/F+SMLxHgbnER5dsKYchNnhmtI6/FC3HFu/A==", + "dev": true, + "dependencies": { + "@aws-sdk/types": "3.609.0", + "@aws-sdk/util-endpoints": "3.614.0", + "@smithy/protocol-http": "^4.1.0", + "@smithy/types": "^3.3.0", + "tslib": "^2.6.2" + }, + "engines": { + "node": ">=16.0.0" + } + }, + "node_modules/@aws-sdk/client-rds/node_modules/@aws-sdk/region-config-resolver": { + "version": "3.614.0", + "resolved": "https://registry.npmjs.org/@aws-sdk/region-config-resolver/-/region-config-resolver-3.614.0.tgz", + "integrity": "sha512-vDCeMXvic/LU0KFIUjpC3RiSTIkkvESsEfbVHiHH0YINfl8HnEqR5rj+L8+phsCeVg2+LmYwYxd5NRz4PHxt5g==", + "dev": true, + "dependencies": { + "@aws-sdk/types": "3.609.0", + "@smithy/node-config-provider": "^3.1.4", + "@smithy/types": "^3.3.0", + "@smithy/util-config-provider": "^3.0.0", + "@smithy/util-middleware": "^3.0.3", + "tslib": "^2.6.2" + }, + "engines": { + "node": ">=16.0.0" + } + }, + "node_modules/@aws-sdk/client-rds/node_modules/@aws-sdk/token-providers": { + "version": "3.614.0", + "resolved": "https://registry.npmjs.org/@aws-sdk/token-providers/-/token-providers-3.614.0.tgz", + "integrity": "sha512-okItqyY6L9IHdxqs+Z116y5/nda7rHxLvROxtAJdLavWTYDydxrZstImNgGWTeVdmc0xX2gJCI77UYUTQWnhRw==", + "dev": true, + "dependencies": { + "@aws-sdk/types": "3.609.0", + "@smithy/property-provider": "^3.1.3", + "@smithy/shared-ini-file-loader": "^3.1.4", + "@smithy/types": 
"^3.3.0", + "tslib": "^2.6.2" + }, + "engines": { + "node": ">=16.0.0" + }, + "peerDependencies": { + "@aws-sdk/client-sso-oidc": "^3.614.0" + } + }, + "node_modules/@aws-sdk/client-rds/node_modules/@aws-sdk/types": { + "version": "3.609.0", + "resolved": "https://registry.npmjs.org/@aws-sdk/types/-/types-3.609.0.tgz", + "integrity": "sha512-+Tqnh9w0h2LcrUsdXyT1F8mNhXz+tVYBtP19LpeEGntmvHwa2XzvLUCWpoIAIVsHp5+HdB2X9Sn0KAtmbFXc2Q==", + "dev": true, + "dependencies": { + "@smithy/types": "^3.3.0", + "tslib": "^2.6.2" + }, + "engines": { + "node": ">=16.0.0" + } + }, + "node_modules/@aws-sdk/client-rds/node_modules/@aws-sdk/util-endpoints": { + "version": "3.614.0", + "resolved": "https://registry.npmjs.org/@aws-sdk/util-endpoints/-/util-endpoints-3.614.0.tgz", + "integrity": "sha512-wK2cdrXHH4oz4IomV/yrGkftU9A+ITB6nFL+rxxyO78is2ifHJpFdV4aqk4LSkXYPi6CXWNru/Dqc7yiKXgJPw==", + "dev": true, + "dependencies": { + "@aws-sdk/types": "3.609.0", + "@smithy/types": "^3.3.0", + "@smithy/util-endpoints": "^2.0.5", + "tslib": "^2.6.2" + }, + "engines": { + "node": ">=16.0.0" + } + }, + "node_modules/@aws-sdk/client-rds/node_modules/@aws-sdk/util-user-agent-browser": { + "version": "3.609.0", + "resolved": "https://registry.npmjs.org/@aws-sdk/util-user-agent-browser/-/util-user-agent-browser-3.609.0.tgz", + "integrity": "sha512-fojPU+mNahzQ0YHYBsx0ZIhmMA96H+ZIZ665ObU9tl+SGdbLneVZVikGve+NmHTQwHzwkFsZYYnVKAkreJLAtA==", + "dev": true, + "dependencies": { + "@aws-sdk/types": "3.609.0", + "@smithy/types": "^3.3.0", + "bowser": "^2.11.0", + "tslib": "^2.6.2" + } + }, + "node_modules/@aws-sdk/client-rds/node_modules/@aws-sdk/util-user-agent-node": { + "version": "3.614.0", + "resolved": "https://registry.npmjs.org/@aws-sdk/util-user-agent-node/-/util-user-agent-node-3.614.0.tgz", + "integrity": "sha512-15ElZT88peoHnq5TEoEtZwoXTXRxNrk60TZNdpl/TUBJ5oNJ9Dqb5Z4ryb8ofN6nm9aFf59GVAerFDz8iUoHBA==", + "dev": true, + "dependencies": { + "@aws-sdk/types": "3.609.0", + "@smithy/node-config-provider": 
"^3.1.4", + "@smithy/types": "^3.3.0", + "tslib": "^2.6.2" + }, + "engines": { + "node": ">=16.0.0" + }, + "peerDependencies": { + "aws-crt": ">=1.0.0" + }, + "peerDependenciesMeta": { + "aws-crt": { + "optional": true + } + } + }, + "node_modules/@aws-sdk/client-rds/node_modules/@smithy/node-config-provider": { + "version": "3.1.9", + "resolved": "https://registry.npmjs.org/@smithy/node-config-provider/-/node-config-provider-3.1.9.tgz", + "integrity": "sha512-qRHoah49QJ71eemjuS/WhUXB+mpNtwHRWQr77J/m40ewBVVwvo52kYAmb7iuaECgGTTcYxHS4Wmewfwy++ueew==", + "dev": true, + "dependencies": { + "@smithy/property-provider": "^3.1.8", + "@smithy/shared-ini-file-loader": "^3.1.9", + "@smithy/types": "^3.6.0", + "tslib": "^2.6.2" + }, + "engines": { + "node": ">=16.0.0" + } + }, + "node_modules/@aws-sdk/client-rds/node_modules/@smithy/shared-ini-file-loader": { + "version": "3.1.9", + "resolved": "https://registry.npmjs.org/@smithy/shared-ini-file-loader/-/shared-ini-file-loader-3.1.9.tgz", + "integrity": "sha512-/+OsJRNtoRbtsX0UpSgWVxFZLsJHo/4sTr+kBg/J78sr7iC+tHeOvOJrS5hCpVQ6sWBbhWLp1UNiuMyZhE6pmA==", + "dev": true, + "dependencies": { + "@smithy/types": "^3.6.0", + "tslib": "^2.6.2" + }, + "engines": { + "node": ">=16.0.0" + } + }, + "node_modules/@aws-sdk/client-rds/node_modules/@smithy/util-utf8": { + "version": "3.0.0", + "resolved": "https://registry.npmjs.org/@smithy/util-utf8/-/util-utf8-3.0.0.tgz", + "integrity": "sha512-rUeT12bxFnplYDe815GXbq/oixEGHfRFFtcTF3YdDi/JaENIM6aSYYLJydG83UNzLXeRI5K8abYd/8Sp/QM0kA==", + "dev": true, + "dependencies": { + "@smithy/util-buffer-from": "^3.0.0", + "tslib": "^2.6.2" + }, + "engines": { + "node": ">=16.0.0" + } + }, + "node_modules/@aws-sdk/client-s3": { + "version": "3.678.0", + "resolved": "https://registry.npmjs.org/@aws-sdk/client-s3/-/client-s3-3.678.0.tgz", + "integrity": "sha512-2N+cGerOtcijYVRThakA1wwaXjdb7bNX8fMnmNzfqsRu1kASCPNvefhPTAiNl//Hf2l2d+H8TdI3wtLw0KurBQ==", + "dev": true, + "dependencies": { + 
"@aws-crypto/sha1-browser": "5.2.0", + "@aws-crypto/sha256-browser": "5.2.0", + "@aws-crypto/sha256-js": "5.2.0", + "@aws-sdk/client-sso-oidc": "3.678.0", + "@aws-sdk/client-sts": "3.678.0", + "@aws-sdk/core": "3.678.0", + "@aws-sdk/credential-provider-node": "3.678.0", + "@aws-sdk/middleware-bucket-endpoint": "3.667.0", + "@aws-sdk/middleware-expect-continue": "3.667.0", + "@aws-sdk/middleware-flexible-checksums": "3.678.0", + "@aws-sdk/middleware-host-header": "3.667.0", + "@aws-sdk/middleware-location-constraint": "3.667.0", + "@aws-sdk/middleware-logger": "3.667.0", + "@aws-sdk/middleware-recursion-detection": "3.667.0", + "@aws-sdk/middleware-sdk-s3": "3.678.0", + "@aws-sdk/middleware-ssec": "3.667.0", + "@aws-sdk/middleware-user-agent": "3.678.0", + "@aws-sdk/region-config-resolver": "3.667.0", + "@aws-sdk/signature-v4-multi-region": "3.678.0", + "@aws-sdk/types": "3.667.0", + "@aws-sdk/util-endpoints": "3.667.0", + "@aws-sdk/util-user-agent-browser": "3.675.0", + "@aws-sdk/util-user-agent-node": "3.678.0", + "@aws-sdk/xml-builder": "3.662.0", + "@smithy/config-resolver": "^3.0.9", + "@smithy/core": "^2.4.8", + "@smithy/eventstream-serde-browser": "^3.0.10", + "@smithy/eventstream-serde-config-resolver": "^3.0.7", + "@smithy/eventstream-serde-node": "^3.0.9", + "@smithy/fetch-http-handler": "^3.2.9", + "@smithy/hash-blob-browser": "^3.1.6", + "@smithy/hash-node": "^3.0.7", + "@smithy/hash-stream-node": "^3.1.6", + "@smithy/invalid-dependency": "^3.0.7", + "@smithy/md5-js": "^3.0.7", + "@smithy/middleware-content-length": "^3.0.9", + "@smithy/middleware-endpoint": "^3.1.4", + "@smithy/middleware-retry": "^3.0.23", + "@smithy/middleware-serde": "^3.0.7", + "@smithy/middleware-stack": "^3.0.7", + "@smithy/node-config-provider": "^3.1.8", + "@smithy/node-http-handler": "^3.2.4", + "@smithy/protocol-http": "^4.1.4", + "@smithy/smithy-client": "^3.4.0", + "@smithy/types": "^3.5.0", + "@smithy/url-parser": "^3.0.7", + "@smithy/util-base64": "^3.0.0", + 
"@smithy/util-body-length-browser": "^3.0.0", + "@smithy/util-body-length-node": "^3.0.0", + "@smithy/util-defaults-mode-browser": "^3.0.23", + "@smithy/util-defaults-mode-node": "^3.0.23", + "@smithy/util-endpoints": "^2.1.3", + "@smithy/util-middleware": "^3.0.7", + "@smithy/util-retry": "^3.0.7", + "@smithy/util-stream": "^3.1.9", + "@smithy/util-utf8": "^3.0.0", + "@smithy/util-waiter": "^3.1.6", + "tslib": "^2.6.2" + }, + "engines": { + "node": ">=16.0.0" + } + }, + "node_modules/@aws-sdk/client-s3/node_modules/@aws-sdk/region-config-resolver": { + "version": "3.667.0", + "resolved": "https://registry.npmjs.org/@aws-sdk/region-config-resolver/-/region-config-resolver-3.667.0.tgz", + "integrity": "sha512-iNr+JhhA902JMKHG9IwT9YdaEx6KGl6vjAL5BRNeOjfj4cZYMog6Lz/IlfOAltMtT0w88DAHDEFrBd2uO0l2eg==", + "dev": true, + "dependencies": { + "@aws-sdk/types": "3.667.0", + "@smithy/node-config-provider": "^3.1.8", + "@smithy/types": "^3.5.0", + "@smithy/util-config-provider": "^3.0.0", + "@smithy/util-middleware": "^3.0.7", + "tslib": "^2.6.2" + }, + "engines": { + "node": ">=16.0.0" + } + }, + "node_modules/@aws-sdk/client-s3/node_modules/@aws-sdk/types": { + "version": "3.667.0", + "resolved": "https://registry.npmjs.org/@aws-sdk/types/-/types-3.667.0.tgz", + "integrity": "sha512-gYq0xCsqFfQaSL/yT1Gl1vIUjtsg7d7RhnUfsXaHt8xTxOKRTdH9GjbesBjXOzgOvB0W0vfssfreSNGFlOOMJg==", + "dev": true, + "dependencies": { + "@smithy/types": "^3.5.0", + "tslib": "^2.6.2" + }, + "engines": { + "node": ">=16.0.0" + } + }, + "node_modules/@aws-sdk/client-s3/node_modules/@smithy/node-config-provider": { + "version": "3.1.9", + "resolved": "https://registry.npmjs.org/@smithy/node-config-provider/-/node-config-provider-3.1.9.tgz", + "integrity": "sha512-qRHoah49QJ71eemjuS/WhUXB+mpNtwHRWQr77J/m40ewBVVwvo52kYAmb7iuaECgGTTcYxHS4Wmewfwy++ueew==", + "dev": true, + "dependencies": { + "@smithy/property-provider": "^3.1.8", + "@smithy/shared-ini-file-loader": "^3.1.9", + "@smithy/types": "^3.6.0", + 
"tslib": "^2.6.2" + }, + "engines": { + "node": ">=16.0.0" + } + }, + "node_modules/@aws-sdk/client-s3/node_modules/@smithy/shared-ini-file-loader": { + "version": "3.1.9", + "resolved": "https://registry.npmjs.org/@smithy/shared-ini-file-loader/-/shared-ini-file-loader-3.1.9.tgz", + "integrity": "sha512-/+OsJRNtoRbtsX0UpSgWVxFZLsJHo/4sTr+kBg/J78sr7iC+tHeOvOJrS5hCpVQ6sWBbhWLp1UNiuMyZhE6pmA==", + "dev": true, + "dependencies": { + "@smithy/types": "^3.6.0", + "tslib": "^2.6.2" + }, + "engines": { + "node": ">=16.0.0" + } + }, + "node_modules/@aws-sdk/client-s3/node_modules/@smithy/util-utf8": { + "version": "3.0.0", + "resolved": "https://registry.npmjs.org/@smithy/util-utf8/-/util-utf8-3.0.0.tgz", + "integrity": "sha512-rUeT12bxFnplYDe815GXbq/oixEGHfRFFtcTF3YdDi/JaENIM6aSYYLJydG83UNzLXeRI5K8abYd/8Sp/QM0kA==", + "dev": true, + "dependencies": { + "@smithy/util-buffer-from": "^3.0.0", + "tslib": "^2.6.2" + }, + "engines": { + "node": ">=16.0.0" + } + }, + "node_modules/@aws-sdk/client-ssm": { + "version": "3.678.0", + "resolved": "https://registry.npmjs.org/@aws-sdk/client-ssm/-/client-ssm-3.678.0.tgz", + "integrity": "sha512-vJO7iieQq09bMKaGgESibzZaLgm0MIuR9m7SmEPZGMJ4wKhgOosm/P8lFMU+q0lHtCHoxdvjSYcUQga6ZN+fww==", + "dev": true, + "dependencies": { + "@aws-crypto/sha256-browser": "5.2.0", + "@aws-crypto/sha256-js": "5.2.0", + "@aws-sdk/client-sso-oidc": "3.678.0", + "@aws-sdk/client-sts": "3.678.0", + "@aws-sdk/core": "3.678.0", + "@aws-sdk/credential-provider-node": "3.678.0", + "@aws-sdk/middleware-host-header": "3.667.0", + "@aws-sdk/middleware-logger": "3.667.0", + "@aws-sdk/middleware-recursion-detection": "3.667.0", + "@aws-sdk/middleware-user-agent": "3.678.0", + "@aws-sdk/region-config-resolver": "3.667.0", + "@aws-sdk/types": "3.667.0", + "@aws-sdk/util-endpoints": "3.667.0", + "@aws-sdk/util-user-agent-browser": "3.675.0", + "@aws-sdk/util-user-agent-node": "3.678.0", + "@smithy/config-resolver": "^3.0.9", + "@smithy/core": "^2.4.8", + 
"@smithy/fetch-http-handler": "^3.2.9", + "@smithy/hash-node": "^3.0.7", + "@smithy/invalid-dependency": "^3.0.7", + "@smithy/middleware-content-length": "^3.0.9", + "@smithy/middleware-endpoint": "^3.1.4", + "@smithy/middleware-retry": "^3.0.23", + "@smithy/middleware-serde": "^3.0.7", + "@smithy/middleware-stack": "^3.0.7", + "@smithy/node-config-provider": "^3.1.8", + "@smithy/node-http-handler": "^3.2.4", + "@smithy/protocol-http": "^4.1.4", + "@smithy/smithy-client": "^3.4.0", + "@smithy/types": "^3.5.0", + "@smithy/url-parser": "^3.0.7", + "@smithy/util-base64": "^3.0.0", + "@smithy/util-body-length-browser": "^3.0.0", + "@smithy/util-body-length-node": "^3.0.0", + "@smithy/util-defaults-mode-browser": "^3.0.23", + "@smithy/util-defaults-mode-node": "^3.0.23", + "@smithy/util-endpoints": "^2.1.3", + "@smithy/util-middleware": "^3.0.7", + "@smithy/util-retry": "^3.0.7", + "@smithy/util-utf8": "^3.0.0", + "@smithy/util-waiter": "^3.1.6", + "@types/uuid": "^9.0.1", + "tslib": "^2.6.2", + "uuid": "^9.0.1" + }, + "engines": { + "node": ">=16.0.0" + } + }, + "node_modules/@aws-sdk/client-ssm/node_modules/@aws-sdk/region-config-resolver": { + "version": "3.667.0", + "resolved": "https://registry.npmjs.org/@aws-sdk/region-config-resolver/-/region-config-resolver-3.667.0.tgz", + "integrity": "sha512-iNr+JhhA902JMKHG9IwT9YdaEx6KGl6vjAL5BRNeOjfj4cZYMog6Lz/IlfOAltMtT0w88DAHDEFrBd2uO0l2eg==", + "dev": true, + "dependencies": { + "@aws-sdk/types": "3.667.0", + "@smithy/node-config-provider": "^3.1.8", + "@smithy/types": "^3.5.0", + "@smithy/util-config-provider": "^3.0.0", + "@smithy/util-middleware": "^3.0.7", + "tslib": "^2.6.2" + }, + "engines": { + "node": ">=16.0.0" + } + }, + "node_modules/@aws-sdk/client-ssm/node_modules/@aws-sdk/types": { + "version": "3.667.0", + "resolved": "https://registry.npmjs.org/@aws-sdk/types/-/types-3.667.0.tgz", + "integrity": "sha512-gYq0xCsqFfQaSL/yT1Gl1vIUjtsg7d7RhnUfsXaHt8xTxOKRTdH9GjbesBjXOzgOvB0W0vfssfreSNGFlOOMJg==", + "dev": 
true, + "dependencies": { + "@smithy/types": "^3.5.0", + "tslib": "^2.6.2" + }, + "engines": { + "node": ">=16.0.0" + } + }, + "node_modules/@aws-sdk/client-ssm/node_modules/@smithy/node-config-provider": { + "version": "3.1.9", + "resolved": "https://registry.npmjs.org/@smithy/node-config-provider/-/node-config-provider-3.1.9.tgz", + "integrity": "sha512-qRHoah49QJ71eemjuS/WhUXB+mpNtwHRWQr77J/m40ewBVVwvo52kYAmb7iuaECgGTTcYxHS4Wmewfwy++ueew==", + "dev": true, + "dependencies": { + "@smithy/property-provider": "^3.1.8", + "@smithy/shared-ini-file-loader": "^3.1.9", + "@smithy/types": "^3.6.0", + "tslib": "^2.6.2" + }, + "engines": { + "node": ">=16.0.0" + } + }, + "node_modules/@aws-sdk/client-ssm/node_modules/@smithy/shared-ini-file-loader": { + "version": "3.1.9", + "resolved": "https://registry.npmjs.org/@smithy/shared-ini-file-loader/-/shared-ini-file-loader-3.1.9.tgz", + "integrity": "sha512-/+OsJRNtoRbtsX0UpSgWVxFZLsJHo/4sTr+kBg/J78sr7iC+tHeOvOJrS5hCpVQ6sWBbhWLp1UNiuMyZhE6pmA==", + "dev": true, + "dependencies": { + "@smithy/types": "^3.6.0", + "tslib": "^2.6.2" + }, + "engines": { + "node": ">=16.0.0" + } + }, + "node_modules/@aws-sdk/client-ssm/node_modules/@smithy/util-utf8": { + "version": "3.0.0", + "resolved": "https://registry.npmjs.org/@smithy/util-utf8/-/util-utf8-3.0.0.tgz", + "integrity": "sha512-rUeT12bxFnplYDe815GXbq/oixEGHfRFFtcTF3YdDi/JaENIM6aSYYLJydG83UNzLXeRI5K8abYd/8Sp/QM0kA==", + "dev": true, + "dependencies": { + "@smithy/util-buffer-from": "^3.0.0", + "tslib": "^2.6.2" + }, + "engines": { + "node": ">=16.0.0" + } + }, + "node_modules/@aws-sdk/client-sso": { + "version": "3.678.0", + "resolved": "https://registry.npmjs.org/@aws-sdk/client-sso/-/client-sso-3.678.0.tgz", + "integrity": "sha512-5Fg2BkR1En8iBbiZ18STvLDGPK9Re5MyCmX+hfIhQzPsEf1FRkAkOluEXX79aBva8iWn2oCD/xKBUku4x3eusw==", + "dev": true, + "dependencies": { + "@aws-crypto/sha256-browser": "5.2.0", + "@aws-crypto/sha256-js": "5.2.0", + "@aws-sdk/core": "3.678.0", + 
"@aws-sdk/middleware-host-header": "3.667.0", + "@aws-sdk/middleware-logger": "3.667.0", + "@aws-sdk/middleware-recursion-detection": "3.667.0", + "@aws-sdk/middleware-user-agent": "3.678.0", + "@aws-sdk/region-config-resolver": "3.667.0", + "@aws-sdk/types": "3.667.0", + "@aws-sdk/util-endpoints": "3.667.0", + "@aws-sdk/util-user-agent-browser": "3.675.0", + "@aws-sdk/util-user-agent-node": "3.678.0", + "@smithy/config-resolver": "^3.0.9", + "@smithy/core": "^2.4.8", + "@smithy/fetch-http-handler": "^3.2.9", + "@smithy/hash-node": "^3.0.7", + "@smithy/invalid-dependency": "^3.0.7", + "@smithy/middleware-content-length": "^3.0.9", + "@smithy/middleware-endpoint": "^3.1.4", + "@smithy/middleware-retry": "^3.0.23", + "@smithy/middleware-serde": "^3.0.7", + "@smithy/middleware-stack": "^3.0.7", + "@smithy/node-config-provider": "^3.1.8", + "@smithy/node-http-handler": "^3.2.4", + "@smithy/protocol-http": "^4.1.4", + "@smithy/smithy-client": "^3.4.0", + "@smithy/types": "^3.5.0", + "@smithy/url-parser": "^3.0.7", + "@smithy/util-base64": "^3.0.0", + "@smithy/util-body-length-browser": "^3.0.0", + "@smithy/util-body-length-node": "^3.0.0", + "@smithy/util-defaults-mode-browser": "^3.0.23", + "@smithy/util-defaults-mode-node": "^3.0.23", + "@smithy/util-endpoints": "^2.1.3", + "@smithy/util-middleware": "^3.0.7", + "@smithy/util-retry": "^3.0.7", + "@smithy/util-utf8": "^3.0.0", + "tslib": "^2.6.2" + }, + "engines": { + "node": ">=16.0.0" + } + }, + "node_modules/@aws-sdk/client-sso-oidc": { + "version": "3.678.0", + "resolved": "https://registry.npmjs.org/@aws-sdk/client-sso-oidc/-/client-sso-oidc-3.678.0.tgz", + "integrity": "sha512-sgj9Y4zGiwLePLDjqhGoghoZgseh88JkKkwWH558IIte/cf/ix7ezOvptnA0WUlI5Z/329LtkN6O8TRqSJ7MWw==", + "dev": true, + "dependencies": { + "@aws-crypto/sha256-browser": "5.2.0", + "@aws-crypto/sha256-js": "5.2.0", + "@aws-sdk/core": "3.678.0", + "@aws-sdk/credential-provider-node": "3.678.0", + "@aws-sdk/middleware-host-header": "3.667.0", + 
"@aws-sdk/middleware-logger": "3.667.0", + "@aws-sdk/middleware-recursion-detection": "3.667.0", + "@aws-sdk/middleware-user-agent": "3.678.0", + "@aws-sdk/region-config-resolver": "3.667.0", + "@aws-sdk/types": "3.667.0", + "@aws-sdk/util-endpoints": "3.667.0", + "@aws-sdk/util-user-agent-browser": "3.675.0", + "@aws-sdk/util-user-agent-node": "3.678.0", + "@smithy/config-resolver": "^3.0.9", + "@smithy/core": "^2.4.8", + "@smithy/fetch-http-handler": "^3.2.9", + "@smithy/hash-node": "^3.0.7", + "@smithy/invalid-dependency": "^3.0.7", + "@smithy/middleware-content-length": "^3.0.9", + "@smithy/middleware-endpoint": "^3.1.4", + "@smithy/middleware-retry": "^3.0.23", + "@smithy/middleware-serde": "^3.0.7", + "@smithy/middleware-stack": "^3.0.7", + "@smithy/node-config-provider": "^3.1.8", + "@smithy/node-http-handler": "^3.2.4", + "@smithy/protocol-http": "^4.1.4", + "@smithy/smithy-client": "^3.4.0", + "@smithy/types": "^3.5.0", + "@smithy/url-parser": "^3.0.7", + "@smithy/util-base64": "^3.0.0", + "@smithy/util-body-length-browser": "^3.0.0", + "@smithy/util-body-length-node": "^3.0.0", + "@smithy/util-defaults-mode-browser": "^3.0.23", + "@smithy/util-defaults-mode-node": "^3.0.23", + "@smithy/util-endpoints": "^2.1.3", + "@smithy/util-middleware": "^3.0.7", + "@smithy/util-retry": "^3.0.7", + "@smithy/util-utf8": "^3.0.0", + "tslib": "^2.6.2" + }, + "engines": { + "node": ">=16.0.0" + }, + "peerDependencies": { + "@aws-sdk/client-sts": "^3.678.0" + } + }, + "node_modules/@aws-sdk/client-sso-oidc/node_modules/@aws-sdk/region-config-resolver": { + "version": "3.667.0", + "resolved": "https://registry.npmjs.org/@aws-sdk/region-config-resolver/-/region-config-resolver-3.667.0.tgz", + "integrity": "sha512-iNr+JhhA902JMKHG9IwT9YdaEx6KGl6vjAL5BRNeOjfj4cZYMog6Lz/IlfOAltMtT0w88DAHDEFrBd2uO0l2eg==", + "dev": true, + "dependencies": { + "@aws-sdk/types": "3.667.0", + "@smithy/node-config-provider": "^3.1.8", + "@smithy/types": "^3.5.0", + "@smithy/util-config-provider": 
"^3.0.0", + "@smithy/util-middleware": "^3.0.7", + "tslib": "^2.6.2" + }, + "engines": { + "node": ">=16.0.0" + } + }, + "node_modules/@aws-sdk/client-sso-oidc/node_modules/@aws-sdk/types": { + "version": "3.667.0", + "resolved": "https://registry.npmjs.org/@aws-sdk/types/-/types-3.667.0.tgz", + "integrity": "sha512-gYq0xCsqFfQaSL/yT1Gl1vIUjtsg7d7RhnUfsXaHt8xTxOKRTdH9GjbesBjXOzgOvB0W0vfssfreSNGFlOOMJg==", + "dev": true, + "dependencies": { + "@smithy/types": "^3.5.0", + "tslib": "^2.6.2" + }, + "engines": { + "node": ">=16.0.0" + } + }, + "node_modules/@aws-sdk/client-sso-oidc/node_modules/@smithy/node-config-provider": { + "version": "3.1.9", + "resolved": "https://registry.npmjs.org/@smithy/node-config-provider/-/node-config-provider-3.1.9.tgz", + "integrity": "sha512-qRHoah49QJ71eemjuS/WhUXB+mpNtwHRWQr77J/m40ewBVVwvo52kYAmb7iuaECgGTTcYxHS4Wmewfwy++ueew==", + "dev": true, + "dependencies": { + "@smithy/property-provider": "^3.1.8", + "@smithy/shared-ini-file-loader": "^3.1.9", + "@smithy/types": "^3.6.0", + "tslib": "^2.6.2" + }, + "engines": { + "node": ">=16.0.0" + } + }, + "node_modules/@aws-sdk/client-sso-oidc/node_modules/@smithy/shared-ini-file-loader": { + "version": "3.1.9", + "resolved": "https://registry.npmjs.org/@smithy/shared-ini-file-loader/-/shared-ini-file-loader-3.1.9.tgz", + "integrity": "sha512-/+OsJRNtoRbtsX0UpSgWVxFZLsJHo/4sTr+kBg/J78sr7iC+tHeOvOJrS5hCpVQ6sWBbhWLp1UNiuMyZhE6pmA==", + "dev": true, + "dependencies": { + "@smithy/types": "^3.6.0", + "tslib": "^2.6.2" + }, + "engines": { + "node": ">=16.0.0" + } + }, + "node_modules/@aws-sdk/client-sso-oidc/node_modules/@smithy/util-utf8": { + "version": "3.0.0", + "resolved": "https://registry.npmjs.org/@smithy/util-utf8/-/util-utf8-3.0.0.tgz", + "integrity": "sha512-rUeT12bxFnplYDe815GXbq/oixEGHfRFFtcTF3YdDi/JaENIM6aSYYLJydG83UNzLXeRI5K8abYd/8Sp/QM0kA==", + "dev": true, + "dependencies": { + "@smithy/util-buffer-from": "^3.0.0", + "tslib": "^2.6.2" + }, + "engines": { + "node": ">=16.0.0" + } + 
}, + "node_modules/@aws-sdk/client-sso/node_modules/@aws-sdk/region-config-resolver": { + "version": "3.667.0", + "resolved": "https://registry.npmjs.org/@aws-sdk/region-config-resolver/-/region-config-resolver-3.667.0.tgz", + "integrity": "sha512-iNr+JhhA902JMKHG9IwT9YdaEx6KGl6vjAL5BRNeOjfj4cZYMog6Lz/IlfOAltMtT0w88DAHDEFrBd2uO0l2eg==", + "dev": true, + "dependencies": { + "@aws-sdk/types": "3.667.0", + "@smithy/node-config-provider": "^3.1.8", + "@smithy/types": "^3.5.0", + "@smithy/util-config-provider": "^3.0.0", + "@smithy/util-middleware": "^3.0.7", + "tslib": "^2.6.2" + }, + "engines": { + "node": ">=16.0.0" + } + }, + "node_modules/@aws-sdk/client-sso/node_modules/@aws-sdk/types": { + "version": "3.667.0", + "resolved": "https://registry.npmjs.org/@aws-sdk/types/-/types-3.667.0.tgz", + "integrity": "sha512-gYq0xCsqFfQaSL/yT1Gl1vIUjtsg7d7RhnUfsXaHt8xTxOKRTdH9GjbesBjXOzgOvB0W0vfssfreSNGFlOOMJg==", + "dev": true, + "dependencies": { + "@smithy/types": "^3.5.0", + "tslib": "^2.6.2" + }, + "engines": { + "node": ">=16.0.0" + } + }, + "node_modules/@aws-sdk/client-sso/node_modules/@smithy/node-config-provider": { + "version": "3.1.9", + "resolved": "https://registry.npmjs.org/@smithy/node-config-provider/-/node-config-provider-3.1.9.tgz", + "integrity": "sha512-qRHoah49QJ71eemjuS/WhUXB+mpNtwHRWQr77J/m40ewBVVwvo52kYAmb7iuaECgGTTcYxHS4Wmewfwy++ueew==", + "dev": true, + "dependencies": { + "@smithy/property-provider": "^3.1.8", + "@smithy/shared-ini-file-loader": "^3.1.9", + "@smithy/types": "^3.6.0", + "tslib": "^2.6.2" + }, + "engines": { + "node": ">=16.0.0" + } + }, + "node_modules/@aws-sdk/client-sso/node_modules/@smithy/shared-ini-file-loader": { + "version": "3.1.9", + "resolved": "https://registry.npmjs.org/@smithy/shared-ini-file-loader/-/shared-ini-file-loader-3.1.9.tgz", + "integrity": "sha512-/+OsJRNtoRbtsX0UpSgWVxFZLsJHo/4sTr+kBg/J78sr7iC+tHeOvOJrS5hCpVQ6sWBbhWLp1UNiuMyZhE6pmA==", + "dev": true, + "dependencies": { + "@smithy/types": "^3.6.0", + "tslib": 
"^2.6.2" + }, + "engines": { + "node": ">=16.0.0" + } + }, + "node_modules/@aws-sdk/client-sso/node_modules/@smithy/util-utf8": { + "version": "3.0.0", + "resolved": "https://registry.npmjs.org/@smithy/util-utf8/-/util-utf8-3.0.0.tgz", + "integrity": "sha512-rUeT12bxFnplYDe815GXbq/oixEGHfRFFtcTF3YdDi/JaENIM6aSYYLJydG83UNzLXeRI5K8abYd/8Sp/QM0kA==", + "dev": true, + "dependencies": { + "@smithy/util-buffer-from": "^3.0.0", + "tslib": "^2.6.2" + }, + "engines": { + "node": ">=16.0.0" + } + }, + "node_modules/@aws-sdk/client-sts": { + "version": "3.678.0", + "resolved": "https://registry.npmjs.org/@aws-sdk/client-sts/-/client-sts-3.678.0.tgz", + "integrity": "sha512-oRtDnbqIuTbBq0xd7XlaugDA41EqRFzWLpPNr4uwkH8L7xwtIByfJG/qXx2OtOiFFasAhMWJLu/DDqWZyp819A==", + "dev": true, + "dependencies": { + "@aws-crypto/sha256-browser": "5.2.0", + "@aws-crypto/sha256-js": "5.2.0", + "@aws-sdk/client-sso-oidc": "3.678.0", + "@aws-sdk/core": "3.678.0", + "@aws-sdk/credential-provider-node": "3.678.0", + "@aws-sdk/middleware-host-header": "3.667.0", + "@aws-sdk/middleware-logger": "3.667.0", + "@aws-sdk/middleware-recursion-detection": "3.667.0", + "@aws-sdk/middleware-user-agent": "3.678.0", + "@aws-sdk/region-config-resolver": "3.667.0", + "@aws-sdk/types": "3.667.0", + "@aws-sdk/util-endpoints": "3.667.0", + "@aws-sdk/util-user-agent-browser": "3.675.0", + "@aws-sdk/util-user-agent-node": "3.678.0", + "@smithy/config-resolver": "^3.0.9", + "@smithy/core": "^2.4.8", + "@smithy/fetch-http-handler": "^3.2.9", + "@smithy/hash-node": "^3.0.7", + "@smithy/invalid-dependency": "^3.0.7", + "@smithy/middleware-content-length": "^3.0.9", + "@smithy/middleware-endpoint": "^3.1.4", + "@smithy/middleware-retry": "^3.0.23", + "@smithy/middleware-serde": "^3.0.7", + "@smithy/middleware-stack": "^3.0.7", + "@smithy/node-config-provider": "^3.1.8", + "@smithy/node-http-handler": "^3.2.4", + "@smithy/protocol-http": "^4.1.4", + "@smithy/smithy-client": "^3.4.0", + "@smithy/types": "^3.5.0", + 
"@smithy/url-parser": "^3.0.7", + "@smithy/util-base64": "^3.0.0", + "@smithy/util-body-length-browser": "^3.0.0", + "@smithy/util-body-length-node": "^3.0.0", + "@smithy/util-defaults-mode-browser": "^3.0.23", + "@smithy/util-defaults-mode-node": "^3.0.23", + "@smithy/util-endpoints": "^2.1.3", + "@smithy/util-middleware": "^3.0.7", + "@smithy/util-retry": "^3.0.7", + "@smithy/util-utf8": "^3.0.0", + "tslib": "^2.6.2" + }, + "engines": { + "node": ">=16.0.0" + } + }, + "node_modules/@aws-sdk/client-sts/node_modules/@aws-sdk/region-config-resolver": { + "version": "3.667.0", + "resolved": "https://registry.npmjs.org/@aws-sdk/region-config-resolver/-/region-config-resolver-3.667.0.tgz", + "integrity": "sha512-iNr+JhhA902JMKHG9IwT9YdaEx6KGl6vjAL5BRNeOjfj4cZYMog6Lz/IlfOAltMtT0w88DAHDEFrBd2uO0l2eg==", + "dev": true, + "dependencies": { + "@aws-sdk/types": "3.667.0", + "@smithy/node-config-provider": "^3.1.8", + "@smithy/types": "^3.5.0", + "@smithy/util-config-provider": "^3.0.0", + "@smithy/util-middleware": "^3.0.7", + "tslib": "^2.6.2" + }, + "engines": { + "node": ">=16.0.0" + } + }, + "node_modules/@aws-sdk/client-sts/node_modules/@aws-sdk/types": { + "version": "3.667.0", + "resolved": "https://registry.npmjs.org/@aws-sdk/types/-/types-3.667.0.tgz", + "integrity": "sha512-gYq0xCsqFfQaSL/yT1Gl1vIUjtsg7d7RhnUfsXaHt8xTxOKRTdH9GjbesBjXOzgOvB0W0vfssfreSNGFlOOMJg==", + "dev": true, + "dependencies": { + "@smithy/types": "^3.5.0", + "tslib": "^2.6.2" + }, + "engines": { + "node": ">=16.0.0" + } + }, + "node_modules/@aws-sdk/client-sts/node_modules/@smithy/node-config-provider": { + "version": "3.1.9", + "resolved": "https://registry.npmjs.org/@smithy/node-config-provider/-/node-config-provider-3.1.9.tgz", + "integrity": "sha512-qRHoah49QJ71eemjuS/WhUXB+mpNtwHRWQr77J/m40ewBVVwvo52kYAmb7iuaECgGTTcYxHS4Wmewfwy++ueew==", + "dev": true, + "dependencies": { + "@smithy/property-provider": "^3.1.8", + "@smithy/shared-ini-file-loader": "^3.1.9", + "@smithy/types": "^3.6.0", + 
"tslib": "^2.6.2" + }, + "engines": { + "node": ">=16.0.0" + } + }, + "node_modules/@aws-sdk/client-sts/node_modules/@smithy/shared-ini-file-loader": { + "version": "3.1.9", + "resolved": "https://registry.npmjs.org/@smithy/shared-ini-file-loader/-/shared-ini-file-loader-3.1.9.tgz", + "integrity": "sha512-/+OsJRNtoRbtsX0UpSgWVxFZLsJHo/4sTr+kBg/J78sr7iC+tHeOvOJrS5hCpVQ6sWBbhWLp1UNiuMyZhE6pmA==", + "dev": true, + "dependencies": { + "@smithy/types": "^3.6.0", + "tslib": "^2.6.2" + }, + "engines": { + "node": ">=16.0.0" + } + }, + "node_modules/@aws-sdk/client-sts/node_modules/@smithy/util-utf8": { + "version": "3.0.0", + "resolved": "https://registry.npmjs.org/@smithy/util-utf8/-/util-utf8-3.0.0.tgz", + "integrity": "sha512-rUeT12bxFnplYDe815GXbq/oixEGHfRFFtcTF3YdDi/JaENIM6aSYYLJydG83UNzLXeRI5K8abYd/8Sp/QM0kA==", + "dev": true, + "dependencies": { + "@smithy/util-buffer-from": "^3.0.0", + "tslib": "^2.6.2" + }, + "engines": { + "node": ">=16.0.0" + } + }, + "node_modules/@aws-sdk/core": { + "version": "3.678.0", + "resolved": "https://registry.npmjs.org/@aws-sdk/core/-/core-3.678.0.tgz", + "integrity": "sha512-ZTzybFZqSaPQymgRkTl08vk6xilaxr8LnJOc0h3KhcHLK4TJmdOcxqPpa6QxrBKcn2rmxzGiPRbAHLGI+BIxBw==", + "dev": true, + "dependencies": { + "@aws-sdk/types": "3.667.0", + "@smithy/core": "^2.4.8", + "@smithy/node-config-provider": "^3.1.8", + "@smithy/property-provider": "^3.1.7", + "@smithy/protocol-http": "^4.1.4", + "@smithy/signature-v4": "^4.2.0", + "@smithy/smithy-client": "^3.4.0", + "@smithy/types": "^3.5.0", + "@smithy/util-middleware": "^3.0.7", + "fast-xml-parser": "4.4.1", + "tslib": "^2.6.2" + }, + "engines": { + "node": ">=16.0.0" + } + }, + "node_modules/@aws-sdk/core/node_modules/@aws-sdk/types": { + "version": "3.667.0", + "resolved": "https://registry.npmjs.org/@aws-sdk/types/-/types-3.667.0.tgz", + "integrity": "sha512-gYq0xCsqFfQaSL/yT1Gl1vIUjtsg7d7RhnUfsXaHt8xTxOKRTdH9GjbesBjXOzgOvB0W0vfssfreSNGFlOOMJg==", + "dev": true, + "dependencies": { + 
"@smithy/types": "^3.5.0", + "tslib": "^2.6.2" + }, + "engines": { + "node": ">=16.0.0" + } + }, + "node_modules/@aws-sdk/core/node_modules/@smithy/node-config-provider": { + "version": "3.1.9", + "resolved": "https://registry.npmjs.org/@smithy/node-config-provider/-/node-config-provider-3.1.9.tgz", + "integrity": "sha512-qRHoah49QJ71eemjuS/WhUXB+mpNtwHRWQr77J/m40ewBVVwvo52kYAmb7iuaECgGTTcYxHS4Wmewfwy++ueew==", + "dev": true, + "dependencies": { + "@smithy/property-provider": "^3.1.8", + "@smithy/shared-ini-file-loader": "^3.1.9", + "@smithy/types": "^3.6.0", + "tslib": "^2.6.2" + }, + "engines": { + "node": ">=16.0.0" + } + }, + "node_modules/@aws-sdk/core/node_modules/@smithy/shared-ini-file-loader": { + "version": "3.1.9", + "resolved": "https://registry.npmjs.org/@smithy/shared-ini-file-loader/-/shared-ini-file-loader-3.1.9.tgz", + "integrity": "sha512-/+OsJRNtoRbtsX0UpSgWVxFZLsJHo/4sTr+kBg/J78sr7iC+tHeOvOJrS5hCpVQ6sWBbhWLp1UNiuMyZhE6pmA==", + "dev": true, + "dependencies": { + "@smithy/types": "^3.6.0", + "tslib": "^2.6.2" + }, + "engines": { + "node": ">=16.0.0" + } + }, + "node_modules/@aws-sdk/credential-provider-cognito-identity": { + "version": "3.678.0", + "resolved": "https://registry.npmjs.org/@aws-sdk/credential-provider-cognito-identity/-/credential-provider-cognito-identity-3.678.0.tgz", + "integrity": "sha512-t9bgu2Kc0H8FdQsSrkIJ42vis0CaVxUlA0wmmNyh268ZZyT9lKXUmf91QIhWbZ1zHx8Ek2u301xusoIaj4mLHA==", + "dev": true, + "dependencies": { + "@aws-sdk/client-cognito-identity": "3.678.0", + "@aws-sdk/types": "3.667.0", + "@smithy/property-provider": "^3.1.7", + "@smithy/types": "^3.5.0", + "tslib": "^2.6.2" + }, + "engines": { + "node": ">=16.0.0" + } + }, + "node_modules/@aws-sdk/credential-provider-cognito-identity/node_modules/@aws-sdk/types": { + "version": "3.667.0", + "resolved": "https://registry.npmjs.org/@aws-sdk/types/-/types-3.667.0.tgz", + "integrity": 
"sha512-gYq0xCsqFfQaSL/yT1Gl1vIUjtsg7d7RhnUfsXaHt8xTxOKRTdH9GjbesBjXOzgOvB0W0vfssfreSNGFlOOMJg==", + "dev": true, + "dependencies": { + "@smithy/types": "^3.5.0", + "tslib": "^2.6.2" + }, + "engines": { + "node": ">=16.0.0" + } + }, + "node_modules/@aws-sdk/credential-provider-env": { + "version": "3.678.0", + "resolved": "https://registry.npmjs.org/@aws-sdk/credential-provider-env/-/credential-provider-env-3.678.0.tgz", + "integrity": "sha512-29uhXAB7uJqHtvJ2U3pi1YkMfv0WefW9EmSMoFAunjudXXBVktwTlWg0lyCM+KHrGKLkQyfs5UF/A9IelS8tdQ==", + "dev": true, + "dependencies": { + "@aws-sdk/core": "3.678.0", + "@aws-sdk/types": "3.667.0", + "@smithy/property-provider": "^3.1.7", + "@smithy/types": "^3.5.0", + "tslib": "^2.6.2" + }, + "engines": { + "node": ">=16.0.0" + } + }, + "node_modules/@aws-sdk/credential-provider-env/node_modules/@aws-sdk/types": { + "version": "3.667.0", + "resolved": "https://registry.npmjs.org/@aws-sdk/types/-/types-3.667.0.tgz", + "integrity": "sha512-gYq0xCsqFfQaSL/yT1Gl1vIUjtsg7d7RhnUfsXaHt8xTxOKRTdH9GjbesBjXOzgOvB0W0vfssfreSNGFlOOMJg==", + "dev": true, + "dependencies": { + "@smithy/types": "^3.5.0", + "tslib": "^2.6.2" + }, + "engines": { + "node": ">=16.0.0" + } + }, + "node_modules/@aws-sdk/credential-provider-http": { + "version": "3.678.0", + "resolved": "https://registry.npmjs.org/@aws-sdk/credential-provider-http/-/credential-provider-http-3.678.0.tgz", + "integrity": "sha512-EvpmP0nc7ddRp0qwJOSu0uBXa+MMk4+OLlyEJcdaHnZI4/BoyVWr5fJUD5eQYZk11LZPZSvnsliYXWwLyVNXHQ==", + "dev": true, + "dependencies": { + "@aws-sdk/core": "3.678.0", + "@aws-sdk/types": "3.667.0", + "@smithy/fetch-http-handler": "^3.2.9", + "@smithy/node-http-handler": "^3.2.4", + "@smithy/property-provider": "^3.1.7", + "@smithy/protocol-http": "^4.1.4", + "@smithy/smithy-client": "^3.4.0", + "@smithy/types": "^3.5.0", + "@smithy/util-stream": "^3.1.9", + "tslib": "^2.6.2" + }, + "engines": { + "node": ">=16.0.0" + } + }, + 
"node_modules/@aws-sdk/credential-provider-http/node_modules/@aws-sdk/types": { + "version": "3.667.0", + "resolved": "https://registry.npmjs.org/@aws-sdk/types/-/types-3.667.0.tgz", + "integrity": "sha512-gYq0xCsqFfQaSL/yT1Gl1vIUjtsg7d7RhnUfsXaHt8xTxOKRTdH9GjbesBjXOzgOvB0W0vfssfreSNGFlOOMJg==", + "dev": true, + "dependencies": { + "@smithy/types": "^3.5.0", + "tslib": "^2.6.2" + }, + "engines": { + "node": ">=16.0.0" + } + }, + "node_modules/@aws-sdk/credential-provider-ini": { + "version": "3.678.0", + "resolved": "https://registry.npmjs.org/@aws-sdk/credential-provider-ini/-/credential-provider-ini-3.678.0.tgz", + "integrity": "sha512-8kHy7V5rRO73EpBCUclykP9T/QIBVi0SkQsc88ZRxpdh59/JY2N6DT5khMTzrz9+Vvlw3FDMJN4AI/qWjJHhdw==", + "dev": true, + "dependencies": { + "@aws-sdk/core": "3.678.0", + "@aws-sdk/credential-provider-env": "3.678.0", + "@aws-sdk/credential-provider-http": "3.678.0", + "@aws-sdk/credential-provider-process": "3.678.0", + "@aws-sdk/credential-provider-sso": "3.678.0", + "@aws-sdk/credential-provider-web-identity": "3.678.0", + "@aws-sdk/types": "3.667.0", + "@smithy/credential-provider-imds": "^3.2.4", + "@smithy/property-provider": "^3.1.7", + "@smithy/shared-ini-file-loader": "^3.1.8", + "@smithy/types": "^3.5.0", + "tslib": "^2.6.2" + }, + "engines": { + "node": ">=16.0.0" + }, + "peerDependencies": { + "@aws-sdk/client-sts": "^3.678.0" + } + }, + "node_modules/@aws-sdk/credential-provider-ini/node_modules/@aws-sdk/types": { + "version": "3.667.0", + "resolved": "https://registry.npmjs.org/@aws-sdk/types/-/types-3.667.0.tgz", + "integrity": "sha512-gYq0xCsqFfQaSL/yT1Gl1vIUjtsg7d7RhnUfsXaHt8xTxOKRTdH9GjbesBjXOzgOvB0W0vfssfreSNGFlOOMJg==", + "dev": true, + "dependencies": { + "@smithy/types": "^3.5.0", + "tslib": "^2.6.2" + }, + "engines": { + "node": ">=16.0.0" + } + }, + "node_modules/@aws-sdk/credential-provider-ini/node_modules/@smithy/shared-ini-file-loader": { + "version": "3.1.9", + "resolved": 
"https://registry.npmjs.org/@smithy/shared-ini-file-loader/-/shared-ini-file-loader-3.1.9.tgz", + "integrity": "sha512-/+OsJRNtoRbtsX0UpSgWVxFZLsJHo/4sTr+kBg/J78sr7iC+tHeOvOJrS5hCpVQ6sWBbhWLp1UNiuMyZhE6pmA==", + "dev": true, + "dependencies": { + "@smithy/types": "^3.6.0", + "tslib": "^2.6.2" + }, + "engines": { + "node": ">=16.0.0" + } + }, + "node_modules/@aws-sdk/credential-provider-node": { + "version": "3.678.0", + "resolved": "https://registry.npmjs.org/@aws-sdk/credential-provider-node/-/credential-provider-node-3.678.0.tgz", + "integrity": "sha512-KGRBVD/oNr/aD+Wy5zc5AjfeSv5b4ahAu5eAUbOz+eGjGpGgrMtjY+R2rDY/3i3wFj9/DvOIfFGeZQMwtDzIuA==", + "dev": true, + "dependencies": { + "@aws-sdk/credential-provider-env": "3.678.0", + "@aws-sdk/credential-provider-http": "3.678.0", + "@aws-sdk/credential-provider-ini": "3.678.0", + "@aws-sdk/credential-provider-process": "3.678.0", + "@aws-sdk/credential-provider-sso": "3.678.0", + "@aws-sdk/credential-provider-web-identity": "3.678.0", + "@aws-sdk/types": "3.667.0", + "@smithy/credential-provider-imds": "^3.2.4", + "@smithy/property-provider": "^3.1.7", + "@smithy/shared-ini-file-loader": "^3.1.8", + "@smithy/types": "^3.5.0", + "tslib": "^2.6.2" + }, + "engines": { + "node": ">=16.0.0" + } + }, + "node_modules/@aws-sdk/credential-provider-node/node_modules/@aws-sdk/types": { + "version": "3.667.0", + "resolved": "https://registry.npmjs.org/@aws-sdk/types/-/types-3.667.0.tgz", + "integrity": "sha512-gYq0xCsqFfQaSL/yT1Gl1vIUjtsg7d7RhnUfsXaHt8xTxOKRTdH9GjbesBjXOzgOvB0W0vfssfreSNGFlOOMJg==", + "dev": true, + "dependencies": { + "@smithy/types": "^3.5.0", + "tslib": "^2.6.2" + }, + "engines": { + "node": ">=16.0.0" + } + }, + "node_modules/@aws-sdk/credential-provider-node/node_modules/@smithy/shared-ini-file-loader": { + "version": "3.1.9", + "resolved": "https://registry.npmjs.org/@smithy/shared-ini-file-loader/-/shared-ini-file-loader-3.1.9.tgz", + "integrity": 
"sha512-/+OsJRNtoRbtsX0UpSgWVxFZLsJHo/4sTr+kBg/J78sr7iC+tHeOvOJrS5hCpVQ6sWBbhWLp1UNiuMyZhE6pmA==", + "dev": true, + "dependencies": { + "@smithy/types": "^3.6.0", + "tslib": "^2.6.2" + }, + "engines": { + "node": ">=16.0.0" + } + }, + "node_modules/@aws-sdk/credential-provider-process": { + "version": "3.678.0", + "resolved": "https://registry.npmjs.org/@aws-sdk/credential-provider-process/-/credential-provider-process-3.678.0.tgz", + "integrity": "sha512-5TpzzHKwPOvUJig0bvTt+brtXfLPaSVLwea9re+XGrS5T6Hz65IaX2RL6uY1GQ0UVOqgwQ5nAti1WOfBoSJ5BA==", + "dev": true, + "dependencies": { + "@aws-sdk/core": "3.678.0", + "@aws-sdk/types": "3.667.0", + "@smithy/property-provider": "^3.1.7", + "@smithy/shared-ini-file-loader": "^3.1.8", + "@smithy/types": "^3.5.0", + "tslib": "^2.6.2" + }, + "engines": { + "node": ">=16.0.0" + } + }, + "node_modules/@aws-sdk/credential-provider-process/node_modules/@aws-sdk/types": { + "version": "3.667.0", + "resolved": "https://registry.npmjs.org/@aws-sdk/types/-/types-3.667.0.tgz", + "integrity": "sha512-gYq0xCsqFfQaSL/yT1Gl1vIUjtsg7d7RhnUfsXaHt8xTxOKRTdH9GjbesBjXOzgOvB0W0vfssfreSNGFlOOMJg==", + "dev": true, + "dependencies": { + "@smithy/types": "^3.5.0", + "tslib": "^2.6.2" + }, + "engines": { + "node": ">=16.0.0" + } + }, + "node_modules/@aws-sdk/credential-provider-process/node_modules/@smithy/shared-ini-file-loader": { + "version": "3.1.9", + "resolved": "https://registry.npmjs.org/@smithy/shared-ini-file-loader/-/shared-ini-file-loader-3.1.9.tgz", + "integrity": "sha512-/+OsJRNtoRbtsX0UpSgWVxFZLsJHo/4sTr+kBg/J78sr7iC+tHeOvOJrS5hCpVQ6sWBbhWLp1UNiuMyZhE6pmA==", + "dev": true, + "dependencies": { + "@smithy/types": "^3.6.0", + "tslib": "^2.6.2" + }, + "engines": { + "node": ">=16.0.0" + } + }, + "node_modules/@aws-sdk/credential-provider-sso": { + "version": "3.678.0", + "resolved": "https://registry.npmjs.org/@aws-sdk/credential-provider-sso/-/credential-provider-sso-3.678.0.tgz", + "integrity": 
"sha512-PXydLUsLYd1rkhZ7zwf0613u5sofxIEhh7C1QGP1MSY3L1jt8bu7pZIcMzubfvmaGZI5k84aHhhjQEiAJUxIMg==", + "dev": true, + "dependencies": { + "@aws-sdk/client-sso": "3.678.0", + "@aws-sdk/core": "3.678.0", + "@aws-sdk/token-providers": "3.667.0", + "@aws-sdk/types": "3.667.0", + "@smithy/property-provider": "^3.1.7", + "@smithy/shared-ini-file-loader": "^3.1.8", + "@smithy/types": "^3.5.0", + "tslib": "^2.6.2" + }, + "engines": { + "node": ">=16.0.0" + } + }, + "node_modules/@aws-sdk/credential-provider-sso/node_modules/@aws-sdk/types": { + "version": "3.667.0", + "resolved": "https://registry.npmjs.org/@aws-sdk/types/-/types-3.667.0.tgz", + "integrity": "sha512-gYq0xCsqFfQaSL/yT1Gl1vIUjtsg7d7RhnUfsXaHt8xTxOKRTdH9GjbesBjXOzgOvB0W0vfssfreSNGFlOOMJg==", + "dev": true, + "dependencies": { + "@smithy/types": "^3.5.0", + "tslib": "^2.6.2" + }, + "engines": { + "node": ">=16.0.0" + } + }, + "node_modules/@aws-sdk/credential-provider-sso/node_modules/@smithy/shared-ini-file-loader": { + "version": "3.1.9", + "resolved": "https://registry.npmjs.org/@smithy/shared-ini-file-loader/-/shared-ini-file-loader-3.1.9.tgz", + "integrity": "sha512-/+OsJRNtoRbtsX0UpSgWVxFZLsJHo/4sTr+kBg/J78sr7iC+tHeOvOJrS5hCpVQ6sWBbhWLp1UNiuMyZhE6pmA==", + "dev": true, + "dependencies": { + "@smithy/types": "^3.6.0", + "tslib": "^2.6.2" + }, + "engines": { + "node": ">=16.0.0" + } + }, + "node_modules/@aws-sdk/credential-provider-web-identity": { + "version": "3.678.0", + "resolved": "https://registry.npmjs.org/@aws-sdk/credential-provider-web-identity/-/credential-provider-web-identity-3.678.0.tgz", + "integrity": "sha512-fcYZjTTFcef99l+BhcEAhHS4tEK1kE6Xj5Zz5lT4tFA07BkQt3d6kUKRVVfJnsbcHH4RDBUCnLhU8HPfc/kvjA==", + "dev": true, + "dependencies": { + "@aws-sdk/core": "3.678.0", + "@aws-sdk/types": "3.667.0", + "@smithy/property-provider": "^3.1.7", + "@smithy/types": "^3.5.0", + "tslib": "^2.6.2" + }, + "engines": { + "node": ">=16.0.0" + }, + "peerDependencies": { + "@aws-sdk/client-sts": "^3.678.0" + } + 
}, + "node_modules/@aws-sdk/credential-provider-web-identity/node_modules/@aws-sdk/types": { + "version": "3.667.0", + "resolved": "https://registry.npmjs.org/@aws-sdk/types/-/types-3.667.0.tgz", + "integrity": "sha512-gYq0xCsqFfQaSL/yT1Gl1vIUjtsg7d7RhnUfsXaHt8xTxOKRTdH9GjbesBjXOzgOvB0W0vfssfreSNGFlOOMJg==", + "dev": true, + "dependencies": { + "@smithy/types": "^3.5.0", + "tslib": "^2.6.2" + }, + "engines": { + "node": ">=16.0.0" + } + }, + "node_modules/@aws-sdk/credential-providers": { + "version": "3.678.0", + "resolved": "https://registry.npmjs.org/@aws-sdk/credential-providers/-/credential-providers-3.678.0.tgz", + "integrity": "sha512-cF6IvQI1Jf5nJrK/Q7y3yFSQ8hv6MQ1g7HmZNo1tZTkywhfB3/zKcIFe6YftQul/s6RGHotXC2fr8jDkYQFDSQ==", + "dev": true, + "dependencies": { + "@aws-sdk/client-cognito-identity": "3.678.0", + "@aws-sdk/client-sso": "3.678.0", + "@aws-sdk/client-sts": "3.678.0", + "@aws-sdk/core": "3.678.0", + "@aws-sdk/credential-provider-cognito-identity": "3.678.0", + "@aws-sdk/credential-provider-env": "3.678.0", + "@aws-sdk/credential-provider-http": "3.678.0", + "@aws-sdk/credential-provider-ini": "3.678.0", + "@aws-sdk/credential-provider-node": "3.678.0", + "@aws-sdk/credential-provider-process": "3.678.0", + "@aws-sdk/credential-provider-sso": "3.678.0", + "@aws-sdk/credential-provider-web-identity": "3.678.0", + "@aws-sdk/types": "3.667.0", + "@smithy/credential-provider-imds": "^3.2.4", + "@smithy/property-provider": "^3.1.7", + "@smithy/types": "^3.5.0", + "tslib": "^2.6.2" + }, + "engines": { + "node": ">=16.0.0" + } + }, + "node_modules/@aws-sdk/credential-providers/node_modules/@aws-sdk/types": { + "version": "3.667.0", + "resolved": "https://registry.npmjs.org/@aws-sdk/types/-/types-3.667.0.tgz", + "integrity": "sha512-gYq0xCsqFfQaSL/yT1Gl1vIUjtsg7d7RhnUfsXaHt8xTxOKRTdH9GjbesBjXOzgOvB0W0vfssfreSNGFlOOMJg==", + "dev": true, + "dependencies": { + "@smithy/types": "^3.5.0", + "tslib": "^2.6.2" + }, + "engines": { + "node": ">=16.0.0" + } + }, + 
"node_modules/@aws-sdk/endpoint-cache": { + "version": "3.723.0", + "resolved": "https://registry.npmjs.org/@aws-sdk/endpoint-cache/-/endpoint-cache-3.723.0.tgz", + "integrity": "sha512-2+a4WXRc+07uiPR+zJiPGKSOWaNJQNqitkks+6Hhm/haTLJqNVTgY2OWDh2PXvwMNpKB+AlGdhE65Oy6NzUgXg==", + "dependencies": { + "mnemonist": "0.38.3", + "tslib": "^2.6.2" + }, + "engines": { + "node": ">=18.0.0" + } + }, + "node_modules/@aws-sdk/lib-dynamodb": { + "version": "3.799.0", + "resolved": "https://registry.npmjs.org/@aws-sdk/lib-dynamodb/-/lib-dynamodb-3.799.0.tgz", + "integrity": "sha512-HcjWRyv9T0WuDTeLTwuh8YEJhyZm462Gzmki5+VEkwOxX7Sq3hoBrKqgko++y7qW8bjKou3DlynyuShhSw1mjw==", + "dependencies": { + "@aws-sdk/core": "3.799.0", + "@aws-sdk/util-dynamodb": "3.799.0", + "@smithy/core": "^3.3.0", + "@smithy/smithy-client": "^4.2.1", + "@smithy/types": "^4.2.0", + "tslib": "^2.6.2" + }, + "engines": { + "node": ">=18.0.0" + }, + "peerDependencies": { + "@aws-sdk/client-dynamodb": "^3.799.0" + } + }, + "node_modules/@aws-sdk/lib-dynamodb/node_modules/@aws-sdk/core": { + "version": "3.799.0", + "resolved": "https://registry.npmjs.org/@aws-sdk/core/-/core-3.799.0.tgz", + "integrity": "sha512-hkKF3Zpc6+H8GI1rlttYVRh9uEE77cqAzLmLpY3iu7sql8cZgPERRBfaFct8p1SaDyrksLNiboD1vKW58mbsYg==", + "dependencies": { + "@aws-sdk/types": "3.775.0", + "@smithy/core": "^3.3.0", + "@smithy/node-config-provider": "^4.0.2", + "@smithy/property-provider": "^4.0.2", + "@smithy/protocol-http": "^5.1.0", + "@smithy/signature-v4": "^5.1.0", + "@smithy/smithy-client": "^4.2.1", + "@smithy/types": "^4.2.0", + "@smithy/util-middleware": "^4.0.2", + "fast-xml-parser": "4.4.1", + "tslib": "^2.6.2" + }, + "engines": { + "node": ">=18.0.0" + } + }, + "node_modules/@aws-sdk/lib-dynamodb/node_modules/@smithy/abort-controller": { + "version": "4.0.2", + "resolved": "https://registry.npmjs.org/@smithy/abort-controller/-/abort-controller-4.0.2.tgz", + "integrity": 
"sha512-Sl/78VDtgqKxN2+1qduaVE140XF+Xg+TafkncspwM4jFP/LHr76ZHmIY/y3V1M0mMLNk+Je6IGbzxy23RSToMw==", + "dependencies": { + "@smithy/types": "^4.2.0", + "tslib": "^2.6.2" + }, + "engines": { + "node": ">=18.0.0" + } + }, + "node_modules/@aws-sdk/lib-dynamodb/node_modules/@smithy/core": { + "version": "3.3.0", + "resolved": "https://registry.npmjs.org/@smithy/core/-/core-3.3.0.tgz", + "integrity": "sha512-r6gvs5OfRq/w+9unPm7B3po4rmWaGh0CIL/OwHntGGux7+RhOOZLGuurbeMgWV6W55ZuyMTypJLeH0vn/ZRaWQ==", + "dependencies": { + "@smithy/middleware-serde": "^4.0.3", + "@smithy/protocol-http": "^5.1.0", + "@smithy/types": "^4.2.0", + "@smithy/util-body-length-browser": "^4.0.0", + "@smithy/util-middleware": "^4.0.2", + "@smithy/util-stream": "^4.2.0", + "@smithy/util-utf8": "^4.0.0", + "tslib": "^2.6.2" + }, + "engines": { + "node": ">=18.0.0" + } + }, + "node_modules/@aws-sdk/lib-dynamodb/node_modules/@smithy/fetch-http-handler": { + "version": "5.0.2", + "resolved": "https://registry.npmjs.org/@smithy/fetch-http-handler/-/fetch-http-handler-5.0.2.tgz", + "integrity": "sha512-+9Dz8sakS9pe7f2cBocpJXdeVjMopUDLgZs1yWeu7h++WqSbjUYv/JAJwKwXw1HV6gq1jyWjxuyn24E2GhoEcQ==", + "dependencies": { + "@smithy/protocol-http": "^5.1.0", + "@smithy/querystring-builder": "^4.0.2", + "@smithy/types": "^4.2.0", + "@smithy/util-base64": "^4.0.0", + "tslib": "^2.6.2" + }, + "engines": { + "node": ">=18.0.0" + } + }, + "node_modules/@aws-sdk/lib-dynamodb/node_modules/@smithy/is-array-buffer": { + "version": "4.0.0", + "resolved": "https://registry.npmjs.org/@smithy/is-array-buffer/-/is-array-buffer-4.0.0.tgz", + "integrity": "sha512-saYhF8ZZNoJDTvJBEWgeBccCg+yvp1CX+ed12yORU3NilJScfc6gfch2oVb4QgxZrGUx3/ZJlb+c/dJbyupxlw==", + "dependencies": { + "tslib": "^2.6.2" + }, + "engines": { + "node": ">=18.0.0" } }, - "node_modules/@aws-sdk/client-rds/node_modules/@aws-sdk/credential-provider-env": { - "version": "3.620.1", - "resolved": 
"https://registry.npmjs.org/@aws-sdk/credential-provider-env/-/credential-provider-env-3.620.1.tgz", - "integrity": "sha512-ExuILJ2qLW5ZO+rgkNRj0xiAipKT16Rk77buvPP8csR7kkCflT/gXTyzRe/uzIiETTxM7tr8xuO9MP/DQXqkfg==", - "dev": true, + "node_modules/@aws-sdk/lib-dynamodb/node_modules/@smithy/middleware-endpoint": { + "version": "4.1.1", + "resolved": "https://registry.npmjs.org/@smithy/middleware-endpoint/-/middleware-endpoint-4.1.1.tgz", + "integrity": "sha512-z5RmcHxjvScL+LwEDU2mTNCOhgUs4lu5PGdF1K36IPRmUHhNFxNxgenSB7smyDiYD4vdKQ7CAZtG5cUErqib9w==", "dependencies": { - "@aws-sdk/types": "3.609.0", - "@smithy/property-provider": "^3.1.3", - "@smithy/types": "^3.3.0", + "@smithy/core": "^3.3.0", + "@smithy/middleware-serde": "^4.0.3", + "@smithy/node-config-provider": "^4.0.2", + "@smithy/shared-ini-file-loader": "^4.0.2", + "@smithy/types": "^4.2.0", + "@smithy/url-parser": "^4.0.2", + "@smithy/util-middleware": "^4.0.2", "tslib": "^2.6.2" }, "engines": { - "node": ">=16.0.0" + "node": ">=18.0.0" } }, - "node_modules/@aws-sdk/client-rds/node_modules/@aws-sdk/credential-provider-http": { - "version": "3.622.0", - "resolved": "https://registry.npmjs.org/@aws-sdk/credential-provider-http/-/credential-provider-http-3.622.0.tgz", - "integrity": "sha512-VUHbr24Oll1RK3WR8XLUugLpgK9ZuxEm/NVeVqyFts1Ck9gsKpRg1x4eH7L7tW3SJ4TDEQNMbD7/7J+eoL2svg==", - "dev": true, + "node_modules/@aws-sdk/lib-dynamodb/node_modules/@smithy/middleware-serde": { + "version": "4.0.3", + "resolved": "https://registry.npmjs.org/@smithy/middleware-serde/-/middleware-serde-4.0.3.tgz", + "integrity": "sha512-rfgDVrgLEVMmMn0BI8O+8OVr6vXzjV7HZj57l0QxslhzbvVfikZbVfBVthjLHqib4BW44QhcIgJpvebHlRaC9A==", "dependencies": { - "@aws-sdk/types": "3.609.0", - "@smithy/fetch-http-handler": "^3.2.4", - "@smithy/node-http-handler": "^3.1.4", - "@smithy/property-provider": "^3.1.3", - "@smithy/protocol-http": "^4.1.0", - "@smithy/smithy-client": "^3.1.12", - "@smithy/types": "^3.3.0", - "@smithy/util-stream": "^3.1.3", + 
"@smithy/types": "^4.2.0", "tslib": "^2.6.2" }, "engines": { - "node": ">=16.0.0" + "node": ">=18.0.0" } }, - "node_modules/@aws-sdk/client-rds/node_modules/@aws-sdk/credential-provider-ini": { - "version": "3.624.0", - "resolved": "https://registry.npmjs.org/@aws-sdk/credential-provider-ini/-/credential-provider-ini-3.624.0.tgz", - "integrity": "sha512-mMoNIy7MO2WTBbdqMyLpbt6SZpthE6e0GkRYpsd0yozPt0RZopcBhEh+HG1U9Y1PVODo+jcMk353vAi61CfnhQ==", - "dev": true, + "node_modules/@aws-sdk/lib-dynamodb/node_modules/@smithy/middleware-stack": { + "version": "4.0.2", + "resolved": "https://registry.npmjs.org/@smithy/middleware-stack/-/middleware-stack-4.0.2.tgz", + "integrity": "sha512-eSPVcuJJGVYrFYu2hEq8g8WWdJav3sdrI4o2c6z/rjnYDd3xH9j9E7deZQCzFn4QvGPouLngH3dQ+QVTxv5bOQ==", "dependencies": { - "@aws-sdk/credential-provider-env": "3.620.1", - "@aws-sdk/credential-provider-http": "3.622.0", - "@aws-sdk/credential-provider-process": "3.620.1", - "@aws-sdk/credential-provider-sso": "3.624.0", - "@aws-sdk/credential-provider-web-identity": "3.621.0", - "@aws-sdk/types": "3.609.0", - "@smithy/credential-provider-imds": "^3.2.0", - "@smithy/property-provider": "^3.1.3", - "@smithy/shared-ini-file-loader": "^3.1.4", - "@smithy/types": "^3.3.0", + "@smithy/types": "^4.2.0", "tslib": "^2.6.2" }, "engines": { - "node": ">=16.0.0" - }, - "peerDependencies": { - "@aws-sdk/client-sts": "^3.624.0" + "node": ">=18.0.0" } }, - "node_modules/@aws-sdk/client-rds/node_modules/@aws-sdk/credential-provider-node": { - "version": "3.624.0", - "resolved": "https://registry.npmjs.org/@aws-sdk/credential-provider-node/-/credential-provider-node-3.624.0.tgz", - "integrity": "sha512-vYyGK7oNpd81BdbH5IlmQ6zfaQqU+rPwsKTDDBeLRjshtrGXOEpfoahVpG9PX0ibu32IOWp4ZyXBNyVrnvcMOw==", - "dev": true, + "node_modules/@aws-sdk/lib-dynamodb/node_modules/@smithy/node-config-provider": { + "version": "4.0.2", + "resolved": "https://registry.npmjs.org/@smithy/node-config-provider/-/node-config-provider-4.0.2.tgz", + 
"integrity": "sha512-WgCkILRZfJwJ4Da92a6t3ozN/zcvYyJGUTmfGbgS/FkCcoCjl7G4FJaCDN1ySdvLvemnQeo25FdkyMSTSwulsw==", "dependencies": { - "@aws-sdk/credential-provider-env": "3.620.1", - "@aws-sdk/credential-provider-http": "3.622.0", - "@aws-sdk/credential-provider-ini": "3.624.0", - "@aws-sdk/credential-provider-process": "3.620.1", - "@aws-sdk/credential-provider-sso": "3.624.0", - "@aws-sdk/credential-provider-web-identity": "3.621.0", - "@aws-sdk/types": "3.609.0", - "@smithy/credential-provider-imds": "^3.2.0", - "@smithy/property-provider": "^3.1.3", - "@smithy/shared-ini-file-loader": "^3.1.4", - "@smithy/types": "^3.3.0", + "@smithy/property-provider": "^4.0.2", + "@smithy/shared-ini-file-loader": "^4.0.2", + "@smithy/types": "^4.2.0", "tslib": "^2.6.2" }, "engines": { - "node": ">=16.0.0" + "node": ">=18.0.0" } }, - "node_modules/@aws-sdk/client-rds/node_modules/@aws-sdk/credential-provider-process": { - "version": "3.620.1", - "resolved": "https://registry.npmjs.org/@aws-sdk/credential-provider-process/-/credential-provider-process-3.620.1.tgz", - "integrity": "sha512-hWqFMidqLAkaV9G460+1at6qa9vySbjQKKc04p59OT7lZ5cO5VH5S4aI05e+m4j364MBROjjk2ugNvfNf/8ILg==", - "dev": true, + "node_modules/@aws-sdk/lib-dynamodb/node_modules/@smithy/node-http-handler": { + "version": "4.0.4", + "resolved": "https://registry.npmjs.org/@smithy/node-http-handler/-/node-http-handler-4.0.4.tgz", + "integrity": "sha512-/mdqabuAT3o/ihBGjL94PUbTSPSRJ0eeVTdgADzow0wRJ0rN4A27EOrtlK56MYiO1fDvlO3jVTCxQtQmK9dZ1g==", "dependencies": { - "@aws-sdk/types": "3.609.0", - "@smithy/property-provider": "^3.1.3", - "@smithy/shared-ini-file-loader": "^3.1.4", - "@smithy/types": "^3.3.0", + "@smithy/abort-controller": "^4.0.2", + "@smithy/protocol-http": "^5.1.0", + "@smithy/querystring-builder": "^4.0.2", + "@smithy/types": "^4.2.0", "tslib": "^2.6.2" }, "engines": { - "node": ">=16.0.0" + "node": ">=18.0.0" } }, - "node_modules/@aws-sdk/client-rds/node_modules/@aws-sdk/credential-provider-sso": { - 
"version": "3.624.0", - "resolved": "https://registry.npmjs.org/@aws-sdk/credential-provider-sso/-/credential-provider-sso-3.624.0.tgz", - "integrity": "sha512-A02bayIjU9APEPKr3HudrFHEx0WfghoSPsPopckDkW7VBqO4wizzcxr75Q9A3vNX+cwg0wCN6UitTNe6pVlRaQ==", - "dev": true, + "node_modules/@aws-sdk/lib-dynamodb/node_modules/@smithy/property-provider": { + "version": "4.0.2", + "resolved": "https://registry.npmjs.org/@smithy/property-provider/-/property-provider-4.0.2.tgz", + "integrity": "sha512-wNRoQC1uISOuNc2s4hkOYwYllmiyrvVXWMtq+TysNRVQaHm4yoafYQyjN/goYZS+QbYlPIbb/QRjaUZMuzwQ7A==", "dependencies": { - "@aws-sdk/client-sso": "3.624.0", - "@aws-sdk/token-providers": "3.614.0", - "@aws-sdk/types": "3.609.0", - "@smithy/property-provider": "^3.1.3", - "@smithy/shared-ini-file-loader": "^3.1.4", - "@smithy/types": "^3.3.0", + "@smithy/types": "^4.2.0", "tslib": "^2.6.2" }, "engines": { - "node": ">=16.0.0" + "node": ">=18.0.0" } }, - "node_modules/@aws-sdk/client-rds/node_modules/@aws-sdk/credential-provider-web-identity": { - "version": "3.621.0", - "resolved": "https://registry.npmjs.org/@aws-sdk/credential-provider-web-identity/-/credential-provider-web-identity-3.621.0.tgz", - "integrity": "sha512-w7ASSyfNvcx7+bYGep3VBgC3K6vEdLmlpjT7nSIHxxQf+WSdvy+HynwJosrpZax0sK5q0D1Jpn/5q+r5lwwW6w==", - "dev": true, + "node_modules/@aws-sdk/lib-dynamodb/node_modules/@smithy/protocol-http": { + "version": "5.1.0", + "resolved": "https://registry.npmjs.org/@smithy/protocol-http/-/protocol-http-5.1.0.tgz", + "integrity": "sha512-KxAOL1nUNw2JTYrtviRRjEnykIDhxc84qMBzxvu1MUfQfHTuBlCG7PA6EdVwqpJjH7glw7FqQoFxUJSyBQgu7g==", "dependencies": { - "@aws-sdk/types": "3.609.0", - "@smithy/property-provider": "^3.1.3", - "@smithy/types": "^3.3.0", + "@smithy/types": "^4.2.0", "tslib": "^2.6.2" }, "engines": { - "node": ">=16.0.0" - }, - "peerDependencies": { - "@aws-sdk/client-sts": "^3.621.0" + "node": ">=18.0.0" } }, - "node_modules/@aws-sdk/client-rds/node_modules/@aws-sdk/middleware-host-header": { 
- "version": "3.620.0", - "resolved": "https://registry.npmjs.org/@aws-sdk/middleware-host-header/-/middleware-host-header-3.620.0.tgz", - "integrity": "sha512-VMtPEZwqYrII/oUkffYsNWY9PZ9xpNJpMgmyU0rlDQ25O1c0Hk3fJmZRe6pEkAJ0omD7kLrqGl1DUjQVxpd/Rg==", - "dev": true, + "node_modules/@aws-sdk/lib-dynamodb/node_modules/@smithy/querystring-builder": { + "version": "4.0.2", + "resolved": "https://registry.npmjs.org/@smithy/querystring-builder/-/querystring-builder-4.0.2.tgz", + "integrity": "sha512-NTOs0FwHw1vimmQM4ebh+wFQvOwkEf/kQL6bSM1Lock+Bv4I89B3hGYoUEPkmvYPkDKyp5UdXJYu+PoTQ3T31Q==", "dependencies": { - "@aws-sdk/types": "3.609.0", - "@smithy/protocol-http": "^4.1.0", - "@smithy/types": "^3.3.0", + "@smithy/types": "^4.2.0", + "@smithy/util-uri-escape": "^4.0.0", "tslib": "^2.6.2" }, "engines": { - "node": ">=16.0.0" + "node": ">=18.0.0" } }, - "node_modules/@aws-sdk/client-rds/node_modules/@aws-sdk/middleware-logger": { - "version": "3.609.0", - "resolved": "https://registry.npmjs.org/@aws-sdk/middleware-logger/-/middleware-logger-3.609.0.tgz", - "integrity": "sha512-S62U2dy4jMDhDFDK5gZ4VxFdWzCtLzwbYyFZx2uvPYTECkepLUfzLic2BHg2Qvtu4QjX+oGE3P/7fwaGIsGNuQ==", - "dev": true, + "node_modules/@aws-sdk/lib-dynamodb/node_modules/@smithy/querystring-parser": { + "version": "4.0.2", + "resolved": "https://registry.npmjs.org/@smithy/querystring-parser/-/querystring-parser-4.0.2.tgz", + "integrity": "sha512-v6w8wnmZcVXjfVLjxw8qF7OwESD9wnpjp0Dqry/Pod0/5vcEA3qxCr+BhbOHlxS8O+29eLpT3aagxXGwIoEk7Q==", "dependencies": { - "@aws-sdk/types": "3.609.0", - "@smithy/types": "^3.3.0", + "@smithy/types": "^4.2.0", "tslib": "^2.6.2" }, "engines": { - "node": ">=16.0.0" + "node": ">=18.0.0" } }, - "node_modules/@aws-sdk/client-rds/node_modules/@aws-sdk/middleware-recursion-detection": { - "version": "3.620.0", - "resolved": "https://registry.npmjs.org/@aws-sdk/middleware-recursion-detection/-/middleware-recursion-detection-3.620.0.tgz", - "integrity": 
"sha512-nh91S7aGK3e/o1ck64sA/CyoFw+gAYj2BDOnoNa6ouyCrVJED96ZXWbhye/fz9SgmNUZR2g7GdVpiLpMKZoI5w==", - "dev": true, + "node_modules/@aws-sdk/lib-dynamodb/node_modules/@smithy/shared-ini-file-loader": { + "version": "4.0.2", + "resolved": "https://registry.npmjs.org/@smithy/shared-ini-file-loader/-/shared-ini-file-loader-4.0.2.tgz", + "integrity": "sha512-J9/gTWBGVuFZ01oVA6vdb4DAjf1XbDhK6sLsu3OS9qmLrS6KB5ygpeHiM3miIbj1qgSJ96GYszXFWv6ErJ8QEw==", "dependencies": { - "@aws-sdk/types": "3.609.0", - "@smithy/protocol-http": "^4.1.0", - "@smithy/types": "^3.3.0", + "@smithy/types": "^4.2.0", "tslib": "^2.6.2" }, "engines": { - "node": ">=16.0.0" + "node": ">=18.0.0" } }, - "node_modules/@aws-sdk/client-rds/node_modules/@aws-sdk/middleware-user-agent": { - "version": "3.620.0", - "resolved": "https://registry.npmjs.org/@aws-sdk/middleware-user-agent/-/middleware-user-agent-3.620.0.tgz", - "integrity": "sha512-bvS6etn+KsuL32ubY5D3xNof1qkenpbJXf/ugGXbg0n98DvDFQ/F+SMLxHgbnER5dsKYchNnhmtI6/FC3HFu/A==", - "dev": true, + "node_modules/@aws-sdk/lib-dynamodb/node_modules/@smithy/signature-v4": { + "version": "5.1.0", + "resolved": "https://registry.npmjs.org/@smithy/signature-v4/-/signature-v4-5.1.0.tgz", + "integrity": "sha512-4t5WX60sL3zGJF/CtZsUQTs3UrZEDO2P7pEaElrekbLqkWPYkgqNW1oeiNYC6xXifBnT9dVBOnNQRvOE9riU9w==", "dependencies": { - "@aws-sdk/types": "3.609.0", - "@aws-sdk/util-endpoints": "3.614.0", - "@smithy/protocol-http": "^4.1.0", - "@smithy/types": "^3.3.0", + "@smithy/is-array-buffer": "^4.0.0", + "@smithy/protocol-http": "^5.1.0", + "@smithy/types": "^4.2.0", + "@smithy/util-hex-encoding": "^4.0.0", + "@smithy/util-middleware": "^4.0.2", + "@smithy/util-uri-escape": "^4.0.0", + "@smithy/util-utf8": "^4.0.0", "tslib": "^2.6.2" }, "engines": { - "node": ">=16.0.0" + "node": ">=18.0.0" } }, - "node_modules/@aws-sdk/client-rds/node_modules/@aws-sdk/region-config-resolver": { - "version": "3.614.0", - "resolved": 
"https://registry.npmjs.org/@aws-sdk/region-config-resolver/-/region-config-resolver-3.614.0.tgz", - "integrity": "sha512-vDCeMXvic/LU0KFIUjpC3RiSTIkkvESsEfbVHiHH0YINfl8HnEqR5rj+L8+phsCeVg2+LmYwYxd5NRz4PHxt5g==", - "dev": true, + "node_modules/@aws-sdk/lib-dynamodb/node_modules/@smithy/smithy-client": { + "version": "4.2.1", + "resolved": "https://registry.npmjs.org/@smithy/smithy-client/-/smithy-client-4.2.1.tgz", + "integrity": "sha512-fbniZef60QdsBc4ZY0iyI8xbFHIiC/QRtPi66iE4ufjiE/aaz7AfUXzcWMkpO8r+QhLeNRIfmPchIG+3/QDZ6g==", "dependencies": { - "@aws-sdk/types": "3.609.0", - "@smithy/node-config-provider": "^3.1.4", - "@smithy/types": "^3.3.0", - "@smithy/util-config-provider": "^3.0.0", - "@smithy/util-middleware": "^3.0.3", + "@smithy/core": "^3.3.0", + "@smithy/middleware-endpoint": "^4.1.1", + "@smithy/middleware-stack": "^4.0.2", + "@smithy/protocol-http": "^5.1.0", + "@smithy/types": "^4.2.0", + "@smithy/util-stream": "^4.2.0", "tslib": "^2.6.2" }, "engines": { - "node": ">=16.0.0" + "node": ">=18.0.0" } }, - "node_modules/@aws-sdk/client-rds/node_modules/@aws-sdk/token-providers": { - "version": "3.614.0", - "resolved": "https://registry.npmjs.org/@aws-sdk/token-providers/-/token-providers-3.614.0.tgz", - "integrity": "sha512-okItqyY6L9IHdxqs+Z116y5/nda7rHxLvROxtAJdLavWTYDydxrZstImNgGWTeVdmc0xX2gJCI77UYUTQWnhRw==", - "dev": true, + "node_modules/@aws-sdk/lib-dynamodb/node_modules/@smithy/types": { + "version": "4.2.0", + "resolved": "https://registry.npmjs.org/@smithy/types/-/types-4.2.0.tgz", + "integrity": "sha512-7eMk09zQKCO+E/ivsjQv+fDlOupcFUCSC/L2YUPgwhvowVGWbPQHjEFcmjt7QQ4ra5lyowS92SV53Zc6XD4+fg==", "dependencies": { - "@aws-sdk/types": "3.609.0", - "@smithy/property-provider": "^3.1.3", - "@smithy/shared-ini-file-loader": "^3.1.4", - "@smithy/types": "^3.3.0", "tslib": "^2.6.2" }, "engines": { - "node": ">=16.0.0" - }, - "peerDependencies": { - "@aws-sdk/client-sso-oidc": "^3.614.0" + "node": ">=18.0.0" } }, - 
"node_modules/@aws-sdk/client-rds/node_modules/@aws-sdk/types": { - "version": "3.609.0", - "resolved": "https://registry.npmjs.org/@aws-sdk/types/-/types-3.609.0.tgz", - "integrity": "sha512-+Tqnh9w0h2LcrUsdXyT1F8mNhXz+tVYBtP19LpeEGntmvHwa2XzvLUCWpoIAIVsHp5+HdB2X9Sn0KAtmbFXc2Q==", - "dev": true, + "node_modules/@aws-sdk/lib-dynamodb/node_modules/@smithy/url-parser": { + "version": "4.0.2", + "resolved": "https://registry.npmjs.org/@smithy/url-parser/-/url-parser-4.0.2.tgz", + "integrity": "sha512-Bm8n3j2ScqnT+kJaClSVCMeiSenK6jVAzZCNewsYWuZtnBehEz4r2qP0riZySZVfzB+03XZHJeqfmJDkeeSLiQ==", "dependencies": { - "@smithy/types": "^3.3.0", + "@smithy/querystring-parser": "^4.0.2", + "@smithy/types": "^4.2.0", "tslib": "^2.6.2" }, "engines": { - "node": ">=16.0.0" + "node": ">=18.0.0" } }, - "node_modules/@aws-sdk/client-rds/node_modules/@aws-sdk/util-endpoints": { - "version": "3.614.0", - "resolved": "https://registry.npmjs.org/@aws-sdk/util-endpoints/-/util-endpoints-3.614.0.tgz", - "integrity": "sha512-wK2cdrXHH4oz4IomV/yrGkftU9A+ITB6nFL+rxxyO78is2ifHJpFdV4aqk4LSkXYPi6CXWNru/Dqc7yiKXgJPw==", - "dev": true, + "node_modules/@aws-sdk/lib-dynamodb/node_modules/@smithy/util-base64": { + "version": "4.0.0", + "resolved": "https://registry.npmjs.org/@smithy/util-base64/-/util-base64-4.0.0.tgz", + "integrity": "sha512-CvHfCmO2mchox9kjrtzoHkWHxjHZzaFojLc8quxXY7WAAMAg43nuxwv95tATVgQFNDwd4M9S1qFzj40Ul41Kmg==", "dependencies": { - "@aws-sdk/types": "3.609.0", - "@smithy/types": "^3.3.0", - "@smithy/util-endpoints": "^2.0.5", + "@smithy/util-buffer-from": "^4.0.0", + "@smithy/util-utf8": "^4.0.0", "tslib": "^2.6.2" }, "engines": { - "node": ">=16.0.0" + "node": ">=18.0.0" } }, - "node_modules/@aws-sdk/client-rds/node_modules/@aws-sdk/util-user-agent-browser": { - "version": "3.609.0", - "resolved": "https://registry.npmjs.org/@aws-sdk/util-user-agent-browser/-/util-user-agent-browser-3.609.0.tgz", - "integrity": 
"sha512-fojPU+mNahzQ0YHYBsx0ZIhmMA96H+ZIZ665ObU9tl+SGdbLneVZVikGve+NmHTQwHzwkFsZYYnVKAkreJLAtA==", - "dev": true, + "node_modules/@aws-sdk/lib-dynamodb/node_modules/@smithy/util-body-length-browser": { + "version": "4.0.0", + "resolved": "https://registry.npmjs.org/@smithy/util-body-length-browser/-/util-body-length-browser-4.0.0.tgz", + "integrity": "sha512-sNi3DL0/k64/LO3A256M+m3CDdG6V7WKWHdAiBBMUN8S3hK3aMPhwnPik2A/a2ONN+9doY9UxaLfgqsIRg69QA==", "dependencies": { - "@aws-sdk/types": "3.609.0", - "@smithy/types": "^3.3.0", - "bowser": "^2.11.0", "tslib": "^2.6.2" + }, + "engines": { + "node": ">=18.0.0" } }, - "node_modules/@aws-sdk/client-rds/node_modules/@aws-sdk/util-user-agent-node": { - "version": "3.614.0", - "resolved": "https://registry.npmjs.org/@aws-sdk/util-user-agent-node/-/util-user-agent-node-3.614.0.tgz", - "integrity": "sha512-15ElZT88peoHnq5TEoEtZwoXTXRxNrk60TZNdpl/TUBJ5oNJ9Dqb5Z4ryb8ofN6nm9aFf59GVAerFDz8iUoHBA==", - "dev": true, + "node_modules/@aws-sdk/lib-dynamodb/node_modules/@smithy/util-buffer-from": { + "version": "4.0.0", + "resolved": "https://registry.npmjs.org/@smithy/util-buffer-from/-/util-buffer-from-4.0.0.tgz", + "integrity": "sha512-9TOQ7781sZvddgO8nxueKi3+yGvkY35kotA0Y6BWRajAv8jjmigQ1sBwz0UX47pQMYXJPahSKEKYFgt+rXdcug==", "dependencies": { - "@aws-sdk/types": "3.609.0", - "@smithy/node-config-provider": "^3.1.4", - "@smithy/types": "^3.3.0", + "@smithy/is-array-buffer": "^4.0.0", "tslib": "^2.6.2" }, "engines": { - "node": ">=16.0.0" - }, - "peerDependencies": { - "aws-crt": ">=1.0.0" - }, - "peerDependenciesMeta": { - "aws-crt": { - "optional": true - } + "node": ">=18.0.0" } }, - "node_modules/@aws-sdk/client-rds/node_modules/@smithy/node-config-provider": { - "version": "3.1.9", - "resolved": "https://registry.npmjs.org/@smithy/node-config-provider/-/node-config-provider-3.1.9.tgz", - "integrity": "sha512-qRHoah49QJ71eemjuS/WhUXB+mpNtwHRWQr77J/m40ewBVVwvo52kYAmb7iuaECgGTTcYxHS4Wmewfwy++ueew==", - "dev": true, + 
"node_modules/@aws-sdk/lib-dynamodb/node_modules/@smithy/util-hex-encoding": { + "version": "4.0.0", + "resolved": "https://registry.npmjs.org/@smithy/util-hex-encoding/-/util-hex-encoding-4.0.0.tgz", + "integrity": "sha512-Yk5mLhHtfIgW2W2WQZWSg5kuMZCVbvhFmC7rV4IO2QqnZdbEFPmQnCcGMAX2z/8Qj3B9hYYNjZOhWym+RwhePw==", "dependencies": { - "@smithy/property-provider": "^3.1.8", - "@smithy/shared-ini-file-loader": "^3.1.9", - "@smithy/types": "^3.6.0", "tslib": "^2.6.2" }, "engines": { - "node": ">=16.0.0" + "node": ">=18.0.0" } }, - "node_modules/@aws-sdk/client-rds/node_modules/@smithy/shared-ini-file-loader": { - "version": "3.1.9", - "resolved": "https://registry.npmjs.org/@smithy/shared-ini-file-loader/-/shared-ini-file-loader-3.1.9.tgz", - "integrity": "sha512-/+OsJRNtoRbtsX0UpSgWVxFZLsJHo/4sTr+kBg/J78sr7iC+tHeOvOJrS5hCpVQ6sWBbhWLp1UNiuMyZhE6pmA==", - "dev": true, + "node_modules/@aws-sdk/lib-dynamodb/node_modules/@smithy/util-middleware": { + "version": "4.0.2", + "resolved": "https://registry.npmjs.org/@smithy/util-middleware/-/util-middleware-4.0.2.tgz", + "integrity": "sha512-6GDamTGLuBQVAEuQ4yDQ+ti/YINf/MEmIegrEeg7DdB/sld8BX1lqt9RRuIcABOhAGTA50bRbPzErez7SlDtDQ==", + "dependencies": { + "@smithy/types": "^4.2.0", + "tslib": "^2.6.2" + }, + "engines": { + "node": ">=18.0.0" + } + }, + "node_modules/@aws-sdk/lib-dynamodb/node_modules/@smithy/util-stream": { + "version": "4.2.0", + "resolved": "https://registry.npmjs.org/@smithy/util-stream/-/util-stream-4.2.0.tgz", + "integrity": "sha512-Vj1TtwWnuWqdgQI6YTUF5hQ/0jmFiOYsc51CSMgj7QfyO+RF4EnT2HNjoviNlOOmgzgvf3f5yno+EiC4vrnaWQ==", "dependencies": { - "@smithy/types": "^3.6.0", + "@smithy/fetch-http-handler": "^5.0.2", + "@smithy/node-http-handler": "^4.0.4", + "@smithy/types": "^4.2.0", + "@smithy/util-base64": "^4.0.0", + "@smithy/util-buffer-from": "^4.0.0", + "@smithy/util-hex-encoding": "^4.0.0", + "@smithy/util-utf8": "^4.0.0", "tslib": "^2.6.2" }, "engines": { - "node": ">=16.0.0" + "node": ">=18.0.0" } }, - 
"node_modules/@aws-sdk/client-rds/node_modules/@smithy/util-utf8": { - "version": "3.0.0", - "resolved": "https://registry.npmjs.org/@smithy/util-utf8/-/util-utf8-3.0.0.tgz", - "integrity": "sha512-rUeT12bxFnplYDe815GXbq/oixEGHfRFFtcTF3YdDi/JaENIM6aSYYLJydG83UNzLXeRI5K8abYd/8Sp/QM0kA==", - "dev": true, + "node_modules/@aws-sdk/lib-dynamodb/node_modules/@smithy/util-uri-escape": { + "version": "4.0.0", + "resolved": "https://registry.npmjs.org/@smithy/util-uri-escape/-/util-uri-escape-4.0.0.tgz", + "integrity": "sha512-77yfbCbQMtgtTylO9itEAdpPXSog3ZxMe09AEhm0dU0NLTalV70ghDZFR+Nfi1C60jnJoh/Re4090/DuZh2Omg==", "dependencies": { - "@smithy/util-buffer-from": "^3.0.0", "tslib": "^2.6.2" }, "engines": { - "node": ">=16.0.0" + "node": ">=18.0.0" } }, - "node_modules/@aws-sdk/client-s3": { - "version": "3.678.0", - "resolved": "https://registry.npmjs.org/@aws-sdk/client-s3/-/client-s3-3.678.0.tgz", - "integrity": "sha512-2N+cGerOtcijYVRThakA1wwaXjdb7bNX8fMnmNzfqsRu1kASCPNvefhPTAiNl//Hf2l2d+H8TdI3wtLw0KurBQ==", - "dev": true, + "node_modules/@aws-sdk/lib-dynamodb/node_modules/@smithy/util-utf8": { + "version": "4.0.0", + "resolved": "https://registry.npmjs.org/@smithy/util-utf8/-/util-utf8-4.0.0.tgz", + "integrity": "sha512-b+zebfKCfRdgNJDknHCob3O7FpeYQN6ZG6YLExMcasDHsCXlsXCEuiPZeLnJLpwa5dvPetGlnGCiMHuLwGvFow==", "dependencies": { - "@aws-crypto/sha1-browser": "5.2.0", - "@aws-crypto/sha256-browser": "5.2.0", - "@aws-crypto/sha256-js": "5.2.0", - "@aws-sdk/client-sso-oidc": "3.678.0", - "@aws-sdk/client-sts": "3.678.0", - "@aws-sdk/core": "3.678.0", - "@aws-sdk/credential-provider-node": "3.678.0", - "@aws-sdk/middleware-bucket-endpoint": "3.667.0", - "@aws-sdk/middleware-expect-continue": "3.667.0", - "@aws-sdk/middleware-flexible-checksums": "3.678.0", - "@aws-sdk/middleware-host-header": "3.667.0", - "@aws-sdk/middleware-location-constraint": "3.667.0", - "@aws-sdk/middleware-logger": "3.667.0", - "@aws-sdk/middleware-recursion-detection": "3.667.0", - 
"@aws-sdk/middleware-sdk-s3": "3.678.0", - "@aws-sdk/middleware-ssec": "3.667.0", - "@aws-sdk/middleware-user-agent": "3.678.0", - "@aws-sdk/region-config-resolver": "3.667.0", - "@aws-sdk/signature-v4-multi-region": "3.678.0", - "@aws-sdk/types": "3.667.0", - "@aws-sdk/util-endpoints": "3.667.0", - "@aws-sdk/util-user-agent-browser": "3.675.0", - "@aws-sdk/util-user-agent-node": "3.678.0", - "@aws-sdk/xml-builder": "3.662.0", - "@smithy/config-resolver": "^3.0.9", - "@smithy/core": "^2.4.8", - "@smithy/eventstream-serde-browser": "^3.0.10", - "@smithy/eventstream-serde-config-resolver": "^3.0.7", - "@smithy/eventstream-serde-node": "^3.0.9", - "@smithy/fetch-http-handler": "^3.2.9", - "@smithy/hash-blob-browser": "^3.1.6", - "@smithy/hash-node": "^3.0.7", - "@smithy/hash-stream-node": "^3.1.6", - "@smithy/invalid-dependency": "^3.0.7", - "@smithy/md5-js": "^3.0.7", - "@smithy/middleware-content-length": "^3.0.9", - "@smithy/middleware-endpoint": "^3.1.4", - "@smithy/middleware-retry": "^3.0.23", - "@smithy/middleware-serde": "^3.0.7", - "@smithy/middleware-stack": "^3.0.7", - "@smithy/node-config-provider": "^3.1.8", - "@smithy/node-http-handler": "^3.2.4", - "@smithy/protocol-http": "^4.1.4", - "@smithy/smithy-client": "^3.4.0", - "@smithy/types": "^3.5.0", - "@smithy/url-parser": "^3.0.7", - "@smithy/util-base64": "^3.0.0", - "@smithy/util-body-length-browser": "^3.0.0", - "@smithy/util-body-length-node": "^3.0.0", - "@smithy/util-defaults-mode-browser": "^3.0.23", - "@smithy/util-defaults-mode-node": "^3.0.23", - "@smithy/util-endpoints": "^2.1.3", - "@smithy/util-middleware": "^3.0.7", - "@smithy/util-retry": "^3.0.7", - "@smithy/util-stream": "^3.1.9", - "@smithy/util-utf8": "^3.0.0", - "@smithy/util-waiter": "^3.1.6", + "@smithy/util-buffer-from": "^4.0.0", "tslib": "^2.6.2" }, "engines": { - "node": ">=16.0.0" + "node": ">=18.0.0" } }, - "node_modules/@aws-sdk/client-s3/node_modules/@aws-sdk/region-config-resolver": { + 
"node_modules/@aws-sdk/middleware-bucket-endpoint": { "version": "3.667.0", - "resolved": "https://registry.npmjs.org/@aws-sdk/region-config-resolver/-/region-config-resolver-3.667.0.tgz", - "integrity": "sha512-iNr+JhhA902JMKHG9IwT9YdaEx6KGl6vjAL5BRNeOjfj4cZYMog6Lz/IlfOAltMtT0w88DAHDEFrBd2uO0l2eg==", + "resolved": "https://registry.npmjs.org/@aws-sdk/middleware-bucket-endpoint/-/middleware-bucket-endpoint-3.667.0.tgz", + "integrity": "sha512-XGz4jMAkDoTyFdtLz7ZF+C05IAhCTC1PllpvTBaj821z/L0ilhbqVhrT/f2Buw8Id/K5A390csGXgusXyrFFjA==", "dev": true, "dependencies": { "@aws-sdk/types": "3.667.0", + "@aws-sdk/util-arn-parser": "3.568.0", "@smithy/node-config-provider": "^3.1.8", + "@smithy/protocol-http": "^4.1.4", "@smithy/types": "^3.5.0", "@smithy/util-config-provider": "^3.0.0", - "@smithy/util-middleware": "^3.0.7", "tslib": "^2.6.2" }, "engines": { "node": ">=16.0.0" } }, - "node_modules/@aws-sdk/client-s3/node_modules/@aws-sdk/types": { + "node_modules/@aws-sdk/middleware-bucket-endpoint/node_modules/@aws-sdk/types": { "version": "3.667.0", "resolved": "https://registry.npmjs.org/@aws-sdk/types/-/types-3.667.0.tgz", "integrity": "sha512-gYq0xCsqFfQaSL/yT1Gl1vIUjtsg7d7RhnUfsXaHt8xTxOKRTdH9GjbesBjXOzgOvB0W0vfssfreSNGFlOOMJg==", @@ -12633,7 +14929,7 @@ "node": ">=16.0.0" } }, - "node_modules/@aws-sdk/client-s3/node_modules/@smithy/node-config-provider": { + "node_modules/@aws-sdk/middleware-bucket-endpoint/node_modules/@smithy/node-config-provider": { "version": "3.1.9", "resolved": "https://registry.npmjs.org/@smithy/node-config-provider/-/node-config-provider-3.1.9.tgz", "integrity": "sha512-qRHoah49QJ71eemjuS/WhUXB+mpNtwHRWQr77J/m40ewBVVwvo52kYAmb7iuaECgGTTcYxHS4Wmewfwy++ueew==", @@ -12648,7 +14944,7 @@ "node": ">=16.0.0" } }, - "node_modules/@aws-sdk/client-s3/node_modules/@smithy/shared-ini-file-loader": { + "node_modules/@aws-sdk/middleware-bucket-endpoint/node_modules/@smithy/shared-ini-file-loader": { "version": "3.1.9", "resolved": 
"https://registry.npmjs.org/@smithy/shared-ini-file-loader/-/shared-ini-file-loader-3.1.9.tgz", "integrity": "sha512-/+OsJRNtoRbtsX0UpSgWVxFZLsJHo/4sTr+kBg/J78sr7iC+tHeOvOJrS5hCpVQ6sWBbhWLp1UNiuMyZhE6pmA==", @@ -12661,265 +14957,134 @@ "node": ">=16.0.0" } }, - "node_modules/@aws-sdk/client-s3/node_modules/@smithy/util-utf8": { - "version": "3.0.0", - "resolved": "https://registry.npmjs.org/@smithy/util-utf8/-/util-utf8-3.0.0.tgz", - "integrity": "sha512-rUeT12bxFnplYDe815GXbq/oixEGHfRFFtcTF3YdDi/JaENIM6aSYYLJydG83UNzLXeRI5K8abYd/8Sp/QM0kA==", - "dev": true, - "dependencies": { - "@smithy/util-buffer-from": "^3.0.0", - "tslib": "^2.6.2" - }, - "engines": { - "node": ">=16.0.0" - } - }, - "node_modules/@aws-sdk/client-ssm": { - "version": "3.678.0", - "resolved": "https://registry.npmjs.org/@aws-sdk/client-ssm/-/client-ssm-3.678.0.tgz", - "integrity": "sha512-vJO7iieQq09bMKaGgESibzZaLgm0MIuR9m7SmEPZGMJ4wKhgOosm/P8lFMU+q0lHtCHoxdvjSYcUQga6ZN+fww==", - "dev": true, - "dependencies": { - "@aws-crypto/sha256-browser": "5.2.0", - "@aws-crypto/sha256-js": "5.2.0", - "@aws-sdk/client-sso-oidc": "3.678.0", - "@aws-sdk/client-sts": "3.678.0", - "@aws-sdk/core": "3.678.0", - "@aws-sdk/credential-provider-node": "3.678.0", - "@aws-sdk/middleware-host-header": "3.667.0", - "@aws-sdk/middleware-logger": "3.667.0", - "@aws-sdk/middleware-recursion-detection": "3.667.0", - "@aws-sdk/middleware-user-agent": "3.678.0", - "@aws-sdk/region-config-resolver": "3.667.0", - "@aws-sdk/types": "3.667.0", - "@aws-sdk/util-endpoints": "3.667.0", - "@aws-sdk/util-user-agent-browser": "3.675.0", - "@aws-sdk/util-user-agent-node": "3.678.0", - "@smithy/config-resolver": "^3.0.9", - "@smithy/core": "^2.4.8", - "@smithy/fetch-http-handler": "^3.2.9", - "@smithy/hash-node": "^3.0.7", - "@smithy/invalid-dependency": "^3.0.7", - "@smithy/middleware-content-length": "^3.0.9", - "@smithy/middleware-endpoint": "^3.1.4", - "@smithy/middleware-retry": "^3.0.23", - "@smithy/middleware-serde": "^3.0.7", - 
"@smithy/middleware-stack": "^3.0.7", - "@smithy/node-config-provider": "^3.1.8", - "@smithy/node-http-handler": "^3.2.4", - "@smithy/protocol-http": "^4.1.4", - "@smithy/smithy-client": "^3.4.0", - "@smithy/types": "^3.5.0", - "@smithy/url-parser": "^3.0.7", - "@smithy/util-base64": "^3.0.0", - "@smithy/util-body-length-browser": "^3.0.0", - "@smithy/util-body-length-node": "^3.0.0", - "@smithy/util-defaults-mode-browser": "^3.0.23", - "@smithy/util-defaults-mode-node": "^3.0.23", - "@smithy/util-endpoints": "^2.1.3", - "@smithy/util-middleware": "^3.0.7", - "@smithy/util-retry": "^3.0.7", - "@smithy/util-utf8": "^3.0.0", - "@smithy/util-waiter": "^3.1.6", - "@types/uuid": "^9.0.1", - "tslib": "^2.6.2", - "uuid": "^9.0.1" - }, - "engines": { - "node": ">=16.0.0" - } - }, - "node_modules/@aws-sdk/client-ssm/node_modules/@aws-sdk/region-config-resolver": { - "version": "3.667.0", - "resolved": "https://registry.npmjs.org/@aws-sdk/region-config-resolver/-/region-config-resolver-3.667.0.tgz", - "integrity": "sha512-iNr+JhhA902JMKHG9IwT9YdaEx6KGl6vjAL5BRNeOjfj4cZYMog6Lz/IlfOAltMtT0w88DAHDEFrBd2uO0l2eg==", - "dev": true, + "node_modules/@aws-sdk/middleware-endpoint-discovery": { + "version": "3.775.0", + "resolved": "https://registry.npmjs.org/@aws-sdk/middleware-endpoint-discovery/-/middleware-endpoint-discovery-3.775.0.tgz", + "integrity": "sha512-L0PmjSg7t+wovRo/Lin1kpei3e7wBhrENWb1Bbccu3PWUIfxolGeWplOmNhSlXjuQe9GXjf3z8kJRYOGBMFOvw==", "dependencies": { - "@aws-sdk/types": "3.667.0", - "@smithy/node-config-provider": "^3.1.8", - "@smithy/types": "^3.5.0", - "@smithy/util-config-provider": "^3.0.0", - "@smithy/util-middleware": "^3.0.7", + "@aws-sdk/endpoint-cache": "3.723.0", + "@aws-sdk/types": "3.775.0", + "@smithy/node-config-provider": "^4.0.2", + "@smithy/protocol-http": "^5.1.0", + "@smithy/types": "^4.2.0", "tslib": "^2.6.2" }, "engines": { - "node": ">=16.0.0" + "node": ">=18.0.0" } }, - "node_modules/@aws-sdk/client-ssm/node_modules/@aws-sdk/types": { - 
"version": "3.667.0", - "resolved": "https://registry.npmjs.org/@aws-sdk/types/-/types-3.667.0.tgz", - "integrity": "sha512-gYq0xCsqFfQaSL/yT1Gl1vIUjtsg7d7RhnUfsXaHt8xTxOKRTdH9GjbesBjXOzgOvB0W0vfssfreSNGFlOOMJg==", - "dev": true, + "node_modules/@aws-sdk/middleware-endpoint-discovery/node_modules/@smithy/node-config-provider": { + "version": "4.0.2", + "resolved": "https://registry.npmjs.org/@smithy/node-config-provider/-/node-config-provider-4.0.2.tgz", + "integrity": "sha512-WgCkILRZfJwJ4Da92a6t3ozN/zcvYyJGUTmfGbgS/FkCcoCjl7G4FJaCDN1ySdvLvemnQeo25FdkyMSTSwulsw==", "dependencies": { - "@smithy/types": "^3.5.0", + "@smithy/property-provider": "^4.0.2", + "@smithy/shared-ini-file-loader": "^4.0.2", + "@smithy/types": "^4.2.0", "tslib": "^2.6.2" }, "engines": { - "node": ">=16.0.0" + "node": ">=18.0.0" } }, - "node_modules/@aws-sdk/client-ssm/node_modules/@smithy/node-config-provider": { - "version": "3.1.9", - "resolved": "https://registry.npmjs.org/@smithy/node-config-provider/-/node-config-provider-3.1.9.tgz", - "integrity": "sha512-qRHoah49QJ71eemjuS/WhUXB+mpNtwHRWQr77J/m40ewBVVwvo52kYAmb7iuaECgGTTcYxHS4Wmewfwy++ueew==", - "dev": true, + "node_modules/@aws-sdk/middleware-endpoint-discovery/node_modules/@smithy/property-provider": { + "version": "4.0.2", + "resolved": "https://registry.npmjs.org/@smithy/property-provider/-/property-provider-4.0.2.tgz", + "integrity": "sha512-wNRoQC1uISOuNc2s4hkOYwYllmiyrvVXWMtq+TysNRVQaHm4yoafYQyjN/goYZS+QbYlPIbb/QRjaUZMuzwQ7A==", "dependencies": { - "@smithy/property-provider": "^3.1.8", - "@smithy/shared-ini-file-loader": "^3.1.9", - "@smithy/types": "^3.6.0", + "@smithy/types": "^4.2.0", "tslib": "^2.6.2" }, "engines": { - "node": ">=16.0.0" + "node": ">=18.0.0" } }, - "node_modules/@aws-sdk/client-ssm/node_modules/@smithy/shared-ini-file-loader": { - "version": "3.1.9", - "resolved": "https://registry.npmjs.org/@smithy/shared-ini-file-loader/-/shared-ini-file-loader-3.1.9.tgz", - "integrity": 
"sha512-/+OsJRNtoRbtsX0UpSgWVxFZLsJHo/4sTr+kBg/J78sr7iC+tHeOvOJrS5hCpVQ6sWBbhWLp1UNiuMyZhE6pmA==", - "dev": true, + "node_modules/@aws-sdk/middleware-endpoint-discovery/node_modules/@smithy/protocol-http": { + "version": "5.1.0", + "resolved": "https://registry.npmjs.org/@smithy/protocol-http/-/protocol-http-5.1.0.tgz", + "integrity": "sha512-KxAOL1nUNw2JTYrtviRRjEnykIDhxc84qMBzxvu1MUfQfHTuBlCG7PA6EdVwqpJjH7glw7FqQoFxUJSyBQgu7g==", "dependencies": { - "@smithy/types": "^3.6.0", + "@smithy/types": "^4.2.0", "tslib": "^2.6.2" }, "engines": { - "node": ">=16.0.0" + "node": ">=18.0.0" } }, - "node_modules/@aws-sdk/client-ssm/node_modules/@smithy/util-utf8": { - "version": "3.0.0", - "resolved": "https://registry.npmjs.org/@smithy/util-utf8/-/util-utf8-3.0.0.tgz", - "integrity": "sha512-rUeT12bxFnplYDe815GXbq/oixEGHfRFFtcTF3YdDi/JaENIM6aSYYLJydG83UNzLXeRI5K8abYd/8Sp/QM0kA==", - "dev": true, + "node_modules/@aws-sdk/middleware-endpoint-discovery/node_modules/@smithy/shared-ini-file-loader": { + "version": "4.0.2", + "resolved": "https://registry.npmjs.org/@smithy/shared-ini-file-loader/-/shared-ini-file-loader-4.0.2.tgz", + "integrity": "sha512-J9/gTWBGVuFZ01oVA6vdb4DAjf1XbDhK6sLsu3OS9qmLrS6KB5ygpeHiM3miIbj1qgSJ96GYszXFWv6ErJ8QEw==", "dependencies": { - "@smithy/util-buffer-from": "^3.0.0", + "@smithy/types": "^4.2.0", "tslib": "^2.6.2" }, "engines": { - "node": ">=16.0.0" + "node": ">=18.0.0" } }, - "node_modules/@aws-sdk/client-sso": { - "version": "3.678.0", - "resolved": "https://registry.npmjs.org/@aws-sdk/client-sso/-/client-sso-3.678.0.tgz", - "integrity": "sha512-5Fg2BkR1En8iBbiZ18STvLDGPK9Re5MyCmX+hfIhQzPsEf1FRkAkOluEXX79aBva8iWn2oCD/xKBUku4x3eusw==", - "dev": true, - "dependencies": { - "@aws-crypto/sha256-browser": "5.2.0", - "@aws-crypto/sha256-js": "5.2.0", - "@aws-sdk/core": "3.678.0", - "@aws-sdk/middleware-host-header": "3.667.0", - "@aws-sdk/middleware-logger": "3.667.0", - "@aws-sdk/middleware-recursion-detection": "3.667.0", - 
"@aws-sdk/middleware-user-agent": "3.678.0", - "@aws-sdk/region-config-resolver": "3.667.0", - "@aws-sdk/types": "3.667.0", - "@aws-sdk/util-endpoints": "3.667.0", - "@aws-sdk/util-user-agent-browser": "3.675.0", - "@aws-sdk/util-user-agent-node": "3.678.0", - "@smithy/config-resolver": "^3.0.9", - "@smithy/core": "^2.4.8", - "@smithy/fetch-http-handler": "^3.2.9", - "@smithy/hash-node": "^3.0.7", - "@smithy/invalid-dependency": "^3.0.7", - "@smithy/middleware-content-length": "^3.0.9", - "@smithy/middleware-endpoint": "^3.1.4", - "@smithy/middleware-retry": "^3.0.23", - "@smithy/middleware-serde": "^3.0.7", - "@smithy/middleware-stack": "^3.0.7", - "@smithy/node-config-provider": "^3.1.8", - "@smithy/node-http-handler": "^3.2.4", - "@smithy/protocol-http": "^4.1.4", - "@smithy/smithy-client": "^3.4.0", - "@smithy/types": "^3.5.0", - "@smithy/url-parser": "^3.0.7", - "@smithy/util-base64": "^3.0.0", - "@smithy/util-body-length-browser": "^3.0.0", - "@smithy/util-body-length-node": "^3.0.0", - "@smithy/util-defaults-mode-browser": "^3.0.23", - "@smithy/util-defaults-mode-node": "^3.0.23", - "@smithy/util-endpoints": "^2.1.3", - "@smithy/util-middleware": "^3.0.7", - "@smithy/util-retry": "^3.0.7", - "@smithy/util-utf8": "^3.0.0", + "node_modules/@aws-sdk/middleware-endpoint-discovery/node_modules/@smithy/types": { + "version": "4.2.0", + "resolved": "https://registry.npmjs.org/@smithy/types/-/types-4.2.0.tgz", + "integrity": "sha512-7eMk09zQKCO+E/ivsjQv+fDlOupcFUCSC/L2YUPgwhvowVGWbPQHjEFcmjt7QQ4ra5lyowS92SV53Zc6XD4+fg==", + "dependencies": { "tslib": "^2.6.2" }, "engines": { - "node": ">=16.0.0" + "node": ">=18.0.0" } }, - "node_modules/@aws-sdk/client-sso-oidc": { - "version": "3.678.0", - "resolved": "https://registry.npmjs.org/@aws-sdk/client-sso-oidc/-/client-sso-oidc-3.678.0.tgz", - "integrity": "sha512-sgj9Y4zGiwLePLDjqhGoghoZgseh88JkKkwWH558IIte/cf/ix7ezOvptnA0WUlI5Z/329LtkN6O8TRqSJ7MWw==", + "node_modules/@aws-sdk/middleware-expect-continue": { + "version": 
"3.667.0", + "resolved": "https://registry.npmjs.org/@aws-sdk/middleware-expect-continue/-/middleware-expect-continue-3.667.0.tgz", + "integrity": "sha512-0TiSL9S5DSG95NHGIz6qTMuV7GDKVn8tvvGSrSSZu/wXO3JaYSH0AElVpYfc4PtPRqVpEyNA7nnc7W56mMCLWQ==", "dev": true, "dependencies": { - "@aws-crypto/sha256-browser": "5.2.0", - "@aws-crypto/sha256-js": "5.2.0", - "@aws-sdk/core": "3.678.0", - "@aws-sdk/credential-provider-node": "3.678.0", - "@aws-sdk/middleware-host-header": "3.667.0", - "@aws-sdk/middleware-logger": "3.667.0", - "@aws-sdk/middleware-recursion-detection": "3.667.0", - "@aws-sdk/middleware-user-agent": "3.678.0", - "@aws-sdk/region-config-resolver": "3.667.0", "@aws-sdk/types": "3.667.0", - "@aws-sdk/util-endpoints": "3.667.0", - "@aws-sdk/util-user-agent-browser": "3.675.0", - "@aws-sdk/util-user-agent-node": "3.678.0", - "@smithy/config-resolver": "^3.0.9", - "@smithy/core": "^2.4.8", - "@smithy/fetch-http-handler": "^3.2.9", - "@smithy/hash-node": "^3.0.7", - "@smithy/invalid-dependency": "^3.0.7", - "@smithy/middleware-content-length": "^3.0.9", - "@smithy/middleware-endpoint": "^3.1.4", - "@smithy/middleware-retry": "^3.0.23", - "@smithy/middleware-serde": "^3.0.7", - "@smithy/middleware-stack": "^3.0.7", - "@smithy/node-config-provider": "^3.1.8", - "@smithy/node-http-handler": "^3.2.4", "@smithy/protocol-http": "^4.1.4", - "@smithy/smithy-client": "^3.4.0", "@smithy/types": "^3.5.0", - "@smithy/url-parser": "^3.0.7", - "@smithy/util-base64": "^3.0.0", - "@smithy/util-body-length-browser": "^3.0.0", - "@smithy/util-body-length-node": "^3.0.0", - "@smithy/util-defaults-mode-browser": "^3.0.23", - "@smithy/util-defaults-mode-node": "^3.0.23", - "@smithy/util-endpoints": "^2.1.3", - "@smithy/util-middleware": "^3.0.7", - "@smithy/util-retry": "^3.0.7", - "@smithy/util-utf8": "^3.0.0", "tslib": "^2.6.2" }, "engines": { "node": ">=16.0.0" - }, - "peerDependencies": { - "@aws-sdk/client-sts": "^3.678.0" } }, - 
"node_modules/@aws-sdk/client-sso-oidc/node_modules/@aws-sdk/region-config-resolver": { + "node_modules/@aws-sdk/middleware-expect-continue/node_modules/@aws-sdk/types": { "version": "3.667.0", - "resolved": "https://registry.npmjs.org/@aws-sdk/region-config-resolver/-/region-config-resolver-3.667.0.tgz", - "integrity": "sha512-iNr+JhhA902JMKHG9IwT9YdaEx6KGl6vjAL5BRNeOjfj4cZYMog6Lz/IlfOAltMtT0w88DAHDEFrBd2uO0l2eg==", + "resolved": "https://registry.npmjs.org/@aws-sdk/types/-/types-3.667.0.tgz", + "integrity": "sha512-gYq0xCsqFfQaSL/yT1Gl1vIUjtsg7d7RhnUfsXaHt8xTxOKRTdH9GjbesBjXOzgOvB0W0vfssfreSNGFlOOMJg==", + "dev": true, + "dependencies": { + "@smithy/types": "^3.5.0", + "tslib": "^2.6.2" + }, + "engines": { + "node": ">=16.0.0" + } + }, + "node_modules/@aws-sdk/middleware-flexible-checksums": { + "version": "3.678.0", + "resolved": "https://registry.npmjs.org/@aws-sdk/middleware-flexible-checksums/-/middleware-flexible-checksums-3.678.0.tgz", + "integrity": "sha512-IyWWXVvG4IJ9vkagTF8wkNtybKU5SWYIQ1BRDiCmoDyLPOpogNOBVnn10RX9FW7J7BMAUFgtx6N1uMQ8MitDiA==", "dev": true, "dependencies": { + "@aws-crypto/crc32": "5.2.0", + "@aws-crypto/crc32c": "5.2.0", + "@aws-sdk/core": "3.678.0", "@aws-sdk/types": "3.667.0", + "@smithy/is-array-buffer": "^3.0.0", "@smithy/node-config-provider": "^3.1.8", + "@smithy/protocol-http": "^4.1.4", "@smithy/types": "^3.5.0", - "@smithy/util-config-provider": "^3.0.0", "@smithy/util-middleware": "^3.0.7", + "@smithy/util-utf8": "^3.0.0", "tslib": "^2.6.2" }, "engines": { "node": ">=16.0.0" } }, - "node_modules/@aws-sdk/client-sso-oidc/node_modules/@aws-sdk/types": { + "node_modules/@aws-sdk/middleware-flexible-checksums/node_modules/@aws-sdk/types": { "version": "3.667.0", "resolved": "https://registry.npmjs.org/@aws-sdk/types/-/types-3.667.0.tgz", "integrity": "sha512-gYq0xCsqFfQaSL/yT1Gl1vIUjtsg7d7RhnUfsXaHt8xTxOKRTdH9GjbesBjXOzgOvB0W0vfssfreSNGFlOOMJg==", @@ -12932,7 +15097,7 @@ "node": ">=16.0.0" } }, - 
"node_modules/@aws-sdk/client-sso-oidc/node_modules/@smithy/node-config-provider": { + "node_modules/@aws-sdk/middleware-flexible-checksums/node_modules/@smithy/node-config-provider": { "version": "3.1.9", "resolved": "https://registry.npmjs.org/@smithy/node-config-provider/-/node-config-provider-3.1.9.tgz", "integrity": "sha512-qRHoah49QJ71eemjuS/WhUXB+mpNtwHRWQr77J/m40ewBVVwvo52kYAmb7iuaECgGTTcYxHS4Wmewfwy++ueew==", @@ -12947,7 +15112,7 @@ "node": ">=16.0.0" } }, - "node_modules/@aws-sdk/client-sso-oidc/node_modules/@smithy/shared-ini-file-loader": { + "node_modules/@aws-sdk/middleware-flexible-checksums/node_modules/@smithy/shared-ini-file-loader": { "version": "3.1.9", "resolved": "https://registry.npmjs.org/@smithy/shared-ini-file-loader/-/shared-ini-file-loader-3.1.9.tgz", "integrity": "sha512-/+OsJRNtoRbtsX0UpSgWVxFZLsJHo/4sTr+kBg/J78sr7iC+tHeOvOJrS5hCpVQ6sWBbhWLp1UNiuMyZhE6pmA==", @@ -12960,7 +15125,7 @@ "node": ">=16.0.0" } }, - "node_modules/@aws-sdk/client-sso-oidc/node_modules/@smithy/util-utf8": { + "node_modules/@aws-sdk/middleware-flexible-checksums/node_modules/@smithy/util-utf8": { "version": "3.0.0", "resolved": "https://registry.npmjs.org/@smithy/util-utf8/-/util-utf8-3.0.0.tgz", "integrity": "sha512-rUeT12bxFnplYDe815GXbq/oixEGHfRFFtcTF3YdDi/JaENIM6aSYYLJydG83UNzLXeRI5K8abYd/8Sp/QM0kA==", @@ -12973,24 +15138,22 @@ "node": ">=16.0.0" } }, - "node_modules/@aws-sdk/client-sso/node_modules/@aws-sdk/region-config-resolver": { + "node_modules/@aws-sdk/middleware-host-header": { "version": "3.667.0", - "resolved": "https://registry.npmjs.org/@aws-sdk/region-config-resolver/-/region-config-resolver-3.667.0.tgz", - "integrity": "sha512-iNr+JhhA902JMKHG9IwT9YdaEx6KGl6vjAL5BRNeOjfj4cZYMog6Lz/IlfOAltMtT0w88DAHDEFrBd2uO0l2eg==", + "resolved": "https://registry.npmjs.org/@aws-sdk/middleware-host-header/-/middleware-host-header-3.667.0.tgz", + "integrity": "sha512-Z7fIAMQnPegs7JjAQvlOeWXwpMRfegh5eCoIP6VLJIeR6DLfYKbP35JBtt98R6DXslrN2RsbTogjbxPEDQfw1w==", "dev": 
true, "dependencies": { "@aws-sdk/types": "3.667.0", - "@smithy/node-config-provider": "^3.1.8", + "@smithy/protocol-http": "^4.1.4", "@smithy/types": "^3.5.0", - "@smithy/util-config-provider": "^3.0.0", - "@smithy/util-middleware": "^3.0.7", "tslib": "^2.6.2" }, "engines": { "node": ">=16.0.0" } }, - "node_modules/@aws-sdk/client-sso/node_modules/@aws-sdk/types": { + "node_modules/@aws-sdk/middleware-host-header/node_modules/@aws-sdk/types": { "version": "3.667.0", "resolved": "https://registry.npmjs.org/@aws-sdk/types/-/types-3.667.0.tgz", "integrity": "sha512-gYq0xCsqFfQaSL/yT1Gl1vIUjtsg7d7RhnUfsXaHt8xTxOKRTdH9GjbesBjXOzgOvB0W0vfssfreSNGFlOOMJg==", @@ -13003,116 +15166,76 @@ "node": ">=16.0.0" } }, - "node_modules/@aws-sdk/client-sso/node_modules/@smithy/node-config-provider": { - "version": "3.1.9", - "resolved": "https://registry.npmjs.org/@smithy/node-config-provider/-/node-config-provider-3.1.9.tgz", - "integrity": "sha512-qRHoah49QJ71eemjuS/WhUXB+mpNtwHRWQr77J/m40ewBVVwvo52kYAmb7iuaECgGTTcYxHS4Wmewfwy++ueew==", + "node_modules/@aws-sdk/middleware-location-constraint": { + "version": "3.667.0", + "resolved": "https://registry.npmjs.org/@aws-sdk/middleware-location-constraint/-/middleware-location-constraint-3.667.0.tgz", + "integrity": "sha512-ob85H3HhT3/u5O+x0o557xGZ78vSNeSSwMaSitxdsfs2hOuoUl1uk+OeLpi1hkuJnL41FPpokV7TVII2XrFfmg==", "dev": true, "dependencies": { - "@smithy/property-provider": "^3.1.8", - "@smithy/shared-ini-file-loader": "^3.1.9", - "@smithy/types": "^3.6.0", + "@aws-sdk/types": "3.667.0", + "@smithy/types": "^3.5.0", "tslib": "^2.6.2" }, "engines": { "node": ">=16.0.0" } }, - "node_modules/@aws-sdk/client-sso/node_modules/@smithy/shared-ini-file-loader": { - "version": "3.1.9", - "resolved": "https://registry.npmjs.org/@smithy/shared-ini-file-loader/-/shared-ini-file-loader-3.1.9.tgz", - "integrity": "sha512-/+OsJRNtoRbtsX0UpSgWVxFZLsJHo/4sTr+kBg/J78sr7iC+tHeOvOJrS5hCpVQ6sWBbhWLp1UNiuMyZhE6pmA==", + 
"node_modules/@aws-sdk/middleware-location-constraint/node_modules/@aws-sdk/types": { + "version": "3.667.0", + "resolved": "https://registry.npmjs.org/@aws-sdk/types/-/types-3.667.0.tgz", + "integrity": "sha512-gYq0xCsqFfQaSL/yT1Gl1vIUjtsg7d7RhnUfsXaHt8xTxOKRTdH9GjbesBjXOzgOvB0W0vfssfreSNGFlOOMJg==", "dev": true, "dependencies": { - "@smithy/types": "^3.6.0", + "@smithy/types": "^3.5.0", "tslib": "^2.6.2" }, "engines": { "node": ">=16.0.0" } }, - "node_modules/@aws-sdk/client-sso/node_modules/@smithy/util-utf8": { - "version": "3.0.0", - "resolved": "https://registry.npmjs.org/@smithy/util-utf8/-/util-utf8-3.0.0.tgz", - "integrity": "sha512-rUeT12bxFnplYDe815GXbq/oixEGHfRFFtcTF3YdDi/JaENIM6aSYYLJydG83UNzLXeRI5K8abYd/8Sp/QM0kA==", + "node_modules/@aws-sdk/middleware-logger": { + "version": "3.667.0", + "resolved": "https://registry.npmjs.org/@aws-sdk/middleware-logger/-/middleware-logger-3.667.0.tgz", + "integrity": "sha512-PtTRNpNm/5c746jRgZCNg4X9xEJIwggkGJrF0GP9AB1ANg4pc/sF2Fvn1NtqPe9wtQ2stunJprnm5WkCHN7QiA==", "dev": true, "dependencies": { - "@smithy/util-buffer-from": "^3.0.0", + "@aws-sdk/types": "3.667.0", + "@smithy/types": "^3.5.0", "tslib": "^2.6.2" }, "engines": { "node": ">=16.0.0" } }, - "node_modules/@aws-sdk/client-sts": { - "version": "3.678.0", - "resolved": "https://registry.npmjs.org/@aws-sdk/client-sts/-/client-sts-3.678.0.tgz", - "integrity": "sha512-oRtDnbqIuTbBq0xd7XlaugDA41EqRFzWLpPNr4uwkH8L7xwtIByfJG/qXx2OtOiFFasAhMWJLu/DDqWZyp819A==", + "node_modules/@aws-sdk/middleware-logger/node_modules/@aws-sdk/types": { + "version": "3.667.0", + "resolved": "https://registry.npmjs.org/@aws-sdk/types/-/types-3.667.0.tgz", + "integrity": "sha512-gYq0xCsqFfQaSL/yT1Gl1vIUjtsg7d7RhnUfsXaHt8xTxOKRTdH9GjbesBjXOzgOvB0W0vfssfreSNGFlOOMJg==", "dev": true, "dependencies": { - "@aws-crypto/sha256-browser": "5.2.0", - "@aws-crypto/sha256-js": "5.2.0", - "@aws-sdk/client-sso-oidc": "3.678.0", - "@aws-sdk/core": "3.678.0", - "@aws-sdk/credential-provider-node": 
"3.678.0", - "@aws-sdk/middleware-host-header": "3.667.0", - "@aws-sdk/middleware-logger": "3.667.0", - "@aws-sdk/middleware-recursion-detection": "3.667.0", - "@aws-sdk/middleware-user-agent": "3.678.0", - "@aws-sdk/region-config-resolver": "3.667.0", - "@aws-sdk/types": "3.667.0", - "@aws-sdk/util-endpoints": "3.667.0", - "@aws-sdk/util-user-agent-browser": "3.675.0", - "@aws-sdk/util-user-agent-node": "3.678.0", - "@smithy/config-resolver": "^3.0.9", - "@smithy/core": "^2.4.8", - "@smithy/fetch-http-handler": "^3.2.9", - "@smithy/hash-node": "^3.0.7", - "@smithy/invalid-dependency": "^3.0.7", - "@smithy/middleware-content-length": "^3.0.9", - "@smithy/middleware-endpoint": "^3.1.4", - "@smithy/middleware-retry": "^3.0.23", - "@smithy/middleware-serde": "^3.0.7", - "@smithy/middleware-stack": "^3.0.7", - "@smithy/node-config-provider": "^3.1.8", - "@smithy/node-http-handler": "^3.2.4", - "@smithy/protocol-http": "^4.1.4", - "@smithy/smithy-client": "^3.4.0", "@smithy/types": "^3.5.0", - "@smithy/url-parser": "^3.0.7", - "@smithy/util-base64": "^3.0.0", - "@smithy/util-body-length-browser": "^3.0.0", - "@smithy/util-body-length-node": "^3.0.0", - "@smithy/util-defaults-mode-browser": "^3.0.23", - "@smithy/util-defaults-mode-node": "^3.0.23", - "@smithy/util-endpoints": "^2.1.3", - "@smithy/util-middleware": "^3.0.7", - "@smithy/util-retry": "^3.0.7", - "@smithy/util-utf8": "^3.0.0", "tslib": "^2.6.2" }, "engines": { "node": ">=16.0.0" } }, - "node_modules/@aws-sdk/client-sts/node_modules/@aws-sdk/region-config-resolver": { + "node_modules/@aws-sdk/middleware-recursion-detection": { "version": "3.667.0", - "resolved": "https://registry.npmjs.org/@aws-sdk/region-config-resolver/-/region-config-resolver-3.667.0.tgz", - "integrity": "sha512-iNr+JhhA902JMKHG9IwT9YdaEx6KGl6vjAL5BRNeOjfj4cZYMog6Lz/IlfOAltMtT0w88DAHDEFrBd2uO0l2eg==", + "resolved": "https://registry.npmjs.org/@aws-sdk/middleware-recursion-detection/-/middleware-recursion-detection-3.667.0.tgz", + 
"integrity": "sha512-U5glWD3ehFohzpUpopLtmqAlDurGWo2wRGPNgi4SwhWU7UDt6LS7E/UvJjqC0CUrjlzOw+my2A+Ncf+fisMhxQ==", "dev": true, "dependencies": { "@aws-sdk/types": "3.667.0", - "@smithy/node-config-provider": "^3.1.8", + "@smithy/protocol-http": "^4.1.4", "@smithy/types": "^3.5.0", - "@smithy/util-config-provider": "^3.0.0", - "@smithy/util-middleware": "^3.0.7", "tslib": "^2.6.2" }, "engines": { "node": ">=16.0.0" } }, - "node_modules/@aws-sdk/client-sts/node_modules/@aws-sdk/types": { + "node_modules/@aws-sdk/middleware-recursion-detection/node_modules/@aws-sdk/types": { "version": "3.667.0", "resolved": "https://registry.npmjs.org/@aws-sdk/types/-/types-3.667.0.tgz", "integrity": "sha512-gYq0xCsqFfQaSL/yT1Gl1vIUjtsg7d7RhnUfsXaHt8xTxOKRTdH9GjbesBjXOzgOvB0W0vfssfreSNGFlOOMJg==", @@ -13125,70 +15248,95 @@ "node": ">=16.0.0" } }, - "node_modules/@aws-sdk/client-sts/node_modules/@smithy/node-config-provider": { - "version": "3.1.9", - "resolved": "https://registry.npmjs.org/@smithy/node-config-provider/-/node-config-provider-3.1.9.tgz", - "integrity": "sha512-qRHoah49QJ71eemjuS/WhUXB+mpNtwHRWQr77J/m40ewBVVwvo52kYAmb7iuaECgGTTcYxHS4Wmewfwy++ueew==", + "node_modules/@aws-sdk/middleware-sdk-ec2": { + "version": "3.622.0", + "resolved": "https://registry.npmjs.org/@aws-sdk/middleware-sdk-ec2/-/middleware-sdk-ec2-3.622.0.tgz", + "integrity": "sha512-rVShV+eB1vovLuvlzUEFuxZB4yxSMFzyP+VNIoFxtSZh0LWh7+7bNLwp1I9Vq3SxHLMVYQevjm7nkiPM0DG+RQ==", "dev": true, "dependencies": { - "@smithy/property-provider": "^3.1.8", - "@smithy/shared-ini-file-loader": "^3.1.9", - "@smithy/types": "^3.6.0", + "@aws-sdk/types": "3.609.0", + "@aws-sdk/util-format-url": "3.609.0", + "@smithy/middleware-endpoint": "^3.1.0", + "@smithy/protocol-http": "^4.1.0", + "@smithy/signature-v4": "^4.1.0", + "@smithy/smithy-client": "^3.1.12", + "@smithy/types": "^3.3.0", "tslib": "^2.6.2" }, "engines": { "node": ">=16.0.0" } }, - "node_modules/@aws-sdk/client-sts/node_modules/@smithy/shared-ini-file-loader": { - 
"version": "3.1.9", - "resolved": "https://registry.npmjs.org/@smithy/shared-ini-file-loader/-/shared-ini-file-loader-3.1.9.tgz", - "integrity": "sha512-/+OsJRNtoRbtsX0UpSgWVxFZLsJHo/4sTr+kBg/J78sr7iC+tHeOvOJrS5hCpVQ6sWBbhWLp1UNiuMyZhE6pmA==", + "node_modules/@aws-sdk/middleware-sdk-ec2/node_modules/@aws-sdk/types": { + "version": "3.609.0", + "resolved": "https://registry.npmjs.org/@aws-sdk/types/-/types-3.609.0.tgz", + "integrity": "sha512-+Tqnh9w0h2LcrUsdXyT1F8mNhXz+tVYBtP19LpeEGntmvHwa2XzvLUCWpoIAIVsHp5+HdB2X9Sn0KAtmbFXc2Q==", "dev": true, "dependencies": { - "@smithy/types": "^3.6.0", + "@smithy/types": "^3.3.0", "tslib": "^2.6.2" }, "engines": { "node": ">=16.0.0" } }, - "node_modules/@aws-sdk/client-sts/node_modules/@smithy/util-utf8": { - "version": "3.0.0", - "resolved": "https://registry.npmjs.org/@smithy/util-utf8/-/util-utf8-3.0.0.tgz", - "integrity": "sha512-rUeT12bxFnplYDe815GXbq/oixEGHfRFFtcTF3YdDi/JaENIM6aSYYLJydG83UNzLXeRI5K8abYd/8Sp/QM0kA==", + "node_modules/@aws-sdk/middleware-sdk-rds": { + "version": "3.620.0", + "resolved": "https://registry.npmjs.org/@aws-sdk/middleware-sdk-rds/-/middleware-sdk-rds-3.620.0.tgz", + "integrity": "sha512-pokuq3rMJfn8ZNUIhAKn0c1nQtvClPLzh5h1fOXAeRXmNjp+YPXQ4CIsGRcqDNO8lkUyyfV42WnPCdUZmR9zAA==", "dev": true, "dependencies": { - "@smithy/util-buffer-from": "^3.0.0", + "@aws-sdk/types": "3.609.0", + "@aws-sdk/util-format-url": "3.609.0", + "@smithy/middleware-endpoint": "^3.1.0", + "@smithy/protocol-http": "^4.1.0", + "@smithy/signature-v4": "^4.1.0", + "@smithy/types": "^3.3.0", "tslib": "^2.6.2" }, "engines": { "node": ">=16.0.0" } }, - "node_modules/@aws-sdk/core": { + "node_modules/@aws-sdk/middleware-sdk-rds/node_modules/@aws-sdk/types": { + "version": "3.609.0", + "resolved": "https://registry.npmjs.org/@aws-sdk/types/-/types-3.609.0.tgz", + "integrity": "sha512-+Tqnh9w0h2LcrUsdXyT1F8mNhXz+tVYBtP19LpeEGntmvHwa2XzvLUCWpoIAIVsHp5+HdB2X9Sn0KAtmbFXc2Q==", + "dev": true, + "dependencies": { + "@smithy/types": 
"^3.3.0", + "tslib": "^2.6.2" + }, + "engines": { + "node": ">=16.0.0" + } + }, + "node_modules/@aws-sdk/middleware-sdk-s3": { "version": "3.678.0", - "resolved": "https://registry.npmjs.org/@aws-sdk/core/-/core-3.678.0.tgz", - "integrity": "sha512-ZTzybFZqSaPQymgRkTl08vk6xilaxr8LnJOc0h3KhcHLK4TJmdOcxqPpa6QxrBKcn2rmxzGiPRbAHLGI+BIxBw==", + "resolved": "https://registry.npmjs.org/@aws-sdk/middleware-sdk-s3/-/middleware-sdk-s3-3.678.0.tgz", + "integrity": "sha512-AT4oKh4kPGWG+Ews9M/KYB/TdSvRJLxhVvrVXFxStm9OgeNksxsHH02gnyEOfmGX08ouNRyeaIsqG9RsvXz6Gg==", "dev": true, "dependencies": { + "@aws-sdk/core": "3.678.0", "@aws-sdk/types": "3.667.0", + "@aws-sdk/util-arn-parser": "3.568.0", "@smithy/core": "^2.4.8", "@smithy/node-config-provider": "^3.1.8", - "@smithy/property-provider": "^3.1.7", "@smithy/protocol-http": "^4.1.4", "@smithy/signature-v4": "^4.2.0", "@smithy/smithy-client": "^3.4.0", "@smithy/types": "^3.5.0", + "@smithy/util-config-provider": "^3.0.0", "@smithy/util-middleware": "^3.0.7", - "fast-xml-parser": "4.4.1", + "@smithy/util-stream": "^3.1.9", + "@smithy/util-utf8": "^3.0.0", "tslib": "^2.6.2" }, "engines": { "node": ">=16.0.0" } }, - "node_modules/@aws-sdk/core/node_modules/@aws-sdk/types": { + "node_modules/@aws-sdk/middleware-sdk-s3/node_modules/@aws-sdk/types": { "version": "3.667.0", "resolved": "https://registry.npmjs.org/@aws-sdk/types/-/types-3.667.0.tgz", "integrity": "sha512-gYq0xCsqFfQaSL/yT1Gl1vIUjtsg7d7RhnUfsXaHt8xTxOKRTdH9GjbesBjXOzgOvB0W0vfssfreSNGFlOOMJg==", @@ -13201,7 +15349,7 @@ "node": ">=16.0.0" } }, - "node_modules/@aws-sdk/core/node_modules/@smithy/node-config-provider": { + "node_modules/@aws-sdk/middleware-sdk-s3/node_modules/@smithy/node-config-provider": { "version": "3.1.9", "resolved": "https://registry.npmjs.org/@smithy/node-config-provider/-/node-config-provider-3.1.9.tgz", "integrity": "sha512-qRHoah49QJ71eemjuS/WhUXB+mpNtwHRWQr77J/m40ewBVVwvo52kYAmb7iuaECgGTTcYxHS4Wmewfwy++ueew==", @@ -13216,7 +15364,7 @@ "node": 
">=16.0.0" } }, - "node_modules/@aws-sdk/core/node_modules/@smithy/shared-ini-file-loader": { + "node_modules/@aws-sdk/middleware-sdk-s3/node_modules/@smithy/shared-ini-file-loader": { "version": "3.1.9", "resolved": "https://registry.npmjs.org/@smithy/shared-ini-file-loader/-/shared-ini-file-loader-3.1.9.tgz", "integrity": "sha512-/+OsJRNtoRbtsX0UpSgWVxFZLsJHo/4sTr+kBg/J78sr7iC+tHeOvOJrS5hCpVQ6sWBbhWLp1UNiuMyZhE6pmA==", @@ -13229,44 +15377,26 @@ "node": ">=16.0.0" } }, - "node_modules/@aws-sdk/credential-provider-cognito-identity": { - "version": "3.678.0", - "resolved": "https://registry.npmjs.org/@aws-sdk/credential-provider-cognito-identity/-/credential-provider-cognito-identity-3.678.0.tgz", - "integrity": "sha512-t9bgu2Kc0H8FdQsSrkIJ42vis0CaVxUlA0wmmNyh268ZZyT9lKXUmf91QIhWbZ1zHx8Ek2u301xusoIaj4mLHA==", + "node_modules/@aws-sdk/middleware-sdk-s3/node_modules/@smithy/util-utf8": { + "version": "3.0.0", + "resolved": "https://registry.npmjs.org/@smithy/util-utf8/-/util-utf8-3.0.0.tgz", + "integrity": "sha512-rUeT12bxFnplYDe815GXbq/oixEGHfRFFtcTF3YdDi/JaENIM6aSYYLJydG83UNzLXeRI5K8abYd/8Sp/QM0kA==", "dev": true, "dependencies": { - "@aws-sdk/client-cognito-identity": "3.678.0", - "@aws-sdk/types": "3.667.0", - "@smithy/property-provider": "^3.1.7", - "@smithy/types": "^3.5.0", + "@smithy/util-buffer-from": "^3.0.0", "tslib": "^2.6.2" }, "engines": { "node": ">=16.0.0" } }, - "node_modules/@aws-sdk/credential-provider-cognito-identity/node_modules/@aws-sdk/types": { + "node_modules/@aws-sdk/middleware-ssec": { "version": "3.667.0", - "resolved": "https://registry.npmjs.org/@aws-sdk/types/-/types-3.667.0.tgz", - "integrity": "sha512-gYq0xCsqFfQaSL/yT1Gl1vIUjtsg7d7RhnUfsXaHt8xTxOKRTdH9GjbesBjXOzgOvB0W0vfssfreSNGFlOOMJg==", - "dev": true, - "dependencies": { - "@smithy/types": "^3.5.0", - "tslib": "^2.6.2" - }, - "engines": { - "node": ">=16.0.0" - } - }, - "node_modules/@aws-sdk/credential-provider-env": { - "version": "3.678.0", - "resolved": 
"https://registry.npmjs.org/@aws-sdk/credential-provider-env/-/credential-provider-env-3.678.0.tgz", - "integrity": "sha512-29uhXAB7uJqHtvJ2U3pi1YkMfv0WefW9EmSMoFAunjudXXBVktwTlWg0lyCM+KHrGKLkQyfs5UF/A9IelS8tdQ==", + "resolved": "https://registry.npmjs.org/@aws-sdk/middleware-ssec/-/middleware-ssec-3.667.0.tgz", + "integrity": "sha512-1wuAUZIkmZIvOmGg5qNQU821CGFHhkuKioxXgNh0DpUxZ9+AeiV7yorJr+bqkb2KBFv1i1TnzGRecvKf/KvZIQ==", "dev": true, "dependencies": { - "@aws-sdk/core": "3.678.0", "@aws-sdk/types": "3.667.0", - "@smithy/property-provider": "^3.1.7", "@smithy/types": "^3.5.0", "tslib": "^2.6.2" }, @@ -13274,7 +15404,7 @@ "node": ">=16.0.0" } }, - "node_modules/@aws-sdk/credential-provider-env/node_modules/@aws-sdk/types": { + "node_modules/@aws-sdk/middleware-ssec/node_modules/@aws-sdk/types": { "version": "3.667.0", "resolved": "https://registry.npmjs.org/@aws-sdk/types/-/types-3.667.0.tgz", "integrity": "sha512-gYq0xCsqFfQaSL/yT1Gl1vIUjtsg7d7RhnUfsXaHt8xTxOKRTdH9GjbesBjXOzgOvB0W0vfssfreSNGFlOOMJg==", @@ -13287,28 +15417,25 @@ "node": ">=16.0.0" } }, - "node_modules/@aws-sdk/credential-provider-http": { + "node_modules/@aws-sdk/middleware-user-agent": { "version": "3.678.0", - "resolved": "https://registry.npmjs.org/@aws-sdk/credential-provider-http/-/credential-provider-http-3.678.0.tgz", - "integrity": "sha512-EvpmP0nc7ddRp0qwJOSu0uBXa+MMk4+OLlyEJcdaHnZI4/BoyVWr5fJUD5eQYZk11LZPZSvnsliYXWwLyVNXHQ==", + "resolved": "https://registry.npmjs.org/@aws-sdk/middleware-user-agent/-/middleware-user-agent-3.678.0.tgz", + "integrity": "sha512-tg9cC5COgGP0cznD2ys9kxPtVeKUygPZshDWXLAfA/cH/4m2ZUBvoEVv1SxkIbvOjnPwa976rdPLQUwRZvsL0g==", "dev": true, "dependencies": { "@aws-sdk/core": "3.678.0", "@aws-sdk/types": "3.667.0", - "@smithy/fetch-http-handler": "^3.2.9", - "@smithy/node-http-handler": "^3.2.4", - "@smithy/property-provider": "^3.1.7", + "@aws-sdk/util-endpoints": "3.667.0", + "@smithy/core": "^2.4.8", "@smithy/protocol-http": "^4.1.4", - "@smithy/smithy-client": 
"^3.4.0", "@smithy/types": "^3.5.0", - "@smithy/util-stream": "^3.1.9", "tslib": "^2.6.2" }, "engines": { "node": ">=16.0.0" } }, - "node_modules/@aws-sdk/credential-provider-http/node_modules/@aws-sdk/types": { + "node_modules/@aws-sdk/middleware-user-agent/node_modules/@aws-sdk/types": { "version": "3.667.0", "resolved": "https://registry.npmjs.org/@aws-sdk/types/-/types-3.667.0.tgz", "integrity": "sha512-gYq0xCsqFfQaSL/yT1Gl1vIUjtsg7d7RhnUfsXaHt8xTxOKRTdH9GjbesBjXOzgOvB0W0vfssfreSNGFlOOMJg==", @@ -13321,784 +15448,793 @@ "node": ">=16.0.0" } }, - "node_modules/@aws-sdk/credential-provider-ini": { - "version": "3.678.0", - "resolved": "https://registry.npmjs.org/@aws-sdk/credential-provider-ini/-/credential-provider-ini-3.678.0.tgz", - "integrity": "sha512-8kHy7V5rRO73EpBCUclykP9T/QIBVi0SkQsc88ZRxpdh59/JY2N6DT5khMTzrz9+Vvlw3FDMJN4AI/qWjJHhdw==", - "dev": true, + "node_modules/@aws-sdk/nested-clients": { + "version": "3.799.0", + "resolved": "https://registry.npmjs.org/@aws-sdk/nested-clients/-/nested-clients-3.799.0.tgz", + "integrity": "sha512-zILlWh7asrcQG9JYMYgnvEQBfwmWKfED0yWCf3UNAmQcfS9wkCAWCgicNy/y5KvNvEYnHidsU117STtyuUNG5g==", "dependencies": { - "@aws-sdk/core": "3.678.0", - "@aws-sdk/credential-provider-env": "3.678.0", - "@aws-sdk/credential-provider-http": "3.678.0", - "@aws-sdk/credential-provider-process": "3.678.0", - "@aws-sdk/credential-provider-sso": "3.678.0", - "@aws-sdk/credential-provider-web-identity": "3.678.0", - "@aws-sdk/types": "3.667.0", - "@smithy/credential-provider-imds": "^3.2.4", - "@smithy/property-provider": "^3.1.7", - "@smithy/shared-ini-file-loader": "^3.1.8", - "@smithy/types": "^3.5.0", + "@aws-crypto/sha256-browser": "5.2.0", + "@aws-crypto/sha256-js": "5.2.0", + "@aws-sdk/core": "3.799.0", + "@aws-sdk/middleware-host-header": "3.775.0", + "@aws-sdk/middleware-logger": "3.775.0", + "@aws-sdk/middleware-recursion-detection": "3.775.0", + "@aws-sdk/middleware-user-agent": "3.799.0", + "@aws-sdk/region-config-resolver": 
"3.775.0", + "@aws-sdk/types": "3.775.0", + "@aws-sdk/util-endpoints": "3.787.0", + "@aws-sdk/util-user-agent-browser": "3.775.0", + "@aws-sdk/util-user-agent-node": "3.799.0", + "@smithy/config-resolver": "^4.1.0", + "@smithy/core": "^3.3.0", + "@smithy/fetch-http-handler": "^5.0.2", + "@smithy/hash-node": "^4.0.2", + "@smithy/invalid-dependency": "^4.0.2", + "@smithy/middleware-content-length": "^4.0.2", + "@smithy/middleware-endpoint": "^4.1.1", + "@smithy/middleware-retry": "^4.1.1", + "@smithy/middleware-serde": "^4.0.3", + "@smithy/middleware-stack": "^4.0.2", + "@smithy/node-config-provider": "^4.0.2", + "@smithy/node-http-handler": "^4.0.4", + "@smithy/protocol-http": "^5.1.0", + "@smithy/smithy-client": "^4.2.1", + "@smithy/types": "^4.2.0", + "@smithy/url-parser": "^4.0.2", + "@smithy/util-base64": "^4.0.0", + "@smithy/util-body-length-browser": "^4.0.0", + "@smithy/util-body-length-node": "^4.0.0", + "@smithy/util-defaults-mode-browser": "^4.0.9", + "@smithy/util-defaults-mode-node": "^4.0.9", + "@smithy/util-endpoints": "^3.0.2", + "@smithy/util-middleware": "^4.0.2", + "@smithy/util-retry": "^4.0.2", + "@smithy/util-utf8": "^4.0.0", "tslib": "^2.6.2" }, "engines": { - "node": ">=16.0.0" + "node": ">=18.0.0" + } + }, + "node_modules/@aws-sdk/nested-clients/node_modules/@aws-sdk/core": { + "version": "3.799.0", + "resolved": "https://registry.npmjs.org/@aws-sdk/core/-/core-3.799.0.tgz", + "integrity": "sha512-hkKF3Zpc6+H8GI1rlttYVRh9uEE77cqAzLmLpY3iu7sql8cZgPERRBfaFct8p1SaDyrksLNiboD1vKW58mbsYg==", + "dependencies": { + "@aws-sdk/types": "3.775.0", + "@smithy/core": "^3.3.0", + "@smithy/node-config-provider": "^4.0.2", + "@smithy/property-provider": "^4.0.2", + "@smithy/protocol-http": "^5.1.0", + "@smithy/signature-v4": "^5.1.0", + "@smithy/smithy-client": "^4.2.1", + "@smithy/types": "^4.2.0", + "@smithy/util-middleware": "^4.0.2", + "fast-xml-parser": "4.4.1", + "tslib": "^2.6.2" }, - "peerDependencies": { - "@aws-sdk/client-sts": "^3.678.0" + 
"engines": { + "node": ">=18.0.0" } }, - "node_modules/@aws-sdk/credential-provider-ini/node_modules/@aws-sdk/types": { - "version": "3.667.0", - "resolved": "https://registry.npmjs.org/@aws-sdk/types/-/types-3.667.0.tgz", - "integrity": "sha512-gYq0xCsqFfQaSL/yT1Gl1vIUjtsg7d7RhnUfsXaHt8xTxOKRTdH9GjbesBjXOzgOvB0W0vfssfreSNGFlOOMJg==", - "dev": true, + "node_modules/@aws-sdk/nested-clients/node_modules/@aws-sdk/middleware-host-header": { + "version": "3.775.0", + "resolved": "https://registry.npmjs.org/@aws-sdk/middleware-host-header/-/middleware-host-header-3.775.0.tgz", + "integrity": "sha512-tkSegM0Z6WMXpLB8oPys/d+umYIocvO298mGvcMCncpRl77L9XkvSLJIFzaHes+o7djAgIduYw8wKIMStFss2w==", "dependencies": { - "@smithy/types": "^3.5.0", + "@aws-sdk/types": "3.775.0", + "@smithy/protocol-http": "^5.1.0", + "@smithy/types": "^4.2.0", "tslib": "^2.6.2" }, "engines": { - "node": ">=16.0.0" + "node": ">=18.0.0" } }, - "node_modules/@aws-sdk/credential-provider-ini/node_modules/@smithy/shared-ini-file-loader": { - "version": "3.1.9", - "resolved": "https://registry.npmjs.org/@smithy/shared-ini-file-loader/-/shared-ini-file-loader-3.1.9.tgz", - "integrity": "sha512-/+OsJRNtoRbtsX0UpSgWVxFZLsJHo/4sTr+kBg/J78sr7iC+tHeOvOJrS5hCpVQ6sWBbhWLp1UNiuMyZhE6pmA==", - "dev": true, + "node_modules/@aws-sdk/nested-clients/node_modules/@aws-sdk/middleware-logger": { + "version": "3.775.0", + "resolved": "https://registry.npmjs.org/@aws-sdk/middleware-logger/-/middleware-logger-3.775.0.tgz", + "integrity": "sha512-FaxO1xom4MAoUJsldmR92nT1G6uZxTdNYOFYtdHfd6N2wcNaTuxgjIvqzg5y7QIH9kn58XX/dzf1iTjgqUStZw==", "dependencies": { - "@smithy/types": "^3.6.0", + "@aws-sdk/types": "3.775.0", + "@smithy/types": "^4.2.0", "tslib": "^2.6.2" }, "engines": { - "node": ">=16.0.0" + "node": ">=18.0.0" } }, - "node_modules/@aws-sdk/credential-provider-node": { - "version": "3.678.0", - "resolved": "https://registry.npmjs.org/@aws-sdk/credential-provider-node/-/credential-provider-node-3.678.0.tgz", - "integrity": 
"sha512-KGRBVD/oNr/aD+Wy5zc5AjfeSv5b4ahAu5eAUbOz+eGjGpGgrMtjY+R2rDY/3i3wFj9/DvOIfFGeZQMwtDzIuA==", - "dev": true, + "node_modules/@aws-sdk/nested-clients/node_modules/@aws-sdk/middleware-recursion-detection": { + "version": "3.775.0", + "resolved": "https://registry.npmjs.org/@aws-sdk/middleware-recursion-detection/-/middleware-recursion-detection-3.775.0.tgz", + "integrity": "sha512-GLCzC8D0A0YDG5u3F5U03Vb9j5tcOEFhr8oc6PDk0k0vm5VwtZOE6LvK7hcCSoAB4HXyOUM0sQuXrbaAh9OwXA==", "dependencies": { - "@aws-sdk/credential-provider-env": "3.678.0", - "@aws-sdk/credential-provider-http": "3.678.0", - "@aws-sdk/credential-provider-ini": "3.678.0", - "@aws-sdk/credential-provider-process": "3.678.0", - "@aws-sdk/credential-provider-sso": "3.678.0", - "@aws-sdk/credential-provider-web-identity": "3.678.0", - "@aws-sdk/types": "3.667.0", - "@smithy/credential-provider-imds": "^3.2.4", - "@smithy/property-provider": "^3.1.7", - "@smithy/shared-ini-file-loader": "^3.1.8", - "@smithy/types": "^3.5.0", + "@aws-sdk/types": "3.775.0", + "@smithy/protocol-http": "^5.1.0", + "@smithy/types": "^4.2.0", "tslib": "^2.6.2" }, "engines": { - "node": ">=16.0.0" + "node": ">=18.0.0" } }, - "node_modules/@aws-sdk/credential-provider-node/node_modules/@aws-sdk/types": { - "version": "3.667.0", - "resolved": "https://registry.npmjs.org/@aws-sdk/types/-/types-3.667.0.tgz", - "integrity": "sha512-gYq0xCsqFfQaSL/yT1Gl1vIUjtsg7d7RhnUfsXaHt8xTxOKRTdH9GjbesBjXOzgOvB0W0vfssfreSNGFlOOMJg==", - "dev": true, + "node_modules/@aws-sdk/nested-clients/node_modules/@aws-sdk/middleware-user-agent": { + "version": "3.799.0", + "resolved": "https://registry.npmjs.org/@aws-sdk/middleware-user-agent/-/middleware-user-agent-3.799.0.tgz", + "integrity": "sha512-TropQZanbOTxa+p+Nl4fWkzlRhgFwDfW+Wb6TR3jZN7IXHNlPpgGFpdrgvBExhW/RBhqr+94OsR8Ou58lp3hhA==", "dependencies": { - "@smithy/types": "^3.5.0", + "@aws-sdk/core": "3.799.0", + "@aws-sdk/types": "3.775.0", + "@aws-sdk/util-endpoints": "3.787.0", + "@smithy/core": 
"^3.3.0", + "@smithy/protocol-http": "^5.1.0", + "@smithy/types": "^4.2.0", "tslib": "^2.6.2" }, "engines": { - "node": ">=16.0.0" + "node": ">=18.0.0" } }, - "node_modules/@aws-sdk/credential-provider-node/node_modules/@smithy/shared-ini-file-loader": { - "version": "3.1.9", - "resolved": "https://registry.npmjs.org/@smithy/shared-ini-file-loader/-/shared-ini-file-loader-3.1.9.tgz", - "integrity": "sha512-/+OsJRNtoRbtsX0UpSgWVxFZLsJHo/4sTr+kBg/J78sr7iC+tHeOvOJrS5hCpVQ6sWBbhWLp1UNiuMyZhE6pmA==", - "dev": true, + "node_modules/@aws-sdk/nested-clients/node_modules/@aws-sdk/util-endpoints": { + "version": "3.787.0", + "resolved": "https://registry.npmjs.org/@aws-sdk/util-endpoints/-/util-endpoints-3.787.0.tgz", + "integrity": "sha512-fd3zkiOkwnbdbN0Xp9TsP5SWrmv0SpT70YEdbb8wAj2DWQwiCmFszaSs+YCvhoCdmlR3Wl9Spu0pGpSAGKeYvQ==", "dependencies": { - "@smithy/types": "^3.6.0", + "@aws-sdk/types": "3.775.0", + "@smithy/types": "^4.2.0", + "@smithy/util-endpoints": "^3.0.2", "tslib": "^2.6.2" }, "engines": { - "node": ">=16.0.0" + "node": ">=18.0.0" } }, - "node_modules/@aws-sdk/credential-provider-process": { - "version": "3.678.0", - "resolved": "https://registry.npmjs.org/@aws-sdk/credential-provider-process/-/credential-provider-process-3.678.0.tgz", - "integrity": "sha512-5TpzzHKwPOvUJig0bvTt+brtXfLPaSVLwea9re+XGrS5T6Hz65IaX2RL6uY1GQ0UVOqgwQ5nAti1WOfBoSJ5BA==", - "dev": true, + "node_modules/@aws-sdk/nested-clients/node_modules/@aws-sdk/util-user-agent-browser": { + "version": "3.775.0", + "resolved": "https://registry.npmjs.org/@aws-sdk/util-user-agent-browser/-/util-user-agent-browser-3.775.0.tgz", + "integrity": "sha512-txw2wkiJmZKVdDbscK7VBK+u+TJnRtlUjRTLei+elZg2ADhpQxfVAQl436FUeIv6AhB/oRHW6/K/EAGXUSWi0A==", "dependencies": { - "@aws-sdk/core": "3.678.0", - "@aws-sdk/types": "3.667.0", - "@smithy/property-provider": "^3.1.7", - "@smithy/shared-ini-file-loader": "^3.1.8", - "@smithy/types": "^3.5.0", + "@aws-sdk/types": "3.775.0", + "@smithy/types": "^4.2.0", + 
"bowser": "^2.11.0", + "tslib": "^2.6.2" + } + }, + "node_modules/@aws-sdk/nested-clients/node_modules/@aws-sdk/util-user-agent-node": { + "version": "3.799.0", + "resolved": "https://registry.npmjs.org/@aws-sdk/util-user-agent-node/-/util-user-agent-node-3.799.0.tgz", + "integrity": "sha512-iXBk38RbIWPF5Nq9O4AnktORAzXovSVqWYClvS1qbE7ILsnTLJbagU9HlU25O2iV5COVh1qZkwuP5NHQ2yTEyw==", + "dependencies": { + "@aws-sdk/middleware-user-agent": "3.799.0", + "@aws-sdk/types": "3.775.0", + "@smithy/node-config-provider": "^4.0.2", + "@smithy/types": "^4.2.0", "tslib": "^2.6.2" }, "engines": { - "node": ">=16.0.0" + "node": ">=18.0.0" + }, + "peerDependencies": { + "aws-crt": ">=1.0.0" + }, + "peerDependenciesMeta": { + "aws-crt": { + "optional": true + } } }, - "node_modules/@aws-sdk/credential-provider-process/node_modules/@aws-sdk/types": { - "version": "3.667.0", - "resolved": "https://registry.npmjs.org/@aws-sdk/types/-/types-3.667.0.tgz", - "integrity": "sha512-gYq0xCsqFfQaSL/yT1Gl1vIUjtsg7d7RhnUfsXaHt8xTxOKRTdH9GjbesBjXOzgOvB0W0vfssfreSNGFlOOMJg==", - "dev": true, + "node_modules/@aws-sdk/nested-clients/node_modules/@smithy/abort-controller": { + "version": "4.0.2", + "resolved": "https://registry.npmjs.org/@smithy/abort-controller/-/abort-controller-4.0.2.tgz", + "integrity": "sha512-Sl/78VDtgqKxN2+1qduaVE140XF+Xg+TafkncspwM4jFP/LHr76ZHmIY/y3V1M0mMLNk+Je6IGbzxy23RSToMw==", "dependencies": { - "@smithy/types": "^3.5.0", + "@smithy/types": "^4.2.0", "tslib": "^2.6.2" }, "engines": { - "node": ">=16.0.0" + "node": ">=18.0.0" } }, - "node_modules/@aws-sdk/credential-provider-process/node_modules/@smithy/shared-ini-file-loader": { - "version": "3.1.9", - "resolved": "https://registry.npmjs.org/@smithy/shared-ini-file-loader/-/shared-ini-file-loader-3.1.9.tgz", - "integrity": "sha512-/+OsJRNtoRbtsX0UpSgWVxFZLsJHo/4sTr+kBg/J78sr7iC+tHeOvOJrS5hCpVQ6sWBbhWLp1UNiuMyZhE6pmA==", - "dev": true, + "node_modules/@aws-sdk/nested-clients/node_modules/@smithy/config-resolver": { + 
"version": "4.1.0", + "resolved": "https://registry.npmjs.org/@smithy/config-resolver/-/config-resolver-4.1.0.tgz", + "integrity": "sha512-8smPlwhga22pwl23fM5ew4T9vfLUCeFXlcqNOCD5M5h8VmNPNUE9j6bQSuRXpDSV11L/E/SwEBQuW8hr6+nS1A==", "dependencies": { - "@smithy/types": "^3.6.0", + "@smithy/node-config-provider": "^4.0.2", + "@smithy/types": "^4.2.0", + "@smithy/util-config-provider": "^4.0.0", + "@smithy/util-middleware": "^4.0.2", "tslib": "^2.6.2" }, "engines": { - "node": ">=16.0.0" + "node": ">=18.0.0" } }, - "node_modules/@aws-sdk/credential-provider-sso": { - "version": "3.678.0", - "resolved": "https://registry.npmjs.org/@aws-sdk/credential-provider-sso/-/credential-provider-sso-3.678.0.tgz", - "integrity": "sha512-PXydLUsLYd1rkhZ7zwf0613u5sofxIEhh7C1QGP1MSY3L1jt8bu7pZIcMzubfvmaGZI5k84aHhhjQEiAJUxIMg==", - "dev": true, + "node_modules/@aws-sdk/nested-clients/node_modules/@smithy/core": { + "version": "3.3.0", + "resolved": "https://registry.npmjs.org/@smithy/core/-/core-3.3.0.tgz", + "integrity": "sha512-r6gvs5OfRq/w+9unPm7B3po4rmWaGh0CIL/OwHntGGux7+RhOOZLGuurbeMgWV6W55ZuyMTypJLeH0vn/ZRaWQ==", "dependencies": { - "@aws-sdk/client-sso": "3.678.0", - "@aws-sdk/core": "3.678.0", - "@aws-sdk/token-providers": "3.667.0", - "@aws-sdk/types": "3.667.0", - "@smithy/property-provider": "^3.1.7", - "@smithy/shared-ini-file-loader": "^3.1.8", - "@smithy/types": "^3.5.0", + "@smithy/middleware-serde": "^4.0.3", + "@smithy/protocol-http": "^5.1.0", + "@smithy/types": "^4.2.0", + "@smithy/util-body-length-browser": "^4.0.0", + "@smithy/util-middleware": "^4.0.2", + "@smithy/util-stream": "^4.2.0", + "@smithy/util-utf8": "^4.0.0", "tslib": "^2.6.2" }, "engines": { - "node": ">=16.0.0" + "node": ">=18.0.0" } }, - "node_modules/@aws-sdk/credential-provider-sso/node_modules/@aws-sdk/types": { - "version": "3.667.0", - "resolved": "https://registry.npmjs.org/@aws-sdk/types/-/types-3.667.0.tgz", - "integrity": 
"sha512-gYq0xCsqFfQaSL/yT1Gl1vIUjtsg7d7RhnUfsXaHt8xTxOKRTdH9GjbesBjXOzgOvB0W0vfssfreSNGFlOOMJg==", - "dev": true, + "node_modules/@aws-sdk/nested-clients/node_modules/@smithy/credential-provider-imds": { + "version": "4.0.2", + "resolved": "https://registry.npmjs.org/@smithy/credential-provider-imds/-/credential-provider-imds-4.0.2.tgz", + "integrity": "sha512-32lVig6jCaWBHnY+OEQ6e6Vnt5vDHaLiydGrwYMW9tPqO688hPGTYRamYJ1EptxEC2rAwJrHWmPoKRBl4iTa8w==", "dependencies": { - "@smithy/types": "^3.5.0", + "@smithy/node-config-provider": "^4.0.2", + "@smithy/property-provider": "^4.0.2", + "@smithy/types": "^4.2.0", + "@smithy/url-parser": "^4.0.2", "tslib": "^2.6.2" }, "engines": { - "node": ">=16.0.0" + "node": ">=18.0.0" } }, - "node_modules/@aws-sdk/credential-provider-sso/node_modules/@smithy/shared-ini-file-loader": { - "version": "3.1.9", - "resolved": "https://registry.npmjs.org/@smithy/shared-ini-file-loader/-/shared-ini-file-loader-3.1.9.tgz", - "integrity": "sha512-/+OsJRNtoRbtsX0UpSgWVxFZLsJHo/4sTr+kBg/J78sr7iC+tHeOvOJrS5hCpVQ6sWBbhWLp1UNiuMyZhE6pmA==", - "dev": true, + "node_modules/@aws-sdk/nested-clients/node_modules/@smithy/fetch-http-handler": { + "version": "5.0.2", + "resolved": "https://registry.npmjs.org/@smithy/fetch-http-handler/-/fetch-http-handler-5.0.2.tgz", + "integrity": "sha512-+9Dz8sakS9pe7f2cBocpJXdeVjMopUDLgZs1yWeu7h++WqSbjUYv/JAJwKwXw1HV6gq1jyWjxuyn24E2GhoEcQ==", "dependencies": { - "@smithy/types": "^3.6.0", + "@smithy/protocol-http": "^5.1.0", + "@smithy/querystring-builder": "^4.0.2", + "@smithy/types": "^4.2.0", + "@smithy/util-base64": "^4.0.0", "tslib": "^2.6.2" }, "engines": { - "node": ">=16.0.0" + "node": ">=18.0.0" } }, - "node_modules/@aws-sdk/credential-provider-web-identity": { - "version": "3.678.0", - "resolved": "https://registry.npmjs.org/@aws-sdk/credential-provider-web-identity/-/credential-provider-web-identity-3.678.0.tgz", - "integrity": 
"sha512-fcYZjTTFcef99l+BhcEAhHS4tEK1kE6Xj5Zz5lT4tFA07BkQt3d6kUKRVVfJnsbcHH4RDBUCnLhU8HPfc/kvjA==", - "dev": true, + "node_modules/@aws-sdk/nested-clients/node_modules/@smithy/hash-node": { + "version": "4.0.2", + "resolved": "https://registry.npmjs.org/@smithy/hash-node/-/hash-node-4.0.2.tgz", + "integrity": "sha512-VnTpYPnRUE7yVhWozFdlxcYknv9UN7CeOqSrMH+V877v4oqtVYuoqhIhtSjmGPvYrYnAkaM61sLMKHvxL138yg==", "dependencies": { - "@aws-sdk/core": "3.678.0", - "@aws-sdk/types": "3.667.0", - "@smithy/property-provider": "^3.1.7", - "@smithy/types": "^3.5.0", + "@smithy/types": "^4.2.0", + "@smithy/util-buffer-from": "^4.0.0", + "@smithy/util-utf8": "^4.0.0", "tslib": "^2.6.2" }, "engines": { - "node": ">=16.0.0" - }, - "peerDependencies": { - "@aws-sdk/client-sts": "^3.678.0" + "node": ">=18.0.0" } }, - "node_modules/@aws-sdk/credential-provider-web-identity/node_modules/@aws-sdk/types": { - "version": "3.667.0", - "resolved": "https://registry.npmjs.org/@aws-sdk/types/-/types-3.667.0.tgz", - "integrity": "sha512-gYq0xCsqFfQaSL/yT1Gl1vIUjtsg7d7RhnUfsXaHt8xTxOKRTdH9GjbesBjXOzgOvB0W0vfssfreSNGFlOOMJg==", - "dev": true, + "node_modules/@aws-sdk/nested-clients/node_modules/@smithy/invalid-dependency": { + "version": "4.0.2", + "resolved": "https://registry.npmjs.org/@smithy/invalid-dependency/-/invalid-dependency-4.0.2.tgz", + "integrity": "sha512-GatB4+2DTpgWPday+mnUkoumP54u/MDM/5u44KF9hIu8jF0uafZtQLcdfIKkIcUNuF/fBojpLEHZS/56JqPeXQ==", "dependencies": { - "@smithy/types": "^3.5.0", + "@smithy/types": "^4.2.0", "tslib": "^2.6.2" }, "engines": { - "node": ">=16.0.0" + "node": ">=18.0.0" } }, - "node_modules/@aws-sdk/credential-providers": { - "version": "3.678.0", - "resolved": "https://registry.npmjs.org/@aws-sdk/credential-providers/-/credential-providers-3.678.0.tgz", - "integrity": "sha512-cF6IvQI1Jf5nJrK/Q7y3yFSQ8hv6MQ1g7HmZNo1tZTkywhfB3/zKcIFe6YftQul/s6RGHotXC2fr8jDkYQFDSQ==", - "dev": true, - "dependencies": { - "@aws-sdk/client-cognito-identity": "3.678.0", - 
"@aws-sdk/client-sso": "3.678.0", - "@aws-sdk/client-sts": "3.678.0", - "@aws-sdk/core": "3.678.0", - "@aws-sdk/credential-provider-cognito-identity": "3.678.0", - "@aws-sdk/credential-provider-env": "3.678.0", - "@aws-sdk/credential-provider-http": "3.678.0", - "@aws-sdk/credential-provider-ini": "3.678.0", - "@aws-sdk/credential-provider-node": "3.678.0", - "@aws-sdk/credential-provider-process": "3.678.0", - "@aws-sdk/credential-provider-sso": "3.678.0", - "@aws-sdk/credential-provider-web-identity": "3.678.0", - "@aws-sdk/types": "3.667.0", - "@smithy/credential-provider-imds": "^3.2.4", - "@smithy/property-provider": "^3.1.7", - "@smithy/types": "^3.5.0", + "node_modules/@aws-sdk/nested-clients/node_modules/@smithy/is-array-buffer": { + "version": "4.0.0", + "resolved": "https://registry.npmjs.org/@smithy/is-array-buffer/-/is-array-buffer-4.0.0.tgz", + "integrity": "sha512-saYhF8ZZNoJDTvJBEWgeBccCg+yvp1CX+ed12yORU3NilJScfc6gfch2oVb4QgxZrGUx3/ZJlb+c/dJbyupxlw==", + "dependencies": { "tslib": "^2.6.2" }, "engines": { - "node": ">=16.0.0" + "node": ">=18.0.0" } }, - "node_modules/@aws-sdk/credential-providers/node_modules/@aws-sdk/types": { - "version": "3.667.0", - "resolved": "https://registry.npmjs.org/@aws-sdk/types/-/types-3.667.0.tgz", - "integrity": "sha512-gYq0xCsqFfQaSL/yT1Gl1vIUjtsg7d7RhnUfsXaHt8xTxOKRTdH9GjbesBjXOzgOvB0W0vfssfreSNGFlOOMJg==", - "dev": true, + "node_modules/@aws-sdk/nested-clients/node_modules/@smithy/middleware-content-length": { + "version": "4.0.2", + "resolved": "https://registry.npmjs.org/@smithy/middleware-content-length/-/middleware-content-length-4.0.2.tgz", + "integrity": "sha512-hAfEXm1zU+ELvucxqQ7I8SszwQ4znWMbNv6PLMndN83JJN41EPuS93AIyh2N+gJ6x8QFhzSO6b7q2e6oClDI8A==", "dependencies": { - "@smithy/types": "^3.5.0", + "@smithy/protocol-http": "^5.1.0", + "@smithy/types": "^4.2.0", "tslib": "^2.6.2" }, "engines": { - "node": ">=16.0.0" + "node": ">=18.0.0" } }, - "node_modules/@aws-sdk/middleware-bucket-endpoint": { - "version": 
"3.667.0", - "resolved": "https://registry.npmjs.org/@aws-sdk/middleware-bucket-endpoint/-/middleware-bucket-endpoint-3.667.0.tgz", - "integrity": "sha512-XGz4jMAkDoTyFdtLz7ZF+C05IAhCTC1PllpvTBaj821z/L0ilhbqVhrT/f2Buw8Id/K5A390csGXgusXyrFFjA==", - "dev": true, + "node_modules/@aws-sdk/nested-clients/node_modules/@smithy/middleware-endpoint": { + "version": "4.1.1", + "resolved": "https://registry.npmjs.org/@smithy/middleware-endpoint/-/middleware-endpoint-4.1.1.tgz", + "integrity": "sha512-z5RmcHxjvScL+LwEDU2mTNCOhgUs4lu5PGdF1K36IPRmUHhNFxNxgenSB7smyDiYD4vdKQ7CAZtG5cUErqib9w==", "dependencies": { - "@aws-sdk/types": "3.667.0", - "@aws-sdk/util-arn-parser": "3.568.0", - "@smithy/node-config-provider": "^3.1.8", - "@smithy/protocol-http": "^4.1.4", - "@smithy/types": "^3.5.0", - "@smithy/util-config-provider": "^3.0.0", + "@smithy/core": "^3.3.0", + "@smithy/middleware-serde": "^4.0.3", + "@smithy/node-config-provider": "^4.0.2", + "@smithy/shared-ini-file-loader": "^4.0.2", + "@smithy/types": "^4.2.0", + "@smithy/url-parser": "^4.0.2", + "@smithy/util-middleware": "^4.0.2", "tslib": "^2.6.2" }, "engines": { - "node": ">=16.0.0" + "node": ">=18.0.0" } }, - "node_modules/@aws-sdk/middleware-bucket-endpoint/node_modules/@aws-sdk/types": { - "version": "3.667.0", - "resolved": "https://registry.npmjs.org/@aws-sdk/types/-/types-3.667.0.tgz", - "integrity": "sha512-gYq0xCsqFfQaSL/yT1Gl1vIUjtsg7d7RhnUfsXaHt8xTxOKRTdH9GjbesBjXOzgOvB0W0vfssfreSNGFlOOMJg==", - "dev": true, + "node_modules/@aws-sdk/nested-clients/node_modules/@smithy/middleware-retry": { + "version": "4.1.1", + "resolved": "https://registry.npmjs.org/@smithy/middleware-retry/-/middleware-retry-4.1.1.tgz", + "integrity": "sha512-mBJOxn9aUYwcBUPQpKv9ifzrCn4EbhPUFguEZv3jB57YOMh0caS4P8HoLvUeNUI1nx4bIVH2SIbogbDfFI9DUA==", + "dependencies": { + "@smithy/node-config-provider": "^4.0.2", + "@smithy/protocol-http": "^5.1.0", + "@smithy/service-error-classification": "^4.0.2", + "@smithy/smithy-client": "^4.2.1", + 
"@smithy/types": "^4.2.0", + "@smithy/util-middleware": "^4.0.2", + "@smithy/util-retry": "^4.0.2", + "tslib": "^2.6.2", + "uuid": "^9.0.1" + }, + "engines": { + "node": ">=18.0.0" + } + }, + "node_modules/@aws-sdk/nested-clients/node_modules/@smithy/middleware-serde": { + "version": "4.0.3", + "resolved": "https://registry.npmjs.org/@smithy/middleware-serde/-/middleware-serde-4.0.3.tgz", + "integrity": "sha512-rfgDVrgLEVMmMn0BI8O+8OVr6vXzjV7HZj57l0QxslhzbvVfikZbVfBVthjLHqib4BW44QhcIgJpvebHlRaC9A==", "dependencies": { - "@smithy/types": "^3.5.0", + "@smithy/types": "^4.2.0", "tslib": "^2.6.2" }, "engines": { - "node": ">=16.0.0" + "node": ">=18.0.0" } }, - "node_modules/@aws-sdk/middleware-bucket-endpoint/node_modules/@smithy/node-config-provider": { - "version": "3.1.9", - "resolved": "https://registry.npmjs.org/@smithy/node-config-provider/-/node-config-provider-3.1.9.tgz", - "integrity": "sha512-qRHoah49QJ71eemjuS/WhUXB+mpNtwHRWQr77J/m40ewBVVwvo52kYAmb7iuaECgGTTcYxHS4Wmewfwy++ueew==", - "dev": true, + "node_modules/@aws-sdk/nested-clients/node_modules/@smithy/middleware-stack": { + "version": "4.0.2", + "resolved": "https://registry.npmjs.org/@smithy/middleware-stack/-/middleware-stack-4.0.2.tgz", + "integrity": "sha512-eSPVcuJJGVYrFYu2hEq8g8WWdJav3sdrI4o2c6z/rjnYDd3xH9j9E7deZQCzFn4QvGPouLngH3dQ+QVTxv5bOQ==", "dependencies": { - "@smithy/property-provider": "^3.1.8", - "@smithy/shared-ini-file-loader": "^3.1.9", - "@smithy/types": "^3.6.0", + "@smithy/types": "^4.2.0", "tslib": "^2.6.2" }, "engines": { - "node": ">=16.0.0" + "node": ">=18.0.0" } }, - "node_modules/@aws-sdk/middleware-bucket-endpoint/node_modules/@smithy/shared-ini-file-loader": { - "version": "3.1.9", - "resolved": "https://registry.npmjs.org/@smithy/shared-ini-file-loader/-/shared-ini-file-loader-3.1.9.tgz", - "integrity": "sha512-/+OsJRNtoRbtsX0UpSgWVxFZLsJHo/4sTr+kBg/J78sr7iC+tHeOvOJrS5hCpVQ6sWBbhWLp1UNiuMyZhE6pmA==", - "dev": true, + 
"node_modules/@aws-sdk/nested-clients/node_modules/@smithy/node-config-provider": { + "version": "4.0.2", + "resolved": "https://registry.npmjs.org/@smithy/node-config-provider/-/node-config-provider-4.0.2.tgz", + "integrity": "sha512-WgCkILRZfJwJ4Da92a6t3ozN/zcvYyJGUTmfGbgS/FkCcoCjl7G4FJaCDN1ySdvLvemnQeo25FdkyMSTSwulsw==", "dependencies": { - "@smithy/types": "^3.6.0", + "@smithy/property-provider": "^4.0.2", + "@smithy/shared-ini-file-loader": "^4.0.2", + "@smithy/types": "^4.2.0", "tslib": "^2.6.2" }, "engines": { - "node": ">=16.0.0" + "node": ">=18.0.0" } }, - "node_modules/@aws-sdk/middleware-expect-continue": { - "version": "3.667.0", - "resolved": "https://registry.npmjs.org/@aws-sdk/middleware-expect-continue/-/middleware-expect-continue-3.667.0.tgz", - "integrity": "sha512-0TiSL9S5DSG95NHGIz6qTMuV7GDKVn8tvvGSrSSZu/wXO3JaYSH0AElVpYfc4PtPRqVpEyNA7nnc7W56mMCLWQ==", - "dev": true, + "node_modules/@aws-sdk/nested-clients/node_modules/@smithy/node-http-handler": { + "version": "4.0.4", + "resolved": "https://registry.npmjs.org/@smithy/node-http-handler/-/node-http-handler-4.0.4.tgz", + "integrity": "sha512-/mdqabuAT3o/ihBGjL94PUbTSPSRJ0eeVTdgADzow0wRJ0rN4A27EOrtlK56MYiO1fDvlO3jVTCxQtQmK9dZ1g==", "dependencies": { - "@aws-sdk/types": "3.667.0", - "@smithy/protocol-http": "^4.1.4", - "@smithy/types": "^3.5.0", + "@smithy/abort-controller": "^4.0.2", + "@smithy/protocol-http": "^5.1.0", + "@smithy/querystring-builder": "^4.0.2", + "@smithy/types": "^4.2.0", "tslib": "^2.6.2" }, "engines": { - "node": ">=16.0.0" + "node": ">=18.0.0" } }, - "node_modules/@aws-sdk/middleware-expect-continue/node_modules/@aws-sdk/types": { - "version": "3.667.0", - "resolved": "https://registry.npmjs.org/@aws-sdk/types/-/types-3.667.0.tgz", - "integrity": "sha512-gYq0xCsqFfQaSL/yT1Gl1vIUjtsg7d7RhnUfsXaHt8xTxOKRTdH9GjbesBjXOzgOvB0W0vfssfreSNGFlOOMJg==", - "dev": true, + "node_modules/@aws-sdk/nested-clients/node_modules/@smithy/property-provider": { + "version": "4.0.2", + "resolved": 
"https://registry.npmjs.org/@smithy/property-provider/-/property-provider-4.0.2.tgz", + "integrity": "sha512-wNRoQC1uISOuNc2s4hkOYwYllmiyrvVXWMtq+TysNRVQaHm4yoafYQyjN/goYZS+QbYlPIbb/QRjaUZMuzwQ7A==", "dependencies": { - "@smithy/types": "^3.5.0", + "@smithy/types": "^4.2.0", "tslib": "^2.6.2" }, "engines": { - "node": ">=16.0.0" + "node": ">=18.0.0" } }, - "node_modules/@aws-sdk/middleware-flexible-checksums": { - "version": "3.678.0", - "resolved": "https://registry.npmjs.org/@aws-sdk/middleware-flexible-checksums/-/middleware-flexible-checksums-3.678.0.tgz", - "integrity": "sha512-IyWWXVvG4IJ9vkagTF8wkNtybKU5SWYIQ1BRDiCmoDyLPOpogNOBVnn10RX9FW7J7BMAUFgtx6N1uMQ8MitDiA==", - "dev": true, + "node_modules/@aws-sdk/nested-clients/node_modules/@smithy/protocol-http": { + "version": "5.1.0", + "resolved": "https://registry.npmjs.org/@smithy/protocol-http/-/protocol-http-5.1.0.tgz", + "integrity": "sha512-KxAOL1nUNw2JTYrtviRRjEnykIDhxc84qMBzxvu1MUfQfHTuBlCG7PA6EdVwqpJjH7glw7FqQoFxUJSyBQgu7g==", "dependencies": { - "@aws-crypto/crc32": "5.2.0", - "@aws-crypto/crc32c": "5.2.0", - "@aws-sdk/core": "3.678.0", - "@aws-sdk/types": "3.667.0", - "@smithy/is-array-buffer": "^3.0.0", - "@smithy/node-config-provider": "^3.1.8", - "@smithy/protocol-http": "^4.1.4", - "@smithy/types": "^3.5.0", - "@smithy/util-middleware": "^3.0.7", - "@smithy/util-utf8": "^3.0.0", + "@smithy/types": "^4.2.0", "tslib": "^2.6.2" }, "engines": { - "node": ">=16.0.0" + "node": ">=18.0.0" } }, - "node_modules/@aws-sdk/middleware-flexible-checksums/node_modules/@aws-sdk/types": { - "version": "3.667.0", - "resolved": "https://registry.npmjs.org/@aws-sdk/types/-/types-3.667.0.tgz", - "integrity": "sha512-gYq0xCsqFfQaSL/yT1Gl1vIUjtsg7d7RhnUfsXaHt8xTxOKRTdH9GjbesBjXOzgOvB0W0vfssfreSNGFlOOMJg==", - "dev": true, + "node_modules/@aws-sdk/nested-clients/node_modules/@smithy/querystring-builder": { + "version": "4.0.2", + "resolved": 
"https://registry.npmjs.org/@smithy/querystring-builder/-/querystring-builder-4.0.2.tgz", + "integrity": "sha512-NTOs0FwHw1vimmQM4ebh+wFQvOwkEf/kQL6bSM1Lock+Bv4I89B3hGYoUEPkmvYPkDKyp5UdXJYu+PoTQ3T31Q==", "dependencies": { - "@smithy/types": "^3.5.0", + "@smithy/types": "^4.2.0", + "@smithy/util-uri-escape": "^4.0.0", "tslib": "^2.6.2" }, "engines": { - "node": ">=16.0.0" + "node": ">=18.0.0" } }, - "node_modules/@aws-sdk/middleware-flexible-checksums/node_modules/@smithy/node-config-provider": { - "version": "3.1.9", - "resolved": "https://registry.npmjs.org/@smithy/node-config-provider/-/node-config-provider-3.1.9.tgz", - "integrity": "sha512-qRHoah49QJ71eemjuS/WhUXB+mpNtwHRWQr77J/m40ewBVVwvo52kYAmb7iuaECgGTTcYxHS4Wmewfwy++ueew==", - "dev": true, + "node_modules/@aws-sdk/nested-clients/node_modules/@smithy/querystring-parser": { + "version": "4.0.2", + "resolved": "https://registry.npmjs.org/@smithy/querystring-parser/-/querystring-parser-4.0.2.tgz", + "integrity": "sha512-v6w8wnmZcVXjfVLjxw8qF7OwESD9wnpjp0Dqry/Pod0/5vcEA3qxCr+BhbOHlxS8O+29eLpT3aagxXGwIoEk7Q==", "dependencies": { - "@smithy/property-provider": "^3.1.8", - "@smithy/shared-ini-file-loader": "^3.1.9", - "@smithy/types": "^3.6.0", + "@smithy/types": "^4.2.0", "tslib": "^2.6.2" }, "engines": { - "node": ">=16.0.0" + "node": ">=18.0.0" } }, - "node_modules/@aws-sdk/middleware-flexible-checksums/node_modules/@smithy/shared-ini-file-loader": { - "version": "3.1.9", - "resolved": "https://registry.npmjs.org/@smithy/shared-ini-file-loader/-/shared-ini-file-loader-3.1.9.tgz", - "integrity": "sha512-/+OsJRNtoRbtsX0UpSgWVxFZLsJHo/4sTr+kBg/J78sr7iC+tHeOvOJrS5hCpVQ6sWBbhWLp1UNiuMyZhE6pmA==", - "dev": true, + "node_modules/@aws-sdk/nested-clients/node_modules/@smithy/service-error-classification": { + "version": "4.0.2", + "resolved": "https://registry.npmjs.org/@smithy/service-error-classification/-/service-error-classification-4.0.2.tgz", + "integrity": 
"sha512-LA86xeFpTKn270Hbkixqs5n73S+LVM0/VZco8dqd+JT75Dyx3Lcw/MraL7ybjmz786+160K8rPOmhsq0SocoJQ==", "dependencies": { - "@smithy/types": "^3.6.0", - "tslib": "^2.6.2" + "@smithy/types": "^4.2.0" }, "engines": { - "node": ">=16.0.0" + "node": ">=18.0.0" } }, - "node_modules/@aws-sdk/middleware-flexible-checksums/node_modules/@smithy/util-utf8": { - "version": "3.0.0", - "resolved": "https://registry.npmjs.org/@smithy/util-utf8/-/util-utf8-3.0.0.tgz", - "integrity": "sha512-rUeT12bxFnplYDe815GXbq/oixEGHfRFFtcTF3YdDi/JaENIM6aSYYLJydG83UNzLXeRI5K8abYd/8Sp/QM0kA==", - "dev": true, + "node_modules/@aws-sdk/nested-clients/node_modules/@smithy/shared-ini-file-loader": { + "version": "4.0.2", + "resolved": "https://registry.npmjs.org/@smithy/shared-ini-file-loader/-/shared-ini-file-loader-4.0.2.tgz", + "integrity": "sha512-J9/gTWBGVuFZ01oVA6vdb4DAjf1XbDhK6sLsu3OS9qmLrS6KB5ygpeHiM3miIbj1qgSJ96GYszXFWv6ErJ8QEw==", "dependencies": { - "@smithy/util-buffer-from": "^3.0.0", + "@smithy/types": "^4.2.0", "tslib": "^2.6.2" }, "engines": { - "node": ">=16.0.0" + "node": ">=18.0.0" } }, - "node_modules/@aws-sdk/middleware-host-header": { - "version": "3.667.0", - "resolved": "https://registry.npmjs.org/@aws-sdk/middleware-host-header/-/middleware-host-header-3.667.0.tgz", - "integrity": "sha512-Z7fIAMQnPegs7JjAQvlOeWXwpMRfegh5eCoIP6VLJIeR6DLfYKbP35JBtt98R6DXslrN2RsbTogjbxPEDQfw1w==", - "dev": true, + "node_modules/@aws-sdk/nested-clients/node_modules/@smithy/signature-v4": { + "version": "5.1.0", + "resolved": "https://registry.npmjs.org/@smithy/signature-v4/-/signature-v4-5.1.0.tgz", + "integrity": "sha512-4t5WX60sL3zGJF/CtZsUQTs3UrZEDO2P7pEaElrekbLqkWPYkgqNW1oeiNYC6xXifBnT9dVBOnNQRvOE9riU9w==", "dependencies": { - "@aws-sdk/types": "3.667.0", - "@smithy/protocol-http": "^4.1.4", - "@smithy/types": "^3.5.0", + "@smithy/is-array-buffer": "^4.0.0", + "@smithy/protocol-http": "^5.1.0", + "@smithy/types": "^4.2.0", + "@smithy/util-hex-encoding": "^4.0.0", + "@smithy/util-middleware": 
"^4.0.2", + "@smithy/util-uri-escape": "^4.0.0", + "@smithy/util-utf8": "^4.0.0", "tslib": "^2.6.2" }, "engines": { - "node": ">=16.0.0" + "node": ">=18.0.0" } }, - "node_modules/@aws-sdk/middleware-host-header/node_modules/@aws-sdk/types": { - "version": "3.667.0", - "resolved": "https://registry.npmjs.org/@aws-sdk/types/-/types-3.667.0.tgz", - "integrity": "sha512-gYq0xCsqFfQaSL/yT1Gl1vIUjtsg7d7RhnUfsXaHt8xTxOKRTdH9GjbesBjXOzgOvB0W0vfssfreSNGFlOOMJg==", - "dev": true, + "node_modules/@aws-sdk/nested-clients/node_modules/@smithy/smithy-client": { + "version": "4.2.1", + "resolved": "https://registry.npmjs.org/@smithy/smithy-client/-/smithy-client-4.2.1.tgz", + "integrity": "sha512-fbniZef60QdsBc4ZY0iyI8xbFHIiC/QRtPi66iE4ufjiE/aaz7AfUXzcWMkpO8r+QhLeNRIfmPchIG+3/QDZ6g==", "dependencies": { - "@smithy/types": "^3.5.0", + "@smithy/core": "^3.3.0", + "@smithy/middleware-endpoint": "^4.1.1", + "@smithy/middleware-stack": "^4.0.2", + "@smithy/protocol-http": "^5.1.0", + "@smithy/types": "^4.2.0", + "@smithy/util-stream": "^4.2.0", "tslib": "^2.6.2" }, "engines": { - "node": ">=16.0.0" + "node": ">=18.0.0" } }, - "node_modules/@aws-sdk/middleware-location-constraint": { - "version": "3.667.0", - "resolved": "https://registry.npmjs.org/@aws-sdk/middleware-location-constraint/-/middleware-location-constraint-3.667.0.tgz", - "integrity": "sha512-ob85H3HhT3/u5O+x0o557xGZ78vSNeSSwMaSitxdsfs2hOuoUl1uk+OeLpi1hkuJnL41FPpokV7TVII2XrFfmg==", - "dev": true, + "node_modules/@aws-sdk/nested-clients/node_modules/@smithy/types": { + "version": "4.2.0", + "resolved": "https://registry.npmjs.org/@smithy/types/-/types-4.2.0.tgz", + "integrity": "sha512-7eMk09zQKCO+E/ivsjQv+fDlOupcFUCSC/L2YUPgwhvowVGWbPQHjEFcmjt7QQ4ra5lyowS92SV53Zc6XD4+fg==", "dependencies": { - "@aws-sdk/types": "3.667.0", - "@smithy/types": "^3.5.0", "tslib": "^2.6.2" }, "engines": { - "node": ">=16.0.0" + "node": ">=18.0.0" } }, - "node_modules/@aws-sdk/middleware-location-constraint/node_modules/@aws-sdk/types": { - 
"version": "3.667.0", - "resolved": "https://registry.npmjs.org/@aws-sdk/types/-/types-3.667.0.tgz", - "integrity": "sha512-gYq0xCsqFfQaSL/yT1Gl1vIUjtsg7d7RhnUfsXaHt8xTxOKRTdH9GjbesBjXOzgOvB0W0vfssfreSNGFlOOMJg==", - "dev": true, + "node_modules/@aws-sdk/nested-clients/node_modules/@smithy/url-parser": { + "version": "4.0.2", + "resolved": "https://registry.npmjs.org/@smithy/url-parser/-/url-parser-4.0.2.tgz", + "integrity": "sha512-Bm8n3j2ScqnT+kJaClSVCMeiSenK6jVAzZCNewsYWuZtnBehEz4r2qP0riZySZVfzB+03XZHJeqfmJDkeeSLiQ==", "dependencies": { - "@smithy/types": "^3.5.0", + "@smithy/querystring-parser": "^4.0.2", + "@smithy/types": "^4.2.0", "tslib": "^2.6.2" }, "engines": { - "node": ">=16.0.0" + "node": ">=18.0.0" } }, - "node_modules/@aws-sdk/middleware-logger": { - "version": "3.667.0", - "resolved": "https://registry.npmjs.org/@aws-sdk/middleware-logger/-/middleware-logger-3.667.0.tgz", - "integrity": "sha512-PtTRNpNm/5c746jRgZCNg4X9xEJIwggkGJrF0GP9AB1ANg4pc/sF2Fvn1NtqPe9wtQ2stunJprnm5WkCHN7QiA==", - "dev": true, + "node_modules/@aws-sdk/nested-clients/node_modules/@smithy/util-base64": { + "version": "4.0.0", + "resolved": "https://registry.npmjs.org/@smithy/util-base64/-/util-base64-4.0.0.tgz", + "integrity": "sha512-CvHfCmO2mchox9kjrtzoHkWHxjHZzaFojLc8quxXY7WAAMAg43nuxwv95tATVgQFNDwd4M9S1qFzj40Ul41Kmg==", "dependencies": { - "@aws-sdk/types": "3.667.0", - "@smithy/types": "^3.5.0", + "@smithy/util-buffer-from": "^4.0.0", + "@smithy/util-utf8": "^4.0.0", "tslib": "^2.6.2" }, "engines": { - "node": ">=16.0.0" + "node": ">=18.0.0" } }, - "node_modules/@aws-sdk/middleware-logger/node_modules/@aws-sdk/types": { - "version": "3.667.0", - "resolved": "https://registry.npmjs.org/@aws-sdk/types/-/types-3.667.0.tgz", - "integrity": "sha512-gYq0xCsqFfQaSL/yT1Gl1vIUjtsg7d7RhnUfsXaHt8xTxOKRTdH9GjbesBjXOzgOvB0W0vfssfreSNGFlOOMJg==", - "dev": true, + "node_modules/@aws-sdk/nested-clients/node_modules/@smithy/util-body-length-browser": { + "version": "4.0.0", + "resolved": 
"https://registry.npmjs.org/@smithy/util-body-length-browser/-/util-body-length-browser-4.0.0.tgz", + "integrity": "sha512-sNi3DL0/k64/LO3A256M+m3CDdG6V7WKWHdAiBBMUN8S3hK3aMPhwnPik2A/a2ONN+9doY9UxaLfgqsIRg69QA==", "dependencies": { - "@smithy/types": "^3.5.0", "tslib": "^2.6.2" }, "engines": { - "node": ">=16.0.0" + "node": ">=18.0.0" } }, - "node_modules/@aws-sdk/middleware-recursion-detection": { - "version": "3.667.0", - "resolved": "https://registry.npmjs.org/@aws-sdk/middleware-recursion-detection/-/middleware-recursion-detection-3.667.0.tgz", - "integrity": "sha512-U5glWD3ehFohzpUpopLtmqAlDurGWo2wRGPNgi4SwhWU7UDt6LS7E/UvJjqC0CUrjlzOw+my2A+Ncf+fisMhxQ==", - "dev": true, + "node_modules/@aws-sdk/nested-clients/node_modules/@smithy/util-body-length-node": { + "version": "4.0.0", + "resolved": "https://registry.npmjs.org/@smithy/util-body-length-node/-/util-body-length-node-4.0.0.tgz", + "integrity": "sha512-q0iDP3VsZzqJyje8xJWEJCNIu3lktUGVoSy1KB0UWym2CL1siV3artm+u1DFYTLejpsrdGyCSWBdGNjJzfDPjg==", "dependencies": { - "@aws-sdk/types": "3.667.0", - "@smithy/protocol-http": "^4.1.4", - "@smithy/types": "^3.5.0", "tslib": "^2.6.2" }, "engines": { - "node": ">=16.0.0" + "node": ">=18.0.0" } }, - "node_modules/@aws-sdk/middleware-recursion-detection/node_modules/@aws-sdk/types": { - "version": "3.667.0", - "resolved": "https://registry.npmjs.org/@aws-sdk/types/-/types-3.667.0.tgz", - "integrity": "sha512-gYq0xCsqFfQaSL/yT1Gl1vIUjtsg7d7RhnUfsXaHt8xTxOKRTdH9GjbesBjXOzgOvB0W0vfssfreSNGFlOOMJg==", - "dev": true, + "node_modules/@aws-sdk/nested-clients/node_modules/@smithy/util-buffer-from": { + "version": "4.0.0", + "resolved": "https://registry.npmjs.org/@smithy/util-buffer-from/-/util-buffer-from-4.0.0.tgz", + "integrity": "sha512-9TOQ7781sZvddgO8nxueKi3+yGvkY35kotA0Y6BWRajAv8jjmigQ1sBwz0UX47pQMYXJPahSKEKYFgt+rXdcug==", "dependencies": { - "@smithy/types": "^3.5.0", + "@smithy/is-array-buffer": "^4.0.0", "tslib": "^2.6.2" }, "engines": { - "node": ">=16.0.0" + "node": 
">=18.0.0" } }, - "node_modules/@aws-sdk/middleware-sdk-ec2": { - "version": "3.622.0", - "resolved": "https://registry.npmjs.org/@aws-sdk/middleware-sdk-ec2/-/middleware-sdk-ec2-3.622.0.tgz", - "integrity": "sha512-rVShV+eB1vovLuvlzUEFuxZB4yxSMFzyP+VNIoFxtSZh0LWh7+7bNLwp1I9Vq3SxHLMVYQevjm7nkiPM0DG+RQ==", - "dev": true, + "node_modules/@aws-sdk/nested-clients/node_modules/@smithy/util-config-provider": { + "version": "4.0.0", + "resolved": "https://registry.npmjs.org/@smithy/util-config-provider/-/util-config-provider-4.0.0.tgz", + "integrity": "sha512-L1RBVzLyfE8OXH+1hsJ8p+acNUSirQnWQ6/EgpchV88G6zGBTDPdXiiExei6Z1wR2RxYvxY/XLw6AMNCCt8H3w==", "dependencies": { - "@aws-sdk/types": "3.609.0", - "@aws-sdk/util-format-url": "3.609.0", - "@smithy/middleware-endpoint": "^3.1.0", - "@smithy/protocol-http": "^4.1.0", - "@smithy/signature-v4": "^4.1.0", - "@smithy/smithy-client": "^3.1.12", - "@smithy/types": "^3.3.0", "tslib": "^2.6.2" }, "engines": { - "node": ">=16.0.0" + "node": ">=18.0.0" } }, - "node_modules/@aws-sdk/middleware-sdk-ec2/node_modules/@aws-sdk/types": { - "version": "3.609.0", - "resolved": "https://registry.npmjs.org/@aws-sdk/types/-/types-3.609.0.tgz", - "integrity": "sha512-+Tqnh9w0h2LcrUsdXyT1F8mNhXz+tVYBtP19LpeEGntmvHwa2XzvLUCWpoIAIVsHp5+HdB2X9Sn0KAtmbFXc2Q==", - "dev": true, + "node_modules/@aws-sdk/nested-clients/node_modules/@smithy/util-defaults-mode-browser": { + "version": "4.0.9", + "resolved": "https://registry.npmjs.org/@smithy/util-defaults-mode-browser/-/util-defaults-mode-browser-4.0.9.tgz", + "integrity": "sha512-B8j0XsElvyhv6+5hlFf6vFV/uCSyLKcInpeXOGnOImX2mGXshE01RvPoGipTlRpIk53e6UfYj7WdDdgbVfXDZw==", "dependencies": { - "@smithy/types": "^3.3.0", + "@smithy/property-provider": "^4.0.2", + "@smithy/smithy-client": "^4.2.1", + "@smithy/types": "^4.2.0", + "bowser": "^2.11.0", "tslib": "^2.6.2" }, "engines": { - "node": ">=16.0.0" + "node": ">=18.0.0" } }, - "node_modules/@aws-sdk/middleware-sdk-rds": { - "version": "3.620.0", - 
"resolved": "https://registry.npmjs.org/@aws-sdk/middleware-sdk-rds/-/middleware-sdk-rds-3.620.0.tgz", - "integrity": "sha512-pokuq3rMJfn8ZNUIhAKn0c1nQtvClPLzh5h1fOXAeRXmNjp+YPXQ4CIsGRcqDNO8lkUyyfV42WnPCdUZmR9zAA==", - "dev": true, + "node_modules/@aws-sdk/nested-clients/node_modules/@smithy/util-defaults-mode-node": { + "version": "4.0.9", + "resolved": "https://registry.npmjs.org/@smithy/util-defaults-mode-node/-/util-defaults-mode-node-4.0.9.tgz", + "integrity": "sha512-wTDU8P/zdIf9DOpV5qm64HVgGRXvqjqB/fJZTEQbrz3s79JHM/E7XkMm/876Oq+ZLHJQgnXM9QHDo29dlM62eA==", "dependencies": { - "@aws-sdk/types": "3.609.0", - "@aws-sdk/util-format-url": "3.609.0", - "@smithy/middleware-endpoint": "^3.1.0", - "@smithy/protocol-http": "^4.1.0", - "@smithy/signature-v4": "^4.1.0", - "@smithy/types": "^3.3.0", + "@smithy/config-resolver": "^4.1.0", + "@smithy/credential-provider-imds": "^4.0.2", + "@smithy/node-config-provider": "^4.0.2", + "@smithy/property-provider": "^4.0.2", + "@smithy/smithy-client": "^4.2.1", + "@smithy/types": "^4.2.0", + "tslib": "^2.6.2" + }, + "engines": { + "node": ">=18.0.0" + } + }, + "node_modules/@aws-sdk/nested-clients/node_modules/@smithy/util-endpoints": { + "version": "3.0.2", + "resolved": "https://registry.npmjs.org/@smithy/util-endpoints/-/util-endpoints-3.0.2.tgz", + "integrity": "sha512-6QSutU5ZyrpNbnd51zRTL7goojlcnuOB55+F9VBD+j8JpRY50IGamsjlycrmpn8PQkmJucFW8A0LSfXj7jjtLQ==", + "dependencies": { + "@smithy/node-config-provider": "^4.0.2", + "@smithy/types": "^4.2.0", "tslib": "^2.6.2" }, "engines": { - "node": ">=16.0.0" + "node": ">=18.0.0" } }, - "node_modules/@aws-sdk/middleware-sdk-rds/node_modules/@aws-sdk/types": { - "version": "3.609.0", - "resolved": "https://registry.npmjs.org/@aws-sdk/types/-/types-3.609.0.tgz", - "integrity": "sha512-+Tqnh9w0h2LcrUsdXyT1F8mNhXz+tVYBtP19LpeEGntmvHwa2XzvLUCWpoIAIVsHp5+HdB2X9Sn0KAtmbFXc2Q==", - "dev": true, + "node_modules/@aws-sdk/nested-clients/node_modules/@smithy/util-hex-encoding": { + "version": 
"4.0.0", + "resolved": "https://registry.npmjs.org/@smithy/util-hex-encoding/-/util-hex-encoding-4.0.0.tgz", + "integrity": "sha512-Yk5mLhHtfIgW2W2WQZWSg5kuMZCVbvhFmC7rV4IO2QqnZdbEFPmQnCcGMAX2z/8Qj3B9hYYNjZOhWym+RwhePw==", "dependencies": { - "@smithy/types": "^3.3.0", "tslib": "^2.6.2" }, "engines": { - "node": ">=16.0.0" + "node": ">=18.0.0" } }, - "node_modules/@aws-sdk/middleware-sdk-s3": { - "version": "3.678.0", - "resolved": "https://registry.npmjs.org/@aws-sdk/middleware-sdk-s3/-/middleware-sdk-s3-3.678.0.tgz", - "integrity": "sha512-AT4oKh4kPGWG+Ews9M/KYB/TdSvRJLxhVvrVXFxStm9OgeNksxsHH02gnyEOfmGX08ouNRyeaIsqG9RsvXz6Gg==", - "dev": true, + "node_modules/@aws-sdk/nested-clients/node_modules/@smithy/util-middleware": { + "version": "4.0.2", + "resolved": "https://registry.npmjs.org/@smithy/util-middleware/-/util-middleware-4.0.2.tgz", + "integrity": "sha512-6GDamTGLuBQVAEuQ4yDQ+ti/YINf/MEmIegrEeg7DdB/sld8BX1lqt9RRuIcABOhAGTA50bRbPzErez7SlDtDQ==", "dependencies": { - "@aws-sdk/core": "3.678.0", - "@aws-sdk/types": "3.667.0", - "@aws-sdk/util-arn-parser": "3.568.0", - "@smithy/core": "^2.4.8", - "@smithy/node-config-provider": "^3.1.8", - "@smithy/protocol-http": "^4.1.4", - "@smithy/signature-v4": "^4.2.0", - "@smithy/smithy-client": "^3.4.0", - "@smithy/types": "^3.5.0", - "@smithy/util-config-provider": "^3.0.0", - "@smithy/util-middleware": "^3.0.7", - "@smithy/util-stream": "^3.1.9", - "@smithy/util-utf8": "^3.0.0", + "@smithy/types": "^4.2.0", "tslib": "^2.6.2" }, "engines": { - "node": ">=16.0.0" + "node": ">=18.0.0" } }, - "node_modules/@aws-sdk/middleware-sdk-s3/node_modules/@aws-sdk/types": { - "version": "3.667.0", - "resolved": "https://registry.npmjs.org/@aws-sdk/types/-/types-3.667.0.tgz", - "integrity": "sha512-gYq0xCsqFfQaSL/yT1Gl1vIUjtsg7d7RhnUfsXaHt8xTxOKRTdH9GjbesBjXOzgOvB0W0vfssfreSNGFlOOMJg==", - "dev": true, + "node_modules/@aws-sdk/nested-clients/node_modules/@smithy/util-retry": { + "version": "4.0.2", + "resolved": 
"https://registry.npmjs.org/@smithy/util-retry/-/util-retry-4.0.2.tgz", + "integrity": "sha512-Qryc+QG+7BCpvjloFLQrmlSd0RsVRHejRXd78jNO3+oREueCjwG1CCEH1vduw/ZkM1U9TztwIKVIi3+8MJScGg==", "dependencies": { - "@smithy/types": "^3.5.0", + "@smithy/service-error-classification": "^4.0.2", + "@smithy/types": "^4.2.0", "tslib": "^2.6.2" }, "engines": { - "node": ">=16.0.0" + "node": ">=18.0.0" } }, - "node_modules/@aws-sdk/middleware-sdk-s3/node_modules/@smithy/node-config-provider": { - "version": "3.1.9", - "resolved": "https://registry.npmjs.org/@smithy/node-config-provider/-/node-config-provider-3.1.9.tgz", - "integrity": "sha512-qRHoah49QJ71eemjuS/WhUXB+mpNtwHRWQr77J/m40ewBVVwvo52kYAmb7iuaECgGTTcYxHS4Wmewfwy++ueew==", - "dev": true, + "node_modules/@aws-sdk/nested-clients/node_modules/@smithy/util-stream": { + "version": "4.2.0", + "resolved": "https://registry.npmjs.org/@smithy/util-stream/-/util-stream-4.2.0.tgz", + "integrity": "sha512-Vj1TtwWnuWqdgQI6YTUF5hQ/0jmFiOYsc51CSMgj7QfyO+RF4EnT2HNjoviNlOOmgzgvf3f5yno+EiC4vrnaWQ==", "dependencies": { - "@smithy/property-provider": "^3.1.8", - "@smithy/shared-ini-file-loader": "^3.1.9", - "@smithy/types": "^3.6.0", + "@smithy/fetch-http-handler": "^5.0.2", + "@smithy/node-http-handler": "^4.0.4", + "@smithy/types": "^4.2.0", + "@smithy/util-base64": "^4.0.0", + "@smithy/util-buffer-from": "^4.0.0", + "@smithy/util-hex-encoding": "^4.0.0", + "@smithy/util-utf8": "^4.0.0", "tslib": "^2.6.2" }, "engines": { - "node": ">=16.0.0" + "node": ">=18.0.0" } }, - "node_modules/@aws-sdk/middleware-sdk-s3/node_modules/@smithy/shared-ini-file-loader": { - "version": "3.1.9", - "resolved": "https://registry.npmjs.org/@smithy/shared-ini-file-loader/-/shared-ini-file-loader-3.1.9.tgz", - "integrity": "sha512-/+OsJRNtoRbtsX0UpSgWVxFZLsJHo/4sTr+kBg/J78sr7iC+tHeOvOJrS5hCpVQ6sWBbhWLp1UNiuMyZhE6pmA==", - "dev": true, + "node_modules/@aws-sdk/nested-clients/node_modules/@smithy/util-uri-escape": { + "version": "4.0.0", + "resolved": 
"https://registry.npmjs.org/@smithy/util-uri-escape/-/util-uri-escape-4.0.0.tgz", + "integrity": "sha512-77yfbCbQMtgtTylO9itEAdpPXSog3ZxMe09AEhm0dU0NLTalV70ghDZFR+Nfi1C60jnJoh/Re4090/DuZh2Omg==", "dependencies": { - "@smithy/types": "^3.6.0", "tslib": "^2.6.2" }, "engines": { - "node": ">=16.0.0" + "node": ">=18.0.0" } }, - "node_modules/@aws-sdk/middleware-sdk-s3/node_modules/@smithy/util-utf8": { - "version": "3.0.0", - "resolved": "https://registry.npmjs.org/@smithy/util-utf8/-/util-utf8-3.0.0.tgz", - "integrity": "sha512-rUeT12bxFnplYDe815GXbq/oixEGHfRFFtcTF3YdDi/JaENIM6aSYYLJydG83UNzLXeRI5K8abYd/8Sp/QM0kA==", - "dev": true, + "node_modules/@aws-sdk/nested-clients/node_modules/@smithy/util-utf8": { + "version": "4.0.0", + "resolved": "https://registry.npmjs.org/@smithy/util-utf8/-/util-utf8-4.0.0.tgz", + "integrity": "sha512-b+zebfKCfRdgNJDknHCob3O7FpeYQN6ZG6YLExMcasDHsCXlsXCEuiPZeLnJLpwa5dvPetGlnGCiMHuLwGvFow==", "dependencies": { - "@smithy/util-buffer-from": "^3.0.0", + "@smithy/util-buffer-from": "^4.0.0", "tslib": "^2.6.2" }, "engines": { - "node": ">=16.0.0" + "node": ">=18.0.0" } }, - "node_modules/@aws-sdk/middleware-ssec": { - "version": "3.667.0", - "resolved": "https://registry.npmjs.org/@aws-sdk/middleware-ssec/-/middleware-ssec-3.667.0.tgz", - "integrity": "sha512-1wuAUZIkmZIvOmGg5qNQU821CGFHhkuKioxXgNh0DpUxZ9+AeiV7yorJr+bqkb2KBFv1i1TnzGRecvKf/KvZIQ==", - "dev": true, + "node_modules/@aws-sdk/region-config-resolver": { + "version": "3.775.0", + "resolved": "https://registry.npmjs.org/@aws-sdk/region-config-resolver/-/region-config-resolver-3.775.0.tgz", + "integrity": "sha512-40iH3LJjrQS3LKUJAl7Wj0bln7RFPEvUYKFxtP8a+oKFDO0F65F52xZxIJbPn6sHkxWDAnZlGgdjZXM3p2g5wQ==", "dependencies": { - "@aws-sdk/types": "3.667.0", - "@smithy/types": "^3.5.0", + "@aws-sdk/types": "3.775.0", + "@smithy/node-config-provider": "^4.0.2", + "@smithy/types": "^4.2.0", + "@smithy/util-config-provider": "^4.0.0", + "@smithy/util-middleware": "^4.0.2", "tslib": "^2.6.2" }, 
"engines": { - "node": ">=16.0.0" + "node": ">=18.0.0" } }, - "node_modules/@aws-sdk/middleware-ssec/node_modules/@aws-sdk/types": { - "version": "3.667.0", - "resolved": "https://registry.npmjs.org/@aws-sdk/types/-/types-3.667.0.tgz", - "integrity": "sha512-gYq0xCsqFfQaSL/yT1Gl1vIUjtsg7d7RhnUfsXaHt8xTxOKRTdH9GjbesBjXOzgOvB0W0vfssfreSNGFlOOMJg==", - "dev": true, + "node_modules/@aws-sdk/region-config-resolver/node_modules/@smithy/node-config-provider": { + "version": "4.0.2", + "resolved": "https://registry.npmjs.org/@smithy/node-config-provider/-/node-config-provider-4.0.2.tgz", + "integrity": "sha512-WgCkILRZfJwJ4Da92a6t3ozN/zcvYyJGUTmfGbgS/FkCcoCjl7G4FJaCDN1ySdvLvemnQeo25FdkyMSTSwulsw==", "dependencies": { - "@smithy/types": "^3.5.0", + "@smithy/property-provider": "^4.0.2", + "@smithy/shared-ini-file-loader": "^4.0.2", + "@smithy/types": "^4.2.0", "tslib": "^2.6.2" }, "engines": { - "node": ">=16.0.0" + "node": ">=18.0.0" } }, - "node_modules/@aws-sdk/middleware-user-agent": { - "version": "3.678.0", - "resolved": "https://registry.npmjs.org/@aws-sdk/middleware-user-agent/-/middleware-user-agent-3.678.0.tgz", - "integrity": "sha512-tg9cC5COgGP0cznD2ys9kxPtVeKUygPZshDWXLAfA/cH/4m2ZUBvoEVv1SxkIbvOjnPwa976rdPLQUwRZvsL0g==", - "dev": true, + "node_modules/@aws-sdk/region-config-resolver/node_modules/@smithy/property-provider": { + "version": "4.0.2", + "resolved": "https://registry.npmjs.org/@smithy/property-provider/-/property-provider-4.0.2.tgz", + "integrity": "sha512-wNRoQC1uISOuNc2s4hkOYwYllmiyrvVXWMtq+TysNRVQaHm4yoafYQyjN/goYZS+QbYlPIbb/QRjaUZMuzwQ7A==", "dependencies": { - "@aws-sdk/core": "3.678.0", - "@aws-sdk/types": "3.667.0", - "@aws-sdk/util-endpoints": "3.667.0", - "@smithy/core": "^2.4.8", - "@smithy/protocol-http": "^4.1.4", - "@smithy/types": "^3.5.0", + "@smithy/types": "^4.2.0", "tslib": "^2.6.2" }, "engines": { - "node": ">=16.0.0" + "node": ">=18.0.0" } }, - "node_modules/@aws-sdk/middleware-user-agent/node_modules/@aws-sdk/types": { - 
"version": "3.667.0", - "resolved": "https://registry.npmjs.org/@aws-sdk/types/-/types-3.667.0.tgz", - "integrity": "sha512-gYq0xCsqFfQaSL/yT1Gl1vIUjtsg7d7RhnUfsXaHt8xTxOKRTdH9GjbesBjXOzgOvB0W0vfssfreSNGFlOOMJg==", - "dev": true, + "node_modules/@aws-sdk/region-config-resolver/node_modules/@smithy/shared-ini-file-loader": { + "version": "4.0.2", + "resolved": "https://registry.npmjs.org/@smithy/shared-ini-file-loader/-/shared-ini-file-loader-4.0.2.tgz", + "integrity": "sha512-J9/gTWBGVuFZ01oVA6vdb4DAjf1XbDhK6sLsu3OS9qmLrS6KB5ygpeHiM3miIbj1qgSJ96GYszXFWv6ErJ8QEw==", "dependencies": { - "@smithy/types": "^3.5.0", + "@smithy/types": "^4.2.0", "tslib": "^2.6.2" }, "engines": { - "node": ">=16.0.0" + "node": ">=18.0.0" } }, - "node_modules/@aws-sdk/region-config-resolver": { - "version": "3.664.0", - "resolved": "https://registry.npmjs.org/@aws-sdk/region-config-resolver/-/region-config-resolver-3.664.0.tgz", - "integrity": "sha512-o/B8dg8K+9714RGYPgMxZgAChPe/MTSMkf/eHXTUFHNik5i1HgVKfac22njV2iictGy/6GhpFsKa1OWNYAkcUg==", - "dev": true, + "node_modules/@aws-sdk/region-config-resolver/node_modules/@smithy/types": { + "version": "4.2.0", + "resolved": "https://registry.npmjs.org/@smithy/types/-/types-4.2.0.tgz", + "integrity": "sha512-7eMk09zQKCO+E/ivsjQv+fDlOupcFUCSC/L2YUPgwhvowVGWbPQHjEFcmjt7QQ4ra5lyowS92SV53Zc6XD4+fg==", "dependencies": { - "@aws-sdk/types": "3.664.0", - "@smithy/node-config-provider": "^3.1.8", - "@smithy/types": "^3.5.0", - "@smithy/util-config-provider": "^3.0.0", - "@smithy/util-middleware": "^3.0.7", "tslib": "^2.6.2" }, "engines": { - "node": ">=16.0.0" + "node": ">=18.0.0" } }, - "node_modules/@aws-sdk/region-config-resolver/node_modules/@smithy/node-config-provider": { - "version": "3.1.9", - "resolved": "https://registry.npmjs.org/@smithy/node-config-provider/-/node-config-provider-3.1.9.tgz", - "integrity": "sha512-qRHoah49QJ71eemjuS/WhUXB+mpNtwHRWQr77J/m40ewBVVwvo52kYAmb7iuaECgGTTcYxHS4Wmewfwy++ueew==", - "dev": true, + 
"node_modules/@aws-sdk/region-config-resolver/node_modules/@smithy/util-config-provider": { + "version": "4.0.0", + "resolved": "https://registry.npmjs.org/@smithy/util-config-provider/-/util-config-provider-4.0.0.tgz", + "integrity": "sha512-L1RBVzLyfE8OXH+1hsJ8p+acNUSirQnWQ6/EgpchV88G6zGBTDPdXiiExei6Z1wR2RxYvxY/XLw6AMNCCt8H3w==", "dependencies": { - "@smithy/property-provider": "^3.1.8", - "@smithy/shared-ini-file-loader": "^3.1.9", - "@smithy/types": "^3.6.0", "tslib": "^2.6.2" }, "engines": { - "node": ">=16.0.0" + "node": ">=18.0.0" } }, - "node_modules/@aws-sdk/region-config-resolver/node_modules/@smithy/shared-ini-file-loader": { - "version": "3.1.9", - "resolved": "https://registry.npmjs.org/@smithy/shared-ini-file-loader/-/shared-ini-file-loader-3.1.9.tgz", - "integrity": "sha512-/+OsJRNtoRbtsX0UpSgWVxFZLsJHo/4sTr+kBg/J78sr7iC+tHeOvOJrS5hCpVQ6sWBbhWLp1UNiuMyZhE6pmA==", - "dev": true, + "node_modules/@aws-sdk/region-config-resolver/node_modules/@smithy/util-middleware": { + "version": "4.0.2", + "resolved": "https://registry.npmjs.org/@smithy/util-middleware/-/util-middleware-4.0.2.tgz", + "integrity": "sha512-6GDamTGLuBQVAEuQ4yDQ+ti/YINf/MEmIegrEeg7DdB/sld8BX1lqt9RRuIcABOhAGTA50bRbPzErez7SlDtDQ==", "dependencies": { - "@smithy/types": "^3.6.0", + "@smithy/types": "^4.2.0", "tslib": "^2.6.2" }, "engines": { - "node": ">=16.0.0" + "node": ">=18.0.0" } }, "node_modules/@aws-sdk/signature-v4-multi-region": { @@ -14177,15 +16313,26 @@ } }, "node_modules/@aws-sdk/types": { - "version": "3.664.0", - "resolved": "https://registry.npmjs.org/@aws-sdk/types/-/types-3.664.0.tgz", - "integrity": "sha512-+GtXktvVgpreM2b+NJL9OqZGsOzHwlCUrO8jgQUvH/yA6Kd8QO2YFhQCp0C9sSzTteZJVqGBu8E0CQurxJHPbw==", + "version": "3.775.0", + "resolved": "https://registry.npmjs.org/@aws-sdk/types/-/types-3.775.0.tgz", + "integrity": "sha512-ZoGKwa4C9fC9Av6bdfqcW6Ix5ot05F/S4VxWR2nHuMv7hzfmAjTOcUiWT7UR4hM/U0whf84VhDtXN/DWAk52KA==", "dependencies": { - "@smithy/types": "^3.5.0", + 
"@smithy/types": "^4.2.0", "tslib": "^2.6.2" }, "engines": { - "node": ">=16.0.0" + "node": ">=18.0.0" + } + }, + "node_modules/@aws-sdk/types/node_modules/@smithy/types": { + "version": "4.2.0", + "resolved": "https://registry.npmjs.org/@smithy/types/-/types-4.2.0.tgz", + "integrity": "sha512-7eMk09zQKCO+E/ivsjQv+fDlOupcFUCSC/L2YUPgwhvowVGWbPQHjEFcmjt7QQ4ra5lyowS92SV53Zc6XD4+fg==", + "dependencies": { + "tslib": "^2.6.2" + }, + "engines": { + "node": ">=18.0.0" } }, "node_modules/@aws-sdk/util-arn-parser": { @@ -14200,6 +16347,20 @@ "node": ">=16.0.0" } }, + "node_modules/@aws-sdk/util-dynamodb": { + "version": "3.799.0", + "resolved": "https://registry.npmjs.org/@aws-sdk/util-dynamodb/-/util-dynamodb-3.799.0.tgz", + "integrity": "sha512-QC48umXlDlm5dG6GghAb6Itc8x4uLc4+8R3tdBexa6TkIVs4m8kHhTAn5q/5ZyL8//cWSJZJxiAy2vcgHA8LDQ==", + "dependencies": { + "tslib": "^2.6.2" + }, + "engines": { + "node": ">=18.0.0" + }, + "peerDependencies": { + "@aws-sdk/client-dynamodb": "^3.799.0" + } + }, "node_modules/@aws-sdk/util-endpoints": { "version": "3.667.0", "resolved": "https://registry.npmjs.org/@aws-sdk/util-endpoints/-/util-endpoints-3.667.0.tgz", @@ -15424,6 +17585,30 @@ "to-fast-properties": "^2.0.0" } }, + "node_modules/@cspotcode/source-map-support": { + "version": "0.8.1", + "resolved": "https://registry.npmjs.org/@cspotcode/source-map-support/-/source-map-support-0.8.1.tgz", + "integrity": "sha512-IchNf6dN4tHoMFIn/7OE8LWZ19Y6q/67Bmf6vnGREv8RSbBVb9LPJxEcnwrcwX6ixSvaiGoomAUvu4YSxXrVgw==", + "dev": true, + "license": "MIT", + "dependencies": { + "@jridgewell/trace-mapping": "0.3.9" + }, + "engines": { + "node": ">=12" + } + }, + "node_modules/@cspotcode/source-map-support/node_modules/@jridgewell/trace-mapping": { + "version": "0.3.9", + "resolved": "https://registry.npmjs.org/@jridgewell/trace-mapping/-/trace-mapping-0.3.9.tgz", + "integrity": "sha512-3Belt6tdc8bPgAtbcmdtNJlirVoTmEb5e2gC94PnkwEW9jI6CAHUeoG85tjWP5WquqfavoMtMwiG4P926ZKKuQ==", + "dev": true, + "license": 
"MIT", + "dependencies": { + "@jridgewell/resolve-uri": "^3.0.3", + "@jridgewell/sourcemap-codec": "^1.4.10" + } + }, "node_modules/@esbuild/aix-ppc64": { "version": "0.23.1", "resolved": "https://registry.npmjs.org/@esbuild/aix-ppc64/-/aix-ppc64-0.23.1.tgz", @@ -18641,6 +20826,34 @@ "tslib": "^2.4.0" } }, + "node_modules/@tsconfig/node10": { + "version": "1.0.11", + "resolved": "https://registry.npmjs.org/@tsconfig/node10/-/node10-1.0.11.tgz", + "integrity": "sha512-DcRjDCujK/kCk/cUe8Xz8ZSpm8mS3mNNpta+jGCA6USEDfktlNvm1+IuZ9eTcDbNk41BHwpHHeW+N1lKCz4zOw==", + "dev": true, + "license": "MIT" + }, + "node_modules/@tsconfig/node12": { + "version": "1.0.11", + "resolved": "https://registry.npmjs.org/@tsconfig/node12/-/node12-1.0.11.tgz", + "integrity": "sha512-cqefuRsh12pWyGsIoBKJA9luFu3mRxCA+ORZvA4ktLSzIuCUtWVxGIuXigEwO5/ywWFMZ2QEGKWvkZG1zDMTag==", + "dev": true, + "license": "MIT" + }, + "node_modules/@tsconfig/node14": { + "version": "1.0.3", + "resolved": "https://registry.npmjs.org/@tsconfig/node14/-/node14-1.0.3.tgz", + "integrity": "sha512-ysT8mhdixWK6Hw3i1V2AeRqZ5WfXg1G43mqoYlM2nc6388Fq5jcXyr5mRsqViLx/GJYdoL0bfXD8nmF+Zn/Iow==", + "dev": true, + "license": "MIT" + }, + "node_modules/@tsconfig/node16": { + "version": "1.0.4", + "resolved": "https://registry.npmjs.org/@tsconfig/node16/-/node16-1.0.4.tgz", + "integrity": "sha512-vxhUy4J8lyeyinH7Azl1pdd43GJhZH/tP2weN8TntQblOY+A0XbT8DJk1/oCPuOOyg/Ja757rG0CgHcWC8OfMA==", + "dev": true, + "license": "MIT" + }, "node_modules/@types/aws-lambda": { "version": "8.10.145", "resolved": "https://registry.npmjs.org/@types/aws-lambda/-/aws-lambda-8.10.145.tgz", @@ -18770,6 +20983,32 @@ } } }, + "node_modules/acorn": { + "version": "8.14.1", + "resolved": "https://registry.npmjs.org/acorn/-/acorn-8.14.1.tgz", + "integrity": "sha512-OvQ/2pUDKmgfCg++xsTX1wGxfTaszcHVcTctW4UJB4hibJx2HXxxO5UmVgyjMa+ZDsiaf5wWLXYpRWMmBI0QHg==", + "dev": true, + "license": "MIT", + "bin": { + "acorn": "bin/acorn" + }, + "engines": { + "node": ">=0.4.0" + 
} + }, + "node_modules/acorn-walk": { + "version": "8.3.4", + "resolved": "https://registry.npmjs.org/acorn-walk/-/acorn-walk-8.3.4.tgz", + "integrity": "sha512-ueEepnujpqee2o5aIYnvHU6C0A42MNdsIDeqy5BydrkuC5R1ZuUFnm27EeFJGoEHJQgn3uleRvmTXaJgfXbt4g==", + "dev": true, + "license": "MIT", + "dependencies": { + "acorn": "^8.11.0" + }, + "engines": { + "node": ">=0.4.0" + } + }, "node_modules/ansi-escapes": { "version": "4.3.2", "resolved": "https://registry.npmjs.org/ansi-escapes/-/ansi-escapes-4.3.2.tgz", @@ -18807,6 +21046,13 @@ "url": "https://github.com/chalk/ansi-styles?sponsor=1" } }, + "node_modules/arg": { + "version": "4.1.3", + "resolved": "https://registry.npmjs.org/arg/-/arg-4.1.3.tgz", + "integrity": "sha512-58S9QDqG0Xx27YwPSt9fJxivjYl432YCwfDMfZ+71RAqUrZef7LrKQZ3LHLOwCS4FLNBplP533Zx895SeOCHvA==", + "dev": true, + "license": "MIT" + }, "node_modules/aria-hidden": { "version": "1.2.4", "resolved": "https://registry.npmjs.org/aria-hidden/-/aria-hidden-1.2.4.tgz", @@ -19960,6 +22206,13 @@ "url": "https://opencollective.com/core-js" } }, + "node_modules/create-require": { + "version": "1.1.1", + "resolved": "https://registry.npmjs.org/create-require/-/create-require-1.1.1.tgz", + "integrity": "sha512-dcKFX3jn0MpIaXjisoRvexIJVEKzaq7z2rZKxf+MSr9TkdmHmsU4m2lcLojrj/FHl8mk5VxMmYA+ftRkP/3oKQ==", + "dev": true, + "license": "MIT" + }, "node_modules/cross-fetch": { "version": "3.1.8", "resolved": "https://registry.npmjs.org/cross-fetch/-/cross-fetch-3.1.8.tgz", @@ -20315,6 +22568,16 @@ "resolved": "https://registry.npmjs.org/detect-node-es/-/detect-node-es-1.1.0.tgz", "integrity": "sha512-ypdmJU/TbBby2Dxibuv7ZLW3Bs1QEmM7nHjEANfohJLvE0XVujisn1qPJcZxg+qDucsr+bP6fLD1rPS3AhJ7EQ==" }, + "node_modules/diff": { + "version": "4.0.2", + "resolved": "https://registry.npmjs.org/diff/-/diff-4.0.2.tgz", + "integrity": "sha512-58lmxKSA4BNyLz+HHMUzlOEpg09FV+ev6ZMe3vJihgdxzgcwZ8VoEEPmALCZG9LmqfVoNMMKpttIYTVG6uDY7A==", + "dev": true, + "license": "BSD-3-Clause", + "engines": { + 
"node": ">=0.3.1" + } + }, "node_modules/dijkstrajs": { "version": "1.0.3", "resolved": "https://registry.npmjs.org/dijkstrajs/-/dijkstrajs-1.0.3.tgz", @@ -22145,6 +24408,13 @@ "yallist": "^3.0.2" } }, + "node_modules/make-error": { + "version": "1.3.6", + "resolved": "https://registry.npmjs.org/make-error/-/make-error-1.3.6.tgz", + "integrity": "sha512-s8UhlNe7vPKomQhC1qFelMokr/Sc3AgNbso3n74mVPA5LTZwkB9NlXf4XPamLxJE8h0gh73rM94xvwRT2CVInw==", + "dev": true, + "license": "ISC" + }, "node_modules/map-cache": { "version": "0.2.2", "resolved": "https://registry.npmjs.org/map-cache/-/map-cache-0.2.2.tgz", @@ -22247,6 +24517,14 @@ "node": ">=16 || 14 >=14.17" } }, + "node_modules/mnemonist": { + "version": "0.38.3", + "resolved": "https://registry.npmjs.org/mnemonist/-/mnemonist-0.38.3.tgz", + "integrity": "sha512-2K9QYubXx/NAjv4VLq1d1Ly8pWNC5L3BrixtdkyTegXWJIqY+zLNDhhX/A+ZwWt70tB1S8H4BE8FLYEFyNoOBw==", + "dependencies": { + "obliterator": "^1.6.1" + } + }, "node_modules/ms": { "version": "2.1.3", "resolved": "https://registry.npmjs.org/ms/-/ms-2.1.3.tgz", @@ -22556,6 +24834,11 @@ "url": "https://github.com/sponsors/ljharb" } }, + "node_modules/obliterator": { + "version": "1.6.1", + "resolved": "https://registry.npmjs.org/obliterator/-/obliterator-1.6.1.tgz", + "integrity": "sha512-9WXswnqINnnhOG/5SLimUlzuU1hFJUc8zkwyD59Sd+dPOMf05PmnYG/d6Q7HZ+KmgkZJa1PxRso6QdM3sTNHig==" + }, "node_modules/once": { "version": "1.4.0", "resolved": "https://registry.npmjs.org/once/-/once-1.4.0.tgz", @@ -24231,6 +26514,50 @@ "node": ">=6.10" } }, + "node_modules/ts-node": { + "version": "10.9.2", + "resolved": "https://registry.npmjs.org/ts-node/-/ts-node-10.9.2.tgz", + "integrity": "sha512-f0FFpIdcHgn8zcPSbf1dRevwt047YMnaiJM3u2w2RewrB+fob/zePZcrOyQoLMMO7aBIddLcQIEK5dYjkLnGrQ==", + "dev": true, + "license": "MIT", + "dependencies": { + "@cspotcode/source-map-support": "^0.8.0", + "@tsconfig/node10": "^1.0.7", + "@tsconfig/node12": "^1.0.7", + "@tsconfig/node14": "^1.0.0", + 
"@tsconfig/node16": "^1.0.2", + "acorn": "^8.4.1", + "acorn-walk": "^8.1.1", + "arg": "^4.1.0", + "create-require": "^1.1.0", + "diff": "^4.0.1", + "make-error": "^1.1.1", + "v8-compile-cache-lib": "^3.0.1", + "yn": "3.1.1" + }, + "bin": { + "ts-node": "dist/bin.js", + "ts-node-cwd": "dist/bin-cwd.js", + "ts-node-esm": "dist/bin-esm.js", + "ts-node-script": "dist/bin-script.js", + "ts-node-transpile-only": "dist/bin-transpile.js", + "ts-script": "dist/bin-script-deprecated.js" + }, + "peerDependencies": { + "@swc/core": ">=1.2.50", + "@swc/wasm": ">=1.2.50", + "@types/node": "*", + "typescript": ">=2.7" + }, + "peerDependenciesMeta": { + "@swc/core": { + "optional": true + }, + "@swc/wasm": { + "optional": true + } + } + }, "node_modules/tslib": { "version": "2.8.0", "resolved": "https://registry.npmjs.org/tslib/-/tslib-2.8.0.tgz", @@ -24355,10 +26682,11 @@ } }, "node_modules/typescript": { - "version": "5.6.3", - "resolved": "https://registry.npmjs.org/typescript/-/typescript-5.6.3.tgz", - "integrity": "sha512-hjcS1mhfuyi4WW8IWtjP7brDrG2cuDZukyrYrSauoXGNgx0S7zceP07adYkJycEr56BOUTNPzbInooiN3fn1qw==", + "version": "5.8.3", + "resolved": "https://registry.npmjs.org/typescript/-/typescript-5.8.3.tgz", + "integrity": "sha512-p1diW6TqL9L07nNxvRMM7hMMw4c5XOo/1ibL4aAIGmSAt9slTE1Xgw5KWuof2uTOvCg9BY7ZRi+GaF+7sfgPeQ==", "dev": true, + "license": "Apache-2.0", "bin": { "tsc": "bin/tsc", "tsserver": "bin/tsserver" @@ -24590,6 +26918,13 @@ "uuid": "dist/bin/uuid" } }, + "node_modules/v8-compile-cache-lib": { + "version": "3.0.1", + "resolved": "https://registry.npmjs.org/v8-compile-cache-lib/-/v8-compile-cache-lib-3.0.1.tgz", + "integrity": "sha512-wa7YjyUGfNZngI/vtK0UHAN+lgDCxBPCylVXGp0zu59Fz5aiGtNXaq3DhIov063MorB+VfufLh3JlF2KdTK3xg==", + "dev": true, + "license": "MIT" + }, "node_modules/value-or-promise": { "version": "1.0.12", "resolved": "https://registry.npmjs.org/value-or-promise/-/value-or-promise-1.0.12.tgz", @@ -24849,6 +27184,16 @@ "node": ">=8" } }, + 
"node_modules/yn": { + "version": "3.1.1", + "resolved": "https://registry.npmjs.org/yn/-/yn-3.1.1.tgz", + "integrity": "sha512-Ux4ygGWsu2c7isFWe8Yu1YluJmqVhxqK2cLXNQA5AcC3QfbGNpM7fu0Y8b/z16pXLnFxZYvWhd3fhBY9DLmC6Q==", + "dev": true, + "license": "MIT", + "engines": { + "node": ">=6" + } + }, "node_modules/yup": { "version": "0.32.11", "resolved": "https://registry.npmjs.org/yup/-/yup-0.32.11.tgz", diff --git a/package.json b/package.json index 58d317b..7876e2a 100644 --- a/package.json +++ b/package.json @@ -10,6 +10,8 @@ }, "dependencies": { "@aws-amplify/ui-react": "^6.5.5", + "@aws-sdk/client-dynamodb": "^3.799.0", + "@aws-sdk/lib-dynamodb": "^3.799.0", "aws-amplify": "^6.6.6", "bootstrap": "^5.3.5", "next": "14.2.10", @@ -26,7 +28,8 @@ "aws-cdk-lib": "^2", "constructs": "^10.3.0", "esbuild": "^0.23.1", + "ts-node": "^10.9.2", "tsx": "^4.19.0", - "typescript": "^5.6.2" + "typescript": "^5.8.3" } }